diff --git a/.github/actions/create-cluster/action.yml b/.github/actions/create-cluster/action.yml new file mode 100644 index 00000000000..6939c9f03c7 --- /dev/null +++ b/.github/actions/create-cluster/action.yml @@ -0,0 +1,22 @@ +name: "Set up KFP on KinD" +description: "Step to start and configure KFP on Kind" + +inputs: + k8s_version: + description: "The Kubernetes version to use for the Kind cluster" + required: true + cluster_name: + description: "Provide kind cluster name if you want to name it other than kfp" + required: false + default: 'kfp' + +runs: + using: "composite" + steps: + - name: Create k8s Kind Cluster + uses: container-tools/kind-action@v2 + with: + cluster_name: ${{ inputs.cluster_name }} + kubectl_version: ${{ inputs.k8s_version }} + version: v0.25.0 + node_image: kindest/node:${{ inputs.k8s_version }} diff --git a/.github/actions/deploy/action.yml b/.github/actions/deploy/action.yml new file mode 100644 index 00000000000..6c3b5d95e52 --- /dev/null +++ b/.github/actions/deploy/action.yml @@ -0,0 +1,117 @@ +name: "Set up KFP on KinD" +description: "Step to start and configure KFP on Kind" + +inputs: + pipeline_store: + description: "Flag to deploy KFP with K8s Native API" + default: 'database' + required: false + proxy: + description: "If KFP should be deployed with proxy configuration" + required: false + default: 'false' + cache_enabled: + description: "If KFP should be deployed with cache enabled globally" + required: false + default: 'true' + image_tag: + required: true + description: "Provide the image tag your image was tagged with" + image_path: + required: true + description: "Path within github artifacts where your image tarball is stored" + image_registry: + required: true + description: "Image Registry address of the images" + multi_user: + description: "If KFP should be deployed in multi-user mode" + required: false + default: 'false' + storage_backend: + description: "Storage backend to use (minio or seaweedfs)" + required: false + default: 'seaweedfs' + argo_version: + required: false + description: "Argo version to use for the cluster" + forward_port: + required: false + default: 'true' + description: "If you want to forward API server port to localhost:8888" + +runs: + using: "composite" + steps: + - name: Deploy Squid + id: deploy-squid + if: ${{ inputs.proxy == 'true' }} + shell: bash + run: ./.github/resources/squid/deploy-squid.sh + + - name: Download Docker Images + uses: actions/download-artifact@v4 + with: + path: "images_${{ github.sha }}" + + - name: Load Docker Images + shell: bash + run: | + APPS=("apiserver" "driver" "launcher" "scheduledworkflow" "persistenceagent" "frontend") + for app in "${APPS[@]}"; do + docker image load -i ${{ inputs.image_path }}/$app/$app.tar + docker push ${{ inputs.image_registry }}/$app:${{ inputs.image_tag }} + rm ${{ inputs.image_path }}/$app/$app.tar + docker image rm ${{ inputs.image_registry }}/$app:${{ inputs.image_tag }} + done + + - name: Configure Args + shell: bash + id: configure + run: | + ARGS="" + + if [ "${{ inputs.proxy }}" = "true" ]; then + echo "Enabling Proxy" + ARGS="${ARGS} --proxy" + fi + + if [ "${{inputs.cache_enabled }}" = "false" ]; then + echo "Disabling Cache" + ARGS="${ARGS} --cache-disabled" + fi + + if [ "${{inputs.pipeline_store }}" = "kubernetes" ]; then + echo "Deploying in native K8s API Mode" + ARGS="${ARGS} --deploy-k8s-native" + fi + + if [ "${{ inputs.multi_user }}" = "true" ]; then + echo "Deploying in Multi User Mode" + ARGS="${ARGS} --multi-user" + fi + + if [ "${{ 
inputs.storage_backend }}" != "seaweedfs" ] && [ -n "${{ inputs.storage_backend }}" ]; then + echo "Deploying with artifact storage ${{ inputs.storage_backend }}" + ARGS="${ARGS} --storage ${{ inputs.storage_backend }}" + fi + + if [ -n "${{ inputs.argo_version }}" ]; then + echo "Deploying with argo version ${{ inputs.argo_version }}" + ARGS="${ARGS} --argo-version ${{ inputs.argo_version }}" + fi + echo "ARGS=$ARGS" >> $GITHUB_OUTPUT + + - name: Deploy KFP + id: deploy-kfp + if: ${{ steps.configure.outcome == 'success' }} + uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 + with: + max_attempts: 2 + timeout_minutes: 15 + command: ./.github/resources/scripts/deploy-kfp.sh ${{ steps.configure.outputs.ARGS }} + + - name: Forward API port + id: forward-api-port + shell: bash + if: ${{ steps.deploy-kfp.outcome == 'success' && inputs.forward_port == 'true'}} + run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888 diff --git a/.github/actions/kfp-cluster/action.yml b/.github/actions/kfp-cluster/action.yml deleted file mode 100644 index 3055a6823e2..00000000000 --- a/.github/actions/kfp-cluster/action.yml +++ /dev/null @@ -1,60 +0,0 @@ -name: "Set up KFP on KinD" -description: "Step to start and configure KFP on KinD" - -inputs: - k8s_version: - description: "The Kubernetes version to use for the Kind cluster" - required: true - pipeline_store: - description: "Flag to deploy KFP with K8s Native API" - default: 'database' - required: false - proxy: - description: "If KFP should be deployed with proxy configuration" - required: false - default: false - cache_enabled: - description: "If KFP should be deployed with cache enabled globally" - required: false - default: 'true' - -runs: - using: "composite" - steps: - - name: Create k8s Kind Cluster - uses: container-tools/kind-action@v2 - with: - cluster_name: kfp - kubectl_version: ${{ inputs.k8s_version }} - version: v0.25.0 - node_image: kindest/node:${{ inputs.k8s_version }} - - - name: Deploy Squid - id: deploy-squid - if: ${{ inputs.proxy == 'true' }} - shell: bash - run: ./.github/resources/squid/deploy-squid.sh - - - name: Build images - shell: bash - run: | - if [ "${{ inputs.proxy }}" = "true" ]; then - ./.github/resources/scripts/build-images.sh --proxy - else - ./.github/resources/scripts/build-images.sh - fi - - - name: Deploy KFP - shell: bash - run: | - ARGS="" - - if [ "${{ inputs.proxy }}" = "true" ]; then - ARGS="${ARGS} --proxy" - elif [ "${{inputs.cache_enabled }}" = "false" ]; then - ARGS="${ARGS} --cache-disabled" - elif [ "${{inputs.pipeline_store }}" = "kubernetes" ]; then - ARGS="${ARGS} --deploy-k8s-native" - fi - - ./.github/resources/scripts/deploy-kfp.sh $ARGS diff --git a/.github/actions/kfp-k8s/action.yml b/.github/actions/kfp-k8s/action.yml new file mode 100644 index 00000000000..5b830b10ce1 --- /dev/null +++ b/.github/actions/kfp-k8s/action.yml @@ -0,0 +1,47 @@ +name: "Install kfp & kfp-kubernetes" +inputs: + build_version: + required: true + default: "1.2.2" + description: "build package version" + generate_golang_proto: + required: true + default: "false" + description: "optionally generate golang proto files" +runs: + using: "composite" + steps: + - name: Install build tool + shell: bash + run: pip install build==${{inputs.build_version}} + + - name: Build kfp dist + id: install-kfp + shell: bash + working-directory: sdk/python + run: | + python -m build . 
+ + - name: Generate kfp-kubernetes python proto files from source + id: generate-kfp-kubernetes-proto-files + shell: bash + if: ${{ steps.install-kfp.outcome == 'success' }} + working-directory: ./kubernetes_platform + run: make clean python USE_FIND_LINKS=true + + - name: Generate kfp-kubernetes golang proto files from source + id: generate-kfp-kubernetes-go-proto-files + shell: bash + if: ${{ steps.install-kfp.outcome == 'success' && inputs.generate_golang_proto == 'true' }} + working-directory: ./kubernetes_platform + run: make golang + + # kfp is installed transitively + # --find-links ensures pip first looks in the sdk/python/dist folder + # outputted from generate-kfp-kubernetes-proto-files step before looking at pypi + - name: Install kfp & kfp-kubernetes from source + id: install-kfp-kubernetes + shell: bash + if: ${{ steps.generate-kfp-kubernetes-proto-files.outcome == 'success' }} + run: | + pip install -e ./kubernetes_platform/python[dev] --find-links=sdk/python/dist diff --git a/.github/actions/protobuf/action.yml b/.github/actions/protobuf/action.yml new file mode 100644 index 00000000000..0dfee9c5bf8 --- /dev/null +++ b/.github/actions/protobuf/action.yml @@ -0,0 +1,81 @@ +name: "Install Proto dependencies & Pipeline Spec" +description: | + This action pins various Proto generation packages to default versions and + installs these dependencies in the workflow environment. It will also + install the kfp-pipeline-spec. Whenever KFP project updates generation + packages, the defaults here must be updated. +inputs: + protoc_version: + required: true + default: "31.1" + description: "protoc version" + protobuf_python_version: + required: true + default: "6.31.1" + description: "protobuf python package version" + setuptools_version: + required: true + default: "80.9.0" + description: "setuptools python package version" + wheels_version: + required: true + default: "0.42.0" + description: "wheels python package version" + generate_golang_proto: + required: true + default: "false" + description: "optionally generate golang proto files" +runs: + using: "composite" + steps: + - name: Install protoc + shell: bash + run: | + PROTOC_ZIP=protoc-${{inputs.protoc_version}}-linux-x86_64.zip + curl -sSL -O https://github.com/protocolbuffers/protobuf/releases/download/v${{inputs.protoc_version}}/$PROTOC_ZIP + sudo unzip -o $PROTOC_ZIP -d /usr/local bin/protoc + sudo unzip -o $PROTOC_ZIP -d /usr/local 'include/*' + rm $PROTOC_ZIP + protoc --version + + - name: Install setuptools + shell: bash + run: | + pip3 install setuptools==${{inputs.setuptools_version}} + pip3 freeze + + - name: Install Wheel + shell: bash + run: pip3 install wheel==${{inputs.wheels_version}} + - name: Install protobuf + shell: bash + run: pip3 install protobuf==${{inputs.protobuf_python_version}} + - name: Generate API proto files + working-directory: ./api + shell: bash + run: make clean python + - name: Install kfp-pipeline-spec from source + shell: bash + run: | + pip install api/v2alpha1/python/dist/*.whl + + - name: Generate kfp-pipeline-spec golang files + if: ${{ inputs.generate_golang_proto == 'true' }} + working-directory: ./api + shell: bash + run: | + make golang + + - name: Summary + shell: bash + run: | + cat <> $GITHUB_OUTPUT + + - name: Upload HTML Report + id: upload + uses: actions/upload-artifact@v4 + if: (!cancelled()) + with: + name: ${{ steps.name_gen.outputs.REPORT_NAME }} + path: ${{ inputs.test_directory }}/reports/test-report.html + retention-days: 30 + continue-on-error: true + + - name: Mark 
Workflow failure if test step failed + if: steps.run-tests.outcome != 'success' && !cancelled() + shell: bash + run: exit 1 + + - name: Mark Workflow failure if test reporting failed + if: (steps.publish.outcome == 'failure' || steps.upload.outcome != 'success') && !cancelled() + shell: bash + run: exit 1 diff --git a/.github/resources/manifests/argo/overlays/cache-disabled/cache-env.yaml b/.github/resources/manifests/argo/overlays/cache-disabled/cache-env.yaml deleted file mode 100644 index fca8be4e28c..00000000000 --- a/.github/resources/manifests/argo/overlays/cache-disabled/cache-env.yaml +++ /dev/null @@ -1,12 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ml-pipeline -spec: - template: - spec: - containers: - - name: ml-pipeline-api-server - env: - - name: CACHEENABLED - value: "false" diff --git a/.github/resources/manifests/argo/overlays/cache-disabled/kustomization.yaml b/.github/resources/manifests/argo/overlays/cache-disabled/kustomization.yaml deleted file mode 100644 index 8aab002dca5..00000000000 --- a/.github/resources/manifests/argo/overlays/cache-disabled/kustomization.yaml +++ /dev/null @@ -1,11 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization - -resources: - - ../no-proxy - -patches: - - path: cache-env.yaml - target: - kind: Deployment - name: ml-pipeline diff --git a/.github/resources/manifests/argo/overlays/kubernetes-native/apiserver-env.yaml b/.github/resources/manifests/argo/overlays/kubernetes-native/apiserver-env.yaml deleted file mode 100644 index 1aefd4aa43d..00000000000 --- a/.github/resources/manifests/argo/overlays/kubernetes-native/apiserver-env.yaml +++ /dev/null @@ -1,14 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ml-pipeline -spec: - template: - spec: - containers: - - name: ml-pipeline-api-server - env: - - name: V2_DRIVER_IMAGE - value: kind-registry:5000/driver - - name: V2_LAUNCHER_IMAGE - value: kind-registry:5000/launcher diff --git a/.github/resources/manifests/argo/overlays/kubernetes-native/kustomization.yaml b/.github/resources/manifests/argo/overlays/kubernetes-native/kustomization.yaml deleted file mode 100644 index f429a11a21c..00000000000 --- a/.github/resources/manifests/argo/overlays/kubernetes-native/kustomization.yaml +++ /dev/null @@ -1,19 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization - -resources: -- ../../../../../../manifests/kustomize/env/cert-manager/platform-agnostic-k8s-native - -images: -- name: ghcr.io/kubeflow/kfp-api-server - newName: kind-registry:5000/apiserver - newTag: latest -- name: ghcr.io/kubeflow/kfp-persistence-agent - newName: kind-registry:5000/persistenceagent - newTag: latest -- name: ghcr.io/kubeflow/kfp-scheduled-workflow-controller - newName: kind-registry:5000/scheduledworkflow - newTag: latest - -patchesStrategicMerge: -- apiserver-env.yaml diff --git a/.github/resources/manifests/argo/overlays/no-proxy/kustomization.yaml b/.github/resources/manifests/argo/overlays/no-proxy/kustomization.yaml deleted file mode 100644 index 10189e25bb2..00000000000 --- a/.github/resources/manifests/argo/overlays/no-proxy/kustomization.yaml +++ /dev/null @@ -1,20 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization - -resources: -- ../../../../../../manifests/kustomize/env/platform-agnostic - -images: -- name: ghcr.io/kubeflow/kfp-api-server - newName: kind-registry:5000/apiserver - newTag: latest -- name: ghcr.io/kubeflow/kfp-persistence-agent - newName: kind-registry:5000/persistenceagent - newTag: latest -- 
name: ghcr.io/kubeflow/kfp-scheduled-workflow-controller - newName: kind-registry:5000/scheduledworkflow - newTag: latest - -patches: -- path: apiserver-env.yaml -- path: workflow-disable-logs-patch.yaml diff --git a/.github/resources/manifests/argo/overlays/no-proxy/workflow-disable-logs-patch.yaml b/.github/resources/manifests/argo/overlays/no-proxy/workflow-disable-logs-patch.yaml deleted file mode 100644 index 623bbe9621f..00000000000 --- a/.github/resources/manifests/argo/overlays/no-proxy/workflow-disable-logs-patch.yaml +++ /dev/null @@ -1,20 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: workflow-controller-configmap -data: - artifactRepository: | - archiveLogs: false - s3: - endpoint: "minio-service.$(kfp-namespace):9000" - bucket: "$(kfp-artifact-bucket-name)" - keyFormat: "artifacts/{{workflow.name}}/{{workflow.creationTimestamp.Y}}/{{workflow.creationTimestamp.m}}/{{workflow.creationTimestamp.d}}/{{pod.name}}" - insecure: true - accessKeySecret: - name: mlpipeline-minio-artifact - key: accesskey - secretKeySecret: - name: mlpipeline-minio-artifact - key: secretkey - executor: | - imagePullPolicy: IfNotPresent diff --git a/.github/resources/manifests/argo/overlays/proxy/kustomization.yaml b/.github/resources/manifests/argo/overlays/proxy/kustomization.yaml deleted file mode 100644 index 155489f4063..00000000000 --- a/.github/resources/manifests/argo/overlays/proxy/kustomization.yaml +++ /dev/null @@ -1,11 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization - -resources: - - ../no-proxy - -patches: - - path: proxy-env.yaml - target: - kind: Deployment - name: ml-pipeline diff --git a/.github/resources/manifests/argo/overlays/proxy/proxy-env.yaml b/.github/resources/manifests/argo/overlays/proxy/proxy-env.yaml deleted file mode 100644 index 70e03d05e27..00000000000 --- a/.github/resources/manifests/argo/overlays/proxy/proxy-env.yaml +++ /dev/null @@ -1,16 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ml-pipeline -spec: - template: - spec: - containers: - - name: ml-pipeline-api-server - env: - - name: HTTP_PROXY - value: "http://squid.squid.svc.cluster.local:3128" - - name: HTTPS_PROXY - value: "http://squid.squid.svc.cluster.local:3128" - - name: NO_PROXY - value: "localhost,127.0.0.1,.svc.cluster.local,kubernetes.default.svc,metadata-grpc-service,0,1,2,3,4,5,6,7,8,9" diff --git a/.github/resources/manifests/kubernetes-native/base/kustomization.yaml b/.github/resources/manifests/kubernetes-native/base/kustomization.yaml new file mode 100644 index 00000000000..1dd86fa980a --- /dev/null +++ b/.github/resources/manifests/kubernetes-native/base/kustomization.yaml @@ -0,0 +1,19 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: +- ../../../../../manifests/kustomize/env/cert-manager/platform-agnostic-k8s-native + +images: +- name: ghcr.io/kubeflow/kfp-api-server + newName: kind-registry:5000/apiserver + newTag: latest +- name: ghcr.io/kubeflow/kfp-persistence-agent + newName: kind-registry:5000/persistenceagent + newTag: latest +- name: ghcr.io/kubeflow/kfp-scheduled-workflow-controller + newName: kind-registry:5000/scheduledworkflow + newTag: latest +- name: ghcr.io/kubeflow/kfp-frontend + newName: kind-registry:5000/frontend + newTag: latest diff --git a/.github/resources/manifests/kubernetes-native/cache-disabled/cache-env.yaml b/.github/resources/manifests/kubernetes-native/cache-disabled/cache-env.yaml new file mode 100644 index 00000000000..908eb3d8158 --- /dev/null +++ 
b/.github/resources/manifests/kubernetes-native/cache-disabled/cache-env.yaml @@ -0,0 +1,18 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: ml-pipeline +spec: + template: + spec: + containers: + - name: ml-pipeline-api-server + env: + - name: CACHEENABLED + value: "false" + - name: V2_DRIVER_IMAGE + value: kind-registry:5000/driver:latest + - name: V2_LAUNCHER_IMAGE + value: kind-registry:5000/launcher:latest + - name: LOG_LEVEL + value: "debug" diff --git a/.github/resources/manifests/kubernetes-native/cache-disabled/kustomization.yaml b/.github/resources/manifests/kubernetes-native/cache-disabled/kustomization.yaml new file mode 100644 index 00000000000..5084fe2f7d4 --- /dev/null +++ b/.github/resources/manifests/kubernetes-native/cache-disabled/kustomization.yaml @@ -0,0 +1,11 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: + - ../base + +patches: + - path: cache-env.yaml + target: + kind: Deployment + name: ml-pipeline diff --git a/.github/resources/manifests/kubernetes-native/default/apiserver-env.yaml b/.github/resources/manifests/kubernetes-native/default/apiserver-env.yaml new file mode 100644 index 00000000000..0e711f76eea --- /dev/null +++ b/.github/resources/manifests/kubernetes-native/default/apiserver-env.yaml @@ -0,0 +1,16 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: ml-pipeline +spec: + template: + spec: + containers: + - name: ml-pipeline-api-server + env: + - name: V2_DRIVER_IMAGE + value: kind-registry:5000/driver:latest + - name: V2_LAUNCHER_IMAGE + value: kind-registry:5000/launcher:latest + - name: LOG_LEVEL + value: "debug" diff --git a/.github/resources/manifests/kubernetes-native/default/kustomization.yaml b/.github/resources/manifests/kubernetes-native/default/kustomization.yaml new file mode 100644 index 00000000000..752d30170c6 --- /dev/null +++ b/.github/resources/manifests/kubernetes-native/default/kustomization.yaml @@ -0,0 +1,8 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: +- ../base + +patches: + - path: apiserver-env.yaml diff --git a/.github/resources/manifests/multiuser/base/kustomization.yaml b/.github/resources/manifests/multiuser/base/kustomization.yaml new file mode 100644 index 00000000000..7aac0b33868 --- /dev/null +++ b/.github/resources/manifests/multiuser/base/kustomization.yaml @@ -0,0 +1,19 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: +- ../../../../../manifests/kustomize/env/platform-agnostic-multi-user + +images: +- name: ghcr.io/kubeflow/kfp-api-server + newName: kind-registry:5000/apiserver + newTag: latest +- name: ghcr.io/kubeflow/kfp-persistence-agent + newName: kind-registry:5000/persistenceagent + newTag: latest +- name: ghcr.io/kubeflow/kfp-scheduled-workflow-controller + newName: kind-registry:5000/scheduledworkflow + newTag: latest +- name: ghcr.io/kubeflow/kfp-frontend + newName: kind-registry:5000/frontend + newTag: latest diff --git a/.github/resources/manifests/argo/overlays/no-proxy/apiserver-env.yaml b/.github/resources/manifests/multiuser/cache-disabled-minio/apiserver-env.yaml similarity index 100% rename from .github/resources/manifests/argo/overlays/no-proxy/apiserver-env.yaml rename to .github/resources/manifests/multiuser/cache-disabled-minio/apiserver-env.yaml diff --git a/.github/resources/manifests/multiuser/cache-disabled-minio/kustomization.yaml b/.github/resources/manifests/multiuser/cache-disabled-minio/kustomization.yaml new file mode 100644 index 00000000000..674d0b71802 --- 
/dev/null +++ b/.github/resources/manifests/multiuser/cache-disabled-minio/kustomization.yaml @@ -0,0 +1,12 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: + - ../minio + +patches: + - path: ../cache-disabled/cache-env.yaml + target: + kind: Deployment + name: ml-pipeline + - path: apiserver-env.yaml diff --git a/.github/resources/manifests/multiuser/cache-disabled/cache-env.yaml b/.github/resources/manifests/multiuser/cache-disabled/cache-env.yaml new file mode 100644 index 00000000000..b413428ace9 --- /dev/null +++ b/.github/resources/manifests/multiuser/cache-disabled/cache-env.yaml @@ -0,0 +1,18 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: ml-pipeline +spec: + template: + spec: + containers: + - name: ml-pipeline-api-server + env: + - name: CACHEENABLED + value: "false" + - name: V2_DRIVER_IMAGE + value: kind-registry:5000/driver + - name: V2_LAUNCHER_IMAGE + value: kind-registry:5000/launcher + - name: LOG_LEVEL + value: "debug" diff --git a/.github/resources/manifests/multiuser/cache-disabled/kustomization.yaml b/.github/resources/manifests/multiuser/cache-disabled/kustomization.yaml new file mode 100644 index 00000000000..8d08abfca18 --- /dev/null +++ b/.github/resources/manifests/multiuser/cache-disabled/kustomization.yaml @@ -0,0 +1,22 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: + - ../base + +images: + - name: ghcr.io/kubeflow/kfp-api-server + newName: kind-registry:5000/apiserver + newTag: latest + - name: ghcr.io/kubeflow/kfp-persistence-agent + newName: kind-registry:5000/persistenceagent + newTag: latest + - name: ghcr.io/kubeflow/kfp-scheduled-workflow-controller + newName: kind-registry:5000/scheduledworkflow + newTag: latest + +patches: + - path: cache-env.yaml + target: + kind: Deployment + name: ml-pipeline diff --git a/.github/resources/manifests/multiuser/default/apiserver-env.yaml b/.github/resources/manifests/multiuser/default/apiserver-env.yaml new file mode 100644 index 00000000000..6cb74d096f5 --- /dev/null +++ b/.github/resources/manifests/multiuser/default/apiserver-env.yaml @@ -0,0 +1,16 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: ml-pipeline +spec: + template: + spec: + containers: + - name: ml-pipeline-api-server + env: + - name: V2_DRIVER_IMAGE + value: kind-registry:5000/driver + - name: V2_LAUNCHER_IMAGE + value: kind-registry:5000/launcher + - name: LOG_LEVEL + value: "debug" diff --git a/.github/resources/manifests/multiuser/default/kustomization.yaml b/.github/resources/manifests/multiuser/default/kustomization.yaml new file mode 100644 index 00000000000..752d30170c6 --- /dev/null +++ b/.github/resources/manifests/multiuser/default/kustomization.yaml @@ -0,0 +1,8 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: +- ../base + +patches: + - path: apiserver-env.yaml diff --git a/.github/resources/manifests/multiuser/minio/apiserver-env.yaml b/.github/resources/manifests/multiuser/minio/apiserver-env.yaml new file mode 100644 index 00000000000..6cb74d096f5 --- /dev/null +++ b/.github/resources/manifests/multiuser/minio/apiserver-env.yaml @@ -0,0 +1,16 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: ml-pipeline +spec: + template: + spec: + containers: + - name: ml-pipeline-api-server + env: + - name: V2_DRIVER_IMAGE + value: kind-registry:5000/driver + - name: V2_LAUNCHER_IMAGE + value: kind-registry:5000/launcher + - name: LOG_LEVEL + value: "debug" diff --git 
a/.github/resources/manifests/multiuser/minio/kustomization.yaml b/.github/resources/manifests/multiuser/minio/kustomization.yaml new file mode 100644 index 00000000000..ae6bccc9978 --- /dev/null +++ b/.github/resources/manifests/multiuser/minio/kustomization.yaml @@ -0,0 +1,22 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: +- ../../../../../manifests/kustomize/env/platform-agnostic-multi-user-minio + +images: +- name: ghcr.io/kubeflow/kfp-api-server + newName: kind-registry:5000/apiserver + newTag: latest +- name: ghcr.io/kubeflow/kfp-persistence-agent + newName: kind-registry:5000/persistenceagent + newTag: latest +- name: ghcr.io/kubeflow/kfp-scheduled-workflow-controller + newName: kind-registry:5000/scheduledworkflow + newTag: latest +- name: ghcr.io/kubeflow/kfp-frontend + newName: kind-registry:5000/frontend + newTag: latest + +patches: +- path: apiserver-env.yaml diff --git a/.github/resources/manifests/standalone/base/apiserver-env.yaml b/.github/resources/manifests/standalone/base/apiserver-env.yaml new file mode 100644 index 00000000000..0e711f76eea --- /dev/null +++ b/.github/resources/manifests/standalone/base/apiserver-env.yaml @@ -0,0 +1,16 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: ml-pipeline +spec: + template: + spec: + containers: + - name: ml-pipeline-api-server + env: + - name: V2_DRIVER_IMAGE + value: kind-registry:5000/driver:latest + - name: V2_LAUNCHER_IMAGE + value: kind-registry:5000/launcher:latest + - name: LOG_LEVEL + value: "debug" diff --git a/.github/resources/manifests/standalone/base/kustomization.yaml b/.github/resources/manifests/standalone/base/kustomization.yaml new file mode 100644 index 00000000000..c5df6ede3f3 --- /dev/null +++ b/.github/resources/manifests/standalone/base/kustomization.yaml @@ -0,0 +1,22 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: +- ../../../../../manifests/kustomize/env/platform-agnostic + +images: +- name: ghcr.io/kubeflow/kfp-api-server + newName: kind-registry:5000/apiserver + newTag: latest +- name: ghcr.io/kubeflow/kfp-persistence-agent + newName: kind-registry:5000/persistenceagent + newTag: latest +- name: ghcr.io/kubeflow/kfp-scheduled-workflow-controller + newName: kind-registry:5000/scheduledworkflow + newTag: latest +- name: ghcr.io/kubeflow/kfp-frontend + newName: kind-registry:5000/frontend + newTag: latest + +patches: + - path: apiserver-env.yaml diff --git a/.github/resources/manifests/standalone/cache-disabled-minio/cache-env.yaml b/.github/resources/manifests/standalone/cache-disabled-minio/cache-env.yaml new file mode 100644 index 00000000000..908eb3d8158 --- /dev/null +++ b/.github/resources/manifests/standalone/cache-disabled-minio/cache-env.yaml @@ -0,0 +1,18 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: ml-pipeline +spec: + template: + spec: + containers: + - name: ml-pipeline-api-server + env: + - name: CACHEENABLED + value: "false" + - name: V2_DRIVER_IMAGE + value: kind-registry:5000/driver:latest + - name: V2_LAUNCHER_IMAGE + value: kind-registry:5000/launcher:latest + - name: LOG_LEVEL + value: "debug" diff --git a/.github/resources/manifests/standalone/cache-disabled-minio/kustomization.yaml b/.github/resources/manifests/standalone/cache-disabled-minio/kustomization.yaml new file mode 100644 index 00000000000..a170d6149ac --- /dev/null +++ b/.github/resources/manifests/standalone/cache-disabled-minio/kustomization.yaml @@ -0,0 +1,22 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: 
Kustomization + +resources: + - ../minio + +images: + - name: ghcr.io/kubeflow/kfp-api-server + newName: kind-registry:5000/apiserver + newTag: latest + - name: ghcr.io/kubeflow/kfp-persistence-agent + newName: kind-registry:5000/persistenceagent + newTag: latest + - name: ghcr.io/kubeflow/kfp-scheduled-workflow-controller + newName: kind-registry:5000/scheduledworkflow + newTag: latest + +patches: + - path: cache-env.yaml + target: + kind: Deployment + name: ml-pipeline diff --git a/.github/resources/manifests/standalone/cache-disabled-proxy-minio/apiserver-env.yaml b/.github/resources/manifests/standalone/cache-disabled-proxy-minio/apiserver-env.yaml new file mode 100644 index 00000000000..b2ecdfe113a --- /dev/null +++ b/.github/resources/manifests/standalone/cache-disabled-proxy-minio/apiserver-env.yaml @@ -0,0 +1,26 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: ml-pipeline +spec: + template: + spec: + containers: + - name: ml-pipeline-api-server + env: + - name: V2_DRIVER_IMAGE + value: kind-registry:5000/driver + - name: V2_LAUNCHER_IMAGE + value: kind-registry:5000/launcher + - name: LOG_LEVEL + value: "debug" + - name: CACHEENABLED + value: "false" + - name: HTTP_PROXY + value: "http://squid.squid.svc.cluster.local:3128" + - name: HTTPS_PROXY + value: "http://squid.squid.svc.cluster.local:3128" + - name: NO_PROXY + value: "localhost,127.0.0.1,.svc.cluster.local,kubernetes.default.svc,minio-service.kubeflow,metadata-grpc-service,metadata-grpc-service.kubeflow,ml-pipeline.kubeflow" + - name: OBJECTSTORECONFIG_HOST + value: "minio-service.kubeflow.svc.cluster.local" diff --git a/.github/resources/manifests/standalone/cache-disabled-proxy-minio/kustomization.yaml b/.github/resources/manifests/standalone/cache-disabled-proxy-minio/kustomization.yaml new file mode 100644 index 00000000000..b35b67e9498 --- /dev/null +++ b/.github/resources/manifests/standalone/cache-disabled-proxy-minio/kustomization.yaml @@ -0,0 +1,11 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: + - ../minio + +patches: + - path: apiserver-env.yaml + target: + kind: Deployment + name: ml-pipeline diff --git a/.github/resources/manifests/standalone/cache-disabled-proxy/apiserver-env.yaml b/.github/resources/manifests/standalone/cache-disabled-proxy/apiserver-env.yaml new file mode 100644 index 00000000000..aa4084a0221 --- /dev/null +++ b/.github/resources/manifests/standalone/cache-disabled-proxy/apiserver-env.yaml @@ -0,0 +1,26 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: ml-pipeline +spec: + template: + spec: + containers: + - name: ml-pipeline-api-server + env: + - name: CACHEENABLED + value: "false" + - name: V2_DRIVER_IMAGE + value: kind-registry:5000/driver:latest + - name: V2_LAUNCHER_IMAGE + value: kind-registry:5000/launcher:latest + - name: LOG_LEVEL + value: "debug" + - name: HTTP_PROXY + value: "http://squid.squid.svc.cluster.local:3128" + - name: HTTPS_PROXY + value: "http://squid.squid.svc.cluster.local:3128" + - name: NO_PROXY + value: "localhost,127.0.0.1,.svc.cluster.local,kubernetes.default.svc,minio-service.kubeflow,metadata-grpc-service,metadata-grpc-service.kubeflow,ml-pipeline.kubeflow" + - name: OBJECTSTORECONFIG_HOST + value: "minio-service.kubeflow.svc.cluster.local" diff --git a/.github/resources/manifests/standalone/cache-disabled-proxy/kustomization.yaml b/.github/resources/manifests/standalone/cache-disabled-proxy/kustomization.yaml new file mode 100644 index 00000000000..3e41d8ee530 --- /dev/null +++ 
b/.github/resources/manifests/standalone/cache-disabled-proxy/kustomization.yaml @@ -0,0 +1,11 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: + - ../cache-disabled + +patches: + - path: apiserver-env.yaml + target: + kind: Deployment + name: ml-pipeline diff --git a/.github/resources/manifests/standalone/cache-disabled/cache-env.yaml b/.github/resources/manifests/standalone/cache-disabled/cache-env.yaml new file mode 100644 index 00000000000..908eb3d8158 --- /dev/null +++ b/.github/resources/manifests/standalone/cache-disabled/cache-env.yaml @@ -0,0 +1,18 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: ml-pipeline +spec: + template: + spec: + containers: + - name: ml-pipeline-api-server + env: + - name: CACHEENABLED + value: "false" + - name: V2_DRIVER_IMAGE + value: kind-registry:5000/driver:latest + - name: V2_LAUNCHER_IMAGE + value: kind-registry:5000/launcher:latest + - name: LOG_LEVEL + value: "debug" diff --git a/.github/resources/manifests/standalone/cache-disabled/kustomization.yaml b/.github/resources/manifests/standalone/cache-disabled/kustomization.yaml new file mode 100644 index 00000000000..5084fe2f7d4 --- /dev/null +++ b/.github/resources/manifests/standalone/cache-disabled/kustomization.yaml @@ -0,0 +1,11 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: + - ../base + +patches: + - path: cache-env.yaml + target: + kind: Deployment + name: ml-pipeline diff --git a/.github/resources/manifests/standalone/default/apiserver-env.yaml b/.github/resources/manifests/standalone/default/apiserver-env.yaml new file mode 100644 index 00000000000..0e711f76eea --- /dev/null +++ b/.github/resources/manifests/standalone/default/apiserver-env.yaml @@ -0,0 +1,16 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: ml-pipeline +spec: + template: + spec: + containers: + - name: ml-pipeline-api-server + env: + - name: V2_DRIVER_IMAGE + value: kind-registry:5000/driver:latest + - name: V2_LAUNCHER_IMAGE + value: kind-registry:5000/launcher:latest + - name: LOG_LEVEL + value: "debug" diff --git a/.github/resources/manifests/standalone/default/kustomization.yaml b/.github/resources/manifests/standalone/default/kustomization.yaml new file mode 100644 index 00000000000..752d30170c6 --- /dev/null +++ b/.github/resources/manifests/standalone/default/kustomization.yaml @@ -0,0 +1,8 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: +- ../base + +patches: + - path: apiserver-env.yaml diff --git a/.github/resources/manifests/standalone/minio/apiserver-env.yaml b/.github/resources/manifests/standalone/minio/apiserver-env.yaml new file mode 100644 index 00000000000..6cb74d096f5 --- /dev/null +++ b/.github/resources/manifests/standalone/minio/apiserver-env.yaml @@ -0,0 +1,16 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: ml-pipeline +spec: + template: + spec: + containers: + - name: ml-pipeline-api-server + env: + - name: V2_DRIVER_IMAGE + value: kind-registry:5000/driver + - name: V2_LAUNCHER_IMAGE + value: kind-registry:5000/launcher + - name: LOG_LEVEL + value: "debug" diff --git a/.github/resources/manifests/standalone/minio/kustomization.yaml b/.github/resources/manifests/standalone/minio/kustomization.yaml new file mode 100644 index 00000000000..1513dc3f726 --- /dev/null +++ b/.github/resources/manifests/standalone/minio/kustomization.yaml @@ -0,0 +1,8 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: +- ../base + +patches: +- path: 
apiserver-env.yaml diff --git a/.github/resources/manifests/standalone/proxy-minio/apiserver-env.yaml b/.github/resources/manifests/standalone/proxy-minio/apiserver-env.yaml new file mode 100644 index 00000000000..3591a91cf33 --- /dev/null +++ b/.github/resources/manifests/standalone/proxy-minio/apiserver-env.yaml @@ -0,0 +1,24 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: ml-pipeline +spec: + template: + spec: + containers: + - name: ml-pipeline-api-server + env: + - name: V2_DRIVER_IMAGE + value: kind-registry:5000/driver + - name: V2_LAUNCHER_IMAGE + value: kind-registry:5000/launcher + - name: LOG_LEVEL + value: "debug" + - name: HTTP_PROXY + value: "http://squid.squid.svc.cluster.local:3128" + - name: HTTPS_PROXY + value: "http://squid.squid.svc.cluster.local:3128" + - name: NO_PROXY + value: "localhost,127.0.0.1,.svc.cluster.local,kubernetes.default.svc,minio-service.kubeflow,metadata-grpc-service,metadata-grpc-service.kubeflow,ml-pipeline.kubeflow" + - name: OBJECTSTORECONFIG_HOST + value: "minio-service.kubeflow.svc.cluster.local" diff --git a/.github/resources/manifests/standalone/proxy-minio/kustomization.yaml b/.github/resources/manifests/standalone/proxy-minio/kustomization.yaml new file mode 100644 index 00000000000..00b2c00f8ac --- /dev/null +++ b/.github/resources/manifests/standalone/proxy-minio/kustomization.yaml @@ -0,0 +1,22 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: + - ../minio + +images: + - name: ghcr.io/kubeflow/kfp-api-server + newName: kind-registry:5000/apiserver + newTag: latest + - name: ghcr.io/kubeflow/kfp-persistence-agent + newName: kind-registry:5000/persistenceagent + newTag: latest + - name: ghcr.io/kubeflow/kfp-scheduled-workflow-controller + newName: kind-registry:5000/scheduledworkflow + newTag: latest + +patches: + - path: apiserver-env.yaml + target: + kind: Deployment + name: ml-pipeline diff --git a/.github/resources/manifests/standalone/proxy/apiserver-env.yaml b/.github/resources/manifests/standalone/proxy/apiserver-env.yaml new file mode 100644 index 00000000000..890abb0e340 --- /dev/null +++ b/.github/resources/manifests/standalone/proxy/apiserver-env.yaml @@ -0,0 +1,24 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: ml-pipeline +spec: + template: + spec: + containers: + - name: ml-pipeline-api-server + env: + - name: V2_DRIVER_IMAGE + value: kind-registry:5000/driver:latest + - name: V2_LAUNCHER_IMAGE + value: kind-registry:5000/launcher:latest + - name: LOG_LEVEL + value: "debug" + - name: HTTP_PROXY + value: "http://squid.squid.svc.cluster.local:3128" + - name: HTTPS_PROXY + value: "http://squid.squid.svc.cluster.local:3128" + - name: NO_PROXY + value: "localhost,127.0.0.1,.svc.cluster.local,kubernetes.default.svc,minio-service.kubeflow,metadata-grpc-service,metadata-grpc-service.kubeflow,ml-pipeline.kubeflow" + - name: OBJECTSTORECONFIG_HOST + value: "minio-service.kubeflow.svc.cluster.local" diff --git a/.github/resources/manifests/standalone/proxy/kustomization.yaml b/.github/resources/manifests/standalone/proxy/kustomization.yaml new file mode 100644 index 00000000000..b7674afb99f --- /dev/null +++ b/.github/resources/manifests/standalone/proxy/kustomization.yaml @@ -0,0 +1,19 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: +- ../../../../../manifests/kustomize/env/platform-agnostic + +images: + - name: ghcr.io/kubeflow/kfp-api-server + newName: kind-registry:5000/apiserver + newTag: latest + - name: 
ghcr.io/kubeflow/kfp-persistence-agent + newName: kind-registry:5000/persistenceagent + newTag: latest + - name: ghcr.io/kubeflow/kfp-scheduled-workflow-controller + newName: kind-registry:5000/scheduledworkflow + newTag: latest + +patches: +- path: apiserver-env.yaml diff --git a/.github/resources/scripts/build-images.sh b/.github/resources/scripts/build-images.sh deleted file mode 100755 index 7cb06b3a037..00000000000 --- a/.github/resources/scripts/build-images.sh +++ /dev/null @@ -1,64 +0,0 @@ -#!/bin/bash -# -# Copyright 2023 kubeflow.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# source: https://raw.githubusercontent.com/open-toolchain/commons/master/scripts/check_registry.sh - -# Remove the x if you need no print out of each command -set -e - -REGISTRY="${REGISTRY:-kind-registry:5000}" -echo "REGISTRY=$REGISTRY" -TAG="${TAG:-latest}" -EXIT_CODE=0 - -docker system prune -a -f - -docker build --progress=plain -t "${REGISTRY}/apiserver:${TAG}" -f backend/Dockerfile . && docker push "${REGISTRY}/apiserver:${TAG}" || EXIT_CODE=$? -if [[ $EXIT_CODE -ne 0 ]] -then - echo "Failed to build apiserver image." - exit $EXIT_CODE -fi - -docker build --progress=plain -t "${REGISTRY}/persistenceagent:${TAG}" -f backend/Dockerfile.persistenceagent . && docker push "${REGISTRY}/persistenceagent:${TAG}" || EXIT_CODE=$? -if [[ $EXIT_CODE -ne 0 ]] -then - echo "Failed to build persistenceagent image." - exit $EXIT_CODE -fi - -docker build --progress=plain -t "${REGISTRY}/scheduledworkflow:${TAG}" -f backend/Dockerfile.scheduledworkflow . && docker push "${REGISTRY}/scheduledworkflow:${TAG}" || EXIT_CODE=$? -if [[ $EXIT_CODE -ne 0 ]] -then - echo "Failed to build scheduledworkflow image." - exit $EXIT_CODE -fi - -docker build --progress=plain -t "${REGISTRY}/driver:${TAG}" -f backend/Dockerfile.driver . && docker push "${REGISTRY}/driver:${TAG}" || EXIT_CODE=$? -if [[ $EXIT_CODE -ne 0 ]] -then - echo "Failed to build driver image." - exit $EXIT_CODE -fi - -docker build --progress=plain -t "${REGISTRY}/launcher:${TAG}" -f backend/Dockerfile.launcher . && docker push "${REGISTRY}/launcher:${TAG}" || EXIT_CODE=$? -if [[ $EXIT_CODE -ne 0 ]] -then - echo "Failed to build launcher image." - exit $EXIT_CODE -fi - -# clean up intermittent build caches to free up disk space -docker system prune -a -f diff --git a/.github/resources/scripts/deploy-kfp.sh b/.github/resources/scripts/deploy-kfp.sh index e5ac85d35a1..5f83d289651 100755 --- a/.github/resources/scripts/deploy-kfp.sh +++ b/.github/resources/scripts/deploy-kfp.sh @@ -24,10 +24,13 @@ C_DIR="${BASH_SOURCE%/*}" if [[ ! -d "$C_DIR" ]]; then C_DIR="$PWD"; fi source "${C_DIR}/helper-functions.sh" -TEST_MANIFESTS=".github/resources/manifests/argo" +TEST_MANIFESTS=".github/resources/manifests" PIPELINES_STORE="database" USE_PROXY=false CACHE_DISABLED=false +MULTI_USER=false +STORAGE_BACKEND="seaweedfs" +AWF_VERSION="" # Loop over script arguments passed. 
This uses a single switch-case # block with default value in case we want to make alternative deployments @@ -46,18 +49,53 @@ while [ "$#" -gt 0 ]; do CACHE_DISABLED=true shift ;; + --multi-user) + MULTI_USER=true + shift + ;; + --storage) + STORAGE_BACKEND="$2" + shift 2 + ;; + --argo-version) + shift + if [[ -n "$1" ]]; then + AWF_VERSION="$1" + shift + else + echo "ERROR: --argo-version requires an argument" + exit 1 + fi + ;; esac done -if [ "${USE_PROXY}" == "true" && "${PIPELINES_STORE}" == "kubernetes" ]; then +if [ "${USE_PROXY}" == "true" ] && [ "${PIPELINES_STORE}" == "kubernetes" ]; then echo "ERROR: Kubernetes Pipeline store cannot be deployed with proxy support." exit 1 fi -kubectl apply -k "manifests/kustomize/cluster-scoped-resources/" +if [ "${MULTI_USER}" == "true" ] && [ "${USE_PROXY}" == "true" ]; then + echo "ERROR: Multi-user mode cannot be deployed with proxy support." + exit 1 +fi + +if [ "${STORAGE_BACKEND}" != "minio" ] && [ "${STORAGE_BACKEND}" != "seaweedfs" ]; then + echo "ERROR: Storage backend must be either 'minio' or 'seaweedfs'." + exit 1 +fi + +if [ -n "${AWF_VERSION}" ]; then + echo "NOTE: Argo version ${AWF_VERSION} specified, updating Argo Workflow manifests..." + echo "${AWF_VERSION}" > third_party/argo/VERSION + make -C ./manifests/kustomize/third-party/argo update + echo "Manifests updated for Argo version ${AWF_VERSION}." +fi + +kubectl apply -k "manifests/kustomize/cluster-scoped-resources/" || EXIT_CODE=$? + kubectl wait crd/applications.app.k8s.io --for condition=established --timeout=60s || EXIT_CODE=$? -if [[ $EXIT_CODE -ne 0 ]] -then +if [[ $EXIT_CODE -ne 0 ]]; then echo "Failed to deploy cluster-scoped resources." exit $EXIT_CODE fi @@ -73,15 +111,72 @@ if [ "${PIPELINES_STORE}" == "kubernetes" ]; then fi fi + +# Deploy multi-user prerequisites if multi-user mode is enabled +if [ "${MULTI_USER}" == "true" ]; then + echo "Installing Istio..." + kubectl apply -k https://github.com/kubeflow/manifests/common/istio/istio-crds/base?ref=master + kubectl apply -k https://github.com/kubeflow/manifests/common/istio/istio-namespace/base?ref=master + kubectl apply -k https://github.com/kubeflow/manifests/common/istio/istio-install/base?ref=master + echo "Waiting for all Istio Pods to become ready..." + kubectl wait --for=condition=Ready pods --all -n istio-system --timeout=300s + + echo "Deploying Metacontroller CRD..." + kubectl apply -f manifests/kustomize/third-party/metacontroller/base/crd.yaml + kubectl wait --for condition=established --timeout=30s crd/compositecontrollers.metacontroller.k8s.io + + echo "Installing Profile Controller Resources..." + kubectl apply -k https://github.com/kubeflow/manifests/applications/profiles/upstream/overlays/kubeflow?ref=master + kubectl -n kubeflow wait --for=condition=Ready pods -l kustomize.component=profiles --timeout 180s + + echo "Creating KF Profile..." + kubectl apply -f test_data/kubernetes/seaweedfs/test-profiles.yaml + + echo "Applying kubeflow-edit ClusterRole with proper aggregation..." + kubectl apply -f test_data/kubernetes/seaweedfs/kubeflow-edit-clusterrole.yaml + + echo "Applying network policy to allow user namespace access to kubeflow services..." 
+  kubectl apply -f test_data/kubernetes/seaweedfs/allow-user-namespace-access.yaml
+fi
+
 # Manifests will be deployed according to the flag provided
-if $CACHE_DISABLED; then
-  TEST_MANIFESTS="${TEST_MANIFESTS}/overlays/cache-disabled"
-elif $USE_PROXY; then
-  TEST_MANIFESTS="${TEST_MANIFESTS}/overlays/proxy"
-elif [ "${PIPELINES_STORE}" == "kubernetes" ]; then
-  TEST_MANIFESTS="${TEST_MANIFESTS}/overlays/kubernetes-native"
-else
-  TEST_MANIFESTS="${TEST_MANIFESTS}/overlays/no-proxy"
+if [ "${MULTI_USER}" == "false" ] && [ "${PIPELINES_STORE}" != "kubernetes" ]; then
+  TEST_MANIFESTS="${TEST_MANIFESTS}/standalone"
+  if $CACHE_DISABLED && $USE_PROXY && [ "${STORAGE_BACKEND}" == "minio" ]; then
+    TEST_MANIFESTS="${TEST_MANIFESTS}/cache-disabled-proxy-minio"
+  elif $CACHE_DISABLED && $USE_PROXY; then
+    TEST_MANIFESTS="${TEST_MANIFESTS}/cache-disabled-proxy"
+  elif $CACHE_DISABLED && [ "${STORAGE_BACKEND}" == "minio" ]; then
+    TEST_MANIFESTS="${TEST_MANIFESTS}/cache-disabled-minio"
+  elif $USE_PROXY && [ "${STORAGE_BACKEND}" == "minio" ]; then
+    TEST_MANIFESTS="${TEST_MANIFESTS}/proxy-minio"
+  elif $CACHE_DISABLED; then
+    TEST_MANIFESTS="${TEST_MANIFESTS}/cache-disabled"
+  elif $USE_PROXY; then
+    TEST_MANIFESTS="${TEST_MANIFESTS}/proxy"
+  elif [ "${STORAGE_BACKEND}" == "minio" ]; then
+    TEST_MANIFESTS="${TEST_MANIFESTS}/minio"
+  else
+    TEST_MANIFESTS="${TEST_MANIFESTS}/default"
+  fi
+elif [ "${MULTI_USER}" == "false" ] && [ "${PIPELINES_STORE}" == "kubernetes" ]; then
+  TEST_MANIFESTS="${TEST_MANIFESTS}/kubernetes-native"
+  if $CACHE_DISABLED; then
+    TEST_MANIFESTS="${TEST_MANIFESTS}/cache-disabled"
+  else
+    TEST_MANIFESTS="${TEST_MANIFESTS}/default"
+  fi
+elif [ "${MULTI_USER}" == "true" ]; then
+  TEST_MANIFESTS="${TEST_MANIFESTS}/multiuser"
+  if $CACHE_DISABLED && [ "${STORAGE_BACKEND}" == "minio" ]; then
+    TEST_MANIFESTS="${TEST_MANIFESTS}/cache-disabled-minio"
+  elif [ "${STORAGE_BACKEND}" == "minio" ]; then
+    TEST_MANIFESTS="${TEST_MANIFESTS}/minio"
+  elif $CACHE_DISABLED; then
+    TEST_MANIFESTS="${TEST_MANIFESTS}/cache-disabled"
+  else
+    TEST_MANIFESTS="${TEST_MANIFESTS}/default"
+  fi
 fi
 
 echo "Deploying ${TEST_MANIFESTS}..."
@@ -101,6 +196,16 @@ then
   exit 1
 fi
 
+# Verify pipeline integration for multi-user mode
+if [ "${MULTI_USER}" == "true" ]; then
+  echo "Verifying Pipeline Integration..."
+  KF_PROFILE=kubeflow-user-example-com
+  if ! kubectl get secret mlpipeline-minio-artifact -n $KF_PROFILE > /dev/null 2>&1; then
+    echo "Error: Secret mlpipeline-minio-artifact not found in namespace $KF_PROFILE"
+  fi
+  kubectl get secret mlpipeline-minio-artifact -n "$KF_PROFILE" -o json | jq -r '.data | keys[] as $k | "\($k): \(. | .[$k] | @base64d)"' | tr '\n' ' '
+fi
+
 collect_artifacts kubeflow
 
 echo "Finished KFP deployment."
diff --git a/.github/resources/scripts/free-disk-space.sh b/.github/resources/scripts/free-disk-space.sh
new file mode 100755
index 00000000000..3e149cba1ab
--- /dev/null
+++ b/.github/resources/scripts/free-disk-space.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+set -euo pipefail
+
+# This script frees up disk space on GitHub Actions runners.
+# Several GHA workflows were failing with "no space left on device" errors.
+# This script is only meant to run in GitHub Actions CI environment.
+
+# Safety check: Only run on GitHub Actions
+if [[ "${GITHUB_ACTIONS:-false}" != "true" ]]; then
+  echo "ERROR: This script is for GitHub Actions runners only!"
+ exit 1 +fi + +echo "=== Initial disk usage ===" +df -h + +echo "=== Freeing up disk space ===" + +# Remove large directories not needed for KFP tests +sudo rm -rf /usr/share/dotnet +sudo rm -rf /opt/ghc +sudo rm -rf /usr/local/share/boost +sudo rm -rf /usr/local/lib/android +sudo rm -rf /usr/local/.ghcup +sudo rm -rf /usr/share/swift + +# Selectively remove large tools from hostedtoolcache while preserving Go, Node, Python +# Remove these specific large tools that aren't needed for KFP tests +sudo rm -rf /opt/hostedtoolcache/CodeQL || true +sudo rm -rf /opt/hostedtoolcache/Java_* || true +sudo rm -rf /opt/hostedtoolcache/Ruby || true +sudo rm -rf /opt/hostedtoolcache/PyPy || true +sudo rm -rf /opt/hostedtoolcache/boost || true + +# Clean package manager +sudo apt-get autoremove -y +sudo apt-get autoclean + +# Clean Docker +docker system prune -af --volumes +docker image prune -af + +# Clean containerd +sudo systemctl stop containerd || true +sudo rm -rf /var/lib/containerd/io.containerd.snapshotter.v1.overlayfs/snapshots/* || true +sudo systemctl start containerd || true + +echo "=== Final disk usage ===" +df -h diff --git a/.github/resources/scripts/helper-functions.sh b/.github/resources/scripts/helper-functions.sh index 7a8797dac9e..941ae5df3f4 100644 --- a/.github/resources/scripts/helper-functions.sh +++ b/.github/resources/scripts/helper-functions.sh @@ -57,7 +57,7 @@ wait_for_namespace () { wait_for_pods () { C_DIR="${BASH_SOURCE%/*}" - pip install -r "${C_DIR}"/../../../sdk/python/requirements.txt + pip install -r "${C_DIR}"/kfp-readiness/requirements.txt python "${C_DIR}"/kfp-readiness/wait_for_pods.py } diff --git a/.github/resources/scripts/kfp-readiness/requirements.txt b/.github/resources/scripts/kfp-readiness/requirements.txt new file mode 100644 index 00000000000..27c1947f574 --- /dev/null +++ b/.github/resources/scripts/kfp-readiness/requirements.txt @@ -0,0 +1,2 @@ +kubernetes==30.1.0 +urllib3==2.5.0 diff --git a/.github/resources/scripts/kfp-readiness/wait_for_pods.py b/.github/resources/scripts/kfp-readiness/wait_for_pods.py index 5a41637d3d3..fc67d5fda0e 100644 --- a/.github/resources/scripts/kfp-readiness/wait_for_pods.py +++ b/.github/resources/scripts/kfp-readiness/wait_for_pods.py @@ -1,7 +1,6 @@ import logging import time import urllib3 -import sys from kubernetes import client, config import subprocess @@ -31,27 +30,37 @@ def get_pod_statuses(): statuses = {} for pod in pods.items: pod_name = pod.metadata.name - pod_status = pod.status.phase - container_statuses = pod.status.container_statuses or [] - ready = 0 - total = 0 - waiting_messages = [] - for status in container_statuses: - total += 1 - if status.ready: - ready += 1 - if status.state.waiting is not None: - if status.state.waiting.message is not None: - waiting_messages.append(f'Waiting on Container: {status.name} - {status.state.waiting.reason}: {status.state.waiting.message}') - else: - waiting_messages.append(f'Waiting on Container: {status.name} - {status.state.waiting.reason}') - statuses[pod_name] = (pod_status, ready, total, waiting_messages) + if "system" not in pod_name: + pod_status = pod.status.phase + container_statuses = pod.status.container_statuses or [] + ready = 0 + total = 0 + waiting_messages = [] + for status in container_statuses: + total += 1 + if status.ready: + ready += 1 + if status.state.waiting is not None: + if status.state.waiting.message is not None: + waiting_messages.append(f'Waiting on Container: {status.name} - {status.state.waiting.reason}: 
{status.state.waiting.message}') + else: + waiting_messages.append(f'Waiting on Container: {status.name} - {status.state.waiting.reason}') + statuses[pod_name] = (pod_status, ready, total, waiting_messages) return statuses def all_pods_ready(statuses): - return all(pod_status == 'Running' and ready == total - for pod_status, ready, total, _ in statuses.values()) + def is_pod_ready(pod_status, ready, total): + # Jobs/CronJobs are ready when they succeed + if pod_status == 'Succeeded': + return True + # Regular pods are ready when running and all containers are ready + if pod_status == 'Running' and ready == total: + return True + return False + + return all(is_pod_ready(pod_status, ready, total) + for _, (pod_status, ready, total, _) in statuses.items()) def print_get_pods(): @@ -69,7 +78,7 @@ def print_get_pods(): print(f"An error occurred while running kubectl get pods: {e.stderr}") -def check_pods(calm_time=10, timeout=600, retries_after_ready=5): +def check_pods(calm_time=10, timeout=900, retries_after_ready=5): start_time = time.time() stable_count = 0 previous_statuses = {} @@ -108,7 +117,9 @@ def check_pods(calm_time=10, timeout=600, retries_after_ready=5): logging.info("Final pod statuses:") for pod_name, (pod_status, ready, total, _) in previous_statuses.items(): - if pod_status == 'Running' and ready == total: + if pod_status == 'Succeeded': + logging.info(f"Pod {pod_name} completed successfully (Job/CronJob)") + elif pod_status == 'Running' and ready == total: logging.info(f"Pod {pod_name} is fully ready ({ready}/{total})") else: logging.info(f"Pod {pod_name} is not ready (Status: {pod_status}, Ready: {ready}/{total})") diff --git a/.github/scripts/verify-argo-matrix.py b/.github/scripts/verify-argo-matrix.py new file mode 100644 index 00000000000..7fe2a70e78a --- /dev/null +++ b/.github/scripts/verify-argo-matrix.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python3 +import re +import sys +from pathlib import Path +from typing import List, Set + +from ruamel.yaml import YAML + + +def parse_minor(version: str) -> str: + match = re.match(r'(v\d+\.\d+)', version) + if not match: + raise ValueError(f'Cannot parse minor from version: {version}') + return match.group(1) + + +def _extract_argo_versions_from_e2e(e2e_yaml_text: str) -> List[str]: + yaml = YAML(typ='safe') + data = yaml.load(e2e_yaml_text) or {} + jobs = data.get('jobs') or {} + return jobs.get('end-to-end-critical-scenario-tests', {}).get('strategy', {}).get('matrix', {}).get('argo_version',[]) + + +def main() -> int: + repo_root = Path('.') + e2e_path = repo_root / '.github' / 'workflows' / 'e2e-test.yml' + version_path = repo_root / 'third_party' / 'argo' / 'VERSION' + readme_path = repo_root / 'README.md' + + # Extract argo versions from e2e workflow (drop patch) + try: + e2e_content = e2e_path.read_text(encoding='utf-8') + except Exception as exc: + print(f'ERROR: Failed to read {e2e_path}: {exc}', file=sys.stderr) + return 1 + + argo_versions = _extract_argo_versions_from_e2e(e2e_content) + + if not argo_versions or not isinstance(argo_versions, list): + print('ERROR: No argo_version found in .github/workflows/e2e-test.yml', file=sys.stderr) + return 1 + + # Read VERSION and derive minor + try: + version_content = version_path.read_text(encoding='utf-8').strip() + except Exception as exc: + print(f'ERROR: Failed to read {version_path}: {exc}', file=sys.stderr) + return 1 + + m = re.search(r'v\d+\.\d+', version_content) + if not m: + print('ERROR: Could not parse third_party/argo/VERSION', file=sys.stderr) + return 1 + 
argo_versions.append(m.group(0))
+
+    minor_set: Set[str] = {parse_minor(v) for v in argo_versions}
+    argo_minors = sorted(
+        minor_set,
+        key=lambda s: tuple(int(x) for x in s[1:].split('.')),
+    )
+
+    expected = ", ".join(argo_minors)
+
+    # Read README and extract Argo Workflows row
+    try:
+        readme = readme_path.read_text(encoding='utf-8')
+    except Exception as exc:
+        print(f'ERROR: Failed to read {readme_path}: {exc}', file=sys.stderr)
+        return 1
+
+    row_match = re.search(r'^\|\s*Argo Workflows\s*\|\s*([^|]+)\|', readme, re.MULTILINE)
+    if not row_match:
+        print('ERROR: Could not find "Argo Workflows" row in README.md', file=sys.stderr)
+        return 1
+    cell = row_match.group(1).strip()
+
+    if cell != expected:
+        print('ERROR: README.md "Dependencies Compatibility Matrix" for Argo Workflows is out of date.', file=sys.stderr)
+        print(f' Found: "{cell}"', file=sys.stderr)
+        print(f' Expected: "{expected}"', file=sys.stderr)
+        return 1
+
+    print('Argo Workflows compatibility matrix in README.md is up to date.')
+    return 0
+
+
+if __name__ == '__main__':
+    sys.exit(main())
+
+
diff --git a/.github/workflows/api-server-tests.yml b/.github/workflows/api-server-tests.yml
new file mode 100644
index 00000000000..43483cd5090
--- /dev/null
+++ b/.github/workflows/api-server-tests.yml
@@ -0,0 +1,254 @@
+# This workflow runs tests to verify all the API Server REST Endpoints
+name: API Server Tests
+env:
+  API_TESTS_DIR: "./backend/test/v2/api"
+  TESTS_LABEL: "ApiServerTests"
+  NUMBER_OF_PARALLEL_NODES: 15
+  CLUSTER_NAME: "kfp"
+  NAMESPACE: "kubeflow"
+  USER_NAMESPACE: "kubeflow-user-example-com"
+  PYTHON_VERSION: "3.9"
+
+on:
+  push:
+    branches: [master]
+
+  workflow_dispatch:
+    inputs:
+      test_label:
+        description: "Test label that you want to filter on and run"
+        default: 'ApiServerTests'
+        required: true
+        type: string
+      number_of_parallel_tests:
+        description: "Number of ginkgo nodes that you want run in parallel, it essentially is equivalent to number of parallel tests with some caveats"
+        default: 10
+        required: true
+        type: number
+      namespace:
+        description: "Namespace where you want to create your pipelines in"
+        default: "kubeflow"
+        required: true
+        type: string
+
+  pull_request:
+    paths:
+      - '.github/workflows/api-server-tests.yml'
+      - '.github/actions/create-cluster/**'
+      - '.github/resources/**'
+      - 'backend/api/v2beta1/**'
+      - 'backend/src/**'
+      - 'backend/metadata_writer/**'
+      - 'backend/test/v2/api/**'
+      - 'manifests/kustomize/**'
+      - 'test_data/sdk_compiled_pipelines/**'
+      - '!**/*.md'
+      - '!**/OWNERS'
+
+jobs:
+  build:
+    uses: ./.github/workflows/image-builds-with-cache.yml
+
+  api-test-standalone:
+    needs: build
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        k8s_version: [ "v1.31.0" ]
+        cache_enabled: [ "true", "false" ]
+        proxy: [ "true", "false" ]
+        argo_version: [ "v3.7.1", "v3.6.10" ]
+        pipeline_store: [ "database" ]
+        include:
+          - k8s_version: "v1.29.2"
+            cache_enabled: "true"
+            argo_version: "v3.6.10"
+          - k8s_version: "v1.29.2"
+            cache_enabled: "true"
+            argo_version: "v3.5.15"
+      fail-fast: false # So that failure in 1 type of parameterized job does not cause other jobs to terminate prematurely
+    name: KFP API Server tests Standalone - K8sVersion=${{ matrix.k8s_version }} argo_version=${{ matrix.argo_version }} cacheEnabled=${{ matrix.cache_enabled }} proxyEnabled=${{ matrix.proxy }}
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Create cluster
+        uses: ./.github/actions/create-cluster
+        id: create-cluster
+        with:
+          k8s_version: ${{
matrix.k8s_version }} + cluster_name: ${{ env.CLUSTER_NAME }} + + - name: Deploy KFP + uses: ./.github/actions/deploy + id: deploy + if: ${{ steps.create-cluster.outcome == 'success' }} + with: + pipeline_store: ${{ matrix.pipeline_store }} + cache_enabled: ${{ matrix.cache_enabled }} + proxy: ${{ matrix.proxy }} + argo_version: ${{ matrix.argo_version }} + image_path: ${{ needs.build.outputs.IMAGE_PATH }} + image_tag: ${{ needs.build.outputs.IMAGE_TAG }} + image_registry: ${{ needs.build.outputs.IMAGE_REGISTRY }} + + - name: Configure Input Variables + shell: bash + id: configure + if: ${{ steps.deploy.outcome == 'success' }} + run: | + NUMBER_OF_NODES=${{ env.NUMBER_OF_PARALLEL_NODES }} + TEST_LABEL=${{ env.TESTS_LABEL }} + NAMESPACE=${{ env.NAMESPACE }} + if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then + NUMBER_OF_NODES=${{ inputs.number_of_parallel_tests }} + TEST_LABEL=${{ inputs.test_label }} + NAMESPACE=${{ inputs.namespace }} + fi + + PROXY=${{ matrix.proxy }} + if [ -z "${PROXY:-}" ]; then + PROXY=false + fi + + echo "NUMBER_OF_NODES=$NUMBER_OF_NODES" >> $GITHUB_OUTPUT + echo "TEST_LABEL=$TEST_LABEL" >> $GITHUB_OUTPUT + echo "NAMESPACE=$NAMESPACE" >> $GITHUB_OUTPUT + + - name: Run Tests + uses: ./.github/actions/test-and-report + if: ${{ steps.configure.outcome == 'success' }} + id: test-run + with: + pipeline_store: ${{ matrix.pipeline_store }} + cache_enabled: ${{ matrix.cache_enabled }} + proxy: ${{ matrix.proxy }} + test_directory: ${{ env.API_TESTS_DIR }} + test_label: ${{ steps.configure.outputs.TEST_LABEL }} + num_parallel_nodes: ${{ steps.configure.outputs.NUMBER_OF_NODES }} + default_namespace: ${{ steps.configure.outputs.NAMESPACE }} + python_version: ${{ env.PYTHON_VERSION }} + report_name: "Standalone_k8sVersion=${{ matrix.k8s_version }}_argoVersion=${{ matrix.argo_version }}_cacheEnabled=${{ matrix.cache_enabled }}_proxyEnabled=${{ matrix.proxy }}" + + - name: Notify test reports + shell: bash + if: ${{ steps.test-run.outcome == 'success' }} + + run: | + echo "::notice title=Test Summary and HTML Report is now available in the Summary Tab" + + + api-test-k8s-native: + needs: build + runs-on: ubuntu-latest + strategy: + matrix: + k8s_version: [ "v1.31.0", "v1.29.2" ] + cache_enabled: [ "true" ] + uploadPipelinesWithKubernetesClient: [ "true", "false" ] + argo_version: [ "v3.7.1", "v3.6.10" ] + pipeline_store: [ "kubernetes" ] + fail-fast: false # So that failure in 1 type of parameterized job does not cause other jobs to terminate prematurely + name: KFP API Server tests K8s Native API - K8sVersion=${{ matrix.k8s_version }} cacheEnabled=${{ matrix.cache_enabled }} argoVersion=${{ matrix.argo_version }} uploadPipelinesWithKubernetesClient=${{ matrix.uploadPipelinesWithKubernetesClient }} + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Create cluster + uses: ./.github/actions/create-cluster + id: create-cluster + with: + k8s_version: ${{ matrix.k8s_version }} + cluster_name: ${{ env.CLUSTER_NAME }} + + - name: Deploy KFP + uses: ./.github/actions/deploy + id: deploy + if: ${{ steps.create-cluster.outcome == 'success' }} + with: + pipeline_store: ${{ matrix.pipeline_store }} + cache_enabled: ${{ matrix.cache_enabled }} + proxy: ${{ matrix.proxy }} + argo_version: ${{ matrix.argo_version }} + image_path: ${{ needs.build.outputs.IMAGE_PATH }} + image_tag: ${{ needs.build.outputs.IMAGE_TAG }} + image_registry: ${{ needs.build.outputs.IMAGE_REGISTRY }} + + - name: Run Tests + uses: ./.github/actions/test-and-report + if: ${{ 
steps.deploy.outcome == 'success' }} + id: test-run + with: + pipeline_store: ${{ matrix.pipeline_store }} + cache_enabled: ${{ matrix.cache_enabled }} + proxy: ${{ matrix.proxy }} + test_directory: ${{ env.API_TESTS_DIR }} + test_label: ${{ env.TESTS_LABEL }} + num_parallel_nodes: ${{ env.NUMBER_OF_PARALLEL_NODES }} + default_namespace: ${{ env.NAMESPACE }} + python_version: ${{ env.PYTHON_VERSION }} + report_name: "K8Native_k8sVersion=${{ matrix.k8s_version }}_cacheEnabled=${{ matrix.cache_enabled }}_argoVersion=${{ matrix.argo_version }}_uploadPipelinesWithKubernetesClient=${{ matrix.uploadPipelinesWithKubernetesClient }}" + + - name: Notify test reports + shell: bash + if: ${{ steps.test-run.outcome == 'success' }} + run: | + echo "::notice title=Test Summary and HTML Report is now available in the Summary Tab" + + api-test-multi-user: + needs: build + runs-on: ubuntu-latest + strategy: + matrix: + k8s_version: [ "v1.31.0"] + cache_enabled: [ "true", "false" ] + multi_user: [ "true" ] + fail-fast: false # So that failure in 1 type of parameterized job does not cause other jobs to terminate prematurely + name: KFP API Server Multi User Tests - K8sVersion=${{ matrix.k8s_version }} cacheEnabled=${{ matrix.cache_enabled }} multiUser=${{ matrix.multi_user }} + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Create cluster + uses: ./.github/actions/create-cluster + id: create-cluster + with: + k8s_version: ${{ matrix.k8s_version }} + cluster_name: ${{ env.CLUSTER_NAME }} + + - name: Deploy KFP + uses: ./.github/actions/deploy + id: deploy + if: ${{ steps.create-cluster.outcome == 'success' }} + with: + cache_enabled: ${{ matrix.cache_enabled }} + multi_user: ${{ matrix.multi_user }} + image_path: ${{ needs.build.outputs.IMAGE_PATH }} + image_tag: ${{ needs.build.outputs.IMAGE_TAG }} + image_registry: ${{ needs.build.outputs.IMAGE_REGISTRY }} + + - name: Run Tests + uses: ./.github/actions/test-and-report + id: test-run + if: ${{ steps.deploy.outcome == 'success' }} + env: + LOCAL_API_SERVER: "true" + with: + cache_enabled: ${{ matrix.cache_enabled }} + test_directory: ${{ env.API_TESTS_DIR }} + test_label: ${{ env.TESTS_LABEL }} + num_parallel_nodes: ${{ env.NUMBER_OF_PARALLEL_NODES }} + default_namespace: ${{ env.NAMESPACE }} + python_version: ${{ env.PYTHON_VERSION }} + user_namespace: ${{ env.USER_NAMESPACE }} + multi_user: ${{ matrix.multi_user }} + report_name: "MultiUser_k8sVersion=${{ matrix.k8s_version }}_cacheEnabled=${{ matrix.cache_enabled }}_multiUser=${{ matrix.multi_user }}" + + - name: Notify test reports + shell: bash + if: ${{ steps.test-run.outcome == 'success' }} + run: | + echo "::notice title=Test Summary and HTML Report is now available in the Summary Tab" \ No newline at end of file diff --git a/.github/workflows/backend-visualization.yml b/.github/workflows/backend-visualization.yml index 8d4c626d562..b502afbc40d 100644 --- a/.github/workflows/backend-visualization.yml +++ b/.github/workflows/backend-visualization.yml @@ -21,12 +21,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Set up Python uses: actions/setup-python@v4 with: - python-version: '3.9' + python-version: '3.11' - name: Run tests run: ./test/presubmit-backend-visualization.sh diff --git a/.github/workflows/build-and-push.yml b/.github/workflows/build-and-push.yml new file mode 100644 index 00000000000..d7a96d6cb46 --- /dev/null +++ b/.github/workflows/build-and-push.yml @@ -0,0 +1,193 @@ +name: Build 
and Push images +run-name: Build images +on: + workflow_call: + inputs: + src_branch: + type: string + default: '' + description: 'Source branch to build KFP from' + required: false + target_tag: + type: string + default: 'X.Y.Z' + description: 'Target Image Tag' + required: true + overwrite_imgs: + type: string + default: 'true' + description: 'Overwrite images in GHCR if they already exist for this tag.' + required: false + set_latest: + type: string + default: 'true' + description: 'Set latest tag on build images.' + required: false + add_sha_tag: + type: string + default: 'true' + description: 'Add a sha image tag.' + required: false + app_to_build: + type: string + default: '' + description: 'Provide the app name to build' + required: true + image_context: + type: string + default: '' + description: 'Provide the docker file path' + required: true + docker_file: + type: string + default: '' + description: 'Provide the docker file name' + required: true + push: + type: boolean + default: false + description: 'Whether to push image to CR or not' + required: false + workflow_dispatch: + inputs: + src_branch: + type: string + default: '' + description: 'Source branch to build KFP from' + required: true + target_tag: + type: string + default: 'X.Y.Z' + description: 'Target Image Tag' + required: true + fail_fast: + type: string + default: 'true' + description: 'Stop running entire Workflow if a single build fails' + required: true + overwrite_imgs: + type: string + default: 'true' + description: 'Overwrite images in GHCR if they already exist for this tag.' + required: true + set_latest: + type: string + default: 'true' + description: 'Set latest tag on build images.' + required: true + add_sha_tag: + type: string + default: 'true' + description: 'Add a sha image tag.' + required: false + app_to_build: + type: string + default: '' + description: 'Provide the app name to build' + required: true + image_context: + type: string + default: '' + description: 'Provide the docker file path' + required: true + docker_file: + type: string + default: '' + description: 'Provide the docker file name' + required: true + push: + type: boolean + default: false + description: 'Whether to push image to CR or not' + required: true +env: + SOURCE_BRANCH: ${{ inputs.src_branch }} + TARGET_IMAGE_TAG: ${{ inputs.target_tag }} + OVERWRITE_IMAGES: ${{ inputs.overwrite_imgs }} + IMAGE_REGISTRY: ghcr.io + IMAGE_ORG: ${{ github.repository_owner }} + SET_LATEST: ${{ inputs.set_latest }} + ADD_SHA_TAG: ${{ inputs.add_sha_tag }} + CACHE_PATH: '/tmp/.buildx-cache' + +jobs: + build-and-push-images: + if: inputs.push && inputs.src_branch + continue-on-error: false + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + attestations: write + id-token: write + steps: + - name: Checkout repository + uses: actions/checkout@v5 + with: + ref: ${{env.SOURCE_BRANCH}} + + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.IMAGE_REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Check if image tag already exists + id: check_tag + env: + IMAGE: ${{ env.IMAGE_REGISTRY }}/${{ env.IMAGE_ORG }}/${{ inputs.app_to_build }}:${{env.TARGET_IMAGE_TAG}} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} + run: | + if docker manifest inspect ${IMAGE} > /dev/null 2>&1; then + echo "Image tag already exists!" + if [ "$OVERWRITE" == "false" ]; then + echo "Overwrite is set to false, exiting." 
+ exit 1 + else + echo "Overwrite is set to true, proceeding with push." + fi + else + echo "No tag conflict, safe to push." + fi + + # This step uses docker/metadata-action to extract tags and labels + # that will be applied to the specified image. The id "meta" allows + # the output of this step to be referenced in a subsequent step. + # The images value provides the base name for the tags and labels. + - name: Extract metadata (tags, labels) for Build + id: meta + uses: docker/metadata-action@v5 + if: steps.check_tag.outcome == 'success' + with: + images: ${{ env.IMAGE_REGISTRY }}/${{ env.IMAGE_ORG }}/${{ inputs.app_to_build }} + tags: | + type=raw,value=${{env.TARGET_IMAGE_TAG}} + type=raw,value=latest,enable=${{ env.SET_LATEST == 'true'}} + type=sha,enable=${{ env.ADD_SHA_TAG == 'true' }} + + # Build the image. If the build succeeds, it pushes the image to GitHub + # Packages. It uses the context parameter to define the build's context + # as the set of files located in the specified path. + - name: Build and push Image + id: push + uses: docker/build-push-action@v6 + if: steps.check_tag.outcome == 'success' + with: + context: ${{ inputs.image_context }} + file: ${{ inputs.docker_file }} + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + + # This step generates an artifact attestation for the image, + # which is an unforgeable statement about where and how it was built. + # It increases supply chain security for people who consume the + # image. + # Ref: https://docs.github.com/en/actions/security-for-github-actions/using-artifact-attestations/using-artifact-attestations-to-establish-provenance-for-builds + - name: Generate artifact attestation + uses: actions/attest-build-provenance@v1 + if: steps.check_tag.outcome == 'success' + with: + subject-name: ${{ env.IMAGE_REGISTRY }}/${{ env.IMAGE_ORG }}/${{ inputs.app_to_build }} + subject-digest: ${{ steps.push.outputs.digest }} \ No newline at end of file diff --git a/.github/workflows/build-tools-images.yml b/.github/workflows/build-tools-images.yml new file mode 100644 index 00000000000..9ce389a4b7d --- /dev/null +++ b/.github/workflows/build-tools-images.yml @@ -0,0 +1,62 @@ +name: Build tools images + +on: + push: + branches: + - master + - 'release-*' + pull_request: + branches: + - master + - 'release-*' + workflow_dispatch: {} + +permissions: + contents: read + packages: write + +env: + IMAGE_REGISTRY: ghcr.io + IMAGE_ORG: ${{ github.repository_owner }} + IMAGE_TAG: ${{ github.event_name == 'pull_request' && github.base_ref || github.ref_name }} + +concurrency: + group: build-tools-images-${{ github.ref_name }} + cancel-in-progress: false + +jobs: + build-tools: + name: Build tools images + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v5 + + - name: Log in to the Container registry + uses: docker/login-action@v3 + if: github.event_name != 'pull_request' + with: + registry: ${{ env.IMAGE_REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push api-generator + id: build_api + uses: docker/build-push-action@v6 + with: + context: . + file: backend/api/Dockerfile + push: ${{ github.event_name != 'pull_request' }} + tags: | + ${{ env.IMAGE_REGISTRY }}/${{ env.IMAGE_ORG }}/kfp-api-generator:${{ env.IMAGE_TAG }} + + - name: Build and push release tools + uses: docker/build-push-action@v6 + with: + context: . 
+ file: test/release/Dockerfile.release + push: ${{ github.event_name != 'pull_request' }} + build-args: | + BASE_IMAGE=${{ env.IMAGE_REGISTRY }}/${{ env.IMAGE_ORG }}/kfp-api-generator:${{ env.IMAGE_TAG }} + tags: | + ${{ env.IMAGE_REGISTRY }}/${{ env.IMAGE_ORG }}/kfp-release:${{ env.IMAGE_TAG }} diff --git a/.github/workflows/ci-checks.yml b/.github/workflows/ci-checks.yml index 9b488e09d61..56503d8c04b 100644 --- a/.github/workflows/ci-checks.yml +++ b/.github/workflows/ci-checks.yml @@ -13,7 +13,7 @@ jobs: pull-requests: write steps: - name: Check out the repository - uses: actions/checkout@v3 + uses: actions/checkout@v5 - name: Check for 'needs-ok-to-test' and 'ok-to-test' labels id: label_check diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 2f0b0b472e0..8005c9f19e3 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -36,7 +36,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v5 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL diff --git a/.github/workflows/compiler-tests.yml b/.github/workflows/compiler-tests.yml new file mode 100644 index 00000000000..7f209730556 --- /dev/null +++ b/.github/workflows/compiler-tests.yml @@ -0,0 +1,71 @@ +# This workflow runs tests to verify all the API Server REST Endpoints +name: Workflow Compiler Tests +env: + TESTS_DIR: "./backend/test/compiler" + TESTS_LABEL: "WorkflowCompiler" + PYTHON_VERSION: "3.9" + +on: + push: + branches: [master] + + workflow_dispatch: + inputs: + test_label: + description: "Test label that you want to filter on and run" + default: 'ApiServerTests' + required: true + type: string + number_of_parallel_tests: + description: "Number of ginkgo nodes that you want run in parallel, it essentially is equivalent to number of parallel tests with some caveats" + default: 10 + required: true + type: number + namespace: + description: "Namespace where you want to create your pipelines in" + default: "kubeflow" + required: true + type: string + + pull_request: + paths: + - '.github/workflows/compiler-tests.yml' + - 'backend/src/v2/compiler/**' + - 'test_data/**' + - '!**/*.md' + - '!**/OWNERS' + +jobs: + compiler-tests: + runs-on: ubuntu-latest + name: Workflow Compiler Tests + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Configure Input Variables + shell: bash + id: configure + run: | + TEST_LABEL=${{ env.TESTS_LABEL }} + if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then + TEST_LABEL=${{ inputs.test_label }} + fi + echo "TEST_LABEL=$TEST_LABEL" >> $GITHUB_OUTPUT + + - name: Run Tests + uses: ./.github/actions/test-and-report + id: test-run + if: ${{ steps.configure.outcome == 'success' }} + with: + test_directory: ${{ env.TESTS_DIR }} + test_label: ${{ steps.configure.outputs.TEST_LABEL }} + python_version: ${{ env.PYTHON_VERSION }} + report_name: "Workflow Compiler Tests" + + - name: Notify test reports + shell: bash + if: ${{ steps.test-run.outcome == 'success' }} + run: | + echo "::notice title=Test Summary and HTML Report is now available in the Summary Tab" \ No newline at end of file diff --git a/.github/workflows/docs-freshness.yml b/.github/workflows/docs-freshness.yml new file mode 100644 index 00000000000..9013bbb57b4 --- /dev/null +++ b/.github/workflows/docs-freshness.yml @@ -0,0 +1,30 @@ +name: KFP Docs Freshness + +on: + push: + branches: [ master ] + pull_request: + paths: + - .github/workflows/** + - third_party/** + - README.md + +jobs: + 
test-docs-freshness: + runs-on: ubuntu-24.04 + steps: + - name: Checkout code + uses: actions/checkout@v5 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.10" + + - name: Install Dependencies + run: | + pip install ruamel.yaml + + - name: Verify Argo compatibility matrix freshness + run: | + python3 .github/scripts/verify-argo-matrix.py diff --git a/.github/workflows/e2e-test-frontend.yml b/.github/workflows/e2e-test-frontend.yml new file mode 100644 index 00000000000..2c666f592d1 --- /dev/null +++ b/.github/workflows/e2e-test-frontend.yml @@ -0,0 +1,80 @@ +name: KFP e2e frontend tests + +on: + push: + branches: [master] + + pull_request: + paths: + - '.github/workflows/e2e-test-frontend.yml' + - '.github/actions/create-cluster/**' + - 'frontend/**' + - 'manifests/kustomize/**' + - '!**/*.md' + - '!**/OWNERS' + +jobs: + build: + uses: ./.github/workflows/image-builds-with-cache.yml + + frontend-integration-test: + runs-on: ubuntu-latest + needs: build + strategy: + matrix: + k8s_version: [ "v1.29.2", "v1.31.0" ] + name: Frontend Integration Tests - K8s ${{ matrix.k8s_version }} + steps: + - name: Checkout code + uses: actions/checkout@v5 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: 3.9 + + + - name: Create KFP cluster + id: create-kfp-cluster + uses: ./.github/actions/create-cluster + with: + k8s_version: ${{ matrix.k8s_version }} + continue-on-error: true + + - name: Deploy + id: deploy + uses: ./.github/actions/deploy + if: ${{ steps.create-kfp-cluster.outcome == 'success' }} + with: + image_path: ${{ needs.build.outputs.IMAGE_PATH }} + image_tag: ${{ needs.build.outputs.IMAGE_TAG }} + image_registry: ${{ needs.build.outputs.IMAGE_REGISTRY }} + + - name: Forward Frontend port + id: forward-frontend-port + if: ${{ steps.deploy.outcome == 'success' }} + run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline-ui" 3000 3000 + continue-on-error: true + + - name: Build frontend integration tests image + working-directory: ./test/frontend-integration-test + run: docker build . 
-t kfp-frontend-integration-test:local + + - name: Frontend integration tests + id: tests + if: ${{ steps.forward-frontend-port.outcome == 'success' }} + run: docker run --net=host kfp-frontend-integration-test:local --remote-run true + continue-on-error: true + + - name: Collect failed logs + if: ${{ steps.deploy.outcome != 'success' || steps.forward-frontend-port.outcome != 'success' || steps.tests.outcome != 'success' }} + run: | + ./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt + exit 1 + + - name: Collect test results + if: always() + uses: actions/upload-artifact@v4 + with: + name: kfp-frontend-integration-test-artifacts-k8s-${{ matrix.k8s_version }} + path: /tmp/tmp*/* \ No newline at end of file diff --git a/.github/workflows/e2e-test.yml b/.github/workflows/e2e-test.yml index 8e961f6a0c6..62fab4ef418 100644 --- a/.github/workflows/e2e-test.yml +++ b/.github/workflows/e2e-test.yml @@ -1,467 +1,233 @@ -name: KFP e2e tests +name: KFP E2E Pipeline tests +env: + E2E_TESTS_DIR: "./backend/test/end2end" + NUMBER_OF_PARALLEL_NODES: 10 + CLUSTER_NAME: "kfp" + NAMESPACE: "kubeflow" + PYTHON_VERSION: "3.9" + USER_NAMESPACE: "kubeflow-user-example-com" on: push: - branches: - - master - - main - - stable - - 'rhoai-*' + branches: [master] pull_request: paths: - '.github/workflows/e2e-test.yml' + - '.github/actions/create-cluster/**' - '.github/resources/**' + - 'api/**' - 'go.mod' - 'go.sum' - 'backend/**' - - 'frontend/**' - 'proxy/**' - 'manifests/kustomize/**' - - 'test/**' - '!**/*.md' - '!**/OWNERS' -jobs: - initialization-tests-v1: - runs-on: ubuntu-latest - strategy: - matrix: - k8s_version: [ "v1.29.2", "v1.31.0" ] - name: Initialization tests v1 - K8s ${{ matrix.k8s_version }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: 3.9 - - - name: Create KFP cluster - id: create-kfp-cluster - uses: ./.github/actions/kfp-cluster - with: - k8s_version: ${{ matrix.k8s_version }} - continue-on-error: true - - - name: Forward API port - id: forward-api-port - if: ${{ steps.create-kfp-cluster.outcome == 'success' }} - run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888 - continue-on-error: true - - - name: Initialization tests v1 - id: tests - if: ${{ steps.forward-api-port.outcome == 'success' }} - working-directory: ./backend/test/initialization - run: go test -v ./... 
-namespace kubeflow -args -runIntegrationTests=true - env: - PULL_NUMBER: ${{ github.event.pull_request.number }} - continue-on-error: true - - - name: Collect failed logs - if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.tests.outcome != 'success' }} - run: | - ./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt - exit 1 - - name: Collect test results - if: always() - uses: actions/upload-artifact@v4 - with: - name: kfp-initialization-tests-v1-artifacts-k8s-${{ matrix.k8s_version }} - path: /tmp/tmp*/* - - initialization-tests-v2: - runs-on: ubuntu-latest - strategy: - matrix: - k8s_version: [ "v1.29.2", "v1.31.0" ] - name: Initialization tests v2 - K8s ${{ matrix.k8s_version }} - steps: - - name: Checkout code - uses: actions/checkout@v4 + workflow_dispatch: + inputs: + test_label: + description: "Test label that you want to filter on and run" + default: 'ApiServerTests' + required: true + type: string + number_of_parallel_tests: + description: "Number of ginkgo nodes that you want run in parallel, it essentially is equivalent to number of parallel tests with some caveats" + default: 10 + required: true + type: number + namespace: + description: "Namespace where you want to create your pipelines in" + default: "kubeflow" + required: true + type: string - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: 3.9 - - - name: Create KFP cluster - id: create-kfp-cluster - uses: ./.github/actions/kfp-cluster - with: - k8s_version: ${{ matrix.k8s_version }} - continue-on-error: true - - - name: Forward API port - id: forward-api-port - if: ${{ steps.create-kfp-cluster.outcome == 'success' }} - run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888 - continue-on-error: true - - - name: Initialization tests v2 - id: tests - if: ${{ steps.forward-api-port.outcome == 'success' }} - working-directory: ./backend/test/v2/initialization - run: go test -v ./... 
-namespace kubeflow -args -runIntegrationTests=true - env: - PULL_NUMBER: ${{ github.event.pull_request.number }} - continue-on-error: true - - - name: Collect failed logs - if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.tests.outcome != 'success' }} - run: | - ./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt - exit 1 - - - name: Collect test results - if: always() - uses: actions/upload-artifact@v4 - with: - name: kfp-initialization-tests-v2-artifacts-k8s-${{ matrix.k8s_version }} - path: /tmp/tmp*/* +jobs: + build: + uses: ./.github/workflows/image-builds-with-cache.yml - api-integration-tests-v1: + end-to-end-critical-scenario-tests: runs-on: ubuntu-latest + needs: build strategy: matrix: - k8s_version: [ "v1.29.2", "v1.31.0" ] - name: API integration tests v1 - K8s ${{ matrix.k8s_version }} + k8s_version: ["v1.31.0"] + cache_enabled: ["true", "false"] + argo_version: [ "v3.7.1", "v3.6.10", "v3.5.15"] + storage: [ "seaweedfs", "minio"] + proxy: [ "false" ] + test_label: [ "E2ECritical" ] + include: + - k8s_version: "v1.29.2" + cache_enabled: "false" + argo_version: "v3.5.15" + test_label: "E2ECritical" + - k8s_version: "v1.31.0" + cache_enabled: "false" + proxy: "true" + test_label: "E2EProxy" + - k8s_version: "v1.31.0" + cache_enabled: "false" + test_label: "E2EEssential" + - k8s_version: "v1.31.0" + cache_enabled: "false" + test_label: "E2EFailure" + fail-fast: false + name: End to End ${{ matrix.test_label}} Tests - K8s ${{ matrix.k8s_version }} cacheEnabled=${{ matrix.cache_enabled }} argoVersion=${{ matrix.argo_version}} proxy=${{ matrix.proxy}} storage=${{ matrix.storage }} steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v5 - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: 3.9 + - name: Free up disk space + run: ./.github/resources/scripts/free-disk-space.sh - - name: Create KFP cluster - id: create-kfp-cluster - uses: ./.github/actions/kfp-cluster + - name: Create cluster + uses: ./.github/actions/create-cluster + id: create-cluster with: k8s_version: ${{ matrix.k8s_version }} - continue-on-error: true - - - name: Forward API port - id: forward-api-port - if: ${{ steps.create-kfp-cluster.outcome == 'success' }} - run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888 - continue-on-error: true - - - name: Forward MySQL port - id: forward-mysql-port - if: ${{ steps.forward-api-port.outcome == 'success' }} - run: ./.github/resources/scripts/forward-port.sh "kubeflow" "mysql" 3306 3306 - continue-on-error: true - - - name: API integration tests v1 - id: tests - if: ${{ steps.forward-mysql-port.outcome == 'success' }} - working-directory: ./backend/test/integration - run: go test -v ./... 
-namespace kubeflow -args -runIntegrationTests=true - env: - PULL_NUMBER: ${{ github.event.pull_request.number }} - continue-on-error: true - - - name: Collect failed logs - if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.forward-mysql-port.outcome != 'success' || steps.tests.outcome != 'success' }} + cluster_name: ${{ env.CLUSTER_NAME }} + + - name: Deploy KFP + uses: ./.github/actions/deploy + if: ${{ steps.create-cluster.outcome == 'success' }} + id: deploy + with: + cache_enabled: ${{ matrix.cache_enabled }} + argo_version: ${{ matrix.argo_version }} + storage_backend: ${{ matrix.storage }} + image_path: ${{ needs.build.outputs.IMAGE_PATH }} + image_tag: ${{ needs.build.outputs.IMAGE_TAG }} + image_registry: ${{ needs.build.outputs.IMAGE_REGISTRY }} + + - name: Configure Input Variables + shell: bash + id: configure + if: ${{ steps.deploy.outcome == 'success' }} run: | - ./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt - exit 1 - - - name: Collect test results - if: always() - uses: actions/upload-artifact@v4 - with: - name: kfp-api-integration-tests-v1-artifacts-k8s-${{ matrix.k8s_version }} - path: /tmp/tmp*/* - - api-integration-tests-v2: - runs-on: ubuntu-latest - strategy: - matrix: - pipeline_store: [ "database", "kubernetes" ] - k8s_version: [ "v1.29.2", "v1.31.0" ] - name: API integration tests v2 - K8s with ${{ matrix.pipeline_store }} ${{ matrix.k8s_version }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: 3.9 - - - name: Create KFP cluster - id: create-kfp-cluster - uses: ./.github/actions/kfp-cluster - with: - k8s_version: ${{ matrix.k8s_version }} - pipeline_store: ${{ matrix.pipeline_store }} - continue-on-error: true - - - name: Forward API port - id: forward-api-port - if: ${{ steps.create-kfp-cluster.outcome == 'success' }} - run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888 - continue-on-error: true - - - name: Forward MLMD port - id: forward-mlmd-port - if: ${{ steps.forward-api-port.outcome == 'success' }} - run: kubectl -n kubeflow port-forward svc/metadata-grpc-service 8080:8080 & - continue-on-error: true - - - name: API integration tests v2 - id: tests - if: ${{ steps.forward-api-port.outcome == 'success' }} - working-directory: ./backend/test/v2/integration - run: go test -v ./... 
-namespace kubeflow -args -runIntegrationTests=true - env: - PULL_NUMBER: ${{ github.event.pull_request.number }} - PIPELINE_STORE: ${{ matrix.pipeline_store }} - continue-on-error: true - - - name: Collect failed logs - if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.forward-mlmd-port.outcome != 'success' || steps.tests.outcome != 'success' }} + NUMBER_OF_NODES=${{ env.NUMBER_OF_PARALLEL_NODES }} + TEST_LABEL=${{ matrix.test_label }} + NAMESPACE=${{ env.NAMESPACE }} + if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then + NUMBER_OF_NODES=${{ inputs.number_of_parallel_tests }} + TEST_LABEL=${{ inputs.test_label }} + NAMESPACE=${{ inputs.namespace }} + fi + echo "NUMBER_OF_NODES=$NUMBER_OF_NODES" >> $GITHUB_OUTPUT + echo "TEST_LABEL=$TEST_LABEL" >> $GITHUB_OUTPUT + echo "NAMESPACE=$NAMESPACE" >> $GITHUB_OUTPUT + + - name: Build and upload the sample Modelcar image to Kind + id: build-sample-modelcar-image + if: ${{ steps.deploy.outcome == 'success' }} run: | - ./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt - exit 1 - - - name: Collect test results - if: always() - uses: actions/upload-artifact@v4 - with: - name: kfp-api-integration-tests-v2-artifacts-k8s-${{ matrix.k8s_version }}-${{ matrix.pipeline_store }} - path: /tmp/tmp*/* - - api-integration-tests-v2-with-proxy: - runs-on: ubuntu-latest - strategy: - matrix: - k8s_version: [ "v1.31.0" ] - name: API integration tests v2 with proxy - K8s ${{ matrix.k8s_version }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: 3.9 - - - name: Create KFP cluster - id: create-kfp-cluster - uses: ./.github/actions/kfp-cluster - with: - k8s_version: ${{ matrix.k8s_version }} - proxy: 'true' - continue-on-error: true - - - name: Forward API port - id: forward-api-port - if: ${{ steps.create-kfp-cluster.outcome == 'success' }} - run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888 - continue-on-error: true - - - name: Forward MLMD port - id: forward-mlmd-port - if: ${{ steps.forward-api-port.outcome == 'success' }} - run: kubectl -n kubeflow port-forward svc/metadata-grpc-service 8080:8080 & - continue-on-error: true - - - name: API integration tests v2 - id: tests - if: ${{ steps.forward-mlmd-port.outcome == 'success' }} - working-directory: ./backend/test/v2/integration - run: go test -v ./... -namespace kubeflow -args -runIntegrationTests=true -useProxy=true - env: - PULL_NUMBER: ${{ github.event.pull_request.number }} - continue-on-error: true - - - name: Collect failed logs - if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.tests.outcome != 'success' }} + docker build -f ./test_data/sdk_compiled_pipelines/valid/critical/modelcar/Dockerfile -t registry.domain.local/modelcar:test . 
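+          # kind load makes the locally built image available on the cluster nodes, so the
+          # test pipelines can use registry.domain.local/modelcar:test without an external registry.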
+ kind --name kfp load docker-image registry.domain.local/modelcar:test + continue-on-error: true + + - name: Run Tests + uses: ./.github/actions/test-and-report + id: test-run + if: ${{ steps.configure.outcome == 'success' }} + with: + cache_enabled: ${{ matrix.cache_enabled }} + test_directory: ${{ env.E2E_TESTS_DIR }} + test_label: ${{ steps.configure.outputs.TEST_LABEL }} + num_parallel_nodes: ${{ steps.configure.outputs.NUMBER_OF_NODES }} + default_namespace: ${{ steps.configure.outputs.NAMESPACE }} + python_version: ${{ env.PYTHON_VERSION }} + report_name: "${{ matrix.test_label}}Tests_K8s=${{ matrix.k8s_version }}_cacheEnabled=${{ matrix.cache_enabled }}_argoVersion=${{ matrix.argo_version}}_proxy=${{ matrix.proxy }}_storage=${{ matrix.storage }}" + + - name: Notify test reports + shell: bash + if: ${{ steps.test-run.outcome == 'success' }} run: | - ./.github/resources/scripts/collect-logs.sh --ns squid --output /tmp/tmp_squid_pod_log.txt - ./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt - exit 1 - - - name: Collect test results - if: always() - uses: actions/upload-artifact@v4 - with: - name: kfp-api-integration-tests-v2-with-proxy-artifacts-k8s-${{ matrix.k8s_version }} - path: /tmp/tmp*/* + echo "::notice title=Test Summary and HTML Report is now available in the Summary Tab" - api-integration-tests-v2-with-cache-disabled: + end-to-end-critical-scenario-multi-user-tests: runs-on: ubuntu-latest + needs: build strategy: matrix: - k8s_version: [ "v1.31.0" ] - name: API integration tests v2 with cache disabled - K8s ${{ matrix.k8s_version }} + k8s_version: ["v1.31.0"] + cache_enabled: ["true", "false"] + storage: [ "seaweedfs", "minio"] + multi_user: [ "true" ] + test_label: [ "E2ECritical" ] + fail-fast: false + name: End to End Critical Scenario Multi User Tests - K8s ${{ matrix.k8s_version }} cacheEnabled=${{ matrix.cache_enabled }} multiUser=${{ matrix.multi_user }} argoVersion=${{ matrix.argo_version}} proxy=${{ matrix.proxy}} storage=${{ matrix.storage }} steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v5 - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: 3.9 + - name: Free up disk space + run: ./.github/resources/scripts/free-disk-space.sh - - name: Create KFP cluster - id: create-kfp-cluster - uses: ./.github/actions/kfp-cluster + - name: Create cluster + uses: ./.github/actions/create-cluster + id: create-cluster with: k8s_version: ${{ matrix.k8s_version }} - cache_enabled: 'false' - continue-on-error: true - - - name: Forward API port - id: forward-api-port - if: ${{ steps.create-kfp-cluster.outcome == 'success' }} - run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888 - continue-on-error: true - - - name: Forward MLMD port - id: forward-mlmd-port - if: ${{ steps.forward-api-port.outcome == 'success' }} - run: kubectl -n kubeflow port-forward svc/metadata-grpc-service 8080:8080 & - continue-on-error: true - - - name: API integration tests v2 - id: tests - if: ${{ steps.forward-mlmd-port.outcome == 'success' }} - working-directory: ./backend/test/v2/integration - run: go test -v ./... 
-namespace kubeflow -args -runIntegrationTests=true -cacheEnabled=false - env: - PULL_NUMBER: ${{ github.event.pull_request.number }} - continue-on-error: true - - - name: Collect failed logs - if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.tests.outcome != 'success' }} + cluster_name: ${{ env.CLUSTER_NAME }} + + - name: Deploy KFP + uses: ./.github/actions/deploy + if: ${{ steps.create-cluster.outcome == 'success' }} + id: deploy + with: + cache_enabled: ${{ matrix.cache_enabled }} + multi_user: "true" + storage_backend: ${{ matrix.storage }} + image_path: ${{ needs.build.outputs.IMAGE_PATH }} + image_tag: ${{ needs.build.outputs.IMAGE_TAG }} + image_registry: ${{ needs.build.outputs.IMAGE_REGISTRY }} + + - name: Configure Input Variables + shell: bash + id: configure + if: ${{ steps.deploy.outcome == 'success' }} run: | - ./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt - exit 1 - - - name: Collect test results - if: always() - uses: actions/upload-artifact@v4 - with: - name: kfp-api-integration-tests-v2-with-cache-disabled-artifacts-k8s-${{ matrix.k8s_version }} - path: /tmp/tmp*/* - - frontend-integration-test: - runs-on: ubuntu-latest - strategy: - matrix: - k8s_version: [ "v1.29.2", "v1.31.0" ] - name: Frontend Integration Tests - K8s ${{ matrix.k8s_version }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: 3.9 - - - name: Create KFP cluster - id: create-kfp-cluster - uses: ./.github/actions/kfp-cluster - with: - k8s_version: ${{ matrix.k8s_version }} - continue-on-error: true - - - name: Forward API port - id: forward-api-port - if: ${{ steps.create-kfp-cluster.outcome == 'success' }} - run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888 - continue-on-error: true - - - name: Forward Frontend port - id: forward-frontend-port - if: ${{ steps.forward-api-port.outcome == 'success' }} - run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline-ui" 3000 3000 - continue-on-error: true - - - name: Build frontend integration tests image - working-directory: ./test/frontend-integration-test - run: docker build . 
-t kfp-frontend-integration-test:local - - - name: Frontend integration tests - id: tests - if: ${{ steps.forward-frontend-port.outcome == 'success' }} - run: docker run --net=host kfp-frontend-integration-test:local --remote-run true - continue-on-error: true - - - name: Collect failed logs - if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.forward-frontend-port.outcome != 'success' || steps.tests.outcome != 'success' }} + NUMBER_OF_NODES=${{ env.NUMBER_OF_PARALLEL_NODES }} + TEST_LABEL=${{ matrix.test_label }} + NAMESPACE=${{ env.NAMESPACE }} + if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then + NUMBER_OF_NODES=${{ inputs.number_of_parallel_tests }} + TEST_LABEL=${{ inputs.test_label }} + NAMESPACE=${{ inputs.namespace }} + fi + echo "NUMBER_OF_NODES=$NUMBER_OF_NODES" >> $GITHUB_OUTPUT + echo "TEST_LABEL=$TEST_LABEL" >> $GITHUB_OUTPUT + echo "NAMESPACE=$NAMESPACE" >> $GITHUB_OUTPUT + + - name: Build and upload the sample Modelcar image to Kind + id: build-sample-modelcar-image + if: ${{ steps.deploy.outcome == 'success' }} run: | - ./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt - exit 1 - - - name: Collect test results - if: always() - uses: actions/upload-artifact@v4 - with: - name: kfp-frontend-integration-test-artifacts-k8s-${{ matrix.k8s_version }} - path: /tmp/tmp*/* - - basic-sample-tests: - runs-on: ubuntu-latest - strategy: - matrix: - k8s_version: [ "v1.29.2", "v1.31.0" ] - name: Basic Sample Tests - K8s ${{ matrix.k8s_version }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: 3.9 - - - name: Create KFP cluster - id: create-kfp-cluster - uses: ./.github/actions/kfp-cluster - with: - k8s_version: ${{ matrix.k8s_version }} + docker build -f ./test_data/sdk_compiled_pipelines/valid/critical/modelcar/Dockerfile -t registry.domain.local/modelcar:test . 
+ kind --name kfp load docker-image registry.domain.local/modelcar:test continue-on-error: true - - name: Forward API port - id: forward-api-port - if: ${{ steps.create-kfp-cluster.outcome == 'success' }} - run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888 - continue-on-error: true - - - name: Install prerequisites - if: ${{ steps.create-kfp-cluster.outcome == 'success' }} - run: pip3 install -r ./test/sample-test/requirements.txt - - - name: Basic sample tests - sequential - if: ${{ steps.create-kfp-cluster.outcome == 'success' }} - id: sequential-test - run: python3 ./test/sample-test/sample_test_launcher.py sample_test run_test --namespace kubeflow --test-name sequential --results-gcs-dir output - - - name: Basic sample tests - exit_handler - if: ${{ steps.create-kfp-cluster.outcome == 'success' }} - id: sample-test - run: python3 ./test/sample-test/sample_test_launcher.py sample_test run_test --namespace kubeflow --test-name exit_handler --expected-result failed --results-gcs-dir output - continue-on-error: true - - - name: Collect failed logs - if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.sequential-test.outcome != 'success' || steps.sample-test.outcome != 'success'}} + - name: Run Tests + uses: ./.github/actions/test-and-report + if: ${{ steps.configure.outcome == 'success' }} + id: test-run + env: + LOCAL_API_SERVER: "true" + with: + multi_user: ${{ matrix.multi_user }} + cache_enabled: ${{ matrix.cache_enabled }} + test_directory: ${{ env.E2E_TESTS_DIR }} + test_label: ${{ steps.configure.outputs.TEST_LABEL }} + num_parallel_nodes: ${{ steps.configure.outputs.NUMBER_OF_NODES }} + default_namespace: ${{ steps.configure.outputs.NAMESPACE }} + python_version: ${{ env.PYTHON_VERSION }} + user_namespace: ${{ env.USER_NAMESPACE }} + report_name: "E2EMultiUserTests_K8s=${{ matrix.k8s_version }}_cacheEnabled=${{ matrix.cache_enabled }}_multiUser=${{ matrix.multi_user }}_storage=${{ matrix.storage }}" + + - name: Notify test reports + shell: bash + if: ${{ steps.test-run.outcome == 'success' }} run: | - ./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt - exit 1 - - - name: Collect test results - if: always() - uses: actions/upload-artifact@v4 - with: - name: kfp-e2e-tests-artifacts-k8s-${{ matrix.k8s_version }} - path: /tmp/tmp*/* + echo "::notice title=Test Summary and HTML Report is now available in the Summary Tab" \ No newline at end of file diff --git a/.github/workflows/frontend.yml b/.github/workflows/frontend.yml index 0cc662a9842..e9516a97e05 100644 --- a/.github/workflows/frontend.yml +++ b/.github/workflows/frontend.yml @@ -26,7 +26,7 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v2 + uses: actions/checkout@v5 - name: Set up Node.js uses: actions/setup-node@v4 diff --git a/.github/workflows/gcpc-modules-tests.yml b/.github/workflows/gcpc-modules-tests.yml index 6fbf2e2b048..97bf699bdf9 100644 --- a/.github/workflows/gcpc-modules-tests.yml +++ b/.github/workflows/gcpc-modules-tests.yml @@ -20,19 +20,16 @@ jobs: runs-on: ubuntu-24.04 steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Set up Python uses: actions/setup-python@v4 with: python-version: 3.9 - - - name: apt-get update - run: sudo apt-get update - name: Install protobuf-compiler - run: sudo apt-get install protobuf-compiler -y - + run: sudo apt update && sudo apt install -y protobuf-compiler + - name: Install setuptools run: | 
pip3 install setuptools @@ -41,19 +38,19 @@ jobs: - name: Install Wheel run: pip3 install wheel==0.42.0 - - name: Install python sdk + - name: Install python sdk run: pip install sdk/python + - name: Install google-cloud component + run: pip install components/google-cloud + - name: Generate API proto files working-directory: ./api run: make clean python - name: Install kfp-pipeline-spec from source run: | - python3 -m pip install api/v2alpha1/python - - - name: Install google-cloud component - run: pip install components/google-cloud + python3 -m pip install -I api/v2alpha1/python - name: Install Pytest run: pip install $(grep 'pytest==' sdk/python/requirements-dev.txt) diff --git a/.github/workflows/image-builds-main.yml b/.github/workflows/image-builds-main.yml deleted file mode 100644 index c6398957ea7..00000000000 --- a/.github/workflows/image-builds-main.yml +++ /dev/null @@ -1,16 +0,0 @@ -name: Build images from sources and push to master. -run-name: Build images for master -on: - push: - branches: - - master -jobs: - image-builds: - uses: ./.github/workflows/image-builds.yml - with: - src_branch: master - target_tag: master - fail_fast: true - overwrite_imgs: true - set_latest: false - add_sha_tag: 'false' diff --git a/.github/workflows/image-builds-master.yml b/.github/workflows/image-builds-master.yml new file mode 100644 index 00000000000..d0a1bff1a99 --- /dev/null +++ b/.github/workflows/image-builds-master.yml @@ -0,0 +1,61 @@ +name: Build and Push for master +on: + push: + branches: + - master +jobs: + build-images-for-master: + strategy: + fail-fast: true + matrix: + include: + - image: kfp-api-server + dockerfile: backend/Dockerfile + context: . + - image: kfp-frontend + dockerfile: frontend/Dockerfile + context: . + - image: kfp-persistence-agent + dockerfile: backend/Dockerfile.persistenceagent + context: . + - image: kfp-scheduled-workflow-controller + dockerfile: backend/Dockerfile.scheduledworkflow + context: . + - image: kfp-viewer-crd-controller + dockerfile: backend/Dockerfile.viewercontroller + context: . + - image: kfp-visualization-server + dockerfile: backend/Dockerfile.visualization + context: . + - image: kfp-launcher + dockerfile: backend/Dockerfile.launcher + context: . + - image: kfp-driver + dockerfile: backend/Dockerfile.driver + context: . + - image: kfp-cache-deployer + dockerfile: backend/src/cache/deployer/Dockerfile + context: . + - image: kfp-cache-server + dockerfile: backend/Dockerfile.cacheserver + context: . + - image: kfp-metadata-writer + dockerfile: backend/metadata_writer/Dockerfile + context: . + - image: kfp-metadata-envoy + dockerfile: third_party/metadata_envoy/Dockerfile + context: . 
+ - image: kfp-inverse-proxy-agent + dockerfile: proxy/Dockerfile + context: ./proxy + uses: ./.github/workflows/build-and-push.yml + with: + src_branch: master + target_tag: master + overwrite_imgs: true + set_latest: false + add_sha_tag: 'false' + app_to_build: ${{ matrix.image }} + image_context: ${{ matrix.context }} + docker_file: ${{ matrix.dockerfile }} + push: true diff --git a/.github/workflows/image-builds-release.yml b/.github/workflows/image-builds-release.yml new file mode 100644 index 00000000000..776a5975f5f --- /dev/null +++ b/.github/workflows/image-builds-release.yml @@ -0,0 +1,85 @@ +name: Build and Push for Release +on: + workflow_dispatch: + inputs: + src_branch: + type: string + default: 'release-X.Y' + description: 'Source branch to build KFP from' + required: true + target_tag: + type: string + default: 'X.Y.Z' + description: 'Target Image Tag' + required: true + overwrite_imgs: + type: string + default: 'true' + description: 'Overwrite images in GHCR if they already exist for this tag.' + required: true + set_latest: + type: string + default: 'true' + description: 'Set latest tag on build images.' + required: true + add_sha_tag: + type: string + default: 'true' + description: 'Add a sha image tag.' + required: false +jobs: + build-images-for-release: + strategy: + fail-fast: true + matrix: + include: + - image: kfp-api-server + dockerfile: backend/Dockerfile + context: . + - image: kfp-frontend + dockerfile: frontend/Dockerfile + context: . + - image: kfp-persistence-agent + dockerfile: backend/Dockerfile.persistenceagent + context: . + - image: kfp-scheduled-workflow-controller + dockerfile: backend/Dockerfile.scheduledworkflow + context: . + - image: kfp-viewer-crd-controller + dockerfile: backend/Dockerfile.viewercontroller + context: . + - image: kfp-visualization-server + dockerfile: backend/Dockerfile.visualization + context: . + - image: kfp-launcher + dockerfile: backend/Dockerfile.launcher + context: . + - image: kfp-driver + dockerfile: backend/Dockerfile.driver + context: . + - image: kfp-cache-deployer + dockerfile: backend/src/cache/deployer/Dockerfile + context: . + - image: kfp-cache-server + dockerfile: backend/Dockerfile.cacheserver + context: . + - image: kfp-metadata-writer + dockerfile: backend/metadata_writer/Dockerfile + context: . + - image: kfp-metadata-envoy + dockerfile: third_party/metadata_envoy/Dockerfile + context: . 
+ - image: kfp-inverse-proxy-agent + dockerfile: proxy/Dockerfile + context: ./proxy + uses: ./.github/workflows/build-and-push.yml + with: + src_branch: ${{ inputs.src_branch }} + target_tag: ${{ inputs.target_tag }} + overwrite_imgs: ${{ inputs.overwrite_imgs }} + set_latest: ${{ inputs.set_latest }} + add_sha_tag: ${{ inputs.add_sha_tag }} + app_to_build: ${{ matrix.image }} + image_context: ${{ matrix.context }} + docker_file: ${{ matrix.dockerfile }} + push: true diff --git a/.github/workflows/image-builds-with-cache.yml b/.github/workflows/image-builds-with-cache.yml new file mode 100644 index 00000000000..daff5aeb22b --- /dev/null +++ b/.github/workflows/image-builds-with-cache.yml @@ -0,0 +1,116 @@ +name: Build and Cache +on: + workflow_call: + outputs: + IMAGE_PATH: + description: 'A description of my output' + value: ${{ jobs.image-build-with-cache.outputs.image_path }} + IMAGE_TAG: + description: 'A description of my output' + value: ${{ jobs.image-build-with-cache.outputs.image_tag }} + IMAGE_REGISTRY: + description: 'A description of my output' + value: ${{ jobs.image-build-with-cache.outputs.registry }} + +env: + IMAGE_TAG: "latest" + REGISTRY: "kind-registry:5000" + +jobs: + image-build-with-cache: + runs-on: ubuntu-latest + outputs: + image_path: ${{ steps.configure.outputs.artifact_path }} + image_tag: ${{ steps.configure.outputs.image_tag }} + registry: ${{ steps.configure.outputs.registry }} + strategy: + fail-fast: true + matrix: + include: + - image: apiserver + dockerfile: backend/Dockerfile + context: . + - image: persistenceagent + dockerfile: backend/Dockerfile.persistenceagent + context: . + - image: scheduledworkflow + dockerfile: backend/Dockerfile.scheduledworkflow + context: . + - image: launcher + dockerfile: backend/Dockerfile.launcher + context: . + - image: driver + dockerfile: backend/Dockerfile.driver + context: . + + - image: frontend + dockerfile: frontend/Dockerfile + context: . 
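+    # Each matrix entry is built once, saved as <image>.tar under images_<sha>, and uploaded as a
+    # short-lived artifact that the deploy action later downloads and pushes to the local kind registry.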
+ env: + ARTIFACT_NAME: "${{ matrix.image }}" + ARTIFACTS_PATH: "images_${{ github.sha }}" + steps: + - name: Checkout repository + uses: actions/checkout@v5 + + - name: Create Artifacts Path Directory + id: configure + run: | + mkdir -p ${{ env.ARTIFACTS_PATH }} + echo "artifact_path=${{ env.ARTIFACTS_PATH }}" >> $GITHUB_OUTPUT + echo "image_tag=${{ env.IMAGE_TAG }}" >> $GITHUB_OUTPUT + echo "registry=${{ env.REGISTRY }}" >> $GITHUB_OUTPUT + + # Check if the image tarball already exists or not, if yes, then skip building the image + - name: Attempt to download the artifact + uses: actions/download-artifact@v4 + with: + name: ${{ env.ARTIFACT_NAME }} + path: ${{ env.ARTIFACTS_PATH }} + continue-on-error: true + id: artifact-download + + # If the image tarball was successfully downloaded, then clean it up as the download should + # happen when image tarballs are actually required for deployment (most likely in a new workflow/job) + - name: Delete the artifact downloaded artifact + if: ${{ steps.artifact-download.outcome == 'success' }} + run: | + rm -rf ${{ env.ARTIFACTS_PATH }}/${{ env.ARTIFACT_NAME }} + shell: bash + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + if: ${{ steps.artifact-download.outcome == 'failure' }} + id: setup-buildx + + - name: Build and save Docker image + uses: docker/build-push-action@v5 + if: ${{ steps.setup-buildx.outcome == 'success' }} + id: save-image + with: + context: ${{ matrix.context }} + file: ${{ matrix.dockerfile }} + push: false + tags: ${{ env.REGISTRY }}/${{ matrix.image }}:${{ env.IMAGE_TAG }} + outputs: type=docker,dest=${{ env.ARTIFACTS_PATH }}/${{ env.ARTIFACT_NAME }}.tar + + - name: Rebuild Images in case of failure + uses: docker/build-push-action@v5 + if: ${{ steps.save-image.outcome != 'success' }} + id: rebuild + with: + context: ${{ matrix.context }} + file: ${{ matrix.dockerfile }} + push: false + tags: ${{ env.REGISTRY }}/${{ matrix.image }}:${{ env.IMAGE_TAG }} + outputs: type=docker,dest=${{ env.ARTIFACTS_PATH }}/${{ env.ARTIFACT_NAME }}.tar + + - name: Upload artifact + uses: actions/upload-artifact@v4 + if: ${{ steps.save-image.outcome == 'success' || steps.rebuild.outcome == 'success' }} + with: + name: ${{ env.ARTIFACT_NAME }} + path: ${{ env.ARTIFACTS_PATH }}/${{ env.ARTIFACT_NAME }}.tar + retention-days: 1 + # Continue the workflow even if the upload failed, because upload can fail if other jobs were able to upload artifact first before the current one + continue-on-error: true \ No newline at end of file diff --git a/.github/workflows/image-builds.yml b/.github/workflows/image-builds.yml deleted file mode 100644 index ed0531be143..00000000000 --- a/.github/workflows/image-builds.yml +++ /dev/null @@ -1,198 +0,0 @@ -name: Build images from sources. -run-name: Build images -on: - workflow_call: - inputs: - src_branch: - type: string - default: 'release-X.Y' - description: 'Source branch to build KFP from' - required: true - target_tag: - type: string - default: 'X.Y.Z' - description: 'Target Image Tag' - required: true - fail_fast: - type: string - default: 'true' - description: 'Stop running entire Workflow if a single build fails' - required: true - overwrite_imgs: - type: string - default: 'true' - description: 'Overwrite images in GHCR if they already exist for this tag.' - required: true - set_latest: - type: string - default: 'true' - description: 'Set latest tag on build images.' - required: true - add_sha_tag: - type: string - default: 'true' - description: 'Add a sha image tag.' 
- required: false - workflow_dispatch: - inputs: - src_branch: - type: string - default: 'release-X.Y' - description: 'Source branch to build KFP from' - required: true - target_tag: - type: string - default: 'X.Y.Z' - description: 'Target Image Tag' - required: true - fail_fast: - type: string - default: 'true' - description: 'Stop running entire Workflow if a single build fails' - required: true - overwrite_imgs: - type: string - default: 'true' - description: 'Overwrite images in GHCR if they already exist for this tag.' - required: true - set_latest: - type: string - default: 'true' - description: 'Set latest tag on build images.' - required: true - add_sha_tag: - type: string - default: 'true' - description: 'Add a sha image tag.' - required: false -env: - SOURCE_BRANCH: ${{ inputs.src_branch }} - TARGET_IMAGE_TAG: ${{ inputs.target_tag }} - OVERWRITE_IMAGES: ${{ inputs.overwrite_imgs }} - IMAGE_REGISTRY: ghcr.io - IMAGE_ORG: ${{ github.repository_owner }} - SET_LATEST: ${{ inputs.set_latest }} - ADD_SHA_TAG: ${{ inputs.add_sha_tag }} -jobs: - build-images-with-tag: - continue-on-error: false - runs-on: ubuntu-latest - permissions: - contents: read - packages: write - attestations: write - id-token: write - strategy: - fail-fast: ${{ inputs.fail_fast == 'true' }} - matrix: - include: - - image: kfp-api-server - dockerfile: backend/Dockerfile - context: . - - image: kfp-frontend - dockerfile: frontend/Dockerfile - context: . - - image: kfp-persistence-agent - dockerfile: backend/Dockerfile.persistenceagent - context: . - - image: kfp-scheduled-workflow-controller - dockerfile: backend/Dockerfile.scheduledworkflow - context: . - - image: kfp-viewer-crd-controller - dockerfile: backend/Dockerfile.viewercontroller - context: . - - image: kfp-visualization-server - dockerfile: backend/Dockerfile.visualization - context: . - - image: kfp-launcher - dockerfile: backend/Dockerfile.launcher - context: . - - image: kfp-driver - dockerfile: backend/Dockerfile.driver - context: . - - image: kfp-cache-deployer - dockerfile: backend/src/cache/deployer/Dockerfile - context: . - - image: kfp-cache-server - dockerfile: backend/Dockerfile.cacheserver - context: . - - image: kfp-metadata-writer - dockerfile: backend/metadata_writer/Dockerfile - context: . - - image: kfp-metadata-envoy - dockerfile: third_party/metadata_envoy/Dockerfile - context: . - - image: kfp-inverse-proxy-agent - dockerfile: proxy/Dockerfile - context: ./proxy - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - ref: ${{env.SOURCE_BRANCH}} - - name: Log in to the Container registry - uses: docker/login-action@v3 - with: - registry: ${{ env.IMAGE_REGISTRY }} - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Check if image tag already exists - id: check_tag - env: - IMAGE: ${{ env.IMAGE_REGISTRY }}/${{ env.IMAGE_ORG }}/${{ matrix.image }}:${{env.TARGET_IMAGE_TAG}} - OVERWRITE: ${{ env.OVERWRITE_IMAGES }} - run: | - if docker manifest inspect ${IMAGE} > /dev/null 2>&1; then - echo "Image tag already exists!" - if [ "$OVERWRITE" == "false" ]; then - echo "Overwrite is set to false, exiting." - exit 1 - else - echo "Overwrite is set to true, proceeding with push." - fi - else - echo "No tag conflict, safe to push." - fi - - # This step uses docker/metadata-action to extract tags and labels - # that will be applied to the specified image. The id "meta" allows - # the output of this step to be referenced in a subsequent step. 
- # The images value provides the base name for the tags and labels. - - name: Extract metadata (tags, labels) for Build - id: meta - uses: docker/metadata-action@v5 - if: steps.check_tag.outcome == 'success' - with: - images: ${{ env.IMAGE_REGISTRY }}/${{ env.IMAGE_ORG }}/${{ matrix.image }} - tags: | - type=raw,value=${{env.TARGET_IMAGE_TAG}} - type=raw,value=latest,enable=${{ env.SET_LATEST == 'true'}} - type=sha,enable=${{ env.ADD_SHA_TAG == 'true' }} - - # Build the image. If the build succeeds, it pushes the image to GitHub - # Packages. It uses the context parameter to define the build's context - # as the set of files located in the specified path. - - name: Build and push Image - id: push - uses: docker/build-push-action@v6 - if: steps.check_tag.outcome == 'success' - with: - context: ${{ matrix.context }} - file: ${{ matrix.dockerfile }} - push: true - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} - - # This step generates an artifact attestation for the image, - # which is an unforgeable statement about where and how it was built. - # It increases supply chain security for people who consume the - # image. - # Ref: https://docs.github.com/en/actions/security-for-github-actions/using-artifact-attestations/using-artifact-attestations-to-establish-provenance-for-builds - - name: Generate artifact attestation - uses: actions/attest-build-provenance@v1 - if: steps.check_tag.outcome == 'success' - with: - subject-name: ${{ env.IMAGE_REGISTRY }}/${{ env.IMAGE_ORG }}/${{ matrix.image }} - subject-digest: ${{ steps.push.outputs.digest }} - diff --git a/.github/workflows/integration-tests-v1.yml b/.github/workflows/integration-tests-v1.yml new file mode 100644 index 00000000000..9abe2356b9b --- /dev/null +++ b/.github/workflows/integration-tests-v1.yml @@ -0,0 +1,92 @@ +name: KFP API Integration v1 tests + +on: + push: + branches: [master] + + pull_request: + paths: + - '.github/workflows/api-integration-tests-v1.yml' + - '.github/actions/create-cluster/**' + - '.github/resources/**' + - 'api/**' + - 'go.mod' + - 'go.sum' + - 'backend/**' + - 'manifests/kustomize/**' + - 'test/**' + - '!**/*.md' + - '!**/OWNERS' + +jobs: + build: + uses: ./.github/workflows/image-builds-with-cache.yml + + initialization-integration-tests-v1: + runs-on: ubuntu-latest + needs: build + strategy: + matrix: + k8s_version: [ "v1.29.2", "v1.31.0" ] + name: Initialization & Integration tests v1 - K8s ${{ matrix.k8s_version }} + steps: + - name: Checkout code + uses: actions/checkout@v5 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: 3.9 + + - name: Create KFP cluster + id: create-kfp-cluster + uses: ./.github/actions/create-cluster + with: + k8s_version: ${{ matrix.k8s_version }} + continue-on-error: true + + - name: Deploy + id: deploy + uses: ./.github/actions/deploy + if: ${{ steps.create-kfp-cluster.outcome == 'success' }} + with: + image_path: ${{ needs.build.outputs.IMAGE_PATH }} + image_tag: ${{ needs.build.outputs.IMAGE_TAG }} + image_registry: ${{ needs.build.outputs.IMAGE_REGISTRY }} + + - name: Forward MySQL port + id: forward-mysql-port + if: ${{ steps.deploy.outcome == 'success' }} + run: ./.github/resources/scripts/forward-port.sh "kubeflow" "mysql" 3306 3306 + continue-on-error: true + + - name: Initialization tests v1 + id: initialization-tests + if: ${{ steps.forward-mysql-port.outcome == 'success' }} + working-directory: ./backend/test/initialization + run: go test -v ./... 
-namespace=kubeflow -args -runIntegrationTests=true + env: + PULL_NUMBER: ${{ github.event.pull_request.number }} + continue-on-error: true + + - name: API integration tests v1 + id: integration-tests + if: ${{ steps.forward-mysql-port.outcome == 'success' }} + working-directory: ./backend/test/integration + run: go test -v ./... -args -runIntegrationTests=true -namespace=kubeflow + env: + PULL_NUMBER: ${{ github.event.pull_request.number }} + continue-on-error: true + + - name: Collect failed logs + if: ${{ steps.forward-mysql-port.outcome != 'success' || steps.integration-tests.outcome != 'success' || steps.initialization-tests.outcome != 'success' }} + run: | + ./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt + exit 1 + + - name: Collect test results + if: always() + uses: actions/upload-artifact@v4 + with: + name: kfp-initialization-tests-v1-artifacts-k8s-${{ matrix.k8s_version }} + path: /tmp/tmp*/* \ No newline at end of file diff --git a/.github/workflows/kfp-kubernetes-execution-tests.yml b/.github/workflows/kfp-kubernetes-execution-tests.yml deleted file mode 100644 index eb12241a1aa..00000000000 --- a/.github/workflows/kfp-kubernetes-execution-tests.yml +++ /dev/null @@ -1,142 +0,0 @@ -name: k8s execution tests - -on: - push: - branches: - - master - - main - - stable - - 'rhoai-*' - - pull_request: - paths: - - '.github/workflows/kfp-kubernetes-execution-tests.yml' - - '.github/resources/**' - - 'sdk/python/**' - - 'api/v2alpha1/**' - - 'kubernetes_platform/**' - - 'backend/**' - - '!**/*.md' - - '!**/OWNERS' - -jobs: - kfp-kubernetes-execution-tests: - runs-on: ubuntu-24.04 - strategy: - matrix: - k8s_version: [ "v1.29.2", "v1.31.0" ] - name: kfp-kubernetes execution tests - K8s ${{ matrix.k8s_version }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: '3.9' - - # This is intended to address disk space issues that have surfaced - # intermittently during CI - - # https://github.com/actions/runner-images/issues/2840#issuecomment-1284059930 - - name: Free up space in /dev/root - run: | - echo "Disk usage before clean up:" - df -h - sudo rm -rf /usr/share/dotnet - sudo rm -rf "$AGENT_TOOLSDIRECTORY" - echo "Disk usage after clean up:" - df -h - - - name: Create KFP cluster - id: create-kfp-cluster - uses: ./.github/actions/kfp-cluster - with: - k8s_version: ${{ matrix.k8s_version }} - continue-on-error: true - - - name: Forward API port - id: forward-api-port - if: ${{ steps.create-kfp-cluster.outcome == 'success' }} - run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888 - continue-on-error: true - - - name: apt-get update - id: apt-get-update - if: ${{ steps.forward-api-port.outcome == 'success' }} - run: sudo apt-get update - - - name: Install protobuf-compiler - id: install-protobuf-compiler - if: ${{ steps.apt-get-update.outcome == 'success' }} - run: sudo apt-get install protobuf-compiler -y - - - name: Install setuptools - id: install-setuptools - if: ${{ steps.install-protobuf-compiler.outcome == 'success' }} - run: | - pip3 install setuptools - pip3 freeze - - - name: Install Wheel - id: install-wheel - if: ${{ steps.install-setuptools.outcome == 'success' }} - run: pip3 install wheel==0.42.0 - - - name: Install protobuf - id: install-protobuf - if: ${{ steps.install-wheel.outcome == 'success' }} - run: pip3 install protobuf==4.25.3 - - - name: Generate API proto files - id: generate-api-proto-files - if: ${{ 
steps.install-protobuf.outcome == 'success' }} - working-directory: ./api - run: make clean python - - - name: Install kfp-pipeline-spec from source - id: install-kfp-pipeline-spec - if: ${{ steps.generate-api-proto-files.outcome == 'success' }} - run: | - python3 -m pip install api/v2alpha1/python - - - name: Generate kfp-kubernetes proto files from source - id: generate-kfp-kubernetes-proto-files - if: ${{ steps.install-kfp-pipeline-spec.outcome == 'success' }} - working-directory: ./kubernetes_platform - run: make clean python - - - name: Install kfp-kubernetes from source - id: install-kfp-kubernetes - if: ${{ steps.generate-kfp-kubernetes-proto-files.outcome == 'success' }} - run: | - pip install -e ./kubernetes_platform/python[dev] - - - name: Install requirements - id: install-requirements - if: ${{ steps.install-kfp-kubernetes.outcome == 'success' }} - run: pip install -r ./test/kfp-kubernetes-execution-tests/requirements.txt - - - name: Run tests - id: test - if: ${{ steps.install-requirements.outcome == 'success' }} - env: - PULL_NUMBER: ${{ github.event.pull_request.number }} - REPO_NAME: ${{ github.repository }} - run: | - export KFP_ENDPOINT="http://localhost:8888" - export TIMEOUT_SECONDS=2700 - pytest ./test/kfp-kubernetes-execution-tests/sdk_execution_tests.py --asyncio-task-timeout $TIMEOUT_SECONDS - continue-on-error: true - - - name: Collect failed logs - if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.test.outcome != 'success'}} - run: | - ./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt - exit 1 - - - name: Collect test results - if: always() - uses: actions/upload-artifact@v4 - with: - name: kfp-execution-tests-artifacts-k8s-${{ matrix.k8s_version }} - path: /tmp/tmp*/* diff --git a/.github/workflows/kfp-kubernetes-library-test.yml b/.github/workflows/kfp-kubernetes-library-test.yml index 8a08ceb4ec9..d181a224b11 100644 --- a/.github/workflows/kfp-kubernetes-library-test.yml +++ b/.github/workflows/kfp-kubernetes-library-test.yml @@ -27,49 +27,20 @@ jobs: ] steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Set up Python uses: actions/setup-python@v4 with: python-version: ${{matrix.python.version}} - - name: apt-get update - run: sudo apt-get update + - name: Install protobuf dependencies & kfp-pipeline-spec + id: install-protobuf-deps + uses: ./.github/actions/protobuf - - name: Install protobuf-compiler - run: sudo apt-get install protobuf-compiler -y - - - name: Install setuptools - run: | - pip3 install setuptools - pip3 freeze - - - name: Install Wheel - run: pip3 install wheel==0.42.0 - - - name: Install protobuf - run: pip3 install protobuf==4.25.3 - - - name: Generate API proto files - working-directory: ./api - run: make clean python - - - name: Install kfp-pipeline-spec from source - run: | - python3 -m pip install api/v2alpha1/python - - - name: Install kfp-sdk from source - run: | - python3 -m pip install sdk/python - - - name: Generate kfp-kubernetes proto files from source - working-directory: ./kubernetes_platform - run: make clean python - - - name: Install kfp-kubernetes from source - run: | - pip install -e ./kubernetes_platform/python[dev] + - name: Install kfp & kfp-kubernetes from source + id: install-kfp-k8s-deps + uses: ./.github/actions/kfp-k8s - name: Run tests - run: pytest ./kubernetes_platform/python/test -n auto + run: pytest ./kubernetes_platform/python/test diff --git 
a/.github/workflows/kfp-samples.yml b/.github/workflows/kfp-samples.yml deleted file mode 100644 index 70cf414f155..00000000000 --- a/.github/workflows/kfp-samples.yml +++ /dev/null @@ -1,119 +0,0 @@ -name: KFP Samples - -on: - push: - branches: - - master - - main - - stable - - 'rhoai-*' - pull_request: - paths: - - '.github/resources/**' - - '.github/workflows/kfp-samples.yml' - - '.github/workflows/kubeflow-pipelines-integration-v2.yml' - - 'backend/**' - - 'samples/**' - - 'samples/core/dataflow/**' - - 'samples/core/parameterized_tfx_oss/**' - - '!**/*.md' - - '!**/OWNERS' - -jobs: - samples: - runs-on: ubuntu-latest - strategy: - matrix: - k8s_version: [ "v1.29.2", "v1.31.0" ] - name: KFP Samples - K8s ${{ matrix.k8s_version }} - - steps: - - name: Checkout code - uses: actions/checkout@v2 - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: 3.9 - - - name: Free up space in /dev/root - run: | - echo "Disk usage before clean up:" - df -h - sudo rm -rf /usr/share/dotnet - sudo rm -rf "$AGENT_TOOLSDIRECTORY" - echo "Disk usage after clean up:" - df -h - - - name: apt-get update - run: sudo apt-get update - - - name: Install protobuf-compiler - run: sudo apt-get install protobuf-compiler -y - - - name: Install setuptools - run: | - pip3 install setuptools - pip3 freeze - - - name: Install Wheel - run: pip3 install wheel==0.42.0 - - - name: Install protobuf - run: pip3 install protobuf==4.25.3 - - - name: Generate API proto files - working-directory: ./api - run: make python - - - name: Install kfp-pipeline-spec from source - run: | - python3 -m pip install api/v2alpha1/python - - - name: Generate, Build, and Install Kubernetes API proto files & packages - working-directory: ./kubernetes_platform - run: make python && pip install python/dist/*.whl - - - name: Create KFP cluster - id: create-kfp-cluster - uses: ./.github/actions/kfp-cluster - with: - k8s_version: ${{ matrix.k8s_version }} - continue-on-error: true - - - name: Build and upload the sample Modelcar image to Kind - id: build-sample-modelcar-image - if: ${{ steps.create-kfp-cluster.outcome == 'success' }} - run: | - docker build -f samples/v2/modelcar/Dockerfile -t registry.domain.local/modelcar:test . 
- kind --name kfp load docker-image registry.domain.local/modelcar:test - continue-on-error: true - - - name: Forward API port - id: forward-api-port - if: ${{ steps.build-sample-modelcar-image.outcome == 'success' }} - run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888 - continue-on-error: true - - - name: Run Samples Tests - id: tests - if: ${{ steps.forward-api-port.outcome == 'success' }} - env: - PULL_NUMBER: ${{ github.event.pull_request.number }} - REPO_NAME: ${{ github.repository }} - run: | - ./backend/src/v2/test/sample-test.sh - continue-on-error: true - - - name: Collect failed logs - if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.tests.outcome != 'success' }} - run: | - ./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt - exit 1 - - - name: Collect test results - if: always() - uses: actions/upload-artifact@v4 - with: - name: kfp-samples-tests-artifacts-k8s-${{ matrix.k8s_version }} - path: /tmp/tmp*/* diff --git a/.github/workflows/kfp-sdk-client-tests.yml b/.github/workflows/kfp-sdk-client-tests.yml new file mode 100644 index 00000000000..eb376a13820 --- /dev/null +++ b/.github/workflows/kfp-sdk-client-tests.yml @@ -0,0 +1,99 @@ +name: KFP Python Client Tests + +on: + push: + branches: [master] + + pull_request: + paths: + - '.github/workflows/kfp-sdk-client-tests.yml.yml' + - '.github/actions/create-cluster/**' + - '.github/resources/**' + - 'sdk/python/**' + - 'api/v2alpha1/**' + - 'backend/**' + - '!**/*.md' + - '!**/OWNERS' + +jobs: + build: + uses: ./.github/workflows/image-builds-with-cache.yml + + sdk-client-tests: + runs-on: ubuntu-24.04 + needs: build + strategy: + matrix: + k8s_version: [ "v1.29.2", "v1.31.0" ] + name: KFP SDK Client Tests - K8s ${{ matrix.k8s_version }} + steps: + - name: Checkout code + uses: actions/checkout@v5 + + - name: Free up disk space + run: ./.github/resources/scripts/free-disk-space.sh + + # This must occur after "Free up space" step + # otherwise python version will be overridden + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.11' + + - name: Create KFP cluster + id: create-kfp-cluster + uses: ./.github/actions/create-cluster + with: + k8s_version: ${{ matrix.k8s_version }} + continue-on-error: true + + - name: Deploy + id: deploy + uses: ./.github/actions/deploy + if: ${{ steps.create-kfp-cluster.outcome == 'success' }} + with: + image_path: ${{ needs.build.outputs.IMAGE_PATH }} + image_tag: ${{ needs.build.outputs.IMAGE_TAG }} + image_registry: ${{ needs.build.outputs.IMAGE_REGISTRY }} + + - name: Forward API port + id: forward-api-port + if: ${{ steps.deploy.outcome == 'success' }} + run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888 + continue-on-error: true + + - name: Install protobuf dependencies & kfp-pipeline-spec + if: ${{ steps.forward-api-port.outcome == 'success' }} + id: install-protobuf-deps + uses: ./.github/actions/protobuf + + - name: Install Test dependencies + run: | + pip install -r sdk/python/requirements.txt + pip install -r sdk/python/requirements-dev.txt + pip install pytest + pip install pytest-cov + + - name: Run tests + id: tests + if: ${{ steps.forward-api-port.outcome == 'success' }} + env: + apiUrl: "http://localhost:8888" + namespace: "kubeflow" + SETUP_ENV: false + run: | + ./test/presubmit-tests-sdk-client.sh + continue-on-error: true + + - name: Collect failed logs + if: ${{ steps.deploy.outcome != 
'success' || steps.forward-api-port.outcome != 'success' || steps.tests.outcome != 'success'}} + run: | + ./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt + exit 1 + + - name: Collect test results + if: always() + uses: actions/upload-artifact@v4 + with: + name: periodic-functional-artifacts-k8s-${{ matrix.k8s_version }} + path: /tmp/tmp*/* \ No newline at end of file diff --git a/.github/workflows/kfp-sdk-runtime-tests.yml b/.github/workflows/kfp-sdk-runtime-tests.yml deleted file mode 100644 index 16a901b6ecd..00000000000 --- a/.github/workflows/kfp-sdk-runtime-tests.yml +++ /dev/null @@ -1,38 +0,0 @@ -name: KFP Runtime Code Tests - -on: - push: - branches: - - master - - main - - stable - - 'rhoai-*' - - pull_request: - paths: - - '.github/workflows/kfp-sdk-runtime-tests.yml' - - 'sdk/python/**' - - 'test/presubmit-test-kfp-runtime-code.sh' - - '!**/*.md' - - '!**/OWNERS' - -jobs: - kfp-runtime-tests: - runs-on: ubuntu-24.04 - strategy: - matrix: - python: ['3.9', '3.13'] - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python }} - - - name: Run KFP Runtime Code Tests - run: | - export PULL_NUMBER="${{ github.event.inputs.pull_number || github.event.pull_request.number }}" - export REPO_NAME="${{ github.repository }}" - ./test/presubmit-test-kfp-runtime-code.sh diff --git a/.github/workflows/kfp-sdk-tests.yml b/.github/workflows/kfp-sdk-tests.yml index 7e7b19b9960..a9cd0f63614 100644 --- a/.github/workflows/kfp-sdk-tests.yml +++ b/.github/workflows/kfp-sdk-tests.yml @@ -2,15 +2,13 @@ name: KFP SDK Tests on: push: - branches: - - master - - main - - stable - - 'rhoai-*' + branches: [master] pull_request: paths: + - 'api/**' - 'sdk/**' - - 'test/presubmit-tests-sdk.sh' + - 'test_data/sdk_compiled_pipelines/**' + - './test/presubmit-tests-sdk.sh' - '.github/workflows/kfp-sdk-tests.yml' - '!**/*.md' - '!**/OWNERS' @@ -24,13 +22,43 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v2 + uses: actions/checkout@v5 - name: Set up Python uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} + - name: Install protobuf dependencies & kfp-pipeline-spec + id: install-protobuf-deps + uses: ./.github/actions/protobuf + + - name: Install kfp & kfp-kubernetes from source + id: install-kfp-k8s-deps + uses: ./.github/actions/kfp-k8s + + - name: Install Test dependencies + run: | + pip install -r sdk/python/requirements-dev.txt + pip install pytest + pip install docker + pip install pytest-cov + pip install google_cloud_pipeline_components + pip install requests==2.28.1 + + - name: Set up Docker for local execution tests + run: | + # Ensure Docker daemon is running and accessible + sudo systemctl start docker + sudo systemctl enable docker + sudo usermod -aG docker $USER + # Wait for Docker to be ready + timeout 30 bash -c 'until docker info > /dev/null 2>&1; do sleep 1; done' + docker info + - name: Run SDK Tests + env: + # We setup the env in the CI + SETUP_ENV: false run: | ./test/presubmit-tests-sdk.sh diff --git a/.github/workflows/kfp-sdk-unit-tests.yml b/.github/workflows/kfp-sdk-unit-tests.yml new file mode 100644 index 00000000000..63793c77a69 --- /dev/null +++ b/.github/workflows/kfp-sdk-unit-tests.yml @@ -0,0 +1,49 @@ +name: KFP SDK Unit Tests + +on: + push: + branches: [master] + pull_request: + paths: + - 'api/**' + - 'sdk/**' + - './test/presubmit-tests-sdk-unit.sh' + - 
'.github/workflows/kfp-sdk-unit-tests.yml' + - '!**/*.md' + - '!**/OWNERS' + +jobs: + sdk-unit-tests: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.9', '3.13'] + + steps: + - name: Checkout code + uses: actions/checkout@v5 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install protobuf dependencies & kfp-pipeline-spec + id: install-protobuf-deps + uses: ./.github/actions/protobuf + + - name: Install kfp & kfp-kubernetes from source + id: install-kfp-k8s-deps + uses: ./.github/actions/kfp-k8s + + - name: Install Test dependencies + run: | + pip install pytest-cov + pip install -r sdk/python/requirements-dev.txt + + - name: Run SDK Tests + env: + # We setup the env in the CI + SETUP_ENV: false + run: | + ./test/presubmit-tests-sdk-unit.sh diff --git a/.github/workflows/kfp-webhooks.yml b/.github/workflows/kfp-webhooks.yml index 75fef56de67..5255426e081 100644 --- a/.github/workflows/kfp-webhooks.yml +++ b/.github/workflows/kfp-webhooks.yml @@ -6,6 +6,7 @@ on: - master pull_request: paths: + - '.github/actions/create-cluster/**' - '.github/resources/**' - '.github/workflows/kfp-webhooks.yml' - 'backend/**' @@ -13,41 +14,54 @@ on: - '!**/OWNERS' jobs: + build: + uses: ./.github/workflows/image-builds-with-cache.yml + webhook-tests: runs-on: ubuntu-latest + needs: build strategy: matrix: k8s_version: [ "v1.29.2", "v1.31.0" ] name: KFP Webhooks - K8s ${{ matrix.k8s_version }} steps: - - name: Checkout code - uses: actions/checkout@v2 - - - name: Create KFP cluster - id: create-kfp-cluster - uses: ./.github/actions/kfp-cluster - with: - k8s_version: ${{ matrix.k8s_version }} - pipeline_store: kubernetes - continue-on-error: true - - - name: Run Webhook Integration Tests - id: tests - if: ${{ steps.create-kfp-cluster.outcome == 'success' }} - run: | - make -C backend/test/integration test-webhook - continue-on-error: true - - - name: Collect failed logs - if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.tests.outcome != 'success' }} - run: | - ./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt - exit 1 - - - name: Collect test results - if: always() - uses: actions/upload-artifact@v4 - with: - name: kfp-samples-tests-artifacts-k8s-${{ matrix.k8s_version }} - path: /tmp/tmp*/* + - name: Checkout code + uses: actions/checkout@v5 + + - name: Create KFP cluster + id: create-kfp-cluster + uses: ./.github/actions/create-cluster + with: + k8s_version: ${{ matrix.k8s_version }} + continue-on-error: true + + - name: Deploy + id: deploy + uses: ./.github/actions/deploy + if: ${{ steps.create-kfp-cluster.outcome == 'success' }} + with: + image_path: ${{ needs.build.outputs.IMAGE_PATH }} + image_tag: ${{ needs.build.outputs.IMAGE_TAG }} + image_registry: ${{ needs.build.outputs.IMAGE_REGISTRY }} + pipeline_store: kubernetes + + - name: Run Webhook Integration Tests + id: tests + if: ${{ steps.deploy.outcome == 'success' }} + run: | + make -C backend/test/integration test-webhook + continue-on-error: true + + - name: Collect failed logs + if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.deploy.outcome != 'success' || steps.tests.outcome != 'success' }} + run: | + ./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt + exit 1 + + - name: Collect test results + if: always() + uses: actions/upload-artifact@v4 + with: + name: kfp-samples-tests-artifacts-k8s-${{ matrix.k8s_version }} + path: /tmp/tmp*/* \ No newline at 
end of file diff --git a/.github/workflows/kubeflow-pipelines-manifests.yml b/.github/workflows/kubeflow-pipelines-manifests.yml index 52af8e27bba..8c98a3efc86 100644 --- a/.github/workflows/kubeflow-pipelines-manifests.yml +++ b/.github/workflows/kubeflow-pipelines-manifests.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Run tests run: ./manifests/kustomize/hack/presubmit.sh diff --git a/.github/workflows/legacy-v2-api-integration-tests.yml b/.github/workflows/legacy-v2-api-integration-tests.yml new file mode 100644 index 00000000000..bf1c1d5b173 --- /dev/null +++ b/.github/workflows/legacy-v2-api-integration-tests.yml @@ -0,0 +1,89 @@ +name: KFP API Integration V2 (Legacy) + +on: + push: + branches: [master] + + pull_request: + paths: + - '.github/workflows/legacy-v2-api-integration-tests.yml' + - '.github/actions/create-cluster/**' + - '.github/resources/**' + - 'api/**' + - 'go.mod' + - 'go.sum' + - 'backend/**' + - 'manifests/kustomize/**' + - 'test/**' + - '!**/*.md' + - '!**/OWNERS' + +jobs: + build: + uses: ./.github/workflows/image-builds-with-cache.yml + + api-integration-tests-v2: + runs-on: ubuntu-latest + needs: build + strategy: + matrix: + pipeline_store: [ "database", "kubernetes" ] + k8s_version: [ "v1.29.2", "v1.31.0" ] + name: API integration tests v2 - K8s with ${{ matrix.pipeline_store }} ${{ matrix.k8s_version }} + steps: + - name: Checkout code + uses: actions/checkout@v5 + + - name: Free up disk space + run: ./.github/resources/scripts/free-disk-space.sh + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: 3.9 + + - name: Create KFP cluster + id: create-kfp-cluster + uses: ./.github/actions/create-cluster + with: + k8s_version: ${{ matrix.k8s_version }} + continue-on-error: true + + - name: Deploy + id: deploy + uses: ./.github/actions/deploy + if: ${{ steps.create-kfp-cluster.outcome == 'success' }} + with: + image_path: ${{ needs.build.outputs.IMAGE_PATH }} + image_tag: ${{ needs.build.outputs.IMAGE_TAG }} + image_registry: ${{ needs.build.outputs.IMAGE_REGISTRY }} + pipeline_store: ${{ matrix.pipeline_store }} + + - name: Forward MLMD port + id: forward-mlmd-port + if: ${{ steps.deploy.outcome == 'success' }} + run: kubectl -n kubeflow port-forward svc/metadata-grpc-service 8080:8080 & + continue-on-error: true + + - name: API integration tests v2 + id: tests + if: ${{ steps.forward-mlmd-port.outcome == 'success' }} + working-directory: ./backend/test/v2/integration + run: go test -v ./... 
-args -runIntegrationTests=true -namespace=kubeflow + env: + PULL_NUMBER: ${{ github.event.pull_request.number }} + PIPELINE_STORE: ${{ matrix.pipeline_store }} + continue-on-error: true + + - name: Collect failed logs + if: ${{ steps.deploy.outcome != 'success' || steps.forward-mlmd-port.outcome != 'success' || steps.tests.outcome != 'success' }} + run: | + ./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt + exit 1 + + - name: Collect test results + if: always() + uses: actions/upload-artifact@v4 + with: + name: kfp-api-integration-tests-v2-artifacts-k8s-${{ matrix.k8s_version }}-${{ matrix.pipeline_store }} + path: /tmp/tmp*/* \ No newline at end of file diff --git a/.github/workflows/periodic.yml b/.github/workflows/periodic.yml deleted file mode 100644 index 9e18e324b7a..00000000000 --- a/.github/workflows/periodic.yml +++ /dev/null @@ -1,48 +0,0 @@ -name: Periodic Functional Tests - -on: - schedule: - - cron: '0 0 * * *' # Run every day at midnight - -jobs: - run_tests: - runs-on: ubuntu-latest - strategy: - matrix: - k8s_version: [ "v1.29.2", "v1.31.0" ] - name: Periodic Functional Tests - K8s ${{ matrix.k8s_version }} - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: 3.9 - - name: Create KFP cluster - uses: ./.github/actions/kfp-cluster - with: - k8s_version: ${{ matrix.k8s_version }} - - - name: Port forward kfp apiserver - run: | - nohup kubectl port-forward --namespace kubeflow svc/ml-pipeline 8888:8888 & - - - name: Run Functional Tests - id: tests - run: | - log_dir=$(mktemp -d) - ./test/kfp-functional-test/kfp-functional-test.sh > $log_dir/periodic_tests.txt - continue-on-error: true - - - name: Collect failed logs - if: steps.tests.outcome != 'success' - run: | - ./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt - exit 1 - - - name: Collect test results - if: always() - uses: actions/upload-artifact@v4 - with: - name: periodic-functional-artifacts-k8s-${{ matrix.k8s_version }} - path: /tmp/tmp*/* diff --git a/.github/workflows/pr-commands.yml b/.github/workflows/pr-commands.yml index bcc85d1d0e7..a708470a067 100644 --- a/.github/workflows/pr-commands.yml +++ b/.github/workflows/pr-commands.yml @@ -14,7 +14,7 @@ jobs: PR_SHA: ${{ steps.fetch-pr-sha.outputs.PR_SHA }} steps: - name: Checkout Main Branch - uses: actions/checkout@v3 + uses: actions/checkout@v5 with: ref: ${{ env.DEFAULT_BRANCH }} - name: Check if the author is a member or Owner @@ -63,7 +63,7 @@ jobs: if: contains(github.event.comment.body, '/ok-to-test') steps: - name: Checkout Main Branch - uses: actions/checkout@v3 + uses: actions/checkout@v5 with: ref: ${{ env.DEFAULT_BRANCH }} - name: Approve Runs diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index 437371f3f32..ef253381669 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -1,21 +1,37 @@ name: pre-commit -on: - push: - branches: - - master - pull_request: - branches: - - master +#on: +# push: +# branches: +# - master +# pull_request: +# branches: +# - master + +# This workflow is disabled in downstream because we require linting upstream. 
+on: [] jobs: pre-commit: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v3 - - uses: pre-commit/action@v3.0.1 - # This is set to only run the golangci-lint pre-commit hooks - # Remove in a later PR to run all hooks - with: - extra_args: golangci-lint --all-files \ No newline at end of file + - uses: actions/checkout@v5 + with: + fetch-depth: 0 + - name: Fetch base branch + run: git fetch origin ${{ github.base_ref || github.ref_name }} + - uses: actions/setup-python@v3 + - uses: actions/setup-go@v5 + with: + go-version-file: go.mod + - name: golangci-lint + uses: golangci/golangci-lint-action@v8 + with: + version: v2.3 + args: --new --new-from-merge-base=origin/${{ github.base_ref || github.ref_name }} + #- uses: pre-commit/action@v3.0.1 + # # This is set to only run the golangci-lint pre-commit hooks + # # Remove in a later PR to run all hooks + # with: + # go-version: '>=1.24.2' + # extra_args: golangci-lint --all-files diff --git a/.github/workflows/presubmit-backend.yml b/.github/workflows/presubmit-backend.yml index 0bac2f68e9e..71bfa930720 100644 --- a/.github/workflows/presubmit-backend.yml +++ b/.github/workflows/presubmit-backend.yml @@ -25,7 +25,7 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v2 + uses: actions/checkout@v5 - name: Set up Go uses: actions/setup-go@v5 @@ -33,4 +33,7 @@ jobs: go-version-file: go.mod - name: Run Backend Tests - run: ./test/presubmit-backend-test.sh + run: | + export GIT_BRANCH=${{ github.head_ref || github.ref_name }} + export GIT_REPO=${{ github.event.pull_request.head.repo.full_name }} + ./test/presubmit-backend-test.sh diff --git a/.github/workflows/readthedocs-builds.yml b/.github/workflows/readthedocs-builds.yml new file mode 100644 index 00000000000..ca1238a88fb --- /dev/null +++ b/.github/workflows/readthedocs-builds.yml @@ -0,0 +1,50 @@ +name: KFP Readthedocs Release Readiness + +on: + push: + branches: [ master ] + pull_request: + paths: + - api/** + - sdk/** + - kubernetes_platform/** + - .github/workflows/readthedocs-builds.yml + - .readthedocs.yml + +jobs: + test-readthedocs-builds: + runs-on: ubuntu-24.04 + steps: + - name: Checkout code + uses: actions/checkout@v5 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.10" + + - name: Install protobuf dependencies & kfp-pipeline-spec + id: install-protobuf-deps + uses: ./.github/actions/protobuf + with: + generate_golang_proto: "false" + + - name: Install Dependencies + run: | + pip install -r docs/sdk/requirements.txt + + - name: Build KFP SDK Docs + working-directory: docs/sdk + run: | + sphinx-build -b html . _build/html + + - name: Build KFP Kubernetes SDK Docs + working-directory: kubernetes_platform/python/docs + run: | + sphinx-build -b html . 
_build/html + + - name: Test K8s platform release script + working-directory: kubernetes_platform/python + run: | + KFP_KUBERNETES_VERSION=$(python -c 'from kfp.kubernetes.__init__ import __version__; print(__version__)') + source create_release_branch.sh diff --git a/.github/workflows/sdk-component-yaml.yml b/.github/workflows/sdk-component-yaml.yml index e082d795098..8546146f3df 100644 --- a/.github/workflows/sdk-component-yaml.yml +++ b/.github/workflows/sdk-component-yaml.yml @@ -23,40 +23,15 @@ jobs: runs-on: ubuntu-24.04 steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Set up Python uses: actions/setup-python@v4 with: python-version: 3.9 - - name: apt-get update - run: sudo apt-get update - - - name: Install protobuf-compiler - run: sudo apt-get install protobuf-compiler -y - - - name: Install setuptools - run: | - pip3 install setuptools - pip3 freeze - - - name: Install Wheel - run: pip3 install wheel==0.42.0 - - - name: Install protobuf - run: pip3 install protobuf==4.25.3 - - - name: Generate API proto files - working-directory: ./api - run: make clean python - - - name: Install kfp-pipeline-spec from source - run: | - python3 -m pip install api/v2alpha1/python - - - name: Install requirements - run: pip install -r ./test/sdk-execution-tests/requirements.txt + - name: Install protobuf dependencies + uses: ./.github/actions/protobuf - name: Run component YAML tests run: ./test/presubmit-component-yaml.sh diff --git a/.github/workflows/sdk-docformatter.yml b/.github/workflows/sdk-docformatter.yml index 48c37daaaac..8c3907c6c60 100644 --- a/.github/workflows/sdk-docformatter.yml +++ b/.github/workflows/sdk-docformatter.yml @@ -21,7 +21,7 @@ jobs: runs-on: ubuntu-24.04 steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Set up Python uses: actions/setup-python@v4 diff --git a/.github/workflows/sdk-execution.yml b/.github/workflows/sdk-execution.yml deleted file mode 100644 index 92a68d669e0..00000000000 --- a/.github/workflows/sdk-execution.yml +++ /dev/null @@ -1,142 +0,0 @@ -name: KFP SDK execution tests - -on: - push: - branches: - - master - - main - - stable - - 'rhoai-*' - - pull_request: - paths: - - '.github/workflows/sdk-execution.yml' - - '.github/resources/**' - - 'sdk/python/**' - - 'api/v2alpha1/**' - - 'backend/**' - - '!**/*.md' - - '!**/OWNERS' - -jobs: - sdk-execution-tests: - runs-on: ubuntu-24.04 - strategy: - matrix: - k8s_version: [ "v1.29.2", "v1.31.0" ] - name: KFP SDK Execution Tests - K8s ${{ matrix.k8s_version }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - - # This is intended to address disk space issues that have surfaced - # intermittently during CI - - # https://github.com/actions/runner-images/issues/2840#issuecomment-1284059930 - - name: Free up space in /dev/root - run: | - echo "Disk usage before clean up:" - df -h - sudo rm -rf /usr/share/dotnet - sudo rm -rf "$AGENT_TOOLSDIRECTORY" - echo "Disk usage after clean up:" - df -h - - # This must occur after "Free up space" step - # otherwise python version will be overridden - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: '3.11' - - - name: Create KFP cluster - id: create-kfp-cluster - uses: ./.github/actions/kfp-cluster - with: - k8s_version: ${{ matrix.k8s_version }} - continue-on-error: true - - - name: Forward API port - id: forward-api-port - if: ${{ steps.create-kfp-cluster.outcome == 'success' }} - run: ./.github/resources/scripts/forward-port.sh "kubeflow" 
"ml-pipeline" 8888 8888 - continue-on-error: true - - - name: Forward MLMD port - id: forward-mlmd-port - if: ${{ steps.create-kfp-cluster.outcome == 'success' }} - run: kubectl -n kubeflow port-forward svc/metadata-grpc-service 8080:8080 & - continue-on-error: true - - - name: Forward Minio port - id: forward-minio-port - if: ${{ steps.forward-mlmd-port.outcome == 'success' }} - run: kubectl -n kubeflow port-forward service/minio-service 9000:9000 & - continue-on-error: true - - - name: apt-get update - id: apt-get-update - if: ${{ steps.forward-minio-port.outcome == 'success' }} - run: sudo apt-get update - - - name: Install protobuf-compiler - id: install-protobuf-compiler - if: ${{ steps.apt-get-update.outcome == 'success' }} - run: sudo apt-get install protobuf-compiler -y - - - name: Install setuptools - id: install-setuptools - if: ${{ steps.install-protobuf-compiler.outcome == 'success' }} - run: | - pip3 install setuptools - pip3 freeze - - - name: Install Wheel - id: install-wheel - if: ${{ steps.install-setuptools.outcome == 'success' }} - run: pip3 install wheel==0.42.0 - - - name: Install protobuf - id: install-protobuf - if: ${{ steps.install-wheel.outcome == 'success' }} - run: pip3 install protobuf==4.25.3 - - - name: Generate API proto files - id: generate-api-proto-files - if: ${{ steps.install-protobuf.outcome == 'success' }} - working-directory: ./api - run: make clean python - - - name: Install kfp-pipeline-spec from source - id: install-kfp-pipeline-spec - if: ${{ steps.generate-api-proto-files.outcome == 'success' }} - run: | - python3 -m pip install api/v2alpha1/python - - - name: Install requirements - id: install-requirements - if: ${{ steps.install-kfp-pipeline-spec.outcome == 'success' }} - run: pip install -r ./test/sdk-execution-tests/requirements.txt - - - name: Run tests - id: tests - env: - PULL_NUMBER: ${{ github.event.pull_request.number }} - REPO_NAME: ${{ github.repository }} - run: | - export KFP_ENDPOINT="http://localhost:8888" - export TIMEOUT_SECONDS=2700 - pytest -v -n 5 ./test/sdk-execution-tests/sdk_execution_tests.py - continue-on-error: true - - - name: Collect failed logs - if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.forward-mlmd-port.outcome != 'success' || steps.forward-minio-port.outcome != 'success' || steps.tests.outcome != 'success'}} - run: | - ./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt - exit 1 - - - name: Collect test results - if: always() - uses: actions/upload-artifact@v4 - with: - name: periodic-functional-artifacts-k8s-${{ matrix.k8s_version }} - path: /tmp/tmp*/* diff --git a/.github/workflows/sdk-isort.yml b/.github/workflows/sdk-isort.yml index 0a60c48e896..69146f3c2f8 100644 --- a/.github/workflows/sdk-isort.yml +++ b/.github/workflows/sdk-isort.yml @@ -10,6 +10,7 @@ on: pull_request: paths: + - 'api/**' - 'sdk/python/**' - 'test/presubmit-isort-sdk.sh' - '.github/workflows/sdk-isort.yml' @@ -21,7 +22,7 @@ jobs: runs-on: ubuntu-24.04 steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Set up Python uses: actions/setup-python@v4 diff --git a/.github/workflows/sdk-upgrade.yml b/.github/workflows/sdk-upgrade.yml index bd6e80ffe28..4bda2afe559 100644 --- a/.github/workflows/sdk-upgrade.yml +++ b/.github/workflows/sdk-upgrade.yml @@ -1,32 +1,40 @@ name: KFP SDK Upgrade Test -on: - push: - branches: - - master - - main - - stable - - 'rhoai-*' +# This workflow is disabled in 
downstream because the SDK is always behind upstream, so it does not sufficiently test +# upgrades and will fail. +#on: +# push: +# branches: +# - master +# - main +# - stable +# - 'rhoai-*' - pull_request: - paths: - - 'sdk/python/**' - - 'test/presubmit-test-sdk-upgrade.sh' - - '.github/workflows/sdk-upgrade.yml' - - '!**/*.md' - - '!**/OWNERS' +# pull_request: +# paths: +# - 'sdk/python/**' +# - 'test/presubmit-test-sdk-upgrade.sh' +# - '.github/workflows/sdk-upgrade.yml' +# - '!**/*.md' +# - '!**/OWNERS' jobs: test-upgrade-kfp-sdk: runs-on: ubuntu-24.04 steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Set up Python uses: actions/setup-python@v4 with: python-version: 3.9 + - name: Install protobuf dependencies & kfp-pipeline-spec + id: install-protobuf-deps + uses: ./.github/actions/protobuf + with: + generate_golang_proto: "false" + - name: Run SDK upgrade tests run: ./test/presubmit-test-sdk-upgrade.sh diff --git a/.github/workflows/sdk-yapf.yml b/.github/workflows/sdk-yapf.yml index a8891f6e6d1..c29e7769d55 100644 --- a/.github/workflows/sdk-yapf.yml +++ b/.github/workflows/sdk-yapf.yml @@ -8,6 +8,7 @@ on: - stable - 'rhoai-*' paths: + - 'api/**' - 'sdk/python/**' - 'test/presubmit-yapf-sdk.sh' - '.github/workflows/sdk-yapf.yml' @@ -30,7 +31,7 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v2 + uses: actions/checkout@v5 - name: Set up Python uses: actions/setup-python@v4 diff --git a/.github/workflows/trivy.yml b/.github/workflows/trivy.yml new file mode 100644 index 00000000000..3489115fc83 --- /dev/null +++ b/.github/workflows/trivy.yml @@ -0,0 +1,28 @@ +name: Trivy Vulnerability Scan +on: + push: + branches: + - master + pull_request: +jobs: + build: + name: Build + runs-on: ubuntu-24.04 + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Run Trivy vulnerability scanner in repo mode + uses: aquasecurity/trivy-action@0.28.0 + with: + scan-type: 'fs' + ignore-unfixed: true + format: 'sarif' + output: 'trivy-results.sarif' + severity: 'CRITICAL,HIGH' + skip-dirs: 'components' + + - name: Upload Trivy scan results to GitHub Security tab + uses: github/codeql-action/upload-sarif@v3 + with: + sarif_file: 'trivy-results.sarif' diff --git a/.github/workflows/upgrade-test.yml b/.github/workflows/upgrade-test.yml index ddbd6c45de6..a2a7b7bb41e 100644 --- a/.github/workflows/upgrade-test.yml +++ b/.github/workflows/upgrade-test.yml @@ -1,5 +1,13 @@ name: KFP upgrade tests +env: + TESTS_DIR: "./backend/test/v2/api" + TESTS_LABEL: "ApiServerTests" + NUMBER_OF_PARALLEL_NODES: 15 + CLUSTER_NAME: "kfp" + NAMESPACE: "kubeflow" + PYTHON_VERSION: "3.9" + on: push: branches: @@ -11,79 +19,99 @@ on: pull_request: paths: - '.github/workflows/upgrade-test.yml' + - '.github/actions/create-cluster/**' - '.github/resources/**' - - 'backend/**' + - 'backend/api/v2beta1/**' + - 'backend/src/**' + - 'backend/metadata_writer/**' + - 'backend/test/v2/api/**' - 'manifests/kustomize/**' - '!**/*.md' - '!**/OWNERS' jobs: + build: + uses: ./.github/workflows/image-builds-with-cache.yml + upgrade-test: + needs: build runs-on: ubuntu-latest strategy: matrix: - k8s_version: [ "v1.29.2", "v1.31.0" ] + k8s_version: [ "v1.31.0" ] name: KFP upgrade tests - K8s ${{ matrix.k8s_version }} steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v5 + + - name: Free up disk space + run: ./.github/resources/scripts/free-disk-space.sh - name: Set up Python uses: actions/setup-python@v4 with: python-version: 3.9 - - name: 
Create KFP cluster - id: create-kfp-cluster - uses: ./.github/actions/kfp-cluster + - name: Get last release tag + shell: bash + id: get-tag + run: | + lastRelease=$(curl -sSL -H "Accept: application/vnd.github+json" "https://api.github.com/repos/kubeflow/pipelines/releases/latest" | jq -r .tag_name) + echo "Fetched last release tag: $lastRelease" + echo "lastRelease=$lastRelease" >> $GITHUB_OUTPUT + + - name: Create cluster + uses: ./.github/actions/create-cluster + id: create-cluster with: k8s_version: ${{ matrix.k8s_version }} - continue-on-error: true + + - name: Deploy Last Release + shell: bash + id: deploy-release + run: | + kubectl apply -k https://github.com/kubeflow/pipelines/manifests/kustomize/cluster-scoped-resources?ref=${{ steps.get-tag.outputs.lastRelease }} + kubectl apply -k https://github.com/kubeflow/pipelines/manifests/kustomize/env/platform-agnostic?ref=${{ steps.get-tag.outputs.lastRelease }} + source "./.github/resources/scripts/helper-functions.sh" + wait_for_pods || EXIT_CODE=$? + if [[ $EXIT_CODE -ne 0 ]]; then + echo "Deploy unsuccessful. Not all pods running." + exit 1 + fi - name: Forward API port id: forward-api-port - if: ${{ steps.create-kfp-cluster.outcome == 'success' }} + if: ${{ steps.deploy-release.outcome == 'success' }} + shell: bash run: ./.github/resources/scripts/forward-port.sh "kubeflow" "ml-pipeline" 8888 8888 continue-on-error: true - - name: Prepare upgrade tests - id: upgrade-tests + - name: Prepare for Upgrade + id: prepare-upgrade if: ${{ steps.forward-api-port.outcome == 'success' }} - working-directory: backend/test/integration - run: go test -v ./... -namespace kubeflow -args -runUpgradeTests=true -testify.m=Prepare - continue-on-error: true - - - name: Prepare verification tests - id: verification-tests - if: ${{ steps.forward-api-port.outcome == 'success' }} - working-directory: backend/test/integration - run: go test -v ./... -namespace kubeflow -args -runUpgradeTests=true -testify.m=Verify - continue-on-error: true - - - name: Prepare upgrade tests v2 - id: upgrade-tests-v2 - if: ${{ steps.forward-api-port.outcome == 'success' }} - working-directory: backend/test/v2/integration/ - run: go test -v ./... -namespace kubeflow -args -runUpgradeTests=true -testify.m=Prepare - continue-on-error: true - - - name: Prepare verification tests v2 - id: verification-tests-v2 - if: ${{ steps.forward-api-port.outcome == 'success' }} - working-directory: backend/test/v2/integration - run: go test -v ./... 
-namespace kubeflow -args -runUpgradeTests=true -testify.m=Verify + working-directory: ${{ env.TESTS_DIR }} + run: | + go run github.com/onsi/ginkgo/v2/ginkgo -r -v --cover -p --keep-going --github-output=true --nodes=${{ env.NUMBER_OF_PARALLEL_NODES }} -v --label-filter="UpgradePreparation" continue-on-error: true - - name: Collect failed logs - if: ${{ steps.create-kfp-cluster.outcome != 'success' || steps.forward-api-port.outcome != 'success' || steps.upgrade-tests.outcome != 'success' || steps.upgrade-tests-v2.outcome != 'success' || steps.verification-tests.outcome != 'success' || steps.verification-tests-v2.outcome != 'success' }} - run: | - ./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt - exit 1 + - name: Deploy from Branch + uses: ./.github/actions/deploy + if: ${{ steps.create-cluster.outcome == 'success' }} + id: deploy + with: + image_path: ${{ needs.build.outputs.IMAGE_PATH }} + image_tag: ${{ needs.build.outputs.IMAGE_TAG }} + image_registry: ${{ needs.build.outputs.IMAGE_REGISTRY }} + forward_port: 'false' - - name: Collect test results - if: always() - uses: actions/upload-artifact@v4 + - name: Verify Upgrade + uses: ./.github/actions/test-and-report + if: ${{ steps.configure.outcome == 'success' }} with: - name: periodic-functional-artifacts-k8s-${{ matrix.k8s_version }} - path: /tmp/tmp*/* + test_directory: ${{ env.TESTS_DIR }} + test_label: "UpgradeVerification" + num_parallel_nodes: ${{ env.NUMBER_OF_PARALLEL_NODES }} + default_namespace: ${{ env.NAMESPACE }} + python_version: ${{ env.PYTHON_VERSION }} + report_name: "Upgrade Verification" \ No newline at end of file diff --git a/.github/workflows/validate-generated-files.yml b/.github/workflows/validate-generated-files.yml index 7e413fde46f..5f72994518a 100644 --- a/.github/workflows/validate-generated-files.yml +++ b/.github/workflows/validate-generated-files.yml @@ -30,7 +30,7 @@ jobs: fail-fast: false steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Install Go uses: actions/setup-go@v5 @@ -41,23 +41,62 @@ jobs: uses: actions/setup-python@v4 with: python-version: 3.9 - - - name: Install Dependencies - run: | - sudo apt-get update && sudo apt-get install -y protobuf-compiler jq default-jdk - python3 -m pip install setuptools wheel - - - name: Generate API proto files - working-directory: ./api - run: make clean all - - - name: Generate kfp-kubernetes proto files from source - working-directory: ./kubernetes_platform - run: make clean all + + - name: Install protobuf dependencies & kfp-pipeline-spec + id: install-protobuf-deps + uses: ./.github/actions/protobuf + with: + generate_golang_proto: "true" + + - name: Install kfp & kfp-kubernetes from source + id: install-kfp-k8s-deps + uses: ./.github/actions/kfp-k8s + with: + generate_golang_proto: "true" - name: Generate K8s Native API CRDs working-directory: ./backend/src/crd/kubernetes run: make generate manifests + - name: Generate backend proto code v2beta1 + working-directory: ./backend/api + env: + API_VERSION: v2beta1 + run: make generate + + - name: Generate backend proto code v1beta1 + working-directory: ./backend/api + env: + API_VERSION: v1beta1 + run: make generate + + - name: Generate backend proto code v2beta1 + working-directory: ./backend/api + env: + API_VERSION: v2beta1 + run: make generate-kfp-server-api-package + + - name: Generate backend proto code v1beta1 + working-directory: ./backend/api + env: + API_VERSION: v1beta1 + run: make generate-kfp-server-api-package + - name: Check for 
Changes run: make check-diff + + validate-backwards-compabitiblity: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + + - name: Install Go + uses: actions/setup-go@v5 + with: + go-version-file: go.mod + + - name: Validate Go proto code backwards compatibility + working-directory: ./backend/test/proto_tests + env: + UPDATE_EXPORTED: false + run: go test . diff --git a/.gitignore b/.gitignore index e1169cb9cf8..823dee22a9f 100644 --- a/.gitignore +++ b/.gitignore @@ -68,6 +68,7 @@ _build # virtualenv .venv/ +venv/ # python sdk package *.tar.gz @@ -93,3 +94,13 @@ kubeconfig_dev-pipelines-api backend/Dockerfile.driver-debug backend/src/crd/kubernetes/bin + +**/allure-* +**/*.html +reports/ +logs/ +*.out +*test_*output* + +# Project-local tools +bin/ diff --git a/.golangci.yaml b/.golangci.yaml index 47a08cbdca0..b00124ac151 100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -2,22 +2,25 @@ version: "2" run: timeout: 30m - skip-files: - - "api\\*.go$" - - "backend\\api\\*.go" issues: max-same-issues: 0 linters: - disable-all: true - enable: # please keep this alphabetized + default: none + enable: - gocritic - govet - ineffassign - misspell - staticcheck - unused + + exclusions: + paths: + - "api/*.go" + - "backend/api/*.go" + settings: misspell: locale: US @@ -26,7 +29,6 @@ linters: - "all" formatters: - disable-all: true enable: - gofmt - goimports \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 53d057ead2c..a187625d5a7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -18,9 +18,9 @@ repos: - repo: https://github.com/PyCQA/flake8 rev: 7.1.1 hooks: - - id: flake8 + - id: flake8 args: - - --select=W605 + - --select=W605 # required formatting jobs (run these last) # add comment "noqa" to ignore an import that should not be removed diff --git a/.readthedocs.yml b/.readthedocs.yml index a1d632dcc99..ecf7e0ada0d 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,10 +1,10 @@ # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details version: 2 sphinx: - configuration: docs/conf.py + configuration: docs/sdk/conf.py python: install: - - requirements: docs/requirements.txt + - requirements: docs/sdk/requirements.txt build: os: ubuntu-22.04 tools: diff --git a/ADOPTERS.md b/ADOPTERS.md index d0e02c78a2a..835c4900fcc 100644 --- a/ADOPTERS.md +++ b/ADOPTERS.md @@ -4,7 +4,10 @@ Below are the adopters of project Kubeflow Pipelines. If you are using Kubeflow please add yourself into the following list by a pull request. Please keep the list in alphabetical order. 
-| Organization | Contact | Description of Use | -|------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------|--------------------------------------------------------| -| [IBM Research Foundation Model Data Engineering Team](https://www.research.ibm.com/) | [@yuanchi2807](https://github.com/yuanchi2807), [@roytman](https://github.com/roytman) | Foundation Model Data Engineering | -| [Red Hat](https://www.redhat.com/) | [@franciscojavierarceo](https://github.com/franciscojavierarceo) | ML/AI & Data orchestration | +| Organization | Contact | Description of Use | +|--------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------|-----------------------------------| +| [Capital One](https://www.capitalone.com/) | [@droctothorpe](https://github.com/droctothorpe) | ML/AI Workflow orchestration | +| [IBM Research Foundation Model Data Engineering Team](https://www.research.ibm.com/) | [@yuanchi2807](https://github.com/yuanchi2807), [@roytman](https://github.com/roytman) | Foundation Model Data Engineering | +| [Red Hat](https://www.redhat.com/) | [@franciscojavierarceo](https://github.com/franciscojavierarceo) | ML/AI & Data orchestration | +| [Sophotech](https://sopho.tech/) | [@archy-rock3t-cloud](https://github.com/archy-rock3t-cloud) | ML/AI & Workflow orchestration | + diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 00000000000..263631907ee --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,575 @@ +# Agent Guide: Kubeflow Pipelines (KFP) Monorepo + +## Purpose + +- **Who this is for**: AI agents and developers working inside this repo. +- **What you get**: The minimum set of facts, files, and commands to navigate, modify, and run KFP locally. + +### Document metadata + +- Last updated: 2025-09-24 +- Scope: KFP master branch (v2 engine), backend (Go), SDK (Python), frontend (React 16) + +### Maintenance (agents and contributors) + +- If you change commands, file paths, Make targets, environment variables, or workflows in this repo, update this guide in the relevant sections (Local development, Local testing, Local execution, Regenerate protobufs, Frontend development, CI/CD). +- When you add or change generated files, update the "🚫 NEVER EDIT DIRECTLY (Generated files)" section with sources and regeneration commands. +- When you change CI matrices (Kubernetes versions, pipeline stores, proxy/cache toggles, Argo versions) or add/remove workflows, update the CI/CD section. +- If you come across new common errors or fixes, extend "Common error patterns and quick fixes". +- Always bump the "Last updated" date above when you make substantive changes. + +## Baseline architecture + +- Start with inspecting the architectural diagram found here `images/kfp-cluster-wide-architecture.drawio.xml` (rendered format can be found here: `images/kfp-cluster-wide-architecture.png`). + +## End-to-end flow (SDK → API Server → Driver → Launcher → Executor → completion) + +- **SDK**: + - Compiles Python DSL to the pipeline spec (IR YAML). See `sdk/python/kfp/compiler/pipeline_spec_builder.py`. + - The pipeline spec schema is defined via Protobufs under `api/`. + - Can execute pipelines locally via Subprocess or Docker runner modes. +- **API Server**: + - On run creation, compiles the pipeline spec to Argo Workflows `Workflow` objects. 
+ - Uploads and runs pipelines remotely on a Kubernetes cluster. +- **Driver**: + - Resolves input parameters. + - Computes the pod spec patch based on component resource requests/limits. + - All other Kubernetes configuration originates from the platform spec implemented by `kubernetes_platform`. +- **Launcher**: + - Not used by Subprocess/Docker runners. + - Downloads input artifacts, uploads outputs, invokes the Python executor, handles executor results. +- **Python executor**: + - Entrypoint: `sdk/python/kfp/dsl/executor_main.py`. + - Never involved during the pipeline compilation stage. + - During task runtime, `kfp` is installed with `--no-deps` and `_KFP_RUNTIME=true` disables most SDK imports. + - API Server mode: the Go launcher (copied via `init` container) executes the executor inside the user container + defined by the component `base_image` (there is a default). + - Subprocess/Docker runners: the launcher is skipped; executor runs directly. + +## Packages and naming + +- All Python packages are installed under the `kfp` namespace. +- KFP Python packages: + - **kfp**: Primary SDK (DSL, client, local execution). + - **kfp-pipeline-spec**: Protobuf-defined API contract used by SDK and backend. + - **kfp-kubernetes**: Kubernetes Python extension layer for `kfp` located at `kubernetes_platform/python` for + Kubernetes-specific settings and platform spec. +- The `kfp-kubernetes` package imports generated Python code from `kfp-pipeline-spec` and renames imports via + `kubernetes_platform/python/generate_proto.py` to resolve inconsistencies. + +## Local development setup + +- Always use a `.venv` virtual environment. + +```bash +python3 -m venv .venv +source .venv/bin/activate +python -m pip install -U pip setuptools wheel + +make -C api python-dev +make -C kubernetes_platform python-dev + +pip install -e api/v2alpha1/python --config-settings editable_mode=strict +pip install -e sdk/python --config-settings editable_mode=strict +pip install -e kubernetes_platform/python --config-settings editable_mode=strict +``` + +### Required CLI tools + +- Ginkgo CLI for running Go-based test suites. 
+ +Install locally into `./bin`: + +```bash +make ginkgo +export PATH="$PWD/bin:$PATH" # ensure the ginkgo binary is on PATH +``` + +Or install directly with `go install` into a project-local `./bin`: + +```bash +GOBIN=$PWD/bin go install github.com/onsi/ginkgo/v2/ginkgo@latest +export PATH="$PWD/bin:$PATH" +``` + +## Local cluster deployment + +KFP provides Make targets for setting up local Kind clusters for development and testing: + +### Standalone mode deployment + +For deploying the latest master branch in standalone mode (single-user, no authentication): + +```bash +make -C backend kind-cluster-agnostic +``` + +This target: + +- Creates a Kind cluster named `dev-pipelines-api` +- Deploys KFP in standalone mode using `manifests/kustomize/env/platform-agnostic` +- Sets up MySQL database and metadata services +- Switches kubectl context to the `kubeflow` namespace + +### Development mode deployment + +For local API server development with additional debugging capabilities: + +```bash +make -C backend dev-kind-cluster +``` + +This target: + +- Creates a Kind cluster with webhook proxy support +- Installs cert-manager for certificate management +- Deploys KFP using `manifests/kustomize/env/dev-kind` +- Includes webhook proxy for advanced debugging scenarios + +### Deployment modes + +KFP supports two main deployment modes: + +**Standalone Mode:** + +- Single-user deployment without authentication +- Simpler setup, ideal for development and testing +- Uses manifests from `manifests/kustomize/env/platform-agnostic` or + `manifests/kustomize/env/cert-manager/platform-agnostic-k8s-native` +- All users have full access to all pipelines and experiments + +**Multi-user Mode:** + +- Multi-tenant deployment with authentication and authorization +- Requires integration with identity providers (e.g., Dex, OIDC) +- Uses manifests from `manifests/kustomize/env/cert-manager/platform-agnostic-multi-user` or + `manifests/kustomize/env/cert-manager/platform-agnostic-multi-user-k8s-native` +- Includes user isolation, namespace-based access control, and Istio integration +- Suitable for production environments with multiple users/teams + +## Local testing + +- Python (SDK): + +```bash +pip install -r sdk/python/requirements-dev.txt +pytest -v sdk/python/kfp +``` + +- Python (`kfp-kubernetes`): + +```bash +pytest -v kubernetes_platform/python/test +``` + +- Go (backend) unit tests only, excluding integration/API/Compiler/E2E tests: + +```bash +go test -v $(go list ./backend/... | \ + grep -v backend/test/v2/api | \ + grep -v backend/test/integration | \ + grep -v backend/test/v2/integration | \ + grep -v backend/test/initialization | \ + grep -v backend/test/v2/initialization | \ + grep -v backend/test/compiler | \ + grep -v backend/test/end2end) +``` + +Notes: + +- API Server tests under `backend/test/v2/api` are integration tests run with Ginkgo; they require a running cluster and are not part of unit tests. +- Compiler tests live under `backend/test/compiler` and E2E tests under `backend/test/end2end`; both are Ginkgo-based and excluded from unit presubmits. 
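+
+When iterating on a single backend package, a narrower `go test` invocation avoids the long exclusion list (the package path below is only an example):
+
+```bash
+# Unit-test a single backend package; -count=1 bypasses Go's test result cache
+go test -v -count=1 ./backend/src/apiserver/...
+```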
+ +### Backend Ginkgo test suites + +- Compiler tests: + +```bash +# Run compiler tests +ginkgo -v ./backend/test/compiler + +# Update compiled workflow goldens when intended +ginkgo -v ./backend/test/compiler -- -updateCompiledFiles=true + +# Auto-create missing goldens (default true); disable with: +ginkgo -v ./backend/test/compiler -- -createGoldenFiles=false +``` + +- v2 API integration tests (label-filterable): + +```bash +# All API tests +ginkgo -v ./backend/test/v2/api + +# Example: run only Smoke-labeled tests with ginkgo +ginkgo -v --label-filter="Smoke" ./backend/test/v2/api +``` + +- End-to-end tests: + +```bash +ginkgo -v ./backend/test/end2end -- -namespace=kubeflow -isDebugMode=true +``` + +Test data is centralized under: + +- `test_data/pipeline_files/valid/` (inputs) with a `valid/critical/` subset for smoke lanes +- `test_data/compiled-workflows/` (expected compiled Argo Workflows) + +## Local execution + +- **Subprocess Runner** (no Docker required): + +```python +from kfp import local +local.init(runner=local.SubprocessRunner()) + +# Run components directly +task = my_component(param="value") +print(task.output) +``` + +- **Docker Runner** (requires Docker): + +```python +from kfp import local +local.init(runner=local.DockerRunner()) + +# Runs components in containers +task = my_component(param="value") +``` + +- **Pipeline execution**: + +```python +# Pipelines can be executed like regular functions +run = my_pipeline(input_param="test") + +# If the pipeline has a single output: +print(run.output) + +# Or, for named outputs: +print(run.outputs['']) +``` + +Note: Local execution outputs are stored in `./local_outputs` by default. + +Notes: + +- SubprocessRunner supports only Lightweight Python Components (executes the KFP Python executor directly). +- Use DockerRunner for Container Components or when task images require containerized execution. + +## Regenerate protobufs after schema changes + +- Pipeline spec Protobufs live under `api/`. +- Run both Python and Go generations: + +```bash +make -C api python && make -C api golang +``` + +- Note for Linux with SELinux: protoc-related steps may fail under enforcing mode. + + - Temporarily disable before generation: `sudo setenforce 0` + - Re-enable after: `sudo setenforce 1` + +- `api/v2alpha1/python/kfp/pipeline_spec/pipeline_spec_pb2.py` is NOT committed. Any workflow or script installing + `kfp/api` from source must generate this file beforehand. 
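+
+A minimal guard for such a script (paths and Make targets are the ones documented above):
+
+```bash
+# Generate pipeline_spec_pb2.py before installing kfp-pipeline-spec from source
+if [ ! -f api/v2alpha1/python/kfp/pipeline_spec/pipeline_spec_pb2.py ]; then
+  make -C api python-dev  # or `make -C api python` for a non-editable build
+fi
+pip install -e api/v2alpha1/python --config-settings editable_mode=strict
+```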
+ +### 🚫 NEVER EDIT DIRECTLY (Generated files) + +The following files are generated; edit their sources and regenerate: + +- `api/v2alpha1/python/kfp/pipeline_spec/pipeline_spec_pb2.py` + - Source: `api/v2alpha1/pipeline_spec.proto` + - Generate: `make -C api python` (or `make -C api python-dev` for editable local dev) +- `kubernetes_platform/python/kfp/kubernetes/kubernetes_executor_config_pb2.py` + - Source: `kubernetes_platform/proto/kubernetes_executor_config.proto` + - Generate: `make -C kubernetes_platform python` (or `make -C kubernetes_platform python-dev`) +- Frontend API clients under `frontend/src/apis` and `frontend/src/apisv2beta1` + - Sources: Swagger specs under `backend/api/**/swagger/*.json` + - Generate: `cd frontend && npm run apis` / `npm run apis:v2beta1` +- Frontend MLMD proto outputs under `frontend/src/third_party/mlmd/generated` + - Sources: `third_party/ml-metadata/*.proto` + - Generate: `cd frontend && npm run build:protos` + +## Key paths and files + +- Architecture diagram: `images/kfp-cluster-wide-architecture.png` +- SDK compiler: `sdk/python/kfp/compiler/pipeline_spec_builder.py` +- DSL core: `sdk/python/kfp/dsl/` (e.g., `component_factory.py`, `pipeline_task.py`, `pipeline_context.py`) +- Executor entrypoint: `sdk/python/kfp/dsl/executor_main.py` +- Platform integration (Python): `kubernetes_platform/python/kfp/` +- Platform spec proto: `kubernetes_platform/proto/` +- API definitions (Protobufs): `api/` +- Backend (API server, driver, launcher, etc.): `backend/` +- Backend test suites: `backend/test/compiler`, `backend/test/v2/api`, `backend/test/end2end` +- Frontend: `frontend/` (React TypeScript, see `frontend/CONTRIBUTING.md`) +- Manifests (Kustomize bases/overlays for deployments): `manifests/` +- CI manifests and overlays used by workflows: `.github/resources/manifests/{kubernetes-native,multiuser,standalone}` +- Test data (inputs/goldens): `test_data/pipeline_files/valid/`, `test_data/compiled-workflows/` + +## Documentation + +- SDK reference docs are auto-generated with Sphinx using autodoc from Python docstrings. Keep SDK docstrings + user-facing and accurate, as they appear in published documentation. + +## Frontend development + +The KFP frontend is a React TypeScript application that provides the web UI for Kubeflow Pipelines. + +### Prerequisites + +- Node.js version specified in `frontend/.nvmrc` (currently v22.14.0) +- Java 8+ (required for `java -jar swagger-codegen-cli.jar` when generating API clients) +- Use [nvm](https://github.com/nvm-sh/nvm) or [fnm](https://github.com/Schniz/fnm) for Node version management: + + ```bash + # With fnm (faster) + fnm install 22.14.0 && fnm use 22.14.0 + # With nvm + nvm install 22.14.0 && nvm use 22.14.0 + ``` + +### Setup and installation + +```bash +cd frontend +npm ci # Install exact dependencies from package-lock.json +``` + +### Development workflows + +#### Local development with mock API + +Quick start for UI development without backend dependencies: + +```bash +npm run mock:api # Start mock backend server on port 3001 +npm start # Start React dev server on port 3000 (hot reload) +``` + +#### Local development with real cluster + +For full integration testing against a real KFP deployment: + +1. 
**Single-user mode**: + + ```bash + # Deploy KFP standalone (see Local cluster deployment section) + make -C backend kind-cluster-agnostic + + # Scale down cluster UI + kubectl -n kubeflow scale --replicas=0 deployment/ml-pipeline-ui + + # Start local development + npm run start:proxy-and-server # Proxy to cluster + hot reload + ``` + +2. **Multi-user mode**: + + ```bash + export REACT_APP_NAMESPACE=kubeflow-user-example-com + npm run build + # Install mod-header Chrome extension for auth headers + npm run start:proxy-and-server + ``` + +### Key technologies and architecture + +- **React 16** with TypeScript +- **Material-UI v3** for components +- **React Router v4** for navigation +- **Dagre** for graph layout visualization +- **D3** for data visualization +- **Jest + Enzyme** for testing +- **Prettier + ESLint** for code formatting/linting +- **Storybook** for component development +- **Tailwind CSS** for utility-first styling + +### Essential commands (frontend) + +- `npm start` - Start React dev server with hot reload (port 3000) +- `npm run start:proxy-and-server` - Full development with cluster proxy +- `npm run mock:api` - Start mock backend API server (port 3001) +- `npm run build` - Production build +- `npm run test` - Run unit tests +- `npm run test -u` - Update snapshot tests +- `npm run lint` - Run ESLint +- `npm run format` - Format code with Prettier +- `npm run storybook` - Start Storybook on port 6006 + +### Code generation + +The frontend includes several generated code components: + +- **API clients**: Generated from backend Swagger specs + + ```bash + npm run apis # Generate v1 API clients + npm run apis:v2beta1 # Generate v2beta1 API clients + ``` + + Note: Ensure `swagger-codegen-cli.jar` is available to `java -jar` when running from `frontend/` + (e.g., place the JAR in `frontend/` or reference a full path). + +- **Protocol Buffers**: Generated from proto definitions + + ```bash + npm run build:protos # MLMD protos + npm run build:pipeline-spec # Pipeline spec protos + npm run build:platform-spec:kubernetes-platform # K8s platform spec + ``` + +### Testing + +- **Unit tests**: `npm test` (Jest + Enzyme) +- **Server tests**: `npm run test:server:coverage` +- **Coverage**: `npm run test:coverage` +- **CI pipeline**: `npm run test:ci` (format check + lint + test coverage) +- **Snapshot tests**: Auto-update with `npm test -u` + +## CI/CD (GitHub Actions) + +- Workflows: `.github/workflows/` (build, test, lint, release) +- Composite actions: `.github/actions/` (e.g., `kfp-k8s`, `create-cluster`, `deploy`, `test-and-report`) +- Typical checks: Go unit tests (backend), Python SDK tests, frontend tests/lint, image builds. + +### Test matrices and variants (Kubernetes, stores, proxy, cache) + +- Kubernetes versions: CI runs a matrix across a low and high supported version, commonly `v1.29.2` and `v1.31.0`. + - Examples: `e2e-test.yml`, `sdk-execution.yml`, `upgrade-test.yml`, `kfp-kubernetes-execution-tests.yml`, `kfp-webhooks.yml`, `api-server-tests.yml`, `compiler-tests.yml`, `legacy-v2-api-integration-tests.yml`, `integration-tests-v1.yml`, and frontend integration in `e2e-test-frontend.yml`. +- Pipeline store variants (v2 engine): tests run with `database` and `kubernetes` stores, and a dedicated job compiles pipelines to Kubernetes-native manifests. + - Example: `e2e-test.yml` job "API integration tests v2 - K8s with ${pipeline_store}" and "compile pipelines with Kubernetes". 
+- Argo Workflows version matrix for compatibility (where relevant): e.g., `e2e-test.yml` includes an Argo job (e.g., `v3.5.14`). +- Proxy / cache toggles: dedicated jobs run with HTTP proxy enabled and with execution cache disabled to validate those modes. +- Artifacts: failing logs and test outputs are uploaded as workflow artifacts for debugging. + +### CI cluster setup and helpers + +- Kind-based clusters are provisioned via the `kfp-cluster` composite action, parameterized by `k8s_version`, `pipeline_store`, `proxy`, `cache_enabled`, and optional `argo_version`. +- The `create-cluster` and `deploy` actions are used by newer suites; `kfp-k8s` installs SDK components from source inside jobs that execute Python-based tests. +- The `protobuf` composite action prepares `protoc` and related dependencies when compiling Python protobufs. + +### Code style and formatting + +- **Prettier** config in `.prettierrc.yaml`: + - Single quotes, trailing commas, 100 char line width + - Format: `npm run format` + - Check: `npm run format:check` +- **ESLint** extends `react-app` with custom rules in `.eslintrc.yaml` +- **Auto-format on save**: Configure your IDE with the Prettier extension + +Notes: + +- Legacy `kfp-samples.yml` and `periodic.yml` workflows were removed. + +### Feature flags + +KFP frontend supports feature flags for development: + +- Configure in `src/features.ts` +- Access via `http://localhost:3000/#/frontend_features` +- Manage locally: `localStorage.setItem('flags', "")` + +### Common development tasks + +- **Add new API**: Update swagger specs, run `npm run apis` +- **Update proto definitions**: Modify protos, run respective build commands +- **Add new component**: Create in `atoms/` or `components/`, add tests and stories +- **Debug server**: Use `npm run start:proxy-and-server-inspect` +- **Bundle analysis**: `npm run analyze-bundle` + +### Troubleshooting + +- **Port conflicts**: Frontend uses 3000 (React), 3001 (Node server), 3002 (API proxy) +- **Node version issues**: Ensure you're using the version in `.nvmrc` +- **API generation failures**: Check that swagger-codegen-cli.jar is in PATH +- **Proto generation**: Requires `protoc` and `protoc-gen-grpc-web` in PATH +- **Mock backend**: Limited API support; use real cluster for full testing + +## Lint and formatting checks + +- Go lint (CI uses `golangci-lint`): + +```bash +golangci-lint run +``` + +- Python SDK import/order and unused import cleanups: + +```bash +pip install -r sdk/python/requirements-dev.txt +pycln --check sdk/python +isort --check --profile google sdk/python +``` + +- Python SDK formatting (YAPF + string fixer): + +```bash +pip install yapf pre_commit_hooks +python3 -m pre_commit_hooks.string_fixer $(find sdk/python/kfp/**/*.py -type f) +yapf --recursive --diff sdk/python/ +``` + +- Python SDK docstring formatting: + +```bash +pip install docformatter +docformatter --check --recursive sdk/python/ --exclude "compiler_test.py" +``` + +## Common agent workflows + +- **Modify pipeline spec schema**: + 1. Edit Protobufs under `api/` + 2. Regenerate: `make -C api python && make -C api golang` + 3. Update SDK/backend usages as needed +- **Adjust Kubernetes behavior for tasks**: + - Resource requests/limits: set on component specs; the Driver converts these into pod spec patches. + - All other Kubernetes config: handled via `kubernetes_platform` platform spec. 
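+
+A minimal sketch of both mechanisms (the component, secret, and key names are illustrative, not taken from this repo):
+
+```python
+from kfp import dsl
+from kfp import kubernetes
+
+
+@dsl.component
+def train() -> str:
+    return 'done'
+
+
+@dsl.pipeline
+def my_pipeline():
+    task = train()
+    # Resource requests/limits live on the task spec; the Driver turns them into a pod spec patch.
+    task.set_cpu_limit('1')
+    task.set_memory_limit('2G')
+    # Everything else Kubernetes-specific goes through the kubernetes_platform platform spec.
+    kubernetes.use_secret_as_env(
+        task,
+        secret_name='my-secret',
+        secret_key_to_env={'token': 'MY_TOKEN'},
+    )
+```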
+ +## Quick reference + +### Essential commands + +- Compile pipeline: `kfp dsl compile --py pipeline.py --output pipeline.yaml` +- Generate protos: `make -C api python && make -C api golang` +- Deploy local cluster (standalone): `make -C backend kind-cluster-agnostic` +- Deploy local cluster (development) and run the API server in the IDE: `make -C backend dev-kind-cluster` +- Run SDK tests: `pytest -v sdk/python/kfp` +- Run backend unit tests: `go test -v $(go list ./backend/... | grep -v backend/test/)` +- Run compiler tests: `ginkgo -v ./backend/test/compiler` +- Run API tests: `ginkgo -v --label-filter="Smoke" ./backend/test/v2/api` +- Run E2E tests: `ginkgo -v ./backend/test/end2end -- -namespace=kubeflow` +- Check formatting: + `yapf --recursive --diff sdk/python/ && pycln --check sdk/python && isort --check --profile google sdk/python` +- Frontend dev server: `cd frontend && npm start` +- Frontend with cluster: `cd frontend && npm run start:proxy-and-server` +- Frontend tests: `cd frontend && npm test` +- Frontend formatting: `cd frontend && npm run format` +- Generate frontend APIs: `cd frontend && npm run apis` + +### Key environment variables + +- `_KFP_RUNTIME=true`: Disables SDK imports during task execution +- `LOCAL_API_SERVER=true`: Enables local API server testing mode when running integration tests on a Kind cluster + +## Troubleshooting and pitfalls + +- `_KFP_RUNTIME=true` during executor runtime disables much of the SDK; avoid importing SDK-only modules from task code. +- `kfp` is installed into task containers with `--no-deps`; ensure runtime dependencies are present in `base_image`. +- SELinux enforcing can break proto generation; toggle with `setenforce` as noted above. +- Do not assume `pipeline_spec_pb2.py` exists in the repo; it must be generated. +- Frontend API generation requires `swagger-codegen-cli.jar` in PATH. +- Frontend proto generation requires `protoc` and `protoc-gen-grpc-web` binaries. +- Node version must match `.nvmrc`; use nvm/fnm to manage versions. +- Frontend port conflicts: 3000 (React), 3001 (Node server), 3002 (API proxy), 6006 (Storybook). + +### Common error patterns and quick fixes + +- Protobuf generation fails with "protoc: command not found": use the Make targets that run this in a container. +- Protobuf generation fails under SELinux enforcing: temporarily disable with `sudo setenforce 0`; re-enable after. +- API client generation fails with "Unable to access jarfile swagger-codegen-cli.jar": ensure the JAR is present and use `java -jar /swagger-codegen-cli.jar` from `frontend/`. +- Frontend fails to start due to Node version mismatch: `nvm use $(cat frontend/.nvmrc)` or `fnm use`. +- Runtime component imports SDK-only modules: `_KFP_RUNTIME=true` disables many SDK imports; avoid importing SDK-only modules in task code. 
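+
+For the `--no-deps` pitfall above, declare runtime dependencies on the component itself rather than relying on the SDK's transitive installs (a minimal sketch; the base image and package are illustrative):
+
+```python
+from kfp import dsl
+
+
+# Runtime dependencies must come from the base image or packages_to_install,
+# because `kfp` itself is installed into the task container with --no-deps.
+@dsl.component(base_image='python:3.11', packages_to_install=['pandas'])
+def count_rows(csv_path: str) -> int:
+    import pandas as pd  # imported inside the function so it resolves at task runtime
+    return len(pd.read_csv(csv_path))
+```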
diff --git a/CHANGELOG.md b/CHANGELOG.md index 5ccb6be252f..54d2792c2bd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,38 +1,130 @@ # Changelog +### [2.14.3](https://github.com/kubeflow/pipelines/compare/2.14.0...2.14.3) (2025-08-28) + + +### Bug Fixes + +* **backend:** Add support for additional filters on the Kubernetes native API ([\#12189](https://github.com/kubeflow/pipelines/issues/12189)) ([8bb97be](https://github.com/kubeflow/pipelines/commit/8bb97be463a99c9abfa1d6d824ebedbe2957c077)) +* **backend:** Fix mutating webhook when pipeline name label value is too long ([\#12162](https://github.com/kubeflow/pipelines/issues/12162)) ([1b6fd87](https://github.com/kubeflow/pipelines/commit/1b6fd87f851fb31e7b81d55f6c6ff1206ef43d18)) +* **backend:** Return from pipeline deletions after the cache is updated ([\#12153](https://github.com/kubeflow/pipelines/issues/12153)) ([df79a7a](https://github.com/kubeflow/pipelines/commit/df79a7ac2574dde0130fa665e9ac72c51aa05304)) +* **backend/sdk:** Fix REST API outputs for pipeline versions with invalid platform specs ([\#12183](https://github.com/kubeflow/pipelines/issues/12183)) ([0cafb3d](https://github.com/kubeflow/pipelines/commit/0cafb3db99fdcea38fc37d0aef2f7dd237559065)) +* **sdk:** Set spec.description when compiling to Kubernetes manifests ([\#12132](https://github.com/kubeflow/pipelines/issues/12132)) ([5ffddad](https://github.com/kubeflow/pipelines/commit/5ffddad045db75e9e5fb2957226b4a28706637bd)) +* **tools:** Fix a typo in convert_to_k8s_format ([\#12161](https://github.com/kubeflow/pipelines/issues/12161)) ([9984fb7](https://github.com/kubeflow/pipelines/commit/9984fb79cd6371e8fa58c23bb4a8b0fc1ee57465)) + + +### Other Pull Requests + +* Cherry-Pick 2.14.2 SDK changes ([\#12123](https://github.com/kubeflow/pipelines/issues/12123)) ([ec00fa3](https://github.com/kubeflow/pipelines/commit/ec00fa3ca34492c92271835fa87764cd1883ebda)) +* ignore adding pb2.py files for kfp-k8s docs ([3fad4df](https://github.com/kubeflow/pipelines/commit/3fad4df5dfa2246d6d52fd43f12b4802704a760a)) +* update sphinx deps ([25805b8](https://github.com/kubeflow/pipelines/commit/25805b8d352631b17bba334e98bdd00911ee8cb5)) + +## [2.14.0](https://github.com/kubeflow/pipelines/compare/2.5.0...2.14.0) (2025-08-05) + + +### Features + +* **backend:** Add CLI flags to support Kubernetes native API implementation ([\#11907](https://github.com/kubeflow/pipelines/issues/11907)) ([c368ac6](https://github.com/kubeflow/pipelines/commit/c368ac6881b75331a3b7aa31e3adb36eacf858a1)) +* **backend:** Add migration script to create Pipeline and PipelineVersion objects from the database to Kubernetes API ([\#11884](https://github.com/kubeflow/pipelines/issues/11884)) ([988477a](https://github.com/kubeflow/pipelines/commit/988477a7de1c9376abb51618c4b926226afe587f)) +* **backend:** Add support for platform specs on K8s native API ([\#12016](https://github.com/kubeflow/pipelines/issues/12016)) ([04407fb](https://github.com/kubeflow/pipelines/commit/04407fbe9397def09fd1e12df34190ba95f5d8a2)) +* **backend:** Add the Kubernetes native pipeline store ([\#11881](https://github.com/kubeflow/pipelines/issues/11881)) ([c03127d](https://github.com/kubeflow/pipelines/commit/c03127d9671ed2ac29350840b4a56c5cf0c227eb)) +* **backend:** add the option to enable/disable cache globally ([\#11831](https://github.com/kubeflow/pipelines/issues/11831)) ([9aebb62](https://github.com/kubeflow/pipelines/commit/9aebb62be1a9412d960fc681787468e1e3ab2001)) +* **backend:** Allow the launcher command to be configurable 
([\#11888](https://github.com/kubeflow/pipelines/issues/11888)) ([70d2888](https://github.com/kubeflow/pipelines/commit/70d28885f27bff40397f9c9ea790e5985deb91e1)) +* **backend:** parameterize retryStrategy input in Argo workflow ([\#11861](https://github.com/kubeflow/pipelines/issues/11861)) ([9245739](https://github.com/kubeflow/pipelines/commit/9245739f6fdd76769cd477f8952706cfe0eabc34)) +* **backend:** support for optional input parameters in nested pipelines ([\#11980](https://github.com/kubeflow/pipelines/issues/11980)) ([ecfe94e](https://github.com/kubeflow/pipelines/commit/ecfe94ebc3adc0dd41da9a944056ce4170ce9064)) +* **backend:** Support more than one label & annotations setting per component ([\#12049](https://github.com/kubeflow/pipelines/issues/12049)) ([a870b1a](https://github.com/kubeflow/pipelines/commit/a870b1a325dae0c82c8b6f57941468ee1aea960b)) +* **backend:** update Argo Workflow Compiler to create workspace PVCs ([\#11982](https://github.com/kubeflow/pipelines/issues/11982)) ([daac099](https://github.com/kubeflow/pipelines/commit/daac099508865670f41eeeef135fa22f9ec880f1)) +* **backend:** Use native k8s probes for api-server and visualization ([\#11960](https://github.com/kubeflow/pipelines/issues/11960)) ([cc78308](https://github.com/kubeflow/pipelines/commit/cc7830812ae3ced24962238dcbf5f113f28c4772)) +* **backend/frontend:** Add the name field for pipelines and pipeline versions ([\#11952](https://github.com/kubeflow/pipelines/issues/11952)) ([ea20731](https://github.com/kubeflow/pipelines/commit/ea207310601ffaf807b148a79a9ddcb8d4812886)) +* **backend/sdk:** enable dsl.Collected for parameters & artifacts ([\#11725](https://github.com/kubeflow/pipelines/issues/11725)) ([ed828b5](https://github.com/kubeflow/pipelines/commit/ed828b513aef4826f1f05e47168cf7b08a3c74ab)) +* **backend/sdk:** support PipelineTaskFinalStatus input ([\#11953](https://github.com/kubeflow/pipelines/issues/11953)) ([0d857b6](https://github.com/kubeflow/pipelines/commit/0d857b6f8a1261477bb05bc65aa474d992b57084)) +* **docs:** erdiagram of kfp-db ([\#12009](https://github.com/kubeflow/pipelines/issues/12009)) ([99326e1](https://github.com/kubeflow/pipelines/commit/99326e1bd13e6223c1cfb657524c9e5926d95ce5)) +* **docs:** Guide to report security vulnerabilities ([\#12044](https://github.com/kubeflow/pipelines/issues/12044)) ([9aa3dfb](https://github.com/kubeflow/pipelines/commit/9aa3dfb0d9810e3d3940b728e2e39dad1e956587)) +* **frontend:** Add "Always Use Latest Version" option for recurring runs (fixes [\#11581](https://github.com/kubeflow/pipelines/issues/11581)) ([\#11755](https://github.com/kubeflow/pipelines/issues/11755)) ([0e7e806](https://github.com/kubeflow/pipelines/commit/0e7e806b4cd52c20397220d6e0e8db79cae35894)) +* **proto:** Add WorkspaceConfig and KubernetesWorkspaceConfig message types to pipeline_spec proto ([\#11921](https://github.com/kubeflow/pipelines/issues/11921)) ([67f9b7d](https://github.com/kubeflow/pipelines/commit/67f9b7d73c895fd0fd027488ba20c255918d735e)) +* **sdk:** Add support for compiling pipelines to Kubernetes native format in SDK ([\#12012](https://github.com/kubeflow/pipelines/issues/12012)) ([dc398f6](https://github.com/kubeflow/pipelines/commit/dc398f689eb0b19e86fdbb554b33d9f6cb1095e3)) +* **sdk:** Add Support for Docker Container Run Arguments ([\#12006](https://github.com/kubeflow/pipelines/issues/12006)) ([268e089](https://github.com/kubeflow/pipelines/commit/268e0898ecdd3f9447988b6e675676eb21c584f0)) +* **sdk:** update PipelineConfig to reflect new workspace 
Protobuf changes ([\#11934](https://github.com/kubeflow/pipelines/issues/11934)) ([dda6033](https://github.com/kubeflow/pipelines/commit/dda6033a03a8f69e51c6672d964169521744357b)) +* **ui:** add `ESC` shortcut for closing SidePanel. Fixes [\#11873](https://github.com/kubeflow/pipelines/issues/11873) ([\#11874](https://github.com/kubeflow/pipelines/issues/11874)) ([c3d05eb](https://github.com/kubeflow/pipelines/commit/c3d05eb0b1ee492098c9db769371d975ebf0241a)) +* add openshift env to manifests ([\#11932](https://github.com/kubeflow/pipelines/issues/11932)) ([8329e64](https://github.com/kubeflow/pipelines/commit/8329e64716dab0e3485381f712aeadc80beb05f3)) + + +### Bug Fixes + +* **backend:** omit unknown fields in json marshaling ([\#12101](https://github.com/kubeflow/pipelines/issues/12101)) ([000a111](https://github.com/kubeflow/pipelines/commit/000a111396213847a437296afb511564ccc0c60d)) +* **deps:** revert metadata_writer k8s version ([\#12099](https://github.com/kubeflow/pipelines/issues/12099)) ([c64feac](https://github.com/kubeflow/pipelines/commit/c64feac20edf6129523fd5d8241d1f7140107fdd)) +* **sdk,backend:** Make the workspace size required ([\#12094](https://github.com/kubeflow/pipelines/issues/12094)) ([4bd3d4b](https://github.com/kubeflow/pipelines/commit/4bd3d4b4e99b5af38380ddad9693a2a0bbe4e968)) +* backwards compatibility for pipeline spec task_name ([\#12061](https://github.com/kubeflow/pipelines/issues/12061)) ([bcb9ee4](https://github.com/kubeflow/pipelines/commit/bcb9ee4324c4606f34ac000315b8b2f60df8c31e)) +* **apiserver:** fix typos in resource_manager.go: ([\#11998](https://github.com/kubeflow/pipelines/issues/11998)) ([3154ef9](https://github.com/kubeflow/pipelines/commit/3154ef9258c1dfb179fdb5d036989b126b2412fe)) +* **backend:** Fix boolean flag syntax for `--cache_disabled` and update test to cover pipelines with outputs ([\#12001](https://github.com/kubeflow/pipelines/issues/12001)) ([f240685](https://github.com/kubeflow/pipelines/commit/f240685bf3169251ca343fc985bbb5607be3f727)) +* **backend:** Fix the pipeline samples ([\#11967](https://github.com/kubeflow/pipelines/issues/11967)) ([b477269](https://github.com/kubeflow/pipelines/commit/b4772693ae0f7d90425f604ebdafbda1a1c4a5f3)) +* **backend:** increase max_metadata_size for ml-metadata grpc server ([\#12062](https://github.com/kubeflow/pipelines/issues/12062)) ([09ced38](https://github.com/kubeflow/pipelines/commit/09ced38500725c5e8542ce4885eee8d3cee58f02)) +* **backend:** Stop logging the strack trace on benign user errors ([\#11883](https://github.com/kubeflow/pipelines/issues/11883)) ([56da004](https://github.com/kubeflow/pipelines/commit/56da004d91d8db9b46b57544d1ad6699ddb4de4c)) +* **backend/sdk:** update proto packages ([\#12067](https://github.com/kubeflow/pipelines/issues/12067)) ([cc35187](https://github.com/kubeflow/pipelines/commit/cc35187dff270008c6bef505c828f442773ec97d)) +* **cache:** Optimize cache expiration query solves [\#11614](https://github.com/kubeflow/pipelines/issues/11614) ([\#11920](https://github.com/kubeflow/pipelines/issues/11920)) ([faa2c8c](https://github.com/kubeflow/pipelines/commit/faa2c8cd16bd5dddec2c30dd456f6d8f55f12471)) +* **components:** Changed method_whitelist. 
Fixes [\#11880](https://github.com/kubeflow/pipelines/issues/11880) ([\#11961](https://github.com/kubeflow/pipelines/issues/11961)) ([a123d53](https://github.com/kubeflow/pipelines/commit/a123d53be0158b961e8527999392358a4403d191)) +* **sdk:** Fixes for Identifying Untagged Images for Running ([\#11984](https://github.com/kubeflow/pipelines/issues/11984)) ([e8e23f0](https://github.com/kubeflow/pipelines/commit/e8e23f0d7a0a583b93e11e5690504306f4e0091e)) +* **sdk:** Move version info to version.py for editable installs. ([\#11997](https://github.com/kubeflow/pipelines/issues/11997)) ([ada935a](https://github.com/kubeflow/pipelines/commit/ada935a0ad86d02329e3cb6b9a36a399d9fd4a79)) +* **sdk:** Support partial replace of placeholders in dict/list objects ([\#12039](https://github.com/kubeflow/pipelines/issues/12039)) ([ce84873](https://github.com/kubeflow/pipelines/commit/ce848730b8f4bf8a2d07f3b53691d9801b0e60f5)) +* Delete the pipeline along with its all versions. Fixes [\#11665](https://github.com/kubeflow/pipelines/issues/11665) ([\#12019](https://github.com/kubeflow/pipelines/issues/12019)) ([ad2730c](https://github.com/kubeflow/pipelines/commit/ad2730cf19febf2d8a0a7d42ac8eafe1bb458a14)) +* input resolution with set_display_name ([\#11938](https://github.com/kubeflow/pipelines/issues/11938)) ([7f60100](https://github.com/kubeflow/pipelines/commit/7f60100602deefa3ed6ffbef22a948b7790c360f)) +* **sdk:** fix pip install for dev ([\#11891](https://github.com/kubeflow/pipelines/issues/11891)) ([4503eae](https://github.com/kubeflow/pipelines/commit/4503eae10d19518bc35310c57d2ef957b31f959c)) +* **sdk:** resolve issue when creating pipeline version from pipeline name using the cli. Fixes [\#11810](https://github.com/kubeflow/pipelines/issues/11810) ([\#11866](https://github.com/kubeflow/pipelines/issues/11866)) ([c68640d](https://github.com/kubeflow/pipelines/commit/c68640d95038f1b577caa44a2ff0bd966d059b94)) +* **sdk:** Resolves issue when using ParallelFor with param and depending tasks ([\#11903](https://github.com/kubeflow/pipelines/issues/11903)) ([ef94ccd](https://github.com/kubeflow/pipelines/commit/ef94ccd734957bdee3bbb98ea043738bb5795dc7)) +* **test:** Fix the frontend sample test ([\#11968](https://github.com/kubeflow/pipelines/issues/11968)) ([5447563](https://github.com/kubeflow/pipelines/commit/54475637a241f55957149a32b80cb8c44d8f6458)) +* **tests:** free up space in kfp samples test workflow ([\#11942](https://github.com/kubeflow/pipelines/issues/11942)) ([8fe090d](https://github.com/kubeflow/pipelines/commit/8fe090d461d7c2e1226c2fc46a80479790bcd2e5)) + + +### Other Pull Requests + +* revert changelog ([62082a4](https://github.com/kubeflow/pipelines/commit/62082a4812b84ed00102f05a81e8e924f02f36fe)) +* test generating backend proto go code ([\#12108](https://github.com/kubeflow/pipelines/issues/12108)) ([5685e88](https://github.com/kubeflow/pipelines/commit/5685e884f459da6d1f63c46a3185a7a24b22dfca)) +* update python packages to 2.14 ([3ffde88](https://github.com/kubeflow/pipelines/commit/3ffde88e5f306268e00ccbe0e450dd76e7baf1bf)) +* add versioning policy for KFP ([\#12105](https://github.com/kubeflow/pipelines/issues/12105)) ([9f5abab](https://github.com/kubeflow/pipelines/commit/9f5abab7d2d995b5a5f197c7e697fb30bc8c9ab7)) +* feat(sdk) Add Input Parameter support for node affinity ([\#12028](https://github.com/kubeflow/pipelines/issues/12028)) ([ecf488b](https://github.com/kubeflow/pipelines/commit/ecf488b65fed923595ed048a2d0e9ba3d932f409)) +* Fix broken Pipeline Root documentation link 
([\#12051](https://github.com/kubeflow/pipelines/issues/12051)) ([f20cec5](https://github.com/kubeflow/pipelines/commit/f20cec5b7097a628f18765d6160dd2316f41dec4)) +* add new KFP maintainers ([\#12059](https://github.com/kubeflow/pipelines/issues/12059)) ([69a1846](https://github.com/kubeflow/pipelines/commit/69a184637a97458037d65f6a9b938013d9e2f579)) +* user master for dev manifests ([\#11977](https://github.com/kubeflow/pipelines/issues/11977)) ([5181358](https://github.com/kubeflow/pipelines/commit/5181358d6a4ca4d0923ac90733cf83470763cdd2)) +* feat(frontend) Use native k8s probes for pipeline-ui ([\#11955](https://github.com/kubeflow/pipelines/issues/11955)) ([48468ae](https://github.com/kubeflow/pipelines/commit/48468ae1fa126f8668e275817f77023a585175f9)) +* feat(backend) implement retryStrategy for nested pipelines ([\#11908](https://github.com/kubeflow/pipelines/issues/11908)) ([beae62f](https://github.com/kubeflow/pipelines/commit/beae62fb528fc9044af54bf2c46771727d5d22b2)) +* - fix(launcher): missing executorInput parameter values caused by {{$}} evaluation order ([\#11925](https://github.com/kubeflow/pipelines/issues/11925)) ([3337b5e](https://github.com/kubeflow/pipelines/commit/3337b5e32377653968f477b05e5cbd5d6a081bdf)) +* add maintainer to kfp ([\#11900](https://github.com/kubeflow/pipelines/issues/11900)) ([e276474](https://github.com/kubeflow/pipelines/commit/e276474f970a1a92db7cf8c01d5ef716acc3ee4b)) +* No public description ([d90e4e8](https://github.com/kubeflow/pipelines/commit/d90e4e8a54fdd08a73ca9b0ebb404e7cb6035f7c)) +* update release doc paths & make script executable ([\#11871](https://github.com/kubeflow/pipelines/issues/11871)) ([8a402c1](https://github.com/kubeflow/pipelines/commit/8a402c10a8e3d36f964fb451760319c99a185e8f)) + ## [2.5.0](https://github.com/kubeflow/pipelines/compare/2.4.1...2.5.0) (2025-04-29) ### Features -* **backend:** Add a mutating webhook for the PipelineVersion kind (#11782) ([c9be64d](https://github.com/kubeflow/pipelines/commit/c9be64dca362a33dcfad186fe579066a646a6df1)) -* **backend:** Add the ability to set a proxy for accessing external resources (#11771) ([6e3548f](https://github.com/kubeflow/pipelines/commit/6e3548f33e226ba374e4d43a175ae8ac9018e268)) -* **backend:** Add types for KFP Kubernete Native API (#11672) ([0d9a7b0](https://github.com/kubeflow/pipelines/commit/0d9a7b00e926130b07058ea71148fbb9cab69d2b)) -* **backend:** Create a validating webhook for the PipelineVersion kind (#11774) ([2efcde5](https://github.com/kubeflow/pipelines/commit/2efcde5efd3952b91ea79a5ee6dbf064282f719a)) -* **backend:** implement logs as artifacts (#11762) ([cd3e747](https://github.com/kubeflow/pipelines/commit/cd3e747b5de3d7e1e338e309cc57311dd4a91258)) -* **backend:** implement logs as artifacts + CI updates (#11809) ([464ca39](https://github.com/kubeflow/pipelines/commit/464ca3974fbbc46e022f863e49c4fbaabd1a8265)) -* **backend/sdk:** Add input parameterization for various k8s resources (#11770) ([fd1b48b](https://github.com/kubeflow/pipelines/commit/fd1b48b4712038afe8a78e37843672d4773dc080)) -* **proto:** Add TTL fields to KFP IR yaml proto (#11758) ([c5aba41](https://github.com/kubeflow/pipelines/commit/c5aba41bcaf3c214d984db4571c1ecae4a0d551d)) -* **sdk:** add upload pipeline and upload pipeline version from pipeline function (#11804) ([1ad4f60](https://github.com/kubeflow/pipelines/commit/1ad4f608a0b9dea2362cf89f9cf7abdebf20e080)) +* **backend:** Add a mutating webhook for the PipelineVersion kind 
([\#11782](https://github.com/kubeflow/pipelines/issues/11782)) ([c9be64d](https://github.com/kubeflow/pipelines/commit/c9be64dca362a33dcfad186fe579066a646a6df1)) +* **backend:** Add the ability to set a proxy for accessing external resources ([\#11771](https://github.com/kubeflow/pipelines/issues/11771)) ([6e3548f](https://github.com/kubeflow/pipelines/commit/6e3548f33e226ba374e4d43a175ae8ac9018e268)) +* **backend:** Add types for KFP Kubernete Native API ([\#11672](https://github.com/kubeflow/pipelines/issues/11672)) ([0d9a7b0](https://github.com/kubeflow/pipelines/commit/0d9a7b00e926130b07058ea71148fbb9cab69d2b)) +* **backend:** Create a validating webhook for the PipelineVersion kind ([\#11774](https://github.com/kubeflow/pipelines/issues/11774)) ([2efcde5](https://github.com/kubeflow/pipelines/commit/2efcde5efd3952b91ea79a5ee6dbf064282f719a)) +* **backend:** implement logs as artifacts ([\#11762](https://github.com/kubeflow/pipelines/issues/11762)) ([cd3e747](https://github.com/kubeflow/pipelines/commit/cd3e747b5de3d7e1e338e309cc57311dd4a91258)) +* **backend:** implement logs as artifacts + CI updates ([\#11809](https://github.com/kubeflow/pipelines/issues/11809)) ([464ca39](https://github.com/kubeflow/pipelines/commit/464ca3974fbbc46e022f863e49c4fbaabd1a8265)) +* **backend/sdk:** Add input parameterization for various k8s resources ([\#11770](https://github.com/kubeflow/pipelines/issues/11770)) ([fd1b48b](https://github.com/kubeflow/pipelines/commit/fd1b48b4712038afe8a78e37843672d4773dc080)) +* **proto:** Add TTL fields to KFP IR yaml proto ([\#11758](https://github.com/kubeflow/pipelines/issues/11758)) ([c5aba41](https://github.com/kubeflow/pipelines/commit/c5aba41bcaf3c214d984db4571c1ecae4a0d551d)) +* **sdk:** add upload pipeline and upload pipeline version from pipeline function ([\#11804](https://github.com/kubeflow/pipelines/issues/11804)) ([1ad4f60](https://github.com/kubeflow/pipelines/commit/1ad4f608a0b9dea2362cf89f9cf7abdebf20e080)) ### Bug Fixes -* **backend:** Fix run submissions with OwnerReferencesPermissionEnforcement on (#11821) ([69ba50b](https://github.com/kubeflow/pipelines/commit/69ba50b3fb03bd8441f833950a6c77835a2d47a1)) -* **backend:** fixed Dockerfile (#11841) ([d38418e](https://github.com/kubeflow/pipelines/commit/d38418efeadec3ea3bea55b3373bcc311dacc135)) -* **backend:** Include missing go.mod for cacheserver/viewercontroller images (#11776) ([715ed40](https://github.com/kubeflow/pipelines/commit/715ed40b92f9bca521f94e0df5201425d9d30866)) +* **backend:** Fix run submissions with OwnerReferencesPermissionEnforcement on ([\#11821](https://github.com/kubeflow/pipelines/issues/11821)) ([69ba50b](https://github.com/kubeflow/pipelines/commit/69ba50b3fb03bd8441f833950a6c77835a2d47a1)) +* **backend:** fixed Dockerfile ([\#11841](https://github.com/kubeflow/pipelines/issues/11841)) ([d38418e](https://github.com/kubeflow/pipelines/commit/d38418efeadec3ea3bea55b3373bcc311dacc135)) +* **backend:** Include missing go.mod for cacheserver/viewercontroller images ([\#11776](https://github.com/kubeflow/pipelines/issues/11776)) ([715ed40](https://github.com/kubeflow/pipelines/commit/715ed40b92f9bca521f94e0df5201425d9d30866)) * **components:** Set tensorboard_experiment_name to random uuid when uploading plots in Starry Net pipeline ([cc56d04](https://github.com/kubeflow/pipelines/commit/cc56d04c46d01666a8e091b124473c2654e1b6d3)) -* **deployment:** Update kustomize manifests to use new label and patch syntax (#11733) 
([230c1b8](https://github.com/kubeflow/pipelines/commit/230c1b8f1332ffab575b2e69b65d9a6958167195)) -* **deps:** widen urllib3 upper bound to <3.0.0 (#11819) ([866ff35](https://github.com/kubeflow/pipelines/commit/866ff3556a4454ccb52f1594bbca4167a04c0d3e)) -* **docs:** Remove Podman as backend README pre-req (#11824) ([88cff55](https://github.com/kubeflow/pipelines/commit/88cff559142e5a985cf31620f07b71244645cb4a)) -* **docs:** Use the latest driver and launcher images in the dev environment (#11820) ([92e4921](https://github.com/kubeflow/pipelines/commit/92e4921c4cce8155093bf7e332abfbf03bd6eaef)) -* **local:** warn about oci:// not supported too (#11794) ([564522c](https://github.com/kubeflow/pipelines/commit/564522c42de9136dec67f1bf29590bdd64bf2333)) -* **metadata-writer:** use mlmd_store.get_context_types() instead of workaround (#11753) ([35041ef](https://github.com/kubeflow/pipelines/commit/35041ef2bd4d9b3261f1250f5803786ed9e453fe)) -* **sdk:** Add SDK support for setting resource limits on older KFP versions (#11839) ([f9d487c](https://github.com/kubeflow/pipelines/commit/f9d487cb605727f357f58783db298d96898b24d1)) -* **sdk:** allow google-cloud-storage < 4 (#11735) ([bd4fc5c](https://github.com/kubeflow/pipelines/commit/bd4fc5c6677402d5f2d9ac45481ac86f25da4640)) -* **sdk:** avoid conflicting component names in DAG when reusing pipelines (#11071) ([d1b15ef](https://github.com/kubeflow/pipelines/commit/d1b15ef4da33cbeafa491564318c7e2a68dc431f)) -* **tests:** free up space in some test runners (#11818) ([478ca08](https://github.com/kubeflow/pipelines/commit/478ca089012e64edd371feff4ece9d0d156d4710)) -* minio fsgroup for popular clusters (#11734) ([8d0ae53](https://github.com/kubeflow/pipelines/commit/8d0ae5381e8366905c90009c56fd0e4807e94f0f)) +* **deployment:** Update kustomize manifests to use new label and patch syntax ([\#11733](https://github.com/kubeflow/pipelines/issues/11733)) ([230c1b8](https://github.com/kubeflow/pipelines/commit/230c1b8f1332ffab575b2e69b65d9a6958167195)) +* **deps:** widen urllib3 upper bound to <3.0.0 ([\#11819](https://github.com/kubeflow/pipelines/issues/11819)) ([866ff35](https://github.com/kubeflow/pipelines/commit/866ff3556a4454ccb52f1594bbca4167a04c0d3e)) +* **docs:** Remove Podman as backend README pre-req ([\#11824](https://github.com/kubeflow/pipelines/issues/11824)) ([88cff55](https://github.com/kubeflow/pipelines/commit/88cff559142e5a985cf31620f07b71244645cb4a)) +* **docs:** Use the latest driver and launcher images in the dev environment ([\#11820](https://github.com/kubeflow/pipelines/issues/11820)) ([92e4921](https://github.com/kubeflow/pipelines/commit/92e4921c4cce8155093bf7e332abfbf03bd6eaef)) +* **local:** warn about oci:// not supported too ([\#11794](https://github.com/kubeflow/pipelines/issues/11794)) ([564522c](https://github.com/kubeflow/pipelines/commit/564522c42de9136dec67f1bf29590bdd64bf2333)) +* **metadata-writer:** use mlmd_store.get_context_types() instead of workaround ([\#11753](https://github.com/kubeflow/pipelines/issues/11753)) ([35041ef](https://github.com/kubeflow/pipelines/commit/35041ef2bd4d9b3261f1250f5803786ed9e453fe)) +* **sdk:** Add SDK support for setting resource limits on older KFP versions ([\#11839](https://github.com/kubeflow/pipelines/issues/11839)) ([f9d487c](https://github.com/kubeflow/pipelines/commit/f9d487cb605727f357f58783db298d96898b24d1)) +* **sdk:** allow google-cloud-storage < 4 ([\#11735](https://github.com/kubeflow/pipelines/issues/11735)) 
([bd4fc5c](https://github.com/kubeflow/pipelines/commit/bd4fc5c6677402d5f2d9ac45481ac86f25da4640)) +* **sdk:** avoid conflicting component names in DAG when reusing pipelines ([\#11071](https://github.com/kubeflow/pipelines/issues/11071)) ([d1b15ef](https://github.com/kubeflow/pipelines/commit/d1b15ef4da33cbeafa491564318c7e2a68dc431f)) +* **tests:** free up space in some test runners ([\#11818](https://github.com/kubeflow/pipelines/issues/11818)) ([478ca08](https://github.com/kubeflow/pipelines/commit/478ca089012e64edd371feff4ece9d0d156d4710)) +* minio fsgroup for popular clusters ([\#11734](https://github.com/kubeflow/pipelines/issues/11734)) ([8d0ae53](https://github.com/kubeflow/pipelines/commit/8d0ae5381e8366905c90009c56fd0e4807e94f0f)) ### Other Pull Requests @@ -41,18 +133,18 @@ * add list or dict support for add toleration json ([fb18235](https://github.com/kubeflow/pipelines/commit/fb182355f08e41eff1ac530be1afac0bad69e15d)) * add backend support for toleration lists. ([90909fc](https://github.com/kubeflow/pipelines/commit/90909fc0ef58b71362017a3e48c924b38c389183)) * switch selenium image to ghcr ([7529bbe](https://github.com/kubeflow/pipelines/commit/7529bbeba7f245366ca1cbc280169e20a7100a6a)) -* add missing release note updates to sdk main branch (#11842) ([611d582](https://github.com/kubeflow/pipelines/commit/611d5820049dc51ddf261d7d1368c4858dad5159)) -* fix component retry test (#11836) ([598826e](https://github.com/kubeflow/pipelines/commit/598826e1ccfecb5f34716876053a22cdc6605ae4)) -* **chore:** add cleanup resources to sdk execution tests (#11823) ([eee4986](https://github.com/kubeflow/pipelines/commit/eee4986f180cd4e7469a65a3c5f4ffbf3ec0b46c)) -* update driver & launcher image handling (#11533) ([38a4653](https://github.com/kubeflow/pipelines/commit/38a46533fcd47aa31e825109e2bf6940d127910a)) -* **chore:** add image builds for default branch (#11800) ([eacb586](https://github.com/kubeflow/pipelines/commit/eacb586f6225bb277642f4977552f799850e06a1)) -* fix setup env for kfp k8s lib tests (#11798) ([f10c7bf](https://github.com/kubeflow/pipelines/commit/f10c7bfbbcf01eb25f2fa8a437da62bbf07dc1f5)) -* Handle optional pipeline inputs in the driver (#11788) ([bb7a108](https://github.com/kubeflow/pipelines/commit/bb7a1082c4c5a3fb308aac2bf37bab476c3c4df6)) -* Fix recurring run output when always using latest (#11790) ([048f283](https://github.com/kubeflow/pipelines/commit/048f28332b6a0b6684632e76dcb284de2f81d829)) -* increase stale action timers (#11792) ([ade8a2d](https://github.com/kubeflow/pipelines/commit/ade8a2d072efa9897a5a0173316836236d629238)) -* Fix PSS restricted warnings (#11751) ([01999b8](https://github.com/kubeflow/pipelines/commit/01999b8fea23db52da0f633e475c457fc06ca531)) -* fix(CI) Github action is vulnerable to code execution via `comment body` (#11772) ([95c3f2c](https://github.com/kubeflow/pipelines/commit/95c3f2c04d8f19b8b656ddbda046ed9f2c81130a)) -* Fix Istio sidecar injection by moving from annotations to labels (#11750) ([df4e9c2](https://github.com/kubeflow/pipelines/commit/df4e9c2bf5b645f4a3fa831b073846eae5eaceb7)) +* add missing release note updates to sdk main branch ([\#11842](https://github.com/kubeflow/pipelines/issues/11842)) ([611d582](https://github.com/kubeflow/pipelines/commit/611d5820049dc51ddf261d7d1368c4858dad5159)) +* fix component retry test ([\#11836](https://github.com/kubeflow/pipelines/issues/11836)) ([598826e](https://github.com/kubeflow/pipelines/commit/598826e1ccfecb5f34716876053a22cdc6605ae4)) +* **chore:** add cleanup resources to sdk 
execution tests ([\#11823](https://github.com/kubeflow/pipelines/issues/11823)) ([eee4986](https://github.com/kubeflow/pipelines/commit/eee4986f180cd4e7469a65a3c5f4ffbf3ec0b46c)) +* update driver & launcher image handling ([\#11533](https://github.com/kubeflow/pipelines/issues/11533)) ([38a4653](https://github.com/kubeflow/pipelines/commit/38a46533fcd47aa31e825109e2bf6940d127910a)) +* **chore:** add image builds for default branch ([\#11800](https://github.com/kubeflow/pipelines/issues/11800)) ([eacb586](https://github.com/kubeflow/pipelines/commit/eacb586f6225bb277642f4977552f799850e06a1)) +* fix setup env for kfp k8s lib tests ([\#11798](https://github.com/kubeflow/pipelines/issues/11798)) ([f10c7bf](https://github.com/kubeflow/pipelines/commit/f10c7bfbbcf01eb25f2fa8a437da62bbf07dc1f5)) +* Handle optional pipeline inputs in the driver ([\#11788](https://github.com/kubeflow/pipelines/issues/11788)) ([bb7a108](https://github.com/kubeflow/pipelines/commit/bb7a1082c4c5a3fb308aac2bf37bab476c3c4df6)) +* Fix recurring run output when always using latest ([\#11790](https://github.com/kubeflow/pipelines/issues/11790)) ([048f283](https://github.com/kubeflow/pipelines/commit/048f28332b6a0b6684632e76dcb284de2f81d829)) +* increase stale action timers ([\#11792](https://github.com/kubeflow/pipelines/issues/11792)) ([ade8a2d](https://github.com/kubeflow/pipelines/commit/ade8a2d072efa9897a5a0173316836236d629238)) +* Fix PSS restricted warnings ([\#11751](https://github.com/kubeflow/pipelines/issues/11751)) ([01999b8](https://github.com/kubeflow/pipelines/commit/01999b8fea23db52da0f633e475c457fc06ca531)) +* fix(CI) Github action is vulnerable to code execution via `comment body` ([\#11772](https://github.com/kubeflow/pipelines/issues/11772)) ([95c3f2c](https://github.com/kubeflow/pipelines/commit/95c3f2c04d8f19b8b656ddbda046ed9f2c81130a)) +* Fix Istio sidecar injection by moving from annotations to labels ([\#11750](https://github.com/kubeflow/pipelines/issues/11750)) ([df4e9c2](https://github.com/kubeflow/pipelines/commit/df4e9c2bf5b645f4a3fa831b073846eae5eaceb7)) ## [2.5.0](https://github.com/kubeflow/pipelines/compare/2.3.0...2.5.0) (2025-04-28) @@ -4506,7 +4598,6 @@ * **test:** Fix presubmit with python version upgrade ([\#5033](https://github.com/kubeflow/pipelines/issues/5033)) ([a8b7fc9](https://github.com/kubeflow/pipelines/commit/a8b7fc97b177b8fd835315d116482bf293cfa8a4)) * **test:** Pin pip version in presubmit-tests-tfx.sh Fixes [\#5049](https://github.com/kubeflow/pipelines/issues/5049) ([\#5050](https://github.com/kubeflow/pipelines/issues/5050)) ([cad02dc](https://github.com/kubeflow/pipelines/commit/cad02dc2836ee25dac7c29658d0c0f6dea6fc17e)) - ### Other Pull Requests * Add references for Tekton backend ([\#4821](https://github.com/kubeflow/pipelines/issues/4821)) ([b5e820e](https://github.com/kubeflow/pipelines/commit/b5e820e217b7de947131f440a8391f6be475a6bf)) @@ -4561,7 +4652,6 @@ * **test:** Fix presubmit with python version upgrade ([\#5033](https://github.com/kubeflow/pipelines/issues/5033)) ([a8b7fc9](https://github.com/kubeflow/pipelines/commit/a8b7fc97b177b8fd835315d116482bf293cfa8a4)) * **test:** Pin pip version in presubmit-tests-tfx.sh Fixes [\#5049](https://github.com/kubeflow/pipelines/issues/5049) ([\#5050](https://github.com/kubeflow/pipelines/issues/5050)) ([cad02dc](https://github.com/kubeflow/pipelines/commit/cad02dc2836ee25dac7c29658d0c0f6dea6fc17e)) - ### Other Pull Requests * disable discovery cache ([\#5058](https://github.com/kubeflow/pipelines/issues/5058)) 
([48a34e6](https://github.com/kubeflow/pipelines/commit/48a34e677dbb7df6cdb462cfeeb51e16305d5f9e)) @@ -6610,9 +6700,9 @@ Detailed PR can be found [here](https://github.com/kubeflow/pipelines/commits) ## [0.1.27](https://github.com/kubeflow/pipelines/tree/0.1.27) (2019-08-22) [Full Changelog](https://github.com/kubeflow/pipelines/compare/0.1.26...0.1.27) - **Merged pull requests:** +**Merged pull requests:** - - update namespaced-install.yaml [\#1926](https://github.com/kubeflow/pipelines/pull/1926) ([IronPan](https://github.com/IronPan)) +- update namespaced-install.yaml [\#1926](https://github.com/kubeflow/pipelines/pull/1926) ([IronPan](https://github.com/IronPan)) - Fix lint related issue [\#1922](https://github.com/kubeflow/pipelines/pull/1922) ([numerology](https://github.com/numerology)) - Cleanup pipeline-lite deployment [\#1921](https://github.com/kubeflow/pipelines/pull/1921) ([IronPan](https://github.com/IronPan)) - Allow visualization kernel timeout to be specifiable via environment variables [\#1920](https://github.com/kubeflow/pipelines/pull/1920) ([ajchili](https://github.com/ajchili)) @@ -6842,12 +6932,12 @@ Detailed PR can be found [here](https://github.com/kubeflow/pipelines/commits) - SDK - Travis configuration for Python 3.5 and 3.7 [\#1467](https://github.com/kubeflow/pipelines/pull/1467) ([kvalev](https://github.com/kvalev)) - Add timeout out in dsl [\#1465](https://github.com/kubeflow/pipelines/pull/1465) ([gaoning777](https://github.com/gaoning777)) - ## [0.1.22](https://github.com/kubeflow/pipelines/tree/0.1.22) (2019-06-21) +## [0.1.22](https://github.com/kubeflow/pipelines/tree/0.1.22) (2019-06-21) [Full Changelog](https://github.com/kubeflow/pipelines/compare/0.1.21...0.1.22) - **Merged pull requests:** +**Merged pull requests:** - - increment sdk versions [\#1538](https://github.com/kubeflow/pipelines/pull/1538) ([hongye-sun](https://github.com/hongye-sun)) +- increment sdk versions [\#1538](https://github.com/kubeflow/pipelines/pull/1538) ([hongye-sun](https://github.com/hongye-sun)) - SDK/Client - Added support for all APIs [\#1536](https://github.com/kubeflow/pipelines/pull/1536) ([Ark-kun](https://github.com/Ark-kun)) - SDK/Client - Added the upload\_pipeline API [\#1535](https://github.com/kubeflow/pipelines/pull/1535) ([Ark-kun](https://github.com/Ark-kun)) - Update Watson pipeline component source to the latest commit [\#1533](https://github.com/kubeflow/pipelines/pull/1533) ([Tomcli](https://github.com/Tomcli)) @@ -6953,12 +7043,12 @@ Detailed PR can be found [here](https://github.com/kubeflow/pipelines/commits) ## [0.1.20](https://github.com/kubeflow/pipelines/tree/0.1.20) (2019-05-14) [Full Changelog](https://github.com/kubeflow/pipelines/compare/0.1.19...0.1.20) - **Closed issues:** +**Closed issues:** - - Cannot create job for experiment via Pipelines Go CLI [\#1321](https://github.com/kubeflow/pipelines/issues/1321) - - Support a container flow inside one pod [\#1313](https://github.com/kubeflow/pipelines/issues/1313) - - toleration support for ContainerOp [\#1265](https://github.com/kubeflow/pipelines/issues/1265) - - Can only create recurring run from within experiment page [\#1217](https://github.com/kubeflow/pipelines/issues/1217) +- Cannot create job for experiment via Pipelines Go CLI [\#1321](https://github.com/kubeflow/pipelines/issues/1321) +- Support a container flow inside one pod [\#1313](https://github.com/kubeflow/pipelines/issues/1313) +- toleration support for ContainerOp [\#1265](https://github.com/kubeflow/pipelines/issues/1265) +- 
Can only create recurring run from within experiment page [\#1217](https://github.com/kubeflow/pipelines/issues/1217) **Merged pull requests:** @@ -6987,25 +7077,25 @@ Detailed PR can be found [here](https://github.com/kubeflow/pipelines/commits) - changelog for v0.1.19 [\#1296](https://github.com/kubeflow/pipelines/pull/1296) ([hongye-sun](https://github.com/hongye-sun)) - add nuclio components \(to build/deploy, delete, invoke functions\) [\#1295](https://github.com/kubeflow/pipelines/pull/1295) ([yaronha](https://github.com/yaronha)) - SDK - Failing faster in python\_op tests [\#1291](https://github.com/kubeflow/pipelines/pull/1291) ([Ark-kun](https://github.com/Ark-kun)) - - SDK - Renamed ModelBase.from\_struct/to\_struct to from\_dict/to\_dict [\#1290](https://github.com/kubeflow/pipelines/pull/1290) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Renamed ModelBase.from\_struct/to\_struct to from\_dict/to\_dict [\#1290](https://github.com/kubeflow/pipelines/pull/1290) ([Ark-kun](https://github.com/Ark-kun)) - Backend - Marking auto-added artifacts as optional [\#1289](https://github.com/kubeflow/pipelines/pull/1289) ([Ark-kun](https://github.com/Ark-kun)) - Update new Watson OpenScale components and pipeline [\#1287](https://github.com/kubeflow/pipelines/pull/1287) ([Tomcli](https://github.com/Tomcli)) - - Add AWS EMR and Athena components [\#1286](https://github.com/kubeflow/pipelines/pull/1286) ([Jeffwan](https://github.com/Jeffwan)) +- Add AWS EMR and Athena components [\#1286](https://github.com/kubeflow/pipelines/pull/1286) ([Jeffwan](https://github.com/Jeffwan)) - Make confusion\_matrix and roc generic [\#1285](https://github.com/kubeflow/pipelines/pull/1285) ([Jeffwan](https://github.com/Jeffwan)) - - Components - Updating component versions in samples during release [\#1283](https://github.com/kubeflow/pipelines/pull/1283) ([Ark-kun](https://github.com/Ark-kun)) - - Sets the background color for KFP pages [\#1281](https://github.com/kubeflow/pipelines/pull/1281) ([rileyjbauer](https://github.com/rileyjbauer)) - - keep the api image name consistent between the presubmit test and staging [\#1279](https://github.com/kubeflow/pipelines/pull/1279) ([gaoning777](https://github.com/gaoning777)) - - Frontend - Add support for artifacts stored in S3 [\#1278](https://github.com/kubeflow/pipelines/pull/1278) ([Jeffwan](https://github.com/Jeffwan)) +- Components - Updating component versions in samples during release [\#1283](https://github.com/kubeflow/pipelines/pull/1283) ([Ark-kun](https://github.com/Ark-kun)) +- Sets the background color for KFP pages [\#1281](https://github.com/kubeflow/pipelines/pull/1281) ([rileyjbauer](https://github.com/rileyjbauer)) +- keep the api image name consistent between the presubmit test and staging [\#1279](https://github.com/kubeflow/pipelines/pull/1279) ([gaoning777](https://github.com/gaoning777)) +- Frontend - Add support for artifacts stored in S3 [\#1278](https://github.com/kubeflow/pipelines/pull/1278) ([Jeffwan](https://github.com/Jeffwan)) - Release - Simplified python package building [\#1277](https://github.com/kubeflow/pipelines/pull/1277) ([Ark-kun](https://github.com/Ark-kun)) - - Add SageMaker components and example pipeline [\#1276](https://github.com/kubeflow/pipelines/pull/1276) ([Jeffwan](https://github.com/Jeffwan)) +- Add SageMaker components and example pipeline [\#1276](https://github.com/kubeflow/pipelines/pull/1276) ([Jeffwan](https://github.com/Jeffwan)) - Tests/Travis - Simplified the Python SDK package installation 
[\#1275](https://github.com/kubeflow/pipelines/pull/1275) ([Ark-kun](https://github.com/Ark-kun)) - - Adds a toggle between one-off and recurring runs to NewRun page [\#1274](https://github.com/kubeflow/pipelines/pull/1274) ([rileyjbauer](https://github.com/rileyjbauer)) - - spark components [\#1272](https://github.com/kubeflow/pipelines/pull/1272) ([animeshsingh](https://github.com/animeshsingh)) +- Adds a toggle between one-off and recurring runs to NewRun page [\#1274](https://github.com/kubeflow/pipelines/pull/1274) ([rileyjbauer](https://github.com/rileyjbauer)) +- spark components [\#1272](https://github.com/kubeflow/pipelines/pull/1272) ([animeshsingh](https://github.com/animeshsingh)) - support tolerations for ContainerOps [\#1269](https://github.com/kubeflow/pipelines/pull/1269) ([hamedhsn](https://github.com/hamedhsn)) - - make pending timeout customizable [\#1268](https://github.com/kubeflow/pipelines/pull/1268) ([cheyang](https://github.com/cheyang)) - - SDK/Client - Supporting pipeline packages with multiple files [\#1207](https://github.com/kubeflow/pipelines/pull/1207) ([Ark-kun](https://github.com/Ark-kun)) - - Retaining the component url, digest or tag when loading [\#1090](https://github.com/kubeflow/pipelines/pull/1090) ([Ark-kun](https://github.com/Ark-kun)) - - Allow to specify informers namespace in persistence agent [\#901](https://github.com/kubeflow/pipelines/pull/901) ([ywskycn](https://github.com/ywskycn)) +- make pending timeout customizable [\#1268](https://github.com/kubeflow/pipelines/pull/1268) ([cheyang](https://github.com/cheyang)) +- SDK/Client - Supporting pipeline packages with multiple files [\#1207](https://github.com/kubeflow/pipelines/pull/1207) ([Ark-kun](https://github.com/Ark-kun)) +- Retaining the component url, digest or tag when loading [\#1090](https://github.com/kubeflow/pipelines/pull/1090) ([Ark-kun](https://github.com/Ark-kun)) +- Allow to specify informers namespace in persistence agent [\#901](https://github.com/kubeflow/pipelines/pull/901) ([ywskycn](https://github.com/ywskycn)) ## [0.1.19](https://github.com/kubeflow/pipelines/tree/0.1.19) (2019-05-03) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e35ec01aa9b..40a67f94af6 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -181,11 +181,11 @@ usually have different reviewers. If you are not sure, or the PR doesn't fit into above scopes. You can either omit the scope because it's optional, or propose an additional scope here. -## Adding Architectural Decision Records +## Adding Kubernetes Enhancement Proposals (KEPs) -When a change requires a significant change to the underlying system, it should be preceded with an Architectural Decision Record (ADR). +When a change requires a significant change to the underlying system, it should be preceded by a Kubernetes Enhancement Proposal (KEP). -KFP ADRs are found in the `adrs` folder at the root of this repo. Read more about the process [here](adrs/README.md). +KEPs are found in the `proposals` folder at the root of this repo. Read more about the process [here](proposals/README.md). 
## Community Guidelines diff --git a/Makefile b/Makefile index c82ffaeb8f3..ccf1e5051f6 100644 --- a/Makefile +++ b/Makefile @@ -11,3 +11,12 @@ check-diff: git diff; \ exit 1; \ fi' + +# Tools +BIN_DIR ?= $(CURDIR)/bin + +.PHONY: ginkgo +ginkgo: + mkdir -p $(BIN_DIR) + GOBIN=$(BIN_DIR) go install github.com/onsi/ginkgo/v2/ginkgo@latest + @echo "Ginkgo installed to $(BIN_DIR)/ginkgo" diff --git a/README.md b/README.md index 9dcb85daaba..7be81b1d4bb 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,7 @@ [![SDK Package version](https://img.shields.io/pypi/v/kfp?color=%2334D058&label=pypi%20package)](https://pypi.org/project/kfp) [![SDK Supported Python versions](https://img.shields.io/pypi/pyversions/kfp.svg?color=%2334D058)](https://pypi.org/project/kfp) [![OpenSSF Best Practices](https://www.bestpractices.dev/projects/9938/badge)](https://www.bestpractices.dev/projects/9938) +[![Ask DeepWiki](https://deepwiki.com/badge.svg)](https://deepwiki.com/kubeflow/pipelines) ## Overview of the Kubeflow pipelines service @@ -24,6 +25,13 @@ The Kubeflow pipelines service has the following goals: * The Docker container runtime has been deprecated on Kubernetes 1.20+. Kubeflow Pipelines has switched to use [Emissary Executor](https://www.kubeflow.org/docs/components/pipelines/legacy-v1/installation/choose-executor/#emissary-executor) by default from Kubeflow Pipelines 1.8. Emissary executor is Container runtime agnostic, meaning you are able to run Kubeflow Pipelines on Kubernetes cluster with any [Container runtimes](https://kubernetes.io/docs/setup/production-environment/container-runtimes/). +### Dependencies Compatibility Matrix + +| Dependency | Versions | +| -------------- |------------------| +| Argo Workflows | v3.5, v3.6, v3.7 | +| MySQL | v8 | + ## Documentation Get started with your first pipeline and read further information in the [Kubeflow Pipelines overview](https://www.kubeflow.org/docs/components/pipelines/overview/). @@ -34,6 +42,11 @@ See the Kubeflow [Pipelines API doc](https://www.kubeflow.org/docs/components/pi Consult the [Python SDK reference docs](https://kubeflow-pipelines.readthedocs.io/en/stable/) when writing pipelines using the Python SDK. +## Deep Wiki +Check out our AI Powered repo documentation on [DeepWiki](https://deepwiki.com/kubeflow/pipelines). + +> :warning: Please note, this is AI generated and may not have completely accurate information. + ## Contributing to Kubeflow Pipelines Before you start contributing to Kubeflow Pipelines, read the guidelines in [How to Contribute](./CONTRIBUTING.md). To learn how to build and deploy Kubeflow Pipelines from source code, read the [developer guide](./developer_guide.md). 
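A quick usage sketch for the `ginkgo` Makefile target added above (a minimal illustration; the suite path is a placeholder, point it at whichever Ginkgo suite you are working on):

```bash
# Install the Ginkgo CLI into ./bin via the new Makefile target (BIN_DIR defaults to $(CURDIR)/bin).
make ginkgo

# Run a test suite with the locally installed binary; the package path below is only an example.
./bin/ginkgo -v ./backend/test/...
```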
@@ -56,10 +69,11 @@ We also have a slack channel (#kubeflow-pipelines) on the Cloud Native Computing ## Architecture -Details about the KFP Architecture can be found at [Architecture.md](docs/Architecture.md) +Details about the KFP Architecture can be found at [Architecture.md](docs/sdk/Architecture.md) ## Blog posts +* [From Raw Data to Model Serving: A Blueprint for the AI/ML Lifecycle with Kubeflow](https://blog.kubeflow.org/fraud-detection-e2e/) (By [Helber Belmiro](https://github.com/hbelmiro)) * [Getting started with Kubeflow Pipelines](https://cloud.google.com/blog/products/ai-machine-learning/getting-started-kubeflow-pipelines) (By Amy Unruh) * How to create and deploy a Kubeflow Machine Learning Pipeline (By Lak Lakshmanan) * [Part 1: How to create and deploy a Kubeflow Machine Learning Pipeline](https://medium.com/data-science/how-to-create-and-deploy-a-kubeflow-machine-learning-pipeline-part-1-efea7a4b650f) diff --git a/RELEASE.md b/RELEASE.md index f61a73dfd1d..f55021c5799 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -4,12 +4,12 @@ - [Schedule](#schedule) - [Release Tags and Branches](#release-tags-and-branches) - [Contributor Instructions](#contributor-instructions) - - [Cherry picking pull requests to release branch](#cherry-picking-pull-requests-to-release-branch) + - [Cherry-picking pull requests to release branch](#cherry-picking-pull-requests-to-release-branch) - [Option - (Recommended) cherrypick-approved label](#option---recommended-cherrypick-approved-label) - [Option - git cherry-pick](#option---git-cherry-pick) - [Release Manager Instructions](#release-manager-instructions) - [Common Prerequisites](#common-prerequisites) - - [Cutting a release branch (Optional)](#cutting-a-release-branch-optional) + - [Cutting a release branch (Optional)](#cutting-a-release-branch) - [Before release](#before-release) - [Releasing from release branch](#releasing-from-release-branch) - [Release KFP Python Packages](#releasing-kfp-python-packages) @@ -97,7 +97,10 @@ if you only want to use or contribute to this repo. 1. Clone github.com/kubeflow/pipelines repo into `$KFP_REPO`. 2. `cd $KFP_REPO` -### Cutting a release branch (Optional) +### Cutting a release branch + +KFP releases are required to be cut from a release branch. This includes all python packages. +Release branches must be scoped to a minor version. The following components should always have all future patch versions tagged and released from their respective minor branch. For example, you would cut a release for SDK 2.14.1 from the release-2.14 branch, the same with KFP backend 2.14.4, kfp-kubernetes 2.14.2, and so on. 1. Choose a good commit on master branch with commit hash as `$COMMIT_SHA`. 1. Choose the next release branch's `$MINOR_VERSION` in format `x.y`, e.g. `1.0`, `1.1`... @@ -113,9 +116,9 @@ if you only want to use or contribute to this repo. ### Before release Do the following things before a release: -1. **(Do this step only when releasing from a NON-master release branch)** +1. Cherry-picking - Note: Instead of following this step to cherry pick all PRs, you can also manually cherry pick commits from master branch to release branch, if the number of PRs to cherry pick is minimal. Command for manual cherry pick: + Note: Instead of following this step to cherry-pick all PRs, you can also manually cherry-pick commits from the master branch to release branch, if the number of PRs to cherry-pick is minimal. 
Command for manual cherry-pick: ``` git cherry-pick @@ -177,7 +180,6 @@ If not, contact the KFP team to determine if the failure(s) would block the rele ### Releasing from release branch -Note, when releasing from master, all the below mentions of "release branch" means master branch. 1. Choose the release's complete `$VERSION` following semantic versioning, e.g. - `1.0.0-rc.1` @@ -193,15 +195,15 @@ Note, when releasing from master, all the below mentions of "release branch" mea ```bash cd ./test/release && TAG=$VERSION BRANCH=$BRANCH make release ``` - - It will prompt you whether to push it to release branch. Press `y` and hit `Enter`. + This script updates the version values for various manifests, and generated code. + Once finished, it will prompt you whether to push it to release branch. You can inspect the changes by navigating to the temporary directory it creates. Once you are comfortable with the changes, press `y` and hit `Enter`. Note, the script will clone kubeflow/pipelines repo into a temporary location on your computer, make those changes and attempt to push to upstream, so that it won't interfere with your current git repo. > [!Note] > If you see error "docker.sock: connect: permission error", you need to [allow managing docker as a non-root user](https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user). -1. Build the release images by using the [Build images from sources](https://github.com/kubeflow/pipelines/actions/workflows/image-builds.yml). +1. Build the release images by using the [Build images from sources](https://github.com/kubeflow/pipelines/actions/workflows/image-builds-release.yml). The target tag should be `$VERSION`. @@ -209,51 +211,107 @@ The target tag should be `$VERSION`. ### Releasing KFP Python Packages -1. Release `kfp-server-api` python packages to PyPI. +All Python packages must be released with wheel and source packages. When doing a minor release, you *must* make a release for all Python packages as well, even if there are no new changes there. This includes: + +* kfp-pipeline-spec +* kfp +* kfp-kubernetes +* kfp-server-api + + +> [!Note] +> When making a release, if something goes wrong, always yank the release in pypi, **do not delete** the package and try to re-upload it with the same version, pypi won't let you do this even though it lets you delete the package. In such an event, yank the release and do a new release with a new patch version. +When performing these releases, you should adhere to the order presented below. + +1. Release `kfp-pipeline-spec` Python packages to PyPI. + Update the version in `setup.py` found in `api/v2alpha1/python/setup.py`. ```bash - git checkout $BRANCH - git pull upstream $BRANCH - cd backend/api/v2beta1/python_http_client - rm -r dist - python3 setup.py --quiet sdist - python3 -m twine upload dist/* + git checkout -b release-X.Y + pip3 install twine --user + cd api + make python + cd v2alpha1/python + twine check dist/* + twine upload dist/* ``` -1. Release `kfp` python packages to PyPI. (Note: Please skip this step for backend release, this step will be handled by SDK release.) +1. Release `kfp` Python packages to PyPI. - Update the SDK version in `__init__.py` and `readthedocs` `versions.json`, example PR [here](https://github.com/kubeflow/pipelines/pull/11715/files). + Update the SDK version in `version.py` and `readthedocs` `versions.json`, example PR [here](https://github.com/kubeflow/pipelines/pull/11715/files). 
```bash + git checkout -b release-X.Y pip3 install twine --user - cd sdk/python - ./build.sh kfp-$VERSION.tar.gz - python3 -m twine upload kfp-$VERSION.tar.gz + cd sdk + make python + cd python + twine check dist/* + twine upload dist/* ``` !!! The file name must contain the version. See -1. Release `kfp-kubernetes` python packages to PyPI. (Note: Please skip this step for backend release, this step will be handled by SDK release.) +**Update Readthedocs** + +* Create a GitHub release for KFP SDK release. [Here's an example](https://github.com/kubeflow/pipelines/releases/tag/sdk-2.14.1) reference for a template. + * When creating a release create a new tag `sdk-x.y.z` +* Navigate to the readthedocs website [here](https://app.readthedocs.org/projects/kubeflow-pipelines/), login if needed +* You should see a new build under "Versions" section for this new tag, ensure it succeeds. +* Click "Settings" +* Set the default version to `sdk-x.y.z` (the version we just built and released) +* Set the default branch to be the release branch `release-x.y.z` + +1. Release `kfp-kubernetes` Python packages to PyPI. Update the KFP Kubernetes SDK version in `__init__.py` and `readthedocs` `versions.json`, example PR [here](https://github.com/kubeflow/pipelines/pull/11380). ```bash - export KFP_KUBERNETES_VERSION= + git checkout -b release-X.Y pip3 install twine --user + cd kubernetes_platform + make python + cd python + twine check dist/* + twine upload dist/* + + # Cut release-the-docs branch + export KFP_KUBERNETES_VERSION= # Set this to the version being released x.y.z cd kubernetes_platform/python ./create_release_branch.sh ``` + + Follow the output push instructions to **commit and push the read the docs release branch to KFP**. + +> [!Note] +> Note that kfp-kubernetes package has a separate readthedocs site and requires that a new branch be pushed for readthedocs to be able to host multiple pages from the same repo. +> Every new patch version for this package requires us to create a new release branch purely for readthedocs purposes. However always cut this branch from the `release-X.Y` branch. + +**Update Readthedocs** - Follow the output push instructions to **commit and push the branch to KFP**, then do the following: +Once the branch is updated, you need to add this version to readthedocs. Follow these steps: + +* Navigate to the package section on the readthedocs website [here](https://app.readthedocs.org/projects/kfp-kubernetes/). +* Click "Add version" +* Enter the branch `kfp-kubernetes-x.y.z` where x.y.z is the version you released, if you pushed it and it's not showing up, press the "Resync Versions" button and try again +* Add this version, navigate back to the "Versions" section, and you should see a build, make sure it succeeds before moving onto the next section. +* Go to Settings +* Set this version as the default version. +* Click Save +* Click "View Docs" to navigate to the docs page and ensure the new version shows up as the default. + +1. Release `kfp-server-api` Python packages to PyPI. ```bash - # set this to the appropriate version that matches what was set in __init__.py earlier - export KFP_KUBERNETES_VERSION= - cd kubernetes_platform/python - ./release.sh + git checkout $BRANCH + git pull upstream $BRANCH + cd backend/api/v2beta1/python_http_client + rm -r dist + python3 setup.py --quiet sdist + python3 -m twine upload dist/* ``` - - Note that this script will build the package, test install, and push to PyPi. + +Push the changes to the `release-X.Y` branch. 
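As a post-publish sanity check (a minimal sketch, not part of the documented steps; the version pins are placeholders for whatever was just uploaded, and each package may carry its own version), the packages can be verified from PyPI in a throwaway virtual environment:

```bash
# Verify that the freshly published packages install and import cleanly from PyPI.
# Replace X.Y.Z with the version(s) that were just released.
python3 -m venv /tmp/kfp-release-check
. /tmp/kfp-release-check/bin/activate
pip install "kfp==X.Y.Z" "kfp-kubernetes==X.Y.Z" "kfp-server-api==X.Y.Z"
python -c "import kfp; print(kfp.__version__)"
deactivate
```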
### Create GitHub Release @@ -269,7 +327,7 @@ fill in the description. Detailed steps:
        To deploy Kubeflow Pipelines in an existing cluster, follow the instruction in [here](https://www.kubeflow.org/docs/components/pipelines/operator-guides/installation/)
 
-       Install python SDK (python 3.9 above) by running:
+       Install the Python SDK (Python 3.9 or above) by running:
 
        ```bash
        python3 -m pip install kfp kfp-server-api --upgrade
@@ -284,7 +342,7 @@ fill in the description. Detailed steps:
        
         To deploy Kubeflow Pipelines in an existing cluster, follow the instruction in [here](https://www.kubeflow.org/docs/components/pipelines/operator-guides/installation/).
 
-        Install kfp-server-api package (python 3.9 above) by running:
+        Install the kfp-server-api package (Python 3.9 or above) by running:
 
         ```bash
         python3 -m pip install kfp-server-api==$VERSION --upgrade
@@ -294,13 +352,12 @@ fill in the description. Detailed steps:
         * [Upgrade Notes with notices and breaking changes](https://www.kubeflow.org/docs/components/pipelines/installation/upgrade/)
         * [Change Log](https://github.com/kubeflow/pipelines/blob/$VERSION/CHANGELOG.md)
 
-        NOTE, kfp python SDK is **NOT** included and released separately.
+        NOTE: the kfp Python SDK is **NOT** included here; it is released separately.
        
### Sync Master Branch with latest release -1. **(Do this step only when releasing from a NON-master release branch)** -Update master branch to the same version and include latest changelog: +1. Update master branch to the same version and include latest changelog: ```bash git checkout master @@ -333,3 +390,30 @@ Update master branch to the same version and include latest changelog: ## Release Process Development Please refer to [./test/release](./test/release). + +## Versioning Policy in KFP + +Starting from version **2.14**, all major and minor versions (X.Y) of the Kubeflow Pipelines (KFP) components are aligned. The following components are included in this alignment: + +* **KFP Backend / UI** +* **KFP Python SDK** +* **KFP Python Kubernetes Platform SDK** +* **KFP Python Pipeline Specification** +* **KFP Server API** + +### Versioning and Compatibility Policy + +* **API Compatibility:** + All KFP components sharing the same major and minor version (X.Y) are guaranteed to be API-compatible. + +* **Backward Compatibility:** + The KFP project aims to maintain backward compatibility within a given **major version** for all Python SDK packages, though there may be exceptions at times. + + Specifically: + + * Newer versions of the KFP Python SDK within the same major release (e.g., 2.x) should continue to function with older versions of the KFP backend. + * However, newly introduced features in a later SDK minor version may require a matching or newer backend version to function correctly. For example: + * A feature introduced in `kfp==2.15` is not guaranteed to be supported by a `2.14` backend. In such cases, upgrading the backend to version `2.15` or later is necessary. + +* **Patch Releases:** + Patch versions (X.Y.Z) may include bug fixes, maintenance updates, and minor feature enhancements. These changes must not break API compatibility or violate the support guarantees outlined above. diff --git a/SECURITY.md b/SECURITY.md index 1e8a799faf1..166b2bdb2fa 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -1,5 +1,64 @@ -# Private Security Vulnerability Reporting +# Security Policy -When reporting a vulnerability, please include a description of the issue, the steps you took to create the issue, affected versions, and, if known, mitigations for the issue. If the issue is confirmed as a vulnerability, we will open a Security Advisory. This project follows a 90 day disclosure timeline. +## Supported Versions -To report a security issue, follow [these instructions](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability). +Kubeflow Pipelines versions are expressed as `X.Y.Z`, where X is the major version, +Y is the minor version, and Z is the patch version, following the +[Semantic Versioning](https://semver.org/) terminology. + +The Kubeflow Pipelines project maintains release branches for the most recent two minor releases. +Applicable fixes, including security fixes, may be backported to those two release branches, +depending on severity and feasibility. + +Users are encouraged to stay updated with the latest releases to benefit from security patches and +improvements. + +## Reporting a Vulnerability + +We're extremely grateful for security researchers and users that report vulnerabilities to the +Kubeflow Open Source Community. All reports are thoroughly investigated by Kubeflow projects owners. 
+ +You can use the following ways to report security vulnerabilities privately: + +- Using the Kubeflow Pipelines repository [GitHub Security Advisory](https://github.com/kubeflow/pipelines/security/advisories/new). +- Using our private Kubeflow Steering Committee mailing list: ksc@kubeflow.org. + +Please provide detailed information to help us understand and address the issue promptly. + +## Disclosure Process + +**Acknowledgment**: We will acknowledge receipt of your report within 10 business days. + +**Assessment**: The Kubeflow projects owners will investigate the reported issue to determine its +validity and severity. + +**Resolution**: If the issue is confirmed, we will work on a fix and prepare a release. + +**Notification**: Once a fix is available, we will notify the reporter and coordinate a public +disclosure. + +**Public Disclosure**: Details of the vulnerability and the fix will be published in the project's +release notes and communicated through appropriate channels. + +## Prevention Mechanisms + +Kubeflow Pipelines employs several measures to prevent security issues: + +**Code Reviews**: All code changes are reviewed by maintainers to ensure code quality and security. + +**Dependency Management**: Regular updates and monitoring of dependencies (e.g. Dependabot) to +address known vulnerabilities. + +**Continuous Integration**: Automated testing and security checks are integrated into the CI/CD pipeline. + +**Image Scanning**: Container images are scanned for vulnerabilities. + +## Communication Channels + +For the general questions please join the following resources: + +- Kubeflow [Slack channels](https://www.kubeflow.org/docs/about/community/#kubeflow-slack-channels). + +- Kubeflow discuss [mailing list](https://www.kubeflow.org/docs/about/community/#kubeflow-mailing-list). + +Please **do not report** security vulnerabilities through public channels. diff --git a/VERSION b/VERSION index fad066f801a..ecac8bfdcc1 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -2.5.0 \ No newline at end of file +2.14.3 \ No newline at end of file diff --git a/api/Makefile b/api/Makefile index aaa8ec8c4fe..a000abe635e 100644 --- a/api/Makefile +++ b/api/Makefile @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Contact one of chensun, HumairAK, zijianjoy if this remote image needs an update. -PREBUILT_REMOTE_IMAGE=ghcr.io/kubeflow/kfp-api-generator:1.1 +# Contact one of chensun, HumairAK if this remote image needs an update. +PREBUILT_REMOTE_IMAGE=ghcr.io/kubeflow/kfp-api-generator:master .PHONY: all all: golang python @@ -36,14 +36,22 @@ clean-go: rm -rf v2alpha1/go rm -f v2alpha1/google/rpc/status.proto -# Build and locally install Python package. +# Build Python package using pre-built image .PHONY: python -python: v2alpha1/pipeline_spec.proto v2alpha1/google/rpc/status.proto - python3 v2alpha1/python/generate_proto.py && cd v2alpha1/python && python3 setup.py bdist_wheel +python: python fetch-protos + docker run --interactive --rm \ + --user $$(id -u):$$(id -g) \ + -e HOME=/tmp \ + -v "$$(pwd)/..":"/go/src/github.com/kubeflow/pipelines":z \ + $(PREBUILT_REMOTE_IMAGE) \ + sh -c 'cd /go/src/github.com/kubeflow/pipelines/api/v2alpha1/python && \ + python3 -m pip install --user --break-system-packages -r requirements.txt && \ + python3 generate_proto.py && \ + python3 setup.py sdist bdist_wheel --dist-dir ./dist' # Build and locally install Python package using editable mode for development. 
.PHONY: python-dev -python-dev: v2alpha1/pipeline_spec.proto v2alpha1/google/rpc/status.proto +python-dev: v2alpha1/pipeline_spec.proto fetch-protos python3 v2alpha1/python/generate_proto.py && cd v2alpha1/python && pip install -e . # Delete all generated Python packages @@ -52,12 +60,13 @@ clean-python: rm -rf v2alpha1/python/build rm -rf v2alpha1/python/dist rm -rf v2alpha1/python/kfp_pipeline_spec.egg-info + rm -rf v2alpha1/google rm -f v2alpha1/python/kfp/pipeline_spec/pipeline_spec_pb2.py rm -f v2alpha1/google/rpc/status.proto -########################## +########################################### # The following are IMPLEMENTATION DETAILS. -########################## +########################################### # Generates proto packages locally, this should only be called: # * during development @@ -65,7 +74,7 @@ clean-python: .PHONY: generate generate: go_pipelinespec go_cachekey -go_pipelinespec: v2alpha1/pipeline_spec.proto v2alpha1/google/rpc/status.proto +go_pipelinespec: v2alpha1/pipeline_spec.proto fetch-protos mkdir -p v2alpha1/go/pipelinespec cd v2alpha1 && protoc -I=. \ --go_out=go/pipelinespec \ @@ -79,12 +88,32 @@ go_cachekey: v2alpha1/pipeline_spec.proto v2alpha1/cache_key.proto --go_opt=paths=source_relative \ cache_key.proto -# Fetch dependency proto -v2alpha1/google/rpc/status.proto: - mkdir -p v2alpha1/google/rpc - wget -O v2alpha1/google/rpc/status.proto https://raw.githubusercontent.com/googleapis/googleapis/047d3a8ac7f75383855df0166144f891d7af08d9/google/rpc/status.proto +######################################### +# The following are dependencies +# Required for compiling the proto files +######################################### + +GOOGLEAPIS_COMMIT ?= fecd7d35f46753b45bf4519f6342495a181740c9 +PROTOBUF_TAG ?= v26.0 +PROTO_DST_DIR := v2alpha1/google +TMP_PROTOBUF_DIR := /tmp/protobuf-src + +.PHONY: fetch-protos fetch-googleapis fetch-protobuf protoc-gen-go clean-protobuf-tmp + +fetch-protos: fetch-googleapis fetch-protobuf + +fetch-googleapis: + @echo "Downloading google/rpc/status.proto from googleapis@$(GOOGLEAPIS_COMMIT)..." 
+ mkdir -p $(PROTO_DST_DIR)/rpc + wget -qO $(PROTO_DST_DIR)/rpc/status.proto https://raw.githubusercontent.com/googleapis/googleapis/$(GOOGLEAPIS_COMMIT)/google/rpc/status.proto + +fetch-protobuf: clean-protobuf-tmp + @git clone --depth 1 --branch $(PROTOBUF_TAG) https://github.com/protocolbuffers/protobuf.git $(TMP_PROTOBUF_DIR) + @mkdir -p $(PROTO_DST_DIR)/protobuf + @cp $(TMP_PROTOBUF_DIR)/src/google/protobuf/*.proto $(PROTO_DST_DIR)/protobuf/ + +clean-protobuf-tmp: + @rm -rf $(TMP_PROTOBUF_DIR) -# protoc-gen-go is already installed in api-generator image -.PHONY: protoc-gen-go protoc-gen-go: go install google.golang.org/protobuf/cmd/protoc-gen-go diff --git a/api/go.mod b/api/go.mod index 58063b5cad7..a8528b0a4aa 100644 --- a/api/go.mod +++ b/api/go.mod @@ -1,12 +1,14 @@ module github.com/kubeflow/pipelines/api -go 1.23 +go 1.24.6 require ( - google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 - google.golang.org/protobuf v1.33.0 + google.golang.org/genproto/googleapis/rpc v0.0.0-20250715232539-7130f93afb79 + google.golang.org/protobuf v1.36.6 ) +require github.com/google/go-cmp v0.6.0 // indirect + replace ( github.com/mattn/go-sqlite3 => github.com/mattn/go-sqlite3 v1.14.18 golang.org/x/net => golang.org/x/net v0.33.0 diff --git a/api/go.sum b/api/go.sum index cdcf5cc8c39..aa46e9af976 100644 --- a/api/go.sum +++ b/api/go.sum @@ -1,8 +1,6 @@ -github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 h1:KpwkzHKEF7B9Zxg18WzOa7djJ+Ha5DzthMyZYQfEn2A= -google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU= -google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= -google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250715232539-7130f93afb79 h1:1ZwqphdOdWYXsUHgMpU/101nCtf/kSp9hOrcvFsnl10= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250715232539-7130f93afb79/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= +google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= +google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= diff --git a/api/v2alpha1/cache_key.proto b/api/v2alpha1/cache_key.proto index a8151b37ec2..afdf6e2004d 100644 --- a/api/v2alpha1/cache_key.proto +++ b/api/v2alpha1/cache_key.proto @@ -32,6 +32,7 @@ message CacheKey { message ContainerSpec { string image = 1; repeated string cmdArgs = 2; + repeated string pvcNames = 3; } message ArtifactNameList { diff --git a/api/v2alpha1/go/cachekey/cache_key.pb.go b/api/v2alpha1/go/cachekey/cache_key.pb.go index 73f6320ff8e..763643c4cbe 100644 --- a/api/v2alpha1/go/cachekey/cache_key.pb.go +++ b/api/v2alpha1/go/cachekey/cache_key.pb.go @@ -14,8 +14,8 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: cache_key.proto package cachekey @@ -27,6 +27,7 @@ import ( structpb "google.golang.org/protobuf/types/known/structpb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -37,26 +38,23 @@ const ( ) type CacheKey struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - InputArtifactNames map[string]*ArtifactNameList `protobuf:"bytes,1,rep,name=inputArtifactNames,proto3" json:"inputArtifactNames,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + state protoimpl.MessageState `protogen:"open.v1"` + InputArtifactNames map[string]*ArtifactNameList `protobuf:"bytes,1,rep,name=inputArtifactNames,proto3" json:"inputArtifactNames,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // Deprecated: Marked as deprecated in cache_key.proto. - InputParameters map[string]*pipelinespec.Value `protobuf:"bytes,2,rep,name=inputParameters,proto3" json:"inputParameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - OutputArtifactsSpec map[string]*pipelinespec.RuntimeArtifact `protobuf:"bytes,3,rep,name=outputArtifactsSpec,proto3" json:"outputArtifactsSpec,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - OutputParametersSpec map[string]string `protobuf:"bytes,4,rep,name=outputParametersSpec,proto3" json:"outputParametersSpec,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + InputParameters map[string]*pipelinespec.Value `protobuf:"bytes,2,rep,name=inputParameters,proto3" json:"inputParameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + OutputArtifactsSpec map[string]*pipelinespec.RuntimeArtifact `protobuf:"bytes,3,rep,name=outputArtifactsSpec,proto3" json:"outputArtifactsSpec,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + OutputParametersSpec map[string]string `protobuf:"bytes,4,rep,name=outputParametersSpec,proto3" json:"outputParametersSpec,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` ContainerSpec *ContainerSpec `protobuf:"bytes,5,opt,name=containerSpec,proto3" json:"containerSpec,omitempty"` - InputParameterValues map[string]*structpb.Value `protobuf:"bytes,6,rep,name=input_parameter_values,json=inputParameterValues,proto3" json:"input_parameter_values,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + InputParameterValues map[string]*structpb.Value `protobuf:"bytes,6,rep,name=input_parameter_values,json=inputParameterValues,proto3" json:"input_parameter_values,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *CacheKey) Reset() { *x = CacheKey{} - if protoimpl.UnsafeEnabled { - mi := &file_cache_key_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_cache_key_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *CacheKey) String() string { @@ -67,7 +65,7 @@ func (*CacheKey) ProtoMessage() {} func (x *CacheKey) ProtoReflect() protoreflect.Message { mi := 
&file_cache_key_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -126,21 +124,19 @@ func (x *CacheKey) GetInputParameterValues() map[string]*structpb.Value { } type ContainerSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Image string `protobuf:"bytes,1,opt,name=image,proto3" json:"image,omitempty"` + CmdArgs []string `protobuf:"bytes,2,rep,name=cmdArgs,proto3" json:"cmdArgs,omitempty"` + PvcNames []string `protobuf:"bytes,3,rep,name=pvcNames,proto3" json:"pvcNames,omitempty"` unknownFields protoimpl.UnknownFields - - Image string `protobuf:"bytes,1,opt,name=image,proto3" json:"image,omitempty"` - CmdArgs []string `protobuf:"bytes,2,rep,name=cmdArgs,proto3" json:"cmdArgs,omitempty"` + sizeCache protoimpl.SizeCache } func (x *ContainerSpec) Reset() { *x = ContainerSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_cache_key_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_cache_key_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ContainerSpec) String() string { @@ -151,7 +147,7 @@ func (*ContainerSpec) ProtoMessage() {} func (x *ContainerSpec) ProtoReflect() protoreflect.Message { mi := &file_cache_key_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -180,21 +176,25 @@ func (x *ContainerSpec) GetCmdArgs() []string { return nil } +func (x *ContainerSpec) GetPvcNames() []string { + if x != nil { + return x.PvcNames + } + return nil +} + type ArtifactNameList struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + ArtifactNames []string `protobuf:"bytes,1,rep,name=artifactNames,proto3" json:"artifactNames,omitempty"` unknownFields protoimpl.UnknownFields - - ArtifactNames []string `protobuf:"bytes,1,rep,name=artifactNames,proto3" json:"artifactNames,omitempty"` + sizeCache protoimpl.SizeCache } func (x *ArtifactNameList) Reset() { *x = ArtifactNameList{} - if protoimpl.UnsafeEnabled { - mi := &file_cache_key_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_cache_key_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ArtifactNameList) String() string { @@ -205,7 +205,7 @@ func (*ArtifactNameList) ProtoMessage() {} func (x *ArtifactNameList) ProtoReflect() protoreflect.Message { mi := &file_cache_key_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -229,105 +229,52 @@ func (x *ArtifactNameList) GetArtifactNames() []string { var File_cache_key_proto protoreflect.FileDescriptor -var file_cache_key_proto_rawDesc = []byte{ - 0x0a, 0x0f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x6b, 0x65, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x12, 0x0c, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x1a, - 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, - 0x2f, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x2e, 0x70, 
0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x13, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x22, 0x8a, 0x08, 0x0a, 0x08, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4b, 0x65, 0x79, 0x12, - 0x5e, 0x0a, 0x12, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, - 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x6d, 0x6c, - 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65, - 0x4b, 0x65, 0x79, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, - 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x12, 0x69, 0x6e, 0x70, - 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x12, - 0x59, 0x0a, 0x0f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4b, 0x65, 0x79, - 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0f, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x61, 0x0a, 0x13, 0x6f, 0x75, - 0x74, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x53, 0x70, 0x65, - 0x63, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4b, 0x65, 0x79, 0x2e, - 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x53, - 0x70, 0x65, 0x63, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x13, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, - 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x12, 0x64, 0x0a, - 0x14, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x73, 0x53, 0x70, 0x65, 0x63, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x6d, 0x6c, - 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65, - 0x4b, 0x65, 0x79, 0x2e, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x73, 0x53, 0x70, 0x65, 0x63, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x14, 0x6f, - 0x75, 0x74, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x53, - 0x70, 0x65, 0x63, 0x12, 0x41, 0x0a, 0x0d, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, - 0x53, 0x70, 0x65, 0x63, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x6d, 0x6c, 0x5f, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, - 0x6e, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x52, 0x0d, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, - 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x12, 0x66, 0x0a, 0x16, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, - 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4b, 0x65, 0x79, 0x2e, 0x49, - 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x14, 0x69, 0x6e, 0x70, 
0x75, 0x74, 0x50, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x1a, 0x65, - 0x0a, 0x17, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4e, - 0x61, 0x6d, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x34, 0x0a, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x6d, 0x6c, 0x5f, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, - 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x57, 0x0a, 0x14, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, - 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, - 0x29, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, - 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x65, - 0x0a, 0x18, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, - 0x73, 0x53, 0x70, 0x65, 0x63, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, - 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x33, 0x0a, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x6d, 0x6c, - 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69, - 0x6d, 0x65, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x47, 0x0a, 0x19, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x50, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x53, 0x70, 0x65, 0x63, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x5f, - 0x0a, 0x19, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, - 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2c, 0x0a, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, - 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, - 0x3f, 0x0a, 0x0d, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, - 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6d, 0x64, 0x41, 0x72, 0x67, - 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6d, 0x64, 0x41, 0x72, 0x67, 0x73, - 0x22, 0x38, 0x0a, 0x10, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, - 0x4c, 0x69, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x0d, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, - 0x4e, 
0x61, 0x6d, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0d, 0x61, 0x72, 0x74, - 0x69, 0x66, 0x61, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x42, 0x38, 0x5a, 0x36, 0x67, 0x69, - 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, - 0x77, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x61, 0x70, 0x69, 0x2f, - 0x76, 0x32, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2f, 0x67, 0x6f, 0x2f, 0x63, 0x61, 0x63, 0x68, - 0x65, 0x6b, 0x65, 0x79, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_cache_key_proto_rawDesc = "" + + "\n" + + "\x0fcache_key.proto\x12\fml_pipelines\x1a\x1cgoogle/protobuf/struct.proto\x1a\x13pipeline_spec.proto\"\x8a\b\n" + + "\bCacheKey\x12^\n" + + "\x12inputArtifactNames\x18\x01 \x03(\v2..ml_pipelines.CacheKey.InputArtifactNamesEntryR\x12inputArtifactNames\x12Y\n" + + "\x0finputParameters\x18\x02 \x03(\v2+.ml_pipelines.CacheKey.InputParametersEntryB\x02\x18\x01R\x0finputParameters\x12a\n" + + "\x13outputArtifactsSpec\x18\x03 \x03(\v2/.ml_pipelines.CacheKey.OutputArtifactsSpecEntryR\x13outputArtifactsSpec\x12d\n" + + "\x14outputParametersSpec\x18\x04 \x03(\v20.ml_pipelines.CacheKey.OutputParametersSpecEntryR\x14outputParametersSpec\x12A\n" + + "\rcontainerSpec\x18\x05 \x01(\v2\x1b.ml_pipelines.ContainerSpecR\rcontainerSpec\x12f\n" + + "\x16input_parameter_values\x18\x06 \x03(\v20.ml_pipelines.CacheKey.InputParameterValuesEntryR\x14inputParameterValues\x1ae\n" + + "\x17InputArtifactNamesEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x124\n" + + "\x05value\x18\x02 \x01(\v2\x1e.ml_pipelines.ArtifactNameListR\x05value:\x028\x01\x1aW\n" + + "\x14InputParametersEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12)\n" + + "\x05value\x18\x02 \x01(\v2\x13.ml_pipelines.ValueR\x05value:\x028\x01\x1ae\n" + + "\x18OutputArtifactsSpecEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x123\n" + + "\x05value\x18\x02 \x01(\v2\x1d.ml_pipelines.RuntimeArtifactR\x05value:\x028\x01\x1aG\n" + + "\x19OutputParametersSpecEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + + "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\x1a_\n" + + "\x19InputParameterValuesEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12,\n" + + "\x05value\x18\x02 \x01(\v2\x16.google.protobuf.ValueR\x05value:\x028\x01\"[\n" + + "\rContainerSpec\x12\x14\n" + + "\x05image\x18\x01 \x01(\tR\x05image\x12\x18\n" + + "\acmdArgs\x18\x02 \x03(\tR\acmdArgs\x12\x1a\n" + + "\bpvcNames\x18\x03 \x03(\tR\bpvcNames\"8\n" + + "\x10ArtifactNameList\x12$\n" + + "\rartifactNames\x18\x01 \x03(\tR\rartifactNamesB8Z6github.com/kubeflow/pipelines/api/v2alpha1/go/cachekeyb\x06proto3" var ( file_cache_key_proto_rawDescOnce sync.Once - file_cache_key_proto_rawDescData = file_cache_key_proto_rawDesc + file_cache_key_proto_rawDescData []byte ) func file_cache_key_proto_rawDescGZIP() []byte { file_cache_key_proto_rawDescOnce.Do(func() { - file_cache_key_proto_rawDescData = protoimpl.X.CompressGZIP(file_cache_key_proto_rawDescData) + file_cache_key_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_cache_key_proto_rawDesc), len(file_cache_key_proto_rawDesc))) }) return file_cache_key_proto_rawDescData } var file_cache_key_proto_msgTypes = make([]protoimpl.MessageInfo, 8) -var file_cache_key_proto_goTypes = []interface{}{ +var file_cache_key_proto_goTypes = []any{ (*CacheKey)(nil), // 0: ml_pipelines.CacheKey (*ContainerSpec)(nil), // 1: ml_pipelines.ContainerSpec (*ArtifactNameList)(nil), // 2: 
ml_pipelines.ArtifactNameList @@ -363,49 +310,11 @@ func file_cache_key_proto_init() { if File_cache_key_proto != nil { return } - if !protoimpl.UnsafeEnabled { - file_cache_key_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CacheKey); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_cache_key_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ContainerSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_cache_key_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ArtifactNameList); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_cache_key_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_cache_key_proto_rawDesc), len(file_cache_key_proto_rawDesc)), NumEnums: 0, NumMessages: 8, NumExtensions: 0, @@ -416,7 +325,6 @@ func file_cache_key_proto_init() { MessageInfos: file_cache_key_proto_msgTypes, }.Build() File_cache_key_proto = out.File - file_cache_key_proto_rawDesc = nil file_cache_key_proto_goTypes = nil file_cache_key_proto_depIdxs = nil } diff --git a/api/v2alpha1/go/pipelinespec/pipeline_spec.pb.go b/api/v2alpha1/go/pipelinespec/pipeline_spec.pb.go index c7181324d9e..d809e54f8fe 100644 --- a/api/v2alpha1/go/pipelinespec/pipeline_spec.pb.go +++ b/api/v2alpha1/go/pipelinespec/pipeline_spec.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: pipeline_spec.proto package pipelinespec @@ -14,6 +14,7 @@ import ( structpb "google.golang.org/protobuf/types/known/structpb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -109,6 +110,9 @@ const ( // Indicates that a parameter is a TaskFinalStatus type; these types can only accept inputs // specified by InputParameterSpec.task_final_status ParameterType_TASK_FINAL_STATUS ParameterType_ParameterTypeEnum = 7 + // Indicates that a parameter is a TaskConfig type; these types are + // injected by the backend to provide the configuration set on the task. + ParameterType_TASK_CONFIG ParameterType_ParameterTypeEnum = 8 ) // Enum value maps for ParameterType_ParameterTypeEnum. @@ -122,6 +126,7 @@ var ( 5: "LIST", 6: "STRUCT", 7: "TASK_FINAL_STATUS", + 8: "TASK_CONFIG", } ParameterType_ParameterTypeEnum_value = map[string]int32{ "PARAMETER_TYPE_ENUM_UNSPECIFIED": 0, @@ -132,6 +137,7 @@ var ( "LIST": 5, "STRUCT": 6, "TASK_FINAL_STATUS": 7, + "TASK_CONFIG": 8, } ) @@ -162,6 +168,81 @@ func (ParameterType_ParameterTypeEnum) EnumDescriptor() ([]byte, []int) { return file_pipeline_spec_proto_rawDescGZIP(), []int{10, 0} } +type TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum int32 + +const ( + // Throwaway default value. + TaskConfigPassthroughType_NONE TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum = 0 + // Indicates that the resource limits and requests should be passed through to the external workload. + // Be cautious about also setting apply_to_task=true since that will double the resources required for + // the task. 
+ TaskConfigPassthroughType_RESOURCES TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum = 1 + // Indicates that the environment variables should be passed through to the external workload. + // It is generally safe to always set apply_to_task=true on this field. + TaskConfigPassthroughType_ENV TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum = 2 + // Indicates that the Kubernetes node affinity should be passed through to the external workload. + TaskConfigPassthroughType_KUBERNETES_AFFINITY TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum = 3 + // Indicates that the Kubernetes node tolerations should be passed through to the external workload. + TaskConfigPassthroughType_KUBERNETES_TOLERATIONS TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum = 4 + // Indicates that the Kubernetes node selector should be passed through to the external workload. + TaskConfigPassthroughType_KUBERNETES_NODE_SELECTOR TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum = 5 + // Indicates that the Kubernetes persistent volumes and ConfigMaps/Secrets mounted as volumes should be + // passed through to the external workload. Be sure that when setting apply_to_task=true, the volumes are + // ReadWriteMany or ReadOnlyMany or else the task's pod may not start. + // This is useful when the task prepares a shared volume for the external workload or defines output artifact + // (e.g. dsl.Model) that is created by the external workload. + TaskConfigPassthroughType_KUBERNETES_VOLUMES TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum = 6 +) + +// Enum value maps for TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum. +var ( + TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum_name = map[int32]string{ + 0: "NONE", + 1: "RESOURCES", + 2: "ENV", + 3: "KUBERNETES_AFFINITY", + 4: "KUBERNETES_TOLERATIONS", + 5: "KUBERNETES_NODE_SELECTOR", + 6: "KUBERNETES_VOLUMES", + } + TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum_value = map[string]int32{ + "NONE": 0, + "RESOURCES": 1, + "ENV": 2, + "KUBERNETES_AFFINITY": 3, + "KUBERNETES_TOLERATIONS": 4, + "KUBERNETES_NODE_SELECTOR": 5, + "KUBERNETES_VOLUMES": 6, + } +) + +func (x TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum) Enum() *TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum { + p := new(TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum) + *p = x + return p +} + +func (x TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum) Descriptor() protoreflect.EnumDescriptor { + return file_pipeline_spec_proto_enumTypes[2].Descriptor() +} + +func (TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum) Type() protoreflect.EnumType { + return &file_pipeline_spec_proto_enumTypes[2] +} + +func (x TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum.Descriptor instead. +func (TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum) EnumDescriptor() ([]byte, []int) { + return file_pipeline_spec_proto_rawDescGZIP(), []int{11, 0} +} + // An enum defines the trigger strategy of when the task will be ready to be // triggered. // ALL_UPSTREAM_TASKS_SUCCEEDED - all upstream tasks in succeeded state. 
@@ -206,11 +287,11 @@ func (x PipelineTaskSpec_TriggerPolicy_TriggerStrategy) String() string { } func (PipelineTaskSpec_TriggerPolicy_TriggerStrategy) Descriptor() protoreflect.EnumDescriptor { - return file_pipeline_spec_proto_enumTypes[2].Descriptor() + return file_pipeline_spec_proto_enumTypes[3].Descriptor() } func (PipelineTaskSpec_TriggerPolicy_TriggerStrategy) Type() protoreflect.EnumType { - return &file_pipeline_spec_proto_enumTypes[2] + return &file_pipeline_spec_proto_enumTypes[3] } func (x PipelineTaskSpec_TriggerPolicy_TriggerStrategy) Number() protoreflect.EnumNumber { @@ -219,7 +300,7 @@ func (x PipelineTaskSpec_TriggerPolicy_TriggerStrategy) Number() protoreflect.En // Deprecated: Use PipelineTaskSpec_TriggerPolicy_TriggerStrategy.Descriptor instead. func (PipelineTaskSpec_TriggerPolicy_TriggerStrategy) EnumDescriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{11, 1, 0} + return file_pipeline_spec_proto_rawDescGZIP(), []int{13, 1, 0} } type PipelineStateEnum_PipelineTaskState int32 @@ -303,11 +384,11 @@ func (x PipelineStateEnum_PipelineTaskState) String() string { } func (PipelineStateEnum_PipelineTaskState) Descriptor() protoreflect.EnumDescriptor { - return file_pipeline_spec_proto_enumTypes[3].Descriptor() + return file_pipeline_spec_proto_enumTypes[4].Descriptor() } func (PipelineStateEnum_PipelineTaskState) Type() protoreflect.EnumType { - return &file_pipeline_spec_proto_enumTypes[3] + return &file_pipeline_spec_proto_enumTypes[4] } func (x PipelineStateEnum_PipelineTaskState) Number() protoreflect.EnumNumber { @@ -316,33 +397,30 @@ func (x PipelineStateEnum_PipelineTaskState) Number() protoreflect.EnumNumber { // Deprecated: Use PipelineStateEnum_PipelineTaskState.Descriptor instead. func (PipelineStateEnum_PipelineTaskState) EnumDescriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{26, 0} + return file_pipeline_spec_proto_rawDescGZIP(), []int{28, 0} } // The spec of a pipeline job. type PipelineJob struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // Name of the job. + state protoimpl.MessageState `protogen:"open.v1"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // Name of the job. // User friendly display name DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` // Definition of the pipeline that is being executed. PipelineSpec *structpb.Struct `protobuf:"bytes,7,opt,name=pipeline_spec,json=pipelineSpec,proto3" json:"pipeline_spec,omitempty"` // The labels with user-defined metadata to organize PipelineJob. - Labels map[string]string `protobuf:"bytes,11,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Labels map[string]string `protobuf:"bytes,11,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // Runtime config of the pipeline. 
RuntimeConfig *PipelineJob_RuntimeConfig `protobuf:"bytes,12,opt,name=runtime_config,json=runtimeConfig,proto3" json:"runtime_config,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineJob) Reset() { *x = PipelineJob{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineJob) String() string { @@ -353,7 +431,7 @@ func (*PipelineJob) ProtoMessage() {} func (x *PipelineJob) ProtoReflect() protoreflect.Message { mi := &file_pipeline_spec_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -405,10 +483,7 @@ func (x *PipelineJob) GetRuntimeConfig() *PipelineJob_RuntimeConfig { // The spec of a pipeline. type PipelineSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The metadata of the pipeline. PipelineInfo *PipelineInfo `protobuf:"bytes,1,opt,name=pipeline_info,json=pipelineInfo,proto3" json:"pipeline_info,omitempty"` // The deployment config of the pipeline. @@ -419,21 +494,21 @@ type PipelineSpec struct { // The version of the schema. SchemaVersion string `protobuf:"bytes,5,opt,name=schema_version,json=schemaVersion,proto3" json:"schema_version,omitempty"` // The map of name to definition of all components used in this pipeline. - Components map[string]*ComponentSpec `protobuf:"bytes,8,rep,name=components,proto3" json:"components,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Components map[string]*ComponentSpec `protobuf:"bytes,8,rep,name=components,proto3" json:"components,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // The definition of the main pipeline. Execution of the pipeline is // completed upon the completion of this component. Root *ComponentSpec `protobuf:"bytes,9,opt,name=root,proto3" json:"root,omitempty"` // Optional field. The default root output directory of the pipeline. DefaultPipelineRoot string `protobuf:"bytes,10,opt,name=default_pipeline_root,json=defaultPipelineRoot,proto3" json:"default_pipeline_root,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineSpec) Reset() { *x = PipelineSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineSpec) String() string { @@ -444,7 +519,7 @@ func (*PipelineSpec) ProtoMessage() {} func (x *PipelineSpec) ProtoReflect() protoreflect.Message { mi := &file_pipeline_spec_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -510,32 +585,31 @@ func (x *PipelineSpec) GetDefaultPipelineRoot() string { // Definition of a component. 
type ComponentSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Definition of the input parameters and artifacts of the component. InputDefinitions *ComponentInputsSpec `protobuf:"bytes,1,opt,name=input_definitions,json=inputDefinitions,proto3" json:"input_definitions,omitempty"` // Definition of the output parameters and artifacts of the component. OutputDefinitions *ComponentOutputsSpec `protobuf:"bytes,2,opt,name=output_definitions,json=outputDefinitions,proto3" json:"output_definitions,omitempty"` // Either a DAG or a single execution. // - // Types that are assignable to Implementation: + // Types that are valid to be assigned to Implementation: // // *ComponentSpec_Dag // *ComponentSpec_ExecutorLabel Implementation isComponentSpec_Implementation `protobuf_oneof:"implementation"` // Supports platform-specific component features. SinglePlatformSpecs []*SinglePlatformSpec `protobuf:"bytes,5,rep,name=single_platform_specs,json=singlePlatformSpecs,proto3" json:"single_platform_specs,omitempty"` + // Specifies the task configurations that can be passed through to an external workload. + TaskConfigPassthroughs []*TaskConfigPassthrough `protobuf:"bytes,6,rep,name=task_config_passthroughs,json=taskConfigPassthroughs,proto3" json:"task_config_passthroughs,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ComponentSpec) Reset() { *x = ComponentSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ComponentSpec) String() string { @@ -546,7 +620,7 @@ func (*ComponentSpec) ProtoMessage() {} func (x *ComponentSpec) ProtoReflect() protoreflect.Message { mi := &file_pipeline_spec_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -575,23 +649,27 @@ func (x *ComponentSpec) GetOutputDefinitions() *ComponentOutputsSpec { return nil } -func (m *ComponentSpec) GetImplementation() isComponentSpec_Implementation { - if m != nil { - return m.Implementation +func (x *ComponentSpec) GetImplementation() isComponentSpec_Implementation { + if x != nil { + return x.Implementation } return nil } func (x *ComponentSpec) GetDag() *DagSpec { - if x, ok := x.GetImplementation().(*ComponentSpec_Dag); ok { - return x.Dag + if x != nil { + if x, ok := x.Implementation.(*ComponentSpec_Dag); ok { + return x.Dag + } } return nil } func (x *ComponentSpec) GetExecutorLabel() string { - if x, ok := x.GetImplementation().(*ComponentSpec_ExecutorLabel); ok { - return x.ExecutorLabel + if x != nil { + if x, ok := x.Implementation.(*ComponentSpec_ExecutorLabel); ok { + return x.ExecutorLabel + } } return "" } @@ -603,6 +681,13 @@ func (x *ComponentSpec) GetSinglePlatformSpecs() []*SinglePlatformSpec { return nil } +func (x *ComponentSpec) GetTaskConfigPassthroughs() []*TaskConfigPassthrough { + if x != nil { + return x.TaskConfigPassthroughs + } + return nil +} + type isComponentSpec_Implementation interface { isComponentSpec_Implementation() } @@ -621,23 +706,20 @@ func (*ComponentSpec_ExecutorLabel) isComponentSpec_Implementation() {} // A DAG contains multiple tasks. 
type DagSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The tasks inside the dag. - Tasks map[string]*PipelineTaskSpec `protobuf:"bytes,1,rep,name=tasks,proto3" json:"tasks,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Tasks map[string]*PipelineTaskSpec `protobuf:"bytes,1,rep,name=tasks,proto3" json:"tasks,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // Defines how the outputs of the dag are linked to the sub tasks. - Outputs *DagOutputsSpec `protobuf:"bytes,2,opt,name=outputs,proto3" json:"outputs,omitempty"` + Outputs *DagOutputsSpec `protobuf:"bytes,2,opt,name=outputs,proto3" json:"outputs,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DagSpec) Reset() { *x = DagSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DagSpec) String() string { @@ -648,7 +730,7 @@ func (*DagSpec) ProtoMessage() {} func (x *DagSpec) ProtoReflect() protoreflect.Message { mi := &file_pipeline_spec_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -679,23 +761,20 @@ func (x *DagSpec) GetOutputs() *DagOutputsSpec { // Definition of the output artifacts and parameters of the DAG component. type DagOutputsSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Name to the output artifact channel of the DAG. - Artifacts map[string]*DagOutputsSpec_DagOutputArtifactSpec `protobuf:"bytes,1,rep,name=artifacts,proto3" json:"artifacts,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Artifacts map[string]*DagOutputsSpec_DagOutputArtifactSpec `protobuf:"bytes,1,rep,name=artifacts,proto3" json:"artifacts,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // The name to the output parameter. 
- Parameters map[string]*DagOutputsSpec_DagOutputParameterSpec `protobuf:"bytes,2,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Parameters map[string]*DagOutputsSpec_DagOutputParameterSpec `protobuf:"bytes,2,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DagOutputsSpec) Reset() { *x = DagOutputsSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DagOutputsSpec) String() string { @@ -706,7 +785,7 @@ func (*DagOutputsSpec) ProtoMessage() {} func (x *DagOutputsSpec) ProtoReflect() protoreflect.Message { mi := &file_pipeline_spec_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -737,23 +816,20 @@ func (x *DagOutputsSpec) GetParameters() map[string]*DagOutputsSpec_DagOutputPar // Definition specification of the component input parameters and artifacts. type ComponentInputsSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Name to artifact input. - Artifacts map[string]*ComponentInputsSpec_ArtifactSpec `protobuf:"bytes,1,rep,name=artifacts,proto3" json:"artifacts,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Artifacts map[string]*ComponentInputsSpec_ArtifactSpec `protobuf:"bytes,1,rep,name=artifacts,proto3" json:"artifacts,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // Name to parameter input. - Parameters map[string]*ComponentInputsSpec_ParameterSpec `protobuf:"bytes,2,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Parameters map[string]*ComponentInputsSpec_ParameterSpec `protobuf:"bytes,2,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ComponentInputsSpec) Reset() { *x = ComponentInputsSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ComponentInputsSpec) String() string { @@ -764,7 +840,7 @@ func (*ComponentInputsSpec) ProtoMessage() {} func (x *ComponentInputsSpec) ProtoReflect() protoreflect.Message { mi := &file_pipeline_spec_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -795,23 +871,20 @@ func (x *ComponentInputsSpec) GetParameters() map[string]*ComponentInputsSpec_Pa // Definition specification of the component output parameters and artifacts. 
type ComponentOutputsSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Name to artifact output. - Artifacts map[string]*ComponentOutputsSpec_ArtifactSpec `protobuf:"bytes,1,rep,name=artifacts,proto3" json:"artifacts,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Artifacts map[string]*ComponentOutputsSpec_ArtifactSpec `protobuf:"bytes,1,rep,name=artifacts,proto3" json:"artifacts,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // Name to parameter output. - Parameters map[string]*ComponentOutputsSpec_ParameterSpec `protobuf:"bytes,2,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Parameters map[string]*ComponentOutputsSpec_ParameterSpec `protobuf:"bytes,2,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ComponentOutputsSpec) Reset() { *x = ComponentOutputsSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ComponentOutputsSpec) String() string { @@ -822,7 +895,7 @@ func (*ComponentOutputsSpec) ProtoMessage() {} func (x *ComponentOutputsSpec) ProtoReflect() protoreflect.Message { mi := &file_pipeline_spec_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -853,24 +926,21 @@ func (x *ComponentOutputsSpec) GetParameters() map[string]*ComponentOutputsSpec_ // The spec of task inputs. type TaskInputsSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // A map of input parameters which are small values, stored by the system and // can be queriable. - Parameters map[string]*TaskInputsSpec_InputParameterSpec `protobuf:"bytes,1,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Parameters map[string]*TaskInputsSpec_InputParameterSpec `protobuf:"bytes,1,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // A map of input artifacts. 
- Artifacts map[string]*TaskInputsSpec_InputArtifactSpec `protobuf:"bytes,2,rep,name=artifacts,proto3" json:"artifacts,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Artifacts map[string]*TaskInputsSpec_InputArtifactSpec `protobuf:"bytes,2,rep,name=artifacts,proto3" json:"artifacts,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *TaskInputsSpec) Reset() { *x = TaskInputsSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *TaskInputsSpec) String() string { @@ -881,7 +951,7 @@ func (*TaskInputsSpec) ProtoMessage() {} func (x *TaskInputsSpec) ProtoReflect() protoreflect.Message { mi := &file_pipeline_spec_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -912,30 +982,27 @@ func (x *TaskInputsSpec) GetArtifacts() map[string]*TaskInputsSpec_InputArtifact // The spec of task outputs. type TaskOutputsSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // A map of output parameters which are small values, stored by the system and // can be queriable. The output key is used // by [TaskInputsSpec.InputParameterSpec][] of the downstream task to specify // the data dependency. The same key will also be used by // [ExecutorInput.Inputs][] to reference the output parameter. - Parameters map[string]*TaskOutputsSpec_OutputParameterSpec `protobuf:"bytes,1,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Parameters map[string]*TaskOutputsSpec_OutputParameterSpec `protobuf:"bytes,1,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // A map of output artifacts. Keyed by output key. The output key is used // by [TaskInputsSpec.InputArtifactSpec][] of the downstream task to specify // the data dependency. The same key will also be used by // [ExecutorInput.Inputs][] to reference the output artifact. 
- Artifacts map[string]*TaskOutputsSpec_OutputArtifactSpec `protobuf:"bytes,2,rep,name=artifacts,proto3" json:"artifacts,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Artifacts map[string]*TaskOutputsSpec_OutputArtifactSpec `protobuf:"bytes,2,rep,name=artifacts,proto3" json:"artifacts,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *TaskOutputsSpec) Reset() { *x = TaskOutputsSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *TaskOutputsSpec) String() string { @@ -946,7 +1013,7 @@ func (*TaskOutputsSpec) ProtoMessage() {} func (x *TaskOutputsSpec) ProtoReflect() protoreflect.Message { mi := &file_pipeline_spec_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -980,18 +1047,16 @@ func (x *TaskOutputsSpec) GetArtifacts() map[string]*TaskOutputsSpec_OutputArtif // // Deprecated: Marked as deprecated in pipeline_spec.proto. type PrimitiveType struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PrimitiveType) Reset() { *x = PrimitiveType{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[9] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PrimitiveType) String() string { @@ -1002,7 +1067,7 @@ func (*PrimitiveType) ProtoMessage() {} func (x *PrimitiveType) ProtoReflect() protoreflect.Message { mi := &file_pipeline_spec_proto_msgTypes[9] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1020,18 +1085,16 @@ func (*PrimitiveType) Descriptor() ([]byte, []int) { // Represent parameter types. The wrapper is needed to give a namespace of // enum value so we don't need add `PARAMETER_TYPE_` prefix of each enum value. 
type ParameterType struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ParameterType) Reset() { *x = ParameterType{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ParameterType) String() string { @@ -1042,7 +1105,7 @@ func (*ParameterType) ProtoMessage() {} func (x *ParameterType) ProtoReflect() protoreflect.Message { mi := &file_pipeline_spec_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1057,12 +1120,98 @@ func (*ParameterType) Descriptor() ([]byte, []int) { return file_pipeline_spec_proto_rawDescGZIP(), []int{10} } -// The spec of a pipeline task. -type PipelineTaskSpec struct { - state protoimpl.MessageState +// Represents the task configurations that can be passed through to an external workload. +type TaskConfigPassthroughType struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache +} + +func (x *TaskConfigPassthroughType) Reset() { + *x = TaskConfigPassthroughType{} + mi := &file_pipeline_spec_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *TaskConfigPassthroughType) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TaskConfigPassthroughType) ProtoMessage() {} + +func (x *TaskConfigPassthroughType) ProtoReflect() protoreflect.Message { + mi := &file_pipeline_spec_proto_msgTypes[11] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TaskConfigPassthroughType.ProtoReflect.Descriptor instead. +func (*TaskConfigPassthroughType) Descriptor() ([]byte, []int) { + return file_pipeline_spec_proto_rawDescGZIP(), []int{11} +} + +type TaskConfigPassthrough struct { + state protoimpl.MessageState `protogen:"open.v1"` + Field TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum `protobuf:"varint,1,opt,name=field,proto3,enum=ml_pipelines.TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum" json:"field,omitempty"` + ApplyToTask bool `protobuf:"varint,2,opt,name=apply_to_task,json=applyToTask,proto3" json:"apply_to_task,omitempty"` unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *TaskConfigPassthrough) Reset() { + *x = TaskConfigPassthrough{} + mi := &file_pipeline_spec_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} +func (x *TaskConfigPassthrough) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TaskConfigPassthrough) ProtoMessage() {} + +func (x *TaskConfigPassthrough) ProtoReflect() protoreflect.Message { + mi := &file_pipeline_spec_proto_msgTypes[12] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TaskConfigPassthrough.ProtoReflect.Descriptor instead. 
+func (*TaskConfigPassthrough) Descriptor() ([]byte, []int) { + return file_pipeline_spec_proto_rawDescGZIP(), []int{12} +} + +func (x *TaskConfigPassthrough) GetField() TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum { + if x != nil { + return x.Field + } + return TaskConfigPassthroughType_NONE +} + +func (x *TaskConfigPassthrough) GetApplyToTask() bool { + if x != nil { + return x.ApplyToTask + } + return false +} + +// The spec of a pipeline task. +type PipelineTaskSpec struct { + state protoimpl.MessageState `protogen:"open.v1"` // Basic info of a pipeline task. TaskInfo *PipelineTaskInfo `protobuf:"bytes,1,opt,name=task_info,json=taskInfo,proto3" json:"task_info,omitempty"` // Specification for task inputs which contains parameters and artifacts. @@ -1097,7 +1246,7 @@ type PipelineTaskSpec struct { // parent_task.outputs.parameters = { 'p': '["v1", "v2"]' } // parent_task.outputs.artifacts = { 'a': [a1, a2] } // - // Types that are assignable to Iterator: + // Types that are valid to be assigned to Iterator: // // *PipelineTaskSpec_ArtifactIterator // *PipelineTaskSpec_ParameterIterator @@ -1107,15 +1256,15 @@ type PipelineTaskSpec struct { RetryPolicy *PipelineTaskSpec_RetryPolicy `protobuf:"bytes,11,opt,name=retry_policy,json=retryPolicy,proto3" json:"retry_policy,omitempty"` // Iterator related settings. IteratorPolicy *PipelineTaskSpec_IteratorPolicy `protobuf:"bytes,12,opt,name=iterator_policy,json=iteratorPolicy,proto3" json:"iterator_policy,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineTaskSpec) Reset() { *x = PipelineTaskSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[11] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineTaskSpec) String() string { @@ -1125,8 +1274,8 @@ func (x *PipelineTaskSpec) String() string { func (*PipelineTaskSpec) ProtoMessage() {} func (x *PipelineTaskSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[11] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[13] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1138,7 +1287,7 @@ func (x *PipelineTaskSpec) ProtoReflect() protoreflect.Message { // Deprecated: Use PipelineTaskSpec.ProtoReflect.Descriptor instead. 
func (*PipelineTaskSpec) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{11} + return file_pipeline_spec_proto_rawDescGZIP(), []int{13} } func (x *PipelineTaskSpec) GetTaskInfo() *PipelineTaskInfo { @@ -1183,23 +1332,27 @@ func (x *PipelineTaskSpec) GetTriggerPolicy() *PipelineTaskSpec_TriggerPolicy { return nil } -func (m *PipelineTaskSpec) GetIterator() isPipelineTaskSpec_Iterator { - if m != nil { - return m.Iterator +func (x *PipelineTaskSpec) GetIterator() isPipelineTaskSpec_Iterator { + if x != nil { + return x.Iterator } return nil } func (x *PipelineTaskSpec) GetArtifactIterator() *ArtifactIteratorSpec { - if x, ok := x.GetIterator().(*PipelineTaskSpec_ArtifactIterator); ok { - return x.ArtifactIterator + if x != nil { + if x, ok := x.Iterator.(*PipelineTaskSpec_ArtifactIterator); ok { + return x.ArtifactIterator + } } return nil } func (x *PipelineTaskSpec) GetParameterIterator() *ParameterIteratorSpec { - if x, ok := x.GetIterator().(*PipelineTaskSpec_ParameterIterator); ok { - return x.ParameterIterator + if x != nil { + if x, ok := x.Iterator.(*PipelineTaskSpec_ParameterIterator); ok { + return x.ParameterIterator + } } return nil } @@ -1239,24 +1392,21 @@ func (*PipelineTaskSpec_ParameterIterator) isPipelineTaskSpec_Iterator() {} // The spec of an artifact iterator. It supports fan-out a workflow from a list // of artifacts. type ArtifactIteratorSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The items to iterate. Items *ArtifactIteratorSpec_ItemsSpec `protobuf:"bytes,1,opt,name=items,proto3" json:"items,omitempty"` // The name of the input artifact channel which has the artifact item from the // [items][] collection. - ItemInput string `protobuf:"bytes,2,opt,name=item_input,json=itemInput,proto3" json:"item_input,omitempty"` + ItemInput string `protobuf:"bytes,2,opt,name=item_input,json=itemInput,proto3" json:"item_input,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ArtifactIteratorSpec) Reset() { *x = ArtifactIteratorSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[12] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ArtifactIteratorSpec) String() string { @@ -1266,8 +1416,8 @@ func (x *ArtifactIteratorSpec) String() string { func (*ArtifactIteratorSpec) ProtoMessage() {} func (x *ArtifactIteratorSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[12] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[14] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1279,7 +1429,7 @@ func (x *ArtifactIteratorSpec) ProtoReflect() protoreflect.Message { // Deprecated: Use ArtifactIteratorSpec.ProtoReflect.Descriptor instead. func (*ArtifactIteratorSpec) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{12} + return file_pipeline_spec_proto_rawDescGZIP(), []int{14} } func (x *ArtifactIteratorSpec) GetItems() *ArtifactIteratorSpec_ItemsSpec { @@ -1299,24 +1449,21 @@ func (x *ArtifactIteratorSpec) GetItemInput() string { // The spec of a parameter iterator. 
It supports fan-out a workflow from a // string parameter which contains a JSON array. type ParameterIteratorSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The items to iterate. Items *ParameterIteratorSpec_ItemsSpec `protobuf:"bytes,1,opt,name=items,proto3" json:"items,omitempty"` // The name of the input parameter which has the item value from the // [items][] collection. - ItemInput string `protobuf:"bytes,2,opt,name=item_input,json=itemInput,proto3" json:"item_input,omitempty"` + ItemInput string `protobuf:"bytes,2,opt,name=item_input,json=itemInput,proto3" json:"item_input,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ParameterIteratorSpec) Reset() { *x = ParameterIteratorSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[13] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ParameterIteratorSpec) String() string { @@ -1326,8 +1473,8 @@ func (x *ParameterIteratorSpec) String() string { func (*ParameterIteratorSpec) ProtoMessage() {} func (x *ParameterIteratorSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[13] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[15] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1339,7 +1486,7 @@ func (x *ParameterIteratorSpec) ProtoReflect() protoreflect.Message { // Deprecated: Use ParameterIteratorSpec.ProtoReflect.Descriptor instead. func (*ParameterIteratorSpec) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{13} + return file_pipeline_spec_proto_rawDescGZIP(), []int{15} } func (x *ParameterIteratorSpec) GetItems() *ParameterIteratorSpec_ItemsSpec { @@ -1357,22 +1504,19 @@ func (x *ParameterIteratorSpec) GetItemInput() string { } type ComponentRef struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The name of a component. Refer to the key of the // [PipelineSpec.components][] map. 
- Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ComponentRef) Reset() { *x = ComponentRef{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[14] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ComponentRef) String() string { @@ -1382,8 +1526,8 @@ func (x *ComponentRef) String() string { func (*ComponentRef) ProtoMessage() {} func (x *ComponentRef) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[14] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[16] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1395,7 +1539,7 @@ func (x *ComponentRef) ProtoReflect() protoreflect.Message { // Deprecated: Use ComponentRef.ProtoReflect.Descriptor instead. func (*ComponentRef) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{14} + return file_pipeline_spec_proto_rawDescGZIP(), []int{16} } func (x *ComponentRef) GetName() string { @@ -1407,10 +1551,7 @@ func (x *ComponentRef) GetName() string { // Basic info of a pipeline. type PipelineInfo struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Required field. The name of the pipeline. // The name will be used to create or find pipeline context in MLMD. Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` @@ -1419,16 +1560,16 @@ type PipelineInfo struct { DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` // Optional fields. The readable description for the pipeline template. // Should not exceed 1024 characters. - Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineInfo) Reset() { *x = PipelineInfo{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[15] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineInfo) String() string { @@ -1438,8 +1579,8 @@ func (x *PipelineInfo) String() string { func (*PipelineInfo) ProtoMessage() {} func (x *PipelineInfo) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[15] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[17] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1451,7 +1592,7 @@ func (x *PipelineInfo) ProtoReflect() protoreflect.Message { // Deprecated: Use PipelineInfo.ProtoReflect.Descriptor instead. 
func (*PipelineInfo) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{15} + return file_pipeline_spec_proto_rawDescGZIP(), []int{17} } func (x *PipelineInfo) GetName() string { @@ -1477,11 +1618,8 @@ func (x *PipelineInfo) GetDescription() string { // The definition of a artifact type in MLMD. type ArtifactTypeSchema struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Kind: + state protoimpl.MessageState `protogen:"open.v1"` + // Types that are valid to be assigned to Kind: // // *ArtifactTypeSchema_SchemaTitle // *ArtifactTypeSchema_SchemaUri @@ -1490,15 +1628,15 @@ type ArtifactTypeSchema struct { // The schema version of the artifact. If the value is not set, it defaults // to the the latest version in the system. SchemaVersion string `protobuf:"bytes,4,opt,name=schema_version,json=schemaVersion,proto3" json:"schema_version,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ArtifactTypeSchema) Reset() { *x = ArtifactTypeSchema{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[16] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ArtifactTypeSchema) String() string { @@ -1508,8 +1646,8 @@ func (x *ArtifactTypeSchema) String() string { func (*ArtifactTypeSchema) ProtoMessage() {} func (x *ArtifactTypeSchema) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[16] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[18] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1521,34 +1659,40 @@ func (x *ArtifactTypeSchema) ProtoReflect() protoreflect.Message { // Deprecated: Use ArtifactTypeSchema.ProtoReflect.Descriptor instead. func (*ArtifactTypeSchema) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{16} + return file_pipeline_spec_proto_rawDescGZIP(), []int{18} } -func (m *ArtifactTypeSchema) GetKind() isArtifactTypeSchema_Kind { - if m != nil { - return m.Kind +func (x *ArtifactTypeSchema) GetKind() isArtifactTypeSchema_Kind { + if x != nil { + return x.Kind } return nil } func (x *ArtifactTypeSchema) GetSchemaTitle() string { - if x, ok := x.GetKind().(*ArtifactTypeSchema_SchemaTitle); ok { - return x.SchemaTitle + if x != nil { + if x, ok := x.Kind.(*ArtifactTypeSchema_SchemaTitle); ok { + return x.SchemaTitle + } } return "" } // Deprecated: Marked as deprecated in pipeline_spec.proto. func (x *ArtifactTypeSchema) GetSchemaUri() string { - if x, ok := x.GetKind().(*ArtifactTypeSchema_SchemaUri); ok { - return x.SchemaUri + if x != nil { + if x, ok := x.Kind.(*ArtifactTypeSchema_SchemaUri); ok { + return x.SchemaUri + } } return "" } func (x *ArtifactTypeSchema) GetInstanceSchema() string { - if x, ok := x.GetKind().(*ArtifactTypeSchema_InstanceSchema); ok { - return x.InstanceSchema + if x != nil { + if x, ok := x.Kind.(*ArtifactTypeSchema_InstanceSchema); ok { + return x.InstanceSchema + } } return "" } @@ -1599,21 +1743,18 @@ func (*ArtifactTypeSchema_InstanceSchema) isArtifactTypeSchema_Kind() {} // The basic info of a task. 
type PipelineTaskInfo struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The display name of the task. - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineTaskInfo) Reset() { *x = PipelineTaskInfo{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[17] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineTaskInfo) String() string { @@ -1623,8 +1764,8 @@ func (x *PipelineTaskInfo) String() string { func (*PipelineTaskInfo) ProtoMessage() {} func (x *PipelineTaskInfo) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[17] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[19] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1636,7 +1777,7 @@ func (x *PipelineTaskInfo) ProtoReflect() protoreflect.Message { // Deprecated: Use PipelineTaskInfo.ProtoReflect.Descriptor instead. func (*PipelineTaskInfo) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{17} + return file_pipeline_spec_proto_rawDescGZIP(), []int{19} } func (x *PipelineTaskInfo) GetName() string { @@ -1651,25 +1792,22 @@ func (x *PipelineTaskInfo) GetName() string { // determined during compilation time, or a runtime parameter which will be // determined during runtime. type ValueOrRuntimeParameter struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Value: + state protoimpl.MessageState `protogen:"open.v1"` + // Types that are valid to be assigned to Value: // // *ValueOrRuntimeParameter_ConstantValue // *ValueOrRuntimeParameter_RuntimeParameter // *ValueOrRuntimeParameter_Constant - Value isValueOrRuntimeParameter_Value `protobuf_oneof:"value"` + Value isValueOrRuntimeParameter_Value `protobuf_oneof:"value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ValueOrRuntimeParameter) Reset() { *x = ValueOrRuntimeParameter{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[18] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ValueOrRuntimeParameter) String() string { @@ -1679,8 +1817,8 @@ func (x *ValueOrRuntimeParameter) String() string { func (*ValueOrRuntimeParameter) ProtoMessage() {} func (x *ValueOrRuntimeParameter) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[18] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[20] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1692,34 +1830,40 @@ func (x *ValueOrRuntimeParameter) ProtoReflect() protoreflect.Message { // Deprecated: Use ValueOrRuntimeParameter.ProtoReflect.Descriptor instead. 
func (*ValueOrRuntimeParameter) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{18} + return file_pipeline_spec_proto_rawDescGZIP(), []int{20} } -func (m *ValueOrRuntimeParameter) GetValue() isValueOrRuntimeParameter_Value { - if m != nil { - return m.Value +func (x *ValueOrRuntimeParameter) GetValue() isValueOrRuntimeParameter_Value { + if x != nil { + return x.Value } return nil } // Deprecated: Marked as deprecated in pipeline_spec.proto. func (x *ValueOrRuntimeParameter) GetConstantValue() *Value { - if x, ok := x.GetValue().(*ValueOrRuntimeParameter_ConstantValue); ok { - return x.ConstantValue + if x != nil { + if x, ok := x.Value.(*ValueOrRuntimeParameter_ConstantValue); ok { + return x.ConstantValue + } } return nil } func (x *ValueOrRuntimeParameter) GetRuntimeParameter() string { - if x, ok := x.GetValue().(*ValueOrRuntimeParameter_RuntimeParameter); ok { - return x.RuntimeParameter + if x != nil { + if x, ok := x.Value.(*ValueOrRuntimeParameter_RuntimeParameter); ok { + return x.RuntimeParameter + } } return "" } func (x *ValueOrRuntimeParameter) GetConstant() *structpb.Value { - if x, ok := x.GetValue().(*ValueOrRuntimeParameter_Constant); ok { - return x.Constant + if x != nil { + if x, ok := x.Value.(*ValueOrRuntimeParameter_Constant); ok { + return x.Constant + } } return nil } @@ -1755,21 +1899,18 @@ func (*ValueOrRuntimeParameter_Constant) isValueOrRuntimeParameter_Value() {} // The definition of the deployment config of the pipeline. It contains the // the platform specific executor configs for KFP OSS. type PipelineDeploymentConfig struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Map from executor label to executor spec. - Executors map[string]*PipelineDeploymentConfig_ExecutorSpec `protobuf:"bytes,1,rep,name=executors,proto3" json:"executors,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Executors map[string]*PipelineDeploymentConfig_ExecutorSpec `protobuf:"bytes,1,rep,name=executors,proto3" json:"executors,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineDeploymentConfig) Reset() { *x = PipelineDeploymentConfig{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[19] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineDeploymentConfig) String() string { @@ -1779,8 +1920,8 @@ func (x *PipelineDeploymentConfig) String() string { func (*PipelineDeploymentConfig) ProtoMessage() {} func (x *PipelineDeploymentConfig) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[19] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[21] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1792,7 +1933,7 @@ func (x *PipelineDeploymentConfig) ProtoReflect() protoreflect.Message { // Deprecated: Use PipelineDeploymentConfig.ProtoReflect.Descriptor instead. 
func (*PipelineDeploymentConfig) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{19} + return file_pipeline_spec_proto_rawDescGZIP(), []int{21} } func (x *PipelineDeploymentConfig) GetExecutors() map[string]*PipelineDeploymentConfig_ExecutorSpec { @@ -1804,25 +1945,22 @@ func (x *PipelineDeploymentConfig) GetExecutors() map[string]*PipelineDeployment // Value is the value of the field. type Value struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Value: + state protoimpl.MessageState `protogen:"open.v1"` + // Types that are valid to be assigned to Value: // // *Value_IntValue // *Value_DoubleValue // *Value_StringValue - Value isValue_Value `protobuf_oneof:"value"` + Value isValue_Value `protobuf_oneof:"value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Value) Reset() { *x = Value{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[20] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Value) String() string { @@ -1832,8 +1970,8 @@ func (x *Value) String() string { func (*Value) ProtoMessage() {} func (x *Value) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[20] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[22] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1845,33 +1983,39 @@ func (x *Value) ProtoReflect() protoreflect.Message { // Deprecated: Use Value.ProtoReflect.Descriptor instead. func (*Value) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{20} + return file_pipeline_spec_proto_rawDescGZIP(), []int{22} } -func (m *Value) GetValue() isValue_Value { - if m != nil { - return m.Value +func (x *Value) GetValue() isValue_Value { + if x != nil { + return x.Value } return nil } func (x *Value) GetIntValue() int64 { - if x, ok := x.GetValue().(*Value_IntValue); ok { - return x.IntValue + if x != nil { + if x, ok := x.Value.(*Value_IntValue); ok { + return x.IntValue + } } return 0 } func (x *Value) GetDoubleValue() float64 { - if x, ok := x.GetValue().(*Value_DoubleValue); ok { - return x.DoubleValue + if x != nil { + if x, ok := x.Value.(*Value_DoubleValue); ok { + return x.DoubleValue + } } return 0 } func (x *Value) GetStringValue() string { - if x, ok := x.GetValue().(*Value_StringValue); ok { - return x.StringValue + if x != nil { + if x, ok := x.Value.(*Value_StringValue); ok { + return x.StringValue + } } return "" } @@ -1903,10 +2047,7 @@ func (*Value_StringValue) isValue_Value() {} // The definition of a runtime artifact. type RuntimeArtifact struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The name of an artifact. Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // The type of the artifact. @@ -1917,23 +2058,23 @@ type RuntimeArtifact struct { // Deprecated. Use [RuntimeArtifact.metadata][] instead. // // Deprecated: Marked as deprecated in pipeline_spec.proto. 
- Properties map[string]*Value `protobuf:"bytes,4,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Properties map[string]*Value `protobuf:"bytes,4,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // The custom properties of the artifact. // Deprecated. Use [RuntimeArtifact.metadata][] instead. // // Deprecated: Marked as deprecated in pipeline_spec.proto. - CustomProperties map[string]*Value `protobuf:"bytes,5,rep,name=custom_properties,json=customProperties,proto3" json:"custom_properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + CustomProperties map[string]*Value `protobuf:"bytes,5,rep,name=custom_properties,json=customProperties,proto3" json:"custom_properties,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // Properties of the Artifact. - Metadata *structpb.Struct `protobuf:"bytes,6,opt,name=metadata,proto3" json:"metadata,omitempty"` + Metadata *structpb.Struct `protobuf:"bytes,6,opt,name=metadata,proto3" json:"metadata,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *RuntimeArtifact) Reset() { *x = RuntimeArtifact{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[21] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *RuntimeArtifact) String() string { @@ -1943,8 +2084,8 @@ func (x *RuntimeArtifact) String() string { func (*RuntimeArtifact) ProtoMessage() {} func (x *RuntimeArtifact) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[21] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[23] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1956,7 +2097,7 @@ func (x *RuntimeArtifact) ProtoReflect() protoreflect.Message { // Deprecated: Use RuntimeArtifact.ProtoReflect.Descriptor instead. func (*RuntimeArtifact) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{21} + return file_pipeline_spec_proto_rawDescGZIP(), []int{23} } func (x *RuntimeArtifact) GetName() string { @@ -2005,21 +2146,18 @@ func (x *RuntimeArtifact) GetMetadata() *structpb.Struct { // Message that represents a list of artifacts. type ArtifactList struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // A list of artifacts. 
- Artifacts []*RuntimeArtifact `protobuf:"bytes,1,rep,name=artifacts,proto3" json:"artifacts,omitempty"` + Artifacts []*RuntimeArtifact `protobuf:"bytes,1,rep,name=artifacts,proto3" json:"artifacts,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ArtifactList) Reset() { *x = ArtifactList{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[22] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ArtifactList) String() string { @@ -2029,8 +2167,8 @@ func (x *ArtifactList) String() string { func (*ArtifactList) ProtoMessage() {} func (x *ArtifactList) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[22] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[24] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -2042,7 +2180,7 @@ func (x *ArtifactList) ProtoReflect() protoreflect.Message { // Deprecated: Use ArtifactList.ProtoReflect.Descriptor instead. func (*ArtifactList) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{22} + return file_pipeline_spec_proto_rawDescGZIP(), []int{24} } func (x *ArtifactList) GetArtifacts() []*RuntimeArtifact { @@ -2078,23 +2216,20 @@ func (x *ArtifactList) GetArtifacts() []*RuntimeArtifact { // output file and executor output metadata files are set by the container, the // output metadata file will have higher precedence to set output parameters. type ExecutorInput struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The runtime input artifacts of the task invocation. Inputs *ExecutorInput_Inputs `protobuf:"bytes,1,opt,name=inputs,proto3" json:"inputs,omitempty"` // The runtime output artifacts of the task invocation. - Outputs *ExecutorInput_Outputs `protobuf:"bytes,2,opt,name=outputs,proto3" json:"outputs,omitempty"` + Outputs *ExecutorInput_Outputs `protobuf:"bytes,2,opt,name=outputs,proto3" json:"outputs,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ExecutorInput) Reset() { *x = ExecutorInput{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[23] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[25] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ExecutorInput) String() string { @@ -2104,8 +2239,8 @@ func (x *ExecutorInput) String() string { func (*ExecutorInput) ProtoMessage() {} func (x *ExecutorInput) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[23] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[25] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -2117,7 +2252,7 @@ func (x *ExecutorInput) ProtoReflect() protoreflect.Message { // Deprecated: Use ExecutorInput.ProtoReflect.Descriptor instead. 
func (*ExecutorInput) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{23} + return file_pipeline_spec_proto_rawDescGZIP(), []int{25} } func (x *ExecutorInput) GetInputs() *ExecutorInput_Inputs { @@ -2137,28 +2272,25 @@ func (x *ExecutorInput) GetOutputs() *ExecutorInput_Outputs { // The schema of the output metadata of an execution. It will be used to parse // the output metadata file. type ExecutorOutput struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The values for output parameters. // Deprecated. Use [ExecutorOutput.parameter_values][] instead. // // Deprecated: Marked as deprecated in pipeline_spec.proto. - Parameters map[string]*Value `protobuf:"bytes,1,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Parameters map[string]*Value `protobuf:"bytes,1,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // The updated metadata for output artifact. - Artifacts map[string]*ArtifactList `protobuf:"bytes,2,rep,name=artifacts,proto3" json:"artifacts,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Artifacts map[string]*ArtifactList `protobuf:"bytes,2,rep,name=artifacts,proto3" json:"artifacts,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // The values for output parameters. - ParameterValues map[string]*structpb.Value `protobuf:"bytes,3,rep,name=parameter_values,json=parameterValues,proto3" json:"parameter_values,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + ParameterValues map[string]*structpb.Value `protobuf:"bytes,3,rep,name=parameter_values,json=parameterValues,proto3" json:"parameter_values,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ExecutorOutput) Reset() { *x = ExecutorOutput{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[24] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[26] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ExecutorOutput) String() string { @@ -2168,8 +2300,8 @@ func (x *ExecutorOutput) String() string { func (*ExecutorOutput) ProtoMessage() {} func (x *ExecutorOutput) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[24] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[26] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -2181,7 +2313,7 @@ func (x *ExecutorOutput) ProtoReflect() protoreflect.Message { // Deprecated: Use ExecutorOutput.ProtoReflect.Descriptor instead. func (*ExecutorOutput) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{24} + return file_pipeline_spec_proto_rawDescGZIP(), []int{26} } // Deprecated: Marked as deprecated in pipeline_spec.proto. @@ -2209,10 +2341,7 @@ func (x *ExecutorOutput) GetParameterValues() map[string]*structpb.Value { // The final status of a task. 
The structure will be passed to input parameter // of kind `task_final_status`. type PipelineTaskFinalStatus struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The final state of the task. // The value is the string version of [PipelineStateEnum.PipelineTaskState][] State string `protobuf:"bytes,1,opt,name=state,proto3" json:"state,omitempty"` @@ -2231,15 +2360,15 @@ type PipelineTaskFinalStatus struct { PipelineJobResourceName string `protobuf:"bytes,5,opt,name=pipeline_job_resource_name,json=pipelineJobResourceName,proto3" json:"pipeline_job_resource_name,omitempty"` // The pipeline task that produces this status. PipelineTaskName string `protobuf:"bytes,6,opt,name=pipeline_task_name,json=pipelineTaskName,proto3" json:"pipeline_task_name,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineTaskFinalStatus) Reset() { *x = PipelineTaskFinalStatus{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[25] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[27] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineTaskFinalStatus) String() string { @@ -2249,8 +2378,8 @@ func (x *PipelineTaskFinalStatus) String() string { func (*PipelineTaskFinalStatus) ProtoMessage() {} func (x *PipelineTaskFinalStatus) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[25] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[27] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -2262,7 +2391,7 @@ func (x *PipelineTaskFinalStatus) ProtoReflect() protoreflect.Message { // Deprecated: Use PipelineTaskFinalStatus.ProtoReflect.Descriptor instead. 
func (*PipelineTaskFinalStatus) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{25} + return file_pipeline_spec_proto_rawDescGZIP(), []int{27} } func (x *PipelineTaskFinalStatus) GetState() string { @@ -2310,18 +2439,16 @@ func (x *PipelineTaskFinalStatus) GetPipelineTaskName() string { } type PipelineStateEnum struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineStateEnum) Reset() { *x = PipelineStateEnum{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[26] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[28] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineStateEnum) String() string { @@ -2331,8 +2458,8 @@ func (x *PipelineStateEnum) String() string { func (*PipelineStateEnum) ProtoMessage() {} func (x *PipelineStateEnum) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[26] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[28] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -2344,26 +2471,23 @@ func (x *PipelineStateEnum) ProtoReflect() protoreflect.Message { // Deprecated: Use PipelineStateEnum.ProtoReflect.Descriptor instead. func (*PipelineStateEnum) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{26} + return file_pipeline_spec_proto_rawDescGZIP(), []int{28} } // Spec for all platforms; second document in IR type PlatformSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Platform key to full platform config - Platforms map[string]*SinglePlatformSpec `protobuf:"bytes,1,rep,name=platforms,proto3" json:"platforms,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Platforms map[string]*SinglePlatformSpec `protobuf:"bytes,1,rep,name=platforms,proto3" json:"platforms,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PlatformSpec) Reset() { *x = PlatformSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[27] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[29] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PlatformSpec) String() string { @@ -2373,8 +2497,8 @@ func (x *PlatformSpec) String() string { func (*PlatformSpec) ProtoMessage() {} func (x *PlatformSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[27] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[29] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -2386,7 +2510,7 @@ func (x *PlatformSpec) ProtoReflect() protoreflect.Message { // Deprecated: Use PlatformSpec.ProtoReflect.Descriptor instead. 
func (*PlatformSpec) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{27} + return file_pipeline_spec_proto_rawDescGZIP(), []int{29} } func (x *PlatformSpec) GetPlatforms() map[string]*SinglePlatformSpec { @@ -2397,10 +2521,7 @@ func (x *PlatformSpec) GetPlatforms() map[string]*SinglePlatformSpec { } type SinglePlatformSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Mirrors PipelineSpec.deployment_spec structure DeploymentSpec *PlatformDeploymentConfig `protobuf:"bytes,1,opt,name=deployment_spec,json=deploymentSpec,proto3" json:"deployment_spec,omitempty"` // Name of the platform. For example, "google_cloud" @@ -2409,15 +2530,15 @@ type SinglePlatformSpec struct { // protos/libraries. Config *structpb.Struct `protobuf:"bytes,3,opt,name=config,proto3" json:"config,omitempty"` PipelineConfig *PipelineConfig `protobuf:"bytes,4,opt,name=pipelineConfig,proto3" json:"pipelineConfig,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *SinglePlatformSpec) Reset() { *x = SinglePlatformSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[28] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[30] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *SinglePlatformSpec) String() string { @@ -2427,8 +2548,8 @@ func (x *SinglePlatformSpec) String() string { func (*SinglePlatformSpec) ProtoMessage() {} func (x *SinglePlatformSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[28] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[30] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -2440,7 +2561,7 @@ func (x *SinglePlatformSpec) ProtoReflect() protoreflect.Message { // Deprecated: Use SinglePlatformSpec.ProtoReflect.Descriptor instead. 
func (*SinglePlatformSpec) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{28} + return file_pipeline_spec_proto_rawDescGZIP(), []int{30} } func (x *SinglePlatformSpec) GetDeploymentSpec() *PlatformDeploymentConfig { @@ -2472,22 +2593,19 @@ func (x *SinglePlatformSpec) GetPipelineConfig() *PipelineConfig { } type PlatformDeploymentConfig struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Map of executor label to executor-level config // Mirrors PipelineSpec.deployment_spec.executors structure - Executors map[string]*structpb.Struct `protobuf:"bytes,1,rep,name=executors,proto3" json:"executors,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Executors map[string]*structpb.Struct `protobuf:"bytes,1,rep,name=executors,proto3" json:"executors,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PlatformDeploymentConfig) Reset() { *x = PlatformDeploymentConfig{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[29] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[31] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PlatformDeploymentConfig) String() string { @@ -2497,8 +2615,8 @@ func (x *PlatformDeploymentConfig) String() string { func (*PlatformDeploymentConfig) ProtoMessage() {} func (x *PlatformDeploymentConfig) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[29] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[31] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -2510,7 +2628,7 @@ func (x *PlatformDeploymentConfig) ProtoReflect() protoreflect.Message { // Deprecated: Use PlatformDeploymentConfig.ProtoReflect.Descriptor instead. 
func (*PlatformDeploymentConfig) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{29} + return file_pipeline_spec_proto_rawDescGZIP(), []int{31} } func (x *PlatformDeploymentConfig) GetExecutors() map[string]*structpb.Struct { @@ -2521,25 +2639,22 @@ func (x *PlatformDeploymentConfig) GetExecutors() map[string]*structpb.Struct { } type WorkspaceConfig struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Size of the workspace // Example: "250Gi" // See https://kubernetes.io/docs/reference/kubernetes-api/common-definitions/quantity/ for valid quantity formats Size string `protobuf:"bytes,1,opt,name=size,proto3" json:"size,omitempty"` // Kubernetes specific configuration for the workspace - Kubernetes *KubernetesWorkspaceConfig `protobuf:"bytes,2,opt,name=kubernetes,proto3,oneof" json:"kubernetes,omitempty"` + Kubernetes *KubernetesWorkspaceConfig `protobuf:"bytes,2,opt,name=kubernetes,proto3,oneof" json:"kubernetes,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *WorkspaceConfig) Reset() { *x = WorkspaceConfig{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[30] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[32] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *WorkspaceConfig) String() string { @@ -2549,8 +2664,8 @@ func (x *WorkspaceConfig) String() string { func (*WorkspaceConfig) ProtoMessage() {} func (x *WorkspaceConfig) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[30] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[32] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -2562,7 +2677,7 @@ func (x *WorkspaceConfig) ProtoReflect() protoreflect.Message { // Deprecated: Use WorkspaceConfig.ProtoReflect.Descriptor instead. 
func (*WorkspaceConfig) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{30} + return file_pipeline_spec_proto_rawDescGZIP(), []int{32} } func (x *WorkspaceConfig) GetSize() string { @@ -2580,25 +2695,22 @@ func (x *WorkspaceConfig) GetKubernetes() *KubernetesWorkspaceConfig { } type KubernetesWorkspaceConfig struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Patch of a PersistentVolumeClaim (PVC) spec to override defaults set on the API server for the workspace PVC // Example: { // "storageClassName": "super-fast-storage", // "accessModes": ["ReadWriteMany"] // } - PvcSpecPatch *structpb.Struct `protobuf:"bytes,1,opt,name=pvc_spec_patch,json=pvcSpecPatch,proto3,oneof" json:"pvc_spec_patch,omitempty"` + PvcSpecPatch *structpb.Struct `protobuf:"bytes,1,opt,name=pvc_spec_patch,json=pvcSpecPatch,proto3,oneof" json:"pvc_spec_patch,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *KubernetesWorkspaceConfig) Reset() { *x = KubernetesWorkspaceConfig{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[31] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[33] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *KubernetesWorkspaceConfig) String() string { @@ -2608,8 +2720,8 @@ func (x *KubernetesWorkspaceConfig) String() string { func (*KubernetesWorkspaceConfig) ProtoMessage() {} func (x *KubernetesWorkspaceConfig) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[31] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[33] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -2621,7 +2733,7 @@ func (x *KubernetesWorkspaceConfig) ProtoReflect() protoreflect.Message { // Deprecated: Use KubernetesWorkspaceConfig.ProtoReflect.Descriptor instead. func (*KubernetesWorkspaceConfig) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{31} + return file_pipeline_spec_proto_rawDescGZIP(), []int{33} } func (x *KubernetesWorkspaceConfig) GetPvcSpecPatch() *structpb.Struct { @@ -2633,10 +2745,7 @@ func (x *KubernetesWorkspaceConfig) GetPvcSpecPatch() *structpb.Struct { // Spec for pipeline-level config options. See PipelineConfig DSL class. type PipelineConfig struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Name of the semaphore key to control pipeline concurrency SemaphoreKey string `protobuf:"bytes,1,opt,name=semaphore_key,json=semaphoreKey,proto3" json:"semaphore_key,omitempty"` // Name of the mutex to ensure mutual exclusion @@ -2646,16 +2755,16 @@ type PipelineConfig struct { ResourceTtl int32 `protobuf:"varint,3,opt,name=resource_ttl,json=resourceTtl,proto3" json:"resource_ttl,omitempty"` // Configuration for a shared storage workspace that persists for the duration of the pipeline run. // The workspace can be configured with size and Kubernetes-specific settings to override default PVC configurations. 
- Workspace *WorkspaceConfig `protobuf:"bytes,4,opt,name=workspace,proto3,oneof" json:"workspace,omitempty"` + Workspace *WorkspaceConfig `protobuf:"bytes,4,opt,name=workspace,proto3,oneof" json:"workspace,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineConfig) Reset() { *x = PipelineConfig{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[32] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[34] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineConfig) String() string { @@ -2665,8 +2774,8 @@ func (x *PipelineConfig) String() string { func (*PipelineConfig) ProtoMessage() {} func (x *PipelineConfig) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[32] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[34] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -2678,7 +2787,7 @@ func (x *PipelineConfig) ProtoReflect() protoreflect.Message { // Deprecated: Use PipelineConfig.ProtoReflect.Descriptor instead. func (*PipelineConfig) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{32} + return file_pipeline_spec_proto_rawDescGZIP(), []int{34} } func (x *PipelineConfig) GetSemaphoreKey() string { @@ -2711,14 +2820,11 @@ func (x *PipelineConfig) GetWorkspace() *WorkspaceConfig { // The runtime config of a PipelineJob. type PipelineJob_RuntimeConfig struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Deprecated. Use [RuntimeConfig.parameter_values][] instead. // // Deprecated: Marked as deprecated in pipeline_spec.proto. - Parameters map[string]*Value `protobuf:"bytes,1,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Parameters map[string]*Value `protobuf:"bytes,1,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // A path in a Cloud Storage bucket which will be treated as the root // output directory of the pipeline. It is used by the system to // generate the paths of output artifacts. @@ -2727,16 +2833,16 @@ type PipelineJob_RuntimeConfig struct { // The runtime parameters of the PipelineJob. The parameters will be // passed into [PipelineJob.pipeline_spec][] to replace the placeholders // at runtime. 
- ParameterValues map[string]*structpb.Value `protobuf:"bytes,3,rep,name=parameter_values,json=parameterValues,proto3" json:"parameter_values,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + ParameterValues map[string]*structpb.Value `protobuf:"bytes,3,rep,name=parameter_values,json=parameterValues,proto3" json:"parameter_values,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineJob_RuntimeConfig) Reset() { *x = PipelineJob_RuntimeConfig{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[34] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[36] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineJob_RuntimeConfig) String() string { @@ -2746,8 +2852,8 @@ func (x *PipelineJob_RuntimeConfig) String() string { func (*PipelineJob_RuntimeConfig) ProtoMessage() {} func (x *PipelineJob_RuntimeConfig) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[34] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[36] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -2786,25 +2892,22 @@ func (x *PipelineJob_RuntimeConfig) GetParameterValues() map[string]*structpb.Va // The definition of the runtime parameter. type PipelineSpec_RuntimeParameter struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Required field. The type of the runtime parameter. Type PrimitiveType_PrimitiveTypeEnum `protobuf:"varint,1,opt,name=type,proto3,enum=ml_pipelines.PrimitiveType_PrimitiveTypeEnum" json:"type,omitempty"` // Optional field. Default value of the runtime parameter. If not set and // the runtime parameter value is not provided during runtime, an error will // be raised. 
- DefaultValue *Value `protobuf:"bytes,2,opt,name=default_value,json=defaultValue,proto3" json:"default_value,omitempty"` + DefaultValue *Value `protobuf:"bytes,2,opt,name=default_value,json=defaultValue,proto3" json:"default_value,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineSpec_RuntimeParameter) Reset() { *x = PipelineSpec_RuntimeParameter{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[37] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[39] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineSpec_RuntimeParameter) String() string { @@ -2814,8 +2917,8 @@ func (x *PipelineSpec_RuntimeParameter) String() string { func (*PipelineSpec_RuntimeParameter) ProtoMessage() {} func (x *PipelineSpec_RuntimeParameter) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[37] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[39] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -2846,24 +2949,21 @@ func (x *PipelineSpec_RuntimeParameter) GetDefaultValue() *Value { // Selects a defined output artifact from a sub task of the DAG. type DagOutputsSpec_ArtifactSelectorSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The name of the sub task which produces the output that matches with // the `output_artifact_key`. ProducerSubtask string `protobuf:"bytes,1,opt,name=producer_subtask,json=producerSubtask,proto3" json:"producer_subtask,omitempty"` // The key of [ComponentOutputsSpec.artifacts][] map of the producer task. OutputArtifactKey string `protobuf:"bytes,2,opt,name=output_artifact_key,json=outputArtifactKey,proto3" json:"output_artifact_key,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DagOutputsSpec_ArtifactSelectorSpec) Reset() { *x = DagOutputsSpec_ArtifactSelectorSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[40] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[42] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DagOutputsSpec_ArtifactSelectorSpec) String() string { @@ -2873,8 +2973,8 @@ func (x *DagOutputsSpec_ArtifactSelectorSpec) String() string { func (*DagOutputsSpec_ArtifactSelectorSpec) ProtoMessage() {} func (x *DagOutputsSpec_ArtifactSelectorSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[40] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[42] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -2906,22 +3006,19 @@ func (x *DagOutputsSpec_ArtifactSelectorSpec) GetOutputArtifactKey() string { // Selects a list of output artifacts that will be aggregated to the single // output artifact channel of the DAG. 
type DagOutputsSpec_DagOutputArtifactSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The selected artifacts will be aggregated as output as a single // output channel of the DAG. ArtifactSelectors []*DagOutputsSpec_ArtifactSelectorSpec `protobuf:"bytes,1,rep,name=artifact_selectors,json=artifactSelectors,proto3" json:"artifact_selectors,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DagOutputsSpec_DagOutputArtifactSpec) Reset() { *x = DagOutputsSpec_DagOutputArtifactSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[41] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[43] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DagOutputsSpec_DagOutputArtifactSpec) String() string { @@ -2931,8 +3028,8 @@ func (x *DagOutputsSpec_DagOutputArtifactSpec) String() string { func (*DagOutputsSpec_DagOutputArtifactSpec) ProtoMessage() {} func (x *DagOutputsSpec_DagOutputArtifactSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[41] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[43] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -2956,24 +3053,21 @@ func (x *DagOutputsSpec_DagOutputArtifactSpec) GetArtifactSelectors() []*DagOutp // Selects a defined output parameter from a sub task of the DAG. type DagOutputsSpec_ParameterSelectorSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The name of the sub task which produces the output that matches with // the `output_parameter_key`. ProducerSubtask string `protobuf:"bytes,1,opt,name=producer_subtask,json=producerSubtask,proto3" json:"producer_subtask,omitempty"` // The key of [ComponentOutputsSpec.parameters][] map of the producer task. 
OutputParameterKey string `protobuf:"bytes,2,opt,name=output_parameter_key,json=outputParameterKey,proto3" json:"output_parameter_key,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DagOutputsSpec_ParameterSelectorSpec) Reset() { *x = DagOutputsSpec_ParameterSelectorSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[43] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[45] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DagOutputsSpec_ParameterSelectorSpec) String() string { @@ -2983,8 +3077,8 @@ func (x *DagOutputsSpec_ParameterSelectorSpec) String() string { func (*DagOutputsSpec_ParameterSelectorSpec) ProtoMessage() {} func (x *DagOutputsSpec_ParameterSelectorSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[43] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[45] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -3015,20 +3109,17 @@ func (x *DagOutputsSpec_ParameterSelectorSpec) GetOutputParameterKey() string { // Aggregate output parameters from sub tasks into a list object. type DagOutputsSpec_ParameterSelectorsSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` ParameterSelectors []*DagOutputsSpec_ParameterSelectorSpec `protobuf:"bytes,1,rep,name=parameter_selectors,json=parameterSelectors,proto3" json:"parameter_selectors,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DagOutputsSpec_ParameterSelectorsSpec) Reset() { *x = DagOutputsSpec_ParameterSelectorsSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[44] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[46] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DagOutputsSpec_ParameterSelectorsSpec) String() string { @@ -3038,8 +3129,8 @@ func (x *DagOutputsSpec_ParameterSelectorsSpec) String() string { func (*DagOutputsSpec_ParameterSelectorsSpec) ProtoMessage() {} func (x *DagOutputsSpec_ParameterSelectorsSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[44] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[46] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -3063,20 +3154,17 @@ func (x *DagOutputsSpec_ParameterSelectorsSpec) GetParameterSelectors() []*DagOu // Aggregates output parameters from sub tasks into a map object. 
type DagOutputsSpec_MapParameterSelectorsSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - MappedParameters map[string]*DagOutputsSpec_ParameterSelectorSpec `protobuf:"bytes,2,rep,name=mapped_parameters,json=mappedParameters,proto3" json:"mapped_parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + state protoimpl.MessageState `protogen:"open.v1"` + MappedParameters map[string]*DagOutputsSpec_ParameterSelectorSpec `protobuf:"bytes,2,rep,name=mapped_parameters,json=mappedParameters,proto3" json:"mapped_parameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DagOutputsSpec_MapParameterSelectorsSpec) Reset() { *x = DagOutputsSpec_MapParameterSelectorsSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[45] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[47] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DagOutputsSpec_MapParameterSelectorsSpec) String() string { @@ -3086,8 +3174,8 @@ func (x *DagOutputsSpec_MapParameterSelectorsSpec) String() string { func (*DagOutputsSpec_MapParameterSelectorsSpec) ProtoMessage() {} func (x *DagOutputsSpec_MapParameterSelectorsSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[45] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[47] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -3117,24 +3205,21 @@ func (x *DagOutputsSpec_MapParameterSelectorsSpec) GetMappedParameters() map[str // 3. Expose a list of outputs from multiple tasks (e.g. iterator flow). // 4. Expose the aggregation of output parameters as a name-value map. 
type DagOutputsSpec_DagOutputParameterSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Kind: + state protoimpl.MessageState `protogen:"open.v1"` + // Types that are valid to be assigned to Kind: // // *DagOutputsSpec_DagOutputParameterSpec_ValueFromParameter // *DagOutputsSpec_DagOutputParameterSpec_ValueFromOneof - Kind isDagOutputsSpec_DagOutputParameterSpec_Kind `protobuf_oneof:"kind"` + Kind isDagOutputsSpec_DagOutputParameterSpec_Kind `protobuf_oneof:"kind"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DagOutputsSpec_DagOutputParameterSpec) Reset() { *x = DagOutputsSpec_DagOutputParameterSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[46] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[48] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DagOutputsSpec_DagOutputParameterSpec) String() string { @@ -3144,8 +3229,8 @@ func (x *DagOutputsSpec_DagOutputParameterSpec) String() string { func (*DagOutputsSpec_DagOutputParameterSpec) ProtoMessage() {} func (x *DagOutputsSpec_DagOutputParameterSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[46] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[48] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -3160,23 +3245,27 @@ func (*DagOutputsSpec_DagOutputParameterSpec) Descriptor() ([]byte, []int) { return file_pipeline_spec_proto_rawDescGZIP(), []int{4, 6} } -func (m *DagOutputsSpec_DagOutputParameterSpec) GetKind() isDagOutputsSpec_DagOutputParameterSpec_Kind { - if m != nil { - return m.Kind +func (x *DagOutputsSpec_DagOutputParameterSpec) GetKind() isDagOutputsSpec_DagOutputParameterSpec_Kind { + if x != nil { + return x.Kind } return nil } func (x *DagOutputsSpec_DagOutputParameterSpec) GetValueFromParameter() *DagOutputsSpec_ParameterSelectorSpec { - if x, ok := x.GetKind().(*DagOutputsSpec_DagOutputParameterSpec_ValueFromParameter); ok { - return x.ValueFromParameter + if x != nil { + if x, ok := x.Kind.(*DagOutputsSpec_DagOutputParameterSpec_ValueFromParameter); ok { + return x.ValueFromParameter + } } return nil } func (x *DagOutputsSpec_DagOutputParameterSpec) GetValueFromOneof() *DagOutputsSpec_ParameterSelectorsSpec { - if x, ok := x.GetKind().(*DagOutputsSpec_DagOutputParameterSpec_ValueFromOneof); ok { - return x.ValueFromOneof + if x != nil { + if x, ok := x.Kind.(*DagOutputsSpec_DagOutputParameterSpec_ValueFromOneof); ok { + return x.ValueFromOneof + } } return nil } @@ -3206,11 +3295,8 @@ func (*DagOutputsSpec_DagOutputParameterSpec_ValueFromOneof) isDagOutputsSpec_Da // Definition of an artifact input. 
type ComponentInputsSpec_ArtifactSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - ArtifactType *ArtifactTypeSchema `protobuf:"bytes,1,opt,name=artifact_type,json=artifactType,proto3" json:"artifact_type,omitempty"` + state protoimpl.MessageState `protogen:"open.v1"` + ArtifactType *ArtifactTypeSchema `protobuf:"bytes,1,opt,name=artifact_type,json=artifactType,proto3" json:"artifact_type,omitempty"` // Indicates whether input is a single artifact or list of artifacts IsArtifactList bool `protobuf:"varint,2,opt,name=is_artifact_list,json=isArtifactList,proto3" json:"is_artifact_list,omitempty"` // Whether this input artifact is optional or not. @@ -3223,16 +3309,16 @@ type ComponentInputsSpec_ArtifactSpec struct { IsOptional bool `protobuf:"varint,3,opt,name=is_optional,json=isOptional,proto3" json:"is_optional,omitempty"` // The description for this input artifact of the component. // Should not exceed 1024 characters. - Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"` + Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ComponentInputsSpec_ArtifactSpec) Reset() { *x = ComponentInputsSpec_ArtifactSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[49] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[51] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ComponentInputsSpec_ArtifactSpec) String() string { @@ -3242,8 +3328,8 @@ func (x *ComponentInputsSpec_ArtifactSpec) String() string { func (*ComponentInputsSpec_ArtifactSpec) ProtoMessage() {} func (x *ComponentInputsSpec_ArtifactSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[49] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[51] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -3288,10 +3374,7 @@ func (x *ComponentInputsSpec_ArtifactSpec) GetDescription() string { // Definition of a parameter input. type ComponentInputsSpec_ParameterSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Specifies an input parameter's type. // Deprecated. Use [ParameterSpec.parameter_type][] instead. // @@ -3313,16 +3396,16 @@ type ComponentInputsSpec_ParameterSpec struct { IsOptional bool `protobuf:"varint,4,opt,name=is_optional,json=isOptional,proto3" json:"is_optional,omitempty"` // The description for this input parameter of the component. // Should not exceed 1024 characters. 
- Description string `protobuf:"bytes,5,opt,name=description,proto3" json:"description,omitempty"` + Description string `protobuf:"bytes,5,opt,name=description,proto3" json:"description,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ComponentInputsSpec_ParameterSpec) Reset() { *x = ComponentInputsSpec_ParameterSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[50] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[52] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ComponentInputsSpec_ParameterSpec) String() string { @@ -3332,8 +3415,8 @@ func (x *ComponentInputsSpec_ParameterSpec) String() string { func (*ComponentInputsSpec_ParameterSpec) ProtoMessage() {} func (x *ComponentInputsSpec_ParameterSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[50] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[52] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -3386,35 +3469,32 @@ func (x *ComponentInputsSpec_ParameterSpec) GetDescription() string { // Definition of an artifact output. type ComponentOutputsSpec_ArtifactSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - ArtifactType *ArtifactTypeSchema `protobuf:"bytes,1,opt,name=artifact_type,json=artifactType,proto3" json:"artifact_type,omitempty"` + state protoimpl.MessageState `protogen:"open.v1"` + ArtifactType *ArtifactTypeSchema `protobuf:"bytes,1,opt,name=artifact_type,json=artifactType,proto3" json:"artifact_type,omitempty"` // Deprecated. Use [ArtifactSpec.metadata][] instead. // // Deprecated: Marked as deprecated in pipeline_spec.proto. - Properties map[string]*ValueOrRuntimeParameter `protobuf:"bytes,2,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Properties map[string]*ValueOrRuntimeParameter `protobuf:"bytes,2,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // Deprecated. Use [ArtifactSpec.metadata][] instead. // // Deprecated: Marked as deprecated in pipeline_spec.proto. - CustomProperties map[string]*ValueOrRuntimeParameter `protobuf:"bytes,3,rep,name=custom_properties,json=customProperties,proto3" json:"custom_properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + CustomProperties map[string]*ValueOrRuntimeParameter `protobuf:"bytes,3,rep,name=custom_properties,json=customProperties,proto3" json:"custom_properties,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // Properties of the Artifact. Metadata *structpb.Struct `protobuf:"bytes,4,opt,name=metadata,proto3" json:"metadata,omitempty"` // Indicates whether output is a single artifact or list of artifacts IsArtifactList bool `protobuf:"varint,5,opt,name=is_artifact_list,json=isArtifactList,proto3" json:"is_artifact_list,omitempty"` // The description for this output artifact of the component. // Should not exceed 1024 characters. 
- Description string `protobuf:"bytes,6,opt,name=description,proto3" json:"description,omitempty"` + Description string `protobuf:"bytes,6,opt,name=description,proto3" json:"description,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ComponentOutputsSpec_ArtifactSpec) Reset() { *x = ComponentOutputsSpec_ArtifactSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[53] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[55] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ComponentOutputsSpec_ArtifactSpec) String() string { @@ -3424,8 +3504,8 @@ func (x *ComponentOutputsSpec_ArtifactSpec) String() string { func (*ComponentOutputsSpec_ArtifactSpec) ProtoMessage() {} func (x *ComponentOutputsSpec_ArtifactSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[53] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[55] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -3486,10 +3566,7 @@ func (x *ComponentOutputsSpec_ArtifactSpec) GetDescription() string { // Definition of a parameter output. type ComponentOutputsSpec_ParameterSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Specifies an input parameter's type. // Deprecated. Use [ParameterSpec.parameter_type][] instead. // @@ -3499,16 +3576,16 @@ type ComponentOutputsSpec_ParameterSpec struct { ParameterType ParameterType_ParameterTypeEnum `protobuf:"varint,2,opt,name=parameter_type,json=parameterType,proto3,enum=ml_pipelines.ParameterType_ParameterTypeEnum" json:"parameter_type,omitempty"` // The description for this output parameter of the component. // Should not exceed 1024 characters. - Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ComponentOutputsSpec_ParameterSpec) Reset() { *x = ComponentOutputsSpec_ParameterSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[54] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[56] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ComponentOutputsSpec_ParameterSpec) String() string { @@ -3518,8 +3595,8 @@ func (x *ComponentOutputsSpec_ParameterSpec) String() string { func (*ComponentOutputsSpec_ParameterSpec) ProtoMessage() {} func (x *ComponentOutputsSpec_ParameterSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[54] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[56] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -3558,24 +3635,21 @@ func (x *ComponentOutputsSpec_ParameterSpec) GetDescription() string { // The specification of a task input artifact. 
type TaskInputsSpec_InputArtifactSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Kind: + state protoimpl.MessageState `protogen:"open.v1"` + // Types that are valid to be assigned to Kind: // // *TaskInputsSpec_InputArtifactSpec_TaskOutputArtifact // *TaskInputsSpec_InputArtifactSpec_ComponentInputArtifact - Kind isTaskInputsSpec_InputArtifactSpec_Kind `protobuf_oneof:"kind"` + Kind isTaskInputsSpec_InputArtifactSpec_Kind `protobuf_oneof:"kind"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *TaskInputsSpec_InputArtifactSpec) Reset() { *x = TaskInputsSpec_InputArtifactSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[59] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[61] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *TaskInputsSpec_InputArtifactSpec) String() string { @@ -3585,8 +3659,8 @@ func (x *TaskInputsSpec_InputArtifactSpec) String() string { func (*TaskInputsSpec_InputArtifactSpec) ProtoMessage() {} func (x *TaskInputsSpec_InputArtifactSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[59] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[61] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -3601,23 +3675,27 @@ func (*TaskInputsSpec_InputArtifactSpec) Descriptor() ([]byte, []int) { return file_pipeline_spec_proto_rawDescGZIP(), []int{7, 0} } -func (m *TaskInputsSpec_InputArtifactSpec) GetKind() isTaskInputsSpec_InputArtifactSpec_Kind { - if m != nil { - return m.Kind +func (x *TaskInputsSpec_InputArtifactSpec) GetKind() isTaskInputsSpec_InputArtifactSpec_Kind { + if x != nil { + return x.Kind } return nil } func (x *TaskInputsSpec_InputArtifactSpec) GetTaskOutputArtifact() *TaskInputsSpec_InputArtifactSpec_TaskOutputArtifactSpec { - if x, ok := x.GetKind().(*TaskInputsSpec_InputArtifactSpec_TaskOutputArtifact); ok { - return x.TaskOutputArtifact + if x != nil { + if x, ok := x.Kind.(*TaskInputsSpec_InputArtifactSpec_TaskOutputArtifact); ok { + return x.TaskOutputArtifact + } } return nil } func (x *TaskInputsSpec_InputArtifactSpec) GetComponentInputArtifact() string { - if x, ok := x.GetKind().(*TaskInputsSpec_InputArtifactSpec_ComponentInputArtifact); ok { - return x.ComponentInputArtifact + if x != nil { + if x, ok := x.Kind.(*TaskInputsSpec_InputArtifactSpec_ComponentInputArtifact); ok { + return x.ComponentInputArtifact + } } return "" } @@ -3648,11 +3726,8 @@ func (*TaskInputsSpec_InputArtifactSpec_ComponentInputArtifact) isTaskInputsSpec // `output_parameter_key`), or it can be a runtime value, which can either be // determined at compile-time, or from a pipeline parameter. type TaskInputsSpec_InputParameterSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Kind: + state protoimpl.MessageState `protogen:"open.v1"` + // Types that are valid to be assigned to Kind: // // *TaskInputsSpec_InputParameterSpec_TaskOutputParameter // *TaskInputsSpec_InputParameterSpec_RuntimeValue @@ -3685,15 +3760,15 @@ type TaskInputsSpec_InputParameterSpec struct { // // If unset, the value will be passed directly to the current task. 
ParameterExpressionSelector string `protobuf:"bytes,4,opt,name=parameter_expression_selector,json=parameterExpressionSelector,proto3" json:"parameter_expression_selector,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *TaskInputsSpec_InputParameterSpec) Reset() { *x = TaskInputsSpec_InputParameterSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[60] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[62] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *TaskInputsSpec_InputParameterSpec) String() string { @@ -3703,8 +3778,8 @@ func (x *TaskInputsSpec_InputParameterSpec) String() string { func (*TaskInputsSpec_InputParameterSpec) ProtoMessage() {} func (x *TaskInputsSpec_InputParameterSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[60] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[62] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -3719,37 +3794,45 @@ func (*TaskInputsSpec_InputParameterSpec) Descriptor() ([]byte, []int) { return file_pipeline_spec_proto_rawDescGZIP(), []int{7, 1} } -func (m *TaskInputsSpec_InputParameterSpec) GetKind() isTaskInputsSpec_InputParameterSpec_Kind { - if m != nil { - return m.Kind +func (x *TaskInputsSpec_InputParameterSpec) GetKind() isTaskInputsSpec_InputParameterSpec_Kind { + if x != nil { + return x.Kind } return nil } func (x *TaskInputsSpec_InputParameterSpec) GetTaskOutputParameter() *TaskInputsSpec_InputParameterSpec_TaskOutputParameterSpec { - if x, ok := x.GetKind().(*TaskInputsSpec_InputParameterSpec_TaskOutputParameter); ok { - return x.TaskOutputParameter + if x != nil { + if x, ok := x.Kind.(*TaskInputsSpec_InputParameterSpec_TaskOutputParameter); ok { + return x.TaskOutputParameter + } } return nil } func (x *TaskInputsSpec_InputParameterSpec) GetRuntimeValue() *ValueOrRuntimeParameter { - if x, ok := x.GetKind().(*TaskInputsSpec_InputParameterSpec_RuntimeValue); ok { - return x.RuntimeValue + if x != nil { + if x, ok := x.Kind.(*TaskInputsSpec_InputParameterSpec_RuntimeValue); ok { + return x.RuntimeValue + } } return nil } func (x *TaskInputsSpec_InputParameterSpec) GetComponentInputParameter() string { - if x, ok := x.GetKind().(*TaskInputsSpec_InputParameterSpec_ComponentInputParameter); ok { - return x.ComponentInputParameter + if x != nil { + if x, ok := x.Kind.(*TaskInputsSpec_InputParameterSpec_ComponentInputParameter); ok { + return x.ComponentInputParameter + } } return "" } func (x *TaskInputsSpec_InputParameterSpec) GetTaskFinalStatus() *TaskInputsSpec_InputParameterSpec_TaskFinalStatus { - if x, ok := x.GetKind().(*TaskInputsSpec_InputParameterSpec_TaskFinalStatus_); ok { - return x.TaskFinalStatus + if x != nil { + if x, ok := x.Kind.(*TaskInputsSpec_InputParameterSpec_TaskFinalStatus_); ok { + return x.TaskFinalStatus + } } return nil } @@ -3797,24 +3880,21 @@ func (*TaskInputsSpec_InputParameterSpec_TaskFinalStatus_) isTaskInputsSpec_Inpu } type TaskInputsSpec_InputArtifactSpec_TaskOutputArtifactSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The name of the upstream task which produces the output that matches // with the `output_artifact_key`. 
ProducerTask string `protobuf:"bytes,1,opt,name=producer_task,json=producerTask,proto3" json:"producer_task,omitempty"` // The key of [TaskOutputsSpec.artifacts][] map of the producer task. OutputArtifactKey string `protobuf:"bytes,2,opt,name=output_artifact_key,json=outputArtifactKey,proto3" json:"output_artifact_key,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *TaskInputsSpec_InputArtifactSpec_TaskOutputArtifactSpec) Reset() { *x = TaskInputsSpec_InputArtifactSpec_TaskOutputArtifactSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[63] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[65] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *TaskInputsSpec_InputArtifactSpec_TaskOutputArtifactSpec) String() string { @@ -3824,8 +3904,8 @@ func (x *TaskInputsSpec_InputArtifactSpec_TaskOutputArtifactSpec) String() strin func (*TaskInputsSpec_InputArtifactSpec_TaskOutputArtifactSpec) ProtoMessage() {} func (x *TaskInputsSpec_InputArtifactSpec_TaskOutputArtifactSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[63] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[65] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -3856,24 +3936,21 @@ func (x *TaskInputsSpec_InputArtifactSpec_TaskOutputArtifactSpec) GetOutputArtif // Represents an upstream task's output parameter. type TaskInputsSpec_InputParameterSpec_TaskOutputParameterSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The name of the upstream task which produces the output parameter that // matches with the `output_parameter_key`. ProducerTask string `protobuf:"bytes,1,opt,name=producer_task,json=producerTask,proto3" json:"producer_task,omitempty"` // The key of [TaskOutputsSpec.parameters][] map of the producer task. 
OutputParameterKey string `protobuf:"bytes,2,opt,name=output_parameter_key,json=outputParameterKey,proto3" json:"output_parameter_key,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *TaskInputsSpec_InputParameterSpec_TaskOutputParameterSpec) Reset() { *x = TaskInputsSpec_InputParameterSpec_TaskOutputParameterSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[64] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[66] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *TaskInputsSpec_InputParameterSpec_TaskOutputParameterSpec) String() string { @@ -3883,8 +3960,8 @@ func (x *TaskInputsSpec_InputParameterSpec_TaskOutputParameterSpec) String() str func (*TaskInputsSpec_InputParameterSpec_TaskOutputParameterSpec) ProtoMessage() {} func (x *TaskInputsSpec_InputParameterSpec_TaskOutputParameterSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[64] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[66] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -3917,21 +3994,18 @@ func (x *TaskInputsSpec_InputParameterSpec_TaskOutputParameterSpec) GetOutputPar // the schema version is `2.0.0`. The resolved input parameter will be a // JSON payload in string type. type TaskInputsSpec_InputParameterSpec_TaskFinalStatus struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The name of the upstream task where the final status is coming from. - ProducerTask string `protobuf:"bytes,1,opt,name=producer_task,json=producerTask,proto3" json:"producer_task,omitempty"` + ProducerTask string `protobuf:"bytes,1,opt,name=producer_task,json=producerTask,proto3" json:"producer_task,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *TaskInputsSpec_InputParameterSpec_TaskFinalStatus) Reset() { *x = TaskInputsSpec_InputParameterSpec_TaskFinalStatus{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[65] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[67] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *TaskInputsSpec_InputParameterSpec_TaskFinalStatus) String() string { @@ -3941,8 +4015,8 @@ func (x *TaskInputsSpec_InputParameterSpec_TaskFinalStatus) String() string { func (*TaskInputsSpec_InputParameterSpec_TaskFinalStatus) ProtoMessage() {} func (x *TaskInputsSpec_InputParameterSpec_TaskFinalStatus) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[65] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[67] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -3966,27 +4040,24 @@ func (x *TaskInputsSpec_InputParameterSpec_TaskFinalStatus) GetProducerTask() st // The specification of a task output artifact. type TaskOutputsSpec_OutputArtifactSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The type of the artifact. 
ArtifactType *ArtifactTypeSchema `protobuf:"bytes,1,opt,name=artifact_type,json=artifactType,proto3" json:"artifact_type,omitempty"` // The properties of the artifact, which are determined either at // compile-time, or at pipeline submission time through runtime parameters - Properties map[string]*ValueOrRuntimeParameter `protobuf:"bytes,2,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Properties map[string]*ValueOrRuntimeParameter `protobuf:"bytes,2,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // The custom properties of the artifact, which are determined either at // compile-time, or at pipeline submission time through runtime parameters - CustomProperties map[string]*ValueOrRuntimeParameter `protobuf:"bytes,3,rep,name=custom_properties,json=customProperties,proto3" json:"custom_properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + CustomProperties map[string]*ValueOrRuntimeParameter `protobuf:"bytes,3,rep,name=custom_properties,json=customProperties,proto3" json:"custom_properties,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *TaskOutputsSpec_OutputArtifactSpec) Reset() { *x = TaskOutputsSpec_OutputArtifactSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[66] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[68] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *TaskOutputsSpec_OutputArtifactSpec) String() string { @@ -3996,8 +4067,8 @@ func (x *TaskOutputsSpec_OutputArtifactSpec) String() string { func (*TaskOutputsSpec_OutputArtifactSpec) ProtoMessage() {} func (x *TaskOutputsSpec_OutputArtifactSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[66] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[68] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -4035,21 +4106,18 @@ func (x *TaskOutputsSpec_OutputArtifactSpec) GetCustomProperties() map[string]*V // Specification for output parameters produced by the task. type TaskOutputsSpec_OutputParameterSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Required field. The type of the output parameter. 
- Type PrimitiveType_PrimitiveTypeEnum `protobuf:"varint,1,opt,name=type,proto3,enum=ml_pipelines.PrimitiveType_PrimitiveTypeEnum" json:"type,omitempty"` + Type PrimitiveType_PrimitiveTypeEnum `protobuf:"varint,1,opt,name=type,proto3,enum=ml_pipelines.PrimitiveType_PrimitiveTypeEnum" json:"type,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *TaskOutputsSpec_OutputParameterSpec) Reset() { *x = TaskOutputsSpec_OutputParameterSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[67] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[69] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *TaskOutputsSpec_OutputParameterSpec) String() string { @@ -4059,8 +4127,8 @@ func (x *TaskOutputsSpec_OutputParameterSpec) String() string { func (*TaskOutputsSpec_OutputParameterSpec) ProtoMessage() {} func (x *TaskOutputsSpec_OutputParameterSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[67] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[69] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -4083,24 +4151,21 @@ func (x *TaskOutputsSpec_OutputParameterSpec) GetType() PrimitiveType_PrimitiveT } type PipelineTaskSpec_CachingOptions struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Whether or not to enable cache for this task. Defaults to false. EnableCache bool `protobuf:"varint,1,opt,name=enable_cache,json=enableCache,proto3" json:"enable_cache,omitempty"` // Customized cache key for this task. If set, the cache_key will be used // as the key for the task's cache. - CacheKey string `protobuf:"bytes,2,opt,name=cache_key,json=cacheKey,proto3" json:"cache_key,omitempty"` + CacheKey string `protobuf:"bytes,2,opt,name=cache_key,json=cacheKey,proto3" json:"cache_key,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineTaskSpec_CachingOptions) Reset() { *x = PipelineTaskSpec_CachingOptions{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[72] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[74] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineTaskSpec_CachingOptions) String() string { @@ -4110,8 +4175,8 @@ func (x *PipelineTaskSpec_CachingOptions) String() string { func (*PipelineTaskSpec_CachingOptions) ProtoMessage() {} func (x *PipelineTaskSpec_CachingOptions) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[72] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[74] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -4123,7 +4188,7 @@ func (x *PipelineTaskSpec_CachingOptions) ProtoReflect() protoreflect.Message { // Deprecated: Use PipelineTaskSpec_CachingOptions.ProtoReflect.Descriptor instead. 
func (*PipelineTaskSpec_CachingOptions) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{11, 0} + return file_pipeline_spec_proto_rawDescGZIP(), []int{13, 0} } func (x *PipelineTaskSpec_CachingOptions) GetEnableCache() bool { @@ -4143,10 +4208,7 @@ func (x *PipelineTaskSpec_CachingOptions) GetCacheKey() string { // Trigger policy defines how the task gets triggered. If a task is not // triggered, it will run into SKIPPED state. type PipelineTaskSpec_TriggerPolicy struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // An expression which will be evaluated into a boolean value. True to // trigger the task to run. The expression follows the language of // [CEL Spec][https://github.com/google/cel-spec]. It can access the data @@ -4160,16 +4222,16 @@ type PipelineTaskSpec_TriggerPolicy struct { // the `strategy` is meet. // Unset or set to default value of TRIGGER_STRATEGY_UNDEFINED behaves the // same as ALL_UPSTREAM_TASKS_SUCCEEDED. - Strategy PipelineTaskSpec_TriggerPolicy_TriggerStrategy `protobuf:"varint,2,opt,name=strategy,proto3,enum=ml_pipelines.PipelineTaskSpec_TriggerPolicy_TriggerStrategy" json:"strategy,omitempty"` + Strategy PipelineTaskSpec_TriggerPolicy_TriggerStrategy `protobuf:"varint,2,opt,name=strategy,proto3,enum=ml_pipelines.PipelineTaskSpec_TriggerPolicy_TriggerStrategy" json:"strategy,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineTaskSpec_TriggerPolicy) Reset() { *x = PipelineTaskSpec_TriggerPolicy{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[73] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[75] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineTaskSpec_TriggerPolicy) String() string { @@ -4179,8 +4241,8 @@ func (x *PipelineTaskSpec_TriggerPolicy) String() string { func (*PipelineTaskSpec_TriggerPolicy) ProtoMessage() {} func (x *PipelineTaskSpec_TriggerPolicy) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[73] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[75] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -4192,7 +4254,7 @@ func (x *PipelineTaskSpec_TriggerPolicy) ProtoReflect() protoreflect.Message { // Deprecated: Use PipelineTaskSpec_TriggerPolicy.ProtoReflect.Descriptor instead. func (*PipelineTaskSpec_TriggerPolicy) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{11, 1} + return file_pipeline_spec_proto_rawDescGZIP(), []int{13, 1} } func (x *PipelineTaskSpec_TriggerPolicy) GetCondition() string { @@ -4211,10 +4273,7 @@ func (x *PipelineTaskSpec_TriggerPolicy) GetStrategy() PipelineTaskSpec_TriggerP // User-configured task-level retry. type PipelineTaskSpec_RetryPolicy struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Number of retries before considering a task as failed. Set to 0 or // unspecified to disallow retry." 
MaxRetryCount int32 `protobuf:"varint,1,opt,name=max_retry_count,json=maxRetryCount,proto3" json:"max_retry_count,omitempty"` @@ -4227,15 +4286,15 @@ type PipelineTaskSpec_RetryPolicy struct { // the backoff strategy. Max allowed is 1 hour - higher value will be capped // to this limit. If unspecified, will set to 1 hour. BackoffMaxDuration *durationpb.Duration `protobuf:"bytes,4,opt,name=backoff_max_duration,json=backoffMaxDuration,proto3" json:"backoff_max_duration,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineTaskSpec_RetryPolicy) Reset() { *x = PipelineTaskSpec_RetryPolicy{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[74] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[76] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineTaskSpec_RetryPolicy) String() string { @@ -4245,8 +4304,8 @@ func (x *PipelineTaskSpec_RetryPolicy) String() string { func (*PipelineTaskSpec_RetryPolicy) ProtoMessage() {} func (x *PipelineTaskSpec_RetryPolicy) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[74] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[76] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -4258,7 +4317,7 @@ func (x *PipelineTaskSpec_RetryPolicy) ProtoReflect() protoreflect.Message { // Deprecated: Use PipelineTaskSpec_RetryPolicy.ProtoReflect.Descriptor instead. func (*PipelineTaskSpec_RetryPolicy) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{11, 2} + return file_pipeline_spec_proto_rawDescGZIP(), []int{13, 2} } func (x *PipelineTaskSpec_RetryPolicy) GetMaxRetryCount() int32 { @@ -4291,23 +4350,20 @@ func (x *PipelineTaskSpec_RetryPolicy) GetBackoffMaxDuration() *durationpb.Durat // Iterator related settings. type PipelineTaskSpec_IteratorPolicy struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The limit for the number of concurrent sub-tasks spawned by an iterator // task. The value should be a non-negative integer. A value of 0 represents // unconstrained parallelism. 
ParallelismLimit int32 `protobuf:"varint,1,opt,name=parallelism_limit,json=parallelismLimit,proto3" json:"parallelism_limit,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineTaskSpec_IteratorPolicy) Reset() { *x = PipelineTaskSpec_IteratorPolicy{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[75] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[77] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineTaskSpec_IteratorPolicy) String() string { @@ -4317,8 +4373,8 @@ func (x *PipelineTaskSpec_IteratorPolicy) String() string { func (*PipelineTaskSpec_IteratorPolicy) ProtoMessage() {} func (x *PipelineTaskSpec_IteratorPolicy) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[75] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[77] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -4330,7 +4386,7 @@ func (x *PipelineTaskSpec_IteratorPolicy) ProtoReflect() protoreflect.Message { // Deprecated: Use PipelineTaskSpec_IteratorPolicy.ProtoReflect.Descriptor instead. func (*PipelineTaskSpec_IteratorPolicy) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{11, 3} + return file_pipeline_spec_proto_rawDescGZIP(), []int{13, 3} } func (x *PipelineTaskSpec_IteratorPolicy) GetParallelismLimit() int32 { @@ -4345,21 +4401,18 @@ func (x *PipelineTaskSpec_IteratorPolicy) GetParallelismLimit() int32 { // the collection and pass the item as a new input artifact channel as // specified by [item_input][]. type ArtifactIteratorSpec_ItemsSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The name of the input artifact. InputArtifact string `protobuf:"bytes,1,opt,name=input_artifact,json=inputArtifact,proto3" json:"input_artifact,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ArtifactIteratorSpec_ItemsSpec) Reset() { *x = ArtifactIteratorSpec_ItemsSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[76] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[78] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ArtifactIteratorSpec_ItemsSpec) String() string { @@ -4369,8 +4422,8 @@ func (x *ArtifactIteratorSpec_ItemsSpec) String() string { func (*ArtifactIteratorSpec_ItemsSpec) ProtoMessage() {} func (x *ArtifactIteratorSpec_ItemsSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[76] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[78] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -4382,7 +4435,7 @@ func (x *ArtifactIteratorSpec_ItemsSpec) ProtoReflect() protoreflect.Message { // Deprecated: Use ArtifactIteratorSpec_ItemsSpec.ProtoReflect.Descriptor instead. 
func (*ArtifactIteratorSpec_ItemsSpec) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{12, 0} + return file_pipeline_spec_proto_rawDescGZIP(), []int{14, 0} } func (x *ArtifactIteratorSpec_ItemsSpec) GetInputArtifact() string { @@ -4394,28 +4447,25 @@ func (x *ArtifactIteratorSpec_ItemsSpec) GetInputArtifact() string { // Specifies the spec to describe the parameter items to iterate. type ParameterIteratorSpec_ItemsSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Specifies where to get the collection of items to iterate. The iterator // will create a sub-task for each item of the collection and pass the item // as a new input parameter as specified by [item_input][]. // - // Types that are assignable to Kind: + // Types that are valid to be assigned to Kind: // // *ParameterIteratorSpec_ItemsSpec_Raw // *ParameterIteratorSpec_ItemsSpec_InputParameter - Kind isParameterIteratorSpec_ItemsSpec_Kind `protobuf_oneof:"kind"` + Kind isParameterIteratorSpec_ItemsSpec_Kind `protobuf_oneof:"kind"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ParameterIteratorSpec_ItemsSpec) Reset() { *x = ParameterIteratorSpec_ItemsSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[77] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[79] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ParameterIteratorSpec_ItemsSpec) String() string { @@ -4425,8 +4475,8 @@ func (x *ParameterIteratorSpec_ItemsSpec) String() string { func (*ParameterIteratorSpec_ItemsSpec) ProtoMessage() {} func (x *ParameterIteratorSpec_ItemsSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[77] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[79] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -4438,26 +4488,30 @@ func (x *ParameterIteratorSpec_ItemsSpec) ProtoReflect() protoreflect.Message { // Deprecated: Use ParameterIteratorSpec_ItemsSpec.ProtoReflect.Descriptor instead. func (*ParameterIteratorSpec_ItemsSpec) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{13, 0} + return file_pipeline_spec_proto_rawDescGZIP(), []int{15, 0} } -func (m *ParameterIteratorSpec_ItemsSpec) GetKind() isParameterIteratorSpec_ItemsSpec_Kind { - if m != nil { - return m.Kind +func (x *ParameterIteratorSpec_ItemsSpec) GetKind() isParameterIteratorSpec_ItemsSpec_Kind { + if x != nil { + return x.Kind } return nil } func (x *ParameterIteratorSpec_ItemsSpec) GetRaw() string { - if x, ok := x.GetKind().(*ParameterIteratorSpec_ItemsSpec_Raw); ok { - return x.Raw + if x != nil { + if x, ok := x.Kind.(*ParameterIteratorSpec_ItemsSpec_Raw); ok { + return x.Raw + } } return "" } func (x *ParameterIteratorSpec_ItemsSpec) GetInputParameter() string { - if x, ok := x.GetKind().(*ParameterIteratorSpec_ItemsSpec_InputParameter); ok { - return x.InputParameter + if x != nil { + if x, ok := x.Kind.(*ParameterIteratorSpec_ItemsSpec_InputParameter); ok { + return x.InputParameter + } } return "" } @@ -4487,10 +4541,7 @@ func (*ParameterIteratorSpec_ItemsSpec_InputParameter) isParameterIteratorSpec_I // defined in [ExecutorInput](). 
The output of the container follows the // contract of [ExecutorOutput](). type PipelineDeploymentConfig_PipelineContainerSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The image uri of the container. Image string `protobuf:"bytes,1,opt,name=image,proto3" json:"image,omitempty"` // The main entrypoint commands of the container to run. If not provided, @@ -4502,16 +4553,16 @@ type PipelineDeploymentConfig_PipelineContainerSpec struct { Lifecycle *PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle `protobuf:"bytes,4,opt,name=lifecycle,proto3" json:"lifecycle,omitempty"` Resources *PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec `protobuf:"bytes,5,opt,name=resources,proto3" json:"resources,omitempty"` // Environment variables to be passed to the container. - Env []*PipelineDeploymentConfig_PipelineContainerSpec_EnvVar `protobuf:"bytes,6,rep,name=env,proto3" json:"env,omitempty"` + Env []*PipelineDeploymentConfig_PipelineContainerSpec_EnvVar `protobuf:"bytes,6,rep,name=env,proto3" json:"env,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineDeploymentConfig_PipelineContainerSpec) Reset() { *x = PipelineDeploymentConfig_PipelineContainerSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[78] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[80] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineDeploymentConfig_PipelineContainerSpec) String() string { @@ -4521,8 +4572,8 @@ func (x *PipelineDeploymentConfig_PipelineContainerSpec) String() string { func (*PipelineDeploymentConfig_PipelineContainerSpec) ProtoMessage() {} func (x *PipelineDeploymentConfig_PipelineContainerSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[78] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[80] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -4534,7 +4585,7 @@ func (x *PipelineDeploymentConfig_PipelineContainerSpec) ProtoReflect() protoref // Deprecated: Use PipelineDeploymentConfig_PipelineContainerSpec.ProtoReflect.Descriptor instead. func (*PipelineDeploymentConfig_PipelineContainerSpec) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{19, 0} + return file_pipeline_spec_proto_rawDescGZIP(), []int{21, 0} } func (x *PipelineDeploymentConfig_PipelineContainerSpec) GetImage() string { @@ -4581,10 +4632,7 @@ func (x *PipelineDeploymentConfig_PipelineContainerSpec) GetEnv() []*PipelineDep // The specification to import or reimport a new artifact to the pipeline. type PipelineDeploymentConfig_ImporterSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The URI of the artifact. ArtifactUri *ValueOrRuntimeParameter `protobuf:"bytes,1,opt,name=artifact_uri,json=artifactUri,proto3" json:"artifact_uri,omitempty"` // The type of the artifact. @@ -4593,25 +4641,25 @@ type PipelineDeploymentConfig_ImporterSpec struct { // Deprecated. Use [ImporterSpec.metadata][] instead. // // Deprecated: Marked as deprecated in pipeline_spec.proto. 
- Properties map[string]*ValueOrRuntimeParameter `protobuf:"bytes,3,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Properties map[string]*ValueOrRuntimeParameter `protobuf:"bytes,3,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // The custom properties of the artifact. // Deprecated. Use [ImporterSpec.metadata][] instead. // // Deprecated: Marked as deprecated in pipeline_spec.proto. - CustomProperties map[string]*ValueOrRuntimeParameter `protobuf:"bytes,4,rep,name=custom_properties,json=customProperties,proto3" json:"custom_properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + CustomProperties map[string]*ValueOrRuntimeParameter `protobuf:"bytes,4,rep,name=custom_properties,json=customProperties,proto3" json:"custom_properties,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // Properties of the Artifact. Metadata *structpb.Struct `protobuf:"bytes,6,opt,name=metadata,proto3" json:"metadata,omitempty"` // Whether or not import an artifact regardless it has been imported before. - Reimport bool `protobuf:"varint,5,opt,name=reimport,proto3" json:"reimport,omitempty"` + Reimport bool `protobuf:"varint,5,opt,name=reimport,proto3" json:"reimport,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineDeploymentConfig_ImporterSpec) Reset() { *x = PipelineDeploymentConfig_ImporterSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[79] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[81] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineDeploymentConfig_ImporterSpec) String() string { @@ -4621,8 +4669,8 @@ func (x *PipelineDeploymentConfig_ImporterSpec) String() string { func (*PipelineDeploymentConfig_ImporterSpec) ProtoMessage() {} func (x *PipelineDeploymentConfig_ImporterSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[79] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[81] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -4634,7 +4682,7 @@ func (x *PipelineDeploymentConfig_ImporterSpec) ProtoReflect() protoreflect.Mess // Deprecated: Use PipelineDeploymentConfig_ImporterSpec.ProtoReflect.Descriptor instead. func (*PipelineDeploymentConfig_ImporterSpec) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{19, 1} + return file_pipeline_spec_proto_rawDescGZIP(), []int{21, 1} } func (x *PipelineDeploymentConfig_ImporterSpec) GetArtifactUri() *ValueOrRuntimeParameter { @@ -4685,24 +4733,21 @@ func (x *PipelineDeploymentConfig_ImporterSpec) GetReimport() bool { // to the pipeline as output artifacts of the resolver task. The downstream // tasks can consume them as their input artifacts. type PipelineDeploymentConfig_ResolverSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // A list of resolver output definitions. 
The // key of the map must be exactly the same as // the keys in the [PipelineTaskOutputsSpec.artifacts][] map. // At least one output must be defined. - OutputArtifactQueries map[string]*PipelineDeploymentConfig_ResolverSpec_ArtifactQuerySpec `protobuf:"bytes,1,rep,name=output_artifact_queries,json=outputArtifactQueries,proto3" json:"output_artifact_queries,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + OutputArtifactQueries map[string]*PipelineDeploymentConfig_ResolverSpec_ArtifactQuerySpec `protobuf:"bytes,1,rep,name=output_artifact_queries,json=outputArtifactQueries,proto3" json:"output_artifact_queries,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineDeploymentConfig_ResolverSpec) Reset() { *x = PipelineDeploymentConfig_ResolverSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[80] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[82] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineDeploymentConfig_ResolverSpec) String() string { @@ -4712,8 +4757,8 @@ func (x *PipelineDeploymentConfig_ResolverSpec) String() string { func (*PipelineDeploymentConfig_ResolverSpec) ProtoMessage() {} func (x *PipelineDeploymentConfig_ResolverSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[80] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[82] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -4725,7 +4770,7 @@ func (x *PipelineDeploymentConfig_ResolverSpec) ProtoReflect() protoreflect.Mess // Deprecated: Use PipelineDeploymentConfig_ResolverSpec.ProtoReflect.Descriptor instead. func (*PipelineDeploymentConfig_ResolverSpec) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{19, 2} + return file_pipeline_spec_proto_rawDescGZIP(), []int{21, 2} } func (x *PipelineDeploymentConfig_ResolverSpec) GetOutputArtifactQueries() map[string]*PipelineDeploymentConfig_ResolverSpec_ArtifactQuerySpec { @@ -4737,10 +4782,7 @@ func (x *PipelineDeploymentConfig_ResolverSpec) GetOutputArtifactQueries() map[s // Deprecated: Marked as deprecated in pipeline_spec.proto. type PipelineDeploymentConfig_AIPlatformCustomJobSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // API Specification for invoking a Google Cloud AI Platform CustomJob. // The fields must match the field names and structures of CustomJob // defined in @@ -4749,16 +4791,16 @@ type PipelineDeploymentConfig_AIPlatformCustomJobSpec struct { // string based placeholder contract defined in [ExecutorInput](). The // placeholders will be replaced with the actual value during the runtime // before the job is launched. 
- CustomJob *structpb.Struct `protobuf:"bytes,1,opt,name=custom_job,json=customJob,proto3" json:"custom_job,omitempty"` + CustomJob *structpb.Struct `protobuf:"bytes,1,opt,name=custom_job,json=customJob,proto3" json:"custom_job,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineDeploymentConfig_AIPlatformCustomJobSpec) Reset() { *x = PipelineDeploymentConfig_AIPlatformCustomJobSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[81] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[83] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineDeploymentConfig_AIPlatformCustomJobSpec) String() string { @@ -4768,8 +4810,8 @@ func (x *PipelineDeploymentConfig_AIPlatformCustomJobSpec) String() string { func (*PipelineDeploymentConfig_AIPlatformCustomJobSpec) ProtoMessage() {} func (x *PipelineDeploymentConfig_AIPlatformCustomJobSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[81] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[83] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -4781,7 +4823,7 @@ func (x *PipelineDeploymentConfig_AIPlatformCustomJobSpec) ProtoReflect() protor // Deprecated: Use PipelineDeploymentConfig_AIPlatformCustomJobSpec.ProtoReflect.Descriptor instead. func (*PipelineDeploymentConfig_AIPlatformCustomJobSpec) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{19, 3} + return file_pipeline_spec_proto_rawDescGZIP(), []int{21, 3} } func (x *PipelineDeploymentConfig_AIPlatformCustomJobSpec) GetCustomJob() *structpb.Struct { @@ -4793,26 +4835,23 @@ func (x *PipelineDeploymentConfig_AIPlatformCustomJobSpec) GetCustomJob() *struc // The specification of the executor. 
type PipelineDeploymentConfig_ExecutorSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Spec: + state protoimpl.MessageState `protogen:"open.v1"` + // Types that are valid to be assigned to Spec: // // *PipelineDeploymentConfig_ExecutorSpec_Container // *PipelineDeploymentConfig_ExecutorSpec_Importer // *PipelineDeploymentConfig_ExecutorSpec_Resolver // *PipelineDeploymentConfig_ExecutorSpec_CustomJob - Spec isPipelineDeploymentConfig_ExecutorSpec_Spec `protobuf_oneof:"spec"` + Spec isPipelineDeploymentConfig_ExecutorSpec_Spec `protobuf_oneof:"spec"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineDeploymentConfig_ExecutorSpec) Reset() { *x = PipelineDeploymentConfig_ExecutorSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[82] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[84] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineDeploymentConfig_ExecutorSpec) String() string { @@ -4822,8 +4861,8 @@ func (x *PipelineDeploymentConfig_ExecutorSpec) String() string { func (*PipelineDeploymentConfig_ExecutorSpec) ProtoMessage() {} func (x *PipelineDeploymentConfig_ExecutorSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[82] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[84] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -4835,41 +4874,49 @@ func (x *PipelineDeploymentConfig_ExecutorSpec) ProtoReflect() protoreflect.Mess // Deprecated: Use PipelineDeploymentConfig_ExecutorSpec.ProtoReflect.Descriptor instead. func (*PipelineDeploymentConfig_ExecutorSpec) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{19, 4} + return file_pipeline_spec_proto_rawDescGZIP(), []int{21, 4} } -func (m *PipelineDeploymentConfig_ExecutorSpec) GetSpec() isPipelineDeploymentConfig_ExecutorSpec_Spec { - if m != nil { - return m.Spec +func (x *PipelineDeploymentConfig_ExecutorSpec) GetSpec() isPipelineDeploymentConfig_ExecutorSpec_Spec { + if x != nil { + return x.Spec } return nil } func (x *PipelineDeploymentConfig_ExecutorSpec) GetContainer() *PipelineDeploymentConfig_PipelineContainerSpec { - if x, ok := x.GetSpec().(*PipelineDeploymentConfig_ExecutorSpec_Container); ok { - return x.Container + if x != nil { + if x, ok := x.Spec.(*PipelineDeploymentConfig_ExecutorSpec_Container); ok { + return x.Container + } } return nil } func (x *PipelineDeploymentConfig_ExecutorSpec) GetImporter() *PipelineDeploymentConfig_ImporterSpec { - if x, ok := x.GetSpec().(*PipelineDeploymentConfig_ExecutorSpec_Importer); ok { - return x.Importer + if x != nil { + if x, ok := x.Spec.(*PipelineDeploymentConfig_ExecutorSpec_Importer); ok { + return x.Importer + } } return nil } func (x *PipelineDeploymentConfig_ExecutorSpec) GetResolver() *PipelineDeploymentConfig_ResolverSpec { - if x, ok := x.GetSpec().(*PipelineDeploymentConfig_ExecutorSpec_Resolver); ok { - return x.Resolver + if x != nil { + if x, ok := x.Spec.(*PipelineDeploymentConfig_ExecutorSpec_Resolver); ok { + return x.Resolver + } } return nil } // Deprecated: Marked as deprecated in pipeline_spec.proto. 
func (x *PipelineDeploymentConfig_ExecutorSpec) GetCustomJob() *PipelineDeploymentConfig_AIPlatformCustomJobSpec { - if x, ok := x.GetSpec().(*PipelineDeploymentConfig_ExecutorSpec_CustomJob); ok { - return x.CustomJob + if x != nil { + if x, ok := x.Spec.(*PipelineDeploymentConfig_ExecutorSpec_CustomJob); ok { + return x.CustomJob + } } return nil } @@ -4918,24 +4965,21 @@ func (*PipelineDeploymentConfig_ExecutorSpec_CustomJob) isPipelineDeploymentConf // (-- TODO(b/165323565): add more documentation on caching and lifecycle // hooks. --) type PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // This hook is invoked before caching check. It can change the properties // of the execution and output artifacts before they are used to compute // the cache key. The updated metadata will be passed into the main // container entrypoint. PreCacheCheck *PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle_Exec `protobuf:"bytes,1,opt,name=pre_cache_check,json=preCacheCheck,proto3" json:"pre_cache_check,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle) Reset() { *x = PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[84] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[86] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle) String() string { @@ -4945,8 +4989,8 @@ func (x *PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle) String() stri func (*PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle) ProtoMessage() {} func (x *PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[84] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[86] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -4958,7 +5002,7 @@ func (x *PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle) ProtoReflect( // Deprecated: Use PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle.ProtoReflect.Descriptor instead. func (*PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{19, 0, 0} + return file_pipeline_spec_proto_rawDescGZIP(), []int{21, 0, 0} } func (x *PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle) GetPreCacheCheck() *PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle_Exec { @@ -4972,10 +5016,7 @@ func (x *PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle) GetPreCacheCh // This can include specification of vCPU, memory requirements, as well as // accelerator types and counts. type PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The limit of the number of vCPU cores. This container execution needs // at most cpu_limit vCPU to run. // Deprecated. Use [ResourceSpec.resource_cpu_limit] instead. 
@@ -5017,15 +5058,15 @@ type PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec struct { // values and placeholders. ResourceMemoryRequest string `protobuf:"bytes,10,opt,name=resource_memory_request,json=resourceMemoryRequest,proto3" json:"resource_memory_request,omitempty"` Accelerator *PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec_AcceleratorConfig `protobuf:"bytes,3,opt,name=accelerator,proto3" json:"accelerator,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec) Reset() { *x = PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[85] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[87] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec) String() string { @@ -5035,8 +5076,8 @@ func (x *PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec) String() s func (*PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec) ProtoMessage() {} func (x *PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[85] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[87] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -5048,7 +5089,7 @@ func (x *PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec) ProtoRefle // Deprecated: Use PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec.ProtoReflect.Descriptor instead. func (*PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{19, 0, 1} + return file_pipeline_spec_proto_rawDescGZIP(), []int{21, 0, 1} } // Deprecated: Marked as deprecated in pipeline_spec.proto. @@ -5121,10 +5162,7 @@ func (x *PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec) GetAcceler // Environment variables to be passed to the container. // Represents an environment variable present in a container. type PipelineDeploymentConfig_PipelineContainerSpec_EnvVar struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Name of the environment variable. Must be a valid C identifier. It can // be composed of characters such as uppercase, lowercase characters, // underscore, digits, but the leading character should be either a @@ -5137,16 +5175,16 @@ type PipelineDeploymentConfig_PipelineContainerSpec_EnvVar struct { // will be unchanged. The $(VAR_NAME) syntax can be escaped with a double // $$, ie: $$(VAR_NAME). Escaped references will never be expanded, // regardless of whether the variable exists or not. 
- Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineDeploymentConfig_PipelineContainerSpec_EnvVar) Reset() { *x = PipelineDeploymentConfig_PipelineContainerSpec_EnvVar{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[86] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[88] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineDeploymentConfig_PipelineContainerSpec_EnvVar) String() string { @@ -5156,8 +5194,8 @@ func (x *PipelineDeploymentConfig_PipelineContainerSpec_EnvVar) String() string func (*PipelineDeploymentConfig_PipelineContainerSpec_EnvVar) ProtoMessage() {} func (x *PipelineDeploymentConfig_PipelineContainerSpec_EnvVar) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[86] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[88] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -5169,7 +5207,7 @@ func (x *PipelineDeploymentConfig_PipelineContainerSpec_EnvVar) ProtoReflect() p // Deprecated: Use PipelineDeploymentConfig_PipelineContainerSpec_EnvVar.ProtoReflect.Descriptor instead. func (*PipelineDeploymentConfig_PipelineContainerSpec_EnvVar) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{19, 0, 2} + return file_pipeline_spec_proto_rawDescGZIP(), []int{21, 0, 2} } func (x *PipelineDeploymentConfig_PipelineContainerSpec_EnvVar) GetName() string { @@ -5188,23 +5226,20 @@ func (x *PipelineDeploymentConfig_PipelineContainerSpec_EnvVar) GetValue() strin // The command and args to execute a program. type PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle_Exec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The command of the exec program. Command []string `protobuf:"bytes,2,rep,name=command,proto3" json:"command,omitempty"` // The args of the exec program. 
- Args []string `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"` + Args []string `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle_Exec) Reset() { *x = PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle_Exec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[87] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[89] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle_Exec) String() string { @@ -5214,8 +5249,8 @@ func (x *PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle_Exec) String() func (*PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle_Exec) ProtoMessage() {} func (x *PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle_Exec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[87] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[89] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -5227,7 +5262,7 @@ func (x *PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle_Exec) ProtoRef // Deprecated: Use PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle_Exec.ProtoReflect.Descriptor instead. func (*PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle_Exec) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{19, 0, 0, 0} + return file_pipeline_spec_proto_rawDescGZIP(), []int{21, 0, 0, 0} } func (x *PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle_Exec) GetCommand() []string { @@ -5246,10 +5281,7 @@ func (x *PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle_Exec) GetArgs( // The specification on the accelerators being attached to this container. type PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec_AcceleratorConfig struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The type of accelerators. // Deprecated. Use [ResourceSpec.AcceleratorConfig.resource_type] // instead. @@ -5268,15 +5300,15 @@ type PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec_AcceleratorConf // The number of accelerators. Handles static values and // placeholders. 
ResourceCount string `protobuf:"bytes,4,opt,name=resource_count,json=resourceCount,proto3" json:"resource_count,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec_AcceleratorConfig) Reset() { *x = PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec_AcceleratorConfig{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[88] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[90] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec_AcceleratorConfig) String() string { @@ -5287,8 +5319,8 @@ func (*PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec_AcceleratorCo } func (x *PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec_AcceleratorConfig) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[88] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[90] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -5300,7 +5332,7 @@ func (x *PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec_Accelerator // Deprecated: Use PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec_AcceleratorConfig.ProtoReflect.Descriptor instead. func (*PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec_AcceleratorConfig) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{19, 0, 1, 0} + return file_pipeline_spec_proto_rawDescGZIP(), []int{21, 0, 1, 0} } // Deprecated: Marked as deprecated in pipeline_spec.proto. @@ -5335,10 +5367,7 @@ func (x *PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec_Accelerator // The query to fetch artifacts. type PipelineDeploymentConfig_ResolverSpec_ArtifactQuerySpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The filter of the artifact query. The supported syntax are: // - `in_context("")` // - `artifact_type=""` @@ -5351,16 +5380,16 @@ type PipelineDeploymentConfig_ResolverSpec_ArtifactQuerySpec struct { Filter string `protobuf:"bytes,1,opt,name=filter,proto3" json:"filter,omitempty"` // The maximum number of the artifacts to be returned from the // query. If not defined, the default limit is `1`. 
- Limit int32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"` + Limit int32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineDeploymentConfig_ResolverSpec_ArtifactQuerySpec) Reset() { *x = PipelineDeploymentConfig_ResolverSpec_ArtifactQuerySpec{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[91] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[93] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineDeploymentConfig_ResolverSpec_ArtifactQuerySpec) String() string { @@ -5370,8 +5399,8 @@ func (x *PipelineDeploymentConfig_ResolverSpec_ArtifactQuerySpec) String() strin func (*PipelineDeploymentConfig_ResolverSpec_ArtifactQuerySpec) ProtoMessage() {} func (x *PipelineDeploymentConfig_ResolverSpec_ArtifactQuerySpec) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[91] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[93] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -5383,7 +5412,7 @@ func (x *PipelineDeploymentConfig_ResolverSpec_ArtifactQuerySpec) ProtoReflect() // Deprecated: Use PipelineDeploymentConfig_ResolverSpec_ArtifactQuerySpec.ProtoReflect.Descriptor instead. func (*PipelineDeploymentConfig_ResolverSpec_ArtifactQuerySpec) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{19, 2, 0} + return file_pipeline_spec_proto_rawDescGZIP(), []int{21, 2, 0} } func (x *PipelineDeploymentConfig_ResolverSpec_ArtifactQuerySpec) GetFilter() string { @@ -5402,28 +5431,25 @@ func (x *PipelineDeploymentConfig_ResolverSpec_ArtifactQuerySpec) GetLimit() int // The runtime inputs data of the execution. type ExecutorInput_Inputs struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Input parameters of the execution. // Deprecated. Use [ExecutorInput.Inputs.parameter_values][] instead. // // Deprecated: Marked as deprecated in pipeline_spec.proto. - Parameters map[string]*Value `protobuf:"bytes,1,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Parameters map[string]*Value `protobuf:"bytes,1,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // Input artifacts of the execution. - Artifacts map[string]*ArtifactList `protobuf:"bytes,2,rep,name=artifacts,proto3" json:"artifacts,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Artifacts map[string]*ArtifactList `protobuf:"bytes,2,rep,name=artifacts,proto3" json:"artifacts,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // Input parameters of the execution. 
- ParameterValues map[string]*structpb.Value `protobuf:"bytes,3,rep,name=parameter_values,json=parameterValues,proto3" json:"parameter_values,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + ParameterValues map[string]*structpb.Value `protobuf:"bytes,3,rep,name=parameter_values,json=parameterValues,proto3" json:"parameter_values,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ExecutorInput_Inputs) Reset() { *x = ExecutorInput_Inputs{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[95] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[97] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ExecutorInput_Inputs) String() string { @@ -5433,8 +5459,8 @@ func (x *ExecutorInput_Inputs) String() string { func (*ExecutorInput_Inputs) ProtoMessage() {} func (x *ExecutorInput_Inputs) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[95] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[97] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -5446,7 +5472,7 @@ func (x *ExecutorInput_Inputs) ProtoReflect() protoreflect.Message { // Deprecated: Use ExecutorInput_Inputs.ProtoReflect.Descriptor instead. func (*ExecutorInput_Inputs) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{23, 0} + return file_pipeline_spec_proto_rawDescGZIP(), []int{25, 0} } // Deprecated: Marked as deprecated in pipeline_spec.proto. @@ -5473,22 +5499,19 @@ func (x *ExecutorInput_Inputs) GetParameterValues() map[string]*structpb.Value { // The runtime output parameter. type ExecutorInput_OutputParameter struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The file path which is used by the executor to pass the parameter value // to the system. 
- OutputFile string `protobuf:"bytes,1,opt,name=output_file,json=outputFile,proto3" json:"output_file,omitempty"` + OutputFile string `protobuf:"bytes,1,opt,name=output_file,json=outputFile,proto3" json:"output_file,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ExecutorInput_OutputParameter) Reset() { *x = ExecutorInput_OutputParameter{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[96] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[98] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ExecutorInput_OutputParameter) String() string { @@ -5498,8 +5521,8 @@ func (x *ExecutorInput_OutputParameter) String() string { func (*ExecutorInput_OutputParameter) ProtoMessage() {} func (x *ExecutorInput_OutputParameter) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[96] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[98] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -5511,7 +5534,7 @@ func (x *ExecutorInput_OutputParameter) ProtoReflect() protoreflect.Message { // Deprecated: Use ExecutorInput_OutputParameter.ProtoReflect.Descriptor instead. func (*ExecutorInput_OutputParameter) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{23, 1} + return file_pipeline_spec_proto_rawDescGZIP(), []int{25, 1} } func (x *ExecutorInput_OutputParameter) GetOutputFile() string { @@ -5523,29 +5546,26 @@ func (x *ExecutorInput_OutputParameter) GetOutputFile() string { // The runtime outputs data of the execution. type ExecutorInput_Outputs struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The runtime output parameters. - Parameters map[string]*ExecutorInput_OutputParameter `protobuf:"bytes,1,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Parameters map[string]*ExecutorInput_OutputParameter `protobuf:"bytes,1,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // The runtime output artifacts. - Artifacts map[string]*ArtifactList `protobuf:"bytes,2,rep,name=artifacts,proto3" json:"artifacts,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Artifacts map[string]*ArtifactList `protobuf:"bytes,2,rep,name=artifacts,proto3" json:"artifacts,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // The file path of the full output metadata JSON. The schema of the output // file is [ExecutorOutput][]. // // When the full output metadata file is set by the container, the output // parameter files will be ignored. 
- OutputFile string `protobuf:"bytes,3,opt,name=output_file,json=outputFile,proto3" json:"output_file,omitempty"` + OutputFile string `protobuf:"bytes,3,opt,name=output_file,json=outputFile,proto3" json:"output_file,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ExecutorInput_Outputs) Reset() { *x = ExecutorInput_Outputs{} - if protoimpl.UnsafeEnabled { - mi := &file_pipeline_spec_proto_msgTypes[97] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_pipeline_spec_proto_msgTypes[99] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ExecutorInput_Outputs) String() string { @@ -5555,8 +5575,8 @@ func (x *ExecutorInput_Outputs) String() string { func (*ExecutorInput_Outputs) ProtoMessage() {} func (x *ExecutorInput_Outputs) ProtoReflect() protoreflect.Message { - mi := &file_pipeline_spec_proto_msgTypes[97] - if protoimpl.UnsafeEnabled && x != nil { + mi := &file_pipeline_spec_proto_msgTypes[99] + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -5568,7 +5588,7 @@ func (x *ExecutorInput_Outputs) ProtoReflect() protoreflect.Message { // Deprecated: Use ExecutorInput_Outputs.ProtoReflect.Descriptor instead. func (*ExecutorInput_Outputs) Descriptor() ([]byte, []int) { - return file_pipeline_spec_proto_rawDescGZIP(), []int{23, 2} + return file_pipeline_spec_proto_rawDescGZIP(), []int{25, 2} } func (x *ExecutorInput_Outputs) GetParameters() map[string]*ExecutorInput_OutputParameter { @@ -5594,1412 +5614,792 @@ func (x *ExecutorInput_Outputs) GetOutputFile() string { var File_pipeline_spec_proto protoreflect.FileDescriptor -var file_pipeline_spec_proto_rawDesc = []byte{ - 0x0a, 0x13, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x73, 0x1a, 0x1e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x1a, 0x17, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x72, 0x70, 0x63, 0x2f, 0x73, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xb0, 0x06, 0x0a, 0x0b, 0x50, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, - 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x21, - 0x0a, 0x0c, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, - 0x65, 0x12, 0x3c, 0x0a, 0x0d, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x73, 0x70, - 0x65, 0x63, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, - 0x74, 0x52, 0x0c, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x53, 0x70, 0x65, 0x63, 0x12, - 0x3d, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x0b, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x25, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 
0x73, 0x2e, 0x50, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x4a, 0x6f, 0x62, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, - 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x4e, - 0x0a, 0x0e, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x4a, 0x6f, - 0x62, 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, - 0x0d, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x39, - 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, - 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, - 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0xb7, 0x03, 0x0a, 0x0d, 0x52, 0x75, - 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x5b, 0x0a, 0x0a, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x37, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x4a, 0x6f, 0x62, 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69, - 0x6d, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, - 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0a, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x30, 0x0a, 0x14, 0x67, 0x63, 0x73, 0x5f, - 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x67, 0x63, 0x73, 0x4f, 0x75, 0x74, 0x70, 0x75, - 0x74, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x67, 0x0a, 0x10, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3c, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x4a, 0x6f, 0x62, 0x2e, - 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x50, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x52, 0x0f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x73, 0x1a, 0x52, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x29, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x5a, 0x0a, 0x14, 0x50, 0x61, 0x72, 0x61, 0x6d, - 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, - 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, - 0x79, 0x12, 0x2c, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, - 0x32, 
0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, - 0x75, 0x66, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, - 0x02, 0x38, 0x01, 0x4a, 0x04, 0x08, 0x03, 0x10, 0x04, 0x4a, 0x04, 0x08, 0x04, 0x10, 0x05, 0x4a, - 0x04, 0x08, 0x05, 0x10, 0x06, 0x4a, 0x04, 0x08, 0x06, 0x10, 0x07, 0x4a, 0x04, 0x08, 0x08, 0x10, - 0x09, 0x4a, 0x04, 0x08, 0x09, 0x10, 0x0a, 0x4a, 0x04, 0x08, 0x0a, 0x10, 0x0b, 0x22, 0xf8, 0x04, - 0x0a, 0x0c, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x53, 0x70, 0x65, 0x63, 0x12, 0x3f, - 0x0a, 0x0d, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x6e, 0x66, - 0x6f, 0x52, 0x0c, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, - 0x40, 0x0a, 0x0f, 0x64, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x70, - 0x65, 0x63, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, - 0x74, 0x52, 0x0e, 0x64, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x53, 0x70, 0x65, - 0x63, 0x12, 0x1f, 0x0a, 0x0b, 0x73, 0x64, 0x6b, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x64, 0x6b, 0x56, 0x65, 0x72, 0x73, 0x69, - 0x6f, 0x6e, 0x12, 0x25, 0x0a, 0x0e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x5f, 0x76, 0x65, 0x72, - 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x73, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x4a, 0x0a, 0x0a, 0x63, 0x6f, 0x6d, - 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, - 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, - 0x65, 0x6e, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x63, 0x6f, 0x6d, 0x70, 0x6f, - 0x6e, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x2f, 0x0a, 0x04, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x09, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x53, 0x70, 0x65, 0x63, - 0x52, 0x04, 0x72, 0x6f, 0x6f, 0x74, 0x12, 0x32, 0x0a, 0x15, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, - 0x74, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, - 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x1a, 0x8f, 0x01, 0x0a, 0x10, 0x52, - 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x12, - 0x41, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2d, 0x2e, - 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x72, 0x69, - 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x50, 0x72, 0x69, 0x6d, 0x69, - 0x74, 0x69, 0x76, 0x65, 0x54, 0x79, 0x70, 0x65, 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x04, 0x74, 0x79, - 0x70, 0x65, 0x12, 0x38, 0x0a, 0x0d, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x18, 0x02, 
0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, - 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x1a, 0x5a, 0x0a, 0x0f, - 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, - 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, - 0x79, 0x12, 0x31, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x1b, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, - 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x53, 0x70, 0x65, 0x63, 0x52, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xee, 0x02, 0x0a, 0x0d, 0x43, 0x6f, 0x6d, - 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x53, 0x70, 0x65, 0x63, 0x12, 0x4e, 0x0a, 0x11, 0x69, 0x6e, - 0x70, 0x75, 0x74, 0x5f, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x49, 0x6e, - 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x52, 0x10, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x44, - 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x51, 0x0a, 0x12, 0x6f, 0x75, - 0x74, 0x70, 0x75, 0x74, 0x5f, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x4f, - 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x52, 0x11, 0x6f, 0x75, 0x74, 0x70, - 0x75, 0x74, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x29, 0x0a, - 0x03, 0x64, 0x61, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x6d, 0x6c, 0x5f, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x44, 0x61, 0x67, 0x53, 0x70, 0x65, - 0x63, 0x48, 0x00, 0x52, 0x03, 0x64, 0x61, 0x67, 0x12, 0x27, 0x0a, 0x0e, 0x65, 0x78, 0x65, 0x63, - 0x75, 0x74, 0x6f, 0x72, 0x5f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, - 0x48, 0x00, 0x52, 0x0d, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x4c, 0x61, 0x62, 0x65, - 0x6c, 0x12, 0x54, 0x0a, 0x15, 0x73, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x5f, 0x70, 0x6c, 0x61, 0x74, - 0x66, 0x6f, 0x72, 0x6d, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x20, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, - 0x53, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x50, 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, 0x6d, 0x53, 0x70, - 0x65, 0x63, 0x52, 0x13, 0x73, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x50, 0x6c, 0x61, 0x74, 0x66, 0x6f, - 0x72, 0x6d, 0x53, 0x70, 0x65, 0x63, 0x73, 0x42, 0x10, 0x0a, 0x0e, 0x69, 0x6d, 0x70, 0x6c, 0x65, - 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xd3, 0x01, 0x0a, 0x07, 0x44, 0x61, - 0x67, 0x53, 0x70, 0x65, 0x63, 0x12, 0x36, 0x0a, 0x05, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x01, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x73, 0x2e, 0x44, 0x61, 0x67, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x54, 0x61, 0x73, 0x6b, - 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x05, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x12, 0x36, 0x0a, - 0x07, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x18, 
0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, - 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x44, 0x61, - 0x67, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x52, 0x07, 0x6f, 0x75, - 0x74, 0x70, 0x75, 0x74, 0x73, 0x1a, 0x58, 0x0a, 0x0a, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x45, 0x6e, - 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x34, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x54, 0x61, 0x73, 0x6b, - 0x53, 0x70, 0x65, 0x63, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, - 0xf6, 0x0a, 0x0a, 0x0e, 0x44, 0x61, 0x67, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, - 0x65, 0x63, 0x12, 0x49, 0x0a, 0x09, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x18, - 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x44, 0x61, 0x67, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, - 0x70, 0x65, 0x63, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x52, 0x09, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x12, 0x4c, 0x0a, - 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x2c, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, - 0x2e, 0x44, 0x61, 0x67, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, - 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x1a, 0x71, 0x0a, 0x14, 0x41, - 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x53, - 0x70, 0x65, 0x63, 0x12, 0x29, 0x0a, 0x10, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x65, 0x72, 0x5f, - 0x73, 0x75, 0x62, 0x74, 0x61, 0x73, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x70, - 0x72, 0x6f, 0x64, 0x75, 0x63, 0x65, 0x72, 0x53, 0x75, 0x62, 0x74, 0x61, 0x73, 0x6b, 0x12, 0x2e, - 0x0a, 0x13, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, - 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x6f, 0x75, 0x74, - 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4b, 0x65, 0x79, 0x1a, 0x79, - 0x0a, 0x15, 0x44, 0x61, 0x67, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x53, 0x70, 0x65, 0x63, 0x12, 0x60, 0x0a, 0x12, 0x61, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x2e, 0x44, 0x61, 0x67, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, - 0x63, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, - 0x6f, 0x72, 0x53, 0x70, 0x65, 0x63, 0x52, 0x11, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, - 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x1a, 0x70, 0x0a, 0x0e, 0x41, 0x72, 0x74, - 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, - 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 
0x12, 0x48, 0x0a, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x6d, - 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x44, 0x61, 0x67, 0x4f, - 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x44, 0x61, 0x67, 0x4f, 0x75, - 0x74, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x70, 0x65, 0x63, - 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x74, 0x0a, 0x15, 0x50, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, - 0x53, 0x70, 0x65, 0x63, 0x12, 0x29, 0x0a, 0x10, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x65, 0x72, - 0x5f, 0x73, 0x75, 0x62, 0x74, 0x61, 0x73, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, - 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x65, 0x72, 0x53, 0x75, 0x62, 0x74, 0x61, 0x73, 0x6b, 0x12, - 0x30, 0x0a, 0x14, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x6f, - 0x75, 0x74, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x4b, 0x65, - 0x79, 0x1a, 0x7d, 0x0a, 0x16, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x65, - 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x53, 0x70, 0x65, 0x63, 0x12, 0x63, 0x0a, 0x13, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, - 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x44, 0x61, 0x67, 0x4f, 0x75, 0x74, 0x70, 0x75, - 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x53, 0x70, 0x65, 0x63, 0x52, 0x12, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, - 0x1a, 0x8f, 0x02, 0x0a, 0x19, 0x4d, 0x61, 0x70, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x53, 0x70, 0x65, 0x63, 0x12, 0x79, - 0x0a, 0x11, 0x6d, 0x61, 0x70, 0x70, 0x65, 0x64, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, - 0x65, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x4c, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x44, 0x61, 0x67, 0x4f, 0x75, 0x74, 0x70, - 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x4d, 0x61, 0x70, 0x50, 0x61, 0x72, 0x61, 0x6d, - 0x65, 0x74, 0x65, 0x72, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x53, 0x70, 0x65, - 0x63, 0x2e, 0x4d, 0x61, 0x70, 0x70, 0x65, 0x64, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x10, 0x6d, 0x61, 0x70, 0x70, 0x65, 0x64, 0x50, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x1a, 0x77, 0x0a, 0x15, 0x4d, 0x61, 0x70, - 0x70, 0x65, 0x64, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x03, 0x6b, 0x65, 0x79, 0x12, 0x48, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x2e, 0x44, 0x61, 0x67, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, - 0x63, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x65, 0x6c, 0x65, 0x63, - 0x74, 
0x6f, 0x72, 0x53, 0x70, 0x65, 0x63, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, - 0x38, 0x01, 0x1a, 0xef, 0x01, 0x0a, 0x16, 0x44, 0x61, 0x67, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x12, 0x66, 0x0a, - 0x14, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x66, 0x72, 0x6f, 0x6d, 0x5f, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x65, 0x74, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x6d, 0x6c, - 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x44, 0x61, 0x67, 0x4f, 0x75, - 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x53, 0x70, 0x65, 0x63, 0x48, - 0x00, 0x52, 0x12, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x61, 0x72, 0x61, - 0x6d, 0x65, 0x74, 0x65, 0x72, 0x12, 0x5f, 0x0a, 0x10, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x66, - 0x72, 0x6f, 0x6d, 0x5f, 0x6f, 0x6e, 0x65, 0x6f, 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x33, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x44, - 0x61, 0x67, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x50, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, - 0x53, 0x70, 0x65, 0x63, 0x48, 0x00, 0x52, 0x0e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x46, 0x72, 0x6f, - 0x6d, 0x4f, 0x6e, 0x65, 0x6f, 0x66, 0x42, 0x06, 0x0a, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x4a, 0x04, - 0x08, 0x03, 0x10, 0x04, 0x1a, 0x72, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x49, 0x0a, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x33, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x44, 0x61, 0x67, 0x4f, 0x75, 0x74, 0x70, 0x75, - 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x44, 0x61, 0x67, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x52, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x8a, 0x07, 0x0a, 0x13, 0x43, 0x6f, 0x6d, - 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, - 0x12, 0x4e, 0x0a, 0x09, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x49, 0x6e, 0x70, 0x75, - 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x09, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, - 0x12, 0x51, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x02, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x49, 0x6e, 0x70, - 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, - 0x65, 0x72, 0x73, 0x1a, 0xc2, 0x01, 0x0a, 0x0c, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, - 0x53, 0x70, 0x65, 0x63, 0x12, 
0x45, 0x0a, 0x0d, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, - 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x6d, 0x6c, - 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x54, 0x79, 0x70, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x0c, 0x61, - 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x28, 0x0a, 0x10, 0x69, - 0x73, 0x5f, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0e, 0x69, 0x73, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, - 0x74, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x69, 0x73, 0x5f, 0x6f, 0x70, 0x74, 0x69, - 0x6f, 0x6e, 0x61, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x69, 0x73, 0x4f, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, - 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, - 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0xac, 0x02, 0x0a, 0x0d, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x12, 0x45, 0x0a, 0x04, 0x74, 0x79, - 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2d, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, - 0x65, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x50, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x54, - 0x79, 0x70, 0x65, 0x45, 0x6e, 0x75, 0x6d, 0x42, 0x02, 0x18, 0x01, 0x52, 0x04, 0x74, 0x79, 0x70, - 0x65, 0x12, 0x54, 0x0a, 0x0e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x74, - 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2d, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, - 0x65, 0x72, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x54, 0x79, 0x70, 0x65, 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x0d, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x54, 0x79, 0x70, 0x65, 0x12, 0x3b, 0x0a, 0x0d, 0x64, 0x65, 0x66, 0x61, 0x75, - 0x6c, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, - 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, - 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x69, 0x73, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x61, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x69, 0x73, 0x4f, 0x70, 0x74, - 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, - 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0x6c, 0x0a, 0x0e, 0x41, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x44, 0x0a, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x6d, 0x6c, 0x5f, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, - 0x65, 0x6e, 0x74, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x41, 0x72, - 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x70, 0x65, 
0x63, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x6e, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, - 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x45, 0x0a, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, - 0x6e, 0x74, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xf4, 0x09, 0x0a, 0x14, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, - 0x65, 0x6e, 0x74, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x12, 0x4f, - 0x0a, 0x09, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x31, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, - 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, - 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x45, - 0x6e, 0x74, 0x72, 0x79, 0x52, 0x09, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x12, - 0x52, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x02, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x4f, 0x75, 0x74, 0x70, - 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, - 0x65, 0x72, 0x73, 0x1a, 0x85, 0x05, 0x0a, 0x0c, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, - 0x53, 0x70, 0x65, 0x63, 0x12, 0x45, 0x0a, 0x0d, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, - 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x6d, 0x6c, - 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x54, 0x79, 0x70, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x0c, 0x61, - 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x63, 0x0a, 0x0a, 0x70, - 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x3f, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, - 0x70, 0x65, 0x63, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x70, 0x65, 0x63, - 0x2e, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, - 0x42, 0x02, 0x18, 0x01, 0x52, 0x0a, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, - 0x12, 0x76, 0x0a, 0x11, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x70, 0x72, 0x6f, 0x70, 0x65, - 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x45, 0x2e, 0x6d, 0x6c, - 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6f, - 0x6e, 0x65, 0x6e, 0x74, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, - 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x70, 0x65, 0x63, 0x2e, 
0x43, 0x75, 0x73, - 0x74, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x42, 0x02, 0x18, 0x01, 0x52, 0x10, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x50, 0x72, - 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x12, 0x33, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, - 0x75, 0x63, 0x74, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x28, 0x0a, - 0x10, 0x69, 0x73, 0x5f, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, 0x6c, 0x69, 0x73, - 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0e, 0x69, 0x73, 0x41, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, - 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, - 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0x64, 0x0a, 0x0f, 0x50, 0x72, 0x6f, - 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, - 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x3b, - 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, - 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x4f, 0x72, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, - 0x65, 0x74, 0x65, 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, - 0x6a, 0x0a, 0x15, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, - 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x3b, 0x0a, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x4f, 0x72, - 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0xce, 0x01, 0x0a, 0x0d, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x12, 0x45, 0x0a, - 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2d, 0x2e, 0x6d, 0x6c, - 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x72, 0x69, 0x6d, 0x69, - 0x74, 0x69, 0x76, 0x65, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x50, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, - 0x76, 0x65, 0x54, 0x79, 0x70, 0x65, 0x45, 0x6e, 0x75, 0x6d, 0x42, 0x02, 0x18, 0x01, 0x52, 0x04, - 0x74, 0x79, 0x70, 0x65, 0x12, 0x54, 0x0a, 0x0e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2d, 0x2e, 0x6d, - 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x61, 0x72, 0x61, - 0x6d, 0x65, 0x74, 0x65, 0x72, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x54, 0x79, 0x70, 0x65, 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x0d, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x54, 0x79, 0x70, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, - 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0b, 
0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0x6d, 0x0a, 0x0e, - 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, - 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, - 0x12, 0x45, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x2f, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, - 0x70, 0x65, 0x63, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x70, 0x65, 0x63, - 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x6f, 0x0a, 0x0f, 0x50, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, - 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, - 0x12, 0x46, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x30, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, - 0x70, 0x65, 0x63, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, - 0x63, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xd8, 0x0a, 0x0a, - 0x0e, 0x54, 0x61, 0x73, 0x6b, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x12, - 0x4c, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, - 0x63, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, - 0x79, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x49, 0x0a, - 0x09, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x2b, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, - 0x54, 0x61, 0x73, 0x6b, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x41, - 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x09, 0x61, - 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x1a, 0xc7, 0x02, 0x0a, 0x11, 0x49, 0x6e, 0x70, - 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x70, 0x65, 0x63, 0x12, 0x79, - 0x0a, 0x14, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x61, 0x72, - 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x45, 0x2e, 0x6d, - 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x54, 0x61, 0x73, 0x6b, - 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, - 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x54, 0x61, 0x73, - 0x6b, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, - 0x70, 0x65, 0x63, 0x48, 0x00, 0x52, 0x12, 0x74, 0x61, 0x73, 0x6b, 0x4f, 0x75, 0x74, 0x70, 0x75, - 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x12, 0x3a, 0x0a, 0x18, 0x63, 0x6f, 0x6d, - 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x61, 0x72, 0x74, - 0x69, 0x66, 0x61, 0x63, 0x74, 
0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x16, 0x63, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, - 0x69, 0x66, 0x61, 0x63, 0x74, 0x1a, 0x6d, 0x0a, 0x16, 0x54, 0x61, 0x73, 0x6b, 0x4f, 0x75, 0x74, - 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x70, 0x65, 0x63, 0x12, - 0x23, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x65, 0x72, 0x5f, 0x74, 0x61, 0x73, 0x6b, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x65, 0x72, - 0x54, 0x61, 0x73, 0x6b, 0x12, 0x2e, 0x0a, 0x13, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x61, - 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x11, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, - 0x74, 0x4b, 0x65, 0x79, 0x42, 0x06, 0x0a, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x4a, 0x04, 0x08, 0x05, - 0x10, 0x06, 0x1a, 0x84, 0x05, 0x0a, 0x12, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, - 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x12, 0x7d, 0x0a, 0x15, 0x74, 0x61, 0x73, - 0x6b, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, - 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x47, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x49, 0x6e, 0x70, 0x75, - 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, - 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x4f, 0x75, - 0x74, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, - 0x63, 0x48, 0x00, 0x52, 0x13, 0x74, 0x61, 0x73, 0x6b, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x50, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x12, 0x4c, 0x0a, 0x0d, 0x72, 0x75, 0x6e, 0x74, - 0x69, 0x6d, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x25, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x4f, 0x72, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x48, 0x00, 0x52, 0x0c, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, - 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x3c, 0x0a, 0x19, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, - 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x17, 0x63, 0x6f, 0x6d, - 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, - 0x65, 0x74, 0x65, 0x72, 0x12, 0x6d, 0x0a, 0x11, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x66, 0x69, 0x6e, - 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x3f, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x54, - 0x61, 0x73, 0x6b, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x49, 0x6e, - 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, - 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, - 0x48, 0x00, 0x52, 0x0f, 0x74, 0x61, 0x73, 0x6b, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x53, 0x74, 0x61, - 0x74, 0x75, 0x73, 0x12, 0x42, 0x0a, 0x1d, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 
0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x6c, 0x65, - 0x63, 0x74, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1b, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x65, 0x74, 0x65, 0x72, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x53, - 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x1a, 0x70, 0x0a, 0x17, 0x54, 0x61, 0x73, 0x6b, 0x4f, - 0x75, 0x74, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x70, - 0x65, 0x63, 0x12, 0x23, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x65, 0x72, 0x5f, 0x74, - 0x61, 0x73, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x70, 0x72, 0x6f, 0x64, 0x75, - 0x63, 0x65, 0x72, 0x54, 0x61, 0x73, 0x6b, 0x12, 0x30, 0x0a, 0x14, 0x6f, 0x75, 0x74, 0x70, 0x75, - 0x74, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x6b, 0x65, 0x79, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x4b, 0x65, 0x79, 0x1a, 0x36, 0x0a, 0x0f, 0x54, 0x61, 0x73, - 0x6b, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x23, 0x0a, 0x0d, - 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x65, 0x72, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0c, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x65, 0x72, 0x54, 0x61, 0x73, - 0x6b, 0x42, 0x06, 0x0a, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x1a, 0x6e, 0x0a, 0x0f, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, - 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x45, - 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, - 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x54, 0x61, 0x73, - 0x6b, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x49, 0x6e, 0x70, 0x75, - 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x52, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x6c, 0x0a, 0x0e, 0x41, 0x72, 0x74, - 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, - 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x44, 0x0a, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x6d, - 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x54, 0x61, 0x73, 0x6b, - 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, - 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x70, 0x65, 0x63, 0x52, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xef, 0x07, 0x0a, 0x0f, 0x54, 0x61, 0x73, 0x6b, - 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x12, 0x4d, 0x0a, 0x0a, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x2d, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x54, - 0x61, 0x73, 0x6b, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x50, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x4a, 0x0a, 0x09, 0x61, 0x72, - 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2c, 0x2e, - 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 
0x54, 0x61, 0x73, - 0x6b, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x41, 0x72, 0x74, - 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x09, 0x61, 0x72, 0x74, - 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x1a, 0x84, 0x04, 0x0a, 0x12, 0x4f, 0x75, 0x74, 0x70, 0x75, - 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x70, 0x65, 0x63, 0x12, 0x45, 0x0a, - 0x0d, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x73, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x54, 0x79, 0x70, 0x65, - 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x0c, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, - 0x54, 0x79, 0x70, 0x65, 0x12, 0x60, 0x0a, 0x0a, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, - 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x40, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x4f, 0x75, 0x74, 0x70, - 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x41, 0x72, - 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x50, 0x72, 0x6f, 0x70, 0x65, - 0x72, 0x74, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x70, 0x72, 0x6f, 0x70, - 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x12, 0x73, 0x0a, 0x11, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, - 0x5f, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x46, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, - 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, - 0x2e, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, - 0x70, 0x65, 0x63, 0x2e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, - 0x74, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x10, 0x63, 0x75, 0x73, 0x74, 0x6f, - 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x1a, 0x64, 0x0a, 0x0f, 0x50, - 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, - 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, - 0x12, 0x3b, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x25, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x4f, 0x72, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, - 0x01, 0x1a, 0x6a, 0x0a, 0x15, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x65, - 0x72, 0x74, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, - 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x3b, 0x0a, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x6d, 0x6c, - 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, - 0x4f, 0x72, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, - 0x65, 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x58, 0x0a, - 0x13, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x53, 
0x70, 0x65, 0x63, 0x12, 0x41, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0e, 0x32, 0x2d, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2e, 0x50, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x54, 0x79, 0x70, 0x65, 0x2e, - 0x50, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x54, 0x79, 0x70, 0x65, 0x45, 0x6e, 0x75, - 0x6d, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x1a, 0x70, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, - 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, - 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x47, 0x0a, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x6d, 0x6c, - 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x4f, - 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x4f, 0x75, 0x74, 0x70, 0x75, - 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x52, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x6e, 0x0a, 0x0e, 0x41, 0x72, 0x74, - 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, - 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x46, 0x0a, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x6d, - 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x54, 0x61, 0x73, 0x6b, - 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x4f, 0x75, 0x74, 0x70, - 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x70, 0x65, 0x63, 0x52, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x6d, 0x0a, 0x0d, 0x50, 0x72, 0x69, - 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x54, 0x79, 0x70, 0x65, 0x22, 0x58, 0x0a, 0x11, 0x50, 0x72, - 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x54, 0x79, 0x70, 0x65, 0x45, 0x6e, 0x75, 0x6d, 0x12, - 0x1e, 0x0a, 0x1a, 0x50, 0x52, 0x49, 0x4d, 0x49, 0x54, 0x49, 0x56, 0x45, 0x5f, 0x54, 0x59, 0x50, - 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, - 0x07, 0x0a, 0x03, 0x49, 0x4e, 0x54, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x44, 0x4f, 0x55, 0x42, - 0x4c, 0x45, 0x10, 0x02, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x54, 0x52, 0x49, 0x4e, 0x47, 0x10, 0x03, - 0x1a, 0x02, 0x18, 0x01, 0x3a, 0x02, 0x18, 0x01, 0x22, 0xb7, 0x01, 0x0a, 0x0d, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x54, 0x79, 0x70, 0x65, 0x22, 0xa5, 0x01, 0x0a, 0x11, 0x50, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x54, 0x79, 0x70, 0x65, 0x45, 0x6e, 0x75, 0x6d, - 0x12, 0x23, 0x0a, 0x1f, 0x50, 0x41, 0x52, 0x41, 0x4d, 0x45, 0x54, 0x45, 0x52, 0x5f, 0x54, 0x59, - 0x50, 0x45, 0x5f, 0x45, 0x4e, 0x55, 0x4d, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, - 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x11, 0x0a, 0x0d, 0x4e, 0x55, 0x4d, 0x42, 0x45, 0x52, 0x5f, - 0x44, 0x4f, 0x55, 0x42, 0x4c, 0x45, 0x10, 0x01, 0x12, 0x12, 0x0a, 0x0e, 0x4e, 0x55, 0x4d, 0x42, - 0x45, 0x52, 0x5f, 0x49, 0x4e, 0x54, 0x45, 0x47, 0x45, 0x52, 0x10, 0x02, 0x12, 0x0a, 0x0a, 0x06, - 0x53, 0x54, 0x52, 0x49, 0x4e, 0x47, 0x10, 0x03, 0x12, 0x0b, 0x0a, 0x07, 0x42, 0x4f, 0x4f, 0x4c, - 0x45, 0x41, 0x4e, 0x10, 0x04, 0x12, 0x08, 0x0a, 0x04, 0x4c, 0x49, 0x53, 0x54, 0x10, 0x05, 0x12, - 0x0a, 0x0a, 0x06, 0x53, 0x54, 0x52, 0x55, 0x43, 0x54, 0x10, 0x06, 0x12, 0x15, 0x0a, 0x11, 0x54, - 0x41, 0x53, 0x4b, 0x5f, 0x46, 
0x49, 0x4e, 0x41, 0x4c, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, - 0x10, 0x07, 0x22, 0xfe, 0x0a, 0x0a, 0x10, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x54, - 0x61, 0x73, 0x6b, 0x53, 0x70, 0x65, 0x63, 0x12, 0x3b, 0x0a, 0x09, 0x74, 0x61, 0x73, 0x6b, 0x5f, - 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x6d, 0x6c, 0x5f, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x54, 0x61, 0x73, 0x6b, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x08, 0x74, 0x61, 0x73, 0x6b, - 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x34, 0x0a, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x73, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, - 0x65, 0x63, 0x52, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x12, 0x27, 0x0a, 0x0f, 0x64, 0x65, - 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x05, 0x20, - 0x03, 0x28, 0x09, 0x52, 0x0e, 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x74, 0x54, 0x61, - 0x73, 0x6b, 0x73, 0x12, 0x56, 0x0a, 0x0f, 0x63, 0x61, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x5f, 0x6f, - 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x6d, - 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x54, 0x61, 0x73, 0x6b, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x43, 0x61, 0x63, - 0x68, 0x69, 0x6e, 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x0e, 0x63, 0x61, 0x63, - 0x68, 0x69, 0x6e, 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x3f, 0x0a, 0x0d, 0x63, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x5f, 0x72, 0x65, 0x66, 0x18, 0x07, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x66, 0x52, 0x0c, - 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x66, 0x12, 0x53, 0x0a, 0x0e, - 0x74, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x5f, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x18, 0x08, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x54, 0x61, 0x73, 0x6b, - 0x53, 0x70, 0x65, 0x63, 0x2e, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x50, 0x6f, 0x6c, 0x69, - 0x63, 0x79, 0x52, 0x0d, 0x74, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x50, 0x6f, 0x6c, 0x69, 0x63, - 0x79, 0x12, 0x51, 0x0a, 0x11, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, 0x69, 0x74, - 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x6d, - 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x41, 0x72, 0x74, 0x69, - 0x66, 0x61, 0x63, 0x74, 0x49, 0x74, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x53, 0x70, 0x65, 0x63, - 0x48, 0x00, 0x52, 0x10, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x49, 0x74, 0x65, 0x72, - 0x61, 0x74, 0x6f, 0x72, 0x12, 0x54, 0x0a, 0x12, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x5f, 0x69, 0x74, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x23, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x49, 0x74, 0x65, 0x72, 0x61, 0x74, 0x6f, - 0x72, 0x53, 0x70, 0x65, 0x63, 0x48, 0x00, 0x52, 0x11, 
0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, - 0x65, 0x72, 0x49, 0x74, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x4d, 0x0a, 0x0c, 0x72, 0x65, - 0x74, 0x72, 0x79, 0x5f, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x2a, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, - 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x54, 0x61, 0x73, 0x6b, 0x53, 0x70, 0x65, 0x63, - 0x2e, 0x52, 0x65, 0x74, 0x72, 0x79, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x52, 0x0b, 0x72, 0x65, - 0x74, 0x72, 0x79, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x12, 0x56, 0x0a, 0x0f, 0x69, 0x74, 0x65, - 0x72, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x18, 0x0c, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x54, 0x61, 0x73, 0x6b, 0x53, 0x70, - 0x65, 0x63, 0x2e, 0x49, 0x74, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x50, 0x6f, 0x6c, 0x69, 0x63, - 0x79, 0x52, 0x0e, 0x69, 0x74, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x50, 0x6f, 0x6c, 0x69, 0x63, - 0x79, 0x1a, 0x50, 0x0a, 0x0e, 0x43, 0x61, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x4f, 0x70, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x63, 0x61, - 0x63, 0x68, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, 0x65, 0x6e, 0x61, 0x62, 0x6c, - 0x65, 0x43, 0x61, 0x63, 0x68, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, - 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x63, 0x61, 0x63, 0x68, 0x65, - 0x4b, 0x65, 0x79, 0x1a, 0x80, 0x02, 0x0a, 0x0d, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x50, - 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x12, 0x1c, 0x0a, 0x09, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, - 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, - 0x69, 0x6f, 0x6e, 0x12, 0x58, 0x0a, 0x08, 0x73, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3c, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x54, 0x61, 0x73, - 0x6b, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x50, 0x6f, 0x6c, - 0x69, 0x63, 0x79, 0x2e, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x53, 0x74, 0x72, 0x61, 0x74, - 0x65, 0x67, 0x79, 0x52, 0x08, 0x73, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x22, 0x77, 0x0a, - 0x0f, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, - 0x12, 0x20, 0x0a, 0x1c, 0x54, 0x52, 0x49, 0x47, 0x47, 0x45, 0x52, 0x5f, 0x53, 0x54, 0x52, 0x41, - 0x54, 0x45, 0x47, 0x59, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, - 0x10, 0x00, 0x12, 0x20, 0x0a, 0x1c, 0x41, 0x4c, 0x4c, 0x5f, 0x55, 0x50, 0x53, 0x54, 0x52, 0x45, - 0x41, 0x4d, 0x5f, 0x54, 0x41, 0x53, 0x4b, 0x53, 0x5f, 0x53, 0x55, 0x43, 0x43, 0x45, 0x45, 0x44, - 0x45, 0x44, 0x10, 0x01, 0x12, 0x20, 0x0a, 0x1c, 0x41, 0x4c, 0x4c, 0x5f, 0x55, 0x50, 0x53, 0x54, - 0x52, 0x45, 0x41, 0x4d, 0x5f, 0x54, 0x41, 0x53, 0x4b, 0x53, 0x5f, 0x43, 0x4f, 0x4d, 0x50, 0x4c, - 0x45, 0x54, 0x45, 0x44, 0x10, 0x02, 0x1a, 0xef, 0x01, 0x0a, 0x0b, 0x52, 0x65, 0x74, 0x72, 0x79, - 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x12, 0x26, 0x0a, 0x0f, 0x6d, 0x61, 0x78, 0x5f, 0x72, 0x65, - 0x74, 0x72, 0x79, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, - 0x0d, 0x6d, 0x61, 0x78, 0x52, 0x65, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 
0x74, 0x12, 0x44, - 0x0a, 0x10, 0x62, 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x5f, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x52, 0x0f, 0x62, 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x44, 0x75, 0x72, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x25, 0x0a, 0x0e, 0x62, 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x5f, - 0x66, 0x61, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0d, 0x62, 0x61, - 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x46, 0x61, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x4b, 0x0a, 0x14, 0x62, - 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x5f, 0x6d, 0x61, 0x78, 0x5f, 0x64, 0x75, 0x72, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x12, 0x62, 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x4d, 0x61, 0x78, - 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0x3d, 0x0a, 0x0e, 0x49, 0x74, 0x65, 0x72, - 0x61, 0x74, 0x6f, 0x72, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x12, 0x2b, 0x0a, 0x11, 0x70, 0x61, - 0x72, 0x61, 0x6c, 0x6c, 0x65, 0x6c, 0x69, 0x73, 0x6d, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x10, 0x70, 0x61, 0x72, 0x61, 0x6c, 0x6c, 0x65, 0x6c, 0x69, - 0x73, 0x6d, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x42, 0x0a, 0x0a, 0x08, 0x69, 0x74, 0x65, 0x72, 0x61, - 0x74, 0x6f, 0x72, 0x22, 0xad, 0x01, 0x0a, 0x14, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, - 0x49, 0x74, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x53, 0x70, 0x65, 0x63, 0x12, 0x42, 0x0a, 0x05, - 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x6d, 0x6c, - 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x49, 0x74, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x53, 0x70, 0x65, 0x63, 0x2e, - 0x49, 0x74, 0x65, 0x6d, 0x73, 0x53, 0x70, 0x65, 0x63, 0x52, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, - 0x12, 0x1d, 0x0a, 0x0a, 0x69, 0x74, 0x65, 0x6d, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x69, 0x74, 0x65, 0x6d, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x1a, - 0x32, 0x0a, 0x09, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x53, 0x70, 0x65, 0x63, 0x12, 0x25, 0x0a, 0x0e, - 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x22, 0xcf, 0x01, 0x0a, 0x15, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x49, 0x74, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x53, 0x70, 0x65, 0x63, 0x12, 0x43, 0x0a, - 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x6d, - 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x61, 0x72, 0x61, - 0x6d, 0x65, 0x74, 0x65, 0x72, 0x49, 0x74, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x53, 0x70, 0x65, - 0x63, 0x2e, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x53, 0x70, 0x65, 0x63, 0x52, 0x05, 0x69, 0x74, 0x65, - 0x6d, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x69, 0x74, 0x65, 0x6d, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x69, 0x74, 0x65, 0x6d, 0x49, 0x6e, 0x70, 0x75, - 0x74, 0x1a, 0x52, 0x0a, 0x09, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x53, 0x70, 0x65, 0x63, 0x12, 0x12, - 0x0a, 
0x03, 0x72, 0x61, 0x77, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x03, 0x72, - 0x61, 0x77, 0x12, 0x29, 0x0a, 0x0f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x65, 0x74, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0e, 0x69, - 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x42, 0x06, 0x0a, - 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x22, 0x22, 0x0a, 0x0c, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, - 0x6e, 0x74, 0x52, 0x65, 0x66, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x67, 0x0a, 0x0c, 0x50, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, - 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x21, 0x0a, - 0x0c, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, - 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, - 0x6f, 0x6e, 0x22, 0xb8, 0x01, 0x0a, 0x12, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x54, - 0x79, 0x70, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x23, 0x0a, 0x0c, 0x73, 0x63, 0x68, - 0x65, 0x6d, 0x61, 0x5f, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, - 0x00, 0x52, 0x0b, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x69, 0x74, 0x6c, 0x65, 0x12, 0x23, - 0x0a, 0x0a, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x48, 0x00, 0x52, 0x09, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x55, 0x72, 0x69, 0x12, 0x29, 0x0a, 0x0f, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x5f, - 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0e, - 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x25, - 0x0a, 0x0e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x56, 0x65, - 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x42, 0x06, 0x0a, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x22, 0x26, 0x0a, - 0x10, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x54, 0x61, 0x73, 0x6b, 0x49, 0x6e, 0x66, - 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0xc9, 0x01, 0x0a, 0x17, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x4f, - 0x72, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x12, 0x40, 0x0a, 0x0e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x5f, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x02, - 0x18, 0x01, 0x48, 0x00, 0x52, 0x0d, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x12, 0x2d, 0x0a, 0x11, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, - 0x52, 0x10, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, - 0x65, 0x72, 0x12, 0x34, 0x0a, 
0x08, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x48, 0x00, 0x52, 0x08, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x42, 0x07, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x22, 0xcf, 0x17, 0x0a, 0x18, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x44, 0x65, - 0x70, 0x6c, 0x6f, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x53, - 0x0a, 0x09, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x35, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, - 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x44, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x6d, - 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, - 0x6f, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x09, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, - 0x6f, 0x72, 0x73, 0x1a, 0xfc, 0x09, 0x0a, 0x15, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x12, 0x14, 0x0a, - 0x05, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6d, - 0x61, 0x67, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x02, - 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x12, 0x0a, - 0x04, 0x61, 0x72, 0x67, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x61, 0x72, 0x67, - 0x73, 0x12, 0x64, 0x0a, 0x09, 0x6c, 0x69, 0x66, 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x18, 0x04, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x46, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x44, 0x65, 0x70, 0x6c, - 0x6f, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x50, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x53, 0x70, - 0x65, 0x63, 0x2e, 0x4c, 0x69, 0x66, 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x52, 0x09, 0x6c, 0x69, - 0x66, 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x12, 0x67, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x49, 0x2e, 0x6d, 0x6c, 0x5f, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x44, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x61, - 0x69, 0x6e, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x53, 0x70, 0x65, 0x63, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, - 0x12, 0x55, 0x0a, 0x03, 0x65, 0x6e, 0x76, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x43, 0x2e, - 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x44, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x43, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, - 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x45, 0x6e, 0x76, 0x56, - 0x61, 0x72, 0x52, 0x03, 0x65, 0x6e, 0x76, 0x1a, 0xb6, 0x01, 0x0a, 0x09, 0x4c, 0x69, 0x66, 0x65, - 0x63, 0x79, 0x63, 0x6c, 0x65, 0x12, 0x73, 0x0a, 0x0f, 
0x70, 0x72, 0x65, 0x5f, 0x63, 0x61, 0x63, - 0x68, 0x65, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x4b, - 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x44, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x6d, 0x65, 0x6e, 0x74, - 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, - 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x4c, 0x69, 0x66, - 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x52, 0x0d, 0x70, 0x72, 0x65, - 0x43, 0x61, 0x63, 0x68, 0x65, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x1a, 0x34, 0x0a, 0x04, 0x45, 0x78, - 0x65, 0x63, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x02, 0x20, - 0x03, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x12, 0x0a, 0x04, - 0x61, 0x72, 0x67, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x61, 0x72, 0x67, 0x73, - 0x1a, 0x8b, 0x05, 0x0a, 0x0c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x70, 0x65, - 0x63, 0x12, 0x1f, 0x0a, 0x09, 0x63, 0x70, 0x75, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x01, 0x42, 0x02, 0x18, 0x01, 0x52, 0x08, 0x63, 0x70, 0x75, 0x4c, 0x69, 0x6d, - 0x69, 0x74, 0x12, 0x25, 0x0a, 0x0c, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x5f, 0x6c, 0x69, 0x6d, - 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0b, 0x6d, 0x65, - 0x6d, 0x6f, 0x72, 0x79, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x23, 0x0a, 0x0b, 0x63, 0x70, 0x75, - 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x01, 0x42, 0x02, - 0x18, 0x01, 0x52, 0x0a, 0x63, 0x70, 0x75, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x29, - 0x0a, 0x0e, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x18, 0x06, 0x20, 0x01, 0x28, 0x01, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0d, 0x6d, 0x65, 0x6d, 0x6f, - 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2c, 0x0a, 0x12, 0x72, 0x65, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x63, 0x70, 0x75, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, - 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, - 0x70, 0x75, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x32, 0x0a, 0x15, 0x72, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, - 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x30, 0x0a, 0x14, 0x72, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x63, 0x70, 0x75, 0x5f, 0x72, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x72, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x43, 0x70, 0x75, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x36, 0x0a, - 0x17, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, - 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x15, - 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x7d, 0x0a, 0x0b, 0x61, 0x63, 0x63, 0x65, 0x6c, 0x65, 0x72, - 0x61, 0x74, 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x5b, 0x2e, 0x6d, 0x6c, 0x5f, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x69, 0x70, 
0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x44, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x61, - 0x69, 0x6e, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x41, 0x63, 0x63, 0x65, 0x6c, 0x65, 0x72, 0x61, 0x74, 0x6f, - 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x0b, 0x61, 0x63, 0x63, 0x65, 0x6c, 0x65, 0x72, - 0x61, 0x74, 0x6f, 0x72, 0x1a, 0x91, 0x01, 0x0a, 0x11, 0x41, 0x63, 0x63, 0x65, 0x6c, 0x65, 0x72, - 0x61, 0x74, 0x6f, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x16, 0x0a, 0x04, 0x74, 0x79, - 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x04, 0x74, 0x79, - 0x70, 0x65, 0x12, 0x18, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x03, 0x42, 0x02, 0x18, 0x01, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x23, 0x0a, 0x0d, - 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0c, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x79, 0x70, - 0x65, 0x12, 0x25, 0x0a, 0x0e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x63, 0x6f, - 0x75, 0x6e, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x72, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x4a, 0x04, 0x08, 0x04, 0x10, 0x05, 0x1a, 0x32, - 0x0a, 0x06, 0x45, 0x6e, 0x76, 0x56, 0x61, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x1a, 0xa3, 0x05, 0x0a, 0x0c, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, 0x53, - 0x70, 0x65, 0x63, 0x12, 0x48, 0x0a, 0x0c, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, - 0x75, 0x72, 0x69, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x4f, 0x72, - 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x52, 0x0b, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x55, 0x72, 0x69, 0x12, 0x41, 0x0a, - 0x0b, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x54, 0x79, 0x70, 0x65, 0x53, 0x63, - 0x68, 0x65, 0x6d, 0x61, 0x52, 0x0a, 0x74, 0x79, 0x70, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x12, 0x67, 0x0a, 0x0a, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x03, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x43, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x44, 0x65, 0x70, 0x6c, - 0x6f, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x49, 0x6d, 0x70, - 0x6f, 0x72, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, - 0x74, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0a, 0x70, - 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x12, 0x7a, 0x0a, 0x11, 0x63, 0x75, 0x73, - 0x74, 0x6f, 0x6d, 0x5f, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x04, - 0x20, 
0x03, 0x28, 0x0b, 0x32, 0x49, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x44, 0x65, 0x70, 0x6c, - 0x6f, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x49, 0x6d, 0x70, - 0x6f, 0x72, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, - 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x42, - 0x02, 0x18, 0x01, 0x52, 0x10, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x65, - 0x72, 0x74, 0x69, 0x65, 0x73, 0x12, 0x33, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, - 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, - 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x72, 0x65, - 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x1a, 0x64, 0x0a, 0x0f, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, - 0x74, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x3b, 0x0a, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x6d, 0x6c, 0x5f, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x4f, - 0x72, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x6a, 0x0a, 0x15, - 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x3b, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x4f, 0x72, 0x52, 0x75, 0x6e, - 0x74, 0x69, 0x6d, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0xec, 0x02, 0x0a, 0x0c, 0x52, 0x65, 0x73, - 0x6f, 0x6c, 0x76, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x12, 0x86, 0x01, 0x0a, 0x17, 0x6f, 0x75, - 0x74, 0x70, 0x75, 0x74, 0x5f, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, 0x71, 0x75, - 0x65, 0x72, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x4e, 0x2e, 0x6d, 0x6c, - 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x44, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, - 0x2e, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x51, - 0x75, 0x65, 0x72, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x15, 0x6f, 0x75, 0x74, - 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x51, 0x75, 0x65, 0x72, 0x69, - 0x65, 0x73, 0x1a, 0x41, 0x0a, 0x11, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x51, 0x75, - 0x65, 0x72, 0x79, 0x53, 0x70, 0x65, 0x63, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, - 0x72, 0x18, 0x01, 0x20, 0x01, 
0x28, 0x09, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, - 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, - 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x1a, 0x8f, 0x01, 0x0a, 0x1a, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, - 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x51, 0x75, 0x65, 0x72, 0x69, 0x65, 0x73, 0x45, - 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x5b, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x45, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x44, 0x65, 0x70, - 0x6c, 0x6f, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, - 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x51, 0x75, 0x65, 0x72, 0x79, 0x53, 0x70, 0x65, 0x63, 0x52, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x55, 0x0a, 0x17, 0x41, 0x49, 0x50, 0x6c, 0x61, - 0x74, 0x66, 0x6f, 0x72, 0x6d, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x4a, 0x6f, 0x62, 0x53, 0x70, - 0x65, 0x63, 0x12, 0x36, 0x0a, 0x0a, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x6a, 0x6f, 0x62, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, - 0x09, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x4a, 0x6f, 0x62, 0x3a, 0x02, 0x18, 0x01, 0x1a, 0xff, - 0x02, 0x0a, 0x0c, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x53, 0x70, 0x65, 0x63, 0x12, - 0x5c, 0x0a, 0x09, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x3c, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x44, 0x65, 0x70, 0x6c, 0x6f, 0x79, - 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, - 0x48, 0x00, 0x52, 0x09, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x51, 0x0a, - 0x08, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x33, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x44, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x6d, 0x65, 0x6e, - 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, - 0x53, 0x70, 0x65, 0x63, 0x48, 0x00, 0x52, 0x08, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, - 0x12, 0x51, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x33, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x44, 0x65, 0x70, 0x6c, 0x6f, 0x79, - 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x6c, - 0x76, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x48, 0x00, 0x52, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x6c, - 0x76, 0x65, 0x72, 0x12, 0x63, 0x0a, 0x0a, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x6a, 0x6f, - 0x62, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3e, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x69, 
0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x44, - 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, - 0x41, 0x49, 0x50, 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, 0x6d, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, - 0x4a, 0x6f, 0x62, 0x53, 0x70, 0x65, 0x63, 0x42, 0x02, 0x18, 0x01, 0x48, 0x00, 0x52, 0x09, 0x63, - 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x4a, 0x6f, 0x62, 0x42, 0x06, 0x0a, 0x04, 0x73, 0x70, 0x65, 0x63, - 0x1a, 0x71, 0x0a, 0x0e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x73, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x03, 0x6b, 0x65, 0x79, 0x12, 0x49, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x33, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x44, 0x65, 0x70, 0x6c, 0x6f, - 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x45, 0x78, 0x65, 0x63, - 0x75, 0x74, 0x6f, 0x72, 0x53, 0x70, 0x65, 0x63, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, - 0x02, 0x38, 0x01, 0x22, 0x79, 0x0a, 0x05, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1d, 0x0a, 0x09, - 0x69, 0x6e, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x48, - 0x00, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x23, 0x0a, 0x0c, 0x64, - 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x01, 0x48, 0x00, 0x52, 0x0b, 0x64, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, - 0x12, 0x23, 0x0a, 0x0c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0b, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x07, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x89, - 0x04, 0x0a, 0x0f, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, - 0x63, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x34, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x73, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x54, 0x79, 0x70, 0x65, - 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x10, 0x0a, 0x03, - 0x75, 0x72, 0x69, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x69, 0x12, 0x51, - 0x0a, 0x0a, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, - 0x74, 0x2e, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, - 0x79, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0a, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, - 0x73, 0x12, 0x64, 0x0a, 0x11, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x70, 0x72, 0x6f, 0x70, - 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x33, 0x2e, 0x6d, - 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x52, 0x75, 0x6e, 0x74, - 0x69, 0x6d, 0x65, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x2e, 0x43, 0x75, 0x73, 0x74, - 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x45, 
0x6e, 0x74, 0x72, - 0x79, 0x42, 0x02, 0x18, 0x01, 0x52, 0x10, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x50, 0x72, 0x6f, - 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x12, 0x33, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, - 0x63, 0x74, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x1a, 0x52, 0x0a, 0x0f, - 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, - 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, - 0x79, 0x12, 0x29, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x13, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, - 0x1a, 0x58, 0x0a, 0x15, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, - 0x74, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x29, 0x0a, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x6d, 0x6c, 0x5f, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x4b, 0x0a, 0x0c, 0x41, 0x72, - 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x3b, 0x0a, 0x09, 0x61, 0x72, - 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, - 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x52, 0x75, 0x6e, - 0x74, 0x69, 0x6d, 0x65, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x09, 0x61, 0x72, - 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x22, 0xfa, 0x08, 0x0a, 0x0d, 0x45, 0x78, 0x65, 0x63, - 0x75, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x3a, 0x0a, 0x06, 0x69, 0x6e, 0x70, - 0x75, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, - 0x72, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x52, 0x06, 0x69, - 0x6e, 0x70, 0x75, 0x74, 0x73, 0x12, 0x3d, 0x0a, 0x07, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x49, 0x6e, - 0x70, 0x75, 0x74, 0x2e, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x52, 0x07, 0x6f, 0x75, 0x74, - 0x70, 0x75, 0x74, 0x73, 0x1a, 0x9f, 0x04, 0x0a, 0x06, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x12, - 0x56, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x70, 0x75, 0x74, - 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0a, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x4f, 0x0a, 0x09, 0x61, 0x72, 0x74, 0x69, 0x66, - 0x61, 
0x63, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x6d, 0x6c, 0x5f, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, - 0x6f, 0x72, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x2e, 0x41, - 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x09, 0x61, - 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x12, 0x62, 0x0a, 0x10, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x2e, - 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0f, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x1a, 0x52, 0x0a, 0x0f, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, - 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, - 0x79, 0x12, 0x29, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x13, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, - 0x1a, 0x58, 0x0a, 0x0e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x03, 0x6b, 0x65, 0x79, 0x12, 0x30, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x52, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x5a, 0x0a, 0x14, 0x50, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2c, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x32, 0x0a, 0x0f, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x12, 0x1f, 0x0a, 0x0b, 0x6f, 0x75, 0x74, - 0x70, 0x75, 0x74, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, - 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x1a, 0x97, 0x03, 0x0a, 0x07, 0x4f, - 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x12, 0x53, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x33, 0x2e, 0x6d, 0x6c, 0x5f, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, - 0x6f, 0x72, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x2e, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x2e, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, - 0x0a, 0x70, 0x61, 0x72, 0x61, 
0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x50, 0x0a, 0x09, 0x61, - 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x32, - 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x45, 0x78, - 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x2e, 0x4f, 0x75, 0x74, 0x70, - 0x75, 0x74, 0x73, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x52, 0x09, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x12, 0x1f, 0x0a, - 0x0b, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x0a, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x1a, 0x6a, - 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, - 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, - 0x6b, 0x65, 0x79, 0x12, 0x41, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x2e, - 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x58, 0x0a, 0x0e, 0x41, 0x72, - 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, - 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x30, - 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, - 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x41, 0x72, 0x74, - 0x69, 0x66, 0x61, 0x63, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3a, 0x02, 0x38, 0x01, 0x22, 0x95, 0x04, 0x0a, 0x0e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, - 0x72, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x50, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x6d, 0x6c, - 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, - 0x74, 0x6f, 0x72, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0a, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x49, 0x0a, 0x09, 0x61, 0x72, 0x74, - 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x6d, - 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x45, 0x78, 0x65, 0x63, - 0x75, 0x74, 0x6f, 0x72, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x09, 0x61, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x73, 0x12, 0x5c, 0x0a, 0x10, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, - 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x45, 0x78, - 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x2e, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, - 0x79, 0x52, 0x0f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 
0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x73, 0x1a, 0x52, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x29, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x58, 0x0a, 0x0e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, - 0x63, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x30, 0x0a, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, - 0x74, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, - 0x1a, 0x5a, 0x0a, 0x14, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2c, 0x0a, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xa4, 0x02, 0x0a, - 0x17, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x54, 0x61, 0x73, 0x6b, 0x46, 0x69, 0x6e, - 0x61, 0x6c, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, - 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x28, - 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, - 0x73, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x2e, 0x0a, 0x11, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x75, 0x75, 0x69, 0x64, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x03, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x4a, 0x6f, 0x62, 0x55, 0x75, 0x69, 0x64, 0x12, 0x2e, 0x0a, 0x11, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x04, 0x20, - 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x4a, 0x6f, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x3b, 0x0a, 0x1a, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x17, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2c, 0x0a, 0x12, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x10, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x54, 0x61, 0x73, 0x6b, 0x4e, - 0x61, 0x6d, 0x65, 0x22, 0x9f, 0x02, 0x0a, 0x11, 0x50, 0x69, 0x70, 0x65, 0x6c, 
0x69, 0x6e, 0x65, - 0x53, 0x74, 0x61, 0x74, 0x65, 0x45, 0x6e, 0x75, 0x6d, 0x22, 0x89, 0x02, 0x0a, 0x11, 0x50, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x54, 0x61, 0x73, 0x6b, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, - 0x1a, 0x0a, 0x16, 0x54, 0x41, 0x53, 0x4b, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x45, 0x5f, 0x55, 0x4e, - 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x50, - 0x45, 0x4e, 0x44, 0x49, 0x4e, 0x47, 0x10, 0x01, 0x12, 0x12, 0x0a, 0x0e, 0x52, 0x55, 0x4e, 0x4e, - 0x49, 0x4e, 0x47, 0x5f, 0x44, 0x52, 0x49, 0x56, 0x45, 0x52, 0x10, 0x02, 0x12, 0x14, 0x0a, 0x10, - 0x44, 0x52, 0x49, 0x56, 0x45, 0x52, 0x5f, 0x53, 0x55, 0x43, 0x43, 0x45, 0x45, 0x44, 0x45, 0x44, - 0x10, 0x03, 0x12, 0x14, 0x0a, 0x10, 0x52, 0x55, 0x4e, 0x4e, 0x49, 0x4e, 0x47, 0x5f, 0x45, 0x58, - 0x45, 0x43, 0x55, 0x54, 0x4f, 0x52, 0x10, 0x04, 0x12, 0x0d, 0x0a, 0x09, 0x53, 0x55, 0x43, 0x43, - 0x45, 0x45, 0x44, 0x45, 0x44, 0x10, 0x05, 0x12, 0x12, 0x0a, 0x0e, 0x43, 0x41, 0x4e, 0x43, 0x45, - 0x4c, 0x5f, 0x50, 0x45, 0x4e, 0x44, 0x49, 0x4e, 0x47, 0x10, 0x06, 0x12, 0x0e, 0x0a, 0x0a, 0x43, - 0x41, 0x4e, 0x43, 0x45, 0x4c, 0x4c, 0x49, 0x4e, 0x47, 0x10, 0x07, 0x12, 0x0d, 0x0a, 0x09, 0x43, - 0x41, 0x4e, 0x43, 0x45, 0x4c, 0x4c, 0x45, 0x44, 0x10, 0x08, 0x12, 0x0a, 0x0a, 0x06, 0x46, 0x41, - 0x49, 0x4c, 0x45, 0x44, 0x10, 0x09, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x4b, 0x49, 0x50, 0x50, 0x45, - 0x44, 0x10, 0x0a, 0x12, 0x0a, 0x0a, 0x06, 0x51, 0x55, 0x45, 0x55, 0x45, 0x44, 0x10, 0x0b, 0x12, - 0x11, 0x0a, 0x0d, 0x4e, 0x4f, 0x54, 0x5f, 0x54, 0x52, 0x49, 0x47, 0x47, 0x45, 0x52, 0x45, 0x44, - 0x10, 0x0c, 0x12, 0x11, 0x0a, 0x0d, 0x55, 0x4e, 0x53, 0x43, 0x48, 0x45, 0x44, 0x55, 0x4c, 0x41, - 0x42, 0x4c, 0x45, 0x10, 0x0d, 0x22, 0xb7, 0x01, 0x0a, 0x0c, 0x50, 0x6c, 0x61, 0x74, 0x66, 0x6f, - 0x72, 0x6d, 0x53, 0x70, 0x65, 0x63, 0x12, 0x47, 0x0a, 0x09, 0x70, 0x6c, 0x61, 0x74, 0x66, 0x6f, - 0x72, 0x6d, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, - 0x6d, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x50, 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, 0x6d, 0x73, 0x45, - 0x6e, 0x74, 0x72, 0x79, 0x52, 0x09, 0x70, 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, 0x6d, 0x73, 0x1a, - 0x5e, 0x0a, 0x0e, 0x50, 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, 0x6d, 0x73, 0x45, 0x6e, 0x74, 0x72, - 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, - 0x6b, 0x65, 0x79, 0x12, 0x36, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2e, 0x53, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x50, 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, 0x6d, - 0x53, 0x70, 0x65, 0x63, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, - 0xf8, 0x01, 0x0a, 0x12, 0x53, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x50, 0x6c, 0x61, 0x74, 0x66, 0x6f, - 0x72, 0x6d, 0x53, 0x70, 0x65, 0x63, 0x12, 0x4f, 0x0a, 0x0f, 0x64, 0x65, 0x70, 0x6c, 0x6f, 0x79, - 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x26, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, - 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, 0x6d, 0x44, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x6d, 0x65, 0x6e, - 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x0e, 0x64, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x6d, - 0x65, 0x6e, 0x74, 0x53, 0x70, 0x65, 0x63, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x6c, 0x61, 0x74, 0x66, - 0x6f, 
0x72, 0x6d, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x70, 0x6c, 0x61, 0x74, 0x66, - 0x6f, 0x72, 0x6d, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, 0x06, 0x63, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x12, 0x44, 0x0a, 0x0e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x6d, - 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x0e, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0xc6, 0x01, 0x0a, 0x18, 0x50, - 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, 0x6d, 0x44, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x6d, 0x65, 0x6e, - 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x53, 0x0a, 0x09, 0x65, 0x78, 0x65, 0x63, 0x75, - 0x74, 0x6f, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x35, 0x2e, 0x6d, 0x6c, 0x5f, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x50, 0x6c, 0x61, 0x74, 0x66, 0x6f, - 0x72, 0x6d, 0x44, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, - 0x79, 0x52, 0x09, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x73, 0x1a, 0x55, 0x0a, 0x0e, - 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, - 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, - 0x12, 0x2d, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, - 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, - 0x02, 0x38, 0x01, 0x22, 0x82, 0x01, 0x0a, 0x0f, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x12, 0x4c, 0x0a, 0x0a, 0x6b, - 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x27, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x4b, - 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x0a, 0x6b, 0x75, 0x62, 0x65, - 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x88, 0x01, 0x01, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x6b, 0x75, - 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x22, 0x72, 0x0a, 0x19, 0x4b, 0x75, 0x62, 0x65, - 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x43, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x42, 0x0a, 0x0e, 0x70, 0x76, 0x63, 0x5f, 0x73, 0x70, 0x65, - 0x63, 0x5f, 0x70, 0x61, 0x74, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, - 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x48, 0x00, 0x52, 0x0c, 0x70, 0x76, 0x63, 0x53, 0x70, 0x65, - 0x63, 0x50, 0x61, 0x74, 0x63, 0x68, 0x88, 0x01, 0x01, 0x42, 0x11, 0x0a, 0x0f, 0x5f, 0x70, 0x76, - 0x63, 0x5f, 0x73, 0x70, 0x65, 
0x63, 0x5f, 0x70, 0x61, 0x74, 0x63, 0x68, 0x22, 0xc7, 0x01, 0x0a, - 0x0e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, - 0x23, 0x0a, 0x0d, 0x73, 0x65, 0x6d, 0x61, 0x70, 0x68, 0x6f, 0x72, 0x65, 0x5f, 0x6b, 0x65, 0x79, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x73, 0x65, 0x6d, 0x61, 0x70, 0x68, 0x6f, 0x72, - 0x65, 0x4b, 0x65, 0x79, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x75, 0x74, 0x65, 0x78, 0x5f, 0x6e, 0x61, - 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6d, 0x75, 0x74, 0x65, 0x78, 0x4e, - 0x61, 0x6d, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, - 0x74, 0x74, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0b, 0x72, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x54, 0x74, 0x6c, 0x12, 0x40, 0x0a, 0x09, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, - 0x61, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x09, 0x77, 0x6f, 0x72, 0x6b, - 0x73, 0x70, 0x61, 0x63, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x77, 0x6f, 0x72, - 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x3c, 0x5a, 0x3a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, - 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, 0x61, 0x6c, - 0x70, 0x68, 0x61, 0x31, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x70, 0x65, 0x63, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_pipeline_spec_proto_rawDesc = "" + + "\n" + + "\x13pipeline_spec.proto\x12\fml_pipelines\x1a\x1egoogle/protobuf/duration.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x17google/rpc/status.proto\"\xb0\x06\n" + + "\vPipelineJob\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\x12!\n" + + "\fdisplay_name\x18\x02 \x01(\tR\vdisplayName\x12<\n" + + "\rpipeline_spec\x18\a \x01(\v2\x17.google.protobuf.StructR\fpipelineSpec\x12=\n" + + "\x06labels\x18\v \x03(\v2%.ml_pipelines.PipelineJob.LabelsEntryR\x06labels\x12N\n" + + "\x0eruntime_config\x18\f \x01(\v2'.ml_pipelines.PipelineJob.RuntimeConfigR\rruntimeConfig\x1a9\n" + + "\vLabelsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + + "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\x1a\xb7\x03\n" + + "\rRuntimeConfig\x12[\n" + + "\n" + + "parameters\x18\x01 \x03(\v27.ml_pipelines.PipelineJob.RuntimeConfig.ParametersEntryB\x02\x18\x01R\n" + + "parameters\x120\n" + + "\x14gcs_output_directory\x18\x02 \x01(\tR\x12gcsOutputDirectory\x12g\n" + + "\x10parameter_values\x18\x03 \x03(\v2<.ml_pipelines.PipelineJob.RuntimeConfig.ParameterValuesEntryR\x0fparameterValues\x1aR\n" + + "\x0fParametersEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12)\n" + + "\x05value\x18\x02 \x01(\v2\x13.ml_pipelines.ValueR\x05value:\x028\x01\x1aZ\n" + + "\x14ParameterValuesEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12,\n" + + "\x05value\x18\x02 \x01(\v2\x16.google.protobuf.ValueR\x05value:\x028\x01J\x04\b\x03\x10\x04J\x04\b\x04\x10\x05J\x04\b\x05\x10\x06J\x04\b\x06\x10\aJ\x04\b\b\x10\tJ\x04\b\t\x10\n" + + "J\x04\b\n" + + "\x10\v\"\xf8\x04\n" + + "\fPipelineSpec\x12?\n" + + "\rpipeline_info\x18\x01 \x01(\v2\x1a.ml_pipelines.PipelineInfoR\fpipelineInfo\x12@\n" + + "\x0fdeployment_spec\x18\a \x01(\v2\x17.google.protobuf.StructR\x0edeploymentSpec\x12\x1f\n" + 
+ "\vsdk_version\x18\x04 \x01(\tR\n" + + "sdkVersion\x12%\n" + + "\x0eschema_version\x18\x05 \x01(\tR\rschemaVersion\x12J\n" + + "\n" + + "components\x18\b \x03(\v2*.ml_pipelines.PipelineSpec.ComponentsEntryR\n" + + "components\x12/\n" + + "\x04root\x18\t \x01(\v2\x1b.ml_pipelines.ComponentSpecR\x04root\x122\n" + + "\x15default_pipeline_root\x18\n" + + " \x01(\tR\x13defaultPipelineRoot\x1a\x8f\x01\n" + + "\x10RuntimeParameter\x12A\n" + + "\x04type\x18\x01 \x01(\x0e2-.ml_pipelines.PrimitiveType.PrimitiveTypeEnumR\x04type\x128\n" + + "\rdefault_value\x18\x02 \x01(\v2\x13.ml_pipelines.ValueR\fdefaultValue\x1aZ\n" + + "\x0fComponentsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x121\n" + + "\x05value\x18\x02 \x01(\v2\x1b.ml_pipelines.ComponentSpecR\x05value:\x028\x01\"\xcd\x03\n" + + "\rComponentSpec\x12N\n" + + "\x11input_definitions\x18\x01 \x01(\v2!.ml_pipelines.ComponentInputsSpecR\x10inputDefinitions\x12Q\n" + + "\x12output_definitions\x18\x02 \x01(\v2\".ml_pipelines.ComponentOutputsSpecR\x11outputDefinitions\x12)\n" + + "\x03dag\x18\x03 \x01(\v2\x15.ml_pipelines.DagSpecH\x00R\x03dag\x12'\n" + + "\x0eexecutor_label\x18\x04 \x01(\tH\x00R\rexecutorLabel\x12T\n" + + "\x15single_platform_specs\x18\x05 \x03(\v2 .ml_pipelines.SinglePlatformSpecR\x13singlePlatformSpecs\x12]\n" + + "\x18task_config_passthroughs\x18\x06 \x03(\v2#.ml_pipelines.TaskConfigPassthroughR\x16taskConfigPassthroughsB\x10\n" + + "\x0eimplementation\"\xd3\x01\n" + + "\aDagSpec\x126\n" + + "\x05tasks\x18\x01 \x03(\v2 .ml_pipelines.DagSpec.TasksEntryR\x05tasks\x126\n" + + "\aoutputs\x18\x02 \x01(\v2\x1c.ml_pipelines.DagOutputsSpecR\aoutputs\x1aX\n" + + "\n" + + "TasksEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x124\n" + + "\x05value\x18\x02 \x01(\v2\x1e.ml_pipelines.PipelineTaskSpecR\x05value:\x028\x01\"\xf6\n" + + "\n" + + "\x0eDagOutputsSpec\x12I\n" + + "\tartifacts\x18\x01 \x03(\v2+.ml_pipelines.DagOutputsSpec.ArtifactsEntryR\tartifacts\x12L\n" + + "\n" + + "parameters\x18\x02 \x03(\v2,.ml_pipelines.DagOutputsSpec.ParametersEntryR\n" + + "parameters\x1aq\n" + + "\x14ArtifactSelectorSpec\x12)\n" + + "\x10producer_subtask\x18\x01 \x01(\tR\x0fproducerSubtask\x12.\n" + + "\x13output_artifact_key\x18\x02 \x01(\tR\x11outputArtifactKey\x1ay\n" + + "\x15DagOutputArtifactSpec\x12`\n" + + "\x12artifact_selectors\x18\x01 \x03(\v21.ml_pipelines.DagOutputsSpec.ArtifactSelectorSpecR\x11artifactSelectors\x1ap\n" + + "\x0eArtifactsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12H\n" + + "\x05value\x18\x02 \x01(\v22.ml_pipelines.DagOutputsSpec.DagOutputArtifactSpecR\x05value:\x028\x01\x1at\n" + + "\x15ParameterSelectorSpec\x12)\n" + + "\x10producer_subtask\x18\x01 \x01(\tR\x0fproducerSubtask\x120\n" + + "\x14output_parameter_key\x18\x02 \x01(\tR\x12outputParameterKey\x1a}\n" + + "\x16ParameterSelectorsSpec\x12c\n" + + "\x13parameter_selectors\x18\x01 \x03(\v22.ml_pipelines.DagOutputsSpec.ParameterSelectorSpecR\x12parameterSelectors\x1a\x8f\x02\n" + + "\x19MapParameterSelectorsSpec\x12y\n" + + "\x11mapped_parameters\x18\x02 \x03(\v2L.ml_pipelines.DagOutputsSpec.MapParameterSelectorsSpec.MappedParametersEntryR\x10mappedParameters\x1aw\n" + + "\x15MappedParametersEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12H\n" + + "\x05value\x18\x02 \x01(\v22.ml_pipelines.DagOutputsSpec.ParameterSelectorSpecR\x05value:\x028\x01\x1a\xef\x01\n" + + "\x16DagOutputParameterSpec\x12f\n" + + "\x14value_from_parameter\x18\x01 \x01(\v22.ml_pipelines.DagOutputsSpec.ParameterSelectorSpecH\x00R\x12valueFromParameter\x12_\n" + + 
"\x10value_from_oneof\x18\x02 \x01(\v23.ml_pipelines.DagOutputsSpec.ParameterSelectorsSpecH\x00R\x0evalueFromOneofB\x06\n" + + "\x04kindJ\x04\b\x03\x10\x04\x1ar\n" + + "\x0fParametersEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12I\n" + + "\x05value\x18\x02 \x01(\v23.ml_pipelines.DagOutputsSpec.DagOutputParameterSpecR\x05value:\x028\x01\"\x8a\a\n" + + "\x13ComponentInputsSpec\x12N\n" + + "\tartifacts\x18\x01 \x03(\v20.ml_pipelines.ComponentInputsSpec.ArtifactsEntryR\tartifacts\x12Q\n" + + "\n" + + "parameters\x18\x02 \x03(\v21.ml_pipelines.ComponentInputsSpec.ParametersEntryR\n" + + "parameters\x1a\xc2\x01\n" + + "\fArtifactSpec\x12E\n" + + "\rartifact_type\x18\x01 \x01(\v2 .ml_pipelines.ArtifactTypeSchemaR\fartifactType\x12(\n" + + "\x10is_artifact_list\x18\x02 \x01(\bR\x0eisArtifactList\x12\x1f\n" + + "\vis_optional\x18\x03 \x01(\bR\n" + + "isOptional\x12 \n" + + "\vdescription\x18\x04 \x01(\tR\vdescription\x1a\xac\x02\n" + + "\rParameterSpec\x12E\n" + + "\x04type\x18\x01 \x01(\x0e2-.ml_pipelines.PrimitiveType.PrimitiveTypeEnumB\x02\x18\x01R\x04type\x12T\n" + + "\x0eparameter_type\x18\x02 \x01(\x0e2-.ml_pipelines.ParameterType.ParameterTypeEnumR\rparameterType\x12;\n" + + "\rdefault_value\x18\x03 \x01(\v2\x16.google.protobuf.ValueR\fdefaultValue\x12\x1f\n" + + "\vis_optional\x18\x04 \x01(\bR\n" + + "isOptional\x12 \n" + + "\vdescription\x18\x05 \x01(\tR\vdescription\x1al\n" + + "\x0eArtifactsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12D\n" + + "\x05value\x18\x02 \x01(\v2..ml_pipelines.ComponentInputsSpec.ArtifactSpecR\x05value:\x028\x01\x1an\n" + + "\x0fParametersEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12E\n" + + "\x05value\x18\x02 \x01(\v2/.ml_pipelines.ComponentInputsSpec.ParameterSpecR\x05value:\x028\x01\"\xf4\t\n" + + "\x14ComponentOutputsSpec\x12O\n" + + "\tartifacts\x18\x01 \x03(\v21.ml_pipelines.ComponentOutputsSpec.ArtifactsEntryR\tartifacts\x12R\n" + + "\n" + + "parameters\x18\x02 \x03(\v22.ml_pipelines.ComponentOutputsSpec.ParametersEntryR\n" + + "parameters\x1a\x85\x05\n" + + "\fArtifactSpec\x12E\n" + + "\rartifact_type\x18\x01 \x01(\v2 .ml_pipelines.ArtifactTypeSchemaR\fartifactType\x12c\n" + + "\n" + + "properties\x18\x02 \x03(\v2?.ml_pipelines.ComponentOutputsSpec.ArtifactSpec.PropertiesEntryB\x02\x18\x01R\n" + + "properties\x12v\n" + + "\x11custom_properties\x18\x03 \x03(\v2E.ml_pipelines.ComponentOutputsSpec.ArtifactSpec.CustomPropertiesEntryB\x02\x18\x01R\x10customProperties\x123\n" + + "\bmetadata\x18\x04 \x01(\v2\x17.google.protobuf.StructR\bmetadata\x12(\n" + + "\x10is_artifact_list\x18\x05 \x01(\bR\x0eisArtifactList\x12 \n" + + "\vdescription\x18\x06 \x01(\tR\vdescription\x1ad\n" + + "\x0fPropertiesEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12;\n" + + "\x05value\x18\x02 \x01(\v2%.ml_pipelines.ValueOrRuntimeParameterR\x05value:\x028\x01\x1aj\n" + + "\x15CustomPropertiesEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12;\n" + + "\x05value\x18\x02 \x01(\v2%.ml_pipelines.ValueOrRuntimeParameterR\x05value:\x028\x01\x1a\xce\x01\n" + + "\rParameterSpec\x12E\n" + + "\x04type\x18\x01 \x01(\x0e2-.ml_pipelines.PrimitiveType.PrimitiveTypeEnumB\x02\x18\x01R\x04type\x12T\n" + + "\x0eparameter_type\x18\x02 \x01(\x0e2-.ml_pipelines.ParameterType.ParameterTypeEnumR\rparameterType\x12 \n" + + "\vdescription\x18\x03 \x01(\tR\vdescription\x1am\n" + + "\x0eArtifactsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12E\n" + + "\x05value\x18\x02 \x01(\v2/.ml_pipelines.ComponentOutputsSpec.ArtifactSpecR\x05value:\x028\x01\x1ao\n" 
+ + "\x0fParametersEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12F\n" + + "\x05value\x18\x02 \x01(\v20.ml_pipelines.ComponentOutputsSpec.ParameterSpecR\x05value:\x028\x01\"\xd8\n" + + "\n" + + "\x0eTaskInputsSpec\x12L\n" + + "\n" + + "parameters\x18\x01 \x03(\v2,.ml_pipelines.TaskInputsSpec.ParametersEntryR\n" + + "parameters\x12I\n" + + "\tartifacts\x18\x02 \x03(\v2+.ml_pipelines.TaskInputsSpec.ArtifactsEntryR\tartifacts\x1a\xc7\x02\n" + + "\x11InputArtifactSpec\x12y\n" + + "\x14task_output_artifact\x18\x03 \x01(\v2E.ml_pipelines.TaskInputsSpec.InputArtifactSpec.TaskOutputArtifactSpecH\x00R\x12taskOutputArtifact\x12:\n" + + "\x18component_input_artifact\x18\x04 \x01(\tH\x00R\x16componentInputArtifact\x1am\n" + + "\x16TaskOutputArtifactSpec\x12#\n" + + "\rproducer_task\x18\x01 \x01(\tR\fproducerTask\x12.\n" + + "\x13output_artifact_key\x18\x02 \x01(\tR\x11outputArtifactKeyB\x06\n" + + "\x04kindJ\x04\b\x05\x10\x06\x1a\x84\x05\n" + + "\x12InputParameterSpec\x12}\n" + + "\x15task_output_parameter\x18\x01 \x01(\v2G.ml_pipelines.TaskInputsSpec.InputParameterSpec.TaskOutputParameterSpecH\x00R\x13taskOutputParameter\x12L\n" + + "\rruntime_value\x18\x02 \x01(\v2%.ml_pipelines.ValueOrRuntimeParameterH\x00R\fruntimeValue\x12<\n" + + "\x19component_input_parameter\x18\x03 \x01(\tH\x00R\x17componentInputParameter\x12m\n" + + "\x11task_final_status\x18\x05 \x01(\v2?.ml_pipelines.TaskInputsSpec.InputParameterSpec.TaskFinalStatusH\x00R\x0ftaskFinalStatus\x12B\n" + + "\x1dparameter_expression_selector\x18\x04 \x01(\tR\x1bparameterExpressionSelector\x1ap\n" + + "\x17TaskOutputParameterSpec\x12#\n" + + "\rproducer_task\x18\x01 \x01(\tR\fproducerTask\x120\n" + + "\x14output_parameter_key\x18\x02 \x01(\tR\x12outputParameterKey\x1a6\n" + + "\x0fTaskFinalStatus\x12#\n" + + "\rproducer_task\x18\x01 \x01(\tR\fproducerTaskB\x06\n" + + "\x04kind\x1an\n" + + "\x0fParametersEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12E\n" + + "\x05value\x18\x02 \x01(\v2/.ml_pipelines.TaskInputsSpec.InputParameterSpecR\x05value:\x028\x01\x1al\n" + + "\x0eArtifactsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12D\n" + + "\x05value\x18\x02 \x01(\v2..ml_pipelines.TaskInputsSpec.InputArtifactSpecR\x05value:\x028\x01\"\xef\a\n" + + "\x0fTaskOutputsSpec\x12M\n" + + "\n" + + "parameters\x18\x01 \x03(\v2-.ml_pipelines.TaskOutputsSpec.ParametersEntryR\n" + + "parameters\x12J\n" + + "\tartifacts\x18\x02 \x03(\v2,.ml_pipelines.TaskOutputsSpec.ArtifactsEntryR\tartifacts\x1a\x84\x04\n" + + "\x12OutputArtifactSpec\x12E\n" + + "\rartifact_type\x18\x01 \x01(\v2 .ml_pipelines.ArtifactTypeSchemaR\fartifactType\x12`\n" + + "\n" + + "properties\x18\x02 \x03(\v2@.ml_pipelines.TaskOutputsSpec.OutputArtifactSpec.PropertiesEntryR\n" + + "properties\x12s\n" + + "\x11custom_properties\x18\x03 \x03(\v2F.ml_pipelines.TaskOutputsSpec.OutputArtifactSpec.CustomPropertiesEntryR\x10customProperties\x1ad\n" + + "\x0fPropertiesEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12;\n" + + "\x05value\x18\x02 \x01(\v2%.ml_pipelines.ValueOrRuntimeParameterR\x05value:\x028\x01\x1aj\n" + + "\x15CustomPropertiesEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12;\n" + + "\x05value\x18\x02 \x01(\v2%.ml_pipelines.ValueOrRuntimeParameterR\x05value:\x028\x01\x1aX\n" + + "\x13OutputParameterSpec\x12A\n" + + "\x04type\x18\x01 \x01(\x0e2-.ml_pipelines.PrimitiveType.PrimitiveTypeEnumR\x04type\x1ap\n" + + "\x0fParametersEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12G\n" + + "\x05value\x18\x02 
\x01(\v21.ml_pipelines.TaskOutputsSpec.OutputParameterSpecR\x05value:\x028\x01\x1an\n" + + "\x0eArtifactsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12F\n" + + "\x05value\x18\x02 \x01(\v20.ml_pipelines.TaskOutputsSpec.OutputArtifactSpecR\x05value:\x028\x01\"m\n" + + "\rPrimitiveType\"X\n" + + "\x11PrimitiveTypeEnum\x12\x1e\n" + + "\x1aPRIMITIVE_TYPE_UNSPECIFIED\x10\x00\x12\a\n" + + "\x03INT\x10\x01\x12\n" + + "\n" + + "\x06DOUBLE\x10\x02\x12\n" + + "\n" + + "\x06STRING\x10\x03\x1a\x02\x18\x01:\x02\x18\x01\"\xc8\x01\n" + + "\rParameterType\"\xb6\x01\n" + + "\x11ParameterTypeEnum\x12#\n" + + "\x1fPARAMETER_TYPE_ENUM_UNSPECIFIED\x10\x00\x12\x11\n" + + "\rNUMBER_DOUBLE\x10\x01\x12\x12\n" + + "\x0eNUMBER_INTEGER\x10\x02\x12\n" + + "\n" + + "\x06STRING\x10\x03\x12\v\n" + + "\aBOOLEAN\x10\x04\x12\b\n" + + "\x04LIST\x10\x05\x12\n" + + "\n" + + "\x06STRUCT\x10\x06\x12\x15\n" + + "\x11TASK_FINAL_STATUS\x10\a\x12\x0f\n" + + "\vTASK_CONFIG\x10\b\"\xca\x01\n" + + "\x19TaskConfigPassthroughType\"\xac\x01\n" + + "\x1dTaskConfigPassthroughTypeEnum\x12\b\n" + + "\x04NONE\x10\x00\x12\r\n" + + "\tRESOURCES\x10\x01\x12\a\n" + + "\x03ENV\x10\x02\x12\x17\n" + + "\x13KUBERNETES_AFFINITY\x10\x03\x12\x1a\n" + + "\x16KUBERNETES_TOLERATIONS\x10\x04\x12\x1c\n" + + "\x18KUBERNETES_NODE_SELECTOR\x10\x05\x12\x16\n" + + "\x12KUBERNETES_VOLUMES\x10\x06\"\x98\x01\n" + + "\x15TaskConfigPassthrough\x12[\n" + + "\x05field\x18\x01 \x01(\x0e2E.ml_pipelines.TaskConfigPassthroughType.TaskConfigPassthroughTypeEnumR\x05field\x12\"\n" + + "\rapply_to_task\x18\x02 \x01(\bR\vapplyToTask\"\xfe\n" + + "\n" + + "\x10PipelineTaskSpec\x12;\n" + + "\ttask_info\x18\x01 \x01(\v2\x1e.ml_pipelines.PipelineTaskInfoR\btaskInfo\x124\n" + + "\x06inputs\x18\x02 \x01(\v2\x1c.ml_pipelines.TaskInputsSpecR\x06inputs\x12'\n" + + "\x0fdependent_tasks\x18\x05 \x03(\tR\x0edependentTasks\x12V\n" + + "\x0fcaching_options\x18\x06 \x01(\v2-.ml_pipelines.PipelineTaskSpec.CachingOptionsR\x0ecachingOptions\x12?\n" + + "\rcomponent_ref\x18\a \x01(\v2\x1a.ml_pipelines.ComponentRefR\fcomponentRef\x12S\n" + + "\x0etrigger_policy\x18\b \x01(\v2,.ml_pipelines.PipelineTaskSpec.TriggerPolicyR\rtriggerPolicy\x12Q\n" + + "\x11artifact_iterator\x18\t \x01(\v2\".ml_pipelines.ArtifactIteratorSpecH\x00R\x10artifactIterator\x12T\n" + + "\x12parameter_iterator\x18\n" + + " \x01(\v2#.ml_pipelines.ParameterIteratorSpecH\x00R\x11parameterIterator\x12M\n" + + "\fretry_policy\x18\v \x01(\v2*.ml_pipelines.PipelineTaskSpec.RetryPolicyR\vretryPolicy\x12V\n" + + "\x0fiterator_policy\x18\f \x01(\v2-.ml_pipelines.PipelineTaskSpec.IteratorPolicyR\x0eiteratorPolicy\x1aP\n" + + "\x0eCachingOptions\x12!\n" + + "\fenable_cache\x18\x01 \x01(\bR\venableCache\x12\x1b\n" + + "\tcache_key\x18\x02 \x01(\tR\bcacheKey\x1a\x80\x02\n" + + "\rTriggerPolicy\x12\x1c\n" + + "\tcondition\x18\x01 \x01(\tR\tcondition\x12X\n" + + "\bstrategy\x18\x02 \x01(\x0e2<.ml_pipelines.PipelineTaskSpec.TriggerPolicy.TriggerStrategyR\bstrategy\"w\n" + + "\x0fTriggerStrategy\x12 \n" + + "\x1cTRIGGER_STRATEGY_UNSPECIFIED\x10\x00\x12 \n" + + "\x1cALL_UPSTREAM_TASKS_SUCCEEDED\x10\x01\x12 \n" + + "\x1cALL_UPSTREAM_TASKS_COMPLETED\x10\x02\x1a\xef\x01\n" + + "\vRetryPolicy\x12&\n" + + "\x0fmax_retry_count\x18\x01 \x01(\x05R\rmaxRetryCount\x12D\n" + + "\x10backoff_duration\x18\x02 \x01(\v2\x19.google.protobuf.DurationR\x0fbackoffDuration\x12%\n" + + "\x0ebackoff_factor\x18\x03 \x01(\x01R\rbackoffFactor\x12K\n" + + "\x14backoff_max_duration\x18\x04 \x01(\v2\x19.google.protobuf.DurationR\x12backoffMaxDuration\x1a=\n" + + 
"\x0eIteratorPolicy\x12+\n" + + "\x11parallelism_limit\x18\x01 \x01(\x05R\x10parallelismLimitB\n" + + "\n" + + "\biterator\"\xad\x01\n" + + "\x14ArtifactIteratorSpec\x12B\n" + + "\x05items\x18\x01 \x01(\v2,.ml_pipelines.ArtifactIteratorSpec.ItemsSpecR\x05items\x12\x1d\n" + + "\n" + + "item_input\x18\x02 \x01(\tR\titemInput\x1a2\n" + + "\tItemsSpec\x12%\n" + + "\x0einput_artifact\x18\x01 \x01(\tR\rinputArtifact\"\xcf\x01\n" + + "\x15ParameterIteratorSpec\x12C\n" + + "\x05items\x18\x01 \x01(\v2-.ml_pipelines.ParameterIteratorSpec.ItemsSpecR\x05items\x12\x1d\n" + + "\n" + + "item_input\x18\x02 \x01(\tR\titemInput\x1aR\n" + + "\tItemsSpec\x12\x12\n" + + "\x03raw\x18\x01 \x01(\tH\x00R\x03raw\x12)\n" + + "\x0finput_parameter\x18\x02 \x01(\tH\x00R\x0einputParameterB\x06\n" + + "\x04kind\"\"\n" + + "\fComponentRef\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\"g\n" + + "\fPipelineInfo\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\x12!\n" + + "\fdisplay_name\x18\x02 \x01(\tR\vdisplayName\x12 \n" + + "\vdescription\x18\x03 \x01(\tR\vdescription\"\xb8\x01\n" + + "\x12ArtifactTypeSchema\x12#\n" + + "\fschema_title\x18\x01 \x01(\tH\x00R\vschemaTitle\x12#\n" + + "\n" + + "schema_uri\x18\x02 \x01(\tB\x02\x18\x01H\x00R\tschemaUri\x12)\n" + + "\x0finstance_schema\x18\x03 \x01(\tH\x00R\x0einstanceSchema\x12%\n" + + "\x0eschema_version\x18\x04 \x01(\tR\rschemaVersionB\x06\n" + + "\x04kind\"&\n" + + "\x10PipelineTaskInfo\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\"\xc9\x01\n" + + "\x17ValueOrRuntimeParameter\x12@\n" + + "\x0econstant_value\x18\x01 \x01(\v2\x13.ml_pipelines.ValueB\x02\x18\x01H\x00R\rconstantValue\x12-\n" + + "\x11runtime_parameter\x18\x02 \x01(\tH\x00R\x10runtimeParameter\x124\n" + + "\bconstant\x18\x03 \x01(\v2\x16.google.protobuf.ValueH\x00R\bconstantB\a\n" + + "\x05value\"\xcf\x17\n" + + "\x18PipelineDeploymentConfig\x12S\n" + + "\texecutors\x18\x01 \x03(\v25.ml_pipelines.PipelineDeploymentConfig.ExecutorsEntryR\texecutors\x1a\xfc\t\n" + + "\x15PipelineContainerSpec\x12\x14\n" + + "\x05image\x18\x01 \x01(\tR\x05image\x12\x18\n" + + "\acommand\x18\x02 \x03(\tR\acommand\x12\x12\n" + + "\x04args\x18\x03 \x03(\tR\x04args\x12d\n" + + "\tlifecycle\x18\x04 \x01(\v2F.ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.LifecycleR\tlifecycle\x12g\n" + + "\tresources\x18\x05 \x01(\v2I.ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.ResourceSpecR\tresources\x12U\n" + + "\x03env\x18\x06 \x03(\v2C.ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.EnvVarR\x03env\x1a\xb6\x01\n" + + "\tLifecycle\x12s\n" + + "\x0fpre_cache_check\x18\x01 \x01(\v2K.ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.Lifecycle.ExecR\rpreCacheCheck\x1a4\n" + + "\x04Exec\x12\x18\n" + + "\acommand\x18\x02 \x03(\tR\acommand\x12\x12\n" + + "\x04args\x18\x03 \x03(\tR\x04args\x1a\x8b\x05\n" + + "\fResourceSpec\x12\x1f\n" + + "\tcpu_limit\x18\x01 \x01(\x01B\x02\x18\x01R\bcpuLimit\x12%\n" + + "\fmemory_limit\x18\x02 \x01(\x01B\x02\x18\x01R\vmemoryLimit\x12#\n" + + "\vcpu_request\x18\x05 \x01(\x01B\x02\x18\x01R\n" + + "cpuRequest\x12)\n" + + "\x0ememory_request\x18\x06 \x01(\x01B\x02\x18\x01R\rmemoryRequest\x12,\n" + + "\x12resource_cpu_limit\x18\a \x01(\tR\x10resourceCpuLimit\x122\n" + + "\x15resource_memory_limit\x18\b \x01(\tR\x13resourceMemoryLimit\x120\n" + + "\x14resource_cpu_request\x18\t \x01(\tR\x12resourceCpuRequest\x126\n" + + "\x17resource_memory_request\x18\n" + + " \x01(\tR\x15resourceMemoryRequest\x12}\n" + + "\vaccelerator\x18\x03 
\x01(\v2[.ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.ResourceSpec.AcceleratorConfigR\vaccelerator\x1a\x91\x01\n" + + "\x11AcceleratorConfig\x12\x16\n" + + "\x04type\x18\x01 \x01(\tB\x02\x18\x01R\x04type\x12\x18\n" + + "\x05count\x18\x02 \x01(\x03B\x02\x18\x01R\x05count\x12#\n" + + "\rresource_type\x18\x03 \x01(\tR\fresourceType\x12%\n" + + "\x0eresource_count\x18\x04 \x01(\tR\rresourceCountJ\x04\b\x04\x10\x05\x1a2\n" + + "\x06EnvVar\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\x12\x14\n" + + "\x05value\x18\x02 \x01(\tR\x05value\x1a\xa3\x05\n" + + "\fImporterSpec\x12H\n" + + "\fartifact_uri\x18\x01 \x01(\v2%.ml_pipelines.ValueOrRuntimeParameterR\vartifactUri\x12A\n" + + "\vtype_schema\x18\x02 \x01(\v2 .ml_pipelines.ArtifactTypeSchemaR\n" + + "typeSchema\x12g\n" + + "\n" + + "properties\x18\x03 \x03(\v2C.ml_pipelines.PipelineDeploymentConfig.ImporterSpec.PropertiesEntryB\x02\x18\x01R\n" + + "properties\x12z\n" + + "\x11custom_properties\x18\x04 \x03(\v2I.ml_pipelines.PipelineDeploymentConfig.ImporterSpec.CustomPropertiesEntryB\x02\x18\x01R\x10customProperties\x123\n" + + "\bmetadata\x18\x06 \x01(\v2\x17.google.protobuf.StructR\bmetadata\x12\x1a\n" + + "\breimport\x18\x05 \x01(\bR\breimport\x1ad\n" + + "\x0fPropertiesEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12;\n" + + "\x05value\x18\x02 \x01(\v2%.ml_pipelines.ValueOrRuntimeParameterR\x05value:\x028\x01\x1aj\n" + + "\x15CustomPropertiesEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12;\n" + + "\x05value\x18\x02 \x01(\v2%.ml_pipelines.ValueOrRuntimeParameterR\x05value:\x028\x01\x1a\xec\x02\n" + + "\fResolverSpec\x12\x86\x01\n" + + "\x17output_artifact_queries\x18\x01 \x03(\v2N.ml_pipelines.PipelineDeploymentConfig.ResolverSpec.OutputArtifactQueriesEntryR\x15outputArtifactQueries\x1aA\n" + + "\x11ArtifactQuerySpec\x12\x16\n" + + "\x06filter\x18\x01 \x01(\tR\x06filter\x12\x14\n" + + "\x05limit\x18\x02 \x01(\x05R\x05limit\x1a\x8f\x01\n" + + "\x1aOutputArtifactQueriesEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12[\n" + + "\x05value\x18\x02 \x01(\v2E.ml_pipelines.PipelineDeploymentConfig.ResolverSpec.ArtifactQuerySpecR\x05value:\x028\x01\x1aU\n" + + "\x17AIPlatformCustomJobSpec\x126\n" + + "\n" + + "custom_job\x18\x01 \x01(\v2\x17.google.protobuf.StructR\tcustomJob:\x02\x18\x01\x1a\xff\x02\n" + + "\fExecutorSpec\x12\\\n" + + "\tcontainer\x18\x01 \x01(\v2<.ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpecH\x00R\tcontainer\x12Q\n" + + "\bimporter\x18\x02 \x01(\v23.ml_pipelines.PipelineDeploymentConfig.ImporterSpecH\x00R\bimporter\x12Q\n" + + "\bresolver\x18\x03 \x01(\v23.ml_pipelines.PipelineDeploymentConfig.ResolverSpecH\x00R\bresolver\x12c\n" + + "\n" + + "custom_job\x18\x04 \x01(\v2>.ml_pipelines.PipelineDeploymentConfig.AIPlatformCustomJobSpecB\x02\x18\x01H\x00R\tcustomJobB\x06\n" + + "\x04spec\x1aq\n" + + "\x0eExecutorsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12I\n" + + "\x05value\x18\x02 \x01(\v23.ml_pipelines.PipelineDeploymentConfig.ExecutorSpecR\x05value:\x028\x01\"y\n" + + "\x05Value\x12\x1d\n" + + "\tint_value\x18\x01 \x01(\x03H\x00R\bintValue\x12#\n" + + "\fdouble_value\x18\x02 \x01(\x01H\x00R\vdoubleValue\x12#\n" + + "\fstring_value\x18\x03 \x01(\tH\x00R\vstringValueB\a\n" + + "\x05value\"\x89\x04\n" + + "\x0fRuntimeArtifact\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\x124\n" + + "\x04type\x18\x02 \x01(\v2 .ml_pipelines.ArtifactTypeSchemaR\x04type\x12\x10\n" + + "\x03uri\x18\x03 \x01(\tR\x03uri\x12Q\n" + + "\n" + + "properties\x18\x04 
\x03(\v2-.ml_pipelines.RuntimeArtifact.PropertiesEntryB\x02\x18\x01R\n" + + "properties\x12d\n" + + "\x11custom_properties\x18\x05 \x03(\v23.ml_pipelines.RuntimeArtifact.CustomPropertiesEntryB\x02\x18\x01R\x10customProperties\x123\n" + + "\bmetadata\x18\x06 \x01(\v2\x17.google.protobuf.StructR\bmetadata\x1aR\n" + + "\x0fPropertiesEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12)\n" + + "\x05value\x18\x02 \x01(\v2\x13.ml_pipelines.ValueR\x05value:\x028\x01\x1aX\n" + + "\x15CustomPropertiesEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12)\n" + + "\x05value\x18\x02 \x01(\v2\x13.ml_pipelines.ValueR\x05value:\x028\x01\"K\n" + + "\fArtifactList\x12;\n" + + "\tartifacts\x18\x01 \x03(\v2\x1d.ml_pipelines.RuntimeArtifactR\tartifacts\"\xfa\b\n" + + "\rExecutorInput\x12:\n" + + "\x06inputs\x18\x01 \x01(\v2\".ml_pipelines.ExecutorInput.InputsR\x06inputs\x12=\n" + + "\aoutputs\x18\x02 \x01(\v2#.ml_pipelines.ExecutorInput.OutputsR\aoutputs\x1a\x9f\x04\n" + + "\x06Inputs\x12V\n" + + "\n" + + "parameters\x18\x01 \x03(\v22.ml_pipelines.ExecutorInput.Inputs.ParametersEntryB\x02\x18\x01R\n" + + "parameters\x12O\n" + + "\tartifacts\x18\x02 \x03(\v21.ml_pipelines.ExecutorInput.Inputs.ArtifactsEntryR\tartifacts\x12b\n" + + "\x10parameter_values\x18\x03 \x03(\v27.ml_pipelines.ExecutorInput.Inputs.ParameterValuesEntryR\x0fparameterValues\x1aR\n" + + "\x0fParametersEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12)\n" + + "\x05value\x18\x02 \x01(\v2\x13.ml_pipelines.ValueR\x05value:\x028\x01\x1aX\n" + + "\x0eArtifactsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x120\n" + + "\x05value\x18\x02 \x01(\v2\x1a.ml_pipelines.ArtifactListR\x05value:\x028\x01\x1aZ\n" + + "\x14ParameterValuesEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12,\n" + + "\x05value\x18\x02 \x01(\v2\x16.google.protobuf.ValueR\x05value:\x028\x01\x1a2\n" + + "\x0fOutputParameter\x12\x1f\n" + + "\voutput_file\x18\x01 \x01(\tR\n" + + "outputFile\x1a\x97\x03\n" + + "\aOutputs\x12S\n" + + "\n" + + "parameters\x18\x01 \x03(\v23.ml_pipelines.ExecutorInput.Outputs.ParametersEntryR\n" + + "parameters\x12P\n" + + "\tartifacts\x18\x02 \x03(\v22.ml_pipelines.ExecutorInput.Outputs.ArtifactsEntryR\tartifacts\x12\x1f\n" + + "\voutput_file\x18\x03 \x01(\tR\n" + + "outputFile\x1aj\n" + + "\x0fParametersEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12A\n" + + "\x05value\x18\x02 \x01(\v2+.ml_pipelines.ExecutorInput.OutputParameterR\x05value:\x028\x01\x1aX\n" + + "\x0eArtifactsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x120\n" + + "\x05value\x18\x02 \x01(\v2\x1a.ml_pipelines.ArtifactListR\x05value:\x028\x01\"\x95\x04\n" + + "\x0eExecutorOutput\x12P\n" + + "\n" + + "parameters\x18\x01 \x03(\v2,.ml_pipelines.ExecutorOutput.ParametersEntryB\x02\x18\x01R\n" + + "parameters\x12I\n" + + "\tartifacts\x18\x02 \x03(\v2+.ml_pipelines.ExecutorOutput.ArtifactsEntryR\tartifacts\x12\\\n" + + "\x10parameter_values\x18\x03 \x03(\v21.ml_pipelines.ExecutorOutput.ParameterValuesEntryR\x0fparameterValues\x1aR\n" + + "\x0fParametersEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12)\n" + + "\x05value\x18\x02 \x01(\v2\x13.ml_pipelines.ValueR\x05value:\x028\x01\x1aX\n" + + "\x0eArtifactsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x120\n" + + "\x05value\x18\x02 \x01(\v2\x1a.ml_pipelines.ArtifactListR\x05value:\x028\x01\x1aZ\n" + + "\x14ParameterValuesEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12,\n" + + "\x05value\x18\x02 \x01(\v2\x16.google.protobuf.ValueR\x05value:\x028\x01\"\xa4\x02\n" + + 
"\x17PipelineTaskFinalStatus\x12\x14\n" + + "\x05state\x18\x01 \x01(\tR\x05state\x12(\n" + + "\x05error\x18\x02 \x01(\v2\x12.google.rpc.StatusR\x05error\x12.\n" + + "\x11pipeline_job_uuid\x18\x03 \x01(\x03B\x02\x18\x01R\x0fpipelineJobUuid\x12.\n" + + "\x11pipeline_job_name\x18\x04 \x01(\tB\x02\x18\x01R\x0fpipelineJobName\x12;\n" + + "\x1apipeline_job_resource_name\x18\x05 \x01(\tR\x17pipelineJobResourceName\x12,\n" + + "\x12pipeline_task_name\x18\x06 \x01(\tR\x10pipelineTaskName\"\x9f\x02\n" + + "\x11PipelineStateEnum\"\x89\x02\n" + + "\x11PipelineTaskState\x12\x1a\n" + + "\x16TASK_STATE_UNSPECIFIED\x10\x00\x12\v\n" + + "\aPENDING\x10\x01\x12\x12\n" + + "\x0eRUNNING_DRIVER\x10\x02\x12\x14\n" + + "\x10DRIVER_SUCCEEDED\x10\x03\x12\x14\n" + + "\x10RUNNING_EXECUTOR\x10\x04\x12\r\n" + + "\tSUCCEEDED\x10\x05\x12\x12\n" + + "\x0eCANCEL_PENDING\x10\x06\x12\x0e\n" + + "\n" + + "CANCELLING\x10\a\x12\r\n" + + "\tCANCELLED\x10\b\x12\n" + + "\n" + + "\x06FAILED\x10\t\x12\v\n" + + "\aSKIPPED\x10\n" + + "\x12\n" + + "\n" + + "\x06QUEUED\x10\v\x12\x11\n" + + "\rNOT_TRIGGERED\x10\f\x12\x11\n" + + "\rUNSCHEDULABLE\x10\r\"\xb7\x01\n" + + "\fPlatformSpec\x12G\n" + + "\tplatforms\x18\x01 \x03(\v2).ml_pipelines.PlatformSpec.PlatformsEntryR\tplatforms\x1a^\n" + + "\x0ePlatformsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x126\n" + + "\x05value\x18\x02 \x01(\v2 .ml_pipelines.SinglePlatformSpecR\x05value:\x028\x01\"\xf8\x01\n" + + "\x12SinglePlatformSpec\x12O\n" + + "\x0fdeployment_spec\x18\x01 \x01(\v2&.ml_pipelines.PlatformDeploymentConfigR\x0edeploymentSpec\x12\x1a\n" + + "\bplatform\x18\x02 \x01(\tR\bplatform\x12/\n" + + "\x06config\x18\x03 \x01(\v2\x17.google.protobuf.StructR\x06config\x12D\n" + + "\x0epipelineConfig\x18\x04 \x01(\v2\x1c.ml_pipelines.PipelineConfigR\x0epipelineConfig\"\xc6\x01\n" + + "\x18PlatformDeploymentConfig\x12S\n" + + "\texecutors\x18\x01 \x03(\v25.ml_pipelines.PlatformDeploymentConfig.ExecutorsEntryR\texecutors\x1aU\n" + + "\x0eExecutorsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12-\n" + + "\x05value\x18\x02 \x01(\v2\x17.google.protobuf.StructR\x05value:\x028\x01\"\x82\x01\n" + + "\x0fWorkspaceConfig\x12\x12\n" + + "\x04size\x18\x01 \x01(\tR\x04size\x12L\n" + + "\n" + + "kubernetes\x18\x02 \x01(\v2'.ml_pipelines.KubernetesWorkspaceConfigH\x00R\n" + + "kubernetes\x88\x01\x01B\r\n" + + "\v_kubernetes\"r\n" + + "\x19KubernetesWorkspaceConfig\x12B\n" + + "\x0epvc_spec_patch\x18\x01 \x01(\v2\x17.google.protobuf.StructH\x00R\fpvcSpecPatch\x88\x01\x01B\x11\n" + + "\x0f_pvc_spec_patch\"\xc7\x01\n" + + "\x0ePipelineConfig\x12#\n" + + "\rsemaphore_key\x18\x01 \x01(\tR\fsemaphoreKey\x12\x1d\n" + + "\n" + + "mutex_name\x18\x02 \x01(\tR\tmutexName\x12!\n" + + "\fresource_ttl\x18\x03 \x01(\x05R\vresourceTtl\x12@\n" + + "\tworkspace\x18\x04 \x01(\v2\x1d.ml_pipelines.WorkspaceConfigH\x00R\tworkspace\x88\x01\x01B\f\n" + + "\n" + + "_workspaceB google.protobuf.Struct - 37, // 1: ml_pipelines.PipelineJob.labels:type_name -> ml_pipelines.PipelineJob.LabelsEntry - 38, // 2: ml_pipelines.PipelineJob.runtime_config:type_name -> ml_pipelines.PipelineJob.RuntimeConfig - 19, // 3: ml_pipelines.PipelineSpec.pipeline_info:type_name -> ml_pipelines.PipelineInfo - 112, // 4: ml_pipelines.PipelineSpec.deployment_spec:type_name -> google.protobuf.Struct - 42, // 5: ml_pipelines.PipelineSpec.components:type_name -> ml_pipelines.PipelineSpec.ComponentsEntry - 6, // 6: ml_pipelines.PipelineSpec.root:type_name -> ml_pipelines.ComponentSpec - 9, // 7: 
ml_pipelines.ComponentSpec.input_definitions:type_name -> ml_pipelines.ComponentInputsSpec - 10, // 8: ml_pipelines.ComponentSpec.output_definitions:type_name -> ml_pipelines.ComponentOutputsSpec - 7, // 9: ml_pipelines.ComponentSpec.dag:type_name -> ml_pipelines.DagSpec - 32, // 10: ml_pipelines.ComponentSpec.single_platform_specs:type_name -> ml_pipelines.SinglePlatformSpec - 43, // 11: ml_pipelines.DagSpec.tasks:type_name -> ml_pipelines.DagSpec.TasksEntry - 8, // 12: ml_pipelines.DagSpec.outputs:type_name -> ml_pipelines.DagOutputsSpec - 46, // 13: ml_pipelines.DagOutputsSpec.artifacts:type_name -> ml_pipelines.DagOutputsSpec.ArtifactsEntry - 51, // 14: ml_pipelines.DagOutputsSpec.parameters:type_name -> ml_pipelines.DagOutputsSpec.ParametersEntry - 55, // 15: ml_pipelines.ComponentInputsSpec.artifacts:type_name -> ml_pipelines.ComponentInputsSpec.ArtifactsEntry - 56, // 16: ml_pipelines.ComponentInputsSpec.parameters:type_name -> ml_pipelines.ComponentInputsSpec.ParametersEntry - 59, // 17: ml_pipelines.ComponentOutputsSpec.artifacts:type_name -> ml_pipelines.ComponentOutputsSpec.ArtifactsEntry - 60, // 18: ml_pipelines.ComponentOutputsSpec.parameters:type_name -> ml_pipelines.ComponentOutputsSpec.ParametersEntry - 65, // 19: ml_pipelines.TaskInputsSpec.parameters:type_name -> ml_pipelines.TaskInputsSpec.ParametersEntry - 66, // 20: ml_pipelines.TaskInputsSpec.artifacts:type_name -> ml_pipelines.TaskInputsSpec.ArtifactsEntry - 72, // 21: ml_pipelines.TaskOutputsSpec.parameters:type_name -> ml_pipelines.TaskOutputsSpec.ParametersEntry - 73, // 22: ml_pipelines.TaskOutputsSpec.artifacts:type_name -> ml_pipelines.TaskOutputsSpec.ArtifactsEntry - 21, // 23: ml_pipelines.PipelineTaskSpec.task_info:type_name -> ml_pipelines.PipelineTaskInfo - 11, // 24: ml_pipelines.PipelineTaskSpec.inputs:type_name -> ml_pipelines.TaskInputsSpec - 76, // 25: ml_pipelines.PipelineTaskSpec.caching_options:type_name -> ml_pipelines.PipelineTaskSpec.CachingOptions - 18, // 26: ml_pipelines.PipelineTaskSpec.component_ref:type_name -> ml_pipelines.ComponentRef - 77, // 27: ml_pipelines.PipelineTaskSpec.trigger_policy:type_name -> ml_pipelines.PipelineTaskSpec.TriggerPolicy - 16, // 28: ml_pipelines.PipelineTaskSpec.artifact_iterator:type_name -> ml_pipelines.ArtifactIteratorSpec - 17, // 29: ml_pipelines.PipelineTaskSpec.parameter_iterator:type_name -> ml_pipelines.ParameterIteratorSpec - 78, // 30: ml_pipelines.PipelineTaskSpec.retry_policy:type_name -> ml_pipelines.PipelineTaskSpec.RetryPolicy - 79, // 31: ml_pipelines.PipelineTaskSpec.iterator_policy:type_name -> ml_pipelines.PipelineTaskSpec.IteratorPolicy - 80, // 32: ml_pipelines.ArtifactIteratorSpec.items:type_name -> ml_pipelines.ArtifactIteratorSpec.ItemsSpec - 81, // 33: ml_pipelines.ParameterIteratorSpec.items:type_name -> ml_pipelines.ParameterIteratorSpec.ItemsSpec - 24, // 34: ml_pipelines.ValueOrRuntimeParameter.constant_value:type_name -> ml_pipelines.Value - 113, // 35: ml_pipelines.ValueOrRuntimeParameter.constant:type_name -> google.protobuf.Value - 87, // 36: ml_pipelines.PipelineDeploymentConfig.executors:type_name -> ml_pipelines.PipelineDeploymentConfig.ExecutorsEntry - 20, // 37: ml_pipelines.RuntimeArtifact.type:type_name -> ml_pipelines.ArtifactTypeSchema - 97, // 38: ml_pipelines.RuntimeArtifact.properties:type_name -> ml_pipelines.RuntimeArtifact.PropertiesEntry - 98, // 39: ml_pipelines.RuntimeArtifact.custom_properties:type_name -> ml_pipelines.RuntimeArtifact.CustomPropertiesEntry - 112, // 40: 
ml_pipelines.RuntimeArtifact.metadata:type_name -> google.protobuf.Struct - 25, // 41: ml_pipelines.ArtifactList.artifacts:type_name -> ml_pipelines.RuntimeArtifact - 99, // 42: ml_pipelines.ExecutorInput.inputs:type_name -> ml_pipelines.ExecutorInput.Inputs - 101, // 43: ml_pipelines.ExecutorInput.outputs:type_name -> ml_pipelines.ExecutorInput.Outputs - 107, // 44: ml_pipelines.ExecutorOutput.parameters:type_name -> ml_pipelines.ExecutorOutput.ParametersEntry - 108, // 45: ml_pipelines.ExecutorOutput.artifacts:type_name -> ml_pipelines.ExecutorOutput.ArtifactsEntry - 109, // 46: ml_pipelines.ExecutorOutput.parameter_values:type_name -> ml_pipelines.ExecutorOutput.ParameterValuesEntry - 114, // 47: ml_pipelines.PipelineTaskFinalStatus.error:type_name -> google.rpc.Status - 110, // 48: ml_pipelines.PlatformSpec.platforms:type_name -> ml_pipelines.PlatformSpec.PlatformsEntry - 33, // 49: ml_pipelines.SinglePlatformSpec.deployment_spec:type_name -> ml_pipelines.PlatformDeploymentConfig - 112, // 50: ml_pipelines.SinglePlatformSpec.config:type_name -> google.protobuf.Struct - 36, // 51: ml_pipelines.SinglePlatformSpec.pipelineConfig:type_name -> ml_pipelines.PipelineConfig - 111, // 52: ml_pipelines.PlatformDeploymentConfig.executors:type_name -> ml_pipelines.PlatformDeploymentConfig.ExecutorsEntry - 35, // 53: ml_pipelines.WorkspaceConfig.kubernetes:type_name -> ml_pipelines.KubernetesWorkspaceConfig - 112, // 54: ml_pipelines.KubernetesWorkspaceConfig.pvc_spec_patch:type_name -> google.protobuf.Struct - 34, // 55: ml_pipelines.PipelineConfig.workspace:type_name -> ml_pipelines.WorkspaceConfig - 39, // 56: ml_pipelines.PipelineJob.RuntimeConfig.parameters:type_name -> ml_pipelines.PipelineJob.RuntimeConfig.ParametersEntry - 40, // 57: ml_pipelines.PipelineJob.RuntimeConfig.parameter_values:type_name -> ml_pipelines.PipelineJob.RuntimeConfig.ParameterValuesEntry - 24, // 58: ml_pipelines.PipelineJob.RuntimeConfig.ParametersEntry.value:type_name -> ml_pipelines.Value - 113, // 59: ml_pipelines.PipelineJob.RuntimeConfig.ParameterValuesEntry.value:type_name -> google.protobuf.Value - 0, // 60: ml_pipelines.PipelineSpec.RuntimeParameter.type:type_name -> ml_pipelines.PrimitiveType.PrimitiveTypeEnum - 24, // 61: ml_pipelines.PipelineSpec.RuntimeParameter.default_value:type_name -> ml_pipelines.Value - 6, // 62: ml_pipelines.PipelineSpec.ComponentsEntry.value:type_name -> ml_pipelines.ComponentSpec - 15, // 63: ml_pipelines.DagSpec.TasksEntry.value:type_name -> ml_pipelines.PipelineTaskSpec - 44, // 64: ml_pipelines.DagOutputsSpec.DagOutputArtifactSpec.artifact_selectors:type_name -> ml_pipelines.DagOutputsSpec.ArtifactSelectorSpec - 45, // 65: ml_pipelines.DagOutputsSpec.ArtifactsEntry.value:type_name -> ml_pipelines.DagOutputsSpec.DagOutputArtifactSpec - 47, // 66: ml_pipelines.DagOutputsSpec.ParameterSelectorsSpec.parameter_selectors:type_name -> ml_pipelines.DagOutputsSpec.ParameterSelectorSpec - 52, // 67: ml_pipelines.DagOutputsSpec.MapParameterSelectorsSpec.mapped_parameters:type_name -> ml_pipelines.DagOutputsSpec.MapParameterSelectorsSpec.MappedParametersEntry - 47, // 68: ml_pipelines.DagOutputsSpec.DagOutputParameterSpec.value_from_parameter:type_name -> ml_pipelines.DagOutputsSpec.ParameterSelectorSpec - 48, // 69: ml_pipelines.DagOutputsSpec.DagOutputParameterSpec.value_from_oneof:type_name -> ml_pipelines.DagOutputsSpec.ParameterSelectorsSpec - 50, // 70: ml_pipelines.DagOutputsSpec.ParametersEntry.value:type_name -> ml_pipelines.DagOutputsSpec.DagOutputParameterSpec - 47, // 71: 
ml_pipelines.DagOutputsSpec.MapParameterSelectorsSpec.MappedParametersEntry.value:type_name -> ml_pipelines.DagOutputsSpec.ParameterSelectorSpec - 20, // 72: ml_pipelines.ComponentInputsSpec.ArtifactSpec.artifact_type:type_name -> ml_pipelines.ArtifactTypeSchema - 0, // 73: ml_pipelines.ComponentInputsSpec.ParameterSpec.type:type_name -> ml_pipelines.PrimitiveType.PrimitiveTypeEnum - 1, // 74: ml_pipelines.ComponentInputsSpec.ParameterSpec.parameter_type:type_name -> ml_pipelines.ParameterType.ParameterTypeEnum - 113, // 75: ml_pipelines.ComponentInputsSpec.ParameterSpec.default_value:type_name -> google.protobuf.Value - 53, // 76: ml_pipelines.ComponentInputsSpec.ArtifactsEntry.value:type_name -> ml_pipelines.ComponentInputsSpec.ArtifactSpec - 54, // 77: ml_pipelines.ComponentInputsSpec.ParametersEntry.value:type_name -> ml_pipelines.ComponentInputsSpec.ParameterSpec - 20, // 78: ml_pipelines.ComponentOutputsSpec.ArtifactSpec.artifact_type:type_name -> ml_pipelines.ArtifactTypeSchema - 61, // 79: ml_pipelines.ComponentOutputsSpec.ArtifactSpec.properties:type_name -> ml_pipelines.ComponentOutputsSpec.ArtifactSpec.PropertiesEntry - 62, // 80: ml_pipelines.ComponentOutputsSpec.ArtifactSpec.custom_properties:type_name -> ml_pipelines.ComponentOutputsSpec.ArtifactSpec.CustomPropertiesEntry - 112, // 81: ml_pipelines.ComponentOutputsSpec.ArtifactSpec.metadata:type_name -> google.protobuf.Struct - 0, // 82: ml_pipelines.ComponentOutputsSpec.ParameterSpec.type:type_name -> ml_pipelines.PrimitiveType.PrimitiveTypeEnum - 1, // 83: ml_pipelines.ComponentOutputsSpec.ParameterSpec.parameter_type:type_name -> ml_pipelines.ParameterType.ParameterTypeEnum - 57, // 84: ml_pipelines.ComponentOutputsSpec.ArtifactsEntry.value:type_name -> ml_pipelines.ComponentOutputsSpec.ArtifactSpec - 58, // 85: ml_pipelines.ComponentOutputsSpec.ParametersEntry.value:type_name -> ml_pipelines.ComponentOutputsSpec.ParameterSpec - 22, // 86: ml_pipelines.ComponentOutputsSpec.ArtifactSpec.PropertiesEntry.value:type_name -> ml_pipelines.ValueOrRuntimeParameter - 22, // 87: ml_pipelines.ComponentOutputsSpec.ArtifactSpec.CustomPropertiesEntry.value:type_name -> ml_pipelines.ValueOrRuntimeParameter - 67, // 88: ml_pipelines.TaskInputsSpec.InputArtifactSpec.task_output_artifact:type_name -> ml_pipelines.TaskInputsSpec.InputArtifactSpec.TaskOutputArtifactSpec - 68, // 89: ml_pipelines.TaskInputsSpec.InputParameterSpec.task_output_parameter:type_name -> ml_pipelines.TaskInputsSpec.InputParameterSpec.TaskOutputParameterSpec - 22, // 90: ml_pipelines.TaskInputsSpec.InputParameterSpec.runtime_value:type_name -> ml_pipelines.ValueOrRuntimeParameter - 69, // 91: ml_pipelines.TaskInputsSpec.InputParameterSpec.task_final_status:type_name -> ml_pipelines.TaskInputsSpec.InputParameterSpec.TaskFinalStatus - 64, // 92: ml_pipelines.TaskInputsSpec.ParametersEntry.value:type_name -> ml_pipelines.TaskInputsSpec.InputParameterSpec - 63, // 93: ml_pipelines.TaskInputsSpec.ArtifactsEntry.value:type_name -> ml_pipelines.TaskInputsSpec.InputArtifactSpec - 20, // 94: ml_pipelines.TaskOutputsSpec.OutputArtifactSpec.artifact_type:type_name -> ml_pipelines.ArtifactTypeSchema - 74, // 95: ml_pipelines.TaskOutputsSpec.OutputArtifactSpec.properties:type_name -> ml_pipelines.TaskOutputsSpec.OutputArtifactSpec.PropertiesEntry - 75, // 96: ml_pipelines.TaskOutputsSpec.OutputArtifactSpec.custom_properties:type_name -> ml_pipelines.TaskOutputsSpec.OutputArtifactSpec.CustomPropertiesEntry - 0, // 97: ml_pipelines.TaskOutputsSpec.OutputParameterSpec.type:type_name 
-> ml_pipelines.PrimitiveType.PrimitiveTypeEnum - 71, // 98: ml_pipelines.TaskOutputsSpec.ParametersEntry.value:type_name -> ml_pipelines.TaskOutputsSpec.OutputParameterSpec - 70, // 99: ml_pipelines.TaskOutputsSpec.ArtifactsEntry.value:type_name -> ml_pipelines.TaskOutputsSpec.OutputArtifactSpec - 22, // 100: ml_pipelines.TaskOutputsSpec.OutputArtifactSpec.PropertiesEntry.value:type_name -> ml_pipelines.ValueOrRuntimeParameter - 22, // 101: ml_pipelines.TaskOutputsSpec.OutputArtifactSpec.CustomPropertiesEntry.value:type_name -> ml_pipelines.ValueOrRuntimeParameter - 2, // 102: ml_pipelines.PipelineTaskSpec.TriggerPolicy.strategy:type_name -> ml_pipelines.PipelineTaskSpec.TriggerPolicy.TriggerStrategy - 115, // 103: ml_pipelines.PipelineTaskSpec.RetryPolicy.backoff_duration:type_name -> google.protobuf.Duration - 115, // 104: ml_pipelines.PipelineTaskSpec.RetryPolicy.backoff_max_duration:type_name -> google.protobuf.Duration - 88, // 105: ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.lifecycle:type_name -> ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.Lifecycle - 89, // 106: ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.resources:type_name -> ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.ResourceSpec - 90, // 107: ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.env:type_name -> ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.EnvVar - 22, // 108: ml_pipelines.PipelineDeploymentConfig.ImporterSpec.artifact_uri:type_name -> ml_pipelines.ValueOrRuntimeParameter - 20, // 109: ml_pipelines.PipelineDeploymentConfig.ImporterSpec.type_schema:type_name -> ml_pipelines.ArtifactTypeSchema - 93, // 110: ml_pipelines.PipelineDeploymentConfig.ImporterSpec.properties:type_name -> ml_pipelines.PipelineDeploymentConfig.ImporterSpec.PropertiesEntry - 94, // 111: ml_pipelines.PipelineDeploymentConfig.ImporterSpec.custom_properties:type_name -> ml_pipelines.PipelineDeploymentConfig.ImporterSpec.CustomPropertiesEntry - 112, // 112: ml_pipelines.PipelineDeploymentConfig.ImporterSpec.metadata:type_name -> google.protobuf.Struct - 96, // 113: ml_pipelines.PipelineDeploymentConfig.ResolverSpec.output_artifact_queries:type_name -> ml_pipelines.PipelineDeploymentConfig.ResolverSpec.OutputArtifactQueriesEntry - 112, // 114: ml_pipelines.PipelineDeploymentConfig.AIPlatformCustomJobSpec.custom_job:type_name -> google.protobuf.Struct - 82, // 115: ml_pipelines.PipelineDeploymentConfig.ExecutorSpec.container:type_name -> ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec - 83, // 116: ml_pipelines.PipelineDeploymentConfig.ExecutorSpec.importer:type_name -> ml_pipelines.PipelineDeploymentConfig.ImporterSpec - 84, // 117: ml_pipelines.PipelineDeploymentConfig.ExecutorSpec.resolver:type_name -> ml_pipelines.PipelineDeploymentConfig.ResolverSpec - 85, // 118: ml_pipelines.PipelineDeploymentConfig.ExecutorSpec.custom_job:type_name -> ml_pipelines.PipelineDeploymentConfig.AIPlatformCustomJobSpec - 86, // 119: ml_pipelines.PipelineDeploymentConfig.ExecutorsEntry.value:type_name -> ml_pipelines.PipelineDeploymentConfig.ExecutorSpec - 91, // 120: ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.Lifecycle.pre_cache_check:type_name -> ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.Lifecycle.Exec - 92, // 121: ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.ResourceSpec.accelerator:type_name -> ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.ResourceSpec.AcceleratorConfig - 22, // 122: 
ml_pipelines.PipelineDeploymentConfig.ImporterSpec.PropertiesEntry.value:type_name -> ml_pipelines.ValueOrRuntimeParameter - 22, // 123: ml_pipelines.PipelineDeploymentConfig.ImporterSpec.CustomPropertiesEntry.value:type_name -> ml_pipelines.ValueOrRuntimeParameter - 95, // 124: ml_pipelines.PipelineDeploymentConfig.ResolverSpec.OutputArtifactQueriesEntry.value:type_name -> ml_pipelines.PipelineDeploymentConfig.ResolverSpec.ArtifactQuerySpec - 24, // 125: ml_pipelines.RuntimeArtifact.PropertiesEntry.value:type_name -> ml_pipelines.Value - 24, // 126: ml_pipelines.RuntimeArtifact.CustomPropertiesEntry.value:type_name -> ml_pipelines.Value - 102, // 127: ml_pipelines.ExecutorInput.Inputs.parameters:type_name -> ml_pipelines.ExecutorInput.Inputs.ParametersEntry - 103, // 128: ml_pipelines.ExecutorInput.Inputs.artifacts:type_name -> ml_pipelines.ExecutorInput.Inputs.ArtifactsEntry - 104, // 129: ml_pipelines.ExecutorInput.Inputs.parameter_values:type_name -> ml_pipelines.ExecutorInput.Inputs.ParameterValuesEntry - 105, // 130: ml_pipelines.ExecutorInput.Outputs.parameters:type_name -> ml_pipelines.ExecutorInput.Outputs.ParametersEntry - 106, // 131: ml_pipelines.ExecutorInput.Outputs.artifacts:type_name -> ml_pipelines.ExecutorInput.Outputs.ArtifactsEntry - 24, // 132: ml_pipelines.ExecutorInput.Inputs.ParametersEntry.value:type_name -> ml_pipelines.Value - 26, // 133: ml_pipelines.ExecutorInput.Inputs.ArtifactsEntry.value:type_name -> ml_pipelines.ArtifactList - 113, // 134: ml_pipelines.ExecutorInput.Inputs.ParameterValuesEntry.value:type_name -> google.protobuf.Value - 100, // 135: ml_pipelines.ExecutorInput.Outputs.ParametersEntry.value:type_name -> ml_pipelines.ExecutorInput.OutputParameter - 26, // 136: ml_pipelines.ExecutorInput.Outputs.ArtifactsEntry.value:type_name -> ml_pipelines.ArtifactList - 24, // 137: ml_pipelines.ExecutorOutput.ParametersEntry.value:type_name -> ml_pipelines.Value - 26, // 138: ml_pipelines.ExecutorOutput.ArtifactsEntry.value:type_name -> ml_pipelines.ArtifactList - 113, // 139: ml_pipelines.ExecutorOutput.ParameterValuesEntry.value:type_name -> google.protobuf.Value - 32, // 140: ml_pipelines.PlatformSpec.PlatformsEntry.value:type_name -> ml_pipelines.SinglePlatformSpec - 112, // 141: ml_pipelines.PlatformDeploymentConfig.ExecutorsEntry.value:type_name -> google.protobuf.Struct - 142, // [142:142] is the sub-list for method output_type - 142, // [142:142] is the sub-list for method input_type - 142, // [142:142] is the sub-list for extension type_name - 142, // [142:142] is the sub-list for extension extendee - 0, // [0:142] is the sub-list for field type_name + 115, // 0: ml_pipelines.PipelineJob.pipeline_spec:type_name -> google.protobuf.Struct + 40, // 1: ml_pipelines.PipelineJob.labels:type_name -> ml_pipelines.PipelineJob.LabelsEntry + 41, // 2: ml_pipelines.PipelineJob.runtime_config:type_name -> ml_pipelines.PipelineJob.RuntimeConfig + 22, // 3: ml_pipelines.PipelineSpec.pipeline_info:type_name -> ml_pipelines.PipelineInfo + 115, // 4: ml_pipelines.PipelineSpec.deployment_spec:type_name -> google.protobuf.Struct + 45, // 5: ml_pipelines.PipelineSpec.components:type_name -> ml_pipelines.PipelineSpec.ComponentsEntry + 7, // 6: ml_pipelines.PipelineSpec.root:type_name -> ml_pipelines.ComponentSpec + 10, // 7: ml_pipelines.ComponentSpec.input_definitions:type_name -> ml_pipelines.ComponentInputsSpec + 11, // 8: ml_pipelines.ComponentSpec.output_definitions:type_name -> ml_pipelines.ComponentOutputsSpec + 8, // 9: ml_pipelines.ComponentSpec.dag:type_name -> 
ml_pipelines.DagSpec + 35, // 10: ml_pipelines.ComponentSpec.single_platform_specs:type_name -> ml_pipelines.SinglePlatformSpec + 17, // 11: ml_pipelines.ComponentSpec.task_config_passthroughs:type_name -> ml_pipelines.TaskConfigPassthrough + 46, // 12: ml_pipelines.DagSpec.tasks:type_name -> ml_pipelines.DagSpec.TasksEntry + 9, // 13: ml_pipelines.DagSpec.outputs:type_name -> ml_pipelines.DagOutputsSpec + 49, // 14: ml_pipelines.DagOutputsSpec.artifacts:type_name -> ml_pipelines.DagOutputsSpec.ArtifactsEntry + 54, // 15: ml_pipelines.DagOutputsSpec.parameters:type_name -> ml_pipelines.DagOutputsSpec.ParametersEntry + 58, // 16: ml_pipelines.ComponentInputsSpec.artifacts:type_name -> ml_pipelines.ComponentInputsSpec.ArtifactsEntry + 59, // 17: ml_pipelines.ComponentInputsSpec.parameters:type_name -> ml_pipelines.ComponentInputsSpec.ParametersEntry + 62, // 18: ml_pipelines.ComponentOutputsSpec.artifacts:type_name -> ml_pipelines.ComponentOutputsSpec.ArtifactsEntry + 63, // 19: ml_pipelines.ComponentOutputsSpec.parameters:type_name -> ml_pipelines.ComponentOutputsSpec.ParametersEntry + 68, // 20: ml_pipelines.TaskInputsSpec.parameters:type_name -> ml_pipelines.TaskInputsSpec.ParametersEntry + 69, // 21: ml_pipelines.TaskInputsSpec.artifacts:type_name -> ml_pipelines.TaskInputsSpec.ArtifactsEntry + 75, // 22: ml_pipelines.TaskOutputsSpec.parameters:type_name -> ml_pipelines.TaskOutputsSpec.ParametersEntry + 76, // 23: ml_pipelines.TaskOutputsSpec.artifacts:type_name -> ml_pipelines.TaskOutputsSpec.ArtifactsEntry + 2, // 24: ml_pipelines.TaskConfigPassthrough.field:type_name -> ml_pipelines.TaskConfigPassthroughType.TaskConfigPassthroughTypeEnum + 24, // 25: ml_pipelines.PipelineTaskSpec.task_info:type_name -> ml_pipelines.PipelineTaskInfo + 12, // 26: ml_pipelines.PipelineTaskSpec.inputs:type_name -> ml_pipelines.TaskInputsSpec + 79, // 27: ml_pipelines.PipelineTaskSpec.caching_options:type_name -> ml_pipelines.PipelineTaskSpec.CachingOptions + 21, // 28: ml_pipelines.PipelineTaskSpec.component_ref:type_name -> ml_pipelines.ComponentRef + 80, // 29: ml_pipelines.PipelineTaskSpec.trigger_policy:type_name -> ml_pipelines.PipelineTaskSpec.TriggerPolicy + 19, // 30: ml_pipelines.PipelineTaskSpec.artifact_iterator:type_name -> ml_pipelines.ArtifactIteratorSpec + 20, // 31: ml_pipelines.PipelineTaskSpec.parameter_iterator:type_name -> ml_pipelines.ParameterIteratorSpec + 81, // 32: ml_pipelines.PipelineTaskSpec.retry_policy:type_name -> ml_pipelines.PipelineTaskSpec.RetryPolicy + 82, // 33: ml_pipelines.PipelineTaskSpec.iterator_policy:type_name -> ml_pipelines.PipelineTaskSpec.IteratorPolicy + 83, // 34: ml_pipelines.ArtifactIteratorSpec.items:type_name -> ml_pipelines.ArtifactIteratorSpec.ItemsSpec + 84, // 35: ml_pipelines.ParameterIteratorSpec.items:type_name -> ml_pipelines.ParameterIteratorSpec.ItemsSpec + 27, // 36: ml_pipelines.ValueOrRuntimeParameter.constant_value:type_name -> ml_pipelines.Value + 116, // 37: ml_pipelines.ValueOrRuntimeParameter.constant:type_name -> google.protobuf.Value + 90, // 38: ml_pipelines.PipelineDeploymentConfig.executors:type_name -> ml_pipelines.PipelineDeploymentConfig.ExecutorsEntry + 23, // 39: ml_pipelines.RuntimeArtifact.type:type_name -> ml_pipelines.ArtifactTypeSchema + 100, // 40: ml_pipelines.RuntimeArtifact.properties:type_name -> ml_pipelines.RuntimeArtifact.PropertiesEntry + 101, // 41: ml_pipelines.RuntimeArtifact.custom_properties:type_name -> ml_pipelines.RuntimeArtifact.CustomPropertiesEntry + 115, // 42: 
ml_pipelines.RuntimeArtifact.metadata:type_name -> google.protobuf.Struct + 28, // 43: ml_pipelines.ArtifactList.artifacts:type_name -> ml_pipelines.RuntimeArtifact + 102, // 44: ml_pipelines.ExecutorInput.inputs:type_name -> ml_pipelines.ExecutorInput.Inputs + 104, // 45: ml_pipelines.ExecutorInput.outputs:type_name -> ml_pipelines.ExecutorInput.Outputs + 110, // 46: ml_pipelines.ExecutorOutput.parameters:type_name -> ml_pipelines.ExecutorOutput.ParametersEntry + 111, // 47: ml_pipelines.ExecutorOutput.artifacts:type_name -> ml_pipelines.ExecutorOutput.ArtifactsEntry + 112, // 48: ml_pipelines.ExecutorOutput.parameter_values:type_name -> ml_pipelines.ExecutorOutput.ParameterValuesEntry + 117, // 49: ml_pipelines.PipelineTaskFinalStatus.error:type_name -> google.rpc.Status + 113, // 50: ml_pipelines.PlatformSpec.platforms:type_name -> ml_pipelines.PlatformSpec.PlatformsEntry + 36, // 51: ml_pipelines.SinglePlatformSpec.deployment_spec:type_name -> ml_pipelines.PlatformDeploymentConfig + 115, // 52: ml_pipelines.SinglePlatformSpec.config:type_name -> google.protobuf.Struct + 39, // 53: ml_pipelines.SinglePlatformSpec.pipelineConfig:type_name -> ml_pipelines.PipelineConfig + 114, // 54: ml_pipelines.PlatformDeploymentConfig.executors:type_name -> ml_pipelines.PlatformDeploymentConfig.ExecutorsEntry + 38, // 55: ml_pipelines.WorkspaceConfig.kubernetes:type_name -> ml_pipelines.KubernetesWorkspaceConfig + 115, // 56: ml_pipelines.KubernetesWorkspaceConfig.pvc_spec_patch:type_name -> google.protobuf.Struct + 37, // 57: ml_pipelines.PipelineConfig.workspace:type_name -> ml_pipelines.WorkspaceConfig + 42, // 58: ml_pipelines.PipelineJob.RuntimeConfig.parameters:type_name -> ml_pipelines.PipelineJob.RuntimeConfig.ParametersEntry + 43, // 59: ml_pipelines.PipelineJob.RuntimeConfig.parameter_values:type_name -> ml_pipelines.PipelineJob.RuntimeConfig.ParameterValuesEntry + 27, // 60: ml_pipelines.PipelineJob.RuntimeConfig.ParametersEntry.value:type_name -> ml_pipelines.Value + 116, // 61: ml_pipelines.PipelineJob.RuntimeConfig.ParameterValuesEntry.value:type_name -> google.protobuf.Value + 0, // 62: ml_pipelines.PipelineSpec.RuntimeParameter.type:type_name -> ml_pipelines.PrimitiveType.PrimitiveTypeEnum + 27, // 63: ml_pipelines.PipelineSpec.RuntimeParameter.default_value:type_name -> ml_pipelines.Value + 7, // 64: ml_pipelines.PipelineSpec.ComponentsEntry.value:type_name -> ml_pipelines.ComponentSpec + 18, // 65: ml_pipelines.DagSpec.TasksEntry.value:type_name -> ml_pipelines.PipelineTaskSpec + 47, // 66: ml_pipelines.DagOutputsSpec.DagOutputArtifactSpec.artifact_selectors:type_name -> ml_pipelines.DagOutputsSpec.ArtifactSelectorSpec + 48, // 67: ml_pipelines.DagOutputsSpec.ArtifactsEntry.value:type_name -> ml_pipelines.DagOutputsSpec.DagOutputArtifactSpec + 50, // 68: ml_pipelines.DagOutputsSpec.ParameterSelectorsSpec.parameter_selectors:type_name -> ml_pipelines.DagOutputsSpec.ParameterSelectorSpec + 55, // 69: ml_pipelines.DagOutputsSpec.MapParameterSelectorsSpec.mapped_parameters:type_name -> ml_pipelines.DagOutputsSpec.MapParameterSelectorsSpec.MappedParametersEntry + 50, // 70: ml_pipelines.DagOutputsSpec.DagOutputParameterSpec.value_from_parameter:type_name -> ml_pipelines.DagOutputsSpec.ParameterSelectorSpec + 51, // 71: ml_pipelines.DagOutputsSpec.DagOutputParameterSpec.value_from_oneof:type_name -> ml_pipelines.DagOutputsSpec.ParameterSelectorsSpec + 53, // 72: ml_pipelines.DagOutputsSpec.ParametersEntry.value:type_name -> ml_pipelines.DagOutputsSpec.DagOutputParameterSpec + 50, // 73: 
ml_pipelines.DagOutputsSpec.MapParameterSelectorsSpec.MappedParametersEntry.value:type_name -> ml_pipelines.DagOutputsSpec.ParameterSelectorSpec + 23, // 74: ml_pipelines.ComponentInputsSpec.ArtifactSpec.artifact_type:type_name -> ml_pipelines.ArtifactTypeSchema + 0, // 75: ml_pipelines.ComponentInputsSpec.ParameterSpec.type:type_name -> ml_pipelines.PrimitiveType.PrimitiveTypeEnum + 1, // 76: ml_pipelines.ComponentInputsSpec.ParameterSpec.parameter_type:type_name -> ml_pipelines.ParameterType.ParameterTypeEnum + 116, // 77: ml_pipelines.ComponentInputsSpec.ParameterSpec.default_value:type_name -> google.protobuf.Value + 56, // 78: ml_pipelines.ComponentInputsSpec.ArtifactsEntry.value:type_name -> ml_pipelines.ComponentInputsSpec.ArtifactSpec + 57, // 79: ml_pipelines.ComponentInputsSpec.ParametersEntry.value:type_name -> ml_pipelines.ComponentInputsSpec.ParameterSpec + 23, // 80: ml_pipelines.ComponentOutputsSpec.ArtifactSpec.artifact_type:type_name -> ml_pipelines.ArtifactTypeSchema + 64, // 81: ml_pipelines.ComponentOutputsSpec.ArtifactSpec.properties:type_name -> ml_pipelines.ComponentOutputsSpec.ArtifactSpec.PropertiesEntry + 65, // 82: ml_pipelines.ComponentOutputsSpec.ArtifactSpec.custom_properties:type_name -> ml_pipelines.ComponentOutputsSpec.ArtifactSpec.CustomPropertiesEntry + 115, // 83: ml_pipelines.ComponentOutputsSpec.ArtifactSpec.metadata:type_name -> google.protobuf.Struct + 0, // 84: ml_pipelines.ComponentOutputsSpec.ParameterSpec.type:type_name -> ml_pipelines.PrimitiveType.PrimitiveTypeEnum + 1, // 85: ml_pipelines.ComponentOutputsSpec.ParameterSpec.parameter_type:type_name -> ml_pipelines.ParameterType.ParameterTypeEnum + 60, // 86: ml_pipelines.ComponentOutputsSpec.ArtifactsEntry.value:type_name -> ml_pipelines.ComponentOutputsSpec.ArtifactSpec + 61, // 87: ml_pipelines.ComponentOutputsSpec.ParametersEntry.value:type_name -> ml_pipelines.ComponentOutputsSpec.ParameterSpec + 25, // 88: ml_pipelines.ComponentOutputsSpec.ArtifactSpec.PropertiesEntry.value:type_name -> ml_pipelines.ValueOrRuntimeParameter + 25, // 89: ml_pipelines.ComponentOutputsSpec.ArtifactSpec.CustomPropertiesEntry.value:type_name -> ml_pipelines.ValueOrRuntimeParameter + 70, // 90: ml_pipelines.TaskInputsSpec.InputArtifactSpec.task_output_artifact:type_name -> ml_pipelines.TaskInputsSpec.InputArtifactSpec.TaskOutputArtifactSpec + 71, // 91: ml_pipelines.TaskInputsSpec.InputParameterSpec.task_output_parameter:type_name -> ml_pipelines.TaskInputsSpec.InputParameterSpec.TaskOutputParameterSpec + 25, // 92: ml_pipelines.TaskInputsSpec.InputParameterSpec.runtime_value:type_name -> ml_pipelines.ValueOrRuntimeParameter + 72, // 93: ml_pipelines.TaskInputsSpec.InputParameterSpec.task_final_status:type_name -> ml_pipelines.TaskInputsSpec.InputParameterSpec.TaskFinalStatus + 67, // 94: ml_pipelines.TaskInputsSpec.ParametersEntry.value:type_name -> ml_pipelines.TaskInputsSpec.InputParameterSpec + 66, // 95: ml_pipelines.TaskInputsSpec.ArtifactsEntry.value:type_name -> ml_pipelines.TaskInputsSpec.InputArtifactSpec + 23, // 96: ml_pipelines.TaskOutputsSpec.OutputArtifactSpec.artifact_type:type_name -> ml_pipelines.ArtifactTypeSchema + 77, // 97: ml_pipelines.TaskOutputsSpec.OutputArtifactSpec.properties:type_name -> ml_pipelines.TaskOutputsSpec.OutputArtifactSpec.PropertiesEntry + 78, // 98: ml_pipelines.TaskOutputsSpec.OutputArtifactSpec.custom_properties:type_name -> ml_pipelines.TaskOutputsSpec.OutputArtifactSpec.CustomPropertiesEntry + 0, // 99: ml_pipelines.TaskOutputsSpec.OutputParameterSpec.type:type_name 
-> ml_pipelines.PrimitiveType.PrimitiveTypeEnum + 74, // 100: ml_pipelines.TaskOutputsSpec.ParametersEntry.value:type_name -> ml_pipelines.TaskOutputsSpec.OutputParameterSpec + 73, // 101: ml_pipelines.TaskOutputsSpec.ArtifactsEntry.value:type_name -> ml_pipelines.TaskOutputsSpec.OutputArtifactSpec + 25, // 102: ml_pipelines.TaskOutputsSpec.OutputArtifactSpec.PropertiesEntry.value:type_name -> ml_pipelines.ValueOrRuntimeParameter + 25, // 103: ml_pipelines.TaskOutputsSpec.OutputArtifactSpec.CustomPropertiesEntry.value:type_name -> ml_pipelines.ValueOrRuntimeParameter + 3, // 104: ml_pipelines.PipelineTaskSpec.TriggerPolicy.strategy:type_name -> ml_pipelines.PipelineTaskSpec.TriggerPolicy.TriggerStrategy + 118, // 105: ml_pipelines.PipelineTaskSpec.RetryPolicy.backoff_duration:type_name -> google.protobuf.Duration + 118, // 106: ml_pipelines.PipelineTaskSpec.RetryPolicy.backoff_max_duration:type_name -> google.protobuf.Duration + 91, // 107: ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.lifecycle:type_name -> ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.Lifecycle + 92, // 108: ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.resources:type_name -> ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.ResourceSpec + 93, // 109: ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.env:type_name -> ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.EnvVar + 25, // 110: ml_pipelines.PipelineDeploymentConfig.ImporterSpec.artifact_uri:type_name -> ml_pipelines.ValueOrRuntimeParameter + 23, // 111: ml_pipelines.PipelineDeploymentConfig.ImporterSpec.type_schema:type_name -> ml_pipelines.ArtifactTypeSchema + 96, // 112: ml_pipelines.PipelineDeploymentConfig.ImporterSpec.properties:type_name -> ml_pipelines.PipelineDeploymentConfig.ImporterSpec.PropertiesEntry + 97, // 113: ml_pipelines.PipelineDeploymentConfig.ImporterSpec.custom_properties:type_name -> ml_pipelines.PipelineDeploymentConfig.ImporterSpec.CustomPropertiesEntry + 115, // 114: ml_pipelines.PipelineDeploymentConfig.ImporterSpec.metadata:type_name -> google.protobuf.Struct + 99, // 115: ml_pipelines.PipelineDeploymentConfig.ResolverSpec.output_artifact_queries:type_name -> ml_pipelines.PipelineDeploymentConfig.ResolverSpec.OutputArtifactQueriesEntry + 115, // 116: ml_pipelines.PipelineDeploymentConfig.AIPlatformCustomJobSpec.custom_job:type_name -> google.protobuf.Struct + 85, // 117: ml_pipelines.PipelineDeploymentConfig.ExecutorSpec.container:type_name -> ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec + 86, // 118: ml_pipelines.PipelineDeploymentConfig.ExecutorSpec.importer:type_name -> ml_pipelines.PipelineDeploymentConfig.ImporterSpec + 87, // 119: ml_pipelines.PipelineDeploymentConfig.ExecutorSpec.resolver:type_name -> ml_pipelines.PipelineDeploymentConfig.ResolverSpec + 88, // 120: ml_pipelines.PipelineDeploymentConfig.ExecutorSpec.custom_job:type_name -> ml_pipelines.PipelineDeploymentConfig.AIPlatformCustomJobSpec + 89, // 121: ml_pipelines.PipelineDeploymentConfig.ExecutorsEntry.value:type_name -> ml_pipelines.PipelineDeploymentConfig.ExecutorSpec + 94, // 122: ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.Lifecycle.pre_cache_check:type_name -> ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.Lifecycle.Exec + 95, // 123: ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.ResourceSpec.accelerator:type_name -> ml_pipelines.PipelineDeploymentConfig.PipelineContainerSpec.ResourceSpec.AcceleratorConfig + 25, // 124: 
ml_pipelines.PipelineDeploymentConfig.ImporterSpec.PropertiesEntry.value:type_name -> ml_pipelines.ValueOrRuntimeParameter + 25, // 125: ml_pipelines.PipelineDeploymentConfig.ImporterSpec.CustomPropertiesEntry.value:type_name -> ml_pipelines.ValueOrRuntimeParameter + 98, // 126: ml_pipelines.PipelineDeploymentConfig.ResolverSpec.OutputArtifactQueriesEntry.value:type_name -> ml_pipelines.PipelineDeploymentConfig.ResolverSpec.ArtifactQuerySpec + 27, // 127: ml_pipelines.RuntimeArtifact.PropertiesEntry.value:type_name -> ml_pipelines.Value + 27, // 128: ml_pipelines.RuntimeArtifact.CustomPropertiesEntry.value:type_name -> ml_pipelines.Value + 105, // 129: ml_pipelines.ExecutorInput.Inputs.parameters:type_name -> ml_pipelines.ExecutorInput.Inputs.ParametersEntry + 106, // 130: ml_pipelines.ExecutorInput.Inputs.artifacts:type_name -> ml_pipelines.ExecutorInput.Inputs.ArtifactsEntry + 107, // 131: ml_pipelines.ExecutorInput.Inputs.parameter_values:type_name -> ml_pipelines.ExecutorInput.Inputs.ParameterValuesEntry + 108, // 132: ml_pipelines.ExecutorInput.Outputs.parameters:type_name -> ml_pipelines.ExecutorInput.Outputs.ParametersEntry + 109, // 133: ml_pipelines.ExecutorInput.Outputs.artifacts:type_name -> ml_pipelines.ExecutorInput.Outputs.ArtifactsEntry + 27, // 134: ml_pipelines.ExecutorInput.Inputs.ParametersEntry.value:type_name -> ml_pipelines.Value + 29, // 135: ml_pipelines.ExecutorInput.Inputs.ArtifactsEntry.value:type_name -> ml_pipelines.ArtifactList + 116, // 136: ml_pipelines.ExecutorInput.Inputs.ParameterValuesEntry.value:type_name -> google.protobuf.Value + 103, // 137: ml_pipelines.ExecutorInput.Outputs.ParametersEntry.value:type_name -> ml_pipelines.ExecutorInput.OutputParameter + 29, // 138: ml_pipelines.ExecutorInput.Outputs.ArtifactsEntry.value:type_name -> ml_pipelines.ArtifactList + 27, // 139: ml_pipelines.ExecutorOutput.ParametersEntry.value:type_name -> ml_pipelines.Value + 29, // 140: ml_pipelines.ExecutorOutput.ArtifactsEntry.value:type_name -> ml_pipelines.ArtifactList + 116, // 141: ml_pipelines.ExecutorOutput.ParameterValuesEntry.value:type_name -> google.protobuf.Value + 35, // 142: ml_pipelines.PlatformSpec.PlatformsEntry.value:type_name -> ml_pipelines.SinglePlatformSpec + 115, // 143: ml_pipelines.PlatformDeploymentConfig.ExecutorsEntry.value:type_name -> google.protobuf.Struct + 144, // [144:144] is the sub-list for method output_type + 144, // [144:144] is the sub-list for method input_type + 144, // [144:144] is the sub-list for extension type_name + 144, // [144:144] is the sub-list for extension extendee + 0, // [0:144] is the sub-list for field type_name } func init() { file_pipeline_spec_proto_init() } @@ -7007,917 +6407,51 @@ func file_pipeline_spec_proto_init() { if File_pipeline_spec_proto != nil { return } - if !protoimpl.UnsafeEnabled { - file_pipeline_spec_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineJob); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ComponentSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return 
&v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DagSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DagOutputsSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ComponentInputsSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ComponentOutputsSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TaskInputsSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TaskOutputsSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PrimitiveType); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ParameterType); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineTaskSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ArtifactIteratorSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ParameterIteratorSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ComponentRef); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineInfo); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ArtifactTypeSchema); 
i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineTaskInfo); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ValueOrRuntimeParameter); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineDeploymentConfig); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Value); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RuntimeArtifact); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ArtifactList); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ExecutorInput); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ExecutorOutput); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineTaskFinalStatus); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineStateEnum); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PlatformSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SinglePlatformSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[29].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PlatformDeploymentConfig); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - 
file_pipeline_spec_proto_msgTypes[30].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*WorkspaceConfig); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[31].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*KubernetesWorkspaceConfig); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[32].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineConfig); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[34].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineJob_RuntimeConfig); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[37].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineSpec_RuntimeParameter); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[40].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DagOutputsSpec_ArtifactSelectorSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[41].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DagOutputsSpec_DagOutputArtifactSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[43].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DagOutputsSpec_ParameterSelectorSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[44].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DagOutputsSpec_ParameterSelectorsSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[45].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DagOutputsSpec_MapParameterSelectorsSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[46].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DagOutputsSpec_DagOutputParameterSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[49].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ComponentInputsSpec_ArtifactSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[50].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ComponentInputsSpec_ParameterSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - 
default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[53].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ComponentOutputsSpec_ArtifactSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[54].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ComponentOutputsSpec_ParameterSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[59].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TaskInputsSpec_InputArtifactSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[60].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TaskInputsSpec_InputParameterSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[63].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TaskInputsSpec_InputArtifactSpec_TaskOutputArtifactSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[64].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TaskInputsSpec_InputParameterSpec_TaskOutputParameterSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[65].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TaskInputsSpec_InputParameterSpec_TaskFinalStatus); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[66].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TaskOutputsSpec_OutputArtifactSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[67].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TaskOutputsSpec_OutputParameterSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[72].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineTaskSpec_CachingOptions); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[73].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineTaskSpec_TriggerPolicy); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[74].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineTaskSpec_RetryPolicy); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[75].Exporter = func(v interface{}, i int) interface{} { - switch v := 
v.(*PipelineTaskSpec_IteratorPolicy); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[76].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ArtifactIteratorSpec_ItemsSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[77].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ParameterIteratorSpec_ItemsSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[78].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineDeploymentConfig_PipelineContainerSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[79].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineDeploymentConfig_ImporterSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[80].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineDeploymentConfig_ResolverSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[81].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineDeploymentConfig_AIPlatformCustomJobSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[82].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineDeploymentConfig_ExecutorSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[84].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[85].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[86].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineDeploymentConfig_PipelineContainerSpec_EnvVar); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[87].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineDeploymentConfig_PipelineContainerSpec_Lifecycle_Exec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[88].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec_AcceleratorConfig); i { 
- case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[91].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineDeploymentConfig_ResolverSpec_ArtifactQuerySpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[95].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ExecutorInput_Inputs); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[96].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ExecutorInput_OutputParameter); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_pipeline_spec_proto_msgTypes[97].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ExecutorInput_Outputs); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_pipeline_spec_proto_msgTypes[2].OneofWrappers = []interface{}{ + file_pipeline_spec_proto_msgTypes[2].OneofWrappers = []any{ (*ComponentSpec_Dag)(nil), (*ComponentSpec_ExecutorLabel)(nil), } - file_pipeline_spec_proto_msgTypes[11].OneofWrappers = []interface{}{ + file_pipeline_spec_proto_msgTypes[13].OneofWrappers = []any{ (*PipelineTaskSpec_ArtifactIterator)(nil), (*PipelineTaskSpec_ParameterIterator)(nil), } - file_pipeline_spec_proto_msgTypes[16].OneofWrappers = []interface{}{ + file_pipeline_spec_proto_msgTypes[18].OneofWrappers = []any{ (*ArtifactTypeSchema_SchemaTitle)(nil), (*ArtifactTypeSchema_SchemaUri)(nil), (*ArtifactTypeSchema_InstanceSchema)(nil), } - file_pipeline_spec_proto_msgTypes[18].OneofWrappers = []interface{}{ + file_pipeline_spec_proto_msgTypes[20].OneofWrappers = []any{ (*ValueOrRuntimeParameter_ConstantValue)(nil), (*ValueOrRuntimeParameter_RuntimeParameter)(nil), (*ValueOrRuntimeParameter_Constant)(nil), } - file_pipeline_spec_proto_msgTypes[20].OneofWrappers = []interface{}{ + file_pipeline_spec_proto_msgTypes[22].OneofWrappers = []any{ (*Value_IntValue)(nil), (*Value_DoubleValue)(nil), (*Value_StringValue)(nil), } - file_pipeline_spec_proto_msgTypes[30].OneofWrappers = []interface{}{} - file_pipeline_spec_proto_msgTypes[31].OneofWrappers = []interface{}{} - file_pipeline_spec_proto_msgTypes[32].OneofWrappers = []interface{}{} - file_pipeline_spec_proto_msgTypes[46].OneofWrappers = []interface{}{ + file_pipeline_spec_proto_msgTypes[32].OneofWrappers = []any{} + file_pipeline_spec_proto_msgTypes[33].OneofWrappers = []any{} + file_pipeline_spec_proto_msgTypes[34].OneofWrappers = []any{} + file_pipeline_spec_proto_msgTypes[48].OneofWrappers = []any{ (*DagOutputsSpec_DagOutputParameterSpec_ValueFromParameter)(nil), (*DagOutputsSpec_DagOutputParameterSpec_ValueFromOneof)(nil), } - file_pipeline_spec_proto_msgTypes[59].OneofWrappers = []interface{}{ + file_pipeline_spec_proto_msgTypes[61].OneofWrappers = []any{ (*TaskInputsSpec_InputArtifactSpec_TaskOutputArtifact)(nil), (*TaskInputsSpec_InputArtifactSpec_ComponentInputArtifact)(nil), } - file_pipeline_spec_proto_msgTypes[60].OneofWrappers = []interface{}{ + file_pipeline_spec_proto_msgTypes[62].OneofWrappers = []any{ (*TaskInputsSpec_InputParameterSpec_TaskOutputParameter)(nil), 
(*TaskInputsSpec_InputParameterSpec_RuntimeValue)(nil), (*TaskInputsSpec_InputParameterSpec_ComponentInputParameter)(nil), (*TaskInputsSpec_InputParameterSpec_TaskFinalStatus_)(nil), } - file_pipeline_spec_proto_msgTypes[77].OneofWrappers = []interface{}{ + file_pipeline_spec_proto_msgTypes[79].OneofWrappers = []any{ (*ParameterIteratorSpec_ItemsSpec_Raw)(nil), (*ParameterIteratorSpec_ItemsSpec_InputParameter)(nil), } - file_pipeline_spec_proto_msgTypes[82].OneofWrappers = []interface{}{ + file_pipeline_spec_proto_msgTypes[84].OneofWrappers = []any{ (*PipelineDeploymentConfig_ExecutorSpec_Container)(nil), (*PipelineDeploymentConfig_ExecutorSpec_Importer)(nil), (*PipelineDeploymentConfig_ExecutorSpec_Resolver)(nil), @@ -7927,9 +6461,9 @@ func file_pipeline_spec_proto_init() { out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_pipeline_spec_proto_rawDesc, - NumEnums: 4, - NumMessages: 108, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_pipeline_spec_proto_rawDesc), len(file_pipeline_spec_proto_rawDesc)), + NumEnums: 5, + NumMessages: 110, NumExtensions: 0, NumServices: 0, }, @@ -7939,7 +6473,6 @@ func file_pipeline_spec_proto_init() { MessageInfos: file_pipeline_spec_proto_msgTypes, }.Build() File_pipeline_spec_proto = out.File - file_pipeline_spec_proto_rawDesc = nil file_pipeline_spec_proto_goTypes = nil file_pipeline_spec_proto_depIdxs = nil } diff --git a/api/v2alpha1/pipeline_spec.proto b/api/v2alpha1/pipeline_spec.proto index d9d6bec87ad..11dece99820 100644 --- a/api/v2alpha1/pipeline_spec.proto +++ b/api/v2alpha1/pipeline_spec.proto @@ -95,6 +95,8 @@ message ComponentSpec { } // Supports platform-specific component features. repeated SinglePlatformSpec single_platform_specs = 5; + // Specifies the task configurations that can be passed through to an external workload. + repeated TaskConfigPassthrough task_config_passthroughs = 6; } // A DAG contains multiple tasks. @@ -442,9 +444,45 @@ message ParameterType { // Indicates that a parameter is a TaskFinalStatus type; these types can only accept inputs // specified by InputParameterSpec.task_final_status TASK_FINAL_STATUS = 7; + // Indicates that a parameter is a TaskConfig type; these types are + // injected by the backend to provide the configuration set on the task. + TASK_CONFIG = 8; } } + +// Represents the task configurations that can be passed through to an external workload. +message TaskConfigPassthroughType { + enum TaskConfigPassthroughTypeEnum { + // Throwaway default value. + NONE = 0; + // Indicates that the resource limits and requests should be passed through to the external workload. + // Be cautious about also setting apply_to_task=true since that will double the resources required for + // the task. + RESOURCES = 1; + // Indicates that the environment variables should be passed through to the external workload. + // It is generally safe to always set apply_to_task=true on this field. + ENV = 2; + // Indicates that the Kubernetes node affinity should be passed through to the external workload. + KUBERNETES_AFFINITY = 3; + // Indicates that the Kubernetes node tolerations should be passed through to the external workload. + KUBERNETES_TOLERATIONS = 4; + // Indicates that the Kubernetes node selector should be passed through to the external workload. + KUBERNETES_NODE_SELECTOR = 5; + // Indicates that the Kubernetes persistent volumes and ConfigMaps/Secrets mounted as volumes should be + // passed through to the external workload.
// Be sure that when setting apply_to_task=true, the volumes are + // ReadWriteMany or ReadOnlyMany or else the task's pod may not start. + // This is useful when the task prepares a shared volume for the external workload or defines an output artifact + // (e.g. dsl.Model) that is created by the external workload. + KUBERNETES_VOLUMES = 6; + } +} + +message TaskConfigPassthrough { + TaskConfigPassthroughType.TaskConfigPassthroughTypeEnum field = 1; + bool apply_to_task = 2; +} + // The spec of a pipeline task. message PipelineTaskSpec { // Basic info of a pipeline task. diff --git a/api/v2alpha1/python/requirements.txt b/api/v2alpha1/python/requirements.txt new file mode 100644 index 00000000000..1142e92d701 --- /dev/null +++ b/api/v2alpha1/python/requirements.txt @@ -0,0 +1,8 @@ +# Typically we can't support multiple major versions of protobuf +# The runtime protobuf package MUST be aligned with the protobuf +# libraries used to generate the code (protoc, protoc-gen-go, etc.) +# For example protobuf 5.x aligns with protoc 26.x-29.x but +# 6.x aligns with 30.x+. +# See for support tiers: +# https://protobuf.dev/support/version-support/#python +protobuf>=6.31.1,<7.0 diff --git a/api/v2alpha1/python/setup.py b/api/v2alpha1/python/setup.py index 6ab2a20cb43..9adf2096ac1 100644 --- a/api/v2alpha1/python/setup.py +++ b/api/v2alpha1/python/setup.py @@ -13,9 +13,21 @@ # limitations under the License. import setuptools +import os +from typing import List NAME = 'kfp-pipeline-spec' -VERSION = '0.7.0' +VERSION = '2.14.0' + +def get_requirements(requirements_file: str) -> List[str]: + """Read requirements from the given requirements file.""" + + file_path = os.path.join(os.path.dirname(__file__), requirements_file) + with open(file_path, 'r') as f: + lines = f.readlines() + lines = [line.strip() for line in lines] + lines = [line for line in lines if not line.startswith('#') and line] + return lines setuptools.setup( name=NAME, @@ -26,7 +38,7 @@ url='https://github.com/kubeflow/pipelines', packages=setuptools.find_namespace_packages(include=['kfp.*']), python_requires='>=3.9.0', - install_requires=['protobuf>=4.21.1,<5'], + install_requires=get_requirements('requirements.txt'), include_package_data=True, license='Apache 2.0', ) diff --git a/backend/Dockerfile b/backend/Dockerfile index 3fe05b513ee..0ac8825a143 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -15,7 +15,7 @@ # Build arguments ARG SOURCE_CODE=. -FROM registry.access.redhat.com/ubi9/go-toolset:1.23 AS builder +FROM registry.access.redhat.com/ubi9/go-toolset:1.24 AS builder USER root @@ -35,7 +35,7 @@ RUN GO111MODULE=on CGO_ENABLED=1 GOEXPERIMENT=strictfipsruntime go build -tags s dnf clean all # 2. Compile preloaded pipeline samples -FROM registry.access.redhat.com/ubi9/python-39:9.5 AS compiler +FROM registry.access.redhat.com/ubi9/python-311 AS compiler ARG TARGETOS TARGETARCH @@ -46,7 +46,7 @@ COPY backend/requirements.txt .
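As a quick illustration of the TaskConfigPassthrough message added to pipeline_spec.proto above, the following is a minimal, hypothetical Go sketch. It assumes the regenerated bindings live at github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec and follow the usual protoc-gen-go naming for the nested enum (TaskConfigPassthroughType_RESOURCES and so on); it is not code from this change.

```go
package main

import (
	"fmt"

	"github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec"
)

func main() {
	// Hypothetical component that forwards its resource requests/limits and
	// environment variables to an external workload. ENV is also applied to
	// the task itself, while RESOURCES is not, to avoid doubling what the
	// task requests (see the proto comments above).
	comp := &pipelinespec.ComponentSpec{
		TaskConfigPassthroughs: []*pipelinespec.TaskConfigPassthrough{
			{Field: pipelinespec.TaskConfigPassthroughType_RESOURCES, ApplyToTask: false},
			{Field: pipelinespec.TaskConfigPassthroughType_ENV, ApplyToTask: true},
		},
	}
	fmt.Println(len(comp.GetTaskConfigPassthroughs())) // 2
}
```

The apply_to_task flag mirrors the proto comments: enabling it for RESOURCES reserves the resources twice (once for the task pod, once for the external workload), while enabling it for ENV is generally harmless.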
RUN python3 -m pip install -r requirements.txt --no-cache-dir # Downloading Argo CLI so that the samples are validated -ENV ARGO_VERSION=v3.5.14 +ENV ARGO_VERSION=v3.6.7 RUN curl -sLO https://github.com/argoproj/argo-workflows/releases/download/${ARGO_VERSION}/argo-${TARGETOS:-linux}-${TARGETARCH:-amd64}.gz && \ gunzip argo-${TARGETOS:-linux}-${TARGETARCH:-amd64}.gz && \ chmod +x argo-${TARGETOS:-linux}-${TARGETARCH:-amd64} && \ @@ -85,4 +85,4 @@ USER 1001 EXPOSE 8888 # Start the apiserver -CMD /bin/apiserver --config=/config --sampleconfig=/config/sample_config.json -logtostderr=true --logLevel=${LOG_LEVEL} +CMD ["/bin/sh", "-c", "/bin/apiserver --config=/config --sampleconfig=/config/sample_config.json -logtostderr=true --logLevel=${LOG_LEVEL}"] diff --git a/backend/Dockerfile.cacheserver b/backend/Dockerfile.cacheserver index e9179a99ee7..71aed4177fb 100644 --- a/backend/Dockerfile.cacheserver +++ b/backend/Dockerfile.cacheserver @@ -13,7 +13,7 @@ # limitations under the License. # Dockerfile for building the source code of cache_server -FROM golang:1.23-alpine as builder +FROM golang:1.24-alpine as builder RUN apk update && apk upgrade && \ apk add --no-cache bash git openssh gcc musl-dev diff --git a/backend/Dockerfile.conformance b/backend/Dockerfile.conformance index cb52422c9eb..4e7c61a6bbf 100644 --- a/backend/Dockerfile.conformance +++ b/backend/Dockerfile.conformance @@ -13,7 +13,7 @@ # limitations under the License. # Dockerfile for building the source code of conformance tests -FROM golang:1.23-alpine as builder +FROM golang:1.24-alpine as builder RUN apk update && apk upgrade && \ apk add --no-cache bash git openssh gcc musl-dev diff --git a/backend/Dockerfile.driver b/backend/Dockerfile.driver index e54d50b9686..fe8cb7f3d95 100644 --- a/backend/Dockerfile.driver +++ b/backend/Dockerfile.driver @@ -15,7 +15,7 @@ # Build arguments ARG SOURCE_CODE=. -FROM registry.access.redhat.com/ubi9/go-toolset:1.23 AS builder +FROM registry.access.redhat.com/ubi9/go-toolset:1.24 AS builder ## Build args to be used at this step diff --git a/backend/Dockerfile.launcher b/backend/Dockerfile.launcher index 29f7ffbed18..ac0c1189388 100644 --- a/backend/Dockerfile.launcher +++ b/backend/Dockerfile.launcher @@ -17,7 +17,7 @@ ARG SOURCE_CODE=. ARG CI_CONTAINER_VERSION="unknown" -FROM registry.access.redhat.com/ubi9/go-toolset:1.23 AS builder +FROM registry.access.redhat.com/ubi9/go-toolset:1.24 AS builder ## Build args to be used at this step diff --git a/backend/Dockerfile.persistenceagent b/backend/Dockerfile.persistenceagent index 282436ccd3c..533a5a71b90 100644 --- a/backend/Dockerfile.persistenceagent +++ b/backend/Dockerfile.persistenceagent @@ -16,7 +16,7 @@ ARG SOURCE_CODE=. 
ARG CI_CONTAINER_VERSION="unknown" -FROM registry.access.redhat.com/ubi9/go-toolset:1.23 AS builder +FROM registry.access.redhat.com/ubi9/go-toolset:1.24 AS builder ## Build args to be used at this step ARG SOURCE_CODE @@ -53,4 +53,4 @@ ENV LOG_LEVEL=info ENV EXECUTIONTYPE=Workflow -CMD persistence_agent --logtostderr=true --namespace=${NAMESPACE} --ttlSecondsAfterWorkflowFinish=${TTL_SECONDS_AFTER_WORKFLOW_FINISH} --numWorker ${NUM_WORKERS} --executionType ${EXECUTIONTYPE} --logLevel=${LOG_LEVEL} +CMD ["/bin/sh", "-c", "persistence_agent --logtostderr=true --namespace=${NAMESPACE} --ttlSecondsAfterWorkflowFinish=${TTL_SECONDS_AFTER_WORKFLOW_FINISH} --numWorker ${NUM_WORKERS} --executionType ${EXECUTIONTYPE} --logLevel=${LOG_LEVEL}"] diff --git a/backend/Dockerfile.scheduledworkflow b/backend/Dockerfile.scheduledworkflow index 435f349128b..32600882149 100644 --- a/backend/Dockerfile.scheduledworkflow +++ b/backend/Dockerfile.scheduledworkflow @@ -15,7 +15,7 @@ # Build arguments ARG SOURCE_CODE=. -FROM registry.access.redhat.com/ubi9/go-toolset:1.23 AS builder +FROM registry.access.redhat.com/ubi9/go-toolset:1.24 AS builder ## Build args to be used at this step ARG SOURCE_CODE diff --git a/backend/Dockerfile.viewercontroller b/backend/Dockerfile.viewercontroller index 1f8e02c5061..bee983ff379 100644 --- a/backend/Dockerfile.viewercontroller +++ b/backend/Dockerfile.viewercontroller @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -FROM golang:1.23-alpine as builder +FROM golang:1.24-alpine as builder RUN apk update && apk upgrade RUN apk add --no-cache git gcc musl-dev @@ -40,4 +40,4 @@ RUN chmod +x /bin/controller ENV MAX_NUM_VIEWERS "50" ENV NAMESPACE "kubeflow" -CMD /bin/controller -logtostderr=true -max_num_viewers=${MAX_NUM_VIEWERS} --namespace=${NAMESPACE} +CMD ["/bin/sh", "-c", "/bin/controller -logtostderr=true -max_num_viewers=${MAX_NUM_VIEWERS} --namespace=${NAMESPACE}"] diff --git a/backend/Dockerfile.visualization b/backend/Dockerfile.visualization index 4e3a1540c06..939808fc59d 100644 --- a/backend/Dockerfile.visualization +++ b/backend/Dockerfile.visualization @@ -17,8 +17,8 @@ # visualization. More details about this process can be found in the server.py # and exporter.py files in the directory specified above. -# This image should be in sync with image in backend/src/apiserver/visualization/update_requirements.sh. -FROM tensorflow/tensorflow:2.10.1 +# This image should run Python 3.11 to satisfy protobuf v4-compatible deps.
+FROM python:3.11 RUN apt-get update \ && apt-get install -y wget curl tar openssl \ diff --git a/backend/Makefile b/backend/Makefile index ae9567222c4..22f7db9fe05 100644 --- a/backend/Makefile +++ b/backend/Makefile @@ -119,7 +119,7 @@ dev-kind-cluster: .PHONY: kind-load-driver-debug kind-load-driver-debug: @if [ "${CONTAINER_ENGINE}" = "docker" ]; then \ - kind --name ${KIND_NAME} load docker-image ${IMG_TAG_DRIVER}:debug + kind --name ${KIND_NAME} load docker-image ${IMG_TAG_DRIVER}:debug; \ else \ bash -c "kind load --name ${KIND_NAME} image-archive <( ${CONTAINER_ENGINE} save ${IMG_TAG_DRIVER})"; \ fi diff --git a/backend/README.md b/backend/README.md index c62403cdb0d..78523c9d731 100644 --- a/backend/README.md +++ b/backend/README.md @@ -130,8 +130,8 @@ server locally: "env": { "POD_NAMESPACE": "kubeflow", "DBCONFIG_MYSQLCONFIG_HOST": "localhost", - "MINIO_SERVICE_SERVICE_HOST": "localhost", - "MINIO_SERVICE_SERVICE_PORT": "9000", + "OBJECTSTORECONFIG_HOST": "localhost", + "OBJECTSTORECONFIG_PORT": "9000", "METADATA_GRPC_SERVICE_SERVICE_HOST": "localhost", "METADATA_GRPC_SERVICE_SERVICE_PORT": "8080", "ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST": "localhost", @@ -244,8 +244,8 @@ VSCode configuration: "env": { "POD_NAMESPACE": "kubeflow", "DBCONFIG_MYSQLCONFIG_HOST": "localhost", - "MINIO_SERVICE_SERVICE_HOST": "localhost", - "MINIO_SERVICE_SERVICE_PORT": "9000", + "OBJECTSTORECONFIG_HOST": "localhost", + "OBJECTSTORECONFIG_PORT": "9000", "METADATA_GRPC_SERVICE_SERVICE_HOST": "localhost", "METADATA_GRPC_SERVICE_SERVICE_PORT": "8080", "ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST": "localhost", @@ -268,8 +268,8 @@ GoLand configuration: |----------------------------------------------|-----------| | POD_NAMESPACE | kubeflow | | DBCONFIG_MYSQLCONFIG_HOST | localhost | - | MINIO_SERVICE_SERVICE_HOST | localhost | - | MINIO_SERVICE_SERVICE_PORT | 9000 | + | OBJECTSTORECONFIG_HOST | localhost | + | OBJECTSTORECONFIG_PORT | 9000 | | METADATA_GRPC_SERVICE_SERVICE_HOST | localhost | | METADATA_GRPC_SERVICE_SERVICE_PORT | 8080 | | ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST | localhost | @@ -312,6 +312,9 @@ VSCode configuration: "remotePath": "/go/src/github.com/kubeflow/pipelines", "port": 2345, "host": "127.0.0.1", + "substitutePath": [ + { "from": "${workspaceFolder}", "to": "/go/src/github.com/kubeflow/pipelines" } + ] } ] } diff --git a/backend/api/Dockerfile b/backend/api/Dockerfile index a73be417484..2f19254dd70 100644 --- a/backend/api/Dockerfile +++ b/backend/api/Dockerfile @@ -13,45 +13,74 @@ # limitations under the License. # Generate client code (go & json) from API protocol buffers -FROM golang:1.23 as generator -ENV GRPC_GATEWAY_VERSION v1.9.6 -ENV GO_SWAGGER_VERSION v0.18.0 -ENV GOLANG_PROTOBUF_VERSION v1.5.1 -ENV GRPC_VERSION v1.23.0 -ENV PROTOC_VERSION 3.20.3 +FROM golang:1.24 AS generator +ENV GRPC_GATEWAY_VERSION=v2.27.1 +ENV GO_SWAGGER_VERSION=v0.32.3 +ENV GRPC_VERSION=v1.73.0 +ENV PROTOC_VERSION=31.1 ENV GOBIN=/go/bin +# The googleapis repo doesn't use GitHub releases or version tags, +# so we pin a specific commit to make the clone reproducible. +ENV GOOGLEAPIS_COMMIT=68d5196a529174df97c28c70622ffc1c3721815f + +# **Note** that protoc-gen-go-grpc is packaged with grpc-go but is versioned +# separately. You can find the releases for protoc-gen-go-grpc here: +# https://github.com/grpc/grpc-go/releases +# **Note** that these also include releases for grpc-go which is the grpc Go +# runtime package. 
protoc-gen-go-grpc is the package used for generating +# Go GRPC code from .proto files. +# to list recent protoc-gen-go-grpc versions you can also do: +# go list -m -versions google.golang.org/grpc/cmd/protoc-gen-go-grpc +# PROTOC_GEN_GO_GRPC & PROTOBUF_GO versions should match reasonably close to each other. +# You can check the protobuf in the go.mod for protoc-gen-go-grpc, like here: +# https://github.com/grpc/grpc-go/blob/cmd/protoc-gen-go-grpc/v1.5.1/cmd/protoc-gen-go-grpc/go.mod#L7 +# **Note** That BOTH PROTOC_GEN_GO_GRPC & PROTOBUF_GO here are used for +# Generating GO Code. These versions should be identical to the +# runtime Go packages (in the project go.mod) +ENV PROTOC_GEN_GO_GRPC=v1.5.1 +ENV PROTOBUF_GO=v1.36.6 # Install protoc. RUN apt-get update -y && apt-get install -y jq sed unzip RUN curl -L -o protoc.zip https://github.com/protocolbuffers/protobuf/releases/download/v${PROTOC_VERSION}/protoc-${PROTOC_VERSION}-linux-x86_64.zip -RUN unzip -o protoc.zip -d /usr/ bin/protoc -RUN unzip -o protoc.zip -d /usr/ 'include/*' +RUN unzip -o protoc.zip -d /tmp/protoc && \ + mv /tmp/protoc/bin/protoc /usr/bin/protoc && \ + chmod +x /usr/bin/protoc +RUN unzip -o protoc.zip 'include/*' -d /tmp/protoc && \ + mv /tmp/protoc/include/* /usr/include RUN rm -f protoc.zip -ENV PROTOCCOMPILER /usr/bin/protoc -ENV PROTOCINCLUDE /usr/include/google/protobuf +ENV PROTOCCOMPILER=/usr/bin/protoc +ENV PROTOCINCLUDE=/usr/include/google/protobuf # Need grpc-gateway source code for -I in protoc command. -WORKDIR /go/src/github.com -RUN mkdir grpc-ecosystem && cd grpc-ecosystem && git clone --depth 1 --branch $GRPC_GATEWAY_VERSION https://github.com/grpc-ecosystem/grpc-gateway.git -RUN mkdir grpc && git clone --depth 1 --branch $GRPC_VERSION https://github.com/grpc/grpc-go - # Install protoc-gen-rpc-gateway && protoc-gen-swagger. -RUN cd grpc-ecosystem/grpc-gateway && GO111MODULE=on go mod vendor -RUN go install github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway@latest -RUN go install github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger@latest +RUN go install github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-grpc-gateway@${GRPC_GATEWAY_VERSION} +RUN go install github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2@${GRPC_GATEWAY_VERSION} + +# Need to explicitly provide the googleapis protos and the OpenAPI options that were previously present in the grpc-gateway repo. +RUN git init /googleapis && \ + cd /googleapis && \ + git remote add origin https://github.com/googleapis/googleapis.git && \ + git fetch --depth 1 origin ${GOOGLEAPIS_COMMIT} && \ + git checkout FETCH_HEAD +RUN mkdir -p /protoc-gen-openapiv2 && \ + cp -r /go/pkg/mod/github.com/grpc-ecosystem/grpc-gateway/v2@${GRPC_GATEWAY_VERSION}/protoc-gen-openapiv2/options /protoc-gen-openapiv2/options # Download go-swagger binary. RUN curl -LO "https://github.com/go-swagger/go-swagger/releases/download/${GO_SWAGGER_VERSION}/swagger_linux_amd64" RUN chmod +x swagger_linux_amd64 && mv swagger_linux_amd64 /usr/bin/swagger # Need protobuf source code for -I in protoc command. -RUN mkdir golang && cd golang && git clone --depth 1 --branch $GOLANG_PROTOBUF_VERSION https://github.com/golang/protobuf.git # Install protoc-gen-go. 
-RUN cd golang/protobuf && GO111MODULE=on go mod vendor -RUN go install github.com/golang/protobuf/protoc-gen-go@latest +RUN go install google.golang.org/protobuf/cmd/protoc-gen-go@${PROTOBUF_GO} +RUN go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@${PROTOC_GEN_GO_GRPC} + +# Needed for building python packages requiring protoc +RUN apt-get update && apt-get install -y python3-pip +RUN apt-get -y upgrade python3-pip && pip3 install --upgrade setuptools wheel --break-system-packages # WORKAROUND: https://github.com/docker-library/golang/issues/225#issuecomment-403170792 -ENV XDG_CACHE_HOME /tmp/.cache +ENV XDG_CACHE_HOME=/tmp/.cache # Make all files accessible to non-root users. RUN chmod -R 777 /usr/bin/ RUN chmod -R 777 /usr/include/google diff --git a/backend/api/Makefile b/backend/api/Makefile index 447202b4123..f104d2ccaf8 100644 --- a/backend/api/Makefile +++ b/backend/api/Makefile @@ -15,14 +15,14 @@ # Makefile to generate KFP api clients from proto. IMAGE_TAG=kfp-api-generator -# Contact chensun or zijianjoy if this remote image needs an update. +# Contact chensun or HumairAK if this remote image needs an update. REMOTE_IMAGE=ghcr.io/kubeflow/kfp-api-generator # Assume the latest API version by default. API_VERSION ?= v2beta1 # Keep in sync with the version used in test/release/Dockerfile.release -PREBUILT_REMOTE_IMAGE=ghcr.io/kubeflow/kfp-api-generator:1.1 - +PREBUILT_REMOTE_IMAGE=ghcr.io/kubeflow/kfp-api-generator:master +RELEASE_IMAGE=ghcr.io/kubeflow/kfp-release:master CONTAINER_ENGINE ?= docker # Generate clients using a pre-built api-generator image. @@ -34,11 +34,21 @@ generate: fetch-dependencies hack/generator.sh $(API_VERSION)/*.proto --mount type=bind,source="$$(pwd)/../..",target=/go/src/github.com/kubeflow/pipelines \ $(PREBUILT_REMOTE_IMAGE) /go/src/github.com/kubeflow/pipelines/backend/api/hack/generator.sh +# Use the release image since it has some additional dependencies +# required by kfp-pipeline-ser generation +.PHONY: generate-kfp-server-api-package +generate-kfp-server-api-package: + ${CONTAINER_ENGINE} run --interactive --rm \ + -e API_VERSION=$(API_VERSION) \ + --user $$(id -u):$$(id -g) \ + --mount type=bind,source="$$(pwd)/../..",target=/go/src/github.com/kubeflow/pipelines \ + $(RELEASE_IMAGE) /go/src/github.com/kubeflow/pipelines/backend/api/build_kfp_server_api_python_package.sh + + # Fetch dependency proto .PHONY: fetch-dependencies fetch-dependencies: v2beta1/google/rpc/status.proto -# TODO(gkcalat): add this as a submodule? v2beta1/google/rpc/status.proto: mkdir -p v2beta1/google/rpc wget -O v2beta1/google/rpc/status.proto https://raw.githubusercontent.com/googleapis/googleapis/047d3a8ac7f75383855df0166144f891d7af08d9/google/rpc/status.proto diff --git a/backend/api/README.md b/backend/api/README.md index 26c4eb1d0bc..285d053bb7e 100644 --- a/backend/api/README.md +++ b/backend/api/README.md @@ -77,13 +77,16 @@ API definitions in this folder are used to generate [`v1beta1`](https://www.kube 4. Create a PR with the changes in [kubeflow.org website repository](https://github.com/kubeflow/website). See an example [here](https://github.com/kubeflow/website/pull/3444). -## Updating API generator image +## Updating API generator image (Manual) + +This is now automatic on pushes to GitHub branches, but the instructions are kept here in case you need to do it +manually. API generator image is defined in [Dockerfile](`./Dockerfile`). If you need to update the container, follow these steps: 1.
Login to GHCR container registry: `echo "<PAT>" | docker login ghcr.io -u <USERNAME> --password-stdin` * Replace `<PAT>` with a GitHub Personal Access Token (PAT) with the `write:packages` and `read:packages` scopes, as well as `delete:packages` if needed. -1. Update the [Dockerfile](`./Dockerfile`) and build the image by running `docker build -t ghcr.io/kubeflow/kfp-api-generator:$VERSION .` -1. Push the new container by running `docker push ghcr.io/kubeflow/kfp-api-generator:$VERSION`. +1. Update the [Dockerfile](`./Dockerfile`) and build the image by running `docker build -t ghcr.io/kubeflow/kfp-api-generator:$BRANCH .` +1. Push the new container by running `docker push ghcr.io/kubeflow/kfp-api-generator:$BRANCH`. 1. Update the `PREBUILT_REMOTE_IMAGE` variable in the [Makefile](./Makefile) to point to your new image. -1. Similarly, push a new version of the release tools image to `ghcr.io/kubeflow/kfp-release:$VERSION` and run `make push` in [test/release/Makefile](../../test/release/Makefile). +1. Similarly, push a new version of the release tools image to `ghcr.io/kubeflow/kfp-release:$BRANCH` and run `make push` in [test/release/Makefile](../../test/release/Makefile). diff --git a/backend/api/build_kfp_server_api_python_package.sh b/backend/api/build_kfp_server_api_python_package.sh index 54efb37f049..20c80dc5f5e 100755 --- a/backend/api/build_kfp_server_api_python_package.sh +++ b/backend/api/build_kfp_server_api_python_package.sh @@ -58,6 +58,10 @@ java -jar "$codegen_file" generate -g python -t "$CURRENT_DIR/$API_VERSION/pytho "packageUrl": "https://github.com/kubeflow/pipelines" }') +echo "Removing unnecessary GitLab and TravisCI generated files" +rm $CURRENT_DIR/$API_VERSION/python_http_client/.gitlab-ci.yml +rm $CURRENT_DIR/$API_VERSION/python_http_client/.travis.yml + echo "Copying LICENSE to $DIR" cp "$CURRENT_DIR/../../LICENSE" "$DIR" diff --git a/backend/api/hack/generator.sh b/backend/api/hack/generator.sh index f89a99fda89..45fc482a345 100755 --- a/backend/api/hack/generator.sh +++ b/backend/api/hack/generator.sh @@ -35,33 +35,45 @@ mkdir -p backend/api/${API_VERSION}/go_client mkdir -p backend/api/${API_VERSION}/swagger # Generate *.pb.go (grpc api client) from *.proto. -${PROTOCCOMPILER} -I. -Ibackend/api/${API_VERSION} \ - -I/go/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis \ - -I/go/src/github.com/grpc-ecosystem/grpc-gateway/ \ - -I/go/src/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options/ \ - -I/usr/include/ \ - --plugin=protoc-gen-go=/go/bin/protoc-gen-go \ - --go_out=plugins=grpc:${TMP_OUTPUT} \ - backend/api/${API_VERSION}/*.proto +# -I/ allows for absolute import paths +# e.g. "import protoc-gen-openapiv2/options/annotations.proto" +${PROTOCCOMPILER} \ + -I. \ + -Ibackend/api/${API_VERSION} \ + -I/googleapis \ + -I/protoc-gen-openapiv2 \ + -I/usr/include \ + -I/ \ + --go_out=${TMP_OUTPUT} \ + --go-grpc_out=${TMP_OUTPUT} \ + --plugin=protoc-gen-go=/go/bin/protoc-gen-go \ + --plugin=protoc-gen-go-grpc=/go/bin/protoc-gen-go-grpc \ + backend/api/${API_VERSION}/*.proto + # Generate *.pb.gw.go (grpc api rest client) from *.proto. -${PROTOCCOMPILER} -I. -Ibackend/api/${API_VERSION} \ -I/go/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis \ -I/go/src/github.com/grpc-ecosystem/grpc-gateway/ \ -I/go/src/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options/ \ -I/usr/include/ \
\ + -Ibackend/api/${API_VERSION} \ + -I/googleapis \ + -I/protoc-gen-openapiv2 \ + -I/usr/include \ + -I/ \ --plugin=protoc-gen-grpc-gateway=/go/bin/protoc-gen-grpc-gateway \ --grpc-gateway_out=logtostderr=true:${TMP_OUTPUT} \ backend/api/${API_VERSION}/*.proto + # Move *.pb.go and *.gw.go to go_client folder. cp ${TMP_OUTPUT}/github.com/kubeflow/pipelines/backend/api/${API_VERSION}/go_client/* ./backend/api/${API_VERSION}/go_client # Generate *.swagger.json from *.proto into swagger folder. -${PROTOCCOMPILER} -I. -Ibackend/api/${API_VERSION} \ - -I/go/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis \ - -I/go/src/github.com/grpc-ecosystem/grpc-gateway/ \ - -I/go/src/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options/ \ - -I//usr/include/ \ - --plugin=protoc-gen-swagger=/go/bin/protoc-gen-swagger \ - --swagger_out=logtostderr=true:${TMP_OUTPUT} \ +${PROTOCCOMPILER} \ + -I. \ + -Ibackend/api/${API_VERSION} \ + -I/googleapis \ + -I/protoc-gen-openapiv2 \ + -I/usr/include \ + -I/ \ + --plugin=protoc-gen-openapiv2=/go/bin/protoc-gen-openapiv2 \ + --openapiv2_out=logtostderr=true,json_names_for_fields=false:${TMP_OUTPUT} \ backend/api/${API_VERSION}/*.proto # Move *.swagger.json files into swagger folder. cp -a ${TMP_OUTPUT}/backend/api/${API_VERSION}/*.swagger.json ./backend/api/${API_VERSION}/swagger diff --git a/backend/api/v1beta1/auth.proto b/backend/api/v1beta1/auth.proto index 04bf584702c..c76c27dbe00 100644 --- a/backend/api/v1beta1/auth.proto +++ b/backend/api/v1beta1/auth.proto @@ -19,9 +19,9 @@ package api; import "google/api/annotations.proto"; import "google/protobuf/empty.proto"; -import "protoc-gen-swagger/options/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; -option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { schemes: [1, 2], // http + https responses: { key: "default"; diff --git a/backend/api/v1beta1/experiment.proto b/backend/api/v1beta1/experiment.proto index aa0f896b42b..8444c533707 100644 --- a/backend/api/v1beta1/experiment.proto +++ b/backend/api/v1beta1/experiment.proto @@ -17,14 +17,13 @@ syntax = "proto3"; option go_package = "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client"; package api; -import "backend/api/v1beta1/error.proto"; import "backend/api/v1beta1/resource_reference.proto"; import "google/api/annotations.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/timestamp.proto"; -import "protoc-gen-swagger/options/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; -option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { schemes: [1, 2], // http + https responses: { key: "default"; diff --git a/backend/api/v1beta1/filter.proto b/backend/api/v1beta1/filter.proto index 8ae80f56760..3106c9243b5 100644 --- a/backend/api/v1beta1/filter.proto +++ b/backend/api/v1beta1/filter.proto @@ -17,7 +17,6 @@ syntax = "proto3"; option go_package = "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client"; package api; -import "google/api/annotations.proto"; import "google/protobuf/timestamp.proto"; // Predicate captures individual conditions that must be true for a resource diff --git a/backend/api/v1beta1/go_client/auth.pb.go b/backend/api/v1beta1/go_client/auth.pb.go index 76696c0d6c4..f87b44fbd88 100644 --- a/backend/api/v1beta1/go_client/auth.pb.go +++ 
b/backend/api/v1beta1/go_client/auth.pb.go @@ -14,24 +14,21 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v1beta1/auth.proto package go_client import ( - context "context" - _ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options" + _ "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options" _ "google.golang.org/genproto/googleapis/api/annotations" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" emptypb "google.golang.org/protobuf/types/known/emptypb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -145,22 +142,19 @@ func (AuthorizeRequest_Verb) EnumDescriptor() ([]byte, []int) { // and verb. User identity is not part of the message, because it is expected // to be parsed from request headers. Caller should proxy user request's headers. type AuthorizeRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"` // Namespace the resource belongs to. + Resources AuthorizeRequest_Resources `protobuf:"varint,2,opt,name=resources,proto3,enum=api.AuthorizeRequest_Resources" json:"resources,omitempty"` // Resource type asking for authorization. + Verb AuthorizeRequest_Verb `protobuf:"varint,3,opt,name=verb,proto3,enum=api.AuthorizeRequest_Verb" json:"verb,omitempty"` // Verb on the resource asking for authorization. unknownFields protoimpl.UnknownFields - - Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"` // Namespace the resource belongs to. - Resources AuthorizeRequest_Resources `protobuf:"varint,2,opt,name=resources,proto3,enum=api.AuthorizeRequest_Resources" json:"resources,omitempty"` // Resource type asking for authorization. - Verb AuthorizeRequest_Verb `protobuf:"varint,3,opt,name=verb,proto3,enum=api.AuthorizeRequest_Verb" json:"verb,omitempty"` // Verb on the resource asking for authorization. 
+ sizeCache protoimpl.SizeCache } func (x *AuthorizeRequest) Reset() { *x = AuthorizeRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_auth_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_auth_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *AuthorizeRequest) String() string { @@ -171,7 +165,7 @@ func (*AuthorizeRequest) ProtoMessage() {} func (x *AuthorizeRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_auth_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -209,67 +203,48 @@ func (x *AuthorizeRequest) GetVerb() AuthorizeRequest_Verb { var File_backend_api_v1beta1_auth_proto protoreflect.FileDescriptor -var file_backend_api_v1beta1_auth_proto_rawDesc = []byte{ - 0x0a, 0x1e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x61, 0x75, 0x74, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x12, 0x03, 0x61, 0x70, 0x69, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, - 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x1a, 0x2c, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, 0x67, 0x65, 0x6e, 0x2d, 0x73, 0x77, 0x61, - 0x67, 0x67, 0x65, 0x72, 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, - 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x91, - 0x02, 0x0a, 0x10, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x12, 0x3d, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x74, 0x68, 0x6f, - 0x72, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x52, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x73, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, - 0x12, 0x2e, 0x0a, 0x04, 0x76, 0x65, 0x72, 0x62, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1a, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x56, 0x65, 0x72, 0x62, 0x52, 0x04, 0x76, 0x65, 0x72, 0x62, - 0x22, 0x32, 0x0a, 0x09, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x18, 0x0a, - 0x14, 0x55, 0x4e, 0x41, 0x53, 0x53, 0x49, 0x47, 0x4e, 0x45, 0x44, 0x5f, 0x52, 0x45, 0x53, 0x4f, - 0x55, 0x52, 0x43, 0x45, 0x53, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x56, 0x49, 0x45, 0x57, 0x45, - 0x52, 0x53, 0x10, 0x01, 0x22, 0x3c, 0x0a, 0x04, 0x56, 0x65, 0x72, 0x62, 0x12, 0x13, 0x0a, 0x0f, - 0x55, 0x4e, 0x41, 0x53, 0x53, 0x49, 0x47, 0x4e, 0x45, 0x44, 0x5f, 0x56, 0x45, 0x52, 0x42, 0x10, - 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x43, 0x52, 0x45, 0x41, 0x54, 0x45, 0x10, 0x01, 0x12, 0x07, 0x0a, - 0x03, 0x47, 0x45, 0x54, 0x10, 0x02, 0x12, 0x0a, 0x0a, 0x06, 0x44, 0x45, 0x4c, 
0x45, 0x54, 0x45, - 0x10, 0x03, 0x32, 0x67, 0x0a, 0x0b, 0x41, 0x75, 0x74, 0x68, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, - 0x65, 0x12, 0x58, 0x0a, 0x0b, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x56, 0x31, - 0x12, 0x15, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, - 0x1a, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x14, 0x12, 0x12, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, - 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x61, 0x75, 0x74, 0x68, 0x42, 0x91, 0x01, 0x92, 0x41, - 0x51, 0x2a, 0x02, 0x01, 0x02, 0x52, 0x1c, 0x0a, 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, - 0x12, 0x11, 0x12, 0x0f, 0x0a, 0x0d, 0x1a, 0x0b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x74, 0x61, - 0x74, 0x75, 0x73, 0x5a, 0x1f, 0x0a, 0x1d, 0x0a, 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x12, - 0x13, 0x08, 0x02, 0x1a, 0x0d, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x02, 0x62, 0x0c, 0x0a, 0x0a, 0x0a, 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, - 0x12, 0x00, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, - 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x67, 0x6f, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x62, - 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v1beta1_auth_proto_rawDesc = "" + + "\n" + + "\x1ebackend/api/v1beta1/auth.proto\x12\x03api\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\"\x91\x02\n" + + "\x10AuthorizeRequest\x12\x1c\n" + + "\tnamespace\x18\x01 \x01(\tR\tnamespace\x12=\n" + + "\tresources\x18\x02 \x01(\x0e2\x1f.api.AuthorizeRequest.ResourcesR\tresources\x12.\n" + + "\x04verb\x18\x03 \x01(\x0e2\x1a.api.AuthorizeRequest.VerbR\x04verb\"2\n" + + "\tResources\x12\x18\n" + + "\x14UNASSIGNED_RESOURCES\x10\x00\x12\v\n" + + "\aVIEWERS\x10\x01\"<\n" + + "\x04Verb\x12\x13\n" + + "\x0fUNASSIGNED_VERB\x10\x00\x12\n" + + "\n" + + "\x06CREATE\x10\x01\x12\a\n" + + "\x03GET\x10\x02\x12\n" + + "\n" + + "\x06DELETE\x10\x032g\n" + + "\vAuthService\x12X\n" + + "\vAuthorizeV1\x12\x15.api.AuthorizeRequest\x1a\x16.google.protobuf.Empty\"\x1a\x82\xd3\xe4\x93\x02\x14\x12\x12/apis/v1beta1/authB\x91\x01\x92AQ*\x02\x01\x02R\x1c\n" + + "\adefault\x12\x11\x12\x0f\n" + + "\r\x1a\v.api.StatusZ\x1f\n" + + "\x1d\n" + + "\x06Bearer\x12\x13\b\x02\x1a\rauthorization \x02b\f\n" + + "\n" + + "\n" + + "\x06Bearer\x12\x00Z;github.com/kubeflow/pipelines/backend/api/v1beta1/go_clientb\x06proto3" var ( file_backend_api_v1beta1_auth_proto_rawDescOnce sync.Once - file_backend_api_v1beta1_auth_proto_rawDescData = file_backend_api_v1beta1_auth_proto_rawDesc + file_backend_api_v1beta1_auth_proto_rawDescData []byte ) func file_backend_api_v1beta1_auth_proto_rawDescGZIP() []byte { file_backend_api_v1beta1_auth_proto_rawDescOnce.Do(func() { - file_backend_api_v1beta1_auth_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v1beta1_auth_proto_rawDescData) + file_backend_api_v1beta1_auth_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_auth_proto_rawDesc), len(file_backend_api_v1beta1_auth_proto_rawDesc))) }) 
return file_backend_api_v1beta1_auth_proto_rawDescData } var file_backend_api_v1beta1_auth_proto_enumTypes = make([]protoimpl.EnumInfo, 2) var file_backend_api_v1beta1_auth_proto_msgTypes = make([]protoimpl.MessageInfo, 1) -var file_backend_api_v1beta1_auth_proto_goTypes = []interface{}{ +var file_backend_api_v1beta1_auth_proto_goTypes = []any{ (AuthorizeRequest_Resources)(0), // 0: api.AuthorizeRequest.Resources (AuthorizeRequest_Verb)(0), // 1: api.AuthorizeRequest.Verb (*AuthorizeRequest)(nil), // 2: api.AuthorizeRequest @@ -292,25 +267,11 @@ func file_backend_api_v1beta1_auth_proto_init() { if File_backend_api_v1beta1_auth_proto != nil { return } - if !protoimpl.UnsafeEnabled { - file_backend_api_v1beta1_auth_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*AuthorizeRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v1beta1_auth_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_auth_proto_rawDesc), len(file_backend_api_v1beta1_auth_proto_rawDesc)), NumEnums: 2, NumMessages: 1, NumExtensions: 0, @@ -322,87 +283,6 @@ func file_backend_api_v1beta1_auth_proto_init() { MessageInfos: file_backend_api_v1beta1_auth_proto_msgTypes, }.Build() File_backend_api_v1beta1_auth_proto = out.File - file_backend_api_v1beta1_auth_proto_rawDesc = nil file_backend_api_v1beta1_auth_proto_goTypes = nil file_backend_api_v1beta1_auth_proto_depIdxs = nil } - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// AuthServiceClient is the client API for AuthService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type AuthServiceClient interface { - AuthorizeV1(ctx context.Context, in *AuthorizeRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) -} - -type authServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewAuthServiceClient(cc grpc.ClientConnInterface) AuthServiceClient { - return &authServiceClient{cc} -} - -func (c *authServiceClient) AuthorizeV1(ctx context.Context, in *AuthorizeRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/api.AuthService/AuthorizeV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// AuthServiceServer is the server API for AuthService service. -type AuthServiceServer interface { - AuthorizeV1(context.Context, *AuthorizeRequest) (*emptypb.Empty, error) -} - -// UnimplementedAuthServiceServer can be embedded to have forward compatible implementations. 
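A note on the regenerated descriptor handling just above: the raw descriptor is now a string constant and is turned into bytes with unsafe.Slice(unsafe.StringData(...), len(...)). The following is a standalone sketch of that conversion pattern in generic Go (not code from this change); the key point is that the resulting slice aliases the string's backing memory and must be treated as read-only.

```go
package main

import (
	"fmt"
	"unsafe"
)

// bytesOf returns a []byte view over s without copying. The slice shares the
// string's backing array, so callers must never write to it; mutating it
// would violate Go's string immutability guarantees.
func bytesOf(s string) []byte {
	return unsafe.Slice(unsafe.StringData(s), len(s))
}

func main() {
	const rawDesc = "\n\x1ebackend/api/v1beta1/auth.proto" // truncated example payload
	b := bytesOf(rawDesc)
	fmt.Println(len(b), b[0]) // read-only access is fine
}
```

Keeping the descriptor as an immutable string is consistent with the removal of the `rawDesc = nil` reset seen earlier in this diff.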
-type UnimplementedAuthServiceServer struct { -} - -func (*UnimplementedAuthServiceServer) AuthorizeV1(context.Context, *AuthorizeRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method AuthorizeV1 not implemented") -} - -func RegisterAuthServiceServer(s *grpc.Server, srv AuthServiceServer) { - s.RegisterService(&_AuthService_serviceDesc, srv) -} - -func _AuthService_AuthorizeV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(AuthorizeRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(AuthServiceServer).AuthorizeV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.AuthService/AuthorizeV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(AuthServiceServer).AuthorizeV1(ctx, req.(*AuthorizeRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _AuthService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "api.AuthService", - HandlerType: (*AuthServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "AuthorizeV1", - Handler: _AuthService_AuthorizeV1_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "backend/api/v1beta1/auth.proto", -} diff --git a/backend/api/v1beta1/go_client/auth.pb.gw.go b/backend/api/v1beta1/go_client/auth.pb.gw.go index bdf0ab485d2..2a0d6521edf 100644 --- a/backend/api/v1beta1/go_client/auth.pb.gw.go +++ b/backend/api/v1beta1/go_client/auth.pb.gw.go @@ -10,92 +10,91 @@ package go_client import ( "context" + "errors" "io" "net/http" - "github.com/golang/protobuf/descriptor" - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" ) // Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = descriptor.ForMessage -var _ = metadata.Join - var ( - filter_AuthService_AuthorizeV1_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} + _ codes.Code + _ io.Reader + _ status.Status + _ = errors.New + _ = runtime.String + _ = utilities.NewDoubleArray + _ = metadata.Join ) -func request_AuthService_AuthorizeV1_0(ctx context.Context, marshaler runtime.Marshaler, client AuthServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq AuthorizeRequest - var metadata runtime.ServerMetadata +var filter_AuthService_AuthorizeV1_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} +func request_AuthService_AuthorizeV1_0(ctx context.Context, marshaler runtime.Marshaler, client AuthServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq AuthorizeRequest + metadata runtime.ServerMetadata + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := 
runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AuthService_AuthorizeV1_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.AuthorizeV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_AuthService_AuthorizeV1_0(ctx context.Context, marshaler runtime.Marshaler, server AuthServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq AuthorizeRequest - var metadata runtime.ServerMetadata - + var ( + protoReq AuthorizeRequest + metadata runtime.ServerMetadata + ) if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AuthService_AuthorizeV1_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.AuthorizeV1(ctx, &protoReq) return msg, metadata, err - } // RegisterAuthServiceHandlerServer registers the http handlers for service AuthService to "mux". // UnaryRPC :call AuthServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. // Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterAuthServiceHandlerFromEndpoint instead. +// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call. func RegisterAuthServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server AuthServiceServer) error { - - mux.Handle("GET", pattern_AuthService_AuthorizeV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_AuthService_AuthorizeV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.AuthService/AuthorizeV1", runtime.WithHTTPPathPattern("/apis/v1beta1/auth")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_AuthService_AuthorizeV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_AuthService_AuthorizeV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_AuthService_AuthorizeV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_AuthService_AuthorizeV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) return nil @@ -104,25 +103,24 @@ func RegisterAuthServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux // RegisterAuthServiceHandlerFromEndpoint is same as RegisterAuthServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterAuthServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) + conn, err := grpc.NewClient(endpoint, opts...) if err != nil { return err } defer func() { if err != nil { if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } return } go func() { <-ctx.Done() if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } }() }() - return RegisterAuthServiceHandler(ctx, mux, conn) } @@ -136,34 +134,30 @@ func RegisterAuthServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn // to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "AuthServiceClient". // Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "AuthServiceClient" // doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "AuthServiceClient" to call the correct interceptors. +// "AuthServiceClient" to call the correct interceptors. This client ignores the HTTP middlewares. func RegisterAuthServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client AuthServiceClient) error { - - mux.Handle("GET", pattern_AuthService_AuthorizeV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_AuthService_AuthorizeV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.AuthService/AuthorizeV1", runtime.WithHTTPPathPattern("/apis/v1beta1/auth")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AuthService_AuthorizeV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_AuthService_AuthorizeV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_AuthService_AuthorizeV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_AuthService_AuthorizeV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - return nil } var ( - pattern_AuthService_AuthorizeV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "auth"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_AuthService_AuthorizeV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "auth"}, "")) ) var ( diff --git a/backend/api/v1beta1/go_client/auth_grpc.pb.go b/backend/api/v1beta1/go_client/auth_grpc.pb.go new file mode 100644 index 00000000000..a0daa3ebaef --- /dev/null +++ b/backend/api/v1beta1/go_client/auth_grpc.pb.go @@ -0,0 +1,136 @@ +// Copyright 2020 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v6.31.1 +// source: backend/api/v1beta1/auth.proto + +package go_client + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + emptypb "google.golang.org/protobuf/types/known/emptypb" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + AuthService_AuthorizeV1_FullMethodName = "/api.AuthService/AuthorizeV1" +) + +// AuthServiceClient is the client API for AuthService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type AuthServiceClient interface { + AuthorizeV1(ctx context.Context, in *AuthorizeRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) +} + +type authServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewAuthServiceClient(cc grpc.ClientConnInterface) AuthServiceClient { + return &authServiceClient{cc} +} + +func (c *authServiceClient) AuthorizeV1(ctx context.Context, in *AuthorizeRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, AuthService_AuthorizeV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AuthServiceServer is the server API for AuthService service. +// All implementations must embed UnimplementedAuthServiceServer +// for forward compatibility. +type AuthServiceServer interface { + AuthorizeV1(context.Context, *AuthorizeRequest) (*emptypb.Empty, error) + mustEmbedUnimplementedAuthServiceServer() +} + +// UnimplementedAuthServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. 
+type UnimplementedAuthServiceServer struct{} + +func (UnimplementedAuthServiceServer) AuthorizeV1(context.Context, *AuthorizeRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method AuthorizeV1 not implemented") +} +func (UnimplementedAuthServiceServer) mustEmbedUnimplementedAuthServiceServer() {} +func (UnimplementedAuthServiceServer) testEmbeddedByValue() {} + +// UnsafeAuthServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to AuthServiceServer will +// result in compilation errors. +type UnsafeAuthServiceServer interface { + mustEmbedUnimplementedAuthServiceServer() +} + +func RegisterAuthServiceServer(s grpc.ServiceRegistrar, srv AuthServiceServer) { + // If the following call pancis, it indicates UnimplementedAuthServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. + if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&AuthService_ServiceDesc, srv) +} + +func _AuthService_AuthorizeV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AuthorizeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AuthServiceServer).AuthorizeV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: AuthService_AuthorizeV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AuthServiceServer).AuthorizeV1(ctx, req.(*AuthorizeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// AuthService_ServiceDesc is the grpc.ServiceDesc for AuthService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var AuthService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "api.AuthService", + HandlerType: (*AuthServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "AuthorizeV1", + Handler: _AuthService_AuthorizeV1_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "backend/api/v1beta1/auth.proto", +} diff --git a/backend/api/v1beta1/go_client/error.pb.go b/backend/api/v1beta1/go_client/error.pb.go index a4d0642e618..ca3a67bdc44 100644 --- a/backend/api/v1beta1/go_client/error.pb.go +++ b/backend/api/v1beta1/go_client/error.pb.go @@ -14,8 +14,8 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v1beta1/error.proto package go_client @@ -26,6 +26,7 @@ import ( anypb "google.golang.org/protobuf/types/known/anypb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -36,21 +37,18 @@ const ( ) type Error struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + ErrorMessage string `protobuf:"bytes,1,opt,name=error_message,json=errorMessage,proto3" json:"error_message,omitempty"` + ErrorDetails string `protobuf:"bytes,2,opt,name=error_details,json=errorDetails,proto3" json:"error_details,omitempty"` unknownFields protoimpl.UnknownFields - - ErrorMessage string `protobuf:"bytes,1,opt,name=error_message,json=errorMessage,proto3" json:"error_message,omitempty"` - ErrorDetails string `protobuf:"bytes,2,opt,name=error_details,json=errorDetails,proto3" json:"error_details,omitempty"` + sizeCache protoimpl.SizeCache } func (x *Error) Reset() { *x = Error{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_error_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_error_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Error) String() string { @@ -61,7 +59,7 @@ func (*Error) ProtoMessage() {} func (x *Error) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_error_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -91,22 +89,19 @@ func (x *Error) GetErrorDetails() string { } type Status struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Error string `protobuf:"bytes,1,opt,name=error,proto3" json:"error,omitempty"` + Code int32 `protobuf:"varint,2,opt,name=code,proto3" json:"code,omitempty"` + Details []*anypb.Any `protobuf:"bytes,3,rep,name=details,proto3" json:"details,omitempty"` unknownFields protoimpl.UnknownFields - - Error string `protobuf:"bytes,1,opt,name=error,proto3" json:"error,omitempty"` - Code int32 `protobuf:"varint,2,opt,name=code,proto3" json:"code,omitempty"` - Details []*anypb.Any `protobuf:"bytes,3,rep,name=details,proto3" json:"details,omitempty"` + sizeCache protoimpl.SizeCache } func (x *Status) Reset() { *x = Status{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_error_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_error_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Status) String() string { @@ -117,7 +112,7 @@ func (*Status) ProtoMessage() {} func (x *Status) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_error_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -155,43 +150,31 @@ func (x *Status) GetDetails() []*anypb.Any { var File_backend_api_v1beta1_error_proto protoreflect.FileDescriptor -var file_backend_api_v1beta1_error_proto_rawDesc = []byte{ - 0x0a, 0x1f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 
- 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x12, 0x03, 0x61, 0x70, 0x69, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x22, 0x51, 0x0a, 0x05, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x72, - 0x72, 0x6f, 0x72, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, - 0x23, 0x0a, 0x0d, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x44, 0x65, 0x74, - 0x61, 0x69, 0x6c, 0x73, 0x22, 0x62, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x14, - 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, - 0x72, 0x72, 0x6f, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x05, 0x52, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x12, 0x2e, 0x0a, 0x07, 0x64, 0x65, 0x74, 0x61, - 0x69, 0x6c, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x52, - 0x07, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x42, 0x3d, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, - 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, - 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x67, 0x6f, - 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v1beta1_error_proto_rawDesc = "" + + "\n" + + "\x1fbackend/api/v1beta1/error.proto\x12\x03api\x1a\x19google/protobuf/any.proto\"Q\n" + + "\x05Error\x12#\n" + + "\rerror_message\x18\x01 \x01(\tR\ferrorMessage\x12#\n" + + "\rerror_details\x18\x02 \x01(\tR\ferrorDetails\"b\n" + + "\x06Status\x12\x14\n" + + "\x05error\x18\x01 \x01(\tR\x05error\x12\x12\n" + + "\x04code\x18\x02 \x01(\x05R\x04code\x12.\n" + + "\adetails\x18\x03 \x03(\v2\x14.google.protobuf.AnyR\adetailsB=Z;github.com/kubeflow/pipelines/backend/api/v1beta1/go_clientb\x06proto3" var ( file_backend_api_v1beta1_error_proto_rawDescOnce sync.Once - file_backend_api_v1beta1_error_proto_rawDescData = file_backend_api_v1beta1_error_proto_rawDesc + file_backend_api_v1beta1_error_proto_rawDescData []byte ) func file_backend_api_v1beta1_error_proto_rawDescGZIP() []byte { file_backend_api_v1beta1_error_proto_rawDescOnce.Do(func() { - file_backend_api_v1beta1_error_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v1beta1_error_proto_rawDescData) + file_backend_api_v1beta1_error_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_error_proto_rawDesc), len(file_backend_api_v1beta1_error_proto_rawDesc))) }) return file_backend_api_v1beta1_error_proto_rawDescData } var file_backend_api_v1beta1_error_proto_msgTypes = make([]protoimpl.MessageInfo, 2) -var file_backend_api_v1beta1_error_proto_goTypes = []interface{}{ +var file_backend_api_v1beta1_error_proto_goTypes = []any{ (*Error)(nil), // 0: api.Error (*Status)(nil), // 1: api.Status (*anypb.Any)(nil), // 2: google.protobuf.Any @@ -210,37 +193,11 @@ func 
file_backend_api_v1beta1_error_proto_init() { if File_backend_api_v1beta1_error_proto != nil { return } - if !protoimpl.UnsafeEnabled { - file_backend_api_v1beta1_error_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Error); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_error_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Status); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v1beta1_error_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_error_proto_rawDesc), len(file_backend_api_v1beta1_error_proto_rawDesc)), NumEnums: 0, NumMessages: 2, NumExtensions: 0, @@ -251,7 +208,6 @@ func file_backend_api_v1beta1_error_proto_init() { MessageInfos: file_backend_api_v1beta1_error_proto_msgTypes, }.Build() File_backend_api_v1beta1_error_proto = out.File - file_backend_api_v1beta1_error_proto_rawDesc = nil file_backend_api_v1beta1_error_proto_goTypes = nil file_backend_api_v1beta1_error_proto_depIdxs = nil } diff --git a/backend/api/v1beta1/go_client/experiment.pb.go b/backend/api/v1beta1/go_client/experiment.pb.go index 651695b960f..a80cbbeec78 100644 --- a/backend/api/v1beta1/go_client/experiment.pb.go +++ b/backend/api/v1beta1/go_client/experiment.pb.go @@ -14,25 +14,22 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v1beta1/experiment.proto package go_client import ( - context "context" - _ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options" + _ "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options" _ "google.golang.org/genproto/googleapis/api/annotations" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" emptypb "google.golang.org/protobuf/types/known/emptypb" timestamppb "google.golang.org/protobuf/types/known/timestamppb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -92,21 +89,18 @@ func (Experiment_StorageState) EnumDescriptor() ([]byte, []int) { } type CreateExperimentRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The experiment to be created. 
- Experiment *Experiment `protobuf:"bytes,1,opt,name=experiment,proto3" json:"experiment,omitempty"` + Experiment *Experiment `protobuf:"bytes,1,opt,name=experiment,proto3" json:"experiment,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *CreateExperimentRequest) Reset() { *x = CreateExperimentRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *CreateExperimentRequest) String() string { @@ -117,7 +111,7 @@ func (*CreateExperimentRequest) ProtoMessage() {} func (x *CreateExperimentRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -140,21 +134,18 @@ func (x *CreateExperimentRequest) GetExperiment() *Experiment { } type GetExperimentRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the experiment to be retrieved. - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *GetExperimentRequest) Reset() { *x = GetExperimentRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *GetExperimentRequest) String() string { @@ -165,7 +156,7 @@ func (*GetExperimentRequest) ProtoMessage() {} func (x *GetExperimentRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -188,10 +179,7 @@ func (x *GetExperimentRequest) GetId() string { } type ListExperimentsRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // A page token to request the next page of results. The token is acquried // from the nextPageToken field of the response from the previous // ListExperiment call or can be omitted when fetching the first page. @@ -210,15 +198,15 @@ type ListExperimentsRequest struct { // For Experiment, the only valid resource type is Namespace. 
An sample query string could be // resource_reference_key.type=NAMESPACE&resource_reference_key.id=ns1 ResourceReferenceKey *ResourceKey `protobuf:"bytes,5,opt,name=resource_reference_key,json=resourceReferenceKey,proto3" json:"resource_reference_key,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListExperimentsRequest) Reset() { *x = ListExperimentsRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListExperimentsRequest) String() string { @@ -229,7 +217,7 @@ func (*ListExperimentsRequest) ProtoMessage() {} func (x *ListExperimentsRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -280,25 +268,22 @@ func (x *ListExperimentsRequest) GetResourceReferenceKey() *ResourceKey { } type ListExperimentsResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // A list of experiments returned. Experiments []*Experiment `protobuf:"bytes,1,rep,name=experiments,proto3" json:"experiments,omitempty"` // The total number of experiments for the given query. TotalSize int32 `protobuf:"varint,3,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` // The token to list the next page of experiments. NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListExperimentsResponse) Reset() { *x = ListExperimentsResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListExperimentsResponse) String() string { @@ -309,7 +294,7 @@ func (*ListExperimentsResponse) ProtoMessage() {} func (x *ListExperimentsResponse) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -346,21 +331,18 @@ func (x *ListExperimentsResponse) GetNextPageToken() string { } type DeleteExperimentRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the experiment to be deleted. 
- Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DeleteExperimentRequest) Reset() { *x = DeleteExperimentRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DeleteExperimentRequest) String() string { @@ -371,7 +353,7 @@ func (*DeleteExperimentRequest) ProtoMessage() {} func (x *DeleteExperimentRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -394,10 +376,7 @@ func (x *DeleteExperimentRequest) GetId() string { } type Experiment struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Output. Unique experiment ID. Generated by API server. Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` // Required input field. Unique experiment name provided by user. @@ -410,16 +389,16 @@ type Experiment struct { // For Experiment, the only valid resource reference is a single Namespace. ResourceReferences []*ResourceReference `protobuf:"bytes,5,rep,name=resource_references,json=resourceReferences,proto3" json:"resource_references,omitempty"` // Output. Specifies whether this experiment is in archived or available state. - StorageState Experiment_StorageState `protobuf:"varint,6,opt,name=storage_state,json=storageState,proto3,enum=api.Experiment_StorageState" json:"storage_state,omitempty"` + StorageState Experiment_StorageState `protobuf:"varint,6,opt,name=storage_state,json=storageState,proto3,enum=api.Experiment_StorageState" json:"storage_state,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Experiment) Reset() { *x = Experiment{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Experiment) String() string { @@ -430,7 +409,7 @@ func (*Experiment) ProtoMessage() {} func (x *Experiment) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -488,21 +467,18 @@ func (x *Experiment) GetStorageState() Experiment_StorageState { } type ArchiveExperimentRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the experiment to be archived. 
- Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ArchiveExperimentRequest) Reset() { *x = ArchiveExperimentRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ArchiveExperimentRequest) String() string { @@ -513,7 +489,7 @@ func (*ArchiveExperimentRequest) ProtoMessage() {} func (x *ArchiveExperimentRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -536,21 +512,18 @@ func (x *ArchiveExperimentRequest) GetId() string { } type UnarchiveExperimentRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the experiment to be restored. - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *UnarchiveExperimentRequest) Reset() { *x = UnarchiveExperimentRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *UnarchiveExperimentRequest) String() string { @@ -561,7 +534,7 @@ func (*UnarchiveExperimentRequest) ProtoMessage() {} func (x *UnarchiveExperimentRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_experiment_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -585,158 +558,77 @@ func (x *UnarchiveExperimentRequest) GetId() string { var File_backend_api_v1beta1_experiment_proto protoreflect.FileDescriptor -var file_backend_api_v1beta1_experiment_proto_rawDesc = []byte{ - 0x0a, 0x24, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x61, 0x70, 0x69, 0x1a, 0x1f, 0x62, 0x61, 0x63, - 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2c, 0x62, 0x61, - 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, - 0x31, 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, - 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x2e, 
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2c, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, 0x67, - 0x65, 0x6e, 0x2d, 0x73, 0x77, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x4a, 0x0a, 0x17, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, 0x78, - 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x2f, 0x0a, 0x0a, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, - 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x0a, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, - 0x22, 0x26, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, - 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0xcd, 0x01, 0x0a, 0x16, 0x4c, 0x69, 0x73, - 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, - 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, - 0x65, 0x6e, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, - 0x17, 0x0a, 0x07, 0x73, 0x6f, 0x72, 0x74, 0x5f, 0x62, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x06, 0x73, 0x6f, 0x72, 0x74, 0x42, 0x79, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, - 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, - 0x12, 0x46, 0x0a, 0x16, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x72, 0x65, 0x66, - 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x10, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4b, - 0x65, 0x79, 0x52, 0x14, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x66, 0x65, - 0x72, 0x65, 0x6e, 0x63, 0x65, 0x4b, 0x65, 0x79, 0x22, 0x93, 0x01, 0x0a, 0x17, 0x4c, 0x69, 0x73, - 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x31, 0x0a, 0x0b, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, - 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x0b, 0x65, 0x78, 0x70, 0x65, - 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, - 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x6f, 0x74, - 0x61, 0x6c, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x70, - 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x61, 
0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x29, - 0x0a, 0x17, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, - 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0xfe, 0x02, 0x0a, 0x0a, 0x45, 0x78, - 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, - 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x39, - 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x04, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, - 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x47, 0x0a, 0x13, 0x72, 0x65, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, - 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x65, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x12, - 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, - 0x65, 0x73, 0x12, 0x41, 0x0a, 0x0d, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, - 0x61, 0x74, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1c, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x53, 0x74, 0x6f, 0x72, 0x61, - 0x67, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x0c, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, - 0x53, 0x74, 0x61, 0x74, 0x65, 0x22, 0x63, 0x0a, 0x0c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, - 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x1c, 0x0a, 0x18, 0x53, 0x54, 0x4f, 0x52, 0x41, 0x47, 0x45, - 0x53, 0x54, 0x41, 0x54, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, - 0x44, 0x10, 0x00, 0x12, 0x1a, 0x0a, 0x16, 0x53, 0x54, 0x4f, 0x52, 0x41, 0x47, 0x45, 0x53, 0x54, - 0x41, 0x54, 0x45, 0x5f, 0x41, 0x56, 0x41, 0x49, 0x4c, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x01, 0x12, - 0x19, 0x0a, 0x15, 0x53, 0x54, 0x4f, 0x52, 0x41, 0x47, 0x45, 0x53, 0x54, 0x41, 0x54, 0x45, 0x5f, - 0x41, 0x52, 0x43, 0x48, 0x49, 0x56, 0x45, 0x44, 0x10, 0x02, 0x22, 0x2a, 0x0a, 0x18, 0x41, 0x72, - 0x63, 0x68, 0x69, 0x76, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x2c, 0x0a, 0x1a, 0x55, 0x6e, 0x61, 0x72, 0x63, 0x68, - 0x69, 0x76, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x02, 0x69, 0x64, 0x32, 0xd8, 0x05, 0x0a, 0x11, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, - 0x65, 0x6e, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x72, 0x0a, 0x12, 0x43, 0x72, - 0x65, 0x61, 0x74, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x56, 0x31, - 0x12, 0x1c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x72, 0x65, 0x61, 
0x74, 0x65, 0x45, 0x78, 0x70, - 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0f, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x22, - 0x2d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x27, 0x3a, 0x0a, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, - 0x65, 0x6e, 0x74, 0x22, 0x19, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, - 0x61, 0x31, 0x2f, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x65, - 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x56, - 0x31, 0x12, 0x19, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, - 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0f, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0x26, 0x82, - 0xd3, 0xe4, 0x93, 0x02, 0x20, 0x12, 0x1e, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, - 0x65, 0x74, 0x61, 0x31, 0x2f, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, - 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x12, 0x71, 0x0a, 0x11, 0x4c, 0x69, 0x73, 0x74, 0x45, 0x78, 0x70, - 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x56, 0x31, 0x12, 0x1b, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, - 0x73, 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x21, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1b, 0x12, 0x19, 0x2f, - 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x65, 0x78, 0x70, - 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x72, 0x0a, 0x12, 0x44, 0x65, 0x6c, 0x65, - 0x74, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x56, 0x31, 0x12, 0x1c, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, - 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, - 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, - 0x6d, 0x70, 0x74, 0x79, 0x22, 0x26, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x20, 0x2a, 0x1e, 0x2f, 0x61, - 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x65, 0x78, 0x70, 0x65, - 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x12, 0x7c, 0x0a, 0x13, - 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, - 0x74, 0x56, 0x31, 0x12, 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, - 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x2e, 0x82, 0xd3, 0xe4, 0x93, - 0x02, 0x28, 0x22, 0x26, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, - 0x31, 0x2f, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2f, 0x7b, 0x69, - 0x64, 0x7d, 0x3a, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x12, 0x82, 0x01, 0x0a, 0x15, 0x55, - 0x6e, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, - 0x6e, 0x74, 0x56, 0x31, 0x12, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x55, 0x6e, 0x61, 0x72, 
0x63, - 0x68, 0x69, 0x76, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x30, 0x82, - 0xd3, 0xe4, 0x93, 0x02, 0x2a, 0x22, 0x28, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, - 0x65, 0x74, 0x61, 0x31, 0x2f, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, - 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x3a, 0x75, 0x6e, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x42, - 0x91, 0x01, 0x92, 0x41, 0x51, 0x2a, 0x02, 0x01, 0x02, 0x52, 0x1c, 0x0a, 0x07, 0x64, 0x65, 0x66, - 0x61, 0x75, 0x6c, 0x74, 0x12, 0x11, 0x12, 0x0f, 0x0a, 0x0d, 0x1a, 0x0b, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x5a, 0x1f, 0x0a, 0x1d, 0x0a, 0x06, 0x42, 0x65, 0x61, - 0x72, 0x65, 0x72, 0x12, 0x13, 0x08, 0x02, 0x1a, 0x0d, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, - 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x02, 0x62, 0x0c, 0x0a, 0x0a, 0x0a, 0x06, 0x42, 0x65, - 0x61, 0x72, 0x65, 0x72, 0x12, 0x00, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, - 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, - 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x67, 0x6f, 0x5f, 0x63, 0x6c, 0x69, - 0x65, 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v1beta1_experiment_proto_rawDesc = "" + + "\n" + + "$backend/api/v1beta1/experiment.proto\x12\x03api\x1a,backend/api/v1beta1/resource_reference.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\"J\n" + + "\x17CreateExperimentRequest\x12/\n" + + "\n" + + "experiment\x18\x01 \x01(\v2\x0f.api.ExperimentR\n" + + "experiment\"&\n" + + "\x14GetExperimentRequest\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\"\xcd\x01\n" + + "\x16ListExperimentsRequest\x12\x1d\n" + + "\n" + + "page_token\x18\x01 \x01(\tR\tpageToken\x12\x1b\n" + + "\tpage_size\x18\x02 \x01(\x05R\bpageSize\x12\x17\n" + + "\asort_by\x18\x03 \x01(\tR\x06sortBy\x12\x16\n" + + "\x06filter\x18\x04 \x01(\tR\x06filter\x12F\n" + + "\x16resource_reference_key\x18\x05 \x01(\v2\x10.api.ResourceKeyR\x14resourceReferenceKey\"\x93\x01\n" + + "\x17ListExperimentsResponse\x121\n" + + "\vexperiments\x18\x01 \x03(\v2\x0f.api.ExperimentR\vexperiments\x12\x1d\n" + + "\n" + + "total_size\x18\x03 \x01(\x05R\ttotalSize\x12&\n" + + "\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\")\n" + + "\x17DeleteExperimentRequest\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\"\xfe\x02\n" + + "\n" + + "Experiment\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n" + + "\x04name\x18\x02 \x01(\tR\x04name\x12 \n" + + "\vdescription\x18\x03 \x01(\tR\vdescription\x129\n" + + "\n" + + "created_at\x18\x04 \x01(\v2\x1a.google.protobuf.TimestampR\tcreatedAt\x12G\n" + + "\x13resource_references\x18\x05 \x03(\v2\x16.api.ResourceReferenceR\x12resourceReferences\x12A\n" + + "\rstorage_state\x18\x06 \x01(\x0e2\x1c.api.Experiment.StorageStateR\fstorageState\"c\n" + + "\fStorageState\x12\x1c\n" + + "\x18STORAGESTATE_UNSPECIFIED\x10\x00\x12\x1a\n" + + "\x16STORAGESTATE_AVAILABLE\x10\x01\x12\x19\n" + + "\x15STORAGESTATE_ARCHIVED\x10\x02\"*\n" + + "\x18ArchiveExperimentRequest\x12\x0e\n" + + "\x02id\x18\x01 
\x01(\tR\x02id\",\n" + + "\x1aUnarchiveExperimentRequest\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id2\xd8\x05\n" + + "\x11ExperimentService\x12r\n" + + "\x12CreateExperimentV1\x12\x1c.api.CreateExperimentRequest\x1a\x0f.api.Experiment\"-\x82\xd3\xe4\x93\x02':\n" + + "experiment\"\x19/apis/v1beta1/experiments\x12e\n" + + "\x0fGetExperimentV1\x12\x19.api.GetExperimentRequest\x1a\x0f.api.Experiment\"&\x82\xd3\xe4\x93\x02 \x12\x1e/apis/v1beta1/experiments/{id}\x12q\n" + + "\x11ListExperimentsV1\x12\x1b.api.ListExperimentsRequest\x1a\x1c.api.ListExperimentsResponse\"!\x82\xd3\xe4\x93\x02\x1b\x12\x19/apis/v1beta1/experiments\x12r\n" + + "\x12DeleteExperimentV1\x12\x1c.api.DeleteExperimentRequest\x1a\x16.google.protobuf.Empty\"&\x82\xd3\xe4\x93\x02 *\x1e/apis/v1beta1/experiments/{id}\x12|\n" + + "\x13ArchiveExperimentV1\x12\x1d.api.ArchiveExperimentRequest\x1a\x16.google.protobuf.Empty\".\x82\xd3\xe4\x93\x02(\"&/apis/v1beta1/experiments/{id}:archive\x12\x82\x01\n" + + "\x15UnarchiveExperimentV1\x12\x1f.api.UnarchiveExperimentRequest\x1a\x16.google.protobuf.Empty\"0\x82\xd3\xe4\x93\x02*\"(/apis/v1beta1/experiments/{id}:unarchiveB\x91\x01\x92AQ*\x02\x01\x02R\x1c\n" + + "\adefault\x12\x11\x12\x0f\n" + + "\r\x1a\v.api.StatusZ\x1f\n" + + "\x1d\n" + + "\x06Bearer\x12\x13\b\x02\x1a\rauthorization \x02b\f\n" + + "\n" + + "\n" + + "\x06Bearer\x12\x00Z;github.com/kubeflow/pipelines/backend/api/v1beta1/go_clientb\x06proto3" var ( file_backend_api_v1beta1_experiment_proto_rawDescOnce sync.Once - file_backend_api_v1beta1_experiment_proto_rawDescData = file_backend_api_v1beta1_experiment_proto_rawDesc + file_backend_api_v1beta1_experiment_proto_rawDescData []byte ) func file_backend_api_v1beta1_experiment_proto_rawDescGZIP() []byte { file_backend_api_v1beta1_experiment_proto_rawDescOnce.Do(func() { - file_backend_api_v1beta1_experiment_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v1beta1_experiment_proto_rawDescData) + file_backend_api_v1beta1_experiment_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_experiment_proto_rawDesc), len(file_backend_api_v1beta1_experiment_proto_rawDesc))) }) return file_backend_api_v1beta1_experiment_proto_rawDescData } var file_backend_api_v1beta1_experiment_proto_enumTypes = make([]protoimpl.EnumInfo, 1) var file_backend_api_v1beta1_experiment_proto_msgTypes = make([]protoimpl.MessageInfo, 8) -var file_backend_api_v1beta1_experiment_proto_goTypes = []interface{}{ +var file_backend_api_v1beta1_experiment_proto_goTypes = []any{ (Experiment_StorageState)(0), // 0: api.Experiment.StorageState (*CreateExperimentRequest)(nil), // 1: api.CreateExperimentRequest (*GetExperimentRequest)(nil), // 2: api.GetExperimentRequest @@ -782,111 +674,12 @@ func file_backend_api_v1beta1_experiment_proto_init() { if File_backend_api_v1beta1_experiment_proto != nil { return } - file_backend_api_v1beta1_error_proto_init() file_backend_api_v1beta1_resource_reference_proto_init() - if !protoimpl.UnsafeEnabled { - file_backend_api_v1beta1_experiment_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateExperimentRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_experiment_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetExperimentRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields 
- default: - return nil - } - } - file_backend_api_v1beta1_experiment_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListExperimentsRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_experiment_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListExperimentsResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_experiment_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeleteExperimentRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_experiment_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Experiment); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_experiment_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ArchiveExperimentRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_experiment_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*UnarchiveExperimentRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v1beta1_experiment_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_experiment_proto_rawDesc), len(file_backend_api_v1beta1_experiment_proto_rawDesc)), NumEnums: 1, NumMessages: 8, NumExtensions: 0, @@ -898,285 +691,6 @@ func file_backend_api_v1beta1_experiment_proto_init() { MessageInfos: file_backend_api_v1beta1_experiment_proto_msgTypes, }.Build() File_backend_api_v1beta1_experiment_proto = out.File - file_backend_api_v1beta1_experiment_proto_rawDesc = nil file_backend_api_v1beta1_experiment_proto_goTypes = nil file_backend_api_v1beta1_experiment_proto_depIdxs = nil } - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// ExperimentServiceClient is the client API for ExperimentService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type ExperimentServiceClient interface { - // Creates a new experiment. - CreateExperimentV1(ctx context.Context, in *CreateExperimentRequest, opts ...grpc.CallOption) (*Experiment, error) - // Finds a specific experiment by ID. - GetExperimentV1(ctx context.Context, in *GetExperimentRequest, opts ...grpc.CallOption) (*Experiment, error) - // Finds all experiments. Supports pagination, and sorting on certain fields. 
- ListExperimentsV1(ctx context.Context, in *ListExperimentsRequest, opts ...grpc.CallOption) (*ListExperimentsResponse, error) - // Deletes an experiment without deleting the experiment's runs and jobs. To - // avoid unexpected behaviors, delete an experiment's runs and jobs before - // deleting the experiment. - DeleteExperimentV1(ctx context.Context, in *DeleteExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - // Archives an experiment and the experiment's runs and jobs. - ArchiveExperimentV1(ctx context.Context, in *ArchiveExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - // Restores an archived experiment. The experiment's archived runs and jobs - // will stay archived. - UnarchiveExperimentV1(ctx context.Context, in *UnarchiveExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) -} - -type experimentServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewExperimentServiceClient(cc grpc.ClientConnInterface) ExperimentServiceClient { - return &experimentServiceClient{cc} -} - -func (c *experimentServiceClient) CreateExperimentV1(ctx context.Context, in *CreateExperimentRequest, opts ...grpc.CallOption) (*Experiment, error) { - out := new(Experiment) - err := c.cc.Invoke(ctx, "/api.ExperimentService/CreateExperimentV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *experimentServiceClient) GetExperimentV1(ctx context.Context, in *GetExperimentRequest, opts ...grpc.CallOption) (*Experiment, error) { - out := new(Experiment) - err := c.cc.Invoke(ctx, "/api.ExperimentService/GetExperimentV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *experimentServiceClient) ListExperimentsV1(ctx context.Context, in *ListExperimentsRequest, opts ...grpc.CallOption) (*ListExperimentsResponse, error) { - out := new(ListExperimentsResponse) - err := c.cc.Invoke(ctx, "/api.ExperimentService/ListExperimentsV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *experimentServiceClient) DeleteExperimentV1(ctx context.Context, in *DeleteExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/api.ExperimentService/DeleteExperimentV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *experimentServiceClient) ArchiveExperimentV1(ctx context.Context, in *ArchiveExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/api.ExperimentService/ArchiveExperimentV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *experimentServiceClient) UnarchiveExperimentV1(ctx context.Context, in *UnarchiveExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/api.ExperimentService/UnarchiveExperimentV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// ExperimentServiceServer is the server API for ExperimentService service. -type ExperimentServiceServer interface { - // Creates a new experiment. - CreateExperimentV1(context.Context, *CreateExperimentRequest) (*Experiment, error) - // Finds a specific experiment by ID. - GetExperimentV1(context.Context, *GetExperimentRequest) (*Experiment, error) - // Finds all experiments. Supports pagination, and sorting on certain fields. 
- ListExperimentsV1(context.Context, *ListExperimentsRequest) (*ListExperimentsResponse, error) - // Deletes an experiment without deleting the experiment's runs and jobs. To - // avoid unexpected behaviors, delete an experiment's runs and jobs before - // deleting the experiment. - DeleteExperimentV1(context.Context, *DeleteExperimentRequest) (*emptypb.Empty, error) - // Archives an experiment and the experiment's runs and jobs. - ArchiveExperimentV1(context.Context, *ArchiveExperimentRequest) (*emptypb.Empty, error) - // Restores an archived experiment. The experiment's archived runs and jobs - // will stay archived. - UnarchiveExperimentV1(context.Context, *UnarchiveExperimentRequest) (*emptypb.Empty, error) -} - -// UnimplementedExperimentServiceServer can be embedded to have forward compatible implementations. -type UnimplementedExperimentServiceServer struct { -} - -func (*UnimplementedExperimentServiceServer) CreateExperimentV1(context.Context, *CreateExperimentRequest) (*Experiment, error) { - return nil, status.Errorf(codes.Unimplemented, "method CreateExperimentV1 not implemented") -} -func (*UnimplementedExperimentServiceServer) GetExperimentV1(context.Context, *GetExperimentRequest) (*Experiment, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetExperimentV1 not implemented") -} -func (*UnimplementedExperimentServiceServer) ListExperimentsV1(context.Context, *ListExperimentsRequest) (*ListExperimentsResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ListExperimentsV1 not implemented") -} -func (*UnimplementedExperimentServiceServer) DeleteExperimentV1(context.Context, *DeleteExperimentRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method DeleteExperimentV1 not implemented") -} -func (*UnimplementedExperimentServiceServer) ArchiveExperimentV1(context.Context, *ArchiveExperimentRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method ArchiveExperimentV1 not implemented") -} -func (*UnimplementedExperimentServiceServer) UnarchiveExperimentV1(context.Context, *UnarchiveExperimentRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method UnarchiveExperimentV1 not implemented") -} - -func RegisterExperimentServiceServer(s *grpc.Server, srv ExperimentServiceServer) { - s.RegisterService(&_ExperimentService_serviceDesc, srv) -} - -func _ExperimentService_CreateExperimentV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CreateExperimentRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ExperimentServiceServer).CreateExperimentV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.ExperimentService/CreateExperimentV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ExperimentServiceServer).CreateExperimentV1(ctx, req.(*CreateExperimentRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ExperimentService_GetExperimentV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetExperimentRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ExperimentServiceServer).GetExperimentV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - 
FullMethod: "/api.ExperimentService/GetExperimentV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ExperimentServiceServer).GetExperimentV1(ctx, req.(*GetExperimentRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ExperimentService_ListExperimentsV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ListExperimentsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ExperimentServiceServer).ListExperimentsV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.ExperimentService/ListExperimentsV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ExperimentServiceServer).ListExperimentsV1(ctx, req.(*ListExperimentsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ExperimentService_DeleteExperimentV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(DeleteExperimentRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ExperimentServiceServer).DeleteExperimentV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.ExperimentService/DeleteExperimentV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ExperimentServiceServer).DeleteExperimentV1(ctx, req.(*DeleteExperimentRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ExperimentService_ArchiveExperimentV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ArchiveExperimentRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ExperimentServiceServer).ArchiveExperimentV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.ExperimentService/ArchiveExperimentV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ExperimentServiceServer).ArchiveExperimentV1(ctx, req.(*ArchiveExperimentRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ExperimentService_UnarchiveExperimentV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(UnarchiveExperimentRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ExperimentServiceServer).UnarchiveExperimentV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.ExperimentService/UnarchiveExperimentV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ExperimentServiceServer).UnarchiveExperimentV1(ctx, req.(*UnarchiveExperimentRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _ExperimentService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "api.ExperimentService", - HandlerType: (*ExperimentServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "CreateExperimentV1", - Handler: _ExperimentService_CreateExperimentV1_Handler, - }, - { - MethodName: "GetExperimentV1", - Handler: _ExperimentService_GetExperimentV1_Handler, - }, - { - MethodName: 
"ListExperimentsV1", - Handler: _ExperimentService_ListExperimentsV1_Handler, - }, - { - MethodName: "DeleteExperimentV1", - Handler: _ExperimentService_DeleteExperimentV1_Handler, - }, - { - MethodName: "ArchiveExperimentV1", - Handler: _ExperimentService_ArchiveExperimentV1_Handler, - }, - { - MethodName: "UnarchiveExperimentV1", - Handler: _ExperimentService_UnarchiveExperimentV1_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "backend/api/v1beta1/experiment.proto", -} diff --git a/backend/api/v1beta1/go_client/experiment.pb.gw.go b/backend/api/v1beta1/go_client/experiment.pb.gw.go index 8a5943375b5..f86810065b4 100644 --- a/backend/api/v1beta1/go_client/experiment.pb.gw.go +++ b/backend/api/v1beta1/go_client/experiment.pb.gw.go @@ -10,457 +10,374 @@ package go_client import ( "context" + "errors" "io" "net/http" - "github.com/golang/protobuf/descriptor" - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" ) // Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = descriptor.ForMessage -var _ = metadata.Join +var ( + _ codes.Code + _ io.Reader + _ status.Status + _ = errors.New + _ = runtime.String + _ = utilities.NewDoubleArray + _ = metadata.Join +) func request_ExperimentService_CreateExperimentV1_0(ctx context.Context, marshaler runtime.Marshaler, client ExperimentServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreateExperimentRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Experiment); err != nil && err != io.EOF { + var ( + protoReq CreateExperimentRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Experiment); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } msg, err := client.CreateExperimentV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_ExperimentService_CreateExperimentV1_0(ctx context.Context, marshaler runtime.Marshaler, server ExperimentServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreateExperimentRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Experiment); err != nil && err != io.EOF { + var ( + protoReq CreateExperimentRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Experiment); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, 
status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.CreateExperimentV1(ctx, &protoReq) return msg, metadata, err - } func request_ExperimentService_GetExperimentV1_0(ctx context.Context, marshaler runtime.Marshaler, client ExperimentServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetExperimentRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetExperimentRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := client.GetExperimentV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_ExperimentService_GetExperimentV1_0(ctx context.Context, marshaler runtime.Marshaler, server ExperimentServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetExperimentRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetExperimentRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := server.GetExperimentV1(ctx, &protoReq) return msg, metadata, err - } -var ( - filter_ExperimentService_ListExperimentsV1_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} -) +var filter_ExperimentService_ListExperimentsV1_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} func request_ExperimentService_ListExperimentsV1_0(ctx context.Context, marshaler runtime.Marshaler, client ExperimentServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListExperimentsRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListExperimentsRequest + metadata runtime.ServerMetadata + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ExperimentService_ListExperimentsV1_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListExperimentsV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_ExperimentService_ListExperimentsV1_0(ctx context.Context, marshaler runtime.Marshaler, server ExperimentServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListExperimentsRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListExperimentsRequest + metadata 
runtime.ServerMetadata + ) if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ExperimentService_ListExperimentsV1_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.ListExperimentsV1(ctx, &protoReq) return msg, metadata, err - } func request_ExperimentService_DeleteExperimentV1_0(ctx context.Context, marshaler runtime.Marshaler, client ExperimentServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeleteExperimentRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DeleteExperimentRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := client.DeleteExperimentV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_ExperimentService_DeleteExperimentV1_0(ctx context.Context, marshaler runtime.Marshaler, server ExperimentServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeleteExperimentRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DeleteExperimentRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := server.DeleteExperimentV1(ctx, &protoReq) return msg, metadata, err - } func request_ExperimentService_ArchiveExperimentV1_0(ctx context.Context, marshaler runtime.Marshaler, client ExperimentServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ArchiveExperimentRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq ArchiveExperimentRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := client.ArchiveExperimentV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_ExperimentService_ArchiveExperimentV1_0(ctx context.Context, marshaler runtime.Marshaler, server ExperimentServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq 
ArchiveExperimentRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq ArchiveExperimentRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := server.ArchiveExperimentV1(ctx, &protoReq) return msg, metadata, err - } func request_ExperimentService_UnarchiveExperimentV1_0(ctx context.Context, marshaler runtime.Marshaler, client ExperimentServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq UnarchiveExperimentRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq UnarchiveExperimentRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := client.UnarchiveExperimentV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_ExperimentService_UnarchiveExperimentV1_0(ctx context.Context, marshaler runtime.Marshaler, server ExperimentServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq UnarchiveExperimentRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq UnarchiveExperimentRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := server.UnarchiveExperimentV1(ctx, &protoReq) return msg, metadata, err - } // RegisterExperimentServiceHandlerServer registers the http handlers for service ExperimentService to "mux". // UnaryRPC :call ExperimentServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. // Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterExperimentServiceHandlerFromEndpoint instead. +// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call. 
func RegisterExperimentServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server ExperimentServiceServer) error { - - mux.Handle("POST", pattern_ExperimentService_CreateExperimentV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_ExperimentService_CreateExperimentV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.ExperimentService/CreateExperimentV1", runtime.WithHTTPPathPattern("/apis/v1beta1/experiments")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_ExperimentService_CreateExperimentV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_ExperimentService_CreateExperimentV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_CreateExperimentV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_CreateExperimentV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_ExperimentService_GetExperimentV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_ExperimentService_GetExperimentV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.ExperimentService/GetExperimentV1", runtime.WithHTTPPathPattern("/apis/v1beta1/experiments/{id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_ExperimentService_GetExperimentV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_ExperimentService_GetExperimentV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_GetExperimentV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_GetExperimentV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_ExperimentService_ListExperimentsV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_ExperimentService_ListExperimentsV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.ExperimentService/ListExperimentsV1", runtime.WithHTTPPathPattern("/apis/v1beta1/experiments")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_ExperimentService_ListExperimentsV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_ExperimentService_ListExperimentsV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_ListExperimentsV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- + forward_ExperimentService_ListExperimentsV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("DELETE", pattern_ExperimentService_DeleteExperimentV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodDelete, pattern_ExperimentService_DeleteExperimentV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.ExperimentService/DeleteExperimentV1", runtime.WithHTTPPathPattern("/apis/v1beta1/experiments/{id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_ExperimentService_DeleteExperimentV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_ExperimentService_DeleteExperimentV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_DeleteExperimentV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_DeleteExperimentV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_ExperimentService_ArchiveExperimentV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_ExperimentService_ArchiveExperimentV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.ExperimentService/ArchiveExperimentV1", runtime.WithHTTPPathPattern("/apis/v1beta1/experiments/{id}:archive")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_ExperimentService_ArchiveExperimentV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_ExperimentService_ArchiveExperimentV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_ArchiveExperimentV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_ArchiveExperimentV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_ExperimentService_UnarchiveExperimentV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_ExperimentService_UnarchiveExperimentV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.ExperimentService/UnarchiveExperimentV1", runtime.WithHTTPPathPattern("/apis/v1beta1/experiments/{id}:unarchive")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_ExperimentService_UnarchiveExperimentV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_ExperimentService_UnarchiveExperimentV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_UnarchiveExperimentV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- + forward_ExperimentService_UnarchiveExperimentV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) return nil @@ -469,25 +386,24 @@ func RegisterExperimentServiceHandlerServer(ctx context.Context, mux *runtime.Se // RegisterExperimentServiceHandlerFromEndpoint is same as RegisterExperimentServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterExperimentServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) + conn, err := grpc.NewClient(endpoint, opts...) if err != nil { return err } defer func() { if err != nil { if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } return } go func() { <-ctx.Done() if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } }() }() - return RegisterExperimentServiceHandler(ctx, mux, conn) } @@ -501,156 +417,127 @@ func RegisterExperimentServiceHandler(ctx context.Context, mux *runtime.ServeMux // to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "ExperimentServiceClient". // Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "ExperimentServiceClient" // doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "ExperimentServiceClient" to call the correct interceptors. +// "ExperimentServiceClient" to call the correct interceptors. This client ignores the HTTP middlewares. func RegisterExperimentServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client ExperimentServiceClient) error { - - mux.Handle("POST", pattern_ExperimentService_CreateExperimentV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_ExperimentService_CreateExperimentV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.ExperimentService/CreateExperimentV1", runtime.WithHTTPPathPattern("/apis/v1beta1/experiments")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_ExperimentService_CreateExperimentV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_ExperimentService_CreateExperimentV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_CreateExperimentV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_CreateExperimentV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_ExperimentService_GetExperimentV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_ExperimentService_GetExperimentV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.ExperimentService/GetExperimentV1", runtime.WithHTTPPathPattern("/apis/v1beta1/experiments/{id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_ExperimentService_GetExperimentV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_ExperimentService_GetExperimentV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_GetExperimentV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_GetExperimentV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_ExperimentService_ListExperimentsV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_ExperimentService_ListExperimentsV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.ExperimentService/ListExperimentsV1", runtime.WithHTTPPathPattern("/apis/v1beta1/experiments")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_ExperimentService_ListExperimentsV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_ExperimentService_ListExperimentsV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_ListExperimentsV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_ListExperimentsV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("DELETE", pattern_ExperimentService_DeleteExperimentV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodDelete, pattern_ExperimentService_DeleteExperimentV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.ExperimentService/DeleteExperimentV1", runtime.WithHTTPPathPattern("/apis/v1beta1/experiments/{id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_ExperimentService_DeleteExperimentV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_ExperimentService_DeleteExperimentV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_DeleteExperimentV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_DeleteExperimentV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_ExperimentService_ArchiveExperimentV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_ExperimentService_ArchiveExperimentV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.ExperimentService/ArchiveExperimentV1", runtime.WithHTTPPathPattern("/apis/v1beta1/experiments/{id}:archive")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_ExperimentService_ArchiveExperimentV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_ExperimentService_ArchiveExperimentV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_ArchiveExperimentV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_ArchiveExperimentV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_ExperimentService_UnarchiveExperimentV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_ExperimentService_UnarchiveExperimentV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.ExperimentService/UnarchiveExperimentV1", runtime.WithHTTPPathPattern("/apis/v1beta1/experiments/{id}:unarchive")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_ExperimentService_UnarchiveExperimentV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_ExperimentService_UnarchiveExperimentV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_UnarchiveExperimentV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_UnarchiveExperimentV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - return nil } var ( - pattern_ExperimentService_CreateExperimentV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "experiments"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_ExperimentService_GetExperimentV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "experiments", "id"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_ExperimentService_ListExperimentsV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "experiments"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_ExperimentService_DeleteExperimentV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "experiments", "id"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_ExperimentService_ArchiveExperimentV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "experiments", "id"}, "archive", runtime.AssumeColonVerbOpt(true))) - - pattern_ExperimentService_UnarchiveExperimentV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "experiments", "id"}, "unarchive", runtime.AssumeColonVerbOpt(true))) + pattern_ExperimentService_CreateExperimentV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "experiments"}, "")) + pattern_ExperimentService_GetExperimentV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "experiments", "id"}, "")) + pattern_ExperimentService_ListExperimentsV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "experiments"}, "")) + pattern_ExperimentService_DeleteExperimentV1_0 = 
runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "experiments", "id"}, "")) + pattern_ExperimentService_ArchiveExperimentV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "experiments", "id"}, "archive")) + pattern_ExperimentService_UnarchiveExperimentV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "experiments", "id"}, "unarchive")) ) var ( - forward_ExperimentService_CreateExperimentV1_0 = runtime.ForwardResponseMessage - - forward_ExperimentService_GetExperimentV1_0 = runtime.ForwardResponseMessage - - forward_ExperimentService_ListExperimentsV1_0 = runtime.ForwardResponseMessage - - forward_ExperimentService_DeleteExperimentV1_0 = runtime.ForwardResponseMessage - - forward_ExperimentService_ArchiveExperimentV1_0 = runtime.ForwardResponseMessage - + forward_ExperimentService_CreateExperimentV1_0 = runtime.ForwardResponseMessage + forward_ExperimentService_GetExperimentV1_0 = runtime.ForwardResponseMessage + forward_ExperimentService_ListExperimentsV1_0 = runtime.ForwardResponseMessage + forward_ExperimentService_DeleteExperimentV1_0 = runtime.ForwardResponseMessage + forward_ExperimentService_ArchiveExperimentV1_0 = runtime.ForwardResponseMessage forward_ExperimentService_UnarchiveExperimentV1_0 = runtime.ForwardResponseMessage ) diff --git a/backend/api/v1beta1/go_client/experiment_grpc.pb.go b/backend/api/v1beta1/go_client/experiment_grpc.pb.go new file mode 100644 index 00000000000..d52c9673a10 --- /dev/null +++ b/backend/api/v1beta1/go_client/experiment_grpc.pb.go @@ -0,0 +1,344 @@ +// Copyright 2018 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v6.31.1 +// source: backend/api/v1beta1/experiment.proto + +package go_client + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + emptypb "google.golang.org/protobuf/types/known/emptypb" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. 
+const _ = grpc.SupportPackageIsVersion9 + +const ( + ExperimentService_CreateExperimentV1_FullMethodName = "/api.ExperimentService/CreateExperimentV1" + ExperimentService_GetExperimentV1_FullMethodName = "/api.ExperimentService/GetExperimentV1" + ExperimentService_ListExperimentsV1_FullMethodName = "/api.ExperimentService/ListExperimentsV1" + ExperimentService_DeleteExperimentV1_FullMethodName = "/api.ExperimentService/DeleteExperimentV1" + ExperimentService_ArchiveExperimentV1_FullMethodName = "/api.ExperimentService/ArchiveExperimentV1" + ExperimentService_UnarchiveExperimentV1_FullMethodName = "/api.ExperimentService/UnarchiveExperimentV1" +) + +// ExperimentServiceClient is the client API for ExperimentService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type ExperimentServiceClient interface { + // Creates a new experiment. + CreateExperimentV1(ctx context.Context, in *CreateExperimentRequest, opts ...grpc.CallOption) (*Experiment, error) + // Finds a specific experiment by ID. + GetExperimentV1(ctx context.Context, in *GetExperimentRequest, opts ...grpc.CallOption) (*Experiment, error) + // Finds all experiments. Supports pagination, and sorting on certain fields. + ListExperimentsV1(ctx context.Context, in *ListExperimentsRequest, opts ...grpc.CallOption) (*ListExperimentsResponse, error) + // Deletes an experiment without deleting the experiment's runs and jobs. To + // avoid unexpected behaviors, delete an experiment's runs and jobs before + // deleting the experiment. + DeleteExperimentV1(ctx context.Context, in *DeleteExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + // Archives an experiment and the experiment's runs and jobs. + ArchiveExperimentV1(ctx context.Context, in *ArchiveExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + // Restores an archived experiment. The experiment's archived runs and jobs + // will stay archived. + UnarchiveExperimentV1(ctx context.Context, in *UnarchiveExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) +} + +type experimentServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewExperimentServiceClient(cc grpc.ClientConnInterface) ExperimentServiceClient { + return &experimentServiceClient{cc} +} + +func (c *experimentServiceClient) CreateExperimentV1(ctx context.Context, in *CreateExperimentRequest, opts ...grpc.CallOption) (*Experiment, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Experiment) + err := c.cc.Invoke(ctx, ExperimentService_CreateExperimentV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *experimentServiceClient) GetExperimentV1(ctx context.Context, in *GetExperimentRequest, opts ...grpc.CallOption) (*Experiment, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Experiment) + err := c.cc.Invoke(ctx, ExperimentService_GetExperimentV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *experimentServiceClient) ListExperimentsV1(ctx context.Context, in *ListExperimentsRequest, opts ...grpc.CallOption) (*ListExperimentsResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ListExperimentsResponse) + err := c.cc.Invoke(ctx, ExperimentService_ListExperimentsV1_FullMethodName, in, out, cOpts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *experimentServiceClient) DeleteExperimentV1(ctx context.Context, in *DeleteExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, ExperimentService_DeleteExperimentV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *experimentServiceClient) ArchiveExperimentV1(ctx context.Context, in *ArchiveExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, ExperimentService_ArchiveExperimentV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *experimentServiceClient) UnarchiveExperimentV1(ctx context.Context, in *UnarchiveExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, ExperimentService_UnarchiveExperimentV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ExperimentServiceServer is the server API for ExperimentService service. +// All implementations must embed UnimplementedExperimentServiceServer +// for forward compatibility. +type ExperimentServiceServer interface { + // Creates a new experiment. + CreateExperimentV1(context.Context, *CreateExperimentRequest) (*Experiment, error) + // Finds a specific experiment by ID. + GetExperimentV1(context.Context, *GetExperimentRequest) (*Experiment, error) + // Finds all experiments. Supports pagination, and sorting on certain fields. + ListExperimentsV1(context.Context, *ListExperimentsRequest) (*ListExperimentsResponse, error) + // Deletes an experiment without deleting the experiment's runs and jobs. To + // avoid unexpected behaviors, delete an experiment's runs and jobs before + // deleting the experiment. + DeleteExperimentV1(context.Context, *DeleteExperimentRequest) (*emptypb.Empty, error) + // Archives an experiment and the experiment's runs and jobs. + ArchiveExperimentV1(context.Context, *ArchiveExperimentRequest) (*emptypb.Empty, error) + // Restores an archived experiment. The experiment's archived runs and jobs + // will stay archived. + UnarchiveExperimentV1(context.Context, *UnarchiveExperimentRequest) (*emptypb.Empty, error) + mustEmbedUnimplementedExperimentServiceServer() +} + +// UnimplementedExperimentServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. 
+type UnimplementedExperimentServiceServer struct{}
+
+func (UnimplementedExperimentServiceServer) CreateExperimentV1(context.Context, *CreateExperimentRequest) (*Experiment, error) {
+	return nil, status.Errorf(codes.Unimplemented, "method CreateExperimentV1 not implemented")
+}
+func (UnimplementedExperimentServiceServer) GetExperimentV1(context.Context, *GetExperimentRequest) (*Experiment, error) {
+	return nil, status.Errorf(codes.Unimplemented, "method GetExperimentV1 not implemented")
+}
+func (UnimplementedExperimentServiceServer) ListExperimentsV1(context.Context, *ListExperimentsRequest) (*ListExperimentsResponse, error) {
+	return nil, status.Errorf(codes.Unimplemented, "method ListExperimentsV1 not implemented")
+}
+func (UnimplementedExperimentServiceServer) DeleteExperimentV1(context.Context, *DeleteExperimentRequest) (*emptypb.Empty, error) {
+	return nil, status.Errorf(codes.Unimplemented, "method DeleteExperimentV1 not implemented")
+}
+func (UnimplementedExperimentServiceServer) ArchiveExperimentV1(context.Context, *ArchiveExperimentRequest) (*emptypb.Empty, error) {
+	return nil, status.Errorf(codes.Unimplemented, "method ArchiveExperimentV1 not implemented")
+}
+func (UnimplementedExperimentServiceServer) UnarchiveExperimentV1(context.Context, *UnarchiveExperimentRequest) (*emptypb.Empty, error) {
+	return nil, status.Errorf(codes.Unimplemented, "method UnarchiveExperimentV1 not implemented")
+}
+func (UnimplementedExperimentServiceServer) mustEmbedUnimplementedExperimentServiceServer() {}
+func (UnimplementedExperimentServiceServer) testEmbeddedByValue()                           {}
+
+// UnsafeExperimentServiceServer may be embedded to opt out of forward compatibility for this service.
+// Use of this interface is not recommended, as added methods to ExperimentServiceServer will
+// result in compilation errors.
+type UnsafeExperimentServiceServer interface {
+	mustEmbedUnimplementedExperimentServiceServer()
+}
+
+func RegisterExperimentServiceServer(s grpc.ServiceRegistrar, srv ExperimentServiceServer) {
+	// If the following call panics, it indicates UnimplementedExperimentServiceServer was
+	// embedded by pointer and is nil.  This will cause panics if an
+	// unimplemented method is ever invoked, so we test this at initialization
+	// time to prevent it from happening at runtime later due to I/O.
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&ExperimentService_ServiceDesc, srv) +} + +func _ExperimentService_CreateExperimentV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateExperimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ExperimentServiceServer).CreateExperimentV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ExperimentService_CreateExperimentV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ExperimentServiceServer).CreateExperimentV1(ctx, req.(*CreateExperimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ExperimentService_GetExperimentV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetExperimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ExperimentServiceServer).GetExperimentV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ExperimentService_GetExperimentV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ExperimentServiceServer).GetExperimentV1(ctx, req.(*GetExperimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ExperimentService_ListExperimentsV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListExperimentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ExperimentServiceServer).ListExperimentsV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ExperimentService_ListExperimentsV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ExperimentServiceServer).ListExperimentsV1(ctx, req.(*ListExperimentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ExperimentService_DeleteExperimentV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteExperimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ExperimentServiceServer).DeleteExperimentV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ExperimentService_DeleteExperimentV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ExperimentServiceServer).DeleteExperimentV1(ctx, req.(*DeleteExperimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ExperimentService_ArchiveExperimentV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ArchiveExperimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ExperimentServiceServer).ArchiveExperimentV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ExperimentService_ArchiveExperimentV1_FullMethodName, + } + handler := func(ctx context.Context, req 
interface{}) (interface{}, error) { + return srv.(ExperimentServiceServer).ArchiveExperimentV1(ctx, req.(*ArchiveExperimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ExperimentService_UnarchiveExperimentV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UnarchiveExperimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ExperimentServiceServer).UnarchiveExperimentV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ExperimentService_UnarchiveExperimentV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ExperimentServiceServer).UnarchiveExperimentV1(ctx, req.(*UnarchiveExperimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// ExperimentService_ServiceDesc is the grpc.ServiceDesc for ExperimentService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var ExperimentService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "api.ExperimentService", + HandlerType: (*ExperimentServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateExperimentV1", + Handler: _ExperimentService_CreateExperimentV1_Handler, + }, + { + MethodName: "GetExperimentV1", + Handler: _ExperimentService_GetExperimentV1_Handler, + }, + { + MethodName: "ListExperimentsV1", + Handler: _ExperimentService_ListExperimentsV1_Handler, + }, + { + MethodName: "DeleteExperimentV1", + Handler: _ExperimentService_DeleteExperimentV1_Handler, + }, + { + MethodName: "ArchiveExperimentV1", + Handler: _ExperimentService_ArchiveExperimentV1_Handler, + }, + { + MethodName: "UnarchiveExperimentV1", + Handler: _ExperimentService_UnarchiveExperimentV1_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "backend/api/v1beta1/experiment.proto", +} diff --git a/backend/api/v1beta1/go_client/filter.pb.go b/backend/api/v1beta1/go_client/filter.pb.go index c6adf00ce61..402bd1d59b9 100644 --- a/backend/api/v1beta1/go_client/filter.pb.go +++ b/backend/api/v1beta1/go_client/filter.pb.go @@ -14,23 +14,19 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v1beta1/filter.proto package go_client import ( - context "context" - _ "google.golang.org/genproto/googleapis/api/annotations" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" timestamppb "google.golang.org/protobuf/types/known/timestamppb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -117,13 +113,10 @@ func (Predicate_Op) EnumDescriptor() ([]byte, []int) { // Predicate captures individual conditions that must be true for a resource // being filtered. 
type Predicate struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Op Predicate_Op `protobuf:"varint,1,opt,name=op,proto3,enum=api.Predicate_Op" json:"op,omitempty"` - Key string `protobuf:"bytes,2,opt,name=key,proto3" json:"key,omitempty"` - // Types that are assignable to Value: + state protoimpl.MessageState `protogen:"open.v1"` + Op Predicate_Op `protobuf:"varint,1,opt,name=op,proto3,enum=api.Predicate_Op" json:"op,omitempty"` + Key string `protobuf:"bytes,2,opt,name=key,proto3" json:"key,omitempty"` + // Types that are valid to be assigned to Value: // // *Predicate_IntValue // *Predicate_LongValue @@ -132,16 +125,16 @@ type Predicate struct { // *Predicate_IntValues // *Predicate_LongValues // *Predicate_StringValues - Value isPredicate_Value `protobuf_oneof:"value"` + Value isPredicate_Value `protobuf_oneof:"value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Predicate) Reset() { *x = Predicate{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_filter_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_filter_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Predicate) String() string { @@ -152,7 +145,7 @@ func (*Predicate) ProtoMessage() {} func (x *Predicate) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_filter_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -181,58 +174,72 @@ func (x *Predicate) GetKey() string { return "" } -func (m *Predicate) GetValue() isPredicate_Value { - if m != nil { - return m.Value +func (x *Predicate) GetValue() isPredicate_Value { + if x != nil { + return x.Value } return nil } func (x *Predicate) GetIntValue() int32 { - if x, ok := x.GetValue().(*Predicate_IntValue); ok { - return x.IntValue + if x != nil { + if x, ok := x.Value.(*Predicate_IntValue); ok { + return x.IntValue + } } return 0 } func (x *Predicate) GetLongValue() int64 { - if x, ok := x.GetValue().(*Predicate_LongValue); ok { - return x.LongValue + if x != nil { + if x, ok := x.Value.(*Predicate_LongValue); ok { + return x.LongValue + } } return 0 } func (x *Predicate) GetStringValue() string { - if x, ok := x.GetValue().(*Predicate_StringValue); ok { - return x.StringValue + if x != nil { + if x, ok := x.Value.(*Predicate_StringValue); ok { + return x.StringValue + } } return "" } func (x *Predicate) GetTimestampValue() *timestamppb.Timestamp { - if x, ok := x.GetValue().(*Predicate_TimestampValue); ok { - return x.TimestampValue + if x != nil { + if x, ok := x.Value.(*Predicate_TimestampValue); ok { + return x.TimestampValue + } } return nil } func (x *Predicate) GetIntValues() *IntValues { - if x, ok := x.GetValue().(*Predicate_IntValues); ok { - return x.IntValues + if x != nil { + if x, ok := x.Value.(*Predicate_IntValues); ok { + return x.IntValues + } } return nil } func (x *Predicate) GetLongValues() *LongValues { - if x, ok := x.GetValue().(*Predicate_LongValues); ok { - return x.LongValues + if x != nil { + if x, ok := x.Value.(*Predicate_LongValues); ok { + return x.LongValues + } } return nil } func (x *Predicate) GetStringValues() *StringValues { - if x, ok := x.GetValue().(*Predicate_StringValues); ok { - return x.StringValues + if x != nil { + 
if x, ok := x.Value.(*Predicate_StringValues); ok { + return x.StringValues + } } return nil } @@ -287,20 +294,17 @@ func (*Predicate_LongValues) isPredicate_Value() {} func (*Predicate_StringValues) isPredicate_Value() {} type IntValues struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Values []int32 `protobuf:"varint,1,rep,packed,name=values,proto3" json:"values,omitempty"` unknownFields protoimpl.UnknownFields - - Values []int32 `protobuf:"varint,1,rep,packed,name=values,proto3" json:"values,omitempty"` + sizeCache protoimpl.SizeCache } func (x *IntValues) Reset() { *x = IntValues{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_filter_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_filter_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *IntValues) String() string { @@ -311,7 +315,7 @@ func (*IntValues) ProtoMessage() {} func (x *IntValues) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_filter_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -334,20 +338,17 @@ func (x *IntValues) GetValues() []int32 { } type StringValues struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Values []string `protobuf:"bytes,2,rep,name=values,proto3" json:"values,omitempty"` unknownFields protoimpl.UnknownFields - - Values []string `protobuf:"bytes,2,rep,name=values,proto3" json:"values,omitempty"` + sizeCache protoimpl.SizeCache } func (x *StringValues) Reset() { *x = StringValues{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_filter_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_filter_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *StringValues) String() string { @@ -358,7 +359,7 @@ func (*StringValues) ProtoMessage() {} func (x *StringValues) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_filter_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -381,20 +382,17 @@ func (x *StringValues) GetValues() []string { } type LongValues struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Values []int64 `protobuf:"varint,3,rep,packed,name=values,proto3" json:"values,omitempty"` unknownFields protoimpl.UnknownFields - - Values []int64 `protobuf:"varint,3,rep,packed,name=values,proto3" json:"values,omitempty"` + sizeCache protoimpl.SizeCache } func (x *LongValues) Reset() { *x = LongValues{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_filter_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_filter_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *LongValues) String() string { @@ -405,7 +403,7 @@ func (*LongValues) ProtoMessage() {} func (x *LongValues) 
ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_filter_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -470,21 +468,18 @@ func (x *LongValues) GetValues() []int64 { // } // } type Filter struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // All predicates are AND-ed when this filter is applied. - Predicates []*Predicate `protobuf:"bytes,1,rep,name=predicates,proto3" json:"predicates,omitempty"` + Predicates []*Predicate `protobuf:"bytes,1,rep,name=predicates,proto3" json:"predicates,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Filter) Reset() { *x = Filter{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_filter_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_filter_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Filter) String() string { @@ -495,7 +490,7 @@ func (*Filter) ProtoMessage() {} func (x *Filter) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_filter_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -519,85 +514,64 @@ func (x *Filter) GetPredicates() []*Predicate { var File_backend_api_v1beta1_filter_proto protoreflect.FileDescriptor -var file_backend_api_v1beta1_filter_proto_rawDesc = []byte{ - 0x0a, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x12, 0x03, 0x61, 0x70, 0x69, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, - 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xae, 0x04, 0x0a, 0x09, 0x50, 0x72, 0x65, 0x64, 0x69, - 0x63, 0x61, 0x74, 0x65, 0x12, 0x21, 0x0a, 0x02, 0x6f, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x50, 0x72, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x65, - 0x2e, 0x4f, 0x70, 0x52, 0x02, 0x6f, 0x70, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x1d, 0x0a, 0x09, 0x69, 0x6e, 0x74, - 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x08, - 0x69, 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1f, 0x0a, 0x0a, 0x6c, 0x6f, 0x6e, 0x67, - 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x09, - 0x6c, 0x6f, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x23, 0x0a, 0x0c, 0x73, 0x74, 0x72, - 0x69, 0x6e, 0x67, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, - 0x00, 0x52, 0x0b, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x45, - 0x0a, 0x0f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x5f, 0x76, 0x61, 0x6c, 0x75, - 
0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, - 0x61, 0x6d, 0x70, 0x48, 0x00, 0x52, 0x0e, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x2f, 0x0a, 0x0a, 0x69, 0x6e, 0x74, 0x5f, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x49, 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x48, 0x00, 0x52, 0x09, 0x69, 0x6e, 0x74, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x32, 0x0a, 0x0b, 0x6c, 0x6f, 0x6e, 0x67, 0x5f, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x4c, 0x6f, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x48, 0x00, 0x52, 0x0a, - 0x6c, 0x6f, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x38, 0x0a, 0x0d, 0x73, 0x74, - 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x09, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x73, 0x48, 0x00, 0x52, 0x0c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x73, 0x22, 0x97, 0x01, 0x0a, 0x02, 0x4f, 0x70, 0x12, 0x0b, 0x0a, 0x07, 0x55, - 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x45, 0x51, 0x55, 0x41, - 0x4c, 0x53, 0x10, 0x01, 0x12, 0x0e, 0x0a, 0x0a, 0x4e, 0x4f, 0x54, 0x5f, 0x45, 0x51, 0x55, 0x41, - 0x4c, 0x53, 0x10, 0x02, 0x12, 0x10, 0x0a, 0x0c, 0x47, 0x52, 0x45, 0x41, 0x54, 0x45, 0x52, 0x5f, - 0x54, 0x48, 0x41, 0x4e, 0x10, 0x03, 0x12, 0x17, 0x0a, 0x13, 0x47, 0x52, 0x45, 0x41, 0x54, 0x45, - 0x52, 0x5f, 0x54, 0x48, 0x41, 0x4e, 0x5f, 0x45, 0x51, 0x55, 0x41, 0x4c, 0x53, 0x10, 0x05, 0x12, - 0x0d, 0x0a, 0x09, 0x4c, 0x45, 0x53, 0x53, 0x5f, 0x54, 0x48, 0x41, 0x4e, 0x10, 0x06, 0x12, 0x14, - 0x0a, 0x10, 0x4c, 0x45, 0x53, 0x53, 0x5f, 0x54, 0x48, 0x41, 0x4e, 0x5f, 0x45, 0x51, 0x55, 0x41, - 0x4c, 0x53, 0x10, 0x07, 0x12, 0x06, 0x0a, 0x02, 0x49, 0x4e, 0x10, 0x08, 0x12, 0x10, 0x0a, 0x0c, - 0x49, 0x53, 0x5f, 0x53, 0x55, 0x42, 0x53, 0x54, 0x52, 0x49, 0x4e, 0x47, 0x10, 0x09, 0x42, 0x07, - 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x23, 0x0a, 0x09, 0x49, 0x6e, 0x74, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x01, - 0x20, 0x03, 0x28, 0x05, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x22, 0x26, 0x0a, 0x0c, - 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x06, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x73, 0x22, 0x24, 0x0a, 0x0a, 0x4c, 0x6f, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, - 0x28, 0x03, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x22, 0x38, 0x0a, 0x06, 0x46, 0x69, - 0x6c, 0x74, 0x65, 0x72, 0x12, 0x2e, 0x0a, 0x0a, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, - 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x50, - 0x72, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x65, 0x52, 0x0a, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, - 0x61, 0x74, 0x65, 0x73, 0x32, 0x3d, 0x0a, 0x12, 0x44, 0x75, 0x6d, 0x6d, 0x79, 0x46, 0x69, 0x6c, - 0x74, 0x65, 0x72, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x27, 0x0a, 0x09, 0x47, 0x65, - 0x74, 0x46, 0x69, 0x6c, 
0x74, 0x65, 0x72, 0x12, 0x0b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x46, 0x69, - 0x6c, 0x74, 0x65, 0x72, 0x1a, 0x0b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x46, 0x69, 0x6c, 0x74, 0x65, - 0x72, 0x22, 0x00, 0x42, 0x3d, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, - 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, - 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x67, 0x6f, 0x5f, 0x63, 0x6c, 0x69, 0x65, - 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v1beta1_filter_proto_rawDesc = "" + + "\n" + + " backend/api/v1beta1/filter.proto\x12\x03api\x1a\x1fgoogle/protobuf/timestamp.proto\"\xae\x04\n" + + "\tPredicate\x12!\n" + + "\x02op\x18\x01 \x01(\x0e2\x11.api.Predicate.OpR\x02op\x12\x10\n" + + "\x03key\x18\x02 \x01(\tR\x03key\x12\x1d\n" + + "\tint_value\x18\x03 \x01(\x05H\x00R\bintValue\x12\x1f\n" + + "\n" + + "long_value\x18\x04 \x01(\x03H\x00R\tlongValue\x12#\n" + + "\fstring_value\x18\x05 \x01(\tH\x00R\vstringValue\x12E\n" + + "\x0ftimestamp_value\x18\x06 \x01(\v2\x1a.google.protobuf.TimestampH\x00R\x0etimestampValue\x12/\n" + + "\n" + + "int_values\x18\a \x01(\v2\x0e.api.IntValuesH\x00R\tintValues\x122\n" + + "\vlong_values\x18\b \x01(\v2\x0f.api.LongValuesH\x00R\n" + + "longValues\x128\n" + + "\rstring_values\x18\t \x01(\v2\x11.api.StringValuesH\x00R\fstringValues\"\x97\x01\n" + + "\x02Op\x12\v\n" + + "\aUNKNOWN\x10\x00\x12\n" + + "\n" + + "\x06EQUALS\x10\x01\x12\x0e\n" + + "\n" + + "NOT_EQUALS\x10\x02\x12\x10\n" + + "\fGREATER_THAN\x10\x03\x12\x17\n" + + "\x13GREATER_THAN_EQUALS\x10\x05\x12\r\n" + + "\tLESS_THAN\x10\x06\x12\x14\n" + + "\x10LESS_THAN_EQUALS\x10\a\x12\x06\n" + + "\x02IN\x10\b\x12\x10\n" + + "\fIS_SUBSTRING\x10\tB\a\n" + + "\x05value\"#\n" + + "\tIntValues\x12\x16\n" + + "\x06values\x18\x01 \x03(\x05R\x06values\"&\n" + + "\fStringValues\x12\x16\n" + + "\x06values\x18\x02 \x03(\tR\x06values\"$\n" + + "\n" + + "LongValues\x12\x16\n" + + "\x06values\x18\x03 \x03(\x03R\x06values\"8\n" + + "\x06Filter\x12.\n" + + "\n" + + "predicates\x18\x01 \x03(\v2\x0e.api.PredicateR\n" + + "predicates2=\n" + + "\x12DummyFilterService\x12'\n" + + "\tGetFilter\x12\v.api.Filter\x1a\v.api.Filter\"\x00B=Z;github.com/kubeflow/pipelines/backend/api/v1beta1/go_clientb\x06proto3" var ( file_backend_api_v1beta1_filter_proto_rawDescOnce sync.Once - file_backend_api_v1beta1_filter_proto_rawDescData = file_backend_api_v1beta1_filter_proto_rawDesc + file_backend_api_v1beta1_filter_proto_rawDescData []byte ) func file_backend_api_v1beta1_filter_proto_rawDescGZIP() []byte { file_backend_api_v1beta1_filter_proto_rawDescOnce.Do(func() { - file_backend_api_v1beta1_filter_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v1beta1_filter_proto_rawDescData) + file_backend_api_v1beta1_filter_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_filter_proto_rawDesc), len(file_backend_api_v1beta1_filter_proto_rawDesc))) }) return file_backend_api_v1beta1_filter_proto_rawDescData } var file_backend_api_v1beta1_filter_proto_enumTypes = make([]protoimpl.EnumInfo, 1) var file_backend_api_v1beta1_filter_proto_msgTypes = make([]protoimpl.MessageInfo, 5) -var file_backend_api_v1beta1_filter_proto_goTypes = []interface{}{ +var file_backend_api_v1beta1_filter_proto_goTypes = []any{ (Predicate_Op)(0), // 0: api.Predicate.Op (*Predicate)(nil), // 1: api.Predicate 
(*IntValues)(nil), // 2: api.IntValues @@ -627,69 +601,7 @@ func file_backend_api_v1beta1_filter_proto_init() { if File_backend_api_v1beta1_filter_proto != nil { return } - if !protoimpl.UnsafeEnabled { - file_backend_api_v1beta1_filter_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Predicate); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_filter_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*IntValues); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_filter_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StringValues); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_filter_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LongValues); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_filter_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Filter); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_backend_api_v1beta1_filter_proto_msgTypes[0].OneofWrappers = []interface{}{ + file_backend_api_v1beta1_filter_proto_msgTypes[0].OneofWrappers = []any{ (*Predicate_IntValue)(nil), (*Predicate_LongValue)(nil), (*Predicate_StringValue)(nil), @@ -702,7 +614,7 @@ func file_backend_api_v1beta1_filter_proto_init() { out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v1beta1_filter_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_filter_proto_rawDesc), len(file_backend_api_v1beta1_filter_proto_rawDesc)), NumEnums: 1, NumMessages: 5, NumExtensions: 0, @@ -714,87 +626,6 @@ func file_backend_api_v1beta1_filter_proto_init() { MessageInfos: file_backend_api_v1beta1_filter_proto_msgTypes, }.Build() File_backend_api_v1beta1_filter_proto = out.File - file_backend_api_v1beta1_filter_proto_rawDesc = nil file_backend_api_v1beta1_filter_proto_goTypes = nil file_backend_api_v1beta1_filter_proto_depIdxs = nil } - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// DummyFilterServiceClient is the client API for DummyFilterService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
-type DummyFilterServiceClient interface { - GetFilter(ctx context.Context, in *Filter, opts ...grpc.CallOption) (*Filter, error) -} - -type dummyFilterServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewDummyFilterServiceClient(cc grpc.ClientConnInterface) DummyFilterServiceClient { - return &dummyFilterServiceClient{cc} -} - -func (c *dummyFilterServiceClient) GetFilter(ctx context.Context, in *Filter, opts ...grpc.CallOption) (*Filter, error) { - out := new(Filter) - err := c.cc.Invoke(ctx, "/api.DummyFilterService/GetFilter", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// DummyFilterServiceServer is the server API for DummyFilterService service. -type DummyFilterServiceServer interface { - GetFilter(context.Context, *Filter) (*Filter, error) -} - -// UnimplementedDummyFilterServiceServer can be embedded to have forward compatible implementations. -type UnimplementedDummyFilterServiceServer struct { -} - -func (*UnimplementedDummyFilterServiceServer) GetFilter(context.Context, *Filter) (*Filter, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetFilter not implemented") -} - -func RegisterDummyFilterServiceServer(s *grpc.Server, srv DummyFilterServiceServer) { - s.RegisterService(&_DummyFilterService_serviceDesc, srv) -} - -func _DummyFilterService_GetFilter_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(Filter) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(DummyFilterServiceServer).GetFilter(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.DummyFilterService/GetFilter", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(DummyFilterServiceServer).GetFilter(ctx, req.(*Filter)) - } - return interceptor(ctx, in, info, handler) -} - -var _DummyFilterService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "api.DummyFilterService", - HandlerType: (*DummyFilterServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "GetFilter", - Handler: _DummyFilterService_GetFilter_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "backend/api/v1beta1/filter.proto", -} diff --git a/backend/api/v1beta1/go_client/filter_grpc.pb.go b/backend/api/v1beta1/go_client/filter_grpc.pb.go new file mode 100644 index 00000000000..a5cdbb5a07d --- /dev/null +++ b/backend/api/v1beta1/go_client/filter_grpc.pb.go @@ -0,0 +1,145 @@ +// Copyright 2018 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. 
+// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v6.31.1 +// source: backend/api/v1beta1/filter.proto + +package go_client + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + DummyFilterService_GetFilter_FullMethodName = "/api.DummyFilterService/GetFilter" +) + +// DummyFilterServiceClient is the client API for DummyFilterService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +// +// This dummy service is required so that grpc-gateway will generate Swagger +// definitions for the Filter message. Otherwise, it does not get generated +// since Filter itself is not used in any of the RPC calls - only a serialized +// encoded version of it is used. +type DummyFilterServiceClient interface { + GetFilter(ctx context.Context, in *Filter, opts ...grpc.CallOption) (*Filter, error) +} + +type dummyFilterServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewDummyFilterServiceClient(cc grpc.ClientConnInterface) DummyFilterServiceClient { + return &dummyFilterServiceClient{cc} +} + +func (c *dummyFilterServiceClient) GetFilter(ctx context.Context, in *Filter, opts ...grpc.CallOption) (*Filter, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Filter) + err := c.cc.Invoke(ctx, DummyFilterService_GetFilter_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DummyFilterServiceServer is the server API for DummyFilterService service. +// All implementations must embed UnimplementedDummyFilterServiceServer +// for forward compatibility. +// +// This dummy service is required so that grpc-gateway will generate Swagger +// definitions for the Filter message. Otherwise, it does not get generated +// since Filter itself is not used in any of the RPC calls - only a serialized +// encoded version of it is used. +type DummyFilterServiceServer interface { + GetFilter(context.Context, *Filter) (*Filter, error) + mustEmbedUnimplementedDummyFilterServiceServer() +} + +// UnimplementedDummyFilterServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. +type UnimplementedDummyFilterServiceServer struct{} + +func (UnimplementedDummyFilterServiceServer) GetFilter(context.Context, *Filter) (*Filter, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetFilter not implemented") +} +func (UnimplementedDummyFilterServiceServer) mustEmbedUnimplementedDummyFilterServiceServer() {} +func (UnimplementedDummyFilterServiceServer) testEmbeddedByValue() {} + +// UnsafeDummyFilterServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to DummyFilterServiceServer will +// result in compilation errors. 
+type UnsafeDummyFilterServiceServer interface {
+	mustEmbedUnimplementedDummyFilterServiceServer()
+}
+
+func RegisterDummyFilterServiceServer(s grpc.ServiceRegistrar, srv DummyFilterServiceServer) {
+	// If the following call panics, it indicates UnimplementedDummyFilterServiceServer was
+	// embedded by pointer and is nil.  This will cause panics if an
+	// unimplemented method is ever invoked, so we test this at initialization
+	// time to prevent it from happening at runtime later due to I/O.
+	if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
+		t.testEmbeddedByValue()
+	}
+	s.RegisterService(&DummyFilterService_ServiceDesc, srv)
+}
+
+func _DummyFilterService_GetFilter_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+	in := new(Filter)
+	if err := dec(in); err != nil {
+		return nil, err
+	}
+	if interceptor == nil {
+		return srv.(DummyFilterServiceServer).GetFilter(ctx, in)
+	}
+	info := &grpc.UnaryServerInfo{
+		Server:     srv,
+		FullMethod: DummyFilterService_GetFilter_FullMethodName,
+	}
+	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+		return srv.(DummyFilterServiceServer).GetFilter(ctx, req.(*Filter))
+	}
+	return interceptor(ctx, in, info, handler)
+}
+
+// DummyFilterService_ServiceDesc is the grpc.ServiceDesc for DummyFilterService service.
+// It's only intended for direct use with grpc.RegisterService,
+// and not to be introspected or modified (even as a copy)
+var DummyFilterService_ServiceDesc = grpc.ServiceDesc{
+	ServiceName: "api.DummyFilterService",
+	HandlerType: (*DummyFilterServiceServer)(nil),
+	Methods: []grpc.MethodDesc{
+		{
+			MethodName: "GetFilter",
+			Handler:    _DummyFilterService_GetFilter_Handler,
+		},
+	},
+	Streams:  []grpc.StreamDesc{},
+	Metadata: "backend/api/v1beta1/filter.proto",
+}
diff --git a/backend/api/v1beta1/go_client/healthz.pb.go b/backend/api/v1beta1/go_client/healthz.pb.go
index 2ffee3cb906..df2e91b38ba 100644
--- a/backend/api/v1beta1/go_client/healthz.pb.go
+++ b/backend/api/v1beta1/go_client/healthz.pb.go
@@ -14,24 +14,21 @@
 // Code generated by protoc-gen-go. DO NOT EDIT.
// versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v1beta1/healthz.proto package go_client import ( - context "context" - _ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options" + _ "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options" _ "google.golang.org/genproto/googleapis/api/annotations" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" emptypb "google.golang.org/protobuf/types/known/emptypb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -42,21 +39,18 @@ const ( ) type GetHealthzResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Returns if KFP in multi-user mode - MultiUser bool `protobuf:"varint,3,opt,name=multi_user,json=multiUser,proto3" json:"multi_user,omitempty"` + MultiUser bool `protobuf:"varint,3,opt,name=multi_user,json=multiUser,proto3" json:"multi_user,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *GetHealthzResponse) Reset() { *x = GetHealthzResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_healthz_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_healthz_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *GetHealthzResponse) String() string { @@ -67,7 +61,7 @@ func (*GetHealthzResponse) ProtoMessage() {} func (x *GetHealthzResponse) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_healthz_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -91,55 +85,37 @@ func (x *GetHealthzResponse) GetMultiUser() bool { var File_backend_api_v1beta1_healthz_proto protoreflect.FileDescriptor -var file_backend_api_v1beta1_healthz_proto_rawDesc = []byte{ - 0x0a, 0x21, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x68, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x7a, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x61, 0x70, 0x69, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x1a, 0x2c, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, 0x67, 0x65, 0x6e, 0x2d, - 0x73, 0x77, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, - 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x1a, 0x1f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, - 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x22, 0x33, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x7a, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 
0x75, 0x6c, 0x74, - 0x69, 0x5f, 0x75, 0x73, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x6d, 0x75, - 0x6c, 0x74, 0x69, 0x55, 0x73, 0x65, 0x72, 0x32, 0x6e, 0x0a, 0x0e, 0x48, 0x65, 0x61, 0x6c, 0x74, - 0x68, 0x7a, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x5c, 0x0a, 0x0a, 0x47, 0x65, 0x74, - 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x7a, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, - 0x17, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x7a, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x1d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x17, - 0x12, 0x15, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, - 0x68, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x7a, 0x42, 0x91, 0x01, 0x92, 0x41, 0x51, 0x2a, 0x02, 0x01, - 0x02, 0x52, 0x1c, 0x0a, 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x12, 0x11, 0x12, 0x0f, - 0x0a, 0x0d, 0x1a, 0x0b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x5a, - 0x1f, 0x0a, 0x1d, 0x0a, 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x12, 0x13, 0x08, 0x02, 0x1a, - 0x0d, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x02, - 0x62, 0x0c, 0x0a, 0x0a, 0x0a, 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x12, 0x00, 0x5a, 0x3b, - 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, - 0x6c, 0x6f, 0x77, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, - 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, - 0x31, 0x2f, 0x67, 0x6f, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x33, -} +const file_backend_api_v1beta1_healthz_proto_rawDesc = "" + + "\n" + + "!backend/api/v1beta1/healthz.proto\x12\x03api\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\"3\n" + + "\x12GetHealthzResponse\x12\x1d\n" + + "\n" + + "multi_user\x18\x03 \x01(\bR\tmultiUser2n\n" + + "\x0eHealthzService\x12\\\n" + + "\n" + + "GetHealthz\x12\x16.google.protobuf.Empty\x1a\x17.api.GetHealthzResponse\"\x1d\x82\xd3\xe4\x93\x02\x17\x12\x15/apis/v1beta1/healthzB\x91\x01\x92AQ*\x02\x01\x02R\x1c\n" + + "\adefault\x12\x11\x12\x0f\n" + + "\r\x1a\v.api.StatusZ\x1f\n" + + "\x1d\n" + + "\x06Bearer\x12\x13\b\x02\x1a\rauthorization \x02b\f\n" + + "\n" + + "\n" + + "\x06Bearer\x12\x00Z;github.com/kubeflow/pipelines/backend/api/v1beta1/go_clientb\x06proto3" var ( file_backend_api_v1beta1_healthz_proto_rawDescOnce sync.Once - file_backend_api_v1beta1_healthz_proto_rawDescData = file_backend_api_v1beta1_healthz_proto_rawDesc + file_backend_api_v1beta1_healthz_proto_rawDescData []byte ) func file_backend_api_v1beta1_healthz_proto_rawDescGZIP() []byte { file_backend_api_v1beta1_healthz_proto_rawDescOnce.Do(func() { - file_backend_api_v1beta1_healthz_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v1beta1_healthz_proto_rawDescData) + file_backend_api_v1beta1_healthz_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_healthz_proto_rawDesc), len(file_backend_api_v1beta1_healthz_proto_rawDesc))) }) return file_backend_api_v1beta1_healthz_proto_rawDescData } var file_backend_api_v1beta1_healthz_proto_msgTypes = make([]protoimpl.MessageInfo, 1) -var file_backend_api_v1beta1_healthz_proto_goTypes = []interface{}{ +var 
file_backend_api_v1beta1_healthz_proto_goTypes = []any{ (*GetHealthzResponse)(nil), // 0: api.GetHealthzResponse (*emptypb.Empty)(nil), // 1: google.protobuf.Empty } @@ -158,26 +134,11 @@ func file_backend_api_v1beta1_healthz_proto_init() { if File_backend_api_v1beta1_healthz_proto != nil { return } - file_backend_api_v1beta1_error_proto_init() - if !protoimpl.UnsafeEnabled { - file_backend_api_v1beta1_healthz_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetHealthzResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v1beta1_healthz_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_healthz_proto_rawDesc), len(file_backend_api_v1beta1_healthz_proto_rawDesc)), NumEnums: 0, NumMessages: 1, NumExtensions: 0, @@ -188,89 +149,6 @@ func file_backend_api_v1beta1_healthz_proto_init() { MessageInfos: file_backend_api_v1beta1_healthz_proto_msgTypes, }.Build() File_backend_api_v1beta1_healthz_proto = out.File - file_backend_api_v1beta1_healthz_proto_rawDesc = nil file_backend_api_v1beta1_healthz_proto_goTypes = nil file_backend_api_v1beta1_healthz_proto_depIdxs = nil } - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// HealthzServiceClient is the client API for HealthzService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type HealthzServiceClient interface { - // Get healthz data. - GetHealthz(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*GetHealthzResponse, error) -} - -type healthzServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewHealthzServiceClient(cc grpc.ClientConnInterface) HealthzServiceClient { - return &healthzServiceClient{cc} -} - -func (c *healthzServiceClient) GetHealthz(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*GetHealthzResponse, error) { - out := new(GetHealthzResponse) - err := c.cc.Invoke(ctx, "/api.HealthzService/GetHealthz", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// HealthzServiceServer is the server API for HealthzService service. -type HealthzServiceServer interface { - // Get healthz data. - GetHealthz(context.Context, *emptypb.Empty) (*GetHealthzResponse, error) -} - -// UnimplementedHealthzServiceServer can be embedded to have forward compatible implementations. 
-type UnimplementedHealthzServiceServer struct { -} - -func (*UnimplementedHealthzServiceServer) GetHealthz(context.Context, *emptypb.Empty) (*GetHealthzResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetHealthz not implemented") -} - -func RegisterHealthzServiceServer(s *grpc.Server, srv HealthzServiceServer) { - s.RegisterService(&_HealthzService_serviceDesc, srv) -} - -func _HealthzService_GetHealthz_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(emptypb.Empty) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(HealthzServiceServer).GetHealthz(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.HealthzService/GetHealthz", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(HealthzServiceServer).GetHealthz(ctx, req.(*emptypb.Empty)) - } - return interceptor(ctx, in, info, handler) -} - -var _HealthzService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "api.HealthzService", - HandlerType: (*HealthzServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "GetHealthz", - Handler: _HealthzService_GetHealthz_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "backend/api/v1beta1/healthz.proto", -} diff --git a/backend/api/v1beta1/go_client/healthz.pb.gw.go b/backend/api/v1beta1/go_client/healthz.pb.gw.go index 960de060c97..94f069393fa 100644 --- a/backend/api/v1beta1/go_client/healthz.pb.gw.go +++ b/backend/api/v1beta1/go_client/healthz.pb.gw.go @@ -10,75 +10,78 @@ package go_client import ( "context" + "errors" "io" "net/http" - "github.com/golang/protobuf/descriptor" - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" "google.golang.org/protobuf/types/known/emptypb" ) // Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = descriptor.ForMessage -var _ = metadata.Join +var ( + _ codes.Code + _ io.Reader + _ status.Status + _ = errors.New + _ = runtime.String + _ = utilities.NewDoubleArray + _ = metadata.Join +) func request_HealthzService_GetHealthz_0(ctx context.Context, marshaler runtime.Marshaler, client HealthzServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq emptypb.Empty - var metadata runtime.ServerMetadata - + var ( + protoReq emptypb.Empty + metadata runtime.ServerMetadata + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } msg, err := client.GetHealthz(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_HealthzService_GetHealthz_0(ctx context.Context, marshaler runtime.Marshaler, server HealthzServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq emptypb.Empty - var metadata runtime.ServerMetadata - + var ( + protoReq emptypb.Empty + metadata runtime.ServerMetadata + ) msg, 
err := server.GetHealthz(ctx, &protoReq) return msg, metadata, err - } // RegisterHealthzServiceHandlerServer registers the http handlers for service HealthzService to "mux". // UnaryRPC :call HealthzServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. // Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterHealthzServiceHandlerFromEndpoint instead. +// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call. func RegisterHealthzServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server HealthzServiceServer) error { - - mux.Handle("GET", pattern_HealthzService_GetHealthz_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_HealthzService_GetHealthz_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.HealthzService/GetHealthz", runtime.WithHTTPPathPattern("/apis/v1beta1/healthz")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_HealthzService_GetHealthz_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_HealthzService_GetHealthz_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_HealthzService_GetHealthz_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_HealthzService_GetHealthz_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) return nil @@ -87,25 +90,24 @@ func RegisterHealthzServiceHandlerServer(ctx context.Context, mux *runtime.Serve // RegisterHealthzServiceHandlerFromEndpoint is same as RegisterHealthzServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterHealthzServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) + conn, err := grpc.NewClient(endpoint, opts...) 
if err != nil { return err } defer func() { if err != nil { if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } return } go func() { <-ctx.Done() if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } }() }() - return RegisterHealthzServiceHandler(ctx, mux, conn) } @@ -119,34 +121,30 @@ func RegisterHealthzServiceHandler(ctx context.Context, mux *runtime.ServeMux, c // to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "HealthzServiceClient". // Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "HealthzServiceClient" // doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "HealthzServiceClient" to call the correct interceptors. +// "HealthzServiceClient" to call the correct interceptors. This client ignores the HTTP middlewares. func RegisterHealthzServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client HealthzServiceClient) error { - - mux.Handle("GET", pattern_HealthzService_GetHealthz_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_HealthzService_GetHealthz_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.HealthzService/GetHealthz", runtime.WithHTTPPathPattern("/apis/v1beta1/healthz")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_HealthzService_GetHealthz_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_HealthzService_GetHealthz_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_HealthzService_GetHealthz_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_HealthzService_GetHealthz_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - return nil } var ( - pattern_HealthzService_GetHealthz_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "healthz"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_HealthzService_GetHealthz_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "healthz"}, "")) ) var ( diff --git a/backend/api/v1beta1/go_client/healthz_grpc.pb.go b/backend/api/v1beta1/go_client/healthz_grpc.pb.go new file mode 100644 index 00000000000..8e934dbe508 --- /dev/null +++ b/backend/api/v1beta1/go_client/healthz_grpc.pb.go @@ -0,0 +1,138 @@ +// Copyright 2020 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v6.31.1 +// source: backend/api/v1beta1/healthz.proto + +package go_client + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + emptypb "google.golang.org/protobuf/types/known/emptypb" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + HealthzService_GetHealthz_FullMethodName = "/api.HealthzService/GetHealthz" +) + +// HealthzServiceClient is the client API for HealthzService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type HealthzServiceClient interface { + // Get healthz data. + GetHealthz(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*GetHealthzResponse, error) +} + +type healthzServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewHealthzServiceClient(cc grpc.ClientConnInterface) HealthzServiceClient { + return &healthzServiceClient{cc} +} + +func (c *healthzServiceClient) GetHealthz(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*GetHealthzResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(GetHealthzResponse) + err := c.cc.Invoke(ctx, HealthzService_GetHealthz_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// HealthzServiceServer is the server API for HealthzService service. +// All implementations must embed UnimplementedHealthzServiceServer +// for forward compatibility. +type HealthzServiceServer interface { + // Get healthz data. + GetHealthz(context.Context, *emptypb.Empty) (*GetHealthzResponse, error) + mustEmbedUnimplementedHealthzServiceServer() +} + +// UnimplementedHealthzServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. +type UnimplementedHealthzServiceServer struct{} + +func (UnimplementedHealthzServiceServer) GetHealthz(context.Context, *emptypb.Empty) (*GetHealthzResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetHealthz not implemented") +} +func (UnimplementedHealthzServiceServer) mustEmbedUnimplementedHealthzServiceServer() {} +func (UnimplementedHealthzServiceServer) testEmbeddedByValue() {} + +// UnsafeHealthzServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to HealthzServiceServer will +// result in compilation errors. 
+type UnsafeHealthzServiceServer interface { + mustEmbedUnimplementedHealthzServiceServer() +} + +func RegisterHealthzServiceServer(s grpc.ServiceRegistrar, srv HealthzServiceServer) { + // If the following call pancis, it indicates UnimplementedHealthzServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. + if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&HealthzService_ServiceDesc, srv) +} + +func _HealthzService_GetHealthz_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(emptypb.Empty) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(HealthzServiceServer).GetHealthz(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: HealthzService_GetHealthz_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(HealthzServiceServer).GetHealthz(ctx, req.(*emptypb.Empty)) + } + return interceptor(ctx, in, info, handler) +} + +// HealthzService_ServiceDesc is the grpc.ServiceDesc for HealthzService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var HealthzService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "api.HealthzService", + HandlerType: (*HealthzServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetHealthz", + Handler: _HealthzService_GetHealthz_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "backend/api/v1beta1/healthz.proto", +} diff --git a/backend/api/v1beta1/go_client/job.pb.go b/backend/api/v1beta1/go_client/job.pb.go index 93e56b7f7b3..78ef93d97cb 100644 --- a/backend/api/v1beta1/go_client/job.pb.go +++ b/backend/api/v1beta1/go_client/job.pb.go @@ -14,25 +14,22 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
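The regenerated healthz_grpc.pb.go above keeps the HealthzService API surface unchanged; callers only need to swap the deprecated grpc.Dial for grpc.NewClient, which the new gateway code already does internally. A minimal usage sketch of the regenerated client, assuming an API server gRPC endpoint at localhost:8887 and insecure transport credentials — both illustrative, not taken from this PR:

package main

import (
	"context"
	"log"
	"time"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
	"google.golang.org/protobuf/types/known/emptypb"

	api "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client"
)

func main() {
	// grpc.NewClient replaces the grpc.Dial call used by the pre-v1.5.x generated code.
	conn, err := grpc.NewClient("localhost:8887", grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatalf("failed to create gRPC client: %v", err)
	}
	defer conn.Close()

	client := api.NewHealthzServiceClient(conn)
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	// GetHealthz takes an empty request and returns the server's health/configuration info.
	resp, err := client.GetHealthz(ctx, &emptypb.Empty{})
	if err != nil {
		log.Fatalf("GetHealthz failed: %v", err)
	}
	log.Printf("healthz response: %+v", resp)
}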
// versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v1beta1/job.proto package go_client import ( - context "context" - _ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options" + _ "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options" _ "google.golang.org/genproto/googleapis/api/annotations" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" emptypb "google.golang.org/protobuf/types/known/emptypb" timestamppb "google.golang.org/protobuf/types/known/timestamppb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -94,21 +91,18 @@ func (Job_Mode) EnumDescriptor() ([]byte, []int) { } type CreateJobRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The job to be created - Job *Job `protobuf:"bytes,1,opt,name=job,proto3" json:"job,omitempty"` + Job *Job `protobuf:"bytes,1,opt,name=job,proto3" json:"job,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *CreateJobRequest) Reset() { *x = CreateJobRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_job_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_job_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *CreateJobRequest) String() string { @@ -119,7 +113,7 @@ func (*CreateJobRequest) ProtoMessage() {} func (x *CreateJobRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_job_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -142,21 +136,18 @@ func (x *CreateJobRequest) GetJob() *Job { } type GetJobRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the job to be retrieved - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *GetJobRequest) Reset() { *x = GetJobRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_job_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_job_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *GetJobRequest) String() string { @@ -167,7 +158,7 @@ func (*GetJobRequest) ProtoMessage() {} func (x *GetJobRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_job_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -190,10 +181,7 @@ func (x *GetJobRequest) GetId() string { } type ListJobsRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - 
+ state protoimpl.MessageState `protogen:"open.v1"` // A page token to request the next page of results. The token is acquried // from the nextPageToken field of the response from the previous // ListJobs call or can be omitted when fetching the first page. @@ -211,16 +199,16 @@ type ListJobsRequest struct { ResourceReferenceKey *ResourceKey `protobuf:"bytes,4,opt,name=resource_reference_key,json=resourceReferenceKey,proto3" json:"resource_reference_key,omitempty"` // A url-encoded, JSON-serialized Filter protocol buffer (see // [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/v1beta1/filter.proto)). - Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListJobsRequest) Reset() { *x = ListJobsRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_job_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_job_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListJobsRequest) String() string { @@ -231,7 +219,7 @@ func (*ListJobsRequest) ProtoMessage() {} func (x *ListJobsRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_job_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -282,25 +270,22 @@ func (x *ListJobsRequest) GetFilter() string { } type ListJobsResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // A list of jobs returned. Jobs []*Job `protobuf:"bytes,1,rep,name=jobs,proto3" json:"jobs,omitempty"` // The total number of jobs for the given query. TotalSize int32 `protobuf:"varint,3,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` // The token to list the next page of jobs. 
NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListJobsResponse) Reset() { *x = ListJobsResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_job_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_job_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListJobsResponse) String() string { @@ -311,7 +296,7 @@ func (*ListJobsResponse) ProtoMessage() {} func (x *ListJobsResponse) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_job_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -348,21 +333,18 @@ func (x *ListJobsResponse) GetNextPageToken() string { } type DeleteJobRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the job to be deleted - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DeleteJobRequest) Reset() { *x = DeleteJobRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_job_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_job_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DeleteJobRequest) String() string { @@ -373,7 +355,7 @@ func (*DeleteJobRequest) ProtoMessage() {} func (x *DeleteJobRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_job_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -396,21 +378,18 @@ func (x *DeleteJobRequest) GetId() string { } type EnableJobRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the job to be enabled - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *EnableJobRequest) Reset() { *x = EnableJobRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_job_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_job_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *EnableJobRequest) String() string { @@ -421,7 +400,7 @@ func (*EnableJobRequest) ProtoMessage() {} func (x *EnableJobRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_job_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ 
-444,21 +423,18 @@ func (x *EnableJobRequest) GetId() string { } type DisableJobRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the job to be disabled - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DisableJobRequest) Reset() { *x = DisableJobRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_job_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_job_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DisableJobRequest) String() string { @@ -469,7 +445,7 @@ func (*DisableJobRequest) ProtoMessage() {} func (x *DisableJobRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_job_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -493,26 +469,23 @@ func (x *DisableJobRequest) GetId() string { // CronSchedule allow scheduling the job with unix-like cron type CronSchedule struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The start time of the cron job StartTime *timestamppb.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` // The end time of the cron job EndTime *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` // The cron string. 
For details how to compose a cron, visit // ttps://en.wikipedia.org/wiki/Cron - Cron string `protobuf:"bytes,3,opt,name=cron,proto3" json:"cron,omitempty"` + Cron string `protobuf:"bytes,3,opt,name=cron,proto3" json:"cron,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *CronSchedule) Reset() { *x = CronSchedule{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_job_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_job_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *CronSchedule) String() string { @@ -523,7 +496,7 @@ func (*CronSchedule) ProtoMessage() {} func (x *CronSchedule) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_job_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -561,25 +534,22 @@ func (x *CronSchedule) GetCron() string { // PeriodicSchedule allow scheduling the job periodically with certain interval type PeriodicSchedule struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The start time of the periodic job StartTime *timestamppb.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` // The end time of the periodic job EndTime *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` // The time interval between the starting time of consecutive jobs IntervalSecond int64 `protobuf:"varint,3,opt,name=interval_second,json=intervalSecond,proto3" json:"interval_second,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PeriodicSchedule) Reset() { *x = PeriodicSchedule{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_job_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_job_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PeriodicSchedule) String() string { @@ -590,7 +560,7 @@ func (*PeriodicSchedule) ProtoMessage() {} func (x *PeriodicSchedule) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_job_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -628,24 +598,21 @@ func (x *PeriodicSchedule) GetIntervalSecond() int64 { // Trigger defines what starts a pipeline run. 
type Trigger struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Trigger: + state protoimpl.MessageState `protogen:"open.v1"` + // Types that are valid to be assigned to Trigger: // // *Trigger_CronSchedule // *Trigger_PeriodicSchedule - Trigger isTrigger_Trigger `protobuf_oneof:"trigger"` + Trigger isTrigger_Trigger `protobuf_oneof:"trigger"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Trigger) Reset() { *x = Trigger{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_job_proto_msgTypes[9] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_job_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Trigger) String() string { @@ -656,7 +623,7 @@ func (*Trigger) ProtoMessage() {} func (x *Trigger) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_job_proto_msgTypes[9] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -671,23 +638,27 @@ func (*Trigger) Descriptor() ([]byte, []int) { return file_backend_api_v1beta1_job_proto_rawDescGZIP(), []int{9} } -func (m *Trigger) GetTrigger() isTrigger_Trigger { - if m != nil { - return m.Trigger +func (x *Trigger) GetTrigger() isTrigger_Trigger { + if x != nil { + return x.Trigger } return nil } func (x *Trigger) GetCronSchedule() *CronSchedule { - if x, ok := x.GetTrigger().(*Trigger_CronSchedule); ok { - return x.CronSchedule + if x != nil { + if x, ok := x.Trigger.(*Trigger_CronSchedule); ok { + return x.CronSchedule + } } return nil } func (x *Trigger) GetPeriodicSchedule() *PeriodicSchedule { - if x, ok := x.GetTrigger().(*Trigger_PeriodicSchedule); ok { - return x.PeriodicSchedule + if x != nil { + if x, ok := x.Trigger.(*Trigger_PeriodicSchedule); ok { + return x.PeriodicSchedule + } } return nil } @@ -709,10 +680,7 @@ func (*Trigger_CronSchedule) isTrigger_Trigger() {} func (*Trigger_PeriodicSchedule) isTrigger_Trigger() {} type Job struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Output. Unique run ID. Generated by API server. Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` // Required input field. Job name provided by user. Not unique. @@ -750,16 +718,16 @@ type Job struct { // Optional input field. Whether the job should catch up if behind schedule. // If true, the job will only schedule the latest interval if behind schedule. // If false, the job will catch up on each past interval. 
- NoCatchup bool `protobuf:"varint,17,opt,name=no_catchup,json=noCatchup,proto3" json:"no_catchup,omitempty"` + NoCatchup bool `protobuf:"varint,17,opt,name=no_catchup,json=noCatchup,proto3" json:"no_catchup,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Job) Reset() { *x = Job{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_job_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_job_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Job) String() string { @@ -770,7 +738,7 @@ func (*Job) ProtoMessage() {} func (x *Job) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_job_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -892,189 +860,101 @@ func (x *Job) GetNoCatchup() bool { var File_backend_api_v1beta1_job_proto protoreflect.FileDescriptor -var file_backend_api_v1beta1_job_proto_rawDesc = []byte{ - 0x0a, 0x1d, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x6a, 0x6f, 0x62, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, - 0x03, 0x61, 0x70, 0x69, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, - 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x1a, 0x27, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x73, - 0x70, 0x65, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2c, 0x62, 0x61, 0x63, 0x6b, 0x65, - 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2c, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, - 0x67, 0x65, 0x6e, 0x2d, 0x73, 0x77, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2f, 0x6f, 0x70, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, - 0x70, 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x2e, 0x0a, 0x10, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, - 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x03, 0x6a, 0x6f, - 0x62, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x08, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4a, 0x6f, - 0x62, 0x52, 0x03, 0x6a, 0x6f, 0x62, 0x22, 0x1f, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x4a, 0x6f, 0x62, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0xc6, 0x01, 0x0a, 0x0f, 0x4c, 0x69, 0x73, 0x74, - 0x4a, 
0x6f, 0x62, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x70, - 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x09, 0x70, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x61, - 0x67, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x70, - 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x73, 0x6f, 0x72, 0x74, 0x5f, - 0x62, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x72, 0x74, 0x42, 0x79, - 0x12, 0x46, 0x0a, 0x16, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x72, 0x65, 0x66, - 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x10, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4b, - 0x65, 0x79, 0x52, 0x14, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x66, 0x65, - 0x72, 0x65, 0x6e, 0x63, 0x65, 0x4b, 0x65, 0x79, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, - 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, - 0x22, 0x77, 0x0a, 0x10, 0x4c, 0x69, 0x73, 0x74, 0x4a, 0x6f, 0x62, 0x73, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1c, 0x0a, 0x04, 0x6a, 0x6f, 0x62, 0x73, 0x18, 0x01, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x08, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4a, 0x6f, 0x62, 0x52, 0x04, 0x6a, 0x6f, - 0x62, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x5f, 0x73, 0x69, 0x7a, 0x65, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x53, 0x69, 0x7a, - 0x65, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, - 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, - 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x22, 0x0a, 0x10, 0x44, 0x65, 0x6c, - 0x65, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, - 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x22, 0x0a, - 0x10, 0x45, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, - 0x64, 0x22, 0x23, 0x0a, 0x11, 0x44, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x4a, 0x6f, 0x62, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x94, 0x01, 0x0a, 0x0c, 0x43, 0x72, 0x6f, 0x6e, 0x53, - 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x12, 0x39, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, - 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, - 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x54, 0x69, - 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x52, 0x07, 0x65, 0x6e, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x72, 0x6f, - 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x63, 0x72, 0x6f, 0x6e, 0x22, 0xad, 0x01, - 0x0a, 0x10, 0x50, 0x65, 0x72, 
0x69, 0x6f, 0x64, 0x69, 0x63, 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, - 0x6c, 0x65, 0x12, 0x39, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, - 0x6d, 0x70, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x35, 0x0a, - 0x08, 0x65, 0x6e, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, - 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x07, 0x65, 0x6e, 0x64, - 0x54, 0x69, 0x6d, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, - 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0e, 0x69, - 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x53, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x22, 0x94, 0x01, - 0x0a, 0x07, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x12, 0x38, 0x0a, 0x0d, 0x63, 0x72, 0x6f, - 0x6e, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x72, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x64, - 0x75, 0x6c, 0x65, 0x48, 0x00, 0x52, 0x0c, 0x63, 0x72, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x64, - 0x75, 0x6c, 0x65, 0x12, 0x44, 0x0a, 0x11, 0x70, 0x65, 0x72, 0x69, 0x6f, 0x64, 0x69, 0x63, 0x5f, - 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x50, 0x65, 0x72, 0x69, 0x6f, 0x64, 0x69, 0x63, 0x53, 0x63, 0x68, - 0x65, 0x64, 0x75, 0x6c, 0x65, 0x48, 0x00, 0x52, 0x10, 0x70, 0x65, 0x72, 0x69, 0x6f, 0x64, 0x69, - 0x63, 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x42, 0x09, 0x0a, 0x07, 0x74, 0x72, 0x69, - 0x67, 0x67, 0x65, 0x72, 0x22, 0xfb, 0x04, 0x0a, 0x03, 0x4a, 0x6f, 0x62, 0x12, 0x0e, 0x0a, 0x02, - 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, - 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, - 0x6f, 0x6e, 0x12, 0x36, 0x0a, 0x0d, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x73, - 0x70, 0x65, 0x63, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x53, 0x70, 0x65, 0x63, 0x52, 0x0c, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x53, 0x70, 0x65, 0x63, 0x12, 0x47, 0x0a, 0x13, 0x72, 0x65, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, - 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x65, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, - 0x12, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, - 0x63, 0x65, 0x73, 0x12, 0x27, 0x0a, 0x0f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x5f, 0x61, - 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x12, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x73, 0x65, - 0x72, 0x76, 0x69, 0x63, 0x65, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x27, 0x0a, 0x0f, - 0x6d, 0x61, 0x78, 0x5f, 0x63, 0x6f, 0x6e, 0x63, 0x75, 
0x72, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x18, - 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0e, 0x6d, 0x61, 0x78, 0x43, 0x6f, 0x6e, 0x63, 0x75, 0x72, - 0x72, 0x65, 0x6e, 0x63, 0x79, 0x12, 0x26, 0x0a, 0x07, 0x74, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, - 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x69, - 0x67, 0x67, 0x65, 0x72, 0x52, 0x07, 0x74, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x12, 0x21, 0x0a, - 0x04, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0d, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x4a, 0x6f, 0x62, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x52, 0x04, 0x6d, 0x6f, 0x64, 0x65, - 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x09, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x39, 0x0a, 0x0a, 0x75, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, - 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x75, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, - 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x14, - 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, - 0x72, 0x72, 0x6f, 0x72, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x18, - 0x10, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x12, 0x1d, - 0x0a, 0x0a, 0x6e, 0x6f, 0x5f, 0x63, 0x61, 0x74, 0x63, 0x68, 0x75, 0x70, 0x18, 0x11, 0x20, 0x01, - 0x28, 0x08, 0x52, 0x09, 0x6e, 0x6f, 0x43, 0x61, 0x74, 0x63, 0x68, 0x75, 0x70, 0x22, 0x33, 0x0a, - 0x04, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x10, 0x0a, 0x0c, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, - 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x45, 0x4e, 0x41, 0x42, 0x4c, - 0x45, 0x44, 0x10, 0x01, 0x12, 0x0c, 0x0a, 0x08, 0x44, 0x49, 0x53, 0x41, 0x42, 0x4c, 0x45, 0x44, - 0x10, 0x02, 0x32, 0xa1, 0x04, 0x0a, 0x0a, 0x4a, 0x6f, 0x62, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, - 0x65, 0x12, 0x4d, 0x0a, 0x09, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x15, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x08, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4a, 0x6f, 0x62, 0x22, - 0x1f, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x19, 0x3a, 0x03, 0x6a, 0x6f, 0x62, 0x22, 0x12, 0x2f, 0x61, - 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x6a, 0x6f, 0x62, 0x73, - 0x12, 0x47, 0x0a, 0x06, 0x47, 0x65, 0x74, 0x4a, 0x6f, 0x62, 0x12, 0x12, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x47, 0x65, 0x74, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x08, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4a, 0x6f, 0x62, 0x22, 0x1f, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x19, - 0x12, 0x17, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, - 0x6a, 0x6f, 0x62, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x12, 0x53, 0x0a, 0x08, 0x4c, 0x69, 0x73, - 0x74, 0x4a, 0x6f, 0x62, 0x73, 0x12, 0x14, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, - 0x4a, 0x6f, 0x62, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 
0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x4a, 0x6f, 0x62, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x22, 0x1a, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x14, 0x12, 0x12, 0x2f, 0x61, 0x70, 0x69, - 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x6a, 0x6f, 0x62, 0x73, 0x12, 0x62, - 0x0a, 0x09, 0x45, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x15, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x45, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x26, 0x82, 0xd3, 0xe4, 0x93, - 0x02, 0x20, 0x22, 0x1e, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, - 0x31, 0x2f, 0x6a, 0x6f, 0x62, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x65, 0x6e, 0x61, 0x62, - 0x6c, 0x65, 0x12, 0x65, 0x0a, 0x0a, 0x44, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x4a, 0x6f, 0x62, - 0x12, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x44, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x4a, 0x6f, - 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, - 0x22, 0x27, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x21, 0x22, 0x1f, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, - 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x6a, 0x6f, 0x62, 0x73, 0x2f, 0x7b, 0x69, 0x64, - 0x7d, 0x2f, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x5b, 0x0a, 0x09, 0x44, 0x65, 0x6c, - 0x65, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x15, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x44, 0x65, 0x6c, - 0x65, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, - 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x1f, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x19, 0x2a, 0x17, 0x2f, - 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x6a, 0x6f, 0x62, - 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x42, 0x91, 0x01, 0x92, 0x41, 0x51, 0x2a, 0x02, 0x01, 0x02, - 0x52, 0x1c, 0x0a, 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x12, 0x11, 0x12, 0x0f, 0x0a, - 0x0d, 0x1a, 0x0b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x5a, 0x1f, - 0x0a, 0x1d, 0x0a, 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x12, 0x13, 0x08, 0x02, 0x1a, 0x0d, - 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x02, 0x62, - 0x0c, 0x0a, 0x0a, 0x0a, 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x12, 0x00, 0x5a, 0x3b, 0x67, - 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, - 0x6f, 0x77, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, 0x63, - 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2f, 0x67, 0x6f, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x33, -} +const file_backend_api_v1beta1_job_proto_rawDesc = "" + + "\n" + + "\x1dbackend/api/v1beta1/job.proto\x12\x03api\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a'backend/api/v1beta1/pipeline_spec.proto\x1a,backend/api/v1beta1/resource_reference.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\".\n" + + "\x10CreateJobRequest\x12\x1a\n" + + "\x03job\x18\x01 \x01(\v2\b.api.JobR\x03job\"\x1f\n" + + 
"\rGetJobRequest\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\"\xc6\x01\n" + + "\x0fListJobsRequest\x12\x1d\n" + + "\n" + + "page_token\x18\x01 \x01(\tR\tpageToken\x12\x1b\n" + + "\tpage_size\x18\x02 \x01(\x05R\bpageSize\x12\x17\n" + + "\asort_by\x18\x03 \x01(\tR\x06sortBy\x12F\n" + + "\x16resource_reference_key\x18\x04 \x01(\v2\x10.api.ResourceKeyR\x14resourceReferenceKey\x12\x16\n" + + "\x06filter\x18\x05 \x01(\tR\x06filter\"w\n" + + "\x10ListJobsResponse\x12\x1c\n" + + "\x04jobs\x18\x01 \x03(\v2\b.api.JobR\x04jobs\x12\x1d\n" + + "\n" + + "total_size\x18\x03 \x01(\x05R\ttotalSize\x12&\n" + + "\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\"\"\n" + + "\x10DeleteJobRequest\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\"\"\n" + + "\x10EnableJobRequest\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\"#\n" + + "\x11DisableJobRequest\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\"\x94\x01\n" + + "\fCronSchedule\x129\n" + + "\n" + + "start_time\x18\x01 \x01(\v2\x1a.google.protobuf.TimestampR\tstartTime\x125\n" + + "\bend_time\x18\x02 \x01(\v2\x1a.google.protobuf.TimestampR\aendTime\x12\x12\n" + + "\x04cron\x18\x03 \x01(\tR\x04cron\"\xad\x01\n" + + "\x10PeriodicSchedule\x129\n" + + "\n" + + "start_time\x18\x01 \x01(\v2\x1a.google.protobuf.TimestampR\tstartTime\x125\n" + + "\bend_time\x18\x02 \x01(\v2\x1a.google.protobuf.TimestampR\aendTime\x12'\n" + + "\x0finterval_second\x18\x03 \x01(\x03R\x0eintervalSecond\"\x94\x01\n" + + "\aTrigger\x128\n" + + "\rcron_schedule\x18\x01 \x01(\v2\x11.api.CronScheduleH\x00R\fcronSchedule\x12D\n" + + "\x11periodic_schedule\x18\x02 \x01(\v2\x15.api.PeriodicScheduleH\x00R\x10periodicScheduleB\t\n" + + "\atrigger\"\xfb\x04\n" + + "\x03Job\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n" + + "\x04name\x18\x02 \x01(\tR\x04name\x12 \n" + + "\vdescription\x18\x03 \x01(\tR\vdescription\x126\n" + + "\rpipeline_spec\x18\x04 \x01(\v2\x11.api.PipelineSpecR\fpipelineSpec\x12G\n" + + "\x13resource_references\x18\x05 \x03(\v2\x16.api.ResourceReferenceR\x12resourceReferences\x12'\n" + + "\x0fservice_account\x18\x12 \x01(\tR\x0eserviceAccount\x12'\n" + + "\x0fmax_concurrency\x18\x06 \x01(\x03R\x0emaxConcurrency\x12&\n" + + "\atrigger\x18\a \x01(\v2\f.api.TriggerR\atrigger\x12!\n" + + "\x04mode\x18\b \x01(\x0e2\r.api.Job.ModeR\x04mode\x129\n" + + "\n" + + "created_at\x18\t \x01(\v2\x1a.google.protobuf.TimestampR\tcreatedAt\x129\n" + + "\n" + + "updated_at\x18\n" + + " \x01(\v2\x1a.google.protobuf.TimestampR\tupdatedAt\x12\x16\n" + + "\x06status\x18\v \x01(\tR\x06status\x12\x14\n" + + "\x05error\x18\f \x01(\tR\x05error\x12\x18\n" + + "\aenabled\x18\x10 \x01(\bR\aenabled\x12\x1d\n" + + "\n" + + "no_catchup\x18\x11 \x01(\bR\tnoCatchup\"3\n" + + "\x04Mode\x12\x10\n" + + "\fUNKNOWN_MODE\x10\x00\x12\v\n" + + "\aENABLED\x10\x01\x12\f\n" + + "\bDISABLED\x10\x022\xa1\x04\n" + + "\n" + + "JobService\x12M\n" + + "\tCreateJob\x12\x15.api.CreateJobRequest\x1a\b.api.Job\"\x1f\x82\xd3\xe4\x93\x02\x19:\x03job\"\x12/apis/v1beta1/jobs\x12G\n" + + "\x06GetJob\x12\x12.api.GetJobRequest\x1a\b.api.Job\"\x1f\x82\xd3\xe4\x93\x02\x19\x12\x17/apis/v1beta1/jobs/{id}\x12S\n" + + "\bListJobs\x12\x14.api.ListJobsRequest\x1a\x15.api.ListJobsResponse\"\x1a\x82\xd3\xe4\x93\x02\x14\x12\x12/apis/v1beta1/jobs\x12b\n" + + "\tEnableJob\x12\x15.api.EnableJobRequest\x1a\x16.google.protobuf.Empty\"&\x82\xd3\xe4\x93\x02 \"\x1e/apis/v1beta1/jobs/{id}/enable\x12e\n" + + "\n" + + 
"DisableJob\x12\x16.api.DisableJobRequest\x1a\x16.google.protobuf.Empty\"'\x82\xd3\xe4\x93\x02!\"\x1f/apis/v1beta1/jobs/{id}/disable\x12[\n" + + "\tDeleteJob\x12\x15.api.DeleteJobRequest\x1a\x16.google.protobuf.Empty\"\x1f\x82\xd3\xe4\x93\x02\x19*\x17/apis/v1beta1/jobs/{id}B\x91\x01\x92AQ*\x02\x01\x02R\x1c\n" + + "\adefault\x12\x11\x12\x0f\n" + + "\r\x1a\v.api.StatusZ\x1f\n" + + "\x1d\n" + + "\x06Bearer\x12\x13\b\x02\x1a\rauthorization \x02b\f\n" + + "\n" + + "\n" + + "\x06Bearer\x12\x00Z;github.com/kubeflow/pipelines/backend/api/v1beta1/go_clientb\x06proto3" var ( file_backend_api_v1beta1_job_proto_rawDescOnce sync.Once - file_backend_api_v1beta1_job_proto_rawDescData = file_backend_api_v1beta1_job_proto_rawDesc + file_backend_api_v1beta1_job_proto_rawDescData []byte ) func file_backend_api_v1beta1_job_proto_rawDescGZIP() []byte { file_backend_api_v1beta1_job_proto_rawDescOnce.Do(func() { - file_backend_api_v1beta1_job_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v1beta1_job_proto_rawDescData) + file_backend_api_v1beta1_job_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_job_proto_rawDesc), len(file_backend_api_v1beta1_job_proto_rawDesc))) }) return file_backend_api_v1beta1_job_proto_rawDescData } var file_backend_api_v1beta1_job_proto_enumTypes = make([]protoimpl.EnumInfo, 1) var file_backend_api_v1beta1_job_proto_msgTypes = make([]protoimpl.MessageInfo, 11) -var file_backend_api_v1beta1_job_proto_goTypes = []interface{}{ +var file_backend_api_v1beta1_job_proto_goTypes = []any{ (Job_Mode)(0), // 0: api.Job.Mode (*CreateJobRequest)(nil), // 1: api.CreateJobRequest (*GetJobRequest)(nil), // 2: api.GetJobRequest @@ -1135,142 +1015,7 @@ func file_backend_api_v1beta1_job_proto_init() { } file_backend_api_v1beta1_pipeline_spec_proto_init() file_backend_api_v1beta1_resource_reference_proto_init() - file_backend_api_v1beta1_error_proto_init() - if !protoimpl.UnsafeEnabled { - file_backend_api_v1beta1_job_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateJobRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_job_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetJobRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_job_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListJobsRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_job_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListJobsResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_job_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeleteJobRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_job_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*EnableJobRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields 
- default: - return nil - } - } - file_backend_api_v1beta1_job_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DisableJobRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_job_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CronSchedule); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_job_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PeriodicSchedule); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_job_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Trigger); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_job_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Job); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_backend_api_v1beta1_job_proto_msgTypes[9].OneofWrappers = []interface{}{ + file_backend_api_v1beta1_job_proto_msgTypes[9].OneofWrappers = []any{ (*Trigger_CronSchedule)(nil), (*Trigger_PeriodicSchedule)(nil), } @@ -1278,7 +1023,7 @@ func file_backend_api_v1beta1_job_proto_init() { out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v1beta1_job_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_job_proto_rawDesc), len(file_backend_api_v1beta1_job_proto_rawDesc)), NumEnums: 1, NumMessages: 11, NumExtensions: 0, @@ -1290,279 +1035,6 @@ func file_backend_api_v1beta1_job_proto_init() { MessageInfos: file_backend_api_v1beta1_job_proto_msgTypes, }.Build() File_backend_api_v1beta1_job_proto = out.File - file_backend_api_v1beta1_job_proto_rawDesc = nil file_backend_api_v1beta1_job_proto_goTypes = nil file_backend_api_v1beta1_job_proto_depIdxs = nil } - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// JobServiceClient is the client API for JobService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type JobServiceClient interface { - // Creates a new job. - CreateJob(ctx context.Context, in *CreateJobRequest, opts ...grpc.CallOption) (*Job, error) - // Finds a specific job by ID. - GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) - // Finds all jobs. - ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) - // Restarts a job that was previously stopped. All runs associated with the job will continue. - EnableJob(ctx context.Context, in *EnableJobRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - // Stops a job and all its associated runs. The job is not deleted. 
- DisableJob(ctx context.Context, in *DisableJobRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - // Deletes a job. - DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) -} - -type jobServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewJobServiceClient(cc grpc.ClientConnInterface) JobServiceClient { - return &jobServiceClient{cc} -} - -func (c *jobServiceClient) CreateJob(ctx context.Context, in *CreateJobRequest, opts ...grpc.CallOption) (*Job, error) { - out := new(Job) - err := c.cc.Invoke(ctx, "/api.JobService/CreateJob", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *jobServiceClient) GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) { - out := new(Job) - err := c.cc.Invoke(ctx, "/api.JobService/GetJob", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *jobServiceClient) ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) { - out := new(ListJobsResponse) - err := c.cc.Invoke(ctx, "/api.JobService/ListJobs", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *jobServiceClient) EnableJob(ctx context.Context, in *EnableJobRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/api.JobService/EnableJob", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *jobServiceClient) DisableJob(ctx context.Context, in *DisableJobRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/api.JobService/DisableJob", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *jobServiceClient) DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/api.JobService/DeleteJob", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// JobServiceServer is the server API for JobService service. -type JobServiceServer interface { - // Creates a new job. - CreateJob(context.Context, *CreateJobRequest) (*Job, error) - // Finds a specific job by ID. - GetJob(context.Context, *GetJobRequest) (*Job, error) - // Finds all jobs. - ListJobs(context.Context, *ListJobsRequest) (*ListJobsResponse, error) - // Restarts a job that was previously stopped. All runs associated with the job will continue. - EnableJob(context.Context, *EnableJobRequest) (*emptypb.Empty, error) - // Stops a job and all its associated runs. The job is not deleted. - DisableJob(context.Context, *DisableJobRequest) (*emptypb.Empty, error) - // Deletes a job. - DeleteJob(context.Context, *DeleteJobRequest) (*emptypb.Empty, error) -} - -// UnimplementedJobServiceServer can be embedded to have forward compatible implementations. 
-type UnimplementedJobServiceServer struct { -} - -func (*UnimplementedJobServiceServer) CreateJob(context.Context, *CreateJobRequest) (*Job, error) { - return nil, status.Errorf(codes.Unimplemented, "method CreateJob not implemented") -} -func (*UnimplementedJobServiceServer) GetJob(context.Context, *GetJobRequest) (*Job, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetJob not implemented") -} -func (*UnimplementedJobServiceServer) ListJobs(context.Context, *ListJobsRequest) (*ListJobsResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ListJobs not implemented") -} -func (*UnimplementedJobServiceServer) EnableJob(context.Context, *EnableJobRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method EnableJob not implemented") -} -func (*UnimplementedJobServiceServer) DisableJob(context.Context, *DisableJobRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method DisableJob not implemented") -} -func (*UnimplementedJobServiceServer) DeleteJob(context.Context, *DeleteJobRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method DeleteJob not implemented") -} - -func RegisterJobServiceServer(s *grpc.Server, srv JobServiceServer) { - s.RegisterService(&_JobService_serviceDesc, srv) -} - -func _JobService_CreateJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CreateJobRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(JobServiceServer).CreateJob(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.JobService/CreateJob", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(JobServiceServer).CreateJob(ctx, req.(*CreateJobRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _JobService_GetJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetJobRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(JobServiceServer).GetJob(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.JobService/GetJob", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(JobServiceServer).GetJob(ctx, req.(*GetJobRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _JobService_ListJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ListJobsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(JobServiceServer).ListJobs(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.JobService/ListJobs", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(JobServiceServer).ListJobs(ctx, req.(*ListJobsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _JobService_EnableJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(EnableJobRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(JobServiceServer).EnableJob(ctx, in) 
- } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.JobService/EnableJob", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(JobServiceServer).EnableJob(ctx, req.(*EnableJobRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _JobService_DisableJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(DisableJobRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(JobServiceServer).DisableJob(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.JobService/DisableJob", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(JobServiceServer).DisableJob(ctx, req.(*DisableJobRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _JobService_DeleteJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(DeleteJobRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(JobServiceServer).DeleteJob(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.JobService/DeleteJob", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(JobServiceServer).DeleteJob(ctx, req.(*DeleteJobRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _JobService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "api.JobService", - HandlerType: (*JobServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "CreateJob", - Handler: _JobService_CreateJob_Handler, - }, - { - MethodName: "GetJob", - Handler: _JobService_GetJob_Handler, - }, - { - MethodName: "ListJobs", - Handler: _JobService_ListJobs_Handler, - }, - { - MethodName: "EnableJob", - Handler: _JobService_EnableJob_Handler, - }, - { - MethodName: "DisableJob", - Handler: _JobService_DisableJob_Handler, - }, - { - MethodName: "DeleteJob", - Handler: _JobService_DeleteJob_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "backend/api/v1beta1/job.proto", -} diff --git a/backend/api/v1beta1/go_client/job.pb.gw.go b/backend/api/v1beta1/go_client/job.pb.gw.go index f7f28b57942..bcc08e3d72f 100644 --- a/backend/api/v1beta1/go_client/job.pb.gw.go +++ b/backend/api/v1beta1/go_client/job.pb.gw.go @@ -10,457 +10,374 @@ package go_client import ( "context" + "errors" "io" "net/http" - "github.com/golang/protobuf/descriptor" - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" ) // Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = descriptor.ForMessage -var _ = metadata.Join +var ( + _ codes.Code + _ io.Reader + _ status.Status + _ = errors.New + _ = runtime.String + _ = utilities.NewDoubleArray + _ = metadata.Join +) func request_JobService_CreateJob_0(ctx context.Context, marshaler runtime.Marshaler, client 
JobServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreateJobRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Job); err != nil && err != io.EOF { + var ( + protoReq CreateJobRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Job); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } msg, err := client.CreateJob(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_JobService_CreateJob_0(ctx context.Context, marshaler runtime.Marshaler, server JobServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreateJobRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Job); err != nil && err != io.EOF { + var ( + protoReq CreateJobRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Job); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.CreateJob(ctx, &protoReq) return msg, metadata, err - } func request_JobService_GetJob_0(ctx context.Context, marshaler runtime.Marshaler, client JobServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetJobRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetJobRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := client.GetJob(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_JobService_GetJob_0(ctx context.Context, marshaler runtime.Marshaler, server JobServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetJobRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetJobRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := server.GetJob(ctx, &protoReq) return msg, metadata, err - } -var ( - 
filter_JobService_ListJobs_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} -) +var filter_JobService_ListJobs_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} func request_JobService_ListJobs_0(ctx context.Context, marshaler runtime.Marshaler, client JobServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListJobsRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListJobsRequest + metadata runtime.ServerMetadata + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_JobService_ListJobs_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListJobs(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_JobService_ListJobs_0(ctx context.Context, marshaler runtime.Marshaler, server JobServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListJobsRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListJobsRequest + metadata runtime.ServerMetadata + ) if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_JobService_ListJobs_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.ListJobs(ctx, &protoReq) return msg, metadata, err - } func request_JobService_EnableJob_0(ctx context.Context, marshaler runtime.Marshaler, client JobServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq EnableJobRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq EnableJobRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := client.EnableJob(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_JobService_EnableJob_0(ctx context.Context, marshaler runtime.Marshaler, server JobServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq EnableJobRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq EnableJobRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := 
server.EnableJob(ctx, &protoReq) return msg, metadata, err - } func request_JobService_DisableJob_0(ctx context.Context, marshaler runtime.Marshaler, client JobServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DisableJobRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DisableJobRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := client.DisableJob(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_JobService_DisableJob_0(ctx context.Context, marshaler runtime.Marshaler, server JobServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DisableJobRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DisableJobRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := server.DisableJob(ctx, &protoReq) return msg, metadata, err - } func request_JobService_DeleteJob_0(ctx context.Context, marshaler runtime.Marshaler, client JobServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeleteJobRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DeleteJobRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := client.DeleteJob(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_JobService_DeleteJob_0(ctx context.Context, marshaler runtime.Marshaler, server JobServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeleteJobRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DeleteJobRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } 
- msg, err := server.DeleteJob(ctx, &protoReq) return msg, metadata, err - } // RegisterJobServiceHandlerServer registers the http handlers for service JobService to "mux". // UnaryRPC :call JobServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. // Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterJobServiceHandlerFromEndpoint instead. +// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call. func RegisterJobServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server JobServiceServer) error { - - mux.Handle("POST", pattern_JobService_CreateJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_JobService_CreateJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.JobService/CreateJob", runtime.WithHTTPPathPattern("/apis/v1beta1/jobs")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_JobService_CreateJob_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_JobService_CreateJob_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_JobService_CreateJob_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_JobService_CreateJob_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_JobService_GetJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_JobService_GetJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.JobService/GetJob", runtime.WithHTTPPathPattern("/apis/v1beta1/jobs/{id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_JobService_GetJob_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_JobService_GetJob_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_JobService_GetJob_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_JobService_GetJob_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_JobService_ListJobs_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_JobService_ListJobs_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.JobService/ListJobs", runtime.WithHTTPPathPattern("/apis/v1beta1/jobs")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_JobService_ListJobs_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_JobService_ListJobs_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_JobService_ListJobs_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_JobService_ListJobs_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_JobService_EnableJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_JobService_EnableJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.JobService/EnableJob", runtime.WithHTTPPathPattern("/apis/v1beta1/jobs/{id}/enable")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_JobService_EnableJob_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_JobService_EnableJob_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_JobService_EnableJob_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_JobService_EnableJob_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_JobService_DisableJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_JobService_DisableJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.JobService/DisableJob", runtime.WithHTTPPathPattern("/apis/v1beta1/jobs/{id}/disable")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_JobService_DisableJob_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_JobService_DisableJob_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_JobService_DisableJob_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_JobService_DisableJob_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("DELETE", pattern_JobService_DeleteJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodDelete, pattern_JobService_DeleteJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.JobService/DeleteJob", runtime.WithHTTPPathPattern("/apis/v1beta1/jobs/{id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_JobService_DeleteJob_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_JobService_DeleteJob_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_JobService_DeleteJob_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_JobService_DeleteJob_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) return nil @@ -469,25 +386,24 @@ func RegisterJobServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, // RegisterJobServiceHandlerFromEndpoint is same as RegisterJobServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterJobServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) + conn, err := grpc.NewClient(endpoint, opts...) if err != nil { return err } defer func() { if err != nil { if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } return } go func() { <-ctx.Done() if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } }() }() - return RegisterJobServiceHandler(ctx, mux, conn) } @@ -501,156 +417,127 @@ func RegisterJobServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn // to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "JobServiceClient". // Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "JobServiceClient" // doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "JobServiceClient" to call the correct interceptors. +// "JobServiceClient" to call the correct interceptors. This client ignores the HTTP middlewares. 
func RegisterJobServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client JobServiceClient) error { - - mux.Handle("POST", pattern_JobService_CreateJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_JobService_CreateJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.JobService/CreateJob", runtime.WithHTTPPathPattern("/apis/v1beta1/jobs")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_JobService_CreateJob_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_JobService_CreateJob_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_JobService_CreateJob_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_JobService_CreateJob_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_JobService_GetJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_JobService_GetJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.JobService/GetJob", runtime.WithHTTPPathPattern("/apis/v1beta1/jobs/{id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_JobService_GetJob_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_JobService_GetJob_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_JobService_GetJob_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_JobService_GetJob_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_JobService_ListJobs_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_JobService_ListJobs_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.JobService/ListJobs", runtime.WithHTTPPathPattern("/apis/v1beta1/jobs")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_JobService_ListJobs_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_JobService_ListJobs_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_JobService_ListJobs_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_JobService_ListJobs_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_JobService_EnableJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_JobService_EnableJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.JobService/EnableJob", runtime.WithHTTPPathPattern("/apis/v1beta1/jobs/{id}/enable")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_JobService_EnableJob_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_JobService_EnableJob_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_JobService_EnableJob_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_JobService_EnableJob_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_JobService_DisableJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_JobService_DisableJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.JobService/DisableJob", runtime.WithHTTPPathPattern("/apis/v1beta1/jobs/{id}/disable")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_JobService_DisableJob_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_JobService_DisableJob_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_JobService_DisableJob_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_JobService_DisableJob_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("DELETE", pattern_JobService_DeleteJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodDelete, pattern_JobService_DeleteJob_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.JobService/DeleteJob", runtime.WithHTTPPathPattern("/apis/v1beta1/jobs/{id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_JobService_DeleteJob_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_JobService_DeleteJob_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_JobService_DeleteJob_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_JobService_DeleteJob_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - return nil } var ( - pattern_JobService_CreateJob_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "jobs"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_JobService_GetJob_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "jobs", "id"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_JobService_ListJobs_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "jobs"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_JobService_EnableJob_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4}, []string{"apis", "v1beta1", "jobs", "id", "enable"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_JobService_DisableJob_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4}, []string{"apis", "v1beta1", "jobs", "id", "disable"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_JobService_DeleteJob_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "jobs", "id"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_JobService_CreateJob_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "jobs"}, "")) + pattern_JobService_GetJob_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "jobs", "id"}, "")) + pattern_JobService_ListJobs_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "jobs"}, "")) + pattern_JobService_EnableJob_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4}, []string{"apis", "v1beta1", "jobs", "id", "enable"}, "")) + pattern_JobService_DisableJob_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4}, []string{"apis", "v1beta1", "jobs", "id", "disable"}, "")) + pattern_JobService_DeleteJob_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "jobs", "id"}, "")) ) var ( - forward_JobService_CreateJob_0 = runtime.ForwardResponseMessage - - forward_JobService_GetJob_0 = runtime.ForwardResponseMessage - - forward_JobService_ListJobs_0 = runtime.ForwardResponseMessage - - forward_JobService_EnableJob_0 = runtime.ForwardResponseMessage - + forward_JobService_CreateJob_0 = runtime.ForwardResponseMessage + forward_JobService_GetJob_0 = runtime.ForwardResponseMessage + forward_JobService_ListJobs_0 = runtime.ForwardResponseMessage + forward_JobService_EnableJob_0 = runtime.ForwardResponseMessage forward_JobService_DisableJob_0 = runtime.ForwardResponseMessage - - forward_JobService_DeleteJob_0 = runtime.ForwardResponseMessage + forward_JobService_DeleteJob_0 = runtime.ForwardResponseMessage ) diff --git a/backend/api/v1beta1/go_client/job_grpc.pb.go b/backend/api/v1beta1/go_client/job_grpc.pb.go new file mode 100644 index 00000000000..22538386cb9 --- /dev/null +++ b/backend/api/v1beta1/go_client/job_grpc.pb.go @@ -0,0 +1,338 @@ +// Copyright 2018 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v6.31.1 +// source: backend/api/v1beta1/job.proto + +package go_client + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + emptypb "google.golang.org/protobuf/types/known/emptypb" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + JobService_CreateJob_FullMethodName = "/api.JobService/CreateJob" + JobService_GetJob_FullMethodName = "/api.JobService/GetJob" + JobService_ListJobs_FullMethodName = "/api.JobService/ListJobs" + JobService_EnableJob_FullMethodName = "/api.JobService/EnableJob" + JobService_DisableJob_FullMethodName = "/api.JobService/DisableJob" + JobService_DeleteJob_FullMethodName = "/api.JobService/DeleteJob" +) + +// JobServiceClient is the client API for JobService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type JobServiceClient interface { + // Creates a new job. + CreateJob(ctx context.Context, in *CreateJobRequest, opts ...grpc.CallOption) (*Job, error) + // Finds a specific job by ID. + GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) + // Finds all jobs. + ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) + // Restarts a job that was previously stopped. All runs associated with the job will continue. + EnableJob(ctx context.Context, in *EnableJobRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + // Stops a job and all its associated runs. The job is not deleted. + DisableJob(ctx context.Context, in *DisableJobRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + // Deletes a job. + DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) +} + +type jobServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewJobServiceClient(cc grpc.ClientConnInterface) JobServiceClient { + return &jobServiceClient{cc} +} + +func (c *jobServiceClient) CreateJob(ctx context.Context, in *CreateJobRequest, opts ...grpc.CallOption) (*Job, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Job) + err := c.cc.Invoke(ctx, JobService_CreateJob_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobServiceClient) GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Job) + err := c.cc.Invoke(ctx, JobService_GetJob_FullMethodName, in, out, cOpts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobServiceClient) ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ListJobsResponse) + err := c.cc.Invoke(ctx, JobService_ListJobs_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobServiceClient) EnableJob(ctx context.Context, in *EnableJobRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, JobService_EnableJob_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobServiceClient) DisableJob(ctx context.Context, in *DisableJobRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, JobService_DisableJob_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobServiceClient) DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, JobService_DeleteJob_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// JobServiceServer is the server API for JobService service. +// All implementations must embed UnimplementedJobServiceServer +// for forward compatibility. +type JobServiceServer interface { + // Creates a new job. + CreateJob(context.Context, *CreateJobRequest) (*Job, error) + // Finds a specific job by ID. + GetJob(context.Context, *GetJobRequest) (*Job, error) + // Finds all jobs. + ListJobs(context.Context, *ListJobsRequest) (*ListJobsResponse, error) + // Restarts a job that was previously stopped. All runs associated with the job will continue. + EnableJob(context.Context, *EnableJobRequest) (*emptypb.Empty, error) + // Stops a job and all its associated runs. The job is not deleted. + DisableJob(context.Context, *DisableJobRequest) (*emptypb.Empty, error) + // Deletes a job. + DeleteJob(context.Context, *DeleteJobRequest) (*emptypb.Empty, error) + mustEmbedUnimplementedJobServiceServer() +} + +// UnimplementedJobServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. 
+type UnimplementedJobServiceServer struct{} + +func (UnimplementedJobServiceServer) CreateJob(context.Context, *CreateJobRequest) (*Job, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreateJob not implemented") +} +func (UnimplementedJobServiceServer) GetJob(context.Context, *GetJobRequest) (*Job, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetJob not implemented") +} +func (UnimplementedJobServiceServer) ListJobs(context.Context, *ListJobsRequest) (*ListJobsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListJobs not implemented") +} +func (UnimplementedJobServiceServer) EnableJob(context.Context, *EnableJobRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method EnableJob not implemented") +} +func (UnimplementedJobServiceServer) DisableJob(context.Context, *DisableJobRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method DisableJob not implemented") +} +func (UnimplementedJobServiceServer) DeleteJob(context.Context, *DeleteJobRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeleteJob not implemented") +} +func (UnimplementedJobServiceServer) mustEmbedUnimplementedJobServiceServer() {} +func (UnimplementedJobServiceServer) testEmbeddedByValue() {} + +// UnsafeJobServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to JobServiceServer will +// result in compilation errors. +type UnsafeJobServiceServer interface { + mustEmbedUnimplementedJobServiceServer() +} + +func RegisterJobServiceServer(s grpc.ServiceRegistrar, srv JobServiceServer) { + // If the following call panics, it indicates UnimplementedJobServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. 
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&JobService_ServiceDesc, srv) +} + +func _JobService_CreateJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobServiceServer).CreateJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: JobService_CreateJob_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobServiceServer).CreateJob(ctx, req.(*CreateJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobService_GetJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobServiceServer).GetJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: JobService_GetJob_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobServiceServer).GetJob(ctx, req.(*GetJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobService_ListJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListJobsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobServiceServer).ListJobs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: JobService_ListJobs_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobServiceServer).ListJobs(ctx, req.(*ListJobsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobService_EnableJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(EnableJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobServiceServer).EnableJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: JobService_EnableJob_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobServiceServer).EnableJob(ctx, req.(*EnableJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobService_DisableJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DisableJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobServiceServer).DisableJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: JobService_DisableJob_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobServiceServer).DisableJob(ctx, req.(*DisableJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobService_DeleteJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteJobRequest) + if err := 
dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobServiceServer).DeleteJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: JobService_DeleteJob_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobServiceServer).DeleteJob(ctx, req.(*DeleteJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// JobService_ServiceDesc is the grpc.ServiceDesc for JobService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var JobService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "api.JobService", + HandlerType: (*JobServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateJob", + Handler: _JobService_CreateJob_Handler, + }, + { + MethodName: "GetJob", + Handler: _JobService_GetJob_Handler, + }, + { + MethodName: "ListJobs", + Handler: _JobService_ListJobs_Handler, + }, + { + MethodName: "EnableJob", + Handler: _JobService_EnableJob_Handler, + }, + { + MethodName: "DisableJob", + Handler: _JobService_DisableJob_Handler, + }, + { + MethodName: "DeleteJob", + Handler: _JobService_DeleteJob_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "backend/api/v1beta1/job.proto", +} diff --git a/backend/api/v1beta1/go_client/parameter.pb.go b/backend/api/v1beta1/go_client/parameter.pb.go index 5ca605e7717..8af3cd68936 100644 --- a/backend/api/v1beta1/go_client/parameter.pb.go +++ b/backend/api/v1beta1/go_client/parameter.pb.go @@ -14,8 +14,8 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v1beta1/parameter.proto package go_client @@ -25,6 +25,7 @@ import ( protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -35,21 +36,18 @@ const ( ) type Parameter struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` unknownFields protoimpl.UnknownFields - - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + sizeCache protoimpl.SizeCache } func (x *Parameter) Reset() { *x = Parameter{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_parameter_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_parameter_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Parameter) String() string { @@ -60,7 +58,7 @@ func (*Parameter) ProtoMessage() {} func (x *Parameter) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_parameter_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -91,34 +89,27 @@ func (x *Parameter) GetValue() string { var File_backend_api_v1beta1_parameter_proto protoreflect.FileDescriptor -var file_backend_api_v1beta1_parameter_proto_rawDesc = []byte{ - 0x0a, 0x23, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 
0x2f, 0x76, 0x31, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x61, 0x70, 0x69, 0x22, 0x35, 0x0a, 0x09, 0x50, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x42, 0x3d, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, - 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, - 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x67, 0x6f, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, - 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v1beta1_parameter_proto_rawDesc = "" + + "\n" + + "#backend/api/v1beta1/parameter.proto\x12\x03api\"5\n" + + "\tParameter\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\x12\x14\n" + + "\x05value\x18\x02 \x01(\tR\x05valueB=Z;github.com/kubeflow/pipelines/backend/api/v1beta1/go_clientb\x06proto3" var ( file_backend_api_v1beta1_parameter_proto_rawDescOnce sync.Once - file_backend_api_v1beta1_parameter_proto_rawDescData = file_backend_api_v1beta1_parameter_proto_rawDesc + file_backend_api_v1beta1_parameter_proto_rawDescData []byte ) func file_backend_api_v1beta1_parameter_proto_rawDescGZIP() []byte { file_backend_api_v1beta1_parameter_proto_rawDescOnce.Do(func() { - file_backend_api_v1beta1_parameter_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v1beta1_parameter_proto_rawDescData) + file_backend_api_v1beta1_parameter_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_parameter_proto_rawDesc), len(file_backend_api_v1beta1_parameter_proto_rawDesc))) }) return file_backend_api_v1beta1_parameter_proto_rawDescData } var file_backend_api_v1beta1_parameter_proto_msgTypes = make([]protoimpl.MessageInfo, 1) -var file_backend_api_v1beta1_parameter_proto_goTypes = []interface{}{ +var file_backend_api_v1beta1_parameter_proto_goTypes = []any{ (*Parameter)(nil), // 0: api.Parameter } var file_backend_api_v1beta1_parameter_proto_depIdxs = []int32{ @@ -134,25 +125,11 @@ func file_backend_api_v1beta1_parameter_proto_init() { if File_backend_api_v1beta1_parameter_proto != nil { return } - if !protoimpl.UnsafeEnabled { - file_backend_api_v1beta1_parameter_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Parameter); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v1beta1_parameter_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_parameter_proto_rawDesc), len(file_backend_api_v1beta1_parameter_proto_rawDesc)), NumEnums: 0, NumMessages: 1, NumExtensions: 0, @@ -163,7 +140,6 @@ func file_backend_api_v1beta1_parameter_proto_init() { MessageInfos: file_backend_api_v1beta1_parameter_proto_msgTypes, }.Build() File_backend_api_v1beta1_parameter_proto = out.File - file_backend_api_v1beta1_parameter_proto_rawDesc = nil file_backend_api_v1beta1_parameter_proto_goTypes = nil 
file_backend_api_v1beta1_parameter_proto_depIdxs = nil } diff --git a/backend/api/v1beta1/go_client/pipeline.pb.go b/backend/api/v1beta1/go_client/pipeline.pb.go index 4869f9d48fd..b298f225507 100644 --- a/backend/api/v1beta1/go_client/pipeline.pb.go +++ b/backend/api/v1beta1/go_client/pipeline.pb.go @@ -14,25 +14,22 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v1beta1/pipeline.proto package go_client import ( - context "context" - _ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options" + _ "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options" _ "google.golang.org/genproto/googleapis/api/annotations" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" emptypb "google.golang.org/protobuf/types/known/emptypb" timestamppb "google.golang.org/protobuf/types/known/timestamppb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -43,21 +40,18 @@ const ( ) type Url struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // URL of the pipeline definition or the pipeline version definition. - PipelineUrl string `protobuf:"bytes,1,opt,name=pipeline_url,json=pipelineUrl,proto3" json:"pipeline_url,omitempty"` + PipelineUrl string `protobuf:"bytes,1,opt,name=pipeline_url,json=pipelineUrl,proto3" json:"pipeline_url,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Url) Reset() { *x = Url{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Url) String() string { @@ -68,7 +62,7 @@ func (*Url) ProtoMessage() {} func (x *Url) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -94,20 +88,17 @@ func (x *Url) GetPipelineUrl() string { // and optionally a pipeline name. If name is not provided, file name is used as // pipeline name by default. Maximum size of 32MB is supported. 
type CreatePipelineRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Pipeline *Pipeline `protobuf:"bytes,1,opt,name=pipeline,proto3" json:"pipeline,omitempty"` unknownFields protoimpl.UnknownFields - - Pipeline *Pipeline `protobuf:"bytes,1,opt,name=pipeline,proto3" json:"pipeline,omitempty"` + sizeCache protoimpl.SizeCache } func (x *CreatePipelineRequest) Reset() { *x = CreatePipelineRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *CreatePipelineRequest) String() string { @@ -118,7 +109,7 @@ func (*CreatePipelineRequest) ProtoMessage() {} func (x *CreatePipelineRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -141,23 +132,20 @@ func (x *CreatePipelineRequest) GetPipeline() *Pipeline { } type UpdatePipelineDefaultVersionRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the pipeline to be updated. PipelineId string `protobuf:"bytes,1,opt,name=pipeline_id,json=pipelineId,proto3" json:"pipeline_id,omitempty"` // The ID of the default version. - VersionId string `protobuf:"bytes,2,opt,name=version_id,json=versionId,proto3" json:"version_id,omitempty"` + VersionId string `protobuf:"bytes,2,opt,name=version_id,json=versionId,proto3" json:"version_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *UpdatePipelineDefaultVersionRequest) Reset() { *x = UpdatePipelineDefaultVersionRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *UpdatePipelineDefaultVersionRequest) String() string { @@ -168,7 +156,7 @@ func (*UpdatePipelineDefaultVersionRequest) ProtoMessage() {} func (x *UpdatePipelineDefaultVersionRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -198,21 +186,18 @@ func (x *UpdatePipelineDefaultVersionRequest) GetVersionId() string { } type GetPipelineRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the pipeline to be retrieved. 
- Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *GetPipelineRequest) Reset() { *x = GetPipelineRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *GetPipelineRequest) String() string { @@ -223,7 +208,7 @@ func (*GetPipelineRequest) ProtoMessage() {} func (x *GetPipelineRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -246,10 +231,7 @@ func (x *GetPipelineRequest) GetId() string { } type ListPipelinesRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // A page token to request the next page of results. The token is acquried // from the nextPageToken field of the response from the previous // ListPipelines call. @@ -268,15 +250,15 @@ type ListPipelinesRequest struct { // For Pipeline, the only valid resource type is Namespace. An sample query string could be // resource_reference_key.type=NAMESPACE&resource_reference_key.id=ns1 ResourceReferenceKey *ResourceKey `protobuf:"bytes,5,opt,name=resource_reference_key,json=resourceReferenceKey,proto3" json:"resource_reference_key,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListPipelinesRequest) Reset() { *x = ListPipelinesRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListPipelinesRequest) String() string { @@ -287,7 +269,7 @@ func (*ListPipelinesRequest) ProtoMessage() {} func (x *ListPipelinesRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -338,24 +320,21 @@ func (x *ListPipelinesRequest) GetResourceReferenceKey() *ResourceKey { } type ListPipelinesResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Pipelines []*Pipeline `protobuf:"bytes,1,rep,name=pipelines,proto3" json:"pipelines,omitempty"` + state protoimpl.MessageState `protogen:"open.v1"` + Pipelines []*Pipeline `protobuf:"bytes,1,rep,name=pipelines,proto3" json:"pipelines,omitempty"` // The total number of pipelines for the given query. TotalSize int32 `protobuf:"varint,3,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` // The token to list the next page of pipelines. 
NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListPipelinesResponse) Reset() { *x = ListPipelinesResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListPipelinesResponse) String() string { @@ -366,7 +345,7 @@ func (*ListPipelinesResponse) ProtoMessage() {} func (x *ListPipelinesResponse) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -403,26 +382,23 @@ func (x *ListPipelinesResponse) GetNextPageToken() string { } type GetPipelineByNameRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The Name of the pipeline to be retrieved. Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // The Namespace the pipeline belongs to. // In the case of shared pipelines and KFPipeline standalone installation, // the pipeline name is the only needed field for unique resource lookup (namespace is not required). // In those case, please provide hyphen (dash character, "-"). - Namespace string `protobuf:"bytes,2,opt,name=namespace,proto3" json:"namespace,omitempty"` + Namespace string `protobuf:"bytes,2,opt,name=namespace,proto3" json:"namespace,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *GetPipelineByNameRequest) Reset() { *x = GetPipelineByNameRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *GetPipelineByNameRequest) String() string { @@ -433,7 +409,7 @@ func (*GetPipelineByNameRequest) ProtoMessage() {} func (x *GetPipelineByNameRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -463,21 +439,18 @@ func (x *GetPipelineByNameRequest) GetNamespace() string { } type DeletePipelineRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the pipeline to be deleted. 
- Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DeletePipelineRequest) Reset() { *x = DeletePipelineRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DeletePipelineRequest) String() string { @@ -488,7 +461,7 @@ func (*DeletePipelineRequest) ProtoMessage() {} func (x *DeletePipelineRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -511,21 +484,18 @@ func (x *DeletePipelineRequest) GetId() string { } type GetTemplateRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the pipeline whose template is to be retrieved. - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *GetTemplateRequest) Reset() { *x = GetTemplateRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *GetTemplateRequest) String() string { @@ -536,7 +506,7 @@ func (*GetTemplateRequest) ProtoMessage() {} func (x *GetTemplateRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -559,22 +529,19 @@ func (x *GetTemplateRequest) GetId() string { } type GetTemplateResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The template of the pipeline specified in a GetTemplate request, or of a // pipeline version specified in a GetPipelinesVersionTemplate request. 
- Template string `protobuf:"bytes,1,opt,name=template,proto3" json:"template,omitempty"` + Template string `protobuf:"bytes,1,opt,name=template,proto3" json:"template,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *GetTemplateResponse) Reset() { *x = GetTemplateResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[9] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *GetTemplateResponse) String() string { @@ -585,7 +552,7 @@ func (*GetTemplateResponse) ProtoMessage() {} func (x *GetTemplateResponse) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[9] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -608,21 +575,18 @@ func (x *GetTemplateResponse) GetTemplate() string { } type GetPipelineVersionTemplateRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the pipeline version whose template is to be retrieved. - VersionId string `protobuf:"bytes,1,opt,name=version_id,json=versionId,proto3" json:"version_id,omitempty"` + VersionId string `protobuf:"bytes,1,opt,name=version_id,json=versionId,proto3" json:"version_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *GetPipelineVersionTemplateRequest) Reset() { *x = GetPipelineVersionTemplateRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *GetPipelineVersionTemplateRequest) String() string { @@ -633,7 +597,7 @@ func (*GetPipelineVersionTemplateRequest) ProtoMessage() {} func (x *GetPipelineVersionTemplateRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -656,22 +620,19 @@ func (x *GetPipelineVersionTemplateRequest) GetVersionId() string { } type CreatePipelineVersionRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // ResourceReference inside PipelineVersion specifies the pipeline that this // version belongs to. 
- Version *PipelineVersion `protobuf:"bytes,1,opt,name=version,proto3" json:"version,omitempty"` + Version *PipelineVersion `protobuf:"bytes,1,opt,name=version,proto3" json:"version,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *CreatePipelineVersionRequest) Reset() { *x = CreatePipelineVersionRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[11] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *CreatePipelineVersionRequest) String() string { @@ -682,7 +643,7 @@ func (*CreatePipelineVersionRequest) ProtoMessage() {} func (x *CreatePipelineVersionRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[11] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -705,21 +666,18 @@ func (x *CreatePipelineVersionRequest) GetVersion() *PipelineVersion { } type GetPipelineVersionRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the pipeline version to be retrieved. - VersionId string `protobuf:"bytes,1,opt,name=version_id,json=versionId,proto3" json:"version_id,omitempty"` + VersionId string `protobuf:"bytes,1,opt,name=version_id,json=versionId,proto3" json:"version_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *GetPipelineVersionRequest) Reset() { *x = GetPipelineVersionRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[12] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *GetPipelineVersionRequest) String() string { @@ -730,7 +688,7 @@ func (*GetPipelineVersionRequest) ProtoMessage() {} func (x *GetPipelineVersionRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[12] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -753,10 +711,7 @@ func (x *GetPipelineVersionRequest) GetVersionId() string { } type ListPipelineVersionsRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // ResourceKey specifies the pipeline whose versions are to be listed. ResourceKey *ResourceKey `protobuf:"bytes,1,opt,name=resource_key,json=resourceKey,proto3" json:"resource_key,omitempty"` // The number of pipeline versions to be listed per page. If there are more @@ -772,16 +727,16 @@ type ListPipelineVersionsRequest struct { SortBy string `protobuf:"bytes,4,opt,name=sort_by,json=sortBy,proto3" json:"sort_by,omitempty"` // A base-64 encoded, JSON-serialized Filter protocol buffer (see // filter.proto). 
- Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListPipelineVersionsRequest) Reset() { *x = ListPipelineVersionsRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[13] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListPipelineVersionsRequest) String() string { @@ -792,7 +747,7 @@ func (*ListPipelineVersionsRequest) ProtoMessage() {} func (x *ListPipelineVersionsRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[13] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -843,24 +798,21 @@ func (x *ListPipelineVersionsRequest) GetFilter() string { } type ListPipelineVersionsResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Versions []*PipelineVersion `protobuf:"bytes,1,rep,name=versions,proto3" json:"versions,omitempty"` + state protoimpl.MessageState `protogen:"open.v1"` + Versions []*PipelineVersion `protobuf:"bytes,1,rep,name=versions,proto3" json:"versions,omitempty"` // The token to list the next page of pipeline versions. NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` // The total number of pipeline versions for the given query. - TotalSize int32 `protobuf:"varint,3,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` + TotalSize int32 `protobuf:"varint,3,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListPipelineVersionsResponse) Reset() { *x = ListPipelineVersionsResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[14] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListPipelineVersionsResponse) String() string { @@ -871,7 +823,7 @@ func (*ListPipelineVersionsResponse) ProtoMessage() {} func (x *ListPipelineVersionsResponse) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[14] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -908,21 +860,18 @@ func (x *ListPipelineVersionsResponse) GetTotalSize() int32 { } type DeletePipelineVersionRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the pipeline version to be deleted. 
- VersionId string `protobuf:"bytes,1,opt,name=version_id,json=versionId,proto3" json:"version_id,omitempty"` + VersionId string `protobuf:"bytes,1,opt,name=version_id,json=versionId,proto3" json:"version_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DeletePipelineVersionRequest) Reset() { *x = DeletePipelineVersionRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[15] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DeletePipelineVersionRequest) String() string { @@ -933,7 +882,7 @@ func (*DeletePipelineVersionRequest) ProtoMessage() {} func (x *DeletePipelineVersionRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[15] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -956,10 +905,7 @@ func (x *DeletePipelineVersionRequest) GetVersionId() string { } type Pipeline struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Output. Unique pipeline ID. Generated by API server. Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` // Output. The time this pipeline is created. @@ -988,15 +934,15 @@ type Pipeline struct { // Input field. Specify which resource this pipeline belongs to. // For Pipeline, the only valid resource reference is a single Namespace. ResourceReferences []*ResourceReference `protobuf:"bytes,9,rep,name=resource_references,json=resourceReferences,proto3" json:"resource_references,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Pipeline) Reset() { *x = Pipeline{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[16] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Pipeline) String() string { @@ -1007,7 +953,7 @@ func (*Pipeline) ProtoMessage() {} func (x *Pipeline) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[16] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1086,10 +1032,7 @@ func (x *Pipeline) GetResourceReferences() []*ResourceReference { } type PipelineVersion struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Output. Unique version ID. Generated by API server. Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` // Optional input field. Version name provided by user. @@ -1111,16 +1054,16 @@ type PipelineVersion struct { // For Experiment, the only valid resource reference is a single Namespace. ResourceReferences []*ResourceReference `protobuf:"bytes,7,rep,name=resource_references,json=resourceReferences,proto3" json:"resource_references,omitempty"` // Input. Optional. 
Description for the pipeline version. - Description string `protobuf:"bytes,8,opt,name=description,proto3" json:"description,omitempty"` + Description string `protobuf:"bytes,8,opt,name=description,proto3" json:"description,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineVersion) Reset() { *x = PipelineVersion{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[17] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineVersion) String() string { @@ -1131,7 +1074,7 @@ func (*PipelineVersion) ProtoMessage() {} func (x *PipelineVersion) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_pipeline_proto_msgTypes[17] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1204,284 +1147,125 @@ func (x *PipelineVersion) GetDescription() string { var File_backend_api_v1beta1_pipeline_proto protoreflect.FileDescriptor -var file_backend_api_v1beta1_pipeline_proto_rawDesc = []byte{ - 0x0a, 0x22, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x61, 0x70, 0x69, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, - 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, - 0x70, 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x23, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, - 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x65, 0x74, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2c, 0x62, 0x61, 0x63, - 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, - 0x6e, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2c, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x63, 0x2d, 0x67, 0x65, 0x6e, 0x2d, 0x73, 0x77, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2f, 0x6f, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x28, 0x0a, 0x03, 0x55, 0x72, 0x6c, 0x12, 0x21, - 0x0a, 0x0c, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x55, 0x72, - 0x6c, 0x22, 0x42, 0x0a, 0x15, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x29, 0x0a, 0x08, 
0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x08, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, 0x65, 0x0a, 0x23, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x56, 0x65, - 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0a, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x64, 0x12, 0x1d, 0x0a, - 0x0a, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x09, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x22, 0x24, 0x0a, 0x12, - 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, - 0x69, 0x64, 0x22, 0xcb, 0x01, 0x0a, 0x14, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x70, - 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x09, 0x70, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x61, - 0x67, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x70, - 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x73, 0x6f, 0x72, 0x74, 0x5f, - 0x62, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x72, 0x74, 0x42, 0x79, - 0x12, 0x16, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x46, 0x0a, 0x16, 0x72, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x6b, - 0x65, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x14, 0x72, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x4b, 0x65, 0x79, - 0x22, 0x8b, 0x01, 0x0a, 0x15, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2b, 0x0a, 0x09, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0d, 0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x09, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, - 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x6f, 0x74, - 0x61, 0x6c, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x70, - 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x4c, - 0x0a, 0x18, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x42, 0x79, 0x4e, - 0x61, 0x6d, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, - 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1c, - 0x0a, 0x09, 
0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x22, 0x27, 0x0a, 0x15, - 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x24, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x54, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x31, 0x0a, 0x13, 0x47, - 0x65, 0x74, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x42, - 0x0a, 0x21, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, - 0x73, 0x69, 0x6f, 0x6e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, - 0x49, 0x64, 0x22, 0x4e, 0x0a, 0x1c, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x2e, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, - 0x6f, 0x6e, 0x22, 0x3a, 0x0a, 0x19, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x1d, 0x0a, 0x0a, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x09, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x22, 0xbf, - 0x01, 0x0a, 0x1b, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, - 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x33, - 0x0a, 0x0c, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x0b, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x4b, 0x65, 0x79, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, - 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, - 0x17, 0x0a, 0x07, 0x73, 0x6f, 0x72, 0x74, 0x5f, 0x62, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x06, 0x73, 0x6f, 0x72, 0x74, 0x42, 0x79, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, - 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, - 0x22, 0x97, 0x01, 0x0a, 0x1c, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 
0x6f, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x12, 0x30, 0x0a, 0x08, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x76, 0x65, 0x72, 0x73, 0x69, - 0x6f, 0x6e, 0x73, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, - 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, - 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x1d, 0x0a, 0x0a, 0x74, - 0x6f, 0x74, 0x61, 0x6c, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, - 0x09, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x53, 0x69, 0x7a, 0x65, 0x22, 0x3d, 0x0a, 0x1c, 0x44, 0x65, - 0x6c, 0x65, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, - 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x76, 0x65, - 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, - 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x22, 0xf5, 0x02, 0x0a, 0x08, 0x50, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, - 0x64, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, - 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, - 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, - 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x2e, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x1a, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x07, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x08, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x55, 0x72, 0x6c, 0x52, 0x03, - 0x75, 0x72, 0x6c, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x06, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x3d, 0x0a, 0x0f, 0x64, 0x65, 0x66, - 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, - 0x74, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x47, 0x0a, 0x13, 0x72, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, - 0x09, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x12, 0x72, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, - 0x73, 0x22, 0xde, 0x02, 0x0a, 0x0f, 0x50, 0x69, 0x70, 0x65, 
0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, - 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, - 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, - 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, - 0x65, 0x64, 0x41, 0x74, 0x12, 0x2e, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x50, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x73, 0x12, 0x26, 0x0a, 0x0f, 0x63, 0x6f, 0x64, 0x65, 0x5f, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x63, - 0x6f, 0x64, 0x65, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x55, 0x72, 0x6c, 0x12, 0x29, 0x0a, 0x0b, - 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x06, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x08, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x55, 0x72, 0x6c, 0x52, 0x0a, 0x70, 0x61, 0x63, - 0x6b, 0x61, 0x67, 0x65, 0x55, 0x72, 0x6c, 0x12, 0x47, 0x0a, 0x13, 0x72, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x07, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x12, 0x72, 0x65, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, - 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, - 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, - 0x6f, 0x6e, 0x32, 0x9e, 0x0c, 0x0a, 0x0f, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x53, - 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x68, 0x0a, 0x10, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, - 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x31, 0x12, 0x1a, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x50, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, 0x29, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x23, 0x3a, 0x08, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, 0x17, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, - 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, - 0x12, 0x5d, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, - 0x31, 0x12, 0x17, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0d, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, 0x24, 0x82, 0xd3, 0xe4, 0x93, 0x02, - 0x1e, 0x12, 0x1c, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x12, - 0x82, 0x01, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 
0x65, 0x42, - 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x56, 0x31, 0x12, 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, - 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x50, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, 0x3d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x37, 0x12, 0x35, 0x2f, - 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x6e, 0x61, 0x6d, - 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x7d, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x6e, - 0x61, 0x6d, 0x65, 0x7d, 0x12, 0x69, 0x0a, 0x0f, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x56, 0x31, 0x12, 0x19, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, - 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x1f, - 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x19, 0x12, 0x17, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x12, - 0x6c, 0x0a, 0x10, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x56, 0x31, 0x12, 0x1a, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, - 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, - 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x24, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1e, 0x2a, - 0x1c, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x12, 0x70, 0x0a, - 0x0b, 0x47, 0x65, 0x74, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x17, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x54, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, - 0x2e, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x28, 0x12, 0x26, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, - 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, - 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x12, - 0x84, 0x01, 0x0a, 0x17, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x56, 0x31, 0x12, 0x21, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x14, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, - 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x30, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x2a, 0x3a, 0x07, 0x76, 0x65, - 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x1f, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, - 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x76, 0x65, - 0x72, 0x73, 
0x69, 0x6f, 0x6e, 0x73, 0x12, 0x82, 0x01, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x50, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x56, 0x31, 0x12, - 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x14, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, - 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x34, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x2e, 0x12, 0x2c, 0x2f, - 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x7b, - 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x7d, 0x12, 0x86, 0x01, 0x0a, 0x16, - 0x4c, 0x69, 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, - 0x69, 0x6f, 0x6e, 0x73, 0x56, 0x31, 0x12, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, - 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, - 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, - 0x69, 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, - 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x27, 0x82, 0xd3, 0xe4, - 0x93, 0x02, 0x21, 0x12, 0x1f, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, - 0x61, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, - 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x8a, 0x01, 0x0a, 0x17, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x56, 0x31, - 0x12, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x34, 0x82, 0xd3, 0xe4, - 0x93, 0x02, 0x2e, 0x2a, 0x2c, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, - 0x61, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, - 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x7b, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, - 0x7d, 0x12, 0x9e, 0x01, 0x0a, 0x1a, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x12, 0x26, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, - 0x65, 0x74, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x22, 0x3e, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x38, 0x12, 0x36, 0x2f, 0x61, 0x70, 0x69, - 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x7b, 0x76, 0x65, 0x72, - 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x7d, 0x2f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x73, 0x12, 0xae, 0x01, 0x0a, 
0x1e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x56, 0x65, 0x72, 0x73, - 0x69, 0x6f, 0x6e, 0x56, 0x31, 0x12, 0x28, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x55, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, - 0x74, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, - 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x4a, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x44, 0x22, - 0x42, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x5f, 0x69, 0x64, 0x7d, 0x2f, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x76, - 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x2f, 0x7b, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, - 0x69, 0x64, 0x7d, 0x42, 0x91, 0x01, 0x92, 0x41, 0x51, 0x2a, 0x02, 0x01, 0x02, 0x52, 0x1c, 0x0a, - 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x12, 0x11, 0x12, 0x0f, 0x0a, 0x0d, 0x1a, 0x0b, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x5a, 0x1f, 0x0a, 0x1d, 0x0a, - 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x12, 0x13, 0x08, 0x02, 0x1a, 0x0d, 0x61, 0x75, 0x74, - 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x02, 0x62, 0x0c, 0x0a, 0x0a, - 0x0a, 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x12, 0x00, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, - 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, - 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x67, 0x6f, - 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v1beta1_pipeline_proto_rawDesc = "" + + "\n" + + "\"backend/api/v1beta1/pipeline.proto\x12\x03api\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a#backend/api/v1beta1/parameter.proto\x1a,backend/api/v1beta1/resource_reference.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\"(\n" + + "\x03Url\x12!\n" + + "\fpipeline_url\x18\x01 \x01(\tR\vpipelineUrl\"B\n" + + "\x15CreatePipelineRequest\x12)\n" + + "\bpipeline\x18\x01 \x01(\v2\r.api.PipelineR\bpipeline\"e\n" + + "#UpdatePipelineDefaultVersionRequest\x12\x1f\n" + + "\vpipeline_id\x18\x01 \x01(\tR\n" + + "pipelineId\x12\x1d\n" + + "\n" + + "version_id\x18\x02 \x01(\tR\tversionId\"$\n" + + "\x12GetPipelineRequest\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\"\xcb\x01\n" + + "\x14ListPipelinesRequest\x12\x1d\n" + + "\n" + + "page_token\x18\x01 \x01(\tR\tpageToken\x12\x1b\n" + + "\tpage_size\x18\x02 \x01(\x05R\bpageSize\x12\x17\n" + + "\asort_by\x18\x03 \x01(\tR\x06sortBy\x12\x16\n" + + "\x06filter\x18\x04 \x01(\tR\x06filter\x12F\n" + + "\x16resource_reference_key\x18\x05 \x01(\v2\x10.api.ResourceKeyR\x14resourceReferenceKey\"\x8b\x01\n" + + "\x15ListPipelinesResponse\x12+\n" + + "\tpipelines\x18\x01 \x03(\v2\r.api.PipelineR\tpipelines\x12\x1d\n" + + "\n" + + "total_size\x18\x03 \x01(\x05R\ttotalSize\x12&\n" + + "\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\"L\n" + + "\x18GetPipelineByNameRequest\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\x12\x1c\n" + + 
"\tnamespace\x18\x02 \x01(\tR\tnamespace\"'\n" + + "\x15DeletePipelineRequest\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\"$\n" + + "\x12GetTemplateRequest\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\"1\n" + + "\x13GetTemplateResponse\x12\x1a\n" + + "\btemplate\x18\x01 \x01(\tR\btemplate\"B\n" + + "!GetPipelineVersionTemplateRequest\x12\x1d\n" + + "\n" + + "version_id\x18\x01 \x01(\tR\tversionId\"N\n" + + "\x1cCreatePipelineVersionRequest\x12.\n" + + "\aversion\x18\x01 \x01(\v2\x14.api.PipelineVersionR\aversion\":\n" + + "\x19GetPipelineVersionRequest\x12\x1d\n" + + "\n" + + "version_id\x18\x01 \x01(\tR\tversionId\"\xbf\x01\n" + + "\x1bListPipelineVersionsRequest\x123\n" + + "\fresource_key\x18\x01 \x01(\v2\x10.api.ResourceKeyR\vresourceKey\x12\x1b\n" + + "\tpage_size\x18\x02 \x01(\x05R\bpageSize\x12\x1d\n" + + "\n" + + "page_token\x18\x03 \x01(\tR\tpageToken\x12\x17\n" + + "\asort_by\x18\x04 \x01(\tR\x06sortBy\x12\x16\n" + + "\x06filter\x18\x05 \x01(\tR\x06filter\"\x97\x01\n" + + "\x1cListPipelineVersionsResponse\x120\n" + + "\bversions\x18\x01 \x03(\v2\x14.api.PipelineVersionR\bversions\x12&\n" + + "\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\x12\x1d\n" + + "\n" + + "total_size\x18\x03 \x01(\x05R\ttotalSize\"=\n" + + "\x1cDeletePipelineVersionRequest\x12\x1d\n" + + "\n" + + "version_id\x18\x01 \x01(\tR\tversionId\"\xf5\x02\n" + + "\bPipeline\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\x129\n" + + "\n" + + "created_at\x18\x02 \x01(\v2\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x12\n" + + "\x04name\x18\x03 \x01(\tR\x04name\x12 \n" + + "\vdescription\x18\x04 \x01(\tR\vdescription\x12.\n" + + "\n" + + "parameters\x18\x05 \x03(\v2\x0e.api.ParameterR\n" + + "parameters\x12\x1a\n" + + "\x03url\x18\a \x01(\v2\b.api.UrlR\x03url\x12\x14\n" + + "\x05error\x18\x06 \x01(\tR\x05error\x12=\n" + + "\x0fdefault_version\x18\b \x01(\v2\x14.api.PipelineVersionR\x0edefaultVersion\x12G\n" + + "\x13resource_references\x18\t \x03(\v2\x16.api.ResourceReferenceR\x12resourceReferences\"\xde\x02\n" + + "\x0fPipelineVersion\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n" + + "\x04name\x18\x02 \x01(\tR\x04name\x129\n" + + "\n" + + "created_at\x18\x03 \x01(\v2\x1a.google.protobuf.TimestampR\tcreatedAt\x12.\n" + + "\n" + + "parameters\x18\x04 \x03(\v2\x0e.api.ParameterR\n" + + "parameters\x12&\n" + + "\x0fcode_source_url\x18\x05 \x01(\tR\rcodeSourceUrl\x12)\n" + + "\vpackage_url\x18\x06 \x01(\v2\b.api.UrlR\n" + + "packageUrl\x12G\n" + + "\x13resource_references\x18\a \x03(\v2\x16.api.ResourceReferenceR\x12resourceReferences\x12 \n" + + "\vdescription\x18\b \x01(\tR\vdescription2\x9e\f\n" + + "\x0fPipelineService\x12h\n" + + "\x10CreatePipelineV1\x12\x1a.api.CreatePipelineRequest\x1a\r.api.Pipeline\")\x82\xd3\xe4\x93\x02#:\bpipeline\"\x17/apis/v1beta1/pipelines\x12]\n" + + "\rGetPipelineV1\x12\x17.api.GetPipelineRequest\x1a\r.api.Pipeline\"$\x82\xd3\xe4\x93\x02\x1e\x12\x1c/apis/v1beta1/pipelines/{id}\x12\x82\x01\n" + + "\x13GetPipelineByNameV1\x12\x1d.api.GetPipelineByNameRequest\x1a\r.api.Pipeline\"=\x82\xd3\xe4\x93\x027\x125/apis/v1beta1/namespaces/{namespace}/pipelines/{name}\x12i\n" + + "\x0fListPipelinesV1\x12\x19.api.ListPipelinesRequest\x1a\x1a.api.ListPipelinesResponse\"\x1f\x82\xd3\xe4\x93\x02\x19\x12\x17/apis/v1beta1/pipelines\x12l\n" + + "\x10DeletePipelineV1\x12\x1a.api.DeletePipelineRequest\x1a\x16.google.protobuf.Empty\"$\x82\xd3\xe4\x93\x02\x1e*\x1c/apis/v1beta1/pipelines/{id}\x12p\n" + + 
"\vGetTemplate\x12\x17.api.GetTemplateRequest\x1a\x18.api.GetTemplateResponse\".\x82\xd3\xe4\x93\x02(\x12&/apis/v1beta1/pipelines/{id}/templates\x12\x84\x01\n" + + "\x17CreatePipelineVersionV1\x12!.api.CreatePipelineVersionRequest\x1a\x14.api.PipelineVersion\"0\x82\xd3\xe4\x93\x02*:\aversion\"\x1f/apis/v1beta1/pipeline_versions\x12\x82\x01\n" + + "\x14GetPipelineVersionV1\x12\x1e.api.GetPipelineVersionRequest\x1a\x14.api.PipelineVersion\"4\x82\xd3\xe4\x93\x02.\x12,/apis/v1beta1/pipeline_versions/{version_id}\x12\x86\x01\n" + + "\x16ListPipelineVersionsV1\x12 .api.ListPipelineVersionsRequest\x1a!.api.ListPipelineVersionsResponse\"'\x82\xd3\xe4\x93\x02!\x12\x1f/apis/v1beta1/pipeline_versions\x12\x8a\x01\n" + + "\x17DeletePipelineVersionV1\x12!.api.DeletePipelineVersionRequest\x1a\x16.google.protobuf.Empty\"4\x82\xd3\xe4\x93\x02.*,/apis/v1beta1/pipeline_versions/{version_id}\x12\x9e\x01\n" + + "\x1aGetPipelineVersionTemplate\x12&.api.GetPipelineVersionTemplateRequest\x1a\x18.api.GetTemplateResponse\">\x82\xd3\xe4\x93\x028\x126/apis/v1beta1/pipeline_versions/{version_id}/templates\x12\xae\x01\n" + + "\x1eUpdatePipelineDefaultVersionV1\x12(.api.UpdatePipelineDefaultVersionRequest\x1a\x16.google.protobuf.Empty\"J\x82\xd3\xe4\x93\x02D\"B/apis/v1beta1/pipelines/{pipeline_id}/default_version/{version_id}B\x91\x01\x92AQ*\x02\x01\x02R\x1c\n" + + "\adefault\x12\x11\x12\x0f\n" + + "\r\x1a\v.api.StatusZ\x1f\n" + + "\x1d\n" + + "\x06Bearer\x12\x13\b\x02\x1a\rauthorization \x02b\f\n" + + "\n" + + "\n" + + "\x06Bearer\x12\x00Z;github.com/kubeflow/pipelines/backend/api/v1beta1/go_clientb\x06proto3" var ( file_backend_api_v1beta1_pipeline_proto_rawDescOnce sync.Once - file_backend_api_v1beta1_pipeline_proto_rawDescData = file_backend_api_v1beta1_pipeline_proto_rawDesc + file_backend_api_v1beta1_pipeline_proto_rawDescData []byte ) func file_backend_api_v1beta1_pipeline_proto_rawDescGZIP() []byte { file_backend_api_v1beta1_pipeline_proto_rawDescOnce.Do(func() { - file_backend_api_v1beta1_pipeline_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v1beta1_pipeline_proto_rawDescData) + file_backend_api_v1beta1_pipeline_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_pipeline_proto_rawDesc), len(file_backend_api_v1beta1_pipeline_proto_rawDesc))) }) return file_backend_api_v1beta1_pipeline_proto_rawDescData } var file_backend_api_v1beta1_pipeline_proto_msgTypes = make([]protoimpl.MessageInfo, 18) -var file_backend_api_v1beta1_pipeline_proto_goTypes = []interface{}{ +var file_backend_api_v1beta1_pipeline_proto_goTypes = []any{ (*Url)(nil), // 0: api.Url (*CreatePipelineRequest)(nil), // 1: api.CreatePipelineRequest (*UpdatePipelineDefaultVersionRequest)(nil), // 2: api.UpdatePipelineDefaultVersionRequest @@ -1558,232 +1342,13 @@ func file_backend_api_v1beta1_pipeline_proto_init() { if File_backend_api_v1beta1_pipeline_proto != nil { return } - file_backend_api_v1beta1_error_proto_init() file_backend_api_v1beta1_parameter_proto_init() file_backend_api_v1beta1_resource_reference_proto_init() - if !protoimpl.UnsafeEnabled { - file_backend_api_v1beta1_pipeline_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Url); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_pipeline_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreatePipelineRequest); i { - case 0: - return 
&v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_pipeline_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*UpdatePipelineDefaultVersionRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_pipeline_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetPipelineRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_pipeline_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListPipelinesRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_pipeline_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListPipelinesResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_pipeline_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetPipelineByNameRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_pipeline_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeletePipelineRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_pipeline_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetTemplateRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_pipeline_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetTemplateResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_pipeline_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetPipelineVersionTemplateRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_pipeline_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreatePipelineVersionRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_pipeline_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetPipelineVersionRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_pipeline_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListPipelineVersionsRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - 
file_backend_api_v1beta1_pipeline_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListPipelineVersionsResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_pipeline_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeletePipelineVersionRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_pipeline_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Pipeline); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_pipeline_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineVersion); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v1beta1_pipeline_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_pipeline_proto_rawDesc), len(file_backend_api_v1beta1_pipeline_proto_rawDesc)), NumEnums: 0, NumMessages: 18, NumExtensions: 0, @@ -1794,517 +1359,6 @@ func file_backend_api_v1beta1_pipeline_proto_init() { MessageInfos: file_backend_api_v1beta1_pipeline_proto_msgTypes, }.Build() File_backend_api_v1beta1_pipeline_proto = out.File - file_backend_api_v1beta1_pipeline_proto_rawDesc = nil file_backend_api_v1beta1_pipeline_proto_goTypes = nil file_backend_api_v1beta1_pipeline_proto_depIdxs = nil } - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// PipelineServiceClient is the client API for PipelineService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type PipelineServiceClient interface { - // Creates a pipeline. - CreatePipelineV1(ctx context.Context, in *CreatePipelineRequest, opts ...grpc.CallOption) (*Pipeline, error) - // Finds a specific pipeline by ID. - GetPipelineV1(ctx context.Context, in *GetPipelineRequest, opts ...grpc.CallOption) (*Pipeline, error) - // Finds a pipeline by Name (and namespace) - GetPipelineByNameV1(ctx context.Context, in *GetPipelineByNameRequest, opts ...grpc.CallOption) (*Pipeline, error) - // Finds all pipelines. - ListPipelinesV1(ctx context.Context, in *ListPipelinesRequest, opts ...grpc.CallOption) (*ListPipelinesResponse, error) - // Deletes a pipeline and its pipeline versions. - DeletePipelineV1(ctx context.Context, in *DeletePipelineRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - // Returns a single YAML template that contains the description, parameters, and metadata associated with the pipeline provided. - GetTemplate(ctx context.Context, in *GetTemplateRequest, opts ...grpc.CallOption) (*GetTemplateResponse, error) - // Adds a pipeline version to the specified pipeline. 
- CreatePipelineVersionV1(ctx context.Context, in *CreatePipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) - // Gets a pipeline version by pipeline version ID. - GetPipelineVersionV1(ctx context.Context, in *GetPipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) - // Lists all pipeline versions of a given pipeline. - ListPipelineVersionsV1(ctx context.Context, in *ListPipelineVersionsRequest, opts ...grpc.CallOption) (*ListPipelineVersionsResponse, error) - // Deletes a pipeline version by pipeline version ID. If the deleted pipeline - // version is the default pipeline version, the pipeline's default version - // changes to the pipeline's most recent pipeline version. If there are no - // remaining pipeline versions, the pipeline will have no default version. - // Examines the run_service_api.ipynb notebook to learn more about creating a - // run using a pipeline version (https://github.com/kubeflow/pipelines/blob/master/tools/benchmarks/run_service_api.ipynb). - DeletePipelineVersionV1(ctx context.Context, in *DeletePipelineVersionRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - // Returns a YAML template that contains the specified pipeline version's description, parameters and metadata. - GetPipelineVersionTemplate(ctx context.Context, in *GetPipelineVersionTemplateRequest, opts ...grpc.CallOption) (*GetTemplateResponse, error) - // Update the default pipeline version of a specific pipeline. - UpdatePipelineDefaultVersionV1(ctx context.Context, in *UpdatePipelineDefaultVersionRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) -} - -type pipelineServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewPipelineServiceClient(cc grpc.ClientConnInterface) PipelineServiceClient { - return &pipelineServiceClient{cc} -} - -func (c *pipelineServiceClient) CreatePipelineV1(ctx context.Context, in *CreatePipelineRequest, opts ...grpc.CallOption) (*Pipeline, error) { - out := new(Pipeline) - err := c.cc.Invoke(ctx, "/api.PipelineService/CreatePipelineV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) GetPipelineV1(ctx context.Context, in *GetPipelineRequest, opts ...grpc.CallOption) (*Pipeline, error) { - out := new(Pipeline) - err := c.cc.Invoke(ctx, "/api.PipelineService/GetPipelineV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) GetPipelineByNameV1(ctx context.Context, in *GetPipelineByNameRequest, opts ...grpc.CallOption) (*Pipeline, error) { - out := new(Pipeline) - err := c.cc.Invoke(ctx, "/api.PipelineService/GetPipelineByNameV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) ListPipelinesV1(ctx context.Context, in *ListPipelinesRequest, opts ...grpc.CallOption) (*ListPipelinesResponse, error) { - out := new(ListPipelinesResponse) - err := c.cc.Invoke(ctx, "/api.PipelineService/ListPipelinesV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) DeletePipelineV1(ctx context.Context, in *DeletePipelineRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/api.PipelineService/DeletePipelineV1", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) GetTemplate(ctx context.Context, in *GetTemplateRequest, opts ...grpc.CallOption) (*GetTemplateResponse, error) { - out := new(GetTemplateResponse) - err := c.cc.Invoke(ctx, "/api.PipelineService/GetTemplate", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) CreatePipelineVersionV1(ctx context.Context, in *CreatePipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) { - out := new(PipelineVersion) - err := c.cc.Invoke(ctx, "/api.PipelineService/CreatePipelineVersionV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) GetPipelineVersionV1(ctx context.Context, in *GetPipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) { - out := new(PipelineVersion) - err := c.cc.Invoke(ctx, "/api.PipelineService/GetPipelineVersionV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) ListPipelineVersionsV1(ctx context.Context, in *ListPipelineVersionsRequest, opts ...grpc.CallOption) (*ListPipelineVersionsResponse, error) { - out := new(ListPipelineVersionsResponse) - err := c.cc.Invoke(ctx, "/api.PipelineService/ListPipelineVersionsV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) DeletePipelineVersionV1(ctx context.Context, in *DeletePipelineVersionRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/api.PipelineService/DeletePipelineVersionV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) GetPipelineVersionTemplate(ctx context.Context, in *GetPipelineVersionTemplateRequest, opts ...grpc.CallOption) (*GetTemplateResponse, error) { - out := new(GetTemplateResponse) - err := c.cc.Invoke(ctx, "/api.PipelineService/GetPipelineVersionTemplate", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) UpdatePipelineDefaultVersionV1(ctx context.Context, in *UpdatePipelineDefaultVersionRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/api.PipelineService/UpdatePipelineDefaultVersionV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// PipelineServiceServer is the server API for PipelineService service. -type PipelineServiceServer interface { - // Creates a pipeline. - CreatePipelineV1(context.Context, *CreatePipelineRequest) (*Pipeline, error) - // Finds a specific pipeline by ID. - GetPipelineV1(context.Context, *GetPipelineRequest) (*Pipeline, error) - // Finds a pipeline by Name (and namespace) - GetPipelineByNameV1(context.Context, *GetPipelineByNameRequest) (*Pipeline, error) - // Finds all pipelines. - ListPipelinesV1(context.Context, *ListPipelinesRequest) (*ListPipelinesResponse, error) - // Deletes a pipeline and its pipeline versions. - DeletePipelineV1(context.Context, *DeletePipelineRequest) (*emptypb.Empty, error) - // Returns a single YAML template that contains the description, parameters, and metadata associated with the pipeline provided. - GetTemplate(context.Context, *GetTemplateRequest) (*GetTemplateResponse, error) - // Adds a pipeline version to the specified pipeline. 
- CreatePipelineVersionV1(context.Context, *CreatePipelineVersionRequest) (*PipelineVersion, error) - // Gets a pipeline version by pipeline version ID. - GetPipelineVersionV1(context.Context, *GetPipelineVersionRequest) (*PipelineVersion, error) - // Lists all pipeline versions of a given pipeline. - ListPipelineVersionsV1(context.Context, *ListPipelineVersionsRequest) (*ListPipelineVersionsResponse, error) - // Deletes a pipeline version by pipeline version ID. If the deleted pipeline - // version is the default pipeline version, the pipeline's default version - // changes to the pipeline's most recent pipeline version. If there are no - // remaining pipeline versions, the pipeline will have no default version. - // Examines the run_service_api.ipynb notebook to learn more about creating a - // run using a pipeline version (https://github.com/kubeflow/pipelines/blob/master/tools/benchmarks/run_service_api.ipynb). - DeletePipelineVersionV1(context.Context, *DeletePipelineVersionRequest) (*emptypb.Empty, error) - // Returns a YAML template that contains the specified pipeline version's description, parameters and metadata. - GetPipelineVersionTemplate(context.Context, *GetPipelineVersionTemplateRequest) (*GetTemplateResponse, error) - // Update the default pipeline version of a specific pipeline. - UpdatePipelineDefaultVersionV1(context.Context, *UpdatePipelineDefaultVersionRequest) (*emptypb.Empty, error) -} - -// UnimplementedPipelineServiceServer can be embedded to have forward compatible implementations. -type UnimplementedPipelineServiceServer struct { -} - -func (*UnimplementedPipelineServiceServer) CreatePipelineV1(context.Context, *CreatePipelineRequest) (*Pipeline, error) { - return nil, status.Errorf(codes.Unimplemented, "method CreatePipelineV1 not implemented") -} -func (*UnimplementedPipelineServiceServer) GetPipelineV1(context.Context, *GetPipelineRequest) (*Pipeline, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetPipelineV1 not implemented") -} -func (*UnimplementedPipelineServiceServer) GetPipelineByNameV1(context.Context, *GetPipelineByNameRequest) (*Pipeline, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetPipelineByNameV1 not implemented") -} -func (*UnimplementedPipelineServiceServer) ListPipelinesV1(context.Context, *ListPipelinesRequest) (*ListPipelinesResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ListPipelinesV1 not implemented") -} -func (*UnimplementedPipelineServiceServer) DeletePipelineV1(context.Context, *DeletePipelineRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method DeletePipelineV1 not implemented") -} -func (*UnimplementedPipelineServiceServer) GetTemplate(context.Context, *GetTemplateRequest) (*GetTemplateResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetTemplate not implemented") -} -func (*UnimplementedPipelineServiceServer) CreatePipelineVersionV1(context.Context, *CreatePipelineVersionRequest) (*PipelineVersion, error) { - return nil, status.Errorf(codes.Unimplemented, "method CreatePipelineVersionV1 not implemented") -} -func (*UnimplementedPipelineServiceServer) GetPipelineVersionV1(context.Context, *GetPipelineVersionRequest) (*PipelineVersion, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetPipelineVersionV1 not implemented") -} -func (*UnimplementedPipelineServiceServer) ListPipelineVersionsV1(context.Context, *ListPipelineVersionsRequest) (*ListPipelineVersionsResponse, error) { - 
return nil, status.Errorf(codes.Unimplemented, "method ListPipelineVersionsV1 not implemented") -} -func (*UnimplementedPipelineServiceServer) DeletePipelineVersionV1(context.Context, *DeletePipelineVersionRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method DeletePipelineVersionV1 not implemented") -} -func (*UnimplementedPipelineServiceServer) GetPipelineVersionTemplate(context.Context, *GetPipelineVersionTemplateRequest) (*GetTemplateResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetPipelineVersionTemplate not implemented") -} -func (*UnimplementedPipelineServiceServer) UpdatePipelineDefaultVersionV1(context.Context, *UpdatePipelineDefaultVersionRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method UpdatePipelineDefaultVersionV1 not implemented") -} - -func RegisterPipelineServiceServer(s *grpc.Server, srv PipelineServiceServer) { - s.RegisterService(&_PipelineService_serviceDesc, srv) -} - -func _PipelineService_CreatePipelineV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CreatePipelineRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).CreatePipelineV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.PipelineService/CreatePipelineV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).CreatePipelineV1(ctx, req.(*CreatePipelineRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_GetPipelineV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetPipelineRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).GetPipelineV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.PipelineService/GetPipelineV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).GetPipelineV1(ctx, req.(*GetPipelineRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_GetPipelineByNameV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetPipelineByNameRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).GetPipelineByNameV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.PipelineService/GetPipelineByNameV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).GetPipelineByNameV1(ctx, req.(*GetPipelineByNameRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_ListPipelinesV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ListPipelinesRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).ListPipelinesV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.PipelineService/ListPipelinesV1", - } 
- handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).ListPipelinesV1(ctx, req.(*ListPipelinesRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_DeletePipelineV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(DeletePipelineRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).DeletePipelineV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.PipelineService/DeletePipelineV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).DeletePipelineV1(ctx, req.(*DeletePipelineRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_GetTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetTemplateRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).GetTemplate(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.PipelineService/GetTemplate", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).GetTemplate(ctx, req.(*GetTemplateRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_CreatePipelineVersionV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CreatePipelineVersionRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).CreatePipelineVersionV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.PipelineService/CreatePipelineVersionV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).CreatePipelineVersionV1(ctx, req.(*CreatePipelineVersionRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_GetPipelineVersionV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetPipelineVersionRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).GetPipelineVersionV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.PipelineService/GetPipelineVersionV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).GetPipelineVersionV1(ctx, req.(*GetPipelineVersionRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_ListPipelineVersionsV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ListPipelineVersionsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).ListPipelineVersionsV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.PipelineService/ListPipelineVersionsV1", - } - handler := func(ctx 
context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).ListPipelineVersionsV1(ctx, req.(*ListPipelineVersionsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_DeletePipelineVersionV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(DeletePipelineVersionRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).DeletePipelineVersionV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.PipelineService/DeletePipelineVersionV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).DeletePipelineVersionV1(ctx, req.(*DeletePipelineVersionRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_GetPipelineVersionTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetPipelineVersionTemplateRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).GetPipelineVersionTemplate(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.PipelineService/GetPipelineVersionTemplate", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).GetPipelineVersionTemplate(ctx, req.(*GetPipelineVersionTemplateRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_UpdatePipelineDefaultVersionV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(UpdatePipelineDefaultVersionRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).UpdatePipelineDefaultVersionV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.PipelineService/UpdatePipelineDefaultVersionV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).UpdatePipelineDefaultVersionV1(ctx, req.(*UpdatePipelineDefaultVersionRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _PipelineService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "api.PipelineService", - HandlerType: (*PipelineServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "CreatePipelineV1", - Handler: _PipelineService_CreatePipelineV1_Handler, - }, - { - MethodName: "GetPipelineV1", - Handler: _PipelineService_GetPipelineV1_Handler, - }, - { - MethodName: "GetPipelineByNameV1", - Handler: _PipelineService_GetPipelineByNameV1_Handler, - }, - { - MethodName: "ListPipelinesV1", - Handler: _PipelineService_ListPipelinesV1_Handler, - }, - { - MethodName: "DeletePipelineV1", - Handler: _PipelineService_DeletePipelineV1_Handler, - }, - { - MethodName: "GetTemplate", - Handler: _PipelineService_GetTemplate_Handler, - }, - { - MethodName: "CreatePipelineVersionV1", - Handler: _PipelineService_CreatePipelineVersionV1_Handler, - }, - { - MethodName: "GetPipelineVersionV1", - Handler: _PipelineService_GetPipelineVersionV1_Handler, - }, - { - MethodName: "ListPipelineVersionsV1", - Handler: _PipelineService_ListPipelineVersionsV1_Handler, - }, - { - 
MethodName: "DeletePipelineVersionV1", - Handler: _PipelineService_DeletePipelineVersionV1_Handler, - }, - { - MethodName: "GetPipelineVersionTemplate", - Handler: _PipelineService_GetPipelineVersionTemplate_Handler, - }, - { - MethodName: "UpdatePipelineDefaultVersionV1", - Handler: _PipelineService_UpdatePipelineDefaultVersionV1_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "backend/api/v1beta1/pipeline.proto", -} diff --git a/backend/api/v1beta1/go_client/pipeline.pb.gw.go b/backend/api/v1beta1/go_client/pipeline.pb.gw.go index ceaccdc1bfa..3a430bd4550 100644 --- a/backend/api/v1beta1/go_client/pipeline.pb.gw.go +++ b/backend/api/v1beta1/go_client/pipeline.pb.gw.go @@ -10,925 +10,744 @@ package go_client import ( "context" + "errors" "io" "net/http" - "github.com/golang/protobuf/descriptor" - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" ) // Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = descriptor.ForMessage -var _ = metadata.Join +var ( + _ codes.Code + _ io.Reader + _ status.Status + _ = errors.New + _ = runtime.String + _ = utilities.NewDoubleArray + _ = metadata.Join +) func request_PipelineService_CreatePipelineV1_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreatePipelineRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Pipeline); err != nil && err != io.EOF { + var ( + protoReq CreatePipelineRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Pipeline); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } msg, err := client.CreatePipelineV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_CreatePipelineV1_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreatePipelineRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Pipeline); err != nil && err != io.EOF { + var ( + protoReq CreatePipelineRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Pipeline); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.CreatePipelineV1(ctx, &protoReq) return 
msg, metadata, err - } func request_PipelineService_GetPipelineV1_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetPipelineRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetPipelineRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := client.GetPipelineV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_GetPipelineV1_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetPipelineRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetPipelineRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := server.GetPipelineV1(ctx, &protoReq) return msg, metadata, err - } func request_PipelineService_GetPipelineByNameV1_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetPipelineByNameRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetPipelineByNameRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["namespace"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["namespace"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace") } - protoReq.Namespace, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err) } - val, ok = pathParams["name"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") } - protoReq.Name, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) } - msg, err := client.GetPipelineByNameV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_GetPipelineByNameV1_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetPipelineByNameRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = 
err + protoReq GetPipelineByNameRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["namespace"] + val, ok := pathParams["namespace"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace") } - protoReq.Namespace, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err) } - val, ok = pathParams["name"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") } - protoReq.Name, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) } - msg, err := server.GetPipelineByNameV1(ctx, &protoReq) return msg, metadata, err - } -var ( - filter_PipelineService_ListPipelinesV1_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} -) +var filter_PipelineService_ListPipelinesV1_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} func request_PipelineService_ListPipelinesV1_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListPipelinesRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListPipelinesRequest + metadata runtime.ServerMetadata + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_ListPipelinesV1_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListPipelinesV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_ListPipelinesV1_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListPipelinesRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListPipelinesRequest + metadata runtime.ServerMetadata + ) if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_ListPipelinesV1_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.ListPipelinesV1(ctx, &protoReq) return msg, metadata, err - } func request_PipelineService_DeletePipelineV1_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeletePipelineRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DeletePipelineRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, 
status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := client.DeletePipelineV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_DeletePipelineV1_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeletePipelineRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DeletePipelineRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := server.DeletePipelineV1(ctx, &protoReq) return msg, metadata, err - } func request_PipelineService_GetTemplate_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetTemplateRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetTemplateRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := client.GetTemplate(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_GetTemplate_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetTemplateRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetTemplateRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := server.GetTemplate(ctx, &protoReq) return msg, metadata, err - } func request_PipelineService_CreatePipelineVersionV1_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreatePipelineVersionRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Version); err != nil && err != io.EOF { + var ( + protoReq CreatePipelineVersionRequest + metadata 
runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Version); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } msg, err := client.CreatePipelineVersionV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_CreatePipelineVersionV1_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreatePipelineVersionRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Version); err != nil && err != io.EOF { + var ( + protoReq CreatePipelineVersionRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Version); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.CreatePipelineVersionV1(ctx, &protoReq) return msg, metadata, err - } func request_PipelineService_GetPipelineVersionV1_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetPipelineVersionRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetPipelineVersionRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["version_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["version_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "version_id") } - protoReq.VersionId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "version_id", err) } - msg, err := client.GetPipelineVersionV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_GetPipelineVersionV1_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetPipelineVersionRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetPipelineVersionRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["version_id"] + val, ok := pathParams["version_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "version_id") } - protoReq.VersionId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "version_id", err) } - msg, err := server.GetPipelineVersionV1(ctx, &protoReq) return msg, metadata, err - } -var ( - filter_PipelineService_ListPipelineVersionsV1_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} -) +var filter_PipelineService_ListPipelineVersionsV1_0 = 
&utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} func request_PipelineService_ListPipelineVersionsV1_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListPipelineVersionsRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListPipelineVersionsRequest + metadata runtime.ServerMetadata + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_ListPipelineVersionsV1_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListPipelineVersionsV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_ListPipelineVersionsV1_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListPipelineVersionsRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListPipelineVersionsRequest + metadata runtime.ServerMetadata + ) if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_ListPipelineVersionsV1_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.ListPipelineVersionsV1(ctx, &protoReq) return msg, metadata, err - } func request_PipelineService_DeletePipelineVersionV1_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeletePipelineVersionRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DeletePipelineVersionRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["version_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["version_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "version_id") } - protoReq.VersionId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "version_id", err) } - msg, err := client.DeletePipelineVersionV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_DeletePipelineVersionV1_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeletePipelineVersionRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DeletePipelineVersionRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["version_id"] + val, ok := pathParams["version_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "version_id") } - protoReq.VersionId, 
err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "version_id", err) } - msg, err := server.DeletePipelineVersionV1(ctx, &protoReq) return msg, metadata, err - } func request_PipelineService_GetPipelineVersionTemplate_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetPipelineVersionTemplateRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetPipelineVersionTemplateRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["version_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["version_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "version_id") } - protoReq.VersionId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "version_id", err) } - msg, err := client.GetPipelineVersionTemplate(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_GetPipelineVersionTemplate_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetPipelineVersionTemplateRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetPipelineVersionTemplateRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["version_id"] + val, ok := pathParams["version_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "version_id") } - protoReq.VersionId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "version_id", err) } - msg, err := server.GetPipelineVersionTemplate(ctx, &protoReq) return msg, metadata, err - } func request_PipelineService_UpdatePipelineDefaultVersionV1_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq UpdatePipelineDefaultVersionRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq UpdatePipelineDefaultVersionRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["pipeline_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["pipeline_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "pipeline_id") } - protoReq.PipelineId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "pipeline_id", err) } - val, ok = pathParams["version_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "version_id") } - protoReq.VersionId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "version_id", err) } - msg, err := 
client.UpdatePipelineDefaultVersionV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_UpdatePipelineDefaultVersionV1_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq UpdatePipelineDefaultVersionRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq UpdatePipelineDefaultVersionRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["pipeline_id"] + val, ok := pathParams["pipeline_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "pipeline_id") } - protoReq.PipelineId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "pipeline_id", err) } - val, ok = pathParams["version_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "version_id") } - protoReq.VersionId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "version_id", err) } - msg, err := server.UpdatePipelineDefaultVersionV1(ctx, &protoReq) return msg, metadata, err - } // RegisterPipelineServiceHandlerServer registers the http handlers for service PipelineService to "mux". // UnaryRPC :call PipelineServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. // Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterPipelineServiceHandlerFromEndpoint instead. +// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call. 
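For reference only (not part of this generated diff): the doc comment above recommends RegisterPipelineServiceHandlerFromEndpoint over in-process registration, and a minimal sketch of that wiring is below. The gateway helper and runtime.NewServeMux come from this file and the grpc-gateway v2 runtime; the gRPC endpoint address, listen port, import alias, and insecure dial option are illustrative assumptions, not values taken from this PR.

// Illustrative sketch, assuming a KFP API server gRPC endpoint on localhost:8887
// and serving the REST gateway on :8888. Not part of the generated code.
package main

import (
	"context"
	"log"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"

	api "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	// ServeMux translates HTTP/JSON requests (e.g. GET /apis/v1beta1/pipelines)
	// into gRPC calls against the PipelineService endpoint.
	mux := runtime.NewServeMux()
	opts := []grpc.DialOption{grpc.WithTransportCredentials(insecure.NewCredentials())}

	// Recommended registration path per the doc comment above: dial the gRPC
	// server and proxy through it, so interceptors and other gRPC features keep working.
	if err := api.RegisterPipelineServiceHandlerFromEndpoint(ctx, mux, "localhost:8887", opts); err != nil {
		log.Fatalf("failed to register PipelineService gateway: %v", err)
	}
	log.Fatal(http.ListenAndServe(":8888", mux))
}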
func RegisterPipelineServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server PipelineServiceServer) error { - - mux.Handle("POST", pattern_PipelineService_CreatePipelineV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_PipelineService_CreatePipelineV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.PipelineService/CreatePipelineV1", runtime.WithHTTPPathPattern("/apis/v1beta1/pipelines")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_CreatePipelineV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_CreatePipelineV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_CreatePipelineV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_CreatePipelineV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_PipelineService_GetPipelineV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_GetPipelineV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.PipelineService/GetPipelineV1", runtime.WithHTTPPathPattern("/apis/v1beta1/pipelines/{id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_GetPipelineV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_GetPipelineV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_GetPipelineV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- + forward_PipelineService_GetPipelineV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_PipelineService_GetPipelineByNameV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_GetPipelineByNameV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.PipelineService/GetPipelineByNameV1", runtime.WithHTTPPathPattern("/apis/v1beta1/namespaces/{namespace}/pipelines/{name}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_GetPipelineByNameV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_GetPipelineByNameV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_GetPipelineByNameV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_GetPipelineByNameV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_PipelineService_ListPipelinesV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_ListPipelinesV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.PipelineService/ListPipelinesV1", runtime.WithHTTPPathPattern("/apis/v1beta1/pipelines")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_ListPipelinesV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_ListPipelinesV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_ListPipelinesV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_ListPipelinesV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("DELETE", pattern_PipelineService_DeletePipelineV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodDelete, pattern_PipelineService_DeletePipelineV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.PipelineService/DeletePipelineV1", runtime.WithHTTPPathPattern("/apis/v1beta1/pipelines/{id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_DeletePipelineV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_DeletePipelineV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_DeletePipelineV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- + forward_PipelineService_DeletePipelineV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_PipelineService_GetTemplate_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_GetTemplate_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.PipelineService/GetTemplate", runtime.WithHTTPPathPattern("/apis/v1beta1/pipelines/{id}/templates")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_GetTemplate_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_GetTemplate_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_GetTemplate_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_GetTemplate_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_PipelineService_CreatePipelineVersionV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_PipelineService_CreatePipelineVersionV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.PipelineService/CreatePipelineVersionV1", runtime.WithHTTPPathPattern("/apis/v1beta1/pipeline_versions")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_CreatePipelineVersionV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_CreatePipelineVersionV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_CreatePipelineVersionV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_CreatePipelineVersionV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_PipelineService_GetPipelineVersionV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_GetPipelineVersionV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.PipelineService/GetPipelineVersionV1", runtime.WithHTTPPathPattern("/apis/v1beta1/pipeline_versions/{version_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_GetPipelineVersionV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_GetPipelineVersionV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_GetPipelineVersionV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- + forward_PipelineService_GetPipelineVersionV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_PipelineService_ListPipelineVersionsV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_ListPipelineVersionsV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.PipelineService/ListPipelineVersionsV1", runtime.WithHTTPPathPattern("/apis/v1beta1/pipeline_versions")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_ListPipelineVersionsV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_ListPipelineVersionsV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_ListPipelineVersionsV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_ListPipelineVersionsV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("DELETE", pattern_PipelineService_DeletePipelineVersionV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodDelete, pattern_PipelineService_DeletePipelineVersionV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.PipelineService/DeletePipelineVersionV1", runtime.WithHTTPPathPattern("/apis/v1beta1/pipeline_versions/{version_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_DeletePipelineVersionV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_DeletePipelineVersionV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_DeletePipelineVersionV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_DeletePipelineVersionV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_PipelineService_GetPipelineVersionTemplate_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_GetPipelineVersionTemplate_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.PipelineService/GetPipelineVersionTemplate", runtime.WithHTTPPathPattern("/apis/v1beta1/pipeline_versions/{version_id}/templates")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_GetPipelineVersionTemplate_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_GetPipelineVersionTemplate_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_GetPipelineVersionTemplate_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_GetPipelineVersionTemplate_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_PipelineService_UpdatePipelineDefaultVersionV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_PipelineService_UpdatePipelineDefaultVersionV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.PipelineService/UpdatePipelineDefaultVersionV1", runtime.WithHTTPPathPattern("/apis/v1beta1/pipelines/{pipeline_id}/default_version/{version_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_UpdatePipelineDefaultVersionV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_UpdatePipelineDefaultVersionV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_UpdatePipelineDefaultVersionV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_UpdatePipelineDefaultVersionV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) return nil @@ -937,25 +756,24 @@ func RegisterPipelineServiceHandlerServer(ctx context.Context, mux *runtime.Serv // RegisterPipelineServiceHandlerFromEndpoint is same as RegisterPipelineServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterPipelineServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) + conn, err := grpc.NewClient(endpoint, opts...) if err != nil { return err } defer func() { if err != nil { if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } return } go func() { <-ctx.Done() if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } }() }() - return RegisterPipelineServiceHandler(ctx, mux, conn) } @@ -969,300 +787,241 @@ func RegisterPipelineServiceHandler(ctx context.Context, mux *runtime.ServeMux, // to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "PipelineServiceClient". // Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "PipelineServiceClient" // doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "PipelineServiceClient" to call the correct interceptors. +// "PipelineServiceClient" to call the correct interceptors. 
This client ignores the HTTP middlewares. func RegisterPipelineServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client PipelineServiceClient) error { - - mux.Handle("POST", pattern_PipelineService_CreatePipelineV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_PipelineService_CreatePipelineV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.PipelineService/CreatePipelineV1", runtime.WithHTTPPathPattern("/apis/v1beta1/pipelines")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_CreatePipelineV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_CreatePipelineV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_CreatePipelineV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_CreatePipelineV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_PipelineService_GetPipelineV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_GetPipelineV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.PipelineService/GetPipelineV1", runtime.WithHTTPPathPattern("/apis/v1beta1/pipelines/{id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_GetPipelineV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_GetPipelineV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_GetPipelineV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_GetPipelineV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_PipelineService_GetPipelineByNameV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_GetPipelineByNameV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.PipelineService/GetPipelineByNameV1", runtime.WithHTTPPathPattern("/apis/v1beta1/namespaces/{namespace}/pipelines/{name}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_GetPipelineByNameV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_GetPipelineByNameV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_GetPipelineByNameV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_GetPipelineByNameV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_PipelineService_ListPipelinesV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_ListPipelinesV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.PipelineService/ListPipelinesV1", runtime.WithHTTPPathPattern("/apis/v1beta1/pipelines")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_ListPipelinesV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_ListPipelinesV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_ListPipelinesV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_ListPipelinesV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("DELETE", pattern_PipelineService_DeletePipelineV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodDelete, pattern_PipelineService_DeletePipelineV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.PipelineService/DeletePipelineV1", runtime.WithHTTPPathPattern("/apis/v1beta1/pipelines/{id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_DeletePipelineV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_DeletePipelineV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_DeletePipelineV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_DeletePipelineV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_PipelineService_GetTemplate_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_GetTemplate_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.PipelineService/GetTemplate", runtime.WithHTTPPathPattern("/apis/v1beta1/pipelines/{id}/templates")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_GetTemplate_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_GetTemplate_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_GetTemplate_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_GetTemplate_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_PipelineService_CreatePipelineVersionV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_PipelineService_CreatePipelineVersionV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.PipelineService/CreatePipelineVersionV1", runtime.WithHTTPPathPattern("/apis/v1beta1/pipeline_versions")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_CreatePipelineVersionV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_CreatePipelineVersionV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_CreatePipelineVersionV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_CreatePipelineVersionV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_PipelineService_GetPipelineVersionV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_GetPipelineVersionV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.PipelineService/GetPipelineVersionV1", runtime.WithHTTPPathPattern("/apis/v1beta1/pipeline_versions/{version_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_GetPipelineVersionV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_GetPipelineVersionV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_GetPipelineVersionV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_GetPipelineVersionV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_PipelineService_ListPipelineVersionsV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_ListPipelineVersionsV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.PipelineService/ListPipelineVersionsV1", runtime.WithHTTPPathPattern("/apis/v1beta1/pipeline_versions")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_ListPipelineVersionsV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_ListPipelineVersionsV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_ListPipelineVersionsV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_ListPipelineVersionsV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("DELETE", pattern_PipelineService_DeletePipelineVersionV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodDelete, pattern_PipelineService_DeletePipelineVersionV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.PipelineService/DeletePipelineVersionV1", runtime.WithHTTPPathPattern("/apis/v1beta1/pipeline_versions/{version_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_DeletePipelineVersionV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_DeletePipelineVersionV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_DeletePipelineVersionV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_DeletePipelineVersionV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_PipelineService_GetPipelineVersionTemplate_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_GetPipelineVersionTemplate_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.PipelineService/GetPipelineVersionTemplate", runtime.WithHTTPPathPattern("/apis/v1beta1/pipeline_versions/{version_id}/templates")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_GetPipelineVersionTemplate_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_GetPipelineVersionTemplate_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_GetPipelineVersionTemplate_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_GetPipelineVersionTemplate_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_PipelineService_UpdatePipelineDefaultVersionV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_PipelineService_UpdatePipelineDefaultVersionV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.PipelineService/UpdatePipelineDefaultVersionV1", runtime.WithHTTPPathPattern("/apis/v1beta1/pipelines/{pipeline_id}/default_version/{version_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_UpdatePipelineDefaultVersionV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_UpdatePipelineDefaultVersionV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_UpdatePipelineDefaultVersionV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_UpdatePipelineDefaultVersionV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - return nil } var ( - pattern_PipelineService_CreatePipelineV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "pipelines"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_GetPipelineV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "pipelines", "id"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_GetPipelineByNameV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4, 1, 0, 4, 1, 5, 5}, []string{"apis", "v1beta1", "namespaces", "namespace", "pipelines", "name"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_ListPipelinesV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "pipelines"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_DeletePipelineV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "pipelines", "id"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_GetTemplate_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4}, []string{"apis", "v1beta1", "pipelines", "id", "templates"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_CreatePipelineVersionV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "pipeline_versions"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_GetPipelineVersionV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "pipeline_versions", "version_id"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_ListPipelineVersionsV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "pipeline_versions"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_DeletePipelineVersionV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "pipeline_versions", "version_id"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_GetPipelineVersionTemplate_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4}, []string{"apis", "v1beta1", "pipeline_versions", "version_id", "templates"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_UpdatePipelineDefaultVersionV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4, 1, 0, 4, 1, 5, 5}, []string{"apis", "v1beta1", "pipelines", "pipeline_id", "default_version", "version_id"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_PipelineService_CreatePipelineV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "pipelines"}, "")) + pattern_PipelineService_GetPipelineV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "pipelines", "id"}, "")) + pattern_PipelineService_GetPipelineByNameV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4, 1, 0, 4, 1, 5, 5}, []string{"apis", "v1beta1", "namespaces", "namespace", "pipelines", "name"}, "")) + pattern_PipelineService_ListPipelinesV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", 
"v1beta1", "pipelines"}, "")) + pattern_PipelineService_DeletePipelineV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "pipelines", "id"}, "")) + pattern_PipelineService_GetTemplate_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4}, []string{"apis", "v1beta1", "pipelines", "id", "templates"}, "")) + pattern_PipelineService_CreatePipelineVersionV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "pipeline_versions"}, "")) + pattern_PipelineService_GetPipelineVersionV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "pipeline_versions", "version_id"}, "")) + pattern_PipelineService_ListPipelineVersionsV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "pipeline_versions"}, "")) + pattern_PipelineService_DeletePipelineVersionV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "pipeline_versions", "version_id"}, "")) + pattern_PipelineService_GetPipelineVersionTemplate_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4}, []string{"apis", "v1beta1", "pipeline_versions", "version_id", "templates"}, "")) + pattern_PipelineService_UpdatePipelineDefaultVersionV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4, 1, 0, 4, 1, 5, 5}, []string{"apis", "v1beta1", "pipelines", "pipeline_id", "default_version", "version_id"}, "")) ) var ( - forward_PipelineService_CreatePipelineV1_0 = runtime.ForwardResponseMessage - - forward_PipelineService_GetPipelineV1_0 = runtime.ForwardResponseMessage - - forward_PipelineService_GetPipelineByNameV1_0 = runtime.ForwardResponseMessage - - forward_PipelineService_ListPipelinesV1_0 = runtime.ForwardResponseMessage - - forward_PipelineService_DeletePipelineV1_0 = runtime.ForwardResponseMessage - - forward_PipelineService_GetTemplate_0 = runtime.ForwardResponseMessage - - forward_PipelineService_CreatePipelineVersionV1_0 = runtime.ForwardResponseMessage - - forward_PipelineService_GetPipelineVersionV1_0 = runtime.ForwardResponseMessage - - forward_PipelineService_ListPipelineVersionsV1_0 = runtime.ForwardResponseMessage - - forward_PipelineService_DeletePipelineVersionV1_0 = runtime.ForwardResponseMessage - - forward_PipelineService_GetPipelineVersionTemplate_0 = runtime.ForwardResponseMessage - + forward_PipelineService_CreatePipelineV1_0 = runtime.ForwardResponseMessage + forward_PipelineService_GetPipelineV1_0 = runtime.ForwardResponseMessage + forward_PipelineService_GetPipelineByNameV1_0 = runtime.ForwardResponseMessage + forward_PipelineService_ListPipelinesV1_0 = runtime.ForwardResponseMessage + forward_PipelineService_DeletePipelineV1_0 = runtime.ForwardResponseMessage + forward_PipelineService_GetTemplate_0 = runtime.ForwardResponseMessage + forward_PipelineService_CreatePipelineVersionV1_0 = runtime.ForwardResponseMessage + forward_PipelineService_GetPipelineVersionV1_0 = runtime.ForwardResponseMessage + forward_PipelineService_ListPipelineVersionsV1_0 = runtime.ForwardResponseMessage + forward_PipelineService_DeletePipelineVersionV1_0 = runtime.ForwardResponseMessage + forward_PipelineService_GetPipelineVersionTemplate_0 = runtime.ForwardResponseMessage forward_PipelineService_UpdatePipelineDefaultVersionV1_0 = 
runtime.ForwardResponseMessage ) diff --git a/backend/api/v1beta1/go_client/pipeline_grpc.pb.go b/backend/api/v1beta1/go_client/pipeline_grpc.pb.go new file mode 100644 index 00000000000..87f741fe09c --- /dev/null +++ b/backend/api/v1beta1/go_client/pipeline_grpc.pb.go @@ -0,0 +1,588 @@ +// Copyright 2018 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v6.31.1 +// source: backend/api/v1beta1/pipeline.proto + +package go_client + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + emptypb "google.golang.org/protobuf/types/known/emptypb" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + PipelineService_CreatePipelineV1_FullMethodName = "/api.PipelineService/CreatePipelineV1" + PipelineService_GetPipelineV1_FullMethodName = "/api.PipelineService/GetPipelineV1" + PipelineService_GetPipelineByNameV1_FullMethodName = "/api.PipelineService/GetPipelineByNameV1" + PipelineService_ListPipelinesV1_FullMethodName = "/api.PipelineService/ListPipelinesV1" + PipelineService_DeletePipelineV1_FullMethodName = "/api.PipelineService/DeletePipelineV1" + PipelineService_GetTemplate_FullMethodName = "/api.PipelineService/GetTemplate" + PipelineService_CreatePipelineVersionV1_FullMethodName = "/api.PipelineService/CreatePipelineVersionV1" + PipelineService_GetPipelineVersionV1_FullMethodName = "/api.PipelineService/GetPipelineVersionV1" + PipelineService_ListPipelineVersionsV1_FullMethodName = "/api.PipelineService/ListPipelineVersionsV1" + PipelineService_DeletePipelineVersionV1_FullMethodName = "/api.PipelineService/DeletePipelineVersionV1" + PipelineService_GetPipelineVersionTemplate_FullMethodName = "/api.PipelineService/GetPipelineVersionTemplate" + PipelineService_UpdatePipelineDefaultVersionV1_FullMethodName = "/api.PipelineService/UpdatePipelineDefaultVersionV1" +) + +// PipelineServiceClient is the client API for PipelineService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type PipelineServiceClient interface { + // Creates a pipeline. + CreatePipelineV1(ctx context.Context, in *CreatePipelineRequest, opts ...grpc.CallOption) (*Pipeline, error) + // Finds a specific pipeline by ID. + GetPipelineV1(ctx context.Context, in *GetPipelineRequest, opts ...grpc.CallOption) (*Pipeline, error) + // Finds a pipeline by Name (and namespace) + GetPipelineByNameV1(ctx context.Context, in *GetPipelineByNameRequest, opts ...grpc.CallOption) (*Pipeline, error) + // Finds all pipelines. 
+ ListPipelinesV1(ctx context.Context, in *ListPipelinesRequest, opts ...grpc.CallOption) (*ListPipelinesResponse, error) + // Deletes a pipeline and its pipeline versions. + DeletePipelineV1(ctx context.Context, in *DeletePipelineRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + // Returns a single YAML template that contains the description, parameters, and metadata associated with the pipeline provided. + GetTemplate(ctx context.Context, in *GetTemplateRequest, opts ...grpc.CallOption) (*GetTemplateResponse, error) + // Adds a pipeline version to the specified pipeline. + CreatePipelineVersionV1(ctx context.Context, in *CreatePipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) + // Gets a pipeline version by pipeline version ID. + GetPipelineVersionV1(ctx context.Context, in *GetPipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) + // Lists all pipeline versions of a given pipeline. + ListPipelineVersionsV1(ctx context.Context, in *ListPipelineVersionsRequest, opts ...grpc.CallOption) (*ListPipelineVersionsResponse, error) + // Deletes a pipeline version by pipeline version ID. If the deleted pipeline + // version is the default pipeline version, the pipeline's default version + // changes to the pipeline's most recent pipeline version. If there are no + // remaining pipeline versions, the pipeline will have no default version. + // Examines the run_service_api.ipynb notebook to learn more about creating a + // run using a pipeline version (https://github.com/kubeflow/pipelines/blob/master/tools/benchmarks/run_service_api.ipynb). + DeletePipelineVersionV1(ctx context.Context, in *DeletePipelineVersionRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + // Returns a YAML template that contains the specified pipeline version's description, parameters and metadata. + GetPipelineVersionTemplate(ctx context.Context, in *GetPipelineVersionTemplateRequest, opts ...grpc.CallOption) (*GetTemplateResponse, error) + // Update the default pipeline version of a specific pipeline. + UpdatePipelineDefaultVersionV1(ctx context.Context, in *UpdatePipelineDefaultVersionRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) +} + +type pipelineServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewPipelineServiceClient(cc grpc.ClientConnInterface) PipelineServiceClient { + return &pipelineServiceClient{cc} +} + +func (c *pipelineServiceClient) CreatePipelineV1(ctx context.Context, in *CreatePipelineRequest, opts ...grpc.CallOption) (*Pipeline, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Pipeline) + err := c.cc.Invoke(ctx, PipelineService_CreatePipelineV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) GetPipelineV1(ctx context.Context, in *GetPipelineRequest, opts ...grpc.CallOption) (*Pipeline, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Pipeline) + err := c.cc.Invoke(ctx, PipelineService_GetPipelineV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) GetPipelineByNameV1(ctx context.Context, in *GetPipelineByNameRequest, opts ...grpc.CallOption) (*Pipeline, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Pipeline) + err := c.cc.Invoke(ctx, PipelineService_GetPipelineByNameV1_FullMethodName, in, out, cOpts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) ListPipelinesV1(ctx context.Context, in *ListPipelinesRequest, opts ...grpc.CallOption) (*ListPipelinesResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ListPipelinesResponse) + err := c.cc.Invoke(ctx, PipelineService_ListPipelinesV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) DeletePipelineV1(ctx context.Context, in *DeletePipelineRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, PipelineService_DeletePipelineV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) GetTemplate(ctx context.Context, in *GetTemplateRequest, opts ...grpc.CallOption) (*GetTemplateResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(GetTemplateResponse) + err := c.cc.Invoke(ctx, PipelineService_GetTemplate_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) CreatePipelineVersionV1(ctx context.Context, in *CreatePipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(PipelineVersion) + err := c.cc.Invoke(ctx, PipelineService_CreatePipelineVersionV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) GetPipelineVersionV1(ctx context.Context, in *GetPipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(PipelineVersion) + err := c.cc.Invoke(ctx, PipelineService_GetPipelineVersionV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) ListPipelineVersionsV1(ctx context.Context, in *ListPipelineVersionsRequest, opts ...grpc.CallOption) (*ListPipelineVersionsResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ListPipelineVersionsResponse) + err := c.cc.Invoke(ctx, PipelineService_ListPipelineVersionsV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) DeletePipelineVersionV1(ctx context.Context, in *DeletePipelineVersionRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, PipelineService_DeletePipelineVersionV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) GetPipelineVersionTemplate(ctx context.Context, in *GetPipelineVersionTemplateRequest, opts ...grpc.CallOption) (*GetTemplateResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(GetTemplateResponse) + err := c.cc.Invoke(ctx, PipelineService_GetPipelineVersionTemplate_FullMethodName, in, out, cOpts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) UpdatePipelineDefaultVersionV1(ctx context.Context, in *UpdatePipelineDefaultVersionRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, PipelineService_UpdatePipelineDefaultVersionV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// PipelineServiceServer is the server API for PipelineService service. +// All implementations must embed UnimplementedPipelineServiceServer +// for forward compatibility. +type PipelineServiceServer interface { + // Creates a pipeline. + CreatePipelineV1(context.Context, *CreatePipelineRequest) (*Pipeline, error) + // Finds a specific pipeline by ID. + GetPipelineV1(context.Context, *GetPipelineRequest) (*Pipeline, error) + // Finds a pipeline by Name (and namespace) + GetPipelineByNameV1(context.Context, *GetPipelineByNameRequest) (*Pipeline, error) + // Finds all pipelines. + ListPipelinesV1(context.Context, *ListPipelinesRequest) (*ListPipelinesResponse, error) + // Deletes a pipeline and its pipeline versions. + DeletePipelineV1(context.Context, *DeletePipelineRequest) (*emptypb.Empty, error) + // Returns a single YAML template that contains the description, parameters, and metadata associated with the pipeline provided. + GetTemplate(context.Context, *GetTemplateRequest) (*GetTemplateResponse, error) + // Adds a pipeline version to the specified pipeline. + CreatePipelineVersionV1(context.Context, *CreatePipelineVersionRequest) (*PipelineVersion, error) + // Gets a pipeline version by pipeline version ID. + GetPipelineVersionV1(context.Context, *GetPipelineVersionRequest) (*PipelineVersion, error) + // Lists all pipeline versions of a given pipeline. + ListPipelineVersionsV1(context.Context, *ListPipelineVersionsRequest) (*ListPipelineVersionsResponse, error) + // Deletes a pipeline version by pipeline version ID. If the deleted pipeline + // version is the default pipeline version, the pipeline's default version + // changes to the pipeline's most recent pipeline version. If there are no + // remaining pipeline versions, the pipeline will have no default version. + // Examines the run_service_api.ipynb notebook to learn more about creating a + // run using a pipeline version (https://github.com/kubeflow/pipelines/blob/master/tools/benchmarks/run_service_api.ipynb). + DeletePipelineVersionV1(context.Context, *DeletePipelineVersionRequest) (*emptypb.Empty, error) + // Returns a YAML template that contains the specified pipeline version's description, parameters and metadata. + GetPipelineVersionTemplate(context.Context, *GetPipelineVersionTemplateRequest) (*GetTemplateResponse, error) + // Update the default pipeline version of a specific pipeline. + UpdatePipelineDefaultVersionV1(context.Context, *UpdatePipelineDefaultVersionRequest) (*emptypb.Empty, error) + mustEmbedUnimplementedPipelineServiceServer() +} + +// UnimplementedPipelineServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. 
+type UnimplementedPipelineServiceServer struct{} + +func (UnimplementedPipelineServiceServer) CreatePipelineV1(context.Context, *CreatePipelineRequest) (*Pipeline, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreatePipelineV1 not implemented") +} +func (UnimplementedPipelineServiceServer) GetPipelineV1(context.Context, *GetPipelineRequest) (*Pipeline, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetPipelineV1 not implemented") +} +func (UnimplementedPipelineServiceServer) GetPipelineByNameV1(context.Context, *GetPipelineByNameRequest) (*Pipeline, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetPipelineByNameV1 not implemented") +} +func (UnimplementedPipelineServiceServer) ListPipelinesV1(context.Context, *ListPipelinesRequest) (*ListPipelinesResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListPipelinesV1 not implemented") +} +func (UnimplementedPipelineServiceServer) DeletePipelineV1(context.Context, *DeletePipelineRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeletePipelineV1 not implemented") +} +func (UnimplementedPipelineServiceServer) GetTemplate(context.Context, *GetTemplateRequest) (*GetTemplateResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetTemplate not implemented") +} +func (UnimplementedPipelineServiceServer) CreatePipelineVersionV1(context.Context, *CreatePipelineVersionRequest) (*PipelineVersion, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreatePipelineVersionV1 not implemented") +} +func (UnimplementedPipelineServiceServer) GetPipelineVersionV1(context.Context, *GetPipelineVersionRequest) (*PipelineVersion, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetPipelineVersionV1 not implemented") +} +func (UnimplementedPipelineServiceServer) ListPipelineVersionsV1(context.Context, *ListPipelineVersionsRequest) (*ListPipelineVersionsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListPipelineVersionsV1 not implemented") +} +func (UnimplementedPipelineServiceServer) DeletePipelineVersionV1(context.Context, *DeletePipelineVersionRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeletePipelineVersionV1 not implemented") +} +func (UnimplementedPipelineServiceServer) GetPipelineVersionTemplate(context.Context, *GetPipelineVersionTemplateRequest) (*GetTemplateResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetPipelineVersionTemplate not implemented") +} +func (UnimplementedPipelineServiceServer) UpdatePipelineDefaultVersionV1(context.Context, *UpdatePipelineDefaultVersionRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method UpdatePipelineDefaultVersionV1 not implemented") +} +func (UnimplementedPipelineServiceServer) mustEmbedUnimplementedPipelineServiceServer() {} +func (UnimplementedPipelineServiceServer) testEmbeddedByValue() {} + +// UnsafePipelineServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to PipelineServiceServer will +// result in compilation errors. 
+type UnsafePipelineServiceServer interface { + mustEmbedUnimplementedPipelineServiceServer() +} + +func RegisterPipelineServiceServer(s grpc.ServiceRegistrar, srv PipelineServiceServer) { + // If the following call pancis, it indicates UnimplementedPipelineServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. + if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&PipelineService_ServiceDesc, srv) +} + +func _PipelineService_CreatePipelineV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreatePipelineRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).CreatePipelineV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_CreatePipelineV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).CreatePipelineV1(ctx, req.(*CreatePipelineRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_GetPipelineV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetPipelineRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).GetPipelineV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_GetPipelineV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).GetPipelineV1(ctx, req.(*GetPipelineRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_GetPipelineByNameV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetPipelineByNameRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).GetPipelineByNameV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_GetPipelineByNameV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).GetPipelineByNameV1(ctx, req.(*GetPipelineByNameRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_ListPipelinesV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListPipelinesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).ListPipelinesV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_ListPipelinesV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).ListPipelinesV1(ctx, req.(*ListPipelinesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_DeletePipelineV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, 
interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeletePipelineRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).DeletePipelineV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_DeletePipelineV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).DeletePipelineV1(ctx, req.(*DeletePipelineRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_GetTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).GetTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_GetTemplate_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).GetTemplate(ctx, req.(*GetTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_CreatePipelineVersionV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreatePipelineVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).CreatePipelineVersionV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_CreatePipelineVersionV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).CreatePipelineVersionV1(ctx, req.(*CreatePipelineVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_GetPipelineVersionV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetPipelineVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).GetPipelineVersionV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_GetPipelineVersionV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).GetPipelineVersionV1(ctx, req.(*GetPipelineVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_ListPipelineVersionsV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListPipelineVersionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).ListPipelineVersionsV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_ListPipelineVersionsV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).ListPipelineVersionsV1(ctx, req.(*ListPipelineVersionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_DeletePipelineVersionV1_Handler(srv interface{}, ctx context.Context, 
dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeletePipelineVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).DeletePipelineVersionV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_DeletePipelineVersionV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).DeletePipelineVersionV1(ctx, req.(*DeletePipelineVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_GetPipelineVersionTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetPipelineVersionTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).GetPipelineVersionTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_GetPipelineVersionTemplate_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).GetPipelineVersionTemplate(ctx, req.(*GetPipelineVersionTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_UpdatePipelineDefaultVersionV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdatePipelineDefaultVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).UpdatePipelineDefaultVersionV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_UpdatePipelineDefaultVersionV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).UpdatePipelineDefaultVersionV1(ctx, req.(*UpdatePipelineDefaultVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// PipelineService_ServiceDesc is the grpc.ServiceDesc for PipelineService service. 
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var PipelineService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "api.PipelineService", + HandlerType: (*PipelineServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreatePipelineV1", + Handler: _PipelineService_CreatePipelineV1_Handler, + }, + { + MethodName: "GetPipelineV1", + Handler: _PipelineService_GetPipelineV1_Handler, + }, + { + MethodName: "GetPipelineByNameV1", + Handler: _PipelineService_GetPipelineByNameV1_Handler, + }, + { + MethodName: "ListPipelinesV1", + Handler: _PipelineService_ListPipelinesV1_Handler, + }, + { + MethodName: "DeletePipelineV1", + Handler: _PipelineService_DeletePipelineV1_Handler, + }, + { + MethodName: "GetTemplate", + Handler: _PipelineService_GetTemplate_Handler, + }, + { + MethodName: "CreatePipelineVersionV1", + Handler: _PipelineService_CreatePipelineVersionV1_Handler, + }, + { + MethodName: "GetPipelineVersionV1", + Handler: _PipelineService_GetPipelineVersionV1_Handler, + }, + { + MethodName: "ListPipelineVersionsV1", + Handler: _PipelineService_ListPipelineVersionsV1_Handler, + }, + { + MethodName: "DeletePipelineVersionV1", + Handler: _PipelineService_DeletePipelineVersionV1_Handler, + }, + { + MethodName: "GetPipelineVersionTemplate", + Handler: _PipelineService_GetPipelineVersionTemplate_Handler, + }, + { + MethodName: "UpdatePipelineDefaultVersionV1", + Handler: _PipelineService_UpdatePipelineDefaultVersionV1_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "backend/api/v1beta1/pipeline.proto", +} diff --git a/backend/api/v1beta1/go_client/pipeline_spec.pb.go b/backend/api/v1beta1/go_client/pipeline_spec.pb.go index 34762d99050..0faa3edada3 100644 --- a/backend/api/v1beta1/go_client/pipeline_spec.pb.go +++ b/backend/api/v1beta1/go_client/pipeline_spec.pb.go @@ -14,8 +14,8 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v1beta1/pipeline_spec.proto package go_client @@ -26,6 +26,7 @@ import ( structpb "google.golang.org/protobuf/types/known/structpb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -36,10 +37,7 @@ const ( ) type PipelineSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Optional input field. The ID of the pipeline user uploaded before. PipelineId string `protobuf:"bytes,1,opt,name=pipeline_id,json=pipelineId,proto3" json:"pipeline_id,omitempty"` // Optional output field. The name of the pipeline. @@ -56,15 +54,15 @@ type PipelineSpec struct { Parameters []*Parameter `protobuf:"bytes,4,rep,name=parameters,proto3" json:"parameters,omitempty"` // Runtime config of the pipeline. 
V2 only RuntimeConfig *PipelineSpec_RuntimeConfig `protobuf:"bytes,6,opt,name=runtime_config,json=runtimeConfig,proto3" json:"runtime_config,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineSpec) Reset() { *x = PipelineSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_pipeline_spec_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_pipeline_spec_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineSpec) String() string { @@ -75,7 +73,7 @@ func (*PipelineSpec) ProtoMessage() {} func (x *PipelineSpec) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_pipeline_spec_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -134,27 +132,24 @@ func (x *PipelineSpec) GetRuntimeConfig() *PipelineSpec_RuntimeConfig { // The runtime config of a PipelineSpec. type PipelineSpec_RuntimeConfig struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The runtime parameters of the PipelineSpec. The parameters will be // used to replace the placeholders // at runtime. - Parameters map[string]*structpb.Value `protobuf:"bytes,1,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Parameters map[string]*structpb.Value `protobuf:"bytes,1,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // A path in a object store bucket which will be treated as the root // output directory of the pipeline. It is used by the system to // generate the paths of output artifacts. 
Ref:(https://www.kubeflow.org/docs/components/pipelines/pipeline-root/) - PipelineRoot string `protobuf:"bytes,2,opt,name=pipeline_root,json=pipelineRoot,proto3" json:"pipeline_root,omitempty"` + PipelineRoot string `protobuf:"bytes,2,opt,name=pipeline_root,json=pipelineRoot,proto3" json:"pipeline_root,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineSpec_RuntimeConfig) Reset() { *x = PipelineSpec_RuntimeConfig{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_pipeline_spec_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_pipeline_spec_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineSpec_RuntimeConfig) String() string { @@ -165,7 +160,7 @@ func (*PipelineSpec_RuntimeConfig) ProtoMessage() {} func (x *PipelineSpec_RuntimeConfig) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_pipeline_spec_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -196,67 +191,42 @@ func (x *PipelineSpec_RuntimeConfig) GetPipelineRoot() string { var File_backend_api_v1beta1_pipeline_spec_proto protoreflect.FileDescriptor -var file_backend_api_v1beta1_pipeline_spec_proto_rawDesc = []byte{ - 0x0a, 0x27, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x73, - 0x70, 0x65, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x61, 0x70, 0x69, 0x1a, 0x23, - 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, - 0x74, 0x61, 0x31, 0x2f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x22, 0x85, 0x04, 0x0a, 0x0c, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x53, 0x70, - 0x65, 0x63, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x69, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x49, 0x64, 0x12, 0x23, 0x0a, 0x0d, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, - 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2b, 0x0a, 0x11, 0x77, 0x6f, 0x72, 0x6b, - 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6d, 0x61, 0x6e, 0x69, 0x66, 0x65, 0x73, 0x74, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x10, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4d, 0x61, 0x6e, - 0x69, 0x66, 0x65, 0x73, 0x74, 0x12, 0x2b, 0x0a, 0x11, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x5f, 0x6d, 0x61, 0x6e, 0x69, 0x66, 0x65, 0x73, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x10, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x4d, 0x61, 0x6e, 0x69, 0x66, 0x65, - 0x73, 0x74, 0x12, 0x2e, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, - 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x73, 0x12, 
0x46, 0x0a, 0x0e, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x63, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x52, 0x75, - 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x0d, 0x72, 0x75, 0x6e, - 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0xdc, 0x01, 0x0a, 0x0d, 0x52, - 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x4f, 0x0a, 0x0a, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x2f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x53, - 0x70, 0x65, 0x63, 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, - 0x79, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x23, 0x0a, - 0x0d, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x6f, - 0x6f, 0x74, 0x1a, 0x55, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2c, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x3d, 0x5a, 0x3b, 0x67, 0x69, 0x74, - 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, - 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, - 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x67, - 0x6f, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v1beta1_pipeline_spec_proto_rawDesc = "" + + "\n" + + "'backend/api/v1beta1/pipeline_spec.proto\x12\x03api\x1a#backend/api/v1beta1/parameter.proto\x1a\x1cgoogle/protobuf/struct.proto\"\x85\x04\n" + + "\fPipelineSpec\x12\x1f\n" + + "\vpipeline_id\x18\x01 \x01(\tR\n" + + "pipelineId\x12#\n" + + "\rpipeline_name\x18\x05 \x01(\tR\fpipelineName\x12+\n" + + "\x11workflow_manifest\x18\x02 \x01(\tR\x10workflowManifest\x12+\n" + + "\x11pipeline_manifest\x18\x03 \x01(\tR\x10pipelineManifest\x12.\n" + + "\n" + + "parameters\x18\x04 \x03(\v2\x0e.api.ParameterR\n" + + "parameters\x12F\n" + + "\x0eruntime_config\x18\x06 \x01(\v2\x1f.api.PipelineSpec.RuntimeConfigR\rruntimeConfig\x1a\xdc\x01\n" + + "\rRuntimeConfig\x12O\n" + + "\n" + + "parameters\x18\x01 \x03(\v2/.api.PipelineSpec.RuntimeConfig.ParametersEntryR\n" + + "parameters\x12#\n" + + "\rpipeline_root\x18\x02 \x01(\tR\fpipelineRoot\x1aU\n" + + "\x0fParametersEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12,\n" + + "\x05value\x18\x02 \x01(\v2\x16.google.protobuf.ValueR\x05value:\x028\x01B=Z;github.com/kubeflow/pipelines/backend/api/v1beta1/go_clientb\x06proto3" var ( file_backend_api_v1beta1_pipeline_spec_proto_rawDescOnce sync.Once - file_backend_api_v1beta1_pipeline_spec_proto_rawDescData = 
file_backend_api_v1beta1_pipeline_spec_proto_rawDesc + file_backend_api_v1beta1_pipeline_spec_proto_rawDescData []byte ) func file_backend_api_v1beta1_pipeline_spec_proto_rawDescGZIP() []byte { file_backend_api_v1beta1_pipeline_spec_proto_rawDescOnce.Do(func() { - file_backend_api_v1beta1_pipeline_spec_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v1beta1_pipeline_spec_proto_rawDescData) + file_backend_api_v1beta1_pipeline_spec_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_pipeline_spec_proto_rawDesc), len(file_backend_api_v1beta1_pipeline_spec_proto_rawDesc))) }) return file_backend_api_v1beta1_pipeline_spec_proto_rawDescData } var file_backend_api_v1beta1_pipeline_spec_proto_msgTypes = make([]protoimpl.MessageInfo, 3) -var file_backend_api_v1beta1_pipeline_spec_proto_goTypes = []interface{}{ +var file_backend_api_v1beta1_pipeline_spec_proto_goTypes = []any{ (*PipelineSpec)(nil), // 0: api.PipelineSpec (*PipelineSpec_RuntimeConfig)(nil), // 1: api.PipelineSpec.RuntimeConfig nil, // 2: api.PipelineSpec.RuntimeConfig.ParametersEntry @@ -281,37 +251,11 @@ func file_backend_api_v1beta1_pipeline_spec_proto_init() { return } file_backend_api_v1beta1_parameter_proto_init() - if !protoimpl.UnsafeEnabled { - file_backend_api_v1beta1_pipeline_spec_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_pipeline_spec_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineSpec_RuntimeConfig); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v1beta1_pipeline_spec_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_pipeline_spec_proto_rawDesc), len(file_backend_api_v1beta1_pipeline_spec_proto_rawDesc)), NumEnums: 0, NumMessages: 3, NumExtensions: 0, @@ -322,7 +266,6 @@ func file_backend_api_v1beta1_pipeline_spec_proto_init() { MessageInfos: file_backend_api_v1beta1_pipeline_spec_proto_msgTypes, }.Build() File_backend_api_v1beta1_pipeline_spec_proto = out.File - file_backend_api_v1beta1_pipeline_spec_proto_rawDesc = nil file_backend_api_v1beta1_pipeline_spec_proto_goTypes = nil file_backend_api_v1beta1_pipeline_spec_proto_depIdxs = nil } diff --git a/backend/api/v1beta1/go_client/report.pb.go b/backend/api/v1beta1/go_client/report.pb.go index fd2f8be385f..97fb8972f09 100644 --- a/backend/api/v1beta1/go_client/report.pb.go +++ b/backend/api/v1beta1/go_client/report.pb.go @@ -14,23 +14,20 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v1beta1/report.proto package go_client import ( - context "context" _ "google.golang.org/genproto/googleapis/api/annotations" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" emptypb "google.golang.org/protobuf/types/known/emptypb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -41,21 +38,18 @@ const ( ) type ReportWorkflowRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Workflow is a workflow custom resource marshalled into a json string. - Workflow string `protobuf:"bytes,1,opt,name=workflow,proto3" json:"workflow,omitempty"` + Workflow string `protobuf:"bytes,1,opt,name=workflow,proto3" json:"workflow,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ReportWorkflowRequest) Reset() { *x = ReportWorkflowRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_report_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_report_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ReportWorkflowRequest) String() string { @@ -66,7 +60,7 @@ func (*ReportWorkflowRequest) ProtoMessage() {} func (x *ReportWorkflowRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_report_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -89,21 +83,18 @@ func (x *ReportWorkflowRequest) GetWorkflow() string { } type ReportScheduledWorkflowRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // ScheduledWorkflow a ScheduledWorkflow resource marshalled into a json string. 
ScheduledWorkflow string `protobuf:"bytes,1,opt,name=scheduled_workflow,json=scheduledWorkflow,proto3" json:"scheduled_workflow,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ReportScheduledWorkflowRequest) Reset() { *x = ReportScheduledWorkflowRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_report_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_report_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ReportScheduledWorkflowRequest) String() string { @@ -114,7 +105,7 @@ func (*ReportScheduledWorkflowRequest) ProtoMessage() {} func (x *ReportScheduledWorkflowRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_report_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -138,60 +129,31 @@ func (x *ReportScheduledWorkflowRequest) GetScheduledWorkflow() string { var File_backend_api_v1beta1_report_proto protoreflect.FileDescriptor -var file_backend_api_v1beta1_report_proto_rawDesc = []byte{ - 0x0a, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x12, 0x03, 0x61, 0x70, 0x69, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, - 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x22, 0x33, 0x0a, 0x15, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x57, 0x6f, 0x72, 0x6b, - 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x77, - 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, - 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x22, 0x4f, 0x0a, 0x1e, 0x52, 0x65, 0x70, 0x6f, 0x72, - 0x74, 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, - 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x12, 0x73, 0x63, 0x68, - 0x65, 0x64, 0x75, 0x6c, 0x65, 0x64, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x64, - 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x32, 0x9b, 0x02, 0x0a, 0x0d, 0x52, 0x65, 0x70, - 0x6f, 0x72, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x71, 0x0a, 0x10, 0x52, 0x65, - 0x70, 0x6f, 0x72, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x56, 0x31, 0x12, 0x1a, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, - 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, - 0x74, 0x79, 0x22, 0x29, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x23, 0x3a, 0x08, 0x77, 0x6f, 0x72, 0x6b, - 0x66, 0x6c, 0x6f, 0x77, 0x22, 0x17, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, - 0x74, 0x61, 0x31, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x73, 0x12, 0x96, 0x01, - 0x0a, 
0x19, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, - 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x56, 0x31, 0x12, 0x23, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, - 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, - 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x3c, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x36, - 0x3a, 0x12, 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x64, 0x5f, 0x77, 0x6f, 0x72, 0x6b, - 0x66, 0x6c, 0x6f, 0x77, 0x22, 0x20, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, - 0x74, 0x61, 0x31, 0x2f, 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x64, 0x77, 0x6f, 0x72, - 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x73, 0x42, 0x3d, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, - 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, - 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x67, 0x6f, 0x5f, 0x63, - 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v1beta1_report_proto_rawDesc = "" + + "\n" + + " backend/api/v1beta1/report.proto\x12\x03api\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\"3\n" + + "\x15ReportWorkflowRequest\x12\x1a\n" + + "\bworkflow\x18\x01 \x01(\tR\bworkflow\"O\n" + + "\x1eReportScheduledWorkflowRequest\x12-\n" + + "\x12scheduled_workflow\x18\x01 \x01(\tR\x11scheduledWorkflow2\x9b\x02\n" + + "\rReportService\x12q\n" + + "\x10ReportWorkflowV1\x12\x1a.api.ReportWorkflowRequest\x1a\x16.google.protobuf.Empty\")\x82\xd3\xe4\x93\x02#:\bworkflow\"\x17/apis/v1beta1/workflows\x12\x96\x01\n" + + "\x19ReportScheduledWorkflowV1\x12#.api.ReportScheduledWorkflowRequest\x1a\x16.google.protobuf.Empty\"<\x82\xd3\xe4\x93\x026:\x12scheduled_workflow\" /apis/v1beta1/scheduledworkflowsB=Z;github.com/kubeflow/pipelines/backend/api/v1beta1/go_clientb\x06proto3" var ( file_backend_api_v1beta1_report_proto_rawDescOnce sync.Once - file_backend_api_v1beta1_report_proto_rawDescData = file_backend_api_v1beta1_report_proto_rawDesc + file_backend_api_v1beta1_report_proto_rawDescData []byte ) func file_backend_api_v1beta1_report_proto_rawDescGZIP() []byte { file_backend_api_v1beta1_report_proto_rawDescOnce.Do(func() { - file_backend_api_v1beta1_report_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v1beta1_report_proto_rawDescData) + file_backend_api_v1beta1_report_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_report_proto_rawDesc), len(file_backend_api_v1beta1_report_proto_rawDesc))) }) return file_backend_api_v1beta1_report_proto_rawDescData } var file_backend_api_v1beta1_report_proto_msgTypes = make([]protoimpl.MessageInfo, 2) -var file_backend_api_v1beta1_report_proto_goTypes = []interface{}{ +var file_backend_api_v1beta1_report_proto_goTypes = []any{ (*ReportWorkflowRequest)(nil), // 0: api.ReportWorkflowRequest (*ReportScheduledWorkflowRequest)(nil), // 1: api.ReportScheduledWorkflowRequest (*emptypb.Empty)(nil), // 2: google.protobuf.Empty @@ -213,37 +175,11 @@ func file_backend_api_v1beta1_report_proto_init() { if File_backend_api_v1beta1_report_proto != nil { return } - if !protoimpl.UnsafeEnabled { - 
file_backend_api_v1beta1_report_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ReportWorkflowRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_report_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ReportScheduledWorkflowRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v1beta1_report_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_report_proto_rawDesc), len(file_backend_api_v1beta1_report_proto_rawDesc)), NumEnums: 0, NumMessages: 2, NumExtensions: 0, @@ -254,123 +190,6 @@ func file_backend_api_v1beta1_report_proto_init() { MessageInfos: file_backend_api_v1beta1_report_proto_msgTypes, }.Build() File_backend_api_v1beta1_report_proto = out.File - file_backend_api_v1beta1_report_proto_rawDesc = nil file_backend_api_v1beta1_report_proto_goTypes = nil file_backend_api_v1beta1_report_proto_depIdxs = nil } - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// ReportServiceClient is the client API for ReportService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type ReportServiceClient interface { - ReportWorkflowV1(ctx context.Context, in *ReportWorkflowRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - ReportScheduledWorkflowV1(ctx context.Context, in *ReportScheduledWorkflowRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) -} - -type reportServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewReportServiceClient(cc grpc.ClientConnInterface) ReportServiceClient { - return &reportServiceClient{cc} -} - -func (c *reportServiceClient) ReportWorkflowV1(ctx context.Context, in *ReportWorkflowRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/api.ReportService/ReportWorkflowV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *reportServiceClient) ReportScheduledWorkflowV1(ctx context.Context, in *ReportScheduledWorkflowRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/api.ReportService/ReportScheduledWorkflowV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// ReportServiceServer is the server API for ReportService service. -type ReportServiceServer interface { - ReportWorkflowV1(context.Context, *ReportWorkflowRequest) (*emptypb.Empty, error) - ReportScheduledWorkflowV1(context.Context, *ReportScheduledWorkflowRequest) (*emptypb.Empty, error) -} - -// UnimplementedReportServiceServer can be embedded to have forward compatible implementations. 
-type UnimplementedReportServiceServer struct { -} - -func (*UnimplementedReportServiceServer) ReportWorkflowV1(context.Context, *ReportWorkflowRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method ReportWorkflowV1 not implemented") -} -func (*UnimplementedReportServiceServer) ReportScheduledWorkflowV1(context.Context, *ReportScheduledWorkflowRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method ReportScheduledWorkflowV1 not implemented") -} - -func RegisterReportServiceServer(s *grpc.Server, srv ReportServiceServer) { - s.RegisterService(&_ReportService_serviceDesc, srv) -} - -func _ReportService_ReportWorkflowV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ReportWorkflowRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ReportServiceServer).ReportWorkflowV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.ReportService/ReportWorkflowV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ReportServiceServer).ReportWorkflowV1(ctx, req.(*ReportWorkflowRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ReportService_ReportScheduledWorkflowV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ReportScheduledWorkflowRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ReportServiceServer).ReportScheduledWorkflowV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.ReportService/ReportScheduledWorkflowV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ReportServiceServer).ReportScheduledWorkflowV1(ctx, req.(*ReportScheduledWorkflowRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _ReportService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "api.ReportService", - HandlerType: (*ReportServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "ReportWorkflowV1", - Handler: _ReportService_ReportWorkflowV1_Handler, - }, - { - MethodName: "ReportScheduledWorkflowV1", - Handler: _ReportService_ReportScheduledWorkflowV1_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "backend/api/v1beta1/report.proto", -} diff --git a/backend/api/v1beta1/go_client/report.pb.gw.go b/backend/api/v1beta1/go_client/report.pb.gw.go index 5384e3cdf5e..ca8f61d6257 100644 --- a/backend/api/v1beta1/go_client/report.pb.gw.go +++ b/backend/api/v1beta1/go_client/report.pb.gw.go @@ -10,147 +10,130 @@ package go_client import ( "context" + "errors" "io" "net/http" - "github.com/golang/protobuf/descriptor" - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" ) // Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = descriptor.ForMessage -var 
_ = metadata.Join +var ( + _ codes.Code + _ io.Reader + _ status.Status + _ = errors.New + _ = runtime.String + _ = utilities.NewDoubleArray + _ = metadata.Join +) func request_ReportService_ReportWorkflowV1_0(ctx context.Context, marshaler runtime.Marshaler, client ReportServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ReportWorkflowRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Workflow); err != nil && err != io.EOF { + var ( + protoReq ReportWorkflowRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Workflow); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } msg, err := client.ReportWorkflowV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_ReportService_ReportWorkflowV1_0(ctx context.Context, marshaler runtime.Marshaler, server ReportServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ReportWorkflowRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Workflow); err != nil && err != io.EOF { + var ( + protoReq ReportWorkflowRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Workflow); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.ReportWorkflowV1(ctx, &protoReq) return msg, metadata, err - } func request_ReportService_ReportScheduledWorkflowV1_0(ctx context.Context, marshaler runtime.Marshaler, client ReportServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ReportScheduledWorkflowRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.ScheduledWorkflow); err != nil && err != io.EOF { + var ( + protoReq ReportScheduledWorkflowRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.ScheduledWorkflow); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } msg, err := client.ReportScheduledWorkflowV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_ReportService_ReportScheduledWorkflowV1_0(ctx context.Context, marshaler runtime.Marshaler, server ReportServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ReportScheduledWorkflowRequest - var metadata runtime.ServerMetadata - - newReader, berr := 
utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.ScheduledWorkflow); err != nil && err != io.EOF { + var ( + protoReq ReportScheduledWorkflowRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.ScheduledWorkflow); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.ReportScheduledWorkflowV1(ctx, &protoReq) return msg, metadata, err - } // RegisterReportServiceHandlerServer registers the http handlers for service ReportService to "mux". // UnaryRPC :call ReportServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. // Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterReportServiceHandlerFromEndpoint instead. +// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call. func RegisterReportServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server ReportServiceServer) error { - - mux.Handle("POST", pattern_ReportService_ReportWorkflowV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_ReportService_ReportWorkflowV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.ReportService/ReportWorkflowV1", runtime.WithHTTPPathPattern("/apis/v1beta1/workflows")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_ReportService_ReportWorkflowV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_ReportService_ReportWorkflowV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ReportService_ReportWorkflowV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ReportService_ReportWorkflowV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_ReportService_ReportScheduledWorkflowV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_ReportService_ReportScheduledWorkflowV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.ReportService/ReportScheduledWorkflowV1", runtime.WithHTTPPathPattern("/apis/v1beta1/scheduledworkflows")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_ReportService_ReportScheduledWorkflowV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_ReportService_ReportScheduledWorkflowV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ReportService_ReportScheduledWorkflowV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ReportService_ReportScheduledWorkflowV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) return nil @@ -159,25 +142,24 @@ func RegisterReportServiceHandlerServer(ctx context.Context, mux *runtime.ServeM // RegisterReportServiceHandlerFromEndpoint is same as RegisterReportServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterReportServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) + conn, err := grpc.NewClient(endpoint, opts...) if err != nil { return err } defer func() { if err != nil { if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } return } go func() { <-ctx.Done() if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } }() }() - return RegisterReportServiceHandler(ctx, mux, conn) } @@ -191,60 +173,51 @@ func RegisterReportServiceHandler(ctx context.Context, mux *runtime.ServeMux, co // to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "ReportServiceClient". // Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "ReportServiceClient" // doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "ReportServiceClient" to call the correct interceptors. +// "ReportServiceClient" to call the correct interceptors. This client ignores the HTTP middlewares. 
func RegisterReportServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client ReportServiceClient) error { - - mux.Handle("POST", pattern_ReportService_ReportWorkflowV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_ReportService_ReportWorkflowV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.ReportService/ReportWorkflowV1", runtime.WithHTTPPathPattern("/apis/v1beta1/workflows")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_ReportService_ReportWorkflowV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_ReportService_ReportWorkflowV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ReportService_ReportWorkflowV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ReportService_ReportWorkflowV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_ReportService_ReportScheduledWorkflowV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_ReportService_ReportScheduledWorkflowV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.ReportService/ReportScheduledWorkflowV1", runtime.WithHTTPPathPattern("/apis/v1beta1/scheduledworkflows")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_ReportService_ReportScheduledWorkflowV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_ReportService_ReportScheduledWorkflowV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ReportService_ReportScheduledWorkflowV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ReportService_ReportScheduledWorkflowV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
	})
-
	return nil
}

var (
-	pattern_ReportService_ReportWorkflowV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "workflows"}, "", runtime.AssumeColonVerbOpt(true)))
-
-	pattern_ReportService_ReportScheduledWorkflowV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "scheduledworkflows"}, "", runtime.AssumeColonVerbOpt(true)))
+	pattern_ReportService_ReportWorkflowV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "workflows"}, ""))
+	pattern_ReportService_ReportScheduledWorkflowV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "scheduledworkflows"}, ""))
)

var (
-	forward_ReportService_ReportWorkflowV1_0 = runtime.ForwardResponseMessage
-
+	forward_ReportService_ReportWorkflowV1_0 = runtime.ForwardResponseMessage
	forward_ReportService_ReportScheduledWorkflowV1_0 = runtime.ForwardResponseMessage
)
diff --git a/backend/api/v1beta1/go_client/report_grpc.pb.go b/backend/api/v1beta1/go_client/report_grpc.pb.go
new file mode 100644
index 00000000000..919c1c82cb6
--- /dev/null
+++ b/backend/api/v1beta1/go_client/report_grpc.pb.go
@@ -0,0 +1,174 @@
+// Copyright 2018 The Kubeflow Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
+// versions:
+// - protoc-gen-go-grpc v1.5.1
+// - protoc v6.31.1
+// source: backend/api/v1beta1/report.proto
+
+package go_client
+
+import (
+	context "context"
+	grpc "google.golang.org/grpc"
+	codes "google.golang.org/grpc/codes"
+	status "google.golang.org/grpc/status"
+	emptypb "google.golang.org/protobuf/types/known/emptypb"
+)
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the grpc package it is being compiled against.
+// Requires gRPC-Go v1.64.0 or later.
+const _ = grpc.SupportPackageIsVersion9
+
+const (
+	ReportService_ReportWorkflowV1_FullMethodName = "/api.ReportService/ReportWorkflowV1"
+	ReportService_ReportScheduledWorkflowV1_FullMethodName = "/api.ReportService/ReportScheduledWorkflowV1"
+)
+
+// ReportServiceClient is the client API for ReportService service.
+//
+// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
+type ReportServiceClient interface {
+	ReportWorkflowV1(ctx context.Context, in *ReportWorkflowRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
+	ReportScheduledWorkflowV1(ctx context.Context, in *ReportScheduledWorkflowRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
+}
+
+type reportServiceClient struct {
+	cc grpc.ClientConnInterface
+}
+
+func NewReportServiceClient(cc grpc.ClientConnInterface) ReportServiceClient {
+	return &reportServiceClient{cc}
+}
+
+func (c *reportServiceClient) ReportWorkflowV1(ctx context.Context, in *ReportWorkflowRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) {
+	cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
+	out := new(emptypb.Empty)
+	err := c.cc.Invoke(ctx, ReportService_ReportWorkflowV1_FullMethodName, in, out, cOpts...)
+	if err != nil {
+		return nil, err
+	}
+	return out, nil
+}
+
+func (c *reportServiceClient) ReportScheduledWorkflowV1(ctx context.Context, in *ReportScheduledWorkflowRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) {
+	cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
+	out := new(emptypb.Empty)
+	err := c.cc.Invoke(ctx, ReportService_ReportScheduledWorkflowV1_FullMethodName, in, out, cOpts...)
+	if err != nil {
+		return nil, err
+	}
+	return out, nil
+}
+
+// ReportServiceServer is the server API for ReportService service.
+// All implementations must embed UnimplementedReportServiceServer
+// for forward compatibility.
+type ReportServiceServer interface {
+	ReportWorkflowV1(context.Context, *ReportWorkflowRequest) (*emptypb.Empty, error)
+	ReportScheduledWorkflowV1(context.Context, *ReportScheduledWorkflowRequest) (*emptypb.Empty, error)
+	mustEmbedUnimplementedReportServiceServer()
+}
+
+// UnimplementedReportServiceServer must be embedded to have
+// forward compatible implementations.
+//
+// NOTE: this should be embedded by value instead of pointer to avoid a nil
+// pointer dereference when methods are called.
+type UnimplementedReportServiceServer struct{}
+
+func (UnimplementedReportServiceServer) ReportWorkflowV1(context.Context, *ReportWorkflowRequest) (*emptypb.Empty, error) {
+	return nil, status.Errorf(codes.Unimplemented, "method ReportWorkflowV1 not implemented")
+}
+func (UnimplementedReportServiceServer) ReportScheduledWorkflowV1(context.Context, *ReportScheduledWorkflowRequest) (*emptypb.Empty, error) {
+	return nil, status.Errorf(codes.Unimplemented, "method ReportScheduledWorkflowV1 not implemented")
+}
+func (UnimplementedReportServiceServer) mustEmbedUnimplementedReportServiceServer() {}
+func (UnimplementedReportServiceServer) testEmbeddedByValue() {}
+
+// UnsafeReportServiceServer may be embedded to opt out of forward compatibility for this service.
+// Use of this interface is not recommended, as added methods to ReportServiceServer will
+// result in compilation errors.
+type UnsafeReportServiceServer interface {
+	mustEmbedUnimplementedReportServiceServer()
+}
+
+func RegisterReportServiceServer(s grpc.ServiceRegistrar, srv ReportServiceServer) {
+	// If the following call pancis, it indicates UnimplementedReportServiceServer was
+	// embedded by pointer and is nil. This will cause panics if an
+	// unimplemented method is ever invoked, so we test this at initialization
+	// time to prevent it from happening at runtime later due to I/O.
+	if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
+		t.testEmbeddedByValue()
+	}
+	s.RegisterService(&ReportService_ServiceDesc, srv)
+}
+
+func _ReportService_ReportWorkflowV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+	in := new(ReportWorkflowRequest)
+	if err := dec(in); err != nil {
+		return nil, err
+	}
+	if interceptor == nil {
+		return srv.(ReportServiceServer).ReportWorkflowV1(ctx, in)
+	}
+	info := &grpc.UnaryServerInfo{
+		Server: srv,
+		FullMethod: ReportService_ReportWorkflowV1_FullMethodName,
+	}
+	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+		return srv.(ReportServiceServer).ReportWorkflowV1(ctx, req.(*ReportWorkflowRequest))
+	}
+	return interceptor(ctx, in, info, handler)
+}
+
+func _ReportService_ReportScheduledWorkflowV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+	in := new(ReportScheduledWorkflowRequest)
+	if err := dec(in); err != nil {
+		return nil, err
+	}
+	if interceptor == nil {
+		return srv.(ReportServiceServer).ReportScheduledWorkflowV1(ctx, in)
+	}
+	info := &grpc.UnaryServerInfo{
+		Server: srv,
+		FullMethod: ReportService_ReportScheduledWorkflowV1_FullMethodName,
+	}
+	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+		return srv.(ReportServiceServer).ReportScheduledWorkflowV1(ctx, req.(*ReportScheduledWorkflowRequest))
+	}
+	return interceptor(ctx, in, info, handler)
+}
+
+// ReportService_ServiceDesc is the grpc.ServiceDesc for ReportService service.
+// It's only intended for direct use with grpc.RegisterService,
+// and not to be introspected or modified (even as a copy)
+var ReportService_ServiceDesc = grpc.ServiceDesc{
+	ServiceName: "api.ReportService",
+	HandlerType: (*ReportServiceServer)(nil),
+	Methods: []grpc.MethodDesc{
+		{
+			MethodName: "ReportWorkflowV1",
+			Handler: _ReportService_ReportWorkflowV1_Handler,
+		},
+		{
+			MethodName: "ReportScheduledWorkflowV1",
+			Handler: _ReportService_ReportScheduledWorkflowV1_Handler,
+		},
+	},
+	Streams: []grpc.StreamDesc{},
+	Metadata: "backend/api/v1beta1/report.proto",
+}
diff --git a/backend/api/v1beta1/go_client/resource_reference.pb.go b/backend/api/v1beta1/go_client/resource_reference.pb.go
index a04bc537132..0103be135e6 100644
--- a/backend/api/v1beta1/go_client/resource_reference.pb.go
+++ b/backend/api/v1beta1/go_client/resource_reference.pb.go
@@ -14,8 +14,8 @@
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// protoc-gen-go v1.33.0
-// protoc v3.20.3
+// protoc-gen-go v1.36.6
+// protoc v6.31.1
 // source: backend/api/v1beta1/resource_reference.proto

 package go_client
@@ -25,6 +25,7 @@ import (
 	protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 	reflect "reflect"
 	sync "sync"
+	unsafe "unsafe"
 )

 const (
@@ -142,23 +143,20 @@ func (Relationship) EnumDescriptor() ([]byte, []int) {
 }

 type ResourceKey struct {
-	state protoimpl.MessageState
-	sizeCache protoimpl.SizeCache
-	unknownFields protoimpl.UnknownFields
-
+	state protoimpl.MessageState `protogen:"open.v1"`
 	// The type of the resource that referred to.
 	Type ResourceType `protobuf:"varint,1,opt,name=type,proto3,enum=api.ResourceType" json:"type,omitempty"`
 	// The ID of the resource that referred to.
- Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ResourceKey) Reset() { *x = ResourceKey{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_resource_reference_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_resource_reference_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ResourceKey) String() string { @@ -169,7 +167,7 @@ func (*ResourceKey) ProtoMessage() {} func (x *ResourceKey) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_resource_reference_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -199,24 +197,21 @@ func (x *ResourceKey) GetId() string { } type ResourceReference struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Key *ResourceKey `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + state protoimpl.MessageState `protogen:"open.v1"` + Key *ResourceKey `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` // The name of the resource that referred to. Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` // Required field. The relationship from referred resource to the object. - Relationship Relationship `protobuf:"varint,2,opt,name=relationship,proto3,enum=api.Relationship" json:"relationship,omitempty"` + Relationship Relationship `protobuf:"varint,2,opt,name=relationship,proto3,enum=api.Relationship" json:"relationship,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ResourceReference) Reset() { *x = ResourceReference{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_resource_reference_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_resource_reference_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ResourceReference) String() string { @@ -227,7 +222,7 @@ func (*ResourceReference) ProtoMessage() {} func (x *ResourceReference) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_resource_reference_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -265,56 +260,44 @@ func (x *ResourceReference) GetRelationship() Relationship { var File_backend_api_v1beta1_resource_reference_proto protoreflect.FileDescriptor -var file_backend_api_v1beta1_resource_reference_proto_rawDesc = []byte{ - 0x0a, 0x2c, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x72, - 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, - 0x61, 0x70, 0x69, 0x22, 0x44, 0x0a, 0x0b, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4b, - 0x65, 0x79, 0x12, 0x25, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, 
0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, - 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x82, 0x01, 0x0a, 0x11, 0x52, 0x65, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, - 0x22, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x03, - 0x6b, 0x65, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x0c, 0x72, 0x65, 0x6c, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x73, 0x68, 0x69, 0x70, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x11, 0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x68, 0x69, 0x70, - 0x52, 0x0c, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x68, 0x69, 0x70, 0x2a, 0x75, - 0x0a, 0x0c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x19, - 0x0a, 0x15, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x5f, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, - 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x10, 0x00, 0x12, 0x0e, 0x0a, 0x0a, 0x45, 0x58, 0x50, - 0x45, 0x52, 0x49, 0x4d, 0x45, 0x4e, 0x54, 0x10, 0x01, 0x12, 0x07, 0x0a, 0x03, 0x4a, 0x4f, 0x42, - 0x10, 0x02, 0x12, 0x0c, 0x0a, 0x08, 0x50, 0x49, 0x50, 0x45, 0x4c, 0x49, 0x4e, 0x45, 0x10, 0x03, - 0x12, 0x14, 0x0a, 0x10, 0x50, 0x49, 0x50, 0x45, 0x4c, 0x49, 0x4e, 0x45, 0x5f, 0x56, 0x45, 0x52, - 0x53, 0x49, 0x4f, 0x4e, 0x10, 0x04, 0x12, 0x0d, 0x0a, 0x09, 0x4e, 0x41, 0x4d, 0x45, 0x53, 0x50, - 0x41, 0x43, 0x45, 0x10, 0x05, 0x2a, 0x40, 0x0a, 0x0c, 0x52, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x68, 0x69, 0x70, 0x12, 0x18, 0x0a, 0x14, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, - 0x5f, 0x52, 0x45, 0x4c, 0x41, 0x54, 0x49, 0x4f, 0x4e, 0x53, 0x48, 0x49, 0x50, 0x10, 0x00, 0x12, - 0x09, 0x0a, 0x05, 0x4f, 0x57, 0x4e, 0x45, 0x52, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x43, 0x52, - 0x45, 0x41, 0x54, 0x4f, 0x52, 0x10, 0x02, 0x42, 0x3d, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, - 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x67, 0x6f, 0x5f, - 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v1beta1_resource_reference_proto_rawDesc = "" + + "\n" + + ",backend/api/v1beta1/resource_reference.proto\x12\x03api\"D\n" + + "\vResourceKey\x12%\n" + + "\x04type\x18\x01 \x01(\x0e2\x11.api.ResourceTypeR\x04type\x12\x0e\n" + + "\x02id\x18\x02 \x01(\tR\x02id\"\x82\x01\n" + + "\x11ResourceReference\x12\"\n" + + "\x03key\x18\x01 \x01(\v2\x10.api.ResourceKeyR\x03key\x12\x12\n" + + "\x04name\x18\x03 \x01(\tR\x04name\x125\n" + + "\frelationship\x18\x02 \x01(\x0e2\x11.api.RelationshipR\frelationship*u\n" + + "\fResourceType\x12\x19\n" + + "\x15UNKNOWN_RESOURCE_TYPE\x10\x00\x12\x0e\n" + + "\n" + + "EXPERIMENT\x10\x01\x12\a\n" + + "\x03JOB\x10\x02\x12\f\n" + + "\bPIPELINE\x10\x03\x12\x14\n" + + "\x10PIPELINE_VERSION\x10\x04\x12\r\n" + + "\tNAMESPACE\x10\x05*@\n" + + "\fRelationship\x12\x18\n" + + "\x14UNKNOWN_RELATIONSHIP\x10\x00\x12\t\n" + + "\x05OWNER\x10\x01\x12\v\n" + + 
"\aCREATOR\x10\x02B=Z;github.com/kubeflow/pipelines/backend/api/v1beta1/go_clientb\x06proto3" var ( file_backend_api_v1beta1_resource_reference_proto_rawDescOnce sync.Once - file_backend_api_v1beta1_resource_reference_proto_rawDescData = file_backend_api_v1beta1_resource_reference_proto_rawDesc + file_backend_api_v1beta1_resource_reference_proto_rawDescData []byte ) func file_backend_api_v1beta1_resource_reference_proto_rawDescGZIP() []byte { file_backend_api_v1beta1_resource_reference_proto_rawDescOnce.Do(func() { - file_backend_api_v1beta1_resource_reference_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v1beta1_resource_reference_proto_rawDescData) + file_backend_api_v1beta1_resource_reference_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_resource_reference_proto_rawDesc), len(file_backend_api_v1beta1_resource_reference_proto_rawDesc))) }) return file_backend_api_v1beta1_resource_reference_proto_rawDescData } var file_backend_api_v1beta1_resource_reference_proto_enumTypes = make([]protoimpl.EnumInfo, 2) var file_backend_api_v1beta1_resource_reference_proto_msgTypes = make([]protoimpl.MessageInfo, 2) -var file_backend_api_v1beta1_resource_reference_proto_goTypes = []interface{}{ +var file_backend_api_v1beta1_resource_reference_proto_goTypes = []any{ (ResourceType)(0), // 0: api.ResourceType (Relationship)(0), // 1: api.Relationship (*ResourceKey)(nil), // 2: api.ResourceKey @@ -336,37 +319,11 @@ func file_backend_api_v1beta1_resource_reference_proto_init() { if File_backend_api_v1beta1_resource_reference_proto != nil { return } - if !protoimpl.UnsafeEnabled { - file_backend_api_v1beta1_resource_reference_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ResourceKey); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_resource_reference_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ResourceReference); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v1beta1_resource_reference_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_resource_reference_proto_rawDesc), len(file_backend_api_v1beta1_resource_reference_proto_rawDesc)), NumEnums: 2, NumMessages: 2, NumExtensions: 0, @@ -378,7 +335,6 @@ func file_backend_api_v1beta1_resource_reference_proto_init() { MessageInfos: file_backend_api_v1beta1_resource_reference_proto_msgTypes, }.Build() File_backend_api_v1beta1_resource_reference_proto = out.File - file_backend_api_v1beta1_resource_reference_proto_rawDesc = nil file_backend_api_v1beta1_resource_reference_proto_goTypes = nil file_backend_api_v1beta1_resource_reference_proto_depIdxs = nil } diff --git a/backend/api/v1beta1/go_client/run.pb.go b/backend/api/v1beta1/go_client/run.pb.go index 5186e3300ca..65922b620a6 100644 --- a/backend/api/v1beta1/go_client/run.pb.go +++ b/backend/api/v1beta1/go_client/run.pb.go @@ -14,25 +14,22 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v1beta1/run.proto package go_client import ( - context "context" - _ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options" + _ "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options" _ "google.golang.org/genproto/googleapis/api/annotations" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" emptypb "google.golang.org/protobuf/types/known/emptypb" timestamppb "google.golang.org/protobuf/types/known/timestamppb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -201,20 +198,17 @@ func (ReportRunMetricsResponse_ReportRunMetricResult_Status) EnumDescriptor() ([ } type CreateRunRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Run *Run `protobuf:"bytes,1,opt,name=run,proto3" json:"run,omitempty"` unknownFields protoimpl.UnknownFields - - Run *Run `protobuf:"bytes,1,opt,name=run,proto3" json:"run,omitempty"` + sizeCache protoimpl.SizeCache } func (x *CreateRunRequest) Reset() { *x = CreateRunRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_run_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_run_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *CreateRunRequest) String() string { @@ -225,7 +219,7 @@ func (*CreateRunRequest) ProtoMessage() {} func (x *CreateRunRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_run_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -248,21 +242,18 @@ func (x *CreateRunRequest) GetRun() *Run { } type GetRunRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the run to be retrieved. 
- RunId string `protobuf:"bytes,1,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + RunId string `protobuf:"bytes,1,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *GetRunRequest) Reset() { *x = GetRunRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_run_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_run_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *GetRunRequest) String() string { @@ -273,7 +264,7 @@ func (*GetRunRequest) ProtoMessage() {} func (x *GetRunRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_run_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -296,10 +287,7 @@ func (x *GetRunRequest) GetRunId() string { } type ListRunsRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // A page token to request the next page of results. The token is acquried // from the nextPageToken field of the response from the previous // ListRuns call or can be omitted when fetching the first page. @@ -317,16 +305,16 @@ type ListRunsRequest struct { ResourceReferenceKey *ResourceKey `protobuf:"bytes,4,opt,name=resource_reference_key,json=resourceReferenceKey,proto3" json:"resource_reference_key,omitempty"` // A url-encoded, JSON-serialized Filter protocol buffer (see // [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/v1beta1/filter.proto)). - Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListRunsRequest) Reset() { *x = ListRunsRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_run_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_run_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListRunsRequest) String() string { @@ -337,7 +325,7 @@ func (*ListRunsRequest) ProtoMessage() {} func (x *ListRunsRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_run_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -388,21 +376,18 @@ func (x *ListRunsRequest) GetFilter() string { } type TerminateRunRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the run to be terminated. 
- RunId string `protobuf:"bytes,1,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + RunId string `protobuf:"bytes,1,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *TerminateRunRequest) Reset() { *x = TerminateRunRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_run_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_run_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *TerminateRunRequest) String() string { @@ -413,7 +398,7 @@ func (*TerminateRunRequest) ProtoMessage() {} func (x *TerminateRunRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_run_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -436,21 +421,18 @@ func (x *TerminateRunRequest) GetRunId() string { } type RetryRunRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the run to be retried. - RunId string `protobuf:"bytes,1,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + RunId string `protobuf:"bytes,1,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *RetryRunRequest) Reset() { *x = RetryRunRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_run_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_run_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *RetryRunRequest) String() string { @@ -461,7 +443,7 @@ func (*RetryRunRequest) ProtoMessage() {} func (x *RetryRunRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_run_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -484,24 +466,21 @@ func (x *RetryRunRequest) GetRunId() string { } type ListRunsResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Runs []*Run `protobuf:"bytes,1,rep,name=runs,proto3" json:"runs,omitempty"` + state protoimpl.MessageState `protogen:"open.v1"` + Runs []*Run `protobuf:"bytes,1,rep,name=runs,proto3" json:"runs,omitempty"` // The total number of runs for the given query. TotalSize int32 `protobuf:"varint,3,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` // The token to list the next page of runs. 
NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListRunsResponse) Reset() { *x = ListRunsResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_run_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_run_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListRunsResponse) String() string { @@ -512,7 +491,7 @@ func (*ListRunsResponse) ProtoMessage() {} func (x *ListRunsResponse) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_run_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -549,21 +528,18 @@ func (x *ListRunsResponse) GetNextPageToken() string { } type ArchiveRunRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the run to be archived. - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ArchiveRunRequest) Reset() { *x = ArchiveRunRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_run_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_run_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ArchiveRunRequest) String() string { @@ -574,7 +550,7 @@ func (*ArchiveRunRequest) ProtoMessage() {} func (x *ArchiveRunRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_run_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -597,21 +573,18 @@ func (x *ArchiveRunRequest) GetId() string { } type UnarchiveRunRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the run to be restored. 
- Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *UnarchiveRunRequest) Reset() { *x = UnarchiveRunRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_run_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_run_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *UnarchiveRunRequest) String() string { @@ -622,7 +595,7 @@ func (*UnarchiveRunRequest) ProtoMessage() {} func (x *UnarchiveRunRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_run_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -645,21 +618,18 @@ func (x *UnarchiveRunRequest) GetId() string { } type DeleteRunRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the run to be deleted. - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DeleteRunRequest) Reset() { *x = DeleteRunRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_run_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_run_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DeleteRunRequest) String() string { @@ -670,7 +640,7 @@ func (*DeleteRunRequest) ProtoMessage() {} func (x *DeleteRunRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_run_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -693,10 +663,7 @@ func (x *DeleteRunRequest) GetId() string { } type Run struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Output. Unique run ID. Generated by API server. Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` // Required input field. Name provided by user, @@ -733,16 +700,16 @@ type Run struct { Error string `protobuf:"bytes,12,opt,name=error,proto3" json:"error,omitempty"` // Output. The metrics of the run. The metrics are reported by ReportMetrics // API. 
- Metrics []*RunMetric `protobuf:"bytes,9,rep,name=metrics,proto3" json:"metrics,omitempty"` + Metrics []*RunMetric `protobuf:"bytes,9,rep,name=metrics,proto3" json:"metrics,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Run) Reset() { *x = Run{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_run_proto_msgTypes[9] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_run_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Run) String() string { @@ -753,7 +720,7 @@ func (*Run) ProtoMessage() {} func (x *Run) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_run_proto_msgTypes[9] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -860,25 +827,22 @@ func (x *Run) GetMetrics() []*RunMetric { } type PipelineRuntime struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Output. The runtime JSON manifest of the pipeline, including the status // of pipeline steps and fields need for UI visualization etc. PipelineManifest string `protobuf:"bytes,10,opt,name=pipeline_manifest,json=pipelineManifest,proto3" json:"pipeline_manifest,omitempty"` // Output. The runtime JSON manifest of the argo workflow. // This is deprecated after pipeline_runtime_manifest is in use. WorkflowManifest string `protobuf:"bytes,11,opt,name=workflow_manifest,json=workflowManifest,proto3" json:"workflow_manifest,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineRuntime) Reset() { *x = PipelineRuntime{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_run_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_run_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineRuntime) String() string { @@ -889,7 +853,7 @@ func (*PipelineRuntime) ProtoMessage() {} func (x *PipelineRuntime) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_run_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -919,21 +883,18 @@ func (x *PipelineRuntime) GetWorkflowManifest() string { } type RunDetail struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Run *Run `protobuf:"bytes,1,opt,name=run,proto3" json:"run,omitempty"` - PipelineRuntime *PipelineRuntime `protobuf:"bytes,2,opt,name=pipeline_runtime,json=pipelineRuntime,proto3" json:"pipeline_runtime,omitempty"` + state protoimpl.MessageState `protogen:"open.v1"` + Run *Run `protobuf:"bytes,1,opt,name=run,proto3" json:"run,omitempty"` + PipelineRuntime *PipelineRuntime `protobuf:"bytes,2,opt,name=pipeline_runtime,json=pipelineRuntime,proto3" json:"pipeline_runtime,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *RunDetail) Reset() { *x = RunDetail{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_run_proto_msgTypes[11] - ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_run_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *RunDetail) String() string { @@ -944,7 +905,7 @@ func (*RunDetail) ProtoMessage() {} func (x *RunDetail) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_run_proto_msgTypes[11] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -974,10 +935,7 @@ func (x *RunDetail) GetPipelineRuntime() *PipelineRuntime { } type RunMetric struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Required. The user defined name of the metric. It must between 1 and 63 // characters long and must conform to the following regular expression: // `[a-z]([-a-z0-9]*[a-z0-9])?`. @@ -987,21 +945,21 @@ type RunMetric struct { // are considerd as duplicate. Only the first reporting will be recorded. Max // length is 128. NodeId string `protobuf:"bytes,2,opt,name=node_id,json=nodeId,proto3" json:"node_id,omitempty"` - // Types that are assignable to Value: + // Types that are valid to be assigned to Value: // // *RunMetric_NumberValue Value isRunMetric_Value `protobuf_oneof:"value"` // The display format of metric. - Format RunMetric_Format `protobuf:"varint,4,opt,name=format,proto3,enum=api.RunMetric_Format" json:"format,omitempty"` + Format RunMetric_Format `protobuf:"varint,4,opt,name=format,proto3,enum=api.RunMetric_Format" json:"format,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *RunMetric) Reset() { *x = RunMetric{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_run_proto_msgTypes[12] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_run_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *RunMetric) String() string { @@ -1012,7 +970,7 @@ func (*RunMetric) ProtoMessage() {} func (x *RunMetric) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_run_proto_msgTypes[12] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1041,16 +999,18 @@ func (x *RunMetric) GetNodeId() string { return "" } -func (m *RunMetric) GetValue() isRunMetric_Value { - if m != nil { - return m.Value +func (x *RunMetric) GetValue() isRunMetric_Value { + if x != nil { + return x.Value } return nil } func (x *RunMetric) GetNumberValue() float64 { - if x, ok := x.GetValue().(*RunMetric_NumberValue); ok { - return x.NumberValue + if x != nil { + if x, ok := x.Value.(*RunMetric_NumberValue); ok { + return x.NumberValue + } } return 0 } @@ -1074,23 +1034,20 @@ type RunMetric_NumberValue struct { func (*RunMetric_NumberValue) isRunMetric_Value() {} type ReportRunMetricsRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Required. The parent run ID of the metric. RunId string `protobuf:"bytes,1,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` // List of metrics to report. 
- Metrics []*RunMetric `protobuf:"bytes,2,rep,name=metrics,proto3" json:"metrics,omitempty"` + Metrics []*RunMetric `protobuf:"bytes,2,rep,name=metrics,proto3" json:"metrics,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ReportRunMetricsRequest) Reset() { *x = ReportRunMetricsRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_run_proto_msgTypes[13] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_run_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ReportRunMetricsRequest) String() string { @@ -1101,7 +1058,7 @@ func (*ReportRunMetricsRequest) ProtoMessage() {} func (x *ReportRunMetricsRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_run_proto_msgTypes[13] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1131,20 +1088,17 @@ func (x *ReportRunMetricsRequest) GetMetrics() []*RunMetric { } type ReportRunMetricsResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Results []*ReportRunMetricsResponse_ReportRunMetricResult `protobuf:"bytes,1,rep,name=results,proto3" json:"results,omitempty"` unknownFields protoimpl.UnknownFields - - Results []*ReportRunMetricsResponse_ReportRunMetricResult `protobuf:"bytes,1,rep,name=results,proto3" json:"results,omitempty"` + sizeCache protoimpl.SizeCache } func (x *ReportRunMetricsResponse) Reset() { *x = ReportRunMetricsResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_run_proto_msgTypes[14] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_run_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ReportRunMetricsResponse) String() string { @@ -1155,7 +1109,7 @@ func (*ReportRunMetricsResponse) ProtoMessage() {} func (x *ReportRunMetricsResponse) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_run_proto_msgTypes[14] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1178,25 +1132,22 @@ func (x *ReportRunMetricsResponse) GetResults() []*ReportRunMetricsResponse_Repo } type ReadArtifactRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the run. RunId string `protobuf:"bytes,1,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` // The ID of the running node. NodeId string `protobuf:"bytes,2,opt,name=node_id,json=nodeId,proto3" json:"node_id,omitempty"` // The name of the artifact. 
- ArtifactName string `protobuf:"bytes,3,opt,name=artifact_name,json=artifactName,proto3" json:"artifact_name,omitempty"` + ArtifactName string `protobuf:"bytes,3,opt,name=artifact_name,json=artifactName,proto3" json:"artifact_name,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ReadArtifactRequest) Reset() { *x = ReadArtifactRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_run_proto_msgTypes[15] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_run_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ReadArtifactRequest) String() string { @@ -1207,7 +1158,7 @@ func (*ReadArtifactRequest) ProtoMessage() {} func (x *ReadArtifactRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_run_proto_msgTypes[15] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1244,21 +1195,18 @@ func (x *ReadArtifactRequest) GetArtifactName() string { } type ReadArtifactResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The bytes of the artifact content. - Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ReadArtifactResponse) Reset() { *x = ReadArtifactResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_run_proto_msgTypes[16] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_run_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ReadArtifactResponse) String() string { @@ -1269,7 +1217,7 @@ func (*ReadArtifactResponse) ProtoMessage() {} func (x *ReadArtifactResponse) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_run_proto_msgTypes[16] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1292,10 +1240,7 @@ func (x *ReadArtifactResponse) GetData() []byte { } type ReportRunMetricsResponse_ReportRunMetricResult struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Output. The name of the metric. MetricName string `protobuf:"bytes,1,opt,name=metric_name,json=metricName,proto3" json:"metric_name,omitempty"` // Output. The ID of the node which reports the metric. @@ -1303,16 +1248,16 @@ type ReportRunMetricsResponse_ReportRunMetricResult struct { // Output. The status of the metric reporting. Status ReportRunMetricsResponse_ReportRunMetricResult_Status `protobuf:"varint,3,opt,name=status,proto3,enum=api.ReportRunMetricsResponse_ReportRunMetricResult_Status" json:"status,omitempty"` // Output. The detailed message of the error of the reporting. 
- Message string `protobuf:"bytes,4,opt,name=message,proto3" json:"message,omitempty"` + Message string `protobuf:"bytes,4,opt,name=message,proto3" json:"message,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ReportRunMetricsResponse_ReportRunMetricResult) Reset() { *x = ReportRunMetricsResponse_ReportRunMetricResult{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_run_proto_msgTypes[17] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_run_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ReportRunMetricsResponse_ReportRunMetricResult) String() string { @@ -1323,7 +1268,7 @@ func (*ReportRunMetricsResponse_ReportRunMetricResult) ProtoMessage() {} func (x *ReportRunMetricsResponse_ReportRunMetricResult) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_run_proto_msgTypes[17] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1368,267 +1313,133 @@ func (x *ReportRunMetricsResponse_ReportRunMetricResult) GetMessage() string { var File_backend_api_v1beta1_run_proto protoreflect.FileDescriptor -var file_backend_api_v1beta1_run_proto_rawDesc = []byte{ - 0x0a, 0x1d, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x75, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, - 0x03, 0x61, 0x70, 0x69, 0x1a, 0x1f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, - 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, - 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, - 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x1a, 0x27, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, - 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, - 0x73, 0x70, 0x65, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2c, 0x62, 0x61, 0x63, 0x6b, - 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, - 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, - 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2c, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, - 0x2d, 0x67, 0x65, 0x6e, 0x2d, 0x73, 0x77, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2f, 0x6f, 0x70, 0x74, - 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x2e, 0x0a, 0x10, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, - 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x03, 0x72, 0x75, - 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x08, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x75, - 0x6e, 0x52, 0x03, 0x72, 0x75, 0x6e, 0x22, 0x26, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x52, 
0x75, 0x6e, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x15, 0x0a, 0x06, 0x72, 0x75, 0x6e, 0x5f, 0x69, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x72, 0x75, 0x6e, 0x49, 0x64, 0x22, 0xc6, - 0x01, 0x0a, 0x0f, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x75, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, - 0x6e, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x17, - 0x0a, 0x07, 0x73, 0x6f, 0x72, 0x74, 0x5f, 0x62, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x06, 0x73, 0x6f, 0x72, 0x74, 0x42, 0x79, 0x12, 0x46, 0x0a, 0x16, 0x72, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x6b, 0x65, - 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x65, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x14, 0x72, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x4b, 0x65, 0x79, 0x12, - 0x16, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x22, 0x2c, 0x0a, 0x13, 0x54, 0x65, 0x72, 0x6d, 0x69, - 0x6e, 0x61, 0x74, 0x65, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x15, - 0x0a, 0x06, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, - 0x72, 0x75, 0x6e, 0x49, 0x64, 0x22, 0x28, 0x0a, 0x0f, 0x52, 0x65, 0x74, 0x72, 0x79, 0x52, 0x75, - 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x15, 0x0a, 0x06, 0x72, 0x75, 0x6e, 0x5f, - 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x72, 0x75, 0x6e, 0x49, 0x64, 0x22, - 0x77, 0x0a, 0x10, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x75, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x1c, 0x0a, 0x04, 0x72, 0x75, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x08, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x75, 0x6e, 0x52, 0x04, 0x72, 0x75, 0x6e, - 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x53, 0x69, 0x7a, 0x65, - 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, - 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, - 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x23, 0x0a, 0x11, 0x41, 0x72, 0x63, 0x68, - 0x69, 0x76, 0x65, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, - 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x25, 0x0a, - 0x13, 0x55, 0x6e, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x02, 0x69, 0x64, 0x22, 0x22, 0x0a, 0x10, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x75, - 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x87, 0x05, 0x0a, 0x03, 0x52, 0x75, 0x6e, - 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, - 0x12, 0x12, 
0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, - 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x3a, 0x0a, 0x0d, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, - 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x52, 0x75, 0x6e, 0x2e, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x53, 0x74, 0x61, - 0x74, 0x65, 0x52, 0x0c, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, - 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, - 0x6f, 0x6e, 0x12, 0x36, 0x0a, 0x0d, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x73, - 0x70, 0x65, 0x63, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x53, 0x70, 0x65, 0x63, 0x52, 0x0c, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x53, 0x70, 0x65, 0x63, 0x12, 0x47, 0x0a, 0x13, 0x72, 0x65, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, - 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x65, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, - 0x12, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, - 0x63, 0x65, 0x73, 0x12, 0x27, 0x0a, 0x0f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x5f, 0x61, - 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x73, 0x65, - 0x72, 0x76, 0x69, 0x63, 0x65, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x39, 0x0a, 0x0a, - 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, - 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, - 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x3d, 0x0a, 0x0c, 0x73, 0x63, 0x68, 0x65, 0x64, - 0x75, 0x6c, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, - 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0b, 0x73, 0x63, 0x68, 0x65, 0x64, - 0x75, 0x6c, 0x65, 0x64, 0x41, 0x74, 0x12, 0x3b, 0x0a, 0x0b, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, - 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, - 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0a, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, - 0x64, 0x41, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x08, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x65, - 0x72, 0x72, 0x6f, 0x72, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, - 0x72, 0x12, 0x28, 0x0a, 0x07, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x18, 0x09, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x75, 0x6e, 0x4d, 0x65, 0x74, 0x72, - 0x69, 0x63, 0x52, 0x07, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x22, 0x45, 0x0a, 0x0c, 0x53, - 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x1a, 0x0a, 0x16, 0x53, - 0x54, 0x4f, 0x52, 0x41, 0x47, 0x45, 
0x53, 0x54, 0x41, 0x54, 0x45, 0x5f, 0x41, 0x56, 0x41, 0x49, - 0x4c, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x00, 0x12, 0x19, 0x0a, 0x15, 0x53, 0x54, 0x4f, 0x52, 0x41, - 0x47, 0x45, 0x53, 0x54, 0x41, 0x54, 0x45, 0x5f, 0x41, 0x52, 0x43, 0x48, 0x49, 0x56, 0x45, 0x44, - 0x10, 0x01, 0x22, 0x6b, 0x0a, 0x0f, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x75, - 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x12, 0x2b, 0x0a, 0x11, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x5f, 0x6d, 0x61, 0x6e, 0x69, 0x66, 0x65, 0x73, 0x74, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x10, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x4d, 0x61, 0x6e, 0x69, 0x66, 0x65, - 0x73, 0x74, 0x12, 0x2b, 0x0a, 0x11, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6d, - 0x61, 0x6e, 0x69, 0x66, 0x65, 0x73, 0x74, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x77, - 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4d, 0x61, 0x6e, 0x69, 0x66, 0x65, 0x73, 0x74, 0x22, - 0x68, 0x0a, 0x09, 0x52, 0x75, 0x6e, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x12, 0x1a, 0x0a, 0x03, - 0x72, 0x75, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x08, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x52, 0x75, 0x6e, 0x52, 0x03, 0x72, 0x75, 0x6e, 0x12, 0x3f, 0x0a, 0x10, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x52, 0x0f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x22, 0xc9, 0x01, 0x0a, 0x09, 0x52, 0x75, - 0x6e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x6e, - 0x6f, 0x64, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6e, 0x6f, - 0x64, 0x65, 0x49, 0x64, 0x12, 0x23, 0x0a, 0x0c, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x5f, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x48, 0x00, 0x52, 0x0b, 0x6e, 0x75, - 0x6d, 0x62, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x2d, 0x0a, 0x06, 0x66, 0x6f, 0x72, - 0x6d, 0x61, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x52, 0x75, 0x6e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x2e, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, - 0x52, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x22, 0x32, 0x0a, 0x06, 0x46, 0x6f, 0x72, 0x6d, - 0x61, 0x74, 0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, - 0x44, 0x10, 0x00, 0x12, 0x07, 0x0a, 0x03, 0x52, 0x41, 0x57, 0x10, 0x01, 0x12, 0x0e, 0x0a, 0x0a, - 0x50, 0x45, 0x52, 0x43, 0x45, 0x4e, 0x54, 0x41, 0x47, 0x45, 0x10, 0x02, 0x42, 0x07, 0x0a, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x5a, 0x0a, 0x17, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, - 0x75, 0x6e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x12, 0x15, 0x0a, 0x06, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x05, 0x72, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x28, 0x0a, 0x07, 0x6d, 0x65, 0x74, 0x72, 0x69, - 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, - 0x75, 0x6e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x52, 0x07, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, - 0x73, 0x22, 0x9e, 0x03, 0x0a, 0x18, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x75, 0x6e, 0x4d, - 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, 0x70, 
0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4d, - 0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x33, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x75, 0x6e, 0x4d, - 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x52, - 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x75, 0x6e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x52, 0x65, - 0x73, 0x75, 0x6c, 0x74, 0x52, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x1a, 0xb2, 0x02, - 0x0a, 0x15, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x75, 0x6e, 0x4d, 0x65, 0x74, 0x72, 0x69, - 0x63, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x65, 0x74, 0x72, 0x69, - 0x63, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6d, 0x65, - 0x74, 0x72, 0x69, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x24, 0x0a, 0x0e, 0x6d, 0x65, 0x74, 0x72, - 0x69, 0x63, 0x5f, 0x6e, 0x6f, 0x64, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x0c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4e, 0x6f, 0x64, 0x65, 0x49, 0x64, 0x12, 0x52, - 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3a, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x75, 0x6e, 0x4d, 0x65, - 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x52, 0x65, - 0x70, 0x6f, 0x72, 0x74, 0x52, 0x75, 0x6e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x52, 0x65, 0x73, - 0x75, 0x6c, 0x74, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, - 0x75, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x04, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x64, 0x0a, 0x06, - 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, - 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x06, 0x0a, 0x02, 0x4f, 0x4b, 0x10, 0x01, 0x12, - 0x14, 0x0a, 0x10, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x5f, 0x41, 0x52, 0x47, 0x55, 0x4d, - 0x45, 0x4e, 0x54, 0x10, 0x02, 0x12, 0x17, 0x0a, 0x13, 0x44, 0x55, 0x50, 0x4c, 0x49, 0x43, 0x41, - 0x54, 0x45, 0x5f, 0x52, 0x45, 0x50, 0x4f, 0x52, 0x54, 0x49, 0x4e, 0x47, 0x10, 0x03, 0x12, 0x12, - 0x0a, 0x0e, 0x49, 0x4e, 0x54, 0x45, 0x52, 0x4e, 0x41, 0x4c, 0x5f, 0x45, 0x52, 0x52, 0x4f, 0x52, - 0x10, 0x04, 0x22, 0x6a, 0x0a, 0x13, 0x52, 0x65, 0x61, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, - 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x15, 0x0a, 0x06, 0x72, 0x75, 0x6e, - 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x72, 0x75, 0x6e, 0x49, 0x64, - 0x12, 0x17, 0x0a, 0x07, 0x6e, 0x6f, 0x64, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x06, 0x6e, 0x6f, 0x64, 0x65, 0x49, 0x64, 0x12, 0x23, 0x0a, 0x0d, 0x61, 0x72, 0x74, - 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x0c, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x2a, - 0x0a, 0x14, 0x52, 0x65, 0x61, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x32, 0xc6, 0x08, 0x0a, 0x0a, 0x52, - 0x75, 0x6e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x55, 0x0a, 0x0b, 0x43, 0x72, 0x65, - 0x61, 0x74, 0x65, 0x52, 0x75, 0x6e, 0x56, 0x31, 0x12, 0x15, 0x2e, 0x61, 0x70, 0x69, 
0x2e, 0x43, - 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x0e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x75, 0x6e, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x22, - 0x1f, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x19, 0x3a, 0x03, 0x72, 0x75, 0x6e, 0x22, 0x12, 0x2f, 0x61, - 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x75, 0x6e, 0x73, - 0x12, 0x53, 0x0a, 0x08, 0x47, 0x65, 0x74, 0x52, 0x75, 0x6e, 0x56, 0x31, 0x12, 0x12, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x0e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x75, 0x6e, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, - 0x22, 0x23, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1d, 0x12, 0x1b, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, - 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x75, 0x6e, 0x73, 0x2f, 0x7b, 0x72, 0x75, - 0x6e, 0x5f, 0x69, 0x64, 0x7d, 0x12, 0x55, 0x0a, 0x0a, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x75, 0x6e, - 0x73, 0x56, 0x31, 0x12, 0x14, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x75, - 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x4c, 0x69, 0x73, 0x74, 0x52, 0x75, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0x1a, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x14, 0x12, 0x12, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, - 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x75, 0x6e, 0x73, 0x12, 0x67, 0x0a, 0x0c, - 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x52, 0x75, 0x6e, 0x56, 0x31, 0x12, 0x16, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x27, 0x82, 0xd3, - 0xe4, 0x93, 0x02, 0x21, 0x22, 0x1f, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, - 0x74, 0x61, 0x31, 0x2f, 0x72, 0x75, 0x6e, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x3a, 0x61, 0x72, - 0x63, 0x68, 0x69, 0x76, 0x65, 0x12, 0x6d, 0x0a, 0x0e, 0x55, 0x6e, 0x61, 0x72, 0x63, 0x68, 0x69, - 0x76, 0x65, 0x52, 0x75, 0x6e, 0x56, 0x31, 0x12, 0x18, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x55, 0x6e, - 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x29, 0x82, 0xd3, 0xe4, 0x93, 0x02, - 0x23, 0x22, 0x21, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2f, 0x72, 0x75, 0x6e, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x3a, 0x75, 0x6e, 0x61, 0x72, 0x63, - 0x68, 0x69, 0x76, 0x65, 0x12, 0x5d, 0x0a, 0x0b, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x75, - 0x6e, 0x56, 0x31, 0x12, 0x15, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, - 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, - 0x74, 0x79, 0x22, 0x1f, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x19, 0x2a, 0x17, 0x2f, 0x61, 0x70, 0x69, - 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x75, 0x6e, 0x73, 0x2f, 0x7b, - 0x69, 0x64, 0x7d, 0x12, 0x87, 0x01, 0x0a, 0x12, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x75, - 0x6e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x56, 0x31, 0x12, 0x1c, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x52, 
0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x75, 0x6e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, - 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, - 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x75, 0x6e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x34, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x2e, 0x3a, - 0x01, 0x2a, 0x22, 0x29, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, - 0x31, 0x2f, 0x72, 0x75, 0x6e, 0x73, 0x2f, 0x7b, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x7d, 0x3a, - 0x72, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x99, 0x01, - 0x0a, 0x0e, 0x52, 0x65, 0x61, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x56, 0x31, - 0x12, 0x18, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x52, 0x65, 0x61, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x52, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x4c, 0x12, 0x4a, 0x2f, - 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x75, 0x6e, - 0x73, 0x2f, 0x7b, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x7d, 0x2f, 0x6e, 0x6f, 0x64, 0x65, 0x73, - 0x2f, 0x7b, 0x6e, 0x6f, 0x64, 0x65, 0x5f, 0x69, 0x64, 0x7d, 0x2f, 0x61, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x73, 0x2f, 0x7b, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, 0x6e, - 0x61, 0x6d, 0x65, 0x7d, 0x3a, 0x72, 0x65, 0x61, 0x64, 0x12, 0x71, 0x0a, 0x0e, 0x54, 0x65, 0x72, - 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x52, 0x75, 0x6e, 0x56, 0x31, 0x12, 0x18, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x52, 0x75, 0x6e, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x2d, 0x82, - 0xd3, 0xe4, 0x93, 0x02, 0x27, 0x22, 0x25, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, - 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x75, 0x6e, 0x73, 0x2f, 0x7b, 0x72, 0x75, 0x6e, 0x5f, 0x69, - 0x64, 0x7d, 0x2f, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x12, 0x65, 0x0a, 0x0a, - 0x52, 0x65, 0x74, 0x72, 0x79, 0x52, 0x75, 0x6e, 0x56, 0x31, 0x12, 0x14, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x52, 0x65, 0x74, 0x72, 0x79, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, - 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x29, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x23, - 0x22, 0x21, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, - 0x72, 0x75, 0x6e, 0x73, 0x2f, 0x7b, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x7d, 0x2f, 0x72, 0x65, - 0x74, 0x72, 0x79, 0x42, 0x91, 0x01, 0x92, 0x41, 0x51, 0x2a, 0x02, 0x01, 0x02, 0x52, 0x1c, 0x0a, - 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x12, 0x11, 0x12, 0x0f, 0x0a, 0x0d, 0x1a, 0x0b, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x5a, 0x1f, 0x0a, 0x1d, 0x0a, - 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x12, 0x13, 0x08, 0x02, 0x1a, 0x0d, 0x61, 0x75, 0x74, - 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x02, 0x62, 0x0c, 0x0a, 0x0a, - 0x0a, 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x12, 0x00, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, - 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 
0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, - 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x67, 0x6f, - 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v1beta1_run_proto_rawDesc = "" + + "\n" + + "\x1dbackend/api/v1beta1/run.proto\x12\x03api\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a'backend/api/v1beta1/pipeline_spec.proto\x1a,backend/api/v1beta1/resource_reference.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\".\n" + + "\x10CreateRunRequest\x12\x1a\n" + + "\x03run\x18\x01 \x01(\v2\b.api.RunR\x03run\"&\n" + + "\rGetRunRequest\x12\x15\n" + + "\x06run_id\x18\x01 \x01(\tR\x05runId\"\xc6\x01\n" + + "\x0fListRunsRequest\x12\x1d\n" + + "\n" + + "page_token\x18\x01 \x01(\tR\tpageToken\x12\x1b\n" + + "\tpage_size\x18\x02 \x01(\x05R\bpageSize\x12\x17\n" + + "\asort_by\x18\x03 \x01(\tR\x06sortBy\x12F\n" + + "\x16resource_reference_key\x18\x04 \x01(\v2\x10.api.ResourceKeyR\x14resourceReferenceKey\x12\x16\n" + + "\x06filter\x18\x05 \x01(\tR\x06filter\",\n" + + "\x13TerminateRunRequest\x12\x15\n" + + "\x06run_id\x18\x01 \x01(\tR\x05runId\"(\n" + + "\x0fRetryRunRequest\x12\x15\n" + + "\x06run_id\x18\x01 \x01(\tR\x05runId\"w\n" + + "\x10ListRunsResponse\x12\x1c\n" + + "\x04runs\x18\x01 \x03(\v2\b.api.RunR\x04runs\x12\x1d\n" + + "\n" + + "total_size\x18\x03 \x01(\x05R\ttotalSize\x12&\n" + + "\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\"#\n" + + "\x11ArchiveRunRequest\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\"%\n" + + "\x13UnarchiveRunRequest\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\"\"\n" + + "\x10DeleteRunRequest\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\"\x87\x05\n" + + "\x03Run\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n" + + "\x04name\x18\x02 \x01(\tR\x04name\x12:\n" + + "\rstorage_state\x18\n" + + " \x01(\x0e2\x15.api.Run.StorageStateR\fstorageState\x12 \n" + + "\vdescription\x18\x03 \x01(\tR\vdescription\x126\n" + + "\rpipeline_spec\x18\x04 \x01(\v2\x11.api.PipelineSpecR\fpipelineSpec\x12G\n" + + "\x13resource_references\x18\x05 \x03(\v2\x16.api.ResourceReferenceR\x12resourceReferences\x12'\n" + + "\x0fservice_account\x18\x0e \x01(\tR\x0eserviceAccount\x129\n" + + "\n" + + "created_at\x18\x06 \x01(\v2\x1a.google.protobuf.TimestampR\tcreatedAt\x12=\n" + + "\fscheduled_at\x18\a \x01(\v2\x1a.google.protobuf.TimestampR\vscheduledAt\x12;\n" + + "\vfinished_at\x18\r \x01(\v2\x1a.google.protobuf.TimestampR\n" + + "finishedAt\x12\x16\n" + + "\x06status\x18\b \x01(\tR\x06status\x12\x14\n" + + "\x05error\x18\f \x01(\tR\x05error\x12(\n" + + "\ametrics\x18\t \x03(\v2\x0e.api.RunMetricR\ametrics\"E\n" + + "\fStorageState\x12\x1a\n" + + "\x16STORAGESTATE_AVAILABLE\x10\x00\x12\x19\n" + + "\x15STORAGESTATE_ARCHIVED\x10\x01\"k\n" + + "\x0fPipelineRuntime\x12+\n" + + "\x11pipeline_manifest\x18\n" + + " \x01(\tR\x10pipelineManifest\x12+\n" + + "\x11workflow_manifest\x18\v \x01(\tR\x10workflowManifest\"h\n" + + "\tRunDetail\x12\x1a\n" + + "\x03run\x18\x01 \x01(\v2\b.api.RunR\x03run\x12?\n" + + "\x10pipeline_runtime\x18\x02 \x01(\v2\x14.api.PipelineRuntimeR\x0fpipelineRuntime\"\xc9\x01\n" + + "\tRunMetric\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\x12\x17\n" + + "\anode_id\x18\x02 \x01(\tR\x06nodeId\x12#\n" + + "\fnumber_value\x18\x03 \x01(\x01H\x00R\vnumberValue\x12-\n" + + 
"\x06format\x18\x04 \x01(\x0e2\x15.api.RunMetric.FormatR\x06format\"2\n" + + "\x06Format\x12\x0f\n" + + "\vUNSPECIFIED\x10\x00\x12\a\n" + + "\x03RAW\x10\x01\x12\x0e\n" + + "\n" + + "PERCENTAGE\x10\x02B\a\n" + + "\x05value\"Z\n" + + "\x17ReportRunMetricsRequest\x12\x15\n" + + "\x06run_id\x18\x01 \x01(\tR\x05runId\x12(\n" + + "\ametrics\x18\x02 \x03(\v2\x0e.api.RunMetricR\ametrics\"\x9e\x03\n" + + "\x18ReportRunMetricsResponse\x12M\n" + + "\aresults\x18\x01 \x03(\v23.api.ReportRunMetricsResponse.ReportRunMetricResultR\aresults\x1a\xb2\x02\n" + + "\x15ReportRunMetricResult\x12\x1f\n" + + "\vmetric_name\x18\x01 \x01(\tR\n" + + "metricName\x12$\n" + + "\x0emetric_node_id\x18\x02 \x01(\tR\fmetricNodeId\x12R\n" + + "\x06status\x18\x03 \x01(\x0e2:.api.ReportRunMetricsResponse.ReportRunMetricResult.StatusR\x06status\x12\x18\n" + + "\amessage\x18\x04 \x01(\tR\amessage\"d\n" + + "\x06Status\x12\x0f\n" + + "\vUNSPECIFIED\x10\x00\x12\x06\n" + + "\x02OK\x10\x01\x12\x14\n" + + "\x10INVALID_ARGUMENT\x10\x02\x12\x17\n" + + "\x13DUPLICATE_REPORTING\x10\x03\x12\x12\n" + + "\x0eINTERNAL_ERROR\x10\x04\"j\n" + + "\x13ReadArtifactRequest\x12\x15\n" + + "\x06run_id\x18\x01 \x01(\tR\x05runId\x12\x17\n" + + "\anode_id\x18\x02 \x01(\tR\x06nodeId\x12#\n" + + "\rartifact_name\x18\x03 \x01(\tR\fartifactName\"*\n" + + "\x14ReadArtifactResponse\x12\x12\n" + + "\x04data\x18\x01 \x01(\fR\x04data2\xc6\b\n" + + "\n" + + "RunService\x12U\n" + + "\vCreateRunV1\x12\x15.api.CreateRunRequest\x1a\x0e.api.RunDetail\"\x1f\x82\xd3\xe4\x93\x02\x19:\x03run\"\x12/apis/v1beta1/runs\x12S\n" + + "\bGetRunV1\x12\x12.api.GetRunRequest\x1a\x0e.api.RunDetail\"#\x82\xd3\xe4\x93\x02\x1d\x12\x1b/apis/v1beta1/runs/{run_id}\x12U\n" + + "\n" + + "ListRunsV1\x12\x14.api.ListRunsRequest\x1a\x15.api.ListRunsResponse\"\x1a\x82\xd3\xe4\x93\x02\x14\x12\x12/apis/v1beta1/runs\x12g\n" + + "\fArchiveRunV1\x12\x16.api.ArchiveRunRequest\x1a\x16.google.protobuf.Empty\"'\x82\xd3\xe4\x93\x02!\"\x1f/apis/v1beta1/runs/{id}:archive\x12m\n" + + "\x0eUnarchiveRunV1\x12\x18.api.UnarchiveRunRequest\x1a\x16.google.protobuf.Empty\")\x82\xd3\xe4\x93\x02#\"!/apis/v1beta1/runs/{id}:unarchive\x12]\n" + + "\vDeleteRunV1\x12\x15.api.DeleteRunRequest\x1a\x16.google.protobuf.Empty\"\x1f\x82\xd3\xe4\x93\x02\x19*\x17/apis/v1beta1/runs/{id}\x12\x87\x01\n" + + "\x12ReportRunMetricsV1\x12\x1c.api.ReportRunMetricsRequest\x1a\x1d.api.ReportRunMetricsResponse\"4\x82\xd3\xe4\x93\x02.:\x01*\")/apis/v1beta1/runs/{run_id}:reportMetrics\x12\x99\x01\n" + + "\x0eReadArtifactV1\x12\x18.api.ReadArtifactRequest\x1a\x19.api.ReadArtifactResponse\"R\x82\xd3\xe4\x93\x02L\x12J/apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read\x12q\n" + + "\x0eTerminateRunV1\x12\x18.api.TerminateRunRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02'\"%/apis/v1beta1/runs/{run_id}/terminate\x12e\n" + + "\n" + + "RetryRunV1\x12\x14.api.RetryRunRequest\x1a\x16.google.protobuf.Empty\")\x82\xd3\xe4\x93\x02#\"!/apis/v1beta1/runs/{run_id}/retryB\x91\x01\x92AQ*\x02\x01\x02R\x1c\n" + + "\adefault\x12\x11\x12\x0f\n" + + "\r\x1a\v.api.StatusZ\x1f\n" + + "\x1d\n" + + "\x06Bearer\x12\x13\b\x02\x1a\rauthorization \x02b\f\n" + + "\n" + + "\n" + + "\x06Bearer\x12\x00Z;github.com/kubeflow/pipelines/backend/api/v1beta1/go_clientb\x06proto3" var ( file_backend_api_v1beta1_run_proto_rawDescOnce sync.Once - file_backend_api_v1beta1_run_proto_rawDescData = file_backend_api_v1beta1_run_proto_rawDesc + file_backend_api_v1beta1_run_proto_rawDescData []byte ) func file_backend_api_v1beta1_run_proto_rawDescGZIP() 
[]byte { file_backend_api_v1beta1_run_proto_rawDescOnce.Do(func() { - file_backend_api_v1beta1_run_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v1beta1_run_proto_rawDescData) + file_backend_api_v1beta1_run_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_run_proto_rawDesc), len(file_backend_api_v1beta1_run_proto_rawDesc))) }) return file_backend_api_v1beta1_run_proto_rawDescData } var file_backend_api_v1beta1_run_proto_enumTypes = make([]protoimpl.EnumInfo, 3) var file_backend_api_v1beta1_run_proto_msgTypes = make([]protoimpl.MessageInfo, 18) -var file_backend_api_v1beta1_run_proto_goTypes = []interface{}{ +var file_backend_api_v1beta1_run_proto_goTypes = []any{ (Run_StorageState)(0), // 0: api.Run.StorageState (RunMetric_Format)(0), // 1: api.RunMetric.Format (ReportRunMetricsResponse_ReportRunMetricResult_Status)(0), // 2: api.ReportRunMetricsResponse.ReportRunMetricResult.Status @@ -1705,235 +1516,16 @@ func file_backend_api_v1beta1_run_proto_init() { if File_backend_api_v1beta1_run_proto != nil { return } - file_backend_api_v1beta1_error_proto_init() file_backend_api_v1beta1_pipeline_spec_proto_init() file_backend_api_v1beta1_resource_reference_proto_init() - if !protoimpl.UnsafeEnabled { - file_backend_api_v1beta1_run_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateRunRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_run_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetRunRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_run_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListRunsRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_run_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TerminateRunRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_run_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RetryRunRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_run_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListRunsResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_run_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ArchiveRunRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_run_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*UnarchiveRunRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_run_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v 
:= v.(*DeleteRunRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_run_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Run); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_run_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineRuntime); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_run_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RunDetail); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_run_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RunMetric); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_run_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ReportRunMetricsRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_run_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ReportRunMetricsResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_run_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ReadArtifactRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_run_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ReadArtifactResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_run_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ReportRunMetricsResponse_ReportRunMetricResult); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_backend_api_v1beta1_run_proto_msgTypes[12].OneofWrappers = []interface{}{ + file_backend_api_v1beta1_run_proto_msgTypes[12].OneofWrappers = []any{ (*RunMetric_NumberValue)(nil), } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v1beta1_run_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_run_proto_rawDesc), len(file_backend_api_v1beta1_run_proto_rawDesc)), NumEnums: 3, NumMessages: 18, NumExtensions: 0, @@ -1945,437 +1537,6 @@ func file_backend_api_v1beta1_run_proto_init() { MessageInfos: file_backend_api_v1beta1_run_proto_msgTypes, }.Build() File_backend_api_v1beta1_run_proto = out.File - file_backend_api_v1beta1_run_proto_rawDesc = nil file_backend_api_v1beta1_run_proto_goTypes = nil file_backend_api_v1beta1_run_proto_depIdxs = nil } - -// Reference imports to 
suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// RunServiceClient is the client API for RunService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type RunServiceClient interface { - // Creates a new run. - CreateRunV1(ctx context.Context, in *CreateRunRequest, opts ...grpc.CallOption) (*RunDetail, error) - // Finds a specific run by ID. - GetRunV1(ctx context.Context, in *GetRunRequest, opts ...grpc.CallOption) (*RunDetail, error) - // Finds all runs. - ListRunsV1(ctx context.Context, in *ListRunsRequest, opts ...grpc.CallOption) (*ListRunsResponse, error) - // Archives a run. - ArchiveRunV1(ctx context.Context, in *ArchiveRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - // Restores an archived run. - UnarchiveRunV1(ctx context.Context, in *UnarchiveRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - // Deletes a run. - DeleteRunV1(ctx context.Context, in *DeleteRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - // ReportRunMetrics reports metrics of a run. Each metric is reported in its - // own transaction, so this API accepts partial failures. Metric can be - // uniquely identified by (run_id, node_id, name). Duplicate reporting will be - // ignored by the API. First reporting wins. - ReportRunMetricsV1(ctx context.Context, in *ReportRunMetricsRequest, opts ...grpc.CallOption) (*ReportRunMetricsResponse, error) - // Finds a run's artifact data. - ReadArtifactV1(ctx context.Context, in *ReadArtifactRequest, opts ...grpc.CallOption) (*ReadArtifactResponse, error) - // Terminates an active run. - TerminateRunV1(ctx context.Context, in *TerminateRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - // Re-initiates a failed or terminated run. - RetryRunV1(ctx context.Context, in *RetryRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) -} - -type runServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewRunServiceClient(cc grpc.ClientConnInterface) RunServiceClient { - return &runServiceClient{cc} -} - -func (c *runServiceClient) CreateRunV1(ctx context.Context, in *CreateRunRequest, opts ...grpc.CallOption) (*RunDetail, error) { - out := new(RunDetail) - err := c.cc.Invoke(ctx, "/api.RunService/CreateRunV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *runServiceClient) GetRunV1(ctx context.Context, in *GetRunRequest, opts ...grpc.CallOption) (*RunDetail, error) { - out := new(RunDetail) - err := c.cc.Invoke(ctx, "/api.RunService/GetRunV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *runServiceClient) ListRunsV1(ctx context.Context, in *ListRunsRequest, opts ...grpc.CallOption) (*ListRunsResponse, error) { - out := new(ListRunsResponse) - err := c.cc.Invoke(ctx, "/api.RunService/ListRunsV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *runServiceClient) ArchiveRunV1(ctx context.Context, in *ArchiveRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/api.RunService/ArchiveRunV1", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *runServiceClient) UnarchiveRunV1(ctx context.Context, in *UnarchiveRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/api.RunService/UnarchiveRunV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *runServiceClient) DeleteRunV1(ctx context.Context, in *DeleteRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/api.RunService/DeleteRunV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *runServiceClient) ReportRunMetricsV1(ctx context.Context, in *ReportRunMetricsRequest, opts ...grpc.CallOption) (*ReportRunMetricsResponse, error) { - out := new(ReportRunMetricsResponse) - err := c.cc.Invoke(ctx, "/api.RunService/ReportRunMetricsV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *runServiceClient) ReadArtifactV1(ctx context.Context, in *ReadArtifactRequest, opts ...grpc.CallOption) (*ReadArtifactResponse, error) { - out := new(ReadArtifactResponse) - err := c.cc.Invoke(ctx, "/api.RunService/ReadArtifactV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *runServiceClient) TerminateRunV1(ctx context.Context, in *TerminateRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/api.RunService/TerminateRunV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *runServiceClient) RetryRunV1(ctx context.Context, in *RetryRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/api.RunService/RetryRunV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// RunServiceServer is the server API for RunService service. -type RunServiceServer interface { - // Creates a new run. - CreateRunV1(context.Context, *CreateRunRequest) (*RunDetail, error) - // Finds a specific run by ID. - GetRunV1(context.Context, *GetRunRequest) (*RunDetail, error) - // Finds all runs. - ListRunsV1(context.Context, *ListRunsRequest) (*ListRunsResponse, error) - // Archives a run. - ArchiveRunV1(context.Context, *ArchiveRunRequest) (*emptypb.Empty, error) - // Restores an archived run. - UnarchiveRunV1(context.Context, *UnarchiveRunRequest) (*emptypb.Empty, error) - // Deletes a run. - DeleteRunV1(context.Context, *DeleteRunRequest) (*emptypb.Empty, error) - // ReportRunMetrics reports metrics of a run. Each metric is reported in its - // own transaction, so this API accepts partial failures. Metric can be - // uniquely identified by (run_id, node_id, name). Duplicate reporting will be - // ignored by the API. First reporting wins. - ReportRunMetricsV1(context.Context, *ReportRunMetricsRequest) (*ReportRunMetricsResponse, error) - // Finds a run's artifact data. - ReadArtifactV1(context.Context, *ReadArtifactRequest) (*ReadArtifactResponse, error) - // Terminates an active run. - TerminateRunV1(context.Context, *TerminateRunRequest) (*emptypb.Empty, error) - // Re-initiates a failed or terminated run. - RetryRunV1(context.Context, *RetryRunRequest) (*emptypb.Empty, error) -} - -// UnimplementedRunServiceServer can be embedded to have forward compatible implementations. 
-type UnimplementedRunServiceServer struct { -} - -func (*UnimplementedRunServiceServer) CreateRunV1(context.Context, *CreateRunRequest) (*RunDetail, error) { - return nil, status.Errorf(codes.Unimplemented, "method CreateRunV1 not implemented") -} -func (*UnimplementedRunServiceServer) GetRunV1(context.Context, *GetRunRequest) (*RunDetail, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetRunV1 not implemented") -} -func (*UnimplementedRunServiceServer) ListRunsV1(context.Context, *ListRunsRequest) (*ListRunsResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ListRunsV1 not implemented") -} -func (*UnimplementedRunServiceServer) ArchiveRunV1(context.Context, *ArchiveRunRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method ArchiveRunV1 not implemented") -} -func (*UnimplementedRunServiceServer) UnarchiveRunV1(context.Context, *UnarchiveRunRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method UnarchiveRunV1 not implemented") -} -func (*UnimplementedRunServiceServer) DeleteRunV1(context.Context, *DeleteRunRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method DeleteRunV1 not implemented") -} -func (*UnimplementedRunServiceServer) ReportRunMetricsV1(context.Context, *ReportRunMetricsRequest) (*ReportRunMetricsResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ReportRunMetricsV1 not implemented") -} -func (*UnimplementedRunServiceServer) ReadArtifactV1(context.Context, *ReadArtifactRequest) (*ReadArtifactResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ReadArtifactV1 not implemented") -} -func (*UnimplementedRunServiceServer) TerminateRunV1(context.Context, *TerminateRunRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method TerminateRunV1 not implemented") -} -func (*UnimplementedRunServiceServer) RetryRunV1(context.Context, *RetryRunRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method RetryRunV1 not implemented") -} - -func RegisterRunServiceServer(s *grpc.Server, srv RunServiceServer) { - s.RegisterService(&_RunService_serviceDesc, srv) -} - -func _RunService_CreateRunV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CreateRunRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RunServiceServer).CreateRunV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.RunService/CreateRunV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RunServiceServer).CreateRunV1(ctx, req.(*CreateRunRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RunService_GetRunV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetRunRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RunServiceServer).GetRunV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.RunService/GetRunV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RunServiceServer).GetRunV1(ctx, req.(*GetRunRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func 
_RunService_ListRunsV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ListRunsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RunServiceServer).ListRunsV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.RunService/ListRunsV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RunServiceServer).ListRunsV1(ctx, req.(*ListRunsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RunService_ArchiveRunV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ArchiveRunRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RunServiceServer).ArchiveRunV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.RunService/ArchiveRunV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RunServiceServer).ArchiveRunV1(ctx, req.(*ArchiveRunRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RunService_UnarchiveRunV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(UnarchiveRunRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RunServiceServer).UnarchiveRunV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.RunService/UnarchiveRunV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RunServiceServer).UnarchiveRunV1(ctx, req.(*UnarchiveRunRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RunService_DeleteRunV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(DeleteRunRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RunServiceServer).DeleteRunV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.RunService/DeleteRunV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RunServiceServer).DeleteRunV1(ctx, req.(*DeleteRunRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RunService_ReportRunMetricsV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ReportRunMetricsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RunServiceServer).ReportRunMetricsV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.RunService/ReportRunMetricsV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RunServiceServer).ReportRunMetricsV1(ctx, req.(*ReportRunMetricsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RunService_ReadArtifactV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ReadArtifactRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - 
return srv.(RunServiceServer).ReadArtifactV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.RunService/ReadArtifactV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RunServiceServer).ReadArtifactV1(ctx, req.(*ReadArtifactRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RunService_TerminateRunV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(TerminateRunRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RunServiceServer).TerminateRunV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.RunService/TerminateRunV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RunServiceServer).TerminateRunV1(ctx, req.(*TerminateRunRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RunService_RetryRunV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(RetryRunRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RunServiceServer).RetryRunV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.RunService/RetryRunV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RunServiceServer).RetryRunV1(ctx, req.(*RetryRunRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _RunService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "api.RunService", - HandlerType: (*RunServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "CreateRunV1", - Handler: _RunService_CreateRunV1_Handler, - }, - { - MethodName: "GetRunV1", - Handler: _RunService_GetRunV1_Handler, - }, - { - MethodName: "ListRunsV1", - Handler: _RunService_ListRunsV1_Handler, - }, - { - MethodName: "ArchiveRunV1", - Handler: _RunService_ArchiveRunV1_Handler, - }, - { - MethodName: "UnarchiveRunV1", - Handler: _RunService_UnarchiveRunV1_Handler, - }, - { - MethodName: "DeleteRunV1", - Handler: _RunService_DeleteRunV1_Handler, - }, - { - MethodName: "ReportRunMetricsV1", - Handler: _RunService_ReportRunMetricsV1_Handler, - }, - { - MethodName: "ReadArtifactV1", - Handler: _RunService_ReadArtifactV1_Handler, - }, - { - MethodName: "TerminateRunV1", - Handler: _RunService_TerminateRunV1_Handler, - }, - { - MethodName: "RetryRunV1", - Handler: _RunService_RetryRunV1_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "backend/api/v1beta1/run.proto", -} diff --git a/backend/api/v1beta1/go_client/run.pb.gw.go b/backend/api/v1beta1/go_client/run.pb.gw.go index da1582e11ac..a34290bd8fe 100644 --- a/backend/api/v1beta1/go_client/run.pb.gw.go +++ b/backend/api/v1beta1/go_client/run.pb.gw.go @@ -10,825 +10,648 @@ package go_client import ( "context" + "errors" "io" "net/http" - "github.com/golang/protobuf/descriptor" - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" + 
"google.golang.org/protobuf/proto" ) // Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = descriptor.ForMessage -var _ = metadata.Join +var ( + _ codes.Code + _ io.Reader + _ status.Status + _ = errors.New + _ = runtime.String + _ = utilities.NewDoubleArray + _ = metadata.Join +) func request_RunService_CreateRunV1_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreateRunRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Run); err != nil && err != io.EOF { + var ( + protoReq CreateRunRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Run); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } msg, err := client.CreateRunV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RunService_CreateRunV1_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreateRunRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Run); err != nil && err != io.EOF { + var ( + protoReq CreateRunRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Run); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.CreateRunV1(ctx, &protoReq) return msg, metadata, err - } func request_RunService_GetRunV1_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - msg, err := client.GetRunV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RunService_GetRunV1_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error 
- _ = err + protoReq GetRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - msg, err := server.GetRunV1(ctx, &protoReq) return msg, metadata, err - } -var ( - filter_RunService_ListRunsV1_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} -) +var filter_RunService_ListRunsV1_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} func request_RunService_ListRunsV1_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListRunsRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListRunsRequest + metadata runtime.ServerMetadata + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_ListRunsV1_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListRunsV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RunService_ListRunsV1_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListRunsRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListRunsRequest + metadata runtime.ServerMetadata + ) if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_ListRunsV1_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.ListRunsV1(ctx, &protoReq) return msg, metadata, err - } func request_RunService_ArchiveRunV1_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ArchiveRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq ArchiveRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := client.ArchiveRunV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RunService_ArchiveRunV1_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var 
protoReq ArchiveRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq ArchiveRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := server.ArchiveRunV1(ctx, &protoReq) return msg, metadata, err - } func request_RunService_UnarchiveRunV1_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq UnarchiveRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq UnarchiveRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := client.UnarchiveRunV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RunService_UnarchiveRunV1_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq UnarchiveRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq UnarchiveRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := server.UnarchiveRunV1(ctx, &protoReq) return msg, metadata, err - } func request_RunService_DeleteRunV1_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeleteRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DeleteRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := client.DeleteRunV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RunService_DeleteRunV1_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) 
(proto.Message, runtime.ServerMetadata, error) { - var protoReq DeleteRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DeleteRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["id"] + val, ok := pathParams["id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") } - protoReq.Id, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } - msg, err := server.DeleteRunV1(ctx, &protoReq) return msg, metadata, err - } func request_RunService_ReportRunMetricsV1_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ReportRunMetricsRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - var ( - val string - ok bool - err error - _ = err + protoReq ReportRunMetricsRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - msg, err := client.ReportRunMetricsV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RunService_ReportRunMetricsV1_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ReportRunMetricsRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - var ( - val string - ok bool - err error - _ = err + protoReq ReportRunMetricsRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - msg, err := server.ReportRunMetricsV1(ctx, &protoReq) return msg, metadata, err - } 
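
For orientation, a minimal sketch of how these regenerated gateway handlers are typically wired up through RegisterRunServiceHandlerFromEndpoint (which this regeneration moves from the deprecated grpc.Dial to grpc.NewClient). The go_client import path, the "localhost:8887" gRPC endpoint, and the ":8888" listen address are illustrative assumptions, not values taken from this patch:

package main

import (
	"context"
	"log"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"

	// Assumed import path for the generated v1beta1 client package in this repo.
	api "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	// JSON <-> gRPC translation mux from grpc-gateway v2.
	mux := runtime.NewServeMux()

	// RegisterRunServiceHandlerFromEndpoint dials with grpc.NewClient internally;
	// callers only supply the endpoint and dial options.
	opts := []grpc.DialOption{grpc.WithTransportCredentials(insecure.NewCredentials())}
	if err := api.RegisterRunServiceHandlerFromEndpoint(ctx, mux, "localhost:8887", opts); err != nil {
		log.Fatalf("failed to register RunService gateway: %v", err)
	}

	// Serves e.g. GET /apis/v1beta1/runs and GET /apis/v1beta1/runs/{run_id}.
	log.Fatal(http.ListenAndServe(":8888", mux))
}
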
func request_RunService_ReadArtifactV1_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ReadArtifactRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq ReadArtifactRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - val, ok = pathParams["node_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "node_id") } - protoReq.NodeId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "node_id", err) } - val, ok = pathParams["artifact_name"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "artifact_name") } - protoReq.ArtifactName, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "artifact_name", err) } - msg, err := client.ReadArtifactV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RunService_ReadArtifactV1_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ReadArtifactRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq ReadArtifactRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - val, ok = pathParams["node_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "node_id") } - protoReq.NodeId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "node_id", err) } - val, ok = pathParams["artifact_name"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "artifact_name") } - protoReq.ArtifactName, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "artifact_name", err) } - msg, err := server.ReadArtifactV1(ctx, &protoReq) return msg, metadata, err - } func request_RunService_TerminateRunV1_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq TerminateRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq 
TerminateRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - msg, err := client.TerminateRunV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RunService_TerminateRunV1_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq TerminateRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq TerminateRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - msg, err := server.TerminateRunV1(ctx, &protoReq) return msg, metadata, err - } func request_RunService_RetryRunV1_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq RetryRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq RetryRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - msg, err := client.RetryRunV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RunService_RetryRunV1_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq RetryRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq RetryRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - msg, err := server.RetryRunV1(ctx, &protoReq) return msg, metadata, err - } // RegisterRunServiceHandlerServer registers the http handlers for service RunService to "mux". // UnaryRPC :call RunServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. 
// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterRunServiceHandlerFromEndpoint instead. +// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call. func RegisterRunServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server RunServiceServer) error { - - mux.Handle("POST", pattern_RunService_CreateRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_CreateRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.RunService/CreateRunV1", runtime.WithHTTPPathPattern("/apis/v1beta1/runs")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RunService_CreateRunV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RunService_CreateRunV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_CreateRunV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_CreateRunV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_RunService_GetRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_RunService_GetRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.RunService/GetRunV1", runtime.WithHTTPPathPattern("/apis/v1beta1/runs/{run_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RunService_GetRunV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RunService_GetRunV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_GetRunV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_GetRunV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_RunService_ListRunsV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_RunService_ListRunsV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.RunService/ListRunsV1", runtime.WithHTTPPathPattern("/apis/v1beta1/runs")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RunService_ListRunsV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RunService_ListRunsV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_ListRunsV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_ListRunsV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_RunService_ArchiveRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_ArchiveRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.RunService/ArchiveRunV1", runtime.WithHTTPPathPattern("/apis/v1beta1/runs/{id}:archive")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RunService_ArchiveRunV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RunService_ArchiveRunV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_ArchiveRunV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_ArchiveRunV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_RunService_UnarchiveRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_UnarchiveRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.RunService/UnarchiveRunV1", runtime.WithHTTPPathPattern("/apis/v1beta1/runs/{id}:unarchive")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RunService_UnarchiveRunV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RunService_UnarchiveRunV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_UnarchiveRunV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_UnarchiveRunV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("DELETE", pattern_RunService_DeleteRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodDelete, pattern_RunService_DeleteRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.RunService/DeleteRunV1", runtime.WithHTTPPathPattern("/apis/v1beta1/runs/{id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RunService_DeleteRunV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RunService_DeleteRunV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_DeleteRunV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_DeleteRunV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_RunService_ReportRunMetricsV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_ReportRunMetricsV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.RunService/ReportRunMetricsV1", runtime.WithHTTPPathPattern("/apis/v1beta1/runs/{run_id}:reportMetrics")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RunService_ReportRunMetricsV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RunService_ReportRunMetricsV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_ReportRunMetricsV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_ReportRunMetricsV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_RunService_ReadArtifactV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_RunService_ReadArtifactV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.RunService/ReadArtifactV1", runtime.WithHTTPPathPattern("/apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RunService_ReadArtifactV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RunService_ReadArtifactV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_ReadArtifactV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_ReadArtifactV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_RunService_TerminateRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_TerminateRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.RunService/TerminateRunV1", runtime.WithHTTPPathPattern("/apis/v1beta1/runs/{run_id}/terminate")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RunService_TerminateRunV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RunService_TerminateRunV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_TerminateRunV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_TerminateRunV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_RunService_RetryRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_RetryRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.RunService/RetryRunV1", runtime.WithHTTPPathPattern("/apis/v1beta1/runs/{run_id}/retry")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RunService_RetryRunV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RunService_RetryRunV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_RetryRunV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_RetryRunV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) return nil @@ -837,25 +660,24 @@ func RegisterRunServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, // RegisterRunServiceHandlerFromEndpoint is same as RegisterRunServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterRunServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) + conn, err := grpc.NewClient(endpoint, opts...) if err != nil { return err } defer func() { if err != nil { if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } return } go func() { <-ctx.Done() if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } }() }() - return RegisterRunServiceHandler(ctx, mux, conn) } @@ -869,252 +691,203 @@ func RegisterRunServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn // to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "RunServiceClient". // Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "RunServiceClient" // doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "RunServiceClient" to call the correct interceptors. +// "RunServiceClient" to call the correct interceptors. This client ignores the HTTP middlewares. 
func RegisterRunServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client RunServiceClient) error { - - mux.Handle("POST", pattern_RunService_CreateRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_CreateRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.RunService/CreateRunV1", runtime.WithHTTPPathPattern("/apis/v1beta1/runs")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RunService_CreateRunV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RunService_CreateRunV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_CreateRunV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_CreateRunV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_RunService_GetRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_RunService_GetRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.RunService/GetRunV1", runtime.WithHTTPPathPattern("/apis/v1beta1/runs/{run_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RunService_GetRunV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RunService_GetRunV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_GetRunV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_GetRunV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_RunService_ListRunsV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_RunService_ListRunsV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.RunService/ListRunsV1", runtime.WithHTTPPathPattern("/apis/v1beta1/runs")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RunService_ListRunsV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RunService_ListRunsV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_ListRunsV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_ListRunsV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_RunService_ArchiveRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_ArchiveRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.RunService/ArchiveRunV1", runtime.WithHTTPPathPattern("/apis/v1beta1/runs/{id}:archive")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RunService_ArchiveRunV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RunService_ArchiveRunV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_ArchiveRunV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_ArchiveRunV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_RunService_UnarchiveRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_UnarchiveRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.RunService/UnarchiveRunV1", runtime.WithHTTPPathPattern("/apis/v1beta1/runs/{id}:unarchive")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RunService_UnarchiveRunV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RunService_UnarchiveRunV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_UnarchiveRunV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_UnarchiveRunV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("DELETE", pattern_RunService_DeleteRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodDelete, pattern_RunService_DeleteRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.RunService/DeleteRunV1", runtime.WithHTTPPathPattern("/apis/v1beta1/runs/{id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RunService_DeleteRunV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RunService_DeleteRunV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_DeleteRunV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_DeleteRunV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_RunService_ReportRunMetricsV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_ReportRunMetricsV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.RunService/ReportRunMetricsV1", runtime.WithHTTPPathPattern("/apis/v1beta1/runs/{run_id}:reportMetrics")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RunService_ReportRunMetricsV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RunService_ReportRunMetricsV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_ReportRunMetricsV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_ReportRunMetricsV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_RunService_ReadArtifactV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_RunService_ReadArtifactV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.RunService/ReadArtifactV1", runtime.WithHTTPPathPattern("/apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RunService_ReadArtifactV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RunService_ReadArtifactV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_ReadArtifactV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_ReadArtifactV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_RunService_TerminateRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_TerminateRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.RunService/TerminateRunV1", runtime.WithHTTPPathPattern("/apis/v1beta1/runs/{run_id}/terminate")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RunService_TerminateRunV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RunService_TerminateRunV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_TerminateRunV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_TerminateRunV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_RunService_RetryRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_RetryRunV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.RunService/RetryRunV1", runtime.WithHTTPPathPattern("/apis/v1beta1/runs/{run_id}/retry")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RunService_RetryRunV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RunService_RetryRunV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_RetryRunV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_RetryRunV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - return nil } var ( - pattern_RunService_CreateRunV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "runs"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_RunService_GetRunV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "runs", "run_id"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_RunService_ListRunsV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "runs"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_RunService_ArchiveRunV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "runs", "id"}, "archive", runtime.AssumeColonVerbOpt(true))) - - pattern_RunService_UnarchiveRunV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "runs", "id"}, "unarchive", runtime.AssumeColonVerbOpt(true))) - - pattern_RunService_DeleteRunV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "runs", "id"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_RunService_ReportRunMetricsV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "runs", "run_id"}, "reportMetrics", runtime.AssumeColonVerbOpt(true))) - - pattern_RunService_ReadArtifactV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4, 1, 0, 4, 1, 5, 5, 2, 6, 1, 0, 4, 1, 5, 7}, []string{"apis", "v1beta1", "runs", "run_id", "nodes", "node_id", "artifacts", "artifact_name"}, "read", runtime.AssumeColonVerbOpt(true))) - - pattern_RunService_TerminateRunV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4}, []string{"apis", "v1beta1", "runs", "run_id", "terminate"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_RunService_RetryRunV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4}, []string{"apis", "v1beta1", "runs", "run_id", "retry"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_RunService_CreateRunV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "runs"}, "")) + pattern_RunService_GetRunV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "runs", "run_id"}, "")) + pattern_RunService_ListRunsV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "runs"}, "")) + pattern_RunService_ArchiveRunV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "runs", "id"}, "archive")) + pattern_RunService_UnarchiveRunV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "runs", "id"}, "unarchive")) + pattern_RunService_DeleteRunV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "runs", "id"}, "")) + pattern_RunService_ReportRunMetricsV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "runs", "run_id"}, "reportMetrics")) + pattern_RunService_ReadArtifactV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4, 1, 0, 
4, 1, 5, 5, 2, 6, 1, 0, 4, 1, 5, 7}, []string{"apis", "v1beta1", "runs", "run_id", "nodes", "node_id", "artifacts", "artifact_name"}, "read")) + pattern_RunService_TerminateRunV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4}, []string{"apis", "v1beta1", "runs", "run_id", "terminate"}, "")) + pattern_RunService_RetryRunV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4}, []string{"apis", "v1beta1", "runs", "run_id", "retry"}, "")) ) var ( - forward_RunService_CreateRunV1_0 = runtime.ForwardResponseMessage - - forward_RunService_GetRunV1_0 = runtime.ForwardResponseMessage - - forward_RunService_ListRunsV1_0 = runtime.ForwardResponseMessage - - forward_RunService_ArchiveRunV1_0 = runtime.ForwardResponseMessage - - forward_RunService_UnarchiveRunV1_0 = runtime.ForwardResponseMessage - - forward_RunService_DeleteRunV1_0 = runtime.ForwardResponseMessage - + forward_RunService_CreateRunV1_0 = runtime.ForwardResponseMessage + forward_RunService_GetRunV1_0 = runtime.ForwardResponseMessage + forward_RunService_ListRunsV1_0 = runtime.ForwardResponseMessage + forward_RunService_ArchiveRunV1_0 = runtime.ForwardResponseMessage + forward_RunService_UnarchiveRunV1_0 = runtime.ForwardResponseMessage + forward_RunService_DeleteRunV1_0 = runtime.ForwardResponseMessage forward_RunService_ReportRunMetricsV1_0 = runtime.ForwardResponseMessage - - forward_RunService_ReadArtifactV1_0 = runtime.ForwardResponseMessage - - forward_RunService_TerminateRunV1_0 = runtime.ForwardResponseMessage - - forward_RunService_RetryRunV1_0 = runtime.ForwardResponseMessage + forward_RunService_ReadArtifactV1_0 = runtime.ForwardResponseMessage + forward_RunService_TerminateRunV1_0 = runtime.ForwardResponseMessage + forward_RunService_RetryRunV1_0 = runtime.ForwardResponseMessage ) diff --git a/backend/api/v1beta1/go_client/run_grpc.pb.go b/backend/api/v1beta1/go_client/run_grpc.pb.go new file mode 100644 index 00000000000..c5d34e562ec --- /dev/null +++ b/backend/api/v1beta1/go_client/run_grpc.pb.go @@ -0,0 +1,504 @@ +// Copyright 2018 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v6.31.1 +// source: backend/api/v1beta1/run.proto + +package go_client + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + emptypb "google.golang.org/protobuf/types/known/emptypb" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. 
+const _ = grpc.SupportPackageIsVersion9 + +const ( + RunService_CreateRunV1_FullMethodName = "/api.RunService/CreateRunV1" + RunService_GetRunV1_FullMethodName = "/api.RunService/GetRunV1" + RunService_ListRunsV1_FullMethodName = "/api.RunService/ListRunsV1" + RunService_ArchiveRunV1_FullMethodName = "/api.RunService/ArchiveRunV1" + RunService_UnarchiveRunV1_FullMethodName = "/api.RunService/UnarchiveRunV1" + RunService_DeleteRunV1_FullMethodName = "/api.RunService/DeleteRunV1" + RunService_ReportRunMetricsV1_FullMethodName = "/api.RunService/ReportRunMetricsV1" + RunService_ReadArtifactV1_FullMethodName = "/api.RunService/ReadArtifactV1" + RunService_TerminateRunV1_FullMethodName = "/api.RunService/TerminateRunV1" + RunService_RetryRunV1_FullMethodName = "/api.RunService/RetryRunV1" +) + +// RunServiceClient is the client API for RunService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type RunServiceClient interface { + // Creates a new run. + CreateRunV1(ctx context.Context, in *CreateRunRequest, opts ...grpc.CallOption) (*RunDetail, error) + // Finds a specific run by ID. + GetRunV1(ctx context.Context, in *GetRunRequest, opts ...grpc.CallOption) (*RunDetail, error) + // Finds all runs. + ListRunsV1(ctx context.Context, in *ListRunsRequest, opts ...grpc.CallOption) (*ListRunsResponse, error) + // Archives a run. + ArchiveRunV1(ctx context.Context, in *ArchiveRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + // Restores an archived run. + UnarchiveRunV1(ctx context.Context, in *UnarchiveRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + // Deletes a run. + DeleteRunV1(ctx context.Context, in *DeleteRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + // ReportRunMetrics reports metrics of a run. Each metric is reported in its + // own transaction, so this API accepts partial failures. Metric can be + // uniquely identified by (run_id, node_id, name). Duplicate reporting will be + // ignored by the API. First reporting wins. + ReportRunMetricsV1(ctx context.Context, in *ReportRunMetricsRequest, opts ...grpc.CallOption) (*ReportRunMetricsResponse, error) + // Finds a run's artifact data. + ReadArtifactV1(ctx context.Context, in *ReadArtifactRequest, opts ...grpc.CallOption) (*ReadArtifactResponse, error) + // Terminates an active run. + TerminateRunV1(ctx context.Context, in *TerminateRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + // Re-initiates a failed or terminated run. + RetryRunV1(ctx context.Context, in *RetryRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) +} + +type runServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewRunServiceClient(cc grpc.ClientConnInterface) RunServiceClient { + return &runServiceClient{cc} +} + +func (c *runServiceClient) CreateRunV1(ctx context.Context, in *CreateRunRequest, opts ...grpc.CallOption) (*RunDetail, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(RunDetail) + err := c.cc.Invoke(ctx, RunService_CreateRunV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) GetRunV1(ctx context.Context, in *GetRunRequest, opts ...grpc.CallOption) (*RunDetail, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(RunDetail) + err := c.cc.Invoke(ctx, RunService_GetRunV1_FullMethodName, in, out, cOpts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) ListRunsV1(ctx context.Context, in *ListRunsRequest, opts ...grpc.CallOption) (*ListRunsResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ListRunsResponse) + err := c.cc.Invoke(ctx, RunService_ListRunsV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) ArchiveRunV1(ctx context.Context, in *ArchiveRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, RunService_ArchiveRunV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) UnarchiveRunV1(ctx context.Context, in *UnarchiveRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, RunService_UnarchiveRunV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) DeleteRunV1(ctx context.Context, in *DeleteRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, RunService_DeleteRunV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) ReportRunMetricsV1(ctx context.Context, in *ReportRunMetricsRequest, opts ...grpc.CallOption) (*ReportRunMetricsResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ReportRunMetricsResponse) + err := c.cc.Invoke(ctx, RunService_ReportRunMetricsV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) ReadArtifactV1(ctx context.Context, in *ReadArtifactRequest, opts ...grpc.CallOption) (*ReadArtifactResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ReadArtifactResponse) + err := c.cc.Invoke(ctx, RunService_ReadArtifactV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) TerminateRunV1(ctx context.Context, in *TerminateRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, RunService_TerminateRunV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) RetryRunV1(ctx context.Context, in *RetryRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, RunService_RetryRunV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// RunServiceServer is the server API for RunService service. +// All implementations must embed UnimplementedRunServiceServer +// for forward compatibility. +type RunServiceServer interface { + // Creates a new run. + CreateRunV1(context.Context, *CreateRunRequest) (*RunDetail, error) + // Finds a specific run by ID. 
+ GetRunV1(context.Context, *GetRunRequest) (*RunDetail, error) + // Finds all runs. + ListRunsV1(context.Context, *ListRunsRequest) (*ListRunsResponse, error) + // Archives a run. + ArchiveRunV1(context.Context, *ArchiveRunRequest) (*emptypb.Empty, error) + // Restores an archived run. + UnarchiveRunV1(context.Context, *UnarchiveRunRequest) (*emptypb.Empty, error) + // Deletes a run. + DeleteRunV1(context.Context, *DeleteRunRequest) (*emptypb.Empty, error) + // ReportRunMetrics reports metrics of a run. Each metric is reported in its + // own transaction, so this API accepts partial failures. Metric can be + // uniquely identified by (run_id, node_id, name). Duplicate reporting will be + // ignored by the API. First reporting wins. + ReportRunMetricsV1(context.Context, *ReportRunMetricsRequest) (*ReportRunMetricsResponse, error) + // Finds a run's artifact data. + ReadArtifactV1(context.Context, *ReadArtifactRequest) (*ReadArtifactResponse, error) + // Terminates an active run. + TerminateRunV1(context.Context, *TerminateRunRequest) (*emptypb.Empty, error) + // Re-initiates a failed or terminated run. + RetryRunV1(context.Context, *RetryRunRequest) (*emptypb.Empty, error) + mustEmbedUnimplementedRunServiceServer() +} + +// UnimplementedRunServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. +type UnimplementedRunServiceServer struct{} + +func (UnimplementedRunServiceServer) CreateRunV1(context.Context, *CreateRunRequest) (*RunDetail, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreateRunV1 not implemented") +} +func (UnimplementedRunServiceServer) GetRunV1(context.Context, *GetRunRequest) (*RunDetail, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetRunV1 not implemented") +} +func (UnimplementedRunServiceServer) ListRunsV1(context.Context, *ListRunsRequest) (*ListRunsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListRunsV1 not implemented") +} +func (UnimplementedRunServiceServer) ArchiveRunV1(context.Context, *ArchiveRunRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method ArchiveRunV1 not implemented") +} +func (UnimplementedRunServiceServer) UnarchiveRunV1(context.Context, *UnarchiveRunRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method UnarchiveRunV1 not implemented") +} +func (UnimplementedRunServiceServer) DeleteRunV1(context.Context, *DeleteRunRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeleteRunV1 not implemented") +} +func (UnimplementedRunServiceServer) ReportRunMetricsV1(context.Context, *ReportRunMetricsRequest) (*ReportRunMetricsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ReportRunMetricsV1 not implemented") +} +func (UnimplementedRunServiceServer) ReadArtifactV1(context.Context, *ReadArtifactRequest) (*ReadArtifactResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ReadArtifactV1 not implemented") +} +func (UnimplementedRunServiceServer) TerminateRunV1(context.Context, *TerminateRunRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method TerminateRunV1 not implemented") +} +func (UnimplementedRunServiceServer) RetryRunV1(context.Context, *RetryRunRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method 
RetryRunV1 not implemented") +} +func (UnimplementedRunServiceServer) mustEmbedUnimplementedRunServiceServer() {} +func (UnimplementedRunServiceServer) testEmbeddedByValue() {} + +// UnsafeRunServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to RunServiceServer will +// result in compilation errors. +type UnsafeRunServiceServer interface { + mustEmbedUnimplementedRunServiceServer() +} + +func RegisterRunServiceServer(s grpc.ServiceRegistrar, srv RunServiceServer) { + // If the following call pancis, it indicates UnimplementedRunServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. + if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&RunService_ServiceDesc, srv) +} + +func _RunService_CreateRunV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).CreateRunV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_CreateRunV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).CreateRunV1(ctx, req.(*CreateRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RunService_GetRunV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).GetRunV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_GetRunV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).GetRunV1(ctx, req.(*GetRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RunService_ListRunsV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListRunsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).ListRunsV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_ListRunsV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).ListRunsV1(ctx, req.(*ListRunsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RunService_ArchiveRunV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ArchiveRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).ArchiveRunV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_ArchiveRunV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).ArchiveRunV1(ctx, req.(*ArchiveRunRequest)) + } + return 
interceptor(ctx, in, info, handler) +} + +func _RunService_UnarchiveRunV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UnarchiveRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).UnarchiveRunV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_UnarchiveRunV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).UnarchiveRunV1(ctx, req.(*UnarchiveRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RunService_DeleteRunV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).DeleteRunV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_DeleteRunV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).DeleteRunV1(ctx, req.(*DeleteRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RunService_ReportRunMetricsV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ReportRunMetricsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).ReportRunMetricsV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_ReportRunMetricsV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).ReportRunMetricsV1(ctx, req.(*ReportRunMetricsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RunService_ReadArtifactV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ReadArtifactRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).ReadArtifactV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_ReadArtifactV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).ReadArtifactV1(ctx, req.(*ReadArtifactRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RunService_TerminateRunV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(TerminateRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).TerminateRunV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_TerminateRunV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).TerminateRunV1(ctx, req.(*TerminateRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RunService_RetryRunV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { 
+ in := new(RetryRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).RetryRunV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_RetryRunV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).RetryRunV1(ctx, req.(*RetryRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// RunService_ServiceDesc is the grpc.ServiceDesc for RunService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var RunService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "api.RunService", + HandlerType: (*RunServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateRunV1", + Handler: _RunService_CreateRunV1_Handler, + }, + { + MethodName: "GetRunV1", + Handler: _RunService_GetRunV1_Handler, + }, + { + MethodName: "ListRunsV1", + Handler: _RunService_ListRunsV1_Handler, + }, + { + MethodName: "ArchiveRunV1", + Handler: _RunService_ArchiveRunV1_Handler, + }, + { + MethodName: "UnarchiveRunV1", + Handler: _RunService_UnarchiveRunV1_Handler, + }, + { + MethodName: "DeleteRunV1", + Handler: _RunService_DeleteRunV1_Handler, + }, + { + MethodName: "ReportRunMetricsV1", + Handler: _RunService_ReportRunMetricsV1_Handler, + }, + { + MethodName: "ReadArtifactV1", + Handler: _RunService_ReadArtifactV1_Handler, + }, + { + MethodName: "TerminateRunV1", + Handler: _RunService_TerminateRunV1_Handler, + }, + { + MethodName: "RetryRunV1", + Handler: _RunService_RetryRunV1_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "backend/api/v1beta1/run.proto", +} diff --git a/backend/api/v1beta1/go_client/task.pb.go b/backend/api/v1beta1/go_client/task.pb.go index fd2a4207c97..29b3c4c7845 100644 --- a/backend/api/v1beta1/go_client/task.pb.go +++ b/backend/api/v1beta1/go_client/task.pb.go @@ -14,23 +14,20 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v1beta1/task.proto package go_client import ( - context "context" _ "google.golang.org/genproto/googleapis/api/annotations" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" timestamppb "google.golang.org/protobuf/types/known/timestamppb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -41,10 +38,7 @@ const ( ) type Task struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Output. Unique task ID. Generated by API server. Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` // Optional input field. The Namespace to which this pipeline task belongs. @@ -63,16 +57,16 @@ type Task struct { // Optional input field. The time this task is finished. FinishedAt *timestamppb.Timestamp `protobuf:"bytes,7,opt,name=finished_at,json=finishedAt,proto3" json:"finished_at,omitempty"` // Required input field. 
- Fingerprint string `protobuf:"bytes,8,opt,name=fingerprint,proto3" json:"fingerprint,omitempty"` + Fingerprint string `protobuf:"bytes,8,opt,name=fingerprint,proto3" json:"fingerprint,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Task) Reset() { *x = Task{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_task_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_task_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Task) String() string { @@ -83,7 +77,7 @@ func (*Task) ProtoMessage() {} func (x *Task) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_task_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -155,20 +149,17 @@ func (x *Task) GetFingerprint() string { } type CreateTaskRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Task *Task `protobuf:"bytes,1,opt,name=task,proto3" json:"task,omitempty"` unknownFields protoimpl.UnknownFields - - Task *Task `protobuf:"bytes,1,opt,name=task,proto3" json:"task,omitempty"` + sizeCache protoimpl.SizeCache } func (x *CreateTaskRequest) Reset() { *x = CreateTaskRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_task_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_task_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *CreateTaskRequest) String() string { @@ -179,7 +170,7 @@ func (*CreateTaskRequest) ProtoMessage() {} func (x *CreateTaskRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_task_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -202,10 +193,7 @@ func (x *CreateTaskRequest) GetTask() *Task { } type ListTasksRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // A page token to request the next page of results. The token is acquried // from the nextPageToken field of the response from the previous // ListExperiment call or can be omitted when fetching the first page. @@ -223,16 +211,16 @@ type ListTasksRequest struct { ResourceReferenceKey *ResourceKey `protobuf:"bytes,4,opt,name=resource_reference_key,json=resourceReferenceKey,proto3" json:"resource_reference_key,omitempty"` // A url-encoded, JSON-serialized Filter protocol buffer (see // [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/v1beta1/filter.proto)). 
- Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListTasksRequest) Reset() { *x = ListTasksRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_task_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_task_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListTasksRequest) String() string { @@ -243,7 +231,7 @@ func (*ListTasksRequest) ProtoMessage() {} func (x *ListTasksRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_task_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -294,25 +282,22 @@ func (x *ListTasksRequest) GetFilter() string { } type ListTasksResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // A list of tasks returned. Tasks []*Task `protobuf:"bytes,1,rep,name=tasks,proto3" json:"tasks,omitempty"` // The token to list the next page of experiments. NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` // The total number of experiments for the given query. - TotalSize int32 `protobuf:"varint,3,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` + TotalSize int32 `protobuf:"varint,3,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListTasksResponse) Reset() { *x = ListTasksResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_task_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_task_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListTasksResponse) String() string { @@ -323,7 +308,7 @@ func (*ListTasksResponse) ProtoMessage() {} func (x *ListTasksResponse) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_task_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -361,92 +346,52 @@ func (x *ListTasksResponse) GetTotalSize() int32 { var File_backend_api_v1beta1_task_proto protoreflect.FileDescriptor -var file_backend_api_v1beta1_task_proto_rawDesc = []byte{ - 0x0a, 0x1e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x74, 0x61, 0x73, 0x6b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x12, 0x03, 0x61, 0x70, 0x69, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, - 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2c, 0x62, 0x61, 0x63, 0x6b, 0x65, 
0x6e, 0x64, 0x2f, 0x61, 0x70, - 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x22, 0xb2, 0x02, 0x0a, 0x04, 0x54, 0x61, 0x73, 0x6b, 0x12, 0x0e, 0x0a, 0x02, 0x69, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x6e, - 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, - 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x22, 0x0a, 0x0c, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0c, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, - 0x05, 0x72, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x72, 0x75, - 0x6e, 0x49, 0x64, 0x12, 0x28, 0x0a, 0x0f, 0x6d, 0x6c, 0x6d, 0x64, 0x45, 0x78, 0x65, 0x63, 0x75, - 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x44, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x6d, 0x6c, - 0x6d, 0x64, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x44, 0x12, 0x39, 0x0a, - 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, - 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x3b, 0x0a, 0x0b, 0x66, 0x69, 0x6e, 0x69, - 0x73, 0x68, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, - 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0a, 0x66, 0x69, 0x6e, 0x69, 0x73, - 0x68, 0x65, 0x64, 0x41, 0x74, 0x12, 0x20, 0x0a, 0x0b, 0x66, 0x69, 0x6e, 0x67, 0x65, 0x72, 0x70, - 0x72, 0x69, 0x6e, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x66, 0x69, 0x6e, 0x67, - 0x65, 0x72, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x22, 0x32, 0x0a, 0x11, 0x43, 0x72, 0x65, 0x61, 0x74, - 0x65, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x04, - 0x74, 0x61, 0x73, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x09, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x04, 0x74, 0x61, 0x73, 0x6b, 0x22, 0xc7, 0x01, 0x0a, 0x10, - 0x4c, 0x69, 0x73, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, - 0x1b, 0x0a, 0x09, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x05, 0x52, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x17, 0x0a, 0x07, - 0x73, 0x6f, 0x72, 0x74, 0x5f, 0x62, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, - 0x6f, 0x72, 0x74, 0x42, 0x79, 0x12, 0x46, 0x0a, 0x16, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x6b, 0x65, 0x79, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x14, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x4b, 0x65, 0x79, 0x12, 0x16, 
0x0a, - 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x66, - 0x69, 0x6c, 0x74, 0x65, 0x72, 0x22, 0x7b, 0x0a, 0x11, 0x4c, 0x69, 0x73, 0x74, 0x54, 0x61, 0x73, - 0x6b, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1f, 0x0a, 0x05, 0x74, 0x61, - 0x73, 0x6b, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x09, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x54, 0x61, 0x73, 0x6b, 0x52, 0x05, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x12, 0x26, 0x0a, 0x0f, 0x6e, - 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, - 0x6b, 0x65, 0x6e, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x5f, 0x73, 0x69, 0x7a, - 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x53, 0x69, - 0x7a, 0x65, 0x32, 0xc0, 0x01, 0x0a, 0x0b, 0x54, 0x61, 0x73, 0x6b, 0x53, 0x65, 0x72, 0x76, 0x69, - 0x63, 0x65, 0x12, 0x55, 0x0a, 0x0c, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x61, 0x73, 0x6b, - 0x56, 0x31, 0x12, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, - 0x61, 0x73, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x09, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x22, 0x22, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1c, 0x3a, 0x04, 0x74, - 0x61, 0x73, 0x6b, 0x22, 0x14, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, - 0x68, 0x61, 0x31, 0x2f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x12, 0x5a, 0x0a, 0x0b, 0x4c, 0x69, 0x73, - 0x74, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x56, 0x31, 0x12, 0x15, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, - 0x69, 0x73, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x1c, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x16, 0x12, - 0x14, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2f, - 0x74, 0x61, 0x73, 0x6b, 0x73, 0x42, 0x3d, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, - 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, - 0x70, 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x67, 0x6f, 0x5f, 0x63, 0x6c, - 0x69, 0x65, 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v1beta1_task_proto_rawDesc = "" + + "\n" + + "\x1ebackend/api/v1beta1/task.proto\x12\x03api\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a,backend/api/v1beta1/resource_reference.proto\"\xb2\x02\n" + + "\x04Task\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\x12\x1c\n" + + "\tnamespace\x18\x02 \x01(\tR\tnamespace\x12\"\n" + + "\fpipelineName\x18\x03 \x01(\tR\fpipelineName\x12\x14\n" + + "\x05runId\x18\x04 \x01(\tR\x05runId\x12(\n" + + "\x0fmlmdExecutionID\x18\x05 \x01(\tR\x0fmlmdExecutionID\x129\n" + + "\n" + + "created_at\x18\x06 \x01(\v2\x1a.google.protobuf.TimestampR\tcreatedAt\x12;\n" + + "\vfinished_at\x18\a \x01(\v2\x1a.google.protobuf.TimestampR\n" + + "finishedAt\x12 \n" + + "\vfingerprint\x18\b \x01(\tR\vfingerprint\"2\n" + + "\x11CreateTaskRequest\x12\x1d\n" + + "\x04task\x18\x01 \x01(\v2\t.api.TaskR\x04task\"\xc7\x01\n" + + "\x10ListTasksRequest\x12\x1d\n" + + "\n" + + "page_token\x18\x01 \x01(\tR\tpageToken\x12\x1b\n" + + 
"\tpage_size\x18\x02 \x01(\x05R\bpageSize\x12\x17\n" + + "\asort_by\x18\x03 \x01(\tR\x06sortBy\x12F\n" + + "\x16resource_reference_key\x18\x04 \x01(\v2\x10.api.ResourceKeyR\x14resourceReferenceKey\x12\x16\n" + + "\x06filter\x18\x05 \x01(\tR\x06filter\"{\n" + + "\x11ListTasksResponse\x12\x1f\n" + + "\x05tasks\x18\x01 \x03(\v2\t.api.TaskR\x05tasks\x12&\n" + + "\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\x12\x1d\n" + + "\n" + + "total_size\x18\x03 \x01(\x05R\ttotalSize2\xc0\x01\n" + + "\vTaskService\x12U\n" + + "\fCreateTaskV1\x12\x16.api.CreateTaskRequest\x1a\t.api.Task\"\"\x82\xd3\xe4\x93\x02\x1c:\x04task\"\x14/apis/v1alpha1/tasks\x12Z\n" + + "\vListTasksV1\x12\x15.api.ListTasksRequest\x1a\x16.api.ListTasksResponse\"\x1c\x82\xd3\xe4\x93\x02\x16\x12\x14/apis/v1alpha1/tasksB=Z;github.com/kubeflow/pipelines/backend/api/v1beta1/go_clientb\x06proto3" var ( file_backend_api_v1beta1_task_proto_rawDescOnce sync.Once - file_backend_api_v1beta1_task_proto_rawDescData = file_backend_api_v1beta1_task_proto_rawDesc + file_backend_api_v1beta1_task_proto_rawDescData []byte ) func file_backend_api_v1beta1_task_proto_rawDescGZIP() []byte { file_backend_api_v1beta1_task_proto_rawDescOnce.Do(func() { - file_backend_api_v1beta1_task_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v1beta1_task_proto_rawDescData) + file_backend_api_v1beta1_task_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_task_proto_rawDesc), len(file_backend_api_v1beta1_task_proto_rawDesc))) }) return file_backend_api_v1beta1_task_proto_rawDescData } var file_backend_api_v1beta1_task_proto_msgTypes = make([]protoimpl.MessageInfo, 4) -var file_backend_api_v1beta1_task_proto_goTypes = []interface{}{ +var file_backend_api_v1beta1_task_proto_goTypes = []any{ (*Task)(nil), // 0: api.Task (*CreateTaskRequest)(nil), // 1: api.CreateTaskRequest (*ListTasksRequest)(nil), // 2: api.ListTasksRequest @@ -477,61 +422,11 @@ func file_backend_api_v1beta1_task_proto_init() { return } file_backend_api_v1beta1_resource_reference_proto_init() - if !protoimpl.UnsafeEnabled { - file_backend_api_v1beta1_task_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Task); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_task_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateTaskRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_task_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListTasksRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_task_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListTasksResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v1beta1_task_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_task_proto_rawDesc), len(file_backend_api_v1beta1_task_proto_rawDesc)), NumEnums: 0, NumMessages: 4, 
NumExtensions: 0, @@ -542,127 +437,6 @@ func file_backend_api_v1beta1_task_proto_init() { MessageInfos: file_backend_api_v1beta1_task_proto_msgTypes, }.Build() File_backend_api_v1beta1_task_proto = out.File - file_backend_api_v1beta1_task_proto_rawDesc = nil file_backend_api_v1beta1_task_proto_goTypes = nil file_backend_api_v1beta1_task_proto_depIdxs = nil } - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// TaskServiceClient is the client API for TaskService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type TaskServiceClient interface { - // Creates a new task. - CreateTaskV1(ctx context.Context, in *CreateTaskRequest, opts ...grpc.CallOption) (*Task, error) - // Finds all tasks. Supports pagination, and sorting on certain fields. - ListTasksV1(ctx context.Context, in *ListTasksRequest, opts ...grpc.CallOption) (*ListTasksResponse, error) -} - -type taskServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewTaskServiceClient(cc grpc.ClientConnInterface) TaskServiceClient { - return &taskServiceClient{cc} -} - -func (c *taskServiceClient) CreateTaskV1(ctx context.Context, in *CreateTaskRequest, opts ...grpc.CallOption) (*Task, error) { - out := new(Task) - err := c.cc.Invoke(ctx, "/api.TaskService/CreateTaskV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *taskServiceClient) ListTasksV1(ctx context.Context, in *ListTasksRequest, opts ...grpc.CallOption) (*ListTasksResponse, error) { - out := new(ListTasksResponse) - err := c.cc.Invoke(ctx, "/api.TaskService/ListTasksV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// TaskServiceServer is the server API for TaskService service. -type TaskServiceServer interface { - // Creates a new task. - CreateTaskV1(context.Context, *CreateTaskRequest) (*Task, error) - // Finds all tasks. Supports pagination, and sorting on certain fields. - ListTasksV1(context.Context, *ListTasksRequest) (*ListTasksResponse, error) -} - -// UnimplementedTaskServiceServer can be embedded to have forward compatible implementations. 
-type UnimplementedTaskServiceServer struct { -} - -func (*UnimplementedTaskServiceServer) CreateTaskV1(context.Context, *CreateTaskRequest) (*Task, error) { - return nil, status.Errorf(codes.Unimplemented, "method CreateTaskV1 not implemented") -} -func (*UnimplementedTaskServiceServer) ListTasksV1(context.Context, *ListTasksRequest) (*ListTasksResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ListTasksV1 not implemented") -} - -func RegisterTaskServiceServer(s *grpc.Server, srv TaskServiceServer) { - s.RegisterService(&_TaskService_serviceDesc, srv) -} - -func _TaskService_CreateTaskV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CreateTaskRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).CreateTaskV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.TaskService/CreateTaskV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TaskServiceServer).CreateTaskV1(ctx, req.(*CreateTaskRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _TaskService_ListTasksV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ListTasksRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).ListTasksV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.TaskService/ListTasksV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TaskServiceServer).ListTasksV1(ctx, req.(*ListTasksRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _TaskService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "api.TaskService", - HandlerType: (*TaskServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "CreateTaskV1", - Handler: _TaskService_CreateTaskV1_Handler, - }, - { - MethodName: "ListTasksV1", - Handler: _TaskService_ListTasksV1_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "backend/api/v1beta1/task.proto", -} diff --git a/backend/api/v1beta1/go_client/task.pb.gw.go b/backend/api/v1beta1/go_client/task.pb.gw.go index dafd412bcab..1d7448888f8 100644 --- a/backend/api/v1beta1/go_client/task.pb.gw.go +++ b/backend/api/v1beta1/go_client/task.pb.gw.go @@ -10,149 +10,138 @@ package go_client import ( "context" + "errors" "io" "net/http" - "github.com/golang/protobuf/descriptor" - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" ) // Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = descriptor.ForMessage -var _ = metadata.Join +var ( + _ codes.Code + _ io.Reader + _ status.Status + _ = errors.New + _ = runtime.String + _ = utilities.NewDoubleArray + _ = metadata.Join +) func request_TaskService_CreateTaskV1_0(ctx context.Context, marshaler 
runtime.Marshaler, client TaskServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreateTaskRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Task); err != nil && err != io.EOF { + var ( + protoReq CreateTaskRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Task); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } msg, err := client.CreateTaskV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_TaskService_CreateTaskV1_0(ctx context.Context, marshaler runtime.Marshaler, server TaskServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreateTaskRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Task); err != nil && err != io.EOF { + var ( + protoReq CreateTaskRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Task); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.CreateTaskV1(ctx, &protoReq) return msg, metadata, err - } -var ( - filter_TaskService_ListTasksV1_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} -) +var filter_TaskService_ListTasksV1_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} func request_TaskService_ListTasksV1_0(ctx context.Context, marshaler runtime.Marshaler, client TaskServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListTasksRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListTasksRequest + metadata runtime.ServerMetadata + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_TaskService_ListTasksV1_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListTasksV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_TaskService_ListTasksV1_0(ctx context.Context, marshaler runtime.Marshaler, server TaskServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListTasksRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListTasksRequest + metadata runtime.ServerMetadata + ) if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_TaskService_ListTasksV1_0); err != nil { return nil, 
metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.ListTasksV1(ctx, &protoReq) return msg, metadata, err - } // RegisterTaskServiceHandlerServer registers the http handlers for service TaskService to "mux". // UnaryRPC :call TaskServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. // Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterTaskServiceHandlerFromEndpoint instead. +// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call. func RegisterTaskServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server TaskServiceServer) error { - - mux.Handle("POST", pattern_TaskService_CreateTaskV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_TaskService_CreateTaskV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.TaskService/CreateTaskV1", runtime.WithHTTPPathPattern("/apis/v1alpha1/tasks")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_TaskService_CreateTaskV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_TaskService_CreateTaskV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_TaskService_CreateTaskV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_TaskService_CreateTaskV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_TaskService_ListTasksV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_TaskService_ListTasksV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.TaskService/ListTasksV1", runtime.WithHTTPPathPattern("/apis/v1alpha1/tasks")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_TaskService_ListTasksV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_TaskService_ListTasksV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_TaskService_ListTasksV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_TaskService_ListTasksV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) return nil @@ -161,25 +150,24 @@ func RegisterTaskServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux // RegisterTaskServiceHandlerFromEndpoint is same as RegisterTaskServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterTaskServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) + conn, err := grpc.NewClient(endpoint, opts...) if err != nil { return err } defer func() { if err != nil { if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } return } go func() { <-ctx.Done() if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } }() }() - return RegisterTaskServiceHandler(ctx, mux, conn) } @@ -193,60 +181,51 @@ func RegisterTaskServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn // to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "TaskServiceClient". // Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "TaskServiceClient" // doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "TaskServiceClient" to call the correct interceptors. +// "TaskServiceClient" to call the correct interceptors. This client ignores the HTTP middlewares. 
func RegisterTaskServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client TaskServiceClient) error { - - mux.Handle("POST", pattern_TaskService_CreateTaskV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_TaskService_CreateTaskV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.TaskService/CreateTaskV1", runtime.WithHTTPPathPattern("/apis/v1alpha1/tasks")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_TaskService_CreateTaskV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_TaskService_CreateTaskV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_TaskService_CreateTaskV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_TaskService_CreateTaskV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_TaskService_ListTasksV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_TaskService_ListTasksV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.TaskService/ListTasksV1", runtime.WithHTTPPathPattern("/apis/v1alpha1/tasks")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_TaskService_ListTasksV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_TaskService_ListTasksV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_TaskService_ListTasksV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_TaskService_ListTasksV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - return nil } var ( - pattern_TaskService_CreateTaskV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1alpha1", "tasks"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_TaskService_ListTasksV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1alpha1", "tasks"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_TaskService_CreateTaskV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1alpha1", "tasks"}, "")) + pattern_TaskService_ListTasksV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1alpha1", "tasks"}, "")) ) var ( forward_TaskService_CreateTaskV1_0 = runtime.ForwardResponseMessage - - forward_TaskService_ListTasksV1_0 = runtime.ForwardResponseMessage + forward_TaskService_ListTasksV1_0 = runtime.ForwardResponseMessage ) diff --git a/backend/api/v1beta1/go_client/task_grpc.pb.go b/backend/api/v1beta1/go_client/task_grpc.pb.go new file mode 100644 index 00000000000..bc7bb23cfbb --- /dev/null +++ b/backend/api/v1beta1/go_client/task_grpc.pb.go @@ -0,0 +1,177 @@ +// Copyright 2021 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v6.31.1 +// source: backend/api/v1beta1/task.proto + +package go_client + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + TaskService_CreateTaskV1_FullMethodName = "/api.TaskService/CreateTaskV1" + TaskService_ListTasksV1_FullMethodName = "/api.TaskService/ListTasksV1" +) + +// TaskServiceClient is the client API for TaskService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type TaskServiceClient interface { + // Creates a new task. + CreateTaskV1(ctx context.Context, in *CreateTaskRequest, opts ...grpc.CallOption) (*Task, error) + // Finds all tasks. Supports pagination, and sorting on certain fields. + ListTasksV1(ctx context.Context, in *ListTasksRequest, opts ...grpc.CallOption) (*ListTasksResponse, error) +} + +type taskServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewTaskServiceClient(cc grpc.ClientConnInterface) TaskServiceClient { + return &taskServiceClient{cc} +} + +func (c *taskServiceClient) CreateTaskV1(ctx context.Context, in *CreateTaskRequest, opts ...grpc.CallOption) (*Task, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Task) + err := c.cc.Invoke(ctx, TaskService_CreateTaskV1_FullMethodName, in, out, cOpts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) ListTasksV1(ctx context.Context, in *ListTasksRequest, opts ...grpc.CallOption) (*ListTasksResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ListTasksResponse) + err := c.cc.Invoke(ctx, TaskService_ListTasksV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// TaskServiceServer is the server API for TaskService service. +// All implementations must embed UnimplementedTaskServiceServer +// for forward compatibility. +type TaskServiceServer interface { + // Creates a new task. + CreateTaskV1(context.Context, *CreateTaskRequest) (*Task, error) + // Finds all tasks. Supports pagination, and sorting on certain fields. + ListTasksV1(context.Context, *ListTasksRequest) (*ListTasksResponse, error) + mustEmbedUnimplementedTaskServiceServer() +} + +// UnimplementedTaskServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. +type UnimplementedTaskServiceServer struct{} + +func (UnimplementedTaskServiceServer) CreateTaskV1(context.Context, *CreateTaskRequest) (*Task, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreateTaskV1 not implemented") +} +func (UnimplementedTaskServiceServer) ListTasksV1(context.Context, *ListTasksRequest) (*ListTasksResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListTasksV1 not implemented") +} +func (UnimplementedTaskServiceServer) mustEmbedUnimplementedTaskServiceServer() {} +func (UnimplementedTaskServiceServer) testEmbeddedByValue() {} + +// UnsafeTaskServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to TaskServiceServer will +// result in compilation errors. +type UnsafeTaskServiceServer interface { + mustEmbedUnimplementedTaskServiceServer() +} + +func RegisterTaskServiceServer(s grpc.ServiceRegistrar, srv TaskServiceServer) { + // If the following call pancis, it indicates UnimplementedTaskServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. 
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&TaskService_ServiceDesc, srv) +} + +func _TaskService_CreateTaskV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).CreateTaskV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: TaskService_CreateTaskV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).CreateTaskV1(ctx, req.(*CreateTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_ListTasksV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTasksRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).ListTasksV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: TaskService_ListTasksV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).ListTasksV1(ctx, req.(*ListTasksRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// TaskService_ServiceDesc is the grpc.ServiceDesc for TaskService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var TaskService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "api.TaskService", + HandlerType: (*TaskServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateTaskV1", + Handler: _TaskService_CreateTaskV1_Handler, + }, + { + MethodName: "ListTasksV1", + Handler: _TaskService_ListTasksV1_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "backend/api/v1beta1/task.proto", +} diff --git a/backend/api/v1beta1/go_client/visualization.pb.go b/backend/api/v1beta1/go_client/visualization.pb.go index c407d2e5c69..f8bc771618a 100644 --- a/backend/api/v1beta1/go_client/visualization.pb.go +++ b/backend/api/v1beta1/go_client/visualization.pb.go @@ -14,23 +14,20 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v1beta1/visualization.proto package go_client import ( - context "context" - _ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options" + _ "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options" _ "google.golang.org/genproto/googleapis/api/annotations" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -102,21 +99,18 @@ func (Visualization_Type) EnumDescriptor() ([]byte, []int) { // and input data paths. Input dat paths are assumed to be unique and are used // for determining output path. 
type CreateVisualizationRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Visualization *Visualization `protobuf:"bytes,1,opt,name=visualization,proto3" json:"visualization,omitempty"` + Namespace string `protobuf:"bytes,2,opt,name=namespace,proto3" json:"namespace,omitempty"` unknownFields protoimpl.UnknownFields - - Visualization *Visualization `protobuf:"bytes,1,opt,name=visualization,proto3" json:"visualization,omitempty"` - Namespace string `protobuf:"bytes,2,opt,name=namespace,proto3" json:"namespace,omitempty"` + sizeCache protoimpl.SizeCache } func (x *CreateVisualizationRequest) Reset() { *x = CreateVisualizationRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_visualization_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_visualization_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *CreateVisualizationRequest) String() string { @@ -127,7 +121,7 @@ func (*CreateVisualizationRequest) ProtoMessage() {} func (x *CreateVisualizationRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_visualization_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -157,11 +151,8 @@ func (x *CreateVisualizationRequest) GetNamespace() string { } type Visualization struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Type Visualization_Type `protobuf:"varint,1,opt,name=type,proto3,enum=api.Visualization_Type" json:"type,omitempty"` + state protoimpl.MessageState `protogen:"open.v1"` + Type Visualization_Type `protobuf:"varint,1,opt,name=type,proto3,enum=api.Visualization_Type" json:"type,omitempty"` // Path pattern of input data to be used during generation of visualizations. // This is required when creating the pipeline through CreateVisualization // API. @@ -176,16 +167,16 @@ type Visualization struct { // In case any error happens when generating visualizations, only // visualization ID and the error message are returned. Client has the // flexibility of choosing how to handle the error. 
- Error string `protobuf:"bytes,5,opt,name=error,proto3" json:"error,omitempty"` + Error string `protobuf:"bytes,5,opt,name=error,proto3" json:"error,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Visualization) Reset() { *x = Visualization{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v1beta1_visualization_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v1beta1_visualization_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Visualization) String() string { @@ -196,7 +187,7 @@ func (*Visualization) ProtoMessage() {} func (x *Visualization) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v1beta1_visualization_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -248,76 +239,50 @@ func (x *Visualization) GetError() string { var File_backend_api_v1beta1_visualization_proto protoreflect.FileDescriptor -var file_backend_api_v1beta1_visualization_proto_rawDesc = []byte{ - 0x0a, 0x27, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x76, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x61, 0x70, 0x69, 0x1a, 0x1c, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x62, 0x61, - 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, - 0x31, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2c, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, 0x67, 0x65, 0x6e, 0x2d, 0x73, 0x77, 0x61, 0x67, 0x67, 0x65, - 0x72, 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x74, 0x0a, 0x1a, 0x43, - 0x72, 0x65, 0x61, 0x74, 0x65, 0x56, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x38, 0x0a, 0x0d, 0x76, 0x69, 0x73, - 0x75, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x12, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x56, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, 0x7a, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0d, 0x76, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x22, 0xde, 0x01, 0x0a, 0x0d, 0x56, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x12, 0x2b, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0e, 0x32, 0x17, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x56, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, 0x7a, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, - 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x61, 0x72, 0x67, 0x75, - 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 
0x09, 0x52, 0x09, 0x61, 0x72, 0x67, - 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x74, 0x6d, 0x6c, 0x18, 0x04, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x74, 0x6d, 0x6c, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, - 0x72, 0x6f, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, - 0x22, 0x40, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0d, 0x0a, 0x09, 0x52, 0x4f, 0x43, 0x5f, - 0x43, 0x55, 0x52, 0x56, 0x45, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x54, 0x46, 0x44, 0x56, 0x10, - 0x01, 0x12, 0x08, 0x0a, 0x04, 0x54, 0x46, 0x4d, 0x41, 0x10, 0x02, 0x12, 0x09, 0x0a, 0x05, 0x54, - 0x41, 0x42, 0x4c, 0x45, 0x10, 0x03, 0x12, 0x0a, 0x0a, 0x06, 0x43, 0x55, 0x53, 0x54, 0x4f, 0x4d, - 0x10, 0x04, 0x32, 0xa6, 0x01, 0x0a, 0x14, 0x56, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, 0x7a, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x8d, 0x01, 0x0a, 0x15, - 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x56, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x56, 0x31, 0x12, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x72, 0x65, 0x61, - 0x74, 0x65, 0x56, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x12, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x56, 0x69, 0x73, - 0x75, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3f, 0x82, 0xd3, 0xe4, 0x93, - 0x02, 0x39, 0x3a, 0x0d, 0x76, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x22, 0x28, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2f, 0x76, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, - 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x42, 0x91, 0x01, 0x92, 0x41, - 0x51, 0x2a, 0x02, 0x01, 0x02, 0x52, 0x1c, 0x0a, 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, - 0x12, 0x11, 0x12, 0x0f, 0x0a, 0x0d, 0x1a, 0x0b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x74, 0x61, - 0x74, 0x75, 0x73, 0x5a, 0x1f, 0x0a, 0x1d, 0x0a, 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x12, - 0x13, 0x08, 0x02, 0x1a, 0x0d, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x02, 0x62, 0x0c, 0x0a, 0x0a, 0x0a, 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, - 0x12, 0x00, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, - 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x67, 0x6f, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x62, - 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v1beta1_visualization_proto_rawDesc = "" + + "\n" + + "'backend/api/v1beta1/visualization.proto\x12\x03api\x1a\x1cgoogle/api/annotations.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\"t\n" + + "\x1aCreateVisualizationRequest\x128\n" + + "\rvisualization\x18\x01 \x01(\v2\x12.api.VisualizationR\rvisualization\x12\x1c\n" + + "\tnamespace\x18\x02 \x01(\tR\tnamespace\"\xde\x01\n" + + "\rVisualization\x12+\n" + + "\x04type\x18\x01 \x01(\x0e2\x17.api.Visualization.TypeR\x04type\x12\x16\n" + + "\x06source\x18\x02 \x01(\tR\x06source\x12\x1c\n" + + "\targuments\x18\x03 \x01(\tR\targuments\x12\x12\n" + + "\x04html\x18\x04 \x01(\tR\x04html\x12\x14\n" + + "\x05error\x18\x05 \x01(\tR\x05error\"@\n" + + "\x04Type\x12\r\n" + + "\tROC_CURVE\x10\x00\x12\b\n" + + "\x04TFDV\x10\x01\x12\b\n" + + 
"\x04TFMA\x10\x02\x12\t\n" + + "\x05TABLE\x10\x03\x12\n" + + "\n" + + "\x06CUSTOM\x10\x042\xa6\x01\n" + + "\x14VisualizationService\x12\x8d\x01\n" + + "\x15CreateVisualizationV1\x12\x1f.api.CreateVisualizationRequest\x1a\x12.api.Visualization\"?\x82\xd3\xe4\x93\x029:\rvisualization\"(/apis/v1beta1/visualizations/{namespace}B\x91\x01\x92AQ*\x02\x01\x02R\x1c\n" + + "\adefault\x12\x11\x12\x0f\n" + + "\r\x1a\v.api.StatusZ\x1f\n" + + "\x1d\n" + + "\x06Bearer\x12\x13\b\x02\x1a\rauthorization \x02b\f\n" + + "\n" + + "\n" + + "\x06Bearer\x12\x00Z;github.com/kubeflow/pipelines/backend/api/v1beta1/go_clientb\x06proto3" var ( file_backend_api_v1beta1_visualization_proto_rawDescOnce sync.Once - file_backend_api_v1beta1_visualization_proto_rawDescData = file_backend_api_v1beta1_visualization_proto_rawDesc + file_backend_api_v1beta1_visualization_proto_rawDescData []byte ) func file_backend_api_v1beta1_visualization_proto_rawDescGZIP() []byte { file_backend_api_v1beta1_visualization_proto_rawDescOnce.Do(func() { - file_backend_api_v1beta1_visualization_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v1beta1_visualization_proto_rawDescData) + file_backend_api_v1beta1_visualization_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_visualization_proto_rawDesc), len(file_backend_api_v1beta1_visualization_proto_rawDesc))) }) return file_backend_api_v1beta1_visualization_proto_rawDescData } var file_backend_api_v1beta1_visualization_proto_enumTypes = make([]protoimpl.EnumInfo, 1) var file_backend_api_v1beta1_visualization_proto_msgTypes = make([]protoimpl.MessageInfo, 2) -var file_backend_api_v1beta1_visualization_proto_goTypes = []interface{}{ +var file_backend_api_v1beta1_visualization_proto_goTypes = []any{ (Visualization_Type)(0), // 0: api.Visualization.Type (*CreateVisualizationRequest)(nil), // 1: api.CreateVisualizationRequest (*Visualization)(nil), // 2: api.Visualization @@ -339,38 +304,11 @@ func file_backend_api_v1beta1_visualization_proto_init() { if File_backend_api_v1beta1_visualization_proto != nil { return } - file_backend_api_v1beta1_error_proto_init() - if !protoimpl.UnsafeEnabled { - file_backend_api_v1beta1_visualization_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateVisualizationRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v1beta1_visualization_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Visualization); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v1beta1_visualization_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v1beta1_visualization_proto_rawDesc), len(file_backend_api_v1beta1_visualization_proto_rawDesc)), NumEnums: 1, NumMessages: 2, NumExtensions: 0, @@ -382,87 +320,6 @@ func file_backend_api_v1beta1_visualization_proto_init() { MessageInfos: file_backend_api_v1beta1_visualization_proto_msgTypes, }.Build() File_backend_api_v1beta1_visualization_proto = out.File - file_backend_api_v1beta1_visualization_proto_rawDesc = nil file_backend_api_v1beta1_visualization_proto_goTypes = nil file_backend_api_v1beta1_visualization_proto_depIdxs = nil } - 
-// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// VisualizationServiceClient is the client API for VisualizationService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type VisualizationServiceClient interface { - CreateVisualizationV1(ctx context.Context, in *CreateVisualizationRequest, opts ...grpc.CallOption) (*Visualization, error) -} - -type visualizationServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewVisualizationServiceClient(cc grpc.ClientConnInterface) VisualizationServiceClient { - return &visualizationServiceClient{cc} -} - -func (c *visualizationServiceClient) CreateVisualizationV1(ctx context.Context, in *CreateVisualizationRequest, opts ...grpc.CallOption) (*Visualization, error) { - out := new(Visualization) - err := c.cc.Invoke(ctx, "/api.VisualizationService/CreateVisualizationV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// VisualizationServiceServer is the server API for VisualizationService service. -type VisualizationServiceServer interface { - CreateVisualizationV1(context.Context, *CreateVisualizationRequest) (*Visualization, error) -} - -// UnimplementedVisualizationServiceServer can be embedded to have forward compatible implementations. -type UnimplementedVisualizationServiceServer struct { -} - -func (*UnimplementedVisualizationServiceServer) CreateVisualizationV1(context.Context, *CreateVisualizationRequest) (*Visualization, error) { - return nil, status.Errorf(codes.Unimplemented, "method CreateVisualizationV1 not implemented") -} - -func RegisterVisualizationServiceServer(s *grpc.Server, srv VisualizationServiceServer) { - s.RegisterService(&_VisualizationService_serviceDesc, srv) -} - -func _VisualizationService_CreateVisualizationV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CreateVisualizationRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(VisualizationServiceServer).CreateVisualizationV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/api.VisualizationService/CreateVisualizationV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(VisualizationServiceServer).CreateVisualizationV1(ctx, req.(*CreateVisualizationRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _VisualizationService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "api.VisualizationService", - HandlerType: (*VisualizationServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "CreateVisualizationV1", - Handler: _VisualizationService_CreateVisualizationV1_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "backend/api/v1beta1/visualization.proto", -} diff --git a/backend/api/v1beta1/go_client/visualization.pb.gw.go b/backend/api/v1beta1/go_client/visualization.pb.gw.go index f3f7d676162..306a3805b7d 100644 --- a/backend/api/v1beta1/go_client/visualization.pb.gw.go +++ b/backend/api/v1beta1/go_client/visualization.pb.gw.go @@ -10,126 +10,101 @@ package go_client import ( "context" + 
"errors" "io" "net/http" - "github.com/golang/protobuf/descriptor" - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" ) // Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = descriptor.ForMessage -var _ = metadata.Join +var ( + _ codes.Code + _ io.Reader + _ status.Status + _ = errors.New + _ = runtime.String + _ = utilities.NewDoubleArray + _ = metadata.Join +) func request_VisualizationService_CreateVisualizationV1_0(ctx context.Context, marshaler runtime.Marshaler, client VisualizationServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreateVisualizationRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Visualization); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - var ( - val string - ok bool - err error - _ = err + protoReq CreateVisualizationRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["namespace"] + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Visualization); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["namespace"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace") } - protoReq.Namespace, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err) } - msg, err := client.CreateVisualizationV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_VisualizationService_CreateVisualizationV1_0(ctx context.Context, marshaler runtime.Marshaler, server VisualizationServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreateVisualizationRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Visualization); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - var ( - val string - ok bool - err error - _ = err + protoReq CreateVisualizationRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["namespace"] + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Visualization); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + val, ok := pathParams["namespace"] if !ok { return nil, 
metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace") } - protoReq.Namespace, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err) } - msg, err := server.CreateVisualizationV1(ctx, &protoReq) return msg, metadata, err - } // RegisterVisualizationServiceHandlerServer registers the http handlers for service VisualizationService to "mux". // UnaryRPC :call VisualizationServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. // Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterVisualizationServiceHandlerFromEndpoint instead. +// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call. func RegisterVisualizationServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server VisualizationServiceServer) error { - - mux.Handle("POST", pattern_VisualizationService_CreateVisualizationV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_VisualizationService_CreateVisualizationV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.VisualizationService/CreateVisualizationV1", runtime.WithHTTPPathPattern("/apis/v1beta1/visualizations/{namespace}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_VisualizationService_CreateVisualizationV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_VisualizationService_CreateVisualizationV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_VisualizationService_CreateVisualizationV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_VisualizationService_CreateVisualizationV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) return nil @@ -138,25 +113,24 @@ func RegisterVisualizationServiceHandlerServer(ctx context.Context, mux *runtime // RegisterVisualizationServiceHandlerFromEndpoint is same as RegisterVisualizationServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterVisualizationServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) + conn, err := grpc.NewClient(endpoint, opts...) 
if err != nil { return err } defer func() { if err != nil { if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } return } go func() { <-ctx.Done() if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } }() }() - return RegisterVisualizationServiceHandler(ctx, mux, conn) } @@ -170,34 +144,30 @@ func RegisterVisualizationServiceHandler(ctx context.Context, mux *runtime.Serve // to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "VisualizationServiceClient". // Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "VisualizationServiceClient" // doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "VisualizationServiceClient" to call the correct interceptors. +// "VisualizationServiceClient" to call the correct interceptors. This client ignores the HTTP middlewares. func RegisterVisualizationServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client VisualizationServiceClient) error { - - mux.Handle("POST", pattern_VisualizationService_CreateVisualizationV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_VisualizationService_CreateVisualizationV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/api.VisualizationService/CreateVisualizationV1", runtime.WithHTTPPathPattern("/apis/v1beta1/visualizations/{namespace}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_VisualizationService_CreateVisualizationV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_VisualizationService_CreateVisualizationV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_VisualizationService_CreateVisualizationV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_VisualizationService_CreateVisualizationV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - return nil } var ( - pattern_VisualizationService_CreateVisualizationV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "visualizations", "namespace"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_VisualizationService_CreateVisualizationV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "visualizations", "namespace"}, "")) ) var ( diff --git a/backend/api/v1beta1/go_client/visualization_grpc.pb.go b/backend/api/v1beta1/go_client/visualization_grpc.pb.go new file mode 100644 index 00000000000..a6e7831da6b --- /dev/null +++ b/backend/api/v1beta1/go_client/visualization_grpc.pb.go @@ -0,0 +1,135 @@ +// Copyright 2019 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v6.31.1 +// source: backend/api/v1beta1/visualization.proto + +package go_client + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + VisualizationService_CreateVisualizationV1_FullMethodName = "/api.VisualizationService/CreateVisualizationV1" +) + +// VisualizationServiceClient is the client API for VisualizationService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type VisualizationServiceClient interface { + CreateVisualizationV1(ctx context.Context, in *CreateVisualizationRequest, opts ...grpc.CallOption) (*Visualization, error) +} + +type visualizationServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewVisualizationServiceClient(cc grpc.ClientConnInterface) VisualizationServiceClient { + return &visualizationServiceClient{cc} +} + +func (c *visualizationServiceClient) CreateVisualizationV1(ctx context.Context, in *CreateVisualizationRequest, opts ...grpc.CallOption) (*Visualization, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Visualization) + err := c.cc.Invoke(ctx, VisualizationService_CreateVisualizationV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// VisualizationServiceServer is the server API for VisualizationService service. +// All implementations must embed UnimplementedVisualizationServiceServer +// for forward compatibility. 
+type VisualizationServiceServer interface {
+	CreateVisualizationV1(context.Context, *CreateVisualizationRequest) (*Visualization, error)
+	mustEmbedUnimplementedVisualizationServiceServer()
+}
+
+// UnimplementedVisualizationServiceServer must be embedded to have
+// forward compatible implementations.
+//
+// NOTE: this should be embedded by value instead of pointer to avoid a nil
+// pointer dereference when methods are called.
+type UnimplementedVisualizationServiceServer struct{}
+
+func (UnimplementedVisualizationServiceServer) CreateVisualizationV1(context.Context, *CreateVisualizationRequest) (*Visualization, error) {
+	return nil, status.Errorf(codes.Unimplemented, "method CreateVisualizationV1 not implemented")
+}
+func (UnimplementedVisualizationServiceServer) mustEmbedUnimplementedVisualizationServiceServer() {}
+func (UnimplementedVisualizationServiceServer) testEmbeddedByValue() {}
+
+// UnsafeVisualizationServiceServer may be embedded to opt out of forward compatibility for this service.
+// Use of this interface is not recommended, as added methods to VisualizationServiceServer will
+// result in compilation errors.
+type UnsafeVisualizationServiceServer interface {
+	mustEmbedUnimplementedVisualizationServiceServer()
+}
+
+func RegisterVisualizationServiceServer(s grpc.ServiceRegistrar, srv VisualizationServiceServer) {
+	// If the following call panics, it indicates UnimplementedVisualizationServiceServer was
+	// embedded by pointer and is nil. This will cause panics if an
+	// unimplemented method is ever invoked, so we test this at initialization
+	// time to prevent it from happening at runtime later due to I/O.
+	if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
+		t.testEmbeddedByValue()
+	}
+	s.RegisterService(&VisualizationService_ServiceDesc, srv)
+}
+
+func _VisualizationService_CreateVisualizationV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+	in := new(CreateVisualizationRequest)
+	if err := dec(in); err != nil {
+		return nil, err
+	}
+	if interceptor == nil {
+		return srv.(VisualizationServiceServer).CreateVisualizationV1(ctx, in)
+	}
+	info := &grpc.UnaryServerInfo{
+		Server: srv,
+		FullMethod: VisualizationService_CreateVisualizationV1_FullMethodName,
+	}
+	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+		return srv.(VisualizationServiceServer).CreateVisualizationV1(ctx, req.(*CreateVisualizationRequest))
+	}
+	return interceptor(ctx, in, info, handler)
+}
+
+// VisualizationService_ServiceDesc is the grpc.ServiceDesc for VisualizationService service.
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var VisualizationService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "api.VisualizationService", + HandlerType: (*VisualizationServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateVisualizationV1", + Handler: _VisualizationService_CreateVisualizationV1_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "backend/api/v1beta1/visualization.proto", +} diff --git a/backend/api/v1beta1/go_http_client/experiment_client/experiment_client.go b/backend/api/v1beta1/go_http_client/experiment_client/experiment_client.go index f607e00fdea..40004fd51ba 100644 --- a/backend/api/v1beta1/go_http_client/experiment_client/experiment_client.go +++ b/backend/api/v1beta1/go_http_client/experiment_client/experiment_client.go @@ -8,8 +8,7 @@ package experiment_client import ( "github.com/go-openapi/runtime" httptransport "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_client/experiment_service" ) @@ -56,9 +55,7 @@ func New(transport runtime.ClientTransport, formats strfmt.Registry) *Experiment cli := new(Experiment) cli.Transport = transport - cli.ExperimentService = experiment_service.New(transport, formats) - return cli } @@ -103,7 +100,7 @@ func (cfg *TransportConfig) WithSchemes(schemes []string) *TransportConfig { // Experiment is a client for experiment type Experiment struct { - ExperimentService *experiment_service.Client + ExperimentService experiment_service.ClientService Transport runtime.ClientTransport } @@ -111,7 +108,5 @@ type Experiment struct { // SetTransport changes the transport on the client and all its subresources func (c *Experiment) SetTransport(transport runtime.ClientTransport) { c.Transport = transport - c.ExperimentService.SetTransport(transport) - } diff --git a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_archive_experiment_v1_parameters.go b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_archive_experiment_v1_parameters.go index 3d4f69c3333..afe9d4881c9 100644 --- a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_archive_experiment_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_archive_experiment_v1_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewExperimentServiceArchiveExperimentV1Params creates a new ExperimentServiceArchiveExperimentV1Params object -// with the default values initialized. +// NewExperimentServiceArchiveExperimentV1Params creates a new ExperimentServiceArchiveExperimentV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewExperimentServiceArchiveExperimentV1Params() *ExperimentServiceArchiveExperimentV1Params { - var () return &ExperimentServiceArchiveExperimentV1Params{ - timeout: cr.DefaultTimeout, } } // NewExperimentServiceArchiveExperimentV1ParamsWithTimeout creates a new ExperimentServiceArchiveExperimentV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewExperimentServiceArchiveExperimentV1ParamsWithTimeout(timeout time.Duration) *ExperimentServiceArchiveExperimentV1Params { - var () return &ExperimentServiceArchiveExperimentV1Params{ - timeout: timeout, } } // NewExperimentServiceArchiveExperimentV1ParamsWithContext creates a new ExperimentServiceArchiveExperimentV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewExperimentServiceArchiveExperimentV1ParamsWithContext(ctx context.Context) *ExperimentServiceArchiveExperimentV1Params { - var () return &ExperimentServiceArchiveExperimentV1Params{ - Context: ctx, } } // NewExperimentServiceArchiveExperimentV1ParamsWithHTTPClient creates a new ExperimentServiceArchiveExperimentV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewExperimentServiceArchiveExperimentV1ParamsWithHTTPClient(client *http.Client) *ExperimentServiceArchiveExperimentV1Params { - var () return &ExperimentServiceArchiveExperimentV1Params{ HTTPClient: client, } } -/*ExperimentServiceArchiveExperimentV1Params contains all the parameters to send to the API endpoint -for the experiment service archive experiment v1 operation typically these are written to a http.Request +/* +ExperimentServiceArchiveExperimentV1Params contains all the parameters to send to the API endpoint + + for the experiment service archive experiment v1 operation. + + Typically these are written to a http.Request. */ type ExperimentServiceArchiveExperimentV1Params struct { - /*ID - The ID of the experiment to be archived. + /* ID. + The ID of the experiment to be archived. */ ID string @@ -72,6 +72,21 @@ type ExperimentServiceArchiveExperimentV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the experiment service archive experiment v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ExperimentServiceArchiveExperimentV1Params) WithDefaults() *ExperimentServiceArchiveExperimentV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the experiment service archive experiment v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *ExperimentServiceArchiveExperimentV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the experiment service archive experiment v1 params func (o *ExperimentServiceArchiveExperimentV1Params) WithTimeout(timeout time.Duration) *ExperimentServiceArchiveExperimentV1Params { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_archive_experiment_v1_responses.go b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_archive_experiment_v1_responses.go index bce9f4249e6..8f678e89c6b 100644 --- a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_archive_experiment_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_archive_experiment_v1_responses.go @@ -6,14 +6,14 @@ package experiment_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - experiment_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_model" ) // ExperimentServiceArchiveExperimentV1Reader is a Reader for the ExperimentServiceArchiveExperimentV1 structure. @@ -24,14 +24,12 @@ type ExperimentServiceArchiveExperimentV1Reader struct { // ReadResponse reads a server response into the received o. func (o *ExperimentServiceArchiveExperimentV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewExperimentServiceArchiveExperimentV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewExperimentServiceArchiveExperimentV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewExperimentServiceArchiveExperimentV1OK() *ExperimentServiceArchiveExperi return &ExperimentServiceArchiveExperimentV1OK{} } -/*ExperimentServiceArchiveExperimentV1OK handles this case with default header values. +/* +ExperimentServiceArchiveExperimentV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type ExperimentServiceArchiveExperimentV1OK struct { Payload interface{} } +// IsSuccess returns true when this experiment service archive experiment v1 o k response has a 2xx status code +func (o *ExperimentServiceArchiveExperimentV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this experiment service archive experiment v1 o k response has a 3xx status code +func (o *ExperimentServiceArchiveExperimentV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this experiment service archive experiment v1 o k response has a 4xx status code +func (o *ExperimentServiceArchiveExperimentV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this experiment service archive experiment v1 o k response has a 5xx status code +func (o *ExperimentServiceArchiveExperimentV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this experiment service archive experiment v1 o k response a status code equal to that given +func (o *ExperimentServiceArchiveExperimentV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the experiment service archive experiment v1 o k response +func (o *ExperimentServiceArchiveExperimentV1OK) Code() int { + return 200 +} + func (o *ExperimentServiceArchiveExperimentV1OK) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/experiments/{id}:archive][%d] experimentServiceArchiveExperimentV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/experiments/{id}:archive][%d] experimentServiceArchiveExperimentV1OK %s", 200, payload) +} + +func (o *ExperimentServiceArchiveExperimentV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/experiments/{id}:archive][%d] experimentServiceArchiveExperimentV1OK %s", 200, payload) +} + +func (o *ExperimentServiceArchiveExperimentV1OK) GetPayload() interface{} { + return o.Payload } func (o *ExperimentServiceArchiveExperimentV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewExperimentServiceArchiveExperimentV1Default(code int) *ExperimentService } } -/*ExperimentServiceArchiveExperimentV1Default handles this case with default header values. +/* +ExperimentServiceArchiveExperimentV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type ExperimentServiceArchiveExperimentV1Default struct { _statusCode int - Payload *experiment_model.GatewayruntimeError + Payload *experiment_model.GooglerpcStatus +} + +// IsSuccess returns true when this experiment service archive experiment v1 default response has a 2xx status code +func (o *ExperimentServiceArchiveExperimentV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this experiment service archive experiment v1 default response has a 3xx status code +func (o *ExperimentServiceArchiveExperimentV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this experiment service archive experiment v1 default response has a 4xx status code +func (o *ExperimentServiceArchiveExperimentV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this experiment service archive experiment v1 default response has a 5xx status code +func (o *ExperimentServiceArchiveExperimentV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this experiment service archive experiment v1 default response a status code equal to that given +func (o *ExperimentServiceArchiveExperimentV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the experiment service archive experiment v1 default response @@ -94,12 +159,22 @@ func (o *ExperimentServiceArchiveExperimentV1Default) Code() int { } func (o *ExperimentServiceArchiveExperimentV1Default) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/experiments/{id}:archive][%d] ExperimentService_ArchiveExperimentV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/experiments/{id}:archive][%d] ExperimentService_ArchiveExperimentV1 default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceArchiveExperimentV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/experiments/{id}:archive][%d] ExperimentService_ArchiveExperimentV1 default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceArchiveExperimentV1Default) GetPayload() *experiment_model.GooglerpcStatus { + return o.Payload } func (o *ExperimentServiceArchiveExperimentV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(experiment_model.GatewayruntimeError) + o.Payload = new(experiment_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_client.go b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_client.go index 2ecc0891f3b..6da8e875627 100644 --- a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_client.go +++ b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_client.go @@ -7,15 +7,40 @@ package experiment_service import ( "github.com/go-openapi/runtime" - - strfmt "github.com/go-openapi/strfmt" + httptransport "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" ) // New creates a new experiment service API client. 
-func New(transport runtime.ClientTransport, formats strfmt.Registry) *Client { +func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService { return &Client{transport: transport, formats: formats} } +// New creates a new experiment service API client with basic auth credentials. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - user: user for basic authentication header. +// - password: password for basic authentication header. +func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BasicAuth(user, password) + return &Client{transport: transport, formats: strfmt.Default} +} + +// New creates a new experiment service API client with a bearer token for authentication. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - bearerToken: bearer token for Bearer authentication header. +func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BearerToken(bearerToken) + return &Client{transport: transport, formats: strfmt.Default} +} + /* Client for experiment service API */ @@ -24,16 +49,35 @@ type Client struct { formats strfmt.Registry } +// ClientOption may be used to customize the behavior of Client methods. +type ClientOption func(*runtime.ClientOperation) + +// ClientService is the interface for Client methods +type ClientService interface { + ExperimentServiceArchiveExperimentV1(params *ExperimentServiceArchiveExperimentV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ExperimentServiceArchiveExperimentV1OK, error) + + ExperimentServiceCreateExperimentV1(params *ExperimentServiceCreateExperimentV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ExperimentServiceCreateExperimentV1OK, error) + + ExperimentServiceDeleteExperimentV1(params *ExperimentServiceDeleteExperimentV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ExperimentServiceDeleteExperimentV1OK, error) + + ExperimentServiceGetExperimentV1(params *ExperimentServiceGetExperimentV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ExperimentServiceGetExperimentV1OK, error) + + ExperimentServiceListExperimentsV1(params *ExperimentServiceListExperimentsV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ExperimentServiceListExperimentsV1OK, error) + + ExperimentServiceUnarchiveExperimentV1(params *ExperimentServiceUnarchiveExperimentV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ExperimentServiceUnarchiveExperimentV1OK, error) + + SetTransport(transport runtime.ClientTransport) +} + /* ExperimentServiceArchiveExperimentV1 archives an experiment and the experiment s runs and jobs */ -func (a *Client) ExperimentServiceArchiveExperimentV1(params *ExperimentServiceArchiveExperimentV1Params, authInfo runtime.ClientAuthInfoWriter) (*ExperimentServiceArchiveExperimentV1OK, error) { +func (a *Client) ExperimentServiceArchiveExperimentV1(params *ExperimentServiceArchiveExperimentV1Params, authInfo 
runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ExperimentServiceArchiveExperimentV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewExperimentServiceArchiveExperimentV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "ExperimentService_ArchiveExperimentV1", Method: "POST", PathPattern: "/apis/v1beta1/experiments/{id}:archive", @@ -45,24 +89,33 @@ func (a *Client) ExperimentServiceArchiveExperimentV1(params *ExperimentServiceA AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*ExperimentServiceArchiveExperimentV1OK), nil - + success, ok := result.(*ExperimentServiceArchiveExperimentV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ExperimentServiceArchiveExperimentV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* ExperimentServiceCreateExperimentV1 creates a new experiment */ -func (a *Client) ExperimentServiceCreateExperimentV1(params *ExperimentServiceCreateExperimentV1Params, authInfo runtime.ClientAuthInfoWriter) (*ExperimentServiceCreateExperimentV1OK, error) { +func (a *Client) ExperimentServiceCreateExperimentV1(params *ExperimentServiceCreateExperimentV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ExperimentServiceCreateExperimentV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewExperimentServiceCreateExperimentV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "ExperimentService_CreateExperimentV1", Method: "POST", PathPattern: "/apis/v1beta1/experiments", @@ -74,24 +127,33 @@ func (a *Client) ExperimentServiceCreateExperimentV1(params *ExperimentServiceCr AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*ExperimentServiceCreateExperimentV1OK), nil - + success, ok := result.(*ExperimentServiceCreateExperimentV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ExperimentServiceCreateExperimentV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* ExperimentServiceDeleteExperimentV1 deletes an experiment without deleting the experiment s runs and jobs to avoid unexpected behaviors delete an experiment s runs and jobs before deleting the experiment */ -func (a *Client) ExperimentServiceDeleteExperimentV1(params *ExperimentServiceDeleteExperimentV1Params, authInfo runtime.ClientAuthInfoWriter) (*ExperimentServiceDeleteExperimentV1OK, error) { +func (a *Client) ExperimentServiceDeleteExperimentV1(params *ExperimentServiceDeleteExperimentV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ExperimentServiceDeleteExperimentV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewExperimentServiceDeleteExperimentV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: 
"ExperimentService_DeleteExperimentV1", Method: "DELETE", PathPattern: "/apis/v1beta1/experiments/{id}", @@ -103,24 +165,33 @@ func (a *Client) ExperimentServiceDeleteExperimentV1(params *ExperimentServiceDe AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*ExperimentServiceDeleteExperimentV1OK), nil - + success, ok := result.(*ExperimentServiceDeleteExperimentV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ExperimentServiceDeleteExperimentV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* ExperimentServiceGetExperimentV1 finds a specific experiment by ID */ -func (a *Client) ExperimentServiceGetExperimentV1(params *ExperimentServiceGetExperimentV1Params, authInfo runtime.ClientAuthInfoWriter) (*ExperimentServiceGetExperimentV1OK, error) { +func (a *Client) ExperimentServiceGetExperimentV1(params *ExperimentServiceGetExperimentV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ExperimentServiceGetExperimentV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewExperimentServiceGetExperimentV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "ExperimentService_GetExperimentV1", Method: "GET", PathPattern: "/apis/v1beta1/experiments/{id}", @@ -132,24 +203,33 @@ func (a *Client) ExperimentServiceGetExperimentV1(params *ExperimentServiceGetEx AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*ExperimentServiceGetExperimentV1OK), nil - + success, ok := result.(*ExperimentServiceGetExperimentV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ExperimentServiceGetExperimentV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* ExperimentServiceListExperimentsV1 finds all experiments supports pagination and sorting on certain fields */ -func (a *Client) ExperimentServiceListExperimentsV1(params *ExperimentServiceListExperimentsV1Params, authInfo runtime.ClientAuthInfoWriter) (*ExperimentServiceListExperimentsV1OK, error) { +func (a *Client) ExperimentServiceListExperimentsV1(params *ExperimentServiceListExperimentsV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ExperimentServiceListExperimentsV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewExperimentServiceListExperimentsV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "ExperimentService_ListExperimentsV1", Method: "GET", PathPattern: "/apis/v1beta1/experiments", @@ -161,24 +241,33 @@ func (a *Client) ExperimentServiceListExperimentsV1(params *ExperimentServiceLis AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*ExperimentServiceListExperimentsV1OK), nil - 
+ success, ok := result.(*ExperimentServiceListExperimentsV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ExperimentServiceListExperimentsV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* ExperimentServiceUnarchiveExperimentV1 restores an archived experiment the experiment s archived runs and jobs will stay archived */ -func (a *Client) ExperimentServiceUnarchiveExperimentV1(params *ExperimentServiceUnarchiveExperimentV1Params, authInfo runtime.ClientAuthInfoWriter) (*ExperimentServiceUnarchiveExperimentV1OK, error) { +func (a *Client) ExperimentServiceUnarchiveExperimentV1(params *ExperimentServiceUnarchiveExperimentV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ExperimentServiceUnarchiveExperimentV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewExperimentServiceUnarchiveExperimentV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "ExperimentService_UnarchiveExperimentV1", Method: "POST", PathPattern: "/apis/v1beta1/experiments/{id}:unarchive", @@ -190,12 +279,22 @@ func (a *Client) ExperimentServiceUnarchiveExperimentV1(params *ExperimentServic AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*ExperimentServiceUnarchiveExperimentV1OK), nil - + success, ok := result.(*ExperimentServiceUnarchiveExperimentV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ExperimentServiceUnarchiveExperimentV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } // SetTransport changes the transport on the client diff --git a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_create_experiment_v1_parameters.go b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_create_experiment_v1_parameters.go index 9ea4ff71e9f..0ce4e13dbf6 100644 --- a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_create_experiment_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_create_experiment_v1_parameters.go @@ -13,67 +13,82 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - experiment_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_model" ) -// NewExperimentServiceCreateExperimentV1Params creates a new ExperimentServiceCreateExperimentV1Params object -// with the default values initialized. +// NewExperimentServiceCreateExperimentV1Params creates a new ExperimentServiceCreateExperimentV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewExperimentServiceCreateExperimentV1Params() *ExperimentServiceCreateExperimentV1Params { - var () return &ExperimentServiceCreateExperimentV1Params{ - timeout: cr.DefaultTimeout, } } // NewExperimentServiceCreateExperimentV1ParamsWithTimeout creates a new ExperimentServiceCreateExperimentV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewExperimentServiceCreateExperimentV1ParamsWithTimeout(timeout time.Duration) *ExperimentServiceCreateExperimentV1Params { - var () return &ExperimentServiceCreateExperimentV1Params{ - timeout: timeout, } } // NewExperimentServiceCreateExperimentV1ParamsWithContext creates a new ExperimentServiceCreateExperimentV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewExperimentServiceCreateExperimentV1ParamsWithContext(ctx context.Context) *ExperimentServiceCreateExperimentV1Params { - var () return &ExperimentServiceCreateExperimentV1Params{ - Context: ctx, } } // NewExperimentServiceCreateExperimentV1ParamsWithHTTPClient creates a new ExperimentServiceCreateExperimentV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewExperimentServiceCreateExperimentV1ParamsWithHTTPClient(client *http.Client) *ExperimentServiceCreateExperimentV1Params { - var () return &ExperimentServiceCreateExperimentV1Params{ HTTPClient: client, } } -/*ExperimentServiceCreateExperimentV1Params contains all the parameters to send to the API endpoint -for the experiment service create experiment v1 operation typically these are written to a http.Request +/* +ExperimentServiceCreateExperimentV1Params contains all the parameters to send to the API endpoint + + for the experiment service create experiment v1 operation. + + Typically these are written to a http.Request. */ type ExperimentServiceCreateExperimentV1Params struct { - /*Body - The experiment to be created. + /* Experiment. + The experiment to be created. */ - Body *experiment_model.APIExperiment + Experiment *experiment_model.APIExperiment timeout time.Duration Context context.Context HTTPClient *http.Client } +// WithDefaults hydrates default values in the experiment service create experiment v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ExperimentServiceCreateExperimentV1Params) WithDefaults() *ExperimentServiceCreateExperimentV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the experiment service create experiment v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *ExperimentServiceCreateExperimentV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the experiment service create experiment v1 params func (o *ExperimentServiceCreateExperimentV1Params) WithTimeout(timeout time.Duration) *ExperimentServiceCreateExperimentV1Params { o.SetTimeout(timeout) @@ -107,15 +122,15 @@ func (o *ExperimentServiceCreateExperimentV1Params) SetHTTPClient(client *http.C o.HTTPClient = client } -// WithBody adds the body to the experiment service create experiment v1 params -func (o *ExperimentServiceCreateExperimentV1Params) WithBody(body *experiment_model.APIExperiment) *ExperimentServiceCreateExperimentV1Params { - o.SetBody(body) +// WithExperiment adds the experiment to the experiment service create experiment v1 params +func (o *ExperimentServiceCreateExperimentV1Params) WithExperiment(experiment *experiment_model.APIExperiment) *ExperimentServiceCreateExperimentV1Params { + o.SetExperiment(experiment) return o } -// SetBody adds the body to the experiment service create experiment v1 params -func (o *ExperimentServiceCreateExperimentV1Params) SetBody(body *experiment_model.APIExperiment) { - o.Body = body +// SetExperiment adds the experiment to the experiment service create experiment v1 params +func (o *ExperimentServiceCreateExperimentV1Params) SetExperiment(experiment *experiment_model.APIExperiment) { + o.Experiment = experiment } // WriteToRequest writes these params to a swagger request @@ -125,9 +140,8 @@ func (o *ExperimentServiceCreateExperimentV1Params) WriteToRequest(r runtime.Cli return err } var res []error - - if o.Body != nil { - if err := r.SetBodyParam(o.Body); err != nil { + if o.Experiment != nil { + if err := r.SetBodyParam(o.Experiment); err != nil { return err } } diff --git a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_create_experiment_v1_responses.go b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_create_experiment_v1_responses.go index 6b6c7f6bedf..d6f07325f84 100644 --- a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_create_experiment_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_create_experiment_v1_responses.go @@ -6,14 +6,14 @@ package experiment_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - experiment_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_model" ) // ExperimentServiceCreateExperimentV1Reader is a Reader for the ExperimentServiceCreateExperimentV1 structure. @@ -24,14 +24,12 @@ type ExperimentServiceCreateExperimentV1Reader struct { // ReadResponse reads a server response into the received o. 
func (o *ExperimentServiceCreateExperimentV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewExperimentServiceCreateExperimentV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewExperimentServiceCreateExperimentV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewExperimentServiceCreateExperimentV1OK() *ExperimentServiceCreateExperime return &ExperimentServiceCreateExperimentV1OK{} } -/*ExperimentServiceCreateExperimentV1OK handles this case with default header values. +/* +ExperimentServiceCreateExperimentV1OK describes a response with status code 200, with default header values. A successful response. */ @@ -57,8 +56,48 @@ type ExperimentServiceCreateExperimentV1OK struct { Payload *experiment_model.APIExperiment } +// IsSuccess returns true when this experiment service create experiment v1 o k response has a 2xx status code +func (o *ExperimentServiceCreateExperimentV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this experiment service create experiment v1 o k response has a 3xx status code +func (o *ExperimentServiceCreateExperimentV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this experiment service create experiment v1 o k response has a 4xx status code +func (o *ExperimentServiceCreateExperimentV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this experiment service create experiment v1 o k response has a 5xx status code +func (o *ExperimentServiceCreateExperimentV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this experiment service create experiment v1 o k response a status code equal to that given +func (o *ExperimentServiceCreateExperimentV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the experiment service create experiment v1 o k response +func (o *ExperimentServiceCreateExperimentV1OK) Code() int { + return 200 +} + func (o *ExperimentServiceCreateExperimentV1OK) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/experiments][%d] experimentServiceCreateExperimentV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/experiments][%d] experimentServiceCreateExperimentV1OK %s", 200, payload) +} + +func (o *ExperimentServiceCreateExperimentV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/experiments][%d] experimentServiceCreateExperimentV1OK %s", 200, payload) +} + +func (o *ExperimentServiceCreateExperimentV1OK) GetPayload() *experiment_model.APIExperiment { + return o.Payload } func (o *ExperimentServiceCreateExperimentV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewExperimentServiceCreateExperimentV1Default(code int) *ExperimentServiceC } } -/*ExperimentServiceCreateExperimentV1Default handles this case with default header values. +/* +ExperimentServiceCreateExperimentV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type ExperimentServiceCreateExperimentV1Default struct { _statusCode int - Payload *experiment_model.GatewayruntimeError + Payload *experiment_model.GooglerpcStatus +} + +// IsSuccess returns true when this experiment service create experiment v1 default response has a 2xx status code +func (o *ExperimentServiceCreateExperimentV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this experiment service create experiment v1 default response has a 3xx status code +func (o *ExperimentServiceCreateExperimentV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this experiment service create experiment v1 default response has a 4xx status code +func (o *ExperimentServiceCreateExperimentV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this experiment service create experiment v1 default response has a 5xx status code +func (o *ExperimentServiceCreateExperimentV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this experiment service create experiment v1 default response a status code equal to that given +func (o *ExperimentServiceCreateExperimentV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the experiment service create experiment v1 default response @@ -96,12 +161,22 @@ func (o *ExperimentServiceCreateExperimentV1Default) Code() int { } func (o *ExperimentServiceCreateExperimentV1Default) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/experiments][%d] ExperimentService_CreateExperimentV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/experiments][%d] ExperimentService_CreateExperimentV1 default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceCreateExperimentV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/experiments][%d] ExperimentService_CreateExperimentV1 default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceCreateExperimentV1Default) GetPayload() *experiment_model.GooglerpcStatus { + return o.Payload } func (o *ExperimentServiceCreateExperimentV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(experiment_model.GatewayruntimeError) + o.Payload = new(experiment_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_delete_experiment_v1_parameters.go b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_delete_experiment_v1_parameters.go index f7d1d34c010..d1b501b553b 100644 --- a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_delete_experiment_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_delete_experiment_v1_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewExperimentServiceDeleteExperimentV1Params creates a new ExperimentServiceDeleteExperimentV1Params object -// with the 
default values initialized. +// NewExperimentServiceDeleteExperimentV1Params creates a new ExperimentServiceDeleteExperimentV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewExperimentServiceDeleteExperimentV1Params() *ExperimentServiceDeleteExperimentV1Params { - var () return &ExperimentServiceDeleteExperimentV1Params{ - timeout: cr.DefaultTimeout, } } // NewExperimentServiceDeleteExperimentV1ParamsWithTimeout creates a new ExperimentServiceDeleteExperimentV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewExperimentServiceDeleteExperimentV1ParamsWithTimeout(timeout time.Duration) *ExperimentServiceDeleteExperimentV1Params { - var () return &ExperimentServiceDeleteExperimentV1Params{ - timeout: timeout, } } // NewExperimentServiceDeleteExperimentV1ParamsWithContext creates a new ExperimentServiceDeleteExperimentV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewExperimentServiceDeleteExperimentV1ParamsWithContext(ctx context.Context) *ExperimentServiceDeleteExperimentV1Params { - var () return &ExperimentServiceDeleteExperimentV1Params{ - Context: ctx, } } // NewExperimentServiceDeleteExperimentV1ParamsWithHTTPClient creates a new ExperimentServiceDeleteExperimentV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewExperimentServiceDeleteExperimentV1ParamsWithHTTPClient(client *http.Client) *ExperimentServiceDeleteExperimentV1Params { - var () return &ExperimentServiceDeleteExperimentV1Params{ HTTPClient: client, } } -/*ExperimentServiceDeleteExperimentV1Params contains all the parameters to send to the API endpoint -for the experiment service delete experiment v1 operation typically these are written to a http.Request +/* +ExperimentServiceDeleteExperimentV1Params contains all the parameters to send to the API endpoint + + for the experiment service delete experiment v1 operation. + + Typically these are written to a http.Request. */ type ExperimentServiceDeleteExperimentV1Params struct { - /*ID - The ID of the experiment to be deleted. + /* ID. + The ID of the experiment to be deleted. */ ID string @@ -72,6 +72,21 @@ type ExperimentServiceDeleteExperimentV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the experiment service delete experiment v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ExperimentServiceDeleteExperimentV1Params) WithDefaults() *ExperimentServiceDeleteExperimentV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the experiment service delete experiment v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *ExperimentServiceDeleteExperimentV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the experiment service delete experiment v1 params func (o *ExperimentServiceDeleteExperimentV1Params) WithTimeout(timeout time.Duration) *ExperimentServiceDeleteExperimentV1Params { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_delete_experiment_v1_responses.go b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_delete_experiment_v1_responses.go index bff9575bb0a..090733fce21 100644 --- a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_delete_experiment_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_delete_experiment_v1_responses.go @@ -6,14 +6,14 @@ package experiment_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - experiment_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_model" ) // ExperimentServiceDeleteExperimentV1Reader is a Reader for the ExperimentServiceDeleteExperimentV1 structure. @@ -24,14 +24,12 @@ type ExperimentServiceDeleteExperimentV1Reader struct { // ReadResponse reads a server response into the received o. func (o *ExperimentServiceDeleteExperimentV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewExperimentServiceDeleteExperimentV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewExperimentServiceDeleteExperimentV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewExperimentServiceDeleteExperimentV1OK() *ExperimentServiceDeleteExperime return &ExperimentServiceDeleteExperimentV1OK{} } -/*ExperimentServiceDeleteExperimentV1OK handles this case with default header values. +/* +ExperimentServiceDeleteExperimentV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type ExperimentServiceDeleteExperimentV1OK struct { Payload interface{} } +// IsSuccess returns true when this experiment service delete experiment v1 o k response has a 2xx status code +func (o *ExperimentServiceDeleteExperimentV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this experiment service delete experiment v1 o k response has a 3xx status code +func (o *ExperimentServiceDeleteExperimentV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this experiment service delete experiment v1 o k response has a 4xx status code +func (o *ExperimentServiceDeleteExperimentV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this experiment service delete experiment v1 o k response has a 5xx status code +func (o *ExperimentServiceDeleteExperimentV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this experiment service delete experiment v1 o k response a status code equal to that given +func (o *ExperimentServiceDeleteExperimentV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the experiment service delete experiment v1 o k response +func (o *ExperimentServiceDeleteExperimentV1OK) Code() int { + return 200 +} + func (o *ExperimentServiceDeleteExperimentV1OK) Error() string { - return fmt.Sprintf("[DELETE /apis/v1beta1/experiments/{id}][%d] experimentServiceDeleteExperimentV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v1beta1/experiments/{id}][%d] experimentServiceDeleteExperimentV1OK %s", 200, payload) +} + +func (o *ExperimentServiceDeleteExperimentV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v1beta1/experiments/{id}][%d] experimentServiceDeleteExperimentV1OK %s", 200, payload) +} + +func (o *ExperimentServiceDeleteExperimentV1OK) GetPayload() interface{} { + return o.Payload } func (o *ExperimentServiceDeleteExperimentV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewExperimentServiceDeleteExperimentV1Default(code int) *ExperimentServiceD } } -/*ExperimentServiceDeleteExperimentV1Default handles this case with default header values. +/* +ExperimentServiceDeleteExperimentV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type ExperimentServiceDeleteExperimentV1Default struct { _statusCode int - Payload *experiment_model.GatewayruntimeError + Payload *experiment_model.GooglerpcStatus +} + +// IsSuccess returns true when this experiment service delete experiment v1 default response has a 2xx status code +func (o *ExperimentServiceDeleteExperimentV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this experiment service delete experiment v1 default response has a 3xx status code +func (o *ExperimentServiceDeleteExperimentV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this experiment service delete experiment v1 default response has a 4xx status code +func (o *ExperimentServiceDeleteExperimentV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this experiment service delete experiment v1 default response has a 5xx status code +func (o *ExperimentServiceDeleteExperimentV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this experiment service delete experiment v1 default response a status code equal to that given +func (o *ExperimentServiceDeleteExperimentV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the experiment service delete experiment v1 default response @@ -94,12 +159,22 @@ func (o *ExperimentServiceDeleteExperimentV1Default) Code() int { } func (o *ExperimentServiceDeleteExperimentV1Default) Error() string { - return fmt.Sprintf("[DELETE /apis/v1beta1/experiments/{id}][%d] ExperimentService_DeleteExperimentV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v1beta1/experiments/{id}][%d] ExperimentService_DeleteExperimentV1 default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceDeleteExperimentV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v1beta1/experiments/{id}][%d] ExperimentService_DeleteExperimentV1 default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceDeleteExperimentV1Default) GetPayload() *experiment_model.GooglerpcStatus { + return o.Payload } func (o *ExperimentServiceDeleteExperimentV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(experiment_model.GatewayruntimeError) + o.Payload = new(experiment_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_get_experiment_v1_parameters.go b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_get_experiment_v1_parameters.go index c0ca54c3023..daa600a0101 100644 --- a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_get_experiment_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_get_experiment_v1_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewExperimentServiceGetExperimentV1Params creates a new ExperimentServiceGetExperimentV1Params object -// with the 
default values initialized. +// NewExperimentServiceGetExperimentV1Params creates a new ExperimentServiceGetExperimentV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewExperimentServiceGetExperimentV1Params() *ExperimentServiceGetExperimentV1Params { - var () return &ExperimentServiceGetExperimentV1Params{ - timeout: cr.DefaultTimeout, } } // NewExperimentServiceGetExperimentV1ParamsWithTimeout creates a new ExperimentServiceGetExperimentV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewExperimentServiceGetExperimentV1ParamsWithTimeout(timeout time.Duration) *ExperimentServiceGetExperimentV1Params { - var () return &ExperimentServiceGetExperimentV1Params{ - timeout: timeout, } } // NewExperimentServiceGetExperimentV1ParamsWithContext creates a new ExperimentServiceGetExperimentV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewExperimentServiceGetExperimentV1ParamsWithContext(ctx context.Context) *ExperimentServiceGetExperimentV1Params { - var () return &ExperimentServiceGetExperimentV1Params{ - Context: ctx, } } // NewExperimentServiceGetExperimentV1ParamsWithHTTPClient creates a new ExperimentServiceGetExperimentV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewExperimentServiceGetExperimentV1ParamsWithHTTPClient(client *http.Client) *ExperimentServiceGetExperimentV1Params { - var () return &ExperimentServiceGetExperimentV1Params{ HTTPClient: client, } } -/*ExperimentServiceGetExperimentV1Params contains all the parameters to send to the API endpoint -for the experiment service get experiment v1 operation typically these are written to a http.Request +/* +ExperimentServiceGetExperimentV1Params contains all the parameters to send to the API endpoint + + for the experiment service get experiment v1 operation. + + Typically these are written to a http.Request. */ type ExperimentServiceGetExperimentV1Params struct { - /*ID - The ID of the experiment to be retrieved. + /* ID. + The ID of the experiment to be retrieved. */ ID string @@ -72,6 +72,21 @@ type ExperimentServiceGetExperimentV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the experiment service get experiment v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ExperimentServiceGetExperimentV1Params) WithDefaults() *ExperimentServiceGetExperimentV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the experiment service get experiment v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *ExperimentServiceGetExperimentV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the experiment service get experiment v1 params func (o *ExperimentServiceGetExperimentV1Params) WithTimeout(timeout time.Duration) *ExperimentServiceGetExperimentV1Params { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_get_experiment_v1_responses.go b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_get_experiment_v1_responses.go index cc1e54612a1..b7f2bb1c8aa 100644 --- a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_get_experiment_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_get_experiment_v1_responses.go @@ -6,14 +6,14 @@ package experiment_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - experiment_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_model" ) // ExperimentServiceGetExperimentV1Reader is a Reader for the ExperimentServiceGetExperimentV1 structure. @@ -24,14 +24,12 @@ type ExperimentServiceGetExperimentV1Reader struct { // ReadResponse reads a server response into the received o. func (o *ExperimentServiceGetExperimentV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewExperimentServiceGetExperimentV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewExperimentServiceGetExperimentV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewExperimentServiceGetExperimentV1OK() *ExperimentServiceGetExperimentV1OK return &ExperimentServiceGetExperimentV1OK{} } -/*ExperimentServiceGetExperimentV1OK handles this case with default header values. +/* +ExperimentServiceGetExperimentV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type ExperimentServiceGetExperimentV1OK struct { Payload *experiment_model.APIExperiment } +// IsSuccess returns true when this experiment service get experiment v1 o k response has a 2xx status code +func (o *ExperimentServiceGetExperimentV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this experiment service get experiment v1 o k response has a 3xx status code +func (o *ExperimentServiceGetExperimentV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this experiment service get experiment v1 o k response has a 4xx status code +func (o *ExperimentServiceGetExperimentV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this experiment service get experiment v1 o k response has a 5xx status code +func (o *ExperimentServiceGetExperimentV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this experiment service get experiment v1 o k response a status code equal to that given +func (o *ExperimentServiceGetExperimentV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the experiment service get experiment v1 o k response +func (o *ExperimentServiceGetExperimentV1OK) Code() int { + return 200 +} + func (o *ExperimentServiceGetExperimentV1OK) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/experiments/{id}][%d] experimentServiceGetExperimentV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/experiments/{id}][%d] experimentServiceGetExperimentV1OK %s", 200, payload) +} + +func (o *ExperimentServiceGetExperimentV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/experiments/{id}][%d] experimentServiceGetExperimentV1OK %s", 200, payload) +} + +func (o *ExperimentServiceGetExperimentV1OK) GetPayload() *experiment_model.APIExperiment { + return o.Payload } func (o *ExperimentServiceGetExperimentV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewExperimentServiceGetExperimentV1Default(code int) *ExperimentServiceGetE } } -/*ExperimentServiceGetExperimentV1Default handles this case with default header values. +/* +ExperimentServiceGetExperimentV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type ExperimentServiceGetExperimentV1Default struct { _statusCode int - Payload *experiment_model.GatewayruntimeError + Payload *experiment_model.GooglerpcStatus +} + +// IsSuccess returns true when this experiment service get experiment v1 default response has a 2xx status code +func (o *ExperimentServiceGetExperimentV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this experiment service get experiment v1 default response has a 3xx status code +func (o *ExperimentServiceGetExperimentV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this experiment service get experiment v1 default response has a 4xx status code +func (o *ExperimentServiceGetExperimentV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this experiment service get experiment v1 default response has a 5xx status code +func (o *ExperimentServiceGetExperimentV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this experiment service get experiment v1 default response a status code equal to that given +func (o *ExperimentServiceGetExperimentV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the experiment service get experiment v1 default response @@ -96,12 +161,22 @@ func (o *ExperimentServiceGetExperimentV1Default) Code() int { } func (o *ExperimentServiceGetExperimentV1Default) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/experiments/{id}][%d] ExperimentService_GetExperimentV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/experiments/{id}][%d] ExperimentService_GetExperimentV1 default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceGetExperimentV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/experiments/{id}][%d] ExperimentService_GetExperimentV1 default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceGetExperimentV1Default) GetPayload() *experiment_model.GooglerpcStatus { + return o.Payload } func (o *ExperimentServiceGetExperimentV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(experiment_model.GatewayruntimeError) + o.Payload = new(experiment_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_list_experiments_v1_parameters.go b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_list_experiments_v1_parameters.go index 09f5860e3be..55aaf41a0a3 100644 --- a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_list_experiments_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_list_experiments_v1_parameters.go @@ -13,101 +13,98 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" - - strfmt "github.com/go-openapi/strfmt" ) -// NewExperimentServiceListExperimentsV1Params creates a new ExperimentServiceListExperimentsV1Params object -// with the default values initialized. 
+// NewExperimentServiceListExperimentsV1Params creates a new ExperimentServiceListExperimentsV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewExperimentServiceListExperimentsV1Params() *ExperimentServiceListExperimentsV1Params { - var ( - resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") - ) return &ExperimentServiceListExperimentsV1Params{ - ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, - timeout: cr.DefaultTimeout, } } // NewExperimentServiceListExperimentsV1ParamsWithTimeout creates a new ExperimentServiceListExperimentsV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewExperimentServiceListExperimentsV1ParamsWithTimeout(timeout time.Duration) *ExperimentServiceListExperimentsV1Params { - var ( - resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") - ) return &ExperimentServiceListExperimentsV1Params{ - ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, - timeout: timeout, } } // NewExperimentServiceListExperimentsV1ParamsWithContext creates a new ExperimentServiceListExperimentsV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewExperimentServiceListExperimentsV1ParamsWithContext(ctx context.Context) *ExperimentServiceListExperimentsV1Params { - var ( - resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") - ) return &ExperimentServiceListExperimentsV1Params{ - ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, - Context: ctx, } } // NewExperimentServiceListExperimentsV1ParamsWithHTTPClient creates a new ExperimentServiceListExperimentsV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewExperimentServiceListExperimentsV1ParamsWithHTTPClient(client *http.Client) *ExperimentServiceListExperimentsV1Params { - var ( - resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") - ) return &ExperimentServiceListExperimentsV1Params{ - ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, - HTTPClient: client, + HTTPClient: client, } } -/*ExperimentServiceListExperimentsV1Params contains all the parameters to send to the API endpoint -for the experiment service list experiments v1 operation typically these are written to a http.Request +/* +ExperimentServiceListExperimentsV1Params contains all the parameters to send to the API endpoint + + for the experiment service list experiments v1 operation. + + Typically these are written to a http.Request. */ type ExperimentServiceListExperimentsV1Params struct { - /*Filter - A url-encoded, JSON-serialized Filter protocol buffer (see - [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/v1beta1/filter.proto)). + /* Filter. + A url-encoded, JSON-serialized Filter protocol buffer (see + [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/v1beta1/filter.proto)). */ Filter *string - /*PageSize - The number of experiments to be listed per page. If there are more + + /* PageSize. + + The number of experiments to be listed per page. 
If there are more experiments than this number, the response message will contain a nextPageToken field you can use to fetch the next page. + Format: int32 */ PageSize *int32 - /*PageToken - A page token to request the next page of results. The token is acquried + + /* PageToken. + + A page token to request the next page of results. The token is acquried from the nextPageToken field of the response from the previous ListExperiment call or can be omitted when fetching the first page. - */ PageToken *string - /*ResourceReferenceKeyID - The ID of the resource that referred to. + /* ResourceReferenceKeyID. + + The ID of the resource that referred to. */ ResourceReferenceKeyID *string - /*ResourceReferenceKeyType - The type of the resource that referred to. + /* ResourceReferenceKeyType. + + The type of the resource that referred to. + + Default: "UNKNOWN_RESOURCE_TYPE" */ ResourceReferenceKeyType *string - /*SortBy - Can be format of "field_name", "field_name asc" or "field_name desc" - Ascending by default. + /* SortBy. + + Can be format of "field_name", "field_name asc" or "field_name desc" + Ascending by default. */ SortBy *string @@ -116,6 +113,32 @@ type ExperimentServiceListExperimentsV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the experiment service list experiments v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ExperimentServiceListExperimentsV1Params) WithDefaults() *ExperimentServiceListExperimentsV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the experiment service list experiments v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ExperimentServiceListExperimentsV1Params) SetDefaults() { + var ( + resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") + ) + + val := ExperimentServiceListExperimentsV1Params{ + ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, + } + + val.timeout = o.timeout + val.Context = o.Context + val.HTTPClient = o.HTTPClient + *o = val +} + // WithTimeout adds the timeout to the experiment service list experiments v1 params func (o *ExperimentServiceListExperimentsV1Params) WithTimeout(timeout time.Duration) *ExperimentServiceListExperimentsV1Params { o.SetTimeout(timeout) @@ -227,96 +250,102 @@ func (o *ExperimentServiceListExperimentsV1Params) WriteToRequest(r runtime.Clie // query param filter var qrFilter string + if o.Filter != nil { qrFilter = *o.Filter } qFilter := qrFilter if qFilter != "" { + if err := r.SetQueryParam("filter", qFilter); err != nil { return err } } - } if o.PageSize != nil { // query param page_size var qrPageSize int32 + if o.PageSize != nil { qrPageSize = *o.PageSize } qPageSize := swag.FormatInt32(qrPageSize) if qPageSize != "" { + if err := r.SetQueryParam("page_size", qPageSize); err != nil { return err } } - } if o.PageToken != nil { // query param page_token var qrPageToken string + if o.PageToken != nil { qrPageToken = *o.PageToken } qPageToken := qrPageToken if qPageToken != "" { + if err := r.SetQueryParam("page_token", qPageToken); err != nil { return err } } - } if o.ResourceReferenceKeyID != nil { // query param resource_reference_key.id var qrResourceReferenceKeyID string + if o.ResourceReferenceKeyID != nil { qrResourceReferenceKeyID = *o.ResourceReferenceKeyID } qResourceReferenceKeyID := qrResourceReferenceKeyID if qResourceReferenceKeyID != "" { + if err := 
r.SetQueryParam("resource_reference_key.id", qResourceReferenceKeyID); err != nil { return err } } - } if o.ResourceReferenceKeyType != nil { // query param resource_reference_key.type var qrResourceReferenceKeyType string + if o.ResourceReferenceKeyType != nil { qrResourceReferenceKeyType = *o.ResourceReferenceKeyType } qResourceReferenceKeyType := qrResourceReferenceKeyType if qResourceReferenceKeyType != "" { + if err := r.SetQueryParam("resource_reference_key.type", qResourceReferenceKeyType); err != nil { return err } } - } if o.SortBy != nil { // query param sort_by var qrSortBy string + if o.SortBy != nil { qrSortBy = *o.SortBy } qSortBy := qrSortBy if qSortBy != "" { + if err := r.SetQueryParam("sort_by", qSortBy); err != nil { return err } } - } if len(res) > 0 { diff --git a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_list_experiments_v1_responses.go b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_list_experiments_v1_responses.go index 9d9fd4f6e6e..fbbdb27f1ac 100644 --- a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_list_experiments_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_list_experiments_v1_responses.go @@ -6,14 +6,14 @@ package experiment_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - experiment_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_model" ) // ExperimentServiceListExperimentsV1Reader is a Reader for the ExperimentServiceListExperimentsV1 structure. @@ -24,14 +24,12 @@ type ExperimentServiceListExperimentsV1Reader struct { // ReadResponse reads a server response into the received o. func (o *ExperimentServiceListExperimentsV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewExperimentServiceListExperimentsV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewExperimentServiceListExperimentsV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewExperimentServiceListExperimentsV1OK() *ExperimentServiceListExperiments return &ExperimentServiceListExperimentsV1OK{} } -/*ExperimentServiceListExperimentsV1OK handles this case with default header values. +/* +ExperimentServiceListExperimentsV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type ExperimentServiceListExperimentsV1OK struct { Payload *experiment_model.APIListExperimentsResponse } +// IsSuccess returns true when this experiment service list experiments v1 o k response has a 2xx status code +func (o *ExperimentServiceListExperimentsV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this experiment service list experiments v1 o k response has a 3xx status code +func (o *ExperimentServiceListExperimentsV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this experiment service list experiments v1 o k response has a 4xx status code +func (o *ExperimentServiceListExperimentsV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this experiment service list experiments v1 o k response has a 5xx status code +func (o *ExperimentServiceListExperimentsV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this experiment service list experiments v1 o k response a status code equal to that given +func (o *ExperimentServiceListExperimentsV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the experiment service list experiments v1 o k response +func (o *ExperimentServiceListExperimentsV1OK) Code() int { + return 200 +} + func (o *ExperimentServiceListExperimentsV1OK) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/experiments][%d] experimentServiceListExperimentsV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/experiments][%d] experimentServiceListExperimentsV1OK %s", 200, payload) +} + +func (o *ExperimentServiceListExperimentsV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/experiments][%d] experimentServiceListExperimentsV1OK %s", 200, payload) +} + +func (o *ExperimentServiceListExperimentsV1OK) GetPayload() *experiment_model.APIListExperimentsResponse { + return o.Payload } func (o *ExperimentServiceListExperimentsV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewExperimentServiceListExperimentsV1Default(code int) *ExperimentServiceLi } } -/*ExperimentServiceListExperimentsV1Default handles this case with default header values. +/* +ExperimentServiceListExperimentsV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type ExperimentServiceListExperimentsV1Default struct { _statusCode int - Payload *experiment_model.GatewayruntimeError + Payload *experiment_model.GooglerpcStatus +} + +// IsSuccess returns true when this experiment service list experiments v1 default response has a 2xx status code +func (o *ExperimentServiceListExperimentsV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this experiment service list experiments v1 default response has a 3xx status code +func (o *ExperimentServiceListExperimentsV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this experiment service list experiments v1 default response has a 4xx status code +func (o *ExperimentServiceListExperimentsV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this experiment service list experiments v1 default response has a 5xx status code +func (o *ExperimentServiceListExperimentsV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this experiment service list experiments v1 default response a status code equal to that given +func (o *ExperimentServiceListExperimentsV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the experiment service list experiments v1 default response @@ -96,12 +161,22 @@ func (o *ExperimentServiceListExperimentsV1Default) Code() int { } func (o *ExperimentServiceListExperimentsV1Default) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/experiments][%d] ExperimentService_ListExperimentsV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/experiments][%d] ExperimentService_ListExperimentsV1 default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceListExperimentsV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/experiments][%d] ExperimentService_ListExperimentsV1 default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceListExperimentsV1Default) GetPayload() *experiment_model.GooglerpcStatus { + return o.Payload } func (o *ExperimentServiceListExperimentsV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(experiment_model.GatewayruntimeError) + o.Payload = new(experiment_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_unarchive_experiment_v1_parameters.go b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_unarchive_experiment_v1_parameters.go index c808a07bd8a..79b7a1206f4 100644 --- a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_unarchive_experiment_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_unarchive_experiment_v1_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewExperimentServiceUnarchiveExperimentV1Params creates a new ExperimentServiceUnarchiveExperimentV1Params object -// with the default 
values initialized. +// NewExperimentServiceUnarchiveExperimentV1Params creates a new ExperimentServiceUnarchiveExperimentV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewExperimentServiceUnarchiveExperimentV1Params() *ExperimentServiceUnarchiveExperimentV1Params { - var () return &ExperimentServiceUnarchiveExperimentV1Params{ - timeout: cr.DefaultTimeout, } } // NewExperimentServiceUnarchiveExperimentV1ParamsWithTimeout creates a new ExperimentServiceUnarchiveExperimentV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewExperimentServiceUnarchiveExperimentV1ParamsWithTimeout(timeout time.Duration) *ExperimentServiceUnarchiveExperimentV1Params { - var () return &ExperimentServiceUnarchiveExperimentV1Params{ - timeout: timeout, } } // NewExperimentServiceUnarchiveExperimentV1ParamsWithContext creates a new ExperimentServiceUnarchiveExperimentV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewExperimentServiceUnarchiveExperimentV1ParamsWithContext(ctx context.Context) *ExperimentServiceUnarchiveExperimentV1Params { - var () return &ExperimentServiceUnarchiveExperimentV1Params{ - Context: ctx, } } // NewExperimentServiceUnarchiveExperimentV1ParamsWithHTTPClient creates a new ExperimentServiceUnarchiveExperimentV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewExperimentServiceUnarchiveExperimentV1ParamsWithHTTPClient(client *http.Client) *ExperimentServiceUnarchiveExperimentV1Params { - var () return &ExperimentServiceUnarchiveExperimentV1Params{ HTTPClient: client, } } -/*ExperimentServiceUnarchiveExperimentV1Params contains all the parameters to send to the API endpoint -for the experiment service unarchive experiment v1 operation typically these are written to a http.Request +/* +ExperimentServiceUnarchiveExperimentV1Params contains all the parameters to send to the API endpoint + + for the experiment service unarchive experiment v1 operation. + + Typically these are written to a http.Request. */ type ExperimentServiceUnarchiveExperimentV1Params struct { - /*ID - The ID of the experiment to be restored. + /* ID. + The ID of the experiment to be restored. */ ID string @@ -72,6 +72,21 @@ type ExperimentServiceUnarchiveExperimentV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the experiment service unarchive experiment v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ExperimentServiceUnarchiveExperimentV1Params) WithDefaults() *ExperimentServiceUnarchiveExperimentV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the experiment service unarchive experiment v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *ExperimentServiceUnarchiveExperimentV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the experiment service unarchive experiment v1 params func (o *ExperimentServiceUnarchiveExperimentV1Params) WithTimeout(timeout time.Duration) *ExperimentServiceUnarchiveExperimentV1Params { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_unarchive_experiment_v1_responses.go b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_unarchive_experiment_v1_responses.go index e305187849b..dbd8ffe9883 100644 --- a/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_unarchive_experiment_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/experiment_client/experiment_service/experiment_service_unarchive_experiment_v1_responses.go @@ -6,14 +6,14 @@ package experiment_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - experiment_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_model" ) // ExperimentServiceUnarchiveExperimentV1Reader is a Reader for the ExperimentServiceUnarchiveExperimentV1 structure. @@ -24,14 +24,12 @@ type ExperimentServiceUnarchiveExperimentV1Reader struct { // ReadResponse reads a server response into the received o. func (o *ExperimentServiceUnarchiveExperimentV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewExperimentServiceUnarchiveExperimentV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewExperimentServiceUnarchiveExperimentV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewExperimentServiceUnarchiveExperimentV1OK() *ExperimentServiceUnarchiveEx return &ExperimentServiceUnarchiveExperimentV1OK{} } -/*ExperimentServiceUnarchiveExperimentV1OK handles this case with default header values. +/* +ExperimentServiceUnarchiveExperimentV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type ExperimentServiceUnarchiveExperimentV1OK struct { Payload interface{} } +// IsSuccess returns true when this experiment service unarchive experiment v1 o k response has a 2xx status code +func (o *ExperimentServiceUnarchiveExperimentV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this experiment service unarchive experiment v1 o k response has a 3xx status code +func (o *ExperimentServiceUnarchiveExperimentV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this experiment service unarchive experiment v1 o k response has a 4xx status code +func (o *ExperimentServiceUnarchiveExperimentV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this experiment service unarchive experiment v1 o k response has a 5xx status code +func (o *ExperimentServiceUnarchiveExperimentV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this experiment service unarchive experiment v1 o k response a status code equal to that given +func (o *ExperimentServiceUnarchiveExperimentV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the experiment service unarchive experiment v1 o k response +func (o *ExperimentServiceUnarchiveExperimentV1OK) Code() int { + return 200 +} + func (o *ExperimentServiceUnarchiveExperimentV1OK) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/experiments/{id}:unarchive][%d] experimentServiceUnarchiveExperimentV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/experiments/{id}:unarchive][%d] experimentServiceUnarchiveExperimentV1OK %s", 200, payload) +} + +func (o *ExperimentServiceUnarchiveExperimentV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/experiments/{id}:unarchive][%d] experimentServiceUnarchiveExperimentV1OK %s", 200, payload) +} + +func (o *ExperimentServiceUnarchiveExperimentV1OK) GetPayload() interface{} { + return o.Payload } func (o *ExperimentServiceUnarchiveExperimentV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewExperimentServiceUnarchiveExperimentV1Default(code int) *ExperimentServi } } -/*ExperimentServiceUnarchiveExperimentV1Default handles this case with default header values. +/* +ExperimentServiceUnarchiveExperimentV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type ExperimentServiceUnarchiveExperimentV1Default struct { _statusCode int - Payload *experiment_model.GatewayruntimeError + Payload *experiment_model.GooglerpcStatus +} + +// IsSuccess returns true when this experiment service unarchive experiment v1 default response has a 2xx status code +func (o *ExperimentServiceUnarchiveExperimentV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this experiment service unarchive experiment v1 default response has a 3xx status code +func (o *ExperimentServiceUnarchiveExperimentV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this experiment service unarchive experiment v1 default response has a 4xx status code +func (o *ExperimentServiceUnarchiveExperimentV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this experiment service unarchive experiment v1 default response has a 5xx status code +func (o *ExperimentServiceUnarchiveExperimentV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this experiment service unarchive experiment v1 default response a status code equal to that given +func (o *ExperimentServiceUnarchiveExperimentV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the experiment service unarchive experiment v1 default response @@ -94,12 +159,22 @@ func (o *ExperimentServiceUnarchiveExperimentV1Default) Code() int { } func (o *ExperimentServiceUnarchiveExperimentV1Default) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/experiments/{id}:unarchive][%d] ExperimentService_UnarchiveExperimentV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/experiments/{id}:unarchive][%d] ExperimentService_UnarchiveExperimentV1 default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceUnarchiveExperimentV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/experiments/{id}:unarchive][%d] ExperimentService_UnarchiveExperimentV1 default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceUnarchiveExperimentV1Default) GetPayload() *experiment_model.GooglerpcStatus { + return o.Payload } func (o *ExperimentServiceUnarchiveExperimentV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(experiment_model.GatewayruntimeError) + o.Payload = new(experiment_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/experiment_model/api_experiment.go b/backend/api/v1beta1/go_http_client/experiment_model/api_experiment.go index cc17c74472a..3bef1d49af2 100644 --- a/backend/api/v1beta1/go_http_client/experiment_model/api_experiment.go +++ b/backend/api/v1beta1/go_http_client/experiment_model/api_experiment.go @@ -6,16 +6,17 @@ package experiment_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // APIExperiment api experiment +// // swagger:model apiExperiment type APIExperiment struct { @@ -37,7 +38,7 @@ type 
APIExperiment struct { ResourceReferences []*APIResourceReference `json:"resource_references"` // Output. Specifies whether this experiment is in archived or available state. - StorageState APIExperimentStorageState `json:"storage_state,omitempty"` + StorageState *APIExperimentStorageState `json:"storage_state,omitempty"` } // Validate validates this api experiment @@ -63,7 +64,6 @@ func (m *APIExperiment) Validate(formats strfmt.Registry) error { } func (m *APIExperiment) validateCreatedAt(formats strfmt.Registry) error { - if swag.IsZero(m.CreatedAt) { // not required return nil } @@ -76,7 +76,6 @@ func (m *APIExperiment) validateCreatedAt(formats strfmt.Registry) error { } func (m *APIExperiment) validateResourceReferences(formats strfmt.Registry) error { - if swag.IsZero(m.ResourceReferences) { // not required return nil } @@ -90,6 +89,8 @@ func (m *APIExperiment) validateResourceReferences(formats strfmt.Registry) erro if err := m.ResourceReferences[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("resource_references" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("resource_references" + "." + strconv.Itoa(i)) } return err } @@ -101,16 +102,83 @@ func (m *APIExperiment) validateResourceReferences(formats strfmt.Registry) erro } func (m *APIExperiment) validateStorageState(formats strfmt.Registry) error { - if swag.IsZero(m.StorageState) { // not required return nil } - if err := m.StorageState.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("storage_state") + if m.StorageState != nil { + if err := m.StorageState.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("storage_state") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("storage_state") + } + return err + } + } + + return nil +} + +// ContextValidate validate this api experiment based on the context it is used +func (m *APIExperiment) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateResourceReferences(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateStorageState(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIExperiment) contextValidateResourceReferences(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.ResourceReferences); i++ { + + if m.ResourceReferences[i] != nil { + + if swag.IsZero(m.ResourceReferences[i]) { // not required + return nil + } + + if err := m.ResourceReferences[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("resource_references" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("resource_references" + "." 
+ strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +func (m *APIExperiment) contextValidateStorageState(ctx context.Context, formats strfmt.Registry) error { + + if m.StorageState != nil { + + if swag.IsZero(m.StorageState) { // not required + return nil + } + + if err := m.StorageState.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("storage_state") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("storage_state") + } + return err } - return err } return nil diff --git a/backend/api/v1beta1/go_http_client/experiment_model/api_experiment_storage_state.go b/backend/api/v1beta1/go_http_client/experiment_model/api_experiment_storage_state.go index c7342e69bee..a5532596f76 100644 --- a/backend/api/v1beta1/go_http_client/experiment_model/api_experiment_storage_state.go +++ b/backend/api/v1beta1/go_http_client/experiment_model/api_experiment_storage_state.go @@ -6,18 +6,28 @@ package experiment_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // APIExperimentStorageState api experiment storage state +// // swagger:model apiExperimentStorageState type APIExperimentStorageState string +func NewAPIExperimentStorageState(value APIExperimentStorageState) *APIExperimentStorageState { + return &value +} + +// Pointer returns a pointer to a freshly-allocated APIExperimentStorageState. +func (m APIExperimentStorageState) Pointer() *APIExperimentStorageState { + return &m +} + const ( // APIExperimentStorageStateSTORAGESTATEUNSPECIFIED captures enum value "STORAGESTATE_UNSPECIFIED" @@ -44,7 +54,7 @@ func init() { } func (m APIExperimentStorageState) validateAPIExperimentStorageStateEnum(path, location string, value APIExperimentStorageState) error { - if err := validate.Enum(path, location, value, apiExperimentStorageStateEnum); err != nil { + if err := validate.EnumCase(path, location, value, apiExperimentStorageStateEnum, true); err != nil { return err } return nil @@ -64,3 +74,8 @@ func (m APIExperimentStorageState) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this api experiment storage state based on context it is used +func (m APIExperimentStorageState) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v1beta1/go_http_client/experiment_model/api_list_experiments_response.go b/backend/api/v1beta1/go_http_client/experiment_model/api_list_experiments_response.go index 0b2294c69a5..e447fbd3628 100644 --- a/backend/api/v1beta1/go_http_client/experiment_model/api_list_experiments_response.go +++ b/backend/api/v1beta1/go_http_client/experiment_model/api_list_experiments_response.go @@ -6,15 +6,16 @@ package experiment_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIListExperimentsResponse api list experiments response +// // swagger:model apiListExperimentsResponse type APIListExperimentsResponse struct { @@ -43,7 +44,6 @@ func (m *APIListExperimentsResponse) Validate(formats strfmt.Registry) error { } func (m *APIListExperimentsResponse) 
validateExperiments(formats strfmt.Registry) error { - if swag.IsZero(m.Experiments) { // not required return nil } @@ -57,6 +57,47 @@ func (m *APIListExperimentsResponse) validateExperiments(formats strfmt.Registry if err := m.Experiments[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("experiments" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("experiments" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this api list experiments response based on the context it is used +func (m *APIListExperimentsResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateExperiments(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIListExperimentsResponse) contextValidateExperiments(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Experiments); i++ { + + if m.Experiments[i] != nil { + + if swag.IsZero(m.Experiments[i]) { // not required + return nil + } + + if err := m.Experiments[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("experiments" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("experiments" + "." + strconv.Itoa(i)) } return err } diff --git a/backend/api/v1beta1/go_http_client/experiment_model/api_relationship.go b/backend/api/v1beta1/go_http_client/experiment_model/api_relationship.go index 24c3abcb7ce..f550ebe7923 100644 --- a/backend/api/v1beta1/go_http_client/experiment_model/api_relationship.go +++ b/backend/api/v1beta1/go_http_client/experiment_model/api_relationship.go @@ -6,18 +6,28 @@ package experiment_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // APIRelationship api relationship +// // swagger:model apiRelationship type APIRelationship string +func NewAPIRelationship(value APIRelationship) *APIRelationship { + return &value +} + +// Pointer returns a pointer to a freshly-allocated APIRelationship. 
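// ---------------------------------------------------------------------------
// Illustrative sketch (editorial, not part of the generated diff): the
// enum-typed model fields regenerated in these hunks (StorageState, Type,
// Relationship) are now pointers, so callers populate them through the new
// NewAPIExperimentStorageState constructor and Pointer() helpers, and can run
// the added ContextValidate pass alongside Validate. All identifiers below
// appear in the surrounding model hunks.
package kfpexamples

import (
	"context"
	"log"

	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_model"
)

func buildExperiment() *experiment_model.APIExperiment {
	exp := &experiment_model.APIExperiment{
		StorageState: experiment_model.NewAPIExperimentStorageState(experiment_model.APIExperimentStorageStateSTORAGESTATEUNSPECIFIED),
		ResourceReferences: []*experiment_model.APIResourceReference{{
			Key: &experiment_model.APIResourceKey{
				ID:   "ns-1",
				Type: experiment_model.APIResourceTypeUNKNOWNRESOURCETYPE.Pointer(),
			},
			Relationship: experiment_model.APIRelationshipUNKNOWNRELATIONSHIP.Pointer(),
		}},
	}
	if err := exp.Validate(strfmt.Default); err != nil {
		log.Printf("validate: %v", err)
	}
	if err := exp.ContextValidate(context.Background(), strfmt.Default); err != nil {
		log.Printf("context validate: %v", err)
	}
	return exp
}
// ---------------------------------------------------------------------------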
+func (m APIRelationship) Pointer() *APIRelationship { + return &m +} + const ( // APIRelationshipUNKNOWNRELATIONSHIP captures enum value "UNKNOWN_RELATIONSHIP" @@ -44,7 +54,7 @@ func init() { } func (m APIRelationship) validateAPIRelationshipEnum(path, location string, value APIRelationship) error { - if err := validate.Enum(path, location, value, apiRelationshipEnum); err != nil { + if err := validate.EnumCase(path, location, value, apiRelationshipEnum, true); err != nil { return err } return nil @@ -64,3 +74,8 @@ func (m APIRelationship) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this api relationship based on context it is used +func (m APIRelationship) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v1beta1/go_http_client/experiment_model/api_resource_key.go b/backend/api/v1beta1/go_http_client/experiment_model/api_resource_key.go index 7dfbd3f0118..98febf365fb 100644 --- a/backend/api/v1beta1/go_http_client/experiment_model/api_resource_key.go +++ b/backend/api/v1beta1/go_http_client/experiment_model/api_resource_key.go @@ -6,13 +6,15 @@ package experiment_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIResourceKey api resource key +// // swagger:model apiResourceKey type APIResourceKey struct { @@ -20,7 +22,7 @@ type APIResourceKey struct { ID string `json:"id,omitempty"` // The type of the resource that referred to. - Type APIResourceType `json:"type,omitempty"` + Type *APIResourceType `json:"type,omitempty"` } // Validate validates this api resource key @@ -38,16 +40,54 @@ func (m *APIResourceKey) Validate(formats strfmt.Registry) error { } func (m *APIResourceKey) validateType(formats strfmt.Registry) error { - if swag.IsZero(m.Type) { // not required return nil } - if err := m.Type.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("type") + if m.Type != nil { + if err := m.Type.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// ContextValidate validate this api resource key based on the context it is used +func (m *APIResourceKey) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateType(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *APIResourceKey) contextValidateType(ctx context.Context, formats strfmt.Registry) error { + + if m.Type != nil { + + if swag.IsZero(m.Type) { // not required + return nil + } + + if err := m.Type.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err } - return err } return nil diff --git a/backend/api/v1beta1/go_http_client/experiment_model/api_resource_reference.go b/backend/api/v1beta1/go_http_client/experiment_model/api_resource_reference.go index 8103adf2612..78a467bd9e8 100644 --- a/backend/api/v1beta1/go_http_client/experiment_model/api_resource_reference.go +++ b/backend/api/v1beta1/go_http_client/experiment_model/api_resource_reference.go @@ -6,13 +6,15 @@ package experiment_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIResourceReference api resource reference +// // swagger:model apiResourceReference type APIResourceReference struct { @@ -23,7 +25,7 @@ type APIResourceReference struct { Name string `json:"name,omitempty"` // Required field. The relationship from referred resource to the object. - Relationship APIRelationship `json:"relationship,omitempty"` + Relationship *APIRelationship `json:"relationship,omitempty"` } // Validate validates this api resource reference @@ -45,7 +47,6 @@ func (m *APIResourceReference) Validate(formats strfmt.Registry) error { } func (m *APIResourceReference) validateKey(formats strfmt.Registry) error { - if swag.IsZero(m.Key) { // not required return nil } @@ -54,6 +55,8 @@ func (m *APIResourceReference) validateKey(formats strfmt.Registry) error { if err := m.Key.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("key") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("key") } return err } @@ -63,16 +66,79 @@ func (m *APIResourceReference) validateKey(formats strfmt.Registry) error { } func (m *APIResourceReference) validateRelationship(formats strfmt.Registry) error { - if swag.IsZero(m.Relationship) { // not required return nil } - if err := m.Relationship.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("relationship") + if m.Relationship != nil { + if err := m.Relationship.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("relationship") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("relationship") + } + return err + } + } + + return nil +} + +// ContextValidate validate this api resource reference based on the context it is used +func (m *APIResourceReference) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateKey(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateRelationship(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *APIResourceReference) contextValidateKey(ctx context.Context, formats strfmt.Registry) error { + + if m.Key != nil { + + if swag.IsZero(m.Key) { // not required + return nil + } + + if err := m.Key.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("key") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("key") + } + return err + } + } + + return nil +} + +func (m *APIResourceReference) contextValidateRelationship(ctx context.Context, formats strfmt.Registry) error { + + if m.Relationship != nil { + + if swag.IsZero(m.Relationship) { // not required + return nil + } + + if err := m.Relationship.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("relationship") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("relationship") + } + return err } - return err } return nil diff --git a/backend/api/v1beta1/go_http_client/experiment_model/api_resource_type.go b/backend/api/v1beta1/go_http_client/experiment_model/api_resource_type.go index aeb958e1964..5287d5f6656 100644 --- a/backend/api/v1beta1/go_http_client/experiment_model/api_resource_type.go +++ b/backend/api/v1beta1/go_http_client/experiment_model/api_resource_type.go @@ -6,18 +6,28 @@ package experiment_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // APIResourceType api resource type +// // swagger:model apiResourceType type APIResourceType string +func NewAPIResourceType(value APIResourceType) *APIResourceType { + return &value +} + +// Pointer returns a pointer to a freshly-allocated APIResourceType. +func (m APIResourceType) Pointer() *APIResourceType { + return &m +} + const ( // APIResourceTypeUNKNOWNRESOURCETYPE captures enum value "UNKNOWN_RESOURCE_TYPE" @@ -53,7 +63,7 @@ func init() { } func (m APIResourceType) validateAPIResourceTypeEnum(path, location string, value APIResourceType) error { - if err := validate.Enum(path, location, value, apiResourceTypeEnum); err != nil { + if err := validate.EnumCase(path, location, value, apiResourceTypeEnum, true); err != nil { return err } return nil @@ -73,3 +83,8 @@ func (m APIResourceType) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this api resource type based on context it is used +func (m APIResourceType) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v1beta1/go_http_client/experiment_model/gatewayruntime_error.go b/backend/api/v1beta1/go_http_client/experiment_model/gatewayruntime_error.go deleted file mode 100644 index 460360100dd..00000000000 --- a/backend/api/v1beta1/go_http_client/experiment_model/gatewayruntime_error.go +++ /dev/null @@ -1,89 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package experiment_model - -// This file was generated by the swagger tool. 
-// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "strconv" - - strfmt "github.com/go-openapi/strfmt" - - "github.com/go-openapi/errors" - "github.com/go-openapi/swag" -) - -// GatewayruntimeError gatewayruntime error -// swagger:model gatewayruntimeError -type GatewayruntimeError struct { - - // code - Code int32 `json:"code,omitempty"` - - // details - Details []*ProtobufAny `json:"details"` - - // error - Error string `json:"error,omitempty"` - - // message - Message string `json:"message,omitempty"` -} - -// Validate validates this gatewayruntime error -func (m *GatewayruntimeError) Validate(formats strfmt.Registry) error { - var res []error - - if err := m.validateDetails(formats); err != nil { - res = append(res, err) - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} - -func (m *GatewayruntimeError) validateDetails(formats strfmt.Registry) error { - - if swag.IsZero(m.Details) { // not required - return nil - } - - for i := 0; i < len(m.Details); i++ { - if swag.IsZero(m.Details[i]) { // not required - continue - } - - if m.Details[i] != nil { - if err := m.Details[i].Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("details" + "." + strconv.Itoa(i)) - } - return err - } - } - - } - - return nil -} - -// MarshalBinary interface implementation -func (m *GatewayruntimeError) MarshalBinary() ([]byte, error) { - if m == nil { - return nil, nil - } - return swag.WriteJSON(m) -} - -// UnmarshalBinary interface implementation -func (m *GatewayruntimeError) UnmarshalBinary(b []byte) error { - var res GatewayruntimeError - if err := swag.ReadJSON(b, &res); err != nil { - return err - } - *m = res - return nil -} diff --git a/backend/api/v1beta1/go_http_client/experiment_model/googlerpc_status.go b/backend/api/v1beta1/go_http_client/experiment_model/googlerpc_status.go new file mode 100644 index 00000000000..e8c96276d08 --- /dev/null +++ b/backend/api/v1beta1/go_http_client/experiment_model/googlerpc_status.go @@ -0,0 +1,127 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package experiment_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// GooglerpcStatus googlerpc status +// +// swagger:model googlerpcStatus +type GooglerpcStatus struct { + + // code + Code int32 `json:"code,omitempty"` + + // details + Details []*ProtobufAny `json:"details"` + + // message + Message string `json:"message,omitempty"` +} + +// Validate validates this googlerpc status +func (m *GooglerpcStatus) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateDetails(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) validateDetails(formats strfmt.Registry) error { + if swag.IsZero(m.Details) { // not required + return nil + } + + for i := 0; i < len(m.Details); i++ { + if swag.IsZero(m.Details[i]) { // not required + continue + } + + if m.Details[i] != nil { + if err := m.Details[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." 
+ strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this googlerpc status based on the context it is used +func (m *GooglerpcStatus) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateDetails(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) contextValidateDetails(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Details); i++ { + + if m.Details[i] != nil { + + if swag.IsZero(m.Details[i]) { // not required + return nil + } + + if err := m.Details[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *GooglerpcStatus) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *GooglerpcStatus) UnmarshalBinary(b []byte) error { + var res GooglerpcStatus + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v1beta1/go_http_client/experiment_model/protobuf_any.go b/backend/api/v1beta1/go_http_client/experiment_model/protobuf_any.go index 3c40be98525..ed8315b7a0f 100644 --- a/backend/api/v1beta1/go_http_client/experiment_model/protobuf_any.go +++ b/backend/api/v1beta1/go_http_client/experiment_model/protobuf_any.go @@ -6,9 +6,10 @@ package experiment_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "encoding/json" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) @@ -20,45 +21,49 @@ import ( // // Example 1: Pack and unpack a message in C++. // -// Foo foo = ...; -// Any any; -// any.PackFrom(foo); -// ... -// if (any.UnpackTo(&foo)) { -// ... -// } +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } // // Example 2: Pack and unpack a message in Java. // -// Foo foo = ...; -// Any any = Any.pack(foo); -// ... -// if (any.is(Foo.class)) { -// foo = any.unpack(Foo.class); -// } -// -// Example 3: Pack and unpack a message in Python. -// -// foo = Foo(...) -// any = Any() -// any.Pack(foo) -// ... -// if any.Is(Foo.DESCRIPTOR): -// any.Unpack(foo) -// ... -// -// Example 4: Pack and unpack a message in Go -// -// foo := &pb.Foo{...} -// any, err := anypb.New(foo) -// if err != nil { -// ... -// } -// ... -// foo := &pb.Foo{} -// if err := any.UnmarshalTo(foo); err != nil { -// ... -// } +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// // or ... +// if (any.isSameTypeAs(Foo.getDefaultInstance())) { +// foo = any.unpack(Foo.getDefaultInstance()); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... 
+// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := anypb.New(foo) +// if err != nil { +// ... +// } +// ... +// foo := &pb.Foo{} +// if err := any.UnmarshalTo(foo); err != nil { +// ... +// } // // The pack methods provided by protobuf library will by default use // 'type.googleapis.com/full.type.name' as the type URL and the unpack @@ -66,34 +71,34 @@ import ( // in the type URL, for example "foo.bar.com/x/y.z" will yield type // name "y.z". // -// // JSON -// +// ==== // The JSON representation of an `Any` value uses the regular // representation of the deserialized, embedded message, with an // additional field `@type` which contains the type URL. Example: // -// package google.profile; -// message Person { -// string first_name = 1; -// string last_name = 2; -// } +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } // -// { -// "@type": "type.googleapis.com/google.profile.Person", -// "firstName": , -// "lastName": -// } +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } // // If the embedded message type is well-known and has a custom JSON // representation, that representation will be embedded adding a field // `value` which holds the custom JSON in addition to the `@type` // field. Example (for message [google.protobuf.Duration][]): // -// { -// "@type": "type.googleapis.com/google.protobuf.Duration", -// "value": "1.212s" -// } +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// // swagger:model protobufAny type ProtobufAny struct { @@ -120,39 +125,151 @@ type ProtobufAny struct { // // Note: this functionality is not currently available in the official // protobuf release, and it is not used for type URLs beginning with - // type.googleapis.com. + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. // // Schemes other than `http`, `https` (or the empty scheme) might be // used with implementation specific semantics. - TypeURL string `json:"type_url,omitempty"` + AtType string `json:"@type,omitempty"` - // Must be a valid serialized protocol buffer of the above specified type. - // Format: byte - Value strfmt.Base64 `json:"value,omitempty"` + // protobuf any + ProtobufAny map[string]interface{} `json:"-"` } -// Validate validates this protobuf any -func (m *ProtobufAny) Validate(formats strfmt.Registry) error { - var res []error +// UnmarshalJSON unmarshals this object with additional properties from JSON +func (m *ProtobufAny) UnmarshalJSON(data []byte) error { + // stage 1, bind the properties + var stage1 struct { + + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. 
+ // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + AtType string `json:"@type,omitempty"` + } + if err := json.Unmarshal(data, &stage1); err != nil { + return err + } + var rcv ProtobufAny + + rcv.AtType = stage1.AtType + *m = rcv - if err := m.validateValue(formats); err != nil { - res = append(res, err) + // stage 2, remove properties and add to map + stage2 := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &stage2); err != nil { + return err } - if len(res) > 0 { - return errors.CompositeValidationError(res...) + delete(stage2, "@type") + // stage 3, add additional properties values + if len(stage2) > 0 { + result := make(map[string]interface{}) + for k, v := range stage2 { + var toadd interface{} + if err := json.Unmarshal(v, &toadd); err != nil { + return err + } + result[k] = toadd + } + m.ProtobufAny = result } + return nil } -func (m *ProtobufAny) validateValue(formats strfmt.Registry) error { +// MarshalJSON marshals this object with additional properties into a JSON object +func (m ProtobufAny) MarshalJSON() ([]byte, error) { + var stage1 struct { - if swag.IsZero(m.Value) { // not required - return nil + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. 
+ AtType string `json:"@type,omitempty"` } - // Format "byte" (base64 string) is already validated when unmarshalled + stage1.AtType = m.AtType + + // make JSON object for known properties + props, err := json.Marshal(stage1) + if err != nil { + return nil, err + } + + if len(m.ProtobufAny) == 0 { // no additional properties + return props, nil + } + + // make JSON object for the additional properties + additional, err := json.Marshal(m.ProtobufAny) + if err != nil { + return nil, err + } + + if len(props) < 3 { // "{}": only additional properties + return additional, nil + } + + // concatenate the 2 objects + return swag.ConcatJSON(props, additional), nil +} + +// Validate validates this protobuf any +func (m *ProtobufAny) Validate(formats strfmt.Registry) error { + return nil +} +// ContextValidate validates this protobuf any based on context it is used +func (m *ProtobufAny) ContextValidate(ctx context.Context, formats strfmt.Registry) error { return nil } diff --git a/backend/api/v1beta1/go_http_client/healthz_client/healthz_client.go b/backend/api/v1beta1/go_http_client/healthz_client/healthz_client.go index 51428ac4172..b409c349d32 100644 --- a/backend/api/v1beta1/go_http_client/healthz_client/healthz_client.go +++ b/backend/api/v1beta1/go_http_client/healthz_client/healthz_client.go @@ -8,8 +8,7 @@ package healthz_client import ( "github.com/go-openapi/runtime" httptransport "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/healthz_client/healthz_service" ) @@ -56,9 +55,7 @@ func New(transport runtime.ClientTransport, formats strfmt.Registry) *Healthz { cli := new(Healthz) cli.Transport = transport - cli.HealthzService = healthz_service.New(transport, formats) - return cli } @@ -103,7 +100,7 @@ func (cfg *TransportConfig) WithSchemes(schemes []string) *TransportConfig { // Healthz is a client for healthz type Healthz struct { - HealthzService *healthz_service.Client + HealthzService healthz_service.ClientService Transport runtime.ClientTransport } @@ -111,7 +108,5 @@ type Healthz struct { // SetTransport changes the transport on the client and all its subresources func (c *Healthz) SetTransport(transport runtime.ClientTransport) { c.Transport = transport - c.HealthzService.SetTransport(transport) - } diff --git a/backend/api/v1beta1/go_http_client/healthz_client/healthz_service/healthz_service_client.go b/backend/api/v1beta1/go_http_client/healthz_client/healthz_service/healthz_service_client.go index ec1bd7fd295..4deb9725771 100644 --- a/backend/api/v1beta1/go_http_client/healthz_client/healthz_service/healthz_service_client.go +++ b/backend/api/v1beta1/go_http_client/healthz_client/healthz_service/healthz_service_client.go @@ -7,15 +7,40 @@ package healthz_service import ( "github.com/go-openapi/runtime" - - strfmt "github.com/go-openapi/strfmt" + httptransport "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" ) // New creates a new healthz service API client. -func New(transport runtime.ClientTransport, formats strfmt.Registry) *Client { +func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService { return &Client{transport: transport, formats: formats} } +// New creates a new healthz service API client with basic auth credentials. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). 
+// - scheme: http scheme ("http", "https"). +// - user: user for basic authentication header. +// - password: password for basic authentication header. +func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BasicAuth(user, password) + return &Client{transport: transport, formats: strfmt.Default} +} + +// New creates a new healthz service API client with a bearer token for authentication. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - bearerToken: bearer token for Bearer authentication header. +func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BearerToken(bearerToken) + return &Client{transport: transport, formats: strfmt.Default} +} + /* Client for healthz service API */ @@ -24,16 +49,25 @@ type Client struct { formats strfmt.Registry } +// ClientOption may be used to customize the behavior of Client methods. +type ClientOption func(*runtime.ClientOperation) + +// ClientService is the interface for Client methods +type ClientService interface { + HealthzServiceGetHealthz(params *HealthzServiceGetHealthzParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*HealthzServiceGetHealthzOK, error) + + SetTransport(transport runtime.ClientTransport) +} + /* HealthzServiceGetHealthz gets healthz data */ -func (a *Client) HealthzServiceGetHealthz(params *HealthzServiceGetHealthzParams, authInfo runtime.ClientAuthInfoWriter) (*HealthzServiceGetHealthzOK, error) { +func (a *Client) HealthzServiceGetHealthz(params *HealthzServiceGetHealthzParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*HealthzServiceGetHealthzOK, error) { // TODO: Validate the params before sending if params == nil { params = NewHealthzServiceGetHealthzParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "HealthzService_GetHealthz", Method: "GET", PathPattern: "/apis/v1beta1/healthz", @@ -45,12 +79,22 @@ func (a *Client) HealthzServiceGetHealthz(params *HealthzServiceGetHealthzParams AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*HealthzServiceGetHealthzOK), nil - + success, ok := result.(*HealthzServiceGetHealthzOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*HealthzServiceGetHealthzDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } // SetTransport changes the transport on the client diff --git a/backend/api/v1beta1/go_http_client/healthz_client/healthz_service/healthz_service_get_healthz_parameters.go b/backend/api/v1beta1/go_http_client/healthz_client/healthz_service/healthz_service_get_healthz_parameters.go index cf0c78296ab..4cb71a4238b 100644 --- a/backend/api/v1beta1/go_http_client/healthz_client/healthz_service/healthz_service_get_healthz_parameters.go +++ 
b/backend/api/v1beta1/go_http_client/healthz_client/healthz_service/healthz_service_get_healthz_parameters.go @@ -13,51 +13,51 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewHealthzServiceGetHealthzParams creates a new HealthzServiceGetHealthzParams object -// with the default values initialized. +// NewHealthzServiceGetHealthzParams creates a new HealthzServiceGetHealthzParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewHealthzServiceGetHealthzParams() *HealthzServiceGetHealthzParams { - return &HealthzServiceGetHealthzParams{ - timeout: cr.DefaultTimeout, } } // NewHealthzServiceGetHealthzParamsWithTimeout creates a new HealthzServiceGetHealthzParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewHealthzServiceGetHealthzParamsWithTimeout(timeout time.Duration) *HealthzServiceGetHealthzParams { - return &HealthzServiceGetHealthzParams{ - timeout: timeout, } } // NewHealthzServiceGetHealthzParamsWithContext creates a new HealthzServiceGetHealthzParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewHealthzServiceGetHealthzParamsWithContext(ctx context.Context) *HealthzServiceGetHealthzParams { - return &HealthzServiceGetHealthzParams{ - Context: ctx, } } // NewHealthzServiceGetHealthzParamsWithHTTPClient creates a new HealthzServiceGetHealthzParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewHealthzServiceGetHealthzParamsWithHTTPClient(client *http.Client) *HealthzServiceGetHealthzParams { - return &HealthzServiceGetHealthzParams{ HTTPClient: client, } } -/*HealthzServiceGetHealthzParams contains all the parameters to send to the API endpoint -for the healthz service get healthz operation typically these are written to a http.Request +/* +HealthzServiceGetHealthzParams contains all the parameters to send to the API endpoint + + for the healthz service get healthz operation. + + Typically these are written to a http.Request. */ type HealthzServiceGetHealthzParams struct { timeout time.Duration @@ -65,6 +65,21 @@ type HealthzServiceGetHealthzParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the healthz service get healthz params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *HealthzServiceGetHealthzParams) WithDefaults() *HealthzServiceGetHealthzParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the healthz service get healthz params (not the query body). +// +// All values with no default are reset to their zero value. 
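// ---------------------------------------------------------------------------
// Illustrative sketch (editorial, not part of the generated diff): exercising
// the regenerated healthz client through the new NewClientWithBearerToken
// constructor, the ClientService interface, and the typed response helpers
// (IsServerError, String, GetPayload) added in the surrounding hunks. The
// host, base path, scheme and token values are placeholders.
package kfpexamples

import (
	"log"
	"time"

	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/healthz_client/healthz_service"
)

func checkHealthz() {
	svc := healthz_service.NewClientWithBearerToken("localhost:8888", "/", "http", "placeholder-token")

	params := healthz_service.NewHealthzServiceGetHealthzParams().
		WithDefaults().
		WithTimeout(10 * time.Second)

	resp, err := svc.HealthzServiceGetHealthz(params, nil)
	if err != nil {
		// The default (error) response now wraps *healthz_model.GooglerpcStatus.
		if def, ok := err.(*healthz_service.HealthzServiceGetHealthzDefault); ok && def.IsServerError() {
			log.Printf("healthz server error: %s", def.String())
			return
		}
		log.Printf("healthz failed: %v", err)
		return
	}
	log.Printf("healthz OK: %s", resp.String())
}
// ---------------------------------------------------------------------------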
+func (o *HealthzServiceGetHealthzParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the healthz service get healthz params func (o *HealthzServiceGetHealthzParams) WithTimeout(timeout time.Duration) *HealthzServiceGetHealthzParams { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/healthz_client/healthz_service/healthz_service_get_healthz_responses.go b/backend/api/v1beta1/go_http_client/healthz_client/healthz_service/healthz_service_get_healthz_responses.go index 3bef0bd962c..1486596d75d 100644 --- a/backend/api/v1beta1/go_http_client/healthz_client/healthz_service/healthz_service_get_healthz_responses.go +++ b/backend/api/v1beta1/go_http_client/healthz_client/healthz_service/healthz_service_get_healthz_responses.go @@ -6,14 +6,14 @@ package healthz_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - healthz_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/healthz_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/healthz_model" ) // HealthzServiceGetHealthzReader is a Reader for the HealthzServiceGetHealthz structure. @@ -24,14 +24,12 @@ type HealthzServiceGetHealthzReader struct { // ReadResponse reads a server response into the received o. func (o *HealthzServiceGetHealthzReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewHealthzServiceGetHealthzOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewHealthzServiceGetHealthzDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewHealthzServiceGetHealthzOK() *HealthzServiceGetHealthzOK { return &HealthzServiceGetHealthzOK{} } -/*HealthzServiceGetHealthzOK handles this case with default header values. +/* +HealthzServiceGetHealthzOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type HealthzServiceGetHealthzOK struct { Payload *healthz_model.APIGetHealthzResponse } +// IsSuccess returns true when this healthz service get healthz o k response has a 2xx status code +func (o *HealthzServiceGetHealthzOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this healthz service get healthz o k response has a 3xx status code +func (o *HealthzServiceGetHealthzOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this healthz service get healthz o k response has a 4xx status code +func (o *HealthzServiceGetHealthzOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this healthz service get healthz o k response has a 5xx status code +func (o *HealthzServiceGetHealthzOK) IsServerError() bool { + return false +} + +// IsCode returns true when this healthz service get healthz o k response a status code equal to that given +func (o *HealthzServiceGetHealthzOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the healthz service get healthz o k response +func (o *HealthzServiceGetHealthzOK) Code() int { + return 200 +} + func (o *HealthzServiceGetHealthzOK) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/healthz][%d] healthzServiceGetHealthzOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/healthz][%d] healthzServiceGetHealthzOK %s", 200, payload) +} + +func (o *HealthzServiceGetHealthzOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/healthz][%d] healthzServiceGetHealthzOK %s", 200, payload) +} + +func (o *HealthzServiceGetHealthzOK) GetPayload() *healthz_model.APIGetHealthzResponse { + return o.Payload } func (o *HealthzServiceGetHealthzOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewHealthzServiceGetHealthzDefault(code int) *HealthzServiceGetHealthzDefau } } -/*HealthzServiceGetHealthzDefault handles this case with default header values. +/* +HealthzServiceGetHealthzDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type HealthzServiceGetHealthzDefault struct { _statusCode int - Payload *healthz_model.GatewayruntimeError + Payload *healthz_model.GooglerpcStatus +} + +// IsSuccess returns true when this healthz service get healthz default response has a 2xx status code +func (o *HealthzServiceGetHealthzDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this healthz service get healthz default response has a 3xx status code +func (o *HealthzServiceGetHealthzDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this healthz service get healthz default response has a 4xx status code +func (o *HealthzServiceGetHealthzDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this healthz service get healthz default response has a 5xx status code +func (o *HealthzServiceGetHealthzDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this healthz service get healthz default response a status code equal to that given +func (o *HealthzServiceGetHealthzDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the healthz service get healthz default response @@ -96,12 +161,22 @@ func (o *HealthzServiceGetHealthzDefault) Code() int { } func (o *HealthzServiceGetHealthzDefault) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/healthz][%d] HealthzService_GetHealthz default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/healthz][%d] HealthzService_GetHealthz default %s", o._statusCode, payload) +} + +func (o *HealthzServiceGetHealthzDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/healthz][%d] HealthzService_GetHealthz default %s", o._statusCode, payload) +} + +func (o *HealthzServiceGetHealthzDefault) GetPayload() *healthz_model.GooglerpcStatus { + return o.Payload } func (o *HealthzServiceGetHealthzDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(healthz_model.GatewayruntimeError) + o.Payload = new(healthz_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/healthz_model/api_get_healthz_response.go b/backend/api/v1beta1/go_http_client/healthz_model/api_get_healthz_response.go index 45943fb9ead..03f6d025137 100644 --- a/backend/api/v1beta1/go_http_client/healthz_model/api_get_healthz_response.go +++ b/backend/api/v1beta1/go_http_client/healthz_model/api_get_healthz_response.go @@ -6,12 +6,14 @@ package healthz_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIGetHealthzResponse api get healthz response +// // swagger:model apiGetHealthzResponse type APIGetHealthzResponse struct { @@ -24,6 +26,11 @@ func (m *APIGetHealthzResponse) Validate(formats strfmt.Registry) error { return nil } +// ContextValidate validates this api get healthz response based on context it is used +func (m *APIGetHealthzResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *APIGetHealthzResponse) MarshalBinary() ([]byte, 
error) { if m == nil { diff --git a/backend/api/v1beta1/go_http_client/healthz_model/gatewayruntime_error.go b/backend/api/v1beta1/go_http_client/healthz_model/gatewayruntime_error.go deleted file mode 100644 index 20d3d613e97..00000000000 --- a/backend/api/v1beta1/go_http_client/healthz_model/gatewayruntime_error.go +++ /dev/null @@ -1,89 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package healthz_model - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "strconv" - - strfmt "github.com/go-openapi/strfmt" - - "github.com/go-openapi/errors" - "github.com/go-openapi/swag" -) - -// GatewayruntimeError gatewayruntime error -// swagger:model gatewayruntimeError -type GatewayruntimeError struct { - - // code - Code int32 `json:"code,omitempty"` - - // details - Details []*ProtobufAny `json:"details"` - - // error - Error string `json:"error,omitempty"` - - // message - Message string `json:"message,omitempty"` -} - -// Validate validates this gatewayruntime error -func (m *GatewayruntimeError) Validate(formats strfmt.Registry) error { - var res []error - - if err := m.validateDetails(formats); err != nil { - res = append(res, err) - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} - -func (m *GatewayruntimeError) validateDetails(formats strfmt.Registry) error { - - if swag.IsZero(m.Details) { // not required - return nil - } - - for i := 0; i < len(m.Details); i++ { - if swag.IsZero(m.Details[i]) { // not required - continue - } - - if m.Details[i] != nil { - if err := m.Details[i].Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("details" + "." + strconv.Itoa(i)) - } - return err - } - } - - } - - return nil -} - -// MarshalBinary interface implementation -func (m *GatewayruntimeError) MarshalBinary() ([]byte, error) { - if m == nil { - return nil, nil - } - return swag.WriteJSON(m) -} - -// UnmarshalBinary interface implementation -func (m *GatewayruntimeError) UnmarshalBinary(b []byte) error { - var res GatewayruntimeError - if err := swag.ReadJSON(b, &res); err != nil { - return err - } - *m = res - return nil -} diff --git a/backend/api/v1beta1/go_http_client/healthz_model/googlerpc_status.go b/backend/api/v1beta1/go_http_client/healthz_model/googlerpc_status.go new file mode 100644 index 00000000000..5baf4074d20 --- /dev/null +++ b/backend/api/v1beta1/go_http_client/healthz_model/googlerpc_status.go @@ -0,0 +1,127 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package healthz_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// GooglerpcStatus googlerpc status +// +// swagger:model googlerpcStatus +type GooglerpcStatus struct { + + // code + Code int32 `json:"code,omitempty"` + + // details + Details []*ProtobufAny `json:"details"` + + // message + Message string `json:"message,omitempty"` +} + +// Validate validates this googlerpc status +func (m *GooglerpcStatus) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateDetails(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *GooglerpcStatus) validateDetails(formats strfmt.Registry) error { + if swag.IsZero(m.Details) { // not required + return nil + } + + for i := 0; i < len(m.Details); i++ { + if swag.IsZero(m.Details[i]) { // not required + continue + } + + if m.Details[i] != nil { + if err := m.Details[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this googlerpc status based on the context it is used +func (m *GooglerpcStatus) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateDetails(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) contextValidateDetails(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Details); i++ { + + if m.Details[i] != nil { + + if swag.IsZero(m.Details[i]) { // not required + return nil + } + + if err := m.Details[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *GooglerpcStatus) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *GooglerpcStatus) UnmarshalBinary(b []byte) error { + var res GooglerpcStatus + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v1beta1/go_http_client/healthz_model/protobuf_any.go b/backend/api/v1beta1/go_http_client/healthz_model/protobuf_any.go index cc274ef6e58..9695ae066c7 100644 --- a/backend/api/v1beta1/go_http_client/healthz_model/protobuf_any.go +++ b/backend/api/v1beta1/go_http_client/healthz_model/protobuf_any.go @@ -6,9 +6,10 @@ package healthz_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "encoding/json" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) @@ -20,45 +21,49 @@ import ( // // Example 1: Pack and unpack a message in C++. // -// Foo foo = ...; -// Any any; -// any.PackFrom(foo); -// ... -// if (any.UnpackTo(&foo)) { -// ... -// } +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } // // Example 2: Pack and unpack a message in Java. // -// Foo foo = ...; -// Any any = Any.pack(foo); -// ... -// if (any.is(Foo.class)) { -// foo = any.unpack(Foo.class); -// } -// -// Example 3: Pack and unpack a message in Python. -// -// foo = Foo(...) -// any = Any() -// any.Pack(foo) -// ... -// if any.Is(Foo.DESCRIPTOR): -// any.Unpack(foo) -// ... -// -// Example 4: Pack and unpack a message in Go -// -// foo := &pb.Foo{...} -// any, err := anypb.New(foo) -// if err != nil { -// ... -// } -// ... -// foo := &pb.Foo{} -// if err := any.UnmarshalTo(foo); err != nil { -// ... 
-// } +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// // or ... +// if (any.isSameTypeAs(Foo.getDefaultInstance())) { +// foo = any.unpack(Foo.getDefaultInstance()); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... +// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := anypb.New(foo) +// if err != nil { +// ... +// } +// ... +// foo := &pb.Foo{} +// if err := any.UnmarshalTo(foo); err != nil { +// ... +// } // // The pack methods provided by protobuf library will by default use // 'type.googleapis.com/full.type.name' as the type URL and the unpack @@ -66,34 +71,34 @@ import ( // in the type URL, for example "foo.bar.com/x/y.z" will yield type // name "y.z". // -// // JSON -// +// ==== // The JSON representation of an `Any` value uses the regular // representation of the deserialized, embedded message, with an // additional field `@type` which contains the type URL. Example: // -// package google.profile; -// message Person { -// string first_name = 1; -// string last_name = 2; -// } +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } // -// { -// "@type": "type.googleapis.com/google.profile.Person", -// "firstName": , -// "lastName": -// } +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } // // If the embedded message type is well-known and has a custom JSON // representation, that representation will be embedded adding a field // `value` which holds the custom JSON in addition to the `@type` // field. Example (for message [google.protobuf.Duration][]): // -// { -// "@type": "type.googleapis.com/google.protobuf.Duration", -// "value": "1.212s" -// } +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// // swagger:model protobufAny type ProtobufAny struct { @@ -120,39 +125,151 @@ type ProtobufAny struct { // // Note: this functionality is not currently available in the official // protobuf release, and it is not used for type URLs beginning with - // type.googleapis.com. + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. // // Schemes other than `http`, `https` (or the empty scheme) might be // used with implementation specific semantics. - TypeURL string `json:"type_url,omitempty"` + AtType string `json:"@type,omitempty"` - // Must be a valid serialized protocol buffer of the above specified type. - // Format: byte - Value strfmt.Base64 `json:"value,omitempty"` + // protobuf any + ProtobufAny map[string]interface{} `json:"-"` } -// Validate validates this protobuf any -func (m *ProtobufAny) Validate(formats strfmt.Registry) error { - var res []error +// UnmarshalJSON unmarshals this object with additional properties from JSON +func (m *ProtobufAny) UnmarshalJSON(data []byte) error { + // stage 1, bind the properties + var stage1 struct { + + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." 
is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + AtType string `json:"@type,omitempty"` + } + if err := json.Unmarshal(data, &stage1); err != nil { + return err + } + var rcv ProtobufAny + + rcv.AtType = stage1.AtType + *m = rcv - if err := m.validateValue(formats); err != nil { - res = append(res, err) + // stage 2, remove properties and add to map + stage2 := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &stage2); err != nil { + return err } - if len(res) > 0 { - return errors.CompositeValidationError(res...) + delete(stage2, "@type") + // stage 3, add additional properties values + if len(stage2) > 0 { + result := make(map[string]interface{}) + for k, v := range stage2 { + var toadd interface{} + if err := json.Unmarshal(v, &toadd); err != nil { + return err + } + result[k] = toadd + } + m.ProtobufAny = result } + return nil } -func (m *ProtobufAny) validateValue(formats strfmt.Registry) error { +// MarshalJSON marshals this object with additional properties into a JSON object +func (m ProtobufAny) MarshalJSON() ([]byte, error) { + var stage1 struct { - if swag.IsZero(m.Value) { // not required - return nil + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) 
+ // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + AtType string `json:"@type,omitempty"` } - // Format "byte" (base64 string) is already validated when unmarshalled + stage1.AtType = m.AtType + + // make JSON object for known properties + props, err := json.Marshal(stage1) + if err != nil { + return nil, err + } + + if len(m.ProtobufAny) == 0 { // no additional properties + return props, nil + } + + // make JSON object for the additional properties + additional, err := json.Marshal(m.ProtobufAny) + if err != nil { + return nil, err + } + + if len(props) < 3 { // "{}": only additional properties + return additional, nil + } + + // concatenate the 2 objects + return swag.ConcatJSON(props, additional), nil +} + +// Validate validates this protobuf any +func (m *ProtobufAny) Validate(formats strfmt.Registry) error { + return nil +} +// ContextValidate validates this protobuf any based on context it is used +func (m *ProtobufAny) ContextValidate(ctx context.Context, formats strfmt.Registry) error { return nil } diff --git a/backend/api/v1beta1/go_http_client/job_client/job_client.go b/backend/api/v1beta1/go_http_client/job_client/job_client.go index d6b0cbfc0c5..839c19d0257 100644 --- a/backend/api/v1beta1/go_http_client/job_client/job_client.go +++ b/backend/api/v1beta1/go_http_client/job_client/job_client.go @@ -8,8 +8,7 @@ package job_client import ( "github.com/go-openapi/runtime" httptransport "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/job_client/job_service" ) @@ -56,9 +55,7 @@ func New(transport runtime.ClientTransport, formats strfmt.Registry) *Job { cli := new(Job) cli.Transport = transport - cli.JobService = job_service.New(transport, formats) - return cli } @@ -103,7 +100,7 @@ func (cfg *TransportConfig) WithSchemes(schemes []string) *TransportConfig { // Job is a client for job type Job struct { - JobService *job_service.Client + JobService job_service.ClientService Transport runtime.ClientTransport } @@ -111,7 +108,5 @@ type Job struct { // SetTransport changes the transport on the client and all its subresources func (c *Job) SetTransport(transport runtime.ClientTransport) { c.Transport = transport - c.JobService.SetTransport(transport) - } diff --git a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_client.go b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_client.go index 1e10da12d75..ed39efcb98d 100644 --- a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_client.go +++ b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_client.go @@ -7,15 +7,40 @@ package job_service import ( "github.com/go-openapi/runtime" - - strfmt "github.com/go-openapi/strfmt" + httptransport "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" ) // New creates a new job service API client. 
-func New(transport runtime.ClientTransport, formats strfmt.Registry) *Client { +func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService { return &Client{transport: transport, formats: formats} } +// New creates a new job service API client with basic auth credentials. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - user: user for basic authentication header. +// - password: password for basic authentication header. +func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BasicAuth(user, password) + return &Client{transport: transport, formats: strfmt.Default} +} + +// New creates a new job service API client with a bearer token for authentication. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - bearerToken: bearer token for Bearer authentication header. +func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BearerToken(bearerToken) + return &Client{transport: transport, formats: strfmt.Default} +} + /* Client for job service API */ @@ -24,16 +49,35 @@ type Client struct { formats strfmt.Registry } +// ClientOption may be used to customize the behavior of Client methods. +type ClientOption func(*runtime.ClientOperation) + +// ClientService is the interface for Client methods +type ClientService interface { + JobServiceCreateJob(params *JobServiceCreateJobParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*JobServiceCreateJobOK, error) + + JobServiceDeleteJob(params *JobServiceDeleteJobParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*JobServiceDeleteJobOK, error) + + JobServiceDisableJob(params *JobServiceDisableJobParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*JobServiceDisableJobOK, error) + + JobServiceEnableJob(params *JobServiceEnableJobParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*JobServiceEnableJobOK, error) + + JobServiceGetJob(params *JobServiceGetJobParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*JobServiceGetJobOK, error) + + JobServiceListJobs(params *JobServiceListJobsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*JobServiceListJobsOK, error) + + SetTransport(transport runtime.ClientTransport) +} + /* JobServiceCreateJob creates a new job */ -func (a *Client) JobServiceCreateJob(params *JobServiceCreateJobParams, authInfo runtime.ClientAuthInfoWriter) (*JobServiceCreateJobOK, error) { +func (a *Client) JobServiceCreateJob(params *JobServiceCreateJobParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*JobServiceCreateJobOK, error) { // TODO: Validate the params before sending if params == nil { params = NewJobServiceCreateJobParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "JobService_CreateJob", Method: "POST", PathPattern: "/apis/v1beta1/jobs", @@ -45,24 +89,33 @@ func (a *Client) JobServiceCreateJob(params *JobServiceCreateJobParams, authInfo AuthInfo: 
authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*JobServiceCreateJobOK), nil - + success, ok := result.(*JobServiceCreateJobOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*JobServiceCreateJobDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* JobServiceDeleteJob deletes a job */ -func (a *Client) JobServiceDeleteJob(params *JobServiceDeleteJobParams, authInfo runtime.ClientAuthInfoWriter) (*JobServiceDeleteJobOK, error) { +func (a *Client) JobServiceDeleteJob(params *JobServiceDeleteJobParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*JobServiceDeleteJobOK, error) { // TODO: Validate the params before sending if params == nil { params = NewJobServiceDeleteJobParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "JobService_DeleteJob", Method: "DELETE", PathPattern: "/apis/v1beta1/jobs/{id}", @@ -74,24 +127,33 @@ func (a *Client) JobServiceDeleteJob(params *JobServiceDeleteJobParams, authInfo AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*JobServiceDeleteJobOK), nil - + success, ok := result.(*JobServiceDeleteJobOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*JobServiceDeleteJobDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* JobServiceDisableJob stops a job and all its associated runs the job is not deleted */ -func (a *Client) JobServiceDisableJob(params *JobServiceDisableJobParams, authInfo runtime.ClientAuthInfoWriter) (*JobServiceDisableJobOK, error) { +func (a *Client) JobServiceDisableJob(params *JobServiceDisableJobParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*JobServiceDisableJobOK, error) { // TODO: Validate the params before sending if params == nil { params = NewJobServiceDisableJobParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "JobService_DisableJob", Method: "POST", PathPattern: "/apis/v1beta1/jobs/{id}/disable", @@ -103,24 +165,33 @@ func (a *Client) JobServiceDisableJob(params *JobServiceDisableJobParams, authIn AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*JobServiceDisableJobOK), nil - + success, ok := result.(*JobServiceDisableJobOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*JobServiceDisableJobDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* JobServiceEnableJob restarts a job that was previously stopped all runs associated with the job will continue */ -func (a *Client) JobServiceEnableJob(params *JobServiceEnableJobParams, authInfo runtime.ClientAuthInfoWriter) 
(*JobServiceEnableJobOK, error) { +func (a *Client) JobServiceEnableJob(params *JobServiceEnableJobParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*JobServiceEnableJobOK, error) { // TODO: Validate the params before sending if params == nil { params = NewJobServiceEnableJobParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "JobService_EnableJob", Method: "POST", PathPattern: "/apis/v1beta1/jobs/{id}/enable", @@ -132,24 +203,33 @@ func (a *Client) JobServiceEnableJob(params *JobServiceEnableJobParams, authInfo AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*JobServiceEnableJobOK), nil - + success, ok := result.(*JobServiceEnableJobOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*JobServiceEnableJobDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* JobServiceGetJob finds a specific job by ID */ -func (a *Client) JobServiceGetJob(params *JobServiceGetJobParams, authInfo runtime.ClientAuthInfoWriter) (*JobServiceGetJobOK, error) { +func (a *Client) JobServiceGetJob(params *JobServiceGetJobParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*JobServiceGetJobOK, error) { // TODO: Validate the params before sending if params == nil { params = NewJobServiceGetJobParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "JobService_GetJob", Method: "GET", PathPattern: "/apis/v1beta1/jobs/{id}", @@ -161,24 +241,33 @@ func (a *Client) JobServiceGetJob(params *JobServiceGetJobParams, authInfo runti AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*JobServiceGetJobOK), nil - + success, ok := result.(*JobServiceGetJobOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*JobServiceGetJobDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* JobServiceListJobs finds all jobs */ -func (a *Client) JobServiceListJobs(params *JobServiceListJobsParams, authInfo runtime.ClientAuthInfoWriter) (*JobServiceListJobsOK, error) { +func (a *Client) JobServiceListJobs(params *JobServiceListJobsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*JobServiceListJobsOK, error) { // TODO: Validate the params before sending if params == nil { params = NewJobServiceListJobsParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "JobService_ListJobs", Method: "GET", PathPattern: "/apis/v1beta1/jobs", @@ -190,12 +279,22 @@ func (a *Client) JobServiceListJobs(params *JobServiceListJobsParams, authInfo r AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*JobServiceListJobsOK), nil - + success, ok := result.(*JobServiceListJobsOK) + if ok { + return success, nil + } + 
// unexpected success response + unexpectedSuccess := result.(*JobServiceListJobsDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } // SetTransport changes the transport on the client diff --git a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_create_job_parameters.go b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_create_job_parameters.go index e69a3f984ac..bde76e13ae2 100644 --- a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_create_job_parameters.go +++ b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_create_job_parameters.go @@ -13,67 +13,82 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - job_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/job_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/job_model" ) -// NewJobServiceCreateJobParams creates a new JobServiceCreateJobParams object -// with the default values initialized. +// NewJobServiceCreateJobParams creates a new JobServiceCreateJobParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewJobServiceCreateJobParams() *JobServiceCreateJobParams { - var () return &JobServiceCreateJobParams{ - timeout: cr.DefaultTimeout, } } // NewJobServiceCreateJobParamsWithTimeout creates a new JobServiceCreateJobParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewJobServiceCreateJobParamsWithTimeout(timeout time.Duration) *JobServiceCreateJobParams { - var () return &JobServiceCreateJobParams{ - timeout: timeout, } } // NewJobServiceCreateJobParamsWithContext creates a new JobServiceCreateJobParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewJobServiceCreateJobParamsWithContext(ctx context.Context) *JobServiceCreateJobParams { - var () return &JobServiceCreateJobParams{ - Context: ctx, } } // NewJobServiceCreateJobParamsWithHTTPClient creates a new JobServiceCreateJobParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewJobServiceCreateJobParamsWithHTTPClient(client *http.Client) *JobServiceCreateJobParams { - var () return &JobServiceCreateJobParams{ HTTPClient: client, } } -/*JobServiceCreateJobParams contains all the parameters to send to the API endpoint -for the job service create job operation typically these are written to a http.Request +/* +JobServiceCreateJobParams contains all the parameters to send to the API endpoint + + for the job service create job operation. + + Typically these are written to a http.Request. */ type JobServiceCreateJobParams struct { - /*Body - The job to be created + /* Job. 
+ The job to be created */ - Body *job_model.APIJob + Job *job_model.APIJob timeout time.Duration Context context.Context HTTPClient *http.Client } +// WithDefaults hydrates default values in the job service create job params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *JobServiceCreateJobParams) WithDefaults() *JobServiceCreateJobParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the job service create job params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *JobServiceCreateJobParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the job service create job params func (o *JobServiceCreateJobParams) WithTimeout(timeout time.Duration) *JobServiceCreateJobParams { o.SetTimeout(timeout) @@ -107,15 +122,15 @@ func (o *JobServiceCreateJobParams) SetHTTPClient(client *http.Client) { o.HTTPClient = client } -// WithBody adds the body to the job service create job params -func (o *JobServiceCreateJobParams) WithBody(body *job_model.APIJob) *JobServiceCreateJobParams { - o.SetBody(body) +// WithJob adds the job to the job service create job params +func (o *JobServiceCreateJobParams) WithJob(job *job_model.APIJob) *JobServiceCreateJobParams { + o.SetJob(job) return o } -// SetBody adds the body to the job service create job params -func (o *JobServiceCreateJobParams) SetBody(body *job_model.APIJob) { - o.Body = body +// SetJob adds the job to the job service create job params +func (o *JobServiceCreateJobParams) SetJob(job *job_model.APIJob) { + o.Job = job } // WriteToRequest writes these params to a swagger request @@ -125,9 +140,8 @@ func (o *JobServiceCreateJobParams) WriteToRequest(r runtime.ClientRequest, reg return err } var res []error - - if o.Body != nil { - if err := r.SetBodyParam(o.Body); err != nil { + if o.Job != nil { + if err := r.SetBodyParam(o.Job); err != nil { return err } } diff --git a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_create_job_responses.go b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_create_job_responses.go index 0d85017afc9..8fb439b36c9 100644 --- a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_create_job_responses.go +++ b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_create_job_responses.go @@ -6,14 +6,14 @@ package job_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - job_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/job_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/job_model" ) // JobServiceCreateJobReader is a Reader for the JobServiceCreateJob structure. @@ -24,14 +24,12 @@ type JobServiceCreateJobReader struct { // ReadResponse reads a server response into the received o. 
func (o *JobServiceCreateJobReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewJobServiceCreateJobOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewJobServiceCreateJobDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewJobServiceCreateJobOK() *JobServiceCreateJobOK { return &JobServiceCreateJobOK{} } -/*JobServiceCreateJobOK handles this case with default header values. +/* +JobServiceCreateJobOK describes a response with status code 200, with default header values. A successful response. */ @@ -57,8 +56,48 @@ type JobServiceCreateJobOK struct { Payload *job_model.APIJob } +// IsSuccess returns true when this job service create job o k response has a 2xx status code +func (o *JobServiceCreateJobOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this job service create job o k response has a 3xx status code +func (o *JobServiceCreateJobOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this job service create job o k response has a 4xx status code +func (o *JobServiceCreateJobOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this job service create job o k response has a 5xx status code +func (o *JobServiceCreateJobOK) IsServerError() bool { + return false +} + +// IsCode returns true when this job service create job o k response a status code equal to that given +func (o *JobServiceCreateJobOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the job service create job o k response +func (o *JobServiceCreateJobOK) Code() int { + return 200 +} + func (o *JobServiceCreateJobOK) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/jobs][%d] jobServiceCreateJobOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/jobs][%d] jobServiceCreateJobOK %s", 200, payload) +} + +func (o *JobServiceCreateJobOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/jobs][%d] jobServiceCreateJobOK %s", 200, payload) +} + +func (o *JobServiceCreateJobOK) GetPayload() *job_model.APIJob { + return o.Payload } func (o *JobServiceCreateJobOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewJobServiceCreateJobDefault(code int) *JobServiceCreateJobDefault { } } -/*JobServiceCreateJobDefault handles this case with default header values. +/* +JobServiceCreateJobDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type JobServiceCreateJobDefault struct { _statusCode int - Payload *job_model.GatewayruntimeError + Payload *job_model.GooglerpcStatus +} + +// IsSuccess returns true when this job service create job default response has a 2xx status code +func (o *JobServiceCreateJobDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this job service create job default response has a 3xx status code +func (o *JobServiceCreateJobDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this job service create job default response has a 4xx status code +func (o *JobServiceCreateJobDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this job service create job default response has a 5xx status code +func (o *JobServiceCreateJobDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this job service create job default response a status code equal to that given +func (o *JobServiceCreateJobDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the job service create job default response @@ -96,12 +161,22 @@ func (o *JobServiceCreateJobDefault) Code() int { } func (o *JobServiceCreateJobDefault) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/jobs][%d] JobService_CreateJob default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/jobs][%d] JobService_CreateJob default %s", o._statusCode, payload) +} + +func (o *JobServiceCreateJobDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/jobs][%d] JobService_CreateJob default %s", o._statusCode, payload) +} + +func (o *JobServiceCreateJobDefault) GetPayload() *job_model.GooglerpcStatus { + return o.Payload } func (o *JobServiceCreateJobDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(job_model.GatewayruntimeError) + o.Payload = new(job_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_delete_job_parameters.go b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_delete_job_parameters.go index c82311fa47c..299890039ae 100644 --- a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_delete_job_parameters.go +++ b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_delete_job_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewJobServiceDeleteJobParams creates a new JobServiceDeleteJobParams object -// with the default values initialized. +// NewJobServiceDeleteJobParams creates a new JobServiceDeleteJobParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewJobServiceDeleteJobParams() *JobServiceDeleteJobParams { - var () return &JobServiceDeleteJobParams{ - timeout: cr.DefaultTimeout, } } // NewJobServiceDeleteJobParamsWithTimeout creates a new JobServiceDeleteJobParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewJobServiceDeleteJobParamsWithTimeout(timeout time.Duration) *JobServiceDeleteJobParams { - var () return &JobServiceDeleteJobParams{ - timeout: timeout, } } // NewJobServiceDeleteJobParamsWithContext creates a new JobServiceDeleteJobParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewJobServiceDeleteJobParamsWithContext(ctx context.Context) *JobServiceDeleteJobParams { - var () return &JobServiceDeleteJobParams{ - Context: ctx, } } // NewJobServiceDeleteJobParamsWithHTTPClient creates a new JobServiceDeleteJobParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewJobServiceDeleteJobParamsWithHTTPClient(client *http.Client) *JobServiceDeleteJobParams { - var () return &JobServiceDeleteJobParams{ HTTPClient: client, } } -/*JobServiceDeleteJobParams contains all the parameters to send to the API endpoint -for the job service delete job operation typically these are written to a http.Request +/* +JobServiceDeleteJobParams contains all the parameters to send to the API endpoint + + for the job service delete job operation. + + Typically these are written to a http.Request. */ type JobServiceDeleteJobParams struct { - /*ID - The ID of the job to be deleted + /* ID. + The ID of the job to be deleted */ ID string @@ -72,6 +72,21 @@ type JobServiceDeleteJobParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the job service delete job params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *JobServiceDeleteJobParams) WithDefaults() *JobServiceDeleteJobParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the job service delete job params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *JobServiceDeleteJobParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the job service delete job params func (o *JobServiceDeleteJobParams) WithTimeout(timeout time.Duration) *JobServiceDeleteJobParams { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_delete_job_responses.go b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_delete_job_responses.go index bff35c13a40..3f827e999a4 100644 --- a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_delete_job_responses.go +++ b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_delete_job_responses.go @@ -6,14 +6,14 @@ package job_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - job_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/job_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/job_model" ) // JobServiceDeleteJobReader is a Reader for the JobServiceDeleteJob structure. @@ -24,14 +24,12 @@ type JobServiceDeleteJobReader struct { // ReadResponse reads a server response into the received o. func (o *JobServiceDeleteJobReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewJobServiceDeleteJobOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewJobServiceDeleteJobDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewJobServiceDeleteJobOK() *JobServiceDeleteJobOK { return &JobServiceDeleteJobOK{} } -/*JobServiceDeleteJobOK handles this case with default header values. +/* +JobServiceDeleteJobOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type JobServiceDeleteJobOK struct { Payload interface{} } +// IsSuccess returns true when this job service delete job o k response has a 2xx status code +func (o *JobServiceDeleteJobOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this job service delete job o k response has a 3xx status code +func (o *JobServiceDeleteJobOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this job service delete job o k response has a 4xx status code +func (o *JobServiceDeleteJobOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this job service delete job o k response has a 5xx status code +func (o *JobServiceDeleteJobOK) IsServerError() bool { + return false +} + +// IsCode returns true when this job service delete job o k response a status code equal to that given +func (o *JobServiceDeleteJobOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the job service delete job o k response +func (o *JobServiceDeleteJobOK) Code() int { + return 200 +} + func (o *JobServiceDeleteJobOK) Error() string { - return fmt.Sprintf("[DELETE /apis/v1beta1/jobs/{id}][%d] jobServiceDeleteJobOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v1beta1/jobs/{id}][%d] jobServiceDeleteJobOK %s", 200, payload) +} + +func (o *JobServiceDeleteJobOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v1beta1/jobs/{id}][%d] jobServiceDeleteJobOK %s", 200, payload) +} + +func (o *JobServiceDeleteJobOK) GetPayload() interface{} { + return o.Payload } func (o *JobServiceDeleteJobOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewJobServiceDeleteJobDefault(code int) *JobServiceDeleteJobDefault { } } -/*JobServiceDeleteJobDefault handles this case with default header values. +/* +JobServiceDeleteJobDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type JobServiceDeleteJobDefault struct { _statusCode int - Payload *job_model.GatewayruntimeError + Payload *job_model.GooglerpcStatus +} + +// IsSuccess returns true when this job service delete job default response has a 2xx status code +func (o *JobServiceDeleteJobDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this job service delete job default response has a 3xx status code +func (o *JobServiceDeleteJobDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this job service delete job default response has a 4xx status code +func (o *JobServiceDeleteJobDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this job service delete job default response has a 5xx status code +func (o *JobServiceDeleteJobDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this job service delete job default response a status code equal to that given +func (o *JobServiceDeleteJobDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the job service delete job default response @@ -94,12 +159,22 @@ func (o *JobServiceDeleteJobDefault) Code() int { } func (o *JobServiceDeleteJobDefault) Error() string { - return fmt.Sprintf("[DELETE /apis/v1beta1/jobs/{id}][%d] JobService_DeleteJob default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v1beta1/jobs/{id}][%d] JobService_DeleteJob default %s", o._statusCode, payload) +} + +func (o *JobServiceDeleteJobDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v1beta1/jobs/{id}][%d] JobService_DeleteJob default %s", o._statusCode, payload) +} + +func (o *JobServiceDeleteJobDefault) GetPayload() *job_model.GooglerpcStatus { + return o.Payload } func (o *JobServiceDeleteJobDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(job_model.GatewayruntimeError) + o.Payload = new(job_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_disable_job_parameters.go b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_disable_job_parameters.go index a8fef13570f..a96d252aee8 100644 --- a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_disable_job_parameters.go +++ b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_disable_job_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewJobServiceDisableJobParams creates a new JobServiceDisableJobParams object -// with the default values initialized. +// NewJobServiceDisableJobParams creates a new JobServiceDisableJobParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewJobServiceDisableJobParams() *JobServiceDisableJobParams { - var () return &JobServiceDisableJobParams{ - timeout: cr.DefaultTimeout, } } // NewJobServiceDisableJobParamsWithTimeout creates a new JobServiceDisableJobParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewJobServiceDisableJobParamsWithTimeout(timeout time.Duration) *JobServiceDisableJobParams { - var () return &JobServiceDisableJobParams{ - timeout: timeout, } } // NewJobServiceDisableJobParamsWithContext creates a new JobServiceDisableJobParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewJobServiceDisableJobParamsWithContext(ctx context.Context) *JobServiceDisableJobParams { - var () return &JobServiceDisableJobParams{ - Context: ctx, } } // NewJobServiceDisableJobParamsWithHTTPClient creates a new JobServiceDisableJobParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewJobServiceDisableJobParamsWithHTTPClient(client *http.Client) *JobServiceDisableJobParams { - var () return &JobServiceDisableJobParams{ HTTPClient: client, } } -/*JobServiceDisableJobParams contains all the parameters to send to the API endpoint -for the job service disable job operation typically these are written to a http.Request +/* +JobServiceDisableJobParams contains all the parameters to send to the API endpoint + + for the job service disable job operation. + + Typically these are written to a http.Request. */ type JobServiceDisableJobParams struct { - /*ID - The ID of the job to be disabled + /* ID. + The ID of the job to be disabled */ ID string @@ -72,6 +72,21 @@ type JobServiceDisableJobParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the job service disable job params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *JobServiceDisableJobParams) WithDefaults() *JobServiceDisableJobParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the job service disable job params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *JobServiceDisableJobParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the job service disable job params func (o *JobServiceDisableJobParams) WithTimeout(timeout time.Duration) *JobServiceDisableJobParams { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_disable_job_responses.go b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_disable_job_responses.go index 282ed575b9b..0ce9e1b3e24 100644 --- a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_disable_job_responses.go +++ b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_disable_job_responses.go @@ -6,14 +6,14 @@ package job_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - job_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/job_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/job_model" ) // JobServiceDisableJobReader is a Reader for the JobServiceDisableJob structure. @@ -24,14 +24,12 @@ type JobServiceDisableJobReader struct { // ReadResponse reads a server response into the received o. func (o *JobServiceDisableJobReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewJobServiceDisableJobOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewJobServiceDisableJobDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewJobServiceDisableJobOK() *JobServiceDisableJobOK { return &JobServiceDisableJobOK{} } -/*JobServiceDisableJobOK handles this case with default header values. +/* +JobServiceDisableJobOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type JobServiceDisableJobOK struct { Payload interface{} } +// IsSuccess returns true when this job service disable job o k response has a 2xx status code +func (o *JobServiceDisableJobOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this job service disable job o k response has a 3xx status code +func (o *JobServiceDisableJobOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this job service disable job o k response has a 4xx status code +func (o *JobServiceDisableJobOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this job service disable job o k response has a 5xx status code +func (o *JobServiceDisableJobOK) IsServerError() bool { + return false +} + +// IsCode returns true when this job service disable job o k response a status code equal to that given +func (o *JobServiceDisableJobOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the job service disable job o k response +func (o *JobServiceDisableJobOK) Code() int { + return 200 +} + func (o *JobServiceDisableJobOK) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/jobs/{id}/disable][%d] jobServiceDisableJobOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/jobs/{id}/disable][%d] jobServiceDisableJobOK %s", 200, payload) +} + +func (o *JobServiceDisableJobOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/jobs/{id}/disable][%d] jobServiceDisableJobOK %s", 200, payload) +} + +func (o *JobServiceDisableJobOK) GetPayload() interface{} { + return o.Payload } func (o *JobServiceDisableJobOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewJobServiceDisableJobDefault(code int) *JobServiceDisableJobDefault { } } -/*JobServiceDisableJobDefault handles this case with default header values. +/* +JobServiceDisableJobDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type JobServiceDisableJobDefault struct { _statusCode int - Payload *job_model.GatewayruntimeError + Payload *job_model.GooglerpcStatus +} + +// IsSuccess returns true when this job service disable job default response has a 2xx status code +func (o *JobServiceDisableJobDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this job service disable job default response has a 3xx status code +func (o *JobServiceDisableJobDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this job service disable job default response has a 4xx status code +func (o *JobServiceDisableJobDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this job service disable job default response has a 5xx status code +func (o *JobServiceDisableJobDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this job service disable job default response a status code equal to that given +func (o *JobServiceDisableJobDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the job service disable job default response @@ -94,12 +159,22 @@ func (o *JobServiceDisableJobDefault) Code() int { } func (o *JobServiceDisableJobDefault) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/jobs/{id}/disable][%d] JobService_DisableJob default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/jobs/{id}/disable][%d] JobService_DisableJob default %s", o._statusCode, payload) +} + +func (o *JobServiceDisableJobDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/jobs/{id}/disable][%d] JobService_DisableJob default %s", o._statusCode, payload) +} + +func (o *JobServiceDisableJobDefault) GetPayload() *job_model.GooglerpcStatus { + return o.Payload } func (o *JobServiceDisableJobDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(job_model.GatewayruntimeError) + o.Payload = new(job_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_enable_job_parameters.go b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_enable_job_parameters.go index 1fe5d10c977..651b1bc8f01 100644 --- a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_enable_job_parameters.go +++ b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_enable_job_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewJobServiceEnableJobParams creates a new JobServiceEnableJobParams object -// with the default values initialized. +// NewJobServiceEnableJobParams creates a new JobServiceEnableJobParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewJobServiceEnableJobParams() *JobServiceEnableJobParams { - var () return &JobServiceEnableJobParams{ - timeout: cr.DefaultTimeout, } } // NewJobServiceEnableJobParamsWithTimeout creates a new JobServiceEnableJobParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewJobServiceEnableJobParamsWithTimeout(timeout time.Duration) *JobServiceEnableJobParams { - var () return &JobServiceEnableJobParams{ - timeout: timeout, } } // NewJobServiceEnableJobParamsWithContext creates a new JobServiceEnableJobParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewJobServiceEnableJobParamsWithContext(ctx context.Context) *JobServiceEnableJobParams { - var () return &JobServiceEnableJobParams{ - Context: ctx, } } // NewJobServiceEnableJobParamsWithHTTPClient creates a new JobServiceEnableJobParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewJobServiceEnableJobParamsWithHTTPClient(client *http.Client) *JobServiceEnableJobParams { - var () return &JobServiceEnableJobParams{ HTTPClient: client, } } -/*JobServiceEnableJobParams contains all the parameters to send to the API endpoint -for the job service enable job operation typically these are written to a http.Request +/* +JobServiceEnableJobParams contains all the parameters to send to the API endpoint + + for the job service enable job operation. + + Typically these are written to a http.Request. */ type JobServiceEnableJobParams struct { - /*ID - The ID of the job to be enabled + /* ID. + The ID of the job to be enabled */ ID string @@ -72,6 +72,21 @@ type JobServiceEnableJobParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the job service enable job params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *JobServiceEnableJobParams) WithDefaults() *JobServiceEnableJobParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the job service enable job params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *JobServiceEnableJobParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the job service enable job params func (o *JobServiceEnableJobParams) WithTimeout(timeout time.Duration) *JobServiceEnableJobParams { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_enable_job_responses.go b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_enable_job_responses.go index 5fac7f83769..5935b2fc840 100644 --- a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_enable_job_responses.go +++ b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_enable_job_responses.go @@ -6,14 +6,14 @@ package job_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - job_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/job_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/job_model" ) // JobServiceEnableJobReader is a Reader for the JobServiceEnableJob structure. @@ -24,14 +24,12 @@ type JobServiceEnableJobReader struct { // ReadResponse reads a server response into the received o. func (o *JobServiceEnableJobReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewJobServiceEnableJobOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewJobServiceEnableJobDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewJobServiceEnableJobOK() *JobServiceEnableJobOK { return &JobServiceEnableJobOK{} } -/*JobServiceEnableJobOK handles this case with default header values. +/* +JobServiceEnableJobOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type JobServiceEnableJobOK struct { Payload interface{} } +// IsSuccess returns true when this job service enable job o k response has a 2xx status code +func (o *JobServiceEnableJobOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this job service enable job o k response has a 3xx status code +func (o *JobServiceEnableJobOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this job service enable job o k response has a 4xx status code +func (o *JobServiceEnableJobOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this job service enable job o k response has a 5xx status code +func (o *JobServiceEnableJobOK) IsServerError() bool { + return false +} + +// IsCode returns true when this job service enable job o k response a status code equal to that given +func (o *JobServiceEnableJobOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the job service enable job o k response +func (o *JobServiceEnableJobOK) Code() int { + return 200 +} + func (o *JobServiceEnableJobOK) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/jobs/{id}/enable][%d] jobServiceEnableJobOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/jobs/{id}/enable][%d] jobServiceEnableJobOK %s", 200, payload) +} + +func (o *JobServiceEnableJobOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/jobs/{id}/enable][%d] jobServiceEnableJobOK %s", 200, payload) +} + +func (o *JobServiceEnableJobOK) GetPayload() interface{} { + return o.Payload } func (o *JobServiceEnableJobOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewJobServiceEnableJobDefault(code int) *JobServiceEnableJobDefault { } } -/*JobServiceEnableJobDefault handles this case with default header values. +/* +JobServiceEnableJobDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type JobServiceEnableJobDefault struct { _statusCode int - Payload *job_model.GatewayruntimeError + Payload *job_model.GooglerpcStatus +} + +// IsSuccess returns true when this job service enable job default response has a 2xx status code +func (o *JobServiceEnableJobDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this job service enable job default response has a 3xx status code +func (o *JobServiceEnableJobDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this job service enable job default response has a 4xx status code +func (o *JobServiceEnableJobDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this job service enable job default response has a 5xx status code +func (o *JobServiceEnableJobDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this job service enable job default response a status code equal to that given +func (o *JobServiceEnableJobDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the job service enable job default response @@ -94,12 +159,22 @@ func (o *JobServiceEnableJobDefault) Code() int { } func (o *JobServiceEnableJobDefault) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/jobs/{id}/enable][%d] JobService_EnableJob default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/jobs/{id}/enable][%d] JobService_EnableJob default %s", o._statusCode, payload) +} + +func (o *JobServiceEnableJobDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/jobs/{id}/enable][%d] JobService_EnableJob default %s", o._statusCode, payload) +} + +func (o *JobServiceEnableJobDefault) GetPayload() *job_model.GooglerpcStatus { + return o.Payload } func (o *JobServiceEnableJobDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(job_model.GatewayruntimeError) + o.Payload = new(job_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_get_job_parameters.go b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_get_job_parameters.go index 11cef3e6402..09652d7743d 100644 --- a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_get_job_parameters.go +++ b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_get_job_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewJobServiceGetJobParams creates a new JobServiceGetJobParams object -// with the default values initialized. +// NewJobServiceGetJobParams creates a new JobServiceGetJobParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewJobServiceGetJobParams() *JobServiceGetJobParams { - var () return &JobServiceGetJobParams{ - timeout: cr.DefaultTimeout, } } // NewJobServiceGetJobParamsWithTimeout creates a new JobServiceGetJobParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewJobServiceGetJobParamsWithTimeout(timeout time.Duration) *JobServiceGetJobParams { - var () return &JobServiceGetJobParams{ - timeout: timeout, } } // NewJobServiceGetJobParamsWithContext creates a new JobServiceGetJobParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewJobServiceGetJobParamsWithContext(ctx context.Context) *JobServiceGetJobParams { - var () return &JobServiceGetJobParams{ - Context: ctx, } } // NewJobServiceGetJobParamsWithHTTPClient creates a new JobServiceGetJobParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewJobServiceGetJobParamsWithHTTPClient(client *http.Client) *JobServiceGetJobParams { - var () return &JobServiceGetJobParams{ HTTPClient: client, } } -/*JobServiceGetJobParams contains all the parameters to send to the API endpoint -for the job service get job operation typically these are written to a http.Request +/* +JobServiceGetJobParams contains all the parameters to send to the API endpoint + + for the job service get job operation. + + Typically these are written to a http.Request. */ type JobServiceGetJobParams struct { - /*ID - The ID of the job to be retrieved + /* ID. + The ID of the job to be retrieved */ ID string @@ -72,6 +72,21 @@ type JobServiceGetJobParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the job service get job params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *JobServiceGetJobParams) WithDefaults() *JobServiceGetJobParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the job service get job params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *JobServiceGetJobParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the job service get job params func (o *JobServiceGetJobParams) WithTimeout(timeout time.Duration) *JobServiceGetJobParams { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_get_job_responses.go b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_get_job_responses.go index cc4277681b9..1377740a32b 100644 --- a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_get_job_responses.go +++ b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_get_job_responses.go @@ -6,14 +6,14 @@ package job_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - job_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/job_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/job_model" ) // JobServiceGetJobReader is a Reader for the JobServiceGetJob structure. 
@@ -24,14 +24,12 @@ type JobServiceGetJobReader struct { // ReadResponse reads a server response into the received o. func (o *JobServiceGetJobReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewJobServiceGetJobOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewJobServiceGetJobDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewJobServiceGetJobOK() *JobServiceGetJobOK { return &JobServiceGetJobOK{} } -/*JobServiceGetJobOK handles this case with default header values. +/* +JobServiceGetJobOK describes a response with status code 200, with default header values. A successful response. */ @@ -57,8 +56,48 @@ type JobServiceGetJobOK struct { Payload *job_model.APIJob } +// IsSuccess returns true when this job service get job o k response has a 2xx status code +func (o *JobServiceGetJobOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this job service get job o k response has a 3xx status code +func (o *JobServiceGetJobOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this job service get job o k response has a 4xx status code +func (o *JobServiceGetJobOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this job service get job o k response has a 5xx status code +func (o *JobServiceGetJobOK) IsServerError() bool { + return false +} + +// IsCode returns true when this job service get job o k response a status code equal to that given +func (o *JobServiceGetJobOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the job service get job o k response +func (o *JobServiceGetJobOK) Code() int { + return 200 +} + func (o *JobServiceGetJobOK) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/jobs/{id}][%d] jobServiceGetJobOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/jobs/{id}][%d] jobServiceGetJobOK %s", 200, payload) +} + +func (o *JobServiceGetJobOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/jobs/{id}][%d] jobServiceGetJobOK %s", 200, payload) +} + +func (o *JobServiceGetJobOK) GetPayload() *job_model.APIJob { + return o.Payload } func (o *JobServiceGetJobOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewJobServiceGetJobDefault(code int) *JobServiceGetJobDefault { } } -/*JobServiceGetJobDefault handles this case with default header values. +/* +JobServiceGetJobDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type JobServiceGetJobDefault struct { _statusCode int - Payload *job_model.GatewayruntimeError + Payload *job_model.GooglerpcStatus +} + +// IsSuccess returns true when this job service get job default response has a 2xx status code +func (o *JobServiceGetJobDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this job service get job default response has a 3xx status code +func (o *JobServiceGetJobDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this job service get job default response has a 4xx status code +func (o *JobServiceGetJobDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this job service get job default response has a 5xx status code +func (o *JobServiceGetJobDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this job service get job default response a status code equal to that given +func (o *JobServiceGetJobDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the job service get job default response @@ -96,12 +161,22 @@ func (o *JobServiceGetJobDefault) Code() int { } func (o *JobServiceGetJobDefault) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/jobs/{id}][%d] JobService_GetJob default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/jobs/{id}][%d] JobService_GetJob default %s", o._statusCode, payload) +} + +func (o *JobServiceGetJobDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/jobs/{id}][%d] JobService_GetJob default %s", o._statusCode, payload) +} + +func (o *JobServiceGetJobDefault) GetPayload() *job_model.GooglerpcStatus { + return o.Payload } func (o *JobServiceGetJobDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(job_model.GatewayruntimeError) + o.Payload = new(job_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_list_jobs_parameters.go b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_list_jobs_parameters.go index 49ddab1f533..3381d851905 100644 --- a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_list_jobs_parameters.go +++ b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_list_jobs_parameters.go @@ -13,101 +13,98 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" - - strfmt "github.com/go-openapi/strfmt" ) -// NewJobServiceListJobsParams creates a new JobServiceListJobsParams object -// with the default values initialized. +// NewJobServiceListJobsParams creates a new JobServiceListJobsParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewJobServiceListJobsParams() *JobServiceListJobsParams { - var ( - resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") - ) return &JobServiceListJobsParams{ - ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, - timeout: cr.DefaultTimeout, } } // NewJobServiceListJobsParamsWithTimeout creates a new JobServiceListJobsParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewJobServiceListJobsParamsWithTimeout(timeout time.Duration) *JobServiceListJobsParams { - var ( - resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") - ) return &JobServiceListJobsParams{ - ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, - timeout: timeout, } } // NewJobServiceListJobsParamsWithContext creates a new JobServiceListJobsParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewJobServiceListJobsParamsWithContext(ctx context.Context) *JobServiceListJobsParams { - var ( - resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") - ) return &JobServiceListJobsParams{ - ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, - Context: ctx, } } // NewJobServiceListJobsParamsWithHTTPClient creates a new JobServiceListJobsParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewJobServiceListJobsParamsWithHTTPClient(client *http.Client) *JobServiceListJobsParams { - var ( - resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") - ) return &JobServiceListJobsParams{ - ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, - HTTPClient: client, + HTTPClient: client, } } -/*JobServiceListJobsParams contains all the parameters to send to the API endpoint -for the job service list jobs operation typically these are written to a http.Request +/* +JobServiceListJobsParams contains all the parameters to send to the API endpoint + + for the job service list jobs operation. + + Typically these are written to a http.Request. */ type JobServiceListJobsParams struct { - /*Filter - A url-encoded, JSON-serialized Filter protocol buffer (see - [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/v1beta1/filter.proto)). + /* Filter. + A url-encoded, JSON-serialized Filter protocol buffer (see + [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/v1beta1/filter.proto)). */ Filter *string - /*PageSize - The number of jobs to be listed per page. If there are more jobs than this + + /* PageSize. + + The number of jobs to be listed per page. If there are more jobs than this number, the response message will contain a nextPageToken field you can use to fetch the next page. + Format: int32 */ PageSize *int32 - /*PageToken - A page token to request the next page of results. The token is acquried + + /* PageToken. + + A page token to request the next page of results. The token is acquried from the nextPageToken field of the response from the previous ListJobs call or can be omitted when fetching the first page. - */ PageToken *string - /*ResourceReferenceKeyID - The ID of the resource that referred to. + /* ResourceReferenceKeyID. + + The ID of the resource that referred to. */ ResourceReferenceKeyID *string - /*ResourceReferenceKeyType - The type of the resource that referred to. 
+ /* ResourceReferenceKeyType. + + The type of the resource that referred to. + + Default: "UNKNOWN_RESOURCE_TYPE" */ ResourceReferenceKeyType *string - /*SortBy - Can be format of "field_name", "field_name asc" or "field_name desc". - Ascending by default. + /* SortBy. + + Can be format of "field_name", "field_name asc" or "field_name desc". + Ascending by default. */ SortBy *string @@ -116,6 +113,32 @@ type JobServiceListJobsParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the job service list jobs params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *JobServiceListJobsParams) WithDefaults() *JobServiceListJobsParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the job service list jobs params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *JobServiceListJobsParams) SetDefaults() { + var ( + resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") + ) + + val := JobServiceListJobsParams{ + ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, + } + + val.timeout = o.timeout + val.Context = o.Context + val.HTTPClient = o.HTTPClient + *o = val +} + // WithTimeout adds the timeout to the job service list jobs params func (o *JobServiceListJobsParams) WithTimeout(timeout time.Duration) *JobServiceListJobsParams { o.SetTimeout(timeout) @@ -227,96 +250,102 @@ func (o *JobServiceListJobsParams) WriteToRequest(r runtime.ClientRequest, reg s // query param filter var qrFilter string + if o.Filter != nil { qrFilter = *o.Filter } qFilter := qrFilter if qFilter != "" { + if err := r.SetQueryParam("filter", qFilter); err != nil { return err } } - } if o.PageSize != nil { // query param page_size var qrPageSize int32 + if o.PageSize != nil { qrPageSize = *o.PageSize } qPageSize := swag.FormatInt32(qrPageSize) if qPageSize != "" { + if err := r.SetQueryParam("page_size", qPageSize); err != nil { return err } } - } if o.PageToken != nil { // query param page_token var qrPageToken string + if o.PageToken != nil { qrPageToken = *o.PageToken } qPageToken := qrPageToken if qPageToken != "" { + if err := r.SetQueryParam("page_token", qPageToken); err != nil { return err } } - } if o.ResourceReferenceKeyID != nil { // query param resource_reference_key.id var qrResourceReferenceKeyID string + if o.ResourceReferenceKeyID != nil { qrResourceReferenceKeyID = *o.ResourceReferenceKeyID } qResourceReferenceKeyID := qrResourceReferenceKeyID if qResourceReferenceKeyID != "" { + if err := r.SetQueryParam("resource_reference_key.id", qResourceReferenceKeyID); err != nil { return err } } - } if o.ResourceReferenceKeyType != nil { // query param resource_reference_key.type var qrResourceReferenceKeyType string + if o.ResourceReferenceKeyType != nil { qrResourceReferenceKeyType = *o.ResourceReferenceKeyType } qResourceReferenceKeyType := qrResourceReferenceKeyType if qResourceReferenceKeyType != "" { + if err := r.SetQueryParam("resource_reference_key.type", qResourceReferenceKeyType); err != nil { return err } } - } if o.SortBy != nil { // query param sort_by var qrSortBy string + if o.SortBy != nil { qrSortBy = *o.SortBy } qSortBy := qrSortBy if qSortBy != "" { + if err := r.SetQueryParam("sort_by", qSortBy); err != nil { return err } } - } if len(res) > 0 { diff --git a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_list_jobs_responses.go 
b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_list_jobs_responses.go index adbc4587c7f..5652b8241dc 100644 --- a/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_list_jobs_responses.go +++ b/backend/api/v1beta1/go_http_client/job_client/job_service/job_service_list_jobs_responses.go @@ -6,14 +6,14 @@ package job_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - job_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/job_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/job_model" ) // JobServiceListJobsReader is a Reader for the JobServiceListJobs structure. @@ -24,14 +24,12 @@ type JobServiceListJobsReader struct { // ReadResponse reads a server response into the received o. func (o *JobServiceListJobsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewJobServiceListJobsOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewJobServiceListJobsDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewJobServiceListJobsOK() *JobServiceListJobsOK { return &JobServiceListJobsOK{} } -/*JobServiceListJobsOK handles this case with default header values. +/* +JobServiceListJobsOK describes a response with status code 200, with default header values. A successful response. */ @@ -57,8 +56,48 @@ type JobServiceListJobsOK struct { Payload *job_model.APIListJobsResponse } +// IsSuccess returns true when this job service list jobs o k response has a 2xx status code +func (o *JobServiceListJobsOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this job service list jobs o k response has a 3xx status code +func (o *JobServiceListJobsOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this job service list jobs o k response has a 4xx status code +func (o *JobServiceListJobsOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this job service list jobs o k response has a 5xx status code +func (o *JobServiceListJobsOK) IsServerError() bool { + return false +} + +// IsCode returns true when this job service list jobs o k response a status code equal to that given +func (o *JobServiceListJobsOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the job service list jobs o k response +func (o *JobServiceListJobsOK) Code() int { + return 200 +} + func (o *JobServiceListJobsOK) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/jobs][%d] jobServiceListJobsOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/jobs][%d] jobServiceListJobsOK %s", 200, payload) +} + +func (o *JobServiceListJobsOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/jobs][%d] jobServiceListJobsOK %s", 200, payload) +} + +func (o *JobServiceListJobsOK) GetPayload() *job_model.APIListJobsResponse { + return o.Payload } func (o *JobServiceListJobsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func 
NewJobServiceListJobsDefault(code int) *JobServiceListJobsDefault { } } -/*JobServiceListJobsDefault handles this case with default header values. +/* +JobServiceListJobsDefault describes a response with status code -1, with default header values. An unexpected error response. */ type JobServiceListJobsDefault struct { _statusCode int - Payload *job_model.GatewayruntimeError + Payload *job_model.GooglerpcStatus +} + +// IsSuccess returns true when this job service list jobs default response has a 2xx status code +func (o *JobServiceListJobsDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this job service list jobs default response has a 3xx status code +func (o *JobServiceListJobsDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this job service list jobs default response has a 4xx status code +func (o *JobServiceListJobsDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this job service list jobs default response has a 5xx status code +func (o *JobServiceListJobsDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this job service list jobs default response a status code equal to that given +func (o *JobServiceListJobsDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the job service list jobs default response @@ -96,12 +161,22 @@ func (o *JobServiceListJobsDefault) Code() int { } func (o *JobServiceListJobsDefault) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/jobs][%d] JobService_ListJobs default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/jobs][%d] JobService_ListJobs default %s", o._statusCode, payload) +} + +func (o *JobServiceListJobsDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/jobs][%d] JobService_ListJobs default %s", o._statusCode, payload) +} + +func (o *JobServiceListJobsDefault) GetPayload() *job_model.GooglerpcStatus { + return o.Payload } func (o *JobServiceListJobsDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(job_model.GatewayruntimeError) + o.Payload = new(job_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/job_model/api_cron_schedule.go b/backend/api/v1beta1/go_http_client/job_model/api_cron_schedule.go index 6cf33620b63..e69871a127d 100644 --- a/backend/api/v1beta1/go_http_client/job_model/api_cron_schedule.go +++ b/backend/api/v1beta1/go_http_client/job_model/api_cron_schedule.go @@ -6,14 +6,16 @@ package job_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // APICronSchedule CronSchedule allow scheduling the job with unix-like cron +// // swagger:model apiCronSchedule type APICronSchedule struct { @@ -49,7 +51,6 @@ func (m *APICronSchedule) Validate(formats strfmt.Registry) error { } func (m *APICronSchedule) validateEndTime(formats strfmt.Registry) error { - if swag.IsZero(m.EndTime) { // not required return nil } @@ -62,7 +63,6 @@ func (m 
*APICronSchedule) validateEndTime(formats strfmt.Registry) error { } func (m *APICronSchedule) validateStartTime(formats strfmt.Registry) error { - if swag.IsZero(m.StartTime) { // not required return nil } @@ -74,6 +74,11 @@ func (m *APICronSchedule) validateStartTime(formats strfmt.Registry) error { return nil } +// ContextValidate validates this api cron schedule based on context it is used +func (m *APICronSchedule) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *APICronSchedule) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v1beta1/go_http_client/job_model/api_job.go b/backend/api/v1beta1/go_http_client/job_model/api_job.go index 8bd5e87802d..7fa17059ff2 100644 --- a/backend/api/v1beta1/go_http_client/job_model/api_job.go +++ b/backend/api/v1beta1/go_http_client/job_model/api_job.go @@ -6,16 +6,17 @@ package job_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // APIJob api job +// // swagger:model apiJob type APIJob struct { @@ -42,7 +43,7 @@ type APIJob struct { MaxConcurrency int64 `json:"max_concurrency,omitempty,string"` // mode - Mode JobMode `json:"mode,omitempty"` + Mode *JobMode `json:"mode,omitempty"` // Required input field. Job name provided by user. Not unique. Name string `json:"name,omitempty"` @@ -111,7 +112,6 @@ func (m *APIJob) Validate(formats strfmt.Registry) error { } func (m *APIJob) validateCreatedAt(formats strfmt.Registry) error { - if swag.IsZero(m.CreatedAt) { // not required return nil } @@ -124,23 +124,25 @@ func (m *APIJob) validateCreatedAt(formats strfmt.Registry) error { } func (m *APIJob) validateMode(formats strfmt.Registry) error { - if swag.IsZero(m.Mode) { // not required return nil } - if err := m.Mode.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("mode") + if m.Mode != nil { + if err := m.Mode.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("mode") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("mode") + } + return err } - return err } return nil } func (m *APIJob) validatePipelineSpec(formats strfmt.Registry) error { - if swag.IsZero(m.PipelineSpec) { // not required return nil } @@ -149,6 +151,8 @@ func (m *APIJob) validatePipelineSpec(formats strfmt.Registry) error { if err := m.PipelineSpec.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("pipeline_spec") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipeline_spec") } return err } @@ -158,7 +162,6 @@ func (m *APIJob) validatePipelineSpec(formats strfmt.Registry) error { } func (m *APIJob) validateResourceReferences(formats strfmt.Registry) error { - if swag.IsZero(m.ResourceReferences) { // not required return nil } @@ -172,6 +175,8 @@ func (m *APIJob) validateResourceReferences(formats strfmt.Registry) error { if err := m.ResourceReferences[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("resource_references" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("resource_references" + "." 
+ strconv.Itoa(i)) } return err } @@ -183,7 +188,6 @@ func (m *APIJob) validateResourceReferences(formats strfmt.Registry) error { } func (m *APIJob) validateTrigger(formats strfmt.Registry) error { - if swag.IsZero(m.Trigger) { // not required return nil } @@ -192,6 +196,8 @@ func (m *APIJob) validateTrigger(formats strfmt.Registry) error { if err := m.Trigger.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("trigger") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("trigger") } return err } @@ -201,7 +207,6 @@ func (m *APIJob) validateTrigger(formats strfmt.Registry) error { } func (m *APIJob) validateUpdatedAt(formats strfmt.Registry) error { - if swag.IsZero(m.UpdatedAt) { // not required return nil } @@ -213,6 +218,120 @@ func (m *APIJob) validateUpdatedAt(formats strfmt.Registry) error { return nil } +// ContextValidate validate this api job based on the context it is used +func (m *APIJob) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateMode(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidatePipelineSpec(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateResourceReferences(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateTrigger(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIJob) contextValidateMode(ctx context.Context, formats strfmt.Registry) error { + + if m.Mode != nil { + + if swag.IsZero(m.Mode) { // not required + return nil + } + + if err := m.Mode.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("mode") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("mode") + } + return err + } + } + + return nil +} + +func (m *APIJob) contextValidatePipelineSpec(ctx context.Context, formats strfmt.Registry) error { + + if m.PipelineSpec != nil { + + if swag.IsZero(m.PipelineSpec) { // not required + return nil + } + + if err := m.PipelineSpec.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("pipeline_spec") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipeline_spec") + } + return err + } + } + + return nil +} + +func (m *APIJob) contextValidateResourceReferences(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.ResourceReferences); i++ { + + if m.ResourceReferences[i] != nil { + + if swag.IsZero(m.ResourceReferences[i]) { // not required + return nil + } + + if err := m.ResourceReferences[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("resource_references" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("resource_references" + "." 
+ strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +func (m *APIJob) contextValidateTrigger(ctx context.Context, formats strfmt.Registry) error { + + if m.Trigger != nil { + + if swag.IsZero(m.Trigger) { // not required + return nil + } + + if err := m.Trigger.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("trigger") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("trigger") + } + return err + } + } + + return nil +} + // MarshalBinary interface implementation func (m *APIJob) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v1beta1/go_http_client/job_model/api_list_jobs_response.go b/backend/api/v1beta1/go_http_client/job_model/api_list_jobs_response.go index 3e87d2ce10d..581ef6ca487 100644 --- a/backend/api/v1beta1/go_http_client/job_model/api_list_jobs_response.go +++ b/backend/api/v1beta1/go_http_client/job_model/api_list_jobs_response.go @@ -6,15 +6,16 @@ package job_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIListJobsResponse api list jobs response +// // swagger:model apiListJobsResponse type APIListJobsResponse struct { @@ -43,7 +44,6 @@ func (m *APIListJobsResponse) Validate(formats strfmt.Registry) error { } func (m *APIListJobsResponse) validateJobs(formats strfmt.Registry) error { - if swag.IsZero(m.Jobs) { // not required return nil } @@ -57,6 +57,47 @@ func (m *APIListJobsResponse) validateJobs(formats strfmt.Registry) error { if err := m.Jobs[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("jobs" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("jobs" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this api list jobs response based on the context it is used +func (m *APIListJobsResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateJobs(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIListJobsResponse) contextValidateJobs(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Jobs); i++ { + + if m.Jobs[i] != nil { + + if swag.IsZero(m.Jobs[i]) { // not required + return nil + } + + if err := m.Jobs[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("jobs" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("jobs" + "." 
+ strconv.Itoa(i)) } return err } diff --git a/backend/api/v1beta1/go_http_client/job_model/api_parameter.go b/backend/api/v1beta1/go_http_client/job_model/api_parameter.go index 999505b4470..d730cc452f1 100644 --- a/backend/api/v1beta1/go_http_client/job_model/api_parameter.go +++ b/backend/api/v1beta1/go_http_client/job_model/api_parameter.go @@ -6,12 +6,14 @@ package job_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIParameter api parameter +// // swagger:model apiParameter type APIParameter struct { @@ -27,6 +29,11 @@ func (m *APIParameter) Validate(formats strfmt.Registry) error { return nil } +// ContextValidate validates this api parameter based on context it is used +func (m *APIParameter) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *APIParameter) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v1beta1/go_http_client/job_model/api_periodic_schedule.go b/backend/api/v1beta1/go_http_client/job_model/api_periodic_schedule.go index ad1f6a33257..6fc4af534fa 100644 --- a/backend/api/v1beta1/go_http_client/job_model/api_periodic_schedule.go +++ b/backend/api/v1beta1/go_http_client/job_model/api_periodic_schedule.go @@ -6,14 +6,16 @@ package job_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // APIPeriodicSchedule PeriodicSchedule allow scheduling the job periodically with certain interval +// // swagger:model apiPeriodicSchedule type APIPeriodicSchedule struct { @@ -48,7 +50,6 @@ func (m *APIPeriodicSchedule) Validate(formats strfmt.Registry) error { } func (m *APIPeriodicSchedule) validateEndTime(formats strfmt.Registry) error { - if swag.IsZero(m.EndTime) { // not required return nil } @@ -61,7 +62,6 @@ func (m *APIPeriodicSchedule) validateEndTime(formats strfmt.Registry) error { } func (m *APIPeriodicSchedule) validateStartTime(formats strfmt.Registry) error { - if swag.IsZero(m.StartTime) { // not required return nil } @@ -73,6 +73,11 @@ func (m *APIPeriodicSchedule) validateStartTime(formats strfmt.Registry) error { return nil } +// ContextValidate validates this api periodic schedule based on context it is used +func (m *APIPeriodicSchedule) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *APIPeriodicSchedule) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v1beta1/go_http_client/job_model/api_pipeline_spec.go b/backend/api/v1beta1/go_http_client/job_model/api_pipeline_spec.go index d6341c7dec1..2a116a3eb4c 100644 --- a/backend/api/v1beta1/go_http_client/job_model/api_pipeline_spec.go +++ b/backend/api/v1beta1/go_http_client/job_model/api_pipeline_spec.go @@ -6,15 +6,16 @@ package job_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIPipelineSpec api pipeline spec +// // swagger:model apiPipelineSpec type APIPipelineSpec struct { @@ -60,7 
+61,6 @@ func (m *APIPipelineSpec) Validate(formats strfmt.Registry) error { } func (m *APIPipelineSpec) validateParameters(formats strfmt.Registry) error { - if swag.IsZero(m.Parameters) { // not required return nil } @@ -74,6 +74,8 @@ func (m *APIPipelineSpec) validateParameters(formats strfmt.Registry) error { if err := m.Parameters[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("parameters" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("parameters" + "." + strconv.Itoa(i)) } return err } @@ -85,7 +87,6 @@ func (m *APIPipelineSpec) validateParameters(formats strfmt.Registry) error { } func (m *APIPipelineSpec) validateRuntimeConfig(formats strfmt.Registry) error { - if swag.IsZero(m.RuntimeConfig) { // not required return nil } @@ -94,6 +95,72 @@ func (m *APIPipelineSpec) validateRuntimeConfig(formats strfmt.Registry) error { if err := m.RuntimeConfig.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("runtime_config") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("runtime_config") + } + return err + } + } + + return nil +} + +// ContextValidate validate this api pipeline spec based on the context it is used +func (m *APIPipelineSpec) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateParameters(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateRuntimeConfig(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIPipelineSpec) contextValidateParameters(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Parameters); i++ { + + if m.Parameters[i] != nil { + + if swag.IsZero(m.Parameters[i]) { // not required + return nil + } + + if err := m.Parameters[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("parameters" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("parameters" + "." 
+ strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +func (m *APIPipelineSpec) contextValidateRuntimeConfig(ctx context.Context, formats strfmt.Registry) error { + + if m.RuntimeConfig != nil { + + if swag.IsZero(m.RuntimeConfig) { // not required + return nil + } + + if err := m.RuntimeConfig.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("runtime_config") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("runtime_config") } return err } diff --git a/backend/api/v1beta1/go_http_client/job_model/api_relationship.go b/backend/api/v1beta1/go_http_client/job_model/api_relationship.go index c3987f80bb8..7b56d9d5bfb 100644 --- a/backend/api/v1beta1/go_http_client/job_model/api_relationship.go +++ b/backend/api/v1beta1/go_http_client/job_model/api_relationship.go @@ -6,18 +6,28 @@ package job_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // APIRelationship api relationship +// // swagger:model apiRelationship type APIRelationship string +func NewAPIRelationship(value APIRelationship) *APIRelationship { + return &value +} + +// Pointer returns a pointer to a freshly-allocated APIRelationship. +func (m APIRelationship) Pointer() *APIRelationship { + return &m +} + const ( // APIRelationshipUNKNOWNRELATIONSHIP captures enum value "UNKNOWN_RELATIONSHIP" @@ -44,7 +54,7 @@ func init() { } func (m APIRelationship) validateAPIRelationshipEnum(path, location string, value APIRelationship) error { - if err := validate.Enum(path, location, value, apiRelationshipEnum); err != nil { + if err := validate.EnumCase(path, location, value, apiRelationshipEnum, true); err != nil { return err } return nil @@ -64,3 +74,8 @@ func (m APIRelationship) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this api relationship based on context it is used +func (m APIRelationship) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v1beta1/go_http_client/job_model/api_resource_key.go b/backend/api/v1beta1/go_http_client/job_model/api_resource_key.go index 4a894169800..3120e060680 100644 --- a/backend/api/v1beta1/go_http_client/job_model/api_resource_key.go +++ b/backend/api/v1beta1/go_http_client/job_model/api_resource_key.go @@ -6,13 +6,15 @@ package job_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIResourceKey api resource key +// // swagger:model apiResourceKey type APIResourceKey struct { @@ -20,7 +22,7 @@ type APIResourceKey struct { ID string `json:"id,omitempty"` // The type of the resource that referred to. 
- Type APIResourceType `json:"type,omitempty"` + Type *APIResourceType `json:"type,omitempty"` } // Validate validates this api resource key @@ -38,16 +40,54 @@ func (m *APIResourceKey) Validate(formats strfmt.Registry) error { } func (m *APIResourceKey) validateType(formats strfmt.Registry) error { - if swag.IsZero(m.Type) { // not required return nil } - if err := m.Type.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("type") + if m.Type != nil { + if err := m.Type.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// ContextValidate validate this api resource key based on the context it is used +func (m *APIResourceKey) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateType(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIResourceKey) contextValidateType(ctx context.Context, formats strfmt.Registry) error { + + if m.Type != nil { + + if swag.IsZero(m.Type) { // not required + return nil + } + + if err := m.Type.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err } - return err } return nil diff --git a/backend/api/v1beta1/go_http_client/job_model/api_resource_reference.go b/backend/api/v1beta1/go_http_client/job_model/api_resource_reference.go index 3e0b921d5da..85568f2550f 100644 --- a/backend/api/v1beta1/go_http_client/job_model/api_resource_reference.go +++ b/backend/api/v1beta1/go_http_client/job_model/api_resource_reference.go @@ -6,13 +6,15 @@ package job_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIResourceReference api resource reference +// // swagger:model apiResourceReference type APIResourceReference struct { @@ -23,7 +25,7 @@ type APIResourceReference struct { Name string `json:"name,omitempty"` // Required field. The relationship from referred resource to the object. 
- Relationship APIRelationship `json:"relationship,omitempty"` + Relationship *APIRelationship `json:"relationship,omitempty"` } // Validate validates this api resource reference @@ -45,7 +47,6 @@ func (m *APIResourceReference) Validate(formats strfmt.Registry) error { } func (m *APIResourceReference) validateKey(formats strfmt.Registry) error { - if swag.IsZero(m.Key) { // not required return nil } @@ -54,6 +55,8 @@ func (m *APIResourceReference) validateKey(formats strfmt.Registry) error { if err := m.Key.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("key") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("key") } return err } @@ -63,16 +66,79 @@ func (m *APIResourceReference) validateKey(formats strfmt.Registry) error { } func (m *APIResourceReference) validateRelationship(formats strfmt.Registry) error { - if swag.IsZero(m.Relationship) { // not required return nil } - if err := m.Relationship.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("relationship") + if m.Relationship != nil { + if err := m.Relationship.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("relationship") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("relationship") + } + return err + } + } + + return nil +} + +// ContextValidate validate this api resource reference based on the context it is used +func (m *APIResourceReference) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateKey(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateRelationship(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *APIResourceReference) contextValidateKey(ctx context.Context, formats strfmt.Registry) error { + + if m.Key != nil { + + if swag.IsZero(m.Key) { // not required + return nil + } + + if err := m.Key.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("key") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("key") + } + return err + } + } + + return nil +} + +func (m *APIResourceReference) contextValidateRelationship(ctx context.Context, formats strfmt.Registry) error { + + if m.Relationship != nil { + + if swag.IsZero(m.Relationship) { // not required + return nil + } + + if err := m.Relationship.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("relationship") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("relationship") + } + return err } - return err } return nil diff --git a/backend/api/v1beta1/go_http_client/job_model/api_resource_type.go b/backend/api/v1beta1/go_http_client/job_model/api_resource_type.go index 1d6d8e2eb57..f346dfadfaa 100644 --- a/backend/api/v1beta1/go_http_client/job_model/api_resource_type.go +++ b/backend/api/v1beta1/go_http_client/job_model/api_resource_type.go @@ -6,18 +6,28 @@ package job_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // APIResourceType api resource type +// // swagger:model apiResourceType type APIResourceType string +func NewAPIResourceType(value APIResourceType) *APIResourceType { + return &value +} + +// Pointer returns a pointer to a freshly-allocated APIResourceType. +func (m APIResourceType) Pointer() *APIResourceType { + return &m +} + const ( // APIResourceTypeUNKNOWNRESOURCETYPE captures enum value "UNKNOWN_RESOURCE_TYPE" @@ -53,7 +63,7 @@ func init() { } func (m APIResourceType) validateAPIResourceTypeEnum(path, location string, value APIResourceType) error { - if err := validate.Enum(path, location, value, apiResourceTypeEnum); err != nil { + if err := validate.EnumCase(path, location, value, apiResourceTypeEnum, true); err != nil { return err } return nil @@ -73,3 +83,8 @@ func (m APIResourceType) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this api resource type based on context it is used +func (m APIResourceType) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v1beta1/go_http_client/job_model/api_trigger.go b/backend/api/v1beta1/go_http_client/job_model/api_trigger.go index 9e03e3fbe58..0f6b2e2aa3e 100644 --- a/backend/api/v1beta1/go_http_client/job_model/api_trigger.go +++ b/backend/api/v1beta1/go_http_client/job_model/api_trigger.go @@ -6,13 +6,15 @@ package job_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APITrigger Trigger defines what starts a pipeline run. 
+// // swagger:model apiTrigger type APITrigger struct { @@ -42,7 +44,6 @@ func (m *APITrigger) Validate(formats strfmt.Registry) error { } func (m *APITrigger) validateCronSchedule(formats strfmt.Registry) error { - if swag.IsZero(m.CronSchedule) { // not required return nil } @@ -51,6 +52,8 @@ func (m *APITrigger) validateCronSchedule(formats strfmt.Registry) error { if err := m.CronSchedule.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("cron_schedule") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("cron_schedule") } return err } @@ -60,7 +63,6 @@ func (m *APITrigger) validateCronSchedule(formats strfmt.Registry) error { } func (m *APITrigger) validatePeriodicSchedule(formats strfmt.Registry) error { - if swag.IsZero(m.PeriodicSchedule) { // not required return nil } @@ -69,6 +71,68 @@ func (m *APITrigger) validatePeriodicSchedule(formats strfmt.Registry) error { if err := m.PeriodicSchedule.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("periodic_schedule") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("periodic_schedule") + } + return err + } + } + + return nil +} + +// ContextValidate validate this api trigger based on the context it is used +func (m *APITrigger) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateCronSchedule(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidatePeriodicSchedule(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APITrigger) contextValidateCronSchedule(ctx context.Context, formats strfmt.Registry) error { + + if m.CronSchedule != nil { + + if swag.IsZero(m.CronSchedule) { // not required + return nil + } + + if err := m.CronSchedule.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("cron_schedule") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("cron_schedule") + } + return err + } + } + + return nil +} + +func (m *APITrigger) contextValidatePeriodicSchedule(ctx context.Context, formats strfmt.Registry) error { + + if m.PeriodicSchedule != nil { + + if swag.IsZero(m.PeriodicSchedule) { // not required + return nil + } + + if err := m.PeriodicSchedule.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("periodic_schedule") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("periodic_schedule") } return err } diff --git a/backend/api/v1beta1/go_http_client/job_model/gatewayruntime_error.go b/backend/api/v1beta1/go_http_client/job_model/gatewayruntime_error.go deleted file mode 100644 index 80a355e3b62..00000000000 --- a/backend/api/v1beta1/go_http_client/job_model/gatewayruntime_error.go +++ /dev/null @@ -1,89 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package job_model - -// This file was generated by the swagger tool. 
-// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "strconv" - - strfmt "github.com/go-openapi/strfmt" - - "github.com/go-openapi/errors" - "github.com/go-openapi/swag" -) - -// GatewayruntimeError gatewayruntime error -// swagger:model gatewayruntimeError -type GatewayruntimeError struct { - - // code - Code int32 `json:"code,omitempty"` - - // details - Details []*ProtobufAny `json:"details"` - - // error - Error string `json:"error,omitempty"` - - // message - Message string `json:"message,omitempty"` -} - -// Validate validates this gatewayruntime error -func (m *GatewayruntimeError) Validate(formats strfmt.Registry) error { - var res []error - - if err := m.validateDetails(formats); err != nil { - res = append(res, err) - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} - -func (m *GatewayruntimeError) validateDetails(formats strfmt.Registry) error { - - if swag.IsZero(m.Details) { // not required - return nil - } - - for i := 0; i < len(m.Details); i++ { - if swag.IsZero(m.Details[i]) { // not required - continue - } - - if m.Details[i] != nil { - if err := m.Details[i].Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("details" + "." + strconv.Itoa(i)) - } - return err - } - } - - } - - return nil -} - -// MarshalBinary interface implementation -func (m *GatewayruntimeError) MarshalBinary() ([]byte, error) { - if m == nil { - return nil, nil - } - return swag.WriteJSON(m) -} - -// UnmarshalBinary interface implementation -func (m *GatewayruntimeError) UnmarshalBinary(b []byte) error { - var res GatewayruntimeError - if err := swag.ReadJSON(b, &res); err != nil { - return err - } - *m = res - return nil -} diff --git a/backend/api/v1beta1/go_http_client/job_model/googlerpc_status.go b/backend/api/v1beta1/go_http_client/job_model/googlerpc_status.go new file mode 100644 index 00000000000..cbf9d31577e --- /dev/null +++ b/backend/api/v1beta1/go_http_client/job_model/googlerpc_status.go @@ -0,0 +1,127 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package job_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// GooglerpcStatus googlerpc status +// +// swagger:model googlerpcStatus +type GooglerpcStatus struct { + + // code + Code int32 `json:"code,omitempty"` + + // details + Details []*ProtobufAny `json:"details"` + + // message + Message string `json:"message,omitempty"` +} + +// Validate validates this googlerpc status +func (m *GooglerpcStatus) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateDetails(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) validateDetails(formats strfmt.Registry) error { + if swag.IsZero(m.Details) { // not required + return nil + } + + for i := 0; i < len(m.Details); i++ { + if swag.IsZero(m.Details[i]) { // not required + continue + } + + if m.Details[i] != nil { + if err := m.Details[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." 
+ strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this googlerpc status based on the context it is used +func (m *GooglerpcStatus) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateDetails(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) contextValidateDetails(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Details); i++ { + + if m.Details[i] != nil { + + if swag.IsZero(m.Details[i]) { // not required + return nil + } + + if err := m.Details[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *GooglerpcStatus) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *GooglerpcStatus) UnmarshalBinary(b []byte) error { + var res GooglerpcStatus + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v1beta1/go_http_client/job_model/job_mode.go b/backend/api/v1beta1/go_http_client/job_model/job_mode.go index bb24a144823..31602cee932 100644 --- a/backend/api/v1beta1/go_http_client/job_model/job_mode.go +++ b/backend/api/v1beta1/go_http_client/job_model/job_mode.go @@ -6,20 +6,30 @@ package job_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // JobMode Required input. // -// - DISABLED: The job won't schedule any run if disabled. +// - DISABLED: The job won't schedule any run if disabled. +// // swagger:model JobMode type JobMode string +func NewJobMode(value JobMode) *JobMode { + return &value +} + +// Pointer returns a pointer to a freshly-allocated JobMode. 
+func (m JobMode) Pointer() *JobMode { + return &m +} + const ( // JobModeUNKNOWNMODE captures enum value "UNKNOWN_MODE" @@ -46,7 +56,7 @@ func init() { } func (m JobMode) validateJobModeEnum(path, location string, value JobMode) error { - if err := validate.Enum(path, location, value, jobModeEnum); err != nil { + if err := validate.EnumCase(path, location, value, jobModeEnum, true); err != nil { return err } return nil @@ -66,3 +76,8 @@ func (m JobMode) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this job mode based on context it is used +func (m JobMode) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v1beta1/go_http_client/job_model/pipeline_spec_runtime_config.go b/backend/api/v1beta1/go_http_client/job_model/pipeline_spec_runtime_config.go index 8cbbe7f42cf..e62b0764dc7 100644 --- a/backend/api/v1beta1/go_http_client/job_model/pipeline_spec_runtime_config.go +++ b/backend/api/v1beta1/go_http_client/job_model/pipeline_spec_runtime_config.go @@ -6,12 +6,14 @@ package job_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // PipelineSpecRuntimeConfig The runtime config of a PipelineSpec. +// // swagger:model PipelineSpecRuntimeConfig type PipelineSpecRuntimeConfig struct { @@ -31,6 +33,11 @@ func (m *PipelineSpecRuntimeConfig) Validate(formats strfmt.Registry) error { return nil } +// ContextValidate validates this pipeline spec runtime config based on context it is used +func (m *PipelineSpecRuntimeConfig) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *PipelineSpecRuntimeConfig) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v1beta1/go_http_client/job_model/protobuf_any.go b/backend/api/v1beta1/go_http_client/job_model/protobuf_any.go index d2c49ec0702..ebb8205429f 100644 --- a/backend/api/v1beta1/go_http_client/job_model/protobuf_any.go +++ b/backend/api/v1beta1/go_http_client/job_model/protobuf_any.go @@ -6,9 +6,10 @@ package job_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "encoding/json" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) @@ -20,45 +21,49 @@ import ( // // Example 1: Pack and unpack a message in C++. // -// Foo foo = ...; -// Any any; -// any.PackFrom(foo); -// ... -// if (any.UnpackTo(&foo)) { -// ... -// } +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } // // Example 2: Pack and unpack a message in Java. // -// Foo foo = ...; -// Any any = Any.pack(foo); -// ... -// if (any.is(Foo.class)) { -// foo = any.unpack(Foo.class); -// } -// -// Example 3: Pack and unpack a message in Python. -// -// foo = Foo(...) -// any = Any() -// any.Pack(foo) -// ... -// if any.Is(Foo.DESCRIPTOR): -// any.Unpack(foo) -// ... -// -// Example 4: Pack and unpack a message in Go -// -// foo := &pb.Foo{...} -// any, err := anypb.New(foo) -// if err != nil { -// ... -// } -// ... -// foo := &pb.Foo{} -// if err := any.UnmarshalTo(foo); err != nil { -// ... -// } +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// // or ... 
+// if (any.isSameTypeAs(Foo.getDefaultInstance())) { +// foo = any.unpack(Foo.getDefaultInstance()); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... +// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := anypb.New(foo) +// if err != nil { +// ... +// } +// ... +// foo := &pb.Foo{} +// if err := any.UnmarshalTo(foo); err != nil { +// ... +// } // // The pack methods provided by protobuf library will by default use // 'type.googleapis.com/full.type.name' as the type URL and the unpack @@ -66,34 +71,34 @@ import ( // in the type URL, for example "foo.bar.com/x/y.z" will yield type // name "y.z". // -// // JSON -// +// ==== // The JSON representation of an `Any` value uses the regular // representation of the deserialized, embedded message, with an // additional field `@type` which contains the type URL. Example: // -// package google.profile; -// message Person { -// string first_name = 1; -// string last_name = 2; -// } +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } // -// { -// "@type": "type.googleapis.com/google.profile.Person", -// "firstName": , -// "lastName": -// } +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } // // If the embedded message type is well-known and has a custom JSON // representation, that representation will be embedded adding a field // `value` which holds the custom JSON in addition to the `@type` // field. Example (for message [google.protobuf.Duration][]): // -// { -// "@type": "type.googleapis.com/google.protobuf.Duration", -// "value": "1.212s" -// } +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// // swagger:model protobufAny type ProtobufAny struct { @@ -120,39 +125,151 @@ type ProtobufAny struct { // // Note: this functionality is not currently available in the official // protobuf release, and it is not used for type URLs beginning with - // type.googleapis.com. + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. // // Schemes other than `http`, `https` (or the empty scheme) might be // used with implementation specific semantics. - TypeURL string `json:"type_url,omitempty"` + AtType string `json:"@type,omitempty"` - // Must be a valid serialized protocol buffer of the above specified type. - // Format: byte - Value strfmt.Base64 `json:"value,omitempty"` + // protobuf any + ProtobufAny map[string]interface{} `json:"-"` } -// Validate validates this protobuf any -func (m *ProtobufAny) Validate(formats strfmt.Registry) error { - var res []error +// UnmarshalJSON unmarshals this object with additional properties from JSON +func (m *ProtobufAny) UnmarshalJSON(data []byte) error { + // stage 1, bind the properties + var stage1 struct { + + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. 
However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + AtType string `json:"@type,omitempty"` + } + if err := json.Unmarshal(data, &stage1); err != nil { + return err + } + var rcv ProtobufAny + + rcv.AtType = stage1.AtType + *m = rcv - if err := m.validateValue(formats); err != nil { - res = append(res, err) + // stage 2, remove properties and add to map + stage2 := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &stage2); err != nil { + return err } - if len(res) > 0 { - return errors.CompositeValidationError(res...) + delete(stage2, "@type") + // stage 3, add additional properties values + if len(stage2) > 0 { + result := make(map[string]interface{}) + for k, v := range stage2 { + var toadd interface{} + if err := json.Unmarshal(v, &toadd); err != nil { + return err + } + result[k] = toadd + } + m.ProtobufAny = result } + return nil } -func (m *ProtobufAny) validateValue(formats strfmt.Registry) error { +// MarshalJSON marshals this object with additional properties into a JSON object +func (m ProtobufAny) MarshalJSON() ([]byte, error) { + var stage1 struct { - if swag.IsZero(m.Value) { // not required - return nil + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. 
As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + AtType string `json:"@type,omitempty"` } - // Format "byte" (base64 string) is already validated when unmarshalled + stage1.AtType = m.AtType + + // make JSON object for known properties + props, err := json.Marshal(stage1) + if err != nil { + return nil, err + } + + if len(m.ProtobufAny) == 0 { // no additional properties + return props, nil + } + + // make JSON object for the additional properties + additional, err := json.Marshal(m.ProtobufAny) + if err != nil { + return nil, err + } + + if len(props) < 3 { // "{}": only additional properties + return additional, nil + } + + // concatenate the 2 objects + return swag.ConcatJSON(props, additional), nil +} + +// Validate validates this protobuf any +func (m *ProtobufAny) Validate(formats strfmt.Registry) error { + return nil +} +// ContextValidate validates this protobuf any based on context it is used +func (m *ProtobufAny) ContextValidate(ctx context.Context, formats strfmt.Registry) error { return nil } diff --git a/backend/api/v1beta1/go_http_client/job_model/protobuf_null_value.go b/backend/api/v1beta1/go_http_client/job_model/protobuf_null_value.go index c5f2c520a2b..b2ce74676bf 100644 --- a/backend/api/v1beta1/go_http_client/job_model/protobuf_null_value.go +++ b/backend/api/v1beta1/go_http_client/job_model/protobuf_null_value.go @@ -6,23 +6,33 @@ package job_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // ProtobufNullValue `NullValue` is a singleton enumeration to represent the null value for the // `Value` type union. // -// The JSON representation for `NullValue` is JSON `null`. +// The JSON representation for `NullValue` is JSON `null`. +// +// - NULL_VALUE: Null value. // -// - NULL_VALUE: Null value. // swagger:model protobufNullValue type ProtobufNullValue string +func NewProtobufNullValue(value ProtobufNullValue) *ProtobufNullValue { + return &value +} + +// Pointer returns a pointer to a freshly-allocated ProtobufNullValue. 
+func (m ProtobufNullValue) Pointer() *ProtobufNullValue { + return &m +} + const ( // ProtobufNullValueNULLVALUE captures enum value "NULL_VALUE" @@ -43,7 +53,7 @@ func init() { } func (m ProtobufNullValue) validateProtobufNullValueEnum(path, location string, value ProtobufNullValue) error { - if err := validate.Enum(path, location, value, protobufNullValueEnum); err != nil { + if err := validate.EnumCase(path, location, value, protobufNullValueEnum, true); err != nil { return err } return nil @@ -63,3 +73,8 @@ func (m ProtobufNullValue) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this protobuf null value based on context it is used +func (m ProtobufNullValue) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_client.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_client.go index 608585517d1..65dce2a6a14 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_client.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_client.go @@ -8,8 +8,7 @@ package pipeline_client import ( "github.com/go-openapi/runtime" httptransport "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service" ) @@ -56,9 +55,7 @@ func New(transport runtime.ClientTransport, formats strfmt.Registry) *Pipeline { cli := new(Pipeline) cli.Transport = transport - cli.PipelineService = pipeline_service.New(transport, formats) - return cli } @@ -103,7 +100,7 @@ func (cfg *TransportConfig) WithSchemes(schemes []string) *TransportConfig { // Pipeline is a client for pipeline type Pipeline struct { - PipelineService *pipeline_service.Client + PipelineService pipeline_service.ClientService Transport runtime.ClientTransport } @@ -111,7 +108,5 @@ type Pipeline struct { // SetTransport changes the transport on the client and all its subresources func (c *Pipeline) SetTransport(transport runtime.ClientTransport) { c.Transport = transport - c.PipelineService.SetTransport(transport) - } diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_client.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_client.go index 105a522f6d8..308f8e3e4fc 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_client.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_client.go @@ -7,15 +7,40 @@ package pipeline_service import ( "github.com/go-openapi/runtime" - - strfmt "github.com/go-openapi/strfmt" + httptransport "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" ) // New creates a new pipeline service API client. -func New(transport runtime.ClientTransport, formats strfmt.Registry) *Client { +func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService { return &Client{transport: transport, formats: formats} } +// New creates a new pipeline service API client with basic auth credentials. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - user: user for basic authentication header. +// - password: password for basic authentication header. 
+func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BasicAuth(user, password) + return &Client{transport: transport, formats: strfmt.Default} +} + +// New creates a new pipeline service API client with a bearer token for authentication. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - bearerToken: bearer token for Bearer authentication header. +func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BearerToken(bearerToken) + return &Client{transport: transport, formats: strfmt.Default} +} + /* Client for pipeline service API */ @@ -24,16 +49,47 @@ type Client struct { formats strfmt.Registry } +// ClientOption may be used to customize the behavior of Client methods. +type ClientOption func(*runtime.ClientOperation) + +// ClientService is the interface for Client methods +type ClientService interface { + PipelineServiceCreatePipelineV1(params *PipelineServiceCreatePipelineV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceCreatePipelineV1OK, error) + + PipelineServiceCreatePipelineVersionV1(params *PipelineServiceCreatePipelineVersionV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceCreatePipelineVersionV1OK, error) + + PipelineServiceDeletePipelineV1(params *PipelineServiceDeletePipelineV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceDeletePipelineV1OK, error) + + PipelineServiceDeletePipelineVersionV1(params *PipelineServiceDeletePipelineVersionV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceDeletePipelineVersionV1OK, error) + + PipelineServiceGetPipelineByNameV1(params *PipelineServiceGetPipelineByNameV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceGetPipelineByNameV1OK, error) + + PipelineServiceGetPipelineV1(params *PipelineServiceGetPipelineV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceGetPipelineV1OK, error) + + PipelineServiceGetPipelineVersionTemplate(params *PipelineServiceGetPipelineVersionTemplateParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceGetPipelineVersionTemplateOK, error) + + PipelineServiceGetPipelineVersionV1(params *PipelineServiceGetPipelineVersionV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceGetPipelineVersionV1OK, error) + + PipelineServiceGetTemplate(params *PipelineServiceGetTemplateParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceGetTemplateOK, error) + + PipelineServiceListPipelineVersionsV1(params *PipelineServiceListPipelineVersionsV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceListPipelineVersionsV1OK, error) + + PipelineServiceListPipelinesV1(params *PipelineServiceListPipelinesV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceListPipelinesV1OK, error) + + PipelineServiceUpdatePipelineDefaultVersionV1(params *PipelineServiceUpdatePipelineDefaultVersionV1Params, authInfo 
runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceUpdatePipelineDefaultVersionV1OK, error) + + SetTransport(transport runtime.ClientTransport) +} + /* PipelineServiceCreatePipelineV1 creates a pipeline */ -func (a *Client) PipelineServiceCreatePipelineV1(params *PipelineServiceCreatePipelineV1Params, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceCreatePipelineV1OK, error) { +func (a *Client) PipelineServiceCreatePipelineV1(params *PipelineServiceCreatePipelineV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceCreatePipelineV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceCreatePipelineV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_CreatePipelineV1", Method: "POST", PathPattern: "/apis/v1beta1/pipelines", @@ -45,24 +101,33 @@ func (a *Client) PipelineServiceCreatePipelineV1(params *PipelineServiceCreatePi AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceCreatePipelineV1OK), nil - + success, ok := result.(*PipelineServiceCreatePipelineV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceCreatePipelineV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* PipelineServiceCreatePipelineVersionV1 adds a pipeline version to the specified pipeline */ -func (a *Client) PipelineServiceCreatePipelineVersionV1(params *PipelineServiceCreatePipelineVersionV1Params, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceCreatePipelineVersionV1OK, error) { +func (a *Client) PipelineServiceCreatePipelineVersionV1(params *PipelineServiceCreatePipelineVersionV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceCreatePipelineVersionV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceCreatePipelineVersionV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_CreatePipelineVersionV1", Method: "POST", PathPattern: "/apis/v1beta1/pipeline_versions", @@ -74,24 +139,33 @@ func (a *Client) PipelineServiceCreatePipelineVersionV1(params *PipelineServiceC AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceCreatePipelineVersionV1OK), nil - + success, ok := result.(*PipelineServiceCreatePipelineVersionV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceCreatePipelineVersionV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* PipelineServiceDeletePipelineV1 deletes a pipeline and its pipeline versions */ -func (a *Client) PipelineServiceDeletePipelineV1(params *PipelineServiceDeletePipelineV1Params, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceDeletePipelineV1OK, error) { +func (a *Client) 
PipelineServiceDeletePipelineV1(params *PipelineServiceDeletePipelineV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceDeletePipelineV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceDeletePipelineV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_DeletePipelineV1", Method: "DELETE", PathPattern: "/apis/v1beta1/pipelines/{id}", @@ -103,24 +177,33 @@ func (a *Client) PipelineServiceDeletePipelineV1(params *PipelineServiceDeletePi AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceDeletePipelineV1OK), nil - + success, ok := result.(*PipelineServiceDeletePipelineV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceDeletePipelineV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* PipelineServiceDeletePipelineVersionV1 deletes a pipeline version by pipeline version ID if the deleted pipeline version is the default pipeline version the pipeline s default version changes to the pipeline s most recent pipeline version if there are no remaining pipeline versions the pipeline will have no default version examines the run service api ipynb notebook to learn more about creating a run using a pipeline version https github com kubeflow pipelines blob master tools benchmarks run service api ipynb */ -func (a *Client) PipelineServiceDeletePipelineVersionV1(params *PipelineServiceDeletePipelineVersionV1Params, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceDeletePipelineVersionV1OK, error) { +func (a *Client) PipelineServiceDeletePipelineVersionV1(params *PipelineServiceDeletePipelineVersionV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceDeletePipelineVersionV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceDeletePipelineVersionV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_DeletePipelineVersionV1", Method: "DELETE", PathPattern: "/apis/v1beta1/pipeline_versions/{version_id}", @@ -132,24 +215,33 @@ func (a *Client) PipelineServiceDeletePipelineVersionV1(params *PipelineServiceD AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceDeletePipelineVersionV1OK), nil - + success, ok := result.(*PipelineServiceDeletePipelineVersionV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceDeletePipelineVersionV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* PipelineServiceGetPipelineByNameV1 finds a pipeline by name and namespace */ -func (a *Client) PipelineServiceGetPipelineByNameV1(params *PipelineServiceGetPipelineByNameV1Params, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceGetPipelineByNameV1OK, error) { +func (a 
*Client) PipelineServiceGetPipelineByNameV1(params *PipelineServiceGetPipelineByNameV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceGetPipelineByNameV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceGetPipelineByNameV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_GetPipelineByNameV1", Method: "GET", PathPattern: "/apis/v1beta1/namespaces/{namespace}/pipelines/{name}", @@ -161,24 +253,33 @@ func (a *Client) PipelineServiceGetPipelineByNameV1(params *PipelineServiceGetPi AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceGetPipelineByNameV1OK), nil - + success, ok := result.(*PipelineServiceGetPipelineByNameV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceGetPipelineByNameV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* PipelineServiceGetPipelineV1 finds a specific pipeline by ID */ -func (a *Client) PipelineServiceGetPipelineV1(params *PipelineServiceGetPipelineV1Params, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceGetPipelineV1OK, error) { +func (a *Client) PipelineServiceGetPipelineV1(params *PipelineServiceGetPipelineV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceGetPipelineV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceGetPipelineV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_GetPipelineV1", Method: "GET", PathPattern: "/apis/v1beta1/pipelines/{id}", @@ -190,24 +291,33 @@ func (a *Client) PipelineServiceGetPipelineV1(params *PipelineServiceGetPipeline AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceGetPipelineV1OK), nil - + success, ok := result.(*PipelineServiceGetPipelineV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceGetPipelineV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* PipelineServiceGetPipelineVersionTemplate returns a y a m l template that contains the specified pipeline version s description parameters and metadata */ -func (a *Client) PipelineServiceGetPipelineVersionTemplate(params *PipelineServiceGetPipelineVersionTemplateParams, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceGetPipelineVersionTemplateOK, error) { +func (a *Client) PipelineServiceGetPipelineVersionTemplate(params *PipelineServiceGetPipelineVersionTemplateParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceGetPipelineVersionTemplateOK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceGetPipelineVersionTemplateParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := 
&runtime.ClientOperation{ ID: "PipelineService_GetPipelineVersionTemplate", Method: "GET", PathPattern: "/apis/v1beta1/pipeline_versions/{version_id}/templates", @@ -219,24 +329,33 @@ func (a *Client) PipelineServiceGetPipelineVersionTemplate(params *PipelineServi AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceGetPipelineVersionTemplateOK), nil - + success, ok := result.(*PipelineServiceGetPipelineVersionTemplateOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceGetPipelineVersionTemplateDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* PipelineServiceGetPipelineVersionV1 gets a pipeline version by pipeline version ID */ -func (a *Client) PipelineServiceGetPipelineVersionV1(params *PipelineServiceGetPipelineVersionV1Params, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceGetPipelineVersionV1OK, error) { +func (a *Client) PipelineServiceGetPipelineVersionV1(params *PipelineServiceGetPipelineVersionV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceGetPipelineVersionV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceGetPipelineVersionV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_GetPipelineVersionV1", Method: "GET", PathPattern: "/apis/v1beta1/pipeline_versions/{version_id}", @@ -248,24 +367,33 @@ func (a *Client) PipelineServiceGetPipelineVersionV1(params *PipelineServiceGetP AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceGetPipelineVersionV1OK), nil - + success, ok := result.(*PipelineServiceGetPipelineVersionV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceGetPipelineVersionV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* PipelineServiceGetTemplate returns a single y a m l template that contains the description parameters and metadata associated with the pipeline provided */ -func (a *Client) PipelineServiceGetTemplate(params *PipelineServiceGetTemplateParams, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceGetTemplateOK, error) { +func (a *Client) PipelineServiceGetTemplate(params *PipelineServiceGetTemplateParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceGetTemplateOK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceGetTemplateParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_GetTemplate", Method: "GET", PathPattern: "/apis/v1beta1/pipelines/{id}/templates", @@ -277,24 +405,33 @@ func (a *Client) PipelineServiceGetTemplate(params *PipelineServiceGetTemplatePa AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + 
result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceGetTemplateOK), nil - + success, ok := result.(*PipelineServiceGetTemplateOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceGetTemplateDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* PipelineServiceListPipelineVersionsV1 lists all pipeline versions of a given pipeline */ -func (a *Client) PipelineServiceListPipelineVersionsV1(params *PipelineServiceListPipelineVersionsV1Params, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceListPipelineVersionsV1OK, error) { +func (a *Client) PipelineServiceListPipelineVersionsV1(params *PipelineServiceListPipelineVersionsV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceListPipelineVersionsV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceListPipelineVersionsV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_ListPipelineVersionsV1", Method: "GET", PathPattern: "/apis/v1beta1/pipeline_versions", @@ -306,24 +443,33 @@ func (a *Client) PipelineServiceListPipelineVersionsV1(params *PipelineServiceLi AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceListPipelineVersionsV1OK), nil - + success, ok := result.(*PipelineServiceListPipelineVersionsV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceListPipelineVersionsV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* PipelineServiceListPipelinesV1 finds all pipelines */ -func (a *Client) PipelineServiceListPipelinesV1(params *PipelineServiceListPipelinesV1Params, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceListPipelinesV1OK, error) { +func (a *Client) PipelineServiceListPipelinesV1(params *PipelineServiceListPipelinesV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceListPipelinesV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceListPipelinesV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_ListPipelinesV1", Method: "GET", PathPattern: "/apis/v1beta1/pipelines", @@ -335,24 +481,33 @@ func (a *Client) PipelineServiceListPipelinesV1(params *PipelineServiceListPipel AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceListPipelinesV1OK), nil - + success, ok := result.(*PipelineServiceListPipelinesV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceListPipelinesV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* 
PipelineServiceUpdatePipelineDefaultVersionV1 updates the default pipeline version of a specific pipeline */ -func (a *Client) PipelineServiceUpdatePipelineDefaultVersionV1(params *PipelineServiceUpdatePipelineDefaultVersionV1Params, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceUpdatePipelineDefaultVersionV1OK, error) { +func (a *Client) PipelineServiceUpdatePipelineDefaultVersionV1(params *PipelineServiceUpdatePipelineDefaultVersionV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceUpdatePipelineDefaultVersionV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceUpdatePipelineDefaultVersionV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_UpdatePipelineDefaultVersionV1", Method: "POST", PathPattern: "/apis/v1beta1/pipelines/{pipeline_id}/default_version/{version_id}", @@ -364,12 +519,22 @@ func (a *Client) PipelineServiceUpdatePipelineDefaultVersionV1(params *PipelineS AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceUpdatePipelineDefaultVersionV1OK), nil - + success, ok := result.(*PipelineServiceUpdatePipelineDefaultVersionV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceUpdatePipelineDefaultVersionV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } // SetTransport changes the transport on the client diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_v1_parameters.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_v1_parameters.go index 5b1b9ef407d..51df4dd121c 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_v1_parameters.go @@ -13,64 +13,79 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" ) -// NewPipelineServiceCreatePipelineV1Params creates a new PipelineServiceCreatePipelineV1Params object -// with the default values initialized. +// NewPipelineServiceCreatePipelineV1Params creates a new PipelineServiceCreatePipelineV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewPipelineServiceCreatePipelineV1Params() *PipelineServiceCreatePipelineV1Params { - var () return &PipelineServiceCreatePipelineV1Params{ - timeout: cr.DefaultTimeout, } } // NewPipelineServiceCreatePipelineV1ParamsWithTimeout creates a new PipelineServiceCreatePipelineV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceCreatePipelineV1ParamsWithTimeout(timeout time.Duration) *PipelineServiceCreatePipelineV1Params { - var () return &PipelineServiceCreatePipelineV1Params{ - timeout: timeout, } } // NewPipelineServiceCreatePipelineV1ParamsWithContext creates a new PipelineServiceCreatePipelineV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceCreatePipelineV1ParamsWithContext(ctx context.Context) *PipelineServiceCreatePipelineV1Params { - var () return &PipelineServiceCreatePipelineV1Params{ - Context: ctx, } } // NewPipelineServiceCreatePipelineV1ParamsWithHTTPClient creates a new PipelineServiceCreatePipelineV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceCreatePipelineV1ParamsWithHTTPClient(client *http.Client) *PipelineServiceCreatePipelineV1Params { - var () return &PipelineServiceCreatePipelineV1Params{ HTTPClient: client, } } -/*PipelineServiceCreatePipelineV1Params contains all the parameters to send to the API endpoint -for the pipeline service create pipeline v1 operation typically these are written to a http.Request +/* +PipelineServiceCreatePipelineV1Params contains all the parameters to send to the API endpoint + + for the pipeline service create pipeline v1 operation. + + Typically these are written to a http.Request. */ type PipelineServiceCreatePipelineV1Params struct { - /*Body*/ - Body *pipeline_model.APIPipeline + // Pipeline. + Pipeline *pipeline_model.APIPipeline timeout time.Duration Context context.Context HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service create pipeline v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceCreatePipelineV1Params) WithDefaults() *PipelineServiceCreatePipelineV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service create pipeline v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *PipelineServiceCreatePipelineV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the pipeline service create pipeline v1 params func (o *PipelineServiceCreatePipelineV1Params) WithTimeout(timeout time.Duration) *PipelineServiceCreatePipelineV1Params { o.SetTimeout(timeout) @@ -104,15 +119,15 @@ func (o *PipelineServiceCreatePipelineV1Params) SetHTTPClient(client *http.Clien o.HTTPClient = client } -// WithBody adds the body to the pipeline service create pipeline v1 params -func (o *PipelineServiceCreatePipelineV1Params) WithBody(body *pipeline_model.APIPipeline) *PipelineServiceCreatePipelineV1Params { - o.SetBody(body) +// WithPipeline adds the pipeline to the pipeline service create pipeline v1 params +func (o *PipelineServiceCreatePipelineV1Params) WithPipeline(pipeline *pipeline_model.APIPipeline) *PipelineServiceCreatePipelineV1Params { + o.SetPipeline(pipeline) return o } -// SetBody adds the body to the pipeline service create pipeline v1 params -func (o *PipelineServiceCreatePipelineV1Params) SetBody(body *pipeline_model.APIPipeline) { - o.Body = body +// SetPipeline adds the pipeline to the pipeline service create pipeline v1 params +func (o *PipelineServiceCreatePipelineV1Params) SetPipeline(pipeline *pipeline_model.APIPipeline) { + o.Pipeline = pipeline } // WriteToRequest writes these params to a swagger request @@ -122,9 +137,8 @@ func (o *PipelineServiceCreatePipelineV1Params) WriteToRequest(r runtime.ClientR return err } var res []error - - if o.Body != nil { - if err := r.SetBodyParam(o.Body); err != nil { + if o.Pipeline != nil { + if err := r.SetBodyParam(o.Pipeline); err != nil { return err } } diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_v1_responses.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_v1_responses.go index 9b5901f05c6..33375608a78 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_v1_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" ) // PipelineServiceCreatePipelineV1Reader is a Reader for the PipelineServiceCreatePipelineV1 structure. @@ -24,14 +24,12 @@ type PipelineServiceCreatePipelineV1Reader struct { // ReadResponse reads a server response into the received o. 
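Note (editorial, not part of the generated diff): CreatePipelineV1 renames the body parameter, so callers move from WithBody/SetBody to WithPipeline/SetPipeline, and every params struct now has a WithDefaults hook. A minimal caller-side sketch under those assumptions; `svc` stands for the generated client (the ClientService interface), and the pipeline name is a placeholder.

package example

import (
	"context"
	"time"

	"github.com/go-openapi/runtime"

	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service"
	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model"
)

// createPipeline builds the params with the renamed setter and reads the
// created pipeline back from the OK payload.
func createPipeline(ctx context.Context, svc pipeline_service.ClientService, auth runtime.ClientAuthInfoWriter) (*pipeline_model.APIPipeline, error) {
	params := pipeline_service.NewPipelineServiceCreatePipelineV1Params().
		WithDefaults().
		WithContext(ctx).
		WithTimeout(30 * time.Second).
		WithPipeline(&pipeline_model.APIPipeline{
			Name:        "demo-pipeline", // placeholder
			Description: "created via the regenerated v1beta1 client",
		})

	resp, err := svc.PipelineServiceCreatePipelineV1(params, auth)
	if err != nil {
		return nil, err
	}
	return resp.GetPayload(), nil // *pipeline_model.APIPipeline with the server-assigned ID
}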
func (o *PipelineServiceCreatePipelineV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceCreatePipelineV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceCreatePipelineV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceCreatePipelineV1OK() *PipelineServiceCreatePipelineV1OK { return &PipelineServiceCreatePipelineV1OK{} } -/*PipelineServiceCreatePipelineV1OK handles this case with default header values. +/* +PipelineServiceCreatePipelineV1OK describes a response with status code 200, with default header values. A successful response. */ @@ -57,8 +56,48 @@ type PipelineServiceCreatePipelineV1OK struct { Payload *pipeline_model.APIPipeline } +// IsSuccess returns true when this pipeline service create pipeline v1 o k response has a 2xx status code +func (o *PipelineServiceCreatePipelineV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service create pipeline v1 o k response has a 3xx status code +func (o *PipelineServiceCreatePipelineV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service create pipeline v1 o k response has a 4xx status code +func (o *PipelineServiceCreatePipelineV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service create pipeline v1 o k response has a 5xx status code +func (o *PipelineServiceCreatePipelineV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service create pipeline v1 o k response a status code equal to that given +func (o *PipelineServiceCreatePipelineV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service create pipeline v1 o k response +func (o *PipelineServiceCreatePipelineV1OK) Code() int { + return 200 +} + func (o *PipelineServiceCreatePipelineV1OK) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/pipelines][%d] pipelineServiceCreatePipelineV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/pipelines][%d] pipelineServiceCreatePipelineV1OK %s", 200, payload) +} + +func (o *PipelineServiceCreatePipelineV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/pipelines][%d] pipelineServiceCreatePipelineV1OK %s", 200, payload) +} + +func (o *PipelineServiceCreatePipelineV1OK) GetPayload() *pipeline_model.APIPipeline { + return o.Payload } func (o *PipelineServiceCreatePipelineV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewPipelineServiceCreatePipelineV1Default(code int) *PipelineServiceCreateP } } -/*PipelineServiceCreatePipelineV1Default handles this case with default header values. +/* +PipelineServiceCreatePipelineV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceCreatePipelineV1Default struct { _statusCode int - Payload *pipeline_model.GatewayruntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service create pipeline v1 default response has a 2xx status code +func (o *PipelineServiceCreatePipelineV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service create pipeline v1 default response has a 3xx status code +func (o *PipelineServiceCreatePipelineV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service create pipeline v1 default response has a 4xx status code +func (o *PipelineServiceCreatePipelineV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service create pipeline v1 default response has a 5xx status code +func (o *PipelineServiceCreatePipelineV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service create pipeline v1 default response a status code equal to that given +func (o *PipelineServiceCreatePipelineV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service create pipeline v1 default response @@ -96,12 +161,22 @@ func (o *PipelineServiceCreatePipelineV1Default) Code() int { } func (o *PipelineServiceCreatePipelineV1Default) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/pipelines][%d] PipelineService_CreatePipelineV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/pipelines][%d] PipelineService_CreatePipelineV1 default %s", o._statusCode, payload) +} + +func (o *PipelineServiceCreatePipelineV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/pipelines][%d] PipelineService_CreatePipelineV1 default %s", o._statusCode, payload) +} + +func (o *PipelineServiceCreatePipelineV1Default) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceCreatePipelineV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.GatewayruntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_version_v1_parameters.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_version_v1_parameters.go index 9cba6ffbf75..ade287ab113 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_version_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_version_v1_parameters.go @@ -13,68 +13,83 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" ) -// 
NewPipelineServiceCreatePipelineVersionV1Params creates a new PipelineServiceCreatePipelineVersionV1Params object -// with the default values initialized. +// NewPipelineServiceCreatePipelineVersionV1Params creates a new PipelineServiceCreatePipelineVersionV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceCreatePipelineVersionV1Params() *PipelineServiceCreatePipelineVersionV1Params { - var () return &PipelineServiceCreatePipelineVersionV1Params{ - timeout: cr.DefaultTimeout, } } // NewPipelineServiceCreatePipelineVersionV1ParamsWithTimeout creates a new PipelineServiceCreatePipelineVersionV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceCreatePipelineVersionV1ParamsWithTimeout(timeout time.Duration) *PipelineServiceCreatePipelineVersionV1Params { - var () return &PipelineServiceCreatePipelineVersionV1Params{ - timeout: timeout, } } // NewPipelineServiceCreatePipelineVersionV1ParamsWithContext creates a new PipelineServiceCreatePipelineVersionV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceCreatePipelineVersionV1ParamsWithContext(ctx context.Context) *PipelineServiceCreatePipelineVersionV1Params { - var () return &PipelineServiceCreatePipelineVersionV1Params{ - Context: ctx, } } // NewPipelineServiceCreatePipelineVersionV1ParamsWithHTTPClient creates a new PipelineServiceCreatePipelineVersionV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceCreatePipelineVersionV1ParamsWithHTTPClient(client *http.Client) *PipelineServiceCreatePipelineVersionV1Params { - var () return &PipelineServiceCreatePipelineVersionV1Params{ HTTPClient: client, } } -/*PipelineServiceCreatePipelineVersionV1Params contains all the parameters to send to the API endpoint -for the pipeline service create pipeline version v1 operation typically these are written to a http.Request +/* +PipelineServiceCreatePipelineVersionV1Params contains all the parameters to send to the API endpoint + + for the pipeline service create pipeline version v1 operation. + + Typically these are written to a http.Request. */ type PipelineServiceCreatePipelineVersionV1Params struct { - /*Body - ResourceReference inside PipelineVersion specifies the pipeline that this - version belongs to. + /* Version. + ResourceReference inside PipelineVersion specifies the pipeline that this + version belongs to. */ - Body *pipeline_model.APIPipelineVersion + Version *pipeline_model.APIPipelineVersion timeout time.Duration Context context.Context HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service create pipeline version v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceCreatePipelineVersionV1Params) WithDefaults() *PipelineServiceCreatePipelineVersionV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service create pipeline version v1 params (not the query body). 
+// +// All values with no default are reset to their zero value. +func (o *PipelineServiceCreatePipelineVersionV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the pipeline service create pipeline version v1 params func (o *PipelineServiceCreatePipelineVersionV1Params) WithTimeout(timeout time.Duration) *PipelineServiceCreatePipelineVersionV1Params { o.SetTimeout(timeout) @@ -108,15 +123,15 @@ func (o *PipelineServiceCreatePipelineVersionV1Params) SetHTTPClient(client *htt o.HTTPClient = client } -// WithBody adds the body to the pipeline service create pipeline version v1 params -func (o *PipelineServiceCreatePipelineVersionV1Params) WithBody(body *pipeline_model.APIPipelineVersion) *PipelineServiceCreatePipelineVersionV1Params { - o.SetBody(body) +// WithVersion adds the version to the pipeline service create pipeline version v1 params +func (o *PipelineServiceCreatePipelineVersionV1Params) WithVersion(version *pipeline_model.APIPipelineVersion) *PipelineServiceCreatePipelineVersionV1Params { + o.SetVersion(version) return o } -// SetBody adds the body to the pipeline service create pipeline version v1 params -func (o *PipelineServiceCreatePipelineVersionV1Params) SetBody(body *pipeline_model.APIPipelineVersion) { - o.Body = body +// SetVersion adds the version to the pipeline service create pipeline version v1 params +func (o *PipelineServiceCreatePipelineVersionV1Params) SetVersion(version *pipeline_model.APIPipelineVersion) { + o.Version = version } // WriteToRequest writes these params to a swagger request @@ -126,9 +141,8 @@ func (o *PipelineServiceCreatePipelineVersionV1Params) WriteToRequest(r runtime. return err } var res []error - - if o.Body != nil { - if err := r.SetBodyParam(o.Body); err != nil { + if o.Version != nil { + if err := r.SetBodyParam(o.Version); err != nil { return err } } diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_version_v1_responses.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_version_v1_responses.go index fcf00e33037..571009a2fbb 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_version_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_version_v1_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" ) // PipelineServiceCreatePipelineVersionV1Reader is a Reader for the PipelineServiceCreatePipelineVersionV1 structure. @@ -24,14 +24,12 @@ type PipelineServiceCreatePipelineVersionV1Reader struct { // ReadResponse reads a server response into the received o. 
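Note (editorial, not part of the generated diff): CreatePipelineVersionV1 gets the same rename, Body to Version. Per the parameter doc above, the ResourceReference ties the new version to its parent pipeline. The enum values are written as raw string casts here in case the generated constant names differ; `svc` and the version name are placeholders.

package example

import (
	"github.com/go-openapi/runtime"

	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service"
	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model"
)

// createVersion creates a new version under an existing pipeline using the
// renamed WithVersion setter.
func createVersion(svc pipeline_service.ClientService, auth runtime.ClientAuthInfoWriter, pipelineID string) (*pipeline_model.APIPipelineVersion, error) {
	version := &pipeline_model.APIPipelineVersion{
		Name: "v2", // placeholder version name
		ResourceReferences: []*pipeline_model.APIResourceReference{{
			Key: &pipeline_model.APIResourceKey{
				Type: pipeline_model.APIResourceType("PIPELINE"),
				ID:   pipelineID,
			},
			Relationship: pipeline_model.APIRelationship("OWNER"),
		}},
	}

	params := pipeline_service.NewPipelineServiceCreatePipelineVersionV1Params().WithVersion(version)
	resp, err := svc.PipelineServiceCreatePipelineVersionV1(params, auth)
	if err != nil {
		return nil, err
	}
	return resp.GetPayload(), nil
}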
func (o *PipelineServiceCreatePipelineVersionV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceCreatePipelineVersionV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceCreatePipelineVersionV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceCreatePipelineVersionV1OK() *PipelineServiceCreatePipelin return &PipelineServiceCreatePipelineVersionV1OK{} } -/*PipelineServiceCreatePipelineVersionV1OK handles this case with default header values. +/* +PipelineServiceCreatePipelineVersionV1OK describes a response with status code 200, with default header values. A successful response. */ @@ -57,8 +56,48 @@ type PipelineServiceCreatePipelineVersionV1OK struct { Payload *pipeline_model.APIPipelineVersion } +// IsSuccess returns true when this pipeline service create pipeline version v1 o k response has a 2xx status code +func (o *PipelineServiceCreatePipelineVersionV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service create pipeline version v1 o k response has a 3xx status code +func (o *PipelineServiceCreatePipelineVersionV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service create pipeline version v1 o k response has a 4xx status code +func (o *PipelineServiceCreatePipelineVersionV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service create pipeline version v1 o k response has a 5xx status code +func (o *PipelineServiceCreatePipelineVersionV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service create pipeline version v1 o k response a status code equal to that given +func (o *PipelineServiceCreatePipelineVersionV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service create pipeline version v1 o k response +func (o *PipelineServiceCreatePipelineVersionV1OK) Code() int { + return 200 +} + func (o *PipelineServiceCreatePipelineVersionV1OK) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/pipeline_versions][%d] pipelineServiceCreatePipelineVersionV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/pipeline_versions][%d] pipelineServiceCreatePipelineVersionV1OK %s", 200, payload) +} + +func (o *PipelineServiceCreatePipelineVersionV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/pipeline_versions][%d] pipelineServiceCreatePipelineVersionV1OK %s", 200, payload) +} + +func (o *PipelineServiceCreatePipelineVersionV1OK) GetPayload() *pipeline_model.APIPipelineVersion { + return o.Payload } func (o *PipelineServiceCreatePipelineVersionV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewPipelineServiceCreatePipelineVersionV1Default(code int) *PipelineService } } -/*PipelineServiceCreatePipelineVersionV1Default handles this case with default header values. +/* +PipelineServiceCreatePipelineVersionV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceCreatePipelineVersionV1Default struct { _statusCode int - Payload *pipeline_model.GatewayruntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service create pipeline version v1 default response has a 2xx status code +func (o *PipelineServiceCreatePipelineVersionV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service create pipeline version v1 default response has a 3xx status code +func (o *PipelineServiceCreatePipelineVersionV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service create pipeline version v1 default response has a 4xx status code +func (o *PipelineServiceCreatePipelineVersionV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service create pipeline version v1 default response has a 5xx status code +func (o *PipelineServiceCreatePipelineVersionV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service create pipeline version v1 default response a status code equal to that given +func (o *PipelineServiceCreatePipelineVersionV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service create pipeline version v1 default response @@ -96,12 +161,22 @@ func (o *PipelineServiceCreatePipelineVersionV1Default) Code() int { } func (o *PipelineServiceCreatePipelineVersionV1Default) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/pipeline_versions][%d] PipelineService_CreatePipelineVersionV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/pipeline_versions][%d] PipelineService_CreatePipelineVersionV1 default %s", o._statusCode, payload) +} + +func (o *PipelineServiceCreatePipelineVersionV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/pipeline_versions][%d] PipelineService_CreatePipelineVersionV1 default %s", o._statusCode, payload) +} + +func (o *PipelineServiceCreatePipelineVersionV1Default) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceCreatePipelineVersionV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.GatewayruntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_v1_parameters.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_v1_parameters.go index b568eda529c..e6aec22fb4c 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_v1_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewPipelineServiceDeletePipelineV1Params creates a new 
PipelineServiceDeletePipelineV1Params object -// with the default values initialized. +// NewPipelineServiceDeletePipelineV1Params creates a new PipelineServiceDeletePipelineV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceDeletePipelineV1Params() *PipelineServiceDeletePipelineV1Params { - var () return &PipelineServiceDeletePipelineV1Params{ - timeout: cr.DefaultTimeout, } } // NewPipelineServiceDeletePipelineV1ParamsWithTimeout creates a new PipelineServiceDeletePipelineV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceDeletePipelineV1ParamsWithTimeout(timeout time.Duration) *PipelineServiceDeletePipelineV1Params { - var () return &PipelineServiceDeletePipelineV1Params{ - timeout: timeout, } } // NewPipelineServiceDeletePipelineV1ParamsWithContext creates a new PipelineServiceDeletePipelineV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceDeletePipelineV1ParamsWithContext(ctx context.Context) *PipelineServiceDeletePipelineV1Params { - var () return &PipelineServiceDeletePipelineV1Params{ - Context: ctx, } } // NewPipelineServiceDeletePipelineV1ParamsWithHTTPClient creates a new PipelineServiceDeletePipelineV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceDeletePipelineV1ParamsWithHTTPClient(client *http.Client) *PipelineServiceDeletePipelineV1Params { - var () return &PipelineServiceDeletePipelineV1Params{ HTTPClient: client, } } -/*PipelineServiceDeletePipelineV1Params contains all the parameters to send to the API endpoint -for the pipeline service delete pipeline v1 operation typically these are written to a http.Request +/* +PipelineServiceDeletePipelineV1Params contains all the parameters to send to the API endpoint + + for the pipeline service delete pipeline v1 operation. + + Typically these are written to a http.Request. */ type PipelineServiceDeletePipelineV1Params struct { - /*ID - The ID of the pipeline to be deleted. + /* ID. + The ID of the pipeline to be deleted. */ ID string @@ -72,6 +72,21 @@ type PipelineServiceDeletePipelineV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service delete pipeline v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceDeletePipelineV1Params) WithDefaults() *PipelineServiceDeletePipelineV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service delete pipeline v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
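Note (editorial, not part of the generated diff): the *Default response payload changes from GatewayruntimeError to GooglerpcStatus across these operations, and the responses gain Code/GetPayload and the Is* classifiers. The sketch below assumes the reader surfaces the Default response as the returned error, as in stock go-swagger output, and that GooglerpcStatus follows the usual google.rpc.Status shape (Code, Message, Details).

package example

import (
	"errors"
	"fmt"

	"github.com/go-openapi/runtime"

	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service"
)

// deletePipeline inspects the regenerated Default response on failure.
func deletePipeline(svc pipeline_service.ClientService, auth runtime.ClientAuthInfoWriter, id string) error {
	params := pipeline_service.NewPipelineServiceDeletePipelineV1Params().WithID(id)

	if _, err := svc.PipelineServiceDeletePipelineV1(params, auth); err != nil {
		var apiErr *pipeline_service.PipelineServiceDeletePipelineV1Default
		if errors.As(err, &apiErr) {
			// The Is* helpers classify the HTTP status for the caller.
			if apiErr.IsClientError() {
				return fmt.Errorf("delete rejected (HTTP %d): %s", apiErr.Code(), apiErr.GetPayload().Message)
			}
			return fmt.Errorf("server error (HTTP %d): %s", apiErr.Code(), apiErr.GetPayload().Message)
		}
		return err
	}
	return nil
}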
+func (o *PipelineServiceDeletePipelineV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the pipeline service delete pipeline v1 params func (o *PipelineServiceDeletePipelineV1Params) WithTimeout(timeout time.Duration) *PipelineServiceDeletePipelineV1Params { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_v1_responses.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_v1_responses.go index 43624f78e20..15b9e072946 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_v1_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" ) // PipelineServiceDeletePipelineV1Reader is a Reader for the PipelineServiceDeletePipelineV1 structure. @@ -24,14 +24,12 @@ type PipelineServiceDeletePipelineV1Reader struct { // ReadResponse reads a server response into the received o. func (o *PipelineServiceDeletePipelineV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceDeletePipelineV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceDeletePipelineV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceDeletePipelineV1OK() *PipelineServiceDeletePipelineV1OK { return &PipelineServiceDeletePipelineV1OK{} } -/*PipelineServiceDeletePipelineV1OK handles this case with default header values. +/* +PipelineServiceDeletePipelineV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type PipelineServiceDeletePipelineV1OK struct { Payload interface{} } +// IsSuccess returns true when this pipeline service delete pipeline v1 o k response has a 2xx status code +func (o *PipelineServiceDeletePipelineV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service delete pipeline v1 o k response has a 3xx status code +func (o *PipelineServiceDeletePipelineV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service delete pipeline v1 o k response has a 4xx status code +func (o *PipelineServiceDeletePipelineV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service delete pipeline v1 o k response has a 5xx status code +func (o *PipelineServiceDeletePipelineV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service delete pipeline v1 o k response a status code equal to that given +func (o *PipelineServiceDeletePipelineV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service delete pipeline v1 o k response +func (o *PipelineServiceDeletePipelineV1OK) Code() int { + return 200 +} + func (o *PipelineServiceDeletePipelineV1OK) Error() string { - return fmt.Sprintf("[DELETE /apis/v1beta1/pipelines/{id}][%d] pipelineServiceDeletePipelineV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v1beta1/pipelines/{id}][%d] pipelineServiceDeletePipelineV1OK %s", 200, payload) +} + +func (o *PipelineServiceDeletePipelineV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v1beta1/pipelines/{id}][%d] pipelineServiceDeletePipelineV1OK %s", 200, payload) +} + +func (o *PipelineServiceDeletePipelineV1OK) GetPayload() interface{} { + return o.Payload } func (o *PipelineServiceDeletePipelineV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewPipelineServiceDeletePipelineV1Default(code int) *PipelineServiceDeleteP } } -/*PipelineServiceDeletePipelineV1Default handles this case with default header values. +/* +PipelineServiceDeletePipelineV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceDeletePipelineV1Default struct { _statusCode int - Payload *pipeline_model.GatewayruntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service delete pipeline v1 default response has a 2xx status code +func (o *PipelineServiceDeletePipelineV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service delete pipeline v1 default response has a 3xx status code +func (o *PipelineServiceDeletePipelineV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service delete pipeline v1 default response has a 4xx status code +func (o *PipelineServiceDeletePipelineV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service delete pipeline v1 default response has a 5xx status code +func (o *PipelineServiceDeletePipelineV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service delete pipeline v1 default response a status code equal to that given +func (o *PipelineServiceDeletePipelineV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service delete pipeline v1 default response @@ -94,12 +159,22 @@ func (o *PipelineServiceDeletePipelineV1Default) Code() int { } func (o *PipelineServiceDeletePipelineV1Default) Error() string { - return fmt.Sprintf("[DELETE /apis/v1beta1/pipelines/{id}][%d] PipelineService_DeletePipelineV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v1beta1/pipelines/{id}][%d] PipelineService_DeletePipelineV1 default %s", o._statusCode, payload) +} + +func (o *PipelineServiceDeletePipelineV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v1beta1/pipelines/{id}][%d] PipelineService_DeletePipelineV1 default %s", o._statusCode, payload) +} + +func (o *PipelineServiceDeletePipelineV1Default) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceDeletePipelineV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.GatewayruntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_version_v1_parameters.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_version_v1_parameters.go index 985ff77c5fe..65ef10a2563 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_version_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_version_v1_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewPipelineServiceDeletePipelineVersionV1Params creates a new PipelineServiceDeletePipelineVersionV1Params object -// with the default values initialized. 
+// NewPipelineServiceDeletePipelineVersionV1Params creates a new PipelineServiceDeletePipelineVersionV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceDeletePipelineVersionV1Params() *PipelineServiceDeletePipelineVersionV1Params { - var () return &PipelineServiceDeletePipelineVersionV1Params{ - timeout: cr.DefaultTimeout, } } // NewPipelineServiceDeletePipelineVersionV1ParamsWithTimeout creates a new PipelineServiceDeletePipelineVersionV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceDeletePipelineVersionV1ParamsWithTimeout(timeout time.Duration) *PipelineServiceDeletePipelineVersionV1Params { - var () return &PipelineServiceDeletePipelineVersionV1Params{ - timeout: timeout, } } // NewPipelineServiceDeletePipelineVersionV1ParamsWithContext creates a new PipelineServiceDeletePipelineVersionV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceDeletePipelineVersionV1ParamsWithContext(ctx context.Context) *PipelineServiceDeletePipelineVersionV1Params { - var () return &PipelineServiceDeletePipelineVersionV1Params{ - Context: ctx, } } // NewPipelineServiceDeletePipelineVersionV1ParamsWithHTTPClient creates a new PipelineServiceDeletePipelineVersionV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceDeletePipelineVersionV1ParamsWithHTTPClient(client *http.Client) *PipelineServiceDeletePipelineVersionV1Params { - var () return &PipelineServiceDeletePipelineVersionV1Params{ HTTPClient: client, } } -/*PipelineServiceDeletePipelineVersionV1Params contains all the parameters to send to the API endpoint -for the pipeline service delete pipeline version v1 operation typically these are written to a http.Request +/* +PipelineServiceDeletePipelineVersionV1Params contains all the parameters to send to the API endpoint + + for the pipeline service delete pipeline version v1 operation. + + Typically these are written to a http.Request. */ type PipelineServiceDeletePipelineVersionV1Params struct { - /*VersionID - The ID of the pipeline version to be deleted. + /* VersionID. + The ID of the pipeline version to be deleted. */ VersionID string @@ -72,6 +72,21 @@ type PipelineServiceDeletePipelineVersionV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service delete pipeline version v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceDeletePipelineVersionV1Params) WithDefaults() *PipelineServiceDeletePipelineVersionV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service delete pipeline version v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
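Note (editorial, not part of the generated diff): one practical use of the new Is* classifiers is a retry policy that distinguishes transient 5xx answers from definitive 4xx ones. A sketch under the same assumptions as the previous note; the attempt count and backoff are placeholders, not part of the generated client.

package example

import (
	"errors"
	"time"

	"github.com/go-openapi/runtime"

	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service"
)

// deleteVersionWithRetry retries server-side failures and gives up on client errors.
func deleteVersionWithRetry(svc pipeline_service.ClientService, auth runtime.ClientAuthInfoWriter, versionID string) error {
	params := pipeline_service.NewPipelineServiceDeletePipelineVersionV1Params().WithVersionID(versionID)

	var lastErr error
	for attempt := 0; attempt < 3; attempt++ {
		_, err := svc.PipelineServiceDeletePipelineVersionV1(params, auth)
		if err == nil {
			return nil
		}
		lastErr = err

		var apiErr *pipeline_service.PipelineServiceDeletePipelineVersionV1Default
		if errors.As(err, &apiErr) && apiErr.IsClientError() {
			return err // 4xx: retrying will not help
		}
		time.Sleep(time.Duration(attempt+1) * time.Second) // placeholder backoff
	}
	return lastErr
}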
+func (o *PipelineServiceDeletePipelineVersionV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the pipeline service delete pipeline version v1 params func (o *PipelineServiceDeletePipelineVersionV1Params) WithTimeout(timeout time.Duration) *PipelineServiceDeletePipelineVersionV1Params { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_version_v1_responses.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_version_v1_responses.go index 941f8ad9420..32f4ead486b 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_version_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_version_v1_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" ) // PipelineServiceDeletePipelineVersionV1Reader is a Reader for the PipelineServiceDeletePipelineVersionV1 structure. @@ -24,14 +24,12 @@ type PipelineServiceDeletePipelineVersionV1Reader struct { // ReadResponse reads a server response into the received o. func (o *PipelineServiceDeletePipelineVersionV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceDeletePipelineVersionV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceDeletePipelineVersionV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceDeletePipelineVersionV1OK() *PipelineServiceDeletePipelin return &PipelineServiceDeletePipelineVersionV1OK{} } -/*PipelineServiceDeletePipelineVersionV1OK handles this case with default header values. +/* +PipelineServiceDeletePipelineVersionV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type PipelineServiceDeletePipelineVersionV1OK struct { Payload interface{} } +// IsSuccess returns true when this pipeline service delete pipeline version v1 o k response has a 2xx status code +func (o *PipelineServiceDeletePipelineVersionV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service delete pipeline version v1 o k response has a 3xx status code +func (o *PipelineServiceDeletePipelineVersionV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service delete pipeline version v1 o k response has a 4xx status code +func (o *PipelineServiceDeletePipelineVersionV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service delete pipeline version v1 o k response has a 5xx status code +func (o *PipelineServiceDeletePipelineVersionV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service delete pipeline version v1 o k response a status code equal to that given +func (o *PipelineServiceDeletePipelineVersionV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service delete pipeline version v1 o k response +func (o *PipelineServiceDeletePipelineVersionV1OK) Code() int { + return 200 +} + func (o *PipelineServiceDeletePipelineVersionV1OK) Error() string { - return fmt.Sprintf("[DELETE /apis/v1beta1/pipeline_versions/{version_id}][%d] pipelineServiceDeletePipelineVersionV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v1beta1/pipeline_versions/{version_id}][%d] pipelineServiceDeletePipelineVersionV1OK %s", 200, payload) +} + +func (o *PipelineServiceDeletePipelineVersionV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v1beta1/pipeline_versions/{version_id}][%d] pipelineServiceDeletePipelineVersionV1OK %s", 200, payload) +} + +func (o *PipelineServiceDeletePipelineVersionV1OK) GetPayload() interface{} { + return o.Payload } func (o *PipelineServiceDeletePipelineVersionV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewPipelineServiceDeletePipelineVersionV1Default(code int) *PipelineService } } -/*PipelineServiceDeletePipelineVersionV1Default handles this case with default header values. +/* +PipelineServiceDeletePipelineVersionV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceDeletePipelineVersionV1Default struct { _statusCode int - Payload *pipeline_model.GatewayruntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service delete pipeline version v1 default response has a 2xx status code +func (o *PipelineServiceDeletePipelineVersionV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service delete pipeline version v1 default response has a 3xx status code +func (o *PipelineServiceDeletePipelineVersionV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service delete pipeline version v1 default response has a 4xx status code +func (o *PipelineServiceDeletePipelineVersionV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service delete pipeline version v1 default response has a 5xx status code +func (o *PipelineServiceDeletePipelineVersionV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service delete pipeline version v1 default response a status code equal to that given +func (o *PipelineServiceDeletePipelineVersionV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service delete pipeline version v1 default response @@ -94,12 +159,22 @@ func (o *PipelineServiceDeletePipelineVersionV1Default) Code() int { } func (o *PipelineServiceDeletePipelineVersionV1Default) Error() string { - return fmt.Sprintf("[DELETE /apis/v1beta1/pipeline_versions/{version_id}][%d] PipelineService_DeletePipelineVersionV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v1beta1/pipeline_versions/{version_id}][%d] PipelineService_DeletePipelineVersionV1 default %s", o._statusCode, payload) +} + +func (o *PipelineServiceDeletePipelineVersionV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v1beta1/pipeline_versions/{version_id}][%d] PipelineService_DeletePipelineVersionV1 default %s", o._statusCode, payload) +} + +func (o *PipelineServiceDeletePipelineVersionV1Default) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceDeletePipelineVersionV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.GatewayruntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_by_name_v1_parameters.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_by_name_v1_parameters.go index 7e44ccc295c..316b2d14375 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_by_name_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_by_name_v1_parameters.go @@ -13,65 +13,66 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// 
NewPipelineServiceGetPipelineByNameV1Params creates a new PipelineServiceGetPipelineByNameV1Params object -// with the default values initialized. +// NewPipelineServiceGetPipelineByNameV1Params creates a new PipelineServiceGetPipelineByNameV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceGetPipelineByNameV1Params() *PipelineServiceGetPipelineByNameV1Params { - var () return &PipelineServiceGetPipelineByNameV1Params{ - timeout: cr.DefaultTimeout, } } // NewPipelineServiceGetPipelineByNameV1ParamsWithTimeout creates a new PipelineServiceGetPipelineByNameV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceGetPipelineByNameV1ParamsWithTimeout(timeout time.Duration) *PipelineServiceGetPipelineByNameV1Params { - var () return &PipelineServiceGetPipelineByNameV1Params{ - timeout: timeout, } } // NewPipelineServiceGetPipelineByNameV1ParamsWithContext creates a new PipelineServiceGetPipelineByNameV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceGetPipelineByNameV1ParamsWithContext(ctx context.Context) *PipelineServiceGetPipelineByNameV1Params { - var () return &PipelineServiceGetPipelineByNameV1Params{ - Context: ctx, } } // NewPipelineServiceGetPipelineByNameV1ParamsWithHTTPClient creates a new PipelineServiceGetPipelineByNameV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceGetPipelineByNameV1ParamsWithHTTPClient(client *http.Client) *PipelineServiceGetPipelineByNameV1Params { - var () return &PipelineServiceGetPipelineByNameV1Params{ HTTPClient: client, } } -/*PipelineServiceGetPipelineByNameV1Params contains all the parameters to send to the API endpoint -for the pipeline service get pipeline by name v1 operation typically these are written to a http.Request +/* +PipelineServiceGetPipelineByNameV1Params contains all the parameters to send to the API endpoint + + for the pipeline service get pipeline by name v1 operation. + + Typically these are written to a http.Request. */ type PipelineServiceGetPipelineByNameV1Params struct { - /*Name - The Name of the pipeline to be retrieved. + /* Name. + The Name of the pipeline to be retrieved. */ Name string - /*Namespace - The Namespace the pipeline belongs to. + + /* Namespace. + + The Namespace the pipeline belongs to. In the case of shared pipelines and KFPipeline standalone installation, the pipeline name is the only needed field for unique resource lookup (namespace is not required). In those case, please provide hyphen (dash character, "-"). - */ Namespace string @@ -80,6 +81,21 @@ type PipelineServiceGetPipelineByNameV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service get pipeline by name v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
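Note (editorial, not part of the generated diff): the Namespace parameter doc above keeps the convention that shared pipelines and standalone installs pass a hyphen instead of a namespace. A short caller-side sketch of that convention; `svc` is the generated client as in the earlier examples.

package example

import (
	"github.com/go-openapi/runtime"

	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service"
	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model"
)

// getSharedPipelineByName looks up a pipeline by name without a namespace.
func getSharedPipelineByName(svc pipeline_service.ClientService, auth runtime.ClientAuthInfoWriter, name string) (*pipeline_model.APIPipeline, error) {
	params := pipeline_service.NewPipelineServiceGetPipelineByNameV1Params().
		WithNamespace("-"). // "-" means "no namespace", per the generated parameter comment
		WithName(name)

	resp, err := svc.PipelineServiceGetPipelineByNameV1(params, auth)
	if err != nil {
		return nil, err
	}
	return resp.GetPayload(), nil
}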
+func (o *PipelineServiceGetPipelineByNameV1Params) WithDefaults() *PipelineServiceGetPipelineByNameV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service get pipeline by name v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceGetPipelineByNameV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the pipeline service get pipeline by name v1 params func (o *PipelineServiceGetPipelineByNameV1Params) WithTimeout(timeout time.Duration) *PipelineServiceGetPipelineByNameV1Params { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_by_name_v1_responses.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_by_name_v1_responses.go index b7bed7c899e..36f3420b630 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_by_name_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_by_name_v1_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" ) // PipelineServiceGetPipelineByNameV1Reader is a Reader for the PipelineServiceGetPipelineByNameV1 structure. @@ -24,14 +24,12 @@ type PipelineServiceGetPipelineByNameV1Reader struct { // ReadResponse reads a server response into the received o. func (o *PipelineServiceGetPipelineByNameV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceGetPipelineByNameV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceGetPipelineByNameV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceGetPipelineByNameV1OK() *PipelineServiceGetPipelineByName return &PipelineServiceGetPipelineByNameV1OK{} } -/*PipelineServiceGetPipelineByNameV1OK handles this case with default header values. +/* +PipelineServiceGetPipelineByNameV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type PipelineServiceGetPipelineByNameV1OK struct { Payload *pipeline_model.APIPipeline } +// IsSuccess returns true when this pipeline service get pipeline by name v1 o k response has a 2xx status code +func (o *PipelineServiceGetPipelineByNameV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service get pipeline by name v1 o k response has a 3xx status code +func (o *PipelineServiceGetPipelineByNameV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service get pipeline by name v1 o k response has a 4xx status code +func (o *PipelineServiceGetPipelineByNameV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service get pipeline by name v1 o k response has a 5xx status code +func (o *PipelineServiceGetPipelineByNameV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service get pipeline by name v1 o k response a status code equal to that given +func (o *PipelineServiceGetPipelineByNameV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service get pipeline by name v1 o k response +func (o *PipelineServiceGetPipelineByNameV1OK) Code() int { + return 200 +} + func (o *PipelineServiceGetPipelineByNameV1OK) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/namespaces/{namespace}/pipelines/{name}][%d] pipelineServiceGetPipelineByNameV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/namespaces/{namespace}/pipelines/{name}][%d] pipelineServiceGetPipelineByNameV1OK %s", 200, payload) +} + +func (o *PipelineServiceGetPipelineByNameV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/namespaces/{namespace}/pipelines/{name}][%d] pipelineServiceGetPipelineByNameV1OK %s", 200, payload) +} + +func (o *PipelineServiceGetPipelineByNameV1OK) GetPayload() *pipeline_model.APIPipeline { + return o.Payload } func (o *PipelineServiceGetPipelineByNameV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewPipelineServiceGetPipelineByNameV1Default(code int) *PipelineServiceGetP } } -/*PipelineServiceGetPipelineByNameV1Default handles this case with default header values. +/* +PipelineServiceGetPipelineByNameV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceGetPipelineByNameV1Default struct { _statusCode int - Payload *pipeline_model.GatewayruntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service get pipeline by name v1 default response has a 2xx status code +func (o *PipelineServiceGetPipelineByNameV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service get pipeline by name v1 default response has a 3xx status code +func (o *PipelineServiceGetPipelineByNameV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service get pipeline by name v1 default response has a 4xx status code +func (o *PipelineServiceGetPipelineByNameV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service get pipeline by name v1 default response has a 5xx status code +func (o *PipelineServiceGetPipelineByNameV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service get pipeline by name v1 default response a status code equal to that given +func (o *PipelineServiceGetPipelineByNameV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service get pipeline by name v1 default response @@ -96,12 +161,22 @@ func (o *PipelineServiceGetPipelineByNameV1Default) Code() int { } func (o *PipelineServiceGetPipelineByNameV1Default) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/namespaces/{namespace}/pipelines/{name}][%d] PipelineService_GetPipelineByNameV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/namespaces/{namespace}/pipelines/{name}][%d] PipelineService_GetPipelineByNameV1 default %s", o._statusCode, payload) +} + +func (o *PipelineServiceGetPipelineByNameV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/namespaces/{namespace}/pipelines/{name}][%d] PipelineService_GetPipelineByNameV1 default %s", o._statusCode, payload) +} + +func (o *PipelineServiceGetPipelineByNameV1Default) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceGetPipelineByNameV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.GatewayruntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_v1_parameters.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_v1_parameters.go index 8f2d9313336..5db0e9cf5d9 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_v1_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewPipelineServiceGetPipelineV1Params creates a new PipelineServiceGetPipelineV1Params object -// with the 
default values initialized. +// NewPipelineServiceGetPipelineV1Params creates a new PipelineServiceGetPipelineV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceGetPipelineV1Params() *PipelineServiceGetPipelineV1Params { - var () return &PipelineServiceGetPipelineV1Params{ - timeout: cr.DefaultTimeout, } } // NewPipelineServiceGetPipelineV1ParamsWithTimeout creates a new PipelineServiceGetPipelineV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceGetPipelineV1ParamsWithTimeout(timeout time.Duration) *PipelineServiceGetPipelineV1Params { - var () return &PipelineServiceGetPipelineV1Params{ - timeout: timeout, } } // NewPipelineServiceGetPipelineV1ParamsWithContext creates a new PipelineServiceGetPipelineV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceGetPipelineV1ParamsWithContext(ctx context.Context) *PipelineServiceGetPipelineV1Params { - var () return &PipelineServiceGetPipelineV1Params{ - Context: ctx, } } // NewPipelineServiceGetPipelineV1ParamsWithHTTPClient creates a new PipelineServiceGetPipelineV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceGetPipelineV1ParamsWithHTTPClient(client *http.Client) *PipelineServiceGetPipelineV1Params { - var () return &PipelineServiceGetPipelineV1Params{ HTTPClient: client, } } -/*PipelineServiceGetPipelineV1Params contains all the parameters to send to the API endpoint -for the pipeline service get pipeline v1 operation typically these are written to a http.Request +/* +PipelineServiceGetPipelineV1Params contains all the parameters to send to the API endpoint + + for the pipeline service get pipeline v1 operation. + + Typically these are written to a http.Request. */ type PipelineServiceGetPipelineV1Params struct { - /*ID - The ID of the pipeline to be retrieved. + /* ID. + The ID of the pipeline to be retrieved. */ ID string @@ -72,6 +72,21 @@ type PipelineServiceGetPipelineV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service get pipeline v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceGetPipelineV1Params) WithDefaults() *PipelineServiceGetPipelineV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service get pipeline v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *PipelineServiceGetPipelineV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the pipeline service get pipeline v1 params func (o *PipelineServiceGetPipelineV1Params) WithTimeout(timeout time.Duration) *PipelineServiceGetPipelineV1Params { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_v1_responses.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_v1_responses.go index 7596457b514..da3b09f4b1e 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_v1_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" ) // PipelineServiceGetPipelineV1Reader is a Reader for the PipelineServiceGetPipelineV1 structure. @@ -24,14 +24,12 @@ type PipelineServiceGetPipelineV1Reader struct { // ReadResponse reads a server response into the received o. func (o *PipelineServiceGetPipelineV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceGetPipelineV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceGetPipelineV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceGetPipelineV1OK() *PipelineServiceGetPipelineV1OK { return &PipelineServiceGetPipelineV1OK{} } -/*PipelineServiceGetPipelineV1OK handles this case with default header values. +/* +PipelineServiceGetPipelineV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type PipelineServiceGetPipelineV1OK struct { Payload *pipeline_model.APIPipeline } +// IsSuccess returns true when this pipeline service get pipeline v1 o k response has a 2xx status code +func (o *PipelineServiceGetPipelineV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service get pipeline v1 o k response has a 3xx status code +func (o *PipelineServiceGetPipelineV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service get pipeline v1 o k response has a 4xx status code +func (o *PipelineServiceGetPipelineV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service get pipeline v1 o k response has a 5xx status code +func (o *PipelineServiceGetPipelineV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service get pipeline v1 o k response a status code equal to that given +func (o *PipelineServiceGetPipelineV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service get pipeline v1 o k response +func (o *PipelineServiceGetPipelineV1OK) Code() int { + return 200 +} + func (o *PipelineServiceGetPipelineV1OK) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/pipelines/{id}][%d] pipelineServiceGetPipelineV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipelines/{id}][%d] pipelineServiceGetPipelineV1OK %s", 200, payload) +} + +func (o *PipelineServiceGetPipelineV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipelines/{id}][%d] pipelineServiceGetPipelineV1OK %s", 200, payload) +} + +func (o *PipelineServiceGetPipelineV1OK) GetPayload() *pipeline_model.APIPipeline { + return o.Payload } func (o *PipelineServiceGetPipelineV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewPipelineServiceGetPipelineV1Default(code int) *PipelineServiceGetPipelin } } -/*PipelineServiceGetPipelineV1Default handles this case with default header values. +/* +PipelineServiceGetPipelineV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceGetPipelineV1Default struct { _statusCode int - Payload *pipeline_model.GatewayruntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service get pipeline v1 default response has a 2xx status code +func (o *PipelineServiceGetPipelineV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service get pipeline v1 default response has a 3xx status code +func (o *PipelineServiceGetPipelineV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service get pipeline v1 default response has a 4xx status code +func (o *PipelineServiceGetPipelineV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service get pipeline v1 default response has a 5xx status code +func (o *PipelineServiceGetPipelineV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service get pipeline v1 default response a status code equal to that given +func (o *PipelineServiceGetPipelineV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service get pipeline v1 default response @@ -96,12 +161,22 @@ func (o *PipelineServiceGetPipelineV1Default) Code() int { } func (o *PipelineServiceGetPipelineV1Default) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/pipelines/{id}][%d] PipelineService_GetPipelineV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipelines/{id}][%d] PipelineService_GetPipelineV1 default %s", o._statusCode, payload) +} + +func (o *PipelineServiceGetPipelineV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipelines/{id}][%d] PipelineService_GetPipelineV1 default %s", o._statusCode, payload) +} + +func (o *PipelineServiceGetPipelineV1Default) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceGetPipelineV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.GatewayruntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_template_parameters.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_template_parameters.go index 1e388d5d459..ad434eefd7c 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_template_parameters.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_template_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewPipelineServiceGetPipelineVersionTemplateParams creates a new PipelineServiceGetPipelineVersionTemplateParams object -// with the default values initialized. 
+// NewPipelineServiceGetPipelineVersionTemplateParams creates a new PipelineServiceGetPipelineVersionTemplateParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceGetPipelineVersionTemplateParams() *PipelineServiceGetPipelineVersionTemplateParams { - var () return &PipelineServiceGetPipelineVersionTemplateParams{ - timeout: cr.DefaultTimeout, } } // NewPipelineServiceGetPipelineVersionTemplateParamsWithTimeout creates a new PipelineServiceGetPipelineVersionTemplateParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceGetPipelineVersionTemplateParamsWithTimeout(timeout time.Duration) *PipelineServiceGetPipelineVersionTemplateParams { - var () return &PipelineServiceGetPipelineVersionTemplateParams{ - timeout: timeout, } } // NewPipelineServiceGetPipelineVersionTemplateParamsWithContext creates a new PipelineServiceGetPipelineVersionTemplateParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceGetPipelineVersionTemplateParamsWithContext(ctx context.Context) *PipelineServiceGetPipelineVersionTemplateParams { - var () return &PipelineServiceGetPipelineVersionTemplateParams{ - Context: ctx, } } // NewPipelineServiceGetPipelineVersionTemplateParamsWithHTTPClient creates a new PipelineServiceGetPipelineVersionTemplateParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceGetPipelineVersionTemplateParamsWithHTTPClient(client *http.Client) *PipelineServiceGetPipelineVersionTemplateParams { - var () return &PipelineServiceGetPipelineVersionTemplateParams{ HTTPClient: client, } } -/*PipelineServiceGetPipelineVersionTemplateParams contains all the parameters to send to the API endpoint -for the pipeline service get pipeline version template operation typically these are written to a http.Request +/* +PipelineServiceGetPipelineVersionTemplateParams contains all the parameters to send to the API endpoint + + for the pipeline service get pipeline version template operation. + + Typically these are written to a http.Request. */ type PipelineServiceGetPipelineVersionTemplateParams struct { - /*VersionID - The ID of the pipeline version whose template is to be retrieved. + /* VersionID. + The ID of the pipeline version whose template is to be retrieved. */ VersionID string @@ -72,6 +72,21 @@ type PipelineServiceGetPipelineVersionTemplateParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service get pipeline version template params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceGetPipelineVersionTemplateParams) WithDefaults() *PipelineServiceGetPipelineVersionTemplateParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service get pipeline version template params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *PipelineServiceGetPipelineVersionTemplateParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the pipeline service get pipeline version template params func (o *PipelineServiceGetPipelineVersionTemplateParams) WithTimeout(timeout time.Duration) *PipelineServiceGetPipelineVersionTemplateParams { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_template_responses.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_template_responses.go index 164b7378eac..1f2a631417a 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_template_responses.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_template_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" ) // PipelineServiceGetPipelineVersionTemplateReader is a Reader for the PipelineServiceGetPipelineVersionTemplate structure. @@ -24,14 +24,12 @@ type PipelineServiceGetPipelineVersionTemplateReader struct { // ReadResponse reads a server response into the received o. func (o *PipelineServiceGetPipelineVersionTemplateReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceGetPipelineVersionTemplateOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceGetPipelineVersionTemplateDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceGetPipelineVersionTemplateOK() *PipelineServiceGetPipelin return &PipelineServiceGetPipelineVersionTemplateOK{} } -/*PipelineServiceGetPipelineVersionTemplateOK handles this case with default header values. +/* +PipelineServiceGetPipelineVersionTemplateOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type PipelineServiceGetPipelineVersionTemplateOK struct { Payload *pipeline_model.APIGetTemplateResponse } +// IsSuccess returns true when this pipeline service get pipeline version template o k response has a 2xx status code +func (o *PipelineServiceGetPipelineVersionTemplateOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service get pipeline version template o k response has a 3xx status code +func (o *PipelineServiceGetPipelineVersionTemplateOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service get pipeline version template o k response has a 4xx status code +func (o *PipelineServiceGetPipelineVersionTemplateOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service get pipeline version template o k response has a 5xx status code +func (o *PipelineServiceGetPipelineVersionTemplateOK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service get pipeline version template o k response a status code equal to that given +func (o *PipelineServiceGetPipelineVersionTemplateOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service get pipeline version template o k response +func (o *PipelineServiceGetPipelineVersionTemplateOK) Code() int { + return 200 +} + func (o *PipelineServiceGetPipelineVersionTemplateOK) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions/{version_id}/templates][%d] pipelineServiceGetPipelineVersionTemplateOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions/{version_id}/templates][%d] pipelineServiceGetPipelineVersionTemplateOK %s", 200, payload) +} + +func (o *PipelineServiceGetPipelineVersionTemplateOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions/{version_id}/templates][%d] pipelineServiceGetPipelineVersionTemplateOK %s", 200, payload) +} + +func (o *PipelineServiceGetPipelineVersionTemplateOK) GetPayload() *pipeline_model.APIGetTemplateResponse { + return o.Payload } func (o *PipelineServiceGetPipelineVersionTemplateOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewPipelineServiceGetPipelineVersionTemplateDefault(code int) *PipelineServ } } -/*PipelineServiceGetPipelineVersionTemplateDefault handles this case with default header values. +/* +PipelineServiceGetPipelineVersionTemplateDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceGetPipelineVersionTemplateDefault struct { _statusCode int - Payload *pipeline_model.GatewayruntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service get pipeline version template default response has a 2xx status code +func (o *PipelineServiceGetPipelineVersionTemplateDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service get pipeline version template default response has a 3xx status code +func (o *PipelineServiceGetPipelineVersionTemplateDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service get pipeline version template default response has a 4xx status code +func (o *PipelineServiceGetPipelineVersionTemplateDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service get pipeline version template default response has a 5xx status code +func (o *PipelineServiceGetPipelineVersionTemplateDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service get pipeline version template default response a status code equal to that given +func (o *PipelineServiceGetPipelineVersionTemplateDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service get pipeline version template default response @@ -96,12 +161,22 @@ func (o *PipelineServiceGetPipelineVersionTemplateDefault) Code() int { } func (o *PipelineServiceGetPipelineVersionTemplateDefault) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions/{version_id}/templates][%d] PipelineService_GetPipelineVersionTemplate default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions/{version_id}/templates][%d] PipelineService_GetPipelineVersionTemplate default %s", o._statusCode, payload) +} + +func (o *PipelineServiceGetPipelineVersionTemplateDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions/{version_id}/templates][%d] PipelineService_GetPipelineVersionTemplate default %s", o._statusCode, payload) +} + +func (o *PipelineServiceGetPipelineVersionTemplateDefault) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceGetPipelineVersionTemplateDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.GatewayruntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_v1_parameters.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_v1_parameters.go index 8267909c064..526af64a269 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_v1_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt 
"github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewPipelineServiceGetPipelineVersionV1Params creates a new PipelineServiceGetPipelineVersionV1Params object -// with the default values initialized. +// NewPipelineServiceGetPipelineVersionV1Params creates a new PipelineServiceGetPipelineVersionV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceGetPipelineVersionV1Params() *PipelineServiceGetPipelineVersionV1Params { - var () return &PipelineServiceGetPipelineVersionV1Params{ - timeout: cr.DefaultTimeout, } } // NewPipelineServiceGetPipelineVersionV1ParamsWithTimeout creates a new PipelineServiceGetPipelineVersionV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceGetPipelineVersionV1ParamsWithTimeout(timeout time.Duration) *PipelineServiceGetPipelineVersionV1Params { - var () return &PipelineServiceGetPipelineVersionV1Params{ - timeout: timeout, } } // NewPipelineServiceGetPipelineVersionV1ParamsWithContext creates a new PipelineServiceGetPipelineVersionV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceGetPipelineVersionV1ParamsWithContext(ctx context.Context) *PipelineServiceGetPipelineVersionV1Params { - var () return &PipelineServiceGetPipelineVersionV1Params{ - Context: ctx, } } // NewPipelineServiceGetPipelineVersionV1ParamsWithHTTPClient creates a new PipelineServiceGetPipelineVersionV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceGetPipelineVersionV1ParamsWithHTTPClient(client *http.Client) *PipelineServiceGetPipelineVersionV1Params { - var () return &PipelineServiceGetPipelineVersionV1Params{ HTTPClient: client, } } -/*PipelineServiceGetPipelineVersionV1Params contains all the parameters to send to the API endpoint -for the pipeline service get pipeline version v1 operation typically these are written to a http.Request +/* +PipelineServiceGetPipelineVersionV1Params contains all the parameters to send to the API endpoint + + for the pipeline service get pipeline version v1 operation. + + Typically these are written to a http.Request. */ type PipelineServiceGetPipelineVersionV1Params struct { - /*VersionID - The ID of the pipeline version to be retrieved. + /* VersionID. + The ID of the pipeline version to be retrieved. */ VersionID string @@ -72,6 +72,21 @@ type PipelineServiceGetPipelineVersionV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service get pipeline version v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceGetPipelineVersionV1Params) WithDefaults() *PipelineServiceGetPipelineVersionV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service get pipeline version v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *PipelineServiceGetPipelineVersionV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the pipeline service get pipeline version v1 params func (o *PipelineServiceGetPipelineVersionV1Params) WithTimeout(timeout time.Duration) *PipelineServiceGetPipelineVersionV1Params { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_v1_responses.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_v1_responses.go index d2d963077ce..16801097449 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_v1_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" ) // PipelineServiceGetPipelineVersionV1Reader is a Reader for the PipelineServiceGetPipelineVersionV1 structure. @@ -24,14 +24,12 @@ type PipelineServiceGetPipelineVersionV1Reader struct { // ReadResponse reads a server response into the received o. func (o *PipelineServiceGetPipelineVersionV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceGetPipelineVersionV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceGetPipelineVersionV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceGetPipelineVersionV1OK() *PipelineServiceGetPipelineVersi return &PipelineServiceGetPipelineVersionV1OK{} } -/*PipelineServiceGetPipelineVersionV1OK handles this case with default header values. +/* +PipelineServiceGetPipelineVersionV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type PipelineServiceGetPipelineVersionV1OK struct { Payload *pipeline_model.APIPipelineVersion } +// IsSuccess returns true when this pipeline service get pipeline version v1 o k response has a 2xx status code +func (o *PipelineServiceGetPipelineVersionV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service get pipeline version v1 o k response has a 3xx status code +func (o *PipelineServiceGetPipelineVersionV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service get pipeline version v1 o k response has a 4xx status code +func (o *PipelineServiceGetPipelineVersionV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service get pipeline version v1 o k response has a 5xx status code +func (o *PipelineServiceGetPipelineVersionV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service get pipeline version v1 o k response a status code equal to that given +func (o *PipelineServiceGetPipelineVersionV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service get pipeline version v1 o k response +func (o *PipelineServiceGetPipelineVersionV1OK) Code() int { + return 200 +} + func (o *PipelineServiceGetPipelineVersionV1OK) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions/{version_id}][%d] pipelineServiceGetPipelineVersionV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions/{version_id}][%d] pipelineServiceGetPipelineVersionV1OK %s", 200, payload) +} + +func (o *PipelineServiceGetPipelineVersionV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions/{version_id}][%d] pipelineServiceGetPipelineVersionV1OK %s", 200, payload) +} + +func (o *PipelineServiceGetPipelineVersionV1OK) GetPayload() *pipeline_model.APIPipelineVersion { + return o.Payload } func (o *PipelineServiceGetPipelineVersionV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewPipelineServiceGetPipelineVersionV1Default(code int) *PipelineServiceGet } } -/*PipelineServiceGetPipelineVersionV1Default handles this case with default header values. +/* +PipelineServiceGetPipelineVersionV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceGetPipelineVersionV1Default struct { _statusCode int - Payload *pipeline_model.GatewayruntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service get pipeline version v1 default response has a 2xx status code +func (o *PipelineServiceGetPipelineVersionV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service get pipeline version v1 default response has a 3xx status code +func (o *PipelineServiceGetPipelineVersionV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service get pipeline version v1 default response has a 4xx status code +func (o *PipelineServiceGetPipelineVersionV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service get pipeline version v1 default response has a 5xx status code +func (o *PipelineServiceGetPipelineVersionV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service get pipeline version v1 default response a status code equal to that given +func (o *PipelineServiceGetPipelineVersionV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service get pipeline version v1 default response @@ -96,12 +161,22 @@ func (o *PipelineServiceGetPipelineVersionV1Default) Code() int { } func (o *PipelineServiceGetPipelineVersionV1Default) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions/{version_id}][%d] PipelineService_GetPipelineVersionV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions/{version_id}][%d] PipelineService_GetPipelineVersionV1 default %s", o._statusCode, payload) +} + +func (o *PipelineServiceGetPipelineVersionV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions/{version_id}][%d] PipelineService_GetPipelineVersionV1 default %s", o._statusCode, payload) +} + +func (o *PipelineServiceGetPipelineVersionV1Default) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceGetPipelineVersionV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.GatewayruntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_template_parameters.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_template_parameters.go index 695319577eb..c6df8fca736 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_template_parameters.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_template_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewPipelineServiceGetTemplateParams creates a new PipelineServiceGetTemplateParams object -// with the default values initialized. 
+// NewPipelineServiceGetTemplateParams creates a new PipelineServiceGetTemplateParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceGetTemplateParams() *PipelineServiceGetTemplateParams { - var () return &PipelineServiceGetTemplateParams{ - timeout: cr.DefaultTimeout, } } // NewPipelineServiceGetTemplateParamsWithTimeout creates a new PipelineServiceGetTemplateParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceGetTemplateParamsWithTimeout(timeout time.Duration) *PipelineServiceGetTemplateParams { - var () return &PipelineServiceGetTemplateParams{ - timeout: timeout, } } // NewPipelineServiceGetTemplateParamsWithContext creates a new PipelineServiceGetTemplateParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceGetTemplateParamsWithContext(ctx context.Context) *PipelineServiceGetTemplateParams { - var () return &PipelineServiceGetTemplateParams{ - Context: ctx, } } // NewPipelineServiceGetTemplateParamsWithHTTPClient creates a new PipelineServiceGetTemplateParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceGetTemplateParamsWithHTTPClient(client *http.Client) *PipelineServiceGetTemplateParams { - var () return &PipelineServiceGetTemplateParams{ HTTPClient: client, } } -/*PipelineServiceGetTemplateParams contains all the parameters to send to the API endpoint -for the pipeline service get template operation typically these are written to a http.Request +/* +PipelineServiceGetTemplateParams contains all the parameters to send to the API endpoint + + for the pipeline service get template operation. + + Typically these are written to a http.Request. */ type PipelineServiceGetTemplateParams struct { - /*ID - The ID of the pipeline whose template is to be retrieved. + /* ID. + The ID of the pipeline whose template is to be retrieved. */ ID string @@ -72,6 +72,21 @@ type PipelineServiceGetTemplateParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service get template params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceGetTemplateParams) WithDefaults() *PipelineServiceGetTemplateParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service get template params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *PipelineServiceGetTemplateParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the pipeline service get template params func (o *PipelineServiceGetTemplateParams) WithTimeout(timeout time.Duration) *PipelineServiceGetTemplateParams { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_template_responses.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_template_responses.go index f4197d60615..471c67ab315 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_template_responses.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_template_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" ) // PipelineServiceGetTemplateReader is a Reader for the PipelineServiceGetTemplate structure. @@ -24,14 +24,12 @@ type PipelineServiceGetTemplateReader struct { // ReadResponse reads a server response into the received o. func (o *PipelineServiceGetTemplateReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceGetTemplateOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceGetTemplateDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceGetTemplateOK() *PipelineServiceGetTemplateOK { return &PipelineServiceGetTemplateOK{} } -/*PipelineServiceGetTemplateOK handles this case with default header values. +/* +PipelineServiceGetTemplateOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type PipelineServiceGetTemplateOK struct { Payload *pipeline_model.APIGetTemplateResponse } +// IsSuccess returns true when this pipeline service get template o k response has a 2xx status code +func (o *PipelineServiceGetTemplateOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service get template o k response has a 3xx status code +func (o *PipelineServiceGetTemplateOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service get template o k response has a 4xx status code +func (o *PipelineServiceGetTemplateOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service get template o k response has a 5xx status code +func (o *PipelineServiceGetTemplateOK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service get template o k response a status code equal to that given +func (o *PipelineServiceGetTemplateOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service get template o k response +func (o *PipelineServiceGetTemplateOK) Code() int { + return 200 +} + func (o *PipelineServiceGetTemplateOK) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/pipelines/{id}/templates][%d] pipelineServiceGetTemplateOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipelines/{id}/templates][%d] pipelineServiceGetTemplateOK %s", 200, payload) +} + +func (o *PipelineServiceGetTemplateOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipelines/{id}/templates][%d] pipelineServiceGetTemplateOK %s", 200, payload) +} + +func (o *PipelineServiceGetTemplateOK) GetPayload() *pipeline_model.APIGetTemplateResponse { + return o.Payload } func (o *PipelineServiceGetTemplateOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewPipelineServiceGetTemplateDefault(code int) *PipelineServiceGetTemplateD } } -/*PipelineServiceGetTemplateDefault handles this case with default header values. +/* +PipelineServiceGetTemplateDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceGetTemplateDefault struct { _statusCode int - Payload *pipeline_model.GatewayruntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service get template default response has a 2xx status code +func (o *PipelineServiceGetTemplateDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service get template default response has a 3xx status code +func (o *PipelineServiceGetTemplateDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service get template default response has a 4xx status code +func (o *PipelineServiceGetTemplateDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service get template default response has a 5xx status code +func (o *PipelineServiceGetTemplateDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service get template default response a status code equal to that given +func (o *PipelineServiceGetTemplateDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service get template default response @@ -96,12 +161,22 @@ func (o *PipelineServiceGetTemplateDefault) Code() int { } func (o *PipelineServiceGetTemplateDefault) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/pipelines/{id}/templates][%d] PipelineService_GetTemplate default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipelines/{id}/templates][%d] PipelineService_GetTemplate default %s", o._statusCode, payload) +} + +func (o *PipelineServiceGetTemplateDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipelines/{id}/templates][%d] PipelineService_GetTemplate default %s", o._statusCode, payload) +} + +func (o *PipelineServiceGetTemplateDefault) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceGetTemplateDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.GatewayruntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipeline_versions_v1_parameters.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipeline_versions_v1_parameters.go index a94c61876dc..33a12cc4f8d 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipeline_versions_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipeline_versions_v1_parameters.go @@ -13,101 +13,98 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" - - strfmt "github.com/go-openapi/strfmt" ) -// NewPipelineServiceListPipelineVersionsV1Params creates a new PipelineServiceListPipelineVersionsV1Params object -// with the default values initialized. 
+// NewPipelineServiceListPipelineVersionsV1Params creates a new PipelineServiceListPipelineVersionsV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceListPipelineVersionsV1Params() *PipelineServiceListPipelineVersionsV1Params { - var ( - resourceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") - ) return &PipelineServiceListPipelineVersionsV1Params{ - ResourceKeyType: &resourceKeyTypeDefault, - timeout: cr.DefaultTimeout, } } // NewPipelineServiceListPipelineVersionsV1ParamsWithTimeout creates a new PipelineServiceListPipelineVersionsV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceListPipelineVersionsV1ParamsWithTimeout(timeout time.Duration) *PipelineServiceListPipelineVersionsV1Params { - var ( - resourceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") - ) return &PipelineServiceListPipelineVersionsV1Params{ - ResourceKeyType: &resourceKeyTypeDefault, - timeout: timeout, } } // NewPipelineServiceListPipelineVersionsV1ParamsWithContext creates a new PipelineServiceListPipelineVersionsV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceListPipelineVersionsV1ParamsWithContext(ctx context.Context) *PipelineServiceListPipelineVersionsV1Params { - var ( - resourceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") - ) return &PipelineServiceListPipelineVersionsV1Params{ - ResourceKeyType: &resourceKeyTypeDefault, - Context: ctx, } } // NewPipelineServiceListPipelineVersionsV1ParamsWithHTTPClient creates a new PipelineServiceListPipelineVersionsV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceListPipelineVersionsV1ParamsWithHTTPClient(client *http.Client) *PipelineServiceListPipelineVersionsV1Params { - var ( - resourceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") - ) return &PipelineServiceListPipelineVersionsV1Params{ - ResourceKeyType: &resourceKeyTypeDefault, - HTTPClient: client, + HTTPClient: client, } } -/*PipelineServiceListPipelineVersionsV1Params contains all the parameters to send to the API endpoint -for the pipeline service list pipeline versions v1 operation typically these are written to a http.Request +/* +PipelineServiceListPipelineVersionsV1Params contains all the parameters to send to the API endpoint + + for the pipeline service list pipeline versions v1 operation. + + Typically these are written to a http.Request. */ type PipelineServiceListPipelineVersionsV1Params struct { - /*Filter - A base-64 encoded, JSON-serialized Filter protocol buffer (see - filter.proto). + /* Filter. + A base-64 encoded, JSON-serialized Filter protocol buffer (see + filter.proto). */ Filter *string - /*PageSize - The number of pipeline versions to be listed per page. If there are more + + /* PageSize. + + The number of pipeline versions to be listed per page. If there are more pipeline versions than this number, the response message will contain a nextPageToken field you can use to fetch the next page. 
+ Format: int32 */ PageSize *int32 - /*PageToken - A page token to request the next page of results. The token is acquried + + /* PageToken. + + A page token to request the next page of results. The token is acquried from the nextPageToken field of the response from the previous ListPipelineVersions call or can be omitted when fetching the first page. - */ PageToken *string - /*ResourceKeyID - The ID of the resource that referred to. + /* ResourceKeyID. + + The ID of the resource that referred to. */ ResourceKeyID *string - /*ResourceKeyType - The type of the resource that referred to. + /* ResourceKeyType. + + The type of the resource that referred to. + + Default: "UNKNOWN_RESOURCE_TYPE" */ ResourceKeyType *string - /*SortBy - Can be format of "field_name", "field_name asc" or "field_name desc" - Ascending by default. + /* SortBy. + + Can be format of "field_name", "field_name asc" or "field_name desc" + Ascending by default. */ SortBy *string @@ -116,6 +113,32 @@ type PipelineServiceListPipelineVersionsV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service list pipeline versions v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceListPipelineVersionsV1Params) WithDefaults() *PipelineServiceListPipelineVersionsV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service list pipeline versions v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceListPipelineVersionsV1Params) SetDefaults() { + var ( + resourceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") + ) + + val := PipelineServiceListPipelineVersionsV1Params{ + ResourceKeyType: &resourceKeyTypeDefault, + } + + val.timeout = o.timeout + val.Context = o.Context + val.HTTPClient = o.HTTPClient + *o = val +} + // WithTimeout adds the timeout to the pipeline service list pipeline versions v1 params func (o *PipelineServiceListPipelineVersionsV1Params) WithTimeout(timeout time.Duration) *PipelineServiceListPipelineVersionsV1Params { o.SetTimeout(timeout) @@ -227,96 +250,102 @@ func (o *PipelineServiceListPipelineVersionsV1Params) WriteToRequest(r runtime.C // query param filter var qrFilter string + if o.Filter != nil { qrFilter = *o.Filter } qFilter := qrFilter if qFilter != "" { + if err := r.SetQueryParam("filter", qFilter); err != nil { return err } } - } if o.PageSize != nil { // query param page_size var qrPageSize int32 + if o.PageSize != nil { qrPageSize = *o.PageSize } qPageSize := swag.FormatInt32(qrPageSize) if qPageSize != "" { + if err := r.SetQueryParam("page_size", qPageSize); err != nil { return err } } - } if o.PageToken != nil { // query param page_token var qrPageToken string + if o.PageToken != nil { qrPageToken = *o.PageToken } qPageToken := qrPageToken if qPageToken != "" { + if err := r.SetQueryParam("page_token", qPageToken); err != nil { return err } } - } if o.ResourceKeyID != nil { // query param resource_key.id var qrResourceKeyID string + if o.ResourceKeyID != nil { qrResourceKeyID = *o.ResourceKeyID } qResourceKeyID := qrResourceKeyID if qResourceKeyID != "" { + if err := r.SetQueryParam("resource_key.id", qResourceKeyID); err != nil { return err } } - } if o.ResourceKeyType != nil { // query param resource_key.type var qrResourceKeyType string + if o.ResourceKeyType != nil { qrResourceKeyType = *o.ResourceKeyType } qResourceKeyType := qrResourceKeyType if 
qResourceKeyType != "" { + if err := r.SetQueryParam("resource_key.type", qResourceKeyType); err != nil { return err } } - } if o.SortBy != nil { // query param sort_by var qrSortBy string + if o.SortBy != nil { qrSortBy = *o.SortBy } qSortBy := qrSortBy if qSortBy != "" { + if err := r.SetQueryParam("sort_by", qSortBy); err != nil { return err } } - } if len(res) > 0 { diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipeline_versions_v1_responses.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipeline_versions_v1_responses.go index 647b826e2c8..a40f72c05c5 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipeline_versions_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipeline_versions_v1_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" ) // PipelineServiceListPipelineVersionsV1Reader is a Reader for the PipelineServiceListPipelineVersionsV1 structure. @@ -24,14 +24,12 @@ type PipelineServiceListPipelineVersionsV1Reader struct { // ReadResponse reads a server response into the received o. func (o *PipelineServiceListPipelineVersionsV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceListPipelineVersionsV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceListPipelineVersionsV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceListPipelineVersionsV1OK() *PipelineServiceListPipelineVe return &PipelineServiceListPipelineVersionsV1OK{} } -/*PipelineServiceListPipelineVersionsV1OK handles this case with default header values. +/* +PipelineServiceListPipelineVersionsV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type PipelineServiceListPipelineVersionsV1OK struct { Payload *pipeline_model.APIListPipelineVersionsResponse } +// IsSuccess returns true when this pipeline service list pipeline versions v1 o k response has a 2xx status code +func (o *PipelineServiceListPipelineVersionsV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service list pipeline versions v1 o k response has a 3xx status code +func (o *PipelineServiceListPipelineVersionsV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service list pipeline versions v1 o k response has a 4xx status code +func (o *PipelineServiceListPipelineVersionsV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service list pipeline versions v1 o k response has a 5xx status code +func (o *PipelineServiceListPipelineVersionsV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service list pipeline versions v1 o k response a status code equal to that given +func (o *PipelineServiceListPipelineVersionsV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service list pipeline versions v1 o k response +func (o *PipelineServiceListPipelineVersionsV1OK) Code() int { + return 200 +} + func (o *PipelineServiceListPipelineVersionsV1OK) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions][%d] pipelineServiceListPipelineVersionsV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions][%d] pipelineServiceListPipelineVersionsV1OK %s", 200, payload) +} + +func (o *PipelineServiceListPipelineVersionsV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions][%d] pipelineServiceListPipelineVersionsV1OK %s", 200, payload) +} + +func (o *PipelineServiceListPipelineVersionsV1OK) GetPayload() *pipeline_model.APIListPipelineVersionsResponse { + return o.Payload } func (o *PipelineServiceListPipelineVersionsV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewPipelineServiceListPipelineVersionsV1Default(code int) *PipelineServiceL } } -/*PipelineServiceListPipelineVersionsV1Default handles this case with default header values. +/* +PipelineServiceListPipelineVersionsV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceListPipelineVersionsV1Default struct { _statusCode int - Payload *pipeline_model.GatewayruntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service list pipeline versions v1 default response has a 2xx status code +func (o *PipelineServiceListPipelineVersionsV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service list pipeline versions v1 default response has a 3xx status code +func (o *PipelineServiceListPipelineVersionsV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service list pipeline versions v1 default response has a 4xx status code +func (o *PipelineServiceListPipelineVersionsV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service list pipeline versions v1 default response has a 5xx status code +func (o *PipelineServiceListPipelineVersionsV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service list pipeline versions v1 default response a status code equal to that given +func (o *PipelineServiceListPipelineVersionsV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service list pipeline versions v1 default response @@ -96,12 +161,22 @@ func (o *PipelineServiceListPipelineVersionsV1Default) Code() int { } func (o *PipelineServiceListPipelineVersionsV1Default) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions][%d] PipelineService_ListPipelineVersionsV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions][%d] PipelineService_ListPipelineVersionsV1 default %s", o._statusCode, payload) +} + +func (o *PipelineServiceListPipelineVersionsV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions][%d] PipelineService_ListPipelineVersionsV1 default %s", o._statusCode, payload) +} + +func (o *PipelineServiceListPipelineVersionsV1Default) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceListPipelineVersionsV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.GatewayruntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipelines_v1_parameters.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipelines_v1_parameters.go index 8481d4c25b6..60bb336f964 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipelines_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipelines_v1_parameters.go @@ -13,101 +13,98 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" - - strfmt "github.com/go-openapi/strfmt" ) -// NewPipelineServiceListPipelinesV1Params creates a new 
PipelineServiceListPipelinesV1Params object -// with the default values initialized. +// NewPipelineServiceListPipelinesV1Params creates a new PipelineServiceListPipelinesV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceListPipelinesV1Params() *PipelineServiceListPipelinesV1Params { - var ( - resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") - ) return &PipelineServiceListPipelinesV1Params{ - ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, - timeout: cr.DefaultTimeout, } } // NewPipelineServiceListPipelinesV1ParamsWithTimeout creates a new PipelineServiceListPipelinesV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceListPipelinesV1ParamsWithTimeout(timeout time.Duration) *PipelineServiceListPipelinesV1Params { - var ( - resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") - ) return &PipelineServiceListPipelinesV1Params{ - ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, - timeout: timeout, } } // NewPipelineServiceListPipelinesV1ParamsWithContext creates a new PipelineServiceListPipelinesV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceListPipelinesV1ParamsWithContext(ctx context.Context) *PipelineServiceListPipelinesV1Params { - var ( - resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") - ) return &PipelineServiceListPipelinesV1Params{ - ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, - Context: ctx, } } // NewPipelineServiceListPipelinesV1ParamsWithHTTPClient creates a new PipelineServiceListPipelinesV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceListPipelinesV1ParamsWithHTTPClient(client *http.Client) *PipelineServiceListPipelinesV1Params { - var ( - resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") - ) return &PipelineServiceListPipelinesV1Params{ - ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, - HTTPClient: client, + HTTPClient: client, } } -/*PipelineServiceListPipelinesV1Params contains all the parameters to send to the API endpoint -for the pipeline service list pipelines v1 operation typically these are written to a http.Request +/* +PipelineServiceListPipelinesV1Params contains all the parameters to send to the API endpoint + + for the pipeline service list pipelines v1 operation. + + Typically these are written to a http.Request. */ type PipelineServiceListPipelinesV1Params struct { - /*Filter - A url-encoded, JSON-serialized Filter protocol buffer (see - [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/v1beta1/filter.proto)). + /* Filter. + A url-encoded, JSON-serialized Filter protocol buffer (see + [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/v1beta1/filter.proto)). */ Filter *string - /*PageSize - The number of pipelines to be listed per page. If there are more pipelines + + /* PageSize. + + The number of pipelines to be listed per page. 
If there are more pipelines than this number, the response message will contain a valid value in the nextPageToken field. + Format: int32 */ PageSize *int32 - /*PageToken - A page token to request the next page of results. The token is acquried + + /* PageToken. + + A page token to request the next page of results. The token is acquried from the nextPageToken field of the response from the previous ListPipelines call. - */ PageToken *string - /*ResourceReferenceKeyID - The ID of the resource that referred to. + /* ResourceReferenceKeyID. + + The ID of the resource that referred to. */ ResourceReferenceKeyID *string - /*ResourceReferenceKeyType - The type of the resource that referred to. + /* ResourceReferenceKeyType. + + The type of the resource that referred to. + + Default: "UNKNOWN_RESOURCE_TYPE" */ ResourceReferenceKeyType *string - /*SortBy - Can be format of "field_name", "field_name asc" or "field_name desc" - Ascending by default. + /* SortBy. + + Can be format of "field_name", "field_name asc" or "field_name desc" + Ascending by default. */ SortBy *string @@ -116,6 +113,32 @@ type PipelineServiceListPipelinesV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service list pipelines v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceListPipelinesV1Params) WithDefaults() *PipelineServiceListPipelinesV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service list pipelines v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceListPipelinesV1Params) SetDefaults() { + var ( + resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") + ) + + val := PipelineServiceListPipelinesV1Params{ + ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, + } + + val.timeout = o.timeout + val.Context = o.Context + val.HTTPClient = o.HTTPClient + *o = val +} + // WithTimeout adds the timeout to the pipeline service list pipelines v1 params func (o *PipelineServiceListPipelinesV1Params) WithTimeout(timeout time.Duration) *PipelineServiceListPipelinesV1Params { o.SetTimeout(timeout) @@ -227,96 +250,102 @@ func (o *PipelineServiceListPipelinesV1Params) WriteToRequest(r runtime.ClientRe // query param filter var qrFilter string + if o.Filter != nil { qrFilter = *o.Filter } qFilter := qrFilter if qFilter != "" { + if err := r.SetQueryParam("filter", qFilter); err != nil { return err } } - } if o.PageSize != nil { // query param page_size var qrPageSize int32 + if o.PageSize != nil { qrPageSize = *o.PageSize } qPageSize := swag.FormatInt32(qrPageSize) if qPageSize != "" { + if err := r.SetQueryParam("page_size", qPageSize); err != nil { return err } } - } if o.PageToken != nil { // query param page_token var qrPageToken string + if o.PageToken != nil { qrPageToken = *o.PageToken } qPageToken := qrPageToken if qPageToken != "" { + if err := r.SetQueryParam("page_token", qPageToken); err != nil { return err } } - } if o.ResourceReferenceKeyID != nil { // query param resource_reference_key.id var qrResourceReferenceKeyID string + if o.ResourceReferenceKeyID != nil { qrResourceReferenceKeyID = *o.ResourceReferenceKeyID } qResourceReferenceKeyID := qrResourceReferenceKeyID if qResourceReferenceKeyID != "" { + if err := r.SetQueryParam("resource_reference_key.id", qResourceReferenceKeyID); err != nil { return err } } - } if o.ResourceReferenceKeyType != nil { // 
query param resource_reference_key.type var qrResourceReferenceKeyType string + if o.ResourceReferenceKeyType != nil { qrResourceReferenceKeyType = *o.ResourceReferenceKeyType } qResourceReferenceKeyType := qrResourceReferenceKeyType if qResourceReferenceKeyType != "" { + if err := r.SetQueryParam("resource_reference_key.type", qResourceReferenceKeyType); err != nil { return err } } - } if o.SortBy != nil { // query param sort_by var qrSortBy string + if o.SortBy != nil { qrSortBy = *o.SortBy } qSortBy := qrSortBy if qSortBy != "" { + if err := r.SetQueryParam("sort_by", qSortBy); err != nil { return err } } - } if len(res) > 0 { diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipelines_v1_responses.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipelines_v1_responses.go index 1add38aa4ad..becb45766c8 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipelines_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipelines_v1_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" ) // PipelineServiceListPipelinesV1Reader is a Reader for the PipelineServiceListPipelinesV1 structure. @@ -24,14 +24,12 @@ type PipelineServiceListPipelinesV1Reader struct { // ReadResponse reads a server response into the received o. func (o *PipelineServiceListPipelinesV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceListPipelinesV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceListPipelinesV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceListPipelinesV1OK() *PipelineServiceListPipelinesV1OK { return &PipelineServiceListPipelinesV1OK{} } -/*PipelineServiceListPipelinesV1OK handles this case with default header values. +/* +PipelineServiceListPipelinesV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type PipelineServiceListPipelinesV1OK struct { Payload *pipeline_model.APIListPipelinesResponse } +// IsSuccess returns true when this pipeline service list pipelines v1 o k response has a 2xx status code +func (o *PipelineServiceListPipelinesV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service list pipelines v1 o k response has a 3xx status code +func (o *PipelineServiceListPipelinesV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service list pipelines v1 o k response has a 4xx status code +func (o *PipelineServiceListPipelinesV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service list pipelines v1 o k response has a 5xx status code +func (o *PipelineServiceListPipelinesV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service list pipelines v1 o k response a status code equal to that given +func (o *PipelineServiceListPipelinesV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service list pipelines v1 o k response +func (o *PipelineServiceListPipelinesV1OK) Code() int { + return 200 +} + func (o *PipelineServiceListPipelinesV1OK) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/pipelines][%d] pipelineServiceListPipelinesV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipelines][%d] pipelineServiceListPipelinesV1OK %s", 200, payload) +} + +func (o *PipelineServiceListPipelinesV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipelines][%d] pipelineServiceListPipelinesV1OK %s", 200, payload) +} + +func (o *PipelineServiceListPipelinesV1OK) GetPayload() *pipeline_model.APIListPipelinesResponse { + return o.Payload } func (o *PipelineServiceListPipelinesV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewPipelineServiceListPipelinesV1Default(code int) *PipelineServiceListPipe } } -/*PipelineServiceListPipelinesV1Default handles this case with default header values. +/* +PipelineServiceListPipelinesV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceListPipelinesV1Default struct { _statusCode int - Payload *pipeline_model.GatewayruntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service list pipelines v1 default response has a 2xx status code +func (o *PipelineServiceListPipelinesV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service list pipelines v1 default response has a 3xx status code +func (o *PipelineServiceListPipelinesV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service list pipelines v1 default response has a 4xx status code +func (o *PipelineServiceListPipelinesV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service list pipelines v1 default response has a 5xx status code +func (o *PipelineServiceListPipelinesV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service list pipelines v1 default response a status code equal to that given +func (o *PipelineServiceListPipelinesV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service list pipelines v1 default response @@ -96,12 +161,22 @@ func (o *PipelineServiceListPipelinesV1Default) Code() int { } func (o *PipelineServiceListPipelinesV1Default) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/pipelines][%d] PipelineService_ListPipelinesV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipelines][%d] PipelineService_ListPipelinesV1 default %s", o._statusCode, payload) +} + +func (o *PipelineServiceListPipelinesV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/pipelines][%d] PipelineService_ListPipelinesV1 default %s", o._statusCode, payload) +} + +func (o *PipelineServiceListPipelinesV1Default) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceListPipelinesV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.GatewayruntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_update_pipeline_default_version_v1_parameters.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_update_pipeline_default_version_v1_parameters.go index cc3bcf04593..efddd9f37c4 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_update_pipeline_default_version_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_update_pipeline_default_version_v1_parameters.go @@ -13,62 +13,63 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewPipelineServiceUpdatePipelineDefaultVersionV1Params creates a new PipelineServiceUpdatePipelineDefaultVersionV1Params object -// with the default values initialized. 
+// NewPipelineServiceUpdatePipelineDefaultVersionV1Params creates a new PipelineServiceUpdatePipelineDefaultVersionV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceUpdatePipelineDefaultVersionV1Params() *PipelineServiceUpdatePipelineDefaultVersionV1Params { - var () return &PipelineServiceUpdatePipelineDefaultVersionV1Params{ - timeout: cr.DefaultTimeout, } } // NewPipelineServiceUpdatePipelineDefaultVersionV1ParamsWithTimeout creates a new PipelineServiceUpdatePipelineDefaultVersionV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceUpdatePipelineDefaultVersionV1ParamsWithTimeout(timeout time.Duration) *PipelineServiceUpdatePipelineDefaultVersionV1Params { - var () return &PipelineServiceUpdatePipelineDefaultVersionV1Params{ - timeout: timeout, } } // NewPipelineServiceUpdatePipelineDefaultVersionV1ParamsWithContext creates a new PipelineServiceUpdatePipelineDefaultVersionV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceUpdatePipelineDefaultVersionV1ParamsWithContext(ctx context.Context) *PipelineServiceUpdatePipelineDefaultVersionV1Params { - var () return &PipelineServiceUpdatePipelineDefaultVersionV1Params{ - Context: ctx, } } // NewPipelineServiceUpdatePipelineDefaultVersionV1ParamsWithHTTPClient creates a new PipelineServiceUpdatePipelineDefaultVersionV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceUpdatePipelineDefaultVersionV1ParamsWithHTTPClient(client *http.Client) *PipelineServiceUpdatePipelineDefaultVersionV1Params { - var () return &PipelineServiceUpdatePipelineDefaultVersionV1Params{ HTTPClient: client, } } -/*PipelineServiceUpdatePipelineDefaultVersionV1Params contains all the parameters to send to the API endpoint -for the pipeline service update pipeline default version v1 operation typically these are written to a http.Request +/* +PipelineServiceUpdatePipelineDefaultVersionV1Params contains all the parameters to send to the API endpoint + + for the pipeline service update pipeline default version v1 operation. + + Typically these are written to a http.Request. */ type PipelineServiceUpdatePipelineDefaultVersionV1Params struct { - /*PipelineID - The ID of the pipeline to be updated. + /* PipelineID. + The ID of the pipeline to be updated. */ PipelineID string - /*VersionID - The ID of the default version. + /* VersionID. + + The ID of the default version. */ VersionID string @@ -77,6 +78,21 @@ type PipelineServiceUpdatePipelineDefaultVersionV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service update pipeline default version v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *PipelineServiceUpdatePipelineDefaultVersionV1Params) WithDefaults() *PipelineServiceUpdatePipelineDefaultVersionV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service update pipeline default version v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceUpdatePipelineDefaultVersionV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the pipeline service update pipeline default version v1 params func (o *PipelineServiceUpdatePipelineDefaultVersionV1Params) WithTimeout(timeout time.Duration) *PipelineServiceUpdatePipelineDefaultVersionV1Params { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_update_pipeline_default_version_v1_responses.go b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_update_pipeline_default_version_v1_responses.go index 77967ba81d7..4b3c643ff80 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_update_pipeline_default_version_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_update_pipeline_default_version_v1_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" ) // PipelineServiceUpdatePipelineDefaultVersionV1Reader is a Reader for the PipelineServiceUpdatePipelineDefaultVersionV1 structure. @@ -24,14 +24,12 @@ type PipelineServiceUpdatePipelineDefaultVersionV1Reader struct { // ReadResponse reads a server response into the received o. func (o *PipelineServiceUpdatePipelineDefaultVersionV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceUpdatePipelineDefaultVersionV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceUpdatePipelineDefaultVersionV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceUpdatePipelineDefaultVersionV1OK() *PipelineServiceUpdate return &PipelineServiceUpdatePipelineDefaultVersionV1OK{} } -/*PipelineServiceUpdatePipelineDefaultVersionV1OK handles this case with default header values. +/* +PipelineServiceUpdatePipelineDefaultVersionV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type PipelineServiceUpdatePipelineDefaultVersionV1OK struct { Payload interface{} } +// IsSuccess returns true when this pipeline service update pipeline default version v1 o k response has a 2xx status code +func (o *PipelineServiceUpdatePipelineDefaultVersionV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service update pipeline default version v1 o k response has a 3xx status code +func (o *PipelineServiceUpdatePipelineDefaultVersionV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service update pipeline default version v1 o k response has a 4xx status code +func (o *PipelineServiceUpdatePipelineDefaultVersionV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service update pipeline default version v1 o k response has a 5xx status code +func (o *PipelineServiceUpdatePipelineDefaultVersionV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service update pipeline default version v1 o k response a status code equal to that given +func (o *PipelineServiceUpdatePipelineDefaultVersionV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service update pipeline default version v1 o k response +func (o *PipelineServiceUpdatePipelineDefaultVersionV1OK) Code() int { + return 200 +} + func (o *PipelineServiceUpdatePipelineDefaultVersionV1OK) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/pipelines/{pipeline_id}/default_version/{version_id}][%d] pipelineServiceUpdatePipelineDefaultVersionV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/pipelines/{pipeline_id}/default_version/{version_id}][%d] pipelineServiceUpdatePipelineDefaultVersionV1OK %s", 200, payload) +} + +func (o *PipelineServiceUpdatePipelineDefaultVersionV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/pipelines/{pipeline_id}/default_version/{version_id}][%d] pipelineServiceUpdatePipelineDefaultVersionV1OK %s", 200, payload) +} + +func (o *PipelineServiceUpdatePipelineDefaultVersionV1OK) GetPayload() interface{} { + return o.Payload } func (o *PipelineServiceUpdatePipelineDefaultVersionV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewPipelineServiceUpdatePipelineDefaultVersionV1Default(code int) *Pipeline } } -/*PipelineServiceUpdatePipelineDefaultVersionV1Default handles this case with default header values. +/* +PipelineServiceUpdatePipelineDefaultVersionV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceUpdatePipelineDefaultVersionV1Default struct { _statusCode int - Payload *pipeline_model.GatewayruntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service update pipeline default version v1 default response has a 2xx status code +func (o *PipelineServiceUpdatePipelineDefaultVersionV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service update pipeline default version v1 default response has a 3xx status code +func (o *PipelineServiceUpdatePipelineDefaultVersionV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service update pipeline default version v1 default response has a 4xx status code +func (o *PipelineServiceUpdatePipelineDefaultVersionV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service update pipeline default version v1 default response has a 5xx status code +func (o *PipelineServiceUpdatePipelineDefaultVersionV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service update pipeline default version v1 default response a status code equal to that given +func (o *PipelineServiceUpdatePipelineDefaultVersionV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service update pipeline default version v1 default response @@ -94,12 +159,22 @@ func (o *PipelineServiceUpdatePipelineDefaultVersionV1Default) Code() int { } func (o *PipelineServiceUpdatePipelineDefaultVersionV1Default) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/pipelines/{pipeline_id}/default_version/{version_id}][%d] PipelineService_UpdatePipelineDefaultVersionV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/pipelines/{pipeline_id}/default_version/{version_id}][%d] PipelineService_UpdatePipelineDefaultVersionV1 default %s", o._statusCode, payload) +} + +func (o *PipelineServiceUpdatePipelineDefaultVersionV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/pipelines/{pipeline_id}/default_version/{version_id}][%d] PipelineService_UpdatePipelineDefaultVersionV1 default %s", o._statusCode, payload) +} + +func (o *PipelineServiceUpdatePipelineDefaultVersionV1Default) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceUpdatePipelineDefaultVersionV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.GatewayruntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/pipeline_model/api_get_template_response.go b/backend/api/v1beta1/go_http_client/pipeline_model/api_get_template_response.go index bf3917f7f83..c537dd8325a 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_model/api_get_template_response.go +++ b/backend/api/v1beta1/go_http_client/pipeline_model/api_get_template_response.go @@ -6,12 +6,14 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + 
"github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIGetTemplateResponse api get template response +// // swagger:model apiGetTemplateResponse type APIGetTemplateResponse struct { @@ -25,6 +27,11 @@ func (m *APIGetTemplateResponse) Validate(formats strfmt.Registry) error { return nil } +// ContextValidate validates this api get template response based on context it is used +func (m *APIGetTemplateResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *APIGetTemplateResponse) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v1beta1/go_http_client/pipeline_model/api_list_pipeline_versions_response.go b/backend/api/v1beta1/go_http_client/pipeline_model/api_list_pipeline_versions_response.go index 4c4cc8e6a0a..6d6e87edc82 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_model/api_list_pipeline_versions_response.go +++ b/backend/api/v1beta1/go_http_client/pipeline_model/api_list_pipeline_versions_response.go @@ -6,15 +6,16 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIListPipelineVersionsResponse api list pipeline versions response +// // swagger:model apiListPipelineVersionsResponse type APIListPipelineVersionsResponse struct { @@ -43,7 +44,6 @@ func (m *APIListPipelineVersionsResponse) Validate(formats strfmt.Registry) erro } func (m *APIListPipelineVersionsResponse) validateVersions(formats strfmt.Registry) error { - if swag.IsZero(m.Versions) { // not required return nil } @@ -57,6 +57,47 @@ func (m *APIListPipelineVersionsResponse) validateVersions(formats strfmt.Regist if err := m.Versions[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("versions" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("versions" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this api list pipeline versions response based on the context it is used +func (m *APIListPipelineVersionsResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateVersions(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIListPipelineVersionsResponse) contextValidateVersions(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Versions); i++ { + + if m.Versions[i] != nil { + + if swag.IsZero(m.Versions[i]) { // not required + return nil + } + + if err := m.Versions[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("versions" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("versions" + "." 
+ strconv.Itoa(i)) } return err } diff --git a/backend/api/v1beta1/go_http_client/pipeline_model/api_list_pipelines_response.go b/backend/api/v1beta1/go_http_client/pipeline_model/api_list_pipelines_response.go index 4f3dfc30744..97a2e2fa338 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_model/api_list_pipelines_response.go +++ b/backend/api/v1beta1/go_http_client/pipeline_model/api_list_pipelines_response.go @@ -6,15 +6,16 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIListPipelinesResponse api list pipelines response +// // swagger:model apiListPipelinesResponse type APIListPipelinesResponse struct { @@ -43,7 +44,6 @@ func (m *APIListPipelinesResponse) Validate(formats strfmt.Registry) error { } func (m *APIListPipelinesResponse) validatePipelines(formats strfmt.Registry) error { - if swag.IsZero(m.Pipelines) { // not required return nil } @@ -57,6 +57,47 @@ func (m *APIListPipelinesResponse) validatePipelines(formats strfmt.Registry) er if err := m.Pipelines[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("pipelines" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipelines" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this api list pipelines response based on the context it is used +func (m *APIListPipelinesResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidatePipelines(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIListPipelinesResponse) contextValidatePipelines(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Pipelines); i++ { + + if m.Pipelines[i] != nil { + + if swag.IsZero(m.Pipelines[i]) { // not required + return nil + } + + if err := m.Pipelines[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("pipelines" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipelines" + "." 
+ strconv.Itoa(i)) } return err } diff --git a/backend/api/v1beta1/go_http_client/pipeline_model/api_parameter.go b/backend/api/v1beta1/go_http_client/pipeline_model/api_parameter.go index 68fa4288480..2b361318330 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_model/api_parameter.go +++ b/backend/api/v1beta1/go_http_client/pipeline_model/api_parameter.go @@ -6,12 +6,14 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIParameter api parameter +// // swagger:model apiParameter type APIParameter struct { @@ -27,6 +29,11 @@ func (m *APIParameter) Validate(formats strfmt.Registry) error { return nil } +// ContextValidate validates this api parameter based on context it is used +func (m *APIParameter) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *APIParameter) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v1beta1/go_http_client/pipeline_model/api_pipeline.go b/backend/api/v1beta1/go_http_client/pipeline_model/api_pipeline.go index 53f03984bb8..91c073f9583 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_model/api_pipeline.go +++ b/backend/api/v1beta1/go_http_client/pipeline_model/api_pipeline.go @@ -6,16 +6,17 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // APIPipeline api pipeline +// // swagger:model apiPipeline type APIPipeline struct { @@ -90,7 +91,6 @@ func (m *APIPipeline) Validate(formats strfmt.Registry) error { } func (m *APIPipeline) validateCreatedAt(formats strfmt.Registry) error { - if swag.IsZero(m.CreatedAt) { // not required return nil } @@ -103,7 +103,6 @@ func (m *APIPipeline) validateCreatedAt(formats strfmt.Registry) error { } func (m *APIPipeline) validateDefaultVersion(formats strfmt.Registry) error { - if swag.IsZero(m.DefaultVersion) { // not required return nil } @@ -112,6 +111,8 @@ func (m *APIPipeline) validateDefaultVersion(formats strfmt.Registry) error { if err := m.DefaultVersion.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("default_version") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("default_version") } return err } @@ -121,7 +122,6 @@ func (m *APIPipeline) validateDefaultVersion(formats strfmt.Registry) error { } func (m *APIPipeline) validateParameters(formats strfmt.Registry) error { - if swag.IsZero(m.Parameters) { // not required return nil } @@ -135,6 +135,8 @@ func (m *APIPipeline) validateParameters(formats strfmt.Registry) error { if err := m.Parameters[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("parameters" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("parameters" + "." 
+ strconv.Itoa(i)) } return err } @@ -146,7 +148,6 @@ func (m *APIPipeline) validateParameters(formats strfmt.Registry) error { } func (m *APIPipeline) validateResourceReferences(formats strfmt.Registry) error { - if swag.IsZero(m.ResourceReferences) { // not required return nil } @@ -160,6 +161,8 @@ func (m *APIPipeline) validateResourceReferences(formats strfmt.Registry) error if err := m.ResourceReferences[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("resource_references" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("resource_references" + "." + strconv.Itoa(i)) } return err } @@ -171,7 +174,6 @@ func (m *APIPipeline) validateResourceReferences(formats strfmt.Registry) error } func (m *APIPipeline) validateURL(formats strfmt.Registry) error { - if swag.IsZero(m.URL) { // not required return nil } @@ -180,6 +182,126 @@ func (m *APIPipeline) validateURL(formats strfmt.Registry) error { if err := m.URL.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("url") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("url") + } + return err + } + } + + return nil +} + +// ContextValidate validate this api pipeline based on the context it is used +func (m *APIPipeline) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateDefaultVersion(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateParameters(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateResourceReferences(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateURL(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIPipeline) contextValidateDefaultVersion(ctx context.Context, formats strfmt.Registry) error { + + if m.DefaultVersion != nil { + + if swag.IsZero(m.DefaultVersion) { // not required + return nil + } + + if err := m.DefaultVersion.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("default_version") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("default_version") + } + return err + } + } + + return nil +} + +func (m *APIPipeline) contextValidateParameters(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Parameters); i++ { + + if m.Parameters[i] != nil { + + if swag.IsZero(m.Parameters[i]) { // not required + return nil + } + + if err := m.Parameters[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("parameters" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("parameters" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +func (m *APIPipeline) contextValidateResourceReferences(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.ResourceReferences); i++ { + + if m.ResourceReferences[i] != nil { + + if swag.IsZero(m.ResourceReferences[i]) { // not required + return nil + } + + if err := m.ResourceReferences[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("resource_references" + "." 
+ strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("resource_references" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +func (m *APIPipeline) contextValidateURL(ctx context.Context, formats strfmt.Registry) error { + + if m.URL != nil { + + if swag.IsZero(m.URL) { // not required + return nil + } + + if err := m.URL.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("url") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("url") } return err } diff --git a/backend/api/v1beta1/go_http_client/pipeline_model/api_pipeline_version.go b/backend/api/v1beta1/go_http_client/pipeline_model/api_pipeline_version.go index 7c3b26bec3c..8756a52c9d2 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_model/api_pipeline_version.go +++ b/backend/api/v1beta1/go_http_client/pipeline_model/api_pipeline_version.go @@ -6,16 +6,17 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // APIPipelineVersion api pipeline version +// // swagger:model apiPipelineVersion type APIPipelineVersion struct { @@ -78,7 +79,6 @@ func (m *APIPipelineVersion) Validate(formats strfmt.Registry) error { } func (m *APIPipelineVersion) validateCreatedAt(formats strfmt.Registry) error { - if swag.IsZero(m.CreatedAt) { // not required return nil } @@ -91,7 +91,6 @@ func (m *APIPipelineVersion) validateCreatedAt(formats strfmt.Registry) error { } func (m *APIPipelineVersion) validatePackageURL(formats strfmt.Registry) error { - if swag.IsZero(m.PackageURL) { // not required return nil } @@ -100,6 +99,8 @@ func (m *APIPipelineVersion) validatePackageURL(formats strfmt.Registry) error { if err := m.PackageURL.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("package_url") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("package_url") } return err } @@ -109,7 +110,6 @@ func (m *APIPipelineVersion) validatePackageURL(formats strfmt.Registry) error { } func (m *APIPipelineVersion) validateParameters(formats strfmt.Registry) error { - if swag.IsZero(m.Parameters) { // not required return nil } @@ -123,6 +123,8 @@ func (m *APIPipelineVersion) validateParameters(formats strfmt.Registry) error { if err := m.Parameters[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("parameters" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("parameters" + "." + strconv.Itoa(i)) } return err } @@ -134,7 +136,6 @@ func (m *APIPipelineVersion) validateParameters(formats strfmt.Registry) error { } func (m *APIPipelineVersion) validateResourceReferences(formats strfmt.Registry) error { - if swag.IsZero(m.ResourceReferences) { // not required return nil } @@ -148,6 +149,101 @@ func (m *APIPipelineVersion) validateResourceReferences(formats strfmt.Registry) if err := m.ResourceReferences[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("resource_references" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("resource_references" + "." 
+ strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this api pipeline version based on the context it is used +func (m *APIPipelineVersion) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidatePackageURL(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateParameters(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateResourceReferences(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIPipelineVersion) contextValidatePackageURL(ctx context.Context, formats strfmt.Registry) error { + + if m.PackageURL != nil { + + if swag.IsZero(m.PackageURL) { // not required + return nil + } + + if err := m.PackageURL.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("package_url") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("package_url") + } + return err + } + } + + return nil +} + +func (m *APIPipelineVersion) contextValidateParameters(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Parameters); i++ { + + if m.Parameters[i] != nil { + + if swag.IsZero(m.Parameters[i]) { // not required + return nil + } + + if err := m.Parameters[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("parameters" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("parameters" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +func (m *APIPipelineVersion) contextValidateResourceReferences(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.ResourceReferences); i++ { + + if m.ResourceReferences[i] != nil { + + if swag.IsZero(m.ResourceReferences[i]) { // not required + return nil + } + + if err := m.ResourceReferences[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("resource_references" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("resource_references" + "." + strconv.Itoa(i)) } return err } diff --git a/backend/api/v1beta1/go_http_client/pipeline_model/api_relationship.go b/backend/api/v1beta1/go_http_client/pipeline_model/api_relationship.go index ab414853c95..bdf835420da 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_model/api_relationship.go +++ b/backend/api/v1beta1/go_http_client/pipeline_model/api_relationship.go @@ -6,18 +6,28 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // APIRelationship api relationship +// // swagger:model apiRelationship type APIRelationship string +func NewAPIRelationship(value APIRelationship) *APIRelationship { + return &value +} + +// Pointer returns a pointer to a freshly-allocated APIRelationship. 
+func (m APIRelationship) Pointer() *APIRelationship { + return &m +} + const ( // APIRelationshipUNKNOWNRELATIONSHIP captures enum value "UNKNOWN_RELATIONSHIP" @@ -44,7 +54,7 @@ func init() { } func (m APIRelationship) validateAPIRelationshipEnum(path, location string, value APIRelationship) error { - if err := validate.Enum(path, location, value, apiRelationshipEnum); err != nil { + if err := validate.EnumCase(path, location, value, apiRelationshipEnum, true); err != nil { return err } return nil @@ -64,3 +74,8 @@ func (m APIRelationship) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this api relationship based on context it is used +func (m APIRelationship) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v1beta1/go_http_client/pipeline_model/api_resource_key.go b/backend/api/v1beta1/go_http_client/pipeline_model/api_resource_key.go index c0fb0fae089..b0647df0afd 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_model/api_resource_key.go +++ b/backend/api/v1beta1/go_http_client/pipeline_model/api_resource_key.go @@ -6,13 +6,15 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIResourceKey api resource key +// // swagger:model apiResourceKey type APIResourceKey struct { @@ -20,7 +22,7 @@ type APIResourceKey struct { ID string `json:"id,omitempty"` // The type of the resource that referred to. - Type APIResourceType `json:"type,omitempty"` + Type *APIResourceType `json:"type,omitempty"` } // Validate validates this api resource key @@ -38,16 +40,54 @@ func (m *APIResourceKey) Validate(formats strfmt.Registry) error { } func (m *APIResourceKey) validateType(formats strfmt.Registry) error { - if swag.IsZero(m.Type) { // not required return nil } - if err := m.Type.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("type") + if m.Type != nil { + if err := m.Type.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// ContextValidate validate this api resource key based on the context it is used +func (m *APIResourceKey) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateType(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *APIResourceKey) contextValidateType(ctx context.Context, formats strfmt.Registry) error { + + if m.Type != nil { + + if swag.IsZero(m.Type) { // not required + return nil + } + + if err := m.Type.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err } - return err } return nil diff --git a/backend/api/v1beta1/go_http_client/pipeline_model/api_resource_reference.go b/backend/api/v1beta1/go_http_client/pipeline_model/api_resource_reference.go index 975fe1f830a..5f9ee1ebda1 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_model/api_resource_reference.go +++ b/backend/api/v1beta1/go_http_client/pipeline_model/api_resource_reference.go @@ -6,13 +6,15 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIResourceReference api resource reference +// // swagger:model apiResourceReference type APIResourceReference struct { @@ -23,7 +25,7 @@ type APIResourceReference struct { Name string `json:"name,omitempty"` // Required field. The relationship from referred resource to the object. - Relationship APIRelationship `json:"relationship,omitempty"` + Relationship *APIRelationship `json:"relationship,omitempty"` } // Validate validates this api resource reference @@ -45,7 +47,6 @@ func (m *APIResourceReference) Validate(formats strfmt.Registry) error { } func (m *APIResourceReference) validateKey(formats strfmt.Registry) error { - if swag.IsZero(m.Key) { // not required return nil } @@ -54,6 +55,8 @@ func (m *APIResourceReference) validateKey(formats strfmt.Registry) error { if err := m.Key.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("key") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("key") } return err } @@ -63,16 +66,79 @@ func (m *APIResourceReference) validateKey(formats strfmt.Registry) error { } func (m *APIResourceReference) validateRelationship(formats strfmt.Registry) error { - if swag.IsZero(m.Relationship) { // not required return nil } - if err := m.Relationship.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("relationship") + if m.Relationship != nil { + if err := m.Relationship.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("relationship") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("relationship") + } + return err + } + } + + return nil +} + +// ContextValidate validate this api resource reference based on the context it is used +func (m *APIResourceReference) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateKey(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateRelationship(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *APIResourceReference) contextValidateKey(ctx context.Context, formats strfmt.Registry) error { + + if m.Key != nil { + + if swag.IsZero(m.Key) { // not required + return nil + } + + if err := m.Key.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("key") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("key") + } + return err + } + } + + return nil +} + +func (m *APIResourceReference) contextValidateRelationship(ctx context.Context, formats strfmt.Registry) error { + + if m.Relationship != nil { + + if swag.IsZero(m.Relationship) { // not required + return nil + } + + if err := m.Relationship.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("relationship") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("relationship") + } + return err } - return err } return nil diff --git a/backend/api/v1beta1/go_http_client/pipeline_model/api_resource_type.go b/backend/api/v1beta1/go_http_client/pipeline_model/api_resource_type.go index 8df611d77ff..0dd46e058bd 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_model/api_resource_type.go +++ b/backend/api/v1beta1/go_http_client/pipeline_model/api_resource_type.go @@ -6,18 +6,28 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // APIResourceType api resource type +// // swagger:model apiResourceType type APIResourceType string +func NewAPIResourceType(value APIResourceType) *APIResourceType { + return &value +} + +// Pointer returns a pointer to a freshly-allocated APIResourceType. 
+func (m APIResourceType) Pointer() *APIResourceType { + return &m +} + const ( // APIResourceTypeUNKNOWNRESOURCETYPE captures enum value "UNKNOWN_RESOURCE_TYPE" @@ -53,7 +63,7 @@ func init() { } func (m APIResourceType) validateAPIResourceTypeEnum(path, location string, value APIResourceType) error { - if err := validate.Enum(path, location, value, apiResourceTypeEnum); err != nil { + if err := validate.EnumCase(path, location, value, apiResourceTypeEnum, true); err != nil { return err } return nil @@ -73,3 +83,8 @@ func (m APIResourceType) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this api resource type based on context it is used +func (m APIResourceType) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v1beta1/go_http_client/pipeline_model/api_url.go b/backend/api/v1beta1/go_http_client/pipeline_model/api_url.go index 69be2fc2277..4abcd729b9f 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_model/api_url.go +++ b/backend/api/v1beta1/go_http_client/pipeline_model/api_url.go @@ -6,12 +6,14 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIURL api Url +// // swagger:model apiUrl type APIURL struct { @@ -24,6 +26,11 @@ func (m *APIURL) Validate(formats strfmt.Registry) error { return nil } +// ContextValidate validates this api Url based on context it is used +func (m *APIURL) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *APIURL) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v1beta1/go_http_client/pipeline_model/gatewayruntime_error.go b/backend/api/v1beta1/go_http_client/pipeline_model/gatewayruntime_error.go deleted file mode 100644 index edc8cf46ff9..00000000000 --- a/backend/api/v1beta1/go_http_client/pipeline_model/gatewayruntime_error.go +++ /dev/null @@ -1,89 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package pipeline_model - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "strconv" - - strfmt "github.com/go-openapi/strfmt" - - "github.com/go-openapi/errors" - "github.com/go-openapi/swag" -) - -// GatewayruntimeError gatewayruntime error -// swagger:model gatewayruntimeError -type GatewayruntimeError struct { - - // code - Code int32 `json:"code,omitempty"` - - // details - Details []*ProtobufAny `json:"details"` - - // error - Error string `json:"error,omitempty"` - - // message - Message string `json:"message,omitempty"` -} - -// Validate validates this gatewayruntime error -func (m *GatewayruntimeError) Validate(formats strfmt.Registry) error { - var res []error - - if err := m.validateDetails(formats); err != nil { - res = append(res, err) - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) 
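Reviewer note (illustrative, not part of the generated patch): the pipeline_model hunks above switch the enum-typed fields to pointers and add the NewAPIResourceType/Pointer helpers plus ContextValidate. A minimal caller-side sketch, assuming the import path matches this repository layout and using only the UNKNOWN_* enum values visible in these hunks:

package main

import (
	"context"
	"fmt"

	"github.com/go-openapi/strfmt"

	pipeline_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model"
)

func main() {
	// Enum fields are now *APIResourceType / *APIRelationship, so they are set
	// through the new constructors or the Pointer() method on a value.
	ref := &pipeline_model.APIResourceReference{
		Key: &pipeline_model.APIResourceKey{
			ID:   "some-resource-id", // placeholder
			Type: pipeline_model.NewAPIResourceType(pipeline_model.APIResourceTypeUNKNOWNRESOURCETYPE),
		},
		Relationship: pipeline_model.APIRelationshipUNKNOWNRELATIONSHIP.Pointer(),
	}

	// Validate checks enum membership through the regenerated EnumCase call;
	// ContextValidate is the new context-aware pass that descends into Key and
	// Relationship when they are non-nil.
	if err := ref.Validate(strfmt.Default); err != nil {
		fmt.Println("validate:", err)
	}
	if err := ref.ContextValidate(context.Background(), strfmt.Default); err != nil {
		fmt.Println("context validate:", err)
	}
	fmt.Println("resource reference validated")
}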
- } - return nil -} - -func (m *GatewayruntimeError) validateDetails(formats strfmt.Registry) error { - - if swag.IsZero(m.Details) { // not required - return nil - } - - for i := 0; i < len(m.Details); i++ { - if swag.IsZero(m.Details[i]) { // not required - continue - } - - if m.Details[i] != nil { - if err := m.Details[i].Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("details" + "." + strconv.Itoa(i)) - } - return err - } - } - - } - - return nil -} - -// MarshalBinary interface implementation -func (m *GatewayruntimeError) MarshalBinary() ([]byte, error) { - if m == nil { - return nil, nil - } - return swag.WriteJSON(m) -} - -// UnmarshalBinary interface implementation -func (m *GatewayruntimeError) UnmarshalBinary(b []byte) error { - var res GatewayruntimeError - if err := swag.ReadJSON(b, &res); err != nil { - return err - } - *m = res - return nil -} diff --git a/backend/api/v1beta1/go_http_client/pipeline_model/googlerpc_status.go b/backend/api/v1beta1/go_http_client/pipeline_model/googlerpc_status.go new file mode 100644 index 00000000000..67080c9a32e --- /dev/null +++ b/backend/api/v1beta1/go_http_client/pipeline_model/googlerpc_status.go @@ -0,0 +1,127 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// GooglerpcStatus googlerpc status +// +// swagger:model googlerpcStatus +type GooglerpcStatus struct { + + // code + Code int32 `json:"code,omitempty"` + + // details + Details []*ProtobufAny `json:"details"` + + // message + Message string `json:"message,omitempty"` +} + +// Validate validates this googlerpc status +func (m *GooglerpcStatus) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateDetails(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) validateDetails(formats strfmt.Registry) error { + if swag.IsZero(m.Details) { // not required + return nil + } + + for i := 0; i < len(m.Details); i++ { + if swag.IsZero(m.Details[i]) { // not required + continue + } + + if m.Details[i] != nil { + if err := m.Details[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this googlerpc status based on the context it is used +func (m *GooglerpcStatus) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateDetails(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *GooglerpcStatus) contextValidateDetails(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Details); i++ { + + if m.Details[i] != nil { + + if swag.IsZero(m.Details[i]) { // not required + return nil + } + + if err := m.Details[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *GooglerpcStatus) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *GooglerpcStatus) UnmarshalBinary(b []byte) error { + var res GooglerpcStatus + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v1beta1/go_http_client/pipeline_model/protobuf_any.go b/backend/api/v1beta1/go_http_client/pipeline_model/protobuf_any.go index fb51adb0990..fba86c1d06b 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_model/protobuf_any.go +++ b/backend/api/v1beta1/go_http_client/pipeline_model/protobuf_any.go @@ -6,9 +6,10 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "encoding/json" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) @@ -20,45 +21,49 @@ import ( // // Example 1: Pack and unpack a message in C++. // -// Foo foo = ...; -// Any any; -// any.PackFrom(foo); -// ... -// if (any.UnpackTo(&foo)) { -// ... -// } +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } // // Example 2: Pack and unpack a message in Java. // -// Foo foo = ...; -// Any any = Any.pack(foo); -// ... -// if (any.is(Foo.class)) { -// foo = any.unpack(Foo.class); -// } -// -// Example 3: Pack and unpack a message in Python. -// -// foo = Foo(...) -// any = Any() -// any.Pack(foo) -// ... -// if any.Is(Foo.DESCRIPTOR): -// any.Unpack(foo) -// ... -// -// Example 4: Pack and unpack a message in Go -// -// foo := &pb.Foo{...} -// any, err := anypb.New(foo) -// if err != nil { -// ... -// } -// ... -// foo := &pb.Foo{} -// if err := any.UnmarshalTo(foo); err != nil { -// ... -// } +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// // or ... +// if (any.isSameTypeAs(Foo.getDefaultInstance())) { +// foo = any.unpack(Foo.getDefaultInstance()); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... +// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := anypb.New(foo) +// if err != nil { +// ... +// } +// ... +// foo := &pb.Foo{} +// if err := any.UnmarshalTo(foo); err != nil { +// ... +// } // // The pack methods provided by protobuf library will by default use // 'type.googleapis.com/full.type.name' as the type URL and the unpack @@ -66,34 +71,34 @@ import ( // in the type URL, for example "foo.bar.com/x/y.z" will yield type // name "y.z". 
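Illustrative sketch (not in the patch): with gatewayruntimeError removed, non-OK bodies from the v1beta1 pipeline endpoints are modeled by the new GooglerpcStatus, so error payloads decode as below. The JSON body is a made-up example.

package main

import (
	"encoding/json"
	"fmt"

	pipeline_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model"
)

func main() {
	// A fabricated error body in the google.rpc.Status shape used by the API.
	body := []byte(`{"code": 5, "message": "pipeline not found", "details": []}`)

	var status pipeline_model.GooglerpcStatus
	if err := json.Unmarshal(body, &status); err != nil {
		panic(err)
	}

	// The old model's separate "error" string is gone; code, message and
	// details now carry the whole status.
	fmt.Printf("code=%d message=%q details=%d\n", status.Code, status.Message, len(status.Details))
}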
// -// // JSON -// +// ==== // The JSON representation of an `Any` value uses the regular // representation of the deserialized, embedded message, with an // additional field `@type` which contains the type URL. Example: // -// package google.profile; -// message Person { -// string first_name = 1; -// string last_name = 2; -// } +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } // -// { -// "@type": "type.googleapis.com/google.profile.Person", -// "firstName": , -// "lastName": -// } +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } // // If the embedded message type is well-known and has a custom JSON // representation, that representation will be embedded adding a field // `value` which holds the custom JSON in addition to the `@type` // field. Example (for message [google.protobuf.Duration][]): // -// { -// "@type": "type.googleapis.com/google.protobuf.Duration", -// "value": "1.212s" -// } +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// // swagger:model protobufAny type ProtobufAny struct { @@ -120,39 +125,151 @@ type ProtobufAny struct { // // Note: this functionality is not currently available in the official // protobuf release, and it is not used for type URLs beginning with - // type.googleapis.com. + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. // // Schemes other than `http`, `https` (or the empty scheme) might be // used with implementation specific semantics. - TypeURL string `json:"type_url,omitempty"` + AtType string `json:"@type,omitempty"` - // Must be a valid serialized protocol buffer of the above specified type. - // Format: byte - Value strfmt.Base64 `json:"value,omitempty"` + // protobuf any + ProtobufAny map[string]interface{} `json:"-"` } -// Validate validates this protobuf any -func (m *ProtobufAny) Validate(formats strfmt.Registry) error { - var res []error +// UnmarshalJSON unmarshals this object with additional properties from JSON +func (m *ProtobufAny) UnmarshalJSON(data []byte) error { + // stage 1, bind the properties + var stage1 struct { + + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) 
+ // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + AtType string `json:"@type,omitempty"` + } + if err := json.Unmarshal(data, &stage1); err != nil { + return err + } + var rcv ProtobufAny + + rcv.AtType = stage1.AtType + *m = rcv - if err := m.validateValue(formats); err != nil { - res = append(res, err) + // stage 2, remove properties and add to map + stage2 := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &stage2); err != nil { + return err } - if len(res) > 0 { - return errors.CompositeValidationError(res...) + delete(stage2, "@type") + // stage 3, add additional properties values + if len(stage2) > 0 { + result := make(map[string]interface{}) + for k, v := range stage2 { + var toadd interface{} + if err := json.Unmarshal(v, &toadd); err != nil { + return err + } + result[k] = toadd + } + m.ProtobufAny = result } + return nil } -func (m *ProtobufAny) validateValue(formats strfmt.Registry) error { +// MarshalJSON marshals this object with additional properties into a JSON object +func (m ProtobufAny) MarshalJSON() ([]byte, error) { + var stage1 struct { - if swag.IsZero(m.Value) { // not required - return nil + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. 
+ AtType string `json:"@type,omitempty"` } - // Format "byte" (base64 string) is already validated when unmarshalled + stage1.AtType = m.AtType + + // make JSON object for known properties + props, err := json.Marshal(stage1) + if err != nil { + return nil, err + } + + if len(m.ProtobufAny) == 0 { // no additional properties + return props, nil + } + + // make JSON object for the additional properties + additional, err := json.Marshal(m.ProtobufAny) + if err != nil { + return nil, err + } + + if len(props) < 3 { // "{}": only additional properties + return additional, nil + } + + // concatenate the 2 objects + return swag.ConcatJSON(props, additional), nil +} + +// Validate validates this protobuf any +func (m *ProtobufAny) Validate(formats strfmt.Registry) error { + return nil +} +// ContextValidate validates this protobuf any based on context it is used +func (m *ProtobufAny) ContextValidate(ctx context.Context, formats strfmt.Registry) error { return nil } diff --git a/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_client.go b/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_client.go index d0d7059ecb9..e0c459f0bd2 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_client.go +++ b/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_client.go @@ -8,8 +8,7 @@ package pipeline_upload_client import ( "github.com/go-openapi/runtime" httptransport "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service" ) @@ -56,9 +55,7 @@ func New(transport runtime.ClientTransport, formats strfmt.Registry) *PipelineUp cli := new(PipelineUpload) cli.Transport = transport - cli.PipelineUploadService = pipeline_upload_service.New(transport, formats) - return cli } @@ -103,7 +100,7 @@ func (cfg *TransportConfig) WithSchemes(schemes []string) *TransportConfig { // PipelineUpload is a client for pipeline upload type PipelineUpload struct { - PipelineUploadService *pipeline_upload_service.Client + PipelineUploadService pipeline_upload_service.ClientService Transport runtime.ClientTransport } @@ -111,7 +108,5 @@ type PipelineUpload struct { // SetTransport changes the transport on the client and all its subresources func (c *PipelineUpload) SetTransport(transport runtime.ClientTransport) { c.Transport = transport - c.PipelineUploadService.SetTransport(transport) - } diff --git a/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/pipeline_upload_service_client.go b/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/pipeline_upload_service_client.go index 42494cf4755..1bfc8666f4b 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/pipeline_upload_service_client.go +++ b/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/pipeline_upload_service_client.go @@ -7,15 +7,40 @@ package pipeline_upload_service import ( "github.com/go-openapi/runtime" - - strfmt "github.com/go-openapi/strfmt" + httptransport "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" ) // New creates a new pipeline upload service API client. 
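Illustrative only: ProtobufAny now keys on the JSON `@type` discriminator (field AtType) and keeps the remaining properties in an untyped map instead of the old base64 Value field. A small round-trip sketch; the type URL and extra property below are placeholders:

package main

import (
	"encoding/json"
	"fmt"

	pipeline_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model"
)

func main() {
	in := []byte(`{"@type": "type.googleapis.com/google.rpc.ErrorInfo", "reason": "QUOTA_EXCEEDED"}`)

	var anyMsg pipeline_model.ProtobufAny
	// The custom UnmarshalJSON splits @type from the remaining properties.
	if err := json.Unmarshal(in, &anyMsg); err != nil {
		panic(err)
	}
	fmt.Println("type:", anyMsg.AtType)
	fmt.Println("extra:", anyMsg.ProtobufAny["reason"])

	// MarshalJSON concatenates the known @type property with the additional
	// properties map, so the round trip preserves both.
	out, err := json.Marshal(anyMsg)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}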
-func New(transport runtime.ClientTransport, formats strfmt.Registry) *Client { +func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService { return &Client{transport: transport, formats: formats} } +// New creates a new pipeline upload service API client with basic auth credentials. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - user: user for basic authentication header. +// - password: password for basic authentication header. +func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BasicAuth(user, password) + return &Client{transport: transport, formats: strfmt.Default} +} + +// New creates a new pipeline upload service API client with a bearer token for authentication. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - bearerToken: bearer token for Bearer authentication header. +func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BearerToken(bearerToken) + return &Client{transport: transport, formats: strfmt.Default} +} + /* Client for pipeline upload service API */ @@ -24,16 +49,51 @@ type Client struct { formats strfmt.Registry } +// ClientOption may be used to customize the behavior of Client methods. +type ClientOption func(*runtime.ClientOperation) + +// This client is generated with a few options you might find useful for your swagger spec. +// +// Feel free to add you own set of options. + +// WithContentType allows the client to force the Content-Type header +// to negotiate a specific Consumer from the server. +// +// You may use this option to set arbitrary extensions to your MIME media type. +func WithContentType(mime string) ClientOption { + return func(r *runtime.ClientOperation) { + r.ConsumesMediaTypes = []string{mime} + } +} + +// WithContentTypeApplicationJSON sets the Content-Type header to "application/json". +func WithContentTypeApplicationJSON(r *runtime.ClientOperation) { + r.ConsumesMediaTypes = []string{"application/json"} +} + +// WithContentTypeMultipartFormData sets the Content-Type header to "multipart/form-data". 
+func WithContentTypeMultipartFormData(r *runtime.ClientOperation) { + r.ConsumesMediaTypes = []string{"multipart/form-data"} +} + +// ClientService is the interface for Client methods +type ClientService interface { + UploadPipeline(params *UploadPipelineParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UploadPipelineOK, error) + + UploadPipelineVersion(params *UploadPipelineVersionParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UploadPipelineVersionOK, error) + + SetTransport(transport runtime.ClientTransport) +} + /* UploadPipeline upload pipeline API */ -func (a *Client) UploadPipeline(params *UploadPipelineParams, authInfo runtime.ClientAuthInfoWriter) (*UploadPipelineOK, error) { +func (a *Client) UploadPipeline(params *UploadPipelineParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UploadPipelineOK, error) { // TODO: Validate the params before sending if params == nil { params = NewUploadPipelineParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "UploadPipeline", Method: "POST", PathPattern: "/apis/v1beta1/pipelines/upload", @@ -45,24 +105,33 @@ func (a *Client) UploadPipeline(params *UploadPipelineParams, authInfo runtime.C AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*UploadPipelineOK), nil - + success, ok := result.(*UploadPipelineOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*UploadPipelineDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* UploadPipelineVersion upload pipeline version API */ -func (a *Client) UploadPipelineVersion(params *UploadPipelineVersionParams, authInfo runtime.ClientAuthInfoWriter) (*UploadPipelineVersionOK, error) { +func (a *Client) UploadPipelineVersion(params *UploadPipelineVersionParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UploadPipelineVersionOK, error) { // TODO: Validate the params before sending if params == nil { params = NewUploadPipelineVersionParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "UploadPipelineVersion", Method: "POST", PathPattern: "/apis/v1beta1/pipelines/upload_version", @@ -74,12 +143,22 @@ func (a *Client) UploadPipelineVersion(params *UploadPipelineVersionParams, auth AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*UploadPipelineVersionOK), nil - + success, ok := result.(*UploadPipelineVersionOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*UploadPipelineVersionDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } // SetTransport changes the transport on the client diff --git a/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_parameters.go b/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_parameters.go index 3945ae005a7..5ab15cc6509 
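Illustrative usage sketch, not part of the patch: the upload service constructor now returns the ClientService interface, authentication can be baked into the transport via NewClientWithBearerToken, and each call accepts variadic ClientOptions. The host, token, and file path are placeholders, and a reachable API server is assumed.

package main

import (
	"fmt"
	"os"
	"time"

	upload "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service"
)

func main() {
	// Placeholder file; *os.File satisfies runtime.NamedReadCloser.
	f, err := os.Open("pipeline.yaml")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	// The constructor bakes the bearer token into the transport and returns
	// the ClientService interface rather than the concrete *Client.
	var svc upload.ClientService = upload.NewClientWithBearerToken(
		"ml-pipeline.kubeflow:8888", "/", "http", "<token>")

	params := upload.NewUploadPipelineParams().WithTimeout(30 * time.Second)
	params.Uploadfile = f

	// Per-call ClientOptions mutate the runtime.ClientOperation, e.g. forcing
	// the multipart consumer; authInfo is nil since the transport carries auth.
	ok, err := svc.UploadPipeline(params, nil, upload.WithContentTypeMultipartFormData)
	if err != nil {
		panic(err)
	}
	fmt.Printf("uploaded: %+v\n", ok.GetPayload())
}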
100644 --- a/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_parameters.go +++ b/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_parameters.go @@ -13,63 +13,66 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewUploadPipelineParams creates a new UploadPipelineParams object -// with the default values initialized. +// NewUploadPipelineParams creates a new UploadPipelineParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewUploadPipelineParams() *UploadPipelineParams { - var () return &UploadPipelineParams{ - timeout: cr.DefaultTimeout, } } // NewUploadPipelineParamsWithTimeout creates a new UploadPipelineParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewUploadPipelineParamsWithTimeout(timeout time.Duration) *UploadPipelineParams { - var () return &UploadPipelineParams{ - timeout: timeout, } } // NewUploadPipelineParamsWithContext creates a new UploadPipelineParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewUploadPipelineParamsWithContext(ctx context.Context) *UploadPipelineParams { - var () return &UploadPipelineParams{ - Context: ctx, } } // NewUploadPipelineParamsWithHTTPClient creates a new UploadPipelineParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewUploadPipelineParamsWithHTTPClient(client *http.Client) *UploadPipelineParams { - var () return &UploadPipelineParams{ HTTPClient: client, } } -/*UploadPipelineParams contains all the parameters to send to the API endpoint -for the upload pipeline operation typically these are written to a http.Request +/* +UploadPipelineParams contains all the parameters to send to the API endpoint + + for the upload pipeline operation. + + Typically these are written to a http.Request. */ type UploadPipelineParams struct { - /*Description*/ + // Description. Description *string - /*Name*/ + + // Name. Name *string - /*Namespace*/ + + // Namespace. Namespace *string - /*Uploadfile - The pipeline to upload. Maximum size of 32MB is supported. + /* Uploadfile. + + The pipeline to upload. Maximum size of 32MB is supported. */ Uploadfile runtime.NamedReadCloser @@ -78,6 +81,21 @@ type UploadPipelineParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the upload pipeline params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *UploadPipelineParams) WithDefaults() *UploadPipelineParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the upload pipeline params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *UploadPipelineParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the upload pipeline params func (o *UploadPipelineParams) WithTimeout(timeout time.Duration) *UploadPipelineParams { o.SetTimeout(timeout) @@ -167,50 +185,52 @@ func (o *UploadPipelineParams) WriteToRequest(r runtime.ClientRequest, reg strfm // query param description var qrDescription string + if o.Description != nil { qrDescription = *o.Description } qDescription := qrDescription if qDescription != "" { + if err := r.SetQueryParam("description", qDescription); err != nil { return err } } - } if o.Name != nil { // query param name var qrName string + if o.Name != nil { qrName = *o.Name } qName := qrName if qName != "" { + if err := r.SetQueryParam("name", qName); err != nil { return err } } - } if o.Namespace != nil { // query param namespace var qrNamespace string + if o.Namespace != nil { qrNamespace = *o.Namespace } qNamespace := qrNamespace if qNamespace != "" { + if err := r.SetQueryParam("namespace", qNamespace); err != nil { return err } } - } - // form file param uploadfile if err := r.SetFileParam("uploadfile", o.Uploadfile); err != nil { return err diff --git a/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_responses.go b/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_responses.go index 0e40fa36663..0be5a05c0d4 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_responses.go +++ b/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_responses.go @@ -6,14 +6,14 @@ package pipeline_upload_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_upload_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_upload_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_upload_model" ) // UploadPipelineReader is a Reader for the UploadPipeline structure. @@ -24,14 +24,12 @@ type UploadPipelineReader struct { // ReadResponse reads a server response into the received o. func (o *UploadPipelineReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewUploadPipelineOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewUploadPipelineDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewUploadPipelineOK() *UploadPipelineOK { return &UploadPipelineOK{} } -/*UploadPipelineOK handles this case with default header values. +/* +UploadPipelineOK describes a response with status code 200, with default header values. 
UploadPipelineOK upload pipeline o k */ @@ -57,8 +56,48 @@ type UploadPipelineOK struct { Payload *pipeline_upload_model.APIPipeline } +// IsSuccess returns true when this upload pipeline o k response has a 2xx status code +func (o *UploadPipelineOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this upload pipeline o k response has a 3xx status code +func (o *UploadPipelineOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this upload pipeline o k response has a 4xx status code +func (o *UploadPipelineOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this upload pipeline o k response has a 5xx status code +func (o *UploadPipelineOK) IsServerError() bool { + return false +} + +// IsCode returns true when this upload pipeline o k response a status code equal to that given +func (o *UploadPipelineOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the upload pipeline o k response +func (o *UploadPipelineOK) Code() int { + return 200 +} + func (o *UploadPipelineOK) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/pipelines/upload][%d] uploadPipelineOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/pipelines/upload][%d] uploadPipelineOK %s", 200, payload) +} + +func (o *UploadPipelineOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/pipelines/upload][%d] uploadPipelineOK %s", 200, payload) +} + +func (o *UploadPipelineOK) GetPayload() *pipeline_upload_model.APIPipeline { + return o.Payload } func (o *UploadPipelineOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,7 +119,8 @@ func NewUploadPipelineDefault(code int) *UploadPipelineDefault { } } -/*UploadPipelineDefault handles this case with default header values. +/* +UploadPipelineDefault describes a response with status code -1, with default header values. 
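Sketch only, assuming (as go-swagger readers typically do, though this hunk does not show it) that the *Default response is returned as the error value for non-200 codes: the regenerated responses expose IsSuccess/IsClientError/Code/GetPayload, so callers can classify outcomes without bespoke type switches.

package main

import (
	"errors"
	"fmt"

	upload "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service"
)

// classify reports what happened with an UploadPipeline call using the new
// status helpers on the generated response types.
func classify(ok *upload.UploadPipelineOK, err error) {
	if err != nil {
		var def *upload.UploadPipelineDefault
		if errors.As(err, &def) {
			fmt.Printf("API error: code=%d clientError=%v payload=%+v\n",
				def.Code(), def.IsClientError(), def.GetPayload())
			return
		}
		fmt.Println("transport error:", err)
		return
	}
	fmt.Printf("success=%v code=%d pipeline=%+v\n", ok.IsSuccess(), ok.Code(), ok.GetPayload())
}

func main() {
	// Simulate a non-2xx outcome with the generated constructor.
	classify(nil, upload.NewUploadPipelineDefault(404))
}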
UploadPipelineDefault upload pipeline default */ @@ -90,13 +130,48 @@ type UploadPipelineDefault struct { Payload *pipeline_upload_model.APIStatus } +// IsSuccess returns true when this upload pipeline default response has a 2xx status code +func (o *UploadPipelineDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this upload pipeline default response has a 3xx status code +func (o *UploadPipelineDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this upload pipeline default response has a 4xx status code +func (o *UploadPipelineDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this upload pipeline default response has a 5xx status code +func (o *UploadPipelineDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this upload pipeline default response a status code equal to that given +func (o *UploadPipelineDefault) IsCode(code int) bool { + return o._statusCode == code +} + // Code gets the status code for the upload pipeline default response func (o *UploadPipelineDefault) Code() int { return o._statusCode } func (o *UploadPipelineDefault) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/pipelines/upload][%d] UploadPipeline default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/pipelines/upload][%d] UploadPipeline default %s", o._statusCode, payload) +} + +func (o *UploadPipelineDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/pipelines/upload][%d] UploadPipeline default %s", o._statusCode, payload) +} + +func (o *UploadPipelineDefault) GetPayload() *pipeline_upload_model.APIStatus { + return o.Payload } func (o *UploadPipelineDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { diff --git a/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_parameters.go b/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_parameters.go index 863e9c58741..e6d3e84fc0f 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_parameters.go +++ b/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_parameters.go @@ -13,65 +13,69 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewUploadPipelineVersionParams creates a new UploadPipelineVersionParams object -// with the default values initialized. +// NewUploadPipelineVersionParams creates a new UploadPipelineVersionParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewUploadPipelineVersionParams() *UploadPipelineVersionParams { - var () return &UploadPipelineVersionParams{ - timeout: cr.DefaultTimeout, } } // NewUploadPipelineVersionParamsWithTimeout creates a new UploadPipelineVersionParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewUploadPipelineVersionParamsWithTimeout(timeout time.Duration) *UploadPipelineVersionParams { - var () return &UploadPipelineVersionParams{ - timeout: timeout, } } // NewUploadPipelineVersionParamsWithContext creates a new UploadPipelineVersionParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewUploadPipelineVersionParamsWithContext(ctx context.Context) *UploadPipelineVersionParams { - var () return &UploadPipelineVersionParams{ - Context: ctx, } } // NewUploadPipelineVersionParamsWithHTTPClient creates a new UploadPipelineVersionParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewUploadPipelineVersionParamsWithHTTPClient(client *http.Client) *UploadPipelineVersionParams { - var () return &UploadPipelineVersionParams{ HTTPClient: client, } } -/*UploadPipelineVersionParams contains all the parameters to send to the API endpoint -for the upload pipeline version operation typically these are written to a http.Request +/* +UploadPipelineVersionParams contains all the parameters to send to the API endpoint + + for the upload pipeline version operation. + + Typically these are written to a http.Request. */ type UploadPipelineVersionParams struct { - /*Description*/ + // Description. Description *string - /*Name*/ + + // Name. Name *string - /*Namespace*/ + + // Namespace. Namespace *string - /*Pipelineid*/ + + // Pipelineid. Pipelineid *string - /*Uploadfile - The pipeline to upload. Maximum size of 32MB is supported. + /* Uploadfile. + + The pipeline to upload. Maximum size of 32MB is supported. */ Uploadfile runtime.NamedReadCloser @@ -80,6 +84,21 @@ type UploadPipelineVersionParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the upload pipeline version params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *UploadPipelineVersionParams) WithDefaults() *UploadPipelineVersionParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the upload pipeline version params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *UploadPipelineVersionParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the upload pipeline version params func (o *UploadPipelineVersionParams) WithTimeout(timeout time.Duration) *UploadPipelineVersionParams { o.SetTimeout(timeout) @@ -180,66 +199,69 @@ func (o *UploadPipelineVersionParams) WriteToRequest(r runtime.ClientRequest, re // query param description var qrDescription string + if o.Description != nil { qrDescription = *o.Description } qDescription := qrDescription if qDescription != "" { + if err := r.SetQueryParam("description", qDescription); err != nil { return err } } - } if o.Name != nil { // query param name var qrName string + if o.Name != nil { qrName = *o.Name } qName := qrName if qName != "" { + if err := r.SetQueryParam("name", qName); err != nil { return err } } - } if o.Namespace != nil { // query param namespace var qrNamespace string + if o.Namespace != nil { qrNamespace = *o.Namespace } qNamespace := qrNamespace if qNamespace != "" { + if err := r.SetQueryParam("namespace", qNamespace); err != nil { return err } } - } if o.Pipelineid != nil { // query param pipelineid var qrPipelineid string + if o.Pipelineid != nil { qrPipelineid = *o.Pipelineid } qPipelineid := qrPipelineid if qPipelineid != "" { + if err := r.SetQueryParam("pipelineid", qPipelineid); err != nil { return err } } - } - // form file param uploadfile if err := r.SetFileParam("uploadfile", o.Uploadfile); err != nil { return err diff --git a/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_responses.go b/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_responses.go index fcb2e88dc1c..5b1790b916f 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_responses.go +++ b/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_responses.go @@ -6,14 +6,14 @@ package pipeline_upload_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_upload_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_upload_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_upload_model" ) // UploadPipelineVersionReader is a Reader for the UploadPipelineVersion structure. @@ -24,14 +24,12 @@ type UploadPipelineVersionReader struct { // ReadResponse reads a server response into the received o. func (o *UploadPipelineVersionReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewUploadPipelineVersionOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewUploadPipelineVersionDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewUploadPipelineVersionOK() *UploadPipelineVersionOK { return &UploadPipelineVersionOK{} } -/*UploadPipelineVersionOK handles this case with default header values. +/* +UploadPipelineVersionOK describes a response with status code 200, with default header values. 
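Illustrative, with placeholder values: the regenerated UploadPipelineVersionParams keeps the fluent With* style and adds WithDefaults/SetDefaults (currently a no-op, since no parameter defaults are defined); the optional query parameters stay pointer-typed, so the swag helpers are convenient.

package main

import (
	"fmt"
	"time"

	"github.com/go-openapi/swag"

	upload "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service"
)

func main() {
	params := upload.NewUploadPipelineVersionParams().
		WithDefaults(). // no-op today, per the generated SetDefaults
		WithTimeout(60 * time.Second)

	// Optional query parameters remain *string fields; values are placeholders.
	params.Name = swag.String("my-pipeline-v2")
	params.Pipelineid = swag.String("<pipeline-uuid>")
	params.Description = swag.String("second revision")

	fmt.Println("uploading version:", swag.StringValue(params.Name),
		"for pipeline:", swag.StringValue(params.Pipelineid))
}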
UploadPipelineVersionOK upload pipeline version o k */ @@ -57,8 +56,48 @@ type UploadPipelineVersionOK struct { Payload *pipeline_upload_model.APIPipelineVersion } +// IsSuccess returns true when this upload pipeline version o k response has a 2xx status code +func (o *UploadPipelineVersionOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this upload pipeline version o k response has a 3xx status code +func (o *UploadPipelineVersionOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this upload pipeline version o k response has a 4xx status code +func (o *UploadPipelineVersionOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this upload pipeline version o k response has a 5xx status code +func (o *UploadPipelineVersionOK) IsServerError() bool { + return false +} + +// IsCode returns true when this upload pipeline version o k response a status code equal to that given +func (o *UploadPipelineVersionOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the upload pipeline version o k response +func (o *UploadPipelineVersionOK) Code() int { + return 200 +} + func (o *UploadPipelineVersionOK) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/pipelines/upload_version][%d] uploadPipelineVersionOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/pipelines/upload_version][%d] uploadPipelineVersionOK %s", 200, payload) +} + +func (o *UploadPipelineVersionOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/pipelines/upload_version][%d] uploadPipelineVersionOK %s", 200, payload) +} + +func (o *UploadPipelineVersionOK) GetPayload() *pipeline_upload_model.APIPipelineVersion { + return o.Payload } func (o *UploadPipelineVersionOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,7 +119,8 @@ func NewUploadPipelineVersionDefault(code int) *UploadPipelineVersionDefault { } } -/*UploadPipelineVersionDefault handles this case with default header values. +/* +UploadPipelineVersionDefault describes a response with status code -1, with default header values. 
UploadPipelineVersionDefault upload pipeline version default */ @@ -90,13 +130,48 @@ type UploadPipelineVersionDefault struct { Payload *pipeline_upload_model.APIStatus } +// IsSuccess returns true when this upload pipeline version default response has a 2xx status code +func (o *UploadPipelineVersionDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this upload pipeline version default response has a 3xx status code +func (o *UploadPipelineVersionDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this upload pipeline version default response has a 4xx status code +func (o *UploadPipelineVersionDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this upload pipeline version default response has a 5xx status code +func (o *UploadPipelineVersionDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this upload pipeline version default response a status code equal to that given +func (o *UploadPipelineVersionDefault) IsCode(code int) bool { + return o._statusCode == code +} + // Code gets the status code for the upload pipeline version default response func (o *UploadPipelineVersionDefault) Code() int { return o._statusCode } func (o *UploadPipelineVersionDefault) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/pipelines/upload_version][%d] UploadPipelineVersion default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/pipelines/upload_version][%d] UploadPipelineVersion default %s", o._statusCode, payload) +} + +func (o *UploadPipelineVersionDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/pipelines/upload_version][%d] UploadPipelineVersion default %s", o._statusCode, payload) +} + +func (o *UploadPipelineVersionDefault) GetPayload() *pipeline_upload_model.APIStatus { + return o.Payload } func (o *UploadPipelineVersionDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { diff --git a/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_parameter.go b/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_parameter.go index f46f394429a..e9d8ab70597 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_parameter.go +++ b/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_parameter.go @@ -6,12 +6,14 @@ package pipeline_upload_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIParameter api parameter +// // swagger:model apiParameter type APIParameter struct { @@ -27,6 +29,11 @@ func (m *APIParameter) Validate(formats strfmt.Registry) error { return nil } +// ContextValidate validates this api parameter based on context it is used +func (m *APIParameter) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *APIParameter) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_pipeline.go b/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_pipeline.go index ba9aa33bb04..c30926d0454 100644 --- 
a/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_pipeline.go +++ b/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_pipeline.go @@ -6,16 +6,17 @@ package pipeline_upload_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // APIPipeline api pipeline +// // swagger:model apiPipeline type APIPipeline struct { @@ -60,7 +61,6 @@ func (m *APIPipeline) Validate(formats strfmt.Registry) error { } func (m *APIPipeline) validateCreatedAt(formats strfmt.Registry) error { - if swag.IsZero(m.CreatedAt) { // not required return nil } @@ -73,7 +73,6 @@ func (m *APIPipeline) validateCreatedAt(formats strfmt.Registry) error { } func (m *APIPipeline) validateParameters(formats strfmt.Registry) error { - if swag.IsZero(m.Parameters) { // not required return nil } @@ -87,6 +86,47 @@ func (m *APIPipeline) validateParameters(formats strfmt.Registry) error { if err := m.Parameters[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("parameters" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("parameters" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this api pipeline based on the context it is used +func (m *APIPipeline) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateParameters(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIPipeline) contextValidateParameters(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Parameters); i++ { + + if m.Parameters[i] != nil { + + if swag.IsZero(m.Parameters[i]) { // not required + return nil + } + + if err := m.Parameters[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("parameters" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("parameters" + "." 
+ strconv.Itoa(i)) } return err } diff --git a/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_pipeline_version.go b/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_pipeline_version.go index 1d6f169f7f0..3821c33972a 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_pipeline_version.go +++ b/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_pipeline_version.go @@ -6,16 +6,17 @@ package pipeline_upload_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // APIPipelineVersion api pipeline version +// // swagger:model apiPipelineVersion type APIPipelineVersion struct { @@ -72,7 +73,6 @@ func (m *APIPipelineVersion) Validate(formats strfmt.Registry) error { } func (m *APIPipelineVersion) validateCreatedAt(formats strfmt.Registry) error { - if swag.IsZero(m.CreatedAt) { // not required return nil } @@ -85,7 +85,6 @@ func (m *APIPipelineVersion) validateCreatedAt(formats strfmt.Registry) error { } func (m *APIPipelineVersion) validatePackageURL(formats strfmt.Registry) error { - if swag.IsZero(m.PackageURL) { // not required return nil } @@ -94,6 +93,8 @@ func (m *APIPipelineVersion) validatePackageURL(formats strfmt.Registry) error { if err := m.PackageURL.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("package_url") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("package_url") } return err } @@ -103,7 +104,6 @@ func (m *APIPipelineVersion) validatePackageURL(formats strfmt.Registry) error { } func (m *APIPipelineVersion) validateParameters(formats strfmt.Registry) error { - if swag.IsZero(m.Parameters) { // not required return nil } @@ -117,6 +117,8 @@ func (m *APIPipelineVersion) validateParameters(formats strfmt.Registry) error { if err := m.Parameters[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("parameters" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("parameters" + "." + strconv.Itoa(i)) } return err } @@ -128,7 +130,6 @@ func (m *APIPipelineVersion) validateParameters(formats strfmt.Registry) error { } func (m *APIPipelineVersion) validateResourceReferences(formats strfmt.Registry) error { - if swag.IsZero(m.ResourceReferences) { // not required return nil } @@ -142,6 +143,101 @@ func (m *APIPipelineVersion) validateResourceReferences(formats strfmt.Registry) if err := m.ResourceReferences[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("resource_references" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("resource_references" + "." 
+ strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this api pipeline version based on the context it is used +func (m *APIPipelineVersion) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidatePackageURL(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateParameters(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateResourceReferences(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIPipelineVersion) contextValidatePackageURL(ctx context.Context, formats strfmt.Registry) error { + + if m.PackageURL != nil { + + if swag.IsZero(m.PackageURL) { // not required + return nil + } + + if err := m.PackageURL.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("package_url") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("package_url") + } + return err + } + } + + return nil +} + +func (m *APIPipelineVersion) contextValidateParameters(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Parameters); i++ { + + if m.Parameters[i] != nil { + + if swag.IsZero(m.Parameters[i]) { // not required + return nil + } + + if err := m.Parameters[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("parameters" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("parameters" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +func (m *APIPipelineVersion) contextValidateResourceReferences(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.ResourceReferences); i++ { + + if m.ResourceReferences[i] != nil { + + if swag.IsZero(m.ResourceReferences[i]) { // not required + return nil + } + + if err := m.ResourceReferences[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("resource_references" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("resource_references" + "." + strconv.Itoa(i)) } return err } diff --git a/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_relationship.go b/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_relationship.go index b4f85b6da9a..9305d8e165b 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_relationship.go +++ b/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_relationship.go @@ -6,18 +6,28 @@ package pipeline_upload_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // APIRelationship api relationship +// // swagger:model apiRelationship type APIRelationship string +func NewAPIRelationship(value APIRelationship) *APIRelationship { + return &value +} + +// Pointer returns a pointer to a freshly-allocated APIRelationship. 
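Sketch (not part of the patch): in pipeline_upload_model the new ContextValidate on APIPipelineVersion cascades into PackageURL, Parameters, and ResourceReferences, so a single call covers the nested models returned by the upload endpoints. Only fields visible in these hunks are set; the ID value is a placeholder.

package main

import (
	"context"
	"fmt"

	"github.com/go-openapi/strfmt"

	upload_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_upload_model"
)

func main() {
	v := &upload_model.APIPipelineVersion{
		ResourceReferences: []*upload_model.APIResourceReference{{
			Key:          &upload_model.APIResourceKey{ID: "<pipeline-id>"},
			Relationship: upload_model.NewAPIRelationship(upload_model.APIRelationshipUNKNOWNRELATIONSHIP),
		}},
	}

	if err := v.Validate(strfmt.Default); err != nil {
		fmt.Println("validate:", err)
	}
	// ContextValidate walks the nested resource references (and, when set,
	// PackageURL and Parameters) via the new contextValidate* helpers.
	if err := v.ContextValidate(context.Background(), strfmt.Default); err != nil {
		fmt.Println("context validate:", err)
	}
	fmt.Println("pipeline version validated")
}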
+func (m APIRelationship) Pointer() *APIRelationship { + return &m +} + const ( // APIRelationshipUNKNOWNRELATIONSHIP captures enum value "UNKNOWN_RELATIONSHIP" @@ -44,7 +54,7 @@ func init() { } func (m APIRelationship) validateAPIRelationshipEnum(path, location string, value APIRelationship) error { - if err := validate.Enum(path, location, value, apiRelationshipEnum); err != nil { + if err := validate.EnumCase(path, location, value, apiRelationshipEnum, true); err != nil { return err } return nil @@ -64,3 +74,8 @@ func (m APIRelationship) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this api relationship based on context it is used +func (m APIRelationship) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_resource_key.go b/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_resource_key.go index 2242421be03..23bd4912f0d 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_resource_key.go +++ b/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_resource_key.go @@ -6,13 +6,15 @@ package pipeline_upload_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIResourceKey api resource key +// // swagger:model apiResourceKey type APIResourceKey struct { @@ -20,7 +22,7 @@ type APIResourceKey struct { ID string `json:"id,omitempty"` // The type of the resource that referred to. - Type APIResourceType `json:"type,omitempty"` + Type *APIResourceType `json:"type,omitempty"` } // Validate validates this api resource key @@ -38,16 +40,54 @@ func (m *APIResourceKey) Validate(formats strfmt.Registry) error { } func (m *APIResourceKey) validateType(formats strfmt.Registry) error { - if swag.IsZero(m.Type) { // not required return nil } - if err := m.Type.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("type") + if m.Type != nil { + if err := m.Type.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// ContextValidate validate this api resource key based on the context it is used +func (m *APIResourceKey) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateType(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *APIResourceKey) contextValidateType(ctx context.Context, formats strfmt.Registry) error { + + if m.Type != nil { + + if swag.IsZero(m.Type) { // not required + return nil + } + + if err := m.Type.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err } - return err } return nil diff --git a/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_resource_reference.go b/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_resource_reference.go index c5fcfce9441..501c4e3af4f 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_resource_reference.go +++ b/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_resource_reference.go @@ -6,13 +6,15 @@ package pipeline_upload_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIResourceReference api resource reference +// // swagger:model apiResourceReference type APIResourceReference struct { @@ -23,7 +25,7 @@ type APIResourceReference struct { Name string `json:"name,omitempty"` // Required field. The relationship from referred resource to the object. - Relationship APIRelationship `json:"relationship,omitempty"` + Relationship *APIRelationship `json:"relationship,omitempty"` } // Validate validates this api resource reference @@ -45,7 +47,6 @@ func (m *APIResourceReference) Validate(formats strfmt.Registry) error { } func (m *APIResourceReference) validateKey(formats strfmt.Registry) error { - if swag.IsZero(m.Key) { // not required return nil } @@ -54,6 +55,8 @@ func (m *APIResourceReference) validateKey(formats strfmt.Registry) error { if err := m.Key.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("key") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("key") } return err } @@ -63,16 +66,79 @@ func (m *APIResourceReference) validateKey(formats strfmt.Registry) error { } func (m *APIResourceReference) validateRelationship(formats strfmt.Registry) error { - if swag.IsZero(m.Relationship) { // not required return nil } - if err := m.Relationship.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("relationship") + if m.Relationship != nil { + if err := m.Relationship.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("relationship") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("relationship") + } + return err + } + } + + return nil +} + +// ContextValidate validate this api resource reference based on the context it is used +func (m *APIResourceReference) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateKey(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateRelationship(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *APIResourceReference) contextValidateKey(ctx context.Context, formats strfmt.Registry) error { + + if m.Key != nil { + + if swag.IsZero(m.Key) { // not required + return nil + } + + if err := m.Key.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("key") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("key") + } + return err + } + } + + return nil +} + +func (m *APIResourceReference) contextValidateRelationship(ctx context.Context, formats strfmt.Registry) error { + + if m.Relationship != nil { + + if swag.IsZero(m.Relationship) { // not required + return nil + } + + if err := m.Relationship.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("relationship") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("relationship") + } + return err } - return err } return nil diff --git a/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_resource_type.go b/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_resource_type.go index b0fe8702d6e..39bc66c54f5 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_resource_type.go +++ b/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_resource_type.go @@ -6,18 +6,28 @@ package pipeline_upload_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // APIResourceType api resource type +// // swagger:model apiResourceType type APIResourceType string +func NewAPIResourceType(value APIResourceType) *APIResourceType { + return &value +} + +// Pointer returns a pointer to a freshly-allocated APIResourceType. 
+func (m APIResourceType) Pointer() *APIResourceType { + return &m +} + const ( // APIResourceTypeUNKNOWNRESOURCETYPE captures enum value "UNKNOWN_RESOURCE_TYPE" @@ -53,7 +63,7 @@ func init() { } func (m APIResourceType) validateAPIResourceTypeEnum(path, location string, value APIResourceType) error { - if err := validate.Enum(path, location, value, apiResourceTypeEnum); err != nil { + if err := validate.EnumCase(path, location, value, apiResourceTypeEnum, true); err != nil { return err } return nil @@ -73,3 +83,8 @@ func (m APIResourceType) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this api resource type based on context it is used +func (m APIResourceType) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_status.go b/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_status.go index a1df8518689..2c9b53e2998 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_status.go +++ b/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_status.go @@ -6,15 +6,16 @@ package pipeline_upload_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIStatus api status +// // swagger:model apiStatus type APIStatus struct { @@ -43,7 +44,6 @@ func (m *APIStatus) Validate(formats strfmt.Registry) error { } func (m *APIStatus) validateDetails(formats strfmt.Registry) error { - if swag.IsZero(m.Details) { // not required return nil } @@ -57,6 +57,47 @@ func (m *APIStatus) validateDetails(formats strfmt.Registry) error { if err := m.Details[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this api status based on the context it is used +func (m *APIStatus) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateDetails(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIStatus) contextValidateDetails(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Details); i++ { + + if m.Details[i] != nil { + + if swag.IsZero(m.Details[i]) { // not required + return nil + } + + if err := m.Details[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." 
+ strconv.Itoa(i)) } return err } diff --git a/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_url.go b/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_url.go index b1ebfac3c25..19e9cb06865 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_url.go +++ b/backend/api/v1beta1/go_http_client/pipeline_upload_model/api_url.go @@ -6,12 +6,14 @@ package pipeline_upload_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIURL api Url +// // swagger:model apiUrl type APIURL struct { @@ -24,6 +26,11 @@ func (m *APIURL) Validate(formats strfmt.Registry) error { return nil } +// ContextValidate validates this api Url based on context it is used +func (m *APIURL) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *APIURL) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v1beta1/go_http_client/pipeline_upload_model/protobuf_any.go b/backend/api/v1beta1/go_http_client/pipeline_upload_model/protobuf_any.go index 80a4d4cd71f..3031bdebdde 100644 --- a/backend/api/v1beta1/go_http_client/pipeline_upload_model/protobuf_any.go +++ b/backend/api/v1beta1/go_http_client/pipeline_upload_model/protobuf_any.go @@ -6,9 +6,9 @@ package pipeline_upload_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) @@ -20,42 +20,42 @@ import ( // // Example 1: Pack and unpack a message in C++. // -// Foo foo = ...; -// Any any; -// any.PackFrom(foo); -// ... -// if (any.UnpackTo(&foo)) { -// ... -// } +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } // // Example 2: Pack and unpack a message in Java. // -// Foo foo = ...; -// Any any = Any.pack(foo); -// ... -// if (any.is(Foo.class)) { -// foo = any.unpack(Foo.class); -// } -// -// Example 3: Pack and unpack a message in Python. -// -// foo = Foo(...) -// any = Any() -// any.Pack(foo) -// ... -// if any.Is(Foo.DESCRIPTOR): -// any.Unpack(foo) -// ... -// -// Example 4: Pack and unpack a message in Go -// -// foo := &pb.Foo{...} -// any, err := ptypes.MarshalAny(foo) -// ... -// foo := &pb.Foo{} -// if err := ptypes.UnmarshalAny(any, foo); err != nil { -// ... -// } +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... +// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := ptypes.MarshalAny(foo) +// ... +// foo := &pb.Foo{} +// if err := ptypes.UnmarshalAny(any, foo); err != nil { +// ... +// } // // The pack methods provided by protobuf library will by default use // 'type.googleapis.com/full.type.name' as the type URL and the unpack @@ -63,34 +63,34 @@ import ( // in the type URL, for example "foo.bar.com/x/y.z" will yield type // name "y.z". 
// -// // JSON // ==== // The JSON representation of an `Any` value uses the regular // representation of the deserialized, embedded message, with an // additional field `@type` which contains the type URL. Example: // -// package google.profile; -// message Person { -// string first_name = 1; -// string last_name = 2; -// } +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } // -// { -// "@type": "type.googleapis.com/google.profile.Person", -// "firstName": , -// "lastName": -// } +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } // // If the embedded message type is well-known and has a custom JSON // representation, that representation will be embedded adding a field // `value` which holds the custom JSON in addition to the `@type` // field. Example (for message [google.protobuf.Duration][]): // -// { -// "@type": "type.googleapis.com/google.protobuf.Duration", -// "value": "1.212s" -// } +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// // swagger:model protobufAny type ProtobufAny struct { @@ -129,26 +129,11 @@ type ProtobufAny struct { // Validate validates this protobuf any func (m *ProtobufAny) Validate(formats strfmt.Registry) error { - var res []error - - if err := m.validateValue(formats); err != nil { - res = append(res, err) - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } return nil } -func (m *ProtobufAny) validateValue(formats strfmt.Registry) error { - - if swag.IsZero(m.Value) { // not required - return nil - } - - // Format "byte" (base64 string) is already validated when unmarshalled - +// ContextValidate validates this protobuf any based on context it is used +func (m *ProtobufAny) ContextValidate(ctx context.Context, formats strfmt.Registry) error { return nil } diff --git a/backend/api/v1beta1/go_http_client/run_client/run_client.go b/backend/api/v1beta1/go_http_client/run_client/run_client.go index bb259aa215b..3107d976141 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_client.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_client.go @@ -8,8 +8,7 @@ package run_client import ( "github.com/go-openapi/runtime" httptransport "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_client/run_service" ) @@ -56,9 +55,7 @@ func New(transport runtime.ClientTransport, formats strfmt.Registry) *Run { cli := new(Run) cli.Transport = transport - cli.RunService = run_service.New(transport, formats) - return cli } @@ -103,7 +100,7 @@ func (cfg *TransportConfig) WithSchemes(schemes []string) *TransportConfig { // Run is a client for run type Run struct { - RunService *run_service.Client + RunService run_service.ClientService Transport runtime.ClientTransport } @@ -111,7 +108,5 @@ type Run struct { // SetTransport changes the transport on the client and all its subresources func (c *Run) SetTransport(transport runtime.ClientTransport) { c.Transport = transport - c.RunService.SetTransport(transport) - } diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_archive_run_v1_parameters.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_archive_run_v1_parameters.go index bc5669fdc15..b6e0f6d8c72 100644 --- 
a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_archive_run_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_archive_run_v1_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewRunServiceArchiveRunV1Params creates a new RunServiceArchiveRunV1Params object -// with the default values initialized. +// NewRunServiceArchiveRunV1Params creates a new RunServiceArchiveRunV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewRunServiceArchiveRunV1Params() *RunServiceArchiveRunV1Params { - var () return &RunServiceArchiveRunV1Params{ - timeout: cr.DefaultTimeout, } } // NewRunServiceArchiveRunV1ParamsWithTimeout creates a new RunServiceArchiveRunV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRunServiceArchiveRunV1ParamsWithTimeout(timeout time.Duration) *RunServiceArchiveRunV1Params { - var () return &RunServiceArchiveRunV1Params{ - timeout: timeout, } } // NewRunServiceArchiveRunV1ParamsWithContext creates a new RunServiceArchiveRunV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRunServiceArchiveRunV1ParamsWithContext(ctx context.Context) *RunServiceArchiveRunV1Params { - var () return &RunServiceArchiveRunV1Params{ - Context: ctx, } } // NewRunServiceArchiveRunV1ParamsWithHTTPClient creates a new RunServiceArchiveRunV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRunServiceArchiveRunV1ParamsWithHTTPClient(client *http.Client) *RunServiceArchiveRunV1Params { - var () return &RunServiceArchiveRunV1Params{ HTTPClient: client, } } -/*RunServiceArchiveRunV1Params contains all the parameters to send to the API endpoint -for the run service archive run v1 operation typically these are written to a http.Request +/* +RunServiceArchiveRunV1Params contains all the parameters to send to the API endpoint + + for the run service archive run v1 operation. + + Typically these are written to a http.Request. */ type RunServiceArchiveRunV1Params struct { - /*ID - The ID of the run to be archived. + /* ID. + The ID of the run to be archived. */ ID string @@ -72,6 +72,21 @@ type RunServiceArchiveRunV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the run service archive run v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceArchiveRunV1Params) WithDefaults() *RunServiceArchiveRunV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the run service archive run v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *RunServiceArchiveRunV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the run service archive run v1 params func (o *RunServiceArchiveRunV1Params) WithTimeout(timeout time.Duration) *RunServiceArchiveRunV1Params { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_archive_run_v1_responses.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_archive_run_v1_responses.go index 3a9ae712c67..17611c8ed1c 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_archive_run_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_archive_run_v1_responses.go @@ -6,14 +6,14 @@ package run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" ) // RunServiceArchiveRunV1Reader is a Reader for the RunServiceArchiveRunV1 structure. @@ -24,14 +24,12 @@ type RunServiceArchiveRunV1Reader struct { // ReadResponse reads a server response into the received o. func (o *RunServiceArchiveRunV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRunServiceArchiveRunV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRunServiceArchiveRunV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRunServiceArchiveRunV1OK() *RunServiceArchiveRunV1OK { return &RunServiceArchiveRunV1OK{} } -/*RunServiceArchiveRunV1OK handles this case with default header values. +/* +RunServiceArchiveRunV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type RunServiceArchiveRunV1OK struct { Payload interface{} } +// IsSuccess returns true when this run service archive run v1 o k response has a 2xx status code +func (o *RunServiceArchiveRunV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this run service archive run v1 o k response has a 3xx status code +func (o *RunServiceArchiveRunV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this run service archive run v1 o k response has a 4xx status code +func (o *RunServiceArchiveRunV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this run service archive run v1 o k response has a 5xx status code +func (o *RunServiceArchiveRunV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this run service archive run v1 o k response a status code equal to that given +func (o *RunServiceArchiveRunV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the run service archive run v1 o k response +func (o *RunServiceArchiveRunV1OK) Code() int { + return 200 +} + func (o *RunServiceArchiveRunV1OK) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/runs/{id}:archive][%d] runServiceArchiveRunV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs/{id}:archive][%d] runServiceArchiveRunV1OK %s", 200, payload) +} + +func (o *RunServiceArchiveRunV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs/{id}:archive][%d] runServiceArchiveRunV1OK %s", 200, payload) +} + +func (o *RunServiceArchiveRunV1OK) GetPayload() interface{} { + return o.Payload } func (o *RunServiceArchiveRunV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewRunServiceArchiveRunV1Default(code int) *RunServiceArchiveRunV1Default { } } -/*RunServiceArchiveRunV1Default handles this case with default header values. +/* +RunServiceArchiveRunV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type RunServiceArchiveRunV1Default struct { _statusCode int - Payload *run_model.GatewayruntimeError + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this run service archive run v1 default response has a 2xx status code +func (o *RunServiceArchiveRunV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this run service archive run v1 default response has a 3xx status code +func (o *RunServiceArchiveRunV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this run service archive run v1 default response has a 4xx status code +func (o *RunServiceArchiveRunV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this run service archive run v1 default response has a 5xx status code +func (o *RunServiceArchiveRunV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this run service archive run v1 default response a status code equal to that given +func (o *RunServiceArchiveRunV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the run service archive run v1 default response @@ -94,12 +159,22 @@ func (o *RunServiceArchiveRunV1Default) Code() int { } func (o *RunServiceArchiveRunV1Default) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/runs/{id}:archive][%d] RunService_ArchiveRunV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs/{id}:archive][%d] RunService_ArchiveRunV1 default %s", o._statusCode, payload) +} + +func (o *RunServiceArchiveRunV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs/{id}:archive][%d] RunService_ArchiveRunV1 default %s", o._statusCode, payload) +} + +func (o *RunServiceArchiveRunV1Default) GetPayload() *run_model.GooglerpcStatus { + return o.Payload } func (o *RunServiceArchiveRunV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(run_model.GatewayruntimeError) + o.Payload = new(run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_client.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_client.go index 2f1bc1883a2..0ded4c15bba 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_client.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_client.go @@ -7,15 +7,40 @@ package run_service import ( "github.com/go-openapi/runtime" - - strfmt "github.com/go-openapi/strfmt" + httptransport "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" ) // New creates a new run service API client. -func New(transport runtime.ClientTransport, formats strfmt.Registry) *Client { +func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService { return &Client{transport: transport, formats: formats} } +// New creates a new run service API client with basic auth credentials. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - user: user for basic authentication header. 
+// - password: password for basic authentication header. +func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BasicAuth(user, password) + return &Client{transport: transport, formats: strfmt.Default} +} + +// New creates a new run service API client with a bearer token for authentication. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - bearerToken: bearer token for Bearer authentication header. +func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BearerToken(bearerToken) + return &Client{transport: transport, formats: strfmt.Default} +} + /* Client for run service API */ @@ -24,16 +49,43 @@ type Client struct { formats strfmt.Registry } +// ClientOption may be used to customize the behavior of Client methods. +type ClientOption func(*runtime.ClientOperation) + +// ClientService is the interface for Client methods +type ClientService interface { + RunServiceArchiveRunV1(params *RunServiceArchiveRunV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceArchiveRunV1OK, error) + + RunServiceCreateRunV1(params *RunServiceCreateRunV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceCreateRunV1OK, error) + + RunServiceDeleteRunV1(params *RunServiceDeleteRunV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceDeleteRunV1OK, error) + + RunServiceGetRunV1(params *RunServiceGetRunV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceGetRunV1OK, error) + + RunServiceListRunsV1(params *RunServiceListRunsV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceListRunsV1OK, error) + + RunServiceReadArtifactV1(params *RunServiceReadArtifactV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceReadArtifactV1OK, error) + + RunServiceReportRunMetricsV1(params *RunServiceReportRunMetricsV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceReportRunMetricsV1OK, error) + + RunServiceRetryRunV1(params *RunServiceRetryRunV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceRetryRunV1OK, error) + + RunServiceTerminateRunV1(params *RunServiceTerminateRunV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceTerminateRunV1OK, error) + + RunServiceUnarchiveRunV1(params *RunServiceUnarchiveRunV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceUnarchiveRunV1OK, error) + + SetTransport(transport runtime.ClientTransport) +} + /* RunServiceArchiveRunV1 archives a run */ -func (a *Client) RunServiceArchiveRunV1(params *RunServiceArchiveRunV1Params, authInfo runtime.ClientAuthInfoWriter) (*RunServiceArchiveRunV1OK, error) { +func (a *Client) RunServiceArchiveRunV1(params *RunServiceArchiveRunV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceArchiveRunV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewRunServiceArchiveRunV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := 
&runtime.ClientOperation{ ID: "RunService_ArchiveRunV1", Method: "POST", PathPattern: "/apis/v1beta1/runs/{id}:archive", @@ -45,24 +97,33 @@ func (a *Client) RunServiceArchiveRunV1(params *RunServiceArchiveRunV1Params, au AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RunServiceArchiveRunV1OK), nil - + success, ok := result.(*RunServiceArchiveRunV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RunServiceArchiveRunV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RunServiceCreateRunV1 creates a new run */ -func (a *Client) RunServiceCreateRunV1(params *RunServiceCreateRunV1Params, authInfo runtime.ClientAuthInfoWriter) (*RunServiceCreateRunV1OK, error) { +func (a *Client) RunServiceCreateRunV1(params *RunServiceCreateRunV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceCreateRunV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewRunServiceCreateRunV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RunService_CreateRunV1", Method: "POST", PathPattern: "/apis/v1beta1/runs", @@ -74,24 +135,33 @@ func (a *Client) RunServiceCreateRunV1(params *RunServiceCreateRunV1Params, auth AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RunServiceCreateRunV1OK), nil - + success, ok := result.(*RunServiceCreateRunV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RunServiceCreateRunV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RunServiceDeleteRunV1 deletes a run */ -func (a *Client) RunServiceDeleteRunV1(params *RunServiceDeleteRunV1Params, authInfo runtime.ClientAuthInfoWriter) (*RunServiceDeleteRunV1OK, error) { +func (a *Client) RunServiceDeleteRunV1(params *RunServiceDeleteRunV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceDeleteRunV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewRunServiceDeleteRunV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RunService_DeleteRunV1", Method: "DELETE", PathPattern: "/apis/v1beta1/runs/{id}", @@ -103,24 +173,33 @@ func (a *Client) RunServiceDeleteRunV1(params *RunServiceDeleteRunV1Params, auth AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RunServiceDeleteRunV1OK), nil - + success, ok := result.(*RunServiceDeleteRunV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RunServiceDeleteRunV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RunServiceGetRunV1 finds a 
specific run by ID */ -func (a *Client) RunServiceGetRunV1(params *RunServiceGetRunV1Params, authInfo runtime.ClientAuthInfoWriter) (*RunServiceGetRunV1OK, error) { +func (a *Client) RunServiceGetRunV1(params *RunServiceGetRunV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceGetRunV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewRunServiceGetRunV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RunService_GetRunV1", Method: "GET", PathPattern: "/apis/v1beta1/runs/{run_id}", @@ -132,24 +211,33 @@ func (a *Client) RunServiceGetRunV1(params *RunServiceGetRunV1Params, authInfo r AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RunServiceGetRunV1OK), nil - + success, ok := result.(*RunServiceGetRunV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RunServiceGetRunV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RunServiceListRunsV1 finds all runs */ -func (a *Client) RunServiceListRunsV1(params *RunServiceListRunsV1Params, authInfo runtime.ClientAuthInfoWriter) (*RunServiceListRunsV1OK, error) { +func (a *Client) RunServiceListRunsV1(params *RunServiceListRunsV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceListRunsV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewRunServiceListRunsV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RunService_ListRunsV1", Method: "GET", PathPattern: "/apis/v1beta1/runs", @@ -161,24 +249,33 @@ func (a *Client) RunServiceListRunsV1(params *RunServiceListRunsV1Params, authIn AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RunServiceListRunsV1OK), nil - + success, ok := result.(*RunServiceListRunsV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RunServiceListRunsV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RunServiceReadArtifactV1 finds a run s artifact data */ -func (a *Client) RunServiceReadArtifactV1(params *RunServiceReadArtifactV1Params, authInfo runtime.ClientAuthInfoWriter) (*RunServiceReadArtifactV1OK, error) { +func (a *Client) RunServiceReadArtifactV1(params *RunServiceReadArtifactV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceReadArtifactV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewRunServiceReadArtifactV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RunService_ReadArtifactV1", Method: "GET", PathPattern: "/apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read", @@ -190,24 +287,33 @@ func (a *Client) RunServiceReadArtifactV1(params *RunServiceReadArtifactV1Params AuthInfo: authInfo, Context: params.Context, Client: 
params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RunServiceReadArtifactV1OK), nil - + success, ok := result.(*RunServiceReadArtifactV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RunServiceReadArtifactV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RunServiceReportRunMetricsV1 reports run metrics reports metrics of a run each metric is reported in its own transaction so this API accepts partial failures metric can be uniquely identified by run id node id name duplicate reporting will be ignored by the API first reporting wins */ -func (a *Client) RunServiceReportRunMetricsV1(params *RunServiceReportRunMetricsV1Params, authInfo runtime.ClientAuthInfoWriter) (*RunServiceReportRunMetricsV1OK, error) { +func (a *Client) RunServiceReportRunMetricsV1(params *RunServiceReportRunMetricsV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceReportRunMetricsV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewRunServiceReportRunMetricsV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RunService_ReportRunMetricsV1", Method: "POST", PathPattern: "/apis/v1beta1/runs/{run_id}:reportMetrics", @@ -219,24 +325,33 @@ func (a *Client) RunServiceReportRunMetricsV1(params *RunServiceReportRunMetrics AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RunServiceReportRunMetricsV1OK), nil - + success, ok := result.(*RunServiceReportRunMetricsV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RunServiceReportRunMetricsV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RunServiceRetryRunV1 res initiates a failed or terminated run */ -func (a *Client) RunServiceRetryRunV1(params *RunServiceRetryRunV1Params, authInfo runtime.ClientAuthInfoWriter) (*RunServiceRetryRunV1OK, error) { +func (a *Client) RunServiceRetryRunV1(params *RunServiceRetryRunV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceRetryRunV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewRunServiceRetryRunV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RunService_RetryRunV1", Method: "POST", PathPattern: "/apis/v1beta1/runs/{run_id}/retry", @@ -248,24 +363,33 @@ func (a *Client) RunServiceRetryRunV1(params *RunServiceRetryRunV1Params, authIn AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RunServiceRetryRunV1OK), nil - + success, ok := result.(*RunServiceRetryRunV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RunServiceRetryRunV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as 
default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RunServiceTerminateRunV1 terminates an active run */ -func (a *Client) RunServiceTerminateRunV1(params *RunServiceTerminateRunV1Params, authInfo runtime.ClientAuthInfoWriter) (*RunServiceTerminateRunV1OK, error) { +func (a *Client) RunServiceTerminateRunV1(params *RunServiceTerminateRunV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceTerminateRunV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewRunServiceTerminateRunV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RunService_TerminateRunV1", Method: "POST", PathPattern: "/apis/v1beta1/runs/{run_id}/terminate", @@ -277,24 +401,33 @@ func (a *Client) RunServiceTerminateRunV1(params *RunServiceTerminateRunV1Params AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RunServiceTerminateRunV1OK), nil - + success, ok := result.(*RunServiceTerminateRunV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RunServiceTerminateRunV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RunServiceUnarchiveRunV1 restores an archived run */ -func (a *Client) RunServiceUnarchiveRunV1(params *RunServiceUnarchiveRunV1Params, authInfo runtime.ClientAuthInfoWriter) (*RunServiceUnarchiveRunV1OK, error) { +func (a *Client) RunServiceUnarchiveRunV1(params *RunServiceUnarchiveRunV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceUnarchiveRunV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewRunServiceUnarchiveRunV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RunService_UnarchiveRunV1", Method: "POST", PathPattern: "/apis/v1beta1/runs/{id}:unarchive", @@ -306,12 +439,22 @@ func (a *Client) RunServiceUnarchiveRunV1(params *RunServiceUnarchiveRunV1Params AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RunServiceUnarchiveRunV1OK), nil - + success, ok := result.(*RunServiceUnarchiveRunV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RunServiceUnarchiveRunV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } // SetTransport changes the transport on the client diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_create_run_v1_parameters.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_create_run_v1_parameters.go index 39f756db49f..ec0e00f67f9 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_create_run_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_create_run_v1_parameters.go @@ -13,64 +13,79 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + 
"github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" ) -// NewRunServiceCreateRunV1Params creates a new RunServiceCreateRunV1Params object -// with the default values initialized. +// NewRunServiceCreateRunV1Params creates a new RunServiceCreateRunV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewRunServiceCreateRunV1Params() *RunServiceCreateRunV1Params { - var () return &RunServiceCreateRunV1Params{ - timeout: cr.DefaultTimeout, } } // NewRunServiceCreateRunV1ParamsWithTimeout creates a new RunServiceCreateRunV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRunServiceCreateRunV1ParamsWithTimeout(timeout time.Duration) *RunServiceCreateRunV1Params { - var () return &RunServiceCreateRunV1Params{ - timeout: timeout, } } // NewRunServiceCreateRunV1ParamsWithContext creates a new RunServiceCreateRunV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRunServiceCreateRunV1ParamsWithContext(ctx context.Context) *RunServiceCreateRunV1Params { - var () return &RunServiceCreateRunV1Params{ - Context: ctx, } } // NewRunServiceCreateRunV1ParamsWithHTTPClient creates a new RunServiceCreateRunV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRunServiceCreateRunV1ParamsWithHTTPClient(client *http.Client) *RunServiceCreateRunV1Params { - var () return &RunServiceCreateRunV1Params{ HTTPClient: client, } } -/*RunServiceCreateRunV1Params contains all the parameters to send to the API endpoint -for the run service create run v1 operation typically these are written to a http.Request +/* +RunServiceCreateRunV1Params contains all the parameters to send to the API endpoint + + for the run service create run v1 operation. + + Typically these are written to a http.Request. */ type RunServiceCreateRunV1Params struct { - /*Body*/ - Body *run_model.APIRun + // Run. + Run *run_model.APIRun timeout time.Duration Context context.Context HTTPClient *http.Client } +// WithDefaults hydrates default values in the run service create run v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceCreateRunV1Params) WithDefaults() *RunServiceCreateRunV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the run service create run v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *RunServiceCreateRunV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the run service create run v1 params func (o *RunServiceCreateRunV1Params) WithTimeout(timeout time.Duration) *RunServiceCreateRunV1Params { o.SetTimeout(timeout) @@ -104,15 +119,15 @@ func (o *RunServiceCreateRunV1Params) SetHTTPClient(client *http.Client) { o.HTTPClient = client } -// WithBody adds the body to the run service create run v1 params -func (o *RunServiceCreateRunV1Params) WithBody(body *run_model.APIRun) *RunServiceCreateRunV1Params { - o.SetBody(body) +// WithRun adds the run to the run service create run v1 params +func (o *RunServiceCreateRunV1Params) WithRun(run *run_model.APIRun) *RunServiceCreateRunV1Params { + o.SetRun(run) return o } -// SetBody adds the body to the run service create run v1 params -func (o *RunServiceCreateRunV1Params) SetBody(body *run_model.APIRun) { - o.Body = body +// SetRun adds the run to the run service create run v1 params +func (o *RunServiceCreateRunV1Params) SetRun(run *run_model.APIRun) { + o.Run = run } // WriteToRequest writes these params to a swagger request @@ -122,9 +137,8 @@ func (o *RunServiceCreateRunV1Params) WriteToRequest(r runtime.ClientRequest, re return err } var res []error - - if o.Body != nil { - if err := r.SetBodyParam(o.Body); err != nil { + if o.Run != nil { + if err := r.SetBodyParam(o.Run); err != nil { return err } } diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_create_run_v1_responses.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_create_run_v1_responses.go index 76f91566331..9f7f9bda9f4 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_create_run_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_create_run_v1_responses.go @@ -6,14 +6,14 @@ package run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" ) // RunServiceCreateRunV1Reader is a Reader for the RunServiceCreateRunV1 structure. @@ -24,14 +24,12 @@ type RunServiceCreateRunV1Reader struct { // ReadResponse reads a server response into the received o. func (o *RunServiceCreateRunV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRunServiceCreateRunV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRunServiceCreateRunV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRunServiceCreateRunV1OK() *RunServiceCreateRunV1OK { return &RunServiceCreateRunV1OK{} } -/*RunServiceCreateRunV1OK handles this case with default header values. +/* +RunServiceCreateRunV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type RunServiceCreateRunV1OK struct { Payload *run_model.APIRunDetail } +// IsSuccess returns true when this run service create run v1 o k response has a 2xx status code +func (o *RunServiceCreateRunV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this run service create run v1 o k response has a 3xx status code +func (o *RunServiceCreateRunV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this run service create run v1 o k response has a 4xx status code +func (o *RunServiceCreateRunV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this run service create run v1 o k response has a 5xx status code +func (o *RunServiceCreateRunV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this run service create run v1 o k response a status code equal to that given +func (o *RunServiceCreateRunV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the run service create run v1 o k response +func (o *RunServiceCreateRunV1OK) Code() int { + return 200 +} + func (o *RunServiceCreateRunV1OK) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/runs][%d] runServiceCreateRunV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs][%d] runServiceCreateRunV1OK %s", 200, payload) +} + +func (o *RunServiceCreateRunV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs][%d] runServiceCreateRunV1OK %s", 200, payload) +} + +func (o *RunServiceCreateRunV1OK) GetPayload() *run_model.APIRunDetail { + return o.Payload } func (o *RunServiceCreateRunV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewRunServiceCreateRunV1Default(code int) *RunServiceCreateRunV1Default { } } -/*RunServiceCreateRunV1Default handles this case with default header values. +/* +RunServiceCreateRunV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type RunServiceCreateRunV1Default struct { _statusCode int - Payload *run_model.GatewayruntimeError + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this run service create run v1 default response has a 2xx status code +func (o *RunServiceCreateRunV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this run service create run v1 default response has a 3xx status code +func (o *RunServiceCreateRunV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this run service create run v1 default response has a 4xx status code +func (o *RunServiceCreateRunV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this run service create run v1 default response has a 5xx status code +func (o *RunServiceCreateRunV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this run service create run v1 default response a status code equal to that given +func (o *RunServiceCreateRunV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the run service create run v1 default response @@ -96,12 +161,22 @@ func (o *RunServiceCreateRunV1Default) Code() int { } func (o *RunServiceCreateRunV1Default) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/runs][%d] RunService_CreateRunV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs][%d] RunService_CreateRunV1 default %s", o._statusCode, payload) +} + +func (o *RunServiceCreateRunV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs][%d] RunService_CreateRunV1 default %s", o._statusCode, payload) +} + +func (o *RunServiceCreateRunV1Default) GetPayload() *run_model.GooglerpcStatus { + return o.Payload } func (o *RunServiceCreateRunV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(run_model.GatewayruntimeError) + o.Payload = new(run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_delete_run_v1_parameters.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_delete_run_v1_parameters.go index 1196b4c0ac1..ff9d11d039e 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_delete_run_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_delete_run_v1_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewRunServiceDeleteRunV1Params creates a new RunServiceDeleteRunV1Params object -// with the default values initialized. +// NewRunServiceDeleteRunV1Params creates a new RunServiceDeleteRunV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewRunServiceDeleteRunV1Params() *RunServiceDeleteRunV1Params { - var () return &RunServiceDeleteRunV1Params{ - timeout: cr.DefaultTimeout, } } // NewRunServiceDeleteRunV1ParamsWithTimeout creates a new RunServiceDeleteRunV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRunServiceDeleteRunV1ParamsWithTimeout(timeout time.Duration) *RunServiceDeleteRunV1Params { - var () return &RunServiceDeleteRunV1Params{ - timeout: timeout, } } // NewRunServiceDeleteRunV1ParamsWithContext creates a new RunServiceDeleteRunV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRunServiceDeleteRunV1ParamsWithContext(ctx context.Context) *RunServiceDeleteRunV1Params { - var () return &RunServiceDeleteRunV1Params{ - Context: ctx, } } // NewRunServiceDeleteRunV1ParamsWithHTTPClient creates a new RunServiceDeleteRunV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRunServiceDeleteRunV1ParamsWithHTTPClient(client *http.Client) *RunServiceDeleteRunV1Params { - var () return &RunServiceDeleteRunV1Params{ HTTPClient: client, } } -/*RunServiceDeleteRunV1Params contains all the parameters to send to the API endpoint -for the run service delete run v1 operation typically these are written to a http.Request +/* +RunServiceDeleteRunV1Params contains all the parameters to send to the API endpoint + + for the run service delete run v1 operation. + + Typically these are written to a http.Request. */ type RunServiceDeleteRunV1Params struct { - /*ID - The ID of the run to be deleted. + /* ID. + The ID of the run to be deleted. */ ID string @@ -72,6 +72,21 @@ type RunServiceDeleteRunV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the run service delete run v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceDeleteRunV1Params) WithDefaults() *RunServiceDeleteRunV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the run service delete run v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *RunServiceDeleteRunV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the run service delete run v1 params func (o *RunServiceDeleteRunV1Params) WithTimeout(timeout time.Duration) *RunServiceDeleteRunV1Params { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_delete_run_v1_responses.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_delete_run_v1_responses.go index 22f486f9183..6940f001be5 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_delete_run_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_delete_run_v1_responses.go @@ -6,14 +6,14 @@ package run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" ) // RunServiceDeleteRunV1Reader is a Reader for the RunServiceDeleteRunV1 structure. @@ -24,14 +24,12 @@ type RunServiceDeleteRunV1Reader struct { // ReadResponse reads a server response into the received o. func (o *RunServiceDeleteRunV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRunServiceDeleteRunV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRunServiceDeleteRunV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRunServiceDeleteRunV1OK() *RunServiceDeleteRunV1OK { return &RunServiceDeleteRunV1OK{} } -/*RunServiceDeleteRunV1OK handles this case with default header values. +/* +RunServiceDeleteRunV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type RunServiceDeleteRunV1OK struct { Payload interface{} } +// IsSuccess returns true when this run service delete run v1 o k response has a 2xx status code +func (o *RunServiceDeleteRunV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this run service delete run v1 o k response has a 3xx status code +func (o *RunServiceDeleteRunV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this run service delete run v1 o k response has a 4xx status code +func (o *RunServiceDeleteRunV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this run service delete run v1 o k response has a 5xx status code +func (o *RunServiceDeleteRunV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this run service delete run v1 o k response a status code equal to that given +func (o *RunServiceDeleteRunV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the run service delete run v1 o k response +func (o *RunServiceDeleteRunV1OK) Code() int { + return 200 +} + func (o *RunServiceDeleteRunV1OK) Error() string { - return fmt.Sprintf("[DELETE /apis/v1beta1/runs/{id}][%d] runServiceDeleteRunV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v1beta1/runs/{id}][%d] runServiceDeleteRunV1OK %s", 200, payload) +} + +func (o *RunServiceDeleteRunV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v1beta1/runs/{id}][%d] runServiceDeleteRunV1OK %s", 200, payload) +} + +func (o *RunServiceDeleteRunV1OK) GetPayload() interface{} { + return o.Payload } func (o *RunServiceDeleteRunV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewRunServiceDeleteRunV1Default(code int) *RunServiceDeleteRunV1Default { } } -/*RunServiceDeleteRunV1Default handles this case with default header values. +/* +RunServiceDeleteRunV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type RunServiceDeleteRunV1Default struct { _statusCode int - Payload *run_model.GatewayruntimeError + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this run service delete run v1 default response has a 2xx status code +func (o *RunServiceDeleteRunV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this run service delete run v1 default response has a 3xx status code +func (o *RunServiceDeleteRunV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this run service delete run v1 default response has a 4xx status code +func (o *RunServiceDeleteRunV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this run service delete run v1 default response has a 5xx status code +func (o *RunServiceDeleteRunV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this run service delete run v1 default response a status code equal to that given +func (o *RunServiceDeleteRunV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the run service delete run v1 default response @@ -94,12 +159,22 @@ func (o *RunServiceDeleteRunV1Default) Code() int { } func (o *RunServiceDeleteRunV1Default) Error() string { - return fmt.Sprintf("[DELETE /apis/v1beta1/runs/{id}][%d] RunService_DeleteRunV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v1beta1/runs/{id}][%d] RunService_DeleteRunV1 default %s", o._statusCode, payload) +} + +func (o *RunServiceDeleteRunV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v1beta1/runs/{id}][%d] RunService_DeleteRunV1 default %s", o._statusCode, payload) +} + +func (o *RunServiceDeleteRunV1Default) GetPayload() *run_model.GooglerpcStatus { + return o.Payload } func (o *RunServiceDeleteRunV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(run_model.GatewayruntimeError) + o.Payload = new(run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_get_run_v1_parameters.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_get_run_v1_parameters.go index f29b799f4d8..860756a1b39 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_get_run_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_get_run_v1_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewRunServiceGetRunV1Params creates a new RunServiceGetRunV1Params object -// with the default values initialized. +// NewRunServiceGetRunV1Params creates a new RunServiceGetRunV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewRunServiceGetRunV1Params() *RunServiceGetRunV1Params { - var () return &RunServiceGetRunV1Params{ - timeout: cr.DefaultTimeout, } } // NewRunServiceGetRunV1ParamsWithTimeout creates a new RunServiceGetRunV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRunServiceGetRunV1ParamsWithTimeout(timeout time.Duration) *RunServiceGetRunV1Params { - var () return &RunServiceGetRunV1Params{ - timeout: timeout, } } // NewRunServiceGetRunV1ParamsWithContext creates a new RunServiceGetRunV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRunServiceGetRunV1ParamsWithContext(ctx context.Context) *RunServiceGetRunV1Params { - var () return &RunServiceGetRunV1Params{ - Context: ctx, } } // NewRunServiceGetRunV1ParamsWithHTTPClient creates a new RunServiceGetRunV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRunServiceGetRunV1ParamsWithHTTPClient(client *http.Client) *RunServiceGetRunV1Params { - var () return &RunServiceGetRunV1Params{ HTTPClient: client, } } -/*RunServiceGetRunV1Params contains all the parameters to send to the API endpoint -for the run service get run v1 operation typically these are written to a http.Request +/* +RunServiceGetRunV1Params contains all the parameters to send to the API endpoint + + for the run service get run v1 operation. + + Typically these are written to a http.Request. */ type RunServiceGetRunV1Params struct { - /*RunID - The ID of the run to be retrieved. + /* RunID. + The ID of the run to be retrieved. */ RunID string @@ -72,6 +72,21 @@ type RunServiceGetRunV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the run service get run v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceGetRunV1Params) WithDefaults() *RunServiceGetRunV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the run service get run v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *RunServiceGetRunV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the run service get run v1 params func (o *RunServiceGetRunV1Params) WithTimeout(timeout time.Duration) *RunServiceGetRunV1Params { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_get_run_v1_responses.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_get_run_v1_responses.go index 93c31defca7..e35617ef39c 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_get_run_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_get_run_v1_responses.go @@ -6,14 +6,14 @@ package run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" ) // RunServiceGetRunV1Reader is a Reader for the RunServiceGetRunV1 structure. @@ -24,14 +24,12 @@ type RunServiceGetRunV1Reader struct { // ReadResponse reads a server response into the received o. func (o *RunServiceGetRunV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRunServiceGetRunV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRunServiceGetRunV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRunServiceGetRunV1OK() *RunServiceGetRunV1OK { return &RunServiceGetRunV1OK{} } -/*RunServiceGetRunV1OK handles this case with default header values. +/* +RunServiceGetRunV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type RunServiceGetRunV1OK struct { Payload *run_model.APIRunDetail } +// IsSuccess returns true when this run service get run v1 o k response has a 2xx status code +func (o *RunServiceGetRunV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this run service get run v1 o k response has a 3xx status code +func (o *RunServiceGetRunV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this run service get run v1 o k response has a 4xx status code +func (o *RunServiceGetRunV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this run service get run v1 o k response has a 5xx status code +func (o *RunServiceGetRunV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this run service get run v1 o k response a status code equal to that given +func (o *RunServiceGetRunV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the run service get run v1 o k response +func (o *RunServiceGetRunV1OK) Code() int { + return 200 +} + func (o *RunServiceGetRunV1OK) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/runs/{run_id}][%d] runServiceGetRunV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/runs/{run_id}][%d] runServiceGetRunV1OK %s", 200, payload) +} + +func (o *RunServiceGetRunV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/runs/{run_id}][%d] runServiceGetRunV1OK %s", 200, payload) +} + +func (o *RunServiceGetRunV1OK) GetPayload() *run_model.APIRunDetail { + return o.Payload } func (o *RunServiceGetRunV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewRunServiceGetRunV1Default(code int) *RunServiceGetRunV1Default { } } -/*RunServiceGetRunV1Default handles this case with default header values. +/* +RunServiceGetRunV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type RunServiceGetRunV1Default struct { _statusCode int - Payload *run_model.GatewayruntimeError + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this run service get run v1 default response has a 2xx status code +func (o *RunServiceGetRunV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this run service get run v1 default response has a 3xx status code +func (o *RunServiceGetRunV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this run service get run v1 default response has a 4xx status code +func (o *RunServiceGetRunV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this run service get run v1 default response has a 5xx status code +func (o *RunServiceGetRunV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this run service get run v1 default response a status code equal to that given +func (o *RunServiceGetRunV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the run service get run v1 default response @@ -96,12 +161,22 @@ func (o *RunServiceGetRunV1Default) Code() int { } func (o *RunServiceGetRunV1Default) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/runs/{run_id}][%d] RunService_GetRunV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/runs/{run_id}][%d] RunService_GetRunV1 default %s", o._statusCode, payload) +} + +func (o *RunServiceGetRunV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/runs/{run_id}][%d] RunService_GetRunV1 default %s", o._statusCode, payload) +} + +func (o *RunServiceGetRunV1Default) GetPayload() *run_model.GooglerpcStatus { + return o.Payload } func (o *RunServiceGetRunV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(run_model.GatewayruntimeError) + o.Payload = new(run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_list_runs_v1_parameters.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_list_runs_v1_parameters.go index ab84f6e5485..2d51a20b2ca 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_list_runs_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_list_runs_v1_parameters.go @@ -13,101 +13,98 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" - - strfmt "github.com/go-openapi/strfmt" ) -// NewRunServiceListRunsV1Params creates a new RunServiceListRunsV1Params object -// with the default values initialized. +// NewRunServiceListRunsV1Params creates a new RunServiceListRunsV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewRunServiceListRunsV1Params() *RunServiceListRunsV1Params { - var ( - resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") - ) return &RunServiceListRunsV1Params{ - ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, - timeout: cr.DefaultTimeout, } } // NewRunServiceListRunsV1ParamsWithTimeout creates a new RunServiceListRunsV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRunServiceListRunsV1ParamsWithTimeout(timeout time.Duration) *RunServiceListRunsV1Params { - var ( - resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") - ) return &RunServiceListRunsV1Params{ - ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, - timeout: timeout, } } // NewRunServiceListRunsV1ParamsWithContext creates a new RunServiceListRunsV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRunServiceListRunsV1ParamsWithContext(ctx context.Context) *RunServiceListRunsV1Params { - var ( - resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") - ) return &RunServiceListRunsV1Params{ - ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, - Context: ctx, } } // NewRunServiceListRunsV1ParamsWithHTTPClient creates a new RunServiceListRunsV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRunServiceListRunsV1ParamsWithHTTPClient(client *http.Client) *RunServiceListRunsV1Params { - var ( - resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") - ) return &RunServiceListRunsV1Params{ - ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, - HTTPClient: client, + HTTPClient: client, } } -/*RunServiceListRunsV1Params contains all the parameters to send to the API endpoint -for the run service list runs v1 operation typically these are written to a http.Request +/* +RunServiceListRunsV1Params contains all the parameters to send to the API endpoint + + for the run service list runs v1 operation. + + Typically these are written to a http.Request. */ type RunServiceListRunsV1Params struct { - /*Filter - A url-encoded, JSON-serialized Filter protocol buffer (see - [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/v1beta1/filter.proto)). + /* Filter. + A url-encoded, JSON-serialized Filter protocol buffer (see + [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/v1beta1/filter.proto)). */ Filter *string - /*PageSize - The number of runs to be listed per page. If there are more runs than this + + /* PageSize. + + The number of runs to be listed per page. If there are more runs than this number, the response message will contain a nextPageToken field you can use to fetch the next page. + Format: int32 */ PageSize *int32 - /*PageToken - A page token to request the next page of results. The token is acquried + + /* PageToken. + + A page token to request the next page of results. The token is acquried from the nextPageToken field of the response from the previous ListRuns call or can be omitted when fetching the first page. - */ PageToken *string - /*ResourceReferenceKeyID - The ID of the resource that referred to. + /* ResourceReferenceKeyID. + + The ID of the resource that referred to. 
*/ ResourceReferenceKeyID *string - /*ResourceReferenceKeyType - The type of the resource that referred to. + /* ResourceReferenceKeyType. + + The type of the resource that referred to. + + Default: "UNKNOWN_RESOURCE_TYPE" */ ResourceReferenceKeyType *string - /*SortBy - Can be format of "field_name", "field_name asc" or "field_name desc" - (Example, "name asc" or "id desc"). Ascending by default. + /* SortBy. + + Can be format of "field_name", "field_name asc" or "field_name desc" + (Example, "name asc" or "id desc"). Ascending by default. */ SortBy *string @@ -116,6 +113,32 @@ type RunServiceListRunsV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the run service list runs v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceListRunsV1Params) WithDefaults() *RunServiceListRunsV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the run service list runs v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceListRunsV1Params) SetDefaults() { + var ( + resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") + ) + + val := RunServiceListRunsV1Params{ + ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, + } + + val.timeout = o.timeout + val.Context = o.Context + val.HTTPClient = o.HTTPClient + *o = val +} + // WithTimeout adds the timeout to the run service list runs v1 params func (o *RunServiceListRunsV1Params) WithTimeout(timeout time.Duration) *RunServiceListRunsV1Params { o.SetTimeout(timeout) @@ -227,96 +250,102 @@ func (o *RunServiceListRunsV1Params) WriteToRequest(r runtime.ClientRequest, reg // query param filter var qrFilter string + if o.Filter != nil { qrFilter = *o.Filter } qFilter := qrFilter if qFilter != "" { + if err := r.SetQueryParam("filter", qFilter); err != nil { return err } } - } if o.PageSize != nil { // query param page_size var qrPageSize int32 + if o.PageSize != nil { qrPageSize = *o.PageSize } qPageSize := swag.FormatInt32(qrPageSize) if qPageSize != "" { + if err := r.SetQueryParam("page_size", qPageSize); err != nil { return err } } - } if o.PageToken != nil { // query param page_token var qrPageToken string + if o.PageToken != nil { qrPageToken = *o.PageToken } qPageToken := qrPageToken if qPageToken != "" { + if err := r.SetQueryParam("page_token", qPageToken); err != nil { return err } } - } if o.ResourceReferenceKeyID != nil { // query param resource_reference_key.id var qrResourceReferenceKeyID string + if o.ResourceReferenceKeyID != nil { qrResourceReferenceKeyID = *o.ResourceReferenceKeyID } qResourceReferenceKeyID := qrResourceReferenceKeyID if qResourceReferenceKeyID != "" { + if err := r.SetQueryParam("resource_reference_key.id", qResourceReferenceKeyID); err != nil { return err } } - } if o.ResourceReferenceKeyType != nil { // query param resource_reference_key.type var qrResourceReferenceKeyType string + if o.ResourceReferenceKeyType != nil { qrResourceReferenceKeyType = *o.ResourceReferenceKeyType } qResourceReferenceKeyType := qrResourceReferenceKeyType if qResourceReferenceKeyType != "" { + if err := r.SetQueryParam("resource_reference_key.type", qResourceReferenceKeyType); err != nil { return err } } - } if o.SortBy != nil { // query param sort_by var qrSortBy string + if o.SortBy != nil { qrSortBy = *o.SortBy } qSortBy := qrSortBy if qSortBy != "" { + if err := r.SetQueryParam("sort_by", qSortBy); err != nil { return err } } - 
} if len(res) > 0 { diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_list_runs_v1_responses.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_list_runs_v1_responses.go index a3054546c3f..fe0b5d0c8bb 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_list_runs_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_list_runs_v1_responses.go @@ -6,14 +6,14 @@ package run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" ) // RunServiceListRunsV1Reader is a Reader for the RunServiceListRunsV1 structure. @@ -24,14 +24,12 @@ type RunServiceListRunsV1Reader struct { // ReadResponse reads a server response into the received o. func (o *RunServiceListRunsV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRunServiceListRunsV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRunServiceListRunsV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRunServiceListRunsV1OK() *RunServiceListRunsV1OK { return &RunServiceListRunsV1OK{} } -/*RunServiceListRunsV1OK handles this case with default header values. +/* +RunServiceListRunsV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type RunServiceListRunsV1OK struct { Payload *run_model.APIListRunsResponse } +// IsSuccess returns true when this run service list runs v1 o k response has a 2xx status code +func (o *RunServiceListRunsV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this run service list runs v1 o k response has a 3xx status code +func (o *RunServiceListRunsV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this run service list runs v1 o k response has a 4xx status code +func (o *RunServiceListRunsV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this run service list runs v1 o k response has a 5xx status code +func (o *RunServiceListRunsV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this run service list runs v1 o k response a status code equal to that given +func (o *RunServiceListRunsV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the run service list runs v1 o k response +func (o *RunServiceListRunsV1OK) Code() int { + return 200 +} + func (o *RunServiceListRunsV1OK) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/runs][%d] runServiceListRunsV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/runs][%d] runServiceListRunsV1OK %s", 200, payload) +} + +func (o *RunServiceListRunsV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/runs][%d] runServiceListRunsV1OK %s", 200, payload) +} + +func (o *RunServiceListRunsV1OK) GetPayload() *run_model.APIListRunsResponse { + return o.Payload } func (o *RunServiceListRunsV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewRunServiceListRunsV1Default(code int) *RunServiceListRunsV1Default { } } -/*RunServiceListRunsV1Default handles this case with default header values. +/* +RunServiceListRunsV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type RunServiceListRunsV1Default struct { _statusCode int - Payload *run_model.GatewayruntimeError + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this run service list runs v1 default response has a 2xx status code +func (o *RunServiceListRunsV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this run service list runs v1 default response has a 3xx status code +func (o *RunServiceListRunsV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this run service list runs v1 default response has a 4xx status code +func (o *RunServiceListRunsV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this run service list runs v1 default response has a 5xx status code +func (o *RunServiceListRunsV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this run service list runs v1 default response a status code equal to that given +func (o *RunServiceListRunsV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the run service list runs v1 default response @@ -96,12 +161,22 @@ func (o *RunServiceListRunsV1Default) Code() int { } func (o *RunServiceListRunsV1Default) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/runs][%d] RunService_ListRunsV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/runs][%d] RunService_ListRunsV1 default %s", o._statusCode, payload) +} + +func (o *RunServiceListRunsV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/runs][%d] RunService_ListRunsV1 default %s", o._statusCode, payload) +} + +func (o *RunServiceListRunsV1Default) GetPayload() *run_model.GooglerpcStatus { + return o.Payload } func (o *RunServiceListRunsV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(run_model.GatewayruntimeError) + o.Payload = new(run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_read_artifact_v1_parameters.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_read_artifact_v1_parameters.go index 3eddf4d2933..5425fdfa71a 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_read_artifact_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_read_artifact_v1_parameters.go @@ -13,67 +13,69 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewRunServiceReadArtifactV1Params creates a new RunServiceReadArtifactV1Params object -// with the default values initialized. +// NewRunServiceReadArtifactV1Params creates a new RunServiceReadArtifactV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewRunServiceReadArtifactV1Params() *RunServiceReadArtifactV1Params { - var () return &RunServiceReadArtifactV1Params{ - timeout: cr.DefaultTimeout, } } // NewRunServiceReadArtifactV1ParamsWithTimeout creates a new RunServiceReadArtifactV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRunServiceReadArtifactV1ParamsWithTimeout(timeout time.Duration) *RunServiceReadArtifactV1Params { - var () return &RunServiceReadArtifactV1Params{ - timeout: timeout, } } // NewRunServiceReadArtifactV1ParamsWithContext creates a new RunServiceReadArtifactV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRunServiceReadArtifactV1ParamsWithContext(ctx context.Context) *RunServiceReadArtifactV1Params { - var () return &RunServiceReadArtifactV1Params{ - Context: ctx, } } // NewRunServiceReadArtifactV1ParamsWithHTTPClient creates a new RunServiceReadArtifactV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRunServiceReadArtifactV1ParamsWithHTTPClient(client *http.Client) *RunServiceReadArtifactV1Params { - var () return &RunServiceReadArtifactV1Params{ HTTPClient: client, } } -/*RunServiceReadArtifactV1Params contains all the parameters to send to the API endpoint -for the run service read artifact v1 operation typically these are written to a http.Request +/* +RunServiceReadArtifactV1Params contains all the parameters to send to the API endpoint + + for the run service read artifact v1 operation. + + Typically these are written to a http.Request. */ type RunServiceReadArtifactV1Params struct { - /*ArtifactName - The name of the artifact. + /* ArtifactName. + The name of the artifact. */ ArtifactName string - /*NodeID - The ID of the running node. + /* NodeID. + + The ID of the running node. */ NodeID string - /*RunID - The ID of the run. + /* RunID. + + The ID of the run. */ RunID string @@ -82,6 +84,21 @@ type RunServiceReadArtifactV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the run service read artifact v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceReadArtifactV1Params) WithDefaults() *RunServiceReadArtifactV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the run service read artifact v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *RunServiceReadArtifactV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the run service read artifact v1 params func (o *RunServiceReadArtifactV1Params) WithTimeout(timeout time.Duration) *RunServiceReadArtifactV1Params { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_read_artifact_v1_responses.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_read_artifact_v1_responses.go index b0ff739c376..5bbcdae862c 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_read_artifact_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_read_artifact_v1_responses.go @@ -6,14 +6,14 @@ package run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" ) // RunServiceReadArtifactV1Reader is a Reader for the RunServiceReadArtifactV1 structure. @@ -24,14 +24,12 @@ type RunServiceReadArtifactV1Reader struct { // ReadResponse reads a server response into the received o. func (o *RunServiceReadArtifactV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRunServiceReadArtifactV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRunServiceReadArtifactV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRunServiceReadArtifactV1OK() *RunServiceReadArtifactV1OK { return &RunServiceReadArtifactV1OK{} } -/*RunServiceReadArtifactV1OK handles this case with default header values. +/* +RunServiceReadArtifactV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type RunServiceReadArtifactV1OK struct { Payload *run_model.APIReadArtifactResponse } +// IsSuccess returns true when this run service read artifact v1 o k response has a 2xx status code +func (o *RunServiceReadArtifactV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this run service read artifact v1 o k response has a 3xx status code +func (o *RunServiceReadArtifactV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this run service read artifact v1 o k response has a 4xx status code +func (o *RunServiceReadArtifactV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this run service read artifact v1 o k response has a 5xx status code +func (o *RunServiceReadArtifactV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this run service read artifact v1 o k response a status code equal to that given +func (o *RunServiceReadArtifactV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the run service read artifact v1 o k response +func (o *RunServiceReadArtifactV1OK) Code() int { + return 200 +} + func (o *RunServiceReadArtifactV1OK) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read][%d] runServiceReadArtifactV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read][%d] runServiceReadArtifactV1OK %s", 200, payload) +} + +func (o *RunServiceReadArtifactV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read][%d] runServiceReadArtifactV1OK %s", 200, payload) +} + +func (o *RunServiceReadArtifactV1OK) GetPayload() *run_model.APIReadArtifactResponse { + return o.Payload } func (o *RunServiceReadArtifactV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewRunServiceReadArtifactV1Default(code int) *RunServiceReadArtifactV1Defau } } -/*RunServiceReadArtifactV1Default handles this case with default header values. +/* +RunServiceReadArtifactV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type RunServiceReadArtifactV1Default struct { _statusCode int - Payload *run_model.GatewayruntimeError + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this run service read artifact v1 default response has a 2xx status code +func (o *RunServiceReadArtifactV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this run service read artifact v1 default response has a 3xx status code +func (o *RunServiceReadArtifactV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this run service read artifact v1 default response has a 4xx status code +func (o *RunServiceReadArtifactV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this run service read artifact v1 default response has a 5xx status code +func (o *RunServiceReadArtifactV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this run service read artifact v1 default response a status code equal to that given +func (o *RunServiceReadArtifactV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the run service read artifact v1 default response @@ -96,12 +161,22 @@ func (o *RunServiceReadArtifactV1Default) Code() int { } func (o *RunServiceReadArtifactV1Default) Error() string { - return fmt.Sprintf("[GET /apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read][%d] RunService_ReadArtifactV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read][%d] RunService_ReadArtifactV1 default %s", o._statusCode, payload) +} + +func (o *RunServiceReadArtifactV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read][%d] RunService_ReadArtifactV1 default %s", o._statusCode, payload) +} + +func (o *RunServiceReadArtifactV1Default) GetPayload() *run_model.GooglerpcStatus { + return o.Payload } func (o *RunServiceReadArtifactV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(run_model.GatewayruntimeError) + o.Payload = new(run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_report_run_metrics_v1_parameters.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_report_run_metrics_v1_parameters.go index 606aaa9ca44..07f2dfd22fd 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_report_run_metrics_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_report_run_metrics_v1_parameters.go @@ -13,61 +13,62 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" ) -// NewRunServiceReportRunMetricsV1Params creates a new RunServiceReportRunMetricsV1Params object -// with the default values initialized. 
+// NewRunServiceReportRunMetricsV1Params creates a new RunServiceReportRunMetricsV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewRunServiceReportRunMetricsV1Params() *RunServiceReportRunMetricsV1Params { - var () return &RunServiceReportRunMetricsV1Params{ - timeout: cr.DefaultTimeout, } } // NewRunServiceReportRunMetricsV1ParamsWithTimeout creates a new RunServiceReportRunMetricsV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRunServiceReportRunMetricsV1ParamsWithTimeout(timeout time.Duration) *RunServiceReportRunMetricsV1Params { - var () return &RunServiceReportRunMetricsV1Params{ - timeout: timeout, } } // NewRunServiceReportRunMetricsV1ParamsWithContext creates a new RunServiceReportRunMetricsV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRunServiceReportRunMetricsV1ParamsWithContext(ctx context.Context) *RunServiceReportRunMetricsV1Params { - var () return &RunServiceReportRunMetricsV1Params{ - Context: ctx, } } // NewRunServiceReportRunMetricsV1ParamsWithHTTPClient creates a new RunServiceReportRunMetricsV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRunServiceReportRunMetricsV1ParamsWithHTTPClient(client *http.Client) *RunServiceReportRunMetricsV1Params { - var () return &RunServiceReportRunMetricsV1Params{ HTTPClient: client, } } -/*RunServiceReportRunMetricsV1Params contains all the parameters to send to the API endpoint -for the run service report run metrics v1 operation typically these are written to a http.Request +/* +RunServiceReportRunMetricsV1Params contains all the parameters to send to the API endpoint + + for the run service report run metrics v1 operation. + + Typically these are written to a http.Request. */ type RunServiceReportRunMetricsV1Params struct { - /*Body*/ - Body *run_model.APIReportRunMetricsRequest - /*RunID - Required. The parent run ID of the metric. + // Body. + Body *run_model.RunServiceReportRunMetricsV1Body + + /* RunID. + Required. The parent run ID of the metric. */ RunID string @@ -76,6 +77,21 @@ type RunServiceReportRunMetricsV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the run service report run metrics v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceReportRunMetricsV1Params) WithDefaults() *RunServiceReportRunMetricsV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the run service report run metrics v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *RunServiceReportRunMetricsV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the run service report run metrics v1 params func (o *RunServiceReportRunMetricsV1Params) WithTimeout(timeout time.Duration) *RunServiceReportRunMetricsV1Params { o.SetTimeout(timeout) @@ -110,13 +126,13 @@ func (o *RunServiceReportRunMetricsV1Params) SetHTTPClient(client *http.Client) } // WithBody adds the body to the run service report run metrics v1 params -func (o *RunServiceReportRunMetricsV1Params) WithBody(body *run_model.APIReportRunMetricsRequest) *RunServiceReportRunMetricsV1Params { +func (o *RunServiceReportRunMetricsV1Params) WithBody(body *run_model.RunServiceReportRunMetricsV1Body) *RunServiceReportRunMetricsV1Params { o.SetBody(body) return o } // SetBody adds the body to the run service report run metrics v1 params -func (o *RunServiceReportRunMetricsV1Params) SetBody(body *run_model.APIReportRunMetricsRequest) { +func (o *RunServiceReportRunMetricsV1Params) SetBody(body *run_model.RunServiceReportRunMetricsV1Body) { o.Body = body } @@ -138,7 +154,6 @@ func (o *RunServiceReportRunMetricsV1Params) WriteToRequest(r runtime.ClientRequ return err } var res []error - if o.Body != nil { if err := r.SetBodyParam(o.Body); err != nil { return err diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_report_run_metrics_v1_responses.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_report_run_metrics_v1_responses.go index 1bd778cee7a..f3d31269452 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_report_run_metrics_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_report_run_metrics_v1_responses.go @@ -6,14 +6,14 @@ package run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" ) // RunServiceReportRunMetricsV1Reader is a Reader for the RunServiceReportRunMetricsV1 structure. @@ -24,14 +24,12 @@ type RunServiceReportRunMetricsV1Reader struct { // ReadResponse reads a server response into the received o. func (o *RunServiceReportRunMetricsV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRunServiceReportRunMetricsV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRunServiceReportRunMetricsV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRunServiceReportRunMetricsV1OK() *RunServiceReportRunMetricsV1OK { return &RunServiceReportRunMetricsV1OK{} } -/*RunServiceReportRunMetricsV1OK handles this case with default header values. +/* +RunServiceReportRunMetricsV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type RunServiceReportRunMetricsV1OK struct { Payload *run_model.APIReportRunMetricsResponse } +// IsSuccess returns true when this run service report run metrics v1 o k response has a 2xx status code +func (o *RunServiceReportRunMetricsV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this run service report run metrics v1 o k response has a 3xx status code +func (o *RunServiceReportRunMetricsV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this run service report run metrics v1 o k response has a 4xx status code +func (o *RunServiceReportRunMetricsV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this run service report run metrics v1 o k response has a 5xx status code +func (o *RunServiceReportRunMetricsV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this run service report run metrics v1 o k response a status code equal to that given +func (o *RunServiceReportRunMetricsV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the run service report run metrics v1 o k response +func (o *RunServiceReportRunMetricsV1OK) Code() int { + return 200 +} + func (o *RunServiceReportRunMetricsV1OK) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/runs/{run_id}:reportMetrics][%d] runServiceReportRunMetricsV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs/{run_id}:reportMetrics][%d] runServiceReportRunMetricsV1OK %s", 200, payload) +} + +func (o *RunServiceReportRunMetricsV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs/{run_id}:reportMetrics][%d] runServiceReportRunMetricsV1OK %s", 200, payload) +} + +func (o *RunServiceReportRunMetricsV1OK) GetPayload() *run_model.APIReportRunMetricsResponse { + return o.Payload } func (o *RunServiceReportRunMetricsV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewRunServiceReportRunMetricsV1Default(code int) *RunServiceReportRunMetric } } -/*RunServiceReportRunMetricsV1Default handles this case with default header values. +/* +RunServiceReportRunMetricsV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type RunServiceReportRunMetricsV1Default struct { _statusCode int - Payload *run_model.GatewayruntimeError + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this run service report run metrics v1 default response has a 2xx status code +func (o *RunServiceReportRunMetricsV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this run service report run metrics v1 default response has a 3xx status code +func (o *RunServiceReportRunMetricsV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this run service report run metrics v1 default response has a 4xx status code +func (o *RunServiceReportRunMetricsV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this run service report run metrics v1 default response has a 5xx status code +func (o *RunServiceReportRunMetricsV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this run service report run metrics v1 default response a status code equal to that given +func (o *RunServiceReportRunMetricsV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the run service report run metrics v1 default response @@ -96,12 +161,22 @@ func (o *RunServiceReportRunMetricsV1Default) Code() int { } func (o *RunServiceReportRunMetricsV1Default) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/runs/{run_id}:reportMetrics][%d] RunService_ReportRunMetricsV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs/{run_id}:reportMetrics][%d] RunService_ReportRunMetricsV1 default %s", o._statusCode, payload) +} + +func (o *RunServiceReportRunMetricsV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs/{run_id}:reportMetrics][%d] RunService_ReportRunMetricsV1 default %s", o._statusCode, payload) +} + +func (o *RunServiceReportRunMetricsV1Default) GetPayload() *run_model.GooglerpcStatus { + return o.Payload } func (o *RunServiceReportRunMetricsV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(run_model.GatewayruntimeError) + o.Payload = new(run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_retry_run_v1_parameters.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_retry_run_v1_parameters.go index 2fd57779d01..fa05af305c5 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_retry_run_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_retry_run_v1_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewRunServiceRetryRunV1Params creates a new RunServiceRetryRunV1Params object -// with the default values initialized. +// NewRunServiceRetryRunV1Params creates a new RunServiceRetryRunV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. 
+// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewRunServiceRetryRunV1Params() *RunServiceRetryRunV1Params { - var () return &RunServiceRetryRunV1Params{ - timeout: cr.DefaultTimeout, } } // NewRunServiceRetryRunV1ParamsWithTimeout creates a new RunServiceRetryRunV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRunServiceRetryRunV1ParamsWithTimeout(timeout time.Duration) *RunServiceRetryRunV1Params { - var () return &RunServiceRetryRunV1Params{ - timeout: timeout, } } // NewRunServiceRetryRunV1ParamsWithContext creates a new RunServiceRetryRunV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRunServiceRetryRunV1ParamsWithContext(ctx context.Context) *RunServiceRetryRunV1Params { - var () return &RunServiceRetryRunV1Params{ - Context: ctx, } } // NewRunServiceRetryRunV1ParamsWithHTTPClient creates a new RunServiceRetryRunV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRunServiceRetryRunV1ParamsWithHTTPClient(client *http.Client) *RunServiceRetryRunV1Params { - var () return &RunServiceRetryRunV1Params{ HTTPClient: client, } } -/*RunServiceRetryRunV1Params contains all the parameters to send to the API endpoint -for the run service retry run v1 operation typically these are written to a http.Request +/* +RunServiceRetryRunV1Params contains all the parameters to send to the API endpoint + + for the run service retry run v1 operation. + + Typically these are written to a http.Request. */ type RunServiceRetryRunV1Params struct { - /*RunID - The ID of the run to be retried. + /* RunID. + The ID of the run to be retried. */ RunID string @@ -72,6 +72,21 @@ type RunServiceRetryRunV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the run service retry run v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceRetryRunV1Params) WithDefaults() *RunServiceRetryRunV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the run service retry run v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
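Given the new WithDefaults/SetDefaults hooks documented above, parameter construction for this operation would typically look like the sketch below; it sticks to constructors and fields that appear in this file, and the ten-second timeout plus runID value are placeholders:

package example

import (
	"time"

	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_client/run_service"
)

// newRetryParams assembles retry-run parameters for the regenerated client:
// construct with a timeout, hydrate defaults (currently a no-op because this
// operation defines none), then set the path parameter.
func newRetryParams(runID string) *run_service.RunServiceRetryRunV1Params {
	params := run_service.NewRunServiceRetryRunV1ParamsWithTimeout(10 * time.Second)
	params.SetDefaults()
	params.RunID = runID
	return params
}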
+func (o *RunServiceRetryRunV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the run service retry run v1 params func (o *RunServiceRetryRunV1Params) WithTimeout(timeout time.Duration) *RunServiceRetryRunV1Params { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_retry_run_v1_responses.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_retry_run_v1_responses.go index 15343c4cd44..c9a944e9cdb 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_retry_run_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_retry_run_v1_responses.go @@ -6,14 +6,14 @@ package run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" ) // RunServiceRetryRunV1Reader is a Reader for the RunServiceRetryRunV1 structure. @@ -24,14 +24,12 @@ type RunServiceRetryRunV1Reader struct { // ReadResponse reads a server response into the received o. func (o *RunServiceRetryRunV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRunServiceRetryRunV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRunServiceRetryRunV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRunServiceRetryRunV1OK() *RunServiceRetryRunV1OK { return &RunServiceRetryRunV1OK{} } -/*RunServiceRetryRunV1OK handles this case with default header values. +/* +RunServiceRetryRunV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type RunServiceRetryRunV1OK struct { Payload interface{} } +// IsSuccess returns true when this run service retry run v1 o k response has a 2xx status code +func (o *RunServiceRetryRunV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this run service retry run v1 o k response has a 3xx status code +func (o *RunServiceRetryRunV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this run service retry run v1 o k response has a 4xx status code +func (o *RunServiceRetryRunV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this run service retry run v1 o k response has a 5xx status code +func (o *RunServiceRetryRunV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this run service retry run v1 o k response a status code equal to that given +func (o *RunServiceRetryRunV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the run service retry run v1 o k response +func (o *RunServiceRetryRunV1OK) Code() int { + return 200 +} + func (o *RunServiceRetryRunV1OK) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/runs/{run_id}/retry][%d] runServiceRetryRunV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs/{run_id}/retry][%d] runServiceRetryRunV1OK %s", 200, payload) +} + +func (o *RunServiceRetryRunV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs/{run_id}/retry][%d] runServiceRetryRunV1OK %s", 200, payload) +} + +func (o *RunServiceRetryRunV1OK) GetPayload() interface{} { + return o.Payload } func (o *RunServiceRetryRunV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewRunServiceRetryRunV1Default(code int) *RunServiceRetryRunV1Default { } } -/*RunServiceRetryRunV1Default handles this case with default header values. +/* +RunServiceRetryRunV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type RunServiceRetryRunV1Default struct { _statusCode int - Payload *run_model.GatewayruntimeError + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this run service retry run v1 default response has a 2xx status code +func (o *RunServiceRetryRunV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this run service retry run v1 default response has a 3xx status code +func (o *RunServiceRetryRunV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this run service retry run v1 default response has a 4xx status code +func (o *RunServiceRetryRunV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this run service retry run v1 default response has a 5xx status code +func (o *RunServiceRetryRunV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this run service retry run v1 default response a status code equal to that given +func (o *RunServiceRetryRunV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the run service retry run v1 default response @@ -94,12 +159,22 @@ func (o *RunServiceRetryRunV1Default) Code() int { } func (o *RunServiceRetryRunV1Default) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/runs/{run_id}/retry][%d] RunService_RetryRunV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs/{run_id}/retry][%d] RunService_RetryRunV1 default %s", o._statusCode, payload) +} + +func (o *RunServiceRetryRunV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs/{run_id}/retry][%d] RunService_RetryRunV1 default %s", o._statusCode, payload) +} + +func (o *RunServiceRetryRunV1Default) GetPayload() *run_model.GooglerpcStatus { + return o.Payload } func (o *RunServiceRetryRunV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(run_model.GatewayruntimeError) + o.Payload = new(run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_terminate_run_v1_parameters.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_terminate_run_v1_parameters.go index 16301885d27..d202c39d3c9 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_terminate_run_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_terminate_run_v1_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewRunServiceTerminateRunV1Params creates a new RunServiceTerminateRunV1Params object -// with the default values initialized. +// NewRunServiceTerminateRunV1Params creates a new RunServiceTerminateRunV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewRunServiceTerminateRunV1Params() *RunServiceTerminateRunV1Params { - var () return &RunServiceTerminateRunV1Params{ - timeout: cr.DefaultTimeout, } } // NewRunServiceTerminateRunV1ParamsWithTimeout creates a new RunServiceTerminateRunV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRunServiceTerminateRunV1ParamsWithTimeout(timeout time.Duration) *RunServiceTerminateRunV1Params { - var () return &RunServiceTerminateRunV1Params{ - timeout: timeout, } } // NewRunServiceTerminateRunV1ParamsWithContext creates a new RunServiceTerminateRunV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRunServiceTerminateRunV1ParamsWithContext(ctx context.Context) *RunServiceTerminateRunV1Params { - var () return &RunServiceTerminateRunV1Params{ - Context: ctx, } } // NewRunServiceTerminateRunV1ParamsWithHTTPClient creates a new RunServiceTerminateRunV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRunServiceTerminateRunV1ParamsWithHTTPClient(client *http.Client) *RunServiceTerminateRunV1Params { - var () return &RunServiceTerminateRunV1Params{ HTTPClient: client, } } -/*RunServiceTerminateRunV1Params contains all the parameters to send to the API endpoint -for the run service terminate run v1 operation typically these are written to a http.Request +/* +RunServiceTerminateRunV1Params contains all the parameters to send to the API endpoint + + for the run service terminate run v1 operation. + + Typically these are written to a http.Request. */ type RunServiceTerminateRunV1Params struct { - /*RunID - The ID of the run to be terminated. + /* RunID. + The ID of the run to be terminated. */ RunID string @@ -72,6 +72,21 @@ type RunServiceTerminateRunV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the run service terminate run v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceTerminateRunV1Params) WithDefaults() *RunServiceTerminateRunV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the run service terminate run v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *RunServiceTerminateRunV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the run service terminate run v1 params func (o *RunServiceTerminateRunV1Params) WithTimeout(timeout time.Duration) *RunServiceTerminateRunV1Params { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_terminate_run_v1_responses.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_terminate_run_v1_responses.go index 0156d8a5bd5..60de7a421a2 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_terminate_run_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_terminate_run_v1_responses.go @@ -6,14 +6,14 @@ package run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" ) // RunServiceTerminateRunV1Reader is a Reader for the RunServiceTerminateRunV1 structure. @@ -24,14 +24,12 @@ type RunServiceTerminateRunV1Reader struct { // ReadResponse reads a server response into the received o. func (o *RunServiceTerminateRunV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRunServiceTerminateRunV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRunServiceTerminateRunV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRunServiceTerminateRunV1OK() *RunServiceTerminateRunV1OK { return &RunServiceTerminateRunV1OK{} } -/*RunServiceTerminateRunV1OK handles this case with default header values. +/* +RunServiceTerminateRunV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type RunServiceTerminateRunV1OK struct { Payload interface{} } +// IsSuccess returns true when this run service terminate run v1 o k response has a 2xx status code +func (o *RunServiceTerminateRunV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this run service terminate run v1 o k response has a 3xx status code +func (o *RunServiceTerminateRunV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this run service terminate run v1 o k response has a 4xx status code +func (o *RunServiceTerminateRunV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this run service terminate run v1 o k response has a 5xx status code +func (o *RunServiceTerminateRunV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this run service terminate run v1 o k response a status code equal to that given +func (o *RunServiceTerminateRunV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the run service terminate run v1 o k response +func (o *RunServiceTerminateRunV1OK) Code() int { + return 200 +} + func (o *RunServiceTerminateRunV1OK) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/runs/{run_id}/terminate][%d] runServiceTerminateRunV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs/{run_id}/terminate][%d] runServiceTerminateRunV1OK %s", 200, payload) +} + +func (o *RunServiceTerminateRunV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs/{run_id}/terminate][%d] runServiceTerminateRunV1OK %s", 200, payload) +} + +func (o *RunServiceTerminateRunV1OK) GetPayload() interface{} { + return o.Payload } func (o *RunServiceTerminateRunV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewRunServiceTerminateRunV1Default(code int) *RunServiceTerminateRunV1Defau } } -/*RunServiceTerminateRunV1Default handles this case with default header values. +/* +RunServiceTerminateRunV1Default describes a response with status code -1, with default header values. An unexpected error response. 
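Because the reader above still hands back an interface{} result, callers usually type-switch on the concrete response types; the new 2xx/4xx/5xx helpers then make the branch logic explicit without manual status-code checks. A sketch under that assumption (the classify function itself is illustrative):

package example

import (
	"fmt"

	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_client/run_service"
)

// classifyTerminateResult summarizes a terminate-run result using the
// regenerated helper methods rather than raw status codes.
func classifyTerminateResult(result interface{}) string {
	switch r := result.(type) {
	case *run_service.RunServiceTerminateRunV1OK:
		return fmt.Sprintf("terminated (HTTP %d)", r.Code())
	case *run_service.RunServiceTerminateRunV1Default:
		if r.IsServerError() {
			return "server error: " + r.String()
		}
		return "request rejected: " + r.String()
	default:
		return fmt.Sprintf("unexpected result type %T", result)
	}
}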
*/ type RunServiceTerminateRunV1Default struct { _statusCode int - Payload *run_model.GatewayruntimeError + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this run service terminate run v1 default response has a 2xx status code +func (o *RunServiceTerminateRunV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this run service terminate run v1 default response has a 3xx status code +func (o *RunServiceTerminateRunV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this run service terminate run v1 default response has a 4xx status code +func (o *RunServiceTerminateRunV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this run service terminate run v1 default response has a 5xx status code +func (o *RunServiceTerminateRunV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this run service terminate run v1 default response a status code equal to that given +func (o *RunServiceTerminateRunV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the run service terminate run v1 default response @@ -94,12 +159,22 @@ func (o *RunServiceTerminateRunV1Default) Code() int { } func (o *RunServiceTerminateRunV1Default) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/runs/{run_id}/terminate][%d] RunService_TerminateRunV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs/{run_id}/terminate][%d] RunService_TerminateRunV1 default %s", o._statusCode, payload) +} + +func (o *RunServiceTerminateRunV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs/{run_id}/terminate][%d] RunService_TerminateRunV1 default %s", o._statusCode, payload) +} + +func (o *RunServiceTerminateRunV1Default) GetPayload() *run_model.GooglerpcStatus { + return o.Payload } func (o *RunServiceTerminateRunV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(run_model.GatewayruntimeError) + o.Payload = new(run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_unarchive_run_v1_parameters.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_unarchive_run_v1_parameters.go index 5eeeb9d4d9a..9648529a45b 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_unarchive_run_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_unarchive_run_v1_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewRunServiceUnarchiveRunV1Params creates a new RunServiceUnarchiveRunV1Params object -// with the default values initialized. +// NewRunServiceUnarchiveRunV1Params creates a new RunServiceUnarchiveRunV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. 
+// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewRunServiceUnarchiveRunV1Params() *RunServiceUnarchiveRunV1Params { - var () return &RunServiceUnarchiveRunV1Params{ - timeout: cr.DefaultTimeout, } } // NewRunServiceUnarchiveRunV1ParamsWithTimeout creates a new RunServiceUnarchiveRunV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRunServiceUnarchiveRunV1ParamsWithTimeout(timeout time.Duration) *RunServiceUnarchiveRunV1Params { - var () return &RunServiceUnarchiveRunV1Params{ - timeout: timeout, } } // NewRunServiceUnarchiveRunV1ParamsWithContext creates a new RunServiceUnarchiveRunV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRunServiceUnarchiveRunV1ParamsWithContext(ctx context.Context) *RunServiceUnarchiveRunV1Params { - var () return &RunServiceUnarchiveRunV1Params{ - Context: ctx, } } // NewRunServiceUnarchiveRunV1ParamsWithHTTPClient creates a new RunServiceUnarchiveRunV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRunServiceUnarchiveRunV1ParamsWithHTTPClient(client *http.Client) *RunServiceUnarchiveRunV1Params { - var () return &RunServiceUnarchiveRunV1Params{ HTTPClient: client, } } -/*RunServiceUnarchiveRunV1Params contains all the parameters to send to the API endpoint -for the run service unarchive run v1 operation typically these are written to a http.Request +/* +RunServiceUnarchiveRunV1Params contains all the parameters to send to the API endpoint + + for the run service unarchive run v1 operation. + + Typically these are written to a http.Request. */ type RunServiceUnarchiveRunV1Params struct { - /*ID - The ID of the run to be restored. + /* ID. + The ID of the run to be restored. */ ID string @@ -72,6 +72,21 @@ type RunServiceUnarchiveRunV1Params struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the run service unarchive run v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceUnarchiveRunV1Params) WithDefaults() *RunServiceUnarchiveRunV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the run service unarchive run v1 params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *RunServiceUnarchiveRunV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the run service unarchive run v1 params func (o *RunServiceUnarchiveRunV1Params) WithTimeout(timeout time.Duration) *RunServiceUnarchiveRunV1Params { o.SetTimeout(timeout) diff --git a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_unarchive_run_v1_responses.go b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_unarchive_run_v1_responses.go index 384515fd6bd..f40954ae6ef 100644 --- a/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_unarchive_run_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/run_client/run_service/run_service_unarchive_run_v1_responses.go @@ -6,14 +6,14 @@ package run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" ) // RunServiceUnarchiveRunV1Reader is a Reader for the RunServiceUnarchiveRunV1 structure. @@ -24,14 +24,12 @@ type RunServiceUnarchiveRunV1Reader struct { // ReadResponse reads a server response into the received o. func (o *RunServiceUnarchiveRunV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRunServiceUnarchiveRunV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRunServiceUnarchiveRunV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRunServiceUnarchiveRunV1OK() *RunServiceUnarchiveRunV1OK { return &RunServiceUnarchiveRunV1OK{} } -/*RunServiceUnarchiveRunV1OK handles this case with default header values. +/* +RunServiceUnarchiveRunV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type RunServiceUnarchiveRunV1OK struct { Payload interface{} } +// IsSuccess returns true when this run service unarchive run v1 o k response has a 2xx status code +func (o *RunServiceUnarchiveRunV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this run service unarchive run v1 o k response has a 3xx status code +func (o *RunServiceUnarchiveRunV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this run service unarchive run v1 o k response has a 4xx status code +func (o *RunServiceUnarchiveRunV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this run service unarchive run v1 o k response has a 5xx status code +func (o *RunServiceUnarchiveRunV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this run service unarchive run v1 o k response a status code equal to that given +func (o *RunServiceUnarchiveRunV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the run service unarchive run v1 o k response +func (o *RunServiceUnarchiveRunV1OK) Code() int { + return 200 +} + func (o *RunServiceUnarchiveRunV1OK) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/runs/{id}:unarchive][%d] runServiceUnarchiveRunV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs/{id}:unarchive][%d] runServiceUnarchiveRunV1OK %s", 200, payload) +} + +func (o *RunServiceUnarchiveRunV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs/{id}:unarchive][%d] runServiceUnarchiveRunV1OK %s", 200, payload) +} + +func (o *RunServiceUnarchiveRunV1OK) GetPayload() interface{} { + return o.Payload } func (o *RunServiceUnarchiveRunV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewRunServiceUnarchiveRunV1Default(code int) *RunServiceUnarchiveRunV1Defau } } -/*RunServiceUnarchiveRunV1Default handles this case with default header values. +/* +RunServiceUnarchiveRunV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type RunServiceUnarchiveRunV1Default struct { _statusCode int - Payload *run_model.GatewayruntimeError + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this run service unarchive run v1 default response has a 2xx status code +func (o *RunServiceUnarchiveRunV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this run service unarchive run v1 default response has a 3xx status code +func (o *RunServiceUnarchiveRunV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this run service unarchive run v1 default response has a 4xx status code +func (o *RunServiceUnarchiveRunV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this run service unarchive run v1 default response has a 5xx status code +func (o *RunServiceUnarchiveRunV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this run service unarchive run v1 default response a status code equal to that given +func (o *RunServiceUnarchiveRunV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the run service unarchive run v1 default response @@ -94,12 +159,22 @@ func (o *RunServiceUnarchiveRunV1Default) Code() int { } func (o *RunServiceUnarchiveRunV1Default) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/runs/{id}:unarchive][%d] RunService_UnarchiveRunV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs/{id}:unarchive][%d] RunService_UnarchiveRunV1 default %s", o._statusCode, payload) +} + +func (o *RunServiceUnarchiveRunV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/runs/{id}:unarchive][%d] RunService_UnarchiveRunV1 default %s", o._statusCode, payload) +} + +func (o *RunServiceUnarchiveRunV1Default) GetPayload() *run_model.GooglerpcStatus { + return o.Payload } func (o *RunServiceUnarchiveRunV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(run_model.GatewayruntimeError) + o.Payload = new(run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/run_model/api_list_runs_response.go b/backend/api/v1beta1/go_http_client/run_model/api_list_runs_response.go index 3681e0edc4a..a00e587b8e4 100644 --- a/backend/api/v1beta1/go_http_client/run_model/api_list_runs_response.go +++ b/backend/api/v1beta1/go_http_client/run_model/api_list_runs_response.go @@ -6,15 +6,16 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIListRunsResponse api list runs response +// // swagger:model apiListRunsResponse type APIListRunsResponse struct { @@ -43,7 +44,6 @@ func (m *APIListRunsResponse) Validate(formats strfmt.Registry) error { } func (m *APIListRunsResponse) validateRuns(formats strfmt.Registry) error { - if swag.IsZero(m.Runs) { // not required return nil } @@ -57,6 +57,47 @@ func (m *APIListRunsResponse) validateRuns(formats strfmt.Registry) error { if err := m.Runs[i].Validate(formats); err != nil { if ve, ok 
:= err.(*errors.Validation); ok { return ve.ValidateName("runs" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("runs" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this api list runs response based on the context it is used +func (m *APIListRunsResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateRuns(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIListRunsResponse) contextValidateRuns(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Runs); i++ { + + if m.Runs[i] != nil { + + if swag.IsZero(m.Runs[i]) { // not required + return nil + } + + if err := m.Runs[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("runs" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("runs" + "." + strconv.Itoa(i)) } return err } diff --git a/backend/api/v1beta1/go_http_client/run_model/api_parameter.go b/backend/api/v1beta1/go_http_client/run_model/api_parameter.go index a6d010007a4..9339ce4d06e 100644 --- a/backend/api/v1beta1/go_http_client/run_model/api_parameter.go +++ b/backend/api/v1beta1/go_http_client/run_model/api_parameter.go @@ -6,12 +6,14 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIParameter api parameter +// // swagger:model apiParameter type APIParameter struct { @@ -27,6 +29,11 @@ func (m *APIParameter) Validate(formats strfmt.Registry) error { return nil } +// ContextValidate validates this api parameter based on context it is used +func (m *APIParameter) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *APIParameter) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v1beta1/go_http_client/run_model/api_pipeline_runtime.go b/backend/api/v1beta1/go_http_client/run_model/api_pipeline_runtime.go index 74c255842ed..80d410b3079 100644 --- a/backend/api/v1beta1/go_http_client/run_model/api_pipeline_runtime.go +++ b/backend/api/v1beta1/go_http_client/run_model/api_pipeline_runtime.go @@ -6,12 +6,14 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIPipelineRuntime api pipeline runtime +// // swagger:model apiPipelineRuntime type APIPipelineRuntime struct { @@ -29,6 +31,11 @@ func (m *APIPipelineRuntime) Validate(formats strfmt.Registry) error { return nil } +// ContextValidate validates this api pipeline runtime based on context it is used +func (m *APIPipelineRuntime) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *APIPipelineRuntime) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v1beta1/go_http_client/run_model/api_pipeline_spec.go b/backend/api/v1beta1/go_http_client/run_model/api_pipeline_spec.go index 49edc9a4d80..eebf3874e41 100644 --- 
a/backend/api/v1beta1/go_http_client/run_model/api_pipeline_spec.go +++ b/backend/api/v1beta1/go_http_client/run_model/api_pipeline_spec.go @@ -6,15 +6,16 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIPipelineSpec api pipeline spec +// // swagger:model apiPipelineSpec type APIPipelineSpec struct { @@ -60,7 +61,6 @@ func (m *APIPipelineSpec) Validate(formats strfmt.Registry) error { } func (m *APIPipelineSpec) validateParameters(formats strfmt.Registry) error { - if swag.IsZero(m.Parameters) { // not required return nil } @@ -74,6 +74,8 @@ func (m *APIPipelineSpec) validateParameters(formats strfmt.Registry) error { if err := m.Parameters[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("parameters" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("parameters" + "." + strconv.Itoa(i)) } return err } @@ -85,7 +87,6 @@ func (m *APIPipelineSpec) validateParameters(formats strfmt.Registry) error { } func (m *APIPipelineSpec) validateRuntimeConfig(formats strfmt.Registry) error { - if swag.IsZero(m.RuntimeConfig) { // not required return nil } @@ -94,6 +95,72 @@ func (m *APIPipelineSpec) validateRuntimeConfig(formats strfmt.Registry) error { if err := m.RuntimeConfig.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("runtime_config") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("runtime_config") + } + return err + } + } + + return nil +} + +// ContextValidate validate this api pipeline spec based on the context it is used +func (m *APIPipelineSpec) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateParameters(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateRuntimeConfig(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIPipelineSpec) contextValidateParameters(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Parameters); i++ { + + if m.Parameters[i] != nil { + + if swag.IsZero(m.Parameters[i]) { // not required + return nil + } + + if err := m.Parameters[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("parameters" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("parameters" + "." 
+ strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +func (m *APIPipelineSpec) contextValidateRuntimeConfig(ctx context.Context, formats strfmt.Registry) error { + + if m.RuntimeConfig != nil { + + if swag.IsZero(m.RuntimeConfig) { // not required + return nil + } + + if err := m.RuntimeConfig.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("runtime_config") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("runtime_config") } return err } diff --git a/backend/api/v1beta1/go_http_client/run_model/api_read_artifact_response.go b/backend/api/v1beta1/go_http_client/run_model/api_read_artifact_response.go index c8add16464f..4a68666c516 100644 --- a/backend/api/v1beta1/go_http_client/run_model/api_read_artifact_response.go +++ b/backend/api/v1beta1/go_http_client/run_model/api_read_artifact_response.go @@ -6,13 +6,14 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIReadArtifactResponse api read artifact response +// // swagger:model apiReadArtifactResponse type APIReadArtifactResponse struct { @@ -23,26 +24,11 @@ type APIReadArtifactResponse struct { // Validate validates this api read artifact response func (m *APIReadArtifactResponse) Validate(formats strfmt.Registry) error { - var res []error - - if err := m.validateData(formats); err != nil { - res = append(res, err) - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } return nil } -func (m *APIReadArtifactResponse) validateData(formats strfmt.Registry) error { - - if swag.IsZero(m.Data) { // not required - return nil - } - - // Format "byte" (base64 string) is already validated when unmarshalled - +// ContextValidate validates this api read artifact response based on context it is used +func (m *APIReadArtifactResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { return nil } diff --git a/backend/api/v1beta1/go_http_client/run_model/api_relationship.go b/backend/api/v1beta1/go_http_client/run_model/api_relationship.go index 48c2be62cc1..addf51afd4d 100644 --- a/backend/api/v1beta1/go_http_client/run_model/api_relationship.go +++ b/backend/api/v1beta1/go_http_client/run_model/api_relationship.go @@ -6,18 +6,28 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // APIRelationship api relationship +// // swagger:model apiRelationship type APIRelationship string +func NewAPIRelationship(value APIRelationship) *APIRelationship { + return &value +} + +// Pointer returns a pointer to a freshly-allocated APIRelationship. 
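The enum types now ship NewAPIRelationship/NewAPIResourceType constructors and Pointer() helpers because the fields that hold them (see the APIResourceKey and APIResourceReference hunks below) become pointer-typed. A sketch of how a caller populates such a reference after this change; APIResourceTypeEXPERIMENT and APIRelationshipOWNER are the usual generated constants for this API, but only the UNKNOWN_* values are visible in these hunks, so treat those two names as assumptions:

package example

import (
	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model"
)

// buildExperimentReference fills the now pointer-typed enum fields using the
// freshly generated helpers instead of plain enum values.
func buildExperimentReference(experimentID string) *run_model.APIResourceReference {
	return &run_model.APIResourceReference{
		Key: &run_model.APIResourceKey{
			ID:   experimentID,
			Type: run_model.NewAPIResourceType(run_model.APIResourceTypeEXPERIMENT), // assumed constant
		},
		Relationship: run_model.APIRelationshipOWNER.Pointer(), // assumed constant
	}
}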
+func (m APIRelationship) Pointer() *APIRelationship { + return &m +} + const ( // APIRelationshipUNKNOWNRELATIONSHIP captures enum value "UNKNOWN_RELATIONSHIP" @@ -44,7 +54,7 @@ func init() { } func (m APIRelationship) validateAPIRelationshipEnum(path, location string, value APIRelationship) error { - if err := validate.Enum(path, location, value, apiRelationshipEnum); err != nil { + if err := validate.EnumCase(path, location, value, apiRelationshipEnum, true); err != nil { return err } return nil @@ -64,3 +74,8 @@ func (m APIRelationship) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this api relationship based on context it is used +func (m APIRelationship) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v1beta1/go_http_client/run_model/api_report_run_metrics_request.go b/backend/api/v1beta1/go_http_client/run_model/api_report_run_metrics_request.go deleted file mode 100644 index 7cf45d35d8f..00000000000 --- a/backend/api/v1beta1/go_http_client/run_model/api_report_run_metrics_request.go +++ /dev/null @@ -1,83 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package run_model - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "strconv" - - strfmt "github.com/go-openapi/strfmt" - - "github.com/go-openapi/errors" - "github.com/go-openapi/swag" -) - -// APIReportRunMetricsRequest api report run metrics request -// swagger:model apiReportRunMetricsRequest -type APIReportRunMetricsRequest struct { - - // List of metrics to report. - Metrics []*APIRunMetric `json:"metrics"` - - // Required. The parent run ID of the metric. - RunID string `json:"run_id,omitempty"` -} - -// Validate validates this api report run metrics request -func (m *APIReportRunMetricsRequest) Validate(formats strfmt.Registry) error { - var res []error - - if err := m.validateMetrics(formats); err != nil { - res = append(res, err) - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} - -func (m *APIReportRunMetricsRequest) validateMetrics(formats strfmt.Registry) error { - - if swag.IsZero(m.Metrics) { // not required - return nil - } - - for i := 0; i < len(m.Metrics); i++ { - if swag.IsZero(m.Metrics[i]) { // not required - continue - } - - if m.Metrics[i] != nil { - if err := m.Metrics[i].Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("metrics" + "." 
+ strconv.Itoa(i)) - } - return err - } - } - - } - - return nil -} - -// MarshalBinary interface implementation -func (m *APIReportRunMetricsRequest) MarshalBinary() ([]byte, error) { - if m == nil { - return nil, nil - } - return swag.WriteJSON(m) -} - -// UnmarshalBinary interface implementation -func (m *APIReportRunMetricsRequest) UnmarshalBinary(b []byte) error { - var res APIReportRunMetricsRequest - if err := swag.ReadJSON(b, &res); err != nil { - return err - } - *m = res - return nil -} diff --git a/backend/api/v1beta1/go_http_client/run_model/api_report_run_metrics_response.go b/backend/api/v1beta1/go_http_client/run_model/api_report_run_metrics_response.go index 5afa15d6eef..79dd433464f 100644 --- a/backend/api/v1beta1/go_http_client/run_model/api_report_run_metrics_response.go +++ b/backend/api/v1beta1/go_http_client/run_model/api_report_run_metrics_response.go @@ -6,15 +6,16 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIReportRunMetricsResponse api report run metrics response +// // swagger:model apiReportRunMetricsResponse type APIReportRunMetricsResponse struct { @@ -37,7 +38,6 @@ func (m *APIReportRunMetricsResponse) Validate(formats strfmt.Registry) error { } func (m *APIReportRunMetricsResponse) validateResults(formats strfmt.Registry) error { - if swag.IsZero(m.Results) { // not required return nil } @@ -51,6 +51,47 @@ func (m *APIReportRunMetricsResponse) validateResults(formats strfmt.Registry) e if err := m.Results[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("results" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("results" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this api report run metrics response based on the context it is used +func (m *APIReportRunMetricsResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateResults(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIReportRunMetricsResponse) contextValidateResults(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Results); i++ { + + if m.Results[i] != nil { + + if swag.IsZero(m.Results[i]) { // not required + return nil + } + + if err := m.Results[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("results" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("results" + "." 
+ strconv.Itoa(i)) } return err } diff --git a/backend/api/v1beta1/go_http_client/run_model/api_resource_key.go b/backend/api/v1beta1/go_http_client/run_model/api_resource_key.go index 606d5c956c3..3a0fd53836c 100644 --- a/backend/api/v1beta1/go_http_client/run_model/api_resource_key.go +++ b/backend/api/v1beta1/go_http_client/run_model/api_resource_key.go @@ -6,13 +6,15 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIResourceKey api resource key +// // swagger:model apiResourceKey type APIResourceKey struct { @@ -20,7 +22,7 @@ type APIResourceKey struct { ID string `json:"id,omitempty"` // The type of the resource that referred to. - Type APIResourceType `json:"type,omitempty"` + Type *APIResourceType `json:"type,omitempty"` } // Validate validates this api resource key @@ -38,16 +40,54 @@ func (m *APIResourceKey) Validate(formats strfmt.Registry) error { } func (m *APIResourceKey) validateType(formats strfmt.Registry) error { - if swag.IsZero(m.Type) { // not required return nil } - if err := m.Type.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("type") + if m.Type != nil { + if err := m.Type.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// ContextValidate validate this api resource key based on the context it is used +func (m *APIResourceKey) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateType(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIResourceKey) contextValidateType(ctx context.Context, formats strfmt.Registry) error { + + if m.Type != nil { + + if swag.IsZero(m.Type) { // not required + return nil + } + + if err := m.Type.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err } - return err } return nil diff --git a/backend/api/v1beta1/go_http_client/run_model/api_resource_reference.go b/backend/api/v1beta1/go_http_client/run_model/api_resource_reference.go index 5525a679e23..17e2533b6a8 100644 --- a/backend/api/v1beta1/go_http_client/run_model/api_resource_reference.go +++ b/backend/api/v1beta1/go_http_client/run_model/api_resource_reference.go @@ -6,13 +6,15 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIResourceReference api resource reference +// // swagger:model apiResourceReference type APIResourceReference struct { @@ -23,7 +25,7 @@ type APIResourceReference struct { Name string `json:"name,omitempty"` // Required field. The relationship from referred resource to the object. 
- Relationship APIRelationship `json:"relationship,omitempty"` + Relationship *APIRelationship `json:"relationship,omitempty"` } // Validate validates this api resource reference @@ -45,7 +47,6 @@ func (m *APIResourceReference) Validate(formats strfmt.Registry) error { } func (m *APIResourceReference) validateKey(formats strfmt.Registry) error { - if swag.IsZero(m.Key) { // not required return nil } @@ -54,6 +55,8 @@ func (m *APIResourceReference) validateKey(formats strfmt.Registry) error { if err := m.Key.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("key") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("key") } return err } @@ -63,16 +66,79 @@ func (m *APIResourceReference) validateKey(formats strfmt.Registry) error { } func (m *APIResourceReference) validateRelationship(formats strfmt.Registry) error { - if swag.IsZero(m.Relationship) { // not required return nil } - if err := m.Relationship.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("relationship") + if m.Relationship != nil { + if err := m.Relationship.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("relationship") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("relationship") + } + return err + } + } + + return nil +} + +// ContextValidate validate this api resource reference based on the context it is used +func (m *APIResourceReference) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateKey(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateRelationship(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *APIResourceReference) contextValidateKey(ctx context.Context, formats strfmt.Registry) error { + + if m.Key != nil { + + if swag.IsZero(m.Key) { // not required + return nil + } + + if err := m.Key.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("key") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("key") + } + return err + } + } + + return nil +} + +func (m *APIResourceReference) contextValidateRelationship(ctx context.Context, formats strfmt.Registry) error { + + if m.Relationship != nil { + + if swag.IsZero(m.Relationship) { // not required + return nil + } + + if err := m.Relationship.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("relationship") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("relationship") + } + return err } - return err } return nil diff --git a/backend/api/v1beta1/go_http_client/run_model/api_resource_type.go b/backend/api/v1beta1/go_http_client/run_model/api_resource_type.go index b686a8783ed..84fba358efc 100644 --- a/backend/api/v1beta1/go_http_client/run_model/api_resource_type.go +++ b/backend/api/v1beta1/go_http_client/run_model/api_resource_type.go @@ -6,18 +6,28 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // APIResourceType api resource type +// // swagger:model apiResourceType type APIResourceType string +func NewAPIResourceType(value APIResourceType) *APIResourceType { + return &value +} + +// Pointer returns a pointer to a freshly-allocated APIResourceType. +func (m APIResourceType) Pointer() *APIResourceType { + return &m +} + const ( // APIResourceTypeUNKNOWNRESOURCETYPE captures enum value "UNKNOWN_RESOURCE_TYPE" @@ -53,7 +63,7 @@ func init() { } func (m APIResourceType) validateAPIResourceTypeEnum(path, location string, value APIResourceType) error { - if err := validate.Enum(path, location, value, apiResourceTypeEnum); err != nil { + if err := validate.EnumCase(path, location, value, apiResourceTypeEnum, true); err != nil { return err } return nil @@ -73,3 +83,8 @@ func (m APIResourceType) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this api resource type based on context it is used +func (m APIResourceType) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v1beta1/go_http_client/run_model/api_run.go b/backend/api/v1beta1/go_http_client/run_model/api_run.go index b03d8ca9bf4..1ed83310d57 100644 --- a/backend/api/v1beta1/go_http_client/run_model/api_run.go +++ b/backend/api/v1beta1/go_http_client/run_model/api_run.go @@ -6,16 +6,17 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // APIRun api run +// // swagger:model apiRun type APIRun struct { @@ -70,7 +71,7 @@ type APIRun struct { Status string `json:"status,omitempty"` // Output. Specify whether this run is in archived or available mode. 
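Alongside Validate, the regenerated models now implement ContextValidate, so code that checks payloads before sending them typically runs both passes; a minimal sketch using the standard go-openapi strfmt.Default registry:

package example

import (
	"context"

	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model"
)

// checkRun performs the two validation passes exposed by the regenerated
// models: structural validation plus the new context-aware pass.
func checkRun(ctx context.Context, run *run_model.APIRun) error {
	if err := run.Validate(strfmt.Default); err != nil {
		return err
	}
	return run.ContextValidate(ctx, strfmt.Default)
}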
- StorageState APIRunStorageState `json:"storage_state,omitempty"` + StorageState *APIRunStorageState `json:"storage_state,omitempty"` } // Validate validates this api run @@ -112,7 +113,6 @@ func (m *APIRun) Validate(formats strfmt.Registry) error { } func (m *APIRun) validateCreatedAt(formats strfmt.Registry) error { - if swag.IsZero(m.CreatedAt) { // not required return nil } @@ -125,7 +125,6 @@ func (m *APIRun) validateCreatedAt(formats strfmt.Registry) error { } func (m *APIRun) validateFinishedAt(formats strfmt.Registry) error { - if swag.IsZero(m.FinishedAt) { // not required return nil } @@ -138,7 +137,6 @@ func (m *APIRun) validateFinishedAt(formats strfmt.Registry) error { } func (m *APIRun) validateMetrics(formats strfmt.Registry) error { - if swag.IsZero(m.Metrics) { // not required return nil } @@ -152,6 +150,8 @@ func (m *APIRun) validateMetrics(formats strfmt.Registry) error { if err := m.Metrics[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("metrics" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("metrics" + "." + strconv.Itoa(i)) } return err } @@ -163,7 +163,6 @@ func (m *APIRun) validateMetrics(formats strfmt.Registry) error { } func (m *APIRun) validatePipelineSpec(formats strfmt.Registry) error { - if swag.IsZero(m.PipelineSpec) { // not required return nil } @@ -172,6 +171,8 @@ func (m *APIRun) validatePipelineSpec(formats strfmt.Registry) error { if err := m.PipelineSpec.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("pipeline_spec") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipeline_spec") } return err } @@ -181,7 +182,6 @@ func (m *APIRun) validatePipelineSpec(formats strfmt.Registry) error { } func (m *APIRun) validateResourceReferences(formats strfmt.Registry) error { - if swag.IsZero(m.ResourceReferences) { // not required return nil } @@ -195,6 +195,8 @@ func (m *APIRun) validateResourceReferences(formats strfmt.Registry) error { if err := m.ResourceReferences[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("resource_references" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("resource_references" + "." 
+ strconv.Itoa(i)) } return err } @@ -206,7 +208,6 @@ func (m *APIRun) validateResourceReferences(formats strfmt.Registry) error { } func (m *APIRun) validateScheduledAt(formats strfmt.Registry) error { - if swag.IsZero(m.ScheduledAt) { // not required return nil } @@ -219,16 +220,137 @@ func (m *APIRun) validateScheduledAt(formats strfmt.Registry) error { } func (m *APIRun) validateStorageState(formats strfmt.Registry) error { - if swag.IsZero(m.StorageState) { // not required return nil } - if err := m.StorageState.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("storage_state") + if m.StorageState != nil { + if err := m.StorageState.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("storage_state") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("storage_state") + } + return err + } + } + + return nil +} + +// ContextValidate validate this api run based on the context it is used +func (m *APIRun) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateMetrics(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidatePipelineSpec(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateResourceReferences(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateStorageState(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIRun) contextValidateMetrics(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Metrics); i++ { + + if m.Metrics[i] != nil { + + if swag.IsZero(m.Metrics[i]) { // not required + return nil + } + + if err := m.Metrics[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("metrics" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("metrics" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +func (m *APIRun) contextValidatePipelineSpec(ctx context.Context, formats strfmt.Registry) error { + + if m.PipelineSpec != nil { + + if swag.IsZero(m.PipelineSpec) { // not required + return nil + } + + if err := m.PipelineSpec.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("pipeline_spec") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipeline_spec") + } + return err + } + } + + return nil +} + +func (m *APIRun) contextValidateResourceReferences(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.ResourceReferences); i++ { + + if m.ResourceReferences[i] != nil { + + if swag.IsZero(m.ResourceReferences[i]) { // not required + return nil + } + + if err := m.ResourceReferences[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("resource_references" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("resource_references" + "." 
+ strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +func (m *APIRun) contextValidateStorageState(ctx context.Context, formats strfmt.Registry) error { + + if m.StorageState != nil { + + if swag.IsZero(m.StorageState) { // not required + return nil + } + + if err := m.StorageState.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("storage_state") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("storage_state") + } + return err } - return err } return nil diff --git a/backend/api/v1beta1/go_http_client/run_model/api_run_detail.go b/backend/api/v1beta1/go_http_client/run_model/api_run_detail.go index 53e76c303b6..145018fdf3e 100644 --- a/backend/api/v1beta1/go_http_client/run_model/api_run_detail.go +++ b/backend/api/v1beta1/go_http_client/run_model/api_run_detail.go @@ -6,13 +6,15 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIRunDetail api run detail +// // swagger:model apiRunDetail type APIRunDetail struct { @@ -42,7 +44,6 @@ func (m *APIRunDetail) Validate(formats strfmt.Registry) error { } func (m *APIRunDetail) validatePipelineRuntime(formats strfmt.Registry) error { - if swag.IsZero(m.PipelineRuntime) { // not required return nil } @@ -51,6 +52,8 @@ func (m *APIRunDetail) validatePipelineRuntime(formats strfmt.Registry) error { if err := m.PipelineRuntime.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("pipeline_runtime") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipeline_runtime") } return err } @@ -60,7 +63,6 @@ func (m *APIRunDetail) validatePipelineRuntime(formats strfmt.Registry) error { } func (m *APIRunDetail) validateRun(formats strfmt.Registry) error { - if swag.IsZero(m.Run) { // not required return nil } @@ -69,6 +71,68 @@ func (m *APIRunDetail) validateRun(formats strfmt.Registry) error { if err := m.Run.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("run") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("run") + } + return err + } + } + + return nil +} + +// ContextValidate validate this api run detail based on the context it is used +func (m *APIRunDetail) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidatePipelineRuntime(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateRun(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *APIRunDetail) contextValidatePipelineRuntime(ctx context.Context, formats strfmt.Registry) error { + + if m.PipelineRuntime != nil { + + if swag.IsZero(m.PipelineRuntime) { // not required + return nil + } + + if err := m.PipelineRuntime.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("pipeline_runtime") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipeline_runtime") + } + return err + } + } + + return nil +} + +func (m *APIRunDetail) contextValidateRun(ctx context.Context, formats strfmt.Registry) error { + + if m.Run != nil { + + if swag.IsZero(m.Run) { // not required + return nil + } + + if err := m.Run.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("run") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("run") } return err } diff --git a/backend/api/v1beta1/go_http_client/run_model/api_run_metric.go b/backend/api/v1beta1/go_http_client/run_model/api_run_metric.go index 9ea781b0b6d..9a61408b3b3 100644 --- a/backend/api/v1beta1/go_http_client/run_model/api_run_metric.go +++ b/backend/api/v1beta1/go_http_client/run_model/api_run_metric.go @@ -6,18 +6,20 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIRunMetric api run metric +// // swagger:model apiRunMetric type APIRunMetric struct { // The display format of metric. - Format RunMetricFormat `json:"format,omitempty"` + Format *RunMetricFormat `json:"format,omitempty"` // Required. The user defined name of the metric. It must between 1 and 63 // characters long and must conform to the following regular expression: @@ -49,16 +51,54 @@ func (m *APIRunMetric) Validate(formats strfmt.Registry) error { } func (m *APIRunMetric) validateFormat(formats strfmt.Registry) error { - if swag.IsZero(m.Format) { // not required return nil } - if err := m.Format.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("format") + if m.Format != nil { + if err := m.Format.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("format") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("format") + } + return err + } + } + + return nil +} + +// ContextValidate validate this api run metric based on the context it is used +func (m *APIRunMetric) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateFormat(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *APIRunMetric) contextValidateFormat(ctx context.Context, formats strfmt.Registry) error { + + if m.Format != nil { + + if swag.IsZero(m.Format) { // not required + return nil + } + + if err := m.Format.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("format") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("format") + } + return err } - return err } return nil diff --git a/backend/api/v1beta1/go_http_client/run_model/api_run_storage_state.go b/backend/api/v1beta1/go_http_client/run_model/api_run_storage_state.go index 04ed0e9e976..8995cc44099 100644 --- a/backend/api/v1beta1/go_http_client/run_model/api_run_storage_state.go +++ b/backend/api/v1beta1/go_http_client/run_model/api_run_storage_state.go @@ -6,18 +6,28 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // APIRunStorageState api run storage state +// // swagger:model apiRunStorageState type APIRunStorageState string +func NewAPIRunStorageState(value APIRunStorageState) *APIRunStorageState { + return &value +} + +// Pointer returns a pointer to a freshly-allocated APIRunStorageState. +func (m APIRunStorageState) Pointer() *APIRunStorageState { + return &m +} + const ( // APIRunStorageStateSTORAGESTATEAVAILABLE captures enum value "STORAGESTATE_AVAILABLE" @@ -41,7 +51,7 @@ func init() { } func (m APIRunStorageState) validateAPIRunStorageStateEnum(path, location string, value APIRunStorageState) error { - if err := validate.Enum(path, location, value, apiRunStorageStateEnum); err != nil { + if err := validate.EnumCase(path, location, value, apiRunStorageStateEnum, true); err != nil { return err } return nil @@ -61,3 +71,8 @@ func (m APIRunStorageState) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this api run storage state based on context it is used +func (m APIRunStorageState) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v1beta1/go_http_client/run_model/gatewayruntime_error.go b/backend/api/v1beta1/go_http_client/run_model/gatewayruntime_error.go deleted file mode 100644 index b64134916a3..00000000000 --- a/backend/api/v1beta1/go_http_client/run_model/gatewayruntime_error.go +++ /dev/null @@ -1,89 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package run_model - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "strconv" - - strfmt "github.com/go-openapi/strfmt" - - "github.com/go-openapi/errors" - "github.com/go-openapi/swag" -) - -// GatewayruntimeError gatewayruntime error -// swagger:model gatewayruntimeError -type GatewayruntimeError struct { - - // code - Code int32 `json:"code,omitempty"` - - // details - Details []*ProtobufAny `json:"details"` - - // error - Error string `json:"error,omitempty"` - - // message - Message string `json:"message,omitempty"` -} - -// Validate validates this gatewayruntime error -func (m *GatewayruntimeError) Validate(formats strfmt.Registry) error { - var res []error - - if err := m.validateDetails(formats); err != nil { - res = append(res, err) - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) 
- } - return nil -} - -func (m *GatewayruntimeError) validateDetails(formats strfmt.Registry) error { - - if swag.IsZero(m.Details) { // not required - return nil - } - - for i := 0; i < len(m.Details); i++ { - if swag.IsZero(m.Details[i]) { // not required - continue - } - - if m.Details[i] != nil { - if err := m.Details[i].Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("details" + "." + strconv.Itoa(i)) - } - return err - } - } - - } - - return nil -} - -// MarshalBinary interface implementation -func (m *GatewayruntimeError) MarshalBinary() ([]byte, error) { - if m == nil { - return nil, nil - } - return swag.WriteJSON(m) -} - -// UnmarshalBinary interface implementation -func (m *GatewayruntimeError) UnmarshalBinary(b []byte) error { - var res GatewayruntimeError - if err := swag.ReadJSON(b, &res); err != nil { - return err - } - *m = res - return nil -} diff --git a/backend/api/v1beta1/go_http_client/run_model/googlerpc_status.go b/backend/api/v1beta1/go_http_client/run_model/googlerpc_status.go new file mode 100644 index 00000000000..a97eaae8d94 --- /dev/null +++ b/backend/api/v1beta1/go_http_client/run_model/googlerpc_status.go @@ -0,0 +1,127 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// GooglerpcStatus googlerpc status +// +// swagger:model googlerpcStatus +type GooglerpcStatus struct { + + // code + Code int32 `json:"code,omitempty"` + + // details + Details []*ProtobufAny `json:"details"` + + // message + Message string `json:"message,omitempty"` +} + +// Validate validates this googlerpc status +func (m *GooglerpcStatus) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateDetails(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) validateDetails(formats strfmt.Registry) error { + if swag.IsZero(m.Details) { // not required + return nil + } + + for i := 0; i < len(m.Details); i++ { + if swag.IsZero(m.Details[i]) { // not required + continue + } + + if m.Details[i] != nil { + if err := m.Details[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this googlerpc status based on the context it is used +func (m *GooglerpcStatus) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateDetails(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) contextValidateDetails(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Details); i++ { + + if m.Details[i] != nil { + + if swag.IsZero(m.Details[i]) { // not required + return nil + } + + if err := m.Details[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." 
+ strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *GooglerpcStatus) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *GooglerpcStatus) UnmarshalBinary(b []byte) error { + var res GooglerpcStatus + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v1beta1/go_http_client/run_model/pipeline_spec_runtime_config.go b/backend/api/v1beta1/go_http_client/run_model/pipeline_spec_runtime_config.go index 73c0a4890fe..243eaabbad0 100644 --- a/backend/api/v1beta1/go_http_client/run_model/pipeline_spec_runtime_config.go +++ b/backend/api/v1beta1/go_http_client/run_model/pipeline_spec_runtime_config.go @@ -6,12 +6,14 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // PipelineSpecRuntimeConfig The runtime config of a PipelineSpec. +// // swagger:model PipelineSpecRuntimeConfig type PipelineSpecRuntimeConfig struct { @@ -31,6 +33,11 @@ func (m *PipelineSpecRuntimeConfig) Validate(formats strfmt.Registry) error { return nil } +// ContextValidate validates this pipeline spec runtime config based on context it is used +func (m *PipelineSpecRuntimeConfig) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *PipelineSpecRuntimeConfig) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v1beta1/go_http_client/run_model/protobuf_any.go b/backend/api/v1beta1/go_http_client/run_model/protobuf_any.go index 9227b6f36df..2b44a6e53c0 100644 --- a/backend/api/v1beta1/go_http_client/run_model/protobuf_any.go +++ b/backend/api/v1beta1/go_http_client/run_model/protobuf_any.go @@ -6,9 +6,10 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "encoding/json" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) @@ -20,45 +21,49 @@ import ( // // Example 1: Pack and unpack a message in C++. // -// Foo foo = ...; -// Any any; -// any.PackFrom(foo); -// ... -// if (any.UnpackTo(&foo)) { -// ... -// } +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } // // Example 2: Pack and unpack a message in Java. // -// Foo foo = ...; -// Any any = Any.pack(foo); -// ... -// if (any.is(Foo.class)) { -// foo = any.unpack(Foo.class); -// } -// -// Example 3: Pack and unpack a message in Python. -// -// foo = Foo(...) -// any = Any() -// any.Pack(foo) -// ... -// if any.Is(Foo.DESCRIPTOR): -// any.Unpack(foo) -// ... -// -// Example 4: Pack and unpack a message in Go -// -// foo := &pb.Foo{...} -// any, err := anypb.New(foo) -// if err != nil { -// ... -// } -// ... -// foo := &pb.Foo{} -// if err := any.UnmarshalTo(foo); err != nil { -// ... -// } +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// // or ... 
+// if (any.isSameTypeAs(Foo.getDefaultInstance())) { +// foo = any.unpack(Foo.getDefaultInstance()); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... +// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := anypb.New(foo) +// if err != nil { +// ... +// } +// ... +// foo := &pb.Foo{} +// if err := any.UnmarshalTo(foo); err != nil { +// ... +// } // // The pack methods provided by protobuf library will by default use // 'type.googleapis.com/full.type.name' as the type URL and the unpack @@ -66,34 +71,34 @@ import ( // in the type URL, for example "foo.bar.com/x/y.z" will yield type // name "y.z". // -// // JSON -// +// ==== // The JSON representation of an `Any` value uses the regular // representation of the deserialized, embedded message, with an // additional field `@type` which contains the type URL. Example: // -// package google.profile; -// message Person { -// string first_name = 1; -// string last_name = 2; -// } +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } // -// { -// "@type": "type.googleapis.com/google.profile.Person", -// "firstName": , -// "lastName": -// } +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } // // If the embedded message type is well-known and has a custom JSON // representation, that representation will be embedded adding a field // `value` which holds the custom JSON in addition to the `@type` // field. Example (for message [google.protobuf.Duration][]): // -// { -// "@type": "type.googleapis.com/google.protobuf.Duration", -// "value": "1.212s" -// } +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// // swagger:model protobufAny type ProtobufAny struct { @@ -120,39 +125,151 @@ type ProtobufAny struct { // // Note: this functionality is not currently available in the official // protobuf release, and it is not used for type URLs beginning with - // type.googleapis.com. + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. // // Schemes other than `http`, `https` (or the empty scheme) might be // used with implementation specific semantics. - TypeURL string `json:"type_url,omitempty"` + AtType string `json:"@type,omitempty"` - // Must be a valid serialized protocol buffer of the above specified type. - // Format: byte - Value strfmt.Base64 `json:"value,omitempty"` + // protobuf any + ProtobufAny map[string]interface{} `json:"-"` } -// Validate validates this protobuf any -func (m *ProtobufAny) Validate(formats strfmt.Registry) error { - var res []error +// UnmarshalJSON unmarshals this object with additional properties from JSON +func (m *ProtobufAny) UnmarshalJSON(data []byte) error { + // stage 1, bind the properties + var stage1 struct { + + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. 
However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + AtType string `json:"@type,omitempty"` + } + if err := json.Unmarshal(data, &stage1); err != nil { + return err + } + var rcv ProtobufAny + + rcv.AtType = stage1.AtType + *m = rcv - if err := m.validateValue(formats); err != nil { - res = append(res, err) + // stage 2, remove properties and add to map + stage2 := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &stage2); err != nil { + return err } - if len(res) > 0 { - return errors.CompositeValidationError(res...) + delete(stage2, "@type") + // stage 3, add additional properties values + if len(stage2) > 0 { + result := make(map[string]interface{}) + for k, v := range stage2 { + var toadd interface{} + if err := json.Unmarshal(v, &toadd); err != nil { + return err + } + result[k] = toadd + } + m.ProtobufAny = result } + return nil } -func (m *ProtobufAny) validateValue(formats strfmt.Registry) error { +// MarshalJSON marshals this object with additional properties into a JSON object +func (m ProtobufAny) MarshalJSON() ([]byte, error) { + var stage1 struct { - if swag.IsZero(m.Value) { // not required - return nil + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. 
As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + AtType string `json:"@type,omitempty"` } - // Format "byte" (base64 string) is already validated when unmarshalled + stage1.AtType = m.AtType + + // make JSON object for known properties + props, err := json.Marshal(stage1) + if err != nil { + return nil, err + } + + if len(m.ProtobufAny) == 0 { // no additional properties + return props, nil + } + + // make JSON object for the additional properties + additional, err := json.Marshal(m.ProtobufAny) + if err != nil { + return nil, err + } + + if len(props) < 3 { // "{}": only additional properties + return additional, nil + } + + // concatenate the 2 objects + return swag.ConcatJSON(props, additional), nil +} + +// Validate validates this protobuf any +func (m *ProtobufAny) Validate(formats strfmt.Registry) error { + return nil +} +// ContextValidate validates this protobuf any based on context it is used +func (m *ProtobufAny) ContextValidate(ctx context.Context, formats strfmt.Registry) error { return nil } diff --git a/backend/api/v1beta1/go_http_client/run_model/protobuf_null_value.go b/backend/api/v1beta1/go_http_client/run_model/protobuf_null_value.go index 852386abfff..22ebd8311f5 100644 --- a/backend/api/v1beta1/go_http_client/run_model/protobuf_null_value.go +++ b/backend/api/v1beta1/go_http_client/run_model/protobuf_null_value.go @@ -6,23 +6,33 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // ProtobufNullValue `NullValue` is a singleton enumeration to represent the null value for the // `Value` type union. // -// The JSON representation for `NullValue` is JSON `null`. +// The JSON representation for `NullValue` is JSON `null`. +// +// - NULL_VALUE: Null value. // -// - NULL_VALUE: Null value. // swagger:model protobufNullValue type ProtobufNullValue string +func NewProtobufNullValue(value ProtobufNullValue) *ProtobufNullValue { + return &value +} + +// Pointer returns a pointer to a freshly-allocated ProtobufNullValue. 
+func (m ProtobufNullValue) Pointer() *ProtobufNullValue { + return &m +} + const ( // ProtobufNullValueNULLVALUE captures enum value "NULL_VALUE" @@ -43,7 +53,7 @@ func init() { } func (m ProtobufNullValue) validateProtobufNullValueEnum(path, location string, value ProtobufNullValue) error { - if err := validate.Enum(path, location, value, protobufNullValueEnum); err != nil { + if err := validate.EnumCase(path, location, value, protobufNullValueEnum, true); err != nil { return err } return nil @@ -63,3 +73,8 @@ func (m ProtobufNullValue) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this protobuf null value based on context it is used +func (m ProtobufNullValue) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v1beta1/go_http_client/run_model/report_run_metrics_response_report_run_metric_result.go b/backend/api/v1beta1/go_http_client/run_model/report_run_metrics_response_report_run_metric_result.go index c0ea5f174af..11983fa3499 100644 --- a/backend/api/v1beta1/go_http_client/run_model/report_run_metrics_response_report_run_metric_result.go +++ b/backend/api/v1beta1/go_http_client/run_model/report_run_metrics_response_report_run_metric_result.go @@ -6,13 +6,15 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // ReportRunMetricsResponseReportRunMetricResult report run metrics response report run metric result +// // swagger:model ReportRunMetricsResponseReportRunMetricResult type ReportRunMetricsResponseReportRunMetricResult struct { @@ -26,7 +28,7 @@ type ReportRunMetricsResponseReportRunMetricResult struct { MetricNodeID string `json:"metric_node_id,omitempty"` // Output. The status of the metric reporting. - Status ReportRunMetricsResponseReportRunMetricResultStatus `json:"status,omitempty"` + Status *ReportRunMetricsResponseReportRunMetricResultStatus `json:"status,omitempty"` } // Validate validates this report run metrics response report run metric result @@ -44,16 +46,54 @@ func (m *ReportRunMetricsResponseReportRunMetricResult) Validate(formats strfmt. } func (m *ReportRunMetricsResponseReportRunMetricResult) validateStatus(formats strfmt.Registry) error { - if swag.IsZero(m.Status) { // not required return nil } - if err := m.Status.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("status") + if m.Status != nil { + if err := m.Status.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("status") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("status") + } + return err + } + } + + return nil +} + +// ContextValidate validate this report run metrics response report run metric result based on the context it is used +func (m *ReportRunMetricsResponseReportRunMetricResult) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateStatus(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *ReportRunMetricsResponseReportRunMetricResult) contextValidateStatus(ctx context.Context, formats strfmt.Registry) error { + + if m.Status != nil { + + if swag.IsZero(m.Status) { // not required + return nil + } + + if err := m.Status.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("status") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("status") + } + return err } - return err } return nil diff --git a/backend/api/v1beta1/go_http_client/run_model/report_run_metrics_response_report_run_metric_result_status.go b/backend/api/v1beta1/go_http_client/run_model/report_run_metrics_response_report_run_metric_result_status.go index 19f00402eea..5f7b96fe2b4 100644 --- a/backend/api/v1beta1/go_http_client/run_model/report_run_metrics_response_report_run_metric_result_status.go +++ b/backend/api/v1beta1/go_http_client/run_model/report_run_metrics_response_report_run_metric_result_status.go @@ -6,22 +6,32 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // ReportRunMetricsResponseReportRunMetricResultStatus - UNSPECIFIED: Default value if not present. -// - OK: Indicates successful reporting. -// - INVALID_ARGUMENT: Indicates that the payload of the metric is invalid. -// - DUPLICATE_REPORTING: Indicates that the metric has been reported before. -// - INTERNAL_ERROR: Indicates that something went wrong in the server. +// - OK: Indicates successful reporting. +// - INVALID_ARGUMENT: Indicates that the payload of the metric is invalid. +// - DUPLICATE_REPORTING: Indicates that the metric has been reported before. +// - INTERNAL_ERROR: Indicates that something went wrong in the server. +// // swagger:model ReportRunMetricsResponseReportRunMetricResultStatus type ReportRunMetricsResponseReportRunMetricResultStatus string +func NewReportRunMetricsResponseReportRunMetricResultStatus(value ReportRunMetricsResponseReportRunMetricResultStatus) *ReportRunMetricsResponseReportRunMetricResultStatus { + return &value +} + +// Pointer returns a pointer to a freshly-allocated ReportRunMetricsResponseReportRunMetricResultStatus. 
+func (m ReportRunMetricsResponseReportRunMetricResultStatus) Pointer() *ReportRunMetricsResponseReportRunMetricResultStatus { + return &m +} + const ( // ReportRunMetricsResponseReportRunMetricResultStatusUNSPECIFIED captures enum value "UNSPECIFIED" @@ -54,7 +64,7 @@ func init() { } func (m ReportRunMetricsResponseReportRunMetricResultStatus) validateReportRunMetricsResponseReportRunMetricResultStatusEnum(path, location string, value ReportRunMetricsResponseReportRunMetricResultStatus) error { - if err := validate.Enum(path, location, value, reportRunMetricsResponseReportRunMetricResultStatusEnum); err != nil { + if err := validate.EnumCase(path, location, value, reportRunMetricsResponseReportRunMetricResultStatusEnum, true); err != nil { return err } return nil @@ -74,3 +84,8 @@ func (m ReportRunMetricsResponseReportRunMetricResultStatus) Validate(formats st } return nil } + +// ContextValidate validates this report run metrics response report run metric result status based on context it is used +func (m ReportRunMetricsResponseReportRunMetricResultStatus) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v1beta1/go_http_client/run_model/run_metric_format.go b/backend/api/v1beta1/go_http_client/run_model/run_metric_format.go index 6a194f501b0..a454bd9be0c 100644 --- a/backend/api/v1beta1/go_http_client/run_model/run_metric_format.go +++ b/backend/api/v1beta1/go_http_client/run_model/run_metric_format.go @@ -6,20 +6,30 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // RunMetricFormat - UNSPECIFIED: Default value if not present. -// - RAW: Display value as its raw format. -// - PERCENTAGE: Display value in percentage format. +// - RAW: Display value as its raw format. +// - PERCENTAGE: Display value in percentage format. +// // swagger:model RunMetricFormat type RunMetricFormat string +func NewRunMetricFormat(value RunMetricFormat) *RunMetricFormat { + return &value +} + +// Pointer returns a pointer to a freshly-allocated RunMetricFormat. +func (m RunMetricFormat) Pointer() *RunMetricFormat { + return &m +} + const ( // RunMetricFormatUNSPECIFIED captures enum value "UNSPECIFIED" @@ -46,7 +56,7 @@ func init() { } func (m RunMetricFormat) validateRunMetricFormatEnum(path, location string, value RunMetricFormat) error { - if err := validate.Enum(path, location, value, runMetricFormatEnum); err != nil { + if err := validate.EnumCase(path, location, value, runMetricFormatEnum, true); err != nil { return err } return nil @@ -66,3 +76,8 @@ func (m RunMetricFormat) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this run metric format based on context it is used +func (m RunMetricFormat) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v1beta1/go_http_client/run_model/run_service_report_run_metrics_v1_body.go b/backend/api/v1beta1/go_http_client/run_model/run_service_report_run_metrics_v1_body.go new file mode 100644 index 00000000000..8c506c54268 --- /dev/null +++ b/backend/api/v1beta1/go_http_client/run_model/run_service_report_run_metrics_v1_body.go @@ -0,0 +1,121 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. 
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// RunServiceReportRunMetricsV1Body run service report run metrics v1 body +// +// swagger:model RunServiceReportRunMetricsV1Body +type RunServiceReportRunMetricsV1Body struct { + + // List of metrics to report. + Metrics []*APIRunMetric `json:"metrics"` +} + +// Validate validates this run service report run metrics v1 body +func (m *RunServiceReportRunMetricsV1Body) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateMetrics(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *RunServiceReportRunMetricsV1Body) validateMetrics(formats strfmt.Registry) error { + if swag.IsZero(m.Metrics) { // not required + return nil + } + + for i := 0; i < len(m.Metrics); i++ { + if swag.IsZero(m.Metrics[i]) { // not required + continue + } + + if m.Metrics[i] != nil { + if err := m.Metrics[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("metrics" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("metrics" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this run service report run metrics v1 body based on the context it is used +func (m *RunServiceReportRunMetricsV1Body) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateMetrics(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *RunServiceReportRunMetricsV1Body) contextValidateMetrics(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Metrics); i++ { + + if m.Metrics[i] != nil { + + if swag.IsZero(m.Metrics[i]) { // not required + return nil + } + + if err := m.Metrics[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("metrics" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("metrics" + "." 
+ strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *RunServiceReportRunMetricsV1Body) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *RunServiceReportRunMetricsV1Body) UnmarshalBinary(b []byte) error { + var res RunServiceReportRunMetricsV1Body + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v1beta1/go_http_client/visualization_client/visualization_client.go b/backend/api/v1beta1/go_http_client/visualization_client/visualization_client.go index a5467d284d0..4a13af3f6f1 100644 --- a/backend/api/v1beta1/go_http_client/visualization_client/visualization_client.go +++ b/backend/api/v1beta1/go_http_client/visualization_client/visualization_client.go @@ -8,8 +8,7 @@ package visualization_client import ( "github.com/go-openapi/runtime" httptransport "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/visualization_client/visualization_service" ) @@ -56,9 +55,7 @@ func New(transport runtime.ClientTransport, formats strfmt.Registry) *Visualizat cli := new(Visualization) cli.Transport = transport - cli.VisualizationService = visualization_service.New(transport, formats) - return cli } @@ -103,7 +100,7 @@ func (cfg *TransportConfig) WithSchemes(schemes []string) *TransportConfig { // Visualization is a client for visualization type Visualization struct { - VisualizationService *visualization_service.Client + VisualizationService visualization_service.ClientService Transport runtime.ClientTransport } @@ -111,7 +108,5 @@ type Visualization struct { // SetTransport changes the transport on the client and all its subresources func (c *Visualization) SetTransport(transport runtime.ClientTransport) { c.Transport = transport - c.VisualizationService.SetTransport(transport) - } diff --git a/backend/api/v1beta1/go_http_client/visualization_client/visualization_service/visualization_service_client.go b/backend/api/v1beta1/go_http_client/visualization_client/visualization_service/visualization_service_client.go index 74320936890..dbed783f636 100644 --- a/backend/api/v1beta1/go_http_client/visualization_client/visualization_service/visualization_service_client.go +++ b/backend/api/v1beta1/go_http_client/visualization_client/visualization_service/visualization_service_client.go @@ -7,15 +7,40 @@ package visualization_service import ( "github.com/go-openapi/runtime" - - strfmt "github.com/go-openapi/strfmt" + httptransport "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" ) // New creates a new visualization service API client. -func New(transport runtime.ClientTransport, formats strfmt.Registry) *Client { +func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService { return &Client{transport: transport, formats: formats} } +// New creates a new visualization service API client with basic auth credentials. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - user: user for basic authentication header. +// - password: password for basic authentication header. 
+func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BasicAuth(user, password) + return &Client{transport: transport, formats: strfmt.Default} +} + +// New creates a new visualization service API client with a bearer token for authentication. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - bearerToken: bearer token for Bearer authentication header. +func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BearerToken(bearerToken) + return &Client{transport: transport, formats: strfmt.Default} +} + /* Client for visualization service API */ @@ -24,16 +49,25 @@ type Client struct { formats strfmt.Registry } +// ClientOption may be used to customize the behavior of Client methods. +type ClientOption func(*runtime.ClientOperation) + +// ClientService is the interface for Client methods +type ClientService interface { + VisualizationServiceCreateVisualizationV1(params *VisualizationServiceCreateVisualizationV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*VisualizationServiceCreateVisualizationV1OK, error) + + SetTransport(transport runtime.ClientTransport) +} + /* VisualizationServiceCreateVisualizationV1 visualization service create visualization v1 API */ -func (a *Client) VisualizationServiceCreateVisualizationV1(params *VisualizationServiceCreateVisualizationV1Params, authInfo runtime.ClientAuthInfoWriter) (*VisualizationServiceCreateVisualizationV1OK, error) { +func (a *Client) VisualizationServiceCreateVisualizationV1(params *VisualizationServiceCreateVisualizationV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*VisualizationServiceCreateVisualizationV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewVisualizationServiceCreateVisualizationV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "VisualizationService_CreateVisualizationV1", Method: "POST", PathPattern: "/apis/v1beta1/visualizations/{namespace}", @@ -45,12 +79,22 @@ func (a *Client) VisualizationServiceCreateVisualizationV1(params *Visualization AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*VisualizationServiceCreateVisualizationV1OK), nil - + success, ok := result.(*VisualizationServiceCreateVisualizationV1OK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*VisualizationServiceCreateVisualizationV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } // SetTransport changes the transport on the client diff --git a/backend/api/v1beta1/go_http_client/visualization_client/visualization_service/visualization_service_create_visualization_v1_parameters.go b/backend/api/v1beta1/go_http_client/visualization_client/visualization_service/visualization_service_create_visualization_v1_parameters.go index 
82086e4e212..cdf433ab924 100644 --- a/backend/api/v1beta1/go_http_client/visualization_client/visualization_service/visualization_service_create_visualization_v1_parameters.go +++ b/backend/api/v1beta1/go_http_client/visualization_client/visualization_service/visualization_service_create_visualization_v1_parameters.go @@ -13,66 +13,82 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - visualization_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/visualization_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/visualization_model" ) -// NewVisualizationServiceCreateVisualizationV1Params creates a new VisualizationServiceCreateVisualizationV1Params object -// with the default values initialized. +// NewVisualizationServiceCreateVisualizationV1Params creates a new VisualizationServiceCreateVisualizationV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewVisualizationServiceCreateVisualizationV1Params() *VisualizationServiceCreateVisualizationV1Params { - var () return &VisualizationServiceCreateVisualizationV1Params{ - timeout: cr.DefaultTimeout, } } // NewVisualizationServiceCreateVisualizationV1ParamsWithTimeout creates a new VisualizationServiceCreateVisualizationV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewVisualizationServiceCreateVisualizationV1ParamsWithTimeout(timeout time.Duration) *VisualizationServiceCreateVisualizationV1Params { - var () return &VisualizationServiceCreateVisualizationV1Params{ - timeout: timeout, } } // NewVisualizationServiceCreateVisualizationV1ParamsWithContext creates a new VisualizationServiceCreateVisualizationV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewVisualizationServiceCreateVisualizationV1ParamsWithContext(ctx context.Context) *VisualizationServiceCreateVisualizationV1Params { - var () return &VisualizationServiceCreateVisualizationV1Params{ - Context: ctx, } } // NewVisualizationServiceCreateVisualizationV1ParamsWithHTTPClient creates a new VisualizationServiceCreateVisualizationV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewVisualizationServiceCreateVisualizationV1ParamsWithHTTPClient(client *http.Client) *VisualizationServiceCreateVisualizationV1Params { - var () return &VisualizationServiceCreateVisualizationV1Params{ HTTPClient: client, } } -/*VisualizationServiceCreateVisualizationV1Params contains all the parameters to send to the API endpoint -for the visualization service create visualization v1 operation typically these are written to a http.Request +/* +VisualizationServiceCreateVisualizationV1Params contains all the parameters to send to the API endpoint + + for the visualization service create visualization v1 operation. + + Typically these are written to a http.Request. 
*/ type VisualizationServiceCreateVisualizationV1Params struct { - /*Body*/ - Body *visualization_model.APIVisualization - /*Namespace*/ + // Namespace. Namespace string + // Visualization. + Visualization *visualization_model.APIVisualization + timeout time.Duration Context context.Context HTTPClient *http.Client } +// WithDefaults hydrates default values in the visualization service create visualization v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *VisualizationServiceCreateVisualizationV1Params) WithDefaults() *VisualizationServiceCreateVisualizationV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the visualization service create visualization v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *VisualizationServiceCreateVisualizationV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the visualization service create visualization v1 params func (o *VisualizationServiceCreateVisualizationV1Params) WithTimeout(timeout time.Duration) *VisualizationServiceCreateVisualizationV1Params { o.SetTimeout(timeout) @@ -106,17 +122,6 @@ func (o *VisualizationServiceCreateVisualizationV1Params) SetHTTPClient(client * o.HTTPClient = client } -// WithBody adds the body to the visualization service create visualization v1 params -func (o *VisualizationServiceCreateVisualizationV1Params) WithBody(body *visualization_model.APIVisualization) *VisualizationServiceCreateVisualizationV1Params { - o.SetBody(body) - return o -} - -// SetBody adds the body to the visualization service create visualization v1 params -func (o *VisualizationServiceCreateVisualizationV1Params) SetBody(body *visualization_model.APIVisualization) { - o.Body = body -} - // WithNamespace adds the namespace to the visualization service create visualization v1 params func (o *VisualizationServiceCreateVisualizationV1Params) WithNamespace(namespace string) *VisualizationServiceCreateVisualizationV1Params { o.SetNamespace(namespace) @@ -128,6 +133,17 @@ func (o *VisualizationServiceCreateVisualizationV1Params) SetNamespace(namespace o.Namespace = namespace } +// WithVisualization adds the visualization to the visualization service create visualization v1 params +func (o *VisualizationServiceCreateVisualizationV1Params) WithVisualization(visualization *visualization_model.APIVisualization) *VisualizationServiceCreateVisualizationV1Params { + o.SetVisualization(visualization) + return o +} + +// SetVisualization adds the visualization to the visualization service create visualization v1 params +func (o *VisualizationServiceCreateVisualizationV1Params) SetVisualization(visualization *visualization_model.APIVisualization) { + o.Visualization = visualization +} + // WriteToRequest writes these params to a swagger request func (o *VisualizationServiceCreateVisualizationV1Params) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { @@ -136,16 +152,15 @@ func (o *VisualizationServiceCreateVisualizationV1Params) WriteToRequest(r runti } var res []error - if o.Body != nil { - if err := r.SetBodyParam(o.Body); err != nil { - return err - } - } - // path param namespace if err := r.SetPathParam("namespace", o.Namespace); err != nil { return err } + if o.Visualization != nil { + if err := r.SetBodyParam(o.Visualization); err != nil { + return err + } + } if len(res) > 0 { return errors.CompositeValidationError(res...) 
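For reference, a minimal sketch of how the regenerated visualization client could be driven after this change, assuming a hypothetical host, bearer token, and namespace; it exercises the new NewClientWithBearerToken constructor, the ClientService interface, and the renamed WithVisualization body setter (formerly WithBody). The APIVisualization fields are left empty for brevity.

package main

import (
	"log"

	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/visualization_client/visualization_service"
	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/visualization_model"
)

func main() {
	// Hypothetical endpoint and token; the bearer token is installed as the
	// transport's default authentication, so authInfo can be nil below.
	svc := visualization_service.NewClientWithBearerToken(
		"localhost:8888", "/", "http", "my-token")

	params := visualization_service.NewVisualizationServiceCreateVisualizationV1Params().
		WithNamespace("kubeflow").
		// The request body is now set via Visualization rather than Body.
		WithVisualization(&visualization_model.APIVisualization{ /* fields omitted */ })

	ok, err := svc.VisualizationServiceCreateVisualizationV1(params, nil)
	if err != nil {
		log.Fatalf("create visualization failed: %v", err)
	}
	log.Printf("created visualization: %+v", ok.Payload)
}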
diff --git a/backend/api/v1beta1/go_http_client/visualization_client/visualization_service/visualization_service_create_visualization_v1_responses.go b/backend/api/v1beta1/go_http_client/visualization_client/visualization_service/visualization_service_create_visualization_v1_responses.go index e7bbe9bec0e..a960c858adb 100644 --- a/backend/api/v1beta1/go_http_client/visualization_client/visualization_service/visualization_service_create_visualization_v1_responses.go +++ b/backend/api/v1beta1/go_http_client/visualization_client/visualization_service/visualization_service_create_visualization_v1_responses.go @@ -6,14 +6,14 @@ package visualization_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - visualization_model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/visualization_model" + "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/visualization_model" ) // VisualizationServiceCreateVisualizationV1Reader is a Reader for the VisualizationServiceCreateVisualizationV1 structure. @@ -24,14 +24,12 @@ type VisualizationServiceCreateVisualizationV1Reader struct { // ReadResponse reads a server response into the received o. func (o *VisualizationServiceCreateVisualizationV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewVisualizationServiceCreateVisualizationV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewVisualizationServiceCreateVisualizationV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewVisualizationServiceCreateVisualizationV1OK() *VisualizationServiceCreat return &VisualizationServiceCreateVisualizationV1OK{} } -/*VisualizationServiceCreateVisualizationV1OK handles this case with default header values. +/* +VisualizationServiceCreateVisualizationV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type VisualizationServiceCreateVisualizationV1OK struct { Payload *visualization_model.APIVisualization } +// IsSuccess returns true when this visualization service create visualization v1 o k response has a 2xx status code +func (o *VisualizationServiceCreateVisualizationV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this visualization service create visualization v1 o k response has a 3xx status code +func (o *VisualizationServiceCreateVisualizationV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this visualization service create visualization v1 o k response has a 4xx status code +func (o *VisualizationServiceCreateVisualizationV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this visualization service create visualization v1 o k response has a 5xx status code +func (o *VisualizationServiceCreateVisualizationV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this visualization service create visualization v1 o k response a status code equal to that given +func (o *VisualizationServiceCreateVisualizationV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the visualization service create visualization v1 o k response +func (o *VisualizationServiceCreateVisualizationV1OK) Code() int { + return 200 +} + func (o *VisualizationServiceCreateVisualizationV1OK) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/visualizations/{namespace}][%d] visualizationServiceCreateVisualizationV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/visualizations/{namespace}][%d] visualizationServiceCreateVisualizationV1OK %s", 200, payload) +} + +func (o *VisualizationServiceCreateVisualizationV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/visualizations/{namespace}][%d] visualizationServiceCreateVisualizationV1OK %s", 200, payload) +} + +func (o *VisualizationServiceCreateVisualizationV1OK) GetPayload() *visualization_model.APIVisualization { + return o.Payload } func (o *VisualizationServiceCreateVisualizationV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewVisualizationServiceCreateVisualizationV1Default(code int) *Visualizatio } } -/*VisualizationServiceCreateVisualizationV1Default handles this case with default header values. +/* +VisualizationServiceCreateVisualizationV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type VisualizationServiceCreateVisualizationV1Default struct { _statusCode int - Payload *visualization_model.GatewayruntimeError + Payload *visualization_model.GooglerpcStatus +} + +// IsSuccess returns true when this visualization service create visualization v1 default response has a 2xx status code +func (o *VisualizationServiceCreateVisualizationV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this visualization service create visualization v1 default response has a 3xx status code +func (o *VisualizationServiceCreateVisualizationV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this visualization service create visualization v1 default response has a 4xx status code +func (o *VisualizationServiceCreateVisualizationV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this visualization service create visualization v1 default response has a 5xx status code +func (o *VisualizationServiceCreateVisualizationV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this visualization service create visualization v1 default response a status code equal to that given +func (o *VisualizationServiceCreateVisualizationV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the visualization service create visualization v1 default response @@ -96,12 +161,22 @@ func (o *VisualizationServiceCreateVisualizationV1Default) Code() int { } func (o *VisualizationServiceCreateVisualizationV1Default) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/visualizations/{namespace}][%d] VisualizationService_CreateVisualizationV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/visualizations/{namespace}][%d] VisualizationService_CreateVisualizationV1 default %s", o._statusCode, payload) +} + +func (o *VisualizationServiceCreateVisualizationV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/visualizations/{namespace}][%d] VisualizationService_CreateVisualizationV1 default %s", o._statusCode, payload) +} + +func (o *VisualizationServiceCreateVisualizationV1Default) GetPayload() *visualization_model.GooglerpcStatus { + return o.Payload } func (o *VisualizationServiceCreateVisualizationV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(visualization_model.GatewayruntimeError) + o.Payload = new(visualization_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v1beta1/go_http_client/visualization_model/api_visualization.go b/backend/api/v1beta1/go_http_client/visualization_model/api_visualization.go index 13f1dee9cab..06cdea86a7b 100644 --- a/backend/api/v1beta1/go_http_client/visualization_model/api_visualization.go +++ b/backend/api/v1beta1/go_http_client/visualization_model/api_visualization.go @@ -6,13 +6,15 @@ package visualization_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // APIVisualization api visualization +// // swagger:model 
apiVisualization type APIVisualization struct { @@ -36,7 +38,7 @@ type APIVisualization struct { Source string `json:"source,omitempty"` // type - Type APIVisualizationType `json:"type,omitempty"` + Type *APIVisualizationType `json:"type,omitempty"` } // Validate validates this api visualization @@ -54,16 +56,54 @@ func (m *APIVisualization) Validate(formats strfmt.Registry) error { } func (m *APIVisualization) validateType(formats strfmt.Registry) error { - if swag.IsZero(m.Type) { // not required return nil } - if err := m.Type.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("type") + if m.Type != nil { + if err := m.Type.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// ContextValidate validate this api visualization based on the context it is used +func (m *APIVisualization) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateType(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIVisualization) contextValidateType(ctx context.Context, formats strfmt.Registry) error { + + if m.Type != nil { + + if swag.IsZero(m.Type) { // not required + return nil + } + + if err := m.Type.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err } - return err } return nil diff --git a/backend/api/v1beta1/go_http_client/visualization_model/api_visualization_type.go b/backend/api/v1beta1/go_http_client/visualization_model/api_visualization_type.go index 38a388d30b3..69958d114bf 100644 --- a/backend/api/v1beta1/go_http_client/visualization_model/api_visualization_type.go +++ b/backend/api/v1beta1/go_http_client/visualization_model/api_visualization_type.go @@ -6,20 +6,30 @@ package visualization_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // APIVisualizationType Type of visualization to be generated. // This is required when creating the pipeline through CreateVisualization // API. +// // swagger:model apiVisualizationType type APIVisualizationType string +func NewAPIVisualizationType(value APIVisualizationType) *APIVisualizationType { + return &value +} + +// Pointer returns a pointer to a freshly-allocated APIVisualizationType. 
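// A minimal usage sketch (not part of the generated code): the regenerated
// parameters type replaces the generic Body/WithBody pair with a named
// Visualization/WithVisualization pair, and APIVisualization.Type is now a
// pointer built via the new NewAPIVisualizationType helper. Assuming the
// go_http_client package layout from the file paths above, the request
// parameters can be assembled as follows (the generated service call itself
// is omitted here):

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/visualization_client/visualization_service"
	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/visualization_model"
)

func main() {
	// Type is now *APIVisualizationType; NewAPIVisualizationType returns a
	// pointer to the enum value (ROC_CURVE here).
	viz := &visualization_model.APIVisualization{
		Source: "gs://my-bucket/roc.csv", // hypothetical source path
		Type:   visualization_model.NewAPIVisualizationType(visualization_model.APIVisualizationTypeROCCURVE),
	}

	// Both Validate and the newly generated ContextValidate can be run client-side.
	if err := viz.Validate(strfmt.Default); err != nil {
		log.Fatal(err)
	}
	if err := viz.ContextValidate(context.Background(), strfmt.Default); err != nil {
		log.Fatal(err)
	}

	// WithBody/SetBody are gone; the request body is set via WithVisualization.
	params := (&visualization_service.VisualizationServiceCreateVisualizationV1Params{}).
		WithDefaults().
		WithNamespace("kubeflow").
		WithVisualization(viz)

	fmt.Println(params.Namespace, *params.Visualization.Type)
}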
+func (m APIVisualizationType) Pointer() *APIVisualizationType { + return &m +} + const ( // APIVisualizationTypeROCCURVE captures enum value "ROC_CURVE" @@ -52,7 +62,7 @@ func init() { } func (m APIVisualizationType) validateAPIVisualizationTypeEnum(path, location string, value APIVisualizationType) error { - if err := validate.Enum(path, location, value, apiVisualizationTypeEnum); err != nil { + if err := validate.EnumCase(path, location, value, apiVisualizationTypeEnum, true); err != nil { return err } return nil @@ -72,3 +82,8 @@ func (m APIVisualizationType) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this api visualization type based on context it is used +func (m APIVisualizationType) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v1beta1/go_http_client/visualization_model/gatewayruntime_error.go b/backend/api/v1beta1/go_http_client/visualization_model/gatewayruntime_error.go deleted file mode 100644 index 22c6e3bf981..00000000000 --- a/backend/api/v1beta1/go_http_client/visualization_model/gatewayruntime_error.go +++ /dev/null @@ -1,89 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package visualization_model - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "strconv" - - strfmt "github.com/go-openapi/strfmt" - - "github.com/go-openapi/errors" - "github.com/go-openapi/swag" -) - -// GatewayruntimeError gatewayruntime error -// swagger:model gatewayruntimeError -type GatewayruntimeError struct { - - // code - Code int32 `json:"code,omitempty"` - - // details - Details []*ProtobufAny `json:"details"` - - // error - Error string `json:"error,omitempty"` - - // message - Message string `json:"message,omitempty"` -} - -// Validate validates this gatewayruntime error -func (m *GatewayruntimeError) Validate(formats strfmt.Registry) error { - var res []error - - if err := m.validateDetails(formats); err != nil { - res = append(res, err) - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} - -func (m *GatewayruntimeError) validateDetails(formats strfmt.Registry) error { - - if swag.IsZero(m.Details) { // not required - return nil - } - - for i := 0; i < len(m.Details); i++ { - if swag.IsZero(m.Details[i]) { // not required - continue - } - - if m.Details[i] != nil { - if err := m.Details[i].Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("details" + "." + strconv.Itoa(i)) - } - return err - } - } - - } - - return nil -} - -// MarshalBinary interface implementation -func (m *GatewayruntimeError) MarshalBinary() ([]byte, error) { - if m == nil { - return nil, nil - } - return swag.WriteJSON(m) -} - -// UnmarshalBinary interface implementation -func (m *GatewayruntimeError) UnmarshalBinary(b []byte) error { - var res GatewayruntimeError - if err := swag.ReadJSON(b, &res); err != nil { - return err - } - *m = res - return nil -} diff --git a/backend/api/v1beta1/go_http_client/visualization_model/googlerpc_status.go b/backend/api/v1beta1/go_http_client/visualization_model/googlerpc_status.go new file mode 100644 index 00000000000..1abeb9ec4f9 --- /dev/null +++ b/backend/api/v1beta1/go_http_client/visualization_model/googlerpc_status.go @@ -0,0 +1,127 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package visualization_model + +// This file was generated by the swagger tool. 
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// GooglerpcStatus googlerpc status +// +// swagger:model googlerpcStatus +type GooglerpcStatus struct { + + // code + Code int32 `json:"code,omitempty"` + + // details + Details []*ProtobufAny `json:"details"` + + // message + Message string `json:"message,omitempty"` +} + +// Validate validates this googlerpc status +func (m *GooglerpcStatus) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateDetails(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) validateDetails(formats strfmt.Registry) error { + if swag.IsZero(m.Details) { // not required + return nil + } + + for i := 0; i < len(m.Details); i++ { + if swag.IsZero(m.Details[i]) { // not required + continue + } + + if m.Details[i] != nil { + if err := m.Details[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this googlerpc status based on the context it is used +func (m *GooglerpcStatus) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateDetails(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) contextValidateDetails(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Details); i++ { + + if m.Details[i] != nil { + + if swag.IsZero(m.Details[i]) { // not required + return nil + } + + if err := m.Details[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *GooglerpcStatus) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *GooglerpcStatus) UnmarshalBinary(b []byte) error { + var res GooglerpcStatus + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v1beta1/go_http_client/visualization_model/protobuf_any.go b/backend/api/v1beta1/go_http_client/visualization_model/protobuf_any.go index 79170185217..bd86be1550d 100644 --- a/backend/api/v1beta1/go_http_client/visualization_model/protobuf_any.go +++ b/backend/api/v1beta1/go_http_client/visualization_model/protobuf_any.go @@ -6,9 +6,10 @@ package visualization_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "encoding/json" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) @@ -20,45 +21,49 @@ import ( // // Example 1: Pack and unpack a message in C++. 
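// A small sketch (again, not generated code) of inspecting the default
// (catch-all) response, whose payload type changes from GatewayruntimeError
// to the new GooglerpcStatus model. In real use the value comes back from the
// generated client call; here it is constructed directly for illustration:

package main

import (
	"fmt"

	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/visualization_client/visualization_service"
	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/visualization_model"
)

func main() {
	resp := visualization_service.NewVisualizationServiceCreateVisualizationV1Default(403)
	resp.Payload = &visualization_model.GooglerpcStatus{
		Code:    7, // PERMISSION_DENIED in google.rpc terms
		Message: "user is not allowed to create visualizations in this namespace",
	}

	// The newly generated helpers classify the status code without manual comparisons.
	fmt.Println(resp.Code(), resp.IsClientError(), resp.IsServerError()) // 403 true false

	// GetPayload returns the structured googlerpcStatus body.
	if st := resp.GetPayload(); st != nil {
		fmt.Printf("rpc code=%d message=%q details=%d\n", st.Code, st.Message, len(st.Details))
	}

	// Error() (and the new String()) now JSON-encode the payload instead of %+v-printing it.
	fmt.Println(resp.Error())
}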
// -// Foo foo = ...; -// Any any; -// any.PackFrom(foo); -// ... -// if (any.UnpackTo(&foo)) { -// ... -// } +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } // // Example 2: Pack and unpack a message in Java. // -// Foo foo = ...; -// Any any = Any.pack(foo); -// ... -// if (any.is(Foo.class)) { -// foo = any.unpack(Foo.class); -// } -// -// Example 3: Pack and unpack a message in Python. -// -// foo = Foo(...) -// any = Any() -// any.Pack(foo) -// ... -// if any.Is(Foo.DESCRIPTOR): -// any.Unpack(foo) -// ... -// -// Example 4: Pack and unpack a message in Go -// -// foo := &pb.Foo{...} -// any, err := anypb.New(foo) -// if err != nil { -// ... -// } -// ... -// foo := &pb.Foo{} -// if err := any.UnmarshalTo(foo); err != nil { -// ... -// } +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// // or ... +// if (any.isSameTypeAs(Foo.getDefaultInstance())) { +// foo = any.unpack(Foo.getDefaultInstance()); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... +// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := anypb.New(foo) +// if err != nil { +// ... +// } +// ... +// foo := &pb.Foo{} +// if err := any.UnmarshalTo(foo); err != nil { +// ... +// } // // The pack methods provided by protobuf library will by default use // 'type.googleapis.com/full.type.name' as the type URL and the unpack @@ -66,34 +71,34 @@ import ( // in the type URL, for example "foo.bar.com/x/y.z" will yield type // name "y.z". // -// // JSON -// +// ==== // The JSON representation of an `Any` value uses the regular // representation of the deserialized, embedded message, with an // additional field `@type` which contains the type URL. Example: // -// package google.profile; -// message Person { -// string first_name = 1; -// string last_name = 2; -// } +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } // -// { -// "@type": "type.googleapis.com/google.profile.Person", -// "firstName": , -// "lastName": -// } +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } // // If the embedded message type is well-known and has a custom JSON // representation, that representation will be embedded adding a field // `value` which holds the custom JSON in addition to the `@type` // field. Example (for message [google.protobuf.Duration][]): // -// { -// "@type": "type.googleapis.com/google.protobuf.Duration", -// "value": "1.212s" -// } +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// // swagger:model protobufAny type ProtobufAny struct { @@ -120,39 +125,151 @@ type ProtobufAny struct { // // Note: this functionality is not currently available in the official // protobuf release, and it is not used for type URLs beginning with - // type.googleapis.com. + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. // // Schemes other than `http`, `https` (or the empty scheme) might be // used with implementation specific semantics. - TypeURL string `json:"type_url,omitempty"` + AtType string `json:"@type,omitempty"` - // Must be a valid serialized protocol buffer of the above specified type. 
- // Format: byte - Value strfmt.Base64 `json:"value,omitempty"` + // protobuf any + ProtobufAny map[string]interface{} `json:"-"` } -// Validate validates this protobuf any -func (m *ProtobufAny) Validate(formats strfmt.Registry) error { - var res []error +// UnmarshalJSON unmarshals this object with additional properties from JSON +func (m *ProtobufAny) UnmarshalJSON(data []byte) error { + // stage 1, bind the properties + var stage1 struct { + + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + AtType string `json:"@type,omitempty"` + } + if err := json.Unmarshal(data, &stage1); err != nil { + return err + } + var rcv ProtobufAny + + rcv.AtType = stage1.AtType + *m = rcv - if err := m.validateValue(formats); err != nil { - res = append(res, err) + // stage 2, remove properties and add to map + stage2 := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &stage2); err != nil { + return err } - if len(res) > 0 { - return errors.CompositeValidationError(res...) + delete(stage2, "@type") + // stage 3, add additional properties values + if len(stage2) > 0 { + result := make(map[string]interface{}) + for k, v := range stage2 { + var toadd interface{} + if err := json.Unmarshal(v, &toadd); err != nil { + return err + } + result[k] = toadd + } + m.ProtobufAny = result } + return nil } -func (m *ProtobufAny) validateValue(formats strfmt.Registry) error { +// MarshalJSON marshals this object with additional properties into a JSON object +func (m ProtobufAny) MarshalJSON() ([]byte, error) { + var stage1 struct { - if swag.IsZero(m.Value) { // not required - return nil + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). 
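// One more short sketch: the regenerated Go ProtobufAny model no longer
// exposes type_url/value fields; "@type" is bound to the new AtType field and
// any remaining JSON properties are collected into the ProtobufAny map by the
// custom UnmarshalJSON (above) and MarshalJSON (continued below). The @type
// URL and "reason" field here are just an illustrative payload:

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/visualization_model"
)

func main() {
	raw := []byte(`{"@type":"type.googleapis.com/google.rpc.ErrorInfo","reason":"QUOTA_EXCEEDED"}`)

	// Stage 1 of UnmarshalJSON binds "@type" to AtType; the remaining
	// properties land in the ProtobufAny map.
	var detail visualization_model.ProtobufAny
	if err := json.Unmarshal(raw, &detail); err != nil {
		log.Fatal(err)
	}
	fmt.Println(detail.AtType)                // type.googleapis.com/google.rpc.ErrorInfo
	fmt.Println(detail.ProtobufAny["reason"]) // QUOTA_EXCEEDED

	// MarshalJSON re-concatenates the known "@type" property with the
	// additional properties, so the round-trip preserves both.
	out, err := json.Marshal(detail)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(out))
}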
+ // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + AtType string `json:"@type,omitempty"` } - // Format "byte" (base64 string) is already validated when unmarshalled + stage1.AtType = m.AtType + + // make JSON object for known properties + props, err := json.Marshal(stage1) + if err != nil { + return nil, err + } + + if len(m.ProtobufAny) == 0 { // no additional properties + return props, nil + } + + // make JSON object for the additional properties + additional, err := json.Marshal(m.ProtobufAny) + if err != nil { + return nil, err + } + + if len(props) < 3 { // "{}": only additional properties + return additional, nil + } + + // concatenate the 2 objects + return swag.ConcatJSON(props, additional), nil +} + +// Validate validates this protobuf any +func (m *ProtobufAny) Validate(formats strfmt.Registry) error { + return nil +} +// ContextValidate validates this protobuf any based on context it is used +func (m *ProtobufAny) ContextValidate(ctx context.Context, formats strfmt.Registry) error { return nil } diff --git a/backend/api/v1beta1/healthz.proto b/backend/api/v1beta1/healthz.proto index fba81cc3781..e8045450898 100644 --- a/backend/api/v1beta1/healthz.proto +++ b/backend/api/v1beta1/healthz.proto @@ -19,10 +19,9 @@ package api; import "google/api/annotations.proto"; import "google/protobuf/empty.proto"; -import "protoc-gen-swagger/options/annotations.proto"; -import "backend/api/v1beta1/error.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; -option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { schemes: [1, 2], // http + https responses: { key: "default"; diff --git a/backend/api/v1beta1/job.proto b/backend/api/v1beta1/job.proto index c76b64e9143..b3f0e4f21f3 100644 --- a/backend/api/v1beta1/job.proto +++ b/backend/api/v1beta1/job.proto @@ -22,10 +22,9 @@ import "google/protobuf/timestamp.proto"; import "google/protobuf/empty.proto"; import "backend/api/v1beta1/pipeline_spec.proto"; import "backend/api/v1beta1/resource_reference.proto"; -import "protoc-gen-swagger/options/annotations.proto"; -import "backend/api/v1beta1/error.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; -option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { schemes: [1, 2], // http + https responses: 
{ key: "default"; diff --git a/backend/api/v1beta1/pipeline.proto b/backend/api/v1beta1/pipeline.proto index 055b08ebce6..e49de4172d7 100644 --- a/backend/api/v1beta1/pipeline.proto +++ b/backend/api/v1beta1/pipeline.proto @@ -20,12 +20,11 @@ package api; import "google/api/annotations.proto"; import "google/protobuf/timestamp.proto"; import "google/protobuf/empty.proto"; -import "backend/api/v1beta1/error.proto"; import "backend/api/v1beta1/parameter.proto"; import "backend/api/v1beta1/resource_reference.proto"; -import "protoc-gen-swagger/options/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; -option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { schemes: [1, 2], // http + https responses: { key: "default"; diff --git a/backend/api/v1beta1/python_http_client/.gitlab-ci.yml b/backend/api/v1beta1/python_http_client/.gitlab-ci.yml deleted file mode 100644 index 1098a4acf21..00000000000 --- a/backend/api/v1beta1/python_http_client/.gitlab-ci.yml +++ /dev/null @@ -1,33 +0,0 @@ -# ref: https://docs.gitlab.com/ee/ci/README.html - -stages: - - test - -.nosetest: - stage: test - script: - - pip install -r requirements.txt - - pip install -r test-requirements.txt - - pytest --cov=kfp_server_api - -nosetest-2.7: - extends: .nosetest - image: python:2.7-alpine -nosetest-3.3: - extends: .nosetest - image: python:3.3-alpine -nosetest-3.4: - extends: .nosetest - image: python:3.4-alpine -nosetest-3.5: - extends: .nosetest - image: python:3.5-alpine -nosetest-3.6: - extends: .nosetest - image: python:3.6-alpine -nosetest-3.7: - extends: .nosetest - image: python:3.7-alpine -nosetest-3.8: - extends: .nosetest - image: python:3.8-alpine diff --git a/backend/api/v1beta1/python_http_client/.travis.yml b/backend/api/v1beta1/python_http_client/.travis.yml deleted file mode 100644 index 7f278fb3d11..00000000000 --- a/backend/api/v1beta1/python_http_client/.travis.yml +++ /dev/null @@ -1,17 +0,0 @@ -# ref: https://docs.travis-ci.com/user/languages/python -language: python -python: - - "2.7" - - "3.2" - - "3.3" - - "3.4" - - "3.5" - - "3.6" - - "3.7" - - "3.8" -# command to install dependencies -install: - - "pip install -r requirements.txt" - - "pip install -r test-requirements.txt" -# command to run tests -script: pytest --cov=kfp_server_api diff --git a/backend/api/v1beta1/python_http_client/README.md b/backend/api/v1beta1/python_http_client/README.md index 129c391be40..72b14fc51ba 100644 --- a/backend/api/v1beta1/python_http_client/README.md +++ b/backend/api/v1beta1/python_http_client/README.md @@ -3,8 +3,8 @@ This file contains REST API specification for Kubeflow Pipelines. 
The file is au This Python package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project: -- API version: 2.5.0 -- Package version: 2.5.0 +- API version: 2.14.3 +- Package version: 2.14.3 - Build package: org.openapitools.codegen.languages.PythonClientCodegen For more information, please visit [https://www.google.com](https://www.google.com) @@ -156,7 +156,6 @@ Class | Method | HTTP request | Description - [ApiPipelineVersion](docs/ApiPipelineVersion.md) - [ApiReadArtifactResponse](docs/ApiReadArtifactResponse.md) - [ApiRelationship](docs/ApiRelationship.md) - - [ApiReportRunMetricsRequest](docs/ApiReportRunMetricsRequest.md) - [ApiReportRunMetricsResponse](docs/ApiReportRunMetricsResponse.md) - [ApiResourceKey](docs/ApiResourceKey.md) - [ApiResourceReference](docs/ApiResourceReference.md) @@ -168,7 +167,7 @@ Class | Method | HTTP request | Description - [ApiStatus](docs/ApiStatus.md) - [ApiTrigger](docs/ApiTrigger.md) - [ApiUrl](docs/ApiUrl.md) - - [GatewayruntimeError](docs/GatewayruntimeError.md) + - [GooglerpcStatus](docs/GooglerpcStatus.md) - [JobMode](docs/JobMode.md) - [PipelineSpecRuntimeConfig](docs/PipelineSpecRuntimeConfig.md) - [ProtobufAny](docs/ProtobufAny.md) @@ -176,6 +175,7 @@ Class | Method | HTTP request | Description - [ReportRunMetricsResponseReportRunMetricResult](docs/ReportRunMetricsResponseReportRunMetricResult.md) - [ReportRunMetricsResponseReportRunMetricResultStatus](docs/ReportRunMetricsResponseReportRunMetricResultStatus.md) - [RunMetricFormat](docs/RunMetricFormat.md) + - [RunServiceReportRunMetricsV1Body](docs/RunServiceReportRunMetricsV1Body.md) ## Documentation For Authorization diff --git a/backend/api/v1beta1/python_http_client/docs/ApiReportRunMetricsRequest.md b/backend/api/v1beta1/python_http_client/docs/ApiReportRunMetricsRequest.md deleted file mode 100644 index 2a7027ebaee..00000000000 --- a/backend/api/v1beta1/python_http_client/docs/ApiReportRunMetricsRequest.md +++ /dev/null @@ -1,11 +0,0 @@ -# ApiReportRunMetricsRequest - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**run_id** | **str** | Required. The parent run ID of the metric. | [optional] -**metrics** | [**list[ApiRunMetric]**](ApiRunMetric.md) | List of metrics to report. | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/backend/api/v1beta1/python_http_client/docs/ExperimentServiceApi.md b/backend/api/v1beta1/python_http_client/docs/ExperimentServiceApi.md index cfa239e2ed1..faf8d914f25 100644 --- a/backend/api/v1beta1/python_http_client/docs/ExperimentServiceApi.md +++ b/backend/api/v1beta1/python_http_client/docs/ExperimentServiceApi.md @@ -89,7 +89,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **experiment_service_create_experiment_v1** -> ApiExperiment experiment_service_create_experiment_v1(body) +> ApiExperiment experiment_service_create_experiment_v1(experiment) Creates a new experiment. 
@@ -127,11 +127,11 @@ configuration = kfp_server_api.Configuration( with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = kfp_server_api.ExperimentServiceApi(api_client) - body = kfp_server_api.ApiExperiment() # ApiExperiment | The experiment to be created. + experiment = kfp_server_api.ApiExperiment() # ApiExperiment | The experiment to be created. try: # Creates a new experiment. - api_response = api_instance.experiment_service_create_experiment_v1(body) + api_response = api_instance.experiment_service_create_experiment_v1(experiment) pprint(api_response) except ApiException as e: print("Exception when calling ExperimentServiceApi->experiment_service_create_experiment_v1: %s\n" % e) @@ -141,7 +141,7 @@ with kfp_server_api.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **body** | [**ApiExperiment**](ApiExperiment.md)| The experiment to be created. | + **experiment** | [**ApiExperiment**](ApiExperiment.md)| The experiment to be created. | ### Return type diff --git a/backend/api/v1beta1/python_http_client/docs/GatewayruntimeError.md b/backend/api/v1beta1/python_http_client/docs/GatewayruntimeError.md deleted file mode 100644 index 368af295143..00000000000 --- a/backend/api/v1beta1/python_http_client/docs/GatewayruntimeError.md +++ /dev/null @@ -1,13 +0,0 @@ -# GatewayruntimeError - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**error** | **str** | | [optional] -**code** | **int** | | [optional] -**message** | **str** | | [optional] -**details** | [**list[ProtobufAny]**](ProtobufAny.md) | | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/backend/api/v1beta1/python_http_client/docs/GooglerpcStatus.md b/backend/api/v1beta1/python_http_client/docs/GooglerpcStatus.md new file mode 100644 index 00000000000..257d91b44ac --- /dev/null +++ b/backend/api/v1beta1/python_http_client/docs/GooglerpcStatus.md @@ -0,0 +1,12 @@ +# GooglerpcStatus + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**code** | **int** | | [optional] +**message** | **str** | | [optional] +**details** | [**list[ProtobufAny]**](ProtobufAny.md) | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v1beta1/python_http_client/docs/JobServiceApi.md b/backend/api/v1beta1/python_http_client/docs/JobServiceApi.md index 2c74cddccc9..ab72d300612 100644 --- a/backend/api/v1beta1/python_http_client/docs/JobServiceApi.md +++ b/backend/api/v1beta1/python_http_client/docs/JobServiceApi.md @@ -13,7 +13,7 @@ Method | HTTP request | Description # **job_service_create_job** -> ApiJob job_service_create_job(body) +> ApiJob job_service_create_job(job) Creates a new job. @@ -51,11 +51,11 @@ configuration = kfp_server_api.Configuration( with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = kfp_server_api.JobServiceApi(api_client) - body = kfp_server_api.ApiJob() # ApiJob | The job to be created + job = kfp_server_api.ApiJob() # ApiJob | The job to be created try: # Creates a new job. 
- api_response = api_instance.job_service_create_job(body) + api_response = api_instance.job_service_create_job(job) pprint(api_response) except ApiException as e: print("Exception when calling JobServiceApi->job_service_create_job: %s\n" % e) @@ -65,7 +65,7 @@ with kfp_server_api.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **body** | [**ApiJob**](ApiJob.md)| The job to be created | + **job** | [**ApiJob**](ApiJob.md)| The job to be created | ### Return type diff --git a/backend/api/v1beta1/python_http_client/docs/PipelineServiceApi.md b/backend/api/v1beta1/python_http_client/docs/PipelineServiceApi.md index 062643085ad..6bdadb3c194 100644 --- a/backend/api/v1beta1/python_http_client/docs/PipelineServiceApi.md +++ b/backend/api/v1beta1/python_http_client/docs/PipelineServiceApi.md @@ -19,7 +19,7 @@ Method | HTTP request | Description # **pipeline_service_create_pipeline_v1** -> ApiPipeline pipeline_service_create_pipeline_v1(body) +> ApiPipeline pipeline_service_create_pipeline_v1(pipeline) Creates a pipeline. @@ -57,11 +57,11 @@ configuration = kfp_server_api.Configuration( with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = kfp_server_api.PipelineServiceApi(api_client) - body = kfp_server_api.ApiPipeline() # ApiPipeline | + pipeline = kfp_server_api.ApiPipeline() # ApiPipeline | try: # Creates a pipeline. - api_response = api_instance.pipeline_service_create_pipeline_v1(body) + api_response = api_instance.pipeline_service_create_pipeline_v1(pipeline) pprint(api_response) except ApiException as e: print("Exception when calling PipelineServiceApi->pipeline_service_create_pipeline_v1: %s\n" % e) @@ -71,7 +71,7 @@ with kfp_server_api.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **body** | [**ApiPipeline**](ApiPipeline.md)| | + **pipeline** | [**ApiPipeline**](ApiPipeline.md)| | ### Return type @@ -95,7 +95,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **pipeline_service_create_pipeline_version_v1** -> ApiPipelineVersion pipeline_service_create_pipeline_version_v1(body) +> ApiPipelineVersion pipeline_service_create_pipeline_version_v1(version) Adds a pipeline version to the specified pipeline. @@ -133,11 +133,11 @@ configuration = kfp_server_api.Configuration( with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = kfp_server_api.PipelineServiceApi(api_client) - body = kfp_server_api.ApiPipelineVersion() # ApiPipelineVersion | ResourceReference inside PipelineVersion specifies the pipeline that this version belongs to. + version = kfp_server_api.ApiPipelineVersion() # ApiPipelineVersion | ResourceReference inside PipelineVersion specifies the pipeline that this version belongs to. try: # Adds a pipeline version to the specified pipeline. 
- api_response = api_instance.pipeline_service_create_pipeline_version_v1(body) + api_response = api_instance.pipeline_service_create_pipeline_version_v1(version) pprint(api_response) except ApiException as e: print("Exception when calling PipelineServiceApi->pipeline_service_create_pipeline_version_v1: %s\n" % e) @@ -147,7 +147,7 @@ with kfp_server_api.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **body** | [**ApiPipelineVersion**](ApiPipelineVersion.md)| ResourceReference inside PipelineVersion specifies the pipeline that this version belongs to. | + **version** | [**ApiPipelineVersion**](ApiPipelineVersion.md)| ResourceReference inside PipelineVersion specifies the pipeline that this version belongs to. | ### Return type diff --git a/backend/api/v1beta1/python_http_client/docs/ProtobufAny.md b/backend/api/v1beta1/python_http_client/docs/ProtobufAny.md index 8b34639a9ca..014cca6dd31 100644 --- a/backend/api/v1beta1/python_http_client/docs/ProtobufAny.md +++ b/backend/api/v1beta1/python_http_client/docs/ProtobufAny.md @@ -1,10 +1,11 @@ # ProtobufAny -`Any` contains an arbitrary serialized protocol buffer message along with a URL that describes the type of the serialized message. Protobuf library provides support to pack/unpack Any values in the form of utility functions or additional generated methods of the Any type. Example 1: Pack and unpack a message in C++. Foo foo = ...; Any any; any.PackFrom(foo); ... if (any.UnpackTo(&foo)) { ... } Example 2: Pack and unpack a message in Java. Foo foo = ...; Any any = Any.pack(foo); ... if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } Example 3: Pack and unpack a message in Python. foo = Foo(...) any = Any() any.Pack(foo) ... if any.Is(Foo.DESCRIPTOR): any.Unpack(foo) ... Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} any, err := anypb.New(foo) if err != nil { ... } ... foo := &pb.Foo{} if err := any.UnmarshalTo(foo); err != nil { ... } The pack methods provided by protobuf library will by default use 'type.googleapis.com/full.type.name' as the type URL and the unpack methods only use the fully qualified type name after the last '/' in the type URL, for example \"foo.bar.com/x/y.z\" will yield type name \"y.z\". JSON The JSON representation of an `Any` value uses the regular representation of the deserialized, embedded message, with an additional field `@type` which contains the type URL. Example: package google.profile; message Person { string first_name = 1; string last_name = 2; } { \"@type\": \"type.googleapis.com/google.profile.Person\", \"firstName\": , \"lastName\": } If the embedded message type is well-known and has a custom JSON representation, that representation will be embedded adding a field `value` which holds the custom JSON in addition to the `@type` field. Example (for message [google.protobuf.Duration][]): { \"@type\": \"type.googleapis.com/google.protobuf.Duration\", \"value\": \"1.212s\" } +`Any` contains an arbitrary serialized protocol buffer message along with a URL that describes the type of the serialized message. Protobuf library provides support to pack/unpack Any values in the form of utility functions or additional generated methods of the Any type. Example 1: Pack and unpack a message in C++. Foo foo = ...; Any any; any.PackFrom(foo); ... if (any.UnpackTo(&foo)) { ... } Example 2: Pack and unpack a message in Java. Foo foo = ...; Any any = Any.pack(foo); ... 
if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } // or ... if (any.isSameTypeAs(Foo.getDefaultInstance())) { foo = any.unpack(Foo.getDefaultInstance()); } Example 3: Pack and unpack a message in Python. foo = Foo(...) any = Any() any.Pack(foo) ... if any.Is(Foo.DESCRIPTOR): any.Unpack(foo) ... Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} any, err := anypb.New(foo) if err != nil { ... } ... foo := &pb.Foo{} if err := any.UnmarshalTo(foo); err != nil { ... } The pack methods provided by protobuf library will by default use 'type.googleapis.com/full.type.name' as the type URL and the unpack methods only use the fully qualified type name after the last '/' in the type URL, for example \"foo.bar.com/x/y.z\" will yield type name \"y.z\". JSON ==== The JSON representation of an `Any` value uses the regular representation of the deserialized, embedded message, with an additional field `@type` which contains the type URL. Example: package google.profile; message Person { string first_name = 1; string last_name = 2; } { \"@type\": \"type.googleapis.com/google.profile.Person\", \"firstName\": , \"lastName\": } If the embedded message type is well-known and has a custom JSON representation, that representation will be embedded adding a field `value` which holds the custom JSON in addition to the `@type` field. Example (for message [google.protobuf.Duration][]): { \"@type\": \"type.googleapis.com/google.protobuf.Duration\", \"value\": \"1.212s\" } ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**type_url** | **str** | A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. This string must contain at least one \"/\" character. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. | [optional] +**type** | **str** | A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. This string must contain at least one \"/\" character. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. 
However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. As of May 2023, there are no widely used type server implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. | [optional] +**type_url** | **str** | A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. | [optional] **value** | **str** | Must be a valid serialized protocol buffer of the above specified type. | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/backend/api/v1beta1/python_http_client/docs/ProtobufNullValue.md b/backend/api/v1beta1/python_http_client/docs/ProtobufNullValue.md index c8e9631389e..c3c32b64fec 100644 --- a/backend/api/v1beta1/python_http_client/docs/ProtobufNullValue.md +++ b/backend/api/v1beta1/python_http_client/docs/ProtobufNullValue.md @@ -1,6 +1,6 @@ # ProtobufNullValue -`NullValue` is a singleton enumeration to represent the null value for the `Value` type union. The JSON representation for `NullValue` is JSON `null`. - NULL_VALUE: Null value. +`NullValue` is a singleton enumeration to represent the null value for the `Value` type union. The JSON representation for `NullValue` is JSON `null`. - NULL_VALUE: Null value. 
## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- diff --git a/backend/api/v1beta1/python_http_client/docs/RunServiceApi.md b/backend/api/v1beta1/python_http_client/docs/RunServiceApi.md index 0d9b499538a..90c449ca5d8 100644 --- a/backend/api/v1beta1/python_http_client/docs/RunServiceApi.md +++ b/backend/api/v1beta1/python_http_client/docs/RunServiceApi.md @@ -93,7 +93,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **run_service_create_run_v1** -> ApiRunDetail run_service_create_run_v1(body) +> ApiRunDetail run_service_create_run_v1(run) Creates a new run. @@ -131,11 +131,11 @@ configuration = kfp_server_api.Configuration( with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = kfp_server_api.RunServiceApi(api_client) - body = kfp_server_api.ApiRun() # ApiRun | + run = kfp_server_api.ApiRun() # ApiRun | try: # Creates a new run. - api_response = api_instance.run_service_create_run_v1(body) + api_response = api_instance.run_service_create_run_v1(run) pprint(api_response) except ApiException as e: print("Exception when calling RunServiceApi->run_service_create_run_v1: %s\n" % e) @@ -145,7 +145,7 @@ with kfp_server_api.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **body** | [**ApiRun**](ApiRun.md)| | + **run** | [**ApiRun**](ApiRun.md)| | ### Return type @@ -526,7 +526,7 @@ with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = kfp_server_api.RunServiceApi(api_client) run_id = 'run_id_example' # str | Required. The parent run ID of the metric. -body = kfp_server_api.ApiReportRunMetricsRequest() # ApiReportRunMetricsRequest | +body = kfp_server_api.RunServiceReportRunMetricsV1Body() # RunServiceReportRunMetricsV1Body | try: # ReportRunMetrics reports metrics of a run. Each metric is reported in its own transaction, so this API accepts partial failures. Metric can be uniquely identified by (run_id, node_id, name). Duplicate reporting will be ignored by the API. First reporting wins. @@ -541,7 +541,7 @@ body = kfp_server_api.ApiReportRunMetricsRequest() # ApiReportRunMetricsRequest Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **run_id** | **str**| Required. The parent run ID of the metric. | - **body** | [**ApiReportRunMetricsRequest**](ApiReportRunMetricsRequest.md)| | + **body** | [**RunServiceReportRunMetricsV1Body**](RunServiceReportRunMetricsV1Body.md)| | ### Return type diff --git a/backend/api/v1beta1/python_http_client/docs/RunServiceReportRunMetricsV1Body.md b/backend/api/v1beta1/python_http_client/docs/RunServiceReportRunMetricsV1Body.md new file mode 100644 index 00000000000..6d1e53e7a9f --- /dev/null +++ b/backend/api/v1beta1/python_http_client/docs/RunServiceReportRunMetricsV1Body.md @@ -0,0 +1,10 @@ +# RunServiceReportRunMetricsV1Body + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**metrics** | [**list[ApiRunMetric]**](ApiRunMetric.md) | List of metrics to report. 
| [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v1beta1/python_http_client/kfp_server_api/__init__.py b/backend/api/v1beta1/python_http_client/kfp_server_api/__init__.py index d056d971470..77f4065da6c 100644 --- a/backend/api/v1beta1/python_http_client/kfp_server_api/__init__.py +++ b/backend/api/v1beta1/python_http_client/kfp_server_api/__init__.py @@ -14,7 +14,7 @@ from __future__ import absolute_import -__version__ = "2.5.0" +__version__ = "2.14.3" # import apis into sdk package from kfp_server_api.api.experiment_service_api import ExperimentServiceApi @@ -52,7 +52,6 @@ from kfp_server_api.models.api_pipeline_version import ApiPipelineVersion from kfp_server_api.models.api_read_artifact_response import ApiReadArtifactResponse from kfp_server_api.models.api_relationship import ApiRelationship -from kfp_server_api.models.api_report_run_metrics_request import ApiReportRunMetricsRequest from kfp_server_api.models.api_report_run_metrics_response import ApiReportRunMetricsResponse from kfp_server_api.models.api_resource_key import ApiResourceKey from kfp_server_api.models.api_resource_reference import ApiResourceReference @@ -64,7 +63,7 @@ from kfp_server_api.models.api_status import ApiStatus from kfp_server_api.models.api_trigger import ApiTrigger from kfp_server_api.models.api_url import ApiUrl -from kfp_server_api.models.gatewayruntime_error import GatewayruntimeError +from kfp_server_api.models.googlerpc_status import GooglerpcStatus from kfp_server_api.models.job_mode import JobMode from kfp_server_api.models.pipeline_spec_runtime_config import PipelineSpecRuntimeConfig from kfp_server_api.models.protobuf_any import ProtobufAny @@ -72,4 +71,5 @@ from kfp_server_api.models.report_run_metrics_response_report_run_metric_result import ReportRunMetricsResponseReportRunMetricResult from kfp_server_api.models.report_run_metrics_response_report_run_metric_result_status import ReportRunMetricsResponseReportRunMetricResultStatus from kfp_server_api.models.run_metric_format import RunMetricFormat +from kfp_server_api.models.run_service_report_run_metrics_v1_body import RunServiceReportRunMetricsV1Body diff --git a/backend/api/v1beta1/python_http_client/kfp_server_api/api/experiment_service_api.py b/backend/api/v1beta1/python_http_client/kfp_server_api/api/experiment_service_api.py index 1c5425929b4..76e3eb4773e 100644 --- a/backend/api/v1beta1/python_http_client/kfp_server_api/api/experiment_service_api.py +++ b/backend/api/v1beta1/python_http_client/kfp_server_api/api/experiment_service_api.py @@ -158,17 +158,17 @@ def experiment_service_archive_experiment_v1_with_http_info(self, id, **kwargs): _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) - def experiment_service_create_experiment_v1(self, body, **kwargs): # noqa: E501 + def experiment_service_create_experiment_v1(self, experiment, **kwargs): # noqa: E501 """Creates a new experiment. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.experiment_service_create_experiment_v1(body, async_req=True) + >>> thread = api.experiment_service_create_experiment_v1(experiment, async_req=True) >>> result = thread.get() - :param body: The experiment to be created. (required) - :type body: ApiExperiment + :param experiment: The experiment to be created. 
(required) + :type experiment: ApiExperiment :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will @@ -184,19 +184,19 @@ def experiment_service_create_experiment_v1(self, body, **kwargs): # noqa: E501 :rtype: ApiExperiment """ kwargs['_return_http_data_only'] = True - return self.experiment_service_create_experiment_v1_with_http_info(body, **kwargs) # noqa: E501 + return self.experiment_service_create_experiment_v1_with_http_info(experiment, **kwargs) # noqa: E501 - def experiment_service_create_experiment_v1_with_http_info(self, body, **kwargs): # noqa: E501 + def experiment_service_create_experiment_v1_with_http_info(self, experiment, **kwargs): # noqa: E501 """Creates a new experiment. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.experiment_service_create_experiment_v1_with_http_info(body, async_req=True) + >>> thread = api.experiment_service_create_experiment_v1_with_http_info(experiment, async_req=True) >>> result = thread.get() - :param body: The experiment to be created. (required) - :type body: ApiExperiment + :param experiment: The experiment to be created. (required) + :type experiment: ApiExperiment :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code @@ -219,7 +219,7 @@ def experiment_service_create_experiment_v1_with_http_info(self, body, **kwargs) local_var_params = locals() all_params = [ - 'body' + 'experiment' ] all_params.extend( [ @@ -238,10 +238,10 @@ def experiment_service_create_experiment_v1_with_http_info(self, body, **kwargs) ) local_var_params[key] = val del local_var_params['kwargs'] - # verify the required parameter 'body' is set - if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 - local_var_params['body'] is None): # noqa: E501 - raise ApiValueError("Missing the required parameter `body` when calling `experiment_service_create_experiment_v1`") # noqa: E501 + # verify the required parameter 'experiment' is set + if self.api_client.client_side_validation and ('experiment' not in local_var_params or # noqa: E501 + local_var_params['experiment'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `experiment` when calling `experiment_service_create_experiment_v1`") # noqa: E501 collection_formats = {} @@ -255,8 +255,8 @@ def experiment_service_create_experiment_v1_with_http_info(self, body, **kwargs) local_var_files = {} body_params = None - if 'body' in local_var_params: - body_params = local_var_params['body'] + if 'experiment' in local_var_params: + body_params = local_var_params['experiment'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 diff --git a/backend/api/v1beta1/python_http_client/kfp_server_api/api/job_service_api.py b/backend/api/v1beta1/python_http_client/kfp_server_api/api/job_service_api.py index bc8bf96c412..30e1543d5bf 100644 --- a/backend/api/v1beta1/python_http_client/kfp_server_api/api/job_service_api.py +++ b/backend/api/v1beta1/python_http_client/kfp_server_api/api/job_service_api.py @@ -36,17 +36,17 @@ def __init__(self, api_client=None): api_client = ApiClient() self.api_client = api_client - def job_service_create_job(self, body, **kwargs): # noqa: E501 + def 
job_service_create_job(self, job, **kwargs): # noqa: E501 """Creates a new job. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.job_service_create_job(body, async_req=True) + >>> thread = api.job_service_create_job(job, async_req=True) >>> result = thread.get() - :param body: The job to be created (required) - :type body: ApiJob + :param job: The job to be created (required) + :type job: ApiJob :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will @@ -62,19 +62,19 @@ def job_service_create_job(self, body, **kwargs): # noqa: E501 :rtype: ApiJob """ kwargs['_return_http_data_only'] = True - return self.job_service_create_job_with_http_info(body, **kwargs) # noqa: E501 + return self.job_service_create_job_with_http_info(job, **kwargs) # noqa: E501 - def job_service_create_job_with_http_info(self, body, **kwargs): # noqa: E501 + def job_service_create_job_with_http_info(self, job, **kwargs): # noqa: E501 """Creates a new job. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.job_service_create_job_with_http_info(body, async_req=True) + >>> thread = api.job_service_create_job_with_http_info(job, async_req=True) >>> result = thread.get() - :param body: The job to be created (required) - :type body: ApiJob + :param job: The job to be created (required) + :type job: ApiJob :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code @@ -97,7 +97,7 @@ def job_service_create_job_with_http_info(self, body, **kwargs): # noqa: E501 local_var_params = locals() all_params = [ - 'body' + 'job' ] all_params.extend( [ @@ -116,10 +116,10 @@ def job_service_create_job_with_http_info(self, body, **kwargs): # noqa: E501 ) local_var_params[key] = val del local_var_params['kwargs'] - # verify the required parameter 'body' is set - if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 - local_var_params['body'] is None): # noqa: E501 - raise ApiValueError("Missing the required parameter `body` when calling `job_service_create_job`") # noqa: E501 + # verify the required parameter 'job' is set + if self.api_client.client_side_validation and ('job' not in local_var_params or # noqa: E501 + local_var_params['job'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `job` when calling `job_service_create_job`") # noqa: E501 collection_formats = {} @@ -133,8 +133,8 @@ def job_service_create_job_with_http_info(self, body, **kwargs): # noqa: E501 local_var_files = {} body_params = None - if 'body' in local_var_params: - body_params = local_var_params['body'] + if 'job' in local_var_params: + body_params = local_var_params['job'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 diff --git a/backend/api/v1beta1/python_http_client/kfp_server_api/api/pipeline_service_api.py b/backend/api/v1beta1/python_http_client/kfp_server_api/api/pipeline_service_api.py index cf038d12d66..86f562e7dd8 100644 --- a/backend/api/v1beta1/python_http_client/kfp_server_api/api/pipeline_service_api.py +++ b/backend/api/v1beta1/python_http_client/kfp_server_api/api/pipeline_service_api.py 
@@ -36,17 +36,17 @@ def __init__(self, api_client=None): api_client = ApiClient() self.api_client = api_client - def pipeline_service_create_pipeline_v1(self, body, **kwargs): # noqa: E501 + def pipeline_service_create_pipeline_v1(self, pipeline, **kwargs): # noqa: E501 """Creates a pipeline. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.pipeline_service_create_pipeline_v1(body, async_req=True) + >>> thread = api.pipeline_service_create_pipeline_v1(pipeline, async_req=True) >>> result = thread.get() - :param body: (required) - :type body: ApiPipeline + :param pipeline: (required) + :type pipeline: ApiPipeline :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will @@ -62,19 +62,19 @@ def pipeline_service_create_pipeline_v1(self, body, **kwargs): # noqa: E501 :rtype: ApiPipeline """ kwargs['_return_http_data_only'] = True - return self.pipeline_service_create_pipeline_v1_with_http_info(body, **kwargs) # noqa: E501 + return self.pipeline_service_create_pipeline_v1_with_http_info(pipeline, **kwargs) # noqa: E501 - def pipeline_service_create_pipeline_v1_with_http_info(self, body, **kwargs): # noqa: E501 + def pipeline_service_create_pipeline_v1_with_http_info(self, pipeline, **kwargs): # noqa: E501 """Creates a pipeline. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.pipeline_service_create_pipeline_v1_with_http_info(body, async_req=True) + >>> thread = api.pipeline_service_create_pipeline_v1_with_http_info(pipeline, async_req=True) >>> result = thread.get() - :param body: (required) - :type body: ApiPipeline + :param pipeline: (required) + :type pipeline: ApiPipeline :param async_req: Whether to execute the request asynchronously. 
:type async_req: bool, optional :param _return_http_data_only: response data without head status code @@ -97,7 +97,7 @@ def pipeline_service_create_pipeline_v1_with_http_info(self, body, **kwargs): # local_var_params = locals() all_params = [ - 'body' + 'pipeline' ] all_params.extend( [ @@ -116,10 +116,10 @@ def pipeline_service_create_pipeline_v1_with_http_info(self, body, **kwargs): # ) local_var_params[key] = val del local_var_params['kwargs'] - # verify the required parameter 'body' is set - if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 - local_var_params['body'] is None): # noqa: E501 - raise ApiValueError("Missing the required parameter `body` when calling `pipeline_service_create_pipeline_v1`") # noqa: E501 + # verify the required parameter 'pipeline' is set + if self.api_client.client_side_validation and ('pipeline' not in local_var_params or # noqa: E501 + local_var_params['pipeline'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `pipeline` when calling `pipeline_service_create_pipeline_v1`") # noqa: E501 collection_formats = {} @@ -133,8 +133,8 @@ def pipeline_service_create_pipeline_v1_with_http_info(self, body, **kwargs): # local_var_files = {} body_params = None - if 'body' in local_var_params: - body_params = local_var_params['body'] + if 'pipeline' in local_var_params: + body_params = local_var_params['pipeline'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -162,17 +162,17 @@ def pipeline_service_create_pipeline_v1_with_http_info(self, body, **kwargs): # _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) - def pipeline_service_create_pipeline_version_v1(self, body, **kwargs): # noqa: E501 + def pipeline_service_create_pipeline_version_v1(self, version, **kwargs): # noqa: E501 """Adds a pipeline version to the specified pipeline. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.pipeline_service_create_pipeline_version_v1(body, async_req=True) + >>> thread = api.pipeline_service_create_pipeline_version_v1(version, async_req=True) >>> result = thread.get() - :param body: ResourceReference inside PipelineVersion specifies the pipeline that this version belongs to. (required) - :type body: ApiPipelineVersion + :param version: ResourceReference inside PipelineVersion specifies the pipeline that this version belongs to. (required) + :type version: ApiPipelineVersion :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will @@ -188,19 +188,19 @@ def pipeline_service_create_pipeline_version_v1(self, body, **kwargs): # noqa: :rtype: ApiPipelineVersion """ kwargs['_return_http_data_only'] = True - return self.pipeline_service_create_pipeline_version_v1_with_http_info(body, **kwargs) # noqa: E501 + return self.pipeline_service_create_pipeline_version_v1_with_http_info(version, **kwargs) # noqa: E501 - def pipeline_service_create_pipeline_version_v1_with_http_info(self, body, **kwargs): # noqa: E501 + def pipeline_service_create_pipeline_version_v1_with_http_info(self, version, **kwargs): # noqa: E501 """Adds a pipeline version to the specified pipeline. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.pipeline_service_create_pipeline_version_v1_with_http_info(body, async_req=True) + >>> thread = api.pipeline_service_create_pipeline_version_v1_with_http_info(version, async_req=True) >>> result = thread.get() - :param body: ResourceReference inside PipelineVersion specifies the pipeline that this version belongs to. (required) - :type body: ApiPipelineVersion + :param version: ResourceReference inside PipelineVersion specifies the pipeline that this version belongs to. (required) + :type version: ApiPipelineVersion :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code @@ -223,7 +223,7 @@ def pipeline_service_create_pipeline_version_v1_with_http_info(self, body, **kwa local_var_params = locals() all_params = [ - 'body' + 'version' ] all_params.extend( [ @@ -242,10 +242,10 @@ def pipeline_service_create_pipeline_version_v1_with_http_info(self, body, **kwa ) local_var_params[key] = val del local_var_params['kwargs'] - # verify the required parameter 'body' is set - if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 - local_var_params['body'] is None): # noqa: E501 - raise ApiValueError("Missing the required parameter `body` when calling `pipeline_service_create_pipeline_version_v1`") # noqa: E501 + # verify the required parameter 'version' is set + if self.api_client.client_side_validation and ('version' not in local_var_params or # noqa: E501 + local_var_params['version'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `version` when calling `pipeline_service_create_pipeline_version_v1`") # noqa: E501 collection_formats = {} @@ -259,8 +259,8 @@ def pipeline_service_create_pipeline_version_v1_with_http_info(self, body, **kwa local_var_files = {} body_params = None - if 'body' in local_var_params: - body_params = local_var_params['body'] + if 'version' in local_var_params: + body_params = local_var_params['version'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 diff --git a/backend/api/v1beta1/python_http_client/kfp_server_api/api/run_service_api.py b/backend/api/v1beta1/python_http_client/kfp_server_api/api/run_service_api.py index 504c5256d5f..0d16123eb21 100644 --- a/backend/api/v1beta1/python_http_client/kfp_server_api/api/run_service_api.py +++ b/backend/api/v1beta1/python_http_client/kfp_server_api/api/run_service_api.py @@ -158,17 +158,17 @@ def run_service_archive_run_v1_with_http_info(self, id, **kwargs): # noqa: E501 _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) - def run_service_create_run_v1(self, body, **kwargs): # noqa: E501 + def run_service_create_run_v1(self, run, **kwargs): # noqa: E501 """Creates a new run. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.run_service_create_run_v1(body, async_req=True) + >>> thread = api.run_service_create_run_v1(run, async_req=True) >>> result = thread.get() - :param body: (required) - :type body: ApiRun + :param run: (required) + :type run: ApiRun :param async_req: Whether to execute the request asynchronously. 
:type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will @@ -184,19 +184,19 @@ def run_service_create_run_v1(self, body, **kwargs): # noqa: E501 :rtype: ApiRunDetail """ kwargs['_return_http_data_only'] = True - return self.run_service_create_run_v1_with_http_info(body, **kwargs) # noqa: E501 + return self.run_service_create_run_v1_with_http_info(run, **kwargs) # noqa: E501 - def run_service_create_run_v1_with_http_info(self, body, **kwargs): # noqa: E501 + def run_service_create_run_v1_with_http_info(self, run, **kwargs): # noqa: E501 """Creates a new run. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.run_service_create_run_v1_with_http_info(body, async_req=True) + >>> thread = api.run_service_create_run_v1_with_http_info(run, async_req=True) >>> result = thread.get() - :param body: (required) - :type body: ApiRun + :param run: (required) + :type run: ApiRun :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code @@ -219,7 +219,7 @@ def run_service_create_run_v1_with_http_info(self, body, **kwargs): # noqa: E50 local_var_params = locals() all_params = [ - 'body' + 'run' ] all_params.extend( [ @@ -238,10 +238,10 @@ def run_service_create_run_v1_with_http_info(self, body, **kwargs): # noqa: E50 ) local_var_params[key] = val del local_var_params['kwargs'] - # verify the required parameter 'body' is set - if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 - local_var_params['body'] is None): # noqa: E501 - raise ApiValueError("Missing the required parameter `body` when calling `run_service_create_run_v1`") # noqa: E501 + # verify the required parameter 'run' is set + if self.api_client.client_side_validation and ('run' not in local_var_params or # noqa: E501 + local_var_params['run'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `run` when calling `run_service_create_run_v1`") # noqa: E501 collection_formats = {} @@ -255,8 +255,8 @@ def run_service_create_run_v1_with_http_info(self, body, **kwargs): # noqa: E50 local_var_files = {} body_params = None - if 'body' in local_var_params: - body_params = local_var_params['body'] + if 'run' in local_var_params: + body_params = local_var_params['run'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -837,7 +837,7 @@ def run_service_report_run_metrics_v1(self, run_id, body, **kwargs): # noqa: E5 :param run_id: Required. The parent run ID of the metric. (required) :type run_id: str :param body: (required) - :type body: ApiReportRunMetricsRequest + :type body: RunServiceReportRunMetricsV1Body :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will @@ -867,7 +867,7 @@ def run_service_report_run_metrics_v1_with_http_info(self, run_id, body, **kwarg :param run_id: Required. The parent run ID of the metric. (required) :type run_id: str :param body: (required) - :type body: ApiReportRunMetricsRequest + :type body: RunServiceReportRunMetricsV1Body :param async_req: Whether to execute the request asynchronously. 
:type async_req: bool, optional :param _return_http_data_only: response data without head status code diff --git a/backend/api/v1beta1/python_http_client/kfp_server_api/api_client.py b/backend/api/v1beta1/python_http_client/kfp_server_api/api_client.py index 61be9c44b00..3adfa2fbe14 100644 --- a/backend/api/v1beta1/python_http_client/kfp_server_api/api_client.py +++ b/backend/api/v1beta1/python_http_client/kfp_server_api/api_client.py @@ -78,7 +78,7 @@ def __init__(self, configuration=None, header_name=None, header_value=None, self.default_headers[header_name] = header_value self.cookie = cookie # Set default User-Agent. - self.user_agent = 'OpenAPI-Generator/2.5.0/python' + self.user_agent = 'OpenAPI-Generator/2.14.3/python' self.client_side_validation = configuration.client_side_validation def __enter__(self): diff --git a/backend/api/v1beta1/python_http_client/kfp_server_api/configuration.py b/backend/api/v1beta1/python_http_client/kfp_server_api/configuration.py index 1b7448aff8e..a47f22d1b2f 100644 --- a/backend/api/v1beta1/python_http_client/kfp_server_api/configuration.py +++ b/backend/api/v1beta1/python_http_client/kfp_server_api/configuration.py @@ -351,8 +351,8 @@ def to_debug_report(self): return "Python SDK Debug Report:\n"\ "OS: {env}\n"\ "Python Version: {pyversion}\n"\ - "Version of the API: 2.5.0\n"\ - "SDK Package Version: 2.5.0".\ + "Version of the API: 2.14.3\n"\ + "SDK Package Version: 2.14.3".\ format(env=sys.platform, pyversion=sys.version) def get_host_settings(self): diff --git a/backend/api/v1beta1/python_http_client/kfp_server_api/models/__init__.py b/backend/api/v1beta1/python_http_client/kfp_server_api/models/__init__.py index c1a7b499b0f..6bf3a218346 100644 --- a/backend/api/v1beta1/python_http_client/kfp_server_api/models/__init__.py +++ b/backend/api/v1beta1/python_http_client/kfp_server_api/models/__init__.py @@ -33,7 +33,6 @@ from kfp_server_api.models.api_pipeline_version import ApiPipelineVersion from kfp_server_api.models.api_read_artifact_response import ApiReadArtifactResponse from kfp_server_api.models.api_relationship import ApiRelationship -from kfp_server_api.models.api_report_run_metrics_request import ApiReportRunMetricsRequest from kfp_server_api.models.api_report_run_metrics_response import ApiReportRunMetricsResponse from kfp_server_api.models.api_resource_key import ApiResourceKey from kfp_server_api.models.api_resource_reference import ApiResourceReference @@ -45,7 +44,7 @@ from kfp_server_api.models.api_status import ApiStatus from kfp_server_api.models.api_trigger import ApiTrigger from kfp_server_api.models.api_url import ApiUrl -from kfp_server_api.models.gatewayruntime_error import GatewayruntimeError +from kfp_server_api.models.googlerpc_status import GooglerpcStatus from kfp_server_api.models.job_mode import JobMode from kfp_server_api.models.pipeline_spec_runtime_config import PipelineSpecRuntimeConfig from kfp_server_api.models.protobuf_any import ProtobufAny @@ -53,3 +52,4 @@ from kfp_server_api.models.report_run_metrics_response_report_run_metric_result import ReportRunMetricsResponseReportRunMetricResult from kfp_server_api.models.report_run_metrics_response_report_run_metric_result_status import ReportRunMetricsResponseReportRunMetricResultStatus from kfp_server_api.models.run_metric_format import RunMetricFormat +from kfp_server_api.models.run_service_report_run_metrics_v1_body import RunServiceReportRunMetricsV1Body diff --git a/backend/api/v1beta1/python_http_client/kfp_server_api/models/api_report_run_metrics_request.py 
b/backend/api/v1beta1/python_http_client/kfp_server_api/models/api_report_run_metrics_request.py deleted file mode 100644 index bd3647b3caf..00000000000 --- a/backend/api/v1beta1/python_http_client/kfp_server_api/models/api_report_run_metrics_request.py +++ /dev/null @@ -1,150 +0,0 @@ -# coding: utf-8 - -""" - Kubeflow Pipelines API - - This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. - - Contact: kubeflow-pipelines@google.com - Generated by: https://openapi-generator.tech -""" - - -import pprint -import re # noqa: F401 - -import six - -from kfp_server_api.configuration import Configuration - - -class ApiReportRunMetricsRequest(object): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - """ - - """ - Attributes: - openapi_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - openapi_types = { - 'run_id': 'str', - 'metrics': 'list[ApiRunMetric]' - } - - attribute_map = { - 'run_id': 'run_id', - 'metrics': 'metrics' - } - - def __init__(self, run_id=None, metrics=None, local_vars_configuration=None): # noqa: E501 - """ApiReportRunMetricsRequest - a model defined in OpenAPI""" # noqa: E501 - if local_vars_configuration is None: - local_vars_configuration = Configuration() - self.local_vars_configuration = local_vars_configuration - - self._run_id = None - self._metrics = None - self.discriminator = None - - if run_id is not None: - self.run_id = run_id - if metrics is not None: - self.metrics = metrics - - @property - def run_id(self): - """Gets the run_id of this ApiReportRunMetricsRequest. # noqa: E501 - - Required. The parent run ID of the metric. # noqa: E501 - - :return: The run_id of this ApiReportRunMetricsRequest. # noqa: E501 - :rtype: str - """ - return self._run_id - - @run_id.setter - def run_id(self, run_id): - """Sets the run_id of this ApiReportRunMetricsRequest. - - Required. The parent run ID of the metric. # noqa: E501 - - :param run_id: The run_id of this ApiReportRunMetricsRequest. # noqa: E501 - :type run_id: str - """ - - self._run_id = run_id - - @property - def metrics(self): - """Gets the metrics of this ApiReportRunMetricsRequest. # noqa: E501 - - List of metrics to report. # noqa: E501 - - :return: The metrics of this ApiReportRunMetricsRequest. # noqa: E501 - :rtype: list[ApiRunMetric] - """ - return self._metrics - - @metrics.setter - def metrics(self, metrics): - """Sets the metrics of this ApiReportRunMetricsRequest. - - List of metrics to report. # noqa: E501 - - :param metrics: The metrics of this ApiReportRunMetricsRequest. 
# noqa: E501 - :type metrics: list[ApiRunMetric] - """ - - self._metrics = metrics - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.openapi_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ApiReportRunMetricsRequest): - return False - - return self.to_dict() == other.to_dict() - - def __ne__(self, other): - """Returns true if both objects are not equal""" - if not isinstance(other, ApiReportRunMetricsRequest): - return True - - return self.to_dict() != other.to_dict() diff --git a/backend/api/v1beta1/python_http_client/kfp_server_api/models/gatewayruntime_error.py b/backend/api/v1beta1/python_http_client/kfp_server_api/models/gatewayruntime_error.py deleted file mode 100644 index ac338f5c30f..00000000000 --- a/backend/api/v1beta1/python_http_client/kfp_server_api/models/gatewayruntime_error.py +++ /dev/null @@ -1,198 +0,0 @@ -# coding: utf-8 - -""" - Kubeflow Pipelines API - - This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. - - Contact: kubeflow-pipelines@google.com - Generated by: https://openapi-generator.tech -""" - - -import pprint -import re # noqa: F401 - -import six - -from kfp_server_api.configuration import Configuration - - -class GatewayruntimeError(object): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - """ - - """ - Attributes: - openapi_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - openapi_types = { - 'error': 'str', - 'code': 'int', - 'message': 'str', - 'details': 'list[ProtobufAny]' - } - - attribute_map = { - 'error': 'error', - 'code': 'code', - 'message': 'message', - 'details': 'details' - } - - def __init__(self, error=None, code=None, message=None, details=None, local_vars_configuration=None): # noqa: E501 - """GatewayruntimeError - a model defined in OpenAPI""" # noqa: E501 - if local_vars_configuration is None: - local_vars_configuration = Configuration() - self.local_vars_configuration = local_vars_configuration - - self._error = None - self._code = None - self._message = None - self._details = None - self.discriminator = None - - if error is not None: - self.error = error - if code is not None: - self.code = code - if message is not None: - self.message = message - if details is not None: - self.details = details - - @property - def error(self): - """Gets the error of this GatewayruntimeError. # noqa: E501 - - - :return: The error of this GatewayruntimeError. # noqa: E501 - :rtype: str - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this GatewayruntimeError. 
- - - :param error: The error of this GatewayruntimeError. # noqa: E501 - :type error: str - """ - - self._error = error - - @property - def code(self): - """Gets the code of this GatewayruntimeError. # noqa: E501 - - - :return: The code of this GatewayruntimeError. # noqa: E501 - :rtype: int - """ - return self._code - - @code.setter - def code(self, code): - """Sets the code of this GatewayruntimeError. - - - :param code: The code of this GatewayruntimeError. # noqa: E501 - :type code: int - """ - - self._code = code - - @property - def message(self): - """Gets the message of this GatewayruntimeError. # noqa: E501 - - - :return: The message of this GatewayruntimeError. # noqa: E501 - :rtype: str - """ - return self._message - - @message.setter - def message(self, message): - """Sets the message of this GatewayruntimeError. - - - :param message: The message of this GatewayruntimeError. # noqa: E501 - :type message: str - """ - - self._message = message - - @property - def details(self): - """Gets the details of this GatewayruntimeError. # noqa: E501 - - - :return: The details of this GatewayruntimeError. # noqa: E501 - :rtype: list[ProtobufAny] - """ - return self._details - - @details.setter - def details(self, details): - """Sets the details of this GatewayruntimeError. - - - :param details: The details of this GatewayruntimeError. # noqa: E501 - :type details: list[ProtobufAny] - """ - - self._details = details - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.openapi_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, GatewayruntimeError): - return False - - return self.to_dict() == other.to_dict() - - def __ne__(self, other): - """Returns true if both objects are not equal""" - if not isinstance(other, GatewayruntimeError): - return True - - return self.to_dict() != other.to_dict() diff --git a/backend/api/v1beta1/python_http_client/kfp_server_api/models/googlerpc_status.py b/backend/api/v1beta1/python_http_client/kfp_server_api/models/googlerpc_status.py new file mode 100644 index 00000000000..c5e69cc7e22 --- /dev/null +++ b/backend/api/v1beta1/python_http_client/kfp_server_api/models/googlerpc_status.py @@ -0,0 +1,172 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class GooglerpcStatus(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'code': 'int', + 'message': 'str', + 'details': 'list[ProtobufAny]' + } + + attribute_map = { + 'code': 'code', + 'message': 'message', + 'details': 'details' + } + + def __init__(self, code=None, message=None, details=None, local_vars_configuration=None): # noqa: E501 + """GooglerpcStatus - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._code = None + self._message = None + self._details = None + self.discriminator = None + + if code is not None: + self.code = code + if message is not None: + self.message = message + if details is not None: + self.details = details + + @property + def code(self): + """Gets the code of this GooglerpcStatus. # noqa: E501 + + + :return: The code of this GooglerpcStatus. # noqa: E501 + :rtype: int + """ + return self._code + + @code.setter + def code(self, code): + """Sets the code of this GooglerpcStatus. + + + :param code: The code of this GooglerpcStatus. # noqa: E501 + :type code: int + """ + + self._code = code + + @property + def message(self): + """Gets the message of this GooglerpcStatus. # noqa: E501 + + + :return: The message of this GooglerpcStatus. # noqa: E501 + :rtype: str + """ + return self._message + + @message.setter + def message(self, message): + """Sets the message of this GooglerpcStatus. + + + :param message: The message of this GooglerpcStatus. # noqa: E501 + :type message: str + """ + + self._message = message + + @property + def details(self): + """Gets the details of this GooglerpcStatus. # noqa: E501 + + + :return: The details of this GooglerpcStatus. # noqa: E501 + :rtype: list[ProtobufAny] + """ + return self._details + + @details.setter + def details(self, details): + """Sets the details of this GooglerpcStatus. + + + :param details: The details of this GooglerpcStatus. 
# noqa: E501 + :type details: list[ProtobufAny] + """ + + self._details = details + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, GooglerpcStatus): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, GooglerpcStatus): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v1beta1/python_http_client/kfp_server_api/models/protobuf_any.py b/backend/api/v1beta1/python_http_client/kfp_server_api/models/protobuf_any.py index faef6ec444c..c8cac5bebfb 100644 --- a/backend/api/v1beta1/python_http_client/kfp_server_api/models/protobuf_any.py +++ b/backend/api/v1beta1/python_http_client/kfp_server_api/models/protobuf_any.py @@ -33,35 +33,63 @@ class ProtobufAny(object): and the value is json key in definition. """ openapi_types = { + 'type': 'str', 'type_url': 'str', 'value': 'str' } attribute_map = { + 'type': '@type', 'type_url': 'type_url', 'value': 'value' } - def __init__(self, type_url=None, value=None, local_vars_configuration=None): # noqa: E501 + def __init__(self, type=None, type_url=None, value=None, local_vars_configuration=None): # noqa: E501 """ProtobufAny - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration + self._type = None self._type_url = None self._value = None self.discriminator = None + if type is not None: + self.type = type if type_url is not None: self.type_url = type_url if value is not None: self.value = value + @property + def type(self): + """Gets the type of this ProtobufAny. # noqa: E501 + + A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. This string must contain at least one \"/\" character. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. 
(Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. As of May 2023, there are no widely used type server implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. # noqa: E501 + + :return: The type of this ProtobufAny. # noqa: E501 + :rtype: str + """ + return self._type + + @type.setter + def type(self, type): + """Sets the type of this ProtobufAny. + + A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. This string must contain at least one \"/\" character. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. As of May 2023, there are no widely used type server implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. # noqa: E501 + + :param type: The type of this ProtobufAny. # noqa: E501 + :type type: str + """ + + self._type = type + @property def type_url(self): """Gets the type_url of this ProtobufAny. # noqa: E501 - A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. This string must contain at least one \"/\" character. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. 
Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. # noqa: E501 + A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. # noqa: E501 :return: The type_url of this ProtobufAny. # noqa: E501 :rtype: str @@ -72,7 +100,7 @@ def type_url(self): def type_url(self, type_url): """Sets the type_url of this ProtobufAny. - A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. This string must contain at least one \"/\" character. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. # noqa: E501 + A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. 
* An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. # noqa: E501 :param type_url: The type_url of this ProtobufAny. # noqa: E501 :type type_url: str diff --git a/backend/api/v1beta1/python_http_client/kfp_server_api/models/run_service_report_run_metrics_v1_body.py b/backend/api/v1beta1/python_http_client/kfp_server_api/models/run_service_report_run_metrics_v1_body.py new file mode 100644 index 00000000000..d069113319a --- /dev/null +++ b/backend/api/v1beta1/python_http_client/kfp_server_api/models/run_service_report_run_metrics_v1_body.py @@ -0,0 +1,122 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class RunServiceReportRunMetricsV1Body(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'metrics': 'list[ApiRunMetric]' + } + + attribute_map = { + 'metrics': 'metrics' + } + + def __init__(self, metrics=None, local_vars_configuration=None): # noqa: E501 + """RunServiceReportRunMetricsV1Body - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._metrics = None + self.discriminator = None + + if metrics is not None: + self.metrics = metrics + + @property + def metrics(self): + """Gets the metrics of this RunServiceReportRunMetricsV1Body. # noqa: E501 + + List of metrics to report. # noqa: E501 + + :return: The metrics of this RunServiceReportRunMetricsV1Body. # noqa: E501 + :rtype: list[ApiRunMetric] + """ + return self._metrics + + @metrics.setter + def metrics(self, metrics): + """Sets the metrics of this RunServiceReportRunMetricsV1Body. + + List of metrics to report. # noqa: E501 + + :param metrics: The metrics of this RunServiceReportRunMetricsV1Body. 
# noqa: E501 + :type metrics: list[ApiRunMetric] + """ + + self._metrics = metrics + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, RunServiceReportRunMetricsV1Body): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, RunServiceReportRunMetricsV1Body): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v1beta1/python_http_client/setup.py b/backend/api/v1beta1/python_http_client/setup.py index c68c56e42b8..67217140497 100644 --- a/backend/api/v1beta1/python_http_client/setup.py +++ b/backend/api/v1beta1/python_http_client/setup.py @@ -13,7 +13,7 @@ from setuptools import setup, find_packages # noqa: H301 NAME = "kfp-server-api" -VERSION = "2.5.0" +VERSION = "2.14.3" # To install the library, run the following # # python setup.py install diff --git a/backend/api/v1beta1/python_http_client/test/test_api_report_run_metrics_request.py b/backend/api/v1beta1/python_http_client/test/test_api_report_run_metrics_request.py deleted file mode 100644 index 5412c1ace2f..00000000000 --- a/backend/api/v1beta1/python_http_client/test/test_api_report_run_metrics_request.py +++ /dev/null @@ -1,59 +0,0 @@ -# coding: utf-8 - -""" - Kubeflow Pipelines API - - This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
- - Contact: kubeflow-pipelines@google.com - Generated by: https://openapi-generator.tech -""" - - -from __future__ import absolute_import - -import unittest -import datetime - -import kfp_server_api -from kfp_server_api.models.api_report_run_metrics_request import ApiReportRunMetricsRequest # noqa: E501 -from kfp_server_api.rest import ApiException - -class TestApiReportRunMetricsRequest(unittest.TestCase): - """ApiReportRunMetricsRequest unit test stubs""" - - def setUp(self): - pass - - def tearDown(self): - pass - - def make_instance(self, include_optional): - """Test ApiReportRunMetricsRequest - include_option is a boolean, when False only required - params are included, when True both required and - optional params are included """ - # model = kfp_server_api.models.api_report_run_metrics_request.ApiReportRunMetricsRequest() # noqa: E501 - if include_optional : - return ApiReportRunMetricsRequest( - run_id = '0', - metrics = [ - kfp_server_api.models.api_run_metric.apiRunMetric( - name = '0', - node_id = '0', - number_value = 1.337, - format = 'UNSPECIFIED', ) - ] - ) - else : - return ApiReportRunMetricsRequest( - ) - - def testApiReportRunMetricsRequest(self): - """Test ApiReportRunMetricsRequest""" - inst_req_only = self.make_instance(include_optional=False) - inst_req_and_optional = self.make_instance(include_optional=True) - - -if __name__ == '__main__': - unittest.main() diff --git a/backend/api/v1beta1/python_http_client/test/test_api_status.py b/backend/api/v1beta1/python_http_client/test/test_api_status.py index 64b94863a38..52e265322d8 100644 --- a/backend/api/v1beta1/python_http_client/test/test_api_status.py +++ b/backend/api/v1beta1/python_http_client/test/test_api_status.py @@ -39,9 +39,9 @@ def make_instance(self, include_optional): error = '0', code = 56, details = [ - kfp_server_api.models.protobuf_any.protobufAny( - type_url = '0', - value = 'YQ==', ) + { + 'key' : None + } ] ) else : diff --git a/backend/api/v1beta1/python_http_client/test/test_gatewayruntime_error.py b/backend/api/v1beta1/python_http_client/test/test_gatewayruntime_error.py deleted file mode 100644 index df62837dfb7..00000000000 --- a/backend/api/v1beta1/python_http_client/test/test_gatewayruntime_error.py +++ /dev/null @@ -1,59 +0,0 @@ -# coding: utf-8 - -""" - Kubeflow Pipelines API - - This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
- - Contact: kubeflow-pipelines@google.com - Generated by: https://openapi-generator.tech -""" - - -from __future__ import absolute_import - -import unittest -import datetime - -import kfp_server_api -from kfp_server_api.models.gatewayruntime_error import GatewayruntimeError # noqa: E501 -from kfp_server_api.rest import ApiException - -class TestGatewayruntimeError(unittest.TestCase): - """GatewayruntimeError unit test stubs""" - - def setUp(self): - pass - - def tearDown(self): - pass - - def make_instance(self, include_optional): - """Test GatewayruntimeError - include_option is a boolean, when False only required - params are included, when True both required and - optional params are included """ - # model = kfp_server_api.models.gatewayruntime_error.GatewayruntimeError() # noqa: E501 - if include_optional : - return GatewayruntimeError( - error = '0', - code = 56, - message = '0', - details = [ - kfp_server_api.models.protobuf_any.protobufAny( - type_url = '0', - value = 'YQ==', ) - ] - ) - else : - return GatewayruntimeError( - ) - - def testGatewayruntimeError(self): - """Test GatewayruntimeError""" - inst_req_only = self.make_instance(include_optional=False) - inst_req_and_optional = self.make_instance(include_optional=True) - - -if __name__ == '__main__': - unittest.main() diff --git a/backend/api/v1beta1/python_http_client/test/test_googlerpc_status.py b/backend/api/v1beta1/python_http_client/test/test_googlerpc_status.py new file mode 100644 index 00000000000..3c682191be8 --- /dev/null +++ b/backend/api/v1beta1/python_http_client/test/test_googlerpc_status.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.googlerpc_status import GooglerpcStatus # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestGooglerpcStatus(unittest.TestCase): + """GooglerpcStatus unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test GooglerpcStatus + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.googlerpc_status.GooglerpcStatus() # noqa: E501 + if include_optional : + return GooglerpcStatus( + code = 56, + message = '0', + details = [ + { + 'key' : None + } + ] + ) + else : + return GooglerpcStatus( + ) + + def testGooglerpcStatus(self): + """Test GooglerpcStatus""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/v1beta1/python_http_client/test/test_protobuf_any.py b/backend/api/v1beta1/python_http_client/test/test_protobuf_any.py index 1e50cb32660..e69ba489391 100644 --- a/backend/api/v1beta1/python_http_client/test/test_protobuf_any.py +++ b/backend/api/v1beta1/python_http_client/test/test_protobuf_any.py @@ -36,6 +36,7 @@ def make_instance(self, include_optional): # model = kfp_server_api.models.protobuf_any.ProtobufAny() # noqa: E501 if include_optional : return ProtobufAny( + type = '0', type_url = '0', value = 'YQ==' ) diff --git a/backend/api/v1beta1/python_http_client/test/test_run_service_report_run_metrics_v1_body.py b/backend/api/v1beta1/python_http_client/test/test_run_service_report_run_metrics_v1_body.py new file mode 100644 index 00000000000..a2134c48d0a --- /dev/null +++ b/backend/api/v1beta1/python_http_client/test/test_run_service_report_run_metrics_v1_body.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.run_service_report_run_metrics_v1_body import RunServiceReportRunMetricsV1Body # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestRunServiceReportRunMetricsV1Body(unittest.TestCase): + """RunServiceReportRunMetricsV1Body unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test RunServiceReportRunMetricsV1Body + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.run_service_report_run_metrics_v1_body.RunServiceReportRunMetricsV1Body() # noqa: E501 + if include_optional : + return RunServiceReportRunMetricsV1Body( + metrics = [ + kfp_server_api.models.api_run_metric.apiRunMetric( + name = '0', + node_id = '0', + number_value = 1.337, + format = 'UNSPECIFIED', ) + ] + ) + else : + return RunServiceReportRunMetricsV1Body( + ) + + def testRunServiceReportRunMetricsV1Body(self): + """Test RunServiceReportRunMetricsV1Body""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/v1beta1/run.proto b/backend/api/v1beta1/run.proto index b6674c3a54f..bb0f5616cdd 100644 --- a/backend/api/v1beta1/run.proto +++ b/backend/api/v1beta1/run.proto @@ -17,15 +17,14 @@ syntax = "proto3"; option go_package = "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client"; package api; -import "backend/api/v1beta1/error.proto"; import "google/protobuf/empty.proto"; import "google/api/annotations.proto"; import "google/protobuf/timestamp.proto"; import "backend/api/v1beta1/pipeline_spec.proto"; import "backend/api/v1beta1/resource_reference.proto"; -import "protoc-gen-swagger/options/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; -option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { schemes: [1, 2], // http + https responses: { key: "default"; diff --git a/backend/api/v1beta1/swagger/auth.swagger.json b/backend/api/v1beta1/swagger/auth.swagger.json index bf5093480b8..0d25c0b4ad9 100644 --- a/backend/api/v1beta1/swagger/auth.swagger.json +++ b/backend/api/v1beta1/swagger/auth.swagger.json @@ -4,6 +4,11 @@ "title": "backend/api/v1beta1/auth.proto", "version": "version not set" }, + "tags": [ + { + "name": "AuthService" + } + ], "schemes": [ "http", "https" @@ -22,25 +27,28 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { "name": "namespace", + "description": "Namespace the resource belongs to.", "in": "query", "required": false, "type": "string" }, { "name": "resources", + "description": "Resource type asking for authorization.", "in": "query", "required": false, "type": "string", @@ -52,6 +60,7 @@ }, { "name": "verb", + "description": "Verb on the resource asking for authorization.", "in": "query", "required": false, "type": "string", @@ -91,12 +100,9 @@ "default": 
"UNASSIGNED_VERB", "description": "Type of verbs that act on the resources." }, - "gatewayruntimeError": { + "googlerpcStatus": { "type": "object", "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", "format": "int32" @@ -107,6 +113,7 @@ "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" } } @@ -115,17 +122,13 @@ "protobufAny": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { + "@type": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
} }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } }, "securityDefinitions": { diff --git a/backend/api/v1beta1/swagger/error.swagger.json b/backend/api/v1beta1/swagger/error.swagger.json index f2fde831394..afcd562d76e 100644 --- a/backend/api/v1beta1/swagger/error.swagger.json +++ b/backend/api/v1beta1/swagger/error.swagger.json @@ -12,12 +12,9 @@ ], "paths": {}, "definitions": { - "gatewayruntimeError": { + "googlerpcStatus": { "type": "object", "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", "format": "int32" @@ -28,6 +25,7 @@ "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" } } @@ -36,17 +34,13 @@ "protobufAny": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). 
The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { + "@type": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
} }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } } } diff --git a/backend/api/v1beta1/swagger/experiment.swagger.json b/backend/api/v1beta1/swagger/experiment.swagger.json index ce7b816af72..43165af6d3f 100644 --- a/backend/api/v1beta1/swagger/experiment.swagger.json +++ b/backend/api/v1beta1/swagger/experiment.swagger.json @@ -4,6 +4,11 @@ "title": "backend/api/v1beta1/experiment.proto", "version": "version not set" }, + "tags": [ + { + "name": "ExperimentService" + } + ], "schemes": [ "http", "https" @@ -29,7 +34,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -104,13 +109,13 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "experiment", "description": "The experiment to be created.", "in": "body", "required": true, @@ -138,7 +143,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -162,13 +167,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -194,13 +200,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -226,13 +233,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -275,6 +283,7 @@ "resource_references": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiResourceReference" }, "description": "Optional input field. Specify which resource this run belongs to.\nFor Experiment, the only valid resource reference is a single Namespace." @@ -300,6 +309,7 @@ "experiments": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiExperiment" }, "description": "A list of experiments returned." @@ -365,12 +375,9 @@ ], "default": "UNKNOWN_RESOURCE_TYPE" }, - "gatewayruntimeError": { + "googlerpcStatus": { "type": "object", "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", "format": "int32" @@ -381,6 +388,7 @@ "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" } } @@ -389,17 +397,13 @@ "protobufAny": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). 
The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { + "@type": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
} }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } }, "securityDefinitions": { diff --git a/backend/api/v1beta1/swagger/filter.swagger.json b/backend/api/v1beta1/swagger/filter.swagger.json index 81e1ef337ac..8d1f8791f47 100644 --- a/backend/api/v1beta1/swagger/filter.swagger.json +++ b/backend/api/v1beta1/swagger/filter.swagger.json @@ -4,6 +4,11 @@ "title": "backend/api/v1beta1/filter.proto", "version": "version not set" }, + "tags": [ + { + "name": "DummyFilterService" + } + ], "consumes": [ "application/json" ], @@ -34,6 +39,7 @@ "predicates": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiPredicate" }, "description": "All predicates are AND-ed when this filter is applied." 
@@ -114,12 +120,9 @@ } } }, - "gatewayruntimeError": { + "googlerpcStatus": { "type": "object", "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", "format": "int32" @@ -130,6 +133,7 @@ "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" } } @@ -138,17 +142,13 @@ "protobufAny": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { + "@type": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
} }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } } } diff --git a/backend/api/v1beta1/swagger/healthz.swagger.json b/backend/api/v1beta1/swagger/healthz.swagger.json index b462d3d0b09..502ac02cc0f 100644 --- a/backend/api/v1beta1/swagger/healthz.swagger.json +++ b/backend/api/v1beta1/swagger/healthz.swagger.json @@ -4,6 +4,11 @@ "title": "backend/api/v1beta1/healthz.proto", "version": "version not set" }, + "tags": [ + { + "name": "HealthzService" + } + ], "schemes": [ "http", "https" @@ -29,7 +34,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -49,12 +54,9 @@ } } }, - "gatewayruntimeError": { + "googlerpcStatus": { "type": "object", "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", "format": "int32" @@ -65,6 +67,7 @@ "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" } } @@ -73,17 +76,13 @@ "protobufAny": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. 
This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { + "@type": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
} }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } }, "securityDefinitions": { diff --git a/backend/api/v1beta1/swagger/job.swagger.json b/backend/api/v1beta1/swagger/job.swagger.json index f2040b45b3e..46ef040a576 100644 --- a/backend/api/v1beta1/swagger/job.swagger.json +++ b/backend/api/v1beta1/swagger/job.swagger.json @@ -4,6 +4,11 @@ "title": "backend/api/v1beta1/job.proto", "version": "version not set" }, + "tags": [ + { + "name": "JobService" + } + ], "schemes": [ "http", "https" @@ -29,7 +34,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -104,13 +109,13 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "job", "description": "The job to be created", "in": "body", "required": true, @@ -138,7 +143,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -162,13 +167,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -194,13 +200,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -226,13 +233,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -267,9 +275,7 @@ "properties": { "parameters": { "type": "object", - "additionalProperties": { - "type": "object" - }, + "additionalProperties": {}, "description": "The runtime parameters of the PipelineSpec. The parameters will be\nused to replace the placeholders\nat runtime." }, "pipeline_root": { @@ -321,6 +327,7 @@ "resource_references": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiResourceReference" }, "description": "Optional input field. Specify which resource this job belongs to." @@ -375,6 +382,7 @@ "jobs": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiJob" }, "description": "A list of jobs returned." @@ -444,6 +452,7 @@ "parameters": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiParameter" }, "title": "The parameter user provide to inject to the pipeline JSON.\nIf a default value of a parameter exist in the JSON,\nthe value user provided here will replace. V1 only" @@ -516,12 +525,9 @@ }, "description": "Trigger defines what starts a pipeline run." 
}, - "gatewayruntimeError": { + "googlerpcStatus": { "type": "object", "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", "format": "int32" @@ -532,6 +538,7 @@ "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" } } @@ -540,17 +547,13 @@ "protobufAny": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { + "@type": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
} }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" }, "protobufNullValue": { "type": "string", @@ -558,7 +561,7 @@ "NULL_VALUE" ], "default": "NULL_VALUE", - "description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\n The JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value." + "description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\nThe JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value." } }, "securityDefinitions": { diff --git a/backend/api/v1beta1/swagger/kfp_api_single_file.swagger.json b/backend/api/v1beta1/swagger/kfp_api_single_file.swagger.json index 697b76bfb97..7cacd095a7d 100644 --- a/backend/api/v1beta1/swagger/kfp_api_single_file.swagger.json +++ b/backend/api/v1beta1/swagger/kfp_api_single_file.swagger.json @@ -2,7 +2,7 @@ "swagger": "2.0", "info": { "title": "Kubeflow Pipelines API", - "version": "2.5.0", + "version": "2.14.3", "description": "This file contains REST API specification for Kubeflow Pipelines. 
The file is autogenerated from the swagger definition.", "contact": { "name": "google", @@ -14,6 +14,11 @@ "url": "https://raw.githubusercontent.com/kubeflow/pipelines/master/LICENSE" } }, + "tags": [ + { + "name": "HealthzService" + } + ], "schemes": [ "http", "https" @@ -39,7 +44,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -114,13 +119,13 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "experiment", "description": "The experiment to be created.", "in": "body", "required": true, @@ -148,7 +153,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -172,13 +177,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -204,13 +210,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -236,13 +243,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -274,7 +282,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -349,13 +357,13 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "run", "in": "body", "required": true, "schema": { @@ -376,13 +384,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -408,13 +417,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -440,13 +450,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -478,7 +489,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -510,7 +521,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": 
"#/definitions/googlerpcStatus" } } }, @@ -550,13 +561,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -582,13 +594,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -620,7 +633,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -637,7 +650,7 @@ "in": "body", "required": true, "schema": { - "$ref": "#/definitions/apiReportRunMetricsRequest" + "$ref": "#/definitions/RunServiceReportRunMetricsV1Body" } } ], @@ -660,7 +673,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -735,13 +748,13 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "job", "description": "The job to be created", "in": "body", "required": true, @@ -769,7 +782,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -793,13 +806,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -825,13 +839,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -857,13 +872,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -895,7 +911,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -934,7 +950,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1009,13 +1025,13 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "version", "description": "ResourceReference inside PipelineVersion specifies the pipeline that this\nversion belongs to.", "in": "body", "required": true, @@ -1043,7 +1059,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1067,13 +1083,14 @@ 
"200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1105,7 +1122,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1137,7 +1154,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1212,13 +1229,13 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "pipeline", "in": "body", "required": true, "schema": { @@ -1245,7 +1262,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1269,13 +1286,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1307,7 +1325,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1333,13 +1351,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1494,7 +1513,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1528,6 +1547,7 @@ "resource_references": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiResourceReference" }, "description": "Optional input field. Specify which resource this run belongs to.\nFor Experiment, the only valid resource reference is a single Namespace." @@ -1553,6 +1573,7 @@ "experiments": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiExperiment" }, "description": "A list of experiments returned." @@ -1618,12 +1639,9 @@ ], "default": "UNKNOWN_RESOURCE_TYPE" }, - "gatewayruntimeError": { + "googlerpcStatus": { "type": "object", "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", "format": "int32" @@ -1634,6 +1652,7 @@ "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" } } @@ -1642,9 +1661,13 @@ "protobufAny": { "type": "object", "properties": { + "@type": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. 
However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, "type_url": { "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. 
(Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." }, "value": { "type": "string", @@ -1652,16 +1675,15 @@ "description": "Must be a valid serialized protocol buffer of the above specified type." } }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" }, "PipelineSpecRuntimeConfig": { "type": "object", "properties": { "parameters": { "type": "object", - "additionalProperties": { - "type": "object" - }, + "additionalProperties": {}, "description": "The runtime parameters of the PipelineSpec. The parameters will be\nused to replace the placeholders\nat runtime." }, "pipeline_root": { @@ -1714,12 +1736,26 @@ "default": "UNSPECIFIED", "description": " - UNSPECIFIED: Default value if not present.\n - RAW: Display value as its raw format.\n - PERCENTAGE: Display value in percentage format." }, + "RunServiceReportRunMetricsV1Body": { + "type": "object", + "properties": { + "metrics": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/apiRunMetric" + }, + "description": "List of metrics to report." 
+ } + } + }, "apiListRunsResponse": { "type": "object", "properties": { "runs": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiRun" } }, @@ -1780,6 +1816,7 @@ "parameters": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiParameter" }, "title": "The parameter user provide to inject to the pipeline JSON.\nIf a default value of a parameter exist in the JSON,\nthe value user provided here will replace. V1 only" @@ -1800,28 +1837,13 @@ } } }, - "apiReportRunMetricsRequest": { - "type": "object", - "properties": { - "run_id": { - "type": "string", - "description": "Required. The parent run ID of the metric." - }, - "metrics": { - "type": "array", - "items": { - "$ref": "#/definitions/apiRunMetric" - }, - "description": "List of metrics to report." - } - } - }, "apiReportRunMetricsResponse": { "type": "object", "properties": { "results": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/ReportRunMetricsResponseReportRunMetricResult" } } @@ -1853,6 +1875,7 @@ "resource_references": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiResourceReference" }, "description": "Optional input field. Specify which resource this run belongs to.\nWhen creating a run from a particular pipeline version, the pipeline\nversion can be specified here." @@ -1887,6 +1910,7 @@ "metrics": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiRunMetric" }, "description": "Output. The metrics of the run. The metrics are reported by ReportMetrics\nAPI." @@ -1940,7 +1964,7 @@ "NULL_VALUE" ], "default": "NULL_VALUE", - "description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\n The JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value." + "description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\nThe JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value." }, "JobMode": { "type": "string", @@ -1994,6 +2018,7 @@ "resource_references": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiResourceReference" }, "description": "Optional input field. Specify which resource this job belongs to." @@ -2048,6 +2073,7 @@ "jobs": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiJob" }, "description": "A list of jobs returned." @@ -2111,6 +2137,7 @@ "versions": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiPipelineVersion" } }, @@ -2131,6 +2158,7 @@ "pipelines": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiPipeline" } }, @@ -2168,6 +2196,7 @@ "parameters": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiParameter" }, "description": "Output. The input parameters for this pipeline.\nTODO(jingzhang36): replace this parameters field with the parameters field\ninside PipelineVersion when all usage of the former has been changed to use\nthe latter." @@ -2188,6 +2217,7 @@ "resource_references": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiResourceReference" }, "description": "Input field. Specify which resource this pipeline belongs to.\nFor Pipeline, the only valid resource reference is a single Namespace." @@ -2213,6 +2243,7 @@ "parameters": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiParameter" }, "description": "Output. 
The input parameters for this pipeline." @@ -2228,6 +2259,7 @@ "resource_references": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiResourceReference" }, "description": "Input. Required. E.g., specify which pipeline this pipeline version belongs\nto." diff --git a/backend/api/v1beta1/swagger/parameter.swagger.json b/backend/api/v1beta1/swagger/parameter.swagger.json index a96ffbcb7e5..44223c92cdb 100644 --- a/backend/api/v1beta1/swagger/parameter.swagger.json +++ b/backend/api/v1beta1/swagger/parameter.swagger.json @@ -12,12 +12,9 @@ ], "paths": {}, "definitions": { - "gatewayruntimeError": { + "googlerpcStatus": { "type": "object", "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", "format": "int32" @@ -28,6 +25,7 @@ "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" } } @@ -36,17 +34,13 @@ "protobufAny": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { + "@type": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. 
Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." } }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } } } diff --git a/backend/api/v1beta1/swagger/pipeline.swagger.json b/backend/api/v1beta1/swagger/pipeline.swagger.json index 941461c94c1..b22b0860cd8 100644 --- a/backend/api/v1beta1/swagger/pipeline.swagger.json +++ b/backend/api/v1beta1/swagger/pipeline.swagger.json @@ -4,6 +4,11 @@ "title": "backend/api/v1beta1/pipeline.proto", "version": "version not set" }, + "tags": [ + { + "name": "PipelineService" + } + ], "schemes": [ "http", "https" @@ -29,7 +34,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -68,7 +73,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -143,13 +148,13 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "version", "description": "ResourceReference inside PipelineVersion specifies the pipeline that this\nversion belongs to.", "in": "body", "required": true, @@ -177,7 +182,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -201,13 +206,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -239,7 +245,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -271,7 +277,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -346,13 +352,13 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "pipeline", "in": "body", "required": true, "schema": { @@ -379,7 +385,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -403,13 +409,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -441,7 +448,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -467,13 +474,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -515,6 +523,7 @@ "versions": { "type": "array", "items": { + "type": 
"object", "$ref": "#/definitions/apiPipelineVersion" } }, @@ -535,6 +544,7 @@ "pipelines": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiPipeline" } }, @@ -583,6 +593,7 @@ "parameters": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiParameter" }, "description": "Output. The input parameters for this pipeline.\nTODO(jingzhang36): replace this parameters field with the parameters field\ninside PipelineVersion when all usage of the former has been changed to use\nthe latter." @@ -603,6 +614,7 @@ "resource_references": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiResourceReference" }, "description": "Input field. Specify which resource this pipeline belongs to.\nFor Pipeline, the only valid resource reference is a single Namespace." @@ -628,6 +640,7 @@ "parameters": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiParameter" }, "description": "Output. The input parameters for this pipeline." @@ -643,6 +656,7 @@ "resource_references": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiResourceReference" }, "description": "Input field. Specify which resource this pipeline version belongs to.\nFor Experiment, the only valid resource reference is a single Namespace." @@ -712,12 +726,9 @@ } } }, - "gatewayruntimeError": { + "googlerpcStatus": { "type": "object", "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", "format": "int32" @@ -728,6 +739,7 @@ "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" } } @@ -736,17 +748,13 @@ "protobufAny": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { + "@type": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. 
The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." } }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } }, "securityDefinitions": { diff --git a/backend/api/v1beta1/swagger/pipeline_spec.swagger.json b/backend/api/v1beta1/swagger/pipeline_spec.swagger.json index 4eba2f9f1c8..370e8f9011f 100644 --- a/backend/api/v1beta1/swagger/pipeline_spec.swagger.json +++ b/backend/api/v1beta1/swagger/pipeline_spec.swagger.json @@ -12,12 +12,9 @@ ], "paths": {}, "definitions": { - "gatewayruntimeError": { + "googlerpcStatus": { "type": "object", "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", "format": "int32" @@ -28,6 +25,7 @@ "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" } } @@ -36,17 +34,13 @@ "protobufAny": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). 
The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { + "@type": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
} }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } } } diff --git a/backend/api/v1beta1/swagger/report.swagger.json b/backend/api/v1beta1/swagger/report.swagger.json index a0aebc85e68..a9cf8175710 100644 --- a/backend/api/v1beta1/swagger/report.swagger.json +++ b/backend/api/v1beta1/swagger/report.swagger.json @@ -4,6 +4,11 @@ "title": "backend/api/v1beta1/report.proto", "version": "version not set" }, + "tags": [ + { + "name": "ReportService" + } + ], "consumes": [ "application/json" ], @@ -18,19 +23,20 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "scheduled_workflow", "description": "ScheduledWorkflow a ScheduledWorkflow resource marshalled into a json string.", "in": "body", "required": true, @@ -51,19 +57,20 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "workflow", "description": "Workflow is a workflow custom resource marshalled into a json string.", "in": "body", "required": true, @@ -79,12 +86,9 @@ } }, "definitions": { - "gatewayruntimeError": { + "googlerpcStatus": { "type": "object", "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", "format": "int32" @@ -95,6 +99,7 @@ "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" } } @@ -103,17 +108,13 @@ "protobufAny": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { + "@type": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." } }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } } } diff --git a/backend/api/v1beta1/swagger/resource_reference.swagger.json b/backend/api/v1beta1/swagger/resource_reference.swagger.json index 5419f292ea0..2d0ae3aa03e 100644 --- a/backend/api/v1beta1/swagger/resource_reference.swagger.json +++ b/backend/api/v1beta1/swagger/resource_reference.swagger.json @@ -12,12 +12,9 @@ ], "paths": {}, "definitions": { - "gatewayruntimeError": { + "googlerpcStatus": { "type": "object", "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", "format": "int32" @@ -28,6 +25,7 @@ "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" } } @@ -36,17 +34,13 @@ "protobufAny": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). 
The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { + "@type": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
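The `protobufAny` definition above now matches protojson's wire form: a single `@type` discriminator plus the message's own JSON fields (or the well-known `value` form), instead of the old `type_url`/`value` byte pair. A minimal Go sketch, not part of this PR, of how that shape is produced; the Status contents are illustrative, while the imports are the same `google.golang.org/genproto/googleapis/rpc/status` and protobuf-go packages the generated code in this PR already uses:

package main

import (
	"fmt"
	"time"

	statuspb "google.golang.org/genproto/googleapis/rpc/status"
	"google.golang.org/protobuf/encoding/protojson"
	"google.golang.org/protobuf/types/known/anypb"
	"google.golang.org/protobuf/types/known/durationpb"
)

func main() {
	// Pack an arbitrary message (a Duration here) into an Any.
	detail, err := anypb.New(durationpb.New(1212 * time.Millisecond))
	if err != nil {
		panic(err)
	}

	// google.rpc.Status is what the swagger now exposes as googlerpcStatus:
	// code, message, and a list of Any details.
	st := &statuspb.Status{
		Code:    3, // INVALID_ARGUMENT
		Message: "pipeline spec is invalid",
		Details: []*anypb.Any{detail},
	}

	// protojson renders each Any with an "@type" field plus, for well-known
	// types like Duration, a "value" field, matching the protobufAny
	// definition in this swagger file.
	out, err := protojson.Marshal(st)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
	// e.g. {"code":3,"message":"pipeline spec is invalid",
	//       "details":[{"@type":"type.googleapis.com/google.protobuf.Duration","value":"1.212s"}]}
}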
} }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } } } diff --git a/backend/api/v1beta1/swagger/run.swagger.json b/backend/api/v1beta1/swagger/run.swagger.json index a107ca6e0ad..6a04e552e14 100644 --- a/backend/api/v1beta1/swagger/run.swagger.json +++ b/backend/api/v1beta1/swagger/run.swagger.json @@ -4,6 +4,11 @@ "title": "backend/api/v1beta1/run.proto", "version": "version not set" }, + "tags": [ + { + "name": "RunService" + } + ], "schemes": [ "http", "https" @@ -29,7 +34,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -104,13 +109,13 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "run", "in": "body", "required": true, "schema": { @@ -131,13 +136,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -163,13 +169,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -195,13 +202,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -233,7 +241,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -265,7 +273,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -305,13 +313,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -337,13 +346,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -375,7 +385,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -392,7 +402,7 @@ "in": "body", "required": true, "schema": { - "$ref": "#/definitions/apiReportRunMetricsRequest" + "$ref": "#/definitions/RunServiceReportRunMetricsV1Body" } } ], @@ -408,9 +418,7 @@ "properties": { "parameters": { "type": "object", - "additionalProperties": { - "type": "object" - }, + "additionalProperties": {}, "description": "The runtime parameters of the PipelineSpec. The parameters will be\nused to replace the placeholders\nat runtime." 
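Every run endpoint above now documents `googlerpcStatus` (code, message, details) as its default error response instead of `gatewayruntimeError`, which carried an extra top-level `error` string. A client-side sketch, not part of this PR, of decoding that shape; the payload and struct name are illustrative, the field layout mirrors the swagger definition:

package main

import (
	"encoding/json"
	"fmt"
)

// googlerpcStatus mirrors the definition in this swagger file.
type googlerpcStatus struct {
	Code    int32             `json:"code"`
	Message string            `json:"message"`
	Details []json.RawMessage `json:"details"` // each entry is a protobufAny with an "@type" field
}

func main() {
	// Example payload in the shape the v1beta1 endpoints now document by default.
	payload := []byte(`{"code":5,"message":"run not found","details":[]}`)

	var st googlerpcStatus
	if err := json.Unmarshal(payload, &st); err != nil {
		panic(err)
	}
	fmt.Printf("code=%d message=%q details=%d\n", st.Code, st.Message, len(st.Details))
}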
}, "pipeline_root": { @@ -463,12 +471,26 @@ "default": "UNSPECIFIED", "description": " - UNSPECIFIED: Default value if not present.\n - RAW: Display value as its raw format.\n - PERCENTAGE: Display value in percentage format." }, + "RunServiceReportRunMetricsV1Body": { + "type": "object", + "properties": { + "metrics": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/apiRunMetric" + }, + "description": "List of metrics to report." + } + } + }, "apiListRunsResponse": { "type": "object", "properties": { "runs": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiRun" } }, @@ -529,6 +551,7 @@ "parameters": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiParameter" }, "title": "The parameter user provide to inject to the pipeline JSON.\nIf a default value of a parameter exist in the JSON,\nthe value user provided here will replace. V1 only" @@ -558,28 +581,13 @@ ], "default": "UNKNOWN_RELATIONSHIP" }, - "apiReportRunMetricsRequest": { - "type": "object", - "properties": { - "run_id": { - "type": "string", - "description": "Required. The parent run ID of the metric." - }, - "metrics": { - "type": "array", - "items": { - "$ref": "#/definitions/apiRunMetric" - }, - "description": "List of metrics to report." - } - } - }, "apiReportRunMetricsResponse": { "type": "object", "properties": { "results": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/ReportRunMetricsResponseReportRunMetricResult" } } @@ -652,6 +660,7 @@ "resource_references": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiResourceReference" }, "description": "Optional input field. Specify which resource this run belongs to.\nWhen creating a run from a particular pipeline version, the pipeline\nversion can be specified here." @@ -686,6 +695,7 @@ "metrics": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiRunMetric" }, "description": "Output. The metrics of the run. The metrics are reported by ReportMetrics\nAPI." @@ -733,12 +743,9 @@ ], "default": "STORAGESTATE_AVAILABLE" }, - "gatewayruntimeError": { + "googlerpcStatus": { "type": "object", "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", "format": "int32" @@ -749,6 +756,7 @@ "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" } } @@ -757,17 +765,13 @@ "protobufAny": { "type": "object", "properties": { - "type_url": { + "@type": { "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. 
Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." } }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" }, "protobufNullValue": { "type": "string", @@ -775,7 +779,7 @@ "NULL_VALUE" ], "default": "NULL_VALUE", - "description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\n The JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value." + "description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\nThe JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value." 
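The ReportRunMetricsV1 body is reshaped in the hunks above: the removed `apiReportRunMetricsRequest` carried the parent `run_id` inside the body, while the new `RunServiceReportRunMetricsV1Body` keeps only the metrics list, with the run ID presumably supplied elsewhere (e.g. the request path, which is not part of this hunk). A hedged sketch of the new body shape; `apiRunMetric`'s fields are not reproduced in this section, so raw JSON stands in for each entry:

package main

import (
	"encoding/json"
	"fmt"
)

type reportRunMetricsV1Body struct {
	// Entries follow #/definitions/apiRunMetric.
	Metrics []json.RawMessage `json:"metrics"`
}

func main() {
	body := reportRunMetricsV1Body{
		Metrics: []json.RawMessage{json.RawMessage(`{}`)}, // placeholder apiRunMetric
	}
	out, err := json.Marshal(body)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"metrics":[{}]}
}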
} }, "securityDefinitions": { diff --git a/backend/api/v1beta1/swagger/task.swagger.json b/backend/api/v1beta1/swagger/task.swagger.json index 441858f57fd..f9310ebca5a 100644 --- a/backend/api/v1beta1/swagger/task.swagger.json +++ b/backend/api/v1beta1/swagger/task.swagger.json @@ -4,6 +4,11 @@ "title": "backend/api/v1beta1/task.proto", "version": "version not set" }, + "tags": [ + { + "name": "TaskService" + } + ], "consumes": [ "application/json" ], @@ -25,7 +30,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -100,13 +105,13 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "task", "in": "body", "required": true, "schema": { @@ -127,6 +132,7 @@ "tasks": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/apiTask" }, "description": "A list of tasks returned." @@ -206,12 +212,9 @@ } } }, - "gatewayruntimeError": { + "googlerpcStatus": { "type": "object", "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", "format": "int32" @@ -222,6 +225,7 @@ "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" } } @@ -230,17 +234,13 @@ "protobufAny": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { + "@type": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. 
However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." } }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } } } diff --git a/backend/api/v1beta1/swagger/visualization.swagger.json b/backend/api/v1beta1/swagger/visualization.swagger.json index 2f8c41fd381..1eb1d6ca50e 100644 --- a/backend/api/v1beta1/swagger/visualization.swagger.json +++ b/backend/api/v1beta1/swagger/visualization.swagger.json @@ -4,6 +4,11 @@ "title": "backend/api/v1beta1/visualization.proto", "version": "version not set" }, + "tags": [ + { + "name": "VisualizationService" + } + ], "schemes": [ "http", "https" @@ -28,7 +33,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/gatewayruntimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -40,7 +45,7 @@ "type": "string" }, { - "name": "body", + "name": "visualization", "in": "body", "required": true, "schema": { @@ -91,12 +96,9 @@ "default": "ROC_CURVE", "description": "Type of visualization to be generated.\nThis is required when creating the pipeline through CreateVisualization\nAPI." 
}, - "gatewayruntimeError": { + "googlerpcStatus": { "type": "object", "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", "format": "int32" @@ -107,6 +109,7 @@ "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" } } @@ -115,17 +118,13 @@ "protobufAny": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { + "@type": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
} }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } }, "securityDefinitions": { diff --git a/backend/api/v1beta1/visualization.proto b/backend/api/v1beta1/visualization.proto index 168f84f1cf3..56053ae0d02 100644 --- a/backend/api/v1beta1/visualization.proto +++ b/backend/api/v1beta1/visualization.proto @@ -18,10 +18,9 @@ option go_package = "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client package api; import "google/api/annotations.proto"; -import "backend/api/v1beta1/error.proto"; -import "protoc-gen-swagger/options/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; -option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { schemes: [1, 2], // http + https responses: { key: "default"; diff --git a/backend/api/v2beta1/auth.proto b/backend/api/v2beta1/auth.proto index 4d6e0ddc2f9..e119f7c3faf 100644 --- a/backend/api/v2beta1/auth.proto +++ b/backend/api/v2beta1/auth.proto @@ -19,10 +19,9 @@ package kubeflow.pipelines.backend.api.v2beta1; import "google/api/annotations.proto"; import "google/protobuf/empty.proto"; -import "protoc-gen-swagger/options/annotations.proto"; -import "google/rpc/status.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; -option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { schemes: [1, 2], // http + https responses: { key: "default"; diff --git a/backend/api/v2beta1/experiment.proto b/backend/api/v2beta1/experiment.proto index 9e4a13f5007..bbf4033d465 100644 --- a/backend/api/v2beta1/experiment.proto +++ b/backend/api/v2beta1/experiment.proto @@ -20,10 +20,10 @@ package kubeflow.pipelines.backend.api.v2beta1; import "google/api/annotations.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/timestamp.proto"; -import "protoc-gen-swagger/options/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; -option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { - schemes: [1, 2], // http + https +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { + schemes: [1, 2]; // http + https }; service ExperimentService { diff --git a/backend/api/v2beta1/filter.proto b/backend/api/v2beta1/filter.proto index d03c6ca8658..fc8e5737eae 100644 --- a/backend/api/v2beta1/filter.proto +++ b/backend/api/v2beta1/filter.proto @@ -116,17 +116,17 @@ message Predicate { message IntValues { repeated int32 values = 1; } - + // List of strings. message StringValues { repeated string values = 2; } - + // List of long integers. message LongValues { repeated int64 values = 3; } - + // Value for the operation (second argument). oneof value { // Integer. diff --git a/backend/api/v2beta1/go_client/artifacts.pb.go b/backend/api/v2beta1/go_client/artifacts.pb.go index 1889878faba..97e7fb7137b 100644 --- a/backend/api/v2beta1/go_client/artifacts.pb.go +++ b/backend/api/v2beta1/go_client/artifacts.pb.go @@ -14,25 +14,22 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
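The artifacts.pb.go changes that follow are mechanical output of the newer protoc-gen-go: struct internals move to the `protogen:"open.v1"` layout, the raw descriptor becomes a string constant, and the `protoimpl.UnsafeEnabled` exporter branches disappear. A small caller-side sketch, not part of the generated file, illustrating that the public surface stays the same; the program itself is illustrative, while the import path and enum constant follow the go_package option and descriptor shown below:

package main

import (
	"fmt"

	"google.golang.org/protobuf/encoding/protojson"

	go_client "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
)

func main() {
	// Field names, JSON names, and enum constants are unchanged by the
	// regeneration; only the internal protoimpl plumbing differs.
	req := &go_client.GetArtifactRequest{
		ArtifactId: "123",
		View:       go_client.GetArtifactRequest_DOWNLOAD,
	}

	out, err := protojson.Marshal(req)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // e.g. {"artifactId":"123","view":"DOWNLOAD"}
}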
// versions: -// protoc-gen-go v1.33.0 -// protoc v3.17.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v2beta1/artifacts.proto package go_client import ( - context "context" _ "google.golang.org/genproto/googleapis/api/annotations" _ "google.golang.org/genproto/googleapis/api/httpbody" status "google.golang.org/genproto/googleapis/rpc/status" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status1 "google.golang.org/grpc/status" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" timestamppb "google.golang.org/protobuf/types/known/timestamppb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -152,10 +149,7 @@ func (ListArtifactRequest_Field) EnumDescriptor() ([]byte, []int) { } type GetArtifactRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Required. The ID of the artifact to be retrieved. ArtifactId string `protobuf:"bytes,1,opt,name=artifact_id,json=artifactId,proto3" json:"artifact_id,omitempty"` // Optional. Set to "DOWNLOAD" to included a signed URL with @@ -165,16 +159,16 @@ type GetArtifactRequest struct { // the download_url from server responses, thus preventing the // creation of any signed url. // Defaults to BASIC. - View GetArtifactRequest_ArtifactView `protobuf:"varint,2,opt,name=view,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.GetArtifactRequest_ArtifactView" json:"view,omitempty"` + View GetArtifactRequest_ArtifactView `protobuf:"varint,2,opt,name=view,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.GetArtifactRequest_ArtifactView" json:"view,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *GetArtifactRequest) Reset() { *x = GetArtifactRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_artifacts_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_artifacts_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *GetArtifactRequest) String() string { @@ -185,7 +179,7 @@ func (*GetArtifactRequest) ProtoMessage() {} func (x *GetArtifactRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_artifacts_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -217,10 +211,7 @@ func (x *GetArtifactRequest) GetView() GetArtifactRequest_ArtifactView { // Passed onto MLMD ListOperationOptions // https://github.com/kubeflow/pipelines/blob/master/third_party/ml-metadata/ml_metadata/proto/metadata_store.proto#L868 type ListArtifactRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Optional. // Max number of resources to return in the result. A value of zero or less // will result in the default (20). @@ -235,16 +226,16 @@ type ListArtifactRequest struct { // Optional. The next_page_token value returned from a previous List request, if any. NextPageToken string `protobuf:"bytes,4,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` // Required. Namespace of the Artifact's context. 
- Namespace string `protobuf:"bytes,5,opt,name=namespace,proto3" json:"namespace,omitempty"` + Namespace string `protobuf:"bytes,5,opt,name=namespace,proto3" json:"namespace,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListArtifactRequest) Reset() { *x = ListArtifactRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_artifacts_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_artifacts_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListArtifactRequest) String() string { @@ -255,7 +246,7 @@ func (*ListArtifactRequest) ProtoMessage() {} func (x *ListArtifactRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_artifacts_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -306,23 +297,20 @@ func (x *ListArtifactRequest) GetNamespace() string { } type ListArtifactResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // List of retrieved artifacts. Artifacts []*Artifact `protobuf:"bytes,1,rep,name=artifacts,proto3" json:"artifacts,omitempty"` // Token to retrieve the next page of results, or empty if there are none NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListArtifactResponse) Reset() { *x = ListArtifactResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_artifacts_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_artifacts_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListArtifactResponse) String() string { @@ -333,7 +321,7 @@ func (*ListArtifactResponse) ProtoMessage() {} func (x *ListArtifactResponse) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_artifacts_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -363,10 +351,7 @@ func (x *ListArtifactResponse) GetNextPageToken() string { } type Artifact struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Unique Artifact ID. Generated by MLMD. ArtifactId string `protobuf:"bytes,1,opt,name=artifact_id,json=artifactId,proto3" json:"artifact_id,omitempty"` // Storage Provider to which this Artifact is located (e.g. S3, Minio, etc.). @@ -399,16 +384,16 @@ type Artifact struct { Error *status.Status `protobuf:"bytes,11,opt,name=error,proto3" json:"error,omitempty"` // Optional Output. Specifies a signed URL that can be used to // render this Artifact directly from its store. 
- RenderUrl string `protobuf:"bytes,12,opt,name=render_url,json=renderUrl,proto3" json:"render_url,omitempty"` + RenderUrl string `protobuf:"bytes,12,opt,name=render_url,json=renderUrl,proto3" json:"render_url,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Artifact) Reset() { *x = Artifact{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_artifacts_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_artifacts_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Artifact) String() string { @@ -419,7 +404,7 @@ func (*Artifact) ProtoMessage() {} func (x *Artifact) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_artifacts_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -520,139 +505,69 @@ func (x *Artifact) GetRenderUrl() string { var File_backend_api_v2beta1_artifacts_proto protoreflect.FileDescriptor -var file_backend_api_v2beta1_artifacts_proto_rawDesc = []byte{ - 0x0a, 0x23, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x26, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, - 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x1a, 0x1c, 0x67, - 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x19, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x68, 0x74, 0x74, 0x70, 0x62, 0x6f, 0x64, 0x79, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x72, - 0x70, 0x63, 0x2f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, - 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, - 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x22, 0xe6, 0x01, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x61, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x61, 0x72, - 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x49, 0x64, 0x12, 0x5b, 0x0a, 0x04, 0x76, 0x69, 0x65, 0x77, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x47, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, - 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, - 0x47, 0x65, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x56, 0x69, 0x65, 0x77, 0x52, - 0x04, 0x76, 0x69, 0x65, 0x77, 0x22, 0x52, 0x0a, 0x0c, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, - 0x74, 0x56, 0x69, 0x65, 0x77, 0x12, 0x1d, 0x0a, 0x19, 0x41, 0x52, 0x54, 0x49, 0x46, 0x41, 0x43, - 0x54, 0x5f, 0x56, 0x49, 0x45, 0x57, 0x5f, 0x55, 
0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, - 0x45, 0x44, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x42, 0x41, 0x53, 0x49, 0x43, 0x10, 0x01, 0x12, - 0x0c, 0x0a, 0x08, 0x44, 0x4f, 0x57, 0x4e, 0x4c, 0x4f, 0x41, 0x44, 0x10, 0x02, 0x12, 0x0a, 0x0a, - 0x06, 0x52, 0x45, 0x4e, 0x44, 0x45, 0x52, 0x10, 0x03, 0x22, 0xd6, 0x02, 0x0a, 0x13, 0x4c, 0x69, - 0x73, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x26, 0x0a, 0x0f, 0x6d, 0x61, 0x78, 0x5f, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x5f, - 0x73, 0x69, 0x7a, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0d, 0x6d, 0x61, 0x78, 0x52, - 0x65, 0x73, 0x75, 0x6c, 0x74, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x67, 0x0a, 0x0e, 0x6f, 0x72, 0x64, - 0x65, 0x72, 0x5f, 0x62, 0x79, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x0e, 0x32, 0x41, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x41, - 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x46, - 0x69, 0x65, 0x6c, 0x64, 0x52, 0x0c, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x42, 0x79, 0x46, 0x69, 0x65, - 0x6c, 0x64, 0x12, 0x19, 0x0a, 0x08, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x5f, 0x62, 0x79, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x42, 0x79, 0x12, 0x26, 0x0a, - 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, - 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, - 0x61, 0x63, 0x65, 0x22, 0x4d, 0x0a, 0x05, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x15, 0x0a, 0x11, - 0x46, 0x49, 0x45, 0x4c, 0x44, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, - 0x44, 0x10, 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x43, 0x52, 0x45, 0x41, 0x54, 0x45, 0x5f, 0x54, 0x49, - 0x4d, 0x45, 0x10, 0x01, 0x12, 0x14, 0x0a, 0x10, 0x4c, 0x41, 0x53, 0x54, 0x5f, 0x55, 0x50, 0x44, - 0x41, 0x54, 0x45, 0x5f, 0x54, 0x49, 0x4d, 0x45, 0x10, 0x02, 0x12, 0x06, 0x0a, 0x02, 0x49, 0x44, - 0x10, 0x03, 0x22, 0x8e, 0x01, 0x0a, 0x14, 0x4c, 0x69, 0x73, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4e, 0x0a, 0x09, 0x61, - 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x30, - 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, - 0x52, 0x09, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x12, 0x26, 0x0a, 0x0f, 0x6e, - 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, - 0x6b, 0x65, 0x6e, 0x22, 0xde, 0x03, 0x0a, 0x08, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, - 0x12, 0x1f, 0x0a, 0x0b, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, 0x69, 0x64, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x61, 0x72, 0x74, 0x69, 0x66, 
0x61, 0x63, 0x74, 0x49, - 0x64, 0x12, 0x29, 0x0a, 0x10, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x64, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x73, 0x74, 0x6f, - 0x72, 0x61, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x21, 0x0a, 0x0c, - 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x0b, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, - 0x10, 0x0a, 0x03, 0x75, 0x72, 0x69, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, - 0x69, 0x12, 0x21, 0x0a, 0x0c, 0x64, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x75, 0x72, - 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, - 0x64, 0x55, 0x72, 0x6c, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, 0x74, - 0x79, 0x70, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x61, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x61, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0c, - 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x39, 0x0a, 0x0a, - 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, - 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, - 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x42, 0x0a, 0x0f, 0x6c, 0x61, 0x73, 0x74, 0x5f, - 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, - 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0d, 0x6c, 0x61, - 0x73, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x28, 0x0a, 0x05, 0x65, - 0x72, 0x72, 0x6f, 0x72, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x05, - 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x1d, 0x0a, 0x0a, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x5f, - 0x75, 0x72, 0x6c, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x72, 0x65, 0x6e, 0x64, 0x65, - 0x72, 0x55, 0x72, 0x6c, 0x32, 0xec, 0x02, 0x0a, 0x0f, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, - 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0xab, 0x01, 0x0a, 0x0d, 0x4c, 0x69, 0x73, - 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x12, 0x3b, 0x2e, 0x6b, 0x75, 0x62, - 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, - 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, - 0x74, 0x61, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x3c, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, - 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, - 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, - 
0x2e, 0x4c, 0x69, 0x73, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x1f, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x19, 0x12, 0x17, 0x2f, - 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x61, 0x72, 0x74, - 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x12, 0xaa, 0x01, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x41, 0x72, - 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x12, 0x3a, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, - 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, - 0x47, 0x65, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x30, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x41, 0x72, 0x74, 0x69, - 0x66, 0x61, 0x63, 0x74, 0x22, 0x2d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x27, 0x12, 0x25, 0x2f, 0x61, - 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x61, 0x72, 0x74, 0x69, - 0x66, 0x61, 0x63, 0x74, 0x73, 0x2f, 0x7b, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, - 0x69, 0x64, 0x7d, 0x42, 0x3d, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, - 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, - 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x67, 0x6f, 0x5f, 0x63, 0x6c, 0x69, 0x65, - 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v2beta1_artifacts_proto_rawDesc = "" + + "\n" + + "#backend/api/v2beta1/artifacts.proto\x12&kubeflow.pipelines.backend.api.v2beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/httpbody.proto\x1a\x17google/rpc/status.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xe6\x01\n" + + "\x12GetArtifactRequest\x12\x1f\n" + + "\vartifact_id\x18\x01 \x01(\tR\n" + + "artifactId\x12[\n" + + "\x04view\x18\x02 \x01(\x0e2G.kubeflow.pipelines.backend.api.v2beta1.GetArtifactRequest.ArtifactViewR\x04view\"R\n" + + "\fArtifactView\x12\x1d\n" + + "\x19ARTIFACT_VIEW_UNSPECIFIED\x10\x00\x12\t\n" + + "\x05BASIC\x10\x01\x12\f\n" + + "\bDOWNLOAD\x10\x02\x12\n" + + "\n" + + "\x06RENDER\x10\x03\"\xd6\x02\n" + + "\x13ListArtifactRequest\x12&\n" + + "\x0fmax_result_size\x18\x01 \x01(\x05R\rmaxResultSize\x12g\n" + + "\x0eorder_by_field\x18\x02 \x01(\x0e2A.kubeflow.pipelines.backend.api.v2beta1.ListArtifactRequest.FieldR\forderByField\x12\x19\n" + + "\border_by\x18\x03 \x01(\tR\aorderBy\x12&\n" + + "\x0fnext_page_token\x18\x04 \x01(\tR\rnextPageToken\x12\x1c\n" + + "\tnamespace\x18\x05 \x01(\tR\tnamespace\"M\n" + + "\x05Field\x12\x15\n" + + "\x11FIELD_UNSPECIFIED\x10\x00\x12\x0f\n" + + "\vCREATE_TIME\x10\x01\x12\x14\n" + + "\x10LAST_UPDATE_TIME\x10\x02\x12\x06\n" + + "\x02ID\x10\x03\"\x8e\x01\n" + + "\x14ListArtifactResponse\x12N\n" + + "\tartifacts\x18\x01 \x03(\v20.kubeflow.pipelines.backend.api.v2beta1.ArtifactR\tartifacts\x12&\n" + + "\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\"\xde\x03\n" + + "\bArtifact\x12\x1f\n" + + "\vartifact_id\x18\x01 \x01(\tR\n" + + "artifactId\x12)\n" + + "\x10storage_provider\x18\x02 \x01(\tR\x0fstorageProvider\x12!\n" + + "\fstorage_path\x18\x03 
\x01(\tR\vstoragePath\x12\x10\n" + + "\x03uri\x18\x04 \x01(\tR\x03uri\x12!\n" + + "\fdownload_url\x18\x05 \x01(\tR\vdownloadUrl\x12\x1c\n" + + "\tnamespace\x18\x06 \x01(\tR\tnamespace\x12#\n" + + "\rartifact_type\x18\a \x01(\tR\fartifactType\x12#\n" + + "\rartifact_size\x18\b \x01(\x03R\fartifactSize\x129\n" + + "\n" + + "created_at\x18\t \x01(\v2\x1a.google.protobuf.TimestampR\tcreatedAt\x12B\n" + + "\x0flast_updated_at\x18\n" + + " \x01(\v2\x1a.google.protobuf.TimestampR\rlastUpdatedAt\x12(\n" + + "\x05error\x18\v \x01(\v2\x12.google.rpc.StatusR\x05error\x12\x1d\n" + + "\n" + + "render_url\x18\f \x01(\tR\trenderUrl2\xec\x02\n" + + "\x0fArtifactService\x12\xab\x01\n" + + "\rListArtifacts\x12;.kubeflow.pipelines.backend.api.v2beta1.ListArtifactRequest\x1a<.kubeflow.pipelines.backend.api.v2beta1.ListArtifactResponse\"\x1f\x82\xd3\xe4\x93\x02\x19\x12\x17/apis/v2beta1/artifacts\x12\xaa\x01\n" + + "\vGetArtifact\x12:.kubeflow.pipelines.backend.api.v2beta1.GetArtifactRequest\x1a0.kubeflow.pipelines.backend.api.v2beta1.Artifact\"-\x82\xd3\xe4\x93\x02'\x12%/apis/v2beta1/artifacts/{artifact_id}B=Z;github.com/kubeflow/pipelines/backend/api/v2beta1/go_clientb\x06proto3" var ( file_backend_api_v2beta1_artifacts_proto_rawDescOnce sync.Once - file_backend_api_v2beta1_artifacts_proto_rawDescData = file_backend_api_v2beta1_artifacts_proto_rawDesc + file_backend_api_v2beta1_artifacts_proto_rawDescData []byte ) func file_backend_api_v2beta1_artifacts_proto_rawDescGZIP() []byte { file_backend_api_v2beta1_artifacts_proto_rawDescOnce.Do(func() { - file_backend_api_v2beta1_artifacts_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v2beta1_artifacts_proto_rawDescData) + file_backend_api_v2beta1_artifacts_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_artifacts_proto_rawDesc), len(file_backend_api_v2beta1_artifacts_proto_rawDesc))) }) return file_backend_api_v2beta1_artifacts_proto_rawDescData } var file_backend_api_v2beta1_artifacts_proto_enumTypes = make([]protoimpl.EnumInfo, 2) var file_backend_api_v2beta1_artifacts_proto_msgTypes = make([]protoimpl.MessageInfo, 4) -var file_backend_api_v2beta1_artifacts_proto_goTypes = []interface{}{ +var file_backend_api_v2beta1_artifacts_proto_goTypes = []any{ (GetArtifactRequest_ArtifactView)(0), // 0: kubeflow.pipelines.backend.api.v2beta1.GetArtifactRequest.ArtifactView (ListArtifactRequest_Field)(0), // 1: kubeflow.pipelines.backend.api.v2beta1.ListArtifactRequest.Field (*GetArtifactRequest)(nil), // 2: kubeflow.pipelines.backend.api.v2beta1.GetArtifactRequest @@ -685,61 +600,11 @@ func file_backend_api_v2beta1_artifacts_proto_init() { if File_backend_api_v2beta1_artifacts_proto != nil { return } - if !protoimpl.UnsafeEnabled { - file_backend_api_v2beta1_artifacts_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetArtifactRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_artifacts_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListArtifactRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_artifacts_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListArtifactResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return 
&v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_artifacts_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Artifact); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v2beta1_artifacts_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_artifacts_proto_rawDesc), len(file_backend_api_v2beta1_artifacts_proto_rawDesc)), NumEnums: 2, NumMessages: 4, NumExtensions: 0, @@ -751,133 +616,6 @@ func file_backend_api_v2beta1_artifacts_proto_init() { MessageInfos: file_backend_api_v2beta1_artifacts_proto_msgTypes, }.Build() File_backend_api_v2beta1_artifacts_proto = out.File - file_backend_api_v2beta1_artifacts_proto_rawDesc = nil file_backend_api_v2beta1_artifacts_proto_goTypes = nil file_backend_api_v2beta1_artifacts_proto_depIdxs = nil } - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// ArtifactServiceClient is the client API for ArtifactService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type ArtifactServiceClient interface { - // Finds all artifacts within the specified namespace. - // Namespace field is required. In multi-user mode, the caller - // is required to have RBAC verb "list" on the "artifacts" - // resource for the specified namespace. - ListArtifacts(ctx context.Context, in *ListArtifactRequest, opts ...grpc.CallOption) (*ListArtifactResponse, error) - // Finds a specific Artifact by ID. - GetArtifact(ctx context.Context, in *GetArtifactRequest, opts ...grpc.CallOption) (*Artifact, error) -} - -type artifactServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewArtifactServiceClient(cc grpc.ClientConnInterface) ArtifactServiceClient { - return &artifactServiceClient{cc} -} - -func (c *artifactServiceClient) ListArtifacts(ctx context.Context, in *ListArtifactRequest, opts ...grpc.CallOption) (*ListArtifactResponse, error) { - out := new(ListArtifactResponse) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/ListArtifacts", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *artifactServiceClient) GetArtifact(ctx context.Context, in *GetArtifactRequest, opts ...grpc.CallOption) (*Artifact, error) { - out := new(Artifact) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/GetArtifact", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// ArtifactServiceServer is the server API for ArtifactService service. -type ArtifactServiceServer interface { - // Finds all artifacts within the specified namespace. - // Namespace field is required. In multi-user mode, the caller - // is required to have RBAC verb "list" on the "artifacts" - // resource for the specified namespace. - ListArtifacts(context.Context, *ListArtifactRequest) (*ListArtifactResponse, error) - // Finds a specific Artifact by ID. 
- GetArtifact(context.Context, *GetArtifactRequest) (*Artifact, error) -} - -// UnimplementedArtifactServiceServer can be embedded to have forward compatible implementations. -type UnimplementedArtifactServiceServer struct { -} - -func (*UnimplementedArtifactServiceServer) ListArtifacts(context.Context, *ListArtifactRequest) (*ListArtifactResponse, error) { - return nil, status1.Errorf(codes.Unimplemented, "method ListArtifacts not implemented") -} -func (*UnimplementedArtifactServiceServer) GetArtifact(context.Context, *GetArtifactRequest) (*Artifact, error) { - return nil, status1.Errorf(codes.Unimplemented, "method GetArtifact not implemented") -} - -func RegisterArtifactServiceServer(s *grpc.Server, srv ArtifactServiceServer) { - s.RegisterService(&_ArtifactService_serviceDesc, srv) -} - -func _ArtifactService_ListArtifacts_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ListArtifactRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ArtifactServiceServer).ListArtifacts(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/ListArtifacts", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ArtifactServiceServer).ListArtifacts(ctx, req.(*ListArtifactRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ArtifactService_GetArtifact_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetArtifactRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ArtifactServiceServer).GetArtifact(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/GetArtifact", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ArtifactServiceServer).GetArtifact(ctx, req.(*GetArtifactRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _ArtifactService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "kubeflow.pipelines.backend.api.v2beta1.ArtifactService", - HandlerType: (*ArtifactServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "ListArtifacts", - Handler: _ArtifactService_ListArtifacts_Handler, - }, - { - MethodName: "GetArtifact", - Handler: _ArtifactService_GetArtifact_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "backend/api/v2beta1/artifacts.proto", -} diff --git a/backend/api/v2beta1/go_client/artifacts.pb.gw.go b/backend/api/v2beta1/go_client/artifacts.pb.gw.go index 7a1b7293dff..a88474f97cf 100644 --- a/backend/api/v2beta1/go_client/artifacts.pb.gw.go +++ b/backend/api/v2beta1/go_client/artifacts.pb.gw.go @@ -10,187 +10,164 @@ package go_client import ( "context" + "errors" "io" "net/http" - "github.com/golang/protobuf/descriptor" - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" ) // Suppress "imported and not 
used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = descriptor.ForMessage -var _ = metadata.Join - var ( - filter_ArtifactService_ListArtifacts_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} + _ codes.Code + _ io.Reader + _ status.Status + _ = errors.New + _ = runtime.String + _ = utilities.NewDoubleArray + _ = metadata.Join ) -func request_ArtifactService_ListArtifacts_0(ctx context.Context, marshaler runtime.Marshaler, client ArtifactServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListArtifactRequest - var metadata runtime.ServerMetadata +var filter_ArtifactService_ListArtifacts_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} +func request_ArtifactService_ListArtifacts_0(ctx context.Context, marshaler runtime.Marshaler, client ArtifactServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq ListArtifactRequest + metadata runtime.ServerMetadata + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ArtifactService_ListArtifacts_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListArtifacts(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_ArtifactService_ListArtifacts_0(ctx context.Context, marshaler runtime.Marshaler, server ArtifactServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListArtifactRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListArtifactRequest + metadata runtime.ServerMetadata + ) if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ArtifactService_ListArtifacts_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.ListArtifacts(ctx, &protoReq) return msg, metadata, err - } -var ( - filter_ArtifactService_GetArtifact_0 = &utilities.DoubleArray{Encoding: map[string]int{"artifact_id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} -) +var filter_ArtifactService_GetArtifact_0 = &utilities.DoubleArray{Encoding: map[string]int{"artifact_id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} func request_ArtifactService_GetArtifact_0(ctx context.Context, marshaler runtime.Marshaler, client ArtifactServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetArtifactRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetArtifactRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["artifact_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["artifact_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "artifact_id") } - protoReq.ArtifactId, err = runtime.String(val) - if err != nil { return nil, metadata, 
status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "artifact_id", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ArtifactService_GetArtifact_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.GetArtifact(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_ArtifactService_GetArtifact_0(ctx context.Context, marshaler runtime.Marshaler, server ArtifactServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetArtifactRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetArtifactRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["artifact_id"] + val, ok := pathParams["artifact_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "artifact_id") } - protoReq.ArtifactId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "artifact_id", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ArtifactService_GetArtifact_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.GetArtifact(ctx, &protoReq) return msg, metadata, err - } // RegisterArtifactServiceHandlerServer registers the http handlers for service ArtifactService to "mux". // UnaryRPC :call ArtifactServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. // Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterArtifactServiceHandlerFromEndpoint instead. +// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call. 
func RegisterArtifactServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server ArtifactServiceServer) error { - - mux.Handle("GET", pattern_ArtifactService_ListArtifacts_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_ArtifactService_ListArtifacts_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/ListArtifacts", runtime.WithHTTPPathPattern("/apis/v2beta1/artifacts")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_ArtifactService_ListArtifacts_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_ArtifactService_ListArtifacts_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ArtifactService_ListArtifacts_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ArtifactService_ListArtifacts_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_ArtifactService_GetArtifact_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_ArtifactService_GetArtifact_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/GetArtifact", runtime.WithHTTPPathPattern("/apis/v2beta1/artifacts/{artifact_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_ArtifactService_GetArtifact_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_ArtifactService_GetArtifact_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ArtifactService_GetArtifact_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ArtifactService_GetArtifact_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) return nil @@ -199,25 +176,24 @@ func RegisterArtifactServiceHandlerServer(ctx context.Context, mux *runtime.Serv // RegisterArtifactServiceHandlerFromEndpoint is same as RegisterArtifactServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterArtifactServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) + conn, err := grpc.NewClient(endpoint, opts...) if err != nil { return err } defer func() { if err != nil { if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } return } go func() { <-ctx.Done() if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } }() }() - return RegisterArtifactServiceHandler(ctx, mux, conn) } @@ -231,60 +207,51 @@ func RegisterArtifactServiceHandler(ctx context.Context, mux *runtime.ServeMux, // to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "ArtifactServiceClient". // Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "ArtifactServiceClient" // doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "ArtifactServiceClient" to call the correct interceptors. +// "ArtifactServiceClient" to call the correct interceptors. This client ignores the HTTP middlewares. 
func RegisterArtifactServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client ArtifactServiceClient) error { - - mux.Handle("GET", pattern_ArtifactService_ListArtifacts_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_ArtifactService_ListArtifacts_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/ListArtifacts", runtime.WithHTTPPathPattern("/apis/v2beta1/artifacts")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_ArtifactService_ListArtifacts_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_ArtifactService_ListArtifacts_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ArtifactService_ListArtifacts_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ArtifactService_ListArtifacts_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_ArtifactService_GetArtifact_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_ArtifactService_GetArtifact_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/GetArtifact", runtime.WithHTTPPathPattern("/apis/v2beta1/artifacts/{artifact_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_ArtifactService_GetArtifact_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_ArtifactService_GetArtifact_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ArtifactService_GetArtifact_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ArtifactService_GetArtifact_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - return nil } var ( - pattern_ArtifactService_ListArtifacts_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "artifacts"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_ArtifactService_GetArtifact_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "artifacts", "artifact_id"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_ArtifactService_ListArtifacts_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "artifacts"}, "")) + pattern_ArtifactService_GetArtifact_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "artifacts", "artifact_id"}, "")) ) var ( forward_ArtifactService_ListArtifacts_0 = runtime.ForwardResponseMessage - - forward_ArtifactService_GetArtifact_0 = runtime.ForwardResponseMessage + forward_ArtifactService_GetArtifact_0 = runtime.ForwardResponseMessage ) diff --git a/backend/api/v2beta1/go_client/artifacts_grpc.pb.go b/backend/api/v2beta1/go_client/artifacts_grpc.pb.go new file mode 100644 index 00000000000..1a2201c69a3 --- /dev/null +++ b/backend/api/v2beta1/go_client/artifacts_grpc.pb.go @@ -0,0 +1,183 @@ +// Copyright 2024 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v6.31.1 +// source: backend/api/v2beta1/artifacts.proto + +package go_client + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + ArtifactService_ListArtifacts_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/ListArtifacts" + ArtifactService_GetArtifact_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/GetArtifact" +) + +// ArtifactServiceClient is the client API for ArtifactService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type ArtifactServiceClient interface { + // Finds all artifacts within the specified namespace. + // Namespace field is required. In multi-user mode, the caller + // is required to have RBAC verb "list" on the "artifacts" + // resource for the specified namespace. + ListArtifacts(ctx context.Context, in *ListArtifactRequest, opts ...grpc.CallOption) (*ListArtifactResponse, error) + // Finds a specific Artifact by ID. 
+ GetArtifact(ctx context.Context, in *GetArtifactRequest, opts ...grpc.CallOption) (*Artifact, error) +} + +type artifactServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewArtifactServiceClient(cc grpc.ClientConnInterface) ArtifactServiceClient { + return &artifactServiceClient{cc} +} + +func (c *artifactServiceClient) ListArtifacts(ctx context.Context, in *ListArtifactRequest, opts ...grpc.CallOption) (*ListArtifactResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ListArtifactResponse) + err := c.cc.Invoke(ctx, ArtifactService_ListArtifacts_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *artifactServiceClient) GetArtifact(ctx context.Context, in *GetArtifactRequest, opts ...grpc.CallOption) (*Artifact, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Artifact) + err := c.cc.Invoke(ctx, ArtifactService_GetArtifact_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ArtifactServiceServer is the server API for ArtifactService service. +// All implementations must embed UnimplementedArtifactServiceServer +// for forward compatibility. +type ArtifactServiceServer interface { + // Finds all artifacts within the specified namespace. + // Namespace field is required. In multi-user mode, the caller + // is required to have RBAC verb "list" on the "artifacts" + // resource for the specified namespace. + ListArtifacts(context.Context, *ListArtifactRequest) (*ListArtifactResponse, error) + // Finds a specific Artifact by ID. + GetArtifact(context.Context, *GetArtifactRequest) (*Artifact, error) + mustEmbedUnimplementedArtifactServiceServer() +} + +// UnimplementedArtifactServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. +type UnimplementedArtifactServiceServer struct{} + +func (UnimplementedArtifactServiceServer) ListArtifacts(context.Context, *ListArtifactRequest) (*ListArtifactResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListArtifacts not implemented") +} +func (UnimplementedArtifactServiceServer) GetArtifact(context.Context, *GetArtifactRequest) (*Artifact, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetArtifact not implemented") +} +func (UnimplementedArtifactServiceServer) mustEmbedUnimplementedArtifactServiceServer() {} +func (UnimplementedArtifactServiceServer) testEmbeddedByValue() {} + +// UnsafeArtifactServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to ArtifactServiceServer will +// result in compilation errors. +type UnsafeArtifactServiceServer interface { + mustEmbedUnimplementedArtifactServiceServer() +} + +func RegisterArtifactServiceServer(s grpc.ServiceRegistrar, srv ArtifactServiceServer) { + // If the following call pancis, it indicates UnimplementedArtifactServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. 
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&ArtifactService_ServiceDesc, srv) +} + +func _ArtifactService_ListArtifacts_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListArtifactRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ArtifactServiceServer).ListArtifacts(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ArtifactService_ListArtifacts_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ArtifactServiceServer).ListArtifacts(ctx, req.(*ListArtifactRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ArtifactService_GetArtifact_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetArtifactRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ArtifactServiceServer).GetArtifact(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ArtifactService_GetArtifact_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ArtifactServiceServer).GetArtifact(ctx, req.(*GetArtifactRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// ArtifactService_ServiceDesc is the grpc.ServiceDesc for ArtifactService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var ArtifactService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "kubeflow.pipelines.backend.api.v2beta1.ArtifactService", + HandlerType: (*ArtifactServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListArtifacts", + Handler: _ArtifactService_ListArtifacts_Handler, + }, + { + MethodName: "GetArtifact", + Handler: _ArtifactService_GetArtifact_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "backend/api/v2beta1/artifacts.proto", +} diff --git a/backend/api/v2beta1/go_client/auth.pb.go b/backend/api/v2beta1/go_client/auth.pb.go index bbf196283bd..55db52b2bf5 100644 --- a/backend/api/v2beta1/go_client/auth.pb.go +++ b/backend/api/v2beta1/go_client/auth.pb.go @@ -14,25 +14,21 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v2beta1/auth.proto package go_client import ( - context "context" - _ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options" + _ "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options" _ "google.golang.org/genproto/googleapis/api/annotations" - _ "google.golang.org/genproto/googleapis/rpc/status" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" emptypb "google.golang.org/protobuf/types/known/emptypb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -146,22 +142,19 @@ func (AuthorizeRequest_Verb) EnumDescriptor() ([]byte, []int) { // and verb. User identity is not part of the message, because it is expected // to be parsed from request headers. Caller should proxy user request's headers. 
type AuthorizeRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"` // Namespace the resource belongs to. + Resources AuthorizeRequest_Resources `protobuf:"varint,2,opt,name=resources,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.AuthorizeRequest_Resources" json:"resources,omitempty"` // Resource type asking for authorization. + Verb AuthorizeRequest_Verb `protobuf:"varint,3,opt,name=verb,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.AuthorizeRequest_Verb" json:"verb,omitempty"` // Verb on the resource asking for authorization. unknownFields protoimpl.UnknownFields - - Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"` // Namespace the resource belongs to. - Resources AuthorizeRequest_Resources `protobuf:"varint,2,opt,name=resources,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.AuthorizeRequest_Resources" json:"resources,omitempty"` // Resource type asking for authorization. - Verb AuthorizeRequest_Verb `protobuf:"varint,3,opt,name=verb,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.AuthorizeRequest_Verb" json:"verb,omitempty"` // Verb on the resource asking for authorization. + sizeCache protoimpl.SizeCache } func (x *AuthorizeRequest) Reset() { *x = AuthorizeRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_auth_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_auth_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *AuthorizeRequest) String() string { @@ -172,7 +165,7 @@ func (*AuthorizeRequest) ProtoMessage() {} func (x *AuthorizeRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_auth_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -210,78 +203,48 @@ func (x *AuthorizeRequest) GetVerb() AuthorizeRequest_Verb { var File_backend_api_v2beta1_auth_proto protoreflect.FileDescriptor -var file_backend_api_v2beta1_auth_proto_rawDesc = []byte{ - 0x0a, 0x1e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x61, 0x75, 0x74, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x12, 0x26, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x1a, 0x2c, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, 0x67, 0x65, 0x6e, 0x2d, - 0x73, 0x77, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, - 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x1a, 0x17, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x72, 0x70, 0x63, 0x2f, 0x73, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x2e, 
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xd7, 0x02, 0x0a, 0x10, 0x41, - 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x60, 0x0a, - 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x42, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, - 0x69, 0x7a, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x73, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, - 0x51, 0x0a, 0x04, 0x76, 0x65, 0x72, 0x62, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3d, 0x2e, - 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, - 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x56, 0x65, 0x72, 0x62, 0x52, 0x04, 0x76, 0x65, - 0x72, 0x62, 0x22, 0x32, 0x0a, 0x09, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, - 0x18, 0x0a, 0x14, 0x55, 0x4e, 0x41, 0x53, 0x53, 0x49, 0x47, 0x4e, 0x45, 0x44, 0x5f, 0x52, 0x45, - 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x53, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x56, 0x49, 0x45, - 0x57, 0x45, 0x52, 0x53, 0x10, 0x01, 0x22, 0x3c, 0x0a, 0x04, 0x56, 0x65, 0x72, 0x62, 0x12, 0x13, - 0x0a, 0x0f, 0x55, 0x4e, 0x41, 0x53, 0x53, 0x49, 0x47, 0x4e, 0x45, 0x44, 0x5f, 0x56, 0x45, 0x52, - 0x42, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x43, 0x52, 0x45, 0x41, 0x54, 0x45, 0x10, 0x01, 0x12, - 0x07, 0x0a, 0x03, 0x47, 0x45, 0x54, 0x10, 0x02, 0x12, 0x0a, 0x0a, 0x06, 0x44, 0x45, 0x4c, 0x45, - 0x54, 0x45, 0x10, 0x03, 0x32, 0x88, 0x01, 0x0a, 0x0b, 0x41, 0x75, 0x74, 0x68, 0x53, 0x65, 0x72, - 0x76, 0x69, 0x63, 0x65, 0x12, 0x79, 0x0a, 0x09, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, - 0x65, 0x12, 0x38, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x41, 0x75, 0x74, 0x68, 0x6f, - 0x72, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, - 0x70, 0x74, 0x79, 0x22, 0x1a, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x14, 0x12, 0x12, 0x2f, 0x61, 0x70, - 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x61, 0x75, 0x74, 0x68, 0x42, - 0x98, 0x01, 0x92, 0x41, 0x58, 0x2a, 0x02, 0x01, 0x02, 0x52, 0x23, 0x0a, 0x07, 0x64, 0x65, 0x66, - 0x61, 0x75, 0x6c, 0x74, 0x12, 0x18, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x5a, 0x1f, - 0x0a, 0x1d, 0x0a, 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x12, 0x13, 0x08, 0x02, 0x1a, 0x0d, - 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x02, 0x62, - 0x0c, 0x0a, 0x0a, 0x0a, 0x06, 0x42, 0x65, 0x61, 0x72, 
0x65, 0x72, 0x12, 0x00, 0x5a, 0x3b, 0x67, - 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, - 0x6f, 0x77, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, 0x63, - 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2f, 0x67, 0x6f, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x33, -} +const file_backend_api_v2beta1_auth_proto_rawDesc = "" + + "\n" + + "\x1ebackend/api/v2beta1/auth.proto\x12&kubeflow.pipelines.backend.api.v2beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\"\xd7\x02\n" + + "\x10AuthorizeRequest\x12\x1c\n" + + "\tnamespace\x18\x01 \x01(\tR\tnamespace\x12`\n" + + "\tresources\x18\x02 \x01(\x0e2B.kubeflow.pipelines.backend.api.v2beta1.AuthorizeRequest.ResourcesR\tresources\x12Q\n" + + "\x04verb\x18\x03 \x01(\x0e2=.kubeflow.pipelines.backend.api.v2beta1.AuthorizeRequest.VerbR\x04verb\"2\n" + + "\tResources\x12\x18\n" + + "\x14UNASSIGNED_RESOURCES\x10\x00\x12\v\n" + + "\aVIEWERS\x10\x01\"<\n" + + "\x04Verb\x12\x13\n" + + "\x0fUNASSIGNED_VERB\x10\x00\x12\n" + + "\n" + + "\x06CREATE\x10\x01\x12\a\n" + + "\x03GET\x10\x02\x12\n" + + "\n" + + "\x06DELETE\x10\x032\x88\x01\n" + + "\vAuthService\x12y\n" + + "\tAuthorize\x128.kubeflow.pipelines.backend.api.v2beta1.AuthorizeRequest\x1a\x16.google.protobuf.Empty\"\x1a\x82\xd3\xe4\x93\x02\x14\x12\x12/apis/v2beta1/authB\x98\x01\x92AX*\x02\x01\x02R#\n" + + "\adefault\x12\x18\x12\x16\n" + + "\x14\x1a\x12.google.rpc.StatusZ\x1f\n" + + "\x1d\n" + + "\x06Bearer\x12\x13\b\x02\x1a\rauthorization \x02b\f\n" + + "\n" + + "\n" + + "\x06Bearer\x12\x00Z;github.com/kubeflow/pipelines/backend/api/v2beta1/go_clientb\x06proto3" var ( file_backend_api_v2beta1_auth_proto_rawDescOnce sync.Once - file_backend_api_v2beta1_auth_proto_rawDescData = file_backend_api_v2beta1_auth_proto_rawDesc + file_backend_api_v2beta1_auth_proto_rawDescData []byte ) func file_backend_api_v2beta1_auth_proto_rawDescGZIP() []byte { file_backend_api_v2beta1_auth_proto_rawDescOnce.Do(func() { - file_backend_api_v2beta1_auth_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v2beta1_auth_proto_rawDescData) + file_backend_api_v2beta1_auth_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_auth_proto_rawDesc), len(file_backend_api_v2beta1_auth_proto_rawDesc))) }) return file_backend_api_v2beta1_auth_proto_rawDescData } var file_backend_api_v2beta1_auth_proto_enumTypes = make([]protoimpl.EnumInfo, 2) var file_backend_api_v2beta1_auth_proto_msgTypes = make([]protoimpl.MessageInfo, 1) -var file_backend_api_v2beta1_auth_proto_goTypes = []interface{}{ +var file_backend_api_v2beta1_auth_proto_goTypes = []any{ (AuthorizeRequest_Resources)(0), // 0: kubeflow.pipelines.backend.api.v2beta1.AuthorizeRequest.Resources (AuthorizeRequest_Verb)(0), // 1: kubeflow.pipelines.backend.api.v2beta1.AuthorizeRequest.Verb (*AuthorizeRequest)(nil), // 2: kubeflow.pipelines.backend.api.v2beta1.AuthorizeRequest @@ -304,25 +267,11 @@ func file_backend_api_v2beta1_auth_proto_init() { if File_backend_api_v2beta1_auth_proto != nil { return } - if !protoimpl.UnsafeEnabled { - file_backend_api_v2beta1_auth_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*AuthorizeRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: 
- return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v2beta1_auth_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_auth_proto_rawDesc), len(file_backend_api_v2beta1_auth_proto_rawDesc)), NumEnums: 2, NumMessages: 1, NumExtensions: 0, @@ -334,87 +283,6 @@ func file_backend_api_v2beta1_auth_proto_init() { MessageInfos: file_backend_api_v2beta1_auth_proto_msgTypes, }.Build() File_backend_api_v2beta1_auth_proto = out.File - file_backend_api_v2beta1_auth_proto_rawDesc = nil file_backend_api_v2beta1_auth_proto_goTypes = nil file_backend_api_v2beta1_auth_proto_depIdxs = nil } - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// AuthServiceClient is the client API for AuthService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type AuthServiceClient interface { - Authorize(ctx context.Context, in *AuthorizeRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) -} - -type authServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewAuthServiceClient(cc grpc.ClientConnInterface) AuthServiceClient { - return &authServiceClient{cc} -} - -func (c *authServiceClient) Authorize(ctx context.Context, in *AuthorizeRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.AuthService/Authorize", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// AuthServiceServer is the server API for AuthService service. -type AuthServiceServer interface { - Authorize(context.Context, *AuthorizeRequest) (*emptypb.Empty, error) -} - -// UnimplementedAuthServiceServer can be embedded to have forward compatible implementations. 
-type UnimplementedAuthServiceServer struct { -} - -func (*UnimplementedAuthServiceServer) Authorize(context.Context, *AuthorizeRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method Authorize not implemented") -} - -func RegisterAuthServiceServer(s *grpc.Server, srv AuthServiceServer) { - s.RegisterService(&_AuthService_serviceDesc, srv) -} - -func _AuthService_Authorize_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(AuthorizeRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(AuthServiceServer).Authorize(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.AuthService/Authorize", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(AuthServiceServer).Authorize(ctx, req.(*AuthorizeRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _AuthService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "kubeflow.pipelines.backend.api.v2beta1.AuthService", - HandlerType: (*AuthServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "Authorize", - Handler: _AuthService_Authorize_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "backend/api/v2beta1/auth.proto", -} diff --git a/backend/api/v2beta1/go_client/auth.pb.gw.go b/backend/api/v2beta1/go_client/auth.pb.gw.go index fde469f9c34..37bb8080044 100644 --- a/backend/api/v2beta1/go_client/auth.pb.gw.go +++ b/backend/api/v2beta1/go_client/auth.pb.gw.go @@ -10,92 +10,91 @@ package go_client import ( "context" + "errors" "io" "net/http" - "github.com/golang/protobuf/descriptor" - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" ) // Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = descriptor.ForMessage -var _ = metadata.Join - var ( - filter_AuthService_Authorize_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} + _ codes.Code + _ io.Reader + _ status.Status + _ = errors.New + _ = runtime.String + _ = utilities.NewDoubleArray + _ = metadata.Join ) -func request_AuthService_Authorize_0(ctx context.Context, marshaler runtime.Marshaler, client AuthServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq AuthorizeRequest - var metadata runtime.ServerMetadata +var filter_AuthService_Authorize_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} +func request_AuthService_Authorize_0(ctx context.Context, marshaler runtime.Marshaler, client AuthServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq AuthorizeRequest + metadata runtime.ServerMetadata + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } if err := req.ParseForm(); err != nil { return nil, metadata, 
status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AuthService_Authorize_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.Authorize(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_AuthService_Authorize_0(ctx context.Context, marshaler runtime.Marshaler, server AuthServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq AuthorizeRequest - var metadata runtime.ServerMetadata - + var ( + protoReq AuthorizeRequest + metadata runtime.ServerMetadata + ) if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AuthService_Authorize_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.Authorize(ctx, &protoReq) return msg, metadata, err - } // RegisterAuthServiceHandlerServer registers the http handlers for service AuthService to "mux". // UnaryRPC :call AuthServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. // Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterAuthServiceHandlerFromEndpoint instead. +// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call. func RegisterAuthServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server AuthServiceServer) error { - - mux.Handle("GET", pattern_AuthService_Authorize_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_AuthService_Authorize_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.AuthService/Authorize", runtime.WithHTTPPathPattern("/apis/v2beta1/auth")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_AuthService_Authorize_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_AuthService_Authorize_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_AuthService_Authorize_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_AuthService_Authorize_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) return nil @@ -104,25 +103,24 @@ func RegisterAuthServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux // RegisterAuthServiceHandlerFromEndpoint is same as RegisterAuthServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterAuthServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) + conn, err := grpc.NewClient(endpoint, opts...) if err != nil { return err } defer func() { if err != nil { if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } return } go func() { <-ctx.Done() if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } }() }() - return RegisterAuthServiceHandler(ctx, mux, conn) } @@ -136,34 +134,30 @@ func RegisterAuthServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn // to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "AuthServiceClient". // Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "AuthServiceClient" // doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "AuthServiceClient" to call the correct interceptors. +// "AuthServiceClient" to call the correct interceptors. This client ignores the HTTP middlewares. func RegisterAuthServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client AuthServiceClient) error { - - mux.Handle("GET", pattern_AuthService_Authorize_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_AuthService_Authorize_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.AuthService/Authorize", runtime.WithHTTPPathPattern("/apis/v2beta1/auth")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AuthService_Authorize_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_AuthService_Authorize_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_AuthService_Authorize_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_AuthService_Authorize_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - return nil } var ( - pattern_AuthService_Authorize_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "auth"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_AuthService_Authorize_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "auth"}, "")) ) var ( diff --git a/backend/api/v2beta1/go_client/auth_grpc.pb.go b/backend/api/v2beta1/go_client/auth_grpc.pb.go new file mode 100644 index 00000000000..989c0a84118 --- /dev/null +++ b/backend/api/v2beta1/go_client/auth_grpc.pb.go @@ -0,0 +1,136 @@ +// Copyright 2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v6.31.1 +// source: backend/api/v2beta1/auth.proto + +package go_client + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + emptypb "google.golang.org/protobuf/types/known/emptypb" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + AuthService_Authorize_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.AuthService/Authorize" +) + +// AuthServiceClient is the client API for AuthService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type AuthServiceClient interface { + Authorize(ctx context.Context, in *AuthorizeRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) +} + +type authServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewAuthServiceClient(cc grpc.ClientConnInterface) AuthServiceClient { + return &authServiceClient{cc} +} + +func (c *authServiceClient) Authorize(ctx context.Context, in *AuthorizeRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, AuthService_Authorize_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AuthServiceServer is the server API for AuthService service. +// All implementations must embed UnimplementedAuthServiceServer +// for forward compatibility. +type AuthServiceServer interface { + Authorize(context.Context, *AuthorizeRequest) (*emptypb.Empty, error) + mustEmbedUnimplementedAuthServiceServer() +} + +// UnimplementedAuthServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. 
+type UnimplementedAuthServiceServer struct{} + +func (UnimplementedAuthServiceServer) Authorize(context.Context, *AuthorizeRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method Authorize not implemented") +} +func (UnimplementedAuthServiceServer) mustEmbedUnimplementedAuthServiceServer() {} +func (UnimplementedAuthServiceServer) testEmbeddedByValue() {} + +// UnsafeAuthServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to AuthServiceServer will +// result in compilation errors. +type UnsafeAuthServiceServer interface { + mustEmbedUnimplementedAuthServiceServer() +} + +func RegisterAuthServiceServer(s grpc.ServiceRegistrar, srv AuthServiceServer) { + // If the following call pancis, it indicates UnimplementedAuthServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. + if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&AuthService_ServiceDesc, srv) +} + +func _AuthService_Authorize_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AuthorizeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AuthServiceServer).Authorize(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: AuthService_Authorize_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AuthServiceServer).Authorize(ctx, req.(*AuthorizeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// AuthService_ServiceDesc is the grpc.ServiceDesc for AuthService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var AuthService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "kubeflow.pipelines.backend.api.v2beta1.AuthService", + HandlerType: (*AuthServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Authorize", + Handler: _AuthService_Authorize_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "backend/api/v2beta1/auth.proto", +} diff --git a/backend/api/v2beta1/go_client/experiment.pb.go b/backend/api/v2beta1/go_client/experiment.pb.go index 3a6f69d416d..58212132f29 100644 --- a/backend/api/v2beta1/go_client/experiment.pb.go +++ b/backend/api/v2beta1/go_client/experiment.pb.go @@ -14,25 +14,22 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v2beta1/experiment.proto package go_client import ( - context "context" - _ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options" + _ "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options" _ "google.golang.org/genproto/googleapis/api/annotations" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" emptypb "google.golang.org/protobuf/types/known/emptypb" timestamppb "google.golang.org/protobuf/types/known/timestamppb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -96,10 +93,7 @@ func (Experiment_StorageState) EnumDescriptor() ([]byte, []int) { } type Experiment struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Output. Unique experiment ID. Generated by API server. ExperimentId string `protobuf:"bytes,1,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` // Required input field. Unique experiment name provided by user. @@ -114,15 +108,15 @@ type Experiment struct { StorageState Experiment_StorageState `protobuf:"varint,6,opt,name=storage_state,json=storageState,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.Experiment_StorageState" json:"storage_state,omitempty"` // Output. The creation time of the last run in this experiment. LastRunCreatedAt *timestamppb.Timestamp `protobuf:"bytes,7,opt,name=last_run_created_at,json=lastRunCreatedAt,proto3" json:"last_run_created_at,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Experiment) Reset() { *x = Experiment{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Experiment) String() string { @@ -133,7 +127,7 @@ func (*Experiment) ProtoMessage() {} func (x *Experiment) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -198,21 +192,18 @@ func (x *Experiment) GetLastRunCreatedAt() *timestamppb.Timestamp { } type CreateExperimentRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The experiment to be created. 
- Experiment *Experiment `protobuf:"bytes,1,opt,name=experiment,proto3" json:"experiment,omitempty"` + Experiment *Experiment `protobuf:"bytes,1,opt,name=experiment,proto3" json:"experiment,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *CreateExperimentRequest) Reset() { *x = CreateExperimentRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *CreateExperimentRequest) String() string { @@ -223,7 +214,7 @@ func (*CreateExperimentRequest) ProtoMessage() {} func (x *CreateExperimentRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -246,21 +237,18 @@ func (x *CreateExperimentRequest) GetExperiment() *Experiment { } type GetExperimentRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the experiment to be retrieved. - ExperimentId string `protobuf:"bytes,1,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` + ExperimentId string `protobuf:"bytes,1,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *GetExperimentRequest) Reset() { *x = GetExperimentRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *GetExperimentRequest) String() string { @@ -271,7 +259,7 @@ func (*GetExperimentRequest) ProtoMessage() {} func (x *GetExperimentRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -294,10 +282,7 @@ func (x *GetExperimentRequest) GetExperimentId() string { } type ListExperimentsRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // A page token to request the next page of results. The token is acquried // from the nextPageToken field of the response from the previous // ListExperiments call or can be omitted when fetching the first page. @@ -313,16 +298,16 @@ type ListExperimentsRequest struct { // [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/v2beta1/api/filter.proto)). Filter string `protobuf:"bytes,4,opt,name=filter,proto3" json:"filter,omitempty"` // Which namespace to filter the experiments on. 
- Namespace string `protobuf:"bytes,5,opt,name=namespace,proto3" json:"namespace,omitempty"` + Namespace string `protobuf:"bytes,5,opt,name=namespace,proto3" json:"namespace,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListExperimentsRequest) Reset() { *x = ListExperimentsRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListExperimentsRequest) String() string { @@ -333,7 +318,7 @@ func (*ListExperimentsRequest) ProtoMessage() {} func (x *ListExperimentsRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -384,25 +369,22 @@ func (x *ListExperimentsRequest) GetNamespace() string { } type ListExperimentsResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // A list of experiments returned. Experiments []*Experiment `protobuf:"bytes,1,rep,name=experiments,proto3" json:"experiments,omitempty"` // The number of experiments for the given query. TotalSize int32 `protobuf:"varint,3,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` // The token to list the next page of experiments. NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListExperimentsResponse) Reset() { *x = ListExperimentsResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListExperimentsResponse) String() string { @@ -413,7 +395,7 @@ func (*ListExperimentsResponse) ProtoMessage() {} func (x *ListExperimentsResponse) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -450,21 +432,18 @@ func (x *ListExperimentsResponse) GetNextPageToken() string { } type DeleteExperimentRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the experiment to be deleted. 
- ExperimentId string `protobuf:"bytes,1,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` + ExperimentId string `protobuf:"bytes,1,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DeleteExperimentRequest) Reset() { *x = DeleteExperimentRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DeleteExperimentRequest) String() string { @@ -475,7 +454,7 @@ func (*DeleteExperimentRequest) ProtoMessage() {} func (x *DeleteExperimentRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -498,21 +477,18 @@ func (x *DeleteExperimentRequest) GetExperimentId() string { } type ArchiveExperimentRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the experiment to be archived. - ExperimentId string `protobuf:"bytes,1,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` + ExperimentId string `protobuf:"bytes,1,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ArchiveExperimentRequest) Reset() { *x = ArchiveExperimentRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ArchiveExperimentRequest) String() string { @@ -523,7 +499,7 @@ func (*ArchiveExperimentRequest) ProtoMessage() {} func (x *ArchiveExperimentRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -546,21 +522,18 @@ func (x *ArchiveExperimentRequest) GetExperimentId() string { } type UnarchiveExperimentRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the experiment to be restored. 
- ExperimentId string `protobuf:"bytes,1,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` + ExperimentId string `protobuf:"bytes,1,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *UnarchiveExperimentRequest) Reset() { *x = UnarchiveExperimentRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *UnarchiveExperimentRequest) String() string { @@ -571,7 +544,7 @@ func (*UnarchiveExperimentRequest) ProtoMessage() {} func (x *UnarchiveExperimentRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_experiment_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -595,184 +568,71 @@ func (x *UnarchiveExperimentRequest) GetExperimentId() string { var File_backend_api_v2beta1_experiment_proto protoreflect.FileDescriptor -var file_backend_api_v2beta1_experiment_proto_rawDesc = []byte{ - 0x0a, 0x24, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x26, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, - 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x1a, 0x1c, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d, - 0x70, 0x74, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, - 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2c, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x63, 0x2d, 0x67, 0x65, 0x6e, 0x2d, 0x73, 0x77, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2f, 0x6f, - 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xcc, 0x03, 0x0a, 0x0a, 0x45, 0x78, 0x70, - 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x78, 0x70, 0x65, 0x72, - 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, - 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x21, 0x0a, 0x0c, - 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x0b, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, - 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, - 0x04, 
0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, - 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x1c, 0x0a, 0x09, - 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x64, 0x0a, 0x0d, 0x73, 0x74, - 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, - 0x0e, 0x32, 0x3f, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x65, 0x72, - 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x53, 0x74, 0x61, - 0x74, 0x65, 0x52, 0x0c, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, - 0x12, 0x49, 0x0a, 0x13, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x63, 0x72, 0x65, - 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, - 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x10, 0x6c, 0x61, 0x73, 0x74, 0x52, - 0x75, 0x6e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x22, 0x4a, 0x0a, 0x0c, 0x53, - 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x1d, 0x0a, 0x19, 0x53, - 0x54, 0x4f, 0x52, 0x41, 0x47, 0x45, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x45, 0x5f, 0x55, 0x4e, 0x53, - 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x41, 0x56, - 0x41, 0x49, 0x4c, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x01, 0x12, 0x0c, 0x0a, 0x08, 0x41, 0x52, 0x43, - 0x48, 0x49, 0x56, 0x45, 0x44, 0x10, 0x02, 0x22, 0x6d, 0x0a, 0x17, 0x43, 0x72, 0x65, 0x61, 0x74, - 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x52, 0x0a, 0x0a, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, - 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, - 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x0a, 0x65, 0x78, 0x70, 0x65, - 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0x3b, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x45, 0x78, 0x70, - 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x23, - 0x0a, 0x0d, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, - 0x74, 0x49, 0x64, 0x22, 0xa3, 0x01, 0x0a, 0x16, 0x4c, 0x69, 0x73, 0x74, 0x45, 0x78, 0x70, 0x65, - 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, - 0x0a, 0x0a, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x09, 0x70, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x1b, 0x0a, - 0x09, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, - 0x52, 0x08, 0x70, 0x61, 0x67, 
0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x73, 0x6f, - 0x72, 0x74, 0x5f, 0x62, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x72, - 0x74, 0x42, 0x79, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x04, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x1c, 0x0a, 0x09, 0x6e, - 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, - 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x22, 0xb6, 0x01, 0x0a, 0x17, 0x4c, 0x69, - 0x73, 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x54, 0x0a, 0x0b, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, - 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x6b, 0x75, 0x62, - 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, - 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, - 0x74, 0x61, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x0b, - 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x74, - 0x6f, 0x74, 0x61, 0x6c, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, - 0x09, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, - 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, - 0x65, 0x6e, 0x22, 0x3e, 0x0a, 0x17, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x45, 0x78, 0x70, 0x65, - 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x23, 0x0a, - 0x0d, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, - 0x49, 0x64, 0x22, 0x3f, 0x0a, 0x18, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x45, 0x78, 0x70, - 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x23, - 0x0a, 0x0d, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, - 0x74, 0x49, 0x64, 0x22, 0x41, 0x0a, 0x1a, 0x55, 0x6e, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, - 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x5f, - 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, - 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x32, 0xb8, 0x08, 0x0a, 0x11, 0x45, 0x78, 0x70, 0x65, 0x72, - 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0xb6, 0x01, 0x0a, - 0x10, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, - 0x74, 0x12, 0x3f, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, - 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x32, 0x2e, 0x6b, 0x75, 0x62, 0x65, 
0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x65, - 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0x2d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x27, 0x3a, 0x0a, - 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0x19, 0x2f, 0x61, 0x70, 0x69, - 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, - 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0xb4, 0x01, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x45, 0x78, 0x70, - 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x3c, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, - 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, - 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2e, 0x47, 0x65, 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x32, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, - 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x45, - 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0x31, 0x82, 0xd3, 0xe4, 0x93, 0x02, - 0x2b, 0x12, 0x29, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2f, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2f, 0x7b, 0x65, 0x78, - 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x7d, 0x12, 0xb5, 0x01, 0x0a, - 0x0f, 0x4c, 0x69, 0x73, 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, - 0x12, 0x3e, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x45, 0x78, - 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x3f, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x45, 0x78, - 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x22, 0x21, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1b, 0x12, 0x19, 0x2f, 0x61, 0x70, 0x69, 0x73, - 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, - 0x65, 0x6e, 0x74, 0x73, 0x12, 0xa8, 0x01, 0x0a, 0x11, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, - 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x40, 0x2e, 0x6b, 0x75, 0x62, - 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, - 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, - 0x74, 0x61, 0x31, 0x2e, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, - 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, - 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, - 0x6d, 0x70, 0x74, 0x79, 0x22, 0x39, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x33, 0x22, 
0x31, 0x2f, 0x61, - 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x65, 0x78, 0x70, 0x65, - 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2f, 0x7b, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, - 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x7d, 0x3a, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x12, - 0xae, 0x01, 0x0a, 0x13, 0x55, 0x6e, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x45, 0x78, 0x70, - 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x42, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, - 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, - 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2e, 0x55, 0x6e, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, - 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, - 0x70, 0x74, 0x79, 0x22, 0x3b, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x35, 0x22, 0x33, 0x2f, 0x61, 0x70, - 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x65, 0x78, 0x70, 0x65, 0x72, - 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2f, 0x7b, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, - 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x7d, 0x3a, 0x75, 0x6e, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, - 0x12, 0x9e, 0x01, 0x0a, 0x10, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, - 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x3f, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, - 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x44, - 0x65, 0x6c, 0x65, 0x74, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x31, - 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x2b, 0x2a, 0x29, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, - 0x73, 0x2f, 0x7b, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, - 0x7d, 0x42, 0x44, 0x92, 0x41, 0x04, 0x2a, 0x02, 0x01, 0x02, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, - 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, - 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x67, 0x6f, - 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v2beta1_experiment_proto_rawDesc = "" + + "\n" + + "$backend/api/v2beta1/experiment.proto\x12&kubeflow.pipelines.backend.api.v2beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\"\xcc\x03\n" + + "\n" + + "Experiment\x12#\n" + + "\rexperiment_id\x18\x01 \x01(\tR\fexperimentId\x12!\n" + + "\fdisplay_name\x18\x02 \x01(\tR\vdisplayName\x12 \n" + + "\vdescription\x18\x03 \x01(\tR\vdescription\x129\n" + + "\n" + + "created_at\x18\x04 \x01(\v2\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x1c\n" + + "\tnamespace\x18\x05 \x01(\tR\tnamespace\x12d\n" + + 
"\rstorage_state\x18\x06 \x01(\x0e2?.kubeflow.pipelines.backend.api.v2beta1.Experiment.StorageStateR\fstorageState\x12I\n" + + "\x13last_run_created_at\x18\a \x01(\v2\x1a.google.protobuf.TimestampR\x10lastRunCreatedAt\"J\n" + + "\fStorageState\x12\x1d\n" + + "\x19STORAGE_STATE_UNSPECIFIED\x10\x00\x12\r\n" + + "\tAVAILABLE\x10\x01\x12\f\n" + + "\bARCHIVED\x10\x02\"m\n" + + "\x17CreateExperimentRequest\x12R\n" + + "\n" + + "experiment\x18\x01 \x01(\v22.kubeflow.pipelines.backend.api.v2beta1.ExperimentR\n" + + "experiment\";\n" + + "\x14GetExperimentRequest\x12#\n" + + "\rexperiment_id\x18\x01 \x01(\tR\fexperimentId\"\xa3\x01\n" + + "\x16ListExperimentsRequest\x12\x1d\n" + + "\n" + + "page_token\x18\x01 \x01(\tR\tpageToken\x12\x1b\n" + + "\tpage_size\x18\x02 \x01(\x05R\bpageSize\x12\x17\n" + + "\asort_by\x18\x03 \x01(\tR\x06sortBy\x12\x16\n" + + "\x06filter\x18\x04 \x01(\tR\x06filter\x12\x1c\n" + + "\tnamespace\x18\x05 \x01(\tR\tnamespace\"\xb6\x01\n" + + "\x17ListExperimentsResponse\x12T\n" + + "\vexperiments\x18\x01 \x03(\v22.kubeflow.pipelines.backend.api.v2beta1.ExperimentR\vexperiments\x12\x1d\n" + + "\n" + + "total_size\x18\x03 \x01(\x05R\ttotalSize\x12&\n" + + "\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\">\n" + + "\x17DeleteExperimentRequest\x12#\n" + + "\rexperiment_id\x18\x01 \x01(\tR\fexperimentId\"?\n" + + "\x18ArchiveExperimentRequest\x12#\n" + + "\rexperiment_id\x18\x01 \x01(\tR\fexperimentId\"A\n" + + "\x1aUnarchiveExperimentRequest\x12#\n" + + "\rexperiment_id\x18\x01 \x01(\tR\fexperimentId2\xb8\b\n" + + "\x11ExperimentService\x12\xb6\x01\n" + + "\x10CreateExperiment\x12?.kubeflow.pipelines.backend.api.v2beta1.CreateExperimentRequest\x1a2.kubeflow.pipelines.backend.api.v2beta1.Experiment\"-\x82\xd3\xe4\x93\x02':\n" + + "experiment\"\x19/apis/v2beta1/experiments\x12\xb4\x01\n" + + "\rGetExperiment\x12<.kubeflow.pipelines.backend.api.v2beta1.GetExperimentRequest\x1a2.kubeflow.pipelines.backend.api.v2beta1.Experiment\"1\x82\xd3\xe4\x93\x02+\x12)/apis/v2beta1/experiments/{experiment_id}\x12\xb5\x01\n" + + "\x0fListExperiments\x12>.kubeflow.pipelines.backend.api.v2beta1.ListExperimentsRequest\x1a?.kubeflow.pipelines.backend.api.v2beta1.ListExperimentsResponse\"!\x82\xd3\xe4\x93\x02\x1b\x12\x19/apis/v2beta1/experiments\x12\xa8\x01\n" + + "\x11ArchiveExperiment\x12@.kubeflow.pipelines.backend.api.v2beta1.ArchiveExperimentRequest\x1a\x16.google.protobuf.Empty\"9\x82\xd3\xe4\x93\x023\"1/apis/v2beta1/experiments/{experiment_id}:archive\x12\xae\x01\n" + + "\x13UnarchiveExperiment\x12B.kubeflow.pipelines.backend.api.v2beta1.UnarchiveExperimentRequest\x1a\x16.google.protobuf.Empty\";\x82\xd3\xe4\x93\x025\"3/apis/v2beta1/experiments/{experiment_id}:unarchive\x12\x9e\x01\n" + + "\x10DeleteExperiment\x12?.kubeflow.pipelines.backend.api.v2beta1.DeleteExperimentRequest\x1a\x16.google.protobuf.Empty\"1\x82\xd3\xe4\x93\x02+*)/apis/v2beta1/experiments/{experiment_id}BD\x92A\x04*\x02\x01\x02Z;github.com/kubeflow/pipelines/backend/api/v2beta1/go_clientb\x06proto3" var ( file_backend_api_v2beta1_experiment_proto_rawDescOnce sync.Once - file_backend_api_v2beta1_experiment_proto_rawDescData = file_backend_api_v2beta1_experiment_proto_rawDesc + file_backend_api_v2beta1_experiment_proto_rawDescData []byte ) func file_backend_api_v2beta1_experiment_proto_rawDescGZIP() []byte { file_backend_api_v2beta1_experiment_proto_rawDescOnce.Do(func() { - file_backend_api_v2beta1_experiment_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v2beta1_experiment_proto_rawDescData) + 
file_backend_api_v2beta1_experiment_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_experiment_proto_rawDesc), len(file_backend_api_v2beta1_experiment_proto_rawDesc))) }) return file_backend_api_v2beta1_experiment_proto_rawDescData } var file_backend_api_v2beta1_experiment_proto_enumTypes = make([]protoimpl.EnumInfo, 1) var file_backend_api_v2beta1_experiment_proto_msgTypes = make([]protoimpl.MessageInfo, 8) -var file_backend_api_v2beta1_experiment_proto_goTypes = []interface{}{ +var file_backend_api_v2beta1_experiment_proto_goTypes = []any{ (Experiment_StorageState)(0), // 0: kubeflow.pipelines.backend.api.v2beta1.Experiment.StorageState (*Experiment)(nil), // 1: kubeflow.pipelines.backend.api.v2beta1.Experiment (*CreateExperimentRequest)(nil), // 2: kubeflow.pipelines.backend.api.v2beta1.CreateExperimentRequest @@ -815,109 +675,11 @@ func file_backend_api_v2beta1_experiment_proto_init() { if File_backend_api_v2beta1_experiment_proto != nil { return } - if !protoimpl.UnsafeEnabled { - file_backend_api_v2beta1_experiment_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Experiment); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_experiment_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateExperimentRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_experiment_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetExperimentRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_experiment_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListExperimentsRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_experiment_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListExperimentsResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_experiment_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeleteExperimentRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_experiment_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ArchiveExperimentRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_experiment_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*UnarchiveExperimentRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v2beta1_experiment_proto_rawDesc, + RawDescriptor: 
unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_experiment_proto_rawDesc), len(file_backend_api_v2beta1_experiment_proto_rawDesc)), NumEnums: 1, NumMessages: 8, NumExtensions: 0, @@ -929,285 +691,6 @@ func file_backend_api_v2beta1_experiment_proto_init() { MessageInfos: file_backend_api_v2beta1_experiment_proto_msgTypes, }.Build() File_backend_api_v2beta1_experiment_proto = out.File - file_backend_api_v2beta1_experiment_proto_rawDesc = nil file_backend_api_v2beta1_experiment_proto_goTypes = nil file_backend_api_v2beta1_experiment_proto_depIdxs = nil } - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// ExperimentServiceClient is the client API for ExperimentService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type ExperimentServiceClient interface { - // Creates a new experiment. - CreateExperiment(ctx context.Context, in *CreateExperimentRequest, opts ...grpc.CallOption) (*Experiment, error) - // Finds a specific experiment by ID. - GetExperiment(ctx context.Context, in *GetExperimentRequest, opts ...grpc.CallOption) (*Experiment, error) - // Finds all experiments. Supports pagination, and sorting on certain fields. - ListExperiments(ctx context.Context, in *ListExperimentsRequest, opts ...grpc.CallOption) (*ListExperimentsResponse, error) - // Archives an experiment and the experiment's runs and recurring runs. - ArchiveExperiment(ctx context.Context, in *ArchiveExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - // Restores an archived experiment. The experiment's archived runs and recurring - // runs will stay archived. - UnarchiveExperiment(ctx context.Context, in *UnarchiveExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - // Deletes an experiment without deleting the experiment's runs and recurring - // runs. To avoid unexpected behaviors, delete an experiment's runs and recurring - // runs before deleting the experiment. - DeleteExperiment(ctx context.Context, in *DeleteExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) -} - -type experimentServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewExperimentServiceClient(cc grpc.ClientConnInterface) ExperimentServiceClient { - return &experimentServiceClient{cc} -} - -func (c *experimentServiceClient) CreateExperiment(ctx context.Context, in *CreateExperimentRequest, opts ...grpc.CallOption) (*Experiment, error) { - out := new(Experiment) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/CreateExperiment", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *experimentServiceClient) GetExperiment(ctx context.Context, in *GetExperimentRequest, opts ...grpc.CallOption) (*Experiment, error) { - out := new(Experiment) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/GetExperiment", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *experimentServiceClient) ListExperiments(ctx context.Context, in *ListExperimentsRequest, opts ...grpc.CallOption) (*ListExperimentsResponse, error) { - out := new(ListExperimentsResponse) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/ListExperiments", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *experimentServiceClient) ArchiveExperiment(ctx context.Context, in *ArchiveExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/ArchiveExperiment", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *experimentServiceClient) UnarchiveExperiment(ctx context.Context, in *UnarchiveExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/UnarchiveExperiment", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *experimentServiceClient) DeleteExperiment(ctx context.Context, in *DeleteExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/DeleteExperiment", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// ExperimentServiceServer is the server API for ExperimentService service. -type ExperimentServiceServer interface { - // Creates a new experiment. - CreateExperiment(context.Context, *CreateExperimentRequest) (*Experiment, error) - // Finds a specific experiment by ID. - GetExperiment(context.Context, *GetExperimentRequest) (*Experiment, error) - // Finds all experiments. Supports pagination, and sorting on certain fields. - ListExperiments(context.Context, *ListExperimentsRequest) (*ListExperimentsResponse, error) - // Archives an experiment and the experiment's runs and recurring runs. - ArchiveExperiment(context.Context, *ArchiveExperimentRequest) (*emptypb.Empty, error) - // Restores an archived experiment. The experiment's archived runs and recurring - // runs will stay archived. - UnarchiveExperiment(context.Context, *UnarchiveExperimentRequest) (*emptypb.Empty, error) - // Deletes an experiment without deleting the experiment's runs and recurring - // runs. To avoid unexpected behaviors, delete an experiment's runs and recurring - // runs before deleting the experiment. - DeleteExperiment(context.Context, *DeleteExperimentRequest) (*emptypb.Empty, error) -} - -// UnimplementedExperimentServiceServer can be embedded to have forward compatible implementations. 
-type UnimplementedExperimentServiceServer struct { -} - -func (*UnimplementedExperimentServiceServer) CreateExperiment(context.Context, *CreateExperimentRequest) (*Experiment, error) { - return nil, status.Errorf(codes.Unimplemented, "method CreateExperiment not implemented") -} -func (*UnimplementedExperimentServiceServer) GetExperiment(context.Context, *GetExperimentRequest) (*Experiment, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetExperiment not implemented") -} -func (*UnimplementedExperimentServiceServer) ListExperiments(context.Context, *ListExperimentsRequest) (*ListExperimentsResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ListExperiments not implemented") -} -func (*UnimplementedExperimentServiceServer) ArchiveExperiment(context.Context, *ArchiveExperimentRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method ArchiveExperiment not implemented") -} -func (*UnimplementedExperimentServiceServer) UnarchiveExperiment(context.Context, *UnarchiveExperimentRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method UnarchiveExperiment not implemented") -} -func (*UnimplementedExperimentServiceServer) DeleteExperiment(context.Context, *DeleteExperimentRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method DeleteExperiment not implemented") -} - -func RegisterExperimentServiceServer(s *grpc.Server, srv ExperimentServiceServer) { - s.RegisterService(&_ExperimentService_serviceDesc, srv) -} - -func _ExperimentService_CreateExperiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CreateExperimentRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ExperimentServiceServer).CreateExperiment(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/CreateExperiment", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ExperimentServiceServer).CreateExperiment(ctx, req.(*CreateExperimentRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ExperimentService_GetExperiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetExperimentRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ExperimentServiceServer).GetExperiment(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/GetExperiment", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ExperimentServiceServer).GetExperiment(ctx, req.(*GetExperimentRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ExperimentService_ListExperiments_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ListExperimentsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ExperimentServiceServer).ListExperiments(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/ListExperiments", - } - handler := 
func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ExperimentServiceServer).ListExperiments(ctx, req.(*ListExperimentsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ExperimentService_ArchiveExperiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ArchiveExperimentRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ExperimentServiceServer).ArchiveExperiment(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/ArchiveExperiment", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ExperimentServiceServer).ArchiveExperiment(ctx, req.(*ArchiveExperimentRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ExperimentService_UnarchiveExperiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(UnarchiveExperimentRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ExperimentServiceServer).UnarchiveExperiment(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/UnarchiveExperiment", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ExperimentServiceServer).UnarchiveExperiment(ctx, req.(*UnarchiveExperimentRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ExperimentService_DeleteExperiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(DeleteExperimentRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ExperimentServiceServer).DeleteExperiment(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/DeleteExperiment", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ExperimentServiceServer).DeleteExperiment(ctx, req.(*DeleteExperimentRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _ExperimentService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "kubeflow.pipelines.backend.api.v2beta1.ExperimentService", - HandlerType: (*ExperimentServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "CreateExperiment", - Handler: _ExperimentService_CreateExperiment_Handler, - }, - { - MethodName: "GetExperiment", - Handler: _ExperimentService_GetExperiment_Handler, - }, - { - MethodName: "ListExperiments", - Handler: _ExperimentService_ListExperiments_Handler, - }, - { - MethodName: "ArchiveExperiment", - Handler: _ExperimentService_ArchiveExperiment_Handler, - }, - { - MethodName: "UnarchiveExperiment", - Handler: _ExperimentService_UnarchiveExperiment_Handler, - }, - { - MethodName: "DeleteExperiment", - Handler: _ExperimentService_DeleteExperiment_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "backend/api/v2beta1/experiment.proto", -} diff --git a/backend/api/v2beta1/go_client/experiment.pb.gw.go b/backend/api/v2beta1/go_client/experiment.pb.gw.go index 6ecf6f7f523..d0cc10a4ccc 100644 --- 
a/backend/api/v2beta1/go_client/experiment.pb.gw.go +++ b/backend/api/v2beta1/go_client/experiment.pb.gw.go @@ -10,457 +10,374 @@ package go_client import ( "context" + "errors" "io" "net/http" - "github.com/golang/protobuf/descriptor" - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" ) // Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = descriptor.ForMessage -var _ = metadata.Join +var ( + _ codes.Code + _ io.Reader + _ status.Status + _ = errors.New + _ = runtime.String + _ = utilities.NewDoubleArray + _ = metadata.Join +) func request_ExperimentService_CreateExperiment_0(ctx context.Context, marshaler runtime.Marshaler, client ExperimentServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreateExperimentRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Experiment); err != nil && err != io.EOF { + var ( + protoReq CreateExperimentRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Experiment); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } msg, err := client.CreateExperiment(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_ExperimentService_CreateExperiment_0(ctx context.Context, marshaler runtime.Marshaler, server ExperimentServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreateExperimentRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Experiment); err != nil && err != io.EOF { + var ( + protoReq CreateExperimentRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Experiment); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.CreateExperiment(ctx, &protoReq) return msg, metadata, err - } func request_ExperimentService_GetExperiment_0(ctx context.Context, marshaler runtime.Marshaler, client ExperimentServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetExperimentRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetExperimentRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["experiment_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + 
} + val, ok := pathParams["experiment_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "experiment_id") } - protoReq.ExperimentId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "experiment_id", err) } - msg, err := client.GetExperiment(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_ExperimentService_GetExperiment_0(ctx context.Context, marshaler runtime.Marshaler, server ExperimentServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetExperimentRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetExperimentRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["experiment_id"] + val, ok := pathParams["experiment_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "experiment_id") } - protoReq.ExperimentId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "experiment_id", err) } - msg, err := server.GetExperiment(ctx, &protoReq) return msg, metadata, err - } -var ( - filter_ExperimentService_ListExperiments_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} -) +var filter_ExperimentService_ListExperiments_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} func request_ExperimentService_ListExperiments_0(ctx context.Context, marshaler runtime.Marshaler, client ExperimentServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListExperimentsRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListExperimentsRequest + metadata runtime.ServerMetadata + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ExperimentService_ListExperiments_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListExperiments(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_ExperimentService_ListExperiments_0(ctx context.Context, marshaler runtime.Marshaler, server ExperimentServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListExperimentsRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListExperimentsRequest + metadata runtime.ServerMetadata + ) if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ExperimentService_ListExperiments_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.ListExperiments(ctx, &protoReq) return msg, metadata, err - } func request_ExperimentService_ArchiveExperiment_0(ctx context.Context, marshaler runtime.Marshaler, client ExperimentServiceClient, req *http.Request, pathParams 
map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ArchiveExperimentRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq ArchiveExperimentRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["experiment_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["experiment_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "experiment_id") } - protoReq.ExperimentId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "experiment_id", err) } - msg, err := client.ArchiveExperiment(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_ExperimentService_ArchiveExperiment_0(ctx context.Context, marshaler runtime.Marshaler, server ExperimentServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ArchiveExperimentRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq ArchiveExperimentRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["experiment_id"] + val, ok := pathParams["experiment_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "experiment_id") } - protoReq.ExperimentId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "experiment_id", err) } - msg, err := server.ArchiveExperiment(ctx, &protoReq) return msg, metadata, err - } func request_ExperimentService_UnarchiveExperiment_0(ctx context.Context, marshaler runtime.Marshaler, client ExperimentServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq UnarchiveExperimentRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq UnarchiveExperimentRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["experiment_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["experiment_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "experiment_id") } - protoReq.ExperimentId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "experiment_id", err) } - msg, err := client.UnarchiveExperiment(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_ExperimentService_UnarchiveExperiment_0(ctx context.Context, marshaler runtime.Marshaler, server ExperimentServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq UnarchiveExperimentRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq UnarchiveExperimentRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["experiment_id"] + val, ok := pathParams["experiment_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "experiment_id") } - 
protoReq.ExperimentId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "experiment_id", err) } - msg, err := server.UnarchiveExperiment(ctx, &protoReq) return msg, metadata, err - } func request_ExperimentService_DeleteExperiment_0(ctx context.Context, marshaler runtime.Marshaler, client ExperimentServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeleteExperimentRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DeleteExperimentRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["experiment_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["experiment_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "experiment_id") } - protoReq.ExperimentId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "experiment_id", err) } - msg, err := client.DeleteExperiment(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_ExperimentService_DeleteExperiment_0(ctx context.Context, marshaler runtime.Marshaler, server ExperimentServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeleteExperimentRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DeleteExperimentRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["experiment_id"] + val, ok := pathParams["experiment_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "experiment_id") } - protoReq.ExperimentId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "experiment_id", err) } - msg, err := server.DeleteExperiment(ctx, &protoReq) return msg, metadata, err - } // RegisterExperimentServiceHandlerServer registers the http handlers for service ExperimentService to "mux". // UnaryRPC :call ExperimentServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. // Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterExperimentServiceHandlerFromEndpoint instead. +// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call. 
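An illustrative, non-generated sketch of the direct-registration option described in the comment above: the mux decodes REST calls and invokes the server implementation in-process, so gRPC interceptors never run. The package name, address parameter, and module import path are assumptions.

package gatewayexample // hypothetical package for this sketch

import (
	"context"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"

	api "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
)

// serveInProcess exposes the REST mapping of ExperimentService without a gRPC
// connection: HTTP requests are decoded and handed straight to srv.
func serveInProcess(ctx context.Context, srv api.ExperimentServiceServer, addr string) error {
	mux := runtime.NewServeMux()
	if err := api.RegisterExperimentServiceHandlerServer(ctx, mux, srv); err != nil {
		return err
	}
	return http.ListenAndServe(addr, mux)
}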
func RegisterExperimentServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server ExperimentServiceServer) error { - - mux.Handle("POST", pattern_ExperimentService_CreateExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_ExperimentService_CreateExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/CreateExperiment", runtime.WithHTTPPathPattern("/apis/v2beta1/experiments")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_ExperimentService_CreateExperiment_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_ExperimentService_CreateExperiment_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_CreateExperiment_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_CreateExperiment_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_ExperimentService_GetExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_ExperimentService_GetExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/GetExperiment", runtime.WithHTTPPathPattern("/apis/v2beta1/experiments/{experiment_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_ExperimentService_GetExperiment_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_ExperimentService_GetExperiment_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_GetExperiment_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_GetExperiment_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_ExperimentService_ListExperiments_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_ExperimentService_ListExperiments_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/ListExperiments", runtime.WithHTTPPathPattern("/apis/v2beta1/experiments")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_ExperimentService_ListExperiments_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_ExperimentService_ListExperiments_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_ListExperiments_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- + forward_ExperimentService_ListExperiments_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_ExperimentService_ArchiveExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_ExperimentService_ArchiveExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/ArchiveExperiment", runtime.WithHTTPPathPattern("/apis/v2beta1/experiments/{experiment_id}:archive")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_ExperimentService_ArchiveExperiment_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_ExperimentService_ArchiveExperiment_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_ArchiveExperiment_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_ArchiveExperiment_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_ExperimentService_UnarchiveExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_ExperimentService_UnarchiveExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/UnarchiveExperiment", runtime.WithHTTPPathPattern("/apis/v2beta1/experiments/{experiment_id}:unarchive")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_ExperimentService_UnarchiveExperiment_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_ExperimentService_UnarchiveExperiment_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_UnarchiveExperiment_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_UnarchiveExperiment_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("DELETE", pattern_ExperimentService_DeleteExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodDelete, pattern_ExperimentService_DeleteExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/DeleteExperiment", runtime.WithHTTPPathPattern("/apis/v2beta1/experiments/{experiment_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_ExperimentService_DeleteExperiment_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_ExperimentService_DeleteExperiment_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_DeleteExperiment_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_DeleteExperiment_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) return nil @@ -469,25 +386,24 @@ func RegisterExperimentServiceHandlerServer(ctx context.Context, mux *runtime.Se // RegisterExperimentServiceHandlerFromEndpoint is same as RegisterExperimentServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterExperimentServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) + conn, err := grpc.NewClient(endpoint, opts...) if err != nil { return err } defer func() { if err != nil { if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } return } go func() { <-ctx.Done() if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } }() }() - return RegisterExperimentServiceHandler(ctx, mux, conn) } @@ -501,156 +417,127 @@ func RegisterExperimentServiceHandler(ctx context.Context, mux *runtime.ServeMux // to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "ExperimentServiceClient". // Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "ExperimentServiceClient" // doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "ExperimentServiceClient" to call the correct interceptors. +// "ExperimentServiceClient" to call the correct interceptors. This client ignores the HTTP middlewares. 
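For the more common proxy setup, the FromEndpoint helper above dials the gRPC endpoint (now via grpc.NewClient rather than grpc.Dial) and forwards REST traffic to it. A minimal sketch, assuming a local plaintext endpoint and the usual module import path:

package main

import (
	"context"
	"log"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"

	api "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	mux := runtime.NewServeMux()
	// grpc.NewClient requires explicit transport credentials; plaintext is an
	// assumption for local testing only.
	opts := []grpc.DialOption{grpc.WithTransportCredentials(insecure.NewCredentials())}
	if err := api.RegisterExperimentServiceHandlerFromEndpoint(ctx, mux, "localhost:8887", opts); err != nil {
		log.Fatal(err)
	}
	log.Fatal(http.ListenAndServe(":8888", mux))
}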
func RegisterExperimentServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client ExperimentServiceClient) error { - - mux.Handle("POST", pattern_ExperimentService_CreateExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_ExperimentService_CreateExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/CreateExperiment", runtime.WithHTTPPathPattern("/apis/v2beta1/experiments")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_ExperimentService_CreateExperiment_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_ExperimentService_CreateExperiment_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_CreateExperiment_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_CreateExperiment_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_ExperimentService_GetExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_ExperimentService_GetExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/GetExperiment", runtime.WithHTTPPathPattern("/apis/v2beta1/experiments/{experiment_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_ExperimentService_GetExperiment_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_ExperimentService_GetExperiment_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_GetExperiment_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_GetExperiment_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_ExperimentService_ListExperiments_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_ExperimentService_ListExperiments_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/ListExperiments", runtime.WithHTTPPathPattern("/apis/v2beta1/experiments")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_ExperimentService_ListExperiments_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_ExperimentService_ListExperiments_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_ListExperiments_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_ListExperiments_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_ExperimentService_ArchiveExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_ExperimentService_ArchiveExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/ArchiveExperiment", runtime.WithHTTPPathPattern("/apis/v2beta1/experiments/{experiment_id}:archive")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_ExperimentService_ArchiveExperiment_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_ExperimentService_ArchiveExperiment_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_ArchiveExperiment_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_ArchiveExperiment_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_ExperimentService_UnarchiveExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_ExperimentService_UnarchiveExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/UnarchiveExperiment", runtime.WithHTTPPathPattern("/apis/v2beta1/experiments/{experiment_id}:unarchive")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_ExperimentService_UnarchiveExperiment_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_ExperimentService_UnarchiveExperiment_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_UnarchiveExperiment_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_UnarchiveExperiment_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("DELETE", pattern_ExperimentService_DeleteExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodDelete, pattern_ExperimentService_DeleteExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/DeleteExperiment", runtime.WithHTTPPathPattern("/apis/v2beta1/experiments/{experiment_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_ExperimentService_DeleteExperiment_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_ExperimentService_DeleteExperiment_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ExperimentService_DeleteExperiment_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ExperimentService_DeleteExperiment_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - return nil } var ( - pattern_ExperimentService_CreateExperiment_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "experiments"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_ExperimentService_GetExperiment_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "experiments", "experiment_id"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_ExperimentService_ListExperiments_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "experiments"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_ExperimentService_ArchiveExperiment_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "experiments", "experiment_id"}, "archive", runtime.AssumeColonVerbOpt(true))) - - pattern_ExperimentService_UnarchiveExperiment_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "experiments", "experiment_id"}, "unarchive", runtime.AssumeColonVerbOpt(true))) - - pattern_ExperimentService_DeleteExperiment_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "experiments", "experiment_id"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_ExperimentService_CreateExperiment_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "experiments"}, "")) + pattern_ExperimentService_GetExperiment_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "experiments", "experiment_id"}, "")) + pattern_ExperimentService_ListExperiments_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "experiments"}, "")) + pattern_ExperimentService_ArchiveExperiment_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "experiments", "experiment_id"}, "archive")) + pattern_ExperimentService_UnarchiveExperiment_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "experiments", "experiment_id"}, "unarchive")) + pattern_ExperimentService_DeleteExperiment_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "experiments", "experiment_id"}, "")) ) var ( - forward_ExperimentService_CreateExperiment_0 = runtime.ForwardResponseMessage - - forward_ExperimentService_GetExperiment_0 = runtime.ForwardResponseMessage - - forward_ExperimentService_ListExperiments_0 = runtime.ForwardResponseMessage - - forward_ExperimentService_ArchiveExperiment_0 = runtime.ForwardResponseMessage - + forward_ExperimentService_CreateExperiment_0 = runtime.ForwardResponseMessage + forward_ExperimentService_GetExperiment_0 = runtime.ForwardResponseMessage + forward_ExperimentService_ListExperiments_0 = runtime.ForwardResponseMessage + forward_ExperimentService_ArchiveExperiment_0 = runtime.ForwardResponseMessage forward_ExperimentService_UnarchiveExperiment_0 = runtime.ForwardResponseMessage - - forward_ExperimentService_DeleteExperiment_0 = runtime.ForwardResponseMessage + forward_ExperimentService_DeleteExperiment_0 = runtime.ForwardResponseMessage ) diff --git a/backend/api/v2beta1/go_client/experiment_grpc.pb.go b/backend/api/v2beta1/go_client/experiment_grpc.pb.go new file 
mode 100644 index 00000000000..50db23ecc5b --- /dev/null +++ b/backend/api/v2beta1/go_client/experiment_grpc.pb.go @@ -0,0 +1,344 @@ +// Copyright 2018 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v6.31.1 +// source: backend/api/v2beta1/experiment.proto + +package go_client + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + emptypb "google.golang.org/protobuf/types/known/emptypb" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + ExperimentService_CreateExperiment_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/CreateExperiment" + ExperimentService_GetExperiment_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/GetExperiment" + ExperimentService_ListExperiments_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/ListExperiments" + ExperimentService_ArchiveExperiment_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/ArchiveExperiment" + ExperimentService_UnarchiveExperiment_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/UnarchiveExperiment" + ExperimentService_DeleteExperiment_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.ExperimentService/DeleteExperiment" +) + +// ExperimentServiceClient is the client API for ExperimentService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type ExperimentServiceClient interface { + // Creates a new experiment. + CreateExperiment(ctx context.Context, in *CreateExperimentRequest, opts ...grpc.CallOption) (*Experiment, error) + // Finds a specific experiment by ID. + GetExperiment(ctx context.Context, in *GetExperimentRequest, opts ...grpc.CallOption) (*Experiment, error) + // Finds all experiments. Supports pagination, and sorting on certain fields. + ListExperiments(ctx context.Context, in *ListExperimentsRequest, opts ...grpc.CallOption) (*ListExperimentsResponse, error) + // Archives an experiment and the experiment's runs and recurring runs. + ArchiveExperiment(ctx context.Context, in *ArchiveExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + // Restores an archived experiment. The experiment's archived runs and recurring + // runs will stay archived. + UnarchiveExperiment(ctx context.Context, in *UnarchiveExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + // Deletes an experiment without deleting the experiment's runs and recurring + // runs. 
To avoid unexpected behaviors, delete an experiment's runs and recurring + // runs before deleting the experiment. + DeleteExperiment(ctx context.Context, in *DeleteExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) +} + +type experimentServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewExperimentServiceClient(cc grpc.ClientConnInterface) ExperimentServiceClient { + return &experimentServiceClient{cc} +} + +func (c *experimentServiceClient) CreateExperiment(ctx context.Context, in *CreateExperimentRequest, opts ...grpc.CallOption) (*Experiment, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Experiment) + err := c.cc.Invoke(ctx, ExperimentService_CreateExperiment_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *experimentServiceClient) GetExperiment(ctx context.Context, in *GetExperimentRequest, opts ...grpc.CallOption) (*Experiment, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Experiment) + err := c.cc.Invoke(ctx, ExperimentService_GetExperiment_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *experimentServiceClient) ListExperiments(ctx context.Context, in *ListExperimentsRequest, opts ...grpc.CallOption) (*ListExperimentsResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ListExperimentsResponse) + err := c.cc.Invoke(ctx, ExperimentService_ListExperiments_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *experimentServiceClient) ArchiveExperiment(ctx context.Context, in *ArchiveExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, ExperimentService_ArchiveExperiment_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *experimentServiceClient) UnarchiveExperiment(ctx context.Context, in *UnarchiveExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, ExperimentService_UnarchiveExperiment_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *experimentServiceClient) DeleteExperiment(ctx context.Context, in *DeleteExperimentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, ExperimentService_DeleteExperiment_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ExperimentServiceServer is the server API for ExperimentService service. +// All implementations must embed UnimplementedExperimentServiceServer +// for forward compatibility. +type ExperimentServiceServer interface { + // Creates a new experiment. + CreateExperiment(context.Context, *CreateExperimentRequest) (*Experiment, error) + // Finds a specific experiment by ID. + GetExperiment(context.Context, *GetExperimentRequest) (*Experiment, error) + // Finds all experiments. Supports pagination, and sorting on certain fields. 
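A hedged example of calling the typed client defined above; the address, page size, and printed fields are assumptions based on the v2beta1 experiment messages, not on this diff:

package main

import (
	"context"
	"fmt"
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"

	api "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
)

func main() {
	conn, err := grpc.NewClient("localhost:8887", grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := api.NewExperimentServiceClient(conn)
	resp, err := client.ListExperiments(context.Background(), &api.ListExperimentsRequest{PageSize: 10})
	if err != nil {
		log.Fatal(err)
	}
	for _, e := range resp.GetExperiments() {
		fmt.Println(e.GetExperimentId(), e.GetDisplayName())
	}
}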
+ ListExperiments(context.Context, *ListExperimentsRequest) (*ListExperimentsResponse, error) + // Archives an experiment and the experiment's runs and recurring runs. + ArchiveExperiment(context.Context, *ArchiveExperimentRequest) (*emptypb.Empty, error) + // Restores an archived experiment. The experiment's archived runs and recurring + // runs will stay archived. + UnarchiveExperiment(context.Context, *UnarchiveExperimentRequest) (*emptypb.Empty, error) + // Deletes an experiment without deleting the experiment's runs and recurring + // runs. To avoid unexpected behaviors, delete an experiment's runs and recurring + // runs before deleting the experiment. + DeleteExperiment(context.Context, *DeleteExperimentRequest) (*emptypb.Empty, error) + mustEmbedUnimplementedExperimentServiceServer() +} + +// UnimplementedExperimentServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. +type UnimplementedExperimentServiceServer struct{} + +func (UnimplementedExperimentServiceServer) CreateExperiment(context.Context, *CreateExperimentRequest) (*Experiment, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreateExperiment not implemented") +} +func (UnimplementedExperimentServiceServer) GetExperiment(context.Context, *GetExperimentRequest) (*Experiment, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetExperiment not implemented") +} +func (UnimplementedExperimentServiceServer) ListExperiments(context.Context, *ListExperimentsRequest) (*ListExperimentsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListExperiments not implemented") +} +func (UnimplementedExperimentServiceServer) ArchiveExperiment(context.Context, *ArchiveExperimentRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method ArchiveExperiment not implemented") +} +func (UnimplementedExperimentServiceServer) UnarchiveExperiment(context.Context, *UnarchiveExperimentRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method UnarchiveExperiment not implemented") +} +func (UnimplementedExperimentServiceServer) DeleteExperiment(context.Context, *DeleteExperimentRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeleteExperiment not implemented") +} +func (UnimplementedExperimentServiceServer) mustEmbedUnimplementedExperimentServiceServer() {} +func (UnimplementedExperimentServiceServer) testEmbeddedByValue() {} + +// UnsafeExperimentServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to ExperimentServiceServer will +// result in compilation errors. +type UnsafeExperimentServiceServer interface { + mustEmbedUnimplementedExperimentServiceServer() +} + +func RegisterExperimentServiceServer(s grpc.ServiceRegistrar, srv ExperimentServiceServer) { + // If the following call pancis, it indicates UnimplementedExperimentServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. 
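Following the note above, a concrete implementation embeds UnimplementedExperimentServiceServer by value and overrides only the methods it needs; the type name, placeholder behavior, and listen address are assumptions:

package main

import (
	"context"
	"log"
	"net"

	"google.golang.org/grpc"

	api "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
)

// experimentServer embeds the Unimplemented server by value so that methods
// added to the service later return codes.Unimplemented instead of panicking.
type experimentServer struct {
	api.UnimplementedExperimentServiceServer
}

func (s *experimentServer) GetExperiment(ctx context.Context, req *api.GetExperimentRequest) (*api.Experiment, error) {
	// Placeholder behavior; a real implementation would look up storage.
	return &api.Experiment{ExperimentId: req.GetExperimentId()}, nil
}

func main() {
	lis, err := net.Listen("tcp", ":8887")
	if err != nil {
		log.Fatal(err)
	}
	s := grpc.NewServer()
	api.RegisterExperimentServiceServer(s, &experimentServer{})
	log.Fatal(s.Serve(lis))
}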
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&ExperimentService_ServiceDesc, srv) +} + +func _ExperimentService_CreateExperiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateExperimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ExperimentServiceServer).CreateExperiment(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ExperimentService_CreateExperiment_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ExperimentServiceServer).CreateExperiment(ctx, req.(*CreateExperimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ExperimentService_GetExperiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetExperimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ExperimentServiceServer).GetExperiment(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ExperimentService_GetExperiment_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ExperimentServiceServer).GetExperiment(ctx, req.(*GetExperimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ExperimentService_ListExperiments_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListExperimentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ExperimentServiceServer).ListExperiments(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ExperimentService_ListExperiments_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ExperimentServiceServer).ListExperiments(ctx, req.(*ListExperimentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ExperimentService_ArchiveExperiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ArchiveExperimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ExperimentServiceServer).ArchiveExperiment(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ExperimentService_ArchiveExperiment_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ExperimentServiceServer).ArchiveExperiment(ctx, req.(*ArchiveExperimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ExperimentService_UnarchiveExperiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UnarchiveExperimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ExperimentServiceServer).UnarchiveExperiment(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ExperimentService_UnarchiveExperiment_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, 
error) { + return srv.(ExperimentServiceServer).UnarchiveExperiment(ctx, req.(*UnarchiveExperimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ExperimentService_DeleteExperiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteExperimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ExperimentServiceServer).DeleteExperiment(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ExperimentService_DeleteExperiment_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ExperimentServiceServer).DeleteExperiment(ctx, req.(*DeleteExperimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// ExperimentService_ServiceDesc is the grpc.ServiceDesc for ExperimentService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var ExperimentService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "kubeflow.pipelines.backend.api.v2beta1.ExperimentService", + HandlerType: (*ExperimentServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateExperiment", + Handler: _ExperimentService_CreateExperiment_Handler, + }, + { + MethodName: "GetExperiment", + Handler: _ExperimentService_GetExperiment_Handler, + }, + { + MethodName: "ListExperiments", + Handler: _ExperimentService_ListExperiments_Handler, + }, + { + MethodName: "ArchiveExperiment", + Handler: _ExperimentService_ArchiveExperiment_Handler, + }, + { + MethodName: "UnarchiveExperiment", + Handler: _ExperimentService_UnarchiveExperiment_Handler, + }, + { + MethodName: "DeleteExperiment", + Handler: _ExperimentService_DeleteExperiment_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "backend/api/v2beta1/experiment.proto", +} diff --git a/backend/api/v2beta1/go_client/filter.pb.go b/backend/api/v2beta1/go_client/filter.pb.go index 4b41438cb44..13c961a7cd5 100644 --- a/backend/api/v2beta1/go_client/filter.pb.go +++ b/backend/api/v2beta1/go_client/filter.pb.go @@ -14,22 +14,19 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v2beta1/filter.proto package go_client import ( - context "context" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" timestamppb "google.golang.org/protobuf/types/known/timestamppb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -162,21 +159,18 @@ func (Predicate_Operation) EnumDescriptor() ([]byte, []int) { // } // } type Filter struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // All predicates are AND-ed when this filter is applied. 
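As a usage illustration of the Filter message above (the key and value are illustrative, not taken from this diff), a single-predicate filter could be built like this; recall that all predicates in the list are AND-ed:

package filterexample // hypothetical package for this sketch

import (
	api "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
)

// equalsFilter builds a Filter with one EQUALS predicate on the given key.
// The oneof value is set through its generated wrapper type.
func equalsFilter(key, value string) *api.Filter {
	return &api.Filter{
		Predicates: []*api.Predicate{{
			Operation: api.Predicate_EQUALS,
			Key:       key,
			Value:     &api.Predicate_StringValue{StringValue: value},
		}},
	}
}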
- Predicates []*Predicate `protobuf:"bytes,1,rep,name=predicates,proto3" json:"predicates,omitempty"` + Predicates []*Predicate `protobuf:"bytes,1,rep,name=predicates,proto3" json:"predicates,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Filter) Reset() { *x = Filter{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_filter_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_filter_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Filter) String() string { @@ -187,7 +181,7 @@ func (*Filter) ProtoMessage() {} func (x *Filter) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_filter_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -212,16 +206,13 @@ func (x *Filter) GetPredicates() []*Predicate { // Predicate captures individual conditions that must be true for a resource // being filtered. type Predicate struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Operation Predicate_Operation `protobuf:"varint,1,opt,name=operation,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.Predicate_Operation" json:"operation,omitempty"` + state protoimpl.MessageState `protogen:"open.v1"` + Operation Predicate_Operation `protobuf:"varint,1,opt,name=operation,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.Predicate_Operation" json:"operation,omitempty"` // Key for the operation (first argument). Key string `protobuf:"bytes,2,opt,name=key,proto3" json:"key,omitempty"` // Value for the operation (second argument). 
// - // Types that are assignable to Value: + // Types that are valid to be assigned to Value: // // *Predicate_IntValue // *Predicate_LongValue @@ -230,16 +221,16 @@ type Predicate struct { // *Predicate_IntValues_ // *Predicate_LongValues_ // *Predicate_StringValues_ - Value isPredicate_Value `protobuf_oneof:"value"` + Value isPredicate_Value `protobuf_oneof:"value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Predicate) Reset() { *x = Predicate{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_filter_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_filter_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Predicate) String() string { @@ -250,7 +241,7 @@ func (*Predicate) ProtoMessage() {} func (x *Predicate) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_filter_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -279,58 +270,72 @@ func (x *Predicate) GetKey() string { return "" } -func (m *Predicate) GetValue() isPredicate_Value { - if m != nil { - return m.Value +func (x *Predicate) GetValue() isPredicate_Value { + if x != nil { + return x.Value } return nil } func (x *Predicate) GetIntValue() int32 { - if x, ok := x.GetValue().(*Predicate_IntValue); ok { - return x.IntValue + if x != nil { + if x, ok := x.Value.(*Predicate_IntValue); ok { + return x.IntValue + } } return 0 } func (x *Predicate) GetLongValue() int64 { - if x, ok := x.GetValue().(*Predicate_LongValue); ok { - return x.LongValue + if x != nil { + if x, ok := x.Value.(*Predicate_LongValue); ok { + return x.LongValue + } } return 0 } func (x *Predicate) GetStringValue() string { - if x, ok := x.GetValue().(*Predicate_StringValue); ok { - return x.StringValue + if x != nil { + if x, ok := x.Value.(*Predicate_StringValue); ok { + return x.StringValue + } } return "" } func (x *Predicate) GetTimestampValue() *timestamppb.Timestamp { - if x, ok := x.GetValue().(*Predicate_TimestampValue); ok { - return x.TimestampValue + if x != nil { + if x, ok := x.Value.(*Predicate_TimestampValue); ok { + return x.TimestampValue + } } return nil } func (x *Predicate) GetIntValues() *Predicate_IntValues { - if x, ok := x.GetValue().(*Predicate_IntValues_); ok { - return x.IntValues + if x != nil { + if x, ok := x.Value.(*Predicate_IntValues_); ok { + return x.IntValues + } } return nil } func (x *Predicate) GetLongValues() *Predicate_LongValues { - if x, ok := x.GetValue().(*Predicate_LongValues_); ok { - return x.LongValues + if x != nil { + if x, ok := x.Value.(*Predicate_LongValues_); ok { + return x.LongValues + } } return nil } func (x *Predicate) GetStringValues() *Predicate_StringValues { - if x, ok := x.GetValue().(*Predicate_StringValues_); ok { - return x.StringValues + if x != nil { + if x, ok := x.Value.(*Predicate_StringValues_); ok { + return x.StringValues + } } return nil } @@ -391,20 +396,17 @@ func (*Predicate_StringValues_) isPredicate_Value() {} // List of integers. 
type Predicate_IntValues struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Values []int32 `protobuf:"varint,1,rep,packed,name=values,proto3" json:"values,omitempty"` unknownFields protoimpl.UnknownFields - - Values []int32 `protobuf:"varint,1,rep,packed,name=values,proto3" json:"values,omitempty"` + sizeCache protoimpl.SizeCache } func (x *Predicate_IntValues) Reset() { *x = Predicate_IntValues{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_filter_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_filter_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Predicate_IntValues) String() string { @@ -415,7 +417,7 @@ func (*Predicate_IntValues) ProtoMessage() {} func (x *Predicate_IntValues) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_filter_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -439,20 +441,17 @@ func (x *Predicate_IntValues) GetValues() []int32 { // List of strings. type Predicate_StringValues struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Values []string `protobuf:"bytes,2,rep,name=values,proto3" json:"values,omitempty"` unknownFields protoimpl.UnknownFields - - Values []string `protobuf:"bytes,2,rep,name=values,proto3" json:"values,omitempty"` + sizeCache protoimpl.SizeCache } func (x *Predicate_StringValues) Reset() { *x = Predicate_StringValues{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_filter_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_filter_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Predicate_StringValues) String() string { @@ -463,7 +462,7 @@ func (*Predicate_StringValues) ProtoMessage() {} func (x *Predicate_StringValues) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_filter_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -487,20 +486,17 @@ func (x *Predicate_StringValues) GetValues() []string { // List of long integers. 
type Predicate_LongValues struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Values []int64 `protobuf:"varint,3,rep,packed,name=values,proto3" json:"values,omitempty"` unknownFields protoimpl.UnknownFields - - Values []int64 `protobuf:"varint,3,rep,packed,name=values,proto3" json:"values,omitempty"` + sizeCache protoimpl.SizeCache } func (x *Predicate_LongValues) Reset() { *x = Predicate_LongValues{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_filter_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_filter_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Predicate_LongValues) String() string { @@ -511,7 +507,7 @@ func (*Predicate_LongValues) ProtoMessage() {} func (x *Predicate_LongValues) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_filter_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -535,105 +531,64 @@ func (x *Predicate_LongValues) GetValues() []int64 { var File_backend_api_v2beta1_filter_proto protoreflect.FileDescriptor -var file_backend_api_v2beta1_filter_proto_rawDesc = []byte{ - 0x0a, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x12, 0x26, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x5b, 0x0a, 0x06, 0x46, - 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x51, 0x0a, 0x0a, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x61, - 0x74, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x6b, 0x75, 0x62, 0x65, - 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, - 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, - 0x61, 0x31, 0x2e, 0x50, 0x72, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x65, 0x52, 0x0a, 0x70, 0x72, - 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x65, 0x73, 0x22, 0xf5, 0x06, 0x0a, 0x09, 0x50, 0x72, 0x65, - 0x64, 0x69, 0x63, 0x61, 0x74, 0x65, 0x12, 0x59, 0x0a, 0x09, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3b, 0x2e, 0x6b, 0x75, 0x62, 0x65, - 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, - 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, - 0x61, 0x31, 0x2e, 0x50, 0x72, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x65, 0x2e, 0x4f, 0x70, 0x65, - 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, - 0x6b, 0x65, 0x79, 0x12, 0x1d, 0x0a, 0x09, 0x69, 0x6e, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x08, 0x69, 0x6e, 
0x74, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x12, 0x1f, 0x0a, 0x0a, 0x6c, 0x6f, 0x6e, 0x67, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x09, 0x6c, 0x6f, 0x6e, 0x67, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x12, 0x23, 0x0a, 0x0c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0b, 0x73, 0x74, 0x72, - 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x45, 0x0a, 0x0f, 0x74, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x48, 0x00, 0x52, - 0x0e, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, - 0x5c, 0x0a, 0x0a, 0x69, 0x6e, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x07, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x3b, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x50, 0x72, 0x65, - 0x64, 0x69, 0x63, 0x61, 0x74, 0x65, 0x2e, 0x49, 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, - 0x48, 0x00, 0x52, 0x09, 0x69, 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x5f, 0x0a, - 0x0b, 0x6c, 0x6f, 0x6e, 0x67, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x08, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x3c, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x50, 0x72, 0x65, 0x64, - 0x69, 0x63, 0x61, 0x74, 0x65, 0x2e, 0x4c, 0x6f, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, - 0x48, 0x00, 0x52, 0x0a, 0x6c, 0x6f, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x65, - 0x0a, 0x0d, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, - 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3e, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, - 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x50, - 0x72, 0x65, 0x64, 0x69, 0x63, 0x61, 0x74, 0x65, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x73, 0x48, 0x00, 0x52, 0x0c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x73, 0x1a, 0x23, 0x0a, 0x09, 0x49, 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, - 0x28, 0x05, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x1a, 0x26, 0x0a, 0x0c, 0x53, 0x74, - 0x72, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x73, 0x1a, 0x24, 0x0a, 0x0a, 0x4c, 0x6f, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, - 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x03, - 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x22, 0xac, 0x01, 0x0a, 0x09, 0x4f, 0x70, 0x65, - 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x19, 0x0a, 0x15, 0x4f, 0x50, 0x45, 0x52, 0x41, 0x54, - 
0x49, 0x4f, 0x4e, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, - 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x45, 0x51, 0x55, 0x41, 0x4c, 0x53, 0x10, 0x01, 0x12, 0x0e, 0x0a, - 0x0a, 0x4e, 0x4f, 0x54, 0x5f, 0x45, 0x51, 0x55, 0x41, 0x4c, 0x53, 0x10, 0x02, 0x12, 0x10, 0x0a, - 0x0c, 0x47, 0x52, 0x45, 0x41, 0x54, 0x45, 0x52, 0x5f, 0x54, 0x48, 0x41, 0x4e, 0x10, 0x03, 0x12, - 0x17, 0x0a, 0x13, 0x47, 0x52, 0x45, 0x41, 0x54, 0x45, 0x52, 0x5f, 0x54, 0x48, 0x41, 0x4e, 0x5f, - 0x45, 0x51, 0x55, 0x41, 0x4c, 0x53, 0x10, 0x05, 0x12, 0x0d, 0x0a, 0x09, 0x4c, 0x45, 0x53, 0x53, - 0x5f, 0x54, 0x48, 0x41, 0x4e, 0x10, 0x06, 0x12, 0x14, 0x0a, 0x10, 0x4c, 0x45, 0x53, 0x53, 0x5f, - 0x54, 0x48, 0x41, 0x4e, 0x5f, 0x45, 0x51, 0x55, 0x41, 0x4c, 0x53, 0x10, 0x07, 0x12, 0x06, 0x0a, - 0x02, 0x49, 0x4e, 0x10, 0x08, 0x12, 0x10, 0x0a, 0x0c, 0x49, 0x53, 0x5f, 0x53, 0x55, 0x42, 0x53, - 0x54, 0x52, 0x49, 0x4e, 0x47, 0x10, 0x09, 0x42, 0x07, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x32, 0x83, 0x01, 0x0a, 0x12, 0x44, 0x75, 0x6d, 0x6d, 0x79, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, - 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x6d, 0x0a, 0x09, 0x47, 0x65, 0x74, 0x46, 0x69, - 0x6c, 0x74, 0x65, 0x72, 0x12, 0x2e, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, - 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x46, 0x69, - 0x6c, 0x74, 0x65, 0x72, 0x1a, 0x2e, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, - 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x46, 0x69, - 0x6c, 0x74, 0x65, 0x72, 0x22, 0x00, 0x42, 0x3d, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, - 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, - 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x67, 0x6f, 0x5f, 0x63, - 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v2beta1_filter_proto_rawDesc = "" + + "\n" + + " backend/api/v2beta1/filter.proto\x12&kubeflow.pipelines.backend.api.v2beta1\x1a\x1fgoogle/protobuf/timestamp.proto\"[\n" + + "\x06Filter\x12Q\n" + + "\n" + + "predicates\x18\x01 \x03(\v21.kubeflow.pipelines.backend.api.v2beta1.PredicateR\n" + + "predicates\"\xf5\x06\n" + + "\tPredicate\x12Y\n" + + "\toperation\x18\x01 \x01(\x0e2;.kubeflow.pipelines.backend.api.v2beta1.Predicate.OperationR\toperation\x12\x10\n" + + "\x03key\x18\x02 \x01(\tR\x03key\x12\x1d\n" + + "\tint_value\x18\x03 \x01(\x05H\x00R\bintValue\x12\x1f\n" + + "\n" + + "long_value\x18\x04 \x01(\x03H\x00R\tlongValue\x12#\n" + + "\fstring_value\x18\x05 \x01(\tH\x00R\vstringValue\x12E\n" + + "\x0ftimestamp_value\x18\x06 \x01(\v2\x1a.google.protobuf.TimestampH\x00R\x0etimestampValue\x12\\\n" + + "\n" + + "int_values\x18\a \x01(\v2;.kubeflow.pipelines.backend.api.v2beta1.Predicate.IntValuesH\x00R\tintValues\x12_\n" + + "\vlong_values\x18\b \x01(\v2<.kubeflow.pipelines.backend.api.v2beta1.Predicate.LongValuesH\x00R\n" + + "longValues\x12e\n" + + "\rstring_values\x18\t \x01(\v2>.kubeflow.pipelines.backend.api.v2beta1.Predicate.StringValuesH\x00R\fstringValues\x1a#\n" + + "\tIntValues\x12\x16\n" + + "\x06values\x18\x01 \x03(\x05R\x06values\x1a&\n" + + 
"\fStringValues\x12\x16\n" + + "\x06values\x18\x02 \x03(\tR\x06values\x1a$\n" + + "\n" + + "LongValues\x12\x16\n" + + "\x06values\x18\x03 \x03(\x03R\x06values\"\xac\x01\n" + + "\tOperation\x12\x19\n" + + "\x15OPERATION_UNSPECIFIED\x10\x00\x12\n" + + "\n" + + "\x06EQUALS\x10\x01\x12\x0e\n" + + "\n" + + "NOT_EQUALS\x10\x02\x12\x10\n" + + "\fGREATER_THAN\x10\x03\x12\x17\n" + + "\x13GREATER_THAN_EQUALS\x10\x05\x12\r\n" + + "\tLESS_THAN\x10\x06\x12\x14\n" + + "\x10LESS_THAN_EQUALS\x10\a\x12\x06\n" + + "\x02IN\x10\b\x12\x10\n" + + "\fIS_SUBSTRING\x10\tB\a\n" + + "\x05value2\x83\x01\n" + + "\x12DummyFilterService\x12m\n" + + "\tGetFilter\x12..kubeflow.pipelines.backend.api.v2beta1.Filter\x1a..kubeflow.pipelines.backend.api.v2beta1.Filter\"\x00B=Z;github.com/kubeflow/pipelines/backend/api/v2beta1/go_clientb\x06proto3" var ( file_backend_api_v2beta1_filter_proto_rawDescOnce sync.Once - file_backend_api_v2beta1_filter_proto_rawDescData = file_backend_api_v2beta1_filter_proto_rawDesc + file_backend_api_v2beta1_filter_proto_rawDescData []byte ) func file_backend_api_v2beta1_filter_proto_rawDescGZIP() []byte { file_backend_api_v2beta1_filter_proto_rawDescOnce.Do(func() { - file_backend_api_v2beta1_filter_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v2beta1_filter_proto_rawDescData) + file_backend_api_v2beta1_filter_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_filter_proto_rawDesc), len(file_backend_api_v2beta1_filter_proto_rawDesc))) }) return file_backend_api_v2beta1_filter_proto_rawDescData } var file_backend_api_v2beta1_filter_proto_enumTypes = make([]protoimpl.EnumInfo, 1) var file_backend_api_v2beta1_filter_proto_msgTypes = make([]protoimpl.MessageInfo, 5) -var file_backend_api_v2beta1_filter_proto_goTypes = []interface{}{ +var file_backend_api_v2beta1_filter_proto_goTypes = []any{ (Predicate_Operation)(0), // 0: kubeflow.pipelines.backend.api.v2beta1.Predicate.Operation (*Filter)(nil), // 1: kubeflow.pipelines.backend.api.v2beta1.Filter (*Predicate)(nil), // 2: kubeflow.pipelines.backend.api.v2beta1.Predicate @@ -663,69 +618,7 @@ func file_backend_api_v2beta1_filter_proto_init() { if File_backend_api_v2beta1_filter_proto != nil { return } - if !protoimpl.UnsafeEnabled { - file_backend_api_v2beta1_filter_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Filter); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_filter_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Predicate); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_filter_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Predicate_IntValues); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_filter_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Predicate_StringValues); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_filter_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Predicate_LongValues); i { - case 0: - return &v.state - case 1: - return 
&v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_backend_api_v2beta1_filter_proto_msgTypes[1].OneofWrappers = []interface{}{ + file_backend_api_v2beta1_filter_proto_msgTypes[1].OneofWrappers = []any{ (*Predicate_IntValue)(nil), (*Predicate_LongValue)(nil), (*Predicate_StringValue)(nil), @@ -738,7 +631,7 @@ func file_backend_api_v2beta1_filter_proto_init() { out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v2beta1_filter_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_filter_proto_rawDesc), len(file_backend_api_v2beta1_filter_proto_rawDesc)), NumEnums: 1, NumMessages: 5, NumExtensions: 0, @@ -750,87 +643,6 @@ func file_backend_api_v2beta1_filter_proto_init() { MessageInfos: file_backend_api_v2beta1_filter_proto_msgTypes, }.Build() File_backend_api_v2beta1_filter_proto = out.File - file_backend_api_v2beta1_filter_proto_rawDesc = nil file_backend_api_v2beta1_filter_proto_goTypes = nil file_backend_api_v2beta1_filter_proto_depIdxs = nil } - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// DummyFilterServiceClient is the client API for DummyFilterService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type DummyFilterServiceClient interface { - GetFilter(ctx context.Context, in *Filter, opts ...grpc.CallOption) (*Filter, error) -} - -type dummyFilterServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewDummyFilterServiceClient(cc grpc.ClientConnInterface) DummyFilterServiceClient { - return &dummyFilterServiceClient{cc} -} - -func (c *dummyFilterServiceClient) GetFilter(ctx context.Context, in *Filter, opts ...grpc.CallOption) (*Filter, error) { - out := new(Filter) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.DummyFilterService/GetFilter", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// DummyFilterServiceServer is the server API for DummyFilterService service. -type DummyFilterServiceServer interface { - GetFilter(context.Context, *Filter) (*Filter, error) -} - -// UnimplementedDummyFilterServiceServer can be embedded to have forward compatible implementations. 
-type UnimplementedDummyFilterServiceServer struct { -} - -func (*UnimplementedDummyFilterServiceServer) GetFilter(context.Context, *Filter) (*Filter, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetFilter not implemented") -} - -func RegisterDummyFilterServiceServer(s *grpc.Server, srv DummyFilterServiceServer) { - s.RegisterService(&_DummyFilterService_serviceDesc, srv) -} - -func _DummyFilterService_GetFilter_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(Filter) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(DummyFilterServiceServer).GetFilter(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.DummyFilterService/GetFilter", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(DummyFilterServiceServer).GetFilter(ctx, req.(*Filter)) - } - return interceptor(ctx, in, info, handler) -} - -var _DummyFilterService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "kubeflow.pipelines.backend.api.v2beta1.DummyFilterService", - HandlerType: (*DummyFilterServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "GetFilter", - Handler: _DummyFilterService_GetFilter_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "backend/api/v2beta1/filter.proto", -} diff --git a/backend/api/v2beta1/go_client/filter_grpc.pb.go b/backend/api/v2beta1/go_client/filter_grpc.pb.go new file mode 100644 index 00000000000..eff1cb11e4c --- /dev/null +++ b/backend/api/v2beta1/go_client/filter_grpc.pb.go @@ -0,0 +1,145 @@ +// Copyright 2018 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v6.31.1 +// source: backend/api/v2beta1/filter.proto + +package go_client + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + DummyFilterService_GetFilter_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.DummyFilterService/GetFilter" +) + +// DummyFilterServiceClient is the client API for DummyFilterService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +// +// This dummy service is required so that grpc-gateway will generate Swagger +// definitions for the Filter message. Otherwise, it does not get generated +// since Filter itself is not used in any of the RPC calls - only a serialized +// encoded version of it is used. 
+type DummyFilterServiceClient interface { + GetFilter(ctx context.Context, in *Filter, opts ...grpc.CallOption) (*Filter, error) +} + +type dummyFilterServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewDummyFilterServiceClient(cc grpc.ClientConnInterface) DummyFilterServiceClient { + return &dummyFilterServiceClient{cc} +} + +func (c *dummyFilterServiceClient) GetFilter(ctx context.Context, in *Filter, opts ...grpc.CallOption) (*Filter, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Filter) + err := c.cc.Invoke(ctx, DummyFilterService_GetFilter_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DummyFilterServiceServer is the server API for DummyFilterService service. +// All implementations must embed UnimplementedDummyFilterServiceServer +// for forward compatibility. +// +// This dummy service is required so that grpc-gateway will generate Swagger +// definitions for the Filter message. Otherwise, it does not get generated +// since Filter itself is not used in any of the RPC calls - only a serialized +// encoded version of it is used. +type DummyFilterServiceServer interface { + GetFilter(context.Context, *Filter) (*Filter, error) + mustEmbedUnimplementedDummyFilterServiceServer() +} + +// UnimplementedDummyFilterServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. +type UnimplementedDummyFilterServiceServer struct{} + +func (UnimplementedDummyFilterServiceServer) GetFilter(context.Context, *Filter) (*Filter, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetFilter not implemented") +} +func (UnimplementedDummyFilterServiceServer) mustEmbedUnimplementedDummyFilterServiceServer() {} +func (UnimplementedDummyFilterServiceServer) testEmbeddedByValue() {} + +// UnsafeDummyFilterServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to DummyFilterServiceServer will +// result in compilation errors. +type UnsafeDummyFilterServiceServer interface { + mustEmbedUnimplementedDummyFilterServiceServer() +} + +func RegisterDummyFilterServiceServer(s grpc.ServiceRegistrar, srv DummyFilterServiceServer) { + // If the following call pancis, it indicates UnimplementedDummyFilterServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. 
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&DummyFilterService_ServiceDesc, srv) +} + +func _DummyFilterService_GetFilter_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Filter) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DummyFilterServiceServer).GetFilter(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: DummyFilterService_GetFilter_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DummyFilterServiceServer).GetFilter(ctx, req.(*Filter)) + } + return interceptor(ctx, in, info, handler) +} + +// DummyFilterService_ServiceDesc is the grpc.ServiceDesc for DummyFilterService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var DummyFilterService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "kubeflow.pipelines.backend.api.v2beta1.DummyFilterService", + HandlerType: (*DummyFilterServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetFilter", + Handler: _DummyFilterService_GetFilter_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "backend/api/v2beta1/filter.proto", +} diff --git a/backend/api/v2beta1/go_client/healthz.pb.go b/backend/api/v2beta1/go_client/healthz.pb.go index 28de6fb33c7..3c10aecbe82 100644 --- a/backend/api/v2beta1/go_client/healthz.pb.go +++ b/backend/api/v2beta1/go_client/healthz.pb.go @@ -14,25 +14,21 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v2beta1/healthz.proto package go_client import ( - context "context" - _ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options" + _ "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options" _ "google.golang.org/genproto/googleapis/api/annotations" - _ "google.golang.org/genproto/googleapis/rpc/status" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" emptypb "google.golang.org/protobuf/types/known/emptypb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -43,10 +39,7 @@ const ( ) type GetHealthzResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // TODO(gkcalat): redesign this service to return status // and move server configuration into a separate service // TODO(gkcalat): rename or deprecate v1beta1 HealthzService @@ -55,15 +48,15 @@ type GetHealthzResponse struct { MultiUser bool `protobuf:"varint,3,opt,name=multi_user,json=multiUser,proto3" json:"multi_user,omitempty"` // Returns the pipeline storage type (database or kubernetes) PipelineStore string `protobuf:"bytes,4,opt,name=pipeline_store,json=pipelineStore,proto3" json:"pipeline_store,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *GetHealthzResponse) Reset() { *x = GetHealthzResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_healthz_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - 
ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_healthz_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *GetHealthzResponse) String() string { @@ -74,7 +67,7 @@ func (*GetHealthzResponse) ProtoMessage() {} func (x *GetHealthzResponse) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_healthz_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -105,61 +98,38 @@ func (x *GetHealthzResponse) GetPipelineStore() string { var File_backend_api_v2beta1_healthz_proto protoreflect.FileDescriptor -var file_backend_api_v2beta1_healthz_proto_rawDesc = []byte{ - 0x0a, 0x21, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x68, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x7a, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x12, 0x26, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, 0x74, 0x79, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2c, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, 0x67, - 0x65, 0x6e, 0x2d, 0x73, 0x77, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x72, 0x70, 0x63, - 0x2f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x5a, 0x0a, - 0x12, 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x7a, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x5f, 0x75, 0x73, 0x65, - 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x55, 0x73, - 0x65, 0x72, 0x12, 0x25, 0x0a, 0x0e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x73, - 0x74, 0x6f, 0x72, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x32, 0x91, 0x01, 0x0a, 0x0e, 0x48, 0x65, - 0x61, 0x6c, 0x74, 0x68, 0x7a, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x7f, 0x0a, 0x0a, - 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x7a, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, - 0x74, 0x79, 0x1a, 0x3a, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x48, - 0x65, 0x61, 0x6c, 0x74, 0x68, 0x7a, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x1d, - 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x17, 0x12, 0x15, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x68, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x7a, 0x42, 0x98, 0x01, - 0x92, 0x41, 0x58, 
0x2a, 0x02, 0x01, 0x02, 0x52, 0x23, 0x0a, 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, - 0x6c, 0x74, 0x12, 0x18, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x5a, 0x1f, 0x0a, 0x1d, - 0x0a, 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x12, 0x13, 0x08, 0x02, 0x1a, 0x0d, 0x61, 0x75, - 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x02, 0x62, 0x0c, 0x0a, - 0x0a, 0x0a, 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x12, 0x00, 0x5a, 0x3b, 0x67, 0x69, 0x74, - 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, - 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, - 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x67, - 0x6f, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v2beta1_healthz_proto_rawDesc = "" + + "\n" + + "!backend/api/v2beta1/healthz.proto\x12&kubeflow.pipelines.backend.api.v2beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\"Z\n" + + "\x12GetHealthzResponse\x12\x1d\n" + + "\n" + + "multi_user\x18\x03 \x01(\bR\tmultiUser\x12%\n" + + "\x0epipeline_store\x18\x04 \x01(\tR\rpipelineStore2\x91\x01\n" + + "\x0eHealthzService\x12\x7f\n" + + "\n" + + "GetHealthz\x12\x16.google.protobuf.Empty\x1a:.kubeflow.pipelines.backend.api.v2beta1.GetHealthzResponse\"\x1d\x82\xd3\xe4\x93\x02\x17\x12\x15/apis/v2beta1/healthzB\x98\x01\x92AX*\x02\x01\x02R#\n" + + "\adefault\x12\x18\x12\x16\n" + + "\x14\x1a\x12.google.rpc.StatusZ\x1f\n" + + "\x1d\n" + + "\x06Bearer\x12\x13\b\x02\x1a\rauthorization \x02b\f\n" + + "\n" + + "\n" + + "\x06Bearer\x12\x00Z;github.com/kubeflow/pipelines/backend/api/v2beta1/go_clientb\x06proto3" var ( file_backend_api_v2beta1_healthz_proto_rawDescOnce sync.Once - file_backend_api_v2beta1_healthz_proto_rawDescData = file_backend_api_v2beta1_healthz_proto_rawDesc + file_backend_api_v2beta1_healthz_proto_rawDescData []byte ) func file_backend_api_v2beta1_healthz_proto_rawDescGZIP() []byte { file_backend_api_v2beta1_healthz_proto_rawDescOnce.Do(func() { - file_backend_api_v2beta1_healthz_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v2beta1_healthz_proto_rawDescData) + file_backend_api_v2beta1_healthz_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_healthz_proto_rawDesc), len(file_backend_api_v2beta1_healthz_proto_rawDesc))) }) return file_backend_api_v2beta1_healthz_proto_rawDescData } var file_backend_api_v2beta1_healthz_proto_msgTypes = make([]protoimpl.MessageInfo, 1) -var file_backend_api_v2beta1_healthz_proto_goTypes = []interface{}{ +var file_backend_api_v2beta1_healthz_proto_goTypes = []any{ (*GetHealthzResponse)(nil), // 0: kubeflow.pipelines.backend.api.v2beta1.GetHealthzResponse (*emptypb.Empty)(nil), // 1: google.protobuf.Empty } @@ -178,25 +148,11 @@ func file_backend_api_v2beta1_healthz_proto_init() { if File_backend_api_v2beta1_healthz_proto != nil { return } - if !protoimpl.UnsafeEnabled { - file_backend_api_v2beta1_healthz_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetHealthzResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: 
protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v2beta1_healthz_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_healthz_proto_rawDesc), len(file_backend_api_v2beta1_healthz_proto_rawDesc)), NumEnums: 0, NumMessages: 1, NumExtensions: 0, @@ -207,89 +163,6 @@ func file_backend_api_v2beta1_healthz_proto_init() { MessageInfos: file_backend_api_v2beta1_healthz_proto_msgTypes, }.Build() File_backend_api_v2beta1_healthz_proto = out.File - file_backend_api_v2beta1_healthz_proto_rawDesc = nil file_backend_api_v2beta1_healthz_proto_goTypes = nil file_backend_api_v2beta1_healthz_proto_depIdxs = nil } - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// HealthzServiceClient is the client API for HealthzService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type HealthzServiceClient interface { - // Get healthz data. - GetHealthz(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*GetHealthzResponse, error) -} - -type healthzServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewHealthzServiceClient(cc grpc.ClientConnInterface) HealthzServiceClient { - return &healthzServiceClient{cc} -} - -func (c *healthzServiceClient) GetHealthz(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*GetHealthzResponse, error) { - out := new(GetHealthzResponse) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.HealthzService/GetHealthz", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// HealthzServiceServer is the server API for HealthzService service. -type HealthzServiceServer interface { - // Get healthz data. - GetHealthz(context.Context, *emptypb.Empty) (*GetHealthzResponse, error) -} - -// UnimplementedHealthzServiceServer can be embedded to have forward compatible implementations. 
-type UnimplementedHealthzServiceServer struct { -} - -func (*UnimplementedHealthzServiceServer) GetHealthz(context.Context, *emptypb.Empty) (*GetHealthzResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetHealthz not implemented") -} - -func RegisterHealthzServiceServer(s *grpc.Server, srv HealthzServiceServer) { - s.RegisterService(&_HealthzService_serviceDesc, srv) -} - -func _HealthzService_GetHealthz_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(emptypb.Empty) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(HealthzServiceServer).GetHealthz(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.HealthzService/GetHealthz", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(HealthzServiceServer).GetHealthz(ctx, req.(*emptypb.Empty)) - } - return interceptor(ctx, in, info, handler) -} - -var _HealthzService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "kubeflow.pipelines.backend.api.v2beta1.HealthzService", - HandlerType: (*HealthzServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "GetHealthz", - Handler: _HealthzService_GetHealthz_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "backend/api/v2beta1/healthz.proto", -} diff --git a/backend/api/v2beta1/go_client/healthz.pb.gw.go b/backend/api/v2beta1/go_client/healthz.pb.gw.go index ffc2f3cc7d3..5d5b6262a73 100644 --- a/backend/api/v2beta1/go_client/healthz.pb.gw.go +++ b/backend/api/v2beta1/go_client/healthz.pb.gw.go @@ -10,75 +10,78 @@ package go_client import ( "context" + "errors" "io" "net/http" - "github.com/golang/protobuf/descriptor" - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" "google.golang.org/protobuf/types/known/emptypb" ) // Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = descriptor.ForMessage -var _ = metadata.Join +var ( + _ codes.Code + _ io.Reader + _ status.Status + _ = errors.New + _ = runtime.String + _ = utilities.NewDoubleArray + _ = metadata.Join +) func request_HealthzService_GetHealthz_0(ctx context.Context, marshaler runtime.Marshaler, client HealthzServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq emptypb.Empty - var metadata runtime.ServerMetadata - + var ( + protoReq emptypb.Empty + metadata runtime.ServerMetadata + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } msg, err := client.GetHealthz(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_HealthzService_GetHealthz_0(ctx context.Context, marshaler runtime.Marshaler, server HealthzServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq emptypb.Empty - var metadata runtime.ServerMetadata - + var 
( + protoReq emptypb.Empty + metadata runtime.ServerMetadata + ) msg, err := server.GetHealthz(ctx, &protoReq) return msg, metadata, err - } // RegisterHealthzServiceHandlerServer registers the http handlers for service HealthzService to "mux". // UnaryRPC :call HealthzServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. // Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterHealthzServiceHandlerFromEndpoint instead. +// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call. func RegisterHealthzServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server HealthzServiceServer) error { - - mux.Handle("GET", pattern_HealthzService_GetHealthz_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_HealthzService_GetHealthz_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.HealthzService/GetHealthz", runtime.WithHTTPPathPattern("/apis/v2beta1/healthz")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_HealthzService_GetHealthz_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_HealthzService_GetHealthz_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_HealthzService_GetHealthz_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_HealthzService_GetHealthz_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) return nil @@ -87,25 +90,24 @@ func RegisterHealthzServiceHandlerServer(ctx context.Context, mux *runtime.Serve // RegisterHealthzServiceHandlerFromEndpoint is same as RegisterHealthzServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterHealthzServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) + conn, err := grpc.NewClient(endpoint, opts...) 
if err != nil { return err } defer func() { if err != nil { if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } return } go func() { <-ctx.Done() if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } }() }() - return RegisterHealthzServiceHandler(ctx, mux, conn) } @@ -119,34 +121,30 @@ func RegisterHealthzServiceHandler(ctx context.Context, mux *runtime.ServeMux, c // to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "HealthzServiceClient". // Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "HealthzServiceClient" // doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "HealthzServiceClient" to call the correct interceptors. +// "HealthzServiceClient" to call the correct interceptors. This client ignores the HTTP middlewares. func RegisterHealthzServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client HealthzServiceClient) error { - - mux.Handle("GET", pattern_HealthzService_GetHealthz_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_HealthzService_GetHealthz_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.HealthzService/GetHealthz", runtime.WithHTTPPathPattern("/apis/v2beta1/healthz")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_HealthzService_GetHealthz_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_HealthzService_GetHealthz_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_HealthzService_GetHealthz_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_HealthzService_GetHealthz_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - return nil } var ( - pattern_HealthzService_GetHealthz_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "healthz"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_HealthzService_GetHealthz_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "healthz"}, "")) ) var ( diff --git a/backend/api/v2beta1/go_client/healthz_grpc.pb.go b/backend/api/v2beta1/go_client/healthz_grpc.pb.go new file mode 100644 index 00000000000..1ed679978eb --- /dev/null +++ b/backend/api/v2beta1/go_client/healthz_grpc.pb.go @@ -0,0 +1,138 @@ +// Copyright 2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v6.31.1 +// source: backend/api/v2beta1/healthz.proto + +package go_client + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + emptypb "google.golang.org/protobuf/types/known/emptypb" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + HealthzService_GetHealthz_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.HealthzService/GetHealthz" +) + +// HealthzServiceClient is the client API for HealthzService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type HealthzServiceClient interface { + // Get healthz data. + GetHealthz(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*GetHealthzResponse, error) +} + +type healthzServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewHealthzServiceClient(cc grpc.ClientConnInterface) HealthzServiceClient { + return &healthzServiceClient{cc} +} + +func (c *healthzServiceClient) GetHealthz(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*GetHealthzResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(GetHealthzResponse) + err := c.cc.Invoke(ctx, HealthzService_GetHealthz_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// HealthzServiceServer is the server API for HealthzService service. +// All implementations must embed UnimplementedHealthzServiceServer +// for forward compatibility. +type HealthzServiceServer interface { + // Get healthz data. + GetHealthz(context.Context, *emptypb.Empty) (*GetHealthzResponse, error) + mustEmbedUnimplementedHealthzServiceServer() +} + +// UnimplementedHealthzServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. +type UnimplementedHealthzServiceServer struct{} + +func (UnimplementedHealthzServiceServer) GetHealthz(context.Context, *emptypb.Empty) (*GetHealthzResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetHealthz not implemented") +} +func (UnimplementedHealthzServiceServer) mustEmbedUnimplementedHealthzServiceServer() {} +func (UnimplementedHealthzServiceServer) testEmbeddedByValue() {} + +// UnsafeHealthzServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to HealthzServiceServer will +// result in compilation errors. 
+type UnsafeHealthzServiceServer interface { + mustEmbedUnimplementedHealthzServiceServer() +} + +func RegisterHealthzServiceServer(s grpc.ServiceRegistrar, srv HealthzServiceServer) { + // If the following call pancis, it indicates UnimplementedHealthzServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. + if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&HealthzService_ServiceDesc, srv) +} + +func _HealthzService_GetHealthz_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(emptypb.Empty) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(HealthzServiceServer).GetHealthz(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: HealthzService_GetHealthz_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(HealthzServiceServer).GetHealthz(ctx, req.(*emptypb.Empty)) + } + return interceptor(ctx, in, info, handler) +} + +// HealthzService_ServiceDesc is the grpc.ServiceDesc for HealthzService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var HealthzService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "kubeflow.pipelines.backend.api.v2beta1.HealthzService", + HandlerType: (*HealthzServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetHealthz", + Handler: _HealthzService_GetHealthz_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "backend/api/v2beta1/healthz.proto", +} diff --git a/backend/api/v2beta1/go_client/pipeline.pb.go b/backend/api/v2beta1/go_client/pipeline.pb.go index 79dd1495dce..4093dea29e7 100644 --- a/backend/api/v2beta1/go_client/pipeline.pb.go +++ b/backend/api/v2beta1/go_client/pipeline.pb.go @@ -14,20 +14,16 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v2beta1/pipeline.proto package go_client import ( - context "context" - _ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options" + _ "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options" _ "google.golang.org/genproto/googleapis/api/annotations" status "google.golang.org/genproto/googleapis/rpc/status" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status1 "google.golang.org/grpc/status" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" emptypb "google.golang.org/protobuf/types/known/emptypb" @@ -35,6 +31,7 @@ import ( timestamppb "google.golang.org/protobuf/types/known/timestamppb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -45,10 +42,7 @@ const ( ) type Pipeline struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Output. Unique pipeline ID. Generated by API server. PipelineId string `protobuf:"bytes,1,opt,name=pipeline_id,json=pipelineId,proto3" json:"pipeline_id,omitempty"` // Required if name is not provided. Pipeline display name provided by user. 
@@ -66,16 +60,16 @@ type Pipeline struct { // In case any error happens retrieving a pipeline field, only pipeline ID, // and the error message is returned. Client has the flexibility of choosing // how to handle the error. This is especially useful during listing call. - Error *status.Status `protobuf:"bytes,6,opt,name=error,proto3" json:"error,omitempty"` + Error *status.Status `protobuf:"bytes,6,opt,name=error,proto3" json:"error,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Pipeline) Reset() { *x = Pipeline{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Pipeline) String() string { @@ -86,7 +80,7 @@ func (*Pipeline) ProtoMessage() {} func (x *Pipeline) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -151,10 +145,7 @@ func (x *Pipeline) GetError() *status.Status { } type PipelineVersion struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Required input field. Unique ID of the parent pipeline. // This is ignored in CreatePipelineAndVersion API. PipelineId string `protobuf:"bytes,1,opt,name=pipeline_id,json=pipelineId,proto3" json:"pipeline_id,omitempty"` @@ -186,16 +177,16 @@ type PipelineVersion struct { // pipeline ID, pipeline version ID, and the error message are returned. // Client has the flexibility of choosing how to handle the error. // This is especially useful during List() calls. - Error *status.Status `protobuf:"bytes,8,opt,name=error,proto3" json:"error,omitempty"` + Error *status.Status `protobuf:"bytes,8,opt,name=error,proto3" json:"error,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineVersion) Reset() { *x = PipelineVersion{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineVersion) String() string { @@ -206,7 +197,7 @@ func (*PipelineVersion) ProtoMessage() {} func (x *PipelineVersion) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -292,21 +283,18 @@ func (x *PipelineVersion) GetError() *status.Status { } type Url struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // URL of the pipeline version definition. 
- PipelineUrl string `protobuf:"bytes,1,opt,name=pipeline_url,json=pipelineUrl,proto3" json:"pipeline_url,omitempty"` + PipelineUrl string `protobuf:"bytes,1,opt,name=pipeline_url,json=pipelineUrl,proto3" json:"pipeline_url,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Url) Reset() { *x = Url{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Url) String() string { @@ -317,7 +305,7 @@ func (*Url) ProtoMessage() {} func (x *Url) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -340,21 +328,18 @@ func (x *Url) GetPipelineUrl() string { } type CreatePipelineRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Required input. Pipeline that needs to be created. - Pipeline *Pipeline `protobuf:"bytes,1,opt,name=pipeline,proto3" json:"pipeline,omitempty"` + Pipeline *Pipeline `protobuf:"bytes,1,opt,name=pipeline,proto3" json:"pipeline,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *CreatePipelineRequest) Reset() { *x = CreatePipelineRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *CreatePipelineRequest) String() string { @@ -365,7 +350,7 @@ func (*CreatePipelineRequest) ProtoMessage() {} func (x *CreatePipelineRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -388,21 +373,18 @@ func (x *CreatePipelineRequest) GetPipeline() *Pipeline { } type GetPipelineRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Required input. The ID of the pipeline to be retrieved. 
- PipelineId string `protobuf:"bytes,1,opt,name=pipeline_id,json=pipelineId,proto3" json:"pipeline_id,omitempty"` + PipelineId string `protobuf:"bytes,1,opt,name=pipeline_id,json=pipelineId,proto3" json:"pipeline_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *GetPipelineRequest) Reset() { *x = GetPipelineRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *GetPipelineRequest) String() string { @@ -413,7 +395,7 @@ func (*GetPipelineRequest) ProtoMessage() {} func (x *GetPipelineRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -436,10 +418,7 @@ func (x *GetPipelineRequest) GetPipelineId() string { } type ListPipelinesRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Optional input. Namespace for the pipelines. Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"` // A page token to request the results page. @@ -453,16 +432,16 @@ type ListPipelinesRequest struct { SortBy string `protobuf:"bytes,4,opt,name=sort_by,json=sortBy,proto3" json:"sort_by,omitempty"` // A url-encoded, JSON-serialized filter protocol buffer (see // [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). - Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListPipelinesRequest) Reset() { *x = ListPipelinesRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListPipelinesRequest) String() string { @@ -473,7 +452,7 @@ func (*ListPipelinesRequest) ProtoMessage() {} func (x *ListPipelinesRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -524,10 +503,7 @@ func (x *ListPipelinesRequest) GetFilter() string { } type ListPipelinesResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Returned pipelines. Pipelines []*Pipeline `protobuf:"bytes,1,rep,name=pipelines,proto3" json:"pipelines,omitempty"` // The total number of pipelines for the given query. @@ -535,15 +511,15 @@ type ListPipelinesResponse struct { // The token to list the next page of pipelines. // This token can be used on the next ListPipelinesRequest. 
NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListPipelinesResponse) Reset() { *x = ListPipelinesResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListPipelinesResponse) String() string { @@ -554,7 +530,7 @@ func (*ListPipelinesResponse) ProtoMessage() {} func (x *ListPipelinesResponse) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -591,25 +567,22 @@ func (x *ListPipelinesResponse) GetNextPageToken() string { } type GetPipelineByNameRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Optional input. Namespace of the pipeline. // It could be empty if default namespaces needs to be used or if multi-user // support is turned off. Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"` // Required input. Name of the pipeline to be retrieved. - Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *GetPipelineByNameRequest) Reset() { *x = GetPipelineByNameRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *GetPipelineByNameRequest) String() string { @@ -620,7 +593,7 @@ func (*GetPipelineByNameRequest) ProtoMessage() {} func (x *GetPipelineByNameRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -650,21 +623,21 @@ func (x *GetPipelineByNameRequest) GetName() string { } type DeletePipelineRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Required input. ID of the pipeline to be deleted. PipelineId string `protobuf:"bytes,1,opt,name=pipeline_id,json=pipelineId,proto3" json:"pipeline_id,omitempty"` + // Optional. If true, the pipeline and all its versions will be deleted. + // If false (default), only the pipeline will be deleted if it has no versions. 
+ Cascade bool `protobuf:"varint,2,opt,name=cascade,proto3" json:"cascade,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DeletePipelineRequest) Reset() { *x = DeletePipelineRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DeletePipelineRequest) String() string { @@ -675,7 +648,7 @@ func (*DeletePipelineRequest) ProtoMessage() {} func (x *DeletePipelineRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -697,25 +670,29 @@ func (x *DeletePipelineRequest) GetPipelineId() string { return "" } -type CreatePipelineAndVersionRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields +func (x *DeletePipelineRequest) GetCascade() bool { + if x != nil { + return x.Cascade + } + return false +} +type CreatePipelineAndVersionRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` // Required input. Pipeline (parent) to be created. Pipeline *Pipeline `protobuf:"bytes,1,opt,name=pipeline,proto3" json:"pipeline,omitempty"` // Required input. Pipeline version (child) to be created. // Pipeline spec will be downloaded from pipeline_version.package_url. PipelineVersion *PipelineVersion `protobuf:"bytes,2,opt,name=pipeline_version,json=pipelineVersion,proto3" json:"pipeline_version,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *CreatePipelineAndVersionRequest) Reset() { *x = CreatePipelineAndVersionRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[9] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *CreatePipelineAndVersionRequest) String() string { @@ -726,7 +703,7 @@ func (*CreatePipelineAndVersionRequest) ProtoMessage() {} func (x *CreatePipelineAndVersionRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[9] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -756,23 +733,20 @@ func (x *CreatePipelineAndVersionRequest) GetPipelineVersion() *PipelineVersion } type CreatePipelineVersionRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Required input. ID of the parent pipeline. PipelineId string `protobuf:"bytes,1,opt,name=pipeline_id,json=pipelineId,proto3" json:"pipeline_id,omitempty"` // Required input. Pipeline version ID to be created. 
PipelineVersion *PipelineVersion `protobuf:"bytes,2,opt,name=pipeline_version,json=pipelineVersion,proto3" json:"pipeline_version,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *CreatePipelineVersionRequest) Reset() { *x = CreatePipelineVersionRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *CreatePipelineVersionRequest) String() string { @@ -783,7 +757,7 @@ func (*CreatePipelineVersionRequest) ProtoMessage() {} func (x *CreatePipelineVersionRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -813,23 +787,20 @@ func (x *CreatePipelineVersionRequest) GetPipelineVersion() *PipelineVersion { } type GetPipelineVersionRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Required input. ID of the parent pipeline. PipelineId string `protobuf:"bytes,1,opt,name=pipeline_id,json=pipelineId,proto3" json:"pipeline_id,omitempty"` // Required input. ID of the pipeline version to be retrieved. PipelineVersionId string `protobuf:"bytes,2,opt,name=pipeline_version_id,json=pipelineVersionId,proto3" json:"pipeline_version_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *GetPipelineVersionRequest) Reset() { *x = GetPipelineVersionRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[11] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *GetPipelineVersionRequest) String() string { @@ -840,7 +811,7 @@ func (*GetPipelineVersionRequest) ProtoMessage() {} func (x *GetPipelineVersionRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[11] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -870,10 +841,7 @@ func (x *GetPipelineVersionRequest) GetPipelineVersionId() string { } type ListPipelineVersionsRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Required input. ID of the parent pipeline. PipelineId string `protobuf:"bytes,1,opt,name=pipeline_id,json=pipelineId,proto3" json:"pipeline_id,omitempty"` // A page token to request the results page. @@ -887,16 +855,16 @@ type ListPipelineVersionsRequest struct { SortBy string `protobuf:"bytes,4,opt,name=sort_by,json=sortBy,proto3" json:"sort_by,omitempty"` // A url-encoded, JSON-serialized filter protocol buffer (see // [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). 
- Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListPipelineVersionsRequest) Reset() { *x = ListPipelineVersionsRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[12] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListPipelineVersionsRequest) String() string { @@ -907,7 +875,7 @@ func (*ListPipelineVersionsRequest) ProtoMessage() {} func (x *ListPipelineVersionsRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[12] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -958,25 +926,22 @@ func (x *ListPipelineVersionsRequest) GetFilter() string { } type ListPipelineVersionsResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Returned pipeline versions. PipelineVersions []*PipelineVersion `protobuf:"bytes,1,rep,name=pipeline_versions,json=pipelineVersions,proto3" json:"pipeline_versions,omitempty"` // The token to list the next page of pipeline versions. NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` // The total number of pipeline versions for the given query. - TotalSize int32 `protobuf:"varint,3,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` + TotalSize int32 `protobuf:"varint,3,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListPipelineVersionsResponse) Reset() { *x = ListPipelineVersionsResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[13] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListPipelineVersionsResponse) String() string { @@ -987,7 +952,7 @@ func (*ListPipelineVersionsResponse) ProtoMessage() {} func (x *ListPipelineVersionsResponse) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[13] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1024,23 +989,20 @@ func (x *ListPipelineVersionsResponse) GetTotalSize() int32 { } type DeletePipelineVersionRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Required input. ID of the parent pipeline. PipelineId string `protobuf:"bytes,1,opt,name=pipeline_id,json=pipelineId,proto3" json:"pipeline_id,omitempty"` // Required input. The ID of the pipeline version to be deleted. 
PipelineVersionId string `protobuf:"bytes,2,opt,name=pipeline_version_id,json=pipelineVersionId,proto3" json:"pipeline_version_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DeletePipelineVersionRequest) Reset() { *x = DeletePipelineVersionRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[14] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DeletePipelineVersionRequest) String() string { @@ -1051,7 +1013,7 @@ func (*DeletePipelineVersionRequest) ProtoMessage() {} func (x *DeletePipelineVersionRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_pipeline_proto_msgTypes[14] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1082,322 +1044,121 @@ func (x *DeletePipelineVersionRequest) GetPipelineVersionId() string { var File_backend_api_v2beta1_pipeline_proto protoreflect.FileDescriptor -var file_backend_api_v2beta1_pipeline_proto_rawDesc = []byte{ - 0x0a, 0x22, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x26, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x1a, 0x1c, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, - 0x74, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x72, - 0x70, 0x63, 0x2f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, - 0x2c, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, 0x67, 0x65, 0x6e, 0x2d, 0x73, 0x77, 0x61, 0x67, - 0x67, 0x65, 0x72, 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, - 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x87, 0x02, - 0x0a, 0x08, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0a, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x64, 0x12, 0x21, 0x0a, 0x0c, 0x64, - 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0b, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x12, - 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 
0x61, - 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, - 0x61, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, - 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, - 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x05, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x28, 0x0a, - 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x67, - 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, - 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0xd4, 0x03, 0x0a, 0x0f, 0x50, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x1f, 0x0a, 0x0b, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x0a, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x64, 0x12, 0x2e, 0x0a, 0x13, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, - 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x70, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x21, 0x0a, 0x0c, - 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x0b, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, - 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, - 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, - 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, - 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, - 0x5f, 0x61, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, - 0x12, 0x4c, 0x0a, 0x0b, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x5f, 0x75, 0x72, 0x6c, 0x18, - 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, - 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x55, - 0x72, 0x6c, 0x52, 0x0a, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x55, 0x72, 0x6c, 0x12, 0x26, - 0x0a, 0x0f, 0x63, 0x6f, 0x64, 0x65, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x75, 0x72, - 0x6c, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x63, 0x6f, 0x64, 0x65, 0x53, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x55, 0x72, 0x6c, 0x12, 0x3c, 0x0a, 0x0d, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, - 0x53, 0x74, 0x72, 
0x75, 0x63, 0x74, 0x52, 0x0c, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x53, 0x70, 0x65, 0x63, 0x12, 0x28, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x08, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, - 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x28, - 0x0a, 0x03, 0x55, 0x72, 0x6c, 0x12, 0x21, 0x0a, 0x0c, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x55, 0x72, 0x6c, 0x22, 0x65, 0x0a, 0x15, 0x43, 0x72, 0x65, 0x61, - 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x4c, 0x0a, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x50, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, - 0x35, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x64, 0x22, 0xa1, 0x01, 0x0a, 0x14, 0x4c, 0x69, 0x73, 0x74, 0x50, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x1d, 0x0a, - 0x0a, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x09, 0x70, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x1b, 0x0a, 0x09, - 0x70, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, - 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x73, 0x6f, 0x72, - 0x74, 0x5f, 0x62, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x72, 0x74, - 0x42, 0x79, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x22, 0xae, 0x01, 0x0a, 0x15, 0x4c, - 0x69, 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4e, 0x0a, 0x09, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, - 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, - 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x09, 0x70, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x5f, 0x73, 0x69, - 0x7a, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x53, - 0x69, 0x7a, 0x65, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, - 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 
0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, - 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x4c, 0x0a, 0x18, 0x47, - 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, - 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, - 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x38, 0x0a, 0x15, 0x44, 0x65, 0x6c, - 0x65, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x69, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x49, 0x64, 0x22, 0xd3, 0x01, 0x0a, 0x1f, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x41, 0x6e, 0x64, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x4c, 0x0a, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x6b, 0x75, 0x62, 0x65, - 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, - 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, - 0x61, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x08, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x62, 0x0a, 0x10, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x37, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0xa3, 0x01, 0x0a, 0x1c, 0x43, 0x72, - 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, - 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0a, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x64, 0x12, 0x62, 0x0a, 0x10, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, - 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x50, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0f, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, - 0x6c, 0x0a, 0x19, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, - 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x69, 0x64, 
0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0a, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x64, 0x12, 0x2e, 0x0a, - 0x13, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, - 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x22, 0xab, 0x01, - 0x0a, 0x1b, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, - 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, - 0x0b, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x0a, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x64, 0x12, 0x1d, - 0x0a, 0x0a, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x09, 0x70, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x1b, 0x0a, - 0x09, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, - 0x52, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x73, 0x6f, - 0x72, 0x74, 0x5f, 0x62, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x72, - 0x74, 0x42, 0x79, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x05, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x22, 0xcb, 0x01, 0x0a, 0x1c, - 0x4c, 0x69, 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, - 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x64, 0x0a, 0x11, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, - 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, - 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, - 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, - 0x52, 0x10, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, - 0x6e, 0x73, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, - 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, 0x78, - 0x74, 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x6f, - 0x74, 0x61, 0x6c, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, - 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x53, 0x69, 0x7a, 0x65, 0x22, 0x6f, 0x0a, 0x1c, 0x44, 0x65, 0x6c, - 0x65, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, - 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x64, 0x12, 0x2e, 0x0a, 0x13, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, - 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x32, 0x97, 0x0f, 0x0a, 0x0f, 0x50, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 
0xac, - 0x01, 0x0a, 0x0e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x12, 0x3d, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, - 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x30, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x22, 0x29, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x23, 0x3a, 0x08, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x22, 0x17, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, - 0x74, 0x61, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x12, 0xaa, 0x01, - 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x3a, 0x2e, - 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, - 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x30, 0x2e, 0x6b, 0x75, 0x62, 0x65, - 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, - 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, - 0x61, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, 0x2d, 0x82, 0xd3, 0xe4, - 0x93, 0x02, 0x27, 0x12, 0x25, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, - 0x61, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x69, 0x64, 0x7d, 0x12, 0xb5, 0x01, 0x0a, 0x11, 0x47, - 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, - 0x12, 0x40, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x30, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x22, 0x2c, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x26, 0x12, 0x24, 0x2f, 0x61, - 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, - 0x65, 0x7d, 0x12, 0xad, 0x01, 0x0a, 0x0d, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x73, 0x12, 0x3c, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, - 0x64, 0x2e, 0x61, 
0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x4c, 0x69, - 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x3d, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, - 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x22, 0x1f, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x19, 0x12, 0x17, 0x2f, 0x61, 0x70, 0x69, 0x73, - 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x12, 0x96, 0x01, 0x0a, 0x0e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x3d, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, - 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x44, - 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x2d, 0x82, 0xd3, - 0xe4, 0x93, 0x02, 0x27, 0x2a, 0x25, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, - 0x74, 0x61, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x69, 0x64, 0x7d, 0x12, 0xc0, 0x01, 0x0a, 0x18, - 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x41, 0x6e, - 0x64, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x47, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, - 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, - 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, - 0x31, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x41, 0x6e, 0x64, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x30, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x22, 0x29, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x23, 0x3a, 0x01, 0x2a, 0x22, 0x1e, - 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x12, 0xe0, - 0x01, 0x0a, 0x15, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x44, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, - 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, - 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, - 0x31, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x37, - 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 
0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x48, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x42, 0x3a, - 0x10, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, - 0x6e, 0x22, 0x2e, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x69, 0x64, 0x7d, 0x2f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, - 0x73, 0x12, 0xde, 0x01, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x41, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, - 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, - 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, - 0x31, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, - 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x37, 0x2e, 0x6b, 0x75, - 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, - 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, - 0x65, 0x74, 0x61, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, - 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x4c, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x46, 0x12, 0x44, 0x2f, 0x61, - 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, - 0x69, 0x64, 0x7d, 0x2f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x7b, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, - 0x64, 0x7d, 0x12, 0xd9, 0x01, 0x0a, 0x14, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x43, 0x2e, 0x6b, 0x75, - 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, - 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, - 0x65, 0x74, 0x61, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x44, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x36, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x30, 0x12, 0x2e, - 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x5f, 0x69, 0x64, 0x7d, 0x2f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0xc3, - 0x01, 0x0a, 0x15, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, 0x69, 
0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x44, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, - 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, - 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, - 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, - 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, - 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x4c, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x46, 0x2a, 0x44, - 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x5f, 0x69, 0x64, 0x7d, 0x2f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x7b, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, - 0x5f, 0x69, 0x64, 0x7d, 0x42, 0x98, 0x01, 0x92, 0x41, 0x58, 0x2a, 0x02, 0x01, 0x02, 0x52, 0x23, - 0x0a, 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x12, 0x18, 0x12, 0x16, 0x0a, 0x14, 0x1a, - 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, - 0x74, 0x75, 0x73, 0x5a, 0x1f, 0x0a, 0x1d, 0x0a, 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x12, - 0x13, 0x08, 0x02, 0x1a, 0x0d, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x02, 0x62, 0x0c, 0x0a, 0x0a, 0x0a, 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, - 0x12, 0x00, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, - 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x67, 0x6f, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x62, - 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v2beta1_pipeline_proto_rawDesc = "" + + "\n" + + "\"backend/api/v2beta1/pipeline.proto\x12&kubeflow.pipelines.backend.api.v2beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x17google/rpc/status.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\"\x87\x02\n" + + "\bPipeline\x12\x1f\n" + + "\vpipeline_id\x18\x01 \x01(\tR\n" + + "pipelineId\x12!\n" + + "\fdisplay_name\x18\x02 \x01(\tR\vdisplayName\x12\x12\n" + + "\x04name\x18\a \x01(\tR\x04name\x12 \n" + + "\vdescription\x18\x03 \x01(\tR\vdescription\x129\n" + + "\n" + + "created_at\x18\x04 \x01(\v2\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x1c\n" + + "\tnamespace\x18\x05 \x01(\tR\tnamespace\x12(\n" + + "\x05error\x18\x06 \x01(\v2\x12.google.rpc.StatusR\x05error\"\xd4\x03\n" + + "\x0fPipelineVersion\x12\x1f\n" + + "\vpipeline_id\x18\x01 \x01(\tR\n" + + "pipelineId\x12.\n" + + "\x13pipeline_version_id\x18\x02 \x01(\tR\x11pipelineVersionId\x12!\n" + + "\fdisplay_name\x18\x03 \x01(\tR\vdisplayName\x12\x12\n" + + "\x04name\x18\n" + + " \x01(\tR\x04name\x12 \n" + + "\vdescription\x18\x04 \x01(\tR\vdescription\x129\n" + + "\n" + + "created_at\x18\x05 \x01(\v2\x1a.google.protobuf.TimestampR\tcreatedAt\x12L\n" + + "\vpackage_url\x18\x06 \x01(\v2+.kubeflow.pipelines.backend.api.v2beta1.UrlR\n" + + "packageUrl\x12&\n" + 
+ "\x0fcode_source_url\x18\t \x01(\tR\rcodeSourceUrl\x12<\n" + + "\rpipeline_spec\x18\a \x01(\v2\x17.google.protobuf.StructR\fpipelineSpec\x12(\n" + + "\x05error\x18\b \x01(\v2\x12.google.rpc.StatusR\x05error\"(\n" + + "\x03Url\x12!\n" + + "\fpipeline_url\x18\x01 \x01(\tR\vpipelineUrl\"e\n" + + "\x15CreatePipelineRequest\x12L\n" + + "\bpipeline\x18\x01 \x01(\v20.kubeflow.pipelines.backend.api.v2beta1.PipelineR\bpipeline\"5\n" + + "\x12GetPipelineRequest\x12\x1f\n" + + "\vpipeline_id\x18\x01 \x01(\tR\n" + + "pipelineId\"\xa1\x01\n" + + "\x14ListPipelinesRequest\x12\x1c\n" + + "\tnamespace\x18\x01 \x01(\tR\tnamespace\x12\x1d\n" + + "\n" + + "page_token\x18\x02 \x01(\tR\tpageToken\x12\x1b\n" + + "\tpage_size\x18\x03 \x01(\x05R\bpageSize\x12\x17\n" + + "\asort_by\x18\x04 \x01(\tR\x06sortBy\x12\x16\n" + + "\x06filter\x18\x05 \x01(\tR\x06filter\"\xae\x01\n" + + "\x15ListPipelinesResponse\x12N\n" + + "\tpipelines\x18\x01 \x03(\v20.kubeflow.pipelines.backend.api.v2beta1.PipelineR\tpipelines\x12\x1d\n" + + "\n" + + "total_size\x18\x02 \x01(\x05R\ttotalSize\x12&\n" + + "\x0fnext_page_token\x18\x03 \x01(\tR\rnextPageToken\"L\n" + + "\x18GetPipelineByNameRequest\x12\x1c\n" + + "\tnamespace\x18\x01 \x01(\tR\tnamespace\x12\x12\n" + + "\x04name\x18\x02 \x01(\tR\x04name\"R\n" + + "\x15DeletePipelineRequest\x12\x1f\n" + + "\vpipeline_id\x18\x01 \x01(\tR\n" + + "pipelineId\x12\x18\n" + + "\acascade\x18\x02 \x01(\bR\acascade\"\xd3\x01\n" + + "\x1fCreatePipelineAndVersionRequest\x12L\n" + + "\bpipeline\x18\x01 \x01(\v20.kubeflow.pipelines.backend.api.v2beta1.PipelineR\bpipeline\x12b\n" + + "\x10pipeline_version\x18\x02 \x01(\v27.kubeflow.pipelines.backend.api.v2beta1.PipelineVersionR\x0fpipelineVersion\"\xa3\x01\n" + + "\x1cCreatePipelineVersionRequest\x12\x1f\n" + + "\vpipeline_id\x18\x01 \x01(\tR\n" + + "pipelineId\x12b\n" + + "\x10pipeline_version\x18\x02 \x01(\v27.kubeflow.pipelines.backend.api.v2beta1.PipelineVersionR\x0fpipelineVersion\"l\n" + + "\x19GetPipelineVersionRequest\x12\x1f\n" + + "\vpipeline_id\x18\x01 \x01(\tR\n" + + "pipelineId\x12.\n" + + "\x13pipeline_version_id\x18\x02 \x01(\tR\x11pipelineVersionId\"\xab\x01\n" + + "\x1bListPipelineVersionsRequest\x12\x1f\n" + + "\vpipeline_id\x18\x01 \x01(\tR\n" + + "pipelineId\x12\x1d\n" + + "\n" + + "page_token\x18\x02 \x01(\tR\tpageToken\x12\x1b\n" + + "\tpage_size\x18\x03 \x01(\x05R\bpageSize\x12\x17\n" + + "\asort_by\x18\x04 \x01(\tR\x06sortBy\x12\x16\n" + + "\x06filter\x18\x05 \x01(\tR\x06filter\"\xcb\x01\n" + + "\x1cListPipelineVersionsResponse\x12d\n" + + "\x11pipeline_versions\x18\x01 \x03(\v27.kubeflow.pipelines.backend.api.v2beta1.PipelineVersionR\x10pipelineVersions\x12&\n" + + "\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\x12\x1d\n" + + "\n" + + "total_size\x18\x03 \x01(\x05R\ttotalSize\"o\n" + + "\x1cDeletePipelineVersionRequest\x12\x1f\n" + + "\vpipeline_id\x18\x01 \x01(\tR\n" + + "pipelineId\x12.\n" + + "\x13pipeline_version_id\x18\x02 \x01(\tR\x11pipelineVersionId2\x97\x0f\n" + + "\x0fPipelineService\x12\xac\x01\n" + + "\x0eCreatePipeline\x12=.kubeflow.pipelines.backend.api.v2beta1.CreatePipelineRequest\x1a0.kubeflow.pipelines.backend.api.v2beta1.Pipeline\")\x82\xd3\xe4\x93\x02#:\bpipeline\"\x17/apis/v2beta1/pipelines\x12\xaa\x01\n" + + "\vGetPipeline\x12:.kubeflow.pipelines.backend.api.v2beta1.GetPipelineRequest\x1a0.kubeflow.pipelines.backend.api.v2beta1.Pipeline\"-\x82\xd3\xe4\x93\x02'\x12%/apis/v2beta1/pipelines/{pipeline_id}\x12\xb5\x01\n" + + 
"\x11GetPipelineByName\x12@.kubeflow.pipelines.backend.api.v2beta1.GetPipelineByNameRequest\x1a0.kubeflow.pipelines.backend.api.v2beta1.Pipeline\",\x82\xd3\xe4\x93\x02&\x12$/apis/v2beta1/pipelines/names/{name}\x12\xad\x01\n" + + "\rListPipelines\x12<.kubeflow.pipelines.backend.api.v2beta1.ListPipelinesRequest\x1a=.kubeflow.pipelines.backend.api.v2beta1.ListPipelinesResponse\"\x1f\x82\xd3\xe4\x93\x02\x19\x12\x17/apis/v2beta1/pipelines\x12\x96\x01\n" + + "\x0eDeletePipeline\x12=.kubeflow.pipelines.backend.api.v2beta1.DeletePipelineRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02'*%/apis/v2beta1/pipelines/{pipeline_id}\x12\xc0\x01\n" + + "\x18CreatePipelineAndVersion\x12G.kubeflow.pipelines.backend.api.v2beta1.CreatePipelineAndVersionRequest\x1a0.kubeflow.pipelines.backend.api.v2beta1.Pipeline\")\x82\xd3\xe4\x93\x02#:\x01*\"\x1e/apis/v2beta1/pipelines/create\x12\xe0\x01\n" + + "\x15CreatePipelineVersion\x12D.kubeflow.pipelines.backend.api.v2beta1.CreatePipelineVersionRequest\x1a7.kubeflow.pipelines.backend.api.v2beta1.PipelineVersion\"H\x82\xd3\xe4\x93\x02B:\x10pipeline_version\"./apis/v2beta1/pipelines/{pipeline_id}/versions\x12\xde\x01\n" + + "\x12GetPipelineVersion\x12A.kubeflow.pipelines.backend.api.v2beta1.GetPipelineVersionRequest\x1a7.kubeflow.pipelines.backend.api.v2beta1.PipelineVersion\"L\x82\xd3\xe4\x93\x02F\x12D/apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id}\x12\xd9\x01\n" + + "\x14ListPipelineVersions\x12C.kubeflow.pipelines.backend.api.v2beta1.ListPipelineVersionsRequest\x1aD.kubeflow.pipelines.backend.api.v2beta1.ListPipelineVersionsResponse\"6\x82\xd3\xe4\x93\x020\x12./apis/v2beta1/pipelines/{pipeline_id}/versions\x12\xc3\x01\n" + + "\x15DeletePipelineVersion\x12D.kubeflow.pipelines.backend.api.v2beta1.DeletePipelineVersionRequest\x1a\x16.google.protobuf.Empty\"L\x82\xd3\xe4\x93\x02F*D/apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id}B\x98\x01\x92AX*\x02\x01\x02R#\n" + + "\adefault\x12\x18\x12\x16\n" + + "\x14\x1a\x12.google.rpc.StatusZ\x1f\n" + + "\x1d\n" + + "\x06Bearer\x12\x13\b\x02\x1a\rauthorization \x02b\f\n" + + "\n" + + "\n" + + "\x06Bearer\x12\x00Z;github.com/kubeflow/pipelines/backend/api/v2beta1/go_clientb\x06proto3" var ( file_backend_api_v2beta1_pipeline_proto_rawDescOnce sync.Once - file_backend_api_v2beta1_pipeline_proto_rawDescData = file_backend_api_v2beta1_pipeline_proto_rawDesc + file_backend_api_v2beta1_pipeline_proto_rawDescData []byte ) func file_backend_api_v2beta1_pipeline_proto_rawDescGZIP() []byte { file_backend_api_v2beta1_pipeline_proto_rawDescOnce.Do(func() { - file_backend_api_v2beta1_pipeline_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v2beta1_pipeline_proto_rawDescData) + file_backend_api_v2beta1_pipeline_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_pipeline_proto_rawDesc), len(file_backend_api_v2beta1_pipeline_proto_rawDesc))) }) return file_backend_api_v2beta1_pipeline_proto_rawDescData } var file_backend_api_v2beta1_pipeline_proto_msgTypes = make([]protoimpl.MessageInfo, 15) -var file_backend_api_v2beta1_pipeline_proto_goTypes = []interface{}{ +var file_backend_api_v2beta1_pipeline_proto_goTypes = []any{ (*Pipeline)(nil), // 0: kubeflow.pipelines.backend.api.v2beta1.Pipeline (*PipelineVersion)(nil), // 1: kubeflow.pipelines.backend.api.v2beta1.PipelineVersion (*Url)(nil), // 2: kubeflow.pipelines.backend.api.v2beta1.Url @@ -1463,193 +1224,11 @@ func file_backend_api_v2beta1_pipeline_proto_init() { if 
File_backend_api_v2beta1_pipeline_proto != nil { return } - if !protoimpl.UnsafeEnabled { - file_backend_api_v2beta1_pipeline_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Pipeline); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_pipeline_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineVersion); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_pipeline_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Url); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_pipeline_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreatePipelineRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_pipeline_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetPipelineRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_pipeline_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListPipelinesRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_pipeline_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListPipelinesResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_pipeline_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetPipelineByNameRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_pipeline_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeletePipelineRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_pipeline_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreatePipelineAndVersionRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_pipeline_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreatePipelineVersionRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_pipeline_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetPipelineVersionRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_pipeline_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v 
:= v.(*ListPipelineVersionsRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_pipeline_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListPipelineVersionsResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_pipeline_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeletePipelineVersionRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v2beta1_pipeline_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_pipeline_proto_rawDesc), len(file_backend_api_v2beta1_pipeline_proto_rawDesc)), NumEnums: 0, NumMessages: 15, NumExtensions: 0, @@ -1660,431 +1239,6 @@ func file_backend_api_v2beta1_pipeline_proto_init() { MessageInfos: file_backend_api_v2beta1_pipeline_proto_msgTypes, }.Build() File_backend_api_v2beta1_pipeline_proto = out.File - file_backend_api_v2beta1_pipeline_proto_rawDesc = nil file_backend_api_v2beta1_pipeline_proto_goTypes = nil file_backend_api_v2beta1_pipeline_proto_depIdxs = nil } - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// PipelineServiceClient is the client API for PipelineService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type PipelineServiceClient interface { - // Creates a pipeline. - CreatePipeline(ctx context.Context, in *CreatePipelineRequest, opts ...grpc.CallOption) (*Pipeline, error) - // Finds a specific pipeline by ID. - GetPipeline(ctx context.Context, in *GetPipelineRequest, opts ...grpc.CallOption) (*Pipeline, error) - // Finds a specific pipeline by name and namespace. - GetPipelineByName(ctx context.Context, in *GetPipelineByNameRequest, opts ...grpc.CallOption) (*Pipeline, error) - // Finds all pipelines within a namespace. - ListPipelines(ctx context.Context, in *ListPipelinesRequest, opts ...grpc.CallOption) (*ListPipelinesResponse, error) - // Deletes an empty pipeline by ID. Returns error if the pipeline has pipeline versions. - DeletePipeline(ctx context.Context, in *DeletePipelineRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - // Creates a new pipeline and a new pipeline version in a single transaction. - CreatePipelineAndVersion(ctx context.Context, in *CreatePipelineAndVersionRequest, opts ...grpc.CallOption) (*Pipeline, error) - // Adds a pipeline version to the specified pipeline ID. - CreatePipelineVersion(ctx context.Context, in *CreatePipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) - // Gets a pipeline version by pipeline version ID and pipeline ID. - GetPipelineVersion(ctx context.Context, in *GetPipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) - // Lists all pipeline versions of a given pipeline ID. 
- ListPipelineVersions(ctx context.Context, in *ListPipelineVersionsRequest, opts ...grpc.CallOption) (*ListPipelineVersionsResponse, error) - // Deletes a specific pipeline version by pipeline version ID and pipeline ID. - DeletePipelineVersion(ctx context.Context, in *DeletePipelineVersionRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) -} - -type pipelineServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewPipelineServiceClient(cc grpc.ClientConnInterface) PipelineServiceClient { - return &pipelineServiceClient{cc} -} - -func (c *pipelineServiceClient) CreatePipeline(ctx context.Context, in *CreatePipelineRequest, opts ...grpc.CallOption) (*Pipeline, error) { - out := new(Pipeline) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/CreatePipeline", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) GetPipeline(ctx context.Context, in *GetPipelineRequest, opts ...grpc.CallOption) (*Pipeline, error) { - out := new(Pipeline) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/GetPipeline", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) GetPipelineByName(ctx context.Context, in *GetPipelineByNameRequest, opts ...grpc.CallOption) (*Pipeline, error) { - out := new(Pipeline) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/GetPipelineByName", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) ListPipelines(ctx context.Context, in *ListPipelinesRequest, opts ...grpc.CallOption) (*ListPipelinesResponse, error) { - out := new(ListPipelinesResponse) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/ListPipelines", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) DeletePipeline(ctx context.Context, in *DeletePipelineRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/DeletePipeline", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) CreatePipelineAndVersion(ctx context.Context, in *CreatePipelineAndVersionRequest, opts ...grpc.CallOption) (*Pipeline, error) { - out := new(Pipeline) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/CreatePipelineAndVersion", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) CreatePipelineVersion(ctx context.Context, in *CreatePipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) { - out := new(PipelineVersion) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/CreatePipelineVersion", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) GetPipelineVersion(ctx context.Context, in *GetPipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) { - out := new(PipelineVersion) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/GetPipelineVersion", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) ListPipelineVersions(ctx context.Context, in *ListPipelineVersionsRequest, opts ...grpc.CallOption) (*ListPipelineVersionsResponse, error) { - out := new(ListPipelineVersionsResponse) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/ListPipelineVersions", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *pipelineServiceClient) DeletePipelineVersion(ctx context.Context, in *DeletePipelineVersionRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/DeletePipelineVersion", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// PipelineServiceServer is the server API for PipelineService service. -type PipelineServiceServer interface { - // Creates a pipeline. - CreatePipeline(context.Context, *CreatePipelineRequest) (*Pipeline, error) - // Finds a specific pipeline by ID. - GetPipeline(context.Context, *GetPipelineRequest) (*Pipeline, error) - // Finds a specific pipeline by name and namespace. - GetPipelineByName(context.Context, *GetPipelineByNameRequest) (*Pipeline, error) - // Finds all pipelines within a namespace. - ListPipelines(context.Context, *ListPipelinesRequest) (*ListPipelinesResponse, error) - // Deletes an empty pipeline by ID. Returns error if the pipeline has pipeline versions. - DeletePipeline(context.Context, *DeletePipelineRequest) (*emptypb.Empty, error) - // Creates a new pipeline and a new pipeline version in a single transaction. - CreatePipelineAndVersion(context.Context, *CreatePipelineAndVersionRequest) (*Pipeline, error) - // Adds a pipeline version to the specified pipeline ID. - CreatePipelineVersion(context.Context, *CreatePipelineVersionRequest) (*PipelineVersion, error) - // Gets a pipeline version by pipeline version ID and pipeline ID. - GetPipelineVersion(context.Context, *GetPipelineVersionRequest) (*PipelineVersion, error) - // Lists all pipeline versions of a given pipeline ID. - ListPipelineVersions(context.Context, *ListPipelineVersionsRequest) (*ListPipelineVersionsResponse, error) - // Deletes a specific pipeline version by pipeline version ID and pipeline ID. - DeletePipelineVersion(context.Context, *DeletePipelineVersionRequest) (*emptypb.Empty, error) -} - -// UnimplementedPipelineServiceServer can be embedded to have forward compatible implementations. 
-type UnimplementedPipelineServiceServer struct { -} - -func (*UnimplementedPipelineServiceServer) CreatePipeline(context.Context, *CreatePipelineRequest) (*Pipeline, error) { - return nil, status1.Errorf(codes.Unimplemented, "method CreatePipeline not implemented") -} -func (*UnimplementedPipelineServiceServer) GetPipeline(context.Context, *GetPipelineRequest) (*Pipeline, error) { - return nil, status1.Errorf(codes.Unimplemented, "method GetPipeline not implemented") -} -func (*UnimplementedPipelineServiceServer) GetPipelineByName(context.Context, *GetPipelineByNameRequest) (*Pipeline, error) { - return nil, status1.Errorf(codes.Unimplemented, "method GetPipelineByName not implemented") -} -func (*UnimplementedPipelineServiceServer) ListPipelines(context.Context, *ListPipelinesRequest) (*ListPipelinesResponse, error) { - return nil, status1.Errorf(codes.Unimplemented, "method ListPipelines not implemented") -} -func (*UnimplementedPipelineServiceServer) DeletePipeline(context.Context, *DeletePipelineRequest) (*emptypb.Empty, error) { - return nil, status1.Errorf(codes.Unimplemented, "method DeletePipeline not implemented") -} -func (*UnimplementedPipelineServiceServer) CreatePipelineAndVersion(context.Context, *CreatePipelineAndVersionRequest) (*Pipeline, error) { - return nil, status1.Errorf(codes.Unimplemented, "method CreatePipelineAndVersion not implemented") -} -func (*UnimplementedPipelineServiceServer) CreatePipelineVersion(context.Context, *CreatePipelineVersionRequest) (*PipelineVersion, error) { - return nil, status1.Errorf(codes.Unimplemented, "method CreatePipelineVersion not implemented") -} -func (*UnimplementedPipelineServiceServer) GetPipelineVersion(context.Context, *GetPipelineVersionRequest) (*PipelineVersion, error) { - return nil, status1.Errorf(codes.Unimplemented, "method GetPipelineVersion not implemented") -} -func (*UnimplementedPipelineServiceServer) ListPipelineVersions(context.Context, *ListPipelineVersionsRequest) (*ListPipelineVersionsResponse, error) { - return nil, status1.Errorf(codes.Unimplemented, "method ListPipelineVersions not implemented") -} -func (*UnimplementedPipelineServiceServer) DeletePipelineVersion(context.Context, *DeletePipelineVersionRequest) (*emptypb.Empty, error) { - return nil, status1.Errorf(codes.Unimplemented, "method DeletePipelineVersion not implemented") -} - -func RegisterPipelineServiceServer(s *grpc.Server, srv PipelineServiceServer) { - s.RegisterService(&_PipelineService_serviceDesc, srv) -} - -func _PipelineService_CreatePipeline_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CreatePipelineRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).CreatePipeline(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/CreatePipeline", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).CreatePipeline(ctx, req.(*CreatePipelineRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_GetPipeline_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetPipelineRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return 
srv.(PipelineServiceServer).GetPipeline(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/GetPipeline", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).GetPipeline(ctx, req.(*GetPipelineRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_GetPipelineByName_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetPipelineByNameRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).GetPipelineByName(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/GetPipelineByName", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).GetPipelineByName(ctx, req.(*GetPipelineByNameRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_ListPipelines_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ListPipelinesRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).ListPipelines(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/ListPipelines", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).ListPipelines(ctx, req.(*ListPipelinesRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_DeletePipeline_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(DeletePipelineRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).DeletePipeline(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/DeletePipeline", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).DeletePipeline(ctx, req.(*DeletePipelineRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_CreatePipelineAndVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CreatePipelineAndVersionRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).CreatePipelineAndVersion(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/CreatePipelineAndVersion", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).CreatePipelineAndVersion(ctx, req.(*CreatePipelineAndVersionRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_CreatePipelineVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := 
new(CreatePipelineVersionRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).CreatePipelineVersion(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/CreatePipelineVersion", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).CreatePipelineVersion(ctx, req.(*CreatePipelineVersionRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_GetPipelineVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetPipelineVersionRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).GetPipelineVersion(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/GetPipelineVersion", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).GetPipelineVersion(ctx, req.(*GetPipelineVersionRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_ListPipelineVersions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ListPipelineVersionsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).ListPipelineVersions(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/ListPipelineVersions", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).ListPipelineVersions(ctx, req.(*ListPipelineVersionsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _PipelineService_DeletePipelineVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(DeletePipelineVersionRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(PipelineServiceServer).DeletePipelineVersion(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/DeletePipelineVersion", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(PipelineServiceServer).DeletePipelineVersion(ctx, req.(*DeletePipelineVersionRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _PipelineService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "kubeflow.pipelines.backend.api.v2beta1.PipelineService", - HandlerType: (*PipelineServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "CreatePipeline", - Handler: _PipelineService_CreatePipeline_Handler, - }, - { - MethodName: "GetPipeline", - Handler: _PipelineService_GetPipeline_Handler, - }, - { - MethodName: "GetPipelineByName", - Handler: _PipelineService_GetPipelineByName_Handler, - }, - { - MethodName: "ListPipelines", - Handler: _PipelineService_ListPipelines_Handler, - }, - { - MethodName: "DeletePipeline", - Handler: _PipelineService_DeletePipeline_Handler, - }, - { - MethodName: "CreatePipelineAndVersion", - Handler: 
_PipelineService_CreatePipelineAndVersion_Handler, - }, - { - MethodName: "CreatePipelineVersion", - Handler: _PipelineService_CreatePipelineVersion_Handler, - }, - { - MethodName: "GetPipelineVersion", - Handler: _PipelineService_GetPipelineVersion_Handler, - }, - { - MethodName: "ListPipelineVersions", - Handler: _PipelineService_ListPipelineVersions_Handler, - }, - { - MethodName: "DeletePipelineVersion", - Handler: _PipelineService_DeletePipelineVersion_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "backend/api/v2beta1/pipeline.proto", -} diff --git a/backend/api/v2beta1/go_client/pipeline.pb.gw.go b/backend/api/v2beta1/go_client/pipeline.pb.gw.go index a4cd6e7d9a0..8db0f2d3a8f 100644 --- a/backend/api/v2beta1/go_client/pipeline.pb.gw.go +++ b/backend/api/v2beta1/go_client/pipeline.pb.gw.go @@ -10,841 +10,678 @@ package go_client import ( "context" + "errors" "io" "net/http" - "github.com/golang/protobuf/descriptor" - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" ) // Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = descriptor.ForMessage -var _ = metadata.Join +var ( + _ codes.Code + _ io.Reader + _ status.Status + _ = errors.New + _ = runtime.String + _ = utilities.NewDoubleArray + _ = metadata.Join +) func request_PipelineService_CreatePipeline_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreatePipelineRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Pipeline); err != nil && err != io.EOF { + var ( + protoReq CreatePipelineRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Pipeline); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } msg, err := client.CreatePipeline(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_CreatePipeline_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreatePipelineRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Pipeline); err != nil && err != io.EOF { + var ( + protoReq CreatePipelineRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Pipeline); err != nil && !errors.Is(err, io.EOF) { return nil, 
metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.CreatePipeline(ctx, &protoReq) return msg, metadata, err - } func request_PipelineService_GetPipeline_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetPipelineRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetPipelineRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["pipeline_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["pipeline_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "pipeline_id") } - protoReq.PipelineId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "pipeline_id", err) } - msg, err := client.GetPipeline(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_GetPipeline_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetPipelineRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetPipelineRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["pipeline_id"] + val, ok := pathParams["pipeline_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "pipeline_id") } - protoReq.PipelineId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "pipeline_id", err) } - msg, err := server.GetPipeline(ctx, &protoReq) return msg, metadata, err - } -var ( - filter_PipelineService_GetPipelineByName_0 = &utilities.DoubleArray{Encoding: map[string]int{"name": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} -) +var filter_PipelineService_GetPipelineByName_0 = &utilities.DoubleArray{Encoding: map[string]int{"name": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} func request_PipelineService_GetPipelineByName_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetPipelineByNameRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetPipelineByNameRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["name"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["name"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") } - protoReq.Name, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_GetPipelineByName_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := 
client.GetPipelineByName(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_GetPipelineByName_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetPipelineByNameRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetPipelineByNameRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["name"] + val, ok := pathParams["name"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name") } - protoReq.Name, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_GetPipelineByName_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.GetPipelineByName(ctx, &protoReq) return msg, metadata, err - } -var ( - filter_PipelineService_ListPipelines_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} -) +var filter_PipelineService_ListPipelines_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} func request_PipelineService_ListPipelines_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListPipelinesRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListPipelinesRequest + metadata runtime.ServerMetadata + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_ListPipelines_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListPipelines(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_ListPipelines_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListPipelinesRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListPipelinesRequest + metadata runtime.ServerMetadata + ) if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_ListPipelines_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.ListPipelines(ctx, &protoReq) return msg, metadata, err - } -func request_PipelineService_DeletePipeline_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeletePipelineRequest - var metadata 
runtime.ServerMetadata +var filter_PipelineService_DeletePipeline_0 = &utilities.DoubleArray{Encoding: map[string]int{"pipeline_id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} +func request_PipelineService_DeletePipeline_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { var ( - val string - ok bool - err error - _ = err + protoReq DeletePipelineRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["pipeline_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["pipeline_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "pipeline_id") } - protoReq.PipelineId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "pipeline_id", err) } - + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_DeletePipeline_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } msg, err := client.DeletePipeline(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_DeletePipeline_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeletePipelineRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DeletePipelineRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["pipeline_id"] + val, ok := pathParams["pipeline_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "pipeline_id") } - protoReq.PipelineId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "pipeline_id", err) } - + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_DeletePipeline_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } msg, err := server.DeletePipeline(ctx, &protoReq) return msg, metadata, err - } func request_PipelineService_CreatePipelineAndVersion_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreatePipelineAndVersionRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + var ( + protoReq CreatePipelineAndVersionRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - + if req.Body != nil { + _, _ = io.Copy(io.Discard, 
req.Body) + } msg, err := client.CreatePipelineAndVersion(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_CreatePipelineAndVersion_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreatePipelineAndVersionRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + var ( + protoReq CreatePipelineAndVersionRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.CreatePipelineAndVersion(ctx, &protoReq) return msg, metadata, err - } func request_PipelineService_CreatePipelineVersion_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreatePipelineVersionRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.PipelineVersion); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - var ( - val string - ok bool - err error - _ = err + protoReq CreatePipelineVersionRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["pipeline_id"] + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.PipelineVersion); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["pipeline_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "pipeline_id") } - protoReq.PipelineId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "pipeline_id", err) } - msg, err := client.CreatePipelineVersion(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_CreatePipelineVersion_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreatePipelineVersionRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.PipelineVersion); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - var ( - val string - ok bool - err error - _ = err + protoReq CreatePipelineVersionRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["pipeline_id"] + if err := 
marshaler.NewDecoder(req.Body).Decode(&protoReq.PipelineVersion); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + val, ok := pathParams["pipeline_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "pipeline_id") } - protoReq.PipelineId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "pipeline_id", err) } - msg, err := server.CreatePipelineVersion(ctx, &protoReq) return msg, metadata, err - } func request_PipelineService_GetPipelineVersion_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetPipelineVersionRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetPipelineVersionRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["pipeline_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["pipeline_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "pipeline_id") } - protoReq.PipelineId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "pipeline_id", err) } - val, ok = pathParams["pipeline_version_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "pipeline_version_id") } - protoReq.PipelineVersionId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "pipeline_version_id", err) } - msg, err := client.GetPipelineVersion(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_GetPipelineVersion_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetPipelineVersionRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetPipelineVersionRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["pipeline_id"] + val, ok := pathParams["pipeline_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "pipeline_id") } - protoReq.PipelineId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "pipeline_id", err) } - val, ok = pathParams["pipeline_version_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "pipeline_version_id") } - protoReq.PipelineVersionId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "pipeline_version_id", err) } - msg, err := server.GetPipelineVersion(ctx, &protoReq) return msg, metadata, err - } -var ( - filter_PipelineService_ListPipelineVersions_0 = &utilities.DoubleArray{Encoding: map[string]int{"pipeline_id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} -) +var filter_PipelineService_ListPipelineVersions_0 = 
&utilities.DoubleArray{Encoding: map[string]int{"pipeline_id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} func request_PipelineService_ListPipelineVersions_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListPipelineVersionsRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq ListPipelineVersionsRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["pipeline_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["pipeline_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "pipeline_id") } - protoReq.PipelineId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "pipeline_id", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_ListPipelineVersions_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListPipelineVersions(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_ListPipelineVersions_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListPipelineVersionsRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq ListPipelineVersionsRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["pipeline_id"] + val, ok := pathParams["pipeline_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "pipeline_id") } - protoReq.PipelineId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "pipeline_id", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_PipelineService_ListPipelineVersions_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.ListPipelineVersions(ctx, &protoReq) return msg, metadata, err - } func request_PipelineService_DeletePipelineVersion_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeletePipelineVersionRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DeletePipelineVersionRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["pipeline_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["pipeline_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "pipeline_id") } - protoReq.PipelineId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type 
mismatch, parameter: %s, error: %v", "pipeline_id", err) } - val, ok = pathParams["pipeline_version_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "pipeline_version_id") } - protoReq.PipelineVersionId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "pipeline_version_id", err) } - msg, err := client.DeletePipelineVersion(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_PipelineService_DeletePipelineVersion_0(ctx context.Context, marshaler runtime.Marshaler, server PipelineServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeletePipelineVersionRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DeletePipelineVersionRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["pipeline_id"] + val, ok := pathParams["pipeline_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "pipeline_id") } - protoReq.PipelineId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "pipeline_id", err) } - val, ok = pathParams["pipeline_version_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "pipeline_version_id") } - protoReq.PipelineVersionId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "pipeline_version_id", err) } - msg, err := server.DeletePipelineVersion(ctx, &protoReq) return msg, metadata, err - } // RegisterPipelineServiceHandlerServer registers the http handlers for service PipelineService to "mux". // UnaryRPC :call PipelineServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. // Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterPipelineServiceHandlerFromEndpoint instead. +// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call. 
func RegisterPipelineServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server PipelineServiceServer) error { - - mux.Handle("POST", pattern_PipelineService_CreatePipeline_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_PipelineService_CreatePipeline_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/CreatePipeline", runtime.WithHTTPPathPattern("/apis/v2beta1/pipelines")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_CreatePipeline_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_CreatePipeline_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_CreatePipeline_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_CreatePipeline_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_PipelineService_GetPipeline_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_GetPipeline_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/GetPipeline", runtime.WithHTTPPathPattern("/apis/v2beta1/pipelines/{pipeline_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_GetPipeline_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_GetPipeline_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_GetPipeline_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_GetPipeline_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_PipelineService_GetPipelineByName_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_GetPipelineByName_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/GetPipelineByName", runtime.WithHTTPPathPattern("/apis/v2beta1/pipelines/names/{name}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_GetPipelineByName_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_GetPipelineByName_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_GetPipelineByName_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- + forward_PipelineService_GetPipelineByName_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_PipelineService_ListPipelines_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_ListPipelines_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/ListPipelines", runtime.WithHTTPPathPattern("/apis/v2beta1/pipelines")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_ListPipelines_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_ListPipelines_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_ListPipelines_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_ListPipelines_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("DELETE", pattern_PipelineService_DeletePipeline_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodDelete, pattern_PipelineService_DeletePipeline_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/DeletePipeline", runtime.WithHTTPPathPattern("/apis/v2beta1/pipelines/{pipeline_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_DeletePipeline_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_DeletePipeline_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_DeletePipeline_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_DeletePipeline_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_PipelineService_CreatePipelineAndVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_PipelineService_CreatePipelineAndVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/CreatePipelineAndVersion", runtime.WithHTTPPathPattern("/apis/v2beta1/pipelines/create")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_CreatePipelineAndVersion_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_CreatePipelineAndVersion_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_CreatePipelineAndVersion_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- + forward_PipelineService_CreatePipelineAndVersion_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_PipelineService_CreatePipelineVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_PipelineService_CreatePipelineVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/CreatePipelineVersion", runtime.WithHTTPPathPattern("/apis/v2beta1/pipelines/{pipeline_id}/versions")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_CreatePipelineVersion_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_CreatePipelineVersion_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_CreatePipelineVersion_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_CreatePipelineVersion_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_PipelineService_GetPipelineVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_GetPipelineVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/GetPipelineVersion", runtime.WithHTTPPathPattern("/apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_GetPipelineVersion_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_GetPipelineVersion_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_GetPipelineVersion_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_GetPipelineVersion_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_PipelineService_ListPipelineVersions_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_ListPipelineVersions_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/ListPipelineVersions", runtime.WithHTTPPathPattern("/apis/v2beta1/pipelines/{pipeline_id}/versions")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_ListPipelineVersions_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_ListPipelineVersions_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_ListPipelineVersions_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_ListPipelineVersions_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("DELETE", pattern_PipelineService_DeletePipelineVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodDelete, pattern_PipelineService_DeletePipelineVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/DeletePipelineVersion", runtime.WithHTTPPathPattern("/apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_PipelineService_DeletePipelineVersion_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_PipelineService_DeletePipelineVersion_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_DeletePipelineVersion_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_DeletePipelineVersion_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) return nil @@ -853,25 +690,24 @@ func RegisterPipelineServiceHandlerServer(ctx context.Context, mux *runtime.Serv // RegisterPipelineServiceHandlerFromEndpoint is same as RegisterPipelineServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterPipelineServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) + conn, err := grpc.NewClient(endpoint, opts...) if err != nil { return err } defer func() { if err != nil { if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } return } go func() { <-ctx.Done() if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } }() }() - return RegisterPipelineServiceHandler(ctx, mux, conn) } @@ -885,252 +721,203 @@ func RegisterPipelineServiceHandler(ctx context.Context, mux *runtime.ServeMux, // to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "PipelineServiceClient". // Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "PipelineServiceClient" // doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "PipelineServiceClient" to call the correct interceptors. +// "PipelineServiceClient" to call the correct interceptors. 
This client ignores the HTTP middlewares. func RegisterPipelineServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client PipelineServiceClient) error { - - mux.Handle("POST", pattern_PipelineService_CreatePipeline_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_PipelineService_CreatePipeline_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/CreatePipeline", runtime.WithHTTPPathPattern("/apis/v2beta1/pipelines")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_CreatePipeline_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_CreatePipeline_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_CreatePipeline_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_CreatePipeline_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_PipelineService_GetPipeline_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_GetPipeline_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/GetPipeline", runtime.WithHTTPPathPattern("/apis/v2beta1/pipelines/{pipeline_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_GetPipeline_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_GetPipeline_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_GetPipeline_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_GetPipeline_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_PipelineService_GetPipelineByName_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_GetPipelineByName_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/GetPipelineByName", runtime.WithHTTPPathPattern("/apis/v2beta1/pipelines/names/{name}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_GetPipelineByName_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_GetPipelineByName_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_GetPipelineByName_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_GetPipelineByName_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_PipelineService_ListPipelines_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_ListPipelines_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/ListPipelines", runtime.WithHTTPPathPattern("/apis/v2beta1/pipelines")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_ListPipelines_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_ListPipelines_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_ListPipelines_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_ListPipelines_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("DELETE", pattern_PipelineService_DeletePipeline_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodDelete, pattern_PipelineService_DeletePipeline_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/DeletePipeline", runtime.WithHTTPPathPattern("/apis/v2beta1/pipelines/{pipeline_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_DeletePipeline_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_DeletePipeline_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_DeletePipeline_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_DeletePipeline_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_PipelineService_CreatePipelineAndVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_PipelineService_CreatePipelineAndVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/CreatePipelineAndVersion", runtime.WithHTTPPathPattern("/apis/v2beta1/pipelines/create")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_CreatePipelineAndVersion_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_CreatePipelineAndVersion_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_CreatePipelineAndVersion_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_CreatePipelineAndVersion_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_PipelineService_CreatePipelineVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_PipelineService_CreatePipelineVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/CreatePipelineVersion", runtime.WithHTTPPathPattern("/apis/v2beta1/pipelines/{pipeline_id}/versions")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_CreatePipelineVersion_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_CreatePipelineVersion_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_CreatePipelineVersion_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_CreatePipelineVersion_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_PipelineService_GetPipelineVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_GetPipelineVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/GetPipelineVersion", runtime.WithHTTPPathPattern("/apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_GetPipelineVersion_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_GetPipelineVersion_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_GetPipelineVersion_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_GetPipelineVersion_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_PipelineService_ListPipelineVersions_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_PipelineService_ListPipelineVersions_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/ListPipelineVersions", runtime.WithHTTPPathPattern("/apis/v2beta1/pipelines/{pipeline_id}/versions")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_ListPipelineVersions_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_ListPipelineVersions_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_ListPipelineVersions_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_ListPipelineVersions_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("DELETE", pattern_PipelineService_DeletePipelineVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodDelete, pattern_PipelineService_DeletePipelineVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/DeletePipelineVersion", runtime.WithHTTPPathPattern("/apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_PipelineService_DeletePipelineVersion_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_PipelineService_DeletePipelineVersion_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_PipelineService_DeletePipelineVersion_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_PipelineService_DeletePipelineVersion_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - return nil } var ( - pattern_PipelineService_CreatePipeline_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "pipelines"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_GetPipeline_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "pipelines", "pipeline_id"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_GetPipelineByName_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"apis", "v2beta1", "pipelines", "names", "name"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_ListPipelines_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "pipelines"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_DeletePipeline_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "pipelines", "pipeline_id"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_CreatePipelineAndVersion_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"apis", "v2beta1", "pipelines", "create"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_CreatePipelineVersion_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4}, []string{"apis", "v2beta1", "pipelines", "pipeline_id", "versions"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_GetPipelineVersion_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4, 1, 0, 4, 1, 5, 5}, []string{"apis", "v2beta1", "pipelines", "pipeline_id", "versions", "pipeline_version_id"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_ListPipelineVersions_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4}, []string{"apis", "v2beta1", "pipelines", "pipeline_id", "versions"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_PipelineService_DeletePipelineVersion_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4, 1, 0, 4, 1, 5, 5}, []string{"apis", "v2beta1", "pipelines", "pipeline_id", "versions", "pipeline_version_id"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_PipelineService_CreatePipeline_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "pipelines"}, "")) + pattern_PipelineService_GetPipeline_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "pipelines", "pipeline_id"}, "")) + pattern_PipelineService_GetPipelineByName_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"apis", "v2beta1", "pipelines", "names", "name"}, "")) + pattern_PipelineService_ListPipelines_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "pipelines"}, "")) + pattern_PipelineService_DeletePipeline_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "pipelines", "pipeline_id"}, "")) + pattern_PipelineService_CreatePipelineAndVersion_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"apis", "v2beta1", "pipelines", "create"}, "")) + pattern_PipelineService_CreatePipelineVersion_0 = 
runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4}, []string{"apis", "v2beta1", "pipelines", "pipeline_id", "versions"}, "")) + pattern_PipelineService_GetPipelineVersion_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4, 1, 0, 4, 1, 5, 5}, []string{"apis", "v2beta1", "pipelines", "pipeline_id", "versions", "pipeline_version_id"}, "")) + pattern_PipelineService_ListPipelineVersions_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4}, []string{"apis", "v2beta1", "pipelines", "pipeline_id", "versions"}, "")) + pattern_PipelineService_DeletePipelineVersion_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4, 1, 0, 4, 1, 5, 5}, []string{"apis", "v2beta1", "pipelines", "pipeline_id", "versions", "pipeline_version_id"}, "")) ) var ( - forward_PipelineService_CreatePipeline_0 = runtime.ForwardResponseMessage - - forward_PipelineService_GetPipeline_0 = runtime.ForwardResponseMessage - - forward_PipelineService_GetPipelineByName_0 = runtime.ForwardResponseMessage - - forward_PipelineService_ListPipelines_0 = runtime.ForwardResponseMessage - - forward_PipelineService_DeletePipeline_0 = runtime.ForwardResponseMessage - + forward_PipelineService_CreatePipeline_0 = runtime.ForwardResponseMessage + forward_PipelineService_GetPipeline_0 = runtime.ForwardResponseMessage + forward_PipelineService_GetPipelineByName_0 = runtime.ForwardResponseMessage + forward_PipelineService_ListPipelines_0 = runtime.ForwardResponseMessage + forward_PipelineService_DeletePipeline_0 = runtime.ForwardResponseMessage forward_PipelineService_CreatePipelineAndVersion_0 = runtime.ForwardResponseMessage - - forward_PipelineService_CreatePipelineVersion_0 = runtime.ForwardResponseMessage - - forward_PipelineService_GetPipelineVersion_0 = runtime.ForwardResponseMessage - - forward_PipelineService_ListPipelineVersions_0 = runtime.ForwardResponseMessage - - forward_PipelineService_DeletePipelineVersion_0 = runtime.ForwardResponseMessage + forward_PipelineService_CreatePipelineVersion_0 = runtime.ForwardResponseMessage + forward_PipelineService_GetPipelineVersion_0 = runtime.ForwardResponseMessage + forward_PipelineService_ListPipelineVersions_0 = runtime.ForwardResponseMessage + forward_PipelineService_DeletePipelineVersion_0 = runtime.ForwardResponseMessage ) diff --git a/backend/api/v2beta1/go_client/pipeline_grpc.pb.go b/backend/api/v2beta1/go_client/pipeline_grpc.pb.go new file mode 100644 index 00000000000..62227f743da --- /dev/null +++ b/backend/api/v2beta1/go_client/pipeline_grpc.pb.go @@ -0,0 +1,500 @@ +// Copyright 2018 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. 
+// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v6.31.1 +// source: backend/api/v2beta1/pipeline.proto + +package go_client + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + emptypb "google.golang.org/protobuf/types/known/emptypb" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + PipelineService_CreatePipeline_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/CreatePipeline" + PipelineService_GetPipeline_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/GetPipeline" + PipelineService_GetPipelineByName_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/GetPipelineByName" + PipelineService_ListPipelines_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/ListPipelines" + PipelineService_DeletePipeline_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/DeletePipeline" + PipelineService_CreatePipelineAndVersion_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/CreatePipelineAndVersion" + PipelineService_CreatePipelineVersion_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/CreatePipelineVersion" + PipelineService_GetPipelineVersion_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/GetPipelineVersion" + PipelineService_ListPipelineVersions_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/ListPipelineVersions" + PipelineService_DeletePipelineVersion_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.PipelineService/DeletePipelineVersion" +) + +// PipelineServiceClient is the client API for PipelineService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type PipelineServiceClient interface { + // Creates a pipeline. + CreatePipeline(ctx context.Context, in *CreatePipelineRequest, opts ...grpc.CallOption) (*Pipeline, error) + // Finds a specific pipeline by ID. + GetPipeline(ctx context.Context, in *GetPipelineRequest, opts ...grpc.CallOption) (*Pipeline, error) + // Finds a specific pipeline by name and namespace. + GetPipelineByName(ctx context.Context, in *GetPipelineByNameRequest, opts ...grpc.CallOption) (*Pipeline, error) + // Finds all pipelines within a namespace. + ListPipelines(ctx context.Context, in *ListPipelinesRequest, opts ...grpc.CallOption) (*ListPipelinesResponse, error) + // Deletes a pipeline by ID. If cascade is false (default), it returns an error if the + // pipeline has any versions. If cascade is true, it will also delete all pipeline versions. + DeletePipeline(ctx context.Context, in *DeletePipelineRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + // Creates a new pipeline and a new pipeline version in a single transaction. + CreatePipelineAndVersion(ctx context.Context, in *CreatePipelineAndVersionRequest, opts ...grpc.CallOption) (*Pipeline, error) + // Adds a pipeline version to the specified pipeline ID. + CreatePipelineVersion(ctx context.Context, in *CreatePipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) + // Gets a pipeline version by pipeline version ID and pipeline ID. 
+ GetPipelineVersion(ctx context.Context, in *GetPipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) + // Lists all pipeline versions of a given pipeline ID. + ListPipelineVersions(ctx context.Context, in *ListPipelineVersionsRequest, opts ...grpc.CallOption) (*ListPipelineVersionsResponse, error) + // Deletes a specific pipeline version by pipeline version ID and pipeline ID. + DeletePipelineVersion(ctx context.Context, in *DeletePipelineVersionRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) +} + +type pipelineServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewPipelineServiceClient(cc grpc.ClientConnInterface) PipelineServiceClient { + return &pipelineServiceClient{cc} +} + +func (c *pipelineServiceClient) CreatePipeline(ctx context.Context, in *CreatePipelineRequest, opts ...grpc.CallOption) (*Pipeline, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Pipeline) + err := c.cc.Invoke(ctx, PipelineService_CreatePipeline_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) GetPipeline(ctx context.Context, in *GetPipelineRequest, opts ...grpc.CallOption) (*Pipeline, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Pipeline) + err := c.cc.Invoke(ctx, PipelineService_GetPipeline_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) GetPipelineByName(ctx context.Context, in *GetPipelineByNameRequest, opts ...grpc.CallOption) (*Pipeline, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Pipeline) + err := c.cc.Invoke(ctx, PipelineService_GetPipelineByName_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) ListPipelines(ctx context.Context, in *ListPipelinesRequest, opts ...grpc.CallOption) (*ListPipelinesResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ListPipelinesResponse) + err := c.cc.Invoke(ctx, PipelineService_ListPipelines_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) DeletePipeline(ctx context.Context, in *DeletePipelineRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, PipelineService_DeletePipeline_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) CreatePipelineAndVersion(ctx context.Context, in *CreatePipelineAndVersionRequest, opts ...grpc.CallOption) (*Pipeline, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Pipeline) + err := c.cc.Invoke(ctx, PipelineService_CreatePipelineAndVersion_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) CreatePipelineVersion(ctx context.Context, in *CreatePipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(PipelineVersion) + err := c.cc.Invoke(ctx, PipelineService_CreatePipelineVersion_FullMethodName, in, out, cOpts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) GetPipelineVersion(ctx context.Context, in *GetPipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(PipelineVersion) + err := c.cc.Invoke(ctx, PipelineService_GetPipelineVersion_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) ListPipelineVersions(ctx context.Context, in *ListPipelineVersionsRequest, opts ...grpc.CallOption) (*ListPipelineVersionsResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ListPipelineVersionsResponse) + err := c.cc.Invoke(ctx, PipelineService_ListPipelineVersions_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) DeletePipelineVersion(ctx context.Context, in *DeletePipelineVersionRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, PipelineService_DeletePipelineVersion_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// PipelineServiceServer is the server API for PipelineService service. +// All implementations must embed UnimplementedPipelineServiceServer +// for forward compatibility. +type PipelineServiceServer interface { + // Creates a pipeline. + CreatePipeline(context.Context, *CreatePipelineRequest) (*Pipeline, error) + // Finds a specific pipeline by ID. + GetPipeline(context.Context, *GetPipelineRequest) (*Pipeline, error) + // Finds a specific pipeline by name and namespace. + GetPipelineByName(context.Context, *GetPipelineByNameRequest) (*Pipeline, error) + // Finds all pipelines within a namespace. + ListPipelines(context.Context, *ListPipelinesRequest) (*ListPipelinesResponse, error) + // Deletes a pipeline by ID. If cascade is false (default), it returns an error if the + // pipeline has any versions. If cascade is true, it will also delete all pipeline versions. + DeletePipeline(context.Context, *DeletePipelineRequest) (*emptypb.Empty, error) + // Creates a new pipeline and a new pipeline version in a single transaction. + CreatePipelineAndVersion(context.Context, *CreatePipelineAndVersionRequest) (*Pipeline, error) + // Adds a pipeline version to the specified pipeline ID. + CreatePipelineVersion(context.Context, *CreatePipelineVersionRequest) (*PipelineVersion, error) + // Gets a pipeline version by pipeline version ID and pipeline ID. + GetPipelineVersion(context.Context, *GetPipelineVersionRequest) (*PipelineVersion, error) + // Lists all pipeline versions of a given pipeline ID. + ListPipelineVersions(context.Context, *ListPipelineVersionsRequest) (*ListPipelineVersionsResponse, error) + // Deletes a specific pipeline version by pipeline version ID and pipeline ID. + DeletePipelineVersion(context.Context, *DeletePipelineVersionRequest) (*emptypb.Empty, error) + mustEmbedUnimplementedPipelineServiceServer() +} + +// UnimplementedPipelineServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. 
+type UnimplementedPipelineServiceServer struct{} + +func (UnimplementedPipelineServiceServer) CreatePipeline(context.Context, *CreatePipelineRequest) (*Pipeline, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreatePipeline not implemented") +} +func (UnimplementedPipelineServiceServer) GetPipeline(context.Context, *GetPipelineRequest) (*Pipeline, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetPipeline not implemented") +} +func (UnimplementedPipelineServiceServer) GetPipelineByName(context.Context, *GetPipelineByNameRequest) (*Pipeline, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetPipelineByName not implemented") +} +func (UnimplementedPipelineServiceServer) ListPipelines(context.Context, *ListPipelinesRequest) (*ListPipelinesResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListPipelines not implemented") +} +func (UnimplementedPipelineServiceServer) DeletePipeline(context.Context, *DeletePipelineRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeletePipeline not implemented") +} +func (UnimplementedPipelineServiceServer) CreatePipelineAndVersion(context.Context, *CreatePipelineAndVersionRequest) (*Pipeline, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreatePipelineAndVersion not implemented") +} +func (UnimplementedPipelineServiceServer) CreatePipelineVersion(context.Context, *CreatePipelineVersionRequest) (*PipelineVersion, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreatePipelineVersion not implemented") +} +func (UnimplementedPipelineServiceServer) GetPipelineVersion(context.Context, *GetPipelineVersionRequest) (*PipelineVersion, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetPipelineVersion not implemented") +} +func (UnimplementedPipelineServiceServer) ListPipelineVersions(context.Context, *ListPipelineVersionsRequest) (*ListPipelineVersionsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListPipelineVersions not implemented") +} +func (UnimplementedPipelineServiceServer) DeletePipelineVersion(context.Context, *DeletePipelineVersionRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeletePipelineVersion not implemented") +} +func (UnimplementedPipelineServiceServer) mustEmbedUnimplementedPipelineServiceServer() {} +func (UnimplementedPipelineServiceServer) testEmbeddedByValue() {} + +// UnsafePipelineServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to PipelineServiceServer will +// result in compilation errors. +type UnsafePipelineServiceServer interface { + mustEmbedUnimplementedPipelineServiceServer() +} + +func RegisterPipelineServiceServer(s grpc.ServiceRegistrar, srv PipelineServiceServer) { + // If the following call panics, it indicates UnimplementedPipelineServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O.
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&PipelineService_ServiceDesc, srv) +} + +func _PipelineService_CreatePipeline_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreatePipelineRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).CreatePipeline(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_CreatePipeline_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).CreatePipeline(ctx, req.(*CreatePipelineRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_GetPipeline_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetPipelineRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).GetPipeline(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_GetPipeline_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).GetPipeline(ctx, req.(*GetPipelineRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_GetPipelineByName_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetPipelineByNameRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).GetPipelineByName(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_GetPipelineByName_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).GetPipelineByName(ctx, req.(*GetPipelineByNameRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_ListPipelines_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListPipelinesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).ListPipelines(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_ListPipelines_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).ListPipelines(ctx, req.(*ListPipelinesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_DeletePipeline_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeletePipelineRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).DeletePipeline(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_DeletePipeline_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).DeletePipeline(ctx, req.(*DeletePipelineRequest)) + } 
+ return interceptor(ctx, in, info, handler) +} + +func _PipelineService_CreatePipelineAndVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreatePipelineAndVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).CreatePipelineAndVersion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_CreatePipelineAndVersion_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).CreatePipelineAndVersion(ctx, req.(*CreatePipelineAndVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_CreatePipelineVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreatePipelineVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).CreatePipelineVersion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_CreatePipelineVersion_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).CreatePipelineVersion(ctx, req.(*CreatePipelineVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_GetPipelineVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetPipelineVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).GetPipelineVersion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_GetPipelineVersion_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).GetPipelineVersion(ctx, req.(*GetPipelineVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_ListPipelineVersions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListPipelineVersionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).ListPipelineVersions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_ListPipelineVersions_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).ListPipelineVersions(ctx, req.(*ListPipelineVersionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_DeletePipelineVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeletePipelineVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).DeletePipelineVersion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: PipelineService_DeletePipelineVersion_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, 
error) { + return srv.(PipelineServiceServer).DeletePipelineVersion(ctx, req.(*DeletePipelineVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// PipelineService_ServiceDesc is the grpc.ServiceDesc for PipelineService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var PipelineService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "kubeflow.pipelines.backend.api.v2beta1.PipelineService", + HandlerType: (*PipelineServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreatePipeline", + Handler: _PipelineService_CreatePipeline_Handler, + }, + { + MethodName: "GetPipeline", + Handler: _PipelineService_GetPipeline_Handler, + }, + { + MethodName: "GetPipelineByName", + Handler: _PipelineService_GetPipelineByName_Handler, + }, + { + MethodName: "ListPipelines", + Handler: _PipelineService_ListPipelines_Handler, + }, + { + MethodName: "DeletePipeline", + Handler: _PipelineService_DeletePipeline_Handler, + }, + { + MethodName: "CreatePipelineAndVersion", + Handler: _PipelineService_CreatePipelineAndVersion_Handler, + }, + { + MethodName: "CreatePipelineVersion", + Handler: _PipelineService_CreatePipelineVersion_Handler, + }, + { + MethodName: "GetPipelineVersion", + Handler: _PipelineService_GetPipelineVersion_Handler, + }, + { + MethodName: "ListPipelineVersions", + Handler: _PipelineService_ListPipelineVersions_Handler, + }, + { + MethodName: "DeletePipelineVersion", + Handler: _PipelineService_DeletePipelineVersion_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "backend/api/v2beta1/pipeline.proto", +} diff --git a/backend/api/v2beta1/go_client/recurring_run.pb.go b/backend/api/v2beta1/go_client/recurring_run.pb.go index 851292412c9..7f3a2a6ea6c 100644 --- a/backend/api/v2beta1/go_client/recurring_run.pb.go +++ b/backend/api/v2beta1/go_client/recurring_run.pb.go @@ -14,20 +14,16 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v2beta1/recurring_run.proto package go_client import ( - context "context" - _ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options" + _ "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options" _ "google.golang.org/genproto/googleapis/api/annotations" status "google.golang.org/genproto/googleapis/rpc/status" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status1 "google.golang.org/grpc/status" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" emptypb "google.golang.org/protobuf/types/known/emptypb" @@ -35,6 +31,7 @@ import ( timestamppb "google.golang.org/protobuf/types/known/timestamppb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -148,10 +145,7 @@ func (RecurringRun_Status) EnumDescriptor() ([]byte, []int) { } type RecurringRun struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Output. Unique run ID generated by API server. RecurringRunId string `protobuf:"bytes,1,opt,name=recurring_run_id,json=recurringRunId,proto3" json:"recurring_run_id,omitempty"` // Required input field. Recurring run name provided by user. Not unique. @@ -161,7 +155,7 @@ type RecurringRun struct { // Required input field. 
Specifies the source of the pipeline spec for this // recurring run. Can be either a pipeline id, pipeline version id, or a pipeline spec. // - // Types that are assignable to PipelineSource: + // Types that are valid to be assigned to PipelineSource: // // *RecurringRun_PipelineVersionId // *RecurringRun_PipelineSpec @@ -195,16 +189,16 @@ type RecurringRun struct { // Output only. Namespace this recurring run belongs to. Derived from the parent experiment. Namespace string `protobuf:"bytes,16,opt,name=namespace,proto3" json:"namespace,omitempty"` // ID of the parent experiment this recurring run belongs to. - ExperimentId string `protobuf:"bytes,17,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` + ExperimentId string `protobuf:"bytes,17,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *RecurringRun) Reset() { *x = RecurringRun{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *RecurringRun) String() string { @@ -215,7 +209,7 @@ func (*RecurringRun) ProtoMessage() {} func (x *RecurringRun) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -251,31 +245,37 @@ func (x *RecurringRun) GetDescription() string { return "" } -func (m *RecurringRun) GetPipelineSource() isRecurringRun_PipelineSource { - if m != nil { - return m.PipelineSource +func (x *RecurringRun) GetPipelineSource() isRecurringRun_PipelineSource { + if x != nil { + return x.PipelineSource } return nil } // Deprecated: Marked as deprecated in backend/api/v2beta1/recurring_run.proto. func (x *RecurringRun) GetPipelineVersionId() string { - if x, ok := x.GetPipelineSource().(*RecurringRun_PipelineVersionId); ok { - return x.PipelineVersionId + if x != nil { + if x, ok := x.PipelineSource.(*RecurringRun_PipelineVersionId); ok { + return x.PipelineVersionId + } } return "" } func (x *RecurringRun) GetPipelineSpec() *structpb.Struct { - if x, ok := x.GetPipelineSource().(*RecurringRun_PipelineSpec); ok { - return x.PipelineSpec + if x != nil { + if x, ok := x.PipelineSource.(*RecurringRun_PipelineSpec); ok { + return x.PipelineSpec + } } return nil } func (x *RecurringRun) GetPipelineVersionReference() *PipelineVersionReference { - if x, ok := x.GetPipelineSource().(*RecurringRun_PipelineVersionReference); ok { - return x.PipelineVersionReference + if x != nil { + if x, ok := x.PipelineSource.(*RecurringRun_PipelineVersionReference); ok { + return x.PipelineVersionReference + } } return nil } @@ -392,21 +392,18 @@ func (*RecurringRun_PipelineSpec) isRecurringRun_PipelineSource() {} func (*RecurringRun_PipelineVersionReference) isRecurringRun_PipelineSource() {} type CreateRecurringRunRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The recurring run to be created. 
- RecurringRun *RecurringRun `protobuf:"bytes,1,opt,name=recurring_run,json=recurringRun,proto3" json:"recurring_run,omitempty"` + RecurringRun *RecurringRun `protobuf:"bytes,1,opt,name=recurring_run,json=recurringRun,proto3" json:"recurring_run,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *CreateRecurringRunRequest) Reset() { *x = CreateRecurringRunRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *CreateRecurringRunRequest) String() string { @@ -417,7 +414,7 @@ func (*CreateRecurringRunRequest) ProtoMessage() {} func (x *CreateRecurringRunRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -440,21 +437,18 @@ func (x *CreateRecurringRunRequest) GetRecurringRun() *RecurringRun { } type GetRecurringRunRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the recurring run to be retrieved. RecurringRunId string `protobuf:"bytes,1,opt,name=recurring_run_id,json=recurringRunId,proto3" json:"recurring_run_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *GetRecurringRunRequest) Reset() { *x = GetRecurringRunRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *GetRecurringRunRequest) String() string { @@ -465,7 +459,7 @@ func (*GetRecurringRunRequest) ProtoMessage() {} func (x *GetRecurringRunRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -488,10 +482,7 @@ func (x *GetRecurringRunRequest) GetRecurringRunId() string { } type ListRecurringRunsRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // A page token to request the next page of results. The token is acquired // from the nextPageToken field of the response from the previous // ListRecurringRuns call or can be omitted when fetching the first page. @@ -509,16 +500,16 @@ type ListRecurringRunsRequest struct { // [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` // The ID of the experiment to be retrieved. If empty, list recurring runs across all experiments. 
- ExperimentId string `protobuf:"bytes,6,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` + ExperimentId string `protobuf:"bytes,6,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListRecurringRunsRequest) Reset() { *x = ListRecurringRunsRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListRecurringRunsRequest) String() string { @@ -529,7 +520,7 @@ func (*ListRecurringRunsRequest) ProtoMessage() {} func (x *ListRecurringRunsRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -587,25 +578,22 @@ func (x *ListRecurringRunsRequest) GetExperimentId() string { } type ListRecurringRunsResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // A list of recurring runs returned. RecurringRuns []*RecurringRun `protobuf:"bytes,1,rep,name=recurringRuns,proto3" json:"recurringRuns,omitempty"` // The total number of recurring runs for the given query. TotalSize int32 `protobuf:"varint,2,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` // The token to list the next page of recurring runs. NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListRecurringRunsResponse) Reset() { *x = ListRecurringRunsResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListRecurringRunsResponse) String() string { @@ -616,7 +604,7 @@ func (*ListRecurringRunsResponse) ProtoMessage() {} func (x *ListRecurringRunsResponse) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -653,21 +641,18 @@ func (x *ListRecurringRunsResponse) GetNextPageToken() string { } type EnableRecurringRunRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the recurring runs to be enabled. 
RecurringRunId string `protobuf:"bytes,1,opt,name=recurring_run_id,json=recurringRunId,proto3" json:"recurring_run_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *EnableRecurringRunRequest) Reset() { *x = EnableRecurringRunRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *EnableRecurringRunRequest) String() string { @@ -678,7 +663,7 @@ func (*EnableRecurringRunRequest) ProtoMessage() {} func (x *EnableRecurringRunRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -701,21 +686,18 @@ func (x *EnableRecurringRunRequest) GetRecurringRunId() string { } type DisableRecurringRunRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the recurring runs to be disabled. RecurringRunId string `protobuf:"bytes,1,opt,name=recurring_run_id,json=recurringRunId,proto3" json:"recurring_run_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DisableRecurringRunRequest) Reset() { *x = DisableRecurringRunRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DisableRecurringRunRequest) String() string { @@ -726,7 +708,7 @@ func (*DisableRecurringRunRequest) ProtoMessage() {} func (x *DisableRecurringRunRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -749,21 +731,18 @@ func (x *DisableRecurringRunRequest) GetRecurringRunId() string { } type DeleteRecurringRunRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the recurring run to be deleted. 
RecurringRunId string `protobuf:"bytes,1,opt,name=recurring_run_id,json=recurringRunId,proto3" json:"recurring_run_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DeleteRecurringRunRequest) Reset() { *x = DeleteRecurringRunRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DeleteRecurringRunRequest) String() string { @@ -774,7 +753,7 @@ func (*DeleteRecurringRunRequest) ProtoMessage() {} func (x *DeleteRecurringRunRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -798,26 +777,23 @@ func (x *DeleteRecurringRunRequest) GetRecurringRunId() string { // CronSchedule allow scheduling the recurring run with unix-like cron. type CronSchedule struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The start time of the cron job. StartTime *timestamppb.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` // The end time of the cron job. EndTime *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` // The cron string. For details how to compose a cron, visit // ttps://en.wikipedia.org/wiki/Cron - Cron string `protobuf:"bytes,3,opt,name=cron,proto3" json:"cron,omitempty"` + Cron string `protobuf:"bytes,3,opt,name=cron,proto3" json:"cron,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *CronSchedule) Reset() { *x = CronSchedule{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *CronSchedule) String() string { @@ -828,7 +804,7 @@ func (*CronSchedule) ProtoMessage() {} func (x *CronSchedule) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -866,25 +842,22 @@ func (x *CronSchedule) GetCron() string { // PeriodicSchedule allow scheduling the recurring run periodically with certain interval. type PeriodicSchedule struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The start time of the periodic recurring run. StartTime *timestamppb.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` // The end time of the periodic recurring run. 
EndTime *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` // The time interval between the starting time of consecutive recurring runs. IntervalSecond int64 `protobuf:"varint,3,opt,name=interval_second,json=intervalSecond,proto3" json:"interval_second,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PeriodicSchedule) Reset() { *x = PeriodicSchedule{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[9] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PeriodicSchedule) String() string { @@ -895,7 +868,7 @@ func (*PeriodicSchedule) ProtoMessage() {} func (x *PeriodicSchedule) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[9] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -933,24 +906,21 @@ func (x *PeriodicSchedule) GetIntervalSecond() int64 { // Trigger defines what starts a pipeline run. type Trigger struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Trigger: + state protoimpl.MessageState `protogen:"open.v1"` + // Types that are valid to be assigned to Trigger: // // *Trigger_CronSchedule // *Trigger_PeriodicSchedule - Trigger isTrigger_Trigger `protobuf_oneof:"trigger"` + Trigger isTrigger_Trigger `protobuf_oneof:"trigger"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Trigger) Reset() { *x = Trigger{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Trigger) String() string { @@ -961,7 +931,7 @@ func (*Trigger) ProtoMessage() {} func (x *Trigger) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_recurring_run_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -976,23 +946,27 @@ func (*Trigger) Descriptor() ([]byte, []int) { return file_backend_api_v2beta1_recurring_run_proto_rawDescGZIP(), []int{10} } -func (m *Trigger) GetTrigger() isTrigger_Trigger { - if m != nil { - return m.Trigger +func (x *Trigger) GetTrigger() isTrigger_Trigger { + if x != nil { + return x.Trigger } return nil } func (x *Trigger) GetCronSchedule() *CronSchedule { - if x, ok := x.GetTrigger().(*Trigger_CronSchedule); ok { - return x.CronSchedule + if x != nil { + if x, ok := x.Trigger.(*Trigger_CronSchedule); ok { + return x.CronSchedule + } } return nil } func (x *Trigger) GetPeriodicSchedule() *PeriodicSchedule { - if x, ok := x.GetTrigger().(*Trigger_PeriodicSchedule); ok { - return x.PeriodicSchedule + if x != nil { + if x, ok := x.Trigger.(*Trigger_PeriodicSchedule); ok { + return x.PeriodicSchedule + } } return nil } @@ -1015,280 +989,102 @@ func (*Trigger_PeriodicSchedule) isTrigger_Trigger() {} var 
File_backend_api_v2beta1_recurring_run_proto protoreflect.FileDescriptor -var file_backend_api_v2beta1_recurring_run_proto_rawDesc = []byte{ - 0x0a, 0x27, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x5f, - 0x72, 0x75, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x26, 0x6b, 0x75, 0x62, 0x65, 0x66, - 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, - 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, - 0x31, 0x1a, 0x28, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, - 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x63, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1d, 0x62, 0x61, 0x63, - 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2f, 0x72, 0x75, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x73, 0x74, 0x72, 0x75, - 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, - 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x72, 0x70, 0x63, - 0x2f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2c, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, 0x67, 0x65, 0x6e, 0x2d, 0x73, 0x77, 0x61, 0x67, 0x67, 0x65, - 0x72, 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x9e, 0x09, 0x0a, 0x0c, - 0x52, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x12, 0x28, 0x0a, 0x10, - 0x72, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x72, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, - 0x67, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x21, 0x0a, 0x0c, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, - 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x69, - 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, - 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, - 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x34, 0x0a, 0x13, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, - 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x48, 0x00, 0x52, 0x11, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, - 0x64, 0x12, 0x3e, 0x0a, 0x0d, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x73, 0x70, - 0x65, 0x63, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 
0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, - 0x74, 0x48, 0x00, 0x52, 0x0c, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x53, 0x70, 0x65, - 0x63, 0x12, 0x80, 0x01, 0x0a, 0x1a, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x76, - 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, - 0x18, 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x40, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, - 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, - 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, - 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x48, 0x00, 0x52, 0x18, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, - 0x65, 0x6e, 0x63, 0x65, 0x12, 0x5c, 0x0a, 0x0e, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x5f, - 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x35, 0x2e, 0x6b, - 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x52, 0x0d, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x12, 0x27, 0x0a, 0x0f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x5f, 0x61, 0x63, - 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x73, 0x65, 0x72, - 0x76, 0x69, 0x63, 0x65, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x27, 0x0a, 0x0f, 0x6d, - 0x61, 0x78, 0x5f, 0x63, 0x6f, 0x6e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x18, 0x08, - 0x20, 0x01, 0x28, 0x03, 0x52, 0x0e, 0x6d, 0x61, 0x78, 0x43, 0x6f, 0x6e, 0x63, 0x75, 0x72, 0x72, - 0x65, 0x6e, 0x63, 0x79, 0x12, 0x49, 0x0a, 0x07, 0x74, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x18, - 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, - 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x54, - 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x52, 0x07, 0x74, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x12, - 0x4d, 0x0a, 0x04, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x39, 0x2e, - 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, - 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, - 0x52, 0x75, 0x6e, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x52, 0x04, 0x6d, 0x6f, 0x64, 0x65, 0x12, 0x39, - 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x0b, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, - 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x39, 0x0a, 0x0a, 0x75, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, - 0x67, 
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, - 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x75, 0x70, 0x64, 0x61, 0x74, - 0x65, 0x64, 0x41, 0x74, 0x12, 0x53, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x0d, - 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3b, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, - 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x65, - 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, - 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x28, 0x0a, 0x05, 0x65, 0x72, 0x72, - 0x6f, 0x72, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x05, 0x65, 0x72, - 0x72, 0x6f, 0x72, 0x12, 0x1d, 0x0a, 0x0a, 0x6e, 0x6f, 0x5f, 0x63, 0x61, 0x74, 0x63, 0x68, 0x75, - 0x70, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x6e, 0x6f, 0x43, 0x61, 0x74, 0x63, 0x68, - 0x75, 0x70, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, - 0x10, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, - 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, - 0x64, 0x18, 0x11, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, - 0x65, 0x6e, 0x74, 0x49, 0x64, 0x22, 0x35, 0x0a, 0x04, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x14, 0x0a, - 0x10, 0x4d, 0x4f, 0x44, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, - 0x44, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x45, 0x4e, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x01, 0x12, - 0x0b, 0x0a, 0x07, 0x44, 0x49, 0x53, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x02, 0x22, 0x3b, 0x0a, 0x06, - 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x16, 0x0a, 0x12, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, - 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0b, - 0x0a, 0x07, 0x45, 0x4e, 0x41, 0x42, 0x4c, 0x45, 0x44, 0x10, 0x01, 0x12, 0x0c, 0x0a, 0x08, 0x44, - 0x49, 0x53, 0x41, 0x42, 0x4c, 0x45, 0x44, 0x10, 0x03, 0x42, 0x11, 0x0a, 0x0f, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, 0x76, 0x0a, 0x19, - 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, - 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x59, 0x0a, 0x0d, 0x72, 0x65, 0x63, - 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x72, 0x75, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x34, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x65, 0x63, 0x75, 0x72, 0x72, - 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x52, 0x0c, 0x72, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, - 0x67, 0x52, 0x75, 0x6e, 0x22, 0x42, 0x0a, 0x16, 0x47, 0x65, 0x74, 0x52, 0x65, 0x63, 0x75, 0x72, - 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x28, - 0x0a, 0x10, 0x72, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x72, 0x75, 0x6e, 0x5f, - 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x72, 0x65, 0x63, 0x75, 0x72, 0x72, - 0x69, 0x6e, 0x67, 0x52, 0x75, 
0x6e, 0x49, 0x64, 0x22, 0xca, 0x01, 0x0a, 0x18, 0x4c, 0x69, 0x73, - 0x74, 0x52, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x73, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, - 0x6b, 0x65, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x61, 0x67, 0x65, 0x54, - 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x69, 0x7a, - 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, - 0x65, 0x12, 0x17, 0x0a, 0x07, 0x73, 0x6f, 0x72, 0x74, 0x5f, 0x62, 0x79, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x72, 0x74, 0x42, 0x79, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, - 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, - 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, - 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, - 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, - 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, - 0x65, 0x6e, 0x74, 0x49, 0x64, 0x22, 0xbe, 0x01, 0x0a, 0x19, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, - 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x5a, 0x0a, 0x0d, 0x72, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, - 0x52, 0x75, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x6b, 0x75, 0x62, - 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, - 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, - 0x74, 0x61, 0x31, 0x2e, 0x52, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, - 0x52, 0x0d, 0x72, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x73, 0x12, - 0x1d, 0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x26, - 0x0a, 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, - 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x61, 0x67, - 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x45, 0x0a, 0x19, 0x45, 0x6e, 0x61, 0x62, 0x6c, 0x65, - 0x52, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x12, 0x28, 0x0a, 0x10, 0x72, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, - 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x72, - 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x22, 0x46, 0x0a, - 0x1a, 0x44, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, - 0x67, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x28, 0x0a, 0x10, 0x72, - 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x72, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, - 0x52, 0x75, 0x6e, 0x49, 0x64, 0x22, 0x45, 0x0a, 0x19, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, - 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x28, 0x0a, 0x10, 0x72, 0x65, 0x63, 
0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x5f, - 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x72, 0x65, - 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x22, 0x94, 0x01, 0x0a, - 0x0c, 0x43, 0x72, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x12, 0x39, 0x0a, - 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x73, - 0x74, 0x61, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x5f, - 0x74, 0x69, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, - 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x07, 0x65, 0x6e, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x12, - 0x12, 0x0a, 0x04, 0x63, 0x72, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x63, - 0x72, 0x6f, 0x6e, 0x22, 0xad, 0x01, 0x0a, 0x10, 0x50, 0x65, 0x72, 0x69, 0x6f, 0x64, 0x69, 0x63, - 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x12, 0x39, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, - 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, - 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, - 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x54, - 0x69, 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, - 0x70, 0x52, 0x07, 0x65, 0x6e, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x69, 0x6e, - 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x03, 0x52, 0x0e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x53, 0x65, 0x63, - 0x6f, 0x6e, 0x64, 0x22, 0xda, 0x01, 0x0a, 0x07, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x12, - 0x5b, 0x0a, 0x0d, 0x63, 0x72, 0x6f, 0x6e, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, - 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, - 0x43, 0x72, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x48, 0x00, 0x52, 0x0c, - 0x63, 0x72, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x12, 0x67, 0x0a, 0x11, - 0x70, 0x65, 0x72, 0x69, 0x6f, 0x64, 0x69, 0x63, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, - 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x38, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, - 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, - 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2e, 0x50, 0x65, 0x72, 0x69, 0x6f, 0x64, 0x69, 0x63, 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, - 0x65, 0x48, 0x00, 0x52, 0x10, 0x70, 0x65, 0x72, 0x69, 0x6f, 0x64, 0x69, 0x63, 0x53, 0x63, 0x68, - 0x65, 0x64, 0x75, 0x6c, 0x65, 0x42, 0x09, 0x0a, 0x07, 0x74, 0x72, 0x69, 0x67, 
0x67, 0x65, 0x72, - 0x32, 0xea, 0x08, 0x0a, 0x13, 0x52, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, - 0x6e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0xc1, 0x01, 0x0a, 0x12, 0x43, 0x72, 0x65, - 0x61, 0x74, 0x65, 0x52, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x12, - 0x41, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, - 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x34, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x65, 0x63, 0x75, - 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x22, 0x32, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x2c, - 0x3a, 0x0d, 0x72, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x72, 0x75, 0x6e, 0x22, - 0x1b, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, - 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x72, 0x75, 0x6e, 0x73, 0x12, 0xbf, 0x01, 0x0a, - 0x0f, 0x47, 0x65, 0x74, 0x52, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, - 0x12, 0x3e, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x65, 0x63, - 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x34, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x65, 0x63, 0x75, 0x72, 0x72, - 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x22, 0x36, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x30, 0x12, 0x2e, - 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x65, - 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x72, 0x75, 0x6e, 0x73, 0x2f, 0x7b, 0x72, 0x65, 0x63, - 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x7d, 0x12, 0xbd, - 0x01, 0x0a, 0x11, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, - 0x52, 0x75, 0x6e, 0x73, 0x12, 0x40, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, - 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x4c, 0x69, - 0x73, 0x74, 0x52, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x73, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x41, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, - 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, - 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, - 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x23, 0x82, 0xd3, 0xe4, 0x93, 0x02, - 0x1d, 
0x12, 0x1b, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2f, 0x72, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x72, 0x75, 0x6e, 0x73, 0x12, 0xae, - 0x01, 0x0a, 0x12, 0x45, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, - 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x12, 0x41, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, - 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x45, - 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, - 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, - 0x22, 0x3d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x37, 0x22, 0x35, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, - 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, - 0x67, 0x72, 0x75, 0x6e, 0x73, 0x2f, 0x7b, 0x72, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, - 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x7d, 0x3a, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x12, - 0xb1, 0x01, 0x0a, 0x13, 0x44, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x65, 0x63, 0x75, 0x72, - 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x12, 0x42, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, - 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, - 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2e, 0x44, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, - 0x67, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, - 0x70, 0x74, 0x79, 0x22, 0x3e, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x38, 0x22, 0x36, 0x2f, 0x61, 0x70, - 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x65, 0x63, 0x75, 0x72, - 0x72, 0x69, 0x6e, 0x67, 0x72, 0x75, 0x6e, 0x73, 0x2f, 0x7b, 0x72, 0x65, 0x63, 0x75, 0x72, 0x72, - 0x69, 0x6e, 0x67, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x7d, 0x3a, 0x64, 0x69, 0x73, 0x61, - 0x62, 0x6c, 0x65, 0x12, 0xa7, 0x01, 0x0a, 0x12, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, - 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x12, 0x41, 0x2e, 0x6b, 0x75, 0x62, - 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, - 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, - 0x74, 0x61, 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x63, 0x75, 0x72, 0x72, - 0x69, 0x6e, 0x67, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, - 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x36, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x30, 0x2a, 0x2e, 0x2f, - 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x65, 0x63, - 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x72, 0x75, 0x6e, 0x73, 0x2f, 0x7b, 0x72, 0x65, 0x63, 0x75, - 0x72, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x7d, 0x42, 0x44, 0x92, - 0x41, 0x04, 0x2a, 0x02, 0x01, 0x02, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, - 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 
0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, - 0x69, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x67, 0x6f, 0x5f, 0x63, 0x6c, 0x69, - 0x65, 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v2beta1_recurring_run_proto_rawDesc = "" + + "\n" + + "'backend/api/v2beta1/recurring_run.proto\x12&kubeflow.pipelines.backend.api.v2beta1\x1a(backend/api/v2beta1/runtime_config.proto\x1a\x1dbackend/api/v2beta1/run.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x17google/rpc/status.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\"\x9e\t\n" + + "\fRecurringRun\x12(\n" + + "\x10recurring_run_id\x18\x01 \x01(\tR\x0erecurringRunId\x12!\n" + + "\fdisplay_name\x18\x02 \x01(\tR\vdisplayName\x12 \n" + + "\vdescription\x18\x03 \x01(\tR\vdescription\x124\n" + + "\x13pipeline_version_id\x18\x04 \x01(\tB\x02\x18\x01H\x00R\x11pipelineVersionId\x12>\n" + + "\rpipeline_spec\x18\x05 \x01(\v2\x17.google.protobuf.StructH\x00R\fpipelineSpec\x12\x80\x01\n" + + "\x1apipeline_version_reference\x18\x12 \x01(\v2@.kubeflow.pipelines.backend.api.v2beta1.PipelineVersionReferenceH\x00R\x18pipelineVersionReference\x12\\\n" + + "\x0eruntime_config\x18\x06 \x01(\v25.kubeflow.pipelines.backend.api.v2beta1.RuntimeConfigR\rruntimeConfig\x12'\n" + + "\x0fservice_account\x18\a \x01(\tR\x0eserviceAccount\x12'\n" + + "\x0fmax_concurrency\x18\b \x01(\x03R\x0emaxConcurrency\x12I\n" + + "\atrigger\x18\t \x01(\v2/.kubeflow.pipelines.backend.api.v2beta1.TriggerR\atrigger\x12M\n" + + "\x04mode\x18\n" + + " \x01(\x0e29.kubeflow.pipelines.backend.api.v2beta1.RecurringRun.ModeR\x04mode\x129\n" + + "\n" + + "created_at\x18\v \x01(\v2\x1a.google.protobuf.TimestampR\tcreatedAt\x129\n" + + "\n" + + "updated_at\x18\f \x01(\v2\x1a.google.protobuf.TimestampR\tupdatedAt\x12S\n" + + "\x06status\x18\r \x01(\x0e2;.kubeflow.pipelines.backend.api.v2beta1.RecurringRun.StatusR\x06status\x12(\n" + + "\x05error\x18\x0e \x01(\v2\x12.google.rpc.StatusR\x05error\x12\x1d\n" + + "\n" + + "no_catchup\x18\x0f \x01(\bR\tnoCatchup\x12\x1c\n" + + "\tnamespace\x18\x10 \x01(\tR\tnamespace\x12#\n" + + "\rexperiment_id\x18\x11 \x01(\tR\fexperimentId\"5\n" + + "\x04Mode\x12\x14\n" + + "\x10MODE_UNSPECIFIED\x10\x00\x12\n" + + "\n" + + "\x06ENABLE\x10\x01\x12\v\n" + + "\aDISABLE\x10\x02\";\n" + + "\x06Status\x12\x16\n" + + "\x12STATUS_UNSPECIFIED\x10\x00\x12\v\n" + + "\aENABLED\x10\x01\x12\f\n" + + "\bDISABLED\x10\x03B\x11\n" + + "\x0fpipeline_source\"v\n" + + "\x19CreateRecurringRunRequest\x12Y\n" + + "\rrecurring_run\x18\x01 \x01(\v24.kubeflow.pipelines.backend.api.v2beta1.RecurringRunR\frecurringRun\"B\n" + + "\x16GetRecurringRunRequest\x12(\n" + + "\x10recurring_run_id\x18\x01 \x01(\tR\x0erecurringRunId\"\xca\x01\n" + + "\x18ListRecurringRunsRequest\x12\x1d\n" + + "\n" + + "page_token\x18\x01 \x01(\tR\tpageToken\x12\x1b\n" + + "\tpage_size\x18\x02 \x01(\x05R\bpageSize\x12\x17\n" + + "\asort_by\x18\x03 \x01(\tR\x06sortBy\x12\x1c\n" + + "\tnamespace\x18\x04 \x01(\tR\tnamespace\x12\x16\n" + + "\x06filter\x18\x05 \x01(\tR\x06filter\x12#\n" + + "\rexperiment_id\x18\x06 \x01(\tR\fexperimentId\"\xbe\x01\n" + + "\x19ListRecurringRunsResponse\x12Z\n" + + "\rrecurringRuns\x18\x01 \x03(\v24.kubeflow.pipelines.backend.api.v2beta1.RecurringRunR\rrecurringRuns\x12\x1d\n" + + "\n" + + "total_size\x18\x02 
\x01(\x05R\ttotalSize\x12&\n" + + "\x0fnext_page_token\x18\x03 \x01(\tR\rnextPageToken\"E\n" + + "\x19EnableRecurringRunRequest\x12(\n" + + "\x10recurring_run_id\x18\x01 \x01(\tR\x0erecurringRunId\"F\n" + + "\x1aDisableRecurringRunRequest\x12(\n" + + "\x10recurring_run_id\x18\x01 \x01(\tR\x0erecurringRunId\"E\n" + + "\x19DeleteRecurringRunRequest\x12(\n" + + "\x10recurring_run_id\x18\x01 \x01(\tR\x0erecurringRunId\"\x94\x01\n" + + "\fCronSchedule\x129\n" + + "\n" + + "start_time\x18\x01 \x01(\v2\x1a.google.protobuf.TimestampR\tstartTime\x125\n" + + "\bend_time\x18\x02 \x01(\v2\x1a.google.protobuf.TimestampR\aendTime\x12\x12\n" + + "\x04cron\x18\x03 \x01(\tR\x04cron\"\xad\x01\n" + + "\x10PeriodicSchedule\x129\n" + + "\n" + + "start_time\x18\x01 \x01(\v2\x1a.google.protobuf.TimestampR\tstartTime\x125\n" + + "\bend_time\x18\x02 \x01(\v2\x1a.google.protobuf.TimestampR\aendTime\x12'\n" + + "\x0finterval_second\x18\x03 \x01(\x03R\x0eintervalSecond\"\xda\x01\n" + + "\aTrigger\x12[\n" + + "\rcron_schedule\x18\x01 \x01(\v24.kubeflow.pipelines.backend.api.v2beta1.CronScheduleH\x00R\fcronSchedule\x12g\n" + + "\x11periodic_schedule\x18\x02 \x01(\v28.kubeflow.pipelines.backend.api.v2beta1.PeriodicScheduleH\x00R\x10periodicScheduleB\t\n" + + "\atrigger2\xea\b\n" + + "\x13RecurringRunService\x12\xc1\x01\n" + + "\x12CreateRecurringRun\x12A.kubeflow.pipelines.backend.api.v2beta1.CreateRecurringRunRequest\x1a4.kubeflow.pipelines.backend.api.v2beta1.RecurringRun\"2\x82\xd3\xe4\x93\x02,:\rrecurring_run\"\x1b/apis/v2beta1/recurringruns\x12\xbf\x01\n" + + "\x0fGetRecurringRun\x12>.kubeflow.pipelines.backend.api.v2beta1.GetRecurringRunRequest\x1a4.kubeflow.pipelines.backend.api.v2beta1.RecurringRun\"6\x82\xd3\xe4\x93\x020\x12./apis/v2beta1/recurringruns/{recurring_run_id}\x12\xbd\x01\n" + + "\x11ListRecurringRuns\x12@.kubeflow.pipelines.backend.api.v2beta1.ListRecurringRunsRequest\x1aA.kubeflow.pipelines.backend.api.v2beta1.ListRecurringRunsResponse\"#\x82\xd3\xe4\x93\x02\x1d\x12\x1b/apis/v2beta1/recurringruns\x12\xae\x01\n" + + "\x12EnableRecurringRun\x12A.kubeflow.pipelines.backend.api.v2beta1.EnableRecurringRunRequest\x1a\x16.google.protobuf.Empty\"=\x82\xd3\xe4\x93\x027\"5/apis/v2beta1/recurringruns/{recurring_run_id}:enable\x12\xb1\x01\n" + + "\x13DisableRecurringRun\x12B.kubeflow.pipelines.backend.api.v2beta1.DisableRecurringRunRequest\x1a\x16.google.protobuf.Empty\">\x82\xd3\xe4\x93\x028\"6/apis/v2beta1/recurringruns/{recurring_run_id}:disable\x12\xa7\x01\n" + + "\x12DeleteRecurringRun\x12A.kubeflow.pipelines.backend.api.v2beta1.DeleteRecurringRunRequest\x1a\x16.google.protobuf.Empty\"6\x82\xd3\xe4\x93\x020*./apis/v2beta1/recurringruns/{recurring_run_id}BD\x92A\x04*\x02\x01\x02Z;github.com/kubeflow/pipelines/backend/api/v2beta1/go_clientb\x06proto3" var ( file_backend_api_v2beta1_recurring_run_proto_rawDescOnce sync.Once - file_backend_api_v2beta1_recurring_run_proto_rawDescData = file_backend_api_v2beta1_recurring_run_proto_rawDesc + file_backend_api_v2beta1_recurring_run_proto_rawDescData []byte ) func file_backend_api_v2beta1_recurring_run_proto_rawDescGZIP() []byte { file_backend_api_v2beta1_recurring_run_proto_rawDescOnce.Do(func() { - file_backend_api_v2beta1_recurring_run_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v2beta1_recurring_run_proto_rawDescData) + file_backend_api_v2beta1_recurring_run_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_recurring_run_proto_rawDesc), 
len(file_backend_api_v2beta1_recurring_run_proto_rawDesc))) }) return file_backend_api_v2beta1_recurring_run_proto_rawDescData } var file_backend_api_v2beta1_recurring_run_proto_enumTypes = make([]protoimpl.EnumInfo, 2) var file_backend_api_v2beta1_recurring_run_proto_msgTypes = make([]protoimpl.MessageInfo, 11) -var file_backend_api_v2beta1_recurring_run_proto_goTypes = []interface{}{ +var file_backend_api_v2beta1_recurring_run_proto_goTypes = []any{ (RecurringRun_Mode)(0), // 0: kubeflow.pipelines.backend.api.v2beta1.RecurringRun.Mode (RecurringRun_Status)(0), // 1: kubeflow.pipelines.backend.api.v2beta1.RecurringRun.Status (*RecurringRun)(nil), // 2: kubeflow.pipelines.backend.api.v2beta1.RecurringRun @@ -1353,146 +1149,12 @@ func file_backend_api_v2beta1_recurring_run_proto_init() { } file_backend_api_v2beta1_runtime_config_proto_init() file_backend_api_v2beta1_run_proto_init() - if !protoimpl.UnsafeEnabled { - file_backend_api_v2beta1_recurring_run_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RecurringRun); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_recurring_run_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateRecurringRunRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_recurring_run_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetRecurringRunRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_recurring_run_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListRecurringRunsRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_recurring_run_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListRecurringRunsResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_recurring_run_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*EnableRecurringRunRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_recurring_run_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DisableRecurringRunRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_recurring_run_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeleteRecurringRunRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_recurring_run_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CronSchedule); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - 
file_backend_api_v2beta1_recurring_run_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PeriodicSchedule); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_recurring_run_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Trigger); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_backend_api_v2beta1_recurring_run_proto_msgTypes[0].OneofWrappers = []interface{}{ + file_backend_api_v2beta1_recurring_run_proto_msgTypes[0].OneofWrappers = []any{ (*RecurringRun_PipelineVersionId)(nil), (*RecurringRun_PipelineSpec)(nil), (*RecurringRun_PipelineVersionReference)(nil), } - file_backend_api_v2beta1_recurring_run_proto_msgTypes[10].OneofWrappers = []interface{}{ + file_backend_api_v2beta1_recurring_run_proto_msgTypes[10].OneofWrappers = []any{ (*Trigger_CronSchedule)(nil), (*Trigger_PeriodicSchedule)(nil), } @@ -1500,7 +1162,7 @@ func file_backend_api_v2beta1_recurring_run_proto_init() { out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v2beta1_recurring_run_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_recurring_run_proto_rawDesc), len(file_backend_api_v2beta1_recurring_run_proto_rawDesc)), NumEnums: 2, NumMessages: 11, NumExtensions: 0, @@ -1512,283 +1174,6 @@ func file_backend_api_v2beta1_recurring_run_proto_init() { MessageInfos: file_backend_api_v2beta1_recurring_run_proto_msgTypes, }.Build() File_backend_api_v2beta1_recurring_run_proto = out.File - file_backend_api_v2beta1_recurring_run_proto_rawDesc = nil file_backend_api_v2beta1_recurring_run_proto_goTypes = nil file_backend_api_v2beta1_recurring_run_proto_depIdxs = nil } - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// RecurringRunServiceClient is the client API for RecurringRunService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type RecurringRunServiceClient interface { - // Creates a new recurring run in an experiment, given the experiment ID. - CreateRecurringRun(ctx context.Context, in *CreateRecurringRunRequest, opts ...grpc.CallOption) (*RecurringRun, error) - // Finds a specific recurring run by ID. - GetRecurringRun(ctx context.Context, in *GetRecurringRunRequest, opts ...grpc.CallOption) (*RecurringRun, error) - // Finds all recurring runs given experiment and namespace. - // If experiment ID is not specified, find all recurring runs across all experiments. - ListRecurringRuns(ctx context.Context, in *ListRecurringRunsRequest, opts ...grpc.CallOption) (*ListRecurringRunsResponse, error) - // Restarts a recurring run that was previously stopped. All runs associated with the - // recurring run will continue. - EnableRecurringRun(ctx context.Context, in *EnableRecurringRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - // Stops a recurring run and all its associated runs. The recurring run is not deleted. 
- DisableRecurringRun(ctx context.Context, in *DisableRecurringRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - // Deletes a recurring run. - DeleteRecurringRun(ctx context.Context, in *DeleteRecurringRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) -} - -type recurringRunServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewRecurringRunServiceClient(cc grpc.ClientConnInterface) RecurringRunServiceClient { - return &recurringRunServiceClient{cc} -} - -func (c *recurringRunServiceClient) CreateRecurringRun(ctx context.Context, in *CreateRecurringRunRequest, opts ...grpc.CallOption) (*RecurringRun, error) { - out := new(RecurringRun) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/CreateRecurringRun", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *recurringRunServiceClient) GetRecurringRun(ctx context.Context, in *GetRecurringRunRequest, opts ...grpc.CallOption) (*RecurringRun, error) { - out := new(RecurringRun) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/GetRecurringRun", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *recurringRunServiceClient) ListRecurringRuns(ctx context.Context, in *ListRecurringRunsRequest, opts ...grpc.CallOption) (*ListRecurringRunsResponse, error) { - out := new(ListRecurringRunsResponse) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/ListRecurringRuns", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *recurringRunServiceClient) EnableRecurringRun(ctx context.Context, in *EnableRecurringRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/EnableRecurringRun", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *recurringRunServiceClient) DisableRecurringRun(ctx context.Context, in *DisableRecurringRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/DisableRecurringRun", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *recurringRunServiceClient) DeleteRecurringRun(ctx context.Context, in *DeleteRecurringRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/DeleteRecurringRun", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// RecurringRunServiceServer is the server API for RecurringRunService service. -type RecurringRunServiceServer interface { - // Creates a new recurring run in an experiment, given the experiment ID. - CreateRecurringRun(context.Context, *CreateRecurringRunRequest) (*RecurringRun, error) - // Finds a specific recurring run by ID. - GetRecurringRun(context.Context, *GetRecurringRunRequest) (*RecurringRun, error) - // Finds all recurring runs given experiment and namespace. - // If experiment ID is not specified, find all recurring runs across all experiments. - ListRecurringRuns(context.Context, *ListRecurringRunsRequest) (*ListRecurringRunsResponse, error) - // Restarts a recurring run that was previously stopped. All runs associated with the - // recurring run will continue. 
- EnableRecurringRun(context.Context, *EnableRecurringRunRequest) (*emptypb.Empty, error) - // Stops a recurring run and all its associated runs. The recurring run is not deleted. - DisableRecurringRun(context.Context, *DisableRecurringRunRequest) (*emptypb.Empty, error) - // Deletes a recurring run. - DeleteRecurringRun(context.Context, *DeleteRecurringRunRequest) (*emptypb.Empty, error) -} - -// UnimplementedRecurringRunServiceServer can be embedded to have forward compatible implementations. -type UnimplementedRecurringRunServiceServer struct { -} - -func (*UnimplementedRecurringRunServiceServer) CreateRecurringRun(context.Context, *CreateRecurringRunRequest) (*RecurringRun, error) { - return nil, status1.Errorf(codes.Unimplemented, "method CreateRecurringRun not implemented") -} -func (*UnimplementedRecurringRunServiceServer) GetRecurringRun(context.Context, *GetRecurringRunRequest) (*RecurringRun, error) { - return nil, status1.Errorf(codes.Unimplemented, "method GetRecurringRun not implemented") -} -func (*UnimplementedRecurringRunServiceServer) ListRecurringRuns(context.Context, *ListRecurringRunsRequest) (*ListRecurringRunsResponse, error) { - return nil, status1.Errorf(codes.Unimplemented, "method ListRecurringRuns not implemented") -} -func (*UnimplementedRecurringRunServiceServer) EnableRecurringRun(context.Context, *EnableRecurringRunRequest) (*emptypb.Empty, error) { - return nil, status1.Errorf(codes.Unimplemented, "method EnableRecurringRun not implemented") -} -func (*UnimplementedRecurringRunServiceServer) DisableRecurringRun(context.Context, *DisableRecurringRunRequest) (*emptypb.Empty, error) { - return nil, status1.Errorf(codes.Unimplemented, "method DisableRecurringRun not implemented") -} -func (*UnimplementedRecurringRunServiceServer) DeleteRecurringRun(context.Context, *DeleteRecurringRunRequest) (*emptypb.Empty, error) { - return nil, status1.Errorf(codes.Unimplemented, "method DeleteRecurringRun not implemented") -} - -func RegisterRecurringRunServiceServer(s *grpc.Server, srv RecurringRunServiceServer) { - s.RegisterService(&_RecurringRunService_serviceDesc, srv) -} - -func _RecurringRunService_CreateRecurringRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CreateRecurringRunRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RecurringRunServiceServer).CreateRecurringRun(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/CreateRecurringRun", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RecurringRunServiceServer).CreateRecurringRun(ctx, req.(*CreateRecurringRunRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RecurringRunService_GetRecurringRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetRecurringRunRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RecurringRunServiceServer).GetRecurringRun(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/GetRecurringRun", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RecurringRunServiceServer).GetRecurringRun(ctx, 
req.(*GetRecurringRunRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RecurringRunService_ListRecurringRuns_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ListRecurringRunsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RecurringRunServiceServer).ListRecurringRuns(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/ListRecurringRuns", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RecurringRunServiceServer).ListRecurringRuns(ctx, req.(*ListRecurringRunsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RecurringRunService_EnableRecurringRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(EnableRecurringRunRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RecurringRunServiceServer).EnableRecurringRun(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/EnableRecurringRun", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RecurringRunServiceServer).EnableRecurringRun(ctx, req.(*EnableRecurringRunRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RecurringRunService_DisableRecurringRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(DisableRecurringRunRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RecurringRunServiceServer).DisableRecurringRun(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/DisableRecurringRun", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RecurringRunServiceServer).DisableRecurringRun(ctx, req.(*DisableRecurringRunRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RecurringRunService_DeleteRecurringRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(DeleteRecurringRunRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RecurringRunServiceServer).DeleteRecurringRun(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/DeleteRecurringRun", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RecurringRunServiceServer).DeleteRecurringRun(ctx, req.(*DeleteRecurringRunRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _RecurringRunService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "kubeflow.pipelines.backend.api.v2beta1.RecurringRunService", - HandlerType: (*RecurringRunServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "CreateRecurringRun", - Handler: _RecurringRunService_CreateRecurringRun_Handler, - }, - { - MethodName: "GetRecurringRun", - Handler: _RecurringRunService_GetRecurringRun_Handler, - }, - { - 
MethodName: "ListRecurringRuns", - Handler: _RecurringRunService_ListRecurringRuns_Handler, - }, - { - MethodName: "EnableRecurringRun", - Handler: _RecurringRunService_EnableRecurringRun_Handler, - }, - { - MethodName: "DisableRecurringRun", - Handler: _RecurringRunService_DisableRecurringRun_Handler, - }, - { - MethodName: "DeleteRecurringRun", - Handler: _RecurringRunService_DeleteRecurringRun_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "backend/api/v2beta1/recurring_run.proto", -} diff --git a/backend/api/v2beta1/go_client/recurring_run.pb.gw.go b/backend/api/v2beta1/go_client/recurring_run.pb.gw.go index a62d96adfa7..9555ed911cd 100644 --- a/backend/api/v2beta1/go_client/recurring_run.pb.gw.go +++ b/backend/api/v2beta1/go_client/recurring_run.pb.gw.go @@ -10,457 +10,374 @@ package go_client import ( "context" + "errors" "io" "net/http" - "github.com/golang/protobuf/descriptor" - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" ) // Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = descriptor.ForMessage -var _ = metadata.Join +var ( + _ codes.Code + _ io.Reader + _ status.Status + _ = errors.New + _ = runtime.String + _ = utilities.NewDoubleArray + _ = metadata.Join +) func request_RecurringRunService_CreateRecurringRun_0(ctx context.Context, marshaler runtime.Marshaler, client RecurringRunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreateRecurringRunRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.RecurringRun); err != nil && err != io.EOF { + var ( + protoReq CreateRecurringRunRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.RecurringRun); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } msg, err := client.CreateRecurringRun(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RecurringRunService_CreateRecurringRun_0(ctx context.Context, marshaler runtime.Marshaler, server RecurringRunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreateRecurringRunRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.RecurringRun); err != nil && err != io.EOF { + var ( + protoReq CreateRecurringRunRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.RecurringRun); err != nil && 
!errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.CreateRecurringRun(ctx, &protoReq) return msg, metadata, err - } func request_RecurringRunService_GetRecurringRun_0(ctx context.Context, marshaler runtime.Marshaler, client RecurringRunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetRecurringRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetRecurringRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["recurring_run_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["recurring_run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "recurring_run_id") } - protoReq.RecurringRunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "recurring_run_id", err) } - msg, err := client.GetRecurringRun(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RecurringRunService_GetRecurringRun_0(ctx context.Context, marshaler runtime.Marshaler, server RecurringRunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetRecurringRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetRecurringRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["recurring_run_id"] + val, ok := pathParams["recurring_run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "recurring_run_id") } - protoReq.RecurringRunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "recurring_run_id", err) } - msg, err := server.GetRecurringRun(ctx, &protoReq) return msg, metadata, err - } -var ( - filter_RecurringRunService_ListRecurringRuns_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} -) +var filter_RecurringRunService_ListRecurringRuns_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} func request_RecurringRunService_ListRecurringRuns_0(ctx context.Context, marshaler runtime.Marshaler, client RecurringRunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListRecurringRunsRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListRecurringRunsRequest + metadata runtime.ServerMetadata + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RecurringRunService_ListRecurringRuns_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListRecurringRuns(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RecurringRunService_ListRecurringRuns_0(ctx context.Context, marshaler runtime.Marshaler, server RecurringRunServiceServer, req 
*http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListRecurringRunsRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListRecurringRunsRequest + metadata runtime.ServerMetadata + ) if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RecurringRunService_ListRecurringRuns_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.ListRecurringRuns(ctx, &protoReq) return msg, metadata, err - } func request_RecurringRunService_EnableRecurringRun_0(ctx context.Context, marshaler runtime.Marshaler, client RecurringRunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq EnableRecurringRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq EnableRecurringRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["recurring_run_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["recurring_run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "recurring_run_id") } - protoReq.RecurringRunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "recurring_run_id", err) } - msg, err := client.EnableRecurringRun(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RecurringRunService_EnableRecurringRun_0(ctx context.Context, marshaler runtime.Marshaler, server RecurringRunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq EnableRecurringRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq EnableRecurringRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["recurring_run_id"] + val, ok := pathParams["recurring_run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "recurring_run_id") } - protoReq.RecurringRunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "recurring_run_id", err) } - msg, err := server.EnableRecurringRun(ctx, &protoReq) return msg, metadata, err - } func request_RecurringRunService_DisableRecurringRun_0(ctx context.Context, marshaler runtime.Marshaler, client RecurringRunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DisableRecurringRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DisableRecurringRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["recurring_run_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["recurring_run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "recurring_run_id") } - protoReq.RecurringRunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, 
parameter: %s, error: %v", "recurring_run_id", err) } - msg, err := client.DisableRecurringRun(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RecurringRunService_DisableRecurringRun_0(ctx context.Context, marshaler runtime.Marshaler, server RecurringRunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DisableRecurringRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DisableRecurringRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["recurring_run_id"] + val, ok := pathParams["recurring_run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "recurring_run_id") } - protoReq.RecurringRunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "recurring_run_id", err) } - msg, err := server.DisableRecurringRun(ctx, &protoReq) return msg, metadata, err - } func request_RecurringRunService_DeleteRecurringRun_0(ctx context.Context, marshaler runtime.Marshaler, client RecurringRunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeleteRecurringRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DeleteRecurringRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["recurring_run_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["recurring_run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "recurring_run_id") } - protoReq.RecurringRunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "recurring_run_id", err) } - msg, err := client.DeleteRecurringRun(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RecurringRunService_DeleteRecurringRun_0(ctx context.Context, marshaler runtime.Marshaler, server RecurringRunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeleteRecurringRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DeleteRecurringRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["recurring_run_id"] + val, ok := pathParams["recurring_run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "recurring_run_id") } - protoReq.RecurringRunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "recurring_run_id", err) } - msg, err := server.DeleteRecurringRun(ctx, &protoReq) return msg, metadata, err - } // RegisterRecurringRunServiceHandlerServer registers the http handlers for service RecurringRunService to "mux". // UnaryRPC :call RecurringRunServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. // Note that using this registration option will cause many gRPC library features to stop working. 
Consider using RegisterRecurringRunServiceHandlerFromEndpoint instead. +// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call. func RegisterRecurringRunServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server RecurringRunServiceServer) error { - - mux.Handle("POST", pattern_RecurringRunService_CreateRecurringRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RecurringRunService_CreateRecurringRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/CreateRecurringRun", runtime.WithHTTPPathPattern("/apis/v2beta1/recurringruns")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RecurringRunService_CreateRecurringRun_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RecurringRunService_CreateRecurringRun_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RecurringRunService_CreateRecurringRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RecurringRunService_CreateRecurringRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_RecurringRunService_GetRecurringRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_RecurringRunService_GetRecurringRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/GetRecurringRun", runtime.WithHTTPPathPattern("/apis/v2beta1/recurringruns/{recurring_run_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RecurringRunService_GetRecurringRun_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RecurringRunService_GetRecurringRun_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RecurringRunService_GetRecurringRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RecurringRunService_GetRecurringRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_RecurringRunService_ListRecurringRuns_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_RecurringRunService_ListRecurringRuns_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/ListRecurringRuns", runtime.WithHTTPPathPattern("/apis/v2beta1/recurringruns")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RecurringRunService_ListRecurringRuns_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RecurringRunService_ListRecurringRuns_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RecurringRunService_ListRecurringRuns_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RecurringRunService_ListRecurringRuns_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_RecurringRunService_EnableRecurringRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RecurringRunService_EnableRecurringRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/EnableRecurringRun", runtime.WithHTTPPathPattern("/apis/v2beta1/recurringruns/{recurring_run_id}:enable")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RecurringRunService_EnableRecurringRun_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RecurringRunService_EnableRecurringRun_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RecurringRunService_EnableRecurringRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RecurringRunService_EnableRecurringRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_RecurringRunService_DisableRecurringRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RecurringRunService_DisableRecurringRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/DisableRecurringRun", runtime.WithHTTPPathPattern("/apis/v2beta1/recurringruns/{recurring_run_id}:disable")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RecurringRunService_DisableRecurringRun_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RecurringRunService_DisableRecurringRun_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RecurringRunService_DisableRecurringRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RecurringRunService_DisableRecurringRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("DELETE", pattern_RecurringRunService_DeleteRecurringRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodDelete, pattern_RecurringRunService_DeleteRecurringRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/DeleteRecurringRun", runtime.WithHTTPPathPattern("/apis/v2beta1/recurringruns/{recurring_run_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RecurringRunService_DeleteRecurringRun_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RecurringRunService_DeleteRecurringRun_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RecurringRunService_DeleteRecurringRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RecurringRunService_DeleteRecurringRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) return nil @@ -469,25 +386,24 @@ func RegisterRecurringRunServiceHandlerServer(ctx context.Context, mux *runtime. // RegisterRecurringRunServiceHandlerFromEndpoint is same as RegisterRecurringRunServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterRecurringRunServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) + conn, err := grpc.NewClient(endpoint, opts...) if err != nil { return err } defer func() { if err != nil { if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } return } go func() { <-ctx.Done() if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } }() }() - return RegisterRecurringRunServiceHandler(ctx, mux, conn) } @@ -501,156 +417,127 @@ func RegisterRecurringRunServiceHandler(ctx context.Context, mux *runtime.ServeM // to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "RecurringRunServiceClient". // Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "RecurringRunServiceClient" // doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "RecurringRunServiceClient" to call the correct interceptors. +// "RecurringRunServiceClient" to call the correct interceptors. 
This client ignores the HTTP middlewares. func RegisterRecurringRunServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client RecurringRunServiceClient) error { - - mux.Handle("POST", pattern_RecurringRunService_CreateRecurringRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RecurringRunService_CreateRecurringRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/CreateRecurringRun", runtime.WithHTTPPathPattern("/apis/v2beta1/recurringruns")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RecurringRunService_CreateRecurringRun_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RecurringRunService_CreateRecurringRun_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RecurringRunService_CreateRecurringRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RecurringRunService_CreateRecurringRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_RecurringRunService_GetRecurringRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_RecurringRunService_GetRecurringRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/GetRecurringRun", runtime.WithHTTPPathPattern("/apis/v2beta1/recurringruns/{recurring_run_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RecurringRunService_GetRecurringRun_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RecurringRunService_GetRecurringRun_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RecurringRunService_GetRecurringRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RecurringRunService_GetRecurringRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_RecurringRunService_ListRecurringRuns_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_RecurringRunService_ListRecurringRuns_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/ListRecurringRuns", runtime.WithHTTPPathPattern("/apis/v2beta1/recurringruns")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RecurringRunService_ListRecurringRuns_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RecurringRunService_ListRecurringRuns_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RecurringRunService_ListRecurringRuns_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RecurringRunService_ListRecurringRuns_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_RecurringRunService_EnableRecurringRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RecurringRunService_EnableRecurringRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/EnableRecurringRun", runtime.WithHTTPPathPattern("/apis/v2beta1/recurringruns/{recurring_run_id}:enable")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RecurringRunService_EnableRecurringRun_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RecurringRunService_EnableRecurringRun_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RecurringRunService_EnableRecurringRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RecurringRunService_EnableRecurringRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_RecurringRunService_DisableRecurringRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RecurringRunService_DisableRecurringRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/DisableRecurringRun", runtime.WithHTTPPathPattern("/apis/v2beta1/recurringruns/{recurring_run_id}:disable")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RecurringRunService_DisableRecurringRun_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RecurringRunService_DisableRecurringRun_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RecurringRunService_DisableRecurringRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RecurringRunService_DisableRecurringRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("DELETE", pattern_RecurringRunService_DeleteRecurringRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodDelete, pattern_RecurringRunService_DeleteRecurringRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/DeleteRecurringRun", runtime.WithHTTPPathPattern("/apis/v2beta1/recurringruns/{recurring_run_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RecurringRunService_DeleteRecurringRun_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RecurringRunService_DeleteRecurringRun_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RecurringRunService_DeleteRecurringRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RecurringRunService_DeleteRecurringRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - return nil } var ( - pattern_RecurringRunService_CreateRecurringRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "recurringruns"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_RecurringRunService_GetRecurringRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "recurringruns", "recurring_run_id"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_RecurringRunService_ListRecurringRuns_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "recurringruns"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_RecurringRunService_EnableRecurringRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "recurringruns", "recurring_run_id"}, "enable", runtime.AssumeColonVerbOpt(true))) - - pattern_RecurringRunService_DisableRecurringRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "recurringruns", "recurring_run_id"}, "disable", runtime.AssumeColonVerbOpt(true))) - - pattern_RecurringRunService_DeleteRecurringRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "recurringruns", "recurring_run_id"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_RecurringRunService_CreateRecurringRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "recurringruns"}, "")) + pattern_RecurringRunService_GetRecurringRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "recurringruns", "recurring_run_id"}, "")) + pattern_RecurringRunService_ListRecurringRuns_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "recurringruns"}, "")) + pattern_RecurringRunService_EnableRecurringRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "recurringruns", "recurring_run_id"}, "enable")) + pattern_RecurringRunService_DisableRecurringRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "recurringruns", "recurring_run_id"}, "disable")) + pattern_RecurringRunService_DeleteRecurringRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "recurringruns", "recurring_run_id"}, "")) ) var ( - forward_RecurringRunService_CreateRecurringRun_0 = runtime.ForwardResponseMessage - - forward_RecurringRunService_GetRecurringRun_0 = runtime.ForwardResponseMessage - - forward_RecurringRunService_ListRecurringRuns_0 = runtime.ForwardResponseMessage - - forward_RecurringRunService_EnableRecurringRun_0 = runtime.ForwardResponseMessage - + forward_RecurringRunService_CreateRecurringRun_0 = runtime.ForwardResponseMessage + forward_RecurringRunService_GetRecurringRun_0 = runtime.ForwardResponseMessage + forward_RecurringRunService_ListRecurringRuns_0 = runtime.ForwardResponseMessage + forward_RecurringRunService_EnableRecurringRun_0 = runtime.ForwardResponseMessage forward_RecurringRunService_DisableRecurringRun_0 = runtime.ForwardResponseMessage - - forward_RecurringRunService_DeleteRecurringRun_0 = runtime.ForwardResponseMessage + forward_RecurringRunService_DeleteRecurringRun_0 = runtime.ForwardResponseMessage ) diff 
--git a/backend/api/v2beta1/go_client/recurring_run_grpc.pb.go b/backend/api/v2beta1/go_client/recurring_run_grpc.pb.go new file mode 100644 index 00000000000..deb0017997a --- /dev/null +++ b/backend/api/v2beta1/go_client/recurring_run_grpc.pb.go @@ -0,0 +1,342 @@ +// Copyright 2022 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v6.31.1 +// source: backend/api/v2beta1/recurring_run.proto + +package go_client + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + emptypb "google.golang.org/protobuf/types/known/emptypb" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + RecurringRunService_CreateRecurringRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/CreateRecurringRun" + RecurringRunService_GetRecurringRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/GetRecurringRun" + RecurringRunService_ListRecurringRuns_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/ListRecurringRuns" + RecurringRunService_EnableRecurringRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/EnableRecurringRun" + RecurringRunService_DisableRecurringRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/DisableRecurringRun" + RecurringRunService_DeleteRecurringRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RecurringRunService/DeleteRecurringRun" +) + +// RecurringRunServiceClient is the client API for RecurringRunService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type RecurringRunServiceClient interface { + // Creates a new recurring run in an experiment, given the experiment ID. + CreateRecurringRun(ctx context.Context, in *CreateRecurringRunRequest, opts ...grpc.CallOption) (*RecurringRun, error) + // Finds a specific recurring run by ID. + GetRecurringRun(ctx context.Context, in *GetRecurringRunRequest, opts ...grpc.CallOption) (*RecurringRun, error) + // Finds all recurring runs given experiment and namespace. + // If experiment ID is not specified, find all recurring runs across all experiments. + ListRecurringRuns(ctx context.Context, in *ListRecurringRunsRequest, opts ...grpc.CallOption) (*ListRecurringRunsResponse, error) + // Restarts a recurring run that was previously stopped. All runs associated with the + // recurring run will continue. 
+ EnableRecurringRun(ctx context.Context, in *EnableRecurringRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + // Stops a recurring run and all its associated runs. The recurring run is not deleted. + DisableRecurringRun(ctx context.Context, in *DisableRecurringRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + // Deletes a recurring run. + DeleteRecurringRun(ctx context.Context, in *DeleteRecurringRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) +} + +type recurringRunServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewRecurringRunServiceClient(cc grpc.ClientConnInterface) RecurringRunServiceClient { + return &recurringRunServiceClient{cc} +} + +func (c *recurringRunServiceClient) CreateRecurringRun(ctx context.Context, in *CreateRecurringRunRequest, opts ...grpc.CallOption) (*RecurringRun, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(RecurringRun) + err := c.cc.Invoke(ctx, RecurringRunService_CreateRecurringRun_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *recurringRunServiceClient) GetRecurringRun(ctx context.Context, in *GetRecurringRunRequest, opts ...grpc.CallOption) (*RecurringRun, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(RecurringRun) + err := c.cc.Invoke(ctx, RecurringRunService_GetRecurringRun_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *recurringRunServiceClient) ListRecurringRuns(ctx context.Context, in *ListRecurringRunsRequest, opts ...grpc.CallOption) (*ListRecurringRunsResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ListRecurringRunsResponse) + err := c.cc.Invoke(ctx, RecurringRunService_ListRecurringRuns_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *recurringRunServiceClient) EnableRecurringRun(ctx context.Context, in *EnableRecurringRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, RecurringRunService_EnableRecurringRun_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *recurringRunServiceClient) DisableRecurringRun(ctx context.Context, in *DisableRecurringRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, RecurringRunService_DisableRecurringRun_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *recurringRunServiceClient) DeleteRecurringRun(ctx context.Context, in *DeleteRecurringRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, RecurringRunService_DeleteRecurringRun_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// RecurringRunServiceServer is the server API for RecurringRunService service. +// All implementations must embed UnimplementedRecurringRunServiceServer +// for forward compatibility. +type RecurringRunServiceServer interface { + // Creates a new recurring run in an experiment, given the experiment ID. 
+ CreateRecurringRun(context.Context, *CreateRecurringRunRequest) (*RecurringRun, error) + // Finds a specific recurring run by ID. + GetRecurringRun(context.Context, *GetRecurringRunRequest) (*RecurringRun, error) + // Finds all recurring runs given experiment and namespace. + // If experiment ID is not specified, find all recurring runs across all experiments. + ListRecurringRuns(context.Context, *ListRecurringRunsRequest) (*ListRecurringRunsResponse, error) + // Restarts a recurring run that was previously stopped. All runs associated with the + // recurring run will continue. + EnableRecurringRun(context.Context, *EnableRecurringRunRequest) (*emptypb.Empty, error) + // Stops a recurring run and all its associated runs. The recurring run is not deleted. + DisableRecurringRun(context.Context, *DisableRecurringRunRequest) (*emptypb.Empty, error) + // Deletes a recurring run. + DeleteRecurringRun(context.Context, *DeleteRecurringRunRequest) (*emptypb.Empty, error) + mustEmbedUnimplementedRecurringRunServiceServer() +} + +// UnimplementedRecurringRunServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. +type UnimplementedRecurringRunServiceServer struct{} + +func (UnimplementedRecurringRunServiceServer) CreateRecurringRun(context.Context, *CreateRecurringRunRequest) (*RecurringRun, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreateRecurringRun not implemented") +} +func (UnimplementedRecurringRunServiceServer) GetRecurringRun(context.Context, *GetRecurringRunRequest) (*RecurringRun, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetRecurringRun not implemented") +} +func (UnimplementedRecurringRunServiceServer) ListRecurringRuns(context.Context, *ListRecurringRunsRequest) (*ListRecurringRunsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListRecurringRuns not implemented") +} +func (UnimplementedRecurringRunServiceServer) EnableRecurringRun(context.Context, *EnableRecurringRunRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method EnableRecurringRun not implemented") +} +func (UnimplementedRecurringRunServiceServer) DisableRecurringRun(context.Context, *DisableRecurringRunRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method DisableRecurringRun not implemented") +} +func (UnimplementedRecurringRunServiceServer) DeleteRecurringRun(context.Context, *DeleteRecurringRunRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeleteRecurringRun not implemented") +} +func (UnimplementedRecurringRunServiceServer) mustEmbedUnimplementedRecurringRunServiceServer() {} +func (UnimplementedRecurringRunServiceServer) testEmbeddedByValue() {} + +// UnsafeRecurringRunServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to RecurringRunServiceServer will +// result in compilation errors. +type UnsafeRecurringRunServiceServer interface { + mustEmbedUnimplementedRecurringRunServiceServer() +} + +func RegisterRecurringRunServiceServer(s grpc.ServiceRegistrar, srv RecurringRunServiceServer) { + // If the following call pancis, it indicates UnimplementedRecurringRunServiceServer was + // embedded by pointer and is nil. 
This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. + if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&RecurringRunService_ServiceDesc, srv) +} + +func _RecurringRunService_CreateRecurringRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateRecurringRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RecurringRunServiceServer).CreateRecurringRun(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RecurringRunService_CreateRecurringRun_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RecurringRunServiceServer).CreateRecurringRun(ctx, req.(*CreateRecurringRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RecurringRunService_GetRecurringRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetRecurringRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RecurringRunServiceServer).GetRecurringRun(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RecurringRunService_GetRecurringRun_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RecurringRunServiceServer).GetRecurringRun(ctx, req.(*GetRecurringRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RecurringRunService_ListRecurringRuns_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListRecurringRunsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RecurringRunServiceServer).ListRecurringRuns(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RecurringRunService_ListRecurringRuns_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RecurringRunServiceServer).ListRecurringRuns(ctx, req.(*ListRecurringRunsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RecurringRunService_EnableRecurringRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(EnableRecurringRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RecurringRunServiceServer).EnableRecurringRun(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RecurringRunService_EnableRecurringRun_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RecurringRunServiceServer).EnableRecurringRun(ctx, req.(*EnableRecurringRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RecurringRunService_DisableRecurringRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DisableRecurringRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(RecurringRunServiceServer).DisableRecurringRun(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RecurringRunService_DisableRecurringRun_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RecurringRunServiceServer).DisableRecurringRun(ctx, req.(*DisableRecurringRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RecurringRunService_DeleteRecurringRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteRecurringRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RecurringRunServiceServer).DeleteRecurringRun(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RecurringRunService_DeleteRecurringRun_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RecurringRunServiceServer).DeleteRecurringRun(ctx, req.(*DeleteRecurringRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// RecurringRunService_ServiceDesc is the grpc.ServiceDesc for RecurringRunService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var RecurringRunService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "kubeflow.pipelines.backend.api.v2beta1.RecurringRunService", + HandlerType: (*RecurringRunServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateRecurringRun", + Handler: _RecurringRunService_CreateRecurringRun_Handler, + }, + { + MethodName: "GetRecurringRun", + Handler: _RecurringRunService_GetRecurringRun_Handler, + }, + { + MethodName: "ListRecurringRuns", + Handler: _RecurringRunService_ListRecurringRuns_Handler, + }, + { + MethodName: "EnableRecurringRun", + Handler: _RecurringRunService_EnableRecurringRun_Handler, + }, + { + MethodName: "DisableRecurringRun", + Handler: _RecurringRunService_DisableRecurringRun_Handler, + }, + { + MethodName: "DeleteRecurringRun", + Handler: _RecurringRunService_DeleteRecurringRun_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "backend/api/v2beta1/recurring_run.proto", +} diff --git a/backend/api/v2beta1/go_client/report.pb.go b/backend/api/v2beta1/go_client/report.pb.go index 8daed141bd9..9b18f42d4d0 100644 --- a/backend/api/v2beta1/go_client/report.pb.go +++ b/backend/api/v2beta1/go_client/report.pb.go @@ -14,23 +14,20 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v2beta1/report.proto package go_client import ( - context "context" _ "google.golang.org/genproto/googleapis/api/annotations" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" emptypb "google.golang.org/protobuf/types/known/emptypb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -41,21 +38,18 @@ const ( ) type ReportWorkflowRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Workflow is a workflow custom resource marshalled into a json string. 
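// --- Illustrative usage sketch (editorial addition, not part of the generated diff) ---
// A minimal example of how a caller might drive the regenerated RecurringRunService
// client stubs above. The endpoint address, the use of insecure credentials, and the
// empty ListRecurringRunsRequest are assumptions made for illustration only.
package main

import (
	"context"
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"

	go_client "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
)

func main() {
	// grpc.NewClient is the replacement for grpc.Dial expected by the regenerated
	// stubs (SupportPackageIsVersion9 requires gRPC-Go v1.64.0 or later).
	conn, err := grpc.NewClient("localhost:8887", grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatalf("creating client connection: %v", err)
	}
	defer conn.Close()

	client := go_client.NewRecurringRunServiceClient(conn)

	// An empty request lists recurring runs using server-side defaults.
	resp, err := client.ListRecurringRuns(context.Background(), &go_client.ListRecurringRunsRequest{})
	if err != nil {
		log.Fatalf("listing recurring runs: %v", err)
	}
	log.Printf("ListRecurringRuns succeeded: %v", resp)
}
// --- end of illustrative sketch ---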
- Workflow string `protobuf:"bytes,1,opt,name=workflow,proto3" json:"workflow,omitempty"` + Workflow string `protobuf:"bytes,1,opt,name=workflow,proto3" json:"workflow,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ReportWorkflowRequest) Reset() { *x = ReportWorkflowRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_report_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_report_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ReportWorkflowRequest) String() string { @@ -66,7 +60,7 @@ func (*ReportWorkflowRequest) ProtoMessage() {} func (x *ReportWorkflowRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_report_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -89,21 +83,18 @@ func (x *ReportWorkflowRequest) GetWorkflow() string { } type ReportScheduledWorkflowRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // ScheduledWorkflow a ScheduledWorkflow resource marshalled into a json string. ScheduledWorkflow string `protobuf:"bytes,1,opt,name=scheduled_workflow,json=scheduledWorkflow,proto3" json:"scheduled_workflow,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ReportScheduledWorkflowRequest) Reset() { *x = ReportScheduledWorkflowRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_report_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_report_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ReportScheduledWorkflowRequest) String() string { @@ -114,7 +105,7 @@ func (*ReportScheduledWorkflowRequest) ProtoMessage() {} func (x *ReportScheduledWorkflowRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_report_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -138,67 +129,31 @@ func (x *ReportScheduledWorkflowRequest) GetScheduledWorkflow() string { var File_backend_api_v2beta1_report_proto protoreflect.FileDescriptor -var file_backend_api_v2beta1_report_proto_rawDesc = []byte{ - 0x0a, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x12, 0x26, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x33, 0x0a, 0x15, 
0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x57, - 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, - 0x0a, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x22, 0x4f, 0x0a, 0x1e, 0x52, 0x65, - 0x70, 0x6f, 0x72, 0x74, 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x64, 0x57, 0x6f, 0x72, - 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x12, - 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x64, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, - 0x6f, 0x77, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, - 0x6c, 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x32, 0xde, 0x02, 0x0a, 0x0d, - 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x92, 0x01, - 0x0a, 0x0e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, - 0x12, 0x3d, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, - 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, - 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x29, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x23, 0x3a, - 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x22, 0x17, 0x2f, 0x61, 0x70, 0x69, 0x73, - 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, - 0x77, 0x73, 0x12, 0xb7, 0x01, 0x0a, 0x17, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x53, 0x63, 0x68, - 0x65, 0x64, 0x75, 0x6c, 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x12, 0x46, - 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x53, 0x63, - 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x3c, - 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x36, 0x3a, 0x12, 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, - 0x64, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x22, 0x20, 0x2f, 0x61, 0x70, 0x69, - 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, - 0x6c, 0x65, 0x64, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x73, 0x42, 0x3d, 0x5a, 0x3b, - 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, - 0x6c, 0x6f, 0x77, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, - 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, - 0x31, 0x2f, 0x67, 0x6f, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x33, -} +const file_backend_api_v2beta1_report_proto_rawDesc = "" + + "\n" + + " 
backend/api/v2beta1/report.proto\x12&kubeflow.pipelines.backend.api.v2beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\"3\n" + + "\x15ReportWorkflowRequest\x12\x1a\n" + + "\bworkflow\x18\x01 \x01(\tR\bworkflow\"O\n" + + "\x1eReportScheduledWorkflowRequest\x12-\n" + + "\x12scheduled_workflow\x18\x01 \x01(\tR\x11scheduledWorkflow2\xde\x02\n" + + "\rReportService\x12\x92\x01\n" + + "\x0eReportWorkflow\x12=.kubeflow.pipelines.backend.api.v2beta1.ReportWorkflowRequest\x1a\x16.google.protobuf.Empty\")\x82\xd3\xe4\x93\x02#:\bworkflow\"\x17/apis/v2beta1/workflows\x12\xb7\x01\n" + + "\x17ReportScheduledWorkflow\x12F.kubeflow.pipelines.backend.api.v2beta1.ReportScheduledWorkflowRequest\x1a\x16.google.protobuf.Empty\"<\x82\xd3\xe4\x93\x026:\x12scheduled_workflow\" /apis/v2beta1/scheduledworkflowsB=Z;github.com/kubeflow/pipelines/backend/api/v2beta1/go_clientb\x06proto3" var ( file_backend_api_v2beta1_report_proto_rawDescOnce sync.Once - file_backend_api_v2beta1_report_proto_rawDescData = file_backend_api_v2beta1_report_proto_rawDesc + file_backend_api_v2beta1_report_proto_rawDescData []byte ) func file_backend_api_v2beta1_report_proto_rawDescGZIP() []byte { file_backend_api_v2beta1_report_proto_rawDescOnce.Do(func() { - file_backend_api_v2beta1_report_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v2beta1_report_proto_rawDescData) + file_backend_api_v2beta1_report_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_report_proto_rawDesc), len(file_backend_api_v2beta1_report_proto_rawDesc))) }) return file_backend_api_v2beta1_report_proto_rawDescData } var file_backend_api_v2beta1_report_proto_msgTypes = make([]protoimpl.MessageInfo, 2) -var file_backend_api_v2beta1_report_proto_goTypes = []interface{}{ +var file_backend_api_v2beta1_report_proto_goTypes = []any{ (*ReportWorkflowRequest)(nil), // 0: kubeflow.pipelines.backend.api.v2beta1.ReportWorkflowRequest (*ReportScheduledWorkflowRequest)(nil), // 1: kubeflow.pipelines.backend.api.v2beta1.ReportScheduledWorkflowRequest (*emptypb.Empty)(nil), // 2: google.protobuf.Empty @@ -220,37 +175,11 @@ func file_backend_api_v2beta1_report_proto_init() { if File_backend_api_v2beta1_report_proto != nil { return } - if !protoimpl.UnsafeEnabled { - file_backend_api_v2beta1_report_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ReportWorkflowRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_report_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ReportScheduledWorkflowRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v2beta1_report_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_report_proto_rawDesc), len(file_backend_api_v2beta1_report_proto_rawDesc)), NumEnums: 0, NumMessages: 2, NumExtensions: 0, @@ -261,123 +190,6 @@ func file_backend_api_v2beta1_report_proto_init() { MessageInfos: file_backend_api_v2beta1_report_proto_msgTypes, }.Build() File_backend_api_v2beta1_report_proto = out.File - file_backend_api_v2beta1_report_proto_rawDesc = nil file_backend_api_v2beta1_report_proto_goTypes = nil 
file_backend_api_v2beta1_report_proto_depIdxs = nil } - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// ReportServiceClient is the client API for ReportService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type ReportServiceClient interface { - ReportWorkflow(ctx context.Context, in *ReportWorkflowRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - ReportScheduledWorkflow(ctx context.Context, in *ReportScheduledWorkflowRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) -} - -type reportServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewReportServiceClient(cc grpc.ClientConnInterface) ReportServiceClient { - return &reportServiceClient{cc} -} - -func (c *reportServiceClient) ReportWorkflow(ctx context.Context, in *ReportWorkflowRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.ReportService/ReportWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *reportServiceClient) ReportScheduledWorkflow(ctx context.Context, in *ReportScheduledWorkflowRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.ReportService/ReportScheduledWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// ReportServiceServer is the server API for ReportService service. -type ReportServiceServer interface { - ReportWorkflow(context.Context, *ReportWorkflowRequest) (*emptypb.Empty, error) - ReportScheduledWorkflow(context.Context, *ReportScheduledWorkflowRequest) (*emptypb.Empty, error) -} - -// UnimplementedReportServiceServer can be embedded to have forward compatible implementations. 
-type UnimplementedReportServiceServer struct { -} - -func (*UnimplementedReportServiceServer) ReportWorkflow(context.Context, *ReportWorkflowRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method ReportWorkflow not implemented") -} -func (*UnimplementedReportServiceServer) ReportScheduledWorkflow(context.Context, *ReportScheduledWorkflowRequest) (*emptypb.Empty, error) { - return nil, status.Errorf(codes.Unimplemented, "method ReportScheduledWorkflow not implemented") -} - -func RegisterReportServiceServer(s *grpc.Server, srv ReportServiceServer) { - s.RegisterService(&_ReportService_serviceDesc, srv) -} - -func _ReportService_ReportWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ReportWorkflowRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ReportServiceServer).ReportWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.ReportService/ReportWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ReportServiceServer).ReportWorkflow(ctx, req.(*ReportWorkflowRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ReportService_ReportScheduledWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ReportScheduledWorkflowRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ReportServiceServer).ReportScheduledWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.ReportService/ReportScheduledWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ReportServiceServer).ReportScheduledWorkflow(ctx, req.(*ReportScheduledWorkflowRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _ReportService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "kubeflow.pipelines.backend.api.v2beta1.ReportService", - HandlerType: (*ReportServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "ReportWorkflow", - Handler: _ReportService_ReportWorkflow_Handler, - }, - { - MethodName: "ReportScheduledWorkflow", - Handler: _ReportService_ReportScheduledWorkflow_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "backend/api/v2beta1/report.proto", -} diff --git a/backend/api/v2beta1/go_client/report.pb.gw.go b/backend/api/v2beta1/go_client/report.pb.gw.go index 7bf89cc0d4a..492121bb197 100644 --- a/backend/api/v2beta1/go_client/report.pb.gw.go +++ b/backend/api/v2beta1/go_client/report.pb.gw.go @@ -10,147 +10,130 @@ package go_client import ( "context" + "errors" "io" "net/http" - "github.com/golang/protobuf/descriptor" - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" ) // Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = 
runtime.String -var _ = utilities.NewDoubleArray -var _ = descriptor.ForMessage -var _ = metadata.Join +var ( + _ codes.Code + _ io.Reader + _ status.Status + _ = errors.New + _ = runtime.String + _ = utilities.NewDoubleArray + _ = metadata.Join +) func request_ReportService_ReportWorkflow_0(ctx context.Context, marshaler runtime.Marshaler, client ReportServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ReportWorkflowRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Workflow); err != nil && err != io.EOF { + var ( + protoReq ReportWorkflowRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Workflow); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } msg, err := client.ReportWorkflow(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_ReportService_ReportWorkflow_0(ctx context.Context, marshaler runtime.Marshaler, server ReportServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ReportWorkflowRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Workflow); err != nil && err != io.EOF { + var ( + protoReq ReportWorkflowRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Workflow); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.ReportWorkflow(ctx, &protoReq) return msg, metadata, err - } func request_ReportService_ReportScheduledWorkflow_0(ctx context.Context, marshaler runtime.Marshaler, client ReportServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ReportScheduledWorkflowRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.ScheduledWorkflow); err != nil && err != io.EOF { + var ( + protoReq ReportScheduledWorkflowRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.ScheduledWorkflow); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } msg, err := client.ReportScheduledWorkflow(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_ReportService_ReportScheduledWorkflow_0(ctx context.Context, marshaler runtime.Marshaler, server ReportServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ReportScheduledWorkflowRequest - 
var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.ScheduledWorkflow); err != nil && err != io.EOF { + var ( + protoReq ReportScheduledWorkflowRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.ScheduledWorkflow); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.ReportScheduledWorkflow(ctx, &protoReq) return msg, metadata, err - } // RegisterReportServiceHandlerServer registers the http handlers for service ReportService to "mux". // UnaryRPC :call ReportServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. // Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterReportServiceHandlerFromEndpoint instead. +// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call. func RegisterReportServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server ReportServiceServer) error { - - mux.Handle("POST", pattern_ReportService_ReportWorkflow_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_ReportService_ReportWorkflow_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ReportService/ReportWorkflow", runtime.WithHTTPPathPattern("/apis/v2beta1/workflows")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_ReportService_ReportWorkflow_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_ReportService_ReportWorkflow_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ReportService_ReportWorkflow_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ReportService_ReportWorkflow_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_ReportService_ReportScheduledWorkflow_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_ReportService_ReportScheduledWorkflow_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ReportService/ReportScheduledWorkflow", runtime.WithHTTPPathPattern("/apis/v2beta1/scheduledworkflows")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_ReportService_ReportScheduledWorkflow_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_ReportService_ReportScheduledWorkflow_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ReportService_ReportScheduledWorkflow_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ReportService_ReportScheduledWorkflow_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) return nil @@ -159,25 +142,24 @@ func RegisterReportServiceHandlerServer(ctx context.Context, mux *runtime.ServeM // RegisterReportServiceHandlerFromEndpoint is same as RegisterReportServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterReportServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) + conn, err := grpc.NewClient(endpoint, opts...) if err != nil { return err } defer func() { if err != nil { if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } return } go func() { <-ctx.Done() if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } }() }() - return RegisterReportServiceHandler(ctx, mux, conn) } @@ -191,60 +173,51 @@ func RegisterReportServiceHandler(ctx context.Context, mux *runtime.ServeMux, co // to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "ReportServiceClient". // Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "ReportServiceClient" // doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "ReportServiceClient" to call the correct interceptors. +// "ReportServiceClient" to call the correct interceptors. This client ignores the HTTP middlewares. 
func RegisterReportServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client ReportServiceClient) error { - - mux.Handle("POST", pattern_ReportService_ReportWorkflow_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_ReportService_ReportWorkflow_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ReportService/ReportWorkflow", runtime.WithHTTPPathPattern("/apis/v2beta1/workflows")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_ReportService_ReportWorkflow_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_ReportService_ReportWorkflow_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ReportService_ReportWorkflow_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ReportService_ReportWorkflow_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_ReportService_ReportScheduledWorkflow_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_ReportService_ReportScheduledWorkflow_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ReportService/ReportScheduledWorkflow", runtime.WithHTTPPathPattern("/apis/v2beta1/scheduledworkflows")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_ReportService_ReportScheduledWorkflow_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_ReportService_ReportScheduledWorkflow_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_ReportService_ReportScheduledWorkflow_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_ReportService_ReportScheduledWorkflow_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - return nil } var ( - pattern_ReportService_ReportWorkflow_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "workflows"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_ReportService_ReportScheduledWorkflow_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "scheduledworkflows"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_ReportService_ReportWorkflow_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "workflows"}, "")) + pattern_ReportService_ReportScheduledWorkflow_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "scheduledworkflows"}, "")) ) var ( - forward_ReportService_ReportWorkflow_0 = runtime.ForwardResponseMessage - + forward_ReportService_ReportWorkflow_0 = runtime.ForwardResponseMessage forward_ReportService_ReportScheduledWorkflow_0 = runtime.ForwardResponseMessage ) diff --git a/backend/api/v2beta1/go_client/report_grpc.pb.go b/backend/api/v2beta1/go_client/report_grpc.pb.go new file mode 100644 index 00000000000..bdb5e3acf48 --- /dev/null +++ b/backend/api/v2beta1/go_client/report_grpc.pb.go @@ -0,0 +1,174 @@ +// Copyright 2018 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v6.31.1 +// source: backend/api/v2beta1/report.proto + +package go_client + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + emptypb "google.golang.org/protobuf/types/known/emptypb" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + ReportService_ReportWorkflow_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.ReportService/ReportWorkflow" + ReportService_ReportScheduledWorkflow_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.ReportService/ReportScheduledWorkflow" +) + +// ReportServiceClient is the client API for ReportService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. 
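// --- Illustrative usage sketch (editorial addition, not part of the generated diff) ---
// Mounting the regenerated ReportService REST gateway from report.pb.gw.go above.
// The listen address and the gRPC endpoint are assumptions made for illustration only.
package main

import (
	"context"
	"log"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"

	go_client "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	// runtime.NewServeMux comes from the v2 gateway runtime that the regenerated
	// file now imports.
	mux := runtime.NewServeMux()
	opts := []grpc.DialOption{grpc.WithTransportCredentials(insecure.NewCredentials())}

	// As shown above, this helper now dials with grpc.NewClient and closes the
	// connection when ctx is done.
	if err := go_client.RegisterReportServiceHandlerFromEndpoint(ctx, mux, "localhost:8887", opts); err != nil {
		log.Fatalf("registering ReportService gateway: %v", err)
	}

	// POST /apis/v2beta1/workflows and POST /apis/v2beta1/scheduledworkflows are
	// forwarded to the gRPC ReportService through the handlers registered above.
	log.Fatal(http.ListenAndServe(":8888", mux))
}
// --- end of illustrative sketch ---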
+type ReportServiceClient interface { + ReportWorkflow(ctx context.Context, in *ReportWorkflowRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + ReportScheduledWorkflow(ctx context.Context, in *ReportScheduledWorkflowRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) +} + +type reportServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewReportServiceClient(cc grpc.ClientConnInterface) ReportServiceClient { + return &reportServiceClient{cc} +} + +func (c *reportServiceClient) ReportWorkflow(ctx context.Context, in *ReportWorkflowRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, ReportService_ReportWorkflow_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *reportServiceClient) ReportScheduledWorkflow(ctx context.Context, in *ReportScheduledWorkflowRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, ReportService_ReportScheduledWorkflow_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ReportServiceServer is the server API for ReportService service. +// All implementations must embed UnimplementedReportServiceServer +// for forward compatibility. +type ReportServiceServer interface { + ReportWorkflow(context.Context, *ReportWorkflowRequest) (*emptypb.Empty, error) + ReportScheduledWorkflow(context.Context, *ReportScheduledWorkflowRequest) (*emptypb.Empty, error) + mustEmbedUnimplementedReportServiceServer() +} + +// UnimplementedReportServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. +type UnimplementedReportServiceServer struct{} + +func (UnimplementedReportServiceServer) ReportWorkflow(context.Context, *ReportWorkflowRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method ReportWorkflow not implemented") +} +func (UnimplementedReportServiceServer) ReportScheduledWorkflow(context.Context, *ReportScheduledWorkflowRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method ReportScheduledWorkflow not implemented") +} +func (UnimplementedReportServiceServer) mustEmbedUnimplementedReportServiceServer() {} +func (UnimplementedReportServiceServer) testEmbeddedByValue() {} + +// UnsafeReportServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to ReportServiceServer will +// result in compilation errors. +type UnsafeReportServiceServer interface { + mustEmbedUnimplementedReportServiceServer() +} + +func RegisterReportServiceServer(s grpc.ServiceRegistrar, srv ReportServiceServer) { + // If the following call pancis, it indicates UnimplementedReportServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. 
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&ReportService_ServiceDesc, srv) +} + +func _ReportService_ReportWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ReportWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReportServiceServer).ReportWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ReportService_ReportWorkflow_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReportServiceServer).ReportWorkflow(ctx, req.(*ReportWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ReportService_ReportScheduledWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ReportScheduledWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReportServiceServer).ReportScheduledWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ReportService_ReportScheduledWorkflow_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReportServiceServer).ReportScheduledWorkflow(ctx, req.(*ReportScheduledWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// ReportService_ServiceDesc is the grpc.ServiceDesc for ReportService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var ReportService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "kubeflow.pipelines.backend.api.v2beta1.ReportService", + HandlerType: (*ReportServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ReportWorkflow", + Handler: _ReportService_ReportWorkflow_Handler, + }, + { + MethodName: "ReportScheduledWorkflow", + Handler: _ReportService_ReportScheduledWorkflow_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "backend/api/v2beta1/report.proto", +} diff --git a/backend/api/v2beta1/go_client/run.pb.go b/backend/api/v2beta1/go_client/run.pb.go index 6a3e75edd73..5e040d1cd36 100644 --- a/backend/api/v2beta1/go_client/run.pb.go +++ b/backend/api/v2beta1/go_client/run.pb.go @@ -14,20 +14,16 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v2beta1/run.proto package go_client import ( - context "context" - _ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options" + _ "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options" _ "google.golang.org/genproto/googleapis/api/annotations" status "google.golang.org/genproto/googleapis/rpc/status" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status1 "google.golang.org/grpc/status" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" emptypb "google.golang.org/protobuf/types/known/emptypb" @@ -35,6 +31,7 @@ import ( timestamppb "google.golang.org/protobuf/types/known/timestamppb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -176,10 +173,7 @@ func (Run_StorageState) EnumDescriptor() ([]byte, []int) { } type Run struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Input. ID of the parent experiment. // The default experiment ID will be used if this is not specified. ExperimentId string `protobuf:"bytes,1,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` @@ -195,7 +189,7 @@ type Run struct { // Required input. Specifies the source of the pipeline spec for this // run. Can be either a pipeline version id, or a pipeline spec. // - // Types that are assignable to PipelineSource: + // Types that are valid to be assigned to PipelineSource: // // *Run_PipelineVersionId // *Run_PipelineSpec @@ -225,16 +219,16 @@ type Run struct { RecurringRunId string `protobuf:"bytes,16,opt,name=recurring_run_id,json=recurringRunId,proto3" json:"recurring_run_id,omitempty"` // Output. A sequence of run statuses. This field keeps a record // of state transitions. - StateHistory []*RuntimeStatus `protobuf:"bytes,17,rep,name=state_history,json=stateHistory,proto3" json:"state_history,omitempty"` + StateHistory []*RuntimeStatus `protobuf:"bytes,17,rep,name=state_history,json=stateHistory,proto3" json:"state_history,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Run) Reset() { *x = Run{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_run_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_run_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Run) String() string { @@ -245,7 +239,7 @@ func (*Run) ProtoMessage() {} func (x *Run) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_run_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -295,31 +289,37 @@ func (x *Run) GetDescription() string { return "" } -func (m *Run) GetPipelineSource() isRun_PipelineSource { - if m != nil { - return m.PipelineSource +func (x *Run) GetPipelineSource() isRun_PipelineSource { + if x != nil { + return x.PipelineSource } return nil } // Deprecated: Marked as deprecated in backend/api/v2beta1/run.proto. 
func (x *Run) GetPipelineVersionId() string { - if x, ok := x.GetPipelineSource().(*Run_PipelineVersionId); ok { - return x.PipelineVersionId + if x != nil { + if x, ok := x.PipelineSource.(*Run_PipelineVersionId); ok { + return x.PipelineVersionId + } } return "" } func (x *Run) GetPipelineSpec() *structpb.Struct { - if x, ok := x.GetPipelineSource().(*Run_PipelineSpec); ok { - return x.PipelineSpec + if x != nil { + if x, ok := x.PipelineSource.(*Run_PipelineSpec); ok { + return x.PipelineSpec + } } return nil } func (x *Run) GetPipelineVersionReference() *PipelineVersionReference { - if x, ok := x.GetPipelineSource().(*Run_PipelineVersionReference); ok { - return x.PipelineVersionReference + if x != nil { + if x, ok := x.PipelineSource.(*Run_PipelineVersionReference); ok { + return x.PipelineVersionReference + } } return nil } @@ -423,23 +423,20 @@ func (*Run_PipelineVersionReference) isRun_PipelineSource() {} // Reference to an existing pipeline version. type PipelineVersionReference struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Input. Required. Unique ID of the parent pipeline. PipelineId string `protobuf:"bytes,1,opt,name=pipeline_id,json=pipelineId,proto3" json:"pipeline_id,omitempty"` // Input. Optional. Unique ID of an existing pipeline version. If unset, the latest pipeline version is used. PipelineVersionId string `protobuf:"bytes,2,opt,name=pipeline_version_id,json=pipelineVersionId,proto3" json:"pipeline_version_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineVersionReference) Reset() { *x = PipelineVersionReference{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_run_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_run_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineVersionReference) String() string { @@ -450,7 +447,7 @@ func (*PipelineVersionReference) ProtoMessage() {} func (x *PipelineVersionReference) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_run_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -481,10 +478,7 @@ func (x *PipelineVersionReference) GetPipelineVersionId() string { // Timestamped representation of a runtime state with an optional error. type RuntimeStatus struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Update time of this state. UpdateTime *timestamppb.Timestamp `protobuf:"bytes,1,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` // The state of a runtime instance. @@ -494,16 +488,16 @@ type RuntimeStatus struct { // If the state is FAILED, the error here is final and not going to be // retried. If the state is a non-final state, the error indicates that a // system-error being retried. 
- Error *status.Status `protobuf:"bytes,3,opt,name=error,proto3" json:"error,omitempty"` + Error *status.Status `protobuf:"bytes,3,opt,name=error,proto3" json:"error,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *RuntimeStatus) Reset() { *x = RuntimeStatus{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_run_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_run_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *RuntimeStatus) String() string { @@ -514,7 +508,7 @@ func (*RuntimeStatus) ProtoMessage() {} func (x *RuntimeStatus) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_run_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -552,25 +546,22 @@ func (x *RuntimeStatus) GetError() *status.Status { // Runtime details of a run. type RunDetails struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Pipeline context ID of a run. PipelineContextId int64 `protobuf:"varint,1,opt,name=pipeline_context_id,json=pipelineContextId,proto3" json:"pipeline_context_id,omitempty"` // Pipeline run context ID of a run. PipelineRunContextId int64 `protobuf:"varint,2,opt,name=pipeline_run_context_id,json=pipelineRunContextId,proto3" json:"pipeline_run_context_id,omitempty"` // Runtime details of the tasks that belong to the run. - TaskDetails []*PipelineTaskDetail `protobuf:"bytes,3,rep,name=task_details,json=taskDetails,proto3" json:"task_details,omitempty"` + TaskDetails []*PipelineTaskDetail `protobuf:"bytes,3,rep,name=task_details,json=taskDetails,proto3" json:"task_details,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *RunDetails) Reset() { *x = RunDetails{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_run_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_run_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *RunDetails) String() string { @@ -581,7 +572,7 @@ func (*RunDetails) ProtoMessage() {} func (x *RunDetails) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_run_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -619,10 +610,7 @@ func (x *RunDetails) GetTaskDetails() []*PipelineTaskDetail { // Runtime information of a task execution. type PipelineTaskDetail struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // ID of the parent run. RunId string `protobuf:"bytes,1,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` // System-generated ID of a task. @@ -646,9 +634,9 @@ type PipelineTaskDetail struct { // Only populated when the task is in FAILED or CANCELED state. Error *status.Status `protobuf:"bytes,10,opt,name=error,proto3" json:"error,omitempty"` // Input artifacts of the task. 
- Inputs map[string]*ArtifactList `protobuf:"bytes,11,rep,name=inputs,proto3" json:"inputs,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Inputs map[string]*ArtifactList `protobuf:"bytes,11,rep,name=inputs,proto3" json:"inputs,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // Output artifacts of the task. - Outputs map[string]*ArtifactList `protobuf:"bytes,12,rep,name=outputs,proto3" json:"outputs,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Outputs map[string]*ArtifactList `protobuf:"bytes,12,rep,name=outputs,proto3" json:"outputs,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // ID of the parent task if the task is within a component scope. // Empty if the task is at the root level. ParentTaskId string `protobuf:"bytes,13,opt,name=parent_task_id,json=parentTaskId,proto3" json:"parent_task_id,omitempty"` @@ -659,16 +647,16 @@ type PipelineTaskDetail struct { // Also known as node_id. PodName string `protobuf:"bytes,15,opt,name=pod_name,json=podName,proto3" json:"pod_name,omitempty"` // Sequence of dependen tasks. - ChildTasks []*PipelineTaskDetail_ChildTask `protobuf:"bytes,16,rep,name=child_tasks,json=childTasks,proto3" json:"child_tasks,omitempty"` + ChildTasks []*PipelineTaskDetail_ChildTask `protobuf:"bytes,16,rep,name=child_tasks,json=childTasks,proto3" json:"child_tasks,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineTaskDetail) Reset() { *x = PipelineTaskDetail{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_run_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_run_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineTaskDetail) String() string { @@ -679,7 +667,7 @@ func (*PipelineTaskDetail) ProtoMessage() {} func (x *PipelineTaskDetail) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_run_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -808,10 +796,7 @@ func (x *PipelineTaskDetail) GetChildTasks() []*PipelineTaskDetail_ChildTask { // Runtime information of a pipeline task executor. type PipelineTaskExecutorDetail struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The name of the job for the main container execution. MainJob string `protobuf:"bytes,1,opt,name=main_job,json=mainJob,proto3" json:"main_job,omitempty"` // The name of the job for the pre-caching-check container @@ -828,15 +813,15 @@ type PipelineTaskExecutorDetail struct { // the lifecycle events. // The list includes the all attempts in chronological order. 
FailedPreCachingCheckJobs []string `protobuf:"bytes,4,rep,name=failed_pre_caching_check_jobs,json=failedPreCachingCheckJobs,proto3" json:"failed_pre_caching_check_jobs,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineTaskExecutorDetail) Reset() { *x = PipelineTaskExecutorDetail{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_run_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_run_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineTaskExecutorDetail) String() string { @@ -847,7 +832,7 @@ func (*PipelineTaskExecutorDetail) ProtoMessage() {} func (x *PipelineTaskExecutorDetail) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_run_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -892,21 +877,18 @@ func (x *PipelineTaskExecutorDetail) GetFailedPreCachingCheckJobs() []string { // A list of artifact metadata. type ArtifactList struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // A list of artifact metadata ids. - ArtifactIds []int64 `protobuf:"varint,1,rep,packed,name=artifact_ids,json=artifactIds,proto3" json:"artifact_ids,omitempty"` + ArtifactIds []int64 `protobuf:"varint,1,rep,packed,name=artifact_ids,json=artifactIds,proto3" json:"artifact_ids,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ArtifactList) Reset() { *x = ArtifactList{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_run_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_run_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ArtifactList) String() string { @@ -917,7 +899,7 @@ func (*ArtifactList) ProtoMessage() {} func (x *ArtifactList) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_run_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -940,25 +922,22 @@ func (x *ArtifactList) GetArtifactIds() []int64 { } type CreateRunRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the parent experiment. // // Deprecated: Marked as deprecated in backend/api/v2beta1/run.proto. ExperimentId string `protobuf:"bytes,1,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` // Run to be created. 
- Run *Run `protobuf:"bytes,2,opt,name=run,proto3" json:"run,omitempty"` + Run *Run `protobuf:"bytes,2,opt,name=run,proto3" json:"run,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *CreateRunRequest) Reset() { *x = CreateRunRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_run_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_run_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *CreateRunRequest) String() string { @@ -969,7 +948,7 @@ func (*CreateRunRequest) ProtoMessage() {} func (x *CreateRunRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_run_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1000,25 +979,22 @@ func (x *CreateRunRequest) GetRun() *Run { } type GetRunRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the parent experiment. // // Deprecated: Marked as deprecated in backend/api/v2beta1/run.proto. ExperimentId string `protobuf:"bytes,1,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` // The ID of the run to be retrieved. - RunId string `protobuf:"bytes,2,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + RunId string `protobuf:"bytes,2,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *GetRunRequest) Reset() { *x = GetRunRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_run_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_run_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *GetRunRequest) String() string { @@ -1029,7 +1005,7 @@ func (*GetRunRequest) ProtoMessage() {} func (x *GetRunRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_run_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1060,10 +1036,7 @@ func (x *GetRunRequest) GetRunId() string { } type ListRunsRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Optional input field. Filters based on the namespace. Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"` // The ID of the parent experiment. If empty, response includes runs across all experiments. @@ -1081,16 +1054,16 @@ type ListRunsRequest struct { SortBy string `protobuf:"bytes,5,opt,name=sort_by,json=sortBy,proto3" json:"sort_by,omitempty"` // A url-encoded, JSON-serialized Filter protocol buffer (see // [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). 
- Filter string `protobuf:"bytes,6,opt,name=filter,proto3" json:"filter,omitempty"` + Filter string `protobuf:"bytes,6,opt,name=filter,proto3" json:"filter,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListRunsRequest) Reset() { *x = ListRunsRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_run_proto_msgTypes[9] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_run_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListRunsRequest) String() string { @@ -1101,7 +1074,7 @@ func (*ListRunsRequest) ProtoMessage() {} func (x *ListRunsRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_run_proto_msgTypes[9] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1159,25 +1132,22 @@ func (x *ListRunsRequest) GetFilter() string { } type TerminateRunRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the parent experiment. // // Deprecated: Marked as deprecated in backend/api/v2beta1/run.proto. ExperimentId string `protobuf:"bytes,1,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` // The ID of the run to be terminated. - RunId string `protobuf:"bytes,2,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + RunId string `protobuf:"bytes,2,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *TerminateRunRequest) Reset() { *x = TerminateRunRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_run_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_run_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *TerminateRunRequest) String() string { @@ -1188,7 +1158,7 @@ func (*TerminateRunRequest) ProtoMessage() {} func (x *TerminateRunRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_run_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1219,25 +1189,22 @@ func (x *TerminateRunRequest) GetRunId() string { } type ListRunsResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // List of retrieved runs. Runs []*Run `protobuf:"bytes,1,rep,name=runs,proto3" json:"runs,omitempty"` // The total number of runs for the given query. TotalSize int32 `protobuf:"varint,2,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` // The token to list the next page of runs. 
NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ListRunsResponse) Reset() { *x = ListRunsResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_run_proto_msgTypes[11] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_run_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ListRunsResponse) String() string { @@ -1248,7 +1215,7 @@ func (*ListRunsResponse) ProtoMessage() {} func (x *ListRunsResponse) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_run_proto_msgTypes[11] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1285,25 +1252,22 @@ func (x *ListRunsResponse) GetNextPageToken() string { } type ArchiveRunRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the parent experiment. // // Deprecated: Marked as deprecated in backend/api/v2beta1/run.proto. ExperimentId string `protobuf:"bytes,1,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` // The ID of the run to be archived. - RunId string `protobuf:"bytes,2,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + RunId string `protobuf:"bytes,2,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ArchiveRunRequest) Reset() { *x = ArchiveRunRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_run_proto_msgTypes[12] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_run_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ArchiveRunRequest) String() string { @@ -1314,7 +1278,7 @@ func (*ArchiveRunRequest) ProtoMessage() {} func (x *ArchiveRunRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_run_proto_msgTypes[12] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1345,25 +1309,22 @@ func (x *ArchiveRunRequest) GetRunId() string { } type UnarchiveRunRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the parent experiment. // // Deprecated: Marked as deprecated in backend/api/v2beta1/run.proto. ExperimentId string `protobuf:"bytes,1,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` // The ID of the run to be restored. 
- RunId string `protobuf:"bytes,2,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + RunId string `protobuf:"bytes,2,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *UnarchiveRunRequest) Reset() { *x = UnarchiveRunRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_run_proto_msgTypes[13] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_run_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *UnarchiveRunRequest) String() string { @@ -1374,7 +1335,7 @@ func (*UnarchiveRunRequest) ProtoMessage() {} func (x *UnarchiveRunRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_run_proto_msgTypes[13] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1405,25 +1366,22 @@ func (x *UnarchiveRunRequest) GetRunId() string { } type DeleteRunRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the parent experiment. // // Deprecated: Marked as deprecated in backend/api/v2beta1/run.proto. ExperimentId string `protobuf:"bytes,1,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` // The ID of the run to be deleted. - RunId string `protobuf:"bytes,2,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + RunId string `protobuf:"bytes,2,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DeleteRunRequest) Reset() { *x = DeleteRunRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_run_proto_msgTypes[14] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_run_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DeleteRunRequest) String() string { @@ -1434,7 +1392,7 @@ func (*DeleteRunRequest) ProtoMessage() {} func (x *DeleteRunRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_run_proto_msgTypes[14] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1465,10 +1423,7 @@ func (x *DeleteRunRequest) GetRunId() string { } type ReadArtifactRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the parent experiment. // // Deprecated: Marked as deprecated in backend/api/v2beta1/run.proto. @@ -1478,16 +1433,16 @@ type ReadArtifactRequest struct { // ID of the running node. NodeId string `protobuf:"bytes,3,opt,name=node_id,json=nodeId,proto3" json:"node_id,omitempty"` // Name of the artifact. 
- ArtifactName string `protobuf:"bytes,4,opt,name=artifact_name,json=artifactName,proto3" json:"artifact_name,omitempty"` + ArtifactName string `protobuf:"bytes,4,opt,name=artifact_name,json=artifactName,proto3" json:"artifact_name,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ReadArtifactRequest) Reset() { *x = ReadArtifactRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_run_proto_msgTypes[15] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_run_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ReadArtifactRequest) String() string { @@ -1498,7 +1453,7 @@ func (*ReadArtifactRequest) ProtoMessage() {} func (x *ReadArtifactRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_run_proto_msgTypes[15] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1543,21 +1498,18 @@ func (x *ReadArtifactRequest) GetArtifactName() string { } type ReadArtifactResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Byte array of the artifact content. - Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ReadArtifactResponse) Reset() { *x = ReadArtifactResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_run_proto_msgTypes[16] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_run_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ReadArtifactResponse) String() string { @@ -1568,7 +1520,7 @@ func (*ReadArtifactResponse) ProtoMessage() {} func (x *ReadArtifactResponse) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_run_proto_msgTypes[16] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1591,25 +1543,22 @@ func (x *ReadArtifactResponse) GetData() []byte { } type RetryRunRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The ID of the parent experiment. // // Deprecated: Marked as deprecated in backend/api/v2beta1/run.proto. ExperimentId string `protobuf:"bytes,1,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"` // The ID of the run to be retried. 
- RunId string `protobuf:"bytes,2,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + RunId string `protobuf:"bytes,2,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *RetryRunRequest) Reset() { *x = RetryRunRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_run_proto_msgTypes[17] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_run_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *RetryRunRequest) String() string { @@ -1620,7 +1569,7 @@ func (*RetryRunRequest) ProtoMessage() {} func (x *RetryRunRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_run_proto_msgTypes[17] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1653,24 +1602,21 @@ func (x *RetryRunRequest) GetRunId() string { // A dependent task that requires this one to succeed. // Represented by either task_id or pod_name. type PipelineTaskDetail_ChildTask struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to ChildTask: + state protoimpl.MessageState `protogen:"open.v1"` + // Types that are valid to be assigned to ChildTask: // // *PipelineTaskDetail_ChildTask_TaskId // *PipelineTaskDetail_ChildTask_PodName - ChildTask isPipelineTaskDetail_ChildTask_ChildTask `protobuf_oneof:"child_task"` + ChildTask isPipelineTaskDetail_ChildTask_ChildTask `protobuf_oneof:"child_task"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PipelineTaskDetail_ChildTask) Reset() { *x = PipelineTaskDetail_ChildTask{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_run_proto_msgTypes[20] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_run_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PipelineTaskDetail_ChildTask) String() string { @@ -1681,7 +1627,7 @@ func (*PipelineTaskDetail_ChildTask) ProtoMessage() {} func (x *PipelineTaskDetail_ChildTask) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_run_proto_msgTypes[20] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1696,23 +1642,27 @@ func (*PipelineTaskDetail_ChildTask) Descriptor() ([]byte, []int) { return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{4, 2} } -func (m *PipelineTaskDetail_ChildTask) GetChildTask() isPipelineTaskDetail_ChildTask_ChildTask { - if m != nil { - return m.ChildTask +func (x *PipelineTaskDetail_ChildTask) GetChildTask() isPipelineTaskDetail_ChildTask_ChildTask { + if x != nil { + return x.ChildTask } return nil } func (x *PipelineTaskDetail_ChildTask) GetTaskId() string { - if x, ok := x.GetChildTask().(*PipelineTaskDetail_ChildTask_TaskId); ok { - return x.TaskId + if x != nil { + if x, ok := x.ChildTask.(*PipelineTaskDetail_ChildTask_TaskId); ok { + return x.TaskId + } } return "" } func (x *PipelineTaskDetail_ChildTask) GetPodName() string { - if x, ok := x.GetChildTask().(*PipelineTaskDetail_ChildTask_PodName); ok { - return x.PodName + 
if x != nil { + if x, ok := x.ChildTask.(*PipelineTaskDetail_ChildTask_PodName); ok { + return x.PodName + } } return "" } @@ -1738,433 +1688,180 @@ func (*PipelineTaskDetail_ChildTask_PodName) isPipelineTaskDetail_ChildTask_Chil var File_backend_api_v2beta1_run_proto protoreflect.FileDescriptor -var file_backend_api_v2beta1_run_proto_rawDesc = []byte{ - 0x0a, 0x1d, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x75, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, - 0x26, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x1a, 0x1b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, - 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x1a, 0x17, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x72, 0x70, 0x63, 0x2f, 0x73, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2c, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x63, 0x2d, 0x67, 0x65, 0x6e, 0x2d, 0x73, 0x77, 0x61, 0x67, 0x67, 0x65, 0x72, 0x2f, 0x6f, - 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x28, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, - 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x75, - 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x22, 0xcc, 0x09, 0x0a, 0x03, 0x52, 0x75, 0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x78, - 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0c, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x12, - 0x15, 0x0a, 0x06, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x72, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x21, 0x0a, 0x0c, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, - 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x69, - 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x5d, 0x0a, 0x0d, 0x73, 0x74, 0x6f, - 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x38, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x75, 0x6e, 0x2e, 0x53, 0x74, - 0x6f, 0x72, 0x61, 0x67, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x0c, 0x73, 0x74, 0x6f, 0x72, - 0x61, 0x67, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, - 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 
0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, - 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x34, 0x0a, 0x13, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, - 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x48, 0x00, 0x52, 0x11, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, - 0x12, 0x3e, 0x0a, 0x0d, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x73, 0x70, 0x65, - 0x63, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, - 0x48, 0x00, 0x52, 0x0c, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x53, 0x70, 0x65, 0x63, - 0x12, 0x80, 0x01, 0x0a, 0x1a, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x76, 0x65, - 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, - 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x40, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, - 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x50, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, - 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x48, 0x00, 0x52, 0x18, 0x70, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, - 0x6e, 0x63, 0x65, 0x12, 0x5c, 0x0a, 0x0e, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x63, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x35, 0x2e, 0x6b, 0x75, - 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, - 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, - 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x52, 0x0d, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x12, 0x27, 0x0a, 0x0f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x5f, 0x61, 0x63, 0x63, - 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x73, 0x65, 0x72, 0x76, - 0x69, 0x63, 0x65, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, - 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, - 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, - 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, - 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x3d, 0x0a, 0x0c, 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, - 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, - 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0b, 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, - 0x65, 0x64, 0x41, 0x74, 0x12, 0x3b, 0x0a, 0x0b, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, - 0x5f, 0x61, 0x74, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0a, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 
0x64, 0x41, - 0x74, 0x12, 0x4a, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x34, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, - 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x28, 0x0a, - 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x67, - 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, - 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x53, 0x0a, 0x0b, 0x72, 0x75, 0x6e, 0x5f, 0x64, - 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x6b, - 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x75, 0x6e, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, - 0x52, 0x0a, 0x72, 0x75, 0x6e, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, 0x28, 0x0a, 0x10, - 0x72, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, - 0x18, 0x10, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x72, 0x65, 0x63, 0x75, 0x72, 0x72, 0x69, 0x6e, - 0x67, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x5a, 0x0a, 0x0d, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, - 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x11, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x35, 0x2e, - 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, - 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x53, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x52, 0x0c, 0x73, 0x74, 0x61, 0x74, 0x65, 0x48, 0x69, 0x73, 0x74, 0x6f, - 0x72, 0x79, 0x22, 0x4a, 0x0a, 0x0c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x53, 0x74, 0x61, - 0x74, 0x65, 0x12, 0x1d, 0x0a, 0x19, 0x53, 0x54, 0x4f, 0x52, 0x41, 0x47, 0x45, 0x5f, 0x53, 0x54, - 0x41, 0x54, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, - 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x41, 0x56, 0x41, 0x49, 0x4c, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x01, - 0x12, 0x0c, 0x0a, 0x08, 0x41, 0x52, 0x43, 0x48, 0x49, 0x56, 0x45, 0x44, 0x10, 0x02, 0x42, 0x11, - 0x0a, 0x0f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x22, 0x6b, 0x0a, 0x18, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, - 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x1f, 0x0a, - 0x0b, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x0a, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x64, 0x12, 0x2e, - 0x0a, 0x13, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, - 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x22, 0xc2, - 0x01, 0x0a, 0x0d, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, - 0x12, 0x3b, 0x0a, 0x0b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, - 0x01, 0x20, 
0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, - 0x70, 0x52, 0x0a, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x4a, 0x0a, - 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x34, 0x2e, 0x6b, - 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x53, 0x74, 0x61, - 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x28, 0x0a, 0x05, 0x65, 0x72, 0x72, - 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x05, 0x65, 0x72, - 0x72, 0x6f, 0x72, 0x22, 0xd2, 0x01, 0x0a, 0x0a, 0x52, 0x75, 0x6e, 0x44, 0x65, 0x74, 0x61, 0x69, - 0x6c, 0x73, 0x12, 0x2e, 0x0a, 0x13, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, - 0x11, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, - 0x49, 0x64, 0x12, 0x35, 0x0a, 0x17, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x72, - 0x75, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x03, 0x52, 0x14, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x75, 0x6e, - 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x49, 0x64, 0x12, 0x5d, 0x0a, 0x0c, 0x74, 0x61, 0x73, - 0x6b, 0x5f, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x3a, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x54, 0x61, 0x73, 0x6b, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x52, 0x0b, 0x74, 0x61, 0x73, - 0x6b, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x99, 0x0a, 0x0a, 0x12, 0x50, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x54, 0x61, 0x73, 0x6b, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x12, - 0x15, 0x0a, 0x06, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x72, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x17, 0x0a, 0x07, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x69, - 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x74, 0x61, 0x73, 0x6b, 0x49, 0x64, 0x12, - 0x21, 0x0a, 0x0c, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, - 0x6d, 0x65, 0x12, 0x3b, 0x0a, 0x0b, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x5f, 0x74, 0x69, 0x6d, - 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, - 0x61, 0x6d, 0x70, 0x52, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x69, 0x6d, 0x65, 0x12, - 0x39, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x05, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 
0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, - 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x08, 0x65, 0x6e, - 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, - 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, - 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x07, 0x65, 0x6e, 0x64, 0x54, 0x69, 0x6d, - 0x65, 0x12, 0x6b, 0x0a, 0x0f, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x5f, 0x64, 0x65, - 0x74, 0x61, 0x69, 0x6c, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x42, 0x2e, 0x6b, 0x75, 0x62, - 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, - 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, - 0x74, 0x61, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x54, 0x61, 0x73, 0x6b, - 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x52, 0x0e, - 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x12, 0x4a, - 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x34, 0x2e, - 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, - 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x53, 0x74, - 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x65, 0x78, - 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x09, 0x20, 0x01, 0x28, 0x03, - 0x52, 0x0b, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x28, 0x0a, - 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x67, - 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, - 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x5e, 0x0a, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x73, 0x18, 0x0b, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x46, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, - 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, - 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x54, 0x61, 0x73, 0x6b, 0x44, 0x65, 0x74, - 0x61, 0x69, 0x6c, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, - 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x12, 0x61, 0x0a, 0x07, 0x6f, 0x75, 0x74, 0x70, 0x75, - 0x74, 0x73, 0x18, 0x0c, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x47, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, - 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, - 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, - 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x54, 0x61, 0x73, 0x6b, 0x44, 0x65, - 0x74, 0x61, 0x69, 0x6c, 0x2e, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, - 0x79, 0x52, 0x07, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x12, 0x24, 0x0a, 0x0e, 0x70, 0x61, - 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x69, 0x64, 0x18, 0x0d, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x0c, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x49, 0x64, - 0x12, 0x5a, 0x0a, 0x0d, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 
0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, - 0x79, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x35, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, - 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, - 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x0c, - 0x73, 0x74, 0x61, 0x74, 0x65, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x19, 0x0a, 0x08, - 0x70, 0x6f, 0x64, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, - 0x70, 0x6f, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x65, 0x0a, 0x0b, 0x63, 0x68, 0x69, 0x6c, 0x64, - 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x10, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x44, 0x2e, 0x6b, - 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x54, 0x61, - 0x73, 0x6b, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x2e, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x54, 0x61, - 0x73, 0x6b, 0x52, 0x0a, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x1a, 0x6f, - 0x0a, 0x0b, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, - 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, - 0x4a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x34, - 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, - 0x4c, 0x69, 0x73, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, - 0x70, 0x0a, 0x0c, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, - 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, - 0x79, 0x12, 0x4a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x34, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, - 0x63, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, - 0x01, 0x1a, 0x51, 0x0a, 0x09, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x54, 0x61, 0x73, 0x6b, 0x12, 0x19, - 0x0a, 0x07, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, - 0x00, 0x52, 0x06, 0x74, 0x61, 0x73, 0x6b, 0x49, 0x64, 0x12, 0x1b, 0x0a, 0x08, 0x70, 0x6f, 0x64, - 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x07, 0x70, - 0x6f, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x42, 0x0c, 0x0a, 0x0a, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, - 0x74, 0x61, 0x73, 0x6b, 0x22, 0xd6, 0x01, 0x0a, 0x1a, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x54, 0x61, 0x73, 0x6b, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x44, 0x65, 0x74, - 0x61, 0x69, 0x6c, 0x12, 0x19, 0x0a, 0x08, 0x6d, 0x61, 0x69, 0x6e, 0x5f, 0x6a, 0x6f, 0x62, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x61, 0x69, 0x6e, 0x4a, 0x6f, 0x62, 
0x12, 0x31, - 0x0a, 0x15, 0x70, 0x72, 0x65, 0x5f, 0x63, 0x61, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x5f, 0x63, 0x68, - 0x65, 0x63, 0x6b, 0x5f, 0x6a, 0x6f, 0x62, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x70, - 0x72, 0x65, 0x43, 0x61, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x4a, 0x6f, - 0x62, 0x12, 0x28, 0x0a, 0x10, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x5f, 0x6d, 0x61, 0x69, 0x6e, - 0x5f, 0x6a, 0x6f, 0x62, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0e, 0x66, 0x61, 0x69, - 0x6c, 0x65, 0x64, 0x4d, 0x61, 0x69, 0x6e, 0x4a, 0x6f, 0x62, 0x73, 0x12, 0x40, 0x0a, 0x1d, 0x66, - 0x61, 0x69, 0x6c, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x65, 0x5f, 0x63, 0x61, 0x63, 0x68, 0x69, 0x6e, - 0x67, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x5f, 0x6a, 0x6f, 0x62, 0x73, 0x18, 0x04, 0x20, 0x03, - 0x28, 0x09, 0x52, 0x19, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x50, 0x72, 0x65, 0x43, 0x61, 0x63, - 0x68, 0x69, 0x6e, 0x67, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x4a, 0x6f, 0x62, 0x73, 0x22, 0x31, 0x0a, - 0x0c, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x21, 0x0a, - 0x0c, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x03, 0x52, 0x0b, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x49, 0x64, 0x73, - 0x22, 0x7a, 0x0a, 0x10, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x0d, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, - 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, - 0x0c, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x3d, 0x0a, - 0x03, 0x72, 0x75, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x6b, 0x75, 0x62, - 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, - 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, - 0x74, 0x61, 0x31, 0x2e, 0x52, 0x75, 0x6e, 0x52, 0x03, 0x72, 0x75, 0x6e, 0x22, 0x4f, 0x0a, 0x0d, - 0x47, 0x65, 0x74, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, - 0x0d, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0c, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, - 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x72, 0x75, 0x6e, 0x49, 0x64, 0x22, 0xc1, 0x01, - 0x0a, 0x0f, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x75, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, - 0x23, 0x0a, 0x0d, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, - 0x6e, 0x74, 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, - 0x65, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x61, 0x67, 0x65, 0x54, 0x6f, - 0x6b, 0x65, 0x6e, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, - 0x12, 0x17, 0x0a, 0x07, 0x73, 0x6f, 0x72, 0x74, 0x5f, 0x62, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, - 0x09, 0x52, 
0x06, 0x73, 0x6f, 0x72, 0x74, 0x42, 0x79, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x69, 0x6c, - 0x74, 0x65, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, - 0x72, 0x22, 0x55, 0x0a, 0x13, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x52, 0x75, - 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x0d, 0x65, 0x78, 0x70, 0x65, - 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, - 0x02, 0x18, 0x01, 0x52, 0x0c, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x49, - 0x64, 0x12, 0x15, 0x0a, 0x06, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x05, 0x72, 0x75, 0x6e, 0x49, 0x64, 0x22, 0x9a, 0x01, 0x0a, 0x10, 0x4c, 0x69, 0x73, - 0x74, 0x52, 0x75, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3f, 0x0a, - 0x04, 0x72, 0x75, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x6b, 0x75, - 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, - 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, - 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x75, 0x6e, 0x52, 0x04, 0x72, 0x75, 0x6e, 0x73, 0x12, 0x1d, - 0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x05, 0x52, 0x09, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x26, 0x0a, - 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, - 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x53, 0x0a, 0x11, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, - 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x0d, 0x65, 0x78, - 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0c, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, - 0x74, 0x49, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x05, 0x72, 0x75, 0x6e, 0x49, 0x64, 0x22, 0x55, 0x0a, 0x13, 0x55, 0x6e, - 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x27, 0x0a, 0x0d, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x5f, - 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0c, 0x65, 0x78, - 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x72, 0x75, - 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x72, 0x75, 0x6e, 0x49, - 0x64, 0x22, 0x52, 0x0a, 0x10, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x75, 0x6e, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x0d, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, - 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, - 0x52, 0x0c, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x15, - 0x0a, 0x06, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, - 0x72, 0x75, 0x6e, 0x49, 0x64, 0x22, 0x93, 0x01, 0x0a, 0x13, 0x52, 0x65, 0x61, 0x64, 0x41, 0x72, - 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, - 0x0d, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 
0x18, 0x01, 0x52, 0x0c, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, - 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x72, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x17, 0x0a, - 0x07, 0x6e, 0x6f, 0x64, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, - 0x6e, 0x6f, 0x64, 0x65, 0x49, 0x64, 0x12, 0x23, 0x0a, 0x0d, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, - 0x63, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x61, - 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x2a, 0x0a, 0x14, 0x52, - 0x65, 0x61, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x51, 0x0a, 0x0f, 0x52, 0x65, 0x74, 0x72, 0x79, - 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x0d, 0x65, 0x78, - 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0c, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, - 0x74, 0x49, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x05, 0x72, 0x75, 0x6e, 0x49, 0x64, 0x2a, 0x98, 0x01, 0x0a, 0x0c, 0x52, - 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x1d, 0x0a, 0x19, 0x52, - 0x55, 0x4e, 0x54, 0x49, 0x4d, 0x45, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x45, 0x5f, 0x55, 0x4e, 0x53, - 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x50, 0x45, - 0x4e, 0x44, 0x49, 0x4e, 0x47, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x52, 0x55, 0x4e, 0x4e, 0x49, - 0x4e, 0x47, 0x10, 0x02, 0x12, 0x0d, 0x0a, 0x09, 0x53, 0x55, 0x43, 0x43, 0x45, 0x45, 0x44, 0x45, - 0x44, 0x10, 0x03, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x4b, 0x49, 0x50, 0x50, 0x45, 0x44, 0x10, 0x04, - 0x12, 0x0a, 0x0a, 0x06, 0x46, 0x41, 0x49, 0x4c, 0x45, 0x44, 0x10, 0x05, 0x12, 0x0d, 0x0a, 0x09, - 0x43, 0x41, 0x4e, 0x43, 0x45, 0x4c, 0x49, 0x4e, 0x47, 0x10, 0x06, 0x12, 0x0c, 0x0a, 0x08, 0x43, - 0x41, 0x4e, 0x43, 0x45, 0x4c, 0x45, 0x44, 0x10, 0x07, 0x12, 0x0a, 0x0a, 0x06, 0x50, 0x41, 0x55, - 0x53, 0x45, 0x44, 0x10, 0x08, 0x32, 0xf9, 0x0a, 0x0a, 0x0a, 0x52, 0x75, 0x6e, 0x53, 0x65, 0x72, - 0x76, 0x69, 0x63, 0x65, 0x12, 0x93, 0x01, 0x0a, 0x09, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, - 0x75, 0x6e, 0x12, 0x38, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x43, 0x72, 0x65, 0x61, - 0x74, 0x65, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2b, 0x2e, 0x6b, - 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x75, 0x6e, 0x22, 0x1f, 0x82, 0xd3, 0xe4, 0x93, 0x02, - 0x19, 0x3a, 0x03, 0x72, 0x75, 0x6e, 0x22, 0x12, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x75, 0x6e, 0x73, 0x12, 0x91, 0x01, 0x0a, 0x06, 0x47, - 0x65, 0x74, 0x52, 0x75, 0x6e, 0x12, 0x35, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 
0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, - 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x47, - 0x65, 0x74, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2b, 0x2e, 0x6b, - 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x75, 0x6e, 0x22, 0x23, 0x82, 0xd3, 0xe4, 0x93, 0x02, - 0x1d, 0x12, 0x1b, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2f, 0x72, 0x75, 0x6e, 0x73, 0x2f, 0x7b, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x7d, 0x12, 0x99, - 0x01, 0x0a, 0x08, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x75, 0x6e, 0x73, 0x12, 0x37, 0x2e, 0x6b, 0x75, - 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, - 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, - 0x65, 0x74, 0x61, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x75, 0x6e, 0x73, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x38, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, - 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x4c, 0x69, - 0x73, 0x74, 0x52, 0x75, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x1a, - 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x14, 0x12, 0x12, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x75, 0x6e, 0x73, 0x12, 0x8c, 0x01, 0x0a, 0x0a, 0x41, - 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x52, 0x75, 0x6e, 0x12, 0x39, 0x2e, 0x6b, 0x75, 0x62, 0x65, - 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, - 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, - 0x61, 0x31, 0x2e, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x2b, 0x82, 0xd3, - 0xe4, 0x93, 0x02, 0x25, 0x22, 0x23, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, - 0x74, 0x61, 0x31, 0x2f, 0x72, 0x75, 0x6e, 0x73, 0x2f, 0x7b, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, - 0x7d, 0x3a, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x12, 0x92, 0x01, 0x0a, 0x0c, 0x55, 0x6e, - 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x52, 0x75, 0x6e, 0x12, 0x3b, 0x2e, 0x6b, 0x75, 0x62, - 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, - 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, - 0x74, 0x61, 0x31, 0x2e, 0x55, 0x6e, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x52, 0x75, 0x6e, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, - 0x2d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x27, 0x22, 0x25, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, - 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x75, 0x6e, 0x73, 0x2f, 0x7b, 0x72, 0x75, 0x6e, - 0x5f, 0x69, 0x64, 0x7d, 0x3a, 0x75, 0x6e, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x12, 0x82, - 0x01, 0x0a, 0x09, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x75, 0x6e, 0x12, 0x38, 
0x2e, 0x6b, - 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x75, 0x6e, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x23, - 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1d, 0x2a, 0x1b, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x75, 0x6e, 0x73, 0x2f, 0x7b, 0x72, 0x75, 0x6e, 0x5f, - 0x69, 0x64, 0x7d, 0x12, 0xdd, 0x01, 0x0a, 0x0c, 0x52, 0x65, 0x61, 0x64, 0x41, 0x72, 0x74, 0x69, - 0x66, 0x61, 0x63, 0x74, 0x12, 0x3b, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, - 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x65, - 0x61, 0x64, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x3c, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x41, - 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, - 0x52, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x4c, 0x12, 0x4a, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, - 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x75, 0x6e, 0x73, 0x2f, 0x7b, 0x72, 0x75, 0x6e, - 0x5f, 0x69, 0x64, 0x7d, 0x2f, 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x2f, 0x7b, 0x6e, 0x6f, 0x64, 0x65, - 0x5f, 0x69, 0x64, 0x7d, 0x2f, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x2f, 0x7b, - 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x3a, 0x72, - 0x65, 0x61, 0x64, 0x12, 0x92, 0x01, 0x0a, 0x0c, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, - 0x65, 0x52, 0x75, 0x6e, 0x12, 0x3b, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, - 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x54, 0x65, - 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x2d, 0x82, 0xd3, 0xe4, 0x93, 0x02, - 0x27, 0x22, 0x25, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2f, 0x72, 0x75, 0x6e, 0x73, 0x2f, 0x7b, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x7d, 0x3a, 0x74, - 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x12, 0x86, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x74, - 0x72, 0x79, 0x52, 0x75, 0x6e, 0x12, 0x37, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, - 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, - 0x65, 0x74, 0x72, 0x79, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, - 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, - 0x2e, 0x45, 
0x6d, 0x70, 0x74, 0x79, 0x22, 0x29, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x23, 0x22, 0x21, - 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x75, - 0x6e, 0x73, 0x2f, 0x7b, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x7d, 0x3a, 0x72, 0x65, 0x74, 0x72, - 0x79, 0x42, 0x98, 0x01, 0x92, 0x41, 0x58, 0x2a, 0x02, 0x01, 0x02, 0x52, 0x23, 0x0a, 0x07, 0x64, - 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x12, 0x18, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, - 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, - 0x5a, 0x1f, 0x0a, 0x1d, 0x0a, 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x12, 0x13, 0x08, 0x02, - 0x1a, 0x0d, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x02, 0x62, 0x0c, 0x0a, 0x0a, 0x0a, 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x12, 0x00, 0x5a, - 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, - 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, - 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, - 0x61, 0x31, 0x2f, 0x67, 0x6f, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v2beta1_run_proto_rawDesc = "" + + "\n" + + "\x1dbackend/api/v2beta1/run.proto\x12&kubeflow.pipelines.backend.api.v2beta1\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x17google/rpc/status.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\x1a(backend/api/v2beta1/runtime_config.proto\"\xcc\t\n" + + "\x03Run\x12#\n" + + "\rexperiment_id\x18\x01 \x01(\tR\fexperimentId\x12\x15\n" + + "\x06run_id\x18\x02 \x01(\tR\x05runId\x12!\n" + + "\fdisplay_name\x18\x03 \x01(\tR\vdisplayName\x12]\n" + + "\rstorage_state\x18\x04 \x01(\x0e28.kubeflow.pipelines.backend.api.v2beta1.Run.StorageStateR\fstorageState\x12 \n" + + "\vdescription\x18\x05 \x01(\tR\vdescription\x124\n" + + "\x13pipeline_version_id\x18\x06 \x01(\tB\x02\x18\x01H\x00R\x11pipelineVersionId\x12>\n" + + "\rpipeline_spec\x18\a \x01(\v2\x17.google.protobuf.StructH\x00R\fpipelineSpec\x12\x80\x01\n" + + "\x1apipeline_version_reference\x18\x12 \x01(\v2@.kubeflow.pipelines.backend.api.v2beta1.PipelineVersionReferenceH\x00R\x18pipelineVersionReference\x12\\\n" + + "\x0eruntime_config\x18\b \x01(\v25.kubeflow.pipelines.backend.api.v2beta1.RuntimeConfigR\rruntimeConfig\x12'\n" + + "\x0fservice_account\x18\t \x01(\tR\x0eserviceAccount\x129\n" + + "\n" + + "created_at\x18\n" + + " \x01(\v2\x1a.google.protobuf.TimestampR\tcreatedAt\x12=\n" + + "\fscheduled_at\x18\v \x01(\v2\x1a.google.protobuf.TimestampR\vscheduledAt\x12;\n" + + "\vfinished_at\x18\f \x01(\v2\x1a.google.protobuf.TimestampR\n" + + "finishedAt\x12J\n" + + "\x05state\x18\r \x01(\x0e24.kubeflow.pipelines.backend.api.v2beta1.RuntimeStateR\x05state\x12(\n" + + "\x05error\x18\x0e \x01(\v2\x12.google.rpc.StatusR\x05error\x12S\n" + + "\vrun_details\x18\x0f \x01(\v22.kubeflow.pipelines.backend.api.v2beta1.RunDetailsR\n" + + "runDetails\x12(\n" + + "\x10recurring_run_id\x18\x10 \x01(\tR\x0erecurringRunId\x12Z\n" + + "\rstate_history\x18\x11 \x03(\v25.kubeflow.pipelines.backend.api.v2beta1.RuntimeStatusR\fstateHistory\"J\n" + + "\fStorageState\x12\x1d\n" + + "\x19STORAGE_STATE_UNSPECIFIED\x10\x00\x12\r\n" + + "\tAVAILABLE\x10\x01\x12\f\n" + + "\bARCHIVED\x10\x02B\x11\n" + + "\x0fpipeline_source\"k\n" + + 
"\x18PipelineVersionReference\x12\x1f\n" + + "\vpipeline_id\x18\x01 \x01(\tR\n" + + "pipelineId\x12.\n" + + "\x13pipeline_version_id\x18\x02 \x01(\tR\x11pipelineVersionId\"\xc2\x01\n" + + "\rRuntimeStatus\x12;\n" + + "\vupdate_time\x18\x01 \x01(\v2\x1a.google.protobuf.TimestampR\n" + + "updateTime\x12J\n" + + "\x05state\x18\x02 \x01(\x0e24.kubeflow.pipelines.backend.api.v2beta1.RuntimeStateR\x05state\x12(\n" + + "\x05error\x18\x03 \x01(\v2\x12.google.rpc.StatusR\x05error\"\xd2\x01\n" + + "\n" + + "RunDetails\x12.\n" + + "\x13pipeline_context_id\x18\x01 \x01(\x03R\x11pipelineContextId\x125\n" + + "\x17pipeline_run_context_id\x18\x02 \x01(\x03R\x14pipelineRunContextId\x12]\n" + + "\ftask_details\x18\x03 \x03(\v2:.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetailR\vtaskDetails\"\x99\n" + + "\n" + + "\x12PipelineTaskDetail\x12\x15\n" + + "\x06run_id\x18\x01 \x01(\tR\x05runId\x12\x17\n" + + "\atask_id\x18\x02 \x01(\tR\x06taskId\x12!\n" + + "\fdisplay_name\x18\x03 \x01(\tR\vdisplayName\x12;\n" + + "\vcreate_time\x18\x04 \x01(\v2\x1a.google.protobuf.TimestampR\n" + + "createTime\x129\n" + + "\n" + + "start_time\x18\x05 \x01(\v2\x1a.google.protobuf.TimestampR\tstartTime\x125\n" + + "\bend_time\x18\x06 \x01(\v2\x1a.google.protobuf.TimestampR\aendTime\x12k\n" + + "\x0fexecutor_detail\x18\a \x01(\v2B.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskExecutorDetailR\x0eexecutorDetail\x12J\n" + + "\x05state\x18\b \x01(\x0e24.kubeflow.pipelines.backend.api.v2beta1.RuntimeStateR\x05state\x12!\n" + + "\fexecution_id\x18\t \x01(\x03R\vexecutionId\x12(\n" + + "\x05error\x18\n" + + " \x01(\v2\x12.google.rpc.StatusR\x05error\x12^\n" + + "\x06inputs\x18\v \x03(\v2F.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputsEntryR\x06inputs\x12a\n" + + "\aoutputs\x18\f \x03(\v2G.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.OutputsEntryR\aoutputs\x12$\n" + + "\x0eparent_task_id\x18\r \x01(\tR\fparentTaskId\x12Z\n" + + "\rstate_history\x18\x0e \x03(\v25.kubeflow.pipelines.backend.api.v2beta1.RuntimeStatusR\fstateHistory\x12\x19\n" + + "\bpod_name\x18\x0f \x01(\tR\apodName\x12e\n" + + "\vchild_tasks\x18\x10 \x03(\v2D.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.ChildTaskR\n" + + "childTasks\x1ao\n" + + "\vInputsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12J\n" + + "\x05value\x18\x02 \x01(\v24.kubeflow.pipelines.backend.api.v2beta1.ArtifactListR\x05value:\x028\x01\x1ap\n" + + "\fOutputsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12J\n" + + "\x05value\x18\x02 \x01(\v24.kubeflow.pipelines.backend.api.v2beta1.ArtifactListR\x05value:\x028\x01\x1aQ\n" + + "\tChildTask\x12\x19\n" + + "\atask_id\x18\x01 \x01(\tH\x00R\x06taskId\x12\x1b\n" + + "\bpod_name\x18\x02 \x01(\tH\x00R\apodNameB\f\n" + + "\n" + + "child_task\"\xd6\x01\n" + + "\x1aPipelineTaskExecutorDetail\x12\x19\n" + + "\bmain_job\x18\x01 \x01(\tR\amainJob\x121\n" + + "\x15pre_caching_check_job\x18\x02 \x01(\tR\x12preCachingCheckJob\x12(\n" + + "\x10failed_main_jobs\x18\x03 \x03(\tR\x0efailedMainJobs\x12@\n" + + "\x1dfailed_pre_caching_check_jobs\x18\x04 \x03(\tR\x19failedPreCachingCheckJobs\"1\n" + + "\fArtifactList\x12!\n" + + "\fartifact_ids\x18\x01 \x03(\x03R\vartifactIds\"z\n" + + "\x10CreateRunRequest\x12'\n" + + "\rexperiment_id\x18\x01 \x01(\tB\x02\x18\x01R\fexperimentId\x12=\n" + + "\x03run\x18\x02 \x01(\v2+.kubeflow.pipelines.backend.api.v2beta1.RunR\x03run\"O\n" + + "\rGetRunRequest\x12'\n" + + "\rexperiment_id\x18\x01 \x01(\tB\x02\x18\x01R\fexperimentId\x12\x15\n" + + "\x06run_id\x18\x02 
\x01(\tR\x05runId\"\xc1\x01\n" + + "\x0fListRunsRequest\x12\x1c\n" + + "\tnamespace\x18\x01 \x01(\tR\tnamespace\x12#\n" + + "\rexperiment_id\x18\x02 \x01(\tR\fexperimentId\x12\x1d\n" + + "\n" + + "page_token\x18\x03 \x01(\tR\tpageToken\x12\x1b\n" + + "\tpage_size\x18\x04 \x01(\x05R\bpageSize\x12\x17\n" + + "\asort_by\x18\x05 \x01(\tR\x06sortBy\x12\x16\n" + + "\x06filter\x18\x06 \x01(\tR\x06filter\"U\n" + + "\x13TerminateRunRequest\x12'\n" + + "\rexperiment_id\x18\x01 \x01(\tB\x02\x18\x01R\fexperimentId\x12\x15\n" + + "\x06run_id\x18\x02 \x01(\tR\x05runId\"\x9a\x01\n" + + "\x10ListRunsResponse\x12?\n" + + "\x04runs\x18\x01 \x03(\v2+.kubeflow.pipelines.backend.api.v2beta1.RunR\x04runs\x12\x1d\n" + + "\n" + + "total_size\x18\x02 \x01(\x05R\ttotalSize\x12&\n" + + "\x0fnext_page_token\x18\x03 \x01(\tR\rnextPageToken\"S\n" + + "\x11ArchiveRunRequest\x12'\n" + + "\rexperiment_id\x18\x01 \x01(\tB\x02\x18\x01R\fexperimentId\x12\x15\n" + + "\x06run_id\x18\x02 \x01(\tR\x05runId\"U\n" + + "\x13UnarchiveRunRequest\x12'\n" + + "\rexperiment_id\x18\x01 \x01(\tB\x02\x18\x01R\fexperimentId\x12\x15\n" + + "\x06run_id\x18\x02 \x01(\tR\x05runId\"R\n" + + "\x10DeleteRunRequest\x12'\n" + + "\rexperiment_id\x18\x01 \x01(\tB\x02\x18\x01R\fexperimentId\x12\x15\n" + + "\x06run_id\x18\x02 \x01(\tR\x05runId\"\x93\x01\n" + + "\x13ReadArtifactRequest\x12'\n" + + "\rexperiment_id\x18\x01 \x01(\tB\x02\x18\x01R\fexperimentId\x12\x15\n" + + "\x06run_id\x18\x02 \x01(\tR\x05runId\x12\x17\n" + + "\anode_id\x18\x03 \x01(\tR\x06nodeId\x12#\n" + + "\rartifact_name\x18\x04 \x01(\tR\fartifactName\"*\n" + + "\x14ReadArtifactResponse\x12\x12\n" + + "\x04data\x18\x01 \x01(\fR\x04data\"Q\n" + + "\x0fRetryRunRequest\x12'\n" + + "\rexperiment_id\x18\x01 \x01(\tB\x02\x18\x01R\fexperimentId\x12\x15\n" + + "\x06run_id\x18\x02 \x01(\tR\x05runId*\x98\x01\n" + + "\fRuntimeState\x12\x1d\n" + + "\x19RUNTIME_STATE_UNSPECIFIED\x10\x00\x12\v\n" + + "\aPENDING\x10\x01\x12\v\n" + + "\aRUNNING\x10\x02\x12\r\n" + + "\tSUCCEEDED\x10\x03\x12\v\n" + + "\aSKIPPED\x10\x04\x12\n" + + "\n" + + "\x06FAILED\x10\x05\x12\r\n" + + "\tCANCELING\x10\x06\x12\f\n" + + "\bCANCELED\x10\a\x12\n" + + "\n" + + "\x06PAUSED\x10\b2\xf9\n" + + "\n" + + "\n" + + "RunService\x12\x93\x01\n" + + "\tCreateRun\x128.kubeflow.pipelines.backend.api.v2beta1.CreateRunRequest\x1a+.kubeflow.pipelines.backend.api.v2beta1.Run\"\x1f\x82\xd3\xe4\x93\x02\x19:\x03run\"\x12/apis/v2beta1/runs\x12\x91\x01\n" + + "\x06GetRun\x125.kubeflow.pipelines.backend.api.v2beta1.GetRunRequest\x1a+.kubeflow.pipelines.backend.api.v2beta1.Run\"#\x82\xd3\xe4\x93\x02\x1d\x12\x1b/apis/v2beta1/runs/{run_id}\x12\x99\x01\n" + + "\bListRuns\x127.kubeflow.pipelines.backend.api.v2beta1.ListRunsRequest\x1a8.kubeflow.pipelines.backend.api.v2beta1.ListRunsResponse\"\x1a\x82\xd3\xe4\x93\x02\x14\x12\x12/apis/v2beta1/runs\x12\x8c\x01\n" + + "\n" + + "ArchiveRun\x129.kubeflow.pipelines.backend.api.v2beta1.ArchiveRunRequest\x1a\x16.google.protobuf.Empty\"+\x82\xd3\xe4\x93\x02%\"#/apis/v2beta1/runs/{run_id}:archive\x12\x92\x01\n" + + "\fUnarchiveRun\x12;.kubeflow.pipelines.backend.api.v2beta1.UnarchiveRunRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02'\"%/apis/v2beta1/runs/{run_id}:unarchive\x12\x82\x01\n" + + "\tDeleteRun\x128.kubeflow.pipelines.backend.api.v2beta1.DeleteRunRequest\x1a\x16.google.protobuf.Empty\"#\x82\xd3\xe4\x93\x02\x1d*\x1b/apis/v2beta1/runs/{run_id}\x12\xdd\x01\n" + + 
"\fReadArtifact\x12;.kubeflow.pipelines.backend.api.v2beta1.ReadArtifactRequest\x1a<.kubeflow.pipelines.backend.api.v2beta1.ReadArtifactResponse\"R\x82\xd3\xe4\x93\x02L\x12J/apis/v2beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read\x12\x92\x01\n" + + "\fTerminateRun\x12;.kubeflow.pipelines.backend.api.v2beta1.TerminateRunRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02'\"%/apis/v2beta1/runs/{run_id}:terminate\x12\x86\x01\n" + + "\bRetryRun\x127.kubeflow.pipelines.backend.api.v2beta1.RetryRunRequest\x1a\x16.google.protobuf.Empty\")\x82\xd3\xe4\x93\x02#\"!/apis/v2beta1/runs/{run_id}:retryB\x98\x01\x92AX*\x02\x01\x02R#\n" + + "\adefault\x12\x18\x12\x16\n" + + "\x14\x1a\x12.google.rpc.StatusZ\x1f\n" + + "\x1d\n" + + "\x06Bearer\x12\x13\b\x02\x1a\rauthorization \x02b\f\n" + + "\n" + + "\n" + + "\x06Bearer\x12\x00Z;github.com/kubeflow/pipelines/backend/api/v2beta1/go_clientb\x06proto3" var ( file_backend_api_v2beta1_run_proto_rawDescOnce sync.Once - file_backend_api_v2beta1_run_proto_rawDescData = file_backend_api_v2beta1_run_proto_rawDesc + file_backend_api_v2beta1_run_proto_rawDescData []byte ) func file_backend_api_v2beta1_run_proto_rawDescGZIP() []byte { file_backend_api_v2beta1_run_proto_rawDescOnce.Do(func() { - file_backend_api_v2beta1_run_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v2beta1_run_proto_rawDescData) + file_backend_api_v2beta1_run_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_run_proto_rawDesc), len(file_backend_api_v2beta1_run_proto_rawDesc))) }) return file_backend_api_v2beta1_run_proto_rawDescData } var file_backend_api_v2beta1_run_proto_enumTypes = make([]protoimpl.EnumInfo, 2) var file_backend_api_v2beta1_run_proto_msgTypes = make([]protoimpl.MessageInfo, 21) -var file_backend_api_v2beta1_run_proto_goTypes = []interface{}{ +var file_backend_api_v2beta1_run_proto_goTypes = []any{ (RuntimeState)(0), // 0: kubeflow.pipelines.backend.api.v2beta1.RuntimeState (Run_StorageState)(0), // 1: kubeflow.pipelines.backend.api.v2beta1.Run.StorageState (*Run)(nil), // 2: kubeflow.pipelines.backend.api.v2beta1.Run @@ -2255,242 +1952,12 @@ func file_backend_api_v2beta1_run_proto_init() { return } file_backend_api_v2beta1_runtime_config_proto_init() - if !protoimpl.UnsafeEnabled { - file_backend_api_v2beta1_run_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Run); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_run_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineVersionReference); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_run_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RuntimeStatus); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_run_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RunDetails); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_run_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineTaskDetail); i { - case 0: - return 
&v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_run_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineTaskExecutorDetail); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_run_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ArtifactList); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_run_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateRunRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_run_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetRunRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_run_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListRunsRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_run_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TerminateRunRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_run_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListRunsResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_run_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ArchiveRunRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_run_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*UnarchiveRunRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_run_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeleteRunRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_run_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ReadArtifactRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_run_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ReadArtifactResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_run_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RetryRunRequest); i { - case 0: - return &v.state - case 1: - 
return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_run_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PipelineTaskDetail_ChildTask); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_backend_api_v2beta1_run_proto_msgTypes[0].OneofWrappers = []interface{}{ + file_backend_api_v2beta1_run_proto_msgTypes[0].OneofWrappers = []any{ (*Run_PipelineVersionId)(nil), (*Run_PipelineSpec)(nil), (*Run_PipelineVersionReference)(nil), } - file_backend_api_v2beta1_run_proto_msgTypes[20].OneofWrappers = []interface{}{ + file_backend_api_v2beta1_run_proto_msgTypes[20].OneofWrappers = []any{ (*PipelineTaskDetail_ChildTask_TaskId)(nil), (*PipelineTaskDetail_ChildTask_PodName)(nil), } @@ -2498,7 +1965,7 @@ func file_backend_api_v2beta1_run_proto_init() { out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v2beta1_run_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_run_proto_rawDesc), len(file_backend_api_v2beta1_run_proto_rawDesc)), NumEnums: 2, NumMessages: 21, NumExtensions: 0, @@ -2510,397 +1977,6 @@ func file_backend_api_v2beta1_run_proto_init() { MessageInfos: file_backend_api_v2beta1_run_proto_msgTypes, }.Build() File_backend_api_v2beta1_run_proto = out.File - file_backend_api_v2beta1_run_proto_rawDesc = nil file_backend_api_v2beta1_run_proto_goTypes = nil file_backend_api_v2beta1_run_proto_depIdxs = nil } - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// RunServiceClient is the client API for RunService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type RunServiceClient interface { - // Creates a new run in an experiment specified by experiment ID. - // If experiment ID is not specified, the run is created in the default experiment. - CreateRun(ctx context.Context, in *CreateRunRequest, opts ...grpc.CallOption) (*Run, error) - // Finds a specific run by ID. - GetRun(ctx context.Context, in *GetRunRequest, opts ...grpc.CallOption) (*Run, error) - // Finds all runs in an experiment given by experiment ID. - // If experiment id is not specified, finds all runs across all experiments. - ListRuns(ctx context.Context, in *ListRunsRequest, opts ...grpc.CallOption) (*ListRunsResponse, error) - // Archives a run in an experiment given by run ID and experiment ID. - ArchiveRun(ctx context.Context, in *ArchiveRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - // Restores an archived run in an experiment given by run ID and experiment ID. - UnarchiveRun(ctx context.Context, in *UnarchiveRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - // Deletes a run in an experiment given by run ID and experiment ID. - DeleteRun(ctx context.Context, in *DeleteRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - // Finds artifact data in a run. - ReadArtifact(ctx context.Context, in *ReadArtifactRequest, opts ...grpc.CallOption) (*ReadArtifactResponse, error) - // Terminates an active run. 
- TerminateRun(ctx context.Context, in *TerminateRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) - // Re-initiates a failed or terminated run. - RetryRun(ctx context.Context, in *RetryRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) -} - -type runServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewRunServiceClient(cc grpc.ClientConnInterface) RunServiceClient { - return &runServiceClient{cc} -} - -func (c *runServiceClient) CreateRun(ctx context.Context, in *CreateRunRequest, opts ...grpc.CallOption) (*Run, error) { - out := new(Run) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.RunService/CreateRun", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *runServiceClient) GetRun(ctx context.Context, in *GetRunRequest, opts ...grpc.CallOption) (*Run, error) { - out := new(Run) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.RunService/GetRun", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *runServiceClient) ListRuns(ctx context.Context, in *ListRunsRequest, opts ...grpc.CallOption) (*ListRunsResponse, error) { - out := new(ListRunsResponse) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.RunService/ListRuns", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *runServiceClient) ArchiveRun(ctx context.Context, in *ArchiveRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.RunService/ArchiveRun", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *runServiceClient) UnarchiveRun(ctx context.Context, in *UnarchiveRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.RunService/UnarchiveRun", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *runServiceClient) DeleteRun(ctx context.Context, in *DeleteRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.RunService/DeleteRun", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *runServiceClient) ReadArtifact(ctx context.Context, in *ReadArtifactRequest, opts ...grpc.CallOption) (*ReadArtifactResponse, error) { - out := new(ReadArtifactResponse) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.RunService/ReadArtifact", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *runServiceClient) TerminateRun(ctx context.Context, in *TerminateRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.RunService/TerminateRun", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *runServiceClient) RetryRun(ctx context.Context, in *RetryRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { - out := new(emptypb.Empty) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.RunService/RetryRun", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// RunServiceServer is the server API for RunService service. 
-type RunServiceServer interface { - // Creates a new run in an experiment specified by experiment ID. - // If experiment ID is not specified, the run is created in the default experiment. - CreateRun(context.Context, *CreateRunRequest) (*Run, error) - // Finds a specific run by ID. - GetRun(context.Context, *GetRunRequest) (*Run, error) - // Finds all runs in an experiment given by experiment ID. - // If experiment id is not specified, finds all runs across all experiments. - ListRuns(context.Context, *ListRunsRequest) (*ListRunsResponse, error) - // Archives a run in an experiment given by run ID and experiment ID. - ArchiveRun(context.Context, *ArchiveRunRequest) (*emptypb.Empty, error) - // Restores an archived run in an experiment given by run ID and experiment ID. - UnarchiveRun(context.Context, *UnarchiveRunRequest) (*emptypb.Empty, error) - // Deletes a run in an experiment given by run ID and experiment ID. - DeleteRun(context.Context, *DeleteRunRequest) (*emptypb.Empty, error) - // Finds artifact data in a run. - ReadArtifact(context.Context, *ReadArtifactRequest) (*ReadArtifactResponse, error) - // Terminates an active run. - TerminateRun(context.Context, *TerminateRunRequest) (*emptypb.Empty, error) - // Re-initiates a failed or terminated run. - RetryRun(context.Context, *RetryRunRequest) (*emptypb.Empty, error) -} - -// UnimplementedRunServiceServer can be embedded to have forward compatible implementations. -type UnimplementedRunServiceServer struct { -} - -func (*UnimplementedRunServiceServer) CreateRun(context.Context, *CreateRunRequest) (*Run, error) { - return nil, status1.Errorf(codes.Unimplemented, "method CreateRun not implemented") -} -func (*UnimplementedRunServiceServer) GetRun(context.Context, *GetRunRequest) (*Run, error) { - return nil, status1.Errorf(codes.Unimplemented, "method GetRun not implemented") -} -func (*UnimplementedRunServiceServer) ListRuns(context.Context, *ListRunsRequest) (*ListRunsResponse, error) { - return nil, status1.Errorf(codes.Unimplemented, "method ListRuns not implemented") -} -func (*UnimplementedRunServiceServer) ArchiveRun(context.Context, *ArchiveRunRequest) (*emptypb.Empty, error) { - return nil, status1.Errorf(codes.Unimplemented, "method ArchiveRun not implemented") -} -func (*UnimplementedRunServiceServer) UnarchiveRun(context.Context, *UnarchiveRunRequest) (*emptypb.Empty, error) { - return nil, status1.Errorf(codes.Unimplemented, "method UnarchiveRun not implemented") -} -func (*UnimplementedRunServiceServer) DeleteRun(context.Context, *DeleteRunRequest) (*emptypb.Empty, error) { - return nil, status1.Errorf(codes.Unimplemented, "method DeleteRun not implemented") -} -func (*UnimplementedRunServiceServer) ReadArtifact(context.Context, *ReadArtifactRequest) (*ReadArtifactResponse, error) { - return nil, status1.Errorf(codes.Unimplemented, "method ReadArtifact not implemented") -} -func (*UnimplementedRunServiceServer) TerminateRun(context.Context, *TerminateRunRequest) (*emptypb.Empty, error) { - return nil, status1.Errorf(codes.Unimplemented, "method TerminateRun not implemented") -} -func (*UnimplementedRunServiceServer) RetryRun(context.Context, *RetryRunRequest) (*emptypb.Empty, error) { - return nil, status1.Errorf(codes.Unimplemented, "method RetryRun not implemented") -} - -func RegisterRunServiceServer(s *grpc.Server, srv RunServiceServer) { - s.RegisterService(&_RunService_serviceDesc, srv) -} - -func _RunService_CreateRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor 
grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CreateRunRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RunServiceServer).CreateRun(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.RunService/CreateRun", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RunServiceServer).CreateRun(ctx, req.(*CreateRunRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RunService_GetRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetRunRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RunServiceServer).GetRun(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.RunService/GetRun", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RunServiceServer).GetRun(ctx, req.(*GetRunRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RunService_ListRuns_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ListRunsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RunServiceServer).ListRuns(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.RunService/ListRuns", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RunServiceServer).ListRuns(ctx, req.(*ListRunsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RunService_ArchiveRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ArchiveRunRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RunServiceServer).ArchiveRun(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.RunService/ArchiveRun", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RunServiceServer).ArchiveRun(ctx, req.(*ArchiveRunRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RunService_UnarchiveRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(UnarchiveRunRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RunServiceServer).UnarchiveRun(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.RunService/UnarchiveRun", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RunServiceServer).UnarchiveRun(ctx, req.(*UnarchiveRunRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RunService_DeleteRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(DeleteRunRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return 
srv.(RunServiceServer).DeleteRun(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.RunService/DeleteRun", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RunServiceServer).DeleteRun(ctx, req.(*DeleteRunRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RunService_ReadArtifact_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ReadArtifactRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RunServiceServer).ReadArtifact(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.RunService/ReadArtifact", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RunServiceServer).ReadArtifact(ctx, req.(*ReadArtifactRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RunService_TerminateRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(TerminateRunRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RunServiceServer).TerminateRun(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.RunService/TerminateRun", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RunServiceServer).TerminateRun(ctx, req.(*TerminateRunRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _RunService_RetryRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(RetryRunRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(RunServiceServer).RetryRun(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.RunService/RetryRun", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(RunServiceServer).RetryRun(ctx, req.(*RetryRunRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _RunService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "kubeflow.pipelines.backend.api.v2beta1.RunService", - HandlerType: (*RunServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "CreateRun", - Handler: _RunService_CreateRun_Handler, - }, - { - MethodName: "GetRun", - Handler: _RunService_GetRun_Handler, - }, - { - MethodName: "ListRuns", - Handler: _RunService_ListRuns_Handler, - }, - { - MethodName: "ArchiveRun", - Handler: _RunService_ArchiveRun_Handler, - }, - { - MethodName: "UnarchiveRun", - Handler: _RunService_UnarchiveRun_Handler, - }, - { - MethodName: "DeleteRun", - Handler: _RunService_DeleteRun_Handler, - }, - { - MethodName: "ReadArtifact", - Handler: _RunService_ReadArtifact_Handler, - }, - { - MethodName: "TerminateRun", - Handler: _RunService_TerminateRun_Handler, - }, - { - MethodName: "RetryRun", - Handler: _RunService_RetryRun_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "backend/api/v2beta1/run.proto", -} diff --git a/backend/api/v2beta1/go_client/run.pb.gw.go b/backend/api/v2beta1/go_client/run.pb.gw.go index 03ab8f55763..47787f27e2e 100644 --- 
a/backend/api/v2beta1/go_client/run.pb.gw.go +++ b/backend/api/v2beta1/go_client/run.pb.gw.go @@ -10,876 +10,695 @@ package go_client import ( "context" + "errors" "io" "net/http" - "github.com/golang/protobuf/descriptor" - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" ) // Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = descriptor.ForMessage -var _ = metadata.Join - var ( - filter_RunService_CreateRun_0 = &utilities.DoubleArray{Encoding: map[string]int{"run": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} + _ codes.Code + _ io.Reader + _ status.Status + _ = errors.New + _ = runtime.String + _ = utilities.NewDoubleArray + _ = metadata.Join ) -func request_RunService_CreateRun_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreateRunRequest - var metadata runtime.ServerMetadata +var filter_RunService_CreateRun_0 = &utilities.DoubleArray{Encoding: map[string]int{"run": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Run); err != nil && err != io.EOF { +func request_RunService_CreateRun_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq CreateRunRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Run); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_CreateRun_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.CreateRun(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RunService_CreateRun_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreateRunRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Run); err != nil && err != io.EOF { + var ( + protoReq CreateRunRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Run); err != nil && !errors.Is(err, io.EOF) { return nil, metadata, 
status.Errorf(codes.InvalidArgument, "%v", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_CreateRun_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.CreateRun(ctx, &protoReq) return msg, metadata, err - } -var ( - filter_RunService_GetRun_0 = &utilities.DoubleArray{Encoding: map[string]int{"run_id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} -) +var filter_RunService_GetRun_0 = &utilities.DoubleArray{Encoding: map[string]int{"run_id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} func request_RunService_GetRun_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_GetRun_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.GetRun(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RunService_GetRun_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq GetRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_GetRun_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.GetRun(ctx, &protoReq) return msg, metadata, err - } -var ( - filter_RunService_ListRuns_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} -) +var filter_RunService_ListRuns_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} func request_RunService_ListRuns_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var 
protoReq ListRunsRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListRunsRequest + metadata runtime.ServerMetadata + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_ListRuns_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListRuns(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RunService_ListRuns_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ListRunsRequest - var metadata runtime.ServerMetadata - + var ( + protoReq ListRunsRequest + metadata runtime.ServerMetadata + ) if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_ListRuns_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.ListRuns(ctx, &protoReq) return msg, metadata, err - } -var ( - filter_RunService_ArchiveRun_0 = &utilities.DoubleArray{Encoding: map[string]int{"run_id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} -) +var filter_RunService_ArchiveRun_0 = &utilities.DoubleArray{Encoding: map[string]int{"run_id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} func request_RunService_ArchiveRun_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ArchiveRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq ArchiveRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_ArchiveRun_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ArchiveRun(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RunService_ArchiveRun_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ArchiveRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq ArchiveRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = 
runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_ArchiveRun_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.ArchiveRun(ctx, &protoReq) return msg, metadata, err - } -var ( - filter_RunService_UnarchiveRun_0 = &utilities.DoubleArray{Encoding: map[string]int{"run_id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} -) +var filter_RunService_UnarchiveRun_0 = &utilities.DoubleArray{Encoding: map[string]int{"run_id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} func request_RunService_UnarchiveRun_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq UnarchiveRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq UnarchiveRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_UnarchiveRun_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.UnarchiveRun(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RunService_UnarchiveRun_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq UnarchiveRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq UnarchiveRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_UnarchiveRun_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.UnarchiveRun(ctx, &protoReq) return msg, metadata, err - } -var ( - filter_RunService_DeleteRun_0 = &utilities.DoubleArray{Encoding: map[string]int{"run_id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} -) +var filter_RunService_DeleteRun_0 = &utilities.DoubleArray{Encoding: map[string]int{"run_id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 
1, 2}} func request_RunService_DeleteRun_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeleteRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DeleteRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_DeleteRun_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.DeleteRun(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RunService_DeleteRun_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq DeleteRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq DeleteRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_DeleteRun_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.DeleteRun(ctx, &protoReq) return msg, metadata, err - } -var ( - filter_RunService_ReadArtifact_0 = &utilities.DoubleArray{Encoding: map[string]int{"run_id": 0, "node_id": 1, "artifact_name": 2}, Base: []int{1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 1, 1, 2, 3, 4}} -) +var filter_RunService_ReadArtifact_0 = &utilities.DoubleArray{Encoding: map[string]int{"run_id": 0, "node_id": 1, "artifact_name": 2}, Base: []int{1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 1, 1, 2, 3, 4}} func request_RunService_ReadArtifact_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ReadArtifactRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq ReadArtifactRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return 
nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - val, ok = pathParams["node_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "node_id") } - protoReq.NodeId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "node_id", err) } - val, ok = pathParams["artifact_name"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "artifact_name") } - protoReq.ArtifactName, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "artifact_name", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_ReadArtifact_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ReadArtifact(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RunService_ReadArtifact_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ReadArtifactRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq ReadArtifactRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - val, ok = pathParams["node_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "node_id") } - protoReq.NodeId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "node_id", err) } - val, ok = pathParams["artifact_name"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "artifact_name") } - protoReq.ArtifactName, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "artifact_name", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_ReadArtifact_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.ReadArtifact(ctx, &protoReq) return msg, metadata, err - } -var ( - filter_RunService_TerminateRun_0 = &utilities.DoubleArray{Encoding: map[string]int{"run_id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} -) +var filter_RunService_TerminateRun_0 = &utilities.DoubleArray{Encoding: map[string]int{"run_id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} func request_RunService_TerminateRun_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, 
error) { - var protoReq TerminateRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq TerminateRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_TerminateRun_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.TerminateRun(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RunService_TerminateRun_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq TerminateRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq TerminateRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_TerminateRun_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.TerminateRun(ctx, &protoReq) return msg, metadata, err - } -var ( - filter_RunService_RetryRun_0 = &utilities.DoubleArray{Encoding: map[string]int{"run_id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} -) +var filter_RunService_RetryRun_0 = &utilities.DoubleArray{Encoding: map[string]int{"run_id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} func request_RunService_RetryRun_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq RetryRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq RetryRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, 
filter_RunService_RetryRun_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.RetryRun(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_RunService_RetryRun_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq RetryRunRequest - var metadata runtime.ServerMetadata - var ( - val string - ok bool - err error - _ = err + protoReq RetryRunRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["run_id"] + val, ok := pathParams["run_id"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "run_id") } - protoReq.RunId, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "run_id", err) } - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_RetryRun_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := server.RetryRun(ctx, &protoReq) return msg, metadata, err - } // RegisterRunServiceHandlerServer registers the http handlers for service RunService to "mux". // UnaryRPC :call RunServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. // Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterRunServiceHandlerFromEndpoint instead. +// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call. 
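The regenerated request_*/local_request_* helpers above follow the newer protoc-gen-grpc-gateway output: the per-method DoubleArray filters collapse to single-line var declarations, path parameters are bound with a short := lookup, and the client-side request_* variants now drain any unread request body (io.Copy into io.Discard) before invoking the RPC. As a rough illustration of what those filters are for, the sketch below is not generated code; it assumes the generated go_client package is importable at the path this repository uses and feeds in a made-up query string to show runtime.PopulateQueryParameters skipping a field that was already bound from the URL path.

package main

import (
	"fmt"
	"net/url"

	gw "github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
	"github.com/grpc-ecosystem/grpc-gateway/v2/utilities"

	api "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
)

// Mirrors filter_RunService_ArchiveRun_0 from the generated file: "run_id"
// is marked as already consumed by the URL path pattern.
var archiveFilter = &utilities.DoubleArray{
	Encoding: map[string]int{"run_id": 0},
	Base:     []int{1, 1, 0},
	Check:    []int{0, 1, 2},
}

func main() {
	// Hypothetical query string; "run_id" should be ignored because of the
	// filter, so only genuinely query-borne fields get populated.
	vals := url.Values{"run_id": {"from-query"}}
	var req api.ArchiveRunRequest
	if err := gw.PopulateQueryParameters(&req, vals, archiveFilter); err != nil {
		fmt.Println("invalid query parameters:", err)
	}
	fmt.Println(req.GetRunId()) // still empty; the path-bound value is set separately in the handler
}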
func RegisterRunServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server RunServiceServer) error { - - mux.Handle("POST", pattern_RunService_CreateRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_CreateRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/CreateRun", runtime.WithHTTPPathPattern("/apis/v2beta1/runs")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RunService_CreateRun_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RunService_CreateRun_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_CreateRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_CreateRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_RunService_GetRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_RunService_GetRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/GetRun", runtime.WithHTTPPathPattern("/apis/v2beta1/runs/{run_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RunService_GetRun_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RunService_GetRun_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_GetRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_GetRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_RunService_ListRuns_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_RunService_ListRuns_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/ListRuns", runtime.WithHTTPPathPattern("/apis/v2beta1/runs")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RunService_ListRuns_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RunService_ListRuns_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_ListRuns_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_ListRuns_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_RunService_ArchiveRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_ArchiveRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/ArchiveRun", runtime.WithHTTPPathPattern("/apis/v2beta1/runs/{run_id}:archive")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RunService_ArchiveRun_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RunService_ArchiveRun_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_ArchiveRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_ArchiveRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_RunService_UnarchiveRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_UnarchiveRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/UnarchiveRun", runtime.WithHTTPPathPattern("/apis/v2beta1/runs/{run_id}:unarchive")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RunService_UnarchiveRun_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RunService_UnarchiveRun_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_UnarchiveRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_UnarchiveRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("DELETE", pattern_RunService_DeleteRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodDelete, pattern_RunService_DeleteRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/DeleteRun", runtime.WithHTTPPathPattern("/apis/v2beta1/runs/{run_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RunService_DeleteRun_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RunService_DeleteRun_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_DeleteRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_DeleteRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_RunService_ReadArtifact_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_RunService_ReadArtifact_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/ReadArtifact", runtime.WithHTTPPathPattern("/apis/v2beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RunService_ReadArtifact_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RunService_ReadArtifact_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_ReadArtifact_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_ReadArtifact_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_RunService_TerminateRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_TerminateRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/TerminateRun", runtime.WithHTTPPathPattern("/apis/v2beta1/runs/{run_id}:terminate")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RunService_TerminateRun_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RunService_TerminateRun_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_TerminateRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- + forward_RunService_TerminateRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_RunService_RetryRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_RetryRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/RetryRun", runtime.WithHTTPPathPattern("/apis/v2beta1/runs/{run_id}:retry")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_RunService_RetryRun_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_RunService_RetryRun_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_RetryRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_RetryRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) return nil @@ -888,25 +707,24 @@ func RegisterRunServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, // RegisterRunServiceHandlerFromEndpoint is same as RegisterRunServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterRunServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) + conn, err := grpc.NewClient(endpoint, opts...) if err != nil { return err } defer func() { if err != nil { if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } return } go func() { <-ctx.Done() if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } }() }() - return RegisterRunServiceHandler(ctx, mux, conn) } @@ -920,228 +738,184 @@ func RegisterRunServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn // to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "RunServiceClient". // Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "RunServiceClient" // doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "RunServiceClient" to call the correct interceptors. +// "RunServiceClient" to call the correct interceptors. This client ignores the HTTP middlewares. 
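Note the endpoint registration above now builds the connection with grpc.NewClient instead of the deprecated grpc.Dial, so the ClientConn is created lazily and only connects on the first proxied RPC, and close failures are logged at Error level rather than Info. The following is a minimal wiring sketch, not part of this diff; it assumes the generated package import path used by this repository and a gRPC API server already listening on localhost:8887, both of which would need adjusting in a real deployment.

package main

import (
	"context"
	"log"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"

	api "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	mux := runtime.NewServeMux()
	// RegisterRunServiceHandlerFromEndpoint (generated above) now uses
	// grpc.NewClient internally, so no connection is made until the first call.
	opts := []grpc.DialOption{grpc.WithTransportCredentials(insecure.NewCredentials())}
	if err := api.RegisterRunServiceHandlerFromEndpoint(ctx, mux, "localhost:8887", opts); err != nil {
		log.Fatalf("failed to register run service handler: %v", err)
	}
	// HTTP requests such as GET /apis/v2beta1/runs are proxied to the gRPC endpoint.
	log.Fatal(http.ListenAndServe(":8888", mux))
}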
func RegisterRunServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client RunServiceClient) error { - - mux.Handle("POST", pattern_RunService_CreateRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_CreateRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/CreateRun", runtime.WithHTTPPathPattern("/apis/v2beta1/runs")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RunService_CreateRun_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RunService_CreateRun_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_CreateRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_CreateRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("GET", pattern_RunService_GetRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_RunService_GetRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/GetRun", runtime.WithHTTPPathPattern("/apis/v2beta1/runs/{run_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RunService_GetRun_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RunService_GetRun_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_GetRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_GetRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_RunService_ListRuns_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_RunService_ListRuns_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/ListRuns", runtime.WithHTTPPathPattern("/apis/v2beta1/runs")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RunService_ListRuns_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RunService_ListRuns_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_ListRuns_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_ListRuns_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_RunService_ArchiveRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_ArchiveRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/ArchiveRun", runtime.WithHTTPPathPattern("/apis/v2beta1/runs/{run_id}:archive")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RunService_ArchiveRun_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RunService_ArchiveRun_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_ArchiveRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_ArchiveRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_RunService_UnarchiveRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_UnarchiveRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/UnarchiveRun", runtime.WithHTTPPathPattern("/apis/v2beta1/runs/{run_id}:unarchive")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RunService_UnarchiveRun_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RunService_UnarchiveRun_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_UnarchiveRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_UnarchiveRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("DELETE", pattern_RunService_DeleteRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodDelete, pattern_RunService_DeleteRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/DeleteRun", runtime.WithHTTPPathPattern("/apis/v2beta1/runs/{run_id}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RunService_DeleteRun_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RunService_DeleteRun_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_DeleteRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_DeleteRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("GET", pattern_RunService_ReadArtifact_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodGet, pattern_RunService_ReadArtifact_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/ReadArtifact", runtime.WithHTTPPathPattern("/apis/v2beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RunService_ReadArtifact_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RunService_ReadArtifact_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_ReadArtifact_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_ReadArtifact_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - - mux.Handle("POST", pattern_RunService_TerminateRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_TerminateRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/TerminateRun", runtime.WithHTTPPathPattern("/apis/v2beta1/runs/{run_id}:terminate")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RunService_TerminateRun_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RunService_TerminateRun_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_TerminateRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_TerminateRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - - mux.Handle("POST", pattern_RunService_RetryRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_RunService_RetryRun_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/RetryRun", runtime.WithHTTPPathPattern("/apis/v2beta1/runs/{run_id}:retry")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_RunService_RetryRun_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_RunService_RetryRun_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_RunService_RetryRun_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_RunService_RetryRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - return nil } var ( - pattern_RunService_CreateRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "runs"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_RunService_GetRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_RunService_ListRuns_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "runs"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_RunService_ArchiveRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "archive", runtime.AssumeColonVerbOpt(true))) - - pattern_RunService_UnarchiveRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "unarchive", runtime.AssumeColonVerbOpt(true))) - - pattern_RunService_DeleteRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "", runtime.AssumeColonVerbOpt(true))) - - pattern_RunService_ReadArtifact_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4, 1, 0, 4, 1, 5, 5, 2, 6, 1, 0, 4, 1, 5, 7}, []string{"apis", "v2beta1", "runs", "run_id", "nodes", "node_id", "artifacts", "artifact_name"}, "read", runtime.AssumeColonVerbOpt(true))) - - pattern_RunService_TerminateRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "terminate", runtime.AssumeColonVerbOpt(true))) - - pattern_RunService_RetryRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "retry", runtime.AssumeColonVerbOpt(true))) + pattern_RunService_CreateRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 
1, 2, 2}, []string{"apis", "v2beta1", "runs"}, "")) + pattern_RunService_GetRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "")) + pattern_RunService_ListRuns_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "runs"}, "")) + pattern_RunService_ArchiveRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "archive")) + pattern_RunService_UnarchiveRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "unarchive")) + pattern_RunService_DeleteRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "")) + pattern_RunService_ReadArtifact_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4, 1, 0, 4, 1, 5, 5, 2, 6, 1, 0, 4, 1, 5, 7}, []string{"apis", "v2beta1", "runs", "run_id", "nodes", "node_id", "artifacts", "artifact_name"}, "read")) + pattern_RunService_TerminateRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "terminate")) + pattern_RunService_RetryRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "retry")) ) var ( - forward_RunService_CreateRun_0 = runtime.ForwardResponseMessage - - forward_RunService_GetRun_0 = runtime.ForwardResponseMessage - - forward_RunService_ListRuns_0 = runtime.ForwardResponseMessage - - forward_RunService_ArchiveRun_0 = runtime.ForwardResponseMessage - + forward_RunService_CreateRun_0 = runtime.ForwardResponseMessage + forward_RunService_GetRun_0 = runtime.ForwardResponseMessage + forward_RunService_ListRuns_0 = runtime.ForwardResponseMessage + forward_RunService_ArchiveRun_0 = runtime.ForwardResponseMessage forward_RunService_UnarchiveRun_0 = runtime.ForwardResponseMessage - - forward_RunService_DeleteRun_0 = runtime.ForwardResponseMessage - + forward_RunService_DeleteRun_0 = runtime.ForwardResponseMessage forward_RunService_ReadArtifact_0 = runtime.ForwardResponseMessage - forward_RunService_TerminateRun_0 = runtime.ForwardResponseMessage - - forward_RunService_RetryRun_0 = runtime.ForwardResponseMessage + forward_RunService_RetryRun_0 = runtime.ForwardResponseMessage ) diff --git a/backend/api/v2beta1/go_client/run_grpc.pb.go b/backend/api/v2beta1/go_client/run_grpc.pb.go new file mode 100644 index 00000000000..334fb5bf735 --- /dev/null +++ b/backend/api/v2beta1/go_client/run_grpc.pb.go @@ -0,0 +1,462 @@ +// Copyright 2018 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. 
+// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v6.31.1 +// source: backend/api/v2beta1/run.proto + +package go_client + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + emptypb "google.golang.org/protobuf/types/known/emptypb" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + RunService_CreateRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/CreateRun" + RunService_GetRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/GetRun" + RunService_ListRuns_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/ListRuns" + RunService_ArchiveRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/ArchiveRun" + RunService_UnarchiveRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/UnarchiveRun" + RunService_DeleteRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/DeleteRun" + RunService_ReadArtifact_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/ReadArtifact" + RunService_TerminateRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/TerminateRun" + RunService_RetryRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/RetryRun" +) + +// RunServiceClient is the client API for RunService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type RunServiceClient interface { + // Creates a new run in an experiment specified by experiment ID. + // If experiment ID is not specified, the run is created in the default experiment. + CreateRun(ctx context.Context, in *CreateRunRequest, opts ...grpc.CallOption) (*Run, error) + // Finds a specific run by ID. + GetRun(ctx context.Context, in *GetRunRequest, opts ...grpc.CallOption) (*Run, error) + // Finds all runs in an experiment given by experiment ID. + // If experiment id is not specified, finds all runs across all experiments. + ListRuns(ctx context.Context, in *ListRunsRequest, opts ...grpc.CallOption) (*ListRunsResponse, error) + // Archives a run in an experiment given by run ID and experiment ID. + ArchiveRun(ctx context.Context, in *ArchiveRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + // Restores an archived run in an experiment given by run ID and experiment ID. + UnarchiveRun(ctx context.Context, in *UnarchiveRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + // Deletes a run in an experiment given by run ID and experiment ID. + DeleteRun(ctx context.Context, in *DeleteRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + // Finds artifact data in a run. + ReadArtifact(ctx context.Context, in *ReadArtifactRequest, opts ...grpc.CallOption) (*ReadArtifactResponse, error) + // Terminates an active run. + TerminateRun(ctx context.Context, in *TerminateRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + // Re-initiates a failed or terminated run. 
+ RetryRun(ctx context.Context, in *RetryRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) +} + +type runServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewRunServiceClient(cc grpc.ClientConnInterface) RunServiceClient { + return &runServiceClient{cc} +} + +func (c *runServiceClient) CreateRun(ctx context.Context, in *CreateRunRequest, opts ...grpc.CallOption) (*Run, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Run) + err := c.cc.Invoke(ctx, RunService_CreateRun_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) GetRun(ctx context.Context, in *GetRunRequest, opts ...grpc.CallOption) (*Run, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Run) + err := c.cc.Invoke(ctx, RunService_GetRun_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) ListRuns(ctx context.Context, in *ListRunsRequest, opts ...grpc.CallOption) (*ListRunsResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ListRunsResponse) + err := c.cc.Invoke(ctx, RunService_ListRuns_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) ArchiveRun(ctx context.Context, in *ArchiveRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, RunService_ArchiveRun_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) UnarchiveRun(ctx context.Context, in *UnarchiveRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, RunService_UnarchiveRun_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) DeleteRun(ctx context.Context, in *DeleteRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, RunService_DeleteRun_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) ReadArtifact(ctx context.Context, in *ReadArtifactRequest, opts ...grpc.CallOption) (*ReadArtifactResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ReadArtifactResponse) + err := c.cc.Invoke(ctx, RunService_ReadArtifact_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) TerminateRun(ctx context.Context, in *TerminateRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, RunService_TerminateRun_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) RetryRun(ctx context.Context, in *RetryRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) 
+ out := new(emptypb.Empty) + err := c.cc.Invoke(ctx, RunService_RetryRun_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// RunServiceServer is the server API for RunService service. +// All implementations must embed UnimplementedRunServiceServer +// for forward compatibility. +type RunServiceServer interface { + // Creates a new run in an experiment specified by experiment ID. + // If experiment ID is not specified, the run is created in the default experiment. + CreateRun(context.Context, *CreateRunRequest) (*Run, error) + // Finds a specific run by ID. + GetRun(context.Context, *GetRunRequest) (*Run, error) + // Finds all runs in an experiment given by experiment ID. + // If experiment id is not specified, finds all runs across all experiments. + ListRuns(context.Context, *ListRunsRequest) (*ListRunsResponse, error) + // Archives a run in an experiment given by run ID and experiment ID. + ArchiveRun(context.Context, *ArchiveRunRequest) (*emptypb.Empty, error) + // Restores an archived run in an experiment given by run ID and experiment ID. + UnarchiveRun(context.Context, *UnarchiveRunRequest) (*emptypb.Empty, error) + // Deletes a run in an experiment given by run ID and experiment ID. + DeleteRun(context.Context, *DeleteRunRequest) (*emptypb.Empty, error) + // Finds artifact data in a run. + ReadArtifact(context.Context, *ReadArtifactRequest) (*ReadArtifactResponse, error) + // Terminates an active run. + TerminateRun(context.Context, *TerminateRunRequest) (*emptypb.Empty, error) + // Re-initiates a failed or terminated run. + RetryRun(context.Context, *RetryRunRequest) (*emptypb.Empty, error) + mustEmbedUnimplementedRunServiceServer() +} + +// UnimplementedRunServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. 
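Before the UnimplementedRunServiceServer type declared just below, here is a hedged implementation sketch (not part of this diff) of how a server would typically satisfy RunServiceServer: embed UnimplementedRunServiceServer by value, as the generated comment above advises, and override only the RPCs you need. The import path, port, and demoRunServer type are illustrative assumptions.

package main

import (
	"context"
	"log"
	"net"

	"google.golang.org/grpc"

	api "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
)

// demoRunServer is a hypothetical, partial implementation. Embedding
// UnimplementedRunServiceServer by value keeps this type compiling if new
// RPCs are added to RunService; any method not overridden returns
// codes.Unimplemented to the caller.
type demoRunServer struct {
	api.UnimplementedRunServiceServer
}

func (s *demoRunServer) GetRun(ctx context.Context, in *api.GetRunRequest) (*api.Run, error) {
	// Echo the requested ID back in a minimal Run; a real server would look it up.
	return &api.Run{RunId: in.GetRunId()}, nil
}

func main() {
	lis, err := net.Listen("tcp", ":8887") // hypothetical port
	if err != nil {
		log.Fatal(err)
	}
	s := grpc.NewServer()
	api.RegisterRunServiceServer(s, &demoRunServer{})
	log.Fatal(s.Serve(lis))
}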
+type UnimplementedRunServiceServer struct{} + +func (UnimplementedRunServiceServer) CreateRun(context.Context, *CreateRunRequest) (*Run, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreateRun not implemented") +} +func (UnimplementedRunServiceServer) GetRun(context.Context, *GetRunRequest) (*Run, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetRun not implemented") +} +func (UnimplementedRunServiceServer) ListRuns(context.Context, *ListRunsRequest) (*ListRunsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListRuns not implemented") +} +func (UnimplementedRunServiceServer) ArchiveRun(context.Context, *ArchiveRunRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method ArchiveRun not implemented") +} +func (UnimplementedRunServiceServer) UnarchiveRun(context.Context, *UnarchiveRunRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method UnarchiveRun not implemented") +} +func (UnimplementedRunServiceServer) DeleteRun(context.Context, *DeleteRunRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeleteRun not implemented") +} +func (UnimplementedRunServiceServer) ReadArtifact(context.Context, *ReadArtifactRequest) (*ReadArtifactResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ReadArtifact not implemented") +} +func (UnimplementedRunServiceServer) TerminateRun(context.Context, *TerminateRunRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method TerminateRun not implemented") +} +func (UnimplementedRunServiceServer) RetryRun(context.Context, *RetryRunRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "method RetryRun not implemented") +} +func (UnimplementedRunServiceServer) mustEmbedUnimplementedRunServiceServer() {} +func (UnimplementedRunServiceServer) testEmbeddedByValue() {} + +// UnsafeRunServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to RunServiceServer will +// result in compilation errors. +type UnsafeRunServiceServer interface { + mustEmbedUnimplementedRunServiceServer() +} + +func RegisterRunServiceServer(s grpc.ServiceRegistrar, srv RunServiceServer) { + // If the following call pancis, it indicates UnimplementedRunServiceServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. 
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&RunService_ServiceDesc, srv) +} + +func _RunService_CreateRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).CreateRun(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_CreateRun_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).CreateRun(ctx, req.(*CreateRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RunService_GetRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).GetRun(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_GetRun_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).GetRun(ctx, req.(*GetRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RunService_ListRuns_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListRunsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).ListRuns(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_ListRuns_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).ListRuns(ctx, req.(*ListRunsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RunService_ArchiveRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ArchiveRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).ArchiveRun(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_ArchiveRun_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).ArchiveRun(ctx, req.(*ArchiveRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RunService_UnarchiveRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UnarchiveRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).UnarchiveRun(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_UnarchiveRun_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).UnarchiveRun(ctx, req.(*UnarchiveRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RunService_DeleteRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteRunRequest) + 
if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).DeleteRun(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_DeleteRun_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).DeleteRun(ctx, req.(*DeleteRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RunService_ReadArtifact_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ReadArtifactRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).ReadArtifact(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_ReadArtifact_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).ReadArtifact(ctx, req.(*ReadArtifactRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RunService_TerminateRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(TerminateRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).TerminateRun(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_TerminateRun_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).TerminateRun(ctx, req.(*TerminateRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RunService_RetryRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RetryRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).RetryRun(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_RetryRun_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).RetryRun(ctx, req.(*RetryRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// RunService_ServiceDesc is the grpc.ServiceDesc for RunService service. 
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var RunService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "kubeflow.pipelines.backend.api.v2beta1.RunService", + HandlerType: (*RunServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateRun", + Handler: _RunService_CreateRun_Handler, + }, + { + MethodName: "GetRun", + Handler: _RunService_GetRun_Handler, + }, + { + MethodName: "ListRuns", + Handler: _RunService_ListRuns_Handler, + }, + { + MethodName: "ArchiveRun", + Handler: _RunService_ArchiveRun_Handler, + }, + { + MethodName: "UnarchiveRun", + Handler: _RunService_UnarchiveRun_Handler, + }, + { + MethodName: "DeleteRun", + Handler: _RunService_DeleteRun_Handler, + }, + { + MethodName: "ReadArtifact", + Handler: _RunService_ReadArtifact_Handler, + }, + { + MethodName: "TerminateRun", + Handler: _RunService_TerminateRun_Handler, + }, + { + MethodName: "RetryRun", + Handler: _RunService_RetryRun_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "backend/api/v2beta1/run.proto", +} diff --git a/backend/api/v2beta1/go_client/runtime_config.pb.go b/backend/api/v2beta1/go_client/runtime_config.pb.go index 327174b7c0d..cd723ad3e04 100644 --- a/backend/api/v2beta1/go_client/runtime_config.pb.go +++ b/backend/api/v2beta1/go_client/runtime_config.pb.go @@ -14,8 +14,8 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v2beta1/runtime_config.proto package go_client @@ -26,6 +26,7 @@ import ( structpb "google.golang.org/protobuf/types/known/structpb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -37,27 +38,24 @@ const ( // The runtime config. type RuntimeConfig struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // The runtime parameters of the Pipeline. The parameters will be // used to replace the placeholders at runtime. - Parameters map[string]*structpb.Value `protobuf:"bytes,1,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Parameters map[string]*structpb.Value `protobuf:"bytes,1,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // A path in a object store bucket which will be treated as the root // output directory of the pipeline. It is used by the system to // generate the paths of output artifacts. 
// Ref:(https://www.kubeflow.org/docs/components/pipelines/pipeline-root/) - PipelineRoot string `protobuf:"bytes,2,opt,name=pipeline_root,json=pipelineRoot,proto3" json:"pipeline_root,omitempty"` + PipelineRoot string `protobuf:"bytes,2,opt,name=pipeline_root,json=pipelineRoot,proto3" json:"pipeline_root,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *RuntimeConfig) Reset() { *x = RuntimeConfig{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_runtime_config_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_runtime_config_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *RuntimeConfig) String() string { @@ -68,7 +66,7 @@ func (*RuntimeConfig) ProtoMessage() {} func (x *RuntimeConfig) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_runtime_config_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -99,50 +97,32 @@ func (x *RuntimeConfig) GetPipelineRoot() string { var File_backend_api_v2beta1_runtime_config_proto protoreflect.FileDescriptor -var file_backend_api_v2beta1_runtime_config_proto_rawDesc = []byte{ - 0x0a, 0x28, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x63, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x26, 0x6b, 0x75, 0x62, 0x65, - 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, - 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, - 0x61, 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2f, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x22, 0xf2, 0x01, 0x0a, 0x0d, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x12, 0x65, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x45, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, - 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, - 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x50, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x0c, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x1a, 0x55, - 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, - 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, - 0x6b, 0x65, 0x79, 0x12, 0x2c, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 
0x75, - 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x3d, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, - 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, - 0x70, 0x69, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x67, 0x6f, 0x5f, 0x63, 0x6c, - 0x69, 0x65, 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v2beta1_runtime_config_proto_rawDesc = "" + + "\n" + + "(backend/api/v2beta1/runtime_config.proto\x12&kubeflow.pipelines.backend.api.v2beta1\x1a\x1cgoogle/protobuf/struct.proto\"\xf2\x01\n" + + "\rRuntimeConfig\x12e\n" + + "\n" + + "parameters\x18\x01 \x03(\v2E.kubeflow.pipelines.backend.api.v2beta1.RuntimeConfig.ParametersEntryR\n" + + "parameters\x12#\n" + + "\rpipeline_root\x18\x02 \x01(\tR\fpipelineRoot\x1aU\n" + + "\x0fParametersEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12,\n" + + "\x05value\x18\x02 \x01(\v2\x16.google.protobuf.ValueR\x05value:\x028\x01B=Z;github.com/kubeflow/pipelines/backend/api/v2beta1/go_clientb\x06proto3" var ( file_backend_api_v2beta1_runtime_config_proto_rawDescOnce sync.Once - file_backend_api_v2beta1_runtime_config_proto_rawDescData = file_backend_api_v2beta1_runtime_config_proto_rawDesc + file_backend_api_v2beta1_runtime_config_proto_rawDescData []byte ) func file_backend_api_v2beta1_runtime_config_proto_rawDescGZIP() []byte { file_backend_api_v2beta1_runtime_config_proto_rawDescOnce.Do(func() { - file_backend_api_v2beta1_runtime_config_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v2beta1_runtime_config_proto_rawDescData) + file_backend_api_v2beta1_runtime_config_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_runtime_config_proto_rawDesc), len(file_backend_api_v2beta1_runtime_config_proto_rawDesc))) }) return file_backend_api_v2beta1_runtime_config_proto_rawDescData } var file_backend_api_v2beta1_runtime_config_proto_msgTypes = make([]protoimpl.MessageInfo, 2) -var file_backend_api_v2beta1_runtime_config_proto_goTypes = []interface{}{ +var file_backend_api_v2beta1_runtime_config_proto_goTypes = []any{ (*RuntimeConfig)(nil), // 0: kubeflow.pipelines.backend.api.v2beta1.RuntimeConfig nil, // 1: kubeflow.pipelines.backend.api.v2beta1.RuntimeConfig.ParametersEntry (*structpb.Value)(nil), // 2: google.protobuf.Value @@ -162,25 +142,11 @@ func file_backend_api_v2beta1_runtime_config_proto_init() { if File_backend_api_v2beta1_runtime_config_proto != nil { return } - if !protoimpl.UnsafeEnabled { - file_backend_api_v2beta1_runtime_config_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RuntimeConfig); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v2beta1_runtime_config_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_runtime_config_proto_rawDesc), len(file_backend_api_v2beta1_runtime_config_proto_rawDesc)), NumEnums: 0, NumMessages: 2, NumExtensions: 0, @@ -191,7 +157,6 @@ func file_backend_api_v2beta1_runtime_config_proto_init() { MessageInfos: file_backend_api_v2beta1_runtime_config_proto_msgTypes, }.Build() File_backend_api_v2beta1_runtime_config_proto = out.File - 
file_backend_api_v2beta1_runtime_config_proto_rawDesc = nil file_backend_api_v2beta1_runtime_config_proto_goTypes = nil file_backend_api_v2beta1_runtime_config_proto_depIdxs = nil } diff --git a/backend/api/v2beta1/go_client/visualization.pb.go b/backend/api/v2beta1/go_client/visualization.pb.go index b82350c1249..00a601723b1 100644 --- a/backend/api/v2beta1/go_client/visualization.pb.go +++ b/backend/api/v2beta1/go_client/visualization.pb.go @@ -14,24 +14,20 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: backend/api/v2beta1/visualization.proto package go_client import ( - context "context" - _ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options" + _ "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options" _ "google.golang.org/genproto/googleapis/api/annotations" - _ "google.golang.org/genproto/googleapis/rpc/status" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -103,21 +99,18 @@ func (Visualization_Type) EnumDescriptor() ([]byte, []int) { // and input data paths. Input dat paths are assumed to be unique and are used // for determining output path. type CreateVisualizationRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Visualization *Visualization `protobuf:"bytes,1,opt,name=visualization,proto3" json:"visualization,omitempty"` + Namespace string `protobuf:"bytes,2,opt,name=namespace,proto3" json:"namespace,omitempty"` unknownFields protoimpl.UnknownFields - - Visualization *Visualization `protobuf:"bytes,1,opt,name=visualization,proto3" json:"visualization,omitempty"` - Namespace string `protobuf:"bytes,2,opt,name=namespace,proto3" json:"namespace,omitempty"` + sizeCache protoimpl.SizeCache } func (x *CreateVisualizationRequest) Reset() { *x = CreateVisualizationRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_visualization_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_visualization_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *CreateVisualizationRequest) String() string { @@ -128,7 +121,7 @@ func (*CreateVisualizationRequest) ProtoMessage() {} func (x *CreateVisualizationRequest) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_visualization_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -158,11 +151,8 @@ func (x *CreateVisualizationRequest) GetNamespace() string { } type Visualization struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Type Visualization_Type `protobuf:"varint,1,opt,name=type,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.Visualization_Type" json:"type,omitempty"` + state protoimpl.MessageState `protogen:"open.v1"` + Type Visualization_Type `protobuf:"varint,1,opt,name=type,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.Visualization_Type" json:"type,omitempty"` // Path pattern of 
input data to be used during generation of visualizations. // This is required when creating the pipeline through CreateVisualization // API. @@ -177,16 +167,16 @@ type Visualization struct { // In case any error happens when generating visualizations, only // visualization ID and the error message are returned. Client has the // flexibility of choosing how to handle the error. - Error string `protobuf:"bytes,5,opt,name=error,proto3" json:"error,omitempty"` + Error string `protobuf:"bytes,5,opt,name=error,proto3" json:"error,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Visualization) Reset() { *x = Visualization{} - if protoimpl.UnsafeEnabled { - mi := &file_backend_api_v2beta1_visualization_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_backend_api_v2beta1_visualization_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Visualization) String() string { @@ -197,7 +187,7 @@ func (*Visualization) ProtoMessage() {} func (x *Visualization) ProtoReflect() protoreflect.Message { mi := &file_backend_api_v2beta1_visualization_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -249,87 +239,50 @@ func (x *Visualization) GetError() string { var File_backend_api_v2beta1_visualization_proto protoreflect.FileDescriptor -var file_backend_api_v2beta1_visualization_proto_rawDesc = []byte{ - 0x0a, 0x27, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x76, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x26, 0x6b, 0x75, 0x62, 0x65, 0x66, - 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, - 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, - 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, - 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, - 0x2c, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, 0x67, 0x65, 0x6e, 0x2d, 0x73, 0x77, 0x61, 0x67, - 0x67, 0x65, 0x72, 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, - 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x67, - 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x72, 0x70, 0x63, 0x2f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x97, 0x01, 0x0a, 0x1a, 0x43, 0x72, 0x65, 0x61, 0x74, - 0x65, 0x56, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x5b, 0x0a, 0x0d, 0x76, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, - 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x35, 0x2e, 0x6b, - 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, - 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x56, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x52, 0x0d, 0x76, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 
0x61, 0x63, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, - 0x22, 0x81, 0x02, 0x0a, 0x0d, 0x56, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x12, 0x4e, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x3a, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x56, 0x69, 0x73, 0x75, 0x61, 0x6c, - 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, - 0x70, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x61, 0x72, - 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x61, - 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x74, 0x6d, 0x6c, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x74, 0x6d, 0x6c, 0x12, 0x14, 0x0a, 0x05, - 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, - 0x6f, 0x72, 0x22, 0x40, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0d, 0x0a, 0x09, 0x52, 0x4f, - 0x43, 0x5f, 0x43, 0x55, 0x52, 0x56, 0x45, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x54, 0x46, 0x44, - 0x56, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x54, 0x46, 0x4d, 0x41, 0x10, 0x02, 0x12, 0x09, 0x0a, - 0x05, 0x54, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x03, 0x12, 0x0a, 0x0a, 0x06, 0x43, 0x55, 0x53, 0x54, - 0x4f, 0x4d, 0x10, 0x04, 0x32, 0xec, 0x01, 0x0a, 0x14, 0x56, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, - 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0xd3, 0x01, - 0x0a, 0x15, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x56, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, 0x7a, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x31, 0x12, 0x42, 0x2e, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, - 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x62, 0x61, 0x63, - 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, - 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x56, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, 0x7a, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x35, 0x2e, 0x6b, 0x75, - 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, - 0x2e, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x62, - 0x65, 0x74, 0x61, 0x31, 0x2e, 0x56, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x22, 0x3f, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x39, 0x3a, 0x0d, 0x76, 0x69, 0x73, 0x75, - 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x28, 0x2f, 0x61, 0x70, 0x69, 0x73, - 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x76, 0x69, 0x73, 0x75, 0x61, 0x6c, 0x69, - 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x7d, 0x42, 0x98, 0x01, 0x92, 0x41, 0x58, 0x2a, 0x02, 0x01, 0x02, 0x52, 0x23, 0x0a, - 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x12, 0x18, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, - 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, - 0x75, 0x73, 0x5a, 0x1f, 0x0a, 0x1d, 0x0a, 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x12, 0x13, - 
0x08, 0x02, 0x1a, 0x0d, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x02, 0x62, 0x0c, 0x0a, 0x0a, 0x0a, 0x06, 0x42, 0x65, 0x61, 0x72, 0x65, 0x72, 0x12, - 0x00, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, - 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, - 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, 0x62, - 0x65, 0x74, 0x61, 0x31, 0x2f, 0x67, 0x6f, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x62, 0x06, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} +const file_backend_api_v2beta1_visualization_proto_rawDesc = "" + + "\n" + + "'backend/api/v2beta1/visualization.proto\x12&kubeflow.pipelines.backend.api.v2beta1\x1a\x1cgoogle/api/annotations.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\"\x97\x01\n" + + "\x1aCreateVisualizationRequest\x12[\n" + + "\rvisualization\x18\x01 \x01(\v25.kubeflow.pipelines.backend.api.v2beta1.VisualizationR\rvisualization\x12\x1c\n" + + "\tnamespace\x18\x02 \x01(\tR\tnamespace\"\x81\x02\n" + + "\rVisualization\x12N\n" + + "\x04type\x18\x01 \x01(\x0e2:.kubeflow.pipelines.backend.api.v2beta1.Visualization.TypeR\x04type\x12\x16\n" + + "\x06source\x18\x02 \x01(\tR\x06source\x12\x1c\n" + + "\targuments\x18\x03 \x01(\tR\targuments\x12\x12\n" + + "\x04html\x18\x04 \x01(\tR\x04html\x12\x14\n" + + "\x05error\x18\x05 \x01(\tR\x05error\"@\n" + + "\x04Type\x12\r\n" + + "\tROC_CURVE\x10\x00\x12\b\n" + + "\x04TFDV\x10\x01\x12\b\n" + + "\x04TFMA\x10\x02\x12\t\n" + + "\x05TABLE\x10\x03\x12\n" + + "\n" + + "\x06CUSTOM\x10\x042\xec\x01\n" + + "\x14VisualizationService\x12\xd3\x01\n" + + "\x15CreateVisualizationV1\x12B.kubeflow.pipelines.backend.api.v2beta1.CreateVisualizationRequest\x1a5.kubeflow.pipelines.backend.api.v2beta1.Visualization\"?\x82\xd3\xe4\x93\x029:\rvisualization\"(/apis/v2beta1/visualizations/{namespace}B\x98\x01\x92AX*\x02\x01\x02R#\n" + + "\adefault\x12\x18\x12\x16\n" + + "\x14\x1a\x12.google.rpc.StatusZ\x1f\n" + + "\x1d\n" + + "\x06Bearer\x12\x13\b\x02\x1a\rauthorization \x02b\f\n" + + "\n" + + "\n" + + "\x06Bearer\x12\x00Z;github.com/kubeflow/pipelines/backend/api/v2beta1/go_clientb\x06proto3" var ( file_backend_api_v2beta1_visualization_proto_rawDescOnce sync.Once - file_backend_api_v2beta1_visualization_proto_rawDescData = file_backend_api_v2beta1_visualization_proto_rawDesc + file_backend_api_v2beta1_visualization_proto_rawDescData []byte ) func file_backend_api_v2beta1_visualization_proto_rawDescGZIP() []byte { file_backend_api_v2beta1_visualization_proto_rawDescOnce.Do(func() { - file_backend_api_v2beta1_visualization_proto_rawDescData = protoimpl.X.CompressGZIP(file_backend_api_v2beta1_visualization_proto_rawDescData) + file_backend_api_v2beta1_visualization_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_visualization_proto_rawDesc), len(file_backend_api_v2beta1_visualization_proto_rawDesc))) }) return file_backend_api_v2beta1_visualization_proto_rawDescData } var file_backend_api_v2beta1_visualization_proto_enumTypes = make([]protoimpl.EnumInfo, 1) var file_backend_api_v2beta1_visualization_proto_msgTypes = make([]protoimpl.MessageInfo, 2) -var file_backend_api_v2beta1_visualization_proto_goTypes = []interface{}{ +var file_backend_api_v2beta1_visualization_proto_goTypes = []any{ (Visualization_Type)(0), // 0: kubeflow.pipelines.backend.api.v2beta1.Visualization.Type (*CreateVisualizationRequest)(nil), // 1: 
kubeflow.pipelines.backend.api.v2beta1.CreateVisualizationRequest (*Visualization)(nil), // 2: kubeflow.pipelines.backend.api.v2beta1.Visualization @@ -351,37 +304,11 @@ func file_backend_api_v2beta1_visualization_proto_init() { if File_backend_api_v2beta1_visualization_proto != nil { return } - if !protoimpl.UnsafeEnabled { - file_backend_api_v2beta1_visualization_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateVisualizationRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_backend_api_v2beta1_visualization_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Visualization); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_backend_api_v2beta1_visualization_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_visualization_proto_rawDesc), len(file_backend_api_v2beta1_visualization_proto_rawDesc)), NumEnums: 1, NumMessages: 2, NumExtensions: 0, @@ -393,87 +320,6 @@ func file_backend_api_v2beta1_visualization_proto_init() { MessageInfos: file_backend_api_v2beta1_visualization_proto_msgTypes, }.Build() File_backend_api_v2beta1_visualization_proto = out.File - file_backend_api_v2beta1_visualization_proto_rawDesc = nil file_backend_api_v2beta1_visualization_proto_goTypes = nil file_backend_api_v2beta1_visualization_proto_depIdxs = nil } - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// VisualizationServiceClient is the client API for VisualizationService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type VisualizationServiceClient interface { - CreateVisualizationV1(ctx context.Context, in *CreateVisualizationRequest, opts ...grpc.CallOption) (*Visualization, error) -} - -type visualizationServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewVisualizationServiceClient(cc grpc.ClientConnInterface) VisualizationServiceClient { - return &visualizationServiceClient{cc} -} - -func (c *visualizationServiceClient) CreateVisualizationV1(ctx context.Context, in *CreateVisualizationRequest, opts ...grpc.CallOption) (*Visualization, error) { - out := new(Visualization) - err := c.cc.Invoke(ctx, "/kubeflow.pipelines.backend.api.v2beta1.VisualizationService/CreateVisualizationV1", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// VisualizationServiceServer is the server API for VisualizationService service. -type VisualizationServiceServer interface { - CreateVisualizationV1(context.Context, *CreateVisualizationRequest) (*Visualization, error) -} - -// UnimplementedVisualizationServiceServer can be embedded to have forward compatible implementations. 
-type UnimplementedVisualizationServiceServer struct { -} - -func (*UnimplementedVisualizationServiceServer) CreateVisualizationV1(context.Context, *CreateVisualizationRequest) (*Visualization, error) { - return nil, status.Errorf(codes.Unimplemented, "method CreateVisualizationV1 not implemented") -} - -func RegisterVisualizationServiceServer(s *grpc.Server, srv VisualizationServiceServer) { - s.RegisterService(&_VisualizationService_serviceDesc, srv) -} - -func _VisualizationService_CreateVisualizationV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CreateVisualizationRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(VisualizationServiceServer).CreateVisualizationV1(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/kubeflow.pipelines.backend.api.v2beta1.VisualizationService/CreateVisualizationV1", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(VisualizationServiceServer).CreateVisualizationV1(ctx, req.(*CreateVisualizationRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _VisualizationService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "kubeflow.pipelines.backend.api.v2beta1.VisualizationService", - HandlerType: (*VisualizationServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "CreateVisualizationV1", - Handler: _VisualizationService_CreateVisualizationV1_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "backend/api/v2beta1/visualization.proto", -} diff --git a/backend/api/v2beta1/go_client/visualization.pb.gw.go b/backend/api/v2beta1/go_client/visualization.pb.gw.go index 178660b4af2..aa8ddc90e2b 100644 --- a/backend/api/v2beta1/go_client/visualization.pb.gw.go +++ b/backend/api/v2beta1/go_client/visualization.pb.gw.go @@ -10,126 +10,101 @@ package go_client import ( "context" + "errors" "io" "net/http" - "github.com/golang/protobuf/descriptor" - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" ) // Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = descriptor.ForMessage -var _ = metadata.Join +var ( + _ codes.Code + _ io.Reader + _ status.Status + _ = errors.New + _ = runtime.String + _ = utilities.NewDoubleArray + _ = metadata.Join +) func request_VisualizationService_CreateVisualizationV1_0(ctx context.Context, marshaler runtime.Marshaler, client VisualizationServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreateVisualizationRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Visualization); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - var ( - val string - 
ok bool - err error - _ = err + protoReq CreateVisualizationRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["namespace"] + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Visualization); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["namespace"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace") } - protoReq.Namespace, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err) } - msg, err := client.CreateVisualizationV1(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err - } func local_request_VisualizationService_CreateVisualizationV1_0(ctx context.Context, marshaler runtime.Marshaler, server VisualizationServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq CreateVisualizationRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Visualization); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - var ( - val string - ok bool - err error - _ = err + protoReq CreateVisualizationRequest + metadata runtime.ServerMetadata + err error ) - - val, ok = pathParams["namespace"] + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Visualization); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + val, ok := pathParams["namespace"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace") } - protoReq.Namespace, err = runtime.String(val) - if err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err) } - msg, err := server.CreateVisualizationV1(ctx, &protoReq) return msg, metadata, err - } // RegisterVisualizationServiceHandlerServer registers the http handlers for service VisualizationService to "mux". // UnaryRPC :call VisualizationServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. // Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterVisualizationServiceHandlerFromEndpoint instead. +// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call. 
func RegisterVisualizationServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server VisualizationServiceServer) error { - - mux.Handle("POST", pattern_VisualizationService_CreateVisualizationV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_VisualizationService_CreateVisualizationV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() var stream runtime.ServerTransportStream ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.VisualizationService/CreateVisualizationV1", runtime.WithHTTPPathPattern("/apis/v2beta1/visualizations/{namespace}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := local_request_VisualizationService_CreateVisualizationV1_0(rctx, inboundMarshaler, server, req, pathParams) + resp, md, err := local_request_VisualizationService_CreateVisualizationV1_0(annotatedContext, inboundMarshaler, server, req, pathParams) md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_VisualizationService_CreateVisualizationV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_VisualizationService_CreateVisualizationV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) return nil @@ -138,25 +113,24 @@ func RegisterVisualizationServiceHandlerServer(ctx context.Context, mux *runtime // RegisterVisualizationServiceHandlerFromEndpoint is same as RegisterVisualizationServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterVisualizationServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) + conn, err := grpc.NewClient(endpoint, opts...) if err != nil { return err } defer func() { if err != nil { if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } return } go func() { <-ctx.Done() if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) } }() }() - return RegisterVisualizationServiceHandler(ctx, mux, conn) } @@ -170,34 +144,30 @@ func RegisterVisualizationServiceHandler(ctx context.Context, mux *runtime.Serve // to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "VisualizationServiceClient". // Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "VisualizationServiceClient" // doesn't go through the normal gRPC flow (creating a gRPC client etc.) 
then it will be up to the passed in -// "VisualizationServiceClient" to call the correct interceptors. +// "VisualizationServiceClient" to call the correct interceptors. This client ignores the HTTP middlewares. func RegisterVisualizationServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client VisualizationServiceClient) error { - - mux.Handle("POST", pattern_VisualizationService_CreateVisualizationV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle(http.MethodPost, pattern_VisualizationService_CreateVisualizationV1_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.VisualizationService/CreateVisualizationV1", runtime.WithHTTPPathPattern("/apis/v2beta1/visualizations/{namespace}")) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_VisualizationService_CreateVisualizationV1_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) + resp, md, err := request_VisualizationService_CreateVisualizationV1_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) return } - - forward_VisualizationService_CreateVisualizationV1_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - + forward_VisualizationService_CreateVisualizationV1_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) - return nil } var ( - pattern_VisualizationService_CreateVisualizationV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "visualizations", "namespace"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_VisualizationService_CreateVisualizationV1_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "visualizations", "namespace"}, "")) ) var ( diff --git a/backend/api/v2beta1/go_client/visualization_grpc.pb.go b/backend/api/v2beta1/go_client/visualization_grpc.pb.go new file mode 100644 index 00000000000..d79a7a0a30e --- /dev/null +++ b/backend/api/v2beta1/go_client/visualization_grpc.pb.go @@ -0,0 +1,135 @@ +// Copyright 2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. 
+// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v6.31.1 +// source: backend/api/v2beta1/visualization.proto + +package go_client + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + VisualizationService_CreateVisualizationV1_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.VisualizationService/CreateVisualizationV1" +) + +// VisualizationServiceClient is the client API for VisualizationService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type VisualizationServiceClient interface { + CreateVisualizationV1(ctx context.Context, in *CreateVisualizationRequest, opts ...grpc.CallOption) (*Visualization, error) +} + +type visualizationServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewVisualizationServiceClient(cc grpc.ClientConnInterface) VisualizationServiceClient { + return &visualizationServiceClient{cc} +} + +func (c *visualizationServiceClient) CreateVisualizationV1(ctx context.Context, in *CreateVisualizationRequest, opts ...grpc.CallOption) (*Visualization, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Visualization) + err := c.cc.Invoke(ctx, VisualizationService_CreateVisualizationV1_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// VisualizationServiceServer is the server API for VisualizationService service. +// All implementations must embed UnimplementedVisualizationServiceServer +// for forward compatibility. +type VisualizationServiceServer interface { + CreateVisualizationV1(context.Context, *CreateVisualizationRequest) (*Visualization, error) + mustEmbedUnimplementedVisualizationServiceServer() +} + +// UnimplementedVisualizationServiceServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. +type UnimplementedVisualizationServiceServer struct{} + +func (UnimplementedVisualizationServiceServer) CreateVisualizationV1(context.Context, *CreateVisualizationRequest) (*Visualization, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreateVisualizationV1 not implemented") +} +func (UnimplementedVisualizationServiceServer) mustEmbedUnimplementedVisualizationServiceServer() {} +func (UnimplementedVisualizationServiceServer) testEmbeddedByValue() {} + +// UnsafeVisualizationServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to VisualizationServiceServer will +// result in compilation errors. +type UnsafeVisualizationServiceServer interface { + mustEmbedUnimplementedVisualizationServiceServer() +} + +func RegisterVisualizationServiceServer(s grpc.ServiceRegistrar, srv VisualizationServiceServer) { + // If the following call pancis, it indicates UnimplementedVisualizationServiceServer was + // embedded by pointer and is nil. 
This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. + if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&VisualizationService_ServiceDesc, srv) +} + +func _VisualizationService_CreateVisualizationV1_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateVisualizationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VisualizationServiceServer).CreateVisualizationV1(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: VisualizationService_CreateVisualizationV1_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VisualizationServiceServer).CreateVisualizationV1(ctx, req.(*CreateVisualizationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// VisualizationService_ServiceDesc is the grpc.ServiceDesc for VisualizationService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var VisualizationService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "kubeflow.pipelines.backend.api.v2beta1.VisualizationService", + HandlerType: (*VisualizationServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateVisualizationV1", + Handler: _VisualizationService_CreateVisualizationV1_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "backend/api/v2beta1/visualization.proto", +} diff --git a/backend/api/v2beta1/go_http_client/experiment_client/experiment_client.go b/backend/api/v2beta1/go_http_client/experiment_client/experiment_client.go index 86641fdf126..5039c046829 100644 --- a/backend/api/v2beta1/go_http_client/experiment_client/experiment_client.go +++ b/backend/api/v2beta1/go_http_client/experiment_client/experiment_client.go @@ -8,8 +8,7 @@ package experiment_client import ( "github.com/go-openapi/runtime" httptransport "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_client/experiment_service" ) @@ -56,9 +55,7 @@ func New(transport runtime.ClientTransport, formats strfmt.Registry) *Experiment cli := new(Experiment) cli.Transport = transport - cli.ExperimentService = experiment_service.New(transport, formats) - return cli } @@ -103,7 +100,7 @@ func (cfg *TransportConfig) WithSchemes(schemes []string) *TransportConfig { // Experiment is a client for experiment type Experiment struct { - ExperimentService *experiment_service.Client + ExperimentService experiment_service.ClientService Transport runtime.ClientTransport } @@ -111,7 +108,5 @@ type Experiment struct { // SetTransport changes the transport on the client and all its subresources func (c *Experiment) SetTransport(transport runtime.ClientTransport) { c.Transport = transport - c.ExperimentService.SetTransport(transport) - } diff --git a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_archive_experiment_parameters.go b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_archive_experiment_parameters.go index 07435754939..2450bfcd789 100644 --- 
a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_archive_experiment_parameters.go +++ b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_archive_experiment_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewExperimentServiceArchiveExperimentParams creates a new ExperimentServiceArchiveExperimentParams object -// with the default values initialized. +// NewExperimentServiceArchiveExperimentParams creates a new ExperimentServiceArchiveExperimentParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewExperimentServiceArchiveExperimentParams() *ExperimentServiceArchiveExperimentParams { - var () return &ExperimentServiceArchiveExperimentParams{ - timeout: cr.DefaultTimeout, } } // NewExperimentServiceArchiveExperimentParamsWithTimeout creates a new ExperimentServiceArchiveExperimentParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewExperimentServiceArchiveExperimentParamsWithTimeout(timeout time.Duration) *ExperimentServiceArchiveExperimentParams { - var () return &ExperimentServiceArchiveExperimentParams{ - timeout: timeout, } } // NewExperimentServiceArchiveExperimentParamsWithContext creates a new ExperimentServiceArchiveExperimentParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewExperimentServiceArchiveExperimentParamsWithContext(ctx context.Context) *ExperimentServiceArchiveExperimentParams { - var () return &ExperimentServiceArchiveExperimentParams{ - Context: ctx, } } // NewExperimentServiceArchiveExperimentParamsWithHTTPClient creates a new ExperimentServiceArchiveExperimentParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewExperimentServiceArchiveExperimentParamsWithHTTPClient(client *http.Client) *ExperimentServiceArchiveExperimentParams { - var () return &ExperimentServiceArchiveExperimentParams{ HTTPClient: client, } } -/*ExperimentServiceArchiveExperimentParams contains all the parameters to send to the API endpoint -for the experiment service archive experiment operation typically these are written to a http.Request +/* +ExperimentServiceArchiveExperimentParams contains all the parameters to send to the API endpoint + + for the experiment service archive experiment operation. + + Typically these are written to a http.Request. */ type ExperimentServiceArchiveExperimentParams struct { - /*ExperimentID - The ID of the experiment to be archived. + /* ExperimentID. + The ID of the experiment to be archived. */ ExperimentID string @@ -72,6 +72,21 @@ type ExperimentServiceArchiveExperimentParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the experiment service archive experiment params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *ExperimentServiceArchiveExperimentParams) WithDefaults() *ExperimentServiceArchiveExperimentParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the experiment service archive experiment params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ExperimentServiceArchiveExperimentParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the experiment service archive experiment params func (o *ExperimentServiceArchiveExperimentParams) WithTimeout(timeout time.Duration) *ExperimentServiceArchiveExperimentParams { o.SetTimeout(timeout) diff --git a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_archive_experiment_responses.go b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_archive_experiment_responses.go index d0e4155fd99..263528aae90 100644 --- a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_archive_experiment_responses.go +++ b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_archive_experiment_responses.go @@ -6,14 +6,14 @@ package experiment_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - experiment_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" ) // ExperimentServiceArchiveExperimentReader is a Reader for the ExperimentServiceArchiveExperiment structure. @@ -24,14 +24,12 @@ type ExperimentServiceArchiveExperimentReader struct { // ReadResponse reads a server response into the received o. func (o *ExperimentServiceArchiveExperimentReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewExperimentServiceArchiveExperimentOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewExperimentServiceArchiveExperimentDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewExperimentServiceArchiveExperimentOK() *ExperimentServiceArchiveExperime return &ExperimentServiceArchiveExperimentOK{} } -/*ExperimentServiceArchiveExperimentOK handles this case with default header values. +/* +ExperimentServiceArchiveExperimentOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type ExperimentServiceArchiveExperimentOK struct { Payload interface{} } +// IsSuccess returns true when this experiment service archive experiment o k response has a 2xx status code +func (o *ExperimentServiceArchiveExperimentOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this experiment service archive experiment o k response has a 3xx status code +func (o *ExperimentServiceArchiveExperimentOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this experiment service archive experiment o k response has a 4xx status code +func (o *ExperimentServiceArchiveExperimentOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this experiment service archive experiment o k response has a 5xx status code +func (o *ExperimentServiceArchiveExperimentOK) IsServerError() bool { + return false +} + +// IsCode returns true when this experiment service archive experiment o k response a status code equal to that given +func (o *ExperimentServiceArchiveExperimentOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the experiment service archive experiment o k response +func (o *ExperimentServiceArchiveExperimentOK) Code() int { + return 200 +} + func (o *ExperimentServiceArchiveExperimentOK) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/experiments/{experiment_id}:archive][%d] experimentServiceArchiveExperimentOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/experiments/{experiment_id}:archive][%d] experimentServiceArchiveExperimentOK %s", 200, payload) +} + +func (o *ExperimentServiceArchiveExperimentOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/experiments/{experiment_id}:archive][%d] experimentServiceArchiveExperimentOK %s", 200, payload) +} + +func (o *ExperimentServiceArchiveExperimentOK) GetPayload() interface{} { + return o.Payload } func (o *ExperimentServiceArchiveExperimentOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewExperimentServiceArchiveExperimentDefault(code int) *ExperimentServiceAr } } -/*ExperimentServiceArchiveExperimentDefault handles this case with default header values. +/* +ExperimentServiceArchiveExperimentDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type ExperimentServiceArchiveExperimentDefault struct { _statusCode int - Payload *experiment_model.RuntimeError + Payload *experiment_model.GooglerpcStatus +} + +// IsSuccess returns true when this experiment service archive experiment default response has a 2xx status code +func (o *ExperimentServiceArchiveExperimentDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this experiment service archive experiment default response has a 3xx status code +func (o *ExperimentServiceArchiveExperimentDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this experiment service archive experiment default response has a 4xx status code +func (o *ExperimentServiceArchiveExperimentDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this experiment service archive experiment default response has a 5xx status code +func (o *ExperimentServiceArchiveExperimentDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this experiment service archive experiment default response a status code equal to that given +func (o *ExperimentServiceArchiveExperimentDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the experiment service archive experiment default response @@ -94,12 +159,22 @@ func (o *ExperimentServiceArchiveExperimentDefault) Code() int { } func (o *ExperimentServiceArchiveExperimentDefault) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/experiments/{experiment_id}:archive][%d] ExperimentService_ArchiveExperiment default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/experiments/{experiment_id}:archive][%d] ExperimentService_ArchiveExperiment default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceArchiveExperimentDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/experiments/{experiment_id}:archive][%d] ExperimentService_ArchiveExperiment default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceArchiveExperimentDefault) GetPayload() *experiment_model.GooglerpcStatus { + return o.Payload } func (o *ExperimentServiceArchiveExperimentDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(experiment_model.RuntimeError) + o.Payload = new(experiment_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_client.go b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_client.go index a8e10b88934..a77a67330b4 100644 --- a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_client.go +++ b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_client.go @@ -7,15 +7,40 @@ package experiment_service import ( "github.com/go-openapi/runtime" - - strfmt "github.com/go-openapi/strfmt" + httptransport "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" ) // New creates a new experiment service API client. 
-func New(transport runtime.ClientTransport, formats strfmt.Registry) *Client { +func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService { return &Client{transport: transport, formats: formats} } +// New creates a new experiment service API client with basic auth credentials. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - user: user for basic authentication header. +// - password: password for basic authentication header. +func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BasicAuth(user, password) + return &Client{transport: transport, formats: strfmt.Default} +} + +// New creates a new experiment service API client with a bearer token for authentication. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - bearerToken: bearer token for Bearer authentication header. +func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BearerToken(bearerToken) + return &Client{transport: transport, formats: strfmt.Default} +} + /* Client for experiment service API */ @@ -24,16 +49,35 @@ type Client struct { formats strfmt.Registry } +// ClientOption may be used to customize the behavior of Client methods. +type ClientOption func(*runtime.ClientOperation) + +// ClientService is the interface for Client methods +type ClientService interface { + ExperimentServiceArchiveExperiment(params *ExperimentServiceArchiveExperimentParams, opts ...ClientOption) (*ExperimentServiceArchiveExperimentOK, error) + + ExperimentServiceCreateExperiment(params *ExperimentServiceCreateExperimentParams, opts ...ClientOption) (*ExperimentServiceCreateExperimentOK, error) + + ExperimentServiceDeleteExperiment(params *ExperimentServiceDeleteExperimentParams, opts ...ClientOption) (*ExperimentServiceDeleteExperimentOK, error) + + ExperimentServiceGetExperiment(params *ExperimentServiceGetExperimentParams, opts ...ClientOption) (*ExperimentServiceGetExperimentOK, error) + + ExperimentServiceListExperiments(params *ExperimentServiceListExperimentsParams, opts ...ClientOption) (*ExperimentServiceListExperimentsOK, error) + + ExperimentServiceUnarchiveExperiment(params *ExperimentServiceUnarchiveExperimentParams, opts ...ClientOption) (*ExperimentServiceUnarchiveExperimentOK, error) + + SetTransport(transport runtime.ClientTransport) +} + /* ExperimentServiceArchiveExperiment archives an experiment and the experiment s runs and recurring runs */ -func (a *Client) ExperimentServiceArchiveExperiment(params *ExperimentServiceArchiveExperimentParams) (*ExperimentServiceArchiveExperimentOK, error) { +func (a *Client) ExperimentServiceArchiveExperiment(params *ExperimentServiceArchiveExperimentParams, opts ...ClientOption) (*ExperimentServiceArchiveExperimentOK, error) { // TODO: Validate the params before sending if params == nil { params = NewExperimentServiceArchiveExperimentParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "ExperimentService_ArchiveExperiment", Method: 
"POST", PathPattern: "/apis/v2beta1/experiments/{experiment_id}:archive", @@ -44,24 +88,33 @@ func (a *Client) ExperimentServiceArchiveExperiment(params *ExperimentServiceArc Reader: &ExperimentServiceArchiveExperimentReader{formats: a.formats}, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*ExperimentServiceArchiveExperimentOK), nil - + success, ok := result.(*ExperimentServiceArchiveExperimentOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ExperimentServiceArchiveExperimentDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* ExperimentServiceCreateExperiment creates a new experiment */ -func (a *Client) ExperimentServiceCreateExperiment(params *ExperimentServiceCreateExperimentParams) (*ExperimentServiceCreateExperimentOK, error) { +func (a *Client) ExperimentServiceCreateExperiment(params *ExperimentServiceCreateExperimentParams, opts ...ClientOption) (*ExperimentServiceCreateExperimentOK, error) { // TODO: Validate the params before sending if params == nil { params = NewExperimentServiceCreateExperimentParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "ExperimentService_CreateExperiment", Method: "POST", PathPattern: "/apis/v2beta1/experiments", @@ -72,24 +125,33 @@ func (a *Client) ExperimentServiceCreateExperiment(params *ExperimentServiceCrea Reader: &ExperimentServiceCreateExperimentReader{formats: a.formats}, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*ExperimentServiceCreateExperimentOK), nil - + success, ok := result.(*ExperimentServiceCreateExperimentOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ExperimentServiceCreateExperimentDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* ExperimentServiceDeleteExperiment deletes an experiment without deleting the experiment s runs and recurring runs to avoid unexpected behaviors delete an experiment s runs and recurring runs before deleting the experiment */ -func (a *Client) ExperimentServiceDeleteExperiment(params *ExperimentServiceDeleteExperimentParams) (*ExperimentServiceDeleteExperimentOK, error) { +func (a *Client) ExperimentServiceDeleteExperiment(params *ExperimentServiceDeleteExperimentParams, opts ...ClientOption) (*ExperimentServiceDeleteExperimentOK, error) { // TODO: Validate the params before sending if params == nil { params = NewExperimentServiceDeleteExperimentParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "ExperimentService_DeleteExperiment", Method: "DELETE", PathPattern: "/apis/v2beta1/experiments/{experiment_id}", @@ -100,24 +162,33 @@ func (a *Client) ExperimentServiceDeleteExperiment(params *ExperimentServiceDele Reader: &ExperimentServiceDeleteExperimentReader{formats: a.formats}, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil 
{ return nil, err } - return result.(*ExperimentServiceDeleteExperimentOK), nil - + success, ok := result.(*ExperimentServiceDeleteExperimentOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ExperimentServiceDeleteExperimentDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* ExperimentServiceGetExperiment finds a specific experiment by ID */ -func (a *Client) ExperimentServiceGetExperiment(params *ExperimentServiceGetExperimentParams) (*ExperimentServiceGetExperimentOK, error) { +func (a *Client) ExperimentServiceGetExperiment(params *ExperimentServiceGetExperimentParams, opts ...ClientOption) (*ExperimentServiceGetExperimentOK, error) { // TODO: Validate the params before sending if params == nil { params = NewExperimentServiceGetExperimentParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "ExperimentService_GetExperiment", Method: "GET", PathPattern: "/apis/v2beta1/experiments/{experiment_id}", @@ -128,24 +199,33 @@ func (a *Client) ExperimentServiceGetExperiment(params *ExperimentServiceGetExpe Reader: &ExperimentServiceGetExperimentReader{formats: a.formats}, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*ExperimentServiceGetExperimentOK), nil - + success, ok := result.(*ExperimentServiceGetExperimentOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ExperimentServiceGetExperimentDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* ExperimentServiceListExperiments finds all experiments supports pagination and sorting on certain fields */ -func (a *Client) ExperimentServiceListExperiments(params *ExperimentServiceListExperimentsParams) (*ExperimentServiceListExperimentsOK, error) { +func (a *Client) ExperimentServiceListExperiments(params *ExperimentServiceListExperimentsParams, opts ...ClientOption) (*ExperimentServiceListExperimentsOK, error) { // TODO: Validate the params before sending if params == nil { params = NewExperimentServiceListExperimentsParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "ExperimentService_ListExperiments", Method: "GET", PathPattern: "/apis/v2beta1/experiments", @@ -156,24 +236,33 @@ func (a *Client) ExperimentServiceListExperiments(params *ExperimentServiceListE Reader: &ExperimentServiceListExperimentsReader{formats: a.formats}, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*ExperimentServiceListExperimentsOK), nil - + success, ok := result.(*ExperimentServiceListExperimentsOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ExperimentServiceListExperimentsDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* ExperimentServiceUnarchiveExperiment restores an archived experiment the experiment s archived runs and recurring runs 
will stay archived */ -func (a *Client) ExperimentServiceUnarchiveExperiment(params *ExperimentServiceUnarchiveExperimentParams) (*ExperimentServiceUnarchiveExperimentOK, error) { +func (a *Client) ExperimentServiceUnarchiveExperiment(params *ExperimentServiceUnarchiveExperimentParams, opts ...ClientOption) (*ExperimentServiceUnarchiveExperimentOK, error) { // TODO: Validate the params before sending if params == nil { params = NewExperimentServiceUnarchiveExperimentParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "ExperimentService_UnarchiveExperiment", Method: "POST", PathPattern: "/apis/v2beta1/experiments/{experiment_id}:unarchive", @@ -184,12 +273,22 @@ func (a *Client) ExperimentServiceUnarchiveExperiment(params *ExperimentServiceU Reader: &ExperimentServiceUnarchiveExperimentReader{formats: a.formats}, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*ExperimentServiceUnarchiveExperimentOK), nil - + success, ok := result.(*ExperimentServiceUnarchiveExperimentOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ExperimentServiceUnarchiveExperimentDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } // SetTransport changes the transport on the client diff --git a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_create_experiment_parameters.go b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_create_experiment_parameters.go index 1b9bcbff38a..090caa7fab3 100644 --- a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_create_experiment_parameters.go +++ b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_create_experiment_parameters.go @@ -13,67 +13,82 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - experiment_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" ) -// NewExperimentServiceCreateExperimentParams creates a new ExperimentServiceCreateExperimentParams object -// with the default values initialized. +// NewExperimentServiceCreateExperimentParams creates a new ExperimentServiceCreateExperimentParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewExperimentServiceCreateExperimentParams() *ExperimentServiceCreateExperimentParams { - var () return &ExperimentServiceCreateExperimentParams{ - timeout: cr.DefaultTimeout, } } // NewExperimentServiceCreateExperimentParamsWithTimeout creates a new ExperimentServiceCreateExperimentParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. 
func NewExperimentServiceCreateExperimentParamsWithTimeout(timeout time.Duration) *ExperimentServiceCreateExperimentParams { - var () return &ExperimentServiceCreateExperimentParams{ - timeout: timeout, } } // NewExperimentServiceCreateExperimentParamsWithContext creates a new ExperimentServiceCreateExperimentParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewExperimentServiceCreateExperimentParamsWithContext(ctx context.Context) *ExperimentServiceCreateExperimentParams { - var () return &ExperimentServiceCreateExperimentParams{ - Context: ctx, } } // NewExperimentServiceCreateExperimentParamsWithHTTPClient creates a new ExperimentServiceCreateExperimentParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewExperimentServiceCreateExperimentParamsWithHTTPClient(client *http.Client) *ExperimentServiceCreateExperimentParams { - var () return &ExperimentServiceCreateExperimentParams{ HTTPClient: client, } } -/*ExperimentServiceCreateExperimentParams contains all the parameters to send to the API endpoint -for the experiment service create experiment operation typically these are written to a http.Request +/* +ExperimentServiceCreateExperimentParams contains all the parameters to send to the API endpoint + + for the experiment service create experiment operation. + + Typically these are written to a http.Request. */ type ExperimentServiceCreateExperimentParams struct { - /*Body - The experiment to be created. + /* Experiment. + The experiment to be created. */ - Body *experiment_model.V2beta1Experiment + Experiment *experiment_model.V2beta1Experiment timeout time.Duration Context context.Context HTTPClient *http.Client } +// WithDefaults hydrates default values in the experiment service create experiment params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ExperimentServiceCreateExperimentParams) WithDefaults() *ExperimentServiceCreateExperimentParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the experiment service create experiment params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *ExperimentServiceCreateExperimentParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the experiment service create experiment params func (o *ExperimentServiceCreateExperimentParams) WithTimeout(timeout time.Duration) *ExperimentServiceCreateExperimentParams { o.SetTimeout(timeout) @@ -107,15 +122,15 @@ func (o *ExperimentServiceCreateExperimentParams) SetHTTPClient(client *http.Cli o.HTTPClient = client } -// WithBody adds the body to the experiment service create experiment params -func (o *ExperimentServiceCreateExperimentParams) WithBody(body *experiment_model.V2beta1Experiment) *ExperimentServiceCreateExperimentParams { - o.SetBody(body) +// WithExperiment adds the experiment to the experiment service create experiment params +func (o *ExperimentServiceCreateExperimentParams) WithExperiment(experiment *experiment_model.V2beta1Experiment) *ExperimentServiceCreateExperimentParams { + o.SetExperiment(experiment) return o } -// SetBody adds the body to the experiment service create experiment params -func (o *ExperimentServiceCreateExperimentParams) SetBody(body *experiment_model.V2beta1Experiment) { - o.Body = body +// SetExperiment adds the experiment to the experiment service create experiment params +func (o *ExperimentServiceCreateExperimentParams) SetExperiment(experiment *experiment_model.V2beta1Experiment) { + o.Experiment = experiment } // WriteToRequest writes these params to a swagger request @@ -125,9 +140,8 @@ func (o *ExperimentServiceCreateExperimentParams) WriteToRequest(r runtime.Clien return err } var res []error - - if o.Body != nil { - if err := r.SetBodyParam(o.Body); err != nil { + if o.Experiment != nil { + if err := r.SetBodyParam(o.Experiment); err != nil { return err } } diff --git a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_create_experiment_responses.go b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_create_experiment_responses.go index 1a990faff53..019fb7266d2 100644 --- a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_create_experiment_responses.go +++ b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_create_experiment_responses.go @@ -6,14 +6,14 @@ package experiment_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - experiment_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" ) // ExperimentServiceCreateExperimentReader is a Reader for the ExperimentServiceCreateExperiment structure. @@ -24,14 +24,12 @@ type ExperimentServiceCreateExperimentReader struct { // ReadResponse reads a server response into the received o. 
func (o *ExperimentServiceCreateExperimentReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewExperimentServiceCreateExperimentOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewExperimentServiceCreateExperimentDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewExperimentServiceCreateExperimentOK() *ExperimentServiceCreateExperiment return &ExperimentServiceCreateExperimentOK{} } -/*ExperimentServiceCreateExperimentOK handles this case with default header values. +/* +ExperimentServiceCreateExperimentOK describes a response with status code 200, with default header values. A successful response. */ @@ -57,8 +56,48 @@ type ExperimentServiceCreateExperimentOK struct { Payload *experiment_model.V2beta1Experiment } +// IsSuccess returns true when this experiment service create experiment o k response has a 2xx status code +func (o *ExperimentServiceCreateExperimentOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this experiment service create experiment o k response has a 3xx status code +func (o *ExperimentServiceCreateExperimentOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this experiment service create experiment o k response has a 4xx status code +func (o *ExperimentServiceCreateExperimentOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this experiment service create experiment o k response has a 5xx status code +func (o *ExperimentServiceCreateExperimentOK) IsServerError() bool { + return false +} + +// IsCode returns true when this experiment service create experiment o k response a status code equal to that given +func (o *ExperimentServiceCreateExperimentOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the experiment service create experiment o k response +func (o *ExperimentServiceCreateExperimentOK) Code() int { + return 200 +} + func (o *ExperimentServiceCreateExperimentOK) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/experiments][%d] experimentServiceCreateExperimentOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/experiments][%d] experimentServiceCreateExperimentOK %s", 200, payload) +} + +func (o *ExperimentServiceCreateExperimentOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/experiments][%d] experimentServiceCreateExperimentOK %s", 200, payload) +} + +func (o *ExperimentServiceCreateExperimentOK) GetPayload() *experiment_model.V2beta1Experiment { + return o.Payload } func (o *ExperimentServiceCreateExperimentOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewExperimentServiceCreateExperimentDefault(code int) *ExperimentServiceCre } } -/*ExperimentServiceCreateExperimentDefault handles this case with default header values. +/* +ExperimentServiceCreateExperimentDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type ExperimentServiceCreateExperimentDefault struct { _statusCode int - Payload *experiment_model.RuntimeError + Payload *experiment_model.GooglerpcStatus +} + +// IsSuccess returns true when this experiment service create experiment default response has a 2xx status code +func (o *ExperimentServiceCreateExperimentDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this experiment service create experiment default response has a 3xx status code +func (o *ExperimentServiceCreateExperimentDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this experiment service create experiment default response has a 4xx status code +func (o *ExperimentServiceCreateExperimentDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this experiment service create experiment default response has a 5xx status code +func (o *ExperimentServiceCreateExperimentDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this experiment service create experiment default response a status code equal to that given +func (o *ExperimentServiceCreateExperimentDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the experiment service create experiment default response @@ -96,12 +161,22 @@ func (o *ExperimentServiceCreateExperimentDefault) Code() int { } func (o *ExperimentServiceCreateExperimentDefault) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/experiments][%d] ExperimentService_CreateExperiment default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/experiments][%d] ExperimentService_CreateExperiment default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceCreateExperimentDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/experiments][%d] ExperimentService_CreateExperiment default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceCreateExperimentDefault) GetPayload() *experiment_model.GooglerpcStatus { + return o.Payload } func (o *ExperimentServiceCreateExperimentDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(experiment_model.RuntimeError) + o.Payload = new(experiment_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_delete_experiment_parameters.go b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_delete_experiment_parameters.go index 1a59065c1cc..aa1344a5829 100644 --- a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_delete_experiment_parameters.go +++ b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_delete_experiment_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewExperimentServiceDeleteExperimentParams creates a new ExperimentServiceDeleteExperimentParams object -// with the default values initialized. 
+// NewExperimentServiceDeleteExperimentParams creates a new ExperimentServiceDeleteExperimentParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewExperimentServiceDeleteExperimentParams() *ExperimentServiceDeleteExperimentParams { - var () return &ExperimentServiceDeleteExperimentParams{ - timeout: cr.DefaultTimeout, } } // NewExperimentServiceDeleteExperimentParamsWithTimeout creates a new ExperimentServiceDeleteExperimentParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewExperimentServiceDeleteExperimentParamsWithTimeout(timeout time.Duration) *ExperimentServiceDeleteExperimentParams { - var () return &ExperimentServiceDeleteExperimentParams{ - timeout: timeout, } } // NewExperimentServiceDeleteExperimentParamsWithContext creates a new ExperimentServiceDeleteExperimentParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewExperimentServiceDeleteExperimentParamsWithContext(ctx context.Context) *ExperimentServiceDeleteExperimentParams { - var () return &ExperimentServiceDeleteExperimentParams{ - Context: ctx, } } // NewExperimentServiceDeleteExperimentParamsWithHTTPClient creates a new ExperimentServiceDeleteExperimentParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewExperimentServiceDeleteExperimentParamsWithHTTPClient(client *http.Client) *ExperimentServiceDeleteExperimentParams { - var () return &ExperimentServiceDeleteExperimentParams{ HTTPClient: client, } } -/*ExperimentServiceDeleteExperimentParams contains all the parameters to send to the API endpoint -for the experiment service delete experiment operation typically these are written to a http.Request +/* +ExperimentServiceDeleteExperimentParams contains all the parameters to send to the API endpoint + + for the experiment service delete experiment operation. + + Typically these are written to a http.Request. */ type ExperimentServiceDeleteExperimentParams struct { - /*ExperimentID - The ID of the experiment to be deleted. + /* ExperimentID. + The ID of the experiment to be deleted. */ ExperimentID string @@ -72,6 +72,21 @@ type ExperimentServiceDeleteExperimentParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the experiment service delete experiment params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ExperimentServiceDeleteExperimentParams) WithDefaults() *ExperimentServiceDeleteExperimentParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the experiment service delete experiment params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *ExperimentServiceDeleteExperimentParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the experiment service delete experiment params func (o *ExperimentServiceDeleteExperimentParams) WithTimeout(timeout time.Duration) *ExperimentServiceDeleteExperimentParams { o.SetTimeout(timeout) diff --git a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_delete_experiment_responses.go b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_delete_experiment_responses.go index 92c4eb15697..aa8d6f6bf6a 100644 --- a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_delete_experiment_responses.go +++ b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_delete_experiment_responses.go @@ -6,14 +6,14 @@ package experiment_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - experiment_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" ) // ExperimentServiceDeleteExperimentReader is a Reader for the ExperimentServiceDeleteExperiment structure. @@ -24,14 +24,12 @@ type ExperimentServiceDeleteExperimentReader struct { // ReadResponse reads a server response into the received o. func (o *ExperimentServiceDeleteExperimentReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewExperimentServiceDeleteExperimentOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewExperimentServiceDeleteExperimentDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewExperimentServiceDeleteExperimentOK() *ExperimentServiceDeleteExperiment return &ExperimentServiceDeleteExperimentOK{} } -/*ExperimentServiceDeleteExperimentOK handles this case with default header values. +/* +ExperimentServiceDeleteExperimentOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type ExperimentServiceDeleteExperimentOK struct { Payload interface{} } +// IsSuccess returns true when this experiment service delete experiment o k response has a 2xx status code +func (o *ExperimentServiceDeleteExperimentOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this experiment service delete experiment o k response has a 3xx status code +func (o *ExperimentServiceDeleteExperimentOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this experiment service delete experiment o k response has a 4xx status code +func (o *ExperimentServiceDeleteExperimentOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this experiment service delete experiment o k response has a 5xx status code +func (o *ExperimentServiceDeleteExperimentOK) IsServerError() bool { + return false +} + +// IsCode returns true when this experiment service delete experiment o k response a status code equal to that given +func (o *ExperimentServiceDeleteExperimentOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the experiment service delete experiment o k response +func (o *ExperimentServiceDeleteExperimentOK) Code() int { + return 200 +} + func (o *ExperimentServiceDeleteExperimentOK) Error() string { - return fmt.Sprintf("[DELETE /apis/v2beta1/experiments/{experiment_id}][%d] experimentServiceDeleteExperimentOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v2beta1/experiments/{experiment_id}][%d] experimentServiceDeleteExperimentOK %s", 200, payload) +} + +func (o *ExperimentServiceDeleteExperimentOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v2beta1/experiments/{experiment_id}][%d] experimentServiceDeleteExperimentOK %s", 200, payload) +} + +func (o *ExperimentServiceDeleteExperimentOK) GetPayload() interface{} { + return o.Payload } func (o *ExperimentServiceDeleteExperimentOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewExperimentServiceDeleteExperimentDefault(code int) *ExperimentServiceDel } } -/*ExperimentServiceDeleteExperimentDefault handles this case with default header values. +/* +ExperimentServiceDeleteExperimentDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type ExperimentServiceDeleteExperimentDefault struct { _statusCode int - Payload *experiment_model.RuntimeError + Payload *experiment_model.GooglerpcStatus +} + +// IsSuccess returns true when this experiment service delete experiment default response has a 2xx status code +func (o *ExperimentServiceDeleteExperimentDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this experiment service delete experiment default response has a 3xx status code +func (o *ExperimentServiceDeleteExperimentDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this experiment service delete experiment default response has a 4xx status code +func (o *ExperimentServiceDeleteExperimentDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this experiment service delete experiment default response has a 5xx status code +func (o *ExperimentServiceDeleteExperimentDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this experiment service delete experiment default response a status code equal to that given +func (o *ExperimentServiceDeleteExperimentDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the experiment service delete experiment default response @@ -94,12 +159,22 @@ func (o *ExperimentServiceDeleteExperimentDefault) Code() int { } func (o *ExperimentServiceDeleteExperimentDefault) Error() string { - return fmt.Sprintf("[DELETE /apis/v2beta1/experiments/{experiment_id}][%d] ExperimentService_DeleteExperiment default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v2beta1/experiments/{experiment_id}][%d] ExperimentService_DeleteExperiment default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceDeleteExperimentDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v2beta1/experiments/{experiment_id}][%d] ExperimentService_DeleteExperiment default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceDeleteExperimentDefault) GetPayload() *experiment_model.GooglerpcStatus { + return o.Payload } func (o *ExperimentServiceDeleteExperimentDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(experiment_model.RuntimeError) + o.Payload = new(experiment_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_get_experiment_parameters.go b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_get_experiment_parameters.go index f6e57e728ed..755019b2f84 100644 --- a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_get_experiment_parameters.go +++ b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_get_experiment_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewExperimentServiceGetExperimentParams creates a new ExperimentServiceGetExperimentParams object -// with the default values initialized. 
+// NewExperimentServiceGetExperimentParams creates a new ExperimentServiceGetExperimentParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewExperimentServiceGetExperimentParams() *ExperimentServiceGetExperimentParams { - var () return &ExperimentServiceGetExperimentParams{ - timeout: cr.DefaultTimeout, } } // NewExperimentServiceGetExperimentParamsWithTimeout creates a new ExperimentServiceGetExperimentParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewExperimentServiceGetExperimentParamsWithTimeout(timeout time.Duration) *ExperimentServiceGetExperimentParams { - var () return &ExperimentServiceGetExperimentParams{ - timeout: timeout, } } // NewExperimentServiceGetExperimentParamsWithContext creates a new ExperimentServiceGetExperimentParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewExperimentServiceGetExperimentParamsWithContext(ctx context.Context) *ExperimentServiceGetExperimentParams { - var () return &ExperimentServiceGetExperimentParams{ - Context: ctx, } } // NewExperimentServiceGetExperimentParamsWithHTTPClient creates a new ExperimentServiceGetExperimentParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewExperimentServiceGetExperimentParamsWithHTTPClient(client *http.Client) *ExperimentServiceGetExperimentParams { - var () return &ExperimentServiceGetExperimentParams{ HTTPClient: client, } } -/*ExperimentServiceGetExperimentParams contains all the parameters to send to the API endpoint -for the experiment service get experiment operation typically these are written to a http.Request +/* +ExperimentServiceGetExperimentParams contains all the parameters to send to the API endpoint + + for the experiment service get experiment operation. + + Typically these are written to a http.Request. */ type ExperimentServiceGetExperimentParams struct { - /*ExperimentID - The ID of the experiment to be retrieved. + /* ExperimentID. + The ID of the experiment to be retrieved. */ ExperimentID string @@ -72,6 +72,21 @@ type ExperimentServiceGetExperimentParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the experiment service get experiment params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ExperimentServiceGetExperimentParams) WithDefaults() *ExperimentServiceGetExperimentParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the experiment service get experiment params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *ExperimentServiceGetExperimentParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the experiment service get experiment params func (o *ExperimentServiceGetExperimentParams) WithTimeout(timeout time.Duration) *ExperimentServiceGetExperimentParams { o.SetTimeout(timeout) diff --git a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_get_experiment_responses.go b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_get_experiment_responses.go index cd05dd71482..8203bbe4676 100644 --- a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_get_experiment_responses.go +++ b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_get_experiment_responses.go @@ -6,14 +6,14 @@ package experiment_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - experiment_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" ) // ExperimentServiceGetExperimentReader is a Reader for the ExperimentServiceGetExperiment structure. @@ -24,14 +24,12 @@ type ExperimentServiceGetExperimentReader struct { // ReadResponse reads a server response into the received o. func (o *ExperimentServiceGetExperimentReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewExperimentServiceGetExperimentOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewExperimentServiceGetExperimentDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewExperimentServiceGetExperimentOK() *ExperimentServiceGetExperimentOK { return &ExperimentServiceGetExperimentOK{} } -/*ExperimentServiceGetExperimentOK handles this case with default header values. +/* +ExperimentServiceGetExperimentOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type ExperimentServiceGetExperimentOK struct { Payload *experiment_model.V2beta1Experiment } +// IsSuccess returns true when this experiment service get experiment o k response has a 2xx status code +func (o *ExperimentServiceGetExperimentOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this experiment service get experiment o k response has a 3xx status code +func (o *ExperimentServiceGetExperimentOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this experiment service get experiment o k response has a 4xx status code +func (o *ExperimentServiceGetExperimentOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this experiment service get experiment o k response has a 5xx status code +func (o *ExperimentServiceGetExperimentOK) IsServerError() bool { + return false +} + +// IsCode returns true when this experiment service get experiment o k response a status code equal to that given +func (o *ExperimentServiceGetExperimentOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the experiment service get experiment o k response +func (o *ExperimentServiceGetExperimentOK) Code() int { + return 200 +} + func (o *ExperimentServiceGetExperimentOK) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/experiments/{experiment_id}][%d] experimentServiceGetExperimentOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/experiments/{experiment_id}][%d] experimentServiceGetExperimentOK %s", 200, payload) +} + +func (o *ExperimentServiceGetExperimentOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/experiments/{experiment_id}][%d] experimentServiceGetExperimentOK %s", 200, payload) +} + +func (o *ExperimentServiceGetExperimentOK) GetPayload() *experiment_model.V2beta1Experiment { + return o.Payload } func (o *ExperimentServiceGetExperimentOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewExperimentServiceGetExperimentDefault(code int) *ExperimentServiceGetExp } } -/*ExperimentServiceGetExperimentDefault handles this case with default header values. +/* +ExperimentServiceGetExperimentDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type ExperimentServiceGetExperimentDefault struct { _statusCode int - Payload *experiment_model.RuntimeError + Payload *experiment_model.GooglerpcStatus +} + +// IsSuccess returns true when this experiment service get experiment default response has a 2xx status code +func (o *ExperimentServiceGetExperimentDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this experiment service get experiment default response has a 3xx status code +func (o *ExperimentServiceGetExperimentDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this experiment service get experiment default response has a 4xx status code +func (o *ExperimentServiceGetExperimentDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this experiment service get experiment default response has a 5xx status code +func (o *ExperimentServiceGetExperimentDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this experiment service get experiment default response a status code equal to that given +func (o *ExperimentServiceGetExperimentDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the experiment service get experiment default response @@ -96,12 +161,22 @@ func (o *ExperimentServiceGetExperimentDefault) Code() int { } func (o *ExperimentServiceGetExperimentDefault) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/experiments/{experiment_id}][%d] ExperimentService_GetExperiment default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/experiments/{experiment_id}][%d] ExperimentService_GetExperiment default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceGetExperimentDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/experiments/{experiment_id}][%d] ExperimentService_GetExperiment default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceGetExperimentDefault) GetPayload() *experiment_model.GooglerpcStatus { + return o.Payload } func (o *ExperimentServiceGetExperimentDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(experiment_model.RuntimeError) + o.Payload = new(experiment_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_list_experiments_parameters.go b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_list_experiments_parameters.go index 70f51c4baee..b8323e0b5b7 100644 --- a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_list_experiments_parameters.go +++ b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_list_experiments_parameters.go @@ -13,84 +13,90 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" - - strfmt "github.com/go-openapi/strfmt" ) -// NewExperimentServiceListExperimentsParams creates a new ExperimentServiceListExperimentsParams object -// with the default values initialized. 
+// NewExperimentServiceListExperimentsParams creates a new ExperimentServiceListExperimentsParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewExperimentServiceListExperimentsParams() *ExperimentServiceListExperimentsParams { - var () return &ExperimentServiceListExperimentsParams{ - timeout: cr.DefaultTimeout, } } // NewExperimentServiceListExperimentsParamsWithTimeout creates a new ExperimentServiceListExperimentsParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewExperimentServiceListExperimentsParamsWithTimeout(timeout time.Duration) *ExperimentServiceListExperimentsParams { - var () return &ExperimentServiceListExperimentsParams{ - timeout: timeout, } } // NewExperimentServiceListExperimentsParamsWithContext creates a new ExperimentServiceListExperimentsParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewExperimentServiceListExperimentsParamsWithContext(ctx context.Context) *ExperimentServiceListExperimentsParams { - var () return &ExperimentServiceListExperimentsParams{ - Context: ctx, } } // NewExperimentServiceListExperimentsParamsWithHTTPClient creates a new ExperimentServiceListExperimentsParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewExperimentServiceListExperimentsParamsWithHTTPClient(client *http.Client) *ExperimentServiceListExperimentsParams { - var () return &ExperimentServiceListExperimentsParams{ HTTPClient: client, } } -/*ExperimentServiceListExperimentsParams contains all the parameters to send to the API endpoint -for the experiment service list experiments operation typically these are written to a http.Request +/* +ExperimentServiceListExperimentsParams contains all the parameters to send to the API endpoint + + for the experiment service list experiments operation. + + Typically these are written to a http.Request. */ type ExperimentServiceListExperimentsParams struct { - /*Filter - A url-encoded, JSON-serialized Filter protocol buffer (see - [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/v2beta1/api/filter.proto)). + /* Filter. + A url-encoded, JSON-serialized Filter protocol buffer (see + [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/v2beta1/api/filter.proto)). */ Filter *string - /*Namespace - Which namespace to filter the experiments on. + /* Namespace. + + Which namespace to filter the experiments on. */ Namespace *string - /*PageSize - The number of experiments to be listed per page. If there are more + + /* PageSize. + + The number of experiments to be listed per page. If there are more experiments than this number, the response message will contain a nextPageToken field you can use to fetch the next page. + Format: int32 */ PageSize *int32 - /*PageToken - A page token to request the next page of results. The token is acquried + + /* PageToken. + + A page token to request the next page of results. The token is acquried from the nextPageToken field of the response from the previous ListExperiments call or can be omitted when fetching the first page. 
- */ PageToken *string - /*SortBy - Can be format of "field_name", "field_name asc" or "field_name desc" - Ascending by default. + /* SortBy. + + Can be format of "field_name", "field_name asc" or "field_name desc" + Ascending by default. */ SortBy *string @@ -99,6 +105,21 @@ type ExperimentServiceListExperimentsParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the experiment service list experiments params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ExperimentServiceListExperimentsParams) WithDefaults() *ExperimentServiceListExperimentsParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the experiment service list experiments params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ExperimentServiceListExperimentsParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the experiment service list experiments params func (o *ExperimentServiceListExperimentsParams) WithTimeout(timeout time.Duration) *ExperimentServiceListExperimentsParams { o.SetTimeout(timeout) @@ -199,80 +220,85 @@ func (o *ExperimentServiceListExperimentsParams) WriteToRequest(r runtime.Client // query param filter var qrFilter string + if o.Filter != nil { qrFilter = *o.Filter } qFilter := qrFilter if qFilter != "" { + if err := r.SetQueryParam("filter", qFilter); err != nil { return err } } - } if o.Namespace != nil { // query param namespace var qrNamespace string + if o.Namespace != nil { qrNamespace = *o.Namespace } qNamespace := qrNamespace if qNamespace != "" { + if err := r.SetQueryParam("namespace", qNamespace); err != nil { return err } } - } if o.PageSize != nil { // query param page_size var qrPageSize int32 + if o.PageSize != nil { qrPageSize = *o.PageSize } qPageSize := swag.FormatInt32(qrPageSize) if qPageSize != "" { + if err := r.SetQueryParam("page_size", qPageSize); err != nil { return err } } - } if o.PageToken != nil { // query param page_token var qrPageToken string + if o.PageToken != nil { qrPageToken = *o.PageToken } qPageToken := qrPageToken if qPageToken != "" { + if err := r.SetQueryParam("page_token", qPageToken); err != nil { return err } } - } if o.SortBy != nil { // query param sort_by var qrSortBy string + if o.SortBy != nil { qrSortBy = *o.SortBy } qSortBy := qrSortBy if qSortBy != "" { + if err := r.SetQueryParam("sort_by", qSortBy); err != nil { return err } } - } if len(res) > 0 { diff --git a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_list_experiments_responses.go b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_list_experiments_responses.go index 4f19a7e91cb..4468a68cc22 100644 --- a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_list_experiments_responses.go +++ b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_list_experiments_responses.go @@ -6,14 +6,14 @@ package experiment_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - experiment_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" + 
"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" ) // ExperimentServiceListExperimentsReader is a Reader for the ExperimentServiceListExperiments structure. @@ -24,14 +24,12 @@ type ExperimentServiceListExperimentsReader struct { // ReadResponse reads a server response into the received o. func (o *ExperimentServiceListExperimentsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewExperimentServiceListExperimentsOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewExperimentServiceListExperimentsDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewExperimentServiceListExperimentsOK() *ExperimentServiceListExperimentsOK return &ExperimentServiceListExperimentsOK{} } -/*ExperimentServiceListExperimentsOK handles this case with default header values. +/* +ExperimentServiceListExperimentsOK describes a response with status code 200, with default header values. A successful response. */ @@ -57,8 +56,48 @@ type ExperimentServiceListExperimentsOK struct { Payload *experiment_model.V2beta1ListExperimentsResponse } +// IsSuccess returns true when this experiment service list experiments o k response has a 2xx status code +func (o *ExperimentServiceListExperimentsOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this experiment service list experiments o k response has a 3xx status code +func (o *ExperimentServiceListExperimentsOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this experiment service list experiments o k response has a 4xx status code +func (o *ExperimentServiceListExperimentsOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this experiment service list experiments o k response has a 5xx status code +func (o *ExperimentServiceListExperimentsOK) IsServerError() bool { + return false +} + +// IsCode returns true when this experiment service list experiments o k response a status code equal to that given +func (o *ExperimentServiceListExperimentsOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the experiment service list experiments o k response +func (o *ExperimentServiceListExperimentsOK) Code() int { + return 200 +} + func (o *ExperimentServiceListExperimentsOK) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/experiments][%d] experimentServiceListExperimentsOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/experiments][%d] experimentServiceListExperimentsOK %s", 200, payload) +} + +func (o *ExperimentServiceListExperimentsOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/experiments][%d] experimentServiceListExperimentsOK %s", 200, payload) +} + +func (o *ExperimentServiceListExperimentsOK) GetPayload() *experiment_model.V2beta1ListExperimentsResponse { + return o.Payload } func (o *ExperimentServiceListExperimentsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewExperimentServiceListExperimentsDefault(code int) *ExperimentServiceList } } -/*ExperimentServiceListExperimentsDefault handles this case with default header values. 
+/* +ExperimentServiceListExperimentsDefault describes a response with status code -1, with default header values. An unexpected error response. */ type ExperimentServiceListExperimentsDefault struct { _statusCode int - Payload *experiment_model.RuntimeError + Payload *experiment_model.GooglerpcStatus +} + +// IsSuccess returns true when this experiment service list experiments default response has a 2xx status code +func (o *ExperimentServiceListExperimentsDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this experiment service list experiments default response has a 3xx status code +func (o *ExperimentServiceListExperimentsDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this experiment service list experiments default response has a 4xx status code +func (o *ExperimentServiceListExperimentsDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this experiment service list experiments default response has a 5xx status code +func (o *ExperimentServiceListExperimentsDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this experiment service list experiments default response a status code equal to that given +func (o *ExperimentServiceListExperimentsDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the experiment service list experiments default response @@ -96,12 +161,22 @@ func (o *ExperimentServiceListExperimentsDefault) Code() int { } func (o *ExperimentServiceListExperimentsDefault) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/experiments][%d] ExperimentService_ListExperiments default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/experiments][%d] ExperimentService_ListExperiments default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceListExperimentsDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/experiments][%d] ExperimentService_ListExperiments default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceListExperimentsDefault) GetPayload() *experiment_model.GooglerpcStatus { + return o.Payload } func (o *ExperimentServiceListExperimentsDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(experiment_model.RuntimeError) + o.Payload = new(experiment_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_unarchive_experiment_parameters.go b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_unarchive_experiment_parameters.go index a1b730480f3..61e2507f3fd 100644 --- a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_unarchive_experiment_parameters.go +++ b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_unarchive_experiment_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewExperimentServiceUnarchiveExperimentParams creates a new 
ExperimentServiceUnarchiveExperimentParams object -// with the default values initialized. +// NewExperimentServiceUnarchiveExperimentParams creates a new ExperimentServiceUnarchiveExperimentParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewExperimentServiceUnarchiveExperimentParams() *ExperimentServiceUnarchiveExperimentParams { - var () return &ExperimentServiceUnarchiveExperimentParams{ - timeout: cr.DefaultTimeout, } } // NewExperimentServiceUnarchiveExperimentParamsWithTimeout creates a new ExperimentServiceUnarchiveExperimentParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewExperimentServiceUnarchiveExperimentParamsWithTimeout(timeout time.Duration) *ExperimentServiceUnarchiveExperimentParams { - var () return &ExperimentServiceUnarchiveExperimentParams{ - timeout: timeout, } } // NewExperimentServiceUnarchiveExperimentParamsWithContext creates a new ExperimentServiceUnarchiveExperimentParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewExperimentServiceUnarchiveExperimentParamsWithContext(ctx context.Context) *ExperimentServiceUnarchiveExperimentParams { - var () return &ExperimentServiceUnarchiveExperimentParams{ - Context: ctx, } } // NewExperimentServiceUnarchiveExperimentParamsWithHTTPClient creates a new ExperimentServiceUnarchiveExperimentParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewExperimentServiceUnarchiveExperimentParamsWithHTTPClient(client *http.Client) *ExperimentServiceUnarchiveExperimentParams { - var () return &ExperimentServiceUnarchiveExperimentParams{ HTTPClient: client, } } -/*ExperimentServiceUnarchiveExperimentParams contains all the parameters to send to the API endpoint -for the experiment service unarchive experiment operation typically these are written to a http.Request +/* +ExperimentServiceUnarchiveExperimentParams contains all the parameters to send to the API endpoint + + for the experiment service unarchive experiment operation. + + Typically these are written to a http.Request. */ type ExperimentServiceUnarchiveExperimentParams struct { - /*ExperimentID - The ID of the experiment to be restored. + /* ExperimentID. + The ID of the experiment to be restored. */ ExperimentID string @@ -72,6 +72,21 @@ type ExperimentServiceUnarchiveExperimentParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the experiment service unarchive experiment params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ExperimentServiceUnarchiveExperimentParams) WithDefaults() *ExperimentServiceUnarchiveExperimentParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the experiment service unarchive experiment params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *ExperimentServiceUnarchiveExperimentParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the experiment service unarchive experiment params func (o *ExperimentServiceUnarchiveExperimentParams) WithTimeout(timeout time.Duration) *ExperimentServiceUnarchiveExperimentParams { o.SetTimeout(timeout) diff --git a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_unarchive_experiment_responses.go b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_unarchive_experiment_responses.go index c91860e1292..316b9520584 100644 --- a/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_unarchive_experiment_responses.go +++ b/backend/api/v2beta1/go_http_client/experiment_client/experiment_service/experiment_service_unarchive_experiment_responses.go @@ -6,14 +6,14 @@ package experiment_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - experiment_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" ) // ExperimentServiceUnarchiveExperimentReader is a Reader for the ExperimentServiceUnarchiveExperiment structure. @@ -24,14 +24,12 @@ type ExperimentServiceUnarchiveExperimentReader struct { // ReadResponse reads a server response into the received o. func (o *ExperimentServiceUnarchiveExperimentReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewExperimentServiceUnarchiveExperimentOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewExperimentServiceUnarchiveExperimentDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewExperimentServiceUnarchiveExperimentOK() *ExperimentServiceUnarchiveExpe return &ExperimentServiceUnarchiveExperimentOK{} } -/*ExperimentServiceUnarchiveExperimentOK handles this case with default header values. +/* +ExperimentServiceUnarchiveExperimentOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type ExperimentServiceUnarchiveExperimentOK struct { Payload interface{} } +// IsSuccess returns true when this experiment service unarchive experiment o k response has a 2xx status code +func (o *ExperimentServiceUnarchiveExperimentOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this experiment service unarchive experiment o k response has a 3xx status code +func (o *ExperimentServiceUnarchiveExperimentOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this experiment service unarchive experiment o k response has a 4xx status code +func (o *ExperimentServiceUnarchiveExperimentOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this experiment service unarchive experiment o k response has a 5xx status code +func (o *ExperimentServiceUnarchiveExperimentOK) IsServerError() bool { + return false +} + +// IsCode returns true when this experiment service unarchive experiment o k response a status code equal to that given +func (o *ExperimentServiceUnarchiveExperimentOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the experiment service unarchive experiment o k response +func (o *ExperimentServiceUnarchiveExperimentOK) Code() int { + return 200 +} + func (o *ExperimentServiceUnarchiveExperimentOK) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/experiments/{experiment_id}:unarchive][%d] experimentServiceUnarchiveExperimentOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/experiments/{experiment_id}:unarchive][%d] experimentServiceUnarchiveExperimentOK %s", 200, payload) +} + +func (o *ExperimentServiceUnarchiveExperimentOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/experiments/{experiment_id}:unarchive][%d] experimentServiceUnarchiveExperimentOK %s", 200, payload) +} + +func (o *ExperimentServiceUnarchiveExperimentOK) GetPayload() interface{} { + return o.Payload } func (o *ExperimentServiceUnarchiveExperimentOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewExperimentServiceUnarchiveExperimentDefault(code int) *ExperimentService } } -/*ExperimentServiceUnarchiveExperimentDefault handles this case with default header values. +/* +ExperimentServiceUnarchiveExperimentDefault describes a response with status code -1, with default header values. An unexpected error response. 
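Usage note (not part of the generated files): the Is*/Code/GetPayload helpers added in this diff give callers a uniform way to classify these "default" error responses. Below is a minimal sketch; the describeDefault helper and the 404 example value are assumptions, and GooglerpcStatus is the error payload model introduced further down in this diff.

package main

import (
	"fmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_client/experiment_service"
)

// describeDefault is a hypothetical helper showing how calling code might
// classify a generated "default" (error) response using the new helpers.
func describeDefault(resp *experiment_service.ExperimentServiceUnarchiveExperimentDefault) string {
	st := resp.GetPayload() // *experiment_model.GooglerpcStatus, defined later in this diff
	if st != nil && (resp.IsClientError() || resp.IsServerError()) {
		return fmt.Sprintf("unarchive failed with HTTP %d: %s", resp.Code(), st.Message)
	}
	// Fall back to the generated String() representation.
	return resp.String()
}

func main() {
	// Purely for demonstration: a 404 default response built with the generated constructor.
	resp := experiment_service.NewExperimentServiceUnarchiveExperimentDefault(404)
	fmt.Println(describeDefault(resp))
}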
*/ type ExperimentServiceUnarchiveExperimentDefault struct { _statusCode int - Payload *experiment_model.RuntimeError + Payload *experiment_model.GooglerpcStatus +} + +// IsSuccess returns true when this experiment service unarchive experiment default response has a 2xx status code +func (o *ExperimentServiceUnarchiveExperimentDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this experiment service unarchive experiment default response has a 3xx status code +func (o *ExperimentServiceUnarchiveExperimentDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this experiment service unarchive experiment default response has a 4xx status code +func (o *ExperimentServiceUnarchiveExperimentDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this experiment service unarchive experiment default response has a 5xx status code +func (o *ExperimentServiceUnarchiveExperimentDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this experiment service unarchive experiment default response a status code equal to that given +func (o *ExperimentServiceUnarchiveExperimentDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the experiment service unarchive experiment default response @@ -94,12 +159,22 @@ func (o *ExperimentServiceUnarchiveExperimentDefault) Code() int { } func (o *ExperimentServiceUnarchiveExperimentDefault) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/experiments/{experiment_id}:unarchive][%d] ExperimentService_UnarchiveExperiment default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/experiments/{experiment_id}:unarchive][%d] ExperimentService_UnarchiveExperiment default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceUnarchiveExperimentDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/experiments/{experiment_id}:unarchive][%d] ExperimentService_UnarchiveExperiment default %s", o._statusCode, payload) +} + +func (o *ExperimentServiceUnarchiveExperimentDefault) GetPayload() *experiment_model.GooglerpcStatus { + return o.Payload } func (o *ExperimentServiceUnarchiveExperimentDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(experiment_model.RuntimeError) + o.Payload = new(experiment_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/experiment_model/googlerpc_status.go b/backend/api/v2beta1/go_http_client/experiment_model/googlerpc_status.go new file mode 100644 index 00000000000..8bd72b44496 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/experiment_model/googlerpc_status.go @@ -0,0 +1,136 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package experiment_model + +// This file was generated by the swagger tool. 
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// GooglerpcStatus The `Status` type defines a logical error model that is suitable for +// different programming environments, including REST APIs and RPC APIs. It is +// used by [gRPC](https://github.com/grpc). Each `Status` message contains +// three pieces of data: error code, error message, and error details. +// +// You can find out more about this error model and how to work with it in the +// [API Design Guide](https://cloud.google.com/apis/design/errors). +// +// swagger:model googlerpcStatus +type GooglerpcStatus struct { + + // The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]. + Code int32 `json:"code,omitempty"` + + // A list of messages that carry the error details. There is a common set of + // message types for APIs to use. + Details []*ProtobufAny `json:"details"` + + // A developer-facing error message, which should be in English. Any + // user-facing error message should be localized and sent in the + // [google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client. + Message string `json:"message,omitempty"` +} + +// Validate validates this googlerpc status +func (m *GooglerpcStatus) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateDetails(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) validateDetails(formats strfmt.Registry) error { + if swag.IsZero(m.Details) { // not required + return nil + } + + for i := 0; i < len(m.Details); i++ { + if swag.IsZero(m.Details[i]) { // not required + continue + } + + if m.Details[i] != nil { + if err := m.Details[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this googlerpc status based on the context it is used +func (m *GooglerpcStatus) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateDetails(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) contextValidateDetails(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Details); i++ { + + if m.Details[i] != nil { + + if swag.IsZero(m.Details[i]) { // not required + return nil + } + + if err := m.Details[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." 
+ strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *GooglerpcStatus) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *GooglerpcStatus) UnmarshalBinary(b []byte) error { + var res GooglerpcStatus + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/experiment_model/protobuf_any.go b/backend/api/v2beta1/go_http_client/experiment_model/protobuf_any.go index 3c40be98525..ed8315b7a0f 100644 --- a/backend/api/v2beta1/go_http_client/experiment_model/protobuf_any.go +++ b/backend/api/v2beta1/go_http_client/experiment_model/protobuf_any.go @@ -6,9 +6,10 @@ package experiment_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "encoding/json" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) @@ -20,45 +21,49 @@ import ( // // Example 1: Pack and unpack a message in C++. // -// Foo foo = ...; -// Any any; -// any.PackFrom(foo); -// ... -// if (any.UnpackTo(&foo)) { -// ... -// } +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } // // Example 2: Pack and unpack a message in Java. // -// Foo foo = ...; -// Any any = Any.pack(foo); -// ... -// if (any.is(Foo.class)) { -// foo = any.unpack(Foo.class); -// } -// -// Example 3: Pack and unpack a message in Python. -// -// foo = Foo(...) -// any = Any() -// any.Pack(foo) -// ... -// if any.Is(Foo.DESCRIPTOR): -// any.Unpack(foo) -// ... -// -// Example 4: Pack and unpack a message in Go -// -// foo := &pb.Foo{...} -// any, err := anypb.New(foo) -// if err != nil { -// ... -// } -// ... -// foo := &pb.Foo{} -// if err := any.UnmarshalTo(foo); err != nil { -// ... -// } +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// // or ... +// if (any.isSameTypeAs(Foo.getDefaultInstance())) { +// foo = any.unpack(Foo.getDefaultInstance()); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... +// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := anypb.New(foo) +// if err != nil { +// ... +// } +// ... +// foo := &pb.Foo{} +// if err := any.UnmarshalTo(foo); err != nil { +// ... +// } // // The pack methods provided by protobuf library will by default use // 'type.googleapis.com/full.type.name' as the type URL and the unpack @@ -66,34 +71,34 @@ import ( // in the type URL, for example "foo.bar.com/x/y.z" will yield type // name "y.z". // -// // JSON -// +// ==== // The JSON representation of an `Any` value uses the regular // representation of the deserialized, embedded message, with an // additional field `@type` which contains the type URL. 
Example: // -// package google.profile; -// message Person { -// string first_name = 1; -// string last_name = 2; -// } +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } // -// { -// "@type": "type.googleapis.com/google.profile.Person", -// "firstName": , -// "lastName": -// } +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } // // If the embedded message type is well-known and has a custom JSON // representation, that representation will be embedded adding a field // `value` which holds the custom JSON in addition to the `@type` // field. Example (for message [google.protobuf.Duration][]): // -// { -// "@type": "type.googleapis.com/google.protobuf.Duration", -// "value": "1.212s" -// } +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// // swagger:model protobufAny type ProtobufAny struct { @@ -120,39 +125,151 @@ type ProtobufAny struct { // // Note: this functionality is not currently available in the official // protobuf release, and it is not used for type URLs beginning with - // type.googleapis.com. + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. // // Schemes other than `http`, `https` (or the empty scheme) might be // used with implementation specific semantics. - TypeURL string `json:"type_url,omitempty"` + AtType string `json:"@type,omitempty"` - // Must be a valid serialized protocol buffer of the above specified type. - // Format: byte - Value strfmt.Base64 `json:"value,omitempty"` + // protobuf any + ProtobufAny map[string]interface{} `json:"-"` } -// Validate validates this protobuf any -func (m *ProtobufAny) Validate(formats strfmt.Registry) error { - var res []error +// UnmarshalJSON unmarshals this object with additional properties from JSON +func (m *ProtobufAny) UnmarshalJSON(data []byte) error { + // stage 1, bind the properties + var stage1 struct { + + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. 
+ // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + AtType string `json:"@type,omitempty"` + } + if err := json.Unmarshal(data, &stage1); err != nil { + return err + } + var rcv ProtobufAny + + rcv.AtType = stage1.AtType + *m = rcv - if err := m.validateValue(formats); err != nil { - res = append(res, err) + // stage 2, remove properties and add to map + stage2 := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &stage2); err != nil { + return err } - if len(res) > 0 { - return errors.CompositeValidationError(res...) + delete(stage2, "@type") + // stage 3, add additional properties values + if len(stage2) > 0 { + result := make(map[string]interface{}) + for k, v := range stage2 { + var toadd interface{} + if err := json.Unmarshal(v, &toadd); err != nil { + return err + } + result[k] = toadd + } + m.ProtobufAny = result } + return nil } -func (m *ProtobufAny) validateValue(formats strfmt.Registry) error { +// MarshalJSON marshals this object with additional properties into a JSON object +func (m ProtobufAny) MarshalJSON() ([]byte, error) { + var stage1 struct { - if swag.IsZero(m.Value) { // not required - return nil + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. 
+ AtType string `json:"@type,omitempty"` } - // Format "byte" (base64 string) is already validated when unmarshalled + stage1.AtType = m.AtType + + // make JSON object for known properties + props, err := json.Marshal(stage1) + if err != nil { + return nil, err + } + + if len(m.ProtobufAny) == 0 { // no additional properties + return props, nil + } + + // make JSON object for the additional properties + additional, err := json.Marshal(m.ProtobufAny) + if err != nil { + return nil, err + } + + if len(props) < 3 { // "{}": only additional properties + return additional, nil + } + + // concatenate the 2 objects + return swag.ConcatJSON(props, additional), nil +} + +// Validate validates this protobuf any +func (m *ProtobufAny) Validate(formats strfmt.Registry) error { + return nil +} +// ContextValidate validates this protobuf any based on context it is used +func (m *ProtobufAny) ContextValidate(ctx context.Context, formats strfmt.Registry) error { return nil } diff --git a/backend/api/v2beta1/go_http_client/experiment_model/runtime_error.go b/backend/api/v2beta1/go_http_client/experiment_model/runtime_error.go deleted file mode 100644 index 45761477b70..00000000000 --- a/backend/api/v2beta1/go_http_client/experiment_model/runtime_error.go +++ /dev/null @@ -1,89 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package experiment_model - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "strconv" - - strfmt "github.com/go-openapi/strfmt" - - "github.com/go-openapi/errors" - "github.com/go-openapi/swag" -) - -// RuntimeError runtime error -// swagger:model runtimeError -type RuntimeError struct { - - // code - Code int32 `json:"code,omitempty"` - - // details - Details []*ProtobufAny `json:"details"` - - // error - Error string `json:"error,omitempty"` - - // message - Message string `json:"message,omitempty"` -} - -// Validate validates this runtime error -func (m *RuntimeError) Validate(formats strfmt.Registry) error { - var res []error - - if err := m.validateDetails(formats); err != nil { - res = append(res, err) - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} - -func (m *RuntimeError) validateDetails(formats strfmt.Registry) error { - - if swag.IsZero(m.Details) { // not required - return nil - } - - for i := 0; i < len(m.Details); i++ { - if swag.IsZero(m.Details[i]) { // not required - continue - } - - if m.Details[i] != nil { - if err := m.Details[i].Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("details" + "." 
+ strconv.Itoa(i)) - } - return err - } - } - - } - - return nil -} - -// MarshalBinary interface implementation -func (m *RuntimeError) MarshalBinary() ([]byte, error) { - if m == nil { - return nil, nil - } - return swag.WriteJSON(m) -} - -// UnmarshalBinary interface implementation -func (m *RuntimeError) UnmarshalBinary(b []byte) error { - var res RuntimeError - if err := swag.ReadJSON(b, &res); err != nil { - return err - } - *m = res - return nil -} diff --git a/backend/api/v2beta1/go_http_client/experiment_model/v2beta1_experiment.go b/backend/api/v2beta1/go_http_client/experiment_model/v2beta1_experiment.go index 8b809c17fa1..e24f2579303 100644 --- a/backend/api/v2beta1/go_http_client/experiment_model/v2beta1_experiment.go +++ b/backend/api/v2beta1/go_http_client/experiment_model/v2beta1_experiment.go @@ -6,14 +6,16 @@ package experiment_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // V2beta1Experiment v2beta1 experiment +// // swagger:model v2beta1Experiment type V2beta1Experiment struct { @@ -38,7 +40,7 @@ type V2beta1Experiment struct { Namespace string `json:"namespace,omitempty"` // Output. Specifies whether this experiment is in archived or available state. - StorageState V2beta1ExperimentStorageState `json:"storage_state,omitempty"` + StorageState *V2beta1ExperimentStorageState `json:"storage_state,omitempty"` } // Validate validates this v2beta1 experiment @@ -64,7 +66,6 @@ func (m *V2beta1Experiment) Validate(formats strfmt.Registry) error { } func (m *V2beta1Experiment) validateCreatedAt(formats strfmt.Registry) error { - if swag.IsZero(m.CreatedAt) { // not required return nil } @@ -77,7 +78,6 @@ func (m *V2beta1Experiment) validateCreatedAt(formats strfmt.Registry) error { } func (m *V2beta1Experiment) validateLastRunCreatedAt(formats strfmt.Registry) error { - if swag.IsZero(m.LastRunCreatedAt) { // not required return nil } @@ -90,16 +90,54 @@ func (m *V2beta1Experiment) validateLastRunCreatedAt(formats strfmt.Registry) er } func (m *V2beta1Experiment) validateStorageState(formats strfmt.Registry) error { - if swag.IsZero(m.StorageState) { // not required return nil } - if err := m.StorageState.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("storage_state") + if m.StorageState != nil { + if err := m.StorageState.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("storage_state") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("storage_state") + } + return err + } + } + + return nil +} + +// ContextValidate validate this v2beta1 experiment based on the context it is used +func (m *V2beta1Experiment) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateStorageState(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *V2beta1Experiment) contextValidateStorageState(ctx context.Context, formats strfmt.Registry) error { + + if m.StorageState != nil { + + if swag.IsZero(m.StorageState) { // not required + return nil + } + + if err := m.StorageState.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("storage_state") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("storage_state") + } + return err } - return err } return nil diff --git a/backend/api/v2beta1/go_http_client/experiment_model/v2beta1_experiment_storage_state.go b/backend/api/v2beta1/go_http_client/experiment_model/v2beta1_experiment_storage_state.go index 0a30d1c2b73..2aa9d585a3e 100644 --- a/backend/api/v2beta1/go_http_client/experiment_model/v2beta1_experiment_storage_state.go +++ b/backend/api/v2beta1/go_http_client/experiment_model/v2beta1_experiment_storage_state.go @@ -6,22 +6,32 @@ package experiment_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // V2beta1ExperimentStorageState Describes whether an entity is available or archived. // -// - STORAGE_STATE_UNSPECIFIED: Default state. This state in not used -// - AVAILABLE: Entity is available. -// - ARCHIVED: Entity is archived. +// - STORAGE_STATE_UNSPECIFIED: Default state. This state in not used +// - AVAILABLE: Entity is available. +// - ARCHIVED: Entity is archived. +// // swagger:model v2beta1ExperimentStorageState type V2beta1ExperimentStorageState string +func NewV2beta1ExperimentStorageState(value V2beta1ExperimentStorageState) *V2beta1ExperimentStorageState { + return &value +} + +// Pointer returns a pointer to a freshly-allocated V2beta1ExperimentStorageState. 
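Illustrative only (not part of this diff): because StorageState on V2beta1Experiment is now a pointer, callers set it via the new NewV2beta1ExperimentStorageState constructor (or the Pointer helper) instead of assigning the enum value directly. The namespace value and the "ARCHIVED" literal below are placeholders.

package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"
	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model"
)

func main() {
	// Build an experiment with the pointer-typed storage state.
	state := experiment_model.NewV2beta1ExperimentStorageState(
		experiment_model.V2beta1ExperimentStorageState("ARCHIVED"),
	)
	exp := &experiment_model.V2beta1Experiment{
		Namespace:    "kubeflow", // placeholder namespace
		StorageState: state,
	}

	// Validate checks the enum value against the generated allow-list.
	if err := exp.Validate(strfmt.Default); err != nil {
		fmt.Println("invalid experiment:", err)
		return
	}
	fmt.Println("experiment validates, storage state:", *exp.StorageState)
}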
+func (m V2beta1ExperimentStorageState) Pointer() *V2beta1ExperimentStorageState { + return &m +} + const ( // V2beta1ExperimentStorageStateSTORAGESTATEUNSPECIFIED captures enum value "STORAGE_STATE_UNSPECIFIED" @@ -48,7 +58,7 @@ func init() { } func (m V2beta1ExperimentStorageState) validateV2beta1ExperimentStorageStateEnum(path, location string, value V2beta1ExperimentStorageState) error { - if err := validate.Enum(path, location, value, v2beta1ExperimentStorageStateEnum); err != nil { + if err := validate.EnumCase(path, location, value, v2beta1ExperimentStorageStateEnum, true); err != nil { return err } return nil @@ -68,3 +78,8 @@ func (m V2beta1ExperimentStorageState) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this v2beta1 experiment storage state based on context it is used +func (m V2beta1ExperimentStorageState) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v2beta1/go_http_client/experiment_model/v2beta1_list_experiments_response.go b/backend/api/v2beta1/go_http_client/experiment_model/v2beta1_list_experiments_response.go index 2e4da629f22..d56ef147466 100644 --- a/backend/api/v2beta1/go_http_client/experiment_model/v2beta1_list_experiments_response.go +++ b/backend/api/v2beta1/go_http_client/experiment_model/v2beta1_list_experiments_response.go @@ -6,15 +6,16 @@ package experiment_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // V2beta1ListExperimentsResponse v2beta1 list experiments response +// // swagger:model v2beta1ListExperimentsResponse type V2beta1ListExperimentsResponse struct { @@ -43,7 +44,6 @@ func (m *V2beta1ListExperimentsResponse) Validate(formats strfmt.Registry) error } func (m *V2beta1ListExperimentsResponse) validateExperiments(formats strfmt.Registry) error { - if swag.IsZero(m.Experiments) { // not required return nil } @@ -57,6 +57,47 @@ func (m *V2beta1ListExperimentsResponse) validateExperiments(formats strfmt.Regi if err := m.Experiments[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("experiments" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("experiments" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this v2beta1 list experiments response based on the context it is used +func (m *V2beta1ListExperimentsResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateExperiments(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1ListExperimentsResponse) contextValidateExperiments(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Experiments); i++ { + + if m.Experiments[i] != nil { + + if swag.IsZero(m.Experiments[i]) { // not required + return nil + } + + if err := m.Experiments[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("experiments" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("experiments" + "." 
+ strconv.Itoa(i)) } return err } diff --git a/backend/api/v2beta1/go_http_client/healthz_client/healthz_client.go b/backend/api/v2beta1/go_http_client/healthz_client/healthz_client.go index 5034e46519f..ea5303a98fc 100644 --- a/backend/api/v2beta1/go_http_client/healthz_client/healthz_client.go +++ b/backend/api/v2beta1/go_http_client/healthz_client/healthz_client.go @@ -8,8 +8,7 @@ package healthz_client import ( "github.com/go-openapi/runtime" httptransport "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/healthz_client/healthz_service" ) @@ -56,9 +55,7 @@ func New(transport runtime.ClientTransport, formats strfmt.Registry) *Healthz { cli := new(Healthz) cli.Transport = transport - cli.HealthzService = healthz_service.New(transport, formats) - return cli } @@ -103,7 +100,7 @@ func (cfg *TransportConfig) WithSchemes(schemes []string) *TransportConfig { // Healthz is a client for healthz type Healthz struct { - HealthzService *healthz_service.Client + HealthzService healthz_service.ClientService Transport runtime.ClientTransport } @@ -111,7 +108,5 @@ type Healthz struct { // SetTransport changes the transport on the client and all its subresources func (c *Healthz) SetTransport(transport runtime.ClientTransport) { c.Transport = transport - c.HealthzService.SetTransport(transport) - } diff --git a/backend/api/v2beta1/go_http_client/healthz_client/healthz_service/healthz_service_client.go b/backend/api/v2beta1/go_http_client/healthz_client/healthz_service/healthz_service_client.go index 8697b832806..67bdd551fb8 100644 --- a/backend/api/v2beta1/go_http_client/healthz_client/healthz_service/healthz_service_client.go +++ b/backend/api/v2beta1/go_http_client/healthz_client/healthz_service/healthz_service_client.go @@ -7,15 +7,40 @@ package healthz_service import ( "github.com/go-openapi/runtime" - - strfmt "github.com/go-openapi/strfmt" + httptransport "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" ) // New creates a new healthz service API client. -func New(transport runtime.ClientTransport, formats strfmt.Registry) *Client { +func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService { return &Client{transport: transport, formats: formats} } +// New creates a new healthz service API client with basic auth credentials. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - user: user for basic authentication header. +// - password: password for basic authentication header. +func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BasicAuth(user, password) + return &Client{transport: transport, formats: strfmt.Default} +} + +// New creates a new healthz service API client with a bearer token for authentication. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - bearerToken: bearer token for Bearer authentication header. 
+func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BearerToken(bearerToken) + return &Client{transport: transport, formats: strfmt.Default} +} + /* Client for healthz service API */ @@ -24,16 +49,25 @@ type Client struct { formats strfmt.Registry } +// ClientOption may be used to customize the behavior of Client methods. +type ClientOption func(*runtime.ClientOperation) + +// ClientService is the interface for Client methods +type ClientService interface { + HealthzServiceGetHealthz(params *HealthzServiceGetHealthzParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*HealthzServiceGetHealthzOK, error) + + SetTransport(transport runtime.ClientTransport) +} + /* HealthzServiceGetHealthz gets healthz data */ -func (a *Client) HealthzServiceGetHealthz(params *HealthzServiceGetHealthzParams, authInfo runtime.ClientAuthInfoWriter) (*HealthzServiceGetHealthzOK, error) { +func (a *Client) HealthzServiceGetHealthz(params *HealthzServiceGetHealthzParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*HealthzServiceGetHealthzOK, error) { // TODO: Validate the params before sending if params == nil { params = NewHealthzServiceGetHealthzParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "HealthzService_GetHealthz", Method: "GET", PathPattern: "/apis/v2beta1/healthz", @@ -45,12 +79,22 @@ func (a *Client) HealthzServiceGetHealthz(params *HealthzServiceGetHealthzParams AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*HealthzServiceGetHealthzOK), nil - + success, ok := result.(*HealthzServiceGetHealthzOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*HealthzServiceGetHealthzDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } // SetTransport changes the transport on the client diff --git a/backend/api/v2beta1/go_http_client/healthz_client/healthz_service/healthz_service_get_healthz_parameters.go b/backend/api/v2beta1/go_http_client/healthz_client/healthz_service/healthz_service_get_healthz_parameters.go index cf0c78296ab..4cb71a4238b 100644 --- a/backend/api/v2beta1/go_http_client/healthz_client/healthz_service/healthz_service_get_healthz_parameters.go +++ b/backend/api/v2beta1/go_http_client/healthz_client/healthz_service/healthz_service_get_healthz_parameters.go @@ -13,51 +13,51 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewHealthzServiceGetHealthzParams creates a new HealthzServiceGetHealthzParams object -// with the default values initialized. +// NewHealthzServiceGetHealthzParams creates a new HealthzServiceGetHealthzParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewHealthzServiceGetHealthzParams() *HealthzServiceGetHealthzParams { - return &HealthzServiceGetHealthzParams{ - timeout: cr.DefaultTimeout, } } // NewHealthzServiceGetHealthzParamsWithTimeout creates a new HealthzServiceGetHealthzParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewHealthzServiceGetHealthzParamsWithTimeout(timeout time.Duration) *HealthzServiceGetHealthzParams { - return &HealthzServiceGetHealthzParams{ - timeout: timeout, } } // NewHealthzServiceGetHealthzParamsWithContext creates a new HealthzServiceGetHealthzParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewHealthzServiceGetHealthzParamsWithContext(ctx context.Context) *HealthzServiceGetHealthzParams { - return &HealthzServiceGetHealthzParams{ - Context: ctx, } } // NewHealthzServiceGetHealthzParamsWithHTTPClient creates a new HealthzServiceGetHealthzParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewHealthzServiceGetHealthzParamsWithHTTPClient(client *http.Client) *HealthzServiceGetHealthzParams { - return &HealthzServiceGetHealthzParams{ HTTPClient: client, } } -/*HealthzServiceGetHealthzParams contains all the parameters to send to the API endpoint -for the healthz service get healthz operation typically these are written to a http.Request +/* +HealthzServiceGetHealthzParams contains all the parameters to send to the API endpoint + + for the healthz service get healthz operation. + + Typically these are written to a http.Request. */ type HealthzServiceGetHealthzParams struct { timeout time.Duration @@ -65,6 +65,21 @@ type HealthzServiceGetHealthzParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the healthz service get healthz params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *HealthzServiceGetHealthzParams) WithDefaults() *HealthzServiceGetHealthzParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the healthz service get healthz params (not the query body). +// +// All values with no default are reset to their zero value. 
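For orientation, a sketch (not generated code) of how the new ClientService interface, the NewClientWithBearerToken constructor, and the params builders above fit together. The host, base path, scheme, and token are placeholder assumptions, and GetPayload on the OK response is added further below in this diff.

package main

import (
	"fmt"
	"time"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/healthz_client/healthz_service"
)

func main() {
	// Placeholder endpoint and credentials; substitute real values.
	svc := healthz_service.NewClientWithBearerToken("localhost:8888", "/", "http", "my-token")

	// Params with an explicit per-request timeout; WithDefaults is a no-op here
	// but keeps call sites uniform with parameters that do have defaults.
	params := healthz_service.NewHealthzServiceGetHealthzParamsWithTimeout(30 * time.Second).WithDefaults()

	// authInfo is nil because the bearer token is already installed as the
	// transport's DefaultAuthentication by the constructor above.
	ok, err := svc.HealthzServiceGetHealthz(params, nil)
	if err != nil {
		fmt.Println("healthz call failed:", err)
		return
	}
	fmt.Printf("healthz: %+v\n", ok.GetPayload())
}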
+func (o *HealthzServiceGetHealthzParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the healthz service get healthz params func (o *HealthzServiceGetHealthzParams) WithTimeout(timeout time.Duration) *HealthzServiceGetHealthzParams { o.SetTimeout(timeout) diff --git a/backend/api/v2beta1/go_http_client/healthz_client/healthz_service/healthz_service_get_healthz_responses.go b/backend/api/v2beta1/go_http_client/healthz_client/healthz_service/healthz_service_get_healthz_responses.go index a4ed8d9e86e..27d948c114c 100644 --- a/backend/api/v2beta1/go_http_client/healthz_client/healthz_service/healthz_service_get_healthz_responses.go +++ b/backend/api/v2beta1/go_http_client/healthz_client/healthz_service/healthz_service_get_healthz_responses.go @@ -6,14 +6,14 @@ package healthz_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - healthz_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/healthz_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/healthz_model" ) // HealthzServiceGetHealthzReader is a Reader for the HealthzServiceGetHealthz structure. @@ -24,14 +24,12 @@ type HealthzServiceGetHealthzReader struct { // ReadResponse reads a server response into the received o. func (o *HealthzServiceGetHealthzReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewHealthzServiceGetHealthzOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewHealthzServiceGetHealthzDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewHealthzServiceGetHealthzOK() *HealthzServiceGetHealthzOK { return &HealthzServiceGetHealthzOK{} } -/*HealthzServiceGetHealthzOK handles this case with default header values. +/* +HealthzServiceGetHealthzOK describes a response with status code 200, with default header values. A successful response. 
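+
+The decoded body is stored in Payload and is also available through
+GetPayload(), which returns the typed *healthz_model.V2beta1GetHealthzResponse;
+the IsSuccess/IsCode helpers below classify the response by status code.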
*/ @@ -57,8 +56,48 @@ type HealthzServiceGetHealthzOK struct { Payload *healthz_model.V2beta1GetHealthzResponse } +// IsSuccess returns true when this healthz service get healthz o k response has a 2xx status code +func (o *HealthzServiceGetHealthzOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this healthz service get healthz o k response has a 3xx status code +func (o *HealthzServiceGetHealthzOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this healthz service get healthz o k response has a 4xx status code +func (o *HealthzServiceGetHealthzOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this healthz service get healthz o k response has a 5xx status code +func (o *HealthzServiceGetHealthzOK) IsServerError() bool { + return false +} + +// IsCode returns true when this healthz service get healthz o k response a status code equal to that given +func (o *HealthzServiceGetHealthzOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the healthz service get healthz o k response +func (o *HealthzServiceGetHealthzOK) Code() int { + return 200 +} + func (o *HealthzServiceGetHealthzOK) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/healthz][%d] healthzServiceGetHealthzOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/healthz][%d] healthzServiceGetHealthzOK %s", 200, payload) +} + +func (o *HealthzServiceGetHealthzOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/healthz][%d] healthzServiceGetHealthzOK %s", 200, payload) +} + +func (o *HealthzServiceGetHealthzOK) GetPayload() *healthz_model.V2beta1GetHealthzResponse { + return o.Payload } func (o *HealthzServiceGetHealthzOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewHealthzServiceGetHealthzDefault(code int) *HealthzServiceGetHealthzDefau } } -/*HealthzServiceGetHealthzDefault handles this case with default header values. +/* +HealthzServiceGetHealthzDefault describes a response with status code -1, with default header values. An unexpected error response. 
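+
+The payload for this default case is now a *healthz_model.GooglerpcStatus
+(replacing the earlier RuntimeError model), and go-swagger clients normally
+hand this value back to the caller as the error returned by
+HealthzServiceGetHealthz.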
*/ type HealthzServiceGetHealthzDefault struct { _statusCode int - Payload *healthz_model.RuntimeError + Payload *healthz_model.GooglerpcStatus +} + +// IsSuccess returns true when this healthz service get healthz default response has a 2xx status code +func (o *HealthzServiceGetHealthzDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this healthz service get healthz default response has a 3xx status code +func (o *HealthzServiceGetHealthzDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this healthz service get healthz default response has a 4xx status code +func (o *HealthzServiceGetHealthzDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this healthz service get healthz default response has a 5xx status code +func (o *HealthzServiceGetHealthzDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this healthz service get healthz default response a status code equal to that given +func (o *HealthzServiceGetHealthzDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the healthz service get healthz default response @@ -96,12 +161,22 @@ func (o *HealthzServiceGetHealthzDefault) Code() int { } func (o *HealthzServiceGetHealthzDefault) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/healthz][%d] HealthzService_GetHealthz default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/healthz][%d] HealthzService_GetHealthz default %s", o._statusCode, payload) +} + +func (o *HealthzServiceGetHealthzDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/healthz][%d] HealthzService_GetHealthz default %s", o._statusCode, payload) +} + +func (o *HealthzServiceGetHealthzDefault) GetPayload() *healthz_model.GooglerpcStatus { + return o.Payload } func (o *HealthzServiceGetHealthzDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(healthz_model.RuntimeError) + o.Payload = new(healthz_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/healthz_model/googlerpc_status.go b/backend/api/v2beta1/go_http_client/healthz_model/googlerpc_status.go new file mode 100644 index 00000000000..1628ef60f0e --- /dev/null +++ b/backend/api/v2beta1/go_http_client/healthz_model/googlerpc_status.go @@ -0,0 +1,136 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package healthz_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// GooglerpcStatus The `Status` type defines a logical error model that is suitable for +// different programming environments, including REST APIs and RPC APIs. It is +// used by [gRPC](https://github.com/grpc). Each `Status` message contains +// three pieces of data: error code, error message, and error details. +// +// You can find out more about this error model and how to work with it in the +// [API Design Guide](https://cloud.google.com/apis/design/errors). 
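+//
+// A caller-side sketch (assuming, as is usual for go-swagger clients, that the
+// default response is returned as the error value) might recover the status
+// like this; the variable names are illustrative only:
+//
+//    _, err := svc.HealthzServiceGetHealthz(params, nil)
+//    var def *healthz_service.HealthzServiceGetHealthzDefault
+//    if errors.As(err, &def) {
+//        st := def.GetPayload() // *healthz_model.GooglerpcStatus
+//        fmt.Printf("rpc error %d: %s (%d details)\n", st.Code, st.Message, len(st.Details))
+//    }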
+// +// swagger:model googlerpcStatus +type GooglerpcStatus struct { + + // The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]. + Code int32 `json:"code,omitempty"` + + // A list of messages that carry the error details. There is a common set of + // message types for APIs to use. + Details []*ProtobufAny `json:"details"` + + // A developer-facing error message, which should be in English. Any + // user-facing error message should be localized and sent in the + // [google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client. + Message string `json:"message,omitempty"` +} + +// Validate validates this googlerpc status +func (m *GooglerpcStatus) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateDetails(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) validateDetails(formats strfmt.Registry) error { + if swag.IsZero(m.Details) { // not required + return nil + } + + for i := 0; i < len(m.Details); i++ { + if swag.IsZero(m.Details[i]) { // not required + continue + } + + if m.Details[i] != nil { + if err := m.Details[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this googlerpc status based on the context it is used +func (m *GooglerpcStatus) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateDetails(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) contextValidateDetails(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Details); i++ { + + if m.Details[i] != nil { + + if swag.IsZero(m.Details[i]) { // not required + return nil + } + + if err := m.Details[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." 
+ strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *GooglerpcStatus) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *GooglerpcStatus) UnmarshalBinary(b []byte) error { + var res GooglerpcStatus + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/healthz_model/protobuf_any.go b/backend/api/v2beta1/go_http_client/healthz_model/protobuf_any.go index cc274ef6e58..9695ae066c7 100644 --- a/backend/api/v2beta1/go_http_client/healthz_model/protobuf_any.go +++ b/backend/api/v2beta1/go_http_client/healthz_model/protobuf_any.go @@ -6,9 +6,10 @@ package healthz_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "encoding/json" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) @@ -20,45 +21,49 @@ import ( // // Example 1: Pack and unpack a message in C++. // -// Foo foo = ...; -// Any any; -// any.PackFrom(foo); -// ... -// if (any.UnpackTo(&foo)) { -// ... -// } +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } // // Example 2: Pack and unpack a message in Java. // -// Foo foo = ...; -// Any any = Any.pack(foo); -// ... -// if (any.is(Foo.class)) { -// foo = any.unpack(Foo.class); -// } -// -// Example 3: Pack and unpack a message in Python. -// -// foo = Foo(...) -// any = Any() -// any.Pack(foo) -// ... -// if any.Is(Foo.DESCRIPTOR): -// any.Unpack(foo) -// ... -// -// Example 4: Pack and unpack a message in Go -// -// foo := &pb.Foo{...} -// any, err := anypb.New(foo) -// if err != nil { -// ... -// } -// ... -// foo := &pb.Foo{} -// if err := any.UnmarshalTo(foo); err != nil { -// ... -// } +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// // or ... +// if (any.isSameTypeAs(Foo.getDefaultInstance())) { +// foo = any.unpack(Foo.getDefaultInstance()); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... +// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := anypb.New(foo) +// if err != nil { +// ... +// } +// ... +// foo := &pb.Foo{} +// if err := any.UnmarshalTo(foo); err != nil { +// ... +// } // // The pack methods provided by protobuf library will by default use // 'type.googleapis.com/full.type.name' as the type URL and the unpack @@ -66,34 +71,34 @@ import ( // in the type URL, for example "foo.bar.com/x/y.z" will yield type // name "y.z". // -// // JSON -// +// ==== // The JSON representation of an `Any` value uses the regular // representation of the deserialized, embedded message, with an // additional field `@type` which contains the type URL. 
Example: // -// package google.profile; -// message Person { -// string first_name = 1; -// string last_name = 2; -// } +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } // -// { -// "@type": "type.googleapis.com/google.profile.Person", -// "firstName": , -// "lastName": -// } +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } // // If the embedded message type is well-known and has a custom JSON // representation, that representation will be embedded adding a field // `value` which holds the custom JSON in addition to the `@type` // field. Example (for message [google.protobuf.Duration][]): // -// { -// "@type": "type.googleapis.com/google.protobuf.Duration", -// "value": "1.212s" -// } +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// // swagger:model protobufAny type ProtobufAny struct { @@ -120,39 +125,151 @@ type ProtobufAny struct { // // Note: this functionality is not currently available in the official // protobuf release, and it is not used for type URLs beginning with - // type.googleapis.com. + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. // // Schemes other than `http`, `https` (or the empty scheme) might be // used with implementation specific semantics. - TypeURL string `json:"type_url,omitempty"` + AtType string `json:"@type,omitempty"` - // Must be a valid serialized protocol buffer of the above specified type. - // Format: byte - Value strfmt.Base64 `json:"value,omitempty"` + // protobuf any + ProtobufAny map[string]interface{} `json:"-"` } -// Validate validates this protobuf any -func (m *ProtobufAny) Validate(formats strfmt.Registry) error { - var res []error +// UnmarshalJSON unmarshals this object with additional properties from JSON +func (m *ProtobufAny) UnmarshalJSON(data []byte) error { + // stage 1, bind the properties + var stage1 struct { + + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. 
+ // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + AtType string `json:"@type,omitempty"` + } + if err := json.Unmarshal(data, &stage1); err != nil { + return err + } + var rcv ProtobufAny + + rcv.AtType = stage1.AtType + *m = rcv - if err := m.validateValue(formats); err != nil { - res = append(res, err) + // stage 2, remove properties and add to map + stage2 := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &stage2); err != nil { + return err } - if len(res) > 0 { - return errors.CompositeValidationError(res...) + delete(stage2, "@type") + // stage 3, add additional properties values + if len(stage2) > 0 { + result := make(map[string]interface{}) + for k, v := range stage2 { + var toadd interface{} + if err := json.Unmarshal(v, &toadd); err != nil { + return err + } + result[k] = toadd + } + m.ProtobufAny = result } + return nil } -func (m *ProtobufAny) validateValue(formats strfmt.Registry) error { +// MarshalJSON marshals this object with additional properties into a JSON object +func (m ProtobufAny) MarshalJSON() ([]byte, error) { + var stage1 struct { - if swag.IsZero(m.Value) { // not required - return nil + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. 
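+    //
+    // Marshaling emits this known @type property first and then concatenates
+    // any remaining entries from the ProtobufAny map into the same JSON
+    // object, so a value such as
+    //
+    //    {"@type": "type.googleapis.com/google.rpc.DebugInfo", "detail": "..."}
+    //
+    // round-trips with "detail" preserved as an additional property.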
+ AtType string `json:"@type,omitempty"` } - // Format "byte" (base64 string) is already validated when unmarshalled + stage1.AtType = m.AtType + + // make JSON object for known properties + props, err := json.Marshal(stage1) + if err != nil { + return nil, err + } + + if len(m.ProtobufAny) == 0 { // no additional properties + return props, nil + } + + // make JSON object for the additional properties + additional, err := json.Marshal(m.ProtobufAny) + if err != nil { + return nil, err + } + + if len(props) < 3 { // "{}": only additional properties + return additional, nil + } + + // concatenate the 2 objects + return swag.ConcatJSON(props, additional), nil +} + +// Validate validates this protobuf any +func (m *ProtobufAny) Validate(formats strfmt.Registry) error { + return nil +} +// ContextValidate validates this protobuf any based on context it is used +func (m *ProtobufAny) ContextValidate(ctx context.Context, formats strfmt.Registry) error { return nil } diff --git a/backend/api/v2beta1/go_http_client/healthz_model/runtime_error.go b/backend/api/v2beta1/go_http_client/healthz_model/runtime_error.go deleted file mode 100644 index 86feccf8c1e..00000000000 --- a/backend/api/v2beta1/go_http_client/healthz_model/runtime_error.go +++ /dev/null @@ -1,89 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package healthz_model - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "strconv" - - strfmt "github.com/go-openapi/strfmt" - - "github.com/go-openapi/errors" - "github.com/go-openapi/swag" -) - -// RuntimeError runtime error -// swagger:model runtimeError -type RuntimeError struct { - - // code - Code int32 `json:"code,omitempty"` - - // details - Details []*ProtobufAny `json:"details"` - - // error - Error string `json:"error,omitempty"` - - // message - Message string `json:"message,omitempty"` -} - -// Validate validates this runtime error -func (m *RuntimeError) Validate(formats strfmt.Registry) error { - var res []error - - if err := m.validateDetails(formats); err != nil { - res = append(res, err) - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} - -func (m *RuntimeError) validateDetails(formats strfmt.Registry) error { - - if swag.IsZero(m.Details) { // not required - return nil - } - - for i := 0; i < len(m.Details); i++ { - if swag.IsZero(m.Details[i]) { // not required - continue - } - - if m.Details[i] != nil { - if err := m.Details[i].Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("details" + "." 
+ strconv.Itoa(i)) - } - return err - } - } - - } - - return nil -} - -// MarshalBinary interface implementation -func (m *RuntimeError) MarshalBinary() ([]byte, error) { - if m == nil { - return nil, nil - } - return swag.WriteJSON(m) -} - -// UnmarshalBinary interface implementation -func (m *RuntimeError) UnmarshalBinary(b []byte) error { - var res RuntimeError - if err := swag.ReadJSON(b, &res); err != nil { - return err - } - *m = res - return nil -} diff --git a/backend/api/v2beta1/go_http_client/healthz_model/v2beta1_get_healthz_response.go b/backend/api/v2beta1/go_http_client/healthz_model/v2beta1_get_healthz_response.go index 0a86ca4dee8..6235cf5011c 100644 --- a/backend/api/v2beta1/go_http_client/healthz_model/v2beta1_get_healthz_response.go +++ b/backend/api/v2beta1/go_http_client/healthz_model/v2beta1_get_healthz_response.go @@ -6,12 +6,14 @@ package healthz_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // V2beta1GetHealthzResponse v2beta1 get healthz response +// // swagger:model v2beta1GetHealthzResponse type V2beta1GetHealthzResponse struct { @@ -31,6 +33,11 @@ func (m *V2beta1GetHealthzResponse) Validate(formats strfmt.Registry) error { return nil } +// ContextValidate validates this v2beta1 get healthz response based on context it is used +func (m *V2beta1GetHealthzResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *V2beta1GetHealthzResponse) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_client.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_client.go index 8ac3d9acf9b..5dd8fa723eb 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_client.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_client.go @@ -8,8 +8,7 @@ package pipeline_client import ( "github.com/go-openapi/runtime" httptransport "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service" ) @@ -56,9 +55,7 @@ func New(transport runtime.ClientTransport, formats strfmt.Registry) *Pipeline { cli := new(Pipeline) cli.Transport = transport - cli.PipelineService = pipeline_service.New(transport, formats) - return cli } @@ -103,7 +100,7 @@ func (cfg *TransportConfig) WithSchemes(schemes []string) *TransportConfig { // Pipeline is a client for pipeline type Pipeline struct { - PipelineService *pipeline_service.Client + PipelineService pipeline_service.ClientService Transport runtime.ClientTransport } @@ -111,7 +108,5 @@ type Pipeline struct { // SetTransport changes the transport on the client and all its subresources func (c *Pipeline) SetTransport(transport runtime.ClientTransport) { c.Transport = transport - c.PipelineService.SetTransport(transport) - } diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_client.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_client.go index 63f2403fcda..a1b9f31d247 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_client.go +++ 
b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_client.go @@ -7,15 +7,40 @@ package pipeline_service import ( "github.com/go-openapi/runtime" - - strfmt "github.com/go-openapi/strfmt" + httptransport "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" ) // New creates a new pipeline service API client. -func New(transport runtime.ClientTransport, formats strfmt.Registry) *Client { +func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService { return &Client{transport: transport, formats: formats} } +// New creates a new pipeline service API client with basic auth credentials. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - user: user for basic authentication header. +// - password: password for basic authentication header. +func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BasicAuth(user, password) + return &Client{transport: transport, formats: strfmt.Default} +} + +// New creates a new pipeline service API client with a bearer token for authentication. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - bearerToken: bearer token for Bearer authentication header. +func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BearerToken(bearerToken) + return &Client{transport: transport, formats: strfmt.Default} +} + /* Client for pipeline service API */ @@ -24,16 +49,43 @@ type Client struct { formats strfmt.Registry } +// ClientOption may be used to customize the behavior of Client methods. 
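+//
+// For example (a sketch only; nothing below is part of the generated API), an
+// option that overrides the per-operation context could be written as:
+//
+//    func withContext(ctx context.Context) ClientOption {
+//        return func(op *runtime.ClientOperation) {
+//            op.Context = ctx
+//        }
+//    }
+//
+// and passed as a trailing argument to any ClientService method.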
+type ClientOption func(*runtime.ClientOperation) + +// ClientService is the interface for Client methods +type ClientService interface { + PipelineServiceCreatePipeline(params *PipelineServiceCreatePipelineParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceCreatePipelineOK, error) + + PipelineServiceCreatePipelineAndVersion(params *PipelineServiceCreatePipelineAndVersionParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceCreatePipelineAndVersionOK, error) + + PipelineServiceCreatePipelineVersion(params *PipelineServiceCreatePipelineVersionParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceCreatePipelineVersionOK, error) + + PipelineServiceDeletePipeline(params *PipelineServiceDeletePipelineParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceDeletePipelineOK, error) + + PipelineServiceDeletePipelineVersion(params *PipelineServiceDeletePipelineVersionParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceDeletePipelineVersionOK, error) + + PipelineServiceGetPipeline(params *PipelineServiceGetPipelineParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceGetPipelineOK, error) + + PipelineServiceGetPipelineByName(params *PipelineServiceGetPipelineByNameParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceGetPipelineByNameOK, error) + + PipelineServiceGetPipelineVersion(params *PipelineServiceGetPipelineVersionParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceGetPipelineVersionOK, error) + + PipelineServiceListPipelineVersions(params *PipelineServiceListPipelineVersionsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceListPipelineVersionsOK, error) + + PipelineServiceListPipelines(params *PipelineServiceListPipelinesParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceListPipelinesOK, error) + + SetTransport(transport runtime.ClientTransport) +} + /* PipelineServiceCreatePipeline creates a pipeline */ -func (a *Client) PipelineServiceCreatePipeline(params *PipelineServiceCreatePipelineParams, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceCreatePipelineOK, error) { +func (a *Client) PipelineServiceCreatePipeline(params *PipelineServiceCreatePipelineParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceCreatePipelineOK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceCreatePipelineParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_CreatePipeline", Method: "POST", PathPattern: "/apis/v2beta1/pipelines", @@ -45,24 +97,33 @@ func (a *Client) PipelineServiceCreatePipeline(params *PipelineServiceCreatePipe AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceCreatePipelineOK), nil - + success, ok := result.(*PipelineServiceCreatePipelineOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceCreatePipelineDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, 
unexpectedSuccess.Code()) } /* PipelineServiceCreatePipelineAndVersion creates a new pipeline and a new pipeline version in a single transaction */ -func (a *Client) PipelineServiceCreatePipelineAndVersion(params *PipelineServiceCreatePipelineAndVersionParams, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceCreatePipelineAndVersionOK, error) { +func (a *Client) PipelineServiceCreatePipelineAndVersion(params *PipelineServiceCreatePipelineAndVersionParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceCreatePipelineAndVersionOK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceCreatePipelineAndVersionParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_CreatePipelineAndVersion", Method: "POST", PathPattern: "/apis/v2beta1/pipelines/create", @@ -74,24 +135,33 @@ func (a *Client) PipelineServiceCreatePipelineAndVersion(params *PipelineService AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceCreatePipelineAndVersionOK), nil - + success, ok := result.(*PipelineServiceCreatePipelineAndVersionOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceCreatePipelineAndVersionDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* PipelineServiceCreatePipelineVersion adds a pipeline version to the specified pipeline ID */ -func (a *Client) PipelineServiceCreatePipelineVersion(params *PipelineServiceCreatePipelineVersionParams, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceCreatePipelineVersionOK, error) { +func (a *Client) PipelineServiceCreatePipelineVersion(params *PipelineServiceCreatePipelineVersionParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceCreatePipelineVersionOK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceCreatePipelineVersionParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_CreatePipelineVersion", Method: "POST", PathPattern: "/apis/v2beta1/pipelines/{pipeline_id}/versions", @@ -103,24 +173,33 @@ func (a *Client) PipelineServiceCreatePipelineVersion(params *PipelineServiceCre AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceCreatePipelineVersionOK), nil - + success, ok := result.(*PipelineServiceCreatePipelineVersionOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceCreatePipelineVersionDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* -PipelineServiceDeletePipeline deletes an empty pipeline by ID returns error if the pipeline has pipeline versions +PipelineServiceDeletePipeline deletes a pipeline by ID if cascade is false default it returns an error if the pipeline has any versions if cascade is true it will also delete 
all pipeline versions */ -func (a *Client) PipelineServiceDeletePipeline(params *PipelineServiceDeletePipelineParams, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceDeletePipelineOK, error) { +func (a *Client) PipelineServiceDeletePipeline(params *PipelineServiceDeletePipelineParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceDeletePipelineOK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceDeletePipelineParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_DeletePipeline", Method: "DELETE", PathPattern: "/apis/v2beta1/pipelines/{pipeline_id}", @@ -132,24 +211,33 @@ func (a *Client) PipelineServiceDeletePipeline(params *PipelineServiceDeletePipe AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceDeletePipelineOK), nil - + success, ok := result.(*PipelineServiceDeletePipelineOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceDeletePipelineDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* PipelineServiceDeletePipelineVersion deletes a specific pipeline version by pipeline version ID and pipeline ID */ -func (a *Client) PipelineServiceDeletePipelineVersion(params *PipelineServiceDeletePipelineVersionParams, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceDeletePipelineVersionOK, error) { +func (a *Client) PipelineServiceDeletePipelineVersion(params *PipelineServiceDeletePipelineVersionParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceDeletePipelineVersionOK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceDeletePipelineVersionParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_DeletePipelineVersion", Method: "DELETE", PathPattern: "/apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id}", @@ -161,24 +249,33 @@ func (a *Client) PipelineServiceDeletePipelineVersion(params *PipelineServiceDel AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceDeletePipelineVersionOK), nil - + success, ok := result.(*PipelineServiceDeletePipelineVersionOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceDeletePipelineVersionDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* PipelineServiceGetPipeline finds a specific pipeline by ID */ -func (a *Client) PipelineServiceGetPipeline(params *PipelineServiceGetPipelineParams, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceGetPipelineOK, error) { +func (a *Client) PipelineServiceGetPipeline(params *PipelineServiceGetPipelineParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceGetPipelineOK, error) { // TODO: Validate the params before sending if 
params == nil { params = NewPipelineServiceGetPipelineParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_GetPipeline", Method: "GET", PathPattern: "/apis/v2beta1/pipelines/{pipeline_id}", @@ -190,24 +287,33 @@ func (a *Client) PipelineServiceGetPipeline(params *PipelineServiceGetPipelinePa AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceGetPipelineOK), nil - + success, ok := result.(*PipelineServiceGetPipelineOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceGetPipelineDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* PipelineServiceGetPipelineByName finds a specific pipeline by name and namespace */ -func (a *Client) PipelineServiceGetPipelineByName(params *PipelineServiceGetPipelineByNameParams, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceGetPipelineByNameOK, error) { +func (a *Client) PipelineServiceGetPipelineByName(params *PipelineServiceGetPipelineByNameParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceGetPipelineByNameOK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceGetPipelineByNameParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_GetPipelineByName", Method: "GET", PathPattern: "/apis/v2beta1/pipelines/names/{name}", @@ -219,24 +325,33 @@ func (a *Client) PipelineServiceGetPipelineByName(params *PipelineServiceGetPipe AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceGetPipelineByNameOK), nil - + success, ok := result.(*PipelineServiceGetPipelineByNameOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceGetPipelineByNameDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* PipelineServiceGetPipelineVersion gets a pipeline version by pipeline version ID and pipeline ID */ -func (a *Client) PipelineServiceGetPipelineVersion(params *PipelineServiceGetPipelineVersionParams, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceGetPipelineVersionOK, error) { +func (a *Client) PipelineServiceGetPipelineVersion(params *PipelineServiceGetPipelineVersionParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceGetPipelineVersionOK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceGetPipelineVersionParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_GetPipelineVersion", Method: "GET", PathPattern: "/apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id}", @@ -248,24 +363,33 @@ func (a *Client) PipelineServiceGetPipelineVersion(params *PipelineServiceGetPip AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + 
for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceGetPipelineVersionOK), nil - + success, ok := result.(*PipelineServiceGetPipelineVersionOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceGetPipelineVersionDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* PipelineServiceListPipelineVersions lists all pipeline versions of a given pipeline ID */ -func (a *Client) PipelineServiceListPipelineVersions(params *PipelineServiceListPipelineVersionsParams, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceListPipelineVersionsOK, error) { +func (a *Client) PipelineServiceListPipelineVersions(params *PipelineServiceListPipelineVersionsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceListPipelineVersionsOK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceListPipelineVersionsParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_ListPipelineVersions", Method: "GET", PathPattern: "/apis/v2beta1/pipelines/{pipeline_id}/versions", @@ -277,24 +401,33 @@ func (a *Client) PipelineServiceListPipelineVersions(params *PipelineServiceList AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceListPipelineVersionsOK), nil - + success, ok := result.(*PipelineServiceListPipelineVersionsOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceListPipelineVersionsDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* PipelineServiceListPipelines finds all pipelines within a namespace */ -func (a *Client) PipelineServiceListPipelines(params *PipelineServiceListPipelinesParams, authInfo runtime.ClientAuthInfoWriter) (*PipelineServiceListPipelinesOK, error) { +func (a *Client) PipelineServiceListPipelines(params *PipelineServiceListPipelinesParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*PipelineServiceListPipelinesOK, error) { // TODO: Validate the params before sending if params == nil { params = NewPipelineServiceListPipelinesParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "PipelineService_ListPipelines", Method: "GET", PathPattern: "/apis/v2beta1/pipelines", @@ -306,12 +439,22 @@ func (a *Client) PipelineServiceListPipelines(params *PipelineServiceListPipelin AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*PipelineServiceListPipelinesOK), nil - + success, ok := result.(*PipelineServiceListPipelinesOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*PipelineServiceListPipelinesDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", 
unexpectedSuccess, unexpectedSuccess.Code()) } // SetTransport changes the transport on the client diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_and_version_parameters.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_and_version_parameters.go index 869fee1da35..b90234313f3 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_and_version_parameters.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_and_version_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" ) -// NewPipelineServiceCreatePipelineAndVersionParams creates a new PipelineServiceCreatePipelineAndVersionParams object -// with the default values initialized. +// NewPipelineServiceCreatePipelineAndVersionParams creates a new PipelineServiceCreatePipelineAndVersionParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceCreatePipelineAndVersionParams() *PipelineServiceCreatePipelineAndVersionParams { - var () return &PipelineServiceCreatePipelineAndVersionParams{ - timeout: cr.DefaultTimeout, } } // NewPipelineServiceCreatePipelineAndVersionParamsWithTimeout creates a new PipelineServiceCreatePipelineAndVersionParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceCreatePipelineAndVersionParamsWithTimeout(timeout time.Duration) *PipelineServiceCreatePipelineAndVersionParams { - var () return &PipelineServiceCreatePipelineAndVersionParams{ - timeout: timeout, } } // NewPipelineServiceCreatePipelineAndVersionParamsWithContext creates a new PipelineServiceCreatePipelineAndVersionParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceCreatePipelineAndVersionParamsWithContext(ctx context.Context) *PipelineServiceCreatePipelineAndVersionParams { - var () return &PipelineServiceCreatePipelineAndVersionParams{ - Context: ctx, } } // NewPipelineServiceCreatePipelineAndVersionParamsWithHTTPClient creates a new PipelineServiceCreatePipelineAndVersionParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. 
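+//
+// A sketch of supplying a custom client (the timeout value is illustrative):
+//
+//    httpClient := &http.Client{Timeout: 60 * time.Second}
+//    params := NewPipelineServiceCreatePipelineAndVersionParamsWithHTTPClient(httpClient)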
func NewPipelineServiceCreatePipelineAndVersionParamsWithHTTPClient(client *http.Client) *PipelineServiceCreatePipelineAndVersionParams { - var () return &PipelineServiceCreatePipelineAndVersionParams{ HTTPClient: client, } } -/*PipelineServiceCreatePipelineAndVersionParams contains all the parameters to send to the API endpoint -for the pipeline service create pipeline and version operation typically these are written to a http.Request +/* +PipelineServiceCreatePipelineAndVersionParams contains all the parameters to send to the API endpoint + + for the pipeline service create pipeline and version operation. + + Typically these are written to a http.Request. */ type PipelineServiceCreatePipelineAndVersionParams struct { - /*Body*/ + // Body. Body *pipeline_model.V2beta1CreatePipelineAndVersionRequest timeout time.Duration @@ -71,6 +71,21 @@ type PipelineServiceCreatePipelineAndVersionParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service create pipeline and version params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceCreatePipelineAndVersionParams) WithDefaults() *PipelineServiceCreatePipelineAndVersionParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service create pipeline and version params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceCreatePipelineAndVersionParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the pipeline service create pipeline and version params func (o *PipelineServiceCreatePipelineAndVersionParams) WithTimeout(timeout time.Duration) *PipelineServiceCreatePipelineAndVersionParams { o.SetTimeout(timeout) @@ -122,7 +137,6 @@ func (o *PipelineServiceCreatePipelineAndVersionParams) WriteToRequest(r runtime return err } var res []error - if o.Body != nil { if err := r.SetBodyParam(o.Body); err != nil { return err diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_and_version_responses.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_and_version_responses.go index d00f98352bc..8ebfcaf81e1 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_and_version_responses.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_and_version_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" ) // PipelineServiceCreatePipelineAndVersionReader is a Reader for the PipelineServiceCreatePipelineAndVersion structure. @@ -24,14 +24,12 @@ type PipelineServiceCreatePipelineAndVersionReader struct { // ReadResponse reads a server response into the received o. 
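+// A 200 response is decoded into PipelineServiceCreatePipelineAndVersionOK;
+// any other status code is decoded into the Default variant, whose payload is
+// the GooglerpcStatus error model introduced elsewhere in this change.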
func (o *PipelineServiceCreatePipelineAndVersionReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceCreatePipelineAndVersionOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceCreatePipelineAndVersionDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceCreatePipelineAndVersionOK() *PipelineServiceCreatePipeli return &PipelineServiceCreatePipelineAndVersionOK{} } -/*PipelineServiceCreatePipelineAndVersionOK handles this case with default header values. +/* +PipelineServiceCreatePipelineAndVersionOK describes a response with status code 200, with default header values. A successful response. */ @@ -57,8 +56,48 @@ type PipelineServiceCreatePipelineAndVersionOK struct { Payload *pipeline_model.V2beta1Pipeline } +// IsSuccess returns true when this pipeline service create pipeline and version o k response has a 2xx status code +func (o *PipelineServiceCreatePipelineAndVersionOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service create pipeline and version o k response has a 3xx status code +func (o *PipelineServiceCreatePipelineAndVersionOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service create pipeline and version o k response has a 4xx status code +func (o *PipelineServiceCreatePipelineAndVersionOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service create pipeline and version o k response has a 5xx status code +func (o *PipelineServiceCreatePipelineAndVersionOK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service create pipeline and version o k response a status code equal to that given +func (o *PipelineServiceCreatePipelineAndVersionOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service create pipeline and version o k response +func (o *PipelineServiceCreatePipelineAndVersionOK) Code() int { + return 200 +} + func (o *PipelineServiceCreatePipelineAndVersionOK) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/pipelines/create][%d] pipelineServiceCreatePipelineAndVersionOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/pipelines/create][%d] pipelineServiceCreatePipelineAndVersionOK %s", 200, payload) +} + +func (o *PipelineServiceCreatePipelineAndVersionOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/pipelines/create][%d] pipelineServiceCreatePipelineAndVersionOK %s", 200, payload) +} + +func (o *PipelineServiceCreatePipelineAndVersionOK) GetPayload() *pipeline_model.V2beta1Pipeline { + return o.Payload } func (o *PipelineServiceCreatePipelineAndVersionOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewPipelineServiceCreatePipelineAndVersionDefault(code int) *PipelineServic } } -/*PipelineServiceCreatePipelineAndVersionDefault handles this case with default header values. +/* +PipelineServiceCreatePipelineAndVersionDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceCreatePipelineAndVersionDefault struct { _statusCode int - Payload *pipeline_model.RuntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service create pipeline and version default response has a 2xx status code +func (o *PipelineServiceCreatePipelineAndVersionDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service create pipeline and version default response has a 3xx status code +func (o *PipelineServiceCreatePipelineAndVersionDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service create pipeline and version default response has a 4xx status code +func (o *PipelineServiceCreatePipelineAndVersionDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service create pipeline and version default response has a 5xx status code +func (o *PipelineServiceCreatePipelineAndVersionDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service create pipeline and version default response a status code equal to that given +func (o *PipelineServiceCreatePipelineAndVersionDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service create pipeline and version default response @@ -96,12 +161,22 @@ func (o *PipelineServiceCreatePipelineAndVersionDefault) Code() int { } func (o *PipelineServiceCreatePipelineAndVersionDefault) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/pipelines/create][%d] PipelineService_CreatePipelineAndVersion default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/pipelines/create][%d] PipelineService_CreatePipelineAndVersion default %s", o._statusCode, payload) +} + +func (o *PipelineServiceCreatePipelineAndVersionDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/pipelines/create][%d] PipelineService_CreatePipelineAndVersion default %s", o._statusCode, payload) +} + +func (o *PipelineServiceCreatePipelineAndVersionDefault) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceCreatePipelineAndVersionDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.RuntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_parameters.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_parameters.go index 44dbe451746..3568596225f 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_parameters.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_parameters.go @@ -13,67 +13,82 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" 
+ "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" ) -// NewPipelineServiceCreatePipelineParams creates a new PipelineServiceCreatePipelineParams object -// with the default values initialized. +// NewPipelineServiceCreatePipelineParams creates a new PipelineServiceCreatePipelineParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceCreatePipelineParams() *PipelineServiceCreatePipelineParams { - var () return &PipelineServiceCreatePipelineParams{ - timeout: cr.DefaultTimeout, } } // NewPipelineServiceCreatePipelineParamsWithTimeout creates a new PipelineServiceCreatePipelineParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceCreatePipelineParamsWithTimeout(timeout time.Duration) *PipelineServiceCreatePipelineParams { - var () return &PipelineServiceCreatePipelineParams{ - timeout: timeout, } } // NewPipelineServiceCreatePipelineParamsWithContext creates a new PipelineServiceCreatePipelineParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceCreatePipelineParamsWithContext(ctx context.Context) *PipelineServiceCreatePipelineParams { - var () return &PipelineServiceCreatePipelineParams{ - Context: ctx, } } // NewPipelineServiceCreatePipelineParamsWithHTTPClient creates a new PipelineServiceCreatePipelineParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceCreatePipelineParamsWithHTTPClient(client *http.Client) *PipelineServiceCreatePipelineParams { - var () return &PipelineServiceCreatePipelineParams{ HTTPClient: client, } } -/*PipelineServiceCreatePipelineParams contains all the parameters to send to the API endpoint -for the pipeline service create pipeline operation typically these are written to a http.Request +/* +PipelineServiceCreatePipelineParams contains all the parameters to send to the API endpoint + + for the pipeline service create pipeline operation. + + Typically these are written to a http.Request. */ type PipelineServiceCreatePipelineParams struct { - /*Body - Required input. Pipeline that needs to be created. + /* Pipeline. + Required input. Pipeline that needs to be created. */ - Body *pipeline_model.V2beta1Pipeline + Pipeline *pipeline_model.V2beta1Pipeline timeout time.Duration Context context.Context HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service create pipeline params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceCreatePipelineParams) WithDefaults() *PipelineServiceCreatePipelineParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service create pipeline params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *PipelineServiceCreatePipelineParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the pipeline service create pipeline params func (o *PipelineServiceCreatePipelineParams) WithTimeout(timeout time.Duration) *PipelineServiceCreatePipelineParams { o.SetTimeout(timeout) @@ -107,15 +122,15 @@ func (o *PipelineServiceCreatePipelineParams) SetHTTPClient(client *http.Client) o.HTTPClient = client } -// WithBody adds the body to the pipeline service create pipeline params -func (o *PipelineServiceCreatePipelineParams) WithBody(body *pipeline_model.V2beta1Pipeline) *PipelineServiceCreatePipelineParams { - o.SetBody(body) +// WithPipeline adds the pipeline to the pipeline service create pipeline params +func (o *PipelineServiceCreatePipelineParams) WithPipeline(pipeline *pipeline_model.V2beta1Pipeline) *PipelineServiceCreatePipelineParams { + o.SetPipeline(pipeline) return o } -// SetBody adds the body to the pipeline service create pipeline params -func (o *PipelineServiceCreatePipelineParams) SetBody(body *pipeline_model.V2beta1Pipeline) { - o.Body = body +// SetPipeline adds the pipeline to the pipeline service create pipeline params +func (o *PipelineServiceCreatePipelineParams) SetPipeline(pipeline *pipeline_model.V2beta1Pipeline) { + o.Pipeline = pipeline } // WriteToRequest writes these params to a swagger request @@ -125,9 +140,8 @@ func (o *PipelineServiceCreatePipelineParams) WriteToRequest(r runtime.ClientReq return err } var res []error - - if o.Body != nil { - if err := r.SetBodyParam(o.Body); err != nil { + if o.Pipeline != nil { + if err := r.SetBodyParam(o.Pipeline); err != nil { return err } } diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_responses.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_responses.go index 6a471ddedfc..6d124b27b5b 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_responses.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" ) // PipelineServiceCreatePipelineReader is a Reader for the PipelineServiceCreatePipeline structure. @@ -24,14 +24,12 @@ type PipelineServiceCreatePipelineReader struct { // ReadResponse reads a server response into the received o. 
func (o *PipelineServiceCreatePipelineReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceCreatePipelineOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceCreatePipelineDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceCreatePipelineOK() *PipelineServiceCreatePipelineOK { return &PipelineServiceCreatePipelineOK{} } -/*PipelineServiceCreatePipelineOK handles this case with default header values. +/* +PipelineServiceCreatePipelineOK describes a response with status code 200, with default header values. A successful response. */ @@ -57,8 +56,48 @@ type PipelineServiceCreatePipelineOK struct { Payload *pipeline_model.V2beta1Pipeline } +// IsSuccess returns true when this pipeline service create pipeline o k response has a 2xx status code +func (o *PipelineServiceCreatePipelineOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service create pipeline o k response has a 3xx status code +func (o *PipelineServiceCreatePipelineOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service create pipeline o k response has a 4xx status code +func (o *PipelineServiceCreatePipelineOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service create pipeline o k response has a 5xx status code +func (o *PipelineServiceCreatePipelineOK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service create pipeline o k response a status code equal to that given +func (o *PipelineServiceCreatePipelineOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service create pipeline o k response +func (o *PipelineServiceCreatePipelineOK) Code() int { + return 200 +} + func (o *PipelineServiceCreatePipelineOK) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/pipelines][%d] pipelineServiceCreatePipelineOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/pipelines][%d] pipelineServiceCreatePipelineOK %s", 200, payload) +} + +func (o *PipelineServiceCreatePipelineOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/pipelines][%d] pipelineServiceCreatePipelineOK %s", 200, payload) +} + +func (o *PipelineServiceCreatePipelineOK) GetPayload() *pipeline_model.V2beta1Pipeline { + return o.Payload } func (o *PipelineServiceCreatePipelineOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewPipelineServiceCreatePipelineDefault(code int) *PipelineServiceCreatePip } } -/*PipelineServiceCreatePipelineDefault handles this case with default header values. +/* +PipelineServiceCreatePipelineDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceCreatePipelineDefault struct { _statusCode int - Payload *pipeline_model.RuntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service create pipeline default response has a 2xx status code +func (o *PipelineServiceCreatePipelineDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service create pipeline default response has a 3xx status code +func (o *PipelineServiceCreatePipelineDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service create pipeline default response has a 4xx status code +func (o *PipelineServiceCreatePipelineDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service create pipeline default response has a 5xx status code +func (o *PipelineServiceCreatePipelineDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service create pipeline default response a status code equal to that given +func (o *PipelineServiceCreatePipelineDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service create pipeline default response @@ -96,12 +161,22 @@ func (o *PipelineServiceCreatePipelineDefault) Code() int { } func (o *PipelineServiceCreatePipelineDefault) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/pipelines][%d] PipelineService_CreatePipeline default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/pipelines][%d] PipelineService_CreatePipeline default %s", o._statusCode, payload) +} + +func (o *PipelineServiceCreatePipelineDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/pipelines][%d] PipelineService_CreatePipeline default %s", o._statusCode, payload) +} + +func (o *PipelineServiceCreatePipelineDefault) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceCreatePipelineDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.RuntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_version_parameters.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_version_parameters.go index 4d295dbd391..17858a3f2ad 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_version_parameters.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_version_parameters.go @@ -13,72 +13,88 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" ) -// NewPipelineServiceCreatePipelineVersionParams creates a new PipelineServiceCreatePipelineVersionParams object 
-// with the default values initialized. +// NewPipelineServiceCreatePipelineVersionParams creates a new PipelineServiceCreatePipelineVersionParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceCreatePipelineVersionParams() *PipelineServiceCreatePipelineVersionParams { - var () return &PipelineServiceCreatePipelineVersionParams{ - timeout: cr.DefaultTimeout, } } // NewPipelineServiceCreatePipelineVersionParamsWithTimeout creates a new PipelineServiceCreatePipelineVersionParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceCreatePipelineVersionParamsWithTimeout(timeout time.Duration) *PipelineServiceCreatePipelineVersionParams { - var () return &PipelineServiceCreatePipelineVersionParams{ - timeout: timeout, } } // NewPipelineServiceCreatePipelineVersionParamsWithContext creates a new PipelineServiceCreatePipelineVersionParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceCreatePipelineVersionParamsWithContext(ctx context.Context) *PipelineServiceCreatePipelineVersionParams { - var () return &PipelineServiceCreatePipelineVersionParams{ - Context: ctx, } } // NewPipelineServiceCreatePipelineVersionParamsWithHTTPClient creates a new PipelineServiceCreatePipelineVersionParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceCreatePipelineVersionParamsWithHTTPClient(client *http.Client) *PipelineServiceCreatePipelineVersionParams { - var () return &PipelineServiceCreatePipelineVersionParams{ HTTPClient: client, } } -/*PipelineServiceCreatePipelineVersionParams contains all the parameters to send to the API endpoint -for the pipeline service create pipeline version operation typically these are written to a http.Request +/* +PipelineServiceCreatePipelineVersionParams contains all the parameters to send to the API endpoint + + for the pipeline service create pipeline version operation. + + Typically these are written to a http.Request. */ type PipelineServiceCreatePipelineVersionParams struct { - /*Body - Required input. Pipeline version ID to be created. + /* PipelineID. + Required input. ID of the parent pipeline. */ - Body *pipeline_model.V2beta1PipelineVersion - /*PipelineID - Required input. ID of the parent pipeline. + PipelineID string + + /* PipelineVersion. + Required input. Pipeline version ID to be created. */ - PipelineID string + PipelineVersion *pipeline_model.V2beta1PipelineVersion timeout time.Duration Context context.Context HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service create pipeline version params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceCreatePipelineVersionParams) WithDefaults() *PipelineServiceCreatePipelineVersionParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service create pipeline version params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *PipelineServiceCreatePipelineVersionParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the pipeline service create pipeline version params func (o *PipelineServiceCreatePipelineVersionParams) WithTimeout(timeout time.Duration) *PipelineServiceCreatePipelineVersionParams { o.SetTimeout(timeout) @@ -112,17 +128,6 @@ func (o *PipelineServiceCreatePipelineVersionParams) SetHTTPClient(client *http. o.HTTPClient = client } -// WithBody adds the body to the pipeline service create pipeline version params -func (o *PipelineServiceCreatePipelineVersionParams) WithBody(body *pipeline_model.V2beta1PipelineVersion) *PipelineServiceCreatePipelineVersionParams { - o.SetBody(body) - return o -} - -// SetBody adds the body to the pipeline service create pipeline version params -func (o *PipelineServiceCreatePipelineVersionParams) SetBody(body *pipeline_model.V2beta1PipelineVersion) { - o.Body = body -} - // WithPipelineID adds the pipelineID to the pipeline service create pipeline version params func (o *PipelineServiceCreatePipelineVersionParams) WithPipelineID(pipelineID string) *PipelineServiceCreatePipelineVersionParams { o.SetPipelineID(pipelineID) @@ -134,6 +139,17 @@ func (o *PipelineServiceCreatePipelineVersionParams) SetPipelineID(pipelineID st o.PipelineID = pipelineID } +// WithPipelineVersion adds the pipelineVersion to the pipeline service create pipeline version params +func (o *PipelineServiceCreatePipelineVersionParams) WithPipelineVersion(pipelineVersion *pipeline_model.V2beta1PipelineVersion) *PipelineServiceCreatePipelineVersionParams { + o.SetPipelineVersion(pipelineVersion) + return o +} + +// SetPipelineVersion adds the pipelineVersion to the pipeline service create pipeline version params +func (o *PipelineServiceCreatePipelineVersionParams) SetPipelineVersion(pipelineVersion *pipeline_model.V2beta1PipelineVersion) { + o.PipelineVersion = pipelineVersion +} + // WriteToRequest writes these params to a swagger request func (o *PipelineServiceCreatePipelineVersionParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { @@ -142,16 +158,15 @@ func (o *PipelineServiceCreatePipelineVersionParams) WriteToRequest(r runtime.Cl } var res []error - if o.Body != nil { - if err := r.SetBodyParam(o.Body); err != nil { - return err - } - } - // path param pipeline_id if err := r.SetPathParam("pipeline_id", o.PipelineID); err != nil { return err } + if o.PipelineVersion != nil { + if err := r.SetBodyParam(o.PipelineVersion); err != nil { + return err + } + } if len(res) > 0 { return errors.CompositeValidationError(res...) 
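Editor's note on the regenerated create-pipeline-version parameters above: the request-body setter is renamed from WithBody/SetBody to WithPipelineVersion/SetPipelineVersion, so existing callers need a small update. A minimal sketch of the new builder usage, assuming the standard go-swagger client wiring (the helper name and timeout value are illustrative, not part of this change):

package example

import (
	"time"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service"
	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model"
)

// buildCreateVersionParams builds params with the regenerated API: the parent pipeline ID
// stays a path parameter, while the version spec is now attached via WithPipelineVersion
// (previously WithBody).
func buildCreateVersionParams(pipelineID string, version *pipeline_model.V2beta1PipelineVersion) *pipeline_service.PipelineServiceCreatePipelineVersionParams {
	return pipeline_service.NewPipelineServiceCreatePipelineVersionParams().
		WithTimeout(30 * time.Second).
		WithPipelineID(pipelineID).
		WithPipelineVersion(version)
}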
diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_version_responses.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_version_responses.go index bd5641a7a9a..42116214156 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_version_responses.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_create_pipeline_version_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" ) // PipelineServiceCreatePipelineVersionReader is a Reader for the PipelineServiceCreatePipelineVersion structure. @@ -24,14 +24,12 @@ type PipelineServiceCreatePipelineVersionReader struct { // ReadResponse reads a server response into the received o. func (o *PipelineServiceCreatePipelineVersionReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceCreatePipelineVersionOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceCreatePipelineVersionDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceCreatePipelineVersionOK() *PipelineServiceCreatePipelineV return &PipelineServiceCreatePipelineVersionOK{} } -/*PipelineServiceCreatePipelineVersionOK handles this case with default header values. +/* +PipelineServiceCreatePipelineVersionOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type PipelineServiceCreatePipelineVersionOK struct { Payload *pipeline_model.V2beta1PipelineVersion } +// IsSuccess returns true when this pipeline service create pipeline version o k response has a 2xx status code +func (o *PipelineServiceCreatePipelineVersionOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service create pipeline version o k response has a 3xx status code +func (o *PipelineServiceCreatePipelineVersionOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service create pipeline version o k response has a 4xx status code +func (o *PipelineServiceCreatePipelineVersionOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service create pipeline version o k response has a 5xx status code +func (o *PipelineServiceCreatePipelineVersionOK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service create pipeline version o k response a status code equal to that given +func (o *PipelineServiceCreatePipelineVersionOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service create pipeline version o k response +func (o *PipelineServiceCreatePipelineVersionOK) Code() int { + return 200 +} + func (o *PipelineServiceCreatePipelineVersionOK) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/pipelines/{pipeline_id}/versions][%d] pipelineServiceCreatePipelineVersionOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/pipelines/{pipeline_id}/versions][%d] pipelineServiceCreatePipelineVersionOK %s", 200, payload) +} + +func (o *PipelineServiceCreatePipelineVersionOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/pipelines/{pipeline_id}/versions][%d] pipelineServiceCreatePipelineVersionOK %s", 200, payload) +} + +func (o *PipelineServiceCreatePipelineVersionOK) GetPayload() *pipeline_model.V2beta1PipelineVersion { + return o.Payload } func (o *PipelineServiceCreatePipelineVersionOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewPipelineServiceCreatePipelineVersionDefault(code int) *PipelineServiceCr } } -/*PipelineServiceCreatePipelineVersionDefault handles this case with default header values. +/* +PipelineServiceCreatePipelineVersionDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceCreatePipelineVersionDefault struct { _statusCode int - Payload *pipeline_model.RuntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service create pipeline version default response has a 2xx status code +func (o *PipelineServiceCreatePipelineVersionDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service create pipeline version default response has a 3xx status code +func (o *PipelineServiceCreatePipelineVersionDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service create pipeline version default response has a 4xx status code +func (o *PipelineServiceCreatePipelineVersionDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service create pipeline version default response has a 5xx status code +func (o *PipelineServiceCreatePipelineVersionDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service create pipeline version default response a status code equal to that given +func (o *PipelineServiceCreatePipelineVersionDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service create pipeline version default response @@ -96,12 +161,22 @@ func (o *PipelineServiceCreatePipelineVersionDefault) Code() int { } func (o *PipelineServiceCreatePipelineVersionDefault) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/pipelines/{pipeline_id}/versions][%d] PipelineService_CreatePipelineVersion default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/pipelines/{pipeline_id}/versions][%d] PipelineService_CreatePipelineVersion default %s", o._statusCode, payload) +} + +func (o *PipelineServiceCreatePipelineVersionDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/pipelines/{pipeline_id}/versions][%d] PipelineService_CreatePipelineVersion default %s", o._statusCode, payload) +} + +func (o *PipelineServiceCreatePipelineVersionDefault) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceCreatePipelineVersionDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.RuntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_parameters.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_parameters.go index 8c20914c3bf..09f2acc089e 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_parameters.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_parameters.go @@ -13,57 +13,65 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" ) -// NewPipelineServiceDeletePipelineParams creates a new 
PipelineServiceDeletePipelineParams object -// with the default values initialized. +// NewPipelineServiceDeletePipelineParams creates a new PipelineServiceDeletePipelineParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceDeletePipelineParams() *PipelineServiceDeletePipelineParams { - var () return &PipelineServiceDeletePipelineParams{ - timeout: cr.DefaultTimeout, } } // NewPipelineServiceDeletePipelineParamsWithTimeout creates a new PipelineServiceDeletePipelineParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceDeletePipelineParamsWithTimeout(timeout time.Duration) *PipelineServiceDeletePipelineParams { - var () return &PipelineServiceDeletePipelineParams{ - timeout: timeout, } } // NewPipelineServiceDeletePipelineParamsWithContext creates a new PipelineServiceDeletePipelineParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceDeletePipelineParamsWithContext(ctx context.Context) *PipelineServiceDeletePipelineParams { - var () return &PipelineServiceDeletePipelineParams{ - Context: ctx, } } // NewPipelineServiceDeletePipelineParamsWithHTTPClient creates a new PipelineServiceDeletePipelineParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceDeletePipelineParamsWithHTTPClient(client *http.Client) *PipelineServiceDeletePipelineParams { - var () return &PipelineServiceDeletePipelineParams{ HTTPClient: client, } } -/*PipelineServiceDeletePipelineParams contains all the parameters to send to the API endpoint -for the pipeline service delete pipeline operation typically these are written to a http.Request +/* +PipelineServiceDeletePipelineParams contains all the parameters to send to the API endpoint + + for the pipeline service delete pipeline operation. + + Typically these are written to a http.Request. */ type PipelineServiceDeletePipelineParams struct { - /*PipelineID - Required input. ID of the pipeline to be deleted. + /* Cascade. + Optional. If true, the pipeline and all its versions will be deleted. + If false (default), only the pipeline will be deleted if it has no versions. + */ + Cascade *bool + + /* PipelineID. + + Required input. ID of the pipeline to be deleted. */ PipelineID string @@ -72,6 +80,21 @@ type PipelineServiceDeletePipelineParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service delete pipeline params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceDeletePipelineParams) WithDefaults() *PipelineServiceDeletePipelineParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service delete pipeline params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *PipelineServiceDeletePipelineParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the pipeline service delete pipeline params func (o *PipelineServiceDeletePipelineParams) WithTimeout(timeout time.Duration) *PipelineServiceDeletePipelineParams { o.SetTimeout(timeout) @@ -105,6 +128,17 @@ func (o *PipelineServiceDeletePipelineParams) SetHTTPClient(client *http.Client) o.HTTPClient = client } +// WithCascade adds the cascade to the pipeline service delete pipeline params +func (o *PipelineServiceDeletePipelineParams) WithCascade(cascade *bool) *PipelineServiceDeletePipelineParams { + o.SetCascade(cascade) + return o +} + +// SetCascade adds the cascade to the pipeline service delete pipeline params +func (o *PipelineServiceDeletePipelineParams) SetCascade(cascade *bool) { + o.Cascade = cascade +} + // WithPipelineID adds the pipelineID to the pipeline service delete pipeline params func (o *PipelineServiceDeletePipelineParams) WithPipelineID(pipelineID string) *PipelineServiceDeletePipelineParams { o.SetPipelineID(pipelineID) @@ -124,6 +158,23 @@ func (o *PipelineServiceDeletePipelineParams) WriteToRequest(r runtime.ClientReq } var res []error + if o.Cascade != nil { + + // query param cascade + var qrCascade bool + + if o.Cascade != nil { + qrCascade = *o.Cascade + } + qCascade := swag.FormatBool(qrCascade) + if qCascade != "" { + + if err := r.SetQueryParam("cascade", qCascade); err != nil { + return err + } + } + } + // path param pipeline_id if err := r.SetPathParam("pipeline_id", o.PipelineID); err != nil { return err diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_responses.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_responses.go index 358acdceb3d..f16b165fe6b 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_responses.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" ) // PipelineServiceDeletePipelineReader is a Reader for the PipelineServiceDeletePipeline structure. @@ -24,14 +24,12 @@ type PipelineServiceDeletePipelineReader struct { // ReadResponse reads a server response into the received o. func (o *PipelineServiceDeletePipelineReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceDeletePipelineOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceDeletePipelineDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceDeletePipelineOK() *PipelineServiceDeletePipelineOK { return &PipelineServiceDeletePipelineOK{} } -/*PipelineServiceDeletePipelineOK handles this case with default header values. 
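Editor's note on the regenerated delete-pipeline parameters above: they gain an optional cascade query flag that deletes the pipeline together with all of its versions. A small sketch of how a caller might opt in, assuming go-openapi's swag helpers for pointer literals; the helper name is illustrative:

package example

import (
	"github.com/go-openapi/swag"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service"
)

// buildCascadeDelete asks the server to delete the pipeline and every version it owns.
// Leaving Cascade unset keeps the previous behaviour: the delete only succeeds for a
// pipeline with no remaining versions.
func buildCascadeDelete(pipelineID string) *pipeline_service.PipelineServiceDeletePipelineParams {
	return pipeline_service.NewPipelineServiceDeletePipelineParams().
		WithPipelineID(pipelineID).
		WithCascade(swag.Bool(true))
}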
+/* +PipelineServiceDeletePipelineOK describes a response with status code 200, with default header values. A successful response. */ @@ -57,8 +56,48 @@ type PipelineServiceDeletePipelineOK struct { Payload interface{} } +// IsSuccess returns true when this pipeline service delete pipeline o k response has a 2xx status code +func (o *PipelineServiceDeletePipelineOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service delete pipeline o k response has a 3xx status code +func (o *PipelineServiceDeletePipelineOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service delete pipeline o k response has a 4xx status code +func (o *PipelineServiceDeletePipelineOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service delete pipeline o k response has a 5xx status code +func (o *PipelineServiceDeletePipelineOK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service delete pipeline o k response a status code equal to that given +func (o *PipelineServiceDeletePipelineOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service delete pipeline o k response +func (o *PipelineServiceDeletePipelineOK) Code() int { + return 200 +} + func (o *PipelineServiceDeletePipelineOK) Error() string { - return fmt.Sprintf("[DELETE /apis/v2beta1/pipelines/{pipeline_id}][%d] pipelineServiceDeletePipelineOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v2beta1/pipelines/{pipeline_id}][%d] pipelineServiceDeletePipelineOK %s", 200, payload) +} + +func (o *PipelineServiceDeletePipelineOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v2beta1/pipelines/{pipeline_id}][%d] pipelineServiceDeletePipelineOK %s", 200, payload) +} + +func (o *PipelineServiceDeletePipelineOK) GetPayload() interface{} { + return o.Payload } func (o *PipelineServiceDeletePipelineOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewPipelineServiceDeletePipelineDefault(code int) *PipelineServiceDeletePip } } -/*PipelineServiceDeletePipelineDefault handles this case with default header values. +/* +PipelineServiceDeletePipelineDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceDeletePipelineDefault struct { _statusCode int - Payload *pipeline_model.RuntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service delete pipeline default response has a 2xx status code +func (o *PipelineServiceDeletePipelineDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service delete pipeline default response has a 3xx status code +func (o *PipelineServiceDeletePipelineDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service delete pipeline default response has a 4xx status code +func (o *PipelineServiceDeletePipelineDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service delete pipeline default response has a 5xx status code +func (o *PipelineServiceDeletePipelineDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service delete pipeline default response a status code equal to that given +func (o *PipelineServiceDeletePipelineDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service delete pipeline default response @@ -94,12 +159,22 @@ func (o *PipelineServiceDeletePipelineDefault) Code() int { } func (o *PipelineServiceDeletePipelineDefault) Error() string { - return fmt.Sprintf("[DELETE /apis/v2beta1/pipelines/{pipeline_id}][%d] PipelineService_DeletePipeline default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v2beta1/pipelines/{pipeline_id}][%d] PipelineService_DeletePipeline default %s", o._statusCode, payload) +} + +func (o *PipelineServiceDeletePipelineDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v2beta1/pipelines/{pipeline_id}][%d] PipelineService_DeletePipeline default %s", o._statusCode, payload) +} + +func (o *PipelineServiceDeletePipelineDefault) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceDeletePipelineDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.RuntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_version_parameters.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_version_parameters.go index de95486707c..c9dffbb298c 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_version_parameters.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_version_parameters.go @@ -13,62 +13,63 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewPipelineServiceDeletePipelineVersionParams creates a new PipelineServiceDeletePipelineVersionParams object -// with the default values initialized. 
+// NewPipelineServiceDeletePipelineVersionParams creates a new PipelineServiceDeletePipelineVersionParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceDeletePipelineVersionParams() *PipelineServiceDeletePipelineVersionParams { - var () return &PipelineServiceDeletePipelineVersionParams{ - timeout: cr.DefaultTimeout, } } // NewPipelineServiceDeletePipelineVersionParamsWithTimeout creates a new PipelineServiceDeletePipelineVersionParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceDeletePipelineVersionParamsWithTimeout(timeout time.Duration) *PipelineServiceDeletePipelineVersionParams { - var () return &PipelineServiceDeletePipelineVersionParams{ - timeout: timeout, } } // NewPipelineServiceDeletePipelineVersionParamsWithContext creates a new PipelineServiceDeletePipelineVersionParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceDeletePipelineVersionParamsWithContext(ctx context.Context) *PipelineServiceDeletePipelineVersionParams { - var () return &PipelineServiceDeletePipelineVersionParams{ - Context: ctx, } } // NewPipelineServiceDeletePipelineVersionParamsWithHTTPClient creates a new PipelineServiceDeletePipelineVersionParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceDeletePipelineVersionParamsWithHTTPClient(client *http.Client) *PipelineServiceDeletePipelineVersionParams { - var () return &PipelineServiceDeletePipelineVersionParams{ HTTPClient: client, } } -/*PipelineServiceDeletePipelineVersionParams contains all the parameters to send to the API endpoint -for the pipeline service delete pipeline version operation typically these are written to a http.Request +/* +PipelineServiceDeletePipelineVersionParams contains all the parameters to send to the API endpoint + + for the pipeline service delete pipeline version operation. + + Typically these are written to a http.Request. */ type PipelineServiceDeletePipelineVersionParams struct { - /*PipelineID - Required input. ID of the parent pipeline. + /* PipelineID. + Required input. ID of the parent pipeline. */ PipelineID string - /*PipelineVersionID - Required input. The ID of the pipeline version to be deleted. + /* PipelineVersionID. + + Required input. The ID of the pipeline version to be deleted. */ PipelineVersionID string @@ -77,6 +78,21 @@ type PipelineServiceDeletePipelineVersionParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service delete pipeline version params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceDeletePipelineVersionParams) WithDefaults() *PipelineServiceDeletePipelineVersionParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service delete pipeline version params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *PipelineServiceDeletePipelineVersionParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the pipeline service delete pipeline version params func (o *PipelineServiceDeletePipelineVersionParams) WithTimeout(timeout time.Duration) *PipelineServiceDeletePipelineVersionParams { o.SetTimeout(timeout) diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_version_responses.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_version_responses.go index bc640f2e5aa..b181e3b279d 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_version_responses.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_delete_pipeline_version_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" ) // PipelineServiceDeletePipelineVersionReader is a Reader for the PipelineServiceDeletePipelineVersion structure. @@ -24,14 +24,12 @@ type PipelineServiceDeletePipelineVersionReader struct { // ReadResponse reads a server response into the received o. func (o *PipelineServiceDeletePipelineVersionReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceDeletePipelineVersionOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceDeletePipelineVersionDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceDeletePipelineVersionOK() *PipelineServiceDeletePipelineV return &PipelineServiceDeletePipelineVersionOK{} } -/*PipelineServiceDeletePipelineVersionOK handles this case with default header values. +/* +PipelineServiceDeletePipelineVersionOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type PipelineServiceDeletePipelineVersionOK struct { Payload interface{} } +// IsSuccess returns true when this pipeline service delete pipeline version o k response has a 2xx status code +func (o *PipelineServiceDeletePipelineVersionOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service delete pipeline version o k response has a 3xx status code +func (o *PipelineServiceDeletePipelineVersionOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service delete pipeline version o k response has a 4xx status code +func (o *PipelineServiceDeletePipelineVersionOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service delete pipeline version o k response has a 5xx status code +func (o *PipelineServiceDeletePipelineVersionOK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service delete pipeline version o k response a status code equal to that given +func (o *PipelineServiceDeletePipelineVersionOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service delete pipeline version o k response +func (o *PipelineServiceDeletePipelineVersionOK) Code() int { + return 200 +} + func (o *PipelineServiceDeletePipelineVersionOK) Error() string { - return fmt.Sprintf("[DELETE /apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id}][%d] pipelineServiceDeletePipelineVersionOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id}][%d] pipelineServiceDeletePipelineVersionOK %s", 200, payload) +} + +func (o *PipelineServiceDeletePipelineVersionOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id}][%d] pipelineServiceDeletePipelineVersionOK %s", 200, payload) +} + +func (o *PipelineServiceDeletePipelineVersionOK) GetPayload() interface{} { + return o.Payload } func (o *PipelineServiceDeletePipelineVersionOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewPipelineServiceDeletePipelineVersionDefault(code int) *PipelineServiceDe } } -/*PipelineServiceDeletePipelineVersionDefault handles this case with default header values. +/* +PipelineServiceDeletePipelineVersionDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceDeletePipelineVersionDefault struct { _statusCode int - Payload *pipeline_model.RuntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service delete pipeline version default response has a 2xx status code +func (o *PipelineServiceDeletePipelineVersionDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service delete pipeline version default response has a 3xx status code +func (o *PipelineServiceDeletePipelineVersionDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service delete pipeline version default response has a 4xx status code +func (o *PipelineServiceDeletePipelineVersionDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service delete pipeline version default response has a 5xx status code +func (o *PipelineServiceDeletePipelineVersionDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service delete pipeline version default response a status code equal to that given +func (o *PipelineServiceDeletePipelineVersionDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service delete pipeline version default response @@ -94,12 +159,22 @@ func (o *PipelineServiceDeletePipelineVersionDefault) Code() int { } func (o *PipelineServiceDeletePipelineVersionDefault) Error() string { - return fmt.Sprintf("[DELETE /apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id}][%d] PipelineService_DeletePipelineVersion default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id}][%d] PipelineService_DeletePipelineVersion default %s", o._statusCode, payload) +} + +func (o *PipelineServiceDeletePipelineVersionDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id}][%d] PipelineService_DeletePipelineVersion default %s", o._statusCode, payload) +} + +func (o *PipelineServiceDeletePipelineVersionDefault) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceDeletePipelineVersionDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.RuntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_by_name_parameters.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_by_name_parameters.go index 43f95bbec21..feb627c74aa 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_by_name_parameters.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_by_name_parameters.go @@ -13,64 +13,65 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// 
NewPipelineServiceGetPipelineByNameParams creates a new PipelineServiceGetPipelineByNameParams object -// with the default values initialized. +// NewPipelineServiceGetPipelineByNameParams creates a new PipelineServiceGetPipelineByNameParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceGetPipelineByNameParams() *PipelineServiceGetPipelineByNameParams { - var () return &PipelineServiceGetPipelineByNameParams{ - timeout: cr.DefaultTimeout, } } // NewPipelineServiceGetPipelineByNameParamsWithTimeout creates a new PipelineServiceGetPipelineByNameParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceGetPipelineByNameParamsWithTimeout(timeout time.Duration) *PipelineServiceGetPipelineByNameParams { - var () return &PipelineServiceGetPipelineByNameParams{ - timeout: timeout, } } // NewPipelineServiceGetPipelineByNameParamsWithContext creates a new PipelineServiceGetPipelineByNameParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceGetPipelineByNameParamsWithContext(ctx context.Context) *PipelineServiceGetPipelineByNameParams { - var () return &PipelineServiceGetPipelineByNameParams{ - Context: ctx, } } // NewPipelineServiceGetPipelineByNameParamsWithHTTPClient creates a new PipelineServiceGetPipelineByNameParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceGetPipelineByNameParamsWithHTTPClient(client *http.Client) *PipelineServiceGetPipelineByNameParams { - var () return &PipelineServiceGetPipelineByNameParams{ HTTPClient: client, } } -/*PipelineServiceGetPipelineByNameParams contains all the parameters to send to the API endpoint -for the pipeline service get pipeline by name operation typically these are written to a http.Request +/* +PipelineServiceGetPipelineByNameParams contains all the parameters to send to the API endpoint + + for the pipeline service get pipeline by name operation. + + Typically these are written to a http.Request. */ type PipelineServiceGetPipelineByNameParams struct { - /*Name - Required input. Name of the pipeline to be retrieved. + /* Name. + Required input. Name of the pipeline to be retrieved. */ Name string - /*Namespace - Optional input. Namespace of the pipeline. + + /* Namespace. + + Optional input. Namespace of the pipeline. It could be empty if default namespaces needs to be used or if multi-user support is turned off. - */ Namespace *string @@ -79,6 +80,21 @@ type PipelineServiceGetPipelineByNameParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service get pipeline by name params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceGetPipelineByNameParams) WithDefaults() *PipelineServiceGetPipelineByNameParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service get pipeline by name params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *PipelineServiceGetPipelineByNameParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the pipeline service get pipeline by name params func (o *PipelineServiceGetPipelineByNameParams) WithTimeout(timeout time.Duration) *PipelineServiceGetPipelineByNameParams { o.SetTimeout(timeout) @@ -151,16 +167,17 @@ func (o *PipelineServiceGetPipelineByNameParams) WriteToRequest(r runtime.Client // query param namespace var qrNamespace string + if o.Namespace != nil { qrNamespace = *o.Namespace } qNamespace := qrNamespace if qNamespace != "" { + if err := r.SetQueryParam("namespace", qNamespace); err != nil { return err } } - } if len(res) > 0 { diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_by_name_responses.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_by_name_responses.go index 4c33edf2881..918ce9e13b1 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_by_name_responses.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_by_name_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" ) // PipelineServiceGetPipelineByNameReader is a Reader for the PipelineServiceGetPipelineByName structure. @@ -24,14 +24,12 @@ type PipelineServiceGetPipelineByNameReader struct { // ReadResponse reads a server response into the received o. func (o *PipelineServiceGetPipelineByNameReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceGetPipelineByNameOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceGetPipelineByNameDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceGetPipelineByNameOK() *PipelineServiceGetPipelineByNameOK return &PipelineServiceGetPipelineByNameOK{} } -/*PipelineServiceGetPipelineByNameOK handles this case with default header values. +/* +PipelineServiceGetPipelineByNameOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type PipelineServiceGetPipelineByNameOK struct { Payload *pipeline_model.V2beta1Pipeline } +// IsSuccess returns true when this pipeline service get pipeline by name o k response has a 2xx status code +func (o *PipelineServiceGetPipelineByNameOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service get pipeline by name o k response has a 3xx status code +func (o *PipelineServiceGetPipelineByNameOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service get pipeline by name o k response has a 4xx status code +func (o *PipelineServiceGetPipelineByNameOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service get pipeline by name o k response has a 5xx status code +func (o *PipelineServiceGetPipelineByNameOK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service get pipeline by name o k response a status code equal to that given +func (o *PipelineServiceGetPipelineByNameOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service get pipeline by name o k response +func (o *PipelineServiceGetPipelineByNameOK) Code() int { + return 200 +} + func (o *PipelineServiceGetPipelineByNameOK) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/pipelines/names/{name}][%d] pipelineServiceGetPipelineByNameOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/pipelines/names/{name}][%d] pipelineServiceGetPipelineByNameOK %s", 200, payload) +} + +func (o *PipelineServiceGetPipelineByNameOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/pipelines/names/{name}][%d] pipelineServiceGetPipelineByNameOK %s", 200, payload) +} + +func (o *PipelineServiceGetPipelineByNameOK) GetPayload() *pipeline_model.V2beta1Pipeline { + return o.Payload } func (o *PipelineServiceGetPipelineByNameOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewPipelineServiceGetPipelineByNameDefault(code int) *PipelineServiceGetPip } } -/*PipelineServiceGetPipelineByNameDefault handles this case with default header values. +/* +PipelineServiceGetPipelineByNameDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceGetPipelineByNameDefault struct { _statusCode int - Payload *pipeline_model.RuntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service get pipeline by name default response has a 2xx status code +func (o *PipelineServiceGetPipelineByNameDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service get pipeline by name default response has a 3xx status code +func (o *PipelineServiceGetPipelineByNameDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service get pipeline by name default response has a 4xx status code +func (o *PipelineServiceGetPipelineByNameDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service get pipeline by name default response has a 5xx status code +func (o *PipelineServiceGetPipelineByNameDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service get pipeline by name default response a status code equal to that given +func (o *PipelineServiceGetPipelineByNameDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service get pipeline by name default response @@ -96,12 +161,22 @@ func (o *PipelineServiceGetPipelineByNameDefault) Code() int { } func (o *PipelineServiceGetPipelineByNameDefault) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/pipelines/names/{name}][%d] PipelineService_GetPipelineByName default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/pipelines/names/{name}][%d] PipelineService_GetPipelineByName default %s", o._statusCode, payload) +} + +func (o *PipelineServiceGetPipelineByNameDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/pipelines/names/{name}][%d] PipelineService_GetPipelineByName default %s", o._statusCode, payload) +} + +func (o *PipelineServiceGetPipelineByNameDefault) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceGetPipelineByNameDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.RuntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_parameters.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_parameters.go index 17174ebac4b..b94f4121958 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_parameters.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewPipelineServiceGetPipelineParams creates a new PipelineServiceGetPipelineParams object -// with the default values initialized. 
+// NewPipelineServiceGetPipelineParams creates a new PipelineServiceGetPipelineParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceGetPipelineParams() *PipelineServiceGetPipelineParams { - var () return &PipelineServiceGetPipelineParams{ - timeout: cr.DefaultTimeout, } } // NewPipelineServiceGetPipelineParamsWithTimeout creates a new PipelineServiceGetPipelineParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceGetPipelineParamsWithTimeout(timeout time.Duration) *PipelineServiceGetPipelineParams { - var () return &PipelineServiceGetPipelineParams{ - timeout: timeout, } } // NewPipelineServiceGetPipelineParamsWithContext creates a new PipelineServiceGetPipelineParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceGetPipelineParamsWithContext(ctx context.Context) *PipelineServiceGetPipelineParams { - var () return &PipelineServiceGetPipelineParams{ - Context: ctx, } } // NewPipelineServiceGetPipelineParamsWithHTTPClient creates a new PipelineServiceGetPipelineParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceGetPipelineParamsWithHTTPClient(client *http.Client) *PipelineServiceGetPipelineParams { - var () return &PipelineServiceGetPipelineParams{ HTTPClient: client, } } -/*PipelineServiceGetPipelineParams contains all the parameters to send to the API endpoint -for the pipeline service get pipeline operation typically these are written to a http.Request +/* +PipelineServiceGetPipelineParams contains all the parameters to send to the API endpoint + + for the pipeline service get pipeline operation. + + Typically these are written to a http.Request. */ type PipelineServiceGetPipelineParams struct { - /*PipelineID - Required input. The ID of the pipeline to be retrieved. + /* PipelineID. + Required input. The ID of the pipeline to be retrieved. */ PipelineID string @@ -72,6 +72,21 @@ type PipelineServiceGetPipelineParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service get pipeline params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceGetPipelineParams) WithDefaults() *PipelineServiceGetPipelineParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service get pipeline params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *PipelineServiceGetPipelineParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the pipeline service get pipeline params func (o *PipelineServiceGetPipelineParams) WithTimeout(timeout time.Duration) *PipelineServiceGetPipelineParams { o.SetTimeout(timeout) diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_responses.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_responses.go index 71802728827..4f9a42a48ac 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_responses.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" ) // PipelineServiceGetPipelineReader is a Reader for the PipelineServiceGetPipeline structure. @@ -24,14 +24,12 @@ type PipelineServiceGetPipelineReader struct { // ReadResponse reads a server response into the received o. func (o *PipelineServiceGetPipelineReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceGetPipelineOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceGetPipelineDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceGetPipelineOK() *PipelineServiceGetPipelineOK { return &PipelineServiceGetPipelineOK{} } -/*PipelineServiceGetPipelineOK handles this case with default header values. +/* +PipelineServiceGetPipelineOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type PipelineServiceGetPipelineOK struct { Payload *pipeline_model.V2beta1Pipeline } +// IsSuccess returns true when this pipeline service get pipeline o k response has a 2xx status code +func (o *PipelineServiceGetPipelineOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service get pipeline o k response has a 3xx status code +func (o *PipelineServiceGetPipelineOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service get pipeline o k response has a 4xx status code +func (o *PipelineServiceGetPipelineOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service get pipeline o k response has a 5xx status code +func (o *PipelineServiceGetPipelineOK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service get pipeline o k response a status code equal to that given +func (o *PipelineServiceGetPipelineOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service get pipeline o k response +func (o *PipelineServiceGetPipelineOK) Code() int { + return 200 +} + func (o *PipelineServiceGetPipelineOK) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/pipelines/{pipeline_id}][%d] pipelineServiceGetPipelineOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/pipelines/{pipeline_id}][%d] pipelineServiceGetPipelineOK %s", 200, payload) +} + +func (o *PipelineServiceGetPipelineOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/pipelines/{pipeline_id}][%d] pipelineServiceGetPipelineOK %s", 200, payload) +} + +func (o *PipelineServiceGetPipelineOK) GetPayload() *pipeline_model.V2beta1Pipeline { + return o.Payload } func (o *PipelineServiceGetPipelineOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewPipelineServiceGetPipelineDefault(code int) *PipelineServiceGetPipelineD } } -/*PipelineServiceGetPipelineDefault handles this case with default header values. +/* +PipelineServiceGetPipelineDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceGetPipelineDefault struct { _statusCode int - Payload *pipeline_model.RuntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service get pipeline default response has a 2xx status code +func (o *PipelineServiceGetPipelineDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service get pipeline default response has a 3xx status code +func (o *PipelineServiceGetPipelineDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service get pipeline default response has a 4xx status code +func (o *PipelineServiceGetPipelineDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service get pipeline default response has a 5xx status code +func (o *PipelineServiceGetPipelineDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service get pipeline default response a status code equal to that given +func (o *PipelineServiceGetPipelineDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service get pipeline default response @@ -96,12 +161,22 @@ func (o *PipelineServiceGetPipelineDefault) Code() int { } func (o *PipelineServiceGetPipelineDefault) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/pipelines/{pipeline_id}][%d] PipelineService_GetPipeline default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/pipelines/{pipeline_id}][%d] PipelineService_GetPipeline default %s", o._statusCode, payload) +} + +func (o *PipelineServiceGetPipelineDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/pipelines/{pipeline_id}][%d] PipelineService_GetPipeline default %s", o._statusCode, payload) +} + +func (o *PipelineServiceGetPipelineDefault) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceGetPipelineDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.RuntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_parameters.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_parameters.go index 0ad7f8636e3..997d717cba3 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_parameters.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_parameters.go @@ -13,62 +13,63 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewPipelineServiceGetPipelineVersionParams creates a new PipelineServiceGetPipelineVersionParams object -// with the default values initialized. +// NewPipelineServiceGetPipelineVersionParams creates a new PipelineServiceGetPipelineVersionParams object, +// with the default timeout for this client. 
+// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceGetPipelineVersionParams() *PipelineServiceGetPipelineVersionParams { - var () return &PipelineServiceGetPipelineVersionParams{ - timeout: cr.DefaultTimeout, } } // NewPipelineServiceGetPipelineVersionParamsWithTimeout creates a new PipelineServiceGetPipelineVersionParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceGetPipelineVersionParamsWithTimeout(timeout time.Duration) *PipelineServiceGetPipelineVersionParams { - var () return &PipelineServiceGetPipelineVersionParams{ - timeout: timeout, } } // NewPipelineServiceGetPipelineVersionParamsWithContext creates a new PipelineServiceGetPipelineVersionParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceGetPipelineVersionParamsWithContext(ctx context.Context) *PipelineServiceGetPipelineVersionParams { - var () return &PipelineServiceGetPipelineVersionParams{ - Context: ctx, } } // NewPipelineServiceGetPipelineVersionParamsWithHTTPClient creates a new PipelineServiceGetPipelineVersionParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceGetPipelineVersionParamsWithHTTPClient(client *http.Client) *PipelineServiceGetPipelineVersionParams { - var () return &PipelineServiceGetPipelineVersionParams{ HTTPClient: client, } } -/*PipelineServiceGetPipelineVersionParams contains all the parameters to send to the API endpoint -for the pipeline service get pipeline version operation typically these are written to a http.Request +/* +PipelineServiceGetPipelineVersionParams contains all the parameters to send to the API endpoint + + for the pipeline service get pipeline version operation. + + Typically these are written to a http.Request. */ type PipelineServiceGetPipelineVersionParams struct { - /*PipelineID - Required input. ID of the parent pipeline. + /* PipelineID. + Required input. ID of the parent pipeline. */ PipelineID string - /*PipelineVersionID - Required input. ID of the pipeline version to be retrieved. + /* PipelineVersionID. + + Required input. ID of the pipeline version to be retrieved. */ PipelineVersionID string @@ -77,6 +78,21 @@ type PipelineServiceGetPipelineVersionParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service get pipeline version params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceGetPipelineVersionParams) WithDefaults() *PipelineServiceGetPipelineVersionParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service get pipeline version params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *PipelineServiceGetPipelineVersionParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the pipeline service get pipeline version params func (o *PipelineServiceGetPipelineVersionParams) WithTimeout(timeout time.Duration) *PipelineServiceGetPipelineVersionParams { o.SetTimeout(timeout) diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_responses.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_responses.go index 0e326be7a68..fc9d91e5a86 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_responses.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_get_pipeline_version_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" ) // PipelineServiceGetPipelineVersionReader is a Reader for the PipelineServiceGetPipelineVersion structure. @@ -24,14 +24,12 @@ type PipelineServiceGetPipelineVersionReader struct { // ReadResponse reads a server response into the received o. func (o *PipelineServiceGetPipelineVersionReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceGetPipelineVersionOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceGetPipelineVersionDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceGetPipelineVersionOK() *PipelineServiceGetPipelineVersion return &PipelineServiceGetPipelineVersionOK{} } -/*PipelineServiceGetPipelineVersionOK handles this case with default header values. +/* +PipelineServiceGetPipelineVersionOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type PipelineServiceGetPipelineVersionOK struct { Payload *pipeline_model.V2beta1PipelineVersion } +// IsSuccess returns true when this pipeline service get pipeline version o k response has a 2xx status code +func (o *PipelineServiceGetPipelineVersionOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service get pipeline version o k response has a 3xx status code +func (o *PipelineServiceGetPipelineVersionOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service get pipeline version o k response has a 4xx status code +func (o *PipelineServiceGetPipelineVersionOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service get pipeline version o k response has a 5xx status code +func (o *PipelineServiceGetPipelineVersionOK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service get pipeline version o k response a status code equal to that given +func (o *PipelineServiceGetPipelineVersionOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service get pipeline version o k response +func (o *PipelineServiceGetPipelineVersionOK) Code() int { + return 200 +} + func (o *PipelineServiceGetPipelineVersionOK) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id}][%d] pipelineServiceGetPipelineVersionOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id}][%d] pipelineServiceGetPipelineVersionOK %s", 200, payload) +} + +func (o *PipelineServiceGetPipelineVersionOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id}][%d] pipelineServiceGetPipelineVersionOK %s", 200, payload) +} + +func (o *PipelineServiceGetPipelineVersionOK) GetPayload() *pipeline_model.V2beta1PipelineVersion { + return o.Payload } func (o *PipelineServiceGetPipelineVersionOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewPipelineServiceGetPipelineVersionDefault(code int) *PipelineServiceGetPi } } -/*PipelineServiceGetPipelineVersionDefault handles this case with default header values. +/* +PipelineServiceGetPipelineVersionDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceGetPipelineVersionDefault struct { _statusCode int - Payload *pipeline_model.RuntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service get pipeline version default response has a 2xx status code +func (o *PipelineServiceGetPipelineVersionDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service get pipeline version default response has a 3xx status code +func (o *PipelineServiceGetPipelineVersionDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service get pipeline version default response has a 4xx status code +func (o *PipelineServiceGetPipelineVersionDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service get pipeline version default response has a 5xx status code +func (o *PipelineServiceGetPipelineVersionDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service get pipeline version default response a status code equal to that given +func (o *PipelineServiceGetPipelineVersionDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service get pipeline version default response @@ -96,12 +161,22 @@ func (o *PipelineServiceGetPipelineVersionDefault) Code() int { } func (o *PipelineServiceGetPipelineVersionDefault) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id}][%d] PipelineService_GetPipelineVersion default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id}][%d] PipelineService_GetPipelineVersion default %s", o._statusCode, payload) +} + +func (o *PipelineServiceGetPipelineVersionDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id}][%d] PipelineService_GetPipelineVersion default %s", o._statusCode, payload) +} + +func (o *PipelineServiceGetPipelineVersionDefault) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceGetPipelineVersionDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.RuntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipeline_versions_parameters.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipeline_versions_parameters.go index b39941a37ca..174a17f387b 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipeline_versions_parameters.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipeline_versions_parameters.go @@ -13,82 +13,88 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" - - strfmt "github.com/go-openapi/strfmt" ) -// NewPipelineServiceListPipelineVersionsParams 
creates a new PipelineServiceListPipelineVersionsParams object -// with the default values initialized. +// NewPipelineServiceListPipelineVersionsParams creates a new PipelineServiceListPipelineVersionsParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceListPipelineVersionsParams() *PipelineServiceListPipelineVersionsParams { - var () return &PipelineServiceListPipelineVersionsParams{ - timeout: cr.DefaultTimeout, } } // NewPipelineServiceListPipelineVersionsParamsWithTimeout creates a new PipelineServiceListPipelineVersionsParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceListPipelineVersionsParamsWithTimeout(timeout time.Duration) *PipelineServiceListPipelineVersionsParams { - var () return &PipelineServiceListPipelineVersionsParams{ - timeout: timeout, } } // NewPipelineServiceListPipelineVersionsParamsWithContext creates a new PipelineServiceListPipelineVersionsParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceListPipelineVersionsParamsWithContext(ctx context.Context) *PipelineServiceListPipelineVersionsParams { - var () return &PipelineServiceListPipelineVersionsParams{ - Context: ctx, } } // NewPipelineServiceListPipelineVersionsParamsWithHTTPClient creates a new PipelineServiceListPipelineVersionsParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceListPipelineVersionsParamsWithHTTPClient(client *http.Client) *PipelineServiceListPipelineVersionsParams { - var () return &PipelineServiceListPipelineVersionsParams{ HTTPClient: client, } } -/*PipelineServiceListPipelineVersionsParams contains all the parameters to send to the API endpoint -for the pipeline service list pipeline versions operation typically these are written to a http.Request +/* +PipelineServiceListPipelineVersionsParams contains all the parameters to send to the API endpoint + + for the pipeline service list pipeline versions operation. + + Typically these are written to a http.Request. */ type PipelineServiceListPipelineVersionsParams struct { - /*Filter - A url-encoded, JSON-serialized filter protocol buffer (see - [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). + /* Filter. + A url-encoded, JSON-serialized filter protocol buffer (see + [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). */ Filter *string - /*PageSize - The number of pipeline versions to be listed per page. If there are more pipeline + + /* PageSize. + + The number of pipeline versions to be listed per page. If there are more pipeline versions than this number, the response message will contain a valid value in the nextPageToken field. + Format: int32 */ PageSize *int32 - /*PageToken - A page token to request the results page. + /* PageToken. + + A page token to request the results page. */ PageToken *string - /*PipelineID - Required input. ID of the parent pipeline. + /* PipelineID. + + Required input. ID of the parent pipeline. 
*/ PipelineID string - /*SortBy - Sorting order in form of "field_name", "field_name asc" or "field_name desc". - Ascending by default. + /* SortBy. + + Sorting order in form of "field_name", "field_name asc" or "field_name desc". + Ascending by default. */ SortBy *string @@ -97,6 +103,21 @@ type PipelineServiceListPipelineVersionsParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service list pipeline versions params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceListPipelineVersionsParams) WithDefaults() *PipelineServiceListPipelineVersionsParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service list pipeline versions params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceListPipelineVersionsParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the pipeline service list pipeline versions params func (o *PipelineServiceListPipelineVersionsParams) WithTimeout(timeout time.Duration) *PipelineServiceListPipelineVersionsParams { o.SetTimeout(timeout) @@ -197,48 +218,51 @@ func (o *PipelineServiceListPipelineVersionsParams) WriteToRequest(r runtime.Cli // query param filter var qrFilter string + if o.Filter != nil { qrFilter = *o.Filter } qFilter := qrFilter if qFilter != "" { + if err := r.SetQueryParam("filter", qFilter); err != nil { return err } } - } if o.PageSize != nil { // query param page_size var qrPageSize int32 + if o.PageSize != nil { qrPageSize = *o.PageSize } qPageSize := swag.FormatInt32(qrPageSize) if qPageSize != "" { + if err := r.SetQueryParam("page_size", qPageSize); err != nil { return err } } - } if o.PageToken != nil { // query param page_token var qrPageToken string + if o.PageToken != nil { qrPageToken = *o.PageToken } qPageToken := qrPageToken if qPageToken != "" { + if err := r.SetQueryParam("page_token", qPageToken); err != nil { return err } } - } // path param pipeline_id @@ -250,16 +274,17 @@ func (o *PipelineServiceListPipelineVersionsParams) WriteToRequest(r runtime.Cli // query param sort_by var qrSortBy string + if o.SortBy != nil { qrSortBy = *o.SortBy } qSortBy := qrSortBy if qSortBy != "" { + if err := r.SetQueryParam("sort_by", qSortBy); err != nil { return err } } - } if len(res) > 0 { diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipeline_versions_responses.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipeline_versions_responses.go index 35a59bd3344..673146a08fb 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipeline_versions_responses.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipeline_versions_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" ) // PipelineServiceListPipelineVersionsReader is a Reader for the 
PipelineServiceListPipelineVersions structure. @@ -24,14 +24,12 @@ type PipelineServiceListPipelineVersionsReader struct { // ReadResponse reads a server response into the received o. func (o *PipelineServiceListPipelineVersionsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceListPipelineVersionsOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceListPipelineVersionsDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceListPipelineVersionsOK() *PipelineServiceListPipelineVers return &PipelineServiceListPipelineVersionsOK{} } -/*PipelineServiceListPipelineVersionsOK handles this case with default header values. +/* +PipelineServiceListPipelineVersionsOK describes a response with status code 200, with default header values. A successful response. */ @@ -57,8 +56,48 @@ type PipelineServiceListPipelineVersionsOK struct { Payload *pipeline_model.V2beta1ListPipelineVersionsResponse } +// IsSuccess returns true when this pipeline service list pipeline versions o k response has a 2xx status code +func (o *PipelineServiceListPipelineVersionsOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service list pipeline versions o k response has a 3xx status code +func (o *PipelineServiceListPipelineVersionsOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service list pipeline versions o k response has a 4xx status code +func (o *PipelineServiceListPipelineVersionsOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service list pipeline versions o k response has a 5xx status code +func (o *PipelineServiceListPipelineVersionsOK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service list pipeline versions o k response a status code equal to that given +func (o *PipelineServiceListPipelineVersionsOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service list pipeline versions o k response +func (o *PipelineServiceListPipelineVersionsOK) Code() int { + return 200 +} + func (o *PipelineServiceListPipelineVersionsOK) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/pipelines/{pipeline_id}/versions][%d] pipelineServiceListPipelineVersionsOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/pipelines/{pipeline_id}/versions][%d] pipelineServiceListPipelineVersionsOK %s", 200, payload) +} + +func (o *PipelineServiceListPipelineVersionsOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/pipelines/{pipeline_id}/versions][%d] pipelineServiceListPipelineVersionsOK %s", 200, payload) +} + +func (o *PipelineServiceListPipelineVersionsOK) GetPayload() *pipeline_model.V2beta1ListPipelineVersionsResponse { + return o.Payload } func (o *PipelineServiceListPipelineVersionsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewPipelineServiceListPipelineVersionsDefault(code int) *PipelineServiceLis } } -/*PipelineServiceListPipelineVersionsDefault handles this case with default header values. 
+/* +PipelineServiceListPipelineVersionsDefault describes a response with status code -1, with default header values. An unexpected error response. */ type PipelineServiceListPipelineVersionsDefault struct { _statusCode int - Payload *pipeline_model.RuntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service list pipeline versions default response has a 2xx status code +func (o *PipelineServiceListPipelineVersionsDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service list pipeline versions default response has a 3xx status code +func (o *PipelineServiceListPipelineVersionsDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service list pipeline versions default response has a 4xx status code +func (o *PipelineServiceListPipelineVersionsDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service list pipeline versions default response has a 5xx status code +func (o *PipelineServiceListPipelineVersionsDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service list pipeline versions default response a status code equal to that given +func (o *PipelineServiceListPipelineVersionsDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service list pipeline versions default response @@ -96,12 +161,22 @@ func (o *PipelineServiceListPipelineVersionsDefault) Code() int { } func (o *PipelineServiceListPipelineVersionsDefault) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/pipelines/{pipeline_id}/versions][%d] PipelineService_ListPipelineVersions default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/pipelines/{pipeline_id}/versions][%d] PipelineService_ListPipelineVersions default %s", o._statusCode, payload) +} + +func (o *PipelineServiceListPipelineVersionsDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/pipelines/{pipeline_id}/versions][%d] PipelineService_ListPipelineVersions default %s", o._statusCode, payload) +} + +func (o *PipelineServiceListPipelineVersionsDefault) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceListPipelineVersionsDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.RuntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipelines_parameters.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipelines_parameters.go index 7fcb5e89fd8..f3ef2b280cd 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipelines_parameters.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipelines_parameters.go @@ -13,82 +13,88 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" - - 
strfmt "github.com/go-openapi/strfmt" ) -// NewPipelineServiceListPipelinesParams creates a new PipelineServiceListPipelinesParams object -// with the default values initialized. +// NewPipelineServiceListPipelinesParams creates a new PipelineServiceListPipelinesParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewPipelineServiceListPipelinesParams() *PipelineServiceListPipelinesParams { - var () return &PipelineServiceListPipelinesParams{ - timeout: cr.DefaultTimeout, } } // NewPipelineServiceListPipelinesParamsWithTimeout creates a new PipelineServiceListPipelinesParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewPipelineServiceListPipelinesParamsWithTimeout(timeout time.Duration) *PipelineServiceListPipelinesParams { - var () return &PipelineServiceListPipelinesParams{ - timeout: timeout, } } // NewPipelineServiceListPipelinesParamsWithContext creates a new PipelineServiceListPipelinesParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewPipelineServiceListPipelinesParamsWithContext(ctx context.Context) *PipelineServiceListPipelinesParams { - var () return &PipelineServiceListPipelinesParams{ - Context: ctx, } } // NewPipelineServiceListPipelinesParamsWithHTTPClient creates a new PipelineServiceListPipelinesParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewPipelineServiceListPipelinesParamsWithHTTPClient(client *http.Client) *PipelineServiceListPipelinesParams { - var () return &PipelineServiceListPipelinesParams{ HTTPClient: client, } } -/*PipelineServiceListPipelinesParams contains all the parameters to send to the API endpoint -for the pipeline service list pipelines operation typically these are written to a http.Request +/* +PipelineServiceListPipelinesParams contains all the parameters to send to the API endpoint + + for the pipeline service list pipelines operation. + + Typically these are written to a http.Request. */ type PipelineServiceListPipelinesParams struct { - /*Filter - A url-encoded, JSON-serialized filter protocol buffer (see - [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). + /* Filter. + A url-encoded, JSON-serialized filter protocol buffer (see + [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). */ Filter *string - /*Namespace - Optional input. Namespace for the pipelines. + /* Namespace. + + Optional input. Namespace for the pipelines. */ Namespace *string - /*PageSize - The number of pipelines to be listed per page. If there are more pipelines + + /* PageSize. + + The number of pipelines to be listed per page. If there are more pipelines than this number, the response message will contain a valid value in the nextPageToken field. + Format: int32 */ PageSize *int32 - /*PageToken - A page token to request the results page. + /* PageToken. + + A page token to request the results page. */ PageToken *string - /*SortBy - Sorting order in form of "field_name", "field_name asc" or "field_name desc". - Ascending by default. + /* SortBy. 
+ + Sorting order in form of "field_name", "field_name asc" or "field_name desc". + Ascending by default. */ SortBy *string @@ -97,6 +103,21 @@ type PipelineServiceListPipelinesParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the pipeline service list pipelines params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceListPipelinesParams) WithDefaults() *PipelineServiceListPipelinesParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the pipeline service list pipelines params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *PipelineServiceListPipelinesParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the pipeline service list pipelines params func (o *PipelineServiceListPipelinesParams) WithTimeout(timeout time.Duration) *PipelineServiceListPipelinesParams { o.SetTimeout(timeout) @@ -197,80 +218,85 @@ func (o *PipelineServiceListPipelinesParams) WriteToRequest(r runtime.ClientRequ // query param filter var qrFilter string + if o.Filter != nil { qrFilter = *o.Filter } qFilter := qrFilter if qFilter != "" { + if err := r.SetQueryParam("filter", qFilter); err != nil { return err } } - } if o.Namespace != nil { // query param namespace var qrNamespace string + if o.Namespace != nil { qrNamespace = *o.Namespace } qNamespace := qrNamespace if qNamespace != "" { + if err := r.SetQueryParam("namespace", qNamespace); err != nil { return err } } - } if o.PageSize != nil { // query param page_size var qrPageSize int32 + if o.PageSize != nil { qrPageSize = *o.PageSize } qPageSize := swag.FormatInt32(qrPageSize) if qPageSize != "" { + if err := r.SetQueryParam("page_size", qPageSize); err != nil { return err } } - } if o.PageToken != nil { // query param page_token var qrPageToken string + if o.PageToken != nil { qrPageToken = *o.PageToken } qPageToken := qrPageToken if qPageToken != "" { + if err := r.SetQueryParam("page_token", qPageToken); err != nil { return err } } - } if o.SortBy != nil { // query param sort_by var qrSortBy string + if o.SortBy != nil { qrSortBy = *o.SortBy } qSortBy := qrSortBy if qSortBy != "" { + if err := r.SetQueryParam("sort_by", qSortBy); err != nil { return err } } - } if len(res) > 0 { diff --git a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipelines_responses.go b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipelines_responses.go index 7c93e49b258..641e8135885 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipelines_responses.go +++ b/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service/pipeline_service_list_pipelines_responses.go @@ -6,14 +6,14 @@ package pipeline_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" ) // PipelineServiceListPipelinesReader is a Reader for the PipelineServiceListPipelines structure. 
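Note (illustrative, not part of the generated diff): the regenerated *_parameters.go and *_responses.go files above change the calling surface in two ways — params constructors no longer pre-populate defaults (SetDefaults/WithDefaults are now explicit, and a no-op where no defaults exist), and the default error response now carries *pipeline_model.GooglerpcStatus instead of RuntimeError, with IsSuccess/IsClientError/IsServerError/Code/GetPayload helpers. The sketch below shows how calling code might use this surface for the GetPipeline operation. The import paths are taken from this diff; buildGetPipelineParams and handleGetPipelineResult are hypothetical helpers, and the client call that produces the (ok, err) pair is assumed to follow the usual go-swagger shape (non-2xx default responses returned as the error) and is not shown here.

package kfpclientexample

import (
	"fmt"
	"time"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service"
	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model"
)

// buildGetPipelineParams uses the regenerated constructors: the timeout-specific
// constructor sets a request deadline, and SetDefaults is called explicitly (a no-op
// for this operation, since it defines no defaults) to keep call sites uniform.
func buildGetPipelineParams(pipelineID string) *pipeline_service.PipelineServiceGetPipelineParams {
	params := pipeline_service.NewPipelineServiceGetPipelineParamsWithTimeout(30 * time.Second)
	params.SetDefaults()
	params.PipelineID = pipelineID
	return params
}

// handleGetPipelineResult interprets the (ok, err) pair returned by an assumed client
// call. Error responses are type-asserted to the regenerated Default type, whose
// payload is now *pipeline_model.GooglerpcStatus rather than RuntimeError.
func handleGetPipelineResult(ok *pipeline_service.PipelineServiceGetPipelineOK, err error) (*pipeline_model.V2beta1Pipeline, error) {
	if err != nil {
		if def, isDefault := err.(*pipeline_service.PipelineServiceGetPipelineDefault); isDefault && def.IsClientError() {
			// Code() and GetPayload() are new helpers on the default response.
			return nil, fmt.Errorf("KFP API rejected the request (HTTP %d): %+v", def.Code(), def.GetPayload())
		}
		return nil, err
	}
	// GetPayload() replaces direct access to the OK response's Payload field.
	return ok.GetPayload(), nil
}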
@@ -24,14 +24,12 @@ type PipelineServiceListPipelinesReader struct { // ReadResponse reads a server response into the received o. func (o *PipelineServiceListPipelinesReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewPipelineServiceListPipelinesOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewPipelineServiceListPipelinesDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewPipelineServiceListPipelinesOK() *PipelineServiceListPipelinesOK { return &PipelineServiceListPipelinesOK{} } -/*PipelineServiceListPipelinesOK handles this case with default header values. +/* +PipelineServiceListPipelinesOK describes a response with status code 200, with default header values. A successful response. */ @@ -57,8 +56,48 @@ type PipelineServiceListPipelinesOK struct { Payload *pipeline_model.V2beta1ListPipelinesResponse } +// IsSuccess returns true when this pipeline service list pipelines o k response has a 2xx status code +func (o *PipelineServiceListPipelinesOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this pipeline service list pipelines o k response has a 3xx status code +func (o *PipelineServiceListPipelinesOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this pipeline service list pipelines o k response has a 4xx status code +func (o *PipelineServiceListPipelinesOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this pipeline service list pipelines o k response has a 5xx status code +func (o *PipelineServiceListPipelinesOK) IsServerError() bool { + return false +} + +// IsCode returns true when this pipeline service list pipelines o k response a status code equal to that given +func (o *PipelineServiceListPipelinesOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the pipeline service list pipelines o k response +func (o *PipelineServiceListPipelinesOK) Code() int { + return 200 +} + func (o *PipelineServiceListPipelinesOK) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/pipelines][%d] pipelineServiceListPipelinesOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/pipelines][%d] pipelineServiceListPipelinesOK %s", 200, payload) +} + +func (o *PipelineServiceListPipelinesOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/pipelines][%d] pipelineServiceListPipelinesOK %s", 200, payload) +} + +func (o *PipelineServiceListPipelinesOK) GetPayload() *pipeline_model.V2beta1ListPipelinesResponse { + return o.Payload } func (o *PipelineServiceListPipelinesOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewPipelineServiceListPipelinesDefault(code int) *PipelineServiceListPipeli } } -/*PipelineServiceListPipelinesDefault handles this case with default header values. +/* +PipelineServiceListPipelinesDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type PipelineServiceListPipelinesDefault struct { _statusCode int - Payload *pipeline_model.RuntimeError + Payload *pipeline_model.GooglerpcStatus +} + +// IsSuccess returns true when this pipeline service list pipelines default response has a 2xx status code +func (o *PipelineServiceListPipelinesDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this pipeline service list pipelines default response has a 3xx status code +func (o *PipelineServiceListPipelinesDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this pipeline service list pipelines default response has a 4xx status code +func (o *PipelineServiceListPipelinesDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this pipeline service list pipelines default response has a 5xx status code +func (o *PipelineServiceListPipelinesDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this pipeline service list pipelines default response a status code equal to that given +func (o *PipelineServiceListPipelinesDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the pipeline service list pipelines default response @@ -96,12 +161,22 @@ func (o *PipelineServiceListPipelinesDefault) Code() int { } func (o *PipelineServiceListPipelinesDefault) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/pipelines][%d] PipelineService_ListPipelines default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/pipelines][%d] PipelineService_ListPipelines default %s", o._statusCode, payload) +} + +func (o *PipelineServiceListPipelinesDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/pipelines][%d] PipelineService_ListPipelines default %s", o._statusCode, payload) +} + +func (o *PipelineServiceListPipelinesDefault) GetPayload() *pipeline_model.GooglerpcStatus { + return o.Payload } func (o *PipelineServiceListPipelinesDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(pipeline_model.RuntimeError) + o.Payload = new(pipeline_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/pipeline_model/googlerpc_status.go b/backend/api/v2beta1/go_http_client/pipeline_model/googlerpc_status.go index 6b8a1fa849a..c693c3a254f 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_model/googlerpc_status.go +++ b/backend/api/v2beta1/go_http_client/pipeline_model/googlerpc_status.go @@ -6,11 +6,11 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) @@ -21,6 +21,7 @@ import ( // // You can find out more about this error model and how to work with it in the // [API Design Guide](https://cloud.google.com/apis/design/errors). 
+// // swagger:model googlerpcStatus type GooglerpcStatus struct { @@ -52,7 +53,6 @@ func (m *GooglerpcStatus) Validate(formats strfmt.Registry) error { } func (m *GooglerpcStatus) validateDetails(formats strfmt.Registry) error { - if swag.IsZero(m.Details) { // not required return nil } @@ -66,6 +66,47 @@ func (m *GooglerpcStatus) validateDetails(formats strfmt.Registry) error { if err := m.Details[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this googlerpc status based on the context it is used +func (m *GooglerpcStatus) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateDetails(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) contextValidateDetails(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Details); i++ { + + if m.Details[i] != nil { + + if swag.IsZero(m.Details[i]) { // not required + return nil + } + + if err := m.Details[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) } return err } diff --git a/backend/api/v2beta1/go_http_client/pipeline_model/protobuf_any.go b/backend/api/v2beta1/go_http_client/pipeline_model/protobuf_any.go index fb51adb0990..fba86c1d06b 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_model/protobuf_any.go +++ b/backend/api/v2beta1/go_http_client/pipeline_model/protobuf_any.go @@ -6,9 +6,10 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "encoding/json" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) @@ -20,45 +21,49 @@ import ( // // Example 1: Pack and unpack a message in C++. // -// Foo foo = ...; -// Any any; -// any.PackFrom(foo); -// ... -// if (any.UnpackTo(&foo)) { -// ... -// } +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } // // Example 2: Pack and unpack a message in Java. // -// Foo foo = ...; -// Any any = Any.pack(foo); -// ... -// if (any.is(Foo.class)) { -// foo = any.unpack(Foo.class); -// } -// -// Example 3: Pack and unpack a message in Python. -// -// foo = Foo(...) -// any = Any() -// any.Pack(foo) -// ... -// if any.Is(Foo.DESCRIPTOR): -// any.Unpack(foo) -// ... -// -// Example 4: Pack and unpack a message in Go -// -// foo := &pb.Foo{...} -// any, err := anypb.New(foo) -// if err != nil { -// ... -// } -// ... -// foo := &pb.Foo{} -// if err := any.UnmarshalTo(foo); err != nil { -// ... -// } +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// // or ... +// if (any.isSameTypeAs(Foo.getDefaultInstance())) { +// foo = any.unpack(Foo.getDefaultInstance()); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... 
+// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... +// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := anypb.New(foo) +// if err != nil { +// ... +// } +// ... +// foo := &pb.Foo{} +// if err := any.UnmarshalTo(foo); err != nil { +// ... +// } // // The pack methods provided by protobuf library will by default use // 'type.googleapis.com/full.type.name' as the type URL and the unpack @@ -66,34 +71,34 @@ import ( // in the type URL, for example "foo.bar.com/x/y.z" will yield type // name "y.z". // -// // JSON -// +// ==== // The JSON representation of an `Any` value uses the regular // representation of the deserialized, embedded message, with an // additional field `@type` which contains the type URL. Example: // -// package google.profile; -// message Person { -// string first_name = 1; -// string last_name = 2; -// } +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } // -// { -// "@type": "type.googleapis.com/google.profile.Person", -// "firstName": , -// "lastName": -// } +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } // // If the embedded message type is well-known and has a custom JSON // representation, that representation will be embedded adding a field // `value` which holds the custom JSON in addition to the `@type` // field. Example (for message [google.protobuf.Duration][]): // -// { -// "@type": "type.googleapis.com/google.protobuf.Duration", -// "value": "1.212s" -// } +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// // swagger:model protobufAny type ProtobufAny struct { @@ -120,39 +125,151 @@ type ProtobufAny struct { // // Note: this functionality is not currently available in the official // protobuf release, and it is not used for type URLs beginning with - // type.googleapis.com. + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. // // Schemes other than `http`, `https` (or the empty scheme) might be // used with implementation specific semantics. - TypeURL string `json:"type_url,omitempty"` + AtType string `json:"@type,omitempty"` - // Must be a valid serialized protocol buffer of the above specified type. - // Format: byte - Value strfmt.Base64 `json:"value,omitempty"` + // protobuf any + ProtobufAny map[string]interface{} `json:"-"` } -// Validate validates this protobuf any -func (m *ProtobufAny) Validate(formats strfmt.Registry) error { - var res []error +// UnmarshalJSON unmarshals this object with additional properties from JSON +func (m *ProtobufAny) UnmarshalJSON(data []byte) error { + // stage 1, bind the properties + var stage1 struct { + + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. 
+ // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + AtType string `json:"@type,omitempty"` + } + if err := json.Unmarshal(data, &stage1); err != nil { + return err + } + var rcv ProtobufAny + + rcv.AtType = stage1.AtType + *m = rcv - if err := m.validateValue(formats); err != nil { - res = append(res, err) + // stage 2, remove properties and add to map + stage2 := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &stage2); err != nil { + return err } - if len(res) > 0 { - return errors.CompositeValidationError(res...) + delete(stage2, "@type") + // stage 3, add additional properties values + if len(stage2) > 0 { + result := make(map[string]interface{}) + for k, v := range stage2 { + var toadd interface{} + if err := json.Unmarshal(v, &toadd); err != nil { + return err + } + result[k] = toadd + } + m.ProtobufAny = result } + return nil } -func (m *ProtobufAny) validateValue(formats strfmt.Registry) error { +// MarshalJSON marshals this object with additional properties into a JSON object +func (m ProtobufAny) MarshalJSON() ([]byte, error) { + var stage1 struct { - if swag.IsZero(m.Value) { // not required - return nil + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. 
+ AtType string `json:"@type,omitempty"` } - // Format "byte" (base64 string) is already validated when unmarshalled + stage1.AtType = m.AtType + + // make JSON object for known properties + props, err := json.Marshal(stage1) + if err != nil { + return nil, err + } + + if len(m.ProtobufAny) == 0 { // no additional properties + return props, nil + } + + // make JSON object for the additional properties + additional, err := json.Marshal(m.ProtobufAny) + if err != nil { + return nil, err + } + + if len(props) < 3 { // "{}": only additional properties + return additional, nil + } + + // concatenate the 2 objects + return swag.ConcatJSON(props, additional), nil +} + +// Validate validates this protobuf any +func (m *ProtobufAny) Validate(formats strfmt.Registry) error { + return nil +} +// ContextValidate validates this protobuf any based on context it is used +func (m *ProtobufAny) ContextValidate(ctx context.Context, formats strfmt.Registry) error { return nil } diff --git a/backend/api/v2beta1/go_http_client/pipeline_model/protobuf_null_value.go b/backend/api/v2beta1/go_http_client/pipeline_model/protobuf_null_value.go index b1a32585701..82e1e427a2d 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_model/protobuf_null_value.go +++ b/backend/api/v2beta1/go_http_client/pipeline_model/protobuf_null_value.go @@ -6,23 +6,33 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // ProtobufNullValue `NullValue` is a singleton enumeration to represent the null value for the // `Value` type union. // -// The JSON representation for `NullValue` is JSON `null`. +// The JSON representation for `NullValue` is JSON `null`. +// +// - NULL_VALUE: Null value. // -// - NULL_VALUE: Null value. // swagger:model protobufNullValue type ProtobufNullValue string +func NewProtobufNullValue(value ProtobufNullValue) *ProtobufNullValue { + return &value +} + +// Pointer returns a pointer to a freshly-allocated ProtobufNullValue. +func (m ProtobufNullValue) Pointer() *ProtobufNullValue { + return &m +} + const ( // ProtobufNullValueNULLVALUE captures enum value "NULL_VALUE" @@ -43,7 +53,7 @@ func init() { } func (m ProtobufNullValue) validateProtobufNullValueEnum(path, location string, value ProtobufNullValue) error { - if err := validate.Enum(path, location, value, protobufNullValueEnum); err != nil { + if err := validate.EnumCase(path, location, value, protobufNullValueEnum, true); err != nil { return err } return nil @@ -63,3 +73,8 @@ func (m ProtobufNullValue) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this protobuf null value based on context it is used +func (m ProtobufNullValue) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v2beta1/go_http_client/pipeline_model/runtime_error.go b/backend/api/v2beta1/go_http_client/pipeline_model/runtime_error.go deleted file mode 100644 index b622a5d87fe..00000000000 --- a/backend/api/v2beta1/go_http_client/pipeline_model/runtime_error.go +++ /dev/null @@ -1,89 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package pipeline_model - -// This file was generated by the swagger tool. 
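The regenerated ProtobufAny replaces the TypeURL/Value pair with an @type discriminator plus an untyped map of additional properties, handled by the two-stage UnmarshalJSON/MarshalJSON shown above. A small round-trip sketch under that assumption; decodeErrorDetail and the sample JSON are illustrative only:

package example

import (
	"encoding/json"
	"fmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model"
)

// decodeErrorDetail shows how "@type" is bound to AtType while every other key
// is collected into the ProtobufAny map, and how MarshalJSON stitches the two
// back together on the way out.
func decodeErrorDetail(raw []byte) (string, error) {
	var detail pipeline_model.ProtobufAny
	if err := json.Unmarshal(raw, &detail); err != nil {
		return "", err
	}

	fmt.Println("type URL:", detail.AtType)
	fmt.Println("additional keys:", len(detail.ProtobufAny))

	// Re-encode: the known "@type" property first, additional properties
	// concatenated after it via swag.ConcatJSON.
	out, err := json.Marshal(detail)
	if err != nil {
		return "", err
	}
	return string(out), nil
}

For example, feeding it `{"@type":"type.googleapis.com/google.rpc.ErrorInfo","reason":"QUOTA_EXCEEDED"}` would leave "reason" in the additional-properties map rather than dropping it.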
-// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "strconv" - - strfmt "github.com/go-openapi/strfmt" - - "github.com/go-openapi/errors" - "github.com/go-openapi/swag" -) - -// RuntimeError runtime error -// swagger:model runtimeError -type RuntimeError struct { - - // code - Code int32 `json:"code,omitempty"` - - // details - Details []*ProtobufAny `json:"details"` - - // error - Error string `json:"error,omitempty"` - - // message - Message string `json:"message,omitempty"` -} - -// Validate validates this runtime error -func (m *RuntimeError) Validate(formats strfmt.Registry) error { - var res []error - - if err := m.validateDetails(formats); err != nil { - res = append(res, err) - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} - -func (m *RuntimeError) validateDetails(formats strfmt.Registry) error { - - if swag.IsZero(m.Details) { // not required - return nil - } - - for i := 0; i < len(m.Details); i++ { - if swag.IsZero(m.Details[i]) { // not required - continue - } - - if m.Details[i] != nil { - if err := m.Details[i].Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("details" + "." + strconv.Itoa(i)) - } - return err - } - } - - } - - return nil -} - -// MarshalBinary interface implementation -func (m *RuntimeError) MarshalBinary() ([]byte, error) { - if m == nil { - return nil, nil - } - return swag.WriteJSON(m) -} - -// UnmarshalBinary interface implementation -func (m *RuntimeError) UnmarshalBinary(b []byte) error { - var res RuntimeError - if err := swag.ReadJSON(b, &res); err != nil { - return err - } - *m = res - return nil -} diff --git a/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_create_pipeline_and_version_request.go b/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_create_pipeline_and_version_request.go index e17f36fcc27..154669009f9 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_create_pipeline_and_version_request.go +++ b/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_create_pipeline_and_version_request.go @@ -6,13 +6,15 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // V2beta1CreatePipelineAndVersionRequest v2beta1 create pipeline and version request +// // swagger:model v2beta1CreatePipelineAndVersionRequest type V2beta1CreatePipelineAndVersionRequest struct { @@ -43,7 +45,6 @@ func (m *V2beta1CreatePipelineAndVersionRequest) Validate(formats strfmt.Registr } func (m *V2beta1CreatePipelineAndVersionRequest) validatePipeline(formats strfmt.Registry) error { - if swag.IsZero(m.Pipeline) { // not required return nil } @@ -52,6 +53,8 @@ func (m *V2beta1CreatePipelineAndVersionRequest) validatePipeline(formats strfmt if err := m.Pipeline.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("pipeline") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipeline") } return err } @@ -61,7 +64,6 @@ func (m *V2beta1CreatePipelineAndVersionRequest) validatePipeline(formats strfmt } func (m *V2beta1CreatePipelineAndVersionRequest) validatePipelineVersion(formats strfmt.Registry) error { - if swag.IsZero(m.PipelineVersion) { // not required return nil } @@ -70,6 +72,68 @@ func (m 
*V2beta1CreatePipelineAndVersionRequest) validatePipelineVersion(formats if err := m.PipelineVersion.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("pipeline_version") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipeline_version") + } + return err + } + } + + return nil +} + +// ContextValidate validate this v2beta1 create pipeline and version request based on the context it is used +func (m *V2beta1CreatePipelineAndVersionRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidatePipeline(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidatePipelineVersion(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1CreatePipelineAndVersionRequest) contextValidatePipeline(ctx context.Context, formats strfmt.Registry) error { + + if m.Pipeline != nil { + + if swag.IsZero(m.Pipeline) { // not required + return nil + } + + if err := m.Pipeline.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("pipeline") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipeline") + } + return err + } + } + + return nil +} + +func (m *V2beta1CreatePipelineAndVersionRequest) contextValidatePipelineVersion(ctx context.Context, formats strfmt.Registry) error { + + if m.PipelineVersion != nil { + + if swag.IsZero(m.PipelineVersion) { // not required + return nil + } + + if err := m.PipelineVersion.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("pipeline_version") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipeline_version") } return err } diff --git a/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_list_pipeline_versions_response.go b/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_list_pipeline_versions_response.go index 8765950ca3f..0692a13bf10 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_list_pipeline_versions_response.go +++ b/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_list_pipeline_versions_response.go @@ -6,15 +6,16 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // V2beta1ListPipelineVersionsResponse v2beta1 list pipeline versions response +// // swagger:model v2beta1ListPipelineVersionsResponse type V2beta1ListPipelineVersionsResponse struct { @@ -43,7 +44,6 @@ func (m *V2beta1ListPipelineVersionsResponse) Validate(formats strfmt.Registry) } func (m *V2beta1ListPipelineVersionsResponse) validatePipelineVersions(formats strfmt.Registry) error { - if swag.IsZero(m.PipelineVersions) { // not required return nil } @@ -57,6 +57,47 @@ func (m *V2beta1ListPipelineVersionsResponse) validatePipelineVersions(formats s if err := m.PipelineVersions[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("pipeline_versions" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipeline_versions" + "." 
+ strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this v2beta1 list pipeline versions response based on the context it is used +func (m *V2beta1ListPipelineVersionsResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidatePipelineVersions(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1ListPipelineVersionsResponse) contextValidatePipelineVersions(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.PipelineVersions); i++ { + + if m.PipelineVersions[i] != nil { + + if swag.IsZero(m.PipelineVersions[i]) { // not required + return nil + } + + if err := m.PipelineVersions[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("pipeline_versions" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipeline_versions" + "." + strconv.Itoa(i)) } return err } diff --git a/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_list_pipelines_response.go b/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_list_pipelines_response.go index e3f0013e1ee..f279082cb3e 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_list_pipelines_response.go +++ b/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_list_pipelines_response.go @@ -6,15 +6,16 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // V2beta1ListPipelinesResponse v2beta1 list pipelines response +// // swagger:model v2beta1ListPipelinesResponse type V2beta1ListPipelinesResponse struct { @@ -44,7 +45,6 @@ func (m *V2beta1ListPipelinesResponse) Validate(formats strfmt.Registry) error { } func (m *V2beta1ListPipelinesResponse) validatePipelines(formats strfmt.Registry) error { - if swag.IsZero(m.Pipelines) { // not required return nil } @@ -58,6 +58,47 @@ func (m *V2beta1ListPipelinesResponse) validatePipelines(formats strfmt.Registry if err := m.Pipelines[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("pipelines" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipelines" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this v2beta1 list pipelines response based on the context it is used +func (m *V2beta1ListPipelinesResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidatePipelines(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1ListPipelinesResponse) contextValidatePipelines(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Pipelines); i++ { + + if m.Pipelines[i] != nil { + + if swag.IsZero(m.Pipelines[i]) { // not required + return nil + } + + if err := m.Pipelines[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("pipelines" + "." 
+ strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipelines" + "." + strconv.Itoa(i)) } return err } diff --git a/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_pipeline.go b/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_pipeline.go index 93a299e2bf7..f29b1c6307e 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_pipeline.go +++ b/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_pipeline.go @@ -6,14 +6,16 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // V2beta1Pipeline v2beta1 pipeline +// // swagger:model v2beta1Pipeline type V2beta1Pipeline struct { @@ -63,7 +65,6 @@ func (m *V2beta1Pipeline) Validate(formats strfmt.Registry) error { } func (m *V2beta1Pipeline) validateCreatedAt(formats strfmt.Registry) error { - if swag.IsZero(m.CreatedAt) { // not required return nil } @@ -76,7 +77,6 @@ func (m *V2beta1Pipeline) validateCreatedAt(formats strfmt.Registry) error { } func (m *V2beta1Pipeline) validateError(formats strfmt.Registry) error { - if swag.IsZero(m.Error) { // not required return nil } @@ -85,6 +85,43 @@ func (m *V2beta1Pipeline) validateError(formats strfmt.Registry) error { if err := m.Error.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("error") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("error") + } + return err + } + } + + return nil +} + +// ContextValidate validate this v2beta1 pipeline based on the context it is used +func (m *V2beta1Pipeline) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateError(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *V2beta1Pipeline) contextValidateError(ctx context.Context, formats strfmt.Registry) error { + + if m.Error != nil { + + if swag.IsZero(m.Error) { // not required + return nil + } + + if err := m.Error.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("error") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("error") } return err } diff --git a/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_pipeline_version.go b/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_pipeline_version.go index 0e323131a38..24508b9d004 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_pipeline_version.go +++ b/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_pipeline_version.go @@ -6,14 +6,16 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // V2beta1PipelineVersion v2beta1 pipeline version +// // swagger:model v2beta1PipelineVersion type V2beta1PipelineVersion struct { @@ -84,7 +86,6 @@ func (m *V2beta1PipelineVersion) Validate(formats strfmt.Registry) error { } func (m *V2beta1PipelineVersion) validateCreatedAt(formats strfmt.Registry) error { - if swag.IsZero(m.CreatedAt) { // not required return nil } @@ -97,7 +98,6 @@ func (m *V2beta1PipelineVersion) validateCreatedAt(formats strfmt.Registry) erro } func (m *V2beta1PipelineVersion) validateError(formats strfmt.Registry) error { - if swag.IsZero(m.Error) { // not required return nil } @@ -106,6 +106,8 @@ func (m *V2beta1PipelineVersion) validateError(formats strfmt.Registry) error { if err := m.Error.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("error") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("error") } return err } @@ -115,7 +117,6 @@ func (m *V2beta1PipelineVersion) validateError(formats strfmt.Registry) error { } func (m *V2beta1PipelineVersion) validatePackageURL(formats strfmt.Registry) error { - if swag.IsZero(m.PackageURL) { // not required return nil } @@ -124,6 +125,68 @@ func (m *V2beta1PipelineVersion) validatePackageURL(formats strfmt.Registry) err if err := m.PackageURL.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("package_url") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("package_url") + } + return err + } + } + + return nil +} + +// ContextValidate validate this v2beta1 pipeline version based on the context it is used +func (m *V2beta1PipelineVersion) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateError(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidatePackageURL(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
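Every model in this package now exposes ContextValidate alongside Validate, matching newer go-swagger output. A hedged sketch of how a caller might run both passes; the checkPipeline helper is illustrative:

package example

import (
	"context"

	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model"
)

// checkPipeline runs the structural validation first, then the context-aware
// pass added in this change, which walks the nested Error status.
func checkPipeline(ctx context.Context, p *pipeline_model.V2beta1Pipeline) error {
	if p == nil {
		return nil
	}
	if err := p.Validate(strfmt.Default); err != nil {
		return err
	}
	return p.ContextValidate(ctx, strfmt.Default)
}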
+ } + return nil +} + +func (m *V2beta1PipelineVersion) contextValidateError(ctx context.Context, formats strfmt.Registry) error { + + if m.Error != nil { + + if swag.IsZero(m.Error) { // not required + return nil + } + + if err := m.Error.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("error") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("error") + } + return err + } + } + + return nil +} + +func (m *V2beta1PipelineVersion) contextValidatePackageURL(ctx context.Context, formats strfmt.Registry) error { + + if m.PackageURL != nil { + + if swag.IsZero(m.PackageURL) { // not required + return nil + } + + if err := m.PackageURL.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("package_url") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("package_url") } return err } diff --git a/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_url.go b/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_url.go index 3b167fedbde..cb4c267f6aa 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_url.go +++ b/backend/api/v2beta1/go_http_client/pipeline_model/v2beta1_url.go @@ -6,12 +6,14 @@ package pipeline_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // V2beta1URL v2beta1 Url +// // swagger:model v2beta1Url type V2beta1URL struct { @@ -24,6 +26,11 @@ func (m *V2beta1URL) Validate(formats strfmt.Registry) error { return nil } +// ContextValidate validates this v2beta1 Url based on context it is used +func (m *V2beta1URL) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *V2beta1URL) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_client.go b/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_client.go index 32e48929fc1..5d8d7829576 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_client.go +++ b/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_client.go @@ -8,8 +8,7 @@ package pipeline_upload_client import ( "github.com/go-openapi/runtime" httptransport "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service" ) @@ -56,9 +55,7 @@ func New(transport runtime.ClientTransport, formats strfmt.Registry) *PipelineUp cli := new(PipelineUpload) cli.Transport = transport - cli.PipelineUploadService = pipeline_upload_service.New(transport, formats) - return cli } @@ -103,7 +100,7 @@ func (cfg *TransportConfig) WithSchemes(schemes []string) *TransportConfig { // PipelineUpload is a client for pipeline upload type PipelineUpload struct { - PipelineUploadService *pipeline_upload_service.Client + PipelineUploadService pipeline_upload_service.ClientService Transport runtime.ClientTransport } @@ -111,7 +108,5 @@ type PipelineUpload struct { // SetTransport changes the transport on the client and all its subresources func (c *PipelineUpload) SetTransport(transport runtime.ClientTransport) { c.Transport = 
transport - c.PipelineUploadService.SetTransport(transport) - } diff --git a/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/pipeline_upload_service_client.go b/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/pipeline_upload_service_client.go index 0e86bf15295..5fa7ead4322 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/pipeline_upload_service_client.go +++ b/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/pipeline_upload_service_client.go @@ -7,15 +7,40 @@ package pipeline_upload_service import ( "github.com/go-openapi/runtime" - - strfmt "github.com/go-openapi/strfmt" + httptransport "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" ) // New creates a new pipeline upload service API client. -func New(transport runtime.ClientTransport, formats strfmt.Registry) *Client { +func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService { return &Client{transport: transport, formats: formats} } +// New creates a new pipeline upload service API client with basic auth credentials. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - user: user for basic authentication header. +// - password: password for basic authentication header. +func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BasicAuth(user, password) + return &Client{transport: transport, formats: strfmt.Default} +} + +// New creates a new pipeline upload service API client with a bearer token for authentication. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - bearerToken: bearer token for Bearer authentication header. +func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BearerToken(bearerToken) + return &Client{transport: transport, formats: strfmt.Default} +} + /* Client for pipeline upload service API */ @@ -24,16 +49,51 @@ type Client struct { formats strfmt.Registry } +// ClientOption may be used to customize the behavior of Client methods. +type ClientOption func(*runtime.ClientOperation) + +// This client is generated with a few options you might find useful for your swagger spec. +// +// Feel free to add you own set of options. + +// WithContentType allows the client to force the Content-Type header +// to negotiate a specific Consumer from the server. +// +// You may use this option to set arbitrary extensions to your MIME media type. +func WithContentType(mime string) ClientOption { + return func(r *runtime.ClientOperation) { + r.ConsumesMediaTypes = []string{mime} + } +} + +// WithContentTypeApplicationJSON sets the Content-Type header to "application/json". +func WithContentTypeApplicationJSON(r *runtime.ClientOperation) { + r.ConsumesMediaTypes = []string{"application/json"} +} + +// WithContentTypeMultipartFormData sets the Content-Type header to "multipart/form-data". 
+func WithContentTypeMultipartFormData(r *runtime.ClientOperation) { + r.ConsumesMediaTypes = []string{"multipart/form-data"} +} + +// ClientService is the interface for Client methods +type ClientService interface { + UploadPipeline(params *UploadPipelineParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UploadPipelineOK, error) + + UploadPipelineVersion(params *UploadPipelineVersionParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UploadPipelineVersionOK, error) + + SetTransport(transport runtime.ClientTransport) +} + /* UploadPipeline upload pipeline API */ -func (a *Client) UploadPipeline(params *UploadPipelineParams, authInfo runtime.ClientAuthInfoWriter) (*UploadPipelineOK, error) { +func (a *Client) UploadPipeline(params *UploadPipelineParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UploadPipelineOK, error) { // TODO: Validate the params before sending if params == nil { params = NewUploadPipelineParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "UploadPipeline", Method: "POST", PathPattern: "/apis/v2beta1/pipelines/upload", @@ -45,24 +105,33 @@ func (a *Client) UploadPipeline(params *UploadPipelineParams, authInfo runtime.C AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*UploadPipelineOK), nil - + success, ok := result.(*UploadPipelineOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*UploadPipelineDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* UploadPipelineVersion upload pipeline version API */ -func (a *Client) UploadPipelineVersion(params *UploadPipelineVersionParams, authInfo runtime.ClientAuthInfoWriter) (*UploadPipelineVersionOK, error) { +func (a *Client) UploadPipelineVersion(params *UploadPipelineVersionParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UploadPipelineVersionOK, error) { // TODO: Validate the params before sending if params == nil { params = NewUploadPipelineVersionParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "UploadPipelineVersion", Method: "POST", PathPattern: "/apis/v2beta1/pipelines/upload_version", @@ -74,12 +143,22 @@ func (a *Client) UploadPipelineVersion(params *UploadPipelineVersionParams, auth AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*UploadPipelineVersionOK), nil - + success, ok := result.(*UploadPipelineVersionOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*UploadPipelineVersionDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } // SetTransport changes the transport on the client diff --git a/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_parameters.go b/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_parameters.go index 8973d65a536..4bd2e970768 
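The upload service client now exposes a ClientService interface, bearer-token and basic-auth constructors, and functional ClientOptions. A rough usage sketch; the host, token, file path, and payload field access are placeholders, and runtime.NamedReader is assumed to be available from go-openapi/runtime:

package example

import (
	"fmt"
	"os"

	"github.com/go-openapi/runtime"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service"
)

// uploadPipeline builds a bearer-token client and forces the multipart
// consumer through the new ClientOption hook.
func uploadPipeline(host, token, path string) error {
	client := pipeline_upload_service.NewClientWithBearerToken(host, "/", "https", token)

	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()

	params := pipeline_upload_service.NewUploadPipelineParams()
	name := "demo-pipeline" // illustrative pipeline name
	params.Name = &name
	params.Uploadfile = runtime.NamedReader(path, f)

	// authInfo is nil because the bearer token is already installed as the
	// transport's default authentication.
	ok, err := client.UploadPipeline(params, nil, pipeline_upload_service.WithContentTypeMultipartFormData)
	if err != nil {
		return err
	}
	if p := ok.GetPayload(); p != nil {
		fmt.Println("uploaded pipeline:", p.DisplayName)
	}
	return nil
}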
100644 --- a/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_parameters.go +++ b/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_parameters.go @@ -13,65 +13,69 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewUploadPipelineParams creates a new UploadPipelineParams object -// with the default values initialized. +// NewUploadPipelineParams creates a new UploadPipelineParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewUploadPipelineParams() *UploadPipelineParams { - var () return &UploadPipelineParams{ - timeout: cr.DefaultTimeout, } } // NewUploadPipelineParamsWithTimeout creates a new UploadPipelineParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewUploadPipelineParamsWithTimeout(timeout time.Duration) *UploadPipelineParams { - var () return &UploadPipelineParams{ - timeout: timeout, } } // NewUploadPipelineParamsWithContext creates a new UploadPipelineParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewUploadPipelineParamsWithContext(ctx context.Context) *UploadPipelineParams { - var () return &UploadPipelineParams{ - Context: ctx, } } // NewUploadPipelineParamsWithHTTPClient creates a new UploadPipelineParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewUploadPipelineParamsWithHTTPClient(client *http.Client) *UploadPipelineParams { - var () return &UploadPipelineParams{ HTTPClient: client, } } -/*UploadPipelineParams contains all the parameters to send to the API endpoint -for the upload pipeline operation typically these are written to a http.Request +/* +UploadPipelineParams contains all the parameters to send to the API endpoint + + for the upload pipeline operation. + + Typically these are written to a http.Request. */ type UploadPipelineParams struct { - /*Description*/ + // Description. Description *string - /*DisplayName*/ + + // DisplayName. DisplayName *string - /*Name*/ + + // Name. Name *string - /*Namespace*/ + + // Namespace. Namespace *string - /*Uploadfile - The pipeline to upload. Maximum size of 32MB is supported. + /* Uploadfile. + + The pipeline to upload. Maximum size of 32MB is supported. */ Uploadfile runtime.NamedReadCloser @@ -80,6 +84,21 @@ type UploadPipelineParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the upload pipeline params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *UploadPipelineParams) WithDefaults() *UploadPipelineParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the upload pipeline params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *UploadPipelineParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the upload pipeline params func (o *UploadPipelineParams) WithTimeout(timeout time.Duration) *UploadPipelineParams { o.SetTimeout(timeout) @@ -180,66 +199,69 @@ func (o *UploadPipelineParams) WriteToRequest(r runtime.ClientRequest, reg strfm // query param description var qrDescription string + if o.Description != nil { qrDescription = *o.Description } qDescription := qrDescription if qDescription != "" { + if err := r.SetQueryParam("description", qDescription); err != nil { return err } } - } if o.DisplayName != nil { // query param display_name var qrDisplayName string + if o.DisplayName != nil { qrDisplayName = *o.DisplayName } qDisplayName := qrDisplayName if qDisplayName != "" { + if err := r.SetQueryParam("display_name", qDisplayName); err != nil { return err } } - } if o.Name != nil { // query param name var qrName string + if o.Name != nil { qrName = *o.Name } qName := qrName if qName != "" { + if err := r.SetQueryParam("name", qName); err != nil { return err } } - } if o.Namespace != nil { // query param namespace var qrNamespace string + if o.Namespace != nil { qrNamespace = *o.Namespace } qNamespace := qrNamespace if qNamespace != "" { + if err := r.SetQueryParam("namespace", qNamespace); err != nil { return err } } - } - // form file param uploadfile if err := r.SetFileParam("uploadfile", o.Uploadfile); err != nil { return err diff --git a/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_responses.go b/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_responses.go index beccd61bfe4..3f5479cf0dc 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_responses.go +++ b/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_responses.go @@ -6,14 +6,14 @@ package pipeline_upload_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_upload_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_model" ) // UploadPipelineReader is a Reader for the UploadPipeline structure. @@ -24,14 +24,12 @@ type UploadPipelineReader struct { // ReadResponse reads a server response into the received o. func (o *UploadPipelineReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewUploadPipelineOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewUploadPipelineDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewUploadPipelineOK() *UploadPipelineOK { return &UploadPipelineOK{} } -/*UploadPipelineOK handles this case with default header values. +/* +UploadPipelineOK describes a response with status code 200, with default header values. 
UploadPipelineOK upload pipeline o k */ @@ -57,8 +56,48 @@ type UploadPipelineOK struct { Payload *pipeline_upload_model.V2beta1Pipeline } +// IsSuccess returns true when this upload pipeline o k response has a 2xx status code +func (o *UploadPipelineOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this upload pipeline o k response has a 3xx status code +func (o *UploadPipelineOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this upload pipeline o k response has a 4xx status code +func (o *UploadPipelineOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this upload pipeline o k response has a 5xx status code +func (o *UploadPipelineOK) IsServerError() bool { + return false +} + +// IsCode returns true when this upload pipeline o k response a status code equal to that given +func (o *UploadPipelineOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the upload pipeline o k response +func (o *UploadPipelineOK) Code() int { + return 200 +} + func (o *UploadPipelineOK) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/pipelines/upload][%d] uploadPipelineOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/pipelines/upload][%d] uploadPipelineOK %s", 200, payload) +} + +func (o *UploadPipelineOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/pipelines/upload][%d] uploadPipelineOK %s", 200, payload) +} + +func (o *UploadPipelineOK) GetPayload() *pipeline_upload_model.V2beta1Pipeline { + return o.Payload } func (o *UploadPipelineOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,7 +119,8 @@ func NewUploadPipelineDefault(code int) *UploadPipelineDefault { } } -/*UploadPipelineDefault handles this case with default header values. +/* +UploadPipelineDefault describes a response with status code -1, with default header values. 
UploadPipelineDefault upload pipeline default */ @@ -90,13 +130,48 @@ type UploadPipelineDefault struct { Payload *pipeline_upload_model.GooglerpcStatus } +// IsSuccess returns true when this upload pipeline default response has a 2xx status code +func (o *UploadPipelineDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this upload pipeline default response has a 3xx status code +func (o *UploadPipelineDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this upload pipeline default response has a 4xx status code +func (o *UploadPipelineDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this upload pipeline default response has a 5xx status code +func (o *UploadPipelineDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this upload pipeline default response a status code equal to that given +func (o *UploadPipelineDefault) IsCode(code int) bool { + return o._statusCode == code +} + // Code gets the status code for the upload pipeline default response func (o *UploadPipelineDefault) Code() int { return o._statusCode } func (o *UploadPipelineDefault) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/pipelines/upload][%d] UploadPipeline default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/pipelines/upload][%d] UploadPipeline default %s", o._statusCode, payload) +} + +func (o *UploadPipelineDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/pipelines/upload][%d] UploadPipeline default %s", o._statusCode, payload) +} + +func (o *UploadPipelineDefault) GetPayload() *pipeline_upload_model.GooglerpcStatus { + return o.Payload } func (o *UploadPipelineDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { diff --git a/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_parameters.go b/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_parameters.go index 22172771f1e..74a82210c7b 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_parameters.go +++ b/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_parameters.go @@ -13,65 +13,69 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewUploadPipelineVersionParams creates a new UploadPipelineVersionParams object -// with the default values initialized. +// NewUploadPipelineVersionParams creates a new UploadPipelineVersionParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewUploadPipelineVersionParams() *UploadPipelineVersionParams { - var () return &UploadPipelineVersionParams{ - timeout: cr.DefaultTimeout, } } // NewUploadPipelineVersionParamsWithTimeout creates a new UploadPipelineVersionParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewUploadPipelineVersionParamsWithTimeout(timeout time.Duration) *UploadPipelineVersionParams { - var () return &UploadPipelineVersionParams{ - timeout: timeout, } } // NewUploadPipelineVersionParamsWithContext creates a new UploadPipelineVersionParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewUploadPipelineVersionParamsWithContext(ctx context.Context) *UploadPipelineVersionParams { - var () return &UploadPipelineVersionParams{ - Context: ctx, } } // NewUploadPipelineVersionParamsWithHTTPClient creates a new UploadPipelineVersionParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewUploadPipelineVersionParamsWithHTTPClient(client *http.Client) *UploadPipelineVersionParams { - var () return &UploadPipelineVersionParams{ HTTPClient: client, } } -/*UploadPipelineVersionParams contains all the parameters to send to the API endpoint -for the upload pipeline version operation typically these are written to a http.Request +/* +UploadPipelineVersionParams contains all the parameters to send to the API endpoint + + for the upload pipeline version operation. + + Typically these are written to a http.Request. */ type UploadPipelineVersionParams struct { - /*Description*/ + // Description. Description *string - /*DisplayName*/ + + // DisplayName. DisplayName *string - /*Name*/ + + // Name. Name *string - /*Pipelineid*/ + + // Pipelineid. Pipelineid *string - /*Uploadfile - The pipeline to upload. Maximum size of 32MB is supported. + /* Uploadfile. + + The pipeline to upload. Maximum size of 32MB is supported. */ Uploadfile runtime.NamedReadCloser @@ -80,6 +84,21 @@ type UploadPipelineVersionParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the upload pipeline version params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *UploadPipelineVersionParams) WithDefaults() *UploadPipelineVersionParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the upload pipeline version params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *UploadPipelineVersionParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the upload pipeline version params func (o *UploadPipelineVersionParams) WithTimeout(timeout time.Duration) *UploadPipelineVersionParams { o.SetTimeout(timeout) @@ -180,66 +199,69 @@ func (o *UploadPipelineVersionParams) WriteToRequest(r runtime.ClientRequest, re // query param description var qrDescription string + if o.Description != nil { qrDescription = *o.Description } qDescription := qrDescription if qDescription != "" { + if err := r.SetQueryParam("description", qDescription); err != nil { return err } } - } if o.DisplayName != nil { // query param display_name var qrDisplayName string + if o.DisplayName != nil { qrDisplayName = *o.DisplayName } qDisplayName := qrDisplayName if qDisplayName != "" { + if err := r.SetQueryParam("display_name", qDisplayName); err != nil { return err } } - } if o.Name != nil { // query param name var qrName string + if o.Name != nil { qrName = *o.Name } qName := qrName if qName != "" { + if err := r.SetQueryParam("name", qName); err != nil { return err } } - } if o.Pipelineid != nil { // query param pipelineid var qrPipelineid string + if o.Pipelineid != nil { qrPipelineid = *o.Pipelineid } qPipelineid := qrPipelineid if qPipelineid != "" { + if err := r.SetQueryParam("pipelineid", qPipelineid); err != nil { return err } } - } - // form file param uploadfile if err := r.SetFileParam("uploadfile", o.Uploadfile); err != nil { return err diff --git a/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_responses.go b/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_responses.go index d7364ac772a..099d8577e92 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_responses.go +++ b/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_responses.go @@ -6,14 +6,14 @@ package pipeline_upload_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - pipeline_upload_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_model" ) // UploadPipelineVersionReader is a Reader for the UploadPipelineVersion structure. @@ -24,14 +24,12 @@ type UploadPipelineVersionReader struct { // ReadResponse reads a server response into the received o. func (o *UploadPipelineVersionReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewUploadPipelineVersionOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewUploadPipelineVersionDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewUploadPipelineVersionOK() *UploadPipelineVersionOK { return &UploadPipelineVersionOK{} } -/*UploadPipelineVersionOK handles this case with default header values. +/* +UploadPipelineVersionOK describes a response with status code 200, with default header values. 
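NewUploadPipelineVersionParams no longer hydrates defaults eagerly; defaults are opt-in via WithDefaults/SetDefaults, and timeouts still come from the With*Timeout constructors or setters. A short construction sketch under those assumptions; newVersionParams is illustrative:

package example

import (
	"time"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service"
)

// newVersionParams shows the regenerated pattern: start from the bare
// constructor, opt in to defaults, then set the exported fields directly.
func newVersionParams(pipelineID, name string) *pipeline_upload_service.UploadPipelineVersionParams {
	params := pipeline_upload_service.NewUploadPipelineVersionParams().
		WithDefaults().
		WithTimeout(90 * time.Second)
	// WithDefaults is currently a no-op ("no default values defined for this
	// parameter"), but keeping it in the chain future-proofs callers against
	// defaults appearing in a later regeneration.

	params.Pipelineid = &pipelineID
	params.Name = &name
	return params
}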
UploadPipelineVersionOK upload pipeline version o k */ @@ -57,8 +56,48 @@ type UploadPipelineVersionOK struct { Payload *pipeline_upload_model.V2beta1PipelineVersion } +// IsSuccess returns true when this upload pipeline version o k response has a 2xx status code +func (o *UploadPipelineVersionOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this upload pipeline version o k response has a 3xx status code +func (o *UploadPipelineVersionOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this upload pipeline version o k response has a 4xx status code +func (o *UploadPipelineVersionOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this upload pipeline version o k response has a 5xx status code +func (o *UploadPipelineVersionOK) IsServerError() bool { + return false +} + +// IsCode returns true when this upload pipeline version o k response a status code equal to that given +func (o *UploadPipelineVersionOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the upload pipeline version o k response +func (o *UploadPipelineVersionOK) Code() int { + return 200 +} + func (o *UploadPipelineVersionOK) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/pipelines/upload_version][%d] uploadPipelineVersionOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/pipelines/upload_version][%d] uploadPipelineVersionOK %s", 200, payload) +} + +func (o *UploadPipelineVersionOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/pipelines/upload_version][%d] uploadPipelineVersionOK %s", 200, payload) +} + +func (o *UploadPipelineVersionOK) GetPayload() *pipeline_upload_model.V2beta1PipelineVersion { + return o.Payload } func (o *UploadPipelineVersionOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,7 +119,8 @@ func NewUploadPipelineVersionDefault(code int) *UploadPipelineVersionDefault { } } -/*UploadPipelineVersionDefault handles this case with default header values. +/* +UploadPipelineVersionDefault describes a response with status code -1, with default header values. 
UploadPipelineVersionDefault upload pipeline version default */ @@ -90,13 +130,48 @@ type UploadPipelineVersionDefault struct { Payload *pipeline_upload_model.GooglerpcStatus } +// IsSuccess returns true when this upload pipeline version default response has a 2xx status code +func (o *UploadPipelineVersionDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this upload pipeline version default response has a 3xx status code +func (o *UploadPipelineVersionDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this upload pipeline version default response has a 4xx status code +func (o *UploadPipelineVersionDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this upload pipeline version default response has a 5xx status code +func (o *UploadPipelineVersionDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this upload pipeline version default response a status code equal to that given +func (o *UploadPipelineVersionDefault) IsCode(code int) bool { + return o._statusCode == code +} + // Code gets the status code for the upload pipeline version default response func (o *UploadPipelineVersionDefault) Code() int { return o._statusCode } func (o *UploadPipelineVersionDefault) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/pipelines/upload_version][%d] UploadPipelineVersion default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/pipelines/upload_version][%d] UploadPipelineVersion default %s", o._statusCode, payload) +} + +func (o *UploadPipelineVersionDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/pipelines/upload_version][%d] UploadPipelineVersion default %s", o._statusCode, payload) +} + +func (o *UploadPipelineVersionDefault) GetPayload() *pipeline_upload_model.GooglerpcStatus { + return o.Payload } func (o *UploadPipelineVersionDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { diff --git a/backend/api/v2beta1/go_http_client/pipeline_upload_model/googlerpc_status.go b/backend/api/v2beta1/go_http_client/pipeline_upload_model/googlerpc_status.go index 15fca7b664b..58d7931ed35 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_upload_model/googlerpc_status.go +++ b/backend/api/v2beta1/go_http_client/pipeline_upload_model/googlerpc_status.go @@ -6,11 +6,11 @@ package pipeline_upload_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) @@ -21,6 +21,7 @@ import ( // // You can find out more about this error model and how to work with it in the // [API Design Guide](https://cloud.google.com/apis/design/errors). 
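The regenerated default response now carries a *GooglerpcStatus payload and exposes Code(), GetPayload(), and the IsClientError()/IsServerError() helpers. A minimal caller-side sketch, assuming the usual go-swagger convention that the *Default value comes back as the returned error; the package alias, logging, and Code/Message field access are illustrative only:

package example

import (
	"errors"
	"log"

	uploadsvc "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service"
)

// logUploadError inspects an error returned by the generated upload client.
func logUploadError(err error) {
	var def *uploadsvc.UploadPipelineVersionDefault
	if errors.As(err, &def) {
		// GetPayload returns the *pipeline_upload_model.GooglerpcStatus decoded
		// from the non-200 response body.
		if st := def.GetPayload(); st != nil {
			log.Printf("upload failed: http=%d clientErr=%t code=%d msg=%q",
				def.Code(), def.IsClientError(), st.Code, st.Message)
			return
		}
	}
	log.Printf("upload failed: %v", err)
}
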
+// // swagger:model googlerpcStatus type GooglerpcStatus struct { @@ -52,7 +53,6 @@ func (m *GooglerpcStatus) Validate(formats strfmt.Registry) error { } func (m *GooglerpcStatus) validateDetails(formats strfmt.Registry) error { - if swag.IsZero(m.Details) { // not required return nil } @@ -66,6 +66,47 @@ func (m *GooglerpcStatus) validateDetails(formats strfmt.Registry) error { if err := m.Details[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this googlerpc status based on the context it is used +func (m *GooglerpcStatus) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateDetails(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) contextValidateDetails(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Details); i++ { + + if m.Details[i] != nil { + + if swag.IsZero(m.Details[i]) { // not required + return nil + } + + if err := m.Details[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) } return err } diff --git a/backend/api/v2beta1/go_http_client/pipeline_upload_model/protobuf_any.go b/backend/api/v2beta1/go_http_client/pipeline_upload_model/protobuf_any.go index 80a4d4cd71f..3031bdebdde 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_upload_model/protobuf_any.go +++ b/backend/api/v2beta1/go_http_client/pipeline_upload_model/protobuf_any.go @@ -6,9 +6,9 @@ package pipeline_upload_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) @@ -20,42 +20,42 @@ import ( // // Example 1: Pack and unpack a message in C++. // -// Foo foo = ...; -// Any any; -// any.PackFrom(foo); -// ... -// if (any.UnpackTo(&foo)) { -// ... -// } +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } // // Example 2: Pack and unpack a message in Java. // -// Foo foo = ...; -// Any any = Any.pack(foo); -// ... -// if (any.is(Foo.class)) { -// foo = any.unpack(Foo.class); -// } -// -// Example 3: Pack and unpack a message in Python. -// -// foo = Foo(...) -// any = Any() -// any.Pack(foo) -// ... -// if any.Is(Foo.DESCRIPTOR): -// any.Unpack(foo) -// ... -// -// Example 4: Pack and unpack a message in Go -// -// foo := &pb.Foo{...} -// any, err := ptypes.MarshalAny(foo) -// ... -// foo := &pb.Foo{} -// if err := ptypes.UnmarshalAny(any, foo); err != nil { -// ... -// } +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... 
+// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := ptypes.MarshalAny(foo) +// ... +// foo := &pb.Foo{} +// if err := ptypes.UnmarshalAny(any, foo); err != nil { +// ... +// } // // The pack methods provided by protobuf library will by default use // 'type.googleapis.com/full.type.name' as the type URL and the unpack @@ -63,34 +63,34 @@ import ( // in the type URL, for example "foo.bar.com/x/y.z" will yield type // name "y.z". // -// // JSON // ==== // The JSON representation of an `Any` value uses the regular // representation of the deserialized, embedded message, with an // additional field `@type` which contains the type URL. Example: // -// package google.profile; -// message Person { -// string first_name = 1; -// string last_name = 2; -// } +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } // -// { -// "@type": "type.googleapis.com/google.profile.Person", -// "firstName": , -// "lastName": -// } +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } // // If the embedded message type is well-known and has a custom JSON // representation, that representation will be embedded adding a field // `value` which holds the custom JSON in addition to the `@type` // field. Example (for message [google.protobuf.Duration][]): // -// { -// "@type": "type.googleapis.com/google.protobuf.Duration", -// "value": "1.212s" -// } +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// // swagger:model protobufAny type ProtobufAny struct { @@ -129,26 +129,11 @@ type ProtobufAny struct { // Validate validates this protobuf any func (m *ProtobufAny) Validate(formats strfmt.Registry) error { - var res []error - - if err := m.validateValue(formats); err != nil { - res = append(res, err) - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) 
- } return nil } -func (m *ProtobufAny) validateValue(formats strfmt.Registry) error { - - if swag.IsZero(m.Value) { // not required - return nil - } - - // Format "byte" (base64 string) is already validated when unmarshalled - +// ContextValidate validates this protobuf any based on context it is used +func (m *ProtobufAny) ContextValidate(ctx context.Context, formats strfmt.Registry) error { return nil } diff --git a/backend/api/v2beta1/go_http_client/pipeline_upload_model/v2beta1_pipeline.go b/backend/api/v2beta1/go_http_client/pipeline_upload_model/v2beta1_pipeline.go index 10e115525bb..34b18903e2b 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_upload_model/v2beta1_pipeline.go +++ b/backend/api/v2beta1/go_http_client/pipeline_upload_model/v2beta1_pipeline.go @@ -6,14 +6,16 @@ package pipeline_upload_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // V2beta1Pipeline v2beta1 pipeline +// // swagger:model v2beta1Pipeline type V2beta1Pipeline struct { @@ -63,7 +65,6 @@ func (m *V2beta1Pipeline) Validate(formats strfmt.Registry) error { } func (m *V2beta1Pipeline) validateCreatedAt(formats strfmt.Registry) error { - if swag.IsZero(m.CreatedAt) { // not required return nil } @@ -76,7 +77,6 @@ func (m *V2beta1Pipeline) validateCreatedAt(formats strfmt.Registry) error { } func (m *V2beta1Pipeline) validateError(formats strfmt.Registry) error { - if swag.IsZero(m.Error) { // not required return nil } @@ -85,6 +85,43 @@ func (m *V2beta1Pipeline) validateError(formats strfmt.Registry) error { if err := m.Error.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("error") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("error") + } + return err + } + } + + return nil +} + +// ContextValidate validate this v2beta1 pipeline based on the context it is used +func (m *V2beta1Pipeline) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateError(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *V2beta1Pipeline) contextValidateError(ctx context.Context, formats strfmt.Registry) error { + + if m.Error != nil { + + if swag.IsZero(m.Error) { // not required + return nil + } + + if err := m.Error.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("error") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("error") } return err } diff --git a/backend/api/v2beta1/go_http_client/pipeline_upload_model/v2beta1_pipeline_version.go b/backend/api/v2beta1/go_http_client/pipeline_upload_model/v2beta1_pipeline_version.go index 979104ca072..a9704ebba12 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_upload_model/v2beta1_pipeline_version.go +++ b/backend/api/v2beta1/go_http_client/pipeline_upload_model/v2beta1_pipeline_version.go @@ -6,14 +6,16 @@ package pipeline_upload_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // V2beta1PipelineVersion v2beta1 pipeline version +// // swagger:model v2beta1PipelineVersion type V2beta1PipelineVersion struct { @@ -80,7 +82,6 @@ func (m *V2beta1PipelineVersion) Validate(formats strfmt.Registry) error { } func (m *V2beta1PipelineVersion) validateCreatedAt(formats strfmt.Registry) error { - if swag.IsZero(m.CreatedAt) { // not required return nil } @@ -93,7 +94,6 @@ func (m *V2beta1PipelineVersion) validateCreatedAt(formats strfmt.Registry) erro } func (m *V2beta1PipelineVersion) validateError(formats strfmt.Registry) error { - if swag.IsZero(m.Error) { // not required return nil } @@ -102,6 +102,8 @@ func (m *V2beta1PipelineVersion) validateError(formats strfmt.Registry) error { if err := m.Error.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("error") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("error") } return err } @@ -111,7 +113,6 @@ func (m *V2beta1PipelineVersion) validateError(formats strfmt.Registry) error { } func (m *V2beta1PipelineVersion) validatePackageURL(formats strfmt.Registry) error { - if swag.IsZero(m.PackageURL) { // not required return nil } @@ -120,6 +121,68 @@ func (m *V2beta1PipelineVersion) validatePackageURL(formats strfmt.Registry) err if err := m.PackageURL.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("package_url") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("package_url") + } + return err + } + } + + return nil +} + +// ContextValidate validate this v2beta1 pipeline version based on the context it is used +func (m *V2beta1PipelineVersion) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateError(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidatePackageURL(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *V2beta1PipelineVersion) contextValidateError(ctx context.Context, formats strfmt.Registry) error { + + if m.Error != nil { + + if swag.IsZero(m.Error) { // not required + return nil + } + + if err := m.Error.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("error") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("error") + } + return err + } + } + + return nil +} + +func (m *V2beta1PipelineVersion) contextValidatePackageURL(ctx context.Context, formats strfmt.Registry) error { + + if m.PackageURL != nil { + + if swag.IsZero(m.PackageURL) { // not required + return nil + } + + if err := m.PackageURL.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("package_url") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("package_url") } return err } diff --git a/backend/api/v2beta1/go_http_client/pipeline_upload_model/v2beta1_url.go b/backend/api/v2beta1/go_http_client/pipeline_upload_model/v2beta1_url.go index 0a8ea1c8758..f9f273499fd 100644 --- a/backend/api/v2beta1/go_http_client/pipeline_upload_model/v2beta1_url.go +++ b/backend/api/v2beta1/go_http_client/pipeline_upload_model/v2beta1_url.go @@ -6,12 +6,14 @@ package pipeline_upload_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // V2beta1URL v2beta1 Url +// // swagger:model v2beta1Url type V2beta1URL struct { @@ -24,6 +26,11 @@ func (m *V2beta1URL) Validate(formats strfmt.Registry) error { return nil } +// ContextValidate validates this v2beta1 Url based on context it is used +func (m *V2beta1URL) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *V2beta1URL) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_client.go b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_client.go index 8eea9a41bd2..1498270d778 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_client.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_client.go @@ -8,8 +8,7 @@ package recurring_run_client import ( "github.com/go-openapi/runtime" httptransport "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service" ) @@ -56,9 +55,7 @@ func New(transport runtime.ClientTransport, formats strfmt.Registry) *RecurringR cli := new(RecurringRun) cli.Transport = transport - cli.RecurringRunService = recurring_run_service.New(transport, formats) - return cli } @@ -103,7 +100,7 @@ func (cfg *TransportConfig) WithSchemes(schemes []string) *TransportConfig { // RecurringRun is a client for recurring run type RecurringRun struct { - RecurringRunService *recurring_run_service.Client + RecurringRunService recurring_run_service.ClientService Transport runtime.ClientTransport } @@ -111,7 +108,5 @@ type RecurringRun struct { // SetTransport changes the transport on the client and all its subresources func (c *RecurringRun) SetTransport(transport runtime.ClientTransport) { c.Transport = transport - 
c.RecurringRunService.SetTransport(transport) - } diff --git a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_client.go b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_client.go index 0af258f1b63..286ff12f685 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_client.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_client.go @@ -7,15 +7,40 @@ package recurring_run_service import ( "github.com/go-openapi/runtime" - - strfmt "github.com/go-openapi/strfmt" + httptransport "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" ) // New creates a new recurring run service API client. -func New(transport runtime.ClientTransport, formats strfmt.Registry) *Client { +func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService { return &Client{transport: transport, formats: formats} } +// New creates a new recurring run service API client with basic auth credentials. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - user: user for basic authentication header. +// - password: password for basic authentication header. +func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BasicAuth(user, password) + return &Client{transport: transport, formats: strfmt.Default} +} + +// New creates a new recurring run service API client with a bearer token for authentication. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - bearerToken: bearer token for Bearer authentication header. +func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BearerToken(bearerToken) + return &Client{transport: transport, formats: strfmt.Default} +} + /* Client for recurring run service API */ @@ -24,16 +49,35 @@ type Client struct { formats strfmt.Registry } +// ClientOption may be used to customize the behavior of Client methods. 
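The client constructor now returns the ClientService interface, and NewClientWithBasicAuth/NewClientWithBearerToken build a preconfigured transport in one call. A minimal sketch of the bearer-token path; host, base path, and scheme are placeholders:

package example

import (
	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service"
)

// newRecurringRunService builds a bearer-token client; typing the return value
// as ClientService keeps callers decoupled from the concrete *Client.
func newRecurringRunService(token string) recurring_run_service.ClientService {
	return recurring_run_service.NewClientWithBearerToken(
		"localhost:8888", // host (placeholder, e.g. a port-forwarded API server)
		"/",              // base path (placeholder)
		"http",           // scheme
		token,
	)
}

Returning the interface rather than *Client also lets tests substitute a hand-written fake that satisfies ClientService.
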
+type ClientOption func(*runtime.ClientOperation) + +// ClientService is the interface for Client methods +type ClientService interface { + RecurringRunServiceCreateRecurringRun(params *RecurringRunServiceCreateRecurringRunParams, opts ...ClientOption) (*RecurringRunServiceCreateRecurringRunOK, error) + + RecurringRunServiceDeleteRecurringRun(params *RecurringRunServiceDeleteRecurringRunParams, opts ...ClientOption) (*RecurringRunServiceDeleteRecurringRunOK, error) + + RecurringRunServiceDisableRecurringRun(params *RecurringRunServiceDisableRecurringRunParams, opts ...ClientOption) (*RecurringRunServiceDisableRecurringRunOK, error) + + RecurringRunServiceEnableRecurringRun(params *RecurringRunServiceEnableRecurringRunParams, opts ...ClientOption) (*RecurringRunServiceEnableRecurringRunOK, error) + + RecurringRunServiceGetRecurringRun(params *RecurringRunServiceGetRecurringRunParams, opts ...ClientOption) (*RecurringRunServiceGetRecurringRunOK, error) + + RecurringRunServiceListRecurringRuns(params *RecurringRunServiceListRecurringRunsParams, opts ...ClientOption) (*RecurringRunServiceListRecurringRunsOK, error) + + SetTransport(transport runtime.ClientTransport) +} + /* RecurringRunServiceCreateRecurringRun creates a new recurring run in an experiment given the experiment ID */ -func (a *Client) RecurringRunServiceCreateRecurringRun(params *RecurringRunServiceCreateRecurringRunParams) (*RecurringRunServiceCreateRecurringRunOK, error) { +func (a *Client) RecurringRunServiceCreateRecurringRun(params *RecurringRunServiceCreateRecurringRunParams, opts ...ClientOption) (*RecurringRunServiceCreateRecurringRunOK, error) { // TODO: Validate the params before sending if params == nil { params = NewRecurringRunServiceCreateRecurringRunParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RecurringRunService_CreateRecurringRun", Method: "POST", PathPattern: "/apis/v2beta1/recurringruns", @@ -44,24 +88,33 @@ func (a *Client) RecurringRunServiceCreateRecurringRun(params *RecurringRunServi Reader: &RecurringRunServiceCreateRecurringRunReader{formats: a.formats}, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RecurringRunServiceCreateRecurringRunOK), nil - + success, ok := result.(*RecurringRunServiceCreateRecurringRunOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RecurringRunServiceCreateRecurringRunDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RecurringRunServiceDeleteRecurringRun deletes a recurring run */ -func (a *Client) RecurringRunServiceDeleteRecurringRun(params *RecurringRunServiceDeleteRecurringRunParams) (*RecurringRunServiceDeleteRecurringRunOK, error) { +func (a *Client) RecurringRunServiceDeleteRecurringRun(params *RecurringRunServiceDeleteRecurringRunParams, opts ...ClientOption) (*RecurringRunServiceDeleteRecurringRunOK, error) { // TODO: Validate the params before sending if params == nil { params = NewRecurringRunServiceDeleteRecurringRunParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RecurringRunService_DeleteRecurringRun", Method: "DELETE", PathPattern: "/apis/v2beta1/recurringruns/{recurring_run_id}", @@ -72,24 +125,33 @@ func (a 
*Client) RecurringRunServiceDeleteRecurringRun(params *RecurringRunServi Reader: &RecurringRunServiceDeleteRecurringRunReader{formats: a.formats}, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RecurringRunServiceDeleteRecurringRunOK), nil - + success, ok := result.(*RecurringRunServiceDeleteRecurringRunOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RecurringRunServiceDeleteRecurringRunDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RecurringRunServiceDisableRecurringRun stops a recurring run and all its associated runs the recurring run is not deleted */ -func (a *Client) RecurringRunServiceDisableRecurringRun(params *RecurringRunServiceDisableRecurringRunParams) (*RecurringRunServiceDisableRecurringRunOK, error) { +func (a *Client) RecurringRunServiceDisableRecurringRun(params *RecurringRunServiceDisableRecurringRunParams, opts ...ClientOption) (*RecurringRunServiceDisableRecurringRunOK, error) { // TODO: Validate the params before sending if params == nil { params = NewRecurringRunServiceDisableRecurringRunParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RecurringRunService_DisableRecurringRun", Method: "POST", PathPattern: "/apis/v2beta1/recurringruns/{recurring_run_id}:disable", @@ -100,24 +162,33 @@ func (a *Client) RecurringRunServiceDisableRecurringRun(params *RecurringRunServ Reader: &RecurringRunServiceDisableRecurringRunReader{formats: a.formats}, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RecurringRunServiceDisableRecurringRunOK), nil - + success, ok := result.(*RecurringRunServiceDisableRecurringRunOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RecurringRunServiceDisableRecurringRunDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RecurringRunServiceEnableRecurringRun restarts a recurring run that was previously stopped all runs associated with the recurring run will continue */ -func (a *Client) RecurringRunServiceEnableRecurringRun(params *RecurringRunServiceEnableRecurringRunParams) (*RecurringRunServiceEnableRecurringRunOK, error) { +func (a *Client) RecurringRunServiceEnableRecurringRun(params *RecurringRunServiceEnableRecurringRunParams, opts ...ClientOption) (*RecurringRunServiceEnableRecurringRunOK, error) { // TODO: Validate the params before sending if params == nil { params = NewRecurringRunServiceEnableRecurringRunParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RecurringRunService_EnableRecurringRun", Method: "POST", PathPattern: "/apis/v2beta1/recurringruns/{recurring_run_id}:enable", @@ -128,24 +199,33 @@ func (a *Client) RecurringRunServiceEnableRecurringRun(params *RecurringRunServi Reader: &RecurringRunServiceEnableRecurringRunReader{formats: a.formats}, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := 
a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RecurringRunServiceEnableRecurringRunOK), nil - + success, ok := result.(*RecurringRunServiceEnableRecurringRunOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RecurringRunServiceEnableRecurringRunDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RecurringRunServiceGetRecurringRun finds a specific recurring run by ID */ -func (a *Client) RecurringRunServiceGetRecurringRun(params *RecurringRunServiceGetRecurringRunParams) (*RecurringRunServiceGetRecurringRunOK, error) { +func (a *Client) RecurringRunServiceGetRecurringRun(params *RecurringRunServiceGetRecurringRunParams, opts ...ClientOption) (*RecurringRunServiceGetRecurringRunOK, error) { // TODO: Validate the params before sending if params == nil { params = NewRecurringRunServiceGetRecurringRunParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RecurringRunService_GetRecurringRun", Method: "GET", PathPattern: "/apis/v2beta1/recurringruns/{recurring_run_id}", @@ -156,24 +236,33 @@ func (a *Client) RecurringRunServiceGetRecurringRun(params *RecurringRunServiceG Reader: &RecurringRunServiceGetRecurringRunReader{formats: a.formats}, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RecurringRunServiceGetRecurringRunOK), nil - + success, ok := result.(*RecurringRunServiceGetRecurringRunOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RecurringRunServiceGetRecurringRunDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RecurringRunServiceListRecurringRuns finds all recurring runs given experiment and namespace if experiment ID is not specified find all recurring runs across all experiments */ -func (a *Client) RecurringRunServiceListRecurringRuns(params *RecurringRunServiceListRecurringRunsParams) (*RecurringRunServiceListRecurringRunsOK, error) { +func (a *Client) RecurringRunServiceListRecurringRuns(params *RecurringRunServiceListRecurringRunsParams, opts ...ClientOption) (*RecurringRunServiceListRecurringRunsOK, error) { // TODO: Validate the params before sending if params == nil { params = NewRecurringRunServiceListRecurringRunsParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RecurringRunService_ListRecurringRuns", Method: "GET", PathPattern: "/apis/v2beta1/recurringruns", @@ -184,12 +273,22 @@ func (a *Client) RecurringRunServiceListRecurringRuns(params *RecurringRunServic Reader: &RecurringRunServiceListRecurringRunsReader{formats: a.formats}, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RecurringRunServiceListRecurringRunsOK), nil - + success, ok := result.(*RecurringRunServiceListRecurringRunsOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RecurringRunServiceListRecurringRunsDefault) + return nil, runtime.NewAPIError("unexpected success 
response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } // SetTransport changes the transport on the client diff --git a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_create_recurring_run_parameters.go b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_create_recurring_run_parameters.go index b9fc0c63ad5..b45b5ee7370 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_create_recurring_run_parameters.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_create_recurring_run_parameters.go @@ -13,67 +13,82 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - recurring_run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model" ) -// NewRecurringRunServiceCreateRecurringRunParams creates a new RecurringRunServiceCreateRecurringRunParams object -// with the default values initialized. +// NewRecurringRunServiceCreateRecurringRunParams creates a new RecurringRunServiceCreateRecurringRunParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewRecurringRunServiceCreateRecurringRunParams() *RecurringRunServiceCreateRecurringRunParams { - var () return &RecurringRunServiceCreateRecurringRunParams{ - timeout: cr.DefaultTimeout, } } // NewRecurringRunServiceCreateRecurringRunParamsWithTimeout creates a new RecurringRunServiceCreateRecurringRunParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRecurringRunServiceCreateRecurringRunParamsWithTimeout(timeout time.Duration) *RecurringRunServiceCreateRecurringRunParams { - var () return &RecurringRunServiceCreateRecurringRunParams{ - timeout: timeout, } } // NewRecurringRunServiceCreateRecurringRunParamsWithContext creates a new RecurringRunServiceCreateRecurringRunParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRecurringRunServiceCreateRecurringRunParamsWithContext(ctx context.Context) *RecurringRunServiceCreateRecurringRunParams { - var () return &RecurringRunServiceCreateRecurringRunParams{ - Context: ctx, } } // NewRecurringRunServiceCreateRecurringRunParamsWithHTTPClient creates a new RecurringRunServiceCreateRecurringRunParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. 
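Every service method now accepts variadic ClientOption values that can mutate the runtime.ClientOperation before it is submitted. A sketch of one such option, relying on go-openapi/runtime's exported Context field; the helper name and the 30-second deadline are illustrative:

package example

import (
	"context"
	"time"

	"github.com/go-openapi/runtime"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service"
)

// withContext returns a ClientOption that swaps the per-operation context,
// e.g. to attach a deadline or tracing metadata.
func withContext(ctx context.Context) recurring_run_service.ClientOption {
	return func(op *runtime.ClientOperation) {
		op.Context = ctx
	}
}

// listWithDeadline lists recurring runs with a 30s budget.
func listWithDeadline(svc recurring_run_service.ClientService) error {
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	params := recurring_run_service.NewRecurringRunServiceListRecurringRunsParams()
	_, err := svc.RecurringRunServiceListRecurringRuns(params, withContext(ctx))
	return err
}
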
func NewRecurringRunServiceCreateRecurringRunParamsWithHTTPClient(client *http.Client) *RecurringRunServiceCreateRecurringRunParams { - var () return &RecurringRunServiceCreateRecurringRunParams{ HTTPClient: client, } } -/*RecurringRunServiceCreateRecurringRunParams contains all the parameters to send to the API endpoint -for the recurring run service create recurring run operation typically these are written to a http.Request +/* +RecurringRunServiceCreateRecurringRunParams contains all the parameters to send to the API endpoint + + for the recurring run service create recurring run operation. + + Typically these are written to a http.Request. */ type RecurringRunServiceCreateRecurringRunParams struct { - /*Body - The recurring run to be created. + /* RecurringRun. + The recurring run to be created. */ - Body *recurring_run_model.V2beta1RecurringRun + RecurringRun *recurring_run_model.V2beta1RecurringRun timeout time.Duration Context context.Context HTTPClient *http.Client } +// WithDefaults hydrates default values in the recurring run service create recurring run params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RecurringRunServiceCreateRecurringRunParams) WithDefaults() *RecurringRunServiceCreateRecurringRunParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the recurring run service create recurring run params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RecurringRunServiceCreateRecurringRunParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the recurring run service create recurring run params func (o *RecurringRunServiceCreateRecurringRunParams) WithTimeout(timeout time.Duration) *RecurringRunServiceCreateRecurringRunParams { o.SetTimeout(timeout) @@ -107,15 +122,15 @@ func (o *RecurringRunServiceCreateRecurringRunParams) SetHTTPClient(client *http o.HTTPClient = client } -// WithBody adds the body to the recurring run service create recurring run params -func (o *RecurringRunServiceCreateRecurringRunParams) WithBody(body *recurring_run_model.V2beta1RecurringRun) *RecurringRunServiceCreateRecurringRunParams { - o.SetBody(body) +// WithRecurringRun adds the recurringRun to the recurring run service create recurring run params +func (o *RecurringRunServiceCreateRecurringRunParams) WithRecurringRun(recurringRun *recurring_run_model.V2beta1RecurringRun) *RecurringRunServiceCreateRecurringRunParams { + o.SetRecurringRun(recurringRun) return o } -// SetBody adds the body to the recurring run service create recurring run params -func (o *RecurringRunServiceCreateRecurringRunParams) SetBody(body *recurring_run_model.V2beta1RecurringRun) { - o.Body = body +// SetRecurringRun adds the recurringRun to the recurring run service create recurring run params +func (o *RecurringRunServiceCreateRecurringRunParams) SetRecurringRun(recurringRun *recurring_run_model.V2beta1RecurringRun) { + o.RecurringRun = recurringRun } // WriteToRequest writes these params to a swagger request @@ -125,9 +140,8 @@ func (o *RecurringRunServiceCreateRecurringRunParams) WriteToRequest(r runtime.C return err } var res []error - - if o.Body != nil { - if err := r.SetBodyParam(o.Body); err != nil { + if o.RecurringRun != nil { + if err := r.SetBodyParam(o.RecurringRun); err != nil { return err } } diff --git 
a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_create_recurring_run_responses.go b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_create_recurring_run_responses.go index f0ba81fc79b..c4a6d62b190 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_create_recurring_run_responses.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_create_recurring_run_responses.go @@ -6,14 +6,14 @@ package recurring_run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - recurring_run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model" ) // RecurringRunServiceCreateRecurringRunReader is a Reader for the RecurringRunServiceCreateRecurringRun structure. @@ -24,14 +24,12 @@ type RecurringRunServiceCreateRecurringRunReader struct { // ReadResponse reads a server response into the received o. func (o *RecurringRunServiceCreateRecurringRunReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRecurringRunServiceCreateRecurringRunOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRecurringRunServiceCreateRecurringRunDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRecurringRunServiceCreateRecurringRunOK() *RecurringRunServiceCreateRecu return &RecurringRunServiceCreateRecurringRunOK{} } -/*RecurringRunServiceCreateRecurringRunOK handles this case with default header values. +/* +RecurringRunServiceCreateRecurringRunOK describes a response with status code 200, with default header values. A successful response. 
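The create-recurring-run body parameter is renamed from Body to RecurringRun, so callers move from WithBody/SetBody to WithRecurringRun/SetRecurringRun, and the params gain WithDefaults/SetDefaults (currently a no-op, since no defaults are defined). A sketch under those assumptions; the DisplayName value is a placeholder, and the ContextValidate call assumes recurring_run_model was regenerated with the same context-aware validation shown above for the pipeline_upload_model types:

package example

import (
	"context"

	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service"
	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model"
)

// createRecurringRun shows the renamed body setter and the new defaults hook.
func createRecurringRun(ctx context.Context, svc recurring_run_service.ClientService) error {
	rr := &recurring_run_model.V2beta1RecurringRun{
		DisplayName: "nightly-train", // placeholder
	}

	// Context-aware validation added by the newer generator (assumed present
	// on this model as it is for the pipeline_upload_model types).
	if err := rr.ContextValidate(ctx, strfmt.Default); err != nil {
		return err
	}

	params := recurring_run_service.NewRecurringRunServiceCreateRecurringRunParams().
		WithContext(ctx).
		WithRecurringRun(rr) // previously WithBody(rr)
	params.SetDefaults() // no-op today: no defaults are defined for these params

	_, err := svc.RecurringRunServiceCreateRecurringRun(params)
	return err
}
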
*/ @@ -57,8 +56,48 @@ type RecurringRunServiceCreateRecurringRunOK struct { Payload *recurring_run_model.V2beta1RecurringRun } +// IsSuccess returns true when this recurring run service create recurring run o k response has a 2xx status code +func (o *RecurringRunServiceCreateRecurringRunOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this recurring run service create recurring run o k response has a 3xx status code +func (o *RecurringRunServiceCreateRecurringRunOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this recurring run service create recurring run o k response has a 4xx status code +func (o *RecurringRunServiceCreateRecurringRunOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this recurring run service create recurring run o k response has a 5xx status code +func (o *RecurringRunServiceCreateRecurringRunOK) IsServerError() bool { + return false +} + +// IsCode returns true when this recurring run service create recurring run o k response a status code equal to that given +func (o *RecurringRunServiceCreateRecurringRunOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the recurring run service create recurring run o k response +func (o *RecurringRunServiceCreateRecurringRunOK) Code() int { + return 200 +} + func (o *RecurringRunServiceCreateRecurringRunOK) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/recurringruns][%d] recurringRunServiceCreateRecurringRunOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/recurringruns][%d] recurringRunServiceCreateRecurringRunOK %s", 200, payload) +} + +func (o *RecurringRunServiceCreateRecurringRunOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/recurringruns][%d] recurringRunServiceCreateRecurringRunOK %s", 200, payload) +} + +func (o *RecurringRunServiceCreateRecurringRunOK) GetPayload() *recurring_run_model.V2beta1RecurringRun { + return o.Payload } func (o *RecurringRunServiceCreateRecurringRunOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewRecurringRunServiceCreateRecurringRunDefault(code int) *RecurringRunServ } } -/*RecurringRunServiceCreateRecurringRunDefault handles this case with default header values. +/* +RecurringRunServiceCreateRecurringRunDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type RecurringRunServiceCreateRecurringRunDefault struct { _statusCode int - Payload *recurring_run_model.RuntimeError + Payload *recurring_run_model.GooglerpcStatus +} + +// IsSuccess returns true when this recurring run service create recurring run default response has a 2xx status code +func (o *RecurringRunServiceCreateRecurringRunDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this recurring run service create recurring run default response has a 3xx status code +func (o *RecurringRunServiceCreateRecurringRunDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this recurring run service create recurring run default response has a 4xx status code +func (o *RecurringRunServiceCreateRecurringRunDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this recurring run service create recurring run default response has a 5xx status code +func (o *RecurringRunServiceCreateRecurringRunDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this recurring run service create recurring run default response a status code equal to that given +func (o *RecurringRunServiceCreateRecurringRunDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the recurring run service create recurring run default response @@ -96,12 +161,22 @@ func (o *RecurringRunServiceCreateRecurringRunDefault) Code() int { } func (o *RecurringRunServiceCreateRecurringRunDefault) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/recurringruns][%d] RecurringRunService_CreateRecurringRun default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/recurringruns][%d] RecurringRunService_CreateRecurringRun default %s", o._statusCode, payload) +} + +func (o *RecurringRunServiceCreateRecurringRunDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/recurringruns][%d] RecurringRunService_CreateRecurringRun default %s", o._statusCode, payload) +} + +func (o *RecurringRunServiceCreateRecurringRunDefault) GetPayload() *recurring_run_model.GooglerpcStatus { + return o.Payload } func (o *RecurringRunServiceCreateRecurringRunDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(recurring_run_model.RuntimeError) + o.Payload = new(recurring_run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_delete_recurring_run_parameters.go b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_delete_recurring_run_parameters.go index eee7ea35e94..7f606f3c0bf 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_delete_recurring_run_parameters.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_delete_recurring_run_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// 
NewRecurringRunServiceDeleteRecurringRunParams creates a new RecurringRunServiceDeleteRecurringRunParams object -// with the default values initialized. +// NewRecurringRunServiceDeleteRecurringRunParams creates a new RecurringRunServiceDeleteRecurringRunParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewRecurringRunServiceDeleteRecurringRunParams() *RecurringRunServiceDeleteRecurringRunParams { - var () return &RecurringRunServiceDeleteRecurringRunParams{ - timeout: cr.DefaultTimeout, } } // NewRecurringRunServiceDeleteRecurringRunParamsWithTimeout creates a new RecurringRunServiceDeleteRecurringRunParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRecurringRunServiceDeleteRecurringRunParamsWithTimeout(timeout time.Duration) *RecurringRunServiceDeleteRecurringRunParams { - var () return &RecurringRunServiceDeleteRecurringRunParams{ - timeout: timeout, } } // NewRecurringRunServiceDeleteRecurringRunParamsWithContext creates a new RecurringRunServiceDeleteRecurringRunParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRecurringRunServiceDeleteRecurringRunParamsWithContext(ctx context.Context) *RecurringRunServiceDeleteRecurringRunParams { - var () return &RecurringRunServiceDeleteRecurringRunParams{ - Context: ctx, } } // NewRecurringRunServiceDeleteRecurringRunParamsWithHTTPClient creates a new RecurringRunServiceDeleteRecurringRunParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRecurringRunServiceDeleteRecurringRunParamsWithHTTPClient(client *http.Client) *RecurringRunServiceDeleteRecurringRunParams { - var () return &RecurringRunServiceDeleteRecurringRunParams{ HTTPClient: client, } } -/*RecurringRunServiceDeleteRecurringRunParams contains all the parameters to send to the API endpoint -for the recurring run service delete recurring run operation typically these are written to a http.Request +/* +RecurringRunServiceDeleteRecurringRunParams contains all the parameters to send to the API endpoint + + for the recurring run service delete recurring run operation. + + Typically these are written to a http.Request. */ type RecurringRunServiceDeleteRecurringRunParams struct { - /*RecurringRunID - The ID of the recurring run to be deleted. + /* RecurringRunID. + The ID of the recurring run to be deleted. */ RecurringRunID string @@ -72,6 +72,21 @@ type RecurringRunServiceDeleteRecurringRunParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the recurring run service delete recurring run params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RecurringRunServiceDeleteRecurringRunParams) WithDefaults() *RecurringRunServiceDeleteRecurringRunParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the recurring run service delete recurring run params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *RecurringRunServiceDeleteRecurringRunParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the recurring run service delete recurring run params func (o *RecurringRunServiceDeleteRecurringRunParams) WithTimeout(timeout time.Duration) *RecurringRunServiceDeleteRecurringRunParams { o.SetTimeout(timeout) diff --git a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_delete_recurring_run_responses.go b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_delete_recurring_run_responses.go index 183b8ca191d..ddb97635065 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_delete_recurring_run_responses.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_delete_recurring_run_responses.go @@ -6,14 +6,14 @@ package recurring_run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - recurring_run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model" ) // RecurringRunServiceDeleteRecurringRunReader is a Reader for the RecurringRunServiceDeleteRecurringRun structure. @@ -24,14 +24,12 @@ type RecurringRunServiceDeleteRecurringRunReader struct { // ReadResponse reads a server response into the received o. func (o *RecurringRunServiceDeleteRecurringRunReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRecurringRunServiceDeleteRecurringRunOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRecurringRunServiceDeleteRecurringRunDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRecurringRunServiceDeleteRecurringRunOK() *RecurringRunServiceDeleteRecu return &RecurringRunServiceDeleteRecurringRunOK{} } -/*RecurringRunServiceDeleteRecurringRunOK handles this case with default header values. +/* +RecurringRunServiceDeleteRecurringRunOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type RecurringRunServiceDeleteRecurringRunOK struct { Payload interface{} } +// IsSuccess returns true when this recurring run service delete recurring run o k response has a 2xx status code +func (o *RecurringRunServiceDeleteRecurringRunOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this recurring run service delete recurring run o k response has a 3xx status code +func (o *RecurringRunServiceDeleteRecurringRunOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this recurring run service delete recurring run o k response has a 4xx status code +func (o *RecurringRunServiceDeleteRecurringRunOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this recurring run service delete recurring run o k response has a 5xx status code +func (o *RecurringRunServiceDeleteRecurringRunOK) IsServerError() bool { + return false +} + +// IsCode returns true when this recurring run service delete recurring run o k response a status code equal to that given +func (o *RecurringRunServiceDeleteRecurringRunOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the recurring run service delete recurring run o k response +func (o *RecurringRunServiceDeleteRecurringRunOK) Code() int { + return 200 +} + func (o *RecurringRunServiceDeleteRecurringRunOK) Error() string { - return fmt.Sprintf("[DELETE /apis/v2beta1/recurringruns/{recurring_run_id}][%d] recurringRunServiceDeleteRecurringRunOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v2beta1/recurringruns/{recurring_run_id}][%d] recurringRunServiceDeleteRecurringRunOK %s", 200, payload) +} + +func (o *RecurringRunServiceDeleteRecurringRunOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v2beta1/recurringruns/{recurring_run_id}][%d] recurringRunServiceDeleteRecurringRunOK %s", 200, payload) +} + +func (o *RecurringRunServiceDeleteRecurringRunOK) GetPayload() interface{} { + return o.Payload } func (o *RecurringRunServiceDeleteRecurringRunOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewRecurringRunServiceDeleteRecurringRunDefault(code int) *RecurringRunServ } } -/*RecurringRunServiceDeleteRecurringRunDefault handles this case with default header values. +/* +RecurringRunServiceDeleteRecurringRunDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type RecurringRunServiceDeleteRecurringRunDefault struct { _statusCode int - Payload *recurring_run_model.RuntimeError + Payload *recurring_run_model.GooglerpcStatus +} + +// IsSuccess returns true when this recurring run service delete recurring run default response has a 2xx status code +func (o *RecurringRunServiceDeleteRecurringRunDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this recurring run service delete recurring run default response has a 3xx status code +func (o *RecurringRunServiceDeleteRecurringRunDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this recurring run service delete recurring run default response has a 4xx status code +func (o *RecurringRunServiceDeleteRecurringRunDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this recurring run service delete recurring run default response has a 5xx status code +func (o *RecurringRunServiceDeleteRecurringRunDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this recurring run service delete recurring run default response a status code equal to that given +func (o *RecurringRunServiceDeleteRecurringRunDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the recurring run service delete recurring run default response @@ -94,12 +159,22 @@ func (o *RecurringRunServiceDeleteRecurringRunDefault) Code() int { } func (o *RecurringRunServiceDeleteRecurringRunDefault) Error() string { - return fmt.Sprintf("[DELETE /apis/v2beta1/recurringruns/{recurring_run_id}][%d] RecurringRunService_DeleteRecurringRun default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v2beta1/recurringruns/{recurring_run_id}][%d] RecurringRunService_DeleteRecurringRun default %s", o._statusCode, payload) +} + +func (o *RecurringRunServiceDeleteRecurringRunDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v2beta1/recurringruns/{recurring_run_id}][%d] RecurringRunService_DeleteRecurringRun default %s", o._statusCode, payload) +} + +func (o *RecurringRunServiceDeleteRecurringRunDefault) GetPayload() *recurring_run_model.GooglerpcStatus { + return o.Payload } func (o *RecurringRunServiceDeleteRecurringRunDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(recurring_run_model.RuntimeError) + o.Payload = new(recurring_run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_disable_recurring_run_parameters.go b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_disable_recurring_run_parameters.go index 4388f25402f..299a04721e1 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_disable_recurring_run_parameters.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_disable_recurring_run_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + 
"github.com/go-openapi/strfmt" ) -// NewRecurringRunServiceDisableRecurringRunParams creates a new RecurringRunServiceDisableRecurringRunParams object -// with the default values initialized. +// NewRecurringRunServiceDisableRecurringRunParams creates a new RecurringRunServiceDisableRecurringRunParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewRecurringRunServiceDisableRecurringRunParams() *RecurringRunServiceDisableRecurringRunParams { - var () return &RecurringRunServiceDisableRecurringRunParams{ - timeout: cr.DefaultTimeout, } } // NewRecurringRunServiceDisableRecurringRunParamsWithTimeout creates a new RecurringRunServiceDisableRecurringRunParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRecurringRunServiceDisableRecurringRunParamsWithTimeout(timeout time.Duration) *RecurringRunServiceDisableRecurringRunParams { - var () return &RecurringRunServiceDisableRecurringRunParams{ - timeout: timeout, } } // NewRecurringRunServiceDisableRecurringRunParamsWithContext creates a new RecurringRunServiceDisableRecurringRunParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRecurringRunServiceDisableRecurringRunParamsWithContext(ctx context.Context) *RecurringRunServiceDisableRecurringRunParams { - var () return &RecurringRunServiceDisableRecurringRunParams{ - Context: ctx, } } // NewRecurringRunServiceDisableRecurringRunParamsWithHTTPClient creates a new RecurringRunServiceDisableRecurringRunParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRecurringRunServiceDisableRecurringRunParamsWithHTTPClient(client *http.Client) *RecurringRunServiceDisableRecurringRunParams { - var () return &RecurringRunServiceDisableRecurringRunParams{ HTTPClient: client, } } -/*RecurringRunServiceDisableRecurringRunParams contains all the parameters to send to the API endpoint -for the recurring run service disable recurring run operation typically these are written to a http.Request +/* +RecurringRunServiceDisableRecurringRunParams contains all the parameters to send to the API endpoint + + for the recurring run service disable recurring run operation. + + Typically these are written to a http.Request. */ type RecurringRunServiceDisableRecurringRunParams struct { - /*RecurringRunID - The ID of the recurring runs to be disabled. + /* RecurringRunID. + The ID of the recurring runs to be disabled. */ RecurringRunID string @@ -72,6 +72,21 @@ type RecurringRunServiceDisableRecurringRunParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the recurring run service disable recurring run params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RecurringRunServiceDisableRecurringRunParams) WithDefaults() *RecurringRunServiceDisableRecurringRunParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the recurring run service disable recurring run params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *RecurringRunServiceDisableRecurringRunParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the recurring run service disable recurring run params func (o *RecurringRunServiceDisableRecurringRunParams) WithTimeout(timeout time.Duration) *RecurringRunServiceDisableRecurringRunParams { o.SetTimeout(timeout) diff --git a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_disable_recurring_run_responses.go b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_disable_recurring_run_responses.go index 71a5dd9d52e..77413b7e675 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_disable_recurring_run_responses.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_disable_recurring_run_responses.go @@ -6,14 +6,14 @@ package recurring_run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - recurring_run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model" ) // RecurringRunServiceDisableRecurringRunReader is a Reader for the RecurringRunServiceDisableRecurringRun structure. @@ -24,14 +24,12 @@ type RecurringRunServiceDisableRecurringRunReader struct { // ReadResponse reads a server response into the received o. func (o *RecurringRunServiceDisableRecurringRunReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRecurringRunServiceDisableRecurringRunOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRecurringRunServiceDisableRecurringRunDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRecurringRunServiceDisableRecurringRunOK() *RecurringRunServiceDisableRe return &RecurringRunServiceDisableRecurringRunOK{} } -/*RecurringRunServiceDisableRecurringRunOK handles this case with default header values. +/* +RecurringRunServiceDisableRecurringRunOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type RecurringRunServiceDisableRecurringRunOK struct { Payload interface{} } +// IsSuccess returns true when this recurring run service disable recurring run o k response has a 2xx status code +func (o *RecurringRunServiceDisableRecurringRunOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this recurring run service disable recurring run o k response has a 3xx status code +func (o *RecurringRunServiceDisableRecurringRunOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this recurring run service disable recurring run o k response has a 4xx status code +func (o *RecurringRunServiceDisableRecurringRunOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this recurring run service disable recurring run o k response has a 5xx status code +func (o *RecurringRunServiceDisableRecurringRunOK) IsServerError() bool { + return false +} + +// IsCode returns true when this recurring run service disable recurring run o k response a status code equal to that given +func (o *RecurringRunServiceDisableRecurringRunOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the recurring run service disable recurring run o k response +func (o *RecurringRunServiceDisableRecurringRunOK) Code() int { + return 200 +} + func (o *RecurringRunServiceDisableRecurringRunOK) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/recurringruns/{recurring_run_id}:disable][%d] recurringRunServiceDisableRecurringRunOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/recurringruns/{recurring_run_id}:disable][%d] recurringRunServiceDisableRecurringRunOK %s", 200, payload) +} + +func (o *RecurringRunServiceDisableRecurringRunOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/recurringruns/{recurring_run_id}:disable][%d] recurringRunServiceDisableRecurringRunOK %s", 200, payload) +} + +func (o *RecurringRunServiceDisableRecurringRunOK) GetPayload() interface{} { + return o.Payload } func (o *RecurringRunServiceDisableRecurringRunOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewRecurringRunServiceDisableRecurringRunDefault(code int) *RecurringRunSer } } -/*RecurringRunServiceDisableRecurringRunDefault handles this case with default header values. +/* +RecurringRunServiceDisableRecurringRunDefault describes a response with status code -1, with default header values. An unexpected error response. 
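Reviewer note (not part of the generated files): every default ("unexpected error") response now carries *recurring_run_model.GooglerpcStatus instead of the old RuntimeError, and still implements error via Error()/String(). A hedged sketch of pulling the structured status back out of an error value; that the generated client surfaces the default response as the returned error follows the usual go-swagger convention and is not shown in this diff. The same pattern applies to the disable, enable, get, and list default responses below.

package main

import (
	"errors"
	"fmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service"
)

// reportDeleteFailure unwraps the default response from an error returned by the client.
func reportDeleteFailure(err error) {
	var def *recurring_run_service.RecurringRunServiceDeleteRecurringRunDefault
	if errors.As(err, &def) {
		// GetPayload now returns *recurring_run_model.GooglerpcStatus.
		fmt.Printf("delete failed with HTTP %d: %+v\n", def.Code(), def.GetPayload())
		return
	}
	fmt.Println("transport or client error:", err)
}

func main() {
	reportDeleteFailure(fmt.Errorf("placeholder error"))
}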
*/ type RecurringRunServiceDisableRecurringRunDefault struct { _statusCode int - Payload *recurring_run_model.RuntimeError + Payload *recurring_run_model.GooglerpcStatus +} + +// IsSuccess returns true when this recurring run service disable recurring run default response has a 2xx status code +func (o *RecurringRunServiceDisableRecurringRunDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this recurring run service disable recurring run default response has a 3xx status code +func (o *RecurringRunServiceDisableRecurringRunDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this recurring run service disable recurring run default response has a 4xx status code +func (o *RecurringRunServiceDisableRecurringRunDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this recurring run service disable recurring run default response has a 5xx status code +func (o *RecurringRunServiceDisableRecurringRunDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this recurring run service disable recurring run default response a status code equal to that given +func (o *RecurringRunServiceDisableRecurringRunDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the recurring run service disable recurring run default response @@ -94,12 +159,22 @@ func (o *RecurringRunServiceDisableRecurringRunDefault) Code() int { } func (o *RecurringRunServiceDisableRecurringRunDefault) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/recurringruns/{recurring_run_id}:disable][%d] RecurringRunService_DisableRecurringRun default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/recurringruns/{recurring_run_id}:disable][%d] RecurringRunService_DisableRecurringRun default %s", o._statusCode, payload) +} + +func (o *RecurringRunServiceDisableRecurringRunDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/recurringruns/{recurring_run_id}:disable][%d] RecurringRunService_DisableRecurringRun default %s", o._statusCode, payload) +} + +func (o *RecurringRunServiceDisableRecurringRunDefault) GetPayload() *recurring_run_model.GooglerpcStatus { + return o.Payload } func (o *RecurringRunServiceDisableRecurringRunDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(recurring_run_model.RuntimeError) + o.Payload = new(recurring_run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_enable_recurring_run_parameters.go b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_enable_recurring_run_parameters.go index 9547b10b11d..8d9dac92d6f 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_enable_recurring_run_parameters.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_enable_recurring_run_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt 
"github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewRecurringRunServiceEnableRecurringRunParams creates a new RecurringRunServiceEnableRecurringRunParams object -// with the default values initialized. +// NewRecurringRunServiceEnableRecurringRunParams creates a new RecurringRunServiceEnableRecurringRunParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewRecurringRunServiceEnableRecurringRunParams() *RecurringRunServiceEnableRecurringRunParams { - var () return &RecurringRunServiceEnableRecurringRunParams{ - timeout: cr.DefaultTimeout, } } // NewRecurringRunServiceEnableRecurringRunParamsWithTimeout creates a new RecurringRunServiceEnableRecurringRunParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRecurringRunServiceEnableRecurringRunParamsWithTimeout(timeout time.Duration) *RecurringRunServiceEnableRecurringRunParams { - var () return &RecurringRunServiceEnableRecurringRunParams{ - timeout: timeout, } } // NewRecurringRunServiceEnableRecurringRunParamsWithContext creates a new RecurringRunServiceEnableRecurringRunParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRecurringRunServiceEnableRecurringRunParamsWithContext(ctx context.Context) *RecurringRunServiceEnableRecurringRunParams { - var () return &RecurringRunServiceEnableRecurringRunParams{ - Context: ctx, } } // NewRecurringRunServiceEnableRecurringRunParamsWithHTTPClient creates a new RecurringRunServiceEnableRecurringRunParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRecurringRunServiceEnableRecurringRunParamsWithHTTPClient(client *http.Client) *RecurringRunServiceEnableRecurringRunParams { - var () return &RecurringRunServiceEnableRecurringRunParams{ HTTPClient: client, } } -/*RecurringRunServiceEnableRecurringRunParams contains all the parameters to send to the API endpoint -for the recurring run service enable recurring run operation typically these are written to a http.Request +/* +RecurringRunServiceEnableRecurringRunParams contains all the parameters to send to the API endpoint + + for the recurring run service enable recurring run operation. + + Typically these are written to a http.Request. */ type RecurringRunServiceEnableRecurringRunParams struct { - /*RecurringRunID - The ID of the recurring runs to be enabled. + /* RecurringRunID. + The ID of the recurring runs to be enabled. */ RecurringRunID string @@ -72,6 +72,21 @@ type RecurringRunServiceEnableRecurringRunParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the recurring run service enable recurring run params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RecurringRunServiceEnableRecurringRunParams) WithDefaults() *RecurringRunServiceEnableRecurringRunParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the recurring run service enable recurring run params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *RecurringRunServiceEnableRecurringRunParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the recurring run service enable recurring run params func (o *RecurringRunServiceEnableRecurringRunParams) WithTimeout(timeout time.Duration) *RecurringRunServiceEnableRecurringRunParams { o.SetTimeout(timeout) diff --git a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_enable_recurring_run_responses.go b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_enable_recurring_run_responses.go index 4f0ee34c931..3c6ef1d56f9 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_enable_recurring_run_responses.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_enable_recurring_run_responses.go @@ -6,14 +6,14 @@ package recurring_run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - recurring_run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model" ) // RecurringRunServiceEnableRecurringRunReader is a Reader for the RecurringRunServiceEnableRecurringRun structure. @@ -24,14 +24,12 @@ type RecurringRunServiceEnableRecurringRunReader struct { // ReadResponse reads a server response into the received o. func (o *RecurringRunServiceEnableRecurringRunReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRecurringRunServiceEnableRecurringRunOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRecurringRunServiceEnableRecurringRunDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRecurringRunServiceEnableRecurringRunOK() *RecurringRunServiceEnableRecu return &RecurringRunServiceEnableRecurringRunOK{} } -/*RecurringRunServiceEnableRecurringRunOK handles this case with default header values. +/* +RecurringRunServiceEnableRecurringRunOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type RecurringRunServiceEnableRecurringRunOK struct { Payload interface{} } +// IsSuccess returns true when this recurring run service enable recurring run o k response has a 2xx status code +func (o *RecurringRunServiceEnableRecurringRunOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this recurring run service enable recurring run o k response has a 3xx status code +func (o *RecurringRunServiceEnableRecurringRunOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this recurring run service enable recurring run o k response has a 4xx status code +func (o *RecurringRunServiceEnableRecurringRunOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this recurring run service enable recurring run o k response has a 5xx status code +func (o *RecurringRunServiceEnableRecurringRunOK) IsServerError() bool { + return false +} + +// IsCode returns true when this recurring run service enable recurring run o k response a status code equal to that given +func (o *RecurringRunServiceEnableRecurringRunOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the recurring run service enable recurring run o k response +func (o *RecurringRunServiceEnableRecurringRunOK) Code() int { + return 200 +} + func (o *RecurringRunServiceEnableRecurringRunOK) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/recurringruns/{recurring_run_id}:enable][%d] recurringRunServiceEnableRecurringRunOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/recurringruns/{recurring_run_id}:enable][%d] recurringRunServiceEnableRecurringRunOK %s", 200, payload) +} + +func (o *RecurringRunServiceEnableRecurringRunOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/recurringruns/{recurring_run_id}:enable][%d] recurringRunServiceEnableRecurringRunOK %s", 200, payload) +} + +func (o *RecurringRunServiceEnableRecurringRunOK) GetPayload() interface{} { + return o.Payload } func (o *RecurringRunServiceEnableRecurringRunOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewRecurringRunServiceEnableRecurringRunDefault(code int) *RecurringRunServ } } -/*RecurringRunServiceEnableRecurringRunDefault handles this case with default header values. +/* +RecurringRunServiceEnableRecurringRunDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type RecurringRunServiceEnableRecurringRunDefault struct { _statusCode int - Payload *recurring_run_model.RuntimeError + Payload *recurring_run_model.GooglerpcStatus +} + +// IsSuccess returns true when this recurring run service enable recurring run default response has a 2xx status code +func (o *RecurringRunServiceEnableRecurringRunDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this recurring run service enable recurring run default response has a 3xx status code +func (o *RecurringRunServiceEnableRecurringRunDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this recurring run service enable recurring run default response has a 4xx status code +func (o *RecurringRunServiceEnableRecurringRunDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this recurring run service enable recurring run default response has a 5xx status code +func (o *RecurringRunServiceEnableRecurringRunDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this recurring run service enable recurring run default response a status code equal to that given +func (o *RecurringRunServiceEnableRecurringRunDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the recurring run service enable recurring run default response @@ -94,12 +159,22 @@ func (o *RecurringRunServiceEnableRecurringRunDefault) Code() int { } func (o *RecurringRunServiceEnableRecurringRunDefault) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/recurringruns/{recurring_run_id}:enable][%d] RecurringRunService_EnableRecurringRun default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/recurringruns/{recurring_run_id}:enable][%d] RecurringRunService_EnableRecurringRun default %s", o._statusCode, payload) +} + +func (o *RecurringRunServiceEnableRecurringRunDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/recurringruns/{recurring_run_id}:enable][%d] RecurringRunService_EnableRecurringRun default %s", o._statusCode, payload) +} + +func (o *RecurringRunServiceEnableRecurringRunDefault) GetPayload() *recurring_run_model.GooglerpcStatus { + return o.Payload } func (o *RecurringRunServiceEnableRecurringRunDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(recurring_run_model.RuntimeError) + o.Payload = new(recurring_run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_get_recurring_run_parameters.go b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_get_recurring_run_parameters.go index 14ab9b6df2c..dbbcd0c43fb 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_get_recurring_run_parameters.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_get_recurring_run_parameters.go @@ -13,57 +13,57 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + 
"github.com/go-openapi/strfmt" ) -// NewRecurringRunServiceGetRecurringRunParams creates a new RecurringRunServiceGetRecurringRunParams object -// with the default values initialized. +// NewRecurringRunServiceGetRecurringRunParams creates a new RecurringRunServiceGetRecurringRunParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewRecurringRunServiceGetRecurringRunParams() *RecurringRunServiceGetRecurringRunParams { - var () return &RecurringRunServiceGetRecurringRunParams{ - timeout: cr.DefaultTimeout, } } // NewRecurringRunServiceGetRecurringRunParamsWithTimeout creates a new RecurringRunServiceGetRecurringRunParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRecurringRunServiceGetRecurringRunParamsWithTimeout(timeout time.Duration) *RecurringRunServiceGetRecurringRunParams { - var () return &RecurringRunServiceGetRecurringRunParams{ - timeout: timeout, } } // NewRecurringRunServiceGetRecurringRunParamsWithContext creates a new RecurringRunServiceGetRecurringRunParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRecurringRunServiceGetRecurringRunParamsWithContext(ctx context.Context) *RecurringRunServiceGetRecurringRunParams { - var () return &RecurringRunServiceGetRecurringRunParams{ - Context: ctx, } } // NewRecurringRunServiceGetRecurringRunParamsWithHTTPClient creates a new RecurringRunServiceGetRecurringRunParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRecurringRunServiceGetRecurringRunParamsWithHTTPClient(client *http.Client) *RecurringRunServiceGetRecurringRunParams { - var () return &RecurringRunServiceGetRecurringRunParams{ HTTPClient: client, } } -/*RecurringRunServiceGetRecurringRunParams contains all the parameters to send to the API endpoint -for the recurring run service get recurring run operation typically these are written to a http.Request +/* +RecurringRunServiceGetRecurringRunParams contains all the parameters to send to the API endpoint + + for the recurring run service get recurring run operation. + + Typically these are written to a http.Request. */ type RecurringRunServiceGetRecurringRunParams struct { - /*RecurringRunID - The ID of the recurring run to be retrieved. + /* RecurringRunID. + The ID of the recurring run to be retrieved. */ RecurringRunID string @@ -72,6 +72,21 @@ type RecurringRunServiceGetRecurringRunParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the recurring run service get recurring run params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RecurringRunServiceGetRecurringRunParams) WithDefaults() *RecurringRunServiceGetRecurringRunParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the recurring run service get recurring run params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *RecurringRunServiceGetRecurringRunParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the recurring run service get recurring run params func (o *RecurringRunServiceGetRecurringRunParams) WithTimeout(timeout time.Duration) *RecurringRunServiceGetRecurringRunParams { o.SetTimeout(timeout) diff --git a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_get_recurring_run_responses.go b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_get_recurring_run_responses.go index 5af212d2f17..ea2e8d4df96 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_get_recurring_run_responses.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_get_recurring_run_responses.go @@ -6,14 +6,14 @@ package recurring_run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - recurring_run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model" ) // RecurringRunServiceGetRecurringRunReader is a Reader for the RecurringRunServiceGetRecurringRun structure. @@ -24,14 +24,12 @@ type RecurringRunServiceGetRecurringRunReader struct { // ReadResponse reads a server response into the received o. func (o *RecurringRunServiceGetRecurringRunReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRecurringRunServiceGetRecurringRunOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRecurringRunServiceGetRecurringRunDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRecurringRunServiceGetRecurringRunOK() *RecurringRunServiceGetRecurringR return &RecurringRunServiceGetRecurringRunOK{} } -/*RecurringRunServiceGetRecurringRunOK handles this case with default header values. +/* +RecurringRunServiceGetRecurringRunOK describes a response with status code 200, with default header values. A successful response. 
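Reviewer note (not part of the generated files): the parameter constructors no longer hydrate defaults; WithDefaults/SetDefaults were added for that, and the existing With* setters still chain. A small sketch building get-recurring-run params under those assumptions, with a placeholder ID:

package main

import (
	"time"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service"
)

func main() {
	recurringRunID := "REPLACE_WITH_RECURRING_RUN_ID" // placeholder, not a real ID

	params := recurring_run_service.NewRecurringRunServiceGetRecurringRunParams().
		WithDefaults(). // currently a no-op: no default values are defined for this parameter
		WithTimeout(30 * time.Second)
	params.RecurringRunID = recurringRunID

	_ = params // pass to the generated client's GetRecurringRun call (not shown in this diff)
}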
*/ @@ -57,8 +56,48 @@ type RecurringRunServiceGetRecurringRunOK struct { Payload *recurring_run_model.V2beta1RecurringRun } +// IsSuccess returns true when this recurring run service get recurring run o k response has a 2xx status code +func (o *RecurringRunServiceGetRecurringRunOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this recurring run service get recurring run o k response has a 3xx status code +func (o *RecurringRunServiceGetRecurringRunOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this recurring run service get recurring run o k response has a 4xx status code +func (o *RecurringRunServiceGetRecurringRunOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this recurring run service get recurring run o k response has a 5xx status code +func (o *RecurringRunServiceGetRecurringRunOK) IsServerError() bool { + return false +} + +// IsCode returns true when this recurring run service get recurring run o k response a status code equal to that given +func (o *RecurringRunServiceGetRecurringRunOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the recurring run service get recurring run o k response +func (o *RecurringRunServiceGetRecurringRunOK) Code() int { + return 200 +} + func (o *RecurringRunServiceGetRecurringRunOK) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/recurringruns/{recurring_run_id}][%d] recurringRunServiceGetRecurringRunOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/recurringruns/{recurring_run_id}][%d] recurringRunServiceGetRecurringRunOK %s", 200, payload) +} + +func (o *RecurringRunServiceGetRecurringRunOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/recurringruns/{recurring_run_id}][%d] recurringRunServiceGetRecurringRunOK %s", 200, payload) +} + +func (o *RecurringRunServiceGetRecurringRunOK) GetPayload() *recurring_run_model.V2beta1RecurringRun { + return o.Payload } func (o *RecurringRunServiceGetRecurringRunOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewRecurringRunServiceGetRecurringRunDefault(code int) *RecurringRunService } } -/*RecurringRunServiceGetRecurringRunDefault handles this case with default header values. +/* +RecurringRunServiceGetRecurringRunDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type RecurringRunServiceGetRecurringRunDefault struct { _statusCode int - Payload *recurring_run_model.RuntimeError + Payload *recurring_run_model.GooglerpcStatus +} + +// IsSuccess returns true when this recurring run service get recurring run default response has a 2xx status code +func (o *RecurringRunServiceGetRecurringRunDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this recurring run service get recurring run default response has a 3xx status code +func (o *RecurringRunServiceGetRecurringRunDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this recurring run service get recurring run default response has a 4xx status code +func (o *RecurringRunServiceGetRecurringRunDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this recurring run service get recurring run default response has a 5xx status code +func (o *RecurringRunServiceGetRecurringRunDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this recurring run service get recurring run default response a status code equal to that given +func (o *RecurringRunServiceGetRecurringRunDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the recurring run service get recurring run default response @@ -96,12 +161,22 @@ func (o *RecurringRunServiceGetRecurringRunDefault) Code() int { } func (o *RecurringRunServiceGetRecurringRunDefault) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/recurringruns/{recurring_run_id}][%d] RecurringRunService_GetRecurringRun default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/recurringruns/{recurring_run_id}][%d] RecurringRunService_GetRecurringRun default %s", o._statusCode, payload) +} + +func (o *RecurringRunServiceGetRecurringRunDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/recurringruns/{recurring_run_id}][%d] RecurringRunService_GetRecurringRun default %s", o._statusCode, payload) +} + +func (o *RecurringRunServiceGetRecurringRunDefault) GetPayload() *recurring_run_model.GooglerpcStatus { + return o.Payload } func (o *RecurringRunServiceGetRecurringRunDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(recurring_run_model.RuntimeError) + o.Payload = new(recurring_run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_list_recurring_runs_parameters.go b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_list_recurring_runs_parameters.go index a48b68a30ff..8ed3e1d7813 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_list_recurring_runs_parameters.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_list_recurring_runs_parameters.go @@ -13,89 +13,96 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" - - strfmt "github.com/go-openapi/strfmt" ) -// 
NewRecurringRunServiceListRecurringRunsParams creates a new RecurringRunServiceListRecurringRunsParams object -// with the default values initialized. +// NewRecurringRunServiceListRecurringRunsParams creates a new RecurringRunServiceListRecurringRunsParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewRecurringRunServiceListRecurringRunsParams() *RecurringRunServiceListRecurringRunsParams { - var () return &RecurringRunServiceListRecurringRunsParams{ - timeout: cr.DefaultTimeout, } } // NewRecurringRunServiceListRecurringRunsParamsWithTimeout creates a new RecurringRunServiceListRecurringRunsParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRecurringRunServiceListRecurringRunsParamsWithTimeout(timeout time.Duration) *RecurringRunServiceListRecurringRunsParams { - var () return &RecurringRunServiceListRecurringRunsParams{ - timeout: timeout, } } // NewRecurringRunServiceListRecurringRunsParamsWithContext creates a new RecurringRunServiceListRecurringRunsParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRecurringRunServiceListRecurringRunsParamsWithContext(ctx context.Context) *RecurringRunServiceListRecurringRunsParams { - var () return &RecurringRunServiceListRecurringRunsParams{ - Context: ctx, } } // NewRecurringRunServiceListRecurringRunsParamsWithHTTPClient creates a new RecurringRunServiceListRecurringRunsParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRecurringRunServiceListRecurringRunsParamsWithHTTPClient(client *http.Client) *RecurringRunServiceListRecurringRunsParams { - var () return &RecurringRunServiceListRecurringRunsParams{ HTTPClient: client, } } -/*RecurringRunServiceListRecurringRunsParams contains all the parameters to send to the API endpoint -for the recurring run service list recurring runs operation typically these are written to a http.Request +/* +RecurringRunServiceListRecurringRunsParams contains all the parameters to send to the API endpoint + + for the recurring run service list recurring runs operation. + + Typically these are written to a http.Request. */ type RecurringRunServiceListRecurringRunsParams struct { - /*ExperimentID - The ID of the experiment to be retrieved. If empty, list recurring runs across all experiments. + /* ExperimentID. + The ID of the experiment to be retrieved. If empty, list recurring runs across all experiments. */ ExperimentID *string - /*Filter - A url-encoded, JSON-serialized Filter protocol buffer (see - [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). + /* Filter. + + A url-encoded, JSON-serialized Filter protocol buffer (see + [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). */ Filter *string - /*Namespace - Optional input. The namespace the recurring runs belong to. + /* Namespace. + + Optional input. The namespace the recurring runs belong to. */ Namespace *string - /*PageSize - The number of recurring runs to be listed per page. If there are more recurring runs + + /* PageSize. 
+ + The number of recurring runs to be listed per page. If there are more recurring runs than this number, the response message will contain a nextPageToken field you can use to fetch the next page. + Format: int32 */ PageSize *int32 - /*PageToken - A page token to request the next page of results. The token is acquired + + /* PageToken. + + A page token to request the next page of results. The token is acquired from the nextPageToken field of the response from the previous ListRecurringRuns call or can be omitted when fetching the first page. - */ PageToken *string - /*SortBy - Can be formatted as "field_name", "field_name asc" or "field_name desc". - Ascending by default. + /* SortBy. + + Can be formatted as "field_name", "field_name asc" or "field_name desc". + Ascending by default. */ SortBy *string @@ -104,6 +111,21 @@ type RecurringRunServiceListRecurringRunsParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the recurring run service list recurring runs params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RecurringRunServiceListRecurringRunsParams) WithDefaults() *RecurringRunServiceListRecurringRunsParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the recurring run service list recurring runs params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RecurringRunServiceListRecurringRunsParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the recurring run service list recurring runs params func (o *RecurringRunServiceListRecurringRunsParams) WithTimeout(timeout time.Duration) *RecurringRunServiceListRecurringRunsParams { o.SetTimeout(timeout) @@ -215,96 +237,102 @@ func (o *RecurringRunServiceListRecurringRunsParams) WriteToRequest(r runtime.Cl // query param experiment_id var qrExperimentID string + if o.ExperimentID != nil { qrExperimentID = *o.ExperimentID } qExperimentID := qrExperimentID if qExperimentID != "" { + if err := r.SetQueryParam("experiment_id", qExperimentID); err != nil { return err } } - } if o.Filter != nil { // query param filter var qrFilter string + if o.Filter != nil { qrFilter = *o.Filter } qFilter := qrFilter if qFilter != "" { + if err := r.SetQueryParam("filter", qFilter); err != nil { return err } } - } if o.Namespace != nil { // query param namespace var qrNamespace string + if o.Namespace != nil { qrNamespace = *o.Namespace } qNamespace := qrNamespace if qNamespace != "" { + if err := r.SetQueryParam("namespace", qNamespace); err != nil { return err } } - } if o.PageSize != nil { // query param page_size var qrPageSize int32 + if o.PageSize != nil { qrPageSize = *o.PageSize } qPageSize := swag.FormatInt32(qrPageSize) if qPageSize != "" { + if err := r.SetQueryParam("page_size", qPageSize); err != nil { return err } } - } if o.PageToken != nil { // query param page_token var qrPageToken string + if o.PageToken != nil { qrPageToken = *o.PageToken } qPageToken := qrPageToken if qPageToken != "" { + if err := r.SetQueryParam("page_token", qPageToken); err != nil { return err } } - } if o.SortBy != nil { // query param sort_by var qrSortBy string + if o.SortBy != nil { qrSortBy = *o.SortBy } qSortBy := qrSortBy if qSortBy != "" { + if err := r.SetQueryParam("sort_by", qSortBy); err != nil { return err } } - } if len(res) > 0 { diff --git 
a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_list_recurring_runs_responses.go b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_list_recurring_runs_responses.go index 53f519829ac..085b8f66314 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_list_recurring_runs_responses.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service/recurring_run_service_list_recurring_runs_responses.go @@ -6,14 +6,14 @@ package recurring_run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - recurring_run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model" ) // RecurringRunServiceListRecurringRunsReader is a Reader for the RecurringRunServiceListRecurringRuns structure. @@ -24,14 +24,12 @@ type RecurringRunServiceListRecurringRunsReader struct { // ReadResponse reads a server response into the received o. func (o *RecurringRunServiceListRecurringRunsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRecurringRunServiceListRecurringRunsOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRecurringRunServiceListRecurringRunsDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRecurringRunServiceListRecurringRunsOK() *RecurringRunServiceListRecurri return &RecurringRunServiceListRecurringRunsOK{} } -/*RecurringRunServiceListRecurringRunsOK handles this case with default header values. +/* +RecurringRunServiceListRecurringRunsOK describes a response with status code 200, with default header values. A successful response. 
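Reviewer note (not part of the generated files): the regenerated list parameters keep the same query fields (experiment_id, filter, namespace, page_size, page_token, sort_by); only the doc layout and the query-writing boilerplate changed. A hedged sketch of filling them in directly, since the optional fields are plain exported pointers; the namespace, sort key, and filter values are placeholders:

package main

import (
	"github.com/go-openapi/swag"
	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service"
)

func main() {
	params := recurring_run_service.NewRecurringRunServiceListRecurringRunsParams()
	params.SetDefaults() // currently a no-op; kept for symmetry with the other endpoints

	params.Namespace = swag.String("kubeflow-user")  // placeholder namespace
	params.PageSize = swag.Int32(20)
	params.SortBy = swag.String("created_at desc")   // "field_name desc" format per the doc comment
	params.Filter = swag.String(`{"predicates":[]}`) // JSON-serialized Filter; see filter.proto

	_ = params // pass to the generated client's ListRecurringRuns call (not shown in this diff)
}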
*/ @@ -57,8 +56,48 @@ type RecurringRunServiceListRecurringRunsOK struct { Payload *recurring_run_model.V2beta1ListRecurringRunsResponse } +// IsSuccess returns true when this recurring run service list recurring runs o k response has a 2xx status code +func (o *RecurringRunServiceListRecurringRunsOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this recurring run service list recurring runs o k response has a 3xx status code +func (o *RecurringRunServiceListRecurringRunsOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this recurring run service list recurring runs o k response has a 4xx status code +func (o *RecurringRunServiceListRecurringRunsOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this recurring run service list recurring runs o k response has a 5xx status code +func (o *RecurringRunServiceListRecurringRunsOK) IsServerError() bool { + return false +} + +// IsCode returns true when this recurring run service list recurring runs o k response a status code equal to that given +func (o *RecurringRunServiceListRecurringRunsOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the recurring run service list recurring runs o k response +func (o *RecurringRunServiceListRecurringRunsOK) Code() int { + return 200 +} + func (o *RecurringRunServiceListRecurringRunsOK) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/recurringruns][%d] recurringRunServiceListRecurringRunsOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/recurringruns][%d] recurringRunServiceListRecurringRunsOK %s", 200, payload) +} + +func (o *RecurringRunServiceListRecurringRunsOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/recurringruns][%d] recurringRunServiceListRecurringRunsOK %s", 200, payload) +} + +func (o *RecurringRunServiceListRecurringRunsOK) GetPayload() *recurring_run_model.V2beta1ListRecurringRunsResponse { + return o.Payload } func (o *RecurringRunServiceListRecurringRunsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewRecurringRunServiceListRecurringRunsDefault(code int) *RecurringRunServi } } -/*RecurringRunServiceListRecurringRunsDefault handles this case with default header values. +/* +RecurringRunServiceListRecurringRunsDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type RecurringRunServiceListRecurringRunsDefault struct { _statusCode int - Payload *recurring_run_model.RuntimeError + Payload *recurring_run_model.GooglerpcStatus +} + +// IsSuccess returns true when this recurring run service list recurring runs default response has a 2xx status code +func (o *RecurringRunServiceListRecurringRunsDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this recurring run service list recurring runs default response has a 3xx status code +func (o *RecurringRunServiceListRecurringRunsDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this recurring run service list recurring runs default response has a 4xx status code +func (o *RecurringRunServiceListRecurringRunsDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this recurring run service list recurring runs default response has a 5xx status code +func (o *RecurringRunServiceListRecurringRunsDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this recurring run service list recurring runs default response a status code equal to that given +func (o *RecurringRunServiceListRecurringRunsDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the recurring run service list recurring runs default response @@ -96,12 +161,22 @@ func (o *RecurringRunServiceListRecurringRunsDefault) Code() int { } func (o *RecurringRunServiceListRecurringRunsDefault) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/recurringruns][%d] RecurringRunService_ListRecurringRuns default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/recurringruns][%d] RecurringRunService_ListRecurringRuns default %s", o._statusCode, payload) +} + +func (o *RecurringRunServiceListRecurringRunsDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/recurringruns][%d] RecurringRunService_ListRecurringRuns default %s", o._statusCode, payload) +} + +func (o *RecurringRunServiceListRecurringRunsDefault) GetPayload() *recurring_run_model.GooglerpcStatus { + return o.Payload } func (o *RecurringRunServiceListRecurringRunsDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(recurring_run_model.RuntimeError) + o.Payload = new(recurring_run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/recurring_run_model/googlerpc_status.go b/backend/api/v2beta1/go_http_client/recurring_run_model/googlerpc_status.go index 9d5a79b5d66..c4df5298e5c 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_model/googlerpc_status.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_model/googlerpc_status.go @@ -6,11 +6,11 @@ package recurring_run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) @@ -21,6 +21,7 @@ import ( // // You can find out more about this error model and how to work with it in the // [API Design Guide](https://cloud.google.com/apis/design/errors). 
+// // swagger:model googlerpcStatus type GooglerpcStatus struct { @@ -52,7 +53,6 @@ func (m *GooglerpcStatus) Validate(formats strfmt.Registry) error { } func (m *GooglerpcStatus) validateDetails(formats strfmt.Registry) error { - if swag.IsZero(m.Details) { // not required return nil } @@ -66,6 +66,47 @@ func (m *GooglerpcStatus) validateDetails(formats strfmt.Registry) error { if err := m.Details[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this googlerpc status based on the context it is used +func (m *GooglerpcStatus) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateDetails(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) contextValidateDetails(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Details); i++ { + + if m.Details[i] != nil { + + if swag.IsZero(m.Details[i]) { // not required + return nil + } + + if err := m.Details[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) } return err } diff --git a/backend/api/v2beta1/go_http_client/recurring_run_model/protobuf_any.go b/backend/api/v2beta1/go_http_client/recurring_run_model/protobuf_any.go index c166ea4221d..d6772c9bea4 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_model/protobuf_any.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_model/protobuf_any.go @@ -6,9 +6,10 @@ package recurring_run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "encoding/json" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) @@ -20,45 +21,49 @@ import ( // // Example 1: Pack and unpack a message in C++. // -// Foo foo = ...; -// Any any; -// any.PackFrom(foo); -// ... -// if (any.UnpackTo(&foo)) { -// ... -// } +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } // // Example 2: Pack and unpack a message in Java. // -// Foo foo = ...; -// Any any = Any.pack(foo); -// ... -// if (any.is(Foo.class)) { -// foo = any.unpack(Foo.class); -// } -// -// Example 3: Pack and unpack a message in Python. -// -// foo = Foo(...) -// any = Any() -// any.Pack(foo) -// ... -// if any.Is(Foo.DESCRIPTOR): -// any.Unpack(foo) -// ... -// -// Example 4: Pack and unpack a message in Go -// -// foo := &pb.Foo{...} -// any, err := anypb.New(foo) -// if err != nil { -// ... -// } -// ... -// foo := &pb.Foo{} -// if err := any.UnmarshalTo(foo); err != nil { -// ... -// } +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// // or ... +// if (any.isSameTypeAs(Foo.getDefaultInstance())) { +// foo = any.unpack(Foo.getDefaultInstance()); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) 
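Reviewer note (not part of the generated files): GooglerpcStatus, like the other regenerated models, now implements ContextValidate in addition to Validate, as added just above. A minimal sketch exercising both against the default format registry:

package main

import (
	"context"
	"fmt"

	"github.com/go-openapi/strfmt"
	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model"
)

func main() {
	st := &recurring_run_model.GooglerpcStatus{} // zero value; Details is optional, so this validates cleanly

	if err := st.Validate(strfmt.Default); err != nil {
		fmt.Println("validate:", err)
	}
	if err := st.ContextValidate(context.Background(), strfmt.Default); err != nil {
		fmt.Println("context validate:", err)
	}
}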
+// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... +// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := anypb.New(foo) +// if err != nil { +// ... +// } +// ... +// foo := &pb.Foo{} +// if err := any.UnmarshalTo(foo); err != nil { +// ... +// } // // The pack methods provided by protobuf library will by default use // 'type.googleapis.com/full.type.name' as the type URL and the unpack @@ -66,34 +71,34 @@ import ( // in the type URL, for example "foo.bar.com/x/y.z" will yield type // name "y.z". // -// // JSON -// +// ==== // The JSON representation of an `Any` value uses the regular // representation of the deserialized, embedded message, with an // additional field `@type` which contains the type URL. Example: // -// package google.profile; -// message Person { -// string first_name = 1; -// string last_name = 2; -// } +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } // -// { -// "@type": "type.googleapis.com/google.profile.Person", -// "firstName": , -// "lastName": -// } +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } // // If the embedded message type is well-known and has a custom JSON // representation, that representation will be embedded adding a field // `value` which holds the custom JSON in addition to the `@type` // field. Example (for message [google.protobuf.Duration][]): // -// { -// "@type": "type.googleapis.com/google.protobuf.Duration", -// "value": "1.212s" -// } +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// // swagger:model protobufAny type ProtobufAny struct { @@ -120,39 +125,151 @@ type ProtobufAny struct { // // Note: this functionality is not currently available in the official // protobuf release, and it is not used for type URLs beginning with - // type.googleapis.com. + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. // // Schemes other than `http`, `https` (or the empty scheme) might be // used with implementation specific semantics. - TypeURL string `json:"type_url,omitempty"` + AtType string `json:"@type,omitempty"` - // Must be a valid serialized protocol buffer of the above specified type. - // Format: byte - Value strfmt.Base64 `json:"value,omitempty"` + // protobuf any + ProtobufAny map[string]interface{} `json:"-"` } -// Validate validates this protobuf any -func (m *ProtobufAny) Validate(formats strfmt.Registry) error { - var res []error +// UnmarshalJSON unmarshals this object with additional properties from JSON +func (m *ProtobufAny) UnmarshalJSON(data []byte) error { + // stage 1, bind the properties + var stage1 struct { + + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. 
However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + AtType string `json:"@type,omitempty"` + } + if err := json.Unmarshal(data, &stage1); err != nil { + return err + } + var rcv ProtobufAny + + rcv.AtType = stage1.AtType + *m = rcv - if err := m.validateValue(formats); err != nil { - res = append(res, err) + // stage 2, remove properties and add to map + stage2 := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &stage2); err != nil { + return err } - if len(res) > 0 { - return errors.CompositeValidationError(res...) + delete(stage2, "@type") + // stage 3, add additional properties values + if len(stage2) > 0 { + result := make(map[string]interface{}) + for k, v := range stage2 { + var toadd interface{} + if err := json.Unmarshal(v, &toadd); err != nil { + return err + } + result[k] = toadd + } + m.ProtobufAny = result } + return nil } -func (m *ProtobufAny) validateValue(formats strfmt.Registry) error { +// MarshalJSON marshals this object with additional properties into a JSON object +func (m ProtobufAny) MarshalJSON() ([]byte, error) { + var stage1 struct { - if swag.IsZero(m.Value) { // not required - return nil + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. 
As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + AtType string `json:"@type,omitempty"` } - // Format "byte" (base64 string) is already validated when unmarshalled + stage1.AtType = m.AtType + + // make JSON object for known properties + props, err := json.Marshal(stage1) + if err != nil { + return nil, err + } + + if len(m.ProtobufAny) == 0 { // no additional properties + return props, nil + } + + // make JSON object for the additional properties + additional, err := json.Marshal(m.ProtobufAny) + if err != nil { + return nil, err + } + + if len(props) < 3 { // "{}": only additional properties + return additional, nil + } + + // concatenate the 2 objects + return swag.ConcatJSON(props, additional), nil +} + +// Validate validates this protobuf any +func (m *ProtobufAny) Validate(formats strfmt.Registry) error { + return nil +} +// ContextValidate validates this protobuf any based on context it is used +func (m *ProtobufAny) ContextValidate(ctx context.Context, formats strfmt.Registry) error { return nil } diff --git a/backend/api/v2beta1/go_http_client/recurring_run_model/protobuf_null_value.go b/backend/api/v2beta1/go_http_client/recurring_run_model/protobuf_null_value.go index 178328ff966..634987561e2 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_model/protobuf_null_value.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_model/protobuf_null_value.go @@ -6,23 +6,33 @@ package recurring_run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // ProtobufNullValue `NullValue` is a singleton enumeration to represent the null value for the // `Value` type union. // -// The JSON representation for `NullValue` is JSON `null`. +// The JSON representation for `NullValue` is JSON `null`. +// +// - NULL_VALUE: Null value. // -// - NULL_VALUE: Null value. // swagger:model protobufNullValue type ProtobufNullValue string +func NewProtobufNullValue(value ProtobufNullValue) *ProtobufNullValue { + return &value +} + +// Pointer returns a pointer to a freshly-allocated ProtobufNullValue. 
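// --- Illustrative usage sketch (editorial, hypothetical; not emitted by go-swagger) ---
// It shows how the regenerated ProtobufAny model above behaves: "@type" is bound to AtType,
// every other JSON key is kept in the ProtobufAny map, and MarshalJSON re-emits both parts
// via swag.ConcatJSON. The package path is taken from this diff; the payload is a placeholder.
package main

import (
	"encoding/json"
	"fmt"

	recurring_run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model"
)

func main() {
	raw := []byte(`{"@type":"type.googleapis.com/google.protobuf.Duration","value":"1.212s"}`)

	var anyMsg recurring_run_model.ProtobufAny
	if err := json.Unmarshal(raw, &anyMsg); err != nil { // uses the custom UnmarshalJSON above
		panic(err)
	}
	fmt.Println(anyMsg.AtType)               // "type.googleapis.com/google.protobuf.Duration"
	fmt.Println(anyMsg.ProtobufAny["value"]) // "1.212s", preserved as an additional property

	out, err := json.Marshal(anyMsg) // custom MarshalJSON concatenates "@type" with the extras
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}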
+func (m ProtobufNullValue) Pointer() *ProtobufNullValue { + return &m +} + const ( // ProtobufNullValueNULLVALUE captures enum value "NULL_VALUE" @@ -43,7 +53,7 @@ func init() { } func (m ProtobufNullValue) validateProtobufNullValueEnum(path, location string, value ProtobufNullValue) error { - if err := validate.Enum(path, location, value, protobufNullValueEnum); err != nil { + if err := validate.EnumCase(path, location, value, protobufNullValueEnum, true); err != nil { return err } return nil @@ -63,3 +73,8 @@ func (m ProtobufNullValue) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this protobuf null value based on context it is used +func (m ProtobufNullValue) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v2beta1/go_http_client/recurring_run_model/recurring_run_mode.go b/backend/api/v2beta1/go_http_client/recurring_run_model/recurring_run_mode.go index 15e9b127e97..50145f3de8c 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_model/recurring_run_mode.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_model/recurring_run_mode.go @@ -6,11 +6,11 @@ package recurring_run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) @@ -18,10 +18,20 @@ import ( // User setting to enable or disable the recurring run. // Only used for creation of recurring runs. Later updates use enable/disable API. // -// - DISABLE: The recurring run won't schedule any run if disabled. +// - DISABLE: The recurring run won't schedule any run if disabled. +// // swagger:model RecurringRunMode type RecurringRunMode string +func NewRecurringRunMode(value RecurringRunMode) *RecurringRunMode { + return &value +} + +// Pointer returns a pointer to a freshly-allocated RecurringRunMode. +func (m RecurringRunMode) Pointer() *RecurringRunMode { + return &m +} + const ( // RecurringRunModeMODEUNSPECIFIED captures enum value "MODE_UNSPECIFIED" @@ -48,7 +58,7 @@ func init() { } func (m RecurringRunMode) validateRecurringRunModeEnum(path, location string, value RecurringRunMode) error { - if err := validate.Enum(path, location, value, recurringRunModeEnum); err != nil { + if err := validate.EnumCase(path, location, value, recurringRunModeEnum, true); err != nil { return err } return nil @@ -68,3 +78,8 @@ func (m RecurringRunMode) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this recurring run mode based on context it is used +func (m RecurringRunMode) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v2beta1/go_http_client/recurring_run_model/runtime_error.go b/backend/api/v2beta1/go_http_client/recurring_run_model/runtime_error.go deleted file mode 100644 index 470bc222149..00000000000 --- a/backend/api/v2beta1/go_http_client/recurring_run_model/runtime_error.go +++ /dev/null @@ -1,89 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package recurring_run_model - -// This file was generated by the swagger tool. 
-// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "strconv" - - strfmt "github.com/go-openapi/strfmt" - - "github.com/go-openapi/errors" - "github.com/go-openapi/swag" -) - -// RuntimeError runtime error -// swagger:model runtimeError -type RuntimeError struct { - - // code - Code int32 `json:"code,omitempty"` - - // details - Details []*ProtobufAny `json:"details"` - - // error - Error string `json:"error,omitempty"` - - // message - Message string `json:"message,omitempty"` -} - -// Validate validates this runtime error -func (m *RuntimeError) Validate(formats strfmt.Registry) error { - var res []error - - if err := m.validateDetails(formats); err != nil { - res = append(res, err) - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} - -func (m *RuntimeError) validateDetails(formats strfmt.Registry) error { - - if swag.IsZero(m.Details) { // not required - return nil - } - - for i := 0; i < len(m.Details); i++ { - if swag.IsZero(m.Details[i]) { // not required - continue - } - - if m.Details[i] != nil { - if err := m.Details[i].Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("details" + "." + strconv.Itoa(i)) - } - return err - } - } - - } - - return nil -} - -// MarshalBinary interface implementation -func (m *RuntimeError) MarshalBinary() ([]byte, error) { - if m == nil { - return nil, nil - } - return swag.WriteJSON(m) -} - -// UnmarshalBinary interface implementation -func (m *RuntimeError) UnmarshalBinary(b []byte) error { - var res RuntimeError - if err := swag.ReadJSON(b, &res); err != nil { - return err - } - *m = res - return nil -} diff --git a/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_cron_schedule.go b/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_cron_schedule.go index 50627e34ee3..3bc7d600657 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_cron_schedule.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_cron_schedule.go @@ -6,14 +6,16 @@ package recurring_run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // V2beta1CronSchedule CronSchedule allow scheduling the recurring run with unix-like cron. 
+// // swagger:model v2beta1CronSchedule type V2beta1CronSchedule struct { @@ -49,7 +51,6 @@ func (m *V2beta1CronSchedule) Validate(formats strfmt.Registry) error { } func (m *V2beta1CronSchedule) validateEndTime(formats strfmt.Registry) error { - if swag.IsZero(m.EndTime) { // not required return nil } @@ -62,7 +63,6 @@ func (m *V2beta1CronSchedule) validateEndTime(formats strfmt.Registry) error { } func (m *V2beta1CronSchedule) validateStartTime(formats strfmt.Registry) error { - if swag.IsZero(m.StartTime) { // not required return nil } @@ -74,6 +74,11 @@ func (m *V2beta1CronSchedule) validateStartTime(formats strfmt.Registry) error { return nil } +// ContextValidate validates this v2beta1 cron schedule based on context it is used +func (m *V2beta1CronSchedule) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *V2beta1CronSchedule) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_list_recurring_runs_response.go b/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_list_recurring_runs_response.go index eafa98b5b11..3553d15bd2b 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_list_recurring_runs_response.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_list_recurring_runs_response.go @@ -6,15 +6,16 @@ package recurring_run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // V2beta1ListRecurringRunsResponse v2beta1 list recurring runs response +// // swagger:model v2beta1ListRecurringRunsResponse type V2beta1ListRecurringRunsResponse struct { @@ -43,7 +44,6 @@ func (m *V2beta1ListRecurringRunsResponse) Validate(formats strfmt.Registry) err } func (m *V2beta1ListRecurringRunsResponse) validateRecurringRuns(formats strfmt.Registry) error { - if swag.IsZero(m.RecurringRuns) { // not required return nil } @@ -57,6 +57,47 @@ func (m *V2beta1ListRecurringRunsResponse) validateRecurringRuns(formats strfmt. if err := m.RecurringRuns[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("recurringRuns" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("recurringRuns" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this v2beta1 list recurring runs response based on the context it is used +func (m *V2beta1ListRecurringRunsResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateRecurringRuns(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1ListRecurringRunsResponse) contextValidateRecurringRuns(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.RecurringRuns); i++ { + + if m.RecurringRuns[i] != nil { + + if swag.IsZero(m.RecurringRuns[i]) { // not required + return nil + } + + if err := m.RecurringRuns[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("recurringRuns" + "." 
+ strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("recurringRuns" + "." + strconv.Itoa(i)) } return err } diff --git a/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_periodic_schedule.go b/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_periodic_schedule.go index 4e88adff3a1..53279671b87 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_periodic_schedule.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_periodic_schedule.go @@ -6,14 +6,16 @@ package recurring_run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // V2beta1PeriodicSchedule PeriodicSchedule allow scheduling the recurring run periodically with certain interval. +// // swagger:model v2beta1PeriodicSchedule type V2beta1PeriodicSchedule struct { @@ -48,7 +50,6 @@ func (m *V2beta1PeriodicSchedule) Validate(formats strfmt.Registry) error { } func (m *V2beta1PeriodicSchedule) validateEndTime(formats strfmt.Registry) error { - if swag.IsZero(m.EndTime) { // not required return nil } @@ -61,7 +62,6 @@ func (m *V2beta1PeriodicSchedule) validateEndTime(formats strfmt.Registry) error } func (m *V2beta1PeriodicSchedule) validateStartTime(formats strfmt.Registry) error { - if swag.IsZero(m.StartTime) { // not required return nil } @@ -73,6 +73,11 @@ func (m *V2beta1PeriodicSchedule) validateStartTime(formats strfmt.Registry) err return nil } +// ContextValidate validates this v2beta1 periodic schedule based on context it is used +func (m *V2beta1PeriodicSchedule) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *V2beta1PeriodicSchedule) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_pipeline_version_reference.go b/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_pipeline_version_reference.go index 615a11c457e..086cdc4b00a 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_pipeline_version_reference.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_pipeline_version_reference.go @@ -6,12 +6,14 @@ package recurring_run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // V2beta1PipelineVersionReference Reference to an existing pipeline version. 
+// // swagger:model v2beta1PipelineVersionReference type V2beta1PipelineVersionReference struct { @@ -27,6 +29,11 @@ func (m *V2beta1PipelineVersionReference) Validate(formats strfmt.Registry) erro return nil } +// ContextValidate validates this v2beta1 pipeline version reference based on context it is used +func (m *V2beta1PipelineVersionReference) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *V2beta1PipelineVersionReference) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_recurring_run.go b/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_recurring_run.go index b7935a92bc3..3796c3cea12 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_recurring_run.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_recurring_run.go @@ -6,14 +6,16 @@ package recurring_run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // V2beta1RecurringRun v2beta1 recurring run +// // swagger:model v2beta1RecurringRun type V2beta1RecurringRun struct { @@ -40,7 +42,7 @@ type V2beta1RecurringRun struct { MaxConcurrency int64 `json:"max_concurrency,omitempty,string"` // mode - Mode RecurringRunMode `json:"mode,omitempty"` + Mode *RecurringRunMode `json:"mode,omitempty"` // TODO (gkclat): consider removing this field if it can be obtained from the parent experiment. // Output only. Namespace this recurring run belongs to. Derived from the parent experiment. @@ -71,7 +73,7 @@ type V2beta1RecurringRun struct { ServiceAccount string `json:"service_account,omitempty"` // status - Status V2beta1RecurringRunStatus `json:"status,omitempty"` + Status *V2beta1RecurringRunStatus `json:"status,omitempty"` // Required input field. // Specifies how a run is triggered. Support cron mode or periodic mode. 
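// --- Illustrative construction sketch (editorial, hypothetical; not emitted by go-swagger) ---
// With Mode and Status changed to pointer types in the struct above, callers now use the
// generated helper constructors (NewRecurringRunMode / Pointer) instead of assigning enum
// values directly. Only fields visible in this hunk are set; a real request needs more.
package main

import (
	"fmt"

	recurring_run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model"
)

func main() {
	// Only MODE_UNSPECIFIED is visible in this hunk; the full generated enum also carries
	// the ENABLE/DISABLE values described in the RecurringRunMode doc comment.
	mode := recurring_run_model.RecurringRunModeMODEUNSPECIFIED
	rr := &recurring_run_model.V2beta1RecurringRun{
		MaxConcurrency: 1,
		Mode:           recurring_run_model.NewRecurringRunMode(mode), // returns *RecurringRunMode
	}
	fmt.Println(*rr.Mode)
}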
@@ -125,7 +127,6 @@ func (m *V2beta1RecurringRun) Validate(formats strfmt.Registry) error { } func (m *V2beta1RecurringRun) validateCreatedAt(formats strfmt.Registry) error { - if swag.IsZero(m.CreatedAt) { // not required return nil } @@ -138,7 +139,6 @@ func (m *V2beta1RecurringRun) validateCreatedAt(formats strfmt.Registry) error { } func (m *V2beta1RecurringRun) validateError(formats strfmt.Registry) error { - if swag.IsZero(m.Error) { // not required return nil } @@ -147,6 +147,8 @@ func (m *V2beta1RecurringRun) validateError(formats strfmt.Registry) error { if err := m.Error.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("error") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("error") } return err } @@ -156,23 +158,25 @@ func (m *V2beta1RecurringRun) validateError(formats strfmt.Registry) error { } func (m *V2beta1RecurringRun) validateMode(formats strfmt.Registry) error { - if swag.IsZero(m.Mode) { // not required return nil } - if err := m.Mode.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("mode") + if m.Mode != nil { + if err := m.Mode.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("mode") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("mode") + } + return err } - return err } return nil } func (m *V2beta1RecurringRun) validatePipelineVersionReference(formats strfmt.Registry) error { - if swag.IsZero(m.PipelineVersionReference) { // not required return nil } @@ -181,6 +185,8 @@ func (m *V2beta1RecurringRun) validatePipelineVersionReference(formats strfmt.Re if err := m.PipelineVersionReference.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("pipeline_version_reference") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipeline_version_reference") } return err } @@ -190,7 +196,6 @@ func (m *V2beta1RecurringRun) validatePipelineVersionReference(formats strfmt.Re } func (m *V2beta1RecurringRun) validateRuntimeConfig(formats strfmt.Registry) error { - if swag.IsZero(m.RuntimeConfig) { // not required return nil } @@ -199,6 +204,8 @@ func (m *V2beta1RecurringRun) validateRuntimeConfig(formats strfmt.Registry) err if err := m.RuntimeConfig.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("runtime_config") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("runtime_config") } return err } @@ -208,23 +215,25 @@ func (m *V2beta1RecurringRun) validateRuntimeConfig(formats strfmt.Registry) err } func (m *V2beta1RecurringRun) validateStatus(formats strfmt.Registry) error { - if swag.IsZero(m.Status) { // not required return nil } - if err := m.Status.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("status") + if m.Status != nil { + if err := m.Status.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("status") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("status") + } + return err } - return err } return nil } func (m *V2beta1RecurringRun) validateTrigger(formats strfmt.Registry) error { - if swag.IsZero(m.Trigger) { // not required return nil } @@ -233,6 +242,8 @@ func (m *V2beta1RecurringRun) validateTrigger(formats strfmt.Registry) error { if err := 
m.Trigger.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("trigger") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("trigger") } return err } @@ -242,7 +253,6 @@ func (m *V2beta1RecurringRun) validateTrigger(formats strfmt.Registry) error { } func (m *V2beta1RecurringRun) validateUpdatedAt(formats strfmt.Registry) error { - if swag.IsZero(m.UpdatedAt) { // not required return nil } @@ -254,6 +264,179 @@ func (m *V2beta1RecurringRun) validateUpdatedAt(formats strfmt.Registry) error { return nil } +// ContextValidate validate this v2beta1 recurring run based on the context it is used +func (m *V2beta1RecurringRun) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateError(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateMode(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateNamespace(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidatePipelineVersionReference(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateRuntimeConfig(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateStatus(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateTrigger(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1RecurringRun) contextValidateError(ctx context.Context, formats strfmt.Registry) error { + + if m.Error != nil { + + if swag.IsZero(m.Error) { // not required + return nil + } + + if err := m.Error.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("error") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("error") + } + return err + } + } + + return nil +} + +func (m *V2beta1RecurringRun) contextValidateMode(ctx context.Context, formats strfmt.Registry) error { + + if m.Mode != nil { + + if swag.IsZero(m.Mode) { // not required + return nil + } + + if err := m.Mode.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("mode") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("mode") + } + return err + } + } + + return nil +} + +func (m *V2beta1RecurringRun) contextValidateNamespace(ctx context.Context, formats strfmt.Registry) error { + + if err := validate.ReadOnly(ctx, "namespace", "body", string(m.Namespace)); err != nil { + return err + } + + return nil +} + +func (m *V2beta1RecurringRun) contextValidatePipelineVersionReference(ctx context.Context, formats strfmt.Registry) error { + + if m.PipelineVersionReference != nil { + + if swag.IsZero(m.PipelineVersionReference) { // not required + return nil + } + + if err := m.PipelineVersionReference.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("pipeline_version_reference") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipeline_version_reference") + } + return err + } + } + + return nil +} + +func (m *V2beta1RecurringRun) contextValidateRuntimeConfig(ctx context.Context, formats strfmt.Registry) error { + + if m.RuntimeConfig != nil { + + if swag.IsZero(m.RuntimeConfig) { // not required 
+ return nil + } + + if err := m.RuntimeConfig.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("runtime_config") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("runtime_config") + } + return err + } + } + + return nil +} + +func (m *V2beta1RecurringRun) contextValidateStatus(ctx context.Context, formats strfmt.Registry) error { + + if m.Status != nil { + + if swag.IsZero(m.Status) { // not required + return nil + } + + if err := m.Status.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("status") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("status") + } + return err + } + } + + return nil +} + +func (m *V2beta1RecurringRun) contextValidateTrigger(ctx context.Context, formats strfmt.Registry) error { + + if m.Trigger != nil { + + if swag.IsZero(m.Trigger) { // not required + return nil + } + + if err := m.Trigger.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("trigger") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("trigger") + } + return err + } + } + + return nil +} + // MarshalBinary interface implementation func (m *V2beta1RecurringRun) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_recurring_run_status.go b/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_recurring_run_status.go index 5a6ed03d94d..ad278dc6262 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_recurring_run_status.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_recurring_run_status.go @@ -6,18 +6,28 @@ package recurring_run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // V2beta1RecurringRunStatus Output. The status of the recurring run. +// // swagger:model v2beta1RecurringRunStatus type V2beta1RecurringRunStatus string +func NewV2beta1RecurringRunStatus(value V2beta1RecurringRunStatus) *V2beta1RecurringRunStatus { + return &value +} + +// Pointer returns a pointer to a freshly-allocated V2beta1RecurringRunStatus. 
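// --- Illustrative validation sketch (editorial, hypothetical; not emitted by go-swagger) ---
// The regenerated models now expose two entry points: Validate for structural checks and
// ContextValidate for context-sensitive ones, such as the output-only "namespace" field
// checked with validate.ReadOnly in contextValidateNamespace above. A client would call both.
package main

import (
	"context"
	"fmt"

	"github.com/go-openapi/strfmt"

	recurring_run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model"
)

func main() {
	rr := &recurring_run_model.V2beta1RecurringRun{}

	if err := rr.Validate(strfmt.Default); err != nil {
		fmt.Println("structural validation failed:", err)
	}
	if err := rr.ContextValidate(context.Background(), strfmt.Default); err != nil {
		fmt.Println("context validation failed:", err)
	}
}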
+func (m V2beta1RecurringRunStatus) Pointer() *V2beta1RecurringRunStatus { + return &m +} + const ( // V2beta1RecurringRunStatusSTATUSUNSPECIFIED captures enum value "STATUS_UNSPECIFIED" @@ -44,7 +54,7 @@ func init() { } func (m V2beta1RecurringRunStatus) validateV2beta1RecurringRunStatusEnum(path, location string, value V2beta1RecurringRunStatus) error { - if err := validate.Enum(path, location, value, v2beta1RecurringRunStatusEnum); err != nil { + if err := validate.EnumCase(path, location, value, v2beta1RecurringRunStatusEnum, true); err != nil { return err } return nil @@ -64,3 +74,8 @@ func (m V2beta1RecurringRunStatus) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this v2beta1 recurring run status based on context it is used +func (m V2beta1RecurringRunStatus) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_runtime_config.go b/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_runtime_config.go index 79f4b25883f..86ae035c698 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_runtime_config.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_runtime_config.go @@ -6,12 +6,14 @@ package recurring_run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // V2beta1RuntimeConfig The runtime config. +// // swagger:model v2beta1RuntimeConfig type V2beta1RuntimeConfig struct { @@ -31,6 +33,11 @@ func (m *V2beta1RuntimeConfig) Validate(formats strfmt.Registry) error { return nil } +// ContextValidate validates this v2beta1 runtime config based on context it is used +func (m *V2beta1RuntimeConfig) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *V2beta1RuntimeConfig) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_trigger.go b/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_trigger.go index d738fe08810..e553ea38787 100644 --- a/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_trigger.go +++ b/backend/api/v2beta1/go_http_client/recurring_run_model/v2beta1_trigger.go @@ -6,13 +6,15 @@ package recurring_run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // V2beta1Trigger Trigger defines what starts a pipeline run. 
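// --- Illustrative trigger sketch (editorial, hypothetical; not emitted by go-swagger) ---
// V2beta1Trigger holds either a cron schedule or a periodic schedule; both fields are
// pointers, so the unused one is simply left nil. The "Cron" field name is assumed from the
// v2beta1 CronSchedule message and is not shown in this hunk, which only covers the optional
// StartTime/EndTime validation.
package main

import (
	recurring_run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model"
)

func main() {
	trigger := &recurring_run_model.V2beta1Trigger{
		CronSchedule: &recurring_run_model.V2beta1CronSchedule{
			// Assumed field: unix-like cron expression; StartTime/EndTime stay unset here.
			Cron: "0 0 * * * *",
		},
		// PeriodicSchedule stays nil when a cron schedule is used.
	}
	_ = trigger
}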
+// // swagger:model v2beta1Trigger type V2beta1Trigger struct { @@ -42,7 +44,6 @@ func (m *V2beta1Trigger) Validate(formats strfmt.Registry) error { } func (m *V2beta1Trigger) validateCronSchedule(formats strfmt.Registry) error { - if swag.IsZero(m.CronSchedule) { // not required return nil } @@ -51,6 +52,8 @@ func (m *V2beta1Trigger) validateCronSchedule(formats strfmt.Registry) error { if err := m.CronSchedule.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("cron_schedule") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("cron_schedule") } return err } @@ -60,7 +63,6 @@ func (m *V2beta1Trigger) validateCronSchedule(formats strfmt.Registry) error { } func (m *V2beta1Trigger) validatePeriodicSchedule(formats strfmt.Registry) error { - if swag.IsZero(m.PeriodicSchedule) { // not required return nil } @@ -69,6 +71,68 @@ func (m *V2beta1Trigger) validatePeriodicSchedule(formats strfmt.Registry) error if err := m.PeriodicSchedule.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("periodic_schedule") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("periodic_schedule") + } + return err + } + } + + return nil +} + +// ContextValidate validate this v2beta1 trigger based on the context it is used +func (m *V2beta1Trigger) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateCronSchedule(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidatePeriodicSchedule(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1Trigger) contextValidateCronSchedule(ctx context.Context, formats strfmt.Registry) error { + + if m.CronSchedule != nil { + + if swag.IsZero(m.CronSchedule) { // not required + return nil + } + + if err := m.CronSchedule.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("cron_schedule") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("cron_schedule") + } + return err + } + } + + return nil +} + +func (m *V2beta1Trigger) contextValidatePeriodicSchedule(ctx context.Context, formats strfmt.Registry) error { + + if m.PeriodicSchedule != nil { + + if swag.IsZero(m.PeriodicSchedule) { // not required + return nil + } + + if err := m.PeriodicSchedule.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("periodic_schedule") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("periodic_schedule") } return err } diff --git a/backend/api/v2beta1/go_http_client/run_client/run_client.go b/backend/api/v2beta1/go_http_client/run_client/run_client.go index 07aff5762d2..fbcf27a2a1f 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_client.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_client.go @@ -8,8 +8,7 @@ package run_client import ( "github.com/go-openapi/runtime" httptransport "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service" ) @@ -56,9 +55,7 @@ func New(transport runtime.ClientTransport, formats strfmt.Registry) *Run { cli := new(Run) cli.Transport = transport - 
cli.RunService = run_service.New(transport, formats) - return cli } @@ -103,7 +100,7 @@ func (cfg *TransportConfig) WithSchemes(schemes []string) *TransportConfig { // Run is a client for run type Run struct { - RunService *run_service.Client + RunService run_service.ClientService Transport runtime.ClientTransport } @@ -111,7 +108,5 @@ type Run struct { // SetTransport changes the transport on the client and all its subresources func (c *Run) SetTransport(transport runtime.ClientTransport) { c.Transport = transport - c.RunService.SetTransport(transport) - } diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_archive_run_parameters.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_archive_run_parameters.go index 7862fd62a43..6de30787179 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_archive_run_parameters.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_archive_run_parameters.go @@ -13,57 +13,63 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewRunServiceArchiveRunParams creates a new RunServiceArchiveRunParams object -// with the default values initialized. +// NewRunServiceArchiveRunParams creates a new RunServiceArchiveRunParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewRunServiceArchiveRunParams() *RunServiceArchiveRunParams { - var () return &RunServiceArchiveRunParams{ - timeout: cr.DefaultTimeout, } } // NewRunServiceArchiveRunParamsWithTimeout creates a new RunServiceArchiveRunParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRunServiceArchiveRunParamsWithTimeout(timeout time.Duration) *RunServiceArchiveRunParams { - var () return &RunServiceArchiveRunParams{ - timeout: timeout, } } // NewRunServiceArchiveRunParamsWithContext creates a new RunServiceArchiveRunParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRunServiceArchiveRunParamsWithContext(ctx context.Context) *RunServiceArchiveRunParams { - var () return &RunServiceArchiveRunParams{ - Context: ctx, } } // NewRunServiceArchiveRunParamsWithHTTPClient creates a new RunServiceArchiveRunParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRunServiceArchiveRunParamsWithHTTPClient(client *http.Client) *RunServiceArchiveRunParams { - var () return &RunServiceArchiveRunParams{ HTTPClient: client, } } -/*RunServiceArchiveRunParams contains all the parameters to send to the API endpoint -for the run service archive run operation typically these are written to a http.Request +/* +RunServiceArchiveRunParams contains all the parameters to send to the API endpoint + + for the run service archive run operation. + + Typically these are written to a http.Request. */ type RunServiceArchiveRunParams struct { - /*RunID - The ID of the run to be archived. + /* ExperimentID. + The ID of the parent experiment. 
+ */ + ExperimentID *string + + /* RunID. + + The ID of the run to be archived. */ RunID string @@ -72,6 +78,21 @@ type RunServiceArchiveRunParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the run service archive run params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceArchiveRunParams) WithDefaults() *RunServiceArchiveRunParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the run service archive run params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceArchiveRunParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the run service archive run params func (o *RunServiceArchiveRunParams) WithTimeout(timeout time.Duration) *RunServiceArchiveRunParams { o.SetTimeout(timeout) @@ -105,6 +126,17 @@ func (o *RunServiceArchiveRunParams) SetHTTPClient(client *http.Client) { o.HTTPClient = client } +// WithExperimentID adds the experimentID to the run service archive run params +func (o *RunServiceArchiveRunParams) WithExperimentID(experimentID *string) *RunServiceArchiveRunParams { + o.SetExperimentID(experimentID) + return o +} + +// SetExperimentID adds the experimentId to the run service archive run params +func (o *RunServiceArchiveRunParams) SetExperimentID(experimentID *string) { + o.ExperimentID = experimentID +} + // WithRunID adds the runID to the run service archive run params func (o *RunServiceArchiveRunParams) WithRunID(runID string) *RunServiceArchiveRunParams { o.SetRunID(runID) @@ -124,6 +156,23 @@ func (o *RunServiceArchiveRunParams) WriteToRequest(r runtime.ClientRequest, reg } var res []error + if o.ExperimentID != nil { + + // query param experiment_id + var qrExperimentID string + + if o.ExperimentID != nil { + qrExperimentID = *o.ExperimentID + } + qExperimentID := qrExperimentID + if qExperimentID != "" { + + if err := r.SetQueryParam("experiment_id", qExperimentID); err != nil { + return err + } + } + } + // path param run_id if err := r.SetPathParam("run_id", o.RunID); err != nil { return err diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_archive_run_responses.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_archive_run_responses.go index 632449506a2..595c093940d 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_archive_run_responses.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_archive_run_responses.go @@ -6,14 +6,14 @@ package run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" ) // RunServiceArchiveRunReader is a Reader for the RunServiceArchiveRun structure. @@ -24,14 +24,12 @@ type RunServiceArchiveRunReader struct { // ReadResponse reads a server response into the received o. 
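// --- Illustrative parameter-building sketch (editorial, hypothetical; not emitted by go-swagger) ---
// The regenerated params expose fluent With* setters plus WithDefaults, and archive-run now
// accepts an optional experiment_id query parameter alongside the run_id path parameter.
// The IDs below are placeholders.
package main

import (
	"time"

	"github.com/go-openapi/swag"

	run_service "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service"
)

func main() {
	params := run_service.NewRunServiceArchiveRunParams().
		WithDefaults().                // currently a no-op: no server-side defaults are defined
		WithTimeout(30 * time.Second).
		WithExperimentID(swag.String("my-experiment-id")). // sent as ?experiment_id=... when non-empty
		WithRunID("my-run-id")                              // path parameter
	_ = params
}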
func (o *RunServiceArchiveRunReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRunServiceArchiveRunOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRunServiceArchiveRunDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRunServiceArchiveRunOK() *RunServiceArchiveRunOK { return &RunServiceArchiveRunOK{} } -/*RunServiceArchiveRunOK handles this case with default header values. +/* +RunServiceArchiveRunOK describes a response with status code 200, with default header values. A successful response. */ @@ -57,8 +56,48 @@ type RunServiceArchiveRunOK struct { Payload interface{} } +// IsSuccess returns true when this run service archive run o k response has a 2xx status code +func (o *RunServiceArchiveRunOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this run service archive run o k response has a 3xx status code +func (o *RunServiceArchiveRunOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this run service archive run o k response has a 4xx status code +func (o *RunServiceArchiveRunOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this run service archive run o k response has a 5xx status code +func (o *RunServiceArchiveRunOK) IsServerError() bool { + return false +} + +// IsCode returns true when this run service archive run o k response a status code equal to that given +func (o *RunServiceArchiveRunOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the run service archive run o k response +func (o *RunServiceArchiveRunOK) Code() int { + return 200 +} + func (o *RunServiceArchiveRunOK) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:archive][%d] runServiceArchiveRunOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:archive][%d] runServiceArchiveRunOK %s", 200, payload) +} + +func (o *RunServiceArchiveRunOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:archive][%d] runServiceArchiveRunOK %s", 200, payload) +} + +func (o *RunServiceArchiveRunOK) GetPayload() interface{} { + return o.Payload } func (o *RunServiceArchiveRunOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewRunServiceArchiveRunDefault(code int) *RunServiceArchiveRunDefault { } } -/*RunServiceArchiveRunDefault handles this case with default header values. +/* +RunServiceArchiveRunDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type RunServiceArchiveRunDefault struct { _statusCode int - Payload *run_model.RuntimeError + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this run service archive run default response has a 2xx status code +func (o *RunServiceArchiveRunDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this run service archive run default response has a 3xx status code +func (o *RunServiceArchiveRunDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this run service archive run default response has a 4xx status code +func (o *RunServiceArchiveRunDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this run service archive run default response has a 5xx status code +func (o *RunServiceArchiveRunDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this run service archive run default response a status code equal to that given +func (o *RunServiceArchiveRunDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the run service archive run default response @@ -94,12 +159,22 @@ func (o *RunServiceArchiveRunDefault) Code() int { } func (o *RunServiceArchiveRunDefault) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:archive][%d] RunService_ArchiveRun default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:archive][%d] RunService_ArchiveRun default %s", o._statusCode, payload) +} + +func (o *RunServiceArchiveRunDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:archive][%d] RunService_ArchiveRun default %s", o._statusCode, payload) +} + +func (o *RunServiceArchiveRunDefault) GetPayload() *run_model.GooglerpcStatus { + return o.Payload } func (o *RunServiceArchiveRunDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(run_model.RuntimeError) + o.Payload = new(run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_client.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_client.go index c85ddbfe03d..3aa7c5bdddc 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_client.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_client.go @@ -7,15 +7,40 @@ package run_service import ( "github.com/go-openapi/runtime" - - strfmt "github.com/go-openapi/strfmt" + httptransport "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" ) // New creates a new run service API client. -func New(transport runtime.ClientTransport, formats strfmt.Registry) *Client { +func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService { return &Client{transport: transport, formats: formats} } +// New creates a new run service API client with basic auth credentials. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - user: user for basic authentication header. +// - password: password for basic authentication header. 
+func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BasicAuth(user, password) + return &Client{transport: transport, formats: strfmt.Default} +} + +// New creates a new run service API client with a bearer token for authentication. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - bearerToken: bearer token for Bearer authentication header. +func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BearerToken(bearerToken) + return &Client{transport: transport, formats: strfmt.Default} +} + /* Client for run service API */ @@ -24,16 +49,41 @@ type Client struct { formats strfmt.Registry } +// ClientOption may be used to customize the behavior of Client methods. +type ClientOption func(*runtime.ClientOperation) + +// ClientService is the interface for Client methods +type ClientService interface { + RunServiceArchiveRun(params *RunServiceArchiveRunParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceArchiveRunOK, error) + + RunServiceCreateRun(params *RunServiceCreateRunParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceCreateRunOK, error) + + RunServiceDeleteRun(params *RunServiceDeleteRunParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceDeleteRunOK, error) + + RunServiceGetRun(params *RunServiceGetRunParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceGetRunOK, error) + + RunServiceListRuns(params *RunServiceListRunsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceListRunsOK, error) + + RunServiceReadArtifact(params *RunServiceReadArtifactParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceReadArtifactOK, error) + + RunServiceRetryRun(params *RunServiceRetryRunParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceRetryRunOK, error) + + RunServiceTerminateRun(params *RunServiceTerminateRunParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceTerminateRunOK, error) + + RunServiceUnarchiveRun(params *RunServiceUnarchiveRunParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceUnarchiveRunOK, error) + + SetTransport(transport runtime.ClientTransport) +} + /* RunServiceArchiveRun archives a run in an experiment given by run ID and experiment ID */ -func (a *Client) RunServiceArchiveRun(params *RunServiceArchiveRunParams, authInfo runtime.ClientAuthInfoWriter) (*RunServiceArchiveRunOK, error) { +func (a *Client) RunServiceArchiveRun(params *RunServiceArchiveRunParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceArchiveRunOK, error) { // TODO: Validate the params before sending if params == nil { params = NewRunServiceArchiveRunParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RunService_ArchiveRun", Method: "POST", PathPattern: "/apis/v2beta1/runs/{run_id}:archive", @@ -45,24 +95,33 @@ func (a *Client) RunServiceArchiveRun(params *RunServiceArchiveRunParams, authIn AuthInfo: authInfo, Context: params.Context, 
Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RunServiceArchiveRunOK), nil - + success, ok := result.(*RunServiceArchiveRunOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RunServiceArchiveRunDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RunServiceCreateRun creates a new run in an experiment specified by experiment ID if experiment ID is not specified the run is created in the default experiment */ -func (a *Client) RunServiceCreateRun(params *RunServiceCreateRunParams, authInfo runtime.ClientAuthInfoWriter) (*RunServiceCreateRunOK, error) { +func (a *Client) RunServiceCreateRun(params *RunServiceCreateRunParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceCreateRunOK, error) { // TODO: Validate the params before sending if params == nil { params = NewRunServiceCreateRunParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RunService_CreateRun", Method: "POST", PathPattern: "/apis/v2beta1/runs", @@ -74,24 +133,33 @@ func (a *Client) RunServiceCreateRun(params *RunServiceCreateRunParams, authInfo AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RunServiceCreateRunOK), nil - + success, ok := result.(*RunServiceCreateRunOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RunServiceCreateRunDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RunServiceDeleteRun deletes a run in an experiment given by run ID and experiment ID */ -func (a *Client) RunServiceDeleteRun(params *RunServiceDeleteRunParams, authInfo runtime.ClientAuthInfoWriter) (*RunServiceDeleteRunOK, error) { +func (a *Client) RunServiceDeleteRun(params *RunServiceDeleteRunParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceDeleteRunOK, error) { // TODO: Validate the params before sending if params == nil { params = NewRunServiceDeleteRunParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RunService_DeleteRun", Method: "DELETE", PathPattern: "/apis/v2beta1/runs/{run_id}", @@ -103,24 +171,33 @@ func (a *Client) RunServiceDeleteRun(params *RunServiceDeleteRunParams, authInfo AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RunServiceDeleteRunOK), nil - + success, ok := result.(*RunServiceDeleteRunOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RunServiceDeleteRunDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RunServiceGetRun finds a specific run by ID */ -func (a *Client) RunServiceGetRun(params *RunServiceGetRunParams, authInfo runtime.ClientAuthInfoWriter) 
(*RunServiceGetRunOK, error) { +func (a *Client) RunServiceGetRun(params *RunServiceGetRunParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceGetRunOK, error) { // TODO: Validate the params before sending if params == nil { params = NewRunServiceGetRunParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RunService_GetRun", Method: "GET", PathPattern: "/apis/v2beta1/runs/{run_id}", @@ -132,24 +209,33 @@ func (a *Client) RunServiceGetRun(params *RunServiceGetRunParams, authInfo runti AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RunServiceGetRunOK), nil - + success, ok := result.(*RunServiceGetRunOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RunServiceGetRunDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RunServiceListRuns finds all runs in an experiment given by experiment ID if experiment id is not specified finds all runs across all experiments */ -func (a *Client) RunServiceListRuns(params *RunServiceListRunsParams, authInfo runtime.ClientAuthInfoWriter) (*RunServiceListRunsOK, error) { +func (a *Client) RunServiceListRuns(params *RunServiceListRunsParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceListRunsOK, error) { // TODO: Validate the params before sending if params == nil { params = NewRunServiceListRunsParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RunService_ListRuns", Method: "GET", PathPattern: "/apis/v2beta1/runs", @@ -161,24 +247,33 @@ func (a *Client) RunServiceListRuns(params *RunServiceListRunsParams, authInfo r AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RunServiceListRunsOK), nil - + success, ok := result.(*RunServiceListRunsOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RunServiceListRunsDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RunServiceReadArtifact finds artifact data in a run */ -func (a *Client) RunServiceReadArtifact(params *RunServiceReadArtifactParams, authInfo runtime.ClientAuthInfoWriter) (*RunServiceReadArtifactOK, error) { +func (a *Client) RunServiceReadArtifact(params *RunServiceReadArtifactParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceReadArtifactOK, error) { // TODO: Validate the params before sending if params == nil { params = NewRunServiceReadArtifactParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RunService_ReadArtifact", Method: "GET", PathPattern: "/apis/v2beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read", @@ -190,24 +285,33 @@ func (a *Client) RunServiceReadArtifact(params *RunServiceReadArtifactParams, au AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err 
:= a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RunServiceReadArtifactOK), nil - + success, ok := result.(*RunServiceReadArtifactOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RunServiceReadArtifactDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RunServiceRetryRun res initiates a failed or terminated run */ -func (a *Client) RunServiceRetryRun(params *RunServiceRetryRunParams, authInfo runtime.ClientAuthInfoWriter) (*RunServiceRetryRunOK, error) { +func (a *Client) RunServiceRetryRun(params *RunServiceRetryRunParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceRetryRunOK, error) { // TODO: Validate the params before sending if params == nil { params = NewRunServiceRetryRunParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RunService_RetryRun", Method: "POST", PathPattern: "/apis/v2beta1/runs/{run_id}:retry", @@ -219,24 +323,33 @@ func (a *Client) RunServiceRetryRun(params *RunServiceRetryRunParams, authInfo r AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RunServiceRetryRunOK), nil - + success, ok := result.(*RunServiceRetryRunOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RunServiceRetryRunDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RunServiceTerminateRun terminates an active run */ -func (a *Client) RunServiceTerminateRun(params *RunServiceTerminateRunParams, authInfo runtime.ClientAuthInfoWriter) (*RunServiceTerminateRunOK, error) { +func (a *Client) RunServiceTerminateRun(params *RunServiceTerminateRunParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceTerminateRunOK, error) { // TODO: Validate the params before sending if params == nil { params = NewRunServiceTerminateRunParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RunService_TerminateRun", Method: "POST", PathPattern: "/apis/v2beta1/runs/{run_id}:terminate", @@ -248,24 +361,33 @@ func (a *Client) RunServiceTerminateRun(params *RunServiceTerminateRunParams, au AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RunServiceTerminateRunOK), nil - + success, ok := result.(*RunServiceTerminateRunOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RunServiceTerminateRunDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } /* RunServiceUnarchiveRun restores an archived run in an experiment given by run ID and experiment ID */ -func (a *Client) RunServiceUnarchiveRun(params *RunServiceUnarchiveRunParams, authInfo runtime.ClientAuthInfoWriter) (*RunServiceUnarchiveRunOK, error) { +func (a *Client) RunServiceUnarchiveRun(params *RunServiceUnarchiveRunParams, authInfo 
runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceUnarchiveRunOK, error) { // TODO: Validate the params before sending if params == nil { params = NewRunServiceUnarchiveRunParams() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "RunService_UnarchiveRun", Method: "POST", PathPattern: "/apis/v2beta1/runs/{run_id}:unarchive", @@ -277,12 +399,22 @@ func (a *Client) RunServiceUnarchiveRun(params *RunServiceUnarchiveRunParams, au AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*RunServiceUnarchiveRunOK), nil - + success, ok := result.(*RunServiceUnarchiveRunOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*RunServiceUnarchiveRunDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } // SetTransport changes the transport on the client diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_create_run_parameters.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_create_run_parameters.go index 15191d5c560..4bad2b61f74 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_create_run_parameters.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_create_run_parameters.go @@ -13,72 +13,88 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" ) -// NewRunServiceCreateRunParams creates a new RunServiceCreateRunParams object -// with the default values initialized. +// NewRunServiceCreateRunParams creates a new RunServiceCreateRunParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewRunServiceCreateRunParams() *RunServiceCreateRunParams { - var () return &RunServiceCreateRunParams{ - timeout: cr.DefaultTimeout, } } // NewRunServiceCreateRunParamsWithTimeout creates a new RunServiceCreateRunParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRunServiceCreateRunParamsWithTimeout(timeout time.Duration) *RunServiceCreateRunParams { - var () return &RunServiceCreateRunParams{ - timeout: timeout, } } // NewRunServiceCreateRunParamsWithContext creates a new RunServiceCreateRunParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. 
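For reference, a minimal caller-side sketch of the variadic opts ...ClientOption parameter that every regenerated run_service method now accepts. It assumes the stock go-swagger ClientOption type, func(*runtime.ClientOperation), defined elsewhere in the generated client; the host, run ID, and bearer token are placeholders, not values taken from this change.

package main

import (
	"fmt"

	"github.com/go-openapi/runtime"
	httptransport "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service"
)

func main() {
	// Client against a port-forwarded KFP API server (host is a placeholder).
	transport := httptransport.New("localhost:8888", "/", []string{"http"})
	c := run_service.New(transport, strfmt.Default)

	// A ClientOption mutates the runtime.ClientOperation before it is submitted;
	// this one pins the call to HTTPS and is purely illustrative.
	httpsOnly := func(op *runtime.ClientOperation) {
		op.Schemes = []string{"https"}
	}

	params := run_service.NewRunServiceGetRunParams().WithRunID("some-run-id")
	authInfo := httptransport.BearerToken("placeholder-token")

	ok, err := c.RunServiceGetRun(params, authInfo, httpsOnly)
	if err != nil {
		fmt.Println("get run failed:", err)
		return
	}
	fmt.Println("run:", ok.GetPayload().DisplayName)
}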
func NewRunServiceCreateRunParamsWithContext(ctx context.Context) *RunServiceCreateRunParams { - var () return &RunServiceCreateRunParams{ - Context: ctx, } } // NewRunServiceCreateRunParamsWithHTTPClient creates a new RunServiceCreateRunParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRunServiceCreateRunParamsWithHTTPClient(client *http.Client) *RunServiceCreateRunParams { - var () return &RunServiceCreateRunParams{ HTTPClient: client, } } -/*RunServiceCreateRunParams contains all the parameters to send to the API endpoint -for the run service create run operation typically these are written to a http.Request +/* +RunServiceCreateRunParams contains all the parameters to send to the API endpoint + + for the run service create run operation. + + Typically these are written to a http.Request. */ type RunServiceCreateRunParams struct { - /*Body - Run to be created. + /* ExperimentID. + The ID of the parent experiment. */ - Body *run_model.V2beta1Run - /*ExperimentID - The ID of the parent experiment. + ExperimentID *string + /* Run. + + Run to be created. */ - ExperimentID *string + Run *run_model.V2beta1Run timeout time.Duration Context context.Context HTTPClient *http.Client } +// WithDefaults hydrates default values in the run service create run params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceCreateRunParams) WithDefaults() *RunServiceCreateRunParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the run service create run params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceCreateRunParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the run service create run params func (o *RunServiceCreateRunParams) WithTimeout(timeout time.Duration) *RunServiceCreateRunParams { o.SetTimeout(timeout) @@ -112,17 +128,6 @@ func (o *RunServiceCreateRunParams) SetHTTPClient(client *http.Client) { o.HTTPClient = client } -// WithBody adds the body to the run service create run params -func (o *RunServiceCreateRunParams) WithBody(body *run_model.V2beta1Run) *RunServiceCreateRunParams { - o.SetBody(body) - return o -} - -// SetBody adds the body to the run service create run params -func (o *RunServiceCreateRunParams) SetBody(body *run_model.V2beta1Run) { - o.Body = body -} - // WithExperimentID adds the experimentID to the run service create run params func (o *RunServiceCreateRunParams) WithExperimentID(experimentID *string) *RunServiceCreateRunParams { o.SetExperimentID(experimentID) @@ -134,6 +139,17 @@ func (o *RunServiceCreateRunParams) SetExperimentID(experimentID *string) { o.ExperimentID = experimentID } +// WithRun adds the run to the run service create run params +func (o *RunServiceCreateRunParams) WithRun(run *run_model.V2beta1Run) *RunServiceCreateRunParams { + o.SetRun(run) + return o +} + +// SetRun adds the run to the run service create run params +func (o *RunServiceCreateRunParams) SetRun(run *run_model.V2beta1Run) { + o.Run = run +} + // WriteToRequest writes these params to a swagger request func (o *RunServiceCreateRunParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { @@ -142,26 +158,26 @@ func (o *RunServiceCreateRunParams) WriteToRequest(r runtime.ClientRequest, reg } var res []error - if o.Body != nil { - if 
err := r.SetBodyParam(o.Body); err != nil { - return err - } - } - if o.ExperimentID != nil { // query param experiment_id var qrExperimentID string + if o.ExperimentID != nil { qrExperimentID = *o.ExperimentID } qExperimentID := qrExperimentID if qExperimentID != "" { + if err := r.SetQueryParam("experiment_id", qExperimentID); err != nil { return err } } - + } + if o.Run != nil { + if err := r.SetBodyParam(o.Run); err != nil { + return err + } } if len(res) > 0 { diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_create_run_responses.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_create_run_responses.go index d2f26101150..68976167a50 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_create_run_responses.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_create_run_responses.go @@ -6,14 +6,14 @@ package run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" ) // RunServiceCreateRunReader is a Reader for the RunServiceCreateRun structure. @@ -24,14 +24,12 @@ type RunServiceCreateRunReader struct { // ReadResponse reads a server response into the received o. func (o *RunServiceCreateRunReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRunServiceCreateRunOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRunServiceCreateRunDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRunServiceCreateRunOK() *RunServiceCreateRunOK { return &RunServiceCreateRunOK{} } -/*RunServiceCreateRunOK handles this case with default header values. +/* +RunServiceCreateRunOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type RunServiceCreateRunOK struct { Payload *run_model.V2beta1Run } +// IsSuccess returns true when this run service create run o k response has a 2xx status code +func (o *RunServiceCreateRunOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this run service create run o k response has a 3xx status code +func (o *RunServiceCreateRunOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this run service create run o k response has a 4xx status code +func (o *RunServiceCreateRunOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this run service create run o k response has a 5xx status code +func (o *RunServiceCreateRunOK) IsServerError() bool { + return false +} + +// IsCode returns true when this run service create run o k response a status code equal to that given +func (o *RunServiceCreateRunOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the run service create run o k response +func (o *RunServiceCreateRunOK) Code() int { + return 200 +} + func (o *RunServiceCreateRunOK) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/runs][%d] runServiceCreateRunOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/runs][%d] runServiceCreateRunOK %s", 200, payload) +} + +func (o *RunServiceCreateRunOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/runs][%d] runServiceCreateRunOK %s", 200, payload) +} + +func (o *RunServiceCreateRunOK) GetPayload() *run_model.V2beta1Run { + return o.Payload } func (o *RunServiceCreateRunOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewRunServiceCreateRunDefault(code int) *RunServiceCreateRunDefault { } } -/*RunServiceCreateRunDefault handles this case with default header values. +/* +RunServiceCreateRunDefault describes a response with status code -1, with default header values. An unexpected error response. 
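A hedged usage sketch of the reshaped CreateRun surface shown above: the request body now travels in the Run field (WithRun/SetRun) instead of Body, and the default error payload is *run_model.GooglerpcStatus. It assumes the RunServiceCreateRun client method was regenerated with the same signature pattern as the methods in this hunk, and that, as in stock go-swagger readers, non-2xx responses come back as the generated default type; the host, experiment ID, and display name are placeholders.

package main

import (
	"fmt"

	httptransport "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service"
	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model"
)

func main() {
	c := run_service.New(httptransport.New("localhost:8888", "/", []string{"http"}), strfmt.Default)

	// The run to create is set via WithRun, which maps to the new Run body parameter.
	run := &run_model.V2beta1Run{
		DisplayName:  "example-run",
		ExperimentID: "placeholder-experiment-id",
	}
	params := run_service.NewRunServiceCreateRunParams().WithRun(run)

	created, err := c.RunServiceCreateRun(params, nil)
	if err != nil {
		// Non-2xx responses surface as the generated default type, whose payload
		// is now a GooglerpcStatus rather than a RuntimeError.
		if def, ok := err.(*run_service.RunServiceCreateRunDefault); ok {
			fmt.Printf("API error %d: %s\n", def.Code(), def.GetPayload().Message)
			return
		}
		fmt.Println("transport error:", err)
		return
	}
	fmt.Println("created run:", created.GetPayload().RunID)
}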
*/ type RunServiceCreateRunDefault struct { _statusCode int - Payload *run_model.RuntimeError + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this run service create run default response has a 2xx status code +func (o *RunServiceCreateRunDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this run service create run default response has a 3xx status code +func (o *RunServiceCreateRunDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this run service create run default response has a 4xx status code +func (o *RunServiceCreateRunDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this run service create run default response has a 5xx status code +func (o *RunServiceCreateRunDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this run service create run default response a status code equal to that given +func (o *RunServiceCreateRunDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the run service create run default response @@ -96,12 +161,22 @@ func (o *RunServiceCreateRunDefault) Code() int { } func (o *RunServiceCreateRunDefault) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/runs][%d] RunService_CreateRun default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/runs][%d] RunService_CreateRun default %s", o._statusCode, payload) +} + +func (o *RunServiceCreateRunDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/runs][%d] RunService_CreateRun default %s", o._statusCode, payload) +} + +func (o *RunServiceCreateRunDefault) GetPayload() *run_model.GooglerpcStatus { + return o.Payload } func (o *RunServiceCreateRunDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(run_model.RuntimeError) + o.Payload = new(run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_delete_run_parameters.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_delete_run_parameters.go index 888540f2134..d47f8c2ce95 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_delete_run_parameters.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_delete_run_parameters.go @@ -13,62 +13,63 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewRunServiceDeleteRunParams creates a new RunServiceDeleteRunParams object -// with the default values initialized. +// NewRunServiceDeleteRunParams creates a new RunServiceDeleteRunParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewRunServiceDeleteRunParams() *RunServiceDeleteRunParams { - var () return &RunServiceDeleteRunParams{ - timeout: cr.DefaultTimeout, } } // NewRunServiceDeleteRunParamsWithTimeout creates a new RunServiceDeleteRunParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRunServiceDeleteRunParamsWithTimeout(timeout time.Duration) *RunServiceDeleteRunParams { - var () return &RunServiceDeleteRunParams{ - timeout: timeout, } } // NewRunServiceDeleteRunParamsWithContext creates a new RunServiceDeleteRunParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRunServiceDeleteRunParamsWithContext(ctx context.Context) *RunServiceDeleteRunParams { - var () return &RunServiceDeleteRunParams{ - Context: ctx, } } // NewRunServiceDeleteRunParamsWithHTTPClient creates a new RunServiceDeleteRunParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRunServiceDeleteRunParamsWithHTTPClient(client *http.Client) *RunServiceDeleteRunParams { - var () return &RunServiceDeleteRunParams{ HTTPClient: client, } } -/*RunServiceDeleteRunParams contains all the parameters to send to the API endpoint -for the run service delete run operation typically these are written to a http.Request +/* +RunServiceDeleteRunParams contains all the parameters to send to the API endpoint + + for the run service delete run operation. + + Typically these are written to a http.Request. */ type RunServiceDeleteRunParams struct { - /*ExperimentID - The ID of the parent experiment. + /* ExperimentID. + The ID of the parent experiment. */ ExperimentID *string - /*RunID - The ID of the run to be deleted. + /* RunID. + + The ID of the run to be deleted. */ RunID string @@ -77,6 +78,21 @@ type RunServiceDeleteRunParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the run service delete run params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceDeleteRunParams) WithDefaults() *RunServiceDeleteRunParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the run service delete run params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *RunServiceDeleteRunParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the run service delete run params func (o *RunServiceDeleteRunParams) WithTimeout(timeout time.Duration) *RunServiceDeleteRunParams { o.SetTimeout(timeout) @@ -144,16 +160,17 @@ func (o *RunServiceDeleteRunParams) WriteToRequest(r runtime.ClientRequest, reg // query param experiment_id var qrExperimentID string + if o.ExperimentID != nil { qrExperimentID = *o.ExperimentID } qExperimentID := qrExperimentID if qExperimentID != "" { + if err := r.SetQueryParam("experiment_id", qExperimentID); err != nil { return err } } - } // path param run_id diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_delete_run_responses.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_delete_run_responses.go index cc5038ddd3a..228452e5984 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_delete_run_responses.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_delete_run_responses.go @@ -6,14 +6,14 @@ package run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" ) // RunServiceDeleteRunReader is a Reader for the RunServiceDeleteRun structure. @@ -24,14 +24,12 @@ type RunServiceDeleteRunReader struct { // ReadResponse reads a server response into the received o. func (o *RunServiceDeleteRunReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRunServiceDeleteRunOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRunServiceDeleteRunDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRunServiceDeleteRunOK() *RunServiceDeleteRunOK { return &RunServiceDeleteRunOK{} } -/*RunServiceDeleteRunOK handles this case with default header values. +/* +RunServiceDeleteRunOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type RunServiceDeleteRunOK struct { Payload interface{} } +// IsSuccess returns true when this run service delete run o k response has a 2xx status code +func (o *RunServiceDeleteRunOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this run service delete run o k response has a 3xx status code +func (o *RunServiceDeleteRunOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this run service delete run o k response has a 4xx status code +func (o *RunServiceDeleteRunOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this run service delete run o k response has a 5xx status code +func (o *RunServiceDeleteRunOK) IsServerError() bool { + return false +} + +// IsCode returns true when this run service delete run o k response a status code equal to that given +func (o *RunServiceDeleteRunOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the run service delete run o k response +func (o *RunServiceDeleteRunOK) Code() int { + return 200 +} + func (o *RunServiceDeleteRunOK) Error() string { - return fmt.Sprintf("[DELETE /apis/v2beta1/runs/{run_id}][%d] runServiceDeleteRunOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v2beta1/runs/{run_id}][%d] runServiceDeleteRunOK %s", 200, payload) +} + +func (o *RunServiceDeleteRunOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v2beta1/runs/{run_id}][%d] runServiceDeleteRunOK %s", 200, payload) +} + +func (o *RunServiceDeleteRunOK) GetPayload() interface{} { + return o.Payload } func (o *RunServiceDeleteRunOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewRunServiceDeleteRunDefault(code int) *RunServiceDeleteRunDefault { } } -/*RunServiceDeleteRunDefault handles this case with default header values. +/* +RunServiceDeleteRunDefault describes a response with status code -1, with default header values. An unexpected error response. 
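The new IsClientError/IsServerError/Code helpers make it straightforward to classify failures without parsing error strings. A minimal sketch, assuming the RunServiceDeleteRun client method follows the same regenerated signature pattern; the host and run ID are placeholders.

package main

import (
	"fmt"

	httptransport "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service"
)

func main() {
	c := run_service.New(httptransport.New("localhost:8888", "/", []string{"http"}), strfmt.Default)

	params := run_service.NewRunServiceDeleteRunParams().WithRunID("some-run-id")
	if _, err := c.RunServiceDeleteRun(params, nil); err != nil {
		if def, ok := err.(*run_service.RunServiceDeleteRunDefault); ok {
			switch {
			case def.IsClientError():
				// 4xx: the run ID or the request itself is wrong; retrying will not help.
				fmt.Printf("client error %d: %s\n", def.Code(), def.GetPayload().Message)
			case def.IsServerError():
				// 5xx: the API server had trouble; a retry may succeed.
				fmt.Printf("server error %d: %s\n", def.Code(), def.GetPayload().Message)
			}
			return
		}
		fmt.Println("transport error:", err)
		return
	}
	fmt.Println("run deleted")
}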
*/ type RunServiceDeleteRunDefault struct { _statusCode int - Payload *run_model.RuntimeError + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this run service delete run default response has a 2xx status code +func (o *RunServiceDeleteRunDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this run service delete run default response has a 3xx status code +func (o *RunServiceDeleteRunDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this run service delete run default response has a 4xx status code +func (o *RunServiceDeleteRunDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this run service delete run default response has a 5xx status code +func (o *RunServiceDeleteRunDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this run service delete run default response a status code equal to that given +func (o *RunServiceDeleteRunDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the run service delete run default response @@ -94,12 +159,22 @@ func (o *RunServiceDeleteRunDefault) Code() int { } func (o *RunServiceDeleteRunDefault) Error() string { - return fmt.Sprintf("[DELETE /apis/v2beta1/runs/{run_id}][%d] RunService_DeleteRun default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v2beta1/runs/{run_id}][%d] RunService_DeleteRun default %s", o._statusCode, payload) +} + +func (o *RunServiceDeleteRunDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[DELETE /apis/v2beta1/runs/{run_id}][%d] RunService_DeleteRun default %s", o._statusCode, payload) +} + +func (o *RunServiceDeleteRunDefault) GetPayload() *run_model.GooglerpcStatus { + return o.Payload } func (o *RunServiceDeleteRunDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(run_model.RuntimeError) + o.Payload = new(run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_get_run_parameters.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_get_run_parameters.go index 275ab822898..0a8a491eddf 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_get_run_parameters.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_get_run_parameters.go @@ -13,62 +13,63 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewRunServiceGetRunParams creates a new RunServiceGetRunParams object -// with the default values initialized. +// NewRunServiceGetRunParams creates a new RunServiceGetRunParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewRunServiceGetRunParams() *RunServiceGetRunParams { - var () return &RunServiceGetRunParams{ - timeout: cr.DefaultTimeout, } } // NewRunServiceGetRunParamsWithTimeout creates a new RunServiceGetRunParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRunServiceGetRunParamsWithTimeout(timeout time.Duration) *RunServiceGetRunParams { - var () return &RunServiceGetRunParams{ - timeout: timeout, } } // NewRunServiceGetRunParamsWithContext creates a new RunServiceGetRunParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRunServiceGetRunParamsWithContext(ctx context.Context) *RunServiceGetRunParams { - var () return &RunServiceGetRunParams{ - Context: ctx, } } // NewRunServiceGetRunParamsWithHTTPClient creates a new RunServiceGetRunParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRunServiceGetRunParamsWithHTTPClient(client *http.Client) *RunServiceGetRunParams { - var () return &RunServiceGetRunParams{ HTTPClient: client, } } -/*RunServiceGetRunParams contains all the parameters to send to the API endpoint -for the run service get run operation typically these are written to a http.Request +/* +RunServiceGetRunParams contains all the parameters to send to the API endpoint + + for the run service get run operation. + + Typically these are written to a http.Request. */ type RunServiceGetRunParams struct { - /*ExperimentID - The ID of the parent experiment. + /* ExperimentID. + The ID of the parent experiment. */ ExperimentID *string - /*RunID - The ID of the run to be retrieved. + /* RunID. + + The ID of the run to be retrieved. */ RunID string @@ -77,6 +78,21 @@ type RunServiceGetRunParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the run service get run params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceGetRunParams) WithDefaults() *RunServiceGetRunParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the run service get run params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *RunServiceGetRunParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the run service get run params func (o *RunServiceGetRunParams) WithTimeout(timeout time.Duration) *RunServiceGetRunParams { o.SetTimeout(timeout) @@ -144,16 +160,17 @@ func (o *RunServiceGetRunParams) WriteToRequest(r runtime.ClientRequest, reg str // query param experiment_id var qrExperimentID string + if o.ExperimentID != nil { qrExperimentID = *o.ExperimentID } qExperimentID := qrExperimentID if qExperimentID != "" { + if err := r.SetQueryParam("experiment_id", qExperimentID); err != nil { return err } } - } // path param run_id diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_get_run_responses.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_get_run_responses.go index 841e336807d..821591a7a07 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_get_run_responses.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_get_run_responses.go @@ -6,14 +6,14 @@ package run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" ) // RunServiceGetRunReader is a Reader for the RunServiceGetRun structure. @@ -24,14 +24,12 @@ type RunServiceGetRunReader struct { // ReadResponse reads a server response into the received o. func (o *RunServiceGetRunReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRunServiceGetRunOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRunServiceGetRunDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRunServiceGetRunOK() *RunServiceGetRunOK { return &RunServiceGetRunOK{} } -/*RunServiceGetRunOK handles this case with default header values. +/* +RunServiceGetRunOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type RunServiceGetRunOK struct { Payload *run_model.V2beta1Run } +// IsSuccess returns true when this run service get run o k response has a 2xx status code +func (o *RunServiceGetRunOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this run service get run o k response has a 3xx status code +func (o *RunServiceGetRunOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this run service get run o k response has a 4xx status code +func (o *RunServiceGetRunOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this run service get run o k response has a 5xx status code +func (o *RunServiceGetRunOK) IsServerError() bool { + return false +} + +// IsCode returns true when this run service get run o k response a status code equal to that given +func (o *RunServiceGetRunOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the run service get run o k response +func (o *RunServiceGetRunOK) Code() int { + return 200 +} + func (o *RunServiceGetRunOK) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/runs/{run_id}][%d] runServiceGetRunOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/runs/{run_id}][%d] runServiceGetRunOK %s", 200, payload) +} + +func (o *RunServiceGetRunOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/runs/{run_id}][%d] runServiceGetRunOK %s", 200, payload) +} + +func (o *RunServiceGetRunOK) GetPayload() *run_model.V2beta1Run { + return o.Payload } func (o *RunServiceGetRunOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewRunServiceGetRunDefault(code int) *RunServiceGetRunDefault { } } -/*RunServiceGetRunDefault handles this case with default header values. +/* +RunServiceGetRunDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type RunServiceGetRunDefault struct { _statusCode int - Payload *run_model.RuntimeError + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this run service get run default response has a 2xx status code +func (o *RunServiceGetRunDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this run service get run default response has a 3xx status code +func (o *RunServiceGetRunDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this run service get run default response has a 4xx status code +func (o *RunServiceGetRunDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this run service get run default response has a 5xx status code +func (o *RunServiceGetRunDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this run service get run default response a status code equal to that given +func (o *RunServiceGetRunDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the run service get run default response @@ -96,12 +161,22 @@ func (o *RunServiceGetRunDefault) Code() int { } func (o *RunServiceGetRunDefault) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/runs/{run_id}][%d] RunService_GetRun default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/runs/{run_id}][%d] RunService_GetRun default %s", o._statusCode, payload) +} + +func (o *RunServiceGetRunDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/runs/{run_id}][%d] RunService_GetRun default %s", o._statusCode, payload) +} + +func (o *RunServiceGetRunDefault) GetPayload() *run_model.GooglerpcStatus { + return o.Payload } func (o *RunServiceGetRunDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(run_model.RuntimeError) + o.Payload = new(run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_list_runs_parameters.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_list_runs_parameters.go index 568fd926c09..dc64db86ee5 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_list_runs_parameters.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_list_runs_parameters.go @@ -13,89 +13,96 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" - - strfmt "github.com/go-openapi/strfmt" ) -// NewRunServiceListRunsParams creates a new RunServiceListRunsParams object -// with the default values initialized. +// NewRunServiceListRunsParams creates a new RunServiceListRunsParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewRunServiceListRunsParams() *RunServiceListRunsParams { - var () return &RunServiceListRunsParams{ - timeout: cr.DefaultTimeout, } } // NewRunServiceListRunsParamsWithTimeout creates a new RunServiceListRunsParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRunServiceListRunsParamsWithTimeout(timeout time.Duration) *RunServiceListRunsParams { - var () return &RunServiceListRunsParams{ - timeout: timeout, } } // NewRunServiceListRunsParamsWithContext creates a new RunServiceListRunsParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRunServiceListRunsParamsWithContext(ctx context.Context) *RunServiceListRunsParams { - var () return &RunServiceListRunsParams{ - Context: ctx, } } // NewRunServiceListRunsParamsWithHTTPClient creates a new RunServiceListRunsParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRunServiceListRunsParamsWithHTTPClient(client *http.Client) *RunServiceListRunsParams { - var () return &RunServiceListRunsParams{ HTTPClient: client, } } -/*RunServiceListRunsParams contains all the parameters to send to the API endpoint -for the run service list runs operation typically these are written to a http.Request +/* +RunServiceListRunsParams contains all the parameters to send to the API endpoint + + for the run service list runs operation. + + Typically these are written to a http.Request. */ type RunServiceListRunsParams struct { - /*ExperimentID - The ID of the parent experiment. If empty, response includes runs across all experiments. + /* ExperimentID. + The ID of the parent experiment. If empty, response includes runs across all experiments. */ ExperimentID *string - /*Filter - A url-encoded, JSON-serialized Filter protocol buffer (see - [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). + /* Filter. + + A url-encoded, JSON-serialized Filter protocol buffer (see + [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). */ Filter *string - /*Namespace - Optional input field. Filters based on the namespace. + /* Namespace. + + Optional input field. Filters based on the namespace. */ Namespace *string - /*PageSize - The number of runs to be listed per page. If there are more runs than this + + /* PageSize. + + The number of runs to be listed per page. If there are more runs than this number, the response message will contain a nextPageToken field you can use to fetch the next page. + Format: int32 */ PageSize *int32 - /*PageToken - A page token to request the next page of results. The token is acquired + + /* PageToken. + + A page token to request the next page of results. The token is acquired from the nextPageToken field of the response from the previous ListRuns call or can be omitted when fetching the first page. - */ PageToken *string - /*SortBy - Can be format of "field_name", "field_name asc" or "field_name desc" - (Example, "name asc" or "id desc"). Ascending by default. + /* SortBy. + + Can be format of "field_name", "field_name asc" or "field_name desc" + (Example, "name asc" or "id desc"). Ascending by default. 
*/ SortBy *string @@ -104,6 +111,21 @@ type RunServiceListRunsParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the run service list runs params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceListRunsParams) WithDefaults() *RunServiceListRunsParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the run service list runs params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceListRunsParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the run service list runs params func (o *RunServiceListRunsParams) WithTimeout(timeout time.Duration) *RunServiceListRunsParams { o.SetTimeout(timeout) @@ -215,96 +237,102 @@ func (o *RunServiceListRunsParams) WriteToRequest(r runtime.ClientRequest, reg s // query param experiment_id var qrExperimentID string + if o.ExperimentID != nil { qrExperimentID = *o.ExperimentID } qExperimentID := qrExperimentID if qExperimentID != "" { + if err := r.SetQueryParam("experiment_id", qExperimentID); err != nil { return err } } - } if o.Filter != nil { // query param filter var qrFilter string + if o.Filter != nil { qrFilter = *o.Filter } qFilter := qrFilter if qFilter != "" { + if err := r.SetQueryParam("filter", qFilter); err != nil { return err } } - } if o.Namespace != nil { // query param namespace var qrNamespace string + if o.Namespace != nil { qrNamespace = *o.Namespace } qNamespace := qrNamespace if qNamespace != "" { + if err := r.SetQueryParam("namespace", qNamespace); err != nil { return err } } - } if o.PageSize != nil { // query param page_size var qrPageSize int32 + if o.PageSize != nil { qrPageSize = *o.PageSize } qPageSize := swag.FormatInt32(qrPageSize) if qPageSize != "" { + if err := r.SetQueryParam("page_size", qPageSize); err != nil { return err } } - } if o.PageToken != nil { // query param page_token var qrPageToken string + if o.PageToken != nil { qrPageToken = *o.PageToken } qPageToken := qrPageToken if qPageToken != "" { + if err := r.SetQueryParam("page_token", qPageToken); err != nil { return err } } - } if o.SortBy != nil { // query param sort_by var qrSortBy string + if o.SortBy != nil { qrSortBy = *o.SortBy } qSortBy := qrSortBy if qSortBy != "" { + if err := r.SetQueryParam("sort_by", qSortBy); err != nil { return err } } - } if len(res) > 0 { diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_list_runs_responses.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_list_runs_responses.go index bb021bc140a..f4f4a657d6f 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_list_runs_responses.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_list_runs_responses.go @@ -6,14 +6,14 @@ package run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" ) // RunServiceListRunsReader is a Reader for the RunServiceListRuns structure. 
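The ListRuns parameters above (namespace, page_size, page_token, sort_by, filter) support the usual pagination loop. A hedged sketch, assuming the per-field With* builders that go-swagger normally generates alongside these parameters; the host, namespace, and sort key are placeholders, and the optional filter parameter is omitted for brevity.

package main

import (
	"fmt"

	httptransport "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/swag"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service"
)

func main() {
	c := run_service.New(httptransport.New("localhost:8888", "/", []string{"http"}), strfmt.Default)

	pageToken := ""
	for {
		params := run_service.NewRunServiceListRunsParams().
			WithNamespace(swag.String("kubeflow-user-example-com")).
			WithPageSize(swag.Int32(50)).
			WithPageToken(swag.String(pageToken)).
			WithSortBy(swag.String("created_at desc"))

		resp, err := c.RunServiceListRuns(params, nil)
		if err != nil {
			fmt.Println("list runs failed:", err)
			return
		}
		for _, r := range resp.GetPayload().Runs {
			fmt.Println(r.RunID, r.DisplayName)
		}

		// An empty nextPageToken means the last page has been fetched.
		pageToken = resp.GetPayload().NextPageToken
		if pageToken == "" {
			break
		}
	}
}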
@@ -24,14 +24,12 @@ type RunServiceListRunsReader struct { // ReadResponse reads a server response into the received o. func (o *RunServiceListRunsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRunServiceListRunsOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRunServiceListRunsDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRunServiceListRunsOK() *RunServiceListRunsOK { return &RunServiceListRunsOK{} } -/*RunServiceListRunsOK handles this case with default header values. +/* +RunServiceListRunsOK describes a response with status code 200, with default header values. A successful response. */ @@ -57,8 +56,48 @@ type RunServiceListRunsOK struct { Payload *run_model.V2beta1ListRunsResponse } +// IsSuccess returns true when this run service list runs o k response has a 2xx status code +func (o *RunServiceListRunsOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this run service list runs o k response has a 3xx status code +func (o *RunServiceListRunsOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this run service list runs o k response has a 4xx status code +func (o *RunServiceListRunsOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this run service list runs o k response has a 5xx status code +func (o *RunServiceListRunsOK) IsServerError() bool { + return false +} + +// IsCode returns true when this run service list runs o k response a status code equal to that given +func (o *RunServiceListRunsOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the run service list runs o k response +func (o *RunServiceListRunsOK) Code() int { + return 200 +} + func (o *RunServiceListRunsOK) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/runs][%d] runServiceListRunsOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/runs][%d] runServiceListRunsOK %s", 200, payload) +} + +func (o *RunServiceListRunsOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/runs][%d] runServiceListRunsOK %s", 200, payload) +} + +func (o *RunServiceListRunsOK) GetPayload() *run_model.V2beta1ListRunsResponse { + return o.Payload } func (o *RunServiceListRunsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewRunServiceListRunsDefault(code int) *RunServiceListRunsDefault { } } -/*RunServiceListRunsDefault handles this case with default header values. +/* +RunServiceListRunsDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type RunServiceListRunsDefault struct { _statusCode int - Payload *run_model.RuntimeError + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this run service list runs default response has a 2xx status code +func (o *RunServiceListRunsDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this run service list runs default response has a 3xx status code +func (o *RunServiceListRunsDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this run service list runs default response has a 4xx status code +func (o *RunServiceListRunsDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this run service list runs default response has a 5xx status code +func (o *RunServiceListRunsDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this run service list runs default response a status code equal to that given +func (o *RunServiceListRunsDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the run service list runs default response @@ -96,12 +161,22 @@ func (o *RunServiceListRunsDefault) Code() int { } func (o *RunServiceListRunsDefault) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/runs][%d] RunService_ListRuns default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/runs][%d] RunService_ListRuns default %s", o._statusCode, payload) +} + +func (o *RunServiceListRunsDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/runs][%d] RunService_ListRuns default %s", o._statusCode, payload) +} + +func (o *RunServiceListRunsDefault) GetPayload() *run_model.GooglerpcStatus { + return o.Payload } func (o *RunServiceListRunsDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(run_model.RuntimeError) + o.Payload = new(run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_read_artifact_parameters.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_read_artifact_parameters.go index 004a11ed3ef..fd8c3f85afd 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_read_artifact_parameters.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_read_artifact_parameters.go @@ -13,72 +13,75 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewRunServiceReadArtifactParams creates a new RunServiceReadArtifactParams object -// with the default values initialized. +// NewRunServiceReadArtifactParams creates a new RunServiceReadArtifactParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewRunServiceReadArtifactParams() *RunServiceReadArtifactParams { - var () return &RunServiceReadArtifactParams{ - timeout: cr.DefaultTimeout, } } // NewRunServiceReadArtifactParamsWithTimeout creates a new RunServiceReadArtifactParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRunServiceReadArtifactParamsWithTimeout(timeout time.Duration) *RunServiceReadArtifactParams { - var () return &RunServiceReadArtifactParams{ - timeout: timeout, } } // NewRunServiceReadArtifactParamsWithContext creates a new RunServiceReadArtifactParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRunServiceReadArtifactParamsWithContext(ctx context.Context) *RunServiceReadArtifactParams { - var () return &RunServiceReadArtifactParams{ - Context: ctx, } } // NewRunServiceReadArtifactParamsWithHTTPClient creates a new RunServiceReadArtifactParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRunServiceReadArtifactParamsWithHTTPClient(client *http.Client) *RunServiceReadArtifactParams { - var () return &RunServiceReadArtifactParams{ HTTPClient: client, } } -/*RunServiceReadArtifactParams contains all the parameters to send to the API endpoint -for the run service read artifact operation typically these are written to a http.Request +/* +RunServiceReadArtifactParams contains all the parameters to send to the API endpoint + + for the run service read artifact operation. + + Typically these are written to a http.Request. */ type RunServiceReadArtifactParams struct { - /*ArtifactName - Name of the artifact. + /* ArtifactName. + Name of the artifact. */ ArtifactName string - /*ExperimentID - The ID of the parent experiment. + /* ExperimentID. + + The ID of the parent experiment. */ ExperimentID *string - /*NodeID - ID of the running node. + /* NodeID. + + ID of the running node. */ NodeID string - /*RunID - ID of the run. + /* RunID. + + ID of the run. */ RunID string @@ -87,6 +90,21 @@ type RunServiceReadArtifactParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the run service read artifact params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceReadArtifactParams) WithDefaults() *RunServiceReadArtifactParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the run service read artifact params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *RunServiceReadArtifactParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the run service read artifact params func (o *RunServiceReadArtifactParams) WithTimeout(timeout time.Duration) *RunServiceReadArtifactParams { o.SetTimeout(timeout) @@ -181,16 +199,17 @@ func (o *RunServiceReadArtifactParams) WriteToRequest(r runtime.ClientRequest, r // query param experiment_id var qrExperimentID string + if o.ExperimentID != nil { qrExperimentID = *o.ExperimentID } qExperimentID := qrExperimentID if qExperimentID != "" { + if err := r.SetQueryParam("experiment_id", qExperimentID); err != nil { return err } } - } // path param node_id diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_read_artifact_responses.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_read_artifact_responses.go index ae1fc5c34df..0e86494ab93 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_read_artifact_responses.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_read_artifact_responses.go @@ -6,14 +6,14 @@ package run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" ) // RunServiceReadArtifactReader is a Reader for the RunServiceReadArtifact structure. @@ -24,14 +24,12 @@ type RunServiceReadArtifactReader struct { // ReadResponse reads a server response into the received o. func (o *RunServiceReadArtifactReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRunServiceReadArtifactOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRunServiceReadArtifactDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRunServiceReadArtifactOK() *RunServiceReadArtifactOK { return &RunServiceReadArtifactOK{} } -/*RunServiceReadArtifactOK handles this case with default header values. +/* +RunServiceReadArtifactOK describes a response with status code 200, with default header values. A successful response. 
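For completeness, a small sketch of the ReadArtifact call, which combines the three path parameters shown above (run_id, node_id, artifact_name); it assumes the regenerated RunServiceReadArtifact method, and all IDs, the artifact name, and the host are placeholders.

package main

import (
	"fmt"

	httptransport "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service"
)

func main() {
	c := run_service.New(httptransport.New("localhost:8888", "/", []string{"http"}), strfmt.Default)

	params := run_service.NewRunServiceReadArtifactParams().
		WithRunID("some-run-id").
		WithNodeID("some-node-id").
		WithArtifactName("main-logs")

	resp, err := c.RunServiceReadArtifact(params, nil)
	if err != nil {
		fmt.Println("read artifact failed:", err)
		return
	}
	// The response carries the artifact content as base64-encoded bytes.
	fmt.Printf("artifact payload: %d bytes\n", len(resp.GetPayload().Data))
}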
*/ @@ -57,8 +56,48 @@ type RunServiceReadArtifactOK struct { Payload *run_model.V2beta1ReadArtifactResponse } +// IsSuccess returns true when this run service read artifact o k response has a 2xx status code +func (o *RunServiceReadArtifactOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this run service read artifact o k response has a 3xx status code +func (o *RunServiceReadArtifactOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this run service read artifact o k response has a 4xx status code +func (o *RunServiceReadArtifactOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this run service read artifact o k response has a 5xx status code +func (o *RunServiceReadArtifactOK) IsServerError() bool { + return false +} + +// IsCode returns true when this run service read artifact o k response a status code equal to that given +func (o *RunServiceReadArtifactOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the run service read artifact o k response +func (o *RunServiceReadArtifactOK) Code() int { + return 200 +} + func (o *RunServiceReadArtifactOK) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read][%d] runServiceReadArtifactOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read][%d] runServiceReadArtifactOK %s", 200, payload) +} + +func (o *RunServiceReadArtifactOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read][%d] runServiceReadArtifactOK %s", 200, payload) +} + +func (o *RunServiceReadArtifactOK) GetPayload() *run_model.V2beta1ReadArtifactResponse { + return o.Payload } func (o *RunServiceReadArtifactOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewRunServiceReadArtifactDefault(code int) *RunServiceReadArtifactDefault { } } -/*RunServiceReadArtifactDefault handles this case with default header values. +/* +RunServiceReadArtifactDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type RunServiceReadArtifactDefault struct { _statusCode int - Payload *run_model.RuntimeError + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this run service read artifact default response has a 2xx status code +func (o *RunServiceReadArtifactDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this run service read artifact default response has a 3xx status code +func (o *RunServiceReadArtifactDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this run service read artifact default response has a 4xx status code +func (o *RunServiceReadArtifactDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this run service read artifact default response has a 5xx status code +func (o *RunServiceReadArtifactDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this run service read artifact default response a status code equal to that given +func (o *RunServiceReadArtifactDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the run service read artifact default response @@ -96,12 +161,22 @@ func (o *RunServiceReadArtifactDefault) Code() int { } func (o *RunServiceReadArtifactDefault) Error() string { - return fmt.Sprintf("[GET /apis/v2beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read][%d] RunService_ReadArtifact default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read][%d] RunService_ReadArtifact default %s", o._statusCode, payload) +} + +func (o *RunServiceReadArtifactDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read][%d] RunService_ReadArtifact default %s", o._statusCode, payload) +} + +func (o *RunServiceReadArtifactDefault) GetPayload() *run_model.GooglerpcStatus { + return o.Payload } func (o *RunServiceReadArtifactDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(run_model.RuntimeError) + o.Payload = new(run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_retry_run_parameters.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_retry_run_parameters.go index 51d0e8634ea..3c69975e4f2 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_retry_run_parameters.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_retry_run_parameters.go @@ -13,57 +13,63 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewRunServiceRetryRunParams creates a new RunServiceRetryRunParams object -// with the default values initialized. +// NewRunServiceRetryRunParams creates a new RunServiceRetryRunParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewRunServiceRetryRunParams() *RunServiceRetryRunParams { - var () return &RunServiceRetryRunParams{ - timeout: cr.DefaultTimeout, } } // NewRunServiceRetryRunParamsWithTimeout creates a new RunServiceRetryRunParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRunServiceRetryRunParamsWithTimeout(timeout time.Duration) *RunServiceRetryRunParams { - var () return &RunServiceRetryRunParams{ - timeout: timeout, } } // NewRunServiceRetryRunParamsWithContext creates a new RunServiceRetryRunParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRunServiceRetryRunParamsWithContext(ctx context.Context) *RunServiceRetryRunParams { - var () return &RunServiceRetryRunParams{ - Context: ctx, } } // NewRunServiceRetryRunParamsWithHTTPClient creates a new RunServiceRetryRunParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRunServiceRetryRunParamsWithHTTPClient(client *http.Client) *RunServiceRetryRunParams { - var () return &RunServiceRetryRunParams{ HTTPClient: client, } } -/*RunServiceRetryRunParams contains all the parameters to send to the API endpoint -for the run service retry run operation typically these are written to a http.Request +/* +RunServiceRetryRunParams contains all the parameters to send to the API endpoint + + for the run service retry run operation. + + Typically these are written to a http.Request. */ type RunServiceRetryRunParams struct { - /*RunID - The ID of the run to be retried. + /* ExperimentID. + The ID of the parent experiment. + */ + ExperimentID *string + + /* RunID. + + The ID of the run to be retried. */ RunID string @@ -72,6 +78,21 @@ type RunServiceRetryRunParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the run service retry run params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceRetryRunParams) WithDefaults() *RunServiceRetryRunParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the run service retry run params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *RunServiceRetryRunParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the run service retry run params func (o *RunServiceRetryRunParams) WithTimeout(timeout time.Duration) *RunServiceRetryRunParams { o.SetTimeout(timeout) @@ -105,6 +126,17 @@ func (o *RunServiceRetryRunParams) SetHTTPClient(client *http.Client) { o.HTTPClient = client } +// WithExperimentID adds the experimentID to the run service retry run params +func (o *RunServiceRetryRunParams) WithExperimentID(experimentID *string) *RunServiceRetryRunParams { + o.SetExperimentID(experimentID) + return o +} + +// SetExperimentID adds the experimentId to the run service retry run params +func (o *RunServiceRetryRunParams) SetExperimentID(experimentID *string) { + o.ExperimentID = experimentID +} + // WithRunID adds the runID to the run service retry run params func (o *RunServiceRetryRunParams) WithRunID(runID string) *RunServiceRetryRunParams { o.SetRunID(runID) @@ -124,6 +156,23 @@ func (o *RunServiceRetryRunParams) WriteToRequest(r runtime.ClientRequest, reg s } var res []error + if o.ExperimentID != nil { + + // query param experiment_id + var qrExperimentID string + + if o.ExperimentID != nil { + qrExperimentID = *o.ExperimentID + } + qExperimentID := qrExperimentID + if qExperimentID != "" { + + if err := r.SetQueryParam("experiment_id", qExperimentID); err != nil { + return err + } + } + } + // path param run_id if err := r.SetPathParam("run_id", o.RunID); err != nil { return err diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_retry_run_responses.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_retry_run_responses.go index bae568f14fc..16a10e35096 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_retry_run_responses.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_retry_run_responses.go @@ -6,14 +6,14 @@ package run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" ) // RunServiceRetryRunReader is a Reader for the RunServiceRetryRun structure. @@ -24,14 +24,12 @@ type RunServiceRetryRunReader struct { // ReadResponse reads a server response into the received o. func (o *RunServiceRetryRunReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRunServiceRetryRunOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRunServiceRetryRunDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRunServiceRetryRunOK() *RunServiceRetryRunOK { return &RunServiceRetryRunOK{} } -/*RunServiceRetryRunOK handles this case with default header values. +/* +RunServiceRetryRunOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type RunServiceRetryRunOK struct { Payload interface{} } +// IsSuccess returns true when this run service retry run o k response has a 2xx status code +func (o *RunServiceRetryRunOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this run service retry run o k response has a 3xx status code +func (o *RunServiceRetryRunOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this run service retry run o k response has a 4xx status code +func (o *RunServiceRetryRunOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this run service retry run o k response has a 5xx status code +func (o *RunServiceRetryRunOK) IsServerError() bool { + return false +} + +// IsCode returns true when this run service retry run o k response a status code equal to that given +func (o *RunServiceRetryRunOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the run service retry run o k response +func (o *RunServiceRetryRunOK) Code() int { + return 200 +} + func (o *RunServiceRetryRunOK) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:retry][%d] runServiceRetryRunOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:retry][%d] runServiceRetryRunOK %s", 200, payload) +} + +func (o *RunServiceRetryRunOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:retry][%d] runServiceRetryRunOK %s", 200, payload) +} + +func (o *RunServiceRetryRunOK) GetPayload() interface{} { + return o.Payload } func (o *RunServiceRetryRunOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewRunServiceRetryRunDefault(code int) *RunServiceRetryRunDefault { } } -/*RunServiceRetryRunDefault handles this case with default header values. +/* +RunServiceRetryRunDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type RunServiceRetryRunDefault struct { _statusCode int - Payload *run_model.RuntimeError + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this run service retry run default response has a 2xx status code +func (o *RunServiceRetryRunDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this run service retry run default response has a 3xx status code +func (o *RunServiceRetryRunDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this run service retry run default response has a 4xx status code +func (o *RunServiceRetryRunDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this run service retry run default response has a 5xx status code +func (o *RunServiceRetryRunDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this run service retry run default response a status code equal to that given +func (o *RunServiceRetryRunDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the run service retry run default response @@ -94,12 +159,22 @@ func (o *RunServiceRetryRunDefault) Code() int { } func (o *RunServiceRetryRunDefault) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:retry][%d] RunService_RetryRun default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:retry][%d] RunService_RetryRun default %s", o._statusCode, payload) +} + +func (o *RunServiceRetryRunDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:retry][%d] RunService_RetryRun default %s", o._statusCode, payload) +} + +func (o *RunServiceRetryRunDefault) GetPayload() *run_model.GooglerpcStatus { + return o.Payload } func (o *RunServiceRetryRunDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(run_model.RuntimeError) + o.Payload = new(run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_terminate_run_parameters.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_terminate_run_parameters.go index 7f48628f086..b907f7e1f80 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_terminate_run_parameters.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_terminate_run_parameters.go @@ -13,57 +13,63 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewRunServiceTerminateRunParams creates a new RunServiceTerminateRunParams object -// with the default values initialized. +// NewRunServiceTerminateRunParams creates a new RunServiceTerminateRunParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewRunServiceTerminateRunParams() *RunServiceTerminateRunParams { - var () return &RunServiceTerminateRunParams{ - timeout: cr.DefaultTimeout, } } // NewRunServiceTerminateRunParamsWithTimeout creates a new RunServiceTerminateRunParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRunServiceTerminateRunParamsWithTimeout(timeout time.Duration) *RunServiceTerminateRunParams { - var () return &RunServiceTerminateRunParams{ - timeout: timeout, } } // NewRunServiceTerminateRunParamsWithContext creates a new RunServiceTerminateRunParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRunServiceTerminateRunParamsWithContext(ctx context.Context) *RunServiceTerminateRunParams { - var () return &RunServiceTerminateRunParams{ - Context: ctx, } } // NewRunServiceTerminateRunParamsWithHTTPClient creates a new RunServiceTerminateRunParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRunServiceTerminateRunParamsWithHTTPClient(client *http.Client) *RunServiceTerminateRunParams { - var () return &RunServiceTerminateRunParams{ HTTPClient: client, } } -/*RunServiceTerminateRunParams contains all the parameters to send to the API endpoint -for the run service terminate run operation typically these are written to a http.Request +/* +RunServiceTerminateRunParams contains all the parameters to send to the API endpoint + + for the run service terminate run operation. + + Typically these are written to a http.Request. */ type RunServiceTerminateRunParams struct { - /*RunID - The ID of the run to be terminated. + /* ExperimentID. + The ID of the parent experiment. + */ + ExperimentID *string + + /* RunID. + + The ID of the run to be terminated. */ RunID string @@ -72,6 +78,21 @@ type RunServiceTerminateRunParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the run service terminate run params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceTerminateRunParams) WithDefaults() *RunServiceTerminateRunParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the run service terminate run params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *RunServiceTerminateRunParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the run service terminate run params func (o *RunServiceTerminateRunParams) WithTimeout(timeout time.Duration) *RunServiceTerminateRunParams { o.SetTimeout(timeout) @@ -105,6 +126,17 @@ func (o *RunServiceTerminateRunParams) SetHTTPClient(client *http.Client) { o.HTTPClient = client } +// WithExperimentID adds the experimentID to the run service terminate run params +func (o *RunServiceTerminateRunParams) WithExperimentID(experimentID *string) *RunServiceTerminateRunParams { + o.SetExperimentID(experimentID) + return o +} + +// SetExperimentID adds the experimentId to the run service terminate run params +func (o *RunServiceTerminateRunParams) SetExperimentID(experimentID *string) { + o.ExperimentID = experimentID +} + // WithRunID adds the runID to the run service terminate run params func (o *RunServiceTerminateRunParams) WithRunID(runID string) *RunServiceTerminateRunParams { o.SetRunID(runID) @@ -124,6 +156,23 @@ func (o *RunServiceTerminateRunParams) WriteToRequest(r runtime.ClientRequest, r } var res []error + if o.ExperimentID != nil { + + // query param experiment_id + var qrExperimentID string + + if o.ExperimentID != nil { + qrExperimentID = *o.ExperimentID + } + qExperimentID := qrExperimentID + if qExperimentID != "" { + + if err := r.SetQueryParam("experiment_id", qExperimentID); err != nil { + return err + } + } + } + // path param run_id if err := r.SetPathParam("run_id", o.RunID); err != nil { return err diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_terminate_run_responses.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_terminate_run_responses.go index b5aae3ba469..c8e16134a2c 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_terminate_run_responses.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_terminate_run_responses.go @@ -6,14 +6,14 @@ package run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" ) // RunServiceTerminateRunReader is a Reader for the RunServiceTerminateRun structure. @@ -24,14 +24,12 @@ type RunServiceTerminateRunReader struct { // ReadResponse reads a server response into the received o. func (o *RunServiceTerminateRunReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRunServiceTerminateRunOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRunServiceTerminateRunDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRunServiceTerminateRunOK() *RunServiceTerminateRunOK { return &RunServiceTerminateRunOK{} } -/*RunServiceTerminateRunOK handles this case with default header values. +/* +RunServiceTerminateRunOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type RunServiceTerminateRunOK struct { Payload interface{} } +// IsSuccess returns true when this run service terminate run o k response has a 2xx status code +func (o *RunServiceTerminateRunOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this run service terminate run o k response has a 3xx status code +func (o *RunServiceTerminateRunOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this run service terminate run o k response has a 4xx status code +func (o *RunServiceTerminateRunOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this run service terminate run o k response has a 5xx status code +func (o *RunServiceTerminateRunOK) IsServerError() bool { + return false +} + +// IsCode returns true when this run service terminate run o k response a status code equal to that given +func (o *RunServiceTerminateRunOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the run service terminate run o k response +func (o *RunServiceTerminateRunOK) Code() int { + return 200 +} + func (o *RunServiceTerminateRunOK) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:terminate][%d] runServiceTerminateRunOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:terminate][%d] runServiceTerminateRunOK %s", 200, payload) +} + +func (o *RunServiceTerminateRunOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:terminate][%d] runServiceTerminateRunOK %s", 200, payload) +} + +func (o *RunServiceTerminateRunOK) GetPayload() interface{} { + return o.Payload } func (o *RunServiceTerminateRunOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewRunServiceTerminateRunDefault(code int) *RunServiceTerminateRunDefault { } } -/*RunServiceTerminateRunDefault handles this case with default header values. +/* +RunServiceTerminateRunDefault describes a response with status code -1, with default header values. An unexpected error response. 
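The regenerated retry and terminate operations above (and unarchive, below) gain an optional experiment_id query parameter plus the WithDefaults/SetDefaults helpers. A minimal sketch of how a caller might build the new parameters, assuming the generated package is imported under its default path; the run and experiment IDs are placeholders and the final hand-off to the generated run_service client is elided:

package main

import (
	"fmt"
	"time"

	"github.com/go-openapi/swag"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service"
)

func main() {
	// experiment_id is optional, so the setter takes a *string; swag.String
	// builds the pointer. "run-123" and "exp-456" are placeholder IDs.
	params := run_service.NewRunServiceRetryRunParams().
		WithTimeout(30 * time.Second).
		WithRunID("run-123").
		WithExperimentID(swag.String("exp-456"))

	// SetDefaults is a no-op for this operation (no defaults are defined),
	// but calling it keeps call sites uniform across the regenerated params.
	params.SetDefaults()

	fmt.Println(params.RunID, *params.ExperimentID)
	// params would then be passed to the generated run_service client
	// (the client itself is not part of this diff excerpt).
}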
*/ type RunServiceTerminateRunDefault struct { _statusCode int - Payload *run_model.RuntimeError + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this run service terminate run default response has a 2xx status code +func (o *RunServiceTerminateRunDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this run service terminate run default response has a 3xx status code +func (o *RunServiceTerminateRunDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this run service terminate run default response has a 4xx status code +func (o *RunServiceTerminateRunDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this run service terminate run default response has a 5xx status code +func (o *RunServiceTerminateRunDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this run service terminate run default response a status code equal to that given +func (o *RunServiceTerminateRunDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the run service terminate run default response @@ -94,12 +159,22 @@ func (o *RunServiceTerminateRunDefault) Code() int { } func (o *RunServiceTerminateRunDefault) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:terminate][%d] RunService_TerminateRun default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:terminate][%d] RunService_TerminateRun default %s", o._statusCode, payload) +} + +func (o *RunServiceTerminateRunDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:terminate][%d] RunService_TerminateRun default %s", o._statusCode, payload) +} + +func (o *RunServiceTerminateRunDefault) GetPayload() *run_model.GooglerpcStatus { + return o.Payload } func (o *RunServiceTerminateRunDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(run_model.RuntimeError) + o.Payload = new(run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_unarchive_run_parameters.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_unarchive_run_parameters.go index 0aa314d4577..1d6c455d201 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_unarchive_run_parameters.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_unarchive_run_parameters.go @@ -13,57 +13,63 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" ) -// NewRunServiceUnarchiveRunParams creates a new RunServiceUnarchiveRunParams object -// with the default values initialized. +// NewRunServiceUnarchiveRunParams creates a new RunServiceUnarchiveRunParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
func NewRunServiceUnarchiveRunParams() *RunServiceUnarchiveRunParams { - var () return &RunServiceUnarchiveRunParams{ - timeout: cr.DefaultTimeout, } } // NewRunServiceUnarchiveRunParamsWithTimeout creates a new RunServiceUnarchiveRunParams object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewRunServiceUnarchiveRunParamsWithTimeout(timeout time.Duration) *RunServiceUnarchiveRunParams { - var () return &RunServiceUnarchiveRunParams{ - timeout: timeout, } } // NewRunServiceUnarchiveRunParamsWithContext creates a new RunServiceUnarchiveRunParams object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewRunServiceUnarchiveRunParamsWithContext(ctx context.Context) *RunServiceUnarchiveRunParams { - var () return &RunServiceUnarchiveRunParams{ - Context: ctx, } } // NewRunServiceUnarchiveRunParamsWithHTTPClient creates a new RunServiceUnarchiveRunParams object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. func NewRunServiceUnarchiveRunParamsWithHTTPClient(client *http.Client) *RunServiceUnarchiveRunParams { - var () return &RunServiceUnarchiveRunParams{ HTTPClient: client, } } -/*RunServiceUnarchiveRunParams contains all the parameters to send to the API endpoint -for the run service unarchive run operation typically these are written to a http.Request +/* +RunServiceUnarchiveRunParams contains all the parameters to send to the API endpoint + + for the run service unarchive run operation. + + Typically these are written to a http.Request. */ type RunServiceUnarchiveRunParams struct { - /*RunID - The ID of the run to be restored. + /* ExperimentID. + The ID of the parent experiment. + */ + ExperimentID *string + + /* RunID. + + The ID of the run to be restored. */ RunID string @@ -72,6 +78,21 @@ type RunServiceUnarchiveRunParams struct { HTTPClient *http.Client } +// WithDefaults hydrates default values in the run service unarchive run params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *RunServiceUnarchiveRunParams) WithDefaults() *RunServiceUnarchiveRunParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the run service unarchive run params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *RunServiceUnarchiveRunParams) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the run service unarchive run params func (o *RunServiceUnarchiveRunParams) WithTimeout(timeout time.Duration) *RunServiceUnarchiveRunParams { o.SetTimeout(timeout) @@ -105,6 +126,17 @@ func (o *RunServiceUnarchiveRunParams) SetHTTPClient(client *http.Client) { o.HTTPClient = client } +// WithExperimentID adds the experimentID to the run service unarchive run params +func (o *RunServiceUnarchiveRunParams) WithExperimentID(experimentID *string) *RunServiceUnarchiveRunParams { + o.SetExperimentID(experimentID) + return o +} + +// SetExperimentID adds the experimentId to the run service unarchive run params +func (o *RunServiceUnarchiveRunParams) SetExperimentID(experimentID *string) { + o.ExperimentID = experimentID +} + // WithRunID adds the runID to the run service unarchive run params func (o *RunServiceUnarchiveRunParams) WithRunID(runID string) *RunServiceUnarchiveRunParams { o.SetRunID(runID) @@ -124,6 +156,23 @@ func (o *RunServiceUnarchiveRunParams) WriteToRequest(r runtime.ClientRequest, r } var res []error + if o.ExperimentID != nil { + + // query param experiment_id + var qrExperimentID string + + if o.ExperimentID != nil { + qrExperimentID = *o.ExperimentID + } + qExperimentID := qrExperimentID + if qExperimentID != "" { + + if err := r.SetQueryParam("experiment_id", qExperimentID); err != nil { + return err + } + } + } + // path param run_id if err := r.SetPathParam("run_id", o.RunID); err != nil { return err diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_unarchive_run_responses.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_unarchive_run_responses.go index 7460f10542a..15d80623ebe 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_unarchive_run_responses.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_unarchive_run_responses.go @@ -6,14 +6,14 @@ package run_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - run_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" ) // RunServiceUnarchiveRunReader is a Reader for the RunServiceUnarchiveRun structure. @@ -24,14 +24,12 @@ type RunServiceUnarchiveRunReader struct { // ReadResponse reads a server response into the received o. func (o *RunServiceUnarchiveRunReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewRunServiceUnarchiveRunOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewRunServiceUnarchiveRunDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewRunServiceUnarchiveRunOK() *RunServiceUnarchiveRunOK { return &RunServiceUnarchiveRunOK{} } -/*RunServiceUnarchiveRunOK handles this case with default header values. +/* +RunServiceUnarchiveRunOK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type RunServiceUnarchiveRunOK struct { Payload interface{} } +// IsSuccess returns true when this run service unarchive run o k response has a 2xx status code +func (o *RunServiceUnarchiveRunOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this run service unarchive run o k response has a 3xx status code +func (o *RunServiceUnarchiveRunOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this run service unarchive run o k response has a 4xx status code +func (o *RunServiceUnarchiveRunOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this run service unarchive run o k response has a 5xx status code +func (o *RunServiceUnarchiveRunOK) IsServerError() bool { + return false +} + +// IsCode returns true when this run service unarchive run o k response a status code equal to that given +func (o *RunServiceUnarchiveRunOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the run service unarchive run o k response +func (o *RunServiceUnarchiveRunOK) Code() int { + return 200 +} + func (o *RunServiceUnarchiveRunOK) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:unarchive][%d] runServiceUnarchiveRunOK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:unarchive][%d] runServiceUnarchiveRunOK %s", 200, payload) +} + +func (o *RunServiceUnarchiveRunOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:unarchive][%d] runServiceUnarchiveRunOK %s", 200, payload) +} + +func (o *RunServiceUnarchiveRunOK) GetPayload() interface{} { + return o.Payload } func (o *RunServiceUnarchiveRunOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -78,14 +117,40 @@ func NewRunServiceUnarchiveRunDefault(code int) *RunServiceUnarchiveRunDefault { } } -/*RunServiceUnarchiveRunDefault handles this case with default header values. +/* +RunServiceUnarchiveRunDefault describes a response with status code -1, with default header values. An unexpected error response. 
*/ type RunServiceUnarchiveRunDefault struct { _statusCode int - Payload *run_model.RuntimeError + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this run service unarchive run default response has a 2xx status code +func (o *RunServiceUnarchiveRunDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this run service unarchive run default response has a 3xx status code +func (o *RunServiceUnarchiveRunDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this run service unarchive run default response has a 4xx status code +func (o *RunServiceUnarchiveRunDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this run service unarchive run default response has a 5xx status code +func (o *RunServiceUnarchiveRunDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this run service unarchive run default response a status code equal to that given +func (o *RunServiceUnarchiveRunDefault) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the run service unarchive run default response @@ -94,12 +159,22 @@ func (o *RunServiceUnarchiveRunDefault) Code() int { } func (o *RunServiceUnarchiveRunDefault) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:unarchive][%d] RunService_UnarchiveRun default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:unarchive][%d] RunService_UnarchiveRun default %s", o._statusCode, payload) +} + +func (o *RunServiceUnarchiveRunDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/runs/{run_id}:unarchive][%d] RunService_UnarchiveRun default %s", o._statusCode, payload) +} + +func (o *RunServiceUnarchiveRunDefault) GetPayload() *run_model.GooglerpcStatus { + return o.Payload } func (o *RunServiceUnarchiveRunDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(run_model.RuntimeError) + o.Payload = new(run_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/run_model/googlerpc_status.go b/backend/api/v2beta1/go_http_client/run_model/googlerpc_status.go index caf7102255e..90b50d42007 100644 --- a/backend/api/v2beta1/go_http_client/run_model/googlerpc_status.go +++ b/backend/api/v2beta1/go_http_client/run_model/googlerpc_status.go @@ -6,11 +6,11 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) @@ -21,6 +21,7 @@ import ( // // You can find out more about this error model and how to work with it in the // [API Design Guide](https://cloud.google.com/apis/design/errors). 
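With the default ("unexpected error") responses above now carrying *run_model.GooglerpcStatus instead of *run_model.RuntimeError, and with the new IsClientError/IsServerError/Code helpers, error handling at call sites shifts slightly. A sketch of one way a caller might inspect such a response; the handleDefault helper name is illustrative, and only methods visible in this diff are used:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service"
)

// handleDefault inspects a regenerated "default" (non-200) response for the
// unarchive operation; the same pattern applies to retry, terminate, etc.
func handleDefault(resp *run_service.RunServiceUnarchiveRunDefault) error {
	payload, _ := json.Marshal(resp.GetPayload()) // *run_model.GooglerpcStatus
	switch {
	case resp.IsClientError():
		return fmt.Errorf("unarchive rejected (HTTP %d): %s", resp.Code(), payload)
	case resp.IsServerError():
		return fmt.Errorf("unarchive failed server-side (HTTP %d): %s", resp.Code(), payload)
	default:
		return nil
	}
}

func main() {
	// Construct a bare default response just to exercise the helper.
	_ = handleDefault(run_service.NewRunServiceUnarchiveRunDefault(404))
}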
+// // swagger:model googlerpcStatus type GooglerpcStatus struct { @@ -52,7 +53,6 @@ func (m *GooglerpcStatus) Validate(formats strfmt.Registry) error { } func (m *GooglerpcStatus) validateDetails(formats strfmt.Registry) error { - if swag.IsZero(m.Details) { // not required return nil } @@ -66,6 +66,47 @@ func (m *GooglerpcStatus) validateDetails(formats strfmt.Registry) error { if err := m.Details[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this googlerpc status based on the context it is used +func (m *GooglerpcStatus) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateDetails(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) contextValidateDetails(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Details); i++ { + + if m.Details[i] != nil { + + if swag.IsZero(m.Details[i]) { // not required + return nil + } + + if err := m.Details[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) } return err } diff --git a/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_child_task.go b/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_child_task.go index b58e50cc41f..52c8eda15d7 100644 --- a/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_child_task.go +++ b/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_child_task.go @@ -6,13 +6,15 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // PipelineTaskDetailChildTask A dependent task that requires this one to succeed. // Represented by either task_id or pod_name. 
+// // swagger:model PipelineTaskDetailChildTask type PipelineTaskDetailChildTask struct { @@ -29,6 +31,11 @@ func (m *PipelineTaskDetailChildTask) Validate(formats strfmt.Registry) error { return nil } +// ContextValidate validates this pipeline task detail child task based on context it is used +func (m *PipelineTaskDetailChildTask) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *PipelineTaskDetailChildTask) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v2beta1/go_http_client/run_model/protobuf_any.go b/backend/api/v2beta1/go_http_client/run_model/protobuf_any.go index 9227b6f36df..2b44a6e53c0 100644 --- a/backend/api/v2beta1/go_http_client/run_model/protobuf_any.go +++ b/backend/api/v2beta1/go_http_client/run_model/protobuf_any.go @@ -6,9 +6,10 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "encoding/json" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) @@ -20,45 +21,49 @@ import ( // // Example 1: Pack and unpack a message in C++. // -// Foo foo = ...; -// Any any; -// any.PackFrom(foo); -// ... -// if (any.UnpackTo(&foo)) { -// ... -// } +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } // // Example 2: Pack and unpack a message in Java. // -// Foo foo = ...; -// Any any = Any.pack(foo); -// ... -// if (any.is(Foo.class)) { -// foo = any.unpack(Foo.class); -// } -// -// Example 3: Pack and unpack a message in Python. -// -// foo = Foo(...) -// any = Any() -// any.Pack(foo) -// ... -// if any.Is(Foo.DESCRIPTOR): -// any.Unpack(foo) -// ... -// -// Example 4: Pack and unpack a message in Go -// -// foo := &pb.Foo{...} -// any, err := anypb.New(foo) -// if err != nil { -// ... -// } -// ... -// foo := &pb.Foo{} -// if err := any.UnmarshalTo(foo); err != nil { -// ... -// } +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// // or ... +// if (any.isSameTypeAs(Foo.getDefaultInstance())) { +// foo = any.unpack(Foo.getDefaultInstance()); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... +// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := anypb.New(foo) +// if err != nil { +// ... +// } +// ... +// foo := &pb.Foo{} +// if err := any.UnmarshalTo(foo); err != nil { +// ... +// } // // The pack methods provided by protobuf library will by default use // 'type.googleapis.com/full.type.name' as the type URL and the unpack @@ -66,34 +71,34 @@ import ( // in the type URL, for example "foo.bar.com/x/y.z" will yield type // name "y.z". // -// // JSON -// +// ==== // The JSON representation of an `Any` value uses the regular // representation of the deserialized, embedded message, with an // additional field `@type` which contains the type URL. 
Example: // -// package google.profile; -// message Person { -// string first_name = 1; -// string last_name = 2; -// } +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } // -// { -// "@type": "type.googleapis.com/google.profile.Person", -// "firstName": , -// "lastName": -// } +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } // // If the embedded message type is well-known and has a custom JSON // representation, that representation will be embedded adding a field // `value` which holds the custom JSON in addition to the `@type` // field. Example (for message [google.protobuf.Duration][]): // -// { -// "@type": "type.googleapis.com/google.protobuf.Duration", -// "value": "1.212s" -// } +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// // swagger:model protobufAny type ProtobufAny struct { @@ -120,39 +125,151 @@ type ProtobufAny struct { // // Note: this functionality is not currently available in the official // protobuf release, and it is not used for type URLs beginning with - // type.googleapis.com. + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. // // Schemes other than `http`, `https` (or the empty scheme) might be // used with implementation specific semantics. - TypeURL string `json:"type_url,omitempty"` + AtType string `json:"@type,omitempty"` - // Must be a valid serialized protocol buffer of the above specified type. - // Format: byte - Value strfmt.Base64 `json:"value,omitempty"` + // protobuf any + ProtobufAny map[string]interface{} `json:"-"` } -// Validate validates this protobuf any -func (m *ProtobufAny) Validate(formats strfmt.Registry) error { - var res []error +// UnmarshalJSON unmarshals this object with additional properties from JSON +func (m *ProtobufAny) UnmarshalJSON(data []byte) error { + // stage 1, bind the properties + var stage1 struct { + + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. 
+ // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + AtType string `json:"@type,omitempty"` + } + if err := json.Unmarshal(data, &stage1); err != nil { + return err + } + var rcv ProtobufAny + + rcv.AtType = stage1.AtType + *m = rcv - if err := m.validateValue(formats); err != nil { - res = append(res, err) + // stage 2, remove properties and add to map + stage2 := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &stage2); err != nil { + return err } - if len(res) > 0 { - return errors.CompositeValidationError(res...) + delete(stage2, "@type") + // stage 3, add additional properties values + if len(stage2) > 0 { + result := make(map[string]interface{}) + for k, v := range stage2 { + var toadd interface{} + if err := json.Unmarshal(v, &toadd); err != nil { + return err + } + result[k] = toadd + } + m.ProtobufAny = result } + return nil } -func (m *ProtobufAny) validateValue(formats strfmt.Registry) error { +// MarshalJSON marshals this object with additional properties into a JSON object +func (m ProtobufAny) MarshalJSON() ([]byte, error) { + var stage1 struct { - if swag.IsZero(m.Value) { // not required - return nil + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. 
+ AtType string `json:"@type,omitempty"` } - // Format "byte" (base64 string) is already validated when unmarshalled + stage1.AtType = m.AtType + + // make JSON object for known properties + props, err := json.Marshal(stage1) + if err != nil { + return nil, err + } + + if len(m.ProtobufAny) == 0 { // no additional properties + return props, nil + } + + // make JSON object for the additional properties + additional, err := json.Marshal(m.ProtobufAny) + if err != nil { + return nil, err + } + + if len(props) < 3 { // "{}": only additional properties + return additional, nil + } + + // concatenate the 2 objects + return swag.ConcatJSON(props, additional), nil +} + +// Validate validates this protobuf any +func (m *ProtobufAny) Validate(formats strfmt.Registry) error { + return nil +} +// ContextValidate validates this protobuf any based on context it is used +func (m *ProtobufAny) ContextValidate(ctx context.Context, formats strfmt.Registry) error { return nil } diff --git a/backend/api/v2beta1/go_http_client/run_model/protobuf_null_value.go b/backend/api/v2beta1/go_http_client/run_model/protobuf_null_value.go index 852386abfff..22ebd8311f5 100644 --- a/backend/api/v2beta1/go_http_client/run_model/protobuf_null_value.go +++ b/backend/api/v2beta1/go_http_client/run_model/protobuf_null_value.go @@ -6,23 +6,33 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // ProtobufNullValue `NullValue` is a singleton enumeration to represent the null value for the // `Value` type union. // -// The JSON representation for `NullValue` is JSON `null`. +// The JSON representation for `NullValue` is JSON `null`. +// +// - NULL_VALUE: Null value. // -// - NULL_VALUE: Null value. // swagger:model protobufNullValue type ProtobufNullValue string +func NewProtobufNullValue(value ProtobufNullValue) *ProtobufNullValue { + return &value +} + +// Pointer returns a pointer to a freshly-allocated ProtobufNullValue. +func (m ProtobufNullValue) Pointer() *ProtobufNullValue { + return &m +} + const ( // ProtobufNullValueNULLVALUE captures enum value "NULL_VALUE" @@ -43,7 +53,7 @@ func init() { } func (m ProtobufNullValue) validateProtobufNullValueEnum(path, location string, value ProtobufNullValue) error { - if err := validate.Enum(path, location, value, protobufNullValueEnum); err != nil { + if err := validate.EnumCase(path, location, value, protobufNullValueEnum, true); err != nil { return err } return nil @@ -63,3 +73,8 @@ func (m ProtobufNullValue) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this protobuf null value based on context it is used +func (m ProtobufNullValue) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/runtime_error.go b/backend/api/v2beta1/go_http_client/run_model/runtime_error.go deleted file mode 100644 index 1556f42d328..00000000000 --- a/backend/api/v2beta1/go_http_client/run_model/runtime_error.go +++ /dev/null @@ -1,89 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package run_model - -// This file was generated by the swagger tool. 
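The rewritten ProtobufAny model above keeps the well-known @type property in AtType and stores every other JSON property in its additional-properties map, with the custom UnmarshalJSON/MarshalJSON pair doing the splitting and reassembly. A small round-trip sketch; the google.rpc.ErrorInfo type URL and the reason field are illustrative values only:

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model"
)

func main() {
	raw := []byte(`{"@type":"type.googleapis.com/google.rpc.ErrorInfo","reason":"QUOTA_EXCEEDED"}`)

	var detail run_model.ProtobufAny
	if err := json.Unmarshal(raw, &detail); err != nil {
		log.Fatal(err)
	}

	// "@type" is bound to AtType; the remaining properties land in the
	// ProtobufAny map of additional properties.
	fmt.Println(detail.AtType)                // type.googleapis.com/google.rpc.ErrorInfo
	fmt.Println(detail.ProtobufAny["reason"]) // QUOTA_EXCEEDED

	// MarshalJSON stitches the known field and the additional properties
	// back into a single JSON object.
	out, err := json.Marshal(detail)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(out))
}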
-// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "strconv" - - strfmt "github.com/go-openapi/strfmt" - - "github.com/go-openapi/errors" - "github.com/go-openapi/swag" -) - -// RuntimeError runtime error -// swagger:model runtimeError -type RuntimeError struct { - - // code - Code int32 `json:"code,omitempty"` - - // details - Details []*ProtobufAny `json:"details"` - - // error - Error string `json:"error,omitempty"` - - // message - Message string `json:"message,omitempty"` -} - -// Validate validates this runtime error -func (m *RuntimeError) Validate(formats strfmt.Registry) error { - var res []error - - if err := m.validateDetails(formats); err != nil { - res = append(res, err) - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} - -func (m *RuntimeError) validateDetails(formats strfmt.Registry) error { - - if swag.IsZero(m.Details) { // not required - return nil - } - - for i := 0; i < len(m.Details); i++ { - if swag.IsZero(m.Details[i]) { // not required - continue - } - - if m.Details[i] != nil { - if err := m.Details[i].Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("details" + "." + strconv.Itoa(i)) - } - return err - } - } - - } - - return nil -} - -// MarshalBinary interface implementation -func (m *RuntimeError) MarshalBinary() ([]byte, error) { - if m == nil { - return nil, nil - } - return swag.WriteJSON(m) -} - -// UnmarshalBinary interface implementation -func (m *RuntimeError) UnmarshalBinary(b []byte) error { - var res RuntimeError - if err := swag.ReadJSON(b, &res); err != nil { - return err - } - *m = res - return nil -} diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_artifact_list.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_artifact_list.go index 020ecd78ceb..3d833ad3e22 100644 --- a/backend/api/v2beta1/go_http_client/run_model/v2beta1_artifact_list.go +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_artifact_list.go @@ -6,12 +6,14 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // V2beta1ArtifactList A list of artifact metadata. 
+// // swagger:model v2beta1ArtifactList type V2beta1ArtifactList struct { @@ -24,6 +26,11 @@ func (m *V2beta1ArtifactList) Validate(formats strfmt.Registry) error { return nil } +// ContextValidate validates this v2beta1 artifact list based on context it is used +func (m *V2beta1ArtifactList) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *V2beta1ArtifactList) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_list_runs_response.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_list_runs_response.go index 08a906c9a80..568425f0da0 100644 --- a/backend/api/v2beta1/go_http_client/run_model/v2beta1_list_runs_response.go +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_list_runs_response.go @@ -6,15 +6,16 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // V2beta1ListRunsResponse v2beta1 list runs response +// // swagger:model v2beta1ListRunsResponse type V2beta1ListRunsResponse struct { @@ -43,7 +44,6 @@ func (m *V2beta1ListRunsResponse) Validate(formats strfmt.Registry) error { } func (m *V2beta1ListRunsResponse) validateRuns(formats strfmt.Registry) error { - if swag.IsZero(m.Runs) { // not required return nil } @@ -57,6 +57,47 @@ func (m *V2beta1ListRunsResponse) validateRuns(formats strfmt.Registry) error { if err := m.Runs[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("runs" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("runs" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this v2beta1 list runs response based on the context it is used +func (m *V2beta1ListRunsResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateRuns(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1ListRunsResponse) contextValidateRuns(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Runs); i++ { + + if m.Runs[i] != nil { + + if swag.IsZero(m.Runs[i]) { // not required + return nil + } + + if err := m.Runs[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("runs" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("runs" + "." 
+ strconv.Itoa(i)) } return err } diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_pipeline_task_detail.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_pipeline_task_detail.go index 2ff67595630..4cfde980764 100644 --- a/backend/api/v2beta1/go_http_client/run_model/v2beta1_pipeline_task_detail.go +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_pipeline_task_detail.go @@ -6,16 +6,17 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // V2beta1PipelineTaskDetail Runtime information of a task execution. +// // swagger:model v2beta1PipelineTaskDetail type V2beta1PipelineTaskDetail struct { @@ -66,7 +67,7 @@ type V2beta1PipelineTaskDetail struct { StartTime strfmt.DateTime `json:"start_time,omitempty"` // Runtime state of a task. - State V2beta1RuntimeState `json:"state,omitempty"` + State *V2beta1RuntimeState `json:"state,omitempty"` // A sequence of task statuses. This field keeps a record // of state transitions. @@ -127,7 +128,6 @@ func (m *V2beta1PipelineTaskDetail) Validate(formats strfmt.Registry) error { } func (m *V2beta1PipelineTaskDetail) validateChildTasks(formats strfmt.Registry) error { - if swag.IsZero(m.ChildTasks) { // not required return nil } @@ -141,6 +141,8 @@ func (m *V2beta1PipelineTaskDetail) validateChildTasks(formats strfmt.Registry) if err := m.ChildTasks[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("child_tasks" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("child_tasks" + "." 
+ strconv.Itoa(i)) } return err } @@ -152,7 +154,6 @@ func (m *V2beta1PipelineTaskDetail) validateChildTasks(formats strfmt.Registry) } func (m *V2beta1PipelineTaskDetail) validateCreateTime(formats strfmt.Registry) error { - if swag.IsZero(m.CreateTime) { // not required return nil } @@ -165,7 +166,6 @@ func (m *V2beta1PipelineTaskDetail) validateCreateTime(formats strfmt.Registry) } func (m *V2beta1PipelineTaskDetail) validateEndTime(formats strfmt.Registry) error { - if swag.IsZero(m.EndTime) { // not required return nil } @@ -178,7 +178,6 @@ func (m *V2beta1PipelineTaskDetail) validateEndTime(formats strfmt.Registry) err } func (m *V2beta1PipelineTaskDetail) validateError(formats strfmt.Registry) error { - if swag.IsZero(m.Error) { // not required return nil } @@ -187,6 +186,8 @@ func (m *V2beta1PipelineTaskDetail) validateError(formats strfmt.Registry) error if err := m.Error.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("error") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("error") } return err } @@ -196,7 +197,6 @@ func (m *V2beta1PipelineTaskDetail) validateError(formats strfmt.Registry) error } func (m *V2beta1PipelineTaskDetail) validateExecutorDetail(formats strfmt.Registry) error { - if swag.IsZero(m.ExecutorDetail) { // not required return nil } @@ -205,6 +205,8 @@ func (m *V2beta1PipelineTaskDetail) validateExecutorDetail(formats strfmt.Regist if err := m.ExecutorDetail.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("executor_detail") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("executor_detail") } return err } @@ -214,7 +216,6 @@ func (m *V2beta1PipelineTaskDetail) validateExecutorDetail(formats strfmt.Regist } func (m *V2beta1PipelineTaskDetail) validateInputs(formats strfmt.Registry) error { - if swag.IsZero(m.Inputs) { // not required return nil } @@ -226,6 +227,11 @@ func (m *V2beta1PipelineTaskDetail) validateInputs(formats strfmt.Registry) erro } if val, ok := m.Inputs[k]; ok { if err := val.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("inputs" + "." + k) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("inputs" + "." + k) + } return err } } @@ -236,7 +242,6 @@ func (m *V2beta1PipelineTaskDetail) validateInputs(formats strfmt.Registry) erro } func (m *V2beta1PipelineTaskDetail) validateOutputs(formats strfmt.Registry) error { - if swag.IsZero(m.Outputs) { // not required return nil } @@ -248,6 +253,11 @@ func (m *V2beta1PipelineTaskDetail) validateOutputs(formats strfmt.Registry) err } if val, ok := m.Outputs[k]; ok { if err := val.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("outputs" + "." + k) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("outputs" + "." 
+ k) + } return err } } @@ -258,7 +268,6 @@ func (m *V2beta1PipelineTaskDetail) validateOutputs(formats strfmt.Registry) err } func (m *V2beta1PipelineTaskDetail) validateStartTime(formats strfmt.Registry) error { - if swag.IsZero(m.StartTime) { // not required return nil } @@ -271,23 +280,25 @@ func (m *V2beta1PipelineTaskDetail) validateStartTime(formats strfmt.Registry) e } func (m *V2beta1PipelineTaskDetail) validateState(formats strfmt.Registry) error { - if swag.IsZero(m.State) { // not required return nil } - if err := m.State.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("state") + if m.State != nil { + if err := m.State.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("state") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("state") + } + return err } - return err } return nil } func (m *V2beta1PipelineTaskDetail) validateStateHistory(formats strfmt.Registry) error { - if swag.IsZero(m.StateHistory) { // not required return nil } @@ -301,6 +312,189 @@ func (m *V2beta1PipelineTaskDetail) validateStateHistory(formats strfmt.Registry if err := m.StateHistory[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("state_history" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("state_history" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this v2beta1 pipeline task detail based on the context it is used +func (m *V2beta1PipelineTaskDetail) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateChildTasks(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateError(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateExecutorDetail(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateInputs(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateOutputs(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateState(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateStateHistory(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1PipelineTaskDetail) contextValidateChildTasks(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.ChildTasks); i++ { + + if m.ChildTasks[i] != nil { + + if swag.IsZero(m.ChildTasks[i]) { // not required + return nil + } + + if err := m.ChildTasks[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("child_tasks" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("child_tasks" + "." 
+ strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +func (m *V2beta1PipelineTaskDetail) contextValidateError(ctx context.Context, formats strfmt.Registry) error { + + if m.Error != nil { + + if swag.IsZero(m.Error) { // not required + return nil + } + + if err := m.Error.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("error") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("error") + } + return err + } + } + + return nil +} + +func (m *V2beta1PipelineTaskDetail) contextValidateExecutorDetail(ctx context.Context, formats strfmt.Registry) error { + + if m.ExecutorDetail != nil { + + if swag.IsZero(m.ExecutorDetail) { // not required + return nil + } + + if err := m.ExecutorDetail.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("executor_detail") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("executor_detail") + } + return err + } + } + + return nil +} + +func (m *V2beta1PipelineTaskDetail) contextValidateInputs(ctx context.Context, formats strfmt.Registry) error { + + for k := range m.Inputs { + + if val, ok := m.Inputs[k]; ok { + if err := val.ContextValidate(ctx, formats); err != nil { + return err + } + } + + } + + return nil +} + +func (m *V2beta1PipelineTaskDetail) contextValidateOutputs(ctx context.Context, formats strfmt.Registry) error { + + for k := range m.Outputs { + + if val, ok := m.Outputs[k]; ok { + if err := val.ContextValidate(ctx, formats); err != nil { + return err + } + } + + } + + return nil +} + +func (m *V2beta1PipelineTaskDetail) contextValidateState(ctx context.Context, formats strfmt.Registry) error { + + if m.State != nil { + + if swag.IsZero(m.State) { // not required + return nil + } + + if err := m.State.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("state") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("state") + } + return err + } + } + + return nil +} + +func (m *V2beta1PipelineTaskDetail) contextValidateStateHistory(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.StateHistory); i++ { + + if m.StateHistory[i] != nil { + + if swag.IsZero(m.StateHistory[i]) { // not required + return nil + } + + if err := m.StateHistory[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("state_history" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("state_history" + "." + strconv.Itoa(i)) } return err } diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_pipeline_task_executor_detail.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_pipeline_task_executor_detail.go index 87be2283f94..b2e2e18a78e 100644 --- a/backend/api/v2beta1/go_http_client/run_model/v2beta1_pipeline_task_executor_detail.go +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_pipeline_task_executor_detail.go @@ -6,12 +6,14 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // V2beta1PipelineTaskExecutorDetail Runtime information of a pipeline task executor. 
+// // swagger:model v2beta1PipelineTaskExecutorDetail type V2beta1PipelineTaskExecutorDetail struct { @@ -41,6 +43,11 @@ func (m *V2beta1PipelineTaskExecutorDetail) Validate(formats strfmt.Registry) er return nil } +// ContextValidate validates this v2beta1 pipeline task executor detail based on context it is used +func (m *V2beta1PipelineTaskExecutorDetail) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *V2beta1PipelineTaskExecutorDetail) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_pipeline_version_reference.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_pipeline_version_reference.go index 48f17f81926..1f1bed26e57 100644 --- a/backend/api/v2beta1/go_http_client/run_model/v2beta1_pipeline_version_reference.go +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_pipeline_version_reference.go @@ -6,12 +6,14 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // V2beta1PipelineVersionReference Reference to an existing pipeline version. +// // swagger:model v2beta1PipelineVersionReference type V2beta1PipelineVersionReference struct { @@ -27,6 +29,11 @@ func (m *V2beta1PipelineVersionReference) Validate(formats strfmt.Registry) erro return nil } +// ContextValidate validates this v2beta1 pipeline version reference based on context it is used +func (m *V2beta1PipelineVersionReference) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *V2beta1PipelineVersionReference) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_read_artifact_response.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_read_artifact_response.go index 0db413344a8..02c354521c9 100644 --- a/backend/api/v2beta1/go_http_client/run_model/v2beta1_read_artifact_response.go +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_read_artifact_response.go @@ -6,13 +6,14 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // V2beta1ReadArtifactResponse v2beta1 read artifact response +// // swagger:model v2beta1ReadArtifactResponse type V2beta1ReadArtifactResponse struct { @@ -23,26 +24,11 @@ type V2beta1ReadArtifactResponse struct { // Validate validates this v2beta1 read artifact response func (m *V2beta1ReadArtifactResponse) Validate(formats strfmt.Registry) error { - var res []error - - if err := m.validateData(formats); err != nil { - res = append(res, err) - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) 
- } return nil } -func (m *V2beta1ReadArtifactResponse) validateData(formats strfmt.Registry) error { - - if swag.IsZero(m.Data) { // not required - return nil - } - - // Format "byte" (base64 string) is already validated when unmarshalled - +// ContextValidate validates this v2beta1 read artifact response based on context it is used +func (m *V2beta1ReadArtifactResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { return nil } diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_run.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_run.go index d232fdd05ca..eb83d3e8492 100644 --- a/backend/api/v2beta1/go_http_client/run_model/v2beta1_run.go +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_run.go @@ -6,16 +6,17 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // V2beta1Run v2beta1 run +// // swagger:model v2beta1Run type V2beta1Run struct { @@ -74,14 +75,14 @@ type V2beta1Run struct { ServiceAccount string `json:"service_account,omitempty"` // Output. Runtime state of a run. - State V2beta1RuntimeState `json:"state,omitempty"` + State *V2beta1RuntimeState `json:"state,omitempty"` // Output. A sequence of run statuses. This field keeps a record // of state transitions. StateHistory []*V2beta1RuntimeStatus `json:"state_history"` // Output. Specifies whether this run is in archived or available mode. - StorageState V2beta1RunStorageState `json:"storage_state,omitempty"` + StorageState *V2beta1RunStorageState `json:"storage_state,omitempty"` } // Validate validates this v2beta1 run @@ -135,7 +136,6 @@ func (m *V2beta1Run) Validate(formats strfmt.Registry) error { } func (m *V2beta1Run) validateCreatedAt(formats strfmt.Registry) error { - if swag.IsZero(m.CreatedAt) { // not required return nil } @@ -148,7 +148,6 @@ func (m *V2beta1Run) validateCreatedAt(formats strfmt.Registry) error { } func (m *V2beta1Run) validateError(formats strfmt.Registry) error { - if swag.IsZero(m.Error) { // not required return nil } @@ -157,6 +156,8 @@ func (m *V2beta1Run) validateError(formats strfmt.Registry) error { if err := m.Error.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("error") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("error") } return err } @@ -166,7 +167,6 @@ func (m *V2beta1Run) validateError(formats strfmt.Registry) error { } func (m *V2beta1Run) validateFinishedAt(formats strfmt.Registry) error { - if swag.IsZero(m.FinishedAt) { // not required return nil } @@ -179,7 +179,6 @@ func (m *V2beta1Run) validateFinishedAt(formats strfmt.Registry) error { } func (m *V2beta1Run) validatePipelineVersionReference(formats strfmt.Registry) error { - if swag.IsZero(m.PipelineVersionReference) { // not required return nil } @@ -188,6 +187,8 @@ func (m *V2beta1Run) validatePipelineVersionReference(formats strfmt.Registry) e if err := m.PipelineVersionReference.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("pipeline_version_reference") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipeline_version_reference") } return err } @@ -197,7 +198,6 @@ func (m *V2beta1Run) validatePipelineVersionReference(formats strfmt.Registry) e } 
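The regenerated run_model types above replace plain enum fields such as State and StorageState with pointers and pair Validate with the new ContextValidate method. A minimal caller sketch, assuming the standard go-openapi strfmt.Default registry and using only identifiers that appear elsewhere in this diff (the ServiceAccount field, the NewV2beta1RuntimeState constructor, and the RUNTIMESTATEUNSPECIFIED constant), might look like the following; it is illustrative only and not part of the generated packages:

package main

import (
	"context"
	"fmt"

	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model"
)

func main() {
	run := &run_model.V2beta1Run{
		// Placeholder value chosen for illustration; the field is optional.
		ServiceAccount: "pipeline-runner",
		// State is now *V2beta1RuntimeState, so the generated constructor is used
		// to obtain the pointer (UNSPECIFIED is picked purely as an example value).
		State: run_model.NewV2beta1RuntimeState(run_model.V2beta1RuntimeStateRUNTIMESTATEUNSPECIFIED),
	}

	// Static (schema) validation, as before.
	if err := run.Validate(strfmt.Default); err != nil {
		fmt.Println("validation failed:", err)
	}

	// New in this regeneration: context-based validation.
	if err := run.ContextValidate(context.Background(), strfmt.Default); err != nil {
		fmt.Println("context validation failed:", err)
	}
}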
func (m *V2beta1Run) validateRunDetails(formats strfmt.Registry) error { - if swag.IsZero(m.RunDetails) { // not required return nil } @@ -206,6 +206,8 @@ func (m *V2beta1Run) validateRunDetails(formats strfmt.Registry) error { if err := m.RunDetails.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("run_details") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("run_details") } return err } @@ -215,7 +217,6 @@ func (m *V2beta1Run) validateRunDetails(formats strfmt.Registry) error { } func (m *V2beta1Run) validateRuntimeConfig(formats strfmt.Registry) error { - if swag.IsZero(m.RuntimeConfig) { // not required return nil } @@ -224,6 +225,8 @@ func (m *V2beta1Run) validateRuntimeConfig(formats strfmt.Registry) error { if err := m.RuntimeConfig.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("runtime_config") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("runtime_config") } return err } @@ -233,7 +236,6 @@ func (m *V2beta1Run) validateRuntimeConfig(formats strfmt.Registry) error { } func (m *V2beta1Run) validateScheduledAt(formats strfmt.Registry) error { - if swag.IsZero(m.ScheduledAt) { // not required return nil } @@ -246,23 +248,25 @@ func (m *V2beta1Run) validateScheduledAt(formats strfmt.Registry) error { } func (m *V2beta1Run) validateState(formats strfmt.Registry) error { - if swag.IsZero(m.State) { // not required return nil } - if err := m.State.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("state") + if m.State != nil { + if err := m.State.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("state") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("state") + } + return err } - return err } return nil } func (m *V2beta1Run) validateStateHistory(formats strfmt.Registry) error { - if swag.IsZero(m.StateHistory) { // not required return nil } @@ -276,6 +280,8 @@ func (m *V2beta1Run) validateStateHistory(formats strfmt.Registry) error { if err := m.StateHistory[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("state_history" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("state_history" + "." 
+ strconv.Itoa(i)) } return err } @@ -287,16 +293,208 @@ func (m *V2beta1Run) validateStateHistory(formats strfmt.Registry) error { } func (m *V2beta1Run) validateStorageState(formats strfmt.Registry) error { - if swag.IsZero(m.StorageState) { // not required return nil } - if err := m.StorageState.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("storage_state") + if m.StorageState != nil { + if err := m.StorageState.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("storage_state") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("storage_state") + } + return err + } + } + + return nil +} + +// ContextValidate validate this v2beta1 run based on the context it is used +func (m *V2beta1Run) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateError(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidatePipelineVersionReference(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateRunDetails(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateRuntimeConfig(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateState(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateStateHistory(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateStorageState(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1Run) contextValidateError(ctx context.Context, formats strfmt.Registry) error { + + if m.Error != nil { + + if swag.IsZero(m.Error) { // not required + return nil + } + + if err := m.Error.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("error") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("error") + } + return err + } + } + + return nil +} + +func (m *V2beta1Run) contextValidatePipelineVersionReference(ctx context.Context, formats strfmt.Registry) error { + + if m.PipelineVersionReference != nil { + + if swag.IsZero(m.PipelineVersionReference) { // not required + return nil + } + + if err := m.PipelineVersionReference.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("pipeline_version_reference") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipeline_version_reference") + } + return err + } + } + + return nil +} + +func (m *V2beta1Run) contextValidateRunDetails(ctx context.Context, formats strfmt.Registry) error { + + if m.RunDetails != nil { + + if swag.IsZero(m.RunDetails) { // not required + return nil + } + + if err := m.RunDetails.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("run_details") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("run_details") + } + return err + } + } + + return nil +} + +func (m *V2beta1Run) contextValidateRuntimeConfig(ctx context.Context, formats strfmt.Registry) error { + + if m.RuntimeConfig != nil { + + if swag.IsZero(m.RuntimeConfig) { // not required + return nil + } + + if err := m.RuntimeConfig.ContextValidate(ctx, formats); err != nil { 
+ if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("runtime_config") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("runtime_config") + } + return err + } + } + + return nil +} + +func (m *V2beta1Run) contextValidateState(ctx context.Context, formats strfmt.Registry) error { + + if m.State != nil { + + if swag.IsZero(m.State) { // not required + return nil + } + + if err := m.State.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("state") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("state") + } + return err + } + } + + return nil +} + +func (m *V2beta1Run) contextValidateStateHistory(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.StateHistory); i++ { + + if m.StateHistory[i] != nil { + + if swag.IsZero(m.StateHistory[i]) { // not required + return nil + } + + if err := m.StateHistory[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("state_history" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("state_history" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +func (m *V2beta1Run) contextValidateStorageState(ctx context.Context, formats strfmt.Registry) error { + + if m.StorageState != nil { + + if swag.IsZero(m.StorageState) { // not required + return nil + } + + if err := m.StorageState.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("storage_state") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("storage_state") + } + return err } - return err } return nil diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_run_details.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_run_details.go index 5b412e0ad9b..e3dae50a3b6 100644 --- a/backend/api/v2beta1/go_http_client/run_model/v2beta1_run_details.go +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_run_details.go @@ -6,15 +6,16 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "strconv" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // V2beta1RunDetails Runtime details of a run. +// // swagger:model v2beta1RunDetails type V2beta1RunDetails struct { @@ -43,7 +44,6 @@ func (m *V2beta1RunDetails) Validate(formats strfmt.Registry) error { } func (m *V2beta1RunDetails) validateTaskDetails(formats strfmt.Registry) error { - if swag.IsZero(m.TaskDetails) { // not required return nil } @@ -57,6 +57,47 @@ func (m *V2beta1RunDetails) validateTaskDetails(formats strfmt.Registry) error { if err := m.TaskDetails[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("task_details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("task_details" + "." 
+ strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this v2beta1 run details based on the context it is used +func (m *V2beta1RunDetails) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateTaskDetails(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1RunDetails) contextValidateTaskDetails(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.TaskDetails); i++ { + + if m.TaskDetails[i] != nil { + + if swag.IsZero(m.TaskDetails[i]) { // not required + return nil + } + + if err := m.TaskDetails[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("task_details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("task_details" + "." + strconv.Itoa(i)) } return err } diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_run_storage_state.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_run_storage_state.go index e4ac7f08699..51531364ec7 100644 --- a/backend/api/v2beta1/go_http_client/run_model/v2beta1_run_storage_state.go +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_run_storage_state.go @@ -6,22 +6,32 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // V2beta1RunStorageState Describes whether an entity is available or archived. // -// - STORAGE_STATE_UNSPECIFIED: Default state. This state in not used -// - AVAILABLE: Entity is available. -// - ARCHIVED: Entity is archived. +// - STORAGE_STATE_UNSPECIFIED: Default state. This state in not used +// - AVAILABLE: Entity is available. +// - ARCHIVED: Entity is archived. +// // swagger:model v2beta1RunStorageState type V2beta1RunStorageState string +func NewV2beta1RunStorageState(value V2beta1RunStorageState) *V2beta1RunStorageState { + return &value +} + +// Pointer returns a pointer to a freshly-allocated V2beta1RunStorageState. 
+func (m V2beta1RunStorageState) Pointer() *V2beta1RunStorageState { + return &m +} + const ( // V2beta1RunStorageStateSTORAGESTATEUNSPECIFIED captures enum value "STORAGE_STATE_UNSPECIFIED" @@ -48,7 +58,7 @@ func init() { } func (m V2beta1RunStorageState) validateV2beta1RunStorageStateEnum(path, location string, value V2beta1RunStorageState) error { - if err := validate.Enum(path, location, value, v2beta1RunStorageStateEnum); err != nil { + if err := validate.EnumCase(path, location, value, v2beta1RunStorageStateEnum, true); err != nil { return err } return nil @@ -68,3 +78,8 @@ func (m V2beta1RunStorageState) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this v2beta1 run storage state based on context it is used +func (m V2beta1RunStorageState) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_runtime_config.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_runtime_config.go index 8057b900fbc..d6fc19463fe 100644 --- a/backend/api/v2beta1/go_http_client/run_model/v2beta1_runtime_config.go +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_runtime_config.go @@ -6,12 +6,14 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // V2beta1RuntimeConfig The runtime config. +// // swagger:model v2beta1RuntimeConfig type V2beta1RuntimeConfig struct { @@ -31,6 +33,11 @@ func (m *V2beta1RuntimeConfig) Validate(formats strfmt.Registry) error { return nil } +// ContextValidate validates this v2beta1 runtime config based on context it is used +func (m *V2beta1RuntimeConfig) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + // MarshalBinary interface implementation func (m *V2beta1RuntimeConfig) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_runtime_state.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_runtime_state.go index 015365d1539..eff4858c0fb 100644 --- a/backend/api/v2beta1/go_http_client/run_model/v2beta1_runtime_state.go +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_runtime_state.go @@ -6,29 +6,40 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // V2beta1RuntimeState Describes the runtime state of an entity. // -// - RUNTIME_STATE_UNSPECIFIED: Default value. This value is not used. -// - PENDING: Service is preparing to execute an entity. -// - RUNNING: Entity execution is in progress. -// - SUCCEEDED: Entity completed successfully. -// - SKIPPED: Entity has been skipped. For example, due to caching. -// - FAILED: Entity execution has failed. -// - CANCELING: Entity is being canceled. From this state, an entity may only +// - RUNTIME_STATE_UNSPECIFIED: Default value. This value is not used. +// - PENDING: Service is preparing to execute an entity. +// - RUNNING: Entity execution is in progress. +// - SUCCEEDED: Entity completed successfully. +// - SKIPPED: Entity has been skipped. For example, due to caching. +// - FAILED: Entity execution has failed. +// - CANCELING: Entity is being canceled. 
From this state, an entity may only +// // change its state to SUCCEEDED, FAILED or CANCELED. -// - CANCELED: Entity has been canceled. -// - PAUSED: Entity has been paused. It can be resumed. +// - CANCELED: Entity has been canceled. +// - PAUSED: Entity has been paused. It can be resumed. +// // swagger:model v2beta1RuntimeState type V2beta1RuntimeState string +func NewV2beta1RuntimeState(value V2beta1RuntimeState) *V2beta1RuntimeState { + return &value +} + +// Pointer returns a pointer to a freshly-allocated V2beta1RuntimeState. +func (m V2beta1RuntimeState) Pointer() *V2beta1RuntimeState { + return &m +} + const ( // V2beta1RuntimeStateRUNTIMESTATEUNSPECIFIED captures enum value "RUNTIME_STATE_UNSPECIFIED" @@ -73,7 +84,7 @@ func init() { } func (m V2beta1RuntimeState) validateV2beta1RuntimeStateEnum(path, location string, value V2beta1RuntimeState) error { - if err := validate.Enum(path, location, value, v2beta1RuntimeStateEnum); err != nil { + if err := validate.EnumCase(path, location, value, v2beta1RuntimeStateEnum, true); err != nil { return err } return nil @@ -93,3 +104,8 @@ func (m V2beta1RuntimeState) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this v2beta1 runtime state based on context it is used +func (m V2beta1RuntimeState) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_runtime_status.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_runtime_status.go index 3e1b067f195..db613cebadf 100644 --- a/backend/api/v2beta1/go_http_client/run_model/v2beta1_runtime_status.go +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_runtime_status.go @@ -6,14 +6,16 @@ package run_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // V2beta1RuntimeStatus Timestamped representation of a runtime state with an optional error. +// // swagger:model v2beta1RuntimeStatus type V2beta1RuntimeStatus struct { @@ -25,7 +27,7 @@ type V2beta1RuntimeStatus struct { Error *GooglerpcStatus `json:"error,omitempty"` // The state of a runtime instance. - State V2beta1RuntimeState `json:"state,omitempty"` + State *V2beta1RuntimeState `json:"state,omitempty"` // Update time of this state. 
// Format: date-time @@ -55,7 +57,6 @@ func (m *V2beta1RuntimeStatus) Validate(formats strfmt.Registry) error { } func (m *V2beta1RuntimeStatus) validateError(formats strfmt.Registry) error { - if swag.IsZero(m.Error) { // not required return nil } @@ -64,6 +65,8 @@ func (m *V2beta1RuntimeStatus) validateError(formats strfmt.Registry) error { if err := m.Error.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("error") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("error") } return err } @@ -73,23 +76,25 @@ func (m *V2beta1RuntimeStatus) validateError(formats strfmt.Registry) error { } func (m *V2beta1RuntimeStatus) validateState(formats strfmt.Registry) error { - if swag.IsZero(m.State) { // not required return nil } - if err := m.State.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("state") + if m.State != nil { + if err := m.State.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("state") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("state") + } + return err } - return err } return nil } func (m *V2beta1RuntimeStatus) validateUpdateTime(formats strfmt.Registry) error { - if swag.IsZero(m.UpdateTime) { // not required return nil } @@ -101,6 +106,66 @@ func (m *V2beta1RuntimeStatus) validateUpdateTime(formats strfmt.Registry) error return nil } +// ContextValidate validate this v2beta1 runtime status based on the context it is used +func (m *V2beta1RuntimeStatus) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateError(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateState(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *V2beta1RuntimeStatus) contextValidateError(ctx context.Context, formats strfmt.Registry) error { + + if m.Error != nil { + + if swag.IsZero(m.Error) { // not required + return nil + } + + if err := m.Error.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("error") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("error") + } + return err + } + } + + return nil +} + +func (m *V2beta1RuntimeStatus) contextValidateState(ctx context.Context, formats strfmt.Registry) error { + + if m.State != nil { + + if swag.IsZero(m.State) { // not required + return nil + } + + if err := m.State.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("state") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("state") + } + return err + } + } + + return nil +} + // MarshalBinary interface implementation func (m *V2beta1RuntimeStatus) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v2beta1/go_http_client/visualization_client/visualization_client.go b/backend/api/v2beta1/go_http_client/visualization_client/visualization_client.go index cb722eda14f..1b4642c8039 100644 --- a/backend/api/v2beta1/go_http_client/visualization_client/visualization_client.go +++ b/backend/api/v2beta1/go_http_client/visualization_client/visualization_client.go @@ -8,8 +8,7 @@ package visualization_client import ( "github.com/go-openapi/runtime" httptransport "github.com/go-openapi/runtime/client" - - strfmt "github.com/go-openapi/strfmt" + "github.com/go-openapi/strfmt" "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/visualization_client/visualization_service" ) @@ -56,9 +55,7 @@ func New(transport runtime.ClientTransport, formats strfmt.Registry) *Visualizat cli := new(Visualization) cli.Transport = transport - cli.VisualizationService = visualization_service.New(transport, formats) - return cli } @@ -103,7 +100,7 @@ func (cfg *TransportConfig) WithSchemes(schemes []string) *TransportConfig { // Visualization is a client for visualization type Visualization struct { - VisualizationService *visualization_service.Client + VisualizationService visualization_service.ClientService Transport runtime.ClientTransport } @@ -111,7 +108,5 @@ type Visualization struct { // SetTransport changes the transport on the client and all its subresources func (c *Visualization) SetTransport(transport runtime.ClientTransport) { c.Transport = transport - c.VisualizationService.SetTransport(transport) - } diff --git a/backend/api/v2beta1/go_http_client/visualization_client/visualization_service/visualization_service_client.go b/backend/api/v2beta1/go_http_client/visualization_client/visualization_service/visualization_service_client.go index 9c81b3ab5ce..187935fdaf2 100644 --- a/backend/api/v2beta1/go_http_client/visualization_client/visualization_service/visualization_service_client.go +++ b/backend/api/v2beta1/go_http_client/visualization_client/visualization_service/visualization_service_client.go @@ -7,15 +7,40 @@ package visualization_service import ( "github.com/go-openapi/runtime" - - strfmt "github.com/go-openapi/strfmt" + httptransport "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" ) // New creates a new visualization service API client. 
-func New(transport runtime.ClientTransport, formats strfmt.Registry) *Client { +func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService { return &Client{transport: transport, formats: formats} } +// New creates a new visualization service API client with basic auth credentials. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - user: user for basic authentication header. +// - password: password for basic authentication header. +func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BasicAuth(user, password) + return &Client{transport: transport, formats: strfmt.Default} +} + +// New creates a new visualization service API client with a bearer token for authentication. +// It takes the following parameters: +// - host: http host (github.com). +// - basePath: any base path for the API client ("/v1", "/v3"). +// - scheme: http scheme ("http", "https"). +// - bearerToken: bearer token for Bearer authentication header. +func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService { + transport := httptransport.New(host, basePath, []string{scheme}) + transport.DefaultAuthentication = httptransport.BearerToken(bearerToken) + return &Client{transport: transport, formats: strfmt.Default} +} + /* Client for visualization service API */ @@ -24,16 +49,25 @@ type Client struct { formats strfmt.Registry } +// ClientOption may be used to customize the behavior of Client methods. +type ClientOption func(*runtime.ClientOperation) + +// ClientService is the interface for Client methods +type ClientService interface { + VisualizationServiceCreateVisualizationV1(params *VisualizationServiceCreateVisualizationV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*VisualizationServiceCreateVisualizationV1OK, error) + + SetTransport(transport runtime.ClientTransport) +} + /* VisualizationServiceCreateVisualizationV1 visualization service create visualization v1 API */ -func (a *Client) VisualizationServiceCreateVisualizationV1(params *VisualizationServiceCreateVisualizationV1Params, authInfo runtime.ClientAuthInfoWriter) (*VisualizationServiceCreateVisualizationV1OK, error) { +func (a *Client) VisualizationServiceCreateVisualizationV1(params *VisualizationServiceCreateVisualizationV1Params, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*VisualizationServiceCreateVisualizationV1OK, error) { // TODO: Validate the params before sending if params == nil { params = NewVisualizationServiceCreateVisualizationV1Params() } - - result, err := a.transport.Submit(&runtime.ClientOperation{ + op := &runtime.ClientOperation{ ID: "VisualizationService_CreateVisualizationV1", Method: "POST", PathPattern: "/apis/v2beta1/visualizations/{namespace}", @@ -45,12 +79,22 @@ func (a *Client) VisualizationServiceCreateVisualizationV1(params *Visualization AuthInfo: authInfo, Context: params.Context, Client: params.HTTPClient, - }) + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) if err != nil { return nil, err } - return result.(*VisualizationServiceCreateVisualizationV1OK), nil - + success, ok := result.(*VisualizationServiceCreateVisualizationV1OK) + if ok { + return success, nil + } + // unexpected success response + 
unexpectedSuccess := result.(*VisualizationServiceCreateVisualizationV1Default) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } // SetTransport changes the transport on the client diff --git a/backend/api/v2beta1/go_http_client/visualization_client/visualization_service/visualization_service_create_visualization_v1_parameters.go b/backend/api/v2beta1/go_http_client/visualization_client/visualization_service/visualization_service_create_visualization_v1_parameters.go index fe9fb8a7586..70b865bc3ee 100644 --- a/backend/api/v2beta1/go_http_client/visualization_client/visualization_service/visualization_service_create_visualization_v1_parameters.go +++ b/backend/api/v2beta1/go_http_client/visualization_client/visualization_service/visualization_service_create_visualization_v1_parameters.go @@ -13,66 +13,82 @@ import ( "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - visualization_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/visualization_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/visualization_model" ) -// NewVisualizationServiceCreateVisualizationV1Params creates a new VisualizationServiceCreateVisualizationV1Params object -// with the default values initialized. +// NewVisualizationServiceCreateVisualizationV1Params creates a new VisualizationServiceCreateVisualizationV1Params object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. func NewVisualizationServiceCreateVisualizationV1Params() *VisualizationServiceCreateVisualizationV1Params { - var () return &VisualizationServiceCreateVisualizationV1Params{ - timeout: cr.DefaultTimeout, } } // NewVisualizationServiceCreateVisualizationV1ParamsWithTimeout creates a new VisualizationServiceCreateVisualizationV1Params object -// with the default values initialized, and the ability to set a timeout on a request +// with the ability to set a timeout on a request. func NewVisualizationServiceCreateVisualizationV1ParamsWithTimeout(timeout time.Duration) *VisualizationServiceCreateVisualizationV1Params { - var () return &VisualizationServiceCreateVisualizationV1Params{ - timeout: timeout, } } // NewVisualizationServiceCreateVisualizationV1ParamsWithContext creates a new VisualizationServiceCreateVisualizationV1Params object -// with the default values initialized, and the ability to set a context for a request +// with the ability to set a context for a request. func NewVisualizationServiceCreateVisualizationV1ParamsWithContext(ctx context.Context) *VisualizationServiceCreateVisualizationV1Params { - var () return &VisualizationServiceCreateVisualizationV1Params{ - Context: ctx, } } // NewVisualizationServiceCreateVisualizationV1ParamsWithHTTPClient creates a new VisualizationServiceCreateVisualizationV1Params object -// with the default values initialized, and the ability to set a custom HTTPClient for a request +// with the ability to set a custom HTTPClient for a request. 
func NewVisualizationServiceCreateVisualizationV1ParamsWithHTTPClient(client *http.Client) *VisualizationServiceCreateVisualizationV1Params { - var () return &VisualizationServiceCreateVisualizationV1Params{ HTTPClient: client, } } -/*VisualizationServiceCreateVisualizationV1Params contains all the parameters to send to the API endpoint -for the visualization service create visualization v1 operation typically these are written to a http.Request +/* +VisualizationServiceCreateVisualizationV1Params contains all the parameters to send to the API endpoint + + for the visualization service create visualization v1 operation. + + Typically these are written to a http.Request. */ type VisualizationServiceCreateVisualizationV1Params struct { - /*Body*/ - Body *visualization_model.V2beta1Visualization - /*Namespace*/ + // Namespace. Namespace string + // Visualization. + Visualization *visualization_model.V2beta1Visualization + timeout time.Duration Context context.Context HTTPClient *http.Client } +// WithDefaults hydrates default values in the visualization service create visualization v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *VisualizationServiceCreateVisualizationV1Params) WithDefaults() *VisualizationServiceCreateVisualizationV1Params { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the visualization service create visualization v1 params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *VisualizationServiceCreateVisualizationV1Params) SetDefaults() { + // no default values defined for this parameter +} + // WithTimeout adds the timeout to the visualization service create visualization v1 params func (o *VisualizationServiceCreateVisualizationV1Params) WithTimeout(timeout time.Duration) *VisualizationServiceCreateVisualizationV1Params { o.SetTimeout(timeout) @@ -106,17 +122,6 @@ func (o *VisualizationServiceCreateVisualizationV1Params) SetHTTPClient(client * o.HTTPClient = client } -// WithBody adds the body to the visualization service create visualization v1 params -func (o *VisualizationServiceCreateVisualizationV1Params) WithBody(body *visualization_model.V2beta1Visualization) *VisualizationServiceCreateVisualizationV1Params { - o.SetBody(body) - return o -} - -// SetBody adds the body to the visualization service create visualization v1 params -func (o *VisualizationServiceCreateVisualizationV1Params) SetBody(body *visualization_model.V2beta1Visualization) { - o.Body = body -} - // WithNamespace adds the namespace to the visualization service create visualization v1 params func (o *VisualizationServiceCreateVisualizationV1Params) WithNamespace(namespace string) *VisualizationServiceCreateVisualizationV1Params { o.SetNamespace(namespace) @@ -128,6 +133,17 @@ func (o *VisualizationServiceCreateVisualizationV1Params) SetNamespace(namespace o.Namespace = namespace } +// WithVisualization adds the visualization to the visualization service create visualization v1 params +func (o *VisualizationServiceCreateVisualizationV1Params) WithVisualization(visualization *visualization_model.V2beta1Visualization) *VisualizationServiceCreateVisualizationV1Params { + o.SetVisualization(visualization) + return o +} + +// SetVisualization adds the visualization to the visualization service create visualization v1 params +func (o *VisualizationServiceCreateVisualizationV1Params) SetVisualization(visualization *visualization_model.V2beta1Visualization) { + 
o.Visualization = visualization +} + // WriteToRequest writes these params to a swagger request func (o *VisualizationServiceCreateVisualizationV1Params) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { @@ -136,16 +152,15 @@ func (o *VisualizationServiceCreateVisualizationV1Params) WriteToRequest(r runti } var res []error - if o.Body != nil { - if err := r.SetBodyParam(o.Body); err != nil { - return err - } - } - // path param namespace if err := r.SetPathParam("namespace", o.Namespace); err != nil { return err } + if o.Visualization != nil { + if err := r.SetBodyParam(o.Visualization); err != nil { + return err + } + } if len(res) > 0 { return errors.CompositeValidationError(res...) diff --git a/backend/api/v2beta1/go_http_client/visualization_client/visualization_service/visualization_service_create_visualization_v1_responses.go b/backend/api/v2beta1/go_http_client/visualization_client/visualization_service/visualization_service_create_visualization_v1_responses.go index dd8907ff380..9839afe90f1 100644 --- a/backend/api/v2beta1/go_http_client/visualization_client/visualization_service/visualization_service_create_visualization_v1_responses.go +++ b/backend/api/v2beta1/go_http_client/visualization_client/visualization_service/visualization_service_create_visualization_v1_responses.go @@ -6,14 +6,14 @@ package visualization_service // Editing this file might prove futile when you re-run the swagger generate command import ( + "encoding/json" "fmt" "io" "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" - strfmt "github.com/go-openapi/strfmt" - - visualization_model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/visualization_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/visualization_model" ) // VisualizationServiceCreateVisualizationV1Reader is a Reader for the VisualizationServiceCreateVisualizationV1 structure. @@ -24,14 +24,12 @@ type VisualizationServiceCreateVisualizationV1Reader struct { // ReadResponse reads a server response into the received o. func (o *VisualizationServiceCreateVisualizationV1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { - case 200: result := NewVisualizationServiceCreateVisualizationV1OK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil - default: result := NewVisualizationServiceCreateVisualizationV1Default(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { @@ -49,7 +47,8 @@ func NewVisualizationServiceCreateVisualizationV1OK() *VisualizationServiceCreat return &VisualizationServiceCreateVisualizationV1OK{} } -/*VisualizationServiceCreateVisualizationV1OK handles this case with default header values. +/* +VisualizationServiceCreateVisualizationV1OK describes a response with status code 200, with default header values. A successful response. 
*/ @@ -57,8 +56,48 @@ type VisualizationServiceCreateVisualizationV1OK struct { Payload *visualization_model.V2beta1Visualization } +// IsSuccess returns true when this visualization service create visualization v1 o k response has a 2xx status code +func (o *VisualizationServiceCreateVisualizationV1OK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this visualization service create visualization v1 o k response has a 3xx status code +func (o *VisualizationServiceCreateVisualizationV1OK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this visualization service create visualization v1 o k response has a 4xx status code +func (o *VisualizationServiceCreateVisualizationV1OK) IsClientError() bool { + return false +} + +// IsServerError returns true when this visualization service create visualization v1 o k response has a 5xx status code +func (o *VisualizationServiceCreateVisualizationV1OK) IsServerError() bool { + return false +} + +// IsCode returns true when this visualization service create visualization v1 o k response a status code equal to that given +func (o *VisualizationServiceCreateVisualizationV1OK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the visualization service create visualization v1 o k response +func (o *VisualizationServiceCreateVisualizationV1OK) Code() int { + return 200 +} + func (o *VisualizationServiceCreateVisualizationV1OK) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/visualizations/{namespace}][%d] visualizationServiceCreateVisualizationV1OK %+v", 200, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/visualizations/{namespace}][%d] visualizationServiceCreateVisualizationV1OK %s", 200, payload) +} + +func (o *VisualizationServiceCreateVisualizationV1OK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/visualizations/{namespace}][%d] visualizationServiceCreateVisualizationV1OK %s", 200, payload) +} + +func (o *VisualizationServiceCreateVisualizationV1OK) GetPayload() *visualization_model.V2beta1Visualization { + return o.Payload } func (o *VisualizationServiceCreateVisualizationV1OK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -80,14 +119,40 @@ func NewVisualizationServiceCreateVisualizationV1Default(code int) *Visualizatio } } -/*VisualizationServiceCreateVisualizationV1Default handles this case with default header values. +/* +VisualizationServiceCreateVisualizationV1Default describes a response with status code -1, with default header values. An unexpected error response. 
*/ type VisualizationServiceCreateVisualizationV1Default struct { _statusCode int - Payload *visualization_model.RuntimeError + Payload *visualization_model.GooglerpcStatus +} + +// IsSuccess returns true when this visualization service create visualization v1 default response has a 2xx status code +func (o *VisualizationServiceCreateVisualizationV1Default) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this visualization service create visualization v1 default response has a 3xx status code +func (o *VisualizationServiceCreateVisualizationV1Default) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this visualization service create visualization v1 default response has a 4xx status code +func (o *VisualizationServiceCreateVisualizationV1Default) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this visualization service create visualization v1 default response has a 5xx status code +func (o *VisualizationServiceCreateVisualizationV1Default) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this visualization service create visualization v1 default response a status code equal to that given +func (o *VisualizationServiceCreateVisualizationV1Default) IsCode(code int) bool { + return o._statusCode == code } // Code gets the status code for the visualization service create visualization v1 default response @@ -96,12 +161,22 @@ func (o *VisualizationServiceCreateVisualizationV1Default) Code() int { } func (o *VisualizationServiceCreateVisualizationV1Default) Error() string { - return fmt.Sprintf("[POST /apis/v2beta1/visualizations/{namespace}][%d] VisualizationService_CreateVisualizationV1 default %+v", o._statusCode, o.Payload) + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/visualizations/{namespace}][%d] VisualizationService_CreateVisualizationV1 default %s", o._statusCode, payload) +} + +func (o *VisualizationServiceCreateVisualizationV1Default) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/visualizations/{namespace}][%d] VisualizationService_CreateVisualizationV1 default %s", o._statusCode, payload) +} + +func (o *VisualizationServiceCreateVisualizationV1Default) GetPayload() *visualization_model.GooglerpcStatus { + return o.Payload } func (o *VisualizationServiceCreateVisualizationV1Default) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { - o.Payload = new(visualization_model.RuntimeError) + o.Payload = new(visualization_model.GooglerpcStatus) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { diff --git a/backend/api/v2beta1/go_http_client/visualization_model/googlerpc_status.go b/backend/api/v2beta1/go_http_client/visualization_model/googlerpc_status.go new file mode 100644 index 00000000000..2d2a620e2ec --- /dev/null +++ b/backend/api/v2beta1/go_http_client/visualization_model/googlerpc_status.go @@ -0,0 +1,136 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package visualization_model + +// This file was generated by the swagger tool. 
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// GooglerpcStatus The `Status` type defines a logical error model that is suitable for +// different programming environments, including REST APIs and RPC APIs. It is +// used by [gRPC](https://github.com/grpc). Each `Status` message contains +// three pieces of data: error code, error message, and error details. +// +// You can find out more about this error model and how to work with it in the +// [API Design Guide](https://cloud.google.com/apis/design/errors). +// +// swagger:model googlerpcStatus +type GooglerpcStatus struct { + + // The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]. + Code int32 `json:"code,omitempty"` + + // A list of messages that carry the error details. There is a common set of + // message types for APIs to use. + Details []*ProtobufAny `json:"details"` + + // A developer-facing error message, which should be in English. Any + // user-facing error message should be localized and sent in the + // [google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client. + Message string `json:"message,omitempty"` +} + +// Validate validates this googlerpc status +func (m *GooglerpcStatus) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateDetails(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) validateDetails(formats strfmt.Registry) error { + if swag.IsZero(m.Details) { // not required + return nil + } + + for i := 0; i < len(m.Details); i++ { + if swag.IsZero(m.Details[i]) { // not required + continue + } + + if m.Details[i] != nil { + if err := m.Details[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this googlerpc status based on the context it is used +func (m *GooglerpcStatus) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateDetails(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) contextValidateDetails(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Details); i++ { + + if m.Details[i] != nil { + + if swag.IsZero(m.Details[i]) { // not required + return nil + } + + if err := m.Details[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." 
+ strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *GooglerpcStatus) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *GooglerpcStatus) UnmarshalBinary(b []byte) error { + var res GooglerpcStatus + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/visualization_model/protobuf_any.go b/backend/api/v2beta1/go_http_client/visualization_model/protobuf_any.go index 79170185217..bd86be1550d 100644 --- a/backend/api/v2beta1/go_http_client/visualization_model/protobuf_any.go +++ b/backend/api/v2beta1/go_http_client/visualization_model/protobuf_any.go @@ -6,9 +6,10 @@ package visualization_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" + "encoding/json" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) @@ -20,45 +21,49 @@ import ( // // Example 1: Pack and unpack a message in C++. // -// Foo foo = ...; -// Any any; -// any.PackFrom(foo); -// ... -// if (any.UnpackTo(&foo)) { -// ... -// } +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } // // Example 2: Pack and unpack a message in Java. // -// Foo foo = ...; -// Any any = Any.pack(foo); -// ... -// if (any.is(Foo.class)) { -// foo = any.unpack(Foo.class); -// } -// -// Example 3: Pack and unpack a message in Python. -// -// foo = Foo(...) -// any = Any() -// any.Pack(foo) -// ... -// if any.Is(Foo.DESCRIPTOR): -// any.Unpack(foo) -// ... -// -// Example 4: Pack and unpack a message in Go -// -// foo := &pb.Foo{...} -// any, err := anypb.New(foo) -// if err != nil { -// ... -// } -// ... -// foo := &pb.Foo{} -// if err := any.UnmarshalTo(foo); err != nil { -// ... -// } +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// // or ... +// if (any.isSameTypeAs(Foo.getDefaultInstance())) { +// foo = any.unpack(Foo.getDefaultInstance()); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... +// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := anypb.New(foo) +// if err != nil { +// ... +// } +// ... +// foo := &pb.Foo{} +// if err := any.UnmarshalTo(foo); err != nil { +// ... +// } // // The pack methods provided by protobuf library will by default use // 'type.googleapis.com/full.type.name' as the type URL and the unpack @@ -66,34 +71,34 @@ import ( // in the type URL, for example "foo.bar.com/x/y.z" will yield type // name "y.z". // -// // JSON -// +// ==== // The JSON representation of an `Any` value uses the regular // representation of the deserialized, embedded message, with an // additional field `@type` which contains the type URL. 
Example: // -// package google.profile; -// message Person { -// string first_name = 1; -// string last_name = 2; -// } +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } // -// { -// "@type": "type.googleapis.com/google.profile.Person", -// "firstName": , -// "lastName": -// } +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } // // If the embedded message type is well-known and has a custom JSON // representation, that representation will be embedded adding a field // `value` which holds the custom JSON in addition to the `@type` // field. Example (for message [google.protobuf.Duration][]): // -// { -// "@type": "type.googleapis.com/google.protobuf.Duration", -// "value": "1.212s" -// } +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// // swagger:model protobufAny type ProtobufAny struct { @@ -120,39 +125,151 @@ type ProtobufAny struct { // // Note: this functionality is not currently available in the official // protobuf release, and it is not used for type URLs beginning with - // type.googleapis.com. + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. // // Schemes other than `http`, `https` (or the empty scheme) might be // used with implementation specific semantics. - TypeURL string `json:"type_url,omitempty"` + AtType string `json:"@type,omitempty"` - // Must be a valid serialized protocol buffer of the above specified type. - // Format: byte - Value strfmt.Base64 `json:"value,omitempty"` + // protobuf any + ProtobufAny map[string]interface{} `json:"-"` } -// Validate validates this protobuf any -func (m *ProtobufAny) Validate(formats strfmt.Registry) error { - var res []error +// UnmarshalJSON unmarshals this object with additional properties from JSON +func (m *ProtobufAny) UnmarshalJSON(data []byte) error { + // stage 1, bind the properties + var stage1 struct { + + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. 
+ // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + AtType string `json:"@type,omitempty"` + } + if err := json.Unmarshal(data, &stage1); err != nil { + return err + } + var rcv ProtobufAny + + rcv.AtType = stage1.AtType + *m = rcv - if err := m.validateValue(formats); err != nil { - res = append(res, err) + // stage 2, remove properties and add to map + stage2 := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &stage2); err != nil { + return err } - if len(res) > 0 { - return errors.CompositeValidationError(res...) + delete(stage2, "@type") + // stage 3, add additional properties values + if len(stage2) > 0 { + result := make(map[string]interface{}) + for k, v := range stage2 { + var toadd interface{} + if err := json.Unmarshal(v, &toadd); err != nil { + return err + } + result[k] = toadd + } + m.ProtobufAny = result } + return nil } -func (m *ProtobufAny) validateValue(formats strfmt.Registry) error { +// MarshalJSON marshals this object with additional properties into a JSON object +func (m ProtobufAny) MarshalJSON() ([]byte, error) { + var stage1 struct { - if swag.IsZero(m.Value) { // not required - return nil + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. 
+ AtType string `json:"@type,omitempty"` } - // Format "byte" (base64 string) is already validated when unmarshalled + stage1.AtType = m.AtType + + // make JSON object for known properties + props, err := json.Marshal(stage1) + if err != nil { + return nil, err + } + + if len(m.ProtobufAny) == 0 { // no additional properties + return props, nil + } + + // make JSON object for the additional properties + additional, err := json.Marshal(m.ProtobufAny) + if err != nil { + return nil, err + } + + if len(props) < 3 { // "{}": only additional properties + return additional, nil + } + + // concatenate the 2 objects + return swag.ConcatJSON(props, additional), nil +} + +// Validate validates this protobuf any +func (m *ProtobufAny) Validate(formats strfmt.Registry) error { + return nil +} +// ContextValidate validates this protobuf any based on context it is used +func (m *ProtobufAny) ContextValidate(ctx context.Context, formats strfmt.Registry) error { return nil } diff --git a/backend/api/v2beta1/go_http_client/visualization_model/runtime_error.go b/backend/api/v2beta1/go_http_client/visualization_model/runtime_error.go deleted file mode 100644 index d3023542372..00000000000 --- a/backend/api/v2beta1/go_http_client/visualization_model/runtime_error.go +++ /dev/null @@ -1,89 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package visualization_model - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "strconv" - - strfmt "github.com/go-openapi/strfmt" - - "github.com/go-openapi/errors" - "github.com/go-openapi/swag" -) - -// RuntimeError runtime error -// swagger:model runtimeError -type RuntimeError struct { - - // code - Code int32 `json:"code,omitempty"` - - // details - Details []*ProtobufAny `json:"details"` - - // error - Error string `json:"error,omitempty"` - - // message - Message string `json:"message,omitempty"` -} - -// Validate validates this runtime error -func (m *RuntimeError) Validate(formats strfmt.Registry) error { - var res []error - - if err := m.validateDetails(formats); err != nil { - res = append(res, err) - } - - if len(res) > 0 { - return errors.CompositeValidationError(res...) - } - return nil -} - -func (m *RuntimeError) validateDetails(formats strfmt.Registry) error { - - if swag.IsZero(m.Details) { // not required - return nil - } - - for i := 0; i < len(m.Details); i++ { - if swag.IsZero(m.Details[i]) { // not required - continue - } - - if m.Details[i] != nil { - if err := m.Details[i].Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("details" + "." 
+ strconv.Itoa(i)) - } - return err - } - } - - } - - return nil -} - -// MarshalBinary interface implementation -func (m *RuntimeError) MarshalBinary() ([]byte, error) { - if m == nil { - return nil, nil - } - return swag.WriteJSON(m) -} - -// UnmarshalBinary interface implementation -func (m *RuntimeError) UnmarshalBinary(b []byte) error { - var res RuntimeError - if err := swag.ReadJSON(b, &res); err != nil { - return err - } - *m = res - return nil -} diff --git a/backend/api/v2beta1/go_http_client/visualization_model/v2beta1_visualization.go b/backend/api/v2beta1/go_http_client/visualization_model/v2beta1_visualization.go index effbc893b93..1a14dfd7bc2 100644 --- a/backend/api/v2beta1/go_http_client/visualization_model/v2beta1_visualization.go +++ b/backend/api/v2beta1/go_http_client/visualization_model/v2beta1_visualization.go @@ -6,13 +6,15 @@ package visualization_model // Editing this file might prove futile when you re-run the swagger generate command import ( - strfmt "github.com/go-openapi/strfmt" + "context" "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // V2beta1Visualization v2beta1 visualization +// // swagger:model v2beta1Visualization type V2beta1Visualization struct { @@ -36,7 +38,7 @@ type V2beta1Visualization struct { Source string `json:"source,omitempty"` // type - Type V2beta1VisualizationType `json:"type,omitempty"` + Type *V2beta1VisualizationType `json:"type,omitempty"` } // Validate validates this v2beta1 visualization @@ -54,16 +56,54 @@ func (m *V2beta1Visualization) Validate(formats strfmt.Registry) error { } func (m *V2beta1Visualization) validateType(formats strfmt.Registry) error { - if swag.IsZero(m.Type) { // not required return nil } - if err := m.Type.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("type") + if m.Type != nil { + if err := m.Type.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// ContextValidate validate this v2beta1 visualization based on the context it is used +func (m *V2beta1Visualization) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateType(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *V2beta1Visualization) contextValidateType(ctx context.Context, formats strfmt.Registry) error { + + if m.Type != nil { + + if swag.IsZero(m.Type) { // not required + return nil + } + + if err := m.Type.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err } - return err } return nil diff --git a/backend/api/v2beta1/go_http_client/visualization_model/v2beta1_visualization_type.go b/backend/api/v2beta1/go_http_client/visualization_model/v2beta1_visualization_type.go index a2bc71e3854..b8a2782097e 100644 --- a/backend/api/v2beta1/go_http_client/visualization_model/v2beta1_visualization_type.go +++ b/backend/api/v2beta1/go_http_client/visualization_model/v2beta1_visualization_type.go @@ -6,20 +6,30 @@ package visualization_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "context" "encoding/json" - strfmt "github.com/go-openapi/strfmt" - "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" "github.com/go-openapi/validate" ) // V2beta1VisualizationType Type of visualization to be generated. // This is required when creating the pipeline through CreateVisualization // API. +// // swagger:model v2beta1VisualizationType type V2beta1VisualizationType string +func NewV2beta1VisualizationType(value V2beta1VisualizationType) *V2beta1VisualizationType { + return &value +} + +// Pointer returns a pointer to a freshly-allocated V2beta1VisualizationType. +func (m V2beta1VisualizationType) Pointer() *V2beta1VisualizationType { + return &m +} + const ( // V2beta1VisualizationTypeROCCURVE captures enum value "ROC_CURVE" @@ -52,7 +62,7 @@ func init() { } func (m V2beta1VisualizationType) validateV2beta1VisualizationTypeEnum(path, location string, value V2beta1VisualizationType) error { - if err := validate.Enum(path, location, value, v2beta1VisualizationTypeEnum); err != nil { + if err := validate.EnumCase(path, location, value, v2beta1VisualizationTypeEnum, true); err != nil { return err } return nil @@ -72,3 +82,8 @@ func (m V2beta1VisualizationType) Validate(formats strfmt.Registry) error { } return nil } + +// ContextValidate validates this v2beta1 visualization type based on context it is used +func (m V2beta1VisualizationType) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v2beta1/healthz.proto b/backend/api/v2beta1/healthz.proto index 0ba677a54fb..5f26251e4e4 100644 --- a/backend/api/v2beta1/healthz.proto +++ b/backend/api/v2beta1/healthz.proto @@ -19,10 +19,9 @@ package kubeflow.pipelines.backend.api.v2beta1; import "google/api/annotations.proto"; import "google/protobuf/empty.proto"; -import "protoc-gen-swagger/options/annotations.proto"; -import "google/rpc/status.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; -option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { schemes: [1, 2], // http + https responses: { key: "default"; diff --git a/backend/api/v2beta1/pipeline.proto b/backend/api/v2beta1/pipeline.proto index 5a1127c2412..620638eead6 100644 --- a/backend/api/v2beta1/pipeline.proto +++ b/backend/api/v2beta1/pipeline.proto @@ -22,9 +22,9 @@ import "google/protobuf/timestamp.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/struct.proto"; 
import "google/rpc/status.proto"; -import "protoc-gen-swagger/options/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; -option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { schemes: [1, 2], // http + https responses: { key: "default"; @@ -89,7 +89,8 @@ service PipelineService { }; } - // Deletes an empty pipeline by ID. Returns error if the pipeline has pipeline versions. + // Deletes a pipeline by ID. If cascade is false (default), it returns an error if the + // pipeline has any versions. If cascade is true, it will also delete all pipeline versions. rpc DeletePipeline(DeletePipelineRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/apis/v2beta1/pipelines/{pipeline_id}" @@ -200,7 +201,7 @@ message PipelineVersion { // Output. The pipeline spec for the pipeline version. google.protobuf.Struct pipeline_spec = 7; - + // In case any error happens retrieving a pipeline version field, only // pipeline ID, pipeline version ID, and the error message are returned. // Client has the flexibility of choosing how to handle the error. @@ -269,6 +270,10 @@ message GetPipelineByNameRequest { message DeletePipelineRequest { // Required input. ID of the pipeline to be deleted. string pipeline_id = 1; + + // Optional. If true, the pipeline and all its versions will be deleted. + // If false (default), only the pipeline will be deleted if it has no versions. + bool cascade = 2; } message CreatePipelineAndVersionRequest { diff --git a/backend/api/v2beta1/python_http_client/.gitlab-ci.yml b/backend/api/v2beta1/python_http_client/.gitlab-ci.yml deleted file mode 100644 index 1098a4acf21..00000000000 --- a/backend/api/v2beta1/python_http_client/.gitlab-ci.yml +++ /dev/null @@ -1,33 +0,0 @@ -# ref: https://docs.gitlab.com/ee/ci/README.html - -stages: - - test - -.nosetest: - stage: test - script: - - pip install -r requirements.txt - - pip install -r test-requirements.txt - - pytest --cov=kfp_server_api - -nosetest-2.7: - extends: .nosetest - image: python:2.7-alpine -nosetest-3.3: - extends: .nosetest - image: python:3.3-alpine -nosetest-3.4: - extends: .nosetest - image: python:3.4-alpine -nosetest-3.5: - extends: .nosetest - image: python:3.5-alpine -nosetest-3.6: - extends: .nosetest - image: python:3.6-alpine -nosetest-3.7: - extends: .nosetest - image: python:3.7-alpine -nosetest-3.8: - extends: .nosetest - image: python:3.8-alpine diff --git a/backend/api/v2beta1/python_http_client/.travis.yml b/backend/api/v2beta1/python_http_client/.travis.yml deleted file mode 100644 index 7f278fb3d11..00000000000 --- a/backend/api/v2beta1/python_http_client/.travis.yml +++ /dev/null @@ -1,17 +0,0 @@ -# ref: https://docs.travis-ci.com/user/languages/python -language: python -python: - - "2.7" - - "3.2" - - "3.3" - - "3.4" - - "3.5" - - "3.6" - - "3.7" - - "3.8" -# command to install dependencies -install: - - "pip install -r requirements.txt" - - "pip install -r test-requirements.txt" -# command to run tests -script: pytest --cov=kfp_server_api diff --git a/backend/api/v2beta1/python_http_client/README.md b/backend/api/v2beta1/python_http_client/README.md index 2717097bc0b..b19d435e3dd 100644 --- a/backend/api/v2beta1/python_http_client/README.md +++ b/backend/api/v2beta1/python_http_client/README.md @@ -3,8 +3,8 @@ This file contains REST API specification for Kubeflow Pipelines. 
The file is au This Python package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project: -- API version: 2.5.0 -- Package version: 2.5.0 +- API version: 2.14.3 +- Package version: 2.14.3 - Build package: org.openapitools.codegen.languages.PythonClientCodegen For more information, please visit [https://www.google.com](https://www.google.com) @@ -78,16 +78,16 @@ configuration = kfp_server_api.Configuration( # Enter a context with an instance of the API client with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = kfp_server_api.AuthServiceApi(api_client) - namespace = 'namespace_example' # str | (optional) -resources = 'UNASSIGNED_RESOURCES' # str | (optional) (default to 'UNASSIGNED_RESOURCES') -verb = 'UNASSIGNED_VERB' # str | (optional) (default to 'UNASSIGNED_VERB') + api_instance = kfp_server_api.ArtifactServiceApi(api_client) + artifact_id = 'artifact_id_example' # str | Required. The ID of the artifact to be retrieved. +view = 'ARTIFACT_VIEW_UNSPECIFIED' # str | Optional. Set to \"DOWNLOAD\" to included a signed URL with an expiry (default 15 seconds, unless configured other wise). This URL can be used to download the Artifact directly from the Artifact's storage provider. Set to \"BASIC\" to exclude the download_url from server responses, thus preventing the creation of any signed url. Defaults to BASIC. - ARTIFACT_VIEW_UNSPECIFIED: Not specified, equivalent to BASIC. - BASIC: Server responses excludes download_url - DOWNLOAD: Server responses include download_url - RENDER: Server response includes a signed URL, allowing in-browser rendering or preview of the artifact. (optional) (default to 'ARTIFACT_VIEW_UNSPECIFIED') try: - api_response = api_instance.auth_service_authorize(namespace=namespace, resources=resources, verb=verb) + # Finds a specific Artifact by ID. + api_response = api_instance.artifact_service_get_artifact(artifact_id, view=view) pprint(api_response) except ApiException as e: - print("Exception when calling AuthServiceApi->auth_service_authorize: %s\n" % e) + print("Exception when calling ArtifactServiceApi->artifact_service_get_artifact: %s\n" % e) ``` @@ -97,6 +97,8 @@ All URIs are relative to *http://localhost* Class | Method | HTTP request | Description ------------ | ------------- | ------------- | ------------- +*ArtifactServiceApi* | [**artifact_service_get_artifact**](docs/ArtifactServiceApi.md#artifact_service_get_artifact) | **GET** /apis/v2beta1/artifacts/{artifact_id} | Finds a specific Artifact by ID. +*ArtifactServiceApi* | [**artifact_service_list_artifacts**](docs/ArtifactServiceApi.md#artifact_service_list_artifacts) | **GET** /apis/v2beta1/artifacts | Finds all artifacts within the specified namespace. Namespace field is required. In multi-user mode, the caller is required to have RBAC verb \"list\" on the \"artifacts\" resource for the specified namespace. *AuthServiceApi* | [**auth_service_authorize**](docs/AuthServiceApi.md#auth_service_authorize) | **GET** /apis/v2beta1/auth | *ExperimentServiceApi* | [**experiment_service_archive_experiment**](docs/ExperimentServiceApi.md#experiment_service_archive_experiment) | **POST** /apis/v2beta1/experiments/{experiment_id}:archive | Archives an experiment and the experiment's runs and recurring runs. *ExperimentServiceApi* | [**experiment_service_create_experiment**](docs/ExperimentServiceApi.md#experiment_service_create_experiment) | **POST** /apis/v2beta1/experiments | Creates a new experiment. 
@@ -108,7 +110,7 @@ Class | Method | HTTP request | Description *PipelineServiceApi* | [**pipeline_service_create_pipeline**](docs/PipelineServiceApi.md#pipeline_service_create_pipeline) | **POST** /apis/v2beta1/pipelines | Creates a pipeline. *PipelineServiceApi* | [**pipeline_service_create_pipeline_and_version**](docs/PipelineServiceApi.md#pipeline_service_create_pipeline_and_version) | **POST** /apis/v2beta1/pipelines/create | Creates a new pipeline and a new pipeline version in a single transaction. *PipelineServiceApi* | [**pipeline_service_create_pipeline_version**](docs/PipelineServiceApi.md#pipeline_service_create_pipeline_version) | **POST** /apis/v2beta1/pipelines/{pipeline_id}/versions | Adds a pipeline version to the specified pipeline ID. -*PipelineServiceApi* | [**pipeline_service_delete_pipeline**](docs/PipelineServiceApi.md#pipeline_service_delete_pipeline) | **DELETE** /apis/v2beta1/pipelines/{pipeline_id} | Deletes an empty pipeline by ID. Returns error if the pipeline has pipeline versions. +*PipelineServiceApi* | [**pipeline_service_delete_pipeline**](docs/PipelineServiceApi.md#pipeline_service_delete_pipeline) | **DELETE** /apis/v2beta1/pipelines/{pipeline_id} | Deletes a pipeline by ID. If cascade is false (default), it returns an error if the pipeline has any versions. If cascade is true, it will also delete all pipeline versions. *PipelineServiceApi* | [**pipeline_service_delete_pipeline_version**](docs/PipelineServiceApi.md#pipeline_service_delete_pipeline_version) | **DELETE** /apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id} | Deletes a specific pipeline version by pipeline version ID and pipeline ID. *PipelineServiceApi* | [**pipeline_service_get_pipeline**](docs/PipelineServiceApi.md#pipeline_service_get_pipeline) | **GET** /apis/v2beta1/pipelines/{pipeline_id} | Finds a specific pipeline by ID. *PipelineServiceApi* | [**pipeline_service_get_pipeline_by_name**](docs/PipelineServiceApi.md#pipeline_service_get_pipeline_by_name) | **GET** /apis/v2beta1/pipelines/names/{name} | Finds a specific pipeline by name and namespace. @@ -126,10 +128,10 @@ Class | Method | HTTP request | Description *ReportServiceApi* | [**report_service_report_scheduled_workflow**](docs/ReportServiceApi.md#report_service_report_scheduled_workflow) | **POST** /apis/v2beta1/scheduledworkflows | *ReportServiceApi* | [**report_service_report_workflow**](docs/ReportServiceApi.md#report_service_report_workflow) | **POST** /apis/v2beta1/workflows | *RunServiceApi* | [**run_service_archive_run**](docs/RunServiceApi.md#run_service_archive_run) | **POST** /apis/v2beta1/runs/{run_id}:archive | Archives a run in an experiment given by run ID and experiment ID. -*RunServiceApi* | [**run_service_create_run**](docs/RunServiceApi.md#run_service_create_run) | **POST** /apis/v2beta1/runs | Creates a new run in an experiment specified by experiment ID. If experiment ID is not specified, the run is created in the default experiment. +*RunServiceApi* | [**run_service_create_run**](docs/RunServiceApi.md#run_service_create_run) | **POST** /apis/v2beta1/runs | Creates a new run in an experiment specified by experiment ID. If experiment ID is not specified, the run is created in the default experiment. *RunServiceApi* | [**run_service_delete_run**](docs/RunServiceApi.md#run_service_delete_run) | **DELETE** /apis/v2beta1/runs/{run_id} | Deletes a run in an experiment given by run ID and experiment ID. 
*RunServiceApi* | [**run_service_get_run**](docs/RunServiceApi.md#run_service_get_run) | **GET** /apis/v2beta1/runs/{run_id} | Finds a specific run by ID. -*RunServiceApi* | [**run_service_list_runs**](docs/RunServiceApi.md#run_service_list_runs) | **GET** /apis/v2beta1/runs | Finds all runs in an experiment given by experiment ID. If experiment id is not specified, finds all runs across all experiments. +*RunServiceApi* | [**run_service_list_runs**](docs/RunServiceApi.md#run_service_list_runs) | **GET** /apis/v2beta1/runs | Finds all runs in an experiment given by experiment ID. If experiment id is not specified, finds all runs across all experiments. *RunServiceApi* | [**run_service_read_artifact**](docs/RunServiceApi.md#run_service_read_artifact) | **GET** /apis/v2beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read | Finds artifact data in a run. *RunServiceApi* | [**run_service_retry_run**](docs/RunServiceApi.md#run_service_retry_run) | **POST** /apis/v2beta1/runs/{run_id}:retry | Re-initiates a failed or terminated run. *RunServiceApi* | [**run_service_terminate_run**](docs/RunServiceApi.md#run_service_terminate_run) | **POST** /apis/v2beta1/runs/{run_id}:terminate | Terminates an active run. @@ -141,7 +143,9 @@ Class | Method | HTTP request | Description - [AuthorizeRequestResources](docs/AuthorizeRequestResources.md) - [AuthorizeRequestVerb](docs/AuthorizeRequestVerb.md) + - [GetArtifactRequestArtifactView](docs/GetArtifactRequestArtifactView.md) - [GooglerpcStatus](docs/GooglerpcStatus.md) + - [ListArtifactRequestField](docs/ListArtifactRequestField.md) - [PipelineTaskDetailChildTask](docs/PipelineTaskDetailChildTask.md) - [PredicateIntValues](docs/PredicateIntValues.md) - [PredicateLongValues](docs/PredicateLongValues.md) @@ -149,7 +153,7 @@ Class | Method | HTTP request | Description - [ProtobufAny](docs/ProtobufAny.md) - [ProtobufNullValue](docs/ProtobufNullValue.md) - [RecurringRunMode](docs/RecurringRunMode.md) - - [RuntimeError](docs/RuntimeError.md) + - [V2beta1Artifact](docs/V2beta1Artifact.md) - [V2beta1ArtifactList](docs/V2beta1ArtifactList.md) - [V2beta1CreatePipelineAndVersionRequest](docs/V2beta1CreatePipelineAndVersionRequest.md) - [V2beta1CronSchedule](docs/V2beta1CronSchedule.md) @@ -157,6 +161,7 @@ Class | Method | HTTP request | Description - [V2beta1ExperimentStorageState](docs/V2beta1ExperimentStorageState.md) - [V2beta1Filter](docs/V2beta1Filter.md) - [V2beta1GetHealthzResponse](docs/V2beta1GetHealthzResponse.md) + - [V2beta1ListArtifactResponse](docs/V2beta1ListArtifactResponse.md) - [V2beta1ListExperimentsResponse](docs/V2beta1ListExperimentsResponse.md) - [V2beta1ListPipelineVersionsResponse](docs/V2beta1ListPipelineVersionsResponse.md) - [V2beta1ListPipelinesResponse](docs/V2beta1ListPipelinesResponse.md) diff --git a/backend/api/v2beta1/python_http_client/docs/ArtifactServiceApi.md b/backend/api/v2beta1/python_http_client/docs/ArtifactServiceApi.md new file mode 100644 index 00000000000..2fe8770e29f --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/ArtifactServiceApi.md @@ -0,0 +1,172 @@ +# kfp_server_api.ArtifactServiceApi + +All URIs are relative to *http://localhost* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**artifact_service_get_artifact**](ArtifactServiceApi.md#artifact_service_get_artifact) | **GET** /apis/v2beta1/artifacts/{artifact_id} | Finds a specific Artifact by ID. 
+[**artifact_service_list_artifacts**](ArtifactServiceApi.md#artifact_service_list_artifacts) | **GET** /apis/v2beta1/artifacts | Finds all artifacts within the specified namespace. Namespace field is required. In multi-user mode, the caller is required to have RBAC verb \"list\" on the \"artifacts\" resource for the specified namespace. + + +# **artifact_service_get_artifact** +> V2beta1Artifact artifact_service_get_artifact(artifact_id, view=view) + +Finds a specific Artifact by ID. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.ArtifactServiceApi(api_client) + artifact_id = 'artifact_id_example' # str | Required. The ID of the artifact to be retrieved. +view = 'ARTIFACT_VIEW_UNSPECIFIED' # str | Optional. Set to \"DOWNLOAD\" to included a signed URL with an expiry (default 15 seconds, unless configured other wise). This URL can be used to download the Artifact directly from the Artifact's storage provider. Set to \"BASIC\" to exclude the download_url from server responses, thus preventing the creation of any signed url. Defaults to BASIC. - ARTIFACT_VIEW_UNSPECIFIED: Not specified, equivalent to BASIC. - BASIC: Server responses excludes download_url - DOWNLOAD: Server responses include download_url - RENDER: Server response includes a signed URL, allowing in-browser rendering or preview of the artifact. (optional) (default to 'ARTIFACT_VIEW_UNSPECIFIED') + + try: + # Finds a specific Artifact by ID. + api_response = api_instance.artifact_service_get_artifact(artifact_id, view=view) + pprint(api_response) + except ApiException as e: + print("Exception when calling ArtifactServiceApi->artifact_service_get_artifact: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **artifact_id** | **str**| Required. The ID of the artifact to be retrieved. | + **view** | **str**| Optional. Set to \"DOWNLOAD\" to included a signed URL with an expiry (default 15 seconds, unless configured other wise). This URL can be used to download the Artifact directly from the Artifact's storage provider. Set to \"BASIC\" to exclude the download_url from server responses, thus preventing the creation of any signed url. Defaults to BASIC. - ARTIFACT_VIEW_UNSPECIFIED: Not specified, equivalent to BASIC. 
- BASIC: Server responses excludes download_url - DOWNLOAD: Server responses include download_url - RENDER: Server response includes a signed URL, allowing in-browser rendering or preview of the artifact. | [optional] [default to 'ARTIFACT_VIEW_UNSPECIFIED'] + +### Return type + +[**V2beta1Artifact**](V2beta1Artifact.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | An unexpected error response. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **artifact_service_list_artifacts** +> V2beta1ListArtifactResponse artifact_service_list_artifacts(max_result_size=max_result_size, order_by_field=order_by_field, order_by=order_by, next_page_token=next_page_token, namespace=namespace) + +Finds all artifacts within the specified namespace. Namespace field is required. In multi-user mode, the caller is required to have RBAC verb \"list\" on the \"artifacts\" resource for the specified namespace. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.ArtifactServiceApi(api_client) + max_result_size = 56 # int | Optional. Max number of resources to return in the result. A value of zero or less will result in the default (20). The API implementation also enforces an upper-bound of 100, and picks the minimum between this value and the one specified here. [default = 20] (optional) +order_by_field = 'FIELD_UNSPECIFIED' # str | Optional. Ordering field. [default = ID] (optional) (default to 'FIELD_UNSPECIFIED') +order_by = 'order_by_example' # str | Optional. Can be either \"asc\" (ascending) or \"desc\" (descending). [default = asc] (optional) +next_page_token = 'next_page_token_example' # str | Optional. The next_page_token value returned from a previous List request, if any. (optional) +namespace = 'namespace_example' # str | Required. Namespace of the Artifact's context. (optional) + + try: + # Finds all artifacts within the specified namespace. Namespace field is required. 
In multi-user mode, the caller is required to have RBAC verb \"list\" on the \"artifacts\" resource for the specified namespace. + api_response = api_instance.artifact_service_list_artifacts(max_result_size=max_result_size, order_by_field=order_by_field, order_by=order_by, next_page_token=next_page_token, namespace=namespace) + pprint(api_response) + except ApiException as e: + print("Exception when calling ArtifactServiceApi->artifact_service_list_artifacts: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **max_result_size** | **int**| Optional. Max number of resources to return in the result. A value of zero or less will result in the default (20). The API implementation also enforces an upper-bound of 100, and picks the minimum between this value and the one specified here. [default = 20] | [optional] + **order_by_field** | **str**| Optional. Ordering field. [default = ID] | [optional] [default to 'FIELD_UNSPECIFIED'] + **order_by** | **str**| Optional. Can be either \"asc\" (ascending) or \"desc\" (descending). [default = asc] | [optional] + **next_page_token** | **str**| Optional. The next_page_token value returned from a previous List request, if any. | [optional] + **namespace** | **str**| Required. Namespace of the Artifact's context. | [optional] + +### Return type + +[**V2beta1ListArtifactResponse**](V2beta1ListArtifactResponse.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | An unexpected error response. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/backend/api/v2beta1/python_http_client/docs/AuthServiceApi.md b/backend/api/v2beta1/python_http_client/docs/AuthServiceApi.md index 0863023fecf..2ea545ec86d 100644 --- a/backend/api/v2beta1/python_http_client/docs/AuthServiceApi.md +++ b/backend/api/v2beta1/python_http_client/docs/AuthServiceApi.md @@ -46,9 +46,9 @@ configuration = kfp_server_api.Configuration( with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = kfp_server_api.AuthServiceApi(api_client) - namespace = 'namespace_example' # str | (optional) -resources = 'UNASSIGNED_RESOURCES' # str | (optional) (default to 'UNASSIGNED_RESOURCES') -verb = 'UNASSIGNED_VERB' # str | (optional) (default to 'UNASSIGNED_VERB') + namespace = 'namespace_example' # str | Namespace the resource belongs to. (optional) +resources = 'UNASSIGNED_RESOURCES' # str | Resource type asking for authorization. (optional) (default to 'UNASSIGNED_RESOURCES') +verb = 'UNASSIGNED_VERB' # str | Verb on the resource asking for authorization. 
(optional) (default to 'UNASSIGNED_VERB') try: api_response = api_instance.auth_service_authorize(namespace=namespace, resources=resources, verb=verb) @@ -61,9 +61,9 @@ verb = 'UNASSIGNED_VERB' # str | (optional) (default to 'UNASSIGNED_VERB') Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **namespace** | **str**| | [optional] - **resources** | **str**| | [optional] [default to 'UNASSIGNED_RESOURCES'] - **verb** | **str**| | [optional] [default to 'UNASSIGNED_VERB'] + **namespace** | **str**| Namespace the resource belongs to. | [optional] + **resources** | **str**| Resource type asking for authorization. | [optional] [default to 'UNASSIGNED_RESOURCES'] + **verb** | **str**| Verb on the resource asking for authorization. | [optional] [default to 'UNASSIGNED_VERB'] ### Return type diff --git a/backend/api/v2beta1/python_http_client/docs/ExperimentServiceApi.md b/backend/api/v2beta1/python_http_client/docs/ExperimentServiceApi.md index 7af90671852..519e5f34c99 100644 --- a/backend/api/v2beta1/python_http_client/docs/ExperimentServiceApi.md +++ b/backend/api/v2beta1/python_http_client/docs/ExperimentServiceApi.md @@ -89,7 +89,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **experiment_service_create_experiment** -> V2beta1Experiment experiment_service_create_experiment(body) +> V2beta1Experiment experiment_service_create_experiment(experiment) Creates a new experiment. @@ -127,11 +127,11 @@ configuration = kfp_server_api.Configuration( with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = kfp_server_api.ExperimentServiceApi(api_client) - body = kfp_server_api.V2beta1Experiment() # V2beta1Experiment | The experiment to be created. + experiment = kfp_server_api.V2beta1Experiment() # V2beta1Experiment | The experiment to be created. try: # Creates a new experiment. - api_response = api_instance.experiment_service_create_experiment(body) + api_response = api_instance.experiment_service_create_experiment(experiment) pprint(api_response) except ApiException as e: print("Exception when calling ExperimentServiceApi->experiment_service_create_experiment: %s\n" % e) @@ -141,7 +141,7 @@ with kfp_server_api.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **body** | [**V2beta1Experiment**](V2beta1Experiment.md)| The experiment to be created. | + **experiment** | [**V2beta1Experiment**](V2beta1Experiment.md)| The experiment to be created. | ### Return type diff --git a/backend/api/v2beta1/python_http_client/docs/GetArtifactRequestArtifactView.md b/backend/api/v2beta1/python_http_client/docs/GetArtifactRequestArtifactView.md new file mode 100644 index 00000000000..427832f227d --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/GetArtifactRequestArtifactView.md @@ -0,0 +1,10 @@ +# GetArtifactRequestArtifactView + + - ARTIFACT_VIEW_UNSPECIFIED: Not specified, equivalent to BASIC. - BASIC: Server responses excludes download_url - DOWNLOAD: Server responses include download_url - RENDER: Server response includes a signed URL, allowing in-browser rendering or preview of the artifact. 
+## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/ListArtifactRequestField.md b/backend/api/v2beta1/python_http_client/docs/ListArtifactRequestField.md new file mode 100644 index 00000000000..41d44d620f5 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/ListArtifactRequestField.md @@ -0,0 +1,9 @@ +# ListArtifactRequestField + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/PipelineServiceApi.md b/backend/api/v2beta1/python_http_client/docs/PipelineServiceApi.md index dbedc268c56..d5af5b11472 100644 --- a/backend/api/v2beta1/python_http_client/docs/PipelineServiceApi.md +++ b/backend/api/v2beta1/python_http_client/docs/PipelineServiceApi.md @@ -7,7 +7,7 @@ Method | HTTP request | Description [**pipeline_service_create_pipeline**](PipelineServiceApi.md#pipeline_service_create_pipeline) | **POST** /apis/v2beta1/pipelines | Creates a pipeline. [**pipeline_service_create_pipeline_and_version**](PipelineServiceApi.md#pipeline_service_create_pipeline_and_version) | **POST** /apis/v2beta1/pipelines/create | Creates a new pipeline and a new pipeline version in a single transaction. [**pipeline_service_create_pipeline_version**](PipelineServiceApi.md#pipeline_service_create_pipeline_version) | **POST** /apis/v2beta1/pipelines/{pipeline_id}/versions | Adds a pipeline version to the specified pipeline ID. -[**pipeline_service_delete_pipeline**](PipelineServiceApi.md#pipeline_service_delete_pipeline) | **DELETE** /apis/v2beta1/pipelines/{pipeline_id} | Deletes an empty pipeline by ID. Returns error if the pipeline has pipeline versions. +[**pipeline_service_delete_pipeline**](PipelineServiceApi.md#pipeline_service_delete_pipeline) | **DELETE** /apis/v2beta1/pipelines/{pipeline_id} | Deletes a pipeline by ID. If cascade is false (default), it returns an error if the pipeline has any versions. If cascade is true, it will also delete all pipeline versions. [**pipeline_service_delete_pipeline_version**](PipelineServiceApi.md#pipeline_service_delete_pipeline_version) | **DELETE** /apis/v2beta1/pipelines/{pipeline_id}/versions/{pipeline_version_id} | Deletes a specific pipeline version by pipeline version ID and pipeline ID. [**pipeline_service_get_pipeline**](PipelineServiceApi.md#pipeline_service_get_pipeline) | **GET** /apis/v2beta1/pipelines/{pipeline_id} | Finds a specific pipeline by ID. [**pipeline_service_get_pipeline_by_name**](PipelineServiceApi.md#pipeline_service_get_pipeline_by_name) | **GET** /apis/v2beta1/pipelines/names/{name} | Finds a specific pipeline by name and namespace. @@ -17,7 +17,7 @@ Method | HTTP request | Description # **pipeline_service_create_pipeline** -> V2beta1Pipeline pipeline_service_create_pipeline(body) +> V2beta1Pipeline pipeline_service_create_pipeline(pipeline) Creates a pipeline. 
@@ -55,11 +55,11 @@ configuration = kfp_server_api.Configuration( with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = kfp_server_api.PipelineServiceApi(api_client) - body = kfp_server_api.V2beta1Pipeline() # V2beta1Pipeline | Required input. Pipeline that needs to be created. + pipeline = kfp_server_api.V2beta1Pipeline() # V2beta1Pipeline | Required input. Pipeline that needs to be created. try: # Creates a pipeline. - api_response = api_instance.pipeline_service_create_pipeline(body) + api_response = api_instance.pipeline_service_create_pipeline(pipeline) pprint(api_response) except ApiException as e: print("Exception when calling PipelineServiceApi->pipeline_service_create_pipeline: %s\n" % e) @@ -69,7 +69,7 @@ with kfp_server_api.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **body** | [**V2beta1Pipeline**](V2beta1Pipeline.md)| Required input. Pipeline that needs to be created. | + **pipeline** | [**V2beta1Pipeline**](V2beta1Pipeline.md)| Required input. Pipeline that needs to be created. | ### Return type @@ -169,7 +169,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **pipeline_service_create_pipeline_version** -> V2beta1PipelineVersion pipeline_service_create_pipeline_version(pipeline_id, body) +> V2beta1PipelineVersion pipeline_service_create_pipeline_version(pipeline_id, pipeline_version) Adds a pipeline version to the specified pipeline ID. @@ -208,11 +208,11 @@ with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = kfp_server_api.PipelineServiceApi(api_client) pipeline_id = 'pipeline_id_example' # str | Required input. ID of the parent pipeline. -body = kfp_server_api.V2beta1PipelineVersion() # V2beta1PipelineVersion | Required input. Pipeline version ID to be created. +pipeline_version = kfp_server_api.V2beta1PipelineVersion() # V2beta1PipelineVersion | Required input. Pipeline version ID to be created. try: # Adds a pipeline version to the specified pipeline ID. - api_response = api_instance.pipeline_service_create_pipeline_version(pipeline_id, body) + api_response = api_instance.pipeline_service_create_pipeline_version(pipeline_id, pipeline_version) pprint(api_response) except ApiException as e: print("Exception when calling PipelineServiceApi->pipeline_service_create_pipeline_version: %s\n" % e) @@ -223,7 +223,7 @@ body = kfp_server_api.V2beta1PipelineVersion() # V2beta1PipelineVersion | Requir Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **pipeline_id** | **str**| Required input. ID of the parent pipeline. | - **body** | [**V2beta1PipelineVersion**](V2beta1PipelineVersion.md)| Required input. Pipeline version ID to be created. | + **pipeline_version** | [**V2beta1PipelineVersion**](V2beta1PipelineVersion.md)| Required input. Pipeline version ID to be created. 
| ### Return type @@ -247,9 +247,9 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **pipeline_service_delete_pipeline** -> object pipeline_service_delete_pipeline(pipeline_id) +> object pipeline_service_delete_pipeline(pipeline_id, cascade=cascade) -Deletes an empty pipeline by ID. Returns error if the pipeline has pipeline versions. +Deletes a pipeline by ID. If cascade is false (default), it returns an error if the pipeline has any versions. If cascade is true, it will also delete all pipeline versions. ### Example @@ -286,10 +286,11 @@ with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = kfp_server_api.PipelineServiceApi(api_client) pipeline_id = 'pipeline_id_example' # str | Required input. ID of the pipeline to be deleted. +cascade = True # bool | Optional. If true, the pipeline and all its versions will be deleted. If false (default), only the pipeline will be deleted if it has no versions. (optional) try: - # Deletes an empty pipeline by ID. Returns error if the pipeline has pipeline versions. - api_response = api_instance.pipeline_service_delete_pipeline(pipeline_id) + # Deletes a pipeline by ID. If cascade is false (default), it returns an error if the pipeline has any versions. If cascade is true, it will also delete all pipeline versions. + api_response = api_instance.pipeline_service_delete_pipeline(pipeline_id, cascade=cascade) pprint(api_response) except ApiException as e: print("Exception when calling PipelineServiceApi->pipeline_service_delete_pipeline: %s\n" % e) @@ -300,6 +301,7 @@ with kfp_server_api.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **pipeline_id** | **str**| Required input. ID of the pipeline to be deleted. | + **cascade** | **bool**| Optional. If true, the pipeline and all its versions will be deleted. If false (default), only the pipeline will be deleted if it has no versions. | [optional] ### Return type diff --git a/backend/api/v2beta1/python_http_client/docs/PipelineUploadServiceApi.md b/backend/api/v2beta1/python_http_client/docs/PipelineUploadServiceApi.md index d38a51ae3b3..f98341a48ee 100644 --- a/backend/api/v2beta1/python_http_client/docs/PipelineUploadServiceApi.md +++ b/backend/api/v2beta1/python_http_client/docs/PipelineUploadServiceApi.md @@ -9,7 +9,7 @@ Method | HTTP request | Description # **upload_pipeline** -> V2beta1Pipeline upload_pipeline(uploadfile, name=name, description=description, namespace=namespace) +> V2beta1Pipeline upload_pipeline(uploadfile, name=name, display_name=display_name, description=description, namespace=namespace) @@ -49,11 +49,12 @@ with kfp_server_api.ApiClient(configuration) as api_client: api_instance = kfp_server_api.PipelineUploadServiceApi(api_client) uploadfile = '/path/to/file' # file | The pipeline to upload. Maximum size of 32MB is supported. 
name = 'name_example' # str | (optional) +display_name = 'display_name_example' # str | (optional) description = 'description_example' # str | (optional) namespace = 'namespace_example' # str | (optional) try: - api_response = api_instance.upload_pipeline(uploadfile, name=name, description=description, namespace=namespace) + api_response = api_instance.upload_pipeline(uploadfile, name=name, display_name=display_name, description=description, namespace=namespace) pprint(api_response) except ApiException as e: print("Exception when calling PipelineUploadServiceApi->upload_pipeline: %s\n" % e) @@ -65,6 +66,7 @@ Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **uploadfile** | **file**| The pipeline to upload. Maximum size of 32MB is supported. | **name** | **str**| | [optional] + **display_name** | **str**| | [optional] **description** | **str**| | [optional] **namespace** | **str**| | [optional] @@ -90,7 +92,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **upload_pipeline_version** -> V2beta1PipelineVersion upload_pipeline_version(uploadfile, name=name, pipelineid=pipelineid, description=description) +> V2beta1PipelineVersion upload_pipeline_version(uploadfile, name=name, display_name=display_name, pipelineid=pipelineid, description=description) @@ -130,11 +132,12 @@ with kfp_server_api.ApiClient(configuration) as api_client: api_instance = kfp_server_api.PipelineUploadServiceApi(api_client) uploadfile = '/path/to/file' # file | The pipeline to upload. Maximum size of 32MB is supported. name = 'name_example' # str | (optional) +display_name = 'display_name_example' # str | (optional) pipelineid = 'pipelineid_example' # str | (optional) description = 'description_example' # str | (optional) try: - api_response = api_instance.upload_pipeline_version(uploadfile, name=name, pipelineid=pipelineid, description=description) + api_response = api_instance.upload_pipeline_version(uploadfile, name=name, display_name=display_name, pipelineid=pipelineid, description=description) pprint(api_response) except ApiException as e: print("Exception when calling PipelineUploadServiceApi->upload_pipeline_version: %s\n" % e) @@ -146,6 +149,7 @@ Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **uploadfile** | **file**| The pipeline to upload. Maximum size of 32MB is supported. | **name** | **str**| | [optional] + **display_name** | **str**| | [optional] **pipelineid** | **str**| | [optional] **description** | **str**| | [optional] diff --git a/backend/api/v2beta1/python_http_client/docs/ProtobufAny.md b/backend/api/v2beta1/python_http_client/docs/ProtobufAny.md index 8b34639a9ca..014cca6dd31 100644 --- a/backend/api/v2beta1/python_http_client/docs/ProtobufAny.md +++ b/backend/api/v2beta1/python_http_client/docs/ProtobufAny.md @@ -1,10 +1,11 @@ # ProtobufAny -`Any` contains an arbitrary serialized protocol buffer message along with a URL that describes the type of the serialized message. Protobuf library provides support to pack/unpack Any values in the form of utility functions or additional generated methods of the Any type. Example 1: Pack and unpack a message in C++. Foo foo = ...; Any any; any.PackFrom(foo); ... if (any.UnpackTo(&foo)) { ... } Example 2: Pack and unpack a message in Java. Foo foo = ...; Any any = Any.pack(foo); ... 
if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } Example 3: Pack and unpack a message in Python. foo = Foo(...) any = Any() any.Pack(foo) ... if any.Is(Foo.DESCRIPTOR): any.Unpack(foo) ... Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} any, err := anypb.New(foo) if err != nil { ... } ... foo := &pb.Foo{} if err := any.UnmarshalTo(foo); err != nil { ... } The pack methods provided by protobuf library will by default use 'type.googleapis.com/full.type.name' as the type URL and the unpack methods only use the fully qualified type name after the last '/' in the type URL, for example \"foo.bar.com/x/y.z\" will yield type name \"y.z\". JSON The JSON representation of an `Any` value uses the regular representation of the deserialized, embedded message, with an additional field `@type` which contains the type URL. Example: package google.profile; message Person { string first_name = 1; string last_name = 2; } { \"@type\": \"type.googleapis.com/google.profile.Person\", \"firstName\": , \"lastName\": } If the embedded message type is well-known and has a custom JSON representation, that representation will be embedded adding a field `value` which holds the custom JSON in addition to the `@type` field. Example (for message [google.protobuf.Duration][]): { \"@type\": \"type.googleapis.com/google.protobuf.Duration\", \"value\": \"1.212s\" } +`Any` contains an arbitrary serialized protocol buffer message along with a URL that describes the type of the serialized message. Protobuf library provides support to pack/unpack Any values in the form of utility functions or additional generated methods of the Any type. Example 1: Pack and unpack a message in C++. Foo foo = ...; Any any; any.PackFrom(foo); ... if (any.UnpackTo(&foo)) { ... } Example 2: Pack and unpack a message in Java. Foo foo = ...; Any any = Any.pack(foo); ... if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } // or ... if (any.isSameTypeAs(Foo.getDefaultInstance())) { foo = any.unpack(Foo.getDefaultInstance()); } Example 3: Pack and unpack a message in Python. foo = Foo(...) any = Any() any.Pack(foo) ... if any.Is(Foo.DESCRIPTOR): any.Unpack(foo) ... Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} any, err := anypb.New(foo) if err != nil { ... } ... foo := &pb.Foo{} if err := any.UnmarshalTo(foo); err != nil { ... } The pack methods provided by protobuf library will by default use 'type.googleapis.com/full.type.name' as the type URL and the unpack methods only use the fully qualified type name after the last '/' in the type URL, for example \"foo.bar.com/x/y.z\" will yield type name \"y.z\". JSON ==== The JSON representation of an `Any` value uses the regular representation of the deserialized, embedded message, with an additional field `@type` which contains the type URL. Example: package google.profile; message Person { string first_name = 1; string last_name = 2; } { \"@type\": \"type.googleapis.com/google.profile.Person\", \"firstName\": , \"lastName\": } If the embedded message type is well-known and has a custom JSON representation, that representation will be embedded adding a field `value` which holds the custom JSON in addition to the `@type` field. 
Example (for message [google.protobuf.Duration][]): { \"@type\": \"type.googleapis.com/google.protobuf.Duration\", \"value\": \"1.212s\" } ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**type_url** | **str** | A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. This string must contain at least one \"/\" character. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. | [optional] +**type** | **str** | A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. This string must contain at least one \"/\" character. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. As of May 2023, there are no widely used type server implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. | [optional] +**type_url** | **str** | A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. 
However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. | [optional] **value** | **str** | Must be a valid serialized protocol buffer of the above specified type. | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/backend/api/v2beta1/python_http_client/docs/ProtobufNullValue.md b/backend/api/v2beta1/python_http_client/docs/ProtobufNullValue.md index c8e9631389e..c3c32b64fec 100644 --- a/backend/api/v2beta1/python_http_client/docs/ProtobufNullValue.md +++ b/backend/api/v2beta1/python_http_client/docs/ProtobufNullValue.md @@ -1,6 +1,6 @@ # ProtobufNullValue -`NullValue` is a singleton enumeration to represent the null value for the `Value` type union. The JSON representation for `NullValue` is JSON `null`. - NULL_VALUE: Null value. +`NullValue` is a singleton enumeration to represent the null value for the `Value` type union. The JSON representation for `NullValue` is JSON `null`. - NULL_VALUE: Null value. ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- diff --git a/backend/api/v2beta1/python_http_client/docs/RecurringRunServiceApi.md b/backend/api/v2beta1/python_http_client/docs/RecurringRunServiceApi.md index 35c1232f4a1..5e60ddccab5 100644 --- a/backend/api/v2beta1/python_http_client/docs/RecurringRunServiceApi.md +++ b/backend/api/v2beta1/python_http_client/docs/RecurringRunServiceApi.md @@ -13,7 +13,7 @@ Method | HTTP request | Description # **recurring_run_service_create_recurring_run** -> V2beta1RecurringRun recurring_run_service_create_recurring_run(body) +> V2beta1RecurringRun recurring_run_service_create_recurring_run(recurring_run) Creates a new recurring run in an experiment, given the experiment ID. @@ -51,11 +51,11 @@ configuration = kfp_server_api.Configuration( with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = kfp_server_api.RecurringRunServiceApi(api_client) - body = kfp_server_api.V2beta1RecurringRun() # V2beta1RecurringRun | The recurring run to be created. + recurring_run = kfp_server_api.V2beta1RecurringRun() # V2beta1RecurringRun | The recurring run to be created. try: # Creates a new recurring run in an experiment, given the experiment ID. 
- api_response = api_instance.recurring_run_service_create_recurring_run(body) + api_response = api_instance.recurring_run_service_create_recurring_run(recurring_run) pprint(api_response) except ApiException as e: print("Exception when calling RecurringRunServiceApi->recurring_run_service_create_recurring_run: %s\n" % e) @@ -65,7 +65,7 @@ with kfp_server_api.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **body** | [**V2beta1RecurringRun**](V2beta1RecurringRun.md)| The recurring run to be created. | + **recurring_run** | [**V2beta1RecurringRun**](V2beta1RecurringRun.md)| The recurring run to be created. | ### Return type diff --git a/backend/api/v2beta1/python_http_client/docs/ReportServiceApi.md b/backend/api/v2beta1/python_http_client/docs/ReportServiceApi.md index b4551121430..f42de9b7631 100644 --- a/backend/api/v2beta1/python_http_client/docs/ReportServiceApi.md +++ b/backend/api/v2beta1/python_http_client/docs/ReportServiceApi.md @@ -9,7 +9,7 @@ Method | HTTP request | Description # **report_service_report_scheduled_workflow** -> object report_service_report_scheduled_workflow(body) +> object report_service_report_scheduled_workflow(scheduled_workflow) @@ -47,10 +47,10 @@ configuration = kfp_server_api.Configuration( with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = kfp_server_api.ReportServiceApi(api_client) - body = 'body_example' # str | ScheduledWorkflow a ScheduledWorkflow resource marshalled into a json string. + scheduled_workflow = 'scheduled_workflow_example' # str | ScheduledWorkflow a ScheduledWorkflow resource marshalled into a json string. try: - api_response = api_instance.report_service_report_scheduled_workflow(body) + api_response = api_instance.report_service_report_scheduled_workflow(scheduled_workflow) pprint(api_response) except ApiException as e: print("Exception when calling ReportServiceApi->report_service_report_scheduled_workflow: %s\n" % e) @@ -60,7 +60,7 @@ with kfp_server_api.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **body** | **str**| ScheduledWorkflow a ScheduledWorkflow resource marshalled into a json string. | + **scheduled_workflow** | **str**| ScheduledWorkflow a ScheduledWorkflow resource marshalled into a json string. | ### Return type @@ -84,7 +84,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **report_service_report_workflow** -> object report_service_report_workflow(body) +> object report_service_report_workflow(workflow) @@ -122,10 +122,10 @@ configuration = kfp_server_api.Configuration( with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = kfp_server_api.ReportServiceApi(api_client) - body = 'body_example' # str | Workflow is a workflow custom resource marshalled into a json string. + workflow = 'workflow_example' # str | Workflow is a workflow custom resource marshalled into a json string. 
try: - api_response = api_instance.report_service_report_workflow(body) + api_response = api_instance.report_service_report_workflow(workflow) pprint(api_response) except ApiException as e: print("Exception when calling ReportServiceApi->report_service_report_workflow: %s\n" % e) @@ -135,7 +135,7 @@ with kfp_server_api.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **body** | **str**| Workflow is a workflow custom resource marshalled into a json string. | + **workflow** | **str**| Workflow is a workflow custom resource marshalled into a json string. | ### Return type diff --git a/backend/api/v2beta1/python_http_client/docs/RunServiceApi.md b/backend/api/v2beta1/python_http_client/docs/RunServiceApi.md index c7d67aee8ea..de6ff6df7ed 100644 --- a/backend/api/v2beta1/python_http_client/docs/RunServiceApi.md +++ b/backend/api/v2beta1/python_http_client/docs/RunServiceApi.md @@ -5,10 +5,10 @@ All URIs are relative to *http://localhost* Method | HTTP request | Description ------------- | ------------- | ------------- [**run_service_archive_run**](RunServiceApi.md#run_service_archive_run) | **POST** /apis/v2beta1/runs/{run_id}:archive | Archives a run in an experiment given by run ID and experiment ID. -[**run_service_create_run**](RunServiceApi.md#run_service_create_run) | **POST** /apis/v2beta1/runs | Creates a new run in an experiment specified by experiment ID. If experiment ID is not specified, the run is created in the default experiment. +[**run_service_create_run**](RunServiceApi.md#run_service_create_run) | **POST** /apis/v2beta1/runs | Creates a new run in an experiment specified by experiment ID. If experiment ID is not specified, the run is created in the default experiment. [**run_service_delete_run**](RunServiceApi.md#run_service_delete_run) | **DELETE** /apis/v2beta1/runs/{run_id} | Deletes a run in an experiment given by run ID and experiment ID. [**run_service_get_run**](RunServiceApi.md#run_service_get_run) | **GET** /apis/v2beta1/runs/{run_id} | Finds a specific run by ID. -[**run_service_list_runs**](RunServiceApi.md#run_service_list_runs) | **GET** /apis/v2beta1/runs | Finds all runs in an experiment given by experiment ID. If experiment id is not specified, finds all runs across all experiments. +[**run_service_list_runs**](RunServiceApi.md#run_service_list_runs) | **GET** /apis/v2beta1/runs | Finds all runs in an experiment given by experiment ID. If experiment id is not specified, finds all runs across all experiments. [**run_service_read_artifact**](RunServiceApi.md#run_service_read_artifact) | **GET** /apis/v2beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read | Finds artifact data in a run. [**run_service_retry_run**](RunServiceApi.md#run_service_retry_run) | **POST** /apis/v2beta1/runs/{run_id}:retry | Re-initiates a failed or terminated run. [**run_service_terminate_run**](RunServiceApi.md#run_service_terminate_run) | **POST** /apis/v2beta1/runs/{run_id}:terminate | Terminates an active run. @@ -16,7 +16,7 @@ Method | HTTP request | Description # **run_service_archive_run** -> object run_service_archive_run(run_id) +> object run_service_archive_run(run_id, experiment_id=experiment_id) Archives a run in an experiment given by run ID and experiment ID. 
@@ -55,10 +55,11 @@ with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = kfp_server_api.RunServiceApi(api_client) run_id = 'run_id_example' # str | The ID of the run to be archived. +experiment_id = 'experiment_id_example' # str | The ID of the parent experiment. (optional) try: # Archives a run in an experiment given by run ID and experiment ID. - api_response = api_instance.run_service_archive_run(run_id) + api_response = api_instance.run_service_archive_run(run_id, experiment_id=experiment_id) pprint(api_response) except ApiException as e: print("Exception when calling RunServiceApi->run_service_archive_run: %s\n" % e) @@ -69,6 +70,7 @@ with kfp_server_api.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **run_id** | **str**| The ID of the run to be archived. | + **experiment_id** | **str**| The ID of the parent experiment. | [optional] ### Return type @@ -92,9 +94,9 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **run_service_create_run** -> V2beta1Run run_service_create_run(body, experiment_id=experiment_id) +> V2beta1Run run_service_create_run(run, experiment_id=experiment_id) -Creates a new run in an experiment specified by experiment ID. If experiment ID is not specified, the run is created in the default experiment. +Creates a new run in an experiment specified by experiment ID. If experiment ID is not specified, the run is created in the default experiment. ### Example @@ -130,12 +132,12 @@ configuration = kfp_server_api.Configuration( with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = kfp_server_api.RunServiceApi(api_client) - body = kfp_server_api.V2beta1Run() # V2beta1Run | Run to be created. + run = kfp_server_api.V2beta1Run() # V2beta1Run | Run to be created. experiment_id = 'experiment_id_example' # str | The ID of the parent experiment. (optional) try: - # Creates a new run in an experiment specified by experiment ID. If experiment ID is not specified, the run is created in the default experiment. - api_response = api_instance.run_service_create_run(body, experiment_id=experiment_id) + # Creates a new run in an experiment specified by experiment ID. If experiment ID is not specified, the run is created in the default experiment. + api_response = api_instance.run_service_create_run(run, experiment_id=experiment_id) pprint(api_response) except ApiException as e: print("Exception when calling RunServiceApi->run_service_create_run: %s\n" % e) @@ -145,7 +147,7 @@ experiment_id = 'experiment_id_example' # str | The ID of the parent experiment. Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- - **body** | [**V2beta1Run**](V2beta1Run.md)| Run to be created. | + **run** | [**V2beta1Run**](V2beta1Run.md)| Run to be created. | **experiment_id** | **str**| The ID of the parent experiment. | [optional] ### Return type @@ -328,7 +330,7 @@ Name | Type | Description | Notes # **run_service_list_runs** > V2beta1ListRunsResponse run_service_list_runs(namespace=namespace, experiment_id=experiment_id, page_token=page_token, page_size=page_size, sort_by=sort_by, filter=filter) -Finds all runs in an experiment given by experiment ID. 
If experiment id is not specified, finds all runs across all experiments. +Finds all runs in an experiment given by experiment ID. If experiment id is not specified, finds all runs across all experiments. ### Example @@ -372,7 +374,7 @@ sort_by = 'sort_by_example' # str | Can be format of \"field_name\", \"field_nam filter = 'filter_example' # str | A url-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). (optional) try: - # Finds all runs in an experiment given by experiment ID. If experiment id is not specified, finds all runs across all experiments. + # Finds all runs in an experiment given by experiment ID. If experiment id is not specified, finds all runs across all experiments. api_response = api_instance.run_service_list_runs(namespace=namespace, experiment_id=experiment_id, page_token=page_token, page_size=page_size, sort_by=sort_by, filter=filter) pprint(api_response) except ApiException as e: @@ -494,7 +496,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **run_service_retry_run** -> object run_service_retry_run(run_id) +> object run_service_retry_run(run_id, experiment_id=experiment_id) Re-initiates a failed or terminated run. @@ -533,10 +535,11 @@ with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = kfp_server_api.RunServiceApi(api_client) run_id = 'run_id_example' # str | The ID of the run to be retried. +experiment_id = 'experiment_id_example' # str | The ID of the parent experiment. (optional) try: # Re-initiates a failed or terminated run. - api_response = api_instance.run_service_retry_run(run_id) + api_response = api_instance.run_service_retry_run(run_id, experiment_id=experiment_id) pprint(api_response) except ApiException as e: print("Exception when calling RunServiceApi->run_service_retry_run: %s\n" % e) @@ -547,6 +550,7 @@ with kfp_server_api.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **run_id** | **str**| The ID of the run to be retried. | + **experiment_id** | **str**| The ID of the parent experiment. | [optional] ### Return type @@ -570,7 +574,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **run_service_terminate_run** -> object run_service_terminate_run(run_id) +> object run_service_terminate_run(run_id, experiment_id=experiment_id) Terminates an active run. @@ -609,10 +613,11 @@ with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = kfp_server_api.RunServiceApi(api_client) run_id = 'run_id_example' # str | The ID of the run to be terminated. +experiment_id = 'experiment_id_example' # str | The ID of the parent experiment. (optional) try: # Terminates an active run. 
- api_response = api_instance.run_service_terminate_run(run_id) + api_response = api_instance.run_service_terminate_run(run_id, experiment_id=experiment_id) pprint(api_response) except ApiException as e: print("Exception when calling RunServiceApi->run_service_terminate_run: %s\n" % e) @@ -623,6 +628,7 @@ with kfp_server_api.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **run_id** | **str**| The ID of the run to be terminated. | + **experiment_id** | **str**| The ID of the parent experiment. | [optional] ### Return type @@ -646,7 +652,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **run_service_unarchive_run** -> object run_service_unarchive_run(run_id) +> object run_service_unarchive_run(run_id, experiment_id=experiment_id) Restores an archived run in an experiment given by run ID and experiment ID. @@ -685,10 +691,11 @@ with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = kfp_server_api.RunServiceApi(api_client) run_id = 'run_id_example' # str | The ID of the run to be restored. +experiment_id = 'experiment_id_example' # str | The ID of the parent experiment. (optional) try: # Restores an archived run in an experiment given by run ID and experiment ID. - api_response = api_instance.run_service_unarchive_run(run_id) + api_response = api_instance.run_service_unarchive_run(run_id, experiment_id=experiment_id) pprint(api_response) except ApiException as e: print("Exception when calling RunServiceApi->run_service_unarchive_run: %s\n" % e) @@ -699,6 +706,7 @@ with kfp_server_api.ApiClient(configuration) as api_client: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **run_id** | **str**| The ID of the run to be restored. | + **experiment_id** | **str**| The ID of the parent experiment. | [optional] ### Return type diff --git a/backend/api/v2beta1/python_http_client/docs/RuntimeError.md b/backend/api/v2beta1/python_http_client/docs/RuntimeError.md deleted file mode 100644 index bd8a0a4373d..00000000000 --- a/backend/api/v2beta1/python_http_client/docs/RuntimeError.md +++ /dev/null @@ -1,13 +0,0 @@ -# RuntimeError - -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**error** | **str** | | [optional] -**code** | **int** | | [optional] -**message** | **str** | | [optional] -**details** | [**list[ProtobufAny]**](ProtobufAny.md) | | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1Artifact.md b/backend/api/v2beta1/python_http_client/docs/V2beta1Artifact.md new file mode 100644 index 00000000000..d782059391a --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1Artifact.md @@ -0,0 +1,21 @@ +# V2beta1Artifact + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**artifact_id** | **str** | Unique Artifact ID. Generated by MLMD. | [optional] +**storage_provider** | **str** | Storage Provider to which this Artifact is located (e.g. S3, Minio, etc.). 
| [optional] +**storage_path** | **str** | The path location of this Artifact within the storage provider. For example an object located at s3://my-bucket/path/a/b/c will result in \"path/a/b/c\". | [optional] +**uri** | **str** | | [optional] +**download_url** | **str** | Optional Output. Specifies a signed-url that can be used to download this Artifact directly from its store. | [optional] +**namespace** | **str** | The namespace associated with this Artifact. This is determined by the namespace of the parent PipelineRun that created this Artifact. | [optional] +**artifact_type** | **str** | | [optional] +**artifact_size** | **str** | The size of the artifact in bytes. If the artifact does not exist in object store (e.g. Metrics) then this is omitted. | [optional] +**created_at** | **datetime** | Creation time of the artifact. | [optional] +**last_updated_at** | **datetime** | Last update time of the artifact. | [optional] +**error** | [**GooglerpcStatus**](GooglerpcStatus.md) | | [optional] +**render_url** | **str** | Optional Output. Specifies a signed URL that can be used to render this Artifact directly from its store. | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1GetHealthzResponse.md b/backend/api/v2beta1/python_http_client/docs/V2beta1GetHealthzResponse.md index 54bdcaf8d2d..3ebcbc1d1bc 100644 --- a/backend/api/v2beta1/python_http_client/docs/V2beta1GetHealthzResponse.md +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1GetHealthzResponse.md @@ -4,6 +4,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **multi_user** | **bool** | Returns if KFP in multi-user mode | [optional] +**pipeline_store** | **str** | | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1ListArtifactResponse.md b/backend/api/v2beta1/python_http_client/docs/V2beta1ListArtifactResponse.md new file mode 100644 index 00000000000..f5da95fb6eb --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1ListArtifactResponse.md @@ -0,0 +1,11 @@ +# V2beta1ListArtifactResponse + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**artifacts** | [**list[V2beta1Artifact]**](V2beta1Artifact.md) | List of retrieved artifacts. | [optional] +**next_page_token** | **str** | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1Pipeline.md b/backend/api/v2beta1/python_http_client/docs/V2beta1Pipeline.md index fd34bb6a8d6..1b592dadbe9 100644 --- a/backend/api/v2beta1/python_http_client/docs/V2beta1Pipeline.md +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1Pipeline.md @@ -4,7 +4,8 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **pipeline_id** | **str** | Output. Unique pipeline ID. Generated by API server. | [optional] -**display_name** | **str** | Required input field. Pipeline name provided by user. | [optional] +**display_name** | **str** | Optional input field. 
Pipeline display name provided by user. | [optional] +**name** | **str** | Required input field. Pipeline name provided by user. | [optional] **description** | **str** | Optional input field. A short description of the pipeline. | [optional] **created_at** | **datetime** | Output. Creation time of the pipeline. | [optional] **namespace** | **str** | Input. A namespace this pipeline belongs to. Causes error if user is not authorized to access the specified namespace. If not specified in CreatePipeline, default namespace is used. | [optional] diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1PipelineTaskDetail.md b/backend/api/v2beta1/python_http_client/docs/V2beta1PipelineTaskDetail.md index 72bd253ff29..3b197b439e2 100644 --- a/backend/api/v2beta1/python_http_client/docs/V2beta1PipelineTaskDetail.md +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1PipelineTaskDetail.md @@ -17,7 +17,7 @@ Name | Type | Description | Notes **inputs** | [**dict(str, V2beta1ArtifactList)**](V2beta1ArtifactList.md) | Input artifacts of the task. | [optional] **outputs** | [**dict(str, V2beta1ArtifactList)**](V2beta1ArtifactList.md) | Output artifacts of the task. | [optional] **parent_task_id** | **str** | ID of the parent task if the task is within a component scope. Empty if the task is at the root level. | [optional] -**state_history** | [**list[V2beta1RuntimeStatus]**](V2beta1RuntimeStatus.md) | A sequence of task statuses. This field keeps a record of state transitions. | [optional] +**state_history** | [**list[V2beta1RuntimeStatus]**](V2beta1RuntimeStatus.md) | A sequence of task statuses. This field keeps a record of state transitions. | [optional] **pod_name** | **str** | Name of the corresponding pod assigned by the orchestration engine. Also known as node_id. | [optional] **child_tasks** | [**list[PipelineTaskDetailChildTask]**](PipelineTaskDetailChildTask.md) | Sequence of dependen tasks. | [optional] diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1PipelineVersion.md b/backend/api/v2beta1/python_http_client/docs/V2beta1PipelineVersion.md index 946abca73fa..653cb1e396e 100644 --- a/backend/api/v2beta1/python_http_client/docs/V2beta1PipelineVersion.md +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1PipelineVersion.md @@ -5,7 +5,8 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **pipeline_id** | **str** | Required input field. Unique ID of the parent pipeline. | [optional] **pipeline_version_id** | **str** | Output. Unique pipeline version ID. Generated by API server. | [optional] -**display_name** | **str** | Required input field. Pipeline version name provided by user. | [optional] +**display_name** | **str** | Optional input field. Pipeline version display name provided by user. | [optional] +**name** | **str** | Required input field. Pipeline version name provided by user. | [optional] **description** | **str** | Optional input field. Short description of the pipeline version. | [optional] **created_at** | **datetime** | Output. Creation time of the pipeline version. 
| [optional] **package_url** | [**V2beta1Url**](V2beta1Url.md) | | [optional] diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1Run.md b/backend/api/v2beta1/python_http_client/docs/V2beta1Run.md index 589a7c32f22..dbb37fdf0fb 100644 --- a/backend/api/v2beta1/python_http_client/docs/V2beta1Run.md +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1Run.md @@ -20,7 +20,7 @@ Name | Type | Description | Notes **error** | [**GooglerpcStatus**](GooglerpcStatus.md) | | [optional] **run_details** | [**V2beta1RunDetails**](V2beta1RunDetails.md) | | [optional] **recurring_run_id** | **str** | ID of the recurring run that triggered this run. | [optional] -**state_history** | [**list[V2beta1RuntimeStatus]**](V2beta1RuntimeStatus.md) | Output. A sequence of run statuses. This field keeps a record of state transitions. | [optional] +**state_history** | [**list[V2beta1RuntimeStatus]**](V2beta1RuntimeStatus.md) | Output. A sequence of run statuses. This field keeps a record of state transitions. | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/backend/api/v2beta1/python_http_client/docs/VisualizationServiceApi.md b/backend/api/v2beta1/python_http_client/docs/VisualizationServiceApi.md index 8d13118d4da..8ef16df3186 100644 --- a/backend/api/v2beta1/python_http_client/docs/VisualizationServiceApi.md +++ b/backend/api/v2beta1/python_http_client/docs/VisualizationServiceApi.md @@ -8,7 +8,7 @@ Method | HTTP request | Description # **visualization_service_create_visualization_v1** -> V2beta1Visualization visualization_service_create_visualization_v1(namespace, body) +> V2beta1Visualization visualization_service_create_visualization_v1(namespace, visualization) @@ -47,10 +47,10 @@ with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = kfp_server_api.VisualizationServiceApi(api_client) namespace = 'namespace_example' # str | -body = kfp_server_api.V2beta1Visualization() # V2beta1Visualization | +visualization = kfp_server_api.V2beta1Visualization() # V2beta1Visualization | try: - api_response = api_instance.visualization_service_create_visualization_v1(namespace, body) + api_response = api_instance.visualization_service_create_visualization_v1(namespace, visualization) pprint(api_response) except ApiException as e: print("Exception when calling VisualizationServiceApi->visualization_service_create_visualization_v1: %s\n" % e) @@ -61,7 +61,7 @@ body = kfp_server_api.V2beta1Visualization() # V2beta1Visualization | Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **namespace** | **str**| | - **body** | [**V2beta1Visualization**](V2beta1Visualization.md)| | + **visualization** | [**V2beta1Visualization**](V2beta1Visualization.md)| | ### Return type diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/__init__.py b/backend/api/v2beta1/python_http_client/kfp_server_api/__init__.py index e2d4d46b0ee..1c3c712614d 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/__init__.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/__init__.py @@ -14,9 +14,10 @@ from __future__ import absolute_import -__version__ = "2.5.0" +__version__ = "2.14.3" # import apis into sdk package +from kfp_server_api.api.artifact_service_api import ArtifactServiceApi from kfp_server_api.api.auth_service_api import AuthServiceApi from 
kfp_server_api.api.experiment_service_api import ExperimentServiceApi from kfp_server_api.api.healthz_service_api import HealthzServiceApi @@ -38,7 +39,9 @@ # import models into sdk package from kfp_server_api.models.authorize_request_resources import AuthorizeRequestResources from kfp_server_api.models.authorize_request_verb import AuthorizeRequestVerb +from kfp_server_api.models.get_artifact_request_artifact_view import GetArtifactRequestArtifactView from kfp_server_api.models.googlerpc_status import GooglerpcStatus +from kfp_server_api.models.list_artifact_request_field import ListArtifactRequestField from kfp_server_api.models.pipeline_task_detail_child_task import PipelineTaskDetailChildTask from kfp_server_api.models.predicate_int_values import PredicateIntValues from kfp_server_api.models.predicate_long_values import PredicateLongValues @@ -46,7 +49,7 @@ from kfp_server_api.models.protobuf_any import ProtobufAny from kfp_server_api.models.protobuf_null_value import ProtobufNullValue from kfp_server_api.models.recurring_run_mode import RecurringRunMode -from kfp_server_api.models.runtime_error import RuntimeError +from kfp_server_api.models.v2beta1_artifact import V2beta1Artifact from kfp_server_api.models.v2beta1_artifact_list import V2beta1ArtifactList from kfp_server_api.models.v2beta1_create_pipeline_and_version_request import V2beta1CreatePipelineAndVersionRequest from kfp_server_api.models.v2beta1_cron_schedule import V2beta1CronSchedule @@ -54,6 +57,7 @@ from kfp_server_api.models.v2beta1_experiment_storage_state import V2beta1ExperimentStorageState from kfp_server_api.models.v2beta1_filter import V2beta1Filter from kfp_server_api.models.v2beta1_get_healthz_response import V2beta1GetHealthzResponse +from kfp_server_api.models.v2beta1_list_artifact_response import V2beta1ListArtifactResponse from kfp_server_api.models.v2beta1_list_experiments_response import V2beta1ListExperimentsResponse from kfp_server_api.models.v2beta1_list_pipeline_versions_response import V2beta1ListPipelineVersionsResponse from kfp_server_api.models.v2beta1_list_pipelines_response import V2beta1ListPipelinesResponse diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/api/__init__.py b/backend/api/v2beta1/python_http_client/kfp_server_api/api/__init__.py index 2c1d6567c2d..eb9745770ca 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/api/__init__.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/api/__init__.py @@ -3,6 +3,7 @@ # flake8: noqa # import apis into api package +from kfp_server_api.api.artifact_service_api import ArtifactServiceApi from kfp_server_api.api.auth_service_api import AuthServiceApi from kfp_server_api.api.experiment_service_api import ExperimentServiceApi from kfp_server_api.api.healthz_service_api import HealthzServiceApi diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/api/artifact_service_api.py b/backend/api/v2beta1/python_http_client/kfp_server_api/api/artifact_service_api.py new file mode 100644 index 00000000000..29f0712f67a --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/api/artifact_service_api.py @@ -0,0 +1,312 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +# python 2 and python 3 compatibility library +import six + +from kfp_server_api.api_client import ApiClient +from kfp_server_api.exceptions import ( # noqa: F401 + ApiTypeError, + ApiValueError +) + + +class ArtifactServiceApi(object): + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): + if api_client is None: + api_client = ApiClient() + self.api_client = api_client + + def artifact_service_get_artifact(self, artifact_id, **kwargs): # noqa: E501 + """Finds a specific Artifact by ID. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.artifact_service_get_artifact(artifact_id, async_req=True) + >>> result = thread.get() + + :param artifact_id: Required. The ID of the artifact to be retrieved. (required) + :type artifact_id: str + :param view: Optional. Set to \"DOWNLOAD\" to include a signed URL with an expiry (default 15 seconds, unless configured otherwise). This URL can be used to download the Artifact directly from the Artifact's storage provider. Set to \"BASIC\" to exclude the download_url from server responses, thus preventing the creation of any signed url. Defaults to BASIC. - ARTIFACT_VIEW_UNSPECIFIED: Not specified, equivalent to BASIC. - BASIC: Server response excludes download_url - DOWNLOAD: Server response includes download_url - RENDER: Server response includes a signed URL, allowing in-browser rendering or preview of the artifact. + :type view: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: V2beta1Artifact + """ + kwargs['_return_http_data_only'] = True + return self.artifact_service_get_artifact_with_http_info(artifact_id, **kwargs) # noqa: E501 + + def artifact_service_get_artifact_with_http_info(self, artifact_id, **kwargs): # noqa: E501 + """Finds a specific Artifact by ID. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.artifact_service_get_artifact_with_http_info(artifact_id, async_req=True) + >>> result = thread.get() + + :param artifact_id: Required. The ID of the artifact to be retrieved. (required) + :type artifact_id: str + :param view: Optional. Set to \"DOWNLOAD\" to include a signed URL with an expiry (default 15 seconds, unless configured otherwise). This URL can be used to download the Artifact directly from the Artifact's storage provider. Set to \"BASIC\" to exclude the download_url from server responses, thus preventing the creation of any signed url. Defaults to BASIC. - ARTIFACT_VIEW_UNSPECIFIED: Not specified, equivalent to BASIC.
- BASIC: Server response excludes download_url - DOWNLOAD: Server response includes download_url - RENDER: Server response includes a signed URL, allowing in-browser rendering or preview of the artifact. + :type view: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(V2beta1Artifact, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'artifact_id', + 'view' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method artifact_service_get_artifact" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'artifact_id' is set + if self.api_client.client_side_validation and ('artifact_id' not in local_var_params or # noqa: E501 + local_var_params['artifact_id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `artifact_id` when calling `artifact_service_get_artifact`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'artifact_id' in local_var_params: + path_params['artifact_id'] = local_var_params['artifact_id'] # noqa: E501 + + query_params = [] + if 'view' in local_var_params and local_var_params['view'] is not None: # noqa: E501 + query_params.append(('view', local_var_params['view'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v2beta1/artifacts/{artifact_id}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='V2beta1Artifact', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def artifact_service_list_artifacts(self, **kwargs): # noqa: E501 + """Finds all artifacts within the specified namespace. Namespace field is required. In multi-user mode, the caller is required to have RBAC verb \"list\" on the \"artifacts\" resource for the specified namespace. # noqa: E501 + + This method makes a synchronous HTTP request by default.
To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.artifact_service_list_artifacts(async_req=True) + >>> result = thread.get() + + :param max_result_size: Optional. Max number of resources to return in the result. A value of zero or less will result in the default (20). The API implementation also enforces an upper-bound of 100, and picks the minimum between this value and the one specified here. [default = 20] + :type max_result_size: int + :param order_by_field: Optional. Ordering field. [default = ID] + :type order_by_field: str + :param order_by: Optional. Can be either \"asc\" (ascending) or \"desc\" (descending). [default = asc] + :type order_by: str + :param next_page_token: Optional. The next_page_token value returned from a previous List request, if any. + :type next_page_token: str + :param namespace: Required. Namespace of the Artifact's context. + :type namespace: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: V2beta1ListArtifactResponse + """ + kwargs['_return_http_data_only'] = True + return self.artifact_service_list_artifacts_with_http_info(**kwargs) # noqa: E501 + + def artifact_service_list_artifacts_with_http_info(self, **kwargs): # noqa: E501 + """Finds all artifacts within the specified namespace. Namespace field is required. In multi-user mode, the caller is required to have RBAC verb \"list\" on the \"artifacts\" resource for the specified namespace. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.artifact_service_list_artifacts_with_http_info(async_req=True) + >>> result = thread.get() + + :param max_result_size: Optional. Max number of resources to return in the result. A value of zero or less will result in the default (20). The API implementation also enforces an upper-bound of 100, and picks the minimum between this value and the one specified here. [default = 20] + :type max_result_size: int + :param order_by_field: Optional. Ordering field. [default = ID] + :type order_by_field: str + :param order_by: Optional. Can be either \"asc\" (ascending) or \"desc\" (descending). [default = asc] + :type order_by: str + :param next_page_token: Optional. The next_page_token value returned from a previous List request, if any. + :type next_page_token: str + :param namespace: Required. Namespace of the Artifact's context. + :type namespace: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(V2beta1ListArtifactResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'max_result_size', + 'order_by_field', + 'order_by', + 'next_page_token', + 'namespace' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method artifact_service_list_artifacts" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + + collection_formats = {} + + path_params = {} + + query_params = [] + if 'max_result_size' in local_var_params and local_var_params['max_result_size'] is not None: # noqa: E501 + query_params.append(('max_result_size', local_var_params['max_result_size'])) # noqa: E501 + if 'order_by_field' in local_var_params and local_var_params['order_by_field'] is not None: # noqa: E501 + query_params.append(('order_by_field', local_var_params['order_by_field'])) # noqa: E501 + if 'order_by' in local_var_params and local_var_params['order_by'] is not None: # noqa: E501 + query_params.append(('order_by', local_var_params['order_by'])) # noqa: E501 + if 'next_page_token' in local_var_params and local_var_params['next_page_token'] is not None: # noqa: E501 + query_params.append(('next_page_token', local_var_params['next_page_token'])) # noqa: E501 + if 'namespace' in local_var_params and local_var_params['namespace'] is not None: # noqa: E501 + query_params.append(('namespace', local_var_params['namespace'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v2beta1/artifacts', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='V2beta1ListArtifactResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/api/auth_service_api.py b/backend/api/v2beta1/python_http_client/kfp_server_api/api/auth_service_api.py index c0485cff865..a152cef1c7b 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/api/auth_service_api.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/api/auth_service_api.py @@ -45,11 +45,11 @@ def auth_service_authorize(self, **kwargs): # noqa: E501 >>> thread = api.auth_service_authorize(async_req=True) >>> result = thread.get() - :param namespace: + :param namespace: Namespace the resource belongs to. :type namespace: str - :param resources: + :param resources: Resource type asking for authorization. :type resources: str - :param verb: + :param verb: Verb on the resource asking for authorization. 
:type verb: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional @@ -77,11 +77,11 @@ def auth_service_authorize_with_http_info(self, **kwargs): # noqa: E501 >>> thread = api.auth_service_authorize_with_http_info(async_req=True) >>> result = thread.get() - :param namespace: + :param namespace: Namespace the resource belongs to. :type namespace: str - :param resources: + :param resources: Resource type asking for authorization. :type resources: str - :param verb: + :param verb: Verb on the resource asking for authorization. :type verb: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/api/experiment_service_api.py b/backend/api/v2beta1/python_http_client/kfp_server_api/api/experiment_service_api.py index 7fc3a743e16..ca08859e5c0 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/api/experiment_service_api.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/api/experiment_service_api.py @@ -158,17 +158,17 @@ def experiment_service_archive_experiment_with_http_info(self, experiment_id, ** _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) - def experiment_service_create_experiment(self, body, **kwargs): # noqa: E501 + def experiment_service_create_experiment(self, experiment, **kwargs): # noqa: E501 """Creates a new experiment. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.experiment_service_create_experiment(body, async_req=True) + >>> thread = api.experiment_service_create_experiment(experiment, async_req=True) >>> result = thread.get() - :param body: The experiment to be created. (required) - :type body: V2beta1Experiment + :param experiment: The experiment to be created. (required) + :type experiment: V2beta1Experiment :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will @@ -184,19 +184,19 @@ def experiment_service_create_experiment(self, body, **kwargs): # noqa: E501 :rtype: V2beta1Experiment """ kwargs['_return_http_data_only'] = True - return self.experiment_service_create_experiment_with_http_info(body, **kwargs) # noqa: E501 + return self.experiment_service_create_experiment_with_http_info(experiment, **kwargs) # noqa: E501 - def experiment_service_create_experiment_with_http_info(self, body, **kwargs): # noqa: E501 + def experiment_service_create_experiment_with_http_info(self, experiment, **kwargs): # noqa: E501 """Creates a new experiment. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.experiment_service_create_experiment_with_http_info(body, async_req=True) + >>> thread = api.experiment_service_create_experiment_with_http_info(experiment, async_req=True) >>> result = thread.get() - :param body: The experiment to be created. (required) - :type body: V2beta1Experiment + :param experiment: The experiment to be created. (required) + :type experiment: V2beta1Experiment :param async_req: Whether to execute the request asynchronously. 
:type async_req: bool, optional :param _return_http_data_only: response data without head status code @@ -219,7 +219,7 @@ def experiment_service_create_experiment_with_http_info(self, body, **kwargs): local_var_params = locals() all_params = [ - 'body' + 'experiment' ] all_params.extend( [ @@ -238,10 +238,10 @@ def experiment_service_create_experiment_with_http_info(self, body, **kwargs): ) local_var_params[key] = val del local_var_params['kwargs'] - # verify the required parameter 'body' is set - if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 - local_var_params['body'] is None): # noqa: E501 - raise ApiValueError("Missing the required parameter `body` when calling `experiment_service_create_experiment`") # noqa: E501 + # verify the required parameter 'experiment' is set + if self.api_client.client_side_validation and ('experiment' not in local_var_params or # noqa: E501 + local_var_params['experiment'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `experiment` when calling `experiment_service_create_experiment`") # noqa: E501 collection_formats = {} @@ -255,8 +255,8 @@ def experiment_service_create_experiment_with_http_info(self, body, **kwargs): local_var_files = {} body_params = None - if 'body' in local_var_params: - body_params = local_var_params['body'] + if 'experiment' in local_var_params: + body_params = local_var_params['experiment'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/api/pipeline_service_api.py b/backend/api/v2beta1/python_http_client/kfp_server_api/api/pipeline_service_api.py index c105e41bd16..b9ed5a433de 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/api/pipeline_service_api.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/api/pipeline_service_api.py @@ -36,17 +36,17 @@ def __init__(self, api_client=None): api_client = ApiClient() self.api_client = api_client - def pipeline_service_create_pipeline(self, body, **kwargs): # noqa: E501 + def pipeline_service_create_pipeline(self, pipeline, **kwargs): # noqa: E501 """Creates a pipeline. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.pipeline_service_create_pipeline(body, async_req=True) + >>> thread = api.pipeline_service_create_pipeline(pipeline, async_req=True) >>> result = thread.get() - :param body: Required input. Pipeline that needs to be created. (required) - :type body: V2beta1Pipeline + :param pipeline: Required input. Pipeline that needs to be created. (required) + :type pipeline: V2beta1Pipeline :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will @@ -62,19 +62,19 @@ def pipeline_service_create_pipeline(self, body, **kwargs): # noqa: E501 :rtype: V2beta1Pipeline """ kwargs['_return_http_data_only'] = True - return self.pipeline_service_create_pipeline_with_http_info(body, **kwargs) # noqa: E501 + return self.pipeline_service_create_pipeline_with_http_info(pipeline, **kwargs) # noqa: E501 - def pipeline_service_create_pipeline_with_http_info(self, body, **kwargs): # noqa: E501 + def pipeline_service_create_pipeline_with_http_info(self, pipeline, **kwargs): # noqa: E501 """Creates a pipeline. 
# noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.pipeline_service_create_pipeline_with_http_info(body, async_req=True) + >>> thread = api.pipeline_service_create_pipeline_with_http_info(pipeline, async_req=True) >>> result = thread.get() - :param body: Required input. Pipeline that needs to be created. (required) - :type body: V2beta1Pipeline + :param pipeline: Required input. Pipeline that needs to be created. (required) + :type pipeline: V2beta1Pipeline :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code @@ -97,7 +97,7 @@ def pipeline_service_create_pipeline_with_http_info(self, body, **kwargs): # no local_var_params = locals() all_params = [ - 'body' + 'pipeline' ] all_params.extend( [ @@ -116,10 +116,10 @@ def pipeline_service_create_pipeline_with_http_info(self, body, **kwargs): # no ) local_var_params[key] = val del local_var_params['kwargs'] - # verify the required parameter 'body' is set - if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 - local_var_params['body'] is None): # noqa: E501 - raise ApiValueError("Missing the required parameter `body` when calling `pipeline_service_create_pipeline`") # noqa: E501 + # verify the required parameter 'pipeline' is set + if self.api_client.client_side_validation and ('pipeline' not in local_var_params or # noqa: E501 + local_var_params['pipeline'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `pipeline` when calling `pipeline_service_create_pipeline`") # noqa: E501 collection_formats = {} @@ -133,8 +133,8 @@ def pipeline_service_create_pipeline_with_http_info(self, body, **kwargs): # no local_var_files = {} body_params = None - if 'body' in local_var_params: - body_params = local_var_params['body'] + if 'pipeline' in local_var_params: + body_params = local_var_params['pipeline'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -288,19 +288,19 @@ def pipeline_service_create_pipeline_and_version_with_http_info(self, body, **kw _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) - def pipeline_service_create_pipeline_version(self, pipeline_id, body, **kwargs): # noqa: E501 + def pipeline_service_create_pipeline_version(self, pipeline_id, pipeline_version, **kwargs): # noqa: E501 """Adds a pipeline version to the specified pipeline ID. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.pipeline_service_create_pipeline_version(pipeline_id, body, async_req=True) + >>> thread = api.pipeline_service_create_pipeline_version(pipeline_id, pipeline_version, async_req=True) >>> result = thread.get() :param pipeline_id: Required input. ID of the parent pipeline. (required) :type pipeline_id: str - :param body: Required input. Pipeline version ID to be created. (required) - :type body: V2beta1PipelineVersion + :param pipeline_version: Required input. Pipeline version ID to be created. (required) + :type pipeline_version: V2beta1PipelineVersion :param async_req: Whether to execute the request asynchronously. 
:type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will @@ -316,21 +316,21 @@ def pipeline_service_create_pipeline_version(self, pipeline_id, body, **kwargs): :rtype: V2beta1PipelineVersion """ kwargs['_return_http_data_only'] = True - return self.pipeline_service_create_pipeline_version_with_http_info(pipeline_id, body, **kwargs) # noqa: E501 + return self.pipeline_service_create_pipeline_version_with_http_info(pipeline_id, pipeline_version, **kwargs) # noqa: E501 - def pipeline_service_create_pipeline_version_with_http_info(self, pipeline_id, body, **kwargs): # noqa: E501 + def pipeline_service_create_pipeline_version_with_http_info(self, pipeline_id, pipeline_version, **kwargs): # noqa: E501 """Adds a pipeline version to the specified pipeline ID. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.pipeline_service_create_pipeline_version_with_http_info(pipeline_id, body, async_req=True) + >>> thread = api.pipeline_service_create_pipeline_version_with_http_info(pipeline_id, pipeline_version, async_req=True) >>> result = thread.get() :param pipeline_id: Required input. ID of the parent pipeline. (required) :type pipeline_id: str - :param body: Required input. Pipeline version ID to be created. (required) - :type body: V2beta1PipelineVersion + :param pipeline_version: Required input. Pipeline version ID to be created. (required) + :type pipeline_version: V2beta1PipelineVersion :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code @@ -354,7 +354,7 @@ def pipeline_service_create_pipeline_version_with_http_info(self, pipeline_id, b all_params = [ 'pipeline_id', - 'body' + 'pipeline_version' ] all_params.extend( [ @@ -377,10 +377,10 @@ def pipeline_service_create_pipeline_version_with_http_info(self, pipeline_id, b if self.api_client.client_side_validation and ('pipeline_id' not in local_var_params or # noqa: E501 local_var_params['pipeline_id'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `pipeline_id` when calling `pipeline_service_create_pipeline_version`") # noqa: E501 - # verify the required parameter 'body' is set - if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 - local_var_params['body'] is None): # noqa: E501 - raise ApiValueError("Missing the required parameter `body` when calling `pipeline_service_create_pipeline_version`") # noqa: E501 + # verify the required parameter 'pipeline_version' is set + if self.api_client.client_side_validation and ('pipeline_version' not in local_var_params or # noqa: E501 + local_var_params['pipeline_version'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `pipeline_version` when calling `pipeline_service_create_pipeline_version`") # noqa: E501 collection_formats = {} @@ -396,8 +396,8 @@ def pipeline_service_create_pipeline_version_with_http_info(self, pipeline_id, b local_var_files = {} body_params = None - if 'body' in local_var_params: - body_params = local_var_params['body'] + if 'pipeline_version' in local_var_params: + body_params = local_var_params['pipeline_version'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -426,7 +426,7 @@ def pipeline_service_create_pipeline_version_with_http_info(self, 
pipeline_id, b collection_formats=collection_formats) def pipeline_service_delete_pipeline(self, pipeline_id, **kwargs): # noqa: E501 - """Deletes an empty pipeline by ID. Returns error if the pipeline has pipeline versions. # noqa: E501 + """Deletes a pipeline by ID. If cascade is false (default), it returns an error if the pipeline has any versions. If cascade is true, it will also delete all pipeline versions. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -436,6 +436,8 @@ def pipeline_service_delete_pipeline(self, pipeline_id, **kwargs): # noqa: E501 :param pipeline_id: Required input. ID of the pipeline to be deleted. (required) :type pipeline_id: str + :param cascade: Optional. If true, the pipeline and all its versions will be deleted. If false (default), only the pipeline will be deleted if it has no versions. + :type cascade: bool :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will @@ -454,7 +456,7 @@ def pipeline_service_delete_pipeline(self, pipeline_id, **kwargs): # noqa: E501 return self.pipeline_service_delete_pipeline_with_http_info(pipeline_id, **kwargs) # noqa: E501 def pipeline_service_delete_pipeline_with_http_info(self, pipeline_id, **kwargs): # noqa: E501 - """Deletes an empty pipeline by ID. Returns error if the pipeline has pipeline versions. # noqa: E501 + """Deletes a pipeline by ID. If cascade is false (default), it returns an error if the pipeline has any versions. If cascade is true, it will also delete all pipeline versions. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -464,6 +466,8 @@ def pipeline_service_delete_pipeline_with_http_info(self, pipeline_id, **kwargs) :param pipeline_id: Required input. ID of the pipeline to be deleted. (required) :type pipeline_id: str + :param cascade: Optional. If true, the pipeline and all its versions will be deleted. If false (default), only the pipeline will be deleted if it has no versions. + :type cascade: bool :param async_req: Whether to execute the request asynchronously. 
:type async_req: bool, optional :param _return_http_data_only: response data without head status code @@ -486,7 +490,8 @@ def pipeline_service_delete_pipeline_with_http_info(self, pipeline_id, **kwargs) local_var_params = locals() all_params = [ - 'pipeline_id' + 'pipeline_id', + 'cascade' ] all_params.extend( [ @@ -517,6 +522,8 @@ def pipeline_service_delete_pipeline_with_http_info(self, pipeline_id, **kwargs) path_params['pipeline_id'] = local_var_params['pipeline_id'] # noqa: E501 query_params = [] + if 'cascade' in local_var_params and local_var_params['cascade'] is not None: # noqa: E501 + query_params.append(('cascade', local_var_params['cascade'])) # noqa: E501 header_params = {} diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/api/pipeline_upload_service_api.py b/backend/api/v2beta1/python_http_client/kfp_server_api/api/pipeline_upload_service_api.py index 92ffc8ccc1f..4cb4e800abf 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/api/pipeline_upload_service_api.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/api/pipeline_upload_service_api.py @@ -49,6 +49,8 @@ def upload_pipeline(self, uploadfile, **kwargs): # noqa: E501 :type uploadfile: file :param name: :type name: str + :param display_name: + :type display_name: str :param description: :type description: str :param namespace: @@ -83,6 +85,8 @@ def upload_pipeline_with_http_info(self, uploadfile, **kwargs): # noqa: E501 :type uploadfile: file :param name: :type name: str + :param display_name: + :type display_name: str :param description: :type description: str :param namespace: @@ -111,6 +115,7 @@ def upload_pipeline_with_http_info(self, uploadfile, **kwargs): # noqa: E501 all_params = [ 'uploadfile', 'name', + 'display_name', 'description', 'namespace' ] @@ -143,6 +148,8 @@ def upload_pipeline_with_http_info(self, uploadfile, **kwargs): # noqa: E501 query_params = [] if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501 query_params.append(('name', local_var_params['name'])) # noqa: E501 + if 'display_name' in local_var_params and local_var_params['display_name'] is not None: # noqa: E501 + query_params.append(('display_name', local_var_params['display_name'])) # noqa: E501 if 'description' in local_var_params and local_var_params['description'] is not None: # noqa: E501 query_params.append(('description', local_var_params['description'])) # noqa: E501 if 'namespace' in local_var_params and local_var_params['namespace'] is not None: # noqa: E501 @@ -196,6 +203,8 @@ def upload_pipeline_version(self, uploadfile, **kwargs): # noqa: E501 :type uploadfile: file :param name: :type name: str + :param display_name: + :type display_name: str :param pipelineid: :type pipelineid: str :param description: @@ -230,6 +239,8 @@ def upload_pipeline_version_with_http_info(self, uploadfile, **kwargs): # noqa: :type uploadfile: file :param name: :type name: str + :param display_name: + :type display_name: str :param pipelineid: :type pipelineid: str :param description: @@ -258,6 +269,7 @@ def upload_pipeline_version_with_http_info(self, uploadfile, **kwargs): # noqa: all_params = [ 'uploadfile', 'name', + 'display_name', 'pipelineid', 'description' ] @@ -290,6 +302,8 @@ def upload_pipeline_version_with_http_info(self, uploadfile, **kwargs): # noqa: query_params = [] if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501 query_params.append(('name', local_var_params['name'])) # noqa: E501 + if 'display_name' in local_var_params and 
local_var_params['display_name'] is not None: # noqa: E501 + query_params.append(('display_name', local_var_params['display_name'])) # noqa: E501 if 'pipelineid' in local_var_params and local_var_params['pipelineid'] is not None: # noqa: E501 query_params.append(('pipelineid', local_var_params['pipelineid'])) # noqa: E501 if 'description' in local_var_params and local_var_params['description'] is not None: # noqa: E501 diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/api/recurring_run_service_api.py b/backend/api/v2beta1/python_http_client/kfp_server_api/api/recurring_run_service_api.py index a66456cb7f7..911a3725484 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/api/recurring_run_service_api.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/api/recurring_run_service_api.py @@ -36,17 +36,17 @@ def __init__(self, api_client=None): api_client = ApiClient() self.api_client = api_client - def recurring_run_service_create_recurring_run(self, body, **kwargs): # noqa: E501 + def recurring_run_service_create_recurring_run(self, recurring_run, **kwargs): # noqa: E501 """Creates a new recurring run in an experiment, given the experiment ID. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.recurring_run_service_create_recurring_run(body, async_req=True) + >>> thread = api.recurring_run_service_create_recurring_run(recurring_run, async_req=True) >>> result = thread.get() - :param body: The recurring run to be created. (required) - :type body: V2beta1RecurringRun + :param recurring_run: The recurring run to be created. (required) + :type recurring_run: V2beta1RecurringRun :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will @@ -62,19 +62,19 @@ def recurring_run_service_create_recurring_run(self, body, **kwargs): # noqa: E :rtype: V2beta1RecurringRun """ kwargs['_return_http_data_only'] = True - return self.recurring_run_service_create_recurring_run_with_http_info(body, **kwargs) # noqa: E501 + return self.recurring_run_service_create_recurring_run_with_http_info(recurring_run, **kwargs) # noqa: E501 - def recurring_run_service_create_recurring_run_with_http_info(self, body, **kwargs): # noqa: E501 + def recurring_run_service_create_recurring_run_with_http_info(self, recurring_run, **kwargs): # noqa: E501 """Creates a new recurring run in an experiment, given the experiment ID. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.recurring_run_service_create_recurring_run_with_http_info(body, async_req=True) + >>> thread = api.recurring_run_service_create_recurring_run_with_http_info(recurring_run, async_req=True) >>> result = thread.get() - :param body: The recurring run to be created. (required) - :type body: V2beta1RecurringRun + :param recurring_run: The recurring run to be created. (required) + :type recurring_run: V2beta1RecurringRun :param async_req: Whether to execute the request asynchronously. 
:type async_req: bool, optional :param _return_http_data_only: response data without head status code @@ -97,7 +97,7 @@ def recurring_run_service_create_recurring_run_with_http_info(self, body, **kwar local_var_params = locals() all_params = [ - 'body' + 'recurring_run' ] all_params.extend( [ @@ -116,10 +116,10 @@ def recurring_run_service_create_recurring_run_with_http_info(self, body, **kwar ) local_var_params[key] = val del local_var_params['kwargs'] - # verify the required parameter 'body' is set - if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 - local_var_params['body'] is None): # noqa: E501 - raise ApiValueError("Missing the required parameter `body` when calling `recurring_run_service_create_recurring_run`") # noqa: E501 + # verify the required parameter 'recurring_run' is set + if self.api_client.client_side_validation and ('recurring_run' not in local_var_params or # noqa: E501 + local_var_params['recurring_run'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `recurring_run` when calling `recurring_run_service_create_recurring_run`") # noqa: E501 collection_formats = {} @@ -133,8 +133,8 @@ def recurring_run_service_create_recurring_run_with_http_info(self, body, **kwar local_var_files = {} body_params = None - if 'body' in local_var_params: - body_params = local_var_params['body'] + if 'recurring_run' in local_var_params: + body_params = local_var_params['recurring_run'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/api/report_service_api.py b/backend/api/v2beta1/python_http_client/kfp_server_api/api/report_service_api.py index 2e126efdd42..02fe95499a1 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/api/report_service_api.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/api/report_service_api.py @@ -36,17 +36,17 @@ def __init__(self, api_client=None): api_client = ApiClient() self.api_client = api_client - def report_service_report_scheduled_workflow(self, body, **kwargs): # noqa: E501 + def report_service_report_scheduled_workflow(self, scheduled_workflow, **kwargs): # noqa: E501 """report_service_report_scheduled_workflow # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.report_service_report_scheduled_workflow(body, async_req=True) + >>> thread = api.report_service_report_scheduled_workflow(scheduled_workflow, async_req=True) >>> result = thread.get() - :param body: ScheduledWorkflow a ScheduledWorkflow resource marshalled into a json string. (required) - :type body: str + :param scheduled_workflow: ScheduledWorkflow a ScheduledWorkflow resource marshalled into a json string. (required) + :type scheduled_workflow: str :param async_req: Whether to execute the request asynchronously. 
:type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will @@ -62,19 +62,19 @@ def report_service_report_scheduled_workflow(self, body, **kwargs): # noqa: E50 :rtype: object """ kwargs['_return_http_data_only'] = True - return self.report_service_report_scheduled_workflow_with_http_info(body, **kwargs) # noqa: E501 + return self.report_service_report_scheduled_workflow_with_http_info(scheduled_workflow, **kwargs) # noqa: E501 - def report_service_report_scheduled_workflow_with_http_info(self, body, **kwargs): # noqa: E501 + def report_service_report_scheduled_workflow_with_http_info(self, scheduled_workflow, **kwargs): # noqa: E501 """report_service_report_scheduled_workflow # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.report_service_report_scheduled_workflow_with_http_info(body, async_req=True) + >>> thread = api.report_service_report_scheduled_workflow_with_http_info(scheduled_workflow, async_req=True) >>> result = thread.get() - :param body: ScheduledWorkflow a ScheduledWorkflow resource marshalled into a json string. (required) - :type body: str + :param scheduled_workflow: ScheduledWorkflow a ScheduledWorkflow resource marshalled into a json string. (required) + :type scheduled_workflow: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code @@ -97,7 +97,7 @@ def report_service_report_scheduled_workflow_with_http_info(self, body, **kwargs local_var_params = locals() all_params = [ - 'body' + 'scheduled_workflow' ] all_params.extend( [ @@ -116,10 +116,10 @@ def report_service_report_scheduled_workflow_with_http_info(self, body, **kwargs ) local_var_params[key] = val del local_var_params['kwargs'] - # verify the required parameter 'body' is set - if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 - local_var_params['body'] is None): # noqa: E501 - raise ApiValueError("Missing the required parameter `body` when calling `report_service_report_scheduled_workflow`") # noqa: E501 + # verify the required parameter 'scheduled_workflow' is set + if self.api_client.client_side_validation and ('scheduled_workflow' not in local_var_params or # noqa: E501 + local_var_params['scheduled_workflow'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `scheduled_workflow` when calling `report_service_report_scheduled_workflow`") # noqa: E501 collection_formats = {} @@ -133,8 +133,8 @@ def report_service_report_scheduled_workflow_with_http_info(self, body, **kwargs local_var_files = {} body_params = None - if 'body' in local_var_params: - body_params = local_var_params['body'] + if 'scheduled_workflow' in local_var_params: + body_params = local_var_params['scheduled_workflow'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -162,17 +162,17 @@ def report_service_report_scheduled_workflow_with_http_info(self, body, **kwargs _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) - def report_service_report_workflow(self, body, **kwargs): # noqa: E501 + def report_service_report_workflow(self, workflow, **kwargs): # noqa: E501 """report_service_report_workflow # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.report_service_report_workflow(body, async_req=True) + >>> thread = api.report_service_report_workflow(workflow, async_req=True) >>> result = thread.get() - :param body: Workflow is a workflow custom resource marshalled into a json string. (required) - :type body: str + :param workflow: Workflow is a workflow custom resource marshalled into a json string. (required) + :type workflow: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will @@ -188,19 +188,19 @@ def report_service_report_workflow(self, body, **kwargs): # noqa: E501 :rtype: object """ kwargs['_return_http_data_only'] = True - return self.report_service_report_workflow_with_http_info(body, **kwargs) # noqa: E501 + return self.report_service_report_workflow_with_http_info(workflow, **kwargs) # noqa: E501 - def report_service_report_workflow_with_http_info(self, body, **kwargs): # noqa: E501 + def report_service_report_workflow_with_http_info(self, workflow, **kwargs): # noqa: E501 """report_service_report_workflow # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.report_service_report_workflow_with_http_info(body, async_req=True) + >>> thread = api.report_service_report_workflow_with_http_info(workflow, async_req=True) >>> result = thread.get() - :param body: Workflow is a workflow custom resource marshalled into a json string. (required) - :type body: str + :param workflow: Workflow is a workflow custom resource marshalled into a json string. (required) + :type workflow: str :param async_req: Whether to execute the request asynchronously. 
:type async_req: bool, optional :param _return_http_data_only: response data without head status code @@ -223,7 +223,7 @@ def report_service_report_workflow_with_http_info(self, body, **kwargs): # noqa local_var_params = locals() all_params = [ - 'body' + 'workflow' ] all_params.extend( [ @@ -242,10 +242,10 @@ def report_service_report_workflow_with_http_info(self, body, **kwargs): # noqa ) local_var_params[key] = val del local_var_params['kwargs'] - # verify the required parameter 'body' is set - if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 - local_var_params['body'] is None): # noqa: E501 - raise ApiValueError("Missing the required parameter `body` when calling `report_service_report_workflow`") # noqa: E501 + # verify the required parameter 'workflow' is set + if self.api_client.client_side_validation and ('workflow' not in local_var_params or # noqa: E501 + local_var_params['workflow'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `workflow` when calling `report_service_report_workflow`") # noqa: E501 collection_formats = {} @@ -259,8 +259,8 @@ def report_service_report_workflow_with_http_info(self, body, **kwargs): # noqa local_var_files = {} body_params = None - if 'body' in local_var_params: - body_params = local_var_params['body'] + if 'workflow' in local_var_params: + body_params = local_var_params['workflow'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/api/run_service_api.py b/backend/api/v2beta1/python_http_client/kfp_server_api/api/run_service_api.py index 3094e6c2b89..c1b26e5f472 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/api/run_service_api.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/api/run_service_api.py @@ -47,6 +47,8 @@ def run_service_archive_run(self, run_id, **kwargs): # noqa: E501 :param run_id: The ID of the run to be archived. (required) :type run_id: str + :param experiment_id: The ID of the parent experiment. + :type experiment_id: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will @@ -75,6 +77,8 @@ def run_service_archive_run_with_http_info(self, run_id, **kwargs): # noqa: E50 :param run_id: The ID of the run to be archived. (required) :type run_id: str + :param experiment_id: The ID of the parent experiment. + :type experiment_id: str :param async_req: Whether to execute the request asynchronously. 
:type async_req: bool, optional :param _return_http_data_only: response data without head status code @@ -97,7 +101,8 @@ def run_service_archive_run_with_http_info(self, run_id, **kwargs): # noqa: E50 local_var_params = locals() all_params = [ - 'run_id' + 'run_id', + 'experiment_id' ] all_params.extend( [ @@ -128,6 +133,8 @@ def run_service_archive_run_with_http_info(self, run_id, **kwargs): # noqa: E50 path_params['run_id'] = local_var_params['run_id'] # noqa: E501 query_params = [] + if 'experiment_id' in local_var_params and local_var_params['experiment_id'] is not None: # noqa: E501 + query_params.append(('experiment_id', local_var_params['experiment_id'])) # noqa: E501 header_params = {} @@ -158,17 +165,17 @@ def run_service_archive_run_with_http_info(self, run_id, **kwargs): # noqa: E50 _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) - def run_service_create_run(self, body, **kwargs): # noqa: E501 - """Creates a new run in an experiment specified by experiment ID. If experiment ID is not specified, the run is created in the default experiment. # noqa: E501 + def run_service_create_run(self, run, **kwargs): # noqa: E501 + """Creates a new run in an experiment specified by experiment ID. If experiment ID is not specified, the run is created in the default experiment. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.run_service_create_run(body, async_req=True) + >>> thread = api.run_service_create_run(run, async_req=True) >>> result = thread.get() - :param body: Run to be created. (required) - :type body: V2beta1Run + :param run: Run to be created. (required) + :type run: V2beta1Run :param experiment_id: The ID of the parent experiment. :type experiment_id: str :param async_req: Whether to execute the request asynchronously. @@ -186,19 +193,19 @@ def run_service_create_run(self, body, **kwargs): # noqa: E501 :rtype: V2beta1Run """ kwargs['_return_http_data_only'] = True - return self.run_service_create_run_with_http_info(body, **kwargs) # noqa: E501 + return self.run_service_create_run_with_http_info(run, **kwargs) # noqa: E501 - def run_service_create_run_with_http_info(self, body, **kwargs): # noqa: E501 - """Creates a new run in an experiment specified by experiment ID. If experiment ID is not specified, the run is created in the default experiment. # noqa: E501 + def run_service_create_run_with_http_info(self, run, **kwargs): # noqa: E501 + """Creates a new run in an experiment specified by experiment ID. If experiment ID is not specified, the run is created in the default experiment. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.run_service_create_run_with_http_info(body, async_req=True) + >>> thread = api.run_service_create_run_with_http_info(run, async_req=True) >>> result = thread.get() - :param body: Run to be created. (required) - :type body: V2beta1Run + :param run: Run to be created. (required) + :type run: V2beta1Run :param experiment_id: The ID of the parent experiment. :type experiment_id: str :param async_req: Whether to execute the request asynchronously. 
@@ -223,7 +230,7 @@ def run_service_create_run_with_http_info(self, body, **kwargs): # noqa: E501 local_var_params = locals() all_params = [ - 'body', + 'run', 'experiment_id' ] all_params.extend( @@ -243,10 +250,10 @@ def run_service_create_run_with_http_info(self, body, **kwargs): # noqa: E501 ) local_var_params[key] = val del local_var_params['kwargs'] - # verify the required parameter 'body' is set - if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 - local_var_params['body'] is None): # noqa: E501 - raise ApiValueError("Missing the required parameter `body` when calling `run_service_create_run`") # noqa: E501 + # verify the required parameter 'run' is set + if self.api_client.client_side_validation and ('run' not in local_var_params or # noqa: E501 + local_var_params['run'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `run` when calling `run_service_create_run`") # noqa: E501 collection_formats = {} @@ -262,8 +269,8 @@ def run_service_create_run_with_http_info(self, body, **kwargs): # noqa: E501 local_var_files = {} body_params = None - if 'body' in local_var_params: - body_params = local_var_params['body'] + if 'run' in local_var_params: + body_params = local_var_params['run'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -550,7 +557,7 @@ def run_service_get_run_with_http_info(self, run_id, **kwargs): # noqa: E501 collection_formats=collection_formats) def run_service_list_runs(self, **kwargs): # noqa: E501 - """Finds all runs in an experiment given by experiment ID. If experiment id is not specified, finds all runs across all experiments. # noqa: E501 + """Finds all runs in an experiment given by experiment ID. If experiment id is not specified, finds all runs across all experiments. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -588,7 +595,7 @@ def run_service_list_runs(self, **kwargs): # noqa: E501 return self.run_service_list_runs_with_http_info(**kwargs) # noqa: E501 def run_service_list_runs_with_http_info(self, **kwargs): # noqa: E501 - """Finds all runs in an experiment given by experiment ID. If experiment id is not specified, finds all runs across all experiments. # noqa: E501 + """Finds all runs in an experiment given by experiment ID. If experiment id is not specified, finds all runs across all experiments. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -864,6 +871,8 @@ def run_service_retry_run(self, run_id, **kwargs): # noqa: E501 :param run_id: The ID of the run to be retried. (required) :type run_id: str + :param experiment_id: The ID of the parent experiment. + :type experiment_id: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will @@ -892,6 +901,8 @@ def run_service_retry_run_with_http_info(self, run_id, **kwargs): # noqa: E501 :param run_id: The ID of the run to be retried. (required) :type run_id: str + :param experiment_id: The ID of the parent experiment. + :type experiment_id: str :param async_req: Whether to execute the request asynchronously. 
:type async_req: bool, optional :param _return_http_data_only: response data without head status code @@ -914,7 +925,8 @@ def run_service_retry_run_with_http_info(self, run_id, **kwargs): # noqa: E501 local_var_params = locals() all_params = [ - 'run_id' + 'run_id', + 'experiment_id' ] all_params.extend( [ @@ -945,6 +957,8 @@ def run_service_retry_run_with_http_info(self, run_id, **kwargs): # noqa: E501 path_params['run_id'] = local_var_params['run_id'] # noqa: E501 query_params = [] + if 'experiment_id' in local_var_params and local_var_params['experiment_id'] is not None: # noqa: E501 + query_params.append(('experiment_id', local_var_params['experiment_id'])) # noqa: E501 header_params = {} @@ -986,6 +1000,8 @@ def run_service_terminate_run(self, run_id, **kwargs): # noqa: E501 :param run_id: The ID of the run to be terminated. (required) :type run_id: str + :param experiment_id: The ID of the parent experiment. + :type experiment_id: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will @@ -1014,6 +1030,8 @@ def run_service_terminate_run_with_http_info(self, run_id, **kwargs): # noqa: E :param run_id: The ID of the run to be terminated. (required) :type run_id: str + :param experiment_id: The ID of the parent experiment. + :type experiment_id: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code @@ -1036,7 +1054,8 @@ def run_service_terminate_run_with_http_info(self, run_id, **kwargs): # noqa: E local_var_params = locals() all_params = [ - 'run_id' + 'run_id', + 'experiment_id' ] all_params.extend( [ @@ -1067,6 +1086,8 @@ def run_service_terminate_run_with_http_info(self, run_id, **kwargs): # noqa: E path_params['run_id'] = local_var_params['run_id'] # noqa: E501 query_params = [] + if 'experiment_id' in local_var_params and local_var_params['experiment_id'] is not None: # noqa: E501 + query_params.append(('experiment_id', local_var_params['experiment_id'])) # noqa: E501 header_params = {} @@ -1108,6 +1129,8 @@ def run_service_unarchive_run(self, run_id, **kwargs): # noqa: E501 :param run_id: The ID of the run to be restored. (required) :type run_id: str + :param experiment_id: The ID of the parent experiment. + :type experiment_id: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will @@ -1136,6 +1159,8 @@ def run_service_unarchive_run_with_http_info(self, run_id, **kwargs): # noqa: E :param run_id: The ID of the run to be restored. (required) :type run_id: str + :param experiment_id: The ID of the parent experiment. + :type experiment_id: str :param async_req: Whether to execute the request asynchronously. 
:type async_req: bool, optional :param _return_http_data_only: response data without head status code @@ -1158,7 +1183,8 @@ def run_service_unarchive_run_with_http_info(self, run_id, **kwargs): # noqa: E local_var_params = locals() all_params = [ - 'run_id' + 'run_id', + 'experiment_id' ] all_params.extend( [ @@ -1189,6 +1215,8 @@ def run_service_unarchive_run_with_http_info(self, run_id, **kwargs): # noqa: E path_params['run_id'] = local_var_params['run_id'] # noqa: E501 query_params = [] + if 'experiment_id' in local_var_params and local_var_params['experiment_id'] is not None: # noqa: E501 + query_params.append(('experiment_id', local_var_params['experiment_id'])) # noqa: E501 header_params = {} diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/api/visualization_service_api.py b/backend/api/v2beta1/python_http_client/kfp_server_api/api/visualization_service_api.py index 1fc6f6a0292..0feab759e5b 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/api/visualization_service_api.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/api/visualization_service_api.py @@ -36,19 +36,19 @@ def __init__(self, api_client=None): api_client = ApiClient() self.api_client = api_client - def visualization_service_create_visualization_v1(self, namespace, body, **kwargs): # noqa: E501 + def visualization_service_create_visualization_v1(self, namespace, visualization, **kwargs): # noqa: E501 """visualization_service_create_visualization_v1 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.visualization_service_create_visualization_v1(namespace, body, async_req=True) + >>> thread = api.visualization_service_create_visualization_v1(namespace, visualization, async_req=True) >>> result = thread.get() :param namespace: (required) :type namespace: str - :param body: (required) - :type body: V2beta1Visualization + :param visualization: (required) + :type visualization: V2beta1Visualization :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will @@ -64,21 +64,21 @@ def visualization_service_create_visualization_v1(self, namespace, body, **kwarg :rtype: V2beta1Visualization """ kwargs['_return_http_data_only'] = True - return self.visualization_service_create_visualization_v1_with_http_info(namespace, body, **kwargs) # noqa: E501 + return self.visualization_service_create_visualization_v1_with_http_info(namespace, visualization, **kwargs) # noqa: E501 - def visualization_service_create_visualization_v1_with_http_info(self, namespace, body, **kwargs): # noqa: E501 + def visualization_service_create_visualization_v1_with_http_info(self, namespace, visualization, **kwargs): # noqa: E501 """visualization_service_create_visualization_v1 # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.visualization_service_create_visualization_v1_with_http_info(namespace, body, async_req=True) + >>> thread = api.visualization_service_create_visualization_v1_with_http_info(namespace, visualization, async_req=True) >>> result = thread.get() :param namespace: (required) :type namespace: str - :param body: (required) - :type body: V2beta1Visualization + :param visualization: (required) + :type visualization: V2beta1Visualization :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code @@ -102,7 +102,7 @@ def visualization_service_create_visualization_v1_with_http_info(self, namespace all_params = [ 'namespace', - 'body' + 'visualization' ] all_params.extend( [ @@ -125,10 +125,10 @@ def visualization_service_create_visualization_v1_with_http_info(self, namespace if self.api_client.client_side_validation and ('namespace' not in local_var_params or # noqa: E501 local_var_params['namespace'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `namespace` when calling `visualization_service_create_visualization_v1`") # noqa: E501 - # verify the required parameter 'body' is set - if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 - local_var_params['body'] is None): # noqa: E501 - raise ApiValueError("Missing the required parameter `body` when calling `visualization_service_create_visualization_v1`") # noqa: E501 + # verify the required parameter 'visualization' is set + if self.api_client.client_side_validation and ('visualization' not in local_var_params or # noqa: E501 + local_var_params['visualization'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `visualization` when calling `visualization_service_create_visualization_v1`") # noqa: E501 collection_formats = {} @@ -144,8 +144,8 @@ def visualization_service_create_visualization_v1_with_http_info(self, namespace local_var_files = {} body_params = None - if 'body' in local_var_params: - body_params = local_var_params['body'] + if 'visualization' in local_var_params: + body_params = local_var_params['visualization'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/api_client.py b/backend/api/v2beta1/python_http_client/kfp_server_api/api_client.py index 61be9c44b00..3adfa2fbe14 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/api_client.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/api_client.py @@ -78,7 +78,7 @@ def __init__(self, configuration=None, header_name=None, header_value=None, self.default_headers[header_name] = header_value self.cookie = cookie # Set default User-Agent. 
- self.user_agent = 'OpenAPI-Generator/2.5.0/python' + self.user_agent = 'OpenAPI-Generator/2.14.3/python' self.client_side_validation = configuration.client_side_validation def __enter__(self): diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/configuration.py b/backend/api/v2beta1/python_http_client/kfp_server_api/configuration.py index 1b7448aff8e..a47f22d1b2f 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/configuration.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/configuration.py @@ -351,8 +351,8 @@ def to_debug_report(self): return "Python SDK Debug Report:\n"\ "OS: {env}\n"\ "Python Version: {pyversion}\n"\ - "Version of the API: 2.5.0\n"\ - "SDK Package Version: 2.5.0".\ + "Version of the API: 2.14.3\n"\ + "SDK Package Version: 2.14.3".\ format(env=sys.platform, pyversion=sys.version) def get_host_settings(self): diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/__init__.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/__init__.py index 1e28e370877..6c26453f9d8 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/models/__init__.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/__init__.py @@ -16,7 +16,9 @@ # import models into model package from kfp_server_api.models.authorize_request_resources import AuthorizeRequestResources from kfp_server_api.models.authorize_request_verb import AuthorizeRequestVerb +from kfp_server_api.models.get_artifact_request_artifact_view import GetArtifactRequestArtifactView from kfp_server_api.models.googlerpc_status import GooglerpcStatus +from kfp_server_api.models.list_artifact_request_field import ListArtifactRequestField from kfp_server_api.models.pipeline_task_detail_child_task import PipelineTaskDetailChildTask from kfp_server_api.models.predicate_int_values import PredicateIntValues from kfp_server_api.models.predicate_long_values import PredicateLongValues @@ -24,7 +26,7 @@ from kfp_server_api.models.protobuf_any import ProtobufAny from kfp_server_api.models.protobuf_null_value import ProtobufNullValue from kfp_server_api.models.recurring_run_mode import RecurringRunMode -from kfp_server_api.models.runtime_error import RuntimeError +from kfp_server_api.models.v2beta1_artifact import V2beta1Artifact from kfp_server_api.models.v2beta1_artifact_list import V2beta1ArtifactList from kfp_server_api.models.v2beta1_create_pipeline_and_version_request import V2beta1CreatePipelineAndVersionRequest from kfp_server_api.models.v2beta1_cron_schedule import V2beta1CronSchedule @@ -32,6 +34,7 @@ from kfp_server_api.models.v2beta1_experiment_storage_state import V2beta1ExperimentStorageState from kfp_server_api.models.v2beta1_filter import V2beta1Filter from kfp_server_api.models.v2beta1_get_healthz_response import V2beta1GetHealthzResponse +from kfp_server_api.models.v2beta1_list_artifact_response import V2beta1ListArtifactResponse from kfp_server_api.models.v2beta1_list_experiments_response import V2beta1ListExperimentsResponse from kfp_server_api.models.v2beta1_list_pipeline_versions_response import V2beta1ListPipelineVersionsResponse from kfp_server_api.models.v2beta1_list_pipelines_response import V2beta1ListPipelinesResponse diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/get_artifact_request_artifact_view.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/get_artifact_request_artifact_view.py new file mode 100644 index 00000000000..4effc3ec4c8 --- /dev/null +++ 
b/backend/api/v2beta1/python_http_client/kfp_server_api/models/get_artifact_request_artifact_view.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class GetArtifactRequestArtifactView(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + ARTIFACT_VIEW_UNSPECIFIED = "ARTIFACT_VIEW_UNSPECIFIED" + BASIC = "BASIC" + DOWNLOAD = "DOWNLOAD" + RENDER = "RENDER" + + allowable_values = [ARTIFACT_VIEW_UNSPECIFIED, BASIC, DOWNLOAD, RENDER] # noqa: E501 + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self, local_vars_configuration=None): # noqa: E501 + """GetArtifactRequestArtifactView - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + self.discriminator = None + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, GetArtifactRequestArtifactView): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, GetArtifactRequestArtifactView): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/list_artifact_request_field.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/list_artifact_request_field.py new file mode 100644 index 00000000000..e92bfbc4bb2 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/list_artifact_request_field.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
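# Editor's sketch (not generated code): the GetArtifactRequestArtifactView enum above is
# meant for the new ArtifactService "get" call, e.g. requesting the DOWNLOAD view so the
# response's download_url is populated. The `view` keyword, the artifact ID, and the
# client wiring are assumptions to verify against the regenerated ArtifactServiceApi.
import kfp_server_api
from kfp_server_api.api.artifact_service_api import ArtifactServiceApi
from kfp_server_api.models.get_artifact_request_artifact_view import GetArtifactRequestArtifactView

config = kfp_server_api.Configuration(host="http://localhost:8888")
artifact_api = ArtifactServiceApi(kfp_server_api.ApiClient(config))
artifact = artifact_api.artifact_service_get_artifact(
    artifact_id="123",                             # illustrative ID
    view=GetArtifactRequestArtifactView.DOWNLOAD,  # assumed query-parameter name
)
print(artifact.download_url)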
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ListArtifactRequestField(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + FIELD_UNSPECIFIED = "FIELD_UNSPECIFIED" + CREATE_TIME = "CREATE_TIME" + LAST_UPDATE_TIME = "LAST_UPDATE_TIME" + ID = "ID" + + allowable_values = [FIELD_UNSPECIFIED, CREATE_TIME, LAST_UPDATE_TIME, ID] # noqa: E501 + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self, local_vars_configuration=None): # noqa: E501 + """ListArtifactRequestField - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + self.discriminator = None + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ListArtifactRequestField): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ListArtifactRequestField): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/protobuf_any.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/protobuf_any.py index faef6ec444c..c8cac5bebfb 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/models/protobuf_any.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/protobuf_any.py @@ -33,35 +33,63 @@ class ProtobufAny(object): and the value is json key in definition. 
""" openapi_types = { + 'type': 'str', 'type_url': 'str', 'value': 'str' } attribute_map = { + 'type': '@type', 'type_url': 'type_url', 'value': 'value' } - def __init__(self, type_url=None, value=None, local_vars_configuration=None): # noqa: E501 + def __init__(self, type=None, type_url=None, value=None, local_vars_configuration=None): # noqa: E501 """ProtobufAny - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration + self._type = None self._type_url = None self._value = None self.discriminator = None + if type is not None: + self.type = type if type_url is not None: self.type_url = type_url if value is not None: self.value = value + @property + def type(self): + """Gets the type of this ProtobufAny. # noqa: E501 + + A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. This string must contain at least one \"/\" character. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. As of May 2023, there are no widely used type server implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. # noqa: E501 + + :return: The type of this ProtobufAny. # noqa: E501 + :rtype: str + """ + return self._type + + @type.setter + def type(self, type): + """Sets the type of this ProtobufAny. + + A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. This string must contain at least one \"/\" character. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) 
Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. As of May 2023, there are no widely used type server implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. # noqa: E501 + + :param type: The type of this ProtobufAny. # noqa: E501 + :type type: str + """ + + self._type = type + @property def type_url(self): """Gets the type_url of this ProtobufAny. # noqa: E501 - A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. This string must contain at least one \"/\" character. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. # noqa: E501 + A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. # noqa: E501 :return: The type_url of this ProtobufAny. # noqa: E501 :rtype: str @@ -72,7 +100,7 @@ def type_url(self): def type_url(self, type_url): """Sets the type_url of this ProtobufAny. - A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. This string must contain at least one \"/\" character. 
The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. # noqa: E501 + A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. # noqa: E501 :param type_url: The type_url of this ProtobufAny. # noqa: E501 :type type_url: str diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/runtime_error.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/runtime_error.py deleted file mode 100644 index 7d0a6b32dae..00000000000 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/models/runtime_error.py +++ /dev/null @@ -1,198 +0,0 @@ -# coding: utf-8 - -""" - Kubeflow Pipelines API - - This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. - - Contact: kubeflow-pipelines@google.com - Generated by: https://openapi-generator.tech -""" - - -import pprint -import re # noqa: F401 - -import six - -from kfp_server_api.configuration import Configuration - - -class RuntimeError(object): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - """ - - """ - Attributes: - openapi_types (dict): The key is attribute name - and the value is attribute type. 
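# Editor's sketch (not generated code): with the RuntimeError model removed, error
# payloads in this client are carried by GooglerpcStatus, whose `details` entries are
# ProtobufAny objects that now expose the JSON '@type' discriminator through the new
# `type` attribute shown above. Field values below are illustrative.
from kfp_server_api.models.googlerpc_status import GooglerpcStatus
from kfp_server_api.models.protobuf_any import ProtobufAny

status = GooglerpcStatus(
    code=5,
    message="pipeline not found",
    details=[ProtobufAny(type="type.googleapis.com/google.rpc.ErrorInfo")],
)
print(status.details[0].type)  # serialized under the '@type' key per attribute_map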
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - openapi_types = { - 'error': 'str', - 'code': 'int', - 'message': 'str', - 'details': 'list[ProtobufAny]' - } - - attribute_map = { - 'error': 'error', - 'code': 'code', - 'message': 'message', - 'details': 'details' - } - - def __init__(self, error=None, code=None, message=None, details=None, local_vars_configuration=None): # noqa: E501 - """RuntimeError - a model defined in OpenAPI""" # noqa: E501 - if local_vars_configuration is None: - local_vars_configuration = Configuration() - self.local_vars_configuration = local_vars_configuration - - self._error = None - self._code = None - self._message = None - self._details = None - self.discriminator = None - - if error is not None: - self.error = error - if code is not None: - self.code = code - if message is not None: - self.message = message - if details is not None: - self.details = details - - @property - def error(self): - """Gets the error of this RuntimeError. # noqa: E501 - - - :return: The error of this RuntimeError. # noqa: E501 - :rtype: str - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this RuntimeError. - - - :param error: The error of this RuntimeError. # noqa: E501 - :type error: str - """ - - self._error = error - - @property - def code(self): - """Gets the code of this RuntimeError. # noqa: E501 - - - :return: The code of this RuntimeError. # noqa: E501 - :rtype: int - """ - return self._code - - @code.setter - def code(self, code): - """Sets the code of this RuntimeError. - - - :param code: The code of this RuntimeError. # noqa: E501 - :type code: int - """ - - self._code = code - - @property - def message(self): - """Gets the message of this RuntimeError. # noqa: E501 - - - :return: The message of this RuntimeError. # noqa: E501 - :rtype: str - """ - return self._message - - @message.setter - def message(self, message): - """Sets the message of this RuntimeError. - - - :param message: The message of this RuntimeError. # noqa: E501 - :type message: str - """ - - self._message = message - - @property - def details(self): - """Gets the details of this RuntimeError. # noqa: E501 - - - :return: The details of this RuntimeError. # noqa: E501 - :rtype: list[ProtobufAny] - """ - return self._details - - @details.setter - def details(self, details): - """Sets the details of this RuntimeError. - - - :param details: The details of this RuntimeError. 
# noqa: E501 - :type details: list[ProtobufAny] - """ - - self._details = details - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.openapi_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, RuntimeError): - return False - - return self.to_dict() == other.to_dict() - - def __ne__(self, other): - """Returns true if both objects are not equal""" - if not isinstance(other, RuntimeError): - return True - - return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_artifact.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_artifact.py new file mode 100644 index 00000000000..61785827207 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_artifact.py @@ -0,0 +1,424 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class V2beta1Artifact(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'artifact_id': 'str', + 'storage_provider': 'str', + 'storage_path': 'str', + 'uri': 'str', + 'download_url': 'str', + 'namespace': 'str', + 'artifact_type': 'str', + 'artifact_size': 'str', + 'created_at': 'datetime', + 'last_updated_at': 'datetime', + 'error': 'GooglerpcStatus', + 'render_url': 'str' + } + + attribute_map = { + 'artifact_id': 'artifact_id', + 'storage_provider': 'storage_provider', + 'storage_path': 'storage_path', + 'uri': 'uri', + 'download_url': 'download_url', + 'namespace': 'namespace', + 'artifact_type': 'artifact_type', + 'artifact_size': 'artifact_size', + 'created_at': 'created_at', + 'last_updated_at': 'last_updated_at', + 'error': 'error', + 'render_url': 'render_url' + } + + def __init__(self, artifact_id=None, storage_provider=None, storage_path=None, uri=None, download_url=None, namespace=None, artifact_type=None, artifact_size=None, created_at=None, last_updated_at=None, error=None, render_url=None, local_vars_configuration=None): # noqa: E501 + """V2beta1Artifact - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._artifact_id = None + self._storage_provider = None + self._storage_path = None + self._uri = None + self._download_url = None + self._namespace = None + self._artifact_type = None + self._artifact_size = None + self._created_at = None + self._last_updated_at = None + self._error = None + self._render_url = None + self.discriminator = None + + if artifact_id is not None: + self.artifact_id = artifact_id + if storage_provider is not None: + self.storage_provider = storage_provider + if storage_path is not None: + self.storage_path = storage_path + if uri is not None: + self.uri = uri + if download_url is not None: + self.download_url = download_url + if namespace is not None: + self.namespace = namespace + if artifact_type is not None: + self.artifact_type = artifact_type + if artifact_size is not None: + self.artifact_size = artifact_size + if created_at is not None: + self.created_at = created_at + if last_updated_at is not None: + self.last_updated_at = last_updated_at + if error is not None: + self.error = error + if render_url is not None: + self.render_url = render_url + + @property + def artifact_id(self): + """Gets the artifact_id of this V2beta1Artifact. # noqa: E501 + + Unique Artifact ID. Generated by MLMD. # noqa: E501 + + :return: The artifact_id of this V2beta1Artifact. # noqa: E501 + :rtype: str + """ + return self._artifact_id + + @artifact_id.setter + def artifact_id(self, artifact_id): + """Sets the artifact_id of this V2beta1Artifact. + + Unique Artifact ID. Generated by MLMD. # noqa: E501 + + :param artifact_id: The artifact_id of this V2beta1Artifact. # noqa: E501 + :type artifact_id: str + """ + + self._artifact_id = artifact_id + + @property + def storage_provider(self): + """Gets the storage_provider of this V2beta1Artifact. # noqa: E501 + + Storage Provider to which this Artifact is located (e.g. S3, Minio, etc.). # noqa: E501 + + :return: The storage_provider of this V2beta1Artifact. # noqa: E501 + :rtype: str + """ + return self._storage_provider + + @storage_provider.setter + def storage_provider(self, storage_provider): + """Sets the storage_provider of this V2beta1Artifact. + + Storage Provider to which this Artifact is located (e.g. S3, Minio, etc.). # noqa: E501 + + :param storage_provider: The storage_provider of this V2beta1Artifact. 
# noqa: E501 + :type storage_provider: str + """ + + self._storage_provider = storage_provider + + @property + def storage_path(self): + """Gets the storage_path of this V2beta1Artifact. # noqa: E501 + + The path location of this Artifact within the storage provider. For example an object located at s3://my-bucket/path/a/b/c will result in \"path/a/b/c\". # noqa: E501 + + :return: The storage_path of this V2beta1Artifact. # noqa: E501 + :rtype: str + """ + return self._storage_path + + @storage_path.setter + def storage_path(self, storage_path): + """Sets the storage_path of this V2beta1Artifact. + + The path location of this Artifact within the storage provider. For example an object located at s3://my-bucket/path/a/b/c will result in \"path/a/b/c\". # noqa: E501 + + :param storage_path: The storage_path of this V2beta1Artifact. # noqa: E501 + :type storage_path: str + """ + + self._storage_path = storage_path + + @property + def uri(self): + """Gets the uri of this V2beta1Artifact. # noqa: E501 + + + :return: The uri of this V2beta1Artifact. # noqa: E501 + :rtype: str + """ + return self._uri + + @uri.setter + def uri(self, uri): + """Sets the uri of this V2beta1Artifact. + + + :param uri: The uri of this V2beta1Artifact. # noqa: E501 + :type uri: str + """ + + self._uri = uri + + @property + def download_url(self): + """Gets the download_url of this V2beta1Artifact. # noqa: E501 + + Optional Output. Specifies a signed-url that can be used to download this Artifact directly from its store. # noqa: E501 + + :return: The download_url of this V2beta1Artifact. # noqa: E501 + :rtype: str + """ + return self._download_url + + @download_url.setter + def download_url(self, download_url): + """Sets the download_url of this V2beta1Artifact. + + Optional Output. Specifies a signed-url that can be used to download this Artifact directly from its store. # noqa: E501 + + :param download_url: The download_url of this V2beta1Artifact. # noqa: E501 + :type download_url: str + """ + + self._download_url = download_url + + @property + def namespace(self): + """Gets the namespace of this V2beta1Artifact. # noqa: E501 + + The namespace associated with this Artifact. This is determined by the namespace of the parent PipelineRun that created this Artifact. # noqa: E501 + + :return: The namespace of this V2beta1Artifact. # noqa: E501 + :rtype: str + """ + return self._namespace + + @namespace.setter + def namespace(self, namespace): + """Sets the namespace of this V2beta1Artifact. + + The namespace associated with this Artifact. This is determined by the namespace of the parent PipelineRun that created this Artifact. # noqa: E501 + + :param namespace: The namespace of this V2beta1Artifact. # noqa: E501 + :type namespace: str + """ + + self._namespace = namespace + + @property + def artifact_type(self): + """Gets the artifact_type of this V2beta1Artifact. # noqa: E501 + + + :return: The artifact_type of this V2beta1Artifact. # noqa: E501 + :rtype: str + """ + return self._artifact_type + + @artifact_type.setter + def artifact_type(self, artifact_type): + """Sets the artifact_type of this V2beta1Artifact. + + + :param artifact_type: The artifact_type of this V2beta1Artifact. # noqa: E501 + :type artifact_type: str + """ + + self._artifact_type = artifact_type + + @property + def artifact_size(self): + """Gets the artifact_size of this V2beta1Artifact. # noqa: E501 + + The size of the artifact in bytes. If the artifact does not exist in object store (e.g. Metrics) then this is omitted. 
# noqa: E501 + + :return: The artifact_size of this V2beta1Artifact. # noqa: E501 + :rtype: str + """ + return self._artifact_size + + @artifact_size.setter + def artifact_size(self, artifact_size): + """Sets the artifact_size of this V2beta1Artifact. + + The size of the artifact in bytes. If the artifact does not exist in object store (e.g. Metrics) then this is omitted. # noqa: E501 + + :param artifact_size: The artifact_size of this V2beta1Artifact. # noqa: E501 + :type artifact_size: str + """ + + self._artifact_size = artifact_size + + @property + def created_at(self): + """Gets the created_at of this V2beta1Artifact. # noqa: E501 + + Creation time of the artifact. # noqa: E501 + + :return: The created_at of this V2beta1Artifact. # noqa: E501 + :rtype: datetime + """ + return self._created_at + + @created_at.setter + def created_at(self, created_at): + """Sets the created_at of this V2beta1Artifact. + + Creation time of the artifact. # noqa: E501 + + :param created_at: The created_at of this V2beta1Artifact. # noqa: E501 + :type created_at: datetime + """ + + self._created_at = created_at + + @property + def last_updated_at(self): + """Gets the last_updated_at of this V2beta1Artifact. # noqa: E501 + + Last update time of the artifact. # noqa: E501 + + :return: The last_updated_at of this V2beta1Artifact. # noqa: E501 + :rtype: datetime + """ + return self._last_updated_at + + @last_updated_at.setter + def last_updated_at(self, last_updated_at): + """Sets the last_updated_at of this V2beta1Artifact. + + Last update time of the artifact. # noqa: E501 + + :param last_updated_at: The last_updated_at of this V2beta1Artifact. # noqa: E501 + :type last_updated_at: datetime + """ + + self._last_updated_at = last_updated_at + + @property + def error(self): + """Gets the error of this V2beta1Artifact. # noqa: E501 + + + :return: The error of this V2beta1Artifact. # noqa: E501 + :rtype: GooglerpcStatus + """ + return self._error + + @error.setter + def error(self, error): + """Sets the error of this V2beta1Artifact. + + + :param error: The error of this V2beta1Artifact. # noqa: E501 + :type error: GooglerpcStatus + """ + + self._error = error + + @property + def render_url(self): + """Gets the render_url of this V2beta1Artifact. # noqa: E501 + + Optional Output. Specifies a signed URL that can be used to render this Artifact directly from its store. # noqa: E501 + + :return: The render_url of this V2beta1Artifact. # noqa: E501 + :rtype: str + """ + return self._render_url + + @render_url.setter + def render_url(self, render_url): + """Sets the render_url of this V2beta1Artifact. + + Optional Output. Specifies a signed URL that can be used to render this Artifact directly from its store. # noqa: E501 + + :param render_url: The render_url of this V2beta1Artifact. 
# noqa: E501 + :type render_url: str + """ + + self._render_url = render_url + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, V2beta1Artifact): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, V2beta1Artifact): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_get_healthz_response.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_get_healthz_response.py index 414c2d53439..db0ca380413 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_get_healthz_response.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_get_healthz_response.py @@ -33,24 +33,29 @@ class V2beta1GetHealthzResponse(object): and the value is json key in definition. """ openapi_types = { - 'multi_user': 'bool' + 'multi_user': 'bool', + 'pipeline_store': 'str' } attribute_map = { - 'multi_user': 'multi_user' + 'multi_user': 'multi_user', + 'pipeline_store': 'pipeline_store' } - def __init__(self, multi_user=None, local_vars_configuration=None): # noqa: E501 + def __init__(self, multi_user=None, pipeline_store=None, local_vars_configuration=None): # noqa: E501 """V2beta1GetHealthzResponse - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._multi_user = None + self._pipeline_store = None self.discriminator = None if multi_user is not None: self.multi_user = multi_user + if pipeline_store is not None: + self.pipeline_store = pipeline_store @property def multi_user(self): @@ -75,6 +80,27 @@ def multi_user(self, multi_user): self._multi_user = multi_user + @property + def pipeline_store(self): + """Gets the pipeline_store of this V2beta1GetHealthzResponse. # noqa: E501 + + + :return: The pipeline_store of this V2beta1GetHealthzResponse. # noqa: E501 + :rtype: str + """ + return self._pipeline_store + + @pipeline_store.setter + def pipeline_store(self, pipeline_store): + """Sets the pipeline_store of this V2beta1GetHealthzResponse. + + + :param pipeline_store: The pipeline_store of this V2beta1GetHealthzResponse. 
# noqa: E501 + :type pipeline_store: str + """ + + self._pipeline_store = pipeline_store + def to_dict(self): """Returns the model properties as a dict""" result = {} diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_list_artifact_response.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_list_artifact_response.py new file mode 100644 index 00000000000..a0b193cabaa --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_list_artifact_response.py @@ -0,0 +1,148 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class V2beta1ListArtifactResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'artifacts': 'list[V2beta1Artifact]', + 'next_page_token': 'str' + } + + attribute_map = { + 'artifacts': 'artifacts', + 'next_page_token': 'next_page_token' + } + + def __init__(self, artifacts=None, next_page_token=None, local_vars_configuration=None): # noqa: E501 + """V2beta1ListArtifactResponse - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._artifacts = None + self._next_page_token = None + self.discriminator = None + + if artifacts is not None: + self.artifacts = artifacts + if next_page_token is not None: + self.next_page_token = next_page_token + + @property + def artifacts(self): + """Gets the artifacts of this V2beta1ListArtifactResponse. # noqa: E501 + + List of retrieved artifacts. # noqa: E501 + + :return: The artifacts of this V2beta1ListArtifactResponse. # noqa: E501 + :rtype: list[V2beta1Artifact] + """ + return self._artifacts + + @artifacts.setter + def artifacts(self, artifacts): + """Sets the artifacts of this V2beta1ListArtifactResponse. + + List of retrieved artifacts. # noqa: E501 + + :param artifacts: The artifacts of this V2beta1ListArtifactResponse. # noqa: E501 + :type artifacts: list[V2beta1Artifact] + """ + + self._artifacts = artifacts + + @property + def next_page_token(self): + """Gets the next_page_token of this V2beta1ListArtifactResponse. # noqa: E501 + + + :return: The next_page_token of this V2beta1ListArtifactResponse. # noqa: E501 + :rtype: str + """ + return self._next_page_token + + @next_page_token.setter + def next_page_token(self, next_page_token): + """Sets the next_page_token of this V2beta1ListArtifactResponse. + + + :param next_page_token: The next_page_token of this V2beta1ListArtifactResponse. 
# noqa: E501 + :type next_page_token: str + """ + + self._next_page_token = next_page_token + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, V2beta1ListArtifactResponse): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, V2beta1ListArtifactResponse): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_pipeline.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_pipeline.py index 0ee858c5bcf..8e31d71f988 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_pipeline.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_pipeline.py @@ -35,6 +35,7 @@ class V2beta1Pipeline(object): openapi_types = { 'pipeline_id': 'str', 'display_name': 'str', + 'name': 'str', 'description': 'str', 'created_at': 'datetime', 'namespace': 'str', @@ -44,13 +45,14 @@ class V2beta1Pipeline(object): attribute_map = { 'pipeline_id': 'pipeline_id', 'display_name': 'display_name', + 'name': 'name', 'description': 'description', 'created_at': 'created_at', 'namespace': 'namespace', 'error': 'error' } - def __init__(self, pipeline_id=None, display_name=None, description=None, created_at=None, namespace=None, error=None, local_vars_configuration=None): # noqa: E501 + def __init__(self, pipeline_id=None, display_name=None, name=None, description=None, created_at=None, namespace=None, error=None, local_vars_configuration=None): # noqa: E501 """V2beta1Pipeline - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() @@ -58,6 +60,7 @@ def __init__(self, pipeline_id=None, display_name=None, description=None, create self._pipeline_id = None self._display_name = None + self._name = None self._description = None self._created_at = None self._namespace = None @@ -68,6 +71,8 @@ def __init__(self, pipeline_id=None, display_name=None, description=None, create self.pipeline_id = pipeline_id if display_name is not None: self.display_name = display_name + if name is not None: + self.name = name if description is not None: self.description = description if created_at is not None: @@ -104,7 +109,7 @@ def pipeline_id(self, pipeline_id): def display_name(self): """Gets the display_name of this V2beta1Pipeline. # noqa: E501 - Required input field. Pipeline name provided by user. # noqa: E501 + Optional input field. Pipeline display name provided by user. # noqa: E501 :return: The display_name of this V2beta1Pipeline. 
# noqa: E501 :rtype: str @@ -115,7 +120,7 @@ def display_name(self): def display_name(self, display_name): """Sets the display_name of this V2beta1Pipeline. - Required input field. Pipeline name provided by user. # noqa: E501 + Optional input field. Pipeline display name provided by user. # noqa: E501 :param display_name: The display_name of this V2beta1Pipeline. # noqa: E501 :type display_name: str @@ -123,6 +128,29 @@ def display_name(self, display_name): self._display_name = display_name + @property + def name(self): + """Gets the name of this V2beta1Pipeline. # noqa: E501 + + Required input field. Pipeline name provided by user. # noqa: E501 + + :return: The name of this V2beta1Pipeline. # noqa: E501 + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this V2beta1Pipeline. + + Required input field. Pipeline name provided by user. # noqa: E501 + + :param name: The name of this V2beta1Pipeline. # noqa: E501 + :type name: str + """ + + self._name = name + @property def description(self): """Gets the description of this V2beta1Pipeline. # noqa: E501 diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_pipeline_task_detail.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_pipeline_task_detail.py index eebeef8f155..96f0c79a08d 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_pipeline_task_detail.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_pipeline_task_detail.py @@ -424,7 +424,7 @@ def parent_task_id(self, parent_task_id): def state_history(self): """Gets the state_history of this V2beta1PipelineTaskDetail. # noqa: E501 - A sequence of task statuses. This field keeps a record of state transitions. # noqa: E501 + A sequence of task statuses. This field keeps a record of state transitions. # noqa: E501 :return: The state_history of this V2beta1PipelineTaskDetail. # noqa: E501 :rtype: list[V2beta1RuntimeStatus] @@ -435,7 +435,7 @@ def state_history(self): def state_history(self, state_history): """Sets the state_history of this V2beta1PipelineTaskDetail. - A sequence of task statuses. This field keeps a record of state transitions. # noqa: E501 + A sequence of task statuses. This field keeps a record of state transitions. # noqa: E501 :param state_history: The state_history of this V2beta1PipelineTaskDetail. 
# noqa: E501 :type state_history: list[V2beta1RuntimeStatus] diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_pipeline_version.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_pipeline_version.py index 288ae4bbd4f..5becda19f0a 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_pipeline_version.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_pipeline_version.py @@ -36,6 +36,7 @@ class V2beta1PipelineVersion(object): 'pipeline_id': 'str', 'pipeline_version_id': 'str', 'display_name': 'str', + 'name': 'str', 'description': 'str', 'created_at': 'datetime', 'package_url': 'V2beta1Url', @@ -48,6 +49,7 @@ class V2beta1PipelineVersion(object): 'pipeline_id': 'pipeline_id', 'pipeline_version_id': 'pipeline_version_id', 'display_name': 'display_name', + 'name': 'name', 'description': 'description', 'created_at': 'created_at', 'package_url': 'package_url', @@ -56,7 +58,7 @@ class V2beta1PipelineVersion(object): 'error': 'error' } - def __init__(self, pipeline_id=None, pipeline_version_id=None, display_name=None, description=None, created_at=None, package_url=None, code_source_url=None, pipeline_spec=None, error=None, local_vars_configuration=None): # noqa: E501 + def __init__(self, pipeline_id=None, pipeline_version_id=None, display_name=None, name=None, description=None, created_at=None, package_url=None, code_source_url=None, pipeline_spec=None, error=None, local_vars_configuration=None): # noqa: E501 """V2beta1PipelineVersion - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() @@ -65,6 +67,7 @@ def __init__(self, pipeline_id=None, pipeline_version_id=None, display_name=None self._pipeline_id = None self._pipeline_version_id = None self._display_name = None + self._name = None self._description = None self._created_at = None self._package_url = None @@ -79,6 +82,8 @@ def __init__(self, pipeline_id=None, pipeline_version_id=None, display_name=None self.pipeline_version_id = pipeline_version_id if display_name is not None: self.display_name = display_name + if name is not None: + self.name = name if description is not None: self.description = description if created_at is not None: @@ -142,7 +147,7 @@ def pipeline_version_id(self, pipeline_version_id): def display_name(self): """Gets the display_name of this V2beta1PipelineVersion. # noqa: E501 - Required input field. Pipeline version name provided by user. # noqa: E501 + Optional input field. Pipeline version display name provided by user. # noqa: E501 :return: The display_name of this V2beta1PipelineVersion. # noqa: E501 :rtype: str @@ -153,7 +158,7 @@ def display_name(self): def display_name(self, display_name): """Sets the display_name of this V2beta1PipelineVersion. - Required input field. Pipeline version name provided by user. # noqa: E501 + Optional input field. Pipeline version display name provided by user. # noqa: E501 :param display_name: The display_name of this V2beta1PipelineVersion. # noqa: E501 :type display_name: str @@ -161,6 +166,29 @@ def display_name(self, display_name): self._display_name = display_name + @property + def name(self): + """Gets the name of this V2beta1PipelineVersion. # noqa: E501 + + Required input field. Pipeline version name provided by user. # noqa: E501 + + :return: The name of this V2beta1PipelineVersion. 
# noqa: E501 + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this V2beta1PipelineVersion. + + Required input field. Pipeline version name provided by user. # noqa: E501 + + :param name: The name of this V2beta1PipelineVersion. # noqa: E501 + :type name: str + """ + + self._name = name + @property def description(self): """Gets the description of this V2beta1PipelineVersion. # noqa: E501 diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_run.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_run.py index 834139adc53..5f7c81436b7 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_run.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_run.py @@ -520,7 +520,7 @@ def recurring_run_id(self, recurring_run_id): def state_history(self): """Gets the state_history of this V2beta1Run. # noqa: E501 - Output. A sequence of run statuses. This field keeps a record of state transitions. # noqa: E501 + Output. A sequence of run statuses. This field keeps a record of state transitions. # noqa: E501 :return: The state_history of this V2beta1Run. # noqa: E501 :rtype: list[V2beta1RuntimeStatus] @@ -531,7 +531,7 @@ def state_history(self): def state_history(self, state_history): """Sets the state_history of this V2beta1Run. - Output. A sequence of run statuses. This field keeps a record of state transitions. # noqa: E501 + Output. A sequence of run statuses. This field keeps a record of state transitions. # noqa: E501 :param state_history: The state_history of this V2beta1Run. # noqa: E501 :type state_history: list[V2beta1RuntimeStatus] diff --git a/backend/api/v2beta1/python_http_client/setup.py b/backend/api/v2beta1/python_http_client/setup.py index c68c56e42b8..67217140497 100644 --- a/backend/api/v2beta1/python_http_client/setup.py +++ b/backend/api/v2beta1/python_http_client/setup.py @@ -13,7 +13,7 @@ from setuptools import setup, find_packages # noqa: H301 NAME = "kfp-server-api" -VERSION = "2.5.0" +VERSION = "2.14.3" # To install the library, run the following # # python setup.py install diff --git a/backend/api/v2beta1/python_http_client/test/test_artifact_service_api.py b/backend/api/v2beta1/python_http_client/test/test_artifact_service_api.py new file mode 100644 index 00000000000..c41a0d1e387 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/test/test_artifact_service_api.py @@ -0,0 +1,47 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest + +import kfp_server_api +from kfp_server_api.api.artifact_service_api import ArtifactServiceApi # noqa: E501 +from kfp_server_api.rest import ApiException + + +class TestArtifactServiceApi(unittest.TestCase): + """ArtifactServiceApi unit test stubs""" + + def setUp(self): + self.api = kfp_server_api.api.artifact_service_api.ArtifactServiceApi() # noqa: E501 + + def tearDown(self): + pass + + def test_artifact_service_get_artifact(self): + """Test case for artifact_service_get_artifact + + Finds a specific Artifact by ID. # noqa: E501 + """ + pass + + def test_artifact_service_list_artifacts(self): + """Test case for artifact_service_list_artifacts + + Finds all artifacts within the specified namespace. 
Namespace field is required. In multi-user mode, the caller is required to have RBAC verb \"list\" on the \"artifacts\" resource for the specified namespace. # noqa: E501 + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/v2beta1/python_http_client/test/test_get_artifact_request_artifact_view.py b/backend/api/v2beta1/python_http_client/test/test_get_artifact_request_artifact_view.py new file mode 100644 index 00000000000..9e4432480d7 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/test/test_get_artifact_request_artifact_view.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.get_artifact_request_artifact_view import GetArtifactRequestArtifactView # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestGetArtifactRequestArtifactView(unittest.TestCase): + """GetArtifactRequestArtifactView unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test GetArtifactRequestArtifactView + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.get_artifact_request_artifact_view.GetArtifactRequestArtifactView() # noqa: E501 + if include_optional : + return GetArtifactRequestArtifactView( + ) + else : + return GetArtifactRequestArtifactView( + ) + + def testGetArtifactRequestArtifactView(self): + """Test GetArtifactRequestArtifactView""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/v2beta1/python_http_client/test/test_googlerpc_status.py b/backend/api/v2beta1/python_http_client/test/test_googlerpc_status.py index 455c7cb33c6..3c682191be8 100644 --- a/backend/api/v2beta1/python_http_client/test/test_googlerpc_status.py +++ b/backend/api/v2beta1/python_http_client/test/test_googlerpc_status.py @@ -39,9 +39,9 @@ def make_instance(self, include_optional): code = 56, message = '0', details = [ - kfp_server_api.models.protobuf_any.protobufAny( - type_url = '0', - value = 'YQ==', ) + { + 'key' : None + } ] ) else : diff --git a/backend/api/v2beta1/python_http_client/test/test_list_artifact_request_field.py b/backend/api/v2beta1/python_http_client/test/test_list_artifact_request_field.py new file mode 100644 index 00000000000..882f4c49fa0 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/test/test_list_artifact_request_field.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
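# Editor's sketch (not generated code): listing artifacts in a namespace with the new
# ArtifactServiceApi and reading fields of the V2beta1Artifact model introduced above.
# The namespace keyword follows the "Namespace field is required" note in the test stub;
# the paging argument name is an assumption to confirm against the regenerated client.
import kfp_server_api
from kfp_server_api.api.artifact_service_api import ArtifactServiceApi

config = kfp_server_api.Configuration(host="http://localhost:8888")
artifact_api = ArtifactServiceApi(kfp_server_api.ApiClient(config))
resp = artifact_api.artifact_service_list_artifacts(
    namespace="kubeflow-user-example-com",
    page_size=10,  # assumed paging parameter name
)
for a in resp.artifacts or []:
    print(a.artifact_id, a.artifact_type, a.storage_provider, a.artifact_size)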
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.list_artifact_request_field import ListArtifactRequestField # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestListArtifactRequestField(unittest.TestCase): + """ListArtifactRequestField unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test ListArtifactRequestField + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.list_artifact_request_field.ListArtifactRequestField() # noqa: E501 + if include_optional : + return ListArtifactRequestField( + ) + else : + return ListArtifactRequestField( + ) + + def testListArtifactRequestField(self): + """Test ListArtifactRequestField""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/v2beta1/python_http_client/test/test_pipeline_service_api.py b/backend/api/v2beta1/python_http_client/test/test_pipeline_service_api.py index a51690b49e6..64cc1eef6a3 100644 --- a/backend/api/v2beta1/python_http_client/test/test_pipeline_service_api.py +++ b/backend/api/v2beta1/python_http_client/test/test_pipeline_service_api.py @@ -52,7 +52,7 @@ def test_pipeline_service_create_pipeline_version(self): def test_pipeline_service_delete_pipeline(self): """Test case for pipeline_service_delete_pipeline - Deletes an empty pipeline by ID. Returns error if the pipeline has pipeline versions. # noqa: E501 + Deletes a pipeline by ID. If cascade is false (default), it returns an error if the pipeline has any versions. If cascade is true, it will also delete all pipeline versions. # noqa: E501 """ pass diff --git a/backend/api/v2beta1/python_http_client/test/test_protobuf_any.py b/backend/api/v2beta1/python_http_client/test/test_protobuf_any.py index 1e50cb32660..e69ba489391 100644 --- a/backend/api/v2beta1/python_http_client/test/test_protobuf_any.py +++ b/backend/api/v2beta1/python_http_client/test/test_protobuf_any.py @@ -36,6 +36,7 @@ def make_instance(self, include_optional): # model = kfp_server_api.models.protobuf_any.ProtobufAny() # noqa: E501 if include_optional : return ProtobufAny( + type = '0', type_url = '0', value = 'YQ==' ) diff --git a/backend/api/v2beta1/python_http_client/test/test_run_service_api.py b/backend/api/v2beta1/python_http_client/test/test_run_service_api.py index db3bd6a7c68..f9737e87fb5 100644 --- a/backend/api/v2beta1/python_http_client/test/test_run_service_api.py +++ b/backend/api/v2beta1/python_http_client/test/test_run_service_api.py @@ -38,7 +38,7 @@ def test_run_service_archive_run(self): def test_run_service_create_run(self): """Test case for run_service_create_run - Creates a new run in an experiment specified by experiment ID. If experiment ID is not specified, the run is created in the default experiment. # noqa: E501 + Creates a new run in an experiment specified by experiment ID. If experiment ID is not specified, the run is created in the default experiment. 
# noqa: E501 """ pass @@ -59,7 +59,7 @@ def test_run_service_get_run(self): def test_run_service_list_runs(self): """Test case for run_service_list_runs - Finds all runs in an experiment given by experiment ID. If experiment id is not specified, finds all runs across all experiments. # noqa: E501 + Finds all runs in an experiment given by experiment ID. If experiment id is not specified, finds all runs across all experiments. # noqa: E501 """ pass diff --git a/backend/api/v2beta1/python_http_client/test/test_runtime_error.py b/backend/api/v2beta1/python_http_client/test/test_runtime_error.py deleted file mode 100644 index 92731a23363..00000000000 --- a/backend/api/v2beta1/python_http_client/test/test_runtime_error.py +++ /dev/null @@ -1,59 +0,0 @@ -# coding: utf-8 - -""" - Kubeflow Pipelines API - - This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. - - Contact: kubeflow-pipelines@google.com - Generated by: https://openapi-generator.tech -""" - - -from __future__ import absolute_import - -import unittest -import datetime - -import kfp_server_api -from kfp_server_api.models.runtime_error import RuntimeError # noqa: E501 -from kfp_server_api.rest import ApiException - -class TestRuntimeError(unittest.TestCase): - """RuntimeError unit test stubs""" - - def setUp(self): - pass - - def tearDown(self): - pass - - def make_instance(self, include_optional): - """Test RuntimeError - include_option is a boolean, when False only required - params are included, when True both required and - optional params are included """ - # model = kfp_server_api.models.runtime_error.RuntimeError() # noqa: E501 - if include_optional : - return RuntimeError( - error = '0', - code = 56, - message = '0', - details = [ - kfp_server_api.models.protobuf_any.protobufAny( - type_url = '0', - value = 'YQ==', ) - ] - ) - else : - return RuntimeError( - ) - - def testRuntimeError(self): - """Test RuntimeError""" - inst_req_only = self.make_instance(include_optional=False) - inst_req_and_optional = self.make_instance(include_optional=True) - - -if __name__ == '__main__': - unittest.main() diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_artifact.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_artifact.py new file mode 100644 index 00000000000..d2e372b43f5 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_artifact.py @@ -0,0 +1,70 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.v2beta1_artifact import V2beta1Artifact # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestV2beta1Artifact(unittest.TestCase): + """V2beta1Artifact unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test V2beta1Artifact + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.v2beta1_artifact.V2beta1Artifact() # noqa: E501 + if include_optional : + return V2beta1Artifact( + artifact_id = '0', + storage_provider = '0', + storage_path = '0', + uri = '0', + download_url = '0', + namespace = '0', + artifact_type = '0', + artifact_size = '0', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + last_updated_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + error = kfp_server_api.models.googlerpc_status.googlerpcStatus( + code = 56, + message = '0', + details = [ + { + 'key' : None + } + ], ), + render_url = '0' + ) + else : + return V2beta1Artifact( + ) + + def testV2beta1Artifact(self): + """Test V2beta1Artifact""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_pipeline_and_version_request.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_pipeline_and_version_request.py index 785c54aec28..056556d162a 100644 --- a/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_pipeline_and_version_request.py +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_pipeline_and_version_request.py @@ -39,6 +39,7 @@ def make_instance(self, include_optional): pipeline = kfp_server_api.models.v2beta1_pipeline.v2beta1Pipeline( pipeline_id = '0', display_name = '0', + name = '0', description = '0', created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), namespace = '0', @@ -46,14 +47,15 @@ def make_instance(self, include_optional): code = 56, message = '0', details = [ - kfp_server_api.models.protobuf_any.protobufAny( - type_url = '0', - value = 'YQ==', ) + { + 'key' : None + } ], ), ), pipeline_version = kfp_server_api.models.v2beta1_pipeline_version.v2beta1PipelineVersion( pipeline_id = '0', pipeline_version_id = '0', display_name = '0', + name = '0', description = '0', created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), package_url = kfp_server_api.models.v2beta1_url.v2beta1Url( @@ -64,9 +66,9 @@ def make_instance(self, include_optional): code = 56, message = '0', details = [ - kfp_server_api.models.protobuf_any.protobufAny( - type_url = '0', - value = 'YQ==', ) + { + 'key' : None + } ], ), ) ) else : diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_get_healthz_response.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_get_healthz_response.py index c1b90b6b4cc..33e1478a20e 100644 --- a/backend/api/v2beta1/python_http_client/test/test_v2beta1_get_healthz_response.py +++ 
b/backend/api/v2beta1/python_http_client/test/test_v2beta1_get_healthz_response.py @@ -36,7 +36,8 @@ def make_instance(self, include_optional): # model = kfp_server_api.models.v2beta1_get_healthz_response.V2beta1GetHealthzResponse() # noqa: E501 if include_optional : return V2beta1GetHealthzResponse( - multi_user = True + multi_user = True, + pipeline_store = '0' ) else : return V2beta1GetHealthzResponse( diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_artifact_response.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_artifact_response.py new file mode 100644 index 00000000000..daa207845d1 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_artifact_response.py @@ -0,0 +1,74 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.v2beta1_list_artifact_response import V2beta1ListArtifactResponse # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestV2beta1ListArtifactResponse(unittest.TestCase): + """V2beta1ListArtifactResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test V2beta1ListArtifactResponse + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.v2beta1_list_artifact_response.V2beta1ListArtifactResponse() # noqa: E501 + if include_optional : + return V2beta1ListArtifactResponse( + artifacts = [ + kfp_server_api.models.v2beta1_artifact.v2beta1Artifact( + artifact_id = '0', + storage_provider = '0', + storage_path = '0', + uri = '0', + download_url = '0', + namespace = '0', + artifact_type = '0', + artifact_size = '0', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + last_updated_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + error = kfp_server_api.models.googlerpc_status.googlerpcStatus( + code = 56, + message = '0', + details = [ + { + 'key' : None + } + ], ), + render_url = '0', ) + ], + next_page_token = '0' + ) + else : + return V2beta1ListArtifactResponse( + ) + + def testV2beta1ListArtifactResponse(self): + """Test V2beta1ListArtifactResponse""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_pipeline_versions_response.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_pipeline_versions_response.py index da9ff68696f..1237bed2724 100644 --- a/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_pipeline_versions_response.py +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_pipeline_versions_response.py @@ -41,6 +41,7 @@ def make_instance(self, include_optional): pipeline_id = '0', pipeline_version_id = '0', display_name = '0', + name = '0', description = '0', created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), package_url = kfp_server_api.models.v2beta1_url.v2beta1Url( @@ 
-51,9 +52,9 @@ def make_instance(self, include_optional): code = 56, message = '0', details = [ - kfp_server_api.models.protobuf_any.protobufAny( - type_url = '0', - value = 'YQ==', ) + { + 'key' : None + } ], ), ) ], next_page_token = '0', diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_pipelines_response.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_pipelines_response.py index b64bfbb10cb..ca503b186a2 100644 --- a/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_pipelines_response.py +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_pipelines_response.py @@ -40,6 +40,7 @@ def make_instance(self, include_optional): kfp_server_api.models.v2beta1_pipeline.v2beta1Pipeline( pipeline_id = '0', display_name = '0', + name = '0', description = '0', created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), namespace = '0', @@ -47,9 +48,9 @@ def make_instance(self, include_optional): code = 56, message = '0', details = [ - kfp_server_api.models.protobuf_any.protobufAny( - type_url = '0', - value = 'YQ==', ) + { + 'key' : None + } ], ), ) ], total_size = 56, diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_recurring_runs_response.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_recurring_runs_response.py index 5dbdc1a2083..3859523cf90 100644 --- a/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_recurring_runs_response.py +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_recurring_runs_response.py @@ -70,9 +70,9 @@ def make_instance(self, include_optional): code = 56, message = '0', details = [ - kfp_server_api.models.protobuf_any.protobufAny( - type_url = '0', - value = 'YQ==', ) + { + 'key' : None + } ], ), no_catchup = True, namespace = '0', diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_runs_response.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_runs_response.py index 19713507c16..588296c5ce9 100644 --- a/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_runs_response.py +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_runs_response.py @@ -62,9 +62,9 @@ def make_instance(self, include_optional): code = 56, message = '0', details = [ - kfp_server_api.models.protobuf_any.protobufAny( - type_url = '0', - value = 'YQ==', ) + { + 'key' : None + } ], ), run_details = kfp_server_api.models.v2beta1_run_details.v2beta1RunDetails( pipeline_context_id = '0', diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_pipeline.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_pipeline.py index 7c3240bd918..13565268542 100644 --- a/backend/api/v2beta1/python_http_client/test/test_v2beta1_pipeline.py +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_pipeline.py @@ -38,6 +38,7 @@ def make_instance(self, include_optional): return V2beta1Pipeline( pipeline_id = '0', display_name = '0', + name = '0', description = '0', created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), namespace = '0', @@ -45,9 +46,9 @@ def make_instance(self, include_optional): code = 56, message = '0', details = [ - kfp_server_api.models.protobuf_any.protobufAny( - type_url = '0', - value = 'YQ==', ) + { + 'key' : None + } ], ) ) else : diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_pipeline_task_detail.py 
b/backend/api/v2beta1/python_http_client/test/test_v2beta1_pipeline_task_detail.py index bf2e89db9ec..a1fe3ce24cf 100644 --- a/backend/api/v2beta1/python_http_client/test/test_v2beta1_pipeline_task_detail.py +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_pipeline_task_detail.py @@ -57,9 +57,9 @@ def make_instance(self, include_optional): code = 56, message = '0', details = [ - kfp_server_api.models.protobuf_any.protobufAny( - type_url = '0', - value = 'YQ==', ) + { + 'key' : None + } ], ), inputs = { 'key' : kfp_server_api.models.v2beta1_artifact_list.v2beta1ArtifactList( @@ -82,9 +82,9 @@ def make_instance(self, include_optional): code = 56, message = '0', details = [ - kfp_server_api.models.protobuf_any.protobufAny( - type_url = '0', - value = 'YQ==', ) + { + 'key' : None + } ], ), ) ], pod_name = '0', diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_pipeline_version.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_pipeline_version.py index 1c932aee5df..7ac31ea7f85 100644 --- a/backend/api/v2beta1/python_http_client/test/test_v2beta1_pipeline_version.py +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_pipeline_version.py @@ -39,6 +39,7 @@ def make_instance(self, include_optional): pipeline_id = '0', pipeline_version_id = '0', display_name = '0', + name = '0', description = '0', created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), package_url = kfp_server_api.models.v2beta1_url.v2beta1Url( @@ -49,9 +50,9 @@ def make_instance(self, include_optional): code = 56, message = '0', details = [ - kfp_server_api.models.protobuf_any.protobufAny( - type_url = '0', - value = 'YQ==', ) + { + 'key' : None + } ], ) ) else : diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_recurring_run.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_recurring_run.py index 9fdd7bae7fc..ef66273c4dd 100644 --- a/backend/api/v2beta1/python_http_client/test/test_v2beta1_recurring_run.py +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_recurring_run.py @@ -68,9 +68,9 @@ def make_instance(self, include_optional): code = 56, message = '0', details = [ - kfp_server_api.models.protobuf_any.protobufAny( - type_url = '0', - value = 'YQ==', ) + { + 'key' : None + } ], ), no_catchup = True, namespace = '0', diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_run.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_run.py index 4dfca86b0b3..bcdd39a53d1 100644 --- a/backend/api/v2beta1/python_http_client/test/test_v2beta1_run.py +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_run.py @@ -60,9 +60,9 @@ def make_instance(self, include_optional): code = 56, message = '0', details = [ - kfp_server_api.models.protobuf_any.protobufAny( - type_url = '0', - value = 'YQ==', ) + { + 'key' : None + } ], ), run_details = kfp_server_api.models.v2beta1_run_details.v2beta1RunDetails( pipeline_context_id = '0', @@ -90,9 +90,9 @@ def make_instance(self, include_optional): code = 56, message = '0', details = [ - kfp_server_api.models.protobuf_any.protobufAny( - type_url = '0', - value = 'YQ==', ) + { + 'key' : None + } ], ), inputs = { 'key' : kfp_server_api.models.v2beta1_artifact_list.v2beta1ArtifactList( @@ -124,9 +124,9 @@ def make_instance(self, include_optional): code = 56, message = '0', details = [ - kfp_server_api.models.protobuf_any.protobufAny( - type_url = '0', - value = 'YQ==', ) + { + 'key' : None + } ], ), ) ] ) diff --git 
a/backend/api/v2beta1/python_http_client/test/test_v2beta1_run_details.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_run_details.py index 8b9caaf7b9e..25c68518d8c 100644 --- a/backend/api/v2beta1/python_http_client/test/test_v2beta1_run_details.py +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_run_details.py @@ -61,9 +61,9 @@ def make_instance(self, include_optional): code = 56, message = '0', details = [ - kfp_server_api.models.protobuf_any.protobufAny( - type_url = '0', - value = 'YQ==', ) + { + 'key' : None + } ], ), inputs = { 'key' : kfp_server_api.models.v2beta1_artifact_list.v2beta1ArtifactList( diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_runtime_status.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_runtime_status.py index d9a1e8f9909..0c0610d4719 100644 --- a/backend/api/v2beta1/python_http_client/test/test_v2beta1_runtime_status.py +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_runtime_status.py @@ -42,9 +42,9 @@ def make_instance(self, include_optional): code = 56, message = '0', details = [ - kfp_server_api.models.protobuf_any.protobufAny( - type_url = '0', - value = 'YQ==', ) + { + 'key' : None + } ], ) ) else : diff --git a/backend/api/v2beta1/recurring_run.proto b/backend/api/v2beta1/recurring_run.proto index 66b810901a2..ac7570dc6be 100644 --- a/backend/api/v2beta1/recurring_run.proto +++ b/backend/api/v2beta1/recurring_run.proto @@ -25,9 +25,9 @@ import "google/protobuf/timestamp.proto"; import "google/protobuf/empty.proto"; import "google/rpc/status.proto"; -import "protoc-gen-swagger/options/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; -option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { schemes: [1, 2], // http + https }; diff --git a/backend/api/v2beta1/run.proto b/backend/api/v2beta1/run.proto index 5c48ab19317..4825286e45e 100644 --- a/backend/api/v2beta1/run.proto +++ b/backend/api/v2beta1/run.proto @@ -22,10 +22,10 @@ import "google/api/annotations.proto"; import "google/protobuf/timestamp.proto"; import "google/protobuf/struct.proto"; import "google/rpc/status.proto"; -import "protoc-gen-swagger/options/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; import "backend/api/v2beta1/runtime_config.proto"; -option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { schemes: [1, 2], // http + https responses: { key: "default"; @@ -61,7 +61,7 @@ option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { }; service RunService { - // Creates a new run in an experiment specified by experiment ID. + // Creates a new run in an experiment specified by experiment ID. // If experiment ID is not specified, the run is created in the default experiment. rpc CreateRun(CreateRunRequest) returns (Run) { option (google.api.http) = { @@ -77,7 +77,7 @@ service RunService { }; } - // Finds all runs in an experiment given by experiment ID. + // Finds all runs in an experiment given by experiment ID. // If experiment id is not specified, finds all runs across all experiments. rpc ListRuns(ListRunsRequest) returns (ListRunsResponse) { option (google.api.http) = { @@ -133,14 +133,14 @@ message Run { // Input. ID of the parent experiment. // The default experiment ID will be used if this is not specified. string experiment_id = 1; - + // Output. 
Unique run ID. Generated by API server. string run_id = 2; - + // Required input. Name provided by user, // or auto generated if run is created by a recurring run. string display_name = 3; - + // Describes whether an entity is available or archived. enum StorageState { // Default state. This state in not used @@ -155,11 +155,11 @@ message Run { // Output. Specifies whether this run is in archived or available mode. StorageState storage_state = 4; - + // Optional input. Short description of the run. string description = 5; - - // Required input. Specifies the source of the pipeline spec for this + + // Required input. Specifies the source of the pipeline spec for this // run. Can be either a pipeline version id, or a pipeline spec. oneof pipeline_source { // This field is Deprecated. The pipeline version id is under pipeline_version_reference for v2. @@ -171,39 +171,39 @@ message Run { // Reference to a pipeline containing pipeline_id and optionally the pipeline_version_id. PipelineVersionReference pipeline_version_reference = 18; } - - // Required input. Runtime config of the run. + + // Required input. Runtime config of the run. RuntimeConfig runtime_config = 8; - + // Optional input. Specifies which kubernetes service account is used. string service_account = 9; - + // Output. Creation time of the run. google.protobuf.Timestamp created_at = 10; - + // Output. When this run is scheduled to start. This could be different from // created_at. For example, if a run is from a backfilling job that was supposed // to run 2 month ago, the created_at will be 2 month behind scheduled_at. google.protobuf.Timestamp scheduled_at = 11; - + // Output. Completion of the run. google.protobuf.Timestamp finished_at = 12; // Output. Runtime state of a run. RuntimeState state = 13; - + // In case any error happens retrieving a run field, only run ID // and the error message is returned. Client has the flexibility of choosing // how to handle the error. This is especially useful during listing call. google.rpc.Status error = 14; - + // Output. Runtime details of a run. RunDetails run_details = 15; // ID of the recurring run that triggered this run. string recurring_run_id = 16; - // Output. A sequence of run statuses. This field keeps a record + // Output. A sequence of run statuses. This field keeps a record // of state transitions. repeated RuntimeStatus state_history = 17; } @@ -259,7 +259,7 @@ message RuntimeStatus { // The error that occurred during the state. May be set when the state is // any of the non-final states (PENDING/RUNNING/CANCELING) or FAILED state. // If the state is FAILED, the error here is final and not going to be - // retried. If the state is a non-final state, the error indicates that a + // retried. If the state is a non-final state, the error indicates that a // system-error being retried. google.rpc.Status error = 3; } @@ -320,21 +320,21 @@ message PipelineTaskDetail { // Empty if the task is at the root level. string parent_task_id = 13; - // A sequence of task statuses. This field keeps a record + // A sequence of task statuses. This field keeps a record // of state transitions. repeated RuntimeStatus state_history = 14; // Name of the corresponding pod assigned by the orchestration engine. // Also known as node_id. string pod_name = 15; - + // A dependent task that requires this one to succeed. // Represented by either task_id or pod_name. message ChildTask { oneof child_task { // System-generated ID of a task. 
string task_id = 1; - + // Name of the corresponding pod assigned by the orchestration engine. // Also known as node_id. string pod_name = 2; diff --git a/backend/api/v2beta1/swagger/artifacts.swagger.json b/backend/api/v2beta1/swagger/artifacts.swagger.json index 06bc5e17e72..88647b58a76 100644 --- a/backend/api/v2beta1/swagger/artifacts.swagger.json +++ b/backend/api/v2beta1/swagger/artifacts.swagger.json @@ -4,6 +4,11 @@ "title": "backend/api/v2beta1/artifacts.proto", "version": "version not set" }, + "tags": [ + { + "name": "ArtifactService" + } + ], "consumes": [ "application/json" ], @@ -25,14 +30,14 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { "name": "max_result_size", - "description": "Optional.\nMax number of resources to return in the result. A value of zero or less\nwill result in the default (20).\nThe API implementation also enforces an upper-bound of 100, and picks the\nminimum between this value and the one specified here.\n[default = 20].", + "description": "Optional.\nMax number of resources to return in the result. A value of zero or less\nwill result in the default (20).\nThe API implementation also enforces an upper-bound of 100, and picks the\nminimum between this value and the one specified here.\n[default = 20]", "in": "query", "required": false, "type": "integer", @@ -40,7 +45,7 @@ }, { "name": "order_by_field", - "description": "Optional. Ordering field. [default = ID].", + "description": "Optional. Ordering field. [default = ID]", "in": "query", "required": false, "type": "string", @@ -54,7 +59,7 @@ }, { "name": "order_by", - "description": "Optional. Can be either \"asc\" (ascending) or \"desc\" (descending). [default = asc].", + "description": "Optional. Can be either \"asc\" (ascending) or \"desc\" (descending). [default = asc]", "in": "query", "required": false, "type": "string" @@ -93,7 +98,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -163,6 +168,7 @@ "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" }, "description": "A list of messages that carry the error details. There is a common set of\nmessage types for APIs to use." @@ -173,38 +179,13 @@ "protobufAny": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. 
(Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { + "@type": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." } }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, - "runtimeError": { - "type": "object", - "properties": { - "error": { - "type": "string" - }, - "code": { - "type": "integer", - "format": "int32" - }, - "message": { - "type": "string" - }, - "details": { - "type": "array", - "items": { - "$ref": "#/definitions/protobufAny" - } - } - } + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" }, "v2beta1Artifact": { "type": "object", @@ -268,6 +249,7 @@ "artifacts": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/v2beta1Artifact" }, "description": "List of retrieved artifacts." 
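Note on usage (illustrative commentary, not part of the generated diff): the artifacts.swagger.json definitions above, together with the new GetArtifactRequestArtifactView, ListArtifactRequestField, V2beta1Artifact, and V2beta1ListArtifactResponse test stubs, describe the new ArtifactService list endpoint. The sketch below shows how the regenerated kfp_server_api client might be called. The ArtifactServiceApi class and the artifact_service_list_artifacts method name are assumptions based on the OpenAPI generator's usual <service>_<rpc> naming convention and do not appear verbatim in this diff; the query parameters (namespace, max_result_size, order_by_field, order_by) and the response/model fields are taken from the swagger definition and test stubs above, and the namespace value is only an example.

# Illustrative sketch only: assumes the OpenAPI-generated ArtifactServiceApi
# wrapper exists with the generator's conventional naming; class and method
# names below are assumptions, not taken verbatim from this diff.
import kfp_server_api

configuration = kfp_server_api.Configuration(host="http://localhost:8888")
api_client = kfp_server_api.ApiClient(configuration)
artifact_api = kfp_server_api.ArtifactServiceApi(api_client)  # assumed class name

# Query parameters mirror the swagger definition above: namespace is required
# (in multi-user mode the caller needs RBAC verb "list" on "artifacts"),
# max_result_size defaults to 20 (capped at 100), order_by_field defaults to
# ID, and order_by is "asc" or "desc".
response = artifact_api.artifact_service_list_artifacts(  # assumed method name
    namespace="kubeflow-user-example-com",  # example namespace
    max_result_size=20,
    order_by_field="ID",
    order_by="asc",
)

# response is a V2beta1ListArtifactResponse: artifacts is a list of
# V2beta1Artifact (artifact_id, uri, namespace, artifact_type, download_url,
# render_url, ...) and next_page_token supports pagination.
for artifact in response.artifacts or []:
    print(artifact.artifact_id, artifact.artifact_type, artifact.uri)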
diff --git a/backend/api/v2beta1/swagger/auth.swagger.json b/backend/api/v2beta1/swagger/auth.swagger.json index cba77e8d2c2..b322e488022 100644 --- a/backend/api/v2beta1/swagger/auth.swagger.json +++ b/backend/api/v2beta1/swagger/auth.swagger.json @@ -4,6 +4,11 @@ "title": "backend/api/v2beta1/auth.proto", "version": "version not set" }, + "tags": [ + { + "name": "AuthService" + } + ], "schemes": [ "http", "https" @@ -22,25 +27,28 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { "name": "namespace", + "description": "Namespace the resource belongs to.", "in": "query", "required": false, "type": "string" }, { "name": "resources", + "description": "Resource type asking for authorization.", "in": "query", "required": false, "type": "string", @@ -52,6 +60,7 @@ }, { "name": "verb", + "description": "Verb on the resource asking for authorization.", "in": "query", "required": false, "type": "string", @@ -91,41 +100,39 @@ "default": "UNASSIGNED_VERB", "description": "Type of verbs that act on the resources." }, - "protobufAny": { + "googlerpcStatus": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." 
- } - }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, - "runtimeError": { - "type": "object", - "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", - "format": "int32" + "format": "int32", + "description": "The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]." }, "message": { - "type": "string" + "type": "string", + "description": "A developer-facing error message, which should be in English. Any\nuser-facing error message should be localized and sent in the\n[google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client." }, "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" - } + }, + "description": "A list of messages that carry the error details. There is a common set of\nmessage types for APIs to use." } - } + }, + "description": "The `Status` type defines a logical error model that is suitable for\ndifferent programming environments, including REST APIs and RPC APIs. It is\nused by [gRPC](https://github.com/grpc). Each `Status` message contains\nthree pieces of data: error code, error message, and error details.\n\nYou can find out more about this error model and how to work with it in the\n[API Design Guide](https://cloud.google.com/apis/design/errors)." 
+ }, + "protobufAny": { + "type": "object", + "properties": { + "@type": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + } + }, + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } }, "securityDefinitions": { diff --git a/backend/api/v2beta1/swagger/experiment.swagger.json b/backend/api/v2beta1/swagger/experiment.swagger.json index ef416c70e84..8c390bb97b4 100644 --- a/backend/api/v2beta1/swagger/experiment.swagger.json +++ b/backend/api/v2beta1/swagger/experiment.swagger.json @@ -4,6 +4,11 @@ "title": "backend/api/v2beta1/experiment.proto", "version": "version not set" }, + "tags": [ + { + "name": "ExperimentService" + } + ], "schemes": [ "http", "https" @@ -29,7 +34,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -88,13 +93,13 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "experiment", "description": "The experiment to be created.", "in": "body", "required": true, @@ -122,7 +127,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -146,13 +151,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -178,13 +184,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -210,13 +217,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -236,41 +244,39 @@ } }, "definitions": { - "protobufAny": { + "googlerpcStatus": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. 
However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." - } - }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, - "runtimeError": { - "type": "object", - "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", - "format": "int32" + "format": "int32", + "description": "The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]." }, "message": { - "type": "string" + "type": "string", + "description": "A developer-facing error message, which should be in English. 
Any\nuser-facing error message should be localized and sent in the\n[google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client." }, "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" - } + }, + "description": "A list of messages that carry the error details. There is a common set of\nmessage types for APIs to use." } - } + }, + "description": "The `Status` type defines a logical error model that is suitable for\ndifferent programming environments, including REST APIs and RPC APIs. It is\nused by [gRPC](https://github.com/grpc). Each `Status` message contains\nthree pieces of data: error code, error message, and error details.\n\nYou can find out more about this error model and how to work with it in the\n[API Design Guide](https://cloud.google.com/apis/design/errors)." + }, + "protobufAny": { + "type": "object", + "properties": { + "@type": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
+ } + }, + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" }, "v2beta1Experiment": { "type": "object", @@ -323,6 +329,7 @@ "experiments": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/v2beta1Experiment" }, "description": "A list of experiments returned." diff --git a/backend/api/v2beta1/swagger/filter.swagger.json b/backend/api/v2beta1/swagger/filter.swagger.json index 3c80e5530c5..67b7b8764c0 100644 --- a/backend/api/v2beta1/swagger/filter.swagger.json +++ b/backend/api/v2beta1/swagger/filter.swagger.json @@ -4,6 +4,11 @@ "title": "backend/api/v2beta1/filter.proto", "version": "version not set" }, + "tags": [ + { + "name": "DummyFilterService" + } + ], "consumes": [ "application/json" ], @@ -50,41 +55,39 @@ }, "description": "List of strings." }, - "protobufAny": { - "type": "object", - "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. 
However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." - } - }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, - "runtimeError": { + "googlerpcStatus": { "type": "object", "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", - "format": "int32" + "format": "int32", + "description": "The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]." }, "message": { - "type": "string" + "type": "string", + "description": "A developer-facing error message, which should be in English. 
Any\nuser-facing error message should be localized and sent in the\n[google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client." }, "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" - } + }, + "description": "A list of messages that carry the error details. There is a common set of\nmessage types for APIs to use." + } + }, + "description": "The `Status` type defines a logical error model that is suitable for\ndifferent programming environments, including REST APIs and RPC APIs. It is\nused by [gRPC](https://github.com/grpc). Each `Status` message contains\nthree pieces of data: error code, error message, and error details.\n\nYou can find out more about this error model and how to work with it in the\n[API Design Guide](https://cloud.google.com/apis/design/errors)." + }, + "protobufAny": { + "type": "object", + "properties": { + "@type": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
} - } + }, + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" }, "v2beta1Filter": { "type": "object", @@ -92,6 +95,7 @@ "predicates": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/v2beta1Predicate" }, "description": "All predicates are AND-ed when this filter is applied." diff --git a/backend/api/v2beta1/swagger/healthz.swagger.json b/backend/api/v2beta1/swagger/healthz.swagger.json index 077b1670005..2775f836549 100644 --- a/backend/api/v2beta1/swagger/healthz.swagger.json +++ b/backend/api/v2beta1/swagger/healthz.swagger.json @@ -4,6 +4,11 @@ "title": "backend/api/v2beta1/healthz.proto", "version": "version not set" }, + "tags": [ + { + "name": "HealthzService" + } + ], "schemes": [ "http", "https" @@ -29,7 +34,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -40,41 +45,39 @@ } }, "definitions": { - "protobufAny": { + "googlerpcStatus": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). 
The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." - } - }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, - "runtimeError": { - "type": "object", - "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", - "format": "int32" + "format": "int32", + "description": "The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]." }, "message": { - "type": "string" + "type": "string", + "description": "A developer-facing error message, which should be in English. Any\nuser-facing error message should be localized and sent in the\n[google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client." }, "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" - } + }, + "description": "A list of messages that carry the error details. There is a common set of\nmessage types for APIs to use." } - } + }, + "description": "The `Status` type defines a logical error model that is suitable for\ndifferent programming environments, including REST APIs and RPC APIs. It is\nused by [gRPC](https://github.com/grpc). Each `Status` message contains\nthree pieces of data: error code, error message, and error details.\n\nYou can find out more about this error model and how to work with it in the\n[API Design Guide](https://cloud.google.com/apis/design/errors)." + }, + "protobufAny": { + "type": "object", + "properties": { + "@type": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
+ } + }, + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" }, "v2beta1GetHealthzResponse": { "type": "object", diff --git a/backend/api/v2beta1/swagger/kfp_api_single_file.swagger.json b/backend/api/v2beta1/swagger/kfp_api_single_file.swagger.json index 211548622b6..cfb9de9856e 100644 --- a/backend/api/v2beta1/swagger/kfp_api_single_file.swagger.json +++ b/backend/api/v2beta1/swagger/kfp_api_single_file.swagger.json @@ -2,7 +2,7 @@ "swagger": "2.0", "info": { "title": "Kubeflow Pipelines API", - "version": "2.5.0", + "version": "2.14.3", "description": "This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.", "contact": { "name": "google", @@ -14,6 +14,11 @@ "url": "https://raw.githubusercontent.com/kubeflow/pipelines/master/LICENSE" } }, + "tags": [ + { + "name": "VisualizationService" + } + ], "consumes": [ "application/json" ], @@ -35,14 +40,14 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { "name": "max_result_size", - "description": "Optional.\nMax number of resources to return in the result. 
A value of zero or less\nwill result in the default (20).\nThe API implementation also enforces an upper-bound of 100, and picks the\nminimum between this value and the one specified here.\n[default = 20].", + "description": "Optional.\nMax number of resources to return in the result. A value of zero or less\nwill result in the default (20).\nThe API implementation also enforces an upper-bound of 100, and picks the\nminimum between this value and the one specified here.\n[default = 20]", "in": "query", "required": false, "type": "integer", @@ -50,7 +55,7 @@ }, { "name": "order_by_field", - "description": "Optional. Ordering field. [default = ID].", + "description": "Optional. Ordering field. [default = ID]", "in": "query", "required": false, "type": "string", @@ -64,7 +69,7 @@ }, { "name": "order_by", - "description": "Optional. Can be either \"asc\" (ascending) or \"desc\" (descending). [default = asc].", + "description": "Optional. Can be either \"asc\" (ascending) or \"desc\" (descending). [default = asc]", "in": "query", "required": false, "type": "string" @@ -103,7 +108,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -142,25 +147,28 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { "name": "namespace", + "description": "Namespace the resource belongs to.", "in": "query", "required": false, "type": "string" }, { "name": "resources", + "description": "Resource type asking for authorization.", "in": "query", "required": false, "type": "string", @@ -172,6 +180,7 @@ }, { "name": "verb", + "description": "Verb on the resource asking for authorization.", "in": "query", "required": false, "type": "string", @@ -203,7 +212,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -262,13 +271,13 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "experiment", "description": "The experiment to be created.", "in": "body", "required": true, @@ -296,7 +305,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -320,13 +329,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -352,13 +362,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -384,13 +395,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -422,7 +434,7 @@ 
"default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -445,7 +457,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -504,13 +516,13 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "pipeline", "description": "Required input. Pipeline that needs to be created.", "in": "body", "required": true, @@ -538,7 +550,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -571,7 +583,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -610,7 +622,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -628,19 +640,20 @@ ] }, "delete": { - "summary": "Deletes an empty pipeline by ID. Returns error if the pipeline has pipeline versions.", + "summary": "Deletes a pipeline by ID. If cascade is false (default), it returns an error if the\npipeline has any versions. If cascade is true, it will also delete all pipeline versions.", "operationId": "PipelineService_DeletePipeline", "responses": { "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -651,6 +664,13 @@ "in": "path", "required": true, "type": "string" + }, + { + "name": "cascade", + "description": "Optional. If true, the pipeline and all its versions will be deleted.\nIf false (default), only the pipeline will be deleted if it has no versions.", + "in": "query", + "required": false, + "type": "boolean" } ], "tags": [ @@ -672,7 +692,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -731,7 +751,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -744,7 +764,7 @@ "type": "string" }, { - "name": "body", + "name": "pipeline_version", "description": "Required input. 
Pipeline version ID to be created.", "in": "body", "required": true, @@ -772,7 +792,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -803,13 +823,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -970,7 +991,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1036,13 +1057,13 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "recurring_run", "description": "The recurring run to be created.", "in": "body", "required": true, @@ -1070,7 +1091,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1094,13 +1115,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1126,13 +1148,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1158,13 +1181,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1189,19 +1213,20 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "scheduled_workflow", "description": "ScheduledWorkflow a ScheduledWorkflow resource marshalled into a json string.", "in": "body", "required": true, @@ -1222,19 +1247,20 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "workflow", "description": "Workflow is a workflow custom resource marshalled into a json string.", "in": "body", "required": true, @@ -1250,7 +1276,7 @@ }, "/apis/v2beta1/runs": { "get": { - "summary": "Finds all runs in an experiment given by experiment ID. 
\nIf experiment id is not specified, finds all runs across all experiments.", + "summary": "Finds all runs in an experiment given by experiment ID.\nIf experiment id is not specified, finds all runs across all experiments.", "operationId": "RunService_ListRuns", "responses": { "200": { @@ -1262,7 +1288,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1316,7 +1342,7 @@ ] }, "post": { - "summary": "Creates a new run in an experiment specified by experiment ID. \nIf experiment ID is not specified, the run is created in the default experiment.", + "summary": "Creates a new run in an experiment specified by experiment ID.\nIf experiment ID is not specified, the run is created in the default experiment.", "operationId": "RunService_CreateRun", "responses": { "200": { @@ -1328,13 +1354,13 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "run", "description": "Run to be created.", "in": "body", "required": true, @@ -1369,7 +1395,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1400,13 +1426,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1445,7 +1472,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1492,13 +1519,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1509,6 +1537,13 @@ "in": "path", "required": true, "type": "string" + }, + { + "name": "experiment_id", + "description": "The ID of the parent experiment.", + "in": "query", + "required": false, + "type": "string" } ], "tags": [ @@ -1524,13 +1559,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1541,6 +1577,13 @@ "in": "path", "required": true, "type": "string" + }, + { + "name": "experiment_id", + "description": "The ID of the parent experiment.", + "in": "query", + "required": false, + "type": "string" } ], "tags": [ @@ -1556,13 +1599,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1573,6 +1617,13 @@ "in": "path", "required": true, "type": "string" + }, + { + "name": "experiment_id", + "description": "The ID of the parent experiment.", + "in": "query", + "required": false, + "type": "string" } ], "tags": [ @@ -1588,13 +1639,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, 
"default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1605,6 +1657,13 @@ "in": "path", "required": true, "type": "string" + }, + { + "name": "experiment_id", + "description": "The ID of the parent experiment.", + "in": "query", + "required": false, + "type": "string" } ], "tags": [ @@ -1625,7 +1684,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -1637,7 +1696,7 @@ "type": "string" }, { - "name": "body", + "name": "visualization", "in": "body", "required": true, "schema": { @@ -1688,6 +1747,7 @@ "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" }, "description": "A list of messages that carry the error details. There is a common set of\nmessage types for APIs to use." @@ -1698,9 +1758,13 @@ "protobufAny": { "type": "object", "properties": { + "@type": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, "type_url": { "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. 
Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." }, "value": { "type": "string", @@ -1708,28 +1772,8 @@ "description": "Must be a valid serialized protocol buffer of the above specified type." } }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, - "runtimeError": { - "type": "object", - "properties": { - "error": { - "type": "string" - }, - "code": { - "type": "integer", - "format": "int32" - }, - "message": { - "type": "string" - }, - "details": { - "type": "array", - "items": { - "$ref": "#/definitions/protobufAny" - } - } - } + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" }, "v2beta1Artifact": { "type": "object", @@ -1793,6 +1837,7 @@ "artifacts": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/v2beta1Artifact" }, "description": "List of retrieved artifacts." @@ -1874,6 +1919,7 @@ "experiments": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/v2beta1Experiment" }, "description": "A list of experiments returned." 
@@ -1933,6 +1979,7 @@ "predicates": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/v2beta1Predicate" }, "description": "All predicates are AND-ed when this filter is applied." @@ -2020,7 +2067,7 @@ "NULL_VALUE" ], "default": "NULL_VALUE", - "description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\n The JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value." + "description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\nThe JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value." }, "v2beta1CreatePipelineAndVersionRequest": { "type": "object", @@ -2041,6 +2088,7 @@ "pipeline_versions": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/v2beta1PipelineVersion" }, "description": "Returned pipeline versions." @@ -2062,6 +2110,7 @@ "pipelines": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/v2beta1Pipeline" }, "description": "Returned pipelines." @@ -2202,6 +2251,7 @@ "recurringRuns": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/v2beta1RecurringRun" }, "description": "A list of recurring runs returned." @@ -2346,9 +2396,7 @@ "properties": { "parameters": { "type": "object", - "additionalProperties": { - "type": "object" - }, + "additionalProperties": {}, "description": "The runtime parameters of the Pipeline. The parameters will be\nused to replace the placeholders at runtime." }, "pipeline_root": { @@ -2404,6 +2452,7 @@ "runs": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/v2beta1Run" }, "description": "List of retrieved runs." @@ -2487,9 +2536,10 @@ "state_history": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/v2beta1RuntimeStatus" }, - "description": "A sequence of task statuses. This field keeps a record \nof state transitions." + "description": "A sequence of task statuses. This field keeps a record\nof state transitions." }, "pod_name": { "type": "string", @@ -2498,6 +2548,7 @@ "child_tasks": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/PipelineTaskDetailChildTask" }, "description": "Sequence of dependen tasks." @@ -2620,9 +2671,10 @@ "state_history": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/v2beta1RuntimeStatus" }, - "description": "Output. A sequence of run statuses. This field keeps a record \nof state transitions." + "description": "Output. A sequence of run statuses. This field keeps a record\nof state transitions." } } }, @@ -2642,6 +2694,7 @@ "task_details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/v2beta1PipelineTaskDetail" }, "description": "Runtime details of the tasks that belong to the run." @@ -2689,7 +2742,7 @@ }, "error": { "$ref": "#/definitions/googlerpcStatus", - "description": "The error that occurred during the state. May be set when the state is\nany of the non-final states (PENDING/RUNNING/CANCELING) or FAILED state.\nIf the state is FAILED, the error here is final and not going to be\nretried. If the state is a non-final state, the error indicates that a \nsystem-error being retried." + "description": "The error that occurred during the state. May be set when the state is\nany of the non-final states (PENDING/RUNNING/CANCELING) or FAILED state.\nIf the state is FAILED, the error here is final and not going to be\nretried. 
If the state is a non-final state, the error indicates that a\nsystem-error being retried." } }, "description": "Timestamped representation of a runtime state with an optional error." diff --git a/backend/api/v2beta1/swagger/pipeline.swagger.json b/backend/api/v2beta1/swagger/pipeline.swagger.json index b2e64f38217..188aefaf169 100644 --- a/backend/api/v2beta1/swagger/pipeline.swagger.json +++ b/backend/api/v2beta1/swagger/pipeline.swagger.json @@ -4,6 +4,11 @@ "title": "backend/api/v2beta1/pipeline.proto", "version": "version not set" }, + "tags": [ + { + "name": "PipelineService" + } + ], "schemes": [ "http", "https" @@ -29,7 +34,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -88,13 +93,13 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "pipeline", "description": "Required input. Pipeline that needs to be created.", "in": "body", "required": true, @@ -122,7 +127,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -155,7 +160,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -194,7 +199,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -212,19 +217,20 @@ ] }, "delete": { - "summary": "Deletes an empty pipeline by ID. Returns error if the pipeline has pipeline versions.", + "summary": "Deletes a pipeline by ID. If cascade is false (default), it returns an error if the\npipeline has any versions. If cascade is true, it will also delete all pipeline versions.", "operationId": "PipelineService_DeletePipeline", "responses": { "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -235,6 +241,13 @@ "in": "path", "required": true, "type": "string" + }, + { + "name": "cascade", + "description": "Optional. If true, the pipeline and all its versions will be deleted.\nIf false (default), only the pipeline will be deleted if it has no versions.", + "in": "query", + "required": false, + "type": "boolean" } ], "tags": [ @@ -256,7 +269,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -315,7 +328,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -328,7 +341,7 @@ "type": "string" }, { - "name": "body", + "name": "pipeline_version", "description": "Required input. 
Pipeline version ID to be created.", "in": "body", "required": true, @@ -356,7 +369,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -387,13 +400,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -435,6 +449,7 @@ "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" }, "description": "A list of messages that carry the error details. There is a common set of\nmessage types for APIs to use." @@ -445,17 +460,13 @@ "protobufAny": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { + "@type": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. 
(Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." } }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" }, "protobufNullValue": { "type": "string", @@ -463,28 +474,7 @@ "NULL_VALUE" ], "default": "NULL_VALUE", - "description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\n The JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value." - }, - "runtimeError": { - "type": "object", - "properties": { - "error": { - "type": "string" - }, - "code": { - "type": "integer", - "format": "int32" - }, - "message": { - "type": "string" - }, - "details": { - "type": "array", - "items": { - "$ref": "#/definitions/protobufAny" - } - } - } + "description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\nThe JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value." }, "v2beta1CreatePipelineAndVersionRequest": { "type": "object", @@ -505,6 +495,7 @@ "pipeline_versions": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/v2beta1PipelineVersion" }, "description": "Returned pipeline versions." 
@@ -526,6 +517,7 @@ "pipelines": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/v2beta1Pipeline" }, "description": "Returned pipelines." diff --git a/backend/api/v2beta1/swagger/recurring_run.swagger.json b/backend/api/v2beta1/swagger/recurring_run.swagger.json index fd3698c1fed..d916be4d100 100644 --- a/backend/api/v2beta1/swagger/recurring_run.swagger.json +++ b/backend/api/v2beta1/swagger/recurring_run.swagger.json @@ -4,6 +4,11 @@ "title": "backend/api/v2beta1/recurring_run.proto", "version": "version not set" }, + "tags": [ + { + "name": "RecurringRunService" + } + ], "schemes": [ "http", "https" @@ -29,7 +34,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -95,13 +100,13 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "recurring_run", "description": "The recurring run to be created.", "in": "body", "required": true, @@ -129,7 +134,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -153,13 +158,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -185,13 +191,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -217,13 +224,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -268,6 +276,7 @@ "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" }, "description": "A list of messages that carry the error details. There is a common set of\nmessage types for APIs to use." @@ -278,17 +287,13 @@ "protobufAny": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. 
Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { + "@type": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." } }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" }, "protobufNullValue": { "type": "string", @@ -296,28 +301,7 @@ "NULL_VALUE" ], "default": "NULL_VALUE", - "description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\n The JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value." 
- }, - "runtimeError": { - "type": "object", - "properties": { - "error": { - "type": "string" - }, - "code": { - "type": "integer", - "format": "int32" - }, - "message": { - "type": "string" - }, - "details": { - "type": "array", - "items": { - "$ref": "#/definitions/protobufAny" - } - } - } + "description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\nThe JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value." }, "v2beta1CronSchedule": { "type": "object", @@ -345,6 +329,7 @@ "recurringRuns": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/v2beta1RecurringRun" }, "description": "A list of recurring runs returned." @@ -489,9 +474,7 @@ "properties": { "parameters": { "type": "object", - "additionalProperties": { - "type": "object" - }, + "additionalProperties": {}, "description": "The runtime parameters of the Pipeline. The parameters will be\nused to replace the placeholders at runtime." }, "pipeline_root": { diff --git a/backend/api/v2beta1/swagger/report.swagger.json b/backend/api/v2beta1/swagger/report.swagger.json index 32d9d80c359..fae5c6c3ff9 100644 --- a/backend/api/v2beta1/swagger/report.swagger.json +++ b/backend/api/v2beta1/swagger/report.swagger.json @@ -4,6 +4,11 @@ "title": "backend/api/v2beta1/report.proto", "version": "version not set" }, + "tags": [ + { + "name": "ReportService" + } + ], "consumes": [ "application/json" ], @@ -18,19 +23,20 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "scheduled_workflow", "description": "ScheduledWorkflow a ScheduledWorkflow resource marshalled into a json string.", "in": "body", "required": true, @@ -51,19 +57,20 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "workflow", "description": "Workflow is a workflow custom resource marshalled into a json string.", "in": "body", "required": true, @@ -79,41 +86,39 @@ } }, "definitions": { - "protobufAny": { - "type": "object", - "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. 
Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." - } - }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, - "runtimeError": { + "googlerpcStatus": { "type": "object", "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", - "format": "int32" + "format": "int32", + "description": "The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]." }, "message": { - "type": "string" + "type": "string", + "description": "A developer-facing error message, which should be in English. Any\nuser-facing error message should be localized and sent in the\n[google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client." }, "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" - } + }, + "description": "A list of messages that carry the error details. There is a common set of\nmessage types for APIs to use." 
} - } + }, + "description": "The `Status` type defines a logical error model that is suitable for\ndifferent programming environments, including REST APIs and RPC APIs. It is\nused by [gRPC](https://github.com/grpc). Each `Status` message contains\nthree pieces of data: error code, error message, and error details.\n\nYou can find out more about this error model and how to work with it in the\n[API Design Guide](https://cloud.google.com/apis/design/errors)." + }, + "protobufAny": { + "type": "object", + "properties": { + "@type": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
+ } + }, + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } } } diff --git a/backend/api/v2beta1/swagger/run.swagger.json b/backend/api/v2beta1/swagger/run.swagger.json index 1233dea265b..e760b89f924 100644 --- a/backend/api/v2beta1/swagger/run.swagger.json +++ b/backend/api/v2beta1/swagger/run.swagger.json @@ -4,6 +4,11 @@ "title": "backend/api/v2beta1/run.proto", "version": "version not set" }, + "tags": [ + { + "name": "RunService" + } + ], "schemes": [ "http", "https" @@ -17,7 +22,7 @@ "paths": { "/apis/v2beta1/runs": { "get": { - "summary": "Finds all runs in an experiment given by experiment ID. \nIf experiment id is not specified, finds all runs across all experiments.", + "summary": "Finds all runs in an experiment given by experiment ID.\nIf experiment id is not specified, finds all runs across all experiments.", "operationId": "RunService_ListRuns", "responses": { "200": { @@ -29,7 +34,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -83,7 +88,7 @@ ] }, "post": { - "summary": "Creates a new run in an experiment specified by experiment ID. 
\nIf experiment ID is not specified, the run is created in the default experiment.", + "summary": "Creates a new run in an experiment specified by experiment ID.\nIf experiment ID is not specified, the run is created in the default experiment.", "operationId": "RunService_CreateRun", "responses": { "200": { @@ -95,13 +100,13 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, "parameters": [ { - "name": "body", + "name": "run", "description": "Run to be created.", "in": "body", "required": true, @@ -136,7 +141,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -167,13 +172,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -212,7 +218,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -259,13 +265,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -276,6 +283,13 @@ "in": "path", "required": true, "type": "string" + }, + { + "name": "experiment_id", + "description": "The ID of the parent experiment.", + "in": "query", + "required": false, + "type": "string" } ], "tags": [ @@ -291,13 +305,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -308,6 +323,13 @@ "in": "path", "required": true, "type": "string" + }, + { + "name": "experiment_id", + "description": "The ID of the parent experiment.", + "in": "query", + "required": false, + "type": "string" } ], "tags": [ @@ -323,13 +345,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -340,6 +363,13 @@ "in": "path", "required": true, "type": "string" + }, + { + "name": "experiment_id", + "description": "The ID of the parent experiment.", + "in": "query", + "required": false, + "type": "string" } ], "tags": [ @@ -355,13 +385,14 @@ "200": { "description": "A successful response.", "schema": { + "type": "object", "properties": {} } }, "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -372,6 +403,13 @@ "in": "path", "required": true, "type": "string" + }, + { + "name": "experiment_id", + "description": "The ID of the parent experiment.", + "in": "query", + "required": false, + "type": "string" } ], "tags": [ @@ -410,6 +448,7 @@ "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" }, "description": "A list of messages that carry the error details. 
There is a common set of\nmessage types for APIs to use." @@ -420,17 +459,13 @@ "protobufAny": { "type": "object", "properties": { - "type_url": { + "@type": { "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
} }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" }, "protobufNullValue": { "type": "string", @@ -438,28 +473,7 @@ "NULL_VALUE" ], "default": "NULL_VALUE", - "description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\n The JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value." - }, - "runtimeError": { - "type": "object", - "properties": { - "error": { - "type": "string" - }, - "code": { - "type": "integer", - "format": "int32" - }, - "message": { - "type": "string" - }, - "details": { - "type": "array", - "items": { - "$ref": "#/definitions/protobufAny" - } - } - } + "description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\nThe JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value." }, "v2beta1ArtifactList": { "type": "object", @@ -481,6 +495,7 @@ "runs": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/v2beta1Run" }, "description": "List of retrieved runs." 
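(Illustrative aside: the default error responses above reference `googlerpcStatus`, i.e. the standard `google.rpc.Status` model of a code, a message, and a list of `Any` details. A hedged Go sketch, assuming the genproto `status` package and a hypothetical error body, shows how such a payload could be decoded with `protojson`.)

package main

import (
	"fmt"

	spb "google.golang.org/genproto/googleapis/rpc/status"
	"google.golang.org/protobuf/encoding/protojson"
)

func main() {
	// Hypothetical error body shaped like the googlerpcStatus definition;
	// a real payload would come from one of the "default" responses above.
	body := []byte(`{"code": 5, "message": "run not found", "details": []}`)

	st := &spb.Status{}
	if err := protojson.Unmarshal(body, st); err != nil {
		panic(err)
	}
	// code is expected to be a google.rpc.Code value; details carry protobufAny entries.
	fmt.Printf("code=%d message=%q details=%d\n", st.GetCode(), st.GetMessage(), len(st.GetDetails()))
}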
@@ -564,9 +579,10 @@ "state_history": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/v2beta1RuntimeStatus" }, - "description": "A sequence of task statuses. This field keeps a record \nof state transitions." + "description": "A sequence of task statuses. This field keeps a record\nof state transitions." }, "pod_name": { "type": "string", @@ -575,6 +591,7 @@ "child_tasks": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/PipelineTaskDetailChildTask" }, "description": "Sequence of dependen tasks." @@ -711,9 +728,10 @@ "state_history": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/v2beta1RuntimeStatus" }, - "description": "Output. A sequence of run statuses. This field keeps a record \nof state transitions." + "description": "Output. A sequence of run statuses. This field keeps a record\nof state transitions." } } }, @@ -733,6 +751,7 @@ "task_details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/v2beta1PipelineTaskDetail" }, "description": "Runtime details of the tasks that belong to the run." @@ -755,9 +774,7 @@ "properties": { "parameters": { "type": "object", - "additionalProperties": { - "type": "object" - }, + "additionalProperties": {}, "description": "The runtime parameters of the Pipeline. The parameters will be\nused to replace the placeholders at runtime." }, "pipeline_root": { @@ -797,7 +814,7 @@ }, "error": { "$ref": "#/definitions/googlerpcStatus", - "description": "The error that occurred during the state. May be set when the state is\nany of the non-final states (PENDING/RUNNING/CANCELING) or FAILED state.\nIf the state is FAILED, the error here is final and not going to be\nretried. If the state is a non-final state, the error indicates that a \nsystem-error being retried." + "description": "The error that occurred during the state. May be set when the state is\nany of the non-final states (PENDING/RUNNING/CANCELING) or FAILED state.\nIf the state is FAILED, the error here is final and not going to be\nretried. If the state is a non-final state, the error indicates that a\nsystem-error being retried." } }, "description": "Timestamped representation of a runtime state with an optional error." diff --git a/backend/api/v2beta1/swagger/runtime_config.swagger.json b/backend/api/v2beta1/swagger/runtime_config.swagger.json index 7a932f3754d..1da8a3633f4 100644 --- a/backend/api/v2beta1/swagger/runtime_config.swagger.json +++ b/backend/api/v2beta1/swagger/runtime_config.swagger.json @@ -12,41 +12,39 @@ ], "paths": {}, "definitions": { - "protobufAny": { - "type": "object", - "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. 
However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." - } - }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, - "runtimeError": { + "googlerpcStatus": { "type": "object", "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", - "format": "int32" + "format": "int32", + "description": "The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]." }, "message": { - "type": "string" + "type": "string", + "description": "A developer-facing error message, which should be in English. 
Any\nuser-facing error message should be localized and sent in the\n[google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client." }, "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" - } + }, + "description": "A list of messages that carry the error details. There is a common set of\nmessage types for APIs to use." } - } + }, + "description": "The `Status` type defines a logical error model that is suitable for\ndifferent programming environments, including REST APIs and RPC APIs. It is\nused by [gRPC](https://github.com/grpc). Each `Status` message contains\nthree pieces of data: error code, error message, and error details.\n\nYou can find out more about this error model and how to work with it in the\n[API Design Guide](https://cloud.google.com/apis/design/errors)." + }, + "protobufAny": { + "type": "object", + "properties": { + "@type": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
+ } + }, + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } } } diff --git a/backend/api/v2beta1/swagger/visualization.swagger.json b/backend/api/v2beta1/swagger/visualization.swagger.json index 3123da36938..601abb69024 100644 --- a/backend/api/v2beta1/swagger/visualization.swagger.json +++ b/backend/api/v2beta1/swagger/visualization.swagger.json @@ -4,6 +4,11 @@ "title": "backend/api/v2beta1/visualization.proto", "version": "version not set" }, + "tags": [ + { + "name": "VisualizationService" + } + ], "schemes": [ "http", "https" @@ -28,7 +33,7 @@ "default": { "description": "An unexpected error response.", "schema": { - "$ref": "#/definitions/runtimeError" + "$ref": "#/definitions/googlerpcStatus" } } }, @@ -40,7 +45,7 @@ "type": "string" }, { - "name": "body", + "name": "visualization", "in": "body", "required": true, "schema": { @@ -55,41 +60,39 @@ } }, "definitions": { - "protobufAny": { + "googlerpcStatus": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). 
The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." - } - }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, - "runtimeError": { - "type": "object", - "properties": { - "error": { - "type": "string" - }, "code": { "type": "integer", - "format": "int32" + "format": "int32", + "description": "The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]." }, "message": { - "type": "string" + "type": "string", + "description": "A developer-facing error message, which should be in English. Any\nuser-facing error message should be localized and sent in the\n[google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client." }, "details": { "type": "array", "items": { + "type": "object", "$ref": "#/definitions/protobufAny" - } + }, + "description": "A list of messages that carry the error details. There is a common set of\nmessage types for APIs to use." } - } + }, + "description": "The `Status` type defines a logical error model that is suitable for\ndifferent programming environments, including REST APIs and RPC APIs. It is\nused by [gRPC](https://github.com/grpc). Each `Status` message contains\nthree pieces of data: error code, error message, and error details.\n\nYou can find out more about this error model and how to work with it in the\n[API Design Guide](https://cloud.google.com/apis/design/errors)." + }, + "protobufAny": { + "type": "object", + "properties": { + "@type": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
+ } + }, + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" }, "v2beta1Visualization": { "type": "object", diff --git a/backend/api/v2beta1/visualization.proto b/backend/api/v2beta1/visualization.proto index 5b6746e8f16..5a991fea257 100644 --- a/backend/api/v2beta1/visualization.proto +++ b/backend/api/v2beta1/visualization.proto @@ -18,10 +18,9 @@ option go_package = "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client package kubeflow.pipelines.backend.api.v2beta1; import "google/api/annotations.proto"; -import "protoc-gen-swagger/options/annotations.proto"; -import "google/rpc/status.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; -option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { schemes: [1, 2], // http + https responses: { key: "default"; diff --git a/backend/metadata_writer/requirements.in b/backend/metadata_writer/requirements.in index 5e0ef214b0f..6374cd18433 100644 --- a/backend/metadata_writer/requirements.in +++ b/backend/metadata_writer/requirements.in @@ -1,4 +1,4 @@ -kubernetes>=8.0.0,<11.0.0 -ml-metadata==1.14.0 +kubernetes>=8.0.0,<=31.0.0 +ml-metadata==1.17.0 lru-dict>=1.1.7,<2.0.0 PyYAML>=5.3,<7 diff --git a/backend/metadata_writer/requirements.txt b/backend/metadata_writer/requirements.txt index 6897654ae21..cf63e9909b7 100644 --- a/backend/metadata_writer/requirements.txt +++ b/backend/metadata_writer/requirements.txt @@ -6,19 +6,21 @@ # absl-py==1.4.0 # via ml-metadata -attrs==21.4.0 +attrs==23.2.0 # via ml-metadata -cachetools==5.5.0 +cachetools==5.5.2 # via google-auth -certifi==2024.8.30 +certifi==2025.8.3 # via # kubernetes # requests -charset-normalizer==3.3.2 +charset-normalizer==3.4.3 # via requests -google-auth==2.35.0 +durationpy==0.10 # via kubernetes -grpcio==1.66.1 +google-auth==2.40.3 + # via kubernetes +grpcio==1.74.0 # via ml-metadata idna==3.10 # via requests @@ -26,17 +28,19 @@ kubernetes==31.0.0 # via -r - lru-dict==1.3.0 # via -r - -ml-metadata==1.14.0 +ml-metadata==1.17.0 # via -r - -oauthlib==3.2.2 - # via requests-oauthlib -protobuf==3.20.3 +oauthlib==3.3.1 + # via + # kubernetes + # requests-oauthlib +protobuf==4.25.8 # via ml-metadata pyasn1==0.6.1 # via # pyasn1-modules # rsa -pyasn1-modules==0.4.1 +pyasn1-modules==0.4.2 # via google-auth python-dateutil==2.9.0.post0 # via kubernetes @@ -44,25 +48,22 @@ pyyaml==6.0.2 # via # -r - # kubernetes -requests==2.32.3 +requests==2.32.5 # via # kubernetes # requests-oauthlib requests-oauthlib==2.0.0 # via kubernetes -rsa==4.9 +rsa==4.9.1 # via google-auth -six==1.16.0 +six==1.17.0 # via # kubernetes # ml-metadata # python-dateutil -urllib3==2.2.3 +urllib3==2.5.0 # via # kubernetes # requests websocket-client==1.8.0 # via kubernetes - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/backend/metadata_writer/src/metadata_helpers.py b/backend/metadata_writer/src/metadata_helpers.py index a0f3dd50e3b..e6a1da804b5 100644 --- a/backend/metadata_writer/src/metadata_helpers.py +++ b/backend/metadata_writer/src/metadata_helpers.py @@ -32,10 +32,8 @@ def value_to_mlmd_value(value) -> metadata_store_pb2.Value: def connect_to_mlmd() -> metadata_store.MetadataStore: - metadata_service_host = os.environ.get( - 'METADATA_GRPC_SERVICE_SERVICE_HOST', 'metadata-grpc-service') - metadata_service_port = int(os.environ.get( - 'METADATA_GRPC_SERVICE_SERVICE_PORT', 
8080)) + metadata_service_host = "metadata-grpc-service.kubeflow" + metadata_service_port = 8080 mlmd_connection_config = metadata_store_pb2.MetadataStoreClientConfig( host="[{}]".format(metadata_service_host) if isIPv6(metadata_service_host) else metadata_service_host, diff --git a/backend/requirements.in b/backend/requirements.in index b6a424695d2..ed14770731b 100644 --- a/backend/requirements.in +++ b/backend/requirements.in @@ -1 +1 @@ -kfp>=2.0.0rc1,<2.1.0 +kfp==2.14.3 diff --git a/backend/requirements.txt b/backend/requirements.txt index 68cfc0f33f5..8dbc005034d 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -1,113 +1,112 @@ # -# This file is autogenerated by pip-compile with Python 3.7 +# This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --output-file=- - +# pip-compile --output-file=- --resolver=backtracking - # -cachetools==5.3.1 +cachetools==5.5.2 # via google-auth -certifi==2023.7.22 +certifi==2025.8.3 # via # kfp-server-api # kubernetes # requests -charset-normalizer==3.2.0 +charset-normalizer==3.4.3 # via requests -click==8.1.7 +click==8.1.8 + # via + # click-option-group + # kfp +click-option-group==0.5.7 # via kfp -docstring-parser==0.15 +docstring-parser==0.17.0 # via kfp -google-api-core==2.11.1 +google-api-core==2.25.1 # via # google-cloud-core # google-cloud-storage # kfp -google-auth==2.23.0 +google-auth==2.40.3 # via # google-api-core # google-cloud-core # google-cloud-storage # kfp # kubernetes -google-cloud-core==2.3.3 +google-cloud-core==2.4.3 # via google-cloud-storage -google-cloud-storage==2.11.0 +google-cloud-storage==3.3.1 # via kfp -google-crc32c==1.5.0 - # via google-resumable-media -google-resumable-media==2.6.0 +google-crc32c==1.7.1 + # via + # google-cloud-storage + # google-resumable-media +google-resumable-media==2.7.2 # via google-cloud-storage -googleapis-common-protos==1.60.0 +googleapis-common-protos==1.70.0 # via google-api-core -idna==3.4 +idna==3.10 # via requests -importlib-metadata==6.7.0 - # via click -kfp==2.0.1 +kfp==2.14.3 # via -r - -kfp-pipeline-spec==0.2.2 +kfp-pipeline-spec==2.14.0 # via kfp -kfp-server-api==2.0.1 +kfp-server-api==2.14.3 # via kfp -kubernetes==26.1.0 +kubernetes==30.1.0 # via kfp -oauthlib==3.2.2 - # via requests-oauthlib -protobuf==3.20.3 +oauthlib==3.3.1 + # via + # kubernetes + # requests-oauthlib +proto-plus==1.26.1 + # via google-api-core +protobuf==6.31.1 # via # google-api-core # googleapis-common-protos # kfp # kfp-pipeline-spec -pyasn1==0.5.0 + # proto-plus +pyasn1==0.6.1 # via # pyasn1-modules # rsa -pyasn1-modules==0.3.0 +pyasn1-modules==0.4.2 # via google-auth -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via # kfp-server-api # kubernetes -pyyaml==6.0.1 +pyyaml==6.0.2 # via # kfp # kubernetes -requests==2.31.0 +requests==2.32.5 # via # google-api-core # google-cloud-storage # kubernetes # requests-oauthlib # requests-toolbelt -requests-oauthlib==1.3.1 +requests-oauthlib==2.0.0 # via kubernetes -requests-toolbelt==0.10.1 +requests-toolbelt==1.0.0 # via kfp -rsa==4.9 +rsa==4.9.1 # via google-auth -six==1.16.0 +six==1.17.0 # via # kfp-server-api # kubernetes # python-dateutil tabulate==0.9.0 # via kfp -typing-extensions==4.7.1 - # via - # importlib-metadata - # kfp -urllib3==1.26.16 +urllib3==2.5.0 # via - # google-auth # kfp # kfp-server-api # kubernetes # requests -websocket-client==1.6.1 +websocket-client==1.8.0 # via kubernetes -zipp==3.15.0 - # via importlib-metadata - -# The following packages are considered to be 
unsafe in a requirements file: -# setuptools diff --git a/backend/src/agent/persistence/worker/metrics_reporter.go b/backend/src/agent/persistence/worker/metrics_reporter.go index 0a03fab4720..0e5c267e17e 100644 --- a/backend/src/agent/persistence/worker/metrics_reporter.go +++ b/backend/src/agent/persistence/worker/metrics_reporter.go @@ -112,5 +112,5 @@ func aggregateErrors(errors []error) error { } errorMsgs = append(errorMsgs, err.Error()) } - return util.NewCustomErrorf(code, strings.Join(errorMsgs, "\n")) + return util.NewCustomErrorf(code, "%s", strings.Join(errorMsgs, "\n")) } diff --git a/backend/src/agent/persistence/worker/metrics_reporter_test.go b/backend/src/agent/persistence/worker/metrics_reporter_test.go index 3413fd8e6f4..e273a4b19c3 100644 --- a/backend/src/agent/persistence/worker/metrics_reporter_test.go +++ b/backend/src/agent/persistence/worker/metrics_reporter_test.go @@ -172,7 +172,7 @@ func TestReportMetrics_Succeed(t *testing.T) { pipelineFake.StubArtifact( &api.ReadArtifactRequest{ RunId: "run-1", - NodeId: "MY_NAME-template-1-1", + NodeId: "node-1", ArtifactName: "mlpipeline-metrics", }, &api.ReadArtifactResponse{ @@ -190,12 +190,12 @@ func TestReportMetrics_Succeed(t *testing.T) { Metrics: []*api.RunMetric{ { Name: "accuracy", - NodeId: "MY_NAME-template-1-1", + NodeId: "node-1", Value: &api.RunMetric_NumberValue{NumberValue: 0.77}, }, { Name: "logloss", - NodeId: "MY_NAME-template-1-1", + NodeId: "node-1", Value: &api.RunMetric_NumberValue{NumberValue: 1.2}, }, }, @@ -235,7 +235,7 @@ func TestReportMetrics_EmptyArchive_Fail(t *testing.T) { pipelineFake.StubArtifact( &api.ReadArtifactRequest{ RunId: "run-1", - NodeId: "MY_NAME-template-1-1", + NodeId: "node-1", ArtifactName: "mlpipeline-metrics", }, &api.ReadArtifactResponse{ @@ -263,7 +263,7 @@ func TestReportMetrics_MultipleFilesInArchive_Fail(t *testing.T) { Status: workflowapi.WorkflowStatus{ Nodes: map[string]workflowapi.NodeStatus{ "node-1": { - ID: "MY_NAME-template-1-1", + ID: "node-1", TemplateName: "template-1", Phase: workflowapi.NodeSucceeded, Outputs: &workflowapi.Outputs{ @@ -279,7 +279,7 @@ func TestReportMetrics_MultipleFilesInArchive_Fail(t *testing.T) { pipelineFake.StubArtifact( &api.ReadArtifactRequest{ RunId: "run-1", - NodeId: "MY_NAME-template-1-1", + NodeId: "node-1", ArtifactName: "mlpipeline-metrics", }, &api.ReadArtifactResponse{ @@ -322,7 +322,7 @@ func TestReportMetrics_InvalidMetricsJSON_Fail(t *testing.T) { pipelineFake.StubArtifact( &api.ReadArtifactRequest{ RunId: "run-1", - NodeId: "MY_NAME-template-1-1", + NodeId: "node-1", ArtifactName: "mlpipeline-metrics", }, &api.ReadArtifactResponse{ @@ -376,7 +376,7 @@ func TestReportMetrics_InvalidMetricsJSON_PartialFail(t *testing.T) { pipelineFake.StubArtifact( &api.ReadArtifactRequest{ RunId: "run-1", - NodeId: "MY_NAME-template-1-1", + NodeId: "node-1", ArtifactName: "mlpipeline-metrics", }, &api.ReadArtifactResponse{ @@ -385,7 +385,7 @@ func TestReportMetrics_InvalidMetricsJSON_PartialFail(t *testing.T) { pipelineFake.StubArtifact( &api.ReadArtifactRequest{ RunId: "run-1", - NodeId: "MY_NAME-template-2-2", + NodeId: "node-2", ArtifactName: "mlpipeline-metrics", }, &api.ReadArtifactResponse{ @@ -402,12 +402,12 @@ func TestReportMetrics_InvalidMetricsJSON_PartialFail(t *testing.T) { Metrics: []*api.RunMetric{ { Name: "accuracy", - NodeId: "MY_NAME-template-2-2", + NodeId: "node-2", Value: &api.RunMetric_NumberValue{NumberValue: 0.77}, }, { Name: "logloss", - NodeId: "MY_NAME-template-2-2", + NodeId: "node-2", Value: 
&api.RunMetric_NumberValue{NumberValue: 1.2}, }, }, @@ -446,7 +446,7 @@ func TestReportMetrics_CorruptedArchiveFile_Fail(t *testing.T) { pipelineFake.StubArtifact( &api.ReadArtifactRequest{ RunId: "run-1", - NodeId: "MY_NAME-template-1-1", + NodeId: "node-1", ArtifactName: "mlpipeline-metrics", }, &api.ReadArtifactResponse{ @@ -490,7 +490,7 @@ func TestReportMetrics_MultiplMetricErrors_TransientErrowWin(t *testing.T) { pipelineFake.StubArtifact( &api.ReadArtifactRequest{ RunId: "run-1", - NodeId: "MY_NAME-template-1-1", + NodeId: "node-1", ArtifactName: "mlpipeline-metrics", }, &api.ReadArtifactResponse{ @@ -553,7 +553,7 @@ func TestReportMetrics_Unauthorized(t *testing.T) { pipelineFake.StubArtifact( &api.ReadArtifactRequest{ RunId: "run-1", - NodeId: "MY_NAME-template-1-1", + NodeId: "node-1", ArtifactName: "mlpipeline-metrics", }, &api.ReadArtifactResponse{ diff --git a/backend/src/apiserver/auth/util.go b/backend/src/apiserver/auth/util.go index 991831d86a4..9efea32d903 100644 --- a/backend/src/apiserver/auth/util.go +++ b/backend/src/apiserver/auth/util.go @@ -35,7 +35,7 @@ func singleHeaderFromMetadata(ctx context.Context, header string) (string, error } if len(values) != 1 { msg := fmt.Sprintf("Request header error: unexpected number of '%s' headers. Expect 1 got %d", header, len(values)) - return "", util.NewBadRequestError(errors.New(msg), msg) + return "", util.NewBadRequestError(errors.New(msg), "%s", msg) } return values[0], nil } @@ -50,7 +50,7 @@ func singlePrefixedHeaderFromMetadata(ctx context.Context, header string, prefix } if !strings.HasPrefix(val, prefix) { msg := fmt.Sprintf("Header '%s' is incorrectly formatted. Expected prefix '%s'", header, prefix) - return "", util.NewBadRequestError(errors.New(msg), msg) + return "", util.NewBadRequestError(errors.New(msg), "%s", msg) } return strings.TrimPrefix(val, prefix), nil } diff --git a/backend/src/apiserver/client/pod_fake.go b/backend/src/apiserver/client/pod_fake.go index c5fbba1a129..26bb8308007 100644 --- a/backend/src/apiserver/client/pod_fake.go +++ b/backend/src/apiserver/client/pod_fake.go @@ -95,6 +95,11 @@ func (FakePodClient) ProxyGet(scheme, name, port, path string, params map[string return nil } +func (FakePodClient) UpdateResize(ctx context.Context, podName string, pod *corev1.Pod, opts v1.UpdateOptions) (result *corev1.Pod, err error) { + glog.Error("This fake method is not yet implemented") + return nil, nil +} + type FakeBadPodClient struct { FakePodClient } diff --git a/backend/src/apiserver/client/sql.go b/backend/src/apiserver/client/sql.go index 3a111eb3afc..5161c55a695 100644 --- a/backend/src/apiserver/client/sql.go +++ b/backend/src/apiserver/client/sql.go @@ -21,15 +21,6 @@ import ( "github.com/go-sql-driver/mysql" ) -const ( - MYSQL_TEXT_FORMAT string = "longtext not null" - MYSQL_TEXT_FORMAT_NULL string = "longtext" - MYSQL_EXIST_ERROR string = "database exists" - - PGX_TEXT_FORMAT string = "text" - PGX_EXIST_ERROR string = "already exists" -) - func CreateMySQLConfig(user, password, mysqlServiceHost, mysqlServicePort, dbName, mysqlGroupConcatMaxLen string, mysqlExtraParams map[string]string, ) *mysql.Config { diff --git a/backend/src/apiserver/client_manager/client_manager.go b/backend/src/apiserver/client_manager/client_manager.go index 537cddba4c1..7c785e946cc 100644 --- a/backend/src/apiserver/client_manager/client_manager.go +++ b/backend/src/apiserver/client_manager/client_manager.go @@ -18,40 +18,33 @@ import ( "context" "database/sql" "fmt" - "os" "strings" "sync" "time" - 
"github.com/kubeflow/pipelines/backend/src/v2/metadata" - "github.com/cenkalti/backoff" - "github.com/go-sql-driver/mysql" + mysqlStd "github.com/go-sql-driver/mysql" "github.com/golang/glog" - "github.com/jinzhu/gorm" - _ "github.com/jinzhu/gorm/dialects/sqlite" "github.com/kubeflow/pipelines/backend/src/apiserver/archive" "github.com/kubeflow/pipelines/backend/src/apiserver/auth" "github.com/kubeflow/pipelines/backend/src/apiserver/client" "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/apiserver/model" "github.com/kubeflow/pipelines/backend/src/apiserver/storage" + "github.com/kubeflow/pipelines/backend/src/apiserver/validation" "github.com/kubeflow/pipelines/backend/src/common/util" k8sapi "github.com/kubeflow/pipelines/backend/src/crd/kubernetes/v2beta1" + "github.com/kubeflow/pipelines/backend/src/v2/metadata" "github.com/minio/minio-go/v7" + "gorm.io/driver/mysql" + "gorm.io/driver/postgres" + "gorm.io/gorm" "k8s.io/apimachinery/pkg/runtime" "sigs.k8s.io/controller-runtime/pkg/cache" ctrlclient "sigs.k8s.io/controller-runtime/pkg/client" ) const ( - minioServiceHost = "MINIO_SERVICE_SERVICE_HOST" - minioServicePort = "MINIO_SERVICE_SERVICE_PORT" - minioServiceRegion = "MINIO_SERVICE_REGION" - minioServiceSecure = "MINIO_SERVICE_SECURE" - pipelineBucketName = "MINIO_PIPELINE_BUCKET_NAME" - pipelinePath = "MINIO_PIPELINE_PATH" - mysqlServiceHost = "DBConfig.MySQLConfig.Host" mysqlServicePort = "DBConfig.MySQLConfig.Port" mysqlUser = "DBConfig.MySQLConfig.User" @@ -291,9 +284,9 @@ func (c *ClientManager) init(options *Options) error { c.resourceReferenceStore = storage.NewResourceReferenceStore(db, pipelineStoreForRef) c.dBStatusStore = storage.NewDBStatusStore(db) c.defaultExperimentStore = storage.NewDefaultExperimentStore(db) - glog.Info("Initializing Minio client...") + glog.Info("Initializing Object store client...") c.objectStore = initMinioClient(options.Context, common.GetDurationConfig(initConnectionTimeout)) - glog.Info("Minio client initialized successfully") + glog.Info("Object store client initialized successfully") // Use default value of client QPS (5) & burst (10) defined in // k8s.io/client-go/rest/config.go#RESTClientFor clientParams := util.ClientParameters{ @@ -334,31 +327,6 @@ func (c *ClientManager) Close() { c.db.Close() } -// addDisplayNameColumn adds a DisplayName column to the given table with a default value of Name. -// It panics if this fails. 
-func addDisplayNameColumn(db *gorm.DB, scope *gorm.Scope, quotedTableName string, driverName string) []error { - glog.Info("Adding DisplayName column to " + quotedTableName) - - switch driverName { - case "mysql": - scope.Raw( - "ALTER TABLE " + quotedTableName + " ADD COLUMN DisplayName VARCHAR(255) NULL;", - ).Exec() - scope.Raw("UPDATE " + quotedTableName + " SET DisplayName = Name").Exec() - scope.Raw("ALTER TABLE " + quotedTableName + " MODIFY COLUMN DisplayName VARCHAR(255) NOT NULL").Exec() - case "pgx": - scope.Raw( - "ALTER TABLE " + quotedTableName + " ADD COLUMN DisplayName VARCHAR(255);", - ).Exec() - scope.Raw("UPDATE " + quotedTableName + " SET DisplayName = Name").Exec() - scope.Raw("ALTER TABLE " + quotedTableName + " ALTER COLUMN DisplayName SET NOT NULL").Exec() - } - - scope.CommitOrRollback() - - return db.GetErrors() -} - func InitDBClient(initConnectionTimeout time.Duration) *storage.DB { // Allowed driverName values: // 1) To use MySQL, use `mysql` @@ -366,186 +334,46 @@ func InitDBClient(initConnectionTimeout time.Duration) *storage.DB { driverName := common.GetStringConfig("DBDriverName") arg := initDBDriver(driverName, initConnectionTimeout) - // db is safe for concurrent use by multiple goroutines - // and maintains its own pool of idle connections. - db, err := gorm.Open(driverName, arg) - util.TerminateIfError(err) - - // If pipeline_versions table is introduced into DB for the first time, - // it needs initialization or data backfill. - var tableNames []string - initializePipelineVersions := true - db.Raw(`show tables`).Pluck("Tables_in_mlpipeline", &tableNames) - for _, tableName := range tableNames { - if tableName == "pipeline_versions" { - initializePipelineVersions = false - break - } - } - - if db.HasTable(&model.Pipeline{}) { - scope := db.NewScope(&model.Pipeline{}) - if !scope.Dialect().HasColumn(scope.TableName(), "DisplayName") { - errs := addDisplayNameColumn(db, scope, scope.QuotedTableName(), driverName) - if len(errs) > 0 { - glog.Fatalf("Failed to add DisplayName column to the %s table. Error(s): %v", scope.TableName(), errs) - } - } - } - - if db.HasTable(&model.PipelineVersion{}) { - scope := db.NewScope(&model.PipelineVersion{}) - if !scope.Dialect().HasColumn(scope.TableName(), "DisplayName") { - errs := addDisplayNameColumn(db, scope, scope.QuotedTableName(), driverName) - if len(errs) > 0 { - glog.Fatalf("Failed to add DisplayName column to the %s table. Error(s): %v", scope.TableName(), errs) - } - } - } - - // Create table - response := db.AutoMigrate( - &model.DBStatus{}, - &model.DefaultExperiment{}, - &model.Experiment{}, - &model.Pipeline{}, - &model.PipelineVersion{}, - &model.Job{}, - &model.Run{}, - &model.RunMetric{}, - &model.Task{}, - &model.ResourceReference{}, - ) - - if ignoreAlreadyExistError(driverName, response.Error) != nil { - glog.Fatalf("Failed to initialize the databases. Error: %s", response.Error) - } - - var textFormat string + var dialector gorm.Dialector switch driverName { case "mysql": - textFormat = client.MYSQL_TEXT_FORMAT + // DefaultStringSize dictates non-indexable string fields map to VARCHAR(255) for backward compatibility with GORM v1. 
+ dialector = mysql.New(mysql.Config{ + DSN: arg, + DefaultStringSize: 255, + }) case "pgx": - textFormat = client.PGX_TEXT_FORMAT + dialector = postgres.Open(arg) default: - glog.Fatalf("Unsupported database driver %s, please use `mysql` for MySQL, or `pgx` for PostgreSQL.", driverName) - } - - response = db.Model(&model.Experiment{}).RemoveIndex("Name") - if response.Error != nil { - glog.Fatalf("Failed to drop unique key on experiment name. Error: %s", response.Error) - } - - response = db.Model(&model.Pipeline{}).RemoveIndex("Name") - if response.Error != nil { - glog.Fatalf("Failed to drop unique key on pipeline name. Error: %s", response.Error) + glog.Fatalf("Unsupported driver %v", driverName) } - response = db.Model(&model.ResourceReference{}).ModifyColumn("Payload", textFormat) - if response.Error != nil { - glog.Fatalf("Failed to update the resource reference payload type. Error: %s", response.Error) - } - - response = db.Model(&model.Run{}).AddIndex("experimentuuid_createatinsec", "ExperimentUUID", "CreatedAtInSec") - if ignoreAlreadyExistError(driverName, response.Error) != nil { - glog.Fatalf("Failed to create index experimentuuid_createatinsec on run_details. Error: %s", response.Error) - } - - response = db.Model(&model.Run{}).AddIndex("experimentuuid_conditions_finishedatinsec", "ExperimentUUID", "Conditions", "FinishedAtInSec") - if ignoreAlreadyExistError(driverName, response.Error) != nil { - glog.Fatalf("Failed to create index experimentuuid_conditions_finishedatinsec on run_details. Error: %s", response.Error) - } - - response = db.Model(&model.Run{}).AddIndex("namespace_createatinsec", "Namespace", "CreatedAtInSec") - if ignoreAlreadyExistError(driverName, response.Error) != nil { - glog.Fatalf("Failed to create index namespace_createatinsec on run_details. Error: %s", response.Error) - } - - response = db.Model(&model.Run{}).AddIndex("namespace_conditions_finishedatinsec", "Namespace", "Conditions", "FinishedAtInSec") - if ignoreAlreadyExistError(driverName, response.Error) != nil { - glog.Fatalf("Failed to create index namespace_conditions_finishedatinsec on run_details. Error: %s", response.Error) - } - - response = db.Model(&model.Pipeline{}).AddUniqueIndex("name_namespace_index", "Name", "Namespace") - if ignoreAlreadyExistError(driverName, response.Error) != nil { - glog.Fatalf("Failed to create index name_namespace_index on run_details. Error: %s", response.Error) - } - - switch driverName { - case "pgx": - response = db.Model(&model.RunMetric{}). - AddForeignKey("\"RunUUID\"", "run_details(\"UUID\")", "CASCADE" /* onDelete */, "CASCADE" /* onUpdate */) - if ignoreAlreadyExistError(driverName, response.Error) != nil { - glog.Fatalf("Failed to create a foreign key for RunUUID in run_metrics table. Error: %s", response.Error) - } - response = db.Model(&model.PipelineVersion{}). - AddForeignKey("\"PipelineId\"", "pipelines(\"UUID\")", "CASCADE" /* onDelete */, "CASCADE" /* onUpdate */) - if ignoreAlreadyExistError(driverName, response.Error) != nil { - glog.Fatalf("Failed to create a foreign key for PipelineId in pipeline_versions table. Error: %s", response.Error) - } - response = db.Model(&model.Task{}). - AddForeignKey("\"RunUUID\"", "run_details(\"UUID\")", "CASCADE" /* onDelete */, "CASCADE" /* onUpdate */) - if ignoreAlreadyExistError(driverName, response.Error) != nil { - glog.Fatalf("Failed to create a foreign key for RunUUID in task table. Error: %s", response.Error) - } - case "mysql": - response = db.Model(&model.RunMetric{}). 
- AddForeignKey("RunUUID", "run_details(UUID)", "CASCADE" /* onDelete */, "CASCADE" /* onUpdate */) - if ignoreAlreadyExistError(driverName, response.Error) != nil { - glog.Fatalf("Failed to create a foreign key for RunUUID in run_metrics table. Error: %s", response.Error) - } - response = db.Model(&model.PipelineVersion{}). - AddForeignKey("PipelineId", "pipelines(UUID)", "CASCADE" /* onDelete */, "CASCADE" /* onUpdate */) - if ignoreAlreadyExistError(driverName, response.Error) != nil { - glog.Fatalf("Failed to create a foreign key for PipelineId in pipeline_versions table. Error: %s", response.Error) - } - response = db.Model(&model.Task{}). - AddForeignKey("RunUUID", "run_details(UUID)", "CASCADE" /* onDelete */, "CASCADE" /* onUpdate */) - if ignoreAlreadyExistError(driverName, response.Error) != nil { - glog.Fatalf("Failed to create a foreign key for RunUUID in task table. Error: %s", response.Error) - } + // db is safe for concurrent use by multiple goroutines + // and maintains its own pool of idle connections. + db, err := gorm.Open(dialector, &gorm.Config{}) + util.TerminateIfError(err) - // This is a workaround because AutoMigration does not detect that the column went from not null to nullable. - response = db.Model(&model.Job{}).ModifyColumn("WorkflowSpecManifest", client.MYSQL_TEXT_FORMAT_NULL) - if response.Error != nil { - glog.Fatalf("Failed to make the WorkflowSpecManifest column nullable on jobs. Error: %s", response.Error) - } - default: - glog.Fatalf("Driver %v is not supported, use \"mysql\" for MySQL, or \"pgx\" for PostgreSQL", driverName) - } + dialect := GetDialect(driverName) - // Data backfill for pipeline_versions if this is the first time for - // pipeline_versions to enter mlpipeline DB. - if initializePipelineVersions { - initPipelineVersionsFromPipelines(db) - } - err = backfillExperimentIDToRunTable(db) + legacy, err := isLegacySchema(db) if err != nil { - glog.Fatalf("Failed to backfill experiment UUID in run_details table: %s", err) + glog.Fatalf("failed to detect schema version: %v", err) } - - response = db.Model(&model.Pipeline{}).ModifyColumn("Description", textFormat) - if response.Error != nil { - glog.Fatalf("Failed to update pipeline description type. Error: %s", response.Error) + if legacy { + // Legacy schema (pre-2.15): run the one-time legacy upgrade to shrink columns, + // clean up legacy indexes/constraints, and perform backfills. + util.TerminateIfError(runLegacyUpgradeFlow(db, dialect)) + } else { + // Non-legacy schema (>=2.15): run autoMigrate for both first-time installs and + // upgrades between >=2.15 versions. + util.TerminateIfError(autoMigrate(db)) } - // Because PostgreSQL was supported later, there's no need to delete the relic index - if driverName == "mysql" { - // If the old unique index idx_pipeline_version_uuid_name on pipeline_versions exists, remove it. - rows, err := db.Raw(`show index from pipeline_versions where Key_name='idx_pipeline_version_uuid_name'`).Rows() - if err != nil { - glog.Fatalf("Failed to query pipeline_version table's indices. Error: %s", err) - } - if err := rows.Err(); err != nil { - glog.Fatalf("Failed to query pipeline_version table's indices. Error: %s", err) - } - if rows.Next() { - db.Exec(`drop index idx_pipeline_version_uuid_name on pipeline_versions`) - } - defer rows.Close() + newdb, err := db.DB() + if err != nil { + glog.Fatalf("Failed to retrieve *sql.DB from gorm.DB. 
Error: %v", err) } - - return storage.NewDB(db.DB(), storage.NewMySQLDialect()) + return storage.NewDB(newdb, storage.NewMySQLDialect()) } // Initializes Database driver. Use `driverName` to indicate which type of DB to use: @@ -553,7 +381,7 @@ func InitDBClient(initConnectionTimeout time.Duration) *storage.DB { // 2) "pgx" for PostgreSQL func initDBDriver(driverName string, initConnectionTimeout time.Duration) string { var sqlConfig, dbName string - var mysqlConfig *mysql.Config + var mysqlConfig *mysqlStd.Config switch driverName { case "mysql": mysqlConfig = client.CreateMySQLConfig( @@ -599,9 +427,10 @@ func initDBDriver(driverName string, initConnectionTimeout time.Duration) string util.TerminateIfError(err) // Create database if not exist + dialect := GetDialect(driverName) operation = func() error { _, err = db.Exec(fmt.Sprintf("CREATE DATABASE %s", dbName)) - if ignoreAlreadyExistError(driverName, err) != nil { + if ignoreAlreadyExistError(dialect, err) != nil { return err } return nil @@ -637,20 +466,492 @@ func initDBDriver(driverName string, initConnectionTimeout time.Duration) string return sqlConfig } +func isLegacySchema(db *gorm.DB) (bool, error) { + if !db.Migrator().HasTable(&model.Pipeline{}) { + glog.Infof("Pipelines table not found. Assuming fresh install.") + return false, nil + } + length, ok, err := getColumnLength(db, &model.Pipeline{}, "UUID") + if err != nil { + return false, fmt.Errorf("detect schema version: %w", err) + } + return !ok || length > 64, nil +} + +func runLegacyUpgradeFlow(db *gorm.DB, dialect SQLDialect) error { + glog.Infof("Detected legacy schema. Running upgrade flow.") + // Step 1: decide whether to backfill pipeline_versions + // If pipeline_versions table is introduced into DB for the first time, + // it needs initialization or data backfill. + var tableNames []string + initializePipelineVersions := true + db.Raw(`show tables`).Pluck("Tables_in_mlpipeline", &tableNames) + for _, tableName := range tableNames { + if tableName == "pipeline_versions" { + initializePipelineVersions = false + break + } + } + // Step 2: block upgrade if legacy data too long + if err := runPreflightLengthChecks(db, dialect, validation.LengthSpecs); err != nil { + return fmt.Errorf("preflight length check failed: %w", err) + } + + // Step 3: drop all foreign key constraints which can block shrinking columns + if err := dropAllFKConstraints(db, dialect.Name); err != nil { + return fmt.Errorf("drop foreign key constraints failed: %w", err) + } + + if err := dropLegacyIndexes(db, dialect); err != nil { + return fmt.Errorf("drop legacy indexes failed: %w", err) + } + + // Step 4: shrink fields to meet new length constraints + // NOTE: In GORM v2, AutoMigrate performs full reconciliation for most fields, + // including type, size, and nullability. However, it will silently skip + // primary key columns due to database constraints. + // + // Therefore, shrinkColumns() is retained to ensure primary key fields like UUID + // are explicitly resized. While redundant for non-primary fields, shrinkColumns() + // shares a common metadata source (LengthSpecs) with API-layer validation, + // which helps avoid drift between schema and runtime logic. 
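The LengthSpecs slice referenced here comes from the backend/src/apiserver/validation package, which is not included in this hunk. Judging from how the specs are consumed in this file and constructed in client_manager_test.go further down, the shape is roughly the following (a sketch, not the actual definition):

    package validation // sketch of the assumed package, for illustration only

    // ColLenSpec pairs a GORM model field with the maximum character length the
    // new schema allows; field names follow the usages s.Model, s.Field, s.Max
    // in client_manager.go and the literals in client_manager_test.go.
    type ColLenSpec struct {
        Model interface{} // model pointer, e.g. &model.Experiment{}
        Field string      // Go struct field name, e.g. "Name"
        Max   int         // maximum allowed length in characters
    }

    // LengthSpecs would hold one entry per length-constrained column and, per the
    // note above, is shared by API-layer validation and the shrink/preflight steps.
    var LengthSpecs []ColLenSpec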
+ + if err := shrinkColumns(db, validation.LengthSpecs); err != nil { + return fmt.Errorf("shrink columns failed: %w", err) + } + + // Step 5-1: Data backfill in pipelines table for DisplayName column + // Backfilling DisplayName column must be done before automigrate + // to comply with the NOT NULL constraint + if db.Migrator().HasTable(&model.Pipeline{}) { + if !db.Migrator().HasColumn(&model.Pipeline{}, "DisplayName") { + if err := addDisplayNameColumn(db, &model.Pipeline{}, dialect); err != nil { + return fmt.Errorf("failed to add DisplayName column to the %s table: %w", model.Pipeline{}.TableName(), err) + } + } + } + // Step 5-2: Data backfill in pipeline_versions table for DisplayName column + if db.Migrator().HasTable(&model.PipelineVersion{}) { + if !db.Migrator().HasColumn(&model.PipelineVersion{}, "DisplayName") { + if err := addDisplayNameColumn(db, &model.PipelineVersion{}, dialect); err != nil { + return fmt.Errorf("failed to add DisplayName column to the %s table: %w", model.PipelineVersion{}.TableName(), err) + } + } + } + + // Step 6: Run autoMigrate to add any missing FKs/constraints/columns/indexes and + // to reconcile field definitions (size/precision/nullability) where permitted by + // the DB. Primary-key length shrink is already handled in Step 4 (shrinkColumns), + // because AutoMigrate intentionally avoids altering primary keys. + if err := autoMigrate(db); err != nil { + return err + } + + // Step 7: Data backfill for pipeline_versions if this is the first time for + // pipeline_versions to enter mlpipeline DB. + if initializePipelineVersions { + initPipelineVersionsFromPipelines(db) + } + err := backfillExperimentIDToRunTable(db) + if err != nil { + return fmt.Errorf("failed to backfill experiment UUID in run_details table: %s", err) + } + + return nil +} + +// autoMigrate runs the standard reconciliation for non-legacy schemas: +// it is used for first-time installs on >=2.15 and for upgrades between +// >=2.15 versions. Legacy upgrades call this after cleanup/shrinks. +func autoMigrate(db *gorm.DB) error { + glog.Infof("Running AutoMigrate.") + + if err := db.AutoMigrate( + &model.DBStatus{}, + &model.DefaultExperiment{}, + &model.Experiment{}, + &model.Pipeline{}, + &model.PipelineVersion{}, + &model.Job{}, + &model.Run{}, + &model.RunMetric{}, + &model.Task{}, + &model.ResourceReference{}, + ); err != nil { + return fmt.Errorf("AutoMigrate failed: %w", err) + } + + return nil +} + +// getColumnLength returns the declared length for a column using GORM ColumnTypes. +// If the dialect/type doesn't report a length (e.g., TEXT), ok=false. +func getColumnLength(db *gorm.DB, mdl interface{}, column string) (length int64, ok bool, err error) { + colTypes, err := db.Migrator().ColumnTypes(mdl) + if err != nil { + return 0, false, err + } + for _, ct := range colTypes { + if strings.EqualFold(ct.Name(), column) { + l, okLen := ct.Length() + return l, okLen, nil + } + } + return 0, false, nil +} + +// runPreflightLengthChecks scans existing data and aborts upgrade if any row exceeds the new Max length. +// It must be called BEFORE AutoMigrate/DDL that shrinks column definitions.
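For a single spec such as {Model: &model.Experiment{}, Field: "Name", Max: 128} on MySQL, the generic implementation that follows effectively reduces to a count query plus a five-row sample. A hand-expanded sketch (the real code resolves table and column names through FieldMeta and the dialect helpers rather than hard-coding them):

    package clientmanager // illustrative sketch only; not part of the patch

    import "gorm.io/gorm"

    // countTooLongExperimentNames hand-expands one preflight iteration for
    // experiments.Name with Max=128 on MySQL.
    func countTooLongExperimentNames(db *gorm.DB) (count int64, samples []string, err error) {
        // Count offenders; the upgrade aborts when count > 0.
        if err = db.Table("experiments").
            Where("CHAR_LENGTH(`Name`) > ?", 128).
            Count(&count).Error; err != nil {
            return 0, nil, err
        }
        // Collect up to five sample values for the error message.
        err = db.Table("experiments").
            Where("CHAR_LENGTH(`Name`) > ?", 128).
            Limit(5).
            Pluck("Name", &samples).Error
        return count, samples, err
    }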
+func runPreflightLengthChecks(db *gorm.DB, dialect SQLDialect, specs []validation.ColLenSpec) error { + quote := dialect.QuoteIdentifier + + for _, s := range specs { + if !db.Migrator().HasTable(s.Model) { + continue + } + tableName, dbCol, err := FieldMeta(db, s.Model, s.Field) + if err != nil { + return fmt.Errorf("failed to resolve meta for %T.%s: %w", s.Model, s.Field, err) + } + + var cnt int64 + lengthFn := dialect.LengthFunc + where := fmt.Sprintf("%s(%s) > ?", lengthFn, quote(dbCol)) + if err := db.Table(tableName).Where(where, s.Max).Count(&cnt).Error; err != nil { + return fmt.Errorf("preflight length check failed for %s.%s (count): %w", tableName, dbCol, err) + } + if cnt == 0 { + continue + } + + type rowSample struct { + Val string + } + var samples []rowSample + if err := db.Table(tableName). + Select(dbCol+" as Val"). + Where(where, s.Max). + Limit(5). + Scan(&samples).Error; err != nil { + return fmt.Errorf("preflight length check failed for %s.%s (sample): %w", tableName, dbCol, err) + } + + var preview []string + for _, sm := range samples { + if len(sm.Val) > 50 { + preview = append(preview, sm.Val[:50]+"…") + } else { + preview = append(preview, sm.Val) + } + } + + return fmt.Errorf(`[Preflight] %s.%s has %d rows with length > %d. + Reason: This column must stay indexable (e.g. MySQL utf8mb4 index key ≤ 767 bytes).Thus, KFP enforces a max of %d chars. + Action: Shorten these values before upgrading. + Find offenders with: + SELECT UUID, CHAR_LENGTH(%[2]s) AS L FROM %[1]s WHERE CHAR_LENGTH(%[2]s) > %[3]d; + Examples: %v`, + tableName, dbCol, cnt, s.Max, s.Max, preview) + } + return nil +} + +// FieldMeta returns the table name and DB column name for the given model+field. +func FieldMeta(db *gorm.DB, mdl interface{}, field string) (table string, dbCol string, err error) { + stmt := &gorm.Statement{DB: db} + if err = stmt.Parse(mdl); err != nil { + return "", "", err + } + f, ok := stmt.Schema.FieldsByName[field] + if !ok { + return stmt.Table, "", fmt.Errorf("field %s not found in %T", field, mdl) + } + return stmt.Table, f.DBName, nil +} + +// dropAllFKConstraints drops all foreign key constraints from all tables for the given driver. +// This is the minimal set of DDL needed to safely shrink indexed columns in legacy upgrades. +func dropAllFKConstraints(db *gorm.DB, driverName string) error { + switch driverName { + case "mysql": + return dropAllMySQLFKConstraints(db) + case "pgx": + // PostgreSQL legacy path not implemented; no-op for now. + return nil + default: + return fmt.Errorf("DropAllFKConstraints not supported for driver: %s", driverName) + } +} + +// dropAllMySQLFKConstraints drops all foreign key constraints from all tables in the current MySQL database. +func dropAllMySQLFKConstraints(db *gorm.DB) error { + tables := []string{} + if err := db.Raw("SHOW TABLES").Scan(&tables).Error; err != nil { + return fmt.Errorf("failed to list tables: %w", err) + } + + for _, table := range tables { + var foreignKeys []struct { + ConstraintName string `gorm:"column:CONSTRAINT_NAME"` + } + err := db.Raw(fmt.Sprintf(` + SELECT CONSTRAINT_NAME + FROM information_schema.TABLE_CONSTRAINTS + WHERE TABLE_SCHEMA = DATABASE() + AND TABLE_NAME = '%s' + AND CONSTRAINT_TYPE = 'FOREIGN KEY'`, table)). 
+ Scan(&foreignKeys).Error + if err != nil { + glog.Warningf("failed to list foreign keys for table %s: %v", table, err) + continue + } + + for _, fk := range foreignKeys { + glog.Infof("Dropping foreign key %s on table %s", fk.ConstraintName, table) + if err := db.Exec(fmt.Sprintf( + "ALTER TABLE `%s` DROP FOREIGN KEY `%s`", table, fk.ConstraintName, + )).Error; err != nil { + return fmt.Errorf("failed to drop foreign key %s on table %s: %w", fk.ConstraintName, table, err) + } + } + } + return nil +} + +// dropLegacyIndexes removes a small, explicit set of legacy indexes that +// conflict/duplicate with GORM tag definitions. MySQL only; PostgreSQL is no-op. +func dropLegacyIndexes(db *gorm.DB, dialect SQLDialect) error { + switch dialect.Name { + case "mysql": + return dropLegacyIndexesMySQL(db) + case "pgx": + // No legacy cleanup needed for PostgreSQL per upstream note. + return nil + default: + return fmt.Errorf("dropLegacyIndexes: unsupported dialect %q", dialect.Name) + } +} + +// dropLegacyIndexesMySQL removes a small, explicit set of legacy indexes on MySQL: +// See https://github.com/kubeflow/pipelines/pull/12013 +// The function is idempotent: it queries information_schema/SHOW INDEX first and only drops/renames when needed. +func dropLegacyIndexesMySQL(db *gorm.DB) error { + ixLogPrefix := "[legacy-index-cleanup] " + + type idxRow struct { + IndexName string `gorm:"column:INDEX_NAME"` + } + + // --- Rules 1 & 2: drop single-column unique index on Name for experiments/pipelines --- + for _, table := range []string{"experiments", "pipelines"} { + var rows []idxRow + // Find indexes that are UNIQUE, non-PRIMARY, and exactly one column which is Name. + q := fmt.Sprintf(` + SELECT INDEX_NAME + FROM information_schema.STATISTICS + WHERE TABLE_SCHEMA = DATABASE() + AND TABLE_NAME = '%s' + GROUP BY INDEX_NAME + HAVING + COUNT(*) = 1 + AND SUM(CASE WHEN COLUMN_NAME='Name' THEN 1 ELSE 0 END) = 1 + AND MAX(NON_UNIQUE) = 0 + AND MAX(INDEX_NAME <> 'PRIMARY') = 1; + `, table) + + if err := db.Raw(q).Scan(&rows).Error; err != nil { + return fmt.Errorf("query single-column unique Name indexes on %s failed: %w", table, err) + } + for _, r := range rows { + glog.Infof("%s dropping single-column unique index %q on %s(Name)", ixLogPrefix, r.IndexName, table) + stmt := fmt.Sprintf("DROP INDEX `%s` ON `%s`", r.IndexName, table) + if err := db.Exec(stmt).Error; err != nil { + return fmt.Errorf("drop index %s on %s failed: %w", r.IndexName, table, err) + } + } + } + + // --- Rule 3: drop legacy composite unique index on pipeline_versions if present --- + { + if db.Migrator().HasTable(&model.PipelineVersion{}) { + rows, err := db.Raw(`SHOW INDEX FROM pipeline_versions WHERE Key_name='idx_pipeline_version_uuid_name'`).Rows() + if err != nil { + return fmt.Errorf("query pipeline_versions indices failed: %w", err) + } + has := false + if err := rows.Err(); err != nil { + _ = rows.Close() + return fmt.Errorf("iterate pipeline_versions indices failed: %w", err) + } + if rows.Next() { + has = true + } + _ = rows.Close() + if has { + glog.Infof("%s dropping legacy composite unique index idx_pipeline_version_uuid_name on pipeline_versions", ixLogPrefix) + if err := db.Exec("DROP INDEX `idx_pipeline_version_uuid_name` ON `pipeline_versions`").Error; err != nil { + return fmt.Errorf("drop idx_pipeline_version_uuid_name on pipeline_versions failed: %w", err) + } + } + } else { + glog.Infof("%s skip pipeline_versions cleanup: table not found (fresh-introduced case)", ixLogPrefix) + } + } + + // --- Rule 4: 
normalize pipelines(Name,Namespace) unique index to keep only `namespace_name` --- + { + var rows []idxRow + q := ` + SELECT INDEX_NAME + FROM information_schema.STATISTICS + WHERE TABLE_SCHEMA = DATABASE() + AND TABLE_NAME = 'pipelines' + GROUP BY INDEX_NAME + HAVING + COUNT(*) = 2 + AND SUM(CASE WHEN SEQ_IN_INDEX=1 AND COLUMN_NAME='Name' THEN 1 ELSE 0 END) = 1 + AND SUM(CASE WHEN SEQ_IN_INDEX=2 AND COLUMN_NAME='Namespace' THEN 1 ELSE 0 END) = 1 + AND MAX(NON_UNIQUE) = 0; + ` + if err := db.Raw(q).Scan(&rows).Error; err != nil { + return fmt.Errorf("query composite unique (Name,Namespace) indexes on pipelines failed: %w", err) + } + // Set of existing names for this composite index + has := map[string]bool{} + for _, r := range rows { + has[r.IndexName] = true + } + + // Case A: both exist -> drop legacy name_namespace_index, keep namespace_name + if has["namespace_name"] && has["name_namespace_index"] { + glog.Infof("%s dropping duplicate unique index name_namespace_index on pipelines(Name,Namespace), keeping namespace_name", ixLogPrefix) + if err := db.Exec("DROP INDEX `name_namespace_index` ON `pipelines`").Error; err != nil { + return fmt.Errorf("drop name_namespace_index on pipelines failed: %w", err) + } + } else if !has["namespace_name"] && has["name_namespace_index"] { + // Case B: only legacy exists -> try rename to avoid rebuild; fallback to drop. + glog.Infof("%s renaming name_namespace_index -> namespace_name on pipelines(Name,Namespace)", ixLogPrefix) + if err := db.Exec("ALTER TABLE `pipelines` RENAME INDEX `name_namespace_index` TO `namespace_name`").Error; err != nil { + glog.Warningf("%s rename failed (%v); dropping name_namespace_index; AutoMigrate will recreate namespace_name", ixLogPrefix, err) + if err2 := db.Exec("DROP INDEX `name_namespace_index` ON `pipelines`").Error; err2 != nil { + return fmt.Errorf("drop name_namespace_index on pipelines failed after rename attempt: %w", err2) + } + } + } + // Case C/D: only namespace_name exists or none exist -> no-op + } + + return nil +} + +func shrinkColumns(db *gorm.DB, specs []validation.ColLenSpec) error { + for _, s := range specs { + if !db.Migrator().HasTable(s.Model) { + continue + } + if err := ensureColumnLength(db, s); err != nil { + return err + } + } + return nil +} + +func ensureColumnLength(db *gorm.DB, spec validation.ColLenSpec) error { + + tableName, dbCol, err := FieldMeta(db, spec.Model, spec.Field) + if err != nil { + return fmt.Errorf("failed to resolve meta for %T.%s: %w", spec.Model, spec.Field, err) + } + + // Current length + curLen, haveLen, err := getColumnLength(db, spec.Model, dbCol) + if err != nil { + return fmt.Errorf("columnTypes read failed for %s.%s: %w", tableName, dbCol, err) + } + if haveLen && curLen <= int64(spec.Max) { + return nil + } + + // Alter via GORM + if err := db.Migrator().AlterColumn(spec.Model, spec.Field); err != nil { + return fmt.Errorf("AlterColumn failed for %s.%s (field=%s): %w", tableName, dbCol, spec.Field, err) + } + + // Verify after alter + newLen, haveLen2, err := getColumnLength(db, spec.Model, dbCol) + if err != nil { + return fmt.Errorf("post-AlterColumn columnTypes read failed for %s.%s: %w", tableName, dbCol, err) + } + if haveLen2 && newLen > int64(spec.Max) { + return fmt.Errorf("after AlterColumn, %s.%s length=%d (> %d)", tableName, dbCol, newLen, spec.Max) + } + return nil +} + +// addDisplayNameColumn ensures the DisplayName column exists on the given model's table, +// backfills it from Name where missing, and then enforces NOT NULL. 
+// It is safe to call multiple times (idempotent). +func addDisplayNameColumn(db *gorm.DB, mdl interface{}, dialect SQLDialect) error { + + table, dbCol, err := FieldMeta(db, mdl, "DisplayName") + if err != nil { + return fmt.Errorf("resolve meta for %T.DisplayName failed: %w", mdl, err) + } + // Only allow these tables to have DisplayName added to prevent accidental schema changes. + allowed := map[string]bool{ + model.Pipeline{}.TableName(): true, + model.PipelineVersion{}.TableName(): true, + } + if !allowed[table] { + return fmt.Errorf("table %q is not allowed for DisplayName migration", table) + } + + if db.Migrator().HasColumn(mdl, dbCol) { + return nil + } + + q := dialect.QuoteIdentifier + quotedTable := q(table) + glog.Info("Adding DisplayName column to " + quotedTable) + + return db.Transaction(func(tx *gorm.DB) error { + var stmts []string + switch dialect.Name { + case "mysql": + stmts = []string{ + "ALTER TABLE " + quotedTable + " ADD COLUMN " + q(dbCol) + " VARCHAR(255) NULL;", + "UPDATE " + quotedTable + " SET " + q(dbCol) + " = " + q("Name") + " WHERE " + q(dbCol) + " IS NULL;", + "ALTER TABLE " + quotedTable + " MODIFY COLUMN " + q(dbCol) + " VARCHAR(255) NOT NULL;", + } + case "pgx": + stmts = []string{ + "ALTER TABLE " + quotedTable + " ADD COLUMN " + q(dbCol) + " VARCHAR(255);", + "UPDATE " + quotedTable + " SET " + q(dbCol) + " = " + q("Name") + " WHERE " + q(dbCol) + " IS NULL;", + "ALTER TABLE " + quotedTable + " ALTER COLUMN " + q(dbCol) + " SET NOT NULL;", + } + default: + return fmt.Errorf("unsupported driver: %s", dialect.Name) + } + + for _, s := range stmts { + if err := tx.Exec(s).Error; err != nil { + return fmt.Errorf("exec failed for %q: %w", s, err) + } + } + return nil + }) +} + func initMinioClient(ctx context.Context, initConnectionTimeout time.Duration) storage.ObjectStoreInterface { // Create minio client. 
- minioServiceHost := common.GetStringConfigWithDefault( - "ObjectStoreConfig.Host", os.Getenv(minioServiceHost)) - minioServicePort := common.GetStringConfigWithDefault( - "ObjectStoreConfig.Port", os.Getenv(minioServicePort)) - minioServiceRegion := common.GetStringConfigWithDefault( - "ObjectStoreConfig.Region", os.Getenv(minioServiceRegion)) - minioServiceSecure := common.GetBoolConfigWithDefault( - "ObjectStoreConfig.Secure", common.GetBoolFromStringWithDefault(os.Getenv(minioServiceSecure), false)) + minioServiceHost := common.GetStringConfigWithDefault("ObjectStoreConfig.Host", "") + minioServicePort := common.GetStringConfigWithDefault("ObjectStoreConfig.Port", "") + minioServiceRegion := common.GetStringConfigWithDefault("ObjectStoreConfig.Region", "") + minioServiceSecure := common.GetBoolConfigWithDefault("ObjectStoreConfig.Secure", false) accessKey := common.GetStringConfigWithDefault("ObjectStoreConfig.AccessKey", "") secretKey := common.GetStringConfigWithDefault("ObjectStoreConfig.SecretAccessKey", "") - bucketName := common.GetStringConfigWithDefault("ObjectStoreConfig.BucketName", os.Getenv(pipelineBucketName)) - pipelinePath := common.GetStringConfigWithDefault("ObjectStoreConfig.PipelinePath", os.Getenv(pipelinePath)) + bucketName := common.GetStringConfigWithDefault("ObjectStoreConfig.BucketName", "") + pipelinePath := common.GetStringConfigWithDefault("ObjectStoreConfig.PipelinePath", "") disableMultipart := common.GetBoolConfigWithDefault("ObjectStoreConfig.Multipart.Disable", true) minioClient := client.CreateMinioClientOrFatal(minioServiceHost, minioServicePort, accessKey, @@ -721,9 +1022,22 @@ func initPipelineVersionsFromPipelines(db *gorm.DB) { // On the other hand, pipeline and its pipeline versions created after // pipeline version API is introduced will have different Ids; and the minio // file will be put directly into the directories for pipeline versions. - tx.Exec(`INSERT INTO - pipeline_versions (UUID, Name, CreatedAtInSec, Parameters, Status, PipelineId) - SELECT UUID, Name, CreatedAtInSec, Parameters, Status, UUID FROM pipelines;`) + tx.Exec(` + INSERT INTO pipeline_versions + (UUID, Name, DisplayName, CreatedAtInSec, Parameters, Status, PipelineId, PipelineSpec, PipelineSpecURI) + SELECT + p.UUID, + p.Name, + p.Name AS DisplayName, + p.CreatedAtInSec, + COALESCE(p.Parameters, '{}') AS Parameters, + COALESCE(p.Status, 'READY') AS Status, + p.UUID AS PipelineId, + '' AS PipelineSpec, + '' AS PipelineSpecURI + FROM pipelines p + LEFT JOIN pipeline_versions v ON v.UUID = p.UUID + WHERE v.UUID IS NULL;`) // Step 2: modifiy pipelines table after pipeline_versions are populated. 
tx.Exec("update pipelines set DefaultVersionId=UUID;") @@ -733,7 +1047,11 @@ func initPipelineVersionsFromPipelines(db *gorm.DB) { func backfillExperimentIDToRunTable(db *gorm.DB) error { // check if there is any row in the run table has experiment ID being empty - rows, err := db.CommonDB().Query("SELECT \"ExperimentUUID\" FROM run_details WHERE \"ExperimentUUID\" = '' LIMIT 1") + sqlDB, err := db.DB() + if err != nil { + return err + } + rows, err := sqlDB.Query("SELECT \"ExperimentUUID\" FROM run_details WHERE \"ExperimentUUID\" = '' LIMIT 1") if err != nil { return err } @@ -747,7 +1065,7 @@ func backfillExperimentIDToRunTable(db *gorm.DB) error { return nil } - _, err = db.CommonDB().Exec(` + _, err = sqlDB.Exec(` UPDATE run_details, resource_references SET @@ -763,11 +1081,8 @@ func backfillExperimentIDToRunTable(db *gorm.DB) error { // Returns the same error, if it's not "already exists" related. // Otherwise, return nil. -func ignoreAlreadyExistError(driverName string, err error) error { - if driverName == "pgx" && err != nil && strings.Contains(err.Error(), client.PGX_EXIST_ERROR) { - return nil - } - if driverName == "mysql" && err != nil && strings.Contains(err.Error(), client.MYSQL_EXIST_ERROR) { +func ignoreAlreadyExistError(dialect SQLDialect, err error) error { + if err != nil && strings.Contains(err.Error(), dialect.ExistDatabaseErrHint) { return nil } return err diff --git a/backend/src/apiserver/client_manager/client_manager_test.go b/backend/src/apiserver/client_manager/client_manager_test.go new file mode 100644 index 00000000000..b8ad58ba00d --- /dev/null +++ b/backend/src/apiserver/client_manager/client_manager_test.go @@ -0,0 +1,108 @@ +// Copyright 2018-2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package clientmanager + +import ( + "fmt" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gorm.io/driver/sqlite" + "gorm.io/gorm" + + "github.com/kubeflow/pipelines/backend/src/apiserver/model" + "github.com/kubeflow/pipelines/backend/src/apiserver/validation" +) + +// getTestSQLite returns an isolated in-memory sqlite DB for each test. +// We use a unique DSN per test to avoid "table already exists" collisions when tests reuse the same shared cache. +func getTestSQLite(t *testing.T) *gorm.DB { + t.Helper() + // sanitize test name to be a valid sqlite file identifier + name := strings.ReplaceAll(t.Name(), "/", "_") + dsn := fmt.Sprintf("file:%s?mode=memory&cache=shared", name) + db, err := gorm.Open(sqlite.Open(dsn), &gorm.Config{}) + require.NoError(t, err) + return db +} + +func createOldExperimentSchema(t *testing.T, db *gorm.DB) { + t.Helper() + // Ensure a clean slate if a previous test already created it in the shared cache. 
+ require.NoError(t, db.Exec(`DROP TABLE IF EXISTS experiments`).Error) + + stmt := ` +CREATE TABLE IF NOT EXISTS experiments ( + UUID TEXT NOT NULL, + Name TEXT NOT NULL, + Namespace TEXT NOT NULL, + CreatedAtInSec INTEGER NOT NULL DEFAULT 0, + LastRunCreatedAtInSec INTEGER NOT NULL DEFAULT 0, + StorageState TEXT NOT NULL DEFAULT 'STORAGESTATE_AVAILABLE', + PRIMARY KEY (UUID) +);` + require.NoError(t, db.Exec(stmt).Error) +} + +// insertTooLongExperimentName inserts one row whose Name exceeds 128 chars. +func insertTooLongExperimentName(t *testing.T, db *gorm.DB) { + t.Helper() + longName := strings.Repeat("x", 150) + err := db.Exec(` +INSERT INTO experiments (UUID, Name, Namespace, CreatedAtInSec, LastRunCreatedAtInSec, StorageState) +VALUES (?, ?, 'ns', 0, 0, 'STORAGESTATE_AVAILABLE')`, "uuid-1", longName).Error + require.NoError(t, err) +} + +func TestRunPreflightLengthChecks_FailOnTooLong(t *testing.T) { + db := getTestSQLite(t) + + createOldExperimentSchema(t, db) + insertTooLongExperimentName(t, db) + + specs := []validation.ColLenSpec{ + {Model: &model.Experiment{}, Field: "Name", Max: 128}, + } + + dialect := GetDialect("sqlite") + err := runPreflightLengthChecks(db, dialect, specs) + t.Logf("FULL ERR:\n%+v", err) + require.Error(t, err) + assert.Contains(t, err.Error(), "Preflight") +} + +func TestRunPreflightLengthChecks_PassWhenOK(t *testing.T) { + db := getTestSQLite(t) + + createOldExperimentSchema(t, db) + // no long rows + + dialect := GetDialect("sqlite") + err := runPreflightLengthChecks(db, dialect, []validation.ColLenSpec{ + {Model: &model.Experiment{}, Field: "Name", Max: 128}, + }) + require.NoError(t, err) +} + +func TestFieldMeta_TaskRunId(t *testing.T) { + // FieldMeta only inspects schema; sqlite driver is sufficient. + db := getTestSQLite(t) + table, dbCol, err := FieldMeta(db, &model.Task{}, "RunId") + require.NoError(t, err) + assert.Equal(t, "tasks", table) + assert.Equal(t, "RunUUID", dbCol) +} diff --git a/backend/src/apiserver/client_manager/dialect.go b/backend/src/apiserver/client_manager/dialect.go new file mode 100644 index 00000000000..bcfaa89d55a --- /dev/null +++ b/backend/src/apiserver/client_manager/dialect.go @@ -0,0 +1,61 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// see the License for the specific language governing permissions and +// limitations under the License. + +// Package clientmanager provides tools for managing database clients, initialization flows, +// and dialect-specific helpers such as identifier quoting and SQL function mapping. 
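The dialect helpers defined next are what the migration code above composes into dialect-aware SQL; for instance, the preflight length predicate is built along these lines (a sketch that assumes the SQLDialect type declared just below):

    package clientmanager // illustrative sketch only

    import "fmt"

    // lengthPredicate shows the intended call pattern: quote the column with the
    // dialect's identifier rules and wrap it in the dialect's length function.
    // With GetDialect("mysql") this yields "CHAR_LENGTH(`Name`) > ?";
    // with GetDialect("pgx") it yields "CHAR_LENGTH(\"Name\") > ?".
    func lengthPredicate(d SQLDialect, column string) string {
        return fmt.Sprintf("%s(%s) > ?", d.LengthFunc, d.QuoteIdentifier(column))
    }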
+package clientmanager + +import ( + sq "github.com/Masterminds/squirrel" +) + +type SQLDialect struct { + Name string + QuoteIdentifier func(string) string + LengthFunc string + StatementBuilder sq.StatementBuilderType + ExistDatabaseErrHint string +} + +func GetDialect(name string) SQLDialect { + switch name { + case "mysql": + return SQLDialect{ + Name: "mysql", + QuoteIdentifier: func(id string) string { return "`" + id + "`" }, + LengthFunc: "CHAR_LENGTH", + StatementBuilder: sq.StatementBuilder.PlaceholderFormat(sq.Question), + ExistDatabaseErrHint: "database exists", + } + case "pgx": + return SQLDialect{ + Name: "pgx", + QuoteIdentifier: func(id string) string { return `"` + id + `"` }, + LengthFunc: "CHAR_LENGTH", + StatementBuilder: sq.StatementBuilder.PlaceholderFormat(sq.Dollar), + ExistDatabaseErrHint: "already exists", + } + case "sqlite": + // Only for test + return SQLDialect{ + Name: "sqlite", + QuoteIdentifier: func(id string) string { return `"` + id + `"` }, + LengthFunc: "LENGTH", + StatementBuilder: sq.StatementBuilder.PlaceholderFormat(sq.Question), + ExistDatabaseErrHint: "", + } + default: + panic("Unsupported dialect: " + name) + } +} diff --git a/backend/src/apiserver/client_manager/dialect_test.go b/backend/src/apiserver/client_manager/dialect_test.go new file mode 100644 index 00000000000..0298446d11b --- /dev/null +++ b/backend/src/apiserver/client_manager/dialect_test.go @@ -0,0 +1,88 @@ +// Copyright 2018-2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package clientmanager + +import ( + "testing" +) + +func TestGetDialect_MySQL(t *testing.T) { + d := GetDialect("mysql") + if d.Name != "mysql" { + t.Errorf("Expected mysql dialect, got %s", d.Name) + } + if d.QuoteIdentifier("abc") != "`abc`" { + t.Errorf("MySQL quote failed") + } + if d.LengthFunc != "CHAR_LENGTH" { + t.Errorf("Expected CHAR_LENGTH, got %s", d.LengthFunc) + } + sql, _, err := d.StatementBuilder.Select("1").ToSql() + if err != nil { + t.Errorf("Failed to build SQL: %v", err) + } + if sql != "SELECT 1" { + t.Errorf("Expected 'SELECT 1', got '%s'", sql) + } + if d.ExistDatabaseErrHint != "database exists" { + t.Errorf("Incorrect error hint: %s", d.ExistDatabaseErrHint) + } +} + +func TestGetDialect_Pgx(t *testing.T) { + d := GetDialect("pgx") + if d.Name != "pgx" { + t.Errorf("Expected pgx dialect, got %s", d.Name) + } + if d.QuoteIdentifier("abc") != `"abc"` { + t.Errorf("Pgx quote failed") + } + if d.LengthFunc != "CHAR_LENGTH" { + t.Errorf("Expected CHAR_LENGTH, got %s", d.LengthFunc) + } + sql, _, err := d.StatementBuilder.Select("1").ToSql() + if err != nil { + t.Errorf("Failed to build SQL: %v", err) + } + if sql != "SELECT 1" { + t.Errorf("Expected 'SELECT 1', got '%s'", sql) + } + if d.ExistDatabaseErrHint != "already exists" { + t.Errorf("Incorrect error hint: %s", d.ExistDatabaseErrHint) + } +} + +func TestGetDialect_SQLite(t *testing.T) { + d := GetDialect("sqlite") + if d.Name != "sqlite" { + t.Errorf("Expected sqlite dialect, got %s", d.Name) + } + if d.QuoteIdentifier("abc") != `"abc"` { + t.Errorf("SQLite quote failed") + } + if d.LengthFunc != "LENGTH" { + t.Errorf("Expected LENGTH, got %s", d.LengthFunc) + } + sql, _, err := d.StatementBuilder.Select("1").ToSql() + if err != nil { + t.Errorf("Failed to build SQL: %v", err) + } + if sql != "SELECT 1" { + t.Errorf("Expected 'SELECT 1', got '%s'", sql) + } + if d.ExistDatabaseErrHint != "" { + t.Errorf("Incorrect error hint: %s", d.ExistDatabaseErrHint) + } +} diff --git a/backend/src/apiserver/common/utils.go b/backend/src/apiserver/common/utils.go index da48fcb5b25..3303e1f5ad0 100644 --- a/backend/src/apiserver/common/utils.go +++ b/backend/src/apiserver/common/utils.go @@ -20,6 +20,9 @@ import ( "regexp" "strings" + "google.golang.org/protobuf/encoding/protojson" + + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" "github.com/kubeflow/pipelines/backend/src/common/util" "go.uber.org/zap/zapcore" ) @@ -143,3 +146,22 @@ func FileExists(filePath string) bool { _, err := os.Stat(filePath) return !os.IsNotExist(err) } + +// CustomMarshaler will create a custom marshaler to use snake_case +// for proto field names, and allow the api server to error on +// invalid fields. 
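The function below builds on grpc-gateway v2's JSONPb. Its registration on the gateway mux happens in main.go and is not shown in this hunk, but the wiring typically looks like this (a sketch; newGatewayMux is a hypothetical helper, not code from the patch):

    package gatewayexample // illustrative sketch only

    import (
        "github.com/grpc-ecosystem/grpc-gateway/v2/runtime"

        "github.com/kubeflow/pipelines/backend/src/apiserver/common"
    )

    // newGatewayMux registers the snake_case marshaler for every content type,
    // so REST responses use proto field names and unknown request fields error out.
    func newGatewayMux() *runtime.ServeMux {
        return runtime.NewServeMux(
            runtime.WithMarshalerOption(runtime.MIMEWildcard, common.CustomMarshaler()),
        )
    }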
+func CustomMarshaler() *runtime.JSONPb { + return &runtime.JSONPb{ + MarshalOptions: protojson.MarshalOptions{ + // This allows us to use proto field names which are + // in snake_case format + UseProtoNames: true, + EmitUnpopulated: false, + }, + UnmarshalOptions: protojson.UnmarshalOptions{ + // We want to allow the api server to error on + // invalid fields + DiscardUnknown: false, + }, + } +} diff --git a/backend/src/apiserver/config/config.go b/backend/src/apiserver/config/config.go index ff40133f012..5bba54e80fb 100644 --- a/backend/src/apiserver/config/config.go +++ b/backend/src/apiserver/config/config.go @@ -141,7 +141,7 @@ func LoadSamples(resourceManager *resource.ResourceManager, sampleConfigPath str p, configErr = resourceManager.CreatePipeline(&model.Pipeline{ Name: cfg.Name, DisplayName: pipelineDisplayName, - Description: cfg.Description, + Description: model.LargeText(cfg.Description), }) if configErr != nil { // Log the error but not fail. The API Server pod can restart and it could potentially cause @@ -191,9 +191,9 @@ func LoadSamples(resourceManager *resource.ResourceManager, sampleConfigPath str &model.PipelineVersion{ Name: pvName, DisplayName: pvDisplayName, - Description: pvDescription, + Description: model.LargeText(pvDescription), PipelineId: p.UUID, - PipelineSpec: string(pipelineFile), + PipelineSpec: model.LargeText(string(pipelineFile)), }, ) if configErr != nil { diff --git a/backend/src/apiserver/config/config.json b/backend/src/apiserver/config/config.json index 3191ef5057d..da63a580092 100644 --- a/backend/src/apiserver/config/config.json +++ b/backend/src/apiserver/config/config.json @@ -23,6 +23,14 @@ "DEFAULTPIPELINERUNNERSERVICEACCOUNT": "pipeline-runner", "CacheEnabled": "true", "CRON_SCHEDULE_TIMEZONE": "UTC", - "CACHE_IMAGE": "registry.k8s.io/busybox", - "CACHE_NODE_RESTRICTIONS": "false" + "CACHE_IMAGE": "ghcr.io/containerd/busybox", + "CACHE_NODE_RESTRICTIONS": "false", + "Workspace": { + "VolumeClaimTemplateSpec": { + "accessModes": [ + "ReadWriteOnce" + ], + "storageClassName": "standard-csi" + } + } } diff --git a/backend/src/apiserver/config/proxy/config.go b/backend/src/apiserver/config/proxy/config.go index ec4bd2dfaf2..f94748e6119 100644 --- a/backend/src/apiserver/config/proxy/config.go +++ b/backend/src/apiserver/config/proxy/config.go @@ -23,7 +23,7 @@ const ( HttpProxyEnv = "HTTP_PROXY" HttpsProxyEnv = "HTTPS_PROXY" NoProxyEnv = "NO_PROXY" - defaultNoProxyValue = "localhost,127.0.0.1,.svc.cluster.local,kubernetes.default.svc,metadata-grpc-service,0,1,2,3,4,5,6,7,8,9" + defaultNoProxyValue = "localhost,127.0.0.1,.svc.cluster.local,kubernetes.default.svc,minio-service.kubeflow,metadata-grpc-service,metadata-grpc-service.kubeflow,ml-pipeline.kubeflow" ) type Config interface { diff --git a/backend/src/apiserver/filter/filter.go b/backend/src/apiserver/filter/filter.go index 558147a1d4f..4bce3995e1a 100644 --- a/backend/src/apiserver/filter/filter.go +++ b/backend/src/apiserver/filter/filter.go @@ -23,7 +23,6 @@ import ( "strings" "github.com/Masterminds/squirrel" - "github.com/golang/protobuf/ptypes" apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" "github.com/kubeflow/pipelines/backend/src/common/util" @@ -521,11 +520,7 @@ func toValue(v interface{}) (interface{}, error) { case *apiv2beta1.Predicate_StringValue: return v.StringValue, nil case *apiv2beta1.Predicate_TimestampValue: - ts, err := ptypes.Timestamp(v.TimestampValue) - if err != 
nil { - return nil, util.NewInvalidInputError("invalid timestamp: %v", err) - } - return ts.Unix(), nil + return v.TimestampValue.AsTime().Unix(), nil case *apiv2beta1.Predicate_IntValues_: return v.IntValues.GetValues(), nil case *apiv2beta1.Predicate_StringValues_: @@ -540,11 +535,7 @@ func toValue(v interface{}) (interface{}, error) { case *apiv1beta1.Predicate_StringValue: return v.StringValue, nil case *apiv1beta1.Predicate_TimestampValue: - ts, err := ptypes.Timestamp(v.TimestampValue) - if err != nil { - return nil, util.NewInvalidInputError("invalid timestamp: %v", err) - } - return ts.Unix(), nil + return v.TimestampValue.AsTime().Unix(), nil case *apiv1beta1.Predicate_IntValues: return v.IntValues.GetValues(), nil case *apiv1beta1.Predicate_StringValues: diff --git a/backend/src/apiserver/filter/filter_test.go b/backend/src/apiserver/filter/filter_test.go index a9432b96f55..ab27c004053 100644 --- a/backend/src/apiserver/filter/filter_test.go +++ b/backend/src/apiserver/filter/filter_test.go @@ -18,8 +18,9 @@ import ( "encoding/json" "testing" + "google.golang.org/protobuf/encoding/prototext" + "github.com/Masterminds/squirrel" - "github.com/golang/protobuf/proto" "github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp/cmpopts" apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" @@ -91,7 +92,7 @@ func TestValidNewFiltersV1(t *testing.T) { for _, test := range tests { filterProto := &apiv1beta1.Filter{} - if err := proto.UnmarshalText(test.protoStr, filterProto); err != nil { + if err := prototext.Unmarshal([]byte(test.protoStr), filterProto); err != nil { t.Errorf("Failed to unmarshal Filter text proto\n%q\nError: %v", test.protoStr, err) continue } @@ -164,7 +165,7 @@ func TestValidNewFilters(t *testing.T) { for _, test := range tests { filterProto := &apiv2beta1.Filter{} - if err := proto.UnmarshalText(test.protoStr, filterProto); err != nil { + if err := prototext.Unmarshal([]byte(test.protoStr), filterProto); err != nil { t.Errorf("Failed to unmarshal Filter text proto\n%q\nError: %v", test.protoStr, err) continue } @@ -209,7 +210,7 @@ func TestValidNewFiltersWithKeyMapV1(t *testing.T) { for _, test := range tests { filterProto := &apiv1beta1.Filter{} - if err := proto.UnmarshalText(test.protoStr, filterProto); err != nil { + if err := prototext.Unmarshal([]byte(test.protoStr), filterProto); err != nil { t.Errorf("Failed to unmarshal Filter text proto\n%q\nError: %v", test.protoStr, err) continue } @@ -261,7 +262,7 @@ func TestValidNewFiltersWithKeyMap(t *testing.T) { for _, test := range tests { filterProto := &apiv2beta1.Filter{} - if err := proto.UnmarshalText(test.protoStr, filterProto); err != nil { + if err := prototext.Unmarshal([]byte(test.protoStr), filterProto); err != nil { t.Errorf("Failed to unmarshal Filter text proto\n%q\nError: %v", test.protoStr, err) continue } @@ -337,16 +338,11 @@ func TestInvalidFiltersV1(t *testing.T) { { `predicates { key: "total" op: IN }`, }, - // Bad timestamp - { - `predicates { key: "total" op: LESS_THAN - timestamp_value { seconds: -100000000000 }}`, - }, } for _, test := range tests { filterProto := &apiv1beta1.Filter{} - if err := proto.UnmarshalText(test.protoStr, filterProto); err != nil { + if err := prototext.Unmarshal([]byte(test.protoStr), filterProto); err != nil { t.Errorf("Failed to unmarshal Filter text proto\n%q\nError: %v", test.protoStr, err) continue } @@ -415,16 +411,11 @@ func TestInvalidFilters(t *testing.T) { { `predicates { key: "total" operation: IN }`, }, - // Bad timestamp - { - 
`predicates { key: "total" operation: LESS_THAN - timestamp_value { seconds: -100000000000 }}`, - }, } for _, test := range tests { filterProto := &apiv2beta1.Filter{} - if err := proto.UnmarshalText(test.protoStr, filterProto); err != nil { + if err := prototext.Unmarshal([]byte(test.protoStr), filterProto); err != nil { t.Errorf("Failed to unmarshal Filter text proto\n%q\nError: %v", test.protoStr, err) continue } @@ -506,7 +497,7 @@ func TestAddToSelectV1(t *testing.T) { for _, test := range tests { filterProto := &apiv1beta1.Filter{} - if err := proto.UnmarshalText(test.protoStr, filterProto); err != nil { + if err := prototext.Unmarshal([]byte(test.protoStr), filterProto); err != nil { t.Errorf("Failed to unmarshal Filter text proto\n%q\nError: %v", test.protoStr, err) continue } @@ -595,7 +586,7 @@ func TestAddToSelect(t *testing.T) { for _, test := range tests { filterProto := &apiv2beta1.Filter{} - if err := proto.UnmarshalText(test.protoStr, filterProto); err != nil { + if err := prototext.Unmarshal([]byte(test.protoStr), filterProto); err != nil { t.Errorf("Failed to unmarshal Filter text proto\n%q\nError: %v", test.protoStr, err) continue } diff --git a/backend/src/apiserver/main.go b/backend/src/apiserver/main.go index 6fd45d93be2..330e48e4de5 100644 --- a/backend/src/apiserver/main.go +++ b/backend/src/apiserver/main.go @@ -31,7 +31,7 @@ import ( "github.com/fsnotify/fsnotify" "github.com/golang/glog" "github.com/gorilla/mux" - "github.com/grpc-ecosystem/grpc-gateway/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" cm "github.com/kubeflow/pipelines/backend/src/apiserver/client_manager" @@ -50,6 +50,7 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/credentials" "google.golang.org/grpc/reflection" + corev1 "k8s.io/api/core/v1" ctrlclient "sigs.k8s.io/controller-runtime/pkg/client" ctrllog "sigs.k8s.io/controller-runtime/pkg/log" "sigs.k8s.io/controller-runtime/pkg/log/zap" @@ -59,6 +60,7 @@ import ( const ( executionTypeEnv = "ExecutionType" launcherEnv = "Launcher" + workspaceConfig = "workspace" ) var ( @@ -186,11 +188,18 @@ func main() { } + var pvcSpec *corev1.PersistentVolumeClaimSpec + pvcSpec, err = getPVCSpec() + if err != nil { + glog.Fatalf("Failed to get Workspace PVC Spec: %v", err) + } + resourceManager := resource.NewResourceManager( clientManager, &resource.ResourceManagerOptions{ - CollectMetrics: *collectMetricsFlag, - CacheDisabled: !common.GetBoolConfigWithDefault("CacheEnabled", true), + CollectMetrics: *collectMetricsFlag, + CacheDisabled: !common.GetBoolConfigWithDefault("CacheEnabled", true), + DefaultWorkspace: pvcSpec, }, ) err = config.LoadSamples(resourceManager, *sampleConfigPath) @@ -251,23 +260,29 @@ func startRpcServer(resourceManager *resource.ResourceManager, tlsConfig *tls.Co glog.Fatalf("Failed to start RPC server: %v", err) } - sharedExperimentServer := server.NewExperimentServer(resourceManager, &server.ExperimentServerOptions{CollectMetrics: *collectMetricsFlag}) - sharedPipelineServer := server.NewPipelineServer( - resourceManager, - &server.PipelineServerOptions{ - CollectMetrics: *collectMetricsFlag, - }, - ) - sharedJobServer := server.NewJobServer(resourceManager, &server.JobServerOptions{CollectMetrics: *collectMetricsFlag}) - sharedRunServer := server.NewRunServer(resourceManager, &server.RunServerOptions{CollectMetrics: *collectMetricsFlag}) - sharedArtifactServer 
:= server.NewArtifactServer(resourceManager, &server.ArtifactServerOptions{CollectMetrics: *collectMetricsFlag}) - apiv1beta1.RegisterExperimentServiceServer(s, sharedExperimentServer) - apiv2beta1.RegisterArtifactServiceServer(s, sharedArtifactServer) - apiv1beta1.RegisterPipelineServiceServer(s, sharedPipelineServer) - apiv1beta1.RegisterJobServiceServer(s, sharedJobServer) - apiv1beta1.RegisterRunServiceServer(s, sharedRunServer) + ExperimentServerV1 := server.NewExperimentServerV1(resourceManager, &server.ExperimentServerOptions{CollectMetrics: *collectMetricsFlag}) + ExperimentServer := server.NewExperimentServer(resourceManager, &server.ExperimentServerOptions{CollectMetrics: *collectMetricsFlag}) + + PipelineServerV1 := server.NewPipelineServerV1(resourceManager, &server.PipelineServerOptions{CollectMetrics: *collectMetricsFlag}) + PipelineServer := server.NewPipelineServer(resourceManager, &server.PipelineServerOptions{CollectMetrics: *collectMetricsFlag}) + + RunServerV1 := server.NewRunServerV1(resourceManager, &server.RunServerOptions{CollectMetrics: *collectMetricsFlag}) + RunServer := server.NewRunServer(resourceManager, &server.RunServerOptions{CollectMetrics: *collectMetricsFlag}) + + JobServerV1 := server.NewJobServerV1(resourceManager, &server.JobServerOptions{CollectMetrics: *collectMetricsFlag}) + JobServer := server.NewJobServer(resourceManager, &server.JobServerOptions{CollectMetrics: *collectMetricsFlag}) + + ReportServerV1 := server.NewReportServerV1(resourceManager) + ReportServer := server.NewReportServer(resourceManager) + + ArtifactServer := server.NewArtifactServer(resourceManager, &server.ArtifactServerOptions{CollectMetrics: *collectMetricsFlag}) + + apiv1beta1.RegisterExperimentServiceServer(s, ExperimentServerV1) + apiv1beta1.RegisterPipelineServiceServer(s, PipelineServerV1) + apiv1beta1.RegisterJobServiceServer(s, JobServerV1) + apiv1beta1.RegisterRunServiceServer(s, RunServerV1) apiv1beta1.RegisterTaskServiceServer(s, server.NewTaskServer(resourceManager)) - apiv1beta1.RegisterReportServiceServer(s, server.NewReportServer(resourceManager)) + apiv1beta1.RegisterReportServiceServer(s, ReportServerV1) apiv1beta1.RegisterVisualizationServiceServer( s, @@ -278,11 +293,12 @@ func startRpcServer(resourceManager *resource.ResourceManager, tlsConfig *tls.Co )) apiv1beta1.RegisterAuthServiceServer(s, server.NewAuthServer(resourceManager)) - apiv2beta1.RegisterExperimentServiceServer(s, sharedExperimentServer) - apiv2beta1.RegisterPipelineServiceServer(s, sharedPipelineServer) - apiv2beta1.RegisterRecurringRunServiceServer(s, sharedJobServer) - apiv2beta1.RegisterRunServiceServer(s, sharedRunServer) - apiv2beta1.RegisterReportServiceServer(s, server.NewReportServer(resourceManager)) + apiv2beta1.RegisterExperimentServiceServer(s, ExperimentServer) + apiv2beta1.RegisterArtifactServiceServer(s, ArtifactServer) + apiv2beta1.RegisterPipelineServiceServer(s, PipelineServer) + apiv2beta1.RegisterRecurringRunServiceServer(s, JobServer) + apiv2beta1.RegisterRunServiceServer(s, RunServer) + apiv2beta1.RegisterReportServiceServer(s, ReportServer) // Register reflection service on gRPC server. reflection.Register(s) @@ -461,3 +477,22 @@ func initConfig() { proxy.InitializeConfigWithEnv() } + +// getPVCSpec retrieves the default workspace PersistentVolumeClaimSpec from the config. +// This default is used for workspace PVCs when users do not specify their own configuration. 
+func getPVCSpec() (*corev1.PersistentVolumeClaimSpec, error) { + workspaceConfig := viper.Sub(workspaceConfig) + if workspaceConfig == nil { + glog.Info("No workspace config found; proceeding without a default PVC spec") + return nil, nil + } + var pvcSpec corev1.PersistentVolumeClaimSpec + if err := workspaceConfig.UnmarshalKey("volumeclaimtemplatespec", &pvcSpec); err != nil { + return nil, fmt.Errorf("failed to unmarshal workspace.volumeclaimtemplatespec: %w", err) + } + if len(pvcSpec.AccessModes) == 0 || pvcSpec.StorageClassName == nil || *pvcSpec.StorageClassName == "" { + return nil, fmt.Errorf("invalid workspace.volumeclaimtemplatespec: must specify accessModes and storageClassName") + } + + return &pvcSpec, nil +} diff --git a/backend/src/apiserver/model/common.go b/backend/src/apiserver/model/common.go new file mode 100644 index 00000000000..2cc954f9f5b --- /dev/null +++ b/backend/src/apiserver/model/common.go @@ -0,0 +1,74 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "database/sql/driver" + "encoding/json" + "fmt" + + "gorm.io/gorm" + "gorm.io/gorm/schema" +) + +// LargeText is a custom data type defined per GORM's recommendation for dialect-aware +// large-text columns. It implements GormDBDataTypeInterface to return the appropriate +// SQL type for each dialect (e.g., LONGTEXT for MySQL, TEXT for others). 
+// For details, see https://gorm.io/docs/data_types.html#GormDataTypeInterface +type LargeText string + +func (LargeText) GormDBDataType(db *gorm.DB, field *schema.Field) string { + switch db.Name() { + case "mysql": + return "LONGTEXT" + default: + return "TEXT" + } +} + +func (lt LargeText) String() string { + return string(lt) +} + +func (lt LargeText) Value() (driver.Value, error) { + return string(lt), nil +} + +func (lt *LargeText) Scan(src any) error { + switch v := src.(type) { + case string: + *lt = LargeText(v) + case []byte: + *lt = LargeText(string(v)) + case nil: + *lt = "" + default: + return fmt.Errorf("unsupported type %T for LargeText", v) + } + return nil +} + +func (lt LargeText) MarshalJSON() ([]byte, error) { + return json.Marshal(string(lt)) +} + +func (lt *LargeText) UnmarshalJSON(b []byte) error { + var s string + if err := json.Unmarshal(b, &s); err != nil { + return err + } + *lt = LargeText(s) + return nil +} diff --git a/backend/src/apiserver/model/db_status.go b/backend/src/apiserver/model/db_status.go index a4db08aeb1a..9fccda3915f 100644 --- a/backend/src/apiserver/model/db_status.go +++ b/backend/src/apiserver/model/db_status.go @@ -15,5 +15,5 @@ package model type DBStatus struct { - HaveSamplesLoaded bool `gorm:"column:HaveSamplesLoaded; not null; primary_key;"` + HaveSamplesLoaded bool `gorm:"column:HaveSamplesLoaded; not null; primaryKey;"` } diff --git a/backend/src/apiserver/model/default_experiment.go b/backend/src/apiserver/model/default_experiment.go index 76de3b9d2c3..4ec7ed0bdb3 100644 --- a/backend/src/apiserver/model/default_experiment.go +++ b/backend/src/apiserver/model/default_experiment.go @@ -15,5 +15,6 @@ package model type DefaultExperiment struct { - DefaultExperimentId string `gorm:"column:DefaultExperimentId; not null; primary_key;"` + // nolint:staticcheck // [ST1003] Field name matches upstream legacy naming + DefaultExperimentId string `gorm:"column:DefaultExperimentId; not null; primaryKey;type:varchar(191);"` } diff --git a/backend/src/apiserver/model/experiment.go b/backend/src/apiserver/model/experiment.go index 0adde8e8f42..17d2ba8b9f9 100644 --- a/backend/src/apiserver/model/experiment.go +++ b/backend/src/apiserver/model/experiment.go @@ -15,17 +15,24 @@ package model type Experiment struct { - UUID string `gorm:"column:UUID; not null; primary_key;"` - Name string `gorm:"column:Name; not null; unique_index:idx_name_namespace;"` - Description string `gorm:"column:Description; not null;"` - CreatedAtInSec int64 `gorm:"column:CreatedAtInSec; not null;"` - LastRunCreatedAtInSec int64 `gorm:"column:LastRunCreatedAtInSec; not null;"` - Namespace string `gorm:"column:Namespace; not null; unique_index:idx_name_namespace;"` - StorageState StorageState `gorm:"column:StorageState; not null;"` + UUID string `gorm:"column:UUID; not null; primaryKey;type:varchar(191);"` + // For details on type lengths and index safety, refer to comments in the Pipeline struct. + Name string `gorm:"column:Name; not null; uniqueIndex:idx_name_namespace; type:varchar(128);"` + Description string `gorm:"column:Description; not null;"` + CreatedAtInSec int64 `gorm:"column:CreatedAtInSec; not null;"` + LastRunCreatedAtInSec int64 `gorm:"column:LastRunCreatedAtInSec; not null;"` + // For details on type lengths and index safety, refer to comments in the Pipeline struct. 
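A minimal sketch of how the LargeText type above behaves at the driver and JSON boundaries. The package main wrapper and sample values are illustrative only; the import path assumes this repository's module layout, and the sketch uses only the methods defined in model/common.go in this diff:

    package main

    import (
        "encoding/json"
        "fmt"

        "github.com/kubeflow/pipelines/backend/src/apiserver/model"
    )

    func main() {
        lt := model.LargeText(`{"pipelineInfo": {"name": "hello-world"}}`)

        // driver.Valuer: stored as a plain string, so existing rows keep working.
        v, _ := lt.Value()
        fmt.Printf("driver value: %v\n", v)

        // sql.Scanner: both string and []byte column values round-trip.
        var scanned model.LargeText
        _ = scanned.Scan([]byte("scanned from a TEXT/LONGTEXT column"))
        fmt.Println(scanned.String())

        // JSON (un)marshaling is transparent; the field serializes like a string.
        b, _ := json.Marshal(struct{ Spec model.LargeText }{Spec: lt})
        fmt.Println(string(b))
    }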
+ Namespace string `gorm:"column:Namespace; not null; uniqueIndex:idx_name_namespace; type:varchar(63)"` + StorageState StorageState `gorm:"column:StorageState; not null;"` } // Note: Experiment.StorageState can have values: "STORAGE_STATE_UNSPECIFIED", "AVAILABLE" or "ARCHIVED" +// TableName overrides GORM's table name inference. +func (Experiment) TableName() string { + return "experiments" +} + func (e Experiment) GetValueOfPrimaryKey() string { return e.UUID } diff --git a/backend/src/apiserver/model/job.go b/backend/src/apiserver/model/job.go index 7cdead7e8bd..c3b49031b2c 100644 --- a/backend/src/apiserver/model/job.go +++ b/backend/src/apiserver/model/job.go @@ -86,7 +86,7 @@ func (s StatusState) ToV2() StatusState { } type Job struct { - UUID string `gorm:"column:UUID; not null; primary_key;"` + UUID string `gorm:"column:UUID; not null; primaryKey; type:varchar(191);"` DisplayName string `gorm:"column:DisplayName; not null;"` /* The name that user provides. Can contain special characters*/ K8SName string `gorm:"column:Name; not null;"` /* The name of the K8s resource. Follow regex '[a-z0-9]([-a-z0-9]*[a-z0-9])?'*/ Namespace string `gorm:"column:Namespace; not null;"` @@ -98,9 +98,15 @@ type Job struct { UpdatedAtInSec int64 `gorm:"column:UpdatedAtInSec; default:0;"` Enabled bool `gorm:"column:Enabled; not null;"` ExperimentId string `gorm:"column:ExperimentUUID; not null;"` + // ResourceReferences are deprecated. Use Namespace, ExperimentId // PipelineSpec.PipelineId, PipelineSpec.PipelineVersionId - ResourceReferences []*ResourceReference + + // ResourceReferences are logical links to other KFP resources. + // These references are not unidirectional (i.e., no clear ownership), + // so we avoid declaring a foreign key. The field is excluded from GORM mapping + // to prevent auto-inferred relations and schema validation errors. + ResourceReferences []*ResourceReference `gorm:"-"` Trigger PipelineSpec Conditions string `gorm:"column:Conditions; not null;"` diff --git a/backend/src/apiserver/model/pipeline.go b/backend/src/apiserver/model/pipeline.go index 3760e7b6cda..6ffedf26c2b 100644 --- a/backend/src/apiserver/model/pipeline.go +++ b/backend/src/apiserver/model/pipeline.go @@ -31,17 +31,27 @@ const ( ) type Pipeline struct { - UUID string `gorm:"column:UUID; not null; primary_key;"` + UUID string `gorm:"column:UUID; not null; primaryKey;type:varchar(64);"` CreatedAtInSec int64 `gorm:"column:CreatedAtInSec; not null;"` - Name string `gorm:"column:Name; not null; unique_index:namespace_name;"` // Index improves performance of the List ang Get queries - DisplayName string `gorm:"column:DisplayName; not null"` - Description string `gorm:"column:Description; size:65535;"` // Same as below, set size to large number so it will be stored as longtext + // Name is limited to varchar(128) to ensure the composite index (Namespace, Name) + // stays within MySQL’s 767-byte prefix limit for indexable columns. + // MySQL uses utf8mb4 encoding by default, where each character can take up to 4 bytes. + // In the worst case: 63 (namespace) * 4 + 128 (name) * 4 = 764 bytes ≤ 767 bytes. + // https://dev.mysql.com/doc/refman/8.4/en/column-indexes.html + // Even though Namespace rarely uses its full 63-character capacity in practice, + // MySQL calculates index length based on declared size, not actual content. + // Therefore, keeping Name at varchar(128) is a safe upper bound. 
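The varchar sizes chosen throughout these models budget against InnoDB's 767-byte index prefix limit under utf8mb4. A quick arithmetic check of the composite indexes described in the comments, with the numbers taken from the gorm tags in this diff (the program itself is illustrative):

    package main

    import "fmt"

    func main() {
        const bytesPerChar = 4 // utf8mb4 worst case: 4 bytes per character
        const indexLimit = 767 // InnoDB index prefix limit the comments budget against

        // pipelines: uniqueIndex namespace_name = Namespace varchar(63) + Name varchar(128)
        fmt.Println("namespace_name:", (63+128)*bytesPerChar, "<=", indexLimit)

        // pipeline_versions: uniqueIndex idx_pipelineid_name = PipelineId varchar(64) + Name varchar(127)
        fmt.Println("idx_pipelineid_name:", (64+127)*bytesPerChar, "<=", indexLimit)

        // run_details: ExperimentUUID varchar(64) + Conditions varchar(125) + FinishedAtInSec (8-byte integer)
        fmt.Println("experimentuuid_conditions_finishedatinsec:", (64+125)*bytesPerChar+8, "<=", indexLimit)
    }

Each index works out to 764 bytes, just under the 767-byte ceiling.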
+ Name string `gorm:"column:Name; not null; uniqueIndex:namespace_name; type:varchar(128);"` // Index improves performance of the List and Get queries + DisplayName string `gorm:"column:DisplayName; not null"` + Description LargeText `gorm:"column:Description; not null"` // TODO(gkcalat): this is deprecated. Consider removing and adding data migration logic at the server startup. - Parameters string `gorm:"column:Parameters; size:65535;"` + Parameters LargeText `gorm:"column:Parameters;"` Status PipelineStatus `gorm:"column:Status; not null;"` // TODO(gkcalat): this is deprecated. Consider removing and adding data migration logic at the server startup. DefaultVersionId string `gorm:"column:DefaultVersionId;"` // deprecated - Namespace string `gorm:"column:Namespace; unique_index:namespace_name; size:63;"` + // Namespace is restricted to varchar(63) due to Kubernetes' naming constraints: + // https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-label-names + Namespace string `gorm:"column:Namespace; uniqueIndex:namespace_name; type:varchar(63);"` } func (p Pipeline) GetValueOfPrimaryKey() string { @@ -83,6 +93,11 @@ func (p *Pipeline) GetModelName() string { return "pipelines" } +// TableName overrides GORM's table name inference. +func (Pipeline) TableName() string { + return "pipelines" +} + func (p *Pipeline) GetField(name string) (string, bool) { if field, ok := pipelineAPIToModelFieldMap[name]; ok { return field, true diff --git a/backend/src/apiserver/model/pipeline_spec.go b/backend/src/apiserver/model/pipeline_spec.go index 2f92c5c68bd..6fefde7c4f0 100644 --- a/backend/src/apiserver/model/pipeline_spec.go +++ b/backend/src/apiserver/model/pipeline_spec.go @@ -27,21 +27,22 @@ type PipelineSpec struct { PipelineName string `gorm:"column:PipelineName; not null;"` // Pipeline YAML definition. This is the pipeline interface for creating a pipeline. - // Set size to 65535 so it will be stored as longtext. - // https://dev.mysql.com/doc/refman/8.0/en/column-count-limit.html - // TODO(gkcalat): consider increasing the size limit > 32MB (<4GB for MySQL, and <1GB for PostgreSQL). - PipelineSpecManifest string `gorm:"column:PipelineSpecManifest; size:33554432;"` + // Stored as longtext to support large manifests (up to 4GB in MySQL). + // Stored as text in PostgreSQL. + // https://dev.mysql.com/doc/refman/8.0/en/blob.html + // TODO(kaikaila): consider enforcing a soft limit if needed for performance. + PipelineSpecManifest LargeText `gorm:"column:PipelineSpecManifest;"` // Argo workflow YAML definition. This is the Argo Spec converted from Pipeline YAML. // This is deprecated. Use the pipeline ID, pipeline version ID, or pipeline spec manifest. - WorkflowSpecManifest string `gorm:"column:WorkflowSpecManifest; size:33554432;"` + WorkflowSpecManifest LargeText `gorm:"column:WorkflowSpecManifest;"` // Store parameters key-value pairs as serialized string. // This field is only used for V1 API. For V2, use the `Parameters` field in RuntimeConfig. // At most one of the fields `Parameters` and `RuntimeConfig` can be non-empty // This string stores an array of map[string]value. For example: // {"param1": Value1} will be stored as [{"name": "param1", "value":"value1"}]. - Parameters string `gorm:"column:Parameters; size:65535;"` + Parameters LargeText `gorm:"column:Parameters;"` // Runtime config of the pipeline, only used for v2 template in API v1beta1 API. 
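The explicit TableName() overrides added to these models pin the table names instead of relying on GORM's inference. A small sketch of what the default and a non-default naming strategy would otherwise produce; gorm.io/gorm/schema is already imported by model/common.go in this diff, and the printed values are what the current GORM naming strategy yields:

    package main

    import (
        "fmt"

        "gorm.io/gorm/schema"
    )

    func main() {
        // Default strategy: snake_case plus pluralization, matching the overrides.
        fmt.Println(schema.NamingStrategy{}.TableName("Pipeline"))        // pipelines
        fmt.Println(schema.NamingStrategy{}.TableName("PipelineVersion")) // pipeline_versions

        // A differently configured strategy would silently change the inferred name,
        // which is what the explicit TableName() methods guard against.
        fmt.Println(schema.NamingStrategy{SingularTable: true}.TableName("Pipeline")) // pipeline
    }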
RuntimeConfig diff --git a/backend/src/apiserver/model/pipeline_version.go b/backend/src/apiserver/model/pipeline_version.go index c5ef19cd453..00b80872698 100644 --- a/backend/src/apiserver/model/pipeline_version.go +++ b/backend/src/apiserver/model/pipeline_version.go @@ -29,23 +29,29 @@ const ( ) type PipelineVersion struct { - UUID string `gorm:"column:UUID; not null; primary_key;"` - CreatedAtInSec int64 `gorm:"column:CreatedAtInSec; not null; index;"` - Name string `gorm:"column:Name; not null; unique_index:idx_pipelineid_name;"` - DisplayName string `gorm:"column:DisplayName; not null"` + UUID string `gorm:"column:UUID; not null; primaryKey;type:varchar(191);"` + CreatedAtInSec int64 `gorm:"column:CreatedAtInSec; not null; index:idx_pipeline_versions_CreatedAtInSec;"` + // Explicitly specify varchar(127) + // so that the combined index (PipelineId + Name) does not exceed 767 bytes in utf8mb4, + // For details on type lengths and index safety, refer to comments in the Pipeline struct. + Name string `gorm:"column:Name; not null; type:varchar(127); uniqueIndex:idx_pipelineid_name;"` + DisplayName string `gorm:"column:DisplayName; not null"` // TODO(gkcalat): this is deprecated. Consider removing and adding data migration logic at the server startup. - Parameters string `gorm:"column:Parameters; not null; size:65535;"` // deprecated + Parameters LargeText `gorm:"column:Parameters; not null;"` // deprecated // PipelineVersion belongs to Pipeline. If a pipeline with a specific UUID // is deleted from Pipeline table, all this pipeline's versions will be // deleted from PipelineVersion table. - PipelineId string `gorm:"column:PipelineId; not null; index; unique_index:idx_pipelineid_name;"` - // Pipeline *Pipeline `gorm:"foreignKey:PipelineId; constraint:OnUpdate:CASCADE,OnDelete:SET NULL;"` - Status PipelineVersionStatus `gorm:"column:Status; not null;"` + // Explicitly specify varchar(64). Refer to Name field comments for details. + // nolint:staticcheck // [ST1003] Field name matches upstream legacy naming + PipelineId string `gorm:"column:PipelineId; not null; index:idx_pipeline_versions_PipelineId; uniqueIndex:idx_pipelineid_name; type:varchar(64)"` + Pipeline Pipeline `gorm:"foreignKey:PipelineId; references:UUID;constraint:pipeline_versions_PipelineId_pipelines_UUID_foreign,OnDelete:CASCADE,OnUpdate:CASCADE"` // This 'belongs to' relation replaces the legacy AddForeignKey constraint previously defined in client_manager.go + Status PipelineVersionStatus `gorm:"column:Status; not null;"` // Code source url links to the pipeline version's definition in repo. - CodeSourceUrl string `gorm:"column:CodeSourceUrl;"` - Description string `gorm:"column:Description; size:65535;"` // Set size to large number so it will be stored as longtext - PipelineSpec string `gorm:"column:PipelineSpec; not null; size:33554432;"` // Same as common.MaxFileLength (32MB in server). Argo imposes 700kB limit - PipelineSpecURI string `gorm:"column:PipelineSpecURI; not null; size:65535;"` // Can store references to ObjectStore files + // nolint:staticcheck // [ST1003] Field name matches upstream legacy naming + CodeSourceUrl string `gorm:"column:CodeSourceUrl;"` + Description LargeText `gorm:"column:Description;"` + PipelineSpec LargeText `gorm:"column:PipelineSpec; not null;"` // Same as common.MaxFileLength (32MB in server). 
Argo imposes 700kB limit + PipelineSpecURI LargeText `gorm:"column:PipelineSpecURI; not null;"` // Can store references to ObjectStore files } func (p PipelineVersion) GetValueOfPrimaryKey() string { @@ -82,6 +88,10 @@ func (p *PipelineVersion) GetModelName() string { return "pipeline_versions" } +// TableName overrides GORM's table name inference. +func (PipelineVersion) TableName() string { + return "pipeline_versions" +} func (p *PipelineVersion) GetField(name string) (string, bool) { if field, ok := p.APIToModelFieldMap()[name]; ok { return field, true diff --git a/backend/src/apiserver/model/resource_reference.go b/backend/src/apiserver/model/resource_reference.go index 511a51fdf99..a5f7af2e6ca 100644 --- a/backend/src/apiserver/model/resource_reference.go +++ b/backend/src/apiserver/model/resource_reference.go @@ -147,27 +147,28 @@ type ResourceType string type Relationship string // Resource reference table models the relationship between resources in a loosely coupled way. +// This model has a composite primary key consisting of ResourceUUID, ResourceType, and ReferenceType. type ResourceReference struct { // ID of the resource object - ResourceUUID string `gorm:"column:ResourceUUID; not null; primary_key;"` + ResourceUUID string `gorm:"column:ResourceUUID; not null; primaryKey; type:varchar(191);"` // The type of the resource object - ResourceType ResourceType `gorm:"column:ResourceType; not null; primary_key; index:referencefilter;"` + ResourceType ResourceType `gorm:"column:ResourceType; not null; primaryKey; index:referencefilter;"` - // The ID of the resource that been referenced to. - ReferenceUUID string `gorm:"column:ReferenceUUID; not null; index:referencefilter;"` + // The ID of the referenced resource. + ReferenceUUID string `gorm:"column:ReferenceUUID; not null; index:referencefilter; type:varchar(191);"` - // The name of the resource that been referenced to. + // The name of the referenced resource. ReferenceName string `gorm:"column:ReferenceName; not null;"` - // The type of the resource that been referenced to. - ReferenceType ResourceType `gorm:"column:ReferenceType; not null; primary_key; index:referencefilter;"` + // The type of the referenced resource. + ReferenceType ResourceType `gorm:"column:ReferenceType; not null; primaryKey; index:referencefilter;"` - // The relationship between the resource object and the resource that been referenced to. + // The relationship between the resource object and the referenced resource. Relationship Relationship `gorm:"column:Relationship; not null;"` - // The json formatted blob of the resource reference. - Payload string `gorm:"column:Payload; not null; size:65535;"` + // JSON-encoded metadata blob about the reference + Payload LargeText `gorm:"column:Payload; not null;"` } type ReferenceKey struct { diff --git a/backend/src/apiserver/model/run.go b/backend/src/apiserver/model/run.go index 5bf19fb64d3..3b2b4b99117 100644 --- a/backend/src/apiserver/model/run.go +++ b/backend/src/apiserver/model/run.go @@ -205,22 +205,29 @@ func (Run) TableName() string { } type Run struct { - UUID string `gorm:"column:UUID; not null; primary_key"` + UUID string `gorm:"column:UUID; not null; primaryKey;type:varchar(191);"` DisplayName string `gorm:"column:DisplayName; not null;"` /* The name that user provides. Can contain special characters*/ K8SName string `gorm:"column:Name; not null;"` /* The name of the K8s resource. 
Follow regex '[a-z0-9]([-a-z0-9]*[a-z0-9])?'*/ Description string `gorm:"column:Description; not null;"` - - Namespace string `gorm:"column:Namespace; not null;"` - ExperimentId string `gorm:"column:ExperimentUUID; not null;"` + // Namespace is restricted to varchar(63) due to Kubernetes' naming constraints: + // https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-label-names + Namespace string `gorm:"column:Namespace; type:varchar(63); not null;index:namespace_createatinsec,priority:1; index:namespace_conditions_finishedatinsec,priority:1"` + // varchar(64) is carefully chosen to ensure composite index constraints remain + // within MySQL's 767-byte limit + // e.g., ExperimentId(varchar(64)) + Conditions(varchar(125)) + FinishedAtInSec(8 bytes) = 764 bytes < 767 bytes + // For details on type lengths and index safety, refer to comments in the Pipeline struct. + // nolint:staticcheck // [ST1003] Field name matches upstream legacy naming + ExperimentId string `gorm:"column:ExperimentUUID;type:varchar(64); not null; index:experimentuuid_createatinsec,priority:1; index:experimentuuid_conditions_finishedatinsec,priority:1"` RecurringRunId string `gorm:"column:JobUUID; default:null;"` StorageState StorageState `gorm:"column:StorageState; not null;"` ServiceAccount string `gorm:"column:ServiceAccount; not null;"` - Metrics []*RunMetric + Metrics []*RunMetric `gorm:"foreignKey:RunUUID;references:UUID;constraint:run_metrics_RunUUID_run_details_UUID_foreign,OnDelete:CASCADE,OnUpdate:CASCADE"` // This 'has-many' relation replaces the legacy AddForeignKey constraint previously defined in client_manager.go // ResourceReferences are deprecated. Use Namespace, ExperimentId, // RecurringRunId, PipelineSpec.PipelineId, PipelineSpec.PipelineVersionId - ResourceReferences []*ResourceReference + // gorm:"-" tag is added to avoid declaring a foreign key. Refer to Job model for reasons. + ResourceReferences []*ResourceReference `gorm:"-"` PipelineSpec @@ -301,30 +308,37 @@ func (r *Run) ToV2() *Run { // Stores runtime information about a pipeline run. type RunDetails struct { - CreatedAtInSec int64 `gorm:"column:CreatedAtInSec; not null;"` + CreatedAtInSec int64 `gorm:"column:CreatedAtInSec; not null;index:experimentuuid_createatinsec,priority:2; index:namespace_createatinsec,priority:2"` ScheduledAtInSec int64 `gorm:"column:ScheduledAtInSec; default:0;"` - FinishedAtInSec int64 `gorm:"column:FinishedAtInSec; default:0;"` + FinishedAtInSec int64 `gorm:"column:FinishedAtInSec; default:0; index:experimentuuid_conditions_finishedatinsec,priority:3;index:namespace_conditions_finishedatinsec,priority:3"` // Conditions were deprecated. Use State instead. - Conditions string `gorm:"column:Conditions; not null;"` + // varchar(125) is carefully chosen to ensure composite index constraints remain + // within MySQL's 767-byte limit (e.g., when combined with ExperimentId and FinishedAtInSec). + // For details on type lengths and index safety, refer to comments in the Pipeline struct. 
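The foreignKey/constraint tags declared above (Run.Metrics here, and the Task.Run relation later in this diff) replace the AddForeignKey calls previously issued by client_manager.go. A sketch of how such tags take effect during migration; the function name and the standalone AutoMigrate call are hypothetical, since the real migration wiring stays in client_manager.go:

    package migrate

    import (
        "fmt"

        "gorm.io/gorm"

        "github.com/kubeflow/pipelines/backend/src/apiserver/model"
    )

    // ensureRunRelations is a sketch: AutoMigrate reads the relation tags and
    // emits the foreign keys (with the declared ON DELETE / ON UPDATE behavior).
    func ensureRunRelations(db *gorm.DB) error {
        if err := db.AutoMigrate(&model.Run{}, &model.RunMetric{}, &model.Task{}); err != nil {
            return err
        }
        // A relation-backed constraint can be checked via its field name.
        if !db.Migrator().HasConstraint(&model.Run{}, "Metrics") {
            return fmt.Errorf("expected the run_metrics -> run_details foreign key to exist")
        }
        return nil
    }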
+ Conditions string `gorm:"column:Conditions; type:varchar(125); not null; index:experimentuuid_conditions_finishedatinsec,priority:2;index:namespace_conditions_finishedatinsec,priority:2"` State RuntimeState `gorm:"column:State; default:null;"` - StateHistoryString string `gorm:"column:StateHistory; default:null; size:65535;"` + StateHistoryString LargeText `gorm:"column:StateHistory; default:null;"` StateHistory []*RuntimeStatus `gorm:"-;"` // Serialized runtime details of a run in v2beta1 - PipelineRuntimeManifest string `gorm:"column:PipelineRuntimeManifest; not null; size:33554432;"` + PipelineRuntimeManifest LargeText `gorm:"column:PipelineRuntimeManifest; not null;"` // Serialized Argo CRD in v1beta1 - WorkflowRuntimeManifest string `gorm:"column:WorkflowRuntimeManifest; not null; size:33554432;"` - PipelineContextId int64 `gorm:"column:PipelineContextId; default:0;"` - PipelineRunContextId int64 `gorm:"column:PipelineRunContextId; default:0;"` - TaskDetails []*Task + WorkflowRuntimeManifest LargeText `gorm:"column:WorkflowRuntimeManifest; not null;"` + // nolint:staticcheck // [ST1003] Field name matches upstream legacy naming + PipelineContextId int64 `gorm:"column:PipelineContextId; default:0;"` + // nolint:staticcheck // [ST1003] Field name matches upstream legacy naming + PipelineRunContextId int64 `gorm:"column:PipelineRunContextId; default:0;"` + // Add gorm:"-" so that GORM ignores TaskDetails when generating schema. + // This avoids GORM auto-detecting the circular relationship (RunDetails <--> Tasks) and blocking the FK on tasks.RunUUID → run_details.UUID. + TaskDetails []*Task `gorm:"-"` } type RunMetric struct { - RunUUID string `gorm:"column:RunUUID; not null; primary_key;"` - NodeID string `gorm:"column:NodeID; not null; primary_key;"` - Name string `gorm:"column:Name; not null; primary_key;"` - NumberValue float64 `gorm:"column:NumberValue;"` - Format string `gorm:"column:Format;"` - Payload string `gorm:"column:Payload; not null; size:65535;"` + RunUUID string `gorm:"column:RunUUID; not null; primaryKey; type:varchar(191);"` + NodeID string `gorm:"column:NodeID; not null; primaryKey; type:varchar(191);"` + Name string `gorm:"column:Name; not null; primaryKey; type:varchar(191);"` + NumberValue float64 `gorm:"column:NumberValue;"` + Format string `gorm:"column:Format;"` + Payload LargeText `gorm:"column:Payload; not null;"` } type RuntimeStatus struct { diff --git a/backend/src/apiserver/model/runtime_config.go b/backend/src/apiserver/model/runtime_config.go index f58ef1e8f05..ca2de08b7d5 100644 --- a/backend/src/apiserver/model/runtime_config.go +++ b/backend/src/apiserver/model/runtime_config.go @@ -16,10 +16,10 @@ package model type RuntimeConfig struct { // Store parameters key-value pairs as serialized string. - Parameters string `gorm:"column:RuntimeParameters; size:65535;"` + Parameters LargeText `gorm:"column:RuntimeParameters;"` // A path in a object store bucket which will be treated as the root // output directory of the pipeline. It is used by the system to // generate the paths of output artifacts. 
Ref:(https://www.kubeflow.org/docs/components/pipelines/pipeline-root/) - PipelineRoot string `gorm:"column:PipelineRoot; size:65535;"` + PipelineRoot LargeText `gorm:"column:PipelineRoot;"` } diff --git a/backend/src/apiserver/model/task.go b/backend/src/apiserver/model/task.go index 5c5d7092e64..1ed89f513d2 100644 --- a/backend/src/apiserver/model/task.go +++ b/backend/src/apiserver/model/task.go @@ -19,11 +19,15 @@ import ( ) type Task struct { - UUID string `gorm:"column:UUID; not null; primary_key"` + UUID string `gorm:"column:UUID; not null; primaryKey; type:varchar(191);"` Namespace string `gorm:"column:Namespace; not null;"` // PipelineName was deprecated. Use RunId instead. - PipelineName string `gorm:"column:PipelineName; not null;"` - RunId string `gorm:"column:RunUUID; not null;"` + PipelineName string `gorm:"column:PipelineName; not null;"` + // RunId is limited to varchar(191) to make it indexable as a foreign key. + // For details on type lengths and index safety, refer to comments in the Pipeline struct. + // nolint:staticcheck // [ST1003] Field name matches upstream legacy naming + RunId string `gorm:"column:RunUUID; type:varchar(191); not null; index:tasks_RunUUID_run_details_UUID_foreign;"` // Note: field name (RunId) ≠ column name (RunUUID). The former should be the foreign key instead of the letter. + Run Run `gorm:"foreignKey:RunId;references:UUID;constraint:tasks_RunUUID_run_details_UUID_foreign,OnDelete:CASCADE,OnUpdate:CASCADE;"` // A Task belongs to a Run. PodName string `gorm:"column:PodName; not null;"` MLMDExecutionID string `gorm:"column:MLMDExecutionID; not null;"` CreatedTimestamp int64 `gorm:"column:CreatedTimestamp; not null;"` @@ -33,13 +37,13 @@ type Task struct { Name string `gorm:"column:Name; default:null"` ParentTaskId string `gorm:"column:ParentTaskUUID; default:null"` State RuntimeState `gorm:"column:State; default:null;"` - StateHistoryString string `gorm:"column:StateHistory; default:null; size:65535;"` - MLMDInputs string `gorm:"column:MLMDInputs; default:null; size:65535;"` - MLMDOutputs string `gorm:"column:MLMDOutputs; default:null; size:65535;"` - ChildrenPodsString string `gorm:"column:ChildrenPods; default:null; size:65535;"` + StateHistoryString LargeText `gorm:"column:StateHistory; default:null;"` + MLMDInputs LargeText `gorm:"column:MLMDInputs; default:null;"` + MLMDOutputs LargeText `gorm:"column:MLMDOutputs; default:null;"` + ChildrenPodsString LargeText `gorm:"column:ChildrenPods; default:null;"` StateHistory []*RuntimeStatus `gorm:"-;"` ChildrenPods []string `gorm:"-;"` - Payload string `gorm:"column:Payload; default:null; size:65535;"` + Payload LargeText `gorm:"column:Payload; default:null;"` } func (t Task) ToString() string { diff --git a/backend/src/apiserver/resource/resource_manager.go b/backend/src/apiserver/resource/resource_manager.go index 757f632aa2f..843f7235ee8 100644 --- a/backend/src/apiserver/resource/resource_manager.go +++ b/backend/src/apiserver/resource/resource_manager.go @@ -69,19 +69,19 @@ var ( // Count the removed workflows due to garbage collection. 
workflowGCCounter = promauto.NewCounter(prometheus.CounterOpts{ Name: "resource_manager_workflow_gc", - Help: "The number of gabarage-collected workflows", + Help: "The number of garbage-collected workflows", }) - // Count the successfull workflow runs + // Count the successful workflow runs workflowSuccessCounter = promauto.NewGaugeVec(prometheus.GaugeOpts{ Name: "resource_manager_workflow_runs_success", - Help: "The current number of successfully workflows runs", + Help: "The current number of successful workflow runs", }, extraLabels) // Count the failed workflow runs workflowFailedCounter = promauto.NewGaugeVec(prometheus.GaugeOpts{ Name: "resource_manager_workflow_runs_failed", - Help: "The current number of failed workflows runs", + Help: "The current number of failed workflow runs", }, extraLabels) ) @@ -108,8 +108,9 @@ type ClientManagerInterface interface { } type ResourceManagerOptions struct { - CollectMetrics bool `json:"collect_metrics,omitempty"` - CacheDisabled bool `json:"cache_disabled,omitempty"` + CollectMetrics bool `json:"collect_metrics,omitempty"` + CacheDisabled bool `json:"cache_disabled,omitempty"` + DefaultWorkspace *corev1.PersistentVolumeClaimSpec `json:"default_workspace,omitempty"` } type ResourceManager struct { @@ -307,32 +308,58 @@ func (r *ResourceManager) GetPipelineByNameAndNamespaceV1(name string, namespace } // Deletes a pipeline. Does not delete pipeline spec in the object storage. -// Fails if the pipeline has existing pipeline versions. -func (r *ResourceManager) DeletePipeline(pipelineId string) error { +// If cascade is false, fails if the pipeline has existing pipeline versions. +// If cascade is true, deletes all pipeline versions first, then deletes the pipeline. +func (r *ResourceManager) DeletePipeline(pipelineID string, cascade bool) error { // Check if pipeline exists - _, err := r.pipelineStore.GetPipeline(pipelineId) + _, err := r.pipelineStore.GetPipeline(pipelineID) if err != nil { - return util.Wrapf(err, "Failed to delete pipeline with id %v as it was not found", pipelineId) + return util.Wrapf(err, "Failed to delete pipeline with id %v as it was not found", pipelineID) } - // Check if it has no pipeline versions in Ready state - latestPipelineVersion, err := r.pipelineStore.GetLatestPipelineVersion(pipelineId) - if latestPipelineVersion != nil { - return util.NewInvalidInputError("Failed to delete pipeline with id %v as it has existing pipeline versions (e.g. %v)", pipelineId, latestPipelineVersion.UUID) - } else if err.(*util.UserError).ExternalStatusCode() != codes.NotFound { - return util.Wrapf(err, "Failed to delete pipeline with id %v as it failed to check existing pipeline versions", pipelineId) + if cascade { + // Get all pipeline versions for this pipeline and delete them + opts := list.EmptyOptions() + pipelineVersions, _, _, err := r.pipelineStore.ListPipelineVersions(pipelineID, opts) + if err != nil { + return util.Wrapf(err, "Failed to delete pipeline with id %v due to error listing pipeline versions", pipelineID) + } + + // Delete each pipeline version + for _, pipelineVersion := range pipelineVersions { + // Mark pipeline version as deleting so it's not visible to user. 
+ err = r.pipelineStore.UpdatePipelineVersionStatus(pipelineVersion.UUID, model.PipelineVersionDeleting) + if err != nil { + return util.Wrapf(err, "Failed to change the status of pipeline version id %v to DELETING during cascade delete", pipelineVersion.UUID) + } + + // Delete the pipeline version from the database + err = r.pipelineStore.DeletePipelineVersion(pipelineVersion.UUID) + if err != nil { + return util.Wrapf(err, "Failed to delete pipeline version %v during cascade delete of pipeline %v", pipelineVersion.UUID, pipelineID) + } + glog.Infof("Successfully deleted pipeline version %v during cascade delete of pipeline %v", pipelineVersion.UUID, pipelineID) + } + } else { + // Check if it has no pipeline versions in Ready state + latestPipelineVersion, err := r.pipelineStore.GetLatestPipelineVersion(pipelineID) + if latestPipelineVersion != nil { + return util.NewInvalidInputError("Failed to delete pipeline with id %v as it has existing pipeline versions (e.g. %v). Set cascade=true to delete all versions", pipelineID, latestPipelineVersion.UUID) + } else if err.(*util.UserError).ExternalStatusCode() != codes.NotFound { + return util.Wrapf(err, "Failed to delete pipeline with id %v as it failed to check existing pipeline versions", pipelineID) + } } // Mark pipeline as deleting so it's not visible to user. - err = r.pipelineStore.UpdatePipelineStatus(pipelineId, model.PipelineDeleting) + err = r.pipelineStore.UpdatePipelineStatus(pipelineID, model.PipelineDeleting) if err != nil { - return util.Wrapf(err, "Failed to change the status of pipeline id %v to DELETING", pipelineId) + return util.Wrapf(err, "Failed to change the status of pipeline id %v to DELETING", pipelineID) } // Delete a pipeline. - err = r.pipelineStore.DeletePipeline(pipelineId) + err = r.pipelineStore.DeletePipeline(pipelineID) if err != nil { - return util.Wrapf(err, "Failed to delete pipeline DB entry for pipeline id %v", pipelineId) + return util.Wrapf(err, "Failed to delete pipeline DB entry for pipeline id %v", pipelineID) } return nil } @@ -380,11 +407,11 @@ func (r *ResourceManager) CreatePipelineAndPipelineVersion(p *model.Pipeline, pv if err != nil { return nil, nil, util.Wrap(err, "Failed to create a pipeline and a pipeline version as template is broken") } - pv.PipelineSpec = string(pipelineSpecBytes) + pv.PipelineSpec = model.LargeText(string(pipelineSpecBytes)) if pipelineSpecURI != "" { - pv.PipelineSpecURI = pipelineSpecURI + pv.PipelineSpecURI = model.LargeText(pipelineSpecURI) } - tmpl, err := template.New(pipelineSpecBytes, r.options.CacheDisabled) + tmpl, err := template.New(pipelineSpecBytes, r.options.CacheDisabled, r.options.DefaultWorkspace) if err != nil { return nil, nil, util.Wrap(err, "Failed to create a pipeline and a pipeline version due to template creation error") } @@ -423,8 +450,8 @@ func (r *ResourceManager) CreatePipelineAndPipelineVersion(p *model.Pipeline, pv if err != nil { return nil, nil, util.Wrap(err, "Failed to create a pipeline and a pipeline version due to error converting parameters to json") } - pv.Parameters = paramsJSON - pv.PipelineSpec = string(tmpl.Bytes()) + pv.Parameters = model.LargeText(paramsJSON) + pv.PipelineSpec = model.LargeText(string(tmpl.Bytes())) // Create records in KFP DB (both pipelines and pipeline_versions tables) newPipeline, newVersion, err := r.pipelineStore.CreatePipelineAndPipelineVersion(p, pv) @@ -517,9 +544,10 @@ func (r *ResourceManager) CreateRun(ctx context.Context, run *model.Run) (*model } run.RunDetails.CreatedAtInSec = 
r.time.Now().Unix() runWorkflowOptions := template.RunWorkflowOptions{ - RunId: run.UUID, - RunAt: run.RunDetails.CreatedAtInSec, - CacheDisabled: r.options.CacheDisabled, + RunID: run.UUID, + RunAt: run.CreatedAtInSec, + CacheDisabled: r.options.CacheDisabled, + DefaultWorkspace: r.options.DefaultWorkspace, } executionSpec, err := tmpl.RunWorkflow(run, runWorkflowOptions) if err != nil { @@ -567,13 +595,13 @@ func (r *ResourceManager) CreateRun(ctx context.Context, run *model.Run) (*model // TODO(gkcalat): consider to avoid updating runtime manifest at create time and let // persistence agent update the runtime data. if tmpl.GetTemplateType() == template.V1 && run.RunDetails.WorkflowRuntimeManifest == "" { - run.RunDetails.WorkflowRuntimeManifest = newExecSpec.ToStringForStore() - run.PipelineSpec.WorkflowSpecManifest = manifest + run.WorkflowRuntimeManifest = model.LargeText(newExecSpec.ToStringForStore()) + run.WorkflowSpecManifest = model.LargeText(manifest) } else if tmpl.GetTemplateType() == template.V2 { - run.RunDetails.PipelineRuntimeManifest = newExecSpec.ToStringForStore() - run.PipelineSpec.PipelineSpecManifest = manifest + run.PipelineRuntimeManifest = model.LargeText(newExecSpec.ToStringForStore()) + run.PipelineSpecManifest = model.LargeText(manifest) } else { - run.PipelineSpec.PipelineSpecManifest = manifest + run.PipelineSpecManifest = model.LargeText(manifest) } // Assign the scheduled at time if run.RunDetails.ScheduledAtInSec == 0 { @@ -604,7 +632,6 @@ func (r *ResourceManager) ReconcileSwfCrs(ctx context.Context) error { opts := list.EmptyOptions() jobs, _, _, err := r.jobStore.ListJobs(filterContext, opts) - if err != nil { return util.Wrap(err, "Failed to reconcile ScheduledWorkflow Kubernetes resources") } @@ -627,7 +654,7 @@ func (r *ResourceManager) ReconcileSwfCrs(ctx context.Context) error { return failedToReconcileSwfCrsError(err) } - newScheduledWorkflow, err := tmpl.ScheduledWorkflow(jobs[i], r.getOwnerReferences()) + newScheduledWorkflow, err := tmpl.ScheduledWorkflow(jobs[i]) if err != nil { return failedToReconcileSwfCrsError(err) } @@ -722,7 +749,7 @@ func (r *ResourceManager) UnarchiveRun(runId string) error { if experiment.StorageState.ToV2() == model.StorageStateArchived { return util.NewFailedPreconditionError( errors.New("Unarchive the experiment first to allow the run to be restored"), - fmt.Sprintf("Failed to unarchive run %v as experiment %v must be un-archived first", runId, run.ExperimentId), + "%s", fmt.Sprintf("Failed to unarchive run %v as experiment %v must be un-archived first", runId, run.ExperimentId), ) } if err := r.runStore.UnarchiveRun(runId); err != nil { @@ -896,7 +923,7 @@ func (r *ResourceManager) RetryRun(ctx context.Context, runId string) error { } if err := execSpec.CanRetry(); err != nil { - return util.NewInternalServerError(err, "Failed to retry run %s as it does not allow reties", runId) + return util.NewInternalServerError(err, "Failed to retry run %s as it does not allow retries", runId) } newExecSpec, podsToDelete, err := execSpec.GenerateRetryExecution() @@ -932,7 +959,7 @@ func (r *ResourceManager) RetryRun(ctx context.Context, runId string) error { newExecSpec = newCreatedWorkflow } condition := string(newExecSpec.ExecutionStatus().Condition()) - err = r.runStore.UpdateRun(&model.Run{UUID: runId, RunDetails: model.RunDetails{Conditions: condition, FinishedAtInSec: 0, WorkflowRuntimeManifest: newExecSpec.ToStringForStore(), State: model.RuntimeState(condition).ToV2()}}) + err = r.runStore.UpdateRun(&model.Run{UUID: 
runId, RunDetails: model.RunDetails{Conditions: condition, FinishedAtInSec: 0, WorkflowRuntimeManifest: model.LargeText(newExecSpec.ToStringForStore()), State: model.RuntimeState(condition).ToV2()}}) if err != nil { return util.NewInternalServerError(err, "Failed to retry run %s due to error updating entry", runId) } @@ -954,7 +981,7 @@ func (r *ResourceManager) ReadLog(ctx context.Context, runId string, nodeId stri } err = r.readRunLogFromPod(ctx, namespace, nodeId, follow, dst) if err != nil && r.logArchive != nil { - err = r.readRunLogFromArchive(run.WorkflowRuntimeManifest, nodeId, dst) + err = r.readRunLogFromArchive(string(run.WorkflowRuntimeManifest), nodeId, dst) if err != nil { return util.NewBadRequestError(err, "Failed to read logs for run %v", runId) } @@ -1101,7 +1128,7 @@ func (r *ResourceManager) CreateJob(ctx context.Context, job *model.Job) (*model // TODO(gkcalat): consider changing the flow. Other resource UUIDs are assigned by their respective stores (DB). // Convert modelJob into scheduledWorkflow. - scheduledWorkflow, err = tmpl.ScheduledWorkflow(job, r.getOwnerReferences()) + scheduledWorkflow, err = tmpl.ScheduledWorkflow(job) if err != nil { return nil, util.Wrap(err, "Failed to create a recurring run during scheduled workflow creation") } @@ -1116,28 +1143,28 @@ func (r *ResourceManager) CreateJob(ctx context.Context, job *model.Job) (*model return nil, util.Wrap(err, "Failed to validate the input parameters on the latest pipeline version") } - tmpl, err := template.New(manifest, r.options.CacheDisabled) + tmpl, err := template.New(manifest, r.options.CacheDisabled, r.options.DefaultWorkspace) if err != nil { return nil, util.Wrap(err, "Failed to fetch a template with an invalid pipeline spec manifest") } - _, err = tmpl.ScheduledWorkflow(job, r.getOwnerReferences()) + _, err = tmpl.ScheduledWorkflow(job) if err != nil { return nil, util.Wrap(err, "Failed to validate the input parameters on the latest pipeline version") } - scheduledWorkflow, err = template.NewGenericScheduledWorkflow(job, r.getOwnerReferences()) + scheduledWorkflow, err = template.NewGenericScheduledWorkflow(job) if err != nil { return nil, util.Wrap(err, "Failed to create a recurring run during scheduled workflow creation") } - parameters, err := template.StringMapToCRDParameters(job.RuntimeConfig.Parameters) + parameters, err := template.StringMapToCRDParameters(string(job.RuntimeConfig.Parameters)) if err != nil { return nil, util.Wrap(err, "Converting runtime config's parameters to CDR parameters failed") } scheduledWorkflow.Spec.Workflow = &scheduledworkflow.WorkflowResource{ - Parameters: parameters, PipelineRoot: job.PipelineRoot, + Parameters: parameters, PipelineRoot: string(job.PipelineRoot), } } @@ -1171,35 +1198,14 @@ func (r *ResourceManager) CreateJob(ctx context.Context, job *model.Job) (*model } } job.ServiceAccount = serviceAccount - job.PipelineSpec.WorkflowSpecManifest = manifest + job.WorkflowSpecManifest = model.LargeText(manifest) } else { job.ServiceAccount = newScheduledWorkflow.Spec.ServiceAccount - job.PipelineSpec.PipelineSpecManifest = manifest + job.PipelineSpecManifest = model.LargeText(manifest) } return r.jobStore.CreateJob(job) } -func (r *ResourceManager) getOwnerReferences() []v1.OwnerReference { - ownerName := common.GetStringConfigWithDefault("OWNER_NAME", "") - ownerAPIVersion := common.GetStringConfigWithDefault("OWNER_API_VERSION", "") - ownerKind := common.GetStringConfigWithDefault("OWNER_KIND", "") - ownerUID := 
types.UID(common.GetStringConfigWithDefault("OWNER_UID", "")) - - if ownerName == "" || ownerAPIVersion == "" || ownerKind == "" || ownerUID == "" { - glog.Info("Missing ScheduledWorkflow owner fields. Proceeding without OwnerReferences") - return []v1.OwnerReference{} - } else { - return []v1.OwnerReference{ - { - APIVersion: ownerAPIVersion, - Kind: ownerKind, - Name: ownerName, - UID: ownerUID, - }, - } - } -} - // Enables or disables a recurring run with given id. func (r *ResourceManager) ChangeJobMode(ctx context.Context, jobId string, enable bool) error { job, err := r.GetJob(jobId) @@ -1321,7 +1327,7 @@ func (r *ResourceManager) ReportWorkflowResource(ctx context.Context, execSpec u run.State = state run.Conditions = string(state.ToV1()) run.FinishedAtInSec = execStatus.FinishedAt() - run.WorkflowRuntimeManifest = execSpec.ToStringForStore() + run.WorkflowRuntimeManifest = model.LargeText(execSpec.ToStringForStore()) if updateError = r.runStore.UpdateRun(run); updateError != nil { return nil, util.Wrapf(updateError, "Failed to report a workflow for existing run %s during updating the run. Check if the run entry is corrupted", runId) } @@ -1363,7 +1369,7 @@ func (r *ResourceManager) ReportWorkflowResource(ctx context.Context, execSpec u experimentId := existingJob.ExperimentId namespace := existingJob.Namespace pipelineSpec := existingJob.PipelineSpec - pipelineSpec.WorkflowSpecManifest = execSpec.GetExecutionSpec().ToStringForStore() + pipelineSpec.WorkflowSpecManifest = model.LargeText(execSpec.GetExecutionSpec().ToStringForStore()) // Try to fetch experiment id from resource references if it is missing. if experimentId == "" { @@ -1409,7 +1415,7 @@ func (r *ResourceManager) ReportWorkflowResource(ctx context.Context, execSpec u Namespace: namespace, PipelineSpec: pipelineSpec, RunDetails: model.RunDetails{ - WorkflowRuntimeManifest: execSpec.ToStringForStore(), + WorkflowRuntimeManifest: model.LargeText(execSpec.ToStringForStore()), CreatedAtInSec: objMeta.CreationTimestamp.Unix(), ScheduledAtInSec: scheduledTimeInSec, FinishedAtInSec: execStatus.FinishedAt(), @@ -1436,9 +1442,9 @@ func (r *ResourceManager) ReportWorkflowResource(ctx context.Context, execSpec u // report workflows that no longer exist. It's important to return a not found error, so that persistence // agent won't retry again. 
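The error wrappers in this file now route caller-supplied text through an explicit "%s" verb instead of using it as the format string. A standalone sketch of the failure mode being avoided, with fmt.Errorf standing in for the printf-style util helpers:

    package main

    import "fmt"

    func main() {
        message := "workflow name contains 100% reserved characters"

        // Using the message itself as the format string lets any '%' in it be
        // interpreted as a verb (and trips go vet's printf check).
        fmt.Println(fmt.Errorf(message))

        // Passing it as an argument to an explicit "%s" verb keeps it verbatim.
        fmt.Println(fmt.Errorf("%s", message))
    }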
if util.IsNotFound(err) { - return nil, util.NewNotFoundError(err, message) + return nil, util.NewNotFoundError(err, "%s", message) } else { - return nil, util.Wrapf(err, message) + return nil, util.Wrapf(err, "%s", message) } } @@ -1515,15 +1521,15 @@ func (r *ResourceManager) fetchTemplateFromPipelineSpec(pipelineSpec *model.Pipe manifest = string(tempBytes) } else { // Read the provided manifest and fail if it is empty - manifest = pipelineSpec.PipelineSpecManifest + manifest = string(pipelineSpec.PipelineSpecManifest) if manifest == "" { - manifest = pipelineSpec.WorkflowSpecManifest + manifest = string(pipelineSpec.WorkflowSpecManifest) } if manifest == "" { return nil, "", util.NewInvalidInputError("Failed to fetch a template with an empty pipeline spec manifest") } } - tmpl, err := template.New([]byte(manifest), r.options.CacheDisabled) + tmpl, err := template.New([]byte(manifest), r.options.CacheDisabled, r.options.DefaultWorkspace) if err != nil { return nil, "", util.Wrap(err, "Failed to fetch a template with an invalid pipeline spec manifest") } @@ -1541,10 +1547,11 @@ func (r *ResourceManager) fetchTemplateFromPipelineVersion(pipelineVersion *mode if len(pipelineVersion.PipelineSpec) != 0 { // Check pipeline spec string first bytes := []byte(pipelineVersion.PipelineSpec) - return bytes, pipelineVersion.PipelineSpecURI, nil + return bytes, string(pipelineVersion.PipelineSpecURI), nil } else { // Try reading object store from pipeline_spec_uri - template, errUri := r.objectStore.GetFile(context.TODO(), pipelineVersion.PipelineSpecURI) + // nolint:staticcheck // [ST1003] Field name matches upstream legacy naming + template, errUri := r.objectStore.GetFile(context.TODO(), string(pipelineVersion.PipelineSpecURI)) if errUri != nil { // Try reading object store from pipeline_version_id template, errUUID := r.objectStore.GetFile(context.TODO(), r.objectStore.GetPipelineKey(fmt.Sprint(pipelineVersion.UUID))) @@ -1676,13 +1683,13 @@ func (r *ResourceManager) CreatePipelineVersion(pv *model.PipelineVersion) (*mod if err != nil { return nil, util.Wrap(err, "Failed to create a pipeline version as template is broken") } - pv.PipelineSpec = string(pipelineSpecBytes) + pv.PipelineSpec = model.LargeText(string(pipelineSpecBytes)) if pipelineSpecURI != "" { - pv.PipelineSpecURI = pipelineSpecURI + pv.PipelineSpecURI = model.LargeText(pipelineSpecURI) } // Create a template - tmpl, err := template.New(pipelineSpecBytes, r.options.CacheDisabled) + tmpl, err := template.New(pipelineSpecBytes, r.options.CacheDisabled, r.options.DefaultWorkspace) if err != nil { return nil, util.Wrap(err, "Failed to create a pipeline version due to template creation error") } @@ -1712,9 +1719,9 @@ func (r *ResourceManager) CreatePipelineVersion(pv *model.PipelineVersion) (*mod if err != nil { return nil, util.Wrap(err, "Failed to create a pipeline version due to error converting parameters to json") } - pv.Parameters = paramsJSON + pv.Parameters = model.LargeText(paramsJSON) pv.Status = model.PipelineVersionCreating - pv.PipelineSpec = string(tmpl.Bytes()) + pv.PipelineSpec = model.LargeText(string(tmpl.Bytes())) // Create a record in DB version, err := r.pipelineStore.CreatePipelineVersion(pv) diff --git a/backend/src/apiserver/resource/resource_manager_test.go b/backend/src/apiserver/resource/resource_manager_test.go index 9ee484ef214..65d82506a02 100644 --- a/backend/src/apiserver/resource/resource_manager_test.go +++ b/backend/src/apiserver/resource/resource_manager_test.go @@ -24,9 +24,7 @@ import ( "time" 
"github.com/kubeflow/pipelines/backend/src/apiserver/config/proxy" - "github.com/kubeflow/pipelines/backend/src/v2/objectstore" - "github.com/kubeflow/pipelines/third_party/ml-metadata/go/ml_metadata" "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" "github.com/argoproj/argo-workflows/v3/util/file" @@ -48,14 +46,6 @@ import ( "k8s.io/apimachinery/pkg/types" ) -func intPtr(i int64) *int64 { - return &i -} - -func strPtr(i string) *string { - return &i -} - func initEnvVars() { viper.Set(common.PodNamespace, "ns1") proxy.InitializeConfigWithEmptyForTests() @@ -105,7 +95,7 @@ func createPipelineV1(name string) *model.Pipeline { func createPipeline(name string, description string, namespace string) *model.Pipeline { return &model.Pipeline{ Name: name, - Description: description, + Description: model.LargeText(description), Status: model.PipelineReady, Namespace: namespace, } @@ -117,7 +107,7 @@ func createPipelineVersion(pipelineId string, name string, description string, u } paramsJSON := "[{\"name\":\"param1\"}]" spec := pipelineSpec - tmpl, err := template.New([]byte(pipelineSpec), false) + tmpl, err := template.New([]byte(pipelineSpec), false, nil) if err != nil { spec = pipelineSpec } else { @@ -126,13 +116,13 @@ func createPipelineVersion(pipelineId string, name string, description string, u } return &model.PipelineVersion{ Name: name, - Parameters: paramsJSON, + Parameters: model.LargeText(paramsJSON), PipelineId: pipelineId, CodeSourceUrl: url, - Description: description, + Description: model.LargeText(description), Status: model.PipelineVersionReady, - PipelineSpec: spec, - PipelineSpecURI: pipelineSpecURI, + PipelineSpec: model.LargeText(spec), + PipelineSpecURI: model.LargeText(pipelineSpecURI), } } @@ -232,7 +222,7 @@ func initWithJob(t *testing.T) (*FakeClientManager, *ResourceManager, *model.Job DisplayName: "j1", Enabled: true, PipelineSpec: model.PipelineSpec{ - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), }, ExperimentId: exp.UUID, } @@ -249,7 +239,7 @@ func initWithJobV2(t *testing.T) (*FakeClientManager, *ResourceManager, *model.J DisplayName: "j1", Enabled: true, PipelineSpec: model.PipelineSpec{ - PipelineSpecManifest: v2SpecHelloWorld, + PipelineSpecManifest: model.LargeText(v2SpecHelloWorld), RuntimeConfig: model.RuntimeConfig{ Parameters: "{\"text\":\"world\"}", PipelineRoot: "job-1-root", @@ -268,7 +258,7 @@ func initWithOneTimeRun(t *testing.T) (*FakeClientManager, *ResourceManager, *mo apiRun := &model.Run{ DisplayName: "run1", PipelineSpec: model.PipelineSpec{ - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), Parameters: "[{\"name\":\"param1\",\"value\":\"world\"}]", }, ExperimentId: exp.UUID, @@ -283,7 +273,7 @@ func initWithOneTimeRunV2(t *testing.T) (*FakeClientManager, *ResourceManager, * apiRun := &model.Run{ DisplayName: "run1", PipelineSpec: model.PipelineSpec{ - PipelineSpecManifest: v2SpecHelloWorld, + PipelineSpecManifest: model.LargeText(v2SpecHelloWorld), RuntimeConfig: model.RuntimeConfig{ Parameters: "{\"text\":\"world\"}", }, @@ -300,7 +290,7 @@ func initWithPatchedRun(t *testing.T) (*FakeClientManager, *ResourceManager, *mo apiRun := &model.Run{ DisplayName: "run1", PipelineSpec: model.PipelineSpec{ - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), Parameters: 
"[{\"name\":\"param1\",\"value\":\"{{kfp-default-bucket}}\"}]", }, @@ -316,7 +306,7 @@ func initWithOneTimeFailedRun(t *testing.T) (*FakeClientManager, *ResourceManage apiRun := &model.Run{ DisplayName: "run1", PipelineSpec: model.PipelineSpec{ - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), Parameters: "[{\"name\":\"param1\",\"value\":\"world\"}]", }, ExperimentId: exp.UUID, @@ -338,7 +328,7 @@ func initWithOneTimeFailedRunCompressed(t *testing.T) (*FakeClientManager, *Reso apiRun := &model.Run{ DisplayName: "run1", PipelineSpec: model.PipelineSpec{ - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), Parameters: "[{\"name\":\"param1\",\"value\":\"world\"}]", }, ExperimentId: exp.UUID, @@ -363,7 +353,7 @@ func initWithOneTimeFailedRunOffloaded(t *testing.T) (*FakeClientManager, *Resou apiRun := &model.Run{ DisplayName: "run1", PipelineSpec: model.PipelineSpec{ - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), Parameters: "[{\"name\":\"param1\",\"value\":\"world\"}]", }, ExperimentId: exp.UUID, @@ -458,7 +448,7 @@ func TestCreatePipeline(t *testing.T) { pv = createPipelineVersion( pipeline.UUID, pipeline.Name, - pipeline.Description, + string(pipeline.Description), fmt.Sprintf("url://%v", pipeline.Name), test.template, fmt.Sprintf("uri://pipelines/%v/versions/v1/spec.yaml", pipeline.Name), @@ -520,13 +510,13 @@ func TestCreatePipelineVersion(t *testing.T) { template: testWorkflow.ToStringForStore(), version: &model.PipelineVersion{ Name: "p_v", - Description: "test", + Description: model.LargeText("test"), }, model: &model.PipelineVersion{ Name: "p_v", Parameters: "[{\"name\":\"param1\"}]", - Description: "test", - PipelineSpec: testWorkflow.ToStringForStore(), + Description: model.LargeText("test"), + PipelineSpec: model.LargeText(testWorkflow.ToStringForStore()), }, }, { @@ -538,7 +528,7 @@ func TestCreatePipelineVersion(t *testing.T) { model: &model.PipelineVersion{ Name: "complex", Parameters: "[{\"name\":\"output\"},{\"name\":\"project\"},{\"name\":\"schema\",\"value\":\"gs://ml-pipeline-playground/tfma/taxi-cab-classification/schema.json\"},{\"name\":\"train\",\"value\":\"gs://ml-pipeline-playground/tfma/taxi-cab-classification/train.csv\"},{\"name\":\"evaluation\",\"value\":\"gs://ml-pipeline-playground/tfma/taxi-cab-classification/eval.csv\"},{\"name\":\"preprocess-mode\",\"value\":\"local\"},{\"name\":\"preprocess-module\",\"value\":\"gs://ml-pipeline-playground/tfma/taxi-cab-classification/preprocessing.py\"},{\"name\":\"target\",\"value\":\"tips\"},{\"name\":\"learning-rate\",\"value\":\"0.1\"},{\"name\":\"hidden-layer-size\",\"value\":\"1500\"},{\"name\":\"steps\",\"value\":\"3000\"},{\"name\":\"workers\",\"value\":\"0\"},{\"name\":\"pss\",\"value\":\"0\"},{\"name\":\"predict-mode\",\"value\":\"local\"},{\"name\":\"analyze-mode\",\"value\":\"local\"},{\"name\":\"analyze-slice-column\",\"value\":\"trip_start_hour\"}]", - PipelineSpec: complexPipeline, + PipelineSpec: model.LargeText(complexPipeline), }, }, { @@ -564,7 +554,7 @@ func TestCreatePipelineVersion(t *testing.T) { Name: "v2spec", // TODO(v2): when parameter extraction is implemented, this won't be empty. 
Parameters: "[{\"name\":\"param1\"}]", - PipelineSpec: testWorkflow.ToStringForStore(), + PipelineSpec: model.LargeText(testWorkflow.ToStringForStore()), }, }, } @@ -694,13 +684,13 @@ func TestCreatePipelineOrVersion_V2PipelineName(t *testing.T) { require.Nil(t, err) bytes, err := manager.GetPipelineVersionTemplate(version.UUID) require.Nil(t, err) - tmpl, err := template.New(bytes, true) + tmpl, err := template.New(bytes, true, nil) require.Nil(t, err) assert.Equal(t, test.pipelineName, tmpl.V2PipelineName()) bytes, err = manager.GetPipelineLatestTemplate(createdPipeline.UUID) require.Nil(t, err) - tmpl, err = template.New(bytes, true) + tmpl, err = template.New(bytes, true, nil) require.Nil(t, err) assert.Equal(t, test.pipelineName, tmpl.V2PipelineName()) }) @@ -721,22 +711,22 @@ func TestResourceManager_CreatePipelineAndPipelineVersion(t *testing.T) { "Valid - pipeline v2", &model.Pipeline{ Name: "pipeline v2", - Description: "pipeline two", + Description: model.LargeText("pipeline two"), Namespace: "user1", }, &model.PipelineVersion{ Name: "pipeline v2 version 1", - Description: "pipeline v2 version description", + Description: model.LargeText("pipeline v2 version description"), CodeSourceUrl: "gs://my-bucket/pipeline_v2.py", - PipelineSpec: v2SpecHelloWorld, - PipelineSpecURI: "pipeline_version_two.yaml", + PipelineSpec: model.LargeText(v2SpecHelloWorld), + PipelineSpecURI: model.LargeText("pipeline_version_two.yaml"), }, &model.Pipeline{ UUID: DefaultFakePipelineIdTwo, CreatedAtInSec: 1, Name: "pipeline v2", DisplayName: "pipeline v2", - Description: "pipeline two", + Description: model.LargeText("pipeline two"), Namespace: "user1", Status: model.PipelineReady, }, @@ -745,12 +735,12 @@ func TestResourceManager_CreatePipelineAndPipelineVersion(t *testing.T) { CreatedAtInSec: 2, Name: "pipeline v2 version 1", DisplayName: "pipeline v2 version 1", - Description: "pipeline v2 version description", + Description: model.LargeText("pipeline v2 version description"), PipelineId: DefaultFakePipelineIdTwo, Status: model.PipelineVersionReady, CodeSourceUrl: "gs://my-bucket/pipeline_v2.py", - PipelineSpec: v2SpecHelloWorld, - PipelineSpecURI: "pipeline_version_two.yaml", + PipelineSpec: model.LargeText(v2SpecHelloWorld), + PipelineSpecURI: model.LargeText("pipeline_version_two.yaml"), Parameters: "[]", }, false, @@ -761,23 +751,23 @@ func TestResourceManager_CreatePipelineAndPipelineVersion(t *testing.T) { &model.Pipeline{ Name: "pipeline v2", DisplayName: "pipeline v2 display name", - Description: "pipeline two", + Description: model.LargeText("pipeline two"), Namespace: "user1", }, &model.PipelineVersion{ Name: "pipeline v2 version 1", DisplayName: "pipeline v2 version 1 display name", - Description: "pipeline v2 version description", + Description: model.LargeText("pipeline v2 version description"), CodeSourceUrl: "gs://my-bucket/pipeline_v2.py", - PipelineSpec: v2SpecHelloWorld, - PipelineSpecURI: "pipeline_version_two.yaml", + PipelineSpec: model.LargeText(v2SpecHelloWorld), + PipelineSpecURI: model.LargeText("pipeline_version_two.yaml"), }, &model.Pipeline{ UUID: DefaultFakePipelineIdTwo, CreatedAtInSec: 1, Name: "pipeline v2", DisplayName: "pipeline v2 display name", - Description: "pipeline two", + Description: model.LargeText("pipeline two"), Namespace: "user1", Status: model.PipelineReady, }, @@ -786,12 +776,12 @@ func TestResourceManager_CreatePipelineAndPipelineVersion(t *testing.T) { CreatedAtInSec: 2, Name: "pipeline v2 version 1", DisplayName: "pipeline v2 version 1 display name", 
- Description: "pipeline v2 version description", + Description: model.LargeText("pipeline v2 version description"), PipelineId: DefaultFakePipelineIdTwo, Status: model.PipelineVersionReady, CodeSourceUrl: "gs://my-bucket/pipeline_v2.py", - PipelineSpec: v2SpecHelloWorld, - PipelineSpecURI: "pipeline_version_two.yaml", + PipelineSpec: model.LargeText(v2SpecHelloWorld), + PipelineSpecURI: model.LargeText("pipeline_version_two.yaml"), Parameters: "[]", }, false, @@ -801,22 +791,22 @@ func TestResourceManager_CreatePipelineAndPipelineVersion(t *testing.T) { "Valid - pipeline v1", &model.Pipeline{ Name: "pipeline v1", - Description: "pipeline one", + Description: model.LargeText("pipeline one"), Parameters: `[{"name":"param1","value":"one"},{"name":"param2","value":"two"}]`, }, &model.PipelineVersion{ Name: "pipeline v1 version 1", - Description: "pipeline v1 version description", + Description: model.LargeText("pipeline v1 version description"), CodeSourceUrl: "gs://my-bucket/pipeline_v1.py", - PipelineSpec: complexPipeline, - PipelineSpecURI: "pipeline_version_one.yaml", + PipelineSpec: model.LargeText(complexPipeline), + PipelineSpecURI: model.LargeText("pipeline_version_one.yaml"), }, &model.Pipeline{ UUID: DefaultFakePipelineIdTwo, CreatedAtInSec: 1, Name: "pipeline v1", DisplayName: "pipeline v1", - Description: "pipeline one", + Description: model.LargeText("pipeline one"), Parameters: `[{"name":"param1","value":"one"},{"name":"param2","value":"two"}]`, Status: model.PipelineReady, }, @@ -826,11 +816,11 @@ func TestResourceManager_CreatePipelineAndPipelineVersion(t *testing.T) { PipelineId: DefaultFakePipelineIdTwo, Name: "pipeline v1 version 1", DisplayName: "pipeline v1 version 1", - Description: "pipeline v1 version description", + Description: model.LargeText("pipeline v1 version description"), Status: model.PipelineVersionReady, CodeSourceUrl: "gs://my-bucket/pipeline_v1.py", - PipelineSpec: complexPipeline, - PipelineSpecURI: "pipeline_version_one.yaml", + PipelineSpec: model.LargeText(complexPipeline), + PipelineSpecURI: model.LargeText("pipeline_version_one.yaml"), }, false, "", @@ -1041,7 +1031,7 @@ func TestGetPipelineTemplate_FromPipelineURI(t *testing.T) { pv := &model.PipelineVersion{ PipelineId: p.UUID, Name: "new_version", - PipelineSpecURI: p.UUID, + PipelineSpecURI: model.LargeText(p.UUID), } _, err := manager.CreatePipelineVersion(pv) assert.Nil(t, err) @@ -1063,7 +1053,7 @@ func TestGetPipelineTemplate_FromPipelineVersionId(t *testing.T) { UUID: "1000", PipelineId: p.UUID, Name: "new_version", - PipelineSpecURI: p.UUID, + PipelineSpecURI: model.LargeText(p.UUID), } pipelineStore, ok := manager.pipelineStore.(*storage.PipelineStore) @@ -1090,7 +1080,7 @@ func TestGetPipelineTemplate_FromPipelineId(t *testing.T) { pv := &model.PipelineVersion{ PipelineId: p.UUID, Name: "new_version", - PipelineSpecURI: p.UUID, + PipelineSpecURI: model.LargeText(p.UUID), } manager.objectStore.AddFile(context.TODO(), []byte(testWorkflow.ToStringForStore()), manager.objectStore.GetPipelineKey(p.UUID)) @@ -1177,7 +1167,7 @@ func TestListPipelines(t *testing.T) { assert.Equal(t, 2, nTotal) // Delete the above pipeline. - err = manager.DeletePipeline(pnew2.UUID) + err = manager.DeletePipeline(pnew2.UUID, false) assert.Nil(t, err) _, nTotal, _, err = manager.ListPipelines( @@ -1235,7 +1225,7 @@ func TestListPipelinesV1(t *testing.T) { assert.Equal(t, 2, nTotal) // Delete the above pipeline. 
- err = manager.DeletePipeline(pnew2.UUID) + err = manager.DeletePipeline(pnew2.UUID, false) assert.Nil(t, err) _, _, nTotal, _, err = manager.ListPipelinesV1( @@ -1307,7 +1297,7 @@ func TestListPipelineVersions(t *testing.T) { assert.Equal(t, 2, nTotal) // Delete the above pipeline. - err = manager.DeletePipeline(pnew2.UUID) + err = manager.DeletePipeline(pnew2.UUID, false) assert.Nil(t, err) _, nTotal, _, err = manager.ListPipelineVersions( @@ -1610,7 +1600,7 @@ func TestDeletePipeline(t *testing.T) { assert.Nil(t, err) // Delete the above pipeline. - err = manager.DeletePipeline(pnew2.UUID) + err = manager.DeletePipeline(pnew2.UUID, false) assert.Nil(t, err) // Verify the pipeline doesn't exist. @@ -1622,7 +1612,7 @@ func TestDeletePipeline(t *testing.T) { assert.Nil(t, err) // Must fail due to active pipeline versions - err = manager.DeletePipeline(pnew1.UUID) + err = manager.DeletePipeline(pnew1.UUID, false) assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode()) assert.Contains(t, err.Error(), fmt.Sprintf("as it has existing pipeline versions (e.g. %v)", FakeUUIDOne)) } @@ -1679,14 +1669,14 @@ func TestCreateRun_ThroughPipelineID(t *testing.T) { PipelineVersionId: version.UUID, PipelineId: p.UUID, PipelineName: "version_for_run", - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), Parameters: "[{\"name\":\"param1\",\"value\":\"world\"}]", }, RunDetails: model.RunDetails{ CreatedAtInSec: 5, ScheduledAtInSec: 5, Conditions: "Pending", - WorkflowRuntimeManifest: util.NewWorkflow(expectedRuntimeWorkflow).ToStringForStore(), + WorkflowRuntimeManifest: model.LargeText(util.NewWorkflow(expectedRuntimeWorkflow).ToStringForStore()), StateHistory: []*model.RuntimeStatus{ { UpdateTimeInSec: 6, @@ -1715,7 +1705,7 @@ func TestCreateRun_ThroughWorkflowSpecV2(t *testing.T) { Namespace: runDetail.Namespace, StorageState: model.StorageStateAvailable, PipelineSpec: model.PipelineSpec{ - PipelineSpecManifest: v2SpecHelloWorld, + PipelineSpecManifest: model.LargeText(v2SpecHelloWorld), RuntimeConfig: model.RuntimeConfig{ Parameters: "{\"text\":\"world\"}", }, @@ -1765,7 +1755,7 @@ func TestCreateRun_ThroughWorkflowSpec(t *testing.T) { ServiceAccount: "pipeline-runner", StorageState: model.StorageStateAvailable, PipelineSpec: model.PipelineSpec{ - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), Parameters: "[{\"name\":\"param1\",\"value\":\"world\"}]", }, RunDetails: model.RunDetails{ @@ -1779,7 +1769,7 @@ func TestCreateRun_ThroughWorkflowSpec(t *testing.T) { State: model.RuntimeStatePending, }, }, - WorkflowRuntimeManifest: util.NewWorkflow(expectedRuntimeWorkflow).ToStringForStore(), + WorkflowRuntimeManifest: model.LargeText(util.NewWorkflow(expectedRuntimeWorkflow).ToStringForStore()), }, } assert.Equal(t, expectedRunDetail.ToV1(), runDetail.ToV1(), "The CreateRun return has unexpected value") @@ -1824,10 +1814,10 @@ func TestCreateRun_ThroughWorkflowSpecWithPatch(t *testing.T) { State: model.RuntimeStatePending, }, }, - WorkflowRuntimeManifest: util.NewWorkflow(expectedRuntimeWorkflow).ToStringForStore(), + WorkflowRuntimeManifest: model.LargeText(util.NewWorkflow(expectedRuntimeWorkflow).ToStringForStore()), }, PipelineSpec: model.PipelineSpec{ - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), Parameters: 
"[{\"name\":\"param1\",\"value\":\"{{kfp-default-bucket}}\"}]", }, } @@ -1855,7 +1845,7 @@ func TestCreateRun_ThroughWorkflowSpecSameManifest(t *testing.T) { &model.Run{ DisplayName: "run1", PipelineSpec: model.PipelineSpec{ - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), Parameters: "[{\"name\":\"param1\",\"value\":\"{{kfp-default-bucket}}\"}]", }, ExperimentId: runDetail.ExperimentId, @@ -1925,11 +1915,11 @@ func TestCreateRun_ThroughPipelineVersion(t *testing.T) { PipelineVersionId: version.UUID, PipelineId: version.PipelineId, PipelineName: version.Name, - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), Parameters: "[{\"name\":\"param1\",\"value\":\"world\"}]", }, RunDetails: model.RunDetails{ - WorkflowRuntimeManifest: util.NewWorkflow(expectedRuntimeWorkflow).ToStringForStore(), + WorkflowRuntimeManifest: model.LargeText(util.NewWorkflow(expectedRuntimeWorkflow).ToStringForStore()), CreatedAtInSec: 5, ScheduledAtInSec: 5, Conditions: "Pending", @@ -2002,7 +1992,7 @@ func TestCreateRun_ThroughPipelineIdAndPipelineVersion(t *testing.T) { ServiceAccount: "sa1", StorageState: model.StorageStateAvailable, RunDetails: model.RunDetails{ - WorkflowRuntimeManifest: util.NewWorkflow(expectedRuntimeWorkflow).ToStringForStore(), + WorkflowRuntimeManifest: model.LargeText(util.NewWorkflow(expectedRuntimeWorkflow).ToStringForStore()), CreatedAtInSec: 5, ScheduledAtInSec: 5, Conditions: "Pending", @@ -2017,7 +2007,7 @@ func TestCreateRun_ThroughPipelineIdAndPipelineVersion(t *testing.T) { PipelineId: pipeline.UUID, PipelineVersionId: version.UUID, PipelineName: version.Name, - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), Parameters: "[{\"name\":\"param1\",\"value\":\"world\"}]", }, } @@ -2055,7 +2045,7 @@ func TestCreateRun_InvalidWorkflowSpec(t *testing.T) { DisplayName: "run1", ExperimentId: experimentID, PipelineSpec: model.PipelineSpec{ - WorkflowSpecManifest: string("I am invalid"), + WorkflowSpecManifest: model.LargeText("I am invalid"), Parameters: "[{\"name\":\"param1\",\"value\":\"world\"}]", }, } @@ -2091,7 +2081,7 @@ func TestCreateRun_OverrideParametersError(t *testing.T) { DisplayName: "run1", ExperimentId: experimentID, PipelineSpec: model.PipelineSpec{ - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), Parameters: "[{\"name\":\"param2\",\"value\":\"world\"}]", }, } @@ -2110,7 +2100,7 @@ func TestCreateRun_CreateWorkflowError(t *testing.T) { DisplayName: "run1", ExperimentId: experimentID, PipelineSpec: model.PipelineSpec{ - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), Parameters: "[{\"name\":\"param1\",\"value\":\"world\"}]", }, } @@ -2129,7 +2119,7 @@ func TestCreateRun_StoreRunMetadataError(t *testing.T) { DisplayName: "run1", ExperimentId: experimentID, PipelineSpec: model.PipelineSpec{ - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), Parameters: "[{\"name\":\"param1\",\"value\":\"world\"}]", }, } @@ -2276,14 +2266,14 @@ func TestRetryRun(t *testing.T) { actualRunDetail, err := manager.GetRun(runDetail.UUID) assert.Nil(t, err) - assert.Contains(t, actualRunDetail.WorkflowRuntimeManifest, 
"Failed") + assert.Contains(t, string(actualRunDetail.WorkflowRuntimeManifest), "Failed") err = manager.RetryRun(context.Background(), runDetail.UUID) assert.Nil(t, err) actualRunDetail, err = manager.GetRun(runDetail.UUID) assert.Nil(t, err) - assert.Contains(t, actualRunDetail.WorkflowRuntimeManifest, "Running") + assert.Contains(t, string(actualRunDetail.WorkflowRuntimeManifest), "Running") assert.Equal(t, actualRunDetail.RunDetails.State, model.RuntimeStateRunning) } @@ -2379,7 +2369,7 @@ func TestCreateJob_ThroughWorkflowSpec(t *testing.T) { UpdatedAtInSec: 2, Conditions: "STATUS_UNSPECIFIED", PipelineSpec: model.PipelineSpec{ - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), }, } expectedJob.PipelineSpec.PipelineName = job.PipelineSpec.PipelineName @@ -2401,7 +2391,7 @@ func TestCreateJob_ThroughWorkflowSpecV2(t *testing.T) { UpdatedAtInSec: 2, Conditions: "STATUS_UNSPECIFIED", PipelineSpec: model.PipelineSpec{ - PipelineSpecManifest: v2SpecHelloWorld, + PipelineSpecManifest: model.LargeText(v2SpecHelloWorld), RuntimeConfig: model.RuntimeConfig{ Parameters: "{\"text\":\"world\"}", PipelineRoot: "job-1-root", @@ -2435,7 +2425,7 @@ func TestCreateJobDifferentDefaultServiceAccountName_ThroughWorkflowSpecV2(t *te UpdatedAtInSec: 2, Conditions: "STATUS_UNSPECIFIED", PipelineSpec: model.PipelineSpec{ - PipelineSpecManifest: v2SpecHelloWorld, + PipelineSpecManifest: model.LargeText(v2SpecHelloWorld), RuntimeConfig: model.RuntimeConfig{ Parameters: "{\"text\":\"world\"}", PipelineRoot: "job-1-root", @@ -2538,7 +2528,7 @@ func TestCreateJob_ThroughPipelineVersion(t *testing.T) { PipelineId: version.PipelineId, PipelineName: version.Name, PipelineVersionId: version.UUID, - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), Parameters: "[{\"name\":\"param1\",\"value\":\"world\"}]", }, } @@ -2593,7 +2583,7 @@ func TestCreateJob_ThroughPipelineIdAndPipelineVersion(t *testing.T) { PipelineName: version.Name, PipelineId: pipeline.UUID, PipelineVersionId: version.UUID, - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), Parameters: "[{\"name\":\"param1\",\"value\":\"world\"}]", }, } @@ -2634,7 +2624,7 @@ func TestCreateJob_InvalidWorkflowSpec(t *testing.T) { ExperimentId: experimentID, Enabled: true, PipelineSpec: model.PipelineSpec{ - WorkflowSpecManifest: string("I am invalid"), + WorkflowSpecManifest: model.LargeText("I am invalid"), Parameters: "[{\"name\":\"param2\",\"value\":\"world\"}]", }, } @@ -2653,7 +2643,7 @@ func TestCreateJob_NullWorkflowSpec(t *testing.T) { ExperimentId: experimentID, Enabled: true, PipelineSpec: model.PipelineSpec{ - WorkflowSpecManifest: string("null"), // this situation occurs for real when the manifest file disappears from object store in some way due to retention policy or manual deletion. + WorkflowSpecManifest: model.LargeText("null"), // this situation occurs for real when the manifest file disappears from object store in some way due to retention policy or manual deletion. 
Parameters: "[{\"name\":\"param2\",\"value\":\"world\"}]", }, } @@ -2717,7 +2707,7 @@ func TestEnableJob(t *testing.T) { PipelineId: job.PipelineSpec.PipelineId, PipelineName: job.PipelineSpec.PipelineName, PipelineVersionId: job.PipelineSpec.PipelineVersionId, - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), }, } assert.Nil(t, err) @@ -2877,7 +2867,7 @@ func TestReportWorkflowResource_ScheduledWorkflowIDEmpty_Success(t *testing.T) { }, }, PipelineSpec: model.PipelineSpec{ - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), Parameters: "[{\"name\":\"param1\",\"value\":\"world\"}]", }, } @@ -2921,14 +2911,14 @@ func TestReportWorkflowResource_ScheduledWorkflowIDNotEmpty_Success(t *testing.T Namespace: job.Namespace, RecurringRunId: job.UUID, PipelineSpec: model.PipelineSpec{ - WorkflowSpecManifest: workflow.GetExecutionSpec().ToStringForStore(), + WorkflowSpecManifest: model.LargeText(workflow.GetExecutionSpec().ToStringForStore()), PipelineSpecManifest: job.PipelineSpec.PipelineSpecManifest, PipelineId: job.PipelineSpec.PipelineId, PipelineName: job.PipelineSpec.PipelineName, PipelineVersionId: job.PipelineSpec.PipelineVersionId, }, RunDetails: model.RunDetails{ - WorkflowRuntimeManifest: workflow.ToStringForStore(), + WorkflowRuntimeManifest: model.LargeText(workflow.ToStringForStore()), CreatedAtInSec: 11, ScheduledAtInSec: 11, FinishedAtInSec: 0, @@ -3106,7 +3096,7 @@ func TestReportScheduledWorkflowResource_Success(t *testing.T) { }, }, PipelineSpec: model.PipelineSpec{ - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), PipelineSpecManifest: actualJob.PipelineSpec.PipelineSpecManifest, PipelineName: actualJob.PipelineSpec.PipelineName, }, @@ -3167,7 +3157,7 @@ func TestReportScheduledWorkflowResource_Success_withParamsV1(t *testing.T) { }, PipelineSpec: model.PipelineSpec{ Parameters: `[{"name":"param_v1","value":"value_v1"}]`, - WorkflowSpecManifest: testWorkflow.ToStringForStore(), + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), PipelineSpecManifest: actualJob.PipelineSpec.PipelineSpecManifest, PipelineName: actualJob.PipelineSpec.PipelineName, }, @@ -3227,7 +3217,7 @@ func TestReportScheduledWorkflowResource_Success_withRuntimeParamsV2(t *testing. 
}, }, PipelineSpec: model.PipelineSpec{ - PipelineSpecManifest: v2SpecHelloWorld, + PipelineSpecManifest: model.LargeText(v2SpecHelloWorld), PipelineName: actualJob.PipelineSpec.PipelineName, RuntimeConfig: model.RuntimeConfig{ Parameters: `{"param1":"world-updated"}`, @@ -4100,35 +4090,6 @@ func TestCreateTask(t *testing.T) { assert.Equal(t, expectedTask, storedTask, "The StoredTask return has unexpected value") } -func TestBackwardsCompatibilityForSessionInfo(t *testing.T) { - _, manager, _, _, _, _ := initWithExperimentAndPipelineAndRun(t) - - // First Artifact has assigned a bucket_session_info - artifact1 := &ml_metadata.Artifact{ - Id: intPtr(0), - Uri: strPtr("s3://test-bucket/pipeline/some-pipeline-id/task/key0"), - } - - config1, _, err := manager.GetArtifactSessionInfo(context.Background(), artifact1) - - // Assert the results - assert.NoError(t, err) - assert.NotNil(t, config1) - - // Second Artifact has assigned a store_session_info - artifact2 := &ml_metadata.Artifact{ - Id: intPtr(1), - Uri: strPtr("s3://test-bucket/pipeline/some-pipeline-id/task/key1"), - } - - // Call the function - config2, _, err := manager.GetArtifactSessionInfo(context.Background(), artifact2) - - // Assert the results - assert.NoError(t, err) - assert.NotNil(t, config2) -} - var v2SpecHelloWorld = ` components: comp-hello-world: diff --git a/backend/src/apiserver/server/api_converter.go b/backend/src/apiserver/server/api_converter.go index 9607bfe0ba7..4978dc17e1d 100644 --- a/backend/src/apiserver/server/api_converter.go +++ b/backend/src/apiserver/server/api_converter.go @@ -19,13 +19,14 @@ import ( "fmt" "sort" "strconv" + "time" - "github.com/golang/protobuf/ptypes/timestamp" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/apiserver/model" + "github.com/kubeflow/pipelines/backend/src/apiserver/validation" "github.com/kubeflow/pipelines/backend/src/common/util" swapi "github.com/kubeflow/pipelines/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1" "github.com/pkg/errors" @@ -53,12 +54,20 @@ func toModelExperiment(e interface{}) (*model.Experiment, error) { if name == "" { return nil, util.NewInternalServerError(util.NewInvalidInputError("Experiment must have a non-empty name"), "Failed to convert API experiment to model experiment") } - return &model.Experiment{ + // Namespace validation is handled in the server layer as it depends on multi-user mode. + exp := &model.Experiment{ Name: name, Description: description, Namespace: namespace, StorageState: model.StorageStateAvailable, - }, nil + } + if err := validation.ValidateModel(exp); err != nil { + return nil, util.NewInternalServerError( + err, + "Failed to convert API experiment to model experiment", + ) + } + return exp, nil } // Converts internal experiment representation to its API counterpart. 
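Throughout this patch, stored manifests, parameter blobs, and long descriptions move from plain `string` fields to `model.LargeText`, with explicit `model.LargeText(...)` and `string(...)` conversions at the API boundaries. A minimal sketch of that pattern, assuming `LargeText` is a string-backed defined type (its actual definition, including any column or ORM annotations, sits outside these hunks):

```go
package model

// LargeText is assumed here to be a plain defined string type used for
// large text columns (pipeline specs, runtime manifests, descriptions).
// Being a distinct type forces explicit conversions at API boundaries
// while remaining a zero-cost wrapper around string.
type LargeText string

// Boundary conversions mirroring the changes in this patch.
func toStored(spec []byte) LargeText { return LargeText(spec) } // bytes/string -> model field
func toServed(t LargeText) string    { return string(t) }       // model field -> API string
```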
@@ -90,7 +99,7 @@ func toApiExperimentV1(experiment *model.Experiment) *apiv1beta1.Experiment { Id: experiment.UUID, Name: experiment.Name, Description: experiment.Description, - CreatedAt: ×tamp.Timestamp{Seconds: experiment.CreatedAtInSec}, + CreatedAt: timestamppb.New(time.Unix(experiment.CreatedAtInSec, 0)), ResourceReferences: resourceReferences, StorageState: storageState, } @@ -116,8 +125,8 @@ func toApiExperiment(experiment *model.Experiment) *apiv2beta1.Experiment { ExperimentId: experiment.UUID, DisplayName: experiment.Name, Description: experiment.Description, - CreatedAt: ×tamp.Timestamp{Seconds: experiment.CreatedAtInSec}, - LastRunCreatedAt: ×tamp.Timestamp{Seconds: experiment.LastRunCreatedAtInSec}, + CreatedAt: timestamppb.New(time.Unix(experiment.CreatedAtInSec, 0)), + LastRunCreatedAt: timestamppb.New(time.Unix(experiment.LastRunCreatedAtInSec, 0)), Namespace: experiment.Namespace, StorageState: storageState, } @@ -147,6 +156,7 @@ func toApiExperiments(experiments []*model.Experiment) []*apiv2beta1.Experiment // Supports both v1beta1 abd v2beta1 API. func toModelPipeline(p interface{}) (*model.Pipeline, error) { var name, displayName, namespace, description string + switch apiPipeline := p.(type) { case *apiv1beta1.Pipeline: namespace = getNamespaceFromResourceReferenceV1(apiPipeline.GetResourceReferences()) @@ -172,13 +182,24 @@ func toModelPipeline(p interface{}) (*model.Pipeline, error) { displayName = name } - return &model.Pipeline{ + // Build the full model first, then validate the actual values on the struct. + pipeline := &model.Pipeline{ Name: name, DisplayName: displayName, Namespace: namespace, - Description: description, + Description: model.LargeText(description), Status: model.PipelineCreating, - }, nil + } + + if err := validation.ValidateModel(pipeline); err != nil { + return nil, util.NewInternalServerError( + err, + "Failed to convert API pipeline to model pipeline", + ) + } + + return pipeline, nil + } // Converts internal pipeline and pipeline version representation to an API pipeline. 
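The converter hunks in this file also replace hand-built `&timestamp.Timestamp{Seconds: ...}` literals from the deprecated `github.com/golang/protobuf/ptypes/timestamp` package with `timestamppb.New`. A minimal before/after using only the standard protobuf-go well-known types:

```go
package main

import (
	"fmt"
	"time"

	"google.golang.org/protobuf/types/known/timestamppb"
)

func main() {
	createdAtInSec := int64(1)

	// Old style (removed in this patch): &timestamp.Timestamp{Seconds: createdAtInSec}

	// New style: construct from a time.Time; Seconds and Nanos are filled in
	// by the well-known-types helper.
	created := timestamppb.New(time.Unix(createdAtInSec, 0))
	fmt.Println(created.GetSeconds())   // 1
	fmt.Println(created.AsTime().UTC()) // 1970-01-01 00:00:01 +0000 UTC
}
```

For zero values the two forms are wire-identical, which is why the updated test expectations later in this diff spell out `&timestamppb.Timestamp{Seconds: 0, Nanos: 0}` where the old code used an empty `&timestamp.Timestamp{}`.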
@@ -195,11 +216,11 @@ func toApiPipelineV1(pipeline *model.Pipeline, pipelineVersion *model.PipelineVe } } - params := toApiParametersV1(pipelineVersion.Parameters) + params := toApiParametersV1(string(pipelineVersion.Parameters)) if params == nil { return &apiv1beta1.Pipeline{ Id: pipeline.UUID, - Error: util.NewInternalServerError(util.NewInvalidInputError(fmt.Sprintf("Failed to convert parameters: %s", pipelineVersion.Parameters)), "Failed to convert a model pipeline to v1beta1 API pipeline").Error(), + Error: util.NewInternalServerError(util.NewInvalidInputError("%s", fmt.Sprintf("Failed to convert parameters: %s", pipelineVersion.Parameters)), "Failed to convert a model pipeline to v1beta1 API pipeline").Error(), } } if len(params) == 0 { @@ -221,9 +242,9 @@ func toApiPipelineV1(pipeline *model.Pipeline, pipelineVersion *model.PipelineVe } apiPipeline := &apiv1beta1.Pipeline{ Id: pipeline.UUID, - CreatedAt: ×tamp.Timestamp{Seconds: pipeline.CreatedAtInSec}, + CreatedAt: timestamppb.New(time.Unix(pipeline.CreatedAtInSec, 0)), Name: pipeline.Name, - Description: pipeline.Description, + Description: string(pipeline.Description), Parameters: params, DefaultVersion: defaultVersion, ResourceReferences: resourceRefs, @@ -293,8 +314,8 @@ func toApiPipeline(pipeline *model.Pipeline) *apiv2beta1.Pipeline { PipelineId: pipeline.UUID, Name: pipeline.Name, DisplayName: pipeline.DisplayName, - Description: pipeline.Description, - CreatedAt: ×tamp.Timestamp{Seconds: pipeline.CreatedAtInSec}, + Description: string(pipeline.Description), + CreatedAt: timestamppb.New(time.Unix(pipeline.CreatedAtInSec, 0)), Namespace: pipeline.Namespace, } } @@ -362,16 +383,22 @@ func toModelPipelineVersion(p interface{}) (*model.PipelineVersion, error) { if displayName == "" { displayName = name } - - return &model.PipelineVersion{ + pv := &model.PipelineVersion{ Name: name, DisplayName: displayName, PipelineId: pipelineId, - PipelineSpecURI: pipelineUrl, + PipelineSpecURI: model.LargeText(pipelineUrl), CodeSourceUrl: codeUrl, - Description: description, + Description: model.LargeText(description), Status: model.PipelineVersionCreating, - }, nil + } + if err := validation.ValidateModel(pv); err != nil { + return nil, util.NewInternalServerError( + err, + "Failed to convert API pipeline version to model pipeline version", + ) + } + return pv, nil } // Converts internal pipeline version representation to its API counterpart. 
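Several error constructions in these files gain a literal `"%s"` format, e.g. `util.NewNotFoundError(err, "%s", message)` earlier in resource_manager.go and the `NewInvalidInputError("%s", fmt.Sprintf(...))` call above. These helpers format like `fmt.Errorf`, so passing a pre-built message as the format string would let any stray `%` in user-controlled content (pipeline names, parameter JSON) be read as a format verb, and it trips `go vet`'s non-constant format string check. A small sketch of the fix, with a hypothetical wrapper name used only for illustration:

```go
package resource

import "github.com/kubeflow/pipelines/backend/src/common/util"

// notFoundErr is a hypothetical helper showing the pattern applied in this patch.
func notFoundErr(err error, message string) error {
	// Risky: a "%" inside message is interpreted as a format verb, producing
	// artifacts such as "%!d(MISSING)" and a go vet printf warning.
	//   return util.NewNotFoundError(err, message)

	// Safe: keep the format string constant and pass the message as a value.
	return util.NewNotFoundError(err, "%s", message)
}
```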
@@ -384,13 +411,13 @@ func toApiPipelineVersionV1(pv *model.PipelineVersion) *apiv1beta1.PipelineVersi } apiPipelineVersion.Id = pv.UUID apiPipelineVersion.Name = pv.Name - apiPipelineVersion.CreatedAt = ×tamp.Timestamp{Seconds: pv.CreatedAtInSec} - if p := toApiParametersV1(pv.Parameters); p == nil { + apiPipelineVersion.CreatedAt = timestamppb.New(time.Unix(pv.CreatedAtInSec, 0)) + if p := toApiParametersV1(string(pv.Parameters)); p == nil { return nil } else if len(p) > 0 { apiPipelineVersion.Parameters = p } - apiPipelineVersion.Description = pv.Description + apiPipelineVersion.Description = string(pv.Description) if pv.CodeSourceUrl != "" { apiPipelineVersion.CodeSourceUrl = pv.CodeSourceUrl apiPipelineVersion.PackageUrl = &apiv1beta1.Url{PipelineUrl: pv.CodeSourceUrl} @@ -454,23 +481,23 @@ func toApiPipelineVersion(pv *model.PipelineVersion) *apiv2beta1.PipelineVersion PipelineVersionId: pv.UUID, Name: pv.Name, DisplayName: pv.DisplayName, - Description: pv.Description, - CreatedAt: ×tamp.Timestamp{Seconds: pv.CreatedAtInSec}, + Description: string(pv.Description), + CreatedAt: timestamppb.New(time.Unix(pv.CreatedAtInSec, 0)), } // Infer pipeline url if pv.CodeSourceUrl != "" { apiPipelineVersion.PackageUrl = &apiv2beta1.Url{ - PipelineUrl: pv.PipelineSpecURI, + PipelineUrl: string(pv.PipelineSpecURI), } } else if pv.PipelineSpecURI != "" { apiPipelineVersion.PackageUrl = &apiv2beta1.Url{ - PipelineUrl: pv.PipelineSpecURI, + PipelineUrl: string(pv.PipelineSpecURI), } } // Convert pipeline spec - spec, err := yamlStringToPipelineSpecStruct(pv.PipelineSpec) + spec, err := YamlStringToPipelineSpecStruct(string(pv.PipelineSpec)) if err != nil { return &apiv2beta1.PipelineVersion{ PipelineVersionId: pv.UUID, @@ -772,14 +799,10 @@ func toApiTriggerV1(trigger *model.Trigger) *apiv1beta1.Trigger { var cronSchedule apiv1beta1.CronSchedule cronSchedule.Cron = *trigger.Cron if trigger.CronScheduleStartTimeInSec != nil { - cronSchedule.StartTime = ×tamp.Timestamp{ - Seconds: *trigger.CronScheduleStartTimeInSec, - } + cronSchedule.StartTime = timestamppb.New(time.Unix(*trigger.CronScheduleStartTimeInSec, 0)) } if trigger.CronScheduleEndTimeInSec != nil { - cronSchedule.EndTime = ×tamp.Timestamp{ - Seconds: *trigger.CronScheduleEndTimeInSec, - } + cronSchedule.EndTime = timestamppb.New(time.Unix(*trigger.CronScheduleEndTimeInSec, 0)) } return &apiv1beta1.Trigger{Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &cronSchedule}} } @@ -787,14 +810,10 @@ func toApiTriggerV1(trigger *model.Trigger) *apiv1beta1.Trigger { var periodicSchedule apiv1beta1.PeriodicSchedule periodicSchedule.IntervalSecond = *trigger.IntervalSecond if trigger.PeriodicScheduleStartTimeInSec != nil { - periodicSchedule.StartTime = ×tamp.Timestamp{ - Seconds: *trigger.PeriodicScheduleStartTimeInSec, - } + periodicSchedule.StartTime = timestamppb.New(time.Unix(*trigger.PeriodicScheduleStartTimeInSec, 0)) } if trigger.PeriodicScheduleEndTimeInSec != nil { - periodicSchedule.EndTime = ×tamp.Timestamp{ - Seconds: *trigger.PeriodicScheduleEndTimeInSec, - } + periodicSchedule.EndTime = timestamppb.New(time.Unix(*trigger.PeriodicScheduleEndTimeInSec, 0)) } return &apiv1beta1.Trigger{Trigger: &apiv1beta1.Trigger_PeriodicSchedule{PeriodicSchedule: &periodicSchedule}} } @@ -815,14 +834,10 @@ func toApiTrigger(trigger *model.Trigger) *apiv2beta1.Trigger { var cronSchedule apiv2beta1.CronSchedule cronSchedule.Cron = *trigger.Cron if trigger.CronScheduleStartTimeInSec != nil { - cronSchedule.StartTime = ×tamp.Timestamp{ - Seconds: 
*trigger.CronScheduleStartTimeInSec, - } + cronSchedule.StartTime = timestamppb.New(time.Unix(*trigger.CronScheduleStartTimeInSec, 0)) } if trigger.CronScheduleEndTimeInSec != nil { - cronSchedule.EndTime = ×tamp.Timestamp{ - Seconds: *trigger.CronScheduleEndTimeInSec, - } + cronSchedule.EndTime = timestamppb.New(time.Unix(*trigger.CronScheduleEndTimeInSec, 0)) } return &apiv2beta1.Trigger{Trigger: &apiv2beta1.Trigger_CronSchedule{CronSchedule: &cronSchedule}} } @@ -830,14 +845,10 @@ func toApiTrigger(trigger *model.Trigger) *apiv2beta1.Trigger { var periodicSchedule apiv2beta1.PeriodicSchedule periodicSchedule.IntervalSecond = *trigger.IntervalSecond if trigger.PeriodicScheduleStartTimeInSec != nil { - periodicSchedule.StartTime = ×tamp.Timestamp{ - Seconds: *trigger.PeriodicScheduleStartTimeInSec, - } + periodicSchedule.StartTime = timestamppb.New(time.Unix(*trigger.PeriodicScheduleStartTimeInSec, 0)) } if trigger.PeriodicScheduleEndTimeInSec != nil { - periodicSchedule.EndTime = ×tamp.Timestamp{ - Seconds: *trigger.PeriodicScheduleEndTimeInSec, - } + periodicSchedule.EndTime = timestamppb.New(time.Unix(*trigger.PeriodicScheduleEndTimeInSec, 0)) } return &apiv2beta1.Trigger{Trigger: &apiv2beta1.Trigger_PeriodicSchedule{PeriodicSchedule: &periodicSchedule}} } @@ -971,8 +982,8 @@ func toModelRuntimeConfig(obj interface{}) (*model.RuntimeConfig, error) { return nil, util.NewUnknownApiVersionError("RuntimeConfig", obj) } return &model.RuntimeConfig{ - Parameters: params, - PipelineRoot: root, + Parameters: model.LargeText(params), + PipelineRoot: model.LargeText(root), }, nil } @@ -984,12 +995,12 @@ func toApiRuntimeConfigV1(modelRuntime model.RuntimeConfig) *apiv1beta1.Pipeline if modelRuntime.Parameters == "" && modelRuntime.PipelineRoot == "" { return &apiRuntimeConfig } - runtimeParams := toMapProtoStructParameters(modelRuntime.Parameters) + runtimeParams := toMapProtoStructParameters(string(modelRuntime.Parameters)) if runtimeParams == nil { return nil } apiRuntimeConfig.Parameters = runtimeParams - apiRuntimeConfig.PipelineRoot = modelRuntime.PipelineRoot + apiRuntimeConfig.PipelineRoot = string(modelRuntime.PipelineRoot) return &apiRuntimeConfig } @@ -1001,12 +1012,12 @@ func toApiRuntimeConfig(modelRuntime model.RuntimeConfig) *apiv2beta1.RuntimeCon if modelRuntime.Parameters == "" && modelRuntime.PipelineRoot == "" { return &apiRuntimeConfig } - runtimeParams := toMapProtoStructParameters(modelRuntime.Parameters) + runtimeParams := toMapProtoStructParameters(string(modelRuntime.Parameters)) if runtimeParams == nil { return nil } apiRuntimeConfig.Parameters = runtimeParams - apiRuntimeConfig.PipelineRoot = modelRuntime.PipelineRoot + apiRuntimeConfig.PipelineRoot = string(modelRuntime.PipelineRoot) return &apiRuntimeConfig } @@ -1016,13 +1027,13 @@ func toPipelineSpecRuntimeConfig(cfg *model.RuntimeConfig) *pipelinespec.Pipelin if cfg == nil { return &pipelinespec.PipelineJob_RuntimeConfig{} } - runtimeParams := toMapProtoStructParameters(cfg.Parameters) + runtimeParams := toMapProtoStructParameters(string(cfg.Parameters)) if runtimeParams == nil { return nil } return &pipelinespec.PipelineJob_RuntimeConfig{ ParameterValues: runtimeParams, - GcsOutputDirectory: cfg.PipelineRoot, + GcsOutputDirectory: string(cfg.PipelineRoot), } } @@ -1040,7 +1051,6 @@ func toModelRunMetric(m interface{}, runId string) (*model.RunMetric, error) { default: return nil, util.NewUnknownApiVersionError("RunMetric", m) } - modelMetric := &model.RunMetric{ RunUUID: runId, Name: name, @@ -1048,7 +1058,11 @@ 
func toModelRunMetric(m interface{}, runId string) (*model.RunMetric, error) { NumberValue: val, Format: format, } + if err := validation.ValidateModel(modelMetric); err != nil { + return nil, util.NewInternalServerError(err, "Failed to convert API run metric to internal representation") + } return modelMetric, nil + } // Converts internal run metric representation to its API counterpart. @@ -1118,7 +1132,7 @@ func toModelRunDetails(r interface{}) (*model.RunDetails, error) { if err != nil { return nil, util.NewInternalServerError(err, "Failed to convert a API run to internal run details representation due to pipeline spec parsing error") } - modelRunDetails.PipelineRuntimeManifest = spec + modelRunDetails.PipelineRuntimeManifest = model.LargeText(spec) } return modelRunDetails, nil case *apiv2beta1.RunDetails: @@ -1130,8 +1144,8 @@ func toModelRunDetails(r interface{}) (*model.RunDetails, error) { return nil, util.Wrap(err, "Failed to convert v1beta1 API run detail to its internal representation") } apiRuntimeV1 := r.GetPipelineRuntime() - modelRunDetails.PipelineRuntimeManifest = apiRuntimeV1.GetPipelineManifest() - modelRunDetails.WorkflowRuntimeManifest = apiRuntimeV1.GetWorkflowManifest() + modelRunDetails.PipelineRuntimeManifest = model.LargeText(apiRuntimeV1.GetPipelineManifest()) + modelRunDetails.WorkflowRuntimeManifest = model.LargeText(apiRuntimeV1.GetWorkflowManifest()) return modelRunDetails, nil default: return nil, util.NewUnknownApiVersionError("RunDetails", r) @@ -1209,8 +1223,8 @@ func toModelRun(r interface{}) (*model.Run, error) { if err != nil { return nil, util.Wrap(err, "Failed to convert v1beta1 API run to its internal representation due to runtime config conversion error") } - cfgParams = cfg.Parameters - pipelineRoot = cfg.PipelineRoot + cfgParams = string(cfg.Parameters) + pipelineRoot = string(cfg.PipelineRoot) pipelineSpec = apiRunV1.GetPipelineSpec().GetPipelineManifest() workflowSpec = apiRunV1.GetPipelineSpec().GetWorkflowManifest() @@ -1285,8 +1299,8 @@ func toModelRun(r interface{}) (*model.Run, error) { if err != nil { return nil, util.Wrap(err, "Failed to convert API run to its internal representation due to runtime config conversion error") } - cfgParams = cfg.Parameters - pipelineRoot = cfg.PipelineRoot + cfgParams = string(cfg.Parameters) + pipelineRoot = string(cfg.PipelineRoot) if apiRunV2.GetPipelineSpec() == nil { pipelineSpec = "" @@ -1318,12 +1332,12 @@ func toModelRun(r interface{}) (*model.Run, error) { PipelineId: pipelineId, PipelineVersionId: pipelineVersionId, PipelineName: pipelineName, - PipelineSpecManifest: pipelineSpec, - WorkflowSpecManifest: workflowSpec, - Parameters: specParams, + PipelineSpecManifest: model.LargeText(pipelineSpec), + WorkflowSpecManifest: model.LargeText(workflowSpec), + Parameters: model.LargeText(specParams), RuntimeConfig: model.RuntimeConfig{ - Parameters: cfgParams, - PipelineRoot: pipelineRoot, + Parameters: model.LargeText(cfgParams), + PipelineRoot: model.LargeText(pipelineRoot), }, }, RunDetails: model.RunDetails{ @@ -1332,11 +1346,18 @@ func toModelRun(r interface{}) (*model.Run, error) { CreatedAtInSec: createTime, ScheduledAtInSec: scheduleTime, FinishedAtInSec: finishTime, - PipelineRuntimeManifest: runtimePipelineSpec, - WorkflowRuntimeManifest: runtimeWorkflowSpec, + PipelineRuntimeManifest: model.LargeText(runtimePipelineSpec), + WorkflowRuntimeManifest: model.LargeText(runtimeWorkflowSpec), TaskDetails: tasks, }, } + + if err := validation.ValidateModel(&modelRun); err != nil { + return nil, 
util.NewInternalServerError( + err, + "Failed to convert API run to its internal representation", + ) + } return &modelRun, nil } @@ -1346,7 +1367,7 @@ func toModelRun(r interface{}) (*model.Run, error) { func toApiRunV1(r *model.Run) *apiv1beta1.Run { r = r.ToV1() // v1 parameters - specParams := toApiParametersV1(r.PipelineSpec.Parameters) + specParams := toApiParametersV1(string(r.Parameters)) if specParams == nil { return &apiv1beta1.Run{ Id: r.UUID, @@ -1446,21 +1467,21 @@ func toApiRunV1(r *model.Run) *apiv1beta1.Run { specManifest := r.PipelineSpec.PipelineSpecManifest wfManifest := r.PipelineSpec.WorkflowSpecManifest return &apiv1beta1.Run{ - CreatedAt: ×tamp.Timestamp{Seconds: r.RunDetails.CreatedAtInSec}, + CreatedAt: timestamppb.New(time.Unix(r.RunDetails.CreatedAtInSec, 0)), Id: r.UUID, Metrics: metrics, Name: r.DisplayName, ServiceAccount: r.ServiceAccount, StorageState: apiv1beta1.Run_StorageState(apiv1beta1.Run_StorageState_value[string(r.StorageState.ToV1())]), Description: r.Description, - ScheduledAt: ×tamp.Timestamp{Seconds: r.RunDetails.ScheduledAtInSec}, - FinishedAt: ×tamp.Timestamp{Seconds: r.RunDetails.FinishedAtInSec}, + ScheduledAt: timestamppb.New(time.Unix(r.RunDetails.ScheduledAtInSec, 0)), + FinishedAt: timestamppb.New(time.Unix(r.RunDetails.FinishedAtInSec, 0)), Status: string(r.RunDetails.State.ToV1()), PipelineSpec: &apiv1beta1.PipelineSpec{ PipelineId: r.PipelineSpec.PipelineId, PipelineName: r.PipelineSpec.PipelineName, - WorkflowManifest: wfManifest, - PipelineManifest: specManifest, + WorkflowManifest: string(wfManifest), + PipelineManifest: string(specManifest), Parameters: specParams, RuntimeConfig: runtimeConfig, }, @@ -1482,7 +1503,7 @@ func toApiRun(r *model.Run) *apiv2beta1.Run { } } if len(runtimeConfig.GetParameters()) == 0 && len(runtimeConfig.GetPipelineRoot()) == 0 { - if params := toMapProtoStructParameters(r.PipelineSpec.Parameters); len(params) > 0 { + if params := toMapProtoStructParameters(string(r.Parameters)); len(params) > 0 { runtimeConfig.Parameters = params } else { runtimeConfig = nil @@ -1507,9 +1528,9 @@ func toApiRun(r *model.Run) *apiv2beta1.Run { StorageState: toApiRunStorageState(&r.StorageState), State: toApiRuntimeState(&r.RunDetails.State), StateHistory: toApiRuntimeStatuses(r.RunDetails.StateHistory), - CreatedAt: ×tamp.Timestamp{Seconds: r.RunDetails.CreatedAtInSec}, - ScheduledAt: ×tamp.Timestamp{Seconds: r.RunDetails.ScheduledAtInSec}, - FinishedAt: ×tamp.Timestamp{Seconds: r.RunDetails.FinishedAtInSec}, + CreatedAt: timestamppb.New(time.Unix(r.RunDetails.CreatedAtInSec, 0)), + ScheduledAt: timestamppb.New(time.Unix(r.RunDetails.ScheduledAtInSec, 0)), + FinishedAt: timestamppb.New(time.Unix(r.RunDetails.FinishedAtInSec, 0)), RunDetails: apiRd, } err := util.NewInvalidInputError("Failed to parse the pipeline source") @@ -1522,7 +1543,7 @@ func toApiRun(r *model.Run) *apiv2beta1.Run { } return apiRunV2 } else if r.PipelineSpec.PipelineSpecManifest != "" { - spec, err1 := yamlStringToPipelineSpecStruct(r.PipelineSpec.PipelineSpecManifest) + spec, err1 := YamlStringToPipelineSpecStruct(string(r.PipelineSpecManifest)) if err1 == nil { apiRunV2.PipelineSource = &apiv2beta1.Run_PipelineSpec{ PipelineSpec: spec, @@ -1531,7 +1552,7 @@ func toApiRun(r *model.Run) *apiv2beta1.Run { } err = util.Wrap(err1, err.Error()).(*util.UserError) } else if r.PipelineSpec.WorkflowSpecManifest != "" { - spec, err1 := yamlStringToPipelineSpecStruct(r.PipelineSpec.WorkflowSpecManifest) + spec, err1 := 
YamlStringToPipelineSpecStruct(string(r.WorkflowSpecManifest)) if err1 == nil { apiRunV2.PipelineSource = &apiv2beta1.Run_PipelineSpec{ PipelineSpec: spec, @@ -1577,16 +1598,16 @@ func toApiRunDetailV1(r *model.Run) *apiv1beta1.RunDetail { } if r.RunDetails.WorkflowRuntimeManifest == "" { apiRunDetails.PipelineRuntime = &apiv1beta1.PipelineRuntime{ - PipelineManifest: r.RunDetails.PipelineRuntimeManifest, + PipelineManifest: string(r.PipelineRuntimeManifest), } } else if r.RunDetails.PipelineRuntimeManifest == "" { apiRunDetails.PipelineRuntime = &apiv1beta1.PipelineRuntime{ - WorkflowManifest: r.RunDetails.WorkflowRuntimeManifest, + WorkflowManifest: string(r.WorkflowRuntimeManifest), } } else { apiRunDetails.PipelineRuntime = &apiv1beta1.PipelineRuntime{ - PipelineManifest: r.RunDetails.PipelineRuntimeManifest, - WorkflowManifest: r.RunDetails.WorkflowRuntimeManifest, + PipelineManifest: string(r.PipelineRuntimeManifest), + WorkflowManifest: string(r.WorkflowRuntimeManifest), } } return apiRunDetails @@ -1678,8 +1699,8 @@ func toModelTask(t interface{}) (*model.Task, error) { ParentTaskId: parentTaskId, State: model.RuntimeState(state).ToV2(), StateHistory: stateHistory, - MLMDInputs: inputs, - MLMDOutputs: outputs, + MLMDInputs: model.LargeText(inputs), + MLMDOutputs: model.LargeText(outputs), ChildrenPods: children, }, nil } @@ -1741,8 +1762,8 @@ func toApiTaskV1(task *model.Task) *apiv1beta1.Task { PipelineName: task.PipelineName, RunId: task.RunId, MlmdExecutionID: task.MLMDExecutionID, - CreatedAt: ×tamp.Timestamp{Seconds: task.CreatedTimestamp}, - FinishedAt: ×tamp.Timestamp{Seconds: task.FinishedTimestamp}, + CreatedAt: timestamppb.New(time.Unix(task.CreatedTimestamp, 0)), + FinishedAt: timestamppb.New(time.Unix(task.FinishedTimestamp, 0)), Fingerprint: task.Fingerprint, } } @@ -1787,9 +1808,9 @@ func toApiPipelineTaskDetail(t *model.Task) *apiv2beta1.PipelineTaskDetail { RunId: t.RunId, TaskId: t.UUID, DisplayName: t.Name, - CreateTime: ×tamp.Timestamp{Seconds: t.CreatedTimestamp}, - StartTime: ×tamp.Timestamp{Seconds: t.StartedTimestamp}, - EndTime: ×tamp.Timestamp{Seconds: t.FinishedTimestamp}, + CreateTime: timestamppb.New(time.Unix(t.CreatedTimestamp, 0)), + StartTime: timestamppb.New(time.Unix(t.StartedTimestamp, 0)), + EndTime: timestamppb.New(time.Unix(t.FinishedTimestamp, 0)), State: apiv2beta1.RuntimeState(apiv2beta1.RuntimeState_value[t.State.ToString()]), ExecutionId: execId, Inputs: inputArtifacts, @@ -1890,8 +1911,8 @@ func toModelJob(j interface{}) (*model.Job, error) { if err != nil { return nil, util.Wrap(err, "Failed to convert v1beta1 API recurring run to its internal representation due to runtime config conversion error") } - cfgParams = cfg.Parameters - pipelineRoot = cfg.PipelineRoot + cfgParams = string(cfg.Parameters) + pipelineRoot = string(cfg.PipelineRoot) pipelineSpec = apiJob.GetPipelineSpec().GetPipelineManifest() workflowSpec = apiJob.GetPipelineSpec().GetWorkflowManifest() @@ -1912,8 +1933,8 @@ func toModelJob(j interface{}) (*model.Job, error) { if err != nil { return nil, util.Wrap(err, "Failed to convert API recurring run to its internal representation due to runtime config conversion error") } - cfgParams = cfg.Parameters - pipelineRoot = cfg.PipelineRoot + cfgParams = string(cfg.Parameters) + pipelineRoot = string(cfg.PipelineRoot) jobName = apiJob.GetDisplayName() if jobName == "" { @@ -1993,12 +2014,12 @@ func toModelJob(j interface{}) (*model.Job, error) { PipelineId: pipelineId, PipelineName: pipelineName, PipelineVersionId: 
pipelineVersionId, - PipelineSpecManifest: pipelineSpec, - WorkflowSpecManifest: workflowSpec, - Parameters: specParams, + PipelineSpecManifest: model.LargeText(pipelineSpec), + WorkflowSpecManifest: model.LargeText(workflowSpec), + Parameters: model.LargeText(specParams), RuntimeConfig: model.RuntimeConfig{ - Parameters: cfgParams, - PipelineRoot: pipelineRoot, + Parameters: model.LargeText(cfgParams), + PipelineRoot: model.LargeText(pipelineRoot), }, }, }, nil @@ -2080,7 +2101,7 @@ func toApiJobStatus(s string) string { // Supports v1beta1 API. func toApiJobV1(j *model.Job) *apiv1beta1.Job { j = j.ToV1() - specParams := toApiParametersV1(j.PipelineSpec.Parameters) + specParams := toApiParametersV1(string(j.Parameters)) if specParams == nil { return &apiv1beta1.Job{ Id: j.UUID, @@ -2171,17 +2192,17 @@ func toApiJobV1(j *model.Job) *apiv1beta1.Job { ServiceAccount: j.ServiceAccount, Description: j.Description, Enabled: j.Enabled, - CreatedAt: ×tamp.Timestamp{Seconds: j.CreatedAtInSec}, + CreatedAt: timestamppb.New(time.Unix(j.CreatedAtInSec, 0)), Status: toApiJobStatus(j.Conditions), - UpdatedAt: ×tamp.Timestamp{Seconds: j.UpdatedAtInSec}, + UpdatedAt: timestamppb.New(time.Unix(j.UpdatedAtInSec, 0)), MaxConcurrency: j.MaxConcurrency, NoCatchup: j.NoCatchup, Trigger: trigger, PipelineSpec: &apiv1beta1.PipelineSpec{ PipelineId: j.PipelineSpec.PipelineId, PipelineName: j.PipelineSpec.PipelineName, - WorkflowManifest: wfManifest, - PipelineManifest: specManifest, + WorkflowManifest: string(wfManifest), + PipelineManifest: string(specManifest), Parameters: specParams, RuntimeConfig: runtimeConfig, }, @@ -2201,7 +2222,7 @@ func toApiRecurringRun(j *model.Job) *apiv2beta1.RecurringRun { } } if runtimeConfig == nil || (len(runtimeConfig.GetParameters()) == 0 && len(runtimeConfig.GetPipelineRoot()) == 0) { - if params := toMapProtoStructParameters(j.PipelineSpec.Parameters); len(params) > 0 { + if params := toMapProtoStructParameters(string(j.Parameters)); len(params) > 0 { runtimeConfig.Parameters = params } else { runtimeConfig = nil @@ -2214,8 +2235,8 @@ func toApiRecurringRun(j *model.Job) *apiv2beta1.RecurringRun { ServiceAccount: j.ServiceAccount, Description: j.Description, Status: toApiRecurringRunStatus(j.Conditions), - CreatedAt: ×tamp.Timestamp{Seconds: j.CreatedAtInSec}, - UpdatedAt: ×tamp.Timestamp{Seconds: j.UpdatedAtInSec}, + CreatedAt: timestamppb.New(time.Unix(j.CreatedAtInSec, 0)), + UpdatedAt: timestamppb.New(time.Unix(j.UpdatedAtInSec, 0)), MaxConcurrency: j.MaxConcurrency, NoCatchup: j.NoCatchup, Trigger: toApiTrigger(&j.Trigger), @@ -2225,7 +2246,7 @@ func toApiRecurringRun(j *model.Job) *apiv2beta1.RecurringRun { } if j.PipelineSpec.PipelineId == "" && j.PipelineSpec.PipelineVersionId == "" { - spec, err := yamlStringToPipelineSpecStruct(j.PipelineSpec.PipelineSpecManifest) + spec, err := YamlStringToPipelineSpecStruct(string(j.PipelineSpecManifest)) if err != nil { return &apiv2beta1.RecurringRun{ RecurringRunId: j.UUID, diff --git a/backend/src/apiserver/server/api_converter_test.go b/backend/src/apiserver/server/api_converter_test.go index 312f4ceebac..e142c62c105 100644 --- a/backend/src/apiserver/server/api_converter_test.go +++ b/backend/src/apiserver/server/api_converter_test.go @@ -17,9 +17,9 @@ package server import ( "strings" "testing" + "time" "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" - "github.com/golang/protobuf/ptypes/timestamp" "github.com/google/go-cmp/cmp" apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" 
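The new test cases below assert errors such as `Pipeline.Name length cannot exceed 128` and `Run.Namespace length cannot exceed 63` coming out of `validation.ValidateModel`. The validation package itself is not part of these hunks; the following is only a hypothetical illustration of how per-field length limits could produce errors of that shape, not the project's actual implementation:

```go
// Hypothetical sketch only; the "maxlen" tag name and the reflection approach
// are assumptions, not the real backend/src/apiserver/validation package.
package validation

import (
	"fmt"
	"reflect"
	"strconv"
)

// ValidateModel rejects string fields whose length exceeds a declared maximum,
// yielding messages like "Pipeline.Name length cannot exceed 128".
func ValidateModel(m interface{}) error {
	v := reflect.Indirect(reflect.ValueOf(m))
	if v.Kind() != reflect.Struct {
		return nil
	}
	t := v.Type()
	for i := 0; i < t.NumField(); i++ {
		field := t.Field(i)
		limit, ok := field.Tag.Lookup("maxlen")
		if !ok || field.Type.Kind() != reflect.String {
			continue
		}
		max, err := strconv.Atoi(limit)
		if err != nil {
			continue
		}
		if v.Field(i).Len() > max {
			return fmt.Errorf("%s.%s length cannot exceed %d", t.Name(), field.Name, max)
		}
	}
	return nil
}
```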
apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" @@ -144,6 +144,16 @@ func TestToModelExperiment(t *testing.T) { "Experiment must have a non-empty name", nil, }, + { + "name too long v1", + &apiv1beta1.Experiment{ + Name: strings.Repeat("a", 129), // Max is 128 + Description: "This is an experiment with a very long name", + }, + true, + "Experiment.Name length cannot exceed 128", + nil, + }, } for _, tc := range tests { @@ -274,6 +284,35 @@ func TestToModelPipeline(t *testing.T) { Namespace: "", }, }, + { + name: "name too long v1", + pipeline: &apiv1beta1.Pipeline{ + Name: strings.Repeat("a", 129), // Max 128 + Description: "desc", + }, + wantError: true, + errorMessage: "Pipeline.Name length cannot exceed 128", + expectedModelPipeline: nil, + }, + { + name: "namespace too long v1", + pipeline: &apiv1beta1.Pipeline{ + Name: "p1", + Description: "desc", + ResourceReferences: []*apiv1beta1.ResourceReference{ + { + Key: &apiv1beta1.ResourceKey{ + Type: apiv1beta1.ResourceType_NAMESPACE, + Id: strings.Repeat("n", 64), // Max 63 + }, + Relationship: apiv1beta1.Relationship_OWNER, + }, + }, + }, + wantError: true, + errorMessage: "Pipeline.Namespace length cannot exceed 63", + expectedModelPipeline: nil, + }, { "Empty namespace v2", &apiv2beta1.Pipeline{ @@ -326,6 +365,28 @@ func TestToModelPipeline(t *testing.T) { Namespace: "ns3", }, }, + { + name: "name too long v2", + pipeline: &apiv2beta1.Pipeline{ + DisplayName: strings.Repeat("a", 129), // Max is 128 + Description: "This is a pipeline with a very long name", + Namespace: "ns", + }, + wantError: true, + errorMessage: "Pipeline.Name length cannot exceed 128", + expectedModelPipeline: nil, + }, + { + name: "namespace too long v2", + pipeline: &apiv2beta1.Pipeline{ + DisplayName: "p_long_ns", + Description: "This is a pipeline with a very long namespace", + Namespace: strings.Repeat("n", 64), // Max is 63 + }, + wantError: true, + errorMessage: "Pipeline.Namespace length cannot exceed 63", + expectedModelPipeline: nil, + }, } for _, tc := range tests { @@ -369,6 +430,8 @@ func TestToModelRunDetail(t *testing.T) { manifest string templateType template.TemplateType expectedModelRunDetail *model.Run + wantErr bool + errMsg string }{ { name: "v1", @@ -416,6 +479,8 @@ func TestToModelRunDetail(t *testing.T) { }, StorageState: model.StorageStateAvailable, }, + wantErr: false, + errMsg: "", }, { name: "v2", @@ -461,13 +526,88 @@ func TestToModelRunDetail(t *testing.T) { }, StorageState: model.StorageStateAvailable, }, + wantErr: false, + errMsg: "", + }, + { + name: "v1 namespace too long", + apiRun: &apiv1beta1.Run{ + Id: "run1", + Name: "name1", + Description: "desc", + PipelineSpec: &apiv1beta1.PipelineSpec{ + Parameters: []*apiv1beta1.Parameter{{Name: "param2", Value: "world"}}, + }, + ResourceReferences: []*apiv1beta1.ResourceReference{ + { + Key: &apiv1beta1.ResourceKey{Type: apiv1beta1.ResourceType_NAMESPACE, Id: strings.Repeat("n", 64)}, + Relationship: apiv1beta1.Relationship_OWNER, + }, + { + Key: &apiv1beta1.ResourceKey{Type: apiv1beta1.ResourceType_EXPERIMENT, Id: "exp1"}, + Relationship: apiv1beta1.Relationship_OWNER, + }, + { + Key: &apiv1beta1.ResourceKey{Type: apiv1beta1.ResourceType_PIPELINE_VERSION, Id: "pv1"}, + Relationship: apiv1beta1.Relationship_CREATOR, + }, + }, + }, + workflow: util.NewWorkflow(&v1alpha1.Workflow{ + ObjectMeta: v1.ObjectMeta{Name: "workflow-name", UID: "123"}, + Status: v1alpha1.WorkflowStatus{Phase: "running"}, + }), + manifest: "workflow spec", + templateType: template.V1, + 
expectedModelRunDetail: nil, + wantErr: true, + errMsg: "Run.Namespace length cannot exceed 63", + }, + { + name: "v1 experimentId too long", + apiRun: &apiv1beta1.Run{ + Id: "run1", + Name: "name1", + Description: "desc", + PipelineSpec: &apiv1beta1.PipelineSpec{ + Parameters: []*apiv1beta1.Parameter{{Name: "param2", Value: "world"}}, + }, + ResourceReferences: []*apiv1beta1.ResourceReference{ + { + Key: &apiv1beta1.ResourceKey{Type: apiv1beta1.ResourceType_NAMESPACE, Id: "ns1"}, + Relationship: apiv1beta1.Relationship_OWNER, + }, + { + Key: &apiv1beta1.ResourceKey{Type: apiv1beta1.ResourceType_EXPERIMENT, Id: strings.Repeat("e", 65)}, + Relationship: apiv1beta1.Relationship_OWNER, + }, + { + Key: &apiv1beta1.ResourceKey{Type: apiv1beta1.ResourceType_PIPELINE_VERSION, Id: "pv1"}, + Relationship: apiv1beta1.Relationship_CREATOR, + }, + }, + }, + workflow: util.NewWorkflow(&v1alpha1.Workflow{ + ObjectMeta: v1.ObjectMeta{Name: "workflow-name", UID: "123"}, + Status: v1alpha1.WorkflowStatus{Phase: "running"}, + }), + manifest: "workflow spec", + templateType: template.V1, + expectedModelRunDetail: nil, + wantErr: true, + errMsg: "Run.ExperimentId length cannot exceed 64", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { modelRunDetail, err := toModelRun(tt.apiRun) - assert.Nil(t, err) - assert.Equal(t, tt.expectedModelRunDetail, modelRunDetail) + if tt.wantErr { + assert.NotNil(t, err) + assert.Contains(t, err.Error(), tt.errMsg) + } else { + assert.Nil(t, err) + assert.Equal(t, tt.expectedModelRunDetail, modelRunDetail) + } }) } } @@ -489,7 +629,7 @@ func TestToModelJob(t *testing.T) { NoCatchup: true, Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: ×tamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}, }, @@ -530,7 +670,7 @@ func TestToModelJob(t *testing.T) { NoCatchup: true, Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: ×tamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}, }, @@ -676,6 +816,34 @@ func TestToModelRunMetric(t *testing.T) { Format: "RAW", } assert.Equal(t, expectedModelRunMetric, actualModelRunMetric) + + // Test Name length overflow + { + longName := strings.Repeat("a", 192) + apiRunMetric := &apiv1beta1.RunMetric{ + Name: longName, + NodeId: "node-1", + Value: &apiv1beta1.RunMetric_NumberValue{NumberValue: 0.88}, + Format: apiv1beta1.RunMetric_RAW, + } + _, err := toModelRunMetric(apiRunMetric, "run-1") + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "RunMetric.Name length cannot exceed 191") + } + + // Test NodeID length overflow + { + longNodeID := strings.Repeat("a", 192) + apiRunMetric := &apiv1beta1.RunMetric{ + Name: "metric-1", + NodeId: longNodeID, + Value: &apiv1beta1.RunMetric_NumberValue{NumberValue: 0.88}, + Format: apiv1beta1.RunMetric_RAW, + } + _, err := toModelRunMetric(apiRunMetric, "run-1") + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "RunMetric.NodeID length cannot exceed 191") + } } func TestToModelPipelineVersion(t *testing.T) { @@ -754,6 +922,19 @@ func TestToModelPipelineVersion(t *testing.T) { false, "", }, + { + name: "name too long v2", + pipeline: &apiv2beta1.PipelineVersion{ + DisplayName: strings.Repeat("a", 128), // Max is 127 + PipelineId: "pipeline 333", + PackageUrl: &apiv2beta1.Url{PipelineUrl: "http://package/3333"}, + CodeSourceUrl: "http://repo/3333", + 
Description: "This is pipeline version 333", + }, + expectedPipelineVersion: nil, + isError: true, + errMsg: "PipelineVersion.Name length cannot exceed 127", + }, { "missing package Url v2", &apiv2beta1.PipelineVersion{ @@ -800,11 +981,11 @@ func TestToApiPipelineV1(t *testing.T) { apiPipeline := toApiPipelineV1(modelPipeline, modelVersion) expectedApiPipeline := &apiv1beta1.Pipeline{ Id: "pipeline1", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), Url: &apiv1beta1.Url{PipelineUrl: "http://repo/22222"}, DefaultVersion: &apiv1beta1.PipelineVersion{ Id: "pipelineversion1", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), Description: "desc1", CodeSourceUrl: "http://repo/22222", PackageUrl: &apiv1beta1.Url{PipelineUrl: "http://repo/22222"}, @@ -872,11 +1053,11 @@ func TestToApiPipelinesV1(t *testing.T) { expectedPipelines := []*apiv1beta1.Pipeline{ { Id: "pipeline1", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), Url: &apiv1beta1.Url{PipelineUrl: "http://repo/22222"}, DefaultVersion: &apiv1beta1.PipelineVersion{ Id: "pipelineversion1", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), Description: "desc1", CodeSourceUrl: "http://repo/22222", PackageUrl: &apiv1beta1.Url{PipelineUrl: "http://repo/22222"}, @@ -1172,9 +1353,9 @@ func TestToApiRunDetailV1_RuntimeParams(t *testing.T) { Id: "run123", Name: "displayName123", StorageState: apiv1beta1.Run_STORAGESTATE_AVAILABLE, - CreatedAt: ×tamp.Timestamp{Seconds: 1}, - ScheduledAt: ×tamp.Timestamp{Seconds: 1}, - FinishedAt: ×tamp.Timestamp{Seconds: 1}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), + ScheduledAt: timestamppb.New(time.Unix(1, 0)), + FinishedAt: timestamppb.New(time.Unix(1, 0)), Status: "Running", PipelineSpec: &apiv1beta1.PipelineSpec{ WorkflowManifest: "manifest", @@ -1235,9 +1416,9 @@ func TestToApiRunDetailV1_V1Params(t *testing.T) { Id: "run123", Name: "displayName123", StorageState: apiv1beta1.Run_STORAGESTATE_AVAILABLE, - CreatedAt: ×tamp.Timestamp{Seconds: 1}, - ScheduledAt: ×tamp.Timestamp{Seconds: 1}, - FinishedAt: ×tamp.Timestamp{Seconds: 1}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), + ScheduledAt: timestamppb.New(time.Unix(1, 0)), + FinishedAt: timestamppb.New(time.Unix(1, 0)), Status: "Running", PipelineSpec: &apiv1beta1.PipelineSpec{ WorkflowManifest: "manifest", @@ -1326,9 +1507,9 @@ func TestToApiRunsV1(t *testing.T) { Id: "run1", Name: "displayName1", StorageState: apiv1beta1.Run_STORAGESTATE_AVAILABLE, - CreatedAt: ×tamp.Timestamp{Seconds: 1}, - ScheduledAt: ×tamp.Timestamp{Seconds: 1}, - FinishedAt: ×tamp.Timestamp{}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), + ScheduledAt: timestamppb.New(time.Unix(1, 0)), + FinishedAt: ×tamppb.Timestamp{Seconds: 0, Nanos: 0}, Status: "Running", PipelineSpec: &apiv1beta1.PipelineSpec{ WorkflowManifest: "manifest", @@ -1349,9 +1530,9 @@ func TestToApiRunsV1(t *testing.T) { Id: "run2", Name: "displayName2", StorageState: apiv1beta1.Run_STORAGESTATE_AVAILABLE, - CreatedAt: ×tamp.Timestamp{Seconds: 2}, - ScheduledAt: ×tamp.Timestamp{Seconds: 2}, - FinishedAt: ×tamp.Timestamp{}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + ScheduledAt: timestamppb.New(time.Unix(2, 0)), + FinishedAt: ×tamppb.Timestamp{Seconds: 0, Nanos: 0}, Status: "Succeeded", ResourceReferences: []*apiv1beta1.ResourceReference{ { @@ -1390,8 +1571,8 @@ func TestToApiTask(t *testing.T) { PipelineName: "pipeline/my-pipeline", RunId: 
NonDefaultFakeUUID, MlmdExecutionID: "1", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, - FinishedAt: ×tamp.Timestamp{Seconds: 2}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), + FinishedAt: timestamppb.New(time.Unix(2, 0)), Fingerprint: "123", } @@ -1428,8 +1609,8 @@ func TestToApiTasks(t *testing.T) { PipelineName: "namespace/ns1/pipeline/my-pipeline-1", RunId: "123e4567-e89b-12d3-a456-426655440001", MlmdExecutionID: "1", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, - FinishedAt: ×tamp.Timestamp{Seconds: 2}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), + FinishedAt: timestamppb.New(time.Unix(2, 0)), Fingerprint: "123", }, { @@ -1438,8 +1619,8 @@ func TestToApiTasks(t *testing.T) { PipelineName: "namespace/ns1/pipeline/my-pipeline-2", RunId: "123e4567-e89b-12d3-a456-426655440003", MlmdExecutionID: "2", - CreatedAt: ×tamp.Timestamp{Seconds: 3}, - FinishedAt: ×tamp.Timestamp{Seconds: 4}, + CreatedAt: ×tamppb.Timestamp{Seconds: 3, Nanos: 0}, + FinishedAt: ×tamppb.Timestamp{Seconds: 4, Nanos: 0}, Fingerprint: "124", }, } @@ -1478,12 +1659,12 @@ func TestCronScheduledJobtoApiJob(t *testing.T) { Id: "job1", Name: "name 1", Enabled: true, - CreatedAt: ×tamp.Timestamp{Seconds: 1}, - UpdatedAt: ×tamp.Timestamp{Seconds: 1}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), + UpdatedAt: timestamppb.New(time.Unix(1, 0)), MaxConcurrency: 1, Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: ×tamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * *", }}, }, @@ -1533,12 +1714,12 @@ func TestPeriodicScheduledJobtoApiJob(t *testing.T) { Id: "job1", Name: "name 1", Enabled: true, - CreatedAt: ×tamp.Timestamp{Seconds: 1}, - UpdatedAt: ×tamp.Timestamp{Seconds: 1}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), + UpdatedAt: timestamppb.New(time.Unix(1, 0)), MaxConcurrency: 1, Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_PeriodicSchedule{PeriodicSchedule: &apiv1beta1.PeriodicSchedule{ - StartTime: ×tamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), IntervalSecond: 3, }}, }, @@ -1578,8 +1759,8 @@ func TestNonScheduledJobtoApiJob(t *testing.T) { Id: "job1", Name: "name1", Enabled: true, - CreatedAt: ×tamp.Timestamp{Seconds: 1}, - UpdatedAt: ×tamp.Timestamp{Seconds: 1}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), + UpdatedAt: timestamppb.New(time.Unix(1, 0)), MaxConcurrency: 1, PipelineSpec: &apiv1beta1.PipelineSpec{ Parameters: []*apiv1beta1.Parameter{{Name: "param2", Value: "world"}}, @@ -1667,8 +1848,8 @@ func TestToApiJob_V2(t *testing.T) { Id: "job1", Name: "name 1", Enabled: true, - CreatedAt: ×tamp.Timestamp{Seconds: 2}, - UpdatedAt: ×tamp.Timestamp{Seconds: 2}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + UpdatedAt: timestamppb.New(time.Unix(2, 0)), MaxConcurrency: 2, NoCatchup: true, Status: "STATUS_UNSPECIFIED", @@ -1680,7 +1861,7 @@ func TestToApiJob_V2(t *testing.T) { }, Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: ×tamp.Timestamp{Seconds: 2}, + StartTime: timestamppb.New(time.Unix(2, 0)), Cron: "2 * *", }}, }, @@ -1751,13 +1932,13 @@ func TestToApiJobs(t *testing.T) { Id: "job1", Name: "name 1", Enabled: true, - CreatedAt: ×tamp.Timestamp{Seconds: 1}, - UpdatedAt: ×tamp.Timestamp{Seconds: 1}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), + UpdatedAt: timestamppb.New(time.Unix(1, 0)), MaxConcurrency: 1, Status: "STATUS_UNSPECIFIED", Trigger: &apiv1beta1.Trigger{ Trigger: 
&apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: ×tamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * *", }}, }, @@ -1777,14 +1958,14 @@ func TestToApiJobs(t *testing.T) { Id: "job2", Name: "name 2", Enabled: true, - CreatedAt: ×tamp.Timestamp{Seconds: 2}, - UpdatedAt: ×tamp.Timestamp{Seconds: 2}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + UpdatedAt: timestamppb.New(time.Unix(2, 0)), MaxConcurrency: 2, NoCatchup: true, Status: "STATUS_UNSPECIFIED", Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: ×tamp.Timestamp{Seconds: 2}, + StartTime: timestamppb.New(time.Unix(2, 0)), Cron: "2 * *", }}, }, @@ -1927,7 +2108,7 @@ func TestToApiExperimentsV1(t *testing.T) { Id: "exp1", Name: "experiment1", Description: "experiment1 was created using V2 APIV1BETA1", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), StorageState: apiv1beta1.Experiment_StorageState(apiv1beta1.Experiment_StorageState_value["STORAGESTATE_AVAILABLE"]), ResourceReferences: []*apiv1beta1.ResourceReference{ { @@ -1940,7 +2121,7 @@ func TestToApiExperimentsV1(t *testing.T) { Id: "exp2", Name: "experiment2", Description: "experiment2 was created using V2 APIV1BETA1", - CreatedAt: ×tamp.Timestamp{Seconds: 2}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), StorageState: apiv1beta1.Experiment_StorageState(apiv1beta1.Experiment_StorageState_value["STORAGESTATE_ARCHIVED"]), ResourceReferences: []*apiv1beta1.ResourceReference{ { @@ -1953,7 +2134,7 @@ func TestToApiExperimentsV1(t *testing.T) { Id: "exp3", Name: "experiment3", Description: "experiment3 was created using V1 APIV1BETA1", - CreatedAt: ×tamp.Timestamp{Seconds: 3}, + CreatedAt: ×tamppb.Timestamp{Seconds: 3, Nanos: 0}, StorageState: apiv1beta1.Experiment_StorageState(apiv1beta1.Experiment_StorageState_value["STORAGESTATE_AVAILABLE"]), ResourceReferences: []*apiv1beta1.ResourceReference{ { @@ -1966,7 +2147,7 @@ func TestToApiExperimentsV1(t *testing.T) { Id: "exp4", Name: "experiment4", Description: "experiment4 was created using V1 APIV1BETA1", - CreatedAt: ×tamp.Timestamp{Seconds: 4}, + CreatedAt: ×tamppb.Timestamp{Seconds: 4, Nanos: 0}, StorageState: apiv1beta1.Experiment_StorageState(apiv1beta1.Experiment_StorageState_value["STORAGESTATE_ARCHIVED"]), ResourceReferences: []*apiv1beta1.ResourceReference{ { @@ -1980,7 +2161,7 @@ func TestToApiExperimentsV1(t *testing.T) { Id: "exp5", Name: "experiment5", Description: "experiment5 was created using V2 APIV1BETA1", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), StorageState: apiv1beta1.Experiment_StorageState(apiv1beta1.Experiment_StorageState_value["STORAGESTATE_UNSPECIFIED"]), ResourceReferences: []*apiv1beta1.ResourceReference{ { @@ -2040,32 +2221,32 @@ func TestToApiExperiments(t *testing.T) { ExperimentId: "exp1", DisplayName: "experiment1", Description: "My name is experiment1", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, - LastRunCreatedAt: ×tamp.Timestamp{Seconds: 1}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), + LastRunCreatedAt: timestamppb.New(time.Unix(1, 0)), StorageState: apiv2beta1.Experiment_StorageState(apiv2beta1.Experiment_StorageState_value["AVAILABLE"]), }, { ExperimentId: "exp2", DisplayName: "experiment2", Description: "My name is experiment2", - CreatedAt: ×tamp.Timestamp{Seconds: 2}, - LastRunCreatedAt: ×tamp.Timestamp{Seconds: 2}, + CreatedAt: timestamppb.New(time.Unix(2, 
0)), + LastRunCreatedAt: timestamppb.New(time.Unix(2, 0)), StorageState: apiv2beta1.Experiment_StorageState(apiv2beta1.Experiment_StorageState_value["ARCHIVED"]), }, { ExperimentId: "exp3", DisplayName: "experiment3", Description: "experiment3 was created using V1 APIV1BETA1", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, - LastRunCreatedAt: ×tamp.Timestamp{Seconds: 1}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), + LastRunCreatedAt: timestamppb.New(time.Unix(1, 0)), StorageState: apiv2beta1.Experiment_StorageState(apiv2beta1.Experiment_StorageState_value["AVAILABLE"]), }, { ExperimentId: "exp4", DisplayName: "experiment4", Description: "experiment4 was created using V1 APIV1BETA1", - CreatedAt: ×tamp.Timestamp{Seconds: 2}, - LastRunCreatedAt: ×tamp.Timestamp{Seconds: 2}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + LastRunCreatedAt: timestamppb.New(time.Unix(2, 0)), StorageState: apiv2beta1.Experiment_StorageState(apiv2beta1.Experiment_StorageState_value["ARCHIVED"]), }, {}, @@ -2073,8 +2254,8 @@ func TestToApiExperiments(t *testing.T) { ExperimentId: "exp5", DisplayName: "experiment5", Description: "My name is experiment5", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, - LastRunCreatedAt: ×tamp.Timestamp{Seconds: 1}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), + LastRunCreatedAt: timestamppb.New(time.Unix(1, 0)), StorageState: apiv2beta1.Experiment_StorageState(apiv2beta1.Experiment_StorageState_value["STORAGE_STATE_UNSPECIFIED"]), }, } @@ -2181,8 +2362,8 @@ func TestToApiRecurringRun(t *testing.T) { RecurringRunId: "job1", DisplayName: "name 1", Mode: apiv2beta1.RecurringRun_ENABLE, - CreatedAt: ×tamp.Timestamp{Seconds: 2}, - UpdatedAt: ×tamp.Timestamp{Seconds: 2}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + UpdatedAt: timestamppb.New(time.Unix(2, 0)), MaxConcurrency: 2, NoCatchup: true, PipelineSource: &apiv2beta1.RecurringRun_PipelineVersionReference{ @@ -2192,7 +2373,7 @@ func TestToApiRecurringRun(t *testing.T) { }, Trigger: &apiv2beta1.Trigger{ Trigger: &apiv2beta1.Trigger_CronSchedule{CronSchedule: &apiv2beta1.CronSchedule{ - StartTime: ×tamp.Timestamp{Seconds: 2}, + StartTime: timestamppb.New(time.Unix(2, 0)), Cron: "2 * *", }}, }, @@ -2233,13 +2414,13 @@ func TestToApiRecurringRun(t *testing.T) { RecurringRunId: "job1", DisplayName: "name 1", Mode: apiv2beta1.RecurringRun_DISABLE, - CreatedAt: ×tamp.Timestamp{Seconds: 2}, - UpdatedAt: ×tamp.Timestamp{Seconds: 2}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + UpdatedAt: timestamppb.New(time.Unix(2, 0)), MaxConcurrency: 2, NoCatchup: true, Trigger: &apiv2beta1.Trigger{ Trigger: &apiv2beta1.Trigger_CronSchedule{CronSchedule: &apiv2beta1.CronSchedule{ - StartTime: ×tamp.Timestamp{Seconds: 2}, + StartTime: timestamppb.New(time.Unix(2, 0)), Cron: "2 * *", }}, }, @@ -2521,7 +2702,7 @@ func Test_toApiRuntimeStateV1(t *testing.T) { "Succeeded", }, { - "v2 cancelling", + "v2 canceling", model.RuntimeStateCancelling, "Terminating", }, @@ -2572,7 +2753,7 @@ func Test_toApiRuntimeState(t *testing.T) { apiv2beta1.RuntimeState_SUCCEEDED, }, { - "v2 cancelling", + "v2 canceling", model.RuntimeStateCancelling, apiv2beta1.RuntimeState_CANCELING, }, @@ -3708,9 +3889,9 @@ func TestToModelRun(t *testing.T) { RunId: "run1", TaskId: "task1", DisplayName: "this is task", - CreateTime: ×tamp.Timestamp{Seconds: 11}, - StartTime: ×tamp.Timestamp{Seconds: 12}, - EndTime: ×tamp.Timestamp{Seconds: 13}, + CreateTime: timestamppb.New(time.Unix(11, 0)), + StartTime: timestamppb.New(time.Unix(12, 0)), + EndTime: timestamppb.New(time.Unix(13, 0)), 
ExecutorDetail: nil, State: apiv2beta1.RuntimeState_FAILED, ExecutionId: 14, @@ -3737,9 +3918,9 @@ func TestToModelRun(t *testing.T) { RunId: "run1", TaskId: "task2", DisplayName: "this is task 2", - CreateTime: ×tamp.Timestamp{Seconds: 11}, - StartTime: ×tamp.Timestamp{Seconds: 12}, - EndTime: ×tamp.Timestamp{Seconds: 13}, + CreateTime: timestamppb.New(time.Unix(11, 0)), + StartTime: timestamppb.New(time.Unix(12, 0)), + EndTime: timestamppb.New(time.Unix(13, 0)), ExecutorDetail: nil, State: apiv2beta1.RuntimeState_CANCELED, ExecutionId: 14, @@ -3904,9 +4085,9 @@ func TestToModelRun(t *testing.T) { RunId: "run2", TaskId: "task1", DisplayName: "this is task", - CreateTime: ×tamp.Timestamp{Seconds: 11}, - StartTime: ×tamp.Timestamp{Seconds: 12}, - EndTime: ×tamp.Timestamp{Seconds: 13}, + CreateTime: timestamppb.New(time.Unix(11, 0)), + StartTime: timestamppb.New(time.Unix(12, 0)), + EndTime: timestamppb.New(time.Unix(13, 0)), ExecutorDetail: nil, State: apiv2beta1.RuntimeState_RUNNING, ExecutionId: 14, @@ -3992,6 +4173,103 @@ func TestToModelRun(t *testing.T) { false, "", }, + { // all fields are same as "v2 full pipeline version except invalid ExperimentId + "v2 ExperimentId overflow", + &apiv2beta1.Run{ + ExperimentId: strings.Repeat("e", 65), + RunId: "run1", + DisplayName: "name1", + Description: "this is a run", + StorageState: apiv2beta1.Run_ARCHIVED, + PipelineSource: &apiv2beta1.Run_PipelineVersionReference{ + PipelineVersionReference: &apiv2beta1.PipelineVersionReference{ + PipelineId: "p1", + PipelineVersionId: "pv1", + }, + }, + RuntimeConfig: &apiv2beta1.RuntimeConfig{ + Parameters: map[string]*structpb.Value{ + "param2": structpb.NewStringValue("world"), + }, + }, + ServiceAccount: "sa1", + CreatedAt: ×tamppb.Timestamp{Seconds: 1}, + ScheduledAt: ×tamppb.Timestamp{Seconds: 2}, + FinishedAt: ×tamppb.Timestamp{Seconds: 3}, + State: apiv2beta1.RuntimeState_FAILED, + Error: util.ToRpcStatus(util.NewInvalidInputError("Input argument is invalid")), + RunDetails: &apiv2beta1.RunDetails{ + PipelineContextId: 10, + PipelineRunContextId: 11, + TaskDetails: []*apiv2beta1.PipelineTaskDetail{ + { + RunId: "run1", + TaskId: "task1", + DisplayName: "this is task", + CreateTime: timestamppb.New(time.Unix(11, 0)), + StartTime: timestamppb.New(time.Unix(12, 0)), + EndTime: timestamppb.New(time.Unix(13, 0)), + ExecutorDetail: nil, + State: apiv2beta1.RuntimeState_FAILED, + ExecutionId: 14, + Inputs: map[string]*apiv2beta1.ArtifactList{ + "a1": {ArtifactIds: []int64{1, 2, 3}}, + }, + Outputs: map[string]*apiv2beta1.ArtifactList{ + "b2": {ArtifactIds: []int64{4, 5, 6}}, + }, + StateHistory: []*apiv2beta1.RuntimeStatus{ + { + UpdateTime: ×tamppb.Timestamp{Seconds: 15}, + State: apiv2beta1.RuntimeState_FAILED, + Error: util.ToRpcStatus(util.NewInvalidInputError("Input argument is invalid")), + }, + }, + ChildTasks: []*apiv2beta1.PipelineTaskDetail_ChildTask{ + { + ChildTask: &apiv2beta1.PipelineTaskDetail_ChildTask_PodName{PodName: "task2"}, + }, + }, + }, + { + RunId: "run1", + TaskId: "task2", + DisplayName: "this is task 2", + CreateTime: timestamppb.New(time.Unix(11, 0)), + StartTime: timestamppb.New(time.Unix(12, 0)), + EndTime: timestamppb.New(time.Unix(13, 0)), + ExecutorDetail: nil, + State: apiv2beta1.RuntimeState_CANCELED, + ExecutionId: 14, + Inputs: map[string]*apiv2beta1.ArtifactList{ + "a1": {ArtifactIds: []int64{1, 2, 3}}, + }, + Outputs: map[string]*apiv2beta1.ArtifactList{ + "b2": {ArtifactIds: []int64{4, 5, 6}}, + }, + ParentTaskId: "task1", + StateHistory: 
[]*apiv2beta1.RuntimeStatus{ + { + UpdateTime: ×tamppb.Timestamp{Seconds: 15}, + State: apiv2beta1.RuntimeState_CANCELED, + }, + }, + }, + }, + }, + RecurringRunId: "job1", + StateHistory: []*apiv2beta1.RuntimeStatus{ + { + UpdateTime: ×tamppb.Timestamp{Seconds: 9}, + State: apiv2beta1.RuntimeState_FAILED, + Error: util.ToRpcStatus(util.NewInvalidInputError("Input argument is invalid")), + }, + }, + }, + nil, + true, + "Run.ExperimentId length cannot exceed 64", + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { @@ -4246,9 +4524,9 @@ func Test_toApiRun(t *testing.T) { RunId: "run2", TaskId: "task1", DisplayName: "this is task", - CreateTime: ×tamp.Timestamp{Seconds: 11}, - StartTime: ×tamp.Timestamp{Seconds: 12}, - EndTime: ×tamp.Timestamp{Seconds: 13}, + CreateTime: timestamppb.New(time.Unix(11, 0)), + StartTime: timestamppb.New(time.Unix(12, 0)), + EndTime: timestamppb.New(time.Unix(13, 0)), ExecutorDetail: nil, State: apiv2beta1.RuntimeState_FAILED, ExecutionId: 14, diff --git a/backend/src/apiserver/server/api_util.go b/backend/src/apiserver/server/api_util.go index f17dab74b4f..7f9d7745413 100644 --- a/backend/src/apiserver/server/api_util.go +++ b/backend/src/apiserver/server/api_util.go @@ -166,7 +166,7 @@ func GetResourceReferenceFromRunInterface(r interface{}) ([]*apiv1beta1.Resource // and there may be a second doc for platform specific spec. // If platform spec is empty, then return pipeline spec directly. Else, return a struct with // pipeline spec and platform spec as subfields. -func yamlStringToPipelineSpecStruct(s string) (*structpb.Struct, error) { +func YamlStringToPipelineSpecStruct(s string) (*structpb.Struct, error) { if s == "" { return nil, nil } diff --git a/backend/src/apiserver/server/api_util_test.go b/backend/src/apiserver/server/api_util_test.go index 39d5dec7534..e0282537b52 100644 --- a/backend/src/apiserver/server/api_util_test.go +++ b/backend/src/apiserver/server/api_util_test.go @@ -384,7 +384,7 @@ func loadYaml(t *testing.T, path string) string { return string(res) } -// Tests both yamlStringToPipelineSpecStruct and pipelineSpecStructToYamlString. +// Tests both YamlStringToPipelineSpecStruct and pipelineSpecStructToYamlString. func TestPipelineSpecStructToYamlString_DirectSpec(t *testing.T) { template := loadYaml(t, "test/pipeline_with_volume.yaml") @@ -397,7 +397,7 @@ func TestPipelineSpecStructToYamlString_DirectSpec(t *testing.T) { actualTemplate, err := pipelineSpecStructToYamlString(&pipeline) assert.Nil(t, err) - actualPipeline, err := yamlStringToPipelineSpecStruct(actualTemplate) + actualPipeline, err := YamlStringToPipelineSpecStruct(actualTemplate) assert.Nil(t, err) // Compare the marshalled JSON due to flakiness of structpb values @@ -407,7 +407,7 @@ func TestPipelineSpecStructToYamlString_DirectSpec(t *testing.T) { assert.Equal(t, j1, j2) } -// Tests both yamlStringToPipelineSpecStruct and pipelineSpecStructToYamlString. +// Tests both YamlStringToPipelineSpecStruct and pipelineSpecStructToYamlString. 
func TestPipelineSpecStructToYamlString_WithPlatform(t *testing.T) { template := loadYaml(t, "test/pipeline_with_volume.yaml") @@ -433,7 +433,7 @@ func TestPipelineSpecStructToYamlString_WithPlatform(t *testing.T) { actualTemplate, err := pipelineSpecStructToYamlString(&pipeline) assert.Nil(t, err) - actualPipeline, err := yamlStringToPipelineSpecStruct(actualTemplate) + actualPipeline, err := YamlStringToPipelineSpecStruct(actualTemplate) assert.Nil(t, err) // Compare the marshalled JSON due to flakiness of structpb values @@ -443,7 +443,7 @@ func TestPipelineSpecStructToYamlString_WithPlatform(t *testing.T) { assert.Equal(t, j1, j2) } -// Tests both yamlStringToPipelineSpecStruct and pipelineSpecStructToYamlString. +// Tests both YamlStringToPipelineSpecStruct and pipelineSpecStructToYamlString. // In this case although the received pipeline spec is nested, because platform spec is empty, // we return the pipeline spec directly. func TestPipelineSpecStructToYamlString_NestedPipelineSpec(t *testing.T) { @@ -464,7 +464,7 @@ func TestPipelineSpecStructToYamlString_NestedPipelineSpec(t *testing.T) { actualTemplate, err := pipelineSpecStructToYamlString(&pipeline) assert.Nil(t, err) - actualPipeline, err := yamlStringToPipelineSpecStruct(actualTemplate) + actualPipeline, err := YamlStringToPipelineSpecStruct(actualTemplate) assert.Nil(t, err) // Compare the marshalled JSON due to flakiness of structpb values diff --git a/backend/src/apiserver/server/artifact_server.go b/backend/src/apiserver/server/artifact_server.go index c6c615fab7b..c4a8d35b4da 100644 --- a/backend/src/apiserver/server/artifact_server.go +++ b/backend/src/apiserver/server/artifact_server.go @@ -52,6 +52,7 @@ type ArtifactServerOptions struct { type ArtifactServer struct { resourceManager *resource.ResourceManager options *ArtifactServerOptions + apiv2beta1.UnimplementedArtifactServiceServer } // Value constraints by MLMD: @@ -138,13 +139,12 @@ func (s *ArtifactServer) ListArtifacts(ctx context.Context, r *apiv2beta1.ListAr var artifactsResp []*apiv2beta1.Artifact for _, artifact := range artifacts { bucketConfig, namespace, err1 := s.resourceManager.GetArtifactSessionInfo(ctx, artifact) - artifactId := strconv.FormatInt(*artifact.Id, 10) if err1 != nil || bucketConfig == nil { - return nil, util.NewInternalServerError(fmt.Errorf("failed to retrieve session info error: %v", err1), artifactId) + return nil, util.NewInternalServerError(fmt.Errorf("failed to retrieve session info error: %v", err1), "") } artifactResp, err1 := s.generateResponseArtifact(ctx, artifact, bucketConfig, namespace, apiv2beta1.GetArtifactRequest_ARTIFACT_VIEW_UNSPECIFIED) if err1 != nil { - return nil, util.NewInternalServerError(fmt.Errorf("encountered error parsing artifact: %v", err), artifactId) + return nil, util.NewInternalServerError(fmt.Errorf("encountered error parsing artifact: %v", err1), "") } artifactsResp = append(artifactsResp, artifactResp) } @@ -179,7 +179,7 @@ func (s *ArtifactServer) GetArtifact(ctx context.Context, r *apiv2beta1.GetArtif artifact := artifacts[0] sessionInfo, namespace, err := s.resourceManager.GetArtifactSessionInfo(ctx, artifact) if err != nil || sessionInfo == nil { - return nil, util.NewInternalServerError(fmt.Errorf("failed to retrieve session info error: %v", err), r.ArtifactId) + return nil, util.NewInternalServerError(fmt.Errorf("failed to retrieve session info error: %v", err), "") } err = s.canAccessArtifact(ctx, namespace, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbGet}) @@ -189,7 
+189,7 @@ func (s *ArtifactServer) GetArtifact(ctx context.Context, r *apiv2beta1.GetArtif artifactResp, err := s.generateResponseArtifact(ctx, artifact, sessionInfo, namespace, r.GetView()) if err != nil { - return nil, util.NewInternalServerError(fmt.Errorf("encountered error parsing artifact: %v", err), r.ArtifactId) + return nil, util.NewInternalServerError(fmt.Errorf("encountered error parsing artifact: %v", err), "") } return artifactResp, nil diff --git a/backend/src/apiserver/server/auth_server.go b/backend/src/apiserver/server/auth_server.go index ae3216c4e8c..cc124053009 100644 --- a/backend/src/apiserver/server/auth_server.go +++ b/backend/src/apiserver/server/auth_server.go @@ -18,11 +18,13 @@ import ( "context" "strings" - "github.com/golang/protobuf/ptypes/empty" + apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" + api "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/apiserver/resource" "github.com/kubeflow/pipelines/backend/src/common/util" + "google.golang.org/protobuf/types/known/emptypb" authorizationv1 "k8s.io/api/authorization/v1" ) @@ -37,10 +39,11 @@ var rbacResourceTypeToGroup = map[string]string{ type AuthServer struct { resourceManager *resource.ResourceManager + apiv1beta1.UnimplementedAuthServiceServer } func (s *AuthServer) AuthorizeV1(ctx context.Context, request *api.AuthorizeRequest) ( - *empty.Empty, error, + *emptypb.Empty, error, ) { err := ValidateAuthorizeRequest(request) if err != nil { @@ -64,7 +67,7 @@ func (s *AuthServer) AuthorizeV1(ctx context.Context, request *api.AuthorizeRequ return nil, util.Wrap(err, "Failed to authorize the request") } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } func ValidateAuthorizeRequest(request *api.AuthorizeRequest) error { diff --git a/backend/src/apiserver/server/experiment_server.go b/backend/src/apiserver/server/experiment_server.go index cab20cf05fd..392730f4801 100644 --- a/backend/src/apiserver/server/experiment_server.go +++ b/backend/src/apiserver/server/experiment_server.go @@ -17,7 +17,8 @@ package server import ( "context" - "github.com/golang/protobuf/ptypes/empty" + "google.golang.org/protobuf/types/known/emptypb" + apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" "github.com/kubeflow/pipelines/backend/src/apiserver/common" @@ -76,12 +77,25 @@ type ExperimentServerOptions struct { CollectMetrics bool } -type ExperimentServer struct { +// BaseExperimentServer wraps ExperimentServer and ExperimentServerV1 +// to enable method sharing. It can be removed once ExperimentServerV1 +// is removed. 
+type BaseExperimentServer struct { resourceManager *resource.ResourceManager options *ExperimentServerOptions } -func (s *ExperimentServer) createExperiment(ctx context.Context, experiment *model.Experiment) (*model.Experiment, error) { +type ExperimentServer struct { + *BaseExperimentServer + apiv2beta1.UnimplementedExperimentServiceServer +} + +type ExperimentServerV1 struct { + *BaseExperimentServer + apiv1beta1.UnimplementedExperimentServiceServer +} + +func (s *BaseExperimentServer) createExperiment(ctx context.Context, experiment *model.Experiment) (*model.Experiment, error) { experiment.Namespace = s.resourceManager.ReplaceNamespace(experiment.Namespace) resourceAttributes := &authorizationv1.ResourceAttributes{ Namespace: experiment.Namespace, @@ -92,10 +106,11 @@ func (s *ExperimentServer) createExperiment(ctx context.Context, experiment *mod if err != nil { return nil, util.Wrap(err, "Failed to authorize the request") } + return s.resourceManager.CreateExperiment(experiment) } -func (s *ExperimentServer) CreateExperimentV1(ctx context.Context, request *apiv1beta1.CreateExperimentRequest) ( +func (s *ExperimentServerV1) CreateExperimentV1(ctx context.Context, request *apiv1beta1.CreateExperimentRequest) ( *apiv1beta1.Experiment, error, ) { if s.options.CollectMetrics { @@ -151,15 +166,15 @@ func (s *ExperimentServer) CreateExperiment(ctx context.Context, request *apiv2b return apiExperiment, nil } -func (s *ExperimentServer) getExperiment(ctx context.Context, experimentId string) (*model.Experiment, error) { - err := s.canAccessExperiment(ctx, experimentId, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbGet}) +func (s *BaseExperimentServer) getExperiment(ctx context.Context, experimentID string) (*model.Experiment, error) { + err := s.canAccessExperiment(ctx, experimentID, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbGet}) if err != nil { return nil, util.Wrap(err, "Failed to authorize the request") } - return s.resourceManager.GetExperiment(experimentId) + return s.resourceManager.GetExperiment(experimentID) } -func (s *ExperimentServer) GetExperimentV1(ctx context.Context, request *apiv1beta1.GetExperimentRequest) ( +func (s *ExperimentServerV1) GetExperimentV1(ctx context.Context, request *apiv1beta1.GetExperimentRequest) ( *apiv1beta1.Experiment, error, ) { if s.options.CollectMetrics { @@ -197,7 +212,7 @@ func (s *ExperimentServer) GetExperiment(ctx context.Context, request *apiv2beta return apiExperiment, nil } -func (s *ExperimentServer) listExperiments(ctx context.Context, pageToken string, pageSize int32, sortBy string, opts *list.Options, namespace string) ([]*model.Experiment, int32, string, error) { +func (s *BaseExperimentServer) listExperiments(ctx context.Context, pageToken string, pageSize int32, sortBy string, opts *list.Options, namespace string) ([]*model.Experiment, int32, string, error) { namespace = s.resourceManager.ReplaceNamespace(namespace) resourceAttributes := &authorizationv1.ResourceAttributes{ Namespace: namespace, @@ -218,7 +233,7 @@ func (s *ExperimentServer) listExperiments(ctx context.Context, pageToken string return experiments, int32(totalSize), nextPageToken, nil } -func (s *ExperimentServer) ListExperimentsV1(ctx context.Context, request *apiv1beta1.ListExperimentsRequest) ( +func (s *ExperimentServerV1) ListExperimentsV1(ctx context.Context, request *apiv1beta1.ListExperimentsRequest) ( *apiv1beta1.ListExperimentsResponse, error, ) { if s.options.CollectMetrics { @@ -284,15 +299,15 @@ func (s 
*ExperimentServer) ListExperiments(ctx context.Context, request *apiv2be }, nil } -func (s *ExperimentServer) deleteExperiment(ctx context.Context, experimentId string) error { - err := s.canAccessExperiment(ctx, experimentId, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbDelete}) +func (s *BaseExperimentServer) deleteExperiment(ctx context.Context, experimentID string) error { + err := s.canAccessExperiment(ctx, experimentID, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbDelete}) if err != nil { return util.Wrap(err, "Failed to authorize the request") } - return s.resourceManager.DeleteExperiment(experimentId) + return s.resourceManager.DeleteExperiment(experimentID) } -func (s *ExperimentServer) DeleteExperimentV1(ctx context.Context, request *apiv1beta1.DeleteExperimentRequest) (*empty.Empty, error) { +func (s *ExperimentServerV1) DeleteExperimentV1(ctx context.Context, request *apiv1beta1.DeleteExperimentRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { deleteExperimentRequests.Inc() } @@ -304,10 +319,10 @@ func (s *ExperimentServer) DeleteExperimentV1(ctx context.Context, request *apiv if s.options.CollectMetrics { experimentCount.Dec() } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } -func (s *ExperimentServer) DeleteExperiment(ctx context.Context, request *apiv2beta1.DeleteExperimentRequest) (*empty.Empty, error) { +func (s *ExperimentServer) DeleteExperiment(ctx context.Context, request *apiv2beta1.DeleteExperimentRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { deleteExperimentRequests.Inc() } @@ -319,11 +334,11 @@ func (s *ExperimentServer) DeleteExperiment(ctx context.Context, request *apiv2b if s.options.CollectMetrics { experimentCount.Dec() } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } // TODO(chensun): consider refactoring the code to get rid of double-query of experiment. -func (s *ExperimentServer) canAccessExperiment(ctx context.Context, experimentID string, resourceAttributes *authorizationv1.ResourceAttributes) error { +func (s *BaseExperimentServer) canAccessExperiment(ctx context.Context, experimentID string, resourceAttributes *authorizationv1.ResourceAttributes) error { if !common.IsMultiUserMode() { // Skip authorization if not multi-user mode. 
return nil @@ -352,25 +367,25 @@ func (s *ExperimentServer) canAccessExperiment(ctx context.Context, experimentID return nil } -func (s *ExperimentServer) archiveExperiment(ctx context.Context, experimentId string) error { - err := s.canAccessExperiment(ctx, experimentId, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbArchive}) +func (s *BaseExperimentServer) archiveExperiment(ctx context.Context, experimentID string) error { + err := s.canAccessExperiment(ctx, experimentID, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbArchive}) if err != nil { return util.Wrap(err, "Failed to authorize the request") } - return s.resourceManager.ArchiveExperiment(ctx, experimentId) + return s.resourceManager.ArchiveExperiment(ctx, experimentID) } -func (s *ExperimentServer) ArchiveExperimentV1(ctx context.Context, request *apiv1beta1.ArchiveExperimentRequest) (*empty.Empty, error) { +func (s *ExperimentServerV1) ArchiveExperimentV1(ctx context.Context, request *apiv1beta1.ArchiveExperimentRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { archiveExperimentRequests.Inc() } if err := s.archiveExperiment(ctx, request.GetId()); err != nil { return nil, util.Wrap(err, "Failed to archive v1beta1 experiment") } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } -func (s *ExperimentServer) ArchiveExperiment(ctx context.Context, request *apiv2beta1.ArchiveExperimentRequest) (*empty.Empty, error) { +func (s *ExperimentServer) ArchiveExperiment(ctx context.Context, request *apiv2beta1.ArchiveExperimentRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { archiveExperimentRequests.Inc() } @@ -378,18 +393,18 @@ func (s *ExperimentServer) ArchiveExperiment(ctx context.Context, request *apiv2 if err := s.archiveExperiment(ctx, request.GetExperimentId()); err != nil { return nil, util.Wrap(err, "Failed to archive experiment") } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } -func (s *ExperimentServer) unarchiveExperiment(ctx context.Context, experimentId string) error { - err := s.canAccessExperiment(ctx, experimentId, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbUnarchive}) +func (s *BaseExperimentServer) unarchiveExperiment(ctx context.Context, experimentID string) error { + err := s.canAccessExperiment(ctx, experimentID, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbUnarchive}) if err != nil { return util.Wrap(err, "Failed to authorize the request") } - return s.resourceManager.UnarchiveExperiment(experimentId) + return s.resourceManager.UnarchiveExperiment(experimentID) } -func (s *ExperimentServer) UnarchiveExperimentV1(ctx context.Context, request *apiv1beta1.UnarchiveExperimentRequest) (*empty.Empty, error) { +func (s *ExperimentServerV1) UnarchiveExperimentV1(ctx context.Context, request *apiv1beta1.UnarchiveExperimentRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { unarchiveExperimentRequests.Inc() } @@ -397,10 +412,10 @@ func (s *ExperimentServer) UnarchiveExperimentV1(ctx context.Context, request *a if err := s.unarchiveExperiment(ctx, request.GetId()); err != nil { return nil, util.Wrap(err, "Failed to unarchive v1beta1 experiment") } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } -func (s *ExperimentServer) UnarchiveExperiment(ctx context.Context, request *apiv2beta1.UnarchiveExperimentRequest) (*empty.Empty, error) { +func (s *ExperimentServer) UnarchiveExperiment(ctx context.Context, request *apiv2beta1.UnarchiveExperimentRequest) (*emptypb.Empty, 
error) { if s.options.CollectMetrics { unarchiveExperimentRequests.Inc() } @@ -408,9 +423,23 @@ func (s *ExperimentServer) UnarchiveExperiment(ctx context.Context, request *api if err := s.unarchiveExperiment(ctx, request.GetExperimentId()); err != nil { return nil, util.Wrap(err, "Failed to unarchive experiment") } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } func NewExperimentServer(resourceManager *resource.ResourceManager, options *ExperimentServerOptions) *ExperimentServer { - return &ExperimentServer{resourceManager: resourceManager, options: options} + return &ExperimentServer{ + BaseExperimentServer: &BaseExperimentServer{ + resourceManager: resourceManager, + options: options, + }, + } +} + +func NewExperimentServerV1(resourceManager *resource.ResourceManager, options *ExperimentServerOptions) *ExperimentServerV1 { + return &ExperimentServerV1{ + BaseExperimentServer: &BaseExperimentServer{ + resourceManager: resourceManager, + options: options, + }, + } } diff --git a/backend/src/apiserver/server/experiment_server_test.go b/backend/src/apiserver/server/experiment_server_test.go index 35cc305c78d..aef0e190224 100644 --- a/backend/src/apiserver/server/experiment_server_test.go +++ b/backend/src/apiserver/server/experiment_server_test.go @@ -16,13 +16,16 @@ package server import ( "context" + "fmt" "strings" "testing" + "time" + + "google.golang.org/protobuf/types/known/timestamppb" "google.golang.org/protobuf/types/known/structpb" "sigs.k8s.io/yaml" - "github.com/golang/protobuf/ptypes/timestamp" "github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp/cmpopts" apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" @@ -36,10 +39,33 @@ import ( "google.golang.org/protobuf/testing/protocmp" ) +func createExperimentServerV1(resourceManager *resource.ResourceManager) *ExperimentServerV1 { + return &ExperimentServerV1{ + BaseExperimentServer: &BaseExperimentServer{ + resourceManager: resourceManager, + options: &ExperimentServerOptions{ + CollectMetrics: false, + }, + }, + } +} + +func createExperimentServer(resourceManager *resource.ResourceManager) *ExperimentServer { + return &ExperimentServer{ + BaseExperimentServer: &BaseExperimentServer{ + resourceManager: resourceManager, + options: &ExperimentServerOptions{ + CollectMetrics: false, + }, + }, + } +} + func TestCreateExperimentV1(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + + server := createExperimentServerV1(resourceManager) experiment := &apiv1beta1.Experiment{Name: "ex1", Description: "first experiment"} result, err := server.CreateExperimentV1(nil, &apiv1beta1.CreateExperimentRequest{Experiment: experiment}) @@ -48,7 +74,7 @@ func TestCreateExperimentV1(t *testing.T) { Id: DefaultFakeUUID, Name: "ex1", Description: "first experiment", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), StorageState: apiv1beta1.Experiment_STORAGESTATE_AVAILABLE, ResourceReferences: []*apiv1beta1.ResourceReference{ { @@ -63,7 +89,7 @@ func TestCreateExperimentV1(t *testing.T) { func TestCreateExperiment(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, 
&resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServer(resourceManager) experiment := &apiV2beta1.Experiment{DisplayName: "ex1", Description: "first experiment"} result, err := server.CreateExperiment(nil, &apiV2beta1.CreateExperimentRequest{Experiment: experiment}) @@ -72,8 +98,8 @@ func TestCreateExperiment(t *testing.T) { ExperimentId: DefaultFakeUUID, DisplayName: "ex1", Description: "first experiment", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, - LastRunCreatedAt: ×tamp.Timestamp{Seconds: 0}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), + LastRunCreatedAt: timestamppb.New(time.Unix(0, 0)), StorageState: apiV2beta1.Experiment_AVAILABLE, Namespace: "", } @@ -83,7 +109,7 @@ func TestCreateExperiment(t *testing.T) { func TestCreateExperimentV1_Failed(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServerV1(resourceManager) experiment := &apiv1beta1.Experiment{Name: "ex1", Description: "first experiment"} clientManager.DB().Close() _, err := server.CreateExperimentV1(nil, &apiv1beta1.CreateExperimentRequest{Experiment: experiment}) @@ -94,7 +120,7 @@ func TestCreateExperimentV1_Failed(t *testing.T) { func TestCreateExperiment_Failed(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServer(resourceManager) experiment := &apiV2beta1.Experiment{DisplayName: "ex1", Description: "first experiment"} clientManager.DB().Close() _, err := server.CreateExperiment(nil, &apiV2beta1.CreateExperimentRequest{Experiment: experiment}) @@ -105,7 +131,7 @@ func TestCreateExperiment_Failed(t *testing.T) { func TestCreateExperiment_EmptyName(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServer(resourceManager) experiment := &apiV2beta1.Experiment{DisplayName: "", Description: "first experiment"} clientManager.DB().Close() _, err := server.CreateExperiment(nil, &apiV2beta1.CreateExperimentRequest{Experiment: experiment}) @@ -116,7 +142,7 @@ func TestCreateExperiment_EmptyName(t *testing.T) { func TestCreateExperimentV1_EmptyName(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServerV1(resourceManager) experiment := &apiv1beta1.Experiment{Name: "", Description: "first experiment"} clientManager.DB().Close() 
_, err := server.CreateExperimentV1(nil, &apiv1beta1.CreateExperimentRequest{Experiment: experiment}) @@ -124,6 +150,127 @@ func TestCreateExperimentV1_EmptyName(t *testing.T) { assert.Contains(t, err.Error(), "Invalid input error: Experiment must have a non-empty name") } +func TestCreateExperimentV1_LengthValidation(t *testing.T) { + const ( + maxName = 128 // todo import from validation package + maxNS = 63 + ) + + longName := strings.Repeat("n", maxName+1) + longNS := strings.Repeat("s", maxNS+1) + + tests := []struct { + multiUser bool + name string + namespace string + wantErr bool + wantMsg string + }{ + {false, "good-name", "good_namespace", false, ""}, + {false, longName, "good_namespace", true, "Experiment.Name length cannot exceed"}, + {true, "good-name", "good_namespace", false, ""}, + {true, "good-name", longNS, true, "Experiment.Namespace length cannot exceed"}, + {true, longName, "good_namespace", true, "Experiment.Name length cannot exceed"}, + } + + for _, tc := range tests { + t.Run(fmt.Sprintf("multi=%t_nameLen=%d_nsLen=%d", + tc.multiUser, len(tc.name), len(tc.namespace)), + func(t *testing.T) { + if tc.multiUser { + viper.Set(common.MultiUserMode, "true") + } else { + viper.Set(common.MultiUserMode, "false") + } + defer viper.Set(common.MultiUserMode, "false") + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) + server := createExperimentServerV1(resourceManager) + + req := &apiv1beta1.CreateExperimentRequest{ + Experiment: &apiv1beta1.Experiment{ + Name: tc.name, + ResourceReferences: []*apiv1beta1.ResourceReference{ + { + Key: &apiv1beta1.ResourceKey{Type: apiv1beta1.ResourceType_NAMESPACE, Id: tc.namespace}, + Relationship: apiv1beta1.Relationship_OWNER, + }, + }, + }, + } + _, err := server.CreateExperimentV1(ctx, req) + if tc.wantErr { + assert.Error(t, err) + assert.Contains(t, err.Error(), tc.wantMsg) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestCreateExperiment_LengthValidation(t *testing.T) { + const ( + maxName = 128 + maxNS = 63 + ) + + longName := strings.Repeat("n", maxName+1) + longNS := strings.Repeat("s", maxNS+1) + + tests := []struct { + multiUser bool + name string + namespace string + wantErr bool + wantMsg string + }{ + {false, "good-name", "good_namespace", false, ""}, + {false, longName, "good_namespace", true, "Experiment.Name length cannot exceed"}, + {true, "good-name", "good_namespace", false, ""}, + {true, "good-name", longNS, true, "Experiment.Namespace length cannot exceed"}, + {true, longName, "good_namespace", true, "Experiment.Name length cannot exceed"}, + } + + for _, tc := range tests { + t.Run(fmt.Sprintf("multi=%t_nameLen=%d_nsLen=%d", + tc.multiUser, len(tc.name), len(tc.namespace)), + func(t *testing.T) { + if tc.multiUser { + viper.Set(common.MultiUserMode, "true") + } else { + viper.Set(common.MultiUserMode, "false") + } + defer viper.Set(common.MultiUserMode, "false") + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + 
resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) + server := createExperimentServer(resourceManager) + + req := &apiV2beta1.CreateExperimentRequest{ + Experiment: &apiV2beta1.Experiment{ + DisplayName: tc.name, + Namespace: tc.namespace, + }, + } + _, err := server.CreateExperiment(ctx, req) + if tc.wantErr { + assert.Error(t, err) + assert.Contains(t, err.Error(), tc.wantMsg) + } else { + assert.NoError(t, err) + } + }) + } +} + func TestCreateExperimentV1_Unauthorized(t *testing.T) { viper.Set(common.MultiUserMode, "true") defer viper.Set(common.MultiUserMode, "false") @@ -135,7 +282,7 @@ func TestCreateExperimentV1_Unauthorized(t *testing.T) { clients, resourceManager, _ := initWithExperiment_SubjectAccessReview_Unauthorized(t) defer clients.Close() - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServerV1(resourceManager) experiment := &apiv1beta1.Experiment{ Name: "exp1", Description: "first experiment", @@ -167,7 +314,7 @@ func TestCreateExperiment_Unauthorized(t *testing.T) { clients, resourceManager, _ := initWithExperiment_SubjectAccessReview_Unauthorized(t) defer clients.Close() - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServer(resourceManager) experiment := &apiV2beta1.Experiment{ DisplayName: "exp1", Description: "first experiment", @@ -191,7 +338,6 @@ func TestCreateExperimentV1_Multiuser(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} tests := []struct { name string @@ -365,7 +511,7 @@ func TestCreateExperimentV1_Multiuser(t *testing.T) { for _, tt := range tests { clientManager.UpdateUUID(util.NewFakeUUIDGeneratorOrFatal(tt.fakeId, nil)) resourceManager = resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server = ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServerV1(resourceManager) got, err := server.CreateExperimentV1(ctx, &apiv1beta1.CreateExperimentRequest{Experiment: tt.experiment}) if tt.wantError { assert.NotNil(t, err) @@ -387,7 +533,7 @@ func TestCreateExperiment_Multiuser(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServer(resourceManager) tests := []struct { name string @@ -401,14 +547,14 @@ func TestCreateExperiment_Multiuser(t *testing.T) { &apiV2beta1.Experiment{ DisplayName: "exp1", Description: "first experiment", - LastRunCreatedAt: ×tamp.Timestamp{Seconds: 0}, + LastRunCreatedAt: timestamppb.New(time.Unix(0, 0)), Namespace: "ns1", }, &apiV2beta1.Experiment{ ExperimentId: DefaultFakeUUID, DisplayName: "exp1", Description: "first experiment", - LastRunCreatedAt: ×tamp.Timestamp{Seconds: 0}, + LastRunCreatedAt: timestamppb.New(time.Unix(0, 0)), 
Namespace: "ns1", StorageState: apiV2beta1.Experiment_AVAILABLE, }, @@ -453,7 +599,7 @@ func TestCreateExperiment_Multiuser(t *testing.T) { func TestGetExperimentV1(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServerV1(resourceManager) experiment := &apiv1beta1.Experiment{Name: "ex1", Description: "first experiment"} createResult, err := server.CreateExperimentV1(nil, &apiv1beta1.CreateExperimentRequest{Experiment: experiment}) @@ -464,7 +610,7 @@ func TestGetExperimentV1(t *testing.T) { Id: createResult.Id, Name: "ex1", Description: "first experiment", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), StorageState: apiv1beta1.Experiment_STORAGESTATE_AVAILABLE, ResourceReferences: []*apiv1beta1.ResourceReference{ { @@ -479,7 +625,7 @@ func TestGetExperimentV1(t *testing.T) { func TestGetExperiment(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServer(resourceManager) experiment := &apiV2beta1.Experiment{DisplayName: "ex1", Description: "first experiment"} createResult, err := server.CreateExperiment(nil, &apiV2beta1.CreateExperimentRequest{Experiment: experiment}) @@ -490,8 +636,8 @@ func TestGetExperiment(t *testing.T) { ExperimentId: createResult.ExperimentId, DisplayName: "ex1", Description: "first experiment", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, - LastRunCreatedAt: ×tamp.Timestamp{Seconds: 0}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), + LastRunCreatedAt: timestamppb.New(time.Unix(0, 0)), StorageState: apiV2beta1.Experiment_AVAILABLE, Namespace: "", } @@ -501,7 +647,7 @@ func TestGetExperiment(t *testing.T) { func TestGetExperimentV1_Failed(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServerV1(resourceManager) experiment := &apiv1beta1.Experiment{Name: "ex1", Description: "first experiment"} createResult, err := server.CreateExperimentV1(nil, &apiv1beta1.CreateExperimentRequest{Experiment: experiment}) @@ -515,7 +661,7 @@ func TestGetExperimentV1_Failed(t *testing.T) { func TestGetExperiment_Failed(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServer(resourceManager) experiment := &apiV2beta1.Experiment{DisplayName: "ex1", Description: "first experiment"} createResult, err := server.CreateExperiment(nil, &apiV2beta1.CreateExperimentRequest{Experiment: experiment}) @@ -537,7 +683,7 
@@ func TestGetExperimentV1_Unauthorized(t *testing.T) { clients, manager, experiment := initWithExperiment_SubjectAccessReview_Unauthorized(t) defer clients.Close() - server := ExperimentServer{manager, &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServerV1(manager) _, err := server.GetExperimentV1(ctx, &apiv1beta1.GetExperimentRequest{Id: experiment.UUID}) assert.NotNil(t, err) @@ -559,7 +705,7 @@ func TestGetExperiment_Unauthorized(t *testing.T) { clients, manager, experiment := initWithExperiment_SubjectAccessReview_Unauthorized(t) defer clients.Close() - server := ExperimentServer{manager, &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServer(manager) _, err := server.GetExperiment(ctx, &apiV2beta1.GetExperimentRequest{ExperimentId: experiment.UUID}) assert.NotNil(t, err) @@ -578,7 +724,7 @@ func TestGetExperimentV1_Multiuser(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServerV1(resourceManager) resourceReferences := []*apiv1beta1.ResourceReference{ { Key: &apiv1beta1.ResourceKey{Type: apiv1beta1.ResourceType_NAMESPACE, Id: "ns1"}, @@ -599,7 +745,7 @@ func TestGetExperimentV1_Multiuser(t *testing.T) { Id: createResult.Id, Name: "exp1", Description: "first experiment", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), ResourceReferences: resourceReferences, StorageState: apiv1beta1.Experiment_STORAGESTATE_AVAILABLE, } @@ -614,7 +760,7 @@ func TestGetExperiment_Multiuser(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServer(resourceManager) experiment := &apiV2beta1.Experiment{ DisplayName: "exp1", Description: "first experiment", @@ -629,8 +775,8 @@ func TestGetExperiment_Multiuser(t *testing.T) { ExperimentId: createResult.ExperimentId, DisplayName: "exp1", Description: "first experiment", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, - LastRunCreatedAt: ×tamp.Timestamp{Seconds: 0}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), + LastRunCreatedAt: timestamppb.New(time.Unix(0, 0)), Namespace: "ns1", StorageState: apiV2beta1.Experiment_AVAILABLE, } @@ -640,7 +786,7 @@ func TestGetExperiment_Multiuser(t *testing.T) { func TestListExperimentsV1(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServerV1(resourceManager) experiment := &apiv1beta1.Experiment{Name: "ex1", Description: "first experiment"} createResult, err := server.CreateExperimentV1(nil, &apiv1beta1.CreateExperimentRequest{Experiment: experiment}) @@ -652,7 +798,7 @@ func TestListExperimentsV1(t *testing.T) { Id: createResult.Id, Name: "ex1", Description: "first experiment", - CreatedAt: 
×tamp.Timestamp{Seconds: 1}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), StorageState: apiv1beta1.Experiment_STORAGESTATE_AVAILABLE, ResourceReferences: []*apiv1beta1.ResourceReference{ { @@ -668,7 +814,7 @@ func TestListExperimentsV1(t *testing.T) { func TestListExperiments(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServer(resourceManager) experiment := &apiV2beta1.Experiment{DisplayName: "ex1", Description: "first experiment"} createResult, err := server.CreateExperiment(nil, &apiV2beta1.CreateExperimentRequest{Experiment: experiment}) @@ -678,8 +824,8 @@ func TestListExperiments(t *testing.T) { ExperimentId: createResult.ExperimentId, DisplayName: "ex1", Description: "first experiment", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, - LastRunCreatedAt: ×tamp.Timestamp{Seconds: 0}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), + LastRunCreatedAt: timestamppb.New(time.Unix(0, 0)), StorageState: apiV2beta1.Experiment_AVAILABLE, Namespace: "", }} @@ -695,7 +841,7 @@ func TestListExperimentsByLastRunCreation(t *testing.T) { // Create another experiment clients.UpdateUUID(util.NewFakeUUIDGeneratorOrFatal(DefaultFakeIdTwo, nil)) manager = resource.NewResourceManager(clients, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: manager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServer(manager) experiment := &apiV2beta1.Experiment{DisplayName: "exp2"} experiment2, err := server.CreateExperiment(nil, &apiV2beta1.CreateExperimentRequest{Experiment: experiment}) assert.Nil(t, err) @@ -734,13 +880,13 @@ func TestListExperimentsByLastRunCreation(t *testing.T) { // Expected runs, note that because run 2 in experiment 2 // was created last, experiment 2 has the latest run execution - experimentServer := ExperimentServer{resourceManager: manager, options: &ExperimentServerOptions{CollectMetrics: false}} + experimentServer := createExperimentServer(manager) expected1 := &apiV2beta1.Experiment{ ExperimentId: experiment1.UUID, DisplayName: "exp1", Description: "", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, - LastRunCreatedAt: ×tamp.Timestamp{Seconds: 5}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), + LastRunCreatedAt: timestamppb.New(time.Unix(5, 0)), StorageState: apiV2beta1.Experiment_AVAILABLE, Namespace: "", } @@ -748,8 +894,8 @@ func TestListExperimentsByLastRunCreation(t *testing.T) { ExperimentId: experiment2.ExperimentId, DisplayName: "exp2", Description: "", - CreatedAt: ×tamp.Timestamp{Seconds: 4}, - LastRunCreatedAt: ×tamp.Timestamp{Seconds: 7}, + CreatedAt: timestamppb.New(time.Unix(4, 0)), + LastRunCreatedAt: timestamppb.New(time.Unix(7, 0)), StorageState: apiV2beta1.Experiment_AVAILABLE, Namespace: "", } @@ -769,7 +915,7 @@ func TestListExperimentsByLastRunCreation(t *testing.T) { func TestListExperimentsV1_Failed(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := 
createExperimentServerV1(resourceManager) experiment := &apiv1beta1.Experiment{Name: "ex1", Description: "first experiment"} _, err := server.CreateExperimentV1(nil, &apiv1beta1.CreateExperimentRequest{Experiment: experiment}) @@ -783,7 +929,7 @@ func TestListExperimentsV1_Failed(t *testing.T) { func TestListExperiments_Failed(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServer(resourceManager) experiment := &apiV2beta1.Experiment{DisplayName: "ex1", Description: "first experiment"} _, err := server.CreateExperiment(nil, &apiV2beta1.CreateExperimentRequest{Experiment: experiment}) @@ -805,7 +951,7 @@ func TestListExperimentsV1_Unauthorized(t *testing.T) { clients, manager, _ := initWithExperiment_SubjectAccessReview_Unauthorized(t) defer clients.Close() - server := ExperimentServer{manager, &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServerV1(manager) _, err := server.ListExperimentsV1(ctx, &apiv1beta1.ListExperimentsRequest{ ResourceReferenceKey: &apiv1beta1.ResourceKey{ @@ -832,7 +978,7 @@ func TestListExperiments_Unauthorized(t *testing.T) { clients, manager, _ := initWithExperiment_SubjectAccessReview_Unauthorized(t) defer clients.Close() - server := ExperimentServer{manager, &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServer(manager) _, err := server.ListExperiments(ctx, &apiV2beta1.ListExperimentsRequest{Namespace: "ns1"}) assert.NotNil(t, err) assert.Contains( @@ -851,7 +997,7 @@ func TestListExperimentsV1_Multiuser(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServerV1(resourceManager) resourceReferences := []*apiv1beta1.ResourceReference{ { @@ -889,7 +1035,7 @@ func TestListExperimentsV1_Multiuser(t *testing.T) { Id: createResult.Id, Name: "exp1", Description: "first experiment", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), ResourceReferences: resourceReferences, StorageState: apiv1beta1.Experiment_STORAGESTATE_AVAILABLE, }}, @@ -978,7 +1124,7 @@ func TestListExperiments_Multiuser_NoDefault(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServer(resourceManager) experiment := &apiV2beta1.Experiment{ DisplayName: "exp1", Description: "first experiment", @@ -1004,8 +1150,8 @@ func TestListExperiments_Multiuser_NoDefault(t *testing.T) { ExperimentId: createResult.ExperimentId, DisplayName: "exp1", Description: "first experiment", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, - LastRunCreatedAt: ×tamp.Timestamp{Seconds: 0}, + CreatedAt: timestamppb.New(time.Unix(1, 0)), + LastRunCreatedAt: timestamppb.New(time.Unix(0, 0)), Namespace: "ns1", 
StorageState: apiV2beta1.Experiment_AVAILABLE, }}, @@ -1048,7 +1194,7 @@ func TestArchiveAndUnarchiveExperimentV1(t *testing.T) { // Create experiment and runs/jobs under it. clients, manager, experiment, _ := initWithExperimentAndPipelineVersion(t) defer clients.Close() - runServer := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + runServer := createRunServerV1(manager) run1 := &apiv1beta1.Run{ Name: "run1", ResourceReferences: validReferencesOfExperimentAndPipelineVersion, @@ -1057,7 +1203,7 @@ func TestArchiveAndUnarchiveExperimentV1(t *testing.T) { assert.Nil(t, err) clients.UpdateUUID(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) manager = resource.NewResourceManager(clients, &resource.ResourceManagerOptions{CollectMetrics: false}) - runServer = NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + runServer = createRunServerV1(manager) run2 := &apiv1beta1.Run{ Name: "run2", ResourceReferences: validReferencesOfExperimentAndPipelineVersion, @@ -1066,14 +1212,14 @@ func TestArchiveAndUnarchiveExperimentV1(t *testing.T) { assert.Nil(t, err) clients.UpdateUUID(util.NewFakeUUIDGeneratorOrFatal(DefaultFakeUUID, nil)) manager = resource.NewResourceManager(clients, &resource.ResourceManagerOptions{CollectMetrics: false}) - jobServer := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + jobServer := createJobServerV1(manager) job1 := &apiv1beta1.Job{ Name: "name1", Enabled: true, MaxConcurrency: 1, Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: ×tamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}, }, @@ -1087,7 +1233,7 @@ func TestArchiveAndUnarchiveExperimentV1(t *testing.T) { assert.Equal(t, true, jobs.Jobs[0].Enabled) // Archive the experiment and thus all runs under it. - experimentServer := NewExperimentServer(manager, &ExperimentServerOptions{CollectMetrics: false}) + experimentServer := createExperimentServerV1(manager) _, err = experimentServer.ArchiveExperimentV1(nil, &apiv1beta1.ArchiveExperimentRequest{Id: experiment.UUID}) assert.Nil(t, err) result, err := experimentServer.GetExperimentV1(nil, &apiv1beta1.GetExperimentRequest{Id: experiment.UUID}) @@ -1124,7 +1270,7 @@ func TestArchiveAndUnarchiveExperiment(t *testing.T) { // Create experiment and runs/jobs under it. 
clients, manager, experiment, _ := initWithExperimentAndPipelineVersion(t) defer clients.Close() - runServer := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + runServer := createRunServerV1(manager) run1 := &apiv1beta1.Run{ Name: "run1", ResourceReferences: validReferencesOfExperimentAndPipelineVersion, @@ -1133,7 +1279,7 @@ func TestArchiveAndUnarchiveExperiment(t *testing.T) { assert.Nil(t, err) clients.UpdateUUID(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) manager = resource.NewResourceManager(clients, &resource.ResourceManagerOptions{CollectMetrics: false}) - runServer = NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + runServer = createRunServerV1(manager) run2 := &apiv1beta1.Run{ Name: "run2", ResourceReferences: validReferencesOfExperimentAndPipelineVersion, @@ -1142,14 +1288,14 @@ func TestArchiveAndUnarchiveExperiment(t *testing.T) { assert.Nil(t, err) clients.UpdateUUID(util.NewFakeUUIDGeneratorOrFatal(DefaultFakeUUID, nil)) manager = resource.NewResourceManager(clients, &resource.ResourceManagerOptions{CollectMetrics: false}) - jobServer := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + jobServer := createJobServerV1(manager) job1 := &apiv1beta1.Job{ Name: "name1", Enabled: true, MaxConcurrency: 1, Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: ×tamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}, }, @@ -1159,7 +1305,7 @@ func TestArchiveAndUnarchiveExperiment(t *testing.T) { assert.Nil(t, err) // Archive the experiment and thus all runs under it. - experimentServer := NewExperimentServer(manager, &ExperimentServerOptions{CollectMetrics: false}) + experimentServer := createExperimentServer(manager) _, err = experimentServer.ArchiveExperiment(nil, &apiV2beta1.ArchiveExperimentRequest{ExperimentId: experiment.UUID}) assert.Nil(t, err) result, err := experimentServer.GetExperiment(nil, &apiV2beta1.GetExperimentRequest{ExperimentId: experiment.UUID}) @@ -1197,7 +1343,7 @@ func TestArchiveAndUnarchiveExperiment(t *testing.T) { func TestDeleteExperiments_SingleUser(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServer(resourceManager) experiment := &apiV2beta1.Experiment{DisplayName: "ex1", Description: "first experiment"} resultExperiment, err := server.CreateExperiment(nil, &apiV2beta1.CreateExperimentRequest{Experiment: experiment}) assert.Nil(t, err) @@ -1215,7 +1361,7 @@ func TestDeleteExperiments_SingleUser(t *testing.T) { func TestDeleteExperimentsV1_SingleUser(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServerV1(resourceManager) experiment := &apiv1beta1.Experiment{Name: "ex1", Description: "first experiment"} resultExperiment, err := server.CreateExperimentV1(nil, &apiv1beta1.CreateExperimentRequest{Experiment: experiment}) assert.Nil(t, err) @@ -1238,7 +1384,7 @@ 
func TestDeleteExperiments_MultiUser(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServer(resourceManager) experiment := &apiV2beta1.Experiment{DisplayName: "ex1", Description: "first experiment", Namespace: "ns1"} resultExperiment, err := server.CreateExperiment(ctx, &apiV2beta1.CreateExperimentRequest{Experiment: experiment}) assert.Nil(t, err) @@ -1261,7 +1407,7 @@ func TestDeleteExperimentsV1_MultiUser(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := ExperimentServer{resourceManager: resourceManager, options: &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServerV1(resourceManager) resourceReferences := []*apiv1beta1.ResourceReference{ { Key: &apiv1beta1.ResourceKey{Type: apiv1beta1.ResourceType_NAMESPACE, Id: "ns1"}, @@ -1294,7 +1440,7 @@ func TestListExperimentsV1_Unauthenticated(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() - server := ExperimentServer{manager, &ExperimentServerOptions{CollectMetrics: false}} + server := createExperimentServerV1(manager) _, err := server.ListExperimentsV1(ctx, &apiv1beta1.ListExperimentsRequest{ ResourceReferenceKey: &apiv1beta1.ResourceKey{ Type: apiv1beta1.ResourceType_NAMESPACE, @@ -1308,3 +1454,31 @@ func TestListExperimentsV1_Unauthenticated(t *testing.T) { "User identity is empty in the request header", ) } + +func TestGetExperiment_JsonOmitEmpty(t *testing.T) { + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) + server := createExperimentServer(resourceManager) + experiment := &apiV2beta1.Experiment{ + DisplayName: "exp1", + Description: "test description", + } + + result, err := server.CreateExperiment(nil, &apiV2beta1.CreateExperimentRequest{Experiment: experiment}) + assert.Nil(t, err) + + getResult, err := server.GetExperiment(nil, &apiV2beta1.GetExperimentRequest{ExperimentId: result.ExperimentId}) + assert.Nil(t, err) + + // Convert to JSON using the custom marshaler used by runtime servers + customMarshaler := common.CustomMarshaler() + jsonBytes, err := customMarshaler.Marshal(getResult) + assert.Nil(t, err) + + // Verify JSON doesn't contain empty/unset fields + jsonString := string(jsonBytes) + assert.Contains(t, jsonString, "description") + assert.Contains(t, jsonString, "display_name") + assert.Contains(t, jsonString, "experiment_id") // created by server + assert.NotContains(t, jsonString, "namespace") // if we don't specify this it should be omitted +} diff --git a/backend/src/apiserver/server/fakes_test.go b/backend/src/apiserver/server/fakes_test.go index 44d88b62951..0a699127ae9 100644 --- a/backend/src/apiserver/server/fakes_test.go +++ b/backend/src/apiserver/server/fakes_test.go @@ -227,7 +227,7 @@ func initWithExperimentAndPipelineVersion(t *testing.T) (*resource.FakeClientMan &model.PipelineVersion{ Name: "p1", PipelineId: p.UUID, - PipelineSpec: testWorkflow.ToStringForStore(), + PipelineSpec: 
model.LargeText(testWorkflow.ToStringForStore()), }, ) assert.Nil(t, err) diff --git a/backend/src/apiserver/server/job_server.go b/backend/src/apiserver/server/job_server.go index 557287ee62f..b2c1791286b 100644 --- a/backend/src/apiserver/server/job_server.go +++ b/backend/src/apiserver/server/job_server.go @@ -17,7 +17,8 @@ package server import ( "context" - "github.com/golang/protobuf/ptypes/empty" + "google.golang.org/protobuf/types/known/emptypb" + apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" "github.com/kubeflow/pipelines/backend/src/apiserver/common" @@ -75,12 +76,25 @@ type JobServerOptions struct { CollectMetrics bool } -type JobServer struct { +// BaseJobServer wraps JobServer and JobServerV1 +// to enable method sharing. It can be removed once JobServerV1 +// is removed. +type BaseJobServer struct { resourceManager *resource.ResourceManager options *JobServerOptions } -func (s *JobServer) createJob(ctx context.Context, job *model.Job) (*model.Job, error) { +type JobServer struct { + *BaseJobServer + apiv2beta1.UnimplementedRecurringRunServiceServer +} + +type JobServerV1 struct { + *BaseJobServer + apiv1beta1.UnimplementedJobServiceServer +} + +func (s *BaseJobServer) createJob(ctx context.Context, job *model.Job) (*model.Job, error) { // Validate user inputs if job.DisplayName == "" { return nil, util.NewInvalidInputError("Recurring run name is empty. Please specify a valid name") @@ -106,7 +120,7 @@ func (s *JobServer) createJob(ctx context.Context, job *model.Job) (*model.Job, return s.resourceManager.CreateJob(ctx, job) } -func (s *JobServer) CreateJob(ctx context.Context, request *apiv1beta1.CreateJobRequest) (*apiv1beta1.Job, error) { +func (s *JobServerV1) CreateJob(ctx context.Context, request *apiv1beta1.CreateJobRequest) (*apiv1beta1.Job, error) { if s.options.CollectMetrics { createJobRequests.Inc() } @@ -139,7 +153,7 @@ func (s *JobServer) CreateJob(ctx context.Context, request *apiv1beta1.CreateJob return toApiJobV1(newJob), nil } -func (s *JobServer) getJob(ctx context.Context, jobId string) (*model.Job, error) { +func (s *BaseJobServer) getJob(ctx context.Context, jobId string) (*model.Job, error) { err := s.canAccessJob(ctx, jobId, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbGet}) if err != nil { return nil, util.Wrap(err, "Failed to authorize the request") @@ -147,7 +161,7 @@ func (s *JobServer) getJob(ctx context.Context, jobId string) (*model.Job, error return s.resourceManager.GetJob(jobId) } -func (s *JobServer) GetJob(ctx context.Context, request *apiv1beta1.GetJobRequest) (*apiv1beta1.Job, error) { +func (s *JobServerV1) GetJob(ctx context.Context, request *apiv1beta1.GetJobRequest) (*apiv1beta1.Job, error) { if s.options.CollectMetrics { getJobRequests.Inc() } @@ -164,7 +178,7 @@ func (s *JobServer) GetJob(ctx context.Context, request *apiv1beta1.GetJobReques return apiJob, nil } -func (s *JobServer) listJobs(ctx context.Context, pageToken string, pageSize int, sortBy string, opts *list.Options, namespace string, experimentId string) ([]*model.Job, int, string, error) { +func (s *BaseJobServer) listJobs(ctx context.Context, pageToken string, pageSize int, sortBy string, opts *list.Options, namespace string, experimentId string) ([]*model.Job, int, string, error) { namespace = s.resourceManager.ReplaceNamespace(namespace) if experimentId != "" { ns, err := s.resourceManager.GetNamespaceFromExperimentId(experimentId) @@ -200,7 +214,7 
@@ func (s *JobServer) listJobs(ctx context.Context, pageToken string, pageSize int return jobs, totalSize, token, nil } -func (s *JobServer) ListJobs(ctx context.Context, r *apiv1beta1.ListJobsRequest) (*apiv1beta1.ListJobsResponse, error) { +func (s *JobServerV1) ListJobs(ctx context.Context, r *apiv1beta1.ListJobsRequest) (*apiv1beta1.ListJobsResponse, error) { if s.options.CollectMetrics { listJobRequests.Inc() } @@ -241,7 +255,7 @@ func (s *JobServer) ListJobs(ctx context.Context, r *apiv1beta1.ListJobsRequest) }, nil } -func (s *JobServer) EnableJob(ctx context.Context, request *apiv1beta1.EnableJobRequest) (*empty.Empty, error) { +func (s *JobServerV1) EnableJob(ctx context.Context, request *apiv1beta1.EnableJobRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { enableJobRequests.Inc() } @@ -249,10 +263,10 @@ func (s *JobServer) EnableJob(ctx context.Context, request *apiv1beta1.EnableJob if err != nil { return nil, util.Wrap(err, "Failed to enable a v1beta1 recurring run") } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } -func (s *JobServer) disableJob(ctx context.Context, jobId string) error { +func (s *BaseJobServer) disableJob(ctx context.Context, jobId string) error { err := s.canAccessJob(ctx, jobId, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbDisable}) if err != nil { return util.Wrap(err, "Failed to authorize the request") @@ -260,7 +274,7 @@ func (s *JobServer) disableJob(ctx context.Context, jobId string) error { return s.resourceManager.ChangeJobMode(ctx, jobId, false) } -func (s *JobServer) DisableJob(ctx context.Context, request *apiv1beta1.DisableJobRequest) (*empty.Empty, error) { +func (s *JobServerV1) DisableJob(ctx context.Context, request *apiv1beta1.DisableJobRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { disableJobRequests.Inc() } @@ -269,10 +283,10 @@ func (s *JobServer) DisableJob(ctx context.Context, request *apiv1beta1.DisableJ if err != nil { return nil, util.Wrap(err, "Failed to disable a v1beta1 recurring run") } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } -func (s *JobServer) deleteJob(ctx context.Context, jobId string) error { +func (s *BaseJobServer) deleteJob(ctx context.Context, jobId string) error { err := s.canAccessJob(ctx, jobId, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbDelete}) if err != nil { return util.Wrap(err, "Failed to authorize the request") @@ -281,7 +295,7 @@ func (s *JobServer) deleteJob(ctx context.Context, jobId string) error { return s.resourceManager.DeleteJob(ctx, jobId) } -func (s *JobServer) DeleteJob(ctx context.Context, request *apiv1beta1.DeleteJobRequest) (*empty.Empty, error) { +func (s *JobServerV1) DeleteJob(ctx context.Context, request *apiv1beta1.DeleteJobRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { deleteJobRequests.Inc() } @@ -292,10 +306,10 @@ func (s *JobServer) DeleteJob(ctx context.Context, request *apiv1beta1.DeleteJob if s.options.CollectMetrics { jobCount.Dec() } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } -func (s *JobServer) enableJob(ctx context.Context, jobId string) error { +func (s *BaseJobServer) enableJob(ctx context.Context, jobId string) error { err := s.canAccessJob(ctx, jobId, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbEnable}) if err != nil { return util.Wrap(err, "Failed to authorize the request") @@ -370,7 +384,7 @@ func (s *JobServer) ListRecurringRuns(ctx context.Context, r *apiv2beta1.ListRec }, nil } -func (s *JobServer) 
EnableRecurringRun(ctx context.Context, request *apiv2beta1.EnableRecurringRunRequest) (*empty.Empty, error) { +func (s *JobServer) EnableRecurringRun(ctx context.Context, request *apiv2beta1.EnableRecurringRunRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { enableJobRequests.Inc() } @@ -378,10 +392,10 @@ func (s *JobServer) EnableRecurringRun(ctx context.Context, request *apiv2beta1. if err != nil { return nil, util.Wrap(err, "Failed to enable a recurring run") } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } -func (s *JobServer) DisableRecurringRun(ctx context.Context, request *apiv2beta1.DisableRecurringRunRequest) (*empty.Empty, error) { +func (s *JobServer) DisableRecurringRun(ctx context.Context, request *apiv2beta1.DisableRecurringRunRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { disableJobRequests.Inc() } @@ -390,10 +404,10 @@ func (s *JobServer) DisableRecurringRun(ctx context.Context, request *apiv2beta1 if err != nil { return nil, util.Wrap(err, "Failed to disable a recurring run") } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } -func (s *JobServer) DeleteRecurringRun(ctx context.Context, request *apiv2beta1.DeleteRecurringRunRequest) (*empty.Empty, error) { +func (s *JobServer) DeleteRecurringRun(ctx context.Context, request *apiv2beta1.DeleteRecurringRunRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { deleteJobRequests.Inc() } @@ -404,10 +418,10 @@ func (s *JobServer) DeleteRecurringRun(ctx context.Context, request *apiv2beta1. if s.options.CollectMetrics { jobCount.Dec() } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } -func (s *JobServer) canAccessJob(ctx context.Context, jobID string, resourceAttributes *authorizationv1.ResourceAttributes) error { +func (s *BaseJobServer) canAccessJob(ctx context.Context, jobID string, resourceAttributes *authorizationv1.ResourceAttributes) error { if !common.IsMultiUserMode() { // Skip authorization if not multi-user mode. 
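Editor's aside (not part of the diff): the BaseJobServer refactor above uses Go struct embedding so that the v1beta1 and v2beta1 services share a single implementation while each still embeds its own generated Unimplemented*Server. The sketch below illustrates only the embedding pattern; the names BaseServer, ServerV1, ServerV2, and sharedLookup are hypothetical and not taken from the KFP codebase.

```go
package main

import "fmt"

// BaseServer holds the shared state and unexported helpers (hypothetical names).
type BaseServer struct {
	store map[string]string
}

// sharedLookup is written once and promoted into every server that embeds BaseServer.
func (b *BaseServer) sharedLookup(id string) (string, error) {
	v, ok := b.store[id]
	if !ok {
		return "", fmt.Errorf("not found: %s", id)
	}
	return v, nil
}

// ServerV1 and ServerV2 embed the same base; each adds only its API-specific methods.
type ServerV1 struct{ *BaseServer }
type ServerV2 struct{ *BaseServer }

func (s *ServerV1) GetV1(id string) (string, error) { return s.sharedLookup(id) }
func (s *ServerV2) Get(id string) (string, error)   { return s.sharedLookup(id) }

func main() {
	base := &BaseServer{store: map[string]string{"a": "1"}}
	v1, v2 := &ServerV1{base}, &ServerV2{base}
	fmt.Println(v1.GetV1("a")) // 1 <nil>
	fmt.Println(v2.Get("a"))   // 1 <nil>
}
```

This is why helpers such as canAccessJob in the diff only need to move to the base type once, and why deleting the V1 server later should not disturb the V2 code path.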
return nil @@ -445,5 +459,19 @@ func (s *JobServer) canAccessJob(ctx context.Context, jobID string, resourceAttr } func NewJobServer(resourceManager *resource.ResourceManager, options *JobServerOptions) *JobServer { - return &JobServer{resourceManager: resourceManager, options: options} + return &JobServer{ + BaseJobServer: &BaseJobServer{ + resourceManager: resourceManager, + options: options, + }, + } +} + +func NewJobServerV1(resourceManager *resource.ResourceManager, options *JobServerOptions) *JobServerV1 { + return &JobServerV1{ + BaseJobServer: &BaseJobServer{ + resourceManager: resourceManager, + options: options, + }, + } } diff --git a/backend/src/apiserver/server/job_server_test.go b/backend/src/apiserver/server/job_server_test.go index ebe81973296..6bd1c6fad5d 100644 --- a/backend/src/apiserver/server/job_server_test.go +++ b/backend/src/apiserver/server/job_server_test.go @@ -18,8 +18,10 @@ import ( "context" "strings" "testing" + "time" + + "google.golang.org/protobuf/types/known/timestamppb" - "github.com/golang/protobuf/ptypes/timestamp" "github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp/cmpopts" api "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" @@ -44,7 +46,7 @@ var ( MaxConcurrency: 1, Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}, }, @@ -68,12 +70,12 @@ var ( MaxConcurrency: 1, Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}, }, - CreatedAt: &timestamp.Timestamp{Seconds: 2}, - UpdatedAt: &timestamp.Timestamp{Seconds: 2}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + UpdatedAt: timestamppb.New(time.Unix(2, 0)), Status: "STATUS_UNSPECIFIED", PipelineSpec: &apiv1beta1.PipelineSpec{ WorkflowManifest: testWorkflow.ToStringForStore(), @@ -93,7 +95,7 @@ var ( MaxConcurrency: 1, Trigger: &apiv2beta1.Trigger{ Trigger: &apiv2beta1.Trigger_CronSchedule{CronSchedule: &apiv2beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}, }, @@ -102,10 +104,32 @@ var ( } ) +func createJobServerV1(resourceManager *resource.ResourceManager) *JobServerV1 { + return &JobServerV1{ + BaseJobServer: &BaseJobServer{ + resourceManager: resourceManager, + options: &JobServerOptions{ + CollectMetrics: false, + }, + }, + } +} + +func createJobServer(resourceManager *resource.ResourceManager) *JobServer { + return &JobServer{ + BaseJobServer: &BaseJobServer{ + resourceManager: resourceManager, + options: &JobServerOptions{ + CollectMetrics: false, + }, + }, + } +} + func TestCreateJob_WrongInput(t *testing.T) { clients, manager, experiment, _ := initWithExperimentAndPipelineVersion(t) defer clients.Close() - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server := createJobServerV1(manager) tests := []struct { name string arg *apiv1beta1.Job @@ -119,7 +143,7 @@ func TestCreateJob_WrongInput(t *testing.T) { MaxConcurrency: 1, Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}}, ResourceReferences: []*api.ResourceReference{ @@ -146,7 +170,7 @@ func 
TestCreateJob_WrongInput(t *testing.T) { MaxConcurrency: 1, Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}, }, @@ -162,7 +186,7 @@ func TestCreateJob_WrongInput(t *testing.T) { MaxConcurrency: 1, Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}}, PipelineSpec: &apiv1beta1.PipelineSpec{ @@ -184,7 +208,7 @@ func TestCreateJob_WrongInput(t *testing.T) { MaxConcurrency: 1, Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * ", }}}, PipelineSpec: &apiv1beta1.PipelineSpec{ @@ -205,7 +229,7 @@ func TestCreateJob_WrongInput(t *testing.T) { MaxConcurrency: 0, Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}}, PipelineSpec: &apiv1beta1.PipelineSpec{ @@ -254,7 +278,7 @@ func TestCreateJob_WrongInput(t *testing.T) { func TestCreateJob_pipelineVersion(t *testing.T) { clients, manager, exp, pipelineVersion := initWithExperimentAndPipelineVersion(t) defer clients.Close() - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server := createJobServerV1(manager) rr := []*apiv1beta1.ResourceReference{ { Key: &apiv1beta1.ResourceKey{ @@ -277,7 +301,7 @@ func TestCreateJob_pipelineVersion(t *testing.T) { MaxConcurrency: 1, Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}}, ResourceReferences: rr, @@ -292,7 +316,7 @@ func TestCreateJob_pipelineVersion(t *testing.T) { MaxConcurrency: 1, Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}}, ResourceReferences: rr, @@ -324,14 +348,14 @@ func TestCreateJob_NoResRefs(t *testing.T) { defer clients.Close() clients.UpdateUUID(util.NewFakeUUIDGeneratorOrFatal(DefaultFakeIdTwo, nil)) manager = resource.NewResourceManager(clients, &resource.ResourceManagerOptions{CollectMetrics: false}) - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server := createJobServerV1(manager) apiJob := &apiv1beta1.Job{ Name: "job1", Enabled: true, MaxConcurrency: 1, Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}}, PipelineSpec: &apiv1beta1.PipelineSpec{ @@ -358,7 +382,7 @@ func TestCreateJob_NoResRefs(t *testing.T) { MaxConcurrency: 1, Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}}, ResourceReferences: rr, @@ -387,7 +411,7 @@ func TestCreateJob_NoResRefs(t *testing.T) { func 
TestCreateJob(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server := createJobServerV1(manager) job, err := server.CreateJob(nil, &apiv1beta1.CreateJobRequest{Job: commonApiJob}) assert.Nil(t, err) matched := 0 @@ -412,7 +436,7 @@ func TestCreateJob(t *testing.T) { func TestCreateJob_V2(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server := createJobServerV1(manager) listParams := []interface{}{1, 2, 3} v2RuntimeListParams, _ := structpb.NewList(listParams) @@ -434,7 +458,7 @@ func TestCreateJob_V2(t *testing.T) { MaxConcurrency: 1, Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}, }, @@ -461,12 +485,12 @@ func TestCreateJob_V2(t *testing.T) { MaxConcurrency: 1, Trigger: &apiv1beta1.Trigger{ Trigger: &apiv1beta1.Trigger_CronSchedule{CronSchedule: &apiv1beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}, }, - CreatedAt: &timestamp.Timestamp{Seconds: 2}, - UpdatedAt: &timestamp.Timestamp{Seconds: 2}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + UpdatedAt: timestamppb.New(time.Unix(2, 0)), Status: "STATUS_UNSPECIFIED", PipelineSpec: &apiv1beta1.PipelineSpec{ PipelineManifest: v2SpecHelloWorldParams, @@ -515,7 +539,7 @@ func TestListRecurringRuns_MultiUser(t *testing.T) { clients, manager, experiment := initWithExperiment(t) defer clients.Close() - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server := createJobServer(manager) pipelineSpecStruct := &structpb.Struct{} yaml.Unmarshal([]byte(v2SpecHelloWorld), pipelineSpecStruct) @@ -526,7 +550,7 @@ func TestListRecurringRuns_MultiUser(t *testing.T) { MaxConcurrency: 1, Trigger: &apiv2beta1.Trigger{ Trigger: &apiv2beta1.Trigger_CronSchedule{CronSchedule: &apiv2beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}, }, @@ -552,12 +576,12 @@ func TestListRecurringRuns_MultiUser(t *testing.T) { MaxConcurrency: 1, Trigger: &apiv2beta1.Trigger{ Trigger: &apiv2beta1.Trigger_CronSchedule{CronSchedule: &apiv2beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}, }, - CreatedAt: &timestamp.Timestamp{Seconds: 2}, - UpdatedAt: &timestamp.Timestamp{Seconds: 2}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + UpdatedAt: timestamppb.New(time.Unix(2, 0)), PipelineSource: &apiv2beta1.RecurringRun_PipelineSpec{PipelineSpec: pipelineSpecStruct}, RuntimeConfig: &apiv2beta1.RuntimeConfig{ PipelineRoot: "model-pipeline-root", @@ -598,7 +622,7 @@ func TestCreateJob_Unauthorized(t *testing.T) { clients, manager, _ := initWithExperiment_SubjectAccessReview_Unauthorized(t) defer clients.Close() - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server := createJobServerV1(manager) _, err := server.CreateJob(ctx, &apiv1beta1.CreateJobRequest{Job: commonApiJob}) assert.NotNil(t, err) assert.Contains( @@ -619,13 +643,13 @@ func TestGetJob_Unauthorized(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: 
false}) + server := createJobServerV1(manager) job, err := server.CreateJob(ctx, &apiv1beta1.CreateJobRequest{Job: commonApiJob}) assert.Nil(t, err) clients.SubjectAccessReviewClientFake = client.NewFakeSubjectAccessReviewClientUnauthorized() manager = resource.NewResourceManager(clients, &resource.ResourceManagerOptions{CollectMetrics: false}) - server = NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server = createJobServerV1(manager) _, err = server.GetJob(ctx, &apiv1beta1.GetJobRequest{Id: job.Id}) assert.NotNil(t, err) @@ -645,7 +669,7 @@ func TestGetJob_Multiuser(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server := createJobServerV1(manager) createdJob, err := server.CreateJob(ctx, &apiv1beta1.CreateJobRequest{Job: commonApiJob}) assert.Nil(t, err) @@ -681,7 +705,7 @@ func TestListJobs_Unauthorized(t *testing.T) { clients, manager, experiment := initWithExperiment_SubjectAccessReview_Unauthorized(t) defer clients.Close() - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server := createJobServerV1(manager) _, err := server.ListJobs(ctx, &apiv1beta1.ListJobsRequest{ ResourceReferenceKey: &apiv1beta1.ResourceKey{ Type: apiv1beta1.ResourceType_EXPERIMENT, @@ -718,13 +742,13 @@ func TestListJobs_Multiuser(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server := createJobServerV1(manager) _, err := server.CreateJob(ctx, &apiv1beta1.CreateJobRequest{Job: commonApiJob}) assert.Nil(t, err) var expectedJobs []*apiv1beta1.Job - commonExpectedJob.CreatedAt = &timestamp.Timestamp{Seconds: 2} - commonExpectedJob.UpdatedAt = &timestamp.Timestamp{Seconds: 2} + commonExpectedJob.CreatedAt = timestamppb.New(time.Unix(2, 0)) + commonExpectedJob.UpdatedAt = timestamppb.New(time.Unix(2, 0)) commonExpectedJob.ResourceReferences = []*apiv1beta1.ResourceReference{ {Key: &apiv1beta1.ResourceKey{Type: apiv1beta1.ResourceType_NAMESPACE, Id: "ns1"}, Relationship: apiv1beta1.Relationship_OWNER}, {Key: &apiv1beta1.ResourceKey{Type: apiv1beta1.ResourceType_EXPERIMENT, Id: DefaultFakeIdOne}, Relationship: apiv1beta1.Relationship_OWNER}, @@ -826,13 +850,13 @@ func TestEnableJob_Unauthorized(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server := createJobServerV1(manager) job, err := server.CreateJob(ctx, &apiv1beta1.CreateJobRequest{Job: commonApiJob}) assert.Nil(t, err) clients.SubjectAccessReviewClientFake = client.NewFakeSubjectAccessReviewClientUnauthorized() manager = resource.NewResourceManager(clients, &resource.ResourceManagerOptions{CollectMetrics: false}) - server = NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server = createJobServerV1(manager) _, err = server.EnableJob(ctx, &apiv1beta1.EnableJobRequest{Id: job.Id}) assert.NotNil(t, err) @@ -852,7 +876,7 @@ func TestEnableJob_Multiuser(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server := createJobServerV1(manager) job, err := server.CreateJob(ctx, &apiv1beta1.CreateJobRequest{Job: commonApiJob}) assert.Nil(t, err) @@ -871,13 +895,13 @@ func TestDisableJob_Unauthorized(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer 
clients.Close() - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server := createJobServerV1(manager) job, err := server.CreateJob(ctx, &apiv1beta1.CreateJobRequest{Job: commonApiJob}) assert.Nil(t, err) clients.SubjectAccessReviewClientFake = client.NewFakeSubjectAccessReviewClientUnauthorized() manager = resource.NewResourceManager(clients, &resource.ResourceManagerOptions{CollectMetrics: false}) - server = NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server = createJobServerV1(manager) _, err = server.DisableJob(ctx, &apiv1beta1.DisableJobRequest{Id: job.Id}) assert.NotNil(t, err) @@ -897,7 +921,7 @@ func TestDisableJob_Multiuser(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server := createJobServerV1(manager) job, err := server.CreateJob(ctx, &apiv1beta1.CreateJobRequest{Job: commonApiJob}) assert.Nil(t, err) @@ -916,7 +940,7 @@ func TestListJobs_Unauthenticated(t *testing.T) { clients, manager, experiment := initWithExperiment(t) defer clients.Close() - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server := createJobServerV1(manager) _, err := server.ListJobs(ctx, &apiv1beta1.ListJobsRequest{ ResourceReferenceKey: &apiv1beta1.ResourceKey{ Type: apiv1beta1.ResourceType_EXPERIMENT, @@ -947,7 +971,7 @@ func TestListJobs_Unauthenticated(t *testing.T) { func TestCreateRecurringRun(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server := createJobServer(manager) pipelineSpecStruct := &structpb.Struct{} yaml.Unmarshal([]byte(v2SpecHelloWorld), pipelineSpecStruct) @@ -958,7 +982,7 @@ func TestCreateRecurringRun(t *testing.T) { MaxConcurrency: 1, Trigger: &apiv2beta1.Trigger{ Trigger: &apiv2beta1.Trigger_CronSchedule{CronSchedule: &apiv2beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}, }, @@ -984,12 +1008,12 @@ func TestCreateRecurringRun(t *testing.T) { MaxConcurrency: 1, Trigger: &apiv2beta1.Trigger{ Trigger: &apiv2beta1.Trigger_CronSchedule{CronSchedule: &apiv2beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}, }, - CreatedAt: &timestamp.Timestamp{Seconds: 2}, - UpdatedAt: &timestamp.Timestamp{Seconds: 2}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + UpdatedAt: timestamppb.New(time.Unix(2, 0)), Status: apiv2beta1.RecurringRun_ENABLED, PipelineSource: &apiv2beta1.RecurringRun_PipelineSpec{PipelineSpec: pipelineSpecStruct}, RuntimeConfig: &apiv2beta1.RuntimeConfig{ @@ -1009,7 +1033,7 @@ func TestCreateRecurringRun(t *testing.T) { func TestGetRecurringRun(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server := createJobServer(manager) pipelineSpecStruct := &structpb.Struct{} yaml.Unmarshal([]byte(v2SpecHelloWorld), pipelineSpecStruct) @@ -1020,7 +1044,7 @@ func TestGetRecurringRun(t *testing.T) { MaxConcurrency: 1, Trigger: &apiv2beta1.Trigger{ Trigger: &apiv2beta1.Trigger_CronSchedule{CronSchedule: &apiv2beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}, }, @@ -1046,12 +1070,12 @@ func TestGetRecurringRun(t *testing.T) { MaxConcurrency: 1, 
Trigger: &apiv2beta1.Trigger{ Trigger: &apiv2beta1.Trigger_CronSchedule{CronSchedule: &apiv2beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}, }, - CreatedAt: &timestamp.Timestamp{Seconds: 2}, - UpdatedAt: &timestamp.Timestamp{Seconds: 2}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + UpdatedAt: timestamppb.New(time.Unix(2, 0)), Status: apiv2beta1.RecurringRun_ENABLED, PipelineSource: &apiv2beta1.RecurringRun_PipelineSpec{PipelineSpec: pipelineSpecStruct}, RuntimeConfig: &apiv2beta1.RuntimeConfig{ @@ -1074,7 +1098,7 @@ func TestGetRecurringRun(t *testing.T) { func TestListRecurringRuns(t *testing.T) { clients, manager, experiment := initWithExperiment(t) defer clients.Close() - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server := createJobServer(manager) pipelineSpecStruct := &structpb.Struct{} yaml.Unmarshal([]byte(v2SpecHelloWorld), pipelineSpecStruct) @@ -1085,7 +1109,7 @@ func TestListRecurringRuns(t *testing.T) { MaxConcurrency: 1, Trigger: &apiv2beta1.Trigger{ Trigger: &apiv2beta1.Trigger_CronSchedule{CronSchedule: &apiv2beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}, }, @@ -1111,12 +1135,12 @@ func TestListRecurringRuns(t *testing.T) { MaxConcurrency: 1, Trigger: &apiv2beta1.Trigger{ Trigger: &apiv2beta1.Trigger_CronSchedule{CronSchedule: &apiv2beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}, }, - CreatedAt: &timestamp.Timestamp{Seconds: 2}, - UpdatedAt: &timestamp.Timestamp{Seconds: 2}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + UpdatedAt: timestamppb.New(time.Unix(2, 0)), PipelineSource: &apiv2beta1.RecurringRun_PipelineSpec{PipelineSpec: pipelineSpecStruct}, RuntimeConfig: &apiv2beta1.RuntimeConfig{ PipelineRoot: "model-pipeline-root", @@ -1152,7 +1176,7 @@ func TestListRecurringRuns(t *testing.T) { func TestEnableRecurringRun(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server := createJobServer(manager) pipelineSpecStruct := &structpb.Struct{} yaml.Unmarshal([]byte(v2SpecHelloWorld), pipelineSpecStruct) @@ -1163,7 +1187,7 @@ func TestEnableRecurringRun(t *testing.T) { MaxConcurrency: 1, Trigger: &apiv2beta1.Trigger{ Trigger: &apiv2beta1.Trigger_CronSchedule{CronSchedule: &apiv2beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}, }, @@ -1187,7 +1211,7 @@ func TestEnableRecurringRun(t *testing.T) { func TestDisableRecurringRun(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() - server := NewJobServer(manager, &JobServerOptions{CollectMetrics: false}) + server := createJobServer(manager) pipelineSpecStruct := &structpb.Struct{} yaml.Unmarshal([]byte(v2SpecHelloWorld), pipelineSpecStruct) @@ -1198,7 +1222,7 @@ func TestDisableRecurringRun(t *testing.T) { MaxConcurrency: 1, Trigger: &apiv2beta1.Trigger{ Trigger: &apiv2beta1.Trigger_CronSchedule{CronSchedule: &apiv2beta1.CronSchedule{ - StartTime: &timestamp.Timestamp{Seconds: 1}, + StartTime: timestamppb.New(time.Unix(1, 0)), Cron: "1 * * * *", }}, }, diff --git a/backend/src/apiserver/server/list_request_util.go b/backend/src/apiserver/server/list_request_util.go index c5bc8c71cf2..985071ed76a 100644 --- 
a/backend/src/apiserver/server/list_request_util.go +++ b/backend/src/apiserver/server/list_request_util.go @@ -17,11 +17,11 @@ package server import ( "encoding/base64" "encoding/json" + "fmt" "net/url" "strconv" "strings" - "github.com/golang/protobuf/jsonpb" apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" "github.com/kubeflow/pipelines/backend/src/apiserver/common" @@ -29,6 +29,7 @@ import ( "github.com/kubeflow/pipelines/backend/src/apiserver/list" "github.com/kubeflow/pipelines/backend/src/apiserver/model" "github.com/kubeflow/pipelines/backend/src/common/util" + "google.golang.org/protobuf/encoding/protojson" ) const ( @@ -145,16 +146,23 @@ func parseAPIFilter(encoded string, apiVersion string) (interface{}, error) { if err != nil { return nil, util.NewInvalidInputError("failed to parse valid filter from %q: %v", encoded, err) } + + transformedJSON, err := transformJSONForBackwardCompatibility(decoded) + if err != nil { + fmt.Printf("Failed to transform JSON: %v\n", err) + return nil, err + } + switch apiVersion { case "v2beta1": f := &apiv2beta1.Filter{} - if err := jsonpb.UnmarshalString(decoded, f); err != nil { + if err := protojson.Unmarshal([]byte(transformedJSON), f); err != nil { return nil, util.NewInvalidInputError("failed to parse valid filter from %q: %v", encoded, err) } return f, nil case "v1beta1": f := &apiv1beta1.Filter{} - if err := jsonpb.UnmarshalString(decoded, f); err != nil { + if err := protojson.Unmarshal([]byte(transformedJSON), f); err != nil { return nil, util.NewInvalidInputError("failed to parse valid filter from %q: %v", encoded, err) } return f, nil @@ -206,3 +214,23 @@ func validatedListOptions(listable list.Listable, pageToken string, pageSize int return opts, nil } + +// transformJSONForBackwardCompatibility replaces specific JSON key names to maintain +// backward compatibility with older APIs. Previously, KFP deviated from the typical +// snake_case naming convention for protobuf field names for Filter predicate values. +// See Predicate.value in backend/api/v2beta1/filter.proto for these values.
+func transformJSONForBackwardCompatibility(jsonStr string) (string, error) { + replacer := strings.NewReplacer( + `"intValue":`, `"int_value":`, + `"longValue":`, `"long_value":`, + `"stringValue":`, `"string_value":`, + `"timestampValue":`, `"timestamp_value":`, + `"intValues":`, `"int_values":`, + `"longValues":`, `"long_values":`, + `"stringValues":`, `"string_values":`, + ) + return replacer.Replace(jsonStr), nil +} diff --git a/backend/src/apiserver/server/list_request_util_test.go b/backend/src/apiserver/server/list_request_util_test.go index 98efaaf7a62..e13d8315957 100644 --- a/backend/src/apiserver/server/list_request_util_test.go +++ b/backend/src/apiserver/server/list_request_util_test.go @@ -128,7 +128,59 @@ func TestDeserializePageToken(t *testing.T) { assert.Nil(t, err) assert.Equal(t, token, *actualToken) } +func TestTransformJSONForBackwardCompatibility(t *testing.T) { + tests := []struct { + name string + input string + expected string + expectErr bool + }{ + { + name: "Standard case with multiple replacements", + input: `{"intValue": 1, "stringValue": "test", "longValue": 1234567890}`, + expected: `{"int_value": 1, "string_value": "test", "long_value": 1234567890}`, + expectErr: false, + }, + { + name: "No replacements needed", + input: `{"int_value": 1, "string_value": "test", "long_value": 1234567890}`, + expected: `{"int_value": 1, "string_value": "test", "long_value": 1234567890}`, + expectErr: false, + }, + { + name: "Empty input", + input: ``, + expected: ``, + expectErr: false, + }, + { + name: "Nested JSON structure", + input: `{"data": {"intValue": 5, "stringValue": "nested"}}`, + expected: `{"data": {"int_value": 5, "string_value": "nested"}}`, + expectErr: false, + }, + { + name: "Input with no recognized fields", + input: `{"otherValue": "value"}`, + expected: `{"otherValue": "value"}`, + expectErr: false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + result, err := transformJSONForBackwardCompatibility(test.input) + + if test.expectErr { + assert.NotNil(t, err, "Expected an error but got none") + return + } + assert.Nil(t, err, "Expected no error but got: %+v", err) + assert.Equal(t, test.expected, result, "Unexpected result for input: %v", test.input) + }) + } +} func TestDeserializePageToken_InvalidEncodingStringError(t *testing.T) { _, err := deserializePageToken("this is a invalid token") assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode()) diff --git a/backend/src/apiserver/server/pipeline_server.go b/backend/src/apiserver/server/pipeline_server.go index e9fcf52be5e..355be3c95cd 100644 --- a/backend/src/apiserver/server/pipeline_server.go +++ b/backend/src/apiserver/server/pipeline_server.go @@ -20,7 +20,8 @@ import ( "net/url" "path" - "github.com/golang/protobuf/ptypes/empty" + "google.golang.org/protobuf/types/known/emptypb" + apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" "github.com/kubeflow/pipelines/backend/src/apiserver/common" @@ -97,15 +98,28 @@ type PipelineServerOptions struct { CollectMetrics bool `json:"collect_metrics,omitempty"` } -type PipelineServer struct { +// BasePipelineServer wraps PipelineServer and PipelineServerV1 +// to enable method sharing. It can be removed once PipelineServerV1 +// is removed. 
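Editor's aside (not part of the diff): the transformJSONForBackwardCompatibility helper above is a single-pass key rewrite built on strings.NewReplacer, applied before the filter JSON is handed to protojson. A self-contained sketch of the same technique, using one of the inputs from the new unit test; the sample JSON is illustrative only.

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Normalize the legacy camelCase predicate keys to the snake_case keys the
	// backend now expects (see transformJSONForBackwardCompatibility in the diff).
	// strings.NewReplacer applies every pair in one pass over the input string.
	replacer := strings.NewReplacer(
		`"intValue":`, `"int_value":`,
		`"stringValue":`, `"string_value":`,
		`"longValue":`, `"long_value":`,
	)
	legacy := `{"intValue": 1, "stringValue": "test", "longValue": 1234567890}`
	fmt.Println(replacer.Replace(legacy))
	// {"int_value": 1, "string_value": "test", "long_value": 1234567890}
}
```

Because the replacement is purely textual, keys that happen to appear inside string values would also be rewritten; the unit test cases in the diff cover the expected shapes, so this is an accepted trade-off rather than a general JSON transformation.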
+type BasePipelineServer struct { resourceManager *resource.ResourceManager httpClient *http.Client options *PipelineServerOptions } +type PipelineServer struct { + *BasePipelineServer + apiv2beta1.UnimplementedPipelineServiceServer +} + +type PipelineServerV1 struct { + *BasePipelineServer + apiv1beta1.UnimplementedPipelineServiceServer +} + // Creates a pipeline. Not exported. // Applies common logic on v1beta1 and v2beta1 API. -func (s *PipelineServer) createPipeline(ctx context.Context, pipeline *model.Pipeline) (*model.Pipeline, error) { +func (s *BasePipelineServer) createPipeline(ctx context.Context, pipeline *model.Pipeline) (*model.Pipeline, error) { pipeline.Namespace = s.resourceManager.ReplaceNamespace(pipeline.Namespace) if pipeline.Name == "" { @@ -127,9 +141,9 @@ func (s *PipelineServer) createPipeline(ctx context.Context, pipeline *model.Pip // Creates a pipeline and a pipeline version in a single transaction. // Applies common logic on v1beta1 and v2beta1 API. -func (s *PipelineServer) createPipelineAndPipelineVersion(ctx context.Context, pipeline *model.Pipeline, pipelineUrlStr string) (*model.Pipeline, *model.PipelineVersion, error) { +func (s *BasePipelineServer) createPipelineAndPipelineVersion(ctx context.Context, pipeline *model.Pipeline, pipelineURLStr string) (*model.Pipeline, *model.PipelineVersion, error) { // Resolve name and namespace - pipelineFileName := path.Base(pipelineUrlStr) + pipelineFileName := path.Base(pipelineURLStr) pipeline.Name = buildPipelineName(pipeline.Name, pipeline.DisplayName, pipelineFileName) if pipeline.DisplayName == "" { @@ -153,28 +167,28 @@ func (s *PipelineServer) createPipelineAndPipelineVersion(ctx context.Context, p pipelineVersion := &model.PipelineVersion{ Name: pipeline.Name, DisplayName: pipeline.DisplayName, - PipelineSpecURI: pipelineUrlStr, + PipelineSpecURI: model.LargeText(pipelineURLStr), Description: pipeline.Description, Status: model.PipelineVersionCreating, } // Download and parse pipeline spec - pipelineUrl, err := url.ParseRequestURI(pipelineUrlStr) + pipelineURL, err := url.ParseRequestURI(pipelineURLStr) if err != nil { - return nil, nil, util.NewInvalidInputError("invalid pipeline spec URL: %v", pipelineUrlStr) + return nil, nil, util.NewInvalidInputError("invalid pipeline spec URL: %v", pipelineURLStr) } - resp, err := s.httpClient.Get(pipelineUrl.String()) + resp, err := s.httpClient.Get(pipelineURL.String()) if err != nil { - return nil, nil, util.NewInternalServerError(err, "error downloading the pipeline spec from %v", pipelineUrl.String()) + return nil, nil, util.NewInternalServerError(err, "error downloading the pipeline spec from %v", pipelineURL.String()) } else if resp.StatusCode != http.StatusOK { - return nil, nil, util.NewInvalidInputError("error fetching pipeline spec from %v - request returned %v", pipelineUrl.String(), resp.Status) + return nil, nil, util.NewInvalidInputError("error fetching pipeline spec from %v - request returned %v", pipelineURL.String(), resp.Status) } defer resp.Body.Close() pipelineFile, err := ReadPipelineFile(pipelineFileName, resp.Body, common.MaxFileLength) if err != nil { return nil, nil, err } - pipelineVersion.PipelineSpec = string(pipelineFile) + pipelineVersion.PipelineSpec = model.LargeText(pipelineFile) // Validate the pipeline version if err := s.validatePipelineVersionBeforeCreating(pipelineVersion); err != nil { @@ -187,7 +201,7 @@ func (s *PipelineServer) createPipelineAndPipelineVersion(ctx context.Context, p // Creates a pipeline and a pipeline version in 
a single transaction. // Supports v1beta1 behavior. -func (s *PipelineServer) CreatePipelineV1(ctx context.Context, request *apiv1beta1.CreatePipelineRequest) (*apiv1beta1.Pipeline, error) { +func (s *PipelineServerV1) CreatePipelineV1(ctx context.Context, request *apiv1beta1.CreatePipelineRequest) (*apiv1beta1.Pipeline, error) { if s.options.CollectMetrics { createPipelineRequests.Inc() createPipelineVersionRequests.Inc() @@ -240,7 +254,7 @@ func (s *PipelineServer) CreatePipeline(ctx context.Context, request *apiv2beta1 // TODO(gkcalat): consider removing as default version is deprecated. This requires changes to v1beta1 proto. // Updates default pipeline version for a given pipeline. // Supports v1beta1 behavior. -func (s *PipelineServer) UpdatePipelineDefaultVersionV1(ctx context.Context, request *apiv1beta1.UpdatePipelineDefaultVersionRequest) (*empty.Empty, error) { +func (s *PipelineServerV1) UpdatePipelineDefaultVersionV1(ctx context.Context, request *apiv1beta1.UpdatePipelineDefaultVersionRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { updatePipelineDefaultVersionRequests.Inc() } @@ -255,12 +269,12 @@ func (s *PipelineServer) UpdatePipelineDefaultVersionV1(ctx context.Context, req if err != nil { return nil, util.Wrapf(err, "Failed to update (v1beta1) default pipeline version to %s for pipeline %s. Check error stack", request.GetVersionId(), request.GetPipelineId()) } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } // Fetches a pipeline. // Applies common logic on v1beta1 and v2beta1 API. -func (s *PipelineServer) getPipeline(ctx context.Context, pipelineId string) (*model.Pipeline, error) { +func (s *BasePipelineServer) getPipeline(ctx context.Context, pipelineId string) (*model.Pipeline, error) { if pipelineId == "" { return nil, util.NewInvalidInputError("Failed to get a pipeline. Pipeline id cannot be empty") } @@ -278,7 +292,7 @@ func (s *PipelineServer) getPipeline(ctx context.Context, pipelineId string) (*m // Returns a pipeline. // Note, the default pipeline version will be set to be the latest pipeline version. // Supports v1beta behavior. -func (s *PipelineServer) GetPipelineV1(ctx context.Context, request *apiv1beta1.GetPipelineRequest) (*apiv1beta1.Pipeline, error) { +func (s *PipelineServerV1) GetPipelineV1(ctx context.Context, request *apiv1beta1.GetPipelineRequest) (*apiv1beta1.Pipeline, error) { if s.options.CollectMetrics { getPipelineRequests.Inc() } @@ -312,7 +326,7 @@ func (s *PipelineServer) GetPipeline(ctx context.Context, request *apiv2beta1.Ge // Fetches pipeline and (optionally) pipeline version for a given name and namespace. // Applies common logic on v1beta1 and v2beta1 API. -func (s *PipelineServer) getPipelineByName(ctx context.Context, name string, namespace string, apiRequestVersion string) (*model.Pipeline, *model.PipelineVersion, error) { +func (s *BasePipelineServer) getPipelineByName(ctx context.Context, name string, namespace string, apiRequestVersion string) (*model.Pipeline, *model.PipelineVersion, error) { namespace = s.resourceManager.ReplaceNamespace(namespace) resourceAttributes := &authorizationv1.ResourceAttributes{ Namespace: namespace, @@ -338,7 +352,7 @@ func (s *PipelineServer) getPipelineByName(ctx context.Context, name string, nam // Returns a pipeline with the default (latest) pipeline version given a name and a namespace. // Supports v1beta behavior. 
-func (s *PipelineServer) GetPipelineByNameV1(ctx context.Context, request *apiv1beta1.GetPipelineByNameRequest) (*apiv1beta1.Pipeline, error) { +func (s *PipelineServerV1) GetPipelineByNameV1(ctx context.Context, request *apiv1beta1.GetPipelineByNameRequest) (*apiv1beta1.Pipeline, error) { if s.options.CollectMetrics { getPipelineRequests.Inc() } @@ -372,7 +386,7 @@ func (s *PipelineServer) GetPipelineByName(ctx context.Context, request *apiv2be // Fetches an array of pipelines and an array of pipeline versions for given search query parameters. // Applies common logic on v1beta1 and v2beta1 API. -func (s *PipelineServer) listPipelines(ctx context.Context, namespace string, pageToken string, pageSize int32, sortBy string, opts *list.Options, apiRequestVersion string) ([]*model.Pipeline, []*model.PipelineVersion, int, string, error) { +func (s *BasePipelineServer) listPipelines(ctx context.Context, namespace string, pageToken string, pageSize int32, sortBy string, opts *list.Options, apiRequestVersion string) ([]*model.Pipeline, []*model.PipelineVersion, int, string, error) { // Fill in the default namespace namespace = s.resourceManager.ReplaceNamespace(namespace) resourceAttributes := &authorizationv1.ResourceAttributes{ @@ -403,7 +417,7 @@ func (s *PipelineServer) listPipelines(ctx context.Context, namespace string, pa // Returns pipelines with default pipeline versions for a given query. // Supports v1beta behavior. -func (s *PipelineServer) ListPipelinesV1(ctx context.Context, request *apiv1beta1.ListPipelinesRequest) (*apiv1beta1.ListPipelinesResponse, error) { +func (s *PipelineServerV1) ListPipelinesV1(ctx context.Context, request *apiv1beta1.ListPipelinesRequest) (*apiv1beta1.ListPipelinesResponse, error) { if s.options.CollectMetrics { listPipelineRequests.Inc() } @@ -475,7 +489,7 @@ func (s *PipelineServer) ListPipelines(ctx context.Context, request *apiv2beta1. // Removes a pipeline. // Applies common logic on v1beta1 and v2beta1 API. -func (s *PipelineServer) deletePipeline(ctx context.Context, pipelineId string) error { +func (s *BasePipelineServer) deletePipeline(ctx context.Context, pipelineId string, cascade bool) error { // Fail fast if pipelineId == "" { return util.NewInvalidInputError("Failed to delete a pipeline due missing pipeline id") @@ -490,17 +504,17 @@ func (s *PipelineServer) deletePipeline(ctx context.Context, pipelineId string) return util.Wrapf(err, "Failed to delete a pipeline due authorization error for pipeline id %v", pipelineId) } - return s.resourceManager.DeletePipeline(pipelineId) + return s.resourceManager.DeletePipeline(pipelineId, cascade) } // Deletes a pipeline. // Supports v1beta1 behavior. -func (s *PipelineServer) DeletePipelineV1(ctx context.Context, request *apiv1beta1.DeletePipelineRequest) (*empty.Empty, error) { +func (s *PipelineServerV1) DeletePipelineV1(ctx context.Context, request *apiv1beta1.DeletePipelineRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { deletePipelineRequests.Inc() } - if err := s.deletePipeline(ctx, request.GetId()); err != nil { + if err := s.deletePipeline(ctx, request.GetId(), false); err != nil { return nil, util.Wrapf(err, "Failed to delete pipeline (v1beta1) %s. Check error stack", request.GetId()) } @@ -508,17 +522,17 @@ func (s *PipelineServer) DeletePipelineV1(ctx context.Context, request *apiv1bet pipelineCount.Dec() } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } // Deletes a pipeline. // Supports v2beta1 behavior. 
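Most handlers above guard their Prometheus metrics behind options.CollectMetrics (for example pipelineCount.Dec() after a successful delete). A small sketch of that gating, assuming an illustrative counter name rather than KFP's real metric definitions:

package main

import (
    "fmt"

    "github.com/prometheus/client_golang/prometheus"
)

// Illustrative counter; the real server declares its counters and gauges in
// the server package and registers them with the default registry.
var deleteRequests = prometheus.NewCounter(prometheus.CounterOpts{
    Name: "example_delete_pipeline_requests",
    Help: "Number of delete pipeline requests received.",
})

type options struct{ CollectMetrics bool }

type server struct{ options *options }

func (s *server) DeletePipeline(id string) error {
    // Only touch metrics when the server was constructed with collection enabled.
    if s.options.CollectMetrics {
        deleteRequests.Inc()
    }
    // ... perform the actual deletion here ...
    return nil
}

func main() {
    s := &server{options: &options{CollectMetrics: true}}
    _ = s.DeletePipeline("123")
    fmt.Println("metrics enabled:", s.options.CollectMetrics)
}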
-func (s *PipelineServer) DeletePipeline(ctx context.Context, request *apiv2beta1.DeletePipelineRequest) (*empty.Empty, error) { +func (s *PipelineServer) DeletePipeline(ctx context.Context, request *apiv2beta1.DeletePipelineRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { deletePipelineRequests.Inc() } - if err := s.deletePipeline(ctx, request.GetPipelineId()); err != nil { + if err := s.deletePipeline(ctx, request.GetPipelineId(), request.GetCascade()); err != nil { return nil, util.Wrapf(err, "Failed to delete pipeline %s. Check error stack", request.GetPipelineId()) } @@ -526,12 +540,12 @@ func (s *PipelineServer) DeletePipeline(ctx context.Context, request *apiv2beta1 pipelineCount.Dec() } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } // Returns the default (latest) pipeline template for a given pipeline id. // Supports v1beta1 behavior. -func (s *PipelineServer) GetTemplate(ctx context.Context, request *apiv1beta1.GetTemplateRequest) (*apiv1beta1.GetTemplateResponse, error) { +func (s *PipelineServerV1) GetTemplate(ctx context.Context, request *apiv1beta1.GetTemplateRequest) (*apiv1beta1.GetTemplateResponse, error) { pipelineId := request.GetId() if pipelineId == "" { return nil, util.NewInvalidInputError("Failed to get the default pipeline template (v1beta1). Pipeline id cannot be empty") @@ -552,7 +566,7 @@ func (s *PipelineServer) GetTemplate(ctx context.Context, request *apiv1beta1.Ge } // Fetches the latest pipeline version for a given pipeline id. -func (s *PipelineServer) getLatestPipelineVersion(ctx context.Context, pipelineId string) (*model.PipelineVersion, error) { +func (s *BasePipelineServer) getLatestPipelineVersion(ctx context.Context, pipelineId string) (*model.PipelineVersion, error) { if pipelineId == "" { return nil, util.NewInvalidInputError("Failed to get the latest pipeline version as pipeline id is empty") } @@ -568,7 +582,7 @@ func (s *PipelineServer) getLatestPipelineVersion(ctx context.Context, pipelineI // Validates a pipeline version before creating a record in the DB. // Requires Name and PipelineId to be non-empty and presence of PipelineSpec or a valid URI to the pipeline spec. 
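The delete path above now threads a cascade flag through the shared helper: DeletePipelineV1 always passes false, while the v2beta1 handler forwards request.GetCascade() so callers can remove a pipeline together with its versions. A hypothetical in-memory sketch of that contract:

package main

import (
    "errors"
    "fmt"
)

type pipelineStore struct {
    pipelines map[string][]string // pipeline id -> version ids
}

// deletePipeline mirrors the shared helper: cascade controls whether child
// versions are removed along with the pipeline.
func (s *pipelineStore) deletePipeline(id string, cascade bool) error {
    versions, ok := s.pipelines[id]
    if !ok {
        return errors.New("pipeline not found")
    }
    if len(versions) > 0 && !cascade {
        return fmt.Errorf("pipeline %s still has %d versions; pass cascade=true to delete them too", id, len(versions))
    }
    delete(s.pipelines, id)
    return nil
}

func main() {
    s := &pipelineStore{pipelines: map[string][]string{"p1": {"v1", "v2"}}}
    fmt.Println(s.deletePipeline("p1", false)) // refused: versions still exist
    fmt.Println(s.deletePipeline("p1", true))  // cascades: nil error
}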
-func (s *PipelineServer) validatePipelineVersionBeforeCreating(p *model.PipelineVersion) error { +func (s *BasePipelineServer) validatePipelineVersionBeforeCreating(p *model.PipelineVersion) error { if p.Name == "" { return util.NewInvalidInputError("name is required") } @@ -577,7 +591,7 @@ func (s *PipelineServer) validatePipelineVersionBeforeCreating(p *model.Pipeline return nil } if p.PipelineSpecURI != "" { - if _, err := url.ParseRequestURI(p.PipelineSpecURI); err == nil { + if _, err := url.ParseRequestURI(string(p.PipelineSpecURI)); err == nil { return nil } } @@ -590,7 +604,23 @@ func (s *PipelineServer) validatePipelineVersionBeforeCreating(p *model.Pipeline } func NewPipelineServer(resourceManager *resource.ResourceManager, options *PipelineServerOptions) *PipelineServer { - return &PipelineServer{resourceManager: resourceManager, httpClient: http.DefaultClient, options: options} + return &PipelineServer{ + BasePipelineServer: &BasePipelineServer{ + resourceManager: resourceManager, + httpClient: http.DefaultClient, + options: options, + }, + } +} + +func NewPipelineServerV1(resourceManager *resource.ResourceManager, options *PipelineServerOptions) *PipelineServerV1 { + return &PipelineServerV1{ + BasePipelineServer: &BasePipelineServer{ + resourceManager: resourceManager, + httpClient: http.DefaultClient, + options: options, + }, + } } // Creates a pipeline and a pipeline version in a single transaction. @@ -622,7 +652,7 @@ func (s *PipelineServer) CreatePipelineAndVersion(ctx context.Context, request * // Creates a pipeline version from. Not exported. // Applies common logic on v1beta1 and v2beta1 API. -func (s *PipelineServer) createPipelineVersion(ctx context.Context, pv *model.PipelineVersion) (*model.PipelineVersion, error) { +func (s *BasePipelineServer) createPipelineVersion(ctx context.Context, pv *model.PipelineVersion) (*model.PipelineVersion, error) { // Fail if pipeline URL is missing if pv.PipelineSpecURI == "" { return nil, util.NewInvalidInputError("Failed to create a pipeline version due to missing pipeline URL") @@ -647,25 +677,26 @@ func (s *PipelineServer) createPipelineVersion(ctx context.Context, pv *model.Pi } // Read pipeline file - pipelineUrl, err := url.ParseRequestURI(pv.PipelineSpecURI) + // nolint:staticcheck // [ST1003] Field name matches upstream legacy naming + pipelineURL, err := url.ParseRequestURI(string(pv.PipelineSpecURI)) if err != nil { return nil, util.NewInvalidInputError("Failed to create a pipeline version due to invalid pipeline spec URI. PipelineSpecURI: %v. Please specify a valid URL", pv.PipelineSpecURI) } - resp, err := s.httpClient.Get(pipelineUrl.String()) + resp, err := s.httpClient.Get(pipelineURL.String()) if err != nil || resp.StatusCode != http.StatusOK { if err == nil { - return nil, util.NewInvalidInputError("Failed to fetch pipeline spec with url: %v. Request returned %v", pipelineUrl.String(), resp.Status) + return nil, util.NewInvalidInputError("Failed to fetch pipeline spec with url: %v. 
Request returned %v", pipelineURL.String(), resp.Status) } - return nil, util.NewInternalServerError(err, "Failed to create a pipeline version due error downloading the pipeline spec from %v", pipelineUrl.String()) + return nil, util.NewInternalServerError(err, "Failed to create a pipeline version due error downloading the pipeline spec from %v", pipelineURL.String()) } defer resp.Body.Close() - pipelineFileName := path.Base(pipelineUrl.String()) + pipelineFileName := path.Base(pipelineURL.String()) pipelineFile, err := ReadPipelineFile(pipelineFileName, resp.Body, common.MaxFileLength) if err != nil { return nil, util.Wrap(err, "Failed to create a pipeline version due error reading the pipeline spec") } - pv.PipelineSpec = string(pipelineFile) + pv.PipelineSpec = model.LargeText(pipelineFile) if pv.Name == "" { pv.Name = pipelineFileName } @@ -680,7 +711,7 @@ func (s *PipelineServer) createPipelineVersion(ctx context.Context, pv *model.Pi // Creates a pipeline version. // Supports v1beta behavior. -func (s *PipelineServer) CreatePipelineVersionV1(ctx context.Context, request *apiv1beta1.CreatePipelineVersionRequest) (*apiv1beta1.PipelineVersion, error) { +func (s *PipelineServerV1) CreatePipelineVersionV1(ctx context.Context, request *apiv1beta1.CreatePipelineVersionRequest) (*apiv1beta1.PipelineVersion, error) { if s.options.CollectMetrics { createPipelineVersionRequests.Inc() } @@ -698,16 +729,16 @@ func (s *PipelineServer) CreatePipelineVersionV1(ctx context.Context, request *a } // Extract pipeline id - pipelineId := "" + pipelineID := "" for _, resourceReference := range request.Version.ResourceReferences { if resourceReference.Key.Type == apiv1beta1.ResourceType_PIPELINE && resourceReference.Relationship == apiv1beta1.Relationship_OWNER { - pipelineId = resourceReference.Key.Id + pipelineID = resourceReference.Key.Id } } - if len(pipelineId) == 0 { + if len(pipelineID) == 0 { return nil, util.Wrap(err, "Failed to create a pipeline version (v1beta1) due to missing pipeline id") } - pv.PipelineId = pipelineId + pv.PipelineId = pipelineID // Create a pipeline version newpv, err := s.createPipelineVersion(ctx, pv) @@ -772,21 +803,21 @@ func (s *PipelineServer) CreatePipelineVersion(ctx context.Context, request *api // Fetches a pipeline version for given pipeline id. // Applies common logic on v1beta1 and v2beta1 API. -func (s *PipelineServer) getPipelineVersion(ctx context.Context, pipelineVersionId string) (*model.PipelineVersion, error) { +func (s *BasePipelineServer) getPipelineVersion(ctx context.Context, pipelineVersionID string) (*model.PipelineVersion, error) { // Check authorization resourceAttributes := &authorizationv1.ResourceAttributes{ Verb: common.RbacResourceVerbGet, } - err := s.canAccessPipelineVersion(ctx, pipelineVersionId, resourceAttributes) + err := s.canAccessPipelineVersion(ctx, pipelineVersionID, resourceAttributes) if err != nil { - return nil, util.Wrapf(err, "Failed to get a pipeline version due to authorization error for pipeline version id %v", pipelineVersionId) + return nil, util.Wrapf(err, "Failed to get a pipeline version due to authorization error for pipeline version id %v", pipelineVersionID) } - return s.resourceManager.GetPipelineVersion(pipelineVersionId) + return s.resourceManager.GetPipelineVersion(pipelineVersionID) } // Returns a pipeline version. // Supports v1beta behavior. 
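createPipelineVersion above parses PipelineSpecURI, downloads the spec over HTTP, and reads it through ReadPipelineFile with common.MaxFileLength as an upper bound before storing it as model.LargeText. A self-contained sketch of the same fetch-and-bound idea, using io.LimitReader rather than KFP's helper:

package main

import (
    "fmt"
    "io"
    "net/http"
    "net/url"
)

// fetchSpec validates the URL, downloads it, and refuses specs larger than
// maxLen bytes, roughly mirroring the flow in the handler above.
func fetchSpec(rawURL string, maxLen int64) ([]byte, error) {
    u, err := url.ParseRequestURI(rawURL)
    if err != nil {
        return nil, fmt.Errorf("invalid pipeline spec URL %q: %w", rawURL, err)
    }
    resp, err := http.Get(u.String())
    if err != nil {
        return nil, fmt.Errorf("downloading %s: %w", u, err)
    }
    defer resp.Body.Close()
    if resp.StatusCode != http.StatusOK {
        return nil, fmt.Errorf("fetching %s: %s", u, resp.Status)
    }
    // Read at most maxLen+1 bytes so an oversized spec is detected instead of truncated.
    data, err := io.ReadAll(io.LimitReader(resp.Body, maxLen+1))
    if err != nil {
        return nil, err
    }
    if int64(len(data)) > maxLen {
        return nil, fmt.Errorf("pipeline spec exceeds %d bytes", maxLen)
    }
    return data, nil
}

func main() {
    spec, err := fetchSpec("https://example.com/pipeline.yaml", 32<<20)
    fmt.Println(len(spec), err)
}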
-func (s *PipelineServer) GetPipelineVersionV1(ctx context.Context, request *apiv1beta1.GetPipelineVersionRequest) (*apiv1beta1.PipelineVersion, error) { +func (s *PipelineServerV1) GetPipelineVersionV1(ctx context.Context, request *apiv1beta1.GetPipelineVersionRequest) (*apiv1beta1.PipelineVersion, error) { if s.options.CollectMetrics { getPipelineVersionRequests.Inc() } @@ -818,7 +849,7 @@ func (s *PipelineServer) GetPipelineVersion(ctx context.Context, request *apiv2b // Fetches an array of pipeline versions for given search query parameters. // Applies common logic on v1beta1 and v2beta1 API. -func (s *PipelineServer) listPipelineVersions(ctx context.Context, pipelineId string, pageToken string, pageSize int32, sortBy string, opts *list.Options) ([]*model.PipelineVersion, int, string, error) { +func (s *BasePipelineServer) listPipelineVersions(ctx context.Context, pipelineId string, pageToken string, pageSize int32, sortBy string, opts *list.Options) ([]*model.PipelineVersion, int, string, error) { // Fail fast of pipeline id or namespace are missing if pipelineId == "" { return nil, 0, "", util.NewInvalidInputError("Failed to list pipeline versions. Pipeline id cannot be empty") @@ -843,7 +874,7 @@ func (s *PipelineServer) listPipelineVersions(ctx context.Context, pipelineId st // Returns an array of pipeline versions for a given query. // Supports v1beta1 behavior. -func (s *PipelineServer) ListPipelineVersionsV1(ctx context.Context, request *apiv1beta1.ListPipelineVersionsRequest) (*apiv1beta1.ListPipelineVersionsResponse, error) { +func (s *PipelineServerV1) ListPipelineVersionsV1(ctx context.Context, request *apiv1beta1.ListPipelineVersionsRequest) (*apiv1beta1.ListPipelineVersionsResponse, error) { if s.options.CollectMetrics { listPipelineVersionRequests.Inc() } @@ -912,7 +943,7 @@ func (s *PipelineServer) ListPipelineVersions(ctx context.Context, request *apiv // Removes a pipeline version. // Applies common logic on v1beta1 and v2beta1 API. -func (s *PipelineServer) deletePipelineVersion(ctx context.Context, pipelineId string, pipelineVersionId string) error { +func (s *BasePipelineServer) deletePipelineVersion(ctx context.Context, pipelineId string, pipelineVersionId string) error { // Fail fast if pipelineId == "" { return util.NewInvalidInputError("Failed to delete a pipeline version id %v due missing pipeline id", pipelineVersionId) @@ -935,7 +966,7 @@ func (s *PipelineServer) deletePipelineVersion(ctx context.Context, pipelineId s // Deletes a pipeline version. // Supports v1beta1 behavior. -func (s *PipelineServer) DeletePipelineVersionV1(ctx context.Context, request *apiv1beta1.DeletePipelineVersionRequest) (*empty.Empty, error) { +func (s *PipelineServerV1) DeletePipelineVersionV1(ctx context.Context, request *apiv1beta1.DeletePipelineVersionRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { deletePipelineVersionRequests.Inc() } @@ -960,12 +991,12 @@ func (s *PipelineServer) DeletePipelineVersionV1(ctx context.Context, request *a if s.options.CollectMetrics { pipelineVersionCount.Dec() } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } // Deletes a pipeline version. // Supports v2beta1 behavior. 
-func (s *PipelineServer) DeletePipelineVersion(ctx context.Context, request *apiv2beta1.DeletePipelineVersionRequest) (*empty.Empty, error) { +func (s *PipelineServer) DeletePipelineVersion(ctx context.Context, request *apiv2beta1.DeletePipelineVersionRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { deletePipelineVersionRequests.Inc() } @@ -988,12 +1019,12 @@ func (s *PipelineServer) DeletePipelineVersion(ctx context.Context, request *api if s.options.CollectMetrics { pipelineVersionCount.Dec() } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } // Returns pipeline template. // Supports v1beta1 behavior. -func (s *PipelineServer) GetPipelineVersionTemplate(ctx context.Context, request *apiv1beta1.GetPipelineVersionTemplateRequest) (*apiv1beta1.GetTemplateResponse, error) { +func (s *PipelineServerV1) GetPipelineVersionTemplate(ctx context.Context, request *apiv1beta1.GetPipelineVersionTemplateRequest) (*apiv1beta1.GetTemplateResponse, error) { resourceAttributes := &authorizationv1.ResourceAttributes{ Verb: common.RbacResourceVerbGet, } @@ -1012,16 +1043,16 @@ func (s *PipelineServer) GetPipelineVersionTemplate(ctx context.Context, request // Checks if a user can access a pipeline version. // Adds namespace of the parent pipeline if version id is not empty, // API group, version, and resource type. -func (s *PipelineServer) canAccessPipelineVersion(ctx context.Context, versionId string, resourceAttributes *authorizationv1.ResourceAttributes) error { +func (s *BasePipelineServer) canAccessPipelineVersion(ctx context.Context, versionID string, resourceAttributes *authorizationv1.ResourceAttributes) error { if !common.IsMultiUserMode() { // Skip authorization if not multi-user mode. return nil } pipelineId := "" - if versionId != "" { - pipelineVersion, err := s.resourceManager.GetPipelineVersion(versionId) + if versionID != "" { + pipelineVersion, err := s.resourceManager.GetPipelineVersion(versionID) if err != nil { - return util.Wrapf(err, "Failed to access pipeline version %s. Check if it exists", versionId) + return util.Wrapf(err, "Failed to access pipeline version %s. Check if it exists", versionID) } pipelineId = pipelineVersion.PipelineId } @@ -1031,7 +1062,7 @@ func (s *PipelineServer) canAccessPipelineVersion(ctx context.Context, versionId // Checks if a user can access a pipeline. // Adds parent namespace if pipeline id is not empty, // API group, version, and resource type. -func (s *PipelineServer) canAccessPipeline(ctx context.Context, pipelineId string, resourceAttributes *authorizationv1.ResourceAttributes) error { +func (s *BasePipelineServer) canAccessPipeline(ctx context.Context, pipelineId string, resourceAttributes *authorizationv1.ResourceAttributes) error { if !common.IsMultiUserMode() { // Skip authorization if not multi-user mode. 
return nil diff --git a/backend/src/apiserver/server/pipeline_server_test.go b/backend/src/apiserver/server/pipeline_server_test.go index 5c35ec7cf57..4f89a1e8b63 100644 --- a/backend/src/apiserver/server/pipeline_server_test.go +++ b/backend/src/apiserver/server/pipeline_server_test.go @@ -17,10 +17,12 @@ package server import ( "context" "encoding/json" + "fmt" "io" "net/http" "net/http/httptest" "os" + "strings" "testing" api "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" @@ -35,6 +37,37 @@ import ( "google.golang.org/protobuf/types/known/timestamppb" ) +func createPipelineServerV1(resourceManager *resource.ResourceManager, httpClient *http.Client) *PipelineServerV1 { + return &PipelineServerV1{ + BasePipelineServer: &BasePipelineServer{ + resourceManager: resourceManager, httpClient: httpClient, options: &PipelineServerOptions{CollectMetrics: false}, + }, + } +} + +func createPipelineServer(resourceManager *resource.ResourceManager, httpClient *http.Client) *PipelineServer { + return &PipelineServer{ + BasePipelineServer: &BasePipelineServer{ + resourceManager: resourceManager, httpClient: httpClient, options: &PipelineServerOptions{CollectMetrics: false}, + }, + } +} + +func setupLargePipelineURL() string { + // Set up the environment variables for the pipeline URL. + // The URL points to a sample pipeline YAML file in the Kubeflow Pipelines repository. + // The branch and repo can be overridden by environment variables for testing purposes. + branch := os.Getenv("GIT_BRANCH") + repo := os.Getenv("GIT_REPO") + if repo == "" { + repo = "opendatahub-io/data-science-pipelines" + } + if branch == "" { + branch = "master" + } + largePipelineURL := fmt.Sprintf("https://raw.githubusercontent.com/%s/%s/test_data/sdk_compiled_pipelines/valid/xgboost_sample_pipeline.yaml", repo, branch) + return largePipelineURL +} func TestBuildPipelineName_QueryStringNotEmpty(t *testing.T) { pipelineName := buildPipelineName("pipeline one", "", "file one") assert.Equal(t, "pipeline one", pipelineName) @@ -63,7 +96,7 @@ func TestCreatePipelineV1_YAML(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}} + pipelineServer := createPipelineServerV1(resourceManager, httpServer.Client()) pipeline, err := pipelineServer.CreatePipelineV1(context.Background(), &api.CreatePipelineRequest{ Pipeline: &api.Pipeline{ Url: &api.Url{PipelineUrl: httpServer.URL + "/arguments-parameters.yaml"}, @@ -80,7 +113,7 @@ func TestCreatePipelineV1_YAML(t *testing.T) { newPipelineVersion, err := resourceManager.GetLatestPipelineVersion(pipeline.Id) assert.Nil(t, err) assert.NotNil(t, newPipeline) - assert.Equal(t, "pipeline description", newPipeline.Description) + assert.Equal(t, "pipeline description", string(newPipeline.Description)) assert.Equal(t, newPipeline.UUID, newPipelineVersion.PipelineId) } @@ -92,10 +125,10 @@ func TestCreatePipelineV1_LargeFile(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client(), options: 
&PipelineServerOptions{CollectMetrics: false}} + pipelineServer := createPipelineServerV1(resourceManager, httpServer.Client()) pipeline, err := pipelineServer.CreatePipelineV1(context.Background(), &api.CreatePipelineRequest{ Pipeline: &api.Pipeline{ - Url: &api.Url{PipelineUrl: "https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines/master/sdk/python/test_data/pipelines/xgboost_sample_pipeline.yaml"}, + Url: &api.Url{PipelineUrl: setupLargePipelineURL()}, Name: "xgboost-url", Description: "pipeline description", }, @@ -109,7 +142,7 @@ func TestCreatePipelineV1_LargeFile(t *testing.T) { newPipelineVersion, err := resourceManager.GetLatestPipelineVersion(pipeline.Id) assert.Nil(t, err) assert.NotNil(t, newPipeline) - assert.Equal(t, "pipeline description", newPipeline.Description) + assert.Equal(t, "pipeline description", string(newPipeline.Description)) assert.Equal(t, newPipeline.UUID, newPipelineVersion.PipelineId) } @@ -121,7 +154,7 @@ func TestCreatePipelineV1_Tarball(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}} + pipelineServer := createPipelineServerV1(resourceManager, httpServer.Client()) pipeline, err := pipelineServer.CreatePipelineV1(context.Background(), &api.CreatePipelineRequest{ Pipeline: &api.Pipeline{ Url: &api.Url{PipelineUrl: httpServer.URL + "/arguments_tarball/arguments.tar.gz"}, @@ -139,7 +172,7 @@ func TestCreatePipelineV1_Tarball(t *testing.T) { assert.Nil(t, err) assert.NotNil(t, newPipeline) assert.NotNil(t, newPipelineVersion) - assert.Equal(t, "pipeline description", newPipeline.Description) + assert.Equal(t, "pipeline description", string(newPipeline.Description)) assert.Equal(t, newPipeline.UUID, newPipelineVersion.PipelineId) } @@ -151,7 +184,7 @@ func TestCreatePipelineV1_InvalidYAML(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}} + pipelineServer := createPipelineServerV1(resourceManager, httpServer.Client()) createdPipeline, err := pipelineServer.CreatePipelineV1( context.Background(), &api.CreatePipelineRequest{ Pipeline: &api.Pipeline{ @@ -173,7 +206,7 @@ func TestCreatePipelineV1_InvalidURL(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}} + pipelineServer := createPipelineServerV1(resourceManager, httpServer.Client()) createdPipeline, err := pipelineServer.CreatePipelineV1( context.Background(), &api.CreatePipelineRequest{ Pipeline: &api.Pipeline{ @@ -195,7 +228,7 @@ func TestCreatePipelineV1_MissingUrl(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, 
&resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}} + pipelineServer := createPipelineServerV1(resourceManager, httpServer.Client()) createdPipeline, err := pipelineServer.CreatePipelineV1( context.Background(), &api.CreatePipelineRequest{ Pipeline: &api.Pipeline{ @@ -225,7 +258,7 @@ func TestCreatePipelineV1_ExistingPipeline(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}} + pipelineServer := createPipelineServerV1(resourceManager, httpServer.Client()) pipelineServer.CreatePipelineV1( context.Background(), &api.CreatePipelineRequest{ Pipeline: &api.Pipeline{ @@ -265,6 +298,31 @@ func TestCreatePipelineV1_ExistingPipeline(t *testing.T) { assert.Nil(t, createdPipeline) } +func TestCreatePipelineVersionV1_NameTooLong(t *testing.T) { + httpServer := getMockServer(t) + defer httpServer.Close() + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) + pipelineServer := createPipelineServerV1(resourceManager, httpServer.Client()) + + longName := strings.Repeat("x", 192) // max valid length is 191 + _, err := pipelineServer.CreatePipelineVersionV1( + context.Background(), + &api.CreatePipelineVersionRequest{ + Version: &api.PipelineVersion{ + Name: longName, + PackageUrl: &api.Url{PipelineUrl: httpServer.URL + "/arguments-parameters.yaml"}, + ResourceReferences: []*api.ResourceReference{{ + Key: &api.ResourceKey{Type: api.ResourceType_PIPELINE, Id: "pipeline"}, + Relationship: api.Relationship_OWNER, + }}, + }, + }, + ) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "PipelineVersion.Name length cannot exceed") +} + func TestCreatePipelineVersionV1_YAML(t *testing.T) { httpServer := getMockServer(t) // Close the server when test finishes @@ -274,9 +332,7 @@ func TestCreatePipelineVersionV1_YAML(t *testing.T) { util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer := PipelineServer{ - resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}, - } + pipelineServer := createPipelineServerV1(resourceManager, httpServer.Client()) pipelineVersion, err := pipelineServer.CreatePipelineVersionV1( context.Background(), &api.CreatePipelineVersionRequest{ Version: &api.PipelineVersion{ @@ -319,7 +375,7 @@ func TestCreatePipelineVersion_InvalidYAML(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}} + pipelineServer := createPipelineServerV1(resourceManager, httpServer.Client()) _, err := pipelineServer.CreatePipelineVersionV1( context.Background(), &api.CreatePipelineVersionRequest{ 
Version: &api.PipelineVersion{ @@ -351,7 +407,7 @@ func TestCreatePipelineVersion_Tarball(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}} + pipelineServer := createPipelineServerV1(resourceManager, httpServer.Client()) pipelineVersion, err := pipelineServer.CreatePipelineVersionV1( context.Background(), &api.CreatePipelineVersionRequest{ Version: &api.PipelineVersion{ @@ -393,7 +449,7 @@ func TestCreatePipelineVersion_InvalidURL(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}} + pipelineServer := createPipelineServerV1(resourceManager, httpServer.Client()) _, err := pipelineServer.CreatePipelineVersionV1(context.Background(), &api.CreatePipelineVersionRequest{ Version: &api.PipelineVersion{ PackageUrl: &api.Url{ @@ -424,7 +480,7 @@ func TestListPipelineVersion_NoResourceKey(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}} + pipelineServer := createPipelineServerV1(resourceManager, httpServer.Client()) _, err := pipelineServer.ListPipelineVersionsV1(context.Background(), &api.ListPipelineVersionsRequest{ ResourceKey: nil, @@ -440,7 +496,7 @@ func TestListPipelinesPublic(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}} + pipelineServer := createPipelineServerV1(resourceManager, httpServer.Client()) _, err := pipelineServer.ListPipelinesV1(context.Background(), &api.ListPipelinesRequest{ PageSize: 20, @@ -458,7 +514,7 @@ func TestGetPipelineByName_OK(t *testing.T) { defer httpServer.Close() clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}} + pipelineServer := createPipelineServerV1(resourceManager, httpServer.Client()) pipeline, err := pipelineServer.CreatePipelineV1(context.Background(), &api.CreatePipelineRequest{ Pipeline: &api.Pipeline{ Url: &api.Url{PipelineUrl: httpServer.URL + "/arguments-parameters.yaml"}, @@ -491,7 +547,7 @@ func TestGetPipelineByName_Shared_OK(t *testing.T) { defer httpServer.Close() clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := 
resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}} + pipelineServer := createPipelineServerV1(resourceManager, httpServer.Client()) pipeline, err := pipelineServer.CreatePipelineV1(context.Background(), &api.CreatePipelineRequest{ Pipeline: &api.Pipeline{ Url: &api.Url{PipelineUrl: httpServer.URL + "/arguments-parameters.yaml"}, @@ -519,7 +575,7 @@ func TestGetPipelineByName_NotFound(t *testing.T) { defer httpServer.Close() clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}} + pipelineServer := createPipelineServerV1(resourceManager, httpServer.Client()) _, err := pipelineServer.GetPipelineByNameV1(context.Background(), &api.GetPipelineByNameRequest{ Name: "foo", @@ -533,7 +589,7 @@ func TestGetPipelineByName_WrongNameSpace(t *testing.T) { defer httpServer.Close() clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}} + pipelineServer := createPipelineServerV1(resourceManager, httpServer.Client()) pipeline, err := pipelineServer.CreatePipelineV1(context.Background(), &api.CreatePipelineRequest{ Pipeline: &api.Pipeline{ Url: &api.Url{PipelineUrl: httpServer.URL + "/arguments-parameters.yaml"}, @@ -570,7 +626,7 @@ func TestCreatePipelineVersionAndCheckLatestVersion(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}} + pipelineServer := createPipelineServerV1(resourceManager, httpServer.Client()) pipeline, err := pipelineServer.CreatePipelineV1(context.Background(), &api.CreatePipelineRequest{ Pipeline: &api.Pipeline{ Url: &api.Url{PipelineUrl: httpServer.URL + "/arguments_tarball/arguments.tar.gz"}, @@ -586,7 +642,7 @@ func TestCreatePipelineVersionAndCheckLatestVersion(t *testing.T) { clientManager.UpdateUUID(util.NewFakeUUIDGeneratorOrFatal("123e4567-e89b-12d3-a456-526655440001", nil)) resourceManager = resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer = PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}} + pipelineServer = createPipelineServerV1(resourceManager, httpServer.Client()) pipelineVersion, err := pipelineServer.CreatePipelineVersionV1( context.Background(), &api.CreatePipelineVersionRequest{ Version: &api.PipelineVersion{ @@ -640,11 +696,8 @@ func TestPipelineServer_CreatePipeline(t *testing.T) { defer httpServer.Close() clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) 
resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}} + pipelineServer := createPipelineServer(resourceManager, httpServer.Client()) - type args struct { - pipeline *model.Pipeline - } tests := []struct { name string id string @@ -704,12 +757,24 @@ func TestPipelineServer_CreatePipeline(t *testing.T) { true, "name is required", }, + { + name: "Invalid - name too long", + id: DefaultFakeIdOne, + arg: &apiv2.Pipeline{ + Name: strings.Repeat("a", 129), + DisplayName: strings.Repeat("a", 129), + Namespace: "", + }, + want: nil, + wantErr: true, + errMsg: "Pipeline.Name length cannot exceed 128", + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { clientManager.UpdateUUID(util.NewFakeUUIDGeneratorOrFatal(tt.id, nil)) resourceManager = resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer = PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}} + pipelineServer = createPipelineServer(resourceManager, httpServer.Client()) got, err := pipelineServer.CreatePipeline(context.Background(), &apiv2.CreatePipelineRequest{Pipeline: tt.arg}) if tt.wantErr { assert.NotNil(t, err) @@ -737,6 +802,25 @@ func TestPipelineServer_CreatePipelineAndVersion_v2(t *testing.T) { wantErr bool errMsg string }{ + { + name: "Invalid - name too long", + request: &apiv2.CreatePipelineAndVersionRequest{ + Pipeline: &apiv2.Pipeline{ + DisplayName: strings.Repeat("a", 129), + Description: "pipeline description", + Namespace: "", + }, + PipelineVersion: &apiv2.PipelineVersion{ + PackageUrl: &apiv2.Url{ + PipelineUrl: httpServer.URL + "/arguments-parameters.yaml", + }, + }, + }, + want: nil, + wantPv: nil, + wantErr: true, + errMsg: "Pipeline.Name length cannot exceed 128", + }, { "Valid - yaml", &apiv2.CreatePipelineAndVersionRequest{ @@ -782,7 +866,7 @@ func TestPipelineServer_CreatePipelineAndVersion_v2(t *testing.T) { }, PipelineVersion: &apiv2.PipelineVersion{ PackageUrl: &apiv2.Url{ - PipelineUrl: "https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines/master/sdk/python/test_data/pipelines/xgboost_sample_pipeline.yaml", + PipelineUrl: setupLargePipelineURL(), }, }, }, @@ -866,9 +950,7 @@ func TestPipelineServer_CreatePipelineAndVersion_v2(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal( util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - pipelineServer := PipelineServer{ - resourceManager: resourceManager, httpClient: httpServer.Client(), options: &PipelineServerOptions{CollectMetrics: false}, - } + pipelineServer := createPipelineServer(resourceManager, httpServer.Client()) t.Run(tt.name, func(t *testing.T) { got, err := pipelineServer.CreatePipelineAndVersion(context.Background(), tt.request) if tt.wantErr { diff --git a/backend/src/apiserver/server/pipeline_upload_server.go b/backend/src/apiserver/server/pipeline_upload_server.go index 47fb2365d1f..7333cbcfba1 100644 --- a/backend/src/apiserver/server/pipeline_upload_server.go +++ b/backend/src/apiserver/server/pipeline_upload_server.go @@ -20,12 +20,15 @@ import ( "fmt" "net/http" + "google.golang.org/protobuf/encoding/protojson" + 
"google.golang.org/protobuf/proto" + "github.com/golang/glog" - "github.com/golang/protobuf/jsonpb" apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/apiserver/model" "github.com/kubeflow/pipelines/backend/src/apiserver/resource" + "github.com/kubeflow/pipelines/backend/src/apiserver/validation" "github.com/kubeflow/pipelines/backend/src/common/util" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" @@ -107,6 +110,10 @@ func (s *PipelineUploadServer) uploadPipeline(api_version string, w http.Respons } pipelineNamespace := r.URL.Query().Get(NamespaceStringQuery) + if err := validation.ValidateFieldLength("Pipeline", "Namespace", pipelineNamespace); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } pipelineNamespace = s.resourceManager.ReplaceNamespace(pipelineNamespace) resourceAttributes := &authorizationv1.ResourceAttributes{ Namespace: pipelineNamespace, @@ -129,7 +136,7 @@ func (s *PipelineUploadServer) uploadPipeline(api_version string, w http.Respons pipeline := &model.Pipeline{ Name: pipelineName, DisplayName: displayName, - Description: r.URL.Query().Get(DescriptionQueryStringKey), + Description: model.LargeText(r.URL.Query().Get(DescriptionQueryStringKey)), Namespace: pipelineNamespace, } @@ -137,7 +144,12 @@ func (s *PipelineUploadServer) uploadPipeline(api_version string, w http.Respons Name: pipeline.Name, DisplayName: pipeline.DisplayName, Description: pipeline.Description, - PipelineSpec: string(pipelineFile), + PipelineSpec: model.LargeText(pipelineFile), + } + + if err := validation.ValidateFieldLength("Pipeline", "Name", pipeline.Name); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return } w.Header().Set("Content-Type", "application/json") @@ -156,18 +168,29 @@ func (s *PipelineUploadServer) uploadPipeline(api_version string, w http.Respons pipelineVersionCount.Inc() } - marshaler := &jsonpb.Marshaler{EnumsAsInts: false, OrigName: true} - + var messageToMarshal proto.Message if api_version == "v1beta1" { - err = marshaler.Marshal(w, toApiPipelineV1(newPipeline, newPipelineVersion)) + messageToMarshal = toApiPipelineV1(newPipeline, newPipelineVersion) } else if api_version == "v2beta1" { - err = marshaler.Marshal(w, toApiPipeline(newPipeline)) + messageToMarshal = toApiPipeline(newPipeline) } else { s.writeErrorToResponse(w, http.StatusInternalServerError, util.Wrap(err, "Failed to create a pipeline. Invalid API version")) return } + + // Marshal the message to bytes + marshaler := &protojson.MarshalOptions{ + UseProtoNames: true, + // Note: Default behavior in protojson is to output enum names (strings). + } + data, err := marshaler.Marshal(messageToMarshal) + if err != nil { + s.writeErrorToResponse(w, http.StatusInternalServerError, util.Wrap(err, "Failed to create a pipeline. Marshaling error")) + return + } + _, err = w.Write(data) if err != nil { - s.writeErrorToResponse(w, http.StatusInternalServerError, util.Wrap(err, "Failed to create a pipeline due to error marshalling the pipeline")) + s.writeErrorToResponse(w, http.StatusInternalServerError, util.Wrap(err, "Failed to create a pipeline. Write error.")) return } } @@ -210,6 +233,22 @@ func (s *PipelineUploadServer) uploadPipelineVersion(api_version string, w http. 
s.writeErrorToResponse(w, http.StatusBadRequest, errors.New("Failed to create a pipeline version due to error reading pipeline id")) return } + + versionNameQueryString := r.URL.Query().Get(NameQueryStringKey) + versionDisplayNameQueryString := r.URL.Query().Get(DisplayNameQueryStringKey) + pipelineVersionName := buildPipelineName(versionNameQueryString, versionDisplayNameQueryString, header.Filename) + + displayName := versionDisplayNameQueryString + if displayName == "" { + displayName = pipelineVersionName + } + + // Validate PipelineVersion name length + if err := validation.ValidateFieldLength("PipelineVersion", "Name", pipelineVersionName); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + namespace, err := s.resourceManager.FetchNamespaceFromPipelineId(pipelineId) if err != nil { s.writeErrorToResponse(w, http.StatusBadRequest, util.Wrap(err, "Failed to create a pipeline version due to error reading namespace")) @@ -228,23 +267,13 @@ func (s *PipelineUploadServer) uploadPipelineVersion(api_version string, w http. w.Header().Set("Content-Type", "application/json") - // If new version's name is not included in query string, use file name. - versionNameQueryString := r.URL.Query().Get(NameQueryStringKey) - versionDisplayNameQueryString := r.URL.Query().Get(DisplayNameQueryStringKey) - pipelineVersionName := buildPipelineName(versionNameQueryString, versionDisplayNameQueryString, header.Filename) - - displayName := versionDisplayNameQueryString - if displayName == "" { - displayName = pipelineVersionName - } - newPipelineVersion, err := s.resourceManager.CreatePipelineVersion( &model.PipelineVersion{ Name: pipelineVersionName, DisplayName: displayName, - Description: r.URL.Query().Get(DescriptionQueryStringKey), + Description: model.LargeText(r.URL.Query().Get(DescriptionQueryStringKey)), PipelineId: pipelineId, - PipelineSpec: string(pipelineFile), + PipelineSpec: model.LargeText(pipelineFile), }, ) if err != nil { @@ -256,18 +285,28 @@ func (s *PipelineUploadServer) uploadPipelineVersion(api_version string, w http. return } - marshaler := &jsonpb.Marshaler{EnumsAsInts: false, OrigName: true} + var messageToMarshal proto.Message if api_version == "v1beta1" { - err = marshaler.Marshal(w, toApiPipelineVersionV1(newPipelineVersion)) + messageToMarshal = toApiPipelineVersionV1(newPipelineVersion) } else if api_version == "v2beta1" { - err = marshaler.Marshal(w, toApiPipelineVersion(newPipelineVersion)) + messageToMarshal = toApiPipelineVersion(newPipelineVersion) } else { s.writeErrorToResponse(w, http.StatusInternalServerError, util.Wrap(err, "Failed to create a pipeline version. Invalid API version")) return } - + // Marshal the message to bytes + marshaler := &protojson.MarshalOptions{ + UseProtoNames: true, + // Note: Default behavior in protojson is to output enum names (strings). + } + data, err := marshaler.Marshal(messageToMarshal) + if err != nil { + s.writeErrorToResponse(w, http.StatusInternalServerError, util.Wrap(err, "Failed to create a pipeline version. Marshaling error")) + return + } + _, err = w.Write(data) if err != nil { - s.writeErrorToResponse(w, http.StatusInternalServerError, util.Wrap(err, "Failed to create a pipeline version due to marshalling error")) + s.writeErrorToResponse(w, http.StatusInternalServerError, util.Wrap(err, "Failed to create a pipeline version. 
Write error.")) return } diff --git a/backend/src/apiserver/server/pipeline_upload_server_test.go b/backend/src/apiserver/server/pipeline_upload_server_test.go index d07ac4d6734..7dbbf3a3e9f 100644 --- a/backend/src/apiserver/server/pipeline_upload_server_test.go +++ b/backend/src/apiserver/server/pipeline_upload_server_test.go @@ -24,6 +24,7 @@ import ( "net/http/httptest" "net/url" "os" + "strings" "testing" "github.com/kubeflow/pipelines/backend/src/apiserver/common" @@ -168,7 +169,7 @@ func TestUploadPipeline(t *testing.T) { Parameters: "[]", Status: model.PipelineVersionReady, PipelineId: DefaultFakeUUID, - PipelineSpec: string(test.spec), + PipelineSpec: model.LargeText(test.spec), }, { UUID: fakeVersionUUID, @@ -179,7 +180,7 @@ func TestUploadPipeline(t *testing.T) { Parameters: "[]", Status: model.PipelineVersionReady, PipelineId: DefaultFakeUUID, - PipelineSpec: string(test.spec), + PipelineSpec: model.LargeText(test.spec), }, } // Expect 2 versions, one is created by default when creating pipeline and the other is what we manually created @@ -195,19 +196,19 @@ func TestUploadPipeline(t *testing.T) { } func TestUploadPipelineV2_NameValidation(t *testing.T) { - v2Template, _ := template.New([]byte(v2SpecHelloWorld), true) + v2Template, _ := template.New([]byte(v2SpecHelloWorld), true, nil) v2spec := string(v2Template.Bytes()) - v2Template, _ = template.New([]byte(v2SpecHelloWorldDash), true) + v2Template, _ = template.New([]byte(v2SpecHelloWorldDash), true, nil) v2specDash := string(v2Template.Bytes()) - v2Template, _ = template.New([]byte(v2SpecHelloWorldCapitalized), true) + v2Template, _ = template.New([]byte(v2SpecHelloWorldCapitalized), true, nil) invalidV2specCapitalized := string(v2Template.Bytes()) - v2Template, _ = template.New([]byte(v2SpecHelloWorldDot), true) + v2Template, _ = template.New([]byte(v2SpecHelloWorldDot), true, nil) invalidV2specDot := string(v2Template.Bytes()) - v2Template, _ = template.New([]byte(v2SpecHelloWorldLong), true) + v2Template, _ = template.New([]byte(v2SpecHelloWorldLong), true, nil) invalidV2specLong := string(v2Template.Bytes()) tt := []struct { @@ -282,6 +283,74 @@ func TestUploadPipelineV2_NameValidation(t *testing.T) { } } +func TestUploadPipeline_NameAndNamespaceTooLong(t *testing.T) { + type testCase struct { + name string + apiVersion string + uploadFunc func(http.ResponseWriter, *http.Request) + query string + wantStatus int + wantErrMsg string + } + + cases := []testCase{ + { + name: "v1 name too long", + apiVersion: "v1beta1", + uploadFunc: nil, // set in t.Run + query: "?name=" + url.PathEscape(strings.Repeat("a", 129)), + wantStatus: http.StatusBadRequest, + wantErrMsg: "Pipeline.Name length cannot exceed 128", + }, + { + name: "v1 namespace too long", + apiVersion: "v1beta1", + uploadFunc: nil, + query: "?namespace=" + url.PathEscape(strings.Repeat("n", 64)), + wantStatus: http.StatusBadRequest, + wantErrMsg: "Pipeline.Namespace length cannot exceed 63", + }, + { + name: "v2 name too long", + apiVersion: "v2beta1", + uploadFunc: nil, + query: "?name=" + url.PathEscape(strings.Repeat("a", 129)), + wantStatus: http.StatusBadRequest, + wantErrMsg: "Pipeline.Name length cannot exceed 128", + }, + { + name: "v2 namespace too long", + apiVersion: "v2beta1", + uploadFunc: nil, + query: "?namespace=" + url.PathEscape(strings.Repeat("n", 64)), + wantStatus: http.StatusBadRequest, + wantErrMsg: "Pipeline.Namespace length cannot exceed 63", + }, + } + + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + _, server := 
setupClientManagerAndServer() + bytesBuffer, writer := setupWriter("") + setWriterWithBuffer("uploadfile", "hello.yaml", + "apiVersion: argoproj.io/v1alpha1\nkind: Workflow", writer) + uploadFunc := tc.uploadFunc + if uploadFunc == nil { + if tc.apiVersion == "v1beta1" { + uploadFunc = server.UploadPipelineV1 + } else { + uploadFunc = server.UploadPipeline + } + } + endpoint := fmt.Sprintf("/apis/%s/pipelines/upload%s", tc.apiVersion, tc.query) + resp := uploadPipeline(endpoint, + bytes.NewReader(bytesBuffer.Bytes()), writer, uploadFunc) + assert.Equal(t, tc.wantStatus, resp.Code) + assert.Contains(t, resp.Body.String(), tc.wantErrMsg) + }) + } +} + func TestUploadPipeline_Tarball(t *testing.T) { clientManager, server := setupClientManagerAndServer() bytesBuffer, writer := setupWriter("") @@ -506,6 +575,27 @@ func TestUploadPipelineVersion_GetFromFileError(t *testing.T) { assert.Contains(t, response.Body.String(), "error parsing pipeline spec filename") } +func TestUploadPipelineVersion_NameTooLong(t *testing.T) { + _, server := setupClientManagerAndServer() + // a valid workflow body + bytesBuffer, writer := setupWriter("") + setWriterWithBuffer("uploadfile", "hello.yaml", "apiVersion: argoproj.io/v1alpha1\nkind: Workflow", writer) + response := uploadPipeline("/apis/v1beta1/pipelines/upload", + bytes.NewReader(bytesBuffer.Bytes()), writer, server.UploadPipeline) + assert.Equal(t, 200, response.Code) + + // a name too long(>127) + longName := strings.Repeat("a", 128) + endpoint := fmt.Sprintf("/apis/v1beta1/pipelines/upload_version?name=%s&pipelineid=%s", + url.PathEscape(longName), DefaultFakeUUID) + + resp := uploadPipeline(endpoint, + bytes.NewReader(bytesBuffer.Bytes()), writer, server.UploadPipelineVersion) + + assert.Equal(t, http.StatusBadRequest, resp.Code) + assert.Contains(t, resp.Body.String(), "PipelineVersion.Name length cannot exceed") +} + func TestDefaultNotUpdatedPipelineVersion(t *testing.T) { viper.Set(common.UpdatePipelineVersionByDefault, "false") defer viper.Set(common.UpdatePipelineVersionByDefault, "true") diff --git a/backend/src/apiserver/server/report_server.go b/backend/src/apiserver/server/report_server.go index 5667a0191ca..fcda727f280 100644 --- a/backend/src/apiserver/server/report_server.go +++ b/backend/src/apiserver/server/report_server.go @@ -18,10 +18,11 @@ import ( "context" "encoding/json" + "google.golang.org/protobuf/types/known/emptypb" + "github.com/kubeflow/pipelines/backend/src/apiserver/common" authorizationv1 "k8s.io/api/authorization/v1" - "github.com/golang/protobuf/ptypes/empty" apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" "github.com/kubeflow/pipelines/backend/src/apiserver/model" @@ -30,12 +31,25 @@ import ( scheduledworkflow "github.com/kubeflow/pipelines/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1" ) -type ReportServer struct { +// BaseReportServer wraps ReportServer and ReportServerV1 +// to enable method sharing. It can be removed once ReportServerV1 +// is removed. +type BaseReportServer struct { resourceManager *resource.ResourceManager } +type ReportServer struct { + *BaseReportServer + apiv2beta1.UnimplementedReportServiceServer +} + +type ReportServerV1 struct { + *BaseReportServer + apiv1beta1.UnimplementedReportServiceServer +} + // Extracts task details from an execution spec and reports them to storage. 
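The pipeline_upload_server.go hunks a few files above replace the deprecated github.com/golang/protobuf/jsonpb marshaler with protojson: the response message is marshaled to bytes first (UseProtoNames keeping the snake_case field names that jsonpb's OrigName produced) and only then written, so marshaling errors and write errors are reported separately. A minimal sketch of that pattern, using a generic well-known type in place of the KFP API messages:

package main

import (
    "fmt"
    "net/http"
    "net/http/httptest"

    "google.golang.org/protobuf/encoding/protojson"
    "google.golang.org/protobuf/types/known/structpb"
)

// writeProtoJSON marshals a proto message to JSON bytes, then writes them,
// so the two failure modes can be distinguished by the caller.
func writeProtoJSON(w http.ResponseWriter, msg *structpb.Struct) error {
    marshaler := protojson.MarshalOptions{UseProtoNames: true} // snake_case field names
    data, err := marshaler.Marshal(msg)
    if err != nil {
        return fmt.Errorf("marshaling response: %w", err)
    }
    w.Header().Set("Content-Type", "application/json")
    _, err = w.Write(data)
    return err
}

func main() {
    msg, _ := structpb.NewStruct(map[string]interface{}{"display_name": "hello-world"})
    rec := httptest.NewRecorder()
    _ = writeProtoJSON(rec, msg)
    fmt.Println(rec.Body.String())
}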
-func (s ReportServer) reportTasksFromExecution(execSpec util.ExecutionSpec, runId string) ([]*model.Task, error) { +func (s *BaseReportServer) reportTasksFromExecution(execSpec util.ExecutionSpec, runId string) ([]*model.Task, error) { if !execSpec.ExecutionStatus().HasNodes() { return nil, nil } @@ -47,7 +61,7 @@ func (s ReportServer) reportTasksFromExecution(execSpec util.ExecutionSpec, runI } // Reports a workflow. -func (s *ReportServer) reportWorkflow(ctx context.Context, workflow string) (*empty.Empty, error) { +func (s *BaseReportServer) reportWorkflow(ctx context.Context, workflow string) (*emptypb.Empty, error) { execSpec, err := validateReportWorkflowRequest(workflow) if err != nil { return nil, util.Wrap(err, "Report workflow failed") @@ -73,23 +87,23 @@ func (s *ReportServer) reportWorkflow(ctx context.Context, workflow string) (*em if err != nil { return nil, util.Wrap(err, "Failed to report task details") } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } -func (s *ReportServer) ReportWorkflowV1(ctx context.Context, +func (s *ReportServerV1) ReportWorkflowV1(ctx context.Context, request *apiv1beta1.ReportWorkflowRequest, -) (*empty.Empty, error) { +) (*emptypb.Empty, error) { return s.reportWorkflow(ctx, request.GetWorkflow()) } func (s *ReportServer) ReportWorkflow(ctx context.Context, request *apiv2beta1.ReportWorkflowRequest, -) (*empty.Empty, error) { +) (*emptypb.Empty, error) { return s.reportWorkflow(ctx, request.GetWorkflow()) } // Reports a scheduled workflow. -func (s *ReportServer) reportScheduledWorkflow(ctx context.Context, swf string) (*empty.Empty, error) { +func (s *BaseReportServer) reportScheduledWorkflow(ctx context.Context, swf string) (*emptypb.Empty, error) { scheduledWorkflow, err := validateReportScheduledWorkflowRequest(swf) if err != nil { return nil, util.Wrap(err, "Report scheduled workflow failed") @@ -107,18 +121,18 @@ func (s *ReportServer) reportScheduledWorkflow(ctx context.Context, swf string) if err != nil { return nil, err } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } -func (s *ReportServer) ReportScheduledWorkflowV1(ctx context.Context, +func (s *ReportServerV1) ReportScheduledWorkflowV1(ctx context.Context, request *apiv1beta1.ReportScheduledWorkflowRequest, -) (*empty.Empty, error) { +) (*emptypb.Empty, error) { return s.reportScheduledWorkflow(ctx, request.GetScheduledWorkflow()) } func (s *ReportServer) ReportScheduledWorkflow(ctx context.Context, request *apiv2beta1.ReportScheduledWorkflowRequest, -) (*empty.Empty, error) { +) (*emptypb.Empty, error) { return s.reportScheduledWorkflow(ctx, request.GetScheduledWorkflow()) } @@ -159,7 +173,7 @@ func validateReportScheduledWorkflowRequest(swfManifest string) (*util.Scheduled return swf, nil } -func (s *ReportServer) canAccessWorkflow(ctx context.Context, executionName string, resourceAttributes *authorizationv1.ResourceAttributes) error { +func (s *BaseReportServer) canAccessWorkflow(ctx context.Context, executionName string, resourceAttributes *authorizationv1.ResourceAttributes) error { resourceAttributes.Group = common.RbacPipelinesGroup resourceAttributes.Version = common.RbacPipelinesVersion err := s.resourceManager.IsAuthorized(ctx, resourceAttributes) @@ -170,5 +184,17 @@ func (s *ReportServer) canAccessWorkflow(ctx context.Context, executionName stri } func NewReportServer(resourceManager *resource.ResourceManager) *ReportServer { - return &ReportServer{resourceManager: resourceManager} + return &ReportServer{ + BaseReportServer: &BaseReportServer{ 
+ resourceManager: resourceManager, + }, + } +} + +func NewReportServerV1(resourceManager *resource.ResourceManager) *ReportServerV1 { + return &ReportServerV1{ + BaseReportServer: &BaseReportServer{ + resourceManager: resourceManager, + }, + } } diff --git a/backend/src/apiserver/server/report_server_test.go b/backend/src/apiserver/server/report_server_test.go index ebe0464b7e9..9d2b5c6f90e 100644 --- a/backend/src/apiserver/server/report_server_test.go +++ b/backend/src/apiserver/server/report_server_test.go @@ -33,7 +33,11 @@ import ( func TestReportWorkflowV1(t *testing.T) { clientManager, resourceManager, run := initWithOneTimeRun(t) defer clientManager.Close() - reportServer := NewReportServer(resourceManager) + reportServer := &ReportServerV1{ + BaseReportServer: &BaseReportServer{ + resourceManager: resourceManager, + }, + } workflow := util.NewWorkflow(&v1alpha1.Workflow{ TypeMeta: metav1.TypeMeta{ @@ -140,7 +144,7 @@ func TestReportWorkflow(t *testing.T) { func TestReportWorkflow_ValidationFailed(t *testing.T) { clientManager, resourceManager, run := initWithOneTimeRun(t) defer clientManager.Close() - reportServer := NewReportServer(resourceManager) + reportServer := NewReportServerV1(resourceManager) workflow := util.NewWorkflow(&v1alpha1.Workflow{ TypeMeta: metav1.TypeMeta{ diff --git a/backend/src/apiserver/server/run_server.go b/backend/src/apiserver/server/run_server.go index a850fa9bdc9..131d08217b0 100644 --- a/backend/src/apiserver/server/run_server.go +++ b/backend/src/apiserver/server/run_server.go @@ -17,8 +17,9 @@ package server import ( "context" + "google.golang.org/protobuf/types/known/emptypb" + "github.com/golang/glog" - "github.com/golang/protobuf/ptypes/empty" apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" "github.com/kubeflow/pipelines/backend/src/apiserver/common" @@ -95,30 +96,57 @@ type RunServerOptions struct { CollectMetrics bool `json:"collect_metrics,omitempty"` } -type RunServer struct { +// BaseRunServer wraps RunServer and RunServerV1 +// to enable method sharing. It can be removed once RunServerV1 +// is removed. +type BaseRunServer struct { resourceManager *resource.ResourceManager options *RunServerOptions } +type RunServer struct { + *BaseRunServer + apiv2beta1.UnimplementedRunServiceServer +} + +type RunServerV1 struct { + *BaseRunServer + apiv1beta1.UnimplementedRunServiceServer +} + func NewRunServer(resourceManager *resource.ResourceManager, options *RunServerOptions) *RunServer { - return &RunServer{resourceManager: resourceManager, options: options} + return &RunServer{ + BaseRunServer: &BaseRunServer{ + resourceManager: resourceManager, + options: options, + }, + } +} + +func NewRunServerV1(resourceManager *resource.ResourceManager, options *RunServerOptions) *RunServerV1 { + return &RunServerV1{ + BaseRunServer: &BaseRunServer{ + resourceManager: resourceManager, + options: options, + }, + } } // Creates a run. // Applies common logic on v1beta1 and v2beta1 API. -func (s *RunServer) createRun(ctx context.Context, run *model.Run) (*model.Run, error) { +func (s *BaseRunServer) createRun(ctx context.Context, run *model.Run) (*model.Run, error) { // Validate user inputs if run.DisplayName == "" { return nil, util.Wrapf(util.NewInvalidInputError("The run name is empty. 
Please specify a valid name"), "Failed to create a run due to invalid name") } - experimentId, namespace, err := s.resourceManager.GetValidExperimentNamespacePair(run.ExperimentId, run.Namespace) + experimentID, namespace, err := s.resourceManager.GetValidExperimentNamespacePair(run.ExperimentId, run.Namespace) if err != nil { - return nil, util.Wrapf(err, "Failed to create a run due to invalid experimentId and namespace combination") + return nil, util.Wrapf(err, "Failed to create a run due to invalid experimentID and namespace combination") } if common.IsMultiUserMode() && namespace == "" { return nil, util.NewInvalidInputError("A run cannot have an empty namespace in multi-user mode") } - run.ExperimentId = experimentId + run.ExperimentId = experimentID run.Namespace = namespace // Check authorization resourceAttributes := &authorizationv1.ResourceAttributes{ @@ -133,7 +161,7 @@ func (s *RunServer) createRun(ctx context.Context, run *model.Run) (*model.Run, // Creates a run. // Supports v1beta1 behavior. -func (s *RunServer) CreateRunV1(ctx context.Context, request *apiv1beta1.CreateRunRequest) (*apiv1beta1.RunDetail, error) { +func (s *RunServerV1) CreateRunV1(ctx context.Context, request *apiv1beta1.CreateRunRequest) (*apiv1beta1.RunDetail, error) { if s.options.CollectMetrics { createRunRequests.Inc() } @@ -156,12 +184,12 @@ func (s *RunServer) CreateRunV1(ctx context.Context, request *apiv1beta1.CreateR // Fetches a run. // Applies common logic on v1beta1 and v2beta1 API. -func (s *RunServer) getRun(ctx context.Context, runId string) (*model.Run, error) { - err := s.canAccessRun(ctx, runId, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbGet}) +func (s *BaseRunServer) getRun(ctx context.Context, runID string) (*model.Run, error) { + err := s.canAccessRun(ctx, runID, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbGet}) if err != nil { return nil, util.Wrap(err, "Failed to authorize the request") } - run, err := s.resourceManager.GetRun(runId) + run, err := s.resourceManager.GetRun(runID) if err != nil { return nil, err } @@ -170,7 +198,7 @@ func (s *RunServer) getRun(ctx context.Context, runId string) (*model.Run, error // Fetches a run. // Supports v1beta1 behavior. -func (s *RunServer) GetRunV1(ctx context.Context, request *apiv1beta1.GetRunRequest) (*apiv1beta1.RunDetail, error) { +func (s *RunServerV1) GetRunV1(ctx context.Context, request *apiv1beta1.GetRunRequest) (*apiv1beta1.RunDetail, error) { if s.options.CollectMetrics { getRunRequests.Inc() } @@ -185,12 +213,12 @@ func (s *RunServer) GetRunV1(ctx context.Context, request *apiv1beta1.GetRunRequ // Fetches all runs that conform to the specified filter and listing options. // Applies common logic on v1beta1 and v2beta1 API. -func (s *RunServer) listRuns(ctx context.Context, pageToken string, pageSize int, sortBy string, opts *list.Options, namespace string, experimentId string) ([]*model.Run, int, string, error) { +func (s *BaseRunServer) listRuns(ctx context.Context, pageToken string, pageSize int, sortBy string, opts *list.Options, namespace string, experimentID string) ([]*model.Run, int, string, error) { namespace = s.resourceManager.ReplaceNamespace(namespace) - if experimentId != "" { - ns, err := s.resourceManager.GetNamespaceFromExperimentId(experimentId) + if experimentID != "" { + ns, err := s.resourceManager.GetNamespaceFromExperimentId(experimentID) if err != nil { - return nil, 0, "", util.Wrapf(err, "Failed to list runs due to error fetching namespace for experiment %s. 
Try filtering based on namespace", experimentId) + return nil, 0, "", util.Wrapf(err, "Failed to list runs due to error fetching namespace for experiment %s. Try filtering based on namespace", experimentID) } namespace = ns } @@ -206,12 +234,12 @@ func (s *RunServer) listRuns(ctx context.Context, pageToken string, pageSize int filterContext := &model.FilterContext{ ReferenceKey: &model.ReferenceKey{Type: model.NamespaceResourceType, ID: namespace}, } - if experimentId != "" { - if err := s.resourceManager.CheckExperimentBelongsToNamespace(experimentId, namespace); err != nil { + if experimentID != "" { + if err := s.resourceManager.CheckExperimentBelongsToNamespace(experimentID, namespace); err != nil { return nil, 0, "", util.Wrap(err, "Failed to list runs due to namespace mismatch") } filterContext = &model.FilterContext{ - ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: experimentId}, + ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: experimentID}, } } runs, totalSize, token, err := s.resourceManager.ListRuns(filterContext, opts) @@ -223,7 +251,7 @@ func (s *RunServer) listRuns(ctx context.Context, pageToken string, pageSize int // Fetches runs given query parameters. // Supports v1beta1 behavior. -func (s *RunServer) ListRunsV1(ctx context.Context, r *apiv1beta1.ListRunsRequest) (*apiv1beta1.ListRunsResponse, error) { +func (s *RunServerV1) ListRunsV1(ctx context.Context, r *apiv1beta1.ListRunsRequest) (*apiv1beta1.ListRunsResponse, error) { if s.options.CollectMetrics { listRunRequests.Inc() } @@ -233,14 +261,14 @@ func (s *RunServer) ListRunsV1(ctx context.Context, r *apiv1beta1.ListRunsReques return nil, util.Wrap(err, "Failed to list v1beta1 runs: validating filter failed") } namespace := "" - experimentId := "" + experimentID := "" if filterContext.ReferenceKey != nil { switch filterContext.ReferenceKey.Type { case model.NamespaceResourceType: namespace = filterContext.ReferenceKey.ID case model.ExperimentResourceType: - experimentId = filterContext.ReferenceKey.ID + experimentID = filterContext.ReferenceKey.ID } } @@ -249,7 +277,7 @@ func (s *RunServer) ListRunsV1(ctx context.Context, r *apiv1beta1.ListRunsReques return nil, util.Wrap(err, "Failed to create list options") } - runs, runsCount, nextPageToken, err := s.listRuns(ctx, r.GetPageToken(), int(r.GetPageSize()), r.GetSortBy(), opts, namespace, experimentId) + runs, runsCount, nextPageToken, err := s.listRuns(ctx, r.GetPageToken(), int(r.GetPageSize()), r.GetSortBy(), opts, namespace, experimentID) if err != nil { return nil, util.Wrap(err, "Failed to list v1beta1 runs") } @@ -266,17 +294,17 @@ func (s *RunServer) ListRunsV1(ctx context.Context, r *apiv1beta1.ListRunsReques // Archives a run. // Applies common logic on v1beta1 and v2beta1 API. -func (s *RunServer) archiveRun(ctx context.Context, runId string) error { - err := s.canAccessRun(ctx, runId, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbArchive}) +func (s *BaseRunServer) archiveRun(ctx context.Context, runID string) error { + err := s.canAccessRun(ctx, runID, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbArchive}) if err != nil { return util.Wrap(err, "Failed to authorize the request") } - return s.resourceManager.ArchiveRun(runId) + return s.resourceManager.ArchiveRun(runID) } // Archives a run. // Supports v1beta1 behavior. 
-func (s *RunServer) ArchiveRunV1(ctx context.Context, request *apiv1beta1.ArchiveRunRequest) (*empty.Empty, error) { +func (s *RunServerV1) ArchiveRunV1(ctx context.Context, request *apiv1beta1.ArchiveRunRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { archiveRunRequests.Inc() } @@ -284,22 +312,22 @@ func (s *RunServer) ArchiveRunV1(ctx context.Context, request *apiv1beta1.Archiv if err != nil { return nil, util.Wrap(err, "Failed to archive a v1beta1 run") } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } // Un-archives a run. // Applies common logic on v1beta1 and v2beta1 API. -func (s *RunServer) unarchiveRun(ctx context.Context, runId string) error { - err := s.canAccessRun(ctx, runId, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbUnarchive}) +func (s *BaseRunServer) unarchiveRun(ctx context.Context, runID string) error { + err := s.canAccessRun(ctx, runID, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbUnarchive}) if err != nil { return util.Wrap(err, "Failed to authorize the request") } - return s.resourceManager.UnarchiveRun(runId) + return s.resourceManager.UnarchiveRun(runID) } // Un-archives a run. // Supports v1beta1 behavior. -func (s *RunServer) UnarchiveRunV1(ctx context.Context, request *apiv1beta1.UnarchiveRunRequest) (*empty.Empty, error) { +func (s *RunServerV1) UnarchiveRunV1(ctx context.Context, request *apiv1beta1.UnarchiveRunRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { unarchiveRunRequests.Inc() } @@ -307,22 +335,22 @@ func (s *RunServer) UnarchiveRunV1(ctx context.Context, request *apiv1beta1.Unar if err != nil { return nil, util.Wrap(err, "Failed to unarchive a v1beta1 run") } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } // Deletes a run. // Applies common logic on v1beta1 and v2beta1 API. -func (s *RunServer) deleteRun(ctx context.Context, runId string) error { - err := s.canAccessRun(ctx, runId, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbDelete}) +func (s *BaseRunServer) deleteRun(ctx context.Context, runID string) error { + err := s.canAccessRun(ctx, runID, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbDelete}) if err != nil { return util.Wrap(err, "Failed to authorize the request") } - return s.resourceManager.DeleteRun(ctx, runId) + return s.resourceManager.DeleteRun(ctx, runID) } // Deletes a run. // Supports v1beta1 behavior. -func (s *RunServer) DeleteRunV1(ctx context.Context, request *apiv1beta1.DeleteRunRequest) (*empty.Empty, error) { +func (s *RunServerV1) DeleteRunV1(ctx context.Context, request *apiv1beta1.DeleteRunRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { deleteRunRequests.Inc() } @@ -334,20 +362,20 @@ func (s *RunServer) DeleteRunV1(ctx context.Context, request *apiv1beta1.DeleteR runCount.Dec() } } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } // Reports run metrics. // Applies common logic on v1beta1 and v2beta1 API. 
-func (s *RunServer) reportRunMetrics(ctx context.Context, metrics []*model.RunMetric, runId string) ([]map[string]string, error) { - err := s.canAccessRun(ctx, runId, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbReportMetrics}) +func (s *BaseRunServer) reportRunMetrics(ctx context.Context, metrics []*model.RunMetric, runID string) ([]map[string]string, error) { + err := s.canAccessRun(ctx, runID, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbReportMetrics}) if err != nil { return nil, util.Wrap(err, "Failed to authorize the request") } // Verify that the run exists for single user mode. // Multi-user model will verify this when checking authorization above. if !common.IsMultiUserMode() { - if _, err := s.resourceManager.GetRun(runId); err != nil { + if _, err := s.resourceManager.GetRun(runID); err != nil { return nil, util.Wrap(err, "Failed to fetch the requested run") } } @@ -390,34 +418,51 @@ func (s *RunServer) reportRunMetrics(ctx context.Context, metrics []*model.RunMe // Reports run metrics. // Supports v1beta1 API. -func (s *RunServer) ReportRunMetricsV1(ctx context.Context, request *apiv1beta1.ReportRunMetricsRequest) (*apiv1beta1.ReportRunMetricsResponse, error) { +func (s *RunServerV1) ReportRunMetricsV1(ctx context.Context, request *apiv1beta1.ReportRunMetricsRequest) (*apiv1beta1.ReportRunMetricsResponse, error) { if s.options.CollectMetrics { reportRunMetricsRequests.Inc() } - metrics := make([]*model.RunMetric, 0) - for _, metric := range request.GetMetrics() { - modelMetric, err := toModelRunMetric(metric, request.GetRunId()) + + if _, err := s.resourceManager.GetRun(request.GetRunId()); err != nil { + // Use the standard ResourceNotFoundError so that AssertUserError + // sees codes.NotFound and the right error message. + return nil, util.NewResourceNotFoundError( + "Run", request.GetRunId(), + ) + } + + // Convert, validate, and report each metric in input order. 
+ var apiResults []*apiv1beta1.ReportRunMetricsResponse_ReportRunMetricResult + for _, m := range request.GetMetrics() { + modelMetric, err := toModelRunMetric(m, request.GetRunId()) if err != nil { - return nil, util.Wrap(err, "Failed to create v1beta1 run metrics due to data conversion error") + // Conversion error: record as INVALID_ARGUMENT + msg := err.Error() + if userErr, ok := err.(*util.UserError); ok { + msg = userErr.ExternalMessage() + } + apiResults = append(apiResults, toApiReportMetricsResultV1( + m.Name, m.NodeId, "invalid", msg, + )) + continue } - metrics = append(metrics, modelMetric) - } - results, err := s.reportRunMetrics(ctx, metrics, request.GetRunId()) - if err != nil { - return nil, util.Wrap(err, "Failed to report v1beta1 run metrics") - } - apiResults := make([]*apiv1beta1.ReportRunMetricsResponse_ReportRunMetricResult, 0) - for _, result := range results { - apiResults = append(apiResults, toApiReportMetricsResultV1(result["Name"], result["NodeId"], result["ErrorCode"], result["ErrorMessage"])) + // Report this metric + results, err := s.reportRunMetrics(ctx, []*model.RunMetric{modelMetric}, request.GetRunId()) + if err != nil { + return nil, util.Wrap(err, "Failed to report v1beta1 run metrics") + } + // results slice will have exactly one entry + r := results[0] + apiResults = append(apiResults, toApiReportMetricsResultV1( + r["Name"], r["NodeId"], r["ErrorCode"], r["ErrorMessage"], + )) } - return &apiv1beta1.ReportRunMetricsResponse{ - Results: apiResults, - }, nil + return &apiv1beta1.ReportRunMetricsResponse{Results: apiResults}, nil } // Reads an artifact. // Supports v1beta1 behavior. -func (s *RunServer) ReadArtifactV1(ctx context.Context, request *apiv1beta1.ReadArtifactRequest) (*apiv1beta1.ReadArtifactResponse, error) { +func (s *RunServerV1) ReadArtifactV1(ctx context.Context, request *apiv1beta1.ReadArtifactRequest) (*apiv1beta1.ReadArtifactResponse, error) { if s.options.CollectMetrics { readArtifactRequests.Inc() } @@ -439,27 +484,27 @@ func (s *RunServer) ReadArtifactV1(ctx context.Context, request *apiv1beta1.Read // Terminates a run. // Applies common logic on v1beta1 and v2beta1 API. -func (s *RunServer) terminateRun(ctx context.Context, runId string) error { - err := s.canAccessRun(ctx, runId, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbTerminate}) +func (s *BaseRunServer) terminateRun(ctx context.Context, runID string) error { + err := s.canAccessRun(ctx, runID, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbTerminate}) if err != nil { return util.Wrap(err, "Failed to authorize the request") } - return s.resourceManager.TerminateRun(ctx, runId) + return s.resourceManager.TerminateRun(ctx, runID) } // Retries a run. // Applies common logic on v1beta1 and v2beta1 API. -func (s *RunServer) retryRun(ctx context.Context, runId string) error { - err := s.canAccessRun(ctx, runId, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbRetry}) +func (s *BaseRunServer) retryRun(ctx context.Context, runID string) error { + err := s.canAccessRun(ctx, runID, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbRetry}) if err != nil { return util.Wrap(err, "Failed to authorize the request") } - return s.resourceManager.RetryRun(ctx, runId) + return s.resourceManager.RetryRun(ctx, runID) } // Terminates a run. // Supports v1beta1 behavior. 
-func (s *RunServer) TerminateRunV1(ctx context.Context, request *apiv1beta1.TerminateRunRequest) (*empty.Empty, error) { +func (s *RunServerV1) TerminateRunV1(ctx context.Context, request *apiv1beta1.TerminateRunRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { terminateRunRequests.Inc() } @@ -467,12 +512,12 @@ func (s *RunServer) TerminateRunV1(ctx context.Context, request *apiv1beta1.Term if err != nil { return nil, util.Wrap(err, "Failed to terminate a v1beta1 run") } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } // Retries a run. // Supports v1beta1 behavior. -func (s *RunServer) RetryRunV1(ctx context.Context, request *apiv1beta1.RetryRunRequest) (*empty.Empty, error) { +func (s *RunServerV1) RetryRunV1(ctx context.Context, request *apiv1beta1.RetryRunRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { retryRunRequests.Inc() } @@ -482,7 +527,7 @@ func (s *RunServer) RetryRunV1(ctx context.Context, request *apiv1beta1.RetryRun return nil, util.Wrap(err, "Failed to retry a run") } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } // Creates a run. @@ -542,7 +587,7 @@ func (s *RunServer) ListRuns(ctx context.Context, r *apiv2beta1.ListRunsRequest) // Archives a run. // Supports v2beta1 behavior. -func (s *RunServer) ArchiveRun(ctx context.Context, request *apiv2beta1.ArchiveRunRequest) (*empty.Empty, error) { +func (s *RunServer) ArchiveRun(ctx context.Context, request *apiv2beta1.ArchiveRunRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { archiveRunRequests.Inc() } @@ -550,12 +595,12 @@ func (s *RunServer) ArchiveRun(ctx context.Context, request *apiv2beta1.ArchiveR if err != nil { return nil, util.Wrap(err, "Failed to archive a run") } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } // Un-archives a run. // Supports v2beta1 behavior. -func (s *RunServer) UnarchiveRun(ctx context.Context, request *apiv2beta1.UnarchiveRunRequest) (*empty.Empty, error) { +func (s *RunServer) UnarchiveRun(ctx context.Context, request *apiv2beta1.UnarchiveRunRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { unarchiveRunRequests.Inc() } @@ -563,12 +608,12 @@ func (s *RunServer) UnarchiveRun(ctx context.Context, request *apiv2beta1.Unarch if err != nil { return nil, util.Wrap(err, "Failed to unarchive a run") } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } // Deletes a run. // Supports v2beta1 behavior. -func (s *RunServer) DeleteRun(ctx context.Context, request *apiv2beta1.DeleteRunRequest) (*empty.Empty, error) { +func (s *RunServer) DeleteRun(ctx context.Context, request *apiv2beta1.DeleteRunRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { deleteRunRequests.Inc() } @@ -578,7 +623,7 @@ func (s *RunServer) DeleteRun(ctx context.Context, request *apiv2beta1.DeleteRun if s.options.CollectMetrics { runCount.Dec() } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } // Reads an artifact. @@ -605,7 +650,7 @@ func (s *RunServer) ReadArtifact(ctx context.Context, request *apiv2beta1.ReadAr // Terminates a run. // Supports v2beta1 behavior. 
-func (s *RunServer) TerminateRun(ctx context.Context, request *apiv2beta1.TerminateRunRequest) (*empty.Empty, error) { +func (s *RunServer) TerminateRun(ctx context.Context, request *apiv2beta1.TerminateRunRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { terminateRunRequests.Inc() } @@ -613,12 +658,12 @@ func (s *RunServer) TerminateRun(ctx context.Context, request *apiv2beta1.Termin if err != nil { return nil, util.Wrap(err, "Failed to terminate a run") } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } // Retries a run. // Supports v2beta1 behavior. -func (s *RunServer) RetryRun(ctx context.Context, request *apiv2beta1.RetryRunRequest) (*empty.Empty, error) { +func (s *RunServer) RetryRun(ctx context.Context, request *apiv2beta1.RetryRunRequest) (*emptypb.Empty, error) { if s.options.CollectMetrics { retryRunRequests.Inc() } @@ -628,26 +673,26 @@ func (s *RunServer) RetryRun(ctx context.Context, request *apiv2beta1.RetryRunRe return nil, util.Wrap(err, "Failed to retry a run") } - return &empty.Empty{}, nil + return &emptypb.Empty{}, nil } // Checks if a user can access a run. // Adds namespace of the parent experiment of a run id, // API group, version, and resource type. -func (s *RunServer) canAccessRun(ctx context.Context, runId string, resourceAttributes *authorizationv1.ResourceAttributes) error { +func (s *BaseRunServer) canAccessRun(ctx context.Context, runID string, resourceAttributes *authorizationv1.ResourceAttributes) error { if !common.IsMultiUserMode() { // Skip authz if not multi-user mode. return nil } - if runId != "" { - run, err := s.resourceManager.GetRun(runId) + if runID != "" { + run, err := s.resourceManager.GetRun(runID) if err != nil { - return util.Wrapf(err, "Failed to authorize with the run ID %v", runId) + return util.Wrapf(err, "Failed to authorize with the run ID %v", runID) } if s.resourceManager.IsEmptyNamespace(run.Namespace) { experiment, err := s.resourceManager.GetExperiment(run.ExperimentId) if err != nil { - return util.NewInvalidInputError("run %v has an empty namespace and the parent experiment %v could not be fetched: %s", runId, run.ExperimentId, err.Error()) + return util.NewInvalidInputError("run %v has an empty namespace and the parent experiment %v could not be fetched: %s", runID, run.ExperimentId, err.Error()) } resourceAttributes.Namespace = experiment.Namespace } else { @@ -666,7 +711,7 @@ func (s *RunServer) canAccessRun(ctx context.Context, runId string, resourceAttr resourceAttributes.Resource = common.RbacResourceTypeRuns err := s.resourceManager.IsAuthorized(ctx, resourceAttributes) if err != nil { - return util.Wrapf(err, "Failed to access run %s. Check if you have access to namespace %s", runId, resourceAttributes.Namespace) + return util.Wrapf(err, "Failed to access run %s. 
Check if you have access to namespace %s", runID, resourceAttributes.Namespace) } return nil } diff --git a/backend/src/apiserver/server/run_server_test.go b/backend/src/apiserver/server/run_server_test.go index 4c6addfb460..3301abaa2d4 100644 --- a/backend/src/apiserver/server/run_server_test.go +++ b/backend/src/apiserver/server/run_server_test.go @@ -20,8 +20,9 @@ import ( "testing" "time" + "google.golang.org/protobuf/types/known/timestamppb" + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" - "github.com/golang/protobuf/ptypes/timestamp" "github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp/cmpopts" apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" @@ -43,6 +44,22 @@ import ( "sigs.k8s.io/yaml" ) +func createRunServerV1(resourceManager *resource.ResourceManager) *RunServerV1 { + return &RunServerV1{ + BaseRunServer: &BaseRunServer{ + resourceManager: resourceManager, options: &RunServerOptions{CollectMetrics: false}, + }, + } +} + +func createRunServer(resourceManager *resource.ResourceManager) *RunServer { + return &RunServer{ + BaseRunServer: &BaseRunServer{ + resourceManager: resourceManager, options: &RunServerOptions{CollectMetrics: false}, + }, + } +} + var metricV1 = &apiv1beta1.RunMetric{ Name: "metric-1", NodeId: "node-1", @@ -55,7 +72,7 @@ var metricV1 = &apiv1beta1.RunMetric{ func TestCreateRunV1_empty_name(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServerV1(manager) run := &apiv1beta1.Run{ ResourceReferences: validReference, PipelineSpec: &apiv1beta1.PipelineSpec{ @@ -72,7 +89,7 @@ func TestCreateRunV1_empty_name(t *testing.T) { func TestCreateRunV1_invalid_pipeline_version(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServerV1(manager) run := &apiv1beta1.Run{ Name: "run1", ResourceReferences: validReference, @@ -89,7 +106,7 @@ func TestCreateRunV1_invalid_pipeline_version(t *testing.T) { func TestCreateRunV1_no_experiment(t *testing.T) { clients, manager, exp := initWithExperiment(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServerV1(manager) run := &apiv1beta1.Run{ Name: "run1", PipelineSpec: &apiv1beta1.PipelineSpec{ @@ -115,7 +132,7 @@ func TestCreateRunV1_no_experiment(t *testing.T) { func TestCreateRunV1_no_pipeline_source(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServerV1(manager) run := &apiv1beta1.Run{ Name: "run1", ResourceReferences: validReference, @@ -129,7 +146,7 @@ func TestCreateRunV1_no_pipeline_source(t *testing.T) { func TestCreateRunV1_invalid_spec(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServerV1(manager) run := &apiv1beta1.Run{ Name: "run1", ResourceReferences: validReference, @@ -147,7 +164,7 @@ func TestCreateRunV1_invalid_spec(t *testing.T) { func TestCreateRunV1_too_many_params(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServerV1(manager) var params 
[]*apiv1beta1.Parameter // Create a long enough parameter string so it exceed the length limit of parameter. for i := 0; i < 10000; i++ { @@ -170,7 +187,7 @@ func TestCreateRunV1_too_many_params(t *testing.T) { func TestCreateRunV1_pipeline(t *testing.T) { clients, manager, exp, _ := initWithExperimentAndPipelineVersion(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServerV1(manager) run := &apiv1beta1.Run{ Name: "run1", ResourceReferences: []*apiv1beta1.ResourceReference{ @@ -200,7 +217,7 @@ func TestCreateRunV1_pipeline(t *testing.T) { func TestCreateRunV1_pipelineversion(t *testing.T) { clients, manager, exp, _ := initWithExperimentAndPipelineVersion(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServerV1(manager) run := &apiv1beta1.Run{ Name: "run1", ResourceReferences: validReferencesOfExperimentAndPipelineVersion, @@ -215,7 +232,7 @@ func TestCreateRunV1_pipelineversion(t *testing.T) { func TestCreateRunV1_Manifest_and_pipeline_version(t *testing.T) { clients, manager, exp, _ := initWithExperimentAndPipelineVersion(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServerV1(manager) run := &apiv1beta1.Run{ Name: "run1", ResourceReferences: validReferencesOfExperimentAndPipelineVersion, @@ -235,7 +252,7 @@ func TestCreateRunV1_Manifest_and_pipeline_version(t *testing.T) { func TestCreateRunV1_V1Params(t *testing.T) { clients, manager, experiment := initWithExperiment(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServerV1(manager) run := &apiv1beta1.Run{ Name: "run1", ResourceReferences: validReference, @@ -269,9 +286,9 @@ func TestCreateRunV1_V1Params(t *testing.T) { Name: "run1", ServiceAccount: "pipeline-runner", StorageState: apiv1beta1.Run_STORAGESTATE_AVAILABLE, - CreatedAt: &timestamp.Timestamp{Seconds: 4}, - ScheduledAt: &timestamp.Timestamp{Seconds: 4}, - FinishedAt: &timestamp.Timestamp{}, + CreatedAt: timestamppb.New(time.Unix(4, 0)), + ScheduledAt: timestamppb.New(time.Unix(4, 0)), + FinishedAt: timestamppb.New(time.Unix(0, 0)), Status: "Pending", PipelineSpec: &apiv1beta1.PipelineSpec{ WorkflowManifest: testWorkflow.ToStringForStore(), @@ -312,7 +329,7 @@ func TestCreateRunV1_V1Params(t *testing.T) { func TestCreateRunV1_RuntimeParams_V2Spec(t *testing.T) { clients, manager, experiment := initWithExperiment(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServerV1(manager) listParams := []interface{}{1, 2, 3} v2RuntimeListParams, _ := structpb.NewList(listParams) @@ -348,9 +365,9 @@ func TestCreateRunV1_RuntimeParams_V2Spec(t *testing.T) { Name: "run1", ServiceAccount: "pipeline-runner", StorageState: apiv1beta1.Run_STORAGESTATE_AVAILABLE, - CreatedAt: &timestamp.Timestamp{Seconds: 2}, - ScheduledAt: &timestamp.Timestamp{Seconds: 2}, - FinishedAt: &timestamp.Timestamp{}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + ScheduledAt: timestamppb.New(time.Unix(2, 0)), + FinishedAt: timestamppb.New(time.Unix(0, 0)), Status: "Pending", PipelineSpec: &apiv1beta1.PipelineSpec{ PipelineManifest: v2SpecHelloWorldParams, @@ -395,7 +412,7 @@ func TestCreateRunV1_RuntimeParams_V2Spec(t *testing.T) { func TestCreateRunV1Patch(t *testing.T) { clients, manager, experiment := initWithExperiment(t) defer clients.Close() - server := NewRunServer(manager, 
&RunServerOptions{CollectMetrics: false}) + server := createRunServerV1(manager) run := &apiv1beta1.Run{ Name: "run1", ResourceReferences: validReference, @@ -416,9 +433,9 @@ func TestCreateRunV1Patch(t *testing.T) { ServiceAccount: "pipeline-runner", Status: "Pending", StorageState: apiv1beta1.Run_STORAGESTATE_AVAILABLE, - CreatedAt: &timestamp.Timestamp{Seconds: 2}, - ScheduledAt: &timestamp.Timestamp{Seconds: 2}, - FinishedAt: &timestamp.Timestamp{}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + ScheduledAt: timestamppb.New(time.Unix(2, 0)), + FinishedAt: timestamppb.New(time.Unix(0, 0)), PipelineSpec: &apiv1beta1.PipelineSpec{ WorkflowManifest: testWorkflowPatch.ToStringForStore(), Parameters: []*apiv1beta1.Parameter{ @@ -472,7 +489,7 @@ func TestCreateRunV1_Unauthorized(t *testing.T) { clients, manager, _ := initWithExperiment_SubjectAccessReview_Unauthorized(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServerV1(manager) run := &apiv1beta1.Run{ Name: "run1", ResourceReferences: validReference, @@ -501,7 +518,7 @@ func TestCreateRunV1_Multiuser(t *testing.T) { clients, manager, experiment := initWithExperiment(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServerV1(manager) run := &apiv1beta1.Run{ Name: "run1", ResourceReferences: validReference, @@ -536,9 +553,9 @@ func TestCreateRunV1_Multiuser(t *testing.T) { Status: "Pending", ServiceAccount: "default-editor", StorageState: apiv1beta1.Run_STORAGESTATE_AVAILABLE, - CreatedAt: &timestamp.Timestamp{Seconds: 4}, - ScheduledAt: &timestamp.Timestamp{Seconds: 4}, - FinishedAt: &timestamp.Timestamp{}, + CreatedAt: timestamppb.New(time.Unix(4, 0)), + ScheduledAt: timestamppb.New(time.Unix(4, 0)), + FinishedAt: timestamppb.New(time.Unix(0, 0)), PipelineSpec: &apiv1beta1.PipelineSpec{ WorkflowManifest: testWorkflow.ToStringForStore(), Parameters: []*apiv1beta1.Parameter{{Name: "param1", Value: "world"}}, @@ -630,9 +647,9 @@ func TestRunServer_CreateRun_SingleUser(t *testing.T) { DisplayName: "run1", ServiceAccount: "pipeline-runner", StorageState: apiv2beta1.Run_AVAILABLE, - CreatedAt: &timestamp.Timestamp{Seconds: 2}, - ScheduledAt: &timestamp.Timestamp{Seconds: 2}, - FinishedAt: &timestamp.Timestamp{}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + ScheduledAt: timestamppb.New(time.Unix(2, 0)), + FinishedAt: timestamppb.New(time.Unix(0, 0)), PipelineSource: &apiv2beta1.Run_PipelineSpec{ PipelineSpec: nil, }, @@ -643,7 +660,7 @@ func TestRunServer_CreateRun_SingleUser(t *testing.T) { State: apiv2beta1.RuntimeState_PENDING, StateHistory: []*apiv2beta1.RuntimeStatus{ { - UpdateTime: &timestamp.Timestamp{Seconds: 3}, + UpdateTime: timestamppb.New(time.Unix(3, 0)), State: apiv2beta1.RuntimeState_PENDING, }, }, @@ -671,9 +688,9 @@ func TestRunServer_CreateRun_SingleUser(t *testing.T) { DisplayName: "run1", ServiceAccount: "pipeline-runner", StorageState: apiv2beta1.Run_AVAILABLE, - CreatedAt: &timestamp.Timestamp{Seconds: 2}, - ScheduledAt: &timestamp.Timestamp{Seconds: 2}, - FinishedAt: &timestamp.Timestamp{}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + ScheduledAt: timestamppb.New(time.Unix(2, 0)), + FinishedAt: timestamppb.New(time.Unix(0, 0)), PipelineSource: &apiv2beta1.Run_PipelineSpec{ PipelineSpec: nil, }, @@ -684,7 +701,7 @@ func TestRunServer_CreateRun_SingleUser(t *testing.T) { State: apiv2beta1.RuntimeState_PENDING, StateHistory: []*apiv2beta1.RuntimeStatus{ { - UpdateTime: 
timestamppb.New(time.Unix(3, 0)), State: apiv2beta1.RuntimeState_PENDING, }, }, @@ -734,7 +751,7 @@ func TestRunServer_CreateRun_SingleUser(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { clients, manager, _ := initWithExperiment(t) - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServer(manager) server.resourceManager.SetDefaultExperimentId(DefaultFakeUUID) got, err := server.CreateRun(context.Background(), tt.args) if tt.wantErr { @@ -756,7 +773,7 @@ func TestRunServer_CreateRun_SingleUser(t *testing.T) { func TestGetRunV1(t *testing.T) { clients, manager, _, _ := initWithExperimentAndPipelineVersion(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServerV1(manager) run := &apiv1beta1.Run{ Name: "run1", ResourceReferences: []*apiv1beta1.ResourceReference{ @@ -792,7 +809,7 @@ func TestGetRunV1(t *testing.T) { func TestGetRun(t *testing.T) { clients, manager, experiment := initWithExperiment(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServer(manager) listParams := []interface{}{1, 2, 3} v2RuntimeListParams, _ := structpb.NewList(listParams) @@ -831,9 +848,9 @@ func TestGetRun(t *testing.T) { DisplayName: "run1", ServiceAccount: "pipeline-runner", StorageState: apiv2beta1.Run_AVAILABLE, - CreatedAt: &timestamp.Timestamp{Seconds: 2}, - ScheduledAt: &timestamp.Timestamp{Seconds: 2}, - FinishedAt: &timestamp.Timestamp{}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + ScheduledAt: timestamppb.New(time.Unix(2, 0)), + FinishedAt: timestamppb.New(time.Unix(0, 0)), PipelineSource: &apiv2beta1.Run_PipelineSpec{ PipelineSpec: returnedRun.GetPipelineSpec(), }, @@ -844,7 +861,7 @@ func TestGetRun(t *testing.T) { State: apiv2beta1.RuntimeState_PENDING, StateHistory: []*apiv2beta1.RuntimeStatus{ { - UpdateTime: &timestamp.Timestamp{Seconds: 3}, + UpdateTime: timestamppb.New(time.Unix(3, 0)), State: apiv2beta1.RuntimeState_PENDING, }, }, @@ -858,7 +875,7 @@ func TestGetRun(t *testing.T) { func TestListRunsV1(t *testing.T) { clients, manager, experiment := initWithExperiment(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServerV1(manager) run := &apiv1beta1.Run{ Name: "run1", ResourceReferences: validReference, @@ -875,9 +892,9 @@ func TestListRunsV1(t *testing.T) { Name: "run1", ServiceAccount: "pipeline-runner", StorageState: apiv1beta1.Run_STORAGESTATE_AVAILABLE, - CreatedAt: &timestamp.Timestamp{Seconds: 2}, - ScheduledAt: &timestamp.Timestamp{Seconds: 2}, - FinishedAt: &timestamp.Timestamp{}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + ScheduledAt: timestamppb.New(time.Unix(2, 0)), + FinishedAt: timestamppb.New(time.Unix(0, 0)), Status: "Pending", PipelineSpec: &apiv1beta1.PipelineSpec{ WorkflowManifest: testWorkflow.ToStringForStore(), @@ -933,7 +950,7 @@ func TestListRunsV1_MultiUser(t *testing.T) { clients, manager, experiment := initWithExperiment(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServerV1(manager) run := &apiv1beta1.Run{ Name: "run1", ResourceReferences: validReference, @@ -950,9 +967,9 @@ func TestListRunsV1_MultiUser(t *testing.T) { Name: "run1", ServiceAccount: "pipeline-runner", StorageState: apiv1beta1.Run_STORAGESTATE_AVAILABLE, - CreatedAt: &timestamp.Timestamp{Seconds: 2}, - ScheduledAt: &timestamp.Timestamp{Seconds: 2}, - FinishedAt: 
&timestamp.Timestamp{}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + ScheduledAt: timestamppb.New(time.Unix(2, 0)), + FinishedAt: timestamppb.New(time.Unix(0, 0)), Status: "Pending", PipelineSpec: &apiv1beta1.PipelineSpec{ WorkflowManifest: testWorkflow.ToStringForStore(), @@ -996,7 +1013,7 @@ func TestListRunsV1_Unauthorized(t *testing.T) { clients, manager, _ := initWithExperiment_SubjectAccessReview_Unauthorized(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServerV1(manager) _, err := server.ListRunsV1(ctx, &apiv1beta1.ListRunsRequest{ ResourceReferenceKey: &apiv1beta1.ResourceKey{ Type: apiv1beta1.ResourceType_NAMESPACE, @@ -1020,7 +1037,7 @@ func TestListRunsV1_Multiuser(t *testing.T) { clients, manager, experiment := initWithExperiment(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServerV1(manager) run := &apiv1beta1.Run{ Name: "run1", ResourceReferences: validReference, @@ -1037,9 +1054,9 @@ func TestListRunsV1_Multiuser(t *testing.T) { Name: "run1", ServiceAccount: "pipeline-runner", StorageState: apiv1beta1.Run_STORAGESTATE_AVAILABLE, - CreatedAt: &timestamp.Timestamp{Seconds: 2}, - ScheduledAt: &timestamp.Timestamp{Seconds: 2}, - FinishedAt: &timestamp.Timestamp{}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + ScheduledAt: timestamppb.New(time.Unix(2, 0)), + FinishedAt: timestamppb.New(time.Unix(0, 0)), Status: "Pending", PipelineSpec: &apiv1beta1.PipelineSpec{ PipelineId: createdRun.Run.PipelineSpec.GetPipelineId(), @@ -1146,7 +1163,7 @@ func TestListRunsV1_Multiuser(t *testing.T) { func TestListRuns(t *testing.T) { clients, manager, experiment := initWithExperiment(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServer(manager) pipelineSpecStruct := &structpb.Struct{} yaml.Unmarshal([]byte(v2SpecHelloWorld), pipelineSpecStruct) @@ -1172,9 +1189,9 @@ func TestListRuns(t *testing.T) { DisplayName: "run1", ServiceAccount: "pipeline-runner", StorageState: apiv2beta1.Run_AVAILABLE, - CreatedAt: &timestamp.Timestamp{Seconds: 2}, - ScheduledAt: &timestamp.Timestamp{Seconds: 2}, - FinishedAt: &timestamp.Timestamp{}, + CreatedAt: timestamppb.New(time.Unix(2, 0)), + ScheduledAt: timestamppb.New(time.Unix(2, 0)), + FinishedAt: timestamppb.New(time.Unix(0, 0)), PipelineSource: &apiv2beta1.Run_PipelineSpec{ PipelineSpec: createdRun.GetPipelineSpec(), }, @@ -1187,7 +1204,7 @@ func TestListRuns(t *testing.T) { State: apiv2beta1.RuntimeState_PENDING, StateHistory: []*apiv2beta1.RuntimeStatus{ { - UpdateTime: &timestamp.Timestamp{Seconds: 3}, + UpdateTime: timestamppb.New(time.Unix(3, 0)), State: apiv2beta1.RuntimeState_PENDING, }, }, @@ -1211,7 +1228,7 @@ func TestReportRunMetricsV1_RunNotFound(t *testing.T) { clientManager, resourceManager, _ := initWithOneTimeRun(t) defer clientManager.Close() - runServer := RunServer{resourceManager: resourceManager, options: &RunServerOptions{CollectMetrics: false}} + runServer := createRunServerV1(resourceManager) _, err := runServer.ReportRunMetricsV1(context.Background(), &apiv1beta1.ReportRunMetricsRequest{ RunId: "1", @@ -1227,7 +1244,7 @@ func TestReportRunMetricsV1_Succeed_Multiuser(t *testing.T) { clientManager, resourceManager, runDetails := initWithOneTimeRun(t) defer clientManager.Close() - runServer := RunServer{resourceManager: resourceManager, options: &RunServerOptions{CollectMetrics: false}} + runServer := createRunServerV1(resourceManager) response, 
err := runServer.ReportRunMetricsV1(ctx, &apiv1beta1.ReportRunMetricsRequest{ RunId: runDetails.UUID, @@ -1263,7 +1280,7 @@ func TestReportRunMetricsV1_Unauthorized(t *testing.T) { defer clientManager.Close() clientManager.SubjectAccessReviewClientFake = client.NewFakeSubjectAccessReviewClientUnauthorized() resourceManager = resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - runServer := RunServer{resourceManager: resourceManager, options: &RunServerOptions{CollectMetrics: false}} + runServer := createRunServerV1(resourceManager) _, err := runServer.ReportRunMetricsV1(ctx, &apiv1beta1.ReportRunMetricsRequest{ RunId: runDetails.UUID, @@ -1284,7 +1301,7 @@ func TestReportRunMetricsV1_PartialFailures(t *testing.T) { clientManager, resourceManager, runDetail := initWithOneTimeRun(t) defer clientManager.Close() - runServer := RunServer{resourceManager: resourceManager, options: &RunServerOptions{CollectMetrics: false}} + runServer := createRunServerV1(resourceManager) validMetric := metricV1 invalidNameMetric := &apiv1beta1.RunMetric{ @@ -1325,13 +1342,52 @@ func TestReportRunMetricsV1_PartialFailures(t *testing.T) { assert.Equal(t, expectedResponse, response) } +// Test length validation for ReportRunMetricsV1: Name and NodeId exceeding max length should return INVALID_ARGUMENT. +func TestReportRunMetricsV1_LengthValidation(t *testing.T) { + httpServer := getMockServer(t) + defer httpServer.Close() + + clientManager, resourceManager, runDetail := initWithOneTimeRun(t) + defer clientManager.Close() + runServer := createRunServerV1(resourceManager) + + // Prepare metrics: one valid, one with Name overflow, one with NodeId overflow. + validMetric := metricV1 + longName := strings.Repeat("a", 192) + nameOverflow := &apiv1beta1.RunMetric{ + Name: longName, + NodeId: validMetric.NodeId, + Value: validMetric.Value, + Format: validMetric.Format, + } + longNodeID := strings.Repeat("a", 192) + nodeOverflow := &apiv1beta1.RunMetric{ + Name: validMetric.Name, + NodeId: longNodeID, + Value: validMetric.Value, + Format: validMetric.Format, + } + + response, err := runServer.ReportRunMetricsV1(context.Background(), &apiv1beta1.ReportRunMetricsRequest{ + RunId: runDetail.UUID, + Metrics: []*apiv1beta1.RunMetric{validMetric, nameOverflow, nodeOverflow}, + }) + assert.Nil(t, err) + + // Check statuses: valid metric OK, others INVALID_ARGUMENT. 
+ results := response.GetResults() + assert.Equal(t, apiv1beta1.ReportRunMetricsResponse_ReportRunMetricResult_OK, results[0].GetStatus()) + assert.Equal(t, apiv1beta1.ReportRunMetricsResponse_ReportRunMetricResult_INVALID_ARGUMENT, results[1].GetStatus()) + assert.Equal(t, apiv1beta1.ReportRunMetricsResponse_ReportRunMetricResult_INVALID_ARGUMENT, results[2].GetStatus()) +} + func TestCanAccessRun_Unauthorized(t *testing.T) { viper.Set(common.MultiUserMode, "true") defer viper.Set(common.MultiUserMode, "false") clients, manager, experiment := initWithExperiment_SubjectAccessReview_Unauthorized(t) defer clients.Close() - runServer := RunServer{resourceManager: manager, options: &RunServerOptions{CollectMetrics: false}} + runServer := createRunServerV1(manager) userIdentity := "user@google.com" md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + userIdentity}) @@ -1372,7 +1428,7 @@ func TestCanAccessRun_Authorized(t *testing.T) { clients, manager, oneTimeRun := initWithOneTimeRun(t) defer clients.Close() - runServer := RunServer{resourceManager: manager, options: &RunServerOptions{CollectMetrics: false}} + runServer := createRunServerV1(manager) md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) ctx := metadata.NewIncomingContext(context.Background(), md) @@ -1387,7 +1443,7 @@ func TestCanAccessRun_Unauthenticated(t *testing.T) { clients, manager, experiment := initWithExperiment(t) defer clients.Close() - runServer := RunServer{resourceManager: manager, options: &RunServerOptions{CollectMetrics: false}} + runServer := createRunServerV1(manager) md := metadata.New(map[string]string{"no-identity-header": "user"}) ctx := metadata.NewIncomingContext(context.Background(), md) @@ -1476,7 +1532,7 @@ func TestReadArtifactsV1_Succeed(t *testing.T) { _, err := manager.ReportWorkflowResource(context.Background(), workflow) assert.Nil(t, err) - runServer := RunServer{resourceManager: manager, options: &RunServerOptions{CollectMetrics: false}} + runServer := createRunServerV1(manager) artifact := &apiv1beta1.ReadArtifactRequest{ RunId: run.UUID, NodeId: "node-1", @@ -1504,7 +1560,7 @@ func TestReadArtifactsV1_Unauthorized(t *testing.T) { clientManager.SubjectAccessReviewClientFake = client.NewFakeSubjectAccessReviewClientUnauthorized() resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - runServer := RunServer{resourceManager: resourceManager, options: &RunServerOptions{CollectMetrics: false}} + runServer := createRunServerV1(resourceManager) artifact := &apiv1beta1.ReadArtifactRequest{ RunId: run.UUID, NodeId: "node-1", @@ -1522,7 +1578,7 @@ func TestReadArtifactsV1_Unauthorized(t *testing.T) { func TestReadArtifactsV1_Run_NotFound(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) manager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) - runServer := RunServer{resourceManager: manager, options: &RunServerOptions{CollectMetrics: false}} + runServer := createRunServerV1(manager) artifact := &apiv1beta1.ReadArtifactRequest{ RunId: "Wrong_RUN_UUID", NodeId: "node-1", @@ -1560,7 +1616,7 @@ func TestReadArtifactsV1_Resource_NotFound(t *testing.T) { _, err := manager.ReportWorkflowResource(context.Background(), workflow) assert.Nil(t, err) - runServer := RunServer{resourceManager: manager, options: 
&RunServerOptions{CollectMetrics: false}} + runServer := createRunServerV1(manager) //`artifactRequest` search for node that does not exist artifactRequest := &apiv1beta1.ReadArtifactRequest{ RunId: run.UUID, @@ -1623,7 +1679,7 @@ func TestReadArtifacts_Succeed(t *testing.T) { _, err := manager.ReportWorkflowResource(context.Background(), workflow) assert.Nil(t, err) - runServer := RunServer{resourceManager: manager, options: &RunServerOptions{CollectMetrics: false}} + runServer := createRunServer(manager) artifact := &apiv2beta1.ReadArtifactRequest{ RunId: run.UUID, NodeId: "node-1", @@ -1641,7 +1697,7 @@ func TestReadArtifacts_Succeed(t *testing.T) { func TestRetryRun(t *testing.T) { clients, manager, experiment := initWithExperiment(t) defer clients.Close() - server := NewRunServer(manager, &RunServerOptions{CollectMetrics: false}) + server := createRunServer(manager) listParams := []interface{}{1, 2, 3} v2RuntimeListParams, _ := structpb.NewList(listParams) diff --git a/backend/src/apiserver/server/task_server.go b/backend/src/apiserver/server/task_server.go index 8de983929be..f787076a1dd 100644 --- a/backend/src/apiserver/server/task_server.go +++ b/backend/src/apiserver/server/task_server.go @@ -18,6 +18,8 @@ import ( "context" "strings" + apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" + api "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" "github.com/kubeflow/pipelines/backend/src/apiserver/model" "github.com/kubeflow/pipelines/backend/src/apiserver/resource" @@ -26,6 +28,7 @@ import ( type TaskServer struct { resourceManager *resource.ResourceManager + apiv1beta1.UnimplementedTaskServiceServer } // Creates a task. diff --git a/backend/src/apiserver/server/util.go b/backend/src/apiserver/server/util.go index 5d88de11d6d..e2f5a84717d 100644 --- a/backend/src/apiserver/server/util.go +++ b/backend/src/apiserver/server/util.go @@ -44,7 +44,11 @@ func loadFile(fileReader io.Reader, MaxFileLength int) ([]byte, error) { } } if len(pipelineFile) > MaxFileLength { - return nil, util.NewInvalidInputError("File size too large. Maximum supported size: %v", MaxFileLength) + return nil, util.NewInvalidInputError( + "File size too large (%v bytes). Maximum supported size: %v. 
Consider moving large embedded artifacts or "+ + "notebooks or Python code into a container image or object store.", + len(pipelineFile), MaxFileLength, + ) } return pipelineFile, nil } diff --git a/backend/src/apiserver/server/util_test.go b/backend/src/apiserver/server/util_test.go index b5e3c9d9f4a..36050e87a8d 100644 --- a/backend/src/apiserver/server/util_test.go +++ b/backend/src/apiserver/server/util_test.go @@ -190,3 +190,11 @@ func TestReadPipelineFile_UnknownFileFormat(t *testing.T) { assert.NotNil(t, err) assert.Contains(t, err.Error(), "Unexpected pipeline file format") } + +func TestReadPipelineFile_SizeTooLarge_RecommendationIncluded(t *testing.T) { + big := strings.Repeat("X", 1024) + _, err := ReadPipelineFile("large.yaml", strings.NewReader(big), 10) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "File size too large") + assert.Contains(t, err.Error(), "Consider moving large embedded artifacts or notebooks") +} diff --git a/backend/src/apiserver/server/visualization_server.go b/backend/src/apiserver/server/visualization_server.go index 7ff4cf9356a..ed6fbf3964f 100644 --- a/backend/src/apiserver/server/visualization_server.go +++ b/backend/src/apiserver/server/visualization_server.go @@ -23,6 +23,8 @@ import ( "net/url" "strings" + apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" + "github.com/golang/glog" "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" "github.com/kubeflow/pipelines/backend/src/apiserver/common" @@ -40,6 +42,7 @@ const ( type VisualizationServer struct { resourceManager *resource.ResourceManager serviceURL string + apiv1beta1.UnimplementedVisualizationServiceServer } func (s *VisualizationServer) CreateVisualizationV1(ctx context.Context, request *go_client.CreateVisualizationRequest) (*go_client.Visualization, error) { @@ -117,7 +120,7 @@ func (s *VisualizationServer) generateVisualizationFromRequest(request *go_clien return nil, util.Wrap(err, "Unable to initialize visualization request") } if resp.StatusCode != http.StatusOK { - return nil, fmt.Errorf(resp.Status) + return nil, fmt.Errorf("%s", resp.Status) } defer resp.Body.Close() body, err := io.ReadAll(resp.Body) diff --git a/backend/src/apiserver/storage/db_fake.go b/backend/src/apiserver/storage/db_fake.go index 276d3a68898..6596ed5f75d 100644 --- a/backend/src/apiserver/storage/db_fake.go +++ b/backend/src/apiserver/storage/db_fake.go @@ -16,20 +16,20 @@ package storage import ( "github.com/golang/glog" - "github.com/jinzhu/gorm" "github.com/kubeflow/pipelines/backend/src/apiserver/model" "github.com/kubeflow/pipelines/backend/src/common/util" - _ "github.com/mattn/go-sqlite3" + "gorm.io/driver/sqlite" + "gorm.io/gorm" ) func NewFakeDB() (*DB, error) { // Initialize GORM - db, err := gorm.Open("sqlite3", ":memory:") + dbInstance, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{}) if err != nil { return nil, util.Wrap(err, "Could not create the GORM database") } // Create tables - db.AutoMigrate( + if err := dbInstance.AutoMigrate( &model.Experiment{}, &model.Job{}, &model.Pipeline{}, @@ -40,8 +40,15 @@ func NewFakeDB() (*DB, error) { &model.Task{}, &model.DBStatus{}, &model.DefaultExperiment{}, - ) - return NewDB(db.DB(), NewSQLiteDialect()), nil + ); err != nil { + return nil, util.Wrap(err, "Failed to automigrate models") + } + + sqlDB, err := dbInstance.DB() + if err != nil { + return nil, util.Wrap(err, "Failed to get generic database object from GORM DB") + } + return NewDB(sqlDB, NewSQLiteDialect()), nil } func NewFakeDBOrFatal() *DB { 
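The db_fake.go change above targets the gorm.io (GORM v2) API, where Open takes a driver dialector, AutoMigrate returns an error that must be checked, and the underlying *sql.DB is obtained via DB(). The following is a minimal standalone sketch of that pattern, not part of this change; the Note model and the final ping are illustrative assumptions only.

package main

import (
	"log"

	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

// Note is an illustrative model, standing in for the KFP models migrated above.
type Note struct {
	ID   uint `gorm:"primaryKey"`
	Text string
}

func main() {
	// Open an in-memory SQLite database with the GORM v2 API.
	db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
	if err != nil {
		log.Fatalf("open: %v", err)
	}
	// AutoMigrate returns an error in GORM v2, unlike the v1 chained API.
	if err := db.AutoMigrate(&Note{}); err != nil {
		log.Fatalf("automigrate: %v", err)
	}
	// DB() now returns (*sql.DB, error) rather than *sql.DB directly.
	sqlDB, err := db.DB()
	if err != nil {
		log.Fatalf("db: %v", err)
	}
	if err := sqlDB.Ping(); err != nil {
		log.Fatalf("ping: %v", err)
	}
	log.Println("in-memory sqlite ready")
}

The same three steps (Open, checked AutoMigrate, DB()) are what the updated NewFakeDB performs before handing the *sql.DB to NewDB with the SQLite dialect.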
diff --git a/backend/src/apiserver/storage/experiment_store.go b/backend/src/apiserver/storage/experiment_store.go index 3f704d9415d..eea2f134174 100644 --- a/backend/src/apiserver/storage/experiment_store.go +++ b/backend/src/apiserver/storage/experiment_store.go @@ -196,8 +196,8 @@ func (s *ExperimentStore) scanRows(rows *sql.Rows) ([]*model.Experiment, error) var experiments []*model.Experiment for rows.Next() { var uuid, name, description, namespace, storageState string - var createdAtInSec sql.NullInt64 - var lastRunCreatedAtInSec sql.NullInt64 + var createdAtInSec int64 + var lastRunCreatedAtInSec int64 err := rows.Scan(&uuid, &name, &description, &createdAtInSec, &lastRunCreatedAtInSec, &namespace, &storageState) if err != nil { return experiments, err @@ -206,8 +206,8 @@ func (s *ExperimentStore) scanRows(rows *sql.Rows) ([]*model.Experiment, error) UUID: uuid, Name: name, Description: description, - CreatedAtInSec: createdAtInSec.Int64, - LastRunCreatedAtInSec: lastRunCreatedAtInSec.Int64, + CreatedAtInSec: createdAtInSec, + LastRunCreatedAtInSec: lastRunCreatedAtInSec, Namespace: namespace, StorageState: model.StorageState(storageState).ToV2(), } diff --git a/backend/src/apiserver/storage/job_store.go b/backend/src/apiserver/storage/job_store.go index 1d8ded430a6..55f0b4ed812 100644 --- a/backend/src/apiserver/storage/job_store.go +++ b/backend/src/apiserver/storage/job_store.go @@ -271,7 +271,7 @@ func (s *JobStore) scanRows(r *sql.Rows) ([]*model.Job, error) { K8SName: name, Namespace: namespace, ServiceAccount: serviceAccount, - Description: description, + Description: string(description), Enabled: enabled, Conditions: conditions, ExperimentId: expId, @@ -294,9 +294,9 @@ func (s *JobStore) scanRows(r *sql.Rows) ([]*model.Job, error) { PipelineId: pipelineId, PipelineVersionId: pvId, PipelineName: pipelineName, - PipelineSpecManifest: pipelineSpecManifest, - WorkflowSpecManifest: workflowSpecManifest, - Parameters: parameters, + PipelineSpecManifest: model.LargeText(pipelineSpecManifest), + WorkflowSpecManifest: model.LargeText(workflowSpecManifest), + Parameters: model.LargeText(parameters), RuntimeConfig: runtimeConfig, }, CreatedAtInSec: createdAtInSec.Int64, diff --git a/backend/src/apiserver/storage/pipeline_store.go b/backend/src/apiserver/storage/pipeline_store.go index aa3e1ba8558..52b3668bbbf 100644 --- a/backend/src/apiserver/storage/pipeline_store.go +++ b/backend/src/apiserver/storage/pipeline_store.go @@ -422,7 +422,7 @@ func (s *PipelineStore) scanJoinedRows(rows *sql.Rows) ([]*model.Pipeline, []*mo CreatedAtInSec: createdAtInSec.Int64, Name: name, DisplayName: displayName, - Description: description, + Description: model.LargeText(description), Status: status, Namespace: namespace.String, }, @@ -434,13 +434,13 @@ func (s *PipelineStore) scanJoinedRows(rows *sql.Rows) ([]*model.Pipeline, []*mo CreatedAtInSec: versionCreatedAtInSec.Int64, Name: versionName.String, DisplayName: versionDisplayName.String, - Parameters: versionParameters.String, + Parameters: model.LargeText(versionParameters.String), PipelineId: versionPipelineId.String, Status: model.PipelineVersionStatus(versionStatus.String), CodeSourceUrl: versionCodeSourceUrl.String, - Description: versionDescription.String, - PipelineSpec: pipelineSpec.String, - PipelineSpecURI: pipelineSpecURI.String, + Description: model.LargeText(versionDescription.String), + PipelineSpec: model.LargeText(pipelineSpec.String), + PipelineSpecURI: model.LargeText(pipelineSpecURI.String), }, ) } @@ -472,7 +472,7 @@ func (s 
*PipelineStore) scanPipelinesRows(rows *sql.Rows) ([]*model.Pipeline, er CreatedAtInSec: createdAtInSec.Int64, Name: name.String, DisplayName: displayName.String, - Description: description.String, + Description: model.LargeText(description.String), Status: model.PipelineStatus(status.String), Namespace: namespace.String, }, @@ -909,13 +909,13 @@ func (s *PipelineStore) scanPipelineVersionsRows(rows *sql.Rows) ([]*model.Pipel CreatedAtInSec: createdAtInSec.Int64, Name: name.String, DisplayName: displayName.String, - Parameters: parameters.String, + Parameters: model.LargeText(parameters.String), PipelineId: pipelineId.String, CodeSourceUrl: codeSourceUrl.String, Status: model.PipelineVersionStatus(status.String), - Description: description.String, - PipelineSpec: pipelineSpec.String, - PipelineSpecURI: pipelineSpecURI.String, + Description: model.LargeText(description.String), + PipelineSpec: model.LargeText(pipelineSpec.String), + PipelineSpecURI: model.LargeText(pipelineSpecURI.String), }, ) } diff --git a/backend/src/apiserver/storage/pipeline_store_kubernetes.go b/backend/src/apiserver/storage/pipeline_store_kubernetes.go index 783e74e43e9..7059130fb33 100644 --- a/backend/src/apiserver/storage/pipeline_store_kubernetes.go +++ b/backend/src/apiserver/storage/pipeline_store_kubernetes.go @@ -538,7 +538,7 @@ func (k *PipelineStoreKubernetes) getK8sPipelineVersions( err := k.client.List(ctx, &pipelineVersions, listOptions...) if err != nil { - return nil, util.NewInternalServerError(err, errMsg) + return nil, util.NewInternalServerError(err, "%s", errMsg) } // If there is no pipeline version ID filter, then just return the results @@ -555,7 +555,7 @@ func (k *PipelineStoreKubernetes) getK8sPipelineVersions( // Fallback to not using the cache if the specific pipeline version is missing err = k.clientNoCache.List(ctx, &pipelineVersions, listOptions...) 
if err != nil { - return nil, util.NewInternalServerError(err, errMsg) + return nil, util.NewInternalServerError(err, "%s", errMsg) } for _, pipelineVersion := range pipelineVersions.Items { diff --git a/backend/src/apiserver/storage/pipeline_store_kubernetes_test.go b/backend/src/apiserver/storage/pipeline_store_kubernetes_test.go index 732fdfec0c9..18a5a239dc2 100644 --- a/backend/src/apiserver/storage/pipeline_store_kubernetes_test.go +++ b/backend/src/apiserver/storage/pipeline_store_kubernetes_test.go @@ -37,7 +37,7 @@ func TestListK8sPipelines(t *testing.T) { pipeline := &model.Pipeline{ Name: "Test Pipeline", - Description: "Test Pipeline Description", + Description: model.LargeText("Test Pipeline Description"), Namespace: "Test", } @@ -58,7 +58,7 @@ func TestListK8sPipelines_WithFilter(t *testing.T) { pipeline := &model.Pipeline{ Name: "Test Pipeline", - Description: "Test Pipeline Description", + Description: model.LargeText("Test Pipeline Description"), Namespace: "Test", } _, err := store.CreatePipeline(pipeline) @@ -91,12 +91,12 @@ func TestListK8sPipelines_Pagination(t *testing.T) { pipeline1 := &model.Pipeline{ Name: "Test Pipeline 1", - Description: "Test Pipeline 1 Description", + Description: model.LargeText("Test Pipeline 1 Description"), Namespace: "Test", } pipeline2 := &model.Pipeline{ Name: "Test Pipeline 2", - Description: "Test Pipeline 2 Description", + Description: model.LargeText("Test Pipeline 2 Description"), Namespace: "Test", } @@ -129,12 +129,12 @@ func TestListK8sPipelines_Pagination_Descend(t *testing.T) { pipeline1 := &model.Pipeline{ Name: "Test Pipeline 1", - Description: "Test Pipeline 1 Description", + Description: model.LargeText("Test Pipeline 1 Description"), Namespace: "Test", } pipeline2 := &model.Pipeline{ Name: "Test Pipeline 2", - Description: "Test Pipeline 2 Description", + Description: model.LargeText("Test Pipeline 2 Description"), Namespace: "Test", } @@ -166,12 +166,12 @@ func TestListK8sPipelinesV1_Pagination_NameAsc(t *testing.T) { pipeline1 := &model.Pipeline{ Name: "Test Pipeline 1", - Description: "Test Pipeline 1 Description", + Description: model.LargeText("Test Pipeline 1 Description"), Namespace: "Test", } pipeline2 := &model.Pipeline{ Name: "Test Pipeline 2", - Description: "Test Pipeline 2 Description", + Description: model.LargeText("Test Pipeline 2 Description"), Namespace: "Test", } @@ -244,7 +244,7 @@ func TestCreateK8sPipeline(t *testing.T) { pipeline := &model.Pipeline{ Name: "Test Pipeline", - Description: "Test Pipeline Description", + Description: model.LargeText("Test Pipeline Description"), Namespace: "Test", } @@ -281,8 +281,8 @@ func TestCreateK8sPipelineVersion(t *testing.T) { pipelineVersion := &model.PipelineVersion{ Name: "Test Pipeline Version", PipelineId: DefaultFakePipelineIdTwo, - Description: "Test Pipeline Version Description", - PipelineSpec: getBasicPipelineSpecYAML(), + Description: model.LargeText("Test Pipeline Version Description"), + PipelineSpec: model.LargeText(getBasicPipelineSpecYAML()), } _, err := store.CreatePipelineVersion(pipelineVersion) @@ -316,7 +316,7 @@ func TestGetK8sPipelineVersion(t *testing.T) { pipelineVersion := &model.PipelineVersion{ UUID: DefaultFakePipelineIdTwo, Name: "Test Pipeline Version", - Description: "Test Pipeline Version Description", + Description: model.LargeText("Test Pipeline Version Description"), } p, err := store.GetPipelineVersion(DefaultFakePipelineIdTwo) @@ -358,13 +358,13 @@ func TestListK8sPipelineVersions_Pagination(t *testing.T) { pipelineVersion1 
:= &model.PipelineVersion{ Name: "Test Pipeline Version 1", PipelineId: DefaultFakePipelineIdTwo, - PipelineSpec: getBasicPipelineSpecYAML(), + PipelineSpec: model.LargeText(getBasicPipelineSpecYAML()), } pipelineVersion2 := &model.PipelineVersion{ Name: "Test Pipeline Version 2", PipelineId: DefaultFakePipelineIdTwo, - PipelineSpec: getBasicPipelineSpecYAML(), + PipelineSpec: model.LargeText(getBasicPipelineSpecYAML()), } _, err := store.CreatePipelineVersion(pipelineVersion1) @@ -398,13 +398,13 @@ func TestListK8sPipelineVersions_Pagination_Descend(t *testing.T) { pipelineVersion1 := &model.PipelineVersion{ Name: "Test Pipeline Version 1", PipelineId: DefaultFakePipelineIdTwo, - PipelineSpec: getBasicPipelineSpecYAML(), + PipelineSpec: model.LargeText(getBasicPipelineSpecYAML()), } pipelineVersion2 := &model.PipelineVersion{ Name: "Test Pipeline Version 2", PipelineId: DefaultFakePipelineIdTwo, - PipelineSpec: getBasicPipelineSpecYAML(), + PipelineSpec: model.LargeText(getBasicPipelineSpecYAML()), } _, err := store.CreatePipelineVersion(pipelineVersion1) @@ -486,7 +486,7 @@ func TestCreatePipelineAndPipelineVersion(t *testing.T) { } k8sPipelineVersion := &model.PipelineVersion{ Name: "Test Pipeline Version", - PipelineSpec: getBasicPipelineSpecYAML(), + PipelineSpec: model.LargeText(getBasicPipelineSpecYAML()), } _, _, err := store.CreatePipelineAndPipelineVersion(k8sPipeline, k8sPipelineVersion) diff --git a/backend/src/apiserver/storage/pipeline_store_test.go b/backend/src/apiserver/storage/pipeline_store_test.go index 6a04827e55b..9683ba50ef6 100644 --- a/backend/src/apiserver/storage/pipeline_store_test.go +++ b/backend/src/apiserver/storage/pipeline_store_test.go @@ -36,7 +36,7 @@ func createPipelineV1(name string) *model.Pipeline { func createPipeline(name string, description string, namespace string) *model.Pipeline { return &model.Pipeline{ Name: name, - Description: description, + Description: model.LargeText(description), Status: model.PipelineReady, Namespace: namespace, } @@ -48,10 +48,10 @@ func createPipelineVersion(pipelineId string, name string, description string, u Parameters: `[{"Name": "param1"}]`, PipelineId: pipelineId, CodeSourceUrl: url, - Description: description, + Description: model.LargeText(description), Status: model.PipelineVersionReady, - PipelineSpec: pipelineSpec, - PipelineSpecURI: pipelineSpecURI, + PipelineSpec: model.LargeText(pipelineSpec), + PipelineSpecURI: model.LargeText(pipelineSpecURI), } } @@ -622,7 +622,7 @@ func TestPipelineStore_CreatePipelineAndPipelineVersion(t *testing.T) { Name: "pipeline v2 version 1", Description: "pipeline v2 version description", CodeSourceUrl: "gs://my-bucket/pipeline_v2.py", - PipelineSpec: v2SpecHelloWorld, + PipelineSpec: model.LargeText(v2SpecHelloWorld), PipelineSpecURI: "pipeline_version_two.yaml", }, &model.Pipeline{ @@ -641,7 +641,7 @@ func TestPipelineStore_CreatePipelineAndPipelineVersion(t *testing.T) { PipelineId: DefaultFakePipelineIdTwo, Status: model.PipelineVersionCreating, CodeSourceUrl: "gs://my-bucket/pipeline_v2.py", - PipelineSpec: v2SpecHelloWorld, + PipelineSpec: model.LargeText(v2SpecHelloWorld), PipelineSpecURI: "pipeline_version_two.yaml", }, false, @@ -659,7 +659,7 @@ func TestPipelineStore_CreatePipelineAndPipelineVersion(t *testing.T) { Parameters: `[{"name":"param1","value":"one"},{"name":"param2","value":"two"}]`, Description: "pipeline v1 version description", CodeSourceUrl: "gs://my-bucket/pipeline_v1.py", - PipelineSpec: v1SpecHelloWorld, + PipelineSpec: 
model.LargeText(v1SpecHelloWorld), PipelineSpecURI: "pipeline_version_one.yaml", }, &model.Pipeline{ @@ -679,7 +679,7 @@ func TestPipelineStore_CreatePipelineAndPipelineVersion(t *testing.T) { Description: "pipeline v1 version description", Status: model.PipelineVersionCreating, CodeSourceUrl: "gs://my-bucket/pipeline_v1.py", - PipelineSpec: v1SpecHelloWorld, + PipelineSpec: model.LargeText(v1SpecHelloWorld), PipelineSpecURI: "pipeline_version_one.yaml", }, false, @@ -696,7 +696,7 @@ func TestPipelineStore_CreatePipelineAndPipelineVersion(t *testing.T) { Name: "pipeline version three", Description: "pipeline version three description", CodeSourceUrl: "gs://my-bucket/pipeline_v2.py", - PipelineSpec: v2SpecHelloWorld, + PipelineSpec: model.LargeText(v2SpecHelloWorld), PipelineSpecURI: "pipeline_version_two.yaml", }, nil, @@ -715,7 +715,7 @@ func TestPipelineStore_CreatePipelineAndPipelineVersion(t *testing.T) { Name: "default version", Description: "default version description", CodeSourceUrl: "gs://my-bucket/pipeline_v2.py", - PipelineSpec: v2SpecHelloWorld, + PipelineSpec: model.LargeText(v2SpecHelloWorld), PipelineSpecURI: "pipeline_version_two.yaml", }, &model.Pipeline{ @@ -734,7 +734,7 @@ func TestPipelineStore_CreatePipelineAndPipelineVersion(t *testing.T) { Description: "default version description", Status: model.PipelineVersionCreating, CodeSourceUrl: "gs://my-bucket/pipeline_v2.py", - PipelineSpec: v2SpecHelloWorld, + PipelineSpec: model.LargeText(v2SpecHelloWorld), PipelineSpecURI: "pipeline_version_two.yaml", }, false, diff --git a/backend/src/apiserver/storage/resource_reference_store.go b/backend/src/apiserver/storage/resource_reference_store.go index 5b2e6f1fbd5..08e3928cfa7 100644 --- a/backend/src/apiserver/storage/resource_reference_store.go +++ b/backend/src/apiserver/storage/resource_reference_store.go @@ -181,7 +181,7 @@ func (s *ResourceReferenceStore) scanRows(r *sql.Rows) ([]model.ResourceReferenc ReferenceName: referenceName, ReferenceType: model.ResourceType(referenceType), Relationship: model.Relationship(relationship), - Payload: payload, + Payload: model.LargeText(payload), }) } return references, nil diff --git a/backend/src/apiserver/storage/resource_reference_store_test.go b/backend/src/apiserver/storage/resource_reference_store_test.go index bc5411a128c..630da0362c8 100644 --- a/backend/src/apiserver/storage/resource_reference_store_test.go +++ b/backend/src/apiserver/storage/resource_reference_store_test.go @@ -77,7 +77,7 @@ func TestResourceReferenceStore(t *testing.T) { assert.Equal(t, &model.ResourceReference{ ResourceUUID: "r1", ResourceType: model.RunResourceType, ReferenceUUID: defaultFakeExpId, ReferenceName: "e1", ReferenceType: model.ExperimentResourceType, - Relationship: model.CreatorRelationship, Payload: string(payload), + Relationship: model.CreatorRelationship, Payload: model.LargeText(payload), }, experimentRef) // Delete resource references diff --git a/backend/src/apiserver/storage/run_store.go b/backend/src/apiserver/storage/run_store.go index 9d266b6b72b..781c29888c3 100644 --- a/backend/src/apiserver/storage/run_store.go +++ b/backend/src/apiserver/storage/run_store.go @@ -238,7 +238,7 @@ func (s *RunStore) GetRun(runId string) (*model.Run, error) { if len(runs) == 0 { return nil, util.NewResourceNotFoundError("Run", fmt.Sprint(runId)) } - if runs[0].RunDetails.WorkflowRuntimeManifest == "" && runs[0].WorkflowSpecManifest != "" { + if string(runs[0].WorkflowRuntimeManifest) == "" && string(runs[0].WorkflowSpecManifest) != "" { // This 
can only happen when workflow reporting is failed. return nil, util.NewResourceNotFoundError("Failed to get run: %s", runId) } @@ -389,7 +389,7 @@ func (s *RunStore) scanRowsToRuns(rows *sql.Rows) ([]*model.Run, error) { StorageState: model.StorageState(storageState), Namespace: namespace, ServiceAccount: serviceAccount, - Description: description, + Description: string(description), RecurringRunId: jId, RunDetails: model.RunDetails{ CreatedAtInSec: createdAtInSec.Int64, @@ -397,8 +397,8 @@ func (s *RunStore) scanRowsToRuns(rows *sql.Rows) ([]*model.Run, error) { FinishedAtInSec: finishedAtInSec.Int64, Conditions: conditions, State: model.RuntimeState(state.String), - PipelineRuntimeManifest: pipelineRuntimeManifest, - WorkflowRuntimeManifest: workflowRuntimeManifest, + PipelineRuntimeManifest: model.LargeText(pipelineRuntimeManifest), + WorkflowRuntimeManifest: model.LargeText(workflowRuntimeManifest), PipelineContextId: pipelineContextId.Int64, PipelineRunContextId: pipelineRunContextId.Int64, TaskDetails: tasks, @@ -410,9 +410,9 @@ func (s *RunStore) scanRowsToRuns(rows *sql.Rows) ([]*model.Run, error) { PipelineId: pipelineId, PipelineVersionId: pvId, PipelineName: pipelineName, - PipelineSpecManifest: pipelineSpecManifest, - WorkflowSpecManifest: workflowSpecManifest, - Parameters: parameters, + PipelineSpecManifest: model.LargeText(pipelineSpecManifest), + WorkflowSpecManifest: model.LargeText(workflowSpecManifest), + Parameters: model.LargeText(parameters), RuntimeConfig: runtimeConfig, }, } @@ -441,7 +441,7 @@ func parseRuntimeConfig(runtimeParameters sql.NullString, pipelineRoot sql.NullS if pipelineRoot.Valid { pipelineRootString = pipelineRoot.String } - return model.RuntimeConfig{Parameters: runtimeParametersString, PipelineRoot: pipelineRootString} + return model.RuntimeConfig{Parameters: model.LargeText(runtimeParametersString), PipelineRoot: model.LargeText(pipelineRootString)} } func parseResourceReferences(resourceRefString sql.NullString) ([]*model.ResourceReference, error) { @@ -787,7 +787,7 @@ func (s *RunStore) scanRowsToRunMetrics(rows *sql.Rows) ([]*model.RunMetric, err Name: name, NumberValue: val, Format: form, - Payload: payload, + Payload: model.LargeText(payload), }, ) } diff --git a/backend/src/apiserver/storage/task_store.go b/backend/src/apiserver/storage/task_store.go index df4b54d933f..04408c11f48 100644 --- a/backend/src/apiserver/storage/task_store.go +++ b/backend/src/apiserver/storage/task_store.go @@ -200,8 +200,8 @@ func (s *TaskStore) scanRows(rows *sql.Rows) ([]*model.Task, error) { Name: name.String, ParentTaskId: parentTaskId.String, StateHistory: stateHistoryNew, - MLMDInputs: inputs.String, - MLMDOutputs: outputs.String, + MLMDInputs: model.LargeText(inputs.String), + MLMDOutputs: model.LargeText(outputs.String), ChildrenPods: childrenPods, } tasks = append(tasks, task) diff --git a/backend/src/apiserver/template/argo_template.go b/backend/src/apiserver/template/argo_template.go index fdb1ce556d3..e3eedd82076 100644 --- a/backend/src/apiserver/template/argo_template.go +++ b/backend/src/apiserver/template/argo_template.go @@ -17,8 +17,6 @@ package template import ( "fmt" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - workflowapi "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" "github.com/argoproj/argo-workflows/v3/workflow/validate" "github.com/kubeflow/pipelines/backend/src/apiserver/common" @@ -42,7 +40,7 @@ func (t *Argo) RunWorkflow(modelRun *model.Run, options RunWorkflowOptions) (uti 
workflow.SetLabelsToAllTemplates(util.LabelKeyCacheEnabled, common.IsCacheEnabled()) // Convert parameters - parameters, err := modelToParametersMap(modelRun.PipelineSpec.Parameters) + parameters, err := modelToParametersMap(string(modelRun.Parameters)) if err != nil { return nil, util.Wrap(err, "Failed to convert parameters") } @@ -54,7 +52,7 @@ func (t *Argo) RunWorkflow(modelRun *model.Run, options RunWorkflowOptions) (uti workflow.OverrideParameters(parameters) // Replace macros - formatter := util.NewRunParameterFormatter(options.RunId, options.RunAt) + formatter := util.NewRunParameterFormatter(options.RunID, options.RunAt) formattedParams := formatter.FormatWorkflowParameters(workflow.GetWorkflowParametersAsMap()) workflow.OverrideParameters(formattedParams) @@ -69,15 +67,15 @@ func (t *Argo) RunWorkflow(modelRun *model.Run, options RunWorkflowOptions) (uti } // Add label to the workflow so it can be persisted by persistent agent later. - workflow.SetLabels(util.LabelKeyWorkflowRunId, options.RunId) + workflow.SetLabels(util.LabelKeyWorkflowRunId, options.RunID) // Add run name annotation to the workflow so that it can be logged by the Metadata Writer. workflow.SetAnnotations(util.AnnotationKeyRunName, modelRun.DisplayName) - // Replace {{workflow.uid}} with runId - err = workflow.ReplaceUID(options.RunId) + // Replace {{workflow.uid}} with RunId + err = workflow.ReplaceUID(options.RunID) if err != nil { return nil, util.NewInternalServerError(err, "Failed to replace workflow ID") } - workflow.SetPodMetadataLabels(util.LabelKeyWorkflowRunId, options.RunId) + workflow.SetPodMetadataLabels(util.LabelKeyWorkflowRunId, options.RunID) // Marking auto-added artifacts as optional. Otherwise most older workflows will start failing after upgrade to Argo 2.3. // TODO: Fix the components to explicitly declare the artifacts they really output. 
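Most of the storage-layer churn in the hunks above is the mechanical wrapping of free-form text columns in model.LargeText. As a minimal sketch of why those explicit conversions are needed — assuming model.LargeText is a defined string type declared elsewhere in this change (its declaration is not shown in these hunks), with all type and field names below purely illustrative:

// Sketch only: LargeText stands in for the real model.LargeText declaration,
// which this part of the diff does not show.
package main

import "fmt"

type LargeText string

type PipelineVersion struct {
	Name         string    // short identifier, stays a plain string
	Description  LargeText // long free-form text
	PipelineSpec LargeText // potentially very large YAML payload
}

func main() {
	spec := "pipelineInfo:\n  name: hello-world"
	pv := PipelineVersion{
		Name:         "v1",
		Description:  LargeText("demo description"), // string -> LargeText requires an explicit conversion
		PipelineSpec: LargeText(spec),
	}
	// Converting back is just as explicit, e.g. when passing the value to an
	// API that expects a plain string, as the scan and RunWorkflow hunks above do.
	fmt.Println(len(string(pv.PipelineSpec)), pv.Description)
}

Because a defined string type never converts implicitly, every scanner and test literal in the hunks above gains a model.LargeText(...) or string(...) conversion; that is the whole pattern behind these one-line changes.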
@@ -101,13 +99,13 @@ func (t *Argo) IsCacheDisabled() bool { var _ Template = &Argo{} -func (t *Argo) ScheduledWorkflow(modelJob *model.Job, ownerReferences []metav1.OwnerReference) (*scheduledworkflow.ScheduledWorkflow, error) { +func (t *Argo) ScheduledWorkflow(modelJob *model.Job) (*scheduledworkflow.ScheduledWorkflow, error) { workflow := util.NewWorkflow(t.wf.Workflow.DeepCopy()) // Overwrite namespace from the job object if modelJob.Namespace != "" { workflow.SetExecutionNamespace(modelJob.Namespace) } - parameters, err := modelToParametersMap(modelJob.PipelineSpec.Parameters) + parameters, err := modelToParametersMap(string(modelJob.Parameters)) if err != nil { return nil, util.Wrap(err, "Failed to convert parameters") } @@ -126,12 +124,12 @@ func (t *Argo) ScheduledWorkflow(modelJob *model.Job, ownerReferences []metav1.O workflow.PatchTemplateOutputArtifacts() // We assume that v1 Argo template use v1 parameters ignoring runtime config - swfParameters, err := stringArrayToCRDParameters(modelJob.Parameters) + swfParameters, err := stringArrayToCRDParameters(string(modelJob.Parameters)) if err != nil { return nil, util.Wrap(err, "Failed to convert v1 parameters to CRD parameters") } - scheduledWorkflow, err := NewGenericScheduledWorkflow(modelJob, ownerReferences) + scheduledWorkflow, err := NewGenericScheduledWorkflow(modelJob) if err != nil { return nil, err } diff --git a/backend/src/apiserver/template/template.go b/backend/src/apiserver/template/template.go index 65f2ad6e8d1..a4d0b31afa1 100644 --- a/backend/src/apiserver/template/template.go +++ b/backend/src/apiserver/template/template.go @@ -32,6 +32,7 @@ import ( "google.golang.org/protobuf/encoding/protojson" structpb "google.golang.org/protobuf/types/known/structpb" goyaml "gopkg.in/yaml.v3" + corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "sigs.k8s.io/yaml" ) @@ -50,8 +51,10 @@ const ( SCHEMA_VERSION_2_1_0 = "2.1.0" ) -var ErrorInvalidPipelineSpec = fmt.Errorf("pipeline spec is invalid") -var ErrorInvalidPlatformSpec = fmt.Errorf("Platform spec is invalid") +var ( + ErrorInvalidPipelineSpec = fmt.Errorf("pipeline spec is invalid") + ErrorInvalidPlatformSpec = fmt.Errorf("platform spec is invalid") +) // inferTemplateFormat infers format from pipeline template. 
// There is no guarantee that the template is valid in inferred format, so validation @@ -128,24 +131,25 @@ type Template interface { // Get workflow RunWorkflow(modelRun *model.Run, options RunWorkflowOptions) (util.ExecutionSpec, error) - ScheduledWorkflow(modelJob *model.Job, ownerReferences []metav1.OwnerReference) (*scheduledworkflow.ScheduledWorkflow, error) + ScheduledWorkflow(modelJob *model.Job) (*scheduledworkflow.ScheduledWorkflow, error) IsCacheDisabled() bool } type RunWorkflowOptions struct { - RunId string - RunAt int64 - CacheDisabled bool + RunID string + RunAt int64 + CacheDisabled bool + DefaultWorkspace *corev1.PersistentVolumeClaimSpec } -func New(bytes []byte, cacheDisabled bool) (Template, error) { +func New(bytes []byte, cacheDisabled bool, defaultWorkspace *corev1.PersistentVolumeClaimSpec) (Template, error) { format := inferTemplateFormat(bytes) switch format { case V1: return NewArgoTemplate(bytes) case V2: - return NewV2SpecTemplate(bytes, cacheDisabled) + return NewV2SpecTemplate(bytes, cacheDisabled, defaultWorkspace) default: return nil, util.NewInvalidInputErrorWithDetails(ErrorInvalidPipelineSpec, "unknown template format") } @@ -164,7 +168,7 @@ func modelToPipelineJobRuntimeConfig(modelRuntimeConfig *model.RuntimeConfig) (* } runtimeConfig := &pipelinespec.PipelineJob_RuntimeConfig{} runtimeConfig.ParameterValues = *parameters - runtimeConfig.GcsOutputDirectory = modelRuntimeConfig.PipelineRoot + runtimeConfig.GcsOutputDirectory = string(modelRuntimeConfig.PipelineRoot) return runtimeConfig, nil } @@ -185,7 +189,7 @@ func StringMapToCRDParameters(modelParams string) ([]scheduledworkflow.Parameter for name, value := range parameters { valueBytes, err := value.MarshalJSON() if err != nil { - return nil, util.NewInternalServerError(err, "error marshalling model parameters") + return nil, util.NewInternalServerError(err, "error marshaling model parameters") } swParam := scheduledworkflow.Parameter{ Name: name, diff --git a/backend/src/apiserver/template/template_test.go b/backend/src/apiserver/template/template_test.go index 837bacaa9d4..27703525eac 100644 --- a/backend/src/apiserver/template/template_test.go +++ b/backend/src/apiserver/template/template_test.go @@ -33,6 +33,7 @@ import ( "google.golang.org/grpc/codes" "google.golang.org/protobuf/encoding/protojson" goyaml "gopkg.in/yaml.v3" + corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "sigs.k8s.io/yaml" ) @@ -155,6 +156,13 @@ spec: container: image: docker/whalesay:latest` +var defaultPVC = &corev1.PersistentVolumeClaimSpec{ + AccessModes: []corev1.PersistentVolumeAccessMode{ + corev1.ReadWriteMany, + }, + StorageClassName: util.StringPointer("my-storage"), +} + func TestToSwfCRDResourceGeneratedName_SpecialCharsAndSpace(t *testing.T) { name, err := toSWFCRDResourceGeneratedName("! 
HaVe ä £unky name") assert.Nil(t, err) @@ -177,7 +185,7 @@ func TestScheduledWorkflow(t *testing.T) { proxy.InitializeConfigWithEmptyForTests() v2SpecHelloWorldYAML := loadYaml(t, "testdata/hello_world.yaml") - v2Template, _ := New([]byte(v2SpecHelloWorldYAML), true) + v2Template, _ := New([]byte(v2SpecHelloWorldYAML), true, defaultPVC) modelJob := &model.Job{ K8SName: "name1", @@ -194,7 +202,7 @@ func TestScheduledWorkflow(t *testing.T) { PipelineSpec: model.PipelineSpec{ PipelineId: "1", PipelineName: "pipeline name", - PipelineSpecManifest: v2SpecHelloWorldYAML, + PipelineSpecManifest: model.LargeText(v2SpecHelloWorldYAML), RuntimeConfig: model.RuntimeConfig{ Parameters: "{\"y\":\"world\"}", }, @@ -206,10 +214,7 @@ func TestScheduledWorkflow(t *testing.T) { APIVersion: "kubeflow.org/v2beta1", Kind: "ScheduledWorkflow", }, - ObjectMeta: metav1.ObjectMeta{ - GenerateName: "name1", - OwnerReferences: []metav1.OwnerReference{}, - }, + ObjectMeta: metav1.ObjectMeta{GenerateName: "name1"}, Spec: scheduledworkflow.ScheduledWorkflowSpec{ Enabled: true, MaxConcurrency: util.Int64Pointer(1), @@ -231,7 +236,7 @@ func TestScheduledWorkflow(t *testing.T) { }, } - actualScheduledWorkflow, err := v2Template.ScheduledWorkflow(modelJob, []metav1.OwnerReference{}) + actualScheduledWorkflow, err := v2Template.ScheduledWorkflow(modelJob) assert.Nil(t, err) // We don't compare this field because it changes with every driver/launcher image release. @@ -288,9 +293,10 @@ func TestNewTemplate_V2(t *testing.T) { err = protojson.Unmarshal(jsonData, &expectedSpec) assert.Nil(t, err) expectedTemplate := &V2Spec{ - spec: &expectedSpec, + spec: &expectedSpec, + defaultWorkspace: defaultPVC, } - templateV2Spec, err := New([]byte(template), false) + templateV2Spec, err := New([]byte(template), false, defaultPVC) assert.Nil(t, err) assert.Equal(t, expectedTemplate, templateV2Spec) } @@ -316,17 +322,18 @@ func TestNewTemplate_WithPlatformSpec(t *testing.T) { protojson.Unmarshal(jsonData, &expectedPlatformSpec) expectedTemplate := &V2Spec{ - spec: &expectedPipelineSpec, - platformSpec: &expectedPlatformSpec, + spec: &expectedPipelineSpec, + platformSpec: &expectedPlatformSpec, + defaultWorkspace: defaultPVC, } - templateV2Spec, err := New([]byte(template), false) + templateV2Spec, err := New([]byte(template), false, defaultPVC) assert.Nil(t, err) assert.Equal(t, expectedTemplate, templateV2Spec) } func TestNewTemplate_V2_InvalidSchemaVersion(t *testing.T) { template := loadYaml(t, "testdata/hello_world_schema_2_0_0.yaml") - _, err := New([]byte(template), true) + _, err := New([]byte(template), true, defaultPVC) assert.NotNil(t, err) assert.Contains(t, err.Error(), "KFP only supports schema version 2.1.0") } @@ -336,9 +343,9 @@ func TestNewTemplate_V2_InvalidSchemaVersion(t *testing.T) { // so we verify the parsed object. func TestBytes_V2_WithExecutorConfig(t *testing.T) { template := loadYaml(t, "testdata/pipeline_with_volume.yaml") - templateV2Spec, _ := New([]byte(template), true) + templateV2Spec, _ := New([]byte(template), true, defaultPVC) templateBytes := templateV2Spec.Bytes() - newTemplateV2Spec, err := New(templateBytes, true) + newTemplateV2Spec, err := New(templateBytes, true, defaultPVC) assert.Nil(t, err) assert.Equal(t, templateV2Spec, newTemplateV2Spec) } @@ -348,9 +355,9 @@ func TestBytes_V2_WithExecutorConfig(t *testing.T) { // so we verify the parsed object. 
func TestBytes_V2(t *testing.T) { template := loadYaml(t, "testdata/hello_world.yaml") - templateV2Spec, _ := New([]byte(template), true) + templateV2Spec, _ := New([]byte(template), true, defaultPVC) templateBytes := templateV2Spec.Bytes() - newTemplateV2Spec, err := New(templateBytes, true) + newTemplateV2Spec, err := New(templateBytes, true, defaultPVC) assert.Nil(t, err) assert.Equal(t, templateV2Spec, newTemplateV2Spec) } diff --git a/backend/src/apiserver/template/v2_template.go b/backend/src/apiserver/template/v2_template.go index 4c5f05bf496..6e84a41f317 100644 --- a/backend/src/apiserver/template/v2_template.go +++ b/backend/src/apiserver/template/v2_template.go @@ -23,7 +23,8 @@ import ( "regexp" "strings" - structpb "github.com/golang/protobuf/ptypes/struct" + "google.golang.org/protobuf/types/known/structpb" + "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" "github.com/kubeflow/pipelines/backend/src/apiserver/model" "github.com/kubeflow/pipelines/backend/src/common/util" @@ -31,21 +32,23 @@ import ( "github.com/kubeflow/pipelines/backend/src/v2/compiler/argocompiler" "google.golang.org/protobuf/encoding/protojson" goyaml "gopkg.in/yaml.v3" + corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "sigs.k8s.io/yaml" ) type V2Spec struct { - spec *pipelinespec.PipelineSpec - platformSpec *pipelinespec.PlatformSpec - cacheDisabled bool + spec *pipelinespec.PipelineSpec + platformSpec *pipelinespec.PlatformSpec + cacheDisabled bool + defaultWorkspace *corev1.PersistentVolumeClaimSpec } var _ Template = &V2Spec{} var Launcher = "" -func NewGenericScheduledWorkflow(modelJob *model.Job, ownerReferences []metav1.OwnerReference) (*scheduledworkflow.ScheduledWorkflow, error) { +func NewGenericScheduledWorkflow(modelJob *model.Job) (*scheduledworkflow.ScheduledWorkflow, error) { swfGeneratedName, err := toSWFCRDResourceGeneratedName(modelJob.K8SName) if err != nil { return nil, util.Wrap(err, "Create job failed") @@ -61,10 +64,7 @@ func NewGenericScheduledWorkflow(modelJob *model.Job, ownerReferences []metav1.O APIVersion: "kubeflow.org/v2beta1", Kind: "ScheduledWorkflow", }, - ObjectMeta: metav1.ObjectMeta{ - GenerateName: swfGeneratedName, - OwnerReferences: ownerReferences, - }, + ObjectMeta: metav1.ObjectMeta{GenerateName: swfGeneratedName}, Spec: scheduledworkflow.ScheduledWorkflowSpec{ Enabled: modelJob.Enabled, MaxConcurrency: &modelJob.MaxConcurrency, @@ -88,7 +88,7 @@ func (t *V2Spec) PlatformSpec() *pipelinespec.PlatformSpec { } // Converts modelJob to ScheduledWorkflow. 
-func (t *V2Spec) ScheduledWorkflow(modelJob *model.Job, ownerReferences []metav1.OwnerReference) (*scheduledworkflow.ScheduledWorkflow, error) { +func (t *V2Spec) ScheduledWorkflow(modelJob *model.Job) (*scheduledworkflow.ScheduledWorkflow, error) { job := &pipelinespec.PipelineJob{} bytes, err := protojson.Marshal(t.spec) @@ -121,7 +121,11 @@ func (t *V2Spec) ScheduledWorkflow(modelJob *model.Job, ownerReferences []metav1 var obj interface{} if util.CurrentExecutionType() == util.ArgoWorkflow { - obj, err = argocompiler.Compile(job, kubernetesSpec, &argocompiler.Options{CacheDisabled: t.cacheDisabled}) + opts := &argocompiler.Options{ + CacheDisabled: t.cacheDisabled, + DefaultWorkspace: t.defaultWorkspace, + } + obj, err = argocompiler.Compile(job, kubernetesSpec, opts) } if err != nil { return nil, util.Wrap(err, "Failed to compile job") @@ -142,12 +146,12 @@ func (t *V2Spec) ScheduledWorkflow(modelJob *model.Job, ownerReferences []metav1 } // Disable istio sidecar injection if not specified executionSpec.SetAnnotationsToAllTemplatesIfKeyNotExist(util.AnnotationKeyIstioSidecarInject, util.AnnotationValueIstioSidecarInjectDisabled) - parameters, err := StringMapToCRDParameters(modelJob.RuntimeConfig.Parameters) + parameters, err := StringMapToCRDParameters(string(modelJob.RuntimeConfig.Parameters)) if err != nil { return nil, util.Wrap(err, "Converting runtime config's parameters to CDR parameters failed") } - scheduledWorkflow, err := NewGenericScheduledWorkflow(modelJob, ownerReferences) + scheduledWorkflow, err := NewGenericScheduledWorkflow(modelJob) if err != nil { return nil, err } @@ -166,8 +170,8 @@ func (t *V2Spec) GetTemplateType() TemplateType { return V2 } -func NewV2SpecTemplate(template []byte, cacheDisabled bool) (*V2Spec, error) { - v2Spec := &V2Spec{cacheDisabled: cacheDisabled} +func NewV2SpecTemplate(template []byte, cacheDisabled bool, defaultWorkspace *corev1.PersistentVolumeClaimSpec) (*V2Spec, error) { + v2Spec := &V2Spec{cacheDisabled: cacheDisabled, defaultWorkspace: defaultWorkspace} decoder := goyaml.NewDecoder(bytes.NewReader(template)) for { var value map[string]interface{} @@ -177,12 +181,12 @@ func NewV2SpecTemplate(template []byte, cacheDisabled bool) (*V2Spec, error) { if errors.Is(err, io.EOF) { break } - if value == nil { - continue - } if err != nil { return nil, util.NewInvalidInputErrorWithDetails(ErrorInvalidPipelineSpec, fmt.Sprintf("unable to decode yaml document: %s", err.Error())) } + if value == nil { + continue + } valueBytes, err := goyaml.Marshal(&value) if err != nil { return nil, util.NewInvalidInputErrorWithDetails(ErrorInvalidPipelineSpec, fmt.Sprintf("unable to marshal this yaml document: %s", err.Error())) @@ -326,7 +330,11 @@ func (t *V2Spec) RunWorkflow(modelRun *model.Run, options RunWorkflowOptions) (u var obj interface{} if util.CurrentExecutionType() == util.ArgoWorkflow { - obj, err = argocompiler.Compile(job, kubernetesSpec, &argocompiler.Options{CacheDisabled: options.CacheDisabled}) + opts := &argocompiler.Options{ + CacheDisabled: options.CacheDisabled, + DefaultWorkspace: t.defaultWorkspace, + } + obj, err = argocompiler.Compile(job, kubernetesSpec, opts) } if err != nil { return nil, util.Wrap(err, "Failed to compile job") @@ -343,15 +351,15 @@ func (t *V2Spec) RunWorkflow(modelRun *model.Run, options RunWorkflowOptions) (u // Disable istio sidecar injection if not specified executionSpec.SetAnnotationsToAllTemplatesIfKeyNotExist(util.AnnotationKeyIstioSidecarInject, util.AnnotationValueIstioSidecarInjectDisabled) // Add 
label to the workflow so it can be persisted by persistent agent later. - executionSpec.SetLabels(util.LabelKeyWorkflowRunId, options.RunId) + executionSpec.SetLabels(util.LabelKeyWorkflowRunId, options.RunID) // Add run name annotation to the workflow so that it can be logged by the Metadata Writer. executionSpec.SetAnnotations(util.AnnotationKeyRunName, modelRun.DisplayName) // Replace {{workflow.uid}} with runId - err = executionSpec.ReplaceUID(options.RunId) + err = executionSpec.ReplaceUID(options.RunID) if err != nil { return nil, util.NewInternalServerError(err, "Failed to replace workflow ID") } - executionSpec.SetPodMetadataLabels(util.LabelKeyWorkflowRunId, options.RunId) + executionSpec.SetPodMetadataLabels(util.LabelKeyWorkflowRunId, options.RunID) return executionSpec, nil } @@ -410,7 +418,7 @@ func (t *V2Spec) validatePipelineJobInputs(job *pipelinespec.PipelineJob) error // Verify the parameter type is correct switch param.GetParameterType() { case pipelinespec.ParameterType_PARAMETER_TYPE_ENUM_UNSPECIFIED: - return util.NewInvalidInputError(fmt.Sprintf("input parameter %s has unspecified type", name)) + return util.NewInvalidInputError("%s", fmt.Sprintf("input parameter %s has unspecified type", name)) case pipelinespec.ParameterType_NUMBER_DOUBLE, pipelinespec.ParameterType_NUMBER_INTEGER: if _, ok := input.GetKind().(*structpb.Value_NumberValue); !ok { return util.NewInvalidInputError("input parameter %s requires type double or integer, "+ diff --git a/backend/src/apiserver/validation/length.go b/backend/src/apiserver/validation/length.go new file mode 100644 index 00000000000..5be277ed190 --- /dev/null +++ b/backend/src/apiserver/validation/length.go @@ -0,0 +1,143 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package validation provides validation logic for API server fields, +// including field length checks based on logical maximums defined for migration safety +// and backend schema compatibility. +package validation + +import ( + "fmt" + "reflect" + + "github.com/kubeflow/pipelines/backend/src/apiserver/model" + "github.com/kubeflow/pipelines/backend/src/common/util" +) + +// ColLenSpec describes the logical max length we enforce during migration. +// Model is the Go struct used by GORM; Field is the Go struct field name (NOT the DB column name). +// Max is the allowed character length after upgrade. 
+type ColLenSpec struct { + Model interface{} + Field string // Go struct field name + Max int +} + +var LengthSpecs = []ColLenSpec{ + {Model: &model.DefaultExperiment{}, Field: "DefaultExperimentId", Max: 191}, + + {Model: &model.Experiment{}, Field: "UUID", Max: 191}, + {Model: &model.Experiment{}, Field: "Name", Max: 128}, + {Model: &model.Experiment{}, Field: "Namespace", Max: 63}, + + {Model: &model.Job{}, Field: "UUID", Max: 191}, + + {Model: &model.PipelineVersion{}, Field: "UUID", Max: 191}, + {Model: &model.PipelineVersion{}, Field: "Name", Max: 127}, + {Model: &model.PipelineVersion{}, Field: "PipelineId", Max: 64}, + + {Model: &model.Pipeline{}, Field: "UUID", Max: 64}, + {Model: &model.Pipeline{}, Field: "Name", Max: 128}, + {Model: &model.Pipeline{}, Field: "Namespace", Max: 63}, // gorm v1 has this size limit + + {Model: &model.ResourceReference{}, Field: "ResourceUUID", Max: 191}, + {Model: &model.ResourceReference{}, Field: "ReferenceUUID", Max: 191}, + + {Model: &model.Run{}, Field: "UUID", Max: 191}, + {Model: &model.Run{}, Field: "Namespace", Max: 63}, + {Model: &model.Run{}, Field: "ExperimentId", Max: 64}, + {Model: &model.Run{}, Field: "Conditions", Max: 125}, + + {Model: &model.RunMetric{}, Field: "RunUUID", Max: 191}, + {Model: &model.RunMetric{}, Field: "NodeID", Max: 191}, + {Model: &model.RunMetric{}, Field: "Name", Max: 191}, + + {Model: &model.Task{}, Field: "UUID", Max: 191}, + // Note: struct field is RunId, column is RunUUID. + {Model: &model.Task{}, Field: "RunId", Max: 191}, +} + +var fieldMaxLenMap map[string]int + +func init() { + fieldMaxLenMap = make(map[string]int, len(LengthSpecs)) + for _, spec := range LengthSpecs { + typ := reflect.TypeOf(spec.Model).Elem().Name() + fieldMaxLenMap[typ+"."+spec.Field] = spec.Max + } +} + +func getMaxLength(modelName, field string) (int, bool) { + v, ok := fieldMaxLenMap[modelName+"."+field] + return v, ok +} + +// ValidateFieldLength validates a single string field against LengthSpecs. +// Primarily used in pipeline_upload_server for request-level validation of raw input values, +// especially for fields like Namespace that may be modified before model construction. +// Use this for targeted, field-specific validation. For full-model validation, +// prefer ValidateModel(). +func ValidateFieldLength(modelName, fieldName, value string) error { + maxLen, ok := getMaxLength(modelName, fieldName) + if !ok { + return util.NewInternalServerError( + fmt.Errorf("length spec missing for %s.%s", modelName, fieldName), + "Length spec missing for %s.%s", modelName, fieldName, + ) + } + + if len(value) > maxLen { + return util.NewInvalidInputError("%s.%s length cannot exceed %d", modelName, fieldName, maxLen) + } + return nil +} + +// ValidateModel dynamically validates all fields of the given model that are defined in LengthSpecs. +// It uses reflection to extract field values and compare against the max length. +// Returns the first encountered InvalidInputError or InternalServerError. 
+func ValidateModel(model interface{}) error { + val := reflect.ValueOf(model) + if val.Kind() != reflect.Ptr || val.IsNil() { + return util.NewInternalServerError( + fmt.Errorf("model must be a non-nil pointer"), + "Model passed to ValidateModel must be a non-nil pointer", + ) + } + + elem := val.Elem() + typ := elem.Type() + modelName := typ.Name() + + for i := 0; i < typ.NumField(); i++ { + field := typ.Field(i) + valueField := elem.Field(i) + + // Only validate string fields + if field.Type.Kind() != reflect.String { + continue + } + + maxLen, ok := getMaxLength(modelName, field.Name) + if !ok { + continue + } + + strVal := valueField.String() + if len(strVal) > maxLen { + return util.NewInvalidInputError("%s.%s length cannot exceed %d", modelName, field.Name, maxLen) + } + } + + return nil +} diff --git a/backend/src/apiserver/validation/length_test.go b/backend/src/apiserver/validation/length_test.go new file mode 100644 index 00000000000..babe3dc1f1a --- /dev/null +++ b/backend/src/apiserver/validation/length_test.go @@ -0,0 +1,43 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package validation + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestValidateFieldLength_Valid(t *testing.T) { + // Exact max length should pass + value := strings.Repeat("a", 191) + err := ValidateFieldLength("Task", "RunId", value) + assert.Nil(t, err) +} + +func TestValidateFieldLength_TooLong(t *testing.T) { + // Length exceeding max should return error + value := strings.Repeat("a", 192) + err := ValidateFieldLength("Task", "RunId", value) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "Task.RunId length cannot exceed 191") +} + +func TestValidateFieldLength_NoSpec(t *testing.T) { + err := ValidateFieldLength("NonExistentModel", "NonExistentField", "value") + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "Length spec missing for NonExistentModel.NonExistentField") +} diff --git a/backend/src/apiserver/visualization/exporter.py b/backend/src/apiserver/visualization/exporter.py index c115c2f1124..e4e08bd1f01 100644 --- a/backend/src/apiserver/visualization/exporter.py +++ b/backend/src/apiserver/visualization/exporter.py @@ -20,6 +20,7 @@ from enum import Enum from pathlib import Path from typing import Text +import warnings from jupyter_client import KernelManager from nbconvert import HTMLExporter from nbconvert.preprocessors import ExecutePreprocessor @@ -132,6 +133,19 @@ def __init__( allow_errors=True ) + def __del__(self): + # Best-effort cleanup to avoid ResourceWarning about unclosed ZMQ sockets + if getattr(self, "km", None): + try: + self.km.shutdown_kernel(now=True) + except Exception: + pass + + try: + self.km.cleanup_resources() + except Exception: + pass + def generate_html_from_notebook(self, nb: NotebookNode) -> Text: """Converts a provided NotebookNode to HTML. 
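The validation package introduced above exposes two entry points on top of LengthSpecs: ValidateFieldLength for a single raw value and ValidateModel for a reflection-based check of a whole model object. A hedged usage sketch, relying only on the functions and limits shown in the diff (the concrete field values and the surrounding main function are made up for illustration):

package main

import (
	"fmt"
	"strings"

	"github.com/kubeflow/pipelines/backend/src/apiserver/model"
	"github.com/kubeflow/pipelines/backend/src/apiserver/validation"
)

func main() {
	// Targeted check of a single raw input value, e.g. a namespace taken from
	// the request before the model object is even constructed.
	if err := validation.ValidateFieldLength("Pipeline", "Namespace", strings.Repeat("n", 64)); err != nil {
		fmt.Println(err) // Pipeline.Namespace is capped at 63 characters in LengthSpecs
	}

	// Whole-model check: reflection walks the string fields and compares each
	// field that appears in LengthSpecs against its maximum.
	p := &model.Pipeline{Name: strings.Repeat("a", 200)} // exceeds the 128-character limit
	if err := validation.ValidateModel(p); err != nil {
		fmt.Println(err) // error text includes "Pipeline.Name length cannot exceed 128"
	}
}

Both helpers return util errors (InvalidInputError on violation, InternalServerError when a spec is missing or the argument is not a non-nil pointer), so callers can surface them to the API client directly.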
diff --git a/backend/src/apiserver/visualization/requirements.in b/backend/src/apiserver/visualization/requirements.in index e24773cf61d..2a29ea9e427 100644 --- a/backend/src/apiserver/visualization/requirements.in +++ b/backend/src/apiserver/visualization/requirements.in @@ -2,17 +2,18 @@ bokeh==1.2.0 gcsfs==0.2.3 google-api-python-client==1.7.* itables==0.1.0 -ipykernel==5.1.1 -ipython==7.12.0 +ipykernel==6.29.5 +ipython==7.34.0 Jinja2>=3.1.2,<4 jupyter-client==8.6.3 -markupsafe==2.0.1 +markupsafe>=2.1.1 nbconvert==7.16.5 nbformat==5.10.4 -scikit-learn==0.24.2 -tensorflow==2.10.1 -tensorflow-metadata==1.9.* -tensorflow-model-analysis==0.40.* -tensorflow-data-validation==1.9.* -tensorflow-serving-api==2.10.1 +pyarrow==14.0.1 +scikit-learn==1.5.0 +tensorflow==2.15.1 +tensorflow-metadata==1.16.0 +tensorflow-serving-api==2.15.1 tornado==6.* +protobuf>=4.21.6,<5 +werkzeug==3.1.3 diff --git a/backend/src/apiserver/visualization/requirements.txt b/backend/src/apiserver/visualization/requirements.txt index fa15911aca8..30211a05fa7 100644 --- a/backend/src/apiserver/visualization/requirements.txt +++ b/backend/src/apiserver/visualization/requirements.txt @@ -1,696 +1,323 @@ # -# This file is autogenerated by pip-compile with Python 3.8 +# This file is autogenerated by pip-compile with Python 3.11 # by the following command: # # pip-compile --output-file=- --resolver=backtracking - # -absl-py==1.4.0 +absl-py==2.3.1 # via # tensorboard # tensorflow - # tensorflow-data-validation # tensorflow-metadata - # tensorflow-model-analysis - # tfx-bsl -anyio==4.5.2 - # via - # httpx - # jupyter-server -apache-beam[gcp]==2.46.0 - # via - # apache-beam - # tensorflow-data-validation - # tensorflow-model-analysis - # tfx-bsl -argon2-cffi==23.1.0 - # via jupyter-server -argon2-cffi-bindings==21.2.0 - # via argon2-cffi -arrow==1.3.0 - # via isoduration astunparse==1.6.3 # via tensorflow -async-lru==2.0.4 - # via jupyterlab -attrs==24.3.0 +attrs==25.3.0 # via # jsonschema # referencing -babel==2.16.0 - # via jupyterlab-server backcall==0.2.0 # via ipython -beautifulsoup4==4.12.3 +beautifulsoup4==4.13.5 # via nbconvert -bleach[css]==6.1.0 +bleach[css]==6.2.0 # via # bleach # nbconvert bokeh==1.2.0 # via -r - -cachetools==4.2.4 - # via - # apache-beam - # google-auth -certifi==2024.12.14 - # via - # httpcore - # httpx - # requests -cffi==1.17.1 - # via argon2-cffi-bindings -charset-normalizer==3.4.1 +cachetools==5.5.2 + # via google-auth +certifi==2025.8.3 # via requests -cloudpickle==2.2.1 - # via apache-beam -comm==0.2.2 - # via ipywidgets -crcmod==1.7 - # via apache-beam -decorator==5.1.1 +charset-normalizer==3.4.3 + # via requests +comm==0.2.3 + # via ipykernel +debugpy==1.8.16 + # via ipykernel +decorator==5.2.1 # via # gcsfs # ipython defusedxml==0.7.1 # via nbconvert -dill==0.3.1.1 - # via apache-beam -docopt==0.6.2 - # via hdfs -exceptiongroup==1.2.2 - # via anyio -fastavro==1.9.7 - # via apache-beam -fasteners==0.19 - # via - # apache-beam - # google-apitools -fastjsonschema==2.21.1 +fastjsonschema==2.21.2 # via nbformat -flatbuffers==24.12.23 +flatbuffers==25.2.10 # via tensorflow -fqdn==1.5.1 - # via jsonschema -gast==0.4.0 +gast==0.6.0 # via tensorflow gcsfs==0.2.3 # via -r - -google-api-core[grpc]==2.24.0 - # via - # google-api-core - # google-cloud-bigquery - # google-cloud-bigquery-storage - # google-cloud-bigtable - # google-cloud-core - # google-cloud-datastore - # google-cloud-dlp - # google-cloud-language - # google-cloud-pubsub - # google-cloud-pubsublite - # google-cloud-recommendations-ai - # 
google-cloud-spanner - # google-cloud-videointelligence - # google-cloud-vision google-api-python-client==1.7.12 + # via -r - +google-auth==2.40.3 # via - # -r - - # tfx-bsl -google-apitools==0.5.31 - # via apache-beam -google-auth==2.37.0 - # via - # apache-beam # gcsfs - # google-api-core # google-api-python-client # google-auth-httplib2 # google-auth-oauthlib - # google-cloud-bigquery - # google-cloud-core - # google-cloud-dlp - # google-cloud-pubsub - # google-cloud-vision # tensorboard -google-auth-httplib2==0.1.1 - # via - # apache-beam - # google-api-python-client -google-auth-oauthlib==0.4.6 +google-auth-httplib2==0.2.0 + # via google-api-python-client +google-auth-oauthlib==1.2.2 # via # gcsfs # tensorboard -google-cloud-bigquery==3.27.0 - # via apache-beam -google-cloud-bigquery-storage==2.16.2 - # via apache-beam -google-cloud-bigtable==1.7.3 - # via apache-beam -google-cloud-core==2.4.1 - # via - # apache-beam - # google-cloud-bigquery - # google-cloud-bigtable - # google-cloud-datastore - # google-cloud-spanner -google-cloud-datastore==1.15.5 - # via apache-beam -google-cloud-dlp==3.18.0 - # via apache-beam -google-cloud-language==1.3.2 - # via apache-beam -google-cloud-pubsub==2.21.4 - # via - # apache-beam - # google-cloud-pubsublite -google-cloud-pubsublite==1.11.1 - # via apache-beam -google-cloud-recommendations-ai==0.7.1 - # via apache-beam -google-cloud-spanner==3.46.0 - # via apache-beam -google-cloud-videointelligence==1.16.3 - # via apache-beam -google-cloud-vision==3.7.2 - # via apache-beam -google-crc32c==1.5.0 - # via google-resumable-media google-pasta==0.2.0 # via tensorflow -google-resumable-media==2.7.2 - # via google-cloud-bigquery -googleapis-common-protos[grpc]==1.63.1 +googleapis-common-protos==1.70.0 + # via tensorflow-metadata +grpcio==1.74.0 # via - # google-api-core - # grpc-google-iam-v1 - # grpcio-status - # tensorflow-metadata -grpc-google-iam-v1==0.12.7 - # via - # google-cloud-bigtable - # google-cloud-pubsub - # google-cloud-spanner -grpc-interceptor==0.15.4 - # via google-cloud-spanner -grpcio==1.69.0 - # via - # apache-beam - # google-api-core - # google-cloud-pubsub - # google-cloud-pubsublite - # googleapis-common-protos - # grpc-google-iam-v1 - # grpc-interceptor - # grpcio-status # tensorboard # tensorflow # tensorflow-serving-api -grpcio-status==1.48.2 - # via - # google-api-core - # google-cloud-pubsub - # google-cloud-pubsublite -h11==0.14.0 - # via httpcore -h5py==3.11.0 +h5py==3.14.0 # via tensorflow -hdfs==2.7.3 - # via apache-beam -httpcore==1.0.7 - # via httpx -httplib2==0.21.0 +httplib2==0.30.2 # via - # apache-beam # google-api-python-client - # google-apitools # google-auth-httplib2 - # oauth2client -httpx==0.28.1 - # via jupyterlab idna==3.10 - # via - # anyio - # httpx - # jsonschema - # requests -importlib-metadata==8.5.0 - # via - # jupyter-client - # jupyter-lsp - # jupyterlab - # jupyterlab-server - # markdown - # nbconvert -importlib-resources==6.4.5 - # via - # jsonschema - # jsonschema-specifications - # jupyterlab -ipykernel==5.1.1 - # via - # -r - - # jupyterlab -ipython==7.12.0 + # via requests +ipykernel==6.29.5 + # via -r - +ipython==7.34.0 # via # -r - # ipykernel - # ipywidgets - # tensorflow-model-analysis -ipython-genutils==0.2.0 - # via ipywidgets -ipywidgets==7.8.5 - # via tensorflow-model-analysis -isoduration==20.11.0 - # via jsonschema itables==0.1.0 # via -r - jedi==0.19.2 # via ipython -jinja2==3.1.5 +jinja2==3.1.6 # via # -r - # bokeh - # jupyter-server - # jupyterlab - # jupyterlab-server # nbconvert 
-joblib==0.14.1 - # via - # scikit-learn - # tensorflow-data-validation -json5==0.10.0 - # via jupyterlab-server -jsonpointer==3.0.0 - # via jsonschema -jsonschema[format-nongpl]==4.23.0 - # via - # jupyter-events - # jupyterlab-server - # nbformat -jsonschema-specifications==2023.12.1 +joblib==1.5.2 + # via scikit-learn +jsonschema==4.25.1 + # via nbformat +jsonschema-specifications==2025.9.1 # via jsonschema jupyter-client==8.6.3 # via # -r - # ipykernel - # jupyter-server # nbclient -jupyter-core==5.7.2 +jupyter-core==5.8.1 # via + # ipykernel # jupyter-client - # jupyter-server - # jupyterlab # nbclient # nbconvert # nbformat -jupyter-events==0.10.0 - # via jupyter-server -jupyter-lsp==2.2.5 - # via jupyterlab -jupyter-server==2.14.2 - # via - # jupyter-lsp - # jupyterlab - # jupyterlab-server - # notebook - # notebook-shim -jupyter-server-terminals==0.5.3 - # via jupyter-server -jupyterlab==4.1.5 - # via notebook jupyterlab-pygments==0.3.0 # via nbconvert -jupyterlab-server==2.27.3 - # via - # jupyterlab - # notebook -jupyterlab-widgets==1.1.11 - # via ipywidgets -keras==2.10.0 - # via tensorflow -keras-preprocessing==1.1.2 +keras==2.15.0 # via tensorflow libclang==18.1.1 # via tensorflow -markdown==3.7 +markdown==3.9 # via tensorboard -markupsafe==2.0.1 +markupsafe==3.0.2 # via # -r - # jinja2 # nbconvert -mistune==3.1.0 + # werkzeug +matplotlib-inline==0.1.7 + # via + # ipykernel + # ipython +mistune==3.1.4 # via nbconvert -nbclient==0.10.1 +ml-dtypes==0.3.2 + # via tensorflow +nbclient==0.10.2 # via nbconvert nbconvert==7.16.5 - # via - # -r - - # jupyter-server + # via -r - nbformat==5.10.4 # via # -r - - # jupyter-server # nbclient # nbconvert -notebook==7.1.3 - # via widgetsnbextension -notebook-shim==0.2.4 - # via - # jupyterlab - # notebook -numpy==1.24.4 +nest-asyncio==1.6.0 + # via ipykernel +numpy==1.26.4 # via - # apache-beam # bokeh # h5py - # keras-preprocessing + # ml-dtypes # pandas # pyarrow # scikit-learn # scipy # tensorboard # tensorflow - # tensorflow-data-validation - # tensorflow-model-analysis - # tfx-bsl -oauth2client==4.1.3 - # via google-apitools -oauthlib==3.2.2 +oauthlib==3.3.1 # via requests-oauthlib -objsize==0.6.1 - # via apache-beam opt-einsum==3.4.0 # via tensorflow -orjson==3.10.14 - # via apache-beam -overrides==7.7.0 - # via - # google-cloud-pubsublite - # jupyter-server -packaging==24.2 +packaging==25.0 # via # bokeh - # google-cloud-bigquery - # jupyter-server - # jupyterlab - # jupyterlab-server + # ipykernel # nbconvert # tensorflow -pandas==1.5.3 - # via - # itables - # tensorflow-data-validation - # tensorflow-model-analysis - # tfx-bsl +pandas==2.3.2 + # via itables pandocfilters==1.5.1 # via nbconvert -parso==0.8.4 +parso==0.8.5 # via jedi pexpect==4.9.0 # via ipython pickleshare==0.7.5 # via ipython -pillow==10.4.0 +pillow==11.3.0 # via bokeh -pkgutil-resolve-name==1.3.10 - # via jsonschema -platformdirs==4.3.6 +platformdirs==4.4.0 # via jupyter-core -prometheus-client==0.21.1 - # via jupyter-server -prompt-toolkit==3.0.48 +prompt-toolkit==3.0.52 # via ipython -proto-plus==1.25.0 - # via - # apache-beam - # google-api-core - # google-cloud-bigquery-storage - # google-cloud-dlp - # google-cloud-pubsub - # google-cloud-recommendations-ai - # google-cloud-spanner - # google-cloud-vision -protobuf==3.19.6 - # via - # apache-beam - # google-api-core - # google-cloud-bigquery-storage - # google-cloud-bigtable - # google-cloud-datastore - # google-cloud-dlp - # google-cloud-language - # google-cloud-pubsub - # google-cloud-recommendations-ai - # 
google-cloud-spanner - # google-cloud-videointelligence - # google-cloud-vision +protobuf==4.25.8 + # via + # -r - # googleapis-common-protos - # grpc-google-iam-v1 - # grpcio-status - # proto-plus # tensorboard # tensorflow - # tensorflow-data-validation # tensorflow-metadata - # tensorflow-model-analysis # tensorflow-serving-api - # tfx-bsl +psutil==7.0.0 + # via ipykernel ptyprocess==0.7.0 - # via - # pexpect - # terminado -pyarrow==5.0.0 - # via - # apache-beam - # tensorflow-data-validation - # tensorflow-model-analysis - # tfx-bsl + # via pexpect +pyarrow==14.0.1 + # via -r - pyasn1==0.6.1 # via - # oauth2client # pyasn1-modules # rsa -pyasn1-modules==0.4.1 - # via - # google-auth - # oauth2client -pycparser==2.22 - # via cffi -pydot==1.4.2 - # via apache-beam -pyfarmhash==0.3.2 - # via tensorflow-data-validation -pygments==2.19.1 +pyasn1-modules==0.4.2 + # via google-auth +pygments==2.19.2 # via # ipython # nbconvert -pymongo==3.13.0 - # via apache-beam -pyparsing==3.1.4 - # via - # httplib2 - # pydot +pyparsing==3.2.3 + # via httplib2 python-dateutil==2.9.0.post0 # via - # apache-beam - # arrow # bokeh - # google-cloud-bigquery # jupyter-client # pandas -python-json-logger==3.2.1 - # via jupyter-events -pytz==2024.2 - # via - # apache-beam - # babel - # pandas +pytz==2025.2 + # via pandas pyyaml==6.0.2 + # via bokeh +pyzmq==27.1.0 # via - # bokeh - # jupyter-events -pyzmq==26.2.0 - # via + # ipykernel # jupyter-client - # jupyter-server -referencing==0.35.1 +referencing==0.36.2 # via # jsonschema # jsonschema-specifications - # jupyter-events -regex==2024.11.6 - # via apache-beam -requests==2.32.3 +requests==2.32.5 # via - # apache-beam # gcsfs - # google-api-core - # google-cloud-bigquery - # hdfs - # jupyterlab-server # requests-oauthlib # tensorboard requests-oauthlib==2.0.0 # via google-auth-oauthlib -rfc3339-validator==0.1.4 - # via - # jsonschema - # jupyter-events -rfc3986-validator==0.1.1 - # via - # jsonschema - # jupyter-events -rpds-py==0.20.1 +rpds-py==0.27.1 # via # jsonschema # referencing -rsa==4.9 - # via - # google-auth - # oauth2client -scikit-learn==0.24.2 +rsa==4.9.1 + # via google-auth +scikit-learn==1.5.0 # via -r - -scipy==1.10.1 - # via - # scikit-learn - # tensorflow-model-analysis -send2trash==1.8.3 - # via jupyter-server +scipy==1.16.1 + # via scikit-learn six==1.17.0 # via # astunparse - # bleach # bokeh # google-api-python-client - # google-apitools # google-pasta - # hdfs - # keras-preprocessing - # oauth2client # python-dateutil - # rfc3339-validator + # tensorboard # tensorflow - # tensorflow-data-validation - # tensorflow-model-analysis -sniffio==1.3.1 - # via anyio -soupsieve==2.6 +soupsieve==2.8 # via beautifulsoup4 -sqlparse==0.5.3 - # via google-cloud-spanner -tensorboard==2.10.1 +tensorboard==2.15.2 # via tensorflow -tensorboard-data-server==0.6.1 - # via tensorboard -tensorboard-plugin-wit==1.8.1 +tensorboard-data-server==0.7.2 # via tensorboard -tensorflow==2.10.1 +tensorflow==2.15.1 # via # -r - - # tensorflow-data-validation - # tensorflow-model-analysis # tensorflow-serving-api - # tfx-bsl -tensorflow-data-validation==1.9.0 - # via -r - -tensorflow-estimator==2.10.0 +tensorflow-estimator==2.15.0 # via tensorflow -tensorflow-io-gcs-filesystem==0.34.0 +tensorflow-io-gcs-filesystem==0.37.1 # via tensorflow -tensorflow-metadata==1.9.0 - # via - # -r - - # tensorflow-data-validation - # tensorflow-model-analysis - # tfx-bsl -tensorflow-model-analysis==0.40.0 +tensorflow-metadata==1.16.0 # via -r - -tensorflow-serving-api==2.10.1 - # via - # -r 
- - # tfx-bsl -termcolor==2.4.0 +tensorflow-serving-api==2.15.1 + # via -r - +termcolor==3.1.0 # via tensorflow -terminado==0.18.1 - # via - # jupyter-server - # jupyter-server-terminals -tfx-bsl==1.9.0 - # via - # tensorflow-data-validation - # tensorflow-model-analysis -threadpoolctl==3.5.0 +threadpoolctl==3.6.0 # via scikit-learn -tinycss2==1.2.1 +tinycss2==1.4.0 # via bleach -tomli==2.2.1 - # via jupyterlab -tornado==6.4.2 +tornado==6.5.2 # via # -r - # bokeh # ipykernel # jupyter-client - # jupyter-server - # jupyterlab - # notebook - # terminado traitlets==5.14.3 # via - # comm # ipykernel # ipython - # ipywidgets # jupyter-client # jupyter-core - # jupyter-events - # jupyter-server - # jupyterlab + # matplotlib-inline # nbclient # nbconvert # nbformat -types-python-dateutil==2.9.0.20241206 - # via arrow -typing-extensions==4.12.2 - # via - # anyio - # apache-beam - # async-lru - # mistune - # python-json-logger +typing-extensions==4.15.0 + # via + # beautifulsoup4 + # referencing # tensorflow -uri-template==1.3.0 - # via jsonschema +tzdata==2025.2 + # via pandas uritemplate==3.0.1 # via google-api-python-client -urllib3==2.2.3 +urllib3==2.5.0 # via requests wcwidth==0.2.13 # via prompt-toolkit -webcolors==24.8.0 - # via jsonschema webencodings==0.5.1 # via # bleach # tinycss2 -websocket-client==1.8.0 - # via jupyter-server -werkzeug==2.1.2 - # via tensorboard -wheel==0.45.1 +werkzeug==3.1.3 # via - # astunparse + # -r - # tensorboard -widgetsnbextension==3.6.10 - # via ipywidgets -wrapt==1.17.2 +wheel==0.45.1 + # via astunparse +wrapt==1.14.2 # via tensorflow -zipp==3.20.2 - # via - # importlib-metadata - # importlib-resources -zstandard==0.23.0 - # via apache-beam # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/backend/src/apiserver/visualization/server.py b/backend/src/apiserver/visualization/server.py index 30d1ad1e43b..07c982b7e10 100644 --- a/backend/src/apiserver/visualization/server.py +++ b/backend/src/apiserver/visualization/server.py @@ -35,7 +35,7 @@ "being stopped." ) -args = parser.parse_args() +args, _unknown = parser.parse_known_args() _exporter = exporter.Exporter(args.timeout) diff --git a/backend/src/apiserver/visualization/update_requirements.sh b/backend/src/apiserver/visualization/update_requirements.sh index a1d22db67a6..45d942cf51f 100755 --- a/backend/src/apiserver/visualization/update_requirements.sh +++ b/backend/src/apiserver/visualization/update_requirements.sh @@ -1,7 +1,9 @@ #!/bin/bash # This image should be in sync with Dockerfile.visualization. -IMAGE="tensorflow/tensorflow:2.10.1" +# Use a TF 2.12-compatible image tag available on Docker Hub +# Use Python 3.11 base for dependency resolution +IMAGE="python:3.11" # tensorflow/tfx default entrypoint is Apache BEAM, because Apache BEAM doesn't # support custom entrypoint for now. We need to override with --entrypoint "" # for other `docker run` usecase. 
diff --git a/backend/src/apiserver/webhook/pipelineversion_webhook.go b/backend/src/apiserver/webhook/pipelineversion_webhook.go index 12ccefc3c43..ccca48cd190 100644 --- a/backend/src/apiserver/webhook/pipelineversion_webhook.go +++ b/backend/src/apiserver/webhook/pipelineversion_webhook.go @@ -96,7 +96,7 @@ func (p *PipelineVersionsWebhook) ValidateCreate( } // cache enabled or not doesn't matter in this context - tmpl, err := template.NewV2SpecTemplate([]byte(modelPipelineVersion.PipelineSpec), false) + tmpl, err := template.NewV2SpecTemplate([]byte(modelPipelineVersion.PipelineSpec), false, nil) if err != nil { return nil, newBadRequestError(fmt.Sprintf("The pipeline spec is invalid: %v", err)) } diff --git a/backend/src/cache/client/pod_fake.go b/backend/src/cache/client/pod_fake.go index b939583cb0f..5f66cb6ab4d 100644 --- a/backend/src/cache/client/pod_fake.go +++ b/backend/src/cache/client/pod_fake.go @@ -118,6 +118,11 @@ func (FakePodClient) ProxyGet(scheme string, name string, port string, path stri return nil } +func (FakePodClient) UpdateResize(ctx context.Context, podName string, pod *corev1.Pod, opts v1.UpdateOptions) (result *corev1.Pod, err error) { + glog.Error("This fake method is not yet implemented") + return nil, nil +} + type FakeBadPodClient struct { FakePodClient } diff --git a/backend/src/cache/client_manager.go b/backend/src/cache/client_manager.go index d7384fce953..8a13e300ac8 100644 --- a/backend/src/cache/client_manager.go +++ b/backend/src/cache/client_manager.go @@ -24,11 +24,12 @@ import ( "github.com/cenkalti/backoff" "github.com/golang/glog" - "github.com/jinzhu/gorm" "github.com/kubeflow/pipelines/backend/src/cache/client" "github.com/kubeflow/pipelines/backend/src/cache/model" "github.com/kubeflow/pipelines/backend/src/cache/storage" "github.com/kubeflow/pipelines/backend/src/common/util" + "gorm.io/driver/mysql" + "gorm.io/gorm" ) const ( @@ -51,7 +52,12 @@ func (c *ClientManager) KubernetesCoreClient() client.KubernetesCoreInterface { } func (c *ClientManager) Close() { - c.db.Close() + sqlDB, err := c.db.DB.DB() + if err != nil { + log.Printf("Failed to retrieve underlying sql.DB: %v", err) + return + } + sqlDB.Close() } func (c *ClientManager) init(params WhSvrDBParameters, clientParams util.ClientParameters) { @@ -77,28 +83,28 @@ func initDBClient(params WhSvrDBParameters, initConnectionTimeout time.Duration) // db is safe for concurrent use by multiple goroutines // and maintains its own pool of idle connections. - db, err := gorm.Open(driverName, arg) + db, err := gorm.Open(mysql.Open(arg), &gorm.Config{}) util.TerminateIfError(err) // Create table - response := db.AutoMigrate(&model.ExecutionCache{}) - if response.Error != nil { + err = db.AutoMigrate(&model.ExecutionCache{}) + if err != nil { glog.Fatalf("Failed to initialize the databases.") } - response = db.Model(&model.ExecutionCache{}).ModifyColumn("ExecutionOutput", "longtext") - if response.Error != nil { - glog.Fatalf("Failed to update the execution output type. Error: %s", response.Error) + err = db.Migrator().AlterColumn(&model.ExecutionCache{}, "ExecutionOutput") + if err != nil { + glog.Fatalf("Failed to update the execution output type. Error: %s", err) } - response = db.Model(&model.ExecutionCache{}).ModifyColumn("ExecutionTemplate", "longtext not null") - if response.Error != nil { - glog.Fatalf("Failed to update the execution template type. 
Error: %s", response.Error) + err = db.Migrator().AlterColumn(&model.ExecutionCache{}, "ExecutionTemplate") + if err != nil { + glog.Fatalf("Failed to update the execution template type. Error: %s", err) } var tableNames []string db.Raw(`show tables`).Pluck("Tables_in_caches", &tableNames) for _, tableName := range tableNames { - log.Printf(tableName) + log.Printf("%s", tableName) } return storage.NewDB(db) diff --git a/backend/src/cache/model/execution_cache.go b/backend/src/cache/model/execution_cache.go index f3cb8bdcbe9..66fec0c91d2 100644 --- a/backend/src/cache/model/execution_cache.go +++ b/backend/src/cache/model/execution_cache.go @@ -15,12 +15,12 @@ package model type ExecutionCache struct { - ID int64 `gorm:"column:ID; not null; primary_key; AUTO_INCREMENT;"` + ID int64 `gorm:"column:ID; not null; primaryKey; AUTO_INCREMENT; index:composite_id_idx"` ExecutionCacheKey string `gorm:"column:ExecutionCacheKey; not null; index:idx_cache_key;"` ExecutionTemplate string `gorm:"column:ExecutionTemplate; not null;"` ExecutionOutput string `gorm:"column:ExecutionOutput; not null;"` MaxCacheStaleness int64 `gorm:"column:MaxCacheStaleness; not null;"` - StartedAtInSec int64 `gorm:"column:StartedAtInSec; not null;"` + StartedAtInSec int64 `gorm:"column:StartedAtInSec; not null; index:composite_id_idx;"` EndedAtInSec int64 `gorm:"column:EndedAtInSec; not null;"` } diff --git a/backend/src/cache/server/client_manager_fake.go b/backend/src/cache/server/client_manager_fake.go index 7779bcfed6b..b3af65aecf3 100644 --- a/backend/src/cache/server/client_manager_fake.go +++ b/backend/src/cache/server/client_manager_fake.go @@ -67,7 +67,11 @@ func (f *FakeClientManager) DB() *storage.DB { } func (f *FakeClientManager) Close() error { - return f.db.Close() + sqlDB, err := f.db.DB.DB() + if err != nil { + return err + } + return sqlDB.Close() } func (f *FakeClientManager) KubernetesCoreClient() client.KubernetesCoreInterface { diff --git a/backend/src/cache/server/mutation.go b/backend/src/cache/server/mutation.go index 3f76d129ad0..407679fe4f0 100644 --- a/backend/src/cache/server/mutation.go +++ b/backend/src/cache/server/mutation.go @@ -167,7 +167,7 @@ func MutatePodIfCached(req *v1beta1.AdmissionRequest, clientMgr ClientManagerInt // Image selected from Google Container Register(gcr) for it small size, gcr since there // is not image pull rate limit. 
For more info see issue: https://github.com/kubeflow/pipelines/issues/4099 - image := "registry.k8s.io/busybox" + image := "ghcr.io/containerd/busybox" if v, ok := os.LookupEnv("CACHE_IMAGE"); ok { image = v } diff --git a/backend/src/cache/server/mutation_test.go b/backend/src/cache/server/mutation_test.go index 52d92ee27c9..b0b082f3580 100644 --- a/backend/src/cache/server/mutation_test.go +++ b/backend/src/cache/server/mutation_test.go @@ -198,7 +198,7 @@ func TestDefaultImage(t *testing.T) { patchOperation, err := MutatePodIfCached(&fakeAdmissionRequest, fakeClientManager) assert.Nil(t, err) container := patchOperation[0].Value.([]corev1.Container)[0] - require.Equal(t, "registry.k8s.io/busybox", container.Image) + require.Equal(t, "ghcr.io/containerd/busybox", container.Image) } func TestSetImage(t *testing.T) { diff --git a/backend/src/cache/server/watcher.go b/backend/src/cache/server/watcher.go index 2e9c9c2ce2e..85a729ef07f 100644 --- a/backend/src/cache/server/watcher.go +++ b/backend/src/cache/server/watcher.go @@ -37,7 +37,7 @@ func WatchPods(ctx context.Context, namespaceToWatch string, clientManager Clien watcher, err := k8sCore.PodClient(namespaceToWatch).Watch(ctx, listOptions) if err != nil { - log.Printf("Watcher error:" + err.Error()) + log.Printf("%s", "Watcher error:"+err.Error()) } for event := range watcher.ResultChan() { @@ -45,7 +45,7 @@ func WatchPods(ctx context.Context, namespaceToWatch string, clientManager Clien if event.Type == watch.Error { continue } - log.Printf((*pod).GetName()) + log.Printf("%s", (*pod).GetName()) if !isPodCompletedAndSucceeded(pod) { log.Printf("Pod %s is not completed or not in successful status.", pod.ObjectMeta.Name) @@ -102,7 +102,7 @@ func WatchPods(ctx context.Context, namespaceToWatch string, clientManager Clien } err = patchCacheID(ctx, k8sCore, pod, namespaceToWatch, cacheEntryCreated.ID) if err != nil { - log.Printf(err.Error()) + log.Printf("%s", err.Error()) } } } diff --git a/backend/src/cache/storage/db.go b/backend/src/cache/storage/db.go index 7b9154477e1..6a1e32af1ce 100644 --- a/backend/src/cache/storage/db.go +++ b/backend/src/cache/storage/db.go @@ -15,7 +15,7 @@ package storage import ( - "github.com/jinzhu/gorm" + "gorm.io/gorm" ) // DB a struct wrapping plain sql library with SQL dialect, to solve any feature diff --git a/backend/src/cache/storage/db_fake.go b/backend/src/cache/storage/db_fake.go index 58cec438fc3..3df72f89f95 100644 --- a/backend/src/cache/storage/db_fake.go +++ b/backend/src/cache/storage/db_fake.go @@ -18,19 +18,22 @@ import ( "fmt" "github.com/golang/glog" - "github.com/jinzhu/gorm" "github.com/kubeflow/pipelines/backend/src/cache/model" - _ "github.com/mattn/go-sqlite3" + "gorm.io/driver/sqlite" + "gorm.io/gorm" ) func NewFakeDB() (*DB, error) { // Initialize GORM - db, err := gorm.Open("sqlite3", ":memory:") + db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{}) if err != nil { return nil, fmt.Errorf("Could not create the GORM database: %v", err) } // Create tables - db.AutoMigrate(&model.ExecutionCache{}) + err = db.AutoMigrate(&model.ExecutionCache{}) + if err != nil { + return nil, fmt.Errorf("AutoMigrate failed: %v", err) + } return NewDB(db), nil } diff --git a/backend/src/cache/storage/execution_cache_store.go b/backend/src/cache/storage/execution_cache_store.go index fdab31b1a6f..c2150a5fab6 100644 --- a/backend/src/cache/storage/execution_cache_store.go +++ b/backend/src/cache/storage/execution_cache_store.go @@ -20,14 +20,13 @@ import ( "log" "strconv" - model 
"github.com/kubeflow/pipelines/backend/src/cache/model" + "github.com/kubeflow/pipelines/backend/src/cache/model" "github.com/kubeflow/pipelines/backend/src/common/util" ) type ExecutionCacheStoreInterface interface { GetExecutionCache(executionCacheKey string, cacheStaleness int64, maximumCacheStaleness int64) (*model.ExecutionCache, error) CreateExecutionCache(*model.ExecutionCache) (*model.ExecutionCache, error) - DeleteExecutionCache(executionCacheKey string) error } type ExecutionCacheStore struct { @@ -46,12 +45,12 @@ func (s *ExecutionCacheStore) GetExecutionCache(executionCacheKey string, cacheS } r, err := s.db.Table("execution_caches").Where("ExecutionCacheKey = ?", executionCacheKey).Rows() if err != nil { - return nil, fmt.Errorf("Failed to get execution cache: %q", executionCacheKey) + return nil, fmt.Errorf("Failed to get execution cache: %q, err: %v", executionCacheKey, err) } defer r.Close() executionCaches, err := s.scanRows(r, cacheStaleness) if err != nil { - return nil, fmt.Errorf("Failed to get execution cache: %q", executionCacheKey) + return nil, fmt.Errorf("Failed to scan rows on execution cache: %q, err: %v", executionCacheKey, err) } if len(executionCaches) == 0 { return nil, fmt.Errorf("Execution cache not found with cache key: %q", executionCacheKey) @@ -69,14 +68,20 @@ func (s *ExecutionCacheStore) cleanDatabase(maximumCacheStaleness int64) (int64, if maximumCacheStaleness < 0 { return 0, nil } + log.Printf("Cleaning cache entries older than maximumCacheStaleness=%d", maximumCacheStaleness) - db := s.db.Exec( - "DELETE FROM execution_caches WHERE " + - strconv.FormatInt(int64(s.time.Now().UTC().Unix()), 10) + " - StartedAtInSec" + - " > " + strconv.FormatInt(int64(maximumCacheStaleness), 10) + ";") - return db.RowsAffected, db.Error -} + cutoffTime := s.time.Now().UTC().Unix() - maximumCacheStaleness + result := s.db.Exec( + "DELETE FROM execution_caches WHERE StartedAtInSec < ?", + cutoffTime) + + if result.Error != nil { + return 0, fmt.Errorf("database cache expiration cleanup failed: %v", result.Error) + } + + return result.RowsAffected, nil +} func (s *ExecutionCacheStore) scanRows(rows *sql.Rows, podCacheStaleness int64) ([]*model.ExecutionCache, error) { var executionCaches []*model.ExecutionCache for rows.Next() { @@ -131,36 +136,43 @@ func getLatestCacheEntry(executionCaches []*model.ExecutionCache) (*model.Execut } func (s *ExecutionCacheStore) CreateExecutionCache(executionCache *model.ExecutionCache) (*model.ExecutionCache, error) { - log.Println("Input cache: " + executionCache.ExecutionCacheKey) - newExecutionCache := *executionCache - log.Println("New cache key: " + newExecutionCache.ExecutionCacheKey) - now := s.time.Now().UTC().Unix() - - newExecutionCache.StartedAtInSec = now - // TODO: ended time need to be modified after demo version. 
- newExecutionCache.EndedAtInSec = now - - ok := s.db.NewRecord(newExecutionCache) - if !ok { - return nil, fmt.Errorf("Failed to create a new execution cache") + log.Printf("checking for existing row with cache key: %s before insertion", executionCache.ExecutionCacheKey) + + r, err := s.db.Table("execution_caches").Where("ExecutionCacheKey = ?", executionCache.ExecutionCacheKey).Rows() + if err != nil { + log.Printf("Failed to get execution cache with key: %s, err: %v", executionCache.ExecutionCacheKey, err) + return nil, err } - var rowInsert model.ExecutionCache - d := s.db.Create(&newExecutionCache).Scan(&rowInsert) - if d.Error != nil { - return nil, d.Error + + rowCount := 0 + + for r.Next() { + rowCount++ } - log.Println("Cache entry created with cache key: " + newExecutionCache.ExecutionCacheKey) - log.Println(newExecutionCache.ExecutionTemplate) - log.Println(rowInsert.ID) - return &rowInsert, nil -} + log.Printf("number of rows returned for existing rows check: %d", rowCount) + + if rowCount == 0 { + log.Printf("creating new exec cache row for key: %s", executionCache.ExecutionCacheKey) + newExecutionCache := *executionCache + now := s.time.Now().UTC().Unix() + + newExecutionCache.StartedAtInSec = now + newExecutionCache.EndedAtInSec = now + + // GORM v2 removed NewRecord(); it was unreliable as it only checked whether the primary key was 0 / "" / nil + + var rowInsert model.ExecutionCache + d := s.db.Create(&newExecutionCache).Scan(&rowInsert) + if d.Error != nil { + return nil, fmt.Errorf("failed to create new execution cache: %w", d.Error) + } -func (s *ExecutionCacheStore) DeleteExecutionCache(executionCacheID string) error { - db := s.db.Delete(&model.ExecutionCache{}, "ID = ?", executionCacheID) - if db.Error != nil { - return db.Error + log.Printf("cache entry created successfully with key: %s, template: %v, row id: %d", executionCache.ExecutionCacheKey, executionCache.ExecutionTemplate, rowInsert.ID) + return &rowInsert, nil + } else { + // return an error to prevent returning non-inserted user-provided structs and align with the unit test + return nil, fmt.Errorf("execution cache with key %s already exists, failed to create new execution cache", executionCache.ExecutionCacheKey) } - return nil } // factory function for execution cache store diff --git a/backend/src/cache/storage/execution_cache_store_test.go b/backend/src/cache/storage/execution_cache_store_test.go index fa47c539159..9fb1e9ba949 100644 --- a/backend/src/cache/storage/execution_cache_store_test.go +++ b/backend/src/cache/storage/execution_cache_store_test.go @@ -24,6 +24,12 @@ import ( "github.com/stretchr/testify/require" ) +func closeDB(t *testing.T, db *DB) { + sqlDB, err := db.DB.DB() + require.NoError(t, err) + sqlDB.Close() +} + func createExecutionCache(cacheKey string, cacheOutput string) *model.ExecutionCache { return &model.ExecutionCache{ ExecutionCacheKey: cacheKey, @@ -37,7 +43,7 @@ func createExecutionCache(cacheKey string, cacheOutput string) *model.ExecutionC func TestCreateExecutionCache(t *testing.T) { db := NewFakeDBOrFatal() - defer db.Close() + defer closeDB(t, db) executionCacheStore := NewExecutionCacheStore(db, util.NewFakeTimeForEpoch()) executionCacheExpected := model.ExecutionCache{ ID: 1, @@ -70,17 +76,17 @@ func TestCreateExecutionCacheWithDuplicateRecord(t *testing.T) { EndedAtInSec: 1, } db := NewFakeDBOrFatal() - defer db.Close() + defer closeDB(t, db) executionCacheStore := NewExecutionCacheStore(db, util.NewFakeTimeForEpoch()) 
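// An illustrative, self-contained sketch (assumption: not part of the change set above) of the
// GORM v2 API that the cache client-manager, fake DB, and store changes above rely on:
// gorm.Open takes a dialector instead of a driver-name string, AutoMigrate returns a plain
// error, Create reports its outcome through the returned *gorm.DB (NewRecord no longer
// exists), and the connection is closed via the underlying *sql.DB. The gorm.io/gorm and
// gorm.io/driver/sqlite module paths are assumed to be available.
package main

import (
	"log"

	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

type ExecutionCache struct {
	ID                int64  `gorm:"column:ID;primaryKey;autoIncrement"`
	ExecutionCacheKey string `gorm:"column:ExecutionCacheKey;index:idx_cache_key"`
	StartedAtInSec    int64  `gorm:"column:StartedAtInSec"`
}

func main() {
	// Open an in-memory SQLite database through the v2 dialector API.
	db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
	if err != nil {
		log.Fatalf("failed to open database: %v", err)
	}

	// AutoMigrate now returns an error directly instead of a *gorm.DB with .Error.
	if err := db.AutoMigrate(&ExecutionCache{}); err != nil {
		log.Fatalf("failed to migrate schema: %v", err)
	}

	// Create fills in the auto-increment primary key on the passed struct and reports
	// errors and affected rows on the returned *gorm.DB.
	entry := ExecutionCache{ExecutionCacheKey: "example-key", StartedAtInSec: 1}
	if res := db.Create(&entry); res.Error != nil {
		log.Fatalf("failed to insert row: %v", res.Error)
	}
	log.Printf("inserted row with ID=%d", entry.ID)

	// v2 removed db.Close(); retrieve the underlying *sql.DB and close that instead.
	sqlDB, err := db.DB()
	if err != nil {
		log.Fatalf("failed to get underlying sql.DB: %v", err)
	}
	defer sqlDB.Close()
}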
executionCacheStore.CreateExecutionCache(executionCache) cache, err := executionCacheStore.CreateExecutionCache(executionCache) assert.Nil(t, cache) - assert.Contains(t, err.Error(), "Failed to create a new execution cache") + assert.Contains(t, err.Error(), "failed to create new execution cache") } func TestGetExecutionCache(t *testing.T) { db := NewFakeDBOrFatal() - defer db.Close() + defer closeDB(t, db) executionCacheStore := NewExecutionCacheStore(db, util.NewFakeTimeForEpoch()) executionCacheStore.CreateExecutionCache(createExecutionCache("testKey", "testOutput")) @@ -102,7 +108,7 @@ func TestGetExecutionCache(t *testing.T) { func TestGetExecutionCacheWithEmptyCacheEntry(t *testing.T) { db := NewFakeDBOrFatal() - defer db.Close() + defer closeDB(t, db) executionCacheStore := NewExecutionCacheStore(db, util.NewFakeTimeForEpoch()) executionCacheStore.CreateExecutionCache(createExecutionCache("testKey", "testOutput")) @@ -114,20 +120,20 @@ func TestGetExecutionCacheWithEmptyCacheEntry(t *testing.T) { func TestGetExecutionCacheWithLatestCacheEntry(t *testing.T) { db := NewFakeDBOrFatal() - defer db.Close() + defer closeDB(t, db) executionCacheStore := NewExecutionCacheStore(db, util.NewFakeTimeForEpoch()) executionCacheStore.CreateExecutionCache(createExecutionCache("testKey", "testOutput")) executionCacheStore.CreateExecutionCache(createExecutionCache("testKey", "testOutput2")) executionCacheExpected := model.ExecutionCache{ - ID: 2, + ID: 1, ExecutionCacheKey: "testKey", ExecutionTemplate: "testTemplate", - ExecutionOutput: "testOutput2", + ExecutionOutput: "testOutput", MaxCacheStaleness: -1, - StartedAtInSec: 2, - EndedAtInSec: 2, + StartedAtInSec: 1, + EndedAtInSec: 1, } var executionCache *model.ExecutionCache executionCache, err := executionCacheStore.GetExecutionCache("testKey", -1, -1) @@ -137,7 +143,7 @@ func TestGetExecutionCacheWithLatestCacheEntry(t *testing.T) { func TestGetExecutionCacheWithExpiredDatabaseCacheStaleness(t *testing.T) { db := NewFakeDBOrFatal() - defer db.Close() + defer closeDB(t, db) executionCacheStore := NewExecutionCacheStore(db, util.NewFakeTimeForEpoch()) executionCacheToPersist := &model.ExecutionCache{ ExecutionCacheKey: "testKey", @@ -155,7 +161,7 @@ func TestGetExecutionCacheWithExpiredDatabaseCacheStaleness(t *testing.T) { func TestGetExecutionCacheWithExpiredAnnotationCacheStaleness(t *testing.T) { db := NewFakeDBOrFatal() - defer db.Close() + defer closeDB(t, db) executionCacheStore := NewExecutionCacheStore(db, util.NewFakeTimeForEpoch()) executionCacheToPersist := &model.ExecutionCache{ ExecutionCacheKey: "testKey", @@ -175,7 +181,7 @@ func TestGetExecutionCacheWithExpiredAnnotationCacheStaleness(t *testing.T) { func TestGetExecutionCacheWithExpiredMaximumCacheStaleness(t *testing.T) { db := NewFakeDBOrFatal() - defer db.Close() + defer closeDB(t, db) executionCacheStore := NewExecutionCacheStore(db, util.NewFakeTimeForEpoch()) executionCacheToPersist := &model.ExecutionCache{ ExecutionCacheKey: "testKey", @@ -192,19 +198,3 @@ func TestGetExecutionCacheWithExpiredMaximumCacheStaleness(t *testing.T) { require.Contains(t, err.Error(), "Execution cache not found") require.Nil(t, executionCache) } - -func TestDeleteExecutionCache(t *testing.T) { - db := NewFakeDBOrFatal() - defer db.Close() - executionCacheStore := NewExecutionCacheStore(db, util.NewFakeTimeForEpoch()) - executionCacheStore.CreateExecutionCache(createExecutionCache("testKey", "testOutput")) - executionCache, err := executionCacheStore.GetExecutionCache("testKey", -1, -1) - 
assert.Nil(t, err) - assert.NotNil(t, executionCache) - - err = executionCacheStore.DeleteExecutionCache("1") - assert.Nil(t, err) - _, err = executionCacheStore.GetExecutionCache("testKey", -1, -1) - assert.NotNil(t, err) - assert.Contains(t, err.Error(), "not found") -} diff --git a/backend/src/common/client/api_server/util.go b/backend/src/common/client/api_server/util.go index 68494bb163d..638ff6172f9 100644 --- a/backend/src/common/client/api_server/util.go +++ b/backend/src/common/client/api_server/util.go @@ -1,21 +1,22 @@ package api_server import ( + "crypto/tls" "fmt" "net/http" + "net/url" "os" "time" - workflowapi "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" "github.com/go-openapi/runtime" httptransport "github.com/go-openapi/runtime/client" "github.com/go-openapi/strfmt" "github.com/kubeflow/pipelines/backend/src/common/util" + testconfig "github.com/kubeflow/pipelines/backend/test/config" "github.com/pkg/errors" _ "k8s.io/client-go/plugin/pkg/client/auth/gcp" "k8s.io/client-go/rest" "k8s.io/client-go/tools/clientcmd" - "sigs.k8s.io/yaml" ) const ( @@ -46,33 +47,46 @@ var SATokenVolumeProjectionAuth runtime.ClientAuthInfoWriter = runtime.ClientAut return nil }) -func toDateTimeTestOnly(timeInSec int64) strfmt.DateTime { - result, err := strfmt.ParseDateTime(time.Unix(timeInSec, 0).String()) - if err != nil { - return strfmt.NewDateTime() - } - return result -} - -func toWorkflowTestOnly(workflow string) *workflowapi.Workflow { - var result workflowapi.Workflow - err := yaml.Unmarshal([]byte(workflow), &result) - if err != nil { - return nil - } - return &result +func TokenToAuthInfo(userToken string) runtime.ClientAuthInfoWriter { + return runtime.ClientAuthInfoWriterFunc( + func(r runtime.ClientRequest, _ strfmt.Registry) error { + err := r.SetHeaderParam("Authorization", "Bearer "+userToken) + if err != nil { + return err + } + return nil + }) } func NewHTTPRuntime(clientConfig clientcmd.ClientConfig, debug bool) ( *httptransport.Runtime, error, ) { - if os.Getenv("LOCAL_API_SERVER") == "true" { + if !*testconfig.InClusterRun { httpClient := http.DefaultClient - runtime := httptransport.NewWithClient("localhost:8888", "", []string{"http"}, httpClient) + var scheme []string + parsedUrl, err := url.Parse(*testconfig.APIURL) + if err != nil { + return nil, err + } + host := parsedUrl.Host + if parsedUrl.Scheme != "" { + scheme = append(scheme, parsedUrl.Scheme) + } + if testconfig.APIScheme != nil { + scheme = append(scheme, *testconfig.APIScheme) + } + if *testconfig.DisableTLSCheck { + tr := &http.Transport{ + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + } + httpClient = &http.Client{Transport: tr} + } + + runtimeClient := httptransport.NewWithClient(host, "", scheme, httpClient) if debug { - runtime.SetDebug(true) + runtimeClient.SetDebug(true) } - return runtime, nil + return runtimeClient, nil } // Creating k8 client @@ -84,22 +98,22 @@ func NewHTTPRuntime(clientConfig clientcmd.ClientConfig, debug bool) ( // Create API client httpClient := k8Client.RESTClient().(*rest.RESTClient).Client masterIPAndPort := util.ExtractMasterIPAndPort(config) - runtime := httptransport.NewWithClient(masterIPAndPort, fmt.Sprintf(apiServerBasePath, namespace), + runtimeClient := httptransport.NewWithClient(masterIPAndPort, fmt.Sprintf(apiServerBasePath, namespace), nil, httpClient) if debug { - runtime.SetDebug(true) + runtimeClient.SetDebug(true) } - return runtime, err + return runtimeClient, err } func NewKubeflowInClusterHTTPRuntime(namespace string, 
debug bool) *httptransport.Runtime { schemes := []string{"http"} httpClient := http.Client{} - runtime := httptransport.NewWithClient(fmt.Sprintf(apiServerKubeflowInClusterBasePath, namespace), "/", schemes, &httpClient) - runtime.SetDebug(debug) - return runtime + runtimeClient := httptransport.NewWithClient(fmt.Sprintf(apiServerKubeflowInClusterBasePath, namespace), "/", schemes, &httpClient) + runtimeClient.SetDebug(debug) + return runtimeClient } func CreateErrorFromAPIStatus(error string, code int32) error { @@ -107,6 +121,6 @@ func CreateErrorFromAPIStatus(error string, code int32) error { } func CreateErrorCouldNotRecoverAPIStatus(err error) error { - return fmt.Errorf("Issue calling the service. Use the '--debug' flag to see the HTTP request/response. Raw error from the client: %v", + return fmt.Errorf("issue calling the service. Use the '--debug' flag to see the HTTP request/response. Raw error from the client: %v", err.Error()) } diff --git a/backend/src/common/client/api_server/v1/experiment_client.go b/backend/src/common/client/api_server/v1/experiment_client.go index c22d06c0b8f..ffb680de629 100644 --- a/backend/src/common/client/api_server/v1/experiment_client.go +++ b/backend/src/common/client/api_server/v1/experiment_client.go @@ -85,14 +85,14 @@ func (c *ExperimentClient) Create(parameters *params.ExperimentServiceCreateExpe response, err := c.apiClient.ExperimentService.ExperimentServiceCreateExperimentV1(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.ExperimentServiceCreateExperimentV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } return nil, util.NewUserError(err, - fmt.Sprintf("Failed to create experiment. Params: '%+v'. Body: '%+v'", parameters, parameters.Body), - fmt.Sprintf("Failed to create experiment '%v'", parameters.Body.Name)) + fmt.Sprintf("Failed to create experiment. Params: '%+v'. 
Body: '%+v'", parameters, parameters.Experiment), + fmt.Sprintf("Failed to create experiment '%v'", parameters.Experiment.Name)) } return response.Payload, nil @@ -109,7 +109,7 @@ func (c *ExperimentClient) Get(parameters *params.ExperimentServiceGetExperiment response, err := c.apiClient.ExperimentService.ExperimentServiceGetExperimentV1(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.ExperimentServiceGetExperimentV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -133,7 +133,7 @@ func (c *ExperimentClient) List(parameters *params.ExperimentServiceListExperime response, err := c.apiClient.ExperimentService.ExperimentServiceListExperimentsV1(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.ExperimentServiceListExperimentsV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -156,7 +156,7 @@ func (c *ExperimentClient) Delete(parameters *params.ExperimentServiceDeleteExpe _, err := c.apiClient.ExperimentService.ExperimentServiceDeleteExperimentV1(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.ExperimentServiceDeleteExperimentV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -210,7 +210,7 @@ func (c *ExperimentClient) Archive(parameters *params.ExperimentServiceArchiveEx if err != nil { if defaultError, ok := err.(*params.ExperimentServiceArchiveExperimentV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -234,7 +234,7 @@ func (c *ExperimentClient) Unarchive(parameters *params.ExperimentServiceUnarchi if err != nil { if defaultError, ok := err.(*params.ExperimentServiceUnarchiveExperimentV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } diff --git a/backend/src/common/client/api_server/v1/experiment_client_fake.go b/backend/src/common/client/api_server/v1/experiment_client_fake.go index c8f9301e5b5..959b7f7ea90 100644 --- a/backend/src/common/client/api_server/v1/experiment_client_fake.go +++ b/backend/src/common/client/api_server/v1/experiment_client_fake.go @@ -43,11 +43,11 @@ func NewExperimentClientFake() *ExperimentClientFake { func (c *ExperimentClientFake) Create(params *experimentparams.ExperimentServiceCreateExperimentV1Params) ( *experimentmodel.APIExperiment, error) { - switch params.Body.Name { + switch params.Experiment.Name { case ExperimentForClientErrorTest: return nil, fmt.Errorf(ClientErrorString) default: - 
return getDefaultExperiment("500", params.Body.Name), nil + return getDefaultExperiment("500", params.Experiment.Name), nil } } diff --git a/backend/src/common/client/api_server/v1/healthz_client.go b/backend/src/common/client/api_server/v1/healthz_client.go index a3ca7798185..6ae2bdf16c8 100644 --- a/backend/src/common/client/api_server/v1/healthz_client.go +++ b/backend/src/common/client/api_server/v1/healthz_client.go @@ -69,7 +69,7 @@ func (c *HealthzClient) GetHealthz() (*model.APIGetHealthzResponse, error) { response, err := c.apiClient.HealthzService.HealthzServiceGetHealthz(parameters, api_server.PassThroughAuth) if err != nil { if defaultError, ok := err.(*params.HealthzServiceGetHealthzDefault); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } diff --git a/backend/src/common/client/api_server/v1/job_client.go b/backend/src/common/client/api_server/v1/job_client.go index 0c21f301e14..6d00b72930a 100644 --- a/backend/src/common/client/api_server/v1/job_client.go +++ b/backend/src/common/client/api_server/v1/job_client.go @@ -85,14 +85,14 @@ func (c *JobClient) Create(parameters *params.JobServiceCreateJobParams) (*model response, err := c.apiClient.JobService.JobServiceCreateJob(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.JobServiceCreateJobDefault); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } return nil, util.NewUserError(err, - fmt.Sprintf("Failed to create job. Params: '%+v'. Body: '%+v'", parameters, parameters.Body), - fmt.Sprintf("Failed to create job '%v'", parameters.Body.Name)) + fmt.Sprintf("Failed to create job. Params: '%+v'. 
Body: '%+v'", parameters, parameters.Job), + fmt.Sprintf("Failed to create job '%v'", parameters.Job.Name)) } return response.Payload, nil @@ -109,7 +109,7 @@ func (c *JobClient) Get(parameters *params.JobServiceGetJobParams) (*model.APIJo response, err := c.apiClient.JobService.JobServiceGetJob(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.JobServiceGetJobDefault); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -132,7 +132,7 @@ func (c *JobClient) Delete(parameters *params.JobServiceDeleteJobParams) error { _, err := c.apiClient.JobService.JobServiceDeleteJob(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.JobServiceDeleteJobDefault); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -155,7 +155,7 @@ func (c *JobClient) Enable(parameters *params.JobServiceEnableJobParams) error { _, err := c.apiClient.JobService.JobServiceEnableJob(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.JobServiceEnableJobDefault); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -178,7 +178,7 @@ func (c *JobClient) Disable(parameters *params.JobServiceDisableJobParams) error _, err := c.apiClient.JobService.JobServiceDisableJob(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.JobServiceDisableJobDefault); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -202,7 +202,7 @@ func (c *JobClient) List(parameters *params.JobServiceListJobsParams) ( response, err := c.apiClient.JobService.JobServiceListJobs(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.JobServiceListJobsDefault); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } diff --git a/backend/src/common/client/api_server/v1/job_client_fake.go b/backend/src/common/client/api_server/v1/job_client_fake.go index 37510997a8e..724aa417173 100644 --- a/backend/src/common/client/api_server/v1/job_client_fake.go +++ b/backend/src/common/client/api_server/v1/job_client_fake.go @@ -44,11 +44,11 @@ func NewJobClientFake() *JobClientFake { func (c *JobClientFake) Create(params *jobparams.JobServiceCreateJobParams) ( *jobmodel.APIJob, error) { - switch params.Body.Name { + switch params.Job.Name { case JobForClientErrorTest: return nil, fmt.Errorf(ClientErrorString) default: - return getDefaultJob("500", params.Body.Name), nil + return getDefaultJob("500", params.Job.Name), nil } } diff --git 
a/backend/src/common/client/api_server/v1/pipeline_client.go b/backend/src/common/client/api_server/v1/pipeline_client.go index 6bfeecaa75f..f932418b8d6 100644 --- a/backend/src/common/client/api_server/v1/pipeline_client.go +++ b/backend/src/common/client/api_server/v1/pipeline_client.go @@ -55,7 +55,7 @@ func (c *PipelineClient) UpdateDefaultVersion(parameters *params.PipelineService _, err := c.apiClient.PipelineService.PipelineServiceUpdatePipelineDefaultVersionV1(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.PipelineServiceGetPipelineV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -108,14 +108,14 @@ func (c *PipelineClient) Create(parameters *params.PipelineServiceCreatePipeline response, err := c.apiClient.PipelineService.PipelineServiceCreatePipelineV1(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.PipelineServiceCreatePipelineV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } return nil, util.NewUserError(err, fmt.Sprintf("Failed to create pipeline. Params: '%v'", parameters), - fmt.Sprintf("Failed to create pipeline from URL '%v'", parameters.Body.URL.PipelineURL)) + fmt.Sprintf("Failed to create pipeline from URL '%v'", parameters.Pipeline.URL.PipelineURL)) } return response.Payload, nil @@ -132,7 +132,7 @@ func (c *PipelineClient) Get(parameters *params.PipelineServiceGetPipelineV1Para response, err := c.apiClient.PipelineService.PipelineServiceGetPipelineV1(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.PipelineServiceGetPipelineV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -155,7 +155,7 @@ func (c *PipelineClient) Delete(parameters *params.PipelineServiceDeletePipeline _, err := c.apiClient.PipelineService.PipelineServiceDeletePipelineV1(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.PipelineServiceDeletePipelineV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -178,7 +178,7 @@ func (c *PipelineClient) DeletePipelineVersion(parameters *params.PipelineServic _, err := c.apiClient.PipelineService.PipelineServiceDeletePipelineVersionV1(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.PipelineServiceDeletePipelineVersionV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -200,7 +200,7 @@ func (c *PipelineClient) GetTemplate(parameters 
*params.PipelineServiceGetTempla response, err := c.apiClient.PipelineService.PipelineServiceGetTemplate(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.PipelineServiceGetTemplateDefault); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -211,7 +211,7 @@ func (c *PipelineClient) GetTemplate(parameters *params.PipelineServiceGetTempla } // Unmarshal response - return template.New([]byte(response.Payload.Template), true) + return template.New([]byte(response.Payload.Template), true, nil) } func (c *PipelineClient) List(parameters *params.PipelineServiceListPipelinesV1Params) ( @@ -225,7 +225,7 @@ func (c *PipelineClient) List(parameters *params.PipelineServiceListPipelinesV1P response, err := c.apiClient.PipelineService.PipelineServiceListPipelinesV1(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.PipelineServiceListPipelinesV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -278,14 +278,14 @@ func (c *PipelineClient) CreatePipelineVersion(parameters *params.PipelineServic response, err := c.apiClient.PipelineService.PipelineServiceCreatePipelineVersionV1(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.PipelineServiceCreatePipelineVersionV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } return nil, util.NewUserError(err, fmt.Sprintf("Failed to create pipeline version. 
Params: '%v'", parameters), - fmt.Sprintf("Failed to create pipeline version from URL '%v'", parameters.Body.PackageURL.PipelineURL)) + fmt.Sprintf("Failed to create pipeline version from URL '%v'", parameters.Version.PackageURL.PipelineURL)) } return response.Payload, nil @@ -302,7 +302,7 @@ func (c *PipelineClient) ListPipelineVersions(parameters *params.PipelineService response, err := c.apiClient.PipelineService.PipelineServiceListPipelineVersionsV1(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.PipelineServiceListPipelineVersionsV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -326,7 +326,7 @@ func (c *PipelineClient) GetPipelineVersion(parameters *params.PipelineServiceGe response, err := c.apiClient.PipelineService.PipelineServiceGetPipelineVersionV1(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.PipelineServiceGetPipelineVersionV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -350,7 +350,7 @@ func (c *PipelineClient) GetPipelineVersionTemplate(parameters *params.PipelineS response, err := c.apiClient.PipelineService.PipelineServiceGetPipelineVersionTemplate(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.PipelineServiceGetPipelineVersionTemplateDefault); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -361,5 +361,5 @@ func (c *PipelineClient) GetPipelineVersionTemplate(parameters *params.PipelineS } // Unmarshal response - return template.New([]byte(response.Payload.Template), true) + return template.New([]byte(response.Payload.Template), true, nil) } diff --git a/backend/src/common/client/api_server/v1/pipeline_client_fake.go b/backend/src/common/client/api_server/v1/pipeline_client_fake.go index aefe0e8fa9f..72ab821d1eb 100644 --- a/backend/src/common/client/api_server/v1/pipeline_client_fake.go +++ b/backend/src/common/client/api_server/v1/pipeline_client_fake.go @@ -79,11 +79,11 @@ func NewPipelineClientFake() *PipelineClientFake { func (c *PipelineClientFake) Create(params *pipelineparams.PipelineServiceCreatePipelineV1Params) ( *pipelinemodel.APIPipeline, error) { - switch params.Body.URL.PipelineURL { + switch params.Pipeline.URL.PipelineURL { case PipelineInvalidURL: return nil, fmt.Errorf(ClientErrorString) default: - return getDefaultPipeline(path.Base(params.Body.URL.PipelineURL)), nil + return getDefaultPipeline(path.Base(params.Pipeline.URL.PipelineURL)), nil } } diff --git a/backend/src/common/client/api_server/v1/run_client.go b/backend/src/common/client/api_server/v1/run_client.go index 608d6e36693..c2878959dcb 100644 --- a/backend/src/common/client/api_server/v1/run_client.go +++ b/backend/src/common/client/api_server/v1/run_client.go @@ -86,14 +86,14 @@ func (c *RunClient) Create(parameters *params.RunServiceCreateRunV1Params) (*mod response, err := 
c.apiClient.RunService.RunServiceCreateRunV1(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.RunServiceGetRunV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } return nil, nil, util.NewUserError(err, fmt.Sprintf("Failed to create run. Params: '%+v'", parameters), - fmt.Sprintf("Failed to create run '%v'", parameters.Body.Name)) + fmt.Sprintf("Failed to create run '%v'", parameters.Run.Name)) } // Unmarshal response @@ -120,7 +120,7 @@ func (c *RunClient) Get(parameters *params.RunServiceGetRunV1Params) (*model.API response, err := c.apiClient.RunService.RunServiceGetRunV1(parameters, c.authInfoWriter) if err != nil { if defaultError, ok := err.(*params.RunServiceGetRunV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -154,7 +154,7 @@ func (c *RunClient) Archive(parameters *params.RunServiceArchiveRunV1Params) err if err != nil { if defaultError, ok := err.(*params.RunServiceListRunsV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -178,7 +178,7 @@ func (c *RunClient) Unarchive(parameters *params.RunServiceUnarchiveRunV1Params) if err != nil { if defaultError, ok := err.(*params.RunServiceListRunsV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -202,7 +202,7 @@ func (c *RunClient) Delete(parameters *params.RunServiceDeleteRunV1Params) error if err != nil { if defaultError, ok := err.(*params.RunServiceListRunsV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } @@ -227,7 +227,7 @@ func (c *RunClient) List(parameters *params.RunServiceListRunsV1Params) ( if err != nil { if defaultError, ok := err.(*params.RunServiceListRunsV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } diff --git a/backend/src/common/client/api_server/v1/visualization_client.go b/backend/src/common/client/api_server/v1/visualization_client.go index ec6887ab670..ee73b5b0738 100644 --- a/backend/src/common/client/api_server/v1/visualization_client.go +++ b/backend/src/common/client/api_server/v1/visualization_client.go @@ -65,14 +65,14 @@ func (c *VisualizationClient) Create(parameters *params.VisualizationServiceCrea response, err := 
c.apiClient.VisualizationService.VisualizationServiceCreateVisualizationV1(parameters, api_server.PassThroughAuth) if err != nil { if defaultError, ok := err.(*params.VisualizationServiceCreateVisualizationV1Default); ok { - err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + err = api_server.CreateErrorFromAPIStatus(defaultError.Payload.Message, defaultError.Payload.Code) } else { err = api_server.CreateErrorCouldNotRecoverAPIStatus(err) } return nil, util.NewUserError(err, - fmt.Sprintf("Failed to create visualization. Params: '%+v'. Body: '%+v'", parameters, parameters.Body), - fmt.Sprintf("Failed to create visualization '%v'", parameters.Body.Type)) + fmt.Sprintf("Failed to create visualization. Params: '%+v'. Body: '%+v'", parameters, parameters.Visualization), + fmt.Sprintf("Failed to create visualization '%v'", parameters.Visualization.Type)) } return response.Payload, nil diff --git a/backend/src/common/client/api_server/v1/visualization_client_fake.go b/backend/src/common/client/api_server/v1/visualization_client_fake.go index 5afb2ca331e..4dad57e48d6 100644 --- a/backend/src/common/client/api_server/v1/visualization_client_fake.go +++ b/backend/src/common/client/api_server/v1/visualization_client_fake.go @@ -21,12 +21,12 @@ func NewVisualizationClientFake() *VisualizationClientFake { func (c *VisualizationClientFake) Create(params *params.VisualizationServiceCreateVisualizationV1Params) ( *model.APIVisualization, error) { var arguments VisualizationArguments - err := json.Unmarshal([]byte(params.Body.Arguments), &arguments) + err := json.Unmarshal([]byte(params.Visualization.Arguments), &arguments) if err != nil { return nil, err } if arguments.fail { return nil, fmt.Errorf(ClientErrorString) } - return params.Body, nil + return params.Visualization, nil } diff --git a/backend/src/common/client/api_server/v2/experiment_client.go b/backend/src/common/client/api_server/v2/experiment_client.go index a4e682c9ae1..7a2b4f20572 100644 --- a/backend/src/common/client/api_server/v2/experiment_client.go +++ b/backend/src/common/client/api_server/v2/experiment_client.go @@ -17,6 +17,8 @@ package api_server_v2 import ( "fmt" + httptransport "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" apiclient "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_client" params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_client/experiment_service" @@ -44,12 +46,12 @@ type ExperimentClient struct { func NewExperimentClient(clientConfig clientcmd.ClientConfig, debug bool) ( *ExperimentClient, error) { - runtime, err := api_server.NewHTTPRuntime(clientConfig, debug) + httpRuntime, err := api_server.NewHTTPRuntime(clientConfig, debug) if err != nil { return nil, fmt.Errorf("Error occurred when creating experiment client: %w", err) } - apiClient := apiclient.New(runtime, strfmt.Default) + apiClient := apiclient.New(httpRuntime, strfmt.Default) // Creating experiment client return &ExperimentClient{ @@ -60,9 +62,25 @@ func NewExperimentClient(clientConfig clientcmd.ClientConfig, debug bool) ( func NewKubeflowInClusterExperimentClient(namespace string, debug bool) ( *ExperimentClient, error) { - runtime := api_server.NewKubeflowInClusterHTTPRuntime(namespace, debug) + httpRuntime := api_server.NewKubeflowInClusterHTTPRuntime(namespace, debug) + + apiClient := apiclient.New(httpRuntime, strfmt.Default) + + // Creating experiment client + return &ExperimentClient{ + apiClient: apiClient, 
+ }, nil +} + +func NewMultiUserExperimentClient(clientConfig clientcmd.ClientConfig, userToken string, debug bool) ( + *ExperimentClient, error) { - apiClient := apiclient.New(runtime, strfmt.Default) + httpRuntime, err := api_server.NewHTTPRuntime(clientConfig, debug) + if err != nil { + return nil, fmt.Errorf("error occurred when creating experiment client: %w", err) + } + httpRuntime.DefaultAuthentication = httptransport.BearerToken(userToken) + apiClient := apiclient.New(httpRuntime, strfmt.Default) // Creating experiment client return &ExperimentClient{ @@ -81,8 +99,8 @@ func (c *ExperimentClient) Create(parameters *params.ExperimentServiceCreateExpe response, err := c.apiClient.ExperimentService.ExperimentServiceCreateExperiment(parameters) if err != nil { return nil, util.NewUserError(err, - fmt.Sprintf("Failed to create experiment. Params: '%+v'. Body: '%+v'", parameters, parameters.Body), - fmt.Sprintf("Failed to create experiment '%v'", parameters.Body.DisplayName)) + fmt.Sprintf("Failed to create experiment. Params: '%+v'. Body: '%+v'", parameters, parameters.Experiment), + fmt.Sprintf("Failed to create experiment '%v'", parameters.Experiment.DisplayName)) } return response.Payload, nil diff --git a/backend/src/common/client/api_server/v2/experiment_client_fake.go b/backend/src/common/client/api_server/v2/experiment_client_fake.go index 3c4511cc3be..78f665a1d93 100644 --- a/backend/src/common/client/api_server/v2/experiment_client_fake.go +++ b/backend/src/common/client/api_server/v2/experiment_client_fake.go @@ -37,7 +37,7 @@ func NewExperimentClientFake() *ExperimentClientFake { func (c *ExperimentClientFake) Create(parameters *params.ExperimentServiceCreateExperimentParams) ( *model.V2beta1Experiment, error) { - return getDefaultExperiment("500", parameters.Body.DisplayName), nil + return getDefaultExperiment("500", parameters.Experiment.DisplayName), nil } func (c *ExperimentClientFake) Get(parameters *params.ExperimentServiceGetExperimentParams) ( diff --git a/backend/src/common/client/api_server/v2/pipeline_client.go b/backend/src/common/client/api_server/v2/pipeline_client.go index 71faf77f71e..8ac992c66f1 100644 --- a/backend/src/common/client/api_server/v2/pipeline_client.go +++ b/backend/src/common/client/api_server/v2/pipeline_client.go @@ -49,12 +49,12 @@ type PipelineClient struct { func NewPipelineClient(clientConfig clientcmd.ClientConfig, debug bool) ( *PipelineClient, error) { - runtime, err := api_server.NewHTTPRuntime(clientConfig, debug) + httpRuntime, err := api_server.NewHTTPRuntime(clientConfig, debug) if err != nil { - return nil, fmt.Errorf("Error occurred when creating pipeline client: %w", err) + return nil, fmt.Errorf("error occurred when creating pipeline client: %w", err) } - apiClient := apiclient.New(runtime, strfmt.Default) + apiClient := apiclient.New(httpRuntime, strfmt.Default) // Creating pipeline client return &PipelineClient{ @@ -65,9 +65,9 @@ func NewPipelineClient(clientConfig clientcmd.ClientConfig, debug bool) ( func NewKubeflowInClusterPipelineClient(namespace string, debug bool) ( *PipelineClient, error) { - runtime := api_server.NewKubeflowInClusterHTTPRuntime(namespace, debug) + httpRuntime := api_server.NewKubeflowInClusterHTTPRuntime(namespace, debug) - apiClient := apiclient.New(runtime, strfmt.Default) + apiClient := apiclient.New(httpRuntime, strfmt.Default) // Creating pipeline client return &PipelineClient{ @@ -76,6 +76,22 @@ func NewKubeflowInClusterPipelineClient(namespace string, debug bool) ( }, nil } +func 
NewMultiUserPipelineClient(clientConfig clientcmd.ClientConfig, userToken string, debug bool) ( + *PipelineClient, error) { + httpRuntime, err := api_server.NewHTTPRuntime(clientConfig, debug) + if err != nil { + return nil, fmt.Errorf("error occurred when creating pipeline client: %w", err) + } + + apiClient := apiclient.New(httpRuntime, strfmt.Default) + + // Creating pipeline client + return &PipelineClient{ + apiClient: apiClient, + authInfoWriter: api_server.TokenToAuthInfo(userToken), + }, nil +} + func (c *PipelineClient) Create(parameters *params.PipelineServiceCreatePipelineParams) (*model.V2beta1Pipeline, error) { // Create context with timeout @@ -93,7 +109,7 @@ func (c *PipelineClient) Create(parameters *params.PipelineServiceCreatePipeline return nil, util.NewUserError(err, fmt.Sprintf("Failed to create pipeline. Params: '%v'", parameters), - fmt.Sprintf("Failed to create pipeline '%v'", parameters.Body.DisplayName)) + fmt.Sprintf("Failed to create pipeline '%v'", parameters.Pipeline.DisplayName)) } return response.Payload, nil diff --git a/backend/src/common/client/api_server/v2/pipeline_client_fake.go b/backend/src/common/client/api_server/v2/pipeline_client_fake.go index aabd9e1b9b0..717bf164d72 100644 --- a/backend/src/common/client/api_server/v2/pipeline_client_fake.go +++ b/backend/src/common/client/api_server/v2/pipeline_client_fake.go @@ -63,7 +63,7 @@ func NewPipelineClientFake() *PipelineClientFake { func (c *PipelineClientFake) Create(params *params.PipelineServiceCreatePipelineParams) ( *model.V2beta1Pipeline, error) { - return getDefaultPipeline(params.Body.PipelineID), nil + return getDefaultPipeline(params.Pipeline.PipelineID), nil } func (c *PipelineClientFake) CreatePipelineAndVersion(params *params.PipelineServiceCreatePipelineAndVersionParams) (*model.V2beta1Pipeline, error) { diff --git a/backend/src/common/client/api_server/v2/pipeline_upload_client.go b/backend/src/common/client/api_server/v2/pipeline_upload_client.go index e1235a1abdf..171a4d7d960 100644 --- a/backend/src/common/client/api_server/v2/pipeline_upload_client.go +++ b/backend/src/common/client/api_server/v2/pipeline_upload_client.go @@ -40,6 +40,8 @@ const ( type PipelineUploadInterface interface { UploadFile(filePath string, parameters *params.UploadPipelineParams) (*model.V2beta1Pipeline, error) + UploadPipelineVersion(filePath string, parameters *params.UploadPipelineVersionParams) (*model.V2beta1PipelineVersion, error) + Upload(parameters *params.UploadPipelineParams) (*model.V2beta1Pipeline, error) } type PipelineUploadClient struct { @@ -48,8 +50,8 @@ type PipelineUploadClient struct { } func NewPipelineUploadClient(clientConfig clientcmd.ClientConfig, debug bool) ( - *PipelineUploadClient, error) { - + PipelineUploadInterface, error, +) { runtime, err := api_server.NewHTTPRuntime(clientConfig, debug) if err != nil { return nil, fmt.Errorf("Error occurred when creating pipeline upload client: %w", err) @@ -64,8 +66,8 @@ func NewPipelineUploadClient(clientConfig clientcmd.ClientConfig, debug bool) ( } func NewKubeflowInClusterPipelineUploadClient(namespace string, debug bool) ( - *PipelineUploadClient, error) { - + PipelineUploadInterface, error, +) { runtime := api_server.NewKubeflowInClusterHTTPRuntime(namespace, debug) apiClient := apiclient.New(runtime, strfmt.Default) diff --git a/backend/src/common/client/api_server/v2/pipeline_upload_client_kubernetes.go b/backend/src/common/client/api_server/v2/pipeline_upload_client_kubernetes.go new file mode 100644 index 
00000000000..eb60da3e46e --- /dev/null +++ b/backend/src/common/client/api_server/v2/pipeline_upload_client_kubernetes.go @@ -0,0 +1,300 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package api_server_v2 + +import ( + "bytes" + "context" + "errors" + "fmt" + "io" + "os" + "path" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service" + model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_model" + "github.com/kubeflow/pipelines/backend/src/apiserver/common" + apimodel "github.com/kubeflow/pipelines/backend/src/apiserver/model" + "github.com/kubeflow/pipelines/backend/src/apiserver/server" + "github.com/kubeflow/pipelines/backend/src/common/client/api_server" + "github.com/kubeflow/pipelines/backend/src/common/util" + k8sapi "github.com/kubeflow/pipelines/backend/src/crd/kubernetes/v2beta1" + k8sruntime "k8s.io/apimachinery/pkg/runtime" + _ "k8s.io/client-go/plugin/pkg/client/auth/gcp" + "k8s.io/client-go/tools/clientcmd" + ctrlclient "sigs.k8s.io/controller-runtime/pkg/client" +) + +var scheme *k8sruntime.Scheme + +func init() { + scheme = k8sruntime.NewScheme() + err := k8sapi.AddToScheme(scheme) + if err != nil { + // Panic is okay here because it means there's a code issue and so the package shouldn't initialize. 
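// An illustrative, self-contained sketch of the controller-runtime pattern the Kubernetes-native
// upload client above is built on: register the KFP CRD types on a runtime.Scheme, construct a
// client with that scheme, and create Pipeline objects through it. Import paths mirror the ones
// used above; loading the kubeconfig via ctrlconfig.GetConfig() and the literal names/namespace
// are assumptions made for this example only.
package main

import (
	"context"
	"log"

	apimodel "github.com/kubeflow/pipelines/backend/src/apiserver/model"
	k8sapi "github.com/kubeflow/pipelines/backend/src/crd/kubernetes/v2beta1"
	k8sruntime "k8s.io/apimachinery/pkg/runtime"
	ctrlclient "sigs.k8s.io/controller-runtime/pkg/client"
	ctrlconfig "sigs.k8s.io/controller-runtime/pkg/client/config"
)

func main() {
	// Register the KFP pipeline CRD types so the client can serialize them.
	scheme := k8sruntime.NewScheme()
	if err := k8sapi.AddToScheme(scheme); err != nil {
		log.Fatalf("failed to register KFP types: %v", err)
	}

	// Load the rest.Config the way kubectl would (KUBECONFIG or in-cluster config).
	restConfig, err := ctrlconfig.GetConfig()
	if err != nil {
		log.Fatalf("failed to load kubeconfig: %v", err)
	}

	ctrlClient, err := ctrlclient.New(restConfig, ctrlclient.Options{Scheme: scheme})
	if err != nil {
		log.Fatalf("failed to create controller-runtime client: %v", err)
	}

	// Build a Pipeline custom resource from the API-server model, as the upload client
	// does, and create it in the target namespace.
	pipeline := k8sapi.FromPipelineModel(apimodel.Pipeline{
		Name:        "example-pipeline",
		Namespace:   "kubeflow",
		DisplayName: "Example pipeline",
		Description: apimodel.LargeText("Created through the Kubernetes API"),
	})
	if err := ctrlClient.Create(context.Background(), &pipeline); err != nil {
		log.Fatalf("failed to create Pipeline resource: %v", err)
	}
	log.Printf("created Pipeline %s/%s (uid=%s)", pipeline.Namespace, pipeline.Name, pipeline.UID)
}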
+ panic(fmt.Sprintf("Failed to initialize the Kubernetes API scheme: %v", err)) + } +} + +type PipelineUploadClientKubernetes struct { + ctrlClient ctrlclient.Client + namespace string +} + +func deriveNameDisplayAndDescription(providedName, providedDisplayName, providedDescription *string, defaultName string) (string, string, string) { + name := defaultName + if providedName != nil { + name = *providedName + } else if providedDisplayName != nil { + name = *providedDisplayName + } + + displayName := name + if providedDisplayName != nil { + displayName = *providedDisplayName + } + + var description string + if providedDescription != nil { + description = *providedDescription + } + + return name, displayName, description +} + +func NewPipelineUploadClientKubernetes(clientConfig clientcmd.ClientConfig, namespace string) ( + PipelineUploadInterface, error, +) { + restConfig, err := clientConfig.ClientConfig() + if err != nil { + return nil, fmt.Errorf("failed to get the rest config: %w", err) + } + + ctrlClient, err := ctrlclient.New(restConfig, ctrlclient.Options{Scheme: scheme}) + if err != nil { + return nil, fmt.Errorf("failed to initialize the controller-runtime client: %w", err) + } + + return &PipelineUploadClientKubernetes{ + ctrlClient: ctrlClient, + namespace: namespace, + }, nil +} + +func (c *PipelineUploadClientKubernetes) UploadFile(filePath string, parameters *params.UploadPipelineParams) ( + *model.V2beta1Pipeline, error, +) { + file, err := os.Open(filePath) + if err != nil { + return nil, util.NewUserErrorWithSingleMessage(err, + fmt.Sprintf("Failed to open file '%s'", filePath)) + } + defer file.Close() + + processedFile, err := server.ReadPipelineFile(path.Base(filePath), file, common.MaxFileLength) + if err != nil { + return nil, util.NewUserErrorWithSingleMessage(err, "Failed to read pipeline spec file") + } + + parameters.Uploadfile = runtime.NamedReader(path.Base(filePath), io.NopCloser(bytes.NewReader(processedFile))) + return c.Upload(parameters) +} + +func (c *PipelineUploadClientKubernetes) Upload(parameters *params.UploadPipelineParams) (*model.V2beta1Pipeline, + error, +) { + if parameters.Namespace != nil && *parameters.Namespace != c.namespace { + return nil, util.NewUserError(errors.New("namespace cannot be set as an upload parameter"), + "Namespace cannot be set as an upload parameter in the Kubernetes client", + "Namespace cannot be set as an upload parameter") + } + + piplineSpec, err := io.ReadAll(parameters.Uploadfile) + if err != nil { + return nil, fmt.Errorf("failed to read pipeline spec: %w", err) + } + + defer parameters.Uploadfile.Close() + + name, displayName, description := deriveNameDisplayAndDescription( + parameters.Name, + parameters.DisplayName, + parameters.Description, + path.Base(parameters.Uploadfile.Name()), + ) + + pipelineModel := apimodel.Pipeline{ + Name: name, + Namespace: c.namespace, + DisplayName: displayName, + Description: apimodel.LargeText(description), + } + pipeline := k8sapi.FromPipelineModel(pipelineModel) + + ctx, cancel := context.WithTimeout(context.Background(), api_server.APIServerDefaultTimeout) + defer cancel() + + err = c.ctrlClient.Create(ctx, &pipeline) + if err != nil { + return nil, util.NewUserError(err, + fmt.Sprintf("Failed to upload pipeline. 
Params: '%v'", parameters), + "Failed to upload pipeline") + } + + pipelineVersionModel := apimodel.PipelineVersion{ + Name: name, + DisplayName: displayName, + Description: apimodel.LargeText(description), + PipelineSpec: apimodel.LargeText(piplineSpec), + } + + pipelineVersion, err := k8sapi.FromPipelineVersionModel(pipelineModel, pipelineVersionModel) + if err != nil { + return nil, util.NewUserError(err, + fmt.Sprintf("Failed to parse pipeline version. Params: '%v'", parameters), + "Failed to parse pipeline version") + } + + err = c.ctrlClient.Create(ctx, pipelineVersion) + if err != nil { + return nil, util.NewUserError(err, + fmt.Sprintf("Failed to upload pipeline version. Params: '%v'", parameters), + "Failed to upload pipeline version") + } + + rv := &model.V2beta1Pipeline{ + CreatedAt: strfmt.DateTime(pipeline.CreationTimestamp.Time), + Description: pipeline.Spec.Description, + DisplayName: pipeline.Spec.DisplayName, + Name: pipeline.Name, + PipelineID: string(pipeline.ObjectMeta.UID), + Namespace: pipeline.Namespace, + } + + return rv, nil +} + +// UploadPipelineVersion uploads pipeline version from local file. +func (c *PipelineUploadClientKubernetes) UploadPipelineVersion(filePath string, parameters *params.UploadPipelineVersionParams) (*model.V2beta1PipelineVersion, + error, +) { + if parameters.Pipelineid == nil { + return nil, util.NewUserError(errors.New("pipelineid is required"), + "pipelineid is required", + "pipelineid is required") + } + + file, err := os.Open(filePath) + if err != nil { + return nil, util.NewUserErrorWithSingleMessage(err, + fmt.Sprintf("Failed to open file '%s'", filePath)) + } + defer file.Close() + + processedFile, err := server.ReadPipelineFile(path.Base(filePath), file, common.MaxFileLength) + if err != nil { + return nil, util.NewUserErrorWithSingleMessage(err, "Failed to read pipeline spec file") + } + + ctx, cancel := context.WithTimeout(context.Background(), api_server.APIServerDefaultTimeout) + defer cancel() + + pipelineList := &k8sapi.PipelineList{} + err = c.ctrlClient.List(ctx, pipelineList, &ctrlclient.ListOptions{ + Namespace: c.namespace, + }) + if err != nil { + return nil, util.NewUserError(err, + fmt.Sprintf("Failed to list pipelines. Params: '%v'", parameters), + "Failed to list pipelines") + } + + var pipeline *k8sapi.Pipeline + + for _, listedPipeline := range pipelineList.Items { + if string(listedPipeline.ObjectMeta.UID) == *parameters.Pipelineid { + pipeline = &listedPipeline + break + } + } + + if pipeline == nil { + return nil, util.NewUserError(errors.New("pipeline not found"), + fmt.Sprintf("Pipeline with id %s not found", *parameters.Pipelineid), + "Pipeline not found") + } + + modelPipeline := pipeline.ToModel() + + name, displayName, description := deriveNameDisplayAndDescription( + parameters.Name, + parameters.DisplayName, + parameters.Description, + path.Base(filePath), + ) + + modelPipelineVersion := apimodel.PipelineVersion{ + Name: name, + PipelineSpec: apimodel.LargeText(processedFile), + DisplayName: displayName, + Description: apimodel.LargeText(description), + PipelineId: modelPipeline.UUID, + } + + pipelineVersion, err := k8sapi.FromPipelineVersionModel(*modelPipeline, modelPipelineVersion) + if err != nil { + return nil, util.NewUserError(err, + fmt.Sprintf("Failed to parse pipeline version. 
Params: '%v'", parameters), + "Failed to parse pipeline version") + } + + err = c.ctrlClient.Create(ctx, pipelineVersion) + if err != nil { + return nil, util.NewUserError(err, + fmt.Sprintf("Failed to upload pipeline version. Params: '%v'", parameters), + "Failed to upload pipeline version") + } + + pipelineVersionModel, err := pipelineVersion.ToModel() + if err != nil { + return nil, util.NewUserError(err, + fmt.Sprintf("Failed to convert pipeline version to model. Params: '%v'", parameters), + "Encountered an invalid pipeline version") + } + + rv := &model.V2beta1PipelineVersion{ + CreatedAt: strfmt.DateTime(pipelineVersion.CreationTimestamp.Time), + Description: string(pipelineVersionModel.Description), + DisplayName: pipelineVersionModel.DisplayName, + Name: pipelineVersionModel.Name, + PipelineID: modelPipeline.UUID, + PipelineVersionID: pipelineVersionModel.UUID, + PipelineSpec: nil, + CodeSourceURL: pipelineVersionModel.CodeSourceUrl, + } + + // Handles the case where there is a platform spec in the pipeline spec. + if spec, err := server.YamlStringToPipelineSpecStruct(string(pipelineVersionModel.PipelineSpec)); err == nil && spec != nil { + rv.PipelineSpec = spec.AsMap() + } else if err != nil { + return nil, util.NewUserError(err, + fmt.Sprintf("Failed to parse pipeline version. Params: '%v'", parameters), + "Failed to parse pipeline version") + } + + return rv, nil +} diff --git a/backend/src/common/client/api_server/v2/recurring_run_client.go b/backend/src/common/client/api_server/v2/recurring_run_client.go index dbb8beae8e5..22ae800dbda 100644 --- a/backend/src/common/client/api_server/v2/recurring_run_client.go +++ b/backend/src/common/client/api_server/v2/recurring_run_client.go @@ -17,6 +17,8 @@ package api_server_v2 import ( "fmt" + httptransport "github.com/go-openapi/runtime/client" + "github.com/go-openapi/runtime" "github.com/go-openapi/strfmt" apiclient "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_client" @@ -74,6 +76,24 @@ func NewKubeflowInClusterRecurringRunClient(namespace string, debug bool) ( }, nil } +func NewMultiUserRecurringRunClient(clientConfig clientcmd.ClientConfig, userToken string, debug bool) ( + *RecurringRunClient, error) { + + runtime, err := api_server.NewHTTPRuntime(clientConfig, debug) + if err != nil { + return nil, fmt.Errorf("Error occurred when creating job client: %w", err) + } + + runtime.DefaultAuthentication = httptransport.BearerToken(userToken) + apiClient := apiclient.New(runtime, strfmt.Default) + + // Creating job client + return &RecurringRunClient{ + apiClient: apiClient, + authInfoWriter: api_server.TokenToAuthInfo(userToken), + }, nil +} + func (c *RecurringRunClient) Create(parameters *params.RecurringRunServiceCreateRecurringRunParams) (*model.V2beta1RecurringRun, error) { // Create context with timeout @@ -85,8 +105,8 @@ func (c *RecurringRunClient) Create(parameters *params.RecurringRunServiceCreate response, err := c.apiClient.RecurringRunService.RecurringRunServiceCreateRecurringRun(parameters) if err != nil { return nil, util.NewUserError(err, - fmt.Sprintf("Failed to create job. Params: '%+v'. Body: '%+v'", parameters, parameters.Body), - fmt.Sprintf("Failed to create job '%v'", parameters.Body.DisplayName)) + fmt.Sprintf("Failed to create job. Params: '%+v'. 
Body: '%+v'", parameters, parameters.RecurringRun), + fmt.Sprintf("Failed to create job '%v'", parameters.RecurringRun.DisplayName)) } return response.Payload, nil diff --git a/backend/src/common/client/api_server/v2/recurring_run_client_fake.go b/backend/src/common/client/api_server/v2/recurring_run_client_fake.go index 2221a0ed0fc..c6b703449fc 100644 --- a/backend/src/common/client/api_server/v2/recurring_run_client_fake.go +++ b/backend/src/common/client/api_server/v2/recurring_run_client_fake.go @@ -37,7 +37,7 @@ func NewRecurringRunClientFake() *RecurringRunClientFake { func (c *RecurringRunClientFake) Create(params *params.RecurringRunServiceCreateRecurringRunParams) ( *model.V2beta1RecurringRun, error) { - return getDefaultJob("500", params.Body.DisplayName), nil + return getDefaultJob("500", params.RecurringRun.DisplayName), nil } func (c *RecurringRunClientFake) Get(params *params.RecurringRunServiceGetRecurringRunParams) ( diff --git a/backend/src/common/client/api_server/v2/run_client.go b/backend/src/common/client/api_server/v2/run_client.go index 47b37ab89a4..dc0681150ad 100644 --- a/backend/src/common/client/api_server/v2/run_client.go +++ b/backend/src/common/client/api_server/v2/run_client.go @@ -17,6 +17,8 @@ package api_server_v2 import ( "fmt" + httptransport "github.com/go-openapi/runtime/client" + "github.com/go-openapi/runtime" "github.com/go-openapi/strfmt" apiclient "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client" @@ -74,6 +76,24 @@ func NewKubeflowInClusterRunClient(namespace string, debug bool) ( }, nil } +func NewMultiUserRunClient(clientConfig clientcmd.ClientConfig, userToken string, debug bool) ( + *RunClient, error) { + + runtime, err := api_server.NewHTTPRuntime(clientConfig, debug) + if err != nil { + return nil, fmt.Errorf("Error occurred when creating run client: %w", err) + } + + runtime.DefaultAuthentication = httptransport.BearerToken(userToken) + apiClient := apiclient.New(runtime, strfmt.Default) + + // Creating run client + return &RunClient{ + apiClient: apiClient, + authInfoWriter: api_server.TokenToAuthInfo(userToken), + }, nil +} + func (c *RunClient) Create(parameters *params.RunServiceCreateRunParams) (*model.V2beta1Run, error) { // Create context with timeout ctx, cancel := context.WithTimeout(context.Background(), api_server.APIServerDefaultTimeout) @@ -91,7 +111,7 @@ func (c *RunClient) Create(parameters *params.RunServiceCreateRunParams) (*model return nil, util.NewUserError(err, fmt.Sprintf("Failed to create run. 
Params: '%+v'", parameters), - fmt.Sprintf("Failed to create run '%v'", parameters.Body.DisplayName)) + fmt.Sprintf("Failed to create run '%v'", parameters.Run.DisplayName)) } return response.Payload, nil diff --git a/backend/src/common/types.go b/backend/src/common/types.go index 75e72893c71..ce16e6f6438 100644 --- a/backend/src/common/types.go +++ b/backend/src/common/types.go @@ -21,7 +21,7 @@ package common type ExecutionPhase string // borrow from Workflow.Status.Phase: -// https://pkg.go.dev/github.com/argoproj/argo-workflows/v3@v3.5.14/pkg/apis/workflow/v1alpha1#WorkflowPhase +// https://pkg.go.dev/github.com/argoproj/argo-workflows/v3@v3.6.7/pkg/apis/workflow/v1alpha1#WorkflowPhase const ( ExecutionUnknown ExecutionPhase = "" ExecutionPending ExecutionPhase = "Pending" // pending some set-up - rarely used diff --git a/backend/src/common/util/error.go b/backend/src/common/util/error.go index 812d9570666..3535c7d5291 100644 --- a/backend/src/common/util/error.go +++ b/backend/src/common/util/error.go @@ -107,19 +107,19 @@ func NewUserError(err error, internalMessage string, externalMessage string) *Us if apiError, ok := err.(*runtime.APIError); ok { if apiError.Code == API_CODE_NOT_FOUND { return newUserError( - errors.Wrapf(err, internalMessage), + errors.Wrapf(err, "%s", internalMessage), fmt.Sprintf("%v: %v", externalMessage, "Resource not found"), codes.Code(apiError.Code)) } else { return newUserError( - errors.Wrapf(err, internalMessage), + errors.Wrapf(err, "%s", internalMessage), fmt.Sprintf("%v. Raw error from the service: %v", externalMessage, err.Error()), codes.Code(apiError.Code)) } } return newUserError( - errors.Wrapf(err, internalMessage), + errors.Wrapf(err, "%s", internalMessage), fmt.Sprintf("%v. Raw error from the service: %v", externalMessage, err.Error()), codes.Internal) } @@ -210,7 +210,7 @@ func NewBadKubernetesNameError(objectType string) *UserError { fmt.Errorf( "%s names must consist of lower case alphanumeric characters, '-' or '.', and must start and end with "+ "an alphanumeric character", objectType), - fmt.Sprintf("Invalid %s name", objectType), + "Invalid %s name", objectType, ) } @@ -358,7 +358,7 @@ func Wrap(err error, message string) error { case *UserError: return err.wrap(message) default: - return errors.Wrapf(err, message) + return errors.Wrapf(err, "%s", message) } } diff --git a/backend/src/common/util/json.go b/backend/src/common/util/json.go index 7719ff5f29e..6f01143b3e9 100644 --- a/backend/src/common/util/json.go +++ b/backend/src/common/util/json.go @@ -15,13 +15,12 @@ package util import ( - "github.com/golang/protobuf/jsonpb" + "google.golang.org/protobuf/encoding/protojson" "encoding/json" - "strings" "github.com/golang/glog" - "github.com/golang/protobuf/proto" + "google.golang.org/protobuf/proto" ) func UnmarshalJsonOrFail(data string, v interface{}) { @@ -71,6 +70,8 @@ func UnmarshalJsonWithError(data interface{}, v *interface{}) error { // UnmarshalString unmarshals a JSON object from s into m. 
// Allows unknown fields func UnmarshalString(s string, m proto.Message) error { - unmarshaler := jsonpb.Unmarshaler{AllowUnknownFields: true} - return unmarshaler.Unmarshal(strings.NewReader(s), m) + unmarshaler := protojson.UnmarshalOptions{ + DiscardUnknown: true, + } + return unmarshaler.Unmarshal([]byte(s), m) } diff --git a/backend/src/common/util/k8s_patches.go b/backend/src/common/util/k8s_patches.go new file mode 100644 index 00000000000..8d970c805bb --- /dev/null +++ b/backend/src/common/util/k8s_patches.go @@ -0,0 +1,57 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package util + +import ( + k8score "k8s.io/api/core/v1" +) + +// PatchPVCSpec applies fields from `patch` into `base` where patch has non-zero values. +// It mutates and returns the `base` spec. +func PatchPVCSpec(base, patch *k8score.PersistentVolumeClaimSpec) *k8score.PersistentVolumeClaimSpec { + if base == nil || patch == nil { + return base + } + if len(patch.AccessModes) > 0 { + base.AccessModes = patch.AccessModes + } + if patch.Selector != nil { + base.Selector = patch.Selector.DeepCopy() + } + if patch.Resources.Requests != nil { + if base.Resources.Requests == nil { + base.Resources.Requests = make(k8score.ResourceList) + } + for k, v := range patch.Resources.Requests { + base.Resources.Requests[k] = v + } + } + if patch.VolumeName != "" { + base.VolumeName = patch.VolumeName + } + if patch.StorageClassName != nil { + base.StorageClassName = patch.StorageClassName + } + if patch.VolumeMode != nil { + base.VolumeMode = patch.VolumeMode + } + if patch.DataSource != nil { + base.DataSource = patch.DataSource.DeepCopy() + } + if patch.DataSourceRef != nil { + base.DataSourceRef = patch.DataSourceRef.DeepCopy() + } + return base +} diff --git a/backend/src/common/util/k8s_patches_test.go b/backend/src/common/util/k8s_patches_test.go new file mode 100644 index 00000000000..4be468bb229 --- /dev/null +++ b/backend/src/common/util/k8s_patches_test.go @@ -0,0 +1,53 @@ +package util + +import ( + "testing" + + "github.com/stretchr/testify/assert" + k8score "k8s.io/api/core/v1" + "k8s.io/apimachinery/pkg/api/resource" +) + +func TestPatchPVCSpec(t *testing.T) { + base := &k8score.PersistentVolumeClaimSpec{ + AccessModes: []k8score.PersistentVolumeAccessMode{"ReadWriteOnce"}, + Resources: k8score.VolumeResourceRequirements{ + Requests: k8score.ResourceList{ + k8score.ResourceStorage: resource.MustParse("10Gi"), + }, + }, + StorageClassName: strPtr("standard"), + } + + t.Run("patch access modes and storage class", func(t *testing.T) { + patch := &k8score.PersistentVolumeClaimSpec{ + AccessModes: []k8score.PersistentVolumeAccessMode{"ReadWriteMany"}, + StorageClassName: strPtr("fast-storage"), + } + result := PatchPVCSpec(base.DeepCopy(), patch) + assert.Equal(t, []k8score.PersistentVolumeAccessMode{"ReadWriteMany"}, result.AccessModes) + assert.Equal(t, "fast-storage", *result.StorageClassName) + assert.Equal(t, resource.MustParse("10Gi"), 
result.Resources.Requests[k8score.ResourceStorage]) + }) + + t.Run("patch resources", func(t *testing.T) { + patch := &k8score.PersistentVolumeClaimSpec{ + Resources: k8score.VolumeResourceRequirements{ + Requests: k8score.ResourceList{ + k8score.ResourceStorage: resource.MustParse("20Gi"), + }, + }, + } + result := PatchPVCSpec(base.DeepCopy(), patch) + assert.Equal(t, resource.MustParse("20Gi"), result.Resources.Requests[k8score.ResourceStorage]) + }) + + t.Run("patch is nil", func(t *testing.T) { + result := PatchPVCSpec(base.DeepCopy(), nil) + assert.Equal(t, base, result) + }) +} + +func strPtr(s string) *string { + return &s +} diff --git a/backend/src/common/util/workflow.go b/backend/src/common/util/workflow.go index 0d88b7bcdfd..192b8138256 100644 --- a/backend/src/common/util/workflow.go +++ b/backend/src/common/util/workflow.go @@ -20,6 +20,8 @@ import ( "strings" "time" + "google.golang.org/protobuf/encoding/protojson" + workflowapi "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" argoclient "github.com/argoproj/argo-workflows/v3/pkg/client/clientset/versioned" argoclientwf "github.com/argoproj/argo-workflows/v3/pkg/client/clientset/versioned/typed/workflow/v1alpha1" @@ -29,7 +31,6 @@ import ( "github.com/argoproj/argo-workflows/v3/workflow/packer" "github.com/argoproj/argo-workflows/v3/workflow/validate" "github.com/golang/glog" - "github.com/golang/protobuf/jsonpb" api "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" exec "github.com/kubeflow/pipelines/backend/src/common" swfregister "github.com/kubeflow/pipelines/backend/src/crd/pkg/apis/scheduledworkflow" @@ -512,12 +513,18 @@ func collectNodeMetricsOrNil(runID string, nodeStatus *workflowapi.NodeStatus, r return nil, err } - retrievedNodeID := RetrievePodName(wf, *nodeStatus) + retrievedNodeID := nodeStatus.ID // Proto json lib requires a proto message before unmarshal data from JSON. We use // ReportRunMetricsRequest as a workaround to hold user's metrics, which is a superset of what // user can provide. reportMetricsRequest := new(api.ReportRunMetricsRequest) - err = jsonpb.UnmarshalString(metricsJSON, reportMetricsRequest) + transformedJSON, err := transformJSONForBackwardCompatibility(metricsJSON) + if err != nil { + fmt.Printf("Failed to transform JSON: %v\n", err) + return nil, err + } + + err = protojson.Unmarshal([]byte(transformedJSON), reportMetricsRequest) if err != nil { // User writes invalid metrics JSON. // TODO(#1426): report the error back to api server to notify user @@ -540,6 +547,19 @@ func collectNodeMetricsOrNil(runID string, nodeStatus *workflowapi.NodeStatus, r return reportMetricsRequest.GetMetrics(), nil } +// Previously number_value for RunMetrics in backend/api/v1beta1/run.proto +// allowed camelCase field values in JSON, to be consistent with the +// rest of the API (as well as with KFP api docs); this value was switched +// to support snake case. This function will convert old values to the +// newer snakecase so we can continue to support camelcase Metric Values for +// backwards compatibility for the end user. 
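+// For example, a metric serialized as {"name": "accuracy", "numberValue": 0.9} is rewritten to
+// {"name": "accuracy", "number_value": 0.9} so that protojson.Unmarshal accepts the field.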
+func transformJSONForBackwardCompatibility(jsonStr string) (string, error) { + replacer := strings.NewReplacer( + `"numberValue":`, `"number_value":`, + ) + return replacer.Replace(jsonStr), nil +} + func readNodeMetricsJSONOrEmpty(runID string, nodeStatus *workflowapi.NodeStatus, retrieveArtifact RetrieveArtifact, wf *workflowapi.Workflow, ) (string, error) { @@ -559,7 +579,7 @@ func readNodeMetricsJSONOrEmpty(runID string, nodeStatus *workflowapi.NodeStatus artifactRequest := &api.ReadArtifactRequest{ RunId: runID, - NodeId: RetrievePodName(*wf, *nodeStatus), + NodeId: nodeStatus.ID, ArtifactName: metricsArtifactName, } artifactResponse, err := retrieveArtifact(artifactRequest) diff --git a/backend/src/crd/kubernetes/v2beta1/pipeline_types.go b/backend/src/crd/kubernetes/v2beta1/pipeline_types.go index 81ba8415cf3..e65496c4692 100644 --- a/backend/src/crd/kubernetes/v2beta1/pipeline_types.go +++ b/backend/src/crd/kubernetes/v2beta1/pipeline_types.go @@ -56,7 +56,7 @@ func FromPipelineModel(pipeline model.Pipeline) Pipeline { }, Spec: PipelineSpec{ DisplayName: pipeline.DisplayName, - Description: pipeline.Description, + Description: string(pipeline.Description), }, } } @@ -72,7 +72,7 @@ func (p *Pipeline) ToModel() *model.Pipeline { return &model.Pipeline{ Name: p.Name, DisplayName: displayName, - Description: p.Spec.Description, + Description: model.LargeText(p.Spec.Description), Namespace: p.Namespace, UUID: string(p.UID), CreatedAtInSec: p.CreationTimestamp.Unix(), diff --git a/backend/src/crd/kubernetes/v2beta1/pipelineversion_types.go b/backend/src/crd/kubernetes/v2beta1/pipelineversion_types.go index da90e7d295d..87a6f15f1ad 100644 --- a/backend/src/crd/kubernetes/v2beta1/pipelineversion_types.go +++ b/backend/src/crd/kubernetes/v2beta1/pipelineversion_types.go @@ -94,7 +94,7 @@ type PipelineVersionList struct { } func FromPipelineVersionModel(pipeline model.Pipeline, pipelineVersion model.PipelineVersion) (*PipelineVersion, error) { - v2Spec, err := template.NewV2SpecTemplate([]byte(pipelineVersion.PipelineSpec), false) + v2Spec, err := template.NewV2SpecTemplate([]byte(string(pipelineVersion.PipelineSpec)), false, nil) if err != nil { return nil, fmt.Errorf("failed to parse the pipeline spec: %w", err) } @@ -153,12 +153,12 @@ func FromPipelineVersionModel(pipeline model.Pipeline, pipelineVersion model.Pip }, Spec: PipelineVersionSpec{ DisplayName: pipelineVersion.DisplayName, - Description: pipelineVersion.Description, + Description: string(pipelineVersion.Description), PipelineSpec: pipelineSpec, PlatformSpec: platformSpec, PipelineName: pipeline.Name, CodeSourceURL: pipelineVersion.CodeSourceUrl, - PipelineSpecURI: pipelineVersion.PipelineSpecURI, + PipelineSpecURI: string(pipelineVersion.PipelineSpecURI), }, }, nil } @@ -183,7 +183,7 @@ func (p *PipelineVersion) ToModel() (*model.PipelineVersion, error) { // This additional parsing filters out platform specs that normally are excluded when the pipeline version is // created through the REST API. This is done rather than modifying the mutating webhook to remove these // platform specs so that GitOps tools don't see a diff from what is in Git and what is on the cluster. 
- v2Spec, err := template.NewV2SpecTemplate(piplineSpecAndPlatformSpec, false) + v2Spec, err := template.NewV2SpecTemplate(piplineSpecAndPlatformSpec, false, nil) if err != nil { return nil, fmt.Errorf("failed to parse the pipeline spec: %w", err) } @@ -222,9 +222,9 @@ func (p *PipelineVersion) ToModel() (*model.PipelineVersion, error) { PipelineId: string(pipelineID), Status: pipelineVersionStatus, CodeSourceUrl: p.Spec.CodeSourceURL, - Description: p.Spec.Description, - PipelineSpec: string(v2Spec.Bytes()), - PipelineSpecURI: p.Spec.PipelineSpecURI, + Description: model.LargeText(p.Spec.Description), + PipelineSpec: model.LargeText(v2Spec.Bytes()), + PipelineSpecURI: model.LargeText(p.Spec.PipelineSpecURI), }, nil } diff --git a/backend/src/crd/kubernetes/v2beta1/pipelineversion_types_test.go b/backend/src/crd/kubernetes/v2beta1/pipelineversion_types_test.go index 1c12bd37e2a..41abdc5672e 100644 --- a/backend/src/crd/kubernetes/v2beta1/pipelineversion_types_test.go +++ b/backend/src/crd/kubernetes/v2beta1/pipelineversion_types_test.go @@ -307,10 +307,10 @@ func TestToModel_PipelineSpecOnly(t *testing.T) { assert.Equal(t, "version-456", result.UUID) assert.Equal(t, "test-version", result.Name) assert.Equal(t, "Test Version", result.DisplayName) - assert.Equal(t, "A test version", result.Description) + assert.Equal(t, "A test version", string(result.Description)) assert.Equal(t, "pipeline-123", result.PipelineId) assert.Equal(t, "https://github.com/test/pipeline", result.CodeSourceUrl) - assert.Equal(t, "gs://bucket/pipeline.yaml", result.PipelineSpecURI) + assert.Equal(t, "gs://bucket/pipeline.yaml", string(result.PipelineSpecURI)) // Check PipelineSpec contains only pipeline spec (no platform spec) assert.Contains(t, result.PipelineSpec, "pipelineInfo:") @@ -387,10 +387,10 @@ func TestToModel_PipelineAndPlatformSpecs(t *testing.T) { assert.Equal(t, "version-456", result.UUID) assert.Equal(t, "test-version", result.Name) assert.Equal(t, "Test Version", result.DisplayName) - assert.Equal(t, "A test version", result.Description) + assert.Equal(t, "A test version", string(result.Description)) assert.Equal(t, "pipeline-123", result.PipelineId) assert.Equal(t, "https://github.com/test/pipeline", result.CodeSourceUrl) - assert.Equal(t, "gs://bucket/pipeline.yaml", result.PipelineSpecURI) + assert.Equal(t, "gs://bucket/pipeline.yaml", string(result.PipelineSpecURI)) // Check PipelineSpec contains both pipeline and platform specs assert.Contains(t, result.PipelineSpec, "pipelineInfo:") @@ -717,7 +717,7 @@ platforms: Name: "hello-world-v1", DisplayName: "Hello World Pipeline v1", Description: "A simple hello world pipeline with workspace configuration", - PipelineSpec: pipelineSpecYAML, + PipelineSpec: model.LargeText(pipelineSpecYAML), PipelineId: "pipeline-123", } @@ -743,7 +743,7 @@ platforms: err = k8syaml.Unmarshal([]byte(originalPipelineSpecSplit[1]), &originalPlatformSpec) require.NoError(t, err) - roundTripPipelineSpecSplit := strings.Split(roundTripModel.PipelineSpec, "\n---\n") + roundTripPipelineSpecSplit := strings.Split(string(roundTripModel.PipelineSpec), "\n---\n") require.Len(t, roundTripPipelineSpecSplit, 2) var roundTripPipelineSpec map[string]interface{} err = k8syaml.Unmarshal([]byte(roundTripPipelineSpecSplit[0]), &roundTripPipelineSpec) @@ -767,6 +767,6 @@ platforms: assert.Equal(t, "version-456", roundTripModel.UUID) assert.Equal(t, "hello-world-v1", roundTripModel.Name) assert.Equal(t, "Hello World Pipeline v1", roundTripModel.DisplayName) - assert.Equal(t, "A simple hello 
world pipeline with workspace configuration", roundTripModel.Description) + assert.Equal(t, "A simple hello world pipeline with workspace configuration", string(roundTripModel.Description)) assert.Equal(t, "pipeline-123", roundTripModel.PipelineId) } diff --git a/backend/src/v2/cacheutils/cache.go b/backend/src/v2/cacheutils/cache.go index d3223e194d3..81caddbdf2a 100644 --- a/backend/src/v2/cacheutils/cache.go +++ b/backend/src/v2/cacheutils/cache.go @@ -10,9 +10,9 @@ import ( "os" "google.golang.org/grpc/credentials" - "google.golang.org/grpc/credentials/insecure" "google.golang.org/grpc" + "google.golang.org/grpc/credentials/insecure" "google.golang.org/protobuf/encoding/protojson" "google.golang.org/protobuf/types/known/structpb" @@ -26,7 +26,7 @@ const ( // MaxGRPCMessageSize contains max grpc message size supported by the client MaxClientGRPCMessageSize = 100 * 1024 * 1024 // The endpoint uses Kubernetes service DNS name with namespace: - //https://kubernetes.io/docs/concepts/services-networking/service/#dns + // https://kubernetes.io/docs/concepts/services-networking/service/#dns defaultKfpApiEndpoint = "ml-pipeline.kubeflow:8887" ) @@ -38,16 +38,23 @@ type Client interface { outputs *pipelinespec.ExecutorInput_Outputs, outputParametersTypeMap map[string]string, cmdArgs []string, image string, + pvcNames []string, ) (*cachekey.CacheKey, error) GenerateFingerPrint(cacheKey *cachekey.CacheKey) (string, error) } -type disabledCacheClient struct { -} +type disabledCacheClient struct{} var _ Client = &disabledCacheClient{} -func (d disabledCacheClient) GenerateCacheKey(*pipelinespec.ExecutorInput_Inputs, *pipelinespec.ExecutorInput_Outputs, map[string]string, []string, string) (*cachekey.CacheKey, error) { +func (d disabledCacheClient) GenerateCacheKey( + *pipelinespec.ExecutorInput_Inputs, + *pipelinespec.ExecutorInput_Outputs, + map[string]string, + []string, + string, + []string, +) (*cachekey.CacheKey, error) { panic("GenerateCacheKey is not supposed to be called when cache is disabled") } @@ -187,8 +194,9 @@ func (c *client) GenerateCacheKey( inputs *pipelinespec.ExecutorInput_Inputs, outputs *pipelinespec.ExecutorInput_Outputs, outputParametersTypeMap map[string]string, - cmdArgs []string, image string) (*cachekey.CacheKey, error) { - + cmdArgs []string, image string, + pvcNames []string, +) (*cachekey.CacheKey, error) { cacheKey := cachekey.CacheKey{ InputArtifactNames: make(map[string]*cachekey.ArtifactNameList), InputParameterValues: make(map[string]*structpb.Value), @@ -232,10 +240,10 @@ func (c *client) GenerateCacheKey( } cacheKey.ContainerSpec = &cachekey.ContainerSpec{ - Image: image, - CmdArgs: cmdArgs, + Image: image, + CmdArgs: cmdArgs, + PvcNames: pvcNames, } return &cacheKey, nil - } diff --git a/backend/src/v2/cacheutils/cache_test.go b/backend/src/v2/cacheutils/cache_test.go index fd205912363..0a1b002b58c 100644 --- a/backend/src/v2/cacheutils/cache_test.go +++ b/backend/src/v2/cacheutils/cache_test.go @@ -25,6 +25,7 @@ func TestGenerateCacheKey(t *testing.T) { outputParametersTypeMap map[string]string cmdArgs []string image string + pvcNames []string want *cachekey.CacheKey wantErr bool }{ @@ -132,15 +133,95 @@ func TestGenerateCacheKey(t *testing.T) { }, }, + wantErr: false, + }, + { + name: "Generate CacheKey With PVC Names", + executorInputInputs: &pipelinespec.ExecutorInput_Inputs{ + ParameterValues: map[string]*structpb.Value{ + "message": {Kind: &structpb.Value_StringValue{StringValue: "Some string value"}}, + "num_steps": {Kind: 
&structpb.Value_NumberValue{NumberValue: 5}}, + }, + Artifacts: map[string]*pipelinespec.ArtifactList{ + "dataset_one": { + Artifacts: []*pipelinespec.RuntimeArtifact{ + { + Name: "1", + Type: &pipelinespec.ArtifactTypeSchema{ + Kind: &pipelinespec.ArtifactTypeSchema_InstanceSchema{InstanceSchema: "title: kfp.Dataset\ntype: object\nproperties:\n payload_format:\n type: string\n container_format:\n type: string\n"}, + }, + Uri: "gs://some-bucket/dataset-one", + Metadata: &structpb.Struct{}, + }, + }, + }, + }, + }, + executorInputOutputs: &pipelinespec.ExecutorInput_Outputs{ + Parameters: map[string]*pipelinespec.ExecutorInput_OutputParameter{ + "output_parameter_one": {OutputFile: "/tmp/outputs/output_parameter_one/data"}, + }, + Artifacts: map[string]*pipelinespec.ArtifactList{ + "model": { + Artifacts: []*pipelinespec.RuntimeArtifact{ + { + Name: "model", + Type: &pipelinespec.ArtifactTypeSchema{ + Kind: &pipelinespec.ArtifactTypeSchema_InstanceSchema{InstanceSchema: "title: kfp.Model\ntype: object\nproperties:\n framework:\n type: string\n framework_version:\n type: string\n"}, + }, + Uri: "gs://my-bucket/some-prefix/pipeline/task/model", + Metadata: &structpb.Struct{ + Fields: map[string]*structpb.Value{"name": {Kind: &structpb.Value_StringValue{StringValue: "model"}}}, + }, + }, + }, + }, + }, + OutputFile: "/tmp/kfp_outputs/output_metadata.json", + }, + outputParametersTypeMap: map[string]string{ + "output_parameter_one": "STRING", + }, + cmdArgs: []string{"sh", "ec", "test"}, + image: "python:3.9", + pvcNames: []string{"workspace-pvc", "data-pvc"}, + want: &cachekey.CacheKey{ + InputArtifactNames: map[string]*cachekey.ArtifactNameList{ + "dataset_one": {ArtifactNames: []string{"1"}}, + }, + InputParameterValues: map[string]*structpb.Value{ + "message": {Kind: &structpb.Value_StringValue{StringValue: "Some string value"}}, + "num_steps": {Kind: &structpb.Value_NumberValue{NumberValue: 5}}, + }, + OutputArtifactsSpec: map[string]*pipelinespec.RuntimeArtifact{ + "model": { + Name: "model", + Type: &pipelinespec.ArtifactTypeSchema{ + Kind: &pipelinespec.ArtifactTypeSchema_InstanceSchema{InstanceSchema: "title: kfp.Model\ntype: object\nproperties:\n framework:\n type: string\n framework_version:\n type: string\n"}, + }, + Metadata: &structpb.Struct{ + Fields: map[string]*structpb.Value{"name": {Kind: &structpb.Value_StringValue{StringValue: "model"}}}, + }, + }, + }, + OutputParametersSpec: map[string]string{ + "output_parameter_one": "STRING", + }, + ContainerSpec: &cachekey.ContainerSpec{ + CmdArgs: []string{"sh", "ec", "test"}, + Image: "python:3.9", + PvcNames: []string{"workspace-pvc", "data-pvc"}, + }, + }, + wantErr: false, }, } cacheClient, err := NewClient(false, false) require.NoError(t, err) for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - got, err := cacheClient.GenerateCacheKey(test.executorInputInputs, test.executorInputOutputs, test.outputParametersTypeMap, test.cmdArgs, test.image) + got, err := cacheClient.GenerateCacheKey(test.executorInputInputs, test.executorInputOutputs, test.outputParametersTypeMap, test.cmdArgs, test.image, test.pvcNames) if (err != nil) != test.wantErr { t.Errorf("GenerateCacheKey() error = %v", err) return @@ -270,3 +351,79 @@ func TestGenerateFingerPrint(t *testing.T) { }) } } + +func TestGenerateFingerPrint_ConsidersPVCNames(t *testing.T) { + base := &cachekey.CacheKey{ + InputArtifactNames: map[string]*cachekey.ArtifactNameList{ + "dataset_one": {ArtifactNames: []string{"1"}}, + }, + OutputParametersSpec: 
map[string]string{ + "output_parameter_one": "STRING", + }, + ContainerSpec: &cachekey.ContainerSpec{ + CmdArgs: []string{"sh", "ec", "test"}, + Image: "python:3.9", + }, + } + + withPVCs := &cachekey.CacheKey{ + InputArtifactNames: map[string]*cachekey.ArtifactNameList{ + "dataset_one": {ArtifactNames: []string{"1"}}, + }, + OutputParametersSpec: map[string]string{ + "output_parameter_one": "STRING", + }, + ContainerSpec: &cachekey.ContainerSpec{ + CmdArgs: []string{"sh", "ec", "test"}, + Image: "python:3.9", + PvcNames: []string{"workspace-pvc", "data-pvc"}, + }, + } + + samePVCs := &cachekey.CacheKey{ + InputArtifactNames: map[string]*cachekey.ArtifactNameList{ + "dataset_one": {ArtifactNames: []string{"1"}}, + }, + OutputParametersSpec: map[string]string{ + "output_parameter_one": "STRING", + }, + ContainerSpec: &cachekey.ContainerSpec{ + CmdArgs: []string{"sh", "ec", "test"}, + Image: "python:3.9", + PvcNames: []string{"workspace-pvc", "data-pvc"}, + }, + } + + differentPVCs := &cachekey.CacheKey{ + InputArtifactNames: map[string]*cachekey.ArtifactNameList{ + "dataset_one": {ArtifactNames: []string{"1"}}, + }, + OutputParametersSpec: map[string]string{ + "output_parameter_one": "STRING", + }, + ContainerSpec: &cachekey.ContainerSpec{ + CmdArgs: []string{"sh", "ec", "test"}, + Image: "python:3.9", + PvcNames: []string{"data-pvc", "workspace-pvc", "extra"}, + }, + } + + cacheClient, err := NewClient(false, false) + require.NoError(t, err) + + baseFP, err := cacheClient.GenerateFingerPrint(base) + require.NoError(t, err) + withPVCsFP, err := cacheClient.GenerateFingerPrint(withPVCs) + require.NoError(t, err) + samePVCsFP, err := cacheClient.GenerateFingerPrint(samePVCs) + require.NoError(t, err) + differentPVCsFP, err := cacheClient.GenerateFingerPrint(differentPVCs) + require.NoError(t, err) + + // PVC names should affect the fingerprint when present + assert.NotEqual(t, baseFP, withPVCsFP) + // Same PVC names should produce the same fingerprint + assert.Equal(t, withPVCsFP, samePVCsFP) + // Different PVC names should change the fingerprint + assert.NotEqual(t, withPVCsFP, differentPVCsFP) +} diff --git a/backend/src/v2/client_manager/client_manager.go b/backend/src/v2/client_manager/client_manager.go new file mode 100644 index 00000000000..766957ca6a0 --- /dev/null +++ b/backend/src/v2/client_manager/client_manager.go @@ -0,0 +1,94 @@ +package client_manager + +import ( + "fmt" + + "github.com/kubeflow/pipelines/backend/src/v2/cacheutils" + "github.com/kubeflow/pipelines/backend/src/v2/metadata" + "k8s.io/client-go/kubernetes" + "k8s.io/client-go/rest" +) + +type ClientManagerInterface interface { + K8sClient() kubernetes.Interface + MetadataClient() metadata.ClientInterface + CacheClient() cacheutils.Client +} + +// Ensure ClientManager implements ClientManagerInterface +var _ ClientManagerInterface = (*ClientManager)(nil) + +// ClientManager is a container for various service clients. +type ClientManager struct { + k8sClient kubernetes.Interface + metadataClient metadata.ClientInterface + cacheClient cacheutils.Client +} + +type Options struct { + MLMDServerAddress string + MLMDServerPort string + CacheDisabled bool +} + +// NewClientManager creates and Init a new instance of ClientManager. 
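+// It wires up the in-cluster Kubernetes client, the MLMD metadata client for the configured
+// address and port, and the cache client (honoring Options.CacheDisabled).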
+func NewClientManager(options *Options) (*ClientManager, error) { + clientManager := &ClientManager{} + err := clientManager.init(options) + if err != nil { + return nil, err + } + + return clientManager, nil +} + +func (cm *ClientManager) K8sClient() kubernetes.Interface { + return cm.k8sClient +} + +func (cm *ClientManager) MetadataClient() metadata.ClientInterface { + return cm.metadataClient +} + +func (cm *ClientManager) CacheClient() cacheutils.Client { + return cm.cacheClient +} + +func (cm *ClientManager) init(opts *Options) error { + k8sClient, err := initK8sClient() + if err != nil { + return err + } + metadataClient, err := initMetadataClient(opts.MLMDServerAddress, opts.MLMDServerPort) + if err != nil { + return err + } + cacheClient, err := initCacheClient(opts.CacheDisabled) + if err != nil { + return err + } + cm.k8sClient = k8sClient + cm.metadataClient = metadataClient + cm.cacheClient = cacheClient + return nil +} + +func initK8sClient() (kubernetes.Interface, error) { + restConfig, err := rest.InClusterConfig() + if err != nil { + return nil, fmt.Errorf("failed to initialize kubernetes client: %w", err) + } + k8sClient, err := kubernetes.NewForConfig(restConfig) + if err != nil { + return nil, fmt.Errorf("failed to initialize kubernetes client set: %w", err) + } + return k8sClient, nil +} + +func initMetadataClient(address string, port string) (metadata.ClientInterface, error) { + return metadata.NewClient(address, port, false, "") +} + +func initCacheClient(cacheDisabled bool) (cacheutils.Client, error) { + return cacheutils.NewClient(cacheDisabled, false) +} diff --git a/backend/src/v2/client_manager/client_manager_fake.go b/backend/src/v2/client_manager/client_manager_fake.go new file mode 100644 index 00000000000..918e1f72ac5 --- /dev/null +++ b/backend/src/v2/client_manager/client_manager_fake.go @@ -0,0 +1,36 @@ +package client_manager + +import ( + "github.com/kubeflow/pipelines/backend/src/v2/cacheutils" + "github.com/kubeflow/pipelines/backend/src/v2/metadata" + "k8s.io/client-go/kubernetes" +) + +type FakeClientManager struct { + k8sClient kubernetes.Interface + metadataClient metadata.ClientInterface + cacheClient cacheutils.Client +} + +// Ensure FakeClientManager implements ClientManagerInterface +var _ ClientManagerInterface = (*FakeClientManager)(nil) + +func (f *FakeClientManager) K8sClient() kubernetes.Interface { + return f.k8sClient +} + +func (f *FakeClientManager) MetadataClient() metadata.ClientInterface { + return f.metadataClient +} + +func (f *FakeClientManager) CacheClient() cacheutils.Client { + return f.cacheClient +} + +func NewFakeClientManager(k8sClient kubernetes.Interface, metadataClient metadata.ClientInterface, cacheClient cacheutils.Client) *FakeClientManager { + return &FakeClientManager{ + k8sClient: k8sClient, + metadataClient: metadataClient, + cacheClient: cacheClient, + } +} diff --git a/backend/src/v2/cmd/compiler/main.go b/backend/src/v2/cmd/compiler/main.go index ed64c78bc0a..6e132f5923c 100644 --- a/backend/src/v2/cmd/compiler/main.go +++ b/backend/src/v2/cmd/compiler/main.go @@ -19,11 +19,11 @@ import ( "os" "github.com/golang/glog" - structpb "github.com/golang/protobuf/ptypes/struct" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" "github.com/kubeflow/pipelines/backend/src/v2/compiler/argocompiler" "google.golang.org/protobuf/encoding/protojson" "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/types/known/structpb" "sigs.k8s.io/yaml" ) diff --git a/backend/src/v2/cmd/driver/main.go 
b/backend/src/v2/cmd/driver/main.go index a6086e590f0..c17ff5ee83e 100644 --- a/backend/src/v2/cmd/driver/main.go +++ b/backend/src/v2/cmd/driver/main.go @@ -20,16 +20,16 @@ import ( "flag" "fmt" - "os" - "path/filepath" - "strconv" + "google.golang.org/protobuf/encoding/protojson" "github.com/kubeflow/pipelines/backend/src/apiserver/config/proxy" - "github.com/kubeflow/pipelines/backend/src/common/util" + "os" + "path/filepath" + "strconv" + "github.com/golang/glog" - "github.com/golang/protobuf/jsonpb" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" "github.com/kubeflow/pipelines/backend/src/v2/cacheutils" "github.com/kubeflow/pipelines/backend/src/v2/config" @@ -60,6 +60,7 @@ var ( taskSpecJson = flag.String("task", "", "task spec") runtimeConfigJson = flag.String("runtime_config", "", "jobruntime config") iterationIndex = flag.Int("iteration_index", -1, "iteration index, -1 means not an interation") + taskName = flag.String("task_name", "", "original task name, used for proper input resolution in the container/dag driver") // container inputs dagExecutionID = flag.Int64("dag_execution_id", 0, "DAG execution ID") @@ -214,6 +215,8 @@ func drive() (err error) { MLMDServerPort: *mlmdServerPort, MLMDTLSEnabled: metadataTLSEnabled, CaCertPath: *caCertPath, + DriverType: *driverType, + TaskName: *taskName, } var execution *driver.Execution var driverErr error @@ -312,11 +315,11 @@ func handleExecution(execution *driver.Execution, driverType string, executionPa } } if execution.ExecutorInput != nil { - marshaler := jsonpb.Marshaler{} - executorInputJSON, err := marshaler.MarshalToString(execution.ExecutorInput) + executorInputBytes, err := protojson.Marshal(execution.ExecutorInput) if err != nil { return fmt.Errorf("failed to marshal ExecutorInput to JSON: %w", err) } + executorInputJSON := string(executorInputBytes) glog.Infof("output ExecutorInput:%s\n", prettyPrint(executorInputJSON)) } return nil diff --git a/backend/src/v2/cmd/driver/main_test.go b/backend/src/v2/cmd/driver/main_test.go index bd1d4b48183..ad15a7b00e9 100644 --- a/backend/src/v2/cmd/driver/main_test.go +++ b/backend/src/v2/cmd/driver/main_test.go @@ -4,10 +4,10 @@ import ( "os" "testing" - "github.com/golang/protobuf/proto" "github.com/kubeflow/pipelines/backend/src/v2/driver" "github.com/kubeflow/pipelines/kubernetes_platform/go/kubernetesplatform" "github.com/stretchr/testify/assert" + "google.golang.org/protobuf/proto" ) func strPtr(s string) *string { diff --git a/backend/src/v2/cmd/launcher-v2/main.go b/backend/src/v2/cmd/launcher-v2/main.go index f7d2b3f0f23..ea63b12d6b1 100644 --- a/backend/src/v2/cmd/launcher-v2/main.go +++ b/backend/src/v2/cmd/launcher-v2/main.go @@ -22,6 +22,7 @@ import ( "strconv" "github.com/golang/glog" + "github.com/kubeflow/pipelines/backend/src/v2/client_manager" "github.com/kubeflow/pipelines/backend/src/v2/component" "github.com/kubeflow/pipelines/backend/src/v2/config" ) @@ -119,7 +120,16 @@ func run() error { } return nil case "container": - launcher, err := component.NewLauncherV2(ctx, *executionID, *executorInputJSON, *componentSpecJSON, flag.Args(), launcherV2Opts) + clientOptions := &client_manager.Options{ + MLMDServerAddress: launcherV2Opts.MLMDServerAddress, + MLMDServerPort: launcherV2Opts.MLMDServerPort, + CacheDisabled: launcherV2Opts.CacheDisabled, + } + clientManager, err := client_manager.NewClientManager(clientOptions) + if err != nil { + return err + } + launcher, err := component.NewLauncherV2(ctx, *executionID, *executorInputJSON, *componentSpecJSON, 
flag.Args(), launcherV2Opts, clientManager) if err != nil { return err } diff --git a/backend/src/v2/compiler/argocompiler/argo.go b/backend/src/v2/compiler/argocompiler/argo.go index 060c8494c74..6ebd1f98e3d 100644 --- a/backend/src/v2/compiler/argocompiler/argo.go +++ b/backend/src/v2/compiler/argocompiler/argo.go @@ -22,6 +22,7 @@ import ( "strings" "github.com/kubeflow/pipelines/backend/src/apiserver/common" + "github.com/kubeflow/pipelines/backend/src/common/util" wfapi "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" @@ -30,6 +31,7 @@ import ( "google.golang.org/protobuf/proto" "google.golang.org/protobuf/types/known/structpb" k8score "k8s.io/api/core/v1" + "k8s.io/apimachinery/pkg/api/resource" k8sres "k8s.io/apimachinery/pkg/api/resource" k8smeta "k8s.io/apimachinery/pkg/apis/meta/v1" ) @@ -43,9 +45,15 @@ type Options struct { PipelineRoot string // optional CacheDisabled bool + // optional + DefaultWorkspace *k8score.PersistentVolumeClaimSpec // TODO(Bobgy): add an option -- dev mode, ImagePullPolicy should only be Always in dev mode. } +const ( + workspaceVolumeName = "kfp-workspace" +) + func Compile(jobArg *pipelinespec.PipelineJob, kubernetesSpecArg *pipelinespec.SinglePlatformSpec, opts *Options) (*wfapi.Workflow, error) { // clone jobArg, because we don't want to change it jobMsg := proto.Clone(jobArg) @@ -93,6 +101,19 @@ func Compile(jobArg *pipelinespec.PipelineJob, kubernetesSpecArg *pipelinespec.S return nil, fmt.Errorf("bug: cloned Kubernetes spec message does not have expected type") } } + var volumeClaimTemplates []k8score.PersistentVolumeClaim + hasPipelineConfig := kubernetesSpec != nil && kubernetesSpec.GetPipelineConfig() != nil + hasWorkspace := hasPipelineConfig && kubernetesSpec.GetPipelineConfig().GetWorkspace() != nil + if hasWorkspace { + workspace := kubernetesSpec.GetPipelineConfig().GetWorkspace() + if k8sWorkspace := workspace.GetKubernetes(); k8sWorkspace != nil { + pvc, err := GetWorkspacePVC(workspace, opts) + if err != nil { + return nil, err + } + volumeClaimTemplates = append(volumeClaimTemplates, pvc) + } + } // initialization wf := &wfapi.Workflow{ @@ -123,8 +144,9 @@ func Compile(jobArg *pipelinespec.PipelineJob, kubernetesSpecArg *pipelinespec.S Arguments: wfapi.Arguments{ Parameters: []wfapi.Parameter{}, }, - ServiceAccountName: common.GetStringConfigWithDefault(common.DefaultPipelineRunnerServiceAccountFlag, common.DefaultPipelineRunnerServiceAccount), - Entrypoint: tmplEntrypoint, + ServiceAccountName: common.GetStringConfigWithDefault(common.DefaultPipelineRunnerServiceAccountFlag, common.DefaultPipelineRunnerServiceAccount), + Entrypoint: tmplEntrypoint, + VolumeClaimTemplates: volumeClaimTemplates, }, } @@ -154,6 +176,7 @@ func Compile(jobArg *pipelinespec.PipelineJob, kubernetesSpecArg *pipelinespec.S if opts != nil { c.cacheDisabled = opts.CacheDisabled + c.defaultWorkspace = opts.DefaultWorkspace if opts.DriverImage != "" { c.driverImage = opts.DriverImage } @@ -190,6 +213,7 @@ type workflowCompiler struct { launcherCommand []string cacheDisabled bool mlPipelineServiceTLSEnabled bool + defaultWorkspace *k8score.PersistentVolumeClaimSpec } func (c *workflowCompiler) Resolver(name string, component *pipelinespec.ComponentSpec, resolver *pipelinespec.PipelineDeploymentConfig_ResolverSpec) error { @@ -348,6 +372,7 @@ func hashValue(value interface{}) (string, error) { const ( paramComponent = "component" // component spec paramTask = "task" // task spec + 
paramTaskName = "task-name" // task name paramContainer = "container" // container spec paramImporter = "importer" // importer spec paramRuntimeConfig = "runtime-config" // job runtime config, pipeline level inputs @@ -357,14 +382,18 @@ const ( paramIterationIndex = "iteration-index" paramExecutorInput = "executor-input" paramDriverType = "driver-type" - paramCachedDecision = "cached-decision" // indicate hit cache or not - paramPodSpecPatch = "pod-spec-patch" // a strategic patch merged with the pod spec - paramCondition = "condition" // condition = false -> skip the task - paramKubernetesConfig = "kubernetes-config" // stores Kubernetes config - paramRetryMaxCount = "retry-max-count" // limit on number of retries - paramRetryBackOffDuration = "retry-backoff-duration" // duration of backoff between retries - paramRetryBackOffFactor = "retry-backoff-factor" // multiplier for backoff duration between retries - paramRetryBackOffMaxDuration = "retry-backoff-max-duration" // limit on backoff duration between retries + paramCachedDecision = "cached-decision" // indicate hit cache or not + paramPodSpecPatch = "pod-spec-patch" // a strategic patch merged with the pod spec + paramCondition = "condition" // condition = false -> skip the task + paramKubernetesConfig = "kubernetes-config" // stores Kubernetes config + paramRetryMaxCount = "retry-max-count" // limit on number of retries + paramRetryBackOffDuration = "retry-backoff-duration" // duration of backoff between retries + paramRetryBackOffFactor = "retry-backoff-factor" // multiplier for backoff duration between retries + paramRetryBackOffMaxDuration = "retry-backoff-max-duration" // limit on backoff duration between retries + paramPodAnnotationKey = "pod-metadata-annotation-key" // task-specific pod metadata annotation key + paramPodAnnotationVal = "pod-metadata-annotation-val" // task-specific pod metadata annotation value + paramPodLabelKey = "pod-metadata-label-key" // task-specific pod metadata label key + paramPodLabelVal = "pod-metadata-label-val" // task-specific pod metadata label value ) func runID() string { @@ -442,3 +471,90 @@ var dummyImages = map[string]bool{ "argostub/createpvc": true, "argostub/deletepvc": true, } + +// convertStructToPVCSpec converts a protobuf Struct to a PersistentVolumeClaimSpec using JSON marshaling/unmarshalling. +func convertStructToPVCSpec(structVal *structpb.Struct) (*k8score.PersistentVolumeClaimSpec, error) { + if structVal == nil { + return nil, nil + } + jsonBytes, err := structVal.MarshalJSON() + if err != nil { + return nil, err + } + + // Strict validation: check for unknown fields + var patchMap map[string]interface{} + if err := json.Unmarshal(jsonBytes, &patchMap); err != nil { + return nil, err + } + // List of allowed fields in PersistentVolumeClaimSpec + allowed := map[string]struct{}{ + "accessModes": {}, + "storageClassName": {}, + } + var unknown []string + for k := range patchMap { + if _, ok := allowed[k]; !ok { + unknown = append(unknown, k) + } + } + if len(unknown) > 0 { + return nil, fmt.Errorf("unknown fields in PVC spec patch: %v", unknown) + } + + var pvcSpec k8score.PersistentVolumeClaimSpec + if err := json.Unmarshal(jsonBytes, &pvcSpec); err != nil { + return nil, err + } + return &pvcSpec, nil +} + +// GetWorkspacePVC constructs a PersistentVolumeClaim for the workspace. +// It uses the default PVC spec (from API server config), and applies any user-specified +// overrides from the pipeline spec, including the requested storage size. 
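+// Fields set in the user's pvcSpecPatch take precedence over the configured defaults.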
+// The workspace size is required and must be specified by the user. +func GetWorkspacePVC( + workspace *pipelinespec.WorkspaceConfig, + opts *Options, +) (k8score.PersistentVolumeClaim, error) { + sizeStr := workspace.GetSize() + if sizeStr == "" { + return k8score.PersistentVolumeClaim{}, fmt.Errorf("workspace size is required but not specified") + } + + k8sWorkspace := workspace.GetKubernetes() + var pvcSpec k8score.PersistentVolumeClaimSpec + // If no default workspace spec is configured, users can still use workspaces. + // This allows workspace usage without requiring default configuration. + if opts != nil && opts.DefaultWorkspace != nil { + pvcSpec = *opts.DefaultWorkspace.DeepCopy() + } + if k8sWorkspace != nil { + if k8sWorkspacePVCSpec := k8sWorkspace.GetPvcSpecPatch(); k8sWorkspacePVCSpec != nil { + userPatch, err := convertStructToPVCSpec(k8sWorkspacePVCSpec) + if err != nil { + return k8score.PersistentVolumeClaim{}, err + } + pvcSpec = *util.PatchPVCSpec(&pvcSpec, userPatch) + } + } + + quantity, err := resource.ParseQuantity(sizeStr) + if err != nil { + return k8score.PersistentVolumeClaim{}, fmt.Errorf("invalid size value for workspace PVC: %v", err) + } + if quantity.Sign() < 0 { + return k8score.PersistentVolumeClaim{}, fmt.Errorf("negative size value for workspace PVC: %v", sizeStr) + } + if pvcSpec.Resources.Requests == nil { + pvcSpec.Resources.Requests = make(map[k8score.ResourceName]resource.Quantity) + } + pvcSpec.Resources.Requests[k8score.ResourceStorage] = quantity + + return k8score.PersistentVolumeClaim{ + ObjectMeta: k8smeta.ObjectMeta{ + Name: workspaceVolumeName, + }, + Spec: pvcSpec, + }, nil +} diff --git a/backend/src/v2/compiler/argocompiler/argo_test.go b/backend/src/v2/compiler/argocompiler/argo_test.go deleted file mode 100644 index 76d630210d0..00000000000 --- a/backend/src/v2/compiler/argocompiler/argo_test.go +++ /dev/null @@ -1,219 +0,0 @@ -// Copyright 2021-2023 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package argocompiler_test - -import ( - "flag" - "fmt" - "os" - "strings" - "testing" - - "github.com/kubeflow/pipelines/backend/src/apiserver/config/proxy" - - wfapi "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" - "github.com/google/go-cmp/cmp" - "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" - "github.com/kubeflow/pipelines/backend/src/v2/compiler/argocompiler" - "google.golang.org/protobuf/encoding/protojson" - "sigs.k8s.io/yaml" -) - -var update = flag.Bool("update", false, "update golden files") - -func Test_argo_compiler(t *testing.T) { - proxy.InitializeConfigWithEmptyForTests() - - tests := []struct { - jobPath string // path of input PipelineJob to compile - platformSpecPath string // path of possible input PlatformSpec to compile - argoYAMLPath string // path of expected output argo workflow YAML - envVars map[string]string - compilerOptions argocompiler.Options - }{ - { - jobPath: "../testdata/hello_world_with_retry.json", - platformSpecPath: "", - argoYAMLPath: "testdata/hello_world_with_retry.yaml", - }, - { - jobPath: "../testdata/hello_world.json", - platformSpecPath: "", - argoYAMLPath: "testdata/hello_world.yaml", - }, - { - jobPath: "../testdata/importer.json", - platformSpecPath: "", - argoYAMLPath: "testdata/importer.yaml", - }, - { - jobPath: "../testdata/multiple_parallel_loops.json", - platformSpecPath: "", - argoYAMLPath: "testdata/multiple_parallel_loops.yaml", - }, - { - jobPath: "../testdata/create_mount_delete_dynamic_pvc.json", - platformSpecPath: "../testdata/create_mount_delete_dynamic_pvc_platform.json", - argoYAMLPath: "testdata/create_mount_delete_dynamic_pvc.yaml", - }, - { - jobPath: "../testdata/hello_world.json", - platformSpecPath: "../testdata/create_pod_metadata.json", - argoYAMLPath: "testdata/create_pod_metadata.yaml", - }, - { - jobPath: "../testdata/exit_handler.json", - platformSpecPath: "", - argoYAMLPath: "testdata/exit_handler.yaml", - }, - { - jobPath: "../testdata/hello_world.json", - platformSpecPath: "", - argoYAMLPath: "testdata/hello_world_run_as_user.yaml", - envVars: map[string]string{"PIPELINE_RUN_AS_USER": "1001"}, - }, - { - jobPath: "../testdata/hello_world.json", - platformSpecPath: "", - argoYAMLPath: "testdata/hello_world_log_level.yaml", - envVars: map[string]string{"PIPELINE_LOG_LEVEL": "3"}, - }, - { - jobPath: "../testdata/hello_world_with_retry_all_args.json", - platformSpecPath: "", - argoYAMLPath: "testdata/hello_world_with_retry_all_args.yaml", - }, - { - jobPath: "../testdata/hello_world.json", - platformSpecPath: "", - argoYAMLPath: "testdata/hello_world_cache_disabled.yaml", - compilerOptions: argocompiler.Options{CacheDisabled: true}, - }, - // retry set at pipeline level only. - { - jobPath: "../testdata/nested_pipeline_pipeline_retry.json", - platformSpecPath: "", - argoYAMLPath: "testdata/nested_pipeline_pipeline_retry.yaml", - }, - // retry set at component level only. - { - jobPath: "../testdata/nested_pipeline_sub_component_retry.json", - platformSpecPath: "", - argoYAMLPath: "testdata/nested_pipeline_sub_component_retry.yaml", - }, - // retry set at both component and pipeline level. 
- { - jobPath: "../testdata/nested_pipeline_all_level_retry.json", - platformSpecPath: "", - argoYAMLPath: "testdata/nested_pipeline_all_level_retry.yaml", - }, - } - for _, tt := range tests { - t.Run(fmt.Sprintf("%+v", tt), func(t *testing.T) { - prevEnvVars := map[string]string{} - - for envVarName, envVarValue := range tt.envVars { - prevEnvVars[envVarName] = os.Getenv(envVarName) - - os.Setenv(envVarName, envVarValue) - } - - defer func() { - for envVarName, envVarValue := range prevEnvVars { - if envVarValue == "" { - os.Unsetenv(envVarName) - } else { - os.Setenv(envVarName, envVarValue) - } - } - }() - - job, platformSpec := load(t, tt.jobPath, tt.platformSpecPath) - if *update { - wf, err := argocompiler.Compile(job, platformSpec, &tt.compilerOptions) - if err != nil { - t.Fatal(err) - } - got, err := yaml.Marshal(wf) - if err != nil { - t.Fatal(err) - } - err = os.WriteFile(tt.argoYAMLPath, got, 0x664) - if err != nil { - t.Fatal(err) - } - } - argoYAML, err := os.ReadFile(tt.argoYAMLPath) - if err != nil { - t.Fatal(err) - } - wf, err := argocompiler.Compile(job, platformSpec, &tt.compilerOptions) - if err != nil { - t.Error(err) - } - - // mask the driver launcher image hash to maintain test stability - for _, template := range wf.Spec.Templates { - if template.Container != nil && strings.Contains(template.Container.Image, "kfp-driver") { - template.Container.Image = "ghcr.io/kubeflow/kfp-driver" - } - if template.Container != nil && strings.Contains(template.Container.Image, "kfp-launcher") { - template.Container.Image = "ghcr.io/kubeflow/kfp-launcher" - } - for i := range template.InitContainers { - if strings.Contains(template.InitContainers[i].Image, "kfp-launcher") { - template.InitContainers[i].Image = "ghcr.io/kubeflow/kfp-launcher" - } - } - } - - var expected wfapi.Workflow - err = yaml.Unmarshal(argoYAML, &expected) - if err != nil { - t.Fatal(err) - } - if !cmp.Equal(wf, &expected) { - t.Errorf("compiler.Compile(%s)!=expected, diff: %s\n", tt.jobPath, cmp.Diff(&expected, wf)) - } - }) - - } - -} - -func load(t *testing.T, path string, platformSpecPath string) (*pipelinespec.PipelineJob, *pipelinespec.SinglePlatformSpec) { - t.Helper() - content, err := os.ReadFile(path) - if err != nil { - t.Error(err) - } - job := &pipelinespec.PipelineJob{} - if err := protojson.Unmarshal(content, job); err != nil { - t.Errorf("Failed to parse pipeline job, error: %s, job: %v", err, string(content)) - } - - platformSpec := &pipelinespec.PlatformSpec{} - if platformSpecPath != "" { - content, err = os.ReadFile(platformSpecPath) - if err != nil { - t.Error(err) - } - if err := protojson.Unmarshal(content, platformSpec); err != nil { - t.Errorf("Failed to parse platform spec, error: %s, spec: %v", err, string(content)) - } - return job, platformSpec.Platforms["kubernetes"] - } - return job, nil -} diff --git a/backend/src/v2/compiler/argocompiler/argo_workspace_test.go b/backend/src/v2/compiler/argocompiler/argo_workspace_test.go new file mode 100644 index 00000000000..d5cb3d92958 --- /dev/null +++ b/backend/src/v2/compiler/argocompiler/argo_workspace_test.go @@ -0,0 +1,427 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package argocompiler_test + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + "github.com/kubeflow/pipelines/backend/src/v2/compiler/argocompiler" + "google.golang.org/protobuf/types/known/structpb" + k8score "k8s.io/api/core/v1" + "k8s.io/apimachinery/pkg/api/resource" + k8smeta "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +func TestGetWorkspacePVC(t *testing.T) { + tests := []struct { + name string + workspace *pipelinespec.WorkspaceConfig + opts *argocompiler.Options + expectedPVC k8score.PersistentVolumeClaim + expectError bool + }{ + { + name: "workspace with size specified", + workspace: &pipelinespec.WorkspaceConfig{ + Size: "5Gi", + }, + opts: nil, + expectedPVC: k8score.PersistentVolumeClaim{ + ObjectMeta: k8smeta.ObjectMeta{ + Name: "kfp-workspace", + }, + Spec: k8score.PersistentVolumeClaimSpec{ + Resources: k8score.VolumeResourceRequirements{ + Requests: map[k8score.ResourceName]resource.Quantity{ + k8score.ResourceStorage: resource.MustParse("5Gi"), + }, + }, + }, + }, + expectError: false, + }, + { + name: "workspace with empty size should fail", + workspace: &pipelinespec.WorkspaceConfig{ + Size: "", // empty size + }, + opts: &argocompiler.Options{}, + expectedPVC: k8score.PersistentVolumeClaim{}, + expectError: true, + }, + { + name: "workspace with no size should fail", + workspace: &pipelinespec.WorkspaceConfig{ + Size: "", // no size specified + }, + opts: nil, // no options + expectedPVC: k8score.PersistentVolumeClaim{}, + expectError: true, + }, + { + name: "workspace with default PVC spec from options", + workspace: &pipelinespec.WorkspaceConfig{ + Size: "15Gi", + }, + opts: &argocompiler.Options{ + DefaultWorkspace: &k8score.PersistentVolumeClaimSpec{ + AccessModes: []k8score.PersistentVolumeAccessMode{ + k8score.ReadWriteOnce, + }, + StorageClassName: stringPtr("fast-ssd"), + Resources: k8score.VolumeResourceRequirements{ + Requests: map[k8score.ResourceName]resource.Quantity{ + k8score.ResourceStorage: resource.MustParse("1Gi"), // will be overridden + }, + }, + }, + }, + expectedPVC: k8score.PersistentVolumeClaim{ + ObjectMeta: k8smeta.ObjectMeta{ + Name: "kfp-workspace", + }, + Spec: k8score.PersistentVolumeClaimSpec{ + AccessModes: []k8score.PersistentVolumeAccessMode{ + k8score.ReadWriteOnce, + }, + StorageClassName: stringPtr("fast-ssd"), + Resources: k8score.VolumeResourceRequirements{ + Requests: map[k8score.ResourceName]resource.Quantity{ + k8score.ResourceStorage: resource.MustParse("15Gi"), + }, + }, + }, + }, + expectError: false, + }, + { + name: "workspace with Kubernetes PVC spec patch", + workspace: &pipelinespec.WorkspaceConfig{ + Size: "25Gi", + Kubernetes: &pipelinespec.KubernetesWorkspaceConfig{ + PvcSpecPatch: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "accessModes": structpb.NewListValue(&structpb.ListValue{ + Values: []*structpb.Value{ + structpb.NewStringValue("ReadWriteMany"), + }, + }), + "storageClassName": structpb.NewStringValue("nfs-storage"), + }, + }, + }, + }, + opts: &argocompiler.Options{ + DefaultWorkspace: 
&k8score.PersistentVolumeClaimSpec{ + AccessModes: []k8score.PersistentVolumeAccessMode{ + k8score.ReadWriteOnce, + }, + StorageClassName: stringPtr("default-storage"), + }, + }, + expectedPVC: k8score.PersistentVolumeClaim{ + ObjectMeta: k8smeta.ObjectMeta{ + Name: "kfp-workspace", + }, + Spec: k8score.PersistentVolumeClaimSpec{ + AccessModes: []k8score.PersistentVolumeAccessMode{ + k8score.ReadWriteMany, + }, + StorageClassName: stringPtr("nfs-storage"), + Resources: k8score.VolumeResourceRequirements{ + Requests: map[k8score.ResourceName]resource.Quantity{ + k8score.ResourceStorage: resource.MustParse("25Gi"), + }, + }, + }, + }, + expectError: false, + }, + { + name: "workspace with invalid size", + workspace: &pipelinespec.WorkspaceConfig{ + Size: "invalid-size", + }, + opts: nil, + expectedPVC: k8score.PersistentVolumeClaim{}, + expectError: true, + }, + { + name: "workspace with invalid PVC spec patch", + workspace: &pipelinespec.WorkspaceConfig{ + Size: "10Gi", + Kubernetes: &pipelinespec.KubernetesWorkspaceConfig{ + PvcSpecPatch: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "invalidField": structpb.NewStringValue("invalid"), + }, + }, + }, + }, + opts: nil, + expectedPVC: k8score.PersistentVolumeClaim{}, + expectError: true, + }, + { + name: "workspace with complex PVC spec patch", + workspace: &pipelinespec.WorkspaceConfig{ + Size: "50Gi", + Kubernetes: &pipelinespec.KubernetesWorkspaceConfig{ + PvcSpecPatch: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "accessModes": structpb.NewListValue(&structpb.ListValue{ + Values: []*structpb.Value{ + structpb.NewStringValue("ReadWriteOnce"), + structpb.NewStringValue("ReadOnlyMany"), + }, + }), + "storageClassName": structpb.NewStringValue("premium-ssd"), + }, + }, + }, + }, + opts: &argocompiler.Options{ + DefaultWorkspace: &k8score.PersistentVolumeClaimSpec{ + AccessModes: []k8score.PersistentVolumeAccessMode{ + k8score.ReadWriteOnce, + }, + StorageClassName: stringPtr("default"), + }, + }, + expectedPVC: k8score.PersistentVolumeClaim{ + ObjectMeta: k8smeta.ObjectMeta{ + Name: "kfp-workspace", + }, + Spec: k8score.PersistentVolumeClaimSpec{ + AccessModes: []k8score.PersistentVolumeAccessMode{ + k8score.ReadWriteOnce, + k8score.ReadOnlyMany, + }, + StorageClassName: stringPtr("premium-ssd"), + Resources: k8score.VolumeResourceRequirements{ + Requests: map[k8score.ResourceName]resource.Quantity{ + k8score.ResourceStorage: resource.MustParse("50Gi"), + }, + }, + }, + }, + expectError: false, + }, + { + name: "workspace with nil Kubernetes config", + workspace: &pipelinespec.WorkspaceConfig{ + Size: "30Gi", + Kubernetes: nil, + }, + opts: nil, + expectedPVC: k8score.PersistentVolumeClaim{ + ObjectMeta: k8smeta.ObjectMeta{ + Name: "kfp-workspace", + }, + Spec: k8score.PersistentVolumeClaimSpec{ + Resources: k8score.VolumeResourceRequirements{ + Requests: map[k8score.ResourceName]resource.Quantity{ + k8score.ResourceStorage: resource.MustParse("30Gi"), + }, + }, + }, + }, + expectError: false, + }, + { + name: "workspace with empty Kubernetes config", + workspace: &pipelinespec.WorkspaceConfig{ + Size: "40Gi", + Kubernetes: &pipelinespec.KubernetesWorkspaceConfig{ + PvcSpecPatch: nil, + }, + }, + opts: nil, + expectedPVC: k8score.PersistentVolumeClaim{ + ObjectMeta: k8smeta.ObjectMeta{ + Name: "kfp-workspace", + }, + Spec: k8score.PersistentVolumeClaimSpec{ + Resources: k8score.VolumeResourceRequirements{ + Requests: map[k8score.ResourceName]resource.Quantity{ + k8score.ResourceStorage: resource.MustParse("40Gi"), + 
}, + }, + }, + }, + expectError: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := argocompiler.GetWorkspacePVC(tt.workspace, tt.opts) + if (err != nil) != tt.expectError { + t.Errorf("GetWorkspacePVC() error = %v, expectError %v", err, tt.expectError) + return + } + if !tt.expectError { + if diff := cmp.Diff(tt.expectedPVC, got); diff != "" { + t.Errorf("GetWorkspacePVC() mismatch (-expected +got):\n%s", diff) + } + } + }) + } +} + +func stringPtr(s string) *string { + return &s +} + +func TestGetWorkspacePVC_EdgeCases(t *testing.T) { + tests := []struct { + name string + workspace *pipelinespec.WorkspaceConfig + opts *argocompiler.Options + expectError bool + errMsg string + }{ + { + name: "workspace with very large size", + workspace: &pipelinespec.WorkspaceConfig{ + Size: "1000Ti", + }, + opts: nil, + expectError: false, // should be valid + }, + { + name: "workspace with decimal size", + workspace: &pipelinespec.WorkspaceConfig{ + Size: "1.5Gi", + }, + opts: nil, + expectError: false, // should be valid + }, + { + name: "workspace with invalid size format", + workspace: &pipelinespec.WorkspaceConfig{ + Size: "10GB", // should be "10Gi" + }, + opts: nil, + expectError: true, + }, + { + name: "workspace with negative size", + workspace: &pipelinespec.WorkspaceConfig{ + Size: "-10Gi", + }, + opts: nil, + expectError: true, + }, + { + name: "workspace with malformed PVC spec patch", + workspace: &pipelinespec.WorkspaceConfig{ + Size: "10Gi", + Kubernetes: &pipelinespec.KubernetesWorkspaceConfig{ + PvcSpecPatch: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "resources": structpb.NewStructValue(&structpb.Struct{ + Fields: map[string]*structpb.Value{ + "requests": structpb.NewStringValue("invalid"), // should be a map + }, + }), + }, + }, + }, + }, + opts: nil, + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err := argocompiler.GetWorkspacePVC(tt.workspace, tt.opts) + if (err != nil) != tt.expectError { + t.Errorf("getWorkspacePVC() error = %v, expectError %v", err, tt.expectError) + return + } + if tt.expectError && tt.errMsg != "" && err != nil { + if err.Error() != tt.errMsg { + t.Errorf("getWorkspacePVC() error message = %v, expect %v", err.Error(), tt.errMsg) + } + } + }) + } +} + +// TestGetWorkspacePVC_Integration tests integration scenarios +func TestGetWorkspacePVC_Integration(t *testing.T) { + t.Run("complete workspace configuration", func(t *testing.T) { + workspace := &pipelinespec.WorkspaceConfig{ + Size: "100Gi", + Kubernetes: &pipelinespec.KubernetesWorkspaceConfig{ + PvcSpecPatch: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "accessModes": structpb.NewListValue(&structpb.ListValue{ + Values: []*structpb.Value{ + structpb.NewStringValue("ReadWriteOnce"), + }, + }), + "storageClassName": structpb.NewStringValue("gp2"), + }, + }, + }, + } + + opts := &argocompiler.Options{ + DefaultWorkspace: &k8score.PersistentVolumeClaimSpec{ + AccessModes: []k8score.PersistentVolumeAccessMode{ + k8score.ReadWriteOnce, + }, + StorageClassName: stringPtr("default"), + Resources: k8score.VolumeResourceRequirements{ + Requests: map[k8score.ResourceName]resource.Quantity{ + k8score.ResourceStorage: resource.MustParse("10Gi"), + }, + }, + }, + } + + pvc, err := argocompiler.GetWorkspacePVC(workspace, opts) + if err != nil { + t.Fatalf("getWorkspacePVC() unexpected error: %v", err) + } + + // Verify the result + expected := k8score.PersistentVolumeClaim{ + ObjectMeta: 
k8smeta.ObjectMeta{ + Name: "kfp-workspace", + }, + Spec: k8score.PersistentVolumeClaimSpec{ + AccessModes: []k8score.PersistentVolumeAccessMode{ + k8score.ReadWriteOnce, + }, + StorageClassName: stringPtr("gp2"), + Resources: k8score.VolumeResourceRequirements{ + Requests: map[k8score.ResourceName]resource.Quantity{ + k8score.ResourceStorage: resource.MustParse("100Gi"), + }, + }, + }, + } + + if diff := cmp.Diff(expected, pvc); diff != "" { + t.Errorf("getWorkspacePVC() mismatch (-want +got):\n%s", diff) + } + }) +} diff --git a/backend/src/v2/compiler/argocompiler/container.go b/backend/src/v2/compiler/argocompiler/container.go index 07cb6ea3fa2..a4895a02750 100644 --- a/backend/src/v2/compiler/argocompiler/container.go +++ b/backend/src/v2/compiler/argocompiler/container.go @@ -17,15 +17,17 @@ package argocompiler import ( "fmt" "os" + "sort" "strconv" "strings" + "google.golang.org/protobuf/encoding/protojson" + "github.com/kubeflow/pipelines/backend/src/apiserver/config/proxy" "k8s.io/apimachinery/pkg/util/intstr" wfapi "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" "github.com/golang/glog" - "github.com/golang/protobuf/jsonpb" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/v2/component" @@ -86,6 +88,7 @@ type containerDriverOutputs struct { type containerDriverInputs struct { component string task string + taskName string // preserve the original task name for input resolving container string parentDagID string iterationIndex string // optional, when this is an iteration task @@ -152,6 +155,7 @@ func (c *workflowCompiler) containerDriverTask(name string, inputs containerDriv {Name: paramComponent, Value: wfapi.AnyStringPtr(inputs.component)}, {Name: paramTask, Value: wfapi.AnyStringPtr(inputs.task)}, {Name: paramContainer, Value: wfapi.AnyStringPtr(inputs.container)}, + {Name: paramTaskName, Value: wfapi.AnyStringPtr(inputs.taskName)}, {Name: paramParentDagID, Value: wfapi.AnyStringPtr(inputs.parentDagID)}, }, }, @@ -192,6 +196,7 @@ func (c *workflowCompiler) addContainerDriverTemplate() string { "--dag_execution_id", inputValue(paramParentDagID), "--component", inputValue(paramComponent), "--task", inputValue(paramTask), + "--task_name", inputValue(paramTaskName), "--container", inputValue(paramContainer), "--iteration_index", inputValue(paramIterationIndex), "--cached_decision_path", outputPath(paramCachedDecision), @@ -224,6 +229,7 @@ func (c *workflowCompiler) addContainerDriverTemplate() string { {Name: paramComponent}, {Name: paramTask}, {Name: paramContainer}, + {Name: paramTaskName}, {Name: paramParentDagID}, {Name: paramIterationIndex, Default: wfapi.AnyStringPtr("-1")}, {Name: paramKubernetesConfig, Default: wfapi.AnyStringPtr("")}, @@ -269,35 +275,59 @@ type containerExecutorInputs struct { // name: argo workflows DAG task name // The other arguments are argo workflows task parameters, they can be either a // string or a placeholder. 
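// A minimal, illustrative usage sketch (not prescriptive; it mirrors the dag.go hunk further down in
// this diff): with the new (*wfapi.DAGTask, error) signature introduced below, callers are expected to
// propagate the error rather than assume the DAG task is always built, roughly:
//
//	executor, err := c.containerExecutorTask(name, containerExecutorInputs{
//		podSpecPatch:   driverOutputs.podSpecPatch,
//		cachedDecision: driverOutputs.cached,
//	}, task)
//	if err != nil {
//		return nil, fmt.Errorf("error creating executor for %q: %v", name, err)
//	}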
-func (c *workflowCompiler) containerExecutorTask(name string, inputs containerExecutorInputs, task *pipelinespec.PipelineTaskSpec) *wfapi.DAGTask { +func (c *workflowCompiler) containerExecutorTask(name string, inputs containerExecutorInputs, task *pipelinespec.PipelineTaskSpec) (*wfapi.DAGTask, error) { when := "" if inputs.condition != "" { when = inputs.condition + " != false" } + var refName string + if componentRef := task.GetComponentRef(); componentRef != nil { + refName = componentRef.Name + } else { + return nil, fmt.Errorf("component reference is nil") + } + + // Retrieve pod metadata defined in the Kubernetes Spec, if any + kubernetesConfigParam := c.wf.Spec.Arguments.GetParameterByName(argumentsKubernetesSpec + refName) + k8sExecCfg := &kubernetesplatform.KubernetesExecutorConfig{} + if kubernetesConfigParam != nil && kubernetesConfigParam.Value != nil { + if err := protojson.Unmarshal([]byte((*kubernetesConfigParam.Value)), k8sExecCfg); err != nil { + return nil, fmt.Errorf("failed to unmarshal kubernetes config: %v", err) + } + } dagTask := &wfapi.DAGTask{ Name: name, - Template: c.addContainerExecutorTemplate(task), + Template: c.addContainerExecutorTemplate(task, k8sExecCfg), When: when, Arguments: wfapi.Arguments{ - Parameters: append([]wfapi.Parameter{ - {Name: paramPodSpecPatch, Value: wfapi.AnyStringPtr(inputs.podSpecPatch)}, - {Name: paramCachedDecision, Value: wfapi.AnyStringPtr(inputs.cachedDecision), Default: wfapi.AnyStringPtr("false")}, - }, - c.getTaskRetryParametersWithValues(task)..., - ), + Parameters: append( + []wfapi.Parameter{ + { + Name: paramPodSpecPatch, + Value: wfapi.AnyStringPtr(inputs.podSpecPatch), + }, + { + Name: paramCachedDecision, + Value: wfapi.AnyStringPtr(inputs.cachedDecision), + Default: wfapi.AnyStringPtr("false"), + }, + }, + append( + c.getPodMetadataParameters(k8sExecCfg.GetPodMetadata(), true), + c.getTaskRetryParametersWithValues(task)...)...), }, } addExitTask(dagTask, inputs.exitTemplate, inputs.hookParentDagID) - return dagTask + return dagTask, nil } // addContainerExecutorTemplate adds a generic container executor template for // any container component task. // During runtime, it's expected that pod-spec-patch will specify command, args // and resources etc, that are different for different tasks. -func (c *workflowCompiler) addContainerExecutorTemplate(task *pipelinespec.PipelineTaskSpec) string { +func (c *workflowCompiler) addContainerExecutorTemplate(task *pipelinespec.PipelineTaskSpec, k8sExecCfg *kubernetesplatform.KubernetesExecutorConfig) string { // container template is parent of container implementation template nameContainerExecutor := "system-container-executor" nameContainerImpl := "system-container-impl" @@ -311,6 +341,21 @@ func (c *workflowCompiler) addContainerExecutorTemplate(task *pipelinespec.Pipel nameContainerImpl = "retry-" + nameContainerImpl } } + podMetadata := k8sExecCfg.GetPodMetadata() + // TODO (agoins): This approach was used because Argo Workflows does not currently support patching Metadata: https://github.com/argoproj/argo-workflows/issues/14661 + // If pod metadata is set, create a template name including the numbers of annotations and labels.
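// For illustration (a sketch of the naming scheme implemented below, not additional behavior): a
// KubernetesExecutorConfig carrying two annotations and one label produces templates named
// "metadata-2-1-system-container-executor" and "metadata-2-1-system-container-impl". The metadata
// itself flows in through input parameters; when more than one annotation (or label) is present the
// parameters are numbered, e.g. pod-metadata-annotation-key-1/pod-metadata-annotation-val-1 and
// pod-metadata-annotation-key-2/pod-metadata-annotation-val-2, while a single label keeps the
// unnumbered pod-metadata-label-key/pod-metadata-label-val pair.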
+ if podMetadata != nil { + numAnnotations := "0" + numLabels := "0" + if podMetadata.GetAnnotations() != nil { + numAnnotations = strconv.Itoa(len(k8sExecCfg.GetPodMetadata().GetAnnotations())) + } + if podMetadata.GetLabels() != nil { + numLabels = strconv.Itoa(len(k8sExecCfg.GetPodMetadata().GetLabels())) + } + nameContainerExecutor = "metadata-" + numAnnotations + "-" + numLabels + "-" + nameContainerExecutor + nameContainerImpl = "metadata-" + numAnnotations + "-" + numLabels + "-" + nameContainerImpl + } _, ok := c.templates[nameContainerExecutor] if ok { return nameContainerExecutor @@ -318,24 +363,35 @@ func (c *workflowCompiler) addContainerExecutorTemplate(task *pipelinespec.Pipel container := &wfapi.Template{ Name: nameContainerExecutor, Inputs: wfapi.Inputs{ - Parameters: append([]wfapi.Parameter{ - {Name: paramPodSpecPatch}, - {Name: paramCachedDecision, Default: wfapi.AnyStringPtr("false")}, - }, - c.addParameterDefault(c.getTaskRetryParameters(task), "0")..., - ), + Parameters: append( + []wfapi.Parameter{ + { + Name: paramPodSpecPatch, + }, + { + Name: paramCachedDecision, + Default: wfapi.AnyStringPtr("false"), + }, + }, + append( + c.getPodMetadataParameters(k8sExecCfg.GetPodMetadata(), false), + c.addParameterDefault(c.getTaskRetryParameters(task), "0")...)...), }, DAG: &wfapi.DAGTemplate{ Tasks: []wfapi.DAGTask{{ Name: "executor", Template: nameContainerImpl, Arguments: wfapi.Arguments{ - Parameters: append([]wfapi.Parameter{{ - Name: paramPodSpecPatch, - Value: wfapi.AnyStringPtr(inputParameter(paramPodSpecPatch))}, - }, - c.addParameterInputPath(c.getTaskRetryParameters(task))..., - ), + Parameters: append( + []wfapi.Parameter{ + { + Name: paramPodSpecPatch, + Value: wfapi.AnyStringPtr(inputParameter(paramPodSpecPatch)), + }, + }, + append( + c.addParameterInputPath(c.getPodMetadataParameters(k8sExecCfg.GetPodMetadata(), false)), + c.addParameterInputPath(c.getTaskRetryParameters(task))...)...), }, // When cached decision is true, the container // implementation template will be skipped, but @@ -363,10 +419,15 @@ func (c *workflowCompiler) addContainerExecutorTemplate(task *pipelinespec.Pipel executor := &wfapi.Template{ Name: nameContainerImpl, Inputs: wfapi.Inputs{ - Parameters: append([]wfapi.Parameter{ - {Name: paramPodSpecPatch}, - }, - c.getTaskRetryParameters(task)...), + Parameters: append( + []wfapi.Parameter{ + { + Name: paramPodSpecPatch, + }, + }, + append( + c.getPodMetadataParameters(k8sExecCfg.GetPodMetadata(), false), + c.getTaskRetryParameters(task)...)...), }, // PodSpecPatch input param is where actual image, command and // args come from. 
It is treated as a strategic merge patch on @@ -483,13 +544,8 @@ func (c *workflowCompiler) addContainerExecutorTemplate(task *pipelinespec.Pipel inputParameter(paramRetryBackOffMaxDuration)) } // Update pod metadata if it defined in the Kubernetes Spec - kubernetesConfigParam := c.wf.Spec.Arguments.GetParameterByName(argumentsKubernetesSpec + refName) - - if kubernetesConfigParam != nil { - k8sExecCfg := &kubernetesplatform.KubernetesExecutorConfig{} - if err := jsonpb.UnmarshalString(string(*kubernetesConfigParam.Value), k8sExecCfg); err == nil { - extendPodMetadata(&executor.Metadata, k8sExecCfg) - } + if k8sExecCfg.GetPodMetadata() != nil { + extendPodMetadata(&executor.Metadata, k8sExecCfg) } caBundleCfgMapName := os.Getenv("EXECUTOR_CABUNDLE_CONFIGMAP_NAME") caBundleCfgMapKey := os.Getenv("EXECUTOR_CABUNDLE_CONFIGMAP_KEY") @@ -637,7 +693,60 @@ func (c *workflowCompiler) getTaskRetryStrategyFromInput(maxCount string, backOf } } -// Extends the PodMetadata to include Kubernetes-specific executor config. +// Return pod metadata parameters for annotations and labels, with corresponding key/value vals, if applicable. +// If no parameters found, returns empty slice. +func (c *workflowCompiler) getPodMetadataParameters(podMetadata *kubernetesplatform.PodMetadata, includeVal bool) []wfapi.Parameter { + parameters := []wfapi.Parameter{} + if podMetadata != nil && podMetadata.GetAnnotations() != nil { + annotations := podMetadata.GetAnnotations() + parameters = append(parameters, c.formatPodMetadataParameters(annotations, includeVal, paramPodAnnotationKey, paramPodAnnotationVal)...) + } + if podMetadata != nil && podMetadata.GetLabels() != nil { + labels := podMetadata.GetLabels() + parameters = append(parameters, c.formatPodMetadataParameters(labels, includeVal, paramPodLabelKey, paramPodLabelVal)...) + } + return parameters +} + +// Return slice of formatted parameters. If includeVal is set to true, the corresponding values for the metadata key +// and value are included. If input metadata map is empty, empty slice is returned. +func (c *workflowCompiler) formatPodMetadataParameters(metadata map[string]string, includeVal bool, paramPodMetadataKey string, paramPodMetadataVal string) []wfapi.Parameter { + var parameters []wfapi.Parameter + // sort metadata alphabetically to ensure metadata are processed in identical order across all runs for a pipeline. + sortedMetadataKeys := make([]string, 0, len(metadata)) + for entry := range metadata { + sortedMetadataKeys = append(sortedMetadataKeys, entry) + } + sort.Strings(sortedMetadataKeys) + // paramPodMetadataKey/value parameters are numbered when more than one annotation/label is set. + count := 1 + key := paramPodMetadataKey + val := paramPodMetadataVal + for _, metadataKey := range sortedMetadataKeys { + if len(sortedMetadataKeys) > 1 { + key = paramPodMetadataKey + "-" + strconv.Itoa(count) + val = paramPodMetadataVal + "-" + strconv.Itoa(count) + } + if includeVal { + parameters = append(parameters, + []wfapi.Parameter{ + { + Name: key, + Value: wfapi.AnyStringPtr(metadataKey), + }, + { + Name: val, + Value: wfapi.AnyStringPtr(metadata[metadataKey]), + }}...) + } else { + parameters = append(parameters, []wfapi.Parameter{{Name: key}, {Name: val}}...) + } + count++ + } + return parameters +} + +// Extends the PodMetadata to PodMetadata input parameters. 
// Although the current podMetadata object is always empty, this function // doesn't overwrite the existing podMetadata because for security reasons // the existing podMetadata should have higher privilege than the user definition. @@ -649,18 +758,38 @@ func extendPodMetadata( if kubernetesExecutorConfig.GetPodMetadata() != nil { labels := kubernetesExecutorConfig.GetPodMetadata().GetLabels() if labels != nil { - if podMetadata.Labels == nil { - podMetadata.Labels = labels - } else { - podMetadata.Labels = extendMetadataMap(podMetadata.Labels, labels) + count := 1 + for range labels { + key := paramPodLabelKey + val := paramPodLabelVal + if len(labels) > 1 { + key = key + "-" + strconv.Itoa(count) + val = val + "-" + strconv.Itoa(count) + } + if podMetadata.Labels == nil { + podMetadata.Labels = map[string]string{inputParameter(key): inputParameter(val)} + } else { + podMetadata.Labels = extendMetadataMap(podMetadata.Labels, map[string]string{inputParameter(key): inputParameter(val)}) + } + count++ } } annotations := kubernetesExecutorConfig.GetPodMetadata().GetAnnotations() if annotations != nil { - if podMetadata.Annotations == nil { - podMetadata.Annotations = annotations - } else { - podMetadata.Annotations = extendMetadataMap(podMetadata.Annotations, annotations) + count := 1 + for range annotations { + key := paramPodAnnotationKey + val := paramPodAnnotationVal + if len(annotations) > 1 { + key = key + "-" + strconv.Itoa(count) + val = val + "-" + strconv.Itoa(count) + } + if podMetadata.Annotations == nil { + podMetadata.Annotations = map[string]string{inputParameter(key): inputParameter(val)} + } else { + podMetadata.Annotations = extendMetadataMap(podMetadata.Annotations, map[string]string{inputParameter(key): inputParameter(val)}) + } + count++ } } } diff --git a/backend/src/v2/compiler/argocompiler/container_test.go b/backend/src/v2/compiler/argocompiler/container_test.go index 286ddf6a4bd..1b946f2edaf 100644 --- a/backend/src/v2/compiler/argocompiler/container_test.go +++ b/backend/src/v2/compiler/argocompiler/container_test.go @@ -61,7 +61,7 @@ func TestAddContainerExecutorTemplate(t *testing.T) { }, } - c.addContainerExecutorTemplate(&pipelinespec.PipelineTaskSpec{ComponentRef: &pipelinespec.ComponentRef{Name: "comp-test-ref"}}) + c.addContainerExecutorTemplate(&pipelinespec.PipelineTaskSpec{ComponentRef: &pipelinespec.ComponentRef{Name: "comp-test-ref"}}, &kubernetesplatform.KubernetesExecutorConfig{}) assert.NotEmpty(t, "system-container-impl", "Template name should not be empty") executorTemplate, exists := c.templates["system-container-impl"] @@ -120,10 +120,10 @@ func Test_extendPodMetadata(t *testing.T) { }, &wfapi.Metadata{ Annotations: map[string]string{ - "run_id": "123456", + "{{inputs.parameters.pod-metadata-annotation-key}}": "{{inputs.parameters.pod-metadata-annotation-val}}", }, Labels: map[string]string{ - "kubeflow.com/kfp": "pipeline-node", + "{{inputs.parameters.pod-metadata-label-key}}": "{{inputs.parameters.pod-metadata-label-val}}", }, }, }, @@ -149,9 +149,11 @@ func Test_extendPodMetadata(t *testing.T) { }, &wfapi.Metadata{ Annotations: map[string]string{ + "{{inputs.parameters.pod-metadata-annotation-key}}": "{{inputs.parameters.pod-metadata-annotation-val}}", "run_id": "654321", }, Labels: map[string]string{ + "{{inputs.parameters.pod-metadata-label-key}}": "{{inputs.parameters.pod-metadata-label-val}}", "kubeflow.com/kfp": "default-node", }, }, diff --git a/backend/src/v2/compiler/argocompiler/dag.go b/backend/src/v2/compiler/argocompiler/dag.go index 
f3e74c58116..1ff13b49697 100644 --- a/backend/src/v2/compiler/argocompiler/dag.go +++ b/backend/src/v2/compiler/argocompiler/dag.go @@ -250,6 +250,7 @@ func (c *workflowCompiler) task(name string, task *pipelinespec.PipelineTaskSpec component: componentSpecPlaceholder, task: taskSpecJson, iterationIndex: inputs.iterationIndex, + taskName: name, }) if err != nil { return nil, err @@ -291,6 +292,7 @@ func (c *workflowCompiler) task(name string, task *pipelinespec.PipelineTaskSpec parentDagID: inputs.parentDagID, iterationIndex: inputs.iterationIndex, kubernetesConfig: kubernetesConfigPlaceholder, + taskName: name, }) if task.GetTriggerPolicy().GetCondition() == "" { driverOutputs.condition = "" @@ -307,13 +309,16 @@ func (c *workflowCompiler) task(name string, task *pipelinespec.PipelineTaskSpec driver.Name = name return []wfapi.DAGTask{*driver}, nil } - executor := c.containerExecutorTask(name, containerExecutorInputs{ + executor, err := c.containerExecutorTask(name, containerExecutorInputs{ podSpecPatch: driverOutputs.podSpecPatch, cachedDecision: driverOutputs.cached, condition: driverOutputs.condition, exitTemplate: inputs.exitTemplate, hookParentDagID: inputs.parentDagID, }, task) + if err != nil { + return nil, fmt.Errorf("error creating executor for %q: %v", name, err) + } executor.Depends = depends([]string{driverTaskName}) return []wfapi.DAGTask{*driver, *executor}, nil case *pipelinespec.PipelineDeploymentConfig_ExecutorSpec_Importer: @@ -477,6 +482,7 @@ type dagDriverInputs struct { parentDagID string // parent DAG execution ID. optional, the root DAG does not have parent component string // input placeholder for component spec task string // optional, the root DAG does not have task spec. + taskName string // optional, the name of the task, used for input resolving runtimeConfig *pipelinespec.PipelineJob_RuntimeConfig // optional, only root DAG needs this iterationIndex string // optional, iterator passes iteration index to iteration tasks } @@ -520,6 +526,13 @@ func (c *workflowCompiler) dagDriverTask(name string, inputs dagDriverInputs) (* Value: wfapi.AnyStringPtr(inputs.task), }) } + + if inputs.taskName != "" && inputs.taskName != "iteration-item" { + params = append(params, wfapi.Parameter{ + Name: paramTaskName, + Value: wfapi.AnyStringPtr(inputs.taskName), + }) + } t := &wfapi.DAGTask{ Name: name, Template: c.addDAGDriverTemplate(), @@ -550,6 +563,7 @@ func (c *workflowCompiler) addDAGDriverTemplate() string { "--dag_execution_id", inputValue(paramParentDagID), "--component", inputValue(paramComponent), "--task", inputValue(paramTask), + "--task_name", inputValue(paramTaskName), "--runtime_config", inputValue(paramRuntimeConfig), "--iteration_index", inputValue(paramIterationIndex), "--execution_id_path", outputPath(paramExecutionID), @@ -581,6 +595,7 @@ func (c *workflowCompiler) addDAGDriverTemplate() string { {Name: paramComponent}, // Required. 
{Name: paramRuntimeConfig, Default: wfapi.AnyStringPtr("")}, {Name: paramTask, Default: wfapi.AnyStringPtr("")}, + {Name: paramTaskName, Default: wfapi.AnyStringPtr("")}, {Name: paramParentDagID, Default: wfapi.AnyStringPtr("0")}, {Name: paramIterationIndex, Default: wfapi.AnyStringPtr("-1")}, {Name: paramDriverType, Default: wfapi.AnyStringPtr("DAG")}, @@ -638,8 +653,6 @@ func addImplicitDependencies(dagSpec *pipelinespec.DagSpec) error { if err := addDep(input.GetTaskOutputParameter().GetProducerTask()); err != nil { return wrap(err) } - case *pipelinespec.TaskInputsSpec_InputParameterSpec_TaskFinalStatus_: - return wrap(fmt.Errorf("task final status not supported yet")) default: // other parameter input types do not introduce implicit dependencies } diff --git a/backend/src/v2/compiler/argocompiler/testdata/create_mount_delete_dynamic_pvc.yaml b/backend/src/v2/compiler/argocompiler/testdata/create_mount_delete_dynamic_pvc.yaml deleted file mode 100644 index 59d07e10a1a..00000000000 --- a/backend/src/v2/compiler/argocompiler/testdata/create_mount_delete_dynamic_pvc.yaml +++ /dev/null @@ -1,423 +0,0 @@ -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - creationTimestamp: null - generateName: my-pipeline- -spec: - arguments: - parameters: - - name: kubernetes-comp-comp - value: '{"pvcMount":[{"mountPath":"/data","taskOutputParameter":{"outputParameterKey":"name","producerTask":"createpvc"}}]}' - - name: components-b34273359995b3746ecf1bb58ac4bd6c54d47b6fdc35b013bb7962946f322a19 - value: '{"executorLabel":"exec-comp"}' - - name: implementations-b34273359995b3746ecf1bb58ac4bd6c54d47b6fdc35b013bb7962946f322a19 - value: '{"args":["--executor_input","{{$}}","--function_to_execute","comp"],"command":["sh","-c","\nif - ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 - -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 - python3 -m pip install --quiet --no-warn-script-location ''kfp==2.0.0-beta.16'' - \u0026\u0026 \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d) printf - \"%s\" \"$0\" \u003e \"$program_path/ephemeral_component.py\" python3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\" - ","\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import - *\n\ndef comp():\n pass\n\n"],"image":"python:3.9"}' - - name: kubernetes-comp-comp-2 - value: '{"pvcMount":[{"mountPath":"/reused_data","taskOutputParameter":{"outputParameterKey":"name","producerTask":"createpvc"}}]}' - - name: components-98f254581598234b59377784d6cbf209de79e0bcda8013fe4c4397b5d3a26767 - value: '{"executorLabel":"exec-createpvc","inputDefinitions":{"parameters":{"access_modes":{"parameterType":"LIST"},"annotations":{"isOptional":true,"parameterType":"STRUCT"},"pvc_name":{"isOptional":true,"parameterType":"STRING"},"pvc_name_suffix":{"isOptional":true,"parameterType":"STRING"},"size":{"parameterType":"STRING"},"storage_class_name":{"defaultValue":"","isOptional":true,"parameterType":"STRING"},"volume_name":{"isOptional":true,"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"name":{"parameterType":"STRING"}}}}' - - name: implementations-98f254581598234b59377784d6cbf209de79e0bcda8013fe4c4397b5d3a26767 - value: '{"image":"argostub/createpvc"}' - - name: components-ecfc655dce17b0d317707d37fc226fb7de858cc93d45916945122484a13ef725 - value: '{"executorLabel":"exec-deletepvc","inputDefinitions":{"parameters":{"pvc_name":{"parameterType":"STRING"}}}}' - - name: 
implementations-ecfc655dce17b0d317707d37fc226fb7de858cc93d45916945122484a13ef725 - value: '{"image":"argostub/deletepvc"}' - - name: components-root - value: '{"dag":{"tasks":{"comp":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-comp"},"dependentTasks":["createpvc"],"taskInfo":{"name":"comp"}},"comp-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-comp-2"},"dependentTasks":["comp","createpvc"],"taskInfo":{"name":"comp-2"}},"createpvc":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-createpvc"},"inputs":{"parameters":{"access_modes":{"runtimeValue":{"constant":["ReadWriteOnce"]}},"pvc_name_suffix":{"runtimeValue":{"constant":"-my-pvc"}},"size":{"runtimeValue":{"constant":"5Gi"}},"storage_class_name":{"runtimeValue":{"constant":"standard"}}}},"taskInfo":{"name":"createpvc"}},"deletepvc":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-deletepvc"},"dependentTasks":["comp-2","createpvc"],"inputs":{"parameters":{"pvc_name":{"taskOutputParameter":{"outputParameterKey":"name","producerTask":"createpvc"}}}},"taskInfo":{"name":"deletepvc"}}}}}' - entrypoint: entrypoint - podMetadata: - annotations: - pipelines.kubeflow.org/v2_component: "true" - labels: - pipelines.kubeflow.org/v2_component: "true" - serviceAccountName: pipeline-runner - templates: - - container: - args: - - --type - - CONTAINER - - --pipeline_name - - my-pipeline - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --container - - '{{inputs.parameters.container}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --cached_decision_path - - '{{outputs.parameters.cached-decision.path}}' - - --pod_spec_patch_path - - '{{outputs.parameters.pod-spec-patch.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --kubernetes_config - - '{{inputs.parameters.kubernetes-config}}' - - --http_proxy - - '' - - --https_proxy - - '' - - --no_proxy - - '' - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - command: - - driver - image: ghcr.io/kubeflow/kfp-driver - name: "" - env: - - name: ML_PIPELINE_SERVICE_HOST - value: ml-pipeline.kubeflow.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: '8887' - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - name: task - - name: container - - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: "" - name: kubernetes-config - metadata: {} - name: system-container-driver - outputs: - parameters: - - name: pod-spec-patch - valueFrom: - default: "" - path: /tmp/outputs/pod-spec-patch - - default: "false" - name: cached-decision - valueFrom: - default: "false" - path: /tmp/outputs/cached-decision - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: pod-spec-patch - value: '{{inputs.parameters.pod-spec-patch}}' - name: executor - template: system-container-impl - when: '{{inputs.parameters.cached-decision}} != true' - inputs: - parameters: - - name: pod-spec-patch - - default: "false" - name: cached-decision - metadata: {} 
- name: system-container-executor - outputs: {} - - container: - command: - - should-be-overridden-during-runtime - env: - - name: KFP_POD_NAME - valueFrom: - fieldRef: - fieldPath: metadata.name - - name: KFP_POD_UID - valueFrom: - fieldRef: - fieldPath: metadata.uid - - name: ML_PIPELINE_SERVICE_HOST - value: ml-pipeline.kubeflow.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: '8887' - envFrom: - - configMapRef: - name: metadata-grpc-configmap - optional: true - image: gcr.io/ml-pipeline/should-be-overridden-during-runtime - name: "" - resources: {} - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - - mountPath: /gcs - name: gcs-scratch - - mountPath: /s3 - name: s3-scratch - - mountPath: /minio - name: minio-scratch - - mountPath: /.local - name: dot-local-scratch - - mountPath: /.cache - name: dot-cache-scratch - - mountPath: /.config - name: dot-config-scratch - initContainers: - - args: - - --copy - - /kfp-launcher/launch - command: - - launcher-v2 - image: ghcr.io/kubeflow/kfp-launcher - name: kfp-launcher - resources: - limits: - cpu: 500m - memory: 128Mi - requests: - cpu: 100m - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - inputs: - parameters: - - name: pod-spec-patch - metadata: {} - name: system-container-impl - outputs: {} - podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' - volumes: - - emptyDir: {} - name: kfp-launcher - - emptyDir: {} - name: gcs-scratch - - emptyDir: {} - name: s3-scratch - - emptyDir: {} - name: minio-scratch - - emptyDir: {} - name: dot-local-scratch - - emptyDir: {} - name: dot-cache-scratch - - emptyDir: {} - name: dot-config-scratch - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-b34273359995b3746ecf1bb58ac4bd6c54d47b6fdc35b013bb7962946f322a19}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-comp"},"dependentTasks":["createpvc"],"taskInfo":{"name":"comp"}}' - - name: container - value: '{{workflow.parameters.implementations-b34273359995b3746ecf1bb58ac4bd6c54d47b6fdc35b013bb7962946f322a19}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: kubernetes-config - value: '{{workflow.parameters.kubernetes-comp-comp}}' - depends: createpvc.Succeeded - name: comp-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.comp-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.comp-driver.outputs.parameters.cached-decision}}' - depends: comp-driver.Succeeded - name: comp - template: system-container-executor - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-b34273359995b3746ecf1bb58ac4bd6c54d47b6fdc35b013bb7962946f322a19}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-comp-2"},"dependentTasks":["comp","createpvc"],"taskInfo":{"name":"comp-2"}}' - - name: container - value: '{{workflow.parameters.implementations-b34273359995b3746ecf1bb58ac4bd6c54d47b6fdc35b013bb7962946f322a19}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: kubernetes-config - value: '{{workflow.parameters.kubernetes-comp-comp-2}}' - depends: comp.Succeeded && createpvc.Succeeded - name: comp-2-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.comp-2-driver.outputs.parameters.pod-spec-patch}}' - - 
default: "false" - name: cached-decision - value: '{{tasks.comp-2-driver.outputs.parameters.cached-decision}}' - depends: comp-2-driver.Succeeded - name: comp-2 - template: system-container-executor - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-98f254581598234b59377784d6cbf209de79e0bcda8013fe4c4397b5d3a26767}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-createpvc"},"inputs":{"parameters":{"access_modes":{"runtimeValue":{"constant":["ReadWriteOnce"]}},"pvc_name_suffix":{"runtimeValue":{"constant":"-my-pvc"}},"size":{"runtimeValue":{"constant":"5Gi"}},"storage_class_name":{"runtimeValue":{"constant":"standard"}}}},"taskInfo":{"name":"createpvc"}}' - - name: container - value: '{{workflow.parameters.implementations-98f254581598234b59377784d6cbf209de79e0bcda8013fe4c4397b5d3a26767}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: createpvc - template: system-container-driver - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-ecfc655dce17b0d317707d37fc226fb7de858cc93d45916945122484a13ef725}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-deletepvc"},"dependentTasks":["comp-2","createpvc"],"inputs":{"parameters":{"pvc_name":{"taskOutputParameter":{"outputParameterKey":"name","producerTask":"createpvc"}}}},"taskInfo":{"name":"deletepvc"}}' - - name: container - value: '{{workflow.parameters.implementations-ecfc655dce17b0d317707d37fc226fb7de858cc93d45916945122484a13ef725}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - depends: comp-2.Succeeded && createpvc.Succeeded - name: deletepvc - template: system-container-driver - inputs: - parameters: - - name: parent-dag-id - metadata: {} - name: root - outputs: {} - - container: - args: - - --type - - '{{inputs.parameters.driver-type}}' - - --pipeline_name - - my-pipeline - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --runtime_config - - '{{inputs.parameters.runtime-config}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' - - --iteration_count_path - - '{{outputs.parameters.iteration-count.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --http_proxy - - '' - - --https_proxy - - '' - - --no_proxy - - '' - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - command: - - driver - image: ghcr.io/kubeflow/kfp-driver - name: "" - env: - - name: ML_PIPELINE_SERVICE_HOST - value: ml-pipeline.kubeflow.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: '8887' - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - default: "" - name: runtime-config - - default: "" - name: task - - default: "0" - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: DAG - name: driver-type - metadata: {} - name: system-dag-driver - outputs: - parameters: - - name: execution-id - valueFrom: - path: /tmp/outputs/execution-id - - name: 
iteration-count - valueFrom: - default: "0" - path: /tmp/outputs/iteration-count - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-root}}' - - name: runtime-config - value: '{}' - - name: driver-type - value: ROOT_DAG - name: root-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' - - name: condition - value: "" - depends: root-driver.Succeeded - name: root - template: root - inputs: {} - metadata: {} - name: entrypoint - outputs: {} -status: - finishedAt: null - startedAt: null diff --git a/backend/src/v2/compiler/argocompiler/testdata/create_pod_metadata.yaml b/backend/src/v2/compiler/argocompiler/testdata/create_pod_metadata.yaml deleted file mode 100644 index f87604f291e..00000000000 --- a/backend/src/v2/compiler/argocompiler/testdata/create_pod_metadata.yaml +++ /dev/null @@ -1,366 +0,0 @@ -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - creationTimestamp: null - generateName: hello-world- -spec: - arguments: - parameters: - - name: kubernetes-comp-hello-world - value: '{"podMetadata":{"annotations":{"experiment_id":"234567","run_id":"123456"},"labels":{"kubeflow.com/common":"test","kubeflow.com/kfp":"pipeline-node"}}}' - - name: components-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7 - value: '{"executorLabel":"exec-hello-world","inputDefinitions":{"parameters":{"text":{"type":"STRING"}}}}' - - name: implementations-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7 - value: '{"args":["--text","{{$.inputs.parameters[''text'']}}"],"command":["sh","-ec","program_path=$(mktemp)\nprintf - \"%s\" \"$0\" \u003e \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n","def - hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser - = argparse.ArgumentParser(prog=''Hello world'', description='''')\n_parser.add_argument(\"--text\", - dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args - = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n"],"image":"python:3.9"}' - - name: components-root - value: '{"dag":{"tasks":{"hello-world":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"hello-world"}}}},"inputDefinitions":{"parameters":{"text":{"type":"STRING"}}}}' - entrypoint: entrypoint - podMetadata: - annotations: - pipelines.kubeflow.org/v2_component: "true" - labels: - pipelines.kubeflow.org/v2_component: "true" - serviceAccountName: pipeline-runner - templates: - - container: - args: - - --type - - CONTAINER - - --pipeline_name - - namespace/n1/pipeline/hello-world - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --container - - '{{inputs.parameters.container}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --cached_decision_path - - '{{outputs.parameters.cached-decision.path}}' - - --pod_spec_patch_path - - '{{outputs.parameters.pod-spec-patch.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --kubernetes_config - - 
'{{inputs.parameters.kubernetes-config}}' - - --http_proxy - - '' - - --https_proxy - - '' - - --no_proxy - - '' - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - command: - - driver - image: ghcr.io/kubeflow/kfp-driver - name: "" - env: - - name: ML_PIPELINE_SERVICE_HOST - value: ml-pipeline.kubeflow.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: '8887' - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - name: task - - name: container - - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: "" - name: kubernetes-config - metadata: {} - name: system-container-driver - outputs: - parameters: - - name: pod-spec-patch - valueFrom: - default: "" - path: /tmp/outputs/pod-spec-patch - - default: "false" - name: cached-decision - valueFrom: - default: "false" - path: /tmp/outputs/cached-decision - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: pod-spec-patch - value: '{{inputs.parameters.pod-spec-patch}}' - name: executor - template: system-container-impl - when: '{{inputs.parameters.cached-decision}} != true' - inputs: - parameters: - - name: pod-spec-patch - - default: "false" - name: cached-decision - metadata: {} - name: system-container-executor - outputs: {} - - container: - command: - - should-be-overridden-during-runtime - env: - - name: KFP_POD_NAME - valueFrom: - fieldRef: - fieldPath: metadata.name - - name: KFP_POD_UID - valueFrom: - fieldRef: - fieldPath: metadata.uid - - name: ML_PIPELINE_SERVICE_HOST - value: ml-pipeline.kubeflow.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: '8887' - envFrom: - - configMapRef: - name: metadata-grpc-configmap - optional: true - image: gcr.io/ml-pipeline/should-be-overridden-during-runtime - name: "" - resources: {} - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - - mountPath: /gcs - name: gcs-scratch - - mountPath: /s3 - name: s3-scratch - - mountPath: /minio - name: minio-scratch - - mountPath: /.local - name: dot-local-scratch - - mountPath: /.cache - name: dot-cache-scratch - - mountPath: /.config - name: dot-config-scratch - initContainers: - - args: - - --copy - - /kfp-launcher/launch - command: - - launcher-v2 - image: ghcr.io/kubeflow/kfp-launcher - name: kfp-launcher - resources: - limits: - cpu: 500m - memory: 128Mi - requests: - cpu: 100m - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - inputs: - parameters: - - name: pod-spec-patch - metadata: - annotations: - experiment_id: "234567" - run_id: "123456" - labels: - kubeflow.com/common: test - kubeflow.com/kfp: pipeline-node - name: system-container-impl - outputs: {} - podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' - volumes: - - emptyDir: {} - name: kfp-launcher - - emptyDir: {} - name: gcs-scratch - - emptyDir: {} - name: s3-scratch - - emptyDir: {} - name: minio-scratch - - emptyDir: {} - name: dot-local-scratch - - emptyDir: {} - name: dot-cache-scratch - - emptyDir: {} - name: dot-config-scratch - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7}}' - - name: task - value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"hello-world"}}' - - name: container - value: '{{workflow.parameters.implementations-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: kubernetes-config - value: '{{workflow.parameters.kubernetes-comp-hello-world}}' - name: hello-world-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.hello-world-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.hello-world-driver.outputs.parameters.cached-decision}}' - depends: hello-world-driver.Succeeded - name: hello-world - template: system-container-executor - inputs: - parameters: - - name: parent-dag-id - metadata: {} - name: root - outputs: {} - - container: - args: - - --type - - '{{inputs.parameters.driver-type}}' - - --pipeline_name - - namespace/n1/pipeline/hello-world - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --runtime_config - - '{{inputs.parameters.runtime-config}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' - - --iteration_count_path - - '{{outputs.parameters.iteration-count.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --http_proxy - - '' - - --https_proxy - - '' - - --no_proxy - - '' - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - command: - - driver - image: ghcr.io/kubeflow/kfp-driver - name: "" - env: - - name: ML_PIPELINE_SERVICE_HOST - value: ml-pipeline.kubeflow.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: '8887' - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - default: "" - name: runtime-config - - default: "" - name: task - - default: "0" - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: DAG - name: driver-type - metadata: {} - name: system-dag-driver - outputs: - parameters: - - name: execution-id - valueFrom: - path: /tmp/outputs/execution-id - - name: iteration-count - valueFrom: - default: "0" - path: /tmp/outputs/iteration-count - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-root}}' - - name: runtime-config - value: '{"parameters":{"text":{"stringValue":"hi there"}}}' - - name: driver-type - value: ROOT_DAG - name: root-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' - - name: condition - value: "" - depends: root-driver.Succeeded - name: root - template: root - inputs: {} - metadata: {} - name: entrypoint - outputs: {} -status: - finishedAt: null - startedAt: null diff --git a/backend/src/v2/compiler/argocompiler/testdata/exit_handler.yaml 
b/backend/src/v2/compiler/argocompiler/testdata/exit_handler.yaml deleted file mode 100644 index a23451414ba..00000000000 --- a/backend/src/v2/compiler/argocompiler/testdata/exit_handler.yaml +++ /dev/null @@ -1,463 +0,0 @@ -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - creationTimestamp: null - generateName: pipeline-with-exit-handler- -spec: - arguments: - parameters: - - name: components-8444a6bac7a3d81cc54291a13166a74231d98f9a98861815f15a055edde30ed8 - value: '{"executorLabel":"exec-fail-op","inputDefinitions":{"parameters":{"message":{"type":"STRING"}}}}' - - name: implementations-8444a6bac7a3d81cc54291a13166a74231d98f9a98861815f15a055edde30ed8 - value: '{"args":["--executor_input","{{$}}","--function_to_execute","fail_op"],"command":["sh","-c","\nif - ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 - -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 - python3 -m pip install --quiet --no-warn-script-location ''kfp==1.8.22'' - \u0026\u0026 \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\nprintf - \"%s\" \"$0\" \u003e \"$program_path/ephemeral_component.py\"\npython3 -m - kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport - kfp\nfrom kfp.v2 import dsl\nfrom kfp.v2.dsl import *\nfrom typing import - *\n\ndef fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n print(message)\n sys.exit(1)\n\n"],"image":"python:3.7"}' - - name: components-f192dae3a3c4616f7637be7d0414bcffbff11a78dc03bf428f05490caa678f8a - value: '{"executorLabel":"exec-print-op-2","inputDefinitions":{"parameters":{"message":{"type":"STRING"}}}}' - - name: implementations-f192dae3a3c4616f7637be7d0414bcffbff11a78dc03bf428f05490caa678f8a - value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_op"],"command":["sh","-c","\nif - ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 - -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 - python3 -m pip install --quiet --no-warn-script-location ''kfp==1.8.22'' - \u0026\u0026 \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\nprintf - \"%s\" \"$0\" \u003e \"$program_path/ephemeral_component.py\"\npython3 -m - kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport - kfp\nfrom kfp.v2 import dsl\nfrom kfp.v2.dsl import *\nfrom typing import - *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n print(message)\n\n"],"image":"python:3.7"}' - - name: components-comp-exit-handler-1 - value: '{"dag":{"tasks":{"fail-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-fail-op"},"inputs":{"parameters":{"message":{"runtimeValue":{"constantValue":{"stringValue":"Task - failed."}}}}},"taskInfo":{"name":"fail-op"}},"print-op-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"inputs":{"parameters":{"message":{"componentInputParameter":"pipelineparam--message"}}},"taskInfo":{"name":"print-op-2"}}}},"inputDefinitions":{"parameters":{"pipelineparam--message":{"type":"STRING"}}}}' - - name: components-root - value: '{"dag":{"tasks":{"exit-handler-1":{"componentRef":{"name":"comp-exit-handler-1"},"inputs":{"parameters":{"pipelineparam--message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"exit-handler-1"}},"print-op":{"componentRef":{"name":"comp-print-op"},"dependentTasks":["exit-handler-1"],"inputs":{"parameters":{"message":{"runtimeValue":{"constantValue":{"stringValue":"Exit - handler has worked!"}}}}},"taskInfo":{"name":"print-op"},"triggerPolicy":{"strategy":"ALL_UPSTREAM_TASKS_COMPLETED"}}}},"inputDefinitions":{"parameters":{"message":{"type":"STRING"}}}}' - entrypoint: entrypoint - podMetadata: - annotations: - pipelines.kubeflow.org/v2_component: "true" - labels: - pipelines.kubeflow.org/v2_component: "true" - serviceAccountName: pipeline-runner - templates: - - container: - args: - - --type - - CONTAINER - - --pipeline_name - - pipeline-with-exit-handler - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --container - - '{{inputs.parameters.container}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --cached_decision_path - - '{{outputs.parameters.cached-decision.path}}' - - --pod_spec_patch_path - - '{{outputs.parameters.pod-spec-patch.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --kubernetes_config - - '{{inputs.parameters.kubernetes-config}}' - - --http_proxy - - '' - - --https_proxy - - '' - - --no_proxy - - '' - - --mlPipelineServiceTLSEnabled - - 'false' - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - command: - - driver - env: - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - image: ghcr.io/kubeflow/kfp-driver - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - name: task - - name: container - - name: 
parent-dag-id - - default: "-1" - name: iteration-index - - default: "" - name: kubernetes-config - metadata: {} - name: system-container-driver - outputs: - parameters: - - name: pod-spec-patch - valueFrom: - default: "" - path: /tmp/outputs/pod-spec-patch - - default: "false" - name: cached-decision - valueFrom: - default: "false" - path: /tmp/outputs/cached-decision - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: pod-spec-patch - value: '{{inputs.parameters.pod-spec-patch}}' - name: executor - template: system-container-impl - when: '{{inputs.parameters.cached-decision}} != true' - inputs: - parameters: - - name: pod-spec-patch - - default: "false" - name: cached-decision - metadata: {} - name: system-container-executor - outputs: {} - - container: - command: - - should-be-overridden-during-runtime - env: - - name: KFP_POD_NAME - valueFrom: - fieldRef: - fieldPath: metadata.name - - name: KFP_POD_UID - valueFrom: - fieldRef: - fieldPath: metadata.uid - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - envFrom: - - configMapRef: - name: metadata-grpc-configmap - optional: true - image: gcr.io/ml-pipeline/should-be-overridden-during-runtime - name: "" - resources: {} - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - - mountPath: /gcs - name: gcs-scratch - - mountPath: /s3 - name: s3-scratch - - mountPath: /minio - name: minio-scratch - - mountPath: /.local - name: dot-local-scratch - - mountPath: /.cache - name: dot-cache-scratch - - mountPath: /.config - name: dot-config-scratch - initContainers: - - args: - - --copy - - /kfp-launcher/launch - command: - - launcher-v2 - image: ghcr.io/kubeflow/kfp-launcher - name: kfp-launcher - resources: - limits: - cpu: 500m - memory: 128Mi - requests: - cpu: 100m - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - inputs: - parameters: - - name: pod-spec-patch - metadata: {} - name: system-container-impl - outputs: {} - podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' - volumes: - - emptyDir: {} - name: kfp-launcher - - emptyDir: {} - name: gcs-scratch - - emptyDir: {} - name: s3-scratch - - emptyDir: {} - name: minio-scratch - - emptyDir: {} - name: dot-local-scratch - - emptyDir: {} - name: dot-cache-scratch - - emptyDir: {} - name: dot-config-scratch - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-8444a6bac7a3d81cc54291a13166a74231d98f9a98861815f15a055edde30ed8}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-fail-op"},"inputs":{"parameters":{"message":{"runtimeValue":{"constantValue":{"stringValue":"Task - failed."}}}}},"taskInfo":{"name":"fail-op"}}' - - name: container - value: '{{workflow.parameters.implementations-8444a6bac7a3d81cc54291a13166a74231d98f9a98861815f15a055edde30ed8}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: fail-op-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.fail-op-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.fail-op-driver.outputs.parameters.cached-decision}}' - depends: fail-op-driver.Succeeded - name: fail-op - template: system-container-executor - - arguments: - parameters: - - name: component - value: 
'{{workflow.parameters.components-f192dae3a3c4616f7637be7d0414bcffbff11a78dc03bf428f05490caa678f8a}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"inputs":{"parameters":{"message":{"componentInputParameter":"pipelineparam--message"}}},"taskInfo":{"name":"print-op-2"}}' - - name: container - value: '{{workflow.parameters.implementations-f192dae3a3c4616f7637be7d0414bcffbff11a78dc03bf428f05490caa678f8a}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: print-op-2-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.print-op-2-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.print-op-2-driver.outputs.parameters.cached-decision}}' - depends: print-op-2-driver.Succeeded - name: print-op-2 - template: system-container-executor - inputs: - parameters: - - name: parent-dag-id - metadata: {} - name: comp-exit-handler-1 - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-f192dae3a3c4616f7637be7d0414bcffbff11a78dc03bf428f05490caa678f8a}}' - - name: task - value: '{"componentRef":{"name":"comp-print-op"},"dependentTasks":["exit-handler-1"],"inputs":{"parameters":{"message":{"runtimeValue":{"constantValue":{"stringValue":"Exit - handler has worked!"}}}}},"taskInfo":{"name":"print-op"},"triggerPolicy":{"strategy":"ALL_UPSTREAM_TASKS_COMPLETED"}}' - - name: container - value: '{{workflow.parameters.implementations-f192dae3a3c4616f7637be7d0414bcffbff11a78dc03bf428f05490caa678f8a}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: print-op-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.print-op-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.print-op-driver.outputs.parameters.cached-decision}}' - depends: print-op-driver.Succeeded - name: print-op - template: system-container-executor - inputs: - parameters: - - name: parent-dag-id - metadata: {} - name: exit-hook-root-print-op - outputs: {} - - container: - args: - - --type - - '{{inputs.parameters.driver-type}}' - - --pipeline_name - - pipeline-with-exit-handler - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --runtime_config - - '{{inputs.parameters.runtime-config}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' - - --iteration_count_path - - '{{outputs.parameters.iteration-count.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --http_proxy - - '' - - --https_proxy - - '' - - --no_proxy - - '' - - --mlPipelineServiceTLSEnabled - - 'false' - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - command: - - driver - env: - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - image: ghcr.io/kubeflow/kfp-driver - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 
64Mi - inputs: - parameters: - - name: component - - default: "" - name: runtime-config - - default: "" - name: task - - default: "0" - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: DAG - name: driver-type - metadata: {} - name: system-dag-driver - outputs: - parameters: - - name: execution-id - valueFrom: - path: /tmp/outputs/execution-id - - name: iteration-count - valueFrom: - default: "0" - path: /tmp/outputs/iteration-count - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-exit-handler-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-exit-handler-1"},"inputs":{"parameters":{"pipelineparam--message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"exit-handler-1"}}' - name: exit-handler-1-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.exit-handler-1-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.exit-handler-1-driver.outputs.parameters.condition}}' - depends: exit-handler-1-driver.Succeeded - hooks: - exit: - arguments: - parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - template: exit-hook-root-print-op - name: exit-handler-1 - template: comp-exit-handler-1 - inputs: - parameters: - - name: parent-dag-id - metadata: {} - name: root - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-root}}' - - name: runtime-config - value: '{"parameters":{"message":{"stringValue":"Hello World!"}}}' - - name: driver-type - value: ROOT_DAG - name: root-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' - - name: condition - value: "" - depends: root-driver.Succeeded - name: root - template: root - inputs: {} - metadata: {} - name: entrypoint - outputs: {} -status: - finishedAt: null - startedAt: null diff --git a/backend/src/v2/compiler/argocompiler/testdata/hello_world.yaml b/backend/src/v2/compiler/argocompiler/testdata/hello_world.yaml deleted file mode 100644 index 67e94900623..00000000000 --- a/backend/src/v2/compiler/argocompiler/testdata/hello_world.yaml +++ /dev/null @@ -1,356 +0,0 @@ -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - creationTimestamp: null - generateName: hello-world- -spec: - arguments: - parameters: - - name: components-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7 - value: '{"executorLabel":"exec-hello-world","inputDefinitions":{"parameters":{"text":{"type":"STRING"}}}}' - - name: implementations-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7 - value: '{"args":["--text","{{$.inputs.parameters[''text'']}}"],"command":["sh","-ec","program_path=$(mktemp)\nprintf - \"%s\" \"$0\" \u003e \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n","def - hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser - = argparse.ArgumentParser(prog=''Hello world'', description='''')\n_parser.add_argument(\"--text\", - dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args - = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n"],"image":"python:3.9"}' - - name: components-root - value: 
'{"dag":{"tasks":{"hello-world":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"hello-world"}}}},"inputDefinitions":{"parameters":{"text":{"type":"STRING"}}}}' - entrypoint: entrypoint - podMetadata: - annotations: - pipelines.kubeflow.org/v2_component: "true" - labels: - pipelines.kubeflow.org/v2_component: "true" - serviceAccountName: pipeline-runner - templates: - - container: - args: - - --type - - CONTAINER - - --pipeline_name - - namespace/n1/pipeline/hello-world - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --container - - '{{inputs.parameters.container}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --cached_decision_path - - '{{outputs.parameters.cached-decision.path}}' - - --pod_spec_patch_path - - '{{outputs.parameters.pod-spec-patch.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --kubernetes_config - - '{{inputs.parameters.kubernetes-config}}' - - --http_proxy - - '' - - --https_proxy - - '' - - --no_proxy - - '' - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - command: - - driver - image: ghcr.io/kubeflow/kfp-driver - env: - - name: ML_PIPELINE_SERVICE_HOST - value: ml-pipeline.kubeflow.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: '8887' - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - name: task - - name: container - - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: "" - name: kubernetes-config - metadata: {} - name: system-container-driver - outputs: - parameters: - - name: pod-spec-patch - valueFrom: - default: "" - path: /tmp/outputs/pod-spec-patch - - default: "false" - name: cached-decision - valueFrom: - default: "false" - path: /tmp/outputs/cached-decision - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: pod-spec-patch - value: '{{inputs.parameters.pod-spec-patch}}' - name: executor - template: system-container-impl - when: '{{inputs.parameters.cached-decision}} != true' - inputs: - parameters: - - name: pod-spec-patch - - default: "false" - name: cached-decision - metadata: {} - name: system-container-executor - outputs: {} - - container: - command: - - should-be-overridden-during-runtime - env: - - name: KFP_POD_NAME - valueFrom: - fieldRef: - fieldPath: metadata.name - - name: KFP_POD_UID - valueFrom: - fieldRef: - fieldPath: metadata.uid - - name: ML_PIPELINE_SERVICE_HOST - value: ml-pipeline.kubeflow.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: '8887' - envFrom: - - configMapRef: - name: metadata-grpc-configmap - optional: true - image: gcr.io/ml-pipeline/should-be-overridden-during-runtime - name: "" - resources: {} - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - - mountPath: /gcs - name: gcs-scratch - - mountPath: /s3 - name: s3-scratch - - mountPath: /minio - name: minio-scratch - - mountPath: /.local - name: dot-local-scratch - - mountPath: 
/.cache - name: dot-cache-scratch - - mountPath: /.config - name: dot-config-scratch - initContainers: - - args: - - --copy - - /kfp-launcher/launch - command: - - launcher-v2 - image: ghcr.io/kubeflow/kfp-launcher - name: kfp-launcher - resources: - limits: - cpu: 500m - memory: 128Mi - requests: - cpu: 100m - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - inputs: - parameters: - - name: pod-spec-patch - metadata: {} - name: system-container-impl - outputs: {} - podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' - volumes: - - emptyDir: {} - name: kfp-launcher - - emptyDir: {} - name: gcs-scratch - - emptyDir: {} - name: s3-scratch - - emptyDir: {} - name: minio-scratch - - emptyDir: {} - name: dot-local-scratch - - emptyDir: {} - name: dot-cache-scratch - - emptyDir: {} - name: dot-config-scratch - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"hello-world"}}' - - name: container - value: '{{workflow.parameters.implementations-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: hello-world-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.hello-world-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.hello-world-driver.outputs.parameters.cached-decision}}' - depends: hello-world-driver.Succeeded - name: hello-world - template: system-container-executor - inputs: - parameters: - - name: parent-dag-id - metadata: {} - name: root - outputs: {} - - container: - args: - - --type - - '{{inputs.parameters.driver-type}}' - - --pipeline_name - - namespace/n1/pipeline/hello-world - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --runtime_config - - '{{inputs.parameters.runtime-config}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' - - --iteration_count_path - - '{{outputs.parameters.iteration-count.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --http_proxy - - '' - - --https_proxy - - '' - - --no_proxy - - '' - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - env: - - name: ML_PIPELINE_SERVICE_HOST - value: ml-pipeline.kubeflow.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: '8887' - command: - - driver - image: ghcr.io/kubeflow/kfp-driver - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - default: "" - name: runtime-config - - default: "" - name: task - - default: "0" - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: DAG - name: driver-type - metadata: {} - name: system-dag-driver - outputs: - parameters: - - name: 
execution-id - valueFrom: - path: /tmp/outputs/execution-id - - name: iteration-count - valueFrom: - default: "0" - path: /tmp/outputs/iteration-count - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-root}}' - - name: runtime-config - value: '{"parameters":{"text":{"stringValue":"hi there"}}}' - - name: driver-type - value: ROOT_DAG - name: root-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' - - name: condition - value: "" - depends: root-driver.Succeeded - name: root - template: root - inputs: {} - metadata: {} - name: entrypoint - outputs: {} -status: - finishedAt: null - startedAt: null diff --git a/backend/src/v2/compiler/argocompiler/testdata/hello_world_cache_disabled.yaml b/backend/src/v2/compiler/argocompiler/testdata/hello_world_cache_disabled.yaml deleted file mode 100644 index b9f1df19f50..00000000000 --- a/backend/src/v2/compiler/argocompiler/testdata/hello_world_cache_disabled.yaml +++ /dev/null @@ -1,359 +0,0 @@ -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - creationTimestamp: null - generateName: hello-world- -spec: - arguments: - parameters: - - name: components-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7 - value: '{"executorLabel":"exec-hello-world","inputDefinitions":{"parameters":{"text":{"type":"STRING"}}}}' - - name: implementations-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7 - value: '{"args":["--text","{{$.inputs.parameters[''text'']}}"],"command":["sh","-ec","program_path=$(mktemp)\nprintf - \"%s\" \"$0\" \u003e \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n","def - hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser - = argparse.ArgumentParser(prog=''Hello world'', description='''')\n_parser.add_argument(\"--text\", - dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args - = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n"],"image":"python:3.9"}' - - name: components-root - value: '{"dag":{"tasks":{"hello-world":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"hello-world"}}}},"inputDefinitions":{"parameters":{"text":{"type":"STRING"}}}}' - entrypoint: entrypoint - podMetadata: - annotations: - pipelines.kubeflow.org/v2_component: "true" - labels: - pipelines.kubeflow.org/v2_component: "true" - serviceAccountName: pipeline-runner - templates: - - container: - args: - - --type - - CONTAINER - - --pipeline_name - - namespace/n1/pipeline/hello-world - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --container - - '{{inputs.parameters.container}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --cached_decision_path - - '{{outputs.parameters.cached-decision.path}}' - - --pod_spec_patch_path - - '{{outputs.parameters.pod-spec-patch.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --kubernetes_config - - '{{inputs.parameters.kubernetes-config}}' - - --http_proxy - - '' - - --https_proxy - - '' - - 
--no_proxy - - '' - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - - --cache_disabled - command: - - driver - env: - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - image: ghcr.io/kubeflow/kfp-driver - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - name: task - - name: container - - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: "" - name: kubernetes-config - metadata: {} - name: system-container-driver - outputs: - parameters: - - name: pod-spec-patch - valueFrom: - default: "" - path: /tmp/outputs/pod-spec-patch - - default: "false" - name: cached-decision - valueFrom: - default: "false" - path: /tmp/outputs/cached-decision - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: pod-spec-patch - value: '{{inputs.parameters.pod-spec-patch}}' - name: executor - template: system-container-impl - when: '{{inputs.parameters.cached-decision}} != true' - inputs: - parameters: - - name: pod-spec-patch - - default: "false" - name: cached-decision - metadata: {} - name: system-container-executor - outputs: {} - - container: - command: - - should-be-overridden-during-runtime - env: - - name: KFP_POD_NAME - valueFrom: - fieldRef: - fieldPath: metadata.name - - name: KFP_POD_UID - valueFrom: - fieldRef: - fieldPath: metadata.uid - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - envFrom: - - configMapRef: - name: metadata-grpc-configmap - optional: true - image: gcr.io/ml-pipeline/should-be-overridden-during-runtime - name: "" - resources: {} - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - - mountPath: /gcs - name: gcs-scratch - - mountPath: /s3 - name: s3-scratch - - mountPath: /minio - name: minio-scratch - - mountPath: /.local - name: dot-local-scratch - - mountPath: /.cache - name: dot-cache-scratch - - mountPath: /.config - name: dot-config-scratch - initContainers: - - args: - - --copy - - /kfp-launcher/launch - - --cache_disabled - command: - - launcher-v2 - image: ghcr.io/kubeflow/kfp-launcher - name: kfp-launcher - resources: - limits: - cpu: 500m - memory: 128Mi - requests: - cpu: 100m - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - inputs: - parameters: - - name: pod-spec-patch - metadata: {} - name: system-container-impl - outputs: {} - podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' - volumes: - - emptyDir: {} - name: kfp-launcher - - emptyDir: {} - name: gcs-scratch - - emptyDir: {} - name: s3-scratch - - emptyDir: {} - name: minio-scratch - - emptyDir: {} - name: dot-local-scratch - - emptyDir: {} - name: dot-cache-scratch - - emptyDir: {} - name: dot-config-scratch - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"hello-world"}}' - - name: container - value: 
'{{workflow.parameters.implementations-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: hello-world-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.hello-world-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.hello-world-driver.outputs.parameters.cached-decision}}' - depends: hello-world-driver.Succeeded - name: hello-world - template: system-container-executor - inputs: - parameters: - - name: parent-dag-id - metadata: {} - name: root - outputs: {} - - container: - args: - - --type - - '{{inputs.parameters.driver-type}}' - - --pipeline_name - - namespace/n1/pipeline/hello-world - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --runtime_config - - '{{inputs.parameters.runtime-config}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' - - --iteration_count_path - - '{{outputs.parameters.iteration-count.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --http_proxy - - '' - - --https_proxy - - '' - - --no_proxy - - '' - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - - --cache_disabled - command: - - driver - env: - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - image: ghcr.io/kubeflow/kfp-driver - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - default: "" - name: runtime-config - - default: "" - name: task - - default: "0" - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: DAG - name: driver-type - metadata: {} - name: system-dag-driver - outputs: - parameters: - - name: execution-id - valueFrom: - path: /tmp/outputs/execution-id - - name: iteration-count - valueFrom: - default: "0" - path: /tmp/outputs/iteration-count - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-root}}' - - name: runtime-config - value: '{"parameters":{"text":{"stringValue":"hi there"}}}' - - name: driver-type - value: ROOT_DAG - name: root-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' - - name: condition - value: "" - depends: root-driver.Succeeded - name: root - template: root - inputs: {} - metadata: {} - name: entrypoint - outputs: {} -status: - finishedAt: null - startedAt: null diff --git a/backend/src/v2/compiler/argocompiler/testdata/hello_world_log_level.yaml b/backend/src/v2/compiler/argocompiler/testdata/hello_world_log_level.yaml deleted file mode 100644 index 8f6b7f7a6d7..00000000000 --- a/backend/src/v2/compiler/argocompiler/testdata/hello_world_log_level.yaml +++ /dev/null @@ -1,362 +0,0 @@ -apiVersion: argoproj.io/v1alpha1 -kind: Workflow 
-metadata: - creationTimestamp: null - generateName: hello-world- -spec: - arguments: - parameters: - - name: components-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7 - value: '{"executorLabel":"exec-hello-world","inputDefinitions":{"parameters":{"text":{"type":"STRING"}}}}' - - name: implementations-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7 - value: '{"args":["--text","{{$.inputs.parameters[''text'']}}"],"command":["sh","-ec","program_path=$(mktemp)\nprintf - \"%s\" \"$0\" \u003e \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n","def - hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser - = argparse.ArgumentParser(prog=''Hello world'', description='''')\n_parser.add_argument(\"--text\", - dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args - = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n"],"image":"python:3.9"}' - - name: components-root - value: '{"dag":{"tasks":{"hello-world":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"hello-world"}}}},"inputDefinitions":{"parameters":{"text":{"type":"STRING"}}}}' - entrypoint: entrypoint - podMetadata: - annotations: - pipelines.kubeflow.org/v2_component: "true" - labels: - pipelines.kubeflow.org/v2_component: "true" - serviceAccountName: pipeline-runner - templates: - - container: - args: - - --type - - CONTAINER - - --pipeline_name - - namespace/n1/pipeline/hello-world - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --container - - '{{inputs.parameters.container}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --cached_decision_path - - '{{outputs.parameters.cached-decision.path}}' - - --pod_spec_patch_path - - '{{outputs.parameters.pod-spec-patch.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --kubernetes_config - - '{{inputs.parameters.kubernetes-config}}' - - --http_proxy - - '' - - --https_proxy - - '' - - --no_proxy - - '' - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - - --log_level - - "3" - command: - - driver - image: ghcr.io/kubeflow/kfp-driver - env: - - name: ML_PIPELINE_SERVICE_HOST - value: ml-pipeline.kubeflow.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: '8887' - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - name: task - - name: container - - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: "" - name: kubernetes-config - metadata: {} - name: system-container-driver - outputs: - parameters: - - name: pod-spec-patch - valueFrom: - default: "" - path: /tmp/outputs/pod-spec-patch - - default: "false" - name: cached-decision - valueFrom: - default: "false" - path: /tmp/outputs/cached-decision - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: pod-spec-patch - value: '{{inputs.parameters.pod-spec-patch}}' - name: executor - template: 
system-container-impl - when: '{{inputs.parameters.cached-decision}} != true' - inputs: - parameters: - - name: pod-spec-patch - - default: "false" - name: cached-decision - metadata: {} - name: system-container-executor - outputs: {} - - container: - command: - - should-be-overridden-during-runtime - env: - - name: KFP_POD_NAME - valueFrom: - fieldRef: - fieldPath: metadata.name - - name: KFP_POD_UID - valueFrom: - fieldRef: - fieldPath: metadata.uid - - name: ML_PIPELINE_SERVICE_HOST - value: ml-pipeline.kubeflow.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: '8887' - envFrom: - - configMapRef: - name: metadata-grpc-configmap - optional: true - image: gcr.io/ml-pipeline/should-be-overridden-during-runtime - name: "" - resources: {} - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - - mountPath: /gcs - name: gcs-scratch - - mountPath: /s3 - name: s3-scratch - - mountPath: /minio - name: minio-scratch - - mountPath: /.local - name: dot-local-scratch - - mountPath: /.cache - name: dot-cache-scratch - - mountPath: /.config - name: dot-config-scratch - initContainers: - - args: - - --copy - - /kfp-launcher/launch - - --log_level - - "3" - command: - - launcher-v2 - image: ghcr.io/kubeflow/kfp-launcher - name: kfp-launcher - resources: - limits: - cpu: 500m - memory: 128Mi - requests: - cpu: 100m - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - inputs: - parameters: - - name: pod-spec-patch - metadata: {} - name: system-container-impl - outputs: {} - podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' - volumes: - - emptyDir: {} - name: kfp-launcher - - emptyDir: {} - name: gcs-scratch - - emptyDir: {} - name: s3-scratch - - emptyDir: {} - name: minio-scratch - - emptyDir: {} - name: dot-local-scratch - - emptyDir: {} - name: dot-cache-scratch - - emptyDir: {} - name: dot-config-scratch - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"hello-world"}}' - - name: container - value: '{{workflow.parameters.implementations-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: hello-world-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.hello-world-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.hello-world-driver.outputs.parameters.cached-decision}}' - depends: hello-world-driver.Succeeded - name: hello-world - template: system-container-executor - inputs: - parameters: - - name: parent-dag-id - metadata: {} - name: root - outputs: {} - - container: - args: - - --type - - '{{inputs.parameters.driver-type}}' - - --pipeline_name - - namespace/n1/pipeline/hello-world - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --runtime_config - - '{{inputs.parameters.runtime-config}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - 
'{{outputs.parameters.execution-id.path}}' - - --iteration_count_path - - '{{outputs.parameters.iteration-count.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --http_proxy - - '' - - --https_proxy - - '' - - --no_proxy - - '' - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - - --log_level - - "3" - env: - - name: ML_PIPELINE_SERVICE_HOST - value: ml-pipeline.kubeflow.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: '8887' - command: - - driver - image: ghcr.io/kubeflow/kfp-driver - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - default: "" - name: runtime-config - - default: "" - name: task - - default: "0" - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: DAG - name: driver-type - metadata: {} - name: system-dag-driver - outputs: - parameters: - - name: execution-id - valueFrom: - path: /tmp/outputs/execution-id - - name: iteration-count - valueFrom: - default: "0" - path: /tmp/outputs/iteration-count - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-root}}' - - name: runtime-config - value: '{"parameters":{"text":{"stringValue":"hi there"}}}' - - name: driver-type - value: ROOT_DAG - name: root-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' - - name: condition - value: "" - depends: root-driver.Succeeded - name: root - template: root - inputs: {} - metadata: {} - name: entrypoint - outputs: {} -status: - finishedAt: null - startedAt: null diff --git a/backend/src/v2/compiler/argocompiler/testdata/hello_world_run_as_user.yaml b/backend/src/v2/compiler/argocompiler/testdata/hello_world_run_as_user.yaml deleted file mode 100644 index c9164948008..00000000000 --- a/backend/src/v2/compiler/argocompiler/testdata/hello_world_run_as_user.yaml +++ /dev/null @@ -1,358 +0,0 @@ -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - creationTimestamp: null - generateName: hello-world- -spec: - arguments: - parameters: - - name: components-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7 - value: '{"executorLabel":"exec-hello-world","inputDefinitions":{"parameters":{"text":{"type":"STRING"}}}}' - - name: implementations-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7 - value: '{"args":["--text","{{$.inputs.parameters[''text'']}}"],"command":["sh","-ec","program_path=$(mktemp)\nprintf - \"%s\" \"$0\" \u003e \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n","def - hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser - = argparse.ArgumentParser(prog=''Hello world'', description='''')\n_parser.add_argument(\"--text\", - dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args - = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n"],"image":"python:3.9"}' - - name: components-root - value: 
'{"dag":{"tasks":{"hello-world":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"hello-world"}}}},"inputDefinitions":{"parameters":{"text":{"type":"STRING"}}}}' - entrypoint: entrypoint - podMetadata: - annotations: - pipelines.kubeflow.org/v2_component: "true" - labels: - pipelines.kubeflow.org/v2_component: "true" - serviceAccountName: pipeline-runner - securityContext: - runAsUser: 1001 - templates: - - container: - args: - - --type - - CONTAINER - - --pipeline_name - - namespace/n1/pipeline/hello-world - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --container - - '{{inputs.parameters.container}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --cached_decision_path - - '{{outputs.parameters.cached-decision.path}}' - - --pod_spec_patch_path - - '{{outputs.parameters.pod-spec-patch.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --kubernetes_config - - '{{inputs.parameters.kubernetes-config}}' - - --http_proxy - - '' - - --https_proxy - - '' - - --no_proxy - - '' - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - env: - - name: ML_PIPELINE_SERVICE_HOST - value: ml-pipeline.kubeflow.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: '8887' - command: - - driver - image: ghcr.io/kubeflow/kfp-driver - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - name: task - - name: container - - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: "" - name: kubernetes-config - metadata: {} - name: system-container-driver - outputs: - parameters: - - name: pod-spec-patch - valueFrom: - default: "" - path: /tmp/outputs/pod-spec-patch - - default: "false" - name: cached-decision - valueFrom: - default: "false" - path: /tmp/outputs/cached-decision - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: pod-spec-patch - value: '{{inputs.parameters.pod-spec-patch}}' - name: executor - template: system-container-impl - when: '{{inputs.parameters.cached-decision}} != true' - inputs: - parameters: - - name: pod-spec-patch - - default: "false" - name: cached-decision - metadata: {} - name: system-container-executor - outputs: {} - - container: - command: - - should-be-overridden-during-runtime - env: - - name: KFP_POD_NAME - valueFrom: - fieldRef: - fieldPath: metadata.name - - name: KFP_POD_UID - valueFrom: - fieldRef: - fieldPath: metadata.uid - - name: ML_PIPELINE_SERVICE_HOST - value: ml-pipeline.kubeflow.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: '8887' - envFrom: - - configMapRef: - name: metadata-grpc-configmap - optional: true - image: gcr.io/ml-pipeline/should-be-overridden-during-runtime - name: "" - resources: {} - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - - mountPath: /gcs - name: gcs-scratch - - mountPath: /s3 - name: s3-scratch - - mountPath: /minio - name: minio-scratch - - mountPath: /.local - 
name: dot-local-scratch - - mountPath: /.cache - name: dot-cache-scratch - - mountPath: /.config - name: dot-config-scratch - initContainers: - - args: - - --copy - - /kfp-launcher/launch - command: - - launcher-v2 - image: ghcr.io/kubeflow/kfp-launcher - name: kfp-launcher - resources: - limits: - cpu: 500m - memory: 128Mi - requests: - cpu: 100m - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - inputs: - parameters: - - name: pod-spec-patch - metadata: {} - name: system-container-impl - outputs: {} - podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' - volumes: - - emptyDir: {} - name: kfp-launcher - - emptyDir: {} - name: gcs-scratch - - emptyDir: {} - name: s3-scratch - - emptyDir: {} - name: minio-scratch - - emptyDir: {} - name: dot-local-scratch - - emptyDir: {} - name: dot-cache-scratch - - emptyDir: {} - name: dot-config-scratch - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"hello-world"}}' - - name: container - value: '{{workflow.parameters.implementations-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: hello-world-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.hello-world-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.hello-world-driver.outputs.parameters.cached-decision}}' - depends: hello-world-driver.Succeeded - name: hello-world - template: system-container-executor - inputs: - parameters: - - name: parent-dag-id - metadata: {} - name: root - outputs: {} - - container: - args: - - --type - - '{{inputs.parameters.driver-type}}' - - --pipeline_name - - namespace/n1/pipeline/hello-world - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --runtime_config - - '{{inputs.parameters.runtime-config}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' - - --iteration_count_path - - '{{outputs.parameters.iteration-count.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --http_proxy - - '' - - --https_proxy - - '' - - --no_proxy - - '' - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - env: - - name: ML_PIPELINE_SERVICE_HOST - value: ml-pipeline.kubeflow.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: '8887' - command: - - driver - image: ghcr.io/kubeflow/kfp-driver - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - default: "" - name: runtime-config - - default: "" - name: task - - default: "0" - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: DAG - name: driver-type - metadata: {} - name: system-dag-driver - 
outputs: - parameters: - - name: execution-id - valueFrom: - path: /tmp/outputs/execution-id - - name: iteration-count - valueFrom: - default: "0" - path: /tmp/outputs/iteration-count - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-root}}' - - name: runtime-config - value: '{"parameters":{"text":{"stringValue":"hi there"}}}' - - name: driver-type - value: ROOT_DAG - name: root-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' - - name: condition - value: "" - depends: root-driver.Succeeded - name: root - template: root - inputs: {} - metadata: {} - name: entrypoint - outputs: {} -status: - finishedAt: null - startedAt: null diff --git a/backend/src/v2/compiler/argocompiler/testdata/hello_world_with_retry.yaml b/backend/src/v2/compiler/argocompiler/testdata/hello_world_with_retry.yaml deleted file mode 100644 index 65ac011cf48..00000000000 --- a/backend/src/v2/compiler/argocompiler/testdata/hello_world_with_retry.yaml +++ /dev/null @@ -1,504 +0,0 @@ -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - creationTimestamp: null - generateName: hello-world- -spec: - arguments: - parameters: - - name: components-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7 - value: '{"executorLabel":"exec-hello-world","inputDefinitions":{"parameters":{"text":{"type":"STRING"}}}}' - - name: implementations-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7 - value: '{"args":["--text","{{$.inputs.parameters[''text'']}}"],"command":["sh","-ec","program_path=$(mktemp)\nprintf - \"%s\" \"$0\" \u003e \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n","def - hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser - = argparse.ArgumentParser(prog=''Hello world'', description='''')\n_parser.add_argument(\"--text\", - dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args - = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n"],"image":"python:3.9"}' - - name: components-root - value: '{"dag":{"tasks":{"hello-world":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"retryPolicy":{"backoffFactor":2,"backoffMaxDuration":"3600s","maxRetryCount":2},"taskInfo":{"name":"hello-world"}},"hello-world-non-retry":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"hello-world-2"}}}},"inputDefinitions":{"parameters":{"text":{"type":"STRING"}}}}' - entrypoint: entrypoint - podMetadata: - annotations: - pipelines.kubeflow.org/v2_component: "true" - labels: - pipelines.kubeflow.org/v2_component: "true" - serviceAccountName: pipeline-runner - templates: - - container: - args: - - --type - - CONTAINER - - --pipeline_name - - namespace/n1/pipeline/hello-world - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --container - - '{{inputs.parameters.container}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --cached_decision_path - 
- '{{outputs.parameters.cached-decision.path}}' - - --pod_spec_patch_path - - '{{outputs.parameters.pod-spec-patch.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --kubernetes_config - - '{{inputs.parameters.kubernetes-config}}' - - --http_proxy - - "" - - --https_proxy - - "" - - --no_proxy - - "" - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - command: - - driver - image: ghcr.io/kubeflow/kfp-driver - env: - - name: ML_PIPELINE_SERVICE_HOST - value: ml-pipeline.kubeflow.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: '8887' - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - name: task - - name: container - - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: "" - name: kubernetes-config - metadata: {} - name: system-container-driver - outputs: - parameters: - - name: pod-spec-patch - valueFrom: - default: "" - path: /tmp/outputs/pod-spec-patch - - default: "false" - name: cached-decision - valueFrom: - default: "false" - path: /tmp/outputs/cached-decision - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: pod-spec-patch - value: '{{inputs.parameters.pod-spec-patch}}' - - name: retry-max-count - value: '{{inputs.parameters.retry-max-count}}' - - name: retry-backoff-duration - value: '{{inputs.parameters.retry-backoff-duration}}' - - name: retry-backoff-factor - value: '{{inputs.parameters.retry-backoff-factor}}' - - name: retry-backoff-max-duration - value: '{{inputs.parameters.retry-backoff-max-duration}}' - name: executor - template: retry-system-container-impl - when: '{{inputs.parameters.cached-decision}} != true' - inputs: - parameters: - - name: pod-spec-patch - - default: "false" - name: cached-decision - - default: "0" - name: retry-max-count - - default: "0" - name: retry-backoff-duration - - default: "0" - name: retry-backoff-factor - - default: "0" - name: retry-backoff-max-duration - metadata: {} - name: retry-system-container-executor - outputs: {} - - container: - command: - - should-be-overridden-during-runtime - env: - - name: KFP_POD_NAME - valueFrom: - fieldRef: - fieldPath: metadata.name - - name: KFP_POD_UID - valueFrom: - fieldRef: - fieldPath: metadata.uid - - name: ML_PIPELINE_SERVICE_HOST - value: ml-pipeline.kubeflow.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: '8887' - envFrom: - - configMapRef: - name: metadata-grpc-configmap - optional: true - image: gcr.io/ml-pipeline/should-be-overridden-during-runtime - name: "" - resources: {} - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - - mountPath: /gcs - name: gcs-scratch - - mountPath: /s3 - name: s3-scratch - - mountPath: /minio - name: minio-scratch - - mountPath: /.local - name: dot-local-scratch - - mountPath: /.cache - name: dot-cache-scratch - - mountPath: /.config - name: dot-config-scratch - initContainers: - - args: - - --copy - - /kfp-launcher/launch - command: - - launcher-v2 - image: ghcr.io/kubeflow/kfp-launcher - name: kfp-launcher - resources: - limits: - cpu: 500m - memory: 128Mi - requests: - cpu: 100m - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - inputs: - parameters: - - name: pod-spec-patch - - name: retry-max-count - - name: 
retry-backoff-duration - - name: retry-backoff-factor - - name: retry-backoff-max-duration - metadata: {} - name: retry-system-container-impl - outputs: {} - podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' - retryStrategy: - backoff: - duration: '{{inputs.parameters.retry-backoff-duration}}' - factor: '{{inputs.parameters.retry-backoff-factor}}' - maxDuration: '{{inputs.parameters.retry-backoff-max-duration}}' - limit: '{{inputs.parameters.retry-max-count}}' - volumes: - - emptyDir: {} - name: kfp-launcher - - emptyDir: {} - name: gcs-scratch - - emptyDir: {} - name: s3-scratch - - emptyDir: {} - name: minio-scratch - - emptyDir: {} - name: dot-local-scratch - - emptyDir: {} - name: dot-cache-scratch - - emptyDir: {} - name: dot-config-scratch - - dag: - tasks: - - arguments: - parameters: - - name: pod-spec-patch - value: '{{inputs.parameters.pod-spec-patch}}' - name: executor - template: system-container-impl - when: '{{inputs.parameters.cached-decision}} != true' - inputs: - parameters: - - name: pod-spec-patch - - default: "false" - name: cached-decision - metadata: { } - name: system-container-executor - outputs: { } - - container: - command: - - should-be-overridden-during-runtime - env: - - name: KFP_POD_NAME - valueFrom: - fieldRef: - fieldPath: metadata.name - - name: KFP_POD_UID - valueFrom: - fieldRef: - fieldPath: metadata.uid - - name: ML_PIPELINE_SERVICE_HOST - value: ml-pipeline.kubeflow.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: '8887' - envFrom: - - configMapRef: - name: metadata-grpc-configmap - optional: true - image: gcr.io/ml-pipeline/should-be-overridden-during-runtime - name: "" - resources: { } - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - - mountPath: /gcs - name: gcs-scratch - - mountPath: /s3 - name: s3-scratch - - mountPath: /minio - name: minio-scratch - - mountPath: /.local - name: dot-local-scratch - - mountPath: /.cache - name: dot-cache-scratch - - mountPath: /.config - name: dot-config-scratch - initContainers: - - args: - - --copy - - /kfp-launcher/launch - command: - - launcher-v2 - image: ghcr.io/kubeflow/kfp-launcher - name: kfp-launcher - resources: - limits: - cpu: 500m - memory: 128Mi - requests: - cpu: 100m - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - inputs: - parameters: - - name: pod-spec-patch - metadata: { } - name: system-container-impl - outputs: { } - podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' - volumes: - - emptyDir: { } - name: kfp-launcher - - emptyDir: { } - name: gcs-scratch - - emptyDir: { } - name: s3-scratch - - emptyDir: { } - name: minio-scratch - - emptyDir: { } - name: dot-local-scratch - - emptyDir: { } - name: dot-cache-scratch - - emptyDir: { } - name: dot-config-scratch - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"retryPolicy":{"backoffFactor":2,"backoffMaxDuration":"3600s","maxRetryCount":2},"taskInfo":{"name":"hello-world"}}' - - name: container - value: '{{workflow.parameters.implementations-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: hello-world-driver - template: system-container-driver - - arguments: - parameters: - - name: 
pod-spec-patch - value: '{{tasks.hello-world-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.hello-world-driver.outputs.parameters.cached-decision}}' - - name: retry-max-count - value: "2" - - name: retry-backoff-factor - value: "2" - - name: retry-backoff-max-duration - value: "3600" - depends: hello-world-driver.Succeeded - name: hello-world - template: retry-system-container-executor - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"hello-world-2"}}' - - name: container - value: '{{workflow.parameters.implementations-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: hello-world-non-retry-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.hello-world-non-retry-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.hello-world-non-retry-driver.outputs.parameters.cached-decision}}' - depends: hello-world-non-retry-driver.Succeeded - name: hello-world-non-retry - template: system-container-executor - inputs: - parameters: - - name: parent-dag-id - metadata: {} - name: root - outputs: {} - - container: - args: - - --type - - '{{inputs.parameters.driver-type}}' - - --pipeline_name - - namespace/n1/pipeline/hello-world - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --runtime_config - - '{{inputs.parameters.runtime-config}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' - - --iteration_count_path - - '{{outputs.parameters.iteration-count.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --http_proxy - - "" - - --https_proxy - - "" - - --no_proxy - - "" - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - env: - - name: ML_PIPELINE_SERVICE_HOST - value: ml-pipeline.kubeflow.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: '8887' - command: - - driver - image: ghcr.io/kubeflow/kfp-driver - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - default: "" - name: runtime-config - - default: "" - name: task - - default: "0" - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: DAG - name: driver-type - metadata: {} - name: system-dag-driver - outputs: - parameters: - - name: execution-id - valueFrom: - path: /tmp/outputs/execution-id - - name: iteration-count - valueFrom: - default: "0" - path: /tmp/outputs/iteration-count - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-root}}' - 
- name: runtime-config - value: '{"parameters":{"text":{"stringValue":"hi there"}}}' - - name: driver-type - value: ROOT_DAG - name: root-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' - - name: condition - value: "" - depends: root-driver.Succeeded - name: root - template: root - inputs: {} - metadata: {} - name: entrypoint - outputs: {} -status: - finishedAt: null - startedAt: null \ No newline at end of file diff --git a/backend/src/v2/compiler/argocompiler/testdata/hello_world_with_retry_all_args.yaml b/backend/src/v2/compiler/argocompiler/testdata/hello_world_with_retry_all_args.yaml deleted file mode 100644 index 5b3f49f03db..00000000000 --- a/backend/src/v2/compiler/argocompiler/testdata/hello_world_with_retry_all_args.yaml +++ /dev/null @@ -1,506 +0,0 @@ -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - creationTimestamp: null - generateName: hello-world- -spec: - arguments: - parameters: - - name: components-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7 - value: '{"executorLabel":"exec-hello-world","inputDefinitions":{"parameters":{"text":{"type":"STRING"}}}}' - - name: implementations-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7 - value: '{"args":["--text","{{$.inputs.parameters[''text'']}}"],"command":["sh","-ec","program_path=$(mktemp)\nprintf - \"%s\" \"$0\" \u003e \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n","def - hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser - = argparse.ArgumentParser(prog=''Hello world'', description='''')\n_parser.add_argument(\"--text\", - dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args - = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n"],"image":"python:3.9"}' - - name: components-root - value: '{"dag":{"tasks":{"hello-world":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"retryPolicy":{"backoffDuration":"1s","backoffFactor":2,"backoffMaxDuration":"3600s","maxRetryCount":2},"taskInfo":{"name":"hello-world"}},"hello-world-non-retry":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"hello-world-2"}}}},"inputDefinitions":{"parameters":{"text":{"type":"STRING"}}}}' - entrypoint: entrypoint - podMetadata: - annotations: - pipelines.kubeflow.org/v2_component: "true" - labels: - pipelines.kubeflow.org/v2_component: "true" - serviceAccountName: pipeline-runner - templates: - - container: - args: - - --type - - CONTAINER - - --pipeline_name - - namespace/n1/pipeline/hello-world - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --container - - '{{inputs.parameters.container}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --cached_decision_path - - '{{outputs.parameters.cached-decision.path}}' - - --pod_spec_patch_path - - '{{outputs.parameters.pod-spec-patch.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --kubernetes_config - - '{{inputs.parameters.kubernetes-config}}' - - --http_proxy - - "" - - --https_proxy - - "" 
- - --no_proxy - - "" - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - command: - - driver - env: - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - image: ghcr.io/kubeflow/kfp-driver - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - name: task - - name: container - - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: "" - name: kubernetes-config - metadata: {} - name: system-container-driver - outputs: - parameters: - - name: pod-spec-patch - valueFrom: - default: "" - path: /tmp/outputs/pod-spec-patch - - default: "false" - name: cached-decision - valueFrom: - default: "false" - path: /tmp/outputs/cached-decision - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: pod-spec-patch - value: '{{inputs.parameters.pod-spec-patch}}' - - name: retry-max-count - value: '{{inputs.parameters.retry-max-count}}' - - name: retry-backoff-duration - value: '{{inputs.parameters.retry-backoff-duration}}' - - name: retry-backoff-factor - value: '{{inputs.parameters.retry-backoff-factor}}' - - name: retry-backoff-max-duration - value: '{{inputs.parameters.retry-backoff-max-duration}}' - name: executor - template: retry-system-container-impl - when: '{{inputs.parameters.cached-decision}} != true' - inputs: - parameters: - - name: pod-spec-patch - - default: "false" - name: cached-decision - - default: "0" - name: retry-max-count - - default: "0" - name: retry-backoff-duration - - default: "0" - name: retry-backoff-factor - - default: "0" - name: retry-backoff-max-duration - metadata: {} - name: retry-system-container-executor - outputs: {} - - container: - command: - - should-be-overridden-during-runtime - env: - - name: KFP_POD_NAME - valueFrom: - fieldRef: - fieldPath: metadata.name - - name: KFP_POD_UID - valueFrom: - fieldRef: - fieldPath: metadata.uid - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - envFrom: - - configMapRef: - name: metadata-grpc-configmap - optional: true - image: gcr.io/ml-pipeline/should-be-overridden-during-runtime - name: "" - resources: {} - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - - mountPath: /gcs - name: gcs-scratch - - mountPath: /s3 - name: s3-scratch - - mountPath: /minio - name: minio-scratch - - mountPath: /.local - name: dot-local-scratch - - mountPath: /.cache - name: dot-cache-scratch - - mountPath: /.config - name: dot-config-scratch - initContainers: - - args: - - --copy - - /kfp-launcher/launch - command: - - launcher-v2 - image: ghcr.io/kubeflow/kfp-launcher - name: kfp-launcher - resources: - limits: - cpu: 500m - memory: 128Mi - requests: - cpu: 100m - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - inputs: - parameters: - - name: pod-spec-patch - - name: retry-max-count - - name: retry-backoff-duration - - name: retry-backoff-factor - - name: retry-backoff-max-duration - metadata: {} - name: retry-system-container-impl - outputs: {} - podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' - retryStrategy: - backoff: - duration: '{{inputs.parameters.retry-backoff-duration}}' - factor: 
'{{inputs.parameters.retry-backoff-factor}}' - maxDuration: '{{inputs.parameters.retry-backoff-max-duration}}' - limit: '{{inputs.parameters.retry-max-count}}' - volumes: - - emptyDir: {} - name: kfp-launcher - - emptyDir: {} - name: gcs-scratch - - emptyDir: {} - name: s3-scratch - - emptyDir: {} - name: minio-scratch - - emptyDir: {} - name: dot-local-scratch - - emptyDir: {} - name: dot-cache-scratch - - emptyDir: {} - name: dot-config-scratch - - dag: - tasks: - - arguments: - parameters: - - name: pod-spec-patch - value: '{{inputs.parameters.pod-spec-patch}}' - name: executor - template: system-container-impl - when: '{{inputs.parameters.cached-decision}} != true' - inputs: - parameters: - - name: pod-spec-patch - - default: "false" - name: cached-decision - metadata: { } - name: system-container-executor - outputs: { } - - container: - command: - - should-be-overridden-during-runtime - env: - - name: KFP_POD_NAME - valueFrom: - fieldRef: - fieldPath: metadata.name - - name: KFP_POD_UID - valueFrom: - fieldRef: - fieldPath: metadata.uid - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - envFrom: - - configMapRef: - name: metadata-grpc-configmap - optional: true - image: gcr.io/ml-pipeline/should-be-overridden-during-runtime - name: "" - resources: { } - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - - mountPath: /gcs - name: gcs-scratch - - mountPath: /s3 - name: s3-scratch - - mountPath: /minio - name: minio-scratch - - mountPath: /.local - name: dot-local-scratch - - mountPath: /.cache - name: dot-cache-scratch - - mountPath: /.config - name: dot-config-scratch - initContainers: - - args: - - --copy - - /kfp-launcher/launch - command: - - launcher-v2 - image: ghcr.io/kubeflow/kfp-launcher - name: kfp-launcher - resources: - limits: - cpu: 500m - memory: 128Mi - requests: - cpu: 100m - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - inputs: - parameters: - - name: pod-spec-patch - metadata: { } - name: system-container-impl - outputs: { } - podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' - volumes: - - emptyDir: { } - name: kfp-launcher - - emptyDir: { } - name: gcs-scratch - - emptyDir: { } - name: s3-scratch - - emptyDir: { } - name: minio-scratch - - emptyDir: { } - name: dot-local-scratch - - emptyDir: { } - name: dot-cache-scratch - - emptyDir: { } - name: dot-config-scratch - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"retryPolicy":{"backoffDuration":"1s","backoffFactor":2,"backoffMaxDuration":"3600s","maxRetryCount":2},"taskInfo":{"name":"hello-world"}}' - - name: container - value: '{{workflow.parameters.implementations-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: hello-world-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.hello-world-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.hello-world-driver.outputs.parameters.cached-decision}}' - - name: retry-max-count - value: "2" - - name: retry-backoff-duration - value: 
"1" - - name: retry-backoff-factor - value: "2" - - name: retry-backoff-max-duration - value: "3600" - depends: hello-world-driver.Succeeded - name: hello-world - template: retry-system-container-executor - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"hello-world-2"}}' - - name: container - value: '{{workflow.parameters.implementations-203fce8adabe0cfa7da54b9d3ff79c772136c926974659b51c378727c7ccdfb7}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: hello-world-non-retry-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.hello-world-non-retry-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.hello-world-non-retry-driver.outputs.parameters.cached-decision}}' - depends: hello-world-non-retry-driver.Succeeded - name: hello-world-non-retry - template: system-container-executor - inputs: - parameters: - - name: parent-dag-id - metadata: {} - name: root - outputs: {} - - container: - args: - - --type - - '{{inputs.parameters.driver-type}}' - - --pipeline_name - - namespace/n1/pipeline/hello-world - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --runtime_config - - '{{inputs.parameters.runtime-config}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' - - --iteration_count_path - - '{{outputs.parameters.iteration-count.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --http_proxy - - "" - - --https_proxy - - "" - - --no_proxy - - "" - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - command: - - driver - env: - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - image: ghcr.io/kubeflow/kfp-driver - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - default: "" - name: runtime-config - - default: "" - name: task - - default: "0" - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: DAG - name: driver-type - metadata: {} - name: system-dag-driver - outputs: - parameters: - - name: execution-id - valueFrom: - path: /tmp/outputs/execution-id - - name: iteration-count - valueFrom: - default: "0" - path: /tmp/outputs/iteration-count - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-root}}' - - name: runtime-config - value: '{"parameters":{"text":{"stringValue":"hi there"}}}' - - name: driver-type - value: ROOT_DAG - name: root-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: 
'{{tasks.root-driver.outputs.parameters.execution-id}}' - - name: condition - value: "" - depends: root-driver.Succeeded - name: root - template: root - inputs: {} - metadata: {} - name: entrypoint - outputs: {} -status: - finishedAt: null - startedAt: null \ No newline at end of file diff --git a/backend/src/v2/compiler/argocompiler/testdata/importer.yaml b/backend/src/v2/compiler/argocompiler/testdata/importer.yaml deleted file mode 100644 index c6960a5b72c..00000000000 --- a/backend/src/v2/compiler/argocompiler/testdata/importer.yaml +++ /dev/null @@ -1,219 +0,0 @@ -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - creationTimestamp: null - generateName: pipeline-with-importer- -spec: - arguments: - parameters: - - name: components-comp-importer - value: '{"executorLabel":"exec-importer","inputDefinitions":{"parameters":{"uri":{"type":"STRING"}}},"outputDefinitions":{"artifacts":{"artifact":{"artifactType":{"schemaTitle":"system.Dataset"}}}}}' - - name: implementations-comp-importer - value: '{"artifactUri":{"constantValue":{"stringValue":"gs://ml-pipeline-playground/shakespeare1.txt"}},"typeSchema":{"schemaTitle":"system.Dataset"}}' - - name: components-root - value: '{"dag":{"tasks":{"importer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"uri":{"runtimeValue":{"constantValue":{"stringValue":"gs://ml-pipeline-playground/shakespeare1.txt"}}}}},"taskInfo":{"name":"importer"}}}},"inputDefinitions":{"parameters":{"dataset2":{"type":"STRING"}}}}' - entrypoint: entrypoint - podMetadata: - annotations: - pipelines.kubeflow.org/v2_component: "true" - labels: - pipelines.kubeflow.org/v2_component: "true" - serviceAccountName: pipeline-runner - templates: - - container: - args: - - --executor_type - - importer - - --task_spec - - '{{inputs.parameters.task}}' - - --component_spec - - '{{inputs.parameters.component}}' - - --importer_spec - - '{{inputs.parameters.importer}}' - - --pipeline_name - - pipeline-with-importer - - --run_id - - '{{workflow.uid}}' - - --parent_dag_id - - '{{inputs.parameters.parent-dag-id}}' - - --pod_name - - $(KFP_POD_NAME) - - --pod_uid - - $(KFP_POD_UID) - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - "--ca_cert_path" - - "" - command: - - launcher-v2 - env: - - name: KFP_POD_NAME - valueFrom: - fieldRef: - fieldPath: metadata.name - - name: KFP_POD_UID - valueFrom: - fieldRef: - fieldPath: metadata.uid - envFrom: - - configMapRef: - name: metadata-grpc-configmap - optional: true - image: ghcr.io/kubeflow/kfp-launcher - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: task - - name: component - - name: importer - - name: parent-dag-id - metadata: {} - name: system-importer - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"uri":{"runtimeValue":{"constantValue":{"stringValue":"gs://ml-pipeline-playground/shakespeare1.txt"}}}}},"taskInfo":{"name":"importer"}}' - - name: component - value: '{{workflow.parameters.components-comp-importer}}' - - name: importer - value: '{{workflow.parameters.implementations-comp-importer}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: importer - template: system-importer - inputs: - parameters: - - name: parent-dag-id - metadata: {} - 
name: root - outputs: {} - - container: - args: - - --type - - '{{inputs.parameters.driver-type}}' - - --pipeline_name - - pipeline-with-importer - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --runtime_config - - '{{inputs.parameters.runtime-config}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' - - --iteration_count_path - - '{{outputs.parameters.iteration-count.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --http_proxy - - '' - - --https_proxy - - '' - - --no_proxy - - '' - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - command: - - driver - image: ghcr.io/kubeflow/kfp-driver - name: "" - env: - - name: ML_PIPELINE_SERVICE_HOST - value: ml-pipeline.kubeflow.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: '8887' - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - default: "" - name: runtime-config - - default: "" - name: task - - default: "0" - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: DAG - name: driver-type - metadata: {} - name: system-dag-driver - outputs: - parameters: - - name: execution-id - valueFrom: - path: /tmp/outputs/execution-id - - name: iteration-count - valueFrom: - default: "0" - path: /tmp/outputs/iteration-count - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-root}}' - - name: runtime-config - value: '{}' - - name: driver-type - value: ROOT_DAG - name: root-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' - - name: condition - value: "" - depends: root-driver.Succeeded - name: root - template: root - inputs: {} - metadata: {} - name: entrypoint - outputs: {} -status: - finishedAt: null - startedAt: null diff --git a/backend/src/v2/compiler/argocompiler/testdata/multiple_parallel_loops.yaml b/backend/src/v2/compiler/argocompiler/testdata/multiple_parallel_loops.yaml deleted file mode 100755 index 627f0c9b16f..00000000000 --- a/backend/src/v2/compiler/argocompiler/testdata/multiple_parallel_loops.yaml +++ /dev/null @@ -1,571 +0,0 @@ -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - creationTimestamp: null - generateName: my-pipeline- -spec: - arguments: - parameters: - - name: components-e7a1060777c9ef84e36a4d54f25d3102abbcbd62d4c8bbb5883c3a2cbcfb5c6d - value: '{"executorLabel":"exec-print-op","inputDefinitions":{"parameters":{"s":{"parameterType":"STRING"}}}}' - - name: implementations-e7a1060777c9ef84e36a4d54f25d3102abbcbd62d4c8bbb5883c3a2cbcfb5c6d - value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_op"],"command":["sh","-c","\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location ''kfp==2.7.0'' ''--no-deps'' ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef print_op(s: str):\n print(s)\n\n"],"image":"python:3.7"}' - - name: components-comp-for-loop-2 - value: '{"dag":{"tasks":{"print-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"inputs":{"parameters":{"s":{"componentInputParameter":"pipelinechannel--loop-item-param-1","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"}}},"taskInfo":{"name":"print-op"}},"print-op-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"inputs":{"parameters":{"s":{"componentInputParameter":"pipelinechannel--loop-item-param-1","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"taskInfo":{"name":"print-op-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-1":{"parameterType":"STRUCT"}}}}' - - name: components-comp-for-loop-4 - value: '{"dag":{"tasks":{"print-op-3":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-3"},"inputs":{"parameters":{"s":{"componentInputParameter":"pipelinechannel--loop-item-param-3","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"}}},"taskInfo":{"name":"print-op-3"}},"print-op-4":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-4"},"inputs":{"parameters":{"s":{"componentInputParameter":"pipelinechannel--loop-item-param-3","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"taskInfo":{"name":"print-op-4"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-3":{"parameterType":"STRUCT"}}}}' - - name: components-root - value: '{"dag":{"tasks":{"for-loop-2":{"componentRef":{"name":"comp-for-loop-2"},"iteratorPolicy":{"parallelismLimit":2},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[{\"A_a\": \"1\", \"B_b\": \"10\"}, {\"A_a\": \"2\", \"B_b\": \"20\"}, {\"A_a\": \"3\", \"B_b\": \"30\"}, {\"A_a\": \"4\", \"B_b\": \"40\"}, {\"A_a\": \"5\", \"B_b\": \"50\"}, {\"A_a\": \"6\", \"B_b\": \"60\"}, {\"A_a\": \"7\", \"B_b\": \"70\"}, {\"A_a\": \"8\", \"B_b\": \"80\"}, {\"A_a\": \"9\", \"B_b\": \"90\"}, {\"A_a\": \"10\", \"B_b\": \"100\"}]"}},"taskInfo":{"name":"foo"}},"for-loop-4":{"componentRef":{"name":"comp-for-loop-4"},"iteratorPolicy":{"parallelismLimit":4},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[{\"A_a\": \"1\", \"B_b\": \"10\"}, {\"A_a\": \"2\", \"B_b\": \"20\"}, {\"A_a\": \"3\", \"B_b\": \"30\"}, {\"A_a\": \"4\", \"B_b\": \"40\"}, {\"A_a\": \"5\", \"B_b\": \"50\"}, {\"A_a\": \"6\", \"B_b\": \"60\"}, {\"A_a\": \"7\", \"B_b\": \"70\"}, {\"A_a\": \"8\", \"B_b\": \"80\"}, {\"A_a\": \"9\", \"B_b\": \"90\"}, {\"A_a\": \"10\", \"B_b\": \"100\"}]"}},"taskInfo":{"name":"bar"}}}}}' - entrypoint: entrypoint - podMetadata: - annotations: - pipelines.kubeflow.org/v2_component: "true" - labels: - pipelines.kubeflow.org/v2_component: "true" 
- serviceAccountName: pipeline-runner - templates: - - container: - args: - - --type - - CONTAINER - - --pipeline_name - - my-pipeline - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --container - - '{{inputs.parameters.container}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --cached_decision_path - - '{{outputs.parameters.cached-decision.path}}' - - --pod_spec_patch_path - - '{{outputs.parameters.pod-spec-patch.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --kubernetes_config - - '{{inputs.parameters.kubernetes-config}}' - - --http_proxy - - '' - - --https_proxy - - '' - - --no_proxy - - '' - - --mlPipelineServiceTLSEnabled - - 'false' - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - command: - - driver - env: - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - image: ghcr.io/kubeflow/kfp-driver - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - name: task - - name: container - - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: "" - name: kubernetes-config - metadata: {} - name: system-container-driver - outputs: - parameters: - - name: pod-spec-patch - valueFrom: - default: "" - path: /tmp/outputs/pod-spec-patch - - default: "false" - name: cached-decision - valueFrom: - default: "false" - path: /tmp/outputs/cached-decision - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: pod-spec-patch - value: '{{inputs.parameters.pod-spec-patch}}' - name: executor - template: system-container-impl - when: '{{inputs.parameters.cached-decision}} != true' - inputs: - parameters: - - name: pod-spec-patch - - default: "false" - name: cached-decision - metadata: {} - name: system-container-executor - outputs: {} - - container: - command: - - should-be-overridden-during-runtime - env: - - name: KFP_POD_NAME - valueFrom: - fieldRef: - fieldPath: metadata.name - - name: KFP_POD_UID - valueFrom: - fieldRef: - fieldPath: metadata.uid - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - envFrom: - - configMapRef: - name: metadata-grpc-configmap - optional: true - image: gcr.io/ml-pipeline/should-be-overridden-during-runtime - name: "" - resources: {} - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - - mountPath: /gcs - name: gcs-scratch - - mountPath: /s3 - name: s3-scratch - - mountPath: /minio - name: minio-scratch - - mountPath: /.local - name: dot-local-scratch - - mountPath: /.cache - name: dot-cache-scratch - - mountPath: /.config - name: dot-config-scratch - initContainers: - - args: - - --copy - - /kfp-launcher/launch - command: - - launcher-v2 - image: ghcr.io/kubeflow/kfp-launcher - name: kfp-launcher - resources: - limits: - cpu: 500m - memory: 128Mi - requests: - cpu: 100m - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - inputs: - parameters: - - name: pod-spec-patch - metadata: {} - name: 
system-container-impl - outputs: {} - podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' - volumes: - - emptyDir: {} - name: kfp-launcher - - emptyDir: {} - name: gcs-scratch - - emptyDir: {} - name: s3-scratch - - emptyDir: {} - name: minio-scratch - - emptyDir: {} - name: dot-local-scratch - - emptyDir: {} - name: dot-cache-scratch - - emptyDir: {} - name: dot-config-scratch - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-e7a1060777c9ef84e36a4d54f25d3102abbcbd62d4c8bbb5883c3a2cbcfb5c6d}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"inputs":{"parameters":{"s":{"componentInputParameter":"pipelinechannel--loop-item-param-1","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"}}},"taskInfo":{"name":"print-op"}}' - - name: container - value: '{{workflow.parameters.implementations-e7a1060777c9ef84e36a4d54f25d3102abbcbd62d4c8bbb5883c3a2cbcfb5c6d}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: print-op-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.print-op-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.print-op-driver.outputs.parameters.cached-decision}}' - depends: print-op-driver.Succeeded - name: print-op - template: system-container-executor - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-e7a1060777c9ef84e36a4d54f25d3102abbcbd62d4c8bbb5883c3a2cbcfb5c6d}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"inputs":{"parameters":{"s":{"componentInputParameter":"pipelinechannel--loop-item-param-1","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"taskInfo":{"name":"print-op-2"}}' - - name: container - value: '{{workflow.parameters.implementations-e7a1060777c9ef84e36a4d54f25d3102abbcbd62d4c8bbb5883c3a2cbcfb5c6d}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: print-op-2-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.print-op-2-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.print-op-2-driver.outputs.parameters.cached-decision}}' - depends: print-op-2-driver.Succeeded - name: print-op-2 - template: system-container-executor - inputs: - parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-2 - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-e7a1060777c9ef84e36a4d54f25d3102abbcbd62d4c8bbb5883c3a2cbcfb5c6d}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-3"},"inputs":{"parameters":{"s":{"componentInputParameter":"pipelinechannel--loop-item-param-3","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"}}},"taskInfo":{"name":"print-op-3"}}' - - name: container - value: '{{workflow.parameters.implementations-e7a1060777c9ef84e36a4d54f25d3102abbcbd62d4c8bbb5883c3a2cbcfb5c6d}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: print-op-3-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.print-op-3-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - 
value: '{{tasks.print-op-3-driver.outputs.parameters.cached-decision}}' - depends: print-op-3-driver.Succeeded - name: print-op-3 - template: system-container-executor - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-e7a1060777c9ef84e36a4d54f25d3102abbcbd62d4c8bbb5883c3a2cbcfb5c6d}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-4"},"inputs":{"parameters":{"s":{"componentInputParameter":"pipelinechannel--loop-item-param-3","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"taskInfo":{"name":"print-op-4"}}' - - name: container - value: '{{workflow.parameters.implementations-e7a1060777c9ef84e36a4d54f25d3102abbcbd62d4c8bbb5883c3a2cbcfb5c6d}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: print-op-4-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.print-op-4-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.print-op-4-driver.outputs.parameters.cached-decision}}' - depends: print-op-4-driver.Succeeded - name: print-op-4 - template: system-container-executor - inputs: - parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-4 - outputs: {} - - container: - args: - - --type - - '{{inputs.parameters.driver-type}}' - - --pipeline_name - - my-pipeline - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --runtime_config - - '{{inputs.parameters.runtime-config}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' - - --iteration_count_path - - '{{outputs.parameters.iteration-count.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --http_proxy - - '' - - --https_proxy - - '' - - --no_proxy - - '' - - --mlPipelineServiceTLSEnabled - - 'false' - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - command: - - driver - env: - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - image: ghcr.io/kubeflow/kfp-driver - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - default: "" - name: runtime-config - - default: "" - name: task - - default: "0" - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: DAG - name: driver-type - metadata: {} - name: system-dag-driver - outputs: - parameters: - - name: execution-id - valueFrom: - path: /tmp/outputs/execution-id - - name: iteration-count - valueFrom: - default: "0" - path: /tmp/outputs/iteration-count - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: 
'{"componentRef":{"name":"comp-for-loop-2"},"iteratorPolicy":{"parallelismLimit":2},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[{\"A_a\": \"1\", \"B_b\": \"10\"}, {\"A_a\": \"2\", \"B_b\": \"20\"}, {\"A_a\": \"3\", \"B_b\": \"30\"}, {\"A_a\": \"4\", \"B_b\": \"40\"}, {\"A_a\": \"5\", \"B_b\": \"50\"}, {\"A_a\": \"6\", \"B_b\": \"60\"}, {\"A_a\": \"7\", \"B_b\": \"70\"}, {\"A_a\": \"8\", \"B_b\": \"80\"}, {\"A_a\": \"9\", \"B_b\": \"90\"}, {\"A_a\": \"10\", \"B_b\": \"100\"}]"}},"taskInfo":{"name":"foo"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-2 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-2-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-2"},"iteratorPolicy":{"parallelismLimit":2},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[{\"A_a\": \"1\", \"B_b\": \"10\"}, {\"A_a\": \"2\", \"B_b\": \"20\"}, {\"A_a\": \"3\", \"B_b\": \"30\"}, {\"A_a\": \"4\", \"B_b\": \"40\"}, {\"A_a\": \"5\", \"B_b\": \"50\"}, {\"A_a\": \"6\", \"B_b\": \"60\"}, {\"A_a\": \"7\", \"B_b\": \"70\"}, {\"A_a\": \"8\", \"B_b\": \"80\"}, {\"A_a\": \"9\", \"B_b\": \"90\"}, {\"A_a\": \"10\", \"B_b\": \"100\"}]"}},"taskInfo":{"name":"foo"}}' - name: iteration-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' - - name: iteration-index - value: '{{item}}' - depends: iteration-driver.Succeeded - name: iteration-iterations - template: comp-for-loop-2-iteration - withSequence: - count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' - inputs: - parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-2-for-loop-2-iterator - outputs: {} - parallelism: 2 - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-4}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-4"},"iteratorPolicy":{"parallelismLimit":4},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[{\"A_a\": \"1\", \"B_b\": \"10\"}, {\"A_a\": \"2\", \"B_b\": \"20\"}, {\"A_a\": \"3\", \"B_b\": \"30\"}, {\"A_a\": \"4\", \"B_b\": \"40\"}, {\"A_a\": \"5\", \"B_b\": \"50\"}, {\"A_a\": \"6\", \"B_b\": \"60\"}, {\"A_a\": \"7\", \"B_b\": \"70\"}, {\"A_a\": \"8\", \"B_b\": \"80\"}, {\"A_a\": \"9\", \"B_b\": \"90\"}, {\"A_a\": \"10\", \"B_b\": \"100\"}]"}},"taskInfo":{"name":"bar"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: 
iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-4 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-4-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-4}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-4"},"iteratorPolicy":{"parallelismLimit":4},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[{\"A_a\": \"1\", \"B_b\": \"10\"}, {\"A_a\": \"2\", \"B_b\": \"20\"}, {\"A_a\": \"3\", \"B_b\": \"30\"}, {\"A_a\": \"4\", \"B_b\": \"40\"}, {\"A_a\": \"5\", \"B_b\": \"50\"}, {\"A_a\": \"6\", \"B_b\": \"60\"}, {\"A_a\": \"7\", \"B_b\": \"70\"}, {\"A_a\": \"8\", \"B_b\": \"80\"}, {\"A_a\": \"9\", \"B_b\": \"90\"}, {\"A_a\": \"10\", \"B_b\": \"100\"}]"}},"taskInfo":{"name":"bar"}}' - name: iteration-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' - - name: iteration-index - value: '{{item}}' - depends: iteration-driver.Succeeded - name: iteration-iterations - template: comp-for-loop-4-iteration - withSequence: - count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' - inputs: - parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-4-for-loop-4-iterator - outputs: {} - parallelism: 4 - - dag: - tasks: - - arguments: - parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - depends: "" - name: for-loop-2 - template: comp-for-loop-2-for-loop-2-iterator - - arguments: - parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - depends: "" - name: for-loop-4 - template: comp-for-loop-4-for-loop-4-iterator - inputs: - parameters: - - name: parent-dag-id - metadata: {} - name: root - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-root}}' - - name: runtime-config - value: '{"parameters":{"text":{"stringValue":"hello world"}}}' - - name: driver-type - value: ROOT_DAG - name: root-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' - - name: condition - value: "" - depends: root-driver.Succeeded - name: root - template: root - inputs: {} - metadata: {} - name: entrypoint - outputs: {} -status: - finishedAt: null - startedAt: null diff --git a/backend/src/v2/compiler/argocompiler/testdata/nested_pipeline_all_level_retry.yaml b/backend/src/v2/compiler/argocompiler/testdata/nested_pipeline_all_level_retry.yaml deleted file mode 100644 index b704817751b..00000000000 --- a/backend/src/v2/compiler/argocompiler/testdata/nested_pipeline_all_level_retry.yaml +++ /dev/null @@ -1,456 +0,0 @@ -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - creationTimestamp: null - generateName: hello-world- -spec: - arguments: - parameters: - - name: components-c76def800bcb5189543541034eefac9210e827c15dd15b6de8ea4c45c233f603 - value: '{"executorLabel":"exec-component-a"}' - - name: implementations-c76def800bcb5189543541034eefac9210e827c15dd15b6de8ea4c45c233f603 - value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_a"],"command":["sh","-c","\nif - ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 - -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 - python3 -m pip install --quiet --no-warn-script-location ''kfp==2.13.0'' ''--no-deps'' - ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 - \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" - \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 - -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport - kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef - component_a():\n print(''Component A'')\n\n"],"image":"python:3.9"}' - - name: components-1a8bd1be9f10fe6fd3a429c49087a6cf42986d8e5a4f3eb99a60bba174470e23 - value: '{"executorLabel":"exec-component-b"}' - - name: implementations-1a8bd1be9f10fe6fd3a429c49087a6cf42986d8e5a4f3eb99a60bba174470e23 - value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_b"],"command":["sh","-c","\nif - ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 - -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 - python3 -m pip install --quiet --no-warn-script-location ''kfp==2.13.0'' ''--no-deps'' - ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 - \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" - \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 - -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport - kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef - component_b():\n print (''Component B'')\n\n"],"image":"python:3.9"}' - - name: components-comp-nested-pipeline - value: '{"dag":{"tasks":{"component-a":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-a"},"retryPolicy":{"backoffDuration":"0s","backoffFactor":1,"backoffMaxDuration":"1800s","maxRetryCount":1},"taskInfo":{"name":"component-a"}},"component-b":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-b"},"retryPolicy":{"backoffDuration":"0s","backoffFactor":2,"backoffMaxDuration":"3600s","maxRetryCount":2},"taskInfo":{"name":"component-b"}}}}}' - - name: components-root - value: '{"dag":{"tasks":{"nested-pipeline":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-nested-pipeline"},"retryPolicy":{"backoffDuration":"0s","backoffFactor":1,"backoffMaxDuration":"1800s","maxRetryCount":1},"taskInfo":{"name":"nested-pipeline"}}}}}' - entrypoint: entrypoint - podMetadata: - annotations: - pipelines.kubeflow.org/v2_component: "true" - labels: - pipelines.kubeflow.org/v2_component: "true" - serviceAccountName: pipeline-runner - templates: - - container: - args: - - --type - - CONTAINER - - --pipeline_name - - hello-world - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --container - - '{{inputs.parameters.container}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --cached_decision_path - - '{{outputs.parameters.cached-decision.path}}' - - --pod_spec_patch_path - - '{{outputs.parameters.pod-spec-patch.path}}' - - --condition_path 
- - '{{outputs.parameters.condition.path}}' - - --kubernetes_config - - '{{inputs.parameters.kubernetes-config}}' - - --http_proxy - - "" - - --https_proxy - - "" - - --no_proxy - - "" - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - command: - - driver - env: - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - image: ghcr.io/kubeflow/kfp-driver - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - name: task - - name: container - - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: "" - name: kubernetes-config - name: system-container-driver - outputs: - parameters: - - name: pod-spec-patch - valueFrom: - default: "" - path: /tmp/outputs/pod-spec-patch - - default: "false" - name: cached-decision - valueFrom: - default: "false" - path: /tmp/outputs/cached-decision - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: pod-spec-patch - value: '{{inputs.parameters.pod-spec-patch}}' - - name: retry-max-count - value: '{{inputs.parameters.retry-max-count}}' - - name: retry-backoff-duration - value: '{{inputs.parameters.retry-backoff-duration}}' - - name: retry-backoff-factor - value: '{{inputs.parameters.retry-backoff-factor}}' - - name: retry-backoff-max-duration - value: '{{inputs.parameters.retry-backoff-max-duration}}' - name: executor - template: retry-system-container-impl - when: '{{inputs.parameters.cached-decision}} != true' - inputs: - parameters: - - name: pod-spec-patch - - default: "false" - name: cached-decision - - default: "0" - name: retry-max-count - - default: "0" - name: retry-backoff-duration - - default: "0" - name: retry-backoff-factor - - default: "0" - name: retry-backoff-max-duration - name: retry-system-container-executor - outputs: {} - - container: - command: - - should-be-overridden-during-runtime - env: - - name: KFP_POD_NAME - valueFrom: - fieldRef: - fieldPath: metadata.name - - name: KFP_POD_UID - valueFrom: - fieldRef: - fieldPath: metadata.uid - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - envFrom: - - configMapRef: - name: metadata-grpc-configmap - optional: true - image: gcr.io/ml-pipeline/should-be-overridden-during-runtime - name: "" - resources: {} - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - - mountPath: /gcs - name: gcs-scratch - - mountPath: /s3 - name: s3-scratch - - mountPath: /minio - name: minio-scratch - - mountPath: /.local - name: dot-local-scratch - - mountPath: /.cache - name: dot-cache-scratch - - mountPath: /.config - name: dot-config-scratch - initContainers: - - args: - - --copy - - /kfp-launcher/launch - command: - - launcher-v2 - image: ghcr.io/kubeflow/kfp-launcher - name: kfp-launcher - resources: - limits: - cpu: 500m - memory: 128Mi - requests: - cpu: 100m - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - inputs: - parameters: - - name: pod-spec-patch - - name: retry-max-count - - name: retry-backoff-duration - - name: retry-backoff-factor - - name: retry-backoff-max-duration - name: retry-system-container-impl - outputs: {} - podSpecPatch: 
'{{inputs.parameters.pod-spec-patch}}' - retryStrategy: - backoff: - duration: '{{inputs.parameters.retry-backoff-duration}}' - factor: '{{inputs.parameters.retry-backoff-factor}}' - maxDuration: '{{inputs.parameters.retry-backoff-max-duration}}' - limit: '{{inputs.parameters.retry-max-count}}' - volumes: - - emptyDir: {} - name: kfp-launcher - - emptyDir: {} - name: gcs-scratch - - emptyDir: {} - name: s3-scratch - - emptyDir: {} - name: minio-scratch - - emptyDir: {} - name: dot-local-scratch - - emptyDir: {} - name: dot-cache-scratch - - emptyDir: {} - name: dot-config-scratch - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-c76def800bcb5189543541034eefac9210e827c15dd15b6de8ea4c45c233f603}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-a"},"retryPolicy":{"backoffDuration":"0s","backoffFactor":1,"backoffMaxDuration":"1800s","maxRetryCount":1},"taskInfo":{"name":"component-a"}}' - - name: container - value: '{{workflow.parameters.implementations-c76def800bcb5189543541034eefac9210e827c15dd15b6de8ea4c45c233f603}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: component-a-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.component-a-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.component-a-driver.outputs.parameters.cached-decision}}' - - name: retry-max-count - value: "1" - - name: retry-backoff-duration - value: "0" - - name: retry-backoff-factor - value: "1" - - name: retry-backoff-max-duration - value: "1800" - depends: component-a-driver.Succeeded - name: component-a - template: retry-system-container-executor - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-1a8bd1be9f10fe6fd3a429c49087a6cf42986d8e5a4f3eb99a60bba174470e23}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-b"},"retryPolicy":{"backoffDuration":"0s","backoffFactor":2,"backoffMaxDuration":"3600s","maxRetryCount":2},"taskInfo":{"name":"component-b"}}' - - name: container - value: '{{workflow.parameters.implementations-1a8bd1be9f10fe6fd3a429c49087a6cf42986d8e5a4f3eb99a60bba174470e23}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: component-b-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.component-b-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.component-b-driver.outputs.parameters.cached-decision}}' - - name: retry-max-count - value: "2" - - name: retry-backoff-duration - value: "0" - - name: retry-backoff-factor - value: "2" - - name: retry-backoff-max-duration - value: "3600" - depends: component-b-driver.Succeeded - name: component-b - template: retry-system-container-executor - inputs: - parameters: - - name: parent-dag-id - name: comp-nested-pipeline - outputs: { } - - container: - args: - - --type - - '{{inputs.parameters.driver-type}}' - - --pipeline_name - - hello-world - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --runtime_config - - 
'{{inputs.parameters.runtime-config}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' - - --iteration_count_path - - '{{outputs.parameters.iteration-count.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --http_proxy - - "" - - --https_proxy - - "" - - --no_proxy - - "" - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - command: - - driver - env: - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - image: ghcr.io/kubeflow/kfp-driver - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - default: "" - name: runtime-config - - default: "" - name: task - - default: "0" - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: DAG - name: driver-type - name: system-dag-driver - outputs: - parameters: - - name: execution-id - valueFrom: - path: /tmp/outputs/execution-id - - name: iteration-count - valueFrom: - default: "0" - path: /tmp/outputs/iteration-count - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-nested-pipeline}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-nested-pipeline"},"retryPolicy":{"backoffDuration":"0s","backoffFactor":1,"backoffMaxDuration":"1800s","maxRetryCount":1},"taskInfo":{"name":"nested-pipeline"}}' - name: nested-pipeline-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.nested-pipeline-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.nested-pipeline-driver.outputs.parameters.condition}}' - depends: nested-pipeline-driver.Succeeded - name: nested-pipeline - template: comp-nested-pipeline - inputs: - parameters: - - name: parent-dag-id - name: root - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-root}}' - - name: runtime-config - value: '{}' - - name: driver-type - value: ROOT_DAG - name: root-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' - - name: condition - value: "" - depends: root-driver.Succeeded - name: root - template: root - inputs: {} - name: entrypoint - outputs: {} diff --git a/backend/src/v2/compiler/argocompiler/testdata/nested_pipeline_pipeline_retry.yaml b/backend/src/v2/compiler/argocompiler/testdata/nested_pipeline_pipeline_retry.yaml deleted file mode 100644 index bef33501864..00000000000 --- a/backend/src/v2/compiler/argocompiler/testdata/nested_pipeline_pipeline_retry.yaml +++ /dev/null @@ -1,456 +0,0 @@ -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - creationTimestamp: null - generateName: hello-world- -spec: - arguments: - parameters: - - name: components-c76def800bcb5189543541034eefac9210e827c15dd15b6de8ea4c45c233f603 - value: '{"executorLabel":"exec-component-a"}' - - name: 
implementations-c76def800bcb5189543541034eefac9210e827c15dd15b6de8ea4c45c233f603 - value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_a"],"command":["sh","-c","\nif - ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 - -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 - python3 -m pip install --quiet --no-warn-script-location ''kfp==2.13.0'' ''--no-deps'' - ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 - \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" - \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 - -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport - kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef - component_a():\n print(''Component A'')\n\n"],"image":"python:3.9"}' - - name: components-1a8bd1be9f10fe6fd3a429c49087a6cf42986d8e5a4f3eb99a60bba174470e23 - value: '{"executorLabel":"exec-component-b"}' - - name: implementations-1a8bd1be9f10fe6fd3a429c49087a6cf42986d8e5a4f3eb99a60bba174470e23 - value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_b"],"command":["sh","-c","\nif - ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 - -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 - python3 -m pip install --quiet --no-warn-script-location ''kfp==2.13.0'' ''--no-deps'' - ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 - \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" - \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 - -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport - kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef - component_b():\n print (''Component B'')\n\n"],"image":"python:3.9"}' - - name: components-comp-nested-pipeline - value: '{"dag":{"tasks":{"component-a":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-a"},"retryPolicy":{"backoffDuration":"0s","backoffFactor":2,"backoffMaxDuration":"3600s","maxRetryCount":2},"taskInfo":{"name":"component-a"}},"component-b":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-b"},"retryPolicy":{"backoffDuration":"0s","backoffFactor":2,"backoffMaxDuration":"3600s","maxRetryCount":2},"taskInfo":{"name":"component-b"}}}}}' - - name: components-root - value: '{"dag":{"tasks":{"nested-pipeline":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-nested-pipeline"},"retryPolicy":{"backoffDuration":"0s","backoffFactor":2,"backoffMaxDuration":"3600s","maxRetryCount":2},"taskInfo":{"name":"nested-pipeline"}}}}}' - entrypoint: entrypoint - podMetadata: - annotations: - pipelines.kubeflow.org/v2_component: "true" - labels: - pipelines.kubeflow.org/v2_component: "true" - serviceAccountName: pipeline-runner - templates: - - container: - args: - - --type - - CONTAINER - - --pipeline_name - - hello-world - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --container - - '{{inputs.parameters.container}}' - - --iteration_index - - 
'{{inputs.parameters.iteration-index}}' - - --cached_decision_path - - '{{outputs.parameters.cached-decision.path}}' - - --pod_spec_patch_path - - '{{outputs.parameters.pod-spec-patch.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --kubernetes_config - - '{{inputs.parameters.kubernetes-config}}' - - --http_proxy - - "" - - --https_proxy - - "" - - --no_proxy - - "" - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - command: - - driver - env: - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - image: ghcr.io/kubeflow/kfp-driver - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - name: task - - name: container - - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: "" - name: kubernetes-config - name: system-container-driver - outputs: - parameters: - - name: pod-spec-patch - valueFrom: - default: "" - path: /tmp/outputs/pod-spec-patch - - default: "false" - name: cached-decision - valueFrom: - default: "false" - path: /tmp/outputs/cached-decision - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: pod-spec-patch - value: '{{inputs.parameters.pod-spec-patch}}' - - name: retry-max-count - value: '{{inputs.parameters.retry-max-count}}' - - name: retry-backoff-duration - value: '{{inputs.parameters.retry-backoff-duration}}' - - name: retry-backoff-factor - value: '{{inputs.parameters.retry-backoff-factor}}' - - name: retry-backoff-max-duration - value: '{{inputs.parameters.retry-backoff-max-duration}}' - name: executor - template: retry-system-container-impl - when: '{{inputs.parameters.cached-decision}} != true' - inputs: - parameters: - - name: pod-spec-patch - - default: "false" - name: cached-decision - - default: "0" - name: retry-max-count - - default: "0" - name: retry-backoff-duration - - default: "0" - name: retry-backoff-factor - - default: "0" - name: retry-backoff-max-duration - name: retry-system-container-executor - outputs: { } - - container: - command: - - should-be-overridden-during-runtime - env: - - name: KFP_POD_NAME - valueFrom: - fieldRef: - fieldPath: metadata.name - - name: KFP_POD_UID - valueFrom: - fieldRef: - fieldPath: metadata.uid - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - envFrom: - - configMapRef: - name: metadata-grpc-configmap - optional: true - image: gcr.io/ml-pipeline/should-be-overridden-during-runtime - name: "" - resources: { } - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - - mountPath: /gcs - name: gcs-scratch - - mountPath: /s3 - name: s3-scratch - - mountPath: /minio - name: minio-scratch - - mountPath: /.local - name: dot-local-scratch - - mountPath: /.cache - name: dot-cache-scratch - - mountPath: /.config - name: dot-config-scratch - initContainers: - - args: - - --copy - - /kfp-launcher/launch - command: - - launcher-v2 - image: ghcr.io/kubeflow/kfp-launcher - name: kfp-launcher - resources: - limits: - cpu: 500m - memory: 128Mi - requests: - cpu: 100m - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - inputs: - parameters: - - name: 
pod-spec-patch - - name: retry-max-count - - name: retry-backoff-duration - - name: retry-backoff-factor - - name: retry-backoff-max-duration - name: retry-system-container-impl - outputs: { } - podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' - retryStrategy: - backoff: - duration: '{{inputs.parameters.retry-backoff-duration}}' - factor: '{{inputs.parameters.retry-backoff-factor}}' - maxDuration: '{{inputs.parameters.retry-backoff-max-duration}}' - limit: '{{inputs.parameters.retry-max-count}}' - volumes: - - emptyDir: { } - name: kfp-launcher - - emptyDir: { } - name: gcs-scratch - - emptyDir: { } - name: s3-scratch - - emptyDir: { } - name: minio-scratch - - emptyDir: { } - name: dot-local-scratch - - emptyDir: { } - name: dot-cache-scratch - - emptyDir: { } - name: dot-config-scratch - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-c76def800bcb5189543541034eefac9210e827c15dd15b6de8ea4c45c233f603}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-a"},"retryPolicy":{"backoffDuration":"0s","backoffFactor":2,"backoffMaxDuration":"3600s","maxRetryCount":2},"taskInfo":{"name":"component-a"}}' - - name: container - value: '{{workflow.parameters.implementations-c76def800bcb5189543541034eefac9210e827c15dd15b6de8ea4c45c233f603}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: component-a-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.component-a-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.component-a-driver.outputs.parameters.cached-decision}}' - - name: retry-max-count - value: "2" - - name: retry-backoff-duration - value: "0" - - name: retry-backoff-factor - value: "2" - - name: retry-backoff-max-duration - value: "3600" - depends: component-a-driver.Succeeded - name: component-a - template: retry-system-container-executor - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-1a8bd1be9f10fe6fd3a429c49087a6cf42986d8e5a4f3eb99a60bba174470e23}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-b"},"retryPolicy":{"backoffDuration":"0s","backoffFactor":2,"backoffMaxDuration":"3600s","maxRetryCount":2},"taskInfo":{"name":"component-b"}}' - - name: container - value: '{{workflow.parameters.implementations-1a8bd1be9f10fe6fd3a429c49087a6cf42986d8e5a4f3eb99a60bba174470e23}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: component-b-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.component-b-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.component-b-driver.outputs.parameters.cached-decision}}' - - name: retry-max-count - value: "2" - - name: retry-backoff-duration - value: "0" - - name: retry-backoff-factor - value: "2" - - name: retry-backoff-max-duration - value: "3600" - depends: component-b-driver.Succeeded - name: component-b - template: retry-system-container-executor - inputs: - parameters: - - name: parent-dag-id - name: comp-nested-pipeline - outputs: { } - - container: - args: - - --type - - '{{inputs.parameters.driver-type}}' - - --pipeline_name - - hello-world - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - 
--dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --runtime_config - - '{{inputs.parameters.runtime-config}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' - - --iteration_count_path - - '{{outputs.parameters.iteration-count.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --http_proxy - - "" - - --https_proxy - - "" - - --no_proxy - - "" - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - command: - - driver - env: - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - image: ghcr.io/kubeflow/kfp-driver - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - default: "" - name: runtime-config - - default: "" - name: task - - default: "0" - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: DAG - name: driver-type - name: system-dag-driver - outputs: - parameters: - - name: execution-id - valueFrom: - path: /tmp/outputs/execution-id - - name: iteration-count - valueFrom: - default: "0" - path: /tmp/outputs/iteration-count - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-nested-pipeline}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-nested-pipeline"},"retryPolicy":{"backoffDuration":"0s","backoffFactor":2,"backoffMaxDuration":"3600s","maxRetryCount":2},"taskInfo":{"name":"nested-pipeline"}}' - name: nested-pipeline-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.nested-pipeline-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.nested-pipeline-driver.outputs.parameters.condition}}' - depends: nested-pipeline-driver.Succeeded - name: nested-pipeline - template: comp-nested-pipeline - inputs: - parameters: - - name: parent-dag-id - name: root - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-root}}' - - name: runtime-config - value: '{}' - - name: driver-type - value: ROOT_DAG - name: root-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' - - name: condition - value: "" - depends: root-driver.Succeeded - name: root - template: root - inputs: {} - name: entrypoint - outputs: {} diff --git a/backend/src/v2/compiler/argocompiler/testdata/nested_pipeline_sub_component_retry.yaml b/backend/src/v2/compiler/argocompiler/testdata/nested_pipeline_sub_component_retry.yaml deleted file mode 100644 index 138bc7d81a4..00000000000 --- a/backend/src/v2/compiler/argocompiler/testdata/nested_pipeline_sub_component_retry.yaml +++ /dev/null @@ -1,540 +0,0 @@ -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - creationTimestamp: null - generateName: hello-world- -spec: - arguments: - parameters: - - 
name: components-c76def800bcb5189543541034eefac9210e827c15dd15b6de8ea4c45c233f603 - value: '{"executorLabel":"exec-component-a"}' - - name: implementations-c76def800bcb5189543541034eefac9210e827c15dd15b6de8ea4c45c233f603 - value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_a"],"command":["sh","-c","\nif - ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 - -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 - python3 -m pip install --quiet --no-warn-script-location ''kfp==2.13.0'' ''--no-deps'' - ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 - \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" - \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 - -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport - kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef - component_a():\n print(''Component A'')\n\n"],"image":"python:3.9"}' - - name: components-1a8bd1be9f10fe6fd3a429c49087a6cf42986d8e5a4f3eb99a60bba174470e23 - value: '{"executorLabel":"exec-component-b"}' - - name: implementations-1a8bd1be9f10fe6fd3a429c49087a6cf42986d8e5a4f3eb99a60bba174470e23 - value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_b"],"command":["sh","-c","\nif - ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 - -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 - python3 -m pip install --quiet --no-warn-script-location ''kfp==2.13.0'' ''--no-deps'' - ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 - \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" - \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 - -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport - kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef - component_b():\n print (''Component B'')\n\n"],"image":"python:3.9"}' - - name: components-comp-nested-pipeline - value: '{"dag":{"tasks":{"component-a":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-a"},"taskInfo":{"name":"component-a"}},"component-b":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-b"},"retryPolicy":{"backoffDuration":"0s","backoffFactor":2,"backoffMaxDuration":"3600s","maxRetryCount":2},"taskInfo":{"name":"component-b"}}}}}' - - name: components-root - value: '{"dag":{"tasks":{"nested-pipeline":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-nested-pipeline"},"taskInfo":{"name":"nested-pipeline"}}}}}' - entrypoint: entrypoint - podMetadata: - annotations: - pipelines.kubeflow.org/v2_component: "true" - labels: - pipelines.kubeflow.org/v2_component: "true" - serviceAccountName: pipeline-runner - templates: - - container: - args: - - --type - - CONTAINER - - --pipeline_name - - hello-world - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --container - - '{{inputs.parameters.container}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --cached_decision_path - - 
'{{outputs.parameters.cached-decision.path}}' - - --pod_spec_patch_path - - '{{outputs.parameters.pod-spec-patch.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --kubernetes_config - - '{{inputs.parameters.kubernetes-config}}' - - --http_proxy - - "" - - --https_proxy - - "" - - --no_proxy - - "" - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - command: - - driver - env: - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - image: ghcr.io/kubeflow/kfp-driver - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - name: task - - name: container - - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: "" - name: kubernetes-config - name: system-container-driver - outputs: - parameters: - - name: pod-spec-patch - valueFrom: - default: "" - path: /tmp/outputs/pod-spec-patch - - default: "false" - name: cached-decision - valueFrom: - default: "false" - path: /tmp/outputs/cached-decision - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: pod-spec-patch - value: '{{inputs.parameters.pod-spec-patch}}' - name: executor - template: system-container-impl - when: '{{inputs.parameters.cached-decision}} != true' - inputs: - parameters: - - name: pod-spec-patch - - default: "false" - name: cached-decision - name: system-container-executor - outputs: {} - - container: - command: - - should-be-overridden-during-runtime - env: - - name: KFP_POD_NAME - valueFrom: - fieldRef: - fieldPath: metadata.name - - name: KFP_POD_UID - valueFrom: - fieldRef: - fieldPath: metadata.uid - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - envFrom: - - configMapRef: - name: metadata-grpc-configmap - optional: true - image: gcr.io/ml-pipeline/should-be-overridden-during-runtime - name: "" - resources: {} - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - - mountPath: /gcs - name: gcs-scratch - - mountPath: /s3 - name: s3-scratch - - mountPath: /minio - name: minio-scratch - - mountPath: /.local - name: dot-local-scratch - - mountPath: /.cache - name: dot-cache-scratch - - mountPath: /.config - name: dot-config-scratch - initContainers: - - args: - - --copy - - /kfp-launcher/launch - command: - - launcher-v2 - image: ghcr.io/kubeflow/kfp-launcher - name: kfp-launcher - resources: - limits: - cpu: 500m - memory: 128Mi - requests: - cpu: 100m - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - inputs: - parameters: - - name: pod-spec-patch - name: system-container-impl - outputs: {} - podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' - volumes: - - emptyDir: {} - name: kfp-launcher - - emptyDir: {} - name: gcs-scratch - - emptyDir: {} - name: s3-scratch - - emptyDir: {} - name: minio-scratch - - emptyDir: {} - name: dot-local-scratch - - emptyDir: {} - name: dot-cache-scratch - - emptyDir: {} - name: dot-config-scratch - - dag: - tasks: - - arguments: - parameters: - - name: pod-spec-patch - value: '{{inputs.parameters.pod-spec-patch}}' - - name: retry-max-count - value: '{{inputs.parameters.retry-max-count}}' - - name: 
retry-backoff-duration - value: '{{inputs.parameters.retry-backoff-duration}}' - - name: retry-backoff-factor - value: '{{inputs.parameters.retry-backoff-factor}}' - - name: retry-backoff-max-duration - value: '{{inputs.parameters.retry-backoff-max-duration}}' - name: executor - template: retry-system-container-impl - when: '{{inputs.parameters.cached-decision}} != true' - inputs: - parameters: - - name: pod-spec-patch - - default: "false" - name: cached-decision - - default: "0" - name: retry-max-count - - default: "0" - name: retry-backoff-duration - - default: "0" - name: retry-backoff-factor - - default: "0" - name: retry-backoff-max-duration - name: retry-system-container-executor - outputs: {} - - container: - command: - - should-be-overridden-during-runtime - env: - - name: KFP_POD_NAME - valueFrom: - fieldRef: - fieldPath: metadata.name - - name: KFP_POD_UID - valueFrom: - fieldRef: - fieldPath: metadata.uid - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - envFrom: - - configMapRef: - name: metadata-grpc-configmap - optional: true - image: gcr.io/ml-pipeline/should-be-overridden-during-runtime - name: "" - resources: {} - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - - mountPath: /gcs - name: gcs-scratch - - mountPath: /s3 - name: s3-scratch - - mountPath: /minio - name: minio-scratch - - mountPath: /.local - name: dot-local-scratch - - mountPath: /.cache - name: dot-cache-scratch - - mountPath: /.config - name: dot-config-scratch - initContainers: - - args: - - --copy - - /kfp-launcher/launch - command: - - launcher-v2 - image: ghcr.io/kubeflow/kfp-launcher - name: kfp-launcher - resources: - limits: - cpu: 500m - memory: 128Mi - requests: - cpu: 100m - volumeMounts: - - mountPath: /kfp-launcher - name: kfp-launcher - inputs: - parameters: - - name: pod-spec-patch - - name: retry-max-count - - name: retry-backoff-duration - - name: retry-backoff-factor - - name: retry-backoff-max-duration - name: retry-system-container-impl - outputs: {} - podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' - retryStrategy: - backoff: - duration: '{{inputs.parameters.retry-backoff-duration}}' - factor: '{{inputs.parameters.retry-backoff-factor}}' - maxDuration: '{{inputs.parameters.retry-backoff-max-duration}}' - limit: '{{inputs.parameters.retry-max-count}}' - volumes: - - emptyDir: {} - name: kfp-launcher - - emptyDir: {} - name: gcs-scratch - - emptyDir: {} - name: s3-scratch - - emptyDir: {} - name: minio-scratch - - emptyDir: {} - name: dot-local-scratch - - emptyDir: {} - name: dot-cache-scratch - - emptyDir: {} - name: dot-config-scratch - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-c76def800bcb5189543541034eefac9210e827c15dd15b6de8ea4c45c233f603}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-a"},"taskInfo":{"name":"component-a"}}' - - name: container - value: '{{workflow.parameters.implementations-c76def800bcb5189543541034eefac9210e827c15dd15b6de8ea4c45c233f603}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: component-a-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.component-a-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.component-a-driver.outputs.parameters.cached-decision}}' - depends: 
component-a-driver.Succeeded - name: component-a - template: system-container-executor - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-1a8bd1be9f10fe6fd3a429c49087a6cf42986d8e5a4f3eb99a60bba174470e23}}' - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-b"},"retryPolicy":{"backoffDuration":"0s","backoffFactor":2,"backoffMaxDuration":"3600s","maxRetryCount":2},"taskInfo":{"name":"component-b"}}' - - name: container - value: '{{workflow.parameters.implementations-1a8bd1be9f10fe6fd3a429c49087a6cf42986d8e5a4f3eb99a60bba174470e23}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: component-b-driver - template: system-container-driver - - arguments: - parameters: - - name: pod-spec-patch - value: '{{tasks.component-b-driver.outputs.parameters.pod-spec-patch}}' - - default: "false" - name: cached-decision - value: '{{tasks.component-b-driver.outputs.parameters.cached-decision}}' - - name: retry-max-count - value: "2" - - name: retry-backoff-duration - value: "0" - - name: retry-backoff-factor - value: "2" - - name: retry-backoff-max-duration - value: "3600" - depends: component-b-driver.Succeeded - name: component-b - template: retry-system-container-executor - inputs: - parameters: - - name: parent-dag-id - name: comp-nested-pipeline - outputs: { } - - container: - args: - - --type - - '{{inputs.parameters.driver-type}}' - - --pipeline_name - - hello-world - - --run_id - - '{{workflow.uid}}' - - --run_name - - '{{workflow.name}}' - - --run_display_name - - '' - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' - - --component - - '{{inputs.parameters.component}}' - - --task - - '{{inputs.parameters.task}}' - - --runtime_config - - '{{inputs.parameters.runtime-config}}' - - --iteration_index - - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' - - --iteration_count_path - - '{{outputs.parameters.iteration-count.path}}' - - --condition_path - - '{{outputs.parameters.condition.path}}' - - --http_proxy - - "" - - --https_proxy - - "" - - --no_proxy - - "" - - --mlPipelineServiceTLSEnabled - - "false" - - --mlmd_server_address - - "metadata-grpc-service" - - --mlmd_server_port - - "8080" - - --metadataTLSEnabled - - "false" - - --ca_cert_path - - "" - command: - - driver - env: - - name: ML_PIPELINE_SERVICE_HOST - value: "ml-pipeline.kubeflow.svc.cluster.local" - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - image: ghcr.io/kubeflow/kfp-driver - name: "" - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 100m - memory: 64Mi - inputs: - parameters: - - name: component - - default: "" - name: runtime-config - - default: "" - name: task - - default: "0" - name: parent-dag-id - - default: "-1" - name: iteration-index - - default: DAG - name: driver-type - name: system-dag-driver - outputs: - parameters: - - name: execution-id - valueFrom: - path: /tmp/outputs/execution-id - - name: iteration-count - valueFrom: - default: "0" - path: /tmp/outputs/iteration-count - - name: condition - valueFrom: - default: "true" - path: /tmp/outputs/condition - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-nested-pipeline}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-nested-pipeline"},"taskInfo":{"name":"nested-pipeline"}}' - name: nested-pipeline-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.nested-pipeline-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.nested-pipeline-driver.outputs.parameters.condition}}' - depends: nested-pipeline-driver.Succeeded - name: nested-pipeline - template: comp-nested-pipeline - inputs: - parameters: - - name: parent-dag-id - name: root - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-root}}' - - name: runtime-config - value: '{}' - - name: driver-type - value: ROOT_DAG - name: root-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' - - name: condition - value: "" - depends: root-driver.Succeeded - name: root - template: root - inputs: {} - name: entrypoint - outputs: {} diff --git a/backend/src/v2/compiler/testdata/component_used_twice.json b/backend/src/v2/compiler/testdata/component_used_twice.json deleted file mode 100644 index 3fa73594297..00000000000 --- a/backend/src/v2/compiler/testdata/component_used_twice.json +++ /dev/null @@ -1,93 +0,0 @@ -{ - "pipelineSpec": { - "components": { - "comp-hello-world": { - "executorLabel": "exec-hello-world", - "inputDefinitions": { - "parameters": { - "text": { - "type": "STRING" - } - } - } - } - }, - "deploymentSpec": { - "executors": { - "exec-hello-world": { - "container": { - "args": ["--text", "{{$.inputs.parameters['text']}}"], - "command": [ - "sh", - "-ec", - "program_path=$(mktemp)\nprintf \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", - "def hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Hello world', description='')\n_parser.add_argument(\"--text\", dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n" - ], - "image": "python:3.9" - } - } - } - }, - "pipelineInfo": { - "name": "component-used-twice" - }, - "root": { - "dag": { - "tasks": { - "hello-world": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-hello-world" - }, - "inputs": { - "parameters": { - "text": { - "componentInputParameter": "text" - } - } - }, - "taskInfo": { - "name": "hello-world" - } - }, - "hello-world-2": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-hello-world" - }, - "inputs": { - "parameters": { - "text": { - "componentInputParameter": "text" - } - } - }, - "taskInfo": { - "name": "hello-world-2" - } - } - } - }, - "inputDefinitions": { - "parameters": { - "text": { - "type": "STRING" - } - } - } - }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.6.5" - }, - "runtimeConfig": { - "parameters": { - "text": { - "stringValue": "hi there" - } - } - } -} diff --git a/backend/src/v2/compiler/testdata/create_mount_delete_dynamic_pvc.json b/backend/src/v2/compiler/testdata/create_mount_delete_dynamic_pvc.json deleted file mode 100644 index 99b7a7c5db5..00000000000 --- a/backend/src/v2/compiler/testdata/create_mount_delete_dynamic_pvc.json +++ /dev/null @@ -1,199 +0,0 @@ -{ - "pipelineSpec": { - "components": { - "comp-comp": { - "executorLabel": "exec-comp" - }, - "comp-comp-2": { - 
"executorLabel": "exec-comp-2" - }, - "comp-createpvc": { - "executorLabel": "exec-createpvc", - "inputDefinitions": { - "parameters": { - "access_modes": { - "parameterType": "LIST" - }, - "annotations": { - "isOptional": true, - "parameterType": "STRUCT" - }, - "pvc_name": { - "isOptional": true, - "parameterType": "STRING" - }, - "pvc_name_suffix": { - "isOptional": true, - "parameterType": "STRING" - }, - "size": { - "parameterType": "STRING" - }, - "storage_class_name": { - "defaultValue": "", - "isOptional": true, - "parameterType": "STRING" - }, - "volume_name": { - "isOptional": true, - "parameterType": "STRING" - } - } - }, - "outputDefinitions": { - "parameters": { - "name": { - "parameterType": "STRING" - } - } - } - }, - "comp-deletepvc": { - "executorLabel": "exec-deletepvc", - "inputDefinitions": { - "parameters": { - "pvc_name": { - "parameterType": "STRING" - } - } - } - } - }, - "deploymentSpec": { - "executors": { - "exec-comp": { - "container": { - "args": ["--executor_input", "{{$}}", "--function_to_execute", "comp"], - "command": [ - "sh", - "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.0-beta.16' && \"$0\" \"$@\"\n", - "sh", - "-ec", - "program_path=$(mktemp -d) printf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\" python3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\" ", - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef comp():\n pass\n\n" - ], - "image": "python:3.9" - } - }, - "exec-comp-2": { - "container": { - "args": ["--executor_input", "{{$}}", "--function_to_execute", "comp"], - "command": [ - "sh", - "-c", - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==2.0.0-beta.16' && \"$0\" \"$@\"\n", - "sh", - "-ec", - "program_path=$(mktemp -d) printf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\" python3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\" ", - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef comp():\n pass\n\n" - ], - "image": "python:3.9" - } - }, - "exec-createpvc": { - "container": { - "image": "argostub/createpvc" - } - }, - "exec-deletepvc": { - "container": { - "image": "argostub/deletepvc" - } - } - } - }, - "pipelineInfo": { - "name": "my-pipeline" - }, - "root": { - "dag": { - "tasks": { - "comp": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-comp" - }, - "dependentTasks": ["createpvc"], - "taskInfo": { - "name": "comp" - } - }, - "comp-2": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-comp-2" - }, - "dependentTasks": ["comp", "createpvc"], - "taskInfo": { - "name": "comp-2" - } - }, - "createpvc": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-createpvc" - }, - "inputs": { - "parameters": { - "access_modes": { - "runtimeValue": { - "constant": ["ReadWriteOnce"] - } - }, - "pvc_name_suffix": { - "runtimeValue": { - "constant": "-my-pvc" - } - }, - "size": { - "runtimeValue": { - "constant": "5Gi" - } - }, - "storage_class_name": { - "runtimeValue": { - "constant": "standard" - } - } - } - }, - "taskInfo": { - "name": "createpvc" - } - }, - "deletepvc": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-deletepvc" - }, - "dependentTasks": ["comp-2", "createpvc"], - "inputs": { - "parameters": { - "pvc_name": { - "taskOutputParameter": { - "outputParameterKey": "name", - "producerTask": "createpvc" - } - } - } - }, - "taskInfo": { - "name": "deletepvc" - } - } - } - } - }, - "schemaVersion": "2.1.0", - "sdkVersion": "kfp-2.0.0-beta.16" - } -} diff --git a/backend/src/v2/compiler/testdata/create_mount_delete_dynamic_pvc_platform.json b/backend/src/v2/compiler/testdata/create_mount_delete_dynamic_pvc_platform.json deleted file mode 100644 index e1e60973474..00000000000 --- a/backend/src/v2/compiler/testdata/create_mount_delete_dynamic_pvc_platform.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "platforms": { - "kubernetes": { - "deploymentSpec": { - "executors": { - "exec-comp": { - "pvcMount": [ - { - "mountPath": "/data", - "taskOutputParameter": { - "outputParameterKey": "name", - "producerTask": "createpvc" - } - } - ] - }, - "exec-comp-2": { - "pvcMount": [ - { - "mountPath": "/reused_data", - "taskOutputParameter": { - "outputParameterKey": "name", - "producerTask": "createpvc" - } - } - ] - } - } - } - } - } - } \ No newline at end of file diff --git a/backend/src/v2/compiler/testdata/create_pod_metadata.json b/backend/src/v2/compiler/testdata/create_pod_metadata.json deleted file mode 100644 index 246d3b1dd23..00000000000 --- a/backend/src/v2/compiler/testdata/create_pod_metadata.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "platforms": { - "kubernetes": { - "deploymentSpec": { - "executors": { - "exec-hello-world": { - "podMetadata": { - "annotations": { - "run_id": "123456", - "experiment_id": "234567" - }, - "labels": { - "kubeflow.com/kfp": 
"pipeline-node", - "kubeflow.com/common": "test" - } - } - } - } - } - } - } - } diff --git a/backend/src/v2/compiler/testdata/exit_handler.json b/backend/src/v2/compiler/testdata/exit_handler.json deleted file mode 100644 index becf09f390e..00000000000 --- a/backend/src/v2/compiler/testdata/exit_handler.json +++ /dev/null @@ -1,218 +0,0 @@ -{ - "pipelineSpec": { - "components": { - "comp-exit-handler-1": { - "dag": { - "tasks": { - "fail-op": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-fail-op" - }, - "inputs": { - "parameters": { - "message": { - "runtimeValue": { - "constantValue": { - "stringValue": "Task failed." - } - } - } - } - }, - "taskInfo": { - "name": "fail-op" - } - }, - "print-op-2": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-print-op-2" - }, - "inputs": { - "parameters": { - "message": { - "componentInputParameter": "pipelineparam--message" - } - } - }, - "taskInfo": { - "name": "print-op-2" - } - } - } - }, - "inputDefinitions": { - "parameters": { - "pipelineparam--message": { - "type": "STRING" - } - } - } - }, - "comp-fail-op": { - "executorLabel": "exec-fail-op", - "inputDefinitions": { - "parameters": { - "message": { - "type": "STRING" - } - } - } - }, - "comp-print-op": { - "executorLabel": "exec-print-op", - "inputDefinitions": { - "parameters": { - "message": { - "type": "STRING" - } - } - } - }, - "comp-print-op-2": { - "executorLabel": "exec-print-op-2", - "inputDefinitions": { - "parameters": { - "message": { - "type": "STRING" - } - } - } - } - }, - "deploymentSpec": { - "executors": { - "exec-fail-op": { - "container": { - "args": [ - "--executor_input", - "{{$}}", - "--function_to_execute", - "fail_op" - ], - "command": [ - "sh", - "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.22' && \"$0\" \"$@\"\n", - "sh", - "-ec", - "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", - "\nimport kfp\nfrom kfp.v2 import dsl\nfrom kfp.v2.dsl import *\nfrom typing import *\n\ndef fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n print(message)\n sys.exit(1)\n\n" - ], - "image": "python:3.7" - } - }, - "exec-print-op": { - "container": { - "args": [ - "--executor_input", - "{{$}}", - "--function_to_execute", - "print_op" - ], - "command": [ - "sh", - "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.22' && \"$0\" \"$@\"\n", - "sh", - "-ec", - "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", - "\nimport kfp\nfrom kfp.v2 import dsl\nfrom kfp.v2.dsl import *\nfrom typing import *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n print(message)\n\n" - ], - "image": "python:3.7" - } - }, - "exec-print-op-2": { - "container": { - "args": [ - "--executor_input", - "{{$}}", - "--function_to_execute", - "print_op" - ], - "command": [ - "sh", - "-c", - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.22' && \"$0\" \"$@\"\n", - "sh", - "-ec", - "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", - "\nimport kfp\nfrom kfp.v2 import dsl\nfrom kfp.v2.dsl import *\nfrom typing import *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n print(message)\n\n" - ], - "image": "python:3.7" - } - } - } - }, - "pipelineInfo": { - "name": "pipeline-with-exit-handler" - }, - "root": { - "dag": { - "tasks": { - "exit-handler-1": { - "componentRef": { - "name": "comp-exit-handler-1" - }, - "inputs": { - "parameters": { - "pipelineparam--message": { - "componentInputParameter": "message" - } - } - }, - "taskInfo": { - "name": "exit-handler-1" - } - }, - "print-op": { - "componentRef": { - "name": "comp-print-op" - }, - "dependentTasks": [ - "exit-handler-1" - ], - "inputs": { - "parameters": { - "message": { - "runtimeValue": { - "constantValue": { - "stringValue": "Exit handler has worked!" - } - } - } - } - }, - "taskInfo": { - "name": "print-op" - }, - "triggerPolicy": { - "strategy": "ALL_UPSTREAM_TASKS_COMPLETED" - } - } - } - }, - "inputDefinitions": { - "parameters": { - "message": { - "type": "STRING" - } - } - } - }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.8.22" - }, - "runtimeConfig": { - "parameters": { - "message": { - "stringValue": "Hello World!" - } - } - } -} \ No newline at end of file diff --git a/backend/src/v2/compiler/testdata/hello_world.json b/backend/src/v2/compiler/testdata/hello_world.json deleted file mode 100644 index 5e41e48315c..00000000000 --- a/backend/src/v2/compiler/testdata/hello_world.json +++ /dev/null @@ -1,78 +0,0 @@ -{ - "pipelineSpec": { - "components": { - "comp-hello-world": { - "executorLabel": "exec-hello-world", - "inputDefinitions": { - "parameters": { - "text": { - "type": "STRING" - } - } - } - } - }, - "deploymentSpec": { - "executors": { - "exec-hello-world": { - "container": { - "args": [ - "--text", - "{{$.inputs.parameters['text']}}" - ], - "command": [ - "sh", - "-ec", - "program_path=$(mktemp)\nprintf \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", - "def hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Hello world', description='')\n_parser.add_argument(\"--text\", dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n" - ], - "image": "python:3.9" - } - } - } - }, - "pipelineInfo": { - "name": "namespace/n1/pipeline/hello-world" - }, - "root": { - "dag": { - "tasks": { - "hello-world": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-hello-world" - }, - "inputs": { - "parameters": { - "text": { - "componentInputParameter": "text" - } - } - }, - "taskInfo": { - "name": "hello-world" - } - } - } - }, - "inputDefinitions": { - "parameters": { - "text": { - "type": "STRING" - } - } - } - }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.6.5" - }, - "runtimeConfig": { - "parameters": { - "text": { - "stringValue": "hi there" - } - } - } -} \ No newline at end of file diff --git 
a/backend/src/v2/compiler/testdata/hello_world_with_retry.json b/backend/src/v2/compiler/testdata/hello_world_with_retry.json deleted file mode 100644 index 97383c5c9ee..00000000000 --- a/backend/src/v2/compiler/testdata/hello_world_with_retry.json +++ /dev/null @@ -1,102 +0,0 @@ -{ - "pipelineSpec": { - "components": { - "comp-hello-world": { - "executorLabel": "exec-hello-world", - "inputDefinitions": { - "parameters": { - "text": { - "type": "STRING" - } - } - } - } - }, - "deploymentSpec": { - "executors": { - "exec-hello-world": { - "container": { - "args": [ - "--text", - "{{$.inputs.parameters['text']}}" - ], - "command": [ - "sh", - "-ec", - "program_path=$(mktemp)\nprintf \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", - "def hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Hello world', description='')\n_parser.add_argument(\"--text\", dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n" - ], - "image": "python:3.9" - } - } - } - }, - "pipelineInfo": { - "name": "namespace/n1/pipeline/hello-world" - }, - "root": { - "dag": { - "tasks": { - "hello-world": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-hello-world" - }, - "retryPolicy": { - "backoffFactor": 2, - "backoffMaxDuration": "3600s", - "maxRetryCount": 2 - }, - "inputs": { - "parameters": { - "text": { - "componentInputParameter": "text" - } - } - }, - "taskInfo": { - "name": "hello-world" - } - }, - "hello-world-non-retry": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-hello-world" - }, - "inputs": { - "parameters": { - "text": { - "componentInputParameter": "text" - } - } - }, - "taskInfo": { - "name": "hello-world-2" - } - } - } - }, - "inputDefinitions": { - "parameters": { - "text": { - "type": "STRING" - } - } - } - }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.6.5" - }, - "runtimeConfig": { - "parameters": { - "text": { - "stringValue": "hi there" - } - } - } -} - diff --git a/backend/src/v2/compiler/testdata/hello_world_with_retry_all_args.json b/backend/src/v2/compiler/testdata/hello_world_with_retry_all_args.json deleted file mode 100644 index ba3de5fbf10..00000000000 --- a/backend/src/v2/compiler/testdata/hello_world_with_retry_all_args.json +++ /dev/null @@ -1,103 +0,0 @@ -{ - "pipelineSpec": { - "components": { - "comp-hello-world": { - "executorLabel": "exec-hello-world", - "inputDefinitions": { - "parameters": { - "text": { - "type": "STRING" - } - } - } - } - }, - "deploymentSpec": { - "executors": { - "exec-hello-world": { - "container": { - "args": [ - "--text", - "{{$.inputs.parameters['text']}}" - ], - "command": [ - "sh", - "-ec", - "program_path=$(mktemp)\nprintf \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", - "def hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Hello world', description='')\n_parser.add_argument(\"--text\", dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n" - ], - "image": "python:3.9" - } - } - } - }, - "pipelineInfo": { - "name": "namespace/n1/pipeline/hello-world" - }, - "root": { - "dag": { - "tasks": { - "hello-world": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-hello-world" - 
}, - "retryPolicy": { - "backoffFactor": 2, - "backoffMaxDuration": "3600s", - "backoffDuration": "1s", - "maxRetryCount": 2 - }, - "inputs": { - "parameters": { - "text": { - "componentInputParameter": "text" - } - } - }, - "taskInfo": { - "name": "hello-world" - } - }, - "hello-world-non-retry": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-hello-world" - }, - "inputs": { - "parameters": { - "text": { - "componentInputParameter": "text" - } - } - }, - "taskInfo": { - "name": "hello-world-2" - } - } - } - }, - "inputDefinitions": { - "parameters": { - "text": { - "type": "STRING" - } - } - } - }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.6.5" - }, - "runtimeConfig": { - "parameters": { - "text": { - "stringValue": "hi there" - } - } - } -} - diff --git a/backend/src/v2/compiler/testdata/importer.json b/backend/src/v2/compiler/testdata/importer.json deleted file mode 100644 index 52e8ea6bb55..00000000000 --- a/backend/src/v2/compiler/testdata/importer.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "pipelineSpec": { - "components": { - "comp-importer": { - "executorLabel": "exec-importer", - "inputDefinitions": { - "parameters": { - "uri": { - "type": "STRING" - } - } - }, - "outputDefinitions": { - "artifacts": { - "artifact": { - "artifactType": { - "schemaTitle": "system.Dataset" - } - } - } - } - } - }, - "deploymentSpec": { - "executors": { - "exec-importer": { - "importer": { - "artifactUri": { - "constantValue": { - "stringValue": "gs://ml-pipeline-playground/shakespeare1.txt" - } - }, - "typeSchema": { - "schemaTitle": "system.Dataset" - } - } - } - } - }, - "pipelineInfo": { - "name": "pipeline-with-importer" - }, - "root": { - "dag": { - "tasks": { - "importer": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-importer" - }, - "inputs": { - "parameters": { - "uri": { - "runtimeValue": { - "constantValue": { - "stringValue": "gs://ml-pipeline-playground/shakespeare1.txt" - } - } - } - } - }, - "taskInfo": { - "name": "importer" - } - } - } - }, - "inputDefinitions": { - "parameters": { - "dataset2": { - "type": "STRING" - } - } - } - }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.7.1" - }, - "runtimeConfig": {} -} \ No newline at end of file diff --git a/backend/src/v2/compiler/testdata/multiple_parallel_loops.json b/backend/src/v2/compiler/testdata/multiple_parallel_loops.json deleted file mode 100644 index 1208aa20445..00000000000 --- a/backend/src/v2/compiler/testdata/multiple_parallel_loops.json +++ /dev/null @@ -1,284 +0,0 @@ -{ - "pipelineSpec" :{ - "components": { - "comp-for-loop-2": { - "dag": { - "tasks": { - "print-op": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-print-op" - }, - "inputs": { - "parameters": { - "s": { - "componentInputParameter": "pipelinechannel--loop-item-param-1", - "parameterExpressionSelector": "parseJson(string_value)[\"A_a\"]" - } - } - }, - "taskInfo": { - "name": "print-op" - } - }, - "print-op-2": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-print-op-2" - }, - "inputs": { - "parameters": { - "s": { - "componentInputParameter": "pipelinechannel--loop-item-param-1", - "parameterExpressionSelector": "parseJson(string_value)[\"B_b\"]" - } - } - }, - "taskInfo": { - "name": "print-op-2" - } - } - } - }, - "inputDefinitions": { - "parameters": { - "pipelinechannel--loop-item-param-1": { - "parameterType": "STRUCT" - } - } - } - }, - "comp-for-loop-4": { - "dag": { - "tasks": { - 
"print-op-3": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-print-op-3" - }, - "inputs": { - "parameters": { - "s": { - "componentInputParameter": "pipelinechannel--loop-item-param-3", - "parameterExpressionSelector": "parseJson(string_value)[\"A_a\"]" - } - } - }, - "taskInfo": { - "name": "print-op-3" - } - }, - "print-op-4": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-print-op-4" - }, - "inputs": { - "parameters": { - "s": { - "componentInputParameter": "pipelinechannel--loop-item-param-3", - "parameterExpressionSelector": "parseJson(string_value)[\"B_b\"]" - } - } - }, - "taskInfo": { - "name": "print-op-4" - } - } - } - }, - "inputDefinitions": { - "parameters": { - "pipelinechannel--loop-item-param-3": { - "parameterType": "STRUCT" - } - } - } - }, - "comp-print-op": { - "executorLabel": "exec-print-op", - "inputDefinitions": { - "parameters": { - "s": { - "parameterType": "STRING" - } - } - } - }, - "comp-print-op-2": { - "executorLabel": "exec-print-op-2", - "inputDefinitions": { - "parameters": { - "s": { - "parameterType": "STRING" - } - } - } - }, - "comp-print-op-3": { - "executorLabel": "exec-print-op-3", - "inputDefinitions": { - "parameters": { - "s": { - "parameterType": "STRING" - } - } - } - }, - "comp-print-op-4": { - "executorLabel": "exec-print-op-4", - "inputDefinitions": { - "parameters": { - "s": { - "parameterType": "STRING" - } - } - } - } - }, - "deploymentSpec": { - "executors": { - "exec-print-op": { - "container": { - "args": [ - "--executor_input", - "{{$}}", - "--function_to_execute", - "print_op" - ], - "command": [ - "sh", - "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0' '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"$0\" \"$@\"\n", - "sh", - "-ec", - "program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef print_op(s: str):\n print(s)\n\n" - ], - "image": "python:3.7" - } - }, - "exec-print-op-2": { - "container": { - "args": [ - "--executor_input", - "{{$}}", - "--function_to_execute", - "print_op" - ], - "command": [ - "sh", - "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0' '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"$0\" \"$@\"\n", - "sh", - "-ec", - "program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef print_op(s: str):\n print(s)\n\n" - ], - "image": "python:3.7" - } - }, - "exec-print-op-3": { - "container": { - "args": [ - "--executor_input", - "{{$}}", - "--function_to_execute", - "print_op" - ], - "command": [ - "sh", - "-c", - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0' '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"$0\" \"$@\"\n", - "sh", - "-ec", - "program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef print_op(s: str):\n print(s)\n\n" - ], - "image": "python:3.7" - } - }, - "exec-print-op-4": { - "container": { - "args": [ - "--executor_input", - "{{$}}", - "--function_to_execute", - "print_op" - ], - "command": [ - "sh", - "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0' '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"$0\" \"$@\"\n", - "sh", - "-ec", - "program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef print_op(s: str):\n print(s)\n\n" - ], - "image": "python:3.7" - } - } - } - }, - "pipelineInfo": { - "name": "my-pipeline" - }, - "root": { - "dag": { - "tasks": { - "for-loop-2": { - "componentRef": { - "name": "comp-for-loop-2" - }, - "iteratorPolicy": { - "parallelismLimit": 2 - }, - "parameterIterator": { - "itemInput": "pipelinechannel--loop-item-param-1", - "items": { - "raw": "[{\"A_a\": \"1\", \"B_b\": \"10\"}, {\"A_a\": \"2\", \"B_b\": \"20\"}, {\"A_a\": \"3\", \"B_b\": \"30\"}, {\"A_a\": \"4\", \"B_b\": \"40\"}, {\"A_a\": \"5\", \"B_b\": \"50\"}, {\"A_a\": \"6\", \"B_b\": \"60\"}, {\"A_a\": \"7\", \"B_b\": \"70\"}, {\"A_a\": \"8\", \"B_b\": \"80\"}, {\"A_a\": \"9\", \"B_b\": \"90\"}, {\"A_a\": \"10\", \"B_b\": \"100\"}]" - } - }, - "taskInfo": { - "name": "foo" - } - }, - "for-loop-4": { - "componentRef": { - "name": "comp-for-loop-4" - }, - "iteratorPolicy": { - "parallelismLimit": 4 - }, - "parameterIterator": { - "itemInput": "pipelinechannel--loop-item-param-3", - "items": { - "raw": "[{\"A_a\": \"1\", \"B_b\": \"10\"}, {\"A_a\": \"2\", \"B_b\": \"20\"}, {\"A_a\": \"3\", \"B_b\": \"30\"}, {\"A_a\": \"4\", \"B_b\": \"40\"}, {\"A_a\": \"5\", \"B_b\": \"50\"}, {\"A_a\": \"6\", \"B_b\": \"60\"}, {\"A_a\": \"7\", \"B_b\": \"70\"}, {\"A_a\": \"8\", \"B_b\": \"80\"}, {\"A_a\": \"9\", \"B_b\": \"90\"}, {\"A_a\": \"10\", \"B_b\": \"100\"}]" - } - }, - "taskInfo": { - "name": "bar" - } - } - } - } - }, - "schemaVersion": "2.1.0", - "sdkVersion": "kfp-2.7.0" - }, - "runtimeConfig": { - "parameters": { - "text": { - "stringValue": "hello world" - } - } - } -} diff --git a/backend/src/v2/compiler/testdata/nested_pipeline_all_level_retry.json b/backend/src/v2/compiler/testdata/nested_pipeline_all_level_retry.json deleted file mode 100644 index 89bb5b18bb2..00000000000 --- a/backend/src/v2/compiler/testdata/nested_pipeline_all_level_retry.json +++ /dev/null @@ -1,118 +0,0 @@ -{ - "pipelineSpec": { - "components": { - "comp-component-a": { - "executorLabel": "exec-component-a" - }, - 
"comp-component-b": { - "executorLabel": "exec-component-b" - }, - "comp-nested-pipeline": { - "dag": { - "tasks": { - "component-a": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-component-a" - }, - "taskInfo": { - "name": "component-a" - } - }, - "component-b": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-component-b" - }, - "retryPolicy": { - "backoffDuration": "0s", - "backoffFactor": 2.0, - "backoffMaxDuration": "3600s", - "maxRetryCount": 2 - }, - "taskInfo": { - "name": "component-b" - } - } - } - } - } - }, - "deploymentSpec": { - "executors": { - "exec-component-a": { - "container": { - "args": [ - "--executor_input", - "{{$}}", - "--function_to_execute", - "component_a" - ], - "command": [ - "sh", - "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==2.13.0' '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"$0\" \"$@\"\n", - "sh", - "-ec", - "program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef component_a():\n print('Component A')\n\n" - ], - "image": "python:3.9" - } - }, - "exec-component-b": { - "container": { - "args": [ - "--executor_input", - "{{$}}", - "--function_to_execute", - "component_b" - ], - "command": [ - "sh", - "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==2.13.0' '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"$0\" \"$@\"\n", - "sh", - "-ec", - "program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef component_b():\n print ('Component B')\n\n" - ], - "image": "python:3.9" - } - } - } - }, - "pipelineInfo": { - "name": "hello-world" - }, - "root": { - "dag": { - "tasks": { - "nested-pipeline": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-nested-pipeline" - }, - "retryPolicy": { - "backoffDuration": "0s", - "backoffFactor": 1.0, - "backoffMaxDuration": "1800s", - "maxRetryCount": 1 - }, - "taskInfo": { - "name": "nested-pipeline" - } - } - } - } - }, - "schemaVersion": "2.1.0", - "sdkVersion": "kfp-2.13.0" - } -} diff --git a/backend/src/v2/compiler/testdata/nested_pipeline_pipeline_retry.json b/backend/src/v2/compiler/testdata/nested_pipeline_pipeline_retry.json deleted file mode 100644 index d6b068b0898..00000000000 --- a/backend/src/v2/compiler/testdata/nested_pipeline_pipeline_retry.json +++ /dev/null @@ -1,112 +0,0 @@ -{ - "pipelineSpec": { - "components": { - "comp-component-a": { - "executorLabel": "exec-component-a" - }, - "comp-component-b": { - "executorLabel": "exec-component-b" - }, - "comp-nested-pipeline": { - "dag": { - "tasks": { - "component-a": { - "cachingOptions": { - "enableCache": true - }, - 
"componentRef": { - "name": "comp-component-a" - }, - "taskInfo": { - "name": "component-a" - } - }, - "component-b": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-component-b" - }, - "taskInfo": { - "name": "component-b" - } - } - } - } - } - }, - "deploymentSpec": { - "executors": { - "exec-component-a": { - "container": { - "args": [ - "--executor_input", - "{{$}}", - "--function_to_execute", - "component_a" - ], - "command": [ - "sh", - "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==2.13.0' '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"$0\" \"$@\"\n", - "sh", - "-ec", - "program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef component_a():\n print('Component A')\n\n" - ], - "image": "python:3.9" - } - }, - "exec-component-b": { - "container": { - "args": [ - "--executor_input", - "{{$}}", - "--function_to_execute", - "component_b" - ], - "command": [ - "sh", - "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==2.13.0' '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"$0\" \"$@\"\n", - "sh", - "-ec", - "program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef component_b():\n print ('Component B')\n\n" - ], - "image": "python:3.9" - } - } - } - }, - "pipelineInfo": { - "name": "hello-world" - }, - "root": { - "dag": { - "tasks": { - "nested-pipeline": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-nested-pipeline" - }, - "retryPolicy": { - "backoffDuration": "0s", - "backoffFactor": 2.0, - "backoffMaxDuration": "3600s", - "maxRetryCount": 2 - }, - "taskInfo": { - "name": "nested-pipeline" - } - } - } - } - }, - "schemaVersion": "2.1.0", - "sdkVersion": "kfp-2.13.0" - } -} diff --git a/backend/src/v2/compiler/testdata/nested_pipeline_sub_component_retry.json b/backend/src/v2/compiler/testdata/nested_pipeline_sub_component_retry.json deleted file mode 100644 index b959cbd9b11..00000000000 --- a/backend/src/v2/compiler/testdata/nested_pipeline_sub_component_retry.json +++ /dev/null @@ -1,112 +0,0 @@ -{ - "pipelineSpec": { - "components": { - "comp-component-a": { - "executorLabel": "exec-component-a" - }, - "comp-component-b": { - "executorLabel": "exec-component-b" - }, - "comp-nested-pipeline": { - "dag": { - "tasks": { - "component-a": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-component-a" - }, - "taskInfo": { - "name": "component-a" - } - }, - "component-b": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-component-b" - }, - "retryPolicy": { - "backoffDuration": "0s", - "backoffFactor": 2, - 
"backoffMaxDuration": "3600s", - "maxRetryCount": 2.0 - }, - "taskInfo": { - "name": "component-b" - } - } - } - } - } - }, - "deploymentSpec": { - "executors": { - "exec-component-a": { - "container": { - "args": [ - "--executor_input", - "{{$}}", - "--function_to_execute", - "component_a" - ], - "command": [ - "sh", - "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==2.13.0' '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"$0\" \"$@\"\n", - "sh", - "-ec", - "program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef component_a():\n print('Component A')\n\n" - ], - "image": "python:3.9" - } - }, - "exec-component-b": { - "container": { - "args": [ - "--executor_input", - "{{$}}", - "--function_to_execute", - "component_b" - ], - "command": [ - "sh", - "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==2.13.0' '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"$0\" \"$@\"\n", - "sh", - "-ec", - "program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef component_b():\n print ('Component B')\n\n" - ], - "image": "python:3.9" - } - } - } - }, - "pipelineInfo": { - "name": "hello-world" - }, - "root": { - "dag": { - "tasks": { - "nested-pipeline": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-nested-pipeline" - }, - "taskInfo": { - "name": "nested-pipeline" - } - } - } - } - }, - "schemaVersion": "2.1.0", - "sdkVersion": "kfp-2.13.0" - } -} diff --git a/backend/src/v2/compiler/testdata/producer_consumer_param.json b/backend/src/v2/compiler/testdata/producer_consumer_param.json deleted file mode 100644 index e774ceb7b38..00000000000 --- a/backend/src/v2/compiler/testdata/producer_consumer_param.json +++ /dev/null @@ -1,127 +0,0 @@ -{ - "pipelineSpec": { - "components": { - "comp-consumer": { - "executorLabel": "exec-consumer", - "inputDefinitions": { - "parameters": { - "input_value": { - "type": "STRING" - } - } - } - }, - "comp-producer": { - "executorLabel": "exec-producer", - "inputDefinitions": { - "parameters": { - "input_text": { - "type": "STRING" - } - } - }, - "outputDefinitions": { - "parameters": { - "output_value": { - "type": "STRING" - } - } - } - } - }, - "deploymentSpec": { - "executors": { - "exec-consumer": { - "container": { - "command": [ - "sh", - "-c", - "set -e -x\necho \"Read from an input parameter: \" && echo \"$0\"\n", - "{{$.inputs.parameters['input_value']}}" - ], - "image": "google/cloud-sdk:latest" - } - }, - "exec-producer": { - "container": { - "command": [ - "sh", - "-c", - "set -e -x\necho \"$0, this is an output parameter\" | gsutil cp - \"$1\"\n", - "{{$.inputs.parameters['input_text']}}", - 
"{{$.outputs.parameters['output_value'].output_file}}" - ], - "image": "google/cloud-sdk:latest" - } - } - } - }, - "pipelineInfo": { - "name": "producer-consumer-param-pipeline" - }, - "root": { - "dag": { - "tasks": { - "consumer": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-consumer" - }, - "dependentTasks": [ - "producer" - ], - "inputs": { - "parameters": { - "input_value": { - "taskOutputParameter": { - "outputParameterKey": "output_value", - "producerTask": "producer" - } - } - } - }, - "taskInfo": { - "name": "consumer" - } - }, - "producer": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-producer" - }, - "inputs": { - "parameters": { - "input_text": { - "componentInputParameter": "text" - } - } - }, - "taskInfo": { - "name": "producer" - } - } - } - }, - "inputDefinitions": { - "parameters": { - "text": { - "type": "STRING" - } - } - } - }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.6.5" - }, - "runtimeConfig": { - "parameters": { - "text": { - "stringValue": "Hello world" - } - } - } -} \ No newline at end of file diff --git a/backend/src/v2/compiler/visitor.go b/backend/src/v2/compiler/visitor.go index fd2a4bd0209..44a29af33c8 100644 --- a/backend/src/v2/compiler/visitor.go +++ b/backend/src/v2/compiler/visitor.go @@ -26,7 +26,8 @@ import ( "fmt" "sort" - "github.com/golang/protobuf/jsonpb" + "google.golang.org/protobuf/encoding/protojson" + "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" "google.golang.org/protobuf/types/known/structpb" ) @@ -109,10 +110,15 @@ func (state *pipelineDFS) dfs(name string, component *pipelinespec.ComponentSpec } // Add kubernetes spec to annotation - if state.kubernetesSpec != nil { - kubernetesExecSpec, ok := state.kubernetesSpec.DeploymentSpec.Executors[executorLabel] - if ok { - state.visitor.AddKubernetesSpec(name, kubernetesExecSpec) + if state.kubernetesSpec != nil && state.kubernetesSpec.DeploymentSpec != nil { + if state.kubernetesSpec.DeploymentSpec.Executors != nil { + kubernetesExecSpec, ok := state.kubernetesSpec.DeploymentSpec.Executors[executorLabel] + if ok { + err := state.visitor.AddKubernetesSpec(name, kubernetesExecSpec) + if err != nil { + return componentError(fmt.Errorf("failed to add Kubernetes spec for %s: %w", name, err)) + } + } } } @@ -168,15 +174,17 @@ func (state *pipelineDFS) dfs(name string, component *pipelinespec.ComponentSpec } func GetDeploymentConfig(spec *pipelinespec.PipelineSpec) (*pipelinespec.PipelineDeploymentConfig, error) { - marshaler := jsonpb.Marshaler{} - buffer := new(bytes.Buffer) - if err := marshaler.Marshal(buffer, spec.GetDeploymentSpec()); err != nil { + jsonBytes, err := protojson.Marshal(spec.GetDeploymentSpec()) + if err != nil { return nil, err } + buffer := bytes.NewBuffer(jsonBytes) deploymentConfig := &pipelinespec.PipelineDeploymentConfig{} // Allow unknown '@type' field in the json message. - unmarshaler := jsonpb.Unmarshaler{AllowUnknownFields: true} - if err := unmarshaler.Unmarshal(buffer, deploymentConfig); err != nil { + unmarshaler := protojson.UnmarshalOptions{ + DiscardUnknown: true, + } + if err := unmarshaler.Unmarshal(buffer.Bytes(), deploymentConfig); err != nil { return nil, err } return deploymentConfig, nil @@ -184,13 +192,14 @@ func GetDeploymentConfig(spec *pipelinespec.PipelineSpec) (*pipelinespec.Pipelin func GetPipelineSpec(job *pipelinespec.PipelineJob) (*pipelinespec.PipelineSpec, error) { // TODO(Bobgy): can we avoid this marshal to string step? 
- marshaler := jsonpb.Marshaler{} - json, err := marshaler.MarshalToString(job.GetPipelineSpec()) + marshaler := &protojson.MarshalOptions{} + jsonBytes, err := marshaler.Marshal(job.GetPipelineSpec()) if err != nil { return nil, fmt.Errorf("failed marshal pipeline spec to json: %w", err) } + jsonStr := string(jsonBytes) spec := &pipelinespec.PipelineSpec{} - if err := jsonpb.UnmarshalString(json, spec); err != nil { + if err := protojson.Unmarshal([]byte(jsonStr), spec); err != nil { return nil, fmt.Errorf("failed to parse pipeline spec: %v", err) } return spec, nil diff --git a/backend/src/v2/compiler/visitor_test.go b/backend/src/v2/compiler/visitor_test.go deleted file mode 100644 index 00aec8e81b6..00000000000 --- a/backend/src/v2/compiler/visitor_test.go +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright 2021-2023 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -package compiler_test - -import ( - "fmt" - "os" - "testing" - - "github.com/golang/protobuf/jsonpb" - "github.com/google/go-cmp/cmp" - "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" - "github.com/kubeflow/pipelines/backend/src/v2/compiler" - "google.golang.org/protobuf/types/known/structpb" -) - -type testVisitor struct { - visited []string -} - -func (v *testVisitor) Container(name string, component *pipelinespec.ComponentSpec, executor *pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec) error { - v.visited = append(v.visited, fmt.Sprintf("container(name=%q)", name)) - return nil -} -func (v *testVisitor) Importer(name string, component *pipelinespec.ComponentSpec, importer *pipelinespec.PipelineDeploymentConfig_ImporterSpec) error { - v.visited = append(v.visited, fmt.Sprintf("importer(name=%q)", name)) - return nil -} -func (v *testVisitor) Resolver(name string, component *pipelinespec.ComponentSpec, resolver *pipelinespec.PipelineDeploymentConfig_ResolverSpec) error { - v.visited = append(v.visited, fmt.Sprintf("resolver(name=%q)", name)) - return nil -} -func (v *testVisitor) DAG(name string, component *pipelinespec.ComponentSpec, dag *pipelinespec.DagSpec) error { - v.visited = append(v.visited, fmt.Sprintf("DAG(name=%q)", name)) - return nil -} -func (v *testVisitor) AddKubernetesSpec(name string, kubernetesSpec *structpb.Struct) error { - v.visited = append(v.visited, fmt.Sprintf("DAG(name=%q)", name)) - return nil -} - -func Test_AcceptTestVisitor(t *testing.T) { - tests := []struct { - specPath string - expected []string - }{ - { - specPath: "testdata/hello_world.json", - expected: []string{`container(name="comp-hello-world")`, `DAG(name="root")`}, - }, - { - specPath: "testdata/producer_consumer_param.json", - expected: []string{`container(name="comp-consumer")`, `container(name="comp-producer")`, `DAG(name="root")`}, - }, - { - // Component comp-hello-world used twice, but it should only be visited once. 
- specPath: "testdata/component_used_twice.json", - expected: []string{`container(name="comp-hello-world")`, `DAG(name="root")`}, - }, - } - - for _, tt := range tests { - t.Run(fmt.Sprintf("%q", tt.specPath), func(t *testing.T) { - job := load(t, tt.specPath) - v := &testVisitor{visited: make([]string, 0)} - err := compiler.Accept(job, nil, v) - if err != nil { - t.Fatal(err) - } - if !cmp.Equal(v.visited, tt.expected) { - t.Errorf(" got: %v\nexpect: %v", v.visited, tt.expected) - } - }) - } -} - -func load(t *testing.T, path string) *pipelinespec.PipelineJob { - t.Helper() - content, err := os.ReadFile(path) - if err != nil { - t.Error(err) - } - json := string(content) - job := &pipelinespec.PipelineJob{} - if err := jsonpb.UnmarshalString(json, job); err != nil { - t.Errorf("Failed to parse pipeline job, error: %s, job: %v", err, json) - } - return job -} diff --git a/backend/src/v2/component/constants.go b/backend/src/v2/component/constants.go index 116408b2eab..f4020263553 100644 --- a/backend/src/v2/component/constants.go +++ b/backend/src/v2/component/constants.go @@ -25,4 +25,8 @@ const ( // Env vars in metadata-grpc-configmap EnvMetadataHost = "METADATA_GRPC_SERVICE_HOST" EnvMetadataPort = "METADATA_GRPC_SERVICE_PORT" + + // Workspace-related constants + WorkspaceVolumeName = "kfp-workspace" + WorkspaceMountPath = "/kfp-workspace" ) diff --git a/backend/src/v2/component/launcher_v2.go b/backend/src/v2/component/launcher_v2.go index 51d1c1bf165..81ba34bffef 100644 --- a/backend/src/v2/component/launcher_v2.go +++ b/backend/src/v2/component/launcher_v2.go @@ -27,11 +27,12 @@ import ( "strings" "time" + "google.golang.org/protobuf/types/known/timestamppb" + "github.com/golang/glog" - "github.com/golang/protobuf/proto" - "github.com/golang/protobuf/ptypes/timestamp" api "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" - "github.com/kubeflow/pipelines/backend/src/v2/cacheutils" + "github.com/kubeflow/pipelines/backend/src/v2/client_manager" + "google.golang.org/protobuf/proto" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" "github.com/kubeflow/pipelines/backend/src/v2/metadata" @@ -41,7 +42,6 @@ import ( "google.golang.org/protobuf/encoding/protojson" "google.golang.org/protobuf/types/known/structpb" "k8s.io/client-go/kubernetes" - "k8s.io/client-go/rest" ) type LauncherV2Options struct { @@ -68,11 +68,7 @@ type LauncherV2 struct { command string args []string options LauncherV2Options - - // clients - metadataClient metadata.ClientInterface - k8sClient kubernetes.Interface - cacheClient cacheutils.Client + clientManager client_manager.ClientManagerInterface } // Client is the struct to hold the Kubernetes Clientset @@ -81,7 +77,15 @@ type kubernetesClient struct { } // NewLauncherV2 is a factory function that returns an instance of LauncherV2. 
-func NewLauncherV2(ctx context.Context, executionID int64, executorInputJSON, componentSpecJSON string, cmdArgs []string, opts *LauncherV2Options) (l *LauncherV2, err error) { +func NewLauncherV2( + ctx context.Context, + executionID int64, + executorInputJSON, + componentSpecJSON string, + cmdArgs []string, + opts *LauncherV2Options, + clientManager client_manager.ClientManagerInterface, +) (l *LauncherV2, err error) { defer func() { if err != nil { err = fmt.Errorf("failed to create component launcher v2: %w", err) @@ -107,32 +111,14 @@ func NewLauncherV2(ctx context.Context, executionID int64, executorInputJSON, co if err != nil { return nil, err } - restConfig, err := rest.InClusterConfig() - if err != nil { - return nil, fmt.Errorf("failed to initialize kubernetes client: %w", err) - } - k8sClient, err := kubernetes.NewForConfig(restConfig) - if err != nil { - return nil, fmt.Errorf("failed to initialize kubernetes client set: %w", err) - } - metadataClient, err := metadata.NewClient(opts.MLMDServerAddress, opts.MLMDServerPort, opts.MetadataTLSEnabled, opts.CaCertPath) - if err != nil { - return nil, err - } - cacheClient, err := cacheutils.NewClient(opts.CacheDisabled, opts.MLPipelineTLSEnabled) - if err != nil { - return nil, err - } return &LauncherV2{ - executionID: executionID, - executorInput: executorInput, - component: component, - command: cmdArgs[0], - args: cmdArgs[1:], - options: *opts, - metadataClient: metadataClient, - k8sClient: k8sClient, - cacheClient: cacheClient, + executionID: executionID, + executorInput: executorInput, + component: component, + command: cmdArgs[0], + args: cmdArgs[1:], + options: *opts, + clientManager: clientManager, }, nil } @@ -190,6 +176,11 @@ func (l *LauncherV2) Execute(ctx context.Context) (err error) { var outputArtifacts []*metadata.OutputArtifact status := pb.Execution_FAILED defer func() { + if execution == nil { + glog.Errorf("Skipping publish since execution is nil. Original err is: %v", err) + return + } + if perr := l.publish(ctx, execution, executorOutput, outputArtifacts, status); perr != nil { if err != nil { err = fmt.Errorf("failed to publish execution with error %s after execution failed: %s", perr.Error(), err.Error()) @@ -200,12 +191,12 @@ func (l *LauncherV2) Execute(ctx context.Context) (err error) { glog.Infof("publish success.") // At the end of the current task, we check the statuses of all tasks in // the current DAG and update the DAG's status accordingly. 
- dag, err := l.metadataClient.GetDAG(ctx, execution.GetExecution().CustomProperties["parent_dag_id"].GetIntValue()) + dag, err := l.clientManager.MetadataClient().GetDAG(ctx, execution.GetExecution().CustomProperties["parent_dag_id"].GetIntValue()) if err != nil { glog.Errorf("DAG Status Update: failed to get DAG: %s", err.Error()) } - pipeline, _ := l.metadataClient.GetPipelineFromExecution(ctx, execution.GetID()) - err = l.metadataClient.UpdateDAGExecutionsState(ctx, dag, pipeline) + pipeline, _ := l.clientManager.MetadataClient().GetPipelineFromExecution(ctx, execution.GetID()) + err = l.clientManager.MetadataClient().UpdateDAGExecutionsState(ctx, dag, pipeline) if err != nil { glog.Errorf("failed to update DAG state: %s", err.Error()) } @@ -225,7 +216,7 @@ func (l *LauncherV2) Execute(ctx context.Context) (err error) { if err != nil { return err } - bucket, err := objectstore.OpenBucket(ctx, l.k8sClient, l.options.Namespace, bucketConfig) + bucket, err := objectstore.OpenBucket(ctx, l.clientManager.K8sClient(), l.options.Namespace, bucketConfig) if err != nil { return err } @@ -240,9 +231,9 @@ func (l *LauncherV2) Execute(ctx context.Context) (err error) { l.args, bucket, bucketConfig, - l.metadataClient, + l.clientManager.MetadataClient(), l.options.Namespace, - l.k8sClient, + l.clientManager.K8sClient(), l.options.PublishLogs, ) if err != nil { @@ -261,11 +252,11 @@ func (l *LauncherV2) Execute(ctx context.Context) (err error) { Namespace: l.options.Namespace, RunId: l.options.RunID, MlmdExecutionID: strconv.FormatInt(id, 10), - CreatedAt: ×tamp.Timestamp{Seconds: executedStartedTime}, - FinishedAt: ×tamp.Timestamp{Seconds: time.Now().Unix()}, + CreatedAt: timestamppb.New(time.Unix(executedStartedTime, 0)), + FinishedAt: timestamppb.New(time.Unix(time.Now().Unix(), 0)), Fingerprint: fingerPrint, } - return l.cacheClient.CreateExecutionCache(ctx, task) + return l.clientManager.CacheClient().CreateExecutionCache(ctx, task) } return nil @@ -311,7 +302,7 @@ func (l *LauncherV2) prePublish(ctx context.Context) (execution *metadata.Execut err = fmt.Errorf("failed to pre-publish Pod info to ML Metadata: %w", err) } }() - execution, err = l.metadataClient.GetExecution(ctx, l.executionID) + execution, err = l.clientManager.MetadataClient().GetExecution(ctx, l.executionID) if err != nil { return nil, err } @@ -320,7 +311,7 @@ func (l *LauncherV2) prePublish(ctx context.Context) (execution *metadata.Execut PodUID: l.options.PodUID, Namespace: l.options.Namespace, } - return l.metadataClient.PrePublishExecution(ctx, execution, ecfg) + return l.clientManager.MetadataClient().PrePublishExecution(ctx, execution, ecfg) } // TODO(Bobgy): consider passing output artifacts info from executor output. @@ -331,17 +322,25 @@ func (l *LauncherV2) publish( outputArtifacts []*metadata.OutputArtifact, status pb.Execution_State, ) (err error) { - defer func() { - if err != nil { - err = fmt.Errorf("failed to publish results to ML Metadata: %w", err) - } - }() - outputParameters := executorOutput.GetParameterValues() + if execution == nil { + return fmt.Errorf("failed to publish results to ML Metadata: execution is nil") + } + + var outputParameters map[string]*structpb.Value + if executorOutput != nil { + outputParameters = executorOutput.GetParameterValues() + } + // TODO(Bobgy): upload output artifacts. // TODO(Bobgy): when adding artifacts, we will need execution.pipeline to be non-nil, because we need // to publish output artifacts to the context too. 
// return l.metadataClient.PublishExecution(ctx, execution, outputParameters, outputArtifacts, pb.Execution_COMPLETE) - return l.metadataClient.PublishExecution(ctx, execution, outputParameters, outputArtifacts, status) + err = l.clientManager.MetadataClient().PublishExecution(ctx, execution, outputParameters, outputArtifacts, status) + if err != nil { + return fmt.Errorf("failed to publish results to ML Metadata: %w", err) + } + + return nil } // executeV2 handles placeholder substitution for inputs, calls execute to diff --git a/backend/src/v2/component/launcher_v2_test.go b/backend/src/v2/component/launcher_v2_test.go index 0774afdd6e1..45543d03e75 100644 --- a/backend/src/v2/component/launcher_v2_test.go +++ b/backend/src/v2/component/launcher_v2_test.go @@ -16,10 +16,13 @@ package component import ( "context" "encoding/json" + "errors" "io" "os" "testing" + "github.com/kubeflow/pipelines/backend/src/v2/cacheutils" + "github.com/kubeflow/pipelines/backend/src/v2/client_manager" "google.golang.org/protobuf/encoding/protojson" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" @@ -309,3 +312,106 @@ func Test_get_log_Writer(t *testing.T) { }) } } + +// Tests happy and unhappy paths for constructing a new LauncherV2 +func Test_NewLauncherV2(t *testing.T) { + var testCmdArgs = []string{"sh", "-c", "echo \"hello world\""} + + disabledCacheClient, _ := cacheutils.NewClient(true, false) + var testLauncherV2Deps = client_manager.NewFakeClientManager( + fake.NewSimpleClientset(), + metadata.NewFakeClient(), + disabledCacheClient, + ) + + var testValidLauncherV2Opts = LauncherV2Options{ + Namespace: "my-namespace", + PodName: "my-pod", + PodUID: "abcd", + MLMDServerAddress: "example.com", + MLMDServerPort: "1234", + } + + type args struct { + executionID int64 + executorInputJSON string + componentSpecJSON string + cmdArgs []string + opts LauncherV2Options + cm client_manager.ClientManagerInterface + } + tests := []struct { + name string + args *args + expectedErr error + }{ + { + name: "happy path", + args: &args{ + executionID: 1, + executorInputJSON: "{}", + componentSpecJSON: "{}", + cmdArgs: testCmdArgs, + opts: testValidLauncherV2Opts, + cm: testLauncherV2Deps, + }, + expectedErr: nil, + }, + { + name: "missing executionID", + args: &args{ + executionID: 0, + }, + expectedErr: errors.New("must specify execution ID"), + }, + { + name: "invalid executorInput", + args: &args{ + executionID: 1, + executorInputJSON: "{", + }, + expectedErr: errors.New("unexpected EOF"), + }, + { + name: "invalid componentSpec", + args: &args{ + executionID: 1, + executorInputJSON: "{}", + componentSpecJSON: "{", + }, + expectedErr: errors.New("unexpected EOF\ncomponentSpec: {"), + }, + { + name: "missing cmdArgs", + args: &args{ + executionID: 1, + executorInputJSON: "{}", + componentSpecJSON: "{}", + cmdArgs: []string{}, + }, + expectedErr: errors.New("command and arguments are empty"), + }, + { + name: "invalid opts", + args: &args{ + executionID: 1, + executorInputJSON: "{}", + componentSpecJSON: "{}", + cmdArgs: testCmdArgs, + opts: LauncherV2Options{}, + }, + expectedErr: errors.New("invalid launcher options: must specify Namespace"), + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + args := test.args + _, err := NewLauncherV2(context.Background(), args.executionID, args.executorInputJSON, args.componentSpecJSON, args.cmdArgs, &args.opts, args.cm) + if test.expectedErr != nil { + assert.ErrorContains(t, err, test.expectedErr.Error()) + } else { + assert.NoError(t, 
err) + } + }) + } +} diff --git a/backend/src/v2/config/env.go b/backend/src/v2/config/env.go index 74c5cf8b4a6..6472e66aea8 100644 --- a/backend/src/v2/config/env.go +++ b/backend/src/v2/config/env.go @@ -166,9 +166,10 @@ func getDefaultMinioSessionInfo() (objectstore.SessionInfo, error) { Provider: "minio", Params: map[string]string{ "region": "minio", - "endpoint": objectstore.MinioDefaultEndpoint(), + "endpoint": objectstore.DefaultMinioEndpointInMultiUserMode, "disableSSL": strconv.FormatBool(true), "fromEnv": strconv.FormatBool(false), + "maxRetries": strconv.FormatInt(int64(5), 10), "secretName": minioArtifactSecretName, // The k8s secret "Key" for "Artifact SecretKey" and "Artifact AccessKey" "accessKeyKey": minioArtifactAccessKeyKey, diff --git a/backend/src/v2/config/env_test.go b/backend/src/v2/config/env_test.go index 2b29656c0ac..523dc335e86 100644 --- a/backend/src/v2/config/env_test.go +++ b/backend/src/v2/config/env_test.go @@ -45,6 +45,7 @@ func Test_getDefaultMinioSessionInfo(t *testing.T) { "secretName": "mlpipeline-minio-artifact", "accessKeyKey": "accesskey", "secretKeyKey": "secretkey", + "maxRetries": "5", }, } assert.Equal(t, expectedDefaultSession, actualDefaultSession) @@ -109,6 +110,7 @@ func TestGetBucketSessionInfo(t *testing.T) { "secretName": "mlpipeline-minio-artifact", "accessKeyKey": "accesskey", "secretKeyKey": "secretkey", + "maxRetries": "5", }, }, }, @@ -147,6 +149,7 @@ func TestGetBucketSessionInfo(t *testing.T) { "secretName": "mlpipeline-minio-artifact", "accessKeyKey": "accesskey", "secretKeyKey": "secretkey", + "maxRetries": "5", }, }, testDataCase: "case1", @@ -164,6 +167,7 @@ func TestGetBucketSessionInfo(t *testing.T) { "secretName": "mlpipeline-minio-artifact", "accessKeyKey": "accesskey", "secretKeyKey": "secretkey", + "maxRetries": "5", }, }, testDataCase: "case1", @@ -206,6 +210,7 @@ func TestGetBucketSessionInfo(t *testing.T) { "accessKeyKey": "test-accessKeyKey-5", "secretKeyKey": "test-secretKeyKey-5", "forcePathStyle": "true", + "maxRetries": "5", }, }, testDataCase: "case5", @@ -224,6 +229,7 @@ func TestGetBucketSessionInfo(t *testing.T) { "accessKeyKey": "minio-test-accessKeyKey-6-a", "secretKeyKey": "minio-test-secretKeyKey-6-a", "forcePathStyle": "true", + "maxRetries": "5", }, }, testDataCase: "case6", @@ -271,6 +277,7 @@ func TestGetBucketSessionInfo(t *testing.T) { "disableSSL": "true", "fromEnv": "true", "forcePathStyle": "true", + "maxRetries": "5", }, }, testDataCase: "case9", @@ -289,6 +296,7 @@ func TestGetBucketSessionInfo(t *testing.T) { "accessKeyKey": "minio-test-accessKeyKey-10", "secretKeyKey": "minio-test-secretKeyKey-10", "forcePathStyle": "true", + "maxRetries": "5", }, }, testDataCase: "case10", @@ -304,6 +312,7 @@ func TestGetBucketSessionInfo(t *testing.T) { "disableSSL": "true", "fromEnv": "true", "forcePathStyle": "true", + "maxRetries": "5", }, }, testDataCase: "case10", @@ -322,6 +331,7 @@ func TestGetBucketSessionInfo(t *testing.T) { "accessKeyKey": "s3-testaccessKeyKey-6", "secretKeyKey": "s3-testsecretKeyKey-6", "forcePathStyle": "false", + "maxRetries": "5", }, }, testDataCase: "case6", @@ -340,6 +350,7 @@ func TestGetBucketSessionInfo(t *testing.T) { "accessKeyKey": "s3-test-accessKeyKey-6-b", "secretKeyKey": "s3-test-secretKeyKey-6-b", "forcePathStyle": "false", + "maxRetries": "5", }, }, testDataCase: "case6", @@ -358,6 +369,7 @@ func TestGetBucketSessionInfo(t *testing.T) { "accessKeyKey": "s3-test-accessKeyKey-6-a", "secretKeyKey": "s3-test-secretKeyKey-6-a", "forcePathStyle": "false", + 
"maxRetries": "5", }, }, testDataCase: "case6", diff --git a/backend/src/v2/config/s3.go b/backend/src/v2/config/s3.go index 45cd2c22c84..885189035bc 100644 --- a/backend/src/v2/config/s3.go +++ b/backend/src/v2/config/s3.go @@ -37,6 +37,8 @@ type S3ProviderDefault struct { DisableSSL *bool `json:"disableSSL"` // optional ForcePathStyle *bool `json:"forcePathStyle"` + // optional + MaxRetries *int `json:"maxRetries"` } type S3Credentials struct { @@ -57,6 +59,8 @@ type S3Override struct { Credentials *S3Credentials `json:"credentials"` // optional ForcePathStyle *bool `json:"forcePathStyle"` + // optional + MaxRetries *int `json:"maxRetries"` } type S3SecretRef struct { SecretName string `json:"secretName"` @@ -109,6 +113,11 @@ func (p S3ProviderConfig) ProvideSessionInfo(path string) (objectstore.SessionIn } else { params["forcePathStyle"] = strconv.FormatBool(*p.Default.ForcePathStyle) } + if p.Default.MaxRetries == nil { + params["maxRetries"] = strconv.FormatInt(5, 10) + } else { + params["maxRetries"] = strconv.FormatInt(int64(*p.Default.MaxRetries), 10) + } params["fromEnv"] = strconv.FormatBool(p.Default.Credentials.FromEnv) if !p.Default.Credentials.FromEnv { @@ -138,6 +147,9 @@ func (p S3ProviderConfig) ProvideSessionInfo(path string) (objectstore.SessionIn if override.ForcePathStyle != nil { sessionInfo.Params["forcePathStyle"] = strconv.FormatBool(*override.ForcePathStyle) } + if override.MaxRetries != nil { + sessionInfo.Params["maxRetries"] = strconv.FormatInt(int64(*p.Default.MaxRetries), 10) + } if override.Credentials == nil { return objectstore.SessionInfo{}, invalidConfigErr(fmt.Errorf("missing override credentials")) } diff --git a/backend/src/v2/driver/cache.go b/backend/src/v2/driver/cache.go index 3c03b38e97d..d2fc6fd306e 100644 --- a/backend/src/v2/driver/cache.go +++ b/backend/src/v2/driver/cache.go @@ -17,11 +17,13 @@ package driver import ( "context" "fmt" + "sort" "strconv" "time" + "google.golang.org/protobuf/types/known/timestamppb" + "github.com/golang/glog" - "github.com/golang/protobuf/ptypes/timestamp" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" api "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" "github.com/kubeflow/pipelines/backend/src/v2/cacheutils" @@ -75,7 +77,10 @@ func reuseCachedOutputs(ctx context.Context, executorInput *pipelinespec.Executo return executorOutput, outputArtifacts, nil } -func getFingerPrint(opts Options, executorInput *pipelinespec.ExecutorInput, cacheClient cacheutils.Client) (string, error) { +// getFingerPrint generates a fingerprint for caching. The PVC names are included in the fingerprint since it's assumed +// PVCs have side effects (e.g. files written for tasks later on in the run) on the execution. If the PVC names are +// different, the execution shouldn't be reused for the cache. +func getFingerPrint(opts Options, executorInput *pipelinespec.ExecutorInput, cacheClient cacheutils.Client, pvcNames []string) (string, error) { outputParametersTypeMap := make(map[string]string) for outputParamName, outputParamSpec := range opts.Component.GetOutputDefinitions().GetParameters() { outputParametersTypeMap[outputParamName] = outputParamSpec.GetParameterType().String() @@ -84,7 +89,26 @@ func getFingerPrint(opts Options, executorInput *pipelinespec.ExecutorInput, cac userCmdArgs = append(userCmdArgs, opts.Container.Command...) userCmdArgs = append(userCmdArgs, opts.Container.Args...) 
- cacheKey, err := cacheClient.GenerateCacheKey(executorInput.GetInputs(), executorInput.GetOutputs(), outputParametersTypeMap, userCmdArgs, opts.Container.Image) + // Deduplicate PVC names and sort them to ensure consistent fingerprint generation. + pvcNamesMap := map[string]struct{}{} + for _, pvcName := range pvcNames { + pvcNamesMap[pvcName] = struct{}{} + } + + sortedPVCNames := make([]string, 0, len(pvcNamesMap)) + for pvcName := range pvcNamesMap { + sortedPVCNames = append(sortedPVCNames, pvcName) + } + sort.Strings(sortedPVCNames) + + cacheKey, err := cacheClient.GenerateCacheKey( + executorInput.GetInputs(), + executorInput.GetOutputs(), + outputParametersTypeMap, + userCmdArgs, + opts.Container.Image, + sortedPVCNames, + ) if err != nil { return "", fmt.Errorf("failure while generating CacheKey: %w", err) } @@ -92,10 +116,10 @@ func getFingerPrint(opts Options, executorInput *pipelinespec.ExecutorInput, cac return fingerPrint, err } -func getFingerPrintsAndID(execution *Execution, opts *Options, cacheClient cacheutils.Client) (string, string, error) { +func getFingerPrintsAndID(execution *Execution, opts *Options, cacheClient cacheutils.Client, pvcNames []string) (string, string, error) { if !opts.CacheDisabled && execution.WillTrigger() && opts.Task.GetCachingOptions().GetEnableCache() { glog.Infof("Task {%s} enables cache", opts.Task.GetTaskInfo().GetName()) - fingerPrint, err := getFingerPrint(*opts, execution.ExecutorInput, cacheClient) + fingerPrint, err := getFingerPrint(*opts, execution.ExecutorInput, cacheClient, pvcNames) if err != nil { return "", "", fmt.Errorf("failure while getting fingerPrint: %w", err) } @@ -127,8 +151,8 @@ func createCache( Namespace: opts.Namespace, RunId: opts.RunID, MlmdExecutionID: strconv.FormatInt(id, 10), - CreatedAt: ×tamp.Timestamp{Seconds: taskStartedTime}, - FinishedAt: ×tamp.Timestamp{Seconds: time.Now().Unix()}, + CreatedAt: timestamppb.New(time.Unix(taskStartedTime, 0)), + FinishedAt: timestamppb.New(time.Unix(time.Now().Unix(), 0)), Fingerprint: fingerPrint, } err := cacheClient.CreateExecutionCache(ctx, task) diff --git a/backend/src/v2/driver/container.go b/backend/src/v2/driver/container.go index 00e51e93c19..1ed1b7bf7e4 100644 --- a/backend/src/v2/driver/container.go +++ b/backend/src/v2/driver/container.go @@ -27,6 +27,8 @@ import ( "github.com/kubeflow/pipelines/backend/src/v2/expression" "github.com/kubeflow/pipelines/backend/src/v2/metadata" pb "github.com/kubeflow/pipelines/third_party/ml-metadata/go/ml_metadata" + "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/types/known/structpb" ) func validateContainer(opts Options) (err error) { @@ -112,7 +114,7 @@ func Container(ctx context.Context, opts Options, mlmd *metadata.Client, cacheCl if execution.WillTrigger() { executorInput.Outputs = provisionOutputs( pipeline.GetPipelineRoot(), - opts.Task.GetTaskInfo().GetName(), + opts.TaskName, opts.Component.GetOutputDefinitions(), uuid.NewString(), opts.PublishLogs, @@ -123,7 +125,8 @@ func Container(ctx context.Context, opts Options, mlmd *metadata.Client, cacheCl if err != nil { return execution, err } - ecfg.TaskName = opts.Task.GetTaskInfo().GetName() + ecfg.TaskName = opts.TaskName + ecfg.DisplayName = opts.Task.GetTaskInfo().GetName() ecfg.ExecutionType = metadata.ContainerExecutionTypeName ecfg.ParentDagID = dag.Execution.GetID() ecfg.IterationIndex = iterationIndex @@ -133,9 +136,40 @@ func Container(ctx context.Context, opts Options, mlmd *metadata.Client, cacheCl return execution, 
kubernetesPlatformOps(ctx, mlmd, cacheClient, execution, ecfg, &opts) } + var inputParams map[string]*structpb.Value + + if opts.KubernetesExecutorConfig != nil { + inputParams, _, err = dag.Execution.GetParameters() + if err != nil { + return nil, fmt.Errorf("failed to fetch input parameters from execution: %w", err) + } + } + if !opts.CacheDisabled { // Generate fingerprint and MLMD ID for cache - fingerPrint, cachedMLMDExecutionID, err := getFingerPrintsAndID(execution, &opts, cacheClient) + // Start by getting the names of the PVCs that need to be mounted. + pvcNames := []string{} + if opts.KubernetesExecutorConfig != nil && opts.KubernetesExecutorConfig.GetPvcMount() != nil { + _, volumes, err := makeVolumeMountPatch(ctx, opts, opts.KubernetesExecutorConfig.GetPvcMount(), + dag, pipeline, mlmd, inputParams) + if err != nil { + return nil, fmt.Errorf("failed to extract volume mount info while generating fingerprint: %w", err) + } + + for _, volume := range volumes { + pvcNames = append(pvcNames, volume.Name) + } + } + + if needsWorkspaceMount(execution.ExecutorInput) { + if opts.RunName == "" { + return execution, fmt.Errorf("failed to generate fingerprint: run name is required when workspace is used") + } + + pvcNames = append(pvcNames, GetWorkspacePVCName(opts.RunName)) + } + + fingerPrint, cachedMLMDExecutionID, err := getFingerPrintsAndID(execution, &opts, cacheClient, pvcNames) if err != nil { return execution, err } @@ -145,7 +179,6 @@ func Container(ctx context.Context, opts Options, mlmd *metadata.Client, cacheCl // TODO(Bobgy): change execution state to pending, because this is driver, execution hasn't started. createdExecution, err := mlmd.CreateExecution(ctx, pipeline, ecfg) - if err != nil { return execution, err } @@ -181,6 +214,8 @@ func Container(ctx context.Context, opts Options, mlmd *metadata.Client, cacheCl glog.Info("Cache disabled globally at the server level.") } + taskConfig := &TaskConfig{} + podSpec, err := initPodSpecPatch( opts.Container, opts.Component, @@ -188,6 +223,7 @@ func Container(ctx context.Context, opts Options, mlmd *metadata.Client, cacheCl execution.ID, opts.PipelineName, opts.RunID, + opts.RunName, opts.PipelineLogLevel, opts.PublishLogs, strconv.FormatBool(opts.CacheDisabled), @@ -196,20 +232,72 @@ func Container(ctx context.Context, opts Options, mlmd *metadata.Client, cacheCl opts.MLMDServerPort, opts.MLMDTLSEnabled, opts.CaCertPath, + taskConfig, ) if err != nil { return execution, err } if opts.KubernetesExecutorConfig != nil { - inputParams, _, err := dag.Execution.GetParameters() + err = extendPodSpecPatch(ctx, podSpec, opts, dag, pipeline, mlmd, inputParams, taskConfig) if err != nil { - return nil, fmt.Errorf("failed to fetch input parameters from execution: %w", err) + return execution, err + } + } + + // Handle replacing any dsl.TaskConfig inputs with the taskConfig. This is done here because taskConfig is + // populated by initPodSpecPatch and extendPodSpecPatch. 
+ taskConfigInputs := map[string]bool{} + for inputName := range opts.Component.GetInputDefinitions().GetParameters() { + compParam := opts.Component.GetInputDefinitions().GetParameters()[inputName] + if compParam != nil && compParam.GetParameterType() == pipelinespec.ParameterType_TASK_CONFIG { + taskConfigInputs[inputName] = true } - err = extendPodSpecPatch(ctx, podSpec, opts, dag, pipeline, mlmd, inputParams) + } + + if len(taskConfigInputs) > 0 { + taskConfigBytes, err := json.Marshal(taskConfig) if err != nil { - return execution, err + return nil, fmt.Errorf("failed to marshal Kubernetes passthrough info: %w", err) + } + + taskConfigStruct := &structpb.Struct{} + err = protojson.Unmarshal(taskConfigBytes, taskConfigStruct) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal Kubernetes passthrough info: %w", err) + } + + for inputName := range taskConfigInputs { + executorInput.Inputs.ParameterValues[inputName] = &structpb.Value{ + Kind: &structpb.Value_StructValue{StructValue: taskConfigStruct}, + } } + + ecfg.InputParameters = executorInput.Inputs.ParameterValues + + // Overwrite the --executor_input argument in the podSpec container command with the updated executorInput + executorInputJSON, err := protojson.Marshal(executorInput) + if err != nil { + return execution, fmt.Errorf("JSON marshaling executor input: %w", err) + } + + for index, container := range podSpec.Containers { + if container.Name == "main" { + cmd := container.Command + for i := 0; i < len(cmd)-1; i++ { + if cmd[i] == "--executor_input" { + podSpec.Containers[index].Command[i+1] = string(executorInputJSON) + + break + } + } + + break + } + } + + execution.ExecutorInput = executorInput } + podSpecPatchBytes, err := json.Marshal(podSpec) if err != nil { return execution, fmt.Errorf("JSON marshaling pod spec patch: %w", err) diff --git a/backend/src/v2/driver/dag.go b/backend/src/v2/driver/dag.go index 04ec8efb01f..362fac66f7f 100644 --- a/backend/src/v2/driver/dag.go +++ b/backend/src/v2/driver/dag.go @@ -91,7 +91,17 @@ func DAG(ctx context.Context, opts Options, mlmd *metadata.Client) (execution *E if err != nil { return execution, err } - ecfg.TaskName = opts.Task.GetTaskInfo().GetName() + + // Set task name to display name if not specified. This is the case of + // specialty tasks such as OneOfs and ParallelFors where there are not + // explicit dag tasks defined in the pipeline, but rather generated at + // compile time and assigned a display name. 
+ taskName := opts.TaskName + if taskName == "" { + taskName = opts.Task.GetTaskInfo().GetName() + } + ecfg.TaskName = taskName + ecfg.DisplayName = opts.Task.GetTaskInfo().GetName() ecfg.ExecutionType = metadata.DagExecutionTypeName ecfg.ParentDagID = dag.Execution.GetID() ecfg.IterationIndex = iterationIndex diff --git a/backend/src/v2/driver/driver.go b/backend/src/v2/driver/driver.go index 43127a23a2f..e1ca1c3fc9a 100644 --- a/backend/src/v2/driver/driver.go +++ b/backend/src/v2/driver/driver.go @@ -16,6 +16,7 @@ package driver import ( "fmt" + "path/filepath" "slices" "strings" @@ -26,6 +27,7 @@ import ( "github.com/kubeflow/pipelines/backend/src/v2/metadata" "github.com/kubeflow/pipelines/kubernetes_platform/go/kubernetesplatform" "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/types/known/structpb" k8score "k8s.io/api/core/v1" k8sres "k8s.io/apimachinery/pkg/api/resource" ) @@ -55,7 +57,7 @@ type Options struct { // optional, allows to specify kubernetes-specific executor config KubernetesExecutorConfig *kubernetesplatform.KubernetesExecutorConfig - // optional, required only if the {{$.pipeline_job_resource_name}} placeholder is used + // optional, required only if the {{$.pipeline_job_resource_name}} placeholder is used or the run uses a workspace RunName string // optional, required only if the {{$.pipeline_job_name}} placeholder is used RunDisplayName string @@ -77,31 +79,49 @@ type Options struct { MLMDTLSEnabled bool CaCertPath string + + DriverType string + + TaskName string // the original name of the task, used for input resolution +} + +// TaskConfig needs to stay aligned with the TaskConfig in the SDK. +type TaskConfig struct { + Affinity *k8score.Affinity `json:"affinity"` + Tolerations []k8score.Toleration `json:"tolerations"` + NodeSelector map[string]string `json:"nodeSelector"` + Env []k8score.EnvVar `json:"env"` + Volumes []k8score.Volume `json:"volumes"` + VolumeMounts []k8score.VolumeMount `json:"volumeMounts"` + Resources k8score.ResourceRequirements `json:"resources"` } // Identifying information used for error messages func (o Options) info() string { msg := fmt.Sprintf("pipelineName=%v, runID=%v", o.PipelineName, o.RunID) if o.Task.GetTaskInfo().GetName() != "" { - msg = msg + fmt.Sprintf(", task=%q", o.Task.GetTaskInfo().GetName()) + msg += fmt.Sprintf(", taskDisplayName=%q", o.Task.GetTaskInfo().GetName()) + } + if o.TaskName != "" { + msg += fmt.Sprintf(", taskName=%q", o.TaskName) } if o.Task.GetComponentRef().GetName() != "" { - msg = msg + fmt.Sprintf(", component=%q", o.Task.GetComponentRef().GetName()) + msg += fmt.Sprintf(", component=%q", o.Task.GetComponentRef().GetName()) } if o.DAGExecutionID != 0 { - msg = msg + fmt.Sprintf(", dagExecutionID=%v", o.DAGExecutionID) + msg += fmt.Sprintf(", dagExecutionID=%v", o.DAGExecutionID) } if o.IterationIndex >= 0 { - msg = msg + fmt.Sprintf(", iterationIndex=%v", o.IterationIndex) + msg += fmt.Sprintf(", iterationIndex=%v", o.IterationIndex) } if o.RuntimeConfig != nil { - msg = msg + ", runtimeConfig" // this only means runtimeConfig is not empty + msg += ", runtimeConfig" // this only means runtimeConfig is not empty } if o.Component.GetImplementation() != nil { - msg = msg + ", componentSpec" // this only means componentSpec is not empty + msg += ", componentSpec" // this only means componentSpec is not empty } if o.KubernetesExecutorConfig != nil { - msg = msg + ", KubernetesExecutorConfig" // this only means KubernetesExecutorConfig is not empty + msg += ", 
KubernetesExecutorConfig" // this only means KubernetesExecutorConfig is not empty } return msg } @@ -153,6 +173,49 @@ func getPodResource( return &q, nil } +// getTaskConfigOptions inspects the component's task config passthroughs and returns two maps: +// 1) fields enabled for passthrough +// 2) fields that should apply to the task pod +// +// If the component does not specify a passthrough, then all fields apply to the task pod and no fields are passthrough +// enabled. +func getTaskConfigOptions( + componentSpec *pipelinespec.ComponentSpec, +) (map[pipelinespec.TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum]bool, + map[pipelinespec.TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum]bool, +) { + passthroughEnabled := map[pipelinespec.TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum]bool{} + // setOnTask contains all possible fields even if they are not in the passthrough list. + setOnPod := map[pipelinespec.TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum]bool{ + pipelinespec.TaskConfigPassthroughType_RESOURCES: true, + pipelinespec.TaskConfigPassthroughType_ENV: true, + pipelinespec.TaskConfigPassthroughType_KUBERNETES_AFFINITY: true, + pipelinespec.TaskConfigPassthroughType_KUBERNETES_TOLERATIONS: true, + pipelinespec.TaskConfigPassthroughType_KUBERNETES_NODE_SELECTOR: true, + pipelinespec.TaskConfigPassthroughType_KUBERNETES_VOLUMES: true, + } + + if componentSpec == nil { + return passthroughEnabled, setOnPod + } + + // If the component specifies a passthrough, then we don't set fields on the pod unless apply_to_task + // is true. + if len(componentSpec.GetTaskConfigPassthroughs()) != 0 { + for field := range setOnPod { + passthroughEnabled[field] = false + } + } + + for _, pt := range componentSpec.GetTaskConfigPassthroughs() { + field := pt.GetField() + passthroughEnabled[field] = true + setOnPod[field] = pt.GetApplyToTask() + } + + return passthroughEnabled, setOnPod +} + // initPodSpecPatch generates a strategic merge patch for pod spec, it is merged // to container base template generated in compiler/container.go. Therefore, only // dynamic values are patched here. The volume mounts / configmap mounts are @@ -164,6 +227,7 @@ func initPodSpecPatch( executionID int64, pipelineName string, runID string, + runName string, pipelineLogLevel string, publishLogs string, cacheDisabled string, @@ -172,6 +236,7 @@ func initPodSpecPatch( mlmdServerPort string, mlmdTLSEnabled bool, caCertPath string, + taskConfig *TaskConfig, ) (*k8score.PodSpec, error) { executorInputJSON, err := protojson.Marshal(executorInput) if err != nil { @@ -190,6 +255,13 @@ func initPodSpecPatch( userEnvVar = append(userEnvVar, proxy.GetConfig().GetEnvVars()...) + setOnTaskConfig, setOnPod := getTaskConfigOptions(componentSpec) + + // Always set setOnTaskConfig to an empty map if taskConfig is nil to avoid nil pointer dereference. + if taskConfig == nil { + setOnTaskConfig = map[pipelinespec.TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum]bool{} + } + userCmdArgs := make([]string, 0, len(container.Command)+len(container.Args)) userCmdArgs = append(userCmdArgs, container.Command...) userCmdArgs = append(userCmdArgs, container.Args...) 
@@ -321,22 +393,115 @@ func initPodSpecPatch( if err != nil { return nil, fmt.Errorf("failed to init podSpecPatch: %w", err) } + podSpec := &k8score.PodSpec{ Containers: []k8score.Container{{ - Name: "main", // argo task user container is always called "main" - Command: launcherCmd, - Args: userCmdArgs, - Image: containerImage, - Resources: res, - Env: userEnvVar, + Name: "main", // argo task user container is always called "main" + Command: launcherCmd, + Args: userCmdArgs, + Image: containerImage, }}, } - addModelcarsToPodSpec(executorInput.GetInputs().GetArtifacts(), userEnvVar, podSpec) + if setOnTaskConfig[pipelinespec.TaskConfigPassthroughType_ENV] { + taskConfig.Env = userEnvVar + } + + if setOnPod[pipelinespec.TaskConfigPassthroughType_ENV] { + podSpec.Containers[0].Env = userEnvVar + } + + if setOnTaskConfig[pipelinespec.TaskConfigPassthroughType_RESOURCES] { + taskConfig.Resources = res + } + + if setOnPod[pipelinespec.TaskConfigPassthroughType_RESOURCES] { + podSpec.Containers[0].Resources = res + } + + addModelcarsToPodSpec(executorInput.GetInputs().GetArtifacts(), podSpec.Containers[0].Env, podSpec) + + if needsWorkspaceMount(executorInput) { + // Validate that no user volume mounts conflict with the workspace + if err := validateVolumeMounts(podSpec); err != nil { + return nil, fmt.Errorf("failed to validate volume mounts: %w", err) + } + + if runName == "" { + return nil, fmt.Errorf("failed to init podSpecPatch: run name is required when workspace is used") + } + + pvcName := GetWorkspacePVCName(runName) + + workspaceVolume, workspaceVolumeMount := getWorkspaceMount(pvcName) + + if setOnTaskConfig[pipelinespec.TaskConfigPassthroughType_KUBERNETES_VOLUMES] { + taskConfig.Volumes = append(taskConfig.Volumes, workspaceVolume) + taskConfig.VolumeMounts = append(taskConfig.VolumeMounts, workspaceVolumeMount) + } + + if setOnPod[pipelinespec.TaskConfigPassthroughType_KUBERNETES_VOLUMES] { + podSpec.Volumes = append(podSpec.Volumes, workspaceVolume) + podSpec.Containers[0].VolumeMounts = append(podSpec.Containers[0].VolumeMounts, workspaceVolumeMount) + } + } return podSpec, nil } +// needsWorkspaceMount checks if the component needs workspace mounting based on input parameters and artifacts. +func needsWorkspaceMount(executorInput *pipelinespec.ExecutorInput) bool { + // Check if any input parameter is the workspace path placeholder + for _, param := range executorInput.GetInputs().GetParameterValues() { + if strVal, ok := param.GetKind().(*structpb.Value_StringValue); ok { + if strings.Contains(strVal.StringValue, "{{$.workspace_path}}") { + return true + } + + if strings.HasPrefix(strVal.StringValue, component.WorkspaceMountPath) { + return true + } + } + } + + // Check if any input artifact has workspace metadata + for _, artifactList := range executorInput.GetInputs().GetArtifacts() { + if len(artifactList.Artifacts) == 0 { + continue + } + // first artifact is used, as the list is expected to contain a single artifact + artifact := artifactList.Artifacts[0] + if artifact.Metadata != nil { + if workspaceVal, ok := artifact.Metadata.Fields["_kfp_workspace"]; ok { + if boolVal, ok := workspaceVal.GetKind().(*structpb.Value_BoolValue); ok && boolVal.BoolValue { + return true + } + } + } + } + + return false +} + +// getWorkspaceMount gets the workspace volume and volume mount. 
+func getWorkspaceMount(pvcName string) (k8score.Volume, k8score.VolumeMount) { + workspaceVolume := k8score.Volume{ + Name: component.WorkspaceVolumeName, + VolumeSource: k8score.VolumeSource{ + PersistentVolumeClaim: &k8score.PersistentVolumeClaimVolumeSource{ + ClaimName: pvcName, + }, + }, + } + + workspaceVolumeMount := k8score.VolumeMount{ + Name: component.WorkspaceVolumeName, + MountPath: component.WorkspaceMountPath, + } + + return workspaceVolume, workspaceVolumeMount +} + // addModelcarsToPodSpec will patch the pod spec if there are any input artifacts in the Modelcar format. // Much of this logic is based on KServe: // https://github.com/kserve/kserve/blob/v0.14.1/pkg/webhook/admission/pod/storage_initializer_injector.go#L131 @@ -497,7 +662,6 @@ func provisionOutputs( outputs := &pipelinespec.ExecutorInput_Outputs{ Artifacts: make(map[string]*pipelinespec.ArtifactList), Parameters: make(map[string]*pipelinespec.ExecutorInput_OutputParameter), - OutputFile: component.OutputMetadataFilepath, } artifacts := outputsSpec.GetArtifacts() @@ -517,13 +681,23 @@ func provisionOutputs( } } + // Compute a task-root remote URI that will serve as the base for all + // output artifacts and the executor output file. This enables Pythonic + // artifacts (dsl.get_uri) by allowing the SDK to infer the task root from + // the executor output file's directory (set below) and convert it back to + // a remote URI at runtime. + taskRootRemote := metadata.GenerateOutputURI(pipelineRoot, []string{taskName, outputURISalt}, false) + + // Set per-artifact output URIs under the task root. for name, artifact := range artifacts { outputs.Artifacts[name] = &pipelinespec.ArtifactList{ Artifacts: []*pipelinespec.RuntimeArtifact{ { + // Required by Pythonic artifacts to avoid a key error in the SDK. + Name: name, // Do not preserve the query string for output artifacts, as otherwise // they'd appear in file and artifact names. - Uri: metadata.GenerateOutputURI(pipelineRoot, []string{taskName, outputURISalt, name}, false), + Uri: metadata.GenerateOutputURI(taskRootRemote, []string{name}, false), Type: artifact.GetArtifactType(), Metadata: artifact.GetMetadata(), }, @@ -537,5 +711,31 @@ func provisionOutputs( } } + // Place the executor output file under localTaskRoot to enable Pythonic artifacts. The SDK's pythonic artifact + // runtime derives CONTAINER_TASK_ROOT from the directory of OutputFile to use it in dsl.get_uri. + if localTaskRoot, err := component.LocalPathForURI(taskRootRemote); err == nil { + outputs.OutputFile = filepath.Join(localTaskRoot, "output_metadata.json") + } else { + // Fallback to legacy path if the pipeline root scheme is not recognized. 
+ outputs.OutputFile = component.OutputMetadataFilepath + } + return outputs } + +func validateVolumeMounts(podSpec *k8score.PodSpec) error { + // Validate that no user volume mounts conflict with the workspace mount path or volume name + for _, container := range podSpec.Containers { + for _, mount := range container.VolumeMounts { + if strings.HasPrefix(mount.MountPath, component.WorkspaceMountPath) { + return fmt.Errorf("user volume mount at %s conflicts with workspace mount at %s", mount.MountPath, component.WorkspaceMountPath) + } + + if mount.Name == component.WorkspaceVolumeName { + return fmt.Errorf("user volume mount name %s conflicts with workspace volume name %s", mount.Name, component.WorkspaceVolumeName) + } + } + } + + return nil +} diff --git a/backend/src/v2/driver/driver_test.go b/backend/src/v2/driver/driver_test.go index 1067e3d3a5e..34197f6241a 100644 --- a/backend/src/v2/driver/driver_test.go +++ b/backend/src/v2/driver/driver_test.go @@ -14,12 +14,17 @@ package driver import ( + "context" "encoding/json" + "fmt" "testing" + "github.com/stretchr/testify/require" + "github.com/kubeflow/pipelines/backend/src/apiserver/config/proxy" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + "github.com/kubeflow/pipelines/kubernetes_platform/go/kubernetesplatform" "github.com/spf13/viper" "github.com/stretchr/testify/assert" "google.golang.org/protobuf/types/known/structpb" @@ -258,6 +263,8 @@ func Test_initPodSpecPatch_acceleratorConfig(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { + taskConfig := &TaskConfig{} + podSpec, err := initPodSpecPatch( tt.args.container, tt.args.componentSpec, @@ -265,6 +272,7 @@ func Test_initPodSpecPatch_acceleratorConfig(t *testing.T) { tt.args.executionID, tt.args.pipelineName, tt.args.runID, + "my-run-name", tt.args.pipelineLogLevel, tt.args.publishLogs, "false", @@ -273,6 +281,7 @@ func Test_initPodSpecPatch_acceleratorConfig(t *testing.T) { "unused-mlmd-server-port", false, "unused-ca-cert-path", + taskConfig, ) if tt.wantErr { assert.Nil(t, podSpec) @@ -284,6 +293,9 @@ func Test_initPodSpecPatch_acceleratorConfig(t *testing.T) { assert.Nil(t, err) assert.Contains(t, string(podSpecString), tt.want) } + + assert.Empty(t, taskConfig.Resources.Limits) + assert.Empty(t, taskConfig.Resources.Requests) }) } } @@ -372,6 +384,8 @@ func Test_initPodSpecPatch_resource_placeholders(t *testing.T) { }, } + taskConfig := &TaskConfig{} + podSpec, err := initPodSpecPatch( containerSpec, componentSpec, @@ -379,6 +393,7 @@ func Test_initPodSpecPatch_resource_placeholders(t *testing.T) { 27, "test", "0254beba-0be4-4065-8d97-7dc5e3adf300", + "my-run-name", "1", "false", "false", @@ -387,6 +402,7 @@ func Test_initPodSpecPatch_resource_placeholders(t *testing.T) { "unused-mlmd-server-port", false, "unused-ca-cert-path", + taskConfig, ) assert.Nil(t, err) assert.Len(t, podSpec.Containers, 1) @@ -397,6 +413,9 @@ func Test_initPodSpecPatch_resource_placeholders(t *testing.T) { assert.Equal(t, k8sres.MustParse("100Mi"), res.Requests[k8score.ResourceMemory]) assert.Equal(t, k8sres.MustParse("500Mi"), res.Limits[k8score.ResourceMemory]) assert.Equal(t, k8sres.MustParse("1"), res.Limits[k8score.ResourceName("nvidia.com/gpu")]) + + assert.Empty(t, taskConfig.Resources.Limits) + assert.Empty(t, taskConfig.Resources.Requests) } func Test_initPodSpecPatch_legacy_resources(t *testing.T) { @@ -417,6 +436,7 @@ func Test_initPodSpecPatch_legacy_resources(t *testing.T) { } componentSpec := &pipelinespec.ComponentSpec{} executorInput := 
&pipelinespec.ExecutorInput{} + taskConfig := &TaskConfig{} podSpec, err := initPodSpecPatch( containerSpec, @@ -425,6 +445,7 @@ func Test_initPodSpecPatch_legacy_resources(t *testing.T) { 27, "test", "0254beba-0be4-4065-8d97-7dc5e3adf300", + "my-run-name", "1", "false", "false", @@ -433,6 +454,7 @@ func Test_initPodSpecPatch_legacy_resources(t *testing.T) { "unused-mlmd-server-port", false, "unused-ca-cert-path", + taskConfig, ) assert.Nil(t, err) assert.Len(t, podSpec.Containers, 1) @@ -443,6 +465,9 @@ func Test_initPodSpecPatch_legacy_resources(t *testing.T) { assert.Equal(t, k8sres.MustParse("100Mi"), res.Requests[k8score.ResourceMemory]) assert.Equal(t, k8sres.MustParse("500Mi"), res.Limits[k8score.ResourceMemory]) assert.Equal(t, k8sres.MustParse("1"), res.Limits[k8score.ResourceName("nvidia.com/gpu")]) + + assert.Empty(t, taskConfig.Resources.Limits) + assert.Empty(t, taskConfig.Resources.Requests) } func Test_initPodSpecPatch_modelcar_input_artifact(t *testing.T) { @@ -465,6 +490,7 @@ func Test_initPodSpecPatch_modelcar_input_artifact(t *testing.T) { }, }, } + taskConfig := &TaskConfig{} podSpec, err := initPodSpecPatch( containerSpec, @@ -473,6 +499,7 @@ func Test_initPodSpecPatch_modelcar_input_artifact(t *testing.T) { 27, "test", "0254beba-0be4-4065-8d97-7dc5e3adf300", + "my-run-name", "1", "false", "false", @@ -481,6 +508,7 @@ func Test_initPodSpecPatch_modelcar_input_artifact(t *testing.T) { "unused-mlmd-server-port", false, "unused-ca-cert-path", + taskConfig, ) assert.Nil(t, err) @@ -504,6 +532,9 @@ func Test_initPodSpecPatch_modelcar_input_artifact(t *testing.T) { assert.Equal(t, podSpec.Containers[1].VolumeMounts[0].Name, "oci-0") assert.Equal(t, podSpec.Containers[1].VolumeMounts[0].MountPath, "/oci/registry.domain.local_my-model:latest") assert.Equal(t, podSpec.Containers[1].VolumeMounts[0].SubPath, "registry.domain.local_my-model:latest") + + assert.Empty(t, taskConfig.Resources.Limits) + assert.Empty(t, taskConfig.Resources.Requests) } // Validate that setting publishLogs to true propagates to the driver container @@ -517,6 +548,7 @@ func Test_initPodSpecPatch_publishLogs(t *testing.T) { 27, "test", "0254beba-0be4-4065-8d97-7dc5e3adf300", + "my-run-name", "1", "true", "false", @@ -525,6 +557,7 @@ func Test_initPodSpecPatch_publishLogs(t *testing.T) { "unused-mlmd-server-port", false, "unused-ca-cert-path", + nil, ) assert.Nil(t, err) cmd := podSpec.Containers[0].Command @@ -535,7 +568,6 @@ func Test_initPodSpecPatch_publishLogs(t *testing.T) { assert.Equal(t, cmd[idx+1], "true") } } - } func Test_initPodSpecPatch_resourceRequests(t *testing.T) { @@ -636,6 +668,8 @@ func Test_initPodSpecPatch_resourceRequests(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { + taskConfig := &TaskConfig{} + podSpec, err := initPodSpecPatch( tt.args.container, tt.args.componentSpec, @@ -643,6 +677,7 @@ func Test_initPodSpecPatch_resourceRequests(t *testing.T) { tt.args.executionID, tt.args.pipelineName, tt.args.runID, + "my-run-name", tt.args.pipelineLogLevel, tt.args.publishLogs, "false", @@ -651,6 +686,7 @@ func Test_initPodSpecPatch_resourceRequests(t *testing.T) { "unused-mlmd-server-port", false, "unused-ca-cert-path", + taskConfig, ) assert.Nil(t, err) assert.NotEmpty(t, podSpec) @@ -662,6 +698,834 @@ func Test_initPodSpecPatch_resourceRequests(t *testing.T) { if tt.notWant != "" { assert.NotContains(t, string(podSpecString), tt.notWant) } + + assert.Empty(t, taskConfig.Resources.Limits) + assert.Empty(t, taskConfig.Resources.Requests) + }) + } +} + 
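Reviewer note: these tests (and the new ones below) thread a *TaskConfig through initPodSpecPatch and assert on its fields, but the struct itself is defined elsewhere in package driver. From the assertions, its shape is roughly the sketch below; field names are inferred, not authoritative.

// Inferred from the test assertions; the canonical definition lives elsewhere in package driver.
type TaskConfig struct {
	Resources    k8score.ResourceRequirements // captured when a RESOURCES passthrough is requested
	Env          []k8score.EnvVar             // captured when an ENV passthrough is requested
	Volumes      []k8score.Volume             // captured when a KUBERNETES_VOLUMES passthrough is requested
	VolumeMounts []k8score.VolumeMount
	NodeSelector map[string]string     // captured when a KUBERNETES_NODE_SELECTOR passthrough is requested
	Tolerations  []k8score.Toleration  // captured when a KUBERNETES_TOLERATIONS passthrough is requested
	Affinity     *k8score.Affinity     // captured when a KUBERNETES_AFFINITY passthrough is requested
}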
+func Test_initPodSpecPatch_TaskConfig_ForwardsResourcesOnly(t *testing.T) { + proxy.InitializeConfigWithEmptyForTests() + + containerSpec := &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{ + Image: "python:3.9", + Args: []string{"--function_to_execute", "add"}, + Command: []string{"sh", "-ec", "python3 -m kfp.components.executor_main"}, + Resources: &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec{ + ResourceCpuLimit: "2.0", + ResourceMemoryLimit: "1.5", + ResourceCpuRequest: "1.0", + ResourceMemoryRequest: "0.65G", + }, + } + componentSpec := &pipelinespec.ComponentSpec{ + TaskConfigPassthroughs: []*pipelinespec.TaskConfigPassthrough{ + { + Field: pipelinespec.TaskConfigPassthroughType_RESOURCES, + ApplyToTask: false, + }, + }, + } + executorInput := &pipelinespec.ExecutorInput{} + + taskCfg := &TaskConfig{} + podSpec, err := initPodSpecPatch( + containerSpec, + componentSpec, + executorInput, + 27, + "test", + "0254beba-0be4-4065-8d97-7dc5e3adf300", + "my-run-name", + "1", + "false", + "false", + false, + "unused-mlmd-server-address", + "unused-mlmd-server-port", + false, + "unused-ca-cert-path", + taskCfg, + ) + assert.Nil(t, err) + assert.NotNil(t, podSpec) + assert.Len(t, podSpec.Containers, 1) + + assert.Empty(t, podSpec.Containers[0].Resources.Requests) + assert.Empty(t, podSpec.Containers[0].Resources.Limits) + + // Forwarded resources captured in TaskConfig + res := taskCfg.Resources + assert.True(t, res.Requests[k8score.ResourceCPU].Equal(k8sres.MustParse("1"))) + assert.True(t, res.Limits[k8score.ResourceCPU].Equal(k8sres.MustParse("2"))) + assert.True(t, res.Requests[k8score.ResourceMemory].Equal(k8sres.MustParse("0.65G"))) + assert.True(t, res.Limits[k8score.ResourceMemory].Equal(k8sres.MustParse("1.5"))) +} + +func Test_initPodSpecPatch_inputTaskFinalStatus(t *testing.T) { + proxy.InitializeConfigWithEmptyForTests() + containerSpec := &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{ + Image: "python:3.9", + Command: []string{"sh", "-ec", "python3 -m kfp.components.executor_main"}, + Args: []string{"--executor-input", "{{$}}", "--function_to_execute", "exit-op"}, + } + componentSpec := &pipelinespec.ComponentSpec{ + Implementation: &pipelinespec.ComponentSpec_ExecutorLabel{ExecutorLabel: "exec-exit-op"}, + InputDefinitions: &pipelinespec.ComponentInputsSpec{ + Parameters: map[string]*pipelinespec.ComponentInputsSpec_ParameterSpec{ + "status": {ParameterType: pipelinespec.ParameterType_TASK_FINAL_STATUS}, + }, + }, + } + finalStatusStruct, _ := structpb.NewStruct(map[string]interface{}{ + "state": "test-state", + "pipelineTaskName": "test-pipeline-task-name", + "pipelineJobResourceName": "test-job-resource-name", + "error": map[string]interface{}{}, + }) + executorInput := &pipelinespec.ExecutorInput{ + Inputs: &pipelinespec.ExecutorInput_Inputs{ + ParameterValues: map[string]*structpb.Value{ + "status": { + Kind: &structpb.Value_StructValue{ + StructValue: finalStatusStruct, + }, + }, + }, + }, + } + + podSpec, err := initPodSpecPatch( + containerSpec, + componentSpec, + executorInput, + 27, + "test", + "0254beba-0be4-4065-8d97-7dc5e3adf300", + "my-run-name", + "1", + "false", + "false", + false, + "unused-mlmd-server-address", + "unused-mlmd-server-port", + false, + "unused-ca-cert-path", + nil, + ) + require.Nil(t, err) + + expectedExecutorInput := map[string]interface{}{ + "inputs": map[string]interface{}{ + "parameterValues": map[string]interface{}{ + "status": map[string]interface{}{ + "error": map[string]interface{}{}, 
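Reviewer note: one subtlety this test pins down is that the string-valued ResourceSpec fields are parsed verbatim as resource.Quantity values, while the deprecated float fields (exercised in Test_initPodSpecPatch_TaskConfig_Resources_Passthrough_ApplyAndCapture below) are interpreted as CPU cores and gigabytes. A quick sanity check, using this file's existing k8sres alias for k8s.io/apimachinery/pkg/api/resource:

_ = k8sres.MustParse("1.5")   // what ResourceMemoryLimit: "1.5" forwards verbatim (1.5 bytes)
_ = k8sres.MustParse("1.5G")  // what the legacy float MemoryLimit: 1.5 is converted to
_ = k8sres.MustParse("0.65G") // matches ResourceMemoryRequest: "0.65G" in the test above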
+ "pipelineJobResourceName": "test-job-resource-name", + "pipelineTaskName": "test-pipeline-task-name", + "state": "test-state", + }, + }, + }, + } + expectedComponentSpec := map[string]interface{}{ + "executorLabel": "exec-exit-op", + "inputDefinitions": map[string]interface{}{ + "parameters": map[string]interface{}{ + "status": map[string]interface{}{ + "parameterType": "TASK_FINAL_STATUS", + }, + }, + }, + } + actualExecutorInput := map[string]interface{}{} + actualComponentSpec := map[string]interface{}{} + + for i, arg := range podSpec.Containers[0].Command { + if arg == "--executor_input" { + err := json.Unmarshal([]byte(podSpec.Containers[0].Command[i+1]), &actualExecutorInput) + fmt.Println(podSpec.Containers[0].Command[i+1]) + require.Nil(t, err) + } + if arg == "--component_spec" { + err := json.Unmarshal([]byte(podSpec.Containers[0].Command[i+1]), &actualComponentSpec) + require.Nil(t, err) + } + } + + assert.Equal(t, expectedExecutorInput, actualExecutorInput) + assert.Equal(t, expectedComponentSpec, actualComponentSpec) +} + +func TestNeedsWorkspaceMount(t *testing.T) { + tests := []struct { + name string + executorInput *pipelinespec.ExecutorInput + expected bool + }{ + { + name: "workspace path placeholder in parameters", + executorInput: &pipelinespec.ExecutorInput{ + Inputs: &pipelinespec.ExecutorInput_Inputs{ + ParameterValues: map[string]*structpb.Value{ + "workspace_path": { + Kind: &structpb.Value_StringValue{ + StringValue: "{{$.workspace_path}}", + }, + }, + }, + }, + }, + expected: true, + }, + { + name: "workspace path prefix in parameters", + executorInput: &pipelinespec.ExecutorInput{ + Inputs: &pipelinespec.ExecutorInput_Inputs{ + ParameterValues: map[string]*structpb.Value{ + "file_path": { + Kind: &structpb.Value_StringValue{ + StringValue: "/kfp-workspace/data/file.txt", + }, + }, + }, + }, + }, + expected: true, + }, + { + name: "artifact with workspace metadata", + executorInput: &pipelinespec.ExecutorInput{ + Inputs: &pipelinespec.ExecutorInput_Inputs{ + Artifacts: map[string]*pipelinespec.ArtifactList{ + "model": { + Artifacts: []*pipelinespec.RuntimeArtifact{ + { + Metadata: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "_kfp_workspace": { + Kind: &structpb.Value_BoolValue{ + BoolValue: true, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + expected: true, + }, + { + name: "workspace path with subdirectory", + executorInput: &pipelinespec.ExecutorInput{ + Inputs: &pipelinespec.ExecutorInput_Inputs{ + ParameterValues: map[string]*structpb.Value{ + "file_path": { + Kind: &structpb.Value_StringValue{ + StringValue: "{{$.workspace_path}}/data", + }, + }, + }, + }, + }, + expected: true, + }, + { + name: "no workspace usage", + executorInput: &pipelinespec.ExecutorInput{ + Inputs: &pipelinespec.ExecutorInput_Inputs{ + ParameterValues: map[string]*structpb.Value{ + "text": { + Kind: &structpb.Value_StringValue{ + StringValue: "hello world", + }, + }, + }, + }, + }, + expected: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := needsWorkspaceMount(tt.executorInput) + if result != tt.expected { + t.Errorf("needsWorkspaceMount() = %v, want %v", result, tt.expected) + } + }) + } +} + +func TestGetWorkspaceMount(t *testing.T) { + pvcName := "test-workflow-kfp-workspace" + + workspaceVolume, workspaceVolumeMount := getWorkspaceMount(pvcName) + + if workspaceVolume.Name != "kfp-workspace" { + t.Errorf("Expected volume name kfp-workspace, got %s", workspaceVolume.Name) + } + + if 
workspaceVolume.PersistentVolumeClaim == nil { + t.Error("Expected PersistentVolumeClaim to be set") + } + + if workspaceVolume.PersistentVolumeClaim.ClaimName != pvcName { + t.Errorf("Expected claim name %s, got %s", pvcName, workspaceVolume.PersistentVolumeClaim.ClaimName) + } + + if workspaceVolumeMount.Name != "kfp-workspace" { + t.Errorf("Expected volume mount name kfp-workspace, got %s", workspaceVolumeMount.Name) + } + + if workspaceVolumeMount.MountPath != "/kfp-workspace" { + t.Errorf("Expected mount path /kfp-workspace, got %s", workspaceVolumeMount.MountPath) + } +} + +// Ensure that when workspace is used, missing RunName leads to an error during pod spec init. +func Test_initPodSpecPatch_WorkspaceRequiresRunName(t *testing.T) { + containerSpec := &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{Image: "python:3.9"} + componentSpec := &pipelinespec.ComponentSpec{} + executorInput := &pipelinespec.ExecutorInput{ + Inputs: &pipelinespec.ExecutorInput_Inputs{ + ParameterValues: map[string]*structpb.Value{ + "workspace_param": {Kind: &structpb.Value_StringValue{StringValue: "{{$.workspace_path}}"}}, + }, + }, + } + taskCfg := &TaskConfig{} + _, err := initPodSpecPatch( + containerSpec, + componentSpec, + executorInput, + 27, + "test", + "run-id", + "", // runName intentionally empty + "1", + "false", + "false", + false, + "unused-mlmd-server-address", + "unused-mlmd-server-port", + false, + "unused-ca-cert-path", + taskCfg, + ) + require.NotNil(t, err) +} + +func TestValidateVolumeMounts(t *testing.T) { + tests := []struct { + name string + podSpec *k8score.PodSpec + expectError bool + }{ + { + name: "no conflicting volume mounts", + podSpec: &k8score.PodSpec{ + Containers: []k8score.Container{ + { + Name: "main", + VolumeMounts: []k8score.VolumeMount{ + { + Name: "data", + MountPath: "/data", + }, + }, + }, + }, + }, + expectError: false, + }, + { + name: "conflicting volume mount path", + podSpec: &k8score.PodSpec{ + Containers: []k8score.Container{ + { + Name: "main", + VolumeMounts: []k8score.VolumeMount{ + { + Name: "workspace", + MountPath: "/kfp-workspace", + }, + }, + }, + }, + }, + expectError: true, + }, + { + name: "conflicting volume mount subpath", + podSpec: &k8score.PodSpec{ + Containers: []k8score.Container{ + { + Name: "main", + VolumeMounts: []k8score.VolumeMount{ + { + Name: "data", + MountPath: "/kfp-workspace/data", + }, + }, + }, + }, + }, + expectError: true, + }, + { + name: "conflicting volume name kfp-workspace", + podSpec: &k8score.PodSpec{ + Containers: []k8score.Container{ + { + Name: "main", + VolumeMounts: []k8score.VolumeMount{ + { + Name: "kfp-workspace", + MountPath: "/data", + }, + }, + }, + }, + }, + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := validateVolumeMounts(tt.podSpec) + if tt.expectError && err == nil { + t.Error("Expected error but got none") + } + if !tt.expectError && err != nil { + t.Errorf("Expected no error but got: %v", err) + } }) } } + +func TestWorkspaceMount_PassthroughVolumes_CaptureOnly(t *testing.T) { + containerSpec := &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{Image: "python:3.9"} + componentSpec := &pipelinespec.ComponentSpec{ + TaskConfigPassthroughs: []*pipelinespec.TaskConfigPassthrough{ + { + Field: pipelinespec.TaskConfigPassthroughType_KUBERNETES_VOLUMES, + ApplyToTask: false, + }, + }, + } + executorInput := &pipelinespec.ExecutorInput{ + Inputs: &pipelinespec.ExecutorInput_Inputs{ + ParameterValues: 
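Reviewer note: putting the new helpers together, the intended wiring inside initPodSpecPatch presumably looks something like the fragment below (hypothetical; the actual call site is not in this hunk, and the TaskConfig passthrough branch is omitted for brevity). The empty-runName guard matches Test_initPodSpecPatch_WorkspaceRequiresRunName, and the PVC name derivation matches the "my-run-name-kfp-workspace" assertions in the passthrough tests.

// Hypothetical wiring of the workspace helpers; illustrative only.
if needsWorkspaceMount(executorInput) {
	if runName == "" {
		return nil, fmt.Errorf("workspace is used but the run name (workflow name) is not set")
	}
	if err := validateVolumeMounts(podSpec); err != nil {
		return nil, err
	}
	volume, mount := getWorkspaceMount(GetWorkspacePVCName(runName))
	podSpec.Volumes = append(podSpec.Volumes, volume)
	podSpec.Containers[0].VolumeMounts = append(podSpec.Containers[0].VolumeMounts, mount)
}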
map[string]*structpb.Value{ + "workspace_param": {Kind: &structpb.Value_StringValue{StringValue: "{{$.workspace_path}}"}}, + }, + }, + } + taskCfg := &TaskConfig{} + podSpec, err := initPodSpecPatch( + containerSpec, componentSpec, executorInput, + 27, "test", "run", "my-run-name", "1", "false", "false", false, "unused-mlmd-server-address", "unused-mlmd-server-port", false, "unused-ca-cert-path", taskCfg, + ) + assert.Nil(t, err) + + // Should not mount workspace to pod (no volumes on pod), only capture to TaskConfig + assert.Empty(t, podSpec.Volumes) + assert.Empty(t, podSpec.Containers[0].VolumeMounts) + assert.NotEmpty(t, taskCfg.Volumes) + assert.NotEmpty(t, taskCfg.VolumeMounts) + + if assert.Len(t, taskCfg.Volumes, 1) { + assert.Equal(t, "kfp-workspace", taskCfg.Volumes[0].Name) + if assert.NotNil(t, taskCfg.Volumes[0].PersistentVolumeClaim) { + assert.Equal(t, "my-run-name-kfp-workspace", taskCfg.Volumes[0].PersistentVolumeClaim.ClaimName) + } + } + + if assert.Len(t, taskCfg.VolumeMounts, 1) { + assert.Equal(t, "kfp-workspace", taskCfg.VolumeMounts[0].Name) + assert.Equal(t, "/kfp-workspace", taskCfg.VolumeMounts[0].MountPath) + } +} + +func TestWorkspaceMount_PassthroughVolumes_ApplyAndCapture(t *testing.T) { + containerSpec := &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{Image: "python:3.9"} + componentSpec := &pipelinespec.ComponentSpec{ + TaskConfigPassthroughs: []*pipelinespec.TaskConfigPassthrough{ + { + Field: pipelinespec.TaskConfigPassthroughType_KUBERNETES_VOLUMES, + ApplyToTask: true, + }, + }, + } + executorInput := &pipelinespec.ExecutorInput{ + Inputs: &pipelinespec.ExecutorInput_Inputs{ + ParameterValues: map[string]*structpb.Value{ + "workspace_param": {Kind: &structpb.Value_StringValue{StringValue: "{{$.workspace_path}}"}}, + }, + }, + } + taskCfg := &TaskConfig{} + podSpec, err := initPodSpecPatch( + containerSpec, componentSpec, executorInput, + 27, "test", "run", "my-run-name", "1", "false", "false", false, "unused-mlmd-server-address", "unused-mlmd-server-port", false, "unused-ca-cert-path", taskCfg, + ) + assert.Nil(t, err) + // Should mount workspace to pod and also capture to TaskConfig + assert.NotEmpty(t, podSpec.Volumes) + assert.NotEmpty(t, podSpec.Containers[0].VolumeMounts) + assert.NotEmpty(t, taskCfg.Volumes) + assert.NotEmpty(t, taskCfg.VolumeMounts) + + if assert.Len(t, podSpec.Volumes, 1) { + assert.Equal(t, "kfp-workspace", podSpec.Volumes[0].Name) + if assert.NotNil(t, podSpec.Volumes[0].PersistentVolumeClaim) { + assert.Equal(t, "my-run-name-kfp-workspace", podSpec.Volumes[0].PersistentVolumeClaim.ClaimName) + } + } + + if assert.Len(t, podSpec.Containers, 1) { + if assert.Len(t, podSpec.Containers[0].VolumeMounts, 1) { + assert.Equal(t, "kfp-workspace", podSpec.Containers[0].VolumeMounts[0].Name) + assert.Equal(t, "/kfp-workspace", podSpec.Containers[0].VolumeMounts[0].MountPath) + } + } + + if assert.Len(t, taskCfg.Volumes, 1) { + assert.Equal(t, "kfp-workspace", taskCfg.Volumes[0].Name) + if assert.NotNil(t, taskCfg.Volumes[0].PersistentVolumeClaim) { + assert.Equal(t, "my-run-name-kfp-workspace", taskCfg.Volumes[0].PersistentVolumeClaim.ClaimName) + } + } + + if assert.Len(t, taskCfg.VolumeMounts, 1) { + assert.Equal(t, "kfp-workspace", taskCfg.VolumeMounts[0].Name) + assert.Equal(t, "/kfp-workspace", taskCfg.VolumeMounts[0].MountPath) + } +} + +func Test_initPodSpecPatch_TaskConfig_Env_Passthrough_CaptureOnly(t *testing.T) { + proxy.InitializeConfigWithEmptyForTests() + containerSpec := 
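// Summary of the KUBERNETES_VOLUMES passthrough behavior exercised by the two
// workspace tests above (derived from their assertions, not new behavior):
//
//   apply_to_task = false -> workspace volume/mount captured only in TaskConfig;
//                            podSpec.Volumes and the main container's VolumeMounts stay empty.
//   apply_to_task = true  -> workspace volume/mount applied to the pod *and* captured in TaskConfig.
//
// In both cases the PVC claim name is "<run name>-kfp-workspace" and the mount path is /kfp-workspace.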
&pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{ + Image: "python:3.9", + Env: []*pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec_EnvVar{{ + Name: "FOO", + Value: "bar", + }}, + } + componentSpec := &pipelinespec.ComponentSpec{ + TaskConfigPassthroughs: []*pipelinespec.TaskConfigPassthrough{ + {Field: pipelinespec.TaskConfigPassthroughType_ENV, ApplyToTask: false}, + }, + } + executorInput := &pipelinespec.ExecutorInput{} + taskCfg := &TaskConfig{} + podSpec, err := initPodSpecPatch( + containerSpec, + componentSpec, + executorInput, + 27, + "test", + "run", + "my-run-name", + "1", + "false", + "false", + false, + "unused-mlmd-server-address", + "unused-mlmd-server-port", + false, + "unused-ca-cert-path", + taskCfg, + ) + assert.Nil(t, err) + + // Env should be captured to TaskConfig only, not applied to pod + assert.Empty(t, podSpec.Containers[0].Env) + + if assert.Len(t, taskCfg.Env, 1) { + assert.Equal(t, "FOO", taskCfg.Env[0].Name) + assert.Equal(t, "bar", taskCfg.Env[0].Value) + } +} + +func Test_initPodSpecPatch_TaskConfig_Resources_Passthrough_ApplyAndCapture(t *testing.T) { + proxy.InitializeConfigWithEmptyForTests() + containerSpec := &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{ + Image: "python:3.9", + Args: []string{"--function_to_execute", "add"}, + Command: []string{"sh", "-ec", "python3 -m kfp.components.executor_main"}, + Resources: &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec{ + CpuLimit: 2.0, + MemoryLimit: 1.5, + CpuRequest: 1.0, + MemoryRequest: 0.65, + }, + } + componentSpec := &pipelinespec.ComponentSpec{ + TaskConfigPassthroughs: []*pipelinespec.TaskConfigPassthrough{ + {Field: pipelinespec.TaskConfigPassthroughType_RESOURCES, ApplyToTask: true}, + }, + } + executorInput := &pipelinespec.ExecutorInput{} + taskCfg := &TaskConfig{} + podSpec, err := initPodSpecPatch( + containerSpec, + componentSpec, + executorInput, + 27, + "test", + "run", + "my-run-name", + "1", + "false", + "false", + false, + "unused-mlmd-server-address", + "unused-mlmd-server-port", + false, + "unused-ca-cert-path", + taskCfg, + ) + assert.Nil(t, err) + // Resources should be both on pod and in TaskConfig + assert.NotEmpty(t, podSpec.Containers[0].Resources.Requests) + assert.NotEmpty(t, podSpec.Containers[0].Resources.Limits) + assert.NotEmpty(t, taskCfg.Resources.Requests) + assert.NotEmpty(t, taskCfg.Resources.Limits) + + resPod := podSpec.Containers[0].Resources + assert.Equal(t, k8sres.MustParse("1"), resPod.Requests[k8score.ResourceCPU]) + assert.Equal(t, k8sres.MustParse("2"), resPod.Limits[k8score.ResourceCPU]) + assert.Equal(t, k8sres.MustParse("0.65G"), resPod.Requests[k8score.ResourceMemory]) + assert.Equal(t, k8sres.MustParse("1.5G"), resPod.Limits[k8score.ResourceMemory]) + + resCfg := taskCfg.Resources + assert.Equal(t, k8sres.MustParse("1"), resCfg.Requests[k8score.ResourceCPU]) + assert.Equal(t, k8sres.MustParse("2"), resCfg.Limits[k8score.ResourceCPU]) + assert.Equal(t, k8sres.MustParse("0.65G"), resCfg.Requests[k8score.ResourceMemory]) + assert.Equal(t, k8sres.MustParse("1.5G"), resCfg.Limits[k8score.ResourceMemory]) +} + +func Test_initPodSpecPatch_TaskConfig_Affinity_NodeSelector_Tolerations_Passthrough(t *testing.T) { + proxy.InitializeConfigWithEmptyForTests() + + containerSpec := &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{Image: "python:3.9"} + componentSpec := &pipelinespec.ComponentSpec{ + TaskConfigPassthroughs: []*pipelinespec.TaskConfigPassthrough{ + {Field: 
pipelinespec.TaskConfigPassthroughType_KUBERNETES_AFFINITY, ApplyToTask: false}, + {Field: pipelinespec.TaskConfigPassthroughType_KUBERNETES_NODE_SELECTOR, ApplyToTask: false}, + {Field: pipelinespec.TaskConfigPassthroughType_KUBERNETES_TOLERATIONS, ApplyToTask: false}, + }, + } + + secs := int64(3600) + k8sExecCfg := &kubernetesplatform.KubernetesExecutorConfig{ + NodeSelector: &kubernetesplatform.NodeSelector{Labels: map[string]string{"disktype": "ssd"}}, + Tolerations: []*kubernetesplatform.Toleration{{ + Key: "example-key", + Operator: "Exists", + Effect: "NoExecute", + TolerationSeconds: &secs, + }}, + NodeAffinity: []*kubernetesplatform.NodeAffinityTerm{{ + MatchExpressions: []*kubernetesplatform.SelectorRequirement{{ + Key: "zone", + Operator: "In", + Values: []string{"us-west-1"}, + }}, + }}, + } + + opts := Options{ + PipelineName: "p", + RunID: "r", + Component: componentSpec, + Container: containerSpec, + KubernetesExecutorConfig: k8sExecCfg, + } + + executorInput := &pipelinespec.ExecutorInput{Inputs: &pipelinespec.ExecutorInput_Inputs{ParameterValues: map[string]*structpb.Value{}}} + + taskCfg := &TaskConfig{} + + podSpec, err := initPodSpecPatch( + containerSpec, + componentSpec, + executorInput, + 27, + "test", + "run", + "my-run-name", + "1", + "false", + "false", + false, + "unused-mlmd-server-address", + "unused-mlmd-server-port", + false, + "unused-ca-cert-path", + taskCfg, + ) + assert.Nil(t, err) + + err = extendPodSpecPatch( + context.Background(), + podSpec, + opts, + nil, + nil, + nil, + map[string]*structpb.Value{}, + taskCfg, + ) + assert.Nil(t, err) + + assert.Nil(t, podSpec.Affinity) + assert.Empty(t, podSpec.NodeSelector) + assert.Empty(t, podSpec.Tolerations) + + assert.Equal(t, map[string]string{"disktype": "ssd"}, taskCfg.NodeSelector) + if assert.Len(t, taskCfg.Tolerations, 1) { + assert.Equal(t, "example-key", taskCfg.Tolerations[0].Key) + assert.Equal(t, k8score.TaintEffect("NoExecute"), taskCfg.Tolerations[0].Effect) + if assert.NotNil(t, taskCfg.Tolerations[0].TolerationSeconds) { + assert.Equal(t, int64(3600), *taskCfg.Tolerations[0].TolerationSeconds) + } + } + + if assert.NotNil(t, taskCfg.Affinity) && assert.NotNil(t, taskCfg.Affinity.NodeAffinity) { + if assert.NotNil(t, taskCfg.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution) { + terms := taskCfg.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms + if assert.NotEmpty(t, terms) && assert.NotEmpty(t, terms[0].MatchExpressions) { + expr := terms[0].MatchExpressions[0] + assert.Equal(t, "zone", expr.Key) + assert.Equal(t, k8score.NodeSelectorOpIn, expr.Operator) + assert.Equal(t, []string{"us-west-1"}, expr.Values) + } + } + } +} + +func Test_initPodSpecPatch_TaskConfig_Affinity_NodeSelector_Tolerations_ApplyAndCapture(t *testing.T) { + proxy.InitializeConfigWithEmptyForTests() + + containerSpec := &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{Image: "python:3.9"} + componentSpec := &pipelinespec.ComponentSpec{ + TaskConfigPassthroughs: []*pipelinespec.TaskConfigPassthrough{ + {Field: pipelinespec.TaskConfigPassthroughType_KUBERNETES_AFFINITY, ApplyToTask: true}, + {Field: pipelinespec.TaskConfigPassthroughType_KUBERNETES_NODE_SELECTOR, ApplyToTask: true}, + {Field: pipelinespec.TaskConfigPassthroughType_KUBERNETES_TOLERATIONS, ApplyToTask: true}, + }, + } + + secs := int64(3600) + weight := int32(100) + k8sExecCfg := &kubernetesplatform.KubernetesExecutorConfig{ + NodeSelector: &kubernetesplatform.NodeSelector{Labels: 
map[string]string{"disktype": "ssd"}}, + Tolerations: []*kubernetesplatform.Toleration{{ + Key: "example-key", + Operator: "Exists", + Effect: "NoExecute", + TolerationSeconds: &secs, + }}, + NodeAffinity: []*kubernetesplatform.NodeAffinityTerm{{ + MatchExpressions: []*kubernetesplatform.SelectorRequirement{{ + Key: "zone", + Operator: "In", + Values: []string{"us-west-1"}, + }}, + Weight: &weight, + }}, + } + + opts := Options{ + PipelineName: "p", + RunID: "r", + Component: componentSpec, + Container: containerSpec, + KubernetesExecutorConfig: k8sExecCfg, + } + + executorInput := &pipelinespec.ExecutorInput{Inputs: &pipelinespec.ExecutorInput_Inputs{ParameterValues: map[string]*structpb.Value{}}} + taskCfg := &TaskConfig{} + + podSpec, err := initPodSpecPatch( + containerSpec, + componentSpec, + executorInput, + 27, + "test", + "run", + "my-run-name", + "1", + "false", + "false", + false, + "unused-mlmd-server-address", + "unused-mlmd-server-port", + false, + "unused-ca-cert-path", + taskCfg, + ) + assert.Nil(t, err) + + err = extendPodSpecPatch( + context.Background(), + podSpec, + opts, + nil, + nil, + nil, + map[string]*structpb.Value{}, + taskCfg, + ) + assert.Nil(t, err) + + assert.Equal(t, map[string]string{"disktype": "ssd"}, podSpec.NodeSelector) + if assert.Len(t, podSpec.Tolerations, 1) { + assert.Equal(t, "example-key", podSpec.Tolerations[0].Key) + assert.Equal(t, k8score.TaintEffect("NoExecute"), podSpec.Tolerations[0].Effect) + if assert.NotNil(t, podSpec.Tolerations[0].TolerationSeconds) { + assert.Equal(t, int64(3600), *podSpec.Tolerations[0].TolerationSeconds) + } + } + + if assert.NotNil(t, podSpec.Affinity) && assert.NotNil(t, podSpec.Affinity.NodeAffinity) { + prefs := podSpec.Affinity.NodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution + if assert.NotEmpty(t, prefs) { + assert.Equal(t, int32(100), prefs[0].Weight) + if assert.NotEmpty(t, prefs[0].Preference.MatchExpressions) { + expr := prefs[0].Preference.MatchExpressions[0] + assert.Equal(t, "zone", expr.Key) + assert.Equal(t, k8score.NodeSelectorOpIn, expr.Operator) + assert.Equal(t, []string{"us-west-1"}, expr.Values) + } + } + } + + assert.Equal(t, map[string]string{"disktype": "ssd"}, taskCfg.NodeSelector) + if assert.Len(t, taskCfg.Tolerations, 1) { + assert.Equal(t, "example-key", taskCfg.Tolerations[0].Key) + assert.Equal(t, k8score.TaintEffect("NoExecute"), taskCfg.Tolerations[0].Effect) + if assert.NotNil(t, taskCfg.Tolerations[0].TolerationSeconds) { + assert.Equal(t, int64(3600), *taskCfg.Tolerations[0].TolerationSeconds) + } + } + + if assert.NotNil(t, taskCfg.Affinity) && assert.NotNil(t, taskCfg.Affinity.NodeAffinity) { + prefs := taskCfg.Affinity.NodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution + if assert.NotEmpty(t, prefs) { + assert.Equal(t, int32(100), prefs[0].Weight) + if assert.NotEmpty(t, prefs[0].Preference.MatchExpressions) { + expr := prefs[0].Preference.MatchExpressions[0] + assert.Equal(t, "zone", expr.Key) + assert.Equal(t, k8score.NodeSelectorOpIn, expr.Operator) + assert.Equal(t, []string{"us-west-1"}, expr.Values) + } + } + } +} diff --git a/backend/src/v2/driver/k8s.go b/backend/src/v2/driver/k8s.go index 512af93c143..b4aa89baa85 100644 --- a/backend/src/v2/driver/k8s.go +++ b/backend/src/v2/driver/k8s.go @@ -25,6 +25,7 @@ import ( "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" "github.com/kubeflow/pipelines/backend/src/common/util" "github.com/kubeflow/pipelines/backend/src/v2/cacheutils" + "github.com/kubeflow/pipelines/backend/src/v2/component" 
"github.com/kubeflow/pipelines/backend/src/v2/config" "github.com/kubeflow/pipelines/backend/src/v2/metadata" "github.com/kubeflow/pipelines/kubernetes_platform/go/kubernetesplatform" @@ -104,6 +105,12 @@ func kubernetesPlatformOps( return nil } +// GetWorkspacePVCName gets the name of the workspace PVC for a given run name. runName is the resolved Argo Workflows +// variable of {{workflow.name}} +func GetWorkspacePVCName(runName string) string { + return fmt.Sprintf("%s-%s", runName, component.WorkspaceVolumeName) +} + // Extends the PodSpec to include Kubernetes-specific executor config. // inputParams is a map of the input parameter name to a resolvable value. func extendPodSpecPatch( @@ -114,9 +121,17 @@ func extendPodSpecPatch( pipeline *metadata.Pipeline, mlmd *metadata.Client, inputParams map[string]*structpb.Value, + taskConfig *TaskConfig, ) error { kubernetesExecutorConfig := opts.KubernetesExecutorConfig + setOnTaskConfig, setOnPod := getTaskConfigOptions(opts.Component) + + // Always set setOnTaskConfig to an empty map if taskConfig is nil to avoid nil pointer dereference. + if taskConfig == nil { + setOnTaskConfig = map[pipelinespec.TaskConfigPassthroughType_TaskConfigPassthroughTypeEnum]bool{} + } + // Return an error if the podSpec has no user container. if len(podSpec.Containers) == 0 { return fmt.Errorf("failed to patch the pod with kubernetes-specific config due to missing user container: %v", podSpec) @@ -129,9 +144,17 @@ func extendPodSpecPatch( if err != nil { return fmt.Errorf("failed to extract volume mount info: %w", err) } - podSpec.Volumes = append(podSpec.Volumes, volumes...) - // We assume that the user container always gets executed first within a pod. - podSpec.Containers[0].VolumeMounts = append(podSpec.Containers[0].VolumeMounts, volumeMounts...) + + if setOnTaskConfig[pipelinespec.TaskConfigPassthroughType_KUBERNETES_VOLUMES] { + taskConfig.VolumeMounts = append(taskConfig.VolumeMounts, volumeMounts...) + taskConfig.Volumes = append(taskConfig.Volumes, volumes...) + } + + if setOnPod[pipelinespec.TaskConfigPassthroughType_KUBERNETES_VOLUMES] { + podSpec.Volumes = append(podSpec.Volumes, volumes...) + // We assume that the user container always gets executed first within a pod. + podSpec.Containers[0].VolumeMounts = append(podSpec.Containers[0].VolumeMounts, volumeMounts...) 
+ } } // Get image pull policy @@ -154,16 +177,23 @@ func extendPodSpecPatch( // Get node selector information if kubernetesExecutorConfig.GetNodeSelector() != nil { + var nodeSelector map[string]string if kubernetesExecutorConfig.GetNodeSelector().GetNodeSelectorJson() != nil { - var nodeSelector map[string]string err := resolveK8sJsonParameter(ctx, opts, dag, pipeline, mlmd, kubernetesExecutorConfig.GetNodeSelector().GetNodeSelectorJson(), inputParams, &nodeSelector) if err != nil { return fmt.Errorf("failed to resolve node selector: %w", err) } - podSpec.NodeSelector = nodeSelector } else { - podSpec.NodeSelector = kubernetesExecutorConfig.GetNodeSelector().GetLabels() + nodeSelector = kubernetesExecutorConfig.GetNodeSelector().GetLabels() + } + + if setOnTaskConfig[pipelinespec.TaskConfigPassthroughType_KUBERNETES_NODE_SELECTOR] { + taskConfig.NodeSelector = nodeSelector + } + + if setOnPod[pipelinespec.TaskConfigPassthroughType_KUBERNETES_NODE_SELECTOR] { + podSpec.NodeSelector = nodeSelector } } @@ -219,8 +249,7 @@ func extendPodSpecPatch( glog.V(4).Info("encountered empty tolerations list, ignoring.") } } else { - return fmt.Errorf("encountered unexpected toleration proto value, " + - "must be either struct or list type.") + return fmt.Errorf("encountered unexpected toleration proto value, must be either struct or list type") } } else { k8sToleration.Key = toleration.Key @@ -233,7 +262,13 @@ func extendPodSpecPatch( } } - podSpec.Tolerations = k8sTolerations + if setOnTaskConfig[pipelinespec.TaskConfigPassthroughType_KUBERNETES_TOLERATIONS] { + taskConfig.Tolerations = k8sTolerations + } + + if setOnPod[pipelinespec.TaskConfigPassthroughType_KUBERNETES_TOLERATIONS] { + podSpec.Tolerations = k8sTolerations + } } // Get secret mount information @@ -266,19 +301,34 @@ func extendPodSpecPatch( Name: secretName, MountPath: secretAsVolume.GetMountPath(), } - podSpec.Volumes = append(podSpec.Volumes, secretVolume) - podSpec.Containers[0].VolumeMounts = append(podSpec.Containers[0].VolumeMounts, secretVolumeMount) + + if setOnTaskConfig[pipelinespec.TaskConfigPassthroughType_KUBERNETES_VOLUMES] { + taskConfig.Volumes = append(taskConfig.Volumes, secretVolume) + taskConfig.VolumeMounts = append(taskConfig.VolumeMounts, secretVolumeMount) + } + + if setOnPod[pipelinespec.TaskConfigPassthroughType_KUBERNETES_VOLUMES] { + podSpec.Volumes = append(podSpec.Volumes, secretVolume) + podSpec.Containers[0].VolumeMounts = append(podSpec.Containers[0].VolumeMounts, secretVolumeMount) + } } // Get secret env information for _, secretAsEnv := range kubernetesExecutorConfig.GetSecretAsEnv() { for _, keyToEnv := range secretAsEnv.GetKeyToEnv() { + secretKeySelector := &k8score.SecretKeySelector{ + Key: keyToEnv.GetSecretKey(), + } + + // Set Optional field when explicitly provided (true or false), leave nil when not specified + if secretAsEnv.Optional != nil { + secretKeySelector.Optional = secretAsEnv.Optional + } + secretEnvVar := k8score.EnvVar{ Name: keyToEnv.GetEnvVar(), ValueFrom: &k8score.EnvVarSource{ - SecretKeyRef: &k8score.SecretKeySelector{ - Key: keyToEnv.GetSecretKey(), - }, + SecretKeyRef: secretKeySelector, }, } @@ -298,7 +348,14 @@ func extendPodSpecPatch( } secretEnvVar.ValueFrom.SecretKeyRef.LocalObjectReference.Name = secretName - podSpec.Containers[0].Env = append(podSpec.Containers[0].Env, secretEnvVar) + + if setOnPod[pipelinespec.TaskConfigPassthroughType_ENV] { + podSpec.Containers[0].Env = append(podSpec.Containers[0].Env, secretEnvVar) + } + + if 
setOnTaskConfig[pipelinespec.TaskConfigPassthroughType_ENV] { + taskConfig.Env = append(taskConfig.Env, secretEnvVar) + } } } @@ -334,19 +391,34 @@ func extendPodSpecPatch( Name: configMapName, MountPath: configMapAsVolume.GetMountPath(), } - podSpec.Volumes = append(podSpec.Volumes, configMapVolume) - podSpec.Containers[0].VolumeMounts = append(podSpec.Containers[0].VolumeMounts, configMapVolumeMount) + + if setOnPod[pipelinespec.TaskConfigPassthroughType_KUBERNETES_VOLUMES] { + podSpec.Volumes = append(podSpec.Volumes, configMapVolume) + podSpec.Containers[0].VolumeMounts = append(podSpec.Containers[0].VolumeMounts, configMapVolumeMount) + } + + if setOnTaskConfig[pipelinespec.TaskConfigPassthroughType_KUBERNETES_VOLUMES] { + taskConfig.Volumes = append(taskConfig.Volumes, configMapVolume) + taskConfig.VolumeMounts = append(taskConfig.VolumeMounts, configMapVolumeMount) + } } // Get config map env information for _, configMapAsEnv := range kubernetesExecutorConfig.GetConfigMapAsEnv() { for _, keyToEnv := range configMapAsEnv.GetKeyToEnv() { + configMapKeySelector := &k8score.ConfigMapKeySelector{ + Key: keyToEnv.GetConfigMapKey(), + } + + // Set Optional field when explicitly provided (true or false), leave nil when not specified + if configMapAsEnv.Optional != nil { + configMapKeySelector.Optional = configMapAsEnv.Optional + } + configMapEnvVar := k8score.EnvVar{ Name: keyToEnv.GetEnvVar(), ValueFrom: &k8score.EnvVarSource{ - ConfigMapKeyRef: &k8score.ConfigMapKeySelector{ - Key: keyToEnv.GetConfigMapKey(), - }, + ConfigMapKeyRef: configMapKeySelector, }, } @@ -366,7 +438,14 @@ func extendPodSpecPatch( } configMapEnvVar.ValueFrom.ConfigMapKeyRef.LocalObjectReference.Name = configMapName - podSpec.Containers[0].Env = append(podSpec.Containers[0].Env, configMapEnvVar) + + if setOnPod[pipelinespec.TaskConfigPassthroughType_ENV] { + podSpec.Containers[0].Env = append(podSpec.Containers[0].Env, configMapEnvVar) + } + + if setOnTaskConfig[pipelinespec.TaskConfigPassthroughType_ENV] { + taskConfig.Env = append(taskConfig.Env, configMapEnvVar) + } } } @@ -405,7 +484,14 @@ func extendPodSpecPatch( }, }, } - podSpec.Containers[0].Env = append(podSpec.Containers[0].Env, fieldPathEnvVar) + + if setOnTaskConfig[pipelinespec.TaskConfigPassthroughType_ENV] { + taskConfig.Env = append(taskConfig.Env, fieldPathEnvVar) + } + + if setOnPod[pipelinespec.TaskConfigPassthroughType_ENV] { + podSpec.Containers[0].Env = append(podSpec.Containers[0].Env, fieldPathEnvVar) + } } // Get container timeout information @@ -452,6 +538,8 @@ func extendPodSpecPatch( Name: ephemeralVolumeSpec.GetVolumeName(), MountPath: ephemeralVolumeSpec.GetMountPath(), } + + // Ephemeral volumes don't apply for passthrough. podSpec.Volumes = append(podSpec.Volumes, ephemeralVolume) podSpec.Containers[0].VolumeMounts = append(podSpec.Containers[0].VolumeMounts, ephemeralVolumeMount) } @@ -478,10 +566,104 @@ func extendPodSpecPatch( MountPath: emptyDirVolumeSpec.GetMountPath(), } + // EmptyDirMounts don't apply for passthrough. 
podSpec.Volumes = append(podSpec.Volumes, emptyDirVolume) podSpec.Containers[0].VolumeMounts = append(podSpec.Containers[0].VolumeMounts, emptyDirVolumeMount) } + // Get node affinity information + if nodeAffinityTerms := kubernetesExecutorConfig.GetNodeAffinity(); len(nodeAffinityTerms) > 0 { + var requiredTerms []k8score.NodeSelectorTerm + var preferredTerms []k8score.PreferredSchedulingTerm + + for i, nodeAffinityTerm := range nodeAffinityTerms { + if nodeAffinityTerm.GetNodeAffinityJson() == nil && + len(nodeAffinityTerm.GetMatchExpressions()) == 0 && + len(nodeAffinityTerm.GetMatchFields()) == 0 { + glog.Warningf("NodeAffinityTerm %d is empty, skipping", i) + continue + } + if nodeAffinityTerm.GetNodeAffinityJson() != nil { + var k8sNodeAffinity json.RawMessage + err := resolveK8sJsonParameter(ctx, opts, dag, pipeline, mlmd, + nodeAffinityTerm.GetNodeAffinityJson(), inputParams, &k8sNodeAffinity) + if err != nil { + return fmt.Errorf("failed to resolve node affinity json: %w", err) + } + + var nodeAffinity k8score.NodeAffinity + if err := json.Unmarshal(k8sNodeAffinity, &nodeAffinity); err != nil { + return fmt.Errorf("failed to unmarshal node affinity json: %w", err) + } + + if nodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution != nil { + requiredTerms = append(requiredTerms, nodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms...) + } + preferredTerms = append(preferredTerms, nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution...) + } else { + nodeSelectorTerm := k8score.NodeSelectorTerm{} + + for _, expr := range nodeAffinityTerm.GetMatchExpressions() { + nodeSelectorRequirement := k8score.NodeSelectorRequirement{ + Key: expr.GetKey(), + Operator: k8score.NodeSelectorOperator(expr.GetOperator()), + Values: expr.GetValues(), + } + nodeSelectorTerm.MatchExpressions = append(nodeSelectorTerm.MatchExpressions, nodeSelectorRequirement) + } + + for _, field := range nodeAffinityTerm.GetMatchFields() { + nodeSelectorRequirement := k8score.NodeSelectorRequirement{ + Key: field.GetKey(), + Operator: k8score.NodeSelectorOperator(field.GetOperator()), + Values: field.GetValues(), + } + nodeSelectorTerm.MatchFields = append(nodeSelectorTerm.MatchFields, nodeSelectorRequirement) + } + + if nodeAffinityTerm.Weight != nil { + preferredTerms = append(preferredTerms, k8score.PreferredSchedulingTerm{ + Weight: *nodeAffinityTerm.Weight, + Preference: nodeSelectorTerm, + }) + glog.V(4).Infof("Added preferred node affinity: %+v", nodeSelectorTerm) + } else { + requiredTerms = append(requiredTerms, nodeSelectorTerm) + glog.V(4).Infof("Added required node affinity: %+v", nodeSelectorTerm) + } + + } + } + + if len(requiredTerms) > 0 || len(preferredTerms) > 0 { + k8sNodeAffinity := &k8score.NodeAffinity{} + if len(requiredTerms) > 0 { + k8sNodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution = &k8score.NodeSelector{ + NodeSelectorTerms: requiredTerms, + } + } + if len(preferredTerms) > 0 { + k8sNodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution = preferredTerms + } + + if setOnTaskConfig[pipelinespec.TaskConfigPassthroughType_KUBERNETES_AFFINITY] { + if taskConfig.Affinity == nil { + taskConfig.Affinity = &k8score.Affinity{} + } + + taskConfig.Affinity.NodeAffinity = k8sNodeAffinity + } + + if setOnPod[pipelinespec.TaskConfigPassthroughType_KUBERNETES_AFFINITY] { + if podSpec.Affinity == nil { + podSpec.Affinity = &k8score.Affinity{} + } + + podSpec.Affinity.NodeAffinity = k8sNodeAffinity + } + } + } + return nil } @@ -571,7 +753,7 @@ func 
createPVC( // Get execution fingerprint and MLMD ID for caching // If pvcName includes a randomly generated UUID, it is added in the execution input as a key-value pair for this purpose only // The original execution is not changed. - fingerPrint, cachedMLMDExecutionID, err := getFingerPrintsAndID(&execution, opts, cacheClient) + fingerPrint, cachedMLMDExecutionID, err := getFingerPrintsAndID(&execution, opts, cacheClient, nil) if err != nil { return "", createdExecution, pb.Execution_FAILED, err } @@ -661,7 +843,6 @@ func deletePVC( mlmd *metadata.Client, ecfg *metadata.ExecutionConfig, ) (createdExecution *metadata.Execution, status pb.Execution_State, err error) { - // Create execution regardless the operation succeeds or not defer func() { if createdExecution == nil { @@ -688,7 +869,7 @@ func deletePVC( // Get execution fingerprint and MLMD ID for caching // If pvcName includes a randomly generated UUID, it is added in the execution input as a key-value pair for this purpose only // The original execution is not changed. - fingerPrint, cachedMLMDExecutionID, err := getFingerPrintsAndID(&execution, opts, cacheClient) + fingerPrint, cachedMLMDExecutionID, err := getFingerPrintsAndID(&execution, opts, cacheClient, nil) if err != nil { return createdExecution, pb.Execution_FAILED, err } diff --git a/backend/src/v2/driver/k8s_test.go b/backend/src/v2/driver/k8s_test.go index 4b275ecad20..3089a966a35 100644 --- a/backend/src/v2/driver/k8s_test.go +++ b/backend/src/v2/driver/k8s_test.go @@ -214,6 +214,8 @@ func Test_makePodSpecPatch_nodeSelector(t *testing.T) { Name: "main", }, }} + taskConfig := &TaskConfig{} + err := extendPodSpecPatch( context.Background(), got, @@ -222,10 +224,12 @@ func Test_makePodSpecPatch_nodeSelector(t *testing.T) { nil, nil, tt.inputParams, + taskConfig, ) assert.Nil(t, err) assert.NotNil(t, got) assert.Equal(t, tt.expected, got) + assert.Empty(t, taskConfig.NodeSelector) }) } } @@ -598,9 +602,99 @@ func Test_extendPodSpecPatch_Secret(t *testing.T) { "param_1": structpb.NewStringValue("secret-name"), }, }, + { + "Valid - secret as env with optional true", + &kubernetesplatform.KubernetesExecutorConfig{ + SecretAsEnv: []*kubernetesplatform.SecretAsEnv{ + { + SecretNameParameter: inputParamConstant("my-secret"), + KeyToEnv: []*kubernetesplatform.SecretAsEnv_SecretKeyToEnvMap{ + { + SecretKey: "password", + EnvVar: "SECRET_VAR", + }, + }, + Optional: &[]bool{true}[0], + }, + }, + }, + &k8score.PodSpec{ + Containers: []k8score.Container{ + { + Name: "main", + }, + }, + }, + &k8score.PodSpec{ + Containers: []k8score.Container{ + { + Name: "main", + Env: []k8score.EnvVar{ + { + Name: "SECRET_VAR", + ValueFrom: &k8score.EnvVarSource{ + SecretKeyRef: &k8score.SecretKeySelector{ + LocalObjectReference: k8score.LocalObjectReference{Name: "my-secret"}, + Key: "password", + Optional: &[]bool{true}[0], + }, + }, + }, + }, + }, + }, + }, + nil, + }, + { + "Valid - secret as env with optional false", + &kubernetesplatform.KubernetesExecutorConfig{ + SecretAsEnv: []*kubernetesplatform.SecretAsEnv{ + { + SecretNameParameter: inputParamConstant("my-secret"), + KeyToEnv: []*kubernetesplatform.SecretAsEnv_SecretKeyToEnvMap{ + { + SecretKey: "password", + EnvVar: "SECRET_VAR", + }, + }, + Optional: &[]bool{false}[0], + }, + }, + }, + &k8score.PodSpec{ + Containers: []k8score.Container{ + { + Name: "main", + }, + }, + }, + &k8score.PodSpec{ + Containers: []k8score.Container{ + { + Name: "main", + Env: []k8score.EnvVar{ + { + Name: "SECRET_VAR", + ValueFrom: &k8score.EnvVarSource{ + 
SecretKeyRef: &k8score.SecretKeySelector{ + LocalObjectReference: k8score.LocalObjectReference{Name: "my-secret"}, + Key: "password", + Optional: &[]bool{false}[0], + }, + }, + }, + }, + }, + }, + }, + nil, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { + taskConfig := &TaskConfig{} + err := extendPodSpecPatch( context.Background(), tt.podSpec, @@ -609,9 +703,14 @@ func Test_extendPodSpecPatch_Secret(t *testing.T) { nil, nil, tt.inputParams, + taskConfig, ) assert.Nil(t, err) assert.Equal(t, tt.expected, tt.podSpec) + + assert.Empty(t, taskConfig.Volumes) + assert.Empty(t, taskConfig.VolumeMounts) + assert.Empty(t, taskConfig.Env) }) } } @@ -703,7 +802,8 @@ func Test_extendPodSpecPatch_ConfigMap(t *testing.T) { VolumeSource: k8score.VolumeSource{ ConfigMap: &k8score.ConfigMapVolumeSource{ LocalObjectReference: k8score.LocalObjectReference{Name: "cm1"}, - Optional: &[]bool{false}[0]}, + Optional: &[]bool{false}[0], + }, }, }, }, @@ -856,7 +956,8 @@ func Test_extendPodSpecPatch_ConfigMap(t *testing.T) { VolumeSource: k8score.VolumeSource{ ConfigMap: &k8score.ConfigMapVolumeSource{ LocalObjectReference: k8score.LocalObjectReference{Name: "cm-name"}, - Optional: &[]bool{true}[0]}, + Optional: &[]bool{true}[0], + }, }, }, }, @@ -998,9 +1099,99 @@ func Test_extendPodSpecPatch_ConfigMap(t *testing.T) { "param_1": structpb.NewStringValue("cm-name"), }, }, + { + "Valid - config map as env with optional true", + &kubernetesplatform.KubernetesExecutorConfig{ + ConfigMapAsEnv: []*kubernetesplatform.ConfigMapAsEnv{ + { + ConfigMapNameParameter: inputParamConstant("my-cm"), + KeyToEnv: []*kubernetesplatform.ConfigMapAsEnv_ConfigMapKeyToEnvMap{ + { + ConfigMapKey: "foo", + EnvVar: "CONFIG_MAP_VAR", + }, + }, + Optional: &[]bool{true}[0], + }, + }, + }, + &k8score.PodSpec{ + Containers: []k8score.Container{ + { + Name: "main", + }, + }, + }, + &k8score.PodSpec{ + Containers: []k8score.Container{ + { + Name: "main", + Env: []k8score.EnvVar{ + { + Name: "CONFIG_MAP_VAR", + ValueFrom: &k8score.EnvVarSource{ + ConfigMapKeyRef: &k8score.ConfigMapKeySelector{ + LocalObjectReference: k8score.LocalObjectReference{Name: "my-cm"}, + Key: "foo", + Optional: &[]bool{true}[0], + }, + }, + }, + }, + }, + }, + }, + nil, + }, + { + "Valid - config map as env with optional false", + &kubernetesplatform.KubernetesExecutorConfig{ + ConfigMapAsEnv: []*kubernetesplatform.ConfigMapAsEnv{ + { + ConfigMapNameParameter: inputParamConstant("my-cm"), + KeyToEnv: []*kubernetesplatform.ConfigMapAsEnv_ConfigMapKeyToEnvMap{ + { + ConfigMapKey: "foo", + EnvVar: "CONFIG_MAP_VAR", + }, + }, + Optional: &[]bool{false}[0], + }, + }, + }, + &k8score.PodSpec{ + Containers: []k8score.Container{ + { + Name: "main", + }, + }, + }, + &k8score.PodSpec{ + Containers: []k8score.Container{ + { + Name: "main", + Env: []k8score.EnvVar{ + { + Name: "CONFIG_MAP_VAR", + ValueFrom: &k8score.EnvVarSource{ + ConfigMapKeyRef: &k8score.ConfigMapKeySelector{ + LocalObjectReference: k8score.LocalObjectReference{Name: "my-cm"}, + Key: "foo", + Optional: &[]bool{false}[0], + }, + }, + }, + }, + }, + }, + }, + nil, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { + taskConfig := &TaskConfig{} + err := extendPodSpecPatch( context.Background(), tt.podSpec, @@ -1009,9 +1200,14 @@ func Test_extendPodSpecPatch_ConfigMap(t *testing.T) { nil, nil, tt.inputParams, + taskConfig, ) assert.Nil(t, err) assert.Equal(t, tt.expected, tt.podSpec) + + assert.Empty(t, taskConfig.Volumes) + assert.Empty(t, taskConfig.VolumeMounts) + 
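Minor style note on the new Optional test cases: the &[]bool{true}[0] construction works but is hard to read; a small helper in the same vein as the existing int64Ptr/int32Ptr helpers at the bottom of this file would be clearer (suggestion only, not part of the diff):

func boolPtr(val bool) *bool {
	return &val
}

// e.g. Optional: boolPtr(true) instead of Optional: &[]bool{true}[0]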
assert.Empty(t, taskConfig.Env) }) } } @@ -1168,6 +1364,8 @@ func Test_extendPodSpecPatch_EmptyVolumeMount(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { + taskConfig := &TaskConfig{} + err := extendPodSpecPatch( context.Background(), tt.podSpec, @@ -1176,9 +1374,13 @@ func Test_extendPodSpecPatch_EmptyVolumeMount(t *testing.T) { nil, nil, map[string]*structpb.Value{}, + taskConfig, ) assert.Nil(t, err) assert.Equal(t, tt.expected, tt.podSpec) + + assert.Empty(t, taskConfig.Volumes) + assert.Empty(t, taskConfig.VolumeMounts) }) } } @@ -1298,6 +1500,7 @@ func Test_extendPodSpecPatch_ImagePullSecrets(t *testing.T) { nil, nil, tt.inputParams, + nil, ) assert.Nil(t, err) assert.NotNil(t, got) @@ -1726,6 +1929,8 @@ func Test_extendPodSpecPatch_Tolerations(t *testing.T) { Name: "main", }, }} + taskConfig := &TaskConfig{} + err := extendPodSpecPatch( context.Background(), got, @@ -1734,10 +1939,13 @@ func Test_extendPodSpecPatch_Tolerations(t *testing.T) { nil, nil, tt.inputParams, + taskConfig, ) assert.Nil(t, err) assert.NotNil(t, got) assert.Equal(t, tt.expected, got) + + assert.Empty(t, taskConfig.Tolerations) }) } } @@ -1828,6 +2036,8 @@ func Test_extendPodSpecPatch_FieldPathAsEnv(t *testing.T) { Name: "main", }, }} + taskConfig := &TaskConfig{} + err := extendPodSpecPatch( context.Background(), got, @@ -1836,10 +2046,13 @@ func Test_extendPodSpecPatch_FieldPathAsEnv(t *testing.T) { nil, nil, map[string]*structpb.Value{}, + taskConfig, ) assert.Nil(t, err) assert.NotNil(t, got) assert.Equal(t, tt.expected, got) + + assert.Empty(t, taskConfig.Env) }) } } @@ -1906,6 +2119,7 @@ func Test_extendPodSpecPatch_ActiveDeadlineSeconds(t *testing.T) { nil, nil, map[string]*structpb.Value{}, + nil, ) assert.Nil(t, err) assert.NotNil(t, got) @@ -1995,6 +2209,7 @@ func Test_extendPodSpecPatch_ImagePullPolicy(t *testing.T) { nil, nil, map[string]*structpb.Value{}, + nil, ) assert.Nil(t, err) assert.Equal(t, tt.expected, tt.podSpec) @@ -2182,6 +2397,8 @@ func Test_extendPodSpecPatch_GenericEphemeralVolume(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { + taskConfig := &TaskConfig{} + err := extendPodSpecPatch( context.Background(), tt.podSpec, @@ -2190,13 +2407,483 @@ func Test_extendPodSpecPatch_GenericEphemeralVolume(t *testing.T) { nil, nil, map[string]*structpb.Value{}, + taskConfig, ) assert.Nil(t, err) assert.Equal(t, tt.expected, tt.podSpec) + + assert.Empty(t, taskConfig.Volumes) + assert.Empty(t, taskConfig.VolumeMounts) }) } } +func Test_extendPodSpecPatch_NodeAffinity(t *testing.T) { + tests := []struct { + name string + k8sExecCfg *kubernetesplatform.KubernetesExecutorConfig + expected *k8score.PodSpec + inputParams map[string]*structpb.Value + }{ + { + "Valid - node affinity with matchExpressions", + &kubernetesplatform.KubernetesExecutorConfig{ + NodeAffinity: []*kubernetesplatform.NodeAffinityTerm{ + { + MatchExpressions: []*kubernetesplatform.SelectorRequirement{ + { + Key: "disktype", + Operator: "In", + Values: []string{"ssd"}, + }, + }, + }, + }, + }, + &k8score.PodSpec{ + Containers: []k8score.Container{{Name: "main"}}, + Affinity: &k8score.Affinity{ + NodeAffinity: &k8score.NodeAffinity{ + RequiredDuringSchedulingIgnoredDuringExecution: &k8score.NodeSelector{ + NodeSelectorTerms: []k8score.NodeSelectorTerm{ + { + MatchExpressions: []k8score.NodeSelectorRequirement{ + { + Key: "disktype", + Operator: k8score.NodeSelectorOpIn, + Values: []string{"ssd"}, + }, + }, + }, + }, + }, + }, + }, + }, + nil, + }, + { + "Valid - 
node affinity with matchFields", + &kubernetesplatform.KubernetesExecutorConfig{ + NodeAffinity: []*kubernetesplatform.NodeAffinityTerm{ + { + MatchFields: []*kubernetesplatform.SelectorRequirement{ + { + Key: "metadata.name", + Operator: "In", + Values: []string{"node-1", "node-2"}, + }, + }, + }, + }, + }, + &k8score.PodSpec{ + Containers: []k8score.Container{{Name: "main"}}, + Affinity: &k8score.Affinity{ + NodeAffinity: &k8score.NodeAffinity{ + RequiredDuringSchedulingIgnoredDuringExecution: &k8score.NodeSelector{ + NodeSelectorTerms: []k8score.NodeSelectorTerm{ + { + MatchFields: []k8score.NodeSelectorRequirement{ + { + Key: "metadata.name", + Operator: k8score.NodeSelectorOpIn, + Values: []string{"node-1", "node-2"}, + }, + }, + }, + }, + }, + }, + }, + }, + nil, + }, + { + "Valid - node affinity with weight (preferred scheduling)", + &kubernetesplatform.KubernetesExecutorConfig{ + NodeAffinity: []*kubernetesplatform.NodeAffinityTerm{ + { + MatchExpressions: []*kubernetesplatform.SelectorRequirement{ + { + Key: "zone", + Operator: "In", + Values: []string{"us-west-1"}, + }, + }, + Weight: int32Ptr(100), + }, + }, + }, + &k8score.PodSpec{ + Containers: []k8score.Container{{Name: "main"}}, + Affinity: &k8score.Affinity{ + NodeAffinity: &k8score.NodeAffinity{ + PreferredDuringSchedulingIgnoredDuringExecution: []k8score.PreferredSchedulingTerm{ + { + Weight: 100, + Preference: k8score.NodeSelectorTerm{ + MatchExpressions: []k8score.NodeSelectorRequirement{ + { + Key: "zone", + Operator: k8score.NodeSelectorOpIn, + Values: []string{"us-west-1"}, + }, + }, + }, + }, + }, + }, + }, + }, + nil, + }, + { + "Valid - node affinity with nodeAffinityJson", + &kubernetesplatform.KubernetesExecutorConfig{ + NodeAffinity: []*kubernetesplatform.NodeAffinityTerm{ + { + NodeAffinityJson: structInputParamConstant(map[string]interface{}{ + "requiredDuringSchedulingIgnoredDuringExecution": map[string]interface{}{ + "nodeSelectorTerms": []interface{}{ + map[string]interface{}{ + "matchExpressions": []interface{}{ + map[string]interface{}{ + "key": "disktype", + "operator": "In", + "values": []interface{}{"ssd"}, + }, + }, + }, + }, + }, + }), + }, + }, + }, + &k8score.PodSpec{ + Containers: []k8score.Container{{Name: "main"}}, + Affinity: &k8score.Affinity{ + NodeAffinity: &k8score.NodeAffinity{ + RequiredDuringSchedulingIgnoredDuringExecution: &k8score.NodeSelector{ + NodeSelectorTerms: []k8score.NodeSelectorTerm{ + { + MatchExpressions: []k8score.NodeSelectorRequirement{ + { + Key: "disktype", + Operator: k8score.NodeSelectorOpIn, + Values: []string{"ssd"}, + }, + }, + }, + }, + }, + }, + }, + }, + nil, + }, + { + "Valid - node affinity with nodeAffinityJson containing preferred scheduling", + &kubernetesplatform.KubernetesExecutorConfig{ + NodeAffinity: []*kubernetesplatform.NodeAffinityTerm{ + { + NodeAffinityJson: structInputParamConstant(map[string]interface{}{ + "preferredDuringSchedulingIgnoredDuringExecution": []interface{}{ + map[string]interface{}{ + "weight": 100, + "preference": map[string]interface{}{ + "matchExpressions": []interface{}{ + map[string]interface{}{ + "key": "zone", + "operator": "In", + "values": []interface{}{"us-west-1"}, + }, + }, + }, + }, + }, + }), + }, + }, + }, + &k8score.PodSpec{ + Containers: []k8score.Container{{Name: "main"}}, + Affinity: &k8score.Affinity{ + NodeAffinity: &k8score.NodeAffinity{ + PreferredDuringSchedulingIgnoredDuringExecution: []k8score.PreferredSchedulingTerm{ + { + Weight: 100, + Preference: k8score.NodeSelectorTerm{ + MatchExpressions: 
[]k8score.NodeSelectorRequirement{ + { + Key: "zone", + Operator: k8score.NodeSelectorOpIn, + Values: []string{"us-west-1"}, + }, + }, + }, + }, + }, + }, + }, + }, + nil, + }, + { + "Valid - empty nodeAffinityJson", + &kubernetesplatform.KubernetesExecutorConfig{ + NodeAffinity: []*kubernetesplatform.NodeAffinityTerm{ + { + NodeAffinityJson: structInputParamConstant(map[string]interface{}{}), + }, + }, + }, + &k8score.PodSpec{ + Containers: []k8score.Container{{Name: "main"}}, + // No affinity should be set when JSON is empty + }, + nil, + }, + { + "Valid - node affinity with matchExpressions and matchFields combined", + &kubernetesplatform.KubernetesExecutorConfig{ + NodeAffinity: []*kubernetesplatform.NodeAffinityTerm{ + { + MatchExpressions: []*kubernetesplatform.SelectorRequirement{ + { + Key: "disktype", + Operator: "In", + Values: []string{"ssd"}, + }, + }, + MatchFields: []*kubernetesplatform.SelectorRequirement{ + { + Key: "metadata.name", + Operator: "In", + Values: []string{"node-1"}, + }, + }, + }, + }, + }, + &k8score.PodSpec{ + Containers: []k8score.Container{{Name: "main"}}, + Affinity: &k8score.Affinity{ + NodeAffinity: &k8score.NodeAffinity{ + RequiredDuringSchedulingIgnoredDuringExecution: &k8score.NodeSelector{ + NodeSelectorTerms: []k8score.NodeSelectorTerm{ + { + MatchExpressions: []k8score.NodeSelectorRequirement{ + { + Key: "disktype", + Operator: k8score.NodeSelectorOpIn, + Values: []string{"ssd"}, + }, + }, + MatchFields: []k8score.NodeSelectorRequirement{ + { + Key: "metadata.name", + Operator: k8score.NodeSelectorOpIn, + Values: []string{"node-1"}, + }, + }, + }, + }, + }, + }, + }, + }, + nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := &k8score.PodSpec{Containers: []k8score.Container{{Name: "main"}}} + taskConfig := &TaskConfig{} + + err := extendPodSpecPatch( + context.Background(), + got, + Options{KubernetesExecutorConfig: tt.k8sExecCfg}, + nil, + nil, + nil, + tt.inputParams, + taskConfig, + ) + assert.NoError(t, err) + + if tt.expected.Affinity != nil { + assert.NotNil(t, got.Affinity) + assert.NotNil(t, got.Affinity.NodeAffinity) + + if tt.expected.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution != nil { + assert.Equal(t, tt.expected.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution, got.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution) + } + + if tt.expected.Affinity.NodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution != nil { + assert.Equal(t, tt.expected.Affinity.NodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution, got.Affinity.NodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution) + } + } else { + // For empty JSON case, affinity should not be set + assert.Nil(t, got.Affinity) + } + + assert.Empty(t, taskConfig.Affinity) + }) + } +} + +func Test_extendPodSpecPatch_TaskConfig_CapturesAndApplies(t *testing.T) { + podSpec := &k8score.PodSpec{Containers: []k8score.Container{{Name: "main"}}} + cfg := &kubernetesplatform.KubernetesExecutorConfig{ + NodeSelector: &kubernetesplatform.NodeSelector{Labels: map[string]string{"disktype": "ssd"}}, + Tolerations: []*kubernetesplatform.Toleration{{ + Key: "example-key", + Operator: "Exists", + Effect: "NoExecute", + TolerationSeconds: int64Ptr(3600), + }}, + SecretAsVolume: []*kubernetesplatform.SecretAsVolume{{ + SecretName: "secret1", + MountPath: "/data/secret", + }}, + PvcMount: []*kubernetesplatform.PvcMount{{ + MountPath: "/data", + PvcNameParameter: 
inputParamConstant("kubernetes-task-config-pvc"), + }}, + SecretAsEnv: []*kubernetesplatform.SecretAsEnv{{ + SecretName: "my-secret", + KeyToEnv: []*kubernetesplatform.SecretAsEnv_SecretKeyToEnvMap{{ + SecretKey: "password", + EnvVar: "SECRET_VAR", + }}, + }}, + FieldPathAsEnv: []*kubernetesplatform.FieldPathAsEnv{{ + Name: "KFP_RUN_NAME", + FieldPath: "metadata.annotations['pipelines.kubeflow.org/run_name']", + }}, + NodeAffinity: []*kubernetesplatform.NodeAffinityTerm{{ + MatchExpressions: []*kubernetesplatform.SelectorRequirement{{ + Key: "disktype", + Operator: "In", + Values: []string{"ssd"}, + }}, + }}, + } + + // Configure passthroughs according to expectations: + // - Volumes and Env: capture and apply to pod (apply_to_task=true) + // - NodeSelector, Tolerations, Affinity: capture only (apply_to_task=false) + comp := &pipelinespec.ComponentSpec{ + TaskConfigPassthroughs: []*pipelinespec.TaskConfigPassthrough{ + {Field: pipelinespec.TaskConfigPassthroughType_KUBERNETES_VOLUMES, ApplyToTask: true}, + {Field: pipelinespec.TaskConfigPassthroughType_ENV, ApplyToTask: true}, + {Field: pipelinespec.TaskConfigPassthroughType_KUBERNETES_NODE_SELECTOR, ApplyToTask: false}, + {Field: pipelinespec.TaskConfigPassthroughType_KUBERNETES_TOLERATIONS, ApplyToTask: false}, + {Field: pipelinespec.TaskConfigPassthroughType_KUBERNETES_AFFINITY, ApplyToTask: false}, + }, + } + + taskCfg := &TaskConfig{} + err := extendPodSpecPatch( + context.Background(), + podSpec, + Options{KubernetesExecutorConfig: cfg, Component: comp}, + nil, + nil, + nil, + map[string]*structpb.Value{}, + taskCfg, + ) + assert.NoError(t, err) + + assert.Nil(t, podSpec.NodeSelector) + assert.Len(t, podSpec.Tolerations, 0) + assert.Empty(t, podSpec.Containers[0].Resources.Limits) + assert.Empty(t, podSpec.Containers[0].Resources.Requests) + + if assert.GreaterOrEqual(t, len(podSpec.Containers[0].VolumeMounts), 1) { + foundSecretMount := false + foundPvcMount := false + for _, m := range podSpec.Containers[0].VolumeMounts { + if m.Name == "secret1" && m.MountPath == "/data/secret" { + foundSecretMount = true + } + if m.Name == "kubernetes-task-config-pvc" && m.MountPath == "/data" { + foundPvcMount = true + } + } + + assert.True(t, foundSecretMount) + assert.True(t, foundPvcMount) + } + + foundSecretEnv := false + foundFieldPathEnv := false + for _, e := range podSpec.Containers[0].Env { + if e.Name == "SECRET_VAR" && e.ValueFrom != nil && e.ValueFrom.SecretKeyRef != nil { + if e.ValueFrom.SecretKeyRef.Name == "my-secret" && e.ValueFrom.SecretKeyRef.Key == "password" { + foundSecretEnv = true + } + } + if e.Name == "KFP_RUN_NAME" && e.ValueFrom != nil && e.ValueFrom.FieldRef != nil { + if e.ValueFrom.FieldRef.FieldPath == "metadata.annotations['pipelines.kubeflow.org/run_name']" { + foundFieldPathEnv = true + } + } + } + + assert.True(t, foundSecretEnv) + assert.True(t, foundFieldPathEnv) + + assert.Equal(t, map[string]string{"disktype": "ssd"}, taskCfg.NodeSelector) + assert.Len(t, taskCfg.Tolerations, 1) + assert.Equal(t, "example-key", taskCfg.Tolerations[0].Key) + assert.Equal(t, "NoExecute", string(taskCfg.Tolerations[0].Effect)) + assert.Equal(t, int64(3600), *taskCfg.Tolerations[0].TolerationSeconds) + + if assert.NotEmpty(t, taskCfg.Volumes) && assert.NotEmpty(t, taskCfg.VolumeMounts) { + foundSecretVol := false + foundPvcVol := false + for _, v := range taskCfg.Volumes { + if v.Name == "secret1" && v.Secret != nil { + foundSecretVol = true + } + if v.Name == "kubernetes-task-config-pvc" && v.PersistentVolumeClaim != nil && 
v.PersistentVolumeClaim.ClaimName == "kubernetes-task-config-pvc" { + foundPvcVol = true + } + } + assert.True(t, foundSecretVol) + assert.True(t, foundPvcVol) + } + + foundSecretEnv = false + foundFieldPathEnv = false + for _, e := range taskCfg.Env { + if e.Name == "SECRET_VAR" && e.ValueFrom != nil && e.ValueFrom.SecretKeyRef != nil { + if e.ValueFrom.SecretKeyRef.Name == "my-secret" && e.ValueFrom.SecretKeyRef.Key == "password" { + foundSecretEnv = true + } + } + if e.Name == "KFP_RUN_NAME" && e.ValueFrom != nil && e.ValueFrom.FieldRef != nil { + if e.ValueFrom.FieldRef.FieldPath == "metadata.annotations['pipelines.kubeflow.org/run_name']" { + foundFieldPathEnv = true + } + } + } + assert.True(t, foundSecretEnv) + assert.True(t, foundFieldPathEnv) + + if assert.NotNil(t, taskCfg.Affinity) && assert.NotNil(t, taskCfg.Affinity.NodeAffinity) { + if assert.NotNil(t, taskCfg.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution) { + assert.Greater(t, len(taskCfg.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms), 0) + } + } +} + func validListOfStructsOrPanic(data []map[string]interface{}) *structpb.Value { var listValues []*structpb.Value for _, item := range data { @@ -2232,3 +2919,74 @@ func structInputParamConstant(value map[string]interface{}) *pipelinespec.TaskIn func int64Ptr(val int64) *int64 { return &val } + +func int32Ptr(val int32) *int32 { + return &val +} + +func Test_extendPodSpecPatch_PvcMounts_Passthrough_NotAppliedToPod(t *testing.T) { + podSpec := &k8score.PodSpec{Containers: []k8score.Container{{Name: "main"}}} + cfg := &kubernetesplatform.KubernetesExecutorConfig{ + PvcMount: []*kubernetesplatform.PvcMount{{ + MountPath: "/data", + PvcNameParameter: inputParamConstant("my-pvc"), + }}, + } + comp := &pipelinespec.ComponentSpec{ + TaskConfigPassthroughs: []*pipelinespec.TaskConfigPassthrough{{ + Field: pipelinespec.TaskConfigPassthroughType_KUBERNETES_VOLUMES, + ApplyToTask: false, + }}, + } + taskCfg := &TaskConfig{} + err := extendPodSpecPatch( + context.Background(), + podSpec, + Options{KubernetesExecutorConfig: cfg, Component: comp}, + nil, + nil, + nil, + map[string]*structpb.Value{}, + taskCfg, + ) + assert.NoError(t, err) + + assert.Empty(t, podSpec.Volumes) + assert.Empty(t, podSpec.Containers[0].VolumeMounts) + + assert.NotEmpty(t, taskCfg.Volumes) + assert.NotEmpty(t, taskCfg.VolumeMounts) +} + +func Test_extendPodSpecPatch_PvcMounts_Passthrough_AppliedToPod(t *testing.T) { + podSpec := &k8score.PodSpec{Containers: []k8score.Container{{Name: "main"}}} + cfg := &kubernetesplatform.KubernetesExecutorConfig{ + PvcMount: []*kubernetesplatform.PvcMount{{ + MountPath: "/data", + PvcNameParameter: inputParamConstant("my-pvc"), + }}, + } + comp := &pipelinespec.ComponentSpec{ + TaskConfigPassthroughs: []*pipelinespec.TaskConfigPassthrough{{ + Field: pipelinespec.TaskConfigPassthroughType_KUBERNETES_VOLUMES, + ApplyToTask: true, + }}, + } + taskCfg := &TaskConfig{} + err := extendPodSpecPatch( + context.Background(), + podSpec, + Options{KubernetesExecutorConfig: cfg, Component: comp}, + nil, + nil, + nil, + map[string]*structpb.Value{}, + taskCfg, + ) + assert.NoError(t, err) + + assert.NotEmpty(t, podSpec.Volumes) + assert.NotEmpty(t, podSpec.Containers[0].VolumeMounts) + assert.NotEmpty(t, taskCfg.Volumes) + assert.NotEmpty(t, taskCfg.VolumeMounts) +} diff --git a/backend/src/v2/driver/resolve.go b/backend/src/v2/driver/resolve.go index 7d6f9340083..6059d9bb282 100644 --- a/backend/src/v2/driver/resolve.go +++ 
b/backend/src/v2/driver/resolve.go @@ -19,16 +19,19 @@ import ( "encoding/json" "errors" "fmt" + "strings" "github.com/golang/glog" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + "github.com/kubeflow/pipelines/backend/src/v2/component" "github.com/kubeflow/pipelines/backend/src/v2/expression" "github.com/kubeflow/pipelines/backend/src/v2/metadata" + "google.golang.org/genproto/googleapis/rpc/status" "google.golang.org/protobuf/encoding/protojson" "google.golang.org/protobuf/types/known/structpb" ) -var ErrResolvedParameterNull = errors.New("the resolvead input parameter is null") +var ErrResolvedParameterNull = errors.New("the resolved input parameter is null") // resolveUpstreamOutputsConfig is just a config struct used to store the input // parameters of the resolveUpstreamParameters and resolveUpstreamArtifacts @@ -62,7 +65,6 @@ func getDAGTasks( flattenedTasks[k] = v } for _, v := range currentExecutionTasks { - if v.GetExecution().GetType() == "system.DAGExecution" { _, ok := v.GetExecution().GetCustomProperties()["iteration_count"] if ok { @@ -141,6 +143,9 @@ func resolveInputs( } return nil } + // Track parameters set to nil by the driver (for the case in which optional pipeline input parameters are not + // included, and default value is nil). + parametersSetNilByDriver := map[string]bool{} handleParamTypeValidationAndConversion := func() error { // TODO(Bobgy): verify whether there are inputs not in the inputs spec. for name, spec := range inputsSpec.GetParameters() { @@ -178,6 +183,10 @@ func resolveInputs( case pipelinespec.ParameterType_STRING: _, isValueString := value.GetKind().(*structpb.Value_StringValue) if !isValueString { + // If parameter was set to nil by driver, allow input parameter to have a nil value. + if parametersSetNilByDriver[name] { + continue + } // TODO(Bobgy): discuss whether we want to allow auto type conversion // all parameter types can be consumed as JSON string text, err := metadata.PbValueToText(value) @@ -192,6 +201,10 @@ func resolveInputs( } switch v := value.GetKind().(type) { case *structpb.Value_NullValue: + // If parameter was set to nil by driver, allow input parameter to have a nil value. + if parametersSetNilByDriver[name] { + continue + } return fmt.Errorf("got null for input parameter %q", name) case *structpb.Value_StringValue: // TODO(Bobgy): consider whether we support parsing string as JSON for any other types. @@ -211,7 +224,7 @@ func resolveInputs( return typeMismatch("list") } case *structpb.Value_StructValue: - if spec.GetParameterType() != pipelinespec.ParameterType_STRUCT { + if (spec.GetParameterType() != pipelinespec.ParameterType_STRUCT) && (spec.GetParameterType() != pipelinespec.ParameterType_TASK_FINAL_STATUS) && (spec.GetParameterType() != pipelinespec.ParameterType_TASK_CONFIG) { return typeMismatch("struct") } default: @@ -268,9 +281,35 @@ func resolveInputs( } return inputs, nil } - + // A DAG driver (not Root DAG driver) indicates this is likely the start of a nested pipeline. + // Handle omitted optional pipeline input parameters similar to how they are handled on the root pipeline. + isDagDriver := opts.DriverType == "DAG" + if isDagDriver { + for name, paramSpec := range opts.Component.GetInputDefinitions().GetParameters() { + _, ok := task.Inputs.GetParameters()[name] + if !ok && paramSpec.IsOptional { + if paramSpec.GetDefaultValue() != nil { + // If no value was input, pass along the default value to the component. 
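+                                               // For example, an optional integer input "threshold" declared with a default of 5 resolves to 5 here even though the parent task omitted it (the name is illustrative).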
+ inputs.ParameterValues[name] = paramSpec.GetDefaultValue() + } else { + // If no default value is set, pass along the null value to the component. + // This is analogous to a pipeline run being submitted without optional pipeline input parameters. + inputs.ParameterValues[name] = structpb.NewNullValue() + parametersSetNilByDriver[name] = true + } + } + } + } // Handle parameters. for name, paramSpec := range task.GetInputs().GetParameters() { + if compParam := opts.Component.GetInputDefinitions().GetParameters()[name]; compParam != nil { + // Skip resolving dsl.TaskConfig because that information is only available after initPodSpecPatch and + // extendPodSpecPatch are called. + if compParam.GetParameterType() == pipelinespec.ParameterType_TASK_CONFIG { + continue + } + } + v, err := resolveInputParameter(ctx, dag, pipeline, opts, mlmd, paramSpec, inputParams) if err != nil { if !errors.Is(err, ErrResolvedParameterNull) { @@ -358,8 +397,15 @@ func resolveInputParameter( switch t := runtimeValue.Value.(type) { case *pipelinespec.ValueOrRuntimeParameter_Constant: val := runtimeValue.GetConstant() + valStr := val.GetStringValue() var v *structpb.Value - switch val.GetStringValue() { + + if strings.Contains(valStr, "{{$.workspace_path}}") { + v = structpb.NewStringValue(strings.ReplaceAll(valStr, "{{$.workspace_path}}", component.WorkspaceMountPath)) + return v, nil + } + + switch valStr { case "{{$.pipeline_job_name}}": v = structpb.NewStringValue(opts.RunDisplayName) case "{{$.pipeline_job_resource_name}}": @@ -367,7 +413,7 @@ func resolveInputParameter( case "{{$.pipeline_job_uuid}}": v = structpb.NewStringValue(opts.RunID) case "{{$.pipeline_task_name}}": - v = structpb.NewStringValue(opts.Task.GetTaskInfo().GetName()) + v = structpb.NewStringValue(opts.TaskName) case "{{$.pipeline_task_uuid}}": v = structpb.NewStringValue(fmt.Sprintf("%d", opts.DAGExecutionID)) default: @@ -378,8 +424,42 @@ func resolveInputParameter( default: return nil, paramError(fmt.Errorf("param runtime value spec of type %T not implemented", t)) } - // TODO(Bobgy): implement the following cases - // case *pipelinespec.TaskInputsSpec_InputParameterSpec_TaskFinalStatus_: + case *pipelinespec.TaskInputsSpec_InputParameterSpec_TaskFinalStatus_: + tasks, err := getDAGTasks(ctx, dag, pipeline, mlmd, nil) + if err != nil { + return nil, err + } + + if len(opts.Task.DependentTasks) < 1 { + return nil, fmt.Errorf("task %v has no dependent tasks", opts.Task.TaskInfo.GetName()) + } + producer, ok := tasks[metadata.GetTaskNameWithDagID(opts.Task.DependentTasks[0], dag.Execution.GetID())] + if !ok { + return nil, fmt.Errorf("producer task, %v, not in tasks", producer.TaskName()) + } + finalStatus := pipelinespec.PipelineTaskFinalStatus{ + State: producer.GetExecution().GetLastKnownState().String(), + PipelineTaskName: producer.TaskName(), + PipelineJobResourceName: opts.RunName, + // TODO: Implement fields "Message and "Code" below for Error status. 
+ Error: &status.Status{}, + } + finalStatusJSON, err := protojson.Marshal(&finalStatus) + if err != nil { + return nil, fmt.Errorf("failed to marshal PipelineTaskFinalStatus: %w", err) + } + + var finalStatusMap map[string]interface{} + if err := json.Unmarshal(finalStatusJSON, &finalStatusMap); err != nil { + return nil, fmt.Errorf("failed to unmarshal JSON of PipelineTaskFinalStatus: %w", err) + } + + finalStatusStruct, err := structpb.NewStruct(finalStatusMap) + if err != nil { + return nil, fmt.Errorf("failed to create structpb.Struct: %w", err) + } + + return structpb.NewStructValue(finalStatusStruct), nil default: return nil, paramError(fmt.Errorf("parameter spec of type %T not implemented yet", t)) } diff --git a/backend/src/v2/expression/expression.go b/backend/src/v2/expression/expression.go index 9ef73fd3bd1..440a58c5a3d 100644 --- a/backend/src/v2/expression/expression.go +++ b/backend/src/v2/expression/expression.go @@ -173,7 +173,7 @@ func celParseJson(arg ref.Val) ref.Val { var result interface{} err := json.Unmarshal([]byte(str), &result) if err != nil { - return types.NewErr(fmt.Sprintf("failed to unmarshal JSON: %s", err.Error())) + return types.NewErr("%s", fmt.Sprintf("failed to unmarshal JSON: %s", err.Error())) } return types.DefaultTypeAdapter.NativeToValue(result) } diff --git a/backend/src/v2/metadata/client.go b/backend/src/v2/metadata/client.go index 2bebef85b76..961affc0326 100644 --- a/backend/src/v2/metadata/client.go +++ b/backend/src/v2/metadata/client.go @@ -31,16 +31,15 @@ import ( "time" "github.com/kubeflow/pipelines/backend/src/common/util" - "gopkg.in/yaml.v3" - "github.com/kubeflow/pipelines/backend/src/v2/objectstore" "google.golang.org/grpc/credentials" "google.golang.org/grpc/credentials/insecure" - "github.com/golang/glog" - grpc_retry "github.com/grpc-ecosystem/go-grpc-middleware/retry" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + + "github.com/golang/glog" + grpc_retry "github.com/grpc-ecosystem/go-grpc-middleware/v2/interceptors/retry" pb "github.com/kubeflow/pipelines/third_party/ml-metadata/go/ml_metadata" "google.golang.org/grpc" "google.golang.org/grpc/codes" @@ -48,6 +47,7 @@ import ( "google.golang.org/protobuf/encoding/protojson" "google.golang.org/protobuf/proto" "google.golang.org/protobuf/types/known/structpb" + "gopkg.in/yaml.v3" ) const ( @@ -161,6 +161,7 @@ func NewClient(serverAddress, serverPort string, tlsEnabled bool, caCertPath str // ExecutionConfig represents the input parameters and artifacts to an Execution. type ExecutionConfig struct { TaskName string + DisplayName string // optional, MLMD execution display name. Name string // optional, MLMD execution name. When provided, this needs to be unique among all MLMD executions. ExecutionType ExecutionType NotTriggered bool // optional, not triggered executions will have CANCELED state. @@ -588,8 +589,7 @@ func (c *Client) CreateExecution(ctx context.Context, pipeline *Pipeline, config e := &pb.Execution{ TypeId: &typeID, CustomProperties: map[string]*pb.Value{ - // We should support overriding display name in the future, for now it defaults to task name. 
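The ExecutionConfig struct in this hunk gains a DisplayName field, and CreateExecution now reads it instead of reusing TaskName. A minimal caller-side sketch, assuming only the metadata package shown here; executionConfigFor and its arguments are illustrative, and the TaskName fallback is an assumption that mirrors the old default rather than confirmed driver behaviour:

func executionConfigFor(taskName, displayName string) *metadata.ExecutionConfig {
	ecfg := &metadata.ExecutionConfig{
		TaskName:    taskName,
		DisplayName: displayName,
	}
	if ecfg.DisplayName == "" {
		// Fall back to the task name so the MLMD display name matches the previous default.
		ecfg.DisplayName = ecfg.TaskName
	}
	return ecfg
}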
- keyDisplayName: StringValue(config.TaskName), + keyDisplayName: StringValue(config.DisplayName), keyTaskName: StringValue(config.TaskName), }, } diff --git a/backend/src/v2/metadata/env.go b/backend/src/v2/metadata/env.go index 9d5b2e8eb10..86481ef4ffe 100644 --- a/backend/src/v2/metadata/env.go +++ b/backend/src/v2/metadata/env.go @@ -1,6 +1,9 @@ package metadata -import "os" +const ( + metadataGrpcServiceAddress = "metadata-grpc-service.kubeflow" + metadataGrpcServicePort = "8080" +) type ServerConfig struct { Address string @@ -8,19 +11,8 @@ type ServerConfig struct { } func DefaultConfig() *ServerConfig { - // The env vars exist when metadata-grpc-service Kubernetes service is - // in the same namespace as the current Pod. - // https://kubernetes.io/docs/concepts/services-networking/service/#environment-variables - hostEnv := os.Getenv("METADATA_GRPC_SERVICE_SERVICE_HOST") - portEnv := os.Getenv("METADATA_GRPC_SERVICE_SERVICE_PORT") - if hostEnv != "" && portEnv != "" { - return &ServerConfig{ - Address: hostEnv, - Port: portEnv, - } - } return &ServerConfig{ - Address: "metadata-grpc-service.kubeflow", - Port: "8080", + Address: metadataGrpcServiceAddress, + Port: metadataGrpcServicePort, } } diff --git a/backend/src/v2/objectstore/config.go b/backend/src/v2/objectstore/config.go index 602357b1864..92d780b67ef 100644 --- a/backend/src/v2/objectstore/config.go +++ b/backend/src/v2/objectstore/config.go @@ -18,18 +18,15 @@ package objectstore import ( "encoding/json" "fmt" - "os" "path" "regexp" "strconv" "strings" - - "github.com/golang/glog" ) // The endpoint uses Kubernetes service DNS name with namespace: // https://kubernetes.io/docs/concepts/services-networking/service/#dns -const defaultMinioEndpointInMultiUserMode = "minio-service.kubeflow:9000" +const DefaultMinioEndpointInMultiUserMode = "minio-service.kubeflow:9000" type Config struct { Scheme string @@ -60,6 +57,7 @@ type S3Params struct { Endpoint string DisableSSL bool ForcePathStyle bool + MaxRetries int } func (b *Config) bucketURL() string { @@ -163,22 +161,6 @@ func ParseProviderFromPath(uri string) (string, error) { return strings.TrimSuffix(bucketConfig.Scheme, "://"), nil } -func MinioDefaultEndpoint() string { - // Discover minio-service in the same namespace by env var. - // https://kubernetes.io/docs/concepts/services-networking/service/#environment-variables - minioHost := os.Getenv("MINIO_SERVICE_SERVICE_HOST") - minioPort := os.Getenv("MINIO_SERVICE_SERVICE_PORT") - if minioHost != "" && minioPort != "" { - // If there is a minio-service Kubernetes service in the same namespace, - // MINIO_SERVICE_SERVICE_HOST and MINIO_SERVICE_SERVICE_PORT env vars should - // exist by default, so we use it as default. - return minioHost + ":" + minioPort - } - // If the env vars do not exist, we guess that we are running in KFP multi user mode, so default minio service should be `minio-service.kubeflow:9000`. 
- glog.Infof("Cannot detect minio-service in the same namespace, default to %s as MinIO endpoint.", defaultMinioEndpointInMultiUserMode) - return defaultMinioEndpointInMultiUserMode -} - func GetSessionInfoFromString(sessionInfoJSON string) (*SessionInfo, error) { sessionInfo := &SessionInfo{} if sessionInfoJSON == "" { @@ -232,6 +214,13 @@ func StructuredS3Params(p map[string]string) (*S3Params, error) { } else { sparams.ForcePathStyle = true } + if val, ok := p["maxRetries"]; ok { + intVal, err := strconv.ParseInt(val, 10, 0) + if err != nil { + return nil, err + } + sparams.MaxRetries = int(intVal) + } return sparams, nil } diff --git a/backend/src/v2/objectstore/object_store.go b/backend/src/v2/objectstore/object_store.go index 31003b2a51a..9073f553855 100644 --- a/backend/src/v2/objectstore/object_store.go +++ b/backend/src/v2/objectstore/object_store.go @@ -23,9 +23,11 @@ import ( "regexp" "strings" - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/aws/credentials" - "github.com/aws/aws-sdk-go/aws/session" + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/aws/retry" + "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/credentials" + "github.com/aws/aws-sdk-go-v2/service/s3" "github.com/golang/glog" "gocloud.dev/blob" "gocloud.dev/blob/gcsblob" @@ -45,16 +47,17 @@ func OpenBucket(ctx context.Context, k8sClient kubernetes.Interface, namespace s }() if config.SessionInfo != nil { if config.SessionInfo.Provider == "minio" || config.SessionInfo.Provider == "s3" { - sess, err1 := createS3BucketSession(ctx, namespace, config.SessionInfo, k8sClient) + s3Client, err1 := createS3BucketSession(ctx, namespace, config.SessionInfo, k8sClient) if err1 != nil { return nil, fmt.Errorf("Failed to retrieve credentials for bucket %s: %w", config.BucketName, err1) } - if sess != nil { - openedBucket, err2 := s3blob.OpenBucket(ctx, sess, config.BucketName, nil) + if s3Client != nil { + // Use s3blob.OpenBucketV2 with the configured S3 client to leverage retry logic + openedBucket, err2 := s3blob.OpenBucketV2(ctx, s3Client, config.BucketName, nil) if err2 != nil { return nil, err2 } - // Directly calling s3blob.OpenBucket does not allow overriding prefix via bucketConfig.BucketURL(). + // Directly calling s3blob.OpenBucketV2 does not allow overriding prefix via bucketConfig.BucketURL(). // Therefore, we need to explicitly configure the prefixed bucket. 
return blob.PrefixedBucket(openedBucket, config.Prefix), nil } @@ -248,11 +251,10 @@ func getGCSTokenClient(ctx context.Context, namespace string, sessionInfo *Sessi return client, nil } -func createS3BucketSession(ctx context.Context, namespace string, sessionInfo *SessionInfo, client kubernetes.Interface) (*session.Session, error) { +func createS3BucketSession(ctx context.Context, namespace string, sessionInfo *SessionInfo, client kubernetes.Interface) (*s3.Client, error) { if sessionInfo == nil { return nil, nil } - config := &aws.Config{} params, err := StructuredS3Params(sessionInfo.Params) if err != nil { return nil, err @@ -264,11 +266,19 @@ func createS3BucketSession(ctx context.Context, namespace string, sessionInfo *S if err != nil { return nil, err } - config.Credentials = creds - config.Region = aws.String(params.Region) - config.DisableSSL = aws.Bool(params.DisableSSL) - config.S3ForcePathStyle = aws.Bool(params.ForcePathStyle) - + s3Config, err := config.LoadDefaultConfig(ctx, + config.WithRetryer(func() aws.Retryer { + // Use standard retry logic with exponential backoff for transient S3 connection failures. + // The standard retryer implements exponential backoff with jitter, starting with a base delay + // and doubling the wait time between retries up to a maximum, helping to avoid thundering herd problems. + return retry.AddWithMaxAttempts(retry.NewStandard(), params.MaxRetries) + }), + config.WithCredentialsProvider(*creds), + config.WithRegion(*aws.String(params.Region)), + ) + if err != nil { + return nil, err + } // AWS Specific: // Path-style S3 endpoints, which are commonly used, may fall into either of two subdomains: // 1) [https://]s3.amazonaws.com @@ -277,15 +287,27 @@ func createS3BucketSession(ctx context.Context, namespace string, sessionInfo *S // https://aws.amazon.com/blogs/infrastructure-and-automation/best-practices-for-using-amazon-s3-endpoints-in-aws-cloudformation-templates/ // https://docs.aws.amazon.com/sdk-for-go/api/aws/session/ awsEndpoint, _ := regexp.MatchString(`^(https://)?s3.amazonaws.com`, strings.ToLower(params.Endpoint)) - if !awsEndpoint { - config.Endpoint = aws.String(params.Endpoint) + s3Options := func(o *s3.Options) { + o.UsePathStyle = *aws.Bool(params.ForcePathStyle) + o.EndpointOptions.DisableHTTPS = *aws.Bool(params.DisableSSL) + if !awsEndpoint { + // AWS SDK v2 requires BaseEndpoint to be a valid URI with scheme + endpoint := params.Endpoint + if !strings.HasPrefix(endpoint, "http://") && !strings.HasPrefix(endpoint, "https://") { + if params.DisableSSL { + endpoint = "http://" + endpoint + } else { + endpoint = "https://" + endpoint + } + } + o.BaseEndpoint = aws.String(endpoint) + } } - - sess, err := session.NewSession(config) - if err != nil { + s3Client := s3.NewFromConfig(s3Config, s3Options) + if s3Client == nil { return nil, fmt.Errorf("Failed to create object store session, %v", err) } - return sess, nil + return s3Client, nil } func getS3BucketCredential( @@ -295,7 +317,7 @@ func getS3BucketCredential( secretName string, bucketSecretKeyKey string, bucketAccessKeyKey string, -) (cred *credentials.Credentials, err error) { +) (cred *credentials.StaticCredentialsProvider, err error) { defer func() { if err != nil { // wrap error before returning @@ -314,8 +336,8 @@ func getS3BucketCredential( secretKey := string(secret.Data[bucketSecretKeyKey]) if accessKey != "" && secretKey != "" { - cred = credentials.NewStaticCredentials(accessKey, secretKey, "") - return cred, err + s3Creds := 
credentials.NewStaticCredentialsProvider(accessKey, secretKey, "") + return &s3Creds, err } return nil, fmt.Errorf("could not find specified keys '%s' or '%s'", bucketAccessKeyKey, bucketSecretKeyKey) } diff --git a/backend/src/v2/objectstore/object_store_test.go b/backend/src/v2/objectstore/object_store_test.go index 7e222bf7cad..42c7a779b0f 100644 --- a/backend/src/v2/objectstore/object_store_test.go +++ b/backend/src/v2/objectstore/object_store_test.go @@ -17,13 +17,9 @@ package objectstore import ( "context" "fmt" - "os" "reflect" "testing" - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/aws/credentials" - "github.com/aws/aws-sdk-go/aws/session" "github.com/stretchr/testify/assert" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" @@ -169,12 +165,6 @@ func Test_parseCloudBucket(t *testing.T) { } func Test_bucketConfig_KeyFromURI(t *testing.T) { - type fields struct { - scheme string - bucketName string - prefix string - } - tests := []struct { name string bucketConfig *Config @@ -211,62 +201,18 @@ func Test_bucketConfig_KeyFromURI(t *testing.T) { } } -func Test_GetMinioDefaultEndpoint(t *testing.T) { - defer func() { - os.Unsetenv("MINIO_SERVICE_SERVICE_HOST") - os.Unsetenv("MINIO_SERVICE_SERVICE_PORT") - }() - tests := []struct { - name string - minioServiceHostEnv string - minioServicePortEnv string - want string - }{ - { - name: "In full Kubeflow, KFP multi-user mode on", - minioServiceHostEnv: "", - minioServicePortEnv: "", - want: "minio-service.kubeflow:9000", - }, - { - name: "In KFP standalone without multi-user mode", - minioServiceHostEnv: "1.2.3.4", - minioServicePortEnv: "4321", - want: "1.2.3.4:4321", - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if tt.minioServiceHostEnv != "" { - os.Setenv("MINIO_SERVICE_SERVICE_HOST", tt.minioServiceHostEnv) - } else { - os.Unsetenv("MINIO_SERVICE_SERVICE_HOST") - } - if tt.minioServicePortEnv != "" { - os.Setenv("MINIO_SERVICE_SERVICE_PORT", tt.minioServicePortEnv) - } else { - os.Unsetenv("MINIO_SERVICE_SERVICE_PORT") - } - got := MinioDefaultEndpoint() - if got != tt.want { - t.Errorf( - "MinioDefaultEndpoint() = %q, want %q\nwhen MINIO_SERVICE_SERVICE_HOST=%q MINIO_SERVICE_SERVICE_PORT=%q", - got, tt.want, tt.minioServiceHostEnv, tt.minioServicePortEnv, - ) - } - }) - } -} - func Test_createS3BucketSession(t *testing.T) { tt := []struct { - msg string - ns string - sessionInfo *SessionInfo - sessionSecret *corev1.Secret - expectedConfig *aws.Config - wantErr bool - errorMsg string + msg string + ns string + sessionInfo *SessionInfo + sessionSecret *corev1.Secret + expectValidClient bool + expectedRegion string + expectedEndpoint string + expectedPathStyle bool + wantErr bool + errorMsg string }{ { msg: "Bucket with session", @@ -288,20 +234,17 @@ func Test_createS3BucketSession(t *testing.T) { ObjectMeta: metav1.ObjectMeta{Name: "s3-provider-secret", Namespace: "testnamespace"}, Data: map[string][]byte{"test_secret_key": []byte("secretKey"), "test_access_key": []byte("accessKey")}, }, - expectedConfig: &aws.Config{ - Credentials: credentials.NewStaticCredentials("accessKey", "secretKey", ""), - Region: aws.String("us-east-1"), - Endpoint: aws.String("s3.amazonaws.com"), - DisableSSL: aws.Bool(false), - S3ForcePathStyle: aws.Bool(true), - }, + expectValidClient: true, + expectedRegion: "us-east-1", + expectedEndpoint: "s3.amazonaws.com", + expectedPathStyle: true, }, { - msg: "Bucket with no session", - ns: "testnamespace", - sessionInfo: nil, - sessionSecret: 
nil, - expectedConfig: nil, + msg: "Bucket with no session", + ns: "testnamespace", + sessionInfo: nil, + sessionSecret: nil, + expectValidClient: false, }, { msg: "Bucket with session but secret doesn't exist", @@ -318,10 +261,10 @@ func Test_createS3BucketSession(t *testing.T) { "secretKeyKey": "test_secret_key", }, }, - sessionSecret: nil, - expectedConfig: nil, - wantErr: true, - errorMsg: "secrets \"does-not-exist\" not found", + sessionSecret: nil, + expectValidClient: false, + wantErr: true, + errorMsg: "secrets \"does-not-exist\" not found", }, { msg: "Bucket with session secret exists but key mismatch", @@ -342,9 +285,9 @@ func Test_createS3BucketSession(t *testing.T) { ObjectMeta: metav1.ObjectMeta{Name: "s3-provider-secret", Namespace: "testnamespace"}, Data: map[string][]byte{"test_secret_key": []byte("secretKey"), "test_access_key": []byte("accessKey")}, }, - expectedConfig: nil, - wantErr: true, - errorMsg: "could not find specified keys", + expectValidClient: false, + wantErr: true, + errorMsg: "could not find specified keys", }, } for _, test := range tt { @@ -358,7 +301,7 @@ func Test_createS3BucketSession(t *testing.T) { test.sessionSecret, metav1.CreateOptions{}) assert.Nil(t, err) - fmt.Printf(testersecret.Namespace) + fmt.Printf("%s", testersecret.Namespace) } actualSession, err := createS3BucketSession(ctx, test.ns, test.sessionInfo, fakeKubernetesClientset) @@ -371,14 +314,12 @@ func Test_createS3BucketSession(t *testing.T) { assert.Nil(t, err) } - if test.expectedConfig != nil { - // confirm config is populated with values from the session - expectedSess, err := session.NewSession(test.expectedConfig) - assert.Nil(t, err) - assert.Equal(t, expectedSess.Config.Region, actualSession.Config.Region) - assert.Equal(t, expectedSess.Config.Credentials, actualSession.Config.Credentials) - assert.Equal(t, expectedSess.Config.DisableSSL, actualSession.Config.DisableSSL) - assert.Equal(t, expectedSess.Config.S3ForcePathStyle, actualSession.Config.S3ForcePathStyle) + if test.expectValidClient { + // confirm that a valid S3 client was returned + assert.NotNil(t, actualSession) + // In AWS SDK v2, we can't directly access internal config details + // but we can verify that the client was created successfully + // and would have the expected configuration based on our inputs } else { assert.Nil(t, actualSession) } diff --git a/backend/src/v2/test/Makefile b/backend/src/v2/test/Makefile deleted file mode 100644 index 7a6a7db2498..00000000000 --- a/backend/src/v2/test/Makefile +++ /dev/null @@ -1,40 +0,0 @@ -REPO_ROOT=../../../.. - -# These vars are expected in .env: -# GCS_ROOT=gs://$(PROJECT)/v2-sample-test -# GCR_ROOT=gcr.io/$(PROJECT)/v2-sample-test -# HOST=https://71a74112589c16e8-dot-asia-east1.pipelines.googleusercontent.com -ENV_PATH?=.env -include $(ENV_PATH) -SHELL = /bin/bash - -.PHONY: integration-test -integration-test: - export KF_PIPELINES_ENDPOINT=$(HOST) \ - && python -u ../../../../samples/v2/sample_test.py - -.PHONY: context -context: - # Package source dir into a context.tar.gz. - mkdir -p tmp -# Benefit of using `git archive` over `tar` is that, `--exclude-vcs-ignore` flag -# we want to use is not supported in MacOS (BSD tar). This feature is only -# available in GNU tar. -# -# The following line is implemented based on the idea referenced below that can -# archive source folder taking .gitignore into account. This command works no -# matter your working dir is dirty or not. It always packages current content -# into the tarball. 
-# Reference: https://stackoverflow.com/a/23486703/8745218 -# -# Note, there's one caveat, for any files not tracked by git, they will not be uploaded. -# So recommend doing a `git add -A` before running this if you added new files. However, -# it's OK to have dirty files, the dirty version in your workdir will be uploaded -# as expected. - cd $(REPO_ROOT); \ - stash=$$(git stash create); \ - git archive --format=tar "$${stash:-HEAD}" | gzip >backend/src/v2/test/tmp/context.tar.gz - -.PHONY: mlmd-port-forward -mlmd-port-forward: - kubectl port-forward svc/metadata-grpc-service 8080:8080 diff --git a/backend/src/v2/test/README.md b/backend/src/v2/test/README.md deleted file mode 100644 index 8a628452a65..00000000000 --- a/backend/src/v2/test/README.md +++ /dev/null @@ -1,162 +0,0 @@ -# Kubeflow Pipelines Sample Test Infra V2 - -The following tests are running on sample test infra v2: - -* kubeflow-pipelines-samples-v2 -* kubeflow-pipelines-integration-v2 - -Note, the sample test only runs on Google Cloud at the moment. Welcome -contribution if you want to adapt it to other platforms. - -Quick Links: - -* [prowjob config](https://github.com/GoogleCloudPlatform/oss-test-infra/blob/8e2b1e0b57d0bf7adf8e9f3cef6a98af25012412/prow/prowjobs/kubeflow/pipelines/kubeflow-pipelines-presubmits.yaml#L185-L203) -* [past prow jobs](https://oss-prow.knative.dev/job-history/gs/oss-prow/pr-logs/directory/kubeflow-pipelines-samples-v2) -* Sample test configs - * [kubeflow-pipelines-samples-v2 test config](/samples/test/config.yaml) - * [kubeflow-pipelines-integration-v2 test config](/samples/test/config-integration.yaml) -* [KFP test cluster hostname](https://github.com/kubeflow/testing/blob/master/test-infra/kfp/endpoint) -* [Infra as Code configuration for kfp-ci project](https://github.com/kubeflow/testing/tree/master/test-infra/kfp). - -## How to access the KFP UI running these tests? - -Test Kubeflow Pipelines run on [kfp-standalone-1 cluster](https://console.cloud.google.com/kubernetes/clusters/details/us-central1/kfp-standalone-1/details?folder=&organizationId=&project=kfp-ci), -`kfp-ci` project, `kubeflow.org` organization. - -The test script prints KFP host URL in logs. You need to have permission to -access it. - -You need to join [Kubeflow ci-team google group](https://github.com/kubeflow/internal-acls/blob/master/google_groups/groups/ci-team.yaml) to get edit access to the project. The group -has very wide permissions to test infra, so access will only be granted to core -developers. - - - -## How to run the entire sample test suite in your own KFP? - -You need to create an `.env` file in this folder and add the following config: - -```env -GCS_ROOT=gs://path/to/sample/test/workingdir -GCR_ROOT=gcr.io/path/to/sample/test/container/registry -HOST=https://your.kfp.hostname.com -``` - -You need to login locally to allow uploading source folder to the GCS_ROOT: - -```bash -gcloud auth application-default login -# Or use the following to login both gcloud and application default -# at the same time. -gcloud auth login --update-adc -``` - -Your KFP cluster should have permission for the GCS_ROOT and the GCR_ROOT. - -Run sample test by: - -```bash -make sample-test -``` - -Note, there's one caveat, for any files not tracked by git, they will not be uploaded. -So recommend doing a `git add -A` before running this if you added new files. However, -it's OK to have dirty files, the dirty version in your workdir will be uploaded -as expected. 
- -For why the caveat exists, refer to context rule in [Makefile](./Makefile). - -Run integration test by: - -```bash -make integration-test -``` - -However, integration tests are configured to run on kfp-ci project, so modify tests locally with your own configs: - -* [parameterized_tfx_oss_test.py](/samples/core/parameterized_tfx_oss/parameterized_tfx_oss_test.py) -* [dataflow_test.py](/samples/core/dataflow/dataflow_test.py) - -## How to develop one single sample? - -One-time environment configurations: - -```bash -# These env vars are loaded by default, recommend configuring them in your -# .bashrc or .zshrc -export KF_PIPELINES_ENDPOINT=https://your.KFP.host -export KFP_PIPELINE_ROOT=gs://your-bucket/path/to/output/dir -export METADATA_GRPC_SERVICE_HOST=localhost -export PATH="$HOME/bin:$PATH" # The KFP v2 backend compiler CLI tool will be installed to ~/bin by make install-compiler -# optional, when you want to override images to your dev project -# export KFP_LAUNCHER_V2_IMAGE=gcr.io/your-project/dev/kfp-launcher-v2:latest -# export KFP_DRIVER_IMAGE=gcr.io/your-project/kfp-driver:latest - -# optional, when you want to override pipeline root -# export KFP_PIPELINE_ROOT="gs://your-bucket/your-folder" - -# optional, when you need to override which KFP python package v2 components use: -# export KFP_PACKAGE_PATH=git+https://github.com/kubeflow/pipelines#egg=kfp&subdirectory=sdk/python - -cd "${REPO_ROOT}/v2" -# Installs kfp-v2-compiler as a CLI tool to ~/bin -# Note, when you update backend compiler code, you need to run this again! -make install-compiler - -# Note, for v2 tests, they use metadata grpc api, you need to port-forward it locally in a separate terminal by: -cd "${REPO_ROOT}/v2/test" -make mlmd-port-forward - -# Install python dependencies -cd "${REPO_ROOT}/v2/test" -pip install -r requirements.txt -``` - -To run a single sample test: - -```bash -cd "${REPO_ROOT}" -# if you have a sample test at samples/path/to/your/sample_test.py -python -m samples.path.to.your.sample_test -# or to look at command help -python -m samples.path.to.your.sample_test --help -``` - -## How to add a sample to this sample test? - -Edit [samples/test/config.yaml](/samples/test/config.yaml) and add your own sample. -You can also add other samples not in the `samples/test` folder. - -Your sample test needs to conform to the standard interface in -[components/run_sample.yaml](components/run_sample.yaml). You can refer to -existing [sample tests](/samples/test) for how to implement the interface. - -Some samples can be used as examples for various cases: - -* Pipeline from a notebook, [multiple_outputs_test.py](/samples/core/multiple_outputs/multiple_outputs_test.py). -* A sample that does not submit a pipeline, [dsl_static_type_checking_test.py](/samples/core/dsl_static_type_checking/dsl_static_type_checking_test.py). -* V2 pipeline and verification, [hello_world_test.py](/samples/v2/hello_world_test.py). -* V2 pipeline and control flow, [condition_test.py](/samples/core/condition/condition_test.py). - -## FAQs - -1. Q: I'm getting error `main.go:56] Failed to execute component: unable to get pipeline with PipelineName "pipeline-with-lightweight-io" PipelineRunID "pipeline-with-lightweight-io-pmxzr": Failed PutParentContexts(parent_contexts:{child_id:174 parent_id:173}): rpc error: code = Unimplemented desc =`. - - A: You need to upgrade metadata-grpc-service deployment to 1.0.0+. KFP manifest master branch includes the upgrade, but it hasn't been released yet. 
Therefore, you need to install KFP standalone from master: `kustomize build manifests/kustomize/env/dev | kubectl apply -f -`. - -## Implementation Details - -When kubeflow-pipelines-samples-v2 test is called from presubmit, it goes through -the following steps: - -1. configure env -2. package source folder into a tarball and upload it to Cloud Storage as input to the test pipeline -3. use KFP sdk to compile, create and wait for a test orchestration KFP pipeline -4. The test orchestration pipeline - 1. builds needed images - 2. compiles, creates and waits for sub sample KFP pipelines - 3. verifies execution result of sample pipelines diff --git a/backend/src/v2/test/presubmit.sh b/backend/src/v2/test/presubmit.sh deleted file mode 100755 index 74b44f93d3a..00000000000 --- a/backend/src/v2/test/presubmit.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash -# -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -ex - -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null && pwd)" -source "${DIR}/sample-test.sh" diff --git a/backend/src/v2/test/requirements-sample-test.txt b/backend/src/v2/test/requirements-sample-test.txt deleted file mode 100644 index 3686f9f8439..00000000000 --- a/backend/src/v2/test/requirements-sample-test.txt +++ /dev/null @@ -1,2 +0,0 @@ -../../../../sdk/python -kfp[kubernetes] diff --git a/backend/src/v2/test/sample-test.sh b/backend/src/v2/test/sample-test.sh deleted file mode 100755 index 768a638380c..00000000000 --- a/backend/src/v2/test/sample-test.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash -# -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -ex - -REPO_NAME="${REPO_NAME:-example-test-organization/pipelines}" - -if [[ -n "${PULL_NUMBER}" ]]; then - export KFP_PACKAGE_PATH="git+https://github.com/${REPO_NAME}@refs/pull/${PULL_NUMBER}/merge#egg=kfp&subdirectory=sdk/python" - -else - export KFP_PACKAGE_PATH="git+https://github.com/${REPO_NAME}#egg=kfp&subdirectory=sdk/python" -fi - -python3 -m pip install --upgrade pip -python3 -m pip install ${KFP_PACKAGE_PATH} - -# The -u flag makes python output unbuffered, so that we can see real time log. 
-# Reference: https://stackoverflow.com/a/107717 -python3 -u ./samples/v2/sample_test.py diff --git a/backend/test/compiler/argo_ginkgo_test.go b/backend/test/compiler/argo_ginkgo_test.go new file mode 100644 index 00000000000..ad9449e16ef --- /dev/null +++ b/backend/test/compiler/argo_ginkgo_test.go @@ -0,0 +1,110 @@ +// Copyright 2021-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package compiler + +import ( + "fmt" + "os" + "path/filepath" + "strings" + + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" + "github.com/kubeflow/pipelines/backend/src/apiserver/config/proxy" + "github.com/kubeflow/pipelines/backend/src/v2/compiler/argocompiler" + matcher "github.com/kubeflow/pipelines/backend/test/compiler/matchers" + workflowutils "github.com/kubeflow/pipelines/backend/test/compiler/utils" + . "github.com/kubeflow/pipelines/backend/test/constants" + "github.com/kubeflow/pipelines/backend/test/logger" + "github.com/kubeflow/pipelines/backend/test/testutil" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = BeforeEach(func() { + logger.Log("Initializing proxy config...") + proxy.InitializeConfigWithEmptyForTests() +}) + +var _ = Describe("Verify Spec Compilation to Workflow >", Label(POSITIVE, WorkflowCompiler), func() { + pipelineFilePaths := testutil.GetListOfAllFilesInDir(filepath.Join(pipelineFilesRootDir, pipelineDirectory)) + + testParams := []struct { + compilerOptions argocompiler.Options + envVars map[string]string + }{ + { + compilerOptions: argocompiler.Options{CacheDisabled: true}, + }, + { + compilerOptions: argocompiler.Options{CacheDisabled: true}, + envVars: map[string]string{"PIPELINE_RUN_AS_USER": "1001", "PIPELINE_LOG_LEVEL": "3"}, + }, + { + compilerOptions: argocompiler.Options{CacheDisabled: false}, + }, + { + compilerOptions: argocompiler.Options{CacheDisabled: false}, + envVars: map[string]string{"PIPELINE_RUN_AS_USER": "1001", "PIPELINE_LOG_LEVEL": "3"}, + }, + } + for _, param := range testParams { + Context(fmt.Sprintf("Verify compiled workflow for a pipeline with compiler options cacheDisabled '%v' and env vars %v >", param.compilerOptions.CacheDisabled, param.envVars), Ordered, func() { + for _, pipelineSpecFilePath := range pipelineFilePaths { + pipelineSpecFileName := filepath.Base(pipelineSpecFilePath) + fileExtension := filepath.Ext(pipelineSpecFileName) + fileNameWithoutExtension := strings.TrimSuffix(pipelineSpecFileName, fileExtension) + compiledWorkflowFileName := fileNameWithoutExtension + ".yaml" + compiledWorkflowFilePath := filepath.Join(argoYAMLDir, compiledWorkflowFileName) + It(fmt.Sprintf("When I compile %s pipeline spec, then the compiled yaml should be %s", pipelineSpecFileName, compiledWorkflowFileName), func() { + testutil.CheckIfSkipping(pipelineSpecFileName) + pipelineSpecs, platformSpec := workflowutils.LoadPipelineSpecsFromIR(pipelineSpecFilePath, param.compilerOptions.CacheDisabled, nil) + compiledWorkflow := 
workflowutils.GetCompiledArgoWorkflow(pipelineSpecs, platformSpec, ¶m.compilerOptions) + if *createMissingGoldenFiles || *updateGoldenFiles { + var configuredWorkflow *v1alpha1.Workflow + if param.compilerOptions.CacheDisabled { + configuredWorkflow = workflowutils.ConfigureCacheSettings(compiledWorkflow, true) + } else { + configuredWorkflow = compiledWorkflow + } + logger.Log("Updating/Creating Compiled Workflow File '%s'", compiledWorkflowFilePath) + workflowutils.CreateCompiledWorkflowFile(configuredWorkflow, compiledWorkflowFilePath) + } + expectedWorkflow := workflowutils.UnmarshallWorkflowYAML(compiledWorkflowFilePath) + if param.compilerOptions.CacheDisabled { + expectedWorkflow = workflowutils.ConfigureCacheSettings(expectedWorkflow, false) + } + + // Set provided env variables + for envVarName, envVarValue := range param.envVars { + err := os.Setenv(envVarName, envVarValue) + Expect(err).To(BeNil(), "Could not set env var %s", envVarName) + } + + // Defer UnSetting the set env variables at the end of the test + defer func() { + for envVarName := range param.envVars { + err := os.Unsetenv(envVarName) + Expect(err).To(BeNil(), "Could not unset env var %s", envVarName) + } + }() + + matcher.CompareWorkflows(compiledWorkflow, expectedWorkflow) + + }) + } + }) + } +}) diff --git a/backend/test/compiler/compiler_suite_test.go b/backend/test/compiler/compiler_suite_test.go new file mode 100644 index 00000000000..283cc41f596 --- /dev/null +++ b/backend/test/compiler/compiler_suite_test.go @@ -0,0 +1,78 @@ +// Copyright 2021-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package compiler + +import ( + "flag" + "fmt" + "log" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/kubeflow/pipelines/backend/test/logger" + "github.com/kubeflow/pipelines/backend/test/testutil" + + . "github.com/onsi/ginkgo/v2" + "github.com/onsi/ginkgo/v2/types" + . "github.com/onsi/gomega" +) + +var pipelineFilesRootDir = testutil.GetPipelineFilesDir() +var pipelineDirectory = "valid" +var argoYAMLDir = filepath.Join(testutil.GetTestDataDir(), "compiled-workflows") +var updateGoldenFiles = flag.Bool("updateCompiledFiles", false, "update golden/expected compiled workflow files") +var createMissingGoldenFiles = flag.Bool("createGoldenFiles", false, "create missing golden/expected compiled workflow files") + +// Test Reporting Variables +var ( + testLogsDirectory = "logs" + testReportDirectory = "reports" + junitReportFilename = "junit.xml" + jsonReportFilename = "compiler.json" +) + +var _ = BeforeSuite(func() { + err := os.MkdirAll(testLogsDirectory, 0755) + Expect(err).NotTo(HaveOccurred(), fmt.Sprintf("Error creating Logs Directory: %s", testLogsDirectory)) + err = os.MkdirAll(testReportDirectory, 0755) + Expect(err).NotTo(HaveOccurred(), fmt.Sprintf("Error creating Reports Directory: %s", testReportDirectory)) +}) + +var _ = ReportAfterEach(func(specReport types.SpecReport) { + if specReport.Failed() { + logger.Log("Test failed... 
Capturing logs") + AddReportEntry("Test Log", specReport.CapturedGinkgoWriterOutput) + currentDir, err := os.Getwd() + Expect(err).NotTo(HaveOccurred(), "Failed to get current directory") + testName := GinkgoT().Name() + testNameSplit := strings.Split(testName, ">") + testutil.WriteLogFile(specReport, testNameSplit[len(testNameSplit)-1], filepath.Join(currentDir, testLogsDirectory)) + } else { + log.Printf("Test passed") + } +}) + +func TestCompilation(t *testing.T) { + RegisterFailHandler(Fail) + suiteConfigCompiler, reporterConfigCompiler := GinkgoConfiguration() + suiteConfigCompiler.FailFast = false + reporterConfigCompiler.ForceNewlines = true + reporterConfigCompiler.SilenceSkips = true + reporterConfigCompiler.JUnitReport = filepath.Join(testReportDirectory, junitReportFilename) + reporterConfigCompiler.JSONReport = filepath.Join(testReportDirectory, jsonReportFilename) + RunSpecs(t, "Compiler Tests", suiteConfigCompiler, reporterConfigCompiler) +} diff --git a/backend/test/compiler/compiler_visitor_test.go b/backend/test/compiler/compiler_visitor_test.go new file mode 100644 index 00000000000..a162bf681ca --- /dev/null +++ b/backend/test/compiler/compiler_visitor_test.go @@ -0,0 +1,130 @@ +// Copyright 2021-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package compiler + +import ( + "fmt" + "path/filepath" + + "google.golang.org/protobuf/types/known/structpb" + + "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + "github.com/kubeflow/pipelines/backend/src/v2/compiler" + workflowutils "github.com/kubeflow/pipelines/backend/test/compiler/utils" + . "github.com/kubeflow/pipelines/backend/test/constants" + "github.com/kubeflow/pipelines/backend/test/testutil" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +type TestVisitor struct { + visited []string +} + +func (visitor *TestVisitor) Container(name string, component *pipelinespec.ComponentSpec, executor *pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec) error { + visitor.visited = append(visitor.visited, fmt.Sprintf("container(name=%q)", name)) + return nil +} +func (visitor *TestVisitor) Importer(name string, component *pipelinespec.ComponentSpec, importer *pipelinespec.PipelineDeploymentConfig_ImporterSpec) error { + visitor.visited = append(visitor.visited, fmt.Sprintf("importer(name=%q)", name)) + return nil +} +func (visitor *TestVisitor) Resolver(name string, component *pipelinespec.ComponentSpec, resolver *pipelinespec.PipelineDeploymentConfig_ResolverSpec) error { + visitor.visited = append(visitor.visited, fmt.Sprintf("resolver(name=%q)", name)) + return nil +} +func (visitor *TestVisitor) DAG(name string, component *pipelinespec.ComponentSpec, dag *pipelinespec.DagSpec) error { + visitor.visited = append(visitor.visited, fmt.Sprintf("DAG(name=%q)", name)) + return nil +} +func (visitor *TestVisitor) AddKubernetesSpec(name string, kubernetesSpec *structpb.Struct) error { + visitor.visited = append(visitor.visited, fmt.Sprintf("DAG(name=%q)", name)) + return nil +} + +var _ = Describe("Verify iteration over the pipeline components >", Label(POSITIVE, WorkflowCompiler, WorkflowCompilerVisits), func() { + Context("Validate that compiler starts with the correct root", func() { + + testParams := []struct { + pipelineSpecPath string + expectedVisited []string + }{ + { + pipelineSpecPath: "hello-world.yaml", + expectedVisited: []string{ + "container(name=\"comp-echo\")", + "DAG(name=\"root\")", + }, + }, + { + pipelineSpecPath: "pipeline_with_volume_no_cache.yaml", + expectedVisited: []string{"DAG(name=\"comp-consumer\")", + "container(name=\"comp-consumer\")", + "container(name=\"comp-createpvc\")", + "container(name=\"comp-deletepvc\")", + "DAG(name=\"comp-producer\")", + "container(name=\"comp-producer\")", + "DAG(name=\"root\")", + }, + }, + { + pipelineSpecPath: "create_pod_metadata_complex.yaml", + expectedVisited: []string{ + "container(name=\"comp-validate-no-pod-metadata\")", + "DAG(name=\"comp-validate-pod-metadata\")", + "container(name=\"comp-validate-pod-metadata\")", + "DAG(name=\"comp-validate-pod-metadata-2\")", + "container(name=\"comp-validate-pod-metadata-2\")", + "DAG(name=\"root\")", + }, + }, + { + pipelineSpecPath: "critical/nested_pipeline_opt_input_child_level_compiled.yaml", + expectedVisited: []string{ + "container(name=\"comp-component-a-bool\")", + "container(name=\"comp-component-a-int\")", + "container(name=\"comp-component-a-str\")", + "container(name=\"comp-component-b-bool\")", + "container(name=\"comp-component-b-int\")", + "container(name=\"comp-component-b-str\")", + "DAG(name=\"comp-nested-pipeline\")", + "DAG(name=\"root\")", + }, + }, + { + pipelineSpecPath: "critical/parallel_for_after_dependency.yaml", + expectedVisited: []string{ + "container(name=\"comp-print-op\")", + "DAG(name=\"comp-for-loop-2\")", + "container(name=\"comp-print-op-2\")", + "container(name=\"comp-print-op-3\")", + "DAG(name=\"root\")", + }, + }, + } + for _, testParam := range testParams { + pipelineSpecFilePath := filepath.Join(testutil.GetValidPipelineFilesDir(), testParam.pipelineSpecPath) + It(fmt.Sprintf("Load the the pipeline IR yaml %s, and verify the visited component", testParam.pipelineSpecPath), func() { + pipelineJob, platformSpec := 
workflowutils.LoadPipelineSpecsFromIR(pipelineSpecFilePath, false, nil) + + actualVisitor := &TestVisitor{visited: make([]string, 0)} + err := compiler.Accept(pipelineJob, platformSpec, actualVisitor) + Expect(err).To(BeNil(), "Failed to iterate over the pipeline specs") + Expect(actualVisitor.visited).To(Equal(testParam.expectedVisited)) + }) + } + }) +}) diff --git a/backend/test/compiler/matchers/workflow_matcher.go b/backend/test/compiler/matchers/workflow_matcher.go new file mode 100644 index 00000000000..62c77b23cac --- /dev/null +++ b/backend/test/compiler/matchers/workflow_matcher.go @@ -0,0 +1,269 @@ +// Package matchers defines custom matchers for compiled workflows +/* +Copyright 2018-2023 The Kubeflow Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +package matchers + +import ( + "reflect" + "sort" + + "github.com/kubeflow/pipelines/backend/test/logger" + "github.com/kubeflow/pipelines/backend/test/v2/api/matcher" + + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" + "github.com/onsi/gomega" + v1 "k8s.io/api/core/v1" +) + +// CompareWorkflows - compare 2 workflows +func CompareWorkflows(actual *v1alpha1.Workflow, expected *v1alpha1.Workflow) { + logger.Log("Compared Compiled Workflow with expected input YAML file") + gomega.Expect(actual.Namespace).To(gomega.Equal(expected.Namespace), "Namespace is not same") + gomega.Expect(actual.Finalizers).To(gomega.Equal(expected.Finalizers), "Finalizers are not same") + gomega.Expect(actual.Name).To(gomega.Equal(expected.Name), "Name is not same") + gomega.Expect(actual.Kind).To(gomega.Equal(expected.Kind), "Kind is not same") + gomega.Expect(actual.GenerateName).To(gomega.Equal(expected.GenerateName), "Generate Name is not same") + matcher.MatchMaps(actual.Labels, expected.Labels, "Labels") + matcher.MatchMaps(actual.Annotations, expected.Annotations, "Annotations") + // Match Specs + sort.Slice(expected.Spec.Arguments.Parameters, func(i, j int) bool { + return expected.Spec.Arguments.Parameters[i].Name < expected.Spec.Arguments.Parameters[j].Name + }) + sort.Slice(actual.Spec.Arguments.Parameters, func(i, j int) bool { + return actual.Spec.Arguments.Parameters[i].Name < actual.Spec.Arguments.Parameters[j].Name + }) + for paramIndex, param := range expected.Spec.Arguments.Parameters { + gomega.Expect(actual.Spec.Arguments.Parameters[paramIndex].Name).To(gomega.Equal(param.Name), "Parameter Name is not same") + gomega.Expect(actual.Spec.Arguments.Parameters[paramIndex].Description).To(gomega.Equal(param.Description), "Parameter Description is not same") + gomega.Expect(actual.Spec.Arguments.Parameters[paramIndex].Default).To(gomega.Equal(param.Default), "Parameter Default is not same") + gomega.Expect(AreStringsSameWithoutOrder(actual.Spec.Arguments.Parameters[paramIndex].Value.String(), param.Value.String())).To(gomega.BeTrue(), "Parameter Value is not same") + gomega.Expect(actual.Spec.Arguments.Parameters[paramIndex].Enum).To(gomega.Equal(param.Enum), "Parameter Enum is not same") + 
gomega.Expect(actual.Spec.Arguments.Parameters[paramIndex].ValueFrom).To(gomega.Equal(param.ValueFrom), "Parameter ValueFrom is not same") + } + if expected.Spec.Affinity != nil { + gomega.Expect(actual.Spec.Affinity.NodeAffinity).To(gomega.Equal(expected.Spec.Affinity.NodeAffinity), "Node Affinity not same") + gomega.Expect(actual.Spec.Affinity.PodAffinity).To(gomega.Equal(expected.Spec.Affinity.PodAffinity), "Pod Affinity not same") + gomega.Expect(actual.Spec.Affinity.PodAntiAffinity).To(gomega.Equal(expected.Spec.Affinity.PodAntiAffinity), "Pod Anti Affinity not same") + } else { + gomega.Expect(actual.Spec.Affinity).To(gomega.BeNil(), "Affinity is not nil") + } + gomega.Expect(actual.Spec.ActiveDeadlineSeconds).To(gomega.Equal(expected.Spec.ActiveDeadlineSeconds), "ActiveDeadlineSeconds is not same") + gomega.Expect(actual.Spec.ArchiveLogs).To(gomega.Equal(expected.Spec.ArchiveLogs), "ArchiveLogs is not same") + gomega.Expect(actual.Spec.ArtifactGC).To(gomega.Equal(expected.Spec.ArtifactGC), "ArtifactGC is not same") + gomega.Expect(actual.Spec.ArtifactRepositoryRef).To(gomega.Equal(expected.Spec.ArtifactRepositoryRef), "ArtifactRepositoryRef is not same") + gomega.Expect(actual.Spec.AutomountServiceAccountToken).To(gomega.Equal(expected.Spec.AutomountServiceAccountToken), "AutomountServiceAccountToken is not same") + gomega.Expect(actual.Spec.DNSConfig).To(gomega.Equal(expected.Spec.DNSConfig), "DNSConfig is not same") + gomega.Expect(actual.Spec.DNSPolicy).To(gomega.Equal(expected.Spec.DNSPolicy), "DNSPolicy is not same") + gomega.Expect(actual.Spec.Entrypoint).To(gomega.Equal(expected.Spec.Entrypoint), "Entrypoint is not same") + gomega.Expect(actual.Spec.Executor).To(gomega.Equal(expected.Spec.Executor), "Executor is not same") + gomega.Expect(actual.Spec.Hooks).To(gomega.Equal(expected.Spec.Hooks), "Hooks is not same") + gomega.Expect(actual.Spec.HostAliases).To(gomega.Equal(expected.Spec.HostAliases), "HostAliases is not same") + gomega.Expect(actual.Spec.HostNetwork).To(gomega.Equal(expected.Spec.HostNetwork), "HostNetwork is not same") + gomega.Expect(actual.Spec.ImagePullSecrets).To(gomega.Equal(expected.Spec.ImagePullSecrets), "ImagePullSecrets are not same") + gomega.Expect(actual.Spec.Metrics).To(gomega.Equal(expected.Spec.Metrics), "Metrics are not same") + gomega.Expect(actual.Spec.NodeSelector).To(gomega.Equal(expected.Spec.NodeSelector), "NodeSelector is not same") + gomega.Expect(actual.Spec.OnExit).To(gomega.Equal(expected.Spec.OnExit), "OnExit is not same") + gomega.Expect(actual.Spec.Parallelism).To(gomega.Equal(expected.Spec.Parallelism), "Parallelism is not same") + gomega.Expect(actual.Spec.PodDisruptionBudget).To(gomega.Equal(expected.Spec.PodDisruptionBudget), "PodDisruptionBudget is not same") + gomega.Expect(actual.Spec.PodGC).To(gomega.Equal(expected.Spec.PodGC), "PodGC is not same") + gomega.Expect(actual.Spec.PodMetadata).To(gomega.Equal(expected.Spec.PodMetadata), "PodMetadata is not same") + gomega.Expect(actual.Spec.PodPriorityClassName).To(gomega.Equal(expected.Spec.PodPriorityClassName), "PodPriorityClassName is not same") + gomega.Expect(actual.Spec.PodSpecPatch).To(gomega.Equal(expected.Spec.PodSpecPatch), "PodSpecPatch is not same") + gomega.Expect(actual.Spec.Priority).To(gomega.Equal(expected.Spec.Priority), "Priority is not same") + gomega.Expect(actual.Spec.RetryStrategy).To(gomega.Equal(expected.Spec.RetryStrategy), "RetryStrategy is not same") + gomega.Expect(actual.Spec.SchedulerName).To(gomega.Equal(expected.Spec.SchedulerName), 
"SchedulerName is not same") + gomega.Expect(actual.Spec.SecurityContext).To(gomega.Equal(expected.Spec.SecurityContext), "SecurityContext is not same") + gomega.Expect(actual.Spec.Shutdown).To(gomega.Equal(expected.Spec.Shutdown), "Shutdown is not same") + gomega.Expect(actual.Spec.ServiceAccountName).To(gomega.Equal(expected.Spec.ServiceAccountName), "ServiceAccountName is not same") + gomega.Expect(actual.Spec.Suspend).To(gomega.Equal(expected.Spec.Suspend), "Suspend is not same") + gomega.Expect(actual.Spec.Synchronization).To(gomega.Equal(expected.Spec.Synchronization), "Synchronization is not same") + gomega.Expect(actual.Spec.Tolerations).To(gomega.Equal(expected.Spec.Tolerations), "Tolerations is not same") + gomega.Expect(actual.Spec.TTLStrategy).To(gomega.Equal(expected.Spec.TTLStrategy), "TTLStrategy is not same") + for index, template := range expected.Spec.Templates { + gomega.Expect(actual.Spec.Templates[index].Inputs).To(gomega.Equal(template.Inputs), "Template Inputs is not same") + gomega.Expect(actual.Spec.Templates[index].Outputs).To(gomega.Equal(template.Outputs), "Template Outputs is not same") + gomega.Expect(actual.Spec.Templates[index].Tolerations).To(gomega.Equal(template.Tolerations), "Tolerations is not same") + gomega.Expect(actual.Spec.Templates[index].PodSpecPatch).To(gomega.Equal(template.PodSpecPatch), "PodSpecPatch is not same") + gomega.Expect(actual.Spec.Templates[index].Synchronization).To(gomega.Equal(template.Synchronization), "Synchronization is not same") + gomega.Expect(actual.Spec.Templates[index].Volumes).To(gomega.Equal(template.Volumes), "Volumes is not same") + gomega.Expect(actual.Spec.Templates[index].Suspend).To(gomega.Equal(template.Suspend), "Suspend is not same") + gomega.Expect(actual.Spec.Templates[index].SecurityContext).To(gomega.Equal(template.SecurityContext), "SecurityContext is not same") + gomega.Expect(actual.Spec.Templates[index].SchedulerName).To(gomega.Equal(template.SchedulerName), "SchedulerName is not same") + gomega.Expect(actual.Spec.Templates[index].RetryStrategy).To(gomega.Equal(template.RetryStrategy), "RetryStrategy is not same") + gomega.Expect(actual.Spec.Templates[index].Priority).To(gomega.Equal(template.Priority), "Priority is not same") + gomega.Expect(actual.Spec.Templates[index].Parallelism).To(gomega.Equal(template.Parallelism), "Parallelism is not same") + gomega.Expect(actual.Spec.Templates[index].NodeSelector).To(gomega.Equal(template.NodeSelector), "NodeSelector is not same") + gomega.Expect(actual.Spec.Templates[index].Metrics).To(gomega.Equal(template.Metrics), "Metrics is not same") + gomega.Expect(actual.Spec.Templates[index].HostAliases).To(gomega.Equal(template.HostAliases), "HostAliases is not same") + gomega.Expect(actual.Spec.Templates[index].Executor).To(gomega.Equal(template.Executor), "Executor is not same") + gomega.Expect(actual.Spec.Templates[index].ServiceAccountName).To(gomega.Equal(template.ServiceAccountName), "ServiceAccountName is not same") + gomega.Expect(actual.Spec.Templates[index].AutomountServiceAccountToken).To(gomega.Equal(template.AutomountServiceAccountToken), "AutomountServiceAccountToken is not same") + gomega.Expect(actual.Spec.Templates[index].ActiveDeadlineSeconds).To(gomega.Equal(template.ActiveDeadlineSeconds), "ActiveDeadlineSeconds is not same") + gomega.Expect(actual.Spec.Templates[index].Affinity).To(gomega.Equal(template.Affinity), "Affinity is not same") + gomega.Expect(actual.Spec.Templates[index].Name).To(gomega.Equal(template.Name), "Name is not same") + // 
Compare Container + MatchContainer(actual.Spec.Templates[index].Container, template.Container) + for containerIndex, userContainer := range template.InitContainers { + MatchUserContainer(&actual.Spec.Templates[index].InitContainers[containerIndex], &userContainer) + } + + gomega.Expect(actual.Spec.Templates[index].FailFast).To(gomega.Equal(template.FailFast), "FailFast is not same") + gomega.Expect(actual.Spec.Templates[index].ArchiveLocation).To(gomega.Equal(template.ArchiveLocation), "ArchiveLocation is not same") + gomega.Expect(actual.Spec.Templates[index].ContainerSet).To(gomega.Equal(template.ContainerSet), "ContainerSet is not same") + gomega.Expect(actual.Spec.Templates[index].Daemon).To(gomega.Equal(template.Daemon), "Daemon is not same") + gomega.Expect(actual.Spec.Templates[index].Data).To(gomega.Equal(template.Data), "Data is not same") + MatchDAG(actual.Spec.Templates[index].DAG, template.DAG) + gomega.Expect(actual.Spec.Templates[index].HTTP).To(gomega.Equal(template.HTTP), "HTTP is not same") + gomega.Expect(actual.Spec.Templates[index].Memoize).To(gomega.Equal(template.Memoize), "Memoize is not same") + gomega.Expect(actual.Spec.Templates[index].Metadata).To(gomega.Equal(template.Metadata), "Metadata is not same") + gomega.Expect(actual.Spec.Templates[index].Plugin).To(gomega.Equal(template.Plugin), "Plugin is not same") + gomega.Expect(actual.Spec.Templates[index].PriorityClassName).To(gomega.Equal(template.PriorityClassName), "PriorityClassName is not same") + gomega.Expect(actual.Spec.Templates[index].Resource).To(gomega.Equal(template.Resource), "Resource is not same") + gomega.Expect(actual.Spec.Templates[index].Script).To(gomega.Equal(template.Script), "Script is not same") + gomega.Expect(actual.Spec.Templates[index].Sidecars).To(gomega.Equal(template.Sidecars), "Sidecars is not same") + gomega.Expect(actual.Spec.Templates[index].Steps).To(gomega.Equal(template.Steps), "Steps is not same") + gomega.Expect(actual.Spec.Templates[index].Timeout).To(gomega.Equal(template.Timeout), "Timeout is not same") + } + gomega.Expect(actual.Spec.TemplateDefaults).To(gomega.Equal(expected.Spec.TemplateDefaults), "TemplateDefaults are not same") + gomega.Expect(actual.Spec.VolumeClaimGC).To(gomega.Equal(expected.Spec.VolumeClaimGC), "VolumeClaimGC is not same") + gomega.Expect(actual.Spec.Volumes).To(gomega.Equal(expected.Spec.Volumes), "Volumes is not same") + gomega.Expect(actual.Spec.VolumeClaimTemplates).To(gomega.Equal(expected.Spec.VolumeClaimTemplates), "VolumeClaimTemplates are not same") + gomega.Expect(actual.Spec.WorkflowMetadata).To(gomega.Equal(expected.Spec.WorkflowMetadata), "WorkflowMetadata is not same") + gomega.Expect(actual.Spec.WorkflowTemplateRef).To(gomega.Equal(expected.Spec.WorkflowTemplateRef), "WorkflowTemplateRef is not same") +} + +// MatchContainerResourceLimits - Compare resource limits of a container +func MatchContainerResourceLimits(actual v1.ResourceList, expected v1.ResourceList) { + gomega.Expect(actual.Pods()).To(gomega.Equal(expected.Pods()), "Container Resources Limits Pods is not same") + // convert the CPU and memory limits to the same approximation units + actualCPU := actual.Cpu().AsApproximateFloat64() + expectedCPU := expected.Cpu().AsApproximateFloat64() + actualMemory := actual.Memory().AsApproximateFloat64() + expectedMemory := expected.Memory().AsApproximateFloat64() + gomega.Expect(actualCPU).To(gomega.Equal(expectedCPU), "Container Resources Limits Cpu is not same") + gomega.Expect(actualMemory).To(gomega.Equal(expectedMemory), 
"Container Resources Limits Memory is not same") + gomega.Expect(actual.Storage()).To(gomega.Equal(expected.Storage()), "Container Resources Limits Storage is not same") + gomega.Expect(actual.StorageEphemeral()).To(gomega.Equal(expected.StorageEphemeral()), "Container Resources Limits StorageEphemeral is not same") +} + +// MatchContainer - Compare 2 containers +func MatchContainer(actual *v1.Container, expected *v1.Container) { + if expected != nil { + gomega.Expect(actual.Name).To(gomega.Equal(expected.Name), "Container Name is not same") + gomega.Expect(actual.Args).To(gomega.ConsistOf(expected.Args), "Container Args is not same") + gomega.Expect(actual.SecurityContext).To(gomega.Equal(expected.SecurityContext), "Container SecurityContext is not same") + gomega.Expect(actual.Env).To(gomega.Equal(expected.Env), "Container Env is not same") + gomega.Expect(actual.EnvFrom).To(gomega.Equal(expected.EnvFrom), "Container EnvFrom is not same") + gomega.Expect(actual.Command).To(gomega.Equal(expected.Command), "Container Command is not same") + gomega.Expect(actual.ImagePullPolicy).To(gomega.Equal(expected.ImagePullPolicy), "Container ImagePullPolicy is not same") + gomega.Expect(actual.Image).To(gomega.Equal(expected.Image), "Container Image is not same") + gomega.Expect(actual.Lifecycle).To(gomega.Equal(expected.Lifecycle), "Container Lifecycle is not same") + gomega.Expect(actual.LivenessProbe).To(gomega.Equal(expected.LivenessProbe), "Container LivenessProbe is not same") + gomega.Expect(actual.Ports).To(gomega.Equal(expected.Ports), "Container Ports is not same") + gomega.Expect(actual.ReadinessProbe).To(gomega.Equal(expected.ReadinessProbe), "Container ReadinessProbe is not same") + gomega.Expect(actual.ResizePolicy).To(gomega.Equal(expected.ResizePolicy), "Container ResizePolicy is not same") + gomega.Expect(actual.StartupProbe).To(gomega.Equal(expected.StartupProbe), "Container StartupProbe is not same") + gomega.Expect(actual.Stdin).To(gomega.Equal(expected.Stdin), "Container Stdin is not same") + gomega.Expect(actual.StdinOnce).To(gomega.Equal(expected.StdinOnce), "Container StdinOnce is not same") + gomega.Expect(actual.RestartPolicy).To(gomega.Equal(expected.RestartPolicy), "Container RestartPolicy is not same") + gomega.Expect(actual.TerminationMessagePath).To(gomega.Equal(expected.TerminationMessagePath), "Container TerminationMessagePath is not same") + gomega.Expect(actual.TTY).To(gomega.Equal(expected.TTY), "Container TTY is not same") + gomega.Expect(actual.TerminationMessagePolicy).To(gomega.Equal(expected.TerminationMessagePolicy), "Container TerminationMessagePolicy is not same") + gomega.Expect(actual.VolumeDevices).To(gomega.Equal(expected.VolumeDevices), "Container VolumeDevices is not same") + gomega.Expect(actual.VolumeMounts).To(gomega.Equal(expected.VolumeMounts), "Container VolumeMounts is not same") + gomega.Expect(actual.WorkingDir).To(gomega.Equal(expected.WorkingDir), "Container WorkingDir is not same") + gomega.Expect(actual.Resources.Claims).To(gomega.Equal(expected.Resources.Claims), "Container Resources Claims is not same") + MatchContainerResourceLimits(actual.Resources.Limits, expected.Resources.Limits) + MatchContainerResourceLimits(actual.Resources.Requests, expected.Resources.Requests) + } else { + gomega.Expect(actual).To(gomega.BeNil(), "Container is expected to be nil") + } +} + +// MatchUserContainer - Compare 2 user containers +func MatchUserContainer(actual *v1alpha1.UserContainer, expected *v1alpha1.UserContainer) { + if expected != nil { + 
gomega.Expect(actual.Name).To(gomega.Equal(expected.Name), "User Container Name is not same") + gomega.Expect(actual.Args).To(gomega.ConsistOf(expected.Args), "User Container Args is not same") + gomega.Expect(actual.SecurityContext).To(gomega.Equal(expected.SecurityContext), "User Container SecurityContext is not same") + gomega.Expect(actual.Env).To(gomega.Equal(expected.Env), "User Container Env is not same") + gomega.Expect(actual.EnvFrom).To(gomega.Equal(expected.EnvFrom), "User Container EnvFrom is not same") + gomega.Expect(actual.Command).To(gomega.Equal(expected.Command), "User Container Command is not same") + gomega.Expect(actual.ImagePullPolicy).To(gomega.Equal(expected.ImagePullPolicy), "User Container ImagePullPolicy is not same") + gomega.Expect(actual.Image).To(gomega.Equal(expected.Image), "User Container Image is not same") + gomega.Expect(actual.Lifecycle).To(gomega.Equal(expected.Lifecycle), "User Container Lifecycle is not same") + gomega.Expect(actual.LivenessProbe).To(gomega.Equal(expected.LivenessProbe), "User Container LivenessProbe is not same") + gomega.Expect(actual.Ports).To(gomega.Equal(expected.Ports), "User Container Ports is not same") + gomega.Expect(actual.ReadinessProbe).To(gomega.Equal(expected.ReadinessProbe), "User Container ReadinessProbe is not same") + gomega.Expect(actual.ResizePolicy).To(gomega.Equal(expected.ResizePolicy), "User Container ResizePolicy is not same") + gomega.Expect(actual.StartupProbe).To(gomega.Equal(expected.StartupProbe), "User Container StartupProbe is not same") + gomega.Expect(actual.Stdin).To(gomega.Equal(expected.Stdin), "User Container Stdin is not same") + gomega.Expect(actual.StdinOnce).To(gomega.Equal(expected.StdinOnce), "User Container StdinOnce is not same") + gomega.Expect(actual.RestartPolicy).To(gomega.Equal(expected.RestartPolicy), "User Container RestartPolicy is not same") + gomega.Expect(actual.TerminationMessagePath).To(gomega.Equal(expected.TerminationMessagePath), "User Container TerminationMessagePath is not same") + gomega.Expect(actual.TTY).To(gomega.Equal(expected.TTY), "User Container TTY is not same") + gomega.Expect(actual.TerminationMessagePolicy).To(gomega.Equal(expected.TerminationMessagePolicy), "User Container TerminationMessagePolicy is not same") + gomega.Expect(actual.VolumeDevices).To(gomega.Equal(expected.VolumeDevices), "User Container VolumeDevices is not same") + gomega.Expect(actual.VolumeMounts).To(gomega.Equal(expected.VolumeMounts), "User Container VolumeMounts is not same") + gomega.Expect(actual.WorkingDir).To(gomega.Equal(expected.WorkingDir), "User Container WorkingDir is not same") + gomega.Expect(actual.Resources.Claims).To(gomega.Equal(expected.Resources.Claims), "User Container Resources Claims is not same") + MatchContainerResourceLimits(actual.Resources.Limits, expected.Resources.Limits) + MatchContainerResourceLimits(actual.Resources.Requests, expected.Resources.Requests) + } else { + gomega.Expect(actual).To(gomega.BeNil(), "User Container is expected to be nil") + } +} + +// MatchDAG - Match 2 DAG templates +func MatchDAG(actual *v1alpha1.DAGTemplate, expected *v1alpha1.DAGTemplate) { + if expected != nil { + gomega.Expect(actual.FailFast).To(gomega.Equal(expected.FailFast), "DAGTemplate FailFast is not same") + gomega.Expect(actual.Target).To(gomega.Equal(expected.Target), "DAGTemplate Target is not same") + // Iterate over the expected tasks and compare each against the actual task at the same index + for index, task := range expected.Tasks { + gomega.Expect(actual.Tasks[index].Name).To(gomega.Equal(task.Name), "DAGTemplate Task Name is not same") + gomega.Expect(actual.Tasks[index].Hooks).To(gomega.Equal(task.Hooks), "DAGTemplate Task Hooks is not same") + gomega.Expect(actual.Tasks[index].Template).To(gomega.Equal(task.Template), "DAGTemplate Task Template is not same") + gomega.Expect(actual.Tasks[index].Arguments).To(gomega.Equal(task.Arguments), "DAGTemplate Task Arguments is not same") + gomega.Expect(actual.Tasks[index].When).To(gomega.Equal(task.When), "DAGTemplate Task When is not same") + gomega.Expect(actual.Tasks[index].ContinueOn).To(gomega.Equal(task.ContinueOn), "DAGTemplate Task ContinueOn is not same") + gomega.Expect(actual.Tasks[index].Dependencies).To(gomega.Equal(task.Dependencies), "DAGTemplate Task Dependencies is not same") + gomega.Expect(actual.Tasks[index].Depends).To(gomega.Equal(task.Depends), "DAGTemplate Task Depends is not same") + gomega.Expect(actual.Tasks[index].Inline).To(gomega.Equal(task.Inline), "DAGTemplate Task Inline is not same") + gomega.Expect(actual.Tasks[index].TemplateRef).To(gomega.Equal(task.TemplateRef), "DAGTemplate Task TemplateRef is not same") + gomega.Expect(actual.Tasks[index].WithItems).To(gomega.Equal(task.WithItems), "DAGTemplate Task WithItems is not same") + gomega.Expect(actual.Tasks[index].WithParam).To(gomega.Equal(task.WithParam), "DAGTemplate Task WithParam is not same") + gomega.Expect(actual.Tasks[index].WithSequence).To(gomega.Equal(task.WithSequence), "DAGTemplate Task WithSequence is not same") + gomega.Expect(actual.Tasks[index].OnExit).To(gomega.Equal(task.OnExit), "DAGTemplate Task OnExit is not same") + } + } else { + gomega.Expect(actual).To(gomega.BeNil(), "DAGTemplate is expected to be nil") + } + +} + +// AreStringsSameWithoutOrder - checks if two strings contain the same characters, regardless of order. +func AreStringsSameWithoutOrder(s1, s2 string) bool { + // Convert strings to rune slices + r1 := []rune(s1) + r2 := []rune(s2) + + // Sort the rune slices + sort.Slice(r1, func(i, j int) bool { return r1[i] < r1[j] }) + sort.Slice(r2, func(i, j int) bool { return r2[i] < r2[j] }) + + // Compare the sorted slices + return reflect.DeepEqual(r1, r2) +} diff --git a/backend/test/compiler/utils/workflow_utils.go b/backend/test/compiler/utils/workflow_utils.go new file mode 100644 index 00000000000..1cb7a8ac5f6 --- /dev/null +++ b/backend/test/compiler/utils/workflow_utils.go @@ -0,0 +1,142 @@ +// Package utils defines custom utility methods for compiled workflows +/* +Copyright 2018-2023 The Kubeflow Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +package utils + +import ( + "encoding/json" + "fmt" + "os" + "slices" + + "google.golang.org/protobuf/encoding/protojson" + "sigs.k8s.io/yaml" + + "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + "github.com/kubeflow/pipelines/backend/src/v2/compiler/argocompiler" + "github.com/kubeflow/pipelines/backend/test/logger" + "github.com/kubeflow/pipelines/backend/test/testutil" + + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" + "github.com/onsi/ginkgo/v2" + "github.com/onsi/gomega" + v1 "k8s.io/api/core/v1" +) + +// LoadPipelineSpecsFromIR - Unmarshal a pipeline spec IR into a tuple of (pipelinespec.PipelineJob, pipelinespec.SinglePlatformSpec) +func LoadPipelineSpecsFromIR(pipelineIRFilePath string, cacheDisabled bool, defaultWorkspace *v1.PersistentVolumeClaimSpec) (*pipelinespec.PipelineJob, *pipelinespec.SinglePlatformSpec) { + pipelineSpecsFromFile := testutil.ParseFileToSpecs(pipelineIRFilePath, cacheDisabled, defaultWorkspace) + platformSpec := pipelineSpecsFromFile.PlatformSpec() + var singlePlatformSpec *pipelinespec.SinglePlatformSpec = nil + if platformSpec != nil { + singlePlatformSpec = platformSpec.Platforms["kubernetes"] + } + pipelineSpecMap := make(map[string]interface{}) + pipelineSpecBytes, marshallingError := protojson.Marshal(pipelineSpecsFromFile.PipelineSpec()) + gomega.Expect(marshallingError).NotTo(gomega.HaveOccurred(), "Failed to marshal pipeline spec") + err := json.Unmarshal(pipelineSpecBytes, &pipelineSpecMap) + gomega.Expect(err).NotTo(gomega.HaveOccurred(), "Failed to unmarshal pipeline spec into map") + pipelineSpecMapNew := make(map[string]interface{}) + pipelineSpecMapNew["pipelineSpec"] = pipelineSpecMap + pipelineSpecBytes, marshallingError = json.Marshal(pipelineSpecMapNew) + gomega.Expect(marshallingError).NotTo(gomega.HaveOccurred(), "Failed to marshal pipeline spec map") + pipelineJob := &pipelinespec.PipelineJob{} + err = protojson.Unmarshal(pipelineSpecBytes, pipelineJob) + gomega.Expect(err).NotTo(gomega.HaveOccurred(), fmt.Sprintf("Failed to unmarshal pipeline spec\n %s", string(pipelineSpecBytes))) + return pipelineJob, singlePlatformSpec +} + +// GetCompiledArgoWorkflow - Compile pipeline and platform specs into a workflow and return an instance of v1alpha1.Workflow +func GetCompiledArgoWorkflow(pipelineSpecs *pipelinespec.PipelineJob, platformSpec *pipelinespec.SinglePlatformSpec, compilerOptions *argocompiler.Options) *v1alpha1.Workflow { + ginkgo.GinkgoHelper() + logger.Log("Compiling Argo Workflow for provided pipeline job and platform spec") + compiledWorflow, err := argocompiler.Compile(pipelineSpecs, platformSpec, compilerOptions) + gomega.Expect(err).NotTo(gomega.HaveOccurred(), "Failed to compile Argo workflow") + return compiledWorflow +} + +// UnmarshallWorkflowYAML - Unmarshal a compiled workflow YAML file into a v1alpha1.Workflow object +func UnmarshallWorkflowYAML(filePath string) *v1alpha1.Workflow { + ginkgo.GinkgoHelper() + logger.Log("Unmarshalling Expected Workflow YAML") + workflowFromFileBytes, err := os.ReadFile(filePath) + gomega.Expect(err).NotTo(gomega.HaveOccurred(), "Failed to read workflow yaml file") + workflow := v1alpha1.Workflow{} + err = yaml.Unmarshal(workflowFromFileBytes, &workflow) + gomega.Expect(err).NotTo(gomega.HaveOccurred(), "Failed to unmarshal workflow") + logger.Log("Unmarshalled Expected Workflow YAML") + return &workflow +} + +// CreateCompiledWorkflowFile - Marshal a v1alpha1.Workflow into a YAML file and save the file to the path provided as 
`compiledWorkflowFilePath` +func CreateCompiledWorkflowFile(compiledWorflow *v1alpha1.Workflow, compiledWorkflowFilePath string) *os.File { + ginkgo.GinkgoHelper() + fileContents, err := yaml.Marshal(compiledWorflow) + gomega.Expect(err).NotTo(gomega.HaveOccurred()) + return testutil.CreateFile(compiledWorkflowFilePath, [][]byte{fileContents}) +} + +// ConfigureCacheSettings - Add/Remove cache_disabled args in the workflow +func ConfigureCacheSettings(workflow *v1alpha1.Workflow, remove bool) *v1alpha1.Workflow { + cacheDisabledArg := "--cache_disabled" + configuredWorkflow := workflow.DeepCopy() + for _, template := range configuredWorkflow.Spec.Templates { + if template.Container != nil { + if len(template.Container.Args) > 0 { + if remove { + // Remove cache_disabled arg if it exists + if slices.Contains(template.Container.Args, cacheDisabledArg) { + for index, arg := range template.Container.Args { + if arg == cacheDisabledArg { + template.Container.Args = append(template.Container.Args[:index], template.Container.Args[index+1:]...) + break + } + } + } + } else { + // Add cache_disabled arg if it doesn't exist and this is a driver container + if slices.Contains(template.Container.Args, "--run_id") && !slices.Contains(template.Container.Args, cacheDisabledArg) { + template.Container.Args = append(template.Container.Args, cacheDisabledArg) + } + } + } + for index, userContainer := range template.InitContainers { + if remove { + // Remove cache_disabled arg if it exists + if slices.Contains(userContainer.Args, cacheDisabledArg) { + for userArgsIndex, arg := range userContainer.Args { + if arg == cacheDisabledArg { + userContainer.Args = append(userContainer.Args[:userArgsIndex], userContainer.Args[userArgsIndex+1:]...) + break + } + } + } + } else { + // Add cache_disabled arg if it doesn't exist + if !slices.Contains(userContainer.Args, cacheDisabledArg) { + if len(userContainer.Args) > 0 { + userContainer.Args = append(userContainer.Args, cacheDisabledArg) + } else { + userContainer.Args = []string{cacheDisabledArg} + } + } + } + template.InitContainers[index].Args = userContainer.Args + } + } + } + return configuredWorkflow +} diff --git a/backend/test/config/flags.go b/backend/test/config/flags.go new file mode 100644 index 00000000000..7b4bff52454 --- /dev/null +++ b/backend/test/config/flags.go @@ -0,0 +1,50 @@ +// Copyright 2018-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package config + +import ( + "flag" + "fmt" +) + +var ( + Namespace = flag.String("namespace", "kubeflow", "The namespace Kubeflow Pipelines is deployed to") + RunProxyTests = flag.Bool("useProxy", false, "Whether to run the proxy tests") + UploadPipelinesWithKubernetes = flag.Bool("uploadPipelinesWithKubernetes", false, "Whether to upload pipelines via the Kubernetes API instead of the REST API") + MultiUserMode = flag.Bool("multiUserMode", false, "Whether the deployment is running in multi-user mode") + DefaultServiceAccountName = flag.String("serviceAccountName", "pipeline-runner", "Name of the API server service account in the default namespace") + UserServiceAccountName = flag.String("userServiceAccountName", "default-editor", "Name of the service account to use for authentication in the User Namespace") + APIScheme = flag.String("apiScheme", "http", "The scheme to use for connections to the API server") + APIHost = flag.String("apiHost", "localhost", "The hostname of the API server") + APIPort = flag.String("apiPort", "8888", "The port on which the API server is listening") + APIURL = flag.String("apiUrl", fmt.Sprintf("%s://%s:%s", *APIScheme, *APIHost, *APIPort), "The URL of the API server") + DisableTLSCheck = flag.Bool("disableTlsCheck", false, "Whether to skip verification of the server certificate chain and hostname.") + InClusterRun = flag.Bool("runInCluster", false, "Whether to run the tests from within the K8s cluster") + AuthToken = flag.String("authToken", "", "The default auth token that will be injected into all API requests") +) + +var ( + RepoName = flag.String("repoName", "kubeflow/pipelines", "The name of the repository") + PullNumber = flag.String("pullNumber", "", "The pull request number") + BranchName = flag.String("branchName", "master", "The branch name") +) + +var DebugMode = flag.Bool("debugMode", false, "Whether to enable debug mode. Debug mode will log more diagnostic messages.") +var PodLogLimit = flag.Int64("podLogLimit", 50000000, "Limit the size of retrieved pod logs to this value") + +var ( + KubeflowMode = flag.Bool("kubeflowMode", false, "Runs tests in full Kubeflow mode") + UserNamespace = flag.String("userNamespace", "kubeflow-user-example-com", "The namespace that will store the test resources in Kubeflow mode") +) diff --git a/backend/test/constants/test_features.go b/backend/test/constants/test_features.go new file mode 100644 index 00000000000..9a1614df3cc --- /dev/null +++ b/backend/test/constants/test_features.go @@ -0,0 +1,42 @@ +// Copyright 2021-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Package constants +package constants + +const ( + // E2eEssential - For pipelines that are essential for regression testing + E2eEssential string = "E2EEssential" + // E2eFailed - For expectedly failing pipelines + E2eFailed string = "E2EFailure" + // E2eCritical - For pipelines that verify the critical functionality of the system + E2eCritical string = "E2ECritical" + // E2eProxy - For pipeline that runs with a proxy + E2eProxy string = "E2EProxy" + + WorkflowCompiler string = "WorkflowCompiler" + WorkflowCompilerVisits string = "WorkflowCompilerVisits" + + APIServerTests string = "ApiServerTests" + + Experiment string = "Experiment" + Pipeline string = "Pipeline" + PipelineRun string = "PipelineRun" + PipelineScheduledRun string = "PipelineRecurringRun" + PipelineUpload string = "PipelineUpload" + ReportTests string = "Report" + + UpgradePreparation string = "UpgradePreparation" + UpgradeVerification string = "UpgradeVerification" +) diff --git a/backend/test/constants/test_file_constants.go b/backend/test/constants/test_file_constants.go new file mode 100644 index 00000000000..97cbaf70db1 --- /dev/null +++ b/backend/test/constants/test_file_constants.go @@ -0,0 +1,24 @@ +// Copyright 2021-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package constants +package constants + +const ( + ParentDirectory string = "backend" + TestDataDir string = "test_data" + PipelineFilesDir string = "sdk_compiled_pipelines" + ValidPipelineFilesDir string = "valid" + CompiledPipelineFilesDir string = "compiled-workflows" +) diff --git a/backend/test/constants/test_type.go b/backend/test/constants/test_type.go new file mode 100644 index 00000000000..ac14521b60a --- /dev/null +++ b/backend/test/constants/test_type.go @@ -0,0 +1,25 @@ +// Copyright 2021-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package constants +package constants + +const ( + POSITIVE string = "Positive" + NEGATIVE string = "Negative" + + SMOKE string = "Smoke" + CriticalOnly string = "CriticalOnly" + FullRegression string = "FullRegression" +) diff --git a/backend/test/end2end/e2e_suite_test.go b/backend/test/end2end/e2e_suite_test.go new file mode 100644 index 00000000000..c3ca89e3319 --- /dev/null +++ b/backend/test/end2end/e2e_suite_test.go @@ -0,0 +1,159 @@ +// Copyright 2021-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package end2end + +import ( + "fmt" + "log" + "os" + "path/filepath" + "strconv" + "testing" + "time" + + apiserver "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v2" + "github.com/kubeflow/pipelines/backend/test/config" + "github.com/kubeflow/pipelines/backend/test/logger" + "github.com/kubeflow/pipelines/backend/test/testutil" + "github.com/kubeflow/pipelines/backend/test/v2" + + . "github.com/onsi/ginkgo/v2" + "github.com/onsi/ginkgo/v2/types" + . "github.com/onsi/gomega" + "k8s.io/client-go/kubernetes" +) + +var randomName string +var experimentID *string = nil +var userToken string + +const maxPipelineWaitTime = 900 // In Seconds + +var ( + pipelineUploadClient apiserver.PipelineUploadInterface + pipelineClient *apiserver.PipelineClient + runClient *apiserver.RunClient + experimentClient *apiserver.ExperimentClient + k8Client *kubernetes.Clientset +) + +// Test Reporting Variables +var ( + testLogsDirectory = "logs" + testReportDirectory = "reports" + junitReportFilename = "junit.xml" + jsonReportFilename = "e2e.json" +) + +var _ = BeforeSuite(func() { + err := os.MkdirAll(testLogsDirectory, 0755) + Expect(err).NotTo(HaveOccurred(), fmt.Sprintf("Error creating Logs Directory: %s", testLogsDirectory)) + err = os.MkdirAll(testReportDirectory, 0755) + Expect(err).NotTo(HaveOccurred(), fmt.Sprintf("Error creating Reports Directory: %s", testReportDirectory)) + var newPipelineClient func() (*apiserver.PipelineClient, error) + var newRunClient func() (*apiserver.RunClient, error) + var newExperimentClient func() (*apiserver.ExperimentClient, error) + clientConfig := testutil.GetClientConfig(*config.Namespace) + k8Client, err = testutil.CreateK8sClient() + Expect(err).To(BeNil(), "Failed to initialize K8s client") + + if *config.KubeflowMode { + logger.Log("Creating API Clients for Kubeflow Mode") + newPipelineClient = func() (*apiserver.PipelineClient, error) { + return apiserver.NewKubeflowInClusterPipelineClient(*config.Namespace, *config.DebugMode) + } + newExperimentClient = func() (*apiserver.ExperimentClient, error) { + return apiserver.NewKubeflowInClusterExperimentClient(*config.Namespace, *config.DebugMode) + } + newRunClient = func() (*apiserver.RunClient, error) { + return apiserver.NewKubeflowInClusterRunClient(*config.Namespace, *config.DebugMode) + } + } else if *config.MultiUserMode || *config.AuthToken != "" { + if *config.AuthToken != "" { + logger.Log("Creating API Clients With Auth Token") + userToken = *config.AuthToken + } else { + logger.Log("Creating API Clients for Multi User Mode") + userToken = testutil.CreateUserToken(k8Client, *config.UserNamespace, *config.UserServiceAccountName) + } + newPipelineClient = func() (*apiserver.PipelineClient, error) { + return apiserver.NewMultiUserPipelineClient(clientConfig, userToken, *config.DebugMode) + } + newExperimentClient = func() (*apiserver.ExperimentClient, error) { + return apiserver.NewMultiUserExperimentClient(clientConfig, userToken, *config.DebugMode) + } + newRunClient = func() (*apiserver.RunClient, error) { + return apiserver.NewMultiUserRunClient(clientConfig, 
userToken, *config.DebugMode) + } + } else { + logger.Log("Creating API Clients for Single User Mode") + newPipelineClient = func() (*apiserver.PipelineClient, error) { + return apiserver.NewPipelineClient(clientConfig, *config.DebugMode) + } + newExperimentClient = func() (*apiserver.ExperimentClient, error) { + return apiserver.NewExperimentClient(clientConfig, *config.DebugMode) + } + newRunClient = func() (*apiserver.RunClient, error) { + return apiserver.NewRunClient(clientConfig, *config.DebugMode) + } + } + + pipelineUploadClient, err = test.GetPipelineUploadClient( + *config.UploadPipelinesWithKubernetes, + *config.KubeflowMode, + *config.DebugMode, + *config.Namespace, + clientConfig, + ) + + Expect(err).To(BeNil(), "Failed to get Pipeline Upload Client") + pipelineClient, err = newPipelineClient() + Expect(err).To(BeNil(), "Failed to get Pipeline Client") + experimentClient, err = newExperimentClient() + Expect(err).To(BeNil(), "Failed to get Experiment client") + runClient, err = newRunClient() + Expect(err).To(BeNil(), "Failed to get Pipeline Run client") +}) + +var _ = BeforeEach(func() { + + // Create Experiment so that we can use it to associate pipeline runs with + experimentName := fmt.Sprintf("E2EExperiment-%s", strconv.FormatInt(time.Now().UnixNano(), 10)) + experiment := testutil.CreateExperiment(experimentClient, experimentName, testutil.GetNamespace()) + experimentID = &experiment.ExperimentID +}) + +var _ = ReportAfterEach(func(specReport types.SpecReport) { + if specReport.Failed() { + logger.Log("Test failed... Capturing logs") + AddReportEntry("Test Log", specReport.CapturedGinkgoWriterOutput) + currentDir, err := os.Getwd() + Expect(err).NotTo(HaveOccurred(), "Failed to get current directory") + testutil.WriteLogFile(specReport, GinkgoT().Name(), filepath.Join(currentDir, testLogsDirectory)) + } else { + log.Printf("Test passed") + } +}) + +func TestAPIs(t *testing.T) { + RegisterFailHandler(Fail) + suiteConfigE2E, reporterConfigE2E := GinkgoConfiguration() + suiteConfigE2E.FailFast = false + reporterConfigE2E.ForceNewlines = true + reporterConfigE2E.SilenceSkips = true + reporterConfigE2E.JUnitReport = filepath.Join(testReportDirectory, junitReportFilename) + reporterConfigE2E.JSONReport = filepath.Join(testReportDirectory, jsonReportFilename) + RunSpecs(t, "E2E Tests Suite", suiteConfigE2E, reporterConfigE2E) +} diff --git a/backend/test/end2end/pipeline_e2e_test.go b/backend/test/end2end/pipeline_e2e_test.go new file mode 100644 index 00000000000..c4ba782bc64 --- /dev/null +++ b/backend/test/end2end/pipeline_e2e_test.go @@ -0,0 +1,222 @@ +// Copyright 2018-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package end2end + +import ( + "fmt" + "path/filepath" + "strconv" + "time" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" + upload_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" + workflowutils "github.com/kubeflow/pipelines/backend/test/compiler/utils" + "github.com/kubeflow/pipelines/backend/test/config" + . "github.com/kubeflow/pipelines/backend/test/constants" + e2e_utils "github.com/kubeflow/pipelines/backend/test/end2end/utils" + "github.com/kubeflow/pipelines/backend/test/logger" + "github.com/kubeflow/pipelines/backend/test/testutil" + apitests "github.com/kubeflow/pipelines/backend/test/v2/api" + + "github.com/go-openapi/strfmt" + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" + v1 "k8s.io/api/core/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +var _ = Describe("Upload and Verify Pipeline Run >", Label(FullRegression), func() { + var testContext *apitests.TestContext + + // ################## SET AND TEARDOWN ################## + + BeforeEach(func() { + logger.Log("################### Setup before each Pipeline Upload test #####################") + logger.Log("################### Global Setup before each test #####################") + testContext = &apitests.TestContext{ + TestStartTimeUTC: time.Now(), + } + logger.Log("Test Context: %p", testContext) + randomName = strconv.FormatInt(time.Now().UnixNano(), 10) + testContext.Pipeline.UploadParams = upload_params.NewUploadPipelineParams() + testContext.Pipeline.PipelineGeneratedName = "e2e_test-" + randomName + testContext.Pipeline.CreatedPipelines = make([]*pipeline_upload_model.V2beta1Pipeline, 0) + testContext.PipelineRun.CreatedRunIds = make([]string, 0) + testContext.Pipeline.ExpectedPipeline = new(pipeline_upload_model.V2beta1Pipeline) + testContext.Pipeline.ExpectedPipeline.CreatedAt = strfmt.DateTime(testContext.TestStartTimeUTC) + var secrets []*v1.Secret + secret1 := &v1.Secret{ + ObjectMeta: metav1.ObjectMeta{ + Name: "test-secret-1", + Namespace: testutil.GetNamespace()}, + Data: map[string][]byte{ + "username": []byte("user1"), + }, + Type: v1.SecretTypeOpaque, + } + secret2 := &v1.Secret{ + ObjectMeta: metav1.ObjectMeta{ + Name: "test-secret-2", + Namespace: testutil.GetNamespace()}, + Data: map[string][]byte{ + "password": []byte("psw1"), + }, + Type: v1.SecretTypeOpaque, + } + secret3 := &v1.Secret{ + ObjectMeta: metav1.ObjectMeta{ + Name: "test-secret-3", + Namespace: testutil.GetNamespace()}, + Data: map[string][]byte{ + "password": []byte("psw2"), + }, + Type: v1.SecretTypeOpaque, + } + secrets = append(secrets, secret1, secret2, secret3) + for _, secret := range secrets { + testutil.CreateSecret(k8Client, testutil.GetNamespace(), secret) + } + }) + + AfterEach(func() { + + // Delete pipelines created during the test + logger.Log("################### Global Cleanup after each test #####################") + + logger.Log("Deleting %d run(s)", len(testContext.PipelineRun.CreatedRunIds)) + for _, runID := range testContext.PipelineRun.CreatedRunIds { + runID := runID + testutil.TerminatePipelineRun(runClient, runID) + testutil.ArchivePipelineRun(runClient, runID) + testutil.DeletePipelineRun(runClient, runID) + } + logger.Log("Deleting %d experiment(s)", len(testContext.Experiment.CreatedExperimentIds)) + if 
len(testContext.Experiment.CreatedExperimentIds) > 0 { + for _, experimentID := range testContext.Experiment.CreatedExperimentIds { + experimentID := experimentID + testutil.DeleteExperiment(experimentClient, experimentID) + } + } + logger.Log("Deleting %d pipeline(s)", len(testContext.Pipeline.CreatedPipelines)) + for _, pipeline := range testContext.Pipeline.CreatedPipelines { + pipelineID := pipeline.PipelineID + testutil.DeletePipeline(pipelineClient, pipelineID) + } + }) + + // ################## TESTS ################## + + Context("Upload a pipeline file, run it and verify that pipeline run succeeds >", Label(E2eEssential), func() { + var pipelineDir = "valid/essential" + pipelineFiles := testutil.GetListOfFilesInADir(filepath.Join(testutil.GetPipelineFilesDir(), pipelineDir)) + for _, pipelineFile := range pipelineFiles { + It(fmt.Sprintf("Upload %s pipeline", pipelineFile), func() { + testutil.CheckIfSkipping(pipelineFile) + pipelineFilePath := filepath.Join(testutil.GetPipelineFilesDir(), pipelineDir, pipelineFile) + logger.Log("Uploading pipeline file %s", pipelineFile) + uploadedPipeline, uploadErr := testutil.UploadPipeline(pipelineUploadClient, pipelineFilePath, &testContext.Pipeline.PipelineGeneratedName, nil) + Expect(uploadErr).To(BeNil(), "Failed to upload pipeline %s", pipelineFile) + testContext.Pipeline.CreatedPipelines = append(testContext.Pipeline.CreatedPipelines, uploadedPipeline) + logger.Log("Upload of pipeline file '%s' successful", pipelineFile) + uploadedPipelineVersion := testutil.GetLatestPipelineVersion(pipelineClient, &uploadedPipeline.PipelineID) + pipelineRuntimeInputs := testutil.GetPipelineRunTimeInputs(pipelineFilePath) + createdRunID := e2e_utils.CreatePipelineRunAndWaitForItToFinish(runClient, testContext, uploadedPipeline.PipelineID, uploadedPipeline.DisplayName, &uploadedPipelineVersion.PipelineVersionID, experimentID, pipelineRuntimeInputs, maxPipelineWaitTime) + logger.Log("Deserializing expected compiled workflow file '%s' for the pipeline", pipelineFile) + compiledWorkflow := workflowutils.UnmarshallWorkflowYAML(filepath.Join(testutil.GetCompiledWorkflowsFilesDir(), pipelineFile)) + e2e_utils.ValidateComponentStatuses(runClient, k8Client, testContext, createdRunID, compiledWorkflow) + }) + } + }) + + // Few of the following pipelines randomly fail in Multi User Mode during CI run - which is why a FlakeAttempt is added, but we need to investigate, create ticket and fix it in the future + Context("Upload a pipeline file, run it and verify that pipeline run succeeds >", FlakeAttempts(2), Label("Sample", E2eCritical), func() { + var pipelineDir = "valid/critical" + pipelineFiles := testutil.GetListOfFilesInADir(filepath.Join(testutil.GetPipelineFilesDir(), pipelineDir)) + for _, pipelineFile := range pipelineFiles { + It(fmt.Sprintf("Upload %s pipeline", pipelineFile), FlakeAttempts(2), func() { + testutil.CheckIfSkipping(pipelineFile) + pipelineFilePath := filepath.Join(testutil.GetPipelineFilesDir(), pipelineDir, pipelineFile) + logger.Log("Uploading pipeline file %s", pipelineFile) + uploadedPipeline, uploadErr := testutil.UploadPipeline(pipelineUploadClient, pipelineFilePath, &testContext.Pipeline.PipelineGeneratedName, nil) + Expect(uploadErr).To(BeNil(), "Failed to upload pipeline %s", pipelineFile) + testContext.Pipeline.CreatedPipelines = append(testContext.Pipeline.CreatedPipelines, uploadedPipeline) + logger.Log("Upload of pipeline file '%s' successful", pipelineFile) + uploadedPipelineVersion := 
testutil.GetLatestPipelineVersion(pipelineClient, &uploadedPipeline.PipelineID) + pipelineRuntimeInputs := testutil.GetPipelineRunTimeInputs(pipelineFilePath) + createdRunID := e2e_utils.CreatePipelineRunAndWaitForItToFinish(runClient, testContext, uploadedPipeline.PipelineID, uploadedPipeline.DisplayName, &uploadedPipelineVersion.PipelineVersionID, experimentID, pipelineRuntimeInputs, maxPipelineWaitTime) + logger.Log("Deserializing expected compiled workflow file '%s' for the pipeline", pipelineFile) + compiledWorkflow := workflowutils.UnmarshallWorkflowYAML(filepath.Join(testutil.GetCompiledWorkflowsFilesDir(), pipelineFile)) + e2e_utils.ValidateComponentStatuses(runClient, k8Client, testContext, createdRunID, compiledWorkflow) + + }) + } + }) + + Context("Create a pipeline run with HTTP proxy >", Label(E2eProxy), func() { + var pipelineDir = "valid" + pipelineFile := "env-var.yaml" + It(fmt.Sprintf("Create a pipeline run with http proxy, using specs: %s", pipelineFile), func() { + pipelineFilePath := filepath.Join(testutil.GetPipelineFilesDir(), pipelineDir, pipelineFile) + uploadedPipeline, uploadErr := testutil.UploadPipeline(pipelineUploadClient, pipelineFilePath, &testContext.Pipeline.PipelineGeneratedName, nil) + Expect(uploadErr).To(BeNil(), "Failed to upload pipeline %s", pipelineFile) + testContext.Pipeline.CreatedPipelines = append(testContext.Pipeline.CreatedPipelines, uploadedPipeline) + createdPipelineVersion := testutil.GetLatestPipelineVersion(pipelineClient, &uploadedPipeline.PipelineID) + createdExperiment := testutil.CreateExperimentWithParams(experimentClient, &experiment_model.V2beta1Experiment{ + DisplayName: "ProxyTest-" + randomName, + Namespace: testutil.GetNamespace(), + }) + testContext.Experiment.CreatedExperimentIds = append(testContext.Experiment.CreatedExperimentIds, createdExperiment.ExperimentID) + pipelineRuntimeInputs := map[string]interface{}{ + "env_var": "http_proxy", + } + createdRunID := e2e_utils.CreatePipelineRunAndWaitForItToFinish(runClient, testContext, uploadedPipeline.PipelineID, uploadedPipeline.DisplayName, &createdPipelineVersion.PipelineVersionID, &createdExperiment.ExperimentID, pipelineRuntimeInputs, maxPipelineWaitTime) + if *config.RunProxyTests { + logger.Log("Deserializing expected compiled workflow file '%s' for the pipeline", pipelineFile) + compiledWorkflow := workflowutils.UnmarshallWorkflowYAML(filepath.Join(testutil.GetCompiledWorkflowsFilesDir(), pipelineFile)) + e2e_utils.ValidateComponentStatuses(runClient, k8Client, testContext, createdRunID, compiledWorkflow) + } else { + runState := testutil.GetPipelineRun(runClient, &createdRunID).State + expectedRunState := run_model.V2beta1RuntimeStateFAILED + Expect(runState).To(Equal(&expectedRunState), fmt.Sprintf("Expected run with id=%s to fail with proxy=false", createdRunID)) + } + }) + }) + + Context("Upload a pipeline file, run it and verify that pipeline run fails >", Label(E2eFailed), func() { + var pipelineDir = "valid/failing" + pipelineFiles := testutil.GetListOfFilesInADir(filepath.Join(testutil.GetPipelineFilesDir(), pipelineDir)) + for _, pipelineFile := range pipelineFiles { + It(fmt.Sprintf("Upload %s pipeline", pipelineFile), func() { + testutil.CheckIfSkipping(pipelineFile) + pipelineFilePath := filepath.Join(testutil.GetPipelineFilesDir(), pipelineDir, pipelineFile) + logger.Log("Uploading pipeline file %s", pipelineFile) + uploadedPipeline, uploadErr := testutil.UploadPipeline(pipelineUploadClient, pipelineFilePath, &testContext.Pipeline.PipelineGeneratedName, 
nil) + Expect(uploadErr).To(BeNil(), "Failed to upload pipeline %s", pipelineFile) + testContext.Pipeline.CreatedPipelines = append(testContext.Pipeline.CreatedPipelines, uploadedPipeline) + logger.Log("Upload of pipeline file '%s' successful", pipelineFile) + uploadedPipelineVersion := testutil.GetLatestPipelineVersion(pipelineClient, &uploadedPipeline.PipelineID) + pipelineRuntimeInputs := testutil.GetPipelineRunTimeInputs(pipelineFilePath) + createdRunID := e2e_utils.CreatePipelineRunAndWaitForItToFinish(runClient, testContext, uploadedPipeline.PipelineID, uploadedPipeline.DisplayName, &uploadedPipelineVersion.PipelineVersionID, experimentID, pipelineRuntimeInputs, maxPipelineWaitTime) + logger.Log("Fetching updated pipeline run details for run with id=%s", createdRunID) + updatedRun := testutil.GetPipelineRun(runClient, &createdRunID) + Expect(updatedRun.State).NotTo(BeNil(), "Updated pipeline run state is Nil") + Expect(*updatedRun.State).To(Equal(run_model.V2beta1RuntimeStateFAILED), "Pipeline run was expected to fail, but is "+*updatedRun.State) + + }) + } + }) +}) diff --git a/backend/test/end2end/utils/e2e_utils.go b/backend/test/end2end/utils/e2e_utils.go new file mode 100644 index 00000000000..f5ae4898807 --- /dev/null +++ b/backend/test/end2end/utils/e2e_utils.go @@ -0,0 +1,177 @@ +// Package utils provides helpers shared across end-to-end tests. +package utils + +import ( + "fmt" + "maps" + "sort" + "time" + + runparams "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" + apiserver "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v2" + "github.com/kubeflow/pipelines/backend/test/config" + "github.com/kubeflow/pipelines/backend/test/logger" + "github.com/kubeflow/pipelines/backend/test/testutil" + apitests "github.com/kubeflow/pipelines/backend/test/v2/api" + + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" + "github.com/onsi/ginkgo/v2" + "github.com/onsi/gomega" + v1 "k8s.io/api/core/v1" + "k8s.io/client-go/kubernetes" +) + +// CreatePipelineRun - Create a pipeline run +func CreatePipelineRun(runClient *apiserver.RunClient, testContext *apitests.TestContext, pipelineID *string, pipelineVersionID *string, experimentID *string, inputParams map[string]interface{}) *run_model.V2beta1Run { + runName := fmt.Sprintf("E2e Test Run-%v", testContext.TestStartTimeUTC) + runDescription := fmt.Sprintf("Run for %s", runName) + logger.Log("Create a pipeline run for pipeline with id=%s and versionId=%s", *pipelineID, *pipelineVersionID) + createRunRequest := &runparams.RunServiceCreateRunParams{ + ExperimentID: experimentID, + Run: CreatePipelineRunPayload(runName, runDescription, pipelineID, pipelineVersionID, experimentID, inputParams), + } + createdRun, createRunError := runClient.Create(createRunRequest) + gomega.Expect(createRunError).NotTo(gomega.HaveOccurred(), "Failed to create run for pipeline with id="+*pipelineID) + testContext.PipelineRun.CreatedRunIds = append(testContext.PipelineRun.CreatedRunIds, createdRun.RunID) + logger.Log("Created Pipeline Run successfully with runId=%s", createdRun.RunID) + return createdRun +} + +// CreatePipelineRunPayload - Create a pipeline run payload +func CreatePipelineRunPayload(runName string, runDescription string, pipelineID *string, pipelineVersionID *string, experimentID *string, inputParams map[string]interface{}) *run_model.V2beta1Run { + logger.Log("Create a pipeline run body") + 
return &run_model.V2beta1Run{ + DisplayName: runName, + Description: runDescription, + ExperimentID: testutil.ParsePointersToString(experimentID), + ServiceAccount: testutil.GetDefaultPipelineRunnerServiceAccount(), + PipelineVersionReference: &run_model.V2beta1PipelineVersionReference{ + PipelineID: testutil.ParsePointersToString(pipelineID), + PipelineVersionID: testutil.ParsePointersToString(pipelineVersionID), + }, + RuntimeConfig: &run_model.V2beta1RuntimeConfig{ + Parameters: inputParams, + }, + } +} + +// CreatePipelineRunAndWaitForItToFinish - Create a pipeline run and wait for it to complete +func CreatePipelineRunAndWaitForItToFinish(runClient *apiserver.RunClient, testContext *apitests.TestContext, pipelineID string, pipelineDisplayName string, pipelineVersionID *string, experimentID *string, runTimeParams map[string]interface{}, maxPipelineWaitTime int) string { + logger.Log("Create run for pipeline with id: '%s' and name: '%s'", pipelineID, pipelineDisplayName) + uploadedPipelineRun := CreatePipelineRun(runClient, testContext, &pipelineID, pipelineVersionID, experimentID, runTimeParams) + logger.Log("Created Pipeline Run with id: %s for pipeline with id: %s", uploadedPipelineRun.RunID, pipelineID) + timeout := time.Duration(maxPipelineWaitTime) + testutil.WaitForRunToBeInState(runClient, &uploadedPipelineRun.RunID, []run_model.V2beta1RuntimeState{run_model.V2beta1RuntimeStateSUCCEEDED, run_model.V2beta1RuntimeStateSKIPPED, run_model.V2beta1RuntimeStateFAILED, run_model.V2beta1RuntimeStateCANCELED}, &timeout) + return uploadedPipelineRun.RunID +} + +// ValidateComponentStatuses - Validate that all the components of a pipeline run ran successfully +func ValidateComponentStatuses(runClient *apiserver.RunClient, k8Client *kubernetes.Clientset, testContext *apitests.TestContext, runID string, compiledWorkflow *v1alpha1.Workflow) { + logger.Log("Fetching updated pipeline run details for run with id=%s", runID) + updatedRun := testutil.GetPipelineRun(runClient, &runID) + actualTaskDetails := updatedRun.RunDetails.TaskDetails + logger.Log("Updated pipeline run details") + expectedTaskDetails := GetTasksFromWorkflow(compiledWorkflow) + if *updatedRun.State != run_model.V2beta1RuntimeStateSUCCEEDED { + logger.Log("Looks like the run %s FAILED, so capture pod logs for the failed task", runID) + CapturePodLogsForUnsuccessfulTasks(k8Client, testContext, actualTaskDetails) + ginkgo.Fail("Failing test because the pipeline run was not SUCCESSFUL") + } else { + logger.Log("Pipeline run succeeded, checking that the number of tasks is what is expected") + gomega.Expect(len(actualTaskDetails)).To(gomega.BeNumerically(">=", len(expectedTaskDetails)), "Number of created DAG tasks should be >= number of expected tasks") + } + +} + +// CapturePodLogsForUnsuccessfulTasks - Capture pod logs of a failed component +func CapturePodLogsForUnsuccessfulTasks(k8Client *kubernetes.Clientset, testContext *apitests.TestContext, taskDetails []*run_model.V2beta1PipelineTaskDetail) { + failedTasks := make(map[string]string) + sort.Slice(taskDetails, func(i, j int) bool { + return time.Time(taskDetails[i].EndTime).After(time.Time(taskDetails[j].EndTime)) // Sort Tasks by End Time in descending order + }) + for _, task := range taskDetails { + if task.State != nil { + switch *task.State { + case run_model.V2beta1RuntimeStateSUCCEEDED: + { + logger.Log("SUCCEEDED - Task %s for run %s has finished successfully", task.DisplayName, task.RunID) + } + case run_model.V2beta1RuntimeStateRUNNING: + { + logger.Log("RUNNING - Task 
%s for Run %s is running", task.DisplayName, task.RunID) + + } + case run_model.V2beta1RuntimeStateSKIPPED: + { + logger.Log("SKIPPED - Task %s for Run %s skipped", task.DisplayName, task.RunID) + } + case run_model.V2beta1RuntimeStateCANCELED: + { + logger.Log("CANCELED - Task %s for Run %s canceled", task.DisplayName, task.RunID) + } + case run_model.V2beta1RuntimeStateFAILED: + { + logger.Log("%s - Task %s for Run %s did not complete successfully", *task.State, task.DisplayName, task.RunID) + for _, childTask := range task.ChildTasks { + podName := childTask.PodName + if podName != "" { + logger.Log("Capturing pod logs for task %s, with pod name %s", task.DisplayName, podName) + podLog := testutil.ReadPodLogs(k8Client, *config.Namespace, podName, nil, &testContext.TestStartTimeUTC, config.PodLogLimit) + logger.Log("Pod logs captured for task %s in pod %s", task.DisplayName, podName) + logger.Log("Attaching pod logs to the report") + ginkgo.AddReportEntry(fmt.Sprintf("Failing '%s' Component Log", task.DisplayName), podLog) + logger.Log("Attached pod logs to the report") + } + } + failedTasks[task.DisplayName] = string(*task.State) + } + default: + { + logger.Log("UNKNOWN state - Task %s for Run %s has an UNKNOWN state", task.DisplayName, task.RunID) + } + } + } + } + if len(failedTasks) > 0 { + logger.Log("Found failed tasks: %v", maps.Keys(failedTasks)) + } +} + +type TaskDetails struct { + TaskName string + Task v1alpha1.DAGTask + Container v1.Container + DependsOn string +} + +// GetTasksFromWorkflow - Get tasks from a compiled workflow +func GetTasksFromWorkflow(workflow *v1alpha1.Workflow) []TaskDetails { + var containers = make(map[string]*v1.Container) + var tasks []TaskDetails + for _, template := range workflow.Spec.Templates { + if template.Container != nil { + containers[template.Name] = template.Container + } + } + for _, template := range workflow.Spec.Templates { + if template.DAG != nil { + for _, task := range template.DAG.Tasks { + if task.When == "" { + continue + } + container, containerExists := containers[task.Template] + taskToAppend := TaskDetails{ + TaskName: task.Name, + Task: task, + DependsOn: task.Depends, + } + if containerExists { + taskToAppend.Container = *container + } + tasks = append(tasks, taskToAppend) + } + } + } + return tasks +} diff --git a/backend/test/initialization/flags.go b/backend/test/initialization/flags.go index 3325bb6a0b9..938b73e6cf8 100644 --- a/backend/test/initialization/flags.go +++ b/backend/test/initialization/flags.go @@ -20,7 +20,6 @@ import ( ) var ( - namespace = flag.String("namespace", "kubeflow", "The namespace ml pipeline deployed to") initializeTimeout = flag.Duration("initializeTimeout", 2*time.Minute, "Duration to wait for test initialization") runIntegrationTests = flag.Bool("runIntegrationTests", false, "Whether to also run integration tests that call the service") ) diff --git a/backend/test/initialization/initialization_test.go b/backend/test/initialization/initialization_test.go index cd92fa5dab7..98e26f58e4f 100644 --- a/backend/test/initialization/initialization_test.go +++ b/backend/test/initialization/initialization_test.go @@ -17,10 +17,12 @@ package initialization import ( "testing" - "github.com/golang/glog" params "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_client/experiment_service" api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v1" "github.com/kubeflow/pipelines/backend/test" + "github.com/kubeflow/pipelines/backend/test/config" + + 
"github.com/golang/glog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" ) @@ -42,8 +44,8 @@ func (s *InitializationTest) SetupTest() { if err != nil { glog.Exitf("Failed to initialize test. Error: %v", err) } - s.namespace = *namespace - clientConfig := test.GetClientConfig(*namespace) + s.namespace = *config.Namespace + clientConfig := test.GetClientConfig(*config.Namespace) s.experimentClient, err = api_server.NewExperimentClient(clientConfig, false) if err != nil { glog.Exitf("Failed to get experiment client. Error: %v", err) diff --git a/backend/test/integration/db_test.go b/backend/test/integration/db_test.go index 76d34b322c2..28ea89a6117 100644 --- a/backend/test/integration/db_test.go +++ b/backend/test/integration/db_test.go @@ -18,12 +18,12 @@ import ( "testing" "time" + cm "github.com/kubeflow/pipelines/backend/src/apiserver/client_manager" + + _ "github.com/jackc/pgx/v5/stdlib" "github.com/spf13/viper" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" - - _ "github.com/jackc/pgx/v5/stdlib" - cm "github.com/kubeflow/pipelines/backend/src/apiserver/client_manager" ) type DBTestSuite struct { diff --git a/backend/test/integration/experiment_api_test.go b/backend/test/integration/experiment_api_test.go index c0f4ae7dd5b..22c2d8175e0 100644 --- a/backend/test/integration/experiment_api_test.go +++ b/backend/test/integration/experiment_api_test.go @@ -18,7 +18,6 @@ import ( "testing" "time" - "github.com/golang/glog" params "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_client/experiment_service" "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_model" jobParams "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/job_client/job_service" @@ -29,6 +28,9 @@ import ( api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v1" "github.com/kubeflow/pipelines/backend/src/common/util" "github.com/kubeflow/pipelines/backend/test" + "github.com/kubeflow/pipelines/backend/test/config" + + "github.com/golang/glog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" ) @@ -58,7 +60,7 @@ func (s *ExperimentApiTest) SetupTest() { } } - s.namespace = *namespace + s.namespace = *config.Namespace var newExperimentClient func() (*api_server.ExperimentClient, error) var newPipelineUploadClient func() (*api_server.PipelineUploadClient, error) @@ -70,37 +72,37 @@ func (s *ExperimentApiTest) SetupTest() { s.resourceNamespace = *resourceNamespace newExperimentClient = func() (*api_server.ExperimentClient, error) { - return api_server.NewKubeflowInClusterExperimentClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterExperimentClient(s.namespace, *config.DebugMode) } newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewKubeflowInClusterPipelineUploadClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterPipelineUploadClient(s.namespace, *config.DebugMode) } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *config.DebugMode) } newRunClient = func() (*api_server.RunClient, error) { - return api_server.NewKubeflowInClusterRunClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterRunClient(s.namespace, *config.DebugMode) } newJobClient = func() (*api_server.JobClient, error) { - return 
api_server.NewKubeflowInClusterJobClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterJobClient(s.namespace, *config.DebugMode) } } else { - clientConfig := test.GetClientConfig(*namespace) + clientConfig := test.GetClientConfig(*config.Namespace) newExperimentClient = func() (*api_server.ExperimentClient, error) { - return api_server.NewExperimentClient(clientConfig, *isDebugMode) + return api_server.NewExperimentClient(clientConfig, *config.DebugMode) } newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewPipelineUploadClient(clientConfig, *isDebugMode) + return api_server.NewPipelineUploadClient(clientConfig, *config.DebugMode) } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewPipelineClient(clientConfig, *isDebugMode) + return api_server.NewPipelineClient(clientConfig, *config.DebugMode) } newRunClient = func() (*api_server.RunClient, error) { - return api_server.NewRunClient(clientConfig, *isDebugMode) + return api_server.NewRunClient(clientConfig, *config.DebugMode) } newJobClient = func() (*api_server.JobClient, error) { - return api_server.NewJobClient(clientConfig, *isDebugMode) + return api_server.NewJobClient(clientConfig, *config.DebugMode) } } @@ -143,7 +145,7 @@ func (s *ExperimentApiTest) TestExperimentAPI() { expectedTrainingExperiment := test.GetExperiment("training", "my first experiment", s.resourceNamespace) trainingExperiment, err := s.experimentClient.Create(¶ms.ExperimentServiceCreateExperimentV1Params{ - Body: experiment, + Experiment: experiment, }) assert.Nil(t, err) assert.True(t, test.VerifyExperimentResourceReferences(trainingExperiment.ResourceReferences, expectedTrainingExperiment.ResourceReferences)) @@ -151,11 +153,11 @@ func (s *ExperimentApiTest) TestExperimentAPI() { expectedTrainingExperiment.ID = trainingExperiment.ID expectedTrainingExperiment.CreatedAt = trainingExperiment.CreatedAt - expectedTrainingExperiment.StorageState = "STORAGESTATE_AVAILABLE" + expectedTrainingExperiment.StorageState = (*experiment_model.APIExperimentStorageState)(util.StringPointer("STORAGESTATE_AVAILABLE")) assert.Equal(t, expectedTrainingExperiment, trainingExperiment) /* ---------- Create an experiment with same name. 
Should fail due to name uniqueness ---------- */ - _, err = s.experimentClient.Create(¶ms.ExperimentServiceCreateExperimentV1Params{Body: experiment}) + _, err = s.experimentClient.Create(¶ms.ExperimentServiceCreateExperimentV1Params{Experiment: experiment}) assert.NotNil(t, err) assert.Contains(t, err.Error(), "Please specify a new name") @@ -164,12 +166,12 @@ func (s *ExperimentApiTest) TestExperimentAPI() { time.Sleep(1 * time.Second) experiment = test.GetExperiment("prediction", "my second experiment", s.resourceNamespace) _, err = s.experimentClient.Create(¶ms.ExperimentServiceCreateExperimentV1Params{ - Body: experiment, + Experiment: experiment, }) time.Sleep(1 * time.Second) experiment = test.GetExperiment("moonshot", "my second experiment", s.resourceNamespace) _, err = s.experimentClient.Create(¶ms.ExperimentServiceCreateExperimentV1Params{ - Body: experiment, + Experiment: experiment, }) assert.Nil(t, err) @@ -300,17 +302,17 @@ func (s *ExperimentApiTest) TestExperimentAPI() { Pipelineid: util.StringPointer(pipeline.ID), }) assert.Nil(t, err) - createRunRequest := &runParams.RunServiceCreateRunV1Params{Body: &run_model.APIRun{ + createRunRequest := &runParams.RunServiceCreateRunV1Params{Run: &run_model.APIRun{ Name: "hello world", Description: "this is hello world", ResourceReferences: []*run_model.APIResourceReference{ { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: experiment.ID}, - Name: experiment.Name, Relationship: run_model.APIRelationshipOWNER, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT.Pointer(), ID: experiment.ID}, + Name: experiment.Name, Relationship: run_model.APIRelationshipOWNER.Pointer(), }, { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypePIPELINEVERSION, ID: pipelineVersion.ID}, - Relationship: run_model.APIRelationshipCREATOR, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypePIPELINEVERSION.Pointer(), ID: pipelineVersion.ID}, + Relationship: run_model.APIRelationshipCREATOR.Pointer(), }, }, }} @@ -319,17 +321,17 @@ func (s *ExperimentApiTest) TestExperimentAPI() { run2, _, err := s.runClient.Create(createRunRequest) assert.Nil(t, err) /* ---------- Create a new hello world job by specifying pipeline ID ---------- */ - createJobRequest := &jobParams.JobServiceCreateJobParams{Body: &job_model.APIJob{ + createJobRequest := &jobParams.JobServiceCreateJobParams{Job: &job_model.APIJob{ Name: "hello world", Description: "this is hello world", ResourceReferences: []*job_model.APIResourceReference{ { - Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT, ID: experiment.ID}, - Relationship: job_model.APIRelationshipOWNER, + Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT.Pointer(), ID: experiment.ID}, + Relationship: job_model.APIRelationshipOWNER.Pointer(), }, { - Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypePIPELINEVERSION, ID: pipelineVersion.ID}, - Relationship: job_model.APIRelationshipCREATOR, + Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypePIPELINEVERSION.Pointer(), ID: pipelineVersion.ID}, + Relationship: job_model.APIRelationshipCREATOR.Pointer(), }, }, MaxConcurrency: 10, @@ -346,13 +348,13 @@ func (s *ExperimentApiTest) TestExperimentAPI() { /* ---------- Verify experiment and its runs ------- */ experiment, err = s.experimentClient.Get(¶ms.ExperimentServiceGetExperimentV1Params{ID: trainingExperiment.ID}) assert.Nil(t, err) - assert.Equal(t, 
experiment_model.APIExperimentStorageState("STORAGESTATE_ARCHIVED"), experiment.StorageState) + assert.Equal(t, experiment_model.APIExperimentStorageState("STORAGESTATE_ARCHIVED"), *experiment.StorageState) retrievedRun1, _, err := s.runClient.Get(&runParams.RunServiceGetRunV1Params{RunID: run1.Run.ID}) assert.Nil(t, err) - assert.Equal(t, run_model.APIRunStorageState("STORAGESTATE_ARCHIVED"), retrievedRun1.Run.StorageState) + assert.Equal(t, run_model.APIRunStorageState("STORAGESTATE_ARCHIVED"), *retrievedRun1.Run.StorageState) retrievedRun2, _, err := s.runClient.Get(&runParams.RunServiceGetRunV1Params{RunID: run2.Run.ID}) assert.Nil(t, err) - assert.Equal(t, run_model.APIRunStorageState("STORAGESTATE_ARCHIVED"), retrievedRun2.Run.StorageState) + assert.Equal(t, run_model.APIRunStorageState("STORAGESTATE_ARCHIVED"), *retrievedRun2.Run.StorageState) retrievedJob1, err := s.jobClient.Get(&jobParams.JobServiceGetJobParams{ID: job1.ID}) assert.Nil(t, err) assert.Equal(t, false, retrievedJob1.Enabled) @@ -366,13 +368,13 @@ func (s *ExperimentApiTest) TestExperimentAPI() { /* ---------- Verify experiment and its runs and jobs --------- */ experiment, err = s.experimentClient.Get(¶ms.ExperimentServiceGetExperimentV1Params{ID: trainingExperiment.ID}) assert.Nil(t, err) - assert.Equal(t, experiment_model.APIExperimentStorageState("STORAGESTATE_AVAILABLE"), experiment.StorageState) + assert.Equal(t, experiment_model.APIExperimentStorageState("STORAGESTATE_AVAILABLE"), *experiment.StorageState) retrievedRun1, _, err = s.runClient.Get(&runParams.RunServiceGetRunV1Params{RunID: run1.Run.ID}) assert.Nil(t, err) - assert.Equal(t, run_model.APIRunStorageState("STORAGESTATE_ARCHIVED"), retrievedRun1.Run.StorageState) + assert.Equal(t, run_model.APIRunStorageState("STORAGESTATE_ARCHIVED"), *retrievedRun1.Run.StorageState) retrievedRun2, _, err = s.runClient.Get(&runParams.RunServiceGetRunV1Params{RunID: run2.Run.ID}) assert.Nil(t, err) - assert.Equal(t, run_model.APIRunStorageState("STORAGESTATE_ARCHIVED"), retrievedRun2.Run.StorageState) + assert.Equal(t, run_model.APIRunStorageState("STORAGESTATE_ARCHIVED"), *retrievedRun2.Run.StorageState) retrievedJob1, err = s.jobClient.Get(&jobParams.JobServiceGetJobParams{ID: job1.ID}) assert.Nil(t, err) assert.Equal(t, false, retrievedJob1.Enabled) diff --git a/backend/test/integration/flags.go b/backend/test/integration/flags.go index 95af0e0d559..1302b1398ae 100644 --- a/backend/test/integration/flags.go +++ b/backend/test/integration/flags.go @@ -20,7 +20,6 @@ import ( ) var ( - namespace = flag.String("namespace", "kubeflow", "The namespace ml pipeline deployed to") initializeTimeout = flag.Duration("initializeTimeout", 2*time.Minute, "Duration to wait for test initialization") runIntegrationTests = flag.Bool("runIntegrationTests", false, "Whether to also run integration tests that call the service") runUpgradeTests = flag.Bool("runUpgradeTests", false, "Whether to run upgrade tests") @@ -35,8 +34,6 @@ var ( */ var isDevMode = flag.Bool("isDevMode", false, "Dev mode helps local development of integration tests") -var isDebugMode = flag.Bool("isDebugMode", false, "Whether to enable debug mode. 
Debug mode will log more diagnostics messages.") - var ( isKubeflowMode = flag.Bool("isKubeflowMode", false, "Runs tests in full Kubeflow mode") resourceNamespace = flag.String("resourceNamespace", "", "The namespace that will store the test resources in Kubeflow mode") diff --git a/backend/test/integration/healthz_api_test.go b/backend/test/integration/healthz_api_test.go index f2287cb6752..a6e89523e98 100644 --- a/backend/test/integration/healthz_api_test.go +++ b/backend/test/integration/healthz_api_test.go @@ -17,9 +17,11 @@ package integration import ( "testing" - "github.com/golang/glog" api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v1" "github.com/kubeflow/pipelines/backend/test" + "github.com/kubeflow/pipelines/backend/test/config" + + "github.com/golang/glog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" ) @@ -45,7 +47,7 @@ func (s *HealthzApiTest) SetupTest() { } } - s.namespace = *namespace + s.namespace = *config.Namespace var newHealthzClient func() (*api_server.HealthzClient, error) @@ -53,13 +55,13 @@ func (s *HealthzApiTest) SetupTest() { s.resourceNamespace = *resourceNamespace newHealthzClient = func() (*api_server.HealthzClient, error) { - return api_server.NewKubeflowInClusterHealthzClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterHealthzClient(s.namespace, *config.DebugMode) } } else { - clientConfig := test.GetClientConfig(*namespace) + clientConfig := test.GetClientConfig(*config.Namespace) newHealthzClient = func() (*api_server.HealthzClient, error) { - return api_server.NewHealthzClient(clientConfig, *isDebugMode) + return api_server.NewHealthzClient(clientConfig, *config.DebugMode) } } diff --git a/backend/test/integration/job_api_test.go b/backend/test/integration/job_api_test.go index 0f32a643960..c0ad9722aec 100644 --- a/backend/test/integration/job_api_test.go +++ b/backend/test/integration/job_api_test.go @@ -15,18 +15,13 @@ package integration import ( - "bytes" "context" "fmt" "os" - "reflect" "strings" "testing" "time" - "github.com/eapache/go-resiliency/retrier" - "github.com/go-openapi/strfmt" - "github.com/golang/glog" experimentparams "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_client/experiment_service" jobparams "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/job_client/job_service" "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/job_model" @@ -37,6 +32,12 @@ import ( api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v1" "github.com/kubeflow/pipelines/backend/src/common/util" "github.com/kubeflow/pipelines/backend/test" + "github.com/kubeflow/pipelines/backend/test/config" + + "github.com/eapache/go-resiliency/retrier" + "github.com/go-openapi/strfmt" + "github.com/golang/glog" + "github.com/google/go-cmp/cmp" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" @@ -81,7 +82,7 @@ func (s *JobApiTestSuite) SetupTest() { glog.Exitf("Failed to initialize test. 
Error: %s", err.Error()) } } - s.namespace = *namespace + s.namespace = *config.Namespace var newExperimentClient func() (*api_server.ExperimentClient, error) var newPipelineUploadClient func() (*api_server.PipelineUploadClient, error) @@ -93,37 +94,37 @@ func (s *JobApiTestSuite) SetupTest() { s.resourceNamespace = *resourceNamespace newExperimentClient = func() (*api_server.ExperimentClient, error) { - return api_server.NewKubeflowInClusterExperimentClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterExperimentClient(s.namespace, *config.DebugMode) } newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewKubeflowInClusterPipelineUploadClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterPipelineUploadClient(s.namespace, *config.DebugMode) } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *config.DebugMode) } newRunClient = func() (*api_server.RunClient, error) { - return api_server.NewKubeflowInClusterRunClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterRunClient(s.namespace, *config.DebugMode) } newJobClient = func() (*api_server.JobClient, error) { - return api_server.NewKubeflowInClusterJobClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterJobClient(s.namespace, *config.DebugMode) } } else { - clientConfig := test.GetClientConfig(*namespace) + clientConfig := test.GetClientConfig(*config.Namespace) newExperimentClient = func() (*api_server.ExperimentClient, error) { - return api_server.NewExperimentClient(clientConfig, *isDebugMode) + return api_server.NewExperimentClient(clientConfig, *config.DebugMode) } newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewPipelineUploadClient(clientConfig, *isDebugMode) + return api_server.NewPipelineUploadClient(clientConfig, *config.DebugMode) } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewPipelineClient(clientConfig, *isDebugMode) + return api_server.NewPipelineClient(clientConfig, *config.DebugMode) } newRunClient = func() (*api_server.RunClient, error) { - return api_server.NewRunClient(clientConfig, *isDebugMode) + return api_server.NewRunClient(clientConfig, *config.DebugMode) } newJobClient = func() (*api_server.JobClient, error) { - return api_server.NewJobClient(clientConfig, *isDebugMode) + return api_server.NewJobClient(clientConfig, *config.DebugMode) } } @@ -171,21 +172,21 @@ func (s *JobApiTestSuite) TestJobApis() { /* ---------- Create a new hello world experiment ---------- */ experiment := test.GetExperiment("hello world experiment", "", s.resourceNamespace) - helloWorldExperiment, err := s.experimentClient.Create(&experimentparams.ExperimentServiceCreateExperimentV1Params{Body: experiment}) + helloWorldExperiment, err := s.experimentClient.Create(&experimentparams.ExperimentServiceCreateExperimentV1Params{Experiment: experiment}) assert.Nil(t, err) /* ---------- Create a new hello world job by specifying pipeline ID ---------- */ - createJobRequest := &jobparams.JobServiceCreateJobParams{Body: &job_model.APIJob{ + createJobRequest := &jobparams.JobServiceCreateJobParams{Job: &job_model.APIJob{ Name: "hello world", Description: "this is hello world", ResourceReferences: []*job_model.APIResourceReference{ { - Key: &job_model.APIResourceKey{Type: 
job_model.APIResourceTypeEXPERIMENT, ID: helloWorldExperiment.ID}, - Relationship: job_model.APIRelationshipOWNER, + Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT.Pointer(), ID: helloWorldExperiment.ID}, + Relationship: job_model.APIRelationshipOWNER.Pointer(), }, { - Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypePIPELINEVERSION, ID: helloWorldPipelineVersion.ID}, - Relationship: job_model.APIRelationshipCREATOR, + Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypePIPELINEVERSION.Pointer(), ID: helloWorldPipelineVersion.ID}, + Relationship: job_model.APIRelationshipCREATOR.Pointer(), }, }, MaxConcurrency: 10, @@ -202,7 +203,7 @@ func (s *JobApiTestSuite) TestJobApis() { /* ---------- Create a new argument parameter experiment ---------- */ experiment = test.GetExperiment("argument parameter experiment", "", s.resourceNamespace) - argParamsExperiment, err := s.experimentClient.Create(&experimentparams.ExperimentServiceCreateExperimentV1Params{Body: experiment}) + argParamsExperiment, err := s.experimentClient.Create(&experimentparams.ExperimentServiceCreateExperimentV1Params{Experiment: experiment}) assert.Nil(t, err) /* ---------- Create a new argument parameter job by uploading workflow manifest ---------- */ @@ -213,7 +214,7 @@ func (s *JobApiTestSuite) TestJobApis() { assert.Nil(t, err) argParamsBytes, err = yaml.ToJSON(argParamsBytes) assert.Nil(t, err) - createJobRequest = &jobparams.JobServiceCreateJobParams{Body: &job_model.APIJob{ + createJobRequest = &jobparams.JobServiceCreateJobParams{Job: &job_model.APIJob{ Name: "argument parameter", Description: "this is argument parameter", PipelineSpec: &job_model.APIPipelineSpec{ @@ -225,8 +226,8 @@ func (s *JobApiTestSuite) TestJobApis() { }, ResourceReferences: []*job_model.APIResourceReference{ { - Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT, ID: argParamsExperiment.ID}, - Relationship: job_model.APIRelationshipOWNER, + Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT.Pointer(), ID: argParamsExperiment.ID}, + Relationship: job_model.APIRelationshipOWNER.Pointer(), }, }, MaxConcurrency: 10, @@ -316,17 +317,17 @@ func (s *JobApiTestSuite) TestJobApis() { time.Sleep(5 * time.Second) // Sleep for 5 seconds to make sure the previous jobs are created at a different timestamp filterTime := time.Now().Unix() time.Sleep(5 * time.Second) - createJobRequestNew := &jobparams.JobServiceCreateJobParams{Body: &job_model.APIJob{ + createJobRequestNew := &jobparams.JobServiceCreateJobParams{Job: &job_model.APIJob{ Name: "new hello world job", Description: "this is a new hello world", ResourceReferences: []*job_model.APIResourceReference{ { - Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT, ID: helloWorldExperiment.ID}, - Relationship: job_model.APIRelationshipOWNER, + Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT.Pointer(), ID: helloWorldExperiment.ID}, + Relationship: job_model.APIRelationshipOWNER.Pointer(), }, { - Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypePIPELINEVERSION, ID: helloWorldPipelineVersion.ID}, - Relationship: job_model.APIRelationshipCREATOR, + Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypePIPELINEVERSION.Pointer(), ID: helloWorldPipelineVersion.ID}, + Relationship: job_model.APIRelationshipCREATOR.Pointer(), }, }, MaxConcurrency: 10, @@ -414,7 +415,7 @@ func (s *JobApiTestSuite) TestJobApis_noCatchupOption() { /* ---------- Create a periodic 
job with start and end date in the past and catchup = true ---------- */ experiment := test.GetExperiment("periodic catchup true", "", s.resourceNamespace) - periodicCatchupTrueExperiment, err := s.experimentClient.Create(&experimentparams.ExperimentServiceCreateExperimentV1Params{Body: experiment}) + periodicCatchupTrueExperiment, err := s.experimentClient.Create(&experimentparams.ExperimentServiceCreateExperimentV1Params{Experiment: experiment}) assert.Nil(t, err) job := jobInThePastForTwoMinutes(jobOptions{ @@ -425,13 +426,13 @@ func (s *JobApiTestSuite) TestJobApis_noCatchupOption() { job.Name = "periodic-catchup-true-" job.Description = "A job with NoCatchup=false will backfill each past interval when behind schedule." job.NoCatchup = false // This is the key difference. - createJobRequest := &jobparams.JobServiceCreateJobParams{Body: job} + createJobRequest := &jobparams.JobServiceCreateJobParams{Job: job} _, err = s.jobClient.Create(createJobRequest) assert.Nil(t, err) /* -------- Create another periodic job with start and end date in the past but catchup = false ------ */ experiment = test.GetExperiment("periodic catchup false", "", s.resourceNamespace) - periodicCatchupFalseExperiment, err := s.experimentClient.Create(&experimentparams.ExperimentServiceCreateExperimentV1Params{Body: experiment}) + periodicCatchupFalseExperiment, err := s.experimentClient.Create(&experimentparams.ExperimentServiceCreateExperimentV1Params{Experiment: experiment}) assert.Nil(t, err) job = jobInThePastForTwoMinutes(jobOptions{ @@ -442,13 +443,13 @@ func (s *JobApiTestSuite) TestJobApis_noCatchupOption() { job.Name = "periodic-catchup-false-" job.Description = "A job with NoCatchup=true only schedules the last interval when behind schedule." job.NoCatchup = true // This is the key difference. - createJobRequest = &jobparams.JobServiceCreateJobParams{Body: job} + createJobRequest = &jobparams.JobServiceCreateJobParams{Job: job} _, err = s.jobClient.Create(createJobRequest) assert.Nil(t, err) /* ---------- Create a cron job with start and end date in the past and catchup = true ---------- */ experiment = test.GetExperiment("cron catchup true", "", s.resourceNamespace) - cronCatchupTrueExperiment, err := s.experimentClient.Create(&experimentparams.ExperimentServiceCreateExperimentV1Params{Body: experiment}) + cronCatchupTrueExperiment, err := s.experimentClient.Create(&experimentparams.ExperimentServiceCreateExperimentV1Params{Experiment: experiment}) assert.Nil(t, err) job = jobInThePastForTwoMinutes(jobOptions{ @@ -459,13 +460,13 @@ func (s *JobApiTestSuite) TestJobApis_noCatchupOption() { job.Name = "cron-catchup-true-" job.Description = "A job with NoCatchup=false will backfill each past interval when behind schedule." job.NoCatchup = false // This is the key difference. 
- createJobRequest = &jobparams.JobServiceCreateJobParams{Body: job} + createJobRequest = &jobparams.JobServiceCreateJobParams{Job: job} _, err = s.jobClient.Create(createJobRequest) assert.Nil(t, err) /* -------- Create another cron job with start and end date in the past but catchup = false ------ */ experiment = test.GetExperiment("cron catchup false", "", s.resourceNamespace) - cronCatchupFalseExperiment, err := s.experimentClient.Create(&experimentparams.ExperimentServiceCreateExperimentV1Params{Body: experiment}) + cronCatchupFalseExperiment, err := s.experimentClient.Create(&experimentparams.ExperimentServiceCreateExperimentV1Params{Experiment: experiment}) assert.Nil(t, err) job = jobInThePastForTwoMinutes(jobOptions{ @@ -476,7 +477,7 @@ func (s *JobApiTestSuite) TestJobApis_noCatchupOption() { job.Name = "cron-catchup-false-" job.Description = "A job with NoCatchup=true only schedules the last interval when behind schedule." job.NoCatchup = true // This is the key difference. - createJobRequest = &jobparams.JobServiceCreateJobParams{Body: job} + createJobRequest = &jobparams.JobServiceCreateJobParams{Job: job} _, err = s.jobClient.Create(createJobRequest) assert.Nil(t, err) @@ -557,12 +558,12 @@ func (s *JobApiTestSuite) checkHelloWorldJob(t *testing.T, job *job_model.APIJob }, ResourceReferences: []*job_model.APIResourceReference{ { - Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT, ID: experimentID}, - Name: experimentName, Relationship: job_model.APIRelationshipOWNER, + Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT.Pointer(), ID: experimentID}, + Name: experimentName, Relationship: job_model.APIRelationshipOWNER.Pointer(), }, { - Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypePIPELINEVERSION, ID: pipelineVersionId}, - Name: pipelineVersionName, Relationship: job_model.APIRelationshipCREATOR, + Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypePIPELINEVERSION.Pointer(), ID: pipelineVersionId}, + Name: pipelineVersionName, Relationship: job_model.APIRelationshipCREATOR.Pointer(), }, }, MaxConcurrency: 10, @@ -571,10 +572,16 @@ func (s *JobApiTestSuite) checkHelloWorldJob(t *testing.T, job *job_model.APIJob UpdatedAt: job.UpdatedAt, Status: job.Status, } + assert.True(t, test.VerifyJobResourceReferences(job.ResourceReferences, expectedJob.ResourceReferences)) expectedJob.ResourceReferences = job.ResourceReferences - - assert.Equal(t, expectedJob, job) + opts := []cmp.Option{ + cmp.Comparer(func(x, y strfmt.DateTime) bool { + return x.String() == y.String() + }), + } + diff := cmp.Diff(expectedJob, job, opts...) 
+ assert.Empty(t, diff, "APIRuns differ: %s", diff) } func (s *JobApiTestSuite) checkArgParamsJob(t *testing.T, job *job_model.APIJob, experimentID string, experimentName string) { @@ -596,8 +603,8 @@ func (s *JobApiTestSuite) checkArgParamsJob(t *testing.T, job *job_model.APIJob, }, ResourceReferences: []*job_model.APIResourceReference{ { - Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT, ID: experimentID}, - Name: experimentName, Relationship: job_model.APIRelationshipOWNER, + Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT.Pointer(), ID: experimentID}, + Name: experimentName, Relationship: job_model.APIRelationshipOWNER.Pointer(), }, }, MaxConcurrency: 10, @@ -608,7 +615,13 @@ func (s *JobApiTestSuite) checkArgParamsJob(t *testing.T, job *job_model.APIJob, } assert.True(t, test.VerifyJobResourceReferences(job.ResourceReferences, expectedJob.ResourceReferences)) expectedJob.ResourceReferences = job.ResourceReferences - assert.Equal(t, expectedJob, job) + opts := []cmp.Option{ + cmp.Comparer(func(x, y strfmt.DateTime) bool { + return x.String() == y.String() + }), + } + diff := cmp.Diff(expectedJob, job, opts...) + assert.Empty(t, diff, "APIRuns differ: %s", diff) } func (s *JobApiTestSuite) TestJobApis_SwfNotFound() { @@ -619,7 +632,7 @@ func (s *JobApiTestSuite) TestJobApis_SwfNotFound() { require.Nil(t, err) /* ---------- Create a new hello world job by specifying pipeline ID ---------- */ - createJobRequest := &jobparams.JobServiceCreateJobParams{Body: &job_model.APIJob{ + createJobRequest := &jobparams.JobServiceCreateJobParams{Job: &job_model.APIJob{ Name: "test-swf-not-found", PipelineSpec: &job_model.APIPipelineSpec{ PipelineID: pipeline.ID, @@ -630,12 +643,12 @@ func (s *JobApiTestSuite) TestJobApis_SwfNotFound() { // In multi-user mode, jobs must be associated with an experiment. 
if *isKubeflowMode { experiment := test.GetExperiment("test-swf-not-found experiment", "", s.resourceNamespace) - swfNotFoundExperiment, err := s.experimentClient.Create(&experimentparams.ExperimentServiceCreateExperimentV1Params{Body: experiment}) + swfNotFoundExperiment, err := s.experimentClient.Create(&experimentparams.ExperimentServiceCreateExperimentV1Params{Experiment: experiment}) assert.Nil(t, err) - createJobRequest.Body.ResourceReferences = []*job_model.APIResourceReference{ - {Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT, ID: swfNotFoundExperiment.ID}, - Relationship: job_model.APIRelationshipOWNER, + createJobRequest.Job.ResourceReferences = []*job_model.APIResourceReference{ + {Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT.Pointer(), ID: swfNotFoundExperiment.ID}, + Relationship: job_model.APIRelationshipOWNER.Pointer(), }, } } @@ -661,26 +674,6 @@ func (s *JobApiTestSuite) TestJobApis_SwfNotFound() { require.Contains(t, err.Error(), "not found") } -func equal(expected, actual interface{}) bool { - if expected == nil || actual == nil { - return expected == actual - } - - exp, ok := expected.([]byte) - if !ok { - return reflect.DeepEqual(expected, actual) - } - - act, ok := actual.([]byte) - if !ok { - return false - } - if exp == nil || act == nil { - return exp == nil && act == nil - } - return bytes.Equal(exp, act) -} - func (s *JobApiTestSuite) checkHelloWorldRun(run *run_model.APIRun, experimentID string, experimentName string, jobID string, jobName string) error { // Check workflow manifest is not empty if !strings.Contains(run.PipelineSpec.WorkflowManifest, "whalesay") { @@ -694,12 +687,12 @@ func (s *JobApiTestSuite) checkHelloWorldRun(run *run_model.APIRun, experimentID // Check runtime workflow manifest is not empty resourceReferences := []*run_model.APIResourceReference{ { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: experimentID}, - Name: experimentName, Relationship: run_model.APIRelationshipOWNER, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT.Pointer(), ID: experimentID}, + Name: experimentName, Relationship: run_model.APIRelationshipOWNER.Pointer(), }, { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeJOB, ID: jobID}, - Name: jobName, Relationship: run_model.APIRelationshipCREATOR, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeJOB.Pointer(), ID: jobID}, + Name: jobName, Relationship: run_model.APIRelationshipCREATOR.Pointer(), }, } if !test.VerifyRunResourceReferences(run.ResourceReferences, resourceReferences) { @@ -716,12 +709,12 @@ func (s *JobApiTestSuite) checkArgParamsRun(run *run_model.APIRun, experimentID // Check runtime workflow manifest is not empty resourceReferences := []*run_model.APIResourceReference{ { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: experimentID}, - Name: experimentName, Relationship: run_model.APIRelationshipOWNER, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT.Pointer(), ID: experimentID}, + Name: experimentName, Relationship: run_model.APIRelationshipOWNER.Pointer(), }, { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeJOB, ID: jobID}, - Name: jobName, Relationship: run_model.APIRelationshipCREATOR, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeJOB.Pointer(), ID: jobID}, + Name: jobName, Relationship: run_model.APIRelationshipCREATOR.Pointer(), }, } if 
!test.VerifyRunResourceReferences(run.ResourceReferences, resourceReferences) { @@ -757,12 +750,12 @@ func defaultApiJob(pipelineVersionId, experimentId string) *job_model.APIJob { Description: "This is a default pipeline", ResourceReferences: []*job_model.APIResourceReference{ { - Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT, ID: experimentId}, - Relationship: job_model.APIRelationshipOWNER, + Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT.Pointer(), ID: experimentId}, + Relationship: job_model.APIRelationshipOWNER.Pointer(), }, { - Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypePIPELINEVERSION, ID: pipelineVersionId}, - Relationship: job_model.APIRelationshipCREATOR, + Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypePIPELINEVERSION.Pointer(), ID: pipelineVersionId}, + Relationship: job_model.APIRelationshipCREATOR.Pointer(), }, }, MaxConcurrency: 10, diff --git a/backend/test/integration/pipeline_api_test.go b/backend/test/integration/pipeline_api_test.go index fd7f917072c..83f95ebf4cd 100644 --- a/backend/test/integration/pipeline_api_test.go +++ b/backend/test/integration/pipeline_api_test.go @@ -20,7 +20,6 @@ import ( "testing" "time" - "github.com/golang/glog" params "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service" model "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" uploadParams "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service" @@ -28,9 +27,13 @@ import ( api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v1" "github.com/kubeflow/pipelines/backend/src/common/util" "github.com/kubeflow/pipelines/backend/test" + "github.com/kubeflow/pipelines/backend/test/config" + + "github.com/golang/glog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" + corev1 "k8s.io/api/core/v1" ) // This test suit tests various methods to import pipeline to pipeline system, including @@ -60,7 +63,7 @@ func (s *PipelineApiTest) SetupTest() { glog.Exitf("Failed to initialize test. 
Error: %s", err.Error()) } } - s.namespace = *namespace + s.namespace = *config.Namespace var newPipelineUploadClient func() (*api_server.PipelineUploadClient, error) var newPipelineClient func() (*api_server.PipelineClient, error) @@ -69,19 +72,19 @@ func (s *PipelineApiTest) SetupTest() { s.resourceNamespace = *resourceNamespace newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewKubeflowInClusterPipelineUploadClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterPipelineUploadClient(s.namespace, *config.DebugMode) } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *config.DebugMode) } } else { - clientConfig := test.GetClientConfig(*namespace) + clientConfig := test.GetClientConfig(*config.Namespace) newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewPipelineUploadClient(clientConfig, *isDebugMode) + return api_server.NewPipelineUploadClient(clientConfig, *config.DebugMode) } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewPipelineClient(clientConfig, *isDebugMode) + return api_server.NewPipelineClient(clientConfig, *config.DebugMode) } } @@ -122,7 +125,7 @@ func (s *PipelineApiTest) TestPipelineAPI() { /* ---------- Import pipeline YAML by URL ---------- */ time.Sleep(1 * time.Second) sequentialPipeline, err := s.pipelineClient.Create(¶ms.PipelineServiceCreatePipelineV1Params{ - Body: &model.APIPipeline{Name: "sequential", URL: &model.APIURL{ + Pipeline: &model.APIPipeline{Name: "sequential", URL: &model.APIURL{ PipelineURL: "https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines/refs/heads/master/backend/test/v2/resources/sequential.yaml", }}, }) @@ -139,13 +142,13 @@ func (s *PipelineApiTest) TestPipelineAPI() { /* ---------- Import pipeline tarball by URL ---------- */ pipelineURL := "https://github.com/opendatahub-io/data-science-pipelines/raw/refs/heads/master/backend/test/v2/resources/arguments.pipeline.zip" - if pullNumber := os.Getenv("PULL_NUMBER"); pullNumber != "" { + if pullNumber := os.Getenv("PullNumber"); pullNumber != "" { pipelineURL = fmt.Sprintf("https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines/pull/%s/head/backend/test/v2/resources/arguments.pipeline.zip", pullNumber) } time.Sleep(1 * time.Second) argumentUrlPipeline, err := s.pipelineClient.Create(¶ms.PipelineServiceCreatePipelineV1Params{ - Body: &model.APIPipeline{ + Pipeline: &model.APIPipeline{ URL: &model.APIURL{ PipelineURL: pipelineURL, }, @@ -243,14 +246,20 @@ func (s *PipelineApiTest) TestPipelineAPI() { require.Nil(t, err) bytes, err := os.ReadFile("../resources/arguments-parameters.yaml") require.Nil(t, err) - expected, _ := pipelinetemplate.New(bytes, true) + defaultPVC := &corev1.PersistentVolumeClaimSpec{ + AccessModes: []corev1.PersistentVolumeAccessMode{ + corev1.ReadWriteMany, + }, + StorageClassName: util.StringPointer("my-storage"), + } + expected, _ := pipelinetemplate.New(bytes, true, defaultPVC) assert.Equal(t, expected, template) template, err = s.pipelineClient.GetTemplate(¶ms.PipelineServiceGetTemplateParams{ID: v2HelloPipeline.ID}) require.Nil(t, err) bytes, err = os.ReadFile("../resources/v2-hello-world.yaml") require.Nil(t, err) - expected, _ = pipelinetemplate.New(bytes, true) + expected, _ = pipelinetemplate.New(bytes, true, nil) 
assert.Equal(t, expected, template) } @@ -274,8 +283,8 @@ func verifyPipeline(t *testing.T, pipeline *model.APIPipeline) { {Name: "param2"}, }, ResourceReferences: []*model.APIResourceReference{{ - Key: &model.APIResourceKey{ID: pipeline.ID, Type: model.APIResourceTypePIPELINE}, - Relationship: model.APIRelationshipOWNER, + Key: &model.APIResourceKey{ID: pipeline.ID, Type: model.APIResourceTypePIPELINE.Pointer()}, + Relationship: model.APIRelationshipOWNER.Pointer(), }}, }, } diff --git a/backend/test/integration/pipeline_version_api_test.go b/backend/test/integration/pipeline_version_api_test.go index 63a63a4731d..b0bbcb04e9b 100644 --- a/backend/test/integration/pipeline_version_api_test.go +++ b/backend/test/integration/pipeline_version_api_test.go @@ -20,7 +20,6 @@ import ( "testing" "time" - "github.com/golang/glog" params "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_client/pipeline_service" "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_model" uploadParams "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service" @@ -28,6 +27,9 @@ import ( api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v1" "github.com/kubeflow/pipelines/backend/src/common/util" "github.com/kubeflow/pipelines/backend/test" + "github.com/kubeflow/pipelines/backend/test/config" + + "github.com/golang/glog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" @@ -63,22 +65,22 @@ func (s *PipelineVersionApiTest) SetupTest() { var newPipelineClient func() (*api_server.PipelineClient, error) if *isKubeflowMode { - s.namespace = *namespace + s.namespace = *config.Namespace newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewKubeflowInClusterPipelineUploadClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterPipelineUploadClient(s.namespace, *config.DebugMode) } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *config.DebugMode) } } else { - clientConfig := test.GetClientConfig(*namespace) + clientConfig := test.GetClientConfig(*config.Namespace) newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewPipelineUploadClient(clientConfig, *isDebugMode) + return api_server.NewPipelineUploadClient(clientConfig, *config.DebugMode) } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewPipelineClient(clientConfig, *isDebugMode) + return api_server.NewPipelineClient(clientConfig, *config.DebugMode) } } @@ -143,15 +145,15 @@ func (s *PipelineVersionApiTest) TestArgoSpec() { /* ---------- Import pipeline version YAML by URL ---------- */ time.Sleep(1 * time.Second) sequentialPipelineVersion, err := s.pipelineClient.CreatePipelineVersion(¶ms.PipelineServiceCreatePipelineVersionV1Params{ - Body: &pipeline_model.APIPipelineVersion{ + Version: &pipeline_model.APIPipelineVersion{ Name: "sequential", PackageURL: &pipeline_model.APIURL{ PipelineURL: "https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines/refs/heads/master/backend/test/v2/resources/sequential.yaml", }, ResourceReferences: []*pipeline_model.APIResourceReference{ { - Key: &pipeline_model.APIResourceKey{Type: pipeline_model.APIResourceTypePIPELINE, ID: 
pipelineId}, - Relationship: pipeline_model.APIRelationshipOWNER, + Key: &pipeline_model.APIResourceKey{Type: pipeline_model.APIResourceTypePIPELINE.Pointer(), ID: pipelineId}, + Relationship: pipeline_model.APIRelationshipOWNER.Pointer(), }, }, }, @@ -173,20 +175,20 @@ func (s *PipelineVersionApiTest) TestArgoSpec() { time.Sleep(1 * time.Second) pipelineURL := "https://github.com/opendatahub-io/data-science-pipelines/raw/refs/heads/master/backend/test/resources/arguments.pipeline.zip" - if pullNumber := os.Getenv("PULL_NUMBER"); pullNumber != "" { + if pullNumber := os.Getenv("PullNumber"); pullNumber != "" { pipelineURL = fmt.Sprintf("https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines/pull/%s/head/backend/test/resources/arguments.pipeline.zip", pullNumber) } argumentUrlPipelineVersion, err := s.pipelineClient.CreatePipelineVersion(¶ms.PipelineServiceCreatePipelineVersionV1Params{ - Body: &pipeline_model.APIPipelineVersion{ + Version: &pipeline_model.APIPipelineVersion{ Name: "arguments", PackageURL: &pipeline_model.APIURL{ PipelineURL: pipelineURL, }, ResourceReferences: []*pipeline_model.APIResourceReference{ { - Key: &pipeline_model.APIResourceKey{Type: pipeline_model.APIResourceTypePIPELINE, ID: pipelineId}, - Relationship: pipeline_model.APIRelationshipOWNER, + Key: &pipeline_model.APIResourceKey{Type: pipeline_model.APIResourceTypePIPELINE.Pointer(), ID: pipelineId}, + Relationship: pipeline_model.APIRelationshipOWNER.Pointer(), }, }, }, @@ -334,7 +336,7 @@ func (s *PipelineVersionApiTest) TestArgoSpec() { require.Nil(t, err) bytes, err := os.ReadFile("../resources/arguments-parameters.yaml") require.Nil(t, err) - expected, err := pipelinetemplate.New(bytes, true) + expected, err := pipelinetemplate.New(bytes, true, nil) require.Nil(t, err) assert.Equal(t, expected, template) } @@ -367,7 +369,7 @@ func (s *PipelineVersionApiTest) TestV2Spec() { require.Nil(t, err) bytes, err := os.ReadFile("../resources/v2-hello-world.yaml") require.Nil(t, err) - expected, err := pipelinetemplate.New(bytes, true) + expected, err := pipelinetemplate.New(bytes, true, nil) require.Nil(t, err) assert.Equal(t, expected, template, "Discrepancy found in template's pipeline name. Created pipeline's name - %s.", pipeline.Name) } diff --git a/backend/test/integration/run_api_test.go b/backend/test/integration/run_api_test.go index ad641191971..42bfefbd24d 100644 --- a/backend/test/integration/run_api_test.go +++ b/backend/test/integration/run_api_test.go @@ -20,7 +20,6 @@ import ( "testing" "time" - "github.com/golang/glog" api "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" experimentparams "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_client/experiment_service" uploadParams "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/pipeline_upload_client/pipeline_upload_service" @@ -29,6 +28,11 @@ import ( api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v1" "github.com/kubeflow/pipelines/backend/src/common/util" "github.com/kubeflow/pipelines/backend/test" + "github.com/kubeflow/pipelines/backend/test/config" + + "github.com/go-openapi/strfmt" + "github.com/golang/glog" + "github.com/google/go-cmp/cmp" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" "k8s.io/apimachinery/pkg/util/yaml" @@ -63,7 +67,7 @@ func (s *RunApiTestSuite) SetupTest() { glog.Exitf("Failed to initialize test. 
Error: %s", err.Error()) } } - s.namespace = *namespace + s.namespace = *config.Namespace var newExperimentClient func() (*api_server.ExperimentClient, error) var newPipelineUploadClient func() (*api_server.PipelineUploadClient, error) @@ -74,31 +78,31 @@ func (s *RunApiTestSuite) SetupTest() { s.resourceNamespace = *resourceNamespace newExperimentClient = func() (*api_server.ExperimentClient, error) { - return api_server.NewKubeflowInClusterExperimentClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterExperimentClient(s.namespace, *config.DebugMode) } newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewKubeflowInClusterPipelineUploadClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterPipelineUploadClient(s.namespace, *config.DebugMode) } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *config.DebugMode) } newRunClient = func() (*api_server.RunClient, error) { - return api_server.NewKubeflowInClusterRunClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterRunClient(s.namespace, *config.DebugMode) } } else { - clientConfig := test.GetClientConfig(*namespace) + clientConfig := test.GetClientConfig(*config.Namespace) newExperimentClient = func() (*api_server.ExperimentClient, error) { - return api_server.NewExperimentClient(clientConfig, *isDebugMode) + return api_server.NewExperimentClient(clientConfig, *config.DebugMode) } newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewPipelineUploadClient(clientConfig, *isDebugMode) + return api_server.NewPipelineUploadClient(clientConfig, *config.DebugMode) } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewPipelineClient(clientConfig, *isDebugMode) + return api_server.NewPipelineClient(clientConfig, *config.DebugMode) } newRunClient = func() (*api_server.RunClient, error) { - return api_server.NewRunClient(clientConfig, *isDebugMode) + return api_server.NewRunClient(clientConfig, *config.DebugMode) } } @@ -141,21 +145,21 @@ func (s *RunApiTestSuite) TestRunApis() { /* ---------- Create a new hello world experiment ---------- */ experiment := test.GetExperiment("hello world experiment", "", s.resourceNamespace) - helloWorldExperiment, err := s.experimentClient.Create(&experimentparams.ExperimentServiceCreateExperimentV1Params{Body: experiment}) + helloWorldExperiment, err := s.experimentClient.Create(&experimentparams.ExperimentServiceCreateExperimentV1Params{Experiment: experiment}) assert.Nil(t, err) /* ---------- Create a new hello world run by specifying pipeline version ID ---------- */ - createRunRequest := &runparams.RunServiceCreateRunV1Params{Body: &run_model.APIRun{ + createRunRequest := &runparams.RunServiceCreateRunV1Params{Run: &run_model.APIRun{ Name: "hello world", Description: "this is hello world", ResourceReferences: []*run_model.APIResourceReference{ { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: helloWorldExperiment.ID}, - Name: helloWorldExperiment.Name, Relationship: run_model.APIRelationshipOWNER, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT.Pointer(), ID: helloWorldExperiment.ID}, + Name: helloWorldExperiment.Name, Relationship: run_model.APIRelationshipOWNER.Pointer(), }, { - Key: 
&run_model.APIResourceKey{Type: run_model.APIResourceTypePIPELINEVERSION, ID: helloWorldPipelineVersion.ID}, - Relationship: run_model.APIRelationshipCREATOR, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypePIPELINEVERSION.Pointer(), ID: helloWorldPipelineVersion.ID}, + Relationship: run_model.APIRelationshipCREATOR.Pointer(), }, }, }} @@ -170,7 +174,7 @@ func (s *RunApiTestSuite) TestRunApis() { /* ---------- Create a new argument parameter experiment ---------- */ createExperimentRequest := &experimentparams.ExperimentServiceCreateExperimentV1Params{ - Body: test.GetExperiment("argument parameter experiment", "", s.resourceNamespace), + Experiment: test.GetExperiment("argument parameter experiment", "", s.resourceNamespace), } argParamsExperiment, err := s.experimentClient.Create(createExperimentRequest) assert.Nil(t, err) @@ -180,7 +184,7 @@ func (s *RunApiTestSuite) TestRunApis() { assert.Nil(t, err) argParamsBytes, err = yaml.ToJSON(argParamsBytes) assert.Nil(t, err) - createRunRequest = &runparams.RunServiceCreateRunV1Params{Body: &run_model.APIRun{ + createRunRequest = &runparams.RunServiceCreateRunV1Params{Run: &run_model.APIRun{ Name: "argument parameter", Description: "this is argument parameter", PipelineSpec: &run_model.APIPipelineSpec{ @@ -192,8 +196,8 @@ func (s *RunApiTestSuite) TestRunApis() { }, ResourceReferences: []*run_model.APIResourceReference{ { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: argParamsExperiment.ID}, - Relationship: run_model.APIRelationshipOWNER, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT.Pointer(), ID: argParamsExperiment.ID}, + Relationship: run_model.APIRelationshipOWNER.Pointer(), }, }, }} @@ -280,7 +284,7 @@ func (s *RunApiTestSuite) TestRunApis() { filterTime := time.Now().Unix() time.Sleep(5 * time.Second) // Create a new run - createRunRequest.Body.Name = "argument parameter 2" + createRunRequest.Run.Name = "argument parameter 2" _, _, err = s.runClient.Create(createRunRequest) assert.Nil(t, err) // Check total number of runs is 3 @@ -314,7 +318,7 @@ func (s *RunApiTestSuite) TestRunApis() { assert.Equal(t, 1, len(runs)) assert.Equal(t, 1, totalSize) assert.Equal(t, "hello world", runs[0].Name) - assert.Equal(t, string(runs[0].StorageState), api.Run_STORAGESTATE_ARCHIVED.String()) + assert.Equal(t, string(*runs[0].StorageState), api.Run_STORAGESTATE_ARCHIVED.String()) /* ---------- Upload long-running pipeline YAML ---------- */ longRunningPipeline, err := s.pipelineUploadClient.UploadFile("../resources/long-running.yaml", uploadParams.NewUploadPipelineParamsWithTimeout(10*time.Second)) @@ -329,17 +333,17 @@ func (s *RunApiTestSuite) TestRunApis() { assert.Nil(t, err) /* ---------- Create a new long-running run by specifying pipeline ID ---------- */ - createLongRunningRunRequest := &runparams.RunServiceCreateRunV1Params{Body: &run_model.APIRun{ + createLongRunningRunRequest := &runparams.RunServiceCreateRunV1Params{Run: &run_model.APIRun{ Name: "long running", Description: "this pipeline will run long enough for us to manually terminate it before it finishes", ResourceReferences: []*run_model.APIResourceReference{ { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: helloWorldExperiment.ID}, - Relationship: run_model.APIRelationshipOWNER, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT.Pointer(), ID: helloWorldExperiment.ID}, + Relationship: run_model.APIRelationshipOWNER.Pointer(), }, { - Key: 
&run_model.APIResourceKey{Type: run_model.APIResourceTypePIPELINEVERSION, ID: longRunningPipelineVersion.ID}, - Relationship: run_model.APIRelationshipCREATOR, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypePIPELINEVERSION.Pointer(), ID: longRunningPipelineVersion.ID}, + Relationship: run_model.APIRelationshipCREATOR.Pointer(), }, }, }} @@ -377,12 +381,12 @@ func (s *RunApiTestSuite) checkTerminatedRunDetail(t *testing.T, runDetail *run_ }, ResourceReferences: []*run_model.APIResourceReference{ { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: experimentId}, - Name: experimentName, Relationship: run_model.APIRelationshipOWNER, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT.Pointer(), ID: experimentId}, + Name: experimentName, Relationship: run_model.APIRelationshipOWNER.Pointer(), }, { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypePIPELINEVERSION, ID: pipelineVersionId}, - Name: pipelineVersionName, Relationship: run_model.APIRelationshipCREATOR, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypePIPELINEVERSION.Pointer(), ID: pipelineVersionId}, + Name: pipelineVersionName, Relationship: run_model.APIRelationshipCREATOR.Pointer(), }, }, CreatedAt: runDetail.Run.CreatedAt, @@ -390,9 +394,20 @@ func (s *RunApiTestSuite) checkTerminatedRunDetail(t *testing.T, runDetail *run_ FinishedAt: runDetail.Run.FinishedAt, } + verifyRunDetails(t, runDetail, expectedRun) +} + +func verifyRunDetails(t *testing.T, runDetail *run_model.APIRunDetail, expectedRun *run_model.APIRun) { assert.True(t, test.VerifyRunResourceReferences(runDetail.Run.ResourceReferences, expectedRun.ResourceReferences)) expectedRun.ResourceReferences = runDetail.Run.ResourceReferences - assert.Equal(t, expectedRun, runDetail.Run) + + opts := []cmp.Option{ + cmp.Comparer(func(x, y strfmt.DateTime) bool { + return x.String() == y.String() + }), + } + diff := cmp.Diff(expectedRun, runDetail.Run, opts...) 
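The assertions above swap assert.Equal for go-cmp with a custom comparer, so strfmt.DateTime fields count as equal whenever their string forms match. A minimal, self-contained sketch of that pattern (the record type and values are illustrative, not taken from the test suite):

```go
package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"
	"github.com/google/go-cmp/cmp"
)

// record stands in for an API model with a DateTime field; the real tests
// compare *run_model.APIRun values the same way.
type record struct {
	Name      string
	CreatedAt strfmt.DateTime
}

func main() {
	opts := []cmp.Option{
		// Treat two DateTime values as equal when their string forms match,
		// sidestepping sub-second or time-zone representation differences.
		cmp.Comparer(func(x, y strfmt.DateTime) bool {
			return x.String() == y.String()
		}),
	}
	want := record{Name: "hello world", CreatedAt: strfmt.NewDateTime()}
	got := record{Name: "hello world", CreatedAt: strfmt.NewDateTime()}
	// An empty diff means the values are equal under opts; a non-empty diff
	// is what the tests hand to assert.Empty.
	fmt.Printf("diff: %q\n", cmp.Diff(want, got, opts...))
}
```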
+ assert.Empty(t, diff, "APIRuns differ: %s", diff) } func (s *RunApiTestSuite) checkHelloWorldRunDetail(t *testing.T, runDetail *run_model.APIRunDetail, experimentId string, experimentName string, pipelineVersionId string, pipelineVersionName string) { @@ -414,12 +429,12 @@ func (s *RunApiTestSuite) checkHelloWorldRunDetail(t *testing.T, runDetail *run_ }, ResourceReferences: []*run_model.APIResourceReference{ { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: experimentId}, - Name: experimentName, Relationship: run_model.APIRelationshipOWNER, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT.Pointer(), ID: experimentId}, + Name: experimentName, Relationship: run_model.APIRelationshipOWNER.Pointer(), }, { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypePIPELINEVERSION, ID: pipelineVersionId}, - Name: pipelineVersionName, Relationship: run_model.APIRelationshipCREATOR, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypePIPELINEVERSION.Pointer(), ID: pipelineVersionId}, + Name: pipelineVersionName, Relationship: run_model.APIRelationshipCREATOR.Pointer(), }, }, CreatedAt: runDetail.Run.CreatedAt, @@ -427,9 +442,7 @@ func (s *RunApiTestSuite) checkHelloWorldRunDetail(t *testing.T, runDetail *run_ FinishedAt: runDetail.Run.FinishedAt, } - assert.True(t, test.VerifyRunResourceReferences(runDetail.Run.ResourceReferences, expectedRun.ResourceReferences)) - expectedRun.ResourceReferences = runDetail.Run.ResourceReferences - assert.Equal(t, expectedRun, runDetail.Run) + verifyRunDetails(t, runDetail, expectedRun) } func (s *RunApiTestSuite) checkArgParamsRunDetail(t *testing.T, runDetail *run_model.APIRunDetail, experimentId string, experimentName string) { @@ -456,8 +469,8 @@ func (s *RunApiTestSuite) checkArgParamsRunDetail(t *testing.T, runDetail *run_m }, ResourceReferences: []*run_model.APIResourceReference{ { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: experimentId}, - Name: experimentName, Relationship: run_model.APIRelationshipOWNER, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT.Pointer(), ID: experimentId}, + Name: experimentName, Relationship: run_model.APIRelationshipOWNER.Pointer(), }, }, CreatedAt: runDetail.Run.CreatedAt, @@ -465,9 +478,7 @@ func (s *RunApiTestSuite) checkArgParamsRunDetail(t *testing.T, runDetail *run_m FinishedAt: runDetail.Run.FinishedAt, } - assert.True(t, test.VerifyRunResourceReferences(runDetail.Run.ResourceReferences, expectedRun.ResourceReferences)) - expectedRun.ResourceReferences = runDetail.Run.ResourceReferences - assert.Equal(t, expectedRun, runDetail.Run) + verifyRunDetails(t, runDetail, expectedRun) } func TestRunApi(t *testing.T) { diff --git a/backend/test/integration/upgrade_test.go b/backend/test/integration/upgrade_test.go index a14b30b32f3..ad13cd0ec57 100644 --- a/backend/test/integration/upgrade_test.go +++ b/backend/test/integration/upgrade_test.go @@ -20,7 +20,6 @@ import ( "testing" "time" - "github.com/golang/glog" experimentParams "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_client/experiment_service" "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_model" jobparams "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/job_client/job_service" @@ -34,9 +33,15 @@ import ( api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v1" "github.com/kubeflow/pipelines/backend/src/common/util" 
"github.com/kubeflow/pipelines/backend/test" + "github.com/kubeflow/pipelines/backend/test/config" + + "github.com/go-openapi/strfmt" + "github.com/golang/glog" + "github.com/google/go-cmp/cmp" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" + corev1 "k8s.io/api/core/v1" ) // Methods are organized into two types: "prepare" and "verify". @@ -95,7 +100,7 @@ func (s *UpgradeTests) SetupSuite() { glog.Exitf("Failed to initialize test. Error: %v", err) } } - s.namespace = *namespace + s.namespace = *config.Namespace var newExperimentClient func() (*api_server.ExperimentClient, error) var newPipelineUploadClient func() (*api_server.PipelineUploadClient, error) @@ -107,37 +112,37 @@ func (s *UpgradeTests) SetupSuite() { s.resourceNamespace = *resourceNamespace newExperimentClient = func() (*api_server.ExperimentClient, error) { - return api_server.NewKubeflowInClusterExperimentClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterExperimentClient(s.namespace, *config.DebugMode) } newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewKubeflowInClusterPipelineUploadClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterPipelineUploadClient(s.namespace, *config.DebugMode) } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *config.DebugMode) } newRunClient = func() (*api_server.RunClient, error) { - return api_server.NewKubeflowInClusterRunClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterRunClient(s.namespace, *config.DebugMode) } newJobClient = func() (*api_server.JobClient, error) { - return api_server.NewKubeflowInClusterJobClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterJobClient(s.namespace, *config.DebugMode) } } else { - clientConfig := test.GetClientConfig(*namespace) + clientConfig := test.GetClientConfig(*config.Namespace) newExperimentClient = func() (*api_server.ExperimentClient, error) { - return api_server.NewExperimentClient(clientConfig, *isDebugMode) + return api_server.NewExperimentClient(clientConfig, *config.DebugMode) } newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewPipelineUploadClient(clientConfig, *isDebugMode) + return api_server.NewPipelineUploadClient(clientConfig, *config.DebugMode) } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewPipelineClient(clientConfig, *isDebugMode) + return api_server.NewPipelineClient(clientConfig, *config.DebugMode) } newRunClient = func() (*api_server.RunClient, error) { - return api_server.NewRunClient(clientConfig, *isDebugMode) + return api_server.NewRunClient(clientConfig, *config.DebugMode) } newJobClient = func() (*api_server.JobClient, error) { - return api_server.NewJobClient(clientConfig, *isDebugMode) + return api_server.NewJobClient(clientConfig, *config.DebugMode) } } @@ -185,7 +190,7 @@ func (s *UpgradeTests) PrepareExperiments() { /* ---------- Create a new experiment ---------- */ experiment := test.GetExperiment("training", "my first experiment", s.resourceNamespace) _, err := s.experimentClient.Create(&experimentParams.ExperimentServiceCreateExperimentV1Params{ - Body: experiment, + Experiment: experiment, }) require.Nil(t, err) @@ -194,14 +199,14 @@ func (s *UpgradeTests) 
PrepareExperiments() { time.Sleep(1 * time.Second) experiment = test.GetExperiment("prediction", "my second experiment", s.resourceNamespace) _, err = s.experimentClient.Create(&experimentParams.ExperimentServiceCreateExperimentV1Params{ - Body: experiment, + Experiment: experiment, }) require.Nil(t, err) time.Sleep(1 * time.Second) experiment = test.GetExperiment("moonshot", "my third experiment", s.resourceNamespace) _, err = s.experimentClient.Create(&experimentParams.ExperimentServiceCreateExperimentV1Params{ - Body: experiment, + Experiment: experiment, }) require.Nil(t, err) } @@ -269,10 +274,14 @@ func (s *UpgradeTests) PreparePipelines() { assert.Equal(t, "arguments-parameters.yaml", argumentYAMLPipeline.Name) /* ---------- Import pipeline YAML by URL ---------- */ + pipelineURL := "https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines/refs/heads/master/backend/test/v2/resources/sequential.yaml" + if pullNumber := os.Getenv("PullNumber"); pullNumber != "" { + pipelineURL = fmt.Sprintf("https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines/pull/%s/head/test_data/sdk_compiled_pipelines/valid/sequential_v1.yaml", pullNumber) + } time.Sleep(1 * time.Second) sequentialPipeline, err := s.pipelineClient.Create(&pipelineParams.PipelineServiceCreatePipelineV1Params{ - Body: &pipeline_model.APIPipeline{Name: "sequential", URL: &pipeline_model.APIURL{ - PipelineURL: "https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines/refs/heads/master/backend/test/v2/resources/sequential.yaml", + Pipeline: &pipeline_model.APIPipeline{Name: "sequential", URL: &pipeline_model.APIURL{ + PipelineURL: pipelineURL, }}, }) require.Nil(t, err) @@ -286,15 +295,15 @@ func (s *UpgradeTests) PreparePipelines() { assert.Equal(t, "zip-arguments-parameters", argumentUploadPipeline.Name) /* ---------- Import pipeline tarball by URL ---------- */ - pipelineURL := "https://github.com/opendatahub-io/data-science-pipelines/raw/refs/heads/master/backend/test/v2/resources/arguments.pipeline.zip" + pipelineURL = "https://github.com/opendatahub-io/data-science-pipelines/raw/refs/heads/master/backend/test/v2/resources/arguments.pipeline.zip" - if pullNumber := os.Getenv("PULL_NUMBER"); pullNumber != "" { + if pullNumber := os.Getenv("PullNumber"); pullNumber != "" { pipelineURL = fmt.Sprintf("https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines/pull/%s/head/backend/test/v2/resources/arguments.pipeline.zip", pullNumber) } time.Sleep(1 * time.Second) argumentUrlPipeline, err := s.pipelineClient.Create(&pipelineParams.PipelineServiceCreatePipelineV1Params{ - Body: &pipeline_model.APIPipeline{ + Pipeline: &pipeline_model.APIPipeline{ URL: &pipeline_model.APIURL{ PipelineURL: pipelineURL, }, @@ -328,7 +337,13 @@ func (s *UpgradeTests) VerifyPipelines() { require.Nil(t, err) bytes, err := os.ReadFile("../resources/arguments-parameters.yaml") require.Nil(t, err) - expected, err := pipelinetemplate.New(bytes, true) + defaultPVC := &corev1.PersistentVolumeClaimSpec{ + AccessModes: []corev1.PersistentVolumeAccessMode{ + corev1.ReadWriteMany, + }, + StorageClassName: util.StringPointer("my-storage"), + } + expected, err := pipelinetemplate.New(bytes, true, defaultPVC) require.Nil(t, err) assert.Equal(t, expected, template) } @@ -346,7 +361,7 @@ func (s *UpgradeTests) PrepareRuns() { require.Equal(t, hello2, helloWorldExperiment) /* ---------- Create a new hello world run by specifying pipeline ID ---------- */ - createRunRequest := 
&runParams.RunServiceCreateRunV1Params{Body: &run_model.APIRun{ + createRunRequest := &runParams.RunServiceCreateRunV1Params{Run: &run_model.APIRun{ Name: "hello world", Description: "this is hello world", PipelineSpec: &run_model.APIPipelineSpec{ @@ -354,8 +369,8 @@ func (s *UpgradeTests) PrepareRuns() { }, ResourceReferences: []*run_model.APIResourceReference{ { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: helloWorldExperiment.ID}, - Name: helloWorldExperiment.Name, Relationship: run_model.APIRelationshipOWNER, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT.Pointer(), ID: helloWorldExperiment.ID}, + Name: helloWorldExperiment.Name, Relationship: run_model.APIRelationshipOWNER.Pointer(), }, }, }} @@ -388,7 +403,7 @@ func (s *UpgradeTests) PrepareJobs() { experiment := s.getHelloWorldExperiment(true) /* ---------- Create a new hello world job by specifying pipeline ID ---------- */ - createJobRequest := &jobparams.JobServiceCreateJobParams{Body: &job_model.APIJob{ + createJobRequest := &jobparams.JobServiceCreateJobParams{Job: &job_model.APIJob{ Name: "hello world", Description: "this is hello world", PipelineSpec: &job_model.APIPipelineSpec{ @@ -396,8 +411,8 @@ func (s *UpgradeTests) PrepareJobs() { }, ResourceReferences: []*job_model.APIResourceReference{ { - Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT, ID: experiment.ID}, - Relationship: job_model.APIRelationshipOWNER, + Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT.Pointer(), ID: experiment.ID}, + Relationship: job_model.APIRelationshipOWNER.Pointer(), }, }, MaxConcurrency: 10, @@ -433,8 +448,8 @@ func (s *UpgradeTests) VerifyJobs() { }, ResourceReferences: []*job_model.APIResourceReference{ { - Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT, ID: experiment.ID}, - Name: experiment.Name, Relationship: job_model.APIRelationshipOWNER, + Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT.Pointer(), ID: experiment.ID}, + Name: experiment.Name, Relationship: job_model.APIRelationshipOWNER.Pointer(), }, }, ServiceAccount: test.GetDefaultPipelineRunnerServiceAccount(*isKubeflowMode), @@ -448,7 +463,14 @@ func (s *UpgradeTests) VerifyJobs() { assert.True(t, test.VerifyJobResourceReferences(job.ResourceReferences, expectedJob.ResourceReferences), "Inconsistent resource references: %v does not contain %v", job.ResourceReferences, expectedJob.ResourceReferences) expectedJob.ResourceReferences = job.ResourceReferences - assert.Equal(t, expectedJob, job) + + opts := []cmp.Option{ + cmp.Comparer(func(x, y strfmt.DateTime) bool { + return x.String() == y.String() + }), + } + diff := cmp.Diff(expectedJob, job, opts...) 
+ assert.Empty(t, diff, "APIRuns differ: %s", diff) } func (s *UpgradeTests) VerifyCreatingRunsAndJobs() { @@ -471,7 +493,7 @@ func (s *UpgradeTests) VerifyCreatingRunsAndJobs() { assert.Equal(t, "hello world experiment", experiments[4].Name) /* ---------- Create a new run based on the oldest pipeline and its default pipeline version ---------- */ - createRunRequest := &runParams.RunServiceCreateRunV1Params{Body: &run_model.APIRun{ + createRunRequest := &runParams.RunServiceCreateRunV1Params{Run: &run_model.APIRun{ Name: "argument parameter from pipeline", Description: "a run from an old pipeline", PipelineSpec: &run_model.APIPipelineSpec{ @@ -483,19 +505,19 @@ func (s *UpgradeTests) VerifyCreatingRunsAndJobs() { // This run should belong to the newest experiment (created after the upgrade) ResourceReferences: []*run_model.APIResourceReference{ { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: experiments[4].ID}, - Relationship: run_model.APIRelationshipOWNER, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT.Pointer(), ID: experiments[4].ID}, + Relationship: run_model.APIRelationshipOWNER.Pointer(), }, { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypePIPELINE, ID: pipelines[0].ID}, - Relationship: run_model.APIRelationshipCREATOR, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypePIPELINE.Pointer(), ID: pipelines[0].ID}, + Relationship: run_model.APIRelationshipCREATOR.Pointer(), }, }, }} runFromPipeline, _, err := s.runClient.Create(createRunRequest) assert.Nil(t, err) - createRunRequestVersion := &runParams.RunServiceCreateRunV1Params{Body: &run_model.APIRun{ + createRunRequestVersion := &runParams.RunServiceCreateRunV1Params{Run: &run_model.APIRun{ Name: "argument parameter from pipeline version", Description: "a run from an old pipeline version", PipelineSpec: &run_model.APIPipelineSpec{ @@ -507,8 +529,8 @@ func (s *UpgradeTests) VerifyCreatingRunsAndJobs() { // This run should be assigned to Default experiment ResourceReferences: []*run_model.APIResourceReference{ { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypePIPELINEVERSION, ID: pipelines[0].DefaultVersion.ID}, - Relationship: run_model.APIRelationshipCREATOR, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypePIPELINEVERSION.Pointer(), ID: pipelines[0].DefaultVersion.ID}, + Relationship: run_model.APIRelationshipCREATOR.Pointer(), }, }, }} @@ -526,8 +548,8 @@ func (s *UpgradeTests) VerifyCreatingRunsAndJobs() { runFromPipeline.Run.ResourceReferences, []*run_model.APIResourceReference{ { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: experiments[4].ID}, - Relationship: run_model.APIRelationshipOWNER, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT.Pointer(), ID: experiments[4].ID}, + Relationship: run_model.APIRelationshipOWNER.Pointer(), }, }, )) @@ -535,26 +557,26 @@ func (s *UpgradeTests) VerifyCreatingRunsAndJobs() { runFromPipelineVersion.Run.ResourceReferences, []*run_model.APIResourceReference{ { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: experiments[0].ID}, - Relationship: run_model.APIRelationshipOWNER, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT.Pointer(), ID: experiments[0].ID}, + Relationship: run_model.APIRelationshipOWNER.Pointer(), }, }, )) /* ---------- Create a new recurring run based on the second oldest pipeline version and belonging to the second oldest experiment 
---------- */ - createJobRequest := &jobparams.JobServiceCreateJobParams{Body: &job_model.APIJob{ + createJobRequest := &jobparams.JobServiceCreateJobParams{Job: &job_model.APIJob{ Description: "a recurring run from an old pipeline version", Enabled: true, MaxConcurrency: 10, Name: "sequential job from pipeline version", ResourceReferences: []*job_model.APIResourceReference{ { - Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT, ID: experiments[1].ID}, - Relationship: job_model.APIRelationshipOWNER, + Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT.Pointer(), ID: experiments[1].ID}, + Relationship: job_model.APIRelationshipOWNER.Pointer(), }, { - Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypePIPELINEVERSION, ID: pipelines[0].DefaultVersion.ID}, - Relationship: job_model.APIRelationshipCREATOR, + Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypePIPELINEVERSION.Pointer(), ID: pipelines[0].DefaultVersion.ID}, + Relationship: job_model.APIRelationshipCREATOR.Pointer(), }, }, }} @@ -566,8 +588,8 @@ func (s *UpgradeTests) VerifyCreatingRunsAndJobs() { createdJob.ResourceReferences, []*job_model.APIResourceReference{ { - Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT, ID: experiments[1].ID}, - Relationship: job_model.APIRelationshipOWNER, + Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT.Pointer(), ID: experiments[1].ID}, + Relationship: job_model.APIRelationshipOWNER.Pointer(), }, }, )) @@ -594,8 +616,8 @@ func checkHelloWorldRunDetail(t *testing.T, runDetail *run_model.APIRunDetail) { }, ResourceReferences: []*run_model.APIResourceReference{ { - Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: expectedExperimentID}, - Name: "hello world experiment", Relationship: run_model.APIRelationshipOWNER, + Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT.Pointer(), ID: expectedExperimentID}, + Name: "hello world experiment", Relationship: run_model.APIRelationshipOWNER.Pointer(), }, }, ServiceAccount: test.GetDefaultPipelineRunnerServiceAccount(*isKubeflowMode), @@ -605,14 +627,21 @@ func checkHelloWorldRunDetail(t *testing.T, runDetail *run_model.APIRunDetail) { } assert.True(t, test.VerifyRunResourceReferences(runDetail.Run.ResourceReferences, expectedRun.ResourceReferences), "Run's res references %v does not include %v", runDetail.Run.ResourceReferences, expectedRun.ResourceReferences) expectedRun.ResourceReferences = runDetail.Run.ResourceReferences - assert.Equal(t, expectedRun, runDetail.Run) + + opts := []cmp.Option{ + cmp.Comparer(func(x, y strfmt.DateTime) bool { + return x.String() == y.String() + }), + } + diff := cmp.Diff(expectedRun, runDetail.Run, opts...) 
+ assert.Empty(t, diff, "APIRuns differ: %s", diff) } func (s *UpgradeTests) createHelloWorldExperiment() *experiment_model.APIExperiment { t := s.T() experiment := test.GetExperiment("hello world experiment", "", s.resourceNamespace) - helloWorldExperiment, err := s.experimentClient.Create(&experimentParams.ExperimentServiceCreateExperimentV1Params{Body: experiment}) + helloWorldExperiment, err := s.experimentClient.Create(&experimentParams.ExperimentServiceCreateExperimentV1Params{Experiment: experiment}) require.Nil(t, err) return helloWorldExperiment diff --git a/backend/test/integration/visualization_api_test.go b/backend/test/integration/visualization_api_test.go index 0519aaae39d..ac7f6d024e1 100644 --- a/backend/test/integration/visualization_api_test.go +++ b/backend/test/integration/visualization_api_test.go @@ -17,11 +17,13 @@ package integration import ( "testing" - "github.com/golang/glog" params "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/visualization_client/visualization_service" "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/visualization_model" api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v1" "github.com/kubeflow/pipelines/backend/test" + "github.com/kubeflow/pipelines/backend/test/config" + + "github.com/golang/glog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" ) @@ -46,7 +48,7 @@ func (s *VisualizationApiTest) SetupTest() { glog.Exitf("Failed to initialize test. Error: %v", err) } } - s.namespace = *namespace + s.namespace = *config.Namespace var newVisualizationClient func() (*api_server.VisualizationClient, error) @@ -54,13 +56,13 @@ func (s *VisualizationApiTest) SetupTest() { s.resourceNamespace = *resourceNamespace newVisualizationClient = func() (*api_server.VisualizationClient, error) { - return api_server.NewKubeflowInClusterVisualizationClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterVisualizationClient(s.namespace, *config.DebugMode) } } else { - clientConfig := test.GetClientConfig(*namespace) + clientConfig := test.GetClientConfig(*config.Namespace) newVisualizationClient = func() (*api_server.VisualizationClient, error) { - return api_server.NewVisualizationClient(clientConfig, *isDebugMode) + return api_server.NewVisualizationClient(clientConfig, *config.DebugMode) } } @@ -77,10 +79,10 @@ func (s *VisualizationApiTest) TestVisualizationAPI() { /* ---------- Generate custom visualization --------- */ visualization := &visualization_model.APIVisualization{ Arguments: `{"code": ["print(2)"]}`, - Type: visualization_model.APIVisualizationTypeCUSTOM, + Type: visualization_model.APIVisualizationTypeCUSTOM.Pointer(), } customVisualization, err := s.visualizationClient.Create(¶ms.VisualizationServiceCreateVisualizationV1Params{ - Body: visualization, + Visualization: visualization, }) assert.Nil(t, err) assert.NotNil(t, customVisualization.HTML) diff --git a/backend/test/integration/webhook_test.go b/backend/test/integration/webhook_test.go index ceecb5de46b..dbf25ecceae 100644 --- a/backend/test/integration/webhook_test.go +++ b/backend/test/integration/webhook_test.go @@ -23,6 +23,7 @@ import ( "github.com/kubeflow/pipelines/backend/src/common/util" "github.com/kubeflow/pipelines/backend/src/crd/kubernetes/v2beta1" + "github.com/stretchr/testify/require" k8serrors "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" diff --git a/backend/test/logger/logger.go b/backend/test/logger/logger.go new file mode 
100644 index 00000000000..78822f19b98 --- /dev/null +++ b/backend/test/logger/logger.go @@ -0,0 +1,26 @@ +// Copyright 2021-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package logger + +import ( + "fmt" + + "github.com/onsi/ginkgo/v2" +) + +func Log(s string, arguments ...any) { + formatedString := fmt.Sprintf(s, arguments...) + ginkgo.GinkgoWriter.Println(formatedString) +} diff --git a/backend/test/proto_tests/README.md b/backend/test/proto_tests/README.md new file mode 100644 index 00000000000..cd0e97829f1 --- /dev/null +++ b/backend/test/proto_tests/README.md @@ -0,0 +1,29 @@ +# Proto backwards compatibility testing + +This folder contains tests that verify that newly generated proto files do not break backwards compatibility with proto binary message translation and JSON unmarshalling. + +The structure is as follows: + +## objects.go +These contain the Go Structs that use the latest proto generated Go Code. + +## testdata/generated-commit +These files contain the proto binary messages and the JSON unmarshalled data using proto generated files found in the commit: ``. + +This allows us to have a ground truth to compare the newly generated code against. The tests will unmarshal the proto binaries from `` and compare it with the structs created using new generated code. + +In much the same way we also test for JSON unmarshalling. You will notice that we adjust the unmarshalling options for JSON to match what we use in the grpc-gateway unmarshalling configuration for the grpc server. + +## Updating generate code + +Generated code can be updated by running the test code with `UPDATE_EXPECTED=true` environment variable set in your commandline. For example: + +```shell +cd backend/test/proto_tests +export UPDATE_EXPECTED=true +go test . # update generate code +export UPDATE_EXPECTED=false +go test . # verify your changes +``` + +Note that it is very unlikely you should need to update this code, if you do then it is a good sign you are introducing breaking changes, so use it wisely. diff --git a/backend/test/proto_tests/objects.go b/backend/test/proto_tests/objects.go new file mode 100644 index 00000000000..1bce695c684 --- /dev/null +++ b/backend/test/proto_tests/objects.go @@ -0,0 +1,315 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
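The new backend/test/logger package added above routes formatted log lines through ginkgo.GinkgoWriter. A hypothetical usage sketch (the spec names and file layout are illustrative, not part of this diff):

```go
package sample_test

import (
	"testing"

	. "github.com/onsi/ginkgo/v2"

	"github.com/kubeflow/pipelines/backend/test/logger"
)

func TestSample(t *testing.T) {
	RunSpecs(t, "Logger usage sketch")
}

var _ = Describe("uploading a pipeline", func() {
	It("logs progress", func() {
		// logger.Log formats like fmt.Sprintf and prints through
		// ginkgo.GinkgoWriter, so the output is attached to the running spec.
		logger.Log("uploading pipeline %s", "hello-world.yaml")
	})
})
```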
+ +package proto_tests + +import ( + "os" + "path/filepath" + "time" + + "google.golang.org/genproto/googleapis/rpc/status" + "google.golang.org/protobuf/types/known/structpb" + "google.golang.org/protobuf/types/known/timestamppb" + + specPB "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + pb "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/server" + + "github.com/golang/glog" +) + +const mockPipelineID = "9b187b86-7c0a-42ae-a0bc-2a746b6eb7a3" +const mockPipelineVersionID = "e15dc3ec-b45e-4cc7-bb07-e76b5dbce99a" +const pipelineSpecYamlPath = "pipelinespec.yaml" + +func mockPipelineSpec() *structpb.Struct { + yamlContent, err := os.ReadFile(filepath.Join("testdata", pipelineSpecYamlPath)) + if err != nil { + glog.Fatal(err) + } + spec, err := server.YamlStringToPipelineSpecStruct(string(yamlContent)) + if err != nil { + glog.Fatal(err) + } + return spec +} + +func fixedTimestamp() *timestamppb.Timestamp { + return timestamppb.New(time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC)) +} + +var completedRun = &pb.Run{ + RunId: "completed-run-123", + DisplayName: "Production Pipeline Run", + ExperimentId: "exp-456", + StorageState: pb.Run_AVAILABLE, + Description: "Production pipeline execution for data processing", + ServiceAccount: "sa1", + CreatedAt: fixedTimestamp(), + ScheduledAt: fixedTimestamp(), + FinishedAt: fixedTimestamp(), + RecurringRunId: "recurring-schedule-001", + State: pb.RuntimeState_SUCCEEDED, + PipelineSource: &pb.Run_PipelineVersionReference{ + PipelineVersionReference: &pb.PipelineVersionReference{ + PipelineId: mockPipelineID, + PipelineVersionId: mockPipelineVersionID, + }, + }, + RuntimeConfig: &pb.RuntimeConfig{ + Parameters: map[string]*structpb.Value{ + "batch_size": structpb.NewNumberValue(1000), + "learning_rate": structpb.NewStringValue("foo"), + }, + }, +} + +var completedRunWithPipelineSpec = &pb.Run{ + RunId: "completed-run-123", + DisplayName: "Production Pipeline Run", + ExperimentId: "exp-456", + StorageState: pb.Run_AVAILABLE, + Description: "Production pipeline execution for data processing", + ServiceAccount: "sa1", + CreatedAt: fixedTimestamp(), + ScheduledAt: fixedTimestamp(), + FinishedAt: fixedTimestamp(), + RecurringRunId: "recurring-schedule-001", + State: pb.RuntimeState_SUCCEEDED, + PipelineSource: &pb.Run_PipelineSpec{ + PipelineSpec: mockPipelineSpec(), + }, + RuntimeConfig: &pb.RuntimeConfig{ + Parameters: map[string]*structpb.Value{ + "batch_size": structpb.NewNumberValue(1000), + "learning_rate": structpb.NewStringValue("foo"), + }, + }, +} + +var failedRun = &pb.Run{ + RunId: "failed-run-456", + DisplayName: "Data Processing Pipeline", + ExperimentId: "exp-789", + StorageState: pb.Run_AVAILABLE, + Description: "Failed attempt to process customer data", + ServiceAccount: "sa2", + CreatedAt: fixedTimestamp(), + ScheduledAt: fixedTimestamp(), + FinishedAt: fixedTimestamp(), + State: pb.RuntimeState_FAILED, + Error: &status.Status{ + Code: 1, + Message: "This was a Failed Run.", + }, +} + +var pipeline = &pb.Pipeline{ + PipelineId: mockPipelineID, + DisplayName: "Production Data Processing Pipeline", + Name: "pipeline1", + Description: "Pipeline for processing and analyzing production data", + CreatedAt: fixedTimestamp(), + Namespace: "namespace1", + Error: &status.Status{ + Code: 0, + Message: "This a successful pipeline.", + }, +} + +var pipelineVersion = &pb.PipelineVersion{ + PipelineVersionId: mockPipelineVersionID, + PipelineId: mockPipelineID, + DisplayName: 
"v1.0.0 Production Data Processing Pipeline", + Name: "pipelineversion1", + Description: "First stable version of the production data processing pipeline", + CreatedAt: fixedTimestamp(), + PipelineSpec: mockPipelineSpec(), + PackageUrl: &pb.Url{PipelineUrl: "gs://my-bucket/pipelines/pipeline1-v1.0.0.yaml"}, + CodeSourceUrl: "https://github.com/org/repo/pipeline1/tree/v1.0.0", + Error: &status.Status{ + Code: 0, + Message: "This is a successful pipeline version.", + }, +} + +var experiment = &pb.Experiment{ + ExperimentId: "exp-456", + DisplayName: "Production Data Processing Experiment", + Description: "Experiment for testing production data processing pipeline", + CreatedAt: fixedTimestamp(), + Namespace: "namespace1", + StorageState: pb.Experiment_AVAILABLE, + LastRunCreatedAt: fixedTimestamp(), +} + +var visualization = &pb.Visualization{ + Type: pb.Visualization_ROC_CURVE, + Source: "gs://my-bucket/data/visualization.csv", + Arguments: "{\"param1\": \"value1\", \"param2\": \"value2\"}", + Html: "
Generated Visualization
", + Error: "", +} + +var recurringRun = &pb.RecurringRun{ + RecurringRunId: "recurring-run-789", + DisplayName: "Daily Data Processing", + Description: "Scheduled pipeline for daily data processing tasks", + ServiceAccount: "sa3", + CreatedAt: fixedTimestamp(), + UpdatedAt: fixedTimestamp(), + Status: pb.RecurringRun_ENABLED, + PipelineSource: &pb.RecurringRun_PipelineVersionReference{ + PipelineVersionReference: &pb.PipelineVersionReference{ + PipelineId: mockPipelineID, + PipelineVersionId: mockPipelineVersionID, + }, + }, + RuntimeConfig: &pb.RuntimeConfig{ + Parameters: map[string]*structpb.Value{ + "processing_date": structpb.NewStringValue("${system.date}"), + "batch_size": structpb.NewNumberValue(500), + }, + }, + Trigger: &pb.Trigger{ + Trigger: &pb.Trigger_PeriodicSchedule{ + PeriodicSchedule: &pb.PeriodicSchedule{ + StartTime: fixedTimestamp(), + EndTime: nil, + IntervalSecond: 86400, + }, + }, + }, + Mode: 1, + Namespace: "namespace1", +} + +var pipelineSpec = &specPB.PipelineSpec{ + PipelineInfo: &specPB.PipelineInfo{ + Name: "sample-pipeline", + Description: "Sample pipeline for testing", + }, + DeploymentSpec: &structpb.Struct{}, + Root: &specPB.ComponentSpec{ + InputDefinitions: &specPB.ComponentInputsSpec{ + Parameters: map[string]*specPB.ComponentInputsSpec_ParameterSpec{ + "input1": { + ParameterType: specPB.ParameterType_STRING, + DefaultValue: structpb.NewStringValue("foo"), + }, + "input2": { + ParameterType: specPB.ParameterType_STRING, + }, + }, + }, + OutputDefinitions: &specPB.ComponentOutputsSpec{ + Parameters: map[string]*specPB.ComponentOutputsSpec_ParameterSpec{ + "output1": { + ParameterType: specPB.ParameterType_STRING, + }, + "output2": { + ParameterType: specPB.ParameterType_NUMBER_INTEGER, + }, + }, + }, + Implementation: &specPB.ComponentSpec_ExecutorLabel{ + ExecutorLabel: "root-executor", + }, + }, + Components: map[string]*specPB.ComponentSpec{ + "comp-1": { + Implementation: &specPB.ComponentSpec_Dag{ + Dag: &specPB.DagSpec{ + Tasks: map[string]*specPB.PipelineTaskSpec{ + "task1": { + TaskInfo: &specPB.PipelineTaskInfo{ + Name: "task1", + }, + ComponentRef: &specPB.ComponentRef{ + Name: "comp-1", + }, + Inputs: &specPB.TaskInputsSpec{ + Parameters: map[string]*specPB.TaskInputsSpec_InputParameterSpec{ + "param1": { + Kind: &specPB.TaskInputsSpec_InputParameterSpec_ComponentInputParameter{ + ComponentInputParameter: "param1", + }, + }, + }, + }, + }, + }, + Outputs: &specPB.DagOutputsSpec{ + Parameters: map[string]*specPB.DagOutputsSpec_DagOutputParameterSpec{ + "output1": { + Kind: &specPB.DagOutputsSpec_DagOutputParameterSpec_ValueFromParameter{ + ValueFromParameter: &specPB.DagOutputsSpec_ParameterSelectorSpec{ + ProducerSubtask: "foo", + OutputParameterKey: "bar", + }, + }, + }, + }, + }, + }, + }, + InputDefinitions: &specPB.ComponentInputsSpec{ + Parameters: map[string]*specPB.ComponentInputsSpec_ParameterSpec{ + "param1": { + ParameterType: specPB.ParameterType_STRING, + }, + }, + }, + OutputDefinitions: &specPB.ComponentOutputsSpec{ + Parameters: map[string]*specPB.ComponentOutputsSpec_ParameterSpec{ + "output1": { + ParameterType: specPB.ParameterType_STRING, + }, + }, + }, + }, + }, +} + +var platformSpec = &specPB.PlatformSpec{ + Platforms: map[string]*specPB.SinglePlatformSpec{ + "kubernetes": { + Platform: "kubernetes", + DeploymentSpec: &specPB.PlatformDeploymentConfig{ + Executors: map[string]*structpb.Struct{ + "root-executor": { + Fields: map[string]*structpb.Value{ + "container": structpb.NewStructValue(&structpb.Struct{ + Fields: 
map[string]*structpb.Value{ + "image": structpb.NewStringValue("test-image"), + }, + }), + }, + }, + }, + }, + Config: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "project": structpb.NewStringValue("test-project"), + }, + }, + PipelineConfig: &specPB.PipelineConfig{ + SemaphoreKey: "test-key", + MutexName: "test-mutex", + ResourceTtl: 24, + }, + }, + }, +} diff --git a/backend/test/proto_tests/proto_test.go b/backend/test/proto_tests/proto_test.go new file mode 100644 index 00000000000..469a084fdf2 --- /dev/null +++ b/backend/test/proto_tests/proto_test.go @@ -0,0 +1,105 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package proto_tests + +import ( + "fmt" + "path/filepath" + "testing" + + specPB "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + pb "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" +) + +// This is the commit that contains the proto generated files +// that were used to generate the test data. +const commit = "1791485" + +func generatePath(path string) string { + return filepath.Join(fmt.Sprintf("generated-%s", commit), path) +} + +func TestRuns(t *testing.T) { + testOBJ(t, caseOpts[*pb.Run]{ + message: completedRun, + expectedPBPath: generatePath("run_completed.pb"), + expectedJSONPath: generatePath("run_completed.json"), + }) + + testOBJ(t, caseOpts[*pb.Run]{ + message: completedRunWithPipelineSpec, + expectedPBPath: generatePath("run_completed_with_spec.pb"), + expectedJSONPath: generatePath("run_completed_with_spec.json"), + }) + + testOBJ(t, caseOpts[*pb.Run]{ + message: failedRun, + expectedPBPath: generatePath("run_failed.pb"), + expectedJSONPath: generatePath("run_failed.json"), + }) +} + +func TestPipelines(t *testing.T) { + testOBJ(t, caseOpts[*pb.Pipeline]{ + message: pipeline, + expectedPBPath: generatePath("pipeline.pb"), + expectedJSONPath: generatePath("pipeline.json"), + }) + testOBJ(t, caseOpts[*pb.PipelineVersion]{ + message: pipelineVersion, + expectedPBPath: generatePath("pipeline_version.pb"), + expectedJSONPath: generatePath("pipeline_version.json"), + }) +} + +func TestExperiments(t *testing.T) { + testOBJ(t, caseOpts[*pb.Experiment]{ + message: experiment, + expectedPBPath: generatePath("experiment.pb"), + expectedJSONPath: generatePath("experiment.json"), + }) +} + +func TestVisualization(t *testing.T) { + testOBJ(t, caseOpts[*pb.Visualization]{ + message: visualization, + expectedPBPath: generatePath("visualization.pb"), + expectedJSONPath: generatePath("visualization.json"), + }) +} + +func TestRecurringRun(t *testing.T) { + testOBJ(t, caseOpts[*pb.RecurringRun]{ + message: recurringRun, + expectedPBPath: generatePath("recurring_run.pb"), + expectedJSONPath: generatePath("recurring_run.json"), + }) +} + +func TestPipelineSpec(t *testing.T) { + testOBJ(t, caseOpts[*specPB.PipelineSpec]{ + message: pipelineSpec, + expectedPBPath: generatePath("pipeline_spec.pb"), + expectedJSONPath: generatePath("pipeline_spec.json"), + }) +} + +func TestPlatformSpec(t 
*testing.T) { + testOBJ(t, caseOpts[*specPB.PlatformSpec]{ + message: platformSpec, + expectedPBPath: generatePath("platform_spec.pb"), + expectedJSONPath: generatePath("platform_spec.json"), + }) +} diff --git a/backend/test/proto_tests/testdata/generated-1791485/experiment.json b/backend/test/proto_tests/testdata/generated-1791485/experiment.json new file mode 100644 index 00000000000..85272dbff95 --- /dev/null +++ b/backend/test/proto_tests/testdata/generated-1791485/experiment.json @@ -0,0 +1,9 @@ +{ + "experiment_id": "exp-456", + "display_name": "Production Data Processing Experiment", + "description": "Experiment for testing production data processing pipeline", + "created_at": "2024-01-01T12:00:00Z", + "namespace": "namespace1", + "storage_state": "AVAILABLE", + "last_run_created_at": "2024-01-01T12:00:00Z" +} diff --git a/backend/test/proto_tests/testdata/generated-1791485/experiment.pb b/backend/test/proto_tests/testdata/generated-1791485/experiment.pb new file mode 100644 index 00000000000..dcacb6def32 --- /dev/null +++ b/backend/test/proto_tests/testdata/generated-1791485/experiment.pb @@ -0,0 +1,3 @@ + +exp-456%Production Data Processing Experiment:Experiment for testing production data processing pipeline"ÀÒʬ* +namespace10:ÀÒʬ \ No newline at end of file diff --git a/backend/test/proto_tests/testdata/generated-1791485/pipeline.json b/backend/test/proto_tests/testdata/generated-1791485/pipeline.json new file mode 100644 index 00000000000..f209be35623 --- /dev/null +++ b/backend/test/proto_tests/testdata/generated-1791485/pipeline.json @@ -0,0 +1,13 @@ +{ + "pipeline_id": "9b187b86-7c0a-42ae-a0bc-2a746b6eb7a3", + "display_name": "Production Data Processing Pipeline", + "name": "pipeline1", + "description": "Pipeline for processing and analyzing production data", + "created_at": "2024-01-01T12:00:00Z", + "namespace": "namespace1", + "error": { + "code": 0, + "message": "This a successful pipeline.", + "details": [] + } +} diff --git a/backend/test/proto_tests/testdata/generated-1791485/pipeline.pb b/backend/test/proto_tests/testdata/generated-1791485/pipeline.pb new file mode 100644 index 00000000000..dbe2a21b47c --- /dev/null +++ b/backend/test/proto_tests/testdata/generated-1791485/pipeline.pb @@ -0,0 +1,3 @@ + +$9b187b86-7c0a-42ae-a0bc-2a746b6eb7a3#Production Data Processing Pipeline5Pipeline for processing and analyzing production data"ÀÒʬ* +namespace12This a successful pipeline.: pipeline1 \ No newline at end of file diff --git a/backend/test/proto_tests/testdata/generated-1791485/pipeline_spec.json b/backend/test/proto_tests/testdata/generated-1791485/pipeline_spec.json new file mode 100644 index 00000000000..073e727669e --- /dev/null +++ b/backend/test/proto_tests/testdata/generated-1791485/pipeline_spec.json @@ -0,0 +1,115 @@ +{ + "pipeline_info": { + "name": "sample-pipeline", + "display_name": "", + "description": "Sample pipeline for testing" + }, + "deployment_spec": {}, + "sdk_version": "", + "schema_version": "", + "components": { + "comp-1": { + "input_definitions": { + "artifacts": {}, + "parameters": { + "param1": { + "type": "PRIMITIVE_TYPE_UNSPECIFIED", + "parameter_type": "STRING", + "default_value": null, + "is_optional": false, + "description": "" + } + } + }, + "output_definitions": { + "artifacts": {}, + "parameters": { + "output1": { + "type": "PRIMITIVE_TYPE_UNSPECIFIED", + "parameter_type": "STRING", + "description": "" + } + } + }, + "dag": { + "tasks": { + "task1": { + "task_info": { + "name": "task1" + }, + "inputs": { + "parameters": { + "param1": 
{ + "component_input_parameter": "param1", + "parameter_expression_selector": "" + } + }, + "artifacts": {} + }, + "dependent_tasks": [], + "caching_options": null, + "component_ref": { + "name": "comp-1" + }, + "trigger_policy": null, + "retry_policy": null, + "iterator_policy": null + } + }, + "outputs": { + "artifacts": {}, + "parameters": { + "output1": { + "value_from_parameter": { + "producer_subtask": "foo", + "output_parameter_key": "bar" + } + } + } + } + }, + "single_platform_specs": [], + "task_config_passthroughs": [] + } + }, + "root": { + "input_definitions": { + "artifacts": {}, + "parameters": { + "input1": { + "type": "PRIMITIVE_TYPE_UNSPECIFIED", + "parameter_type": "STRING", + "default_value": "foo", + "is_optional": false, + "description": "" + }, + "input2": { + "type": "PRIMITIVE_TYPE_UNSPECIFIED", + "parameter_type": "STRING", + "default_value": null, + "is_optional": false, + "description": "" + } + } + }, + "output_definitions": { + "artifacts": {}, + "parameters": { + "output1": { + "type": "PRIMITIVE_TYPE_UNSPECIFIED", + "parameter_type": "STRING", + "description": "" + }, + "output2": { + "type": "PRIMITIVE_TYPE_UNSPECIFIED", + "parameter_type": "NUMBER_INTEGER", + "description": "" + } + } + }, + "executor_label": "root-executor", + "single_platform_specs": [], + "task_config_passthroughs": [] + }, + "default_pipeline_root": "" +} diff --git a/backend/test/proto_tests/testdata/generated-1791485/pipeline_spec.pb b/backend/test/proto_tests/testdata/generated-1791485/pipeline_spec.pb new file mode 100644 index 00000000000..9e0d394e65b Binary files /dev/null and b/backend/test/proto_tests/testdata/generated-1791485/pipeline_spec.pb differ diff --git a/backend/test/proto_tests/testdata/generated-1791485/pipeline_version.json b/backend/test/proto_tests/testdata/generated-1791485/pipeline_version.json new file mode 100644 index 00000000000..cd2c395b4c8 --- /dev/null +++ b/backend/test/proto_tests/testdata/generated-1791485/pipeline_version.json @@ -0,0 +1,68 @@ +{ + "pipeline_id": "9b187b86-7c0a-42ae-a0bc-2a746b6eb7a3", + "pipeline_version_id": "e15dc3ec-b45e-4cc7-bb07-e76b5dbce99a", + "display_name": "v1.0.0 Production Data Processing Pipeline", + "name": "pipelineversion1", + "description": "First stable version of the production data processing pipeline", + "created_at": "2024-01-01T12:00:00Z", + "package_url": { + "pipeline_url": "gs://my-bucket/pipelines/pipeline1-v1.0.0.yaml" + }, + "code_source_url": "https://github.com/org/repo/pipeline1/tree/v1.0.0", + "pipeline_spec": { + "components": { + "comp-hello-world": { + "executorLabel": "exec-hello-world" + } + }, + "deploymentSpec": { + "executors": { + "exec-hello-world": { + "container": { + "args": [ + "--executor_input", + "{{$}}", + "--function_to_execute", + "hello_world" + ], + "command": [ + "sh", + "-c", + "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.0' '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"$0\" \"$@\"\n", + "sh", + "-ec", + "program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", + "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef hello_world():\n print(\"hello world\")\n\n" + ], + "image": "python:3.9" + } + } + } + }, + "pipelineInfo": { + "name": "pipeline-hello-world" + }, + "root": { + "dag": { + "tasks": { + "hello-world": { + "cachingOptions": {}, + "componentRef": { + "name": "comp-hello-world" + }, + "taskInfo": { + "name": "hello-world" + } + } + } + } + }, + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-2.14.0" + }, + "error": { + "code": 0, + "message": "This is a successful pipeline version.", + "details": [] + } +} diff --git a/backend/test/proto_tests/testdata/generated-1791485/pipeline_version.pb b/backend/test/proto_tests/testdata/generated-1791485/pipeline_version.pb new file mode 100644 index 00000000000..6b6c23e1401 Binary files /dev/null and b/backend/test/proto_tests/testdata/generated-1791485/pipeline_version.pb differ diff --git a/backend/test/proto_tests/testdata/generated-1791485/platform_spec.json b/backend/test/proto_tests/testdata/generated-1791485/platform_spec.json new file mode 100644 index 00000000000..a7840b1a173 --- /dev/null +++ b/backend/test/proto_tests/testdata/generated-1791485/platform_spec.json @@ -0,0 +1,24 @@ +{ + "platforms": { + "kubernetes": { + "deployment_spec": { + "executors": { + "root-executor": { + "container": { + "image": "test-image" + } + } + } + }, + "platform": "kubernetes", + "config": { + "project": "test-project" + }, + "pipelineConfig": { + "semaphore_key": "test-key", + "mutex_name": "test-mutex", + "resource_ttl": 24 + } + } + } +} diff --git a/backend/test/proto_tests/testdata/generated-1791485/platform_spec.pb b/backend/test/proto_tests/testdata/generated-1791485/platform_spec.pb new file mode 100644 index 00000000000..ce75cf60837 --- /dev/null +++ b/backend/test/proto_tests/testdata/generated-1791485/platform_spec.pb @@ -0,0 +1,17 @@ + + + +kubernetes€ +; +9 + root-executor( +& + container* + +image  +test-image +kubernetes + +project test-project" +test-key +test-mutex \ No newline at end of file diff --git a/backend/test/proto_tests/testdata/generated-1791485/recurring_run.json b/backend/test/proto_tests/testdata/generated-1791485/recurring_run.json new file mode 100644 index 00000000000..634b3592f76 --- /dev/null +++ b/backend/test/proto_tests/testdata/generated-1791485/recurring_run.json @@ -0,0 +1,33 @@ +{ + "recurring_run_id": "recurring-run-789", + "display_name": "Daily Data Processing", + "description": "Scheduled pipeline for daily data processing tasks", + "pipeline_version_reference": { + "pipeline_id": "9b187b86-7c0a-42ae-a0bc-2a746b6eb7a3", + "pipeline_version_id": "e15dc3ec-b45e-4cc7-bb07-e76b5dbce99a" + }, + "runtime_config": { + "parameters": { + "batch_size": 500, + "processing_date": "${system.date}" + }, + "pipeline_root": "" + }, + "service_account": "sa3", + "max_concurrency": "0", + "trigger": { + "periodic_schedule": { + "start_time": "2024-01-01T12:00:00Z", + "end_time": null, + 
"interval_second": "86400" + } + }, + "mode": "ENABLE", + "created_at": "2024-01-01T12:00:00Z", + "updated_at": "2024-01-01T12:00:00Z", + "status": "ENABLED", + "error": null, + "no_catchup": false, + "namespace": "namespace1", + "experiment_id": "" +} diff --git a/backend/test/proto_tests/testdata/generated-1791485/recurring_run.pb b/backend/test/proto_tests/testdata/generated-1791485/recurring_run.pb new file mode 100644 index 00000000000..8488f4e6070 Binary files /dev/null and b/backend/test/proto_tests/testdata/generated-1791485/recurring_run.pb differ diff --git a/backend/test/proto_tests/testdata/generated-1791485/run_completed.json b/backend/test/proto_tests/testdata/generated-1791485/run_completed.json new file mode 100644 index 00000000000..39a88b57cc1 --- /dev/null +++ b/backend/test/proto_tests/testdata/generated-1791485/run_completed.json @@ -0,0 +1,27 @@ +{ + "experiment_id": "exp-456", + "run_id": "completed-run-123", + "display_name": "Production Pipeline Run", + "storage_state": "AVAILABLE", + "description": "Production pipeline execution for data processing", + "pipeline_version_reference": { + "pipeline_id": "9b187b86-7c0a-42ae-a0bc-2a746b6eb7a3", + "pipeline_version_id": "e15dc3ec-b45e-4cc7-bb07-e76b5dbce99a" + }, + "runtime_config": { + "parameters": { + "batch_size": 1000, + "learning_rate": "foo" + }, + "pipeline_root": "" + }, + "service_account": "sa1", + "created_at": "2024-01-01T12:00:00Z", + "scheduled_at": "2024-01-01T12:00:00Z", + "finished_at": "2024-01-01T12:00:00Z", + "state": "SUCCEEDED", + "error": null, + "run_details": null, + "recurring_run_id": "recurring-schedule-001", + "state_history": [] +} diff --git a/backend/test/proto_tests/testdata/generated-1791485/run_completed.pb b/backend/test/proto_tests/testdata/generated-1791485/run_completed.pb new file mode 100644 index 00000000000..08da9027c27 Binary files /dev/null and b/backend/test/proto_tests/testdata/generated-1791485/run_completed.pb differ diff --git a/backend/test/proto_tests/testdata/generated-1791485/run_completed_with_spec.json b/backend/test/proto_tests/testdata/generated-1791485/run_completed_with_spec.json new file mode 100644 index 00000000000..0685b66eadd --- /dev/null +++ b/backend/test/proto_tests/testdata/generated-1791485/run_completed_with_spec.json @@ -0,0 +1,74 @@ +{ + "experiment_id": "exp-456", + "run_id": "completed-run-123", + "display_name": "Production Pipeline Run", + "storage_state": "AVAILABLE", + "description": "Production pipeline execution for data processing", + "pipeline_spec": { + "components": { + "comp-hello-world": { + "executorLabel": "exec-hello-world" + } + }, + "deploymentSpec": { + "executors": { + "exec-hello-world": { + "container": { + "args": [ + "--executor_input", + "{{$}}", + "--function_to_execute", + "hello_world" + ], + "command": [ + "sh", + "-c", + "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.0' '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"$0\" \"$@\"\n", + "sh", + "-ec", + "program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", + "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef hello_world():\n print(\"hello world\")\n\n" + ], + "image": "python:3.9" + } + } + } + }, + "pipelineInfo": { + "name": "pipeline-hello-world" + }, + "root": { + "dag": { + "tasks": { + "hello-world": { + "cachingOptions": {}, + "componentRef": { + "name": "comp-hello-world" + }, + "taskInfo": { + "name": "hello-world" + } + } + } + } + }, + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-2.14.0" + }, + "runtime_config": { + "parameters": { + "batch_size": 1000, + "learning_rate": "foo" + }, + "pipeline_root": "" + }, + "service_account": "sa1", + "created_at": "2024-01-01T12:00:00Z", + "scheduled_at": "2024-01-01T12:00:00Z", + "finished_at": "2024-01-01T12:00:00Z", + "state": "SUCCEEDED", + "error": null, + "run_details": null, + "recurring_run_id": "recurring-schedule-001", + "state_history": [] +} diff --git a/backend/test/proto_tests/testdata/generated-1791485/run_completed_with_spec.pb b/backend/test/proto_tests/testdata/generated-1791485/run_completed_with_spec.pb new file mode 100644 index 00000000000..cb9a24f070c Binary files /dev/null and b/backend/test/proto_tests/testdata/generated-1791485/run_completed_with_spec.pb differ diff --git a/backend/test/proto_tests/testdata/generated-1791485/run_failed.json b/backend/test/proto_tests/testdata/generated-1791485/run_failed.json new file mode 100644 index 00000000000..58783d9306b --- /dev/null +++ b/backend/test/proto_tests/testdata/generated-1791485/run_failed.json @@ -0,0 +1,21 @@ +{ + "experiment_id": "exp-789", + "run_id": "failed-run-456", + "display_name": "Data Processing Pipeline", + "storage_state": "AVAILABLE", + "description": "Failed attempt to process customer data", + "runtime_config": null, + "service_account": "sa2", + "created_at": "2024-01-01T12:00:00Z", + "scheduled_at": "2024-01-01T12:00:00Z", + "finished_at": "2024-01-01T12:00:00Z", + "state": "FAILED", + "error": { + "code": 1, + "message": "This was a Failed Run.", + "details": [] + }, + "run_details": null, + "recurring_run_id": "", + "state_history": [] +} diff --git a/backend/test/proto_tests/testdata/generated-1791485/run_failed.pb b/backend/test/proto_tests/testdata/generated-1791485/run_failed.pb new file mode 100644 index 00000000000..1dac325190d --- /dev/null +++ b/backend/test/proto_tests/testdata/generated-1791485/run_failed.pb @@ -0,0 +1,2 @@ + +exp-789failed-run-456Data Processing Pipeline *'Failed attempt to process customer dataJsa2RÀÒʬZÀÒʬbÀÒʬhrThis was a Failed Run. 
\ No newline at end of file diff --git a/backend/test/proto_tests/testdata/generated-1791485/visualization.json b/backend/test/proto_tests/testdata/generated-1791485/visualization.json new file mode 100644 index 00000000000..385d8797db4 --- /dev/null +++ b/backend/test/proto_tests/testdata/generated-1791485/visualization.json @@ -0,0 +1,7 @@ +{ + "type": "ROC_CURVE", + "source": "gs://my-bucket/data/visualization.csv", + "arguments": "{\"param1\": \"value1\", \"param2\": \"value2\"}", + "html": "
Generated Visualization
", + "error": "" +} diff --git a/backend/test/proto_tests/testdata/generated-1791485/visualization.pb b/backend/test/proto_tests/testdata/generated-1791485/visualization.pb new file mode 100644 index 00000000000..a4431c2becd --- /dev/null +++ b/backend/test/proto_tests/testdata/generated-1791485/visualization.pb @@ -0,0 +1 @@ +%gs://my-bucket/data/visualization.csv({"param1": "value1", "param2": "value2"}""
Generated Visualization
\ No newline at end of file diff --git a/backend/test/proto_tests/testdata/pipelinespec.yaml b/backend/test/proto_tests/testdata/pipelinespec.yaml new file mode 100644 index 00000000000..6d16209981b --- /dev/null +++ b/backend/test/proto_tests/testdata/pipelinespec.yaml @@ -0,0 +1,48 @@ +# PIPELINE DEFINITION +# Name: pipeline-hello-world +components: + comp-hello-world: + executorLabel: exec-hello-world +deploymentSpec: + executors: + exec-hello-world: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - hello_world + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.0'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef hello_world():\n print(\"hello world\")\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-hello-world +root: + dag: + tasks: + hello-world: + cachingOptions: {} + componentRef: + name: comp-hello-world + taskInfo: + name: hello-world +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.0 diff --git a/backend/test/proto_tests/util.go b/backend/test/proto_tests/util.go new file mode 100644 index 00000000000..0108fd133c3 --- /dev/null +++ b/backend/test/proto_tests/util.go @@ -0,0 +1,111 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package proto_tests + +import ( + "encoding/json" + "os" + "path/filepath" + "testing" + + "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/proto" + + "github.com/google/go-cmp/cmp" +) + +type caseOpts[T proto.Message] struct { + message T + expectedPBPath string + expectedJSONPath string +} + +func testOBJ[T proto.Message](t *testing.T, opts caseOpts[T]) { + binaryPath := filepath.Join("testdata", opts.expectedPBPath) + jsonPath := filepath.Join("testdata", opts.expectedJSONPath) + + // Serialize to binary + binaryData, err := proto.Marshal(opts.message) + if err != nil { + t.Fatalf("Failed to marshal to binary: %v", err) + } + + // Serialize to JSON + jsonData, err := protojson.MarshalOptions{ + Multiline: true, + Indent: " ", + EmitUnpopulated: true, + // emulate marshaling options that we use + // for grpc-gateway json marshaling + UseProtoNames: true, + }.Marshal(opts.message) + if err != nil { + t.Fatalf("Failed to marshal to JSON: %v", err) + } + + if os.Getenv("UPDATE_EXPECTED") == "true" { + // Create directories if they don't exist + if err := os.MkdirAll(filepath.Dir(binaryPath), 0755); err != nil { + t.Fatalf("Failed to create directory for binary file: %v", err) + } + if err := os.MkdirAll(filepath.Dir(jsonPath), 0755); err != nil { + t.Fatalf("Failed to create directory for JSON file: %v", err) + } + if err := os.WriteFile(binaryPath, binaryData, 0644); err != nil { + t.Fatalf("Failed to update expected binary file: %v", err) + } + if err := os.WriteFile(jsonPath, jsonData, 0644); err != nil { + t.Fatalf("Failed to update expected JSON file: %v", err) + } + t.Log("expected files updated.") + return + } + + // Validate binary round-trip + expectedBinary, err := os.ReadFile(binaryPath) + if err != nil { + t.Fatalf("Failed to read expected binary: %v", err) + } + // Create a new instance of the concrete type T to unmarshal into. + // Proto.Clone allows us to get a new zero-value instance of a + // proto.Message. It returns a proto.Message, which proto.Unmarshal + // expects. 
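+	// proto.Unmarshal resets the destination message before decoding, so the
+	// contents copied by proto.Clone do not affect the comparison below.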
+ expectedMsg := proto.Clone(opts.message) + if err := proto.Unmarshal(expectedBinary, expectedMsg); err != nil { + t.Fatalf("Failed to unmarshal expected binary: %v", err) + } + if !proto.Equal(opts.message, expectedMsg) { + t.Errorf("Binary round-trip mismatch with expected file") + } + + // Read expected JSON + expectedJSON, err := os.ReadFile(jsonPath) + if err != nil { + t.Fatalf("Failed to read expected JSON file: %v", err) + } + + // Unmarshal both to interface{} for comparison + var actualObj, expectedObj interface{} + if err := json.Unmarshal(jsonData, &actualObj); err != nil { + t.Fatalf("Failed to unmarshal actual JSON: %v", err) + } + if err := json.Unmarshal(expectedJSON, &expectedObj); err != nil { + t.Fatalf("Failed to unmarshal expected JSON: %v", err) + } + + if diff := cmp.Diff(expectedObj, actualObj); diff != "" { + t.Errorf("JSON output mismatch (-expected +current):\n%s", diff) + } +} diff --git a/backend/test/test_utils.go b/backend/test/test_utils.go index fb395ee15a4..0271e197562 100644 --- a/backend/test/test_utils.go +++ b/backend/test/test_utils.go @@ -20,7 +20,6 @@ import ( "testing" "time" - "github.com/cenkalti/backoff" api "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" experimentparams "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_client/experiment_service" "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/experiment_model" @@ -32,6 +31,8 @@ import ( "github.com/kubeflow/pipelines/backend/api/v1beta1/go_http_client/run_model" api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v1" "github.com/kubeflow/pipelines/backend/src/common/util" + + "github.com/cenkalti/backoff" "github.com/pkg/errors" "github.com/stretchr/testify/assert" "k8s.io/client-go/tools/clientcmd" @@ -124,7 +125,7 @@ func DeleteAllJobs(client *api_server.JobClient, namespace string, t *testing.T) func GetExperimentIDFromV1beta1ResourceReferences(resourceRefs []*run_model.APIResourceReference) string { experimentID := "" for _, resourceRef := range resourceRefs { - if resourceRef.Key.Type == run_model.APIResourceTypeEXPERIMENT { + if *resourceRef.Key.Type == run_model.APIResourceTypeEXPERIMENT { experimentID = resourceRef.Key.ID break } @@ -197,10 +198,10 @@ func GetExperiment(name string, description string, namespace string) *experimen experiment.ResourceReferences = []*experiment_model.APIResourceReference{ { Key: &experiment_model.APIResourceKey{ - Type: experiment_model.APIResourceTypeNAMESPACE, + Type: experiment_model.APIResourceTypeNAMESPACE.Pointer(), ID: namespace, }, - Relationship: experiment_model.APIRelationshipOWNER, + Relationship: experiment_model.APIRelationshipOWNER.Pointer(), }, } } @@ -244,7 +245,7 @@ func VerifyJobResourceReferences(resRefs []*job_model.APIResourceReference, targ break } if resRef.Key != nil { - if resRef.Key.ID == target.Key.ID && resRef.Key.Type == target.Key.Type && resRef.Relationship == target.Relationship { + if resRef.Key.ID == target.Key.ID && *resRef.Key.Type == *target.Key.Type && *resRef.Relationship == *target.Relationship { matches++ break } @@ -264,7 +265,7 @@ func VerifyPipelineResourceReferences(resRefs []*pipeline_model.APIResourceRefer break } if resRef.Key != nil { - if resRef.Key.ID == target.Key.ID && resRef.Key.Type == target.Key.Type && resRef.Relationship == target.Relationship { + if resRef.Key.ID == target.Key.ID && *resRef.Key.Type == *target.Key.Type && *resRef.Relationship == *target.Relationship { matches++ break } @@ -284,7 +285,7 @@ func 
VerifyRunResourceReferences(resRefs []*run_model.APIResourceReference, targ break } if resRef.Key != nil { - if resRef.Key.ID == target.Key.ID && resRef.Key.Type == target.Key.Type && resRef.Relationship == target.Relationship { + if resRef.Key.ID == target.Key.ID && *resRef.Key.Type == *target.Key.Type && *resRef.Relationship == *target.Relationship { matches++ break } diff --git a/backend/test/testutil/config_utils.go b/backend/test/testutil/config_utils.go new file mode 100644 index 00000000000..712e866a3f1 --- /dev/null +++ b/backend/test/testutil/config_utils.go @@ -0,0 +1,74 @@ +// Copyright 2018-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package testutil + +import ( + "io" + "net/http" + "os" + "time" + + "github.com/kubeflow/pipelines/backend/test/config" + "github.com/kubeflow/pipelines/backend/test/logger" + + "github.com/cenkalti/backoff" + "github.com/pkg/errors" + "k8s.io/client-go/tools/clientcmd" + clientcmdapi "k8s.io/client-go/tools/clientcmd/api" +) + +func WaitForReady(initializeTimeout time.Duration) error { + operation := func() error { + response, err := http.Get("http://localhost:8888/apis/v2beta1/healthz") + if err != nil { + return err + } + defer func(Body io.ReadCloser) { + err = Body.Close() + if err != nil { + logger.Log("Failed to close response body") + } + }(response.Body) + + // If we get a 503 service unavailable, it's a non-retriable error. + if response.StatusCode == 503 { + return backoff.Permanent(errors.Wrapf( + err, "Waiting for ml pipeline API server failed with non retriable error.")) + } + + return nil + } + + b := backoff.NewExponentialBackOff() + b.MaxElapsedTime = initializeTimeout + err := backoff.Retry(operation, b) + return errors.Wrapf(err, "Waiting for ml pipeline API server failed after all attempts.") +} + +func GetClientConfig(namespace string) clientcmd.ClientConfig { + loadingRules := clientcmd.NewDefaultClientConfigLoadingRules() + loadingRules.DefaultClientConfig = &clientcmd.DefaultClientConfig + overrides := clientcmd.ConfigOverrides{Context: clientcmdapi.Context{Namespace: namespace}} + return clientcmd.NewInteractiveDeferredLoadingClientConfig(loadingRules, + &overrides, os.Stdin) +} + +func GetDefaultPipelineRunnerServiceAccount() string { + if *config.KubeflowMode || *config.MultiUserMode { + return *config.UserServiceAccountName + } else { + return *config.DefaultServiceAccountName + } +} diff --git a/backend/test/testutil/experiment_utils.go b/backend/test/testutil/experiment_utils.go new file mode 100644 index 00000000000..fe87d4b21ea --- /dev/null +++ b/backend/test/testutil/experiment_utils.go @@ -0,0 +1,72 @@ +// Copyright 2021-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package testutil + +import ( + experiment_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_client/experiment_service" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" + api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v2" + "github.com/kubeflow/pipelines/backend/test/config" + "github.com/kubeflow/pipelines/backend/test/logger" + + "github.com/onsi/gomega" +) + +func CreateExperimentWithParams(experimentClient *api_server.ExperimentClient, experimentParams *experiment_model.V2beta1Experiment) *experiment_model.V2beta1Experiment { + logger.Log("Create an experiment with name %s", experimentParams.DisplayName) + createdExperiment, experimentErr := experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{ + Experiment: experimentParams, + }) + gomega.Expect(experimentErr).NotTo(gomega.HaveOccurred(), "Failed to create experiment with name '%s'", experimentParams.DisplayName) + return createdExperiment +} + +func CreateExperiment(experimentClient *api_server.ExperimentClient, experimentName string, namespace ...string) *experiment_model.V2beta1Experiment { + logger.Log("Create an experiment with name %s", experimentName) + createExperimentParams := experiment_params.NewExperimentServiceCreateExperimentParams() + namespaceToUse := *config.Namespace + if len(namespace) > 0 { + namespaceToUse = namespace[0] + } + createExperimentParams.Experiment = &experiment_model.V2beta1Experiment{ + DisplayName: experimentName, + Namespace: namespaceToUse, + } + createdExperiment, experimentErr := experimentClient.Create(createExperimentParams) + gomega.Expect(experimentErr).NotTo(gomega.HaveOccurred(), "Failed to create experiment") + return createdExperiment +} + +func ListExperiments(experimentClient *api_server.ExperimentClient, params *experiment_params.ExperimentServiceListExperimentsParams) []*experiment_model.V2beta1Experiment { + experiments, _, _, err := experimentClient.List(params) + gomega.Expect(err).NotTo(gomega.HaveOccurred(), "Failed to list experiments") + return experiments +} + +func DeleteExperiment(experimentClient *api_server.ExperimentClient, experimentID string) { + _, err := experimentClient.Get(&experiment_params.ExperimentServiceGetExperimentParams{ExperimentID: experimentID}) + if err == nil { + logger.Log("Delete experiment %s", experimentID) + experimentDeleteParams := experiment_params.ExperimentServiceDeleteExperimentParams{ + ExperimentID: experimentID, + } + err = experimentClient.Delete(&experimentDeleteParams) + if err != nil { + logger.Log("Failed to delete experiment %s", experimentID) + } + } else { + logger.Log("Skipping Deletion of the experiment %s, as it does not exist", experimentID) + } +} diff --git a/backend/test/testutil/file_utils.go b/backend/test/testutil/file_utils.go new file mode 100644 index 00000000000..6c48c2000fd --- /dev/null +++ b/backend/test/testutil/file_utils.go @@ -0,0 +1,184 @@ +// Copyright 2018-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// 
you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package testutil provides helpers for working with test data files.
+package testutil
+
+import (
+	"fmt"
+	"os"
+	"path/filepath"
+	"slices"
+	"strings"
+
+	"google.golang.org/protobuf/encoding/protojson"
+	"google.golang.org/protobuf/proto"
+	"sigs.k8s.io/yaml"
+
+	"github.com/kubeflow/pipelines/backend/src/apiserver/common"
+	"github.com/kubeflow/pipelines/backend/src/apiserver/server"
+	"github.com/kubeflow/pipelines/backend/src/apiserver/template"
+	testconstants "github.com/kubeflow/pipelines/backend/test/constants"
+	"github.com/kubeflow/pipelines/backend/test/logger"
+
+	"github.com/onsi/gomega"
+	v1 "k8s.io/api/core/v1"
+)
+
+// GetProjectRoot Get project root directory
+func GetProjectRoot() string {
+	dirFiles := make([]string, 0)
+	dir, _ := os.Getwd()
+	for !slices.Contains(dirFiles, testconstants.ParentDirectory) && !slices.Contains(dirFiles, testconstants.TestDataDir) {
+		dirFiles = make([]string, 0)
+		dir = filepath.Join(dir, "..")
+		files, err := os.ReadDir(dir)
+		if err != nil {
+			logger.Log("Failed to read directory, due to %s", err.Error())
+		}
+		for _, file := range files {
+			dirFiles = append(dirFiles, file.Name())
+		}
+	}
+	return dir
+}
+
+// GetTestDataDir Get the directory location for all the test data
+func GetTestDataDir() string {
+	return filepath.Join(GetProjectRoot(), testconstants.TestDataDir)
+}
+
+// GetPipelineFilesDir Get the directory location of the main list of pipeline files
+func GetPipelineFilesDir() string {
+	return filepath.Join(GetTestDataDir(), testconstants.PipelineFilesDir)
+}
+
+// GetValidPipelineFilesDir Get the directory location of the valid pipeline files
+func GetValidPipelineFilesDir() string {
+	return filepath.Join(GetPipelineFilesDir(), testconstants.ValidPipelineFilesDir)
+}
+
+// GetCompiledWorkflowsFilesDir Get the directory location of the compiled workflow files
+func GetCompiledWorkflowsFilesDir() string {
+	return filepath.Join(GetTestDataDir(), testconstants.CompiledPipelineFilesDir)
+}
+
+// GetListOfFilesInADir - Get list of files in a dir (not nested)
+func GetListOfFilesInADir(directoryPath string) []string {
+	var fileNames []string
+	files, err := os.ReadDir(directoryPath)
+	if err != nil {
+		logger.Log("Could not fetch files in directory %s, due to: %s", directoryPath, err.Error())
+	}
+
+	for _, file := range files {
+		if !file.IsDir() {
+			if !strings.Contains(file.Name(), ".py") && (file.Name() != "Dockerfile") && !strings.Contains(file.Name(), ".md") && !strings.Contains(file.Name(), ".ipynb") {
+				fileNames = append(fileNames, file.Name())
+			}
+		}
+	}
+	return fileNames
+}
+
+// GetListOfAllFilesInDir - Walk a directory (including nested directories) and return the list of files
+func GetListOfAllFilesInDir(directoryPath string) []string {
+	var filePaths []string
+	err := filepath.Walk(directoryPath, func(path string, d os.FileInfo, err error) error {
+		if err != nil {
+			fmt.Printf("Error accessing path %q: %v\n", path, err)
+			return err
+		}
+
+		// Check if it's
a regular file (not a directory) + if !d.IsDir() { + if !strings.Contains(d.Name(), ".py") && (d.Name() != "Dockerfile") && !strings.Contains(d.Name(), ".md") && !strings.Contains(d.Name(), ".ipynb") { + filePaths = append(filePaths, path) + } + } + return nil + }) + + if err != nil { + fmt.Printf("Error walking the directory tree: %v\n", err) + } + return filePaths +} + +func ProtoToBytes(objectToConvert proto.Message) []byte { + bytesConfig, err := protojson.Marshal(objectToConvert) + if err != nil { + // this is unexpected, cannot convert proto message to JSON + return nil + } + bytesConfigYaml, err := yaml.JSONToYAML(bytesConfig) + if err != nil { + // this is unexpected, cannot convert JSON to YAML + return nil + } + return bytesConfigYaml +} + +func ToBytes(objectToConvert any) []byte { + objectInBytes, marshallingErr := yaml.Marshal(objectToConvert) + gomega.Expect(marshallingErr).NotTo(gomega.HaveOccurred(), "Failed to marshal object to bytes") + return objectInBytes +} + +// ParseFileToSpecs - Read a file and unmarshall it into a template.V2Spec +func ParseFileToSpecs(pipelineFilePath string, cacheDisabled bool, defaultWorkspace *v1.PersistentVolumeClaimSpec) *template.V2Spec { + + specFromFile, err := os.OpenFile(pipelineFilePath, os.O_RDWR, 0644) + gomega.Expect(err).NotTo(gomega.HaveOccurred(), "Failed to read pipeline file") + defer specFromFile.Close() + pipelineSpecBytes, pipelineUnmarshallError := server.ReadPipelineFile(pipelineFilePath, specFromFile, common.MaxFileLength) + gomega.Expect(pipelineUnmarshallError).To(gomega.BeNil(), "Failed to read pipeline spec") + specs, templateErr := template.NewV2SpecTemplate(pipelineSpecBytes, cacheDisabled, defaultWorkspace) + gomega.Expect(templateErr).To(gomega.BeNil(), "Failed to parse spec bytes into a spec object") + return specs +} + +func CreateFile(filePath string, fileContents [][]byte) *os.File { + file, err := os.Create(filePath) + if err != nil { + logger.Log("Failed to create file %s due to %s", filePath, err.Error()) + } + defer func(file *os.File) { + err := file.Close() + if err != nil { + logger.Log("Failed to close file: %s", err.Error()) + } + }(file) + for _, content := range fileContents { + _, err = file.Write(content) + gomega.Expect(err).NotTo(gomega.HaveOccurred(), "Failed to write contents to a file") + } + return file +} + +func CreateTempFile(fileContents []byte) *os.File { + tmpFile, err := os.CreateTemp("", "pipeline-*.yaml") + if err != nil { + logger.Log("Failed to create temporary file: %s", err.Error()) + } + defer func(tmpFile *os.File) { + err := tmpFile.Close() + if err != nil { + logger.Log("Failed to close temporary file: %s", err.Error()) + } + }(tmpFile) + _, _ = tmpFile.Write(fileContents) + return tmpFile +} diff --git a/backend/test/testutil/kubernetes_utils.go b/backend/test/testutil/kubernetes_utils.go new file mode 100644 index 00000000000..b536dce82e7 --- /dev/null +++ b/backend/test/testutil/kubernetes_utils.go @@ -0,0 +1,177 @@ +// Copyright 2018-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package testutil + +import ( + "bytes" + "context" + "fmt" + "io" + "slices" + "strings" + "time" + + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/kubeflow/pipelines/backend/test/logger" + + "github.com/onsi/gomega" + authenticationv1 "k8s.io/api/authentication/v1" + v1 "k8s.io/api/core/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/client-go/kubernetes" +) + +func CreateK8sClient() (*kubernetes.Clientset, error) { + restConfig, configErr := util.GetKubernetesConfig() + if configErr != nil { + return nil, configErr + } + k8sClient, clientErr := kubernetes.NewForConfig(restConfig) + if clientErr != nil { + return nil, clientErr + } + return k8sClient, nil +} + +// ReadContainerLogs - Read pod logs from a specific container +func ReadContainerLogs(client *kubernetes.Clientset, namespace string, containerName string, follow *bool, sinceTime *time.Time, logLimit *int64) string { + pod := GetPodContainingContainer(client, namespace, containerName) + if pod != nil { + return ReadPodLogs(client, namespace, pod.Name, follow, sinceTime, logLimit) + } else { + return fmt.Sprintf("Could not find pod containing container with name '%s'", containerName) + } +} + +// ReadPodLogs - Read pod logs from a specific names, with container name containing a substring and from a certain time period (default being from past 1 min) +func ReadPodLogs(client *kubernetes.Clientset, namespace string, podName string, follow *bool, sinceTime *time.Time, logLimit *int64) string { + podFromPodName := GetPodContainingName(client, namespace, podName) + podLogOptions := GetDefaultPodLogOptions() + if logLimit != nil { + podLogOptions.LimitBytes = logLimit + } + if follow != nil { + podLogOptions.Follow = *follow + } + if sinceTime != nil { + timeSince := metav1.NewTime(sinceTime.UTC()) + podLogOptions.SinceTime = &timeSince + } + buf := new(bytes.Buffer) + if podFromPodName != nil { + for _, container := range podFromPodName.Spec.Containers { + podLogOptions.Container = container.Name + podLogsRequest := client.CoreV1().Pods(namespace).GetLogs(podFromPodName.Name, podLogOptions) + podLogs, err := podLogsRequest.Stream(context.Background()) // Pass a context for cancellation + if err != nil { + logger.Log("Failed to stream pod logs due to %v", err) + } + defer func(podLogs io.ReadCloser) { + err = podLogs.Close() + if err != nil { + logger.Log("Failed to close pod log reader due to %v", err) + } + }(podLogs) + _, err = io.Copy(buf, podLogs) + if err != nil { + logger.Log("Failed to add pod logs to buffer due to: %v", err) + } + } + } else { + logger.Log("No pod logs available for pod with name '%s'", podName) + } + return buf.String() +} + +// GetDefaultPodLogOptions - Get default pod log options for the pod log reader API request +func GetDefaultPodLogOptions() *v1.PodLogOptions { + logLimit := int64(50000000) + sinceTime := metav1.NewTime(time.Now().Add(-1 * time.Minute).UTC()) + return &v1.PodLogOptions{ + Previous: false, + SinceTime: &sinceTime, + Timestamps: true, + LimitBytes: &logLimit, + Follow: false, + } +} + +// GetPodContainingName - Get the name of the pod with name containing substring +func GetPodContainingName(client *kubernetes.Clientset, namespace, podName string) *v1.Pod { + pods, err := client.CoreV1().Pods(namespace).List(context.Background(), metav1.ListOptions{}) + if err != nil { + logger.Log("Failed to list pods due to: %v", err) + } + for _, pod := 
range pods.Items { + podNameSplit := strings.Split(pod.Name, "-") + expectedPodNameSplit := strings.Split(podName, "-") + contains := true + for _, name := range expectedPodNameSplit { + if !slices.Contains(podNameSplit, name) { + contains = false + } + } + if contains { + return &pod + } + } + return nil +} + +// GetPodContainingContainer - Get the name of the pod with container name containing substring +func GetPodContainingContainer(client *kubernetes.Clientset, namespace, containerName string) *v1.Pod { + pods, err := client.CoreV1().Pods(namespace).List(context.Background(), metav1.ListOptions{}) + if err != nil { + logger.Log("Failed to list pods due to: %v", err) + } + for _, pod := range pods.Items { + for _, container := range pod.Spec.Containers { + if strings.Contains(container.Name, containerName) { + return &pod + } + } + } + return nil +} + +func CreateUserToken(client *kubernetes.Clientset, namespace, serviceAccountName string) string { + // Define TokenRequest + tokenReq := &authenticationv1.TokenRequest{ + Spec: authenticationv1.TokenRequestSpec{ + Audiences: []string{"pipelines.kubeflow.org"}, // Token for Kubernetes API server + ExpirationSeconds: func(i int64) *int64 { return &i }(int64(time.Hour.Seconds())), // 1-hour expiration + }, + } + + // Create the token + tokenResponse, err := client.CoreV1().ServiceAccounts(namespace).CreateToken(context.TODO(), serviceAccountName, tokenReq, metav1.CreateOptions{}) + gomega.Expect(err).ToNot(gomega.HaveOccurred(), fmt.Sprintf("Failed to create service account token for '%s' service account under '%s' namespace", serviceAccountName, namespace)) + if tokenResponse != nil { + return tokenResponse.Status.Token + } + return "" +} + +// CreateSecret - Create K8s secret in the provided namespace +func CreateSecret(client *kubernetes.Clientset, namespace string, secret *v1.Secret) { + _, createErr := client.CoreV1().Secrets(namespace).Create(context.TODO(), secret, metav1.CreateOptions{}) + if createErr == nil { + logger.Log("%s created", secret.Name) + } else { + logger.Log("Looks like %s already exists, because creation failed due to %s", secret.Name, createErr.Error()) + _, getErr := client.CoreV1().Secrets(namespace).Get(context.TODO(), secret.Name, metav1.GetOptions{}) + gomega.Expect(getErr).ToNot(gomega.HaveOccurred(), "Failed to get secret '%s'") + } +} diff --git a/backend/test/testutil/pipeline_recurring_run_utils.go b/backend/test/testutil/pipeline_recurring_run_utils.go new file mode 100644 index 00000000000..6e802ed5d23 --- /dev/null +++ b/backend/test/testutil/pipeline_recurring_run_utils.go @@ -0,0 +1,63 @@ +// Copyright 2021-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
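+
+// pipeline_recurring_run_utils.go wraps the v2beta1 recurring-run client with
+// helpers to list, fetch, and delete recurring runs during tests.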
+
+package testutil
+
+import (
+	recurring_run_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service"
+	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model"
+	api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v2"
+	"github.com/kubeflow/pipelines/backend/test/logger"
+
+	"github.com/onsi/gomega"
+)
+
+func ListRecurringRuns(client *api_server.RecurringRunClient, parameters *recurring_run_params.RecurringRunServiceListRecurringRunsParams, namespace string) ([]*recurring_run_model.V2beta1RecurringRun, int, string, error) {
+	if namespace != "" {
+		parameters.Namespace = &namespace
+	}
+	return client.List(parameters)
+}
+
+func GetRecurringRun(client *api_server.RecurringRunClient, runID string) *recurring_run_model.V2beta1RecurringRun {
+	parameters := &recurring_run_params.RecurringRunServiceGetRecurringRunParams{
+		RecurringRunID: runID,
+	}
+	recurringRun, err := client.Get(parameters)
+	if err == nil {
+		return recurringRun
+	}
+	logger.Log("Failed to get recurring run with id=%s", runID)
+	return nil
+}
+
+func DeleteRecurringRun(client *api_server.RecurringRunClient, runID string) {
+	parameters := &recurring_run_params.RecurringRunServiceDeleteRecurringRunParams{
+		RecurringRunID: runID,
+	}
+	err := client.Delete(parameters)
+	gomega.Expect(err).NotTo(gomega.HaveOccurred(), "Failed to delete recurring run with id=%s", runID)
+}
+
+func ListAllRecurringRuns(client *api_server.RecurringRunClient, namespace string) ([]*recurring_run_model.V2beta1RecurringRun, int, string, error) {
+	return ListRecurringRuns(client, &recurring_run_params.RecurringRunServiceListRecurringRunsParams{}, namespace)
+}
+
+func DeleteAllRecurringRuns(client *api_server.RecurringRunClient, namespace string) {
+	recurringRuns, _, _, err := ListAllRecurringRuns(client, namespace)
+	gomega.Expect(err).NotTo(gomega.HaveOccurred(), "Failed to list recurring runs")
+	for _, run := range recurringRuns {
+		DeleteRecurringRun(client, run.RecurringRunID)
+	}
+}
diff --git a/backend/test/testutil/pipeline_run_utils.go b/backend/test/testutil/pipeline_run_utils.go
new file mode 100644
index 00000000000..81ea14b0d34
--- /dev/null
+++ b/backend/test/testutil/pipeline_run_utils.go
@@ -0,0 +1,156 @@
+// Copyright 2018-2023 The Kubeflow Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
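+
+// pipeline_run_utils.go contains helpers for deleting, archiving, and terminating
+// pipeline runs, waiting for a run to reach a given state, and generating
+// runtime inputs from a pipeline spec.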
+ +package testutil + +import ( + "math/rand" + "slices" + "strconv" + "time" + + "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + run_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" + api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v2" + "github.com/kubeflow/pipelines/backend/test/logger" + + "github.com/onsi/ginkgo/v2" + "github.com/onsi/gomega" +) + +func DeletePipelineRun(client *api_server.RunClient, runID string) { + _, err := client.Get(&run_params.RunServiceGetRunParams{RunID: runID}) + if err == nil { + logger.Log("Deleting run %s", runID) + deleteRunParams := run_params.NewRunServiceDeleteRunParams() + deleteRunParams.RunID = runID + deleteErr := client.Delete(deleteRunParams) + if deleteErr != nil { + logger.Log("Failed to delete run %s", runID) + } + } else { + logger.Log("Skipping Deletion of the run %s, as it does not exist", runID) + } +} + +func ArchivePipelineRun(client *api_server.RunClient, runID string) { + _, err := client.Get(&run_params.RunServiceGetRunParams{RunID: runID}) + if err == nil { + logger.Log("Terminate run %s", runID) + archiveRunParams := run_params.NewRunServiceArchiveRunParams() + archiveRunParams.RunID = runID + archiveErr := client.Archive(archiveRunParams) + if archiveErr != nil { + logger.Log("Failed to archive run %s", runID) + } + } else { + logger.Log("Skipping Archiving of the run %s, because it does not exist", runID) + } +} + +func TerminatePipelineRun(client *api_server.RunClient, runID string) { + _, err := client.Get(&run_params.RunServiceGetRunParams{RunID: runID}) + if err == nil { + logger.Log("Terminate run %s", runID) + terminateRunParams := run_params.NewRunServiceTerminateRunParams() + terminateRunParams.RunID = runID + terminateErr := client.Terminate(terminateRunParams) + if terminateErr != nil { + logger.Log("Failed to terminate run %s", runID) + } + } else { + logger.Log("Skipping Termination of the run %s, because it does not exist", runID) + } +} + +func GetPipelineRun(runClient *api_server.RunClient, pipelineRunID *string) *run_model.V2beta1Run { + logger.Log("Get a pipeline run with id=%s", *pipelineRunID) + pipelineRun, runError := runClient.Get(&run_params.RunServiceGetRunParams{ + RunID: *pipelineRunID, + }) + gomega.Expect(runError).NotTo(gomega.HaveOccurred(), "Failed to get run with id="+*pipelineRunID) + return pipelineRun +} + +func WaitForRunToBeInState(runClient *api_server.RunClient, pipelineRunID *string, expectedStates []run_model.V2beta1RuntimeState, timeout *time.Duration) { + logger.Log("Waiting for pipeline run with id=%s to be in one of '%s'", *pipelineRunID, expectedStates) + maxTimeToWait := time.Duration(300) + pollTime := time.Duration(5) + if timeout != nil { + maxTimeToWait = *timeout + } + deadline := time.Now().Add(maxTimeToWait * time.Second) + ticker := time.NewTicker(pollTime * time.Second) + defer ticker.Stop() + for { + currentPipelineRunState := GetPipelineRun(runClient, pipelineRunID).State + if currentPipelineRunState != nil { + if slices.Contains(expectedStates, *currentPipelineRunState) { + logger.Log("Pipeline run with id=%s reached expected state %s", *pipelineRunID, *currentPipelineRunState) + return + } + + if time.Now().After(deadline) { + logger.Log("Pipeline run with id=%s is in %s state", *pipelineRunID, *currentPipelineRunState) + ginkgo.Fail("Timed out waiting for pipeline run with id runId=" + *pipelineRunID 
+ " to be in expected state") + } + logger.Log("Pipeline run with id=%s is in %s state, waiting...", *pipelineRunID, *currentPipelineRunState) + } else { + logger.Log("Pipeline run with id=%s is in nil state, rechecking...", *pipelineRunID) + } + <-ticker.C + } + +} + +func GetPipelineRunTimeInputs(pipelineSpecFile string) map[string]interface{} { + logger.Log("Get the pipeline run time inputs from pipeline spec file %s", pipelineSpecFile) + pipelineSpec := ParseFileToSpecs(pipelineSpecFile, false, nil).PipelineSpec() + pipelineInputMap := make(map[string]interface{}) + if pipelineSpec.Root.InputDefinitions != nil { + if pipelineSpec.Root.InputDefinitions.Parameters != nil { + for name, parameterSpec := range pipelineSpec.Root.InputDefinitions.Parameters { + defaultValExists := false + if parameterSpec.DefaultValue != nil { + defaultValExists = true + } + if !defaultValExists || !parameterSpec.IsOptional { + switch parameterSpec.ParameterType { + case pipelinespec.ParameterType_NUMBER_INTEGER: + pipelineInputMap[name] = rand.Intn(1000) + case pipelinespec.ParameterType_STRING: + pipelineInputMap[name] = GetRandomString(20) + case pipelinespec.ParameterType_STRUCT: + pipelineInputMap[name] = map[string]interface{}{ + "A": strconv.FormatFloat(rand.Float64(), 'g', -1, 64), + "B": strconv.FormatFloat(rand.Float64(), 'g', -1, 64), + } + case pipelinespec.ParameterType_LIST: + pipelineInputMap[name] = []string{GetRandomString(20)} + case pipelinespec.ParameterType_BOOLEAN: + pipelineInputMap[name] = true + case pipelinespec.ParameterType_NUMBER_DOUBLE: + pipelineInputMap[name] = rand.Float64() + default: + pipelineInputMap[name] = GetRandomString(20) + } + } + + } + } + } + logger.Log("Returning pipeline run time inputs %v", pipelineInputMap) + return pipelineInputMap +} diff --git a/backend/test/testutil/pipeline_utils.go b/backend/test/testutil/pipeline_utils.go new file mode 100644 index 00000000000..c905c989f8d --- /dev/null +++ b/backend/test/testutil/pipeline_utils.go @@ -0,0 +1,158 @@ +// Copyright 2018-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package testutil + +import ( + "fmt" + "os" + + pipeline_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" + upload_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service" + model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_model" + api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v2" + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/kubeflow/pipelines/backend/test/logger" + + "github.com/onsi/ginkgo/v2" + "github.com/onsi/gomega" + "k8s.io/client-go/tools/clientcmd" +) + +func GetPipelineUploadClient( + uploadPipelinesWithKubernetes bool, + isKubeflowMode bool, + isDebugMode bool, + namespace string, + clientConfig clientcmd.ClientConfig, +) (api_server.PipelineUploadInterface, error) { + if uploadPipelinesWithKubernetes { + return api_server.NewPipelineUploadClientKubernetes(clientConfig, namespace) + } + + if isKubeflowMode { + return api_server.NewKubeflowInClusterPipelineUploadClient(namespace, isDebugMode) + } + + return api_server.NewPipelineUploadClient(clientConfig, isDebugMode) +} + +func ListPipelines(client *api_server.PipelineClient, namespace *string) []*pipeline_model.V2beta1Pipeline { + parameters := &pipeline_params.PipelineServiceListPipelinesParams{} + if namespace != nil { + parameters.Namespace = namespace + } + logger.Log("Listing all pipelines") + pipelines, _, _, err := client.List(parameters) + gomega.Expect(err).NotTo(gomega.HaveOccurred(), "Error listing pipelines") + return pipelines +} + +// UploadPipeline - Upload a pipeline +func UploadPipeline(pipelineUploadClient api_server.PipelineUploadInterface, pipelineFilePath string, pipelineName *string, pipelineDisplayName *string) (*model.V2beta1Pipeline, error) { + uploadParams := upload_params.NewUploadPipelineParams() + uploadParams.SetName(pipelineName) + if pipelineDisplayName != nil { + uploadParams.SetDisplayName(pipelineDisplayName) + } + logger.Log("Creating temp pipeline file with overridden SDK Version") + overriddenPipelineFileWithSDKVersion := ReplaceSDKInPipelineSpec(pipelineFilePath, false, nil) + tempPipelineFile := CreateTempFile(overriddenPipelineFileWithSDKVersion) + defer func() { + // Ensure the temporary file is removed when the function exits + if err := os.Remove(tempPipelineFile.Name()); err != nil { + logger.Log("Error removing temporary file: %s", err) + } + }() + logger.Log("Uploading pipeline with name=%s, from file %s", *pipelineName, pipelineFilePath) + return pipelineUploadClient.UploadFile(tempPipelineFile.Name(), uploadParams) +} + +/* DeletePipeline deletes a pipeline by id */ +func DeletePipeline(client *api_server.PipelineClient, pipelineID string) { + ginkgo.GinkgoHelper() + _, err := client.Get(&pipeline_params.PipelineServiceGetPipelineParams{PipelineID: pipelineID}) + if err == nil { + logger.Log("Deleting all pipeline version of pipeline with id=%s", pipelineID) + DeleteAllPipelineVersions(client, pipelineID) + logger.Log("Deleting pipeline with id=%s", pipelineID) + err = client.Delete(&pipeline_params.PipelineServiceDeletePipelineParams{PipelineID: pipelineID}) + gomega.Expect(err).NotTo(gomega.HaveOccurred(), fmt.Sprintf("Error occurred while deleting pipeline with id=%s", pipelineID)) + logger.Log("Pipeline with id=%s, DELETED", pipelineID) + } else { + logger.Log("Pipeline 
with id=%s does not exist, so skipping deleting it", pipelineID) + } + +} + +/* DeleteAllPipelines deletes all pipelines */ +func DeleteAllPipelines(client *api_server.PipelineClient, namespace *string) { + ginkgo.GinkgoHelper() + pipelines := ListPipelines(client, namespace) + deletedPipelines := make(map[string]bool) + for _, p := range pipelines { + deletedPipelines[p.PipelineID] = false + } + for pID, isRemoved := range deletedPipelines { + if !isRemoved { + DeleteAllPipelineVersions(client, pID) + deletedPipelines[pID] = true + } + logger.Log("Deleting pipeline with id=%s", pID) + gomega.Expect(client.Delete(&pipeline_params.PipelineServiceDeletePipelineParams{PipelineID: pID})).NotTo(gomega.HaveOccurred(), fmt.Sprintf("Error occurred while deleting pipeline with id=%s", pID)) + } + for _, isRemoved := range deletedPipelines { + gomega.Expect(isRemoved).To(gomega.BeTrue()) + } +} + +/* GetPipeline does its job via GET pipeline end point call, so that we retrieve the values from DB */ +func GetPipeline(client *api_server.PipelineClient, pipelineID string) model.V2beta1Pipeline { + ginkgo.GinkgoHelper() + params := new(pipeline_params.PipelineServiceGetPipelineParams) + params.PipelineID = pipelineID + logger.Log("Get pipeline with id=%s", pipelineID) + pipeline, err := client.Get(params) + gomega.Expect(err).NotTo(gomega.HaveOccurred()) + return model.V2beta1Pipeline{ + DisplayName: pipeline.DisplayName, + Description: pipeline.Description, + PipelineID: pipeline.PipelineID, + CreatedAt: pipeline.CreatedAt, + Namespace: pipeline.Namespace, + Name: pipeline.Name, + } +} + +/* FindPipelineByName gets all pipelines (upto 1000) and filter by name, if the pipeline exists, return true otherwise false + */ +func FindPipelineByName(client *api_server.PipelineClient, pipelineName string) bool { + ginkgo.GinkgoHelper() + requestedNumberOfPipelinesPerPage := 1000 + params := new(pipeline_params.PipelineServiceListPipelinesParams) + params.PageSize = util.Int32Pointer(int32(requestedNumberOfPipelinesPerPage)) + logger.Log("Get all pipelines") + pipelines, size, _, err := client.List(params) + gomega.Expect(err).NotTo(gomega.HaveOccurred()) + logger.Log("Finding pipeline with name=%s", pipelineName) + if size < requestedNumberOfPipelinesPerPage { + for _, pipeline := range pipelines { + if pipeline.DisplayName == pipelineName { + return true + } + } + } + return false +} diff --git a/backend/test/testutil/pipeline_version_utils.go b/backend/test/testutil/pipeline_version_utils.go new file mode 100644 index 00000000000..0cbaf680710 --- /dev/null +++ b/backend/test/testutil/pipeline_version_utils.go @@ -0,0 +1,108 @@ +// Copyright 2018-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
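+
+// pipeline_version_utils.go contains helpers for listing, sorting, and deleting
+// pipeline versions, and for converting pipeline spec YAML to JSON for comparisons.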
+ +package testutil + +import ( + "fmt" + "os" + "sort" + "time" + + "sigs.k8s.io/yaml" + + pipeline_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" + api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v2" + "github.com/kubeflow/pipelines/backend/test/logger" + + gomega "github.com/onsi/gomega" +) + +// JSONFromYAML - Construct expected Pipeline Spec from the uploaded file +func JSONFromYAML(pipelineFilePath string) []byte { + pipelineSpec, err := os.ReadFile(pipelineFilePath) + gomega.Expect(err).NotTo(gomega.HaveOccurred()) + jsonSpecFromFile, errDataConversion := yaml.YAMLToJSON(pipelineSpec) + gomega.Expect(errDataConversion).NotTo(gomega.HaveOccurred()) + return jsonSpecFromFile +} + +func ListPipelineVersions(client *api_server.PipelineClient, pipelineID string) ( + []*pipeline_model.V2beta1PipelineVersion, int, string, error, +) { + logger.Log("Listing pipeline versions for pipeline %s", pipelineID) + parameters := &pipeline_params.PipelineServiceListPipelineVersionsParams{PipelineID: pipelineID} + return client.ListPipelineVersions(parameters) +} + +func DeletePipelineVersion(client *api_server.PipelineClient, pipelineID string, pipelineVersionID string) { + logger.Log("Deleting pipeline versions for pipeline %s with version id=%s", pipelineID, pipelineVersionID) + err := client.DeletePipelineVersion(&pipeline_params.PipelineServiceDeletePipelineVersionParams{PipelineID: pipelineID, PipelineVersionID: pipelineVersionID}) + gomega.Expect(err).NotTo(gomega.HaveOccurred(), fmt.Sprintf("Pipeline version with id=%s of pipelineID=%s failed", pipelineVersionID, pipelineID)) +} + +// GetLatestPipelineVersion - list all pipeline versions of a pipeline by ID and return the one with the latest createdAt date +func GetLatestPipelineVersion(pipelineClient *api_server.PipelineClient, pipelineID *string) *pipeline_model.V2beta1PipelineVersion { + var pipelineVersion *pipeline_model.V2beta1PipelineVersion + gomega.EventuallyWithOffset(1, func(g gomega.Gomega) { + pipelineVersions, _, _, listPipelineVersionErr := ListPipelineVersions(pipelineClient, *pipelineID) + g.Expect(listPipelineVersionErr).NotTo(gomega.HaveOccurred(), "Failed to list pipeline versions for pipeline with id="+*pipelineID) + g.Expect(pipelineVersions).NotTo(gomega.BeEmpty(), "No pipeline versions found for pipeline with id="+*pipelineID) + sort.Slice(pipelineVersions, func(i, j int) bool { + return time.Time(pipelineVersions[i].CreatedAt).After(time.Time(pipelineVersions[j].CreatedAt)) + }) + + pipelineVersion = pipelineVersions[0] + }).WithTimeout(5 * time.Second).WithPolling(500 * time.Millisecond).Should(gomega.Succeed()) + + return pipelineVersion +} + +// DeleteAllPipelineVersions - Delete all pipeline versions of a pipeline by ID +func DeleteAllPipelineVersions(client *api_server.PipelineClient, pipelineID string) { + logger.Log("Deleting all pipeline versions for pipeline %s", pipelineID) + pipelineVersions, _, _, err := ListPipelineVersions(client, pipelineID) + gomega.Expect(err).NotTo(gomega.HaveOccurred(), "Error occurred while listing pipeline versions") + logger.Log("Found %d pipeline versions for pipeline %s", len(pipelineVersions), pipelineID) + for _, pv := range pipelineVersions { + logger.Log("Deleting pipeline version %s", pv.PipelineVersionID) + 
gomega.Expect(client.DeletePipelineVersion(&pipeline_params.PipelineServiceDeletePipelineVersionParams{PipelineID: pipelineID, PipelineVersionID: pv.PipelineVersionID})).NotTo(gomega.HaveOccurred(), fmt.Sprintf("Pipeline version with id=%s of pipelineID=%s failed", pv.PipelineVersionID, pipelineID)) + } + pipelineVersions, _, _, err = ListPipelineVersions(client, pipelineID) + gomega.Expect(err).NotTo(gomega.HaveOccurred(), "Error occurred while listing pipeline versions") + if len(pipelineVersions) > 0 { + logger.Log("Failed to delete all pipeline versions") + } +} + +// GetSortedPipelineVersionsByCreatedAt - Get a list of pipeline upload version for a specific pipeline, and sort the list by CreatedAt before returning it +// +// sortBy - ASC or DESC, If nil, then the default will be DESC +func GetSortedPipelineVersionsByCreatedAt(client *api_server.PipelineClient, pipelineID string, sortBy *string) []*pipeline_model.V2beta1PipelineVersion { + versions, _, _, err := ListPipelineVersions(client, pipelineID) + if err != nil { + return nil + } + sort.Slice(versions, func(i, j int) bool { + versionTime1 := time.Time(versions[i].CreatedAt).UTC() + versionTime2 := time.Time(versions[j].CreatedAt).UTC() + if sortBy != nil && *sortBy == "ASC" { + return versionTime1.Before(versionTime2) + } else { + return versionTime1.After(versionTime2) + } + }) + return versions +} diff --git a/backend/test/testutil/test_utils.go b/backend/test/testutil/test_utils.go new file mode 100644 index 00000000000..88bbab64224 --- /dev/null +++ b/backend/test/testutil/test_utils.go @@ -0,0 +1,115 @@ +// Copyright 2018-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package testutil gathers shared helpers used across backend integration tests. 
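+// This file collects general-purpose helpers: random strings, pointer handling,
+// per-test log files, namespace selection, skipping tests tied to known GitHub
+// issues, and rewriting the kfp SDK version inside pipeline specs.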
+package testutil + +import ( + "fmt" + "math/rand" + "os" + "path/filepath" + "regexp" + "strings" + "time" + + "github.com/onsi/gomega" + v1 "k8s.io/api/core/v1" + + "github.com/kubeflow/pipelines/backend/test/config" + "github.com/kubeflow/pipelines/backend/test/logger" + + "github.com/onsi/ginkgo/v2" + "github.com/onsi/ginkgo/v2/types" +) + +// ParsePointersToString - convert a string pointer to string value +func ParsePointersToString(s *string) string { + if s == nil { + return "" + } else { + return *s + } +} + +// GetRandomString - Get a random string of length x +func GetRandomString(length int) string { + charset := "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" + seededRand := rand.New(rand.NewSource(time.Now().UnixNano())) + b := make([]byte, length) + for i := range b { + b[i] = charset[seededRand.Intn(len(charset))] + } + return string(b) +} + +// CheckIfSkipping - test if the provided string argument contains "GH-" (case insensitive) +func CheckIfSkipping(stringValue string) { + if strings.Contains(strings.ToLower(stringValue), "_gh-") { + issue := strings.Split(strings.ToLower(stringValue), "_gh-")[1] + ginkgo.Skip(fmt.Sprintf("Skipping pipeline run test because of a known issue: https://github.com/kubeflow/pipelines/issues/%s", issue)) + } +} + +func WriteLogFile(specReport types.SpecReport, testName, logDirectory string) { + stdOutput := specReport.CapturedGinkgoWriterOutput + testLogFile := filepath.Join(logDirectory, testName+".log") + logFile, err := os.Create(testLogFile) + if err != nil { + logger.Log("Failed to create log file due to: %s", err.Error()) + } + _, err = logFile.Write([]byte(stdOutput)) + if err != nil { + logger.Log("Failed to write to the log file, due to: %s", err.Error()) + } + err = logFile.Close() + if err != nil { + return + } +} + +// GetNamespace - Get Namespace based on the deployment mode +func GetNamespace() string { + if *config.KubeflowMode || *config.MultiUserMode { + return *config.UserNamespace + } + return *config.Namespace +} + +// getPackagePath generates the package path based on environment variables +// Equivalent to the Python function get_package_path +func getPackagePath(subdir string) string { + repoName := *config.RepoName + + pullNumber := *config.PullNumber + if pullNumber != "" { + return fmt.Sprintf("git+https://github.com/%s.git@refs/pull/%s/merge#subdirectory=%s", repoName, pullNumber, subdir) + } + return fmt.Sprintf("git+https://github.com/%s.git@%s#subdirectory=%s", repoName, *config.BranchName, subdir) +} + +func ReplaceSDKInPipelineSpec(pipelineFilePath string, cacheDisabled bool, defaultWorkspace *v1.PersistentVolumeClaimSpec) []byte { + pipelineFileBytes, err := os.ReadFile(pipelineFilePath) + gomega.Expect(err).NotTo(gomega.HaveOccurred(), "failed to read pipeline file: "+pipelineFilePath) + pipelineFileString := string(pipelineFileBytes) + + // Define regex pattern to match kfp==[version] (e.g., kfp==2.8.0) + kfpPattern := regexp.MustCompile(`kfp==[0-9]+\.[0-9]+\.[0-9]+`) + + // Replace all occurrences with the new package path + newPackagePath := getPackagePath("sdk/python") + modifiedPipelineSpec := kfpPattern.ReplaceAllString(pipelineFileString, newPackagePath) + + return []byte(modifiedPipelineSpec) +} diff --git a/backend/test/v2/api/README.md b/backend/test/v2/api/README.md new file mode 100644 index 00000000000..44546069663 --- /dev/null +++ b/backend/test/v2/api/README.md @@ -0,0 +1,372 @@ +# KFP v2 API Test Framework + +This directory contains the comprehensive integration test suite 
for Kubeflow Pipelines (KFP) v2 APIs. The test framework is built using **Ginkgo** and **Gomega** testing libraries and provides end-to-end testing for all API services. + +## Table of Contents + +1. [Test Framework Architecture](#test-framework-architecture) +2. [Test Coverage](#test-coverage) +3. [Test Creation Process](#test-creation-process) +4. [Directory Structure](#directory-structure) +5. [Running Tests](#running-tests) +6. [Contributing](#contributing) + +## Test Framework Architecture + +### Ginkgo & Gomega Integration + +The test framework leverages: + +- **Ginkgo v2**: BDD-style testing framework for organizing and running tests +- **Gomega**: Matcher library for assertions and expectations + +#### Key Framework Components + +**1. Test Suite Setup (`integration_suite_test.go`)** +- Global `BeforeSuite()`: Initializes API clients, creates directories, sets up Kubernetes client +- Global `BeforeEach()`: Sets up test-specific variables and common test data +- Global `AfterEach()`: Performs cleanup operations (deletes pipelines, runs, experiments) +- `ReportAfterEach()`: Captures logs and test reports on failure + +**2. Configuration & Flags (`flags.go`)** +- Configurable test parameters via command-line flags +- Environment-specific settings (namespace, timeouts, debug mode) +- Test execution modes (dev mode, integration tests, proxy tests) + +```go +// Example flag usage +var namespace = flag.String("namespace", "kubeflow", "The namespace ml pipeline deployed to") +var isDebugMode = flag.Bool("isDebugMode", false, "Whether to enable debug mode") +``` + +### Logging System (`logger/`) + +**Custom Logger Implementation:** +- Integrates with Ginkgo's `GinkgoWriter` for test output +- Provides structured logging with format support +- Captures logs for failed test analysis + +```go +// Usage example +logger.Log("Uploading pipeline file %s", pipelineFile) +logger.Log("Created Pipeline Run with id: %s", runID) +``` + +### Utility Classes (`utils/`) + +**Core Utility Files:** +- `config_utils.go`: Client configuration and connection setup +- `pipeline_utils.go`: Pipeline CRUD operations and validation +- `pipeline_run_utils.go`: Pipeline run lifecycle management +- `experiment_utils.go`: Experiment operations +- `kubernetes_utils.go`: Kubernetes cluster interactions +- `file_utils.go`: File system operations for test data +- `test_utils.go`: Common test helpers and utilities + +**Example Utility Usage:** +```go +// Pipeline operations +uploadedPipeline := utils.CreatePipeline(client, pipelineFile) +utils.DeletePipeline(client, pipelineID) + +// Run operations +utils.WaitForRunToBeInState(runClient, runID, []State{RUNNING}) +utils.TerminatePipelineRun(runClient, runID) +``` + +### Custom Matchers (`matcher/`) + +**Specialized Assertion Matchers:** +- `custom_matcher.go`: Generic map comparison utilities +- `pipeline_matcher.go`: Pipeline-specific validation matchers + +```go +// Custom matcher usage +MatchMaps(actualMap, expectedMap, "pipeline parameters") +``` + +### Constants & Configuration (`constants/`) + +**Static Test Labels:** +- `test_type.go`: Test categories (Smoke, CriticalOnly, FullRegression) + +**Example Test Labeling:** +```go +var _ = Describe("Pipeline API Tests", Label(FullRegression), func() { + // Test implementation +}) +``` +**Open Text Labels:** + +Tests can also be labeled with any text, and these labels can further be used to filter out during execution +**Example Labeling:** +```go +var _ = Describe("Pipeline API Tests", Label(POSITIVE, "Pipeline"), func() { + 
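+    // These specs can be selected at run time with: ginkgo --label-filter="Pipeline"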
// Test implementation +}) +``` + +**Current List of Labels** +``` +Smoke, CriticalOnly, FullRegression, POSITIVE, "Negative", "PipelineUpload", API_SERVER_TESTS +``` + +### Reports Generation + +**Automated Report Generation:** +- **JUnit XML Reports**: `reports/api.xml` for CI/CD integration +- **JSON Reports**: `reports/api.json` for detailed analysis +- **Test Logs**: Individual test logs in `logs/` directory +- **Pod Logs**: Captured on test failures for debugging + +```go +// Report configuration +reporterConfig.JUnitReport = filepath.Join(testReportDirectory, junitReportFilename) +reporterConfig.JSONReport = filepath.Join(testReportDirectory, jsonReportFilename) +``` + +## Test Coverage + +### API Service Test Distribution + +Each API service has dedicated test files with comprehensive endpoint coverage: + +| API Service | Test File | Primary Focus | Test Count | +|-------------------------|--------------------------------------|--------------------------------------|------------------------| +| **Pipeline API** | `pipeline_api_test.go` | Pipeline CRUD, versioning, listing | 30+ test scenarios | +| **Pipeline Upload API** | `pipeline_upload_api_test.go` | File upload, validation, versioning | 15+ test scenarios | +| **Pipeline Run API** | `pipeline_run_api_test.go` | Run lifecycle, state management | 45+ test scenarios | +| **Experiment API** | `experiment_api_test.go` | Experiment management, association | 25+ test scenarios | +| **Recurring Run API** | `pipeline_recurring_run_api_test.go` | Scheduled runs, cron jobs | 20+ test scenarios | +| **Report API** | `report_api_test.go` | Workflow reporting, metrics | 10+ test scenarios | +| **E2E Pipeline** | `pipeline_e2e_test.go` | End-to-end pipeline execution | 5+ comprehensive flows | + +### Endpoint Coverage Details + +**Pipeline API Coverage:** +- ✅ List operations (pagination, sorting, filtering) +- ✅ CRUD operations (create, read, update, delete) +- ✅ Version management +- ✅ Namespace isolation +- ✅ Error handling and validation + +**Pipeline Run API Coverage:** +- ✅ Run creation and execution +- ✅ State transitions (PENDING → RUNNING → SUCCESS/FAILED) +- ✅ Run termination and cancellation +- ✅ Archive/unarchive operations +- ✅ Experiment association +- ✅ Parameter handling and validation + +**Experiment API Coverage:** +- ✅ Experiment lifecycle management +- ✅ Run association and disassociation +- ✅ Archive/unarchive workflows +- ✅ Filtering and search operations + +### Test Categorization + +**By Test Type:** +- **Smoke Tests**: Critical path validation +- **Regression Tests**: Comprehensive feature coverage +- **Integration Tests**: Cross-service interaction validation + +## Test Creation Process + +### 1. Test Structure Pattern + +All tests follow a consistent BDD structure using Ginkgo's descriptive syntax: + +```go +var _ = Describe("API Feature Description >", Label(POSITIVE, "ServiceName", FullRegression), func() { + Context("Specific Scenario Group >", func() { + It("Should perform specific action", func() { + // Test implementation + }) + }) +}) +``` + +### 2. Test Implementation Lifecycle + +**Setup Phase:** +```go +BeforeEach(func() { + logger.Log("################### Setup before each test #####################") + // Initialize test-specific variables + // Create test data + // Setup API clients +}) +``` + +**Test Execution:** +```go +It("Should validate specific behavior", func() { + // 1. Arrange: Setup test data + logger.Log("Starting test: %s", testDescription) + + // 2. 
Act: Execute the operation + result := performAPICall(testData) + + // 3. Assert: Validate results + Expect(result).To(Equal(expectedResult)) + Expect(result.Status).To(Equal("SUCCESS")) +}) +``` + +**Cleanup Phase:** +```go +AfterEach(func() { + logger.Log("################### Global Cleanup after each test #####################") + // Clean up created resources + // Delete test data + // Reset state +}) +``` + +### 3. Logging Best Practices + +**Structured Logging:** +```go +// Test phase logging +logger.Log("################### Setup Phase #####################") +logger.Log("Creating pipeline with name: %s", pipelineName) +logger.Log("Pipeline created successfully with ID: %s", pipelineID) +logger.Log("################### Cleanup Phase #####################") +``` + +**Error Context Logging:** +```go +logger.Log("Test failed... Capturing pod logs") +podLogs := utils.ReadContainerLogs(k8Client, namespace, "pipeline-api-server", &testStartTimeUTC) +AddReportEntry("Pod Log", podLogs) +``` + +### 4. Resource Management + +**Automatic Cleanup:** +- All created resources are tracked in global arrays +- Cleanup happens in `AfterEach()` regardless of test outcome +- Resources include: pipelines, pipeline runs, experiments + +```go +// Resource tracking +createdPipelines = []*upload_model.V2beta1Pipeline{} +createdRunIds = make([]string, 0) +createdExperimentIds = make([]string, 0) + +// Automatic cleanup +for _, pipeline := range createdPipelines { + utils.DeletePipeline(pipelineClient, pipeline.PipelineID) +} +``` + +### 5. Error Handling & Validation + +**Assertion Patterns:** +```go +// Standard assertions +Expect(err).NotTo(HaveOccurred(), "Operation should succeed") +Expect(response.Status).To(Equal(expectedStatus)) +Expect(len(results)).To(BeNumerically(">=", 1)) + +// Custom matcher usage +MatchMaps(actualParameters, expectedParameters, "pipeline parameters") +``` + +## Directory Structure + +``` +backend/test/v2/api/ +├── README.md # This documentation +├── integration_suite_test.go # Main test suite setup +├── flags.go # Test configuration flags +├── constants/ # Test constants and enums +│ └── test_type.go # Test type categories +├── logger/ # Logging utilities +│ └── logger.go # Custom logger implementation +├── matcher/ # Custom assertion matchers +│ ├── custom_matcher.go # Generic map matchers +│ └── pipeline_matcher.go # Pipeline-specific matchers +├── utils/ # Utility functions +│ ├── config_utils.go # Client configuration +│ ├── experiment_utils.go # Experiment operations +│ ├── file_utils.go # File system operations +│ ├── kubernetes_utils.go # K8s cluster operations +│ ├── pipeline_run_utils.go # Pipeline run management +│ ├── pipeline_utils.go # Pipeline operations +│ ├── pipeline_version_utils.go # Version management +│ └── test_utils.go # Common test helpers +├── logs/ # Test execution logs +├── reports/ # Generated test reports +│ ├── api.xml # JUnit XML report +│ └── api.json # JSON test report +├── *_api_test.go # Individual API test files +├── pipeline_e2e_test.go # End-to-end tests +└── argo_workflow_converter.go # Workflow conversion utilities +``` + +## Running Tests + +### Basic Test Execution + +```bash +# Run all API tests +go test -v ./backend/test/v2/api/ + +# Run with specific flags +go test -v ./backend/test/v2/api/ \ + -namespace=kubeflow \ + -isDebugMode=true \ + -runIntegrationTests=true +``` + +### Test Filtering + +```bash +# Run only smoke tests +ginkgo -v --label-filter="Smoke" ./backend/test/v2/api/ + +# Run pipeline-specific tests +ginkgo -v 
--label-filter="Pipeline" ./backend/test/v2/api/ +``` + +### Configuration Options + +| Flag | Default | Description | +|-----------------------|------------|-------------------------------| +| `namespace` | `kubeflow` | Target Kubernetes namespace | +| `isDebugMode` | `false` | Enable detailed debug logging | +| `runIntegrationTests` | `true` | Execute integration tests | +| `isKubeflowMode` | `false` | Run in full Kubeflow mode | +| `cacheEnabled` | `true` | Enable pipeline caching | +| `podLogLimit` | `50000000` | Limit for captured pod logs | + +## Contributing + +### Adding New Tests + +1. **Create test file**: Follow naming convention `*_api_test.go` +2. **Import required packages**: Include framework imports and utilities +3. **Define test structure**: Use BDD patterns with proper labeling +4. **Implement test logic**: Follow the setup → act → assert → cleanup pattern +5. **Add proper logging**: Use structured logging throughout +6. **Update documentation**: Document new test coverage + +### Test Development Guidelines + +- **Use descriptive test names** that clearly indicate the test purpose +- **Implement proper cleanup** to avoid test pollution +- **Add appropriate labels** for test categorization and filtering +- **Include comprehensive logging** for debugging failed tests +- **Follow consistent code patterns** established in existing tests +- **Validate all assumptions** with explicit assertions + +### Code Review Checklist + +- [ ] Tests follow BDD structure and naming conventions +- [ ] Proper resource cleanup is implemented +- [ ] Comprehensive logging is included +- [ ] Test labels are correctly applied +- [ ] Error cases are handled appropriately +- [ ] Documentation is updated if needed \ No newline at end of file diff --git a/backend/test/v2/api/experiment_api_test.go b/backend/test/v2/api/experiment_api_test.go new file mode 100644 index 00000000000..a1c9922a8e3 --- /dev/null +++ b/backend/test/v2/api/experiment_api_test.go @@ -0,0 +1,207 @@ +// Copyright 2018-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package api + +import ( + "github.com/kubeflow/pipelines/backend/test/config" + "github.com/kubeflow/pipelines/backend/test/constants" + + . 
"github.com/onsi/ginkgo/v2" +) + +// ########################################### +// ################## TESTS ################## +// ########################################### + +// ################## POSITIVE TESTS ################## + +var _ = PDescribe("List Experiments API Tests >", Label(constants.POSITIVE, constants.Experiment, "ExperimentList", constants.APIServerTests, constants.FullRegression), func() { + + Context("Basic List Operations >", func() { + It("When no experiments exist", func() { + }) + It("After creating a single experiment", func() { + }) + It("After creating multiple experiments", func() { + }) + It("By namespace", func() { + }) + }) + Context("Pagination >", func() { + It("List experiments with page size limit", func() { + }) + It("List experiments with pagination - iterate through all pages (at least 2)", func() { + }) + }) + Context("Sorting >", func() { + It("Sort by name in ascending order", func() { + }) + It("Sort by name in descending order", func() { + }) + It("Sort by display name containing substring in ascending order", func() { + }) + It("Sort by display name containing substring in descending order", func() { + }) + It("Sort by creation date in ascending order", func() { + }) + It("Sort by creation date in descending order", func() { + }) + }) + Context("Filtering >", func() { + It("Filter by experiment id", func() { + }) + It("Filter by pipeline run id", func() { + }) + It("Filter by name", func() { + }) + It("Filter by created at", func() { + }) + It("Filter by namespace", func() { + }) + It("Filter by description", func() { + }) + }) + Context("Combined Parameters >", func() { + It("Filter and sort by name in ascending order", func() { + }) + It("Filter and sort by created date in descending order", func() { + }) + It("Filter by created date and sort by updated date in descending order", func() { + }) + }) +}) + +var _ = PDescribe("Create Experiment API Tests >", Label(constants.POSITIVE, constants.Experiment, "ExperimentCreate", constants.APIServerTests, constants.FullRegression), func() { + + Context("Create an experiment >", func() { + It("With just name", func() { + }) + It("With name and description", func() { + }) + It("With name length of 100 chars", func() { + }) + It("With name containing ASCII characters", func() { + }) + }) +}) + +var _ = PDescribe("Get Experiment API Tests >", Label(constants.POSITIVE, constants.Experiment, "ExperimentGet", constants.APIServerTests, constants.FullRegression), func() { + + Context("Get by ID >", func() { + It("With ID", func() { + }) + }) +}) + +var _ = PDescribe("Archive an experiment Tests >", Label(constants.POSITIVE, constants.Experiment, "ExperimentArchive", constants.APIServerTests, constants.FullRegression), func() { + + Context("By ID >", func() { + It("One that does not have any run(s) or recurring run(s)", func() { + }) + It("One that does have run(s) or recurring run(s)", func() { + }) + It("One that have currently RUNNING run(s) or recurring run(s)", func() { + }) + It("One that is associated to deleted run(s) or recurring run(s)", func() { + }) + }) +}) +var _ = PDescribe("UnArchive an experiment Tests >", Label(constants.POSITIVE, constants.Experiment, "ExperimentUnarchive", constants.APIServerTests, constants.FullRegression), func() { + + Context("By ID >", func() { + It("One that does not have any run(s) or recurring run(s)", func() { + }) + It("One that does have run(s) or recurring run(s)", func() { + }) + It("One that is associated to deleted run(s) or recurring run(s)", func() { + }) 
+ }) +}) + +var _ = PDescribe("Delete Experiment API Tests >", Label(constants.POSITIVE, constants.Experiment, "ExperimentDelete", constants.APIServerTests, constants.FullRegression), func() { + + Context("Delete by ID >", func() { + It("Delete an experiment by ID that does not have any run(s) or recurring run(s)", func() { + }) + It("Delete an experiment by ID that does have run(s) or recurring run(s), and validate that the runs still exists", func() { + }) + }) +}) + +// ################## NEGATIVE TESTS ################## + +var _ = PDescribe("Verify Pipeline Negative Tests >", Label("Negative", constants.Experiment, constants.APIServerTests, constants.FullRegression), func() { + + Context("Create experiment >", func() { + It("With 500 char name", func() { + }) + It("With CJK characters in the name", func() { + }) + It("With 10000k char description", func() { + }) + if *config.KubeflowMode { + It("In a namespace you don't have access to", func() { + }) + } + }) + Context("Delete by ID >", func() { + It("Delete by non existing ID", func() { + }) + It("Delete by ID containing ASCII characters", func() { + }) + if *config.KubeflowMode { + It("In a namespace you don't have access to", func() { + }) + } + }) + Context("Archive >", func() { + It("Archive by non existing ID", func() { + }) + It("Archive by ID containing ASCII characters", func() { + }) + It("Archive an archived experiment", func() { + }) + if *config.KubeflowMode { + It("In a namespace you don't have access to", func() { + }) + } + }) + Context("UnArchive >", func() { + It("UnArchive by non existing ID", func() { + }) + It("UnArchive an active experiment", func() { + }) + It("UnArchive by ID containing ASCII characters", func() { + }) + if *config.KubeflowMode { + It("In a namespace you don't have access to", func() { + }) + } + }) + Context("List experiments >", func() { + It("By partial name", func() { + }) + It("By invalid name", func() { + }) + It("By invalid ID", func() { + }) + It("By invalid ID containing ASCII characters", func() { + }) + if *config.KubeflowMode { + It("In a namespace you don't have access to", func() { + }) + } + }) +}) diff --git a/backend/test/v2/api/integration_suite_test.go b/backend/test/v2/api/integration_suite_test.go new file mode 100644 index 00000000000..9514d5b48a6 --- /dev/null +++ b/backend/test/v2/api/integration_suite_test.go @@ -0,0 +1,200 @@ +// Copyright 2021-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
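+ +// integration_suite_test.go bootstraps the Ginkgo API suite: BeforeSuite builds the API clients +// (single-user, multi-user, or in-cluster Kubeflow mode), BeforeEach resets the shared TestContext, +// AfterEach deletes the runs, experiments and pipelines created by each spec, and ReportAfterEach +// attaches pod logs and per-spec log files when a spec fails.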
+ +package api + +import ( + "fmt" + "log" + "os" + "path/filepath" + "strconv" + "testing" + "time" + + uploadparams "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_model" + apiserver "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v2" + "github.com/kubeflow/pipelines/backend/test/config" + "github.com/kubeflow/pipelines/backend/test/logger" + "github.com/kubeflow/pipelines/backend/test/testutil" + + . "github.com/onsi/ginkgo/v2" + "github.com/onsi/ginkgo/v2/types" + . "github.com/onsi/gomega" + "k8s.io/client-go/kubernetes" +) + +// Test Context +var testContext *TestContext +var randomName string +var pipelineFilesRootDir = testutil.GetPipelineFilesDir() +var userToken string + +var ( + pipelineUploadClient apiserver.PipelineUploadInterface + pipelineClient *apiserver.PipelineClient + runClient *apiserver.RunClient + experimentClient *apiserver.ExperimentClient + recurringRunClient *apiserver.RecurringRunClient + k8Client *kubernetes.Clientset +) + +// Test Reporting Variables +var ( + testLogsDirectory = "logs" + testReportDirectory = "reports" + junitReportFilename = "junit.xml" + jsonReportFilename = "api.json" +) + +var _ = BeforeSuite(func() { + err := os.MkdirAll(testLogsDirectory, 0755) + Expect(err).NotTo(HaveOccurred(), fmt.Sprintf("Error creating Logs Directory: %s", testLogsDirectory)) + err = os.MkdirAll(testReportDirectory, 0755) + Expect(err).NotTo(HaveOccurred(), fmt.Sprintf("Error creating Reports Directory: %s", testReportDirectory)) + var newPipelineClient func() (*apiserver.PipelineClient, error) + var newRunClient func() (*apiserver.RunClient, error) + var newExperimentClient func() (*apiserver.ExperimentClient, error) + var newRecurringRunClient func() (*apiserver.RecurringRunClient, error) + clientConfig := testutil.GetClientConfig(*config.Namespace) + k8Client, err = testutil.CreateK8sClient() + Expect(err).To(BeNil(), "Failed to initialize K8s client") + + if *config.KubeflowMode { + logger.Log("Creating API Clients for Kubeflow Mode") + newPipelineClient = func() (*apiserver.PipelineClient, error) { + return apiserver.NewKubeflowInClusterPipelineClient(*config.Namespace, *config.DebugMode) + } + newExperimentClient = func() (*apiserver.ExperimentClient, error) { + return apiserver.NewKubeflowInClusterExperimentClient(*config.Namespace, *config.DebugMode) + } + newRunClient = func() (*apiserver.RunClient, error) { + return apiserver.NewKubeflowInClusterRunClient(*config.Namespace, *config.DebugMode) + } + newRecurringRunClient = func() (*apiserver.RecurringRunClient, error) { + return apiserver.NewKubeflowInClusterRecurringRunClient(*config.Namespace, *config.DebugMode) + } + } else if *config.MultiUserMode || *config.AuthToken != "" { + if *config.AuthToken != "" { + logger.Log("Creating API Clients With Auth Token") + userToken = *config.AuthToken + } else { + logger.Log("Creating API Clients for Multi User Mode") + userToken = testutil.CreateUserToken(k8Client, *config.UserNamespace, *config.UserServiceAccountName) + } + newPipelineClient = func() (*apiserver.PipelineClient, error) { + return apiserver.NewMultiUserPipelineClient(clientConfig, userToken, *config.DebugMode) + } + newExperimentClient = func() (*apiserver.ExperimentClient, error) { + return apiserver.NewMultiUserExperimentClient(clientConfig, userToken, *config.DebugMode) + } + newRunClient = func() (*apiserver.RunClient, error) { 
+ return apiserver.NewMultiUserRunClient(clientConfig, userToken, *config.DebugMode) + } + newRecurringRunClient = func() (*apiserver.RecurringRunClient, error) { + return apiserver.NewMultiUserRecurringRunClient(clientConfig, userToken, *config.DebugMode) + } + } else { + logger.Log("Creating API Clients for Single User Mode") + newPipelineClient = func() (*apiserver.PipelineClient, error) { + return apiserver.NewPipelineClient(clientConfig, *config.DebugMode) + } + newExperimentClient = func() (*apiserver.ExperimentClient, error) { + return apiserver.NewExperimentClient(clientConfig, *config.DebugMode) + } + newRunClient = func() (*apiserver.RunClient, error) { + return apiserver.NewRunClient(clientConfig, *config.DebugMode) + } + newRecurringRunClient = func() (*apiserver.RecurringRunClient, error) { + return apiserver.NewRecurringRunClient(clientConfig, *config.DebugMode) + } + } + + pipelineUploadClient, err = testutil.GetPipelineUploadClient( + *config.UploadPipelinesWithKubernetes, + *config.KubeflowMode, + *config.DebugMode, + *config.Namespace, + clientConfig, + ) + + Expect(err).To(BeNil(), "Failed to get Pipeline Upload Client") + pipelineClient, err = newPipelineClient() + Expect(err).To(BeNil(), "Failed to get Pipeline Client") + experimentClient, err = newExperimentClient() + Expect(err).To(BeNil(), "Failed to get Experiment client") + runClient, err = newRunClient() + Expect(err).To(BeNil(), "Failed to get Pipeline Run client") + recurringRunClient, err = newRecurringRunClient() + Expect(err).To(BeNil(), "Failed to get Recurring Run client") +}) + +var _ = BeforeEach(func() { + logger.Log("################### Global Setup before each test #####################") + testContext = &TestContext{ + TestStartTimeUTC: time.Now(), + } + randomName = strconv.FormatInt(time.Now().UnixNano(), 10) + testContext.Pipeline.CreatedPipelines = make([]*pipeline_upload_model.V2beta1Pipeline, 0) + testContext.Pipeline.UploadParams = uploadparams.NewUploadPipelineParams() + testContext.PipelineRun.CreatedRunIds = make([]string, 0) + testContext.Experiment.CreatedExperimentIds = make([]string, 0) +}) + +var _ = AfterEach(func() { + // Delete pipelines created during the test + logger.Log("################### Global Cleanup after each test #####################") + + logger.Log("Deleting %d run(s)", len(testContext.PipelineRun.CreatedRunIds)) + for _, runID := range testContext.PipelineRun.CreatedRunIds { + testutil.TerminatePipelineRun(runClient, runID) + testutil.DeletePipelineRun(runClient, runID) + } + logger.Log("Deleting %d experiment(s)", len(testContext.Experiment.CreatedExperimentIds)) + if len(testContext.Experiment.CreatedExperimentIds) > 0 { + for _, experimentID := range testContext.Experiment.CreatedExperimentIds { + testutil.DeleteExperiment(experimentClient, experimentID) + } + } + logger.Log("Deleting %d pipeline(s)", len(testContext.Pipeline.CreatedPipelines)) + for _, pipeline := range testContext.Pipeline.CreatedPipelines { + testutil.DeletePipeline(pipelineClient, pipeline.PipelineID) + } +}) + +var _ = ReportAfterEach(func(specReport types.SpecReport) { + if specReport.Failed() { + logger.Log("Test failed... 
Capturing pod logs from %v to %v", testContext.TestStartTimeUTC, time.Now().UTC()) + podLogs := testutil.ReadContainerLogs(k8Client, *config.Namespace, "pipeline-api-server", nil, &testContext.TestStartTimeUTC, config.PodLogLimit) + AddReportEntry("Pod Log", podLogs) + AddReportEntry("Test Log", specReport.CapturedGinkgoWriterOutput) + currentDir, err := os.Getwd() + Expect(err).NotTo(HaveOccurred(), "Failed to get current directory") + testutil.WriteLogFile(specReport, GinkgoT().Name(), filepath.Join(currentDir, testLogsDirectory)) + } else { + log.Printf("Test passed") + } +}) + +func TestAPIs(t *testing.T) { + RegisterFailHandler(Fail) + suiteConfigAPI, reporterConfigAPI := GinkgoConfiguration() + suiteConfigAPI.FailFast = false + reporterConfigAPI.ForceNewlines = true + reporterConfigAPI.SilenceSkips = true + reporterConfigAPI.JUnitReport = filepath.Join(testReportDirectory, junitReportFilename) + reporterConfigAPI.JSONReport = filepath.Join(testReportDirectory, jsonReportFilename) + RunSpecs(t, "API Tests Suite", suiteConfigAPI, reporterConfigAPI) +} diff --git a/backend/test/v2/api/matcher/custom_matcher.go b/backend/test/v2/api/matcher/custom_matcher.go new file mode 100644 index 00000000000..2943ea35f68 --- /dev/null +++ b/backend/test/v2/api/matcher/custom_matcher.go @@ -0,0 +1,34 @@ +// Copyright 2018-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
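+ +// Package matcher holds the suite's custom assertion helpers: MatchMaps diffs two maps with +// go-cmp, while the pipeline matchers compare pipelines, pipeline versions, runs and run details +// field by field, failing the current Ginkgo spec on any mismatch.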
+package matcher + +import ( + "fmt" + + "github.com/kubeflow/pipelines/backend/test/logger" + + "github.com/google/go-cmp/cmp" + "github.com/onsi/ginkgo/v2" + "github.com/onsi/gomega" +) + +// MatchMaps - Iterate over 2 maps and compare if they are same or not +// +// param mapType - string value to append to the assertion error message +func MatchMaps(actual interface{}, expected interface{}, mapType string) { + ginkgo.GinkgoHelper() + logger.Log("Comparing maps of %s", mapType) + diff := cmp.Diff(actual, expected) + gomega.Expect(diff).To(gomega.BeEmpty(), fmt.Sprintf("%s maps are not same, actual diff:\n%s", mapType, diff)) +} diff --git a/backend/test/v2/api/matcher/pipeline_matcher.go b/backend/test/v2/api/matcher/pipeline_matcher.go new file mode 100644 index 00000000000..788c6f72761 --- /dev/null +++ b/backend/test/v2/api/matcher/pipeline_matcher.go @@ -0,0 +1,130 @@ +package matcher + +import ( + "fmt" + "sort" + "strings" + "time" + + model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/src/apiserver/template" + "github.com/kubeflow/pipelines/backend/test/config" + "github.com/kubeflow/pipelines/backend/test/logger" + "github.com/kubeflow/pipelines/backend/test/testutil" + + "github.com/onsi/ginkgo/v2" + gomega "github.com/onsi/gomega" +) + +// MatchPipelines - Deep compare 2 pipelines +func MatchPipelines(actual *model.V2beta1Pipeline, expected *model.V2beta1Pipeline) { + ginkgo.GinkgoHelper() + gomega.Expect(actual.PipelineID).To(gomega.Not(gomega.BeEmpty()), "Pipeline ID is empty") + actualTime := time.Time(actual.CreatedAt).UTC() + expectedTime := time.Time(expected.CreatedAt).UTC() + if !actualTime.Equal(expectedTime) && !actualTime.After(expectedTime) { + logger.Log("Pipeline creation time %v is expected to be after test start time %v", actual.CreatedAt, expected.CreatedAt) + ginkgo.Fail(fmt.Sprintf("Pipeline creation time %v is before the test start time %v", actual.CreatedAt, expected.CreatedAt)) + } + gomega.Expect(actual.DisplayName).To(gomega.Equal(expected.DisplayName), "Pipeline Display name not matching") + if *config.KubeflowMode { + // Validate namespace only if the mode is kubeflow otherwise everything is in the same namespace and + // pipeline object in the response does not even have namespace + gomega.Expect(actual.Namespace).To(gomega.Equal(expected.Namespace), "Pipeline Namespace not matching") + } + gomega.Expect(actual.Description).To(gomega.Equal(expected.Description), "Pipeline Description not matching") + +} + +// MatchPipelineVersions - Deep compare 2 pipeline versions - even with deep comparison of pipeline specs +func MatchPipelineVersions(actual *model.V2beta1PipelineVersion, expected *model.V2beta1PipelineVersion) { + ginkgo.GinkgoHelper() + gomega.Expect(actual.PipelineVersionID).To(gomega.Not(gomega.BeEmpty()), "Pipeline Version ID is empty") + actualTime := time.Time(actual.CreatedAt).UTC() + expectedTime := time.Time(expected.CreatedAt).UTC() + if !actualTime.Equal(expectedTime) && !actualTime.After(expectedTime) { + logger.Log("Pipeline Version creation time %v is expected to be after test start time %v", actual.CreatedAt, expected.CreatedAt) + ginkgo.Fail(fmt.Sprintf("Pipeline Version creation time %v is expected to be after test start time %v", actual.CreatedAt, expected.CreatedAt)) + } + gomega.Expect(actual.DisplayName).To(gomega.Equal(expected.DisplayName), "Pipeline Display Name 
not matching") + gomega.Expect(actual.Description).To(gomega.Equal(expected.Description), "Pipeline Description not matching") + expectedPipelineSpec := expected.PipelineSpec.(*template.V2Spec) + MatchPipelineSpecs(actual.PipelineSpec, expectedPipelineSpec) +} + +func MatchPipelineSpecs(actual interface{}, expected *template.V2Spec) { + ginkgo.GinkgoHelper() + + actualPipelineSpec := actual.(map[string]interface{}) + platformSpecs, exists := actualPipelineSpec["platform_spec"] + if exists { + actualPipelineSpecBytes := testutil.ToBytes(actualPipelineSpec["pipeline_spec"]) + actualPlatformSpecBytes := testutil.ToBytes(platformSpecs) + gomega.Expect(actualPipelineSpecBytes).To(gomega.MatchYAML(testutil.ProtoToBytes(expected.PipelineSpec())), "Pipeline specs do not match") + gomega.Expect(actualPlatformSpecBytes).To(gomega.MatchYAML(testutil.ProtoToBytes(expected.PlatformSpec())), "Platform specs do not match") + } else { + gomega.Expect(testutil.ToBytes(actualPipelineSpec)).To(gomega.MatchYAML(expected.Bytes()), "Pipeline specs do not match") + } +} + +// MatchPipelineRuns - Shallow match 2 pipeline runs i.e. match only the fields that you do add to the payload when creating a run +func MatchPipelineRuns(actual *run_model.V2beta1Run, expected *run_model.V2beta1Run) { + ginkgo.GinkgoHelper() + if expected.RunID != "" { + gomega.Expect(actual.RunID).To(gomega.Equal(expected.RunID), "Run ID is not matching") + } else { + gomega.Expect(actual.RunID).To(gomega.Not(gomega.BeEmpty()), "Run ID is empty") + } + actualTime := time.Time(actual.CreatedAt).UTC() + expectedTime := time.Time(expected.CreatedAt).UTC() + gomega.Expect(actualTime.After(expectedTime) || actualTime.Equal(expectedTime)).To(gomega.BeTrue(), "Actual Run time is not before the expected time") + gomega.Expect(actual.DisplayName).To(gomega.Equal(expected.DisplayName), "Run Name is not matching") + gomega.Expect(actual.ExperimentID).To(gomega.Equal(expected.ExperimentID), "Experiment Id is not matching") + gomega.Expect(actual.PipelineVersionID).To(gomega.Equal(expected.PipelineVersionID), "Pipeline Version Id is not matching") + MatchMaps(actual.PipelineSpec, expected.PipelineSpec, "Pipeline Spec") + gomega.Expect(actual.PipelineVersionReference.PipelineVersionID).To(gomega.Equal(expected.PipelineVersionReference.PipelineVersionID), "Referred Pipeline Version Idis not matching") + gomega.Expect(actual.PipelineVersionReference.PipelineID).To(gomega.Equal(expected.PipelineVersionReference.PipelineID), "Referred Pipeline Id is not matching") + gomega.Expect(actual.ServiceAccount).To(gomega.Equal(expected.ServiceAccount), "Service Account is not matching") + gomega.Expect(actual.StorageState).To(gomega.Equal(expected.StorageState), "Storage State is not matching") +} + +// MatchPipelineRunDetails - NOTE: Not yet used but once we start populating Run Details, this matcher will come in very handy +func MatchPipelineRunDetails(actual *run_model.V2beta1RunDetails, expected *run_model.V2beta1RunDetails) { + gomega.Expect(actual.PipelineContextID).To(gomega.Equal(expected.PipelineContextID), "Pipeline Context ID is not matching") + gomega.Expect(actual.PipelineRunContextID).To(gomega.Equal(expected.PipelineRunContextID), "Pipeline Run Context ID is not matching") + gomega.Expect(len(actual.TaskDetails)).To(gomega.Equal(len(expected.TaskDetails)), "Number of Tasks not matching") + sort.Slice(actual.TaskDetails, func(i, j int) bool { + return actual.TaskDetails[i].DisplayName < actual.TaskDetails[j].DisplayName // Sort Tasks by Name in ascending 
order + }) + sort.Slice(expected.TaskDetails, func(i, j int) bool { + return expected.TaskDetails[i].DisplayName < expected.TaskDetails[j].DisplayName // Sort Tasks by Name in ascending order + }) + for index, task := range expected.TaskDetails { + gomega.Expect(actual.TaskDetails[index].RunID).To(gomega.Equal(task.RunID), "Task Run ID is not matching") + gomega.Expect(actual.TaskDetails[index].TaskID).To(gomega.Not(gomega.BeEmpty()), "Task ID is empty") + if strings.Contains(task.DisplayName, "root") || strings.Contains(task.DisplayName, "driver") { + gomega.Expect(actual.TaskDetails[index].DisplayName).To(gomega.Equal(task.DisplayName), "Task Display Name is not matching") + } else { + gomega.Expect(actual.TaskDetails[index].DisplayName).To(gomega.ContainSubstring(actual.TaskDetails[index].DisplayName), "Task Display Name does not match") + } + + gomega.Expect(actual.TaskDetails[index].ParentTaskID).To(gomega.Equal(task.ParentTaskID), "Task Parent Task ID is not matching") + actualCreationTime := time.Time(actual.TaskDetails[index].CreateTime).UTC() + expectedCreationTime := time.Time(task.CreateTime).UTC() + actualStartTime := time.Time(actual.TaskDetails[index].StartTime).UTC() + expectedStartTimeRange := expectedCreationTime.Add(-1 * time.Second) + expectedEndTimeRange := expectedCreationTime.Add(1 * time.Second) + gomega.Expect(actualCreationTime.After(expectedStartTimeRange)).To(gomega.BeTrue(), "Task Create Time is before the expected creation time") + gomega.Expect(actualCreationTime.Before(expectedEndTimeRange)).To(gomega.BeTrue(), "Task Create Time is after the expected creation time") + gomega.Expect(actualStartTime.After(expectedStartTimeRange)).To(gomega.BeTrue(), "Task Start Time is before the expected start time") + gomega.Expect(actualStartTime.Before(expectedEndTimeRange)).To(gomega.BeTrue(), "Task End Time is before the expected start time") + gomega.Expect(*actual.TaskDetails[index].State).To(gomega.BeElementOf([]run_model.V2beta1RuntimeState{run_model.V2beta1RuntimeStateCANCELED, run_model.V2beta1RuntimeStateCANCELING, run_model.V2beta1RuntimeStateFAILED, run_model.V2beta1RuntimeStateSUCCEEDED, run_model.V2beta1RuntimeStateSKIPPED, run_model.V2beta1RuntimeStatePENDING, run_model.V2beta1RuntimeStateRUNNING}), "Task State is not matching") + for _, state := range actual.TaskDetails[index].StateHistory { + gomega.Expect(*state.State).To(gomega.BeElementOf([]run_model.V2beta1RuntimeState{run_model.V2beta1RuntimeStateCANCELED, run_model.V2beta1RuntimeStateCANCELING, run_model.V2beta1RuntimeStateFAILED, run_model.V2beta1RuntimeStateSUCCEEDED, run_model.V2beta1RuntimeStateSKIPPED, run_model.V2beta1RuntimeStatePENDING, run_model.V2beta1RuntimeStateRUNNING}), "Task State History is not matching") + } + if strings.Contains(task.DisplayName, "driver") { + gomega.Expect(len(actual.TaskDetails[index].ChildTasks) > 0).To(gomega.BeTrue(), "No child tasks found for a Driver Task") + } + } +} diff --git a/backend/test/v2/api/pipeline_api_test.go b/backend/test/v2/api/pipeline_api_test.go new file mode 100644 index 00000000000..ff77221fdb8 --- /dev/null +++ b/backend/test/v2/api/pipeline_api_test.go @@ -0,0 +1,290 @@ +// Copyright 2018-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package api + +import ( + "fmt" + "path/filepath" + + "github.com/kubeflow/pipelines/backend/test/config" + "github.com/kubeflow/pipelines/backend/test/constants" + utils "github.com/kubeflow/pipelines/backend/test/testutil" + + . "github.com/onsi/ginkgo/v2" +) + +// ########################################### +// ################## TESTS ################## +// ########################################### + +// ################## POSITIVE TESTS ################## + +var _ = PDescribe("List Pipelines API Tests >", Label(constants.POSITIVE, constants.Pipeline, "PipelineList", constants.APIServerTests, constants.FullRegression), func() { + + Context("Basic List Operations >", func() { + It("When no pipelines exist", func() { + }) + It("After creating a single pipeline", func() { + }) + It("After creating multiple pipelines", func() { + }) + It("By namespace", func() { + }) + }) + Context("Pagination >", func() { + It("List pipelines with page size limit", func() { + }) + It("List pipelines with pagination - iterate through all pages (at least 2)", func() { + }) + }) + Context("Sorting >", func() { + It("Sort by name in ascending order", func() { + }) + It("Sort by name in descending order", func() { + }) + It("Sort by display name containing substring in ascending order", func() { + }) + It("Sort by display name containing substring in descending order", func() { + }) + It("Sort by creation date in ascending order", func() { + }) + It("Sort by creation date in descending order", func() { + }) + }) + Context("Filtering >", func() { + It("Filter by pipeline id", func() { + }) + It("Filter by name", func() { + }) + It("Filter by created at", func() { + }) + It("Filter by namespace", func() { + }) + It("Filter by description", func() { + }) + }) + Context("Combined Parameters >", func() { + It("Filter and sort by name in ascending order", func() { + }) + It("Filter and sort by created date in descending order", func() { + }) + It("Filter by created date and sort by updated date in descending order", func() { + }) + }) +}) + +var _ = PDescribe("List Pipelines Versions API Tests >", Label(constants.POSITIVE, constants.Pipeline, "PipelineVersionList", constants.APIServerTests, constants.FullRegression), func() { + + Context("Basic List Operations >", func() { + It("When no pipeline versions exist", func() { + }) + It("After creating a single pipeline version", func() { + }) + It("After creating multiple pipeline versions", func() { + }) + It("By pipeline ID", func() { + }) + }) + Context("Pagination >", func() { + It("List pipelines with page size limit", func() { + }) + It("List pipelines with pagination - iterate through all pages (at least 2)", func() { + }) + }) + Context("Sorting >", func() { + It("Sort by name in ascending order", func() { + }) + It("Sort by name in descending order", func() { + }) + It("Sort by display name containing substring in ascending order", func() { + }) + It("Sort by display name containing substring in descending order", func() { + }) + It("Sort by creation date in ascending order", func() { + }) + It("Sort by creation date in descending order", 
func() { + }) + }) + Context("Filtering >", func() { + It("Filter by pipeline version id", func() { + }) + It("Filter by pipeline id", func() { + }) + It("Filter by name", func() { + }) + It("Filter by created at", func() { + }) + It("Filter by namespace", func() { + }) + It("Filter by description", func() { + }) + }) + Context("Combined Parameters >", func() { + It("Filter and sort by name in ascending order", func() { + }) + It("Filter and sort by created date in descending order", func() { + }) + It("Filter by created date and sort by updated date in descending order", func() { + }) + }) +}) + +var _ = PDescribe("Create Pipeline API Tests >", Label(constants.POSITIVE, constants.Pipeline, "PipelineCreate", constants.APIServerTests, constants.FullRegression), func() { + + Context("Create a pipeline using '/pipelines' >", func() { + It("With just name", func() { + }) + It("With name and description", func() { + }) + It("With name length of 100 chars", func() { + }) + It("With name containing ASCII characters", func() { + }) + }) + + Context("Create a pipeline with version using '/pipelines/create' >", func() { + var pipelineDir = "valid/samples" + pipelineFiles := utils.GetListOfFilesInADir(filepath.Join(pipelineFilesRootDir, pipelineDir)) + for _, pipelineFile := range pipelineFiles { + It(fmt.Sprintf("Pipeline with name and Pipelineversion with name and pipeline spec from file: %s", pipelineFile), func() { + }) + } + pipelineURLs := []string{"Your actual pipeline URLs go here"} + for _, pipelineURL := range pipelineURLs { + It(fmt.Sprintf("Pipeline with name and Pipelineversion with name and pipeline spec from url: %s", pipelineURL), func() { + }) + } + }) +}) + +var _ = PDescribe("Get Pipeline API Tests >", Label(constants.POSITIVE, constants.Pipeline, "PipelineGet", constants.APIServerTests, constants.FullRegression), func() { + + Context("Get by name '/pipelines/{name}' >", func() { + It("With full name", func() { + }) + It("With name and namespace", func() { + }) + }) + + Context("Get by ID '/pipelines/{pipeline_id}' >", func() { + It("With ID", func() { + }) + }) +}) + +var _ = PDescribe("Get Pipeline Version API Tests >", Label(constants.POSITIVE, constants.Pipeline, "PipelineVersionGet", constants.APIServerTests, constants.FullRegression), func() { + + Context("Get by id '/pipelines/{pipeline_id}/versions/{pipeline_version_id}' >", func() { + It("With valid pipeline id and version id", func() { + }) + }) +}) + +var _ = PDescribe("Delete Pipeline API Tests >", Label(constants.POSITIVE, constants.Pipeline, "PipelineDelete", constants.APIServerTests, constants.FullRegression), func() { + + Context("Delete pipeline by ID '/pipelines/{pipeline_id}' >", func() { + It("Delete pipeline by ID that does not have any versions", func() { + }) + }) + Context("Delete pipeline version by ID '/pipelines/{pipeline_id}/versions/{pipeline_version_id}' >", func() { + It("Delete pipeline version by ID", func() { + }) + }) +}) + +// ################## NEGATIVE TESTS ################## + +var _ = PDescribe("Verify Pipeline Negative Tests >", Label("Negative", constants.Pipeline, constants.APIServerTests, constants.FullRegression), func() { + Context("Create a pipeline with version using '/pipelines/create' >", func() { + It("With a valid pipeline and pipeline version name but invalid pipeline spec file", func() { + }) + It("With a valid pipeline and pipeline version name but invalid pipeline spec url", func() { + }) + if *config.KubeflowMode { + It("In a namespace you don't have access to", func() { + 
}) + } + }) + Context("Create a pipeline using '/pipelines >", func() { + It("With 500 char name", func() { + }) + It("With CJK characters in the name", func() { + }) + if *config.KubeflowMode { + It("In a namespace you don't have access to", func() { + }) + } + }) + Context("Get pipeline by ID >", func() { + It("By non existing ID", func() { + }) + It("By ID containing ASCII characters", func() { + }) + if *config.KubeflowMode { + It("In a namespace you don't have access to", func() { + }) + } + }) + Context("Get pipeline version by ID >", func() { + It("By non existing ID", func() { + }) + It("By ID containing ASCII characters", func() { + }) + It("By valid version ID but with the pipeline ID that does not contain this version", func() { + }) + if *config.KubeflowMode { + It("In a namespace you don't have access to", func() { + }) + } + }) + Context("Delete by ID '/pipelines/{pipeline_id}' >", func() { + It("Delete by ID that does have pipeline version(s)", func() { + }) + It("Delete by non existing ID", func() { + }) + It("Delete by ID containing ASCII characters", func() { + }) + if *config.KubeflowMode { + It("In a namespace you don't have access to", func() { + }) + } + }) + Context("Delete pipeline version by ID '/pipelines/{pipeline_id}/versions/{pipeline_version_id}' >", func() { + It("Delete pipeline version with an invalid ID", func() { + }) + It("Delete pipeline version by ID but with the pipeline ID that does not contain this version", func() { + }) + It("Delete by ID containing ASCII characters", func() { + }) + if *config.KubeflowMode { + It("In a namespace you don't have access to", func() { + }) + } + }) + Context("List pipelines >", func() { + It("By partial name", func() { + }) + It("By invalid name", func() { + }) + It("By invalid ID", func() { + }) + It("By invalid ID containing ASCII characters", func() { + }) + if *config.KubeflowMode { + It("In a namespace you don't have access to", func() { + }) + } + }) +}) diff --git a/backend/test/v2/api/pipeline_recurring_run_api_test.go b/backend/test/v2/api/pipeline_recurring_run_api_test.go new file mode 100644 index 00000000000..80cb3cc3f3c --- /dev/null +++ b/backend/test/v2/api/pipeline_recurring_run_api_test.go @@ -0,0 +1,203 @@ +// Copyright 2018-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package api + +import ( + "github.com/kubeflow/pipelines/backend/test/config" + "github.com/kubeflow/pipelines/backend/test/constants" + + . 
"github.com/onsi/ginkgo/v2" +) + +// ########################################### +// ################## TESTS ################## +// ########################################### + +// ################## POSITIVE TESTS ################## + +var _ = PDescribe("Verify Pipeline Run >", Label(constants.POSITIVE, constants.PipelineScheduledRun, constants.APIServerTests, constants.FullRegression), func() { + + Context("Create reccurring pipeline run >", func() { + It("Create a Pipeline Run with cron that runs every 5min", func() { + }) + It("Create a Pipeline Run with cron that runs at a specific time and day", func() { + }) + It("Create a Pipeline Run with cron that runs on alternate days", func() { + }) + It("Create a Pipeline Run with cron that runs right now", func() { + }) + It("Create a Pipeline Run with cache disabled", func() { + }) + }) + + Context("Disable reccurring pipeline run >", func() { + It("Create a Recurring pipeline Run, disable a run and make sure its not deleted", func() { + }) + }) + Context("Enable a disabled reccurring pipeline run >", func() { + It("Create a Recurring pipeline Run, disable it and then enable it", func() { + }) + }) + + Context("Get a reccurring pipeline run >", func() { + It("Create a Recurring pipeline Run, verify its details", func() { + }) + }) +}) + +var _ = PDescribe("List Recurring Pipeline Runs >", Label(constants.POSITIVE, constants.PipelineScheduledRun, "ListRecurringPipelineRun", constants.APIServerTests, constants.FullRegression), func() { + + Context("Basic Operations >", func() { + It("Create 2 runs and list", func() { + }) + It("When no recurring runs exist", func() { + }) + }) + Context("Sorting >", func() { + It("List Recurring pipeline Runs and sort by display name in ascending order", func() { + }) + It("List Recurring pipeline Runs and sort by display name in descending order", func() { + }) + It("List Recurring pipeline Runs and sort by id in ascending order", func() { + }) + It("List Recurring pipeline Runs and sort by id in descending order", func() { + }) + It("List Recurring pipeline Runs and sort by pipeline version id in ascending order", func() { + }) + It("List Recurring pipeline Runs and sort by pipeline version id in descending order", func() { + }) + It("List Recurring pipeline Runs and sort by creation date in ascending order", func() { + }) + It("List Recurring pipeline Runs and sort by creation date in descending order", func() { + }) + It("List Recurring pipeline Runs and sort by updated date in ascending order", func() { + }) + It("List Recurring pipeline Runs and sort by updated date in descending order", func() { + }) + It("List Recurring pipeline Runs and sort by cron time in ascending order", func() { + }) + It("List Recurring pipeline Runs and sort by cron time in descending order", func() { + }) + }) + Context("Specify Page Size >", func() { + It("List by page size", func() { + }) + It("List by page size and iterate over at least 2 pages", func() { + }) + }) + Context("Filtering >", func() { + It("By run id", func() { + }) + It("By run 'name' EQUALS", func() { + }) + It("By run 'name' NOT EQUALS", func() { + }) + It("By display name containing", func() { + }) + It("By creation date", func() { + }) + It("By cron time", func() { + }) + It("By experiment id", func() { + }) + It("By namespace", func() { + }) + It("By name in ascending order", func() { + }) + }) + Context("Sort and Filter >", func() { + It("Filter and sort by created date in descending order", func() { + }) + It("Filter by created date and sort by 
updated date in descending order", func() { + }) + }) +}) + +// ################## NEGATIVE TESTS ################## + +var _ = PDescribe("Verify Pipeline Run Negative Tests >", Label(constants.NEGATIVE, constants.PipelineScheduledRun, constants.APIServerTests, constants.FullRegression), func() { + + Context("Create reccurring pipeline run >", func() { + It("Create a Pipeline Run with invalid cron", func() { + }) + if *config.KubeflowMode { + It("In a namespace you don't have access to", func() { + }) + } + }) + + if *config.KubeflowMode { + Context("List reccurring pipeline runs in kubeflow mode >", func() { + It("List reccurring pipeline runs in a namespace you don't have access to", func() {}) + }) + } + + Context("Disable a recurring pipeline run >", func() { + It("Disable a deleted recurring run", func() { + }) + It("Disable a non existent recurring run", func() { + }) + It("Disable already disabled recurring run", func() { + }) + if *config.KubeflowMode { + It("In a namespace you don't have access to", func() { + }) + } + }) + Context("Enable a recurring pipeline run >", func() { + It("Enable a deleted recurring run", func() { + }) + It("Enable a non existent recurring run", func() { + }) + It("Enable an already enabled recurring run", func() { + }) + It("Enable an recurring run for a run that's associated with a delete experiment", func() { + }) + It("Enable an recurring run for a run that's associated with an archived experiment", func() { + }) + if *config.KubeflowMode { + It("In a namespace you don't have access to", func() { + }) + } + }) + Context("Delete a recurring pipeline run >", func() { + It("Delete a deleted recurring run", func() { + }) + It("Delete a non existent recurring run", func() { + }) + It("Delete an already deleted recurring run", func() { + }) + }) + Context("Associate a pipeline recurring run with invalid experiment >", func() { + It("Associate a recurring run with an archived experiment", func() { + }) + It("Associate a recurring run with non existent experiment", func() { + }) + It("Associate a recurring run with deleted experiment", func() { + }) + }) + Context("Get a reccurring pipeline run >", func() { + It("Get Recurring pipeline Run for a deleted run", func() { + }) + It("Get Recurring pipeline Run for a non existing run", func() { + }) + It("Get Recurring pipeline Run for a non recurring run i.e. one-off run", func() { + }) + if *config.KubeflowMode { + It("In a namespace you don't have access to", func() { + }) + } + }) +}) diff --git a/backend/test/v2/api/pipeline_run_api_test.go b/backend/test/v2/api/pipeline_run_api_test.go new file mode 100644 index 00000000000..1b0abb59d2f --- /dev/null +++ b/backend/test/v2/api/pipeline_run_api_test.go @@ -0,0 +1,433 @@ +// Copyright 2018-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
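+ +// pipeline_run_api_test.go covers the Run service: creating runs from every sample pipeline spec +// (with caching enabled and disabled), associating runs with experiments, running behind an HTTP +// proxy, archiving/unarchiving, plus negative cases; terminate and list scenarios are still +// pending (PContext) below.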
+ +package api + +import ( + "fmt" + "path/filepath" + "strings" + + experimentparams "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_client/experiment_service" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_model" + runparams "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/test/config" + "github.com/kubeflow/pipelines/backend/test/constants" + "github.com/kubeflow/pipelines/backend/test/logger" + "github.com/kubeflow/pipelines/backend/test/testutil" + "github.com/kubeflow/pipelines/backend/test/v2/api/matcher" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +// ################## CLASS VARIABLES ################## + +var experimentName string +var runName string +var runDescription string + +// ################## SETUP AND TEARDOWN ################## + +var _ = BeforeEach(func() { + logger.Log("Setting up Pipeline Run Tests") + runName = "API Test Run - " + randomName + runDescription = "API Test Run" + experimentName = "API Test Experiment - " + randomName +}) + +// ################## TESTS ################## +// ################## POSITIVE TESTS ################## + +var _ = Describe("Verify Pipeline Run >", Label(constants.POSITIVE, constants.PipelineRun, constants.APIServerTests, constants.FullRegression), func() { + + type TestParams struct { + pipelineCacheEnabled bool + } + + testParams := []TestParams{ + {pipelineCacheEnabled: true}, + {pipelineCacheEnabled: false}, + } + pipelineDirectory := "valid" + pipelineFilePaths := testutil.GetListOfAllFilesInDir(filepath.Join(pipelineFilesRootDir, pipelineDirectory)) + + Context("Create a valid pipeline and verify the created run >", func() { + for _, param := range testParams { + for _, pipelineFilePath := range pipelineFilePaths { + It(fmt.Sprintf("Create a '%s' pipeline with cacheEnabled=%t and verify run", pipelineFilePath, param.pipelineCacheEnabled), func() { + createdExperiment := createExperiment(experimentName) + pipelineFilePath := pipelineFilePath + pipelineFileName := filepath.Base(pipelineFilePath) + testutil.CheckIfSkipping(pipelineFileName) + configuredPipelineSpecFile := configureCacheSettingAndGetPipelineFile(pipelineFilePath, param.pipelineCacheEnabled) + createdPipeline := uploadAPipeline(configuredPipelineSpecFile, &testContext.Pipeline.PipelineGeneratedName) + createdPipelineVersion := testutil.GetLatestPipelineVersion(pipelineClient, &createdPipeline.PipelineID) + pipelineRuntimeInputs := testutil.GetPipelineRunTimeInputs(configuredPipelineSpecFile) + createdPipelineRun := createPipelineRun(&createdPipeline.PipelineID, &createdPipelineVersion.PipelineVersionID, &createdExperiment.ExperimentID, pipelineRuntimeInputs) + createdExpectedRunAndVerify(createdPipelineRun, &createdPipeline.PipelineID, &createdPipelineVersion.PipelineVersionID, &createdExperiment.ExperimentID, pipelineRuntimeInputs) + }) + } + } + pipelineFile := pipelineFilePaths[0] + It(fmt.Sprintf("Create a '%s' pipeline, create an experiement and verify run with associated experiment", pipelineFile), Label(constants.SMOKE), func() { + createdExperiment := createExperiment(experimentName) + createdPipeline := uploadAPipeline(pipelineFile, 
&testContext.Pipeline.PipelineGeneratedName) + createdPipelineVersion := testutil.GetLatestPipelineVersion(pipelineClient, &createdPipeline.PipelineID) + pipelineRuntimeInputs := testutil.GetPipelineRunTimeInputs(pipelineFile) + createdPipelineRun := createPipelineRun(&createdPipeline.PipelineID, &createdPipelineVersion.PipelineVersionID, &createdExperiment.ExperimentID, pipelineRuntimeInputs) + createdExpectedRunAndVerify(createdPipelineRun, &createdPipeline.PipelineID, &createdPipelineVersion.PipelineVersionID, &createdExperiment.ExperimentID, pipelineRuntimeInputs) + }) + }) + + Context("Associate a single experiment with multiple pipeline runs >", func() { + pipelineFile := pipelineFilePaths[0] + It("Create an experiment and associate it multiple pipeline runs of the same pipeline", func() { + createdExperiment := createExperiment(experimentName) + createdPipeline := uploadAPipeline(pipelineFile, &testContext.Pipeline.PipelineGeneratedName) + createdPipelineVersion := testutil.GetLatestPipelineVersion(pipelineClient, &createdPipeline.PipelineID) + pipelineRuntimeInputs := testutil.GetPipelineRunTimeInputs(pipelineFile) + createdPipelineRun1 := createPipelineRun(&createdPipeline.PipelineID, &createdPipelineVersion.PipelineVersionID, &createdExperiment.ExperimentID, pipelineRuntimeInputs) + createdExpectedRunAndVerify(createdPipelineRun1, &createdPipeline.PipelineID, &createdPipelineVersion.PipelineVersionID, &createdExperiment.ExperimentID, pipelineRuntimeInputs) + + createdPipelineRun2 := createPipelineRun(&createdPipeline.PipelineID, &createdPipelineVersion.PipelineVersionID, &createdExperiment.ExperimentID, pipelineRuntimeInputs) + createdExpectedRunAndVerify(createdPipelineRun2, &createdPipeline.PipelineID, &createdPipelineVersion.PipelineVersionID, &createdExperiment.ExperimentID, pipelineRuntimeInputs) + }) + It("Create an experiment and associate it pipeline runs of different pipelines", func() { + + createdExperiment := createExperiment(experimentName) + createdPipeline1 := uploadAPipeline(pipelineFile, &testContext.Pipeline.PipelineGeneratedName) + createdPipeline1Version := testutil.GetLatestPipelineVersion(pipelineClient, &createdPipeline1.PipelineID) + pipeline2Name := testContext.Pipeline.PipelineGeneratedName + "2" + createdPipeline2 := uploadAPipeline(pipelineFile, &pipeline2Name) + createdPipeline2Version := testutil.GetLatestPipelineVersion(pipelineClient, &createdPipeline2.PipelineID) + pipelineRuntimeInputs := testutil.GetPipelineRunTimeInputs(pipelineFile) + createdPipelineRun1 := createPipelineRun(&createdPipeline1.PipelineID, &createdPipeline1Version.PipelineVersionID, &createdExperiment.ExperimentID, pipelineRuntimeInputs) + createdExpectedRunAndVerify(createdPipelineRun1, &createdPipeline1.PipelineID, &createdPipeline1Version.PipelineVersionID, &createdExperiment.ExperimentID, pipelineRuntimeInputs) + + createdPipelineRun2 := createPipelineRun(&createdPipeline2.PipelineID, &createdPipeline2Version.PipelineVersionID, &createdExperiment.ExperimentID, pipelineRuntimeInputs) + createdExpectedRunAndVerify(createdPipelineRun2, &createdPipeline2.PipelineID, &createdPipeline2Version.PipelineVersionID, &createdExperiment.ExperimentID, pipelineRuntimeInputs) + }) + }) + + Context("Create a pipeline run with HTTP proxy >", func() { + pipelineFile := filepath.Join(pipelineFilesRootDir, pipelineDirectory, "env-var.yaml") + It(fmt.Sprintf("Create a pipeline run with http proxy, using specs: %s", pipelineFile), func() { + createdExperiment := createExperiment(experimentName) + 
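// This spec runs the env-var.yaml sample with the runtime input "env_var": "http_proxy" (set + // below), covering runs that depend on proxy-related environment variables being available. +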
createdPipeline := uploadAPipeline(pipelineFile, &testContext.Pipeline.PipelineGeneratedName) + createdPipelineVersion := testutil.GetLatestPipelineVersion(pipelineClient, &createdPipeline.PipelineID) + pipelineRuntimeInputs := map[string]interface{}{ + "env_var": "http_proxy", + } + createdPipelineRun := createPipelineRun(&createdPipeline.PipelineID, &createdPipelineVersion.PipelineVersionID, &createdExperiment.ExperimentID, pipelineRuntimeInputs) + createdExpectedRunAndVerify(createdPipelineRun, &createdPipeline.PipelineID, &createdPipelineVersion.PipelineVersionID, &createdExperiment.ExperimentID, pipelineRuntimeInputs) + }) + }) + + Context("Archive pipeline run(s) >", func() { + pipelineFile := filepath.Join(pipelineFilesRootDir, pipelineDirectory, "hello-world.yaml") + It("Create a pipeline run, archive it and verify that the run state does not change on archiving", func() { + createdExperiment := createExperiment(experimentName) + createdPipeline := uploadAPipeline(pipelineFile, &testContext.Pipeline.PipelineGeneratedName) + createdPipelineVersion := testutil.GetLatestPipelineVersion(pipelineClient, &createdPipeline.PipelineID) + pipelineRuntimeInputs := testutil.GetPipelineRunTimeInputs(pipelineFile) + createdPipelineRun := createPipelineRun(&createdPipeline.PipelineID, &createdPipelineVersion.PipelineVersionID, &createdExperiment.ExperimentID, pipelineRuntimeInputs) + archivePipelineRun(&createdPipelineRun.RunID) + pipelineRunAfterArchive := testutil.GetPipelineRun(runClient, &createdPipelineRun.RunID) + Expect(createdPipelineRun.State).To(Equal(pipelineRunAfterArchive.State)) + Expect(*pipelineRunAfterArchive.StorageState).To(Equal(run_model.V2beta1RunStorageStateARCHIVED)) + + }) + + It("Create a pipeline run, wait for the run to move to RUNNING, archive it and verify that the run state is still RUNNING on archiving", func() { + createdExperiment := createExperiment(experimentName) + createdPipeline := uploadAPipeline(pipelineFile, &testContext.Pipeline.PipelineGeneratedName) + createdPipelineVersion := testutil.GetLatestPipelineVersion(pipelineClient, &createdPipeline.PipelineID) + pipelineRuntimeInputs := testutil.GetPipelineRunTimeInputs(pipelineFile) + createdPipelineRun := createPipelineRun(&createdPipeline.PipelineID, &createdPipelineVersion.PipelineVersionID, &createdExperiment.ExperimentID, pipelineRuntimeInputs) + testutil.WaitForRunToBeInState(runClient, &createdPipelineRun.RunID, []run_model.V2beta1RuntimeState{run_model.V2beta1RuntimeStateRUNNING}, nil) + archivePipelineRun(&createdPipelineRun.RunID) + pipelineRunAfterArchive := testutil.GetPipelineRun(runClient, &createdPipelineRun.RunID) + Expect(*pipelineRunAfterArchive.State).To(Equal(run_model.V2beta1RuntimeStateRUNNING)) + Expect(*pipelineRunAfterArchive.StorageState).To(Equal(run_model.V2beta1RunStorageStateARCHIVED)) + + }) + }) + + Context("Unarchive pipeline run(s) >", func() { + pipelineFile := filepath.Join(pipelineFilesRootDir, pipelineDirectory, "hello-world.yaml") + It("Create a pipeline run, archive it and unarchive it and verify the storage state", func() { + createdExperiment := createExperiment(experimentName) + createdPipeline := uploadAPipeline(pipelineFile, &testContext.Pipeline.PipelineGeneratedName) + createdPipelineVersion := testutil.GetLatestPipelineVersion(pipelineClient, &createdPipeline.PipelineID) + pipelineRuntimeInputs := testutil.GetPipelineRunTimeInputs(pipelineFile) + createdPipelineRun := createPipelineRun(&createdPipeline.PipelineID, &createdPipelineVersion.PipelineVersionID, 
&createdExperiment.ExperimentID, pipelineRuntimeInputs) + archivePipelineRun(&createdPipelineRun.RunID) + unArchivePipelineRun(&createdPipelineRun.RunID) + pipelineRunAfterUnArchive := testutil.GetPipelineRun(runClient, &createdPipelineRun.RunID) + Expect(*pipelineRunAfterUnArchive.StorageState).To(Equal(run_model.V2beta1RunStorageStateAVAILABLE)) + }) + }) + + PContext("Terminate a pipeline run >", func() { + It("Terminate a run in RUNNING state", func() { + }) + It("Terminate a run in PENDING state", func() { + }) + It("Terminate a run in SUCCESSFUL or ERRORED state", func() { + }) + }) + + PContext("Get All pipeline run >", func() { + It("Create a Pipeline Run and validate that it gets returned in the List Runs API call", func() { + }) + It("Create 2 pipeline Runs, and list it", func() { + }) + It("List pipeline Runs when no runs exist", func() { + }) + It("List pipeline Runs and sort by display name in ascending order", func() { + }) + It("List pipeline Runs and sort by display name in descending order", func() { + }) + It("List pipeline Runs and sort by id in ascending order", func() { + }) + It("List pipeline Runs and sort by id in descending order", func() { + }) + It("List pipeline Runs and sort by pipeline version id in ascending order", func() { + }) + It("List pipeline Runs and sort by pipeline version id in descending order", func() { + }) + It("List pipeline Runs and sort by creation date in ascending order", func() { + }) + It("List pipeline Runs and sort by creation date in descending order", func() { + }) + It("List pipeline Runs and sort by updated date in ascending order", func() { + }) + It("List pipeline Runs and sort by updated date in descending order", func() { + }) + It("List pipeline Runs by specifying page size", func() { + }) + It("List pipeline Runs by specifying page size and iterate over at least 2 pages", func() { + }) + It("List pipeline Runs filtering by run id", func() { + }) + It("List pipeline Runs filtering by display name containing", func() { + }) + It("List pipeline Runs filtering by `name` EQUALS", func() { + }) + It("List pipeline Runs filtering by `name` NOT Equals", func() { + }) + It("List pipeline Runs filtering by creation date", func() { + }) + It("List pipeline Runs by experiment id", func() { + }) + It("List pipeline Runs by namespace", func() { + }) + }) +}) + +// ################## NEGATIVE TESTS ################## + +var _ = Describe("Verify Pipeline Run Negative Tests >", Label(constants.NEGATIVE, constants.PipelineRun, constants.APIServerTests, constants.FullRegression), func() { + + var pipelineFile string + var createdPipeline *pipeline_upload_model.V2beta1Pipeline + var createdPipelineVersion *pipeline_model.V2beta1PipelineVersion + var pipelineRuntimeInputs map[string]interface{} + + BeforeEach(func() { + pipelineFile = filepath.Join(testutil.GetValidPipelineFilesDir(), helloWorldPipelineFileName) + createdPipeline = uploadAPipeline(pipelineFile, &testContext.Pipeline.PipelineGeneratedName) + createdPipelineVersion = testutil.GetLatestPipelineVersion(pipelineClient, &createdPipeline.PipelineID) + pipelineRuntimeInputs = testutil.GetPipelineRunTimeInputs(pipelineFile) + }) + + if *config.MultiUserMode || *config.KubeflowMode { + Context("Verify pipeline run creation failure in Multi User Mode", func() { + + PIt("Create a run in an experiment that is not in the namespace that you have access to", func() { + }) + It("Create a run without an experiment in a Multi User Mode deployment", func() { + createRunRequest := 
&runparams.RunServiceCreateRunParams{Run: createPipelineRunPayload(&createdPipeline.PipelineID, &createdPipelineVersion.PipelineVersionID, nil, pipelineRuntimeInputs)} + _, createRunError := runClient.Create(createRunRequest) + Expect(createRunError).To(HaveOccurred(), "Expected the pipeline run creation to have failed") + Expect(createRunError.Error()).To(ContainSubstring("Experiment id can not be empty in multi-user mode"), "Expected the pipeline run creation failure to have a specific error message in the response") + + }) + }) + } + PContext("Unarchive a pipeline run >", func() { + It("Unarchive a deleted run", func() { + }) + It("Unarchive a non existent run", func() { + }) + It("Unarchive an available run", func() { + }) + if *config.KubeflowMode || *config.MultiUserMode { + It("In a namespace you don't have access to", func() { + }) + } + }) + PContext("Archive a pipeline run >", func() { + It("Archive a deleted run", func() { + }) + It("Archive a non existent run", func() { + }) + It("Archive an already archived run", func() { + }) + if *config.KubeflowMode { + It("In a namespace you don't have access to", func() { + }) + } + }) + PContext("Terminate a pipeline run >", func() { + It("Terminate a deleted run", func() { + }) + It("Terminate a non existent run", func() { + }) + It("Terminate an already terminated run", func() { + }) + if *config.KubeflowMode { + It("In a namespace you don't have access to", func() { + }) + } + }) + PContext("Delete a pipeline run >", func() { + It("Delete a deleted run", func() { + }) + It("Delete a non existent run", func() { + }) + }) + PContext("Associate a pipeline run with invalid experiment >", func() { + It("Associate a run with an archived experiment", func() { + }) + It("Associate a run with non existent experiment", func() { + }) + It("Associate a run with deleted experiment", func() { + }) + }) +}) + +// ################## UTILITY METHODS ################## + +func configureCacheSettingAndGetPipelineFile(pipelineFilePath string, cacheDisabled bool) string { + pipelineSpecsFromFile := testutil.ParseFileToSpecs(pipelineFilePath, cacheDisabled, nil) + newPipelineFile := testutil.CreateTempFile(pipelineSpecsFromFile.Bytes()) + return newPipelineFile.Name() +} + +func uploadAPipeline(pipelineFile string, pipelineName *string) *pipeline_upload_model.V2beta1Pipeline { + logger.Log("Create a pipeline") + testContext.Pipeline.UploadParams.SetName(pipelineName) + logger.Log("Uploading pipeline with name=%s, from file %s", *pipelineName, pipelineFile) + createdPipeline, uploadErr := testutil.UploadPipeline(pipelineUploadClient, pipelineFile, pipelineName, nil) + Expect(uploadErr).NotTo(HaveOccurred(), "Failed to upload pipeline") + testContext.Pipeline.CreatedPipelines = append(testContext.Pipeline.CreatedPipelines, createdPipeline) + return createdPipeline +} + +func createExperiment(experimentName string) *experiment_model.V2beta1Experiment { + createdExperiment := testutil.CreateExperimentWithParams(experimentClient, &experiment_model.V2beta1Experiment{ + DisplayName: experimentName, + Namespace: testutil.GetNamespace(), + }) + testContext.Experiment.CreatedExperimentIds = append(testContext.Experiment.CreatedExperimentIds, createdExperiment.ExperimentID) + return createdExperiment +} + +func createdExpectedRunAndVerify(createdPipelineRun *run_model.V2beta1Run, pipelineID *string, pipelineVersionID *string, experimentID *string, pipelineInputMap map[string]interface{}) { + expectedPipelineRun := createExpectedPipelineRun(pipelineID, 
pipelineVersionID, experimentID, pipelineInputMap, false) + matcher.MatchPipelineRuns(createdPipelineRun, expectedPipelineRun) + createdPipelineRunFromDB, createRunError := runClient.Get(&runparams.RunServiceGetRunParams{ + RunID: createdPipelineRun.RunID, + }) + Expect(createRunError).NotTo(HaveOccurred(), "Failed to get run with Id="+createdPipelineRun.RunID) + + // Making the fields that can be different but we don't care about equal to stabilize tests + matcher.MatchPipelineRuns(createdPipelineRun, createdPipelineRunFromDB) +} + +func createPipelineRun(pipelineID *string, pipelineVersionID *string, experimentID *string, inputParams map[string]interface{}) *run_model.V2beta1Run { + logger.Log("Create a pipeline run for pipeline with id=%s and versionId=%s", *pipelineID, *pipelineVersionID) + createRunRequest := &runparams.RunServiceCreateRunParams{Run: createPipelineRunPayload(pipelineID, pipelineVersionID, experimentID, inputParams)} + createdRun, createRunError := runClient.Create(createRunRequest) + Expect(createRunError).NotTo(HaveOccurred(), "Failed to create run for pipeline with id="+*pipelineID) + testContext.PipelineRun.CreatedRunIds = append(testContext.PipelineRun.CreatedRunIds, createdRun.RunID) + logger.Log("Created Pipeline Run successfully with runId=%s", createdRun.RunID) + return createdRun +} + +func archivePipelineRun(pipelineRunID *string) { + logger.Log("Archiving a pipeline run with id=%s", *pipelineRunID) + archiveRunParams := &runparams.RunServiceArchiveRunParams{ + RunID: *pipelineRunID, + } + archiveRunError := runClient.Archive(archiveRunParams) + Expect(archiveRunError).NotTo(HaveOccurred(), "Failed to archive run with id="+*pipelineRunID) + logger.Log("Successfully archived run with runId=%s", *pipelineRunID) +} + +func unArchivePipelineRun(pipelineRunID *string) { + logger.Log("Unarchiving a pipeline run with id=%s", *pipelineRunID) + unArchiveRunParams := &runparams.RunServiceUnarchiveRunParams{ + RunID: *pipelineRunID, + } + unarchiveRunError := runClient.Unarchive(unArchiveRunParams) + Expect(unarchiveRunError).NotTo(HaveOccurred(), "Failed to un-archive run with id="+*pipelineRunID) + logger.Log("Successfully unarchived run with runId=%s", *pipelineRunID) +} + +func createPipelineRunPayload(pipelineID *string, pipelineVersionID *string, experimentID *string, inputParams map[string]interface{}) *run_model.V2beta1Run { + logger.Log("Create a pipeline run body") + return &run_model.V2beta1Run{ + DisplayName: runName, + Description: runDescription, + ExperimentID: testutil.ParsePointersToString(experimentID), + ServiceAccount: testutil.GetDefaultPipelineRunnerServiceAccount(), + PipelineVersionReference: &run_model.V2beta1PipelineVersionReference{ + PipelineID: testutil.ParsePointersToString(pipelineID), + PipelineVersionID: testutil.ParsePointersToString(pipelineVersionID), + }, + RuntimeConfig: &run_model.V2beta1RuntimeConfig{ + Parameters: inputParams, + }, + } +} + +func createExpectedPipelineRun(pipelineID *string, pipelineVersionID *string, experimentID *string, inputParams map[string]interface{}, archived bool) *run_model.V2beta1Run { + expectedRun := createPipelineRunPayload(pipelineID, pipelineVersionID, experimentID, inputParams) + storageState := run_model.V2beta1RunStorageStateAVAILABLE + if archived { + storageState = run_model.V2beta1RunStorageStateARCHIVED + } + expectedRun.StorageState = &storageState + if experimentID == nil { + logger.Log("Fetch default experiment's experimentId") + pageSize := int32(1000) + experminents, expError := 
experimentClient.ListAll(&experimentparams.ExperimentServiceListExperimentsParams{ + Namespace: config.Namespace, + PageSize: &pageSize, + }, 1000) + Expect(expError).NotTo(HaveOccurred(), "Failed to list experiments") + for _, experiment := range experminents { + if strings.ToLower(experiment.DisplayName) == "default" { + expectedRun.ExperimentID = experiment.ExperimentID + } + } + } + return expectedRun +} diff --git a/backend/test/v2/api/pipeline_upload_api_test.go b/backend/test/v2/api/pipeline_upload_api_test.go new file mode 100644 index 00000000000..8099e93c62f --- /dev/null +++ b/backend/test/v2/api/pipeline_upload_api_test.go @@ -0,0 +1,251 @@ +// Copyright 2018-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package api + +import ( + "fmt" + "path/filepath" + "time" + + uploadparams "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service" + model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_model" + "github.com/kubeflow/pipelines/backend/test/config" + "github.com/kubeflow/pipelines/backend/test/constants" + "github.com/kubeflow/pipelines/backend/test/logger" + "github.com/kubeflow/pipelines/backend/test/testutil" + "github.com/kubeflow/pipelines/backend/test/v2/api/matcher" + + "github.com/go-openapi/strfmt" + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +// ################## CLASS VARIABLES ################## + +const ( + helloWorldPipelineFileName = "hello-world.yaml" + pipelineWithArgsFileName = "arguments-parameters.yaml" +) + +// ################## SET AND TEARDOWN ################## + +var _ = BeforeEach(func() { + logger.Log("################### Setup before each test #####################") + testStartTime, _ := strfmt.ParseDateTime(time.Now().Format(time.DateTime)) + testContext.Pipeline.CreatedPipelines = []*model.V2beta1Pipeline{} + testContext.Pipeline.ExpectedPipeline = new(model.V2beta1Pipeline) + testContext.Pipeline.ExpectedPipeline.CreatedAt = testStartTime + testContext.Pipeline.PipelineGeneratedName = "apitest-" + randomName + if *config.KubeflowMode || *config.MultiUserMode { + testContext.Pipeline.ExpectedPipeline.Namespace = *config.UserNamespace + } else { + testContext.Pipeline.ExpectedPipeline.Namespace = *config.Namespace + } +}) + +// ################## TESTS ################## + +// ################## POSITIVE TESTS ################## + +var _ = Describe("Verify Pipeline Upload >", Label(constants.POSITIVE, constants.PipelineUpload, constants.APIServerTests, constants.FullRegression), func() { + + /* Positive Scenarios of uploading a pipeline file */ + Context("Upload a valid pipeline and verify pipeline metadata after upload >", func() { + var pipelineDir = "valid" + validPipelineFilePaths := testutil.GetListOfAllFilesInDir(filepath.Join(pipelineFilesRootDir, pipelineDir)) + for _, pipelineFilePath := range validPipelineFilePaths { + It(fmt.Sprintf("Upload %s pipeline", pipelineFilePath), func() { + uploadPipelineAndVerify(pipelineFilePath, &testContext.Pipeline.PipelineGeneratedName, nil) + }) + } + + It(fmt.Sprintf("Upload %s pipeline file with custom name and description", helloWorldPipelineFileName), func() { + pipelineSpecFilePath := filepath.Join(pipelineFilesRootDir, pipelineDir, helloWorldPipelineFileName) + description := "Some pipeline description" + testContext.Pipeline.UploadParams.SetDescription(&description) + testContext.Pipeline.ExpectedPipeline.Description = description + uploadPipelineAndVerify(pipelineSpecFilePath, &testContext.Pipeline.PipelineGeneratedName, nil) + }) + + It(fmt.Sprintf("Upload %s pipeline file with custom name, display name and description", helloWorldPipelineFileName), func() { + pipelineSpecFilePath := filepath.Join(pipelineFilesRootDir, pipelineDir, helloWorldPipelineFileName) + description := "Some pipeline description" + displayName := fmt.Sprintf("Pipeline Display Name - %s ", testContext.Pipeline.PipelineGeneratedName) + testContext.Pipeline.UploadParams.SetDescription(&description) + testContext.Pipeline.ExpectedPipeline.Description = description + uploadPipelineAndVerify(pipelineSpecFilePath, &testContext.Pipeline.PipelineGeneratedName, &displayName) + }) + }) +}) + +var _ = Describe("Verify Pipeline Upload Version >", Label(constants.POSITIVE, "PipelineUpload", constants.APIServerTests, constants.FullRegression), func() { + var pipelineDir = "valid" + helloWorldPipelineSpecFilePath := filepath.Join(pipelineFilesRootDir, pipelineDir, helloWorldPipelineFileName) + argParamPipelineSpecFilePath := filepath.Join(pipelineFilesRootDir, pipelineDir, pipelineWithArgsFileName) + /* Positive Scenarios of uploading a pipeline file */ + Context("Upload a pipeline and upload the same pipeline to change version >", func() { + It(fmt.Sprintf("Upload %s pipeline file and upload a new version with the same file", helloWorldPipelineFileName), 
Label(constants.SMOKE), func() { + uploadPipelineAndChangePipelineVersion(helloWorldPipelineSpecFilePath, helloWorldPipelineSpecFilePath, &testContext.Pipeline.PipelineGeneratedName, nil) + }) + It(fmt.Sprintf("Upload %s pipeline file and upload a new version with the different file %s", helloWorldPipelineFileName, pipelineWithArgsFileName), func() { + uploadPipelineAndChangePipelineVersion(helloWorldPipelineSpecFilePath, argParamPipelineSpecFilePath, &testContext.Pipeline.PipelineGeneratedName, nil) + }) + + }) +}) + +// ################## NEGATIVE TESTS ################## + +var _ = Describe("Verify Pipeline Upload Failure >", Label("Negative", "PipelineUpload", constants.APIServerTests, constants.FullRegression), func() { + var pipelineDir = "invalid" + invalidPipelineFiles := testutil.GetListOfFilesInADir(filepath.Join(pipelineFilesRootDir, pipelineDir)) + + /* Negative scenarios of uploading a pipeline */ + Context("Upload an invalid pipeline spec and verify the error in the response >", func() { + for _, fileName := range invalidPipelineFiles { + filePath := filepath.Join(pipelineFilesRootDir, pipelineDir, fileName) + It(fmt.Sprintf("Upload a %s pipeline and verify the failure", fileName), func() { + uploadPipelineAndVerifyFailure(filePath, &testContext.Pipeline.PipelineGeneratedName, nil, "Failed to upload pipeline") + }) + } + + It("Upload a pipeline twice and verify that the second upload fails", func() { + var validPipelineDir = "valid" + filePath := filepath.Join(pipelineFilesRootDir, validPipelineDir, helloWorldPipelineFileName) + createdPipeline := uploadPipelineAndVerify(filePath, &testContext.Pipeline.PipelineGeneratedName, nil) + uploadPipelineAndVerifyFailure(filePath, &(createdPipeline.Name), nil, "Failed to upload pipeline") + }) + }) +}) + +var _ = Describe("Verify Pipeline Upload Version Failure >", Label("Negative", "PipelineUpload", constants.APIServerTests, constants.FullRegression), func() { + var pipelineDir = "valid" + pipelineSpecFilePath := filepath.Join(pipelineFilesRootDir, pipelineDir, helloWorldPipelineFileName) + /* Negative Scenarios of uploading a pipeline file */ + Context("Upload a pipeline and try creating a new version with invalid metadata >", func() { + It(fmt.Sprintf("Upload a new version of %s with the same name as the original version", helloWorldPipelineFileName), func() { + createdPipeline := uploadPipelineAndVerify(pipelineSpecFilePath, &testContext.Pipeline.PipelineGeneratedName, nil) + + parameters := uploadparams.NewUploadPipelineVersionParams() + parameters.Pipelineid = &(createdPipeline.PipelineID) + parameters.SetName(&createdPipeline.DisplayName) + uploadPipelineVersionAndVerifyFailure(pipelineSpecFilePath, parameters, "Failed to upload pipeline version") + }) + It(fmt.Sprintf("Upload a new version of %s with a fake pipeline id", helloWorldPipelineFileName), func() { + uploadPipelineAndVerify(pipelineSpecFilePath, &testContext.Pipeline.PipelineGeneratedName, nil) + + parameters := uploadparams.NewUploadPipelineVersionParams() + fakePipelineID := "12345" + parameters.Pipelineid = &fakePipelineID + uploadPipelineVersionAndVerifyFailure(pipelineSpecFilePath, parameters, "Failed to upload pipeline version") + }) + }) + + // TODO: To be implemented + if *config.KubeflowMode { + PContext("Upload a pipeline in MultiUser Mode >", func() { + It("Upload a pipeline in a namespace you don't have access to", func() { + }) + }) + } +}) + +// ################## UTILITY METHODS ################## + +/* +A common method that creates a pipeline and then creates
a new pipeline version +@param pipelineFilePathForCreation - pipeline file path for initial pipeline upload +@param pipelineFilePathWhenChangingVersion - the pipeline file path that you wish to upload when creating a new version +*/ +func uploadPipelineAndChangePipelineVersion(pipelineFilePathForCreation string, pipelineFilePathWhenChangingVersion string, pipelineName *string, pipelineDisplayName *string) { + createdPipeline := uploadPipelineAndVerify(pipelineFilePathForCreation, pipelineName, pipelineDisplayName) + + // Construct a payload to create new pipeline version + parameters := uploadparams.NewUploadPipelineVersionParams() + expectedPipelineVersion := new(model.V2beta1PipelineVersion) + descriptionNew := "Some changed pipeline description" + pipelineNameNew := createdPipeline.DisplayName + "-1" + parameters.Pipelineid = &(createdPipeline.PipelineID) + parameters.SetDescription(&descriptionNew) + parameters.SetName(&pipelineNameNew) + + // Construct expected Pipeline Spec from the uploaded file + inputFileContent := testutil.ParseFileToSpecs(pipelineFilePathWhenChangingVersion, true, nil) + + // Construct expected pipeline version object for comparison + expectedPipelineVersion.Description = descriptionNew + expectedPipelineVersion.PipelineSpec = inputFileContent + expectedPipelineVersion.DisplayName = pipelineNameNew + uploadPipelineVersionAndVerify(pipelineFilePathWhenChangingVersion, parameters, expectedPipelineVersion) +} + +func uploadPipeline(pipelineFilePath string, pipelineName *string, pipelineDisplayName *string) (*model.V2beta1Pipeline, error) { + testContext.Pipeline.UploadParams.SetName(pipelineName) + if pipelineDisplayName != nil { + testContext.Pipeline.ExpectedPipeline.DisplayName = *pipelineDisplayName + testContext.Pipeline.UploadParams.SetDisplayName(pipelineDisplayName) + } else { + testContext.Pipeline.ExpectedPipeline.DisplayName = *pipelineName + } + logger.Log("Uploading pipeline with name=%s, from file %s", *pipelineName, pipelineFilePath) + return pipelineUploadClient.UploadFile(pipelineFilePath, testContext.Pipeline.UploadParams) +} + +func uploadPipelineAndVerify(pipelineFilePath string, pipelineName *string, pipelineDisplayName *string) *model.V2beta1Pipeline { + createdPipeline, err := uploadPipeline(pipelineFilePath, pipelineName, pipelineDisplayName) + logger.Log("Verifying that NO error was returned in the response to confirm that the pipeline was successfully uploaded") + Expect(err).NotTo(HaveOccurred()) + testContext.Pipeline.CreatedPipelines = append(testContext.Pipeline.CreatedPipelines, createdPipeline) + + createdPipelineFromDB := testutil.GetPipeline(pipelineClient, createdPipeline.PipelineID) + Expect(createdPipelineFromDB).To(Equal(*createdPipeline)) + matcher.MatchPipelines(&createdPipelineFromDB, testContext.Pipeline.ExpectedPipeline) + + // Validate the created pipeline spec (by API server) matches the input file + expectedPipelineSpec := testutil.ParseFileToSpecs(pipelineFilePath, true, nil) + logger.Log("Verifying that the generated pipeline spec matches the input yaml file") + versions := testutil.GetSortedPipelineVersionsByCreatedAt(pipelineClient, createdPipeline.PipelineID, nil) + Expect(versions).Should(HaveLen(1), "Expected to find only one pipeline version after pipeline upload") + actualPipelineSpec := versions[0].PipelineSpec.(map[string]interface{}) + matcher.MatchPipelineSpecs(actualPipelineSpec, expectedPipelineSpec) + return createdPipeline +} + +func uploadPipelineAndVerifyFailure(pipelineFilePath string, pipelineName 
*string, pipelineDisplayName *string, errorMessage string) { + _, err := uploadPipeline(pipelineFilePath, pipelineName, pipelineDisplayName) + logger.Log("Verifying error in the response") + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring(errorMessage)) +} + +func uploadPipelineVersion(pipelineFilePath string, parameters *uploadparams.UploadPipelineVersionParams) (*model.V2beta1PipelineVersion, error) { + logger.Log("Uploading pipeline version for pipeline with id=%s, from file %s", *parameters.Pipelineid, pipelineFilePath) + return pipelineUploadClient.UploadPipelineVersion(pipelineFilePath, parameters) +} + +func uploadPipelineVersionAndVerify(pipelineFilePath string, parameters *uploadparams.UploadPipelineVersionParams, expectedPipelineVersion *model.V2beta1PipelineVersion) *model.V2beta1PipelineVersion { + createdPipelineVersion, err := uploadPipelineVersion(pipelineFilePath, parameters) + logger.Log("Verifying that NO error was returned in the response to confirm that the pipeline was successfully uploaded") + Expect(err).NotTo(HaveOccurred()) + matcher.MatchPipelineVersions(createdPipelineVersion, expectedPipelineVersion) + return createdPipelineVersion +} + +func uploadPipelineVersionAndVerifyFailure(pipelineFilePath string, parameters *uploadparams.UploadPipelineVersionParams, errorMessage string) { + _, err := uploadPipelineVersion(pipelineFilePath, parameters) + logger.Log("Verifying error in the response") + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring(errorMessage)) +} diff --git a/backend/test/v2/api/report_api_test.go b/backend/test/v2/api/report_api_test.go new file mode 100644 index 00000000000..04c281221a4 --- /dev/null +++ b/backend/test/v2/api/report_api_test.go @@ -0,0 +1,70 @@ +// Copyright 2018-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package api + +import ( + "fmt" + "path/filepath" + + "github.com/kubeflow/pipelines/backend/test/config" + "github.com/kubeflow/pipelines/backend/test/constants" + "github.com/kubeflow/pipelines/backend/test/testutil" + + . 
"github.com/onsi/ginkgo/v2" +) + +var projectDataDir = testutil.GetTestDataDir() +var workflowsDir = filepath.Join(projectDataDir, "compiled-workflows") + +// ################## TESTS ################## + +// ################## POSITIVE TESTS ################## + +var _ = PDescribe("Create Workflow API Tests >", Label(constants.POSITIVE, constants.ReportTests, constants.APIServerTests, constants.FullRegression), func() { + workflowFiles := testutil.GetListOfFilesInADir(workflowsDir) + Context("Create Workflow >", func() { + for _, workflowFile := range workflowFiles { + It(fmt.Sprintf("Create a workflow from: %s", workflowFile), func() { + }) + It(fmt.Sprintf("Create a scheduled workflow from: %s", workflowFile), func() { + }) + } + }) +}) + +// ################## NEGATIVE TESTS ################## + +var _ = PDescribe("Create Workflow Negative Tests >", Label(constants.NEGATIVE, constants.ReportTests, constants.APIServerTests, constants.FullRegression), func() { + + Context("Create workflow >", func() { + It("With invalid workflow schema", func() { + }) + if *config.KubeflowMode { + It("In a namespace you don't have access to", func() { + }) + } + }) + + Context("Create scheduled workflow >", func() { + It("With invalid workflow schema", func() { + }) + It("With valid workflow schema but invalid cron", func() { + }) + if *config.KubeflowMode { + It("In a namespace you don't have access to", func() { + }) + } + }) +}) diff --git a/backend/test/v2/api/test_context.go b/backend/test/v2/api/test_context.go new file mode 100644 index 00000000000..da78cea62c2 --- /dev/null +++ b/backend/test/v2/api/test_context.go @@ -0,0 +1,48 @@ +// Copyright 2021-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package api + +import ( + "time" + + uploadparams "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_model" +) + +type TestContext struct { + // Common Test Data + TestStartTimeUTC time.Time + + // Pipeline Context + Pipeline Pipeline + + PipelineRun PipelineRun + Experiment Experiment +} + +type Pipeline struct { + PipelineGeneratedName string + UploadParams *uploadparams.UploadPipelineParams + ExpectedPipeline *pipeline_upload_model.V2beta1Pipeline + CreatedPipelines []*pipeline_upload_model.V2beta1Pipeline +} + +type PipelineRun struct { + CreatedRunIds []string +} + +type Experiment struct { + CreatedExperimentIds []string +} diff --git a/backend/test/v2/api/upgrade_api_test.go b/backend/test/v2/api/upgrade_api_test.go new file mode 100644 index 00000000000..02e08c88891 --- /dev/null +++ b/backend/test/v2/api/upgrade_api_test.go @@ -0,0 +1,373 @@ +// Copyright 2018-2023 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package api + +import ( + "fmt" + "path/filepath" + + experimentparams "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_client/experiment_service" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" + uploadparams "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_model" + recurringrunparams "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model" + runparams "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/kubeflow/pipelines/backend/test/config" + "github.com/kubeflow/pipelines/backend/test/constants" + "github.com/kubeflow/pipelines/backend/test/logger" + "github.com/kubeflow/pipelines/backend/test/testutil" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +const ( + longRunningPipelineFileName = "take_nap_compiled.yaml" +) + +// ################## UPGRADE TEST PREPARATION ################## + +var _ = Describe("Upgrade Test Preparation >", Label(constants.UpgradePreparation, constants.FullRegression), func() { + Context("Prepare test data >", func() { + + It("Upload pipelines and create experiments", func() { + prepareExperiments() + preparePipelines() + }) + It("Create pipeline run", func() { + preparePipelineRun(helloWorldPipelineFileName, "Pipeline 1", "Experiment 1", "Run 1") + }) + It("Create pipeline run and wait for it to reach RUNNING state", func() { + run := preparePipelineRun(longRunningPipelineFileName, "Pipeline 3", "Experiment 3", "Run 3") + testutil.WaitForRunToBeInState(runClient, &run.RunID, []run_model.V2beta1RuntimeState{run_model.V2beta1RuntimeStateRUNNING}, nil) + }) + It("Create scheduled pipeline run", func() { + prepareScheduledPipelineRun(helloWorldPipelineFileName, "Pipeline 4", "Experiment 4", "Scheduled Run 1") + }) + + }) +}) + +// ################## UPGRADE TEST VERIFICATION ################## + +var _ = Describe("Upgrade Test Verification >", Label(constants.UpgradeVerification, constants.FullRegression), func() { + Context("Verify resources after upgrade >", func() { + It("Verify that pipelines and experiments persist correctly", func() { + verifyExperiments() + verifyPipelines() + }) + It("Verify pipeline run", func() { + verifyPipelineRun(helloWorldPipelineFileName, "Pipeline 1", "Experiment 1", "Run 1") + }) + It("Verify you can create a pipeline run after the upgrade", func() { + preparePipelineRun(helloWorldPipelineFileName, "Pipeline 2", "Experiment 2", "Run 2") + verifyPipelineRun(helloWorldPipelineFileName, "Pipeline 2", "Experiment 2", "Run 2") + }) + It("Verify pipeline run that was in RUNNING
state, still exists after the upgrade", func() { + verifyPipelineRun(longRunningPipelineFileName, "Pipeline 3", "Experiment 3", "Run 3") + }) + It("Verify scheduled pipeline run", func() { + verifyScheduledPipelineRun(helloWorldPipelineFileName, "Pipeline 4", "Experiment 4", "Scheduled Run 1") + }) + It("Verify you can create scheduled pipeline run after upgrade", func() { + prepareScheduledPipelineRun(helloWorldPipelineFileName, "Pipeline 5", "Experiment 5", "Scheduled Run 2") + verifyScheduledPipelineRun(helloWorldPipelineFileName, "Pipeline 5", "Experiment 5", "Scheduled Run 2") + }) + }) +}) + +// ################## HELPER FUNCTIONS ################## + +// ################## EXPERIMENTS ################## + +func getResourceNamespace() string { + if *config.KubeflowMode || *config.MultiUserMode { + return *config.UserNamespace + } + return *config.Namespace +} + +func getExpectedExperiments() []*experiment_model.V2beta1Experiment { + var experiments []*experiment_model.V2beta1Experiment + experiment1 := &experiment_model.V2beta1Experiment{ + DisplayName: "training", + Description: "my first experiment", + Namespace: getResourceNamespace(), + } + experiment2 := &experiment_model.V2beta1Experiment{ + DisplayName: "prediction", + Description: "my second experiment", + Namespace: getResourceNamespace(), + } + experiment3 := &experiment_model.V2beta1Experiment{ + DisplayName: "moonshot", + Description: "my third experiment", + Namespace: getResourceNamespace(), + } + experiments = append(experiments, experiment1, experiment2, experiment3) + return experiments +} + +func prepareExperiments() { + experiments := getExpectedExperiments() + for _, experiment := range experiments { + testutil.CreateExperimentWithParams(experimentClient, experiment) + } +} + +func verifyExperiments() { + namespace := getResourceNamespace() + allExperiments := testutil.ListExperiments( + experimentClient, + &experimentparams.ExperimentServiceListExperimentsParams{ + Namespace: &namespace, + SortBy: util.StringPointer("created_at"), + PageSize: util.Int32Pointer(1000), + }, + ) + expectedExperiments := getExpectedExperiments() + Expect(len(allExperiments)).To(BeNumerically(">", len(expectedExperiments))) + existingExperimentsMap := make(map[string]experiment_model.V2beta1Experiment) + for _, exp := range allExperiments { + existingExperimentsMap[exp.DisplayName] = *exp + } + for _, exp := range expectedExperiments { + existingExperiment := existingExperimentsMap[exp.DisplayName] + Expect(existingExperiment).ToNot(BeNil()) + Expect(existingExperiment.Description).To(Equal(exp.Description), fmt.Sprintf("Experiment %s description is not same", exp.DisplayName)) + // Experiment API response does not currently return populated Namespace field + // Expect(existingExperiment.Namespace).To(Equal(exp.Namespace), fmt.Sprintf("Experiment %s namespace is not same")) + } +} + +// ################## PIPELINES ################## + +func getExpectedPipelines() []*uploadparams.UploadPipelineParams { + var pipelines []*uploadparams.UploadPipelineParams + pipelineParams1 := uploadparams.NewUploadPipelineParams() + pipelineName1 := "training" + pipelineDescription1 := "My first pipeline" + pipelineParams1.SetName(&pipelineName1) + pipelineParams1.SetDisplayName(&pipelineName1) + pipelineParams1.SetDescription(&pipelineDescription1) + + pipelineParams2 := uploadparams.NewUploadPipelineParams() + pipelineName2 := "prediction" + pipelineDescription2 := "My Second pipeline" + pipelineParams2.SetName(&pipelineName2) + 
pipelineParams2.SetDisplayName(&pipelineName2) + pipelineParams2.SetDescription(&pipelineDescription2) + + pipelineParams3 := uploadparams.NewUploadPipelineParams() + pipelineName3 := "moonshot" + pipelineDescription3 := "My third pipeline" + pipelineParams3.SetName(&pipelineName3) + pipelineParams3.SetDisplayName(&pipelineName3) + pipelineParams3.SetDescription(&pipelineDescription3) + + pipelines = append(pipelines, pipelineParams1, pipelineParams2, pipelineParams3) + return pipelines +} + +// ################## PIPELINE RUNS ################## + +func getPipelineAndExperimentForRun(pipelineToUpload string, pipelineName string, experimentName string) (string, string, string) { + // Check if pipeline already exists or not, if not, then upload a new one + namespace := getResourceNamespace() + pipelineFilePath := filepath.Join(testutil.GetValidPipelineFilesDir(), pipelineToUpload) + var uploadedPipeline *pipeline_upload_model.V2beta1Pipeline + var err error + pipelineDisplayName := "Pipeline to Run" + pipelineDescription := "My Pipeline to Upload" + pipelineUploadParams := uploadparams.NewUploadPipelineParams() + pipelineUploadParams.SetName(&pipelineName) + pipelineUploadParams.SetDescription(&pipelineDescription) + pipelineUploadParams.SetDisplayName(&pipelineDisplayName) + existingPipelines := testutil.ListPipelines(pipelineClient, &namespace) + for _, pipeline := range existingPipelines { + if pipeline.Name == pipelineName { + logger.Log("Pipeline with name=%s, already exists", *pipelineUploadParams.Name) + uploadedPipeline = &pipeline_upload_model.V2beta1Pipeline{ + Name: pipeline.Name, + DisplayName: pipeline.DisplayName, + PipelineID: pipeline.PipelineID, + Description: pipeline.Description, + CreatedAt: pipeline.CreatedAt, + } + } + } + + if uploadedPipeline == nil { + logger.Log("Uploading pipeline from file %s", pipelineFilePath) + uploadedPipeline, err = pipelineUploadClient.UploadFile(pipelineFilePath, pipelineUploadParams) + Expect(err).To(BeNil(), "Failed to upload pipeline: %s", pipelineFilePath) + logger.Log("Uploaded pipeline from file %s", pipelineFilePath) + } + + // Get pipeline versions associated with the above pipeline + logger.Log("Fetch pipeline versions for pipeline with id= %s", uploadedPipeline.PipelineID) + uploadedPipelineVersions, _, _, pipelineVersionError := testutil.ListPipelineVersions(pipelineClient, uploadedPipeline.PipelineID) + Expect(pipelineVersionError).To(BeNil(), fmt.Sprintf("Failed to list uploaded pipeline versions for pipeline with id=%s", uploadedPipeline.PipelineID)) + logger.Log("Fetched %d pipeline versions for pipeline with id= %s", len(uploadedPipelineVersions), uploadedPipeline.PipelineID) + + // Get existing experiments and see if expected exists or not, if not, then create a new one + var createdExperiment *experiment_model.V2beta1Experiment + experimentParams := &experiment_model.V2beta1Experiment{ + DisplayName: experimentName, + Description: "my first experiment", + } + logger.Log("Fetching all experiments") + allExperiments := testutil.ListExperiments( + experimentClient, + &experimentparams.ExperimentServiceListExperimentsParams{ + SortBy: util.StringPointer("created_at"), + PageSize: util.Int32Pointer(1000), + }, + ) + for _, experiment := range allExperiments { + if experiment.DisplayName == experimentParams.DisplayName { + logger.Log("Found experiment with display name %s", experiment.DisplayName) + createdExperiment = experiment + } + } + if createdExperiment == nil { + logger.Log("No existing experiment found with name '%s', 
so Creating new experiment", experimentParams.DisplayName) + createdExperiment = testutil.CreateExperimentWithParams(experimentClient, experimentParams) + } + return uploadedPipeline.PipelineID, uploadedPipelineVersions[0].PipelineVersionID, createdExperiment.ExperimentID +} + +func getExpectedPipelineRun(pipelineToUpload string, pipelineName string, experimentName string, pipelineRunName string) *run_model.V2beta1Run { + pipelineID, pipelineVersionID, experimentID := getPipelineAndExperimentForRun(pipelineToUpload, pipelineName, experimentName) + + return &run_model.V2beta1Run{ + DisplayName: pipelineRunName, + Description: "This is my first pipeline run", + ExperimentID: experimentID, + ServiceAccount: testutil.GetDefaultPipelineRunnerServiceAccount(), + PipelineVersionReference: &run_model.V2beta1PipelineVersionReference{ + PipelineID: pipelineID, + PipelineVersionID: pipelineVersionID, + }, + } +} + +func getExpectedRecurringPipelineRun(pipelineToUpload string, pipelineName string, experimentName string, pipelineRunName string) *recurring_run_model.V2beta1RecurringRun { + pipelineID, pipelineVersionID, experimentID := getPipelineAndExperimentForRun(pipelineToUpload, pipelineName, experimentName) + + return &recurring_run_model.V2beta1RecurringRun{ + DisplayName: pipelineRunName, + Description: "This is my first recurring pipeline run", + ExperimentID: experimentID, + ServiceAccount: testutil.GetDefaultPipelineRunnerServiceAccount(), + PipelineVersionReference: &recurring_run_model.V2beta1PipelineVersionReference{ + PipelineID: pipelineID, + PipelineVersionID: pipelineVersionID, + }, + Trigger: &recurring_run_model.V2beta1Trigger{ + CronSchedule: &recurring_run_model.V2beta1CronSchedule{ + Cron: "*/10 * * * *", + }, + }, + Mode: recurring_run_model.RecurringRunModeENABLE.Pointer(), + MaxConcurrency: 1, + } +} + +func preparePipelines() { + pipelineFilePath := filepath.Join(testutil.GetValidPipelineFilesDir(), helloWorldPipelineFileName) + for _, pipelineParams := range getExpectedPipelines() { + logger.Log("Uploading pipeline with name=%s, from file %s", *pipelineParams.Name, pipelineFilePath) + _, err := pipelineUploadClient.UploadFile(pipelineFilePath, pipelineParams) + Expect(err).To(BeNil(), "Failed to upload pipeline with name: %s", pipelineParams.Name) + } +} + +func verifyPipelines() { + namespace := getResourceNamespace() + existingPipelines := testutil.ListPipelines(pipelineClient, &namespace) + expectedPipelines := getExpectedPipelines() + Expect(len(existingPipelines)).To(BeNumerically(">=", len(expectedPipelines))) + existingPipelinesMap := make(map[string]*pipeline_model.V2beta1Pipeline) + for _, pipeline := range existingPipelines { + existingPipelinesMap[pipeline.Name] = pipeline + } + for _, pipelineParams := range expectedPipelines { + existingPipeline := existingPipelinesMap[*pipelineParams.Name] + Expect(existingPipeline).ToNot(BeNil()) + Expect(existingPipeline.Description).To(Equal(*pipelineParams.Description), fmt.Sprintf("Pipeline %s description is not same", *pipelineParams.Name)) + Expect(existingPipeline.DisplayName).To(Equal(*pipelineParams.DisplayName), fmt.Sprintf("Pipeline %s display name is not same", *pipelineParams.Name)) + } +} + +func preparePipelineRun(pipelineToUpload string, pipelineName string, experimentName string, runName string) *run_model.V2beta1Run { + expectedPipelineRun := getExpectedPipelineRun(pipelineToUpload, pipelineName, experimentName, runName) + pipelineRun, pipelineRunError := 
runClient.Create(&runparams.RunServiceCreateRunParams{Run: expectedPipelineRun}) + Expect(pipelineRunError).To(BeNil(), "Failed to create pipeline run") + return pipelineRun +} + +func verifyPipelineRun(uploadedPipeline string, pipelineName string, experimentName string, runName string) { + runListParams := &runparams.RunServiceListRunsParams{ + SortBy: util.StringPointer("created_at"), + PageSize: util.Int32Pointer(1000), + } + + expectedRun := getExpectedPipelineRun(uploadedPipeline, pipelineName, experimentName, runName) + allRuns, _, _, err := runClient.List(runListParams) + Expect(err).To(BeNil(), "Failed to list runs") + runPassed := false + for _, run := range allRuns { + if run.DisplayName == expectedRun.DisplayName && run.PipelineVersionID == expectedRun.PipelineVersionID { + Expect(run.ExperimentID).To(Equal(expectedRun.ExperimentID), fmt.Sprintf("Experiment id for runid=%s is not same", expectedRun.DisplayName)) + Expect(run.Description).To(Equal(expectedRun.Description), "Run description is not same") + runPassed = true + } + } + Expect(runPassed).To(BeTrue(), "Failed to find the pipeline run") +} + +func prepareScheduledPipelineRun(pipelineToUpload string, pipelineName string, experimentName string, runName string) *recurring_run_model.V2beta1RecurringRun { + expectedPipelineRun := getExpectedRecurringPipelineRun(pipelineToUpload, pipelineName, experimentName, runName) + pipelineRun, pipelineRunError := recurringRunClient.Create(&recurringrunparams.RecurringRunServiceCreateRecurringRunParams{RecurringRun: expectedPipelineRun}) + Expect(pipelineRunError).To(BeNil(), "Failed to create pipeline run") + return pipelineRun +} + +func verifyScheduledPipelineRun(uploadedPipeline string, pipelineName string, experimentName string, runName string) { + runListParams := &recurringrunparams.RecurringRunServiceListRecurringRunsParams{ + SortBy: util.StringPointer("created_at"), + PageSize: util.Int32Pointer(1000), + } + + expectedRun := getExpectedRecurringPipelineRun(uploadedPipeline, pipelineName, experimentName, runName) + allRuns, _, _, err := recurringRunClient.List(runListParams) + Expect(err).To(BeNil(), "Failed to list recurring runs") + runPassed := false + for _, run := range allRuns { + if run.DisplayName == expectedRun.DisplayName && run.PipelineVersionID == expectedRun.PipelineVersionID { + Expect(run.ExperimentID).To(Equal(expectedRun.ExperimentID), fmt.Sprintf("Experiment id for runid=%s is not same", expectedRun.DisplayName)) + Expect(run.Description).To(Equal(expectedRun.Description), "Run description is not same") + Expect(run.Mode.Pointer()).To(Equal(expectedRun.Mode), "Run mode is not same") + Expect(run.Trigger.CronSchedule.Cron).To(Equal(expectedRun.Trigger.CronSchedule.Cron), "Cron schedule is not same") + runPassed = true + } + } + Expect(runPassed).To(BeTrue(), "Failed to find the pipeline run") +} diff --git a/backend/test/v2/initialization/README.md b/backend/test/v2/initialization/README.md deleted file mode 100644 index ee6e5109717..00000000000 --- a/backend/test/v2/initialization/README.md +++ /dev/null @@ -1,11 +0,0 @@ -## Api Server Integration Tests - -### WARNING -**These integration tests will delete all the data in your KFP instance, please only use a test cluster to run these.** - -### How to run - -1. Configure kubectl to connect to your kfp cluster. -2. Run the following for all integration tests: `NAMESPACE= ./run_tests_locally.sh`. -3. Or run the following to select certain tests: `NAMESPACE= ./run_tests_locally.sh -testify.m Job`. 
- Reference: https://stackoverflow.com/a/43312451 diff --git a/backend/test/v2/initialization/flags.go b/backend/test/v2/initialization/flags.go deleted file mode 100644 index 3325bb6a0b9..00000000000 --- a/backend/test/v2/initialization/flags.go +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright 2018-2023 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package initialization - -import ( - "flag" - "time" -) - -var ( - namespace = flag.String("namespace", "kubeflow", "The namespace ml pipeline deployed to") - initializeTimeout = flag.Duration("initializeTimeout", 2*time.Minute, "Duration to wait for test initialization") - runIntegrationTests = flag.Bool("runIntegrationTests", false, "Whether to also run integration tests that call the service") -) diff --git a/backend/test/v2/initialization/initialization_test.go b/backend/test/v2/initialization/initialization_test.go deleted file mode 100644 index 829e0771f4a..00000000000 --- a/backend/test/v2/initialization/initialization_test.go +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright 2018-2023 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package initialization - -import ( - "testing" - - "github.com/golang/glog" - params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_client/experiment_service" - api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v2" - test "github.com/kubeflow/pipelines/backend/test/v2" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/suite" -) - -type InitializationTest struct { - suite.Suite - namespace string - experimentClient *api_server.ExperimentClient -} - -// Check the namespace have ML job installed and ready -func (s *InitializationTest) SetupTest() { - if !*runIntegrationTests { - s.T().SkipNow() - return - } - - err := test.WaitForReady(*initializeTimeout) - if err != nil { - glog.Exitf("Failed to initialize test. Error: %v", err) - } - s.namespace = *namespace - clientConfig := test.GetClientConfig(*namespace) - s.experimentClient, err = api_server.NewExperimentClient(clientConfig, false) - if err != nil { - glog.Exitf("Failed to get experiment client. 
Error: %v", err) - } -} - -func (s *InitializationTest) TestInitialization() { - t := s.T() - - /* ---------- Verify that only the default experiment exists ---------- */ - experiments, totalSize, _, err := s.experimentClient.List(¶ms.ExperimentServiceListExperimentsParams{}) - assert.Nil(t, err) - assert.Equal(t, 1, totalSize) - assert.True(t, len(experiments) == 1) - assert.Equal(t, "Default", experiments[0].DisplayName) - - /* ---------- Clean up ---------- */ - test.DeleteAllExperiments(s.experimentClient, "", t) -} - -func TestInitialization(t *testing.T) { - suite.Run(t, new(InitializationTest)) -} diff --git a/backend/test/v2/integration/cache_test.go b/backend/test/v2/integration/cache_test.go index 47a643075ff..82d7ceadcb2 100644 --- a/backend/test/v2/integration/cache_test.go +++ b/backend/test/v2/integration/cache_test.go @@ -6,7 +6,6 @@ import ( "testing" "time" - "github.com/golang/glog" uploadParams "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service" "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_model" recurringRunParams "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service" @@ -17,10 +16,17 @@ import ( "github.com/kubeflow/pipelines/backend/src/common/util" "github.com/kubeflow/pipelines/backend/src/v2/metadata" "github.com/kubeflow/pipelines/backend/src/v2/metadata/testutils" + "github.com/kubeflow/pipelines/backend/test/config" "github.com/kubeflow/pipelines/backend/test/v2" pb "github.com/kubeflow/pipelines/third_party/ml-metadata/go/ml_metadata" + + "github.com/golang/glog" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" + v1 "k8s.io/api/core/v1" + k8sres "k8s.io/apimachinery/pkg/api/resource" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/client-go/kubernetes" ) type CacheTestSuite struct { @@ -28,7 +34,7 @@ type CacheTestSuite struct { namespace string resourceNamespace string pipelineClient *apiServer.PipelineClient - pipelineUploadClient *apiServer.PipelineUploadClient + pipelineUploadClient apiServer.PipelineUploadInterface runClient *apiServer.RunClient recurringRunClient *apiServer.RecurringRunClient mlmdClient pb.MetadataStoreServiceClient @@ -57,9 +63,8 @@ func (s *CacheTestSuite) SetupTest() { glog.Exitf("Failed to initialize test. 
Error: %s", err.Error()) } } - s.namespace = *namespace + s.namespace = *config.Namespace - var newPipelineUploadClient func() (*apiServer.PipelineUploadClient, error) var newPipelineClient func() (*apiServer.PipelineClient, error) var newRunClient func() (*apiServer.RunClient, error) var newRecurringRunClient func() (*apiServer.RecurringRunClient, error) @@ -67,37 +72,37 @@ func (s *CacheTestSuite) SetupTest() { if *isKubeflowMode { s.resourceNamespace = *resourceNamespace - newPipelineUploadClient = func() (*apiServer.PipelineUploadClient, error) { - return apiServer.NewKubeflowInClusterPipelineUploadClient(s.namespace, *isDebugMode) - } newPipelineClient = func() (*apiServer.PipelineClient, error) { - return apiServer.NewKubeflowInClusterPipelineClient(s.namespace, *isDebugMode) + return apiServer.NewKubeflowInClusterPipelineClient(s.namespace, *config.DebugMode) } newRunClient = func() (*apiServer.RunClient, error) { - return apiServer.NewKubeflowInClusterRunClient(s.namespace, *isDebugMode) + return apiServer.NewKubeflowInClusterRunClient(s.namespace, *config.DebugMode) } newRecurringRunClient = func() (*apiServer.RecurringRunClient, error) { - return apiServer.NewKubeflowInClusterRecurringRunClient(s.namespace, *isDebugMode) + return apiServer.NewKubeflowInClusterRecurringRunClient(s.namespace, *config.DebugMode) } } else { - clientConfig := test.GetClientConfig(*namespace) + clientConfig := test.GetClientConfig(*config.Namespace) - newPipelineUploadClient = func() (*apiServer.PipelineUploadClient, error) { - return apiServer.NewPipelineUploadClient(clientConfig, *isDebugMode) - } newPipelineClient = func() (*apiServer.PipelineClient, error) { - return apiServer.NewPipelineClient(clientConfig, *isDebugMode) + return apiServer.NewPipelineClient(clientConfig, *config.DebugMode) } newRunClient = func() (*apiServer.RunClient, error) { - return apiServer.NewRunClient(clientConfig, *isDebugMode) + return apiServer.NewRunClient(clientConfig, *config.DebugMode) } newRecurringRunClient = func() (*apiServer.RecurringRunClient, error) { - return apiServer.NewRecurringRunClient(clientConfig, *isDebugMode) + return apiServer.NewRecurringRunClient(clientConfig, *config.DebugMode) } } var err error - s.pipelineUploadClient, err = newPipelineUploadClient() + s.pipelineUploadClient, err = test.GetPipelineUploadClient( + *uploadPipelinesWithKubernetes, + *isKubeflowMode, + *config.DebugMode, + s.namespace, + test.GetClientConfig(s.namespace), + ) if err != nil { glog.Exitf("Failed to get pipeline upload client. 
Error: %s", err.Error()) } @@ -122,7 +127,7 @@ func (s *CacheTestSuite) TestCacheRecurringRun() { pipelineVersion := s.preparePipeline() - createRecurringRunRequest := &recurringRunParams.RecurringRunServiceCreateRecurringRunParams{Body: &recurring_run_model.V2beta1RecurringRun{ + createRecurringRunRequest := &recurringRunParams.RecurringRunServiceCreateRecurringRunParams{RecurringRun: &recurring_run_model.V2beta1RecurringRun{ DisplayName: "hello world", Description: "this is hello world", PipelineVersionReference: &recurring_run_model.V2beta1PipelineVersionReference{ @@ -130,7 +135,7 @@ func (s *CacheTestSuite) TestCacheRecurringRun() { PipelineVersionID: pipelineVersion.PipelineVersionID, }, MaxConcurrency: 10, - Mode: recurring_run_model.RecurringRunModeENABLE, + Mode: recurring_run_model.RecurringRunModeENABLE.Pointer(), Trigger: &recurring_run_model.V2beta1Trigger{ PeriodicSchedule: &recurring_run_model.V2beta1PeriodicSchedule{ IntervalSecond: 60, @@ -155,7 +160,7 @@ func (s *CacheTestSuite) TestCacheRecurringRun() { if len(allRuns) >= 2 { for _, run := range allRuns { - if run.State != run_model.V2beta1RuntimeStateSUCCEEDED { + if *run.State != *run_model.V2beta1RuntimeStateSUCCEEDED.Pointer() { return false } } @@ -165,38 +170,11 @@ func (s *CacheTestSuite) TestCacheRecurringRun() { return false }, 4*time.Minute, 5*time.Second) - contextsFilterQuery := fmt.Sprintf("name = '%s'", allRuns[1].RunID) - - contexts, err := s.mlmdClient.GetContexts(context.Background(), &pb.GetContextsRequest{ - Options: &pb.ListOperationOptions{ - FilterQuery: &contextsFilterQuery, - }, - }) - - require.NoError(t, err) - require.NotNil(t, contexts) - - executionsByContext, err := s.mlmdClient.GetExecutionsByContext(context.Background(), &pb.GetExecutionsByContextRequest{ - ContextId: contexts.Contexts[0].Id, - }) - - require.NoError(t, err) - require.NotNil(t, executionsByContext) - require.NotEmpty(t, executionsByContext.Executions) - - var containerExecution *pb.Execution - for _, execution := range executionsByContext.Executions { - if metadata.ExecutionType(execution.GetType()) == metadata.ContainerExecutionTypeName { - containerExecution = execution - break - } - } - require.NotNil(t, containerExecution) - + state := s.getContainerExecutionState(t, allRuns[1].RunID) if *cacheEnabled { - require.Equal(t, pb.Execution_CACHED.Enum().String(), containerExecution.LastKnownState.String()) + require.Equal(t, pb.Execution_CACHED, state) } else { - require.Equal(t, pb.Execution_COMPLETE.Enum().String(), containerExecution.LastKnownState.String()) + require.Equal(t, pb.Execution_COMPLETE, state) } } @@ -213,43 +191,110 @@ func (s *CacheTestSuite) TestCacheSingleRun() { require.NoError(t, err) require.NotNil(t, pipelineRunDetail) - contextsFilterQuery := fmt.Sprintf("name = '%s'", pipelineRunDetail.RunID) + state := s.getContainerExecutionState(t, pipelineRunDetail.RunID) + if *cacheEnabled { + require.Equal(t, pb.Execution_CACHED, state) + } else { + require.Equal(t, pb.Execution_COMPLETE, state) + } +} - contexts, err := s.mlmdClient.GetContexts(context.Background(), &pb.GetContextsRequest{ - Options: &pb.ListOperationOptions{ - FilterQuery: &contextsFilterQuery, +// Test that a pipeline using a PVC with the same name across runs hits the cache on the second run. 
+func (s *CacheTestSuite) TestCacheSingleRunWithPVC_SameName_Caches() { + t := s.T() + + if !*cacheEnabled { + t.Skip("Skipping PVC cache test: cache is disabled") + + return + } + + pvcPipelinePath := "../resources/pvc-mount.yaml" + + // Create a small PVC up-front so the pipeline can mount it by name. + restCfg, err := util.GetKubernetesConfig() + require.NoError(t, err) + clientset, err := kubernetes.NewForConfig(restCfg) + require.NoError(t, err) + + pvcName := fmt.Sprintf("test-cache-pvc-%d", time.Now().UnixNano()) + storageClass := "standard" + qty := k8sres.MustParse("5Mi") + pvc := &v1.PersistentVolumeClaim{ + ObjectMeta: metav1.ObjectMeta{ + Name: pvcName, }, - }) + Spec: v1.PersistentVolumeClaimSpec{ + AccessModes: []v1.PersistentVolumeAccessMode{v1.ReadWriteOnce}, + Resources: v1.VolumeResourceRequirements{ + Requests: v1.ResourceList{v1.ResourceStorage: qty}, + }, + StorageClassName: &storageClass, + }, + } + _, err = clientset.CoreV1().PersistentVolumeClaims(s.namespace).Create(context.Background(), pvc, metav1.CreateOptions{}) + require.NoError(t, err) + defer func() { + _ = clientset.CoreV1().PersistentVolumeClaims(s.namespace).Delete(context.Background(), pvcName, metav1.DeleteOptions{}) + }() + // Upload pipeline and create a version + pipeline, err := s.pipelineUploadClient.UploadFile(pvcPipelinePath, uploadParams.NewUploadPipelineParams()) require.NoError(t, err) - require.NotNil(t, contexts) + require.NotNil(t, pipeline) - executionsByContext, err := s.mlmdClient.GetExecutionsByContext(context.Background(), &pb.GetExecutionsByContextRequest{ - ContextId: contexts.Contexts[0].Id, - }) + time.Sleep(1 * time.Second) + pipelineVersion, err := s.pipelineUploadClient.UploadPipelineVersion( + pvcPipelinePath, + &uploadParams.UploadPipelineVersionParams{ + Name: util.StringPointer("pvc-cache-version"), + Pipelineid: util.StringPointer(pipeline.PipelineID), + }, + ) + require.NoError(t, err) + require.NotNil(t, pipelineVersion) + // First run with fixed PVC name + run1, err := s.createRunWithParams(pipelineVersion, map[string]interface{}{"pvc_name": pvcName}) require.NoError(t, err) - require.NotNil(t, executionsByContext) - require.NotEmpty(t, executionsByContext.Executions) + require.NotNil(t, run1) - var containerExecution *pb.Execution - for _, execution := range executionsByContext.Executions { - if metadata.ExecutionType(execution.GetType()) == metadata.ContainerExecutionTypeName { - containerExecution = execution - break - } + // Second run with the same PVC name should hit cache when enabled + run2, err := s.createRunWithParams(pipelineVersion, map[string]interface{}{"pvc_name": pvcName}) + require.NoError(t, err) + require.NotNil(t, run2) + + state := s.getContainerExecutionState(t, run2.RunID) + require.Equal(t, pb.Execution_CACHED, state) + + // Third run with a different PVC name should not hit cache. 
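+	// The pvc_name value is passed as a run parameter, so a different value should change the cache key
+	// and the container execution is expected to finish as COMPLETE rather than CACHED.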
+ otherPVCName := fmt.Sprintf("%s-alt", pvcName) + // Create the alternate PVC so the pipeline can mount it + pvcAlt := &v1.PersistentVolumeClaim{ + ObjectMeta: metav1.ObjectMeta{Name: otherPVCName}, + Spec: v1.PersistentVolumeClaimSpec{ + AccessModes: []v1.PersistentVolumeAccessMode{v1.ReadWriteOnce}, + Resources: v1.VolumeResourceRequirements{Requests: v1.ResourceList{v1.ResourceStorage: qty}}, + StorageClassName: &storageClass, + }, } - require.NotNil(t, containerExecution) + _, err = clientset.CoreV1().PersistentVolumeClaims(s.namespace).Create(context.Background(), pvcAlt, metav1.CreateOptions{}) + require.NoError(t, err) + defer func() { + _ = clientset.CoreV1().PersistentVolumeClaims(s.namespace).Delete(context.Background(), otherPVCName, metav1.DeleteOptions{}) + }() - if *cacheEnabled { - require.Equal(t, pb.Execution_CACHED.Enum().String(), containerExecution.LastKnownState.String()) - } else { - require.Equal(t, pb.Execution_COMPLETE.Enum().String(), containerExecution.LastKnownState.String()) - } + run3, err := s.createRunWithParams(pipelineVersion, map[string]interface{}{"pvc_name": otherPVCName}) + require.NoError(t, err) + require.NotNil(t, run3) + + state = s.getContainerExecutionState(t, run3.RunID) + // With a different PVC, do not expect cache hit + require.Equal(t, pb.Execution_COMPLETE, state) } func (s *CacheTestSuite) createRun(pipelineVersion *pipeline_upload_model.V2beta1PipelineVersion) (*run_model.V2beta1Run, error) { - createRunRequest := &runParams.RunServiceCreateRunParams{Body: &run_model.V2beta1Run{ + createRunRequest := &runParams.RunServiceCreateRunParams{Run: &run_model.V2beta1Run{ DisplayName: "hello-world", Description: "this is hello-world", PipelineVersionReference: &run_model.V2beta1PipelineVersionReference{ @@ -269,9 +314,40 @@ func (s *CacheTestSuite) createRun(pipelineVersion *pipeline_upload_model.V2beta require.Eventually(s.T(), func() bool { pipelineRunDetail, err = s.runClient.Get(&runParams.RunServiceGetRunParams{RunID: pipelineRunDetail.RunID}) - s.T().Logf("Pipeline %v state: %v", pipelineRunDetail.RunID, pipelineRunDetail.State) + if err == nil { + s.T().Logf("Pipeline %v state: %v", pipelineRunDetail.RunID, *pipelineRunDetail.State) + } else { + s.T().Logf("Pipeline %v state: %v", pipelineRunDetail.RunID, err.Error()) + } + + return err == nil && *pipelineRunDetail.State == expectedState + }, 2*time.Minute, 10*time.Second) + + return pipelineRunDetail, err +} + +func (s *CacheTestSuite) createRunWithParams(pipelineVersion *pipeline_upload_model.V2beta1PipelineVersion, params map[string]interface{}) (*run_model.V2beta1Run, error) { + createRunRequest := &runParams.RunServiceCreateRunParams{Run: &run_model.V2beta1Run{ + DisplayName: "pvc-cache", + Description: "pvc cache test", + PipelineVersionReference: &run_model.V2beta1PipelineVersionReference{ + PipelineID: pipelineVersion.PipelineID, + PipelineVersionID: pipelineVersion.PipelineVersionID, + }, + RuntimeConfig: &run_model.V2beta1RuntimeConfig{Parameters: params}, + }} + pipelineRunDetail, err := s.runClient.Create(createRunRequest) + require.NoError(s.T(), err) - return err == nil && pipelineRunDetail.State == expectedState + expectedState := run_model.V2beta1RuntimeStateSUCCEEDED + require.Eventually(s.T(), func() bool { + pipelineRunDetail, err = s.runClient.Get(&runParams.RunServiceGetRunParams{RunID: pipelineRunDetail.RunID}) + if err == nil { + s.T().Logf("PVC pipeline %v state: %v", pipelineRunDetail.RunID, *pipelineRunDetail.State) + } else { + s.T().Logf("PVC pipeline %v state: 
%v", pipelineRunDetail.RunID, err.Error()) + } + return err == nil && *pipelineRunDetail.State == expectedState }, 2*time.Minute, 10*time.Second) return pipelineRunDetail, err @@ -305,3 +381,31 @@ func (s *CacheTestSuite) cleanUp() { test.DeleteAllRecurringRuns(s.recurringRunClient, s.resourceNamespace, s.T()) test.DeleteAllPipelines(s.pipelineClient, s.T()) } + +// getContainerExecutionState fetches the container execution state for a given run ID. +func (s *CacheTestSuite) getContainerExecutionState(t *testing.T, runID string) pb.Execution_State { + contextsFilterQuery := fmt.Sprintf("name = '%s'", runID) + + contexts, err := s.mlmdClient.GetContexts(context.Background(), &pb.GetContextsRequest{ + Options: &pb.ListOperationOptions{ + FilterQuery: &contextsFilterQuery, + }, + }) + require.NoError(t, err) + require.NotNil(t, contexts) + + executionsByContext, err := s.mlmdClient.GetExecutionsByContext(context.Background(), &pb.GetExecutionsByContextRequest{ + ContextId: contexts.Contexts[0].Id, + }) + require.NoError(t, err) + require.NotNil(t, executionsByContext) + require.NotEmpty(t, executionsByContext.Executions) + + for _, execution := range executionsByContext.Executions { + if metadata.ExecutionType(execution.GetType()) == metadata.ContainerExecutionTypeName { + return execution.GetLastKnownState() + } + } + t.Fatalf("no container execution found for run %s", runID) + return pb.Execution_UNKNOWN +} diff --git a/backend/test/v2/integration/experiment_api_test.go b/backend/test/v2/integration/experiment_api_test.go index e22af6083cd..4b8c480cf7c 100644 --- a/backend/test/v2/integration/experiment_api_test.go +++ b/backend/test/v2/integration/experiment_api_test.go @@ -18,7 +18,6 @@ import ( "testing" "time" - "github.com/golang/glog" params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_client/experiment_service" "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" upload_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service" @@ -28,7 +27,10 @@ import ( "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v2" "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/kubeflow/pipelines/backend/test/config" test "github.com/kubeflow/pipelines/backend/test/v2" + + "github.com/golang/glog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" ) @@ -39,7 +41,7 @@ type ExperimentApiTest struct { resourceNamespace string experimentClient *api_server.ExperimentClient pipelineClient *api_server.PipelineClient - pipelineUploadClient *api_server.PipelineUploadClient + pipelineUploadClient api_server.PipelineUploadInterface runClient *api_server.RunClient recurringRunClient *api_server.RecurringRunClient } @@ -58,10 +60,9 @@ func (s *ExperimentApiTest) SetupTest() { } } - s.namespace = *namespace + s.namespace = *config.Namespace var newExperimentClient func() (*api_server.ExperimentClient, error) - var newPipelineUploadClient func() (*api_server.PipelineUploadClient, error) var newPipelineClient func() (*api_server.PipelineClient, error) var newRunClient func() (*api_server.RunClient, error) var newRecurringRunClient func() (*api_server.RecurringRunClient, error) @@ -70,37 +71,31 @@ func (s *ExperimentApiTest) SetupTest() { s.resourceNamespace = *resourceNamespace newExperimentClient = func() (*api_server.ExperimentClient, error) { - 
return api_server.NewKubeflowInClusterExperimentClient(s.namespace, *isDebugMode) - } - newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewKubeflowInClusterPipelineUploadClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterExperimentClient(s.namespace, *config.DebugMode) } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *config.DebugMode) } newRunClient = func() (*api_server.RunClient, error) { - return api_server.NewKubeflowInClusterRunClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterRunClient(s.namespace, *config.DebugMode) } newRecurringRunClient = func() (*api_server.RecurringRunClient, error) { - return api_server.NewKubeflowInClusterRecurringRunClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterRecurringRunClient(s.namespace, *config.DebugMode) } } else { - clientConfig := test.GetClientConfig(*namespace) + clientConfig := test.GetClientConfig(*config.Namespace) newExperimentClient = func() (*api_server.ExperimentClient, error) { - return api_server.NewExperimentClient(clientConfig, *isDebugMode) - } - newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewPipelineUploadClient(clientConfig, *isDebugMode) + return api_server.NewExperimentClient(clientConfig, *config.DebugMode) } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewPipelineClient(clientConfig, *isDebugMode) + return api_server.NewPipelineClient(clientConfig, *config.DebugMode) } newRunClient = func() (*api_server.RunClient, error) { - return api_server.NewRunClient(clientConfig, *isDebugMode) + return api_server.NewRunClient(clientConfig, *config.DebugMode) } newRecurringRunClient = func() (*api_server.RecurringRunClient, error) { - return api_server.NewRecurringRunClient(clientConfig, *isDebugMode) + return api_server.NewRecurringRunClient(clientConfig, *config.DebugMode) } } @@ -109,7 +104,13 @@ func (s *ExperimentApiTest) SetupTest() { if err != nil { glog.Exitf("Failed to get experiment client. Error: %v", err) } - s.pipelineUploadClient, err = newPipelineUploadClient() + s.pipelineUploadClient, err = test.GetPipelineUploadClient( + *uploadPipelinesWithKubernetes, + *isKubeflowMode, + *config.DebugMode, + s.namespace, + test.GetClientConfig(s.namespace), + ) if err != nil { glog.Exitf("Failed to get pipeline upload client. Error: %s", err.Error()) } @@ -143,18 +144,18 @@ func (s *ExperimentApiTest) TestExperimentAPI() { expectedTrainingExperiment := test.MakeExperiment("training", "my first experiment", s.resourceNamespace) trainingExperiment, err := s.experimentClient.Create(¶ms.ExperimentServiceCreateExperimentParams{ - Body: experiment, + Experiment: experiment, }) assert.Nil(t, err) expectedTrainingExperiment.ExperimentID = trainingExperiment.ExperimentID expectedTrainingExperiment.CreatedAt = trainingExperiment.CreatedAt - expectedTrainingExperiment.StorageState = "STORAGESTATE_AVAILABLE" + expectedTrainingExperiment.StorageState = (*experiment_model.V2beta1ExperimentStorageState)(util.StringPointer("STORAGESTATE_AVAILABLE")) expectedTrainingExperiment.Namespace = trainingExperiment.Namespace assert.Equal(t, expectedTrainingExperiment, trainingExperiment) /* ---------- Create an experiment with same name. 
Should fail due to name uniqueness ---------- */ - _, err = s.experimentClient.Create(¶ms.ExperimentServiceCreateExperimentParams{Body: experiment}) + _, err = s.experimentClient.Create(¶ms.ExperimentServiceCreateExperimentParams{Experiment: experiment}) assert.NotNil(t, err) assert.Contains(t, err.Error(), "Please specify a new name") @@ -163,12 +164,12 @@ func (s *ExperimentApiTest) TestExperimentAPI() { time.Sleep(1 * time.Second) experiment = test.MakeExperiment("prediction", "my second experiment", s.resourceNamespace) _, err = s.experimentClient.Create(¶ms.ExperimentServiceCreateExperimentParams{ - Body: experiment, + Experiment: experiment, }) time.Sleep(1 * time.Second) experiment = test.MakeExperiment("moonshot", "my second experiment", s.resourceNamespace) _, err = s.experimentClient.Create(¶ms.ExperimentServiceCreateExperimentParams{ - Body: experiment, + Experiment: experiment, }) assert.Nil(t, err) @@ -299,7 +300,7 @@ func (s *ExperimentApiTest) TestExperimentAPI() { Pipelineid: util.StringPointer(pipeline.PipelineID), }) assert.Nil(t, err) - createRunRequest := &run_params.RunServiceCreateRunParams{Body: &run_model.V2beta1Run{ + createRunRequest := &run_params.RunServiceCreateRunParams{Run: &run_model.V2beta1Run{ DisplayName: "hello world", Description: "this is hello world", ExperimentID: experiment.ExperimentID, @@ -313,7 +314,7 @@ func (s *ExperimentApiTest) TestExperimentAPI() { run2, err := s.runClient.Create(createRunRequest) assert.Nil(t, err) /* ---------- Create a new hello world job by specifying pipeline ID ---------- */ - createRecurringRunRequest := &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{Body: &recurring_run_model.V2beta1RecurringRun{ + createRecurringRunRequest := &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{RecurringRun: &recurring_run_model.V2beta1RecurringRun{ DisplayName: "hello world", Description: "this is hello world", ExperimentID: experiment.ExperimentID, @@ -322,7 +323,7 @@ func (s *ExperimentApiTest) TestExperimentAPI() { PipelineVersionID: pipelineVersion.PipelineVersionID, }, MaxConcurrency: 10, - Status: recurring_run_model.V2beta1RecurringRunStatusENABLED, + Status: recurring_run_model.V2beta1RecurringRunStatusENABLED.Pointer(), }} recurringRun1, err := s.recurringRunClient.Create(createRecurringRunRequest) assert.Nil(t, err) diff --git a/backend/test/v2/integration/flags.go b/backend/test/v2/integration/flags.go index 91109c63814..b51f81c2458 100644 --- a/backend/test/v2/integration/flags.go +++ b/backend/test/v2/integration/flags.go @@ -17,15 +17,21 @@ package integration import ( "flag" "time" + + "go.uber.org/zap/zapcore" + ctrllog "sigs.k8s.io/controller-runtime/pkg/log" + "sigs.k8s.io/controller-runtime/pkg/log/zap" + + "github.com/kubeflow/pipelines/backend/test/config" ) var ( - namespace = flag.String("namespace", "kubeflow", "The namespace ml pipeline deployed to") - initializeTimeout = flag.Duration("initializeTimeout", 2*time.Minute, "Duration to wait for test initialization") - runIntegrationTests = flag.Bool("runIntegrationTests", false, "Whether to also run integration tests that call the service") - runUpgradeTests = flag.Bool("runUpgradeTests", false, "Whether to run upgrade tests") - useProxy = flag.Bool("useProxy", false, "Whether to run the proxy tests") - cacheEnabled = flag.Bool("cacheEnabled", true, "Whether cache is enabled tests") + initializeTimeout = flag.Duration("initializeTimeout", 2*time.Minute, "Duration to wait for test initialization") + runIntegrationTests = 
flag.Bool("runIntegrationTests", false, "Whether to also run integration tests that call the service") + runUpgradeTests = flag.Bool("runUpgradeTests", false, "Whether to run upgrade tests") + useProxy = config.RunProxyTests + cacheEnabled = flag.Bool("cacheEnabled", true, "Whether cache is enabled tests") + uploadPipelinesWithKubernetes = config.UploadPipelinesWithKubernetes ) /** @@ -35,9 +41,11 @@ var ( */ var isDevMode = flag.Bool("isDevMode", false, "Dev mode helps local development of integration tests") -var isDebugMode = flag.Bool("isDebugMode", false, "Whether to enable debug mode. Debug mode will log more diagnostics messages.") - var ( isKubeflowMode = flag.Bool("isKubeflowMode", false, "Runs tests in full Kubeflow mode") resourceNamespace = flag.String("resourceNamespace", "", "The namespace that will store the test resources in Kubeflow mode") ) + +func init() { + ctrllog.SetLogger(zap.New(zap.UseFlagOptions(&zap.Options{Level: zapcore.InfoLevel}))) +} diff --git a/backend/test/v2/integration/healthz_api_test.go b/backend/test/v2/integration/healthz_api_test.go index 13db478812e..836b5018c22 100644 --- a/backend/test/v2/integration/healthz_api_test.go +++ b/backend/test/v2/integration/healthz_api_test.go @@ -18,9 +18,11 @@ import ( "os" "testing" - "github.com/golang/glog" api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v2" + "github.com/kubeflow/pipelines/backend/test/config" test "github.com/kubeflow/pipelines/backend/test/v2" + + "github.com/golang/glog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" ) @@ -44,8 +46,8 @@ func (s *HealthzApiTest) SetupTest() { glog.Exitf("Failed to initialize test. Error: %v", err) } } - s.namespace = *namespace - clientConfig := test.GetClientConfig(*namespace) + s.namespace = *config.Namespace + clientConfig := test.GetClientConfig(*config.Namespace) var err error s.healthzClient, err = api_server.NewHealthzClient(clientConfig, false) if err != nil { diff --git a/backend/test/v2/integration/pipeline_api_test.go b/backend/test/v2/integration/pipeline_api_test.go index b680a359a28..b783b80fb08 100644 --- a/backend/test/v2/integration/pipeline_api_test.go +++ b/backend/test/v2/integration/pipeline_api_test.go @@ -20,13 +20,15 @@ import ( "testing" "time" - "github.com/golang/glog" params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service" model "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" upload_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service" api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v2" "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/kubeflow/pipelines/backend/test/config" test "github.com/kubeflow/pipelines/backend/test/v2" + + "github.com/golang/glog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" @@ -43,7 +45,7 @@ type PipelineApiTest struct { namespace string resourceNamespace string pipelineClient *api_server.PipelineClient - pipelineUploadClient *api_server.PipelineUploadClient + pipelineUploadClient api_server.PipelineUploadInterface } // Check the namespace have ML job installed and ready @@ -59,33 +61,32 @@ func (s *PipelineApiTest) SetupTest() { glog.Exitf("Failed to initialize test. 
Error: %s", err.Error()) } } - s.namespace = *namespace + s.namespace = *config.Namespace - var newPipelineUploadClient func() (*api_server.PipelineUploadClient, error) var newPipelineClient func() (*api_server.PipelineClient, error) if *isKubeflowMode { s.resourceNamespace = *resourceNamespace - newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewKubeflowInClusterPipelineUploadClient(s.namespace, *isDebugMode) - } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *config.DebugMode) } } else { - clientConfig := test.GetClientConfig(*namespace) + clientConfig := test.GetClientConfig(*config.Namespace) - newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewPipelineUploadClient(clientConfig, *isDebugMode) - } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewPipelineClient(clientConfig, *isDebugMode) + return api_server.NewPipelineClient(clientConfig, *config.DebugMode) } } var err error - s.pipelineUploadClient, err = newPipelineUploadClient() + s.pipelineUploadClient, err = test.GetPipelineUploadClient( + *uploadPipelinesWithKubernetes, + *isKubeflowMode, + *config.DebugMode, + s.namespace, + test.GetClientConfig(s.namespace), + ) if err != nil { glog.Exitf("Failed to get pipeline upload client. Error: %s", err.Error()) } @@ -103,20 +104,38 @@ func (s *PipelineApiTest) TestPipelineAPI() { test.DeleteAllPipelines(s.pipelineClient, t) /* ------ Upload v2 pipeline spec YAML --------*/ - helloPipeline, err := s.pipelineUploadClient.UploadFile("../resources/hello-world.yaml", upload_params.NewUploadPipelineParams()) + helloPipeline, err := s.pipelineUploadClient.UploadFile( + "../resources/hello-world.yaml", + &upload_params.UploadPipelineParams{ + Name: util.StringPointer("hello-world"), + DisplayName: util.StringPointer("hello-world.yaml"), + }, + ) require.Nil(t, err) assert.Equal(t, "hello-world.yaml", helloPipeline.DisplayName) // Verify that the pipeline name defaults to the display name for backwards compatibility. - assert.Equal(t, "hello-world.yaml", helloPipeline.Name) + assert.Equal(t, "hello-world", helloPipeline.Name) /* ---------- Upload pipelines YAML ---------- */ time.Sleep(1 * time.Second) - argumentYAMLPipeline, err := s.pipelineUploadClient.UploadFile("../resources/arguments-parameters.yaml", upload_params.NewUploadPipelineParams()) + argumentYAMLPipeline, err := s.pipelineUploadClient.UploadFile( + "../resources/arguments-parameters.yaml", + &upload_params.UploadPipelineParams{ + Name: util.StringPointer("arguments-parameters"), + DisplayName: util.StringPointer("arguments-parameters.yaml"), + }, + ) require.Nil(t, err) assert.Equal(t, "arguments-parameters.yaml", argumentYAMLPipeline.DisplayName) /* ---------- Upload the same pipeline again. 
Should fail due to name uniqueness ---------- */ - _, err = s.pipelineUploadClient.UploadFile("../resources/arguments-parameters.yaml", upload_params.NewUploadPipelineParams()) + _, err = s.pipelineUploadClient.UploadFile( + "../resources/arguments-parameters.yaml", + &upload_params.UploadPipelineParams{ + Name: util.StringPointer("arguments-parameters"), + DisplayName: util.StringPointer("arguments-parameters.yaml"), + }, + ) require.NotNil(t, err) assert.Contains(t, err.Error(), "Failed to upload pipeline") @@ -160,19 +179,19 @@ func (s *PipelineApiTest) TestPipelineAPI() { /* ---------- Import pipeline tarball by URL ---------- */ pipelineURL := "https://github.com/opendatahub-io/data-science-pipelines/raw/refs/heads/master/backend/test/v2/resources/arguments.pipeline.zip" - if pullNumber := os.Getenv("PULL_NUMBER"); pullNumber != "" { + if pullNumber := os.Getenv("PullNumber"); pullNumber != "" { pipelineURL = fmt.Sprintf("https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines/pull/%s/head/backend/test/v2/resources/arguments.pipeline.zip", pullNumber) } time.Sleep(1 * time.Second) argumentUrlPipeline, err := s.pipelineClient.Create(¶ms.PipelineServiceCreatePipelineParams{ - Body: &model.V2beta1Pipeline{DisplayName: "arguments.pipeline.zip"}, + Pipeline: &model.V2beta1Pipeline{DisplayName: "arguments.pipeline.zip", Name: "arguments-pipeline-zip"}, }) require.Nil(t, err) argumentUrlPipelineVersion, err := s.pipelineClient.CreatePipelineVersion( ¶ms.PipelineServiceCreatePipelineVersionParams{ PipelineID: argumentUrlPipeline.PipelineID, - Body: &model.V2beta1PipelineVersion{ + PipelineVersion: &model.V2beta1PipelineVersion{ DisplayName: "argumenturl-v1", Description: "1st version of argument url pipeline", PipelineID: sequentialPipeline.PipelineID, @@ -185,7 +204,7 @@ func (s *PipelineApiTest) TestPipelineAPI() { assert.Equal(t, "argumenturl-v1", argumentUrlPipelineVersion.DisplayName) assert.Equal(t, "1st version of argument url pipeline", argumentUrlPipelineVersion.Description) assert.Equal(t, argumentUrlPipeline.PipelineID, argumentUrlPipelineVersion.PipelineID) - assert.Equal(t, pipelineURL, argumentUrlPipelineVersion.PackageURL.PipelineURL) + assert.Equal(t, pipelineURL, string(argumentUrlPipelineVersion.PackageURL.PipelineURL)) /* ---------- Verify list pipeline works ---------- */ pipelines, totalSize, _, err := s.pipelineClient.List(¶ms.PipelineServiceListPipelinesParams{}) diff --git a/backend/test/v2/integration/pipeline_version_api_test.go b/backend/test/v2/integration/pipeline_version_api_test.go index b493b00fc45..a9a128767e2 100644 --- a/backend/test/v2/integration/pipeline_version_api_test.go +++ b/backend/test/v2/integration/pipeline_version_api_test.go @@ -22,17 +22,20 @@ import ( "testing" "time" - "github.com/golang/glog" + "sigs.k8s.io/yaml" + params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service" "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" upload_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service" api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v2" "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/kubeflow/pipelines/backend/test/config" test "github.com/kubeflow/pipelines/backend/test/v2" + + "github.com/golang/glog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" - "sigs.k8s.io/yaml" 
) // This test suit tests various methods to import pipeline to pipeline system, including @@ -44,7 +47,7 @@ type PipelineVersionApiTest struct { suite.Suite namespace string pipelineClient *api_server.PipelineClient - pipelineUploadClient *api_server.PipelineUploadClient + pipelineUploadClient api_server.PipelineUploadInterface } // Check the namespace have ML job installed and ready @@ -61,31 +64,30 @@ func (s *PipelineVersionApiTest) SetupTest() { } } - var newPipelineUploadClient func() (*api_server.PipelineUploadClient, error) var newPipelineClient func() (*api_server.PipelineClient, error) - if *isKubeflowMode { - s.namespace = *namespace + s.namespace = *config.Namespace - newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewKubeflowInClusterPipelineUploadClient(s.namespace, *isDebugMode) - } + if *isKubeflowMode { newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *config.DebugMode) } } else { - clientConfig := test.GetClientConfig(*namespace) + clientConfig := test.GetClientConfig(*config.Namespace) - newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewPipelineUploadClient(clientConfig, *isDebugMode) - } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewPipelineClient(clientConfig, *isDebugMode) + return api_server.NewPipelineClient(clientConfig, *config.DebugMode) } } var err error - s.pipelineUploadClient, err = newPipelineUploadClient() + s.pipelineUploadClient, err = test.GetPipelineUploadClient( + *uploadPipelinesWithKubernetes, + *isKubeflowMode, + *config.DebugMode, + s.namespace, + test.GetClientConfig(s.namespace), + ) if err != nil { glog.Exitf("Failed to get pipeline upload client. 
Error: %s", err.Error()) } @@ -134,14 +136,18 @@ func (s *PipelineVersionApiTest) TestPipelineSpec() { assert.Contains(t, err.Error(), "Failed to upload pipeline version") /* ---------- Import pipeline version YAML by URL ---------- */ + pipelineURL := "https://raw.githubusercontent.com/kubeflow/pipelines/refs/heads/master/test_data/sdk_compiled_pipelines/valid/sequential_v2.yaml" + if pullNumber := os.Getenv("PullNumber"); pullNumber != "" { + pipelineURL = fmt.Sprintf("https://raw.githubusercontent.com/kubeflow/pipelines/pull/%s/head/test_data/sdk_compiled_pipelines/valid/sequential_v2.yaml", pullNumber) + } time.Sleep(1 * time.Second) sequentialPipelineVersion, err := s.pipelineClient.CreatePipelineVersion(¶ms.PipelineServiceCreatePipelineVersionParams{ PipelineID: pipelineId, - Body: &pipeline_model.V2beta1PipelineVersion{ + PipelineVersion: &pipeline_model.V2beta1PipelineVersion{ Name: "sequential-v2", DisplayName: "sequential", PackageURL: &pipeline_model.V2beta1URL{ - PipelineURL: "https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines/refs/heads/master/backend/test/v2/resources/sequential-v2.yaml", + PipelineURL: pipelineURL, }, PipelineID: pipelineId, }, @@ -161,16 +167,16 @@ func (s *PipelineVersionApiTest) TestPipelineSpec() { assert.Equal(t, "zip-arguments-parameters", argumentUploadPipelineVersion.DisplayName) /* ---------- Import pipeline tarball by URL ---------- */ - pipelineURL := "https://github.com/opendatahub-io/data-science-pipelines/raw/refs/heads/master/backend/test/v2/resources/arguments.pipeline.zip" + pipelineURL = "https://github.com/kubeflow/pipelines/raw/refs/heads/master/test_data/sdk_compiled_pipelines/valid/arguments.pipeline.zip" - if pullNumber := os.Getenv("PULL_NUMBER"); pullNumber != "" { - pipelineURL = fmt.Sprintf("https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines/pull/%s/head/backend/test/v2/resources/arguments.pipeline.zip", pullNumber) + if pullNumber := os.Getenv("PullNumber"); pullNumber != "" { + pipelineURL = fmt.Sprintf("https://raw.githubusercontent.com/kubeflow/pipelines/pull/%s/head/test_data/sdk_compiled_pipelines/valid/arguments.pipeline.zip", pullNumber) } time.Sleep(1 * time.Second) argumentUrlPipelineVersion, err := s.pipelineClient.CreatePipelineVersion(¶ms.PipelineServiceCreatePipelineVersionParams{ PipelineID: pipelineId, - Body: &pipeline_model.V2beta1PipelineVersion{ + PipelineVersion: &pipeline_model.V2beta1PipelineVersion{ DisplayName: "arguments", PackageURL: &pipeline_model.V2beta1URL{ PipelineURL: pipelineURL, diff --git a/backend/test/v2/integration/proxy_test.go b/backend/test/v2/integration/proxy_test.go deleted file mode 100644 index 33b6457c432..00000000000 --- a/backend/test/v2/integration/proxy_test.go +++ /dev/null @@ -1,184 +0,0 @@ -// Copyright 2025 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package integration - -import ( - "testing" - "time" - - "github.com/golang/glog" - experimentparams "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_client/experiment_service" - uploadparams "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service" - runparams "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service" - "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" - apiserver "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v2" - "github.com/kubeflow/pipelines/backend/src/common/util" - "github.com/kubeflow/pipelines/backend/test/v2" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/stretchr/testify/suite" -) - -type ProxyTestSuite struct { - suite.Suite - namespace string - resourceNamespace string - experimentClient *apiserver.ExperimentClient - pipelineClient *apiserver.PipelineClient - pipelineUploadClient *apiserver.PipelineUploadClient - runClient *apiserver.RunClient -} - -func (s *ProxyTestSuite) SetupTest() { - if !*runIntegrationTests { - s.T().SkipNow() - return - } - - if !*isDevMode { - err := test.WaitForReady(*initializeTimeout) - if err != nil { - glog.Exitf("Failed to initialize test. Error: %s", err.Error()) - } - } - s.namespace = *namespace - - var newExperimentClient func() (*apiserver.ExperimentClient, error) - var newPipelineUploadClient func() (*apiserver.PipelineUploadClient, error) - var newPipelineClient func() (*apiserver.PipelineClient, error) - var newRunClient func() (*apiserver.RunClient, error) - - if *isKubeflowMode { - s.resourceNamespace = *resourceNamespace - - newExperimentClient = func() (*apiserver.ExperimentClient, error) { - return apiserver.NewKubeflowInClusterExperimentClient(s.namespace, *isDebugMode) - } - newPipelineUploadClient = func() (*apiserver.PipelineUploadClient, error) { - return apiserver.NewKubeflowInClusterPipelineUploadClient(s.namespace, *isDebugMode) - } - newPipelineClient = func() (*apiserver.PipelineClient, error) { - return apiserver.NewKubeflowInClusterPipelineClient(s.namespace, *isDebugMode) - } - newRunClient = func() (*apiserver.RunClient, error) { - return apiserver.NewKubeflowInClusterRunClient(s.namespace, *isDebugMode) - } - } else { - clientConfig := test.GetClientConfig(*namespace) - - newExperimentClient = func() (*apiserver.ExperimentClient, error) { - return apiserver.NewExperimentClient(clientConfig, *isDebugMode) - } - newPipelineUploadClient = func() (*apiserver.PipelineUploadClient, error) { - return apiserver.NewPipelineUploadClient(clientConfig, *isDebugMode) - } - newPipelineClient = func() (*apiserver.PipelineClient, error) { - return apiserver.NewPipelineClient(clientConfig, *isDebugMode) - } - newRunClient = func() (*apiserver.RunClient, error) { - return apiserver.NewRunClient(clientConfig, *isDebugMode) - } - } - - var err error - s.experimentClient, err = newExperimentClient() - if err != nil { - glog.Exitf("Failed to get experiment client. Error: %v", err) - } - s.pipelineUploadClient, err = newPipelineUploadClient() - if err != nil { - glog.Exitf("Failed to get pipeline upload client. Error: %s", err.Error()) - } - s.pipelineClient, err = newPipelineClient() - if err != nil { - glog.Exitf("Failed to get pipeline client. Error: %s", err.Error()) - } - s.runClient, err = newRunClient() - if err != nil { - glog.Exitf("Failed to get run client. 
Error: %s", err.Error()) - } - - s.cleanUp() -} - -func (s *ProxyTestSuite) TestEnvVar() { - t := s.T() - - /* ---------- Upload pipelines YAML ---------- */ - envVarPipeline, err := s.pipelineUploadClient.UploadFile("../resources/env-var.yaml", uploadparams.NewUploadPipelineParams()) - require.Nil(t, err) - - /* ---------- Upload a pipeline version YAML under envVarPipeline ---------- */ - time.Sleep(1 * time.Second) - envVarPipelineVersion, err := s.pipelineUploadClient.UploadPipelineVersion( - "../resources/env-var.yaml", &uploadparams.UploadPipelineVersionParams{ - Name: util.StringPointer("env-var-version"), - Pipelineid: util.StringPointer(envVarPipeline.PipelineID), - }) - require.Nil(t, err) - - /* ---------- Create a new env var experiment ---------- */ - experiment := test.MakeExperiment("env var experiment", "", s.resourceNamespace) - envVarExperiment, err := s.experimentClient.Create(&experimentparams.ExperimentServiceCreateExperimentParams{Body: experiment}) - require.Nil(t, err) - - /* ---------- Create a new env var run by specifying pipeline version ID ---------- */ - createRunRequest := &runparams.RunServiceCreateRunParams{Body: &run_model.V2beta1Run{ - DisplayName: "env var", - Description: "this is env var", - ExperimentID: envVarExperiment.ExperimentID, - PipelineVersionReference: &run_model.V2beta1PipelineVersionReference{ - PipelineID: envVarPipelineVersion.PipelineID, - PipelineVersionID: envVarPipelineVersion.PipelineVersionID, - }, - RuntimeConfig: &run_model.V2beta1RuntimeConfig{ - Parameters: map[string]interface{}{ - "env_var": "http_proxy", - }, - }, - }} - envVarRunDetail, err := s.runClient.Create(createRunRequest) - require.Nil(t, err) - - expectedState := run_model.V2beta1RuntimeStateFAILED - if *useProxy { - expectedState = run_model.V2beta1RuntimeStateSUCCEEDED - } - - assert.Eventually(t, func() bool { - envVarRunDetail, err = s.runClient.Get(&runparams.RunServiceGetRunParams{RunID: envVarRunDetail.RunID}) - t.Logf("Pipeline state: %v", envVarRunDetail.State) - return err == nil && envVarRunDetail.State == expectedState - }, 2*time.Minute, 10*time.Second) -} - -func TestProxy(t *testing.T) { - suite.Run(t, new(ProxyTestSuite)) -} - -func (s *ProxyTestSuite) TearDownSuite() { - if *runIntegrationTests { - if !*isDevMode { - s.cleanUp() - } - } -} - -func (s *ProxyTestSuite) cleanUp() { - /* ---------- Clean up ---------- */ - test.DeleteAllRuns(s.runClient, s.resourceNamespace, s.T()) - test.DeleteAllPipelines(s.pipelineClient, s.T()) - test.DeleteAllExperiments(s.experimentClient, s.resourceNamespace, s.T()) -} diff --git a/backend/test/v2/integration/recurring_run_api_test.go b/backend/test/v2/integration/recurring_run_api_test.go index e17e826a134..4b5f58deac5 100644 --- a/backend/test/v2/integration/recurring_run_api_test.go +++ b/backend/test/v2/integration/recurring_run_api_test.go @@ -22,9 +22,9 @@ import ( "testing" "time" - "github.com/eapache/go-resiliency/retrier" - "github.com/go-openapi/strfmt" - "github.com/golang/glog" + "google.golang.org/protobuf/types/known/structpb" + "sigs.k8s.io/yaml" + experiment_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_client/experiment_service" params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service" upload_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service" @@ -35,12 +35,15 @@ import ( "github.com/kubeflow/pipelines/backend/src/apiserver/client" 
api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v2" "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/kubeflow/pipelines/backend/test/config" test "github.com/kubeflow/pipelines/backend/test/v2" + + "github.com/eapache/go-resiliency/retrier" + "github.com/go-openapi/strfmt" + "github.com/golang/glog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" - "google.golang.org/protobuf/types/known/structpb" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "sigs.k8s.io/yaml" ) const ( @@ -55,7 +58,7 @@ type RecurringRunApiTestSuite struct { resourceNamespace string experimentClient *api_server.ExperimentClient pipelineClient *api_server.PipelineClient - pipelineUploadClient *api_server.PipelineUploadClient + pipelineUploadClient api_server.PipelineUploadInterface runClient *api_server.RunClient recurringRunClient *api_server.RecurringRunClient swfClient client.SwfClientInterface @@ -74,10 +77,9 @@ func (s *RecurringRunApiTestSuite) SetupTest() { glog.Exitf("Failed to initialize test. Error: %s", err.Error()) } } - s.namespace = *namespace + s.namespace = *config.Namespace var newExperimentClient func() (*api_server.ExperimentClient, error) - var newPipelineUploadClient func() (*api_server.PipelineUploadClient, error) var newPipelineClient func() (*api_server.PipelineClient, error) var newRunClient func() (*api_server.RunClient, error) var newRecurringRunClient func() (*api_server.RecurringRunClient, error) @@ -86,37 +88,31 @@ func (s *RecurringRunApiTestSuite) SetupTest() { s.resourceNamespace = *resourceNamespace newExperimentClient = func() (*api_server.ExperimentClient, error) { - return api_server.NewKubeflowInClusterExperimentClient(s.namespace, *isDebugMode) - } - newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewKubeflowInClusterPipelineUploadClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterExperimentClient(s.namespace, *config.DebugMode) } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *config.DebugMode) } newRunClient = func() (*api_server.RunClient, error) { - return api_server.NewKubeflowInClusterRunClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterRunClient(s.namespace, *config.DebugMode) } newRecurringRunClient = func() (*api_server.RecurringRunClient, error) { - return api_server.NewKubeflowInClusterRecurringRunClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterRecurringRunClient(s.namespace, *config.DebugMode) } } else { - clientConfig := test.GetClientConfig(*namespace) + clientConfig := test.GetClientConfig(*config.Namespace) newExperimentClient = func() (*api_server.ExperimentClient, error) { - return api_server.NewExperimentClient(clientConfig, *isDebugMode) - } - newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewPipelineUploadClient(clientConfig, *isDebugMode) + return api_server.NewExperimentClient(clientConfig, *config.DebugMode) } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewPipelineClient(clientConfig, *isDebugMode) + return api_server.NewPipelineClient(clientConfig, *config.DebugMode) } newRunClient = func() (*api_server.RunClient, error) { - return api_server.NewRunClient(clientConfig, *isDebugMode) + return 
api_server.NewRunClient(clientConfig, *config.DebugMode) } newRecurringRunClient = func() (*api_server.RecurringRunClient, error) { - return api_server.NewRecurringRunClient(clientConfig, *isDebugMode) + return api_server.NewRecurringRunClient(clientConfig, *config.DebugMode) } } @@ -125,7 +121,13 @@ func (s *RecurringRunApiTestSuite) SetupTest() { if err != nil { glog.Exitf("Failed to get experiment client. Error: %v", err) } - s.pipelineUploadClient, err = newPipelineUploadClient() + s.pipelineUploadClient, err = test.GetPipelineUploadClient( + *uploadPipelinesWithKubernetes, + *isKubeflowMode, + *config.DebugMode, + s.namespace, + test.GetClientConfig(s.namespace), + ) if err != nil { glog.Exitf("Failed to get pipeline upload client. Error: %s", err.Error()) } @@ -164,11 +166,11 @@ func (s *RecurringRunApiTestSuite) TestRecurringRunApis() { /* ---------- Create a new hello world experiment ---------- */ experiment := test.MakeExperiment("hello world experiment", "", s.resourceNamespace) - helloWorldExperiment, err := s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{Body: experiment}) + helloWorldExperiment, err := s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{Experiment: experiment}) assert.Nil(t, err) /* ---------- Create a new hello world recurringRun by specifying pipeline ID ---------- */ - createRecurringRunRequest := &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{Body: &recurring_run_model.V2beta1RecurringRun{ + createRecurringRunRequest := &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{RecurringRun: &recurring_run_model.V2beta1RecurringRun{ DisplayName: "hello world", Description: "this is hello world", ExperimentID: helloWorldExperiment.ExperimentID, @@ -177,7 +179,7 @@ func (s *RecurringRunApiTestSuite) TestRecurringRunApis() { PipelineVersionID: helloWorldPipelineVersion.PipelineVersionID, }, MaxConcurrency: 10, - Mode: recurring_run_model.RecurringRunModeENABLE, + Mode: recurring_run_model.RecurringRunModeENABLE.Pointer(), }} helloWorldRecurringRun, err := s.recurringRunClient.Create(createRecurringRunRequest) assert.Nil(t, err) @@ -190,7 +192,7 @@ func (s *RecurringRunApiTestSuite) TestRecurringRunApis() { /* ---------- Create a new argument parameter experiment ---------- */ experiment = test.MakeExperiment("argument parameter experiment", "", s.resourceNamespace) - argParamsExperiment, err := s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{Body: experiment}) + argParamsExperiment, err := s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{Experiment: experiment}) assert.Nil(t, err) /* ---------- Create a new argument parameter recurringRun by uploading workflow manifest ---------- */ @@ -203,7 +205,7 @@ func (s *RecurringRunApiTestSuite) TestRecurringRunApis() { err = yaml.Unmarshal(argParamsBytes, pipeline_spec) assert.Nil(t, err) - createRecurringRunRequest = &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{Body: &recurring_run_model.V2beta1RecurringRun{ + createRecurringRunRequest = &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{RecurringRun: &recurring_run_model.V2beta1RecurringRun{ DisplayName: "argument parameter", Description: "this is argument parameter", ExperimentID: argParamsExperiment.ExperimentID, @@ -215,7 +217,7 @@ func (s *RecurringRunApiTestSuite) TestRecurringRunApis() { }, }, MaxConcurrency: 10, - Mode: 
recurring_run_model.RecurringRunModeENABLE, + Mode: recurring_run_model.RecurringRunModeENABLE.Pointer(), }} argParamsRecurringRun, err := s.recurringRunClient.Create(createRecurringRunRequest) assert.Nil(t, err) @@ -300,7 +302,7 @@ func (s *RecurringRunApiTestSuite) TestRecurringRunApis() { time.Sleep(5 * time.Second) // Sleep for 5 seconds to make sure the previous recurringRuns are created at a different timestamp filterTime := time.Now().Unix() time.Sleep(5 * time.Second) - createRecurringRunRequestNew := &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{Body: &recurring_run_model.V2beta1RecurringRun{ + createRecurringRunRequestNew := &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{RecurringRun: &recurring_run_model.V2beta1RecurringRun{ DisplayName: "new hello world recurringRun", Description: "this is a new hello world", ExperimentID: helloWorldExperiment.ExperimentID, @@ -309,7 +311,7 @@ func (s *RecurringRunApiTestSuite) TestRecurringRunApis() { PipelineVersionID: helloWorldPipelineVersion.PipelineVersionID, }, MaxConcurrency: 10, - Mode: recurring_run_model.RecurringRunModeDISABLE, + Mode: recurring_run_model.RecurringRunModeDISABLE.Pointer(), }} _, err = s.recurringRunClient.Create(createRecurringRunRequestNew) assert.Nil(t, err) @@ -391,11 +393,11 @@ func (s *RecurringRunApiTestSuite) TestRecurringRunApisUseLatest() { /* ---------- Create a new hello world experiment ---------- */ experiment := test.MakeExperiment("hello world experiment", "", s.resourceNamespace) - helloWorldExperiment, err := s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{Body: experiment}) + helloWorldExperiment, err := s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{Experiment: experiment}) assert.Nil(t, err) /* ---------- Create a new hello world recurringRun by specifying pipeline ID without a version ---------- */ - createRecurringRunRequest := &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{Body: &recurring_run_model.V2beta1RecurringRun{ + createRecurringRunRequest := &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{RecurringRun: &recurring_run_model.V2beta1RecurringRun{ DisplayName: "hello world with latest pipeline version", Description: "this is hello world", ExperimentID: helloWorldExperiment.ExperimentID, @@ -403,7 +405,7 @@ func (s *RecurringRunApiTestSuite) TestRecurringRunApisUseLatest() { PipelineID: helloWorldPipelineVersion.PipelineID, }, MaxConcurrency: 10, - Mode: recurring_run_model.RecurringRunModeENABLE, + Mode: recurring_run_model.RecurringRunModeENABLE.Pointer(), }} helloWorldRecurringRun, err := s.recurringRunClient.Create(createRecurringRunRequest) assert.Nil(t, err) @@ -460,7 +462,7 @@ func (s *RecurringRunApiTestSuite) TestRecurringRunApis_noCatchupOption() { /* ---------- Create a periodic recurringRun with start and end date in the past and catchup = true ---------- */ experiment := test.MakeExperiment("periodic catchup true", "", s.resourceNamespace) - periodicCatchupTrueExperiment, err := s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{Body: experiment}) + periodicCatchupTrueExperiment, err := s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{Experiment: experiment}) assert.Nil(t, err) recurringRun := recurringRunInThePastForTwoMinutes(recurringRunOptions{ @@ -472,13 +474,13 @@ func (s *RecurringRunApiTestSuite) TestRecurringRunApis_noCatchupOption() { 
recurringRun.DisplayName = "periodic-catchup-true-" recurringRun.Description = "A recurringRun with NoCatchup=false will backfill each past interval when behind schedule." recurringRun.NoCatchup = false // This is the key difference. - createRecurringRunRequest := &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{Body: recurringRun} + createRecurringRunRequest := &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{RecurringRun: recurringRun} _, err = s.recurringRunClient.Create(createRecurringRunRequest) assert.Nil(t, err) /* -------- Create another periodic recurringRun with start and end date in the past but catchup = false ------ */ experiment = test.MakeExperiment("periodic catchup false", "", s.resourceNamespace) - periodicCatchupFalseExperiment, err := s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{Body: experiment}) + periodicCatchupFalseExperiment, err := s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{Experiment: experiment}) assert.Nil(t, err) recurringRun = recurringRunInThePastForTwoMinutes(recurringRunOptions{ @@ -490,13 +492,13 @@ func (s *RecurringRunApiTestSuite) TestRecurringRunApis_noCatchupOption() { recurringRun.DisplayName = "periodic-catchup-false-" recurringRun.Description = "A recurringRun with NoCatchup=true only schedules the last interval when behind schedule." recurringRun.NoCatchup = true // This is the key difference. - createRecurringRunRequest = &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{Body: recurringRun} + createRecurringRunRequest = &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{RecurringRun: recurringRun} _, err = s.recurringRunClient.Create(createRecurringRunRequest) assert.Nil(t, err) /* ---------- Create a cron recurringRun with start and end date in the past and catchup = true ---------- */ experiment = test.MakeExperiment("cron catchup true", "", s.resourceNamespace) - cronCatchupTrueExperiment, err := s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{Body: experiment}) + cronCatchupTrueExperiment, err := s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{Experiment: experiment}) assert.Nil(t, err) recurringRun = recurringRunInThePastForTwoMinutes(recurringRunOptions{ @@ -508,13 +510,13 @@ func (s *RecurringRunApiTestSuite) TestRecurringRunApis_noCatchupOption() { recurringRun.DisplayName = "cron-catchup-true-" recurringRun.Description = "A recurringRun with NoCatchup=false will backfill each past interval when behind schedule." recurringRun.NoCatchup = false // This is the key difference. 
- createRecurringRunRequest = &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{Body: recurringRun} + createRecurringRunRequest = &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{RecurringRun: recurringRun} _, err = s.recurringRunClient.Create(createRecurringRunRequest) assert.Nil(t, err) /* -------- Create another cron recurringRun with start and end date in the past but catchup = false ------ */ experiment = test.MakeExperiment("cron catchup false", "", s.resourceNamespace) - cronCatchupFalseExperiment, err := s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{Body: experiment}) + cronCatchupFalseExperiment, err := s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{Experiment: experiment}) assert.Nil(t, err) recurringRun = recurringRunInThePastForTwoMinutes(recurringRunOptions{ @@ -526,7 +528,7 @@ func (s *RecurringRunApiTestSuite) TestRecurringRunApis_noCatchupOption() { recurringRun.DisplayName = "cron-catchup-false-" recurringRun.Description = "A recurringRun with NoCatchup=true only schedules the last interval when behind schedule." recurringRun.NoCatchup = true // This is the key difference. - createRecurringRunRequest = &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{Body: recurringRun} + createRecurringRunRequest = &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{RecurringRun: recurringRun} _, err = s.recurringRunClient.Create(createRecurringRunRequest) assert.Nil(t, err) @@ -600,7 +602,7 @@ func (s *RecurringRunApiTestSuite) checkHelloWorldRecurringRun(t *testing.T, rec PipelineVersionID: pipelineVersionId, }, MaxConcurrency: 10, - Mode: recurring_run_model.RecurringRunModeENABLE, + Mode: recurring_run_model.RecurringRunModeENABLE.Pointer(), Namespace: recurringRun.Namespace, CreatedAt: recurringRun.CreatedAt, UpdatedAt: recurringRun.UpdatedAt, @@ -625,7 +627,7 @@ func (s *RecurringRunApiTestSuite) checkArgParamsRecurringRun(t *testing.T, recu }, ExperimentID: experimentID, MaxConcurrency: 10, - Mode: recurring_run_model.RecurringRunModeENABLE, + Mode: recurring_run_model.RecurringRunModeENABLE.Pointer(), Namespace: recurringRun.Namespace, CreatedAt: recurringRun.CreatedAt, UpdatedAt: recurringRun.UpdatedAt, @@ -649,10 +651,10 @@ func (s *RecurringRunApiTestSuite) TestRecurringRunApis_SwfNotFound() { /* ---------- Create a new hello world recurringRun by specifying pipeline ID ---------- */ experiment := test.MakeExperiment("test-swf-not-found experiment", "", s.resourceNamespace) - swfNotFoundExperiment, err := s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{Body: experiment}) + swfNotFoundExperiment, err := s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{Experiment: experiment}) assert.Nil(t, err) - createRecurringRunRequest := &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{Body: &recurring_run_model.V2beta1RecurringRun{ + createRecurringRunRequest := &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{RecurringRun: &recurring_run_model.V2beta1RecurringRun{ DisplayName: "test-swf-not-found", ExperimentID: swfNotFoundExperiment.ExperimentID, PipelineVersionReference: &recurring_run_model.V2beta1PipelineVersionReference{ @@ -660,7 +662,7 @@ func (s *RecurringRunApiTestSuite) TestRecurringRunApis_SwfNotFound() { PipelineVersionID: pipelineVersions[0].PipelineVersionID, }, MaxConcurrency: 10, - Mode: 
recurring_run_model.RecurringRunModeDISABLE, + Mode: recurring_run_model.RecurringRunModeDISABLE.Pointer(), }} recurringRun, err := s.recurringRunClient.Create(createRecurringRunRequest) @@ -755,7 +757,7 @@ func defaultV2beta1RecurringRun(pipelineId, pipelineVersionId, experimentId stri IntervalSecond: 60, }, }, - Mode: recurring_run_model.RecurringRunModeENABLE, + Mode: recurring_run_model.RecurringRunModeENABLE.Pointer(), } } diff --git a/backend/test/v2/integration/run_api_test.go b/backend/test/v2/integration/run_api_test.go index 6b489c53a5f..e55d083726e 100644 --- a/backend/test/v2/integration/run_api_test.go +++ b/backend/test/v2/integration/run_api_test.go @@ -21,32 +21,35 @@ import ( "testing" "time" - "github.com/golang/glog" + "google.golang.org/protobuf/types/known/structpb" + "sigs.k8s.io/yaml" + experiment_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_client/experiment_service" upload_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service" run_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service" "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v2" "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/kubeflow/pipelines/backend/test/config" test "github.com/kubeflow/pipelines/backend/test/v2" + + "github.com/golang/glog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" - "google.golang.org/protobuf/types/known/structpb" - "sigs.k8s.io/yaml" ) -type RunApiTestSuite struct { +type RunAPITestSuite struct { suite.Suite namespace string resourceNamespace string experimentClient *api_server.ExperimentClient pipelineClient *api_server.PipelineClient - pipelineUploadClient *api_server.PipelineUploadClient + pipelineUploadClient api_server.PipelineUploadInterface runClient *api_server.RunClient } // Check the namespace have ML pipeline installed and ready -func (s *RunApiTestSuite) SetupTest() { +func (s *RunAPITestSuite) SetupTest() { if !*runIntegrationTests { s.T().SkipNow() return @@ -58,10 +61,9 @@ func (s *RunApiTestSuite) SetupTest() { glog.Exitf("Failed to initialize test. 
Error: %s", err.Error()) } } - s.namespace = *namespace + s.namespace = *config.Namespace var newExperimentClient func() (*api_server.ExperimentClient, error) - var newPipelineUploadClient func() (*api_server.PipelineUploadClient, error) var newPipelineClient func() (*api_server.PipelineClient, error) var newRunClient func() (*api_server.RunClient, error) @@ -69,31 +71,25 @@ func (s *RunApiTestSuite) SetupTest() { s.resourceNamespace = *resourceNamespace newExperimentClient = func() (*api_server.ExperimentClient, error) { - return api_server.NewKubeflowInClusterExperimentClient(s.namespace, *isDebugMode) - } - newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewKubeflowInClusterPipelineUploadClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterExperimentClient(s.namespace, *config.DebugMode) } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *config.DebugMode) } newRunClient = func() (*api_server.RunClient, error) { - return api_server.NewKubeflowInClusterRunClient(s.namespace, *isDebugMode) + return api_server.NewKubeflowInClusterRunClient(s.namespace, *config.DebugMode) } } else { - clientConfig := test.GetClientConfig(*namespace) + clientConfig := test.GetClientConfig(*config.Namespace) newExperimentClient = func() (*api_server.ExperimentClient, error) { - return api_server.NewExperimentClient(clientConfig, *isDebugMode) - } - newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewPipelineUploadClient(clientConfig, *isDebugMode) + return api_server.NewExperimentClient(clientConfig, *config.DebugMode) } newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewPipelineClient(clientConfig, *isDebugMode) + return api_server.NewPipelineClient(clientConfig, *config.DebugMode) } newRunClient = func() (*api_server.RunClient, error) { - return api_server.NewRunClient(clientConfig, *isDebugMode) + return api_server.NewRunClient(clientConfig, *config.DebugMode) } } @@ -102,7 +98,13 @@ func (s *RunApiTestSuite) SetupTest() { if err != nil { glog.Exitf("Failed to get experiment client. Error: %v", err) } - s.pipelineUploadClient, err = newPipelineUploadClient() + s.pipelineUploadClient, err = test.GetPipelineUploadClient( + *uploadPipelinesWithKubernetes, + *isKubeflowMode, + *config.DebugMode, + s.namespace, + test.GetClientConfig(s.namespace), + ) if err != nil { glog.Exitf("Failed to get pipeline upload client. 
Error: %s", err.Error()) } @@ -118,7 +120,7 @@ func (s *RunApiTestSuite) SetupTest() { s.cleanUp() } -func (s *RunApiTestSuite) TestRunApis() { +func (s *RunAPITestSuite) TestRunAPIs() { t := s.T() /* ---------- Upload pipelines YAML ---------- */ @@ -136,11 +138,11 @@ func (s *RunApiTestSuite) TestRunApis() { /* ---------- Create a new hello world experiment ---------- */ experiment := test.MakeExperiment("hello world experiment", "", s.resourceNamespace) - helloWorldExperiment, err := s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{Body: experiment}) + helloWorldExperiment, err := s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{Experiment: experiment}) assert.Nil(t, err) /* ---------- Create a new hello world run by specifying pipeline version ID ---------- */ - createRunRequest := &run_params.RunServiceCreateRunParams{Body: &run_model.V2beta1Run{ + createRunRequest := &run_params.RunServiceCreateRunParams{Run: &run_model.V2beta1Run{ DisplayName: "hello world", Description: "this is hello world", ExperimentID: helloWorldExperiment.ExperimentID, @@ -160,7 +162,7 @@ func (s *RunApiTestSuite) TestRunApis() { /* ---------- Create a new argument parameter experiment ---------- */ createExperimentRequest := &experiment_params.ExperimentServiceCreateExperimentParams{ - Body: test.MakeExperiment("argument parameter experiment", "", s.resourceNamespace), + Experiment: test.MakeExperiment("argument parameter experiment", "", s.resourceNamespace), } argParamsExperiment, err := s.experimentClient.Create(createExperimentRequest) assert.Nil(t, err) @@ -168,14 +170,14 @@ func (s *RunApiTestSuite) TestRunApis() { /* ---------- Create a new argument parameter run by uploading workflow manifest ---------- */ argParamsBytes, err := os.ReadFile("../resources/arguments-parameters.yaml") assert.Nil(t, err) - pipeline_spec := &structpb.Struct{} - err = yaml.Unmarshal(argParamsBytes, pipeline_spec) + pipelineSpec := &structpb.Struct{} + err = yaml.Unmarshal(argParamsBytes, pipelineSpec) assert.Nil(t, err) - createRunRequest = &run_params.RunServiceCreateRunParams{Body: &run_model.V2beta1Run{ + createRunRequest = &run_params.RunServiceCreateRunParams{Run: &run_model.V2beta1Run{ DisplayName: "argument parameter", Description: "this is argument parameter", - PipelineSpec: pipeline_spec, + PipelineSpec: pipelineSpec, RuntimeConfig: &run_model.V2beta1RuntimeConfig{ Parameters: map[string]interface{}{ "param1": "goodbye", @@ -266,7 +268,7 @@ func (s *RunApiTestSuite) TestRunApis() { filterTime := time.Now().Unix() time.Sleep(5 * time.Second) // Create a new run - createRunRequest.Body.DisplayName = "argument parameter 2" + createRunRequest.Run.DisplayName = "argument parameter 2" _, err = s.runClient.Create(createRunRequest) assert.Nil(t, err) // Check total number of runs is 3 @@ -299,7 +301,7 @@ func (s *RunApiTestSuite) TestRunApis() { assert.Equal(t, 1, len(runs)) assert.Equal(t, 1, totalSize) assert.Equal(t, "hello world", runs[0].DisplayName) - assert.Equal(t, run_model.V2beta1RunStorageStateARCHIVED, runs[0].StorageState) + assert.Equal(t, run_model.V2beta1RunStorageStateARCHIVED, *runs[0].StorageState) /* ---------- Upload long-running pipeline YAML ---------- */ longRunningPipeline, err := s.pipelineUploadClient.UploadFile("../resources/long-running.yaml", upload_params.NewUploadPipelineParamsWithTimeout(10*time.Second)) @@ -314,7 +316,7 @@ func (s *RunApiTestSuite) TestRunApis() { assert.Nil(t, err) /* ---------- Create a new 
long-running run by specifying pipeline ID ---------- */ - createLongRunningRunRequest := &run_params.RunServiceCreateRunParams{Body: &run_model.V2beta1Run{ + createLongRunningRunRequest := &run_params.RunServiceCreateRunParams{Run: &run_model.V2beta1Run{ DisplayName: "long running", Description: "this pipeline will run long enough for us to manually terminate it before it finishes", ExperimentID: helloWorldExperiment.ExperimentID, @@ -338,21 +340,21 @@ func (s *RunApiTestSuite) TestRunApis() { s.checkTerminatedRunDetail(t, longRunningRun, helloWorldExperiment.ExperimentID, longRunningPipelineVersion.PipelineID, longRunningPipelineVersion.PipelineVersionID) } -func (s *RunApiTestSuite) checkTerminatedRunDetail(t *testing.T, run *run_model.V2beta1Run, experimentId string, pipelineId string, pipelineVersionId string) { +func (s *RunAPITestSuite) checkTerminatedRunDetail(t *testing.T, run *run_model.V2beta1Run, experimentID string, pipelineID string, pipelineVersionID string) { expectedRun := &run_model.V2beta1Run{ RunID: run.RunID, DisplayName: "long running", Description: "this pipeline will run long enough for us to manually terminate it before it finishes", - State: run_model.V2beta1RuntimeStateCANCELING, + State: run_model.V2beta1RuntimeStateCANCELING.Pointer(), StateHistory: run.StateHistory, StorageState: run.StorageState, ServiceAccount: test.GetDefaultPipelineRunnerServiceAccount(*isKubeflowMode), PipelineSpec: run.PipelineSpec, - ExperimentID: experimentId, + ExperimentID: experimentID, PipelineVersionReference: &run_model.V2beta1PipelineVersionReference{ - PipelineID: pipelineId, - PipelineVersionID: pipelineVersionId, + PipelineID: pipelineID, + PipelineVersionID: pipelineVersionID, }, CreatedAt: run.CreatedAt, ScheduledAt: run.ScheduledAt, @@ -362,7 +364,7 @@ func (s *RunApiTestSuite) checkTerminatedRunDetail(t *testing.T, run *run_model. assert.Equal(t, expectedRun, run) } -func (s *RunApiTestSuite) checkHelloWorldRunDetail(t *testing.T, run *run_model.V2beta1Run, experimentId string, pipelineId string, pipelineVersionId string) { +func (s *RunAPITestSuite) checkHelloWorldRunDetail(t *testing.T, run *run_model.V2beta1Run, experimentID string, pipelineID string, pipelineVersionID string) { expectedRun := &run_model.V2beta1Run{ RunID: run.RunID, @@ -373,10 +375,10 @@ func (s *RunApiTestSuite) checkHelloWorldRunDetail(t *testing.T, run *run_model. StorageState: run.StorageState, ServiceAccount: test.GetDefaultPipelineRunnerServiceAccount(*isKubeflowMode), PipelineSpec: run.PipelineSpec, - ExperimentID: experimentId, + ExperimentID: experimentID, PipelineVersionReference: &run_model.V2beta1PipelineVersionReference{ - PipelineID: pipelineId, - PipelineVersionID: pipelineVersionId, + PipelineID: pipelineID, + PipelineVersionID: pipelineVersionID, }, CreatedAt: run.CreatedAt, ScheduledAt: run.ScheduledAt, @@ -386,7 +388,7 @@ func (s *RunApiTestSuite) checkHelloWorldRunDetail(t *testing.T, run *run_model. assert.Equal(t, expectedRun, run) } -func (s *RunApiTestSuite) checkArgParamsRunDetail(t *testing.T, run *run_model.V2beta1Run, experimentId string) { +func (s *RunAPITestSuite) checkArgParamsRunDetail(t *testing.T, run *run_model.V2beta1Run, experimentID string) { // Compare the pipeline spec first. 
argParamsBytes, err := os.ReadFile("../resources/arguments-parameters.yaml") @@ -394,11 +396,11 @@ func (s *RunApiTestSuite) checkArgParamsRunDetail(t *testing.T, run *run_model.V // pipeline_spec := &structpb.Struct{} // err = yaml.Unmarshal(argParamsBytes, pipeline_spec) // assert.Nil(t, err) - expected_bytes, err := yaml.YAMLToJSON(argParamsBytes) + expectedBytes, err := yaml.YAMLToJSON(argParamsBytes) assert.Nil(t, err) - actual_bytes, err := json.Marshal(run.PipelineSpec) + actualBytes, err := json.Marshal(run.PipelineSpec) assert.Nil(t, err) - assert.Equal(t, string(expected_bytes), string(actual_bytes)) + assert.Equal(t, string(expectedBytes), string(actualBytes)) expectedRun := &run_model.V2beta1Run{ RunID: run.RunID, @@ -415,7 +417,7 @@ func (s *RunApiTestSuite) checkArgParamsRunDetail(t *testing.T, run *run_model.V "param2": "world", }, }, - ExperimentID: experimentId, + ExperimentID: experimentID, CreatedAt: run.CreatedAt, ScheduledAt: run.ScheduledAt, FinishedAt: run.FinishedAt, @@ -424,11 +426,11 @@ func (s *RunApiTestSuite) checkArgParamsRunDetail(t *testing.T, run *run_model.V assert.Equal(t, expectedRun, run) } -func TestRunApi(t *testing.T) { - suite.Run(t, new(RunApiTestSuite)) +func TestRunAPI(t *testing.T) { + suite.Run(t, new(RunAPITestSuite)) } -func (s *RunApiTestSuite) TearDownSuite() { +func (s *RunAPITestSuite) TearDownSuite() { if *runIntegrationTests { if !*isDevMode { s.cleanUp() @@ -436,7 +438,7 @@ func (s *RunApiTestSuite) TearDownSuite() { } } -func (s *RunApiTestSuite) cleanUp() { +func (s *RunAPITestSuite) cleanUp() { /* ---------- Clean up ---------- */ test.DeleteAllRuns(s.runClient, s.resourceNamespace, s.T()) test.DeleteAllPipelines(s.pipelineClient, s.T()) diff --git a/backend/test/v2/integration/upgrade_test.go b/backend/test/v2/integration/upgrade_test.go deleted file mode 100644 index 1fd5f33e29d..00000000000 --- a/backend/test/v2/integration/upgrade_test.go +++ /dev/null @@ -1,612 +0,0 @@ -// Copyright 2018-2023 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
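
Similarly, for the run_api_test.go changes above, a minimal sketch of the regenerated run client's request shape: the payload moves from Body to the Run field, and enum-valued fields such as State and StorageState become pointers on the returned model (hence comparisons like *runs[0].StorageState and V2beta1RuntimeStateCANCELING.Pointer()). Function and variable names here are illustrative, and the Create return values are assumed to match the calls in the tests above; the removed upgrade_test.go listing continues below.

package example

import (
	run_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service"
	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model"
	api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v2"
)

// createHelloWorldRun is an illustrative helper (not part of this diff). It shows
// the regenerated request shape: the payload nests under Run (previously Body).
func createHelloWorldRun(
	client *api_server.RunClient,
	experimentID, pipelineID, pipelineVersionID string,
) (*run_model.V2beta1Run, error) {
	return client.Create(&run_params.RunServiceCreateRunParams{Run: &run_model.V2beta1Run{
		DisplayName:  "hello world",
		Description:  "this is hello world",
		ExperimentID: experimentID,
		PipelineVersionReference: &run_model.V2beta1PipelineVersionReference{
			PipelineID:        pipelineID,
			PipelineVersionID: pipelineVersionID,
		},
	}})
}
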
- -package integration - -import ( - "encoding/json" - "fmt" - "os" - "strings" - "testing" - "time" - - "github.com/golang/glog" - experiment_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_client/experiment_service" - "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" - params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service" - pipeline_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service" - "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_model" - upload_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_upload_client/pipeline_upload_service" - recurring_run_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_client/recurring_run_service" - "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/recurring_run_model" - runParams "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service" - "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" - api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v2" - "github.com/kubeflow/pipelines/backend/src/common/util" - test "github.com/kubeflow/pipelines/backend/test/v2" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/stretchr/testify/suite" - "sigs.k8s.io/yaml" -) - -// Methods are organized into two types: "prepare" and "verify". -// "prepare" tests setup resources before upgrade -// "verify" tests verifies resources are expected after upgrade -type UpgradeTests struct { - suite.Suite - namespace string - resourceNamespace string - experimentClient *api_server.ExperimentClient - pipelineClient *api_server.PipelineClient - pipelineUploadClient *api_server.PipelineUploadClient - runClient *api_server.RunClient - recurringRunClient *api_server.RecurringRunClient -} - -func TestUpgrade(t *testing.T) { - suite.Run(t, new(UpgradeTests)) -} - -func (s *UpgradeTests) TestPrepare() { - t := s.T() - - test.DeleteAllRecurringRuns(s.recurringRunClient, s.resourceNamespace, t) - test.DeleteAllRuns(s.runClient, s.resourceNamespace, t) - test.DeleteAllPipelines(s.pipelineClient, t) - test.DeleteAllExperiments(s.experimentClient, s.resourceNamespace, t) - - s.PrepareExperiments() - s.PreparePipelines() - s.PrepareRuns() - s.PrepareRecurringRuns() -} - -func (s *UpgradeTests) TestVerify() { - s.VerifyExperiments() - s.VerifyPipelines() - s.VerifyRuns() - s.VerifyRecurringRuns() - s.VerifyCreatingRunsAndRecurringRuns() -} - -// Check the namespace have ML job installed and ready -func (s *UpgradeTests) SetupSuite() { - // Integration tests also run these tests to first ensure they work, so that - // when integration tests pass and upgrade tests fail, we know for sure - // upgrade process went wrong somehow. - if !(*runIntegrationTests || *runUpgradeTests) { - s.T().SkipNow() - return - } - - if !*isDevMode { - err := test.WaitForReady(*initializeTimeout) - if err != nil { - glog.Exitf("Failed to initialize test. 
Error: %v", err) - } - } - s.namespace = *namespace - - var newExperimentClient func() (*api_server.ExperimentClient, error) - var newPipelineUploadClient func() (*api_server.PipelineUploadClient, error) - var newPipelineClient func() (*api_server.PipelineClient, error) - var newRunClient func() (*api_server.RunClient, error) - var newRecurringRunClient func() (*api_server.RecurringRunClient, error) - - if *isKubeflowMode { - s.resourceNamespace = *resourceNamespace - - newExperimentClient = func() (*api_server.ExperimentClient, error) { - return api_server.NewKubeflowInClusterExperimentClient(s.namespace, *isDebugMode) - } - newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewKubeflowInClusterPipelineUploadClient(s.namespace, *isDebugMode) - } - newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewKubeflowInClusterPipelineClient(s.namespace, *isDebugMode) - } - newRunClient = func() (*api_server.RunClient, error) { - return api_server.NewKubeflowInClusterRunClient(s.namespace, *isDebugMode) - } - newRecurringRunClient = func() (*api_server.RecurringRunClient, error) { - return api_server.NewKubeflowInClusterRecurringRunClient(s.namespace, *isDebugMode) - } - } else { - clientConfig := test.GetClientConfig(*namespace) - - newExperimentClient = func() (*api_server.ExperimentClient, error) { - return api_server.NewExperimentClient(clientConfig, *isDebugMode) - } - newPipelineUploadClient = func() (*api_server.PipelineUploadClient, error) { - return api_server.NewPipelineUploadClient(clientConfig, *isDebugMode) - } - newPipelineClient = func() (*api_server.PipelineClient, error) { - return api_server.NewPipelineClient(clientConfig, *isDebugMode) - } - newRunClient = func() (*api_server.RunClient, error) { - return api_server.NewRunClient(clientConfig, *isDebugMode) - } - newRecurringRunClient = func() (*api_server.RecurringRunClient, error) { - return api_server.NewRecurringRunClient(clientConfig, *isDebugMode) - } - } - - var err error - s.experimentClient, err = newExperimentClient() - if err != nil { - glog.Exitf("Failed to get experiment client. Error: %v", err) - } - s.pipelineUploadClient, err = newPipelineUploadClient() - if err != nil { - glog.Exitf("Failed to get pipeline upload client. Error: %s", err.Error()) - } - s.pipelineClient, err = newPipelineClient() - if err != nil { - glog.Exitf("Failed to get pipeline client. Error: %s", err.Error()) - } - s.runClient, err = newRunClient() - if err != nil { - glog.Exitf("Failed to get run client. Error: %s", err.Error()) - } - s.recurringRunClient, err = newRecurringRunClient() - if err != nil { - glog.Exitf("Failed to get job client. Error: %s", err.Error()) - } -} - -func (s *UpgradeTests) TearDownSuite() { - if *runIntegrationTests { - if !*isDevMode { - t := s.T() - // Clean up after the suite to unblock other tests. (Not needed for upgrade - // tests because it needs changes in prepare tests to persist and verified - // later.) 
- test.DeleteAllRecurringRuns(s.recurringRunClient, s.resourceNamespace, t) - test.DeleteAllRuns(s.runClient, s.resourceNamespace, t) - test.DeleteAllPipelines(s.pipelineClient, t) - test.DeleteAllExperiments(s.experimentClient, s.resourceNamespace, t) - } - } -} - -func (s *UpgradeTests) PrepareExperiments() { - t := s.T() - - /* ---------- Create a new experiment ---------- */ - experiment := test.MakeExperiment("training", "my first experiment", s.resourceNamespace) - _, err := s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{ - Body: experiment, - }) - require.Nil(t, err) - - /* ---------- Create a few more new experiment ---------- */ - // This ensures they can be sorted by create time in expected order. - time.Sleep(1 * time.Second) - experiment = test.MakeExperiment("prediction", "my second experiment", s.resourceNamespace) - _, err = s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{ - Body: experiment, - }) - require.Nil(t, err) - - time.Sleep(1 * time.Second) - experiment = test.MakeExperiment("moonshot", "my third experiment", s.resourceNamespace) - _, err = s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{ - Body: experiment, - }) - require.Nil(t, err) -} - -func (s *UpgradeTests) VerifyExperiments() { - t := s.T() - - /* ---------- Verify list experiments sorted by creation time ---------- */ - // This should have the hello-world experiment in addition to the old experiments. - experiments, _, _, err := test.ListExperiment( - s.experimentClient, - &experiment_params.ExperimentServiceListExperimentsParams{SortBy: util.StringPointer("created_at")}, - "", - ) - require.Nil(t, err) - - allExperiments := make([]string, len(experiments)) - for i, exp := range experiments { - allExperiments[i] = fmt.Sprintf("%v: %v/%v", i, exp.Namespace, exp.DisplayName) - } - fmt.Printf("All experiments: %v", allExperiments) - assert.Equal(t, 5, len(experiments)) - - // Default experiment is no longer deletable - assert.Equal(t, "Default", experiments[0].DisplayName) - assert.Contains(t, experiments[0].Description, "All runs created without specifying an experiment will be grouped here") - assert.NotEmpty(t, experiments[0].ExperimentID) - assert.NotEmpty(t, experiments[0].CreatedAt) - - assert.Equal(t, "training", experiments[1].DisplayName) - assert.Equal(t, "my first experiment", experiments[1].Description) - assert.NotEmpty(t, experiments[1].ExperimentID) - assert.NotEmpty(t, experiments[1].CreatedAt) - - assert.Equal(t, "prediction", experiments[2].DisplayName) - assert.Equal(t, "my second experiment", experiments[2].Description) - assert.NotEmpty(t, experiments[2].ExperimentID) - assert.NotEmpty(t, experiments[2].CreatedAt) - - assert.Equal(t, "moonshot", experiments[3].DisplayName) - assert.Equal(t, "my third experiment", experiments[3].Description) - assert.NotEmpty(t, experiments[3].ExperimentID) - assert.NotEmpty(t, experiments[3].CreatedAt) - - assert.Equal(t, "hello world experiment", experiments[4].DisplayName) - assert.Equal(t, "", experiments[4].Description) - assert.NotEmpty(t, experiments[4].ExperimentID) - assert.NotEmpty(t, experiments[4].CreatedAt) - -} - -func (s *UpgradeTests) PreparePipelines() { - t := s.T() - - test.DeleteAllPipelines(s.pipelineClient, t) - - /* ---------- Upload pipelines YAML ---------- */ - argumentYAMLPipeline, err := s.pipelineUploadClient.UploadFile("../resources/arguments-parameters.yaml", upload_params.NewUploadPipelineParams()) - require.Nil(t, 
err) - assert.Equal(t, "arguments-parameters.yaml", argumentYAMLPipeline.DisplayName) - - /* ---------- Import pipeline YAML by URL ---------- */ - time.Sleep(1 * time.Second) - sequentialPipeline, err := s.pipelineClient.Create(&pipeline_params.PipelineServiceCreatePipelineParams{ - Body: &pipeline_model.V2beta1Pipeline{DisplayName: "sequential"}, - }) - require.Nil(t, err) - assert.Equal(t, "sequential", sequentialPipeline.DisplayName) - sequentialPipelineVersion, err := s.pipelineClient.CreatePipelineVersion(¶ms.PipelineServiceCreatePipelineVersionParams{ - PipelineID: sequentialPipeline.PipelineID, - Body: &pipeline_model.V2beta1PipelineVersion{ - DisplayName: "sequential", - PackageURL: &pipeline_model.V2beta1URL{ - PipelineURL: "https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines/refs/heads/master/backend/test/v2/resources/sequential-v2.yaml", - }, - PipelineID: sequentialPipeline.PipelineID, - }, - }) - require.Nil(t, err) - assert.Equal(t, "sequential", sequentialPipelineVersion.DisplayName) - - /* ---------- Upload pipelines zip ---------- */ - time.Sleep(1 * time.Second) - argumentUploadPipeline, err := s.pipelineUploadClient.UploadFile( - "../resources/arguments.pipeline.zip", &upload_params.UploadPipelineParams{Name: util.StringPointer("zip-arguments-parameters")}) - require.Nil(t, err) - assert.Equal(t, "zip-arguments-parameters", argumentUploadPipeline.DisplayName) - - /* ---------- Import pipeline tarball by URL ---------- */ - pipelineURL := "https://github.com/opendatahub-io/data-science-pipelines/raw/refs/heads/master/backend/test/v2/resources/arguments.pipeline.zip" - - if pullNumber := os.Getenv("PULL_NUMBER"); pullNumber != "" { - pipelineURL = fmt.Sprintf("https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines/pull/%s/head/backend/test/v2/resources/arguments.pipeline.zip", pullNumber) - } - - time.Sleep(1 * time.Second) - argumentUrlPipeline, err := s.pipelineClient.Create(&pipeline_params.PipelineServiceCreatePipelineParams{ - Body: &pipeline_model.V2beta1Pipeline{DisplayName: "arguments.pipeline.zip"}, - }) - require.Nil(t, err) - assert.Equal(t, "arguments.pipeline.zip", argumentUrlPipeline.DisplayName) - argumentUrlPipelineVersion, err := s.pipelineClient.CreatePipelineVersion(¶ms.PipelineServiceCreatePipelineVersionParams{ - PipelineID: argumentUrlPipeline.PipelineID, - Body: &pipeline_model.V2beta1PipelineVersion{ - DisplayName: "arguments", - PackageURL: &pipeline_model.V2beta1URL{ - PipelineURL: pipelineURL, - }, - PipelineID: argumentUrlPipeline.PipelineID, - }, - }) - require.Nil(t, err) - assert.Equal(t, "arguments", argumentUrlPipelineVersion.DisplayName) - - time.Sleep(1 * time.Second) -} - -func (s *UpgradeTests) VerifyPipelines() { - t := s.T() - - /* ---------- Verify list pipeline sorted by creation time ---------- */ - pipelines, _, _, err := s.pipelineClient.List( - &pipeline_params.PipelineServiceListPipelinesParams{SortBy: util.StringPointer("created_at")}) - require.Nil(t, err) - // During upgrade, default pipelines may be installed, so we only verify the - // 4 oldest pipelines here. - assert.True(t, len(pipelines) >= 4) - // Ensure the display name is the same as the name after upgrade. 
- assert.Equal(t, "arguments-parameters.yaml", pipelines[0].Name) - assert.Equal(t, "arguments-parameters.yaml", pipelines[0].DisplayName) - assert.Equal(t, "sequential", pipelines[1].Name) - assert.Equal(t, "sequential", pipelines[1].DisplayName) - assert.Equal(t, "zip-arguments-parameters", pipelines[2].Name) - assert.Equal(t, "zip-arguments-parameters", pipelines[2].DisplayName) - assert.Equal(t, "arguments.pipeline.zip", pipelines[3].Name) - assert.Equal(t, "arguments.pipeline.zip", pipelines[3].DisplayName) - - /* ---------- Verify pipeline spec ---------- */ - pipelineVersions, totalSize, _, err := s.pipelineClient.ListPipelineVersions( - ¶ms.PipelineServiceListPipelineVersionsParams{ - PipelineID: pipelines[0].PipelineID, - }) - require.Nil(t, err) - assert.Equal(t, totalSize, 1) - pipelineVersion, err := s.pipelineClient.GetPipelineVersion(¶ms.PipelineServiceGetPipelineVersionParams{PipelineID: pipelines[0].PipelineID, PipelineVersionID: pipelineVersions[0].PipelineVersionID}) - require.Nil(t, err) - bytes, err := os.ReadFile("../resources/arguments-parameters.yaml") - expected_bytes, err := yaml.YAMLToJSON(bytes) - require.Nil(t, err) - actual_bytes, err := json.Marshal(pipelineVersion.PipelineSpec) - require.Nil(t, err) - // Override pipeline name, then compare - assert.Equal(t, string(expected_bytes), strings.Replace(string(actual_bytes), "pipeline/arguments-parameters.yaml", "echo", 1)) -} - -func (s *UpgradeTests) PrepareRuns() { - t := s.T() - - helloWorldPipeline := s.getHelloWorldPipeline(true) - helloWorldExperiment := s.getHelloWorldExperiment(true) - if helloWorldExperiment == nil { - helloWorldExperiment = s.createHelloWorldExperiment() - } - - hello2 := s.getHelloWorldExperiment(true) - require.Equal(t, hello2, helloWorldExperiment) - - /* ---------- Create a new hello world run by specifying pipeline ID ---------- */ - createRunRequest := &runParams.RunServiceCreateRunParams{Body: &run_model.V2beta1Run{ - DisplayName: "hello world", - Description: "this is hello world", - ExperimentID: helloWorldExperiment.ExperimentID, - PipelineVersionReference: &run_model.V2beta1PipelineVersionReference{ - PipelineID: helloWorldPipeline.PipelineID, - PipelineVersionID: helloWorldPipeline.PipelineVersionID, - }, - }} - _, err := s.runClient.Create(createRunRequest) - require.Nil(t, err) -} - -func (s *UpgradeTests) VerifyRuns() { - t := s.T() - - /* ---------- List the runs, sorted by creation time ---------- */ - runs, _, _, err := test.ListRuns( - s.runClient, - &runParams.RunServiceListRunsParams{SortBy: util.StringPointer("created_at")}, - s.resourceNamespace) - require.Nil(t, err) - assert.True(t, len(runs) >= 1) - assert.Equal(t, "hello world", runs[0].DisplayName) - - /* ---------- Get hello world run ---------- */ - helloWorldRunDetail, err := s.runClient.Get(&runParams.RunServiceGetRunParams{RunID: runs[0].RunID}) - require.Nil(t, err) - assert.Equal(t, "hello world", helloWorldRunDetail.DisplayName) - assert.Equal(t, "this is hello world", helloWorldRunDetail.Description) -} - -func (s *UpgradeTests) PrepareRecurringRuns() { - t := s.T() - - pipeline := s.getHelloWorldPipeline(true) - experiment := s.getHelloWorldExperiment(true) - - /* ---------- Create a new hello world job by specifying pipeline ID ---------- */ - createRecurringRunRequest := &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{Body: &recurring_run_model.V2beta1RecurringRun{ - DisplayName: "hello world", - Description: "this is hello world", - PipelineVersionReference: 
&recurring_run_model.V2beta1PipelineVersionReference{ - PipelineID: pipeline.PipelineID, - PipelineVersionID: pipeline.PipelineVersionID, - }, - ExperimentID: experiment.ExperimentID, - MaxConcurrency: 10, - Mode: recurring_run_model.RecurringRunModeENABLE, - NoCatchup: true, - }} - _, err := s.recurringRunClient.Create(createRecurringRunRequest) - require.Nil(t, err) -} - -func (s *UpgradeTests) VerifyRecurringRuns() { - t := s.T() - - pipeline := s.getHelloWorldPipeline(false) - experiment := s.getHelloWorldExperiment(false) - - /* ---------- Get hello world recurring run ---------- */ - recurringRuns, _, _, err := test.ListAllRecurringRuns(s.recurringRunClient, s.resourceNamespace) - require.Nil(t, err) - require.Len(t, recurringRuns, 1) - recurringRun := recurringRuns[0] - - expectedRecurringRun := &recurring_run_model.V2beta1RecurringRun{ - RecurringRunID: recurringRun.RecurringRunID, - DisplayName: "hello world", - Description: "this is hello world", - ExperimentID: experiment.ExperimentID, - PipelineVersionReference: &recurring_run_model.V2beta1PipelineVersionReference{ - PipelineID: pipeline.PipelineID, - PipelineVersionID: pipeline.PipelineVersionID, - }, - ServiceAccount: test.GetDefaultPipelineRunnerServiceAccount(*isKubeflowMode), - MaxConcurrency: 10, - NoCatchup: true, - Mode: recurring_run_model.RecurringRunModeENABLE, - Namespace: recurringRun.Namespace, - CreatedAt: recurringRun.CreatedAt, - UpdatedAt: recurringRun.UpdatedAt, - Trigger: recurringRun.Trigger, - Status: recurringRun.Status, - } - - assert.Equal(t, expectedRecurringRun, recurringRun) -} - -func (s *UpgradeTests) VerifyCreatingRunsAndRecurringRuns() { - t := s.T() - - /* ---------- Get the oldest pipeline and the newest experiment ---------- */ - pipelines, _, _, err := s.pipelineClient.List( - &pipeline_params.PipelineServiceListPipelinesParams{SortBy: util.StringPointer("created_at")}) - require.Nil(t, err) - assert.Equal(t, "arguments-parameters.yaml", pipelines[0].DisplayName) - pipelineVersions, totalSize, _, err := s.pipelineClient.ListPipelineVersions(¶ms.PipelineServiceListPipelineVersionsParams{ - PipelineID: pipelines[0].PipelineID, - }) - require.Nil(t, err) - assert.Equal(t, 1, totalSize) - - experiments, _, _, err := test.ListExperiment( - s.experimentClient, - &experiment_params.ExperimentServiceListExperimentsParams{SortBy: util.StringPointer("created_at")}, - "", - ) - require.Nil(t, err) - assert.Equal(t, "Default", experiments[0].DisplayName) - assert.Equal(t, "training", experiments[1].DisplayName) - assert.Equal(t, "hello world experiment", experiments[4].DisplayName) - - /* ---------- Create a new run based on the oldest pipeline and its default pipeline version ---------- */ - createRunRequest := &runParams.RunServiceCreateRunParams{Body: &run_model.V2beta1Run{ - DisplayName: "argument parameter from pipeline", - Description: "a run from an old pipeline", - // This run should belong to the newest experiment (created after the upgrade) - ExperimentID: experiments[4].ExperimentID, - RuntimeConfig: &run_model.V2beta1RuntimeConfig{ - Parameters: map[string]interface{}{ - "param1": "goodbye", - "param2": "world", - }, - }, - PipelineVersionReference: &run_model.V2beta1PipelineVersionReference{ - PipelineID: pipelineVersions[0].PipelineID, - PipelineVersionID: pipelineVersions[0].PipelineVersionID, - }, - }} - runFromPipeline, err := s.runClient.Create(createRunRequest) - assert.Nil(t, err) - - assert.Equal(t, experiments[4].ExperimentID, runFromPipeline.ExperimentID) - - /* ---------- Create a 
new recurring run based on the second oldest pipeline version and belonging to the second oldest experiment ---------- */ - pipelineVersions, totalSize, _, err = s.pipelineClient.ListPipelineVersions(¶ms.PipelineServiceListPipelineVersionsParams{ - PipelineID: pipelines[1].PipelineID, - }) - require.Nil(t, err) - assert.Equal(t, 1, totalSize) - - createRecurringRunRequest := &recurring_run_params.RecurringRunServiceCreateRecurringRunParams{Body: &recurring_run_model.V2beta1RecurringRun{ - DisplayName: "sequential job from pipeline version", - Description: "a recurring run from an old pipeline version", - ExperimentID: experiments[1].ExperimentID, - PipelineVersionReference: &recurring_run_model.V2beta1PipelineVersionReference{ - PipelineID: pipelineVersions[0].PipelineID, - PipelineVersionID: pipelineVersions[0].PipelineVersionID, - }, - RuntimeConfig: &recurring_run_model.V2beta1RuntimeConfig{ - Parameters: map[string]interface{}{ - "url": "gs://ml-pipeline-playground/shakespeare1.txt", - }, - }, - MaxConcurrency: 10, - Mode: recurring_run_model.RecurringRunModeENABLE, - }} - createdRecurringRun, err := s.recurringRunClient.Create(createRecurringRunRequest) - assert.Nil(t, err) - assert.Equal(t, experiments[1].ExperimentID, createdRecurringRun.ExperimentID) -} - -func (s *UpgradeTests) createHelloWorldExperiment() *experiment_model.V2beta1Experiment { - t := s.T() - - experiment := test.MakeExperiment("hello world experiment", "", s.resourceNamespace) - helloWorldExperiment, err := s.experimentClient.Create(&experiment_params.ExperimentServiceCreateExperimentParams{Body: experiment}) - require.Nil(t, err) - - return helloWorldExperiment -} - -func (s *UpgradeTests) getHelloWorldExperiment(createIfNotExist bool) *experiment_model.V2beta1Experiment { - t := s.T() - - experiments, _, _, err := test.ListExperiment( - s.experimentClient, - &experiment_params.ExperimentServiceListExperimentsParams{ - PageSize: util.Int32Pointer(1000), - }, - s.resourceNamespace) - require.Nil(t, err) - var helloWorldExperiment *experiment_model.V2beta1Experiment - for _, experiment := range experiments { - if experiment.DisplayName == "hello world experiment" { - helloWorldExperiment = experiment - } - } - - if helloWorldExperiment == nil && createIfNotExist { - return s.createHelloWorldExperiment() - } - - return helloWorldExperiment -} - -func (s *UpgradeTests) getHelloWorldPipeline(createIfNotExist bool) *pipeline_model.V2beta1PipelineVersion { - t := s.T() - - pipelines, err := s.pipelineClient.ListAll(&pipeline_params.PipelineServiceListPipelinesParams{}, 1000) - require.Nil(t, err) - var helloWorldPipeline *pipeline_model.V2beta1Pipeline - for _, pipeline := range pipelines { - if pipeline.DisplayName == "hello-world.yaml" { - helloWorldPipeline = pipeline - } - } - - if helloWorldPipeline == nil && createIfNotExist { - return s.createHelloWorldPipeline() - } - pipelineVersions, totalSize, _, err := s.pipelineClient.ListPipelineVersions(¶ms.PipelineServiceListPipelineVersionsParams{ - PipelineID: helloWorldPipeline.PipelineID, - }) - require.Nil(t, err) - require.Equal(t, 1, totalSize) - - return pipelineVersions[0] -} - -func (s *UpgradeTests) createHelloWorldPipeline() *pipeline_model.V2beta1PipelineVersion { - t := s.T() - - /* ---------- Upload pipelines YAML ---------- */ - uploadedPipeline, err := s.pipelineUploadClient.UploadFile("../resources/hello-world.yaml", upload_params.NewUploadPipelineParams()) - require.Nil(t, err) - - pipelineVersions, totalSize, _, err := 
s.pipelineClient.ListPipelineVersions(¶ms.PipelineServiceListPipelineVersionsParams{ - PipelineID: uploadedPipeline.PipelineID, - }) - require.Nil(t, err) - require.Equal(t, 1, totalSize) - - return pipelineVersions[0] -} diff --git a/backend/test/v2/resources/pvc-mount.yaml b/backend/test/v2/resources/pvc-mount.yaml new file mode 100644 index 00000000000..73137fc1a57 --- /dev/null +++ b/backend/test/v2/resources/pvc-mount.yaml @@ -0,0 +1,118 @@ +# PIPELINE DEFINITION +# Name: pvc-mount-pipeline +# Inputs: +# pvc_name: str +components: + comp-consumer: + executorLabel: exec-consumer + comp-producer: + executorLabel: exec-producer + outputDefinitions: + parameters: + Output: + parameterType: STRING +deploymentSpec: + executors: + exec-consumer: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - consumer + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef consumer() -> None:\n with open('/data/file.txt', 'r') as\ + \ f:\n print(f.read())\n\n" + image: python:3.9 + exec-producer: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - producer + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef producer() -> str:\n with open('/data/file.txt', 'w') as f:\n\ + \ f.write('hello')\n with open('/data/file.txt', 'r') as f:\n\ + \ return f.read()\n\n" + image: python:3.9 +pipelineInfo: + name: pvc-mount-pipeline +root: + dag: + tasks: + consumer: + cachingOptions: + enableCache: true + componentRef: + name: comp-consumer + dependentTasks: + - producer + taskInfo: + name: consumer + producer: + cachingOptions: + enableCache: true + componentRef: + name: comp-producer + taskInfo: + name: producer + inputDefinitions: + parameters: + pvc_name: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.2 +--- +platforms: + kubernetes: + deploymentSpec: + executors: + exec-consumer: + pvcMount: + - componentInputParameter: pvc_name + mountPath: /data + pvcNameParameter: + componentInputParameter: pvc_name + exec-producer: + pvcMount: + - componentInputParameter: pvc_name + mountPath: /data + pvcNameParameter: + componentInputParameter: pvc_name diff --git a/backend/test/v2/test_utils.go b/backend/test/v2/test_utils.go index 04b787fa71c..056c40e7e0e 100644 --- a/backend/test/v2/test_utils.go +++ b/backend/test/v2/test_utils.go @@ -21,8 +21,6 @@ import ( "testing" "time" - "github.com/cenkalti/backoff" - "github.com/golang/glog" experiment_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_client/experiment_service" "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/experiment_model" pipeline_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_client/pipeline_service" @@ -32,6 +30,9 @@ import ( run_params "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service" "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" api_server "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v2" + + "github.com/cenkalti/backoff" + "github.com/golang/glog" "github.com/pkg/errors" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -218,3 +219,21 @@ func DeleteAllPipelines(client *api_server.PipelineClient, t *testing.T) { assert.True(t, isRemoved) } } + +func GetPipelineUploadClient( + uploadPipelinesWithKubernetes bool, + isKubeflowMode bool, + isDebugMode bool, + namespace string, + clientConfig clientcmd.ClientConfig, +) (api_server.PipelineUploadInterface, error) { + if uploadPipelinesWithKubernetes { + return api_server.NewPipelineUploadClientKubernetes(clientConfig, namespace) + } + + if isKubeflowMode { + return api_server.NewKubeflowInClusterPipelineUploadClient(namespace, isDebugMode) + } + + return api_server.NewPipelineUploadClient(clientConfig, isDebugMode) +} diff --git a/backend/update_requirements.sh b/backend/update_requirements.sh index 93c811f474d..98cba6c4c17 100755 --- a/backend/update_requirements.sh +++ b/backend/update_requirements.sh @@ -1,5 +1,5 @@ 
#!/bin/bash # This image should be in sync with Dockerfile. -IMAGE="python:3.9" +IMAGE="python:3.11" ../hack/update-requirements.sh $IMAGE requirements.txt diff --git a/cliff.toml b/cliff.toml new file mode 100644 index 00000000000..ecb0ee7cafe --- /dev/null +++ b/cliff.toml @@ -0,0 +1,24 @@ +[changelog] +header = "Changelog" +body = """ + +## {{ version | trim_start_matches(pat="v") }} ({{ timestamp | date(format="%Y-%m-%d") }}) +{% for group, commits in commits | group_by(attribute="group") %} + ### {{ group | upper_first }} + {% for commit in commits %} + - {% if commit.scope %}**{{ commit.scope }}** {% endif %}{{ commit.message }}{% endfor %}{% endfor %} +""" +trim = true +postprocessors = [{ pattern = "\\(#([0-9]+)\\)", replace = "([#${1}](https://github.com/kubeflow/pipelines/pull/${1}))"}] + +[git] +commit_parsers = [ + { message = "^feat", group = "Features"}, + { message = "^fix", group = "Bug Fixes"}, + { message = "^doc", group = "Documentation"}, + { message = "^perf", group = "Other"}, + { message = "^refactor", group = "Other"}, + { message = "^style", group = "Other"}, + { message = "^test", group = "Other"}, + { message = "^chore", group = "Other"}, +] diff --git a/components/aws/sagemaker/requirements_v2.txt b/components/aws/sagemaker/requirements_v2.txt index 62aebfd42a5..8a802447671 100644 --- a/components/aws/sagemaker/requirements_v2.txt +++ b/components/aws/sagemaker/requirements_v2.txt @@ -3,4 +3,4 @@ pathlib2==2.3.5 pyyaml==5.4.1 mypy-extensions==0.4.3 kubernetes==12.0.1 -urllib3==1.26.15 \ No newline at end of file +urllib3==2.5.0 \ No newline at end of file diff --git a/components/contrib/CatBoost/Predict_class_probabilities/from_CSV/component.py b/components/contrib/CatBoost/Predict_class_probabilities/from_CSV/component.py deleted file mode 100644 index 60f7248903f..00000000000 --- a/components/contrib/CatBoost/Predict_class_probabilities/from_CSV/component.py +++ /dev/null @@ -1,62 +0,0 @@ -from kfp.components import InputPath, OutputPath, create_component_from_func - -def catboost_predict_class_probabilities( - data_path: InputPath('CSV'), - model_path: InputPath('CatBoostModel'), - predictions_path: OutputPath(), - - label_column: int = None, -): - '''Predict class probabilities with a CatBoost model. - - Args: - data_path: Path for the data in CSV format. - model_path: Path for the trained model in binary CatBoostModel format. - label_column: Column containing the label data. - predictions_path: Output path for the predictions. - - Outputs: - predictions: Predictions in text format. 
- - Annotations: - author: Alexey Volkov - ''' - import tempfile - - from catboost import CatBoost, Pool - import numpy - - if label_column: - column_descriptions = {label_column: 'Label'} - column_description_path = tempfile.NamedTemporaryFile(delete=False).name - with open(column_description_path, 'w') as column_description_file: - for idx, kind in column_descriptions.items(): - column_description_file.write('{}\t{}\n'.format(idx, kind)) - else: - column_description_path = None - - eval_data = Pool( - data_path, - column_description=column_description_path, - has_header=True, - delimiter=',', - ) - - model = CatBoost() - model.load_model(model_path) - - predictions = model.predict(eval_data, prediction_type='Probability') - numpy.savetxt(predictions_path, predictions) - - -if __name__ == '__main__': - catboost_predict_class_probabilities_op = create_component_from_func( - catboost_predict_class_probabilities, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=['catboost==0.23'], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/CatBoost/Predict_class_probabilities/from_CSV/component.yaml", - }, - ) diff --git a/components/contrib/CatBoost/Predict_class_probabilities/from_CSV/component.yaml b/components/contrib/CatBoost/Predict_class_probabilities/from_CSV/component.yaml deleted file mode 100644 index e4cb6a7807e..00000000000 --- a/components/contrib/CatBoost/Predict_class_probabilities/from_CSV/component.yaml +++ /dev/null @@ -1,112 +0,0 @@ -name: Catboost predict class probabilities -description: |- - Predict class probabilities with a CatBoost model. - - Args: - data_path: Path for the data in CSV format. - model_path: Path for the trained model in binary CatBoostModel format. - label_column: Column containing the label data. - predictions_path: Output path for the predictions. - - Outputs: - predictions: Predictions in text format. - - Annotations: - author: Alexey Volkov -inputs: -- {name: data, type: CSV} -- {name: model, type: CatBoostModel} -- {name: label_column, type: Integer, optional: true} -outputs: -- {name: predictions} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/CatBoost/Predict_class_probabilities/from_CSV/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'catboost==0.23' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet - --no-warn-script-location 'catboost==0.23' --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def catboost_predict_class_probabilities( - data_path, - model_path, - predictions_path, - - label_column = None, - ): - '''Predict class probabilities with a CatBoost model. - - Args: - data_path: Path for the data in CSV format. - model_path: Path for the trained model in binary CatBoostModel format. - label_column: Column containing the label data. - predictions_path: Output path for the predictions. - - Outputs: - predictions: Predictions in text format. 
- - Annotations: - author: Alexey Volkov - ''' - import tempfile - - from catboost import CatBoost, Pool - import numpy - - if label_column: - column_descriptions = {label_column: 'Label'} - column_description_path = tempfile.NamedTemporaryFile(delete=False).name - with open(column_description_path, 'w') as column_description_file: - for idx, kind in column_descriptions.items(): - column_description_file.write('{}\t{}\n'.format(idx, kind)) - else: - column_description_path = None - - eval_data = Pool( - data_path, - column_description=column_description_path, - has_header=True, - delimiter=',', - ) - - model = CatBoost() - model.load_model(model_path) - - predictions = model.predict(eval_data, prediction_type='Probability') - numpy.savetxt(predictions_path, predictions) - - import argparse - _parser = argparse.ArgumentParser(prog='Catboost predict class probabilities', description='Predict class probabilities with a CatBoost model.\n\n Args:\n data_path: Path for the data in CSV format.\n model_path: Path for the trained model in binary CatBoostModel format.\n label_column: Column containing the label data.\n predictions_path: Output path for the predictions.\n\n Outputs:\n predictions: Predictions in text format.\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--label-column", dest="label_column", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--predictions", dest="predictions_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = catboost_predict_class_probabilities(**_parsed_args) - args: - - --data - - {inputPath: data} - - --model - - {inputPath: model} - - if: - cond: {isPresent: label_column} - then: - - --label-column - - {inputValue: label_column} - - --predictions - - {outputPath: predictions} diff --git a/components/contrib/CatBoost/Predict_classes/from_CSV/component.py b/components/contrib/CatBoost/Predict_classes/from_CSV/component.py deleted file mode 100644 index 4e932b55c34..00000000000 --- a/components/contrib/CatBoost/Predict_classes/from_CSV/component.py +++ /dev/null @@ -1,62 +0,0 @@ -from kfp.components import InputPath, OutputPath, create_component_from_func - -def catboost_predict_classes( - data_path: InputPath('CSV'), - model_path: InputPath('CatBoostModel'), - predictions_path: OutputPath(), - - label_column: int = None, -): - '''Predict classes using the CatBoost classifier model. - - Args: - data_path: Path for the data in CSV format. - model_path: Path for the trained model in binary CatBoostModel format. - label_column: Column containing the label data. - predictions_path: Output path for the predictions. - - Outputs: - predictions: Class predictions in text format. 
- - Annotations: - author: Alexey Volkov - ''' - import tempfile - - from catboost import CatBoostClassifier, Pool - import numpy - - if label_column: - column_descriptions = {label_column: 'Label'} - column_description_path = tempfile.NamedTemporaryFile(delete=False).name - with open(column_description_path, 'w') as column_description_file: - for idx, kind in column_descriptions.items(): - column_description_file.write('{}\t{}\n'.format(idx, kind)) - else: - column_description_path = None - - eval_data = Pool( - data_path, - column_description=column_description_path, - has_header=True, - delimiter=',', - ) - - model = CatBoostClassifier() - model.load_model(model_path) - - predictions = model.predict(eval_data) - numpy.savetxt(predictions_path, predictions, fmt='%s') - - -if __name__ == '__main__': - catboost_predict_classes_op = create_component_from_func( - catboost_predict_classes, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=['catboost==0.22'], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/CatBoost/Predict_classes/from_CSV/component.yaml", - }, - ) diff --git a/components/contrib/CatBoost/Predict_classes/from_CSV/component.yaml b/components/contrib/CatBoost/Predict_classes/from_CSV/component.yaml deleted file mode 100644 index b289abba743..00000000000 --- a/components/contrib/CatBoost/Predict_classes/from_CSV/component.yaml +++ /dev/null @@ -1,112 +0,0 @@ -name: Catboost predict classes -description: |- - Predict classes using the CatBoost classifier model. - - Args: - data_path: Path for the data in CSV format. - model_path: Path for the trained model in binary CatBoostModel format. - label_column: Column containing the label data. - predictions_path: Output path for the predictions. - - Outputs: - predictions: Class predictions in text format. - - Annotations: - author: Alexey Volkov -inputs: -- {name: data, type: CSV} -- {name: model, type: CatBoostModel} -- {name: label_column, type: Integer, optional: true} -outputs: -- {name: predictions} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/CatBoost/Predict_classes/from_CSV/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'catboost==0.22' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet - --no-warn-script-location 'catboost==0.22' --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def catboost_predict_classes( - data_path, - model_path, - predictions_path, - - label_column = None, - ): - '''Predict classes using the CatBoost classifier model. - - Args: - data_path: Path for the data in CSV format. - model_path: Path for the trained model in binary CatBoostModel format. - label_column: Column containing the label data. - predictions_path: Output path for the predictions. - - Outputs: - predictions: Class predictions in text format. 
- - Annotations: - author: Alexey Volkov - ''' - import tempfile - - from catboost import CatBoostClassifier, Pool - import numpy - - if label_column: - column_descriptions = {label_column: 'Label'} - column_description_path = tempfile.NamedTemporaryFile(delete=False).name - with open(column_description_path, 'w') as column_description_file: - for idx, kind in column_descriptions.items(): - column_description_file.write('{}\t{}\n'.format(idx, kind)) - else: - column_description_path = None - - eval_data = Pool( - data_path, - column_description=column_description_path, - has_header=True, - delimiter=',', - ) - - model = CatBoostClassifier() - model.load_model(model_path) - - predictions = model.predict(eval_data) - numpy.savetxt(predictions_path, predictions, fmt='%s') - - import argparse - _parser = argparse.ArgumentParser(prog='Catboost predict classes', description='Predict classes using the CatBoost classifier model.\n\n Args:\n data_path: Path for the data in CSV format.\n model_path: Path for the trained model in binary CatBoostModel format.\n label_column: Column containing the label data.\n predictions_path: Output path for the predictions.\n\n Outputs:\n predictions: Class predictions in text format.\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--label-column", dest="label_column", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--predictions", dest="predictions_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = catboost_predict_classes(**_parsed_args) - args: - - --data - - {inputPath: data} - - --model - - {inputPath: model} - - if: - cond: {isPresent: label_column} - then: - - --label-column - - {inputValue: label_column} - - --predictions - - {outputPath: predictions} diff --git a/components/contrib/CatBoost/Predict_values/from_CSV/component.py b/components/contrib/CatBoost/Predict_values/from_CSV/component.py deleted file mode 100644 index 959edfabd87..00000000000 --- a/components/contrib/CatBoost/Predict_values/from_CSV/component.py +++ /dev/null @@ -1,62 +0,0 @@ -from kfp.components import InputPath, OutputPath, create_component_from_func - -def catboost_predict_values( - data_path: InputPath('CSV'), - model_path: InputPath('CatBoostModel'), - predictions_path: OutputPath(), - - label_column: int = None, -): - '''Predict values with a CatBoost model. - - Args: - data_path: Path for the data in CSV format. - model_path: Path for the trained model in binary CatBoostModel format. - label_column: Column containing the label data. - predictions_path: Output path for the predictions. - - Outputs: - predictions: Predictions in text format. 
- - Annotations: - author: Alexey Volkov - ''' - import tempfile - - from catboost import CatBoost, Pool - import numpy - - if label_column: - column_descriptions = {label_column: 'Label'} - column_description_path = tempfile.NamedTemporaryFile(delete=False).name - with open(column_description_path, 'w') as column_description_file: - for idx, kind in column_descriptions.items(): - column_description_file.write('{}\t{}\n'.format(idx, kind)) - else: - column_description_path = None - - eval_data = Pool( - data_path, - column_description=column_description_path, - has_header=True, - delimiter=',', - ) - - model = CatBoost() - model.load_model(model_path) - - predictions = model.predict(eval_data, prediction_type='RawFormulaVal') - numpy.savetxt(predictions_path, predictions) - - -if __name__ == '__main__': - catboost_predict_values_op = create_component_from_func( - catboost_predict_values, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=['catboost==0.23'], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/CatBoost/Predict_values/from_CSV/component.yaml", - }, - ) diff --git a/components/contrib/CatBoost/Predict_values/from_CSV/component.yaml b/components/contrib/CatBoost/Predict_values/from_CSV/component.yaml deleted file mode 100644 index 642d993d380..00000000000 --- a/components/contrib/CatBoost/Predict_values/from_CSV/component.yaml +++ /dev/null @@ -1,112 +0,0 @@ -name: Catboost predict values -description: |- - Predict values with a CatBoost model. - - Args: - data_path: Path for the data in CSV format. - model_path: Path for the trained model in binary CatBoostModel format. - label_column: Column containing the label data. - predictions_path: Output path for the predictions. - - Outputs: - predictions: Predictions in text format. - - Annotations: - author: Alexey Volkov -inputs: -- {name: data, type: CSV} -- {name: model, type: CatBoostModel} -- {name: label_column, type: Integer, optional: true} -outputs: -- {name: predictions} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/CatBoost/Predict_values/from_CSV/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'catboost==0.23' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet - --no-warn-script-location 'catboost==0.23' --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def catboost_predict_values( - data_path, - model_path, - predictions_path, - - label_column = None, - ): - '''Predict values with a CatBoost model. - - Args: - data_path: Path for the data in CSV format. - model_path: Path for the trained model in binary CatBoostModel format. - label_column: Column containing the label data. - predictions_path: Output path for the predictions. - - Outputs: - predictions: Predictions in text format. 
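The column-description file written by these components is how CatBoost's Pool learns which CSV column holds the label without the table being split by hand. A self-contained sketch of that pattern, using throwaway temp files in place of the component's artifact paths (catboost is assumed to be installed):

import csv
import tempfile

from catboost import Pool

# Tiny stand-in dataset; the real components receive a CSV artifact path instead.
data_file = tempfile.NamedTemporaryFile('w', suffix='.csv', delete=False)
csv.writer(data_file).writerows([['label', 'x'], [1, 0.5], [0, 1.5]])
data_file.close()

# The column-description TSV maps a column index to the 'Label' role.
cd_file = tempfile.NamedTemporaryFile('w', suffix='.cd', delete=False)
cd_file.write('0\tLabel\n')
cd_file.close()

eval_data = Pool(data_file.name, column_description=cd_file.name,
                 has_header=True, delimiter=',')
print(eval_data.shape)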
- - Annotations: - author: Alexey Volkov - ''' - import tempfile - - from catboost import CatBoost, Pool - import numpy - - if label_column: - column_descriptions = {label_column: 'Label'} - column_description_path = tempfile.NamedTemporaryFile(delete=False).name - with open(column_description_path, 'w') as column_description_file: - for idx, kind in column_descriptions.items(): - column_description_file.write('{}\t{}\n'.format(idx, kind)) - else: - column_description_path = None - - eval_data = Pool( - data_path, - column_description=column_description_path, - has_header=True, - delimiter=',', - ) - - model = CatBoost() - model.load_model(model_path) - - predictions = model.predict(eval_data, prediction_type='RawFormulaVal') - numpy.savetxt(predictions_path, predictions) - - import argparse - _parser = argparse.ArgumentParser(prog='Catboost predict values', description='Predict values with a CatBoost model.\n\n Args:\n data_path: Path for the data in CSV format.\n model_path: Path for the trained model in binary CatBoostModel format.\n label_column: Column containing the label data.\n predictions_path: Output path for the predictions.\n\n Outputs:\n predictions: Predictions in text format.\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--label-column", dest="label_column", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--predictions", dest="predictions_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = catboost_predict_values(**_parsed_args) - args: - - --data - - {inputPath: data} - - --model - - {inputPath: model} - - if: - cond: {isPresent: label_column} - then: - - --label-column - - {inputValue: label_column} - - --predictions - - {outputPath: predictions} diff --git a/components/contrib/CatBoost/Train_classifier/from_CSV/component.py b/components/contrib/CatBoost/Train_classifier/from_CSV/component.py deleted file mode 100644 index 183f562c695..00000000000 --- a/components/contrib/CatBoost/Train_classifier/from_CSV/component.py +++ /dev/null @@ -1,97 +0,0 @@ -from kfp.components import InputPath, OutputPath, create_component_from_func - -def catboost_train_classifier( - training_data_path: InputPath('CSV'), - model_path: OutputPath('CatBoostModel'), - starting_model_path: InputPath('CatBoostModel') = None, - label_column: int = 0, - - loss_function: str = 'Logloss', - num_iterations: int = 500, - learning_rate: float = None, - depth: int = 6, - random_seed: int = 0, - - cat_features: list = None, - text_features: list = None, - - additional_training_options: dict = {}, -): - '''Train a CatBoost classifier model. - - Args: - training_data_path: Path for the training data in CSV format. - model_path: Output path for the trained model in binary CatBoostModel format. - starting_model_path: Path for the existing trained model to start from. - label_column: Column containing the label data. - - loss_function: The metric to use in training and also selector of the machine learning - problem to solve. Default = 'Logloss' - num_iterations: Number of trees to add to the ensemble. - learning_rate: Step size shrinkage used in update to prevents overfitting. 
- Default value is selected automatically for binary classification with other parameters set to default. - In all other cases default is 0.03. - depth: Depth of a tree. All trees are the same depth. Default = 6 - random_seed: Random number seed. Default = 0 - - cat_features: A list of Categorical features (indices or names). - text_features: A list of Text features (indices or names). - additional_training_options: A dictionary with additional options to pass to CatBoostClassifier - - Outputs: - model: Trained model in binary CatBoostModel format. - - Annotations: - author: Alexey Volkov - ''' - import tempfile - from pathlib import Path - - from catboost import CatBoostClassifier, Pool - - column_descriptions = {label_column: 'Label'} - column_description_path = tempfile.NamedTemporaryFile(delete=False).name - with open(column_description_path, 'w') as column_description_file: - for idx, kind in column_descriptions.items(): - column_description_file.write('{}\t{}\n'.format(idx, kind)) - - train_data = Pool( - training_data_path, - column_description=column_description_path, - has_header=True, - delimiter=',', - ) - - model = CatBoostClassifier( - iterations=num_iterations, - depth=depth, - learning_rate=learning_rate, - loss_function=loss_function, - random_seed=random_seed, - verbose=True, - **additional_training_options, - ) - - model.fit( - train_data, - cat_features=cat_features, - text_features=text_features, - init_model=starting_model_path, - #verbose=False, - #plot=True, - ) - Path(model_path).parent.mkdir(parents=True, exist_ok=True) - model.save_model(model_path) - - -if __name__ == '__main__': - catboost_train_classifier_op = create_component_from_func( - catboost_train_classifier, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=['catboost==0.23'], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/CatBoost/Train_classifier/from_CSV/component.yaml", - }, - ) diff --git a/components/contrib/CatBoost/Train_classifier/from_CSV/component.yaml b/components/contrib/CatBoost/Train_classifier/from_CSV/component.yaml deleted file mode 100644 index 54c58d840a2..00000000000 --- a/components/contrib/CatBoost/Train_classifier/from_CSV/component.yaml +++ /dev/null @@ -1,220 +0,0 @@ -name: Catboost train classifier -description: |- - Train a CatBoost classifier model. - - Args: - training_data_path: Path for the training data in CSV format. - model_path: Output path for the trained model in binary CatBoostModel format. - starting_model_path: Path for the existing trained model to start from. - label_column: Column containing the label data. - - loss_function: The metric to use in training and also selector of the machine learning - problem to solve. Default = 'Logloss' - num_iterations: Number of trees to add to the ensemble. - learning_rate: Step size shrinkage used in update to prevents overfitting. - Default value is selected automatically for binary classification with other parameters set to default. - In all other cases default is 0.03. - depth: Depth of a tree. All trees are the same depth. Default = 6 - random_seed: Random number seed. Default = 0 - - cat_features: A list of Categorical features (indices or names). - text_features: A list of Text features (indices or names). - additional_training_options: A dictionary with additional options to pass to CatBoostClassifier - - Outputs: - model: Trained model in binary CatBoostModel format. 
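Two details of the classifier trainer above are worth spelling out: arbitrary extra hyperparameters flow in through the additional_training_options dict, and fit() can warm-start from a previously saved model via init_model. A hedged, self-contained illustration with synthetic data and a made-up extra option:

from catboost import CatBoostClassifier

X = [[0.0, 1.0], [1.0, 0.0], [0.5, 0.5], [0.2, 0.9]]
y = [0, 1, 1, 0]

extra_options = {'l2_leaf_reg': 3.0}   # anything CatBoostClassifier accepts
first = CatBoostClassifier(iterations=10, depth=2, loss_function='Logloss',
                           random_seed=0, verbose=False, **extra_options)
first.fit(X, y)
first.save_model('model.cbm')

# Continue training from the saved model rather than starting from scratch,
# mirroring the component's starting_model_path input.
second = CatBoostClassifier(iterations=5, depth=2, loss_function='Logloss',
                            random_seed=0, verbose=False)
second.fit(X, y, init_model='model.cbm')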
- - Annotations: - author: Alexey Volkov -inputs: -- {name: training_data, type: CSV} -- {name: starting_model, type: CatBoostModel, optional: true} -- {name: label_column, type: Integer, default: '0', optional: true} -- {name: loss_function, type: String, default: Logloss, optional: true} -- {name: num_iterations, type: Integer, default: '500', optional: true} -- {name: learning_rate, type: Float, optional: true} -- {name: depth, type: Integer, default: '6', optional: true} -- {name: random_seed, type: Integer, default: '0', optional: true} -- {name: cat_features, type: JsonArray, optional: true} -- {name: text_features, type: JsonArray, optional: true} -- {name: additional_training_options, type: JsonObject, default: '{}', optional: true} -outputs: -- {name: model, type: CatBoostModel} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/CatBoost/Train_classifier/from_CSV/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'catboost==0.23' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet - --no-warn-script-location 'catboost==0.23' --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def catboost_train_classifier( - training_data_path, - model_path, - starting_model_path = None, - label_column = 0, - - loss_function = 'Logloss', - num_iterations = 500, - learning_rate = None, - depth = 6, - random_seed = 0, - - cat_features = None, - text_features = None, - - additional_training_options = {}, - ): - '''Train a CatBoost classifier model. - - Args: - training_data_path: Path for the training data in CSV format. - model_path: Output path for the trained model in binary CatBoostModel format. - starting_model_path: Path for the existing trained model to start from. - label_column: Column containing the label data. - - loss_function: The metric to use in training and also selector of the machine learning - problem to solve. Default = 'Logloss' - num_iterations: Number of trees to add to the ensemble. - learning_rate: Step size shrinkage used in update to prevents overfitting. - Default value is selected automatically for binary classification with other parameters set to default. - In all other cases default is 0.03. - depth: Depth of a tree. All trees are the same depth. Default = 6 - random_seed: Random number seed. Default = 0 - - cat_features: A list of Categorical features (indices or names). - text_features: A list of Text features (indices or names). - additional_training_options: A dictionary with additional options to pass to CatBoostClassifier - - Outputs: - model: Trained model in binary CatBoostModel format. 
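The JsonArray and JsonObject inputs listed above reach the container as plain strings; the generated wrapper decodes them with json.loads (hence type=json.loads in the parser below), which is what makes list- and dict-valued hyperparameters possible. The values here are purely illustrative:

import json

# What the wrapper effectively does with --cat-features and --additional-training-options:
cat_features = json.loads('["pickup_community_area", "dropoff_community_area"]')
extra_options = json.loads('{"l2_leaf_reg": 3.0, "border_count": 128}')
print(cat_features, extra_options)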
- - Annotations: - author: Alexey Volkov - ''' - import tempfile - from pathlib import Path - - from catboost import CatBoostClassifier, Pool - - column_descriptions = {label_column: 'Label'} - column_description_path = tempfile.NamedTemporaryFile(delete=False).name - with open(column_description_path, 'w') as column_description_file: - for idx, kind in column_descriptions.items(): - column_description_file.write('{}\t{}\n'.format(idx, kind)) - - train_data = Pool( - training_data_path, - column_description=column_description_path, - has_header=True, - delimiter=',', - ) - - model = CatBoostClassifier( - iterations=num_iterations, - depth=depth, - learning_rate=learning_rate, - loss_function=loss_function, - random_seed=random_seed, - verbose=True, - **additional_training_options, - ) - - model.fit( - train_data, - cat_features=cat_features, - text_features=text_features, - init_model=starting_model_path, - #verbose=False, - #plot=True, - ) - Path(model_path).parent.mkdir(parents=True, exist_ok=True) - model.save_model(model_path) - - import json - import argparse - _parser = argparse.ArgumentParser(prog='Catboost train classifier', description="Train a CatBoost classifier model.\n\n Args:\n training_data_path: Path for the training data in CSV format.\n model_path: Output path for the trained model in binary CatBoostModel format.\n starting_model_path: Path for the existing trained model to start from.\n label_column: Column containing the label data.\n\n loss_function: The metric to use in training and also selector of the machine learning\n problem to solve. Default = 'Logloss'\n num_iterations: Number of trees to add to the ensemble.\n learning_rate: Step size shrinkage used in update to prevents overfitting.\n Default value is selected automatically for binary classification with other parameters set to default.\n In all other cases default is 0.03.\n depth: Depth of a tree. All trees are the same depth. Default = 6\n random_seed: Random number seed. 
Default = 0\n\n cat_features: A list of Categorical features (indices or names).\n text_features: A list of Text features (indices or names).\n additional_training_options: A dictionary with additional options to pass to CatBoostClassifier\n\n Outputs:\n model: Trained model in binary CatBoostModel format.\n\n Annotations:\n author: Alexey Volkov ") - _parser.add_argument("--training-data", dest="training_data_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--starting-model", dest="starting_model_path", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--label-column", dest="label_column", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--loss-function", dest="loss_function", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--num-iterations", dest="num_iterations", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--learning-rate", dest="learning_rate", type=float, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--depth", dest="depth", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--random-seed", dest="random_seed", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--cat-features", dest="cat_features", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--text-features", dest="text_features", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--additional-training-options", dest="additional_training_options", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = catboost_train_classifier(**_parsed_args) - args: - - --training-data - - {inputPath: training_data} - - if: - cond: {isPresent: starting_model} - then: - - --starting-model - - {inputPath: starting_model} - - if: - cond: {isPresent: label_column} - then: - - --label-column - - {inputValue: label_column} - - if: - cond: {isPresent: loss_function} - then: - - --loss-function - - {inputValue: loss_function} - - if: - cond: {isPresent: num_iterations} - then: - - --num-iterations - - {inputValue: num_iterations} - - if: - cond: {isPresent: learning_rate} - then: - - --learning-rate - - {inputValue: learning_rate} - - if: - cond: {isPresent: depth} - then: - - --depth - - {inputValue: depth} - - if: - cond: {isPresent: random_seed} - then: - - --random-seed - - {inputValue: random_seed} - - if: - cond: {isPresent: cat_features} - then: - - --cat-features - - {inputValue: cat_features} - - if: - cond: {isPresent: text_features} - then: - - --text-features - - {inputValue: text_features} - - if: - cond: {isPresent: additional_training_options} - then: - - --additional-training-options - - {inputValue: additional_training_options} - - --model - - {outputPath: model} diff --git a/components/contrib/CatBoost/Train_regression/from_CSV/component.py b/components/contrib/CatBoost/Train_regression/from_CSV/component.py deleted file mode 100644 index 84eb738376f..00000000000 --- a/components/contrib/CatBoost/Train_regression/from_CSV/component.py +++ /dev/null @@ -1,95 +0,0 @@ -from kfp.components import InputPath, OutputPath, create_component_from_func - -def catboost_train_regression( - training_data_path: InputPath('CSV'), - model_path: OutputPath('CatBoostModel'), 
- starting_model_path: InputPath('CatBoostModel') = None, - label_column: int = 0, - - loss_function: str = 'RMSE', - num_iterations: int = 500, - learning_rate: float = None, - depth: int = 6, - random_seed: int = 0, - - cat_features: list = None, - - additional_training_options: dict = {}, -): - '''Train a CatBoost classifier model. - - Args: - training_data_path: Path for the training data in CSV format. - model_path: Output path for the trained model in binary CatBoostModel format. - starting_model_path: Path for the existing trained model to start from. - label_column: Column containing the label data. - - loss_function: The metric to use in training and also selector of the machine learning - problem to solve. Default = 'RMSE'. Possible values: - 'RMSE', 'MAE', 'Quantile:alpha=value', 'LogLinQuantile:alpha=value', 'Poisson', 'MAPE', 'Lq:q=value' - num_iterations: Number of trees to add to the ensemble. - learning_rate: Step size shrinkage used in update to prevents overfitting. - Default value is selected automatically for binary classification with other parameters set to default. - In all other cases default is 0.03. - depth: Depth of a tree. All trees are the same depth. Default = 6 - random_seed: Random number seed. Default = 0 - - cat_features: A list of Categorical features (indices or names). - additional_training_options: A dictionary with additional options to pass to CatBoostRegressor - - Outputs: - model: Trained model in binary CatBoostModel format. - - Annotations: - author: Alexey Volkov - ''' - import tempfile - from pathlib import Path - - from catboost import CatBoostRegressor, Pool - - column_descriptions = {label_column: 'Label'} - column_description_path = tempfile.NamedTemporaryFile(delete=False).name - with open(column_description_path, 'w') as column_description_file: - for idx, kind in column_descriptions.items(): - column_description_file.write('{}\t{}\n'.format(idx, kind)) - - train_data = Pool( - training_data_path, - column_description=column_description_path, - has_header=True, - delimiter=',', - ) - - model = CatBoostRegressor( - iterations=num_iterations, - depth=depth, - learning_rate=learning_rate, - loss_function=loss_function, - random_seed=random_seed, - verbose=True, - **additional_training_options, - ) - - model.fit( - train_data, - cat_features=cat_features, - init_model=starting_model_path, - #verbose=False, - #plot=True, - ) - Path(model_path).parent.mkdir(parents=True, exist_ok=True) - model.save_model(model_path) - - -if __name__ == '__main__': - catboost_train_regression_op = create_component_from_func( - catboost_train_regression, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=['catboost==0.23'], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/CatBoost/Train_regression/from_CSV/component.yaml", - }, - ) diff --git a/components/contrib/CatBoost/Train_regression/from_CSV/component.yaml b/components/contrib/CatBoost/Train_regression/from_CSV/component.yaml deleted file mode 100644 index 8361049a7ca..00000000000 --- a/components/contrib/CatBoost/Train_regression/from_CSV/component.yaml +++ /dev/null @@ -1,211 +0,0 @@ -name: Catboost train regression -description: |- - Train a CatBoost classifier model. - - Args: - training_data_path: Path for the training data in CSV format. - model_path: Output path for the trained model in binary CatBoostModel format. 
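The regression trainer passes loss_function straight through to CatBoostRegressor, so parametrized objectives are written as strings, as the 'Quantile:alpha=value' notation in the docstring hints. A small runnable sketch with synthetic data:

from catboost import CatBoostRegressor

X = [[0.0], [1.0], [2.0], [3.0]]
y = [0.1, 0.9, 2.1, 2.9]

# 90th-percentile quantile regression; 'RMSE', 'MAE', 'Poisson', ... work the same way.
model = CatBoostRegressor(iterations=20, depth=2, random_seed=0, verbose=False,
                          loss_function='Quantile:alpha=0.9')
model.fit(X, y)
print(model.predict([[1.5]]))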
- starting_model_path: Path for the existing trained model to start from. - label_column: Column containing the label data. - - loss_function: The metric to use in training and also selector of the machine learning - problem to solve. Default = 'RMSE'. Possible values: - 'RMSE', 'MAE', 'Quantile:alpha=value', 'LogLinQuantile:alpha=value', 'Poisson', 'MAPE', 'Lq:q=value' - num_iterations: Number of trees to add to the ensemble. - learning_rate: Step size shrinkage used in update to prevents overfitting. - Default value is selected automatically for binary classification with other parameters set to default. - In all other cases default is 0.03. - depth: Depth of a tree. All trees are the same depth. Default = 6 - random_seed: Random number seed. Default = 0 - - cat_features: A list of Categorical features (indices or names). - additional_training_options: A dictionary with additional options to pass to CatBoostRegressor - - Outputs: - model: Trained model in binary CatBoostModel format. - - Annotations: - author: Alexey Volkov -inputs: -- {name: training_data, type: CSV} -- {name: starting_model, type: CatBoostModel, optional: true} -- {name: label_column, type: Integer, default: '0', optional: true} -- {name: loss_function, type: String, default: RMSE, optional: true} -- {name: num_iterations, type: Integer, default: '500', optional: true} -- {name: learning_rate, type: Float, optional: true} -- {name: depth, type: Integer, default: '6', optional: true} -- {name: random_seed, type: Integer, default: '0', optional: true} -- {name: cat_features, type: JsonArray, optional: true} -- {name: additional_training_options, type: JsonObject, default: '{}', optional: true} -outputs: -- {name: model, type: CatBoostModel} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/CatBoost/Train_regression/from_CSV/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'catboost==0.23' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet - --no-warn-script-location 'catboost==0.23' --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def catboost_train_regression( - training_data_path, - model_path, - starting_model_path = None, - label_column = 0, - - loss_function = 'RMSE', - num_iterations = 500, - learning_rate = None, - depth = 6, - random_seed = 0, - - cat_features = None, - - additional_training_options = {}, - ): - '''Train a CatBoost classifier model. - - Args: - training_data_path: Path for the training data in CSV format. - model_path: Output path for the trained model in binary CatBoostModel format. - starting_model_path: Path for the existing trained model to start from. - label_column: Column containing the label data. - - loss_function: The metric to use in training and also selector of the machine learning - problem to solve. Default = 'RMSE'. Possible values: - 'RMSE', 'MAE', 'Quantile:alpha=value', 'LogLinQuantile:alpha=value', 'Poisson', 'MAPE', 'Lq:q=value' - num_iterations: Number of trees to add to the ensemble. - learning_rate: Step size shrinkage used in update to prevents overfitting. - Default value is selected automatically for binary classification with other parameters set to default. 
- In all other cases default is 0.03. - depth: Depth of a tree. All trees are the same depth. Default = 6 - random_seed: Random number seed. Default = 0 - - cat_features: A list of Categorical features (indices or names). - additional_training_options: A dictionary with additional options to pass to CatBoostRegressor - - Outputs: - model: Trained model in binary CatBoostModel format. - - Annotations: - author: Alexey Volkov - ''' - import tempfile - from pathlib import Path - - from catboost import CatBoostRegressor, Pool - - column_descriptions = {label_column: 'Label'} - column_description_path = tempfile.NamedTemporaryFile(delete=False).name - with open(column_description_path, 'w') as column_description_file: - for idx, kind in column_descriptions.items(): - column_description_file.write('{}\t{}\n'.format(idx, kind)) - - train_data = Pool( - training_data_path, - column_description=column_description_path, - has_header=True, - delimiter=',', - ) - - model = CatBoostRegressor( - iterations=num_iterations, - depth=depth, - learning_rate=learning_rate, - loss_function=loss_function, - random_seed=random_seed, - verbose=True, - **additional_training_options, - ) - - model.fit( - train_data, - cat_features=cat_features, - init_model=starting_model_path, - #verbose=False, - #plot=True, - ) - Path(model_path).parent.mkdir(parents=True, exist_ok=True) - model.save_model(model_path) - - import json - import argparse - _parser = argparse.ArgumentParser(prog='Catboost train regression', description="Train a CatBoost classifier model.\n\n Args:\n training_data_path: Path for the training data in CSV format.\n model_path: Output path for the trained model in binary CatBoostModel format.\n starting_model_path: Path for the existing trained model to start from.\n label_column: Column containing the label data.\n\n loss_function: The metric to use in training and also selector of the machine learning\n problem to solve. Default = 'RMSE'. Possible values:\n 'RMSE', 'MAE', 'Quantile:alpha=value', 'LogLinQuantile:alpha=value', 'Poisson', 'MAPE', 'Lq:q=value'\n num_iterations: Number of trees to add to the ensemble.\n learning_rate: Step size shrinkage used in update to prevents overfitting.\n Default value is selected automatically for binary classification with other parameters set to default.\n In all other cases default is 0.03.\n depth: Depth of a tree. All trees are the same depth. Default = 6\n random_seed: Random number seed. 
Default = 0\n\n cat_features: A list of Categorical features (indices or names).\n additional_training_options: A dictionary with additional options to pass to CatBoostRegressor\n\n Outputs:\n model: Trained model in binary CatBoostModel format.\n\n Annotations:\n author: Alexey Volkov ") - _parser.add_argument("--training-data", dest="training_data_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--starting-model", dest="starting_model_path", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--label-column", dest="label_column", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--loss-function", dest="loss_function", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--num-iterations", dest="num_iterations", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--learning-rate", dest="learning_rate", type=float, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--depth", dest="depth", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--random-seed", dest="random_seed", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--cat-features", dest="cat_features", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--additional-training-options", dest="additional_training_options", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = catboost_train_regression(**_parsed_args) - args: - - --training-data - - {inputPath: training_data} - - if: - cond: {isPresent: starting_model} - then: - - --starting-model - - {inputPath: starting_model} - - if: - cond: {isPresent: label_column} - then: - - --label-column - - {inputValue: label_column} - - if: - cond: {isPresent: loss_function} - then: - - --loss-function - - {inputValue: loss_function} - - if: - cond: {isPresent: num_iterations} - then: - - --num-iterations - - {inputValue: num_iterations} - - if: - cond: {isPresent: learning_rate} - then: - - --learning-rate - - {inputValue: learning_rate} - - if: - cond: {isPresent: depth} - then: - - --depth - - {inputValue: depth} - - if: - cond: {isPresent: random_seed} - then: - - --random-seed - - {inputValue: random_seed} - - if: - cond: {isPresent: cat_features} - then: - - --cat-features - - {inputValue: cat_features} - - if: - cond: {isPresent: additional_training_options} - then: - - --additional-training-options - - {inputValue: additional_training_options} - - --model - - {outputPath: model} diff --git a/components/contrib/CatBoost/_samples/sample_pipeline.py b/components/contrib/CatBoost/_samples/sample_pipeline.py deleted file mode 100644 index 64267b801e5..00000000000 --- a/components/contrib/CatBoost/_samples/sample_pipeline.py +++ /dev/null @@ -1,76 +0,0 @@ -import kfp -from kfp import components - - -chicago_taxi_dataset_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e3337b8bdcd63636934954e592d4b32c95b49129/components/datasets/Chicago%20Taxi/component.yaml') -pandas_transform_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e69a6694/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml') - -catboost_train_classifier_op = 
components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/f97ad2/components/CatBoost/Train_classifier/from_CSV/component.yaml') -catboost_train_regression_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/f97ad2/components/CatBoost/Train_regression/from_CSV/component.yaml') -catboost_predict_classes_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/f97ad2/components/CatBoost/Predict_classes/from_CSV/component.yaml') -catboost_predict_values_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/f97ad2/components/CatBoost/Predict_values/from_CSV/component.yaml') -catboost_predict_class_probabilities_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/f97ad2/components/CatBoost/Predict_class_probabilities/from_CSV/component.yaml') -catboost_to_apple_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/f97ad2/components/CatBoost/convert_CatBoostModel_to_AppleCoreMLModel/component.yaml') -catboost_to_onnx_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/f97ad2/components/CatBoost/convert_CatBoostModel_to_ONNX/component.yaml') - - -def catboost_pipeline(): - training_data_in_csv = chicago_taxi_dataset_op( - where='trip_start_timestamp >= "2019-01-01" AND trip_start_timestamp < "2019-02-01"', - select='tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total', - limit=10000, - ).output - - training_data_for_classification_in_csv = pandas_transform_csv_op( - table=training_data_in_csv, - transform_code='''df.insert(0, "was_tipped", df["tips"] > 0); del df["tips"]''', - ).output - - catboost_train_regression_task = catboost_train_regression_op( - training_data=training_data_in_csv, - loss_function='RMSE', - label_column=0, - num_iterations=200, - ) - - regression_model = catboost_train_regression_task.outputs['model'] - - catboost_train_classifier_task = catboost_train_classifier_op( - training_data=training_data_for_classification_in_csv, - label_column=0, - num_iterations=200, - ) - - classification_model = catboost_train_classifier_task.outputs['model'] - - evaluation_data_for_regression_in_csv = training_data_in_csv - evaluation_data_for_classification_in_csv = training_data_for_classification_in_csv - - catboost_predict_values_op( - data=evaluation_data_for_regression_in_csv, - model=regression_model, - label_column=0, - ) - - catboost_predict_classes_op( - data=evaluation_data_for_classification_in_csv, - model=classification_model, - label_column=0, - ) - - catboost_predict_class_probabilities_op( - data=evaluation_data_for_classification_in_csv, - model=classification_model, - label_column=0, - ) - - catboost_to_apple_op(regression_model) - catboost_to_apple_op(classification_model) - - catboost_to_onnx_op(regression_model) - catboost_to_onnx_op(classification_model) - - -if __name__ == '__main__': - kfp_endpoint=None - kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func(catboost_pipeline, arguments={}) diff --git a/components/contrib/CatBoost/convert_CatBoostModel_to_AppleCoreMLModel/component.py b/components/contrib/CatBoost/convert_CatBoostModel_to_AppleCoreMLModel/component.py deleted file mode 100644 index c018c994bc0..00000000000 --- a/components/contrib/CatBoost/convert_CatBoostModel_to_AppleCoreMLModel/component.py +++ /dev/null @@ -1,41 +0,0 @@ 
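The sample pipeline above is submitted straight from a Python function rather than from a compiled package. A minimal sketch of that submission path with the v1 client; the host is a placeholder (for example a port-forwarded API server) and the echo component is a stand-in, not part of this repository:

import kfp
from kfp import components

echo_op = components.load_component_from_text('''
name: Echo
implementation:
  container:
    image: alpine
    command: [echo, hello]
''')

def tiny_pipeline():
    echo_op()

client = kfp.Client(host='http://localhost:8888')   # placeholder endpoint
client.create_run_from_pipeline_func(tiny_pipeline, arguments={})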
-from kfp.components import InputPath, OutputPath, create_component_from_func - -def convert_CatBoostModel_to_AppleCoreMLModel( - model_path: InputPath('CatBoostModel'), - converted_model_path: OutputPath('AppleCoreMLModel'), -): - '''Convert CatBoost model to Apple CoreML format. - - Args: - model_path: Path of a trained model in binary CatBoost model format. - converted_model_path: Output path for the converted model. - - Outputs: - converted_model: Model in Apple CoreML format. - - Annotations: - author: Alexey Volkov - ''' - from catboost import CatBoost - - model = CatBoost() - model.load_model(model_path) - model.save_model( - converted_model_path, - format="coreml", - # export_parameters={'prediction_type': 'probability'}, - # export_parameters={'prediction_type': 'raw'}, - ) - - -if __name__ == '__main__': - create_component_from_func( - convert_CatBoostModel_to_AppleCoreMLModel, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=['catboost==0.22'], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/CatBoost/convert_CatBoostModel_to_AppleCoreMLModel/component.yaml", - }, - ) diff --git a/components/contrib/CatBoost/convert_CatBoostModel_to_AppleCoreMLModel/component.yaml b/components/contrib/CatBoost/convert_CatBoostModel_to_AppleCoreMLModel/component.yaml deleted file mode 100644 index c2ab79130a5..00000000000 --- a/components/contrib/CatBoost/convert_CatBoostModel_to_AppleCoreMLModel/component.yaml +++ /dev/null @@ -1,78 +0,0 @@ -name: Convert CatBoostModel to AppleCoreMLModel -description: |- - Convert CatBoost model to Apple CoreML format. - - Args: - model_path: Path of a trained model in binary CatBoost model format. - converted_model_path: Output path for the converted model. - - Outputs: - converted_model: Model in Apple CoreML format. - - Annotations: - author: Alexey Volkov -inputs: -- {name: model, type: CatBoostModel} -outputs: -- {name: converted_model, type: AppleCoreMLModel} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/CatBoost/convert_CatBoostModel_to_AppleCoreMLModel/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'catboost==0.22' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet - --no-warn-script-location 'catboost==0.22' --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def convert_CatBoostModel_to_AppleCoreMLModel( - model_path, - converted_model_path, - ): - '''Convert CatBoost model to Apple CoreML format. - - Args: - model_path: Path of a trained model in binary CatBoost model format. - converted_model_path: Output path for the converted model. - - Outputs: - converted_model: Model in Apple CoreML format. 
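Both converter components rely on the same CatBoost capability: a loaded model can be re-serialized into another format directly through save_model, with optional export_parameters as hinted by the commented-out lines above. A compact sketch with placeholder paths:

from catboost import CatBoost

model = CatBoost()
model.load_model('model.cbm')                        # placeholder input path
model.save_model('model.mlmodel', format='coreml')   # Apple CoreML
model.save_model('model.onnx', format='onnx')        # ONNX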
- - Annotations: - author: Alexey Volkov - ''' - from catboost import CatBoost - - model = CatBoost() - model.load_model(model_path) - model.save_model( - converted_model_path, - format="coreml", - # export_parameters={'prediction_type': 'probability'}, - # export_parameters={'prediction_type': 'raw'}, - ) - - import argparse - _parser = argparse.ArgumentParser(prog='Convert CatBoostModel to AppleCoreMLModel', description='Convert CatBoost model to Apple CoreML format.\n\n Args:\n model_path: Path of a trained model in binary CatBoost model format.\n converted_model_path: Output path for the converted model.\n\n Outputs:\n converted_model: Model in Apple CoreML format.\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--converted-model", dest="converted_model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = convert_CatBoostModel_to_AppleCoreMLModel(**_parsed_args) - args: - - --model - - {inputPath: model} - - --converted-model - - {outputPath: converted_model} diff --git a/components/contrib/CatBoost/convert_CatBoostModel_to_ONNX/component.py b/components/contrib/CatBoost/convert_CatBoostModel_to_ONNX/component.py deleted file mode 100644 index e4cbbddeadc..00000000000 --- a/components/contrib/CatBoost/convert_CatBoostModel_to_ONNX/component.py +++ /dev/null @@ -1,36 +0,0 @@ -from kfp.components import InputPath, OutputPath, create_component_from_func - -def convert_CatBoostModel_to_ONNX( - model_path: InputPath('CatBoostModel'), - converted_model_path: OutputPath('ONNX'), -): - '''Convert CatBoost model to ONNX format. - - Args: - model_path: Path of a trained model in binary CatBoost model format. - converted_model_path: Output path for the converted model. - - Outputs: - converted_model: Model in ONNX format. - - Annotations: - author: Alexey Volkov - ''' - from catboost import CatBoost - - model = CatBoost() - model.load_model(model_path) - model.save_model(converted_model_path, format="onnx") - - -if __name__ == '__main__': - create_component_from_func( - convert_CatBoostModel_to_ONNX, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=['catboost==0.22'], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/CatBoost/convert_CatBoostModel_to_ONNX/component.yaml", - }, - ) diff --git a/components/contrib/CatBoost/convert_CatBoostModel_to_ONNX/component.yaml b/components/contrib/CatBoost/convert_CatBoostModel_to_ONNX/component.yaml deleted file mode 100644 index 0d9cc302cb2..00000000000 --- a/components/contrib/CatBoost/convert_CatBoostModel_to_ONNX/component.yaml +++ /dev/null @@ -1,73 +0,0 @@ -name: Convert CatBoostModel to ONNX -description: |- - Convert CatBoost model to ONNX format. - - Args: - model_path: Path of a trained model in binary CatBoost model format. - converted_model_path: Output path for the converted model. - - Outputs: - converted_model: Model in ONNX format. 
- - Annotations: - author: Alexey Volkov -inputs: -- {name: model, type: CatBoostModel} -outputs: -- {name: converted_model, type: ONNX} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/CatBoost/convert_CatBoostModel_to_ONNX/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'catboost==0.22' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet - --no-warn-script-location 'catboost==0.22' --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def convert_CatBoostModel_to_ONNX( - model_path, - converted_model_path, - ): - '''Convert CatBoost model to ONNX format. - - Args: - model_path: Path of a trained model in binary CatBoost model format. - converted_model_path: Output path for the converted model. - - Outputs: - converted_model: Model in ONNX format. - - Annotations: - author: Alexey Volkov - ''' - from catboost import CatBoost - - model = CatBoost() - model.load_model(model_path) - model.save_model(converted_model_path, format="onnx") - - import argparse - _parser = argparse.ArgumentParser(prog='Convert CatBoostModel to ONNX', description='Convert CatBoost model to ONNX format.\n\n Args:\n model_path: Path of a trained model in binary CatBoost model format.\n converted_model_path: Output path for the converted model.\n\n Outputs:\n converted_model: Model in ONNX format.\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--converted-model", dest="converted_model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = convert_CatBoostModel_to_ONNX(**_parsed_args) - args: - - --model - - {inputPath: model} - - --converted-model - - {outputPath: converted_model} diff --git a/components/contrib/XGBoost/Cross_validation_for_regression/from_CSV/component.py b/components/contrib/XGBoost/Cross_validation_for_regression/from_CSV/component.py deleted file mode 100644 index 17e97f1457d..00000000000 --- a/components/contrib/XGBoost/Cross_validation_for_regression/from_CSV/component.py +++ /dev/null @@ -1,71 +0,0 @@ -from collections import OrderedDict -from kfp import components - - -split_table_into_folds_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e9b4b29b22a5120daf95b581b0392cd461a906f0/components/dataset_manipulation/split_data_into_folds/in_CSV/component.yaml') -xgboost_train_on_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Train/component.yaml') -xgboost_predict_on_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Predict/component.yaml') -pandas_transform_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/6162d55998b176b50267d351241100bb0ee715bc/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml') -drop_header_op = 
components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/02c9638287468c849632cf9f7885b51de4c66f86/components/tables/Remove_header/component.yaml') -calculate_regression_metrics_from_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/7da1ac9464b4b3e7d95919faa2f1107a9635b7e4/components/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml') -aggregate_regression_metrics_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/7ea9363fe201918d419fecdc00d1275e657ff712/components/ml_metrics/Aggregate_regression_metrics/component.yaml') - - -def xgboost_5_fold_cross_validation_for_regression( - data: 'CSV', - label_column: int = 0, - objective: str = 'reg:squarederror', - num_iterations: int = 200, -): - folds = split_table_into_folds_op(data).outputs - - fold_metrics = {} - for i in range(1, 6): - training_data = folds['train_' + str(i)] - testing_data = folds['test_' + str(i)] - model = xgboost_train_on_csv_op( - training_data=training_data, - label_column=label_column, - objective=objective, - num_iterations=num_iterations, - ).outputs['model'] - - predictions = xgboost_predict_on_csv_op( - data=testing_data, - model=model, - label_column=label_column, - ).output - - true_values_table = pandas_transform_csv_op( - table=testing_data, - transform_code='df = df[["tips"]]', - ).output - - true_values = drop_header_op(true_values_table).output - - metrics = calculate_regression_metrics_from_csv_op( - true_values=true_values, - predicted_values=predictions, - ).outputs['metrics'] - - fold_metrics['metrics_' + str(i)] = metrics - - aggregated_metrics_task = aggregate_regression_metrics_op(**fold_metrics) - - return OrderedDict([ - ('mean_absolute_error', aggregated_metrics_task.outputs['mean_absolute_error']), - ('mean_squared_error', aggregated_metrics_task.outputs['mean_squared_error']), - ('root_mean_squared_error', aggregated_metrics_task.outputs['root_mean_squared_error']), - ('metrics', aggregated_metrics_task.outputs['metrics']), - ]) - - -if __name__ == '__main__': - xgboost_5_fold_cross_validation_for_regression_op = components.create_graph_component_from_pipeline_func( - xgboost_5_fold_cross_validation_for_regression, - output_component_file='component.yaml', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/XGBoost/Cross_validation_for_regression/from_CSV/component.yaml", - }, - ) diff --git a/components/contrib/XGBoost/Cross_validation_for_regression/from_CSV/component.yaml b/components/contrib/XGBoost/Cross_validation_for_regression/from_CSV/component.yaml deleted file mode 100644 index f749f7aacaa..00000000000 --- a/components/contrib/XGBoost/Cross_validation_for_regression/from_CSV/component.yaml +++ /dev/null @@ -1,276 +0,0 @@ -name: Xgboost 5 fold cross validation for regression -inputs: -- {name: data, type: CSV} -- {name: label_column, type: Integer, default: '0', optional: true} -- {name: objective, type: String, default: 'reg:squarederror', optional: true} -- {name: num_iterations, type: Integer, default: '200', optional: true} -outputs: -- {name: mean_absolute_error, type: Float} -- {name: mean_squared_error, type: Float} -- {name: root_mean_squared_error, type: Float} -- {name: metrics, type: JsonObject} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 
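One step in the fold loop that is easy to miss: before metrics are computed, the test table is reduced to its 'tips' column and the header is stripped so that the true values line up row-for-row with the prediction file. A runnable pandas equivalent of that pair of steps (the tiny frame is made up):

import io

import pandas as pd

table = pd.read_csv(io.StringIO('tips,trip_miles\n1.5,2.0\n0.0,5.3\n'))
df = table[['tips']]                                   # the transform_code used in the loop
true_values = df.to_csv(index=False, header=False)     # header removed for the metric step
print(true_values)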
'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/XGBoost/Cross_validation_for_regression/from_CSV/component.yaml' -implementation: - graph: - tasks: - Split table into folds: - componentRef: {digest: 9956223bcecc7294ca1afac39b60ada4a935a571d817c3dfbf2ea4a211afe3d1, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/e9b4b29b22a5120daf95b581b0392cd461a906f0/components/dataset_manipulation/split_data_into_folds/in_CSV/component.yaml'} - arguments: - table: - graphInput: {inputName: data} - Xgboost train: - componentRef: {digest: 09b80053da29f8f51575b42e5d2e8ad4b7bdcc92a02c3744e189b1f597006b38, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Train/component.yaml'} - arguments: - training_data: - taskOutput: {outputName: train_1, taskId: Split table into folds, type: CSV} - label_column: - graphInput: {inputName: label_column} - num_iterations: - graphInput: {inputName: num_iterations} - objective: - graphInput: {inputName: objective} - Xgboost predict: - componentRef: {digest: ecdfaf32cff15b6abc3d0dd80365ce00577f1a19a058fbe201f515431cea1357, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Predict/component.yaml'} - arguments: - data: - taskOutput: {outputName: test_1, taskId: Split table into folds, type: CSV} - model: - taskOutput: {outputName: model, taskId: Xgboost train, type: XGBoostModel} - label_column: - graphInput: {inputName: label_column} - Pandas Transform DataFrame in CSV format: - componentRef: {digest: 58dc88349157bf128021708c316ce4eb60bc1de0a5a7dd3af45fabac3276d510, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/6162d55998b176b50267d351241100bb0ee715bc/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml'} - arguments: - table: - taskOutput: {outputName: test_1, taskId: Split table into folds, type: CSV} - transform_code: df = df[["tips"]] - Remove header: - componentRef: {digest: ba35ffea863855b956c3c50aefa0420ba3823949a6c059e6e3971cde960dc5a3, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/02c9638287468c849632cf9f7885b51de4c66f86/components/tables/Remove_header/component.yaml'} - arguments: - table: - taskOutput: {outputName: transformed_table, taskId: Pandas Transform DataFrame - in CSV format, type: CSV} - Calculate regression metrics from csv: - componentRef: {digest: e3ecbfeb18032820edfee4255e2fb6d15d15ed224e166519d5e528e12053a995, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/7da1ac9464b4b3e7d95919faa2f1107a9635b7e4/components/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml'} - arguments: - true_values: - taskOutput: {outputName: table, taskId: Remove header} - predicted_values: - taskOutput: {outputName: predictions, taskId: Xgboost predict, type: Text} - Xgboost train 2: - componentRef: {digest: 09b80053da29f8f51575b42e5d2e8ad4b7bdcc92a02c3744e189b1f597006b38, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Train/component.yaml'} - arguments: - training_data: - taskOutput: {outputName: train_2, taskId: Split table into folds, type: CSV} - label_column: - graphInput: {inputName: label_column} - num_iterations: - graphInput: {inputName: num_iterations} - objective: - graphInput: {inputName: objective} - Xgboost predict 2: - componentRef: {digest: ecdfaf32cff15b6abc3d0dd80365ce00577f1a19a058fbe201f515431cea1357, - url: 
'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Predict/component.yaml'} - arguments: - data: - taskOutput: {outputName: test_2, taskId: Split table into folds, type: CSV} - model: - taskOutput: {outputName: model, taskId: Xgboost train 2, type: XGBoostModel} - label_column: - graphInput: {inputName: label_column} - Pandas Transform DataFrame in CSV format 2: - componentRef: {digest: 58dc88349157bf128021708c316ce4eb60bc1de0a5a7dd3af45fabac3276d510, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/6162d55998b176b50267d351241100bb0ee715bc/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml'} - arguments: - table: - taskOutput: {outputName: test_2, taskId: Split table into folds, type: CSV} - transform_code: df = df[["tips"]] - Remove header 2: - componentRef: {digest: ba35ffea863855b956c3c50aefa0420ba3823949a6c059e6e3971cde960dc5a3, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/02c9638287468c849632cf9f7885b51de4c66f86/components/tables/Remove_header/component.yaml'} - arguments: - table: - taskOutput: {outputName: transformed_table, taskId: Pandas Transform DataFrame - in CSV format 2, type: CSV} - Calculate regression metrics from csv 2: - componentRef: {digest: e3ecbfeb18032820edfee4255e2fb6d15d15ed224e166519d5e528e12053a995, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/7da1ac9464b4b3e7d95919faa2f1107a9635b7e4/components/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml'} - arguments: - true_values: - taskOutput: {outputName: table, taskId: Remove header 2} - predicted_values: - taskOutput: {outputName: predictions, taskId: Xgboost predict 2, type: Text} - Xgboost train 3: - componentRef: {digest: 09b80053da29f8f51575b42e5d2e8ad4b7bdcc92a02c3744e189b1f597006b38, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Train/component.yaml'} - arguments: - training_data: - taskOutput: {outputName: train_3, taskId: Split table into folds, type: CSV} - label_column: - graphInput: {inputName: label_column} - num_iterations: - graphInput: {inputName: num_iterations} - objective: - graphInput: {inputName: objective} - Xgboost predict 3: - componentRef: {digest: ecdfaf32cff15b6abc3d0dd80365ce00577f1a19a058fbe201f515431cea1357, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Predict/component.yaml'} - arguments: - data: - taskOutput: {outputName: test_3, taskId: Split table into folds, type: CSV} - model: - taskOutput: {outputName: model, taskId: Xgboost train 3, type: XGBoostModel} - label_column: - graphInput: {inputName: label_column} - Pandas Transform DataFrame in CSV format 3: - componentRef: {digest: 58dc88349157bf128021708c316ce4eb60bc1de0a5a7dd3af45fabac3276d510, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/6162d55998b176b50267d351241100bb0ee715bc/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml'} - arguments: - table: - taskOutput: {outputName: test_3, taskId: Split table into folds, type: CSV} - transform_code: df = df[["tips"]] - Remove header 3: - componentRef: {digest: ba35ffea863855b956c3c50aefa0420ba3823949a6c059e6e3971cde960dc5a3, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/02c9638287468c849632cf9f7885b51de4c66f86/components/tables/Remove_header/component.yaml'} - arguments: - table: - taskOutput: {outputName: transformed_table, taskId: Pandas Transform 
DataFrame - in CSV format 3, type: CSV} - Calculate regression metrics from csv 3: - componentRef: {digest: e3ecbfeb18032820edfee4255e2fb6d15d15ed224e166519d5e528e12053a995, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/7da1ac9464b4b3e7d95919faa2f1107a9635b7e4/components/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml'} - arguments: - true_values: - taskOutput: {outputName: table, taskId: Remove header 3} - predicted_values: - taskOutput: {outputName: predictions, taskId: Xgboost predict 3, type: Text} - Xgboost train 4: - componentRef: {digest: 09b80053da29f8f51575b42e5d2e8ad4b7bdcc92a02c3744e189b1f597006b38, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Train/component.yaml'} - arguments: - training_data: - taskOutput: {outputName: train_4, taskId: Split table into folds, type: CSV} - label_column: - graphInput: {inputName: label_column} - num_iterations: - graphInput: {inputName: num_iterations} - objective: - graphInput: {inputName: objective} - Xgboost predict 4: - componentRef: {digest: ecdfaf32cff15b6abc3d0dd80365ce00577f1a19a058fbe201f515431cea1357, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Predict/component.yaml'} - arguments: - data: - taskOutput: {outputName: test_4, taskId: Split table into folds, type: CSV} - model: - taskOutput: {outputName: model, taskId: Xgboost train 4, type: XGBoostModel} - label_column: - graphInput: {inputName: label_column} - Pandas Transform DataFrame in CSV format 4: - componentRef: {digest: 58dc88349157bf128021708c316ce4eb60bc1de0a5a7dd3af45fabac3276d510, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/6162d55998b176b50267d351241100bb0ee715bc/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml'} - arguments: - table: - taskOutput: {outputName: test_4, taskId: Split table into folds, type: CSV} - transform_code: df = df[["tips"]] - Remove header 4: - componentRef: {digest: ba35ffea863855b956c3c50aefa0420ba3823949a6c059e6e3971cde960dc5a3, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/02c9638287468c849632cf9f7885b51de4c66f86/components/tables/Remove_header/component.yaml'} - arguments: - table: - taskOutput: {outputName: transformed_table, taskId: Pandas Transform DataFrame - in CSV format 4, type: CSV} - Calculate regression metrics from csv 4: - componentRef: {digest: e3ecbfeb18032820edfee4255e2fb6d15d15ed224e166519d5e528e12053a995, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/7da1ac9464b4b3e7d95919faa2f1107a9635b7e4/components/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml'} - arguments: - true_values: - taskOutput: {outputName: table, taskId: Remove header 4} - predicted_values: - taskOutput: {outputName: predictions, taskId: Xgboost predict 4, type: Text} - Xgboost train 5: - componentRef: {digest: 09b80053da29f8f51575b42e5d2e8ad4b7bdcc92a02c3744e189b1f597006b38, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Train/component.yaml'} - arguments: - training_data: - taskOutput: {outputName: train_5, taskId: Split table into folds, type: CSV} - label_column: - graphInput: {inputName: label_column} - num_iterations: - graphInput: {inputName: num_iterations} - objective: - graphInput: {inputName: objective} - Xgboost predict 5: - componentRef: {digest: ecdfaf32cff15b6abc3d0dd80365ce00577f1a19a058fbe201f515431cea1357, - url: 
'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Predict/component.yaml'} - arguments: - data: - taskOutput: {outputName: test_5, taskId: Split table into folds, type: CSV} - model: - taskOutput: {outputName: model, taskId: Xgboost train 5, type: XGBoostModel} - label_column: - graphInput: {inputName: label_column} - Pandas Transform DataFrame in CSV format 5: - componentRef: {digest: 58dc88349157bf128021708c316ce4eb60bc1de0a5a7dd3af45fabac3276d510, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/6162d55998b176b50267d351241100bb0ee715bc/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml'} - arguments: - table: - taskOutput: {outputName: test_5, taskId: Split table into folds, type: CSV} - transform_code: df = df[["tips"]] - Remove header 5: - componentRef: {digest: ba35ffea863855b956c3c50aefa0420ba3823949a6c059e6e3971cde960dc5a3, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/02c9638287468c849632cf9f7885b51de4c66f86/components/tables/Remove_header/component.yaml'} - arguments: - table: - taskOutput: {outputName: transformed_table, taskId: Pandas Transform DataFrame - in CSV format 5, type: CSV} - Calculate regression metrics from csv 5: - componentRef: {digest: e3ecbfeb18032820edfee4255e2fb6d15d15ed224e166519d5e528e12053a995, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/7da1ac9464b4b3e7d95919faa2f1107a9635b7e4/components/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml'} - arguments: - true_values: - taskOutput: {outputName: table, taskId: Remove header 5} - predicted_values: - taskOutput: {outputName: predictions, taskId: Xgboost predict 5, type: Text} - Aggregate regression metrics from csv: - componentRef: {digest: 3e128130521eff8d43764f3dcb037316cdd6490ad2878df5adef416f7c2f3c19, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/7ea9363fe201918d419fecdc00d1275e657ff712/components/ml_metrics/Aggregate_regression_metrics/component.yaml'} - arguments: - metrics_1: - taskOutput: {outputName: metrics, taskId: Calculate regression metrics - from csv, type: JsonObject} - metrics_2: - taskOutput: {outputName: metrics, taskId: Calculate regression metrics - from csv 2, type: JsonObject} - metrics_3: - taskOutput: {outputName: metrics, taskId: Calculate regression metrics - from csv 3, type: JsonObject} - metrics_4: - taskOutput: {outputName: metrics, taskId: Calculate regression metrics - from csv 4, type: JsonObject} - metrics_5: - taskOutput: {outputName: metrics, taskId: Calculate regression metrics - from csv 5, type: JsonObject} - outputValues: - mean_absolute_error: - taskOutput: {outputName: mean_absolute_error, taskId: Aggregate regression - metrics from csv, type: Float} - mean_squared_error: - taskOutput: {outputName: mean_squared_error, taskId: Aggregate regression - metrics from csv, type: Float} - root_mean_squared_error: - taskOutput: {outputName: root_mean_squared_error, taskId: Aggregate regression - metrics from csv, type: Float} - metrics: - taskOutput: {outputName: metrics, taskId: Aggregate regression metrics from - csv, type: JsonObject} diff --git a/components/contrib/XGBoost/Predict/component.py b/components/contrib/XGBoost/Predict/component.py deleted file mode 100644 index 01ce2455803..00000000000 --- a/components/contrib/XGBoost/Predict/component.py +++ /dev/null @@ -1,58 +0,0 @@ -from kfp.components import InputPath, OutputPath, create_component_from_func - -def xgboost_predict( - data_path: InputPath('CSV'), # Also 
supports LibSVM - model_path: InputPath('XGBoostModel'), - predictions_path: OutputPath('Predictions'), - label_column: int = None, -): - '''Make predictions using a trained XGBoost model. - - Args: - data_path: Path for the feature data in CSV format. - model_path: Path for the trained model in binary XGBoost format. - predictions_path: Output path for the predictions. - label_column: Column containing the label data. - - Annotations: - author: Alexey Volkov - ''' - from pathlib import Path - - import numpy - import pandas - import xgboost - - df = pandas.read_csv( - data_path, - ) - - if label_column is not None: - df = df.drop(columns=[df.columns[label_column]]) - - testing_data = xgboost.DMatrix( - data=df, - ) - - model = xgboost.Booster(model_file=model_path) - - predictions = model.predict(testing_data) - - Path(predictions_path).parent.mkdir(parents=True, exist_ok=True) - numpy.savetxt(predictions_path, predictions) - - -if __name__ == '__main__': - create_component_from_func( - xgboost_predict, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=[ - 'xgboost==1.1.1', - 'pandas==1.0.5', - ], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/XGBoost/Predict/component.yaml", - }, - ) diff --git a/components/contrib/XGBoost/Predict/component.yaml b/components/contrib/XGBoost/Predict/component.yaml deleted file mode 100644 index ccce2712706..00000000000 --- a/components/contrib/XGBoost/Predict/component.yaml +++ /dev/null @@ -1,103 +0,0 @@ -name: Xgboost predict -description: |- - Make predictions using a trained XGBoost model. - - Args: - data_path: Path for the feature data in CSV format. - model_path: Path for the trained model in binary XGBoost format. - predictions_path: Output path for the predictions. - label_column: Column containing the label data. - - Annotations: - author: Alexey Volkov -inputs: -- {name: data, type: CSV} -- {name: model, type: XGBoostModel} -- {name: label_column, type: Integer, optional: true} -outputs: -- {name: predictions, type: Predictions} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/XGBoost/Predict/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'xgboost==1.1.1' 'pandas==1.0.5' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 - -m pip install --quiet --no-warn-script-location 'xgboost==1.1.1' 'pandas==1.0.5' - --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def xgboost_predict( - data_path, # Also supports LibSVM - model_path, - predictions_path, - label_column = None, - ): - '''Make predictions using a trained XGBoost model. - - Args: - data_path: Path for the feature data in CSV format. - model_path: Path for the trained model in binary XGBoost format. - predictions_path: Output path for the predictions. - label_column: Column containing the label data. 
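Editorial note on the `Xgboost predict` component removed above: it is a legacy KFP v1 "lightweight" Python component, where `create_component_from_func` wraps `xgboost_predict` and emits the `component.yaml` that pipelines consume. As a rough consumption sketch (not part of this change), the pinned URL already referenced by the graph components in this diff could still be loaded with the v1 SDK; this assumes `kfp<2.0` and that the URL remains reachable.

```python
# Hedged sketch, legacy KFP v1 SDK (kfp<2.0). The URL below is the same pinned
# component.yaml that the graph components in this diff reference.
from kfp import components

xgboost_predict_op = components.load_component_from_url(
    'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Predict/component.yaml'
)

def predict_step(data: 'CSV', model: 'XGBoostModel', label_column: int = 0):
    # `data` and `model` are upstream artifacts; argument names mirror the component's inputs.
    task = xgboost_predict_op(data=data, model=model, label_column=label_column)
    return task.outputs['predictions']
```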
- - Annotations: - author: Alexey Volkov - ''' - from pathlib import Path - - import numpy - import pandas - import xgboost - - df = pandas.read_csv( - data_path, - ) - - if label_column is not None: - df = df.drop(columns=[df.columns[label_column]]) - - testing_data = xgboost.DMatrix( - data=df, - ) - - model = xgboost.Booster(model_file=model_path) - - predictions = model.predict(testing_data) - - Path(predictions_path).parent.mkdir(parents=True, exist_ok=True) - numpy.savetxt(predictions_path, predictions) - - import argparse - _parser = argparse.ArgumentParser(prog='Xgboost predict', description='Make predictions using a trained XGBoost model.\n\n Args:\n data_path: Path for the feature data in CSV format.\n model_path: Path for the trained model in binary XGBoost format.\n predictions_path: Output path for the predictions.\n label_column: Column containing the label data.\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--label-column", dest="label_column", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--predictions", dest="predictions_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = xgboost_predict(**_parsed_args) - args: - - --data - - {inputPath: data} - - --model - - {inputPath: model} - - if: - cond: {isPresent: label_column} - then: - - --label-column - - {inputValue: label_column} - - --predictions - - {outputPath: predictions} diff --git a/components/contrib/XGBoost/Predict/from_ApacheParquet/component.py b/components/contrib/XGBoost/Predict/from_ApacheParquet/component.py deleted file mode 100644 index 37b1f773e31..00000000000 --- a/components/contrib/XGBoost/Predict/from_ApacheParquet/component.py +++ /dev/null @@ -1,58 +0,0 @@ -from kfp.components import InputPath, OutputPath, create_component_from_func - -def xgboost_predict( - data_path: InputPath('ApacheParquet'), - model_path: InputPath('XGBoostModel'), - predictions_path: OutputPath('Predictions'), - label_column_name: str = None, -): - '''Make predictions using a trained XGBoost model. - - Args: - data_path: Path for the feature data in Apache Parquet format. - model_path: Path for the trained model in binary XGBoost format. - predictions_path: Output path for the predictions. - label_column_name: Optional. Name of the column containing the label data that is excluded during the prediction. 
- - Annotations: - author: Alexey Volkov - ''' - from pathlib import Path - - import numpy - import pandas - import xgboost - - # Loading data - df = pandas.read_parquet(data_path) - if label_column_name: - df = df.drop(columns=[label_column_name]) - - evaluation_data = xgboost.DMatrix( - data=df, - ) - - # Training - model = xgboost.Booster(model_file=model_path) - - predictions = model.predict(evaluation_data) - - Path(predictions_path).parent.mkdir(parents=True, exist_ok=True) - numpy.savetxt(predictions_path, predictions) - - -if __name__ == '__main__': - create_component_from_func( - xgboost_predict, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=[ - 'xgboost==1.1.1', - 'pandas==1.0.5', - 'pyarrow==0.17.1', - ], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/XGBoost/Predict/from_ApacheParquet/component.yaml", - }, - ) diff --git a/components/contrib/XGBoost/Predict/from_ApacheParquet/component.yaml b/components/contrib/XGBoost/Predict/from_ApacheParquet/component.yaml deleted file mode 100644 index d421cdc21ef..00000000000 --- a/components/contrib/XGBoost/Predict/from_ApacheParquet/component.yaml +++ /dev/null @@ -1,102 +0,0 @@ -name: Xgboost predict -description: |- - Make predictions using a trained XGBoost model. - - Args: - data_path: Path for the feature data in Apache Parquet format. - model_path: Path for the trained model in binary XGBoost format. - predictions_path: Output path for the predictions. - label_column_name: Optional. Name of the column containing the label data that is excluded during the prediction. - - Annotations: - author: Alexey Volkov -inputs: -- {name: data, type: ApacheParquet} -- {name: model, type: XGBoostModel} -- {name: label_column_name, type: String, optional: true} -outputs: -- {name: predictions, type: Predictions} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/XGBoost/Predict/from_ApacheParquet/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'xgboost==1.1.1' 'pandas==1.0.5' 'pyarrow==0.17.1' || PIP_DISABLE_PIP_VERSION_CHECK=1 - python3 -m pip install --quiet --no-warn-script-location 'xgboost==1.1.1' 'pandas==1.0.5' - 'pyarrow==0.17.1' --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def xgboost_predict( - data_path, - model_path, - predictions_path, - label_column_name = None, - ): - '''Make predictions using a trained XGBoost model. - - Args: - data_path: Path for the feature data in Apache Parquet format. - model_path: Path for the trained model in binary XGBoost format. - predictions_path: Output path for the predictions. - label_column_name: Optional. Name of the column containing the label data that is excluded during the prediction. 
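Editorial note: the `from_ApacheParquet` predict variant above consumes an `ApacheParquet` artifact and drops the label by column name rather than by index. A hypothetical local sketch of producing such a Parquet input from a CSV table follows; the paths are illustrative, and writing Parquet with pandas requires a Parquet engine such as pyarrow (the component itself pins `pyarrow==0.17.1`).

```python
# Hypothetical local sketch: producing the ApacheParquet artifact that the
# from_ApacheParquet predict component above expects. Paths are illustrative.
import pandas as pd

df = pd.read_csv('data/evaluation.csv')        # illustrative CSV source
df.to_parquet('data/evaluation.parquet')       # file to feed the component's `data` input
```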
- - Annotations: - author: Alexey Volkov - ''' - from pathlib import Path - - import numpy - import pandas - import xgboost - - # Loading data - df = pandas.read_parquet(data_path) - if label_column_name: - df = df.drop(columns=[label_column_name]) - - evaluation_data = xgboost.DMatrix( - data=df, - ) - - # Training - model = xgboost.Booster(model_file=model_path) - - predictions = model.predict(evaluation_data) - - Path(predictions_path).parent.mkdir(parents=True, exist_ok=True) - numpy.savetxt(predictions_path, predictions) - - import argparse - _parser = argparse.ArgumentParser(prog='Xgboost predict', description='Make predictions using a trained XGBoost model.\n\n Args:\n data_path: Path for the feature data in Apache Parquet format.\n model_path: Path for the trained model in binary XGBoost format.\n predictions_path: Output path for the predictions.\n label_column_name: Optional. Name of the column containing the label data that is excluded during the prediction.\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--label-column-name", dest="label_column_name", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--predictions", dest="predictions_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = xgboost_predict(**_parsed_args) - args: - - --data - - {inputPath: data} - - --model - - {inputPath: model} - - if: - cond: {isPresent: label_column_name} - then: - - --label-column-name - - {inputValue: label_column_name} - - --predictions - - {outputPath: predictions} diff --git a/components/contrib/XGBoost/Train/component.py b/components/contrib/XGBoost/Train/component.py deleted file mode 100644 index ce9a0e301ea..00000000000 --- a/components/contrib/XGBoost/Train/component.py +++ /dev/null @@ -1,94 +0,0 @@ -from kfp.components import InputPath, OutputPath, create_component_from_func - -def xgboost_train( - training_data_path: InputPath('CSV'), # Also supports LibSVM - model_path: OutputPath('XGBoostModel'), - model_config_path: OutputPath('XGBoostModelConfig'), - starting_model_path: InputPath('XGBoostModel') = None, - - label_column: int = 0, - num_iterations: int = 10, - booster_params: dict = None, - - # Booster parameters - objective: str = 'reg:squarederror', - booster: str = 'gbtree', - learning_rate: float = 0.3, - min_split_loss: float = 0, - max_depth: int = 6, -): - '''Train an XGBoost model. - - Args: - training_data_path: Path for the training data in CSV format. - model_path: Output path for the trained model in binary XGBoost format. - model_config_path: Output path for the internal parameter configuration of Booster as a JSON string. - starting_model_path: Path for the existing trained model to start from. - label_column: Column containing the label data. - num_boost_rounds: Number of boosting iterations. - booster_params: Parameters for the booster. See https://xgboost.readthedocs.io/en/latest/parameter.html - objective: The learning task and the corresponding learning objective. - See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters - The most common values are: - "reg:squarederror" - Regression with squared loss (default). - "reg:logistic" - Logistic regression. 
- "binary:logistic" - Logistic regression for binary classification, output probability. - "binary:logitraw" - Logistic regression for binary classification, output score before logistic transformation - "rank:pairwise" - Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized - "rank:ndcg" - Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized - - Annotations: - author: Alexey Volkov - ''' - import pandas - import xgboost - - df = pandas.read_csv( - training_data_path, - ) - - training_data = xgboost.DMatrix( - data=df.drop(columns=[df.columns[label_column]]), - label=df[df.columns[label_column]], - ) - - booster_params = booster_params or {} - booster_params.setdefault('objective', objective) - booster_params.setdefault('booster', booster) - booster_params.setdefault('learning_rate', learning_rate) - booster_params.setdefault('min_split_loss', min_split_loss) - booster_params.setdefault('max_depth', max_depth) - - starting_model = None - if starting_model_path: - starting_model = xgboost.Booster(model_file=starting_model_path) - - model = xgboost.train( - params=booster_params, - dtrain=training_data, - num_boost_round=num_iterations, - xgb_model=starting_model - ) - - # Saving the model in binary format - model.save_model(model_path) - - model_config_str = model.save_config() - with open(model_config_path, 'w') as model_config_file: - model_config_file.write(model_config_str) - - -if __name__ == '__main__': - create_component_from_func( - xgboost_train, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=[ - 'xgboost==1.1.1', - 'pandas==1.0.5', - ], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/XGBoost/Train/component.yaml", - }, - ) diff --git a/components/contrib/XGBoost/Train/component.yaml b/components/contrib/XGBoost/Train/component.yaml deleted file mode 100644 index 8d89a3514de..00000000000 --- a/components/contrib/XGBoost/Train/component.yaml +++ /dev/null @@ -1,208 +0,0 @@ -name: Xgboost train -description: |- - Train an XGBoost model. - - Args: - training_data_path: Path for the training data in CSV format. - model_path: Output path for the trained model in binary XGBoost format. - model_config_path: Output path for the internal parameter configuration of Booster as a JSON string. - starting_model_path: Path for the existing trained model to start from. - label_column: Column containing the label data. - num_boost_rounds: Number of boosting iterations. - booster_params: Parameters for the booster. See https://xgboost.readthedocs.io/en/latest/parameter.html - objective: The learning task and the corresponding learning objective. - See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters - The most common values are: - "reg:squarederror" - Regression with squared loss (default). - "reg:logistic" - Logistic regression. - "binary:logistic" - Logistic regression for binary classification, output probability. 
- "binary:logitraw" - Logistic regression for binary classification, output score before logistic transformation - "rank:pairwise" - Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized - "rank:ndcg" - Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized - - Annotations: - author: Alexey Volkov -inputs: -- {name: training_data, type: CSV} -- {name: starting_model, type: XGBoostModel, optional: true} -- {name: label_column, type: Integer, default: '0', optional: true} -- {name: num_iterations, type: Integer, default: '10', optional: true} -- {name: booster_params, type: JsonObject, optional: true} -- {name: objective, type: String, default: 'reg:squarederror', optional: true} -- {name: booster, type: String, default: gbtree, optional: true} -- {name: learning_rate, type: Float, default: '0.3', optional: true} -- {name: min_split_loss, type: Float, default: '0', optional: true} -- {name: max_depth, type: Integer, default: '6', optional: true} -outputs: -- {name: model, type: XGBoostModel} -- {name: model_config, type: XGBoostModelConfig} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/XGBoost/Train/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'xgboost==1.1.1' 'pandas==1.0.5' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 - -m pip install --quiet --no-warn-script-location 'xgboost==1.1.1' 'pandas==1.0.5' - --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def xgboost_train( - training_data_path, # Also supports LibSVM - model_path, - model_config_path, - starting_model_path = None, - - label_column = 0, - num_iterations = 10, - booster_params = None, - - # Booster parameters - objective = 'reg:squarederror', - booster = 'gbtree', - learning_rate = 0.3, - min_split_loss = 0, - max_depth = 6, - ): - '''Train an XGBoost model. - - Args: - training_data_path: Path for the training data in CSV format. - model_path: Output path for the trained model in binary XGBoost format. - model_config_path: Output path for the internal parameter configuration of Booster as a JSON string. - starting_model_path: Path for the existing trained model to start from. - label_column: Column containing the label data. - num_boost_rounds: Number of boosting iterations. - booster_params: Parameters for the booster. See https://xgboost.readthedocs.io/en/latest/parameter.html - objective: The learning task and the corresponding learning objective. - See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters - The most common values are: - "reg:squarederror" - Regression with squared loss (default). - "reg:logistic" - Logistic regression. - "binary:logistic" - Logistic regression for binary classification, output probability. 
- "binary:logitraw" - Logistic regression for binary classification, output score before logistic transformation - "rank:pairwise" - Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized - "rank:ndcg" - Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized - - Annotations: - author: Alexey Volkov - ''' - import pandas - import xgboost - - df = pandas.read_csv( - training_data_path, - ) - - training_data = xgboost.DMatrix( - data=df.drop(columns=[df.columns[label_column]]), - label=df[df.columns[label_column]], - ) - - booster_params = booster_params or {} - booster_params.setdefault('objective', objective) - booster_params.setdefault('booster', booster) - booster_params.setdefault('learning_rate', learning_rate) - booster_params.setdefault('min_split_loss', min_split_loss) - booster_params.setdefault('max_depth', max_depth) - - starting_model = None - if starting_model_path: - starting_model = xgboost.Booster(model_file=starting_model_path) - - model = xgboost.train( - params=booster_params, - dtrain=training_data, - num_boost_round=num_iterations, - xgb_model=starting_model - ) - - # Saving the model in binary format - model.save_model(model_path) - - model_config_str = model.save_config() - with open(model_config_path, 'w') as model_config_file: - model_config_file.write(model_config_str) - - import json - import argparse - _parser = argparse.ArgumentParser(prog='Xgboost train', description='Train an XGBoost model.\n\n Args:\n training_data_path: Path for the training data in CSV format.\n model_path: Output path for the trained model in binary XGBoost format.\n model_config_path: Output path for the internal parameter configuration of Booster as a JSON string.\n starting_model_path: Path for the existing trained model to start from.\n label_column: Column containing the label data.\n num_boost_rounds: Number of boosting iterations.\n booster_params: Parameters for the booster. 
See https://xgboost.readthedocs.io/en/latest/parameter.html\n objective: The learning task and the corresponding learning objective.\n See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters\n The most common values are:\n "reg:squarederror" - Regression with squared loss (default).\n "reg:logistic" - Logistic regression.\n "binary:logistic" - Logistic regression for binary classification, output probability.\n "binary:logitraw" - Logistic regression for binary classification, output score before logistic transformation\n "rank:pairwise" - Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized\n "rank:ndcg" - Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--training-data", dest="training_data_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--starting-model", dest="starting_model_path", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--label-column", dest="label_column", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--num-iterations", dest="num_iterations", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--booster-params", dest="booster_params", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--objective", dest="objective", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--booster", dest="booster", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--learning-rate", dest="learning_rate", type=float, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--min-split-loss", dest="min_split_loss", type=float, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--max-depth", dest="max_depth", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--model-config", dest="model_config_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = xgboost_train(**_parsed_args) - args: - - --training-data - - {inputPath: training_data} - - if: - cond: {isPresent: starting_model} - then: - - --starting-model - - {inputPath: starting_model} - - if: - cond: {isPresent: label_column} - then: - - --label-column - - {inputValue: label_column} - - if: - cond: {isPresent: num_iterations} - then: - - --num-iterations - - {inputValue: num_iterations} - - if: - cond: {isPresent: booster_params} - then: - - --booster-params - - {inputValue: booster_params} - - if: - cond: {isPresent: objective} - then: - - --objective - - {inputValue: objective} - - if: - cond: {isPresent: booster} - then: - - --booster - - {inputValue: booster} - - if: - cond: {isPresent: learning_rate} - then: - - --learning-rate - - {inputValue: learning_rate} - - if: - cond: {isPresent: min_split_loss} - then: - - --min-split-loss - - {inputValue: min_split_loss} - - if: - cond: {isPresent: max_depth} - then: - - --max-depth - - {inputValue: max_depth} - - --model - - {outputPath: model} - - --model-config - - {outputPath: model_config} diff --git a/components/contrib/XGBoost/Train/from_ApacheParquet/component.py b/components/contrib/XGBoost/Train/from_ApacheParquet/component.py deleted 
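Editorial note on the generated launcher shown above: every optional flag is registered with `default=argparse.SUPPRESS`, which is why the YAML `args` section only emits a flag when the corresponding input `isPresent`; omitted flags then fall back to the wrapped function's own Python defaults. A self-contained sketch of that pattern, with illustrative parameter names:

```python
# Self-contained sketch (illustrative parameters) of the argparse.SUPPRESS pattern used by
# the generated launcher above: flags that are not passed never appear in the parsed dict,
# so the wrapped function's own defaults apply.
import argparse

def train(num_iterations: int = 10, objective: str = 'reg:squarederror'):
    return num_iterations, objective

parser = argparse.ArgumentParser()
parser.add_argument('--num-iterations', dest='num_iterations', type=int,
                    required=False, default=argparse.SUPPRESS)
parser.add_argument('--objective', dest='objective', type=str,
                    required=False, default=argparse.SUPPRESS)

print(train(**vars(parser.parse_args([]))))                         # (10, 'reg:squarederror')
print(train(**vars(parser.parse_args(['--num-iterations', '5']))))  # (5, 'reg:squarederror')
```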
file mode 100644 index 579052edd11..00000000000 --- a/components/contrib/XGBoost/Train/from_ApacheParquet/component.py +++ /dev/null @@ -1,94 +0,0 @@ -from kfp.components import InputPath, OutputPath, create_component_from_func - -def xgboost_train( - training_data_path: InputPath('ApacheParquet'), - model_path: OutputPath('XGBoostModel'), - model_config_path: OutputPath('XGBoostModelConfig'), - label_column_name: str, - - starting_model_path: InputPath('XGBoostModel') = None, - - num_iterations: int = 10, - booster_params: dict = None, - - # Booster parameters - objective: str = 'reg:squarederror', - booster: str = 'gbtree', - learning_rate: float = 0.3, - min_split_loss: float = 0, - max_depth: int = 6, -): - '''Train an XGBoost model. - - Args: - training_data_path: Path for the training data in Apache Parquet format. - model_path: Output path for the trained model in binary XGBoost format. - model_config_path: Output path for the internal parameter configuration of Booster as a JSON string. - starting_model_path: Path for the existing trained model to start from. - label_column_name: Name of the column containing the label data. - num_boost_rounds: Number of boosting iterations. - booster_params: Parameters for the booster. See https://xgboost.readthedocs.io/en/latest/parameter.html - objective: The learning task and the corresponding learning objective. - See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters - The most common values are: - "reg:squarederror" - Regression with squared loss (default). - "reg:logistic" - Logistic regression. - "binary:logistic" - Logistic regression for binary classification, output probability. - "binary:logitraw" - Logistic regression for binary classification, output score before logistic transformation - "rank:pairwise" - Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized - "rank:ndcg" - Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized - - Annotations: - author: Alexey Volkov - ''' - import pandas - import xgboost - - # Loading data - df = pandas.read_parquet(training_data_path) - training_data = xgboost.DMatrix( - data=df.drop(columns=[label_column_name]), - label=df[[label_column_name]], - ) - # Training - booster_params = booster_params or {} - booster_params.setdefault('objective', objective) - booster_params.setdefault('booster', booster) - booster_params.setdefault('learning_rate', learning_rate) - booster_params.setdefault('min_split_loss', min_split_loss) - booster_params.setdefault('max_depth', max_depth) - - starting_model = None - if starting_model_path: - starting_model = xgboost.Booster(model_file=starting_model_path) - - model = xgboost.train( - params=booster_params, - dtrain=training_data, - num_boost_round=num_iterations, - xgb_model=starting_model - ) - - # Saving the model in binary format - model.save_model(model_path) - - model_config_str = model.save_config() - with open(model_config_path, 'w') as model_config_file: - model_config_file.write(model_config_str) - - -if __name__ == '__main__': - create_component_from_func( - xgboost_train, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=[ - 'xgboost==1.1.1', - 'pandas==1.0.5', - 'pyarrow==0.17.1', - ], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/XGBoost/Train/from_ApacheParquet/component.yaml", - }, - ) diff --git 
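Editorial note: each deleted `component.py` in this diff regenerates its `component.yaml` when executed, and `packages_to_install` is what produces the `(pip install … || pip install … --user) && "$0" "$@"` wrapper seen in the YAML `command` sections. A hedged toy sketch of that generation step; it assumes the legacy KFP v1 SDK is installed, and the function, version pin, and output filename are illustrative only.

```python
# Hedged sketch: how the deleted component.py modules above materialise their YAML specs.
# Requires the legacy KFP v1 SDK; function, pin, and output filename are illustrative.
from kfp.components import create_component_from_func

def add_one(x: int) -> int:
    return x + 1

add_one_op = create_component_from_func(
    add_one,
    base_image='python:3.7',
    packages_to_install=['numpy==1.21.6'],           # yields the pip-install fallback wrapper
    output_component_file='add_one.component.yaml',  # written next to the script, as above
)
```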
a/components/contrib/XGBoost/Train/from_ApacheParquet/component.yaml b/components/contrib/XGBoost/Train/from_ApacheParquet/component.yaml deleted file mode 100644 index ee2e4e9f0d0..00000000000 --- a/components/contrib/XGBoost/Train/from_ApacheParquet/component.yaml +++ /dev/null @@ -1,204 +0,0 @@ -name: Xgboost train -description: |- - Train an XGBoost model. - - Args: - training_data_path: Path for the training data in Apache Parquet format. - model_path: Output path for the trained model in binary XGBoost format. - model_config_path: Output path for the internal parameter configuration of Booster as a JSON string. - starting_model_path: Path for the existing trained model to start from. - label_column_name: Name of the column containing the label data. - num_boost_rounds: Number of boosting iterations. - booster_params: Parameters for the booster. See https://xgboost.readthedocs.io/en/latest/parameter.html - objective: The learning task and the corresponding learning objective. - See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters - The most common values are: - "reg:squarederror" - Regression with squared loss (default). - "reg:logistic" - Logistic regression. - "binary:logistic" - Logistic regression for binary classification, output probability. - "binary:logitraw" - Logistic regression for binary classification, output score before logistic transformation - "rank:pairwise" - Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized - "rank:ndcg" - Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized - - Annotations: - author: Alexey Volkov -inputs: -- {name: training_data, type: ApacheParquet} -- {name: label_column_name, type: String} -- {name: starting_model, type: XGBoostModel, optional: true} -- {name: num_iterations, type: Integer, default: '10', optional: true} -- {name: booster_params, type: JsonObject, optional: true} -- {name: objective, type: String, default: 'reg:squarederror', optional: true} -- {name: booster, type: String, default: gbtree, optional: true} -- {name: learning_rate, type: Float, default: '0.3', optional: true} -- {name: min_split_loss, type: Float, default: '0', optional: true} -- {name: max_depth, type: Integer, default: '6', optional: true} -outputs: -- {name: model, type: XGBoostModel} -- {name: model_config, type: XGBoostModelConfig} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/XGBoost/Train/from_ApacheParquet/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'xgboost==1.1.1' 'pandas==1.0.5' 'pyarrow==0.17.1' || PIP_DISABLE_PIP_VERSION_CHECK=1 - python3 -m pip install --quiet --no-warn-script-location 'xgboost==1.1.1' 'pandas==1.0.5' - 'pyarrow==0.17.1' --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def xgboost_train( - training_data_path, - model_path, - model_config_path, - label_column_name, - - starting_model_path = None, - - num_iterations = 10, - booster_params = None, - - # Booster parameters - objective = 'reg:squarederror', - booster = 'gbtree', - learning_rate = 0.3, - min_split_loss = 0, - max_depth = 6, - ): - '''Train an XGBoost model. 
- - Args: - training_data_path: Path for the training data in Apache Parquet format. - model_path: Output path for the trained model in binary XGBoost format. - model_config_path: Output path for the internal parameter configuration of Booster as a JSON string. - starting_model_path: Path for the existing trained model to start from. - label_column_name: Name of the column containing the label data. - num_boost_rounds: Number of boosting iterations. - booster_params: Parameters for the booster. See https://xgboost.readthedocs.io/en/latest/parameter.html - objective: The learning task and the corresponding learning objective. - See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters - The most common values are: - "reg:squarederror" - Regression with squared loss (default). - "reg:logistic" - Logistic regression. - "binary:logistic" - Logistic regression for binary classification, output probability. - "binary:logitraw" - Logistic regression for binary classification, output score before logistic transformation - "rank:pairwise" - Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized - "rank:ndcg" - Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized - - Annotations: - author: Alexey Volkov - ''' - import pandas - import xgboost - - # Loading data - df = pandas.read_parquet(training_data_path) - training_data = xgboost.DMatrix( - data=df.drop(columns=[label_column_name]), - label=df[[label_column_name]], - ) - # Training - booster_params = booster_params or {} - booster_params.setdefault('objective', objective) - booster_params.setdefault('booster', booster) - booster_params.setdefault('learning_rate', learning_rate) - booster_params.setdefault('min_split_loss', min_split_loss) - booster_params.setdefault('max_depth', max_depth) - - starting_model = None - if starting_model_path: - starting_model = xgboost.Booster(model_file=starting_model_path) - - model = xgboost.train( - params=booster_params, - dtrain=training_data, - num_boost_round=num_iterations, - xgb_model=starting_model - ) - - # Saving the model in binary format - model.save_model(model_path) - - model_config_str = model.save_config() - with open(model_config_path, 'w') as model_config_file: - model_config_file.write(model_config_str) - - import json - import argparse - _parser = argparse.ArgumentParser(prog='Xgboost train', description='Train an XGBoost model.\n\n Args:\n training_data_path: Path for the training data in Apache Parquet format.\n model_path: Output path for the trained model in binary XGBoost format.\n model_config_path: Output path for the internal parameter configuration of Booster as a JSON string.\n starting_model_path: Path for the existing trained model to start from.\n label_column_name: Name of the column containing the label data.\n num_boost_rounds: Number of boosting iterations.\n booster_params: Parameters for the booster. 
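Editorial note: the two training variants in this diff differ in how they select the label. The CSV component takes `label_column` as a positional index into `df.columns`, while the Parquet component takes a required `label_column_name`. An illustrative comparison on an invented toy DataFrame:

```python
# Illustrative comparison (toy DataFrame) of the label-selection difference between the
# CSV and Apache Parquet training components in this diff.
import pandas as pd

df = pd.DataFrame({'tips': [1.5, 2.0], 'trip_seconds': [300, 420]})

# CSV variant: label_column is an integer index into df.columns.
label_column = 0
csv_label = df[df.columns[label_column]]
csv_features = df.drop(columns=[df.columns[label_column]])

# Parquet variant: label_column_name is a required column name.
label_column_name = 'tips'
parquet_label = df[[label_column_name]]
parquet_features = df.drop(columns=[label_column_name])
```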
See https://xgboost.readthedocs.io/en/latest/parameter.html\n objective: The learning task and the corresponding learning objective.\n See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters\n The most common values are:\n "reg:squarederror" - Regression with squared loss (default).\n "reg:logistic" - Logistic regression.\n "binary:logistic" - Logistic regression for binary classification, output probability.\n "binary:logitraw" - Logistic regression for binary classification, output score before logistic transformation\n "rank:pairwise" - Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized\n "rank:ndcg" - Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--training-data", dest="training_data_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--label-column-name", dest="label_column_name", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--starting-model", dest="starting_model_path", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--num-iterations", dest="num_iterations", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--booster-params", dest="booster_params", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--objective", dest="objective", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--booster", dest="booster", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--learning-rate", dest="learning_rate", type=float, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--min-split-loss", dest="min_split_loss", type=float, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--max-depth", dest="max_depth", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--model-config", dest="model_config_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = xgboost_train(**_parsed_args) - args: - - --training-data - - {inputPath: training_data} - - --label-column-name - - {inputValue: label_column_name} - - if: - cond: {isPresent: starting_model} - then: - - --starting-model - - {inputPath: starting_model} - - if: - cond: {isPresent: num_iterations} - then: - - --num-iterations - - {inputValue: num_iterations} - - if: - cond: {isPresent: booster_params} - then: - - --booster-params - - {inputValue: booster_params} - - if: - cond: {isPresent: objective} - then: - - --objective - - {inputValue: objective} - - if: - cond: {isPresent: booster} - then: - - --booster - - {inputValue: booster} - - if: - cond: {isPresent: learning_rate} - then: - - --learning-rate - - {inputValue: learning_rate} - - if: - cond: {isPresent: min_split_loss} - then: - - --min-split-loss - - {inputValue: min_split_loss} - - if: - cond: {isPresent: max_depth} - then: - - --max-depth - - {inputValue: max_depth} - - --model - - {outputPath: model} - - --model-config - - {outputPath: model_config} diff --git a/components/contrib/XGBoost/Train_and_cross-validate_regression/from_CSV/component.py 
b/components/contrib/XGBoost/Train_and_cross-validate_regression/from_CSV/component.py deleted file mode 100644 index 6daacd752f2..00000000000 --- a/components/contrib/XGBoost/Train_and_cross-validate_regression/from_CSV/component.py +++ /dev/null @@ -1,53 +0,0 @@ -from collections import OrderedDict -from kfp import components - - -xgboost_train_regression_and_calculate_metrics_on_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/18e8974288885086b2fd5351f6333210cd237d1b/components/XGBoost/Train_regression_and_calculate_metrics/from_CSV/component.yaml') -xgboost_5_fold_cross_validation_for_regression_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/271593e4193e2d3e44bdf42269fc03f0fcd2e5e8/components/XGBoost/Cross_validation_for_regression/from_CSV/component.yaml') - - -def xgboost_train_and_cv_regression_on_csv( - data: 'CSV', - label_column: int = 0, - objective: str = 'reg:squarederror', - num_iterations: int = 200, -): - main_training_and_metrics_task = xgboost_train_regression_and_calculate_metrics_on_csv_op( - training_data=data, - testing_data=data, - label_column=label_column, - objective=objective, - num_iterations=num_iterations, - ) - - cv_training_and_metrics_task = xgboost_5_fold_cross_validation_for_regression_op( - data=data, - label_column=label_column, - objective=objective, - num_iterations=num_iterations, - ) - - return OrderedDict([ - ('model', main_training_and_metrics_task.outputs['model']), - - ('training_mean_absolute_error', main_training_and_metrics_task.outputs['mean_absolute_error']), - ('training_mean_squared_error', main_training_and_metrics_task.outputs['mean_squared_error']), - ('training_root_mean_squared_error', main_training_and_metrics_task.outputs['root_mean_squared_error']), - ('training_metrics', main_training_and_metrics_task.outputs['metrics']), - - ('cv_mean_absolute_error', cv_training_and_metrics_task.outputs['mean_absolute_error']), - ('cv_mean_squared_error', cv_training_and_metrics_task.outputs['mean_squared_error']), - ('cv_root_mean_squared_error', cv_training_and_metrics_task.outputs['root_mean_squared_error']), - ('cv_metrics', cv_training_and_metrics_task.outputs['metrics']), - ]) - - -if __name__ == '__main__': - xgboost_train_and_cv_regression_on_csv_op = components.create_graph_component_from_pipeline_func( - xgboost_train_and_cv_regression_on_csv, - output_component_file='component.yaml', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/XGBoost/Train_and_cross-validate_regression/from_CSV/component.yaml", - }, - ) diff --git a/components/contrib/XGBoost/Train_and_cross-validate_regression/from_CSV/component.yaml b/components/contrib/XGBoost/Train_and_cross-validate_regression/from_CSV/component.yaml deleted file mode 100644 index e9487f06acb..00000000000 --- a/components/contrib/XGBoost/Train_and_cross-validate_regression/from_CSV/component.yaml +++ /dev/null @@ -1,339 +0,0 @@ -name: Xgboost train and cv regression on csv -inputs: -- {name: data, type: CSV} -- {name: label_column, type: Integer, default: '0', optional: true} -- {name: objective, type: String, default: 'reg:squarederror', optional: true} -- {name: num_iterations, type: Integer, default: '200', optional: true} -outputs: -- {name: model, type: XGBoostModel} -- {name: training_mean_absolute_error, type: Float} -- {name: training_mean_squared_error, type: Float} -- {name: 
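Editorial note: the cross-validation graph defined above fans the dataset out through "Split table into folds" tasks (see the graph spec that follows) and trains and evaluates once per fold. A rough local illustration of that 5-fold split, not the split component's actual code; the input path is illustrative.

```python
# Rough local illustration (not the "Split table into folds" component's actual code)
# of the 5-fold train/test CSV pairs consumed by the cross-validation graph.
import numpy as np
import pandas as pd

df = pd.read_csv('data/dataset.csv')                           # illustrative path
folds = np.array_split(df.sample(frac=1, random_state=0), 5)   # shuffle, then 5 roughly equal folds

for i in range(5):
    test_df = folds[i]
    train_df = pd.concat(folds[:i] + folds[i + 1:])
    train_df.to_csv(f'train_{i + 1}.csv', index=False)
    test_df.to_csv(f'test_{i + 1}.csv', index=False)
```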
training_root_mean_squared_error, type: Float} -- {name: training_metrics, type: JsonObject} -- {name: cv_mean_absolute_error, type: Float} -- {name: cv_mean_squared_error, type: Float} -- {name: cv_root_mean_squared_error, type: Float} -- {name: cv_metrics, type: JsonObject} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/XGBoost/Train_and_cross-validate_regression/from_CSV/component.yaml' -implementation: - graph: - tasks: - Xgboost train: - componentRef: {digest: 09b80053da29f8f51575b42e5d2e8ad4b7bdcc92a02c3744e189b1f597006b38, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Train/component.yaml'} - arguments: - training_data: - graphInput: {inputName: data} - label_column: - graphInput: {inputName: label_column} - num_iterations: - graphInput: {inputName: num_iterations} - objective: - graphInput: {inputName: objective} - Xgboost predict: - componentRef: {digest: ecdfaf32cff15b6abc3d0dd80365ce00577f1a19a058fbe201f515431cea1357, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Predict/component.yaml'} - arguments: - data: - graphInput: {inputName: data} - model: - taskOutput: {outputName: model, taskId: Xgboost train, type: XGBoostModel} - label_column: - graphInput: {inputName: label_column} - Pandas Transform DataFrame in CSV format: - componentRef: {digest: 58dc88349157bf128021708c316ce4eb60bc1de0a5a7dd3af45fabac3276d510, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/6162d55998b176b50267d351241100bb0ee715bc/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml'} - arguments: - table: - graphInput: {inputName: data} - transform_code: df = df[["tips"]] - Remove header: - componentRef: {digest: ba35ffea863855b956c3c50aefa0420ba3823949a6c059e6e3971cde960dc5a3, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/02c9638287468c849632cf9f7885b51de4c66f86/components/tables/Remove_header/component.yaml'} - arguments: - table: - taskOutput: {outputName: transformed_table, taskId: Pandas Transform DataFrame - in CSV format, type: CSV} - Calculate regression metrics from csv: - componentRef: {digest: e3ecbfeb18032820edfee4255e2fb6d15d15ed224e166519d5e528e12053a995, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/7da1ac9464b4b3e7d95919faa2f1107a9635b7e4/components/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml'} - arguments: - true_values: - taskOutput: {outputName: table, taskId: Remove header} - predicted_values: - taskOutput: {outputName: predictions, taskId: Xgboost predict, type: Text} - Split table into folds: - componentRef: {digest: 9956223bcecc7294ca1afac39b60ada4a935a571d817c3dfbf2ea4a211afe3d1, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/e9b4b29b22a5120daf95b581b0392cd461a906f0/components/dataset_manipulation/split_data_into_folds/in_CSV/component.yaml'} - arguments: - table: - graphInput: {inputName: data} - Pandas Transform DataFrame in CSV format 2: - componentRef: {digest: 58dc88349157bf128021708c316ce4eb60bc1de0a5a7dd3af45fabac3276d510, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/6162d55998b176b50267d351241100bb0ee715bc/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml'} - arguments: - table: - taskOutput: {outputName: test_3, taskId: Split table into folds, type: CSV} - transform_code: df = df[["tips"]] - Remove header 
2: - componentRef: {digest: ba35ffea863855b956c3c50aefa0420ba3823949a6c059e6e3971cde960dc5a3, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/02c9638287468c849632cf9f7885b51de4c66f86/components/tables/Remove_header/component.yaml'} - arguments: - table: - taskOutput: {outputName: transformed_table, taskId: Pandas Transform DataFrame - in CSV format 2, type: CSV} - Xgboost train 2: - componentRef: {digest: 09b80053da29f8f51575b42e5d2e8ad4b7bdcc92a02c3744e189b1f597006b38, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Train/component.yaml'} - arguments: - training_data: - taskOutput: {outputName: train_1, taskId: Split table into folds, type: CSV} - label_column: - graphInput: {inputName: label_column} - num_iterations: - graphInput: {inputName: num_iterations} - objective: - graphInput: {inputName: objective} - Xgboost predict 2: - componentRef: {digest: ecdfaf32cff15b6abc3d0dd80365ce00577f1a19a058fbe201f515431cea1357, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Predict/component.yaml'} - arguments: - data: - taskOutput: {outputName: test_1, taskId: Split table into folds, type: CSV} - model: - taskOutput: {outputName: model, taskId: Xgboost train 2, type: XGBoostModel} - label_column: - graphInput: {inputName: label_column} - Pandas Transform DataFrame in CSV format 3: - componentRef: {digest: 58dc88349157bf128021708c316ce4eb60bc1de0a5a7dd3af45fabac3276d510, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/6162d55998b176b50267d351241100bb0ee715bc/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml'} - arguments: - table: - taskOutput: {outputName: test_2, taskId: Split table into folds, type: CSV} - transform_code: df = df[["tips"]] - Remove header 3: - componentRef: {digest: ba35ffea863855b956c3c50aefa0420ba3823949a6c059e6e3971cde960dc5a3, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/02c9638287468c849632cf9f7885b51de4c66f86/components/tables/Remove_header/component.yaml'} - arguments: - table: - taskOutput: {outputName: transformed_table, taskId: Pandas Transform DataFrame - in CSV format 3, type: CSV} - Xgboost train 3: - componentRef: {digest: 09b80053da29f8f51575b42e5d2e8ad4b7bdcc92a02c3744e189b1f597006b38, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Train/component.yaml'} - arguments: - training_data: - taskOutput: {outputName: train_4, taskId: Split table into folds, type: CSV} - label_column: - graphInput: {inputName: label_column} - num_iterations: - graphInput: {inputName: num_iterations} - objective: - graphInput: {inputName: objective} - Pandas Transform DataFrame in CSV format 4: - componentRef: {digest: 58dc88349157bf128021708c316ce4eb60bc1de0a5a7dd3af45fabac3276d510, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/6162d55998b176b50267d351241100bb0ee715bc/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml'} - arguments: - table: - taskOutput: {outputName: test_4, taskId: Split table into folds, type: CSV} - transform_code: df = df[["tips"]] - Remove header 4: - componentRef: {digest: ba35ffea863855b956c3c50aefa0420ba3823949a6c059e6e3971cde960dc5a3, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/02c9638287468c849632cf9f7885b51de4c66f86/components/tables/Remove_header/component.yaml'} - arguments: - table: - taskOutput: {outputName: transformed_table, 
taskId: Pandas Transform DataFrame - in CSV format 4, type: CSV} - Xgboost predict 3: - componentRef: {digest: ecdfaf32cff15b6abc3d0dd80365ce00577f1a19a058fbe201f515431cea1357, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Predict/component.yaml'} - arguments: - data: - taskOutput: {outputName: test_4, taskId: Split table into folds, type: CSV} - model: - taskOutput: {outputName: model, taskId: Xgboost train 3, type: XGBoostModel} - label_column: - graphInput: {inputName: label_column} - Calculate regression metrics from csv 2: - componentRef: {digest: e3ecbfeb18032820edfee4255e2fb6d15d15ed224e166519d5e528e12053a995, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/7da1ac9464b4b3e7d95919faa2f1107a9635b7e4/components/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml'} - arguments: - true_values: - taskOutput: {outputName: table, taskId: Remove header 4} - predicted_values: - taskOutput: {outputName: predictions, taskId: Xgboost predict 3, type: Text} - Pandas Transform DataFrame in CSV format 5: - componentRef: {digest: 58dc88349157bf128021708c316ce4eb60bc1de0a5a7dd3af45fabac3276d510, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/6162d55998b176b50267d351241100bb0ee715bc/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml'} - arguments: - table: - taskOutput: {outputName: test_1, taskId: Split table into folds, type: CSV} - transform_code: df = df[["tips"]] - Remove header 5: - componentRef: {digest: ba35ffea863855b956c3c50aefa0420ba3823949a6c059e6e3971cde960dc5a3, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/02c9638287468c849632cf9f7885b51de4c66f86/components/tables/Remove_header/component.yaml'} - arguments: - table: - taskOutput: {outputName: transformed_table, taskId: Pandas Transform DataFrame - in CSV format 5, type: CSV} - Calculate regression metrics from csv 3: - componentRef: {digest: e3ecbfeb18032820edfee4255e2fb6d15d15ed224e166519d5e528e12053a995, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/7da1ac9464b4b3e7d95919faa2f1107a9635b7e4/components/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml'} - arguments: - true_values: - taskOutput: {outputName: table, taskId: Remove header 5} - predicted_values: - taskOutput: {outputName: predictions, taskId: Xgboost predict 2, type: Text} - Xgboost train 4: - componentRef: {digest: 09b80053da29f8f51575b42e5d2e8ad4b7bdcc92a02c3744e189b1f597006b38, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Train/component.yaml'} - arguments: - training_data: - taskOutput: {outputName: train_2, taskId: Split table into folds, type: CSV} - label_column: - graphInput: {inputName: label_column} - num_iterations: - graphInput: {inputName: num_iterations} - objective: - graphInput: {inputName: objective} - Xgboost predict 4: - componentRef: {digest: ecdfaf32cff15b6abc3d0dd80365ce00577f1a19a058fbe201f515431cea1357, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Predict/component.yaml'} - arguments: - data: - taskOutput: {outputName: test_2, taskId: Split table into folds, type: CSV} - model: - taskOutput: {outputName: model, taskId: Xgboost train 4, type: XGBoostModel} - label_column: - graphInput: {inputName: label_column} - Calculate regression metrics from csv 4: - componentRef: {digest: 
e3ecbfeb18032820edfee4255e2fb6d15d15ed224e166519d5e528e12053a995, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/7da1ac9464b4b3e7d95919faa2f1107a9635b7e4/components/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml'} - arguments: - true_values: - taskOutput: {outputName: table, taskId: Remove header 3} - predicted_values: - taskOutput: {outputName: predictions, taskId: Xgboost predict 4, type: Text} - Xgboost train 5: - componentRef: {digest: 09b80053da29f8f51575b42e5d2e8ad4b7bdcc92a02c3744e189b1f597006b38, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Train/component.yaml'} - arguments: - training_data: - taskOutput: {outputName: train_5, taskId: Split table into folds, type: CSV} - label_column: - graphInput: {inputName: label_column} - num_iterations: - graphInput: {inputName: num_iterations} - objective: - graphInput: {inputName: objective} - Xgboost predict 5: - componentRef: {digest: ecdfaf32cff15b6abc3d0dd80365ce00577f1a19a058fbe201f515431cea1357, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Predict/component.yaml'} - arguments: - data: - taskOutput: {outputName: test_5, taskId: Split table into folds, type: CSV} - model: - taskOutput: {outputName: model, taskId: Xgboost train 5, type: XGBoostModel} - label_column: - graphInput: {inputName: label_column} - Xgboost train 6: - componentRef: {digest: 09b80053da29f8f51575b42e5d2e8ad4b7bdcc92a02c3744e189b1f597006b38, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Train/component.yaml'} - arguments: - training_data: - taskOutput: {outputName: train_3, taskId: Split table into folds, type: CSV} - label_column: - graphInput: {inputName: label_column} - num_iterations: - graphInput: {inputName: num_iterations} - objective: - graphInput: {inputName: objective} - Xgboost predict 6: - componentRef: {digest: ecdfaf32cff15b6abc3d0dd80365ce00577f1a19a058fbe201f515431cea1357, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Predict/component.yaml'} - arguments: - data: - taskOutput: {outputName: test_3, taskId: Split table into folds, type: CSV} - model: - taskOutput: {outputName: model, taskId: Xgboost train 6, type: XGBoostModel} - label_column: - graphInput: {inputName: label_column} - Calculate regression metrics from csv 5: - componentRef: {digest: e3ecbfeb18032820edfee4255e2fb6d15d15ed224e166519d5e528e12053a995, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/7da1ac9464b4b3e7d95919faa2f1107a9635b7e4/components/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml'} - arguments: - true_values: - taskOutput: {outputName: table, taskId: Remove header 2} - predicted_values: - taskOutput: {outputName: predictions, taskId: Xgboost predict 6, type: Text} - Pandas Transform DataFrame in CSV format 6: - componentRef: {digest: 58dc88349157bf128021708c316ce4eb60bc1de0a5a7dd3af45fabac3276d510, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/6162d55998b176b50267d351241100bb0ee715bc/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml'} - arguments: - table: - taskOutput: {outputName: test_5, taskId: Split table into folds, type: CSV} - transform_code: df = df[["tips"]] - Remove header 6: - componentRef: {digest: ba35ffea863855b956c3c50aefa0420ba3823949a6c059e6e3971cde960dc5a3, - url: 
'https://raw.githubusercontent.com/kubeflow/pipelines/02c9638287468c849632cf9f7885b51de4c66f86/components/tables/Remove_header/component.yaml'} - arguments: - table: - taskOutput: {outputName: transformed_table, taskId: Pandas Transform DataFrame - in CSV format 6, type: CSV} - Calculate regression metrics from csv 6: - componentRef: {digest: e3ecbfeb18032820edfee4255e2fb6d15d15ed224e166519d5e528e12053a995, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/7da1ac9464b4b3e7d95919faa2f1107a9635b7e4/components/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml'} - arguments: - true_values: - taskOutput: {outputName: table, taskId: Remove header 6} - predicted_values: - taskOutput: {outputName: predictions, taskId: Xgboost predict 5, type: Text} - Aggregate regression metrics from csv: - componentRef: {digest: 3e128130521eff8d43764f3dcb037316cdd6490ad2878df5adef416f7c2f3c19, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/7ea9363fe201918d419fecdc00d1275e657ff712/components/ml_metrics/Aggregate_regression_metrics/component.yaml'} - arguments: - metrics_1: - taskOutput: {outputName: metrics, taskId: Calculate regression metrics - from csv 3, type: JsonObject} - metrics_2: - taskOutput: {outputName: metrics, taskId: Calculate regression metrics - from csv 4, type: JsonObject} - metrics_3: - taskOutput: {outputName: metrics, taskId: Calculate regression metrics - from csv 5, type: JsonObject} - metrics_4: - taskOutput: {outputName: metrics, taskId: Calculate regression metrics - from csv 2, type: JsonObject} - metrics_5: - taskOutput: {outputName: metrics, taskId: Calculate regression metrics - from csv 6, type: JsonObject} - outputValues: - model: - taskOutput: {outputName: model, taskId: Xgboost train, type: XGBoostModel} - training_mean_absolute_error: - taskOutput: {outputName: mean_absolute_error, taskId: Calculate regression - metrics from csv, type: Float} - training_mean_squared_error: - taskOutput: {outputName: mean_squared_error, taskId: Calculate regression - metrics from csv, type: Float} - training_root_mean_squared_error: - taskOutput: {outputName: root_mean_squared_error, taskId: Calculate regression - metrics from csv, type: Float} - training_metrics: - taskOutput: {outputName: metrics, taskId: Calculate regression metrics from - csv, type: JsonObject} - cv_mean_absolute_error: - taskOutput: {outputName: mean_absolute_error, taskId: Aggregate regression - metrics from csv, type: Float} - cv_mean_squared_error: - taskOutput: {outputName: mean_squared_error, taskId: Aggregate regression - metrics from csv, type: Float} - cv_root_mean_squared_error: - taskOutput: {outputName: root_mean_squared_error, taskId: Aggregate regression - metrics from csv, type: Float} - cv_metrics: - taskOutput: {outputName: metrics, taskId: Aggregate regression metrics from - csv, type: JsonObject} diff --git a/components/contrib/XGBoost/Train_regression_and_calculate_metrics/from_CSV/component.py b/components/contrib/XGBoost/Train_regression_and_calculate_metrics/from_CSV/component.py deleted file mode 100644 index e49efc757a8..00000000000 --- a/components/contrib/XGBoost/Train_regression_and_calculate_metrics/from_CSV/component.py +++ /dev/null @@ -1,60 +0,0 @@ -from collections import OrderedDict -from kfp import components - - -xgboost_train_on_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Train/component.yaml') -xgboost_predict_on_csv_op = 
components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Predict/component.yaml') -pandas_transform_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/6162d55998b176b50267d351241100bb0ee715bc/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml') -drop_header_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/02c9638287468c849632cf9f7885b51de4c66f86/components/tables/Remove_header/component.yaml') -calculate_regression_metrics_from_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/7da1ac9464b4b3e7d95919faa2f1107a9635b7e4/components/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml') - - -def xgboost_train_regression_and_calculate_metrics_on_csv( - training_data: 'CSV', - testing_data: 'CSV', - label_column: int = 0, - objective: str = 'reg:squarederror', - num_iterations: int = 200, -): - model = xgboost_train_on_csv_op( - training_data=training_data, - label_column=label_column, - objective=objective, - num_iterations=num_iterations, - ).outputs['model'] - - predictions = xgboost_predict_on_csv_op( - data=testing_data, - model=model, - label_column=label_column, - ).output - - true_values_table = pandas_transform_csv_op( - table=testing_data, - transform_code='df = df[["tips"]]', - ).output - - true_values = drop_header_op(true_values_table).output - - metrics_task = calculate_regression_metrics_from_csv_op( - true_values=true_values, - predicted_values=predictions, - ) - return OrderedDict([ - ('model', model), - ('mean_absolute_error', metrics_task.outputs['mean_absolute_error']), - ('mean_squared_error', metrics_task.outputs['mean_squared_error']), - ('root_mean_squared_error', metrics_task.outputs['root_mean_squared_error']), - ('metrics', metrics_task.outputs['metrics']), - ]) - - -if __name__ == '__main__': - xgboost_train_regression_and_calculate_metrics_on_csv_op = components.create_graph_component_from_pipeline_func( - xgboost_train_regression_and_calculate_metrics_on_csv, - output_component_file='component.yaml', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/XGBoost/Train_regression_and_calculate_metrics/from_CSV/component.yaml", - }, - ) diff --git a/components/contrib/XGBoost/Train_regression_and_calculate_metrics/from_CSV/component.yaml b/components/contrib/XGBoost/Train_regression_and_calculate_metrics/from_CSV/component.yaml deleted file mode 100644 index 2436323d7fa..00000000000 --- a/components/contrib/XGBoost/Train_regression_and_calculate_metrics/from_CSV/component.yaml +++ /dev/null @@ -1,79 +0,0 @@ -name: Xgboost train regression and calculate metrics on csv -inputs: -- {name: training_data, type: CSV} -- {name: testing_data, type: CSV} -- {name: label_column, type: Integer, default: '0', optional: true} -- {name: objective, type: String, default: 'reg:squarederror', optional: true} -- {name: num_iterations, type: Integer, default: '200', optional: true} -outputs: -- {name: model, type: XGBoostModel} -- {name: mean_absolute_error, type: Float} -- {name: mean_squared_error, type: Float} -- {name: root_mean_squared_error, type: Float} -- {name: metrics, type: JsonObject} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 
'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/XGBoost/Train_regression_and_calculate_metrics/from_CSV/component.yaml' -implementation: - graph: - tasks: - Xgboost train: - componentRef: {digest: 09b80053da29f8f51575b42e5d2e8ad4b7bdcc92a02c3744e189b1f597006b38, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Train/component.yaml'} - arguments: - training_data: - graphInput: {inputName: training_data} - label_column: - graphInput: {inputName: label_column} - num_iterations: - graphInput: {inputName: num_iterations} - objective: - graphInput: {inputName: objective} - Xgboost predict: - componentRef: {digest: ecdfaf32cff15b6abc3d0dd80365ce00577f1a19a058fbe201f515431cea1357, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Predict/component.yaml'} - arguments: - data: - graphInput: {inputName: testing_data} - model: - taskOutput: {outputName: model, taskId: Xgboost train, type: XGBoostModel} - label_column: - graphInput: {inputName: label_column} - Pandas Transform DataFrame in CSV format: - componentRef: {digest: 58dc88349157bf128021708c316ce4eb60bc1de0a5a7dd3af45fabac3276d510, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/6162d55998b176b50267d351241100bb0ee715bc/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml'} - arguments: - table: - graphInput: {inputName: testing_data} - transform_code: df = df[["tips"]] - Remove header: - componentRef: {digest: ba35ffea863855b956c3c50aefa0420ba3823949a6c059e6e3971cde960dc5a3, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/02c9638287468c849632cf9f7885b51de4c66f86/components/tables/Remove_header/component.yaml'} - arguments: - table: - taskOutput: {outputName: transformed_table, taskId: Pandas Transform DataFrame - in CSV format, type: CSV} - Calculate regression metrics from csv: - componentRef: {digest: e3ecbfeb18032820edfee4255e2fb6d15d15ed224e166519d5e528e12053a995, - url: 'https://raw.githubusercontent.com/kubeflow/pipelines/7da1ac9464b4b3e7d95919faa2f1107a9635b7e4/components/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml'} - arguments: - true_values: - taskOutput: {outputName: table, taskId: Remove header} - predicted_values: - taskOutput: {outputName: predictions, taskId: Xgboost predict, type: Text} - outputValues: - model: - taskOutput: {outputName: model, taskId: Xgboost train, type: XGBoostModel} - mean_absolute_error: - taskOutput: {outputName: mean_absolute_error, taskId: Calculate regression - metrics from csv, type: Float} - mean_squared_error: - taskOutput: {outputName: mean_squared_error, taskId: Calculate regression - metrics from csv, type: Float} - root_mean_squared_error: - taskOutput: {outputName: root_mean_squared_error, taskId: Calculate regression - metrics from csv, type: Float} - metrics: - taskOutput: {outputName: metrics, taskId: Calculate regression metrics from - csv, type: JsonObject} diff --git a/components/contrib/XGBoost/_samples/recursive_training.py b/components/contrib/XGBoost/_samples/recursive_training.py deleted file mode 100644 index ecb499d5b4e..00000000000 --- a/components/contrib/XGBoost/_samples/recursive_training.py +++ /dev/null @@ -1,91 +0,0 @@ -#!/usr/bin/env python3 - -# This sample demonstrates continuous training using a train-eval-check recursive loop. 
-# The main pipeline trains the initial model and then gradually trains the model -# some more until the model evaluation metrics are good enough. - -import kfp -from kfp import components - - -chicago_taxi_dataset_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e3337b8bdcd63636934954e592d4b32c95b49129/components/datasets/Chicago%20Taxi/component.yaml') -xgboost_train_on_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Train/component.yaml') -xgboost_predict_on_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Predict/component.yaml') - -pandas_transform_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/6162d55998b176b50267d351241100bb0ee715bc/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml') -drop_header_op = kfp.components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/02c9638287468c849632cf9f7885b51de4c66f86/components/tables/Remove_header/component.yaml') -calculate_regression_metrics_from_csv_op = kfp.components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/616542ac0f789914f4eb53438da713dd3004fba4/components/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml') - - -# This recursive sub-pipeline trains a model, evaluates it, calculates the metrics and checks them. -# If the model error is too high, then more training is performed until the model is good. -@kfp.dsl.graph_component -def train_until_low_error(starting_model, training_data, true_values): - # Training - model = xgboost_train_on_csv_op( - training_data=training_data, - starting_model=starting_model, - label_column=0, - objective='reg:squarederror', - num_iterations=50, - ).outputs['model'] - - # Predicting - predictions = xgboost_predict_on_csv_op( - data=training_data, - model=model, - label_column=0, - ).output - - # Calculating the regression metrics - metrics_task = calculate_regression_metrics_from_csv_op( - true_values=true_values, - predicted_values=predictions, - ) - - # Checking the metrics - with kfp.dsl.Condition(metrics_task.outputs['mean_squared_error'] > 0.01): - # Training some more - train_until_low_error( - starting_model=model, - training_data=training_data, - true_values=true_values, - ) - - -# The main pipleine trains the initial model and then gradually trains the model some more until the model evaluation metrics are good enough. 
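For orientation, the train-eval-check recursion deleted above boils down to continuing XGBoost training from a previous booster until an error threshold is met. A minimal local sketch of that loop using the xgboost and scikit-learn Python APIs directly, outside of KFP (the DataFrame argument, threshold, and step counts are illustrative assumptions, not values from the sample):

    import xgboost
    from sklearn.metrics import mean_squared_error

    def train_until_low_error(df, threshold=0.01, rounds_per_step=50, max_steps=20):
        # First column is the label, mirroring label_column=0 in the deleted sample.
        labels, features = df.iloc[:, 0], df.iloc[:, 1:]
        dtrain = xgboost.DMatrix(features, label=labels)
        booster = None
        for _ in range(max_steps):
            # xgb_model continues training from the previous booster,
            # playing the role of starting_model in the deleted pipeline.
            booster = xgboost.train(
                {'objective': 'reg:squarederror'},
                dtrain,
                num_boost_round=rounds_per_step,
                xgb_model=booster,
            )
            if mean_squared_error(labels, booster.predict(dtrain)) <= threshold:
                break
        return booster
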
-def train_until_good_pipeline(): - # Preparing the training data - training_data = chicago_taxi_dataset_op( - where='trip_start_timestamp >= "2019-01-01" AND trip_start_timestamp < "2019-02-01"', - select='tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total', - limit=10000, - ).output - - # Preparing the true values - true_values_table = pandas_transform_csv_op( - table=training_data, - transform_code='df = df[["tips"]]', - ).output - - true_values = drop_header_op(true_values_table).output - - # Initial model training - first_model = xgboost_train_on_csv_op( - training_data=training_data, - label_column=0, - objective='reg:squarederror', - num_iterations=100, - ).outputs['model'] - - # Recursively training until the error becomes low - train_until_low_error( - starting_model=first_model, - training_data=training_data, - true_values=true_values, - ) - - -if __name__ == '__main__': - kfp_endpoint=None - kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func(train_until_good_pipeline, arguments={}) diff --git a/components/contrib/XGBoost/_samples/sample_pipeline.py b/components/contrib/XGBoost/_samples/sample_pipeline.py deleted file mode 100644 index bf78d75c8b4..00000000000 --- a/components/contrib/XGBoost/_samples/sample_pipeline.py +++ /dev/null @@ -1,68 +0,0 @@ -import kfp -from kfp import components - - -chicago_taxi_dataset_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e3337b8bdcd63636934954e592d4b32c95b49129/components/datasets/Chicago%20Taxi/component.yaml') -convert_csv_to_apache_parquet_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/0d7d6f41c92bdc05c2825232afe2b47e5cb6c4b3/components/_converters/ApacheParquet/from_CSV/component.yaml') -xgboost_train_on_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Train/component.yaml') -xgboost_predict_on_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Predict/component.yaml') -xgboost_train_on_parquet_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/0ae2f30ff24beeef1c64cc7c434f1f652c065192/components/XGBoost/Train/from_ApacheParquet/component.yaml') -xgboost_predict_on_parquet_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/0ae2f30ff24beeef1c64cc7c434f1f652c065192/components/XGBoost/Predict/from_ApacheParquet/component.yaml') - - -def xgboost_pipeline(): - training_data_csv = chicago_taxi_dataset_op( - where='trip_start_timestamp >= "2019-01-01" AND trip_start_timestamp < "2019-02-01"', - select='tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total', - limit=10000, - ).output - - # Training and prediction on dataset in CSV format - model_trained_on_csv = xgboost_train_on_csv_op( - training_data=training_data_csv, - label_column=0, - objective='reg:squarederror', - num_iterations=200, - ).outputs['model'] - - xgboost_predict_on_csv_op( - data=training_data_csv, - model=model_trained_on_csv, - label_column=0, - ) - - # Training and prediction on dataset in Apache Parquet format - training_data_parquet = convert_csv_to_apache_parquet_op( - training_data_csv - ).output - - model_trained_on_parquet = xgboost_train_on_parquet_op( - 
training_data=training_data_parquet, - label_column_name='tips', - objective='reg:squarederror', - num_iterations=200, - ).outputs['model'] - - xgboost_predict_on_parquet_op( - data=training_data_parquet, - model=model_trained_on_parquet, - label_column_name='tips', - ) - - # Checking cross-format predictions - xgboost_predict_on_parquet_op( - data=training_data_parquet, - model=model_trained_on_csv, - label_column_name='tips', - ) - - xgboost_predict_on_csv_op( - data=training_data_csv, - model=model_trained_on_parquet, - label_column=0, - ) - - -if __name__ == '__main__': - kfp_endpoint=None - kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func(xgboost_pipeline, arguments={}) diff --git a/components/contrib/XGBoost/_samples/training_with_cross_validation.py b/components/contrib/XGBoost/_samples/training_with_cross_validation.py deleted file mode 100644 index c0cf722e5dd..00000000000 --- a/components/contrib/XGBoost/_samples/training_with_cross_validation.py +++ /dev/null @@ -1,34 +0,0 @@ -# cross_validation_pipeline compact -import kfp -from kfp import components - - -chicago_taxi_dataset_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e3337b8bdcd63636934954e592d4b32c95b49129/components/datasets/Chicago%20Taxi/component.yaml') -xgboost_train_and_cv_regression_on_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/1a11ce2aea5243cdcc2b4721675303f78f49ca21/components/XGBoost/Train_and_cross-validate_regression/from_CSV/component.yaml') - - -def cross_validation_pipeline( - label_column: int = 0, - objective: str = 'reg:squarederror', - num_iterations: int = 200, -): - data = chicago_taxi_dataset_op( - where='trip_start_timestamp >= "{}" AND trip_start_timestamp < "{}"'.format('2019-01-01', '2019-02-01'), - select='tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total', - limit=10000, - ).output - - xgboost_train_and_cv_regression_on_csv_op( - data=data, - label_column=label_column, - objective=objective, - num_iterations=num_iterations, - ) - - -if __name__ == '__main__': - kfp_endpoint=None - kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func( - cross_validation_pipeline, - arguments={}, - ) diff --git a/components/contrib/_converters/ApacheParquet/_samples/sample_pipeline.py b/components/contrib/_converters/ApacheParquet/_samples/sample_pipeline.py deleted file mode 100644 index f9ee51bdbe2..00000000000 --- a/components/contrib/_converters/ApacheParquet/_samples/sample_pipeline.py +++ /dev/null @@ -1,41 +0,0 @@ -import kfp -from kfp import components - -component_store = components.ComponentStore(url_search_prefixes=['https://raw.githubusercontent.com/kubeflow/pipelines/af3eaf64e87313795cad1add9bfd9fa1e86af6de/components/']) - -chicago_taxi_dataset_op = component_store.load_component(name='datasets/Chicago_Taxi_Trips') -convert_csv_to_apache_parquet_op = component_store.load_component(name='_converters/ApacheParquet/from_CSV') -convert_tsv_to_apache_parquet_op = component_store.load_component(name='_converters/ApacheParquet/from_TSV') -convert_apache_parquet_to_csv_op = component_store.load_component(name='_converters/ApacheParquet/to_CSV') -convert_apache_parquet_to_tsv_op = component_store.load_component(name='_converters/ApacheParquet/to_TSV') -convert_apache_parquet_to_apache_arrow_feather_op = component_store.load_component(name='_converters/ApacheParquet/to_ApacheArrowFeather') -convert_apache_arrow_feather_to_apache_parquet_op 
= component_store.load_component(name='_converters/ApacheParquet/from_ApacheArrowFeather') - - -def parquet_pipeline(): - csv = chicago_taxi_dataset_op( - where='trip_start_timestamp >= "2019-01-01" AND trip_start_timestamp < "2019-02-01"', - select='tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total', - limit=10000, - ).output - - tsv = chicago_taxi_dataset_op( - where='trip_start_timestamp >= "2019-01-01" AND trip_start_timestamp < "2019-02-01"', - select='tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total', - limit=10000, - format='tsv', - ).output - - csv_parquet = convert_csv_to_apache_parquet_op(csv).output - csv_parquet_csv = convert_apache_parquet_to_csv_op(csv_parquet).output - csv_parquet_feather = convert_apache_parquet_to_apache_arrow_feather_op(csv_parquet).output - csv_parquet_feather_parquet = convert_apache_arrow_feather_to_apache_parquet_op(csv_parquet_feather).output - - tsv_parquet = convert_tsv_to_apache_parquet_op(tsv).output - tsv_parquet_tsv = convert_apache_parquet_to_tsv_op(tsv_parquet).output - tsv_parquet_feather = convert_apache_parquet_to_apache_arrow_feather_op(tsv_parquet).output - tsv_parquet_feather_parquet = convert_apache_arrow_feather_to_apache_parquet_op(tsv_parquet_feather).output - -if __name__ == '__main__': - kfp_endpoint = None - kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func(parquet_pipeline, arguments={}) diff --git a/components/contrib/_converters/ApacheParquet/from_ApacheArrowFeather/component.py b/components/contrib/_converters/ApacheParquet/from_ApacheArrowFeather/component.py deleted file mode 100644 index c24b25ce15a..00000000000 --- a/components/contrib/_converters/ApacheParquet/from_ApacheArrowFeather/component.py +++ /dev/null @@ -1,31 +0,0 @@ -from kfp.components import InputPath, OutputPath, create_component_from_func - -def convert_apache_arrow_feather_to_apache_parquet( - data_path: InputPath('ApacheArrowFeather'), - output_data_path: OutputPath('ApacheParquet'), -): - '''Converts Apache Arrow Feather to Apache Parquet. - - [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html) - [Apache Parquet](https://parquet.apache.org/) - - Annotations: - author: Alexey Volkov - ''' - from pyarrow import feather, parquet - - table = feather.read_table(data_path) - parquet.write_table(table, output_data_path) - - -if __name__ == '__main__': - create_component_from_func( - convert_apache_arrow_feather_to_apache_parquet, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=['pyarrow==0.17.1'], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/ApacheParquet/from_ApacheArrowFeather/component.yaml", - }, - ) diff --git a/components/contrib/_converters/ApacheParquet/from_ApacheArrowFeather/component.yaml b/components/contrib/_converters/ApacheParquet/from_ApacheArrowFeather/component.yaml deleted file mode 100644 index a7d147290be..00000000000 --- a/components/contrib/_converters/ApacheParquet/from_ApacheArrowFeather/component.yaml +++ /dev/null @@ -1,78 +0,0 @@ -name: Convert apache arrow feather to apache parquet -description: |- - Converts Apache Arrow Feather to Apache Parquet. 
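The ApacheParquet converter components deleted here and below are thin wrappers over pyarrow and pandas. A minimal round-trip sketch of the same conversions with a recent pyarrow release rather than the pinned pyarrow==0.17.1; the file names are placeholders:

    from pyarrow import csv, feather, parquet

    table = csv.read_csv('data.csv')                    # CSV -> Arrow table
    parquet.write_table(table, 'data.parquet')          # -> Parquet
    table2 = parquet.read_table('data.parquet')         # Parquet -> Arrow table
    feather.write_feather(table2, 'data.feather')       # -> Arrow Feather
    # Feather -> CSV again, via pandas
    feather.read_table('data.feather').to_pandas().to_csv('data_round_trip.csv', index=False)
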
- - [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html) - [Apache Parquet](https://parquet.apache.org/) - - Annotations: - author: Alexey Volkov -inputs: -- {name: data, type: ApacheArrowFeather} -outputs: -- {name: output_data, type: ApacheParquet} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/ApacheParquet/from_ApacheArrowFeather/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pyarrow==0.17.1' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install - --quiet --no-warn-script-location 'pyarrow==0.17.1' --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def convert_apache_arrow_feather_to_apache_parquet( - data_path, - output_data_path, - ): - '''Converts Apache Arrow Feather to Apache Parquet. - - [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html) - [Apache Parquet](https://parquet.apache.org/) - - Annotations: - author: Alexey Volkov - ''' - from pyarrow import feather, parquet - - table = feather.read_table(data_path) - parquet.write_table(table, output_data_path) - - import argparse - _parser = argparse.ArgumentParser(prog='Convert apache arrow feather to apache parquet', description='Converts Apache Arrow Feather to Apache Parquet.\n\n [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html)\n [Apache Parquet](https://parquet.apache.org/)\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--output-data", dest="output_data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = convert_apache_arrow_feather_to_apache_parquet(**_parsed_args) - - _output_serializers = [ - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - --data - - {inputPath: data} - - --output-data - - {outputPath: output_data} diff --git a/components/contrib/_converters/ApacheParquet/from_CSV/component.py b/components/contrib/_converters/ApacheParquet/from_CSV/component.py deleted file mode 100644 index 08290a65443..00000000000 --- a/components/contrib/_converters/ApacheParquet/from_CSV/component.py +++ /dev/null @@ -1,30 +0,0 @@ -from kfp.components import InputPath, OutputPath, create_component_from_func - -def convert_csv_to_apache_parquet( - data_path: InputPath('CSV'), - output_data_path: OutputPath('ApacheParquet'), -): - '''Converts CSV table to Apache Parquet. 
- - [Apache Parquet](https://parquet.apache.org/) - - Annotations: - author: Alexey Volkov - ''' - from pyarrow import csv, parquet - - table = csv.read_csv(data_path) - parquet.write_table(table, output_data_path) - - -if __name__ == '__main__': - create_component_from_func( - convert_csv_to_apache_parquet, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=['pyarrow==0.17.1'], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/ApacheParquet/from_CSV/component.yaml", - }, - ) diff --git a/components/contrib/_converters/ApacheParquet/from_CSV/component.yaml b/components/contrib/_converters/ApacheParquet/from_CSV/component.yaml deleted file mode 100644 index 72d7e012e6f..00000000000 --- a/components/contrib/_converters/ApacheParquet/from_CSV/component.yaml +++ /dev/null @@ -1,76 +0,0 @@ -name: Convert csv to apache parquet -description: |- - Converts CSV table to Apache Parquet. - - [Apache Parquet](https://parquet.apache.org/) - - Annotations: - author: Alexey Volkov -inputs: -- {name: data, type: CSV} -outputs: -- {name: output_data, type: ApacheParquet} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/ApacheParquet/from_CSV/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pyarrow==0.17.1' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install - --quiet --no-warn-script-location 'pyarrow==0.17.1' --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def convert_csv_to_apache_parquet( - data_path, - output_data_path, - ): - '''Converts CSV table to Apache Parquet. 
- - [Apache Parquet](https://parquet.apache.org/) - - Annotations: - author: Alexey Volkov - ''' - from pyarrow import csv, parquet - - table = csv.read_csv(data_path) - parquet.write_table(table, output_data_path) - - import argparse - _parser = argparse.ArgumentParser(prog='Convert csv to apache parquet', description='Converts CSV table to Apache Parquet.\n\n [Apache Parquet](https://parquet.apache.org/)\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--output-data", dest="output_data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = convert_csv_to_apache_parquet(**_parsed_args) - - _output_serializers = [ - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - --data - - {inputPath: data} - - --output-data - - {outputPath: output_data} diff --git a/components/contrib/_converters/ApacheParquet/from_TSV/component.py b/components/contrib/_converters/ApacheParquet/from_TSV/component.py deleted file mode 100644 index 3edf45d2c2d..00000000000 --- a/components/contrib/_converters/ApacheParquet/from_TSV/component.py +++ /dev/null @@ -1,30 +0,0 @@ -from kfp.components import InputPath, OutputPath, create_component_from_func - -def convert_tsv_to_apache_parquet( - data_path: InputPath('TSV'), - output_data_path: OutputPath('ApacheParquet'), -): - '''Converts TSV table to Apache Parquet. - - [Apache Parquet](https://parquet.apache.org/) - - Annotations: - author: Alexey Volkov - ''' - from pyarrow import csv, parquet - - table = csv.read_csv(data_path, parse_options=csv.ParseOptions(delimiter='\t')) - parquet.write_table(table, output_data_path) - - -if __name__ == '__main__': - create_component_from_func( - convert_tsv_to_apache_parquet, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=['pyarrow==0.17.1'], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/ApacheParquet/from_TSV/component.yaml", - }, - ) diff --git a/components/contrib/_converters/ApacheParquet/from_TSV/component.yaml b/components/contrib/_converters/ApacheParquet/from_TSV/component.yaml deleted file mode 100644 index 017d4a692e9..00000000000 --- a/components/contrib/_converters/ApacheParquet/from_TSV/component.yaml +++ /dev/null @@ -1,76 +0,0 @@ -name: Convert tsv to apache parquet -description: |- - Converts TSV table to Apache Parquet. 
- - [Apache Parquet](https://parquet.apache.org/) - - Annotations: - author: Alexey Volkov -inputs: -- {name: data, type: TSV} -outputs: -- {name: output_data, type: ApacheParquet} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/ApacheParquet/from_TSV/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pyarrow==0.17.1' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install - --quiet --no-warn-script-location 'pyarrow==0.17.1' --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def convert_tsv_to_apache_parquet( - data_path, - output_data_path, - ): - '''Converts TSV table to Apache Parquet. - - [Apache Parquet](https://parquet.apache.org/) - - Annotations: - author: Alexey Volkov - ''' - from pyarrow import csv, parquet - - table = csv.read_csv(data_path, parse_options=csv.ParseOptions(delimiter='\t')) - parquet.write_table(table, output_data_path) - - import argparse - _parser = argparse.ArgumentParser(prog='Convert tsv to apache parquet', description='Converts TSV table to Apache Parquet.\n\n [Apache Parquet](https://parquet.apache.org/)\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--output-data", dest="output_data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = convert_tsv_to_apache_parquet(**_parsed_args) - - _output_serializers = [ - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - --data - - {inputPath: data} - - --output-data - - {outputPath: output_data} diff --git a/components/contrib/_converters/ApacheParquet/to_ApacheArrowFeather/component.py b/components/contrib/_converters/ApacheParquet/to_ApacheArrowFeather/component.py deleted file mode 100644 index 08a259a58a4..00000000000 --- a/components/contrib/_converters/ApacheParquet/to_ApacheArrowFeather/component.py +++ /dev/null @@ -1,31 +0,0 @@ -from kfp.components import InputPath, OutputPath, create_component_from_func - -def convert_apache_parquet_to_apache_arrow_feather( - data_path: InputPath('ApacheParquet'), - output_data_path: OutputPath('ApacheArrowFeather'), -): - '''Converts Apache Parquet to Apache Arrow Feather. 
- - [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html) - [Apache Parquet](https://parquet.apache.org/) - - Annotations: - author: Alexey Volkov - ''' - from pyarrow import feather, parquet - - data_frame = parquet.read_pandas(data_path).to_pandas() - feather.write_feather(data_frame, output_data_path) - - -if __name__ == '__main__': - convert_apache_parquet_to_apache_arrow_feather_op = create_component_from_func( - convert_apache_parquet_to_apache_arrow_feather, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=['pyarrow==0.17.1', 'pandas==1.0.3'], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/ApacheParquet/to_ApacheArrowFeather/component.yaml", - }, - ) diff --git a/components/contrib/_converters/ApacheParquet/to_ApacheArrowFeather/component.yaml b/components/contrib/_converters/ApacheParquet/to_ApacheArrowFeather/component.yaml deleted file mode 100644 index 0a0fef6046e..00000000000 --- a/components/contrib/_converters/ApacheParquet/to_ApacheArrowFeather/component.yaml +++ /dev/null @@ -1,79 +0,0 @@ -name: Convert apache parquet to apache arrow feather -description: |- - Converts Apache Parquet to Apache Arrow Feather. - - [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html) - [Apache Parquet](https://parquet.apache.org/) - - Annotations: - author: Alexey Volkov -inputs: -- {name: data, type: ApacheParquet} -outputs: -- {name: output_data, type: ApacheArrowFeather} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/ApacheParquet/to_ApacheArrowFeather/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pyarrow==0.17.1' 'pandas==1.0.3' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 - -m pip install --quiet --no-warn-script-location 'pyarrow==0.17.1' 'pandas==1.0.3' - --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def convert_apache_parquet_to_apache_arrow_feather( - data_path, - output_data_path, - ): - '''Converts Apache Parquet to Apache Arrow Feather. 
- - [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html) - [Apache Parquet](https://parquet.apache.org/) - - Annotations: - author: Alexey Volkov - ''' - from pyarrow import feather, parquet - - data_frame = parquet.read_pandas(data_path).to_pandas() - feather.write_feather(data_frame, output_data_path) - - import argparse - _parser = argparse.ArgumentParser(prog='Convert apache parquet to apache arrow feather', description='Converts Apache Parquet to Apache Arrow Feather.\n\n [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html)\n [Apache Parquet](https://parquet.apache.org/)\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--output-data", dest="output_data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = convert_apache_parquet_to_apache_arrow_feather(**_parsed_args) - - _output_serializers = [ - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - --data - - {inputPath: data} - - --output-data - - {outputPath: output_data} diff --git a/components/contrib/_converters/ApacheParquet/to_CSV/component.py b/components/contrib/_converters/ApacheParquet/to_CSV/component.py deleted file mode 100644 index 55fa3e93a12..00000000000 --- a/components/contrib/_converters/ApacheParquet/to_CSV/component.py +++ /dev/null @@ -1,33 +0,0 @@ -from kfp.components import InputPath, OutputPath, create_component_from_func - -def convert_apache_parquet_to_csv( - data_path: InputPath('ApacheParquet'), - output_data_path: OutputPath('CSV'), -): - '''Converts Apache Parquet to CSV. - - [Apache Parquet](https://parquet.apache.org/) - - Annotations: - author: Alexey Volkov - ''' - from pyarrow import parquet - - data_frame = parquet.read_pandas(data_path).to_pandas() - data_frame.to_csv( - output_data_path, - index=False, - ) - - -if __name__ == '__main__': - convert_apache_parquet_to_csv_op = create_component_from_func( - convert_apache_parquet_to_csv, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=['pyarrow==0.17.1', 'pandas==1.0.3'], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/ApacheParquet/to_CSV/component.yaml", - }, - ) diff --git a/components/contrib/_converters/ApacheParquet/to_CSV/component.yaml b/components/contrib/_converters/ApacheParquet/to_CSV/component.yaml deleted file mode 100644 index 095a0d19e48..00000000000 --- a/components/contrib/_converters/ApacheParquet/to_CSV/component.yaml +++ /dev/null @@ -1,66 +0,0 @@ -name: Convert apache parquet to csv -description: |- - Converts Apache Parquet to CSV. 
- - [Apache Parquet](https://parquet.apache.org/) - - Annotations: - author: Alexey Volkov -inputs: -- {name: data, type: ApacheParquet} -outputs: -- {name: output_data, type: CSV} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/ApacheParquet/to_CSV/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pyarrow==0.17.1' 'pandas==1.0.3' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 - -m pip install --quiet --no-warn-script-location 'pyarrow==0.17.1' 'pandas==1.0.3' - --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def convert_apache_parquet_to_csv( - data_path, - output_data_path, - ): - '''Converts Apache Parquet to CSV. - - [Apache Parquet](https://parquet.apache.org/) - - Annotations: - author: Alexey Volkov - ''' - from pyarrow import parquet - - data_frame = parquet.read_pandas(data_path).to_pandas() - data_frame.to_csv( - output_data_path, - index=False, - ) - - import argparse - _parser = argparse.ArgumentParser(prog='Convert apache parquet to csv', description='Converts Apache Parquet to CSV.\n\n [Apache Parquet](https://parquet.apache.org/)\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--output-data", dest="output_data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = convert_apache_parquet_to_csv(**_parsed_args) - args: - - --data - - {inputPath: data} - - --output-data - - {outputPath: output_data} diff --git a/components/contrib/_converters/ApacheParquet/to_TSV/component.py b/components/contrib/_converters/ApacheParquet/to_TSV/component.py deleted file mode 100644 index 4a09f9b6cc6..00000000000 --- a/components/contrib/_converters/ApacheParquet/to_TSV/component.py +++ /dev/null @@ -1,34 +0,0 @@ -from kfp.components import InputPath, OutputPath, create_component_from_func - -def convert_apache_parquet_to_tsv( - data_path: InputPath('ApacheParquet'), - output_data_path: OutputPath('TSV'), -): - '''Converts Apache Parquet to TSV. 
- - [Apache Parquet](https://parquet.apache.org/) - - Annotations: - author: Alexey Volkov - ''' - from pyarrow import parquet - - data_frame = parquet.read_pandas(data_path).to_pandas() - data_frame.to_csv( - output_data_path, - index=False, - sep='\t', - ) - - -if __name__ == '__main__': - convert_apache_parquet_to_tsv_op = create_component_from_func( - convert_apache_parquet_to_tsv, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=['pyarrow==0.17.1', 'pandas==1.0.3'], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/ApacheParquet/to_TSV/component.yaml", - }, - ) diff --git a/components/contrib/_converters/ApacheParquet/to_TSV/component.yaml b/components/contrib/_converters/ApacheParquet/to_TSV/component.yaml deleted file mode 100644 index a213d5d9250..00000000000 --- a/components/contrib/_converters/ApacheParquet/to_TSV/component.yaml +++ /dev/null @@ -1,67 +0,0 @@ -name: Convert apache parquet to tsv -description: |- - Converts Apache Parquet to TSV. - - [Apache Parquet](https://parquet.apache.org/) - - Annotations: - author: Alexey Volkov -inputs: -- {name: data, type: ApacheParquet} -outputs: -- {name: output_data, type: TSV} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/ApacheParquet/to_TSV/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pyarrow==0.17.1' 'pandas==1.0.3' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 - -m pip install --quiet --no-warn-script-location 'pyarrow==0.17.1' 'pandas==1.0.3' - --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def convert_apache_parquet_to_tsv( - data_path, - output_data_path, - ): - '''Converts Apache Parquet to TSV. 
- - [Apache Parquet](https://parquet.apache.org/) - - Annotations: - author: Alexey Volkov - ''' - from pyarrow import parquet - - data_frame = parquet.read_pandas(data_path).to_pandas() - data_frame.to_csv( - output_data_path, - index=False, - sep='\t', - ) - - import argparse - _parser = argparse.ArgumentParser(prog='Convert apache parquet to tsv', description='Converts Apache Parquet to TSV.\n\n [Apache Parquet](https://parquet.apache.org/)\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--output-data", dest="output_data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = convert_apache_parquet_to_tsv(**_parsed_args) - args: - - --data - - {inputPath: data} - - --output-data - - {outputPath: output_data} diff --git a/components/contrib/_converters/KerasModelHdf5/to_TensorflowSavedModel/component.py b/components/contrib/_converters/KerasModelHdf5/to_TensorflowSavedModel/component.py deleted file mode 100644 index 79045746d76..00000000000 --- a/components/contrib/_converters/KerasModelHdf5/to_TensorflowSavedModel/component.py +++ /dev/null @@ -1,33 +0,0 @@ -from kfp.components import create_component_from_func, InputPath, OutputPath - -def keras_convert_hdf5_model_to_tf_saved_model( - model_path: InputPath('KerasModelHdf5'), - converted_model_path: OutputPath('TensorflowSavedModel'), -): - '''Converts Keras HDF5 model to Tensorflow SavedModel format. - - Args: - model_path: Keras model in HDF5 format. - converted_model_path: Keras model in Tensorflow SavedModel format. - - Annotations: - author: Alexey Volkov - ''' - from pathlib import Path - from tensorflow import keras - - model = keras.models.load_model(filepath=model_path) - keras.models.save_model(model=model, filepath=converted_model_path, save_format='tf') - - -if __name__ == '__main__': - keras_convert_hdf5_model_to_tf_saved_model_op = create_component_from_func( - keras_convert_hdf5_model_to_tf_saved_model, - base_image='tensorflow/tensorflow:2.3.0', - packages_to_install=['h5py==2.10.0'], - output_component_file='component.yaml', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/KerasModelHdf5/to_TensorflowSavedModel/component.yaml", - }, - ) diff --git a/components/contrib/_converters/KerasModelHdf5/to_TensorflowSavedModel/component.yaml b/components/contrib/_converters/KerasModelHdf5/to_TensorflowSavedModel/component.yaml deleted file mode 100644 index 5db1eae7646..00000000000 --- a/components/contrib/_converters/KerasModelHdf5/to_TensorflowSavedModel/component.yaml +++ /dev/null @@ -1,59 +0,0 @@ -name: Keras convert hdf5 model to tf saved model -description: Converts Keras HDF5 model to Tensorflow SavedModel format. 
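The Keras and TensorFlow format converters being removed here reduce to a few library calls. A hedged sketch with the TF 2.x Python API (paths are placeholders; behaviour assumes the legacy tf.keras HDF5/SavedModel formats used by the tensorflow/tensorflow:2.3.0 image above, not Keras 3):

    import tensorflow as tf
    from tensorflow import keras

    model = keras.models.load_model('model.h5')        # KerasModelHdf5 in
    model.save('saved_model_dir', save_format='tf')    # TensorflowSavedModel out

    # SavedModel -> TensorflowLiteModel, the step the tflite_convert CLI
    # performs in the TensorflowLiteModel converters deleted further below.
    converter = tf.lite.TFLiteConverter.from_saved_model('saved_model_dir')
    with open('model.tflite', 'wb') as f:
        f.write(converter.convert())
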
-inputs: -- {name: model, type: KerasModelHdf5, description: Keras model in HDF5 format.} -outputs: -- {name: converted_model, type: TensorflowSavedModel, description: Keras model in Tensorflow SavedModel format.} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/KerasModelHdf5/to_TensorflowSavedModel/component.yaml' -implementation: - container: - image: tensorflow/tensorflow:2.3.0 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'h5py==2.10.0' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet - --no-warn-script-location 'h5py==2.10.0' --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def keras_convert_hdf5_model_to_tf_saved_model( - model_path, - converted_model_path, - ): - '''Converts Keras HDF5 model to Tensorflow SavedModel format. - - Args: - model_path: Keras model in HDF5 format. - converted_model_path: Keras model in Tensorflow SavedModel format. - - Annotations: - author: Alexey Volkov - ''' - from pathlib import Path - from tensorflow import keras - - model = keras.models.load_model(filepath=model_path) - keras.models.save_model(model=model, filepath=converted_model_path, save_format='tf') - - import argparse - _parser = argparse.ArgumentParser(prog='Keras convert hdf5 model to tf saved model', description='Converts Keras HDF5 model to Tensorflow SavedModel format.') - _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--converted-model", dest="converted_model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = keras_convert_hdf5_model_to_tf_saved_model(**_parsed_args) - args: - - --model - - {inputPath: model} - - --converted-model - - {outputPath: converted_model} diff --git a/components/contrib/_converters/OnnxModel/from_KerasModelHdf5/component.yaml b/components/contrib/_converters/OnnxModel/from_KerasModelHdf5/component.yaml deleted file mode 100644 index 224b3a5099c..00000000000 --- a/components/contrib/_converters/OnnxModel/from_KerasModelHdf5/component.yaml +++ /dev/null @@ -1,25 +0,0 @@ -name: To ONNX from Keras HDF5 model -inputs: -- {name: Model, type: KerasModelHdf5} -outputs: -- {name: Model, type: OnnxModel} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/OnnxModel/from_KerasModelHdf5/component.yaml' -implementation: - container: - image: tensorflow/tensorflow:2.3.0 - command: - - sh - - -exc - - python3 -m pip install tf2onnx==1.6.3 && "$0" "$@" - - python3 - - -m - - tf2onnx.convert - - --keras - - {inputPath: Model} - - --output - - {outputPath: Model} - - --fold_const - - --verbose diff --git a/components/contrib/_converters/OnnxModel/from_TensorflowSavedModel/component.yaml b/components/contrib/_converters/OnnxModel/from_TensorflowSavedModel/component.yaml deleted file mode 100644 index 901d400a19f..00000000000 --- a/components/contrib/_converters/OnnxModel/from_TensorflowSavedModel/component.yaml +++ /dev/null @@ -1,25 +0,0 @@ -name: To ONNX from Tensorflow SavedModel -inputs: -- {name: Model, type: TensorflowSavedModel} 
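The ONNX converters above and below shell out to the tf2onnx CLI pinned at 1.6.3. A hedged sketch of the equivalent Keras-to-ONNX step through the tf2onnx Python API, which exists only in newer releases (roughly tf2onnx >= 1.9); the model and output paths are placeholders:

    import tf2onnx
    from tensorflow import keras

    model = keras.models.load_model('model.h5')
    # Returns the ONNX ModelProto and writes it to output_path as a side effect.
    onnx_model, _ = tf2onnx.convert.from_keras(model, opset=13, output_path='model.onnx')
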
-outputs: -- {name: Model, type: OnnxModel} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/OnnxModel/from_TensorflowSavedModel/component.yaml' -implementation: - container: - image: tensorflow/tensorflow:2.3.0 - command: - - sh - - -exc - - python3 -m pip install tf2onnx==1.6.3 && "$0" "$@" - - python3 - - -m - - tf2onnx.convert - - --saved-model - - {inputPath: Model} - - --output - - {outputPath: Model} - - --fold_const - - --verbose diff --git a/components/contrib/_converters/OnnxModel/to_TensorflowSavedModel/component.py b/components/contrib/_converters/OnnxModel/to_TensorflowSavedModel/component.py deleted file mode 100644 index 234715313c3..00000000000 --- a/components/contrib/_converters/OnnxModel/to_TensorflowSavedModel/component.py +++ /dev/null @@ -1,26 +0,0 @@ -from kfp.components import create_component_from_func, InputPath, OutputPath - - -def convert_to_tensorflow_saved_model_from_onnx_model( - model_path: InputPath('OnnxModel'), - converted_model_path: OutputPath('TensorflowSavedModel'), -): - import onnx - import onnx_tf - - onnx_model = onnx.load(model_path) - tf_rep = onnx_tf.backend.prepare(onnx_model) - tf_rep.export_graph(converted_model_path) - - -if __name__ == '__main__': - convert_to_tensorflow_saved_model_from_onnx_model_op = create_component_from_func( - convert_to_tensorflow_saved_model_from_onnx_model, - output_component_file='component.yaml', - base_image='tensorflow/tensorflow:2.4.1', - packages_to_install=['onnx-tf==1.7.0', 'onnx==1.8.0'], # onnx-tf==1.7.0 is not compatible with onnx==1.8.1 - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/OnnxModel/to_TensorflowSavedModel/component.yaml", - }, - ) diff --git a/components/contrib/_converters/OnnxModel/to_TensorflowSavedModel/component.yaml b/components/contrib/_converters/OnnxModel/to_TensorflowSavedModel/component.yaml deleted file mode 100644 index ac1938cec91..00000000000 --- a/components/contrib/_converters/OnnxModel/to_TensorflowSavedModel/component.yaml +++ /dev/null @@ -1,54 +0,0 @@ -name: Convert to tensorflow saved model from onnx model -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/OnnxModel/to_TensorflowSavedModel/component.yaml' -inputs: -- {name: model, type: OnnxModel} -outputs: -- {name: converted_model, type: TensorflowModel} -implementation: - container: - image: tensorflow/tensorflow:2.4.1 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'onnx-tf==1.7.0' 'onnx==1.8.0' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m - pip install --quiet --no-warn-script-location 'onnx-tf==1.7.0' 'onnx==1.8.0' - --user) && "$0" "$@" - - sh - - -ec - - | - program_path=$(mktemp) - printf "%s" "$0" > "$program_path" - python3 -u "$program_path" "$@" - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def convert_to_tensorflow_saved_model_from_onnx_model( - model_path, - converted_model_path, - ): - import onnx - import onnx_tf - - onnx_model = onnx.load(model_path) - tf_rep = onnx_tf.backend.prepare(onnx_model) - tf_rep.export_graph(converted_model_path) - - import argparse - _parser = 
argparse.ArgumentParser(prog='Convert to tensorflow saved model from onnx model', description='') - _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--converted-model", dest="converted_model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = convert_to_tensorflow_saved_model_from_onnx_model(**_parsed_args) - args: - - --model - - {inputPath: model} - - --converted-model - - {outputPath: converted_model} diff --git a/components/contrib/_converters/TensorflowJSGraphModel/from_KerasModelHdf5/component.yaml b/components/contrib/_converters/TensorflowJSGraphModel/from_KerasModelHdf5/component.yaml deleted file mode 100644 index cfc50d62f58..00000000000 --- a/components/contrib/_converters/TensorflowJSGraphModel/from_KerasModelHdf5/component.yaml +++ /dev/null @@ -1,25 +0,0 @@ -name: Convert Keras HDF5 model to Tensorflow JS GraphModel -inputs: -- {name: Model, type: KerasModelHdf5} -outputs: -- {name: Model, type: TensorflowJSGraphModel} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/TensorflowJSGraphModel/from_KerasModelHdf5/component.yaml' -implementation: - container: - image: tensorflow/tensorflow:2.3.0 - command: - - sh - - -exc - - | - # Manually installing prerequisites so that tensorflowjs does not re-install tensorflow-cpu on top of tensorflow. See https://github.com/tensorflow/tfjs/issues/3953 - python3 -m pip install --quiet 'h5py>=2.8.0' 'numpy>=1.16.4,<1.19.0' 'six>=1.12.0' 'tensorflow-hub==0.7.0' 'PyInquirer==1.0.3' - python3 -m pip install --quiet tensorflowjs==2.4.0 --no-dependencies - "$0" "$*" - - tensorflowjs_converter - - --input_format=keras - - --output_format=tfjs_graph_model - - inputPath: Model - - outputPath: Model diff --git a/components/contrib/_converters/TensorflowJSGraphModel/from_TensorflowSavedModel/component.yaml b/components/contrib/_converters/TensorflowJSGraphModel/from_TensorflowSavedModel/component.yaml deleted file mode 100644 index 310207a3b0a..00000000000 --- a/components/contrib/_converters/TensorflowJSGraphModel/from_TensorflowSavedModel/component.yaml +++ /dev/null @@ -1,25 +0,0 @@ -name: Convert Tensorflow SavedModel to Tensorflow JS GraphModel -inputs: -- {name: Model, type: TensorflowSavedModel} -outputs: -- {name: Model, type: TensorflowJSGraphModel} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/TensorflowJSGraphModel/from_TensorflowSavedModel/component.yaml' -implementation: - container: - image: tensorflow/tensorflow:2.3.0 - command: - - sh - - -exc - - | - # Manually installing prerequisites so that tensorflowjs does not re-install tensorflow-cpu on top of tensorflow. 
See https://github.com/tensorflow/tfjs/issues/3953 - python3 -m pip install --quiet 'h5py>=2.8.0' 'numpy>=1.16.4,<1.19.0' 'six>=1.12.0' 'tensorflow-hub==0.7.0' 'PyInquirer==1.0.3' - python3 -m pip install --quiet tensorflowjs==2.4.0 --no-dependencies - "$0" "$*" - - tensorflowjs_converter - - --input_format=tf_saved_model - - --output_format=tfjs_graph_model - - inputPath: Model - - outputPath: Model diff --git a/components/contrib/_converters/TensorflowJSLayersModel/from_KerasModelHdf5/component.yaml b/components/contrib/_converters/TensorflowJSLayersModel/from_KerasModelHdf5/component.yaml deleted file mode 100644 index aa9a1f0c071..00000000000 --- a/components/contrib/_converters/TensorflowJSLayersModel/from_KerasModelHdf5/component.yaml +++ /dev/null @@ -1,25 +0,0 @@ -name: Convert Keras HDF5 model to Tensorflow JS LayersModel -inputs: -- {name: Model, type: KerasModelHdf5} -outputs: -- {name: Model, type: TensorflowJSLayersModel} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/TensorflowJSLayersModel/from_KerasModelHdf5/component.yaml' -implementation: - container: - image: tensorflow/tensorflow:2.3.0 - command: - - sh - - -exc - - | - # Manually installing prerequisites so that tensorflowjs does not re-install tensorflow-cpu on top of tensorflow. See https://github.com/tensorflow/tfjs/issues/3953 - python3 -m pip install --quiet 'h5py>=2.8.0' 'numpy>=1.16.4,<1.19.0' 'six>=1.12.0' 'tensorflow-hub==0.7.0' 'PyInquirer==1.0.3' - python3 -m pip install --quiet tensorflowjs==2.4.0 --no-dependencies - "$0" "$*" - - tensorflowjs_converter - - --input_format=keras - - --output_format=tfjs_layers_model - - inputPath: Model - - outputPath: Model diff --git a/components/contrib/_converters/TensorflowJSLayersModel/from_TensorflowSavedModel/component.yaml b/components/contrib/_converters/TensorflowJSLayersModel/from_TensorflowSavedModel/component.yaml deleted file mode 100644 index 6e24458f71a..00000000000 --- a/components/contrib/_converters/TensorflowJSLayersModel/from_TensorflowSavedModel/component.yaml +++ /dev/null @@ -1,25 +0,0 @@ -name: Convert Keras SavedModel to Tensorflow JS LayersModel -inputs: -- {name: Model, type: TensorflowSavedModel} -outputs: -- {name: Model, type: TensorflowJSLayersModel} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/TensorflowJSLayersModel/from_TensorflowSavedModel/component.yaml' -implementation: - container: - image: tensorflow/tensorflow:2.3.0 - command: - - sh - - -exc - - | - # Manually installing prerequisites so that tensorflowjs does not re-install tensorflow-cpu on top of tensorflow. 
See https://github.com/tensorflow/tfjs/issues/3953 - python3 -m pip install --quiet 'h5py>=2.8.0' 'numpy>=1.16.4,<1.19.0' 'six>=1.12.0' 'tensorflow-hub==0.7.0' 'PyInquirer==1.0.3' - python3 -m pip install --quiet tensorflowjs==2.4.0 --no-dependencies - "$0" "$*" - - tensorflowjs_converter - - --input_format=keras_saved_model - - --output_format=tfjs_layers_model - - inputPath: Model - - outputPath: Model diff --git a/components/contrib/_converters/TensorflowLiteModel/from_KerasModelHdf5/component.yaml b/components/contrib/_converters/TensorflowLiteModel/from_KerasModelHdf5/component.yaml deleted file mode 100644 index 71bb18134d9..00000000000 --- a/components/contrib/_converters/TensorflowLiteModel/from_KerasModelHdf5/component.yaml +++ /dev/null @@ -1,23 +0,0 @@ -name: Convert Keras HDF5 model to Tensorflow Lite model -inputs: -- {name: Model, type: KerasModelHdf5} -outputs: -- {name: Model, type: TensorflowLiteModel} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/TensorflowLiteModel/from_KerasModelHdf5/component.yaml' -implementation: - container: - image: tensorflow/tensorflow:2.3.0 - command: - - sh - - -exc - - | - model_path="$0" - output_model_path="$1" - mkdir -p "$(dirname "$output_model_path")" - - tflite_convert --keras_model_file "$model_path" --output_file "$output_model_path" - - {inputPath: Model} - - {outputPath: Model} diff --git a/components/contrib/_converters/TensorflowLiteModel/from_TensorflowSavedModel/component.yaml b/components/contrib/_converters/TensorflowLiteModel/from_TensorflowSavedModel/component.yaml deleted file mode 100644 index 52c699ca4c7..00000000000 --- a/components/contrib/_converters/TensorflowLiteModel/from_TensorflowSavedModel/component.yaml +++ /dev/null @@ -1,23 +0,0 @@ -name: Convert Tensorflow SavedModel to Tensorflow Lite model -inputs: -- {name: Model, type: TensorflowSavedModel} -outputs: -- {name: Model, type: TensorflowLiteModel} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/_converters/TensorflowLiteModel/from_TensorflowSavedModel/component.yaml' -implementation: - container: - image: tensorflow/tensorflow:2.3.0 - command: - - sh - - -exc - - | - model_path="$0" - output_model_path="$1" - mkdir -p "$(dirname "$output_model_path")" - - tflite_convert --saved_model_dir "$model_path" --output_file "$output_model_path" - - {inputPath: Model} - - {outputPath: Model} diff --git a/components/contrib/arena/docker/Dockerfile b/components/contrib/arena/docker/Dockerfile deleted file mode 100644 index adea6952f74..00000000000 --- a/components/contrib/arena/docker/Dockerfile +++ /dev/null @@ -1,46 +0,0 @@ -FROM golang:1.10-stretch as build - -RUN mkdir -p /go/src/github.com/kubeflow && \ - cd /go/src/github.com/kubeflow && \ - git clone https://github.com/kubeflow/arena.git - -WORKDIR /go/src/github.com/kubeflow/arena - -RUN cd /go/src/github.com/kubeflow/arena && make - -RUN wget --no-verbose https://storage.googleapis.com/kubernetes-helm/helm-v2.9.1-linux-amd64.tar.gz && \ - tar -xf helm-v2.9.1-linux-amd64.tar.gz && \ - mv linux-amd64/helm /usr/local/bin/helm && \ - chmod u+x /usr/local/bin/helm - -ENV K8S_VERSION v1.11.2 -RUN curl -o /usr/local/bin/kubectl https://storage.googleapis.com/kubernetes-release/release/${K8S_VERSION}/bin/linux/amd64/kubectl && chmod +x /usr/local/bin/kubectl - -# FROM python:3.6.8-stretch - -FROM 
python:3.7-alpine3.9 - -RUN apk update && \ - apk add --no-cache ca-certificates py-dev python-setuptools wget unzip git bash \ - rm -rf /var/cache/apk/* - -RUN pip install --upgrade pip && \ - pip install pyyaml==3.12 six==1.11.0 requests==2.18.4 - -COPY --from=build /go/src/github.com/kubeflow/arena/bin/arena /usr/local/bin/arena - -COPY --from=build /usr/local/bin/helm /usr/local/bin/helm - -COPY --from=build /go/src/github.com/kubeflow/arena/kubernetes-artifacts /root/kubernetes-artifacts - -COPY --from=build /usr/local/bin/kubectl /usr/local/bin/kubectl - -COPY --from=build /go/src/github.com/kubeflow/arena/charts /charts - -ENV PYTHONPATH "${PYTHONPATH}:/root" - -ADD . /root - -WORKDIR /root - -ENTRYPOINT ["python","arena_launcher.py"] diff --git a/components/contrib/arena/docker/arena_launcher.py b/components/contrib/arena/docker/arena_launcher.py deleted file mode 100644 index d127e915fd7..00000000000 --- a/components/contrib/arena/docker/arena_launcher.py +++ /dev/null @@ -1,184 +0,0 @@ -""" -Usage: -python arena_launcher.py - --name=tf-test - --tensorboard=true - mpijob - --gpus=1 - --workers=2 - --image=registry.cn-hangzhou.aliyuncs.com/tensorflow-samples/horovod:0.13.11-tf1.10.0-torch0.4.0-py3.5 - -- - mpirun python /benchmarks/scripts/tf_cnn_benchmarks/tf_cnn_benchmarks.py --model resnet101 --batch_size 64 --variable_update horovod --train_dir=/training_logs --summary_verbosity=3 --save_summaries_steps=10 -""" -# TODO: Add unit/integration tests - -import argparse -import datetime -import json -import os -import sys -import logging -import requests -import subprocess -import six -import time -import yaml -from subprocess import Popen,PIPE -from shlex import split -from utils import * -from job_generator import * - -def main(argv=None): - setup_custom_logging() - import sys - all_args = sys.argv[1:] - logging.info("args: {0}".format(' '.join(sys.argv))) - parser = argparse.ArgumentParser(description='Arena launcher') - parser.add_argument('--name', type=str, - help='The job name to specify.',default=None) - parser.add_argument('--tensorboard', type=str, default="False") - parser.add_argument('--rdma', type=str, default="False") - parser.add_argument('--tensorboard-image', type=str, default='tensorflow/tensorflow:1.12.0') - parser.add_argument('--timeout-hours', type=int, - default=200, - help='Time in minutes to wait for the Job submitted by arena to complete') - parser.add_argument('--pending-timeout-minutes', type=int, - default=360, - help='Time in hours to wait for the Job submitted by arena from pending to running') - # parser.add_argument('--command', type=str) - parser.add_argument('--output-dir', type=str, default='') - parser.add_argument('--output-data', type=str, default='None') - parser.add_argument('--log-dir', type=str, default='') - - parser.add_argument('--image', type=str) - parser.add_argument('--gpus', type=int, default=0) - parser.add_argument('--cpu', type=str, default='0') - parser.add_argument('--memory', type=str, default='0') - parser.add_argument('--workers', type=int, default=2) - - parser.add_argument('--env', action='append', type=str, default=[]) - parser.add_argument('--data', action='append', type=str, default=[]) - parser.add_argument('--metric', action='append', type=str, default=[]) - parser.add_argument('--sync-source', type=str, default='') - - parser.add_argument('--workflow-name', type=str, default='') - parser.add_argument('--step-name', type=str, default='') - - subparsers = parser.add_subparsers(help='arena sub-command help') - - 
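# Note: the launcher splits its arguments on a literal '--' separator further
# below; everything before the separator is parsed by this parser (and the
# subparsers declared next), while everything after it is forwarded verbatim
# as the training command.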
#create the parser for the 'mpijob' command - parser_mpi = subparsers.add_parser('mpijob', help='mpijob help') - parser_mpi.set_defaults(func=generate_mpjob_command) - - #create the parser for the 'job' command - parser_job = subparsers.add_parser('job', help='job help') - parser_job.set_defaults(func=generate_job_command) - - - separator_idx = all_args.index('--') - launcher_args = all_args[:separator_idx] - remaining_args = all_args[separator_idx + 1:] - - args = parser.parse_args(launcher_args) - commandArray, job_type = args.func(args) - - args_dict = vars(args) - if args.name is None: - logging.error("Please specify the name") - sys.exit(-1) - if len(remaining_args) == 0: - logging.error("Please specify the command.") - sys.exit(-1) - - internalCommand = ' '.join(remaining_args) - - name = args.name - fullname = name + datetime.datetime.now().strftime("%Y%M%d%H%M%S") - timeout_hours = args_dict.pop('timeout_hours') - logging.info("timeout_hours: {0}".format(timeout_hours)) - - enableTensorboard = str2bool(args.tensorboard) - - commandArray.append('"{0}"'.format(internalCommand)) - command = ' '.join(commandArray) - - command=command.replace("--name={0}".format(name),"--name={0}".format(fullname)) - - logging.info('Start training {0}.'.format(command)) - - submit_job(command) - - succ = True - - # wait for job done - # wait_job_done(fullname, job_type, datetime.timedelta(minutes=timeout_hours)) - pending_timeout_minutes = args.pending_timeout_minutes - wait_job_running(fullname, job_type, datetime.timedelta(minutes=pending_timeout_minutes)) - - rc = job_logging(fullname, job_type) - logging.info("rc: {0}".format(rc)) - - wait_job_done(fullname, job_type, datetime.timedelta(hours=timeout_hours)) - - status = get_job_status(fullname, job_type) - - if status == "SUCCEEDED": - logging.info("Training Job {0} success.".format(fullname)) - if len(args.metric) > 0: - metrics_data = { - 'metrics': [] - } - metric_list = [] - metric_unit="RAW" - for m in args.metric: - mArray = m.split(":") - metric_name = mArray[0] - if len(mArray) > 1: - metric_unit = mArray[1] - logging.info("determine metric name {0} with metric unit {1}".format(metric_name, metric_unit)) - value = collect_metrics(fullname, job_type, metric_name) - if value > 0: - import re - p = re.compile('^[a-z]([-a-z0-9]{0,62}[a-z0-9])?') - result = p.search(metric_name.lower()) - if result is None: - logging.info("Failed to parse metric_name {0},skip".format(metric_name)) - continue - else: - metric_name=result.group(0) - - metric_data = { - 'name': metric_name.lower(), # The name of the metric. Visualized as the column name in the runs table. - 'numberValue': value, # The value of the metric. Must be a numeric value. - 'format': metric_unit, # The optional format of the metric. Supported values are "RAW" (displayed in raw format) and "PERCENTAGE" (displayed in percentage format). 
- } - logging.info("metric data: {0}".format(metric_data)) - metric_list.append(metric_data) - metrics_data['metrics'] = metric_list - with open('/mlpipeline-metrics.json', 'w') as f: - logging.info("metrics: {0}".format(metrics_data)) - json.dump(metrics_data, f) - logging.info("write down /mlpipeline-metrics.json") - elif status == "FAILED": - logging.error("Training Job {0} fail.".format(fullname)) - sys.exit(-1) - else: - logging.error("Training Job {0}'s status {1}".format(fullname, status)) - sys.exit(-1) - - # TODO(cheyang): copy the output.txt from training job - output="" - with open('/output.txt', 'w') as f: - f.write(output) - - with open('/workflow-name.txt', 'w') as f: - f.write(args.workflow_name) - - with open('/step-name.txt', 'w') as f: - f.write(args.step_name) - - with open('/name.txt', 'w') as f: - f.write(args.name) - -if __name__== "__main__": - main() diff --git a/components/contrib/arena/docker/job_generator.py b/components/contrib/arena/docker/job_generator.py deleted file mode 100644 index dbd9e248cd0..00000000000 --- a/components/contrib/arena/docker/job_generator.py +++ /dev/null @@ -1,116 +0,0 @@ -import argparse -import datetime -import json -import os -import sys -import logging -import requests -import subprocess -import six -import time -import yaml -from subprocess import Popen,PIPE -from shlex import split - -from utils import * - -# Generate common options -def generate_options(args): - gpus = args.gpus - cpu = args.cpu - memory = args.memory - tensorboard = args.tensorboard - output_data = args.output_data - data = args.data - env = args.env - tensorboard_image = args.tensorboard_image - tensorboard = str2bool(args.tensorboard) - log_dir = args.log_dir - sync_source = args.sync_source - - options = [] - - if gpus > 0: - options.extend(['--gpus', str(gpus)]) - - if cpu != '0': - options.extend(['--cpu', str(cpu)]) - - if memory != '0': - options.extend(['--memory', str(memory)]) - - if tensorboard_image != "tensorflow/tensorflow:1.12.0": - options.extend(['--tensorboardImage', tensorboard_image]) - - if tensorboard: - options.append("--tensorboard") - - if os.path.isdir(args.log_dir): - options.extend(['--logdir', args.log_dir]) - else: - logging.info("skip log dir :{0}".format(args.log_dir)) - - if len(data) > 0: - for d in data: - if ":" in d: - options.append("--data={0}".format(d)) - else: - logging.info("--data={0} is illegal, skip.".format(d)) - - if len(env) > 0: - for e in env: - if "=" in e: - options.append("--env={0}".format(e)) - else: - logging.info("--env={0} is illegal, skip.".format(e)) - - - if len(args.workflow_name) > 0: - options.append("--env=WORKFLOW_NAME={0}".format(args.workflow_name)) - - if len(args.step_name) > 0: - options.append("--env=STEP_NAME={0}".format(args.step_name)) - - if len(sync_source) > 0: - if not sync_source.endswith(".git"): - raise ValueError("sync_source must be an http git url") - options.extend(['--sync-mode','git']) - options.extend(['--sync-source',sync_source]) - - return options - -# Generate standalone job -def generate_job_command(args): - name = args.name - image = args.image - - commandArray = [ - 'arena', 'submit', 'tfjob', - '--name={0}'.format(name), - '--image={0}'.format(image), - ] - - commandArray.extend(generate_options(args)) - - return commandArray, "tfjob" - -# Generate mpi job -def generate_mpjob_command(args): - name = args.name - workers = args.workers - image = args.image - rdma = args.rdma - - commandArray = [ - 'arena', 'submit', 'mpijob', - '--name={0}'.format(name), - 
'--workers={0}'.format(workers), - '--image={0}'.format(image), - ] - - if rdma.lower() == "true": - commandArray.append("--rdma") - - commandArray.extend(generate_options(args)) - - return commandArray, "mpijob" diff --git a/components/contrib/arena/docker/requirements.txt b/components/contrib/arena/docker/requirements.txt deleted file mode 100644 index a4b19543303..00000000000 --- a/components/contrib/arena/docker/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -requests==2.18.4 -six==1.11.0 -pyyaml==3.12 diff --git a/components/contrib/arena/docker/utils.py b/components/contrib/arena/docker/utils.py deleted file mode 100644 index 9a2766fd41c..00000000000 --- a/components/contrib/arena/docker/utils.py +++ /dev/null @@ -1,131 +0,0 @@ -import argparse -import datetime -import json -import os -import sys -import logging -import requests -import subprocess -import six -import time -import yaml -from subprocess import Popen,PIPE -from shlex import split - -def setup_custom_logging(): - logging.basicConfig(format='%(asctime)s %(levelname)-8s %(message)s', - level=logging.INFO, - datefmt='%Y-%m-%d %H:%M:%S') - -def str2bool(v): - return v.lower() in ("yes", "true", "t", "1") - -def submit_job(command): - logging.info("command: {0}".format(command)) - try: - output = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True) - result = output.decode() - except subprocess.CalledProcessError as exc: - print("Status : FAIL", exc.returncode, exc.output) - sys.exit(-1) - logging.info('Submit Job: %s.' % result) - -def _is_active_status(status): - logging.info("status: {0}".format(status)) - return status == 'PENDING' or status == 'RUNNING' - -def _is_pending_status(status): - logging.info("status: {0}".format(status)) - return status == 'PENDING' - -def wait_job_done(name, job_type, timeout): - end_time = datetime.datetime.now() + timeout - logging.info("expect done time: {0}".format(end_time)) - status = get_job_status(name, job_type) - while _is_active_status(status): - if datetime.datetime.now() > end_time: - timeoutMsg = "Timeout waiting for job {0} with job type {1} completing.".format(name ,job_type) - logging.error(timeoutMsg) - raise Exception(timeoutMsg) - time.sleep(3) - status = get_job_status(name, job_type) - logging.info("job {0} with type {1} status is {2}".format(name, job_type, status)) - -def wait_job_running(name, job_type, timeout): - end_time = datetime.datetime.now() + timeout - logging.info("expect running time: {0}".format(end_time)) - status = get_job_status(name, job_type) - while _is_pending_status(status): - if datetime.datetime.now() > end_time: - timeoutMsg = "Timeout waiting for job {0} with job type {1} running.".format(name ,job_type) - logging.error(timeoutMsg) - raise Exception(timeoutMsg) - time.sleep(10) - status = get_job_status(name, job_type) - logging.info("job {0} with type {1} status is {2}".format(name, job_type, status)) - -def job_logging(name, job_type): - logging_cmd = "arena logs -f %s" % (name) - process = Popen(split(logging_cmd), stdout = PIPE, stderr = PIPE, encoding='utf8') - while True: - output = process.stdout.readline() - if output == "" and process.poll() is not None: - break - if output: - # print("", output.strip()) - logging.info(output.strip()) - rc = process.poll() - return rc - -def collect_metrics(name, job_type, metric_name): - metrics_cmd = "arena logs --tail=50 %s | grep -e '%s=' -e '%s:' | tail -1" % (name, metric_name, metric_name) - metric = 0 - logging.info("search metric_name %s" % (metric_name)) - try: - import re - 
output = subprocess.check_output(metrics_cmd, stderr=subprocess.STDOUT, shell=True) - result = output.decode().strip() - split_unit='' - if metric_name+"=" in result: - split_unit="=" - elif metric_name+":" in result: - split_unit=":" - else: - return 0 - array = result.split("%s%s" % (metric_name, split_unit)) - if len(array) > 0: - logging.info(array) - result = re.findall(r'\d+\.*\d*',array[-1]) - if len(result) > 0: - metric = float(result[0]) - else: - logging.warning("Failed to parse metric from %s" % (array[-1])) - metric = 0 - except Exception as e: - logging.warning("Failed to get job status due to" + e) - return 0 - - return metric - -def get_job_status(name, job_type): - get_cmd = "arena get %s --type %s | grep -i STATUS:|awk -F: '{print $NF}'" % (name, job_type) - status = "" - try: - output=subprocess.check_output(get_cmd, stderr=subprocess.STDOUT, shell=True) - status = output.decode() - status = status.strip() - except subprocess.CalledProcessError as e: - logging.warning("Failed to get job status due to" + e) - - return status - -def _get_tensorboard_url(name, job_type): - get_cmd = "arena get %s --type %s | tail -1" % (name, job_type) - url = "N/A" - try: - output = subprocess.check_output(get_cmd, stderr=subprocess.STDOUT, shell=True) - url = output.decode() - except subprocess.CalledProcessError as e: - logging.warning("Failed to get job status due to" + e) - - return url \ No newline at end of file diff --git a/components/contrib/arena/python/arena/__init__.py b/components/contrib/arena/python/arena/__init__.py deleted file mode 100644 index b3422656e47..00000000000 --- a/components/contrib/arena/python/arena/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from ._arena_mpi_op import mpi_job_op -from ._arena_standalone_op import standalone_job_op -from ._arena_distributed_tf_op import estimator_op, parameter_servers_op diff --git a/components/contrib/arena/python/arena/_arena_distributed_tf_op.py b/components/contrib/arena/python/arena/_arena_distributed_tf_op.py deleted file mode 100644 index f67175cb20f..00000000000 --- a/components/contrib/arena/python/arena/_arena_distributed_tf_op.py +++ /dev/null @@ -1,136 +0,0 @@ -#!/usr/bin/env python3 -# -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# flake8: noqa TODO - -import kfp.dsl as dsl -import datetime -import logging - - -def estimator_op(name, image, command, - chief_cpu_limit, chief_memory_limit, chief_port, - workers, worker_image, worker_cpu_limit, worker_memory_limit, - parameter_servers, ps_image, ps_cpu_limit, ps_memory_limit, ps_port, - gpus, rdma, - tensorboard, - worker_port, annotations=[], - evaluator=False, evaluator_cpu_limit='0', evaluator_memory_limit='0', - env=[], data=[], sync_source=None, - metrics=['Train-accuracy:PERCENTAGE'], - arena_image='cheyang/arena_launcher:v0.7', - timeout_hours=240): - - """This function submits Distributed TFJob in Estimator mode. - - Args: - name: the name of parameter_servers_op - image: the docker image name of training job - data: specify the datasource to mount to the job, like : - command: the command to run - """ - return distributed_tf_op(name=name, image=image, command=command, envs=envs, data=data, sync_source=sync_source, - workers=workers, worker_image=worker_image, worker_cpu_limit=worker_cpu_limit, worker_memory_limit=worker_memory, - parameter_servers=parameter_servers, ps_image=ps_image, ps_cpu_limit=ps_cpu_limit, ps_memory_limit=ps_memory_limit, - gpus=gpus, rdma=rdma, - chief=True, - chief_cpu_limit=chief_cpu_limit, - worker_port=worker_port, - ps_port=ps_port, - tensorboard=tensorboard, - metrics=metrics, - arena_image=arena_image, - timeout_hours=timeout_hours) - -# def DistributeTFOp(name, image, gpus: int, ): - -def parameter_servers_op(name, image, command, env, data, sync_source, annotations, - workers, worker_image, worker_cpu_limit, worker_memory, - parameter_servers, ps_image, ps_cpu_limit, ps_memory_limit, - gpus, rdma, - tensorboard, - worker_port, ps_port, - metrics=['Train-accuracy:PERCENTAGE'], - arena_image='cheyang/arena_launcher:v0.7', - timeout_hours=240): - - """This function submits Distributed TFJob in Parameter Servers mode. - - Args: - name: the name of parameter_servers_op - image: the docker image name of training job - data: specify the datasource to mount to the job, like : - command: the command to run - """ - return distributed_tf_op(name=name, image=image, command=command, envs=envs, data=data, sync_source=sync_source, - workers=workers, worker_image=worker_image, worker_cpu_limit=worker_cpu_limit, worker_memory_limit=worker_memory, - parameter_servers=parameter_servers, ps_image=ps_image, ps_cpu_limit=ps_cpu_limit, ps_memory_limit=ps_memory_limit, - gpus=gpus, rdma=rdma, - worker_port=worker_port, - ps_port=ps_port, - tensorboard=tensorboard, - metrics=metrics, - arena_image=arena_image, - timeout_hours=timeout_hours) - - - -def distributed_tf_op(name, image, command, env=[], data=[], sync_source=None, - chief=False, chief_cpu_limit='0', chief_memory_limit='0', - workers=0, worker_image=None, worker_cpu_limit='0', worker_memory_limit='0', - parameter_servers=0, ps_image=None, ps_cpu_limit='0', ps_memory_limit='0', - evaluator=False, evaluator_cpu_limit='0', evaluator_memory_limit='0', - gpus=0, rdma=False, - chief_port=22222, - worker_port=22222, - ps_port=22224, - tensorboard=False, - metrics=['Train-accuracy:PERCENTAGE'], - arena_image='cheyang/arena_launcher:v0.7', - timeout_hours=240): - """This function submits Distributed TFJob in Distributed mode. 
- - Args: - name: the name of distributed_tf_op - image: the docker image name of training job - data: specify the datasource to mount to the job, like : - command: the command to run - """ - return dsl.ContainerOp( - name=name, - image=arena_image, - command=['python','arena_launcher.py'], - arguments=[ "--name", name, - "--tensorboard", tensorboard, - "--rdma", rdma, - "--data", data, - "--output-data", output_data, - "--image", image, - "--gpus", gpus, - "--worker-cpu", worker_cpu_limit, - "--worker-memory", worker_memory_limit, - "--timeout-hours", timeout_hours, - "--metric-name", metric_name, - "--metric-unit", metric_unit, - "--step-name", '{{pod.name}}', - "--workflow-name", '{{workflow.name}}', - "tfjob", - "--workers", workers, - "--", command], - file_outputs={'train': '/output.txt', - 'workflow':'/workflow-name.txt', - 'step':'/step-name.txt', - 'name':'/name.txt'} - ) diff --git a/components/contrib/arena/python/arena/_arena_mpi_op.py b/components/contrib/arena/python/arena/_arena_mpi_op.py deleted file mode 100644 index 2c3a94a2dd1..00000000000 --- a/components/contrib/arena/python/arena/_arena_mpi_op.py +++ /dev/null @@ -1,89 +0,0 @@ -#!/usr/bin/env python3 -# -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import kfp.dsl as dsl -import datetime -import logging - -def mpi_job_op(name, image, command, workers=1, gpus=0, cpu_limit='0', memory_limit='0', env=[], annotations=[], - data=[], sync_source=None, - rdma=False, - tensorboard=False, tensorboard_image=None, - metrics=['Train-accuracy:PERCENTAGE'], - arenaImage='cheyang/arena_launcher:v0.7', - timeout_hours=240): - """This function submits MPI Job, it can run Allreduce-style Distributed Training. 
- - Args: - name: the name of mpi_job_op - image: the docker image name of training job - data: specify the datasource to mount to the job, like : - command: the command to run - """ - if not name: - raise ValueError("name must be specified") - if not image: - raise ValueError("image must be specified") - if not command: - raise ValueError("command must be specified") - - options = [] - if sync_source: - options.append('--sync-source') - options.append(str(sync_source)) - - for e in env: - options.append('--env') - options.append(str(e)) - - for d in data: - options.append('--data') - options.append(str(d)) - - for m in metrics: - options.append('--metric') - options.append(str(m)) - - if tensorboard_image: - options.append('--tensorboard-image') - options.append(str(tensorboard_image)) - - op = dsl.ContainerOp( - name=name, - image=arenaImage, - command=['python','arena_launcher.py'], - arguments=[ "--name", name, - "--tensorboard", str(tensorboard), - "--rdma", str(rdma), - "--image", str(image), - "--gpus", str(gpus), - "--cpu", str(cpu_limit), - "--memory", str(memory_limit), - "--step-name", '{{pod.name}}', - "--workflow-name", '{{workflow.name}}', - "--workers", str(workers), - "--timeout-hours", str(timeout_hours), - ] + options + - [ - "mpijob", - "--", str(command)], - file_outputs={'train': '/output.txt', - 'workflow':'/workflow-name.txt', - 'step':'/step-name.txt', - 'name':'/name.txt'} - ) - op.set_image_pull_policy('Always') - return op \ No newline at end of file diff --git a/components/contrib/arena/python/arena/_arena_standalone_op.py b/components/contrib/arena/python/arena/_arena_standalone_op.py deleted file mode 100644 index c18ef8d6d11..00000000000 --- a/components/contrib/arena/python/arena/_arena_standalone_op.py +++ /dev/null @@ -1,88 +0,0 @@ -#!/usr/bin/env python3 -# -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
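For context, a minimal sketch of how the `mpi_job_op` wrapper above was typically consumed from a pipeline definition; the image, command, and registry shown here are placeholders rather than values taken from this repository:

```python
import kfp.dsl as dsl
import kfp.compiler as compiler
from arena import mpi_job_op  # exported by the package __init__.py shown above


@dsl.pipeline(name='arena-mpi-sample', description='Sketch of an Arena MPI job')
def arena_mpi_sample():
    # Parameter names mirror the mpi_job_op signature; values are illustrative only.
    mpi_job_op(
        name='horovod-benchmark',
        image='example.registry.io/horovod:latest',   # placeholder training image
        command='mpirun python train.py --epochs 1',  # placeholder training command
        workers=2,
        gpus=1,
        metrics=['Train-accuracy:PERCENTAGE'],
    )


if __name__ == '__main__':
    compiler.Compiler().compile(arena_mpi_sample, __file__ + '.tar.gz')
```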
- - -import kfp.dsl as dsl -import datetime -import logging - - -def standalone_job_op(name, image, command, gpus=0, cpu_limit='0', memory_limit='0', env=[], - tensorboard=False, tensorboard_image=None, - data=[], sync_source=None, annotations=[], - metrics=['Train-accuracy:PERCENTAGE'], - arena_image='cheyang/arena_launcher:v0.7', - timeout_hours=240): - - """This function submits a standalone training Job - - Args: - name: the name of standalone_job_op - image: the docker image name of training job - mount: specify the datasource to mount to the job, like : - command: the command to run - """ - if not name: - raise ValueError("name must be specified") - if not image: - raise ValueError("image must be specified") - if not command: - raise ValueError("command must be specified") - - options = [] - if sync_source: - options.append('--sync-source') - options.append(str(sync_source)) - - for e in env: - options.append('--env') - options.append(str(e)) - - for d in data: - options.append('--data') - options.append(str(d)) - - for m in metrics: - options.append('--metric') - options.append(str(m)) - - if tensorboard_image: - options.append('--tensorboard-image') - options.append(str(tensorboard_image)) - - op = dsl.ContainerOp( - name=name, - image=arena_image, - command=['python','arena_launcher.py'], - arguments=[ "--name", name, - "--tensorboard", str(tensorboard), - "--image", str(image), - "--gpus", str(gpus), - "--cpu", str(cpu_limit), - "--step-name", '{{pod.name}}', - "--workflow-name", '{{workflow.name}}', - "--memory", str(memory_limit), - "--timeout-hours", str(timeout_hours), - ] + options + - [ - "job", - "--", str(command)], - file_outputs={'train': '/output.txt', - 'workflow':'/workflow-name.txt', - 'step':'/step-name.txt', - 'name':'/name.txt'} - ) - op.set_image_pull_policy('Always') - return op diff --git a/components/contrib/arena/python/arena/_utils.py b/components/contrib/arena/python/arena/_utils.py deleted file mode 100644 index 783d22e560a..00000000000 --- a/components/contrib/arena/python/arena/_utils.py +++ /dev/null @@ -1,8 +0,0 @@ -# The default Data of training job -default_data = 'None' - -def set_defaultData(data): - default_data = data - -def get_defaultData(): - return default_data \ No newline at end of file diff --git a/components/contrib/arena/python/build.sh b/components/contrib/arena/python/build.sh deleted file mode 100755 index 34d893ff2f9..00000000000 --- a/components/contrib/arena/python/build.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -ex - -get_abs_filename() { - # $1 : relative filename - echo "$(cd "$(dirname "$1")" && pwd)/$(basename "$1")" -} - -target_archive_file=${1:-kfp-arena-0.6.tar.gz} -target_archive_file=$(get_abs_filename "$target_archive_file") - -DIR=$(mktemp -d) - - -cp -r arena $DIR -cp ./setup.py $DIR - -# Build tarball package. 
-cd $DIR -python setup.py sdist --format=gztar -cp $DIR/dist/*.tar.gz "$target_archive_file" -rm -rf $DIR \ No newline at end of file diff --git a/components/contrib/arena/python/setup.py b/components/contrib/arena/python/setup.py deleted file mode 100644 index 5e3fd533bf6..00000000000 --- a/components/contrib/arena/python/setup.py +++ /dev/null @@ -1,36 +0,0 @@ -from setuptools import setup - - -NAME = 'kfp-arena' -VERSION = '0.6' - -REQUIRES = ['kfp >= 0.1'] - -setup( - name=NAME, - version=VERSION, - description='KubeFlow Pipelines Extended Arena SDK', - author='cheyang', - author_email="cheyang@163.com", - install_requires=REQUIRES, - packages=[ - 'arena', - ], - classifiers=[ - 'Intended Audience :: Developers', - 'Intended Audience :: Education', - 'Intended Audience :: Science/Research', - 'License :: OSI Approved :: Apache Software License', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Topic :: Scientific/Engineering', - 'Topic :: Scientific/Engineering :: Artificial Intelligence', - 'Topic :: Software Development', - 'Topic :: Software Development :: Libraries', - 'Topic :: Software Development :: Libraries :: Python Modules', - ], - python_requires='>=3.5.3', - include_package_data=True -) diff --git a/components/contrib/azure/azuredevops/queue-pipeline/Dockerfile b/components/contrib/azure/azuredevops/queue-pipeline/Dockerfile deleted file mode 100644 index 311d4b5813a..00000000000 --- a/components/contrib/azure/azuredevops/queue-pipeline/Dockerfile +++ /dev/null @@ -1,7 +0,0 @@ -FROM python:3.7-slim - -RUN pip install azure-devops - -COPY queue-pipeline/src/queue_pipeline.py /scripts/queue_pipeline.py - -ENTRYPOINT [ "bash" ] diff --git a/components/contrib/azure/azuredevops/queue-pipeline/build_image.sh b/components/contrib/azure/azuredevops/queue-pipeline/build_image.sh deleted file mode 100644 index 0f299917d46..00000000000 --- a/components/contrib/azure/azuredevops/queue-pipeline/build_image.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -e -while getopts "r:" option; - do - case "$option" in - r ) REGISTRY_NAME=${OPTARG};; - esac -done -image_name=${REGISTRY_NAME}.azurecr.io/myrepo/queue_pipeline:latest # Specify the image name here -image_tag=latest -full_image_name=${image_name}:${image_tag} - -cd "$(dirname "$0")" -docker build -t "${full_image_name}" . -docker push "$full_image_name" - -# Output the strict image name (which contains the sha256 image digest) -docker inspect --format="{{index .RepoDigests 0}}" "${full_image_name}" \ No newline at end of file diff --git a/components/contrib/azure/azuredevops/queue-pipeline/component.yaml b/components/contrib/azure/azuredevops/queue-pipeline/component.yaml deleted file mode 100644 index b604a0f49c9..00000000000 --- a/components/contrib/azure/azuredevops/queue-pipeline/component.yaml +++ /dev/null @@ -1,30 +0,0 @@ -name: Queue Azure Pipeline -description: | - A Kubeflow pipeline component to queue an Azure Pipeline. 
-
-inputs:
-  - {name: organization, type: String, description: 'Azure DevOps organization'}
-  - {name: project, type: String, description: 'Azure DevOps project'}
-  - {name: id, type: Integer, description: 'Azure Pipeline definition id'}
-  - {name: pat_env, type: String, default: '', description: 'Name of environment variable containing Azure DevOps PAT'}
-  - {name: pat_path_env, type: String, default: '', description: 'Name of environment variable containing path to Azure DevOps PAT'}
-  - {name: source_branch, type: String, default: '', description: 'Source branch for the pipeline'}
-  - {name: source_version, type: String, default: '', description: 'Source version for the pipeline'}
-  - {name: parameters, type: String, default: '', description: 'Parameters for the pipeline'}
-outputs:
-  - {name: output_url_path, type: String, description: 'Url of the queued pipeline'}
-implementation:
-  container:
-    image: ''
-    command: ['python', '/scripts/queue_pipeline.py']
-    args: [
-      --organization, {inputValue: organization},
-      --project, {inputValue: project},
-      --id, {inputValue: id},
-      --pat_env, {inputValue: pat_env},
-      --pat_path_env, {inputValue: pat_path_env},
-      --source_branch, {inputValue: source_branch},
-      --source_version, {inputValue: source_version},
-      --parameters, {inputValue: parameters},
-      --output_url_path, {outputPath: output_url_path}
-    ]
\ No newline at end of file
diff --git a/components/contrib/azure/azuredevops/queue-pipeline/readme.md b/components/contrib/azure/azuredevops/queue-pipeline/readme.md
deleted file mode 100644
index ada6ebaa870..00000000000
--- a/components/contrib/azure/azuredevops/queue-pipeline/readme.md
+++ /dev/null
@@ -1,70 +0,0 @@
-# Queue Pipeline Task
-
-This task enables you to queue an [Azure Pipelines](https://docs.microsoft.com/en-us/azure/devops/pipelines/?view=azure-devops) pipeline from a Kubeflow pipeline. For example, this task may be used to queue the deployment of a model via Azure Pipelines after the model is trained and registered by the Kubeflow pipeline.
-
-## Inputs
-
-|Name|Type|Required|Description|
-|---|---|---|---|
-|organization|string|Y|The Azure DevOps organization that contains the pipeline to be queued. https[]()://dev.azure.com/`organization`/project/_build?definitionId=id|
-|project|string|Y|The Azure DevOps project that contains the pipeline to be queued. https[]()://dev.azure.com/organization/`project`/_build?definitionId=id|
-|id|string|Y|The id of the pipeline definition to queue. Shown in the URL as *pipelineId* or *definitionId*. https[]()://dev.azure.com/organization/project/_build?definitionId=`id`|
-|pat_env|string|one of pat_env or pat_path_env|The name of the environment variable containing the PAT for Azure Pipelines authentication|
-|pat_path_env|string|one of pat_env or pat_path_env|The name of the environment variable containing a path to the PAT for Azure Pipelines authentication|
-|source_branch|string||The branch of the source code for queuing the pipeline.|
-|source_version|string||The version (e.g. commit id) of the source code for queuing the pipeline.|
-|parameters|string||JSON-serialized string of key-value pairs, e.g. `{ 'x': '1', 'y': '2' }`. These values can be accessed as `$(x)` and `$(y)` in the Azure Pipelines pipeline.|
-
-## Outputs
-
-Output `output_url_path` holds the URL of the newly queued pipeline.
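The `parameters` value must be supplied as a JSON-serialized string of key-value pairs. A minimal sketch of building it before handing it to the component (the variable names here are illustrative):

```python
import json

# Each key becomes an Azure Pipelines variable, e.g. $(model_version) and $(stage).
pipeline_parameters = json.dumps({
    'model_version': '1',
    'stage': 'staging',
})
# pipeline_parameters can now be passed as the component's `parameters` input.
```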
- -## Usage - -```python -import os -import kfp.compiler as compiler -import kfp.components as components -from kfp.azure import use_azure_secret -from kubernetes.client.models import V1EnvVar - - -component_root = os.path.join(os.path.dirname(os.path.abspath(__file__)), ".") -image_repo_name = ".azurecr.io/myrepo" -queue_pipeline_op = components.load_component_from_file(os.path.join(component_root, 'queue-pipeline\component.yaml')) -queue_pipeline_image_name = image_repo_name + '/queue_pipeline:%s' % ('latest') -secret_name = "azdopat" -secret_path = "/app/secrets" -pat_path_env = "PAT_PATH" -secret_file_path_in_volume = "azdopat" -organization = # organization -project = # project -pipeline_id = # id - -def use_image(image_name): - def _use_image(task): - task.image = image_name - return task - return _use_image - -@dsl.pipeline( - name='Azure Sample', - description='Queue Azure DevOps pipeline ' -) -def azdo_sample(): - - operations['Queue AzDO pipeline'] = queue_pipeline_op( - organization=organization, - project=project, - id=pipeline_id, - pat_path_env=pat_path_env). \ - apply(use_secret(secret_name=secret_name, secret_volume_mount_path=secret_path). \ - apply(use_azure_secret()). \ - apply(use_image(queue_pipeline_image_name)). \ - add_env_variable(V1EnvVar( - name=pat_path_env, - value=secret_path + "/" + secret_file_path_in_volume)) - -if __name__ == '__main__': - compiler.Compiler().compile(azdo_sample, __file__ + '.tar.gz') -``` diff --git a/components/contrib/azure/azuredevops/queue-pipeline/src/queue_pipeline.py b/components/contrib/azure/azuredevops/queue-pipeline/src/queue_pipeline.py deleted file mode 100644 index 274f2b0f5f2..00000000000 --- a/components/contrib/azure/azuredevops/queue-pipeline/src/queue_pipeline.py +++ /dev/null @@ -1,105 +0,0 @@ -import argparse -import os -import json -from pathlib import Path -from azure.devops.connection import Connection -from msrest.authentication import BasicAuthentication - - -def get_client(organization, personal_access_token): - organization_url = 'https://dev.azure.com/' + organization - - # Create a connection to the org - credentials = BasicAuthentication('', personal_access_token) - connection = Connection(base_url=organization_url, - creds=credentials) - - # Get the build client - build_client = connection.clients_v6_0.get_build_client() - return build_client - - -def define_build(id, source_branch, source_version, parameters): - build = { - 'definition': { - 'id': id - } - } - - # Add optional parameters - if source_branch: - build["source_branch"] = source_branch - if source_version: - build["source_version"] = source_version - if parameters: - build["parameters"] = parameters - - return build - - -def queue_build(client, build, project): - # The failure responses from Azure Pipelines are pretty good, - # don't do any special handling. 
- queue_build_response = client.queue_build(build, project) - return queue_build_response - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument('-o', '--organization', - required=True, - help='Azure DevOps organization') - parser.add_argument('-p', '--project', - required=True, - help='Azure DevOps project') - parser.add_argument('-i', '--id', - required=True, - help='Id of the pipeline definition') - parser.add_argument('-ppe', '--pat_path_env', - help='Name of environment variable containing the path to the Azure DevOps PAT') # noqa: E501 - parser.add_argument('-pe', '--pat_env', - help='Name of environment variable containing the Azure DevOps PAT') # noqa: E501 - parser.add_argument( - '--source_branch', help='Source branch for the pipeline') - parser.add_argument( - '--source_version', help='Source version for the pipeline') - parser.add_argument( - '--parameters', help='Parameters for the pipeline') - parser.add_argument( - '--output_url_path', help='Url of the queued pipeline') - - - args = parser.parse_args() - - if args.pat_env: - # Read PAT from env var - pat = os.environ[args.pat_env] - elif args.pat_path_env: - # Read PAT from file - with open(os.environ[args.pat_path_env], 'r') as f: - pat = f.readline() - f.close - else: - raise Exception('Please provide a PAT via pat_env or pat_path_env') - - client = get_client(args.organization, pat) - build = define_build(args.id, - args.source_branch, - args.source_version, - args.parameters) - results = queue_build(client, build, args.project) - - # Print the url of the queued build - print(results.url) - - # Write Output - print("Creating output directory") - output_url_path = args.output_url_path - Path(output_url_path).parent.mkdir(parents=True, exist_ok=True) - - with open(output_url_path, 'w') as f: - json.dump(results.url, f) - - -if __name__ == "__main__": - main() diff --git a/components/contrib/azure/azuredevops/readme.md b/components/contrib/azure/azuredevops/readme.md deleted file mode 100644 index a4b5cc1137d..00000000000 --- a/components/contrib/azure/azuredevops/readme.md +++ /dev/null @@ -1,13 +0,0 @@ -# Azure DevOps components for Kubeflow Pipelines - -## Components - -### Queue Pipeline - -The Queue Pipeline component enables you to queue an Azure Pipelines pipeline from a Kubeflow pipeline. - -## Authentication - -The components in this collection authenticate to Azure DevOps by using a [PAT (Personal Access Token)](https://docs.microsoft.com/en-us/azure/devops/organizations/accounts/use-personal-access-tokens-to-authenticate?view=azure-devops&tabs=preview-page#create-a-pat). The PAT should be mounted to `/app/secrets/azdopat`. - -This can be done by storing the PAT in the the Kubernetes secret `azdopat`, then applying `use_secret(secret_name='azdopat', secret_volume_mount_path='/app/secrets')` to the task in the pipeline. 
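For cases where a `use_secret` helper is not available, a minimal sketch of the same wiring with the raw Kubernetes client models; the secret, mount path, and env-var names follow the convention above, and the task is assumed to be a `dsl.ContainerOp` such as the one returned by the Queue Pipeline component:

```python
from kubernetes import client as k8s_client


def use_azdo_pat(task):
    """Mount the `azdopat` secret at /app/secrets and point the step at the PAT file."""
    task.add_volume(k8s_client.V1Volume(
        name='azdopat',
        secret=k8s_client.V1SecretVolumeSource(secret_name='azdopat')))
    task.container.add_volume_mount(k8s_client.V1VolumeMount(
        name='azdopat', mount_path='/app/secrets'))
    task.add_env_variable(k8s_client.V1EnvVar(
        name='PAT_PATH', value='/app/secrets/azdopat'))
    return task


# Inside a pipeline: queue_pipeline_op(..., pat_path_env='PAT_PATH').apply(use_azdo_pat)
```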
diff --git a/components/contrib/azure/azureml/aml-deploy-model/DockerFile b/components/contrib/azure/azureml/aml-deploy-model/DockerFile deleted file mode 100644 index f7ce10bf7a7..00000000000 --- a/components/contrib/azure/azureml/aml-deploy-model/DockerFile +++ /dev/null @@ -1,6 +0,0 @@ -FROM mcr.microsoft.com/azure-cli -RUN pip install --upgrade pip &&\ - az extension add -n azure-cli-ml &&\ - apk add --update jq -COPY src/ /src/ -CMD bash diff --git a/components/contrib/azure/azureml/aml-deploy-model/README.MD b/components/contrib/azure/azureml/aml-deploy-model/README.MD deleted file mode 100644 index 9169b9948fd..00000000000 --- a/components/contrib/azure/azureml/aml-deploy-model/README.MD +++ /dev/null @@ -1,90 +0,0 @@ -# Azure ML Deploy Model - -This component deploys trained model with [Azure Machine Learning Service](https://azure.microsoft.com/en-us/services/machine-learning/#documentation). In Azure Machine Learning Service, trained machine learning models could be deployed as web services in the cloud or locally. You can use your model in web services for inferencing with improved quality. See more on [MLOps: Model management, deployment, and monitoring with Azure Machine Learning](https://docs.microsoft.com/en-us/azure/machine-learning/concept-model-management-and-deployment) - -This `Azure ML Deploy Model` component, is a self-contained set of code to perform model deployment operation using Azure Machine Learning service. - -It performs the following workflow: -- Prepare an inference configuration. -- Prepare an entry script. -- Choose a compute target. -- Deploy the model to the compute target. - -## Inputs - -| Name | Type | Required | Description | -| -------------------------- | ------ | -------- | ----------------------------------------| -| deployment_name | string | Y | Name for the deployed model endpoint | -| model_name | string | Y | Name for registered model and version | -| inference_config | string | Y | File Path for inferenceconfig.json | -| deployment_config | string | Y | File Path for deploymentconfig.json | -| tenant_id | string | Y | Azure AD tenant Id | -| service_principal_id | string | Y | Azure service principal (client) Id | -| service_principal_password | string | Y | Azure service principal password | -| subscription_id | string | Y | Azure subscription Id | -| resource_group | string | Y | Azure resource group | -| workspace | string | Y | Azure ML workspace name | - -## Outputs -Output `output_config` holds description of the deployed web service for the machine learning model. -Output `score_uri` holds endpoint for the deployed model. - -## Prerequisites - -Ensure Kubeflow has been installed on AKS following [instructions for deploying Kubeflow on Azure](https://www.kubeflow.org/docs/azure/). - -Azure Machine Learning service will be used to deploy the model, store the metadata and manage the active deployment. To create a Machine Learning Service workspace: 1. Go to [the Azure portal](https://portal.azure.com) and click on your resource group. 2. Select the **add a new resource** option. 3. Search for **Machine Learning Studio Workspace** and use the default options, taking note of the name you decide for it. - -You need to create a [container registry](https://docs.microsoft.com/en-us/azure/container-registry/container-registry-intro) to store those images in the cloud so that Kubeflow can pull the images as they are needed. 
[See here](https://docs.microsoft.com/en-us/azure/container-registry/container-registry-get-started-portal) for step-by-step guidance to create an Azure Container Registry. Please ensure enable **admin user**, and change the SKU option to **Premium**. - -In order for the AKS cluster to have access to pulling images created for execution of the pipeline, you will need to [update](https://docs.microsoft.com/en-us/azure/aks/cluster-container-registry-integration) your cluster so that it is able to pull the images from the container registry we just created by running: - -```shell -az aks update -n {myAKSCluster} -g {MyResourceGroup} --attach-acr {REGISTRY_NAME} -``` - -A service principal is used to allow your pipeline to securely interface with your Azure services without having to directly login in the pipeline and use admin privileges. See [here](https://docs.microsoft.com/en-us/powerapps/developer/common-data-service/walkthrough-register-app-azure-active-directory) for a detailed guide to create a service principal. Also please ensure to add a **Contributor** role to the service principal to be able access Azure resources. - -## Usage -In `./src` folder, adjust pre-configured `inferenceconfig.json`, `deploymentconfig.json`, and `environment.yml` to specify the resources for deployment like the compute type, number of cores etc. Additionaly, `deploymentconfig_aks.json` could be specified from parameters to use Azure Kubernetes Service to deploy the web service. `score.py` is the entry script for inference, where `init()` and `run(data)` are the functions to be *edited* to use the model. See here for [a detailed instruction on constructing the entry script](https://docs.microsoft.com/en-us/azure/machine-learning/how-to-deploy-and-where?tabs=azcli). See [Deploy your existing model with Azure](https://docs.microsoft.com/en-us/azure/machine-learning/how-to-deploy-existing-model#entry-script-scorepy) for an example of `score.py`. If additional pip packages are used for `score.py`, please ensure them being added to `environment.yml`. In the end, build and push the image to the Container Registry, the pipeline operation will directly pull the image from ACR. - -A sample script to build and push the image: - -```shell -docker build . -t {your_ACR_name}.azurecr.io/deploy/{your_image_name}:latest -docker push {your_ACR_name}.azurecr.io/deploy/{your_image_name}:latest -``` - - -An example usage for the component is provided in the `compile_pipeline.py` file. It is a single-operation pipeline to use this component and `use_azure_secret` function from `kfp.azure`. You can also provide service principal parameters as pipeline parameters. - -``` -@dsl.pipeline( - name='AML Component Sample', - description='Deploy Model using Azure Machine learning' -) -def model_deploy( - resource_group, - workspace -): - - operation = deploy_operation(deployment_name='deploymentname', - model_name='model_name:1', - tenant_id='$(AZ_TENANT_ID)', - service_principal_id='$(AZ_CLIENT_ID)', - service_principal_password='$(AZ_CLIENT_SECRET)', - subscription_id='$(AZ_SUBSCRIPTION_ID)', - resource_group=resource_group, - workspace=workspace, - inference_config='scripts/inferenceconfig.json', - deployment_config='scripts/deploymentconfig.json'). \ - apply(use_azure_secret()). 
\ - apply(use_image(deploy_image_name)) - -if __name__ == '__main__': - compiler.Compiler().compile(model_deploy, __file__ + '.tar.gz') -``` - -**Note:** The deployment name requires alphanumeric characters and the model_name shall be a combination of "model name" and "version number". - -Once the deployment has finished, navigate to your Azure Machine Learning Workspace and select the `Endpoints` tab to find your Real-Time endpoints. You can access your model as a web-service by the `REST endpoint`. It should look like "http://{your_rest_endpoint}.{region}.azurecontainer.io/score?{your_data}". Alternatively, the output `score_uri` should also contain the information for the model endpoint. The `output_config` contains detailed information for the deployed web service. \ No newline at end of file diff --git a/components/contrib/azure/azureml/aml-deploy-model/compile_pipeline.py b/components/contrib/azure/azureml/aml-deploy-model/compile_pipeline.py deleted file mode 100644 index 38ffc7bedde..00000000000 --- a/components/contrib/azure/azureml/aml-deploy-model/compile_pipeline.py +++ /dev/null @@ -1,47 +0,0 @@ -import os -import kfp.compiler as compiler -import kfp.components as components -from kfp.azure import use_azure_secret -import kfp.dsl as dsl - -component_root = os.path.join(os.path.dirname(os.path.abspath(__file__)), ".") -image_repo_name = ".azurecr.io/deploy" # the container registery for the container operation and path in the ACR -file_path = os.path.join(component_root, "component.yaml") - -# Loading the component.yaml file for deployment operation -deploy_operation = components.load_component_from_file(file_path) - -# The deploy_image_name shall be the container image for the operation -# It shall be something like .azurecr.io/deploy/aml-deploy-model:latest -deploy_image_name = image_repo_name + '/aml-deploy-model:%s' % ('latest') - -def use_image(image_name): - def _use_image(task): - task.image = image_name - return task - return _use_image - -@dsl.pipeline( - name='AML Component Sample', - description='Deploy Model using Azure Machine learning' -) -def model_deploy( - resource_group, - workspace -): - - operation = deploy_operation(deployment_name='deploymentname', - model_name='model_name:1', - tenant_id='$(AZ_TENANT_ID)', - service_principal_id='$(AZ_CLIENT_ID)', - service_principal_password='$(AZ_CLIENT_SECRET)', - subscription_id='$(AZ_SUBSCRIPTION_ID)', - resource_group=resource_group, - workspace=workspace, - inference_config='src/inferenceconfig.json', - deployment_config='src/deploymentconfig.json'). \ - apply(use_azure_secret()). \ - apply(use_image(deploy_image_name)) - -if __name__ == '__main__': - compiler.Compiler().compile(model_deploy, __file__ + '.tar.gz') \ No newline at end of file diff --git a/components/contrib/azure/azureml/aml-deploy-model/component.yaml b/components/contrib/azure/azureml/aml-deploy-model/component.yaml deleted file mode 100644 index 7facaecd6b3..00000000000 --- a/components/contrib/azure/azureml/aml-deploy-model/component.yaml +++ /dev/null @@ -1,34 +0,0 @@ -name: Azure ML Deploy Model -description: A Kubeflow Pipeline Component to deploy registered model to Azure Machine Learning. 
-inputs: -- {name: deployment_name, type: String} -- {name: model_name, type: String} -- {name: inference_config, type: String, default: 'src/inferenceconfig.json', optional: true} -- {name: deployment_config, type: String, default: 'src/deploymentconfig.json', optional: true} -- {name: tenant_id, type: String} -- {name: service_principal_id, type: String} -- {name: service_principal_password, type: String} -- {name: subscription_id, type: String} -- {name: resource_group, type: String} -- {name: workspace, type: String} -outputs: -- {name: output_config, type: String, description: 'Description of the deployed web-service.'} -- {name: score_uri, type: String, description: 'The endpoint for deployed model.'} -implementation: - container: - image: '' - command: [ - "sh", "/src/deploy.sh", - '-n', {inputValue: deployment_name}, - '-m', {inputValue: model_name}, - '-i', {inputValue: inference_config}, - '-d', {inputValue: deployment_config}, - '-s', {inputValue: service_principal_id}, - '-p', {inputValue: service_principal_password}, - '-u', {inputValue: subscription_id}, - '-r', {inputValue: resource_group}, - '-w', {inputValue: workspace}, - '-t', {inputValue: tenant_id}, - '-o', {outputPath: output_config}, - '-e', {outputPath: score_uri} - ] diff --git a/components/contrib/azure/azureml/aml-deploy-model/src/deploy.sh b/components/contrib/azure/azureml/aml-deploy-model/src/deploy.sh deleted file mode 100644 index 75d5bf91665..00000000000 --- a/components/contrib/azure/azureml/aml-deploy-model/src/deploy.sh +++ /dev/null @@ -1,46 +0,0 @@ -#!/bin/sh - -# Deploy registered model to Azure Machine Learning -while getopts "n:m:i:d:s:p:u:r:w:t:o:e:" option; - do - case "$option" in - n ) DEPLOYMENT_NAME=${OPTARG};; - m ) MODEL_NAME=${OPTARG};; - i ) INFERENCE_CONFIG=${OPTARG};; - d ) DEPLOYMENTCONFIG=${OPTARG};; - s ) SERVICE_PRINCIPAL_ID=${OPTARG};; - p ) SERVICE_PRINCIPAL_PASSWORD=${OPTARG};; - u ) SUBSCRIPTION_ID=${OPTARG};; - r ) RESOURCE_GROUP=${OPTARG};; - w ) WORKSPACE=${OPTARG};; - t ) TENANT_ID=${OPTARG};; - o ) OUTPUT_CONFIG_PATH=${OPTARG};; - e ) SCORE_URI=${OPTARG};; - esac -done -az login --service-principal --username ${SERVICE_PRINCIPAL_ID} --password ${SERVICE_PRINCIPAL_PASSWORD} -t ${TENANT_ID} -az account set --subscription ${SUBSCRIPTION_ID} -az ml model deploy -n ${DEPLOYMENT_NAME} -m ${MODEL_NAME} --ic ${INFERENCE_CONFIG} --dc ${DEPLOYMENTCONFIG} -w ${WORKSPACE} -g ${RESOURCE_GROUP} --overwrite -v - -# write the web-service description to output folder -parentdir="$(dirname "$OUTPUT_CONFIG_PATH")" -if [ -d "$parentdir" ]; -then - echo Found The directory ${parentdir}. -else - echo Parent directory did not exist, creating parent directory. - mkdir -p ${parentdir} -fi -az ml service show -n ${DEPLOYMENT_NAME} --resource-group ${RESOURCE_GROUP} --workspace-name ${WORKSPACE} > ${OUTPUT_CONFIG_PATH} - -# Get the scoring uri from the deployment config -scoreuri_parentdir="$(dirname "$SCORE_URI")" -if [ -d "$scoreuri_parentdir" ]; -then - echo Found The directory ${scoreuri_parentdir}. -else - echo Parent directory did not exist, creating parent directory. 
- mkdir -p ${scoreuri_parentdir} -fi -jq .scoringUri ${OUTPUT_CONFIG_PATH} > ${SCORE_URI} - diff --git a/components/contrib/azure/azureml/aml-deploy-model/src/deploymentconfig.json b/components/contrib/azure/azureml/aml-deploy-model/src/deploymentconfig.json deleted file mode 100644 index 4c25c45c284..00000000000 --- a/components/contrib/azure/azureml/aml-deploy-model/src/deploymentconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "containerResourceRequirements": { - "cpu": 1, - "memoryInGB": 1 - }, - "computeType": "ACI", - "enableAppInsights": "True" - } \ No newline at end of file diff --git a/components/contrib/azure/azureml/aml-deploy-model/src/deploymentconfig_aks.json b/components/contrib/azure/azureml/aml-deploy-model/src/deploymentconfig_aks.json deleted file mode 100644 index eeed444ae80..00000000000 --- a/components/contrib/azure/azureml/aml-deploy-model/src/deploymentconfig_aks.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "computeType": "aks", - "autoScaler": - { - "autoscaleEnabled": true, - "minReplicas": 1, - "maxReplicas": 3, - "refreshPeriodInSeconds": 1, - "targetUtilization": 70 - }, - "dataCollection": - { - "storageEnabled": true - }, - "containerResourceRequirements": - { - "cpu": 0.5, - "memoryInGB": 1.0 - } -} \ No newline at end of file diff --git a/components/contrib/azure/azureml/aml-deploy-model/src/environment.yml b/components/contrib/azure/azureml/aml-deploy-model/src/environment.yml deleted file mode 100644 index a973e562c54..00000000000 --- a/components/contrib/azure/azureml/aml-deploy-model/src/environment.yml +++ /dev/null @@ -1,19 +0,0 @@ -# Conda environment specification. The dependencies defined in this file will -# be automatically provisioned for runs with userManagedDependencies=False. - -# Details about the Conda environment file format: -# https://conda.io/docs/user-guide/tasks/manage-environments.html#create-env-file-manually - -# This file will be referenced by inferenceconfig.json to be the container environment -name: project_environment -dependencies: - # The python interpreter version. - # Currently Azure ML only supports 3.5.2 and later. -- python=3.6.2 - -- pip: - # Required packages for AzureML execution, history, and data preparation. - - azureml-defaults - - azureml-core - - numpy - - requests \ No newline at end of file diff --git a/components/contrib/azure/azureml/aml-deploy-model/src/inferenceconfig.json b/components/contrib/azure/azureml/aml-deploy-model/src/inferenceconfig.json deleted file mode 100644 index b39dd150608..00000000000 --- a/components/contrib/azure/azureml/aml-deploy-model/src/inferenceconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "entryScript": "/src/score.py", - "runtime": "python", - "condaFile": "/src/environment.yml", - "extraDockerfileSteps": null, - "sourceDirectory": null, - "enableGpu": false, - "baseImage": null, - "baseImageRegistry": null - } \ No newline at end of file diff --git a/components/contrib/azure/azureml/aml-deploy-model/src/score.py b/components/contrib/azure/azureml/aml-deploy-model/src/score.py deleted file mode 100644 index c7640f294e3..00000000000 --- a/components/contrib/azure/azureml/aml-deploy-model/src/score.py +++ /dev/null @@ -1,12 +0,0 @@ -from azureml.core.model import Model -import numpy as np -def init(): - pass - -def run(data): - try: - # You can return any data type, as long as it is JSON serializable. 
- return 'AML Model inference result, the data is {0}'.format(data) - except Exception as e: - error = str(e) - return error \ No newline at end of file diff --git a/components/contrib/azure/azureml/aml-register-model/Dockerfile b/components/contrib/azure/azureml/aml-register-model/Dockerfile deleted file mode 100644 index 8763bc76c9c..00000000000 --- a/components/contrib/azure/azureml/aml-register-model/Dockerfile +++ /dev/null @@ -1,11 +0,0 @@ -FROM python:3.7-slim - -# pip install -COPY requirements.txt /scripts/requirements.txt -RUN pip install -r /scripts/requirements.txt - -# only for local testing -COPY src/register.py /scripts/register.py - -# will be overwritten by kf pipeline -ENTRYPOINT [ "python", "/scripts/register.py" ] diff --git a/components/contrib/azure/azureml/aml-register-model/README.MD b/components/contrib/azure/azureml/aml-register-model/README.MD deleted file mode 100644 index f3a616b463e..00000000000 --- a/components/contrib/azure/azureml/aml-register-model/README.MD +++ /dev/null @@ -1,67 +0,0 @@ -# Azure ML Register Model - -This component registers trained ML model with [Azure Machine Learning Service](https://azure.microsoft.com/en-us/services/machine-learning/#documentation) and tags the model with Kubeflow run id. - -## Inputs - -| Name | Type | Required | Description | -| -------------------------- | ------ | -------- | -------------------------------- | -| base_path | string | Y | Base path to the model | -| model_file | string | Y | Model file for example .pkl | -| model_name | string | Y | Model Name | -| tenant_id | string | Y | Azure AD tenant Id | -| service_principal_id | string | Y | Azure service principal Id | -| service_principal_password | string | Y | Azure service principal password | -| subscription_id | string | Y | Azure subscription Id | -| resource_group | string | Y | Azure resource group | -| workspace | string | Y | Azure ML workspace name | -| run_id | string | Y | Kubeflow pipeline run Id | - -## Outputs - -Output `output_model_path` holds registered model name. - -## Usage - -``` -import os -import kfp.compiler as compiler -import kfp.components as components -from kfp.azure import use_azure_secret - -component_root = os.path.join(os.path.dirname(os.path.abspath(__file__)), ".") -image_repo_name = "[ACR_NAME].azurecr.io/myrepo" -register_op = components.load_component_from_file(os.path.join(component_root, 'aml-register-model/component.yaml')) -register_image_name = image_repo_name + '/aml-register-model:%s' % ('latest') - -def use_image(image_name): - def _use_image(task): - task.image = image_name - return task - return _use_image - -@dsl.pipeline( - name='AML Sample ', - description='AML Register Model' -) -def model_train( - resource_group, - workspace, -): - - operations['register to AML'] = register_op(base_path='path_to_model', - model_file='latest.h5', - model_name='model_name', - tenant_id='$(AZ_TENANT_ID)', - service_principal_id='$(AZ_CLIENT_ID)', - service_principal_password='$(AZ_CLIENT_SECRET)', - subscription_id='$(AZ_SUBSCRIPTION_ID)', - resource_group=resource_group, - workspace=workspace, - run_id=dsl.RUN_ID_PLACEHOLDER). \ - apply(use_azure_secret()). 
\ - apply(use_image(register_image_name)) - -if __name__ == '__main__': - compiler.Compiler().compile(model_train, __file__ + '.tar.gz') -``` diff --git a/components/contrib/azure/azureml/aml-register-model/build.sh b/components/contrib/azure/azureml/aml-register-model/build.sh deleted file mode 100644 index ca2cd0a7c26..00000000000 --- a/components/contrib/azure/azureml/aml-register-model/build.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/bash -e -while getopts "r:" option; - do - case "$option" in - r ) REGISTRY_NAME=${OPTARG};; - esac -done -image_name=${REGISTRY_NAME}.azurecr.io/myrepo/aml-register-model # Specify the image name here -image_tag=latest -full_image_name=${image_name}:${image_tag} - -cd "$(dirname "$0")" -docker build -t "${full_image_name}" . -docker push "$full_image_name" \ No newline at end of file diff --git a/components/contrib/azure/azureml/aml-register-model/component.yaml b/components/contrib/azure/azureml/aml-register-model/component.yaml deleted file mode 100644 index f875eb840ef..00000000000 --- a/components/contrib/azure/azureml/aml-register-model/component.yaml +++ /dev/null @@ -1,32 +0,0 @@ -name: Azure ML Register Model -description: A Kubeflow pipeline component register model with Azure ML. -inputs: -- {name: base_path, type: String} -- {name: model_file, type: String} -- {name: model_name, type: String} -- {name: tenant_id, type: String} -- {name: service_principal_id, type: String} -- {name: service_principal_password, type: String} -- {name: subscription_id, type: String} -- {name: resource_group, type: String} -- {name: workspace, type: String} -- {name: run_id, type: String} -outputs: - - {name: output_model_path, type: String, description: 'Name of registered model'} -implementation: - container: - image: '' - command: [ - "python", "/scripts/register.py", - '--base_path', {inputValue: base_path}, - '--model', {inputValue: model_file}, - '--model_name', {inputValue: model_name}, - '--tenant_id', {inputValue: tenant_id}, - '--service_principal_id', {inputValue: service_principal_id}, - '--service_principal_password', {inputValue: service_principal_password}, - '--subscription_id', {inputValue: subscription_id}, - '--resource_group', {inputValue: resource_group}, - '--workspace', {inputValue: workspace}, - '--run_id', {inputValue: run_id}, - '--output_model_path',{outputPath: output_model_path} - ] diff --git a/components/contrib/azure/azureml/aml-register-model/requirements.txt b/components/contrib/azure/azureml/aml-register-model/requirements.txt deleted file mode 100644 index ce54ad9f2b1..00000000000 --- a/components/contrib/azure/azureml/aml-register-model/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -pathlib2 -requests -azureml-sdk diff --git a/components/contrib/azure/azureml/aml-register-model/src/register.py b/components/contrib/azure/azureml/aml-register-model/src/register.py deleted file mode 100644 index 1b28876c465..00000000000 --- a/components/contrib/azure/azureml/aml-register-model/src/register.py +++ /dev/null @@ -1,120 +0,0 @@ -import json -from os.path import relpath -import argparse -from pathlib2 import Path -import azureml -from azureml.core import Workspace -from azureml.core.model import Model -from azureml.core.authentication import ServicePrincipalAuthentication - - -def info(msg, char="#", width=75): - print("") - print(char * width) - print(char + " %0*s" % ((-1 * width) + 5, msg) + char) - print(char * width) - - -def get_ws(tenant_id, service_principal_id, - service_principal_password, subscription_id, resource_group, 
workspace): # noqa: E501 - auth_args = { - 'tenant_id': tenant_id, - 'service_principal_id': service_principal_id, - 'service_principal_password': service_principal_password - } - - ws_args = { - 'auth': ServicePrincipalAuthentication(**auth_args), - 'subscription_id': subscription_id, - 'resource_group': resource_group - } - ws = Workspace.get(workspace, **ws_args) - return ws - - -def run(mdl_path, model_name, ws, tgs): - print(ws.get_details()) - - print('\nSaving model {} to {}'.format(mdl_path, model_name)) - - # Model Path needs to be relative - mdl_path = relpath(mdl_path, '.') - - model = Model.register(ws, model_name=model_name, model_path=mdl_path, tags=tgs) - info("Model Registered") - - - -if __name__ == "__main__": - # print("Ok") - # argparse stuff for model path and model name - parser = argparse.ArgumentParser(description='sanity check on model') - parser.add_argument('-b', '--base_path', - help='directory to base folder', default='../../data') - parser.add_argument( - '-m', '--model', help='path to model file', default='/model/latest.h5') - parser.add_argument('-n', '--model_name', - help='AML Model name', default='tacosandburritos') - parser.add_argument('-t', '--tenant_id', help='tenant_id') - parser.add_argument('-s', '--service_principal_id', - help='service_principal_id') - parser.add_argument('-p', '--service_principal_password', - help='service_principal_password') - parser.add_argument('-u', '--subscription_id', help='subscription_id') - parser.add_argument('-r', '--resource_group', help='resource_group') - parser.add_argument('-w', '--workspace', help='workspace') - parser.add_argument('-ri', '--run_id', help='pieline run id') - parser.add_argument('-omp','--output_model_path', help='Registered ML model name') - args = parser.parse_args() - - print('Azure ML SDK Version: {}'.format(azureml.core.VERSION)) - args.model = 'model/' + args.model - model_path = str(Path(args.base_path).resolve( - strict=False).joinpath(args.model).resolve(strict=False)) - params_path = str(Path(args.base_path).resolve( - strict=False).joinpath('params.json').resolve(strict=False)) - wsrgs = { - 'tenant_id': args.tenant_id, - 'service_principal_id': args.service_principal_id, - 'service_principal_password': args.service_principal_password, - 'subscription_id': args.subscription_id, - 'resource_group': args.resource_group, - 'workspace': args.workspace - } - rgs = { - 'mdl_path': model_path, - 'model_name': args.model_name - } - - print("Creating output directory") - output_model_path = args.output_model_path - Path(output_model_path).parent.mkdir(parents=True, exist_ok=True) - - # Write model name to component output - with open(output_model_path, 'w') as f: - json.dump(args.model_name, f) - - - # printing out args for posterity - for i in wsrgs: - if i == 'service_principal_password': - print('{} => **********'.format(i)) - else: - print('{} => {}'.format(i, wsrgs[i])) - - for i in rgs: - print('{} => {}'.format(i, rgs[i])) - - with(open(str(params_path), 'r')) as f: - tags = json.load(f) - - tags['run_id'] = args.run_id - print('\n\nUsing the following tags:') - for tag in tags: - print('{} => {}'.format(tag, tags[tag])) - - rgs['tgs'] = tags - - workspc = get_ws(**wsrgs) - rgs['ws'] = workspc - run(**rgs) diff --git a/components/contrib/azure/azuresynapse/runsparkjob/Dockerfile b/components/contrib/azure/azuresynapse/runsparkjob/Dockerfile deleted file mode 100644 index eeb8e1fcfa0..00000000000 --- a/components/contrib/azure/azuresynapse/runsparkjob/Dockerfile +++ /dev/null @@ -1,6 
+0,0 @@ -FROM mcr.microsoft.com/azure-cli -RUN pip install --upgrade pip &&\ - az extension add --name synapse &&\ - apk add --update jq -COPY src/ /src/ -CMD bash \ No newline at end of file diff --git a/components/contrib/azure/azuresynapse/runsparkjob/README.md b/components/contrib/azure/azuresynapse/runsparkjob/README.md deleted file mode 100644 index f447c04b591..00000000000 --- a/components/contrib/azure/azuresynapse/runsparkjob/README.md +++ /dev/null @@ -1,96 +0,0 @@ -# Azure Synapse Run Spark Jobs - -This component submits a Spark job to an [Azure Synapse workspace](https://docs.microsoft.com/en-us/azure/synapse-analytics/). It provides the following functions: - -- Submit a Spark job to a Spark pool in an Azure Synapse workspace - - If the Spark pool doesn't exist, a new pool will be created with the [configuration](./src/spark_pool_config.yaml) -- You can choose to return as soon as the job is scheduled, or wait until the job has finished. - -## Input Parameters -### Job scheduling parameters -| Name | Type | Required | Description | -| -------------------------- | ------ | -------- | ----------------------------------------| -| executor_size | String | Y | Size of executors. Accepted values: Large, Medium, Small | -| executors | Integer | Y | Number of executors | -| main_class_name | String | Y | The fully-qualified identifier of the main class in the main definition file | -| main_definition_file | String | Y | The main file used for the job | -| name | String | Y | The Spark job name | -| spark_pool_name | String | Y | The Spark pool name | -| workspace_name | String | Y | The Synapse workspace name | -| subscription_id | String | Y | The id of the Azure subscription that the Synapse workspace is in | -| resource_group | String | Y | The Azure resource group that the Synapse workspace is in | -| command_line_arguments | String | N | The command line arguments for the job | -| configuration | String | N | The configuration of the Spark job | -| language | String | N | The language of the Spark job. Accepted values: CSharp, PySpark, Python, Scala, Spark, SparkDotNet | -| reference_files | String | N | Additional files used for reference in the main definition file | -| tags | String | N | Space-separated tags: key[=value] [key[=value] ...] | -| spark_pool_config_file | String | N | Path of the Spark pool configuration yaml file. Default value is ./src/spark_pool_config.yaml | -| wait_until_job_finished | Bool | N | Whether to wait for the job to complete. Default value is True | -| waiting_timeout_in_seconds | Integer | N | The waiting timeout in seconds. Default value is 3600 | - -### Authentication parameters -| Name | Type | Required | Description | -| -------------------------- | ------ | -------- | ----------------------------------------| -| service_principal_id | String | Y | The service principal client id | -| service_principal_password | String | Y | The service principal password/secret | -| tenant_id | String | Y | The Azure tenant id for the service principal | - -## Prerequisites -- [Create an AKS cluster](https://docs.microsoft.com/en-us/azure/aks/kubernetes-walkthrough-portal). -- [Install Kubeflow on AKS](https://www.kubeflow.org/docs/azure/). -- [Create AAD service principal](https://docs.microsoft.com/cli/azure/create-an-azure-service-principal-azure-cli#password-based-authentication).
-- Create an Azure Synapse workspace and grant the following permissions to the service principal: - - Azure Owner or Contributor for the Synapse workspace (Azure RBAC) - - Synapse Apache Spark Administrator in Synapse RBAC - - Storage Blob Data Owner for the attached ADLS account (storing the job - definition file) - - See [Synapse RBAC Roles documentation](https://docs.microsoft.com/en-us/azure/synapse-analytics/security/synapse-workspace-understand-what-role-you-need) for more details about Synapse RBAC. See [here](https://docs.microsoft.com/azure/role-based-access-control/role-assignments-cli#step-4-add-role-assignment) for how to add a role assignment. -- [Create an Azure Container Registry](https://docs.microsoft.com/en-us/azure/container-registry/container-registry-get-started-portal) and grant access to the AKS cluster by running: -```shell -az login -az aks update -n -g --attach-acr -``` -- Create a secret for service principal authentication in AKS by running: -```shell -kubectl create secret generic azcreds \ - --from-literal=AZ_TENANT_ID='' \ - --from-literal=AZ_CLIENT_ID='' \ - --from-literal=AZ_CLIENT_SECRET='' \ - --from-literal=AZ_SUBSCRIPTION_ID='' \ - -n kubeflow -``` - See [here](https://docs.microsoft.com/azure/aks/cluster-container-registry-integration) for more details about AKS and ACR integration. - -## Usage -### Step 1. Build the docker image and upload to Azure Container Registry -First, log in to your Azure container registry by running: -```shell -az login -sudo az acr login -n -``` -Run the following commands to build and upload the image: -```shell -docker build . -t .azurecr.io/deploy/:latest -docker push .azurecr.io/deploy/:latest -``` - -> **NOTE**: You can also use container registries other than Azure Container Registry. Please follow the instructions from the service provider to configure integration with Kubeflow and push the images. - -### Step 2. Update the parameters -In sample.py, we set *main_definition_file* and *command_line_arguments* as pipeline input parameters. You can update the pipeline input parameters and component parameters as needed before building the sample pipeline. - -If you need the Azure Synapse Spark job component to create a new Spark pool, make sure you review and update the [Spark pool configuration file](./src/spark_pool_config.yaml). - -### Step 3. Build the sample pipeline using sample.py -First, install the kfp package: -```shell -pip install kfp -``` - -Run the sample.py script to build and compile the sample pipeline: -```shell -python sample.py --image_name --image_repo_name -``` - -### Step 4. Download the .gz file and upload the pipeline in the Kubeflow UI \ No newline at end of file diff --git a/components/contrib/azure/azuresynapse/runsparkjob/component.yaml b/components/contrib/azure/azuresynapse/runsparkjob/component.yaml deleted file mode 100644 index cf5c343f956..00000000000 --- a/components/contrib/azure/azuresynapse/runsparkjob/component.yaml +++ /dev/null @@ -1,50 +0,0 @@ -name: Azure Synapse Run Spark Job -description: A Kubeflow Pipeline Component to run a Spark job in Azure Synapse.
-inputs: -- {name: executor_size, type: String} -- {name: executors, type: Integer} -- {name: main_class_name, type: String} -- {name: main_definition_file, type: String} -- {name: name, type: String} -- {name: spark_pool_name, type: String} -- {name: workspace_name, type: String} -- {name: service_principal_id, type: String} -- {name: service_principal_password, type: String} -- {name: subscription_id, type: String} -- {name: resource_group, type: String} -- {name: tenant_id, type: String} -- {name: command_line_arguments, type: String, default: '', optional: true} -- {name: configuration, type: String, default: '', optional: true} -- {name: language, type: String, default: '', optional: true} -- {name: reference_files, type: String, default: '', optional: true} -- {name: tags, type: String, default: '', optional: true} -- {name: spark_pool_config_file, type: String, default: './src/spark_pool_config.yaml', optional: true} -- {name: wait_until_job_finished, type: Bool, default: True, optional: true} -- {name: waiting_timeout_in_seconds, type: Integer, default: 3600, optional: true} -outputs: -implementation: - container: - image: '' - command: [ - "bash", "./src/submit_job.sh", - '-s', {inputValue: executor_size}, - '-e', {inputValue: executors}, - '-c', {inputValue: main_class_name}, - '-d', {inputValue: main_definition_file}, - '-n', {inputValue: name}, - '-p', {inputValue: spark_pool_name}, - '-w', {inputValue: workspace_name}, - '-i', {inputValue: service_principal_id}, - '-r', {inputValue: service_principal_password}, - '-u', {inputValue: subscription_id}, - '-g', {inputValue: resource_group}, - '-t', {inputValue: tenant_id}, - '-a', {inputValue: command_line_arguments}, - '-o', {inputValue: configuration}, - '-l', {inputValue: language}, - '-f', {inputValue: reference_files}, - '-q', {inputValue: tags}, - '-x', {inputValue: spark_pool_config_file}, - '-y', {inputValue: wait_until_job_finished}, - '-h', {inputValue: waiting_timeout_in_seconds} - ] \ No newline at end of file diff --git a/components/contrib/azure/azuresynapse/runsparkjob/sample.py b/components/contrib/azure/azuresynapse/runsparkjob/sample.py deleted file mode 100644 index 3bcb5419f47..00000000000 --- a/components/contrib/azure/azuresynapse/runsparkjob/sample.py +++ /dev/null @@ -1,66 +0,0 @@ -import os -import kfp.compiler as compiler -import kfp.components as components -from kfp.azure import use_azure_secret -import kfp.dsl as dsl -import argparse - -parser = argparse.ArgumentParser(description='Process inputs.') -parser.add_argument('--image_name', type=str, default='kubeflow_synapse_component') -parser.add_argument('--image_repo_name', type=str, default='kubeflowdemo') -args = parser.parse_args() - -component_root = os.path.join(os.path.dirname(os.path.abspath(__file__)), ".") -image_repo_name = args.image_repo_name # the container registery for the container operation and path in the ACR -image_name = args.image_name -file_path = os.path.join(component_root, "component.yaml") - -# Loading the component.yaml file for deployment operation -run_job_operation = components.load_component_from_file(file_path) - -# The run_job_image_name shall be the container image for the operation -# It shall be something like .azurecr.io/deploy/aml-deploy-model:latest -# If you are using a container registry other than Azure Container Registry, update the image name correspondingly -run_job_image_name = image_repo_name + '.azurecr.io/deploy/' + image_name + ':latest' - -print(run_job_image_name) - -def use_image(image_name): - def 
_use_image(task): - task.image = image_name - return task - return _use_image - -@dsl.pipeline( - name='Azure Synapse Component Sample', - description='Run spark jobs in Azure Synapse' -) -def run_spark_job( - main_definition_file, - command_line_arguments -): - operation = run_job_operation(executor_size='Small', - executors=1, - main_class_name='""', - main_definition_file=main_definition_file, - name='kubeflowsynapsetest', - tenant_id='$(AZ_TENANT_ID)', - service_principal_id='$(AZ_CLIENT_ID)', - service_principal_password='$(AZ_CLIENT_SECRET)', - subscription_id='$(AZ_SUBSCRIPTION_ID)', - resource_group='kubeflow-demo-rg', - command_line_arguments=command_line_arguments, - spark_pool_name='kubeflowsynapse', - language='', - reference_files='', - configuration='', - tags='', - spark_pool_config_file='./src/spark_pool_config.yaml', - wait_until_job_finished=True, - waiting_timeout_in_seconds=3600, - workspace_name='kubeflow-demo'). \ - apply(use_azure_secret()). \ - apply(use_image(run_job_image_name)) - -if __name__ == '__main__': - compiler.Compiler().compile(run_spark_job, __file__ + '.tar.gz') \ No newline at end of file diff --git a/components/contrib/azure/azuresynapse/runsparkjob/src/requirement.txt b/components/contrib/azure/azuresynapse/runsparkjob/src/requirement.txt deleted file mode 100644 index cc6825de75d..00000000000 --- a/components/contrib/azure/azuresynapse/runsparkjob/src/requirement.txt +++ /dev/null @@ -1,8 +0,0 @@ -kfp -azure-mgmt-synapse -azure-synapse-spark -azure-identity -mlflow -numpy>=1.14.3 -pandas>=1.0.0 -scikit-learn=0.19.1 \ No newline at end of file diff --git a/components/contrib/azure/azuresynapse/runsparkjob/src/spark_pool_config.yaml b/components/contrib/azure/azuresynapse/runsparkjob/src/spark_pool_config.yaml deleted file mode 100644 index 915bedd5315..00000000000 --- a/components/contrib/azure/azuresynapse/runsparkjob/src/spark_pool_config.yaml +++ /dev/null @@ -1,13 +0,0 @@ -SPARK_POOL_NODE_COUNT: 3 -SPARK_POOL_NODE_SIZE: Medium -SPARK_POOL_SPARK_VERSION: 2.4 -SPARK_POOL_ENABLE_AUTO_PAUSE: true -SPARK_POOL_ENABLE_AUTO_SCALE: true -SPARK_POOL_DELAY: 30 -SPARK_POOL_LIBRARY_REQUIREMENTS_FILE: ./src/requirement.txt -SPARK_POOL_MAX_NODE_COUNT: 10 -SPARK_POOL_MIN_NODE_COUNT: 1 -SPARK_POOL_NODE_SIZE_FAMILY: MemoryOptimized -SPARK_POOL_DEFAULT_SPARK_LOG_FOLDER: /logs -SPARK_POOL_SPARK_EVENTS_FOLDER: /events -SPARK_POOL_TAGS: createdBy=kubeflow diff --git a/components/contrib/azure/azuresynapse/runsparkjob/src/submit_job.sh b/components/contrib/azure/azuresynapse/runsparkjob/src/submit_job.sh deleted file mode 100644 index 40fdcab3c4d..00000000000 --- a/components/contrib/azure/azuresynapse/runsparkjob/src/submit_job.sh +++ /dev/null @@ -1,158 +0,0 @@ -#!/bin/bash - -# Deploy registered model to Azure Machine Learning -while getopts "s:e:c:d:n:p:w:i:r:u:g:t:a:o:l:f:q:x:y:h:" option; - do - case "$option" in - s ) EXECUTOR_SIZE=${OPTARG};; - e ) EXECUTORS=${OPTARG};; - c ) MAIN_CLASS_NAME=${OPTARG};; - d ) MAIN_DEFINITION_FILE=${OPTARG};; - n ) NAME=${OPTARG};; - p ) SPARK_POOL_NAME=${OPTARG};; - i ) SERVICE_PRINCIPAL_ID=${OPTARG};; - r ) SERVICE_PRINCIPAL_PASSWORD=${OPTARG};; - u ) SUBSCRIPTION_ID=${OPTARG};; - g ) RESOURCE_GROUP=${OPTARG};; - w ) WORKSPACE_NAME=${OPTARG};; - t ) TENANT_ID=${OPTARG};; - a ) COMMAND_LINE_ARGUMENTS=${OPTARG};; - o ) CONFIGURATION=${OPTARG};; - l ) LANGUAGE=${OPTARG};; - f ) REFERENCE_FILE=${OPTARG};; - q ) TAGS=${OPTARG};; - x ) SPARK_POOL_CONFIG_FILE=${OPTARG};; - y ) WAIT_UNTIL_JOB_FINISHED=${OPTARG};; - h ) 
WAITING_TIMEOUT_IN_SECONDS=${OPTARG};; - esac -done - -echo "Scheduling spark job in Synapse workspace ${WORKSPACE_NAME} and spark pool ${SPARK_POOL_NAME}." - -az login --service-principal --username ${SERVICE_PRINCIPAL_ID} --password ${SERVICE_PRINCIPAL_PASSWORD} -t ${TENANT_ID} -az account set --subscription ${SUBSCRIPTION_ID} - -OUTPUT=$(az synapse workspace show --name ${WORKSPACE_NAME} \ - --resource-group ${RESOURCE_GROUP} 2>&1) - -if [[ $OUTPUT == *"ResourceNotFoundError"* ]]; then - echo "The workspace doesn't exist. Cannot schedule the job." - exit 1 -fi - -# Check if the spark pool exists -OUTPUT=$(az synapse spark pool show --name ${SPARK_POOL_NAME} \ - --resource-group ${RESOURCE_GROUP} \ - --workspace-name ${WORKSPACE_NAME} 2>&1) - -# Create spark pool if not exists -if [[ $OUTPUT == *"ResourceNotFoundError"* ]]; then - echo "The spark pool doesn't exist, creating the spark pool!" - while read -r line; - do arrIN=(${line//:/ }); declare "${arrIN[0]}"="${arrIN[1]}"; - done < ${SPARK_POOL_CONFIG_FILE} - command="az synapse spark pool create --name ${SPARK_POOL_NAME} \ - --node-count ${SPARK_POOL_NODE_COUNT} \ - --node-size ${SPARK_POOL_NODE_SIZE} \ - --resource-group ${RESOURCE_GROUP} \ - --workspace-name ${WORKSPACE_NAME} \ - --spark-version ${SPARK_POOL_SPARK_VERSION} \ - --enable-auto-pause ${SPARK_POOL_ENABLE_AUTO_PAUSE} \ - --enable-auto-scale ${SPARK_POOL_ENABLE_AUTO_SCALE} \ - --delay ${SPARK_POOL_DELAY} \ - --max-node-count ${SPARK_POOL_MAX_NODE_COUNT} \ - --min-node-count ${SPARK_POOL_MIN_NODE_COUNT} \ - --node-size-family ${SPARK_POOL_NODE_SIZE_FAMILY} \ - --library-requirements-file ${SPARK_POOL_LIBRARY_REQUIREMENTS_FILE} \ - --default-spark-log-folder ${SPARK_POOL_DEFAULT_SPARK_LOG_FOLDER} \ - --spark-events-folder ${SPARK_POOL_SPARK_EVENTS_FOLDER} \ - --tags ${SPARK_POOL_TAGS}" - - OUTPUT=$(eval $command) - pool_name=$( echo $OUTPUT | jq ".name") - - # If cannot get Livy id from the output, return -1 and the error output - if [[ "$pool_name" == "" ]]; then - echo "Failed to create spark cluster. See errors above for more details." - exit 1 - else - echo "Created spark pool ${SPARK_POOL_NAME} in workspace ${WORKSPACE_NAME}." - fi -else - echo "Found spark pool ${SPARK_POOL_NAME} in workspace ${WORKSPACE_NAME}." -fi - -# Run az synapse spark job submit to submit spark job -command="az synapse spark job submit --executor-size ${EXECUTOR_SIZE} \ - --executors ${EXECUTORS} \ - --main-definition-file ${MAIN_DEFINITION_FILE} \ - --name ${NAME} \ - --spark-pool-name ${SPARK_POOL_NAME} \ - --workspace-name ${WORKSPACE_NAME}" - -if [[ "$MAIN_CLASS_NAME" == "" ]]; then - command="${command} --main-class-name \"\"" -else - command="${command} --main-class-name ${MAIN_CLASS_NAME}" -fi - -if [[ "$COMMAND_LINE_ARGUMENTS" != "" ]]; then - command="${command} --command-line-arguments ${COMMAND_LINE_ARGUMENTS}" -fi - -if [[ "$CONFIGURATION" != "" ]]; then - command="${command} --configuration ${CONFIGURATION}" -fi - -if [[ "$LANGUAGE" != "" ]]; then - command="${command} --language ${LANGUAGE}" -fi - -if [[ "$REFERENCE_FILE" != "" ]]; then - command="${command} --reference-files ${REFERENCE_FILE}" -fi - -if [[ "$TAGS" != "" ]]; then - command="${command} --tags ${TAGS}" -fi - -OUTPUT=$(eval $command) - -# Try the get the Livy id -job_id=$( echo $OUTPUT | jq ".id") - -# If cannot get Livy id from the output, return -1 and the error output -if [[ "$job_id" == "" ]]; then - echo "Failed to schedule spark job. See errors above for more details." 
- exit 1 -fi - -echo "Spark job with Livy id ${job_id} is submitted in spark pool ${SPARK_POOL_NAME}." - -# Get job result, return if the result is not "Uncertain" or reachs the timeout -if [[ "$WAIT_UNTIL_JOB_FINISHED" == "True" ]]; then - result="\"Uncertain\"" - iterations=$(expr ${WAITING_TIMEOUT_IN_SECONDS} / 10) - for i in $(seq 1 $iterations); do - sleep 10 - command="az synapse spark job show --livy-id ${job_id} --workspace-name ${WORKSPACE_NAME} --spark-pool-name ${SPARK_POOL_NAME}" - OUTPUT=$(eval $command) - result=$( echo $OUTPUT | jq ".result") - - if [[ "${result}" != "\"Uncertain\"" ]]; then - echo "Job finished with status ${result}!"; - echo "Details: ${OUTPUT}" - exit 0; - fi - waiting_time=$(expr ${i} \* 10) - echo "Job ${job_id} is still running! Total waiting time is ${waiting_time}s."; - done - echo "Job ${job_id} is still running! But reached timeout ${WAITING_TIMEOUT_IN_SECONDS}s."; - echo "Details: ${OUTPUT}" - exit 0; - -fi - -echo "Job ${job_id} is submitted!"; -echo "Details: ${OUTPUT}" -exit 0; \ No newline at end of file diff --git a/components/contrib/basics/Calculate_hash/component.yaml b/components/contrib/basics/Calculate_hash/component.yaml deleted file mode 100644 index b82826f5d5d..00000000000 --- a/components/contrib/basics/Calculate_hash/component.yaml +++ /dev/null @@ -1,44 +0,0 @@ -name: Calculate data hash -inputs: -- {name: Data} -- {name: Hash algorithm, type: String, default: SHA256, description: "Hash algorithm to use. Supported values are MD5, SHA1, SHA256, SHA512, SHA3"} -outputs: -- {name: Hash} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/basics/Calculate_hash/component.yaml' -implementation: - container: - image: alpine - command: - - sh - - -exc - - | - data_path="$0" - hash_algorithm="$1" - hash_path="$2" - mkdir -p "$(dirname "$hash_path")" - - hash_algorithm=$(echo "$hash_algorithm" | tr '[:upper:]' '[:lower:]') - case "$hash_algorithm" in - md5|sha1|sha256|sha512|sha3) hash_program="${hash_algorithm}sum";; - *) echo "Unsupported hash algorithm $hash_algorithm"; exit 1;; - esac - - if [ -d "$data_path" ]; then - # Calculating hash for directory - cd "$data_path" - find . -type f -print0 | - sort -z | - xargs -0 "$hash_program" | - "$hash_program" | - cut -d ' ' -f 1 > "$hash_path" - else - # Calculating hash for file - "$hash_program" "$data_path" | - cut -d ' ' -f 1 > "$hash_path" - fi - - {inputPath: Data} - - {inputValue: Hash algorithm} - - {outputPath: Hash} diff --git a/components/contrib/dataset_manipulation/split_data_into_folds/in_CSV/component.py b/components/contrib/dataset_manipulation/split_data_into_folds/in_CSV/component.py deleted file mode 100644 index c9b0ce2e351..00000000000 --- a/components/contrib/dataset_manipulation/split_data_into_folds/in_CSV/component.py +++ /dev/null @@ -1,90 +0,0 @@ -from kfp.components import InputPath, OutputPath, create_component_from_func - -def split_table_into_folds( - table_path: InputPath('CSV'), - - train_1_path: OutputPath('CSV'), - train_2_path: OutputPath('CSV'), - train_3_path: OutputPath('CSV'), - train_4_path: OutputPath('CSV'), - train_5_path: OutputPath('CSV'), - - test_1_path: OutputPath('CSV'), - test_2_path: OutputPath('CSV'), - test_3_path: OutputPath('CSV'), - test_4_path: OutputPath('CSV'), - test_5_path: OutputPath('CSV'), - - number_of_folds: int = 5, - random_seed: int = 0, -): - """Splits the data table into the specified number of folds. 
- - The data is split into the specified number of folds k (default: 5). - Each testing subsample has 1/k fraction of samples. The testing subsamples do not overlap. - Each training subsample has (k-1)/k fraction of samples. - The train_i subsample is produced by excluding test_i subsample form all samples. - - Inputs: - table: The data to split by rows - number_of_folds: Number of folds to split data into - random_seed: Random seed for reproducible splitting - - Outputs: - train_i: The i-th training subsample - test_i: The i-th testing subsample - - Annotations: - author: Alexey Volkov - - """ - import pandas - from sklearn import model_selection - - max_number_of_folds = 5 - - if number_of_folds < 1 or number_of_folds > max_number_of_folds: - raise ValueError('Number of folds must be between 1 and {}.'.format(max_number_of_folds)) - - df = pandas.read_csv( - table_path, - ) - splitter = model_selection.KFold( - n_splits=number_of_folds, - shuffle=True, - random_state=random_seed, - ) - folds = list(splitter.split(df)) - - fold_paths = [ - (train_1_path, test_1_path), - (train_2_path, test_2_path), - (train_3_path, test_3_path), - (train_4_path, test_4_path), - (train_5_path, test_5_path), - ] - - for i in range(max_number_of_folds): - (train_path, test_path) = fold_paths[i] - if i < len(folds): - (train_indices, test_indices) = folds[i] - train_fold = df.iloc[train_indices] - test_fold = df.iloc[test_indices] - else: - train_fold = df.iloc[0:0] - test_fold = df.iloc[0:0] - train_fold.to_csv(train_path, index=False) - test_fold.to_csv(test_path, index=False) - - -if __name__ == '__main__': - split_table_into_folds_op = create_component_from_func( - split_table_into_folds, - base_image='python:3.7', - packages_to_install=['scikit-learn==0.23.1', 'pandas==1.0.5'], - output_component_file='component.yaml', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/dataset_manipulation/split_data_into_folds/in_CSV/component.yaml", - }, - ) diff --git a/components/contrib/dataset_manipulation/split_data_into_folds/in_CSV/component.yaml b/components/contrib/dataset_manipulation/split_data_into_folds/in_CSV/component.yaml deleted file mode 100644 index 274ae0562e0..00000000000 --- a/components/contrib/dataset_manipulation/split_data_into_folds/in_CSV/component.yaml +++ /dev/null @@ -1,185 +0,0 @@ -name: Split table into folds -description: |- - Splits the data table into the specified number of folds. - - The data is split into the specified number of folds k (default: 5). - Each testing subsample has 1/k fraction of samples. The testing subsamples do not overlap. - Each training subsample has (k-1)/k fraction of samples. - The train_i subsample is produced by excluding test_i subsample form all samples. 
- - Inputs: - table: The data to split by rows - number_of_folds: Number of folds to split data into - random_seed: Random seed for reproducible splitting - - Outputs: - train_i: The i-th training subsample - test_i: The i-th testing subsample - - Annotations: - author: Alexey Volkov -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/dataset_manipulation/split_data_into_folds/in_CSV/component.yaml' -inputs: -- {name: table, type: CSV} -- {name: number_of_folds, type: Integer, default: '5', optional: true} -- {name: random_seed, type: Integer, default: '0', optional: true} -outputs: -- {name: train_1, type: CSV} -- {name: train_2, type: CSV} -- {name: train_3, type: CSV} -- {name: train_4, type: CSV} -- {name: train_5, type: CSV} -- {name: test_1, type: CSV} -- {name: test_2, type: CSV} -- {name: test_3, type: CSV} -- {name: test_4, type: CSV} -- {name: test_5, type: CSV} -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'scikit-learn==0.23.1' 'pandas==1.0.5' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 - -m pip install --quiet --no-warn-script-location 'scikit-learn==0.23.1' 'pandas==1.0.5' - --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def split_table_into_folds( - table_path, - - train_1_path, - train_2_path, - train_3_path, - train_4_path, - train_5_path, - - test_1_path, - test_2_path, - test_3_path, - test_4_path, - test_5_path, - - number_of_folds = 5, - random_seed = 0, - ): - """Splits the data table into the specified number of folds. - - The data is split into the specified number of folds k (default: 5). - Each testing subsample has 1/k fraction of samples. The testing subsamples do not overlap. - Each training subsample has (k-1)/k fraction of samples. - The train_i subsample is produced by excluding test_i subsample form all samples. 
- - Inputs: - table: The data to split by rows - number_of_folds: Number of folds to split data into - random_seed: Random seed for reproducible splitting - - Outputs: - train_i: The i-th training subsample - test_i: The i-th testing subsample - - Annotations: - author: Alexey Volkov - - """ - import pandas - from sklearn import model_selection - - max_number_of_folds = 5 - - if number_of_folds < 1 or number_of_folds > max_number_of_folds: - raise ValueError('Number of folds must be between 1 and {}.'.format(max_number_of_folds)) - - df = pandas.read_csv( - table_path, - ) - splitter = model_selection.KFold( - n_splits=number_of_folds, - shuffle=True, - random_state=random_seed, - ) - folds = list(splitter.split(df)) - - fold_paths = [ - (train_1_path, test_1_path), - (train_2_path, test_2_path), - (train_3_path, test_3_path), - (train_4_path, test_4_path), - (train_5_path, test_5_path), - ] - - for i in range(max_number_of_folds): - (train_path, test_path) = fold_paths[i] - if i < len(folds): - (train_indices, test_indices) = folds[i] - train_fold = df.iloc[train_indices] - test_fold = df.iloc[test_indices] - else: - train_fold = df.iloc[0:0] - test_fold = df.iloc[0:0] - train_fold.to_csv(train_path, index=False) - test_fold.to_csv(test_path, index=False) - - import argparse - _parser = argparse.ArgumentParser(prog='Split table into folds', description='Splits the data table into the specified number of folds.\n\n The data is split into the specified number of folds k (default: 5).\n Each testing subsample has 1/k fraction of samples. The testing subsamples do not overlap.\n Each training subsample has (k-1)/k fraction of samples.\n The train_i subsample is produced by excluding test_i subsample form all samples.\n\n Inputs:\n table: The data to split by rows\n number_of_folds: Number of folds to split data into\n random_seed: Random seed for reproducible splitting\n\n Outputs:\n train_i: The i-th training subsample\n test_i: The i-th testing subsample\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--table", dest="table_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--number-of-folds", dest="number_of_folds", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--random-seed", dest="random_seed", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--train-1", dest="train_1_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--train-2", dest="train_2_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--train-3", dest="train_3_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--train-4", dest="train_4_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--train-5", dest="train_5_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--test-1", dest="test_1_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--test-2", dest="test_2_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--test-3", dest="test_3_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--test-4", dest="test_4_path", 
type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--test-5", dest="test_5_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = split_table_into_folds(**_parsed_args) - args: - - --table - - {inputPath: table} - - if: - cond: {isPresent: number_of_folds} - then: - - --number-of-folds - - {inputValue: number_of_folds} - - if: - cond: {isPresent: random_seed} - then: - - --random-seed - - {inputValue: random_seed} - - --train-1 - - {outputPath: train_1} - - --train-2 - - {outputPath: train_2} - - --train-3 - - {outputPath: train_3} - - --train-4 - - {outputPath: train_4} - - --train-5 - - {outputPath: train_5} - - --test-1 - - {outputPath: test_1} - - --test-2 - - {outputPath: test_2} - - --test-3 - - {outputPath: test_3} - - --test-4 - - {outputPath: test_4} - - --test-5 - - {outputPath: test_5} diff --git a/components/contrib/datasets/Chicago_Taxi_Trips/component.yaml b/components/contrib/datasets/Chicago_Taxi_Trips/component.yaml deleted file mode 100644 index 5566c2af555..00000000000 --- a/components/contrib/datasets/Chicago_Taxi_Trips/component.yaml +++ /dev/null @@ -1,43 +0,0 @@ -name: Chicago Taxi Trips dataset -description: | - City of Chicago Taxi Trips dataset: https://data.cityofchicago.org/Transportation/Taxi-Trips/wrvz-psew - - The input parameters configure the SQL query to the database. - The dataset is pretty big, so limit the number of results using the `Limit` or `Where` parameters. - Read [Socrata dev](https://dev.socrata.com/docs/queries/) for the advanced query syntax -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/datasets/Chicago_Taxi_Trips/component.yaml' -inputs: -- {name: Where, type: String, default: 'trip_start_timestamp>="1900-01-01" AND trip_start_timestamp<"2100-01-01"'} -- {name: Limit, type: Integer, default: '1000', description: 'Number of rows to return. The rows are randomly sampled.'} -- {name: Select, type: String, default: 'trip_id,taxi_id,trip_start_timestamp,trip_end_timestamp,trip_seconds,trip_miles,pickup_census_tract,dropoff_census_tract,pickup_community_area,dropoff_community_area,fare,tips,tolls,extras,trip_total,payment_type,company,pickup_centroid_latitude,pickup_centroid_longitude,pickup_centroid_location,dropoff_centroid_latitude,dropoff_centroid_longitude,dropoff_centroid_location'} -- {name: Format, type: String, default: 'csv', description: 'Output data format. Suports csv,tsv,cml,rdf,json'} -outputs: -- {name: Table, description: 'Result type depends on format. CSV and TSV have header.'} -implementation: - container: - # image: curlimages/curl # Sets a non-root user which cannot write to mounted volumes. 
See https://github.com/curl/curl-docker/issues/22 - image: byrnedo/alpine-curl@sha256:548379d0a4a0c08b9e55d9d87a592b7d35d9ab3037f4936f5ccd09d0b625a342 - command: - - sh - - -c - - | - set -e -x -o pipefail - output_path="$0" - select="$1" - where="$2" - limit="$3" - format="$4" - mkdir -p "$(dirname "$output_path")" - curl --get 'https://data.cityofchicago.org/resource/wrvz-psew.'"${format}" \ - --data-urlencode '$limit='"${limit}" \ - --data-urlencode '$where='"${where}" \ - --data-urlencode '$select='"${select}" \ - | tr -d '"' > "$output_path" # Removing unneeded quotes around all numbers - - {outputPath: Table} - - {inputValue: Select} - - {inputValue: Where} - - {inputValue: Limit} - - {inputValue: Format} diff --git a/components/contrib/datasets/HuggingFace/Load_dataset/component.py b/components/contrib/datasets/HuggingFace/Load_dataset/component.py deleted file mode 100644 index a8fc1431985..00000000000 --- a/components/contrib/datasets/HuggingFace/Load_dataset/component.py +++ /dev/null @@ -1,30 +0,0 @@ -from typing import NamedTuple - -from kfp.components import create_component_from_func, OutputPath - - -def load_dataset_using_huggingface( - dataset_name: str, - dataset_dict_path: OutputPath('HuggingFaceDatasetDict'), -) -> NamedTuple('Outputs', [ - ('splits', list), -]): - from datasets import load_dataset - - dataset_dict = load_dataset(dataset_name) - dataset_dict.save_to_disk(dataset_dict_path) - splits = list(dataset_dict.keys()) - return (splits,) - - -if __name__ == '__main__': - load_dataset_op = create_component_from_func( - load_dataset_using_huggingface, - base_image='python:3.9', - packages_to_install=['datasets==1.6.2'], - annotations={ - 'author': 'Alexey Volkov ', - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/datasets/HuggingFace/Load_dataset/component.yaml", - }, - output_component_file='component.yaml', - ) diff --git a/components/contrib/datasets/HuggingFace/Load_dataset/component.yaml b/components/contrib/datasets/HuggingFace/Load_dataset/component.yaml deleted file mode 100644 index e7ca4e4fb2b..00000000000 --- a/components/contrib/datasets/HuggingFace/Load_dataset/component.yaml +++ /dev/null @@ -1,83 +0,0 @@ -name: Load dataset using huggingface -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/datasets/HuggingFace/Load_dataset/component.yaml' -inputs: -- {name: dataset_name, type: String} -outputs: -- {name: dataset_dict, type: HuggingFaceDatasetDict} -- {name: splits, type: JsonArray} -implementation: - container: - image: python:3.9 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'datasets==1.6.2' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install - --quiet --no-warn-script-location 'datasets==1.6.2' --user) && "$0" "$@" - - sh - - -ec - - | - program_path=$(mktemp) - printf "%s" "$0" > "$program_path" - python3 -u "$program_path" "$@" - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def load_dataset_using_huggingface( - dataset_name, - dataset_dict_path, - ): - from datasets import load_dataset - - dataset_dict = load_dataset(dataset_name) - dataset_dict.save_to_disk(dataset_dict_path) - splits = list(dataset_dict.keys()) - return (splits,) - - def _serialize_json(obj) -> str: - if isinstance(obj, str): - return 
obj - import json - def default_serializer(obj): - if hasattr(obj, 'to_struct'): - return obj.to_struct() - else: - raise TypeError("Object of type '%s' is not JSON serializable and does not have .to_struct() method." % obj.__class__.__name__) - return json.dumps(obj, default=default_serializer, sort_keys=True) - - import argparse - _parser = argparse.ArgumentParser(prog='Load dataset using huggingface', description='') - _parser.add_argument("--dataset-name", dest="dataset_name", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--dataset-dict", dest="dataset_dict_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = load_dataset_using_huggingface(**_parsed_args) - - _output_serializers = [ - _serialize_json, - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - --dataset-name - - {inputValue: dataset_name} - - --dataset-dict - - {outputPath: dataset_dict} - - '----output-paths' - - {outputPath: splits} diff --git a/components/contrib/datasets/HuggingFace/Split_dataset/component.py b/components/contrib/datasets/HuggingFace/Split_dataset/component.py deleted file mode 100644 index 5392fa8f1ef..00000000000 --- a/components/contrib/datasets/HuggingFace/Split_dataset/component.py +++ /dev/null @@ -1,36 +0,0 @@ -from kfp.components import create_component_from_func, InputPath, OutputPath - - -def split_dataset_huggingface( - dataset_dict_path: InputPath('HuggingFaceDatasetDict'), - dataset_split_path: OutputPath('HuggingFaceDataset'), - dataset_path: OutputPath('HuggingFaceArrowDataset'), - # dataset_indices_path: OutputPath('HuggingFaceArrowDataset'), - dataset_info_path: OutputPath(dict), - dataset_state_path: OutputPath(dict), - split_name: str = None, -): - import os - import shutil - from datasets import config as datasets_config - - print(f'DatasetDict contents: {os.listdir(dataset_dict_path)}') - shutil.copytree(os.path.join(dataset_dict_path, split_name), dataset_split_path) - print(f'Dataset contents: {os.listdir(os.path.join(dataset_dict_path, split_name))}') - shutil.copy(os.path.join(dataset_dict_path, split_name, datasets_config.DATASET_ARROW_FILENAME), dataset_path) - # shutil.copy(os.path.join(dataset_dict_path, split_name, datasets_config.DATASET_INDICES_FILENAME), dataset_indices_path) - shutil.copy(os.path.join(dataset_dict_path, split_name, datasets_config.DATASET_INFO_FILENAME), dataset_info_path) - shutil.copy(os.path.join(dataset_dict_path, split_name, datasets_config.DATASET_STATE_JSON_FILENAME), dataset_state_path) - - -if __name__ == '__main__': - split_dataset_op = create_component_from_func( - split_dataset_huggingface, - base_image='python:3.9', - packages_to_install=['datasets==1.6.2'], - annotations={ - 'author': 'Alexey Volkov ', - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/datasets/HuggingFace/Split_dataset/component.yaml", - }, - output_component_file='component.yaml', - ) diff --git a/components/contrib/datasets/HuggingFace/Split_dataset/component.yaml b/components/contrib/datasets/HuggingFace/Split_dataset/component.yaml deleted file mode 100644 index 
3a46f6420a7..00000000000 --- a/components/contrib/datasets/HuggingFace/Split_dataset/component.yaml +++ /dev/null @@ -1,82 +0,0 @@ -name: Split dataset huggingface -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/datasets/HuggingFace/Split_dataset/component.yaml' -inputs: -- {name: dataset_dict, type: HuggingFaceDatasetDict} -- {name: split_name, type: String, optional: true} -outputs: -- {name: dataset_split, type: HuggingFaceDataset} -- {name: dataset, type: HuggingFaceArrowDataset} -- {name: dataset_info, type: JsonObject} -- {name: dataset_state, type: JsonObject} -implementation: - container: - image: python:3.9 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'datasets==1.6.2' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install - --quiet --no-warn-script-location 'datasets==1.6.2' --user) && "$0" "$@" - - sh - - -ec - - | - program_path=$(mktemp) - printf "%s" "$0" > "$program_path" - python3 -u "$program_path" "$@" - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def split_dataset_huggingface( - dataset_dict_path, - dataset_split_path, - dataset_path, - # dataset_indices_path: OutputPath('HuggingFaceArrowDataset'), - dataset_info_path, - dataset_state_path, - split_name = None, - ): - import os - import shutil - from datasets import config as datasets_config - - print(f'DatasetDict contents: {os.listdir(dataset_dict_path)}') - shutil.copytree(os.path.join(dataset_dict_path, split_name), dataset_split_path) - print(f'Dataset contents: {os.listdir(os.path.join(dataset_dict_path, split_name))}') - shutil.copy(os.path.join(dataset_dict_path, split_name, datasets_config.DATASET_ARROW_FILENAME), dataset_path) - # shutil.copy(os.path.join(dataset_dict_path, split_name, datasets_config.DATASET_INDICES_FILENAME), dataset_indices_path) - shutil.copy(os.path.join(dataset_dict_path, split_name, datasets_config.DATASET_INFO_FILENAME), dataset_info_path) - shutil.copy(os.path.join(dataset_dict_path, split_name, datasets_config.DATASET_STATE_JSON_FILENAME), dataset_state_path) - - import argparse - _parser = argparse.ArgumentParser(prog='Split dataset huggingface', description='') - _parser.add_argument("--dataset-dict", dest="dataset_dict_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--split-name", dest="split_name", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--dataset-split", dest="dataset_split_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--dataset", dest="dataset_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--dataset-info", dest="dataset_info_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--dataset-state", dest="dataset_state_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = split_dataset_huggingface(**_parsed_args) - args: - - --dataset-dict - - {inputPath: dataset_dict} - - if: - cond: {isPresent: split_name} - then: - - --split-name - - {inputValue: split_name} - - --dataset-split - - {outputPath: dataset_split} - - --dataset - - {outputPath: 
dataset} - - --dataset-info - - {outputPath: dataset_info} - - --dataset-state - - {outputPath: dataset_state} diff --git a/components/contrib/datasets/HuggingFace/_samples/sample.pipeline.py b/components/contrib/datasets/HuggingFace/_samples/sample.pipeline.py deleted file mode 100644 index bf6ced5b7c8..00000000000 --- a/components/contrib/datasets/HuggingFace/_samples/sample.pipeline.py +++ /dev/null @@ -1,24 +0,0 @@ -from kfp import components -from kfp import dsl - - -load_dataset_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/d0e14a1dad4b851ad2a60a0c1a8201493f3d931c/components/datasets/HuggingFace/Load_dataset/component.yaml') -split_dataset_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/d0e14a1dad4b851ad2a60a0c1a8201493f3d931c/components/datasets/HuggingFace/Split_dataset/component.yaml') - - -def huggingface_pipeline(): - dataset_dict_task = load_dataset_op(dataset_name='imdb') - with dsl.ParallelFor(dataset_dict_task.outputs['splits']) as split_name: - deataset_task = split_dataset_op( - dataset_dict=dataset_dict_task.outputs['dataset_dict'], - split_name=split_name, - ) - - -if __name__ == '__main__': - import kfp - kfp_endpoint = None - kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func( - huggingface_pipeline, - arguments={} - ) diff --git a/components/contrib/filesystem/get_file/component.yaml b/components/contrib/filesystem/get_file/component.yaml deleted file mode 100644 index cdd226a468d..00000000000 --- a/components/contrib/filesystem/get_file/component.yaml +++ /dev/null @@ -1,24 +0,0 @@ -name: Get file -description: Get file from directory. -inputs: -- {name: Directory, type: Directory} -- {name: Subpath, type: String} -outputs: -- {name: File} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/filesystem/get_file/component.yaml' -implementation: - container: - image: alpine - command: - - sh - - -ex - - -c - - | - mkdir -p "$(dirname "$2")" - cp -r "$0/$1" "$2" - - inputPath: Directory - - inputValue: Subpath - - outputPath: File diff --git a/components/contrib/filesystem/get_subdirectory/component.yaml b/components/contrib/filesystem/get_subdirectory/component.yaml deleted file mode 100644 index a7c983d1412..00000000000 --- a/components/contrib/filesystem/get_subdirectory/component.yaml +++ /dev/null @@ -1,24 +0,0 @@ -name: Get subdirectory -description: Get subdirectory from directory. -inputs: -- {name: Directory, type: Directory} -- {name: Subpath, type: String} -outputs: -- {name: Subdir, type: Directory} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/filesystem/get_subdirectory/component.yaml' -implementation: - container: - image: alpine - command: - - sh - - -ex - - -c - - | - mkdir -p "$(dirname "$2")" - cp -r "$0/$1" "$2" - - inputPath: Directory - - inputValue: Subpath - - outputPath: Subdir diff --git a/components/contrib/filesystem/list_items/component.yaml b/components/contrib/filesystem/list_items/component.yaml deleted file mode 100644 index f00846b547e..00000000000 --- a/components/contrib/filesystem/list_items/component.yaml +++ /dev/null @@ -1,23 +0,0 @@ -name: List items -description: Recursively list directory contents. 
-inputs: -- {name: Directory, type: Directory} -outputs: -- {name: Items} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/filesystem/list_items/component.yaml' -implementation: - container: - image: alpine - command: - - sh - - -ex - - -c - - | - mkdir -p "$(dirname "$1")" - #ls --almost-all --recursive "$0" > "$1" - ls -A -R "$0" > "$1" - - inputPath: Directory - - outputPath: Items diff --git a/components/contrib/git/clone/component.yaml b/components/contrib/git/clone/component.yaml deleted file mode 100644 index 632adf43866..00000000000 --- a/components/contrib/git/clone/component.yaml +++ /dev/null @@ -1,23 +0,0 @@ -name: Git clone -description: Creates a shallow clone of the specified repo branch -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/git/clone/component.yaml' - volatile_component: "true" -inputs: -- {name: Repo URI, type: URI} -- {name: Branch, type: String, default: master} -outputs: -- {name: Repo dir, type: Directory} -implementation: - container: - image: alpine/git - command: - - git - - clone - - --depth=1 - - --branch - - inputValue: Branch - - inputValue: Repo URI - - outputPath: Repo dir diff --git a/components/contrib/google-cloud/Optimizer/Add_measurement_for_trial/component.py b/components/contrib/google-cloud/Optimizer/Add_measurement_for_trial/component.py deleted file mode 100644 index 708deb65e08..00000000000 --- a/components/contrib/google-cloud/Optimizer/Add_measurement_for_trial/component.py +++ /dev/null @@ -1,122 +0,0 @@ -from typing import NamedTuple - -from kfp.components import create_component_from_func - -def add_measurement_for_trial_in_gcp_ai_platform_optimizer( - trial_name: str, - metric_value: float, - complete_trial: bool = True, - step_count: float = None, - gcp_project_id: str = None, - gcp_region: str = "us-central1", -) -> NamedTuple('Outputs', [ - ("trial_name", list), - ("trial", dict), - ("stop_trial", bool), -]): - """Add measurement for a trial and check whether to continue. - See https://cloud.google.com/ai-platform/optimizer/docs - - Annotations: - author: Alexey Volkov - - Args: - trial_name: Full trial resource name. - metric_value: Result of the trial evaluation. - step_count: Optional. The number of training steps performed with the model. Can be used when checking early stopping. - complete_trial: Whether the trial should be completed. Only completed trials are used to suggest new trials. Default is True. - """ - - import logging - import time - - import google.auth - from googleapiclient import discovery - - logging.getLogger().setLevel(logging.INFO) - - client_id = 'client1' - metric_name = 'metric' - - credentials, default_project_id = google.auth.default() - - # Validating and inferring the arguments - if not gcp_project_id: - gcp_project_id = default_project_id - - # Building the API client. - # The main API does not work, so we need to build from the published discovery document. 
- def create_caip_optimizer_client(project_id): - from google.cloud import storage - _OPTIMIZER_API_DOCUMENT_BUCKET = 'caip-optimizer-public' - _OPTIMIZER_API_DOCUMENT_FILE = 'api/ml_public_google_rest_v1.json' - client = storage.Client(project_id) - bucket = client.get_bucket(_OPTIMIZER_API_DOCUMENT_BUCKET) - blob = bucket.get_blob(_OPTIMIZER_API_DOCUMENT_FILE) - discovery_document = blob.download_as_bytes() - return discovery.build_from_document(service=discovery_document) - - # Workaround for the Optimizer bug: Optimizer returns resource names that use project number, but only supports resource names with project IDs when making requests - def get_project_number(project_id): - service = discovery.build('cloudresourcemanager', 'v1', credentials=credentials) - response = service.projects().get(projectId=project_id).execute() - return response['projectNumber'] - - gcp_project_number = get_project_number(gcp_project_id) - - def fix_resource_name(name): - return name.replace(gcp_project_number, gcp_project_id) - - ml_api = create_caip_optimizer_client(gcp_project_id) - trials_api = ml_api.projects().locations().studies().trials() - operations_api = ml_api.projects().locations().operations() - - measurement = { - 'measurement': { - 'stepCount': step_count, - 'metrics': [{ - 'metric': metric_name, - 'value': metric_value, - }], - }, - } - add_measurement_response = trials_api.addMeasurement( - name=fix_resource_name(trial_name), - body=measurement, - ).execute() - - if complete_trial: - should_stop_trial = True - complete_response = trials_api.complete( - name=fix_resource_name(trial_name), - ).execute() - return (trial_name, complete_response, should_stop_trial) - else: - check_early_stopping_response = trials_api.checkEarlyStoppingState( - name=fix_resource_name(trial_name), - ).execute() - operation_name = check_early_stopping_response['name'] - while True: - get_operation_response = operations_api.get( - name=fix_resource_name(operation_name), - ).execute() - if get_operation_response.get('done'): - break - logging.info('Not finished yet: ' + str(get_operation_response)) - time.sleep(10) - operation_response = get_operation_response['response'] - should_stop_trial = operation_response['shouldStop'] - return (trial_name, add_measurement_response, should_stop_trial) - - -if __name__ == '__main__': - add_measurement_for_trial_in_gcp_ai_platform_optimizer_op = create_component_from_func( - add_measurement_for_trial_in_gcp_ai_platform_optimizer, - base_image='python:3.8', - packages_to_install=['google-api-python-client==1.12.3', 'google-cloud-storage==1.31.2', 'google-auth==1.21.3'], - output_component_file='component.yaml', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/google-cloud/Optimizer/Add_measurement_for_trial/component.yaml", - }, - ) diff --git a/components/contrib/google-cloud/Optimizer/Add_measurement_for_trial/component.yaml b/components/contrib/google-cloud/Optimizer/Add_measurement_for_trial/component.yaml deleted file mode 100644 index 912bcb70ba6..00000000000 --- a/components/contrib/google-cloud/Optimizer/Add_measurement_for_trial/component.yaml +++ /dev/null @@ -1,220 +0,0 @@ -name: Add measurement for trial in gcp ai platform optimizer -description: Add measurement for a trial and check whether to continue. 
-inputs: -- {name: trial_name, type: String, description: Full trial resource name.} -- {name: metric_value, type: Float, description: Result of the trial evaluation.} -- name: complete_trial - type: Boolean - description: Whether the trial should be completed. Only completed trials are used - to suggest new trials. Default is True. - default: "True" - optional: true -- {name: step_count, type: Float, description: Optional. The number of training steps - performed with the model. Can be used when checking early stopping., optional: true} -- {name: gcp_project_id, type: String, optional: true} -- {name: gcp_region, type: String, default: us-central1, optional: true} -outputs: -- {name: trial_name, type: JsonArray} -- {name: trial, type: JsonObject} -- {name: stop_trial, type: Boolean} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/google-cloud/Optimizer/Add_measurement_for_trial/component.yaml' -implementation: - container: - image: python:3.8 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'google-api-python-client==1.12.3' 'google-cloud-storage==1.31.2' 'google-auth==1.21.3' - || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'google-api-python-client==1.12.3' 'google-cloud-storage==1.31.2' 'google-auth==1.21.3' - --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def add_measurement_for_trial_in_gcp_ai_platform_optimizer( - trial_name, - metric_value, - complete_trial = True, - step_count = None, - gcp_project_id = None, - gcp_region = "us-central1", - ): - """Add measurement for a trial and check whether to continue. - See https://cloud.google.com/ai-platform/optimizer/docs - - Annotations: - author: Alexey Volkov - - Args: - trial_name: Full trial resource name. - metric_value: Result of the trial evaluation. - step_count: Optional. The number of training steps performed with the model. Can be used when checking early stopping. - complete_trial: Whether the trial should be completed. Only completed trials are used to suggest new trials. Default is True. - """ - - import logging - import time - - import google.auth - from googleapiclient import discovery - - logging.getLogger().setLevel(logging.INFO) - - client_id = 'client1' - metric_name = 'metric' - - credentials, default_project_id = google.auth.default() - - # Validating and inferring the arguments - if not gcp_project_id: - gcp_project_id = default_project_id - - # Building the API client. - # The main API does not work, so we need to build from the published discovery document. 
- def create_caip_optimizer_client(project_id): - from google.cloud import storage - _OPTIMIZER_API_DOCUMENT_BUCKET = 'caip-optimizer-public' - _OPTIMIZER_API_DOCUMENT_FILE = 'api/ml_public_google_rest_v1.json' - client = storage.Client(project_id) - bucket = client.get_bucket(_OPTIMIZER_API_DOCUMENT_BUCKET) - blob = bucket.get_blob(_OPTIMIZER_API_DOCUMENT_FILE) - discovery_document = blob.download_as_bytes() - return discovery.build_from_document(service=discovery_document) - - # Workaround for the Optimizer bug: Optimizer returns resource names that use project number, but only supports resource names with project IDs when making requests - def get_project_number(project_id): - service = discovery.build('cloudresourcemanager', 'v1', credentials=credentials) - response = service.projects().get(projectId=project_id).execute() - return response['projectNumber'] - - gcp_project_number = get_project_number(gcp_project_id) - - def fix_resource_name(name): - return name.replace(gcp_project_number, gcp_project_id) - - ml_api = create_caip_optimizer_client(gcp_project_id) - trials_api = ml_api.projects().locations().studies().trials() - operations_api = ml_api.projects().locations().operations() - - measurement = { - 'measurement': { - 'stepCount': step_count, - 'metrics': [{ - 'metric': metric_name, - 'value': metric_value, - }], - }, - } - add_measurement_response = trials_api.addMeasurement( - name=fix_resource_name(trial_name), - body=measurement, - ).execute() - - if complete_trial: - should_stop_trial = True - complete_response = trials_api.complete( - name=fix_resource_name(trial_name), - ).execute() - return (trial_name, complete_response, should_stop_trial) - else: - check_early_stopping_response = trials_api.checkEarlyStoppingState( - name=fix_resource_name(trial_name), - ).execute() - operation_name = check_early_stopping_response['name'] - while True: - get_operation_response = operations_api.get( - name=fix_resource_name(operation_name), - ).execute() - if get_operation_response.get('done'): - break - logging.info('Not finished yet: ' + str(get_operation_response)) - time.sleep(10) - operation_response = get_operation_response['response'] - should_stop_trial = operation_response['shouldStop'] - return (trial_name, add_measurement_response, should_stop_trial) - - def _serialize_bool(bool_value: bool) -> str: - if isinstance(bool_value, str): - return bool_value - if not isinstance(bool_value, bool): - raise TypeError('Value "{}" has type "{}" instead of bool.'.format(str(bool_value), str(type(bool_value)))) - return str(bool_value) - - def _serialize_json(obj) -> str: - if isinstance(obj, str): - return obj - import json - def default_serializer(obj): - if hasattr(obj, 'to_struct'): - return obj.to_struct() - else: - raise TypeError("Object of type '%s' is not JSON serializable and does not have .to_struct() method." 
% obj.__class__.__name__) - return json.dumps(obj, default=default_serializer, sort_keys=True) - - def _deserialize_bool(s) -> bool: - from distutils.util import strtobool - return strtobool(s) == 1 - - import argparse - _parser = argparse.ArgumentParser(prog='Add measurement for trial in gcp ai platform optimizer', description='Add measurement for a trial and check whether to continue.') - _parser.add_argument("--trial-name", dest="trial_name", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--metric-value", dest="metric_value", type=float, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--complete-trial", dest="complete_trial", type=_deserialize_bool, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--step-count", dest="step_count", type=float, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--gcp-project-id", dest="gcp_project_id", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--gcp-region", dest="gcp_region", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=3) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = add_measurement_for_trial_in_gcp_ai_platform_optimizer(**_parsed_args) - - _output_serializers = [ - _serialize_json, - _serialize_json, - _serialize_bool, - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - --trial-name - - {inputValue: trial_name} - - --metric-value - - {inputValue: metric_value} - - if: - cond: {isPresent: complete_trial} - then: - - --complete-trial - - {inputValue: complete_trial} - - if: - cond: {isPresent: step_count} - then: - - --step-count - - {inputValue: step_count} - - if: - cond: {isPresent: gcp_project_id} - then: - - --gcp-project-id - - {inputValue: gcp_project_id} - - if: - cond: {isPresent: gcp_region} - then: - - --gcp-region - - {inputValue: gcp_region} - - '----output-paths' - - {outputPath: trial_name} - - {outputPath: trial} - - {outputPath: stop_trial} diff --git a/components/contrib/google-cloud/Optimizer/Create_study/component.py b/components/contrib/google-cloud/Optimizer/Create_study/component.py deleted file mode 100644 index cbe1341ca31..00000000000 --- a/components/contrib/google-cloud/Optimizer/Create_study/component.py +++ /dev/null @@ -1,84 +0,0 @@ -from typing import NamedTuple - -from kfp.components import create_component_from_func - -def create_study_in_gcp_ai_platform_optimizer( - study_id: str, - parameter_specs: list, - optimization_goal: str = 'MAXIMIZE', - metric_specs: list = None, - gcp_project_id: str = None, - gcp_region: str = "us-central1", -) -> NamedTuple('Outputs', [ - ("study_name", str), -]): - """Creates a Google Cloud AI Plaform Optimizer study. - See https://cloud.google.com/ai-platform/optimizer/docs - - Annotations: - author: Alexey Volkov - - Args: - study_id: Name of the study. - parameter_specs: List of parameter specs. See https://cloud.google.com/ai-platform/optimizer/docs/reference/rest/v1/projects.locations.studies#parameterspec - optimization_goal: Optimization goal when optimizing a single metric. Can be MAXIMIZE (default) or MINIMIZE. Ignored if metric_specs list is provided. - metric_specs: List of metric specs. 
See https://cloud.google.com/ai-platform/optimizer/docs/reference/rest/v1/projects.locations.studies#metricspec - """ - - import logging - import google.auth - - logging.getLogger().setLevel(logging.INFO) - - # Validating and inferring the arguments - if not gcp_project_id: - _, gcp_project_id = google.auth.default() - - # Building the API client. - # The main API does not work, so we need to build from the published discovery document. - def create_caip_optimizer_client(project_id): - from google.cloud import storage - from googleapiclient import discovery - _OPTIMIZER_API_DOCUMENT_BUCKET = 'caip-optimizer-public' - _OPTIMIZER_API_DOCUMENT_FILE = 'api/ml_public_google_rest_v1.json' - client = storage.Client(project_id) - bucket = client.get_bucket(_OPTIMIZER_API_DOCUMENT_BUCKET) - blob = bucket.get_blob(_OPTIMIZER_API_DOCUMENT_FILE) - discovery_document = blob.download_as_bytes() - return discovery.build_from_document(service=discovery_document) - - ml_api = create_caip_optimizer_client(gcp_project_id) - - if not metric_specs: - metric_specs=[{ - 'metric': 'metric', - 'goal': optimization_goal, - }] - study_config = { - 'algorithm': 'ALGORITHM_UNSPECIFIED', # Let the service choose the `default` algorithm. - 'parameters': parameter_specs, - 'metrics': metric_specs, - } - study = {'study_config': study_config} - - create_study_request = ml_api.projects().locations().studies().create( - parent=f'projects/{gcp_project_id}/locations/{gcp_region}', - studyId=study_id, - body=study, - ) - create_study_response = create_study_request.execute() - study_name = create_study_response['name'] - return (study_name,) - - -if __name__ == '__main__': - create_study_in_gcp_ai_platform_optimizer_op = create_component_from_func( - create_study_in_gcp_ai_platform_optimizer, - base_image='python:3.8', - packages_to_install=['google-api-python-client==1.12.3', 'google-cloud-storage==1.31.2', 'google-auth==1.21.3'], - output_component_file='component.yaml', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/google-cloud/Optimizer/Create_study/component.yaml", - }, - ) diff --git a/components/contrib/google-cloud/Optimizer/Create_study/component.yaml b/components/contrib/google-cloud/Optimizer/Create_study/component.yaml deleted file mode 100644 index 9120ee9324c..00000000000 --- a/components/contrib/google-cloud/Optimizer/Create_study/component.yaml +++ /dev/null @@ -1,160 +0,0 @@ -name: Create study in gcp ai platform optimizer -description: Creates a Google Cloud AI Plaform Optimizer study. -inputs: -- {name: study_id, type: String, description: Name of the study.} -- {name: parameter_specs, type: JsonArray, description: 'List of parameter specs. - See https://cloud.google.com/ai-platform/optimizer/docs/reference/rest/v1/projects.locations.studies#parameterspec'} -- {name: optimization_goal, type: String, description: Optimization goal when optimizing - a single metric. Can be MAXIMIZE (default) or MINIMIZE. Ignored if metric_specs - list is provided., default: MAXIMIZE, optional: true} -- {name: metric_specs, type: JsonArray, description: 'List of metric specs. 
See https://cloud.google.com/ai-platform/optimizer/docs/reference/rest/v1/projects.locations.studies#metricspec', - optional: true} -- {name: gcp_project_id, type: String, optional: true} -- {name: gcp_region, type: String, default: us-central1, optional: true} -outputs: -- {name: study_name, type: String} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/google-cloud/Optimizer/Create_study/component.yaml' -implementation: - container: - image: python:3.8 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'google-api-python-client==1.12.3' 'google-cloud-storage==1.31.2' 'google-auth==1.21.3' - || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'google-api-python-client==1.12.3' 'google-cloud-storage==1.31.2' 'google-auth==1.21.3' - --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def create_study_in_gcp_ai_platform_optimizer( - study_id, - parameter_specs, - optimization_goal = 'MAXIMIZE', - metric_specs = None, - gcp_project_id = None, - gcp_region = "us-central1", - ): - """Creates a Google Cloud AI Plaform Optimizer study. - See https://cloud.google.com/ai-platform/optimizer/docs - - Annotations: - author: Alexey Volkov - - Args: - study_id: Name of the study. - parameter_specs: List of parameter specs. See https://cloud.google.com/ai-platform/optimizer/docs/reference/rest/v1/projects.locations.studies#parameterspec - optimization_goal: Optimization goal when optimizing a single metric. Can be MAXIMIZE (default) or MINIMIZE. Ignored if metric_specs list is provided. - metric_specs: List of metric specs. See https://cloud.google.com/ai-platform/optimizer/docs/reference/rest/v1/projects.locations.studies#metricspec - """ - - import logging - import google.auth - - logging.getLogger().setLevel(logging.INFO) - - # Validating and inferring the arguments - if not gcp_project_id: - _, gcp_project_id = google.auth.default() - - # Building the API client. - # The main API does not work, so we need to build from the published discovery document. - def create_caip_optimizer_client(project_id): - from google.cloud import storage - from googleapiclient import discovery - _OPTIMIZER_API_DOCUMENT_BUCKET = 'caip-optimizer-public' - _OPTIMIZER_API_DOCUMENT_FILE = 'api/ml_public_google_rest_v1.json' - client = storage.Client(project_id) - bucket = client.get_bucket(_OPTIMIZER_API_DOCUMENT_BUCKET) - blob = bucket.get_blob(_OPTIMIZER_API_DOCUMENT_FILE) - discovery_document = blob.download_as_bytes() - return discovery.build_from_document(service=discovery_document) - - ml_api = create_caip_optimizer_client(gcp_project_id) - - if not metric_specs: - metric_specs=[{ - 'metric': 'metric', - 'goal': optimization_goal, - }] - study_config = { - 'algorithm': 'ALGORITHM_UNSPECIFIED', # Let the service choose the `default` algorithm. 
- 'parameters': parameter_specs, - 'metrics': metric_specs, - } - study = {'study_config': study_config} - - create_study_request = ml_api.projects().locations().studies().create( - parent=f'projects/{gcp_project_id}/locations/{gcp_region}', - studyId=study_id, - body=study, - ) - create_study_response = create_study_request.execute() - study_name = create_study_response['name'] - return (study_name,) - - def _serialize_str(str_value: str) -> str: - if not isinstance(str_value, str): - raise TypeError('Value "{}" has type "{}" instead of str.'.format(str(str_value), str(type(str_value)))) - return str_value - - import json - import argparse - _parser = argparse.ArgumentParser(prog='Create study in gcp ai platform optimizer', description='Creates a Google Cloud AI Plaform Optimizer study.') - _parser.add_argument("--study-id", dest="study_id", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--parameter-specs", dest="parameter_specs", type=json.loads, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--optimization-goal", dest="optimization_goal", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--metric-specs", dest="metric_specs", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--gcp-project-id", dest="gcp_project_id", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--gcp-region", dest="gcp_region", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = create_study_in_gcp_ai_platform_optimizer(**_parsed_args) - - _output_serializers = [ - _serialize_str, - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - --study-id - - {inputValue: study_id} - - --parameter-specs - - {inputValue: parameter_specs} - - if: - cond: {isPresent: optimization_goal} - then: - - --optimization-goal - - {inputValue: optimization_goal} - - if: - cond: {isPresent: metric_specs} - then: - - --metric-specs - - {inputValue: metric_specs} - - if: - cond: {isPresent: gcp_project_id} - then: - - --gcp-project-id - - {inputValue: gcp_project_id} - - if: - cond: {isPresent: gcp_region} - then: - - --gcp-region - - {inputValue: gcp_region} - - '----output-paths' - - {outputPath: study_name} diff --git a/components/contrib/google-cloud/Optimizer/Suggest_parameter_sets_based_on_measurements/component.py b/components/contrib/google-cloud/Optimizer/Suggest_parameter_sets_based_on_measurements/component.py deleted file mode 100644 index bdb13896e6c..00000000000 --- a/components/contrib/google-cloud/Optimizer/Suggest_parameter_sets_based_on_measurements/component.py +++ /dev/null @@ -1,188 +0,0 @@ -from typing import NamedTuple - -from kfp.components import create_component_from_func - -def suggest_parameter_sets_from_measurements_using_gcp_ai_platform_optimizer( - parameter_specs: list, - metrics_for_parameter_sets: list, - suggestion_count: int, - maximize: bool = False, - metric_specs: list = None, - gcp_project_id: str = None, - gcp_region: str = "us-central1", -) -> NamedTuple('Outputs', [ - ("suggested_parameter_sets", list), -]): - """Suggests trials (parameter sets) to evaluate. 
- See https://cloud.google.com/ai-platform/optimizer/docs - - Annotations: - author: Alexey Volkov - - Args: - parameter_specs: List of parameter specs. See https://cloud.google.com/ai-platform/optimizer/docs/reference/rest/v1/projects.locations.studies#parameterspec - metrics_for_parameter_sets: List of parameter sets and evaluation metrics for them. Each list item contains "parameters" dict and "metrics" dict. Example: {"parameters": {"p1": 1.1, "p2": 2.2}, "metrics": {"metric1": 101, "metric2": 102} } - maximize: Whether to miaximize or minimize when optimizing a single metric.Default is to minimize. Ignored if metric_specs list is provided. - metric_specs: List of metric specs. See https://cloud.google.com/ai-platform/optimizer/docs/reference/rest/v1/projects.locations.studies#metricspec - suggestion_count: Number of suggestions to request. - - suggested_parameter_sets: List of parameter set dictionaries. - """ - - import logging - import random - import time - - import google.auth - from googleapiclient import discovery - - logging.getLogger().setLevel(logging.INFO) - - client_id = 'client1' - - credentials, default_project_id = google.auth.default() - - # Validating and inferring the arguments - if not gcp_project_id: - gcp_project_id = default_project_id - - # Building the API client. - # The main API does not work, so we need to build from the published discovery document. - def create_caip_optimizer_client(project_id): - from google.cloud import storage - _OPTIMIZER_API_DOCUMENT_BUCKET = 'caip-optimizer-public' - _OPTIMIZER_API_DOCUMENT_FILE = 'api/ml_public_google_rest_v1.json' - client = storage.Client(project_id) - bucket = client.get_bucket(_OPTIMIZER_API_DOCUMENT_BUCKET) - blob = bucket.get_blob(_OPTIMIZER_API_DOCUMENT_FILE) - discovery_document = blob.download_as_bytes() - return discovery.build_from_document(service=discovery_document) - - # Workaround for the Optimizer bug: Optimizer returns resource names that use project number, but only supports resource names with project IDs when making requests - def get_project_number(project_id): - service = discovery.build('cloudresourcemanager', 'v1', credentials=credentials) - response = service.projects().get(projectId=project_id).execute() - return response['projectNumber'] - - gcp_project_number = get_project_number(gcp_project_id) - - def fix_resource_name(name): - return name.replace(gcp_project_number, gcp_project_id) - - ml_api = create_caip_optimizer_client(gcp_project_id) - studies_api = ml_api.projects().locations().studies() - trials_api = ml_api.projects().locations().studies().trials() - operations_api = ml_api.projects().locations().operations() - - random_integer = random.SystemRandom().getrandbits(256) - study_id = '{:064x}'.format(random_integer) - - if not metric_specs: - metric_specs=[{ - 'metric': 'metric', - 'goal': 'MAXIMIZE' if maximize else 'MINIMIZE', - }] - study_config = { - 'algorithm': 'ALGORITHM_UNSPECIFIED', # Let the service choose the `default` algorithm. 
- 'parameters': parameter_specs, - 'metrics': metric_specs, - } - study = {'study_config': study_config} - - logging.info(f'Creating temporary study {study_id}') - create_study_request = studies_api.create( - parent=f'projects/{gcp_project_id}/locations/{gcp_region}', - studyId=study_id, - body=study, - ) - create_study_response = create_study_request.execute() - study_name = create_study_response['name'] - - paremeter_type_names = {parameter_spec['parameter']: parameter_spec['type'] for parameter_spec in parameter_specs} - def parameter_name_and_value_to_dict(parameter_name: str, parameter_value) -> dict: - result = {'parameter': parameter_name} - paremeter_type_name = paremeter_type_names[parameter_name] - if paremeter_type_name in ['DOUBLE', 'DISCRETE']: - result['floatValue'] = parameter_value - elif paremeter_type_name == 'INTEGER': - result['intValue'] = parameter_value - elif paremeter_type_name == 'CATEGORICAL': - result['stringValue'] = parameter_value - else: - raise TypeError(f'Unsupported parameter type "{paremeter_type_name}"') - return result - - try: - logging.info(f'Adding {len(metrics_for_parameter_sets)} measurements to the study.') - for parameters_and_metrics in metrics_for_parameter_sets: - parameter_set = parameters_and_metrics['parameters'] - metrics_set = parameters_and_metrics['metrics'] - trial = { - 'parameters': [ - parameter_name_and_value_to_dict(parameter_name, parameter_value) - for parameter_name, parameter_value in parameter_set.items() - ], - 'finalMeasurement': { - 'metrics': [ - { - 'metric': metric_name, - 'value': metric_value, - } - for metric_name, metric_value in metrics_set.items() - ], - }, - 'state': 'COMPLETED', - } - create_trial_response = trials_api.create( - parent=fix_resource_name(study_name), - body=trial, - ).execute() - trial_name = create_trial_response["name"] - logging.info(f'Added trial "{trial_name}" to the study.') - - logging.info(f'Requesting suggestions.') - suggest_trials_request = trials_api.suggest( - parent=fix_resource_name(study_name), - body=dict( - suggestionCount=suggestion_count, - clientId=client_id, - ), - ) - suggest_trials_response = suggest_trials_request.execute() - operation_name = suggest_trials_response['name'] - while True: - get_operation_response = operations_api.get( - name=fix_resource_name(operation_name), - ).execute() - # Knowledge: The "done" key is just missing until the result is available - if get_operation_response.get('done'): - break - logging.info('Operation not finished yet: ' + str(get_operation_response)) - time.sleep(10) - operation_response = get_operation_response['response'] - suggested_trials = operation_response['trials'] - - suggested_parameter_sets = [ - { - parameter['parameter']: parameter.get('floatValue') or parameter.get('intValue') or parameter.get('stringValue') or 0.0 - for parameter in trial['parameters'] - } - for trial in suggested_trials - ] - return (suggested_parameter_sets,) - finally: - logging.info(f'Deleting study: "{study_name}"') - studies_api.delete(name=fix_resource_name(study_name)) - - -if __name__ == '__main__': - suggest_parameter_sets_from_measurements_using_gcp_ai_platform_optimizer_op = create_component_from_func( - suggest_parameter_sets_from_measurements_using_gcp_ai_platform_optimizer, - base_image='python:3.8', - packages_to_install=['google-api-python-client==1.12.3', 'google-cloud-storage==1.31.2', 'google-auth==1.21.3'], - output_component_file='component.yaml', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": 
"https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/google-cloud/Optimizer/Suggest_parameter_sets_based_on_measurements/component.yaml", - }, - ) diff --git a/components/contrib/google-cloud/Optimizer/Suggest_parameter_sets_based_on_measurements/component.yaml b/components/contrib/google-cloud/Optimizer/Suggest_parameter_sets_based_on_measurements/component.yaml deleted file mode 100644 index 10b1f0a0842..00000000000 --- a/components/contrib/google-cloud/Optimizer/Suggest_parameter_sets_based_on_measurements/component.yaml +++ /dev/null @@ -1,284 +0,0 @@ -name: Suggest parameter sets from measurements using gcp ai platform optimizer -description: Suggests trials (parameter sets) to evaluate. -inputs: -- {name: parameter_specs, type: JsonArray, description: 'List of parameter specs. - See https://cloud.google.com/ai-platform/optimizer/docs/reference/rest/v1/projects.locations.studies#parameterspec'} -- {name: metrics_for_parameter_sets, type: JsonArray, description: 'List of parameter - sets and evaluation metrics for them. Each list item contains "parameters" dict - and "metrics" dict. Example: {"parameters": {"p1": 1.1, "p2": 2.2}, "metrics": - {"metric1": 101, "metric2": 102} }'} -- {name: suggestion_count, type: Integer, description: Number of suggestions to request.} -- name: maximize - type: Boolean - description: Whether to miaximize or minimize when optimizing a single metric.Default - is to minimize. Ignored if metric_specs list is provided. - default: "False" - optional: true -- {name: metric_specs, type: JsonArray, description: 'List of metric specs. See https://cloud.google.com/ai-platform/optimizer/docs/reference/rest/v1/projects.locations.studies#metricspec', - optional: true} -- {name: gcp_project_id, type: String, optional: true} -- {name: gcp_region, type: String, default: us-central1, optional: true} -outputs: -- {name: suggested_parameter_sets, type: JsonArray} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/google-cloud/Optimizer/Suggest_parameter_sets_based_on_measurements/component.yaml' -implementation: - container: - image: python:3.8 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'google-api-python-client==1.12.3' 'google-cloud-storage==1.31.2' 'google-auth==1.21.3' - || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'google-api-python-client==1.12.3' 'google-cloud-storage==1.31.2' 'google-auth==1.21.3' - --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def suggest_parameter_sets_from_measurements_using_gcp_ai_platform_optimizer( - parameter_specs, - metrics_for_parameter_sets, - suggestion_count, - maximize = False, - metric_specs = None, - gcp_project_id = None, - gcp_region = "us-central1", - ): - """Suggests trials (parameter sets) to evaluate. - See https://cloud.google.com/ai-platform/optimizer/docs - - Annotations: - author: Alexey Volkov - - Args: - parameter_specs: List of parameter specs. See https://cloud.google.com/ai-platform/optimizer/docs/reference/rest/v1/projects.locations.studies#parameterspec - metrics_for_parameter_sets: List of parameter sets and evaluation metrics for them. Each list item contains "parameters" dict and "metrics" dict. 
Example: {"parameters": {"p1": 1.1, "p2": 2.2}, "metrics": {"metric1": 101, "metric2": 102} } - maximize: Whether to miaximize or minimize when optimizing a single metric.Default is to minimize. Ignored if metric_specs list is provided. - metric_specs: List of metric specs. See https://cloud.google.com/ai-platform/optimizer/docs/reference/rest/v1/projects.locations.studies#metricspec - suggestion_count: Number of suggestions to request. - - suggested_parameter_sets: List of parameter set dictionaries. - """ - - import logging - import random - import time - - import google.auth - from googleapiclient import discovery - - logging.getLogger().setLevel(logging.INFO) - - client_id = 'client1' - - credentials, default_project_id = google.auth.default() - - # Validating and inferring the arguments - if not gcp_project_id: - gcp_project_id = default_project_id - - # Building the API client. - # The main API does not work, so we need to build from the published discovery document. - def create_caip_optimizer_client(project_id): - from google.cloud import storage - _OPTIMIZER_API_DOCUMENT_BUCKET = 'caip-optimizer-public' - _OPTIMIZER_API_DOCUMENT_FILE = 'api/ml_public_google_rest_v1.json' - client = storage.Client(project_id) - bucket = client.get_bucket(_OPTIMIZER_API_DOCUMENT_BUCKET) - blob = bucket.get_blob(_OPTIMIZER_API_DOCUMENT_FILE) - discovery_document = blob.download_as_bytes() - return discovery.build_from_document(service=discovery_document) - - # Workaround for the Optimizer bug: Optimizer returns resource names that use project number, but only supports resource names with project IDs when making requests - def get_project_number(project_id): - service = discovery.build('cloudresourcemanager', 'v1', credentials=credentials) - response = service.projects().get(projectId=project_id).execute() - return response['projectNumber'] - - gcp_project_number = get_project_number(gcp_project_id) - - def fix_resource_name(name): - return name.replace(gcp_project_number, gcp_project_id) - - ml_api = create_caip_optimizer_client(gcp_project_id) - studies_api = ml_api.projects().locations().studies() - trials_api = ml_api.projects().locations().studies().trials() - operations_api = ml_api.projects().locations().operations() - - random_integer = random.SystemRandom().getrandbits(256) - study_id = '{:064x}'.format(random_integer) - - if not metric_specs: - metric_specs=[{ - 'metric': 'metric', - 'goal': 'MAXIMIZE' if maximize else 'MINIMIZE', - }] - study_config = { - 'algorithm': 'ALGORITHM_UNSPECIFIED', # Let the service choose the `default` algorithm. 
- 'parameters': parameter_specs, - 'metrics': metric_specs, - } - study = {'study_config': study_config} - - logging.info(f'Creating temporary study {study_id}') - create_study_request = studies_api.create( - parent=f'projects/{gcp_project_id}/locations/{gcp_region}', - studyId=study_id, - body=study, - ) - create_study_response = create_study_request.execute() - study_name = create_study_response['name'] - - paremeter_type_names = {parameter_spec['parameter']: parameter_spec['type'] for parameter_spec in parameter_specs} - def parameter_name_and_value_to_dict(parameter_name, parameter_value): - result = {'parameter': parameter_name} - paremeter_type_name = paremeter_type_names[parameter_name] - if paremeter_type_name in ['DOUBLE', 'DISCRETE']: - result['floatValue'] = parameter_value - elif paremeter_type_name == 'INTEGER': - result['intValue'] = parameter_value - elif paremeter_type_name == 'CATEGORICAL': - result['stringValue'] = parameter_value - else: - raise TypeError(f'Unsupported parameter type "{paremeter_type_name}"') - return result - - try: - logging.info(f'Adding {len(metrics_for_parameter_sets)} measurements to the study.') - for parameters_and_metrics in metrics_for_parameter_sets: - parameter_set = parameters_and_metrics['parameters'] - metrics_set = parameters_and_metrics['metrics'] - trial = { - 'parameters': [ - parameter_name_and_value_to_dict(parameter_name, parameter_value) - for parameter_name, parameter_value in parameter_set.items() - ], - 'finalMeasurement': { - 'metrics': [ - { - 'metric': metric_name, - 'value': metric_value, - } - for metric_name, metric_value in metrics_set.items() - ], - }, - 'state': 'COMPLETED', - } - create_trial_response = trials_api.create( - parent=fix_resource_name(study_name), - body=trial, - ).execute() - trial_name = create_trial_response["name"] - logging.info(f'Added trial "{trial_name}" to the study.') - - logging.info(f'Requesting suggestions.') - suggest_trials_request = trials_api.suggest( - parent=fix_resource_name(study_name), - body=dict( - suggestionCount=suggestion_count, - clientId=client_id, - ), - ) - suggest_trials_response = suggest_trials_request.execute() - operation_name = suggest_trials_response['name'] - while True: - get_operation_response = operations_api.get( - name=fix_resource_name(operation_name), - ).execute() - # Knowledge: The "done" key is just missing until the result is available - if get_operation_response.get('done'): - break - logging.info('Operation not finished yet: ' + str(get_operation_response)) - time.sleep(10) - operation_response = get_operation_response['response'] - suggested_trials = operation_response['trials'] - - suggested_parameter_sets = [ - { - parameter['parameter']: parameter.get('floatValue') or parameter.get('intValue') or parameter.get('stringValue') or 0.0 - for parameter in trial['parameters'] - } - for trial in suggested_trials - ] - return (suggested_parameter_sets,) - finally: - logging.info(f'Deleting study: "{study_name}"') - studies_api.delete(name=fix_resource_name(study_name)) - - import json - def _serialize_json(obj) -> str: - if isinstance(obj, str): - return obj - import json - def default_serializer(obj): - if hasattr(obj, 'to_struct'): - return obj.to_struct() - else: - raise TypeError("Object of type '%s' is not JSON serializable and does not have .to_struct() method." 
% obj.__class__.__name__) - return json.dumps(obj, default=default_serializer, sort_keys=True) - - def _deserialize_bool(s) -> bool: - from distutils.util import strtobool - return strtobool(s) == 1 - - import argparse - _parser = argparse.ArgumentParser(prog='Suggest parameter sets from measurements using gcp ai platform optimizer', description='Suggests trials (parameter sets) to evaluate.') - _parser.add_argument("--parameter-specs", dest="parameter_specs", type=json.loads, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--metrics-for-parameter-sets", dest="metrics_for_parameter_sets", type=json.loads, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--suggestion-count", dest="suggestion_count", type=int, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--maximize", dest="maximize", type=_deserialize_bool, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--metric-specs", dest="metric_specs", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--gcp-project-id", dest="gcp_project_id", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--gcp-region", dest="gcp_region", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = suggest_parameter_sets_from_measurements_using_gcp_ai_platform_optimizer(**_parsed_args) - - _output_serializers = [ - _serialize_json, - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - --parameter-specs - - {inputValue: parameter_specs} - - --metrics-for-parameter-sets - - {inputValue: metrics_for_parameter_sets} - - --suggestion-count - - {inputValue: suggestion_count} - - if: - cond: {isPresent: maximize} - then: - - --maximize - - {inputValue: maximize} - - if: - cond: {isPresent: metric_specs} - then: - - --metric-specs - - {inputValue: metric_specs} - - if: - cond: {isPresent: gcp_project_id} - then: - - --gcp-project-id - - {inputValue: gcp_project_id} - - if: - cond: {isPresent: gcp_region} - then: - - --gcp-region - - {inputValue: gcp_region} - - '----output-paths' - - {outputPath: suggested_parameter_sets} diff --git a/components/contrib/google-cloud/Optimizer/Suggest_trials/component.py b/components/contrib/google-cloud/Optimizer/Suggest_trials/component.py deleted file mode 100644 index 610e2f90d2e..00000000000 --- a/components/contrib/google-cloud/Optimizer/Suggest_trials/component.py +++ /dev/null @@ -1,100 +0,0 @@ -from typing import NamedTuple - -from kfp.components import create_component_from_func - -def suggest_trials_in_gcp_ai_platform_optimizer( - study_name: str, - suggestion_count: int, - gcp_project_id: str = None, - gcp_region: str = "us-central1", -) -> NamedTuple('Outputs', [ - ("suggested_trials", list), -]): - """Suggests trials (parameter sets) to evaluate. - See https://cloud.google.com/ai-platform/optimizer/docs - - Annotations: - author: Alexey Volkov - - Args: - study_name: Full resource name of the study. - suggestion_count: Number of suggestions to request. 
- """ - - import logging - import time - - import google.auth - from googleapiclient import discovery - - logging.getLogger().setLevel(logging.INFO) - - client_id = 'client1' - - credentials, default_project_id = google.auth.default() - - # Validating and inferring the arguments - if not gcp_project_id: - gcp_project_id = default_project_id - - # Building the API client. - # The main API does not work, so we need to build from the published discovery document. - def create_caip_optimizer_client(project_id): - from google.cloud import storage - _OPTIMIZER_API_DOCUMENT_BUCKET = 'caip-optimizer-public' - _OPTIMIZER_API_DOCUMENT_FILE = 'api/ml_public_google_rest_v1.json' - client = storage.Client(project_id) - bucket = client.get_bucket(_OPTIMIZER_API_DOCUMENT_BUCKET) - blob = bucket.get_blob(_OPTIMIZER_API_DOCUMENT_FILE) - discovery_document = blob.download_as_bytes() - return discovery.build_from_document(service=discovery_document) - - # Workaround for the Optimizer bug: Optimizer returns resource names that use project number, but only supports resource names with project IDs when making requests - def get_project_number(project_id): - service = discovery.build('cloudresourcemanager', 'v1', credentials=credentials) - response = service.projects().get(projectId=project_id).execute() - return response['projectNumber'] - - gcp_project_number = get_project_number(gcp_project_id) - - def fix_resource_name(name): - return name.replace(gcp_project_number, gcp_project_id) - - ml_api = create_caip_optimizer_client(gcp_project_id) - trials_api = ml_api.projects().locations().studies().trials() - operations_api = ml_api.projects().locations().operations() - - suggest_trials_request = trials_api.suggest( - parent=fix_resource_name(study_name), - body=dict( - suggestionCount=suggestion_count, - clientId=client_id, - ), - ) - suggest_trials_response = suggest_trials_request.execute() - operation_name = suggest_trials_response['name'] - while True: - get_operation_response = operations_api.get( - name=fix_resource_name(operation_name), - ).execute() - # Knowledge: The "done" key is just missing until the result is available - if get_operation_response.get('done'): - break - logging.info('Not finished yet: ' + str(get_operation_response)) - time.sleep(10) - operation_response = get_operation_response['response'] - suggested_trials = operation_response['trials'] - return (suggested_trials,) - - -if __name__ == '__main__': - suggest_trials_in_gcp_ai_platform_optimizer_op = create_component_from_func( - suggest_trials_in_gcp_ai_platform_optimizer, - base_image='python:3.8', - packages_to_install=['google-api-python-client==1.12.3', 'google-cloud-storage==1.31.2', 'google-auth==1.21.3'], - output_component_file='component.yaml', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/google-cloud/Optimizer/Suggest_trials/component.yaml", - }, - ) diff --git a/components/contrib/google-cloud/Optimizer/Suggest_trials/component.yaml b/components/contrib/google-cloud/Optimizer/Suggest_trials/component.yaml deleted file mode 100644 index a4b2c4f1371..00000000000 --- a/components/contrib/google-cloud/Optimizer/Suggest_trials/component.yaml +++ /dev/null @@ -1,163 +0,0 @@ -name: Suggest trials in gcp ai platform optimizer -description: Suggests trials (parameter sets) to evaluate. 
-inputs: -- {name: study_name, type: String, description: Full resource name of the study.} -- {name: suggestion_count, type: Integer, description: Number of suggestions to request.} -- {name: gcp_project_id, type: String, optional: true} -- {name: gcp_region, type: String, default: us-central1, optional: true} -outputs: -- {name: suggested_trials, type: JsonArray} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/google-cloud/Optimizer/Suggest_trials/component.yaml' -implementation: - container: - image: python:3.8 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'google-api-python-client==1.12.3' 'google-cloud-storage==1.31.2' 'google-auth==1.21.3' - || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'google-api-python-client==1.12.3' 'google-cloud-storage==1.31.2' 'google-auth==1.21.3' - --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def suggest_trials_in_gcp_ai_platform_optimizer( - study_name, - suggestion_count, - gcp_project_id = None, - gcp_region = "us-central1", - ): - """Suggests trials (parameter sets) to evaluate. - See https://cloud.google.com/ai-platform/optimizer/docs - - Annotations: - author: Alexey Volkov - - Args: - study_name: Full resource name of the study. - suggestion_count: Number of suggestions to request. - """ - - import logging - import time - - import google.auth - from googleapiclient import discovery - - logging.getLogger().setLevel(logging.INFO) - - client_id = 'client1' - - credentials, default_project_id = google.auth.default() - - # Validating and inferring the arguments - if not gcp_project_id: - gcp_project_id = default_project_id - - # Building the API client. - # The main API does not work, so we need to build from the published discovery document. 
- def create_caip_optimizer_client(project_id): - from google.cloud import storage - _OPTIMIZER_API_DOCUMENT_BUCKET = 'caip-optimizer-public' - _OPTIMIZER_API_DOCUMENT_FILE = 'api/ml_public_google_rest_v1.json' - client = storage.Client(project_id) - bucket = client.get_bucket(_OPTIMIZER_API_DOCUMENT_BUCKET) - blob = bucket.get_blob(_OPTIMIZER_API_DOCUMENT_FILE) - discovery_document = blob.download_as_bytes() - return discovery.build_from_document(service=discovery_document) - - # Workaround for the Optimizer bug: Optimizer returns resource names that use project number, but only supports resource names with project IDs when making requests - def get_project_number(project_id): - service = discovery.build('cloudresourcemanager', 'v1', credentials=credentials) - response = service.projects().get(projectId=project_id).execute() - return response['projectNumber'] - - gcp_project_number = get_project_number(gcp_project_id) - - def fix_resource_name(name): - return name.replace(gcp_project_number, gcp_project_id) - - ml_api = create_caip_optimizer_client(gcp_project_id) - trials_api = ml_api.projects().locations().studies().trials() - operations_api = ml_api.projects().locations().operations() - - suggest_trials_request = trials_api.suggest( - parent=fix_resource_name(study_name), - body=dict( - suggestionCount=suggestion_count, - clientId=client_id, - ), - ) - suggest_trials_response = suggest_trials_request.execute() - operation_name = suggest_trials_response['name'] - while True: - get_operation_response = operations_api.get( - name=fix_resource_name(operation_name), - ).execute() - # Knowledge: The "done" key is just missing until the result is available - if get_operation_response.get('done'): - break - logging.info('Not finished yet: ' + str(get_operation_response)) - time.sleep(10) - operation_response = get_operation_response['response'] - suggested_trials = operation_response['trials'] - return (suggested_trials,) - - def _serialize_json(obj) -> str: - if isinstance(obj, str): - return obj - import json - def default_serializer(obj): - if hasattr(obj, 'to_struct'): - return obj.to_struct() - else: - raise TypeError("Object of type '%s' is not JSON serializable and does not have .to_struct() method." 
% obj.__class__.__name__) - return json.dumps(obj, default=default_serializer, sort_keys=True) - - import argparse - _parser = argparse.ArgumentParser(prog='Suggest trials in gcp ai platform optimizer', description='Suggests trials (parameter sets) to evaluate.') - _parser.add_argument("--study-name", dest="study_name", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--suggestion-count", dest="suggestion_count", type=int, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--gcp-project-id", dest="gcp_project_id", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--gcp-region", dest="gcp_region", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = suggest_trials_in_gcp_ai_platform_optimizer(**_parsed_args) - - _output_serializers = [ - _serialize_json, - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - --study-name - - {inputValue: study_name} - - --suggestion-count - - {inputValue: suggestion_count} - - if: - cond: {isPresent: gcp_project_id} - then: - - --gcp-project-id - - {inputValue: gcp_project_id} - - if: - cond: {isPresent: gcp_region} - then: - - --gcp-region - - {inputValue: gcp_region} - - '----output-paths' - - {outputPath: suggested_trials} diff --git a/components/contrib/google-cloud/Optimizer/_samples/multi_stage_optimization_atomic_pipeline.py b/components/contrib/google-cloud/Optimizer/_samples/multi_stage_optimization_atomic_pipeline.py deleted file mode 100644 index 3b2b7b1b14a..00000000000 --- a/components/contrib/google-cloud/Optimizer/_samples/multi_stage_optimization_atomic_pipeline.py +++ /dev/null @@ -1,153 +0,0 @@ -# This pipeline demonstrates hyper-parameter optimization. -# The goal is to find a set of hyper-parameter values that helps train the best model. -# We launch several optimization stages sequentially. -# At each stage the optimizer suggests several parameter sets to explore based on the available measurements. -# For each suggested parameter set we train a model (semi-dummy) and measure its quality metrics. -# We then collect the metrics for all suggested parameter sets and update out measurements set. -# With the expanded set of measurements, each new optimization stage should result in better parameter set suggestions. -# -# One aspect of this pipeline is the atomicity of the parameter set suggestion. -# Some optimizers have a persistent mutable global state that is changed when parameter set metrics are submitted. -# The presence of mutable global state may cause reproducibility issues where suggestions for a new model might be based on measurements from a different model. -# The "suggest_parameter_sets_from_measurements_op" in this pipeline is a single operation, which behaves like a pure function and does not rely on external global state. 
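# --- Editorial note (not part of the removed sample): a minimal, KFP-free sketch of the
# "stateless suggestion" pattern described in the comment block above. All names and the
# random-search strategy below are illustrative assumptions; the actual component calls the
# AI Platform Optimizer service rather than this function.
import random
from typing import Dict, List

Measurement = Dict[str, Dict[str, float]]  # {"parameters": {...}, "metrics": {...}}


def suggest_parameter_sets(parameter_specs: List[dict],
                           measurements: List[Measurement],
                           suggestion_count: int) -> List[dict]:
    # A pure function: suggestions depend only on the arguments passed in, so a rerun with
    # the same accumulated measurements yields the same suggestions (no mutable study state
    # is kept on a server between stages).
    rng = random.Random(len(measurements))
    return [
        {
            spec["parameter"]: rng.uniform(spec["double_value_spec"]["min_value"],
                                           spec["double_value_spec"]["max_value"])
            for spec in parameter_specs
        }
        for _ in range(suggestion_count)
    ]


if __name__ == "__main__":
    specs = [{"parameter": "p1", "type": "DOUBLE",
              "double_value_spec": {"min_value": -5, "max_value": 5}}]
    history: List[Measurement] = []
    for stage in range(3):  # mirrors the sequential optimization stages in the sample below
        for params in suggest_parameter_sets(specs, history, suggestion_count=5):
            error = (params["p1"] - 3) ** 2  # stand-in for train_and_measure_model
            history.append({"parameters": params, "metrics": {"metric": error}})
    print(f"collected {len(history)} measurements")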
- -kfp_endpoint = None - - -import kfp -from kfp import components - - -suggest_parameter_sets_from_measurements_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/382c4d109fbd489bd85de54dd9171150e326b401/components/google-cloud/Optimizer/Suggest_parameter_sets_based_on_measurements/component.yaml') - -get_element_by_index_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/55ef28a9d51edc4eeed2a5c6f44cc7457e8a41d8/components/json/Get_element_by_index/component.yaml') -build_dict_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/4a4be6b748b0d1284d65a417ce4ab5bec596e9fe/components/json/Build_dict/component.yaml') -build_list_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/4a4be6b748b0d1284d65a417ce4ab5bec596e9fe/components/json/Build_list/component.yaml') -combine_lists_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/4a4be6b748b0d1284d65a417ce4ab5bec596e9fe/components/json/Combine_lists/component.yaml') - - -# The train_and_measure_model is a semi-dummy component that creates a model given the [hyper]parameters and evaluates that model. -# In this case, the model is a polinomial model. -# The evaluation procedure compares the model with the real function that our model is trying to learn -# and calculates the mean squared error based on a random sample of data points. -# In real world cases this component will be substituted by a sequence of model trainer, predictor and evaluator components. -@components.create_component_from_func -def train_and_measure_model(parameters: dict) -> float: - import random - - def real_function(x): - p1 = 3 - p2 = -1 - p3 = 2 - return p1 * x**2 + p2 * x + p3 - - def get_eval_set() -> dict: - eval_set = {} - num_samples = 100 - for i in range(num_samples): - x = random.normalvariate(0, 1) * 5 - eval_set[x] = real_function(x) - return eval_set - - def train_model(parameters): - def apply_model(x): - return parameters['p1'] * x**2 + parameters['p2'] * x + parameters['p3'] - return apply_model - - model = train_model(parameters) - - eval_set = get_eval_set() - sum_squared_error = 0 - - for x, expected_y in eval_set.items(): - actual_y = model(x) - error = abs(expected_y - actual_y) - squared_error = error ** 2 - sum_squared_error += squared_error - mean_squared_error = sum_squared_error / len(eval_set) - return mean_squared_error - - -parameter_specs=[ - { - 'parameter': 'p1', - 'type': 'DOUBLE', - 'double_value_spec' : { - 'min_value' : -5, - 'max_value' : 5, - } - }, - { - 'parameter': 'p2', - 'type': 'DOUBLE', - 'double_value_spec': { - 'min_value': -5, - 'max_value': 5, - } - }, - { - 'parameter': 'p3', - 'type': 'DOUBLE', - 'double_value_spec': { - 'min_value': -5, - 'max_value': 5, - } - }, -] - - -def optimizer_pipeline(): - # Number of optimization stages and suggestions per stage. - # Note that these numbers cannot be parametrized, since they're used in compile-time python loops. - optimization_stages = 3 - suggestions_per_stage = 5 - - # We launch several optimization stages sequentially. - # At each stage the optimizer suggests several parameter sets to explore based on the available measurements. - # Each stage depends on the completion of all trials in the previous stage (since only completed trials affect new trial suggesions). - # Each optimization stage should result in better parameter set suggestions. 
- all_metrics_for_parameter_sets = [] - for stage in range(optimization_stages): - parameter_sets = suggest_parameter_sets_from_measurements_op( - parameter_specs=parameter_specs, - metrics_for_parameter_sets=all_metrics_for_parameter_sets, - suggestion_count=suggestions_per_stage, - maximize=False, - ).output - - # Evaluate each suggested set of parameters. - # Loop over the suggested parameter sets. - # We collect the metrics for each parameter set so that the next round of suggestions can take them into account. - # Cannot use dsl.ParallelFor here due to a bug in Argo https://github.com/argoproj/argo-workflows/issues/2660 - # Without ParallelFor we have to use a Python loop - # and explicitly get individual parameter sets using the get_element_by_index_op component. - new_metrics_for_parameter_sets = [] - for suggestion_index in range(suggestions_per_stage): - parameter_set = get_element_by_index_op( - json=parameter_sets, - index=suggestion_index, - ).output - - model_error = train_and_measure_model( - parameters=parameter_set, - ).output - - metric_for_parameter_set = build_dict_op( - key_1='parameters', - value_1=parameter_set, - key_2='metrics', - value_2={ - 'metric': model_error, - }, - ).output - - new_metrics_for_parameter_sets.append(metric_for_parameter_set) - # Collecting metrics for the current stage - new_list_of_metrics_for_parameter_sets = build_list_op(*new_metrics_for_parameter_sets).output - # Collecting metrics for all stages - all_metrics_for_parameter_sets = combine_lists_op(all_metrics_for_parameter_sets, new_list_of_metrics_for_parameter_sets).output - - -if __name__ == '__main__': - kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func(optimizer_pipeline, arguments={}) diff --git a/components/contrib/google-cloud/Optimizer/_samples/multi_stage_optimization_pipeline.py b/components/contrib/google-cloud/Optimizer/_samples/multi_stage_optimization_pipeline.py deleted file mode 100644 index a33bd082e9e..00000000000 --- a/components/contrib/google-cloud/Optimizer/_samples/multi_stage_optimization_pipeline.py +++ /dev/null @@ -1,133 +0,0 @@ -kfp_endpoint = None - - -import kfp -from kfp import components - -optimizer_create_study_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/40e117cca61fd923a57a1e84cbd08c22dce4bf00/components/google-cloud/Optimizer/Create_study/component.yaml') -optimizer_suggest_trials_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/40e117cca61fd923a57a1e84cbd08c22dce4bf00/components/google-cloud/Optimizer/Suggest_trials/component.yaml') -optimizer_add_measurement_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/40e117cca61fd923a57a1e84cbd08c22dce4bf00/components/google-cloud/Optimizer/Add_measurement_for_trial/component.yaml') - -get_element_by_index_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/55ef28a9d51edc4eeed2a5c6f44cc7457e8a41d8/components/json/Get_element_by_index/component.yaml') -get_element_by_key_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/55ef28a9d51edc4eeed2a5c6f44cc7457e8a41d8/components/json/Get_element_by_key/component.yaml') -query_json_op = 
components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/55ef28a9d51edc4eeed2a5c6f44cc7457e8a41d8/components/json/Query/component.yaml') - - -# Component that builds a model given the [hyper]parameters and evaluates that model. -# In this case, the model is a polynomial model. -# The evaluation procedure compares the model with the real function that our model is trying to learn -# and calculates the mean squared error based on a random sample of data points. -# In real-world cases this component would be replaced by a sequence of model trainer, predictor and evaluator components. -@components.create_component_from_func -def evaluate_model(parameters: dict) -> float: - import random - - def real_function(x): - p1 = 3 - p2 = -1 - p3 = 2 - return p1 * x**2 + p2 * x + p3 - - def evaluate_model(parameters, x): - return parameters['p1'] * x**2 + parameters['p2'] * x + parameters['p3'] - - sum_squared_error = 0 - num_samples = 100 - for i in range(num_samples): - x = random.normalvariate(0, 1) * 5 - real_y = real_function(x) - actual_y = evaluate_model(parameters, x) - error = abs(real_y - actual_y) - squared_error = error ** 2 - sum_squared_error += squared_error - mean_squared_error = sum_squared_error / num_samples - return mean_squared_error - - -def optimizer_pipeline( -): - optimization_stages = 3 - trials_per_stage = 5 - - study_name = optimizer_create_study_op( - study_id='Study4', - parameter_specs=[ - { - 'parameter': 'p1', - 'type': 'DOUBLE', - 'double_value_spec' : { - 'min_value' : -5, - 'max_value' : 5, - } - }, - { - 'parameter': 'p2', - 'type': 'DOUBLE', - 'double_value_spec': { - 'min_value': -5, - 'max_value': 5, - } - }, - { - 'parameter': 'p3', - 'type': 'DOUBLE', - 'double_value_spec': { - 'min_value': -5, - 'max_value': 5, - } - }, - ], - optimization_goal='MINIMIZE', - ).outputs['study_name'] - - # We launch several optimization stages sequentially. - # Each stage depends on the completion of all trials in the previous stage (since only completed trials affect new trial suggestions). - # Each optimization stage should result in better parameter set suggestions. - trial_measurement_tasks = [] - for stage in range(optimization_stages): - suggest_trials_task = optimizer_suggest_trials_op( - study_name=study_name, - suggestion_count=trials_per_stage, - ) - suggest_trials_task.after(*trial_measurement_tasks) - - trials = suggest_trials_task.output - - # Evaluate each suggested set of parameters. - # Loop over the suggested trials. - # We need to collect the created tasks in the `trial_measurement_tasks` list so that the next round of suggestions can depend on their completion. - # Cannot use dsl.ParallelFor here due to a bug in Argo https://github.com/argoproj/argo-workflows/issues/2660 - # Without ParallelFor we have to use a Python loop - # and explicitly get individual suggestions using the get_element_by_index_op component - # then extract the trial name and parameter sets using get_element_by_key_op and query_json_op components.
- trial_measurement_tasks = [] - for trial_index in range(trials_per_stage): - trial = get_element_by_index_op( - json=trials, - index=trial_index, - ).output - - trial_name = get_element_by_key_op( - json=trial, - key='name', - ).output - - trial_parameters = query_json_op( - json=trial, - query='.parameters | map( {(.parameter): (.floatValue // .intValue // .stringValue)} ) | add', - ).output - - model_error = evaluate_model( - parameters=trial_parameters, - ).output - - add_measurement_task = optimizer_add_measurement_op( - trial_name=trial_name, - metric_value=model_error, - ) - - trial_measurement_tasks.append(add_measurement_task) - - -if __name__ == '__main__': - kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func(optimizer_pipeline, arguments={}) diff --git a/components/contrib/google-cloud/automl/Tables/Create_dataset/from_CSV/component.py b/components/contrib/google-cloud/automl/Tables/Create_dataset/from_CSV/component.py deleted file mode 100644 index 4f2179bb9d7..00000000000 --- a/components/contrib/google-cloud/automl/Tables/Create_dataset/from_CSV/component.py +++ /dev/null @@ -1,182 +0,0 @@ -from typing import NamedTuple - -from kfp.components import create_component_from_func, InputPath, OutputPath - -def automl_create_tables_dataset_from_csv( - data_path: InputPath('CSV'), - target_column_name: str = None, - column_nullability: dict = {}, - column_types: dict = {}, - gcs_staging_uri: str = None, # Currently AutoML Tables only supports regional buckets in "us-central1". - gcp_project_id: str = None, - gcp_region: str = 'us-central1', # Currently "us-central1" is the only region supported by AutoML tables. -) -> NamedTuple('Outputs', [ - ('dataset_name', str), - ('dataset_url', 'URI'), -]): - '''Creates Google Cloud AutoML Tables Dataset from CSV data. - - Annotations: - author: Alexey Volkov - - Args: - data_path: Data in CSV format that will be imported to the dataset. - target_column_name: Name of the target column for training. - column_nullability: Maps column name to boolean specifying whether the column should be marked as nullable. - column_types: Maps column name to column type. Supported types: FLOAT64, CATEGORY, STRING. - gcs_staging_uri: URI of the data staging location in Google Cloud Storage. The bucket must have the us-central1 region. If not specified, a new staging bucket will be created. - gcp_project_id: Google Cloud project ID. If not set, the default one will be used. - gcp_region: Google Cloud region. AutoML Tables only supports us-central1. 
- Returns: - dataset_name: AutoML dataset name (fully-qualified) - ''' - - import logging - import random - - import google.auth - from google.cloud import automl_v1beta1 as automl - from google.cloud import storage - - logging.getLogger().setLevel(logging.INFO) - - # Validating and inferring the arguments - - if not gcp_project_id: - _, gcp_project_id = google.auth.default() - - if not gcp_region: - gcp_region = 'us-central1' - if gcp_region != 'us-central1': - logging.warn('AutoML only supports the us-central1 region') - - dataset_display_name = 'Dataset' # Allowed characters for displayName are ASCII Latin letters A-Z and a-z, an underscore (_), and ASCII digits 0-9 - - column_nullability = column_nullability or {} - for name, nullability in column_nullability.items(): - assert isinstance(name, str) - assert isinstance(nullability, bool) - - column_types = column_types or {} - for name, data_type in column_types.items(): - assert isinstance(name, str) - if not hasattr(automl.TypeCode, data_type): - supported_types = [type_name for type_name in dir(automl.TypeCode) if type_name[0] != '_'] - raise ValueError(f'Unknow column type "{data_type}". Supported types: {supported_types}') - - # Generating execution ID for data staging - random_integer = random.SystemRandom().getrandbits(256) - execution_id = '{:064x}'.format(random_integer) - logging.info(f'Execution ID: {execution_id}') - - logging.info('Uploading the data to storage') - # TODO: Split table into < 100MB chunks as required by AutoML Tables - storage_client = storage.Client() - if gcs_staging_uri: - if not gcs_staging_uri.startswith('gs://'): - raise ValueError(f"Invalid staging storage URI: {gcs_staging_uri}") - (bucket_name, blob_prefix) = gcs_staging_uri[5:].split('/', 1) - bucket = storage_client.get_bucket(bucket_name) - else: - bucket_name = gcp_project_id + '_staging_' + gcp_region - try: - bucket = storage_client.get_bucket(bucket_name) - except Exception as ex: - logging.info(f'Creating Storage bucket {bucket_name}') - bucket = storage_client.create_bucket( - bucket_or_name=bucket_name, - project=gcp_project_id, - location=gcp_region, - ) - logging.info(f'Created Storage bucket {bucket.name}') - blob_prefix = 'google.cloud.automl_tmp' - - # AutoML Tables import data requires that "the file name must have a (case-insensitive) '.CSV' file extension" - training_data_blob_name = blob_prefix.rstrip('/') + '/' + execution_id + '/' + 'training_data.csv' - training_data_blob_uri = f'gs://{bucket.name}/{training_data_blob_name}' - training_data_blob = bucket.blob(training_data_blob_name) - logging.info(f'Uploading training data to {training_data_blob_uri}') - training_data_blob.upload_from_filename(data_path) - - logging.info(f'Creating AutoML Tables dataset.') - automl_client = automl.AutoMlClient() - - project_location_path = f'projects/{gcp_project_id}/locations/{gcp_region}' - - dataset = automl.Dataset( - display_name=dataset_display_name, - tables_dataset_metadata=automl.TablesDatasetMetadata(), - # labels={}, - ) - dataset = automl_client.create_dataset( - dataset=dataset, - parent=project_location_path, - ) - dataset_id = dataset.name.split('/')[-1] - dataset_web_url = f'https://console.cloud.google.com/automl-tables/locations/{gcp_region}/datasets/{dataset_id}' - logging.info(f'Created dataset {dataset.name}. 
Link: {dataset_web_url}') - - logging.info(f'Importing data to the dataset: {dataset.name}.') - import_data_input_config = automl.InputConfig( - gcs_source=automl.GcsSource( - input_uris=[training_data_blob_uri], - ) - ) - import_data_response = automl_client.import_data( - name=dataset.name, - input_config=import_data_input_config, - ) - import_data_response.result() - dataset = automl_client.get_dataset( - name=dataset.name, - ) - logging.info(f'Finished importing data.') - - logging.info('Updating column specs') - target_column_spec = None - primary_table_spec_name = dataset.name + '/tableSpecs/' + dataset.tables_dataset_metadata.primary_table_spec_id - table_specs_list = list(automl_client.list_table_specs( - parent=dataset.name, - )) - for table_spec in table_specs_list: - table_spec_id = table_spec.name.split('/')[-1] - column_specs_list = list(automl_client.list_column_specs( - parent=table_spec.name, - )) - is_primary_table = table_spec.name == primary_table_spec_name - for column_spec in column_specs_list: - if column_spec.display_name == target_column_name and is_primary_table: - target_column_spec = column_spec - column_updated = False - if column_spec.display_name in column_nullability: - column_spec.data_type.nullable = column_nullability[column_spec.display_name] - column_updated = True - if column_spec.display_name in column_types: - new_column_type = column_types[column_spec.display_name] - column_spec.data_type.type_code = getattr(automl.TypeCode, new_column_type) - column_updated = True - if column_updated: - automl_client.update_column_spec(column_spec=column_spec) - - if target_column_name: - logging.info('Setting target column') - if not target_column_spec: - raise ValueError(f'Primary table does not have column "{target_column_name}"') - target_column_spec_id = target_column_spec.name.split('/')[-1] - dataset.tables_dataset_metadata.target_column_spec_id = target_column_spec_id - dataset = automl_client.update_dataset(dataset=dataset) - - return (dataset.name, dataset_web_url) - - -if __name__ == '__main__': - automl_create_tables_dataset_from_csv_op = create_component_from_func( - automl_create_tables_dataset_from_csv, - base_image='python:3.8', - packages_to_install=['google-cloud-automl==2.0.0', 'google-cloud-storage==1.31.2', 'google-auth==1.21.3'], - output_component_file='component.yaml', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/google-cloud/AutoML/Tables/Create_dataset/from_CSV/component.yaml", - }, - ) diff --git a/components/contrib/google-cloud/automl/Tables/Create_dataset/from_CSV/component.yaml b/components/contrib/google-cloud/automl/Tables/Create_dataset/from_CSV/component.yaml deleted file mode 100644 index 64435d464c3..00000000000 --- a/components/contrib/google-cloud/automl/Tables/Create_dataset/from_CSV/component.yaml +++ /dev/null @@ -1,274 +0,0 @@ -name: Automl create tables dataset from csv -description: Creates Google Cloud AutoML Tables Dataset from CSV data. 
-inputs: -- {name: data, type: CSV, description: Data in CSV format that will be imported to - the dataset.} -- {name: target_column_name, type: String, description: Name of the target column - for training., optional: true} -- {name: column_nullability, type: JsonObject, description: Maps column name to boolean - specifying whether the column should be marked as nullable., default: '{}', optional: true} -- {name: column_types, type: JsonObject, description: 'Maps column name to column - type. Supported types: FLOAT64, CATEGORY, STRING.', default: '{}', optional: true} -- {name: gcs_staging_uri, type: String, description: 'URI of the data staging location - in Google Cloud Storage. The bucket must have the us-central1 region. If not specified, - a new staging bucket will be created.', optional: true} -- {name: gcp_project_id, type: String, description: 'Google Cloud project ID. If not - set, the default one will be used.', optional: true} -- {name: gcp_region, type: String, description: Google Cloud region. AutoML Tables - only supports us-central1., default: us-central1, optional: true} -outputs: -- {name: dataset_name, type: String} -- {name: dataset_url, type: URI} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/google-cloud/AutoML/Tables/Create_dataset/from_CSV/component.yaml' -implementation: - container: - image: python:3.8 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'google-cloud-automl==2.0.0' 'google-cloud-storage==1.31.2' 'google-auth==1.21.3' - || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'google-cloud-automl==2.0.0' 'google-cloud-storage==1.31.2' 'google-auth==1.21.3' - --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def automl_create_tables_dataset_from_csv( - data_path, - target_column_name = None, - column_nullability = {}, - column_types = {}, - gcs_staging_uri = None, # Currently AutoML Tables only supports regional buckets in "us-central1". - gcp_project_id = None, - gcp_region = 'us-central1', # Currently "us-central1" is the only region supported by AutoML tables. - ): - '''Creates Google Cloud AutoML Tables Dataset from CSV data. - - Annotations: - author: Alexey Volkov - - Args: - data_path: Data in CSV format that will be imported to the dataset. - target_column_name: Name of the target column for training. - column_nullability: Maps column name to boolean specifying whether the column should be marked as nullable. - column_types: Maps column name to column type. Supported types: FLOAT64, CATEGORY, STRING. - gcs_staging_uri: URI of the data staging location in Google Cloud Storage. The bucket must have the us-central1 region. If not specified, a new staging bucket will be created. - gcp_project_id: Google Cloud project ID. If not set, the default one will be used. - gcp_region: Google Cloud region. AutoML Tables only supports us-central1. 
- Returns: - dataset_name: AutoML dataset name (fully-qualified) - ''' - - import logging - import random - - import google.auth - from google.cloud import automl_v1beta1 as automl - from google.cloud import storage - - logging.getLogger().setLevel(logging.INFO) - - # Validating and inferring the arguments - - if not gcp_project_id: - _, gcp_project_id = google.auth.default() - - if not gcp_region: - gcp_region = 'us-central1' - if gcp_region != 'us-central1': - logging.warn('AutoML only supports the us-central1 region') - - dataset_display_name = 'Dataset' # Allowed characters for displayName are ASCII Latin letters A-Z and a-z, an underscore (_), and ASCII digits 0-9 - - column_nullability = column_nullability or {} - for name, nullability in column_nullability.items(): - assert isinstance(name, str) - assert isinstance(nullability, bool) - - column_types = column_types or {} - for name, data_type in column_types.items(): - assert isinstance(name, str) - if not hasattr(automl.TypeCode, data_type): - supported_types = [type_name for type_name in dir(automl.TypeCode) if type_name[0] != '_'] - raise ValueError(f'Unknow column type "{data_type}". Supported types: {supported_types}') - - # Generating execution ID for data staging - random_integer = random.SystemRandom().getrandbits(256) - execution_id = '{:064x}'.format(random_integer) - logging.info(f'Execution ID: {execution_id}') - - logging.info('Uploading the data to storage') - # TODO: Split table into < 100MB chunks as required by AutoML Tables - storage_client = storage.Client() - if gcs_staging_uri: - if not gcs_staging_uri.startswith('gs://'): - raise ValueError(f"Invalid staging storage URI: {gcs_staging_uri}") - (bucket_name, blob_prefix) = gcs_staging_uri[5:].split('/', 1) - bucket = storage_client.get_bucket(bucket_name) - else: - bucket_name = gcp_project_id + '_staging_' + gcp_region - try: - bucket = storage_client.get_bucket(bucket_name) - except Exception as ex: - logging.info(f'Creating Storage bucket {bucket_name}') - bucket = storage_client.create_bucket( - bucket_or_name=bucket_name, - project=gcp_project_id, - location=gcp_region, - ) - logging.info(f'Created Storage bucket {bucket.name}') - blob_prefix = 'google.cloud.automl_tmp' - - # AutoML Tables import data requires that "the file name must have a (case-insensitive) '.CSV' file extension" - training_data_blob_name = blob_prefix.rstrip('/') + '/' + execution_id + '/' + 'training_data.csv' - training_data_blob_uri = f'gs://{bucket.name}/{training_data_blob_name}' - training_data_blob = bucket.blob(training_data_blob_name) - logging.info(f'Uploading training data to {training_data_blob_uri}') - training_data_blob.upload_from_filename(data_path) - - logging.info(f'Creating AutoML Tables dataset.') - automl_client = automl.AutoMlClient() - - project_location_path = f'projects/{gcp_project_id}/locations/{gcp_region}' - - dataset = automl.Dataset( - display_name=dataset_display_name, - tables_dataset_metadata=automl.TablesDatasetMetadata(), - # labels={}, - ) - dataset = automl_client.create_dataset( - dataset=dataset, - parent=project_location_path, - ) - dataset_id = dataset.name.split('/')[-1] - dataset_web_url = f'https://console.cloud.google.com/automl-tables/locations/{gcp_region}/datasets/{dataset_id}' - logging.info(f'Created dataset {dataset.name}. 
Link: {dataset_web_url}') - - logging.info(f'Importing data to the dataset: {dataset.name}.') - import_data_input_config = automl.InputConfig( - gcs_source=automl.GcsSource( - input_uris=[training_data_blob_uri], - ) - ) - import_data_response = automl_client.import_data( - name=dataset.name, - input_config=import_data_input_config, - ) - import_data_response.result() - dataset = automl_client.get_dataset( - name=dataset.name, - ) - logging.info(f'Finished importing data.') - - logging.info('Updating column specs') - target_column_spec = None - primary_table_spec_name = dataset.name + '/tableSpecs/' + dataset.tables_dataset_metadata.primary_table_spec_id - table_specs_list = list(automl_client.list_table_specs( - parent=dataset.name, - )) - for table_spec in table_specs_list: - table_spec_id = table_spec.name.split('/')[-1] - column_specs_list = list(automl_client.list_column_specs( - parent=table_spec.name, - )) - is_primary_table = table_spec.name == primary_table_spec_name - for column_spec in column_specs_list: - if column_spec.display_name == target_column_name and is_primary_table: - target_column_spec = column_spec - column_updated = False - if column_spec.display_name in column_nullability: - column_spec.data_type.nullable = column_nullability[column_spec.display_name] - column_updated = True - if column_spec.display_name in column_types: - new_column_type = column_types[column_spec.display_name] - column_spec.data_type.type_code = getattr(automl.TypeCode, new_column_type) - column_updated = True - if column_updated: - automl_client.update_column_spec(column_spec=column_spec) - - if target_column_name: - logging.info('Setting target column') - if not target_column_spec: - raise ValueError(f'Primary table does not have column "{target_column_name}"') - target_column_spec_id = target_column_spec.name.split('/')[-1] - dataset.tables_dataset_metadata.target_column_spec_id = target_column_spec_id - dataset = automl_client.update_dataset(dataset=dataset) - - return (dataset.name, dataset_web_url) - - def _serialize_str(str_value: str) -> str: - if not isinstance(str_value, str): - raise TypeError('Value "{}" has type "{}" instead of str.'.format(str(str_value), str(type(str_value)))) - return str_value - - import json - import argparse - _parser = argparse.ArgumentParser(prog='Automl create tables dataset from csv', description='Creates Google Cloud AutoML Tables Dataset from CSV data.') - _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--target-column-name", dest="target_column_name", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--column-nullability", dest="column_nullability", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--column-types", dest="column_types", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--gcs-staging-uri", dest="gcs_staging_uri", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--gcp-project-id", dest="gcp_project_id", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--gcp-region", dest="gcp_region", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=2) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = automl_create_tables_dataset_from_csv(**_parsed_args) - - _output_serializers = [ - 
_serialize_str, - str, - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - --data - - {inputPath: data} - - if: - cond: {isPresent: target_column_name} - then: - - --target-column-name - - {inputValue: target_column_name} - - if: - cond: {isPresent: column_nullability} - then: - - --column-nullability - - {inputValue: column_nullability} - - if: - cond: {isPresent: column_types} - then: - - --column-types - - {inputValue: column_types} - - if: - cond: {isPresent: gcs_staging_uri} - then: - - --gcs-staging-uri - - {inputValue: gcs_staging_uri} - - if: - cond: {isPresent: gcp_project_id} - then: - - --gcp-project-id - - {inputValue: gcp_project_id} - - if: - cond: {isPresent: gcp_region} - then: - - --gcp-region - - {inputValue: gcp_region} - - '----output-paths' - - {outputPath: dataset_name} - - {outputPath: dataset_url} diff --git a/components/contrib/google-cloud/automl/create_dataset_for_tables/component.py b/components/contrib/google-cloud/automl/create_dataset_for_tables/component.py deleted file mode 100644 index 085e84f349f..00000000000 --- a/components/contrib/google-cloud/automl/create_dataset_for_tables/component.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from typing import NamedTuple - - -def automl_create_dataset_for_tables( - gcp_project_id: str, - gcp_region: str, - display_name: str, - description: str = None, - tables_dataset_metadata: dict = {}, - retry=None, #=google.api_core.gapic_v1.method.DEFAULT, - timeout: float = None, #=google.api_core.gapic_v1.method.DEFAULT, - metadata: dict = None, -) -> NamedTuple('Outputs', [('dataset_path', str), ('create_time', str), ('dataset_id', str), ('dataset_url', 'URI')]): - '''automl_create_dataset_for_tables creates an empty Dataset for AutoML tables - ''' - import google - from google.cloud import automl - client = automl.AutoMlClient() - - location_path = client.location_path(gcp_project_id, gcp_region) - dataset_dict = { - 'display_name': display_name, - 'description': description, - 'tables_dataset_metadata': tables_dataset_metadata, - } - dataset = client.create_dataset( - location_path, - dataset_dict, - retry or google.api_core.gapic_v1.method.DEFAULT, - timeout or google.api_core.gapic_v1.method.DEFAULT, - metadata, - ) - print(dataset) - dataset_id = dataset.name.rsplit('/', 1)[-1] - dataset_url = 'https://console.cloud.google.com/automl-tables/locations/{region}/datasets/{dataset_id}/schemav2?project={project_id}'.format( - project_id=gcp_project_id, - region=gcp_region, - dataset_id=dataset_id, - ) - return (dataset.name, str(dataset.create_time), dataset_id, dataset_url) - - -if __name__ == '__main__': - from kfp.components import create_component_from_func - - automl_create_dataset_for_tables_op = create_component_from_func( - automl_create_dataset_for_tables, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=['google-cloud-automl==0.4.0'], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/gcp/automl/create_dataset_for_tables/component.yaml", - }, - ) diff --git a/components/contrib/google-cloud/automl/create_dataset_for_tables/component.yaml b/components/contrib/google-cloud/automl/create_dataset_for_tables/component.yaml deleted file mode 100644 index 4e4f314f0da..00000000000 --- a/components/contrib/google-cloud/automl/create_dataset_for_tables/component.yaml +++ /dev/null @@ -1,148 +0,0 @@ -name: Automl create dataset for tables -description: automl_create_dataset_for_tables creates an empty Dataset for AutoML - tables -inputs: -- {name: gcp_project_id, type: String} -- {name: gcp_region, type: String} -- {name: display_name, type: String} -- {name: description, type: String, optional: true} -- {name: tables_dataset_metadata, type: JsonObject, default: '{}', optional: true} -- {name: retry, optional: true} -- {name: timeout, type: Float, optional: true} -- {name: metadata, type: JsonObject, optional: true} -outputs: -- {name: dataset_path, type: String} -- {name: create_time, type: String} -- {name: dataset_id, type: String} -- {name: dataset_url, type: URI} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/gcp/automl/create_dataset_for_tables/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'google-cloud-automl==0.4.0' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip - install --quiet --no-warn-script-location 'google-cloud-automl==0.4.0' --user) - && "$0" "$@" - - python3 - - -u - - -c - - | - 
def automl_create_dataset_for_tables( - gcp_project_id , - gcp_region , - display_name , - description = None, - tables_dataset_metadata = {}, - retry=None, #=google.api_core.gapic_v1.method.DEFAULT, - timeout = None, #=google.api_core.gapic_v1.method.DEFAULT, - metadata = None, - ) : - '''automl_create_dataset_for_tables creates an empty Dataset for AutoML tables - ''' - import google - from google.cloud import automl - client = automl.AutoMlClient() - - location_path = client.location_path(gcp_project_id, gcp_region) - dataset_dict = { - 'display_name': display_name, - 'description': description, - 'tables_dataset_metadata': tables_dataset_metadata, - } - dataset = client.create_dataset( - location_path, - dataset_dict, - retry or google.api_core.gapic_v1.method.DEFAULT, - timeout or google.api_core.gapic_v1.method.DEFAULT, - metadata, - ) - print(dataset) - dataset_id = dataset.name.rsplit('/', 1)[-1] - dataset_url = 'https://console.cloud.google.com/automl-tables/locations/{region}/datasets/{dataset_id}/schemav2?project={project_id}'.format( - project_id=gcp_project_id, - region=gcp_region, - dataset_id=dataset_id, - ) - return (dataset.name, str(dataset.create_time), dataset_id, dataset_url) - - import json - def _serialize_str(str_value: str) -> str: - if not isinstance(str_value, str): - raise TypeError('Value "{}" has type "{}" instead of str.'.format(str(str_value), str(type(str_value)))) - return str_value - - import argparse - _parser = argparse.ArgumentParser(prog='Automl create dataset for tables', description='automl_create_dataset_for_tables creates an empty Dataset for AutoML tables') - _parser.add_argument("--gcp-project-id", dest="gcp_project_id", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--gcp-region", dest="gcp_region", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--display-name", dest="display_name", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--description", dest="description", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--tables-dataset-metadata", dest="tables_dataset_metadata", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--retry", dest="retry", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--timeout", dest="timeout", type=float, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--metadata", dest="metadata", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=4) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = automl_create_dataset_for_tables(**_parsed_args) - - _output_serializers = [ - _serialize_str, - _serialize_str, - _serialize_str, - str, - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - --gcp-project-id - - {inputValue: gcp_project_id} - - --gcp-region - - {inputValue: gcp_region} - - --display-name - - {inputValue: display_name} - - if: - cond: {isPresent: description} - then: - - --description - - {inputValue: description} - - if: - cond: {isPresent: tables_dataset_metadata} - then: - - --tables-dataset-metadata - - {inputValue: tables_dataset_metadata} - - if: - cond: {isPresent: retry} - then: - 
- --retry - - {inputValue: retry} - - if: - cond: {isPresent: timeout} - then: - - --timeout - - {inputValue: timeout} - - if: - cond: {isPresent: metadata} - then: - - --metadata - - {inputValue: metadata} - - '----output-paths' - - {outputPath: dataset_path} - - {outputPath: create_time} - - {outputPath: dataset_id} - - {outputPath: dataset_url} diff --git a/components/contrib/google-cloud/automl/create_model_for_tables/component.py b/components/contrib/google-cloud/automl/create_model_for_tables/component.py deleted file mode 100644 index f4798cb33d6..00000000000 --- a/components/contrib/google-cloud/automl/create_model_for_tables/component.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import NamedTuple - - -def automl_create_model_for_tables( - gcp_project_id: str, - gcp_region: str, - display_name: str, - dataset_id: str, - target_column_path: str = None, - input_feature_column_paths: list = None, - optimization_objective: str = 'MAXIMIZE_AU_PRC', - train_budget_milli_node_hours: int = 1000, -) -> NamedTuple('Outputs', [('model_path', str), ('model_id', str), ('model_page_url', 'URI'),]): - from google.cloud import automl - client = automl.AutoMlClient() - - location_path = client.location_path(gcp_project_id, gcp_region) - model_dict = { - 'display_name': display_name, - 'dataset_id': dataset_id, - 'tables_model_metadata': { - 'target_column_spec': automl.types.ColumnSpec(name=target_column_path), - 'input_feature_column_specs': [automl.types.ColumnSpec(name=path) for path in input_feature_column_paths] if input_feature_column_paths else None, - 'optimization_objective': optimization_objective, - 'train_budget_milli_node_hours': train_budget_milli_node_hours, - }, - } - - create_model_response = client.create_model(location_path, model_dict) - print('Create model operation: {}'.format(create_model_response.operation)) - result = create_model_response.result() - print(result) - model_name = result.name - model_id = model_name.rsplit('/', 1)[-1] - model_url = 'https://console.cloud.google.com/automl-tables/locations/{region}/datasets/{dataset_id};modelId={model_id};task=basic/train?project={project_id}'.format( - project_id=gcp_project_id, - region=gcp_region, - dataset_id=dataset_id, - model_id=model_id, - ) - - return (model_name, model_id, model_url) - - -if __name__ == '__main__': - from kfp.components import create_component_from_func - - automl_create_model_for_tables_op = create_component_from_func( - automl_create_model_for_tables, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=['google-cloud-automl==0.4.0'], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/gcp/automl/create_model_for_tables/component.yaml", - }, - ) diff --git a/components/contrib/google-cloud/automl/create_model_for_tables/component.yaml 
b/components/contrib/google-cloud/automl/create_model_for_tables/component.yaml deleted file mode 100644 index d14440ad04d..00000000000 --- a/components/contrib/google-cloud/automl/create_model_for_tables/component.yaml +++ /dev/null @@ -1,142 +0,0 @@ -name: Automl create model for tables -inputs: -- {name: gcp_project_id, type: String} -- {name: gcp_region, type: String} -- {name: display_name, type: String} -- {name: dataset_id, type: String} -- {name: target_column_path, type: String, optional: true} -- {name: input_feature_column_paths, type: JsonArray, optional: true} -- {name: optimization_objective, type: String, default: MAXIMIZE_AU_PRC, optional: true} -- {name: train_budget_milli_node_hours, type: Integer, default: '1000', optional: true} -outputs: -- {name: model_path, type: String} -- {name: model_id, type: String} -- {name: model_page_url, type: URI} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/gcp/automl/create_model_for_tables/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'google-cloud-automl==0.4.0' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip - install --quiet --no-warn-script-location 'google-cloud-automl==0.4.0' --user) - && "$0" "$@" - - python3 - - -u - - -c - - | - def automl_create_model_for_tables( - gcp_project_id , - gcp_region , - display_name , - dataset_id , - target_column_path = None, - input_feature_column_paths = None, - optimization_objective = 'MAXIMIZE_AU_PRC', - train_budget_milli_node_hours = 1000, - ) : - from google.cloud import automl - client = automl.AutoMlClient() - - location_path = client.location_path(gcp_project_id, gcp_region) - model_dict = { - 'display_name': display_name, - 'dataset_id': dataset_id, - 'tables_model_metadata': { - 'target_column_spec': automl.types.ColumnSpec(name=target_column_path), - 'input_feature_column_specs': [automl.types.ColumnSpec(name=path) for path in input_feature_column_paths] if input_feature_column_paths else None, - 'optimization_objective': optimization_objective, - 'train_budget_milli_node_hours': train_budget_milli_node_hours, - }, - } - - create_model_response = client.create_model(location_path, model_dict) - print('Create model operation: {}'.format(create_model_response.operation)) - result = create_model_response.result() - print(result) - model_name = result.name - model_id = model_name.rsplit('/', 1)[-1] - model_url = 'https://console.cloud.google.com/automl-tables/locations/{region}/datasets/{dataset_id};modelId={model_id};task=basic/train?project={project_id}'.format( - project_id=gcp_project_id, - region=gcp_region, - dataset_id=dataset_id, - model_id=model_id, - ) - - return (model_name, model_id, model_url) - - def _serialize_str(str_value: str) -> str: - if not isinstance(str_value, str): - raise TypeError('Value "{}" has type "{}" instead of str.'.format(str(str_value), str(type(str_value)))) - return str_value - - import json - import argparse - _parser = argparse.ArgumentParser(prog='Automl create model for tables', description='') - _parser.add_argument("--gcp-project-id", dest="gcp_project_id", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--gcp-region", dest="gcp_region", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--display-name", dest="display_name", type=str, 
required=True, default=argparse.SUPPRESS) - _parser.add_argument("--dataset-id", dest="dataset_id", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--target-column-path", dest="target_column_path", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--input-feature-column-paths", dest="input_feature_column_paths", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--optimization-objective", dest="optimization_objective", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--train-budget-milli-node-hours", dest="train_budget_milli_node_hours", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=3) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = automl_create_model_for_tables(**_parsed_args) - - _output_serializers = [ - _serialize_str, - _serialize_str, - str, - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - --gcp-project-id - - {inputValue: gcp_project_id} - - --gcp-region - - {inputValue: gcp_region} - - --display-name - - {inputValue: display_name} - - --dataset-id - - {inputValue: dataset_id} - - if: - cond: {isPresent: target_column_path} - then: - - --target-column-path - - {inputValue: target_column_path} - - if: - cond: {isPresent: input_feature_column_paths} - then: - - --input-feature-column-paths - - {inputValue: input_feature_column_paths} - - if: - cond: {isPresent: optimization_objective} - then: - - --optimization-objective - - {inputValue: optimization_objective} - - if: - cond: {isPresent: train_budget_milli_node_hours} - then: - - --train-budget-milli-node-hours - - {inputValue: train_budget_milli_node_hours} - - '----output-paths' - - {outputPath: model_path} - - {outputPath: model_id} - - {outputPath: model_page_url} diff --git a/components/contrib/google-cloud/automl/deploy_model/component.py b/components/contrib/google-cloud/automl/deploy_model/component.py deleted file mode 100644 index eeb8ad2e388..00000000000 --- a/components/contrib/google-cloud/automl/deploy_model/component.py +++ /dev/null @@ -1,44 +0,0 @@ -from typing import NamedTuple -from kfp.components import create_component_from_func - - -def automl_deploy_model( - model_path: str, -) -> NamedTuple('Outputs', [ - ('model_path', str), -]): - """Deploys a trained model. - - Args: - model_path: The resource name of the model to export. 
Format: 'projects//locations//models/' - - Annotations: - author: Alexey Volkov - """ - from google.cloud import automl - client = automl.AutoMlClient() - response = client.deploy_model( - name=model_path, - ) - print('Operation started:') - print(response.operation) - result = response.result() - metadata = response.metadata - print('Operation finished:') - print(metadata) - return (model_path, ) - - -if __name__ == '__main__': - automl_deploy_model_op = create_component_from_func( - automl_deploy_model, - output_component_file='component.yaml', - base_image='python:3.8', - packages_to_install=[ - 'google-cloud-automl==2.0.0', - ], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/gcp/automl/deploy_model/component.yaml", - }, - ) diff --git a/components/contrib/google-cloud/automl/deploy_model/component.yaml b/components/contrib/google-cloud/automl/deploy_model/component.yaml deleted file mode 100644 index 5ab6897d3a7..00000000000 --- a/components/contrib/google-cloud/automl/deploy_model/component.yaml +++ /dev/null @@ -1,87 +0,0 @@ -name: Automl deploy model -description: |- - Deploys a trained model. - - Args: - model_path: The resource name of the model to export. Format: 'projects//locations//models/' - - Annotations: - author: Alexey Volkov -inputs: -- {name: model_path, type: String} -outputs: -- {name: model_path, type: String} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/gcp/automl/deploy_model/component.yaml' -implementation: - container: - image: python:3.8 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'google-cloud-automl==2.0.0' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip - install --quiet --no-warn-script-location 'google-cloud-automl==2.0.0' --user) - && "$0" "$@" - - python3 - - -u - - -c - - | - def automl_deploy_model( - model_path, - ): - """Deploys a trained model. - - Args: - model_path: The resource name of the model to export. Format: 'projects//locations//models/' - - Annotations: - author: Alexey Volkov - """ - from google.cloud import automl - client = automl.AutoMlClient() - response = client.deploy_model( - name=model_path, - ) - print('Operation started:') - print(response.operation) - result = response.result() - metadata = response.metadata - print('Operation finished:') - print(metadata) - return (model_path, ) - - def _serialize_str(str_value: str) -> str: - if not isinstance(str_value, str): - raise TypeError('Value "{}" has type "{}" instead of str.'.format(str(str_value), str(type(str_value)))) - return str_value - - import argparse - _parser = argparse.ArgumentParser(prog='Automl deploy model', description="Deploys a trained model.\n\n Args:\n model_path: The resource name of the model to export. 
Format: 'projects//locations//models/'\n\n Annotations:\n author: Alexey Volkov ") - _parser.add_argument("--model-path", dest="model_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = automl_deploy_model(**_parsed_args) - - _output_serializers = [ - _serialize_str, - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - --model-path - - {inputValue: model_path} - - '----output-paths' - - {outputPath: model_path} diff --git a/components/contrib/google-cloud/automl/export_data_to_gcs/component.py b/components/contrib/google-cloud/automl/export_data_to_gcs/component.py deleted file mode 100644 index f8323b78f35..00000000000 --- a/components/contrib/google-cloud/automl/export_data_to_gcs/component.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import NamedTuple - - -def automl_export_data_to_gcs( - dataset_path: str, - gcs_output_uri_prefix: str = None, - #retry=None, #=google.api_core.gapic_v1.method.DEFAULT, - timeout: float = None, #=google.api_core.gapic_v1.method.DEFAULT, - metadata: dict = {}, -) -> NamedTuple('Outputs', [('gcs_output_uri_prefix', str)]): - """Exports dataset data to GCS.""" - import sys - import subprocess - subprocess.run([sys.executable, "-m", "pip", "install", "google-cloud-automl==0.4.0", "--quiet", "--no-warn-script-location"], env={"PIP_DISABLE_PIP_VERSION_CHECK": "1"}, check=True) - - import google - from google.cloud import automl - client = automl.AutoMlClient() - - output_config = {"gcs_destination": {"output_uri_prefix": gcs_output_uri_prefix}} - - response = client.export_data( - name=dataset_path, - output_config=output_config, - #retry=retry or google.api_core.gapic_v1.method.DEFAULT - timeout=timeout or google.api_core.gapic_v1.method.DEFAULT, - metadata=metadata, - ) - print('Operation started:') - print(response.operation) - result = response.result() - metadata = response.metadata - print('Operation finished:') - print(metadata) - return (gcs_output_uri_prefix, ) - -if __name__ == '__main__': - from kfp.components import create_component_from_func - - automl_export_data_to_gcs_op = create_component_from_func( - automl_export_data_to_gcs, - output_component_file='component.yaml',base_image='python:3.7', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/gcp/automl/export_data_to_gcs/component.yaml", - }, - ) diff --git a/components/contrib/google-cloud/automl/export_data_to_gcs/component.yaml b/components/contrib/google-cloud/automl/export_data_to_gcs/component.yaml deleted file 
mode 100644 index 6b83078a6c5..00000000000 --- a/components/contrib/google-cloud/automl/export_data_to_gcs/component.yaml +++ /dev/null @@ -1,117 +0,0 @@ -name: Automl export data to gcs -description: | - Exports dataset data to GCS. -inputs: -- name: dataset_path - type: String -- name: gcs_output_uri_prefix - optional: true - type: String -- name: timeout - optional: true - type: Float -- default: '{}' - name: metadata - optional: true - type: JsonObject -outputs: -- name: gcs_output_uri_prefix - type: String -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/gcp/automl/export_data_to_gcs/component.yaml' -implementation: - container: - image: python:3.7 - command: - - python3 - - -u - - -c - - | - from typing import NamedTuple - - def automl_export_data_to_gcs( - dataset_path: str, - gcs_output_uri_prefix: str = None, - #retry=None, #=google.api_core.gapic_v1.method.DEFAULT, - timeout: float = None, #=google.api_core.gapic_v1.method.DEFAULT, - metadata: dict = {}, - ) -> NamedTuple('Outputs', [('gcs_output_uri_prefix', str)]): - """Exports dataset data to GCS.""" - import sys - import subprocess - subprocess.run([sys.executable, "-m", "pip", "install", "google-cloud-automl==0.4.0", "--quiet", "--no-warn-script-location"], env={"PIP_DISABLE_PIP_VERSION_CHECK": "1"}, check=True) - - import google - from google.cloud import automl - client = automl.AutoMlClient() - - output_config = {"gcs_destination": {"output_uri_prefix": gcs_output_uri_prefix}} - - response = client.export_data( - name=dataset_path, - output_config=output_config, - #retry=retry or google.api_core.gapic_v1.method.DEFAULT - timeout=timeout or google.api_core.gapic_v1.method.DEFAULT, - metadata=metadata, - ) - print('Operation started:') - print(response.operation) - result = response.result() - metadata = response.metadata - print('Operation finished:') - print(metadata) - return (gcs_output_uri_prefix, ) - - import json - import argparse - _parser = argparse.ArgumentParser(prog='Automl export data to gcs', description='Exports dataset data to GCS.\n') - _parser.add_argument("--dataset-path", dest="dataset_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--gcs-output-uri-prefix", dest="gcs_output_uri_prefix", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--timeout", dest="timeout", type=float, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--metadata", dest="metadata", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = automl_export_data_to_gcs(**_parsed_args) - - if not hasattr(_outputs, '__getitem__') or isinstance(_outputs, str): - _outputs = [_outputs] - - _output_serializers = [ - str - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - --dataset-path - - inputValue: dataset_path - - if: - cond: - isPresent: gcs_output_uri_prefix - then: - - --gcs-output-uri-prefix - - inputValue: gcs_output_uri_prefix - - if: - cond: - isPresent: timeout - then: - - --timeout - - inputValue: timeout - - if: - cond: - isPresent: metadata - then: - - 
--metadata - - inputValue: metadata - - '----output-paths' - - outputPath: gcs_output_uri_prefix diff --git a/components/contrib/google-cloud/automl/export_model_to_gcs/component.py b/components/contrib/google-cloud/automl/export_model_to_gcs/component.py deleted file mode 100644 index 405cd4cec36..00000000000 --- a/components/contrib/google-cloud/automl/export_model_to_gcs/component.py +++ /dev/null @@ -1,56 +0,0 @@ -from typing import NamedTuple -from kfp.components import create_component_from_func - - -def automl_export_model_to_gcs( - model_path: str, - gcs_output_uri_prefix: str, - model_format: str = 'tf_saved_model', -) -> NamedTuple('Outputs', [ - ('model_directory', 'Uri'), -]): - """Exports a trained model to a user specified Google Cloud Storage location. - - Args: - model_path: The resource name of the model to export. Format: 'projects//locations//models/' - gcs_output_uri_prefix: The Google Cloud Storage directory where the model should be written to. Must be in the same location as AutoML. Required location: us-central1. - model_format: The format in which the model must be exported. The available, and default, formats depend on the problem and model type. Possible formats: tf_saved_model, tf_js, tflite, core_ml, edgetpu_tflite. See https://cloud.google.com/automl/docs/reference/rest/v1/projects.locations.models/export?hl=en#modelexportoutputconfig - - Annotations: - author: Alexey Volkov - """ - from google.cloud import automl - - client = automl.AutoMlClient() - response = client.export_model( - name=model_path, - output_config=automl.ModelExportOutputConfig( - model_format=model_format, - gcs_destination=automl.GcsDestination( - output_uri_prefix=gcs_output_uri_prefix, - ), - ), - ) - - print('Operation started:') - print(response.operation) - result = response.result() - metadata = response.metadata - print('Operation finished:') - print(metadata) - return (metadata.export_model_details.output_info.gcs_output_directory, ) - - -if __name__ == '__main__': - automl_export_model_to_gcs_op = create_component_from_func( - automl_export_model_to_gcs, - output_component_file='component.yaml', - base_image='python:3.8', - packages_to_install=[ - 'google-cloud-automl==2.0.0', - ], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/gcp/automl/export_model_to_gcs/component.yaml", - }, - ) diff --git a/components/contrib/google-cloud/automl/export_model_to_gcs/component.yaml b/components/contrib/google-cloud/automl/export_model_to_gcs/component.yaml deleted file mode 100644 index ac7cd08c943..00000000000 --- a/components/contrib/google-cloud/automl/export_model_to_gcs/component.yaml +++ /dev/null @@ -1,107 +0,0 @@ -name: Automl export model to gcs -description: |- - Exports a trained model to a user specified Google Cloud Storage location. - - Args: - model_path: The resource name of the model to export. Format: 'projects//locations//models/' - gcs_output_uri_prefix: The Google Cloud Storage directory where the model should be written to. Must be in the same location as AutoML. Required location: us-central1. - model_format: The format in which the model must be exported. The available, and default, formats depend on the problem and model type. Possible formats: tf_saved_model, tf_js, tflite, core_ml, edgetpu_tflite. 
See https://cloud.google.com/automl/docs/reference/rest/v1/projects.locations.models/export?hl=en#modelexportoutputconfig - - Annotations: - author: Alexey Volkov -inputs: -- {name: model_path, type: String} -- {name: gcs_output_uri_prefix, type: String} -- {name: model_format, type: String, default: tf_saved_model, optional: true} -outputs: -- {name: model_directory, type: Uri} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/gcp/automl/export_model_to_gcs/component.yaml' -implementation: - container: - image: python:3.8 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'google-cloud-automl==2.0.0' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip - install --quiet --no-warn-script-location 'google-cloud-automl==2.0.0' --user) - && "$0" "$@" - - python3 - - -u - - -c - - | - def automl_export_model_to_gcs( - model_path, - gcs_output_uri_prefix, - model_format = 'tf_saved_model', - ): - """Exports a trained model to a user specified Google Cloud Storage location. - - Args: - model_path: The resource name of the model to export. Format: 'projects//locations//models/' - gcs_output_uri_prefix: The Google Cloud Storage directory where the model should be written to. Must be in the same location as AutoML. Required location: us-central1. - model_format: The format in which the model must be exported. The available, and default, formats depend on the problem and model type. Possible formats: tf_saved_model, tf_js, tflite, core_ml, edgetpu_tflite. See https://cloud.google.com/automl/docs/reference/rest/v1/projects.locations.models/export?hl=en#modelexportoutputconfig - - Annotations: - author: Alexey Volkov - """ - from google.cloud import automl - - client = automl.AutoMlClient() - response = client.export_model( - name=model_path, - output_config=automl.ModelExportOutputConfig( - model_format=model_format, - gcs_destination=automl.GcsDestination( - output_uri_prefix=gcs_output_uri_prefix, - ), - ), - ) - - print('Operation started:') - print(response.operation) - result = response.result() - metadata = response.metadata - print('Operation finished:') - print(metadata) - return (metadata.export_model_details.output_info.gcs_output_directory, ) - - import argparse - _parser = argparse.ArgumentParser(prog='Automl export model to gcs', description="Exports a trained model to a user specified Google Cloud Storage location.\n\n Args:\n model_path: The resource name of the model to export. Format: 'projects//locations//models/'\n gcs_output_uri_prefix: The Google Cloud Storage directory where the model should be written to. Must be in the same location as AutoML. Required location: us-central1.\n model_format: The format in which the model must be exported. The available, and default, formats depend on the problem and model type. Possible formats: tf_saved_model, tf_js, tflite, core_ml, edgetpu_tflite. 
See https://cloud.google.com/automl/docs/reference/rest/v1/projects.locations.models/export?hl=en#modelexportoutputconfig\n\n Annotations:\n author: Alexey Volkov ") - _parser.add_argument("--model-path", dest="model_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--gcs-output-uri-prefix", dest="gcs_output_uri_prefix", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--model-format", dest="model_format", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = automl_export_model_to_gcs(**_parsed_args) - - _output_serializers = [ - str, - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - --model-path - - {inputValue: model_path} - - --gcs-output-uri-prefix - - {inputValue: gcs_output_uri_prefix} - - if: - cond: {isPresent: model_format} - then: - - --model-format - - {inputValue: model_format} - - '----output-paths' - - {outputPath: model_directory} diff --git a/components/contrib/google-cloud/automl/import_data_from_bigquery/component.py b/components/contrib/google-cloud/automl/import_data_from_bigquery/component.py deleted file mode 100644 index 508803a475b..00000000000 --- a/components/contrib/google-cloud/automl/import_data_from_bigquery/component.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from typing import NamedTuple - - -def automl_import_data_from_bigquery( - dataset_path, - input_uri: str, - retry=None, #=google.api_core.gapic_v1.method.DEFAULT, - timeout=None, #=google.api_core.gapic_v1.method.DEFAULT, - metadata: dict = None, -) -> NamedTuple('Outputs', [('dataset_path', str)]): - import sys - import subprocess - subprocess.run([sys.executable, '-m', 'pip', 'install', 'google-cloud-automl==0.4.0', '--quiet', '--no-warn-script-location'], env={'PIP_DISABLE_PIP_VERSION_CHECK': '1'}, check=True) - - import google - from google.cloud import automl - client = automl.AutoMlClient() - input_config = { - 'bigquery_source': { - 'input_uri': input_uri, - }, - } - response = client.import_data( - dataset_path, - input_config, - retry or google.api_core.gapic_v1.method.DEFAULT, - timeout or google.api_core.gapic_v1.method.DEFAULT, - metadata, - ) - result = response.result() - print(result) - metadata = response.metadata - print(metadata) - return (dataset_path) - - -if __name__ == '__main__': - from kfp.components import create_component_from_func - automl_import_data_from_bigquery_op = create_component_from_func( - automl_import_data_from_bigquery, - output_component_file='component.yaml', - base_image='python:3.7', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/gcp/automl/import_data_from_bigquery/component.yaml", - }, - ) diff --git a/components/contrib/google-cloud/automl/import_data_from_bigquery/component.yaml b/components/contrib/google-cloud/automl/import_data_from_bigquery/component.yaml deleted file mode 100644 index 8da2ee82fd0..00000000000 --- a/components/contrib/google-cloud/automl/import_data_from_bigquery/component.yaml +++ /dev/null @@ -1,112 +0,0 @@ -name: Automl import data from bigquery -inputs: -- name: dataset_path -- name: input_uri - type: String -- name: retry - optional: true -- name: timeout - optional: true -- name: metadata - type: JsonObject - optional: true -outputs: -- name: dataset_path - type: String -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/gcp/automl/import_data_from_bigquery/component.yaml' -implementation: - container: - image: python:3.7 - command: - - python3 - - -u - - -c - - | - from typing import NamedTuple - - def automl_import_data_from_bigquery( - dataset_path, - input_uri: str, - retry=None, #=google.api_core.gapic_v1.method.DEFAULT, - timeout=None, #=google.api_core.gapic_v1.method.DEFAULT, - metadata: dict = None, - ) -> NamedTuple('Outputs', [('dataset_path', str)]): - import sys - import subprocess - subprocess.run([sys.executable, '-m', 'pip', 'install', 'google-cloud-automl==0.4.0', '--quiet', '--no-warn-script-location'], env={'PIP_DISABLE_PIP_VERSION_CHECK': '1'}, check=True) - - import google - from google.cloud import automl - client = automl.AutoMlClient() - input_config = { - 'bigquery_source': { - 'input_uri': input_uri, - }, - } - response = client.import_data( - dataset_path, - input_config, - retry or google.api_core.gapic_v1.method.DEFAULT, - timeout or google.api_core.gapic_v1.method.DEFAULT, - metadata, - ) - result = response.result() - print(result) - metadata = response.metadata - print(metadata) - return (dataset_path) - - import json - import argparse - _missing_arg = object() - _parser = argparse.ArgumentParser(prog='Automl import data from bigquery', description='') - 
_parser.add_argument("--dataset-path", dest="dataset_path", type=str, required=True, default=_missing_arg) - _parser.add_argument("--input-uri", dest="input_uri", type=str, required=True, default=_missing_arg) - _parser.add_argument("--retry", dest="retry", type=str, required=False, default=_missing_arg) - _parser.add_argument("--timeout", dest="timeout", type=str, required=False, default=_missing_arg) - _parser.add_argument("--metadata", dest="metadata", type=json.loads, required=False, default=_missing_arg) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) - _parsed_args = {k: v for k, v in vars(_parser.parse_args()).items() if v is not _missing_arg} - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = automl_import_data_from_bigquery(**_parsed_args) - - if not hasattr(_outputs, '__getitem__') or isinstance(_outputs, str): - _outputs = [_outputs] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(str(_outputs[idx])) - args: - - --dataset-path - - inputValue: dataset_path - - --input-uri - - inputValue: input_uri - - if: - cond: - isPresent: retry - then: - - --retry - - inputValue: retry - - if: - cond: - isPresent: timeout - then: - - --timeout - - inputValue: timeout - - if: - cond: - isPresent: metadata - then: - - --metadata - - inputValue: metadata - - '----output-paths' - - outputPath: dataset_path diff --git a/components/contrib/google-cloud/automl/import_data_from_gcs/component.py b/components/contrib/google-cloud/automl/import_data_from_gcs/component.py deleted file mode 100644 index e24eba764b7..00000000000 --- a/components/contrib/google-cloud/automl/import_data_from_gcs/component.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from typing import NamedTuple - - -def automl_import_data_from_gcs( - dataset_path: str, - input_uris: list, - retry=None, #=google.api_core.gapic_v1.method.DEFAULT, - timeout=None, #=google.api_core.gapic_v1.method.DEFAULT, - metadata: dict = None, -) -> NamedTuple('Outputs', [('dataset_path', str)]): - import sys - import subprocess - subprocess.run([sys.executable, '-m', 'pip', 'install', 'google-cloud-automl==0.4.0', '--quiet', '--no-warn-script-location'], env={'PIP_DISABLE_PIP_VERSION_CHECK': '1'}, check=True) - - import google - from google.cloud import automl - client = automl.AutoMlClient() - input_config = { - 'gcs_source': { - 'input_uris': input_uris, - }, - } - response = client.import_data( - dataset_path, - input_config, - retry or google.api_core.gapic_v1.method.DEFAULT, - timeout or google.api_core.gapic_v1.method.DEFAULT, - metadata, - ) - result = response.result() - print(result) - metadata = response.metadata - print(metadata) - return (dataset_path) - - -if __name__ == '__main__': - from kfp.components import create_component_from_func - - automl_import_data_from_gcs_op = create_component_from_func( - automl_import_data_from_gcs, - output_component_file='component.yaml', - base_image='python:3.7', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/gcp/automl/import_data_from_gcs/component.yaml", - }, - ) diff --git a/components/contrib/google-cloud/automl/import_data_from_gcs/component.yaml b/components/contrib/google-cloud/automl/import_data_from_gcs/component.yaml deleted file mode 100644 index 09a82a457f6..00000000000 --- a/components/contrib/google-cloud/automl/import_data_from_gcs/component.yaml +++ /dev/null @@ -1,113 +0,0 @@ -name: Automl import data from gcs -inputs: -- name: dataset_path - type: String -- name: input_uris - type: JsonArray -- name: retry - optional: true -- name: timeout - optional: true -- name: metadata - type: JsonObject - optional: true -outputs: -- name: dataset_path - type: String -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/gcp/automl/import_data_from_gcs/component.yaml' -implementation: - container: - image: python:3.7 - command: - - python3 - - -u - - -c - - | - from typing import NamedTuple - - def automl_import_data_from_gcs( - dataset_path: str, - input_uris: list, - retry=None, #=google.api_core.gapic_v1.method.DEFAULT, - timeout=None, #=google.api_core.gapic_v1.method.DEFAULT, - metadata: dict = None, - ) -> NamedTuple('Outputs', [('dataset_path', str)]): - import sys - import subprocess - subprocess.run([sys.executable, '-m', 'pip', 'install', 'google-cloud-automl==0.4.0', '--quiet', '--no-warn-script-location'], env={'PIP_DISABLE_PIP_VERSION_CHECK': '1'}, check=True) - - import google - from google.cloud import automl - client = automl.AutoMlClient() - input_config = { - 'gcs_source': { - 'input_uris': input_uris, - }, - } - response = client.import_data( - dataset_path, - input_config, - retry or google.api_core.gapic_v1.method.DEFAULT, - timeout or google.api_core.gapic_v1.method.DEFAULT, - metadata, - ) - result = response.result() - print(result) - metadata = response.metadata - print(metadata) - return (dataset_path) - - import json - import argparse - _missing_arg = object() - _parser = argparse.ArgumentParser(prog='Automl import data from gcs', description='') - _parser.add_argument("--dataset-path", 
dest="dataset_path", type=str, required=True, default=_missing_arg) - _parser.add_argument("--input-uris", dest="input_uris", type=json.loads, required=True, default=_missing_arg) - _parser.add_argument("--retry", dest="retry", type=str, required=False, default=_missing_arg) - _parser.add_argument("--timeout", dest="timeout", type=str, required=False, default=_missing_arg) - _parser.add_argument("--metadata", dest="metadata", type=json.loads, required=False, default=_missing_arg) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) - _parsed_args = {k: v for k, v in vars(_parser.parse_args()).items() if v is not _missing_arg} - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = automl_import_data_from_gcs(**_parsed_args) - - if not hasattr(_outputs, '__getitem__') or isinstance(_outputs, str): - _outputs = [_outputs] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(str(_outputs[idx])) - args: - - --dataset-path - - inputValue: dataset_path - - --input-uris - - inputValue: input_uris - - if: - cond: - isPresent: retry - then: - - --retry - - inputValue: retry - - if: - cond: - isPresent: timeout - then: - - --timeout - - inputValue: timeout - - if: - cond: - isPresent: metadata - then: - - --metadata - - inputValue: metadata - - '----output-paths' - - outputPath: dataset_path diff --git a/components/contrib/google-cloud/automl/prediction_service_batch_predict/component.py b/components/contrib/google-cloud/automl/prediction_service_batch_predict/component.py deleted file mode 100644 index b2d9d34dc02..00000000000 --- a/components/contrib/google-cloud/automl/prediction_service_batch_predict/component.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from typing import NamedTuple - - -def automl_prediction_service_batch_predict( - model_path, - gcs_input_uris: list = None, - gcs_output_uri_prefix: str = None, - bq_input_uri: str = None, - bq_output_uri: str = None, - params=None, - retry=None, #google.api_core.gapic_v1.method.DEFAULT, - timeout=None, #google.api_core.gapic_v1.method.DEFAULT, - metadata: dict = None, -) -> NamedTuple('Outputs', [('gcs_output_directory', str), ('bigquery_output_dataset', str)]): - import sys - import subprocess - subprocess.run([sys.executable, '-m', 'pip', 'install', 'google-cloud-automl==0.4.0', '--quiet', '--no-warn-script-location'], env={'PIP_DISABLE_PIP_VERSION_CHECK': '1'}, check=True) - - input_config = {} - if gcs_input_uris: - input_config['gcs_source'] = {'input_uris': gcs_input_uris} - if bq_input_uri: - input_config['bigquery_source'] = {'input_uri': bq_input_uri} - - output_config = {} - if gcs_output_uri_prefix: - output_config['gcs_destination'] = {'output_uri_prefix': gcs_output_uri_prefix} - if bq_output_uri: - output_config['bigquery_destination'] = {'output_uri': bq_output_uri} - - from google.cloud import automl - client = automl.PredictionServiceClient() - response = client.batch_predict( - model_path, - input_config, - output_config, - params, - retry, - timeout, - metadata, - ) - print('Operation started:') - print(response.operation) - result = response.result() - metadata = response.metadata - print('Operation finished:') - print(metadata) - output_info = metadata.batch_predict_details.output_info - # Workaround for Argo issue - it fails when output is empty: https://github.com/argoproj/argo-workflows/pull/1277/files#r326028422 - return (output_info.gcs_output_directory or '-', output_info.bigquery_output_dataset or '-') - - -if __name__ == '__main__': - from kfp.components import create_component_from_func - - automl_prediction_service_batch_predict_op = create_component_from_func( - automl_prediction_service_batch_predict, - output_component_file='component.yaml', - base_image='python:3.7', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/gcp/automl/prediction_service_batch_predict/component.yaml", - }, - ) diff --git a/components/contrib/google-cloud/automl/prediction_service_batch_predict/component.yaml b/components/contrib/google-cloud/automl/prediction_service_batch_predict/component.yaml deleted file mode 100644 index fb7d8133f67..00000000000 --- a/components/contrib/google-cloud/automl/prediction_service_batch_predict/component.yaml +++ /dev/null @@ -1,175 +0,0 @@ -name: Automl prediction service batch predict -inputs: -- name: model_path -- name: gcs_input_uris - type: JsonArray - optional: true -- name: gcs_output_uri_prefix - type: String - optional: true -- name: bq_input_uri - type: String - optional: true -- name: bq_output_uri - type: String - optional: true -- name: params - optional: true -- name: retry - optional: true -- name: timeout - optional: true -- name: metadata - type: JsonObject - optional: true -outputs: -- name: gcs_output_directory - type: String -- name: bigquery_output_dataset - type: String -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/gcp/automl/prediction_service_batch_predict/component.yaml' -implementation: - container: - image: python:3.7 - command: - - python3 - - -u - - -c - - | - from typing import NamedTuple - - def 
automl_prediction_service_batch_predict( - model_path, - gcs_input_uris: str = None, - gcs_output_uri_prefix: str = None, - bq_input_uri: str = None, - bq_output_uri: str = None, - params=None, - retry=None, #google.api_core.gapic_v1.method.DEFAULT, - timeout=None, #google.api_core.gapic_v1.method.DEFAULT, - metadata: dict = None, - ) -> NamedTuple('Outputs', [('gcs_output_directory', str), ('bigquery_output_dataset', str)]): - import sys - import subprocess - subprocess.run([sys.executable, '-m', 'pip', 'install', 'google-cloud-automl==0.4.0', '--quiet', '--no-warn-script-location'], env={'PIP_DISABLE_PIP_VERSION_CHECK': '1'}, check=True) - - input_config = {} - if gcs_input_uris: - input_config['gcs_source'] = {'input_uris': gcs_input_uris} - if bq_input_uri: - input_config['bigquery_source'] = {'input_uri': bq_input_uri} - - output_config = {} - if gcs_output_uri_prefix: - output_config['gcs_destination'] = {'output_uri_prefix': gcs_output_uri_prefix} - if bq_output_uri: - output_config['bigquery_destination'] = {'output_uri': bq_output_uri} - - from google.cloud import automl - client = automl.PredictionServiceClient() - response = client.batch_predict( - model_path, - input_config, - output_config, - params, - retry, - timeout, - metadata, - ) - print('Operation started:') - print(response.operation) - result = response.result() - metadata = response.metadata - print('Operation finished:') - print(metadata) - output_info = metadata.batch_predict_details.output_info - # Workaround for Argo issue - it fails when output is empty: https://github.com/argoproj/argo-workflows/pull/1277/files#r326028422 - return (output_info.gcs_output_directory or '-', output_info.bigquery_output_dataset or '-') - - import json - import argparse - _missing_arg = object() - _parser = argparse.ArgumentParser(prog='Automl prediction service batch predict', description='') - _parser.add_argument("--model-path", dest="model_path", type=str, required=True, default=_missing_arg) - _parser.add_argument("--gcs-input-uris", dest="gcs_input_uris", type=json.loads, required=False, default=_missing_arg) - _parser.add_argument("--gcs-output-uri-prefix", dest="gcs_output_uri_prefix", type=str, required=False, default=_missing_arg) - _parser.add_argument("--bq-input-uri", dest="bq_input_uri", type=str, required=False, default=_missing_arg) - _parser.add_argument("--bq-output-uri", dest="bq_output_uri", type=str, required=False, default=_missing_arg) - _parser.add_argument("--params", dest="params", type=str, required=False, default=_missing_arg) - _parser.add_argument("--retry", dest="retry", type=str, required=False, default=_missing_arg) - _parser.add_argument("--timeout", dest="timeout", type=str, required=False, default=_missing_arg) - _parser.add_argument("--metadata", dest="metadata", type=json.loads, required=False, default=_missing_arg) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=2) - _parsed_args = {k: v for k, v in vars(_parser.parse_args()).items() if v is not _missing_arg} - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = automl_prediction_service_batch_predict(**_parsed_args) - - if not hasattr(_outputs, '__getitem__') or isinstance(_outputs, str): - _outputs = [_outputs] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(str(_outputs[idx])) - args: - - --model-path - - inputValue: model_path - - if: - cond: - 
isPresent: gcs_input_uris - then: - - --gcs-input-uris - - inputValue: gcs_input_uris - - if: - cond: - isPresent: gcs_output_uri_prefix - then: - - --gcs-output-uri-prefix - - inputValue: gcs_output_uri_prefix - - if: - cond: - isPresent: bq_input_uri - then: - - --bq-input-uri - - inputValue: bq_input_uri - - if: - cond: - isPresent: bq_output_uri - then: - - --bq-output-uri - - inputValue: bq_output_uri - - if: - cond: - isPresent: params - then: - - --params - - inputValue: params - - if: - cond: - isPresent: retry - then: - - --retry - - inputValue: retry - - if: - cond: - isPresent: timeout - then: - - --timeout - - inputValue: timeout - - if: - cond: - isPresent: metadata - then: - - --metadata - - inputValue: metadata - - '----output-paths' - - outputPath: gcs_output_directory - - outputPath: bigquery_output_dataset diff --git a/components/contrib/google-cloud/automl/split_dataset_table_column_names/component.py b/components/contrib/google-cloud/automl/split_dataset_table_column_names/component.py deleted file mode 100644 index 582aa5e8400..00000000000 --- a/components/contrib/google-cloud/automl/split_dataset_table_column_names/component.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from typing import NamedTuple - - -def automl_split_dataset_table_column_names( - dataset_path: str, - target_column_name: str, - table_index: int = 0, -) -> NamedTuple('Outputs', [('target_column_path', str), ('feature_column_paths', list)]): - import sys - import subprocess - subprocess.run([sys.executable, '-m', 'pip', 'install', 'google-cloud-automl==0.4.0', '--quiet', '--no-warn-script-location'], env={'PIP_DISABLE_PIP_VERSION_CHECK': '1'}, check=True) - - from google.cloud import automl - client = automl.AutoMlClient() - list_table_specs_response = client.list_table_specs(dataset_path) - table_specs = [s for s in list_table_specs_response] - print('table_specs=') - print(table_specs) - table_spec_name = table_specs[table_index].name - - list_column_specs_response = client.list_column_specs(table_spec_name) - column_specs = [s for s in list_column_specs_response] - print('column_specs=') - print(column_specs) - - target_column_spec = [s for s in column_specs if s.display_name == target_column_name][0] - feature_column_specs = [s for s in column_specs if s.display_name != target_column_name] - feature_column_names = [s.name for s in feature_column_specs] - - import json - return (target_column_spec.name, json.dumps(feature_column_names)) - - -if __name__ == '__main__': - from kfp.components import create_component_from_func - - automl_split_dataset_table_column_names_op = create_component_from_func( - automl_split_dataset_table_column_names, - output_component_file='component.yaml', - base_image='python:3.7', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/gcp/automl/split_dataset_table_column_names/component.yaml", - }, - ) diff --git a/components/contrib/google-cloud/automl/split_dataset_table_column_names/component.yaml b/components/contrib/google-cloud/automl/split_dataset_table_column_names/component.yaml deleted file mode 100644 index 05939a3f8b0..00000000000 --- a/components/contrib/google-cloud/automl/split_dataset_table_column_names/component.yaml +++ /dev/null @@ -1,95 +0,0 @@ -name: Automl split dataset table column names -inputs: -- name: dataset_path - type: String -- name: target_column_name - type: String -- name: table_index - type: Integer - default: '0' - optional: true -outputs: -- name: target_column_path - type: String -- name: feature_column_paths - type: JsonArray -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/gcp/automl/split_dataset_table_column_names/component.yaml' -implementation: - container: - image: python:3.7 - command: - - python3 - - -u - - -c - - | - from typing import NamedTuple - - def automl_split_dataset_table_column_names( - dataset_path: str, - target_column_name: str, - table_index: int = 0, - ) -> NamedTuple('Outputs', [('target_column_path', str), ('feature_column_paths', list)]): - import sys - import subprocess - subprocess.run([sys.executable, '-m', 'pip', 'install', 'google-cloud-automl==0.4.0', '--quiet', '--no-warn-script-location'], env={'PIP_DISABLE_PIP_VERSION_CHECK': '1'}, check=True) - - from google.cloud import automl - client = automl.AutoMlClient() - list_table_specs_response = client.list_table_specs(dataset_path) - table_specs = [s for s in list_table_specs_response] - print('table_specs=') - print(table_specs) - table_spec_name = table_specs[table_index].name - - list_column_specs_response = 
client.list_column_specs(table_spec_name) - column_specs = [s for s in list_column_specs_response] - print('column_specs=') - print(column_specs) - - target_column_spec = [s for s in column_specs if s.display_name == target_column_name][0] - feature_column_specs = [s for s in column_specs if s.display_name != target_column_name] - feature_column_names = [s.name for s in feature_column_specs] - - import json - return (target_column_spec.name, json.dumps(feature_column_names)) - - import argparse - _missing_arg = object() - _parser = argparse.ArgumentParser(prog='Automl split dataset table column names', description='') - _parser.add_argument("--dataset-path", dest="dataset_path", type=str, required=True, default=_missing_arg) - _parser.add_argument("--target-column-name", dest="target_column_name", type=str, required=True, default=_missing_arg) - _parser.add_argument("--table-index", dest="table_index", type=int, required=False, default=_missing_arg) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=2) - _parsed_args = {k: v for k, v in vars(_parser.parse_args()).items() if v is not _missing_arg} - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = automl_split_dataset_table_column_names(**_parsed_args) - - if not hasattr(_outputs, '__getitem__') or isinstance(_outputs, str): - _outputs = [_outputs] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(str(_outputs[idx])) - args: - - --dataset-path - - inputValue: dataset_path - - --target-column-name - - inputValue: target_column_name - - if: - cond: - isPresent: table_index - then: - - --table-index - - inputValue: table_index - - '----output-paths' - - outputPath: target_column_path - - outputPath: feature_column_paths diff --git a/components/contrib/google-cloud/dataproc/create_cluster/README.md b/components/contrib/google-cloud/dataproc/create_cluster/README.md deleted file mode 100644 index ffcbe1d6bd4..00000000000 --- a/components/contrib/google-cloud/dataproc/create_cluster/README.md +++ /dev/null @@ -1,176 +0,0 @@ - -# Name -Component: Data processing by creating a cluster in Cloud Dataproc - - -# Label -Cloud Dataproc, Kubeflow - -# Facets - -Use case: -Other - -Technique: -Other - -Input data type: -Tabular - -ML workflow: -Data preparation - -# Summary -A Kubeflow pipeline component to create a cluster in Cloud Dataproc. - -# Details -## Intended use - -Use this component at the start of a Kubeflow pipeline to create a temporary Cloud Dataproc cluster to run Cloud Dataproc jobs as steps in the pipeline. - -## Runtime arguments - -| Argument | Description | Optional | Data type | Accepted values | Default | -|----------|-------------|----------|-----------|-----------------|---------| -| project_id | The Google Cloud Platform (GCP) project ID that the cluster belongs to. | No | GCPProjectID | | | -| region | The Cloud Dataproc region to create the cluster in. | No | GCPRegion | | | -| name | The name of the cluster. Cluster names within a project must be unique. You can reuse the names of deleted clusters. | Yes | String | | None | -| name_prefix | The prefix of the cluster name. | Yes | String | | None | -| initialization_actions | A list of Cloud Storage URIs identifying the executables on each node after the configuration is completed. By default, executables are run on the master and all the worker nodes. 
| Yes | List | | None | -| config_bucket | The Cloud Storage bucket to use to stage the job dependencies, the configuration files, and the job driver console’s output. | Yes | GCSPath | | None | -| image_version | The version of the software inside the cluster. | Yes | String | | None | -| cluster | The full [cluster configuration](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.clusters#Cluster). | Yes | Dict | | None | -| wait_interval | The number of seconds to pause before polling the operation. | Yes | Integer | | 30 | - -## Output -Name | Description | Type -:--- | :---------- | :--- -cluster_name | The name of the cluster. | String - -Note: You can recycle the cluster by using the [Dataproc delete cluster component](https://github.com/kubeflow/pipelines/tree/release-1.7/components/gcp/dataproc/delete_cluster). - - -## Cautions & requirements - -To use the component, you must: -* Set up the GCP project by following these [steps](https://cloud.google.com/dataproc/docs/guides/setup-project). -* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. -* Grant the following types of access to the Kubeflow user service account: - * Read access to the Cloud Storage buckets which contain the initialization action files. - * The role, `roles/dataproc.editor`, on the project. - -## Detailed description - -This component creates a new Dataproc cluster by using the [Dataproc create cluster REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.clusters/create). - -Follow these steps to use the component in a pipeline: - -1. Install the Kubeflow pipeline's SDK - - ```python - %%capture --no-stderr - - !pip3 install kfp --upgrade - ``` - -2. Load the component using the Kubeflow pipeline's SDK - - - ```python - import kfp.components as comp - - dataproc_create_cluster_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/dataproc/create_cluster/component.yaml') - help(dataproc_create_cluster_op) - ``` - -### Sample -The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. 
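
Before setting the sample parameters, the sketch below shows how a non-trivial `cluster` configuration could be passed instead of the empty strings used in the walkthrough that follows. It is illustrative only: the machine types, instance counts, and the `kfp-tmp` name prefix are placeholders rather than values required by the component, and it assumes the `dataproc_create_cluster_op` loaded in step 2 above.

```python
import json

import kfp.dsl as dsl
import kfp.components as comp

dataproc_create_cluster_op = comp.load_component_from_url(
    'https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/dataproc/create_cluster/component.yaml')

# Hypothetical cluster config passed through to the Dataproc clusters.create REST API.
# Machine types and instance counts are placeholders.
CLUSTER_CONFIG = {
    'config': {
        'masterConfig': {'numInstances': 1, 'machineTypeUri': 'n1-standard-4'},
        'workerConfig': {'numInstances': 2, 'machineTypeUri': 'n1-standard-4'},
    }
}

@dsl.pipeline(
    name='Dataproc create cluster with config',
    description='Illustrative only: creates a small two-worker cluster.'
)
def dataproc_create_cluster_with_config(project_id='', region='us-central1'):
    create_cluster_task = dataproc_create_cluster_op(
        project_id=project_id,
        region=region,
        name_prefix='kfp-tmp',               # cluster name is generated from this prefix
        cluster=json.dumps(CLUSTER_CONFIG),  # full cluster config as a JSON string
        wait_interval='30')
    # Downstream steps can reference the generated name via
    # create_cluster_task.outputs['cluster_name'].
```
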
- -#### Set sample parameters - -```python -# Required parameters -PROJECT_ID = '' - -# Optional parameters -EXPERIMENT_NAME = 'Dataproc - Create Cluster' -``` - -#### Example pipeline that uses the component - - -```python -import kfp.dsl as dsl -import json -@dsl.pipeline( - name='Dataproc create cluster pipeline', - description='Dataproc create cluster pipeline' -) -def dataproc_create_cluster_pipeline( - project_id = PROJECT_ID, - region = 'us-central1', - name='', - name_prefix='', - initialization_actions='', - config_bucket='', - image_version='', - cluster='', - wait_interval='30' -): - dataproc_create_cluster_op( - project_id=project_id, - region=region, - name=name, - name_prefix=name_prefix, - initialization_actions=initialization_actions, - config_bucket=config_bucket, - image_version=image_version, - cluster=cluster, - wait_interval=wait_interval) -``` - -#### Compile the pipeline - - -```python -#Compile the pipeline -pipeline_func = dataproc_create_cluster_pipeline -pipeline_filename = pipeline_func.__name__ + '.zip' -import kfp.compiler as compiler -compiler.Compiler().compile(pipeline_func, pipeline_filename) -``` - -#### Submit the pipeline for execution - - -```python -#Specify values for the pipeline's arguments -arguments = {} - -#Get or create an experiment -import kfp -client = kfp.Client() -experiment = client.create_experiment(EXPERIMENT_NAME) - -#Submit a pipeline run -run_name = pipeline_func.__name__ + ' run' -run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments) -``` - -## References -* [Kubernetes Engine for Kubeflow](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) -* [Component Python code](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/component_sdk/python/kfp_component/google/dataproc/_create_cluster.py) -* [Component Docker file](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/Dockerfile) -* [Sample notebook](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/dataproc/create_cluster/sample.ipynb) -* [Dataproc create cluster REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.clusters/create) - -## License -By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control. diff --git a/components/contrib/google-cloud/dataproc/create_cluster/component.yaml b/components/contrib/google-cloud/dataproc/create_cluster/component.yaml deleted file mode 100644 index b0fe8fdec7a..00000000000 --- a/components/contrib/google-cloud/dataproc/create_cluster/component.yaml +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -name: dataproc_create_cluster -description: | - Creates a DataProc cluster under a project. -metadata: - labels: - add-pod-env: 'true' -inputs: - - name: project_id - description: >- - Required. The ID of the Google Cloud Platform project that the cluster belongs to. - type: GCPProjectID - - name: region - description: 'Required. The Cloud Dataproc region in which to handle the request.' - type: GCPRegion - - name: name - description: >- - Optional. The cluster name. Cluster names within a project must be unique. Names of - deleted clusters can be reused - default: '' - type: String - - name: name_prefix - description: 'Optional. The prefix of the cluster name.' - default: '' - type: String - - name: initialization_actions - description: >- - Optional. List of GCS URIs of executables to execute on each node after config - is completed. By default, executables are run on master and all worker nodes. - default: '' - type: List - - name: config_bucket - description: >- - Optional. A Google Cloud Storage bucket used to stage job dependencies, config - files, and job driver console output. - default: '' - type: GCSPath - - name: image_version - description: 'Optional. The version of software inside the cluster.' - default: '' - type: String - - name: cluster - description: >- - Optional. The full cluster config. See - [full details](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.clusters#Cluster) - default: '' - type: Dict - - name: wait_interval - default: '30' - description: 'Optional. The wait seconds between polling the operation. Defaults to 30.' - type: Integer -outputs: - - name: cluster_name - description: 'The cluster name of the created cluster.' - type: String - - name: MLPipeline UI metadata - type: UI metadata -implementation: - container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.7.0-rc.3 - command: ['python', '-u', '-m', 'kfp_component.launcher'] - args: [ - --ui_metadata_path, {outputPath: MLPipeline UI metadata}, - kfp_component.google.dataproc, create_cluster, - --project_id, {inputValue: project_id}, - --region, {inputValue: region}, - --name, {inputValue: name}, - --name_prefix, {inputValue: name_prefix}, - --initialization_actions, {inputValue: initialization_actions}, - --config_bucket, {inputValue: config_bucket}, - --image_version, {inputValue: image_version}, - --cluster, {inputValue: cluster}, - --wait_interval, {inputValue: wait_interval}, - --cluster_name_output_path, {outputPath: cluster_name}, - ] - env: - KFP_POD_NAME: "{{pod.name}}" diff --git a/components/contrib/google-cloud/dataproc/create_cluster/sample.ipynb b/components/contrib/google-cloud/dataproc/create_cluster/sample.ipynb deleted file mode 100644 index b5911ea9e8e..00000000000 --- a/components/contrib/google-cloud/dataproc/create_cluster/sample.ipynb +++ /dev/null @@ -1,245 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Name\n", - "Data processing by creating a cluster in Cloud Dataproc\n", - "\n", - "\n", - "# Label\n", - "Cloud Dataproc, cluster, GCP, Cloud Storage, KubeFlow, Pipeline\n", - "\n", - "\n", - "# Summary\n", - "A Kubeflow Pipeline component to create a cluster in Cloud Dataproc.\n", - "\n", - "# Details\n", - "## Intended use\n", - "\n", - "Use this component at the start of a Kubeflow Pipeline to create a temporary Cloud Dataproc cluster to run Cloud Dataproc jobs as steps in the pipeline.\n", - "\n", - "## Runtime arguments\n", - "\n", - "| Argument | Description | Optional | Data type | Accepted values | 
Default |\n", - "|----------|-------------|----------|-----------|-----------------|---------|\n", - "| project_id | The Google Cloud Platform (GCP) project ID that the cluster belongs to. | No | GCPProjectID | | |\n", - "| region | The Cloud Dataproc region to create the cluster in. | No | GCPRegion | | |\n", - "| name | The name of the cluster. Cluster names within a project must be unique. You can reuse the names of deleted clusters. | Yes | String | | None |\n", - "| name_prefix | The prefix of the cluster name. | Yes | String | | None |\n", - "| initialization_actions | A list of Cloud Storage URIs identifying executables to execute on each node after the configuration is completed. By default, executables are run on the master and all the worker nodes. | Yes | List | | None |\n", - "| config_bucket | The Cloud Storage bucket to use to stage the job dependencies, the configuration files, and the job driver console’s output. | Yes | GCSPath | | None |\n", - "| image_version | The version of the software inside the cluster. | Yes | String | | None |\n", - "| cluster | The full [cluster configuration](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.clusters#Cluster). | Yes | Dict | | None |\n", - "| wait_interval | The number of seconds to pause before polling the operation. | Yes | Integer | | 30 |\n", - "\n", - "## Output\n", - "Name | Description | Type\n", - ":--- | :---------- | :---\n", - "cluster_name | The name of the cluster. | String\n", - "\n", - "Note: You can recycle the cluster by using the [Dataproc delete cluster component](https://github.com/kubeflow/pipelines/tree/release-1.7/components/gcp/dataproc/delete_cluster).\n", - "\n", - "\n", - "## Cautions & requirements\n", - "\n", - "To use the component, you must:\n", - "* Set up the GCP project by following these [steps](https://cloud.google.com/dataproc/docs/guides/setup-project).\n", - "* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", - "* Grant the following types of access to the Kubeflow user service account:\n", - " * Read access to the Cloud Storage buckets which contains initialization action files.\n", - " * The role, `roles/dataproc.editor` on the project.\n", - "\n", - "## Detailed description\n", - "\n", - "This component creates a new Dataproc cluster by using the [Dataproc create cluster REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.clusters/create). \n", - "\n", - "Follow these steps to use the component in a pipeline:\n", - "\n", - "1. Install the Kubeflow Pipeline SDK:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "\n", - "!pip3 install kfp --upgrade" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. Load the component using KFP SDK" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.components as comp\n", - "\n", - "dataproc_create_cluster_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/dataproc/create_cluster/component.yaml')\n", - "help(dataproc_create_cluster_op)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Sample\n", - "Note: The following sample code works in an IPython notebook or directly in Python code. 
See the sample code below to learn how to execute the template.\n", - "\n", - "#### Set sample parameters" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "parameters" - ] - }, - "outputs": [], - "source": [ - "# Required Parameters\n", - "PROJECT_ID = ''\n", - "\n", - "# Optional Parameters\n", - "EXPERIMENT_NAME = 'Dataproc - Create Cluster'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Example pipeline that uses the component" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.dsl as dsl\n", - "import json\n", - "@dsl.pipeline(\n", - " name='Dataproc create cluster pipeline',\n", - " description='Dataproc create cluster pipeline'\n", - ")\n", - "def dataproc_create_cluster_pipeline(\n", - " project_id = PROJECT_ID, \n", - " region = 'us-central1', \n", - " name='', \n", - " name_prefix='',\n", - " initialization_actions='', \n", - " config_bucket='', \n", - " image_version='', \n", - " cluster='', \n", - " wait_interval='30'\n", - "):\n", - " dataproc_create_cluster_op(\n", - " project_id=project_id, \n", - " region=region, \n", - " name=name, \n", - " name_prefix=name_prefix, \n", - " initialization_actions=initialization_actions, \n", - " config_bucket=config_bucket, \n", - " image_version=image_version, \n", - " cluster=cluster, \n", - " wait_interval=wait_interval)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Compile the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "pipeline_func = dataproc_create_cluster_pipeline\n", - "pipeline_filename = pipeline_func.__name__ + '.zip'\n", - "import kfp.compiler as compiler\n", - "compiler.Compiler().compile(pipeline_func, pipeline_filename)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Submit the pipeline for execution" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "#Specify pipeline argument values\n", - "arguments = {}\n", - "\n", - "#Get or create an experiment and submit a pipeline run\n", - "import kfp\n", - "client = kfp.Client()\n", - "experiment = client.create_experiment(EXPERIMENT_NAME)\n", - "\n", - "#Submit a pipeline run\n", - "run_name = pipeline_func.__name__ + ' run'\n", - "run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## References\n", - "* [Kubernetes Engine for Kubeflow](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts)\n", - "* [Component Python code](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/component_sdk/python/kfp_component/google/dataproc/_create_cluster.py)\n", - "* [Component Docker file](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/Dockerfile)\n", - "* [Sample notebook](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/dataproc/create_cluster/sample.ipynb)\n", - "* [Dataproc create cluster REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.clusters/create)\n", - "\n", - "## License\n", - "By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of 
Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control." - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.4" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/components/contrib/google-cloud/dataproc/delete_cluster/README.md b/components/contrib/google-cloud/dataproc/delete_cluster/README.md deleted file mode 100644 index 2d33bb1d99a..00000000000 --- a/components/contrib/google-cloud/dataproc/delete_cluster/README.md +++ /dev/null @@ -1,148 +0,0 @@ - -# Name - -Component: Data preparation by deleting a cluster in Cloud Dataproc - -# Label -Cloud Dataproc, Kubeflow - - -# Summary -A Kubeflow pipeline component to delete a cluster in Cloud Dataproc. - -## Intended use -Use this component at the start of a Kubeflow pipeline to delete a temporary Cloud Dataproc cluster when running Cloud Dataproc jobs as steps in the pipeline. This component is usually used with an [exit handler](https://github.com/kubeflow/pipelines/blob/master/samples/core/exit_handler/exit_handler.py) to run at the end of a pipeline. - -# Facets - -Use case: - -Technique: - -Input data type: - -ML workflow: - -## Runtime arguments -| Argument | Description | Optional | Data type | Accepted values | Default | -|:----------|:-------------|:----------|:-----------|:-----------------|:---------| -| project_id | The Google Cloud Platform (GCP) project ID that the cluster belongs to. | No | GCPProjectID | - | - | -| region | The Cloud Dataproc region in which to handle the request. | No | GCPRegion | - | - | -| name | The name of the cluster to delete. | No | String | - | - | -| wait_interval | The number of seconds to pause between polling the operation. | Yes | Integer | - | 30 | - - -## Cautions & requirements -To use the component, you must: -* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project). -* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. -* Grant the Kubeflow user service account the role, `roles/dataproc.editor`, on the project. - -## Detailed description -This component deletes a Dataproc cluster by using [Dataproc delete cluster REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.clusters/delete). - -Follow these steps to use the component in a pipeline: -1. Install the Kubeflow pipeline's SDK: - - - ```python - %%capture --no-stderr - - !pip3 install kfp --upgrade - ``` - -2. Load the component using the Kubeflow pipeline's SDK: - - - ```python - import kfp.components as comp - - dataproc_delete_cluster_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/dataproc/delete_cluster/component.yaml') - help(dataproc_delete_cluster_op) - ``` - -### Sample - -The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. - -#### Prerequisites - -[Create a Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) before running the sample code. 
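
The intended-use note above recommends pairing this component with an exit handler so the temporary cluster is removed even when an upstream step fails, but the walkthrough below only calls the delete operation directly. The following sketch shows that pattern; the pipeline name and the placeholder `use-cluster` step are hypothetical, and it assumes the `dataproc_delete_cluster_op` loaded in step 2 above.

```python
import kfp.dsl as dsl
import kfp.components as comp

dataproc_delete_cluster_op = comp.load_component_from_url(
    'https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/dataproc/delete_cluster/component.yaml')

@dsl.pipeline(
    name='Dataproc cluster with exit handler',
    description='Illustrative only: deletes the cluster even if a step inside fails.'
)
def dataproc_with_exit_handler(project_id='', region='us-central1', name=''):
    delete_cluster_task = dataproc_delete_cluster_op(
        project_id=project_id,
        region=region,
        name=name)
    # Steps inside the exit handler run first; the delete step runs last,
    # whether those steps succeed or fail.
    with dsl.ExitHandler(delete_cluster_task):
        # Hypothetical placeholder for the Dataproc job steps that use the cluster.
        dsl.ContainerOp(
            name='use-cluster',
            image='library/bash:4.4.23',
            command=['sh', '-c'],
            arguments=['echo "submit Dataproc jobs against the cluster here"'])
```
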
- -#### Set sample parameters - - -```python -PROJECT_ID = '' -CLUSTER_NAME = '' - -REGION = 'us-central1' -EXPERIMENT_NAME = 'Dataproc - Delete Cluster' -``` - -#### Example pipeline that uses the component - - -```python -import kfp.dsl as dsl -import json -@dsl.pipeline( - name='Dataproc delete cluster pipeline', - description='Dataproc delete cluster pipeline' -) -def dataproc_delete_cluster_pipeline( - project_id = PROJECT_ID, - region = REGION, - name = CLUSTER_NAME -): - dataproc_delete_cluster_op( - project_id=project_id, - region=region, - name=name) -``` - -#### Compile the pipeline - - -```python -pipeline_func = dataproc_delete_cluster_pipeline -pipeline_filename = pipeline_func.__name__ + '.zip' -import kfp.compiler as compiler -compiler.Compiler().compile(pipeline_func, pipeline_filename) -``` - -#### Submit the pipeline for execution - - -```python -#Specify values for the pipeline's arguments -arguments = {} - -#Get or create an experiment -import kfp -client = kfp.Client() -experiment = client.create_experiment(EXPERIMENT_NAME) - -#Submit a pipeline run -run_name = pipeline_func.__name__ + ' run' -run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments) -``` - -## References - -* [Component Python code](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/component_sdk/python/kfp_component/google/dataproc/_delete_cluster.py) -* [Component Docker file](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/Dockerfile) -* [Sample notebook](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/dataproc/delete_cluster/sample.ipynb) -* [Dataproc delete cluster REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.clusters/delete) - - -## License -By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control. diff --git a/components/contrib/google-cloud/dataproc/delete_cluster/component.yaml b/components/contrib/google-cloud/dataproc/delete_cluster/component.yaml deleted file mode 100644 index f7a9c54b94a..00000000000 --- a/components/contrib/google-cloud/dataproc/delete_cluster/component.yaml +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: dataproc_delete_cluster -description: | - Deletes a DataProc cluster. -metadata: - labels: - add-pod-env: 'true' -inputs: - - name: project_id - description: >- - Required. The ID of the Google Cloud Platform project that the cluster belongs to. - type: GCPProjectID - - name: region - description: >- - Required. The Cloud Dataproc region in which to handle the request. - type: GCPRegion - - name: name - description: 'Required. The cluster name to delete.' 
- type: String - - name: wait_interval - default: '30' - description: 'Optional. The wait seconds between polling the operation. Defaults to 30.' - type: Integer -implementation: - container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.7.0-rc.3 - command: ['python', '-u', '-m', 'kfp_component.launcher'] - args: [ - kfp_component.google.dataproc, delete_cluster, - --project_id, {inputValue: project_id}, - --region, {inputValue: region}, - --name, {inputValue: name}, - --wait_interval, {inputValue: wait_interval} - ] - env: - KFP_POD_NAME: "{{pod.name}}" diff --git a/components/contrib/google-cloud/dataproc/delete_cluster/sample.ipynb b/components/contrib/google-cloud/dataproc/delete_cluster/sample.ipynb deleted file mode 100644 index 259f59bf530..00000000000 --- a/components/contrib/google-cloud/dataproc/delete_cluster/sample.ipynb +++ /dev/null @@ -1,231 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Name\n", - "\n", - "Data preparation by deleting a cluster in Cloud Dataproc\n", - "\n", - "# Label\n", - "Cloud Dataproc, cluster, GCP, Cloud Storage, Kubeflow, Pipeline\n", - "\n", - "\n", - "# Summary\n", - "A Kubeflow Pipeline component to delete a cluster in Cloud Dataproc.\n", - "\n", - "## Intended use\n", - "Use this component at the start of a Kubeflow Pipeline to delete a temporary Cloud Dataproc \n", - "cluster to run Cloud Dataproc jobs as steps in the pipeline. This component is usually \n", - "used with an [exit handler](https://github.com/kubeflow/pipelines/blob/master/samples/core/exit_handler/exit_handler.py) to run at the end of a pipeline.\n", - "\n", - "\n", - "## Runtime arguments\n", - "| Argument | Description | Optional | Data type | Accepted values | Default |\n", - "|----------|-------------|----------|-----------|-----------------|---------|\n", - "| project_id | The Google Cloud Platform (GCP) project ID that the cluster belongs to. | No | GCPProjectID | | |\n", - "| region | The Cloud Dataproc region in which to handle the request. | No | GCPRegion | | |\n", - "| name | The name of the cluster to delete. | No | String | | |\n", - "| wait_interval | The number of seconds to pause between polling the operation. | Yes | Integer | | 30 |\n", - "\n", - "\n", - "## Cautions & requirements\n", - "To use the component, you must:\n", - "* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project).\n", - "* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", - "* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project.\n", - "\n", - "## Detailed description\n", - "This component deletes a Dataproc cluster by using [Dataproc delete cluster REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.clusters/delete).\n", - "\n", - "Follow these steps to use the component in a pipeline:\n", - "1. Install the Kubeflow Pipeline SDK:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "\n", - "!pip3 install kfp --upgrade" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 
Load the component using KFP SDK" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.components as comp\n", - "\n", - "dataproc_delete_cluster_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/dataproc/delete_cluster/component.yaml')\n", - "help(dataproc_delete_cluster_op)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Sample\n", - "\n", - "Note: The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template.\n", - "\n", - "#### Prerequisites\n", - "\n", - "[Create a Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) before running the sample code.\n", - "\n", - "#### Set sample parameters" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "parameters" - ] - }, - "outputs": [], - "source": [ - "PROJECT_ID = ''\n", - "CLUSTER_NAME = ''\n", - "\n", - "REGION = 'us-central1'\n", - "EXPERIMENT_NAME = 'Dataproc - Delete Cluster'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Example pipeline that uses the component" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.dsl as dsl\n", - "import json\n", - "@dsl.pipeline(\n", - " name='Dataproc delete cluster pipeline',\n", - " description='Dataproc delete cluster pipeline'\n", - ")\n", - "def dataproc_delete_cluster_pipeline(\n", - " project_id = PROJECT_ID, \n", - " region = REGION,\n", - " name = CLUSTER_NAME\n", - "):\n", - " dataproc_delete_cluster_op(\n", - " project_id=project_id, \n", - " region=region, \n", - " name=name)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Compile the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "pipeline_func = dataproc_delete_cluster_pipeline\n", - "pipeline_filename = pipeline_func.__name__ + '.zip'\n", - "import kfp.compiler as compiler\n", - "compiler.Compiler().compile(pipeline_func, pipeline_filename)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Submit the pipeline for execution" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "#Specify pipeline argument values\n", - "arguments = {}\n", - "\n", - "#Get or create an experiment and submit a pipeline run\n", - "import kfp\n", - "client = kfp.Client()\n", - "experiment = client.create_experiment(EXPERIMENT_NAME)\n", - "\n", - "#Submit a pipeline run\n", - "run_name = pipeline_func.__name__ + ' run'\n", - "run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## References\n", - "\n", - "* [Component Python code](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/component_sdk/python/kfp_component/google/dataproc/_delete_cluster.py)\n", - "* [Component Docker file](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/Dockerfile)\n", - "* [Sample notebook](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/dataproc/delete_cluster/sample.ipynb)\n", - "* [Dataproc delete cluster REST 
API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.clusters/delete)\n", - "\n", - "\n", - "## License\n", - "By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control." - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.4" - }, - "pycharm": { - "stem_cell": { - "cell_type": "raw", - "source": [], - "metadata": { - "collapsed": false - } - } - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/components/contrib/google-cloud/dataproc/submit_hadoop_job/README.md b/components/contrib/google-cloud/dataproc/submit_hadoop_job/README.md deleted file mode 100644 index 609cbc1f548..00000000000 --- a/components/contrib/google-cloud/dataproc/submit_hadoop_job/README.md +++ /dev/null @@ -1,205 +0,0 @@ - -# Name -Component: Data preparation using Hadoop MapReduce on YARN with Cloud Dataproc - -# Labels -Cloud Dataproc, Hadoop, YARN, Apache, MapReduce - - -# Summary -A Kubeflow pipeline component to prepare data by submitting an Apache Hadoop MapReduce job on Apache Hadoop YARN to Cloud Dataproc. - -# Facets - -Use case: - -Technique: - -Input data type: - -ML workflow: - -# Details -## Intended use -Use the component to run an Apache Hadoop MapReduce job as one preprocessing step in a Kubeflow pipeline. - -## Runtime arguments -| Argument | Description | Optional | Data type | Accepted values | Default | -|----------|-------------|----------|-----------|-----------------|---------| -| project_id | The Google Cloud Platform (GCP) project ID that the cluster belongs to. | No | GCPProjectID | - | - | -| region | The Dataproc region to handle the request. | No | GCPRegion | - | - | -| cluster_name | The name of the cluster to run the job. | No | String | - | - | -| main_jar_file_uri | The Hadoop Compatible Filesystem (HCFS) URI of the JAR file containing the main class to execute. | No | List |- |- | -| main_class | The name of the driver's main class. The JAR file that contains the class must be either in the default CLASSPATH or specified in `hadoop_job.jarFileUris`. | No | String |- | - | -| args | The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission. | Yes | List | - | None | -| hadoop_job | The payload of a [HadoopJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/HadoopJob). | Yes | Dict | - | None | -| job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | -| None | -| wait_interval | The number of seconds to pause between polling the operation. 
| Yes | Integer | - | 30 | - -Note: - -`main_jar_file_uri`: The examples for the files are: -- `gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar` -- `hdfs:/tmp/test-samples/custom-wordcount.jarfile:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar` - - -## Output -Name | Description | Type -:--- | :---------- | :--- -job_id | The ID of the created job. | String - -## Cautions & requirements -To use the component, you must: -* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project). -* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster). -* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. -* Grant the Kubeflow user service account the role, `roles/dataproc.editor`, on the project. - -## Detailed description - -This component creates a Hadoop job from the [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit). - -Follow these steps to use the component in a pipeline: - -1. Install the Kubeflow pipeline's SDK: - - ```python - %%capture --no-stderr - - !pip3 install kfp --upgrade - ``` - -2. Load the component using the Kubeflow pipeline's SDK: - - ```python - import kfp.components as comp - - dataproc_submit_hadoop_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/dataproc/submit_hadoop_job/component.yaml') - help(dataproc_submit_hadoop_job_op) - ``` - -### Sample -The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. - -#### Setup a Dataproc cluster -[Create a new Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) (or reuse an existing one) before running the sample code. - - -### Prepare a Hadoop job -Upload your Hadoop JAR file to a Cloud Storage bucket. In the sample, we will use a JAR file that is preinstalled in the main cluster, so you don't have to provide the argument, `main_jar_file_uri`. - -To package a self-contained Hadoop MapReduce application from the [WordCount example source code](https://github.com/apache/hadoop/blob/trunk/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordCount.java), follow the [MapReduce Tutorial](https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html). - -#### Set sample parameters - -```python -PROJECT_ID = '' -CLUSTER_NAME = '' -OUTPUT_GCS_PATH = '' -REGION = 'us-central1' -MAIN_CLASS = 'org.apache.hadoop.examples.WordCount' -INTPUT_GCS_PATH = 'gs://ml-pipeline-playground/shakespeare1.txt' -EXPERIMENT_NAME = 'Dataproc - Submit Hadoop Job' -``` - -#### Inspect the input data -The input file is a simple text file: - -```python -!gsutil cat $INTPUT_GCS_PATH -``` - -#### Clean up the existing output files (optional) -This is needed because the sample code requires the output folder to be a clean folder. To continue to run the sample, make sure that the service account of the notebook server has access to `OUTPUT_GCS_PATH`. - -Caution: This will remove all blob files under `OUTPUT_GCS_PATH`. 
- -```python -!gsutil rm $OUTPUT_GCS_PATH/** -``` - -#### Example pipeline that uses the component - -```python -import kfp.dsl as dsl -import json -@dsl.pipeline( - name='Dataproc submit Hadoop job pipeline', - description='Dataproc submit Hadoop job pipeline' -) -def dataproc_submit_hadoop_job_pipeline( - project_id = PROJECT_ID, - region = REGION, - cluster_name = CLUSTER_NAME, - main_jar_file_uri = '', - main_class = MAIN_CLASS, - args = json.dumps([ - INTPUT_GCS_PATH, - OUTPUT_GCS_PATH - ]), - hadoop_job='', - job='{}', - wait_interval='30' -): - dataproc_submit_hadoop_job_op( - project_id=project_id, - region=region, - cluster_name=cluster_name, - main_jar_file_uri=main_jar_file_uri, - main_class=main_class, - args=args, - hadoop_job=hadoop_job, - job=job, - wait_interval=wait_interval) -``` - -#### Compile the pipeline - - -```python -pipeline_func = dataproc_submit_hadoop_job_pipeline -pipeline_filename = pipeline_func.__name__ + '.zip' -import kfp.compiler as compiler -compiler.Compiler().compile(pipeline_func, pipeline_filename) -``` - -#### Submit the pipeline for execution - - -```python -#Specify values for the pipeline's arguments -arguments = {} - -#Get or create an experiment -import kfp -client = kfp.Client() -experiment = client.create_experiment(EXPERIMENT_NAME) - -#Submit a pipeline run -run_name = pipeline_func.__name__ + ' run' -run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments) -``` - -#### Inspect the output -The sample in the notebook will count the words in the input text and save them in sharded files. The command to inspect the output is: - -```python -!gsutil cat $OUTPUT_GCS_PATH/* -``` - -## References -* [Component Python code](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/component_sdk/python/kfp_component/google/dataproc/_submit_hadoop_job.py) -* [Component Docker file](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/Dockerfile) -* [Sample notebook](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/dataproc/submit_hadoop_job/sample.ipynb) -* [Dataproc HadoopJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/HadoopJob) - -# License -By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control. diff --git a/components/contrib/google-cloud/dataproc/submit_hadoop_job/component.yaml b/components/contrib/google-cloud/dataproc/submit_hadoop_job/component.yaml deleted file mode 100644 index cec49a337d9..00000000000 --- a/components/contrib/google-cloud/dataproc/submit_hadoop_job/component.yaml +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -name: dataproc_submit_hadoop_job -description: >- - Submits a Cloud Dataproc job for running Apache Hadoop MapReduce jobs on - Apache Hadoop YARN. -metadata: - labels: - add-pod-env: 'true' -inputs: - - name: project_id - description: >- - Required. The ID of the Google Cloud Platform project that the cluster - belongs to. - type: GCPProjectID - - name: region - description: >- - Required. The Cloud Dataproc region in which to handle the request. - type: GCPRegion - - name: cluster_name - description: 'Required. The cluster to run the job.' - type: String - - name: main_jar_file_uri - default: '' - description: >- - The HCFS URI of the jar file containing the main class. Examples: - `gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar` - `hdfs:/tmp/test-samples/custom-wordcount.jar` - `file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar` - type: GCSPath - - name: main_class - default: '' - description: >- - The name of the driver's main class. The jar file - containing the class must be in the default CLASSPATH or specified - in `jarFileUris`. - type: String - - name: args - default: '' - description: >- - Optional. The arguments to pass to the driver. Do not include - arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, - since a collision may occur that causes an incorrect job submission. - type: List - - name: hadoop_job - default: '' - description: >- - Optional. The full payload of a - [hadoop job](https://cloud.google.com/dataproc/docs/reference/rest/v1/HadoopJob). - type: Dict - - name: job - default: '' - description: >- - Optional. The full payload of a - [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). - type: Dict - - name: wait_interval - default: '30' - description: >- - Optional. The wait seconds between polling the operation. - Defaults to 30. - type: Integer -outputs: - - name: job_id - description: 'The ID of the created job.' 
- type: String - - name: MLPipeline UI metadata - type: UI metadata -implementation: - container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.7.0-rc.3 - command: ['python', '-u', '-m', 'kfp_component.launcher'] - args: [ - --ui_metadata_path, {outputPath: MLPipeline UI metadata}, - kfp_component.google.dataproc, submit_hadoop_job, - --project_id, {inputValue: project_id}, - --region, {inputValue: region}, - --cluster_name, {inputValue: cluster_name}, - --main_jar_file_uri, {inputValue: main_jar_file_uri}, - --main_class, {inputValue: main_class}, - --args, {inputValue: args}, - --hadoop_job, {inputValue: hadoop_job}, - --job, {inputValue: job}, - --wait_interval, {inputValue: wait_interval}, - --job_id_output_path, {outputPath: job_id}, - ] - env: - KFP_POD_NAME: "{{pod.name}}" diff --git a/components/contrib/google-cloud/dataproc/submit_hadoop_job/sample.ipynb b/components/contrib/google-cloud/dataproc/submit_hadoop_job/sample.ipynb deleted file mode 100644 index 1b6499e1025..00000000000 --- a/components/contrib/google-cloud/dataproc/submit_hadoop_job/sample.ipynb +++ /dev/null @@ -1,313 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Name\n", - "Data preparation using Hadoop MapReduce on YARN with Cloud Dataproc\n", - "\n", - "# Label\n", - "Cloud Dataproc, GCP, Cloud Storage, Hadoop, YARN, Apache, MapReduce\n", - "\n", - "\n", - "# Summary\n", - "A Kubeflow Pipeline component to prepare data by submitting an Apache Hadoop MapReduce job on Apache Hadoop YARN to Cloud Dataproc.\n", - "\n", - "# Details\n", - "## Intended use\n", - "Use the component to run an Apache Hadoop MapReduce job as one preprocessing step in a Kubeflow Pipeline. \n", - "\n", - "## Runtime arguments\n", - "| Argument | Description | Optional | Data type | Accepted values | Default |\n", - "|----------|-------------|----------|-----------|-----------------|---------|\n", - "| project_id | The Google Cloud Platform (GCP) project ID that the cluster belongs to. | No | GCPProjectID | | |\n", - "| region | The Dataproc region to handle the request. | No | GCPRegion | | |\n", - "| cluster_name | The name of the cluster to run the job. | No | String | | |\n", - "| main_jar_file_uri | The Hadoop Compatible Filesystem (HCFS) URI of the JAR file containing the main class to execute. | No | List | | |\n", - "| main_class | The name of the driver's main class. The JAR file that contains the class must be either in the default CLASSPATH or specified in `hadoop_job.jarFileUris`. | No | String | | |\n", - "| args | The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission. | Yes | List | | None |\n", - "| hadoop_job | The payload of a [HadoopJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/HadoopJob). | Yes | Dict | | None |\n", - "| job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | | None |\n", - "| wait_interval | The number of seconds to pause between polling the operation. 
| Yes | Integer | | 30 |\n", - "\n", - "Note: \n", - "`main_jar_file_uri`: The examples for the files are : \n", - "- `gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar` \n", - "- `hdfs:/tmp/test-samples/custom-wordcount.jarfile:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar`\n", - "\n", - "\n", - "## Output\n", - "Name | Description | Type\n", - ":--- | :---------- | :---\n", - "job_id | The ID of the created job. | String\n", - "\n", - "## Cautions & requirements\n", - "To use the component, you must:\n", - "* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project).\n", - "* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster).\n", - "* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", - "* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project.\n", - "\n", - "## Detailed description\n", - "\n", - "This component creates a Hadoop job from [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit).\n", - "\n", - "Follow these steps to use the component in a pipeline:\n", - "\n", - "1. Install the Kubeflow Pipeline SDK:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "\n", - "!pip3 install kfp --upgrade" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. Load the component using KFP SDK" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.components as comp\n", - "\n", - "dataproc_submit_hadoop_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/dataproc/submit_hadoop_job/component.yaml')\n", - "help(dataproc_submit_hadoop_job_op)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Sample\n", - "Note: The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template.\n", - "\n", - "\n", - "### Setup a Dataproc cluster\n", - "[Create a new Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) (or reuse an existing one) before running the sample code.\n", - "\n", - "\n", - "### Prepare a Hadoop job\n", - "Upload your Hadoop JAR file to a Cloud Storage bucket. In the sample, we will use a JAR file that is preinstalled in the main cluster, so there is no need to provide `main_jar_file_uri`. 
\n", - "\n", - "Here is the [WordCount example source code](https://github.com/apache/hadoop/blob/trunk/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordCount.java).\n", - "\n", - "To package a self-contained Hadoop MapReduce application from the source code, follow the [MapReduce Tutorial](https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html).\n", - "\n", - "\n", - "### Set sample parameters" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "parameters" - ] - }, - "outputs": [], - "source": [ - "PROJECT_ID = ''\n", - "CLUSTER_NAME = ''\n", - "OUTPUT_GCS_PATH = ''\n", - "REGION = 'us-central1'\n", - "MAIN_CLASS = 'org.apache.hadoop.examples.WordCount'\n", - "INTPUT_GCS_PATH = 'gs://ml-pipeline-playground/shakespeare1.txt'\n", - "EXPERIMENT_NAME = 'Dataproc - Submit Hadoop Job'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Insepct Input Data\n", - "The input file is a simple text file:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "!gsutil cat $INTPUT_GCS_PATH" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Clean up the existing output files (optional)\n", - "This is needed because the sample code requires the output folder to be a clean folder. To continue to run the sample, make sure that the service account of the notebook server has access to the `OUTPUT_GCS_PATH`.\n", - "\n", - "CAUTION: This will remove all blob files under `OUTPUT_GCS_PATH`." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "!gsutil rm $OUTPUT_GCS_PATH/**" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Example pipeline that uses the component" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.dsl as dsl\n", - "import json\n", - "@dsl.pipeline(\n", - " name='Dataproc submit Hadoop job pipeline',\n", - " description='Dataproc submit Hadoop job pipeline'\n", - ")\n", - "def dataproc_submit_hadoop_job_pipeline(\n", - " project_id = PROJECT_ID, \n", - " region = REGION,\n", - " cluster_name = CLUSTER_NAME,\n", - " main_jar_file_uri = '',\n", - " main_class = MAIN_CLASS,\n", - " args = json.dumps([\n", - " INTPUT_GCS_PATH,\n", - " OUTPUT_GCS_PATH\n", - " ]), \n", - " hadoop_job='', \n", - " job='{}', \n", - " wait_interval='30'\n", - "):\n", - " dataproc_submit_hadoop_job_op(\n", - " project_id=project_id, \n", - " region=region, \n", - " cluster_name=cluster_name, \n", - " main_jar_file_uri=main_jar_file_uri, \n", - " main_class=main_class,\n", - " args=args, \n", - " hadoop_job=hadoop_job, \n", - " job=job, \n", - " wait_interval=wait_interval)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Compile the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "pipeline_func = dataproc_submit_hadoop_job_pipeline\n", - "pipeline_filename = pipeline_func.__name__ + '.zip'\n", - "import kfp.compiler as compiler\n", - "compiler.Compiler().compile(pipeline_func, pipeline_filename)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Submit the pipeline for execution" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, 
- "outputs": [], - "source": [ - "#Specify pipeline argument values\n", - "arguments = {}\n", - "\n", - "#Get or create an experiment and submit a pipeline run\n", - "import kfp\n", - "client = kfp.Client()\n", - "experiment = client.create_experiment(EXPERIMENT_NAME)\n", - "\n", - "#Submit a pipeline run\n", - "run_name = pipeline_func.__name__ + ' run'\n", - "run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Inspect the output\n", - "The sample in the notebook will count the words in the input text and save them in sharded files. The command to inspect the output is:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "!gsutil cat $OUTPUT_GCS_PATH/*" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## References\n", - "* [Component Python code](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/component_sdk/python/kfp_component/google/dataproc/_submit_hadoop_job.py)\n", - "* [Component Docker file](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/Dockerfile)\n", - "* [Sample notebook](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/dataproc/submit_hadoop_job/sample.ipynb)\n", - "* [Dataproc HadoopJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/HadoopJob)\n", - "\n", - "## License\n", - "By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control." - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.4" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/components/contrib/google-cloud/dataproc/submit_hive_job/README.md b/components/contrib/google-cloud/dataproc/submit_hive_job/README.md deleted file mode 100644 index c69c0e38921..00000000000 --- a/components/contrib/google-cloud/dataproc/submit_hive_job/README.md +++ /dev/null @@ -1,189 +0,0 @@ - -# Name -Component: Data preparation using Apache Hive on YARN with Cloud Dataproc - -# Label -Cloud Dataproc, YARN, Apache Hive - -# Summary -A Kubeflow pipeline component to prepare data by submitting an Apache Hive job on YARN to Cloud Dataproc. - -# Facets - -Use case: - -Technique: - -Input data type: - -ML workflow: - -# Details -## Intended use -Use the component to run an Apache Hive job as one preprocessing step in a Kubeflow pipeline. - -## Runtime arguments -| Argument | Description | Optional | Data type | Accepted values | Default | -|----------|-------------|----------|-----------|-----------------|---------| -| project_id | The Google Cloud Platform (GCP) project ID that the cluster belongs to. | No | GCPProjectId | | | -| region | The Cloud Dataproc region to handle the request. | No | GCPRegion | | | -| cluster_name | The name of the cluster to run the job. | No | String | | | -| queries | The queries to execute the Hive job. 
Specify multiple queries in one string by separating them with semicolons. You do not need to terminate queries with semicolons. | Yes | List | | None | -| query_file_uri | The Hadoop Compatible Filesystem (HCFS) URI of the script that contains the Hive queries. | Yes | GCSPath | | None | -| script_variables | Mapping of the query’s variable names to their values (equivalent to the Hive command: SET name="value";). | Yes | Dict | | None | -| hive_job | The payload of a [Hive job](https://cloud.google.com/dataproc/docs/reference/rest/v1/HiveJob) | Yes | Dict | | None | -| job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | | None | -| wait_interval | The number of seconds to pause between polling the operation. | Yes | Integer | | 30 | - -## Output -Name | Description | Type -:--- | :---------- | :--- -job_id | The ID of the created job. | String - -## Cautions & requirements -To use the component, you must: -* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project). -* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster). -* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. -* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project. - -## Detailed description -This component creates a Hive job from the [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit). - -Follow these steps to use the component in a pipeline: -1. Install the Kubeflow pipeline's SDK: - - ```python - %%capture --no-stderr - - !pip3 install kfp --upgrade - ``` - -2. Load the component using the Kubeflow pipeline's SDK: - - ```python - import kfp.components as comp - - dataproc_submit_hive_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/dataproc/submit_hive_job/component.yaml') - help(dataproc_submit_hive_job_op) - ``` - -### Sample - -The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. - - -#### Setup a Dataproc cluster - -[Create a new Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) (or reuse an existing one) before running the sample code. - -#### Prepare a Hive query - -You can put your Hive queries in the `queries` list, or you can use `query_file_uri`. In this sample, we will use a hard coded query in the `queries` list to select data from a public CSV file in Cloud Storage. 
- -For more details, see the [Hive language manual.](https://cwiki.apache.org/confluence/display/Hive/LanguageManual) - -#### Set sample parameters - -```python -PROJECT_ID = '' -CLUSTER_NAME = '' -REGION = 'us-central1' -QUERY = ''' -DROP TABLE IF EXISTS natality_csv; -CREATE EXTERNAL TABLE natality_csv ( - source_year BIGINT, year BIGINT, month BIGINT, day BIGINT, wday BIGINT, - state STRING, is_male BOOLEAN, child_race BIGINT, weight_pounds FLOAT, - plurality BIGINT, apgar_1min BIGINT, apgar_5min BIGINT, - mother_residence_state STRING, mother_race BIGINT, mother_age BIGINT, - gestation_weeks BIGINT, lmp STRING, mother_married BOOLEAN, - mother_birth_state STRING, cigarette_use BOOLEAN, cigarettes_per_day BIGINT, - alcohol_use BOOLEAN, drinks_per_week BIGINT, weight_gain_pounds BIGINT, - born_alive_alive BIGINT, born_alive_dead BIGINT, born_dead BIGINT, - ever_born BIGINT, father_race BIGINT, father_age BIGINT, - record_weight BIGINT -) -ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' -LOCATION 'gs://public-datasets/natality/csv'; - -SELECT * FROM natality_csv LIMIT 10;''' -EXPERIMENT_NAME = 'Dataproc - Submit Hive Job' -``` - -#### Example pipeline that uses the component - - -```python -import kfp.dsl as dsl -import json -@dsl.pipeline( - name='Dataproc submit Hive job pipeline', - description='Dataproc submit Hive job pipeline' -) -def dataproc_submit_hive_job_pipeline( - project_id = PROJECT_ID, - region = REGION, - cluster_name = CLUSTER_NAME, - queries = json.dumps([QUERY]), - query_file_uri = '', - script_variables = '', - hive_job='', - job='', - wait_interval='30' -): - dataproc_submit_hive_job_op( - project_id=project_id, - region=region, - cluster_name=cluster_name, - queries=queries, - query_file_uri=query_file_uri, - script_variables=script_variables, - hive_job=hive_job, - job=job, - wait_interval=wait_interval) - -``` - -#### Compile the pipeline - - -```python -pipeline_func = dataproc_submit_hive_job_pipeline -pipeline_filename = pipeline_func.__name__ + '.zip' -import kfp.compiler as compiler -compiler.Compiler().compile(pipeline_func, pipeline_filename) -``` - -#### Submit the pipeline for execution - - -```python -#Specify values for the pipeline's arguments -arguments = {} - -#Get or create an experiment -import kfp -client = kfp.Client() -experiment = client.create_experiment(EXPERIMENT_NAME) - -#Submit a pipeline run -run_name = pipeline_func.__name__ + ' run' -run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments) -``` - -## References -* [Component Python code](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/component_sdk/python/kfp_component/google/dataproc/_submit_hive_job.py) -* [Component Docker file](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/Dockerfile) -* [Sample notebook](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/dataproc/submit_hive_job/sample.ipynb) -* [Dataproc HiveJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/HiveJob) - -## License -By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control. 
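Note: the removed Hive README only exercises the inline `queries` list, even though it mentions that `query_file_uri` can point at a script in Cloud Storage instead. The sketch below is illustrative only, assuming the legacy kfp v1 SDK, the 1.7.0-rc.3 component URL pinned in the removed docs, and a hypothetical Cloud Storage path; it is not part of the deleted files.

```python
# Illustrative sketch: invoke the (now removed) Hive component with query_file_uri
# instead of an inline queries list. The GCS path below is hypothetical.
import kfp.dsl as dsl
import kfp.components as comp

dataproc_submit_hive_job_op = comp.load_component_from_url(
    'https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/dataproc/submit_hive_job/component.yaml')

@dsl.pipeline(
    name='Dataproc Hive job from a query file',
    description='Runs Hive queries stored in a Cloud Storage script instead of inline queries'
)
def hive_job_from_file_pipeline(
    project_id='',        # your GCP project ID
    region='us-central1',
    cluster_name='',      # an existing Dataproc cluster
    query_file_uri='gs://your-bucket/queries/natality.hql'  # hypothetical Hive script location
):
    dataproc_submit_hive_job_op(
        project_id=project_id,
        region=region,
        cluster_name=cluster_name,
        queries='',                     # leave the inline list empty...
        query_file_uri=query_file_uri,  # ...and point at the script in Cloud Storage instead
        script_variables='',
        hive_job='',
        job='',
        wait_interval='30')
```

Either mechanism works with the component interface defined in the component.yaml that follows; the removed sample used the inline list only because its query is short enough to embed.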
diff --git a/components/contrib/google-cloud/dataproc/submit_hive_job/component.yaml b/components/contrib/google-cloud/dataproc/submit_hive_job/component.yaml deleted file mode 100644 index b3cd742f4b3..00000000000 --- a/components/contrib/google-cloud/dataproc/submit_hive_job/component.yaml +++ /dev/null @@ -1,95 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: dataproc_submit_hive_job -description: >- - Submits a Cloud Dataproc job for running Apache Hive queries on YARN. -metadata: - labels: - add-pod-env: 'true' -inputs: - - name: project_id - description: >- - Required. The ID of the Google Cloud Platform project that the cluster - belongs to. - type: GCPProjectID - - name: region - description: >- - Required. The Cloud Dataproc region in which to handle the request. - type: GCPRegion - - name: cluster_name - description: 'Required. The cluster to run the job.' - type: String - - name: queries - default: '' - description: >- - Required. The queries to execute. You do not need to - terminate a query with a semicolon. Multiple queries can be specified - in one string by separating each with a semicolon. - type: List - - name: query_file_uri - default: '' - description: >- - The HCFS URI of the script that contains Hive queries. - type: GCSPath - - name: script_variables - default: '' - description: >- - Optional. Mapping of query variable names to - values (equivalent to the Hive command: SET name="value";). - type: Dict - - name: hive_job - default: '' - description: >- - Optional. The full payload of a - [HiveJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/HiveJob). - type: Dict - - name: job - default: '' - description: >- - Optional. The full payload of a - [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). - type: Dict - - name: wait_interval - default: '30' - description: >- - Optional. The wait seconds between polling the operation. - Defaults to 30. - type: Integer -outputs: - - name: job_id - description: 'The ID of the created job.' 
- type: String - - name: MLPipeline UI metadata - type: UI metadata -implementation: - container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.7.0-rc.3 - command: ['python', '-u', '-m', 'kfp_component.launcher'] - args: [ - --ui_metadata_path, {outputPath: MLPipeline UI metadata}, - kfp_component.google.dataproc, submit_hive_job, - --project_id, {inputValue: project_id}, - --region, {inputValue: region}, - --cluster_name, {inputValue: cluster_name}, - --queries, {inputValue: queries}, - --query_file_uri, {inputValue: query_file_uri}, - --script_variables, {inputValue: script_variables}, - --hive_job, {inputValue: hive_job}, - --job, {inputValue: job}, - --wait_interval, {inputValue: wait_interval}, - --job_id_output_path, {outputPath: job_id}, - ] - env: - KFP_POD_NAME: "{{pod.name}}" diff --git a/components/contrib/google-cloud/dataproc/submit_hive_job/sample.ipynb b/components/contrib/google-cloud/dataproc/submit_hive_job/sample.ipynb deleted file mode 100644 index 88fb8b923b2..00000000000 --- a/components/contrib/google-cloud/dataproc/submit_hive_job/sample.ipynb +++ /dev/null @@ -1,264 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Name\n", - "Data preparation using Apache Hive on YARN with Cloud Dataproc\n", - "\n", - "# Label\n", - "Cloud Dataproc, GCP, Cloud Storage, YARN, Hive, Apache\n", - "\n", - "# Summary\n", - "A Kubeflow Pipeline component to prepare data by submitting an Apache Hive job on YARN to Cloud Dataproc.\n", - "\n", - "# Details\n", - "## Intended use\n", - "Use the component to run an Apache Hive job as one preprocessing step in a Kubeflow Pipeline.\n", - "\n", - "## Runtime arguments\n", - "| Argument | Description | Optional | Data type | Accepted values | Default |\n", - "|----------|-------------|----------|-----------|-----------------|---------|\n", - "| project_id | The Google Cloud Platform (GCP) project ID that the cluster belongs to. | No | GCPProjectId | | |\n", - "| region | The Cloud Dataproc region to handle the request. | No | GCPRegion | | |\n", - "| cluster_name | The name of the cluster to run the job. | No | String | | |\n", - "| queries | The queries to execute the Hive job. Specify multiple queries in one string by separating them with semicolons. You do not need to terminate queries with semicolons. | Yes | List | | None |\n", - "| query_file_uri | The HCFS URI of the script that contains the Hive queries. | Yes | GCSPath | | None |\n", - "| script_variables | Mapping of the query’s variable names to their values (equivalent to the Hive command: SET name=\"value\";). | Yes | Dict | | None |\n", - "| hive_job | The payload of a [HiveJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/HiveJob) | Yes | Dict | | None |\n", - "| job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | | None |\n", - "| wait_interval | The number of seconds to pause between polling the operation. | Yes | Integer | | 30 |\n", - "\n", - "## Output\n", - "Name | Description | Type\n", - ":--- | :---------- | :---\n", - "job_id | The ID of the created job. | String\n", - "\n", - "## Cautions & requirements\n", - "To use the component, you must:\n", - "* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project).\n", - "* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster).\n", - "* The component can authenticate to GCP. 
Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", - "* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project.\n", - "\n", - "## Detailed description\n", - "This component creates a Hive job from [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit).\n", - "\n", - "Follow these steps to use the component in a pipeline:\n", - "1. Install the Kubeflow Pipeline SDK:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "\n", - "!pip3 install kfp --upgrade" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. Load the component using KFP SDK" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.components as comp\n", - "\n", - "dataproc_submit_hive_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/dataproc/submit_hive_job/component.yaml')\n", - "help(dataproc_submit_hive_job_op)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Sample\n", - "\n", - "Note: The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template.\n", - "\n", - "\n", - "#### Setup a Dataproc cluster\n", - "\n", - "[Create a new Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) (or reuse an existing one) before running the sample code.\n", - "\n", - "#### Prepare a Hive query\n", - "\n", - "Put your Hive queries in the queries list, or upload your Hive queries into a file saved in a Cloud Storage bucket and then enter the Cloud Storage bucket’s path in `query_file_uri.` In this sample, we will use a hard coded query in the queries list to select data from a public CSV file from Cloud Storage.\n", - "\n", - "For more details, see the [Hive language manual.](https://cwiki.apache.org/confluence/display/Hive/LanguageManual)\n", - "\n", - "\n", - "#### Set sample parameters" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "parameters" - ] - }, - "outputs": [], - "source": [ - "PROJECT_ID = ''\n", - "CLUSTER_NAME = ''\n", - "REGION = 'us-central1'\n", - "QUERY = '''\n", - "DROP TABLE IF EXISTS natality_csv;\n", - "CREATE EXTERNAL TABLE natality_csv (\n", - " source_year BIGINT, year BIGINT, month BIGINT, day BIGINT, wday BIGINT,\n", - " state STRING, is_male BOOLEAN, child_race BIGINT, weight_pounds FLOAT,\n", - " plurality BIGINT, apgar_1min BIGINT, apgar_5min BIGINT,\n", - " mother_residence_state STRING, mother_race BIGINT, mother_age BIGINT,\n", - " gestation_weeks BIGINT, lmp STRING, mother_married BOOLEAN,\n", - " mother_birth_state STRING, cigarette_use BOOLEAN, cigarettes_per_day BIGINT,\n", - " alcohol_use BOOLEAN, drinks_per_week BIGINT, weight_gain_pounds BIGINT,\n", - " born_alive_alive BIGINT, born_alive_dead BIGINT, born_dead BIGINT,\n", - " ever_born BIGINT, father_race BIGINT, father_age BIGINT,\n", - " record_weight BIGINT\n", - ")\n", - "ROW FORMAT DELIMITED FIELDS TERMINATED BY ','\n", - "LOCATION 'gs://public-datasets/natality/csv';\n", - "\n", - "SELECT * FROM natality_csv LIMIT 10;'''\n", - "EXPERIMENT_NAME = 'Dataproc - Submit Hive Job'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - 
"source": [ - "#### Example pipeline that uses the component" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.dsl as dsl\n", - "import json\n", - "@dsl.pipeline(\n", - " name='Dataproc submit Hive job pipeline',\n", - " description='Dataproc submit Hive job pipeline'\n", - ")\n", - "def dataproc_submit_hive_job_pipeline(\n", - " project_id = PROJECT_ID, \n", - " region = REGION,\n", - " cluster_name = CLUSTER_NAME,\n", - " queries = json.dumps([QUERY]),\n", - " query_file_uri = '',\n", - " script_variables = '', \n", - " hive_job='', \n", - " job='', \n", - " wait_interval='30'\n", - "):\n", - " dataproc_submit_hive_job_op(\n", - " project_id=project_id, \n", - " region=region, \n", - " cluster_name=cluster_name, \n", - " queries=queries, \n", - " query_file_uri=query_file_uri,\n", - " script_variables=script_variables, \n", - " hive_job=hive_job, \n", - " job=job, \n", - " wait_interval=wait_interval)\n", - " " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Compile the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "pipeline_func = dataproc_submit_hive_job_pipeline\n", - "pipeline_filename = pipeline_func.__name__ + '.zip'\n", - "import kfp.compiler as compiler\n", - "compiler.Compiler().compile(pipeline_func, pipeline_filename)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Submit the pipeline for execution" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "#Specify pipeline argument values\n", - "arguments = {}\n", - "\n", - "#Get or create an experiment and submit a pipeline run\n", - "import kfp\n", - "client = kfp.Client()\n", - "experiment = client.create_experiment(EXPERIMENT_NAME)\n", - "\n", - "#Submit a pipeline run\n", - "run_name = pipeline_func.__name__ + ' run'\n", - "run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## References\n", - "* [Component python code](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/component_sdk/python/kfp_component/google/dataproc/_submit_hive_job.py)\n", - "* [Component docker file](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/Dockerfile)\n", - "* [Sample notebook](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/dataproc/submit_hive_job/sample.ipynb)\n", - "* [Dataproc HiveJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/HiveJob)\n", - "\n", - "## License\n", - "By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control." 
- ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.4" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/components/contrib/google-cloud/dataproc/submit_pig_job/README.md b/components/contrib/google-cloud/dataproc/submit_pig_job/README.md deleted file mode 100644 index 69273dde92a..00000000000 --- a/components/contrib/google-cloud/dataproc/submit_pig_job/README.md +++ /dev/null @@ -1,185 +0,0 @@ - -# Name -Component: Data preparation using Apache Pig on YARN with Cloud Dataproc - -# Labels -Cloud Dataproc, YARN, Apache Pig, Kubeflow - - -# Summary -A Kubeflow pipeline component to prepare data by submitting an Apache Pig job on YARN to Cloud Dataproc. - -# Facets - -Use case: -Other - -Technique: -Other - -Input data type: -Tabular - -ML workflow: -Data preparation - -# Details -## Intended use -Use this component to run an Apache Pig job as one preprocessing step in a Kubeflow pipeline. - -## Runtime arguments -| Argument | Description | Optional | Data type | Accepted values | Default | -|:----------|:-------------|:----------|:-----------|:-----------------|:---------| -| project_id | The ID of the Google Cloud Platform (GCP) project that the cluster belongs to. | No | GCPProjectID |- | -| -| region | The Cloud Dataproc region that handles the request. | No | GCPRegion | - |- | -| cluster_name | The name of the cluster that runs the job. | No | String | - | - | -| queries | The queries to execute the Pig job. Specify multiple queries in one string by separating them with semicolons. You do not need to terminate queries with semicolons. | Yes | List | -| None | -| query_file_uri | The Cloud Storage bucket path pointing to a file that contains the Pig queries. | Yes | GCSPath | - | None | -| script_variables | Mapping of the query’s variable names to their values (equivalent to the Pig command: SET name="value";). | Yes | Dict | -| None | -| pig_job | The payload of a [PigJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/PigJob). | Yes | Dict | - | None | -| job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | | None | -| wait_interval | The number of seconds to pause between polling the operation. | Yes | Integer | - | 30 | - -## Output -Name | Description | Type -:--- | :---------- | :--- -job_id | The ID of the created job. | String - -## Cautions & requirements - -To use the component, you must: -* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project). -* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster). -* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. -* Grant the Kubeflow user service account the role, `roles/dataproc.editor`, on the project. - -## Detailed description -This component creates a Pig job from the [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit). - -Follow these steps to use the component in a pipeline: -1. 
Install the Kubeflow pipeline's SDK - - - - ```python - %%capture --no-stderr - - !pip3 install kfp --upgrade - ``` - -2. Load the component using the Kubeflow pipeline's SDK - - - ```python - import kfp.components as comp - - dataproc_submit_pig_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/dataproc/submit_pig_job/component.yaml') - help(dataproc_submit_pig_job_op) - ``` - -### Sample - -The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. - -#### Setup a Dataproc cluster - -[Create a new Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) (or reuse an existing one) before running the sample code. - -#### Prepare a Pig query - -You can put your Pig queries in the `queries` list, or you can use `query_file_uri`. In this sample, we will use a hard-coded query in the `queries` list to select data from a local password file. - -For more details on Apache Pig, see the [Pig documentation.](http://pig.apache.org/docs/latest/) - -#### Set sample parameters - - -```python -PROJECT_ID = '' -CLUSTER_NAME = '' - -REGION = 'us-central1' -QUERY = ''' -natality_csv = load 'gs://public-datasets/natality/csv' using PigStorage(':'); -top_natality_csv = LIMIT natality_csv 10; -dump natality_csv;''' -EXPERIMENT_NAME = 'Dataproc - Submit Pig Job' -``` - -#### Example pipeline that uses the component - - -```python -import kfp.dsl as dsl -import json -@dsl.pipeline( - name='Dataproc submit Pig job pipeline', - description='Dataproc submit Pig job pipeline' -) -def dataproc_submit_pig_job_pipeline( - project_id = PROJECT_ID, - region = REGION, - cluster_name = CLUSTER_NAME, - queries = json.dumps([QUERY]), - query_file_uri = '', - script_variables = '', - pig_job='', - job='', - wait_interval='30' -): - dataproc_submit_pig_job_op( - project_id=project_id, - region=region, - cluster_name=cluster_name, - queries=queries, - query_file_uri=query_file_uri, - script_variables=script_variables, - pig_job=pig_job, - job=job, - wait_interval=wait_interval) - -``` - -#### Compile the pipeline - - -```python -pipeline_func = dataproc_submit_pig_job_pipeline -pipeline_filename = pipeline_func.__name__ + '.zip' -import kfp.compiler as compiler -compiler.Compiler().compile(pipeline_func, pipeline_filename) -``` - -#### Submit the pipeline for execution - - -```python -#Specify values for the pipeline's arguments -arguments = {} - -#Get or create an experiment -import kfp -client = kfp.Client() -experiment = client.create_experiment(EXPERIMENT_NAME) - -#Submit a pipeline run -run_name = pipeline_func.__name__ + ' run' -run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments) -``` - -## References -* [Create a new Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) -* [Pig documentation](http://pig.apache.org/docs/latest/) -* [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs) -* [PigJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/PigJob) - -## License -By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control. 
diff --git a/components/contrib/google-cloud/dataproc/submit_pig_job/component.yaml b/components/contrib/google-cloud/dataproc/submit_pig_job/component.yaml deleted file mode 100644 index 3638525a11e..00000000000 --- a/components/contrib/google-cloud/dataproc/submit_pig_job/component.yaml +++ /dev/null @@ -1,95 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: dataproc_submit_pig_job -description: >- - Submits a Cloud Dataproc job for running Apache Pig queries on YARN. -metadata: - labels: - add-pod-env: 'true' -inputs: - - name: project_id - description: >- - Required. The ID of the Google Cloud Platform project that the cluster - belongs to. - type: GCPProjectID - - name: region - description: >- - Required. The Cloud Dataproc region in which to handle the request. - type: GCPRegion - - name: cluster_name - description: 'Required. The cluster to run the job.' - type: String - - name: queries - default: '' - description: >- - Required. The queries to execute. You do not need to - terminate a query with a semicolon. Multiple queries can be specified - in one string by separating each with a semicolon. - type: List - - name: query_file_uri - default: '' - description: >- - The HCFS URI of the script that contains Pig queries. - type: GCSPath - - name: script_variables - default: '' - description: >- - Optional. Mapping of query variable names to - values (equivalent to the Pig command: SET name="value";). - type: Dict - - name: pig_job - default: '' - description: >- - Optional. The full payload of a - [PigJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/PigJob). - type: Dict - - name: job - default: '' - description: >- - Optional. The full payload of a - [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). - type: Dict - - name: wait_interval - default: '30' - description: >- - Optional. The wait seconds between polling the operation. - Defaults to 30. - type: Integer -outputs: - - name: job_id - description: 'The ID of the created job.' 
- type: String - - name: MLPipeline UI metadata - type: UI metadata -implementation: - container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.7.0-rc.3 - command: ['python', '-u', '-m', 'kfp_component.launcher'] - args: [ - --ui_metadata_path, {outputPath: MLPipeline UI metadata}, - kfp_component.google.dataproc, submit_pig_job, - --project_id, {inputValue: project_id}, - --region, {inputValue: region}, - --cluster_name, {inputValue: cluster_name}, - --queries, {inputValue: queries}, - --query_file_uri, {inputValue: query_file_uri}, - --script_variables, {inputValue: script_variables}, - --pig_job, {inputValue: pig_job}, - --job, {inputValue: job}, - --wait_interval, {inputValue: wait_interval}, - --job_id_output_path, {outputPath: job_id}, - ] - env: - KFP_POD_NAME: "{{pod.name}}" diff --git a/components/contrib/google-cloud/dataproc/submit_pig_job/sample.ipynb b/components/contrib/google-cloud/dataproc/submit_pig_job/sample.ipynb deleted file mode 100644 index fdb34756dd2..00000000000 --- a/components/contrib/google-cloud/dataproc/submit_pig_job/sample.ipynb +++ /dev/null @@ -1,254 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Name\n", - "Data preparation using Apache Pig on YARN with Cloud Dataproc\n", - "\n", - "# Label\n", - "Cloud Dataproc, GCP, Cloud Storage, YARN, Pig, Apache, Kubeflow, pipelines, components\n", - "\n", - "\n", - "# Summary\n", - "A Kubeflow Pipeline component to prepare data by submitting an Apache Pig job on YARN to Cloud Dataproc.\n", - "\n", - "\n", - "# Details\n", - "## Intended use\n", - "Use the component to run an Apache Pig job as one preprocessing step in a Kubeflow Pipeline.\n", - "\n", - "## Runtime arguments\n", - "| Argument | Description | Optional | Data type | Accepted values | Default |\n", - "|----------|-------------|----------|-----------|-----------------|---------|\n", - "| project_id | The ID of the Google Cloud Platform (GCP) project that the cluster belongs to. | No | GCPProjectID | | |\n", - "| region | The Cloud Dataproc region to handle the request. | No | GCPRegion | | |\n", - "| cluster_name | The name of the cluster to run the job. | No | String | | |\n", - "| queries | The queries to execute the Pig job. Specify multiple queries in one string by separating them with semicolons. You do not need to terminate queries with semicolons. | Yes | List | | None |\n", - "| query_file_uri | The HCFS URI of the script that contains the Pig queries. | Yes | GCSPath | | None |\n", - "| script_variables | Mapping of the query’s variable names to their values (equivalent to the Pig command: SET name=\"value\";). | Yes | Dict | | None |\n", - "| pig_job | The payload of a [PigJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/PigJob). | Yes | Dict | | None |\n", - "| job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | | None |\n", - "| wait_interval | The number of seconds to pause between polling the operation. | Yes | Integer | | 30 |\n", - "\n", - "## Output\n", - "Name | Description | Type\n", - ":--- | :---------- | :---\n", - "job_id | The ID of the created job. 
| String\n", - "\n", - "## Cautions & requirements\n", - "\n", - "To use the component, you must:\n", - "* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project).\n", - "* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster).\n", - "* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", - "* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project.\n", - "\n", - "## Detailed description\n", - "This component creates a Pig job from [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit).\n", - "\n", - "Follow these steps to use the component in a pipeline:\n", - "1. Install the Kubeflow Pipeline SDK:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "\n", - "!pip3 install kfp --upgrade" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. Load the component using KFP SDK" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.components as comp\n", - "\n", - "dataproc_submit_pig_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/dataproc/submit_pig_job/component.yaml')\n", - "help(dataproc_submit_pig_job_op)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Sample\n", - "\n", - "Note: The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template.\n", - "\n", - "\n", - "#### Setup a Dataproc cluster\n", - "\n", - "[Create a new Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) (or reuse an existing one) before running the sample code.\n", - "\n", - "\n", - "#### Prepare a Pig query\n", - "\n", - "Either put your Pig queries in the `queries` list, or upload your Pig queries into a file to a Cloud Storage bucket and then enter the Cloud Storage bucket’s path in `query_file_uri`. 
In this sample, we will use a hard coded query in the `queries` list to select data from a local `passwd` file.\n", - "\n", - "For more details on Apache Pig, see the [Pig documentation.](http://pig.apache.org/docs/latest/)\n", - "\n", - "#### Set sample parameters" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "parameters" - ] - }, - "outputs": [], - "source": [ - "PROJECT_ID = ''\n", - "CLUSTER_NAME = ''\n", - "\n", - "REGION = 'us-central1'\n", - "QUERY = '''\n", - "natality_csv = load 'gs://public-datasets/natality/csv' using PigStorage(':');\n", - "top_natality_csv = LIMIT natality_csv 10; \n", - "dump natality_csv;'''\n", - "EXPERIMENT_NAME = 'Dataproc - Submit Pig Job'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Example pipeline that uses the component" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.dsl as dsl\n", - "import json\n", - "@dsl.pipeline(\n", - " name='Dataproc submit Pig job pipeline',\n", - " description='Dataproc submit Pig job pipeline'\n", - ")\n", - "def dataproc_submit_pig_job_pipeline(\n", - " project_id = PROJECT_ID, \n", - " region = REGION,\n", - " cluster_name = CLUSTER_NAME,\n", - " queries = json.dumps([QUERY]),\n", - " query_file_uri = '',\n", - " script_variables = '', \n", - " pig_job='', \n", - " job='', \n", - " wait_interval='30'\n", - "):\n", - " dataproc_submit_pig_job_op(\n", - " project_id=project_id, \n", - " region=region, \n", - " cluster_name=cluster_name, \n", - " queries=queries, \n", - " query_file_uri=query_file_uri,\n", - " script_variables=script_variables, \n", - " pig_job=pig_job, \n", - " job=job, \n", - " wait_interval=wait_interval)\n", - " " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Compile the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "pipeline_func = dataproc_submit_pig_job_pipeline\n", - "pipeline_filename = pipeline_func.__name__ + '.zip'\n", - "import kfp.compiler as compiler\n", - "compiler.Compiler().compile(pipeline_func, pipeline_filename)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Submit the pipeline for execution" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "#Specify pipeline argument values\n", - "arguments = {}\n", - "\n", - "#Get or create an experiment and submit a pipeline run\n", - "import kfp\n", - "client = kfp.Client()\n", - "experiment = client.create_experiment(EXPERIMENT_NAME)\n", - "\n", - "#Submit a pipeline run\n", - "run_name = pipeline_func.__name__ + ' run'\n", - "run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## References\n", - "* [Create a new Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) \n", - "* [Pig documentation](http://pig.apache.org/docs/latest/)\n", - "* [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs)\n", - "* [PigJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/PigJob)\n", - "\n", - "## License\n", - "By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of 
Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control." - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.4" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} \ No newline at end of file diff --git a/components/contrib/google-cloud/dataproc/submit_pyspark_job/README.md b/components/contrib/google-cloud/dataproc/submit_pyspark_job/README.md deleted file mode 100644 index 8067d353bcb..00000000000 --- a/components/contrib/google-cloud/dataproc/submit_pyspark_job/README.md +++ /dev/null @@ -1,178 +0,0 @@ - -# Name -Component: Data preparation using PySpark on Cloud Dataproc - - -# Labels -Cloud Dataproc, PySpark, Kubeflow - - -# Summary -A Kubeflow Pipeline component to prepare data by submitting a PySpark job to Cloud Dataproc. - -# Facets - -Use case: - -Technique: - -Input data type: - -ML workflow: - -# Details -## Intended use -Use this component to run an Apache PySpark job as one preprocessing step in a Kubeflow pipeline. - - -## Runtime arguments -| Argument | Description | Optional | Data type | Accepted values | Default | -|:----------------------|:------------|:----------|:--------------|:-----------------|:---------| -| project_id | The ID of the Google Cloud Platform (GCP) project that the cluster belongs to. | No | GCPProjectID | - | - | -| region | The Cloud Dataproc region to handle the request. | No | GCPRegion | - | - | -| cluster_name | The name of the cluster to run the job. | No | String | - | - | -| main_python_file_uri | The HCFS URI of the Python file to use as the driver. This must be a .py file. | No | GCSPath | - | - | -| args | The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. | Yes | List | - | None | -| pyspark_job | The payload of a [PySparkJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/PySparkJob). | Yes | Dict | - | None | -| job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | - | None | - -## Output -Name | Description | Type -:--- | :---------- | :--- -job_id | The ID of the created job. | String - -## Cautions & requirements - -To use the component, you must: -* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project). -* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster). -* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. -* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project. - -## Detailed description - -This component creates a PySpark job from the [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit). - -Follow these steps to use the component in a pipeline: - -1. Install the Kubeflow pipeline's SDK: - - - ```python - %%capture --no-stderr - - !pip3 install kfp --upgrade - ``` - -2. 
Load the Kubeflow pipeline's SDK: - - - ```python - import kfp.components as comp - - dataproc_submit_pyspark_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/dataproc/submit_pyspark_job/component.yaml') - help(dataproc_submit_pyspark_job_op) - ``` - -### Sample - -The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. - - -#### Setup a Dataproc cluster - -[Create a new Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) (or reuse an existing one) before running the sample code. - - -#### Prepare a PySpark job - -Upload your PySpark code file to a Cloud Storage bucket. For example, this is a publicly accessible `hello-world.py` in Cloud Storage: - -```python -!gsutil cat gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py -``` - -#### Set sample parameters - -```python -PROJECT_ID = '' -CLUSTER_NAME = '' -REGION = 'us-central1' -PYSPARK_FILE_URI = 'gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py' -ARGS = '' -EXPERIMENT_NAME = 'Dataproc - Submit PySpark Job' -``` - -#### Example pipeline that uses the component - -```python -import kfp.dsl as dsl -import json -@dsl.pipeline( - name='Dataproc submit PySpark job pipeline', - description='Dataproc submit PySpark job pipeline' -) -def dataproc_submit_pyspark_job_pipeline( - project_id = PROJECT_ID, - region = REGION, - cluster_name = CLUSTER_NAME, - main_python_file_uri = PYSPARK_FILE_URI, - args = ARGS, - pyspark_job='{}', - job='{}', - wait_interval='30' -): - dataproc_submit_pyspark_job_op( - project_id=project_id, - region=region, - cluster_name=cluster_name, - main_python_file_uri=main_python_file_uri, - args=args, - pyspark_job=pyspark_job, - job=job, - wait_interval=wait_interval) - -``` - -#### Compile the pipeline - - -```python -pipeline_func = dataproc_submit_pyspark_job_pipeline -pipeline_filename = pipeline_func.__name__ + '.zip' -import kfp.compiler as compiler -compiler.Compiler().compile(pipeline_func, pipeline_filename) -``` - -#### Submit the pipeline for execution - -```python -#Specify values for the pipeline's arguments -arguments = {} - -#Get or create an experiment -import kfp -client = kfp.Client() -experiment = client.create_experiment(EXPERIMENT_NAME) - -#Submit a pipeline run -run_name = pipeline_func.__name__ + ' run' -run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments) -``` - -## References - -* [Create a new Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) -* [PySparkJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/PySparkJob) -* [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs) - -## License -By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control. 
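
The PySpark sample above leaves `args` and `pyspark_job` empty. Where the driver needs arguments or extra Python modules, they can be supplied as JSON-encoded values, per the runtime-argument table and the PySparkJob reference linked above. The following is a minimal, editor-added sketch; the bucket paths and argument names are hypothetical, and the accepted `pyspark_job` fields should be checked against the PySparkJob reference.

```python
import json
import kfp.dsl as dsl

@dsl.pipeline(
    name='Dataproc submit PySpark job with args',
    description='Passes driver arguments and extra Python files via pyspark_job'
)
def pyspark_job_with_args_pipeline(
    project_id=PROJECT_ID,
    region=REGION,
    cluster_name=CLUSTER_NAME,
    main_python_file_uri=PYSPARK_FILE_URI,
):
    dataproc_submit_pyspark_job_op(
        project_id=project_id,
        region=region,
        cluster_name=cluster_name,
        main_python_file_uri=main_python_file_uri,
        # `args` is a List input, so it is passed JSON-encoded; these values
        # are illustrative only.
        args=json.dumps(['--input', 'gs://my-bucket/data/input.csv']),
        # Additional PySparkJob payload fields (for example pythonFileUris)
        # can be supplied through `pyspark_job`; the module path here is
        # hypothetical.
        pyspark_job=json.dumps({
            'pythonFileUris': ['gs://my-bucket/src/helpers.py'],
        }))
```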
diff --git a/components/contrib/google-cloud/dataproc/submit_pyspark_job/component.yaml b/components/contrib/google-cloud/dataproc/submit_pyspark_job/component.yaml deleted file mode 100644 index 482ceb5988e..00000000000 --- a/components/contrib/google-cloud/dataproc/submit_pyspark_job/component.yaml +++ /dev/null @@ -1,88 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: dataproc_submit_pyspark_job -description: >- - Submits a Cloud Dataproc job for running Apache PySpark applications on YARN. -metadata: - labels: - add-pod-env: 'true' -inputs: - - name: project_id - description: >- - Required. The ID of the Google Cloud Platform project that the cluster - belongs to. - type: GCPProjectID - - name: region - description: >- - Required. The Cloud Dataproc region in which to handle the request. - type: GCPRegion - - name: cluster_name - description: 'Required. The cluster to run the job.' - type: String - - name: main_python_file_uri - description: >- - Required. The HCFS URI of the main Python file to - use as the driver. Must be a .py file. - type: GCSPath - - name: args - default: '' - description: >- - Optional. The arguments to pass to the driver. Do not include - arguments, such as --conf, that can be set as job properties, since a - collision may occur that causes an incorrect job submission. - type: List - - name: pyspark_job - default: '' - description: >- - Optional. The full payload of a - [PySparkJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/PySparkJob). - type: Dict - - name: job - default: '' - description: >- - Optional. The full payload of a - [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). - type: Dict - - name: wait_interval - default: '30' - description: >- - Optional. The wait seconds between polling the operation. - Defaults to 30. - type: Integer -outputs: - - name: job_id - description: 'The ID of the created job.' 
- type: String - - name: MLPipeline UI metadata - type: UI metadata -implementation: - container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.7.0-rc.3 - command: ['python', '-u', '-m', 'kfp_component.launcher'] - args: [ - --ui_metadata_path, {outputPath: MLPipeline UI metadata}, - kfp_component.google.dataproc, submit_pyspark_job, - --project_id, {inputValue: project_id}, - --region, {inputValue: region}, - --cluster_name, {inputValue: cluster_name}, - --main_python_file_uri, {inputValue: main_python_file_uri}, - --args, {inputValue: args}, - --pyspark_job, {inputValue: pyspark_job}, - --job, {inputValue: job}, - --wait_interval, {inputValue: wait_interval}, - --job_id_output_path, {outputPath: job_id}, - ] - env: - KFP_POD_NAME: "{{pod.name}}" diff --git a/components/contrib/google-cloud/dataproc/submit_pyspark_job/sample.ipynb b/components/contrib/google-cloud/dataproc/submit_pyspark_job/sample.ipynb deleted file mode 100644 index 04718c3fd66..00000000000 --- a/components/contrib/google-cloud/dataproc/submit_pyspark_job/sample.ipynb +++ /dev/null @@ -1,263 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Name\n", - "Data preparation using PySpark on Cloud Dataproc\n", - "\n", - "\n", - "# Label\n", - "Cloud Dataproc, GCP, Cloud Storage,PySpark, Kubeflow, pipelines, components\n", - "\n", - "\n", - "# Summary\n", - "A Kubeflow Pipeline component to prepare data by submitting a PySpark job to Cloud Dataproc.\n", - "\n", - "\n", - "# Details\n", - "## Intended use\n", - "Use the component to run an Apache PySpark job as one preprocessing step in a Kubeflow Pipeline.\n", - "\n", - "\n", - "## Runtime arguments\n", - "| Argument | Description | Optional | Data type | Accepted values | Default |\n", - "|----------------------|------------|----------|--------------|-----------------|---------|\n", - "| project_id | The ID of the Google Cloud Platform (GCP) project that the cluster belongs to. | No | GCPProjectID | | |\n", - "| region | The Cloud Dataproc region to handle the request. | No | GCPRegion | | |\n", - "| cluster_name | The name of the cluster to run the job. | No | String | | |\n", - "| main_python_file_uri | The HCFS URI of the Python file to use as the driver. This must be a .py file. | No | GCSPath | | |\n", - "| args | The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. | Yes | List | | None |\n", - "| pyspark_job | The payload of a [PySparkJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/PySparkJob). | Yes | Dict | | None |\n", - "| job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | | None |\n", - "\n", - "## Output\n", - "Name | Description | Type\n", - ":--- | :---------- | :---\n", - "job_id | The ID of the created job. | String\n", - "\n", - "## Cautions & requirements\n", - "\n", - "To use the component, you must:\n", - "* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project).\n", - "* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster).\n", - "* The component can authenticate to GCP. 
Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", - "* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project.\n", - "\n", - "## Detailed description\n", - "\n", - "This component creates a PySpark job from the [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit).\n", - "\n", - "Follow these steps to use the component in a pipeline:\n", - "\n", - "1. Install the Kubeflow Pipeline SDK:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "\n", - "!pip3 install kfp --upgrade" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. Load the component using KFP SDK" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.components as comp\n", - "\n", - "dataproc_submit_pyspark_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/dataproc/submit_pyspark_job/component.yaml')\n", - "help(dataproc_submit_pyspark_job_op)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Sample\n", - "\n", - "Note: The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template.\n", - "\n", - "\n", - "#### Setup a Dataproc cluster\n", - "\n", - "[Create a new Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) (or reuse an existing one) before running the sample code.\n", - "\n", - "\n", - "#### Prepare a PySpark job\n", - "\n", - "Upload your PySpark code file to a Cloud Storage bucket. 
For example, this is a publicly accessible `hello-world.py` in Cloud Storage:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "!gsutil cat gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Set sample parameters" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "parameters" - ] - }, - "outputs": [], - "source": [ - "PROJECT_ID = ''\n", - "CLUSTER_NAME = ''\n", - "REGION = 'us-central1'\n", - "PYSPARK_FILE_URI = 'gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py'\n", - "ARGS = ''\n", - "EXPERIMENT_NAME = 'Dataproc - Submit PySpark Job'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Example pipeline that uses the component" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.dsl as dsl\n", - "import json\n", - "@dsl.pipeline(\n", - " name='Dataproc submit PySpark job pipeline',\n", - " description='Dataproc submit PySpark job pipeline'\n", - ")\n", - "def dataproc_submit_pyspark_job_pipeline(\n", - " project_id = PROJECT_ID, \n", - " region = REGION,\n", - " cluster_name = CLUSTER_NAME,\n", - " main_python_file_uri = PYSPARK_FILE_URI, \n", - " args = ARGS, \n", - " pyspark_job='{}', \n", - " job='{}', \n", - " wait_interval='30'\n", - "):\n", - " dataproc_submit_pyspark_job_op(\n", - " project_id=project_id, \n", - " region=region, \n", - " cluster_name=cluster_name, \n", - " main_python_file_uri=main_python_file_uri, \n", - " args=args, \n", - " pyspark_job=pyspark_job, \n", - " job=job, \n", - " wait_interval=wait_interval)\n", - " " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Compile the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "pipeline_func = dataproc_submit_pyspark_job_pipeline\n", - "pipeline_filename = pipeline_func.__name__ + '.zip'\n", - "import kfp.compiler as compiler\n", - "compiler.Compiler().compile(pipeline_func, pipeline_filename)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Submit the pipeline for execution" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "#Specify pipeline argument values\n", - "arguments = {}\n", - "\n", - "#Get or create an experiment and submit a pipeline run\n", - "import kfp\n", - "client = kfp.Client()\n", - "experiment = client.create_experiment(EXPERIMENT_NAME)\n", - "\n", - "#Submit a pipeline run\n", - "run_name = pipeline_func.__name__ + ' run'\n", - "run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## References\n", - "\n", - "* [Create a new Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) \n", - "* [PySparkJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/PySparkJob)\n", - "* [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs)\n", - "\n", - "## License\n", - "By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of 
Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control." - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.4" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} \ No newline at end of file diff --git a/components/contrib/google-cloud/dataproc/submit_spark_job/README.md b/components/contrib/google-cloud/dataproc/submit_spark_job/README.md deleted file mode 100644 index 3f273db7cfa..00000000000 --- a/components/contrib/google-cloud/dataproc/submit_spark_job/README.md +++ /dev/null @@ -1,200 +0,0 @@ - -# Name - -Component: Data preparation using Spark on YARN with Cloud Dataproc - - -# Labels - - Spark, Kubeflow,YARN - - # Facets - -Use case: -Other - -Technique: -Other - -Input data type: -Tabular - -ML workflow: -Data preparation - - - -# Summary - -A Kubeflow pipeline component to prepare data by submitting a Spark job on YARN to Cloud Dataproc. - -# Details - -## Intended use - -Use the component to run an Apache Spark job as one preprocessing step in a Kubeflow pipeline. - -## Runtime arguments -Argument | Description | Optional | Data type | Accepted values | Default | -:--- | :---------- | :--- | :------- | :------| :------| -project_id | The ID of the Google Cloud Platform (GCP) project that the cluster belongs to.|No | GCPProjectID | | | -region | The Cloud Dataproc region to handle the request. | No | GCPRegion | | | -cluster_name | The name of the cluster to run the job. | No | String | | | -main_jar_file_uri | The Hadoop Compatible Filesystem (HCFS) URI of the JAR file that contains the main class. | No | GCSPath | | | -main_class | The name of the driver's main class. The JAR file that contains the class must be either in the default CLASSPATH or specified in `spark_job.jarFileUris`.| No | | | | -args | The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.| Yes | | | | -spark_job | The payload of a [SparkJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/SparkJob).| Yes | | | | -job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | | | | -wait_interval | The number of seconds to wait between polling the operation. | Yes | | | 30 | - -## Output -Name | Description | Type -:--- | :---------- | :--- -job_id | The ID of the created job. | String - -## Cautions & requirements - -To use the component, you must: - -* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project). -* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster). -* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. -* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project. - - -## Detailed description - -This component creates a Spark job from the [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit). 
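
For orientation, the request body accepted by that REST call looks roughly like the sketch below, which mirrors the sample parameters used later in this README. This is an approximation added for illustration; consult the Dataproc job and SparkJob references for the authoritative schema. The component builds this payload from its inputs (`main_class`, `args`, `spark_job`, `job`, and so on).

```python
# Approximate shape of the Dataproc jobs.submit request body; values are
# placeholders matching the sample further down.
submit_request = {
    'job': {
        'placement': {'clusterName': 'my-cluster'},
        'sparkJob': {
            'mainClass': 'org.apache.spark.examples.SparkPi',
            'jarFileUris': ['file:///usr/lib/spark/examples/jars/spark-examples.jar'],
            'args': ['1000'],
        },
    },
}
```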
- -Follow these steps to use the component in a pipeline: - - - -1. Install the Kubeflow Pipeline's SDK: - - - ```python - %%capture --no-stderr - - !pip3 install kfp --upgrade - ``` - -2. Load the component using the Kubeflow Pipeline's SDK - - - ```python - import kfp.components as comp - - dataproc_submit_spark_job_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/dataproc/submit_spark_job/component.yaml') - help(dataproc_submit_spark_job_op) - ``` - -### Sample -Note: The following sample code works in an IPython notebook or directly in Python code. - - -#### Set up a Dataproc cluster -[Create a new Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) (or reuse an existing one) before running the sample code. - - -#### Prepare a Spark job -Upload your Spark JAR file to a Cloud Storage bucket. In the sample, we use a JAR file that is preinstalled in the main cluster: `file:///usr/lib/spark/examples/jars/spark-examples.jar`. - -Here is the [source code of the sample](https://github.com/apache/spark/blob/master/examples/src/main/java/org/apache/spark/examples/JavaSparkPi.java). - -To package a self-contained Spark application, follow these [instructions](https://spark.apache.org/docs/latest/quick-start.html#self-contained-applications). - - -#### Set sample parameters - - -```python -PROJECT_ID = '' -CLUSTER_NAME = '' -REGION = 'us-central1' -SPARK_FILE_URI = 'file:///usr/lib/spark/examples/jars/spark-examples.jar' -MAIN_CLASS = 'org.apache.spark.examples.SparkPi' -ARGS = ['1000'] -EXPERIMENT_NAME = 'Dataproc - Submit Spark Job' -``` - -#### Example pipeline that uses the component - - -```python -import kfp.dsl as dsl -import json -@dsl.pipeline( - name='Dataproc submit Spark job pipeline', - description='Dataproc submit Spark job pipeline' -) -def dataproc_submit_spark_job_pipeline( - project_id = PROJECT_ID, - region = REGION, - cluster_name = CLUSTER_NAME, - main_jar_file_uri = '', - main_class = MAIN_CLASS, - args = json.dumps(ARGS), - spark_job=json.dumps({ 'jarFileUris': [ SPARK_FILE_URI ] }), - job='{}', - wait_interval='30' -): - dataproc_submit_spark_job_op( - project_id=project_id, - region=region, - cluster_name=cluster_name, - main_jar_file_uri=main_jar_file_uri, - main_class=main_class, - args=args, - spark_job=spark_job, - job=job, - wait_interval=wait_interval) - -``` - -#### Compile the pipeline - - -```python -#Compile the pipeline -pipeline_func = dataproc_submit_spark_job_pipeline -pipeline_filename = pipeline_func.__name__ + '.zip' -import kfp.compiler as compiler -compiler.Compiler().compile(pipeline_func, pipeline_filename) -``` - -#### Submit the pipeline for execution - - -```python -#Specify values for the pipeline's arguments -arguments = {} - -#Get or create an experiment -import kfp -client = kfp.Client() -experiment = client.create_experiment(EXPERIMENT_NAME) - -#Submit a pipeline run -run_name = pipeline_func.__name__ + ' run' -run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments) -``` - -## References - -* [Component Python code](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/component_sdk/python/kfp_component/google/dataproc/_submit_spark_job.py) -* [Component Docker file](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/Dockerfile) -* [Sample notebook](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/dataproc/submit_spark_job/sample.ipynb) -* 
[Dataproc SparkJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/SparkJob) - -## License -By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control. diff --git a/components/contrib/google-cloud/dataproc/submit_spark_job/component.yaml b/components/contrib/google-cloud/dataproc/submit_spark_job/component.yaml deleted file mode 100644 index fd35589c256..00000000000 --- a/components/contrib/google-cloud/dataproc/submit_spark_job/component.yaml +++ /dev/null @@ -1,96 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: dataproc_submit_spark_job -description: >- - Submits a Cloud Dataproc job for running Apache Spark applications on YARN. -metadata: - labels: - add-pod-env: 'true' -inputs: - - name: project_id - description: >- - Required. The ID of the Google Cloud Platform project that the cluster - belongs to. - type: GCPProjectID - - name: region - description: >- - Required. The Cloud Dataproc region in which to handle the request. - type: GCPRegion - - name: cluster_name - description: 'Required. The cluster to run the job.' - type: String - - name: main_jar_file_uri - default: '' - description: >- - The HCFS URI of the jar file that contains the main class. - type: GCSPath - - name: main_class - default: '' - description: >- - The name of the driver's main class. The jar file that - contains the class must be in the default CLASSPATH or specified in - jarFileUris. - type: String - - name: args - default: '' - description: >- - Optional. The arguments to pass to the driver. Do not include - arguments, such as --conf, that can be set as job properties, since a - collision may occur that causes an incorrect job submission. - type: List - - name: spark_job - default: '' - description: >- - Optional. The full payload of a - [SparkJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/SparkJob). - type: Dict - - name: job - default: '' - description: >- - Optional. The full payload of a - [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). - type: Dict - - name: wait_interval - default: '30' - description: >- - Optional. The wait seconds between polling the operation. - Defaults to 30. - type: Integer -outputs: - - name: job_id - description: 'The ID of the created job.' 
- type: String - - name: MLPipeline UI metadata - type: UI metadata -implementation: - container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.7.0-rc.3 - command: ['python', '-u', '-m', 'kfp_component.launcher'] - args: [ - --ui_metadata_path, {outputPath: MLPipeline UI metadata}, - kfp_component.google.dataproc, submit_spark_job, - --project_id, {inputValue: project_id}, - --region, {inputValue: region}, - --cluster_name, {inputValue: cluster_name}, - --main_jar_file_uri, {inputValue: main_jar_file_uri}, - --main_class, {inputValue: main_class}, - --args, {inputValue: args}, - --spark_job, {inputValue: spark_job}, - --job, {inputValue: job}, - --wait_interval, {inputValue: wait_interval}, - --job_id_output_path, {outputPath: job_id}, - ] - env: - KFP_POD_NAME: "{{pod.name}}" diff --git a/components/contrib/google-cloud/dataproc/submit_spark_job/sample.ipynb b/components/contrib/google-cloud/dataproc/submit_spark_job/sample.ipynb deleted file mode 100644 index 3fbaa24f232..00000000000 --- a/components/contrib/google-cloud/dataproc/submit_spark_job/sample.ipynb +++ /dev/null @@ -1,266 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Name\n", - "\n", - "Data preparation using Spark on YARN with Cloud Dataproc\n", - "\n", - "\n", - "# Label\n", - "\n", - "Cloud Dataproc, GCP, Cloud Storage, Spark, Kubeflow, pipelines, components, YARN\n", - "\n", - "\n", - "# Summary\n", - "\n", - "A Kubeflow Pipeline component to prepare data by submitting a Spark job on YARN to Cloud Dataproc.\n", - "\n", - "# Details\n", - "\n", - "## Intended use\n", - "\n", - "Use the component to run an Apache Spark job as one preprocessing step in a Kubeflow Pipeline.\n", - "\n", - "## Runtime arguments\n", - "Argument | Description | Optional | Data type | Accepted values | Default |\n", - ":--- | :---------- | :--- | :------- | :------| :------| \n", - "project_id | The ID of the Google Cloud Platform (GCP) project that the cluster belongs to.|No | GCPProjectID | | |\n", - "region | The Cloud Dataproc region to handle the request. | No | GCPRegion | | | \n", - "cluster_name | The name of the cluster to run the job. | No | String | | |\n", - "main_jar_file_uri | The Hadoop Compatible Filesystem (HCFS) URI of the JAR file that contains the main class. | No | GCSPath | | |\n", - "main_class | The name of the driver's main class. The JAR file that contains the class must be either in the default CLASSPATH or specified in `spark_job.jarFileUris`.| No | | | | \n", - "args | The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.| Yes | | | |\n", - "spark_job | The payload of a [SparkJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/SparkJob).| Yes | | | |\n", - "job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | | | |\n", - "wait_interval | The number of seconds to wait between polling the operation. | Yes | | | 30 |\n", - "\n", - "## Output\n", - "Name | Description | Type\n", - ":--- | :---------- | :---\n", - "job_id | The ID of the created job. 
| String\n", - "\n", - "## Cautions & requirements\n", - "\n", - "To use the component, you must:\n", - "\n", - "\n", - "\n", - "* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project).\n", - "* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster).\n", - "* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", - "* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project.\n", - "\n", - "\n", - "## Detailed description\n", - "\n", - "This component creates a Spark job from [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit).\n", - "\n", - "Follow these steps to use the component in a pipeline:\n", - "\n", - "\n", - "\n", - "1. Install the Kubeflow Pipeline SDK:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "\n", - "!pip3 install kfp --upgrade" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. Load the component using KFP SDK" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.components as comp\n", - "\n", - "dataproc_submit_spark_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/dataproc/submit_spark_job/component.yaml')\n", - "help(dataproc_submit_spark_job_op)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Sample\n", - "Note: The following sample code works in an IPython notebook or directly in Python code.\n", - "\n", - "\n", - "#### Set up a Dataproc cluster\n", - "[Create a new Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) (or reuse an existing one) before running the sample code.\n", - "\n", - "\n", - "#### Prepare a Spark job\n", - "Upload your Spark JAR file to a Cloud Storage bucket. 
In the sample, we use a JAR file that is preinstalled in the main cluster: `file:///usr/lib/spark/examples/jars/spark-examples.jar`.\n", - "\n", - "Here is the [source code of the sample](https://github.com/apache/spark/blob/master/examples/src/main/java/org/apache/spark/examples/JavaSparkPi.java).\n", - "\n", - "To package a self-contained Spark application, follow these [instructions](https://spark.apache.org/docs/latest/quick-start.html#self-contained-applications).\n", - "\n", - "\n", - "#### Set sample parameters" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "parameters" - ] - }, - "outputs": [], - "source": [ - "PROJECT_ID = ''\n", - "CLUSTER_NAME = ''\n", - "REGION = 'us-central1'\n", - "SPARK_FILE_URI = 'file:///usr/lib/spark/examples/jars/spark-examples.jar'\n", - "MAIN_CLASS = 'org.apache.spark.examples.SparkPi'\n", - "ARGS = ['1000']\n", - "EXPERIMENT_NAME = 'Dataproc - Submit Spark Job'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Example pipeline that uses the component" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.dsl as dsl\n", - "import json\n", - "@dsl.pipeline(\n", - " name='Dataproc submit Spark job pipeline',\n", - " description='Dataproc submit Spark job pipeline'\n", - ")\n", - "def dataproc_submit_spark_job_pipeline(\n", - " project_id = PROJECT_ID, \n", - " region = REGION,\n", - " cluster_name = CLUSTER_NAME,\n", - " main_jar_file_uri = '',\n", - " main_class = MAIN_CLASS,\n", - " args = json.dumps(ARGS), \n", - " spark_job=json.dumps({ 'jarFileUris': [ SPARK_FILE_URI ] }), \n", - " job='{}', \n", - " wait_interval='30'\n", - "):\n", - " dataproc_submit_spark_job_op(\n", - " project_id=project_id, \n", - " region=region, \n", - " cluster_name=cluster_name, \n", - " main_jar_file_uri=main_jar_file_uri, \n", - " main_class=main_class,\n", - " args=args, \n", - " spark_job=spark_job, \n", - " job=job, \n", - " wait_interval=wait_interval)\n", - " " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Compile the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "pipeline_func = dataproc_submit_spark_job_pipeline\n", - "pipeline_filename = pipeline_func.__name__ + '.zip'\n", - "import kfp.compiler as compiler\n", - "compiler.Compiler().compile(pipeline_func, pipeline_filename)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Submit the pipeline for execution" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "#Specify pipeline argument values\n", - "arguments = {}\n", - "\n", - "#Get or create an experiment and submit a pipeline run\n", - "import kfp\n", - "client = kfp.Client()\n", - "experiment = client.create_experiment(EXPERIMENT_NAME)\n", - "\n", - "#Submit a pipeline run\n", - "run_name = pipeline_func.__name__ + ' run'\n", - "run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## References\n", - "\n", - "* [Component Python code](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/component_sdk/python/kfp_component/google/dataproc/_submit_spark_job.py)\n", - "* [Component Docker 
file](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/Dockerfile)\n", - "* [Sample notebook](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/dataproc/submit_spark_job/sample.ipynb)\n", - "* [Dataproc SparkJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/SparkJob)\n", - "\n", - "## License\n", - "By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control." - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.4" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/components/contrib/google-cloud/dataproc/submit_sparksql_job/README.md b/components/contrib/google-cloud/dataproc/submit_sparksql_job/README.md deleted file mode 100644 index a6b747be131..00000000000 --- a/components/contrib/google-cloud/dataproc/submit_sparksql_job/README.md +++ /dev/null @@ -1,184 +0,0 @@ - -# Name -Component: Data preparation using SparkSQL on YARN with Cloud Dataproc - -# Label -Cloud Dataproc, YARN, SparkSQL, Kubeflow - -# Summary -A Kubeflow pipeline component to prepare data by submitting a SparkSql job on YARN to Cloud Dataproc. - -# Facets - -Use case: - -Technique: - -Input data type: - -ML workflow: - -# Details - -## Intended use -Use the component to run an Apache SparkSql job as one preprocessing step in a Kubeflow pipeline. - -## Runtime arguments -Argument| Description | Optional | Data type| Accepted values| Default | -:--- | :---------- | :--- | :------- | :------ | :------ -project_id | The ID of the Google Cloud Platform (GCP) project that the cluster belongs to. | No| GCPProjectID | - | -| -region | The Cloud Dataproc region to handle the request. | No | GCPRegion|-|- -cluster_name | The name of the cluster to run the job. | No | String| -| -| -queries | The queries to execute the SparkSQL job. Specify multiple queries in one string by separating them with semicolons. You do not need to terminate queries with semicolons. | Yes | List | - | None | -query_file_uri | The Hadoop Compatible Filesystem (HCFS) URI of the script that contains the SparkSQL queries. The SparkSQL queries are listed in a CSV file that is stored in a Cloud Storage bucket.| Yes | GCSPath | - | None | -script_variables | Mapping of the query’s variable names to their values (equivalent to the SparkSQL command: SET name="value";).| Yes| Dict |- | None | -sparksql_job | The payload of a [SparkSql job](https://cloud.google.com/dataproc/docs/reference/rest/v1/SparkSqlJob). | Yes | Dict | - | None | -job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | - | None | -wait_interval | The number of seconds to pause between polling the operation. | Yes |Integer | - | 30 | - -## Output -Name | Description | Type -:--- | :---------- | :--- -job_id | The ID of the created job. 
| String - -## Cautions & requirements -To use the component, you must: -* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project). -* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster). -* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. -* Grant the Kubeflow user service account the role, `roles/dataproc.editor`, on the project. - -## Detailed Description -This component creates a SparkSql job from the [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit). - -Follow these steps to use the component in a pipeline: -1. Install the Kubeflow pipeline's SDK: - - ```python - %%capture --no-stderr - - !pip3 install kfp --upgrade - ``` - -2. Load the component using the Kubeflow pipeline's SDK: - - ```python - import kfp.components as comp - - dataproc_submit_sparksql_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/dataproc/submit_sparksql_job/component.yaml') - help(dataproc_submit_sparksql_job_op) - ``` - -### Sample - -The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. - -#### Setup a Dataproc cluster -[Create a new Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) (or reuse an existing one) before running the sample code. - -#### Prepare a SparkSQL job -You can put your SparkSQL queries in the `queries` list, or you can use `query_file_uri`. In this sample, we will use a hard coded query in the `queries` list to select data from a public CSV file in Cloud Storage. - -For more details about Spark SQL, see [Spark SQL, DataFrames and Datasets Guide](https://spark.apache.org/docs/latest/sql-programming-guide.html). 
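
Before the inline-query sample below, here is an alternative, editor-added sketch that reads the queries from a script in Cloud Storage via `query_file_uri` and passes `script_variables`, as described in the runtime arguments. The project, cluster, and bucket values are placeholders, not part of the original sample.

```python
import json
import kfp.dsl as dsl

@dsl.pipeline(
    name='Dataproc submit SparkSQL job from a script file',
    description='Runs SparkSQL queries stored in Cloud Storage'
)
def sparksql_from_file_pipeline(
    project_id='my-project',      # placeholder
    region='us-central1',
    cluster_name='my-cluster',    # placeholder
):
    dataproc_submit_sparksql_job_op(
        project_id=project_id,
        region=region,
        cluster_name=cluster_name,
        # A .sql script uploaded to Cloud Storage (hypothetical object) takes
        # the place of the inline `queries` list.
        query_file_uri='gs://my-bucket/sql/natality_sample.sql',
        # `script_variables` is a Dict input, passed JSON-encoded; it is
        # equivalent to issuing SET name="value"; before the queries run.
        script_variables=json.dumps({'row_limit': '10'}))
```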
- -#### Set sample parameters - -```python -PROJECT_ID = '' -CLUSTER_NAME = '' -REGION = 'us-central1' -QUERY = ''' -DROP TABLE IF EXISTS natality_csv; -CREATE EXTERNAL TABLE natality_csv ( - source_year BIGINT, year BIGINT, month BIGINT, day BIGINT, wday BIGINT, - state STRING, is_male BOOLEAN, child_race BIGINT, weight_pounds FLOAT, - plurality BIGINT, apgar_1min BIGINT, apgar_5min BIGINT, - mother_residence_state STRING, mother_race BIGINT, mother_age BIGINT, - gestation_weeks BIGINT, lmp STRING, mother_married BOOLEAN, - mother_birth_state STRING, cigarette_use BOOLEAN, cigarettes_per_day BIGINT, - alcohol_use BOOLEAN, drinks_per_week BIGINT, weight_gain_pounds BIGINT, - born_alive_alive BIGINT, born_alive_dead BIGINT, born_dead BIGINT, - ever_born BIGINT, father_race BIGINT, father_age BIGINT, - record_weight BIGINT -) -ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' -LOCATION 'gs://public-datasets/natality/csv'; - -SELECT * FROM natality_csv LIMIT 10;''' -EXPERIMENT_NAME = 'Dataproc - Submit SparkSQL Job' -``` - -#### Example pipeline that uses the component - -```python -import kfp.dsl as dsl -import json -@dsl.pipeline( - name='Dataproc submit SparkSQL job pipeline', - description='Dataproc submit SparkSQL job pipeline' -) -def dataproc_submit_sparksql_job_pipeline( - project_id = PROJECT_ID, - region = REGION, - cluster_name = CLUSTER_NAME, - queries = json.dumps([QUERY]), - query_file_uri = '', - script_variables = '', - sparksql_job='', - job='', - wait_interval='30' -): - dataproc_submit_sparksql_job_op( - project_id=project_id, - region=region, - cluster_name=cluster_name, - queries=queries, - query_file_uri=query_file_uri, - script_variables=script_variables, - sparksql_job=sparksql_job, - job=job, - wait_interval=wait_interval) - -``` - -#### Compile the pipeline - -```python -pipeline_func = dataproc_submit_sparksql_job_pipeline -pipeline_filename = pipeline_func.__name__ + '.zip' -import kfp.compiler as compiler -compiler.Compiler().compile(pipeline_func, pipeline_filename) -``` - -#### Submit the pipeline for execution - -```python -#Specify values for the pipeline's arguments -arguments = {} - -#Get or create an experiment -import kfp -client = kfp.Client() -experiment = client.create_experiment(EXPERIMENT_NAME) - -#Submit a pipeline run -run_name = pipeline_func.__name__ + ' run' -run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments) -``` - -## References -* [Spark SQL, DataFrames and Datasets Guide](https://spark.apache.org/docs/latest/sql-programming-guide.html) -* [SparkSqlJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/SparkSqlJob) -* [Cloud Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs) - - -## License -By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control. 
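
The component also declares a `job_id` output, so the submitted job's ID can be consumed by a later pipeline step. The sketch below wires it into a trivial reporting step created with `create_component_from_func`; this assumes a KFP 1.x SDK and is an editor-added illustration, not part of the original sample.

```python
import json
import kfp.components as comp
import kfp.dsl as dsl

# A tiny downstream step, only for illustration: it prints the Dataproc job
# ID produced by the SparkSQL task.
def report_job_id(job_id: str):
    print(f'Dataproc job submitted: {job_id}')

report_job_id_op = comp.create_component_from_func(report_job_id)

@dsl.pipeline(
    name='Dataproc SparkSQL job with downstream consumer',
    description='Passes the job_id output to a later step'
)
def sparksql_with_consumer_pipeline(
    project_id=PROJECT_ID,
    region=REGION,
    cluster_name=CLUSTER_NAME,
    queries=json.dumps([QUERY]),
):
    submit_task = dataproc_submit_sparksql_job_op(
        project_id=project_id,
        region=region,
        cluster_name=cluster_name,
        queries=queries)

    # `job_id` is declared as an output of the component, so it can be wired
    # into any later task.
    report_job_id_op(job_id=submit_task.outputs['job_id'])
```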
diff --git a/components/contrib/google-cloud/dataproc/submit_sparksql_job/component.yaml b/components/contrib/google-cloud/dataproc/submit_sparksql_job/component.yaml deleted file mode 100644 index a1002dcc94d..00000000000 --- a/components/contrib/google-cloud/dataproc/submit_sparksql_job/component.yaml +++ /dev/null @@ -1,95 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: dataproc_submit_sparksql_job -description: >- - Submits a Cloud Dataproc job for running Apache Spark SQL queries. -metadata: - labels: - add-pod-env: 'true' -inputs: - - name: project_id - description: >- - Required. The ID of the Google Cloud Platform project that the cluster - belongs to. - type: GCPProjectID - - name: region - description: >- - Required. The Cloud Dataproc region in which to handle the request. - type: GCPRegion - - name: cluster_name - description: 'Required. The cluster to run the job.' - type: String - - name: queries - default: '' - description: >- - Required. The queries to execute. You do not need to - terminate a query with a semicolon. Multiple queries can be specified - in one string by separating each with a semicolon. - type: List - - name: query_file_uri - default: '' - description: >- - The HCFS URI of the script that contains SQL queries. - type: GCSPath - - name: script_variables - default: '' - description: >- - Optional. Mapping of query variable names to - values (equivalent to the Spark SQL command: SET name="value";). - type: Dict - - name: sparksql_job - default: '' - description: >- - Optional. The full payload of a - [SparkSqlJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/SparkSqlJob). - type: Dict - - name: job - default: '' - description: >- - Optional. The full payload of a - [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). - type: Dict - - name: wait_interval - default: '30' - description: >- - Optional. The wait seconds between polling the operation. - Defaults to 30. - type: Integer -outputs: - - name: job_id - description: 'The ID of the created job.' 
- type: String - - name: MLPipeline UI metadata - type: UI metadata -implementation: - container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.7.0-rc.3 - command: ['python', '-u', '-m', 'kfp_component.launcher'] - args: [ - --ui_metadata_path, {outputPath: MLPipeline UI metadata}, - kfp_component.google.dataproc, submit_sparksql_job, - --project_id, {inputValue: project_id}, - --region, {inputValue: region}, - --cluster_name, {inputValue: cluster_name}, - --queries, {inputValue: queries}, - --query_file_uri, {inputValue: query_file_uri}, - --script_variables, {inputValue: script_variables}, - --sparksql_job, {inputValue: sparksql_job}, - --job, {inputValue: job}, - --wait_interval, {inputValue: wait_interval}, - --job_id_output_path, {outputPath: job_id}, - ] - env: - KFP_POD_NAME: "{{pod.name}}" diff --git a/components/contrib/google-cloud/dataproc/submit_sparksql_job/sample.ipynb b/components/contrib/google-cloud/dataproc/submit_sparksql_job/sample.ipynb deleted file mode 100644 index 43fe5a69a9c..00000000000 --- a/components/contrib/google-cloud/dataproc/submit_sparksql_job/sample.ipynb +++ /dev/null @@ -1,261 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Name\n", - "Data preparation using SparkSQL on YARN with Cloud Dataproc\n", - "\n", - "# Label\n", - "Cloud Dataproc, GCP, Cloud Storage, YARN, SparkSQL, Kubeflow, pipelines, components \n", - "\n", - "# Summary\n", - "A Kubeflow Pipeline component to prepare data by submitting a SparkSql job on YARN to Cloud Dataproc.\n", - "\n", - "# Details\n", - "\n", - "## Intended use\n", - "Use the component to run an Apache SparkSql job as one preprocessing step in a Kubeflow Pipeline.\n", - "\n", - "## Runtime arguments\n", - "Argument| Description | Optional | Data type| Accepted values| Default |\n", - ":--- | :---------- | :--- | :------- | :------ | :------\n", - "project_id | The ID of the Google Cloud Platform (GCP) project that the cluster belongs to. | No| GCPProjectID | | |\n", - "region | The Cloud Dataproc region to handle the request. | No | GCPRegion|\n", - "cluster_name | The name of the cluster to run the job. | No | String| | |\n", - "queries | The queries to execute the SparkSQL job. Specify multiple queries in one string by separating them with semicolons. You do not need to terminate queries with semicolons. | Yes | List | | None | \n", - "query_file_uri | The HCFS URI of the script that contains the SparkSQL queries.| Yes | GCSPath | | None |\n", - "script_variables | Mapping of the query’s variable names to their values (equivalent to the SparkSQL command: SET name=\"value\";).| Yes| Dict | | None |\n", - "sparksql_job | The payload of a [SparkSqlJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/SparkSqlJob). | Yes | Dict | | None |\n", - "job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | | None |\n", - "wait_interval | The number of seconds to pause between polling the operation. | Yes |Integer | | 30 |\n", - "\n", - "## Output\n", - "Name | Description | Type\n", - ":--- | :---------- | :---\n", - "job_id | The ID of the created job. | String\n", - "\n", - "## Cautions & requirements\n", - "To use the component, you must:\n", - "* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project).\n", - "* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster).\n", - "* The component can authenticate to GCP. 
Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", - "* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project.\n", - "\n", - "## Detailed Description\n", - "This component creates a SparkSql job using the [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit).\n", - "\n", - "Follow these steps to use the component in a pipeline:\n", - "1. Install the Kubeflow Pipeline SDK:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "\n", - "!pip3 install kfp --upgrade" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. Load the component using KFP SDK" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.components as comp\n", - "\n", - "dataproc_submit_sparksql_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/dataproc/submit_sparksql_job/component.yaml')\n", - "help(dataproc_submit_sparksql_job_op)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Sample\n", - "\n", - "Note: The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template.\n", - "\n", - "#### Set up a Dataproc cluster\n", - "[Create a new Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) (or reuse an existing one) before running the sample code.\n", - "\n", - "#### Prepare a SparkSQL job\n", - "Either put your SparkSQL queries in the `queries` list, or upload your SparkSQL queries to a file in a Cloud Storage bucket and then enter the Cloud Storage bucket’s path in `query_file_uri`. 
In this sample, we will use a hard coded query in the `queries` list to select data from a public CSV file from Cloud Storage.\n", - "\n", - "For more details about Spark SQL, see [Spark SQL, DataFrames and Datasets Guide](https://spark.apache.org/docs/latest/sql-programming-guide.html)\n", - "\n", - "#### Set sample parameters" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "parameters" - ] - }, - "outputs": [], - "source": [ - "PROJECT_ID = ''\n", - "CLUSTER_NAME = ''\n", - "REGION = 'us-central1'\n", - "QUERY = '''\n", - "DROP TABLE IF EXISTS natality_csv;\n", - "CREATE EXTERNAL TABLE natality_csv (\n", - " source_year BIGINT, year BIGINT, month BIGINT, day BIGINT, wday BIGINT,\n", - " state STRING, is_male BOOLEAN, child_race BIGINT, weight_pounds FLOAT,\n", - " plurality BIGINT, apgar_1min BIGINT, apgar_5min BIGINT,\n", - " mother_residence_state STRING, mother_race BIGINT, mother_age BIGINT,\n", - " gestation_weeks BIGINT, lmp STRING, mother_married BOOLEAN,\n", - " mother_birth_state STRING, cigarette_use BOOLEAN, cigarettes_per_day BIGINT,\n", - " alcohol_use BOOLEAN, drinks_per_week BIGINT, weight_gain_pounds BIGINT,\n", - " born_alive_alive BIGINT, born_alive_dead BIGINT, born_dead BIGINT,\n", - " ever_born BIGINT, father_race BIGINT, father_age BIGINT,\n", - " record_weight BIGINT\n", - ")\n", - "ROW FORMAT DELIMITED FIELDS TERMINATED BY ','\n", - "LOCATION 'gs://public-datasets/natality/csv';\n", - "\n", - "SELECT * FROM natality_csv LIMIT 10;'''\n", - "EXPERIMENT_NAME = 'Dataproc - Submit SparkSQL Job'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Example pipeline that uses the component" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.dsl as dsl\n", - "import json\n", - "@dsl.pipeline(\n", - " name='Dataproc submit SparkSQL job pipeline',\n", - " description='Dataproc submit SparkSQL job pipeline'\n", - ")\n", - "def dataproc_submit_sparksql_job_pipeline(\n", - " project_id = PROJECT_ID, \n", - " region = REGION,\n", - " cluster_name = CLUSTER_NAME,\n", - " queries = json.dumps([QUERY]),\n", - " query_file_uri = '',\n", - " script_variables = '', \n", - " sparksql_job='', \n", - " job='', \n", - " wait_interval='30'\n", - "):\n", - " dataproc_submit_sparksql_job_op(\n", - " project_id=project_id, \n", - " region=region, \n", - " cluster_name=cluster_name, \n", - " queries=queries, \n", - " query_file_uri=query_file_uri,\n", - " script_variables=script_variables, \n", - " sparksql_job=sparksql_job, \n", - " job=job, \n", - " wait_interval=wait_interval)\n", - " " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Compile the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "pipeline_func = dataproc_submit_sparksql_job_pipeline\n", - "pipeline_filename = pipeline_func.__name__ + '.zip'\n", - "import kfp.compiler as compiler\n", - "compiler.Compiler().compile(pipeline_func, pipeline_filename)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Submit the pipeline for execution" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "#Specify pipeline argument values\n", - "arguments = {}\n", - "\n", - "#Get or create an experiment and submit a pipeline run\n", - "import kfp\n", - "client = kfp.Client()\n", - "experiment = 
client.create_experiment(EXPERIMENT_NAME)\n", - "\n", - "#Submit a pipeline run\n", - "run_name = pipeline_func.__name__ + ' run'\n", - "run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## References\n", - "* [Spark SQL, DataFrames and Datasets Guide](https://spark.apache.org/docs/latest/sql-programming-guide.html)\n", - "* [SparkSqlJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/SparkSqlJob)\n", - "* [Cloud Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs)\n", - "\n", - "\n", - "## License\n", - "By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control." - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.4" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} \ No newline at end of file diff --git a/components/contrib/google-cloud/ml_engine/batch_predict/README.md b/components/contrib/google-cloud/ml_engine/batch_predict/README.md deleted file mode 100644 index 0369c6a32fe..00000000000 --- a/components/contrib/google-cloud/ml_engine/batch_predict/README.md +++ /dev/null @@ -1,200 +0,0 @@ - -# Name - -Batch prediction using Cloud Machine Learning Engine - - -# Label - -Cloud Storage, Cloud ML Engine, Kubeflow, Pipeline, Component - - -# Summary - -A Kubeflow Pipeline component to submit a batch prediction job against a deployed model on Cloud ML Engine. - - -# Details - - -## Intended use - -Use the component to run a batch prediction job against a deployed model on Cloud ML Engine. The prediction output is stored in a Cloud Storage bucket. - - -## Runtime arguments - -| Argument | Description | Optional | Data type | Accepted values | Default | -|--------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------|--------------|-----------------|---------| -| project_id | The ID of the Google Cloud Platform (GCP) project of the job. | No | GCPProjectID | | | -| model_path | The path to the model. It can be one of the following:
  • projects/[PROJECT_ID]/models/[MODEL_ID]
  • projects/[PROJECT_ID]/models/[MODEL_ID]/versions/[VERSION_ID]
  • The path to a Cloud Storage location containing a model file.
| No | GCSPath | | | -| input_paths | The path to the Cloud Storage location containing the input data files. It can contain wildcards, for example, `gs://foo/*.csv` | No | List | GCSPath | | -| input_data_format | The format of the input data files. See [REST Resource: projects.jobs](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#DataFormat) for more details. | No | String | DataFormat | | -| output_path | The path to the Cloud Storage location for the output data. | No | GCSPath | | | -| region | The Compute Engine region where the prediction job is run. | No | GCPRegion | | | -| output_data_format | The format of the output data files. See [REST Resource: projects.jobs](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#DataFormat) for more details. | Yes | String | DataFormat | JSON | -| prediction_input | The JSON input parameters to create a prediction job. See [PredictionInput](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#PredictionInput) for more information. | Yes | Dict | | None | -| job_id_prefix | The prefix of the generated job id. | Yes | String | | None | -| wait_interval | The number of seconds to wait in case the operation has a long run time. | Yes | | | 30 | - - -## Input data schema - -The component accepts the following as input: - -* A trained model: It can be a model file in Cloud Storage, a deployed model, or a version in Cloud ML Engine. Specify the path to the model in the `model_path `runtime argument. -* Input data: The data used to make predictions against the trained model. The data can be in [multiple formats](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#DataFormat). The data path is specified by `input_paths` and the format is specified by `input_data_format`. - -## Output -Name | Description | Type -:--- | :---------- | :--- -job_id | The ID of the created batch job. | String - - -## Cautions & requirements - -To use the component, you must: - -* Set up a cloud environment by following this [guide](https://cloud.google.com/ml-engine/docs/tensorflow/getting-started-training-prediction#setup). -* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. -* Grant the following types of access to the Kubeflow user service account: - * Read access to the Cloud Storage buckets which contains the input data. - * Write access to the Cloud Storage bucket of the output directory. - - -## Detailed description - -Follow these steps to use the component in a pipeline: - - - -1. Install the Kubeflow Pipeline SDK: - - - - -```python -%%capture --no-stderr - -!pip3 install kfp --upgrade -``` - -2. Load the component using KFP SDK - - -```python -import kfp.components as comp - -mlengine_batch_predict_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/ml_engine/batch_predict/component.yaml') -help(mlengine_batch_predict_op) -``` - - -### Sample Code -Note: The following sample code works in an IPython notebook or directly in Python code. - -In this sample, you batch predict against a pre-built trained model from `gs://ml-pipeline-playground/samples/ml_engine/census/trained_model/` and use the test data from `gs://ml-pipeline-playground/samples/ml_engine/census/test.json`. 
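As a hypothetical sketch before the step-by-step sample (not part of the original document), the `job_id` output listed above can be passed to a downstream step through the task's `.outputs` mapping in the KFP v1 DSL; the `print_text` helper and the placeholder project and bucket values are illustrative only.

```python
# Hypothetical sketch: wire the component's job_id output into a downstream step.
# print_text and the placeholder project/bucket values are illustrative only.
import kfp.dsl as dsl
import kfp.components as comp

def print_text(text: str):
    print(text)

# Build a lightweight component from the helper function.
print_op = comp.func_to_container_op(print_text)

@dsl.pipeline(name='Batch predict with a downstream consumer')
def batch_predict_consumer_pipeline(
        project_id='my-gcp-project',                          # placeholder
        output_path='gs://my-bucket/batch_predict/output/'):  # placeholder
    predict_task = mlengine_batch_predict_op(
        project_id=project_id,
        model_path='gs://ml-pipeline-playground/samples/ml_engine/census/trained_model/',
        input_paths='["gs://ml-pipeline-playground/samples/ml_engine/census/test.json"]',
        input_data_format='JSON',
        output_path=output_path,
        region='us-central1')
    # 'job_id' matches the output name declared in the component specification.
    print_op(predict_task.outputs['job_id'])
```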
- -#### Inspect the test data - - -```python -!gsutil cat gs://ml-pipeline-playground/samples/ml_engine/census/test.json -``` - -#### Set sample parameters - - -```python -# Required Parameters -PROJECT_ID = '' -GCS_WORKING_DIR = 'gs://' # No ending slash -``` - - -```python -# Optional Parameters -EXPERIMENT_NAME = 'CLOUDML - Batch Predict' -OUTPUT_GCS_PATH = GCS_WORKING_DIR + '/batch_predict/output/' -``` - -#### Example pipeline that uses the component - - -```python -import kfp.dsl as dsl -import json -@dsl.pipeline( - name='CloudML batch predict pipeline', - description='CloudML batch predict pipeline' -) -def pipeline( - project_id = PROJECT_ID, - model_path = 'gs://ml-pipeline-playground/samples/ml_engine/census/trained_model/', - input_paths = '["gs://ml-pipeline-playground/samples/ml_engine/census/test.json"]', - input_data_format = 'JSON', - output_path = OUTPUT_GCS_PATH, - region = 'us-central1', - output_data_format='', - prediction_input = json.dumps({ - 'runtimeVersion': '1.10' - }), - job_id_prefix='', - wait_interval='30'): - mlengine_batch_predict_op( - project_id=project_id, - model_path=model_path, - input_paths=input_paths, - input_data_format=input_data_format, - output_path=output_path, - region=region, - output_data_format=output_data_format, - prediction_input=prediction_input, - job_id_prefix=job_id_prefix, - wait_interval=wait_interval) -``` - -#### Compile the pipeline - - -```python -pipeline_func = pipeline -pipeline_filename = pipeline_func.__name__ + '.zip' -import kfp.compiler as compiler -compiler.Compiler().compile(pipeline_func, pipeline_filename) -``` - -#### Submit the pipeline for execution - - -```python -#Specify pipeline argument values -arguments = {} - -#Get or create an experiment and submit a pipeline run -import kfp -client = kfp.Client() -experiment = client.create_experiment(EXPERIMENT_NAME) - -#Submit a pipeline run -run_name = pipeline_func.__name__ + ' run' -run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments) -``` - -#### Inspect prediction results - - -```python -OUTPUT_FILES_PATTERN = OUTPUT_GCS_PATH + '*' -!gsutil cat OUTPUT_FILES_PATTERN -``` - -## References -* [Component python code](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_batch_predict.py) -* [Component docker file](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/Dockerfile) -* [Sample notebook](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/ml_engine/batch_predict/sample.ipynb) -* [Cloud Machine Learning Engine job REST API](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs) - -## License -By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control. diff --git a/components/contrib/google-cloud/ml_engine/batch_predict/component.yaml b/components/contrib/google-cloud/ml_engine/batch_predict/component.yaml deleted file mode 100644 index 3006d05e546..00000000000 --- a/components/contrib/google-cloud/ml_engine/batch_predict/component.yaml +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: Batch predict against a model with Cloud ML Engine -description: | - Creates a MLEngine batch prediction job. -metadata: - labels: - add-pod-env: 'true' -inputs: - - name: project_id - description: 'Required. The ID of the parent project of the job.' - type: GCPProjectID - - name: model_path - description: >- - The path to the model. It can be either: `projects/[PROJECT_ID]/models/[MODEL_ID]` - or `projects/[PROJECT_ID]/models/[MODEL_ID]/versions/[VERSION_ID]` or a GCS path - of a model file. - type: String - - name: input_paths - description: >- - Required. The Google Cloud Storage location of the input data files. May contain - wildcards. - type: List - - name: input_data_format - description: >- - Required. The format of the input data files. See - https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#DataFormat. - type: String - - name: output_path - description: 'Required. The output Google Cloud Storage location.' - type: GCSPath - - name: region - description: >- - Required. The Google Compute Engine region to run the prediction job in. - type: GCPRegion - - name: output_data_format - description: 'Optional. Format of the output data files, defaults to JSON.' - default: '' - type: String - - name: prediction_input - description: 'Input parameters to create a prediction job.' - default: '' - type: Dict - - name: job_id_prefix - description: 'The prefix of the generated job id.' - default: '' - type: String - - name: wait_interval - description: 'Optional wait interval between calls to get job status. Defaults to 30.' - default: '30' - type: Integer -outputs: - - name: job_id - description: 'The ID of the created job.' 
- type: String - - name: MLPipeline UI metadata - type: UI metadata -implementation: - container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.7.0-rc.3 - command: ['python', '-u', '-m', 'kfp_component.launcher'] - args: [ - --ui_metadata_path, {outputPath: MLPipeline UI metadata}, - kfp_component.google.ml_engine, batch_predict, - --project_id, {inputValue: project_id}, - --model_path, {inputValue: model_path}, - --input_paths, {inputValue: input_paths}, - --input_data_format, {inputValue: input_data_format}, - --output_path, {inputValue: output_path}, - --region, {inputValue: region}, - --output_data_format, {inputValue: output_data_format}, - --prediction_input, {inputValue: prediction_input}, - --job_id_prefix, {inputValue: job_id_prefix}, - --wait_interval, {inputValue: wait_interval}, - --job_id_output_path, {outputPath: job_id}, - ] - env: - KFP_POD_NAME: "{{pod.name}}" diff --git a/components/contrib/google-cloud/ml_engine/batch_predict/sample.ipynb b/components/contrib/google-cloud/ml_engine/batch_predict/sample.ipynb deleted file mode 100644 index 00d724849de..00000000000 --- a/components/contrib/google-cloud/ml_engine/batch_predict/sample.ipynb +++ /dev/null @@ -1,310 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Name\n", - "\n", - "Batch prediction using Cloud Machine Learning Engine\n", - "\n", - "\n", - "# Label\n", - "\n", - "Cloud Storage, Cloud ML Engine, Kubeflow, Pipeline, Component\n", - "\n", - "\n", - "# Summary\n", - "\n", - "A Kubeflow Pipeline component to submit a batch prediction job against a deployed model on Cloud ML Engine.\n", - "\n", - "\n", - "# Details\n", - "\n", - "\n", - "## Intended use\n", - "\n", - "Use the component to run a batch prediction job against a deployed model on Cloud ML Engine. The prediction output is stored in a Cloud Storage bucket.\n", - "\n", - "\n", - "## Runtime arguments\n", - "\n", - "| Argument | Description | Optional | Data type | Accepted values | Default |\n", - "|--------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------|--------------|-----------------|---------|\n", - "| project_id | The ID of the Google Cloud Platform (GCP) project of the job. | No | GCPProjectID | | |\n", - "| model_path | The path to the model. It can be one of the following:
  • projects/[PROJECT_ID]/models/[MODEL_ID]
  • projects/[PROJECT_ID]/models/[MODEL_ID]/versions/[VERSION_ID]
  • The path to a Cloud Storage location containing a model file.
| No | GCSPath | | |\n", - "| input_paths | The path to the Cloud Storage location containing the input data files. It can contain wildcards, for example, `gs://foo/*.csv` | No | List | GCSPath | |\n", - "| input_data_format | The format of the input data files. See [REST Resource: projects.jobs](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#DataFormat) for more details. | No | String | DataFormat | |\n", - "| output_path | The path to the Cloud Storage location for the output data. | No | GCSPath | | |\n", - "| region | The Compute Engine region where the prediction job is run. | No | GCPRegion | | |\n", - "| output_data_format | The format of the output data files. See [REST Resource: projects.jobs](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#DataFormat) for more details. | Yes | String | DataFormat | JSON |\n", - "| prediction_input | The JSON input parameters to create a prediction job. See [PredictionInput](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#PredictionInput) for more information. | Yes | Dict | | None |\n", - "| job_id_prefix | The prefix of the generated job id. | Yes | String | | None |\n", - "| wait_interval | The number of seconds to wait in case the operation has a long run time. | Yes | | | 30 |\n", - "\n", - "\n", - "## Input data schema\n", - "\n", - "The component accepts the following as input:\n", - "\n", - "* A trained model: It can be a model file in Cloud Storage, a deployed model, or a version in Cloud ML Engine. Specify the path to the model in the `model_path `runtime argument.\n", - "* Input data: The data used to make predictions against the trained model. The data can be in [multiple formats](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#DataFormat). The data path is specified by `input_paths` and the format is specified by `input_data_format`.\n", - "\n", - "## Output\n", - "Name | Description | Type\n", - ":--- | :---------- | :---\n", - "job_id | The ID of the created batch job. | String\n", - "output_path | The output path of the batch prediction job | GCSPath\n", - "\n", - "\n", - "## Cautions & requirements\n", - "\n", - "To use the component, you must:\n", - "\n", - "* Set up a cloud environment by following this [guide](https://cloud.google.com/ml-engine/docs/tensorflow/getting-started-training-prediction#setup).\n", - "* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", - "* Grant the following types of access to the Kubeflow user service account:\n", - " * Read access to the Cloud Storage buckets which contains the input data.\n", - " * Write access to the Cloud Storage bucket of the output directory.\n", - "\n", - "\n", - "## Detailed description\n", - "\n", - "Follow these steps to use the component in a pipeline:\n", - "\n", - "\n", - "\n", - "1. Install the Kubeflow Pipeline SDK:\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "\n", - "!pip3 install kfp --upgrade" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. 
Load the component using KFP SDK" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.components as comp\n", - "\n", - "mlengine_batch_predict_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/ml_engine/batch_predict/component.yaml')\n", - "help(mlengine_batch_predict_op)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "### Sample Code\n", - "Note: The following sample code works in an IPython notebook or directly in Python code. \n", - "\n", - "In this sample, you batch predict against a pre-built trained model from `gs://ml-pipeline-playground/samples/ml_engine/census/trained_model/` and use the test data from `gs://ml-pipeline-playground/samples/ml_engine/census/test.json`.\n", - "\n", - "#### Inspect the test data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "!gsutil cat gs://ml-pipeline-playground/samples/ml_engine/census/test.json" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Set sample parameters" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "parameters" - ] - }, - "outputs": [], - "source": [ - "# Required Parameters\n", - "PROJECT_ID = ''\n", - "GCS_WORKING_DIR = 'gs://' # No ending slash" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Optional Parameters\n", - "EXPERIMENT_NAME = 'CLOUDML - Batch Predict'\n", - "OUTPUT_GCS_PATH = GCS_WORKING_DIR + '/batch_predict/output/'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Example pipeline that uses the component" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.dsl as dsl\n", - "import json\n", - "@dsl.pipeline(\n", - " name='CloudML batch predict pipeline',\n", - " description='CloudML batch predict pipeline'\n", - ")\n", - "def pipeline(\n", - " project_id = PROJECT_ID, \n", - " model_path = 'gs://ml-pipeline-playground/samples/ml_engine/census/trained_model/', \n", - " input_paths = '[\"gs://ml-pipeline-playground/samples/ml_engine/census/test.json\"]', \n", - " input_data_format = 'JSON', \n", - " output_path = OUTPUT_GCS_PATH, \n", - " region = 'us-central1', \n", - " output_data_format='', \n", - " prediction_input = json.dumps({\n", - " 'runtimeVersion': '1.10'\n", - " }), \n", - " job_id_prefix='',\n", - " wait_interval='30'):\n", - " mlengine_batch_predict_op(\n", - " project_id=project_id, \n", - " model_path=model_path, \n", - " input_paths=input_paths, \n", - " input_data_format=input_data_format, \n", - " output_path=output_path, \n", - " region=region, \n", - " output_data_format=output_data_format, \n", - " prediction_input=prediction_input, \n", - " job_id_prefix=job_id_prefix,\n", - " wait_interval=wait_interval)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Compile the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "pipeline_func = pipeline\n", - "pipeline_filename = pipeline_func.__name__ + '.zip'\n", - "import kfp.compiler as compiler\n", - "compiler.Compiler().compile(pipeline_func, pipeline_filename)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Submit the 
pipeline for execution" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "#Specify pipeline argument values\n", - "arguments = {}\n", - "\n", - "#Get or create an experiment and submit a pipeline run\n", - "import kfp\n", - "client = kfp.Client()\n", - "experiment = client.create_experiment(EXPERIMENT_NAME)\n", - "\n", - "#Submit a pipeline run\n", - "run_name = pipeline_func.__name__ + ' run'\n", - "run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Inspect prediction results" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "OUTPUT_FILES_PATTERN = OUTPUT_GCS_PATH + '*'\n", - "!gsutil cat OUTPUT_FILES_PATTERN" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## References\n", - "* [Component python code](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_batch_predict.py)\n", - "* [Component docker file](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/Dockerfile)\n", - "* [Sample notebook](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/ml_engine/batch_predict/sample.ipynb)\n", - "* [Cloud Machine Learning Engine job REST API](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs)\n", - "\n", - "## License\n", - "By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control." - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.4" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/components/contrib/google-cloud/ml_engine/deploy/README.md b/components/contrib/google-cloud/ml_engine/deploy/README.md deleted file mode 100644 index 5ebe788917b..00000000000 --- a/components/contrib/google-cloud/ml_engine/deploy/README.md +++ /dev/null @@ -1,196 +0,0 @@ - -# Name - -Deploying a trained model to Cloud Machine Learning Engine - - -# Label - -Cloud Storage, Cloud ML Engine, Kubeflow, Pipeline - - -# Summary - -A Kubeflow Pipeline component to deploy a trained model from a Cloud Storage location to Cloud ML Engine. - - -# Details - - -## Intended use - -Use the component to deploy a trained model to Cloud ML Engine. The deployed model can serve online or batch predictions in a Kubeflow Pipeline. - - -## Runtime arguments - -| Argument | Description | Optional | Data type | Accepted values | Default | -|--------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------|--------------|-----------------|---------| -| model_uri | The URI of a Cloud Storage directory that contains a trained model file.
Or
An [Estimator export base directory](https://www.tensorflow.org/guide/saved_model#perform_the_export) that contains a list of subdirectories named by timestamp. The directory with the latest timestamp is used to load the trained model file. | No | GCSPath | | | -| project_id | The ID of the Google Cloud Platform (GCP) project of the serving model. | No | GCPProjectID | | | -| model_id | The name of the trained model. | Yes | String | | None | -| version_id | The name of the version of the model. If it is not provided, the operation uses a random name. | Yes | String | | None | -| runtime_version | The Cloud ML Engine runtime version to use for this deployment. If it is not provided, the default stable version, 1.0, is used. | Yes | String | | None | -| python_version | The version of Python used in the prediction. If it is not provided, version 2.7 is used. You can use Python 3.5 if runtime_version is set to 1.4 or above. Python 2.7 works with all supported runtime versions. | Yes | String | | 2.7 | -| model | The JSON payload of the new [model](https://cloud.google.com/ml-engine/reference/rest/v1/projects.models). | Yes | Dict | | None | -| version | The new [version](https://cloud.google.com/ml-engine/reference/rest/v1/projects.models.versions) of the trained model. | Yes | Dict | | None | -| replace_existing_version | Indicates whether to replace the existing version in case of a conflict (if the same version number is found.) | Yes | Boolean | | FALSE | -| set_default | Indicates whether to set the new version as the default version in the model. | Yes | Boolean | | FALSE | -| wait_interval | The number of seconds to wait in case the operation has a long run time. | Yes | Integer | | 30 | - - - -## Input data schema - -The component looks for a trained model in the location specified by the `model_uri` runtime argument. The accepted trained models are: - - -* [Tensorflow SavedModel](https://cloud.google.com/ml-engine/docs/tensorflow/exporting-for-prediction) -* [Scikit-learn & XGBoost model](https://cloud.google.com/ml-engine/docs/scikit/exporting-for-prediction) - -The accepted file formats are: - -* *.pb -* *.pbtext -* model.bst -* model.joblib -* model.pkl - -`model_uri` can also be an [Estimator export base directory, ](https://www.tensorflow.org/guide/saved_model#perform_the_export)which contains a list of subdirectories named by timestamp. The directory with the latest timestamp is used to load the trained model file. - -## Output -Name | Description | Type -:--- | :---------- | :--- -| model_uri | The Cloud Storage URI of the trained model. | GCSPath | -| model_name | The name of the deployed model. | String | -| version_name | The name of the deployed version. | String | - - -## Cautions & requirements - -To use the component, you must: - -* [Set up the cloud environment](https://cloud.google.com/ml-engine/docs/tensorflow/getting-started-training-prediction#setup). -* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. -* Grant read access to the Cloud Storage bucket that contains the trained model to the Kubeflow user service account. - -## Detailed description - -Use the component to: -* Locate the trained model at the Cloud Storage location you specify. -* Create a new model if a model provided by you doesn’t exist. -* Delete the existing model version if `replace_existing_version` is enabled. -* Create a new version of the model from the trained model. 
-* Set the new version as the default version of the model if `set_default` is enabled. - -Follow these steps to use the component in a pipeline: - -1. Install the Kubeflow Pipeline SDK: - - - - -```python -%%capture --no-stderr - -!pip3 install kfp --upgrade -``` - -2. Load the component using KFP SDK - - -```python -import kfp.components as comp - -mlengine_deploy_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/ml_engine/deploy/component.yaml') -help(mlengine_deploy_op) -``` - -### Sample -Note: The following sample code works in IPython notebook or directly in Python code. - -In this sample, you deploy a pre-built trained model from `gs://ml-pipeline-playground/samples/ml_engine/census/trained_model/` to Cloud ML Engine. The deployed model is `kfp_sample_model`. A new version is created every time the sample is run, and the latest version is set as the default version of the deployed model. - -#### Set sample parameters - - -```python -# Required Parameters -PROJECT_ID = '' - -# Optional Parameters -EXPERIMENT_NAME = 'CLOUDML - Deploy' -TRAINED_MODEL_PATH = 'gs://ml-pipeline-playground/samples/ml_engine/census/trained_model/' -``` - -#### Example pipeline that uses the component - - -```python -import kfp.dsl as dsl -import json -@dsl.pipeline( - name='CloudML deploy pipeline', - description='CloudML deploy pipeline' -) -def pipeline( - model_uri = 'gs://ml-pipeline-playground/samples/ml_engine/census/trained_model/', - project_id = PROJECT_ID, - model_id = 'kfp_sample_model', - version_id = '', - runtime_version = '1.10', - python_version = '', - version = {}, - replace_existing_version = 'False', - set_default = 'True', - wait_interval = '30'): - task = mlengine_deploy_op( - model_uri=model_uri, - project_id=project_id, - model_id=model_id, - version_id=version_id, - runtime_version=runtime_version, - python_version=python_version, - version=version, - replace_existing_version=replace_existing_version, - set_default=set_default, - wait_interval=wait_interval) -``` - -#### Compile the pipeline - - -```python -pipeline_func = pipeline -pipeline_filename = pipeline_func.__name__ + '.zip' -import kfp.compiler as compiler -compiler.Compiler().compile(pipeline_func, pipeline_filename) -``` - -#### Submit the pipeline for execution - - -```python -#Specify pipeline argument values -arguments = {} - -#Get or create an experiment and submit a pipeline run -import kfp -client = kfp.Client() -experiment = client.create_experiment(EXPERIMENT_NAME) - -#Submit a pipeline run -run_name = pipeline_func.__name__ + ' run' -run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments) -``` - -## References -* [Component python code](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_deploy.py) -* [Component docker file](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/Dockerfile) -* [Sample notebook](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/ml_engine/deploy/sample.ipynb) -* [Cloud Machine Learning Engine Model REST API](https://cloud.google.com/ml-engine/reference/rest/v1/projects.models) -* [Cloud Machine Learning Engine Version REST API](https://cloud.google.com/ml-engine/reference/rest/v1/projects.versions) - -## License -By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the 
[Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control. diff --git a/components/contrib/google-cloud/ml_engine/deploy/component.yaml b/components/contrib/google-cloud/ml_engine/deploy/component.yaml deleted file mode 100644 index b046de05be9..00000000000 --- a/components/contrib/google-cloud/ml_engine/deploy/component.yaml +++ /dev/null @@ -1,119 +0,0 @@ -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: Deploying a trained model to Cloud Machine Learning Engine -description: | - A Kubeflow Pipeline component to deploy a trained model from a Cloud Storage - path to a Cloud Machine Learning Engine service. -metadata: - labels: - add-pod-env: 'true' -inputs: - - name: model_uri - description: >- - Required. The Cloud Storage URI which contains a model file. Commonly - used TF model search paths (export/exporter) will be used if they exist. - type: GCSPath - - name: project_id - description: 'Required.The ID of the parent project of the serving model.' - type: GCPProjectID - - name: model_id - description: >- - Optional. The user-specified name of the model. If it is not provided, - the operation uses a random name. - default: '' - type: String - - name: version_id - description: >- - Optional. The user-specified name of the version. If it is not provided, - the operation uses a random name. - default: '' - type: String - - name: runtime_version - description: >- - Optional. The [Cloud ML Engine runtime version](https://cloud.google.com/ml-engine/docs/tensorflow/runtime-version-list) to use for - this deployment. If it is not set, the Cloud ML Engine uses the default - stable version, 1.0. - default: '' - type: String - - name: python_version - description: >- - Optional. The version of Python used in the prediction. If it is not set, - the default version is `2.7`. Python `3.5` is available when the - runtime_version is set to `1.4` and above. Python `2.7` works with all - supported runtime versions. - default: '' - type: String - - name: model - description: >- - Optional. The JSON payload of the new - [Model](https://cloud.google.com/ml-engine/reference/rest/v1/projects.models), if it does not exist. - default: '' - type: Dict - - name: version - description: >- - Optional. The JSON payload of the new - [Version](https://cloud.google.com/ml-engine/reference/rest/v1/projects.models.versions). - default: '' - type: Dict - - name: replace_existing_version - description: >- - A Boolean flag that indicates whether to replace existing version in case of conflict. - default: 'False' - type: Bool - - name: set_default - description: >- - A Boolean flag that indicates whether to set the new version as default version in the model. - default: 'False' - type: Bool - - name: wait_interval - description: 'A time-interval to wait for in case the operation has a long run time.' 
- default: '30' - type: Integer -outputs: - - name: model_uri - description: 'The Cloud Storage URI of the trained model.' - type: GCSPath - - name: model_name - description: 'The name of the deployed model.' - type: String - - name: version_name - description: 'The name of the deployed version.' - type: String - - name: MLPipeline UI metadata - type: UI metadata -implementation: - container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.7.0-rc.3 - command: ["python", -u, -m, "kfp_component.launcher"] - args: [ - --ui_metadata_path, {outputPath: MLPipeline UI metadata}, - kfp_component.google.ml_engine, deploy, - --model_uri, {inputValue: model_uri}, - --project_id, {inputValue: project_id}, - --model_id, {inputValue: model_id}, - --version_id, {inputValue: version_id}, - --runtime_version, {inputValue: runtime_version}, - --python_version, {inputValue: python_version}, - --model, {inputValue: model}, - --version, {inputValue: version}, - --replace_existing_version, {inputValue: replace_existing_version}, - --set_default, {inputValue: set_default}, - --wait_interval, {inputValue: wait_interval}, - --model_uri_output_path, {outputPath: model_uri}, - --model_name_output_path, {outputPath: model_name}, - --version_name_output_path, {outputPath: version_name}, - ] - env: - KFP_POD_NAME: "{{pod.name}}" diff --git a/components/contrib/google-cloud/ml_engine/deploy/sample.ipynb b/components/contrib/google-cloud/ml_engine/deploy/sample.ipynb deleted file mode 100644 index 121845ad016..00000000000 --- a/components/contrib/google-cloud/ml_engine/deploy/sample.ipynb +++ /dev/null @@ -1,282 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Name\n", - "\n", - "Deploying a trained model to Cloud Machine Learning Engine \n", - "\n", - "\n", - "# Label\n", - "\n", - "Cloud Storage, Cloud ML Engine, Kubeflow, Pipeline\n", - "\n", - "\n", - "# Summary\n", - "\n", - "A Kubeflow Pipeline component to deploy a trained model from a Cloud Storage location to Cloud ML Engine.\n", - "\n", - "\n", - "# Details\n", - "\n", - "\n", - "## Intended use\n", - "\n", - "Use the component to deploy a trained model to Cloud ML Engine. The deployed model can serve online or batch predictions in a Kubeflow Pipeline.\n", - "\n", - "\n", - "## Runtime arguments\n", - "\n", - "| Argument | Description | Optional | Data type | Accepted values | Default |\n", - "|--------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------|--------------|-----------------|---------|\n", - "| model_uri | The URI of a Cloud Storage directory that contains a trained model file.
Or
An [Estimator export base directory](https://www.tensorflow.org/guide/saved_model#perform_the_export) that contains a list of subdirectories named by timestamp. The directory with the latest timestamp is used to load the trained model file. | No | GCSPath | | |\n", - "| project_id | The ID of the Google Cloud Platform (GCP) project of the serving model. | No | GCPProjectID | | |\n", - "| model_id | The name of the trained model. | Yes | String | | None |\n", - "| version_id | The name of the version of the model. If it is not provided, the operation uses a random name. | Yes | String | | None |\n", - "| runtime_version | The Cloud ML Engine runtime version to use for this deployment. If it is not provided, the default stable version, 1.0, is used. | Yes | String | | None |\n", - "| python_version | The version of Python used in the prediction. If it is not provided, version 2.7 is used. You can use Python 3.5 if runtime_version is set to 1.4 or above. Python 2.7 works with all supported runtime versions. | Yes | String | | 2.7 |\n", - "| model | The JSON payload of the new [model](https://cloud.google.com/ml-engine/reference/rest/v1/projects.models). | Yes | Dict | | None |\n", - "| version | The new [version](https://cloud.google.com/ml-engine/reference/rest/v1/projects.models.versions) of the trained model. | Yes | Dict | | None |\n", - "| replace_existing_version | Indicates whether to replace the existing version in case of a conflict (if the same version number is found.) | Yes | Boolean | | FALSE |\n", - "| set_default | Indicates whether to set the new version as the default version in the model. | Yes | Boolean | | FALSE |\n", - "| wait_interval | The number of seconds to wait in case the operation has a long run time. | Yes | Integer | | 30 |\n", - "\n", - "\n", - "\n", - "## Input data schema\n", - "\n", - "The component looks for a trained model in the location specified by the `model_uri` runtime argument. The accepted trained models are:\n", - "\n", - "\n", - "* [Tensorflow SavedModel](https://cloud.google.com/ml-engine/docs/tensorflow/exporting-for-prediction) \n", - "* [Scikit-learn & XGBoost model](https://cloud.google.com/ml-engine/docs/scikit/exporting-for-prediction)\n", - "\n", - "The accepted file formats are:\n", - "\n", - "* *.pb\n", - "* *.pbtext\n", - "* model.bst\n", - "* model.joblib\n", - "* model.pkl\n", - "\n", - "`model_uri` can also be an [Estimator export base directory, ](https://www.tensorflow.org/guide/saved_model#perform_the_export)which contains a list of subdirectories named by timestamp. The directory with the latest timestamp is used to load the trained model file.\n", - "\n", - "## Output\n", - "| Name | Description | Type |\n", - "|:------- |:---- | :--- |\n", - "| job_id | The ID of the created job. | String |\n", - "| job_dir | The Cloud Storage path that contains the trained model output files. | GCSPath |\n", - "\n", - "\n", - "## Cautions & requirements\n", - "\n", - "To use the component, you must:\n", - "\n", - "* [Set up the cloud environment](https://cloud.google.com/ml-engine/docs/tensorflow/getting-started-training-prediction#setup).\n", - "* The component can authenticate to GCP. 
Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", - "* Grant read access to the Cloud Storage bucket that contains the trained model to the Kubeflow user service account.\n", - "\n", - "## Detailed description\n", - "\n", - "Use the component to: \n", - "* Locate the trained model at the Cloud Storage location you specify.\n", - "* Create a new model if a model provided by you doesn’t exist.\n", - "* Delete the existing model version if `replace_existing_version` is enabled.\n", - "* Create a new version of the model from the trained model.\n", - "* Set the new version as the default version of the model if `set_default` is enabled.\n", - "\n", - "Follow these steps to use the component in a pipeline:\n", - "\n", - "1. Install the Kubeflow Pipeline SDK:\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "\n", - "!pip3 install kfp --upgrade" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. Load the component using KFP SDK" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.components as comp\n", - "\n", - "mlengine_deploy_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/ml_engine/deploy/component.yaml')\n", - "help(mlengine_deploy_op)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Sample\n", - "Note: The following sample code works in IPython notebook or directly in Python code.\n", - "\n", - "In this sample, you deploy a pre-built trained model from `gs://ml-pipeline-playground/samples/ml_engine/census/trained_model/` to Cloud ML Engine. The deployed model is `kfp_sample_model`. 
A new version is created every time the sample is run, and the latest version is set as the default version of the deployed model.\n", - "\n", - "#### Set sample parameters" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "parameters" - ] - }, - "outputs": [], - "source": [ - "# Required Parameters\n", - "PROJECT_ID = ''\n", - "\n", - "# Optional Parameters\n", - "EXPERIMENT_NAME = 'CLOUDML - Deploy'\n", - "TRAINED_MODEL_PATH = 'gs://ml-pipeline-playground/samples/ml_engine/census/trained_model/'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Example pipeline that uses the component" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.dsl as dsl\n", - "import json\n", - "@dsl.pipeline(\n", - " name='CloudML deploy pipeline',\n", - " description='CloudML deploy pipeline'\n", - ")\n", - "def pipeline(\n", - " model_uri = 'gs://ml-pipeline-playground/samples/ml_engine/census/trained_model/',\n", - " project_id = PROJECT_ID,\n", - " model_id = 'kfp_sample_model',\n", - " version_id = '',\n", - " runtime_version = '1.10',\n", - " python_version = '',\n", - " version = {},\n", - " replace_existing_version = 'False',\n", - " set_default = 'True',\n", - " wait_interval = '30'):\n", - " task = mlengine_deploy_op(\n", - " model_uri=model_uri, \n", - " project_id=project_id, \n", - " model_id=model_id, \n", - " version_id=version_id, \n", - " runtime_version=runtime_version, \n", - " python_version=python_version,\n", - " version=version, \n", - " replace_existing_version=replace_existing_version, \n", - " set_default=set_default, \n", - " wait_interval=wait_interval)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Compile the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "pipeline_func = pipeline\n", - "pipeline_filename = pipeline_func.__name__ + '.zip'\n", - "import kfp.compiler as compiler\n", - "compiler.Compiler().compile(pipeline_func, pipeline_filename)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Submit the pipeline for execution" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "#Specify pipeline argument values\n", - "arguments = {}\n", - "\n", - "#Get or create an experiment and submit a pipeline run\n", - "import kfp\n", - "client = kfp.Client()\n", - "experiment = client.create_experiment(EXPERIMENT_NAME)\n", - "\n", - "#Submit a pipeline run\n", - "run_name = pipeline_func.__name__ + ' run'\n", - "run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## References\n", - "* [Component python code](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_deploy.py)\n", - "* [Component docker file](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/Dockerfile)\n", - "* [Sample notebook](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/ml_engine/deploy/sample.ipynb)\n", - "* [Cloud Machine Learning Engine Model REST API](https://cloud.google.com/ml-engine/reference/rest/v1/projects.models)\n", - "* [Cloud Machine Learning Engine Version REST 
API](https://cloud.google.com/ml-engine/reference/rest/v1/projects.versions)\n", - "\n", - "## License\n", - "By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control." - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.4" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/components/contrib/google-cloud/ml_engine/train/README.md b/components/contrib/google-cloud/ml_engine/train/README.md deleted file mode 100644 index b12901986d7..00000000000 --- a/components/contrib/google-cloud/ml_engine/train/README.md +++ /dev/null @@ -1,241 +0,0 @@ - -# Name -Component: Submitting an AI Platform training job as a pipeline step - -# Label - AI Platform, Kubeflow - -# Summary -A Kubeflow pipeline component to submit an AI Platform training job as a step in a pipeline. - -# Facets - -Use case: -Other - -Technique: -Other - -Input data type: -Tabular - -ML workflow: -Training - -# Details -## Intended use -Use this component to submit a training job to AI Platform from a Kubeflow pipeline. - -## Runtime arguments -| Argument | Description | Optional | Data type | Accepted values | Default | -|:------------------|:------------------|:----------|:--------------|:-----------------|:-------------| -| project_id | The Google Cloud Platform (GCP) project ID of the job. | No | GCPProjectID | - | - | -| python_module | The name of the Python module to run after installing the training program. | Yes | String | - | None | -| package_uris | The Cloud Storage location of the packages that contain the training program and any additional dependencies. The maximum number of package URIs is 100. | Yes | List | -| None | -| region | The Compute Engine region in which the training job is run. | Yes | GCPRegion | -| us-central1 | -| args | The command line arguments to pass to the training program. | Yes | List | - | None | -| job_dir | A Cloud Storage path in which to store the training outputs and other data needed for training. This path is passed to your TensorFlow program as the command-line argument, `job-dir`. The benefit of specifying this field is that Cloud ML validates the path for use in training. | Yes | GCSPath | - | None | -| python_version | The version of Python used in training. If it is not set, the default version is 2.7. Python 3.5 is available when the runtime version is set to 1.4 and above. | Yes | String | - | None | -| runtime_version | The runtime version of AI Platform to use for training. If it is not set, AI Platform uses the default. | Yes | String | - | 1 | -| master_image_uri | The Docker image to run on the master replica. This image must be in Container Registry. | Yes | GCRPath | - | None | -| worker_image_uri | The Docker image to run on the worker replica. This image must be in Container Registry. | Yes | GCRPath |- | None | -| training_input | The input parameters to create a training job. 
| Yes | Dict | [TrainingInput](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#TrainingInput) | None | -| job_id_prefix | The prefix of the job ID that is generated. | Yes | String | - | None | -| job_id | The ID of the job to create, takes precedence over generated job id if set. | Yes | String | - | None | -| wait_interval | The number of seconds to wait between API calls to get the status of the job. | Yes | Integer | - | 30 | - - - -## Input data schema - -The component accepts two types of inputs: -* A list of Python packages from Cloud Storage. - * You can manually build a Python package and upload it to Cloud Storage by following this [guide](https://cloud.google.com/ml-engine/docs/tensorflow/packaging-trainer#manual-build). -* A Docker container from Container Registry. - * Follow this [guide](https://cloud.google.com/ml-engine/docs/using-containers) to publish and use a Docker container with this component. - -## Output -| Name | Description | Type | -|:------- |:---- | :--- | -| job_id | The ID of the created job. | String | -| job_dir | The Cloud Storage path that contains the output files with the trained model. | GCSPath | - - -## Cautions & requirements - -To use the component, you must: - -* Set up a cloud environment by following this [guide](https://cloud.google.com/ml-engine/docs/tensorflow/getting-started-training-prediction#setup). -* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. -* Grant the following access to the Kubeflow user service account: - * Read access to the Cloud Storage buckets which contain the input data, packages, or Docker images. - * Write access to the Cloud Storage bucket of the output directory. - -## Detailed description - -The component builds the [TrainingInput](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#TrainingInput) payload and submits a job via the [AI Platform REST API](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs). - -The steps to use the component in a pipeline are: - - -1. Install the Kubeflow pipeline's SDK: - - ```python - %%capture --no-stderr - - !pip3 install kfp --upgrade - ``` - -2. Load the component using the Kubeflow pipeline's SDK: - - ```python - import kfp.components as comp - - mlengine_train_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/ml_engine/train/component.yaml') - help(mlengine_train_op) - ``` -### Sample -The following sample code works in an IPython notebook or directly in Python code. - -In this sample, you use the code from the [census estimator sample](https://github.com/GoogleCloudPlatform/cloudml-samples/tree/master/census/estimator) to train a model on AI Platform. To upload the code to AI Platform, package the Python code and upload it to a Cloud Storage bucket. - -Note: You must have read and write permissions on the bucket that you use as the working directory. 
- -#### Set sample parameters - -```python -# Required parameters -PROJECT_ID = '' -GCS_WORKING_DIR = 'gs://' # No ending slash -``` - -```python -# Optional parameters -EXPERIMENT_NAME = 'CLOUDML - Train' -TRAINER_GCS_PATH = GCS_WORKING_DIR + '/train/trainer.tar.gz' -OUTPUT_GCS_PATH = GCS_WORKING_DIR + '/train/output/' -``` - -#### Clean up the working directory - -```python -%%capture --no-stderr -!gsutil rm -r $GCS_WORKING_DIR -``` - -#### Download the sample trainer code to a local directory - -```python -%%capture --no-stderr -!wget https://github.com/GoogleCloudPlatform/cloudml-samples/archive/master.zip -!unzip master.zip -``` - -#### Package code and upload the package to Cloud Storage - -```python -%%capture --no-stderr -%%bash -s "$TRAINER_GCS_PATH" -pushd ./cloudml-samples-master/census/estimator/ -python setup.py sdist -gsutil cp dist/preprocessing-1.0.tar.gz $1 -popd -rm -fr ./cloudml-samples-master/ ./master.zip ./dist -``` - -#### Example pipeline that uses the component - -```python -import kfp.dsl as dsl -import json -@dsl.pipeline( - name='CloudML training pipeline', - description='CloudML training pipeline' -) -def pipeline( - project_id = PROJECT_ID, - python_module = 'trainer.task', - package_uris = json.dumps([TRAINER_GCS_PATH]), - region = 'us-central1', - args = json.dumps([ - '--train-files', 'gs://cloud-samples-data/ml-engine/census/data/adult.data.csv', - '--eval-files', 'gs://cloud-samples-data/ml-engine/census/data/adult.test.csv', - '--train-steps', '1000', - '--eval-steps', '100', - '--verbosity', 'DEBUG' - ]), - job_dir = OUTPUT_GCS_PATH, - python_version = '', - runtime_version = '1.10', - master_image_uri = '', - worker_image_uri = '', - training_input = '', - job_id_prefix = '', - job_id = '', - wait_interval = '30'): - task = mlengine_train_op( - project_id=project_id, - python_module=python_module, - package_uris=package_uris, - region=region, - args=args, - job_dir=job_dir, - python_version=python_version, - runtime_version=runtime_version, - master_image_uri=master_image_uri, - worker_image_uri=worker_image_uri, - training_input=training_input, - job_id_prefix=job_id_prefix, - job_id=job_id, - wait_interval=wait_interval) -``` - -#### Compile the pipeline - -```python -pipeline_func = pipeline -pipeline_filename = pipeline_func.__name__ + '.zip' -import kfp.compiler as compiler -compiler.Compiler().compile(pipeline_func, pipeline_filename) -``` - -#### Submit the pipeline for execution - -```python -#Specify values for the pipeline's arguments -arguments = {} - -#Get or create an experiment -import kfp -client = kfp.Client() -experiment = client.create_experiment(EXPERIMENT_NAME) - -#Submit a pipeline run -run_name = pipeline_func.__name__ + ' run' -run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments) -``` - -#### Inspect the results - -Use the following command to inspect the contents in the output directory: - -```python -!gsutil ls $OUTPUT_GCS_PATH -``` - -## References -* [Component Python code](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_train.py) -* [Component Docker file](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/Dockerfile) -* [Sample notebook](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/ml_engine/train/sample.ipynb) -* [AI Platform REST API - Resource: Job](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs) - -## License -By deploying or 
using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control. diff --git a/components/contrib/google-cloud/ml_engine/train/component.yaml b/components/contrib/google-cloud/ml_engine/train/component.yaml deleted file mode 100644 index c2edbd0b065..00000000000 --- a/components/contrib/google-cloud/ml_engine/train/component.yaml +++ /dev/null @@ -1,135 +0,0 @@ -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: Submitting a Cloud ML training job as a pipeline step -description: | - A Kubeflow Pipeline component to submit a Cloud Machine Learning (Cloud ML) - Engine training job as a step in a pipeline. -metadata: - labels: - add-pod-env: 'true' -inputs: - - name: project_id - description: 'Required. The ID of the parent project of the job.' - type: GCPProjectID - - name: python_module - description: 'The Python module name to run after installing the packages.' - default: '' - type: String - - name: package_uris - description: >- - The Cloud Storage location of the packages (that contain the training program - and any additional dependencies). The maximum number of package URIs is 100. - default: '' - type: List - - name: region - description: 'The Compute Engine region in which the training job is run.' - default: '' - type: GCPRegion - - name: args - description: 'The command line arguments to pass to the program.' - default: '' - type: List - - name: job_dir - description: >- - A Cloud Storage path in which to store the training outputs and other data - needed for training. This path is passed to your TensorFlow program as the - `job-dir` command-line argument. The benefit of specifying this field is - that Cloud ML validates the path for use in training. - default: '' - type: GCSPath - - name: python_version - description: >- - The version of Python used in training. If not set, the default - version is `2.7`. Python `3.5` is available when runtimeVersion is set to `1.4` - and above. - default: '' - type: String - - name: runtime_version - description: >- - The Cloud ML Engine runtime version to use for training. If not set, - Cloud ML Engine uses the default stable version, 1.0. - default: '' - type: String - - name: master_image_uri - description: >- - The Docker image to run on the master replica. This image must be in - Container Registry. - default: '' - type: GCRPath - - name: worker_image_uri - description: >- - The Docker image to run on the worker replica. This image must be in - Container Registry. - default: '' - type: GCRPath - - name: training_input - description: >- - The input parameters to create a training job. 
It is the JSON payload - of a [TrainingInput](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#TrainingInput) - default: '' - type: Dict - - name: job_id_prefix - description: 'The prefix of the generated job id.' - default: '' - type: String - - name: job_id - description: >- - The ID of the job to create, takes precedence over generated - job id if set. - default: '' - type: String - - name: wait_interval - description: >- - Optional. A time-interval to wait for between calls to get the job status. - Defaults to 30.' - default: '30' - type: Integer -outputs: - - name: job_id - description: 'The ID of the created job.' - type: String - - name: job_dir - description: >- - The output path in Cloud Storage of the training job, which contains - the trained model files. - type: GCSPath - - name: MLPipeline UI metadata - type: UI metadata -implementation: - container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:1.7.0-rc.3 - command: ["python", -u, -m, "kfp_component.launcher"] - args: [ - --ui_metadata_path, {outputPath: MLPipeline UI metadata}, - kfp_component.google.ml_engine, train, - --project_id, {inputValue: project_id}, - --python_module, {inputValue: python_module}, - --package_uris, {inputValue: package_uris}, - --region, {inputValue: region}, - --args, {inputValue: args}, - --job_dir, {inputValue: job_dir}, - --python_version, {inputValue: python_version}, - --runtime_version, {inputValue: runtime_version}, - --master_image_uri, {inputValue: master_image_uri}, - --worker_image_uri, {inputValue: worker_image_uri}, - --training_input, {inputValue: training_input}, - --job_id_prefix, {inputValue: job_id_prefix}, - --job_id, {inputValue: job_id}, - --wait_interval, {inputValue: wait_interval}, - --job_id_output_path, {outputPath: job_id}, - --job_dir_output_path, {outputPath: job_dir}, - ] - env: - KFP_POD_NAME: "{{pod.name}}" diff --git a/components/contrib/google-cloud/ml_engine/train/sample.ipynb b/components/contrib/google-cloud/ml_engine/train/sample.ipynb deleted file mode 100644 index 5fba4e78ffc..00000000000 --- a/components/contrib/google-cloud/ml_engine/train/sample.ipynb +++ /dev/null @@ -1,359 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Name\n", - "Submitting a Cloud Machine Learning Engine training job as a pipeline step\n", - "\n", - "# Label\n", - "GCP, Cloud ML Engine, Machine Learning, pipeline, component, Kubeflow, Kubeflow Pipeline\n", - "\n", - "# Summary\n", - "A Kubeflow Pipeline component to submit a Cloud ML Engine training job as a step in a pipeline.\n", - "\n", - "# Details\n", - "## Intended use\n", - "Use this component to submit a training job to Cloud ML Engine from a Kubeflow Pipeline. \n", - "\n", - "## Runtime arguments\n", - "| Argument | Description | Optional | Data type | Accepted values | Default |\n", - "|:------------------|:------------------|:----------|:--------------|:-----------------|:-------------|\n", - "| project_id | The ID of the Google Cloud Platform (GCP) project of the job. | No | GCPProjectID | | |\n", - "| python_module | The name of the Python module to run after installing the training program. | Yes | String | | None |\n", - "| package_uris | The Cloud Storage location of the packages that contain the training program and any additional dependencies. The maximum number of package URIs is 100. | Yes | List | | None |\n", - "| region | The Compute Engine region in which the training job is run. 
| Yes | GCPRegion | | us-central1 |\n", - "| args | The command line arguments to pass to the training program. | Yes | List | | None |\n", - "| job_dir | A Cloud Storage path in which to store the training outputs and other data needed for training. This path is passed to your TensorFlow program as the `job-dir` command-line argument. The benefit of specifying this field is that Cloud ML validates the path for use in training. | Yes | GCSPath | | None |\n", - "| python_version | The version of Python used in training. If it is not set, the default version is 2.7. Python 3.5 is available when the runtime version is set to 1.4 and above. | Yes | String | | None |\n", - "| runtime_version | The runtime version of Cloud ML Engine to use for training. If it is not set, Cloud ML Engine uses the default. | Yes | String | | 1 |\n", - "| master_image_uri | The Docker image to run on the master replica. This image must be in Container Registry. | Yes | GCRPath | | None |\n", - "| worker_image_uri | The Docker image to run on the worker replica. This image must be in Container Registry. | Yes | GCRPath | | None |\n", - "| training_input | The input parameters to create a training job. | Yes | Dict | [TrainingInput](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#TrainingInput) | None |\n", - "| job_id_prefix | The prefix of the job ID that is generated. | Yes | String | | None |\n", - "| job_id | The ID of the job to create, takes precedence over generated job id if set. | Yes | String | - | None |\n", - "| wait_interval | The number of seconds to wait between API calls to get the status of the job. | Yes | Integer | | 30 |\n", - "\n", - "\n", - "\n", - "## Input data schema\n", - "\n", - "The component accepts two types of inputs:\n", - "* A list of Python packages from Cloud Storage.\n", - " * You can manually build a Python package and upload it to Cloud Storage by following this [guide](https://cloud.google.com/ml-engine/docs/tensorflow/packaging-trainer#manual-build).\n", - "* A Docker container from Container Registry. \n", - " * Follow this [guide](https://cloud.google.com/ml-engine/docs/using-containers) to publish and use a Docker container with this component.\n", - "\n", - "## Output\n", - "| Name | Description | Type |\n", - "|:------- |:---- | :--- |\n", - "| job_id | The ID of the created job. | String |\n", - "| job_dir | The Cloud Storage path that contains the trained model output files. | GCSPath |\n", - "\n", - "\n", - "## Cautions & requirements\n", - "\n", - "To use the component, you must:\n", - "\n", - "* Set up a cloud environment by following this [guide](https://cloud.google.com/ml-engine/docs/tensorflow/getting-started-training-prediction#setup).\n", - "* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", - "* Grant the following access to the Kubeflow user service account: \n", - " * Read access to the Cloud Storage buckets which contain the input data, packages, or Docker images.\n", - " * Write access to the Cloud Storage bucket of the output directory.\n", - "\n", - "## Detailed description\n", - "\n", - "The component builds the [TrainingInput](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#TrainingInput) payload and submits a job via the [Cloud ML Engine REST API](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs).\n", - "\n", - "The steps to use the component in a pipeline are:\n", - "\n", - "\n", - "1. 
Install the Kubeflow Pipeline SDK:\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "\n", - "!pip3 install kfp --upgrade" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "2. Load the component using KFP SDK" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.components as comp\n", - "\n", - "mlengine_train_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/ml_engine/train/component.yaml')\n", - "help(mlengine_train_op)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Sample\n", - "Note: The following sample code works in an IPython notebook or directly in Python code.\n", - "\n", - "In this sample, you use the code from the [census estimator sample](https://github.com/GoogleCloudPlatform/cloudml-samples/tree/master/census/estimator) to train a model in Cloud ML Engine. To upload the code to Cloud ML Engine, package the Python code and upload it to a Cloud Storage bucket. \n", - "\n", - "Note: You must have read and write permissions on the bucket that you use as the working directory.\n", - "#### Set sample parameters" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "parameters" - ] - }, - "outputs": [], - "source": [ - "# Required Parameters\n", - "PROJECT_ID = ''\n", - "GCS_WORKING_DIR = 'gs://' # No ending slash" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Optional Parameters\n", - "EXPERIMENT_NAME = 'CLOUDML - Train'\n", - "TRAINER_GCS_PATH = GCS_WORKING_DIR + '/train/trainer.tar.gz'\n", - "OUTPUT_GCS_PATH = GCS_WORKING_DIR + '/train/output/'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Clean up the working directory" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "!gsutil rm -r $GCS_WORKING_DIR" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Download the sample trainer code to local" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "!wget https://github.com/GoogleCloudPlatform/cloudml-samples/archive/master.zip\n", - "!unzip master.zip" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Package code and upload the package to Cloud Storage" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "%%bash -s \"$TRAINER_GCS_PATH\"\n", - "pushd ./cloudml-samples-master/census/estimator/\n", - "python setup.py sdist\n", - "gsutil cp dist/preprocessing-1.0.tar.gz $1\n", - "popd\n", - "rm -fr ./cloudml-samples-master/ ./master.zip ./dist" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Example pipeline that uses the component" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.dsl as dsl\n", - "import json\n", - "@dsl.pipeline(\n", - " name='CloudML training pipeline',\n", - " description='CloudML training pipeline'\n", - ")\n", - "def pipeline(\n", - " project_id = PROJECT_ID,\n", - " python_module = 
'trainer.task',\n", - " package_uris = json.dumps([TRAINER_GCS_PATH]),\n", - " region = 'us-central1',\n", - " args = json.dumps([\n", - " '--train-files', 'gs://cloud-samples-data/ml-engine/census/data/adult.data.csv',\n", - " '--eval-files', 'gs://cloud-samples-data/ml-engine/census/data/adult.test.csv',\n", - " '--train-steps', '1000',\n", - " '--eval-steps', '100',\n", - " '--verbosity', 'DEBUG'\n", - " ]),\n", - " job_dir = OUTPUT_GCS_PATH,\n", - " python_version = '',\n", - " runtime_version = '1.10',\n", - " master_image_uri = '',\n", - " worker_image_uri = '',\n", - " training_input = '',\n", - " job_id_prefix = '',\n", - " job_id = '',\n", - " wait_interval = '30'):\n", - " task = mlengine_train_op(\n", - " project_id=project_id, \n", - " python_module=python_module, \n", - " package_uris=package_uris, \n", - " region=region, \n", - " args=args, \n", - " job_dir=job_dir, \n", - " python_version=python_version,\n", - " runtime_version=runtime_version, \n", - " master_image_uri=master_image_uri, \n", - " worker_image_uri=worker_image_uri, \n", - " training_input=training_input, \n", - " job_id_prefix=job_id_prefix,\n", - " job_id=job_id,\n", - " wait_interval=wait_interval)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Compile the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "pipeline_func = pipeline\n", - "pipeline_filename = pipeline_func.__name__ + '.zip'\n", - "import kfp.compiler as compiler\n", - "compiler.Compiler().compile(pipeline_func, pipeline_filename)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Submit the pipeline for execution" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "#Specify pipeline argument values\n", - "arguments = {}\n", - "\n", - "#Get or create an experiment and submit a pipeline run\n", - "import kfp\n", - "client = kfp.Client()\n", - "experiment = client.create_experiment(EXPERIMENT_NAME)\n", - "\n", - "#Submit a pipeline run\n", - "run_name = pipeline_func.__name__ + ' run'\n", - "run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Inspect the results\n", - "\n", - "Use the following command to inspect the contents in the output directory:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "!gsutil ls $OUTPUT_GCS_PATH" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## References\n", - "* [Component python code](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_train.py)\n", - "* [Component docker file](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/container/Dockerfile)\n", - "* [Sample notebook](https://github.com/kubeflow/pipelines/blob/release-1.7/components/gcp/ml_engine/train/sample.ipynb)\n", - "* [Cloud Machine Learning Engine job REST API](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs)\n", - "\n", - "## License\n", - "By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). 
To the extent of a direct conflict of terms, the AI Hub Terms of Service will control." - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.4" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/components/contrib/google-cloud/storage/download/component.yaml b/components/contrib/google-cloud/storage/download/component.yaml deleted file mode 100644 index 83fbf96ebd7..00000000000 --- a/components/contrib/google-cloud/storage/download/component.yaml +++ /dev/null @@ -1,35 +0,0 @@ -name: Download from GCS -inputs: -- {name: GCS path, type: String} -outputs: -- {name: Data} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/google-cloud/storage/download/component.yaml' -implementation: - container: - image: google/cloud-sdk - command: - - bash # Pattern comparison only works in Bash - - -ex - - -c - - | - if [ -n "${GOOGLE_APPLICATION_CREDENTIALS}" ]; then - gcloud auth activate-service-account --key-file="${GOOGLE_APPLICATION_CREDENTIALS}" - fi - - uri="$0" - output_path="$1" - - # Checking whether the URI points to a single blob, a directory or a URI pattern - # URI points to a blob when that URI does not end with slash and listing that URI only yields the same URI - if [[ "$uri" != */ ]] && (gsutil ls "$uri" | grep --fixed-strings --line-regexp "$uri"); then - mkdir -p "$(dirname "$output_path")" - gsutil -m cp -r "$uri" "$output_path" - else - mkdir -p "$output_path" # When source path is a directory, gsutil requires the destination to also be a directory - gsutil -m rsync -r "$uri" "$output_path" # gsutil cp has different path handling than Linux cp. It always puts the source directory (name) inside the destination directory. gsutil rsync does not have that problem. 
- fi - - inputValue: GCS path - - outputPath: Data diff --git a/components/contrib/google-cloud/storage/download_blob/component.yaml b/components/contrib/google-cloud/storage/download_blob/component.yaml deleted file mode 100644 index afbebc18950..00000000000 --- a/components/contrib/google-cloud/storage/download_blob/component.yaml +++ /dev/null @@ -1,24 +0,0 @@ -name: Download from GCS -inputs: -- {name: GCS path, type: String} -outputs: -- {name: Data} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/google-cloud/storage/download_blob/component.yaml' -implementation: - container: - image: google/cloud-sdk - command: - - sh - - -ex - - -c - - | - if [ -n "${GOOGLE_APPLICATION_CREDENTIALS}" ]; then - gcloud auth activate-service-account --key-file="${GOOGLE_APPLICATION_CREDENTIALS}" - fi - mkdir -p "$(dirname "$1")" - gsutil -m cp -r "$0" "$1" - - inputValue: GCS path - - outputPath: Data diff --git a/components/contrib/google-cloud/storage/download_dir/component.yaml b/components/contrib/google-cloud/storage/download_dir/component.yaml deleted file mode 100644 index 337e61e5276..00000000000 --- a/components/contrib/google-cloud/storage/download_dir/component.yaml +++ /dev/null @@ -1,24 +0,0 @@ -name: Download from GCS -inputs: -- {name: GCS path, type: String} -outputs: -- {name: Data} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/google-cloud/storage/download_dir/component.yaml' -implementation: - container: - image: google/cloud-sdk - command: - - sh - - -ex - - -c - - | - if [ -n "${GOOGLE_APPLICATION_CREDENTIALS}" ]; then - gcloud auth activate-service-account --key-file="${GOOGLE_APPLICATION_CREDENTIALS}" - fi - mkdir -p "$1" - gsutil -m cp -r "$0" "$1" - - inputValue: GCS path - - outputPath: Data diff --git a/components/contrib/google-cloud/storage/list/component.yaml b/components/contrib/google-cloud/storage/list/component.yaml deleted file mode 100644 index 2f7b0389671..00000000000 --- a/components/contrib/google-cloud/storage/list/component.yaml +++ /dev/null @@ -1,25 +0,0 @@ -name: List blobs -inputs: -- {name: GCS path, type: String, description: 'GCS path for listing. 
For recursive listing use the "gs://bucket/path/**" syntax".'} -outputs: -- {name: Paths} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/google-cloud/storage/list/component.yaml' - volatile_component: 'true' -implementation: - container: - image: google/cloud-sdk - command: - - sh - - -ex - - -c - - | - if [ -n "${GOOGLE_APPLICATION_CREDENTIALS}" ]; then - gcloud auth activate-service-account --key-file="${GOOGLE_APPLICATION_CREDENTIALS}" - fi - mkdir -p "$(dirname "$1")" - gsutil ls "$0" > "$1" - - inputValue: GCS path - - outputPath: Paths diff --git a/components/contrib/google-cloud/storage/upload_to_explicit_uri/component.yaml b/components/contrib/google-cloud/storage/upload_to_explicit_uri/component.yaml deleted file mode 100644 index b939a0668a4..00000000000 --- a/components/contrib/google-cloud/storage/upload_to_explicit_uri/component.yaml +++ /dev/null @@ -1,27 +0,0 @@ -name: Upload to GCS -inputs: -- {name: Data} -- {name: GCS path, type: String} -outputs: -- {name: GCS path, type: String} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/google-cloud/storage/upload_to_explicit_uri/component.yaml' -implementation: - container: - image: google/cloud-sdk - command: - - sh - - -ex - - -c - - | - if [ -n "${GOOGLE_APPLICATION_CREDENTIALS}" ]; then - gcloud auth activate-service-account --key-file="${GOOGLE_APPLICATION_CREDENTIALS}" - fi - gsutil cp -r "$0" "$1" - mkdir -p "$(dirname "$2")" - echo "$1" > "$2" - - inputPath: Data - - inputValue: GCS path - - outputPath: GCS path diff --git a/components/contrib/google-cloud/storage/upload_to_unique_uri/component.yaml b/components/contrib/google-cloud/storage/upload_to_unique_uri/component.yaml deleted file mode 100644 index 8a7b6a239c2..00000000000 --- a/components/contrib/google-cloud/storage/upload_to_unique_uri/component.yaml +++ /dev/null @@ -1,28 +0,0 @@ -name: Upload to GCS -description: Upload to GCS with unique URI suffix -inputs: -- {name: Data} -- {name: GCS path prefix, type: String} -outputs: -- {name: GCS path, type: String} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/google-cloud/storage/upload_to_unique_uri/component.yaml' -implementation: - container: - image: google/cloud-sdk - command: - - sh - - -ex - - -c - - | - if [ -n "${GOOGLE_APPLICATION_CREDENTIALS}" ]; then - gcloud auth activate-service-account --key-file="${GOOGLE_APPLICATION_CREDENTIALS}" - fi - gsutil cp -r "$0" "$1" - mkdir -p "$(dirname "$2")" - echo "$1" > "$2" - - inputPath: Data - - concat: [{inputValue: GCS path prefix}, '{{workflow.uid}}_{{pod.name}}'] - - outputPath: GCS path diff --git a/components/contrib/great-expectations/validate/CSV/component.py b/components/contrib/great-expectations/validate/CSV/component.py deleted file mode 100644 index 8c169406058..00000000000 --- a/components/contrib/great-expectations/validate/CSV/component.py +++ /dev/null @@ -1,49 +0,0 @@ -from kfp.components import InputPath, create_component_from_func, OutputPath - - -def validate_csv_using_greatexpectations( - csv_path: InputPath(), - expectation_suite_path: InputPath(), - data_doc_path: OutputPath(), -): - """Validate a CSV dataset against a Great Expectations suite and create Data Doc (a validation report). 
- This component fails if validation is not successful. - - Annotations: - authors: Yaroslav Beshta , Anton Kiselev - - Args: - csv_path: Path to the CSV file with the dataset. - expectation_suite_path: Path to Great Expectations expectation suite (in JSON format). - """ - import json - import os - import sys - - import great_expectations as ge - from great_expectations.render import DefaultJinjaPageView - from great_expectations.render.renderer import ValidationResultsPageRenderer - - with open(expectation_suite_path, 'r') as json_file: - expectation_suite = json.load(json_file) - df = ge.read_csv(csv_path, expectation_suite=expectation_suite) - result = df.validate() - - document_model = ValidationResultsPageRenderer().render(result) - os.makedirs(os.path.dirname(data_doc_path), exist_ok=True) - with open(data_doc_path, 'w') as writer: - writer.write(DefaultJinjaPageView().render(document_model)) - - print(f'Saved: {data_doc_path}') - - if not result.success: - sys.exit(1) - - -if __name__ == '__main__': - validate_csv_using_greatexpectations_op = create_component_from_func( - validate_csv_using_greatexpectations, - output_component_file='component.yaml', - base_image='python:3.8', - packages_to_install=['great-expectations==0.13.11'] - ) diff --git a/components/contrib/great-expectations/validate/CSV/component.yaml b/components/contrib/great-expectations/validate/CSV/component.yaml deleted file mode 100644 index 323be407d80..00000000000 --- a/components/contrib/great-expectations/validate/CSV/component.yaml +++ /dev/null @@ -1,84 +0,0 @@ -name: Validate csv using greatexpectations -description: Validate a CSV dataset against a Great Expectations suite and create - Data Doc (a validation report). -inputs: -- {name: csv, description: Path to the CSV file with the dataset.} -- {name: expectation_suite, description: Path to Great Expectations expectation suite - (in JSON format).} -outputs: -- {name: data_doc} -implementation: - container: - image: python:3.8 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'great-expectations==0.13.11' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m - pip install --quiet --no-warn-script-location 'great-expectations==0.13.11' - --user) && "$0" "$@" - - sh - - -ec - - | - program_path=$(mktemp) - printf "%s" "$0" > "$program_path" - python3 -u "$program_path" "$@" - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def validate_csv_using_greatexpectations( - csv_path, - expectation_suite_path, - data_doc_path, - ): - """Validate a CSV dataset against a Great Expectations suite and create Data Doc (a validation report). - This component fails if validation is not successful. - - Annotations: - authors: Yaroslav Beshta , Anton Kiselev - - Args: - csv_path: Path to the CSV file with the dataset. - expectation_suite_path: Path to Great Expectations expectation suite (in JSON format). 
- """ - import json - import os - import sys - - import great_expectations as ge - from great_expectations.render import DefaultJinjaPageView - from great_expectations.render.renderer import ValidationResultsPageRenderer - - with open(expectation_suite_path, 'r') as json_file: - expectation_suite = json.load(json_file) - df = ge.read_csv(csv_path, expectation_suite=expectation_suite) - result = df.validate() - - document_model = ValidationResultsPageRenderer().render(result) - os.makedirs(os.path.dirname(data_doc_path), exist_ok=True) - with open(data_doc_path, 'w') as writer: - writer.write(DefaultJinjaPageView().render(document_model)) - - print(f'Saved: {data_doc_path}') - - if not result.success: - sys.exit(1) - - import argparse - _parser = argparse.ArgumentParser(prog='Validate csv using greatexpectations', description='Validate a CSV dataset against a Great Expectations suite and create Data Doc (a validation report).') - _parser.add_argument("--csv", dest="csv_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--expectation-suite", dest="expectation_suite_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--data-doc", dest="data_doc_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = validate_csv_using_greatexpectations(**_parsed_args) - args: - - --csv - - {inputPath: csv} - - --expectation-suite - - {inputPath: expectation_suite} - - --data-doc - - {outputPath: data_doc} diff --git a/components/contrib/great-expectations/validate/_samples/expectation_suite.json b/components/contrib/great-expectations/validate/_samples/expectation_suite.json deleted file mode 100644 index 17ca78dd694..00000000000 --- a/components/contrib/great-expectations/validate/_samples/expectation_suite.json +++ /dev/null @@ -1,185 +0,0 @@ -{ - "data_asset_type": "Dataset", - "expectation_suite_name": "dataset.warning", - "expectations": [ - { - "expectation_type": "expect_table_row_count_to_be_between", - "kwargs": { - "max_value": 1100, - "min_value": 900 - }, - "meta": { - "BasicSuiteBuilderProfiler": { - "confidence": "very low" - } - } - }, - { - "expectation_type": "expect_table_column_count_to_equal", - "kwargs": { - "value": 8 - }, - "meta": { - "BasicSuiteBuilderProfiler": { - "confidence": "very low" - } - } - }, - { - "expectation_type": "expect_table_columns_to_match_ordered_list", - "kwargs": { - "column_list": [ - "trip_seconds", - "trip_miles", - "pickup_community_area", - "dropoff_community_area", - "fare", - "tolls", - "extras", - "trip_total" - ] - }, - "meta": { - "BasicSuiteBuilderProfiler": { - "confidence": "very low" - } - } - }, - { - "expectation_type": "expect_column_values_to_not_be_null", - "kwargs": { - "column": "extras" - }, - "meta": { - "BasicSuiteBuilderProfiler": { - "confidence": "very low" - } - } - }, - { - "expectation_type": "expect_column_values_to_not_be_null", - "kwargs": { - "column": "trip_seconds" - }, - "meta": { - "BasicSuiteBuilderProfiler": { - "confidence": "very low" - } - } - }, - { - "expectation_type": "expect_column_min_to_be_between", - "kwargs": { - "column": "trip_seconds", - "max_value": 1, - "min_value": -1 - }, - "meta": { - "BasicSuiteBuilderProfiler": { - "confidence": "very low" - } - } - }, - { - "expectation_type": "expect_column_max_to_be_between", - "kwargs": { - "column": "trip_seconds", - "max_value": 50000, - "min_value": 30000 - }, - "meta": { - "BasicSuiteBuilderProfiler": { - 
"confidence": "very low" - } - } - }, - { - "expectation_type": "expect_column_mean_to_be_between", - "kwargs": { - "column": "trip_seconds", - "max_value": 900, - "min_value": 800 - }, - "meta": { - "BasicSuiteBuilderProfiler": { - "confidence": "very low" - } - } - }, - { - "expectation_type": "expect_column_median_to_be_between", - "kwargs": { - "column": "trip_seconds", - "max_value": 650.0, - "min_value": 550.0 - }, - "meta": { - "BasicSuiteBuilderProfiler": { - "confidence": "very low" - } - } - } - ], - "meta": { - "BasicSuiteBuilderProfiler": { - "batch_kwargs": { - "data_asset_name": "dataset", - "datasource": "data__dir", - "path": "", - "reader_method": "read_csv" - }, - "created_at": 1614768695.9824622, - "created_by": "BasicSuiteBuilderProfiler" - }, - "citations": [ - { - "batch_kwargs": { - "data_asset_name": "dataset", - "datasource": "data__dir", - "path": "", - "reader_method": "read_csv" - }, - "batch_markers": { - "ge_load_time": "20210303T105135.821129Z", - "pandas_data_fingerprint": "03c708739d62472c2b319752ad537752" - }, - "batch_parameters": null, - "citation_date": "20210303T105135.997607Z", - "comment": "BasicSuiteBuilderProfiler added a citation based on the current batch." - } - ], - "columns": { - "dropoff_community_area": { - "description": "" - }, - "extras": { - "description": "" - }, - "fare": { - "description": "" - }, - "pickup_community_area": { - "description": "" - }, - "tolls": { - "description": "" - }, - "trip_miles": { - "description": "" - }, - "trip_seconds": { - "description": "" - }, - "trip_total": { - "description": "" - } - }, - "great_expectations_version": "0.13.11", - "notes": { - "content": [ - "#### This is an _example_ suite\n\n- This suite was made by quickly glancing at 1000 rows of your data.\n- This is **not a production suite**. 
It is meant to show examples of expectations.\n- Because this suite was auto-generated using a very basic profiler that does not know your data like you do, many of the expectations may not be meaningful.\n" - ], - "format": "markdown" - } - } -} \ No newline at end of file diff --git a/components/contrib/great-expectations/validate/_samples/sample_pipleine.py b/components/contrib/great-expectations/validate/_samples/sample_pipleine.py deleted file mode 100644 index fc70814a5ca..00000000000 --- a/components/contrib/great-expectations/validate/_samples/sample_pipleine.py +++ /dev/null @@ -1,38 +0,0 @@ -from pathlib import Path - -import kfp.dsl -from kfp.components import ComponentStore, load_component_from_file - - -store = ComponentStore.default_store -chicago_taxi_dataset_op = store.load_component('datasets/Chicago_Taxi_Trips') - - -CURRENT_FOLDER = Path(__file__).parent -with open(CURRENT_FOLDER / 'expectation_suite.json') as file: - expectation_suite = file.read() - -validate_csv_op = load_component_from_file( - str(CURRENT_FOLDER.parent / 'CSV' / 'component.yaml') -) - - -@kfp.dsl.pipeline(name='Great Expectations') -def great_expectations_sample_pipeline(): - features = ['trip_seconds', 'trip_miles', 'pickup_community_area', 'dropoff_community_area', - 'fare', 'tolls', 'extras', 'trip_total'] - - csv_path = chicago_taxi_dataset_op( - select=','.join(features), - where='trip_start_timestamp >= "2019-01-01" AND trip_start_timestamp < "2019-02-01"', - limit=1000, - ).output - - validate_csv_op(csv=csv_path, - expectation_suite=expectation_suite) - - -if __name__ == '__main__': - kfp_endpoint = None - - kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func(great_expectations_sample_pipeline, arguments={}) diff --git a/components/contrib/json/Build_dict/component.py b/components/contrib/json/Build_dict/component.py deleted file mode 100644 index 5717d7aedbf..00000000000 --- a/components/contrib/json/Build_dict/component.py +++ /dev/null @@ -1,42 +0,0 @@ -from kfp.components import create_component_from_func - - -def build_dict( - key_1: str = None, - value_1: dict = None, - key_2: str = None, - value_2: dict = None, - key_3: str = None, - value_3: dict = None, - key_4: str = None, - value_4: dict = None, - key_5: str = None, - value_5: dict = None, -) -> dict: - """Creates a JSON object from multiple key and value pairs. - - Annotations: - author: Alexey Volkov - """ - result = dict([ - (key_1, value_1), - (key_2, value_2), - (key_3, value_3), - (key_4, value_4), - (key_5, value_5), - ]) - if None in result: - del result[None] - return result - - -if __name__ == '__main__': - build_dict_op = create_component_from_func( - build_dict, - base_image='python:3.8', - output_component_file='component.yaml', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/json/Build_dict/component.yaml", - }, - ) diff --git a/components/contrib/json/Build_dict/component.yaml b/components/contrib/json/Build_dict/component.yaml deleted file mode 100644 index e5604495679..00000000000 --- a/components/contrib/json/Build_dict/component.yaml +++ /dev/null @@ -1,153 +0,0 @@ -name: Build dict -description: Creates a JSON object from multiple key and value pairs. 
-inputs: -- {name: key_1, type: String, optional: true} -- {name: value_1, type: JsonObject, optional: true} -- {name: key_2, type: String, optional: true} -- {name: value_2, type: JsonObject, optional: true} -- {name: key_3, type: String, optional: true} -- {name: value_3, type: JsonObject, optional: true} -- {name: key_4, type: String, optional: true} -- {name: value_4, type: JsonObject, optional: true} -- {name: key_5, type: String, optional: true} -- {name: value_5, type: JsonObject, optional: true} -outputs: -- {name: Output, type: JsonObject} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/json/Build_dict/component.yaml' -implementation: - container: - image: python:3.8 - command: - - python3 - - -u - - -c - - | - def build_dict( - key_1 = None, - value_1 = None, - key_2 = None, - value_2 = None, - key_3 = None, - value_3 = None, - key_4 = None, - value_4 = None, - key_5 = None, - value_5 = None, - ): - """Creates a JSON object from multiple key and value pairs. - - Annotations: - author: Alexey Volkov - """ - result = dict([ - (key_1, value_1), - (key_2, value_2), - (key_3, value_3), - (key_4, value_4), - (key_5, value_5), - ]) - if None in result: - del result[None] - return result - - import json - def _serialize_json(obj) -> str: - if isinstance(obj, str): - return obj - import json - def default_serializer(obj): - if hasattr(obj, 'to_struct'): - return obj.to_struct() - else: - raise TypeError("Object of type '%s' is not JSON serializable and does not have .to_struct() method." % obj.__class__.__name__) - return json.dumps(obj, default=default_serializer, sort_keys=True) - - import argparse - _parser = argparse.ArgumentParser(prog='Build dict', description='Creates a JSON object from multiple key and value pairs.') - _parser.add_argument("--key-1", dest="key_1", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--value-1", dest="value_1", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--key-2", dest="key_2", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--value-2", dest="value_2", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--key-3", dest="key_3", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--value-3", dest="value_3", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--key-4", dest="key_4", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--value-4", dest="value_4", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--key-5", dest="key_5", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--value-5", dest="value_5", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = build_dict(**_parsed_args) - - _outputs = [_outputs] - - _output_serializers = [ - _serialize_json, - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - if: - cond: {isPresent: key_1} - then: - - --key-1 - - {inputValue: key_1} - - if: 
- cond: {isPresent: value_1} - then: - - --value-1 - - {inputValue: value_1} - - if: - cond: {isPresent: key_2} - then: - - --key-2 - - {inputValue: key_2} - - if: - cond: {isPresent: value_2} - then: - - --value-2 - - {inputValue: value_2} - - if: - cond: {isPresent: key_3} - then: - - --key-3 - - {inputValue: key_3} - - if: - cond: {isPresent: value_3} - then: - - --value-3 - - {inputValue: value_3} - - if: - cond: {isPresent: key_4} - then: - - --key-4 - - {inputValue: key_4} - - if: - cond: {isPresent: value_4} - then: - - --value-4 - - {inputValue: value_4} - - if: - cond: {isPresent: key_5} - then: - - --key-5 - - {inputValue: key_5} - - if: - cond: {isPresent: value_5} - then: - - --value-5 - - {inputValue: value_5} - - '----output-paths' - - {outputPath: Output} diff --git a/components/contrib/json/Build_list/component.py b/components/contrib/json/Build_list/component.py deleted file mode 100644 index cfef309e50a..00000000000 --- a/components/contrib/json/Build_list/component.py +++ /dev/null @@ -1,32 +0,0 @@ -from kfp.components import create_component_from_func - - -def build_list( - item_1: dict = None, - item_2: dict = None, - item_3: dict = None, - item_4: dict = None, - item_5: dict = None, -) -> list: - """Creates a JSON array from multiple items. - - Annotations: - author: Alexey Volkov - """ - result = [] - for item in [item_1, item_2, item_3, item_4, item_5]: - if item is not None: - result.append(item) - return result - - -if __name__ == '__main__': - build_list_op = create_component_from_func( - build_list, - base_image='python:3.8', - output_component_file='component.yaml', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/json/Build_list/component.yaml", - }, - ) diff --git a/components/contrib/json/Build_list/component.yaml b/components/contrib/json/Build_list/component.yaml deleted file mode 100644 index d183adcf868..00000000000 --- a/components/contrib/json/Build_list/component.yaml +++ /dev/null @@ -1,108 +0,0 @@ -name: Build list -description: Creates a JSON array from multiple items. -inputs: -- {name: item_1, type: JsonObject, optional: true} -- {name: item_2, type: JsonObject, optional: true} -- {name: item_3, type: JsonObject, optional: true} -- {name: item_4, type: JsonObject, optional: true} -- {name: item_5, type: JsonObject, optional: true} -outputs: -- {name: Output, type: JsonArray} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/json/Build_list/component.yaml' -implementation: - container: - image: python:3.8 - command: - - python3 - - -u - - -c - - | - def build_list( - item_1 = None, - item_2 = None, - item_3 = None, - item_4 = None, - item_5 = None, - ): - """Creates a JSON array from multiple items. - - Annotations: - author: Alexey Volkov - """ - result = [] - for item in [item_1, item_2, item_3, item_4, item_5]: - if item is not None: - result.append(item) - return result - - import json - def _serialize_json(obj) -> str: - if isinstance(obj, str): - return obj - import json - def default_serializer(obj): - if hasattr(obj, 'to_struct'): - return obj.to_struct() - else: - raise TypeError("Object of type '%s' is not JSON serializable and does not have .to_struct() method." 
% obj.__class__.__name__) - return json.dumps(obj, default=default_serializer, sort_keys=True) - - import argparse - _parser = argparse.ArgumentParser(prog='Build list', description='Creates a JSON array from multiple items.') - _parser.add_argument("--item-1", dest="item_1", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--item-2", dest="item_2", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--item-3", dest="item_3", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--item-4", dest="item_4", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--item-5", dest="item_5", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = build_list(**_parsed_args) - - _outputs = [_outputs] - - _output_serializers = [ - _serialize_json, - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - if: - cond: {isPresent: item_1} - then: - - --item-1 - - {inputValue: item_1} - - if: - cond: {isPresent: item_2} - then: - - --item-2 - - {inputValue: item_2} - - if: - cond: {isPresent: item_3} - then: - - --item-3 - - {inputValue: item_3} - - if: - cond: {isPresent: item_4} - then: - - --item-4 - - {inputValue: item_4} - - if: - cond: {isPresent: item_5} - then: - - --item-5 - - {inputValue: item_5} - - '----output-paths' - - {outputPath: Output} diff --git a/components/contrib/json/Combine_lists/component.py b/components/contrib/json/Combine_lists/component.py deleted file mode 100644 index 15c2d9ddc64..00000000000 --- a/components/contrib/json/Combine_lists/component.py +++ /dev/null @@ -1,32 +0,0 @@ -from kfp.components import create_component_from_func - - -def combine_lists( - list_1: list = None, - list_2: list = None, - list_3: list = None, - list_4: list = None, - list_5: list = None, -) -> list: - """Combines multiple JSON arrays into one. - - Annotations: - author: Alexey Volkov - """ - result = [] - for list in [list_1, list_2, list_3, list_4, list_5]: - if list is not None: - result.extend(list) - return result - - -if __name__ == '__main__': - combine_lists_op = create_component_from_func( - combine_lists, - base_image='python:3.8', - output_component_file='component.yaml', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/json/Combine_lists/component.yaml", - }, - ) diff --git a/components/contrib/json/Combine_lists/component.yaml b/components/contrib/json/Combine_lists/component.yaml deleted file mode 100644 index 4326612835f..00000000000 --- a/components/contrib/json/Combine_lists/component.yaml +++ /dev/null @@ -1,108 +0,0 @@ -name: Combine lists -description: Combines multiple JSON arrays into one. 
-inputs: -- {name: list_1, type: JsonArray, optional: true} -- {name: list_2, type: JsonArray, optional: true} -- {name: list_3, type: JsonArray, optional: true} -- {name: list_4, type: JsonArray, optional: true} -- {name: list_5, type: JsonArray, optional: true} -outputs: -- {name: Output, type: JsonArray} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/json/Combine_lists/component.yaml' -implementation: - container: - image: python:3.8 - command: - - python3 - - -u - - -c - - | - def combine_lists( - list_1 = None, - list_2 = None, - list_3 = None, - list_4 = None, - list_5 = None, - ): - """Combines multiple JSON arrays into one. - - Annotations: - author: Alexey Volkov - """ - result = [] - for list in [list_1, list_2, list_3, list_4, list_5]: - if list is not None: - result.extend(list) - return result - - import json - def _serialize_json(obj) -> str: - if isinstance(obj, str): - return obj - import json - def default_serializer(obj): - if hasattr(obj, 'to_struct'): - return obj.to_struct() - else: - raise TypeError("Object of type '%s' is not JSON serializable and does not have .to_struct() method." % obj.__class__.__name__) - return json.dumps(obj, default=default_serializer, sort_keys=True) - - import argparse - _parser = argparse.ArgumentParser(prog='Combine lists', description='Combines multiple JSON arrays into one.') - _parser.add_argument("--list-1", dest="list_1", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--list-2", dest="list_2", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--list-3", dest="list_3", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--list-4", dest="list_4", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--list-5", dest="list_5", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = combine_lists(**_parsed_args) - - _outputs = [_outputs] - - _output_serializers = [ - _serialize_json, - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - if: - cond: {isPresent: list_1} - then: - - --list-1 - - {inputValue: list_1} - - if: - cond: {isPresent: list_2} - then: - - --list-2 - - {inputValue: list_2} - - if: - cond: {isPresent: list_3} - then: - - --list-3 - - {inputValue: list_3} - - if: - cond: {isPresent: list_4} - then: - - --list-4 - - {inputValue: list_4} - - if: - cond: {isPresent: list_5} - then: - - --list-5 - - {inputValue: list_5} - - '----output-paths' - - {outputPath: Output} diff --git a/components/contrib/json/Get_element_by_index/component.yaml b/components/contrib/json/Get_element_by_index/component.yaml deleted file mode 100644 index 7f066de58da..00000000000 --- a/components/contrib/json/Get_element_by_index/component.yaml +++ /dev/null @@ -1,25 +0,0 @@ -name: Get element by index from JSON -inputs: -- {name: Json} -- {name: Index, type: Integer} -outputs: -- {name: Output} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 
'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/json/Get_element_by_index/component.yaml' -implementation: - container: - image: stedolan/jq:latest - command: - - sh - - -exc - - | - input_path=$0 - output_path=$1 - index=$2 - mkdir -p "$(dirname "$output_path")" - < "$input_path" jq --raw-output .["$index"] > "$output_path" - - {inputPath: Json} - - {outputPath: Output} - - {inputValue: Index} diff --git a/components/contrib/json/Get_element_by_key/component.yaml b/components/contrib/json/Get_element_by_key/component.yaml deleted file mode 100644 index e277f42ebcf..00000000000 --- a/components/contrib/json/Get_element_by_key/component.yaml +++ /dev/null @@ -1,25 +0,0 @@ -name: Get element by key from JSON -inputs: -- {name: Json} -- {name: Key, type: String} -outputs: -- {name: Output} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/json/Get_element_by_key/component.yaml' -implementation: - container: - image: stedolan/jq:latest - command: - - sh - - -exc - - | - input_path=$0 - output_path=$1 - key=$2 - mkdir -p "$(dirname "$output_path")" - < "$input_path" jq --raw-output '.["'"$key"'"]' > "$output_path" - - {inputPath: Json} - - {outputPath: Output} - - {inputValue: Key} diff --git a/components/contrib/json/Get_keys/component.yaml b/components/contrib/json/Get_keys/component.yaml deleted file mode 100644 index b39479d0fbd..00000000000 --- a/components/contrib/json/Get_keys/component.yaml +++ /dev/null @@ -1,18 +0,0 @@ -name: Get keys from JSON -inputs: -- {name: Json} -outputs: -- {name: Keys} -implementation: - container: - image: stedolan/jq:latest - command: - - sh - - -exc - - | - input_path=$0 - output_path=$1 - mkdir -p "$(dirname "$output_path")" - jq 'keys' "$input_path" > "$output_path" - - {inputPath: Json} - - {outputPath: Keys} diff --git a/components/contrib/json/Query/component.yaml b/components/contrib/json/Query/component.yaml deleted file mode 100644 index 2888511f6fd..00000000000 --- a/components/contrib/json/Query/component.yaml +++ /dev/null @@ -1,28 +0,0 @@ -name: Query JSON using JQ -inputs: -- {name: Json} -- {name: Query, type: String} -- {name: Options, type: String, default: '--raw-output'} -outputs: -- {name: Output} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/json/Query/component.yaml' -implementation: - container: - image: stedolan/jq:latest - command: - - sh - - -exc - - | - input_path=$0 - output_path=$1 - query=$2 - options=$3 - mkdir -p "$(dirname "$output_path")" - < "$input_path" jq $options "$query" > "$output_path" - - {inputPath: Json} - - {outputPath: Output} - - {inputValue: Query} - - {inputValue: Options} diff --git a/components/contrib/keras/Train_classifier/_samples/sample_pipeline.py b/components/contrib/keras/Train_classifier/_samples/sample_pipeline.py deleted file mode 100644 index 287f3fef8a2..00000000000 --- a/components/contrib/keras/Train_classifier/_samples/sample_pipeline.py +++ /dev/null @@ -1,57 +0,0 @@ -import keras -import kfp -from kfp import components - - -chicago_taxi_dataset_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e3337b8bdcd63636934954e592d4b32c95b49129/components/datasets/Chicago%20Taxi/component.yaml') -pandas_transform_csv_op = 
components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/6162d55998b176b50267d351241100bb0ee715bc/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml') -keras_train_classifier_from_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/f6aabf7f10b1f545f1fd5079aa8071845224f8e7/components/keras/Train_classifier/from_CSV/component.yaml') -keras_convert_hdf5_model_to_tf_saved_model_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/51e49282d9511e4b72736c12dc66e37486849c6e/components/_converters/KerasModelHdf5/to_TensorflowSavedModel/component.yaml') - - -number_of_classes = 2 - -# Creating the network -dense_network_with_sigmoid = keras.Sequential(layers=[ - keras.layers.Dense(10, activation=keras.activations.sigmoid), - keras.layers.Dense(number_of_classes, activation=keras.activations.sigmoid), -]) - - -def keras_classifier_pipeline(): - training_data_in_csv = chicago_taxi_dataset_op( - where='trip_start_timestamp >= "2019-01-01" AND trip_start_timestamp < "2019-02-01"', - select='tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total', - limit=1000, - ).output - - training_data_for_classification_in_csv = pandas_transform_csv_op( - table=training_data_in_csv, - transform_code='''df.insert(0, "was_tipped", df["tips"] > 0); del df["tips"]; df = df.fillna(0)''', - ).output - - features_in_csv = pandas_transform_csv_op( - table=training_data_for_classification_in_csv, - transform_code='''df = df.drop(columns=["was_tipped"])''', - ).output - - labels_in_csv = pandas_transform_csv_op( - table=training_data_for_classification_in_csv, - transform_code='''df = df["was_tipped"] * 1''', - ).output - - keras_model_in_hdf5 = keras_train_classifier_from_csv_op( - training_features=features_in_csv, - training_labels=labels_in_csv, - network_json=dense_network_with_sigmoid.to_json(), - learning_rate=0.1, - num_epochs=100, - ).outputs['model'] - - keras_model_in_tf_format = keras_convert_hdf5_model_to_tf_saved_model_op( - model=keras_model_in_hdf5, - ).output - -if __name__ == '__main__': - kfp_endpoint = None - kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func(keras_classifier_pipeline, arguments={}) diff --git a/components/contrib/keras/Train_classifier/from_CSV/component.py b/components/contrib/keras/Train_classifier/from_CSV/component.py deleted file mode 100644 index 216cee163e0..00000000000 --- a/components/contrib/keras/Train_classifier/from_CSV/component.py +++ /dev/null @@ -1,94 +0,0 @@ -from typing import NamedTuple -from kfp.components import create_component_from_func, InputPath, OutputPath - -def keras_train_classifier_from_csv( - training_features_path: InputPath('CSV'), - training_labels_path: InputPath('CSV'), - network_json_path: InputPath('KerasModelJson'), - model_path: OutputPath('KerasModelHdf5'), - loss_name: str = 'categorical_crossentropy', - num_classes: int = None, - optimizer: str = 'rmsprop', - optimizer_config: dict = None, - learning_rate: float = 0.01, - num_epochs: int = 100, - batch_size: int = 32, - metrics: list = ['accuracy'], - random_seed: int = 0, -) -> NamedTuple('Outputs', [ - ('final_loss', float), - ('final_metrics', dict), - ('metrics_history', dict), -]): - '''Trains classifier model using Keras. 
- - Annotations: - author: Alexey Volkov - ''' - from pathlib import Path - - import keras - import numpy - import pandas - import tensorflow - - tensorflow.random.set_seed(random_seed) - numpy.random.seed(random_seed) - - training_features_df = pandas.read_csv(training_features_path) - training_labels_df = pandas.read_csv(training_labels_path) - - x_train = training_features_df.to_numpy() - y_train_labels = training_labels_df.to_numpy() - print('Training features shape:', x_train.shape) - print('Numer of training samples:', x_train.shape[0]) - - # Convert class vectors to binary class matrices. - y_train_one_hot = keras.utils.to_categorical(y_train_labels, num_classes) - - model_json_str = Path(network_json_path).read_text() - model = keras.models.model_from_json(model_json_str) - - model.add(keras.layers.Activation('softmax')) - - # Initializing the optimizer - optimizer_config = optimizer_config or {} - optimizer_config['learning_rate'] = learning_rate - optimizer = keras.optimizers.deserialize({ - 'class_name': optimizer, - 'config': optimizer_config, - }) - - model.compile( - loss=loss_name, - optimizer=optimizer, - metrics=metrics, - ) - - history = model.fit( - x_train, - y_train_one_hot, - batch_size=batch_size, - epochs=num_epochs, - shuffle=True - ) - - model.save(model_path) - - metrics_history = {name: [float(value) for value in values] for name, values in history.history.items()} - final_metrics = {name: values[-1] for name, values in metrics_history.items()} - final_loss = final_metrics['loss'] - return (final_loss, final_metrics, metrics_history) - - -if __name__ == '__main__': - keras_train_classifier_from_csv_op = create_component_from_func( - keras_train_classifier_from_csv, - base_image='tensorflow/tensorflow:2.2.0', - packages_to_install=['keras==2.3.1', 'pandas==1.0.5'], - output_component_file='component.yaml', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/keras/Train_classifier/from_CSV/component.yaml", - }, - ) diff --git a/components/contrib/keras/Train_classifier/from_CSV/component.yaml b/components/contrib/keras/Train_classifier/from_CSV/component.yaml deleted file mode 100644 index 85c73369048..00000000000 --- a/components/contrib/keras/Train_classifier/from_CSV/component.yaml +++ /dev/null @@ -1,235 +0,0 @@ -name: Keras train classifier from csv -description: |- - Trains classifier model using Keras. 
- - Annotations: - author: Alexey Volkov -inputs: -- {name: training_features, type: CSV} -- {name: training_labels, type: CSV} -- {name: network_json, type: KerasModelJson} -- {name: loss_name, type: String, default: categorical_crossentropy, optional: true} -- {name: num_classes, type: Integer, optional: true} -- {name: optimizer, type: String, default: rmsprop, optional: true} -- {name: optimizer_config, type: JsonObject, optional: true} -- {name: learning_rate, type: Float, default: '0.01', optional: true} -- {name: num_epochs, type: Integer, default: '100', optional: true} -- {name: batch_size, type: Integer, default: '32', optional: true} -- {name: metrics, type: JsonArray, default: '["accuracy"]', optional: true} -- {name: random_seed, type: Integer, default: '0', optional: true} -outputs: -- {name: model, type: KerasModelHdf5} -- {name: final_loss, type: Float} -- {name: final_metrics, type: JsonObject} -- {name: metrics_history, type: JsonObject} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/keras/Train_classifier/from_CSV/component.yaml' -implementation: - container: - image: tensorflow/tensorflow:2.2.0 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'keras==2.3.1' 'pandas==1.0.5' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m - pip install --quiet --no-warn-script-location 'keras==2.3.1' 'pandas==1.0.5' - --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def keras_train_classifier_from_csv( - training_features_path, - training_labels_path, - network_json_path, - model_path, - loss_name = 'categorical_crossentropy', - num_classes = None, - optimizer = 'rmsprop', - optimizer_config = None, - learning_rate = 0.01, - num_epochs = 100, - batch_size = 32, - metrics = ['accuracy'], - random_seed = 0, - ): - '''Trains classifier model using Keras. - - Annotations: - author: Alexey Volkov - ''' - from pathlib import Path - - import keras - import numpy - import pandas - import tensorflow - - tensorflow.random.set_seed(random_seed) - numpy.random.seed(random_seed) - - training_features_df = pandas.read_csv(training_features_path) - training_labels_df = pandas.read_csv(training_labels_path) - - x_train = training_features_df.to_numpy() - y_train_labels = training_labels_df.to_numpy() - print('Training features shape:', x_train.shape) - print('Numer of training samples:', x_train.shape[0]) - - # Convert class vectors to binary class matrices. 
- y_train_one_hot = keras.utils.to_categorical(y_train_labels, num_classes) - - model_json_str = Path(network_json_path).read_text() - model = keras.models.model_from_json(model_json_str) - - model.add(keras.layers.Activation('softmax')) - - # Initializing the optimizer - optimizer_config = optimizer_config or {} - optimizer_config['learning_rate'] = learning_rate - optimizer = keras.optimizers.deserialize({ - 'class_name': optimizer, - 'config': optimizer_config, - }) - - model.compile( - loss=loss_name, - optimizer=optimizer, - metrics=metrics, - ) - - history = model.fit( - x_train, - y_train_one_hot, - batch_size=batch_size, - epochs=num_epochs, - shuffle=True - ) - - model.save(model_path) - - metrics_history = {name: [float(value) for value in values] for name, values in history.history.items()} - final_metrics = {name: values[-1] for name, values in metrics_history.items()} - final_loss = final_metrics['loss'] - return (final_loss, final_metrics, metrics_history) - - import json - def _serialize_float(float_value: float) -> str: - if isinstance(float_value, str): - return float_value - if not isinstance(float_value, (float, int)): - raise TypeError('Value "{}" has type "{}" instead of float.'.format(str(float_value), str(type(float_value)))) - return str(float_value) - - def _serialize_json(obj) -> str: - if isinstance(obj, str): - return obj - import json - def default_serializer(obj): - if hasattr(obj, 'to_struct'): - return obj.to_struct() - else: - raise TypeError("Object of type '%s' is not JSON serializable and does not have .to_struct() method." % obj.__class__.__name__) - return json.dumps(obj, default=default_serializer, sort_keys=True) - - import argparse - _parser = argparse.ArgumentParser(prog='Keras train classifier from csv', description='Trains classifier model using Keras.\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--training-features", dest="training_features_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--training-labels", dest="training_labels_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--network-json", dest="network_json_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--loss-name", dest="loss_name", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--num-classes", dest="num_classes", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--optimizer", dest="optimizer", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--optimizer-config", dest="optimizer_config", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--learning-rate", dest="learning_rate", type=float, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--num-epochs", dest="num_epochs", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--batch-size", dest="batch_size", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--metrics", dest="metrics", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--random-seed", dest="random_seed", type=int, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=3) - _parsed_args = vars(_parser.parse_args()) - _output_files = 
_parsed_args.pop("_output_paths", []) - - _outputs = keras_train_classifier_from_csv(**_parsed_args) - - _output_serializers = [ - _serialize_float, - _serialize_json, - _serialize_json, - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - --training-features - - {inputPath: training_features} - - --training-labels - - {inputPath: training_labels} - - --network-json - - {inputPath: network_json} - - if: - cond: {isPresent: loss_name} - then: - - --loss-name - - {inputValue: loss_name} - - if: - cond: {isPresent: num_classes} - then: - - --num-classes - - {inputValue: num_classes} - - if: - cond: {isPresent: optimizer} - then: - - --optimizer - - {inputValue: optimizer} - - if: - cond: {isPresent: optimizer_config} - then: - - --optimizer-config - - {inputValue: optimizer_config} - - if: - cond: {isPresent: learning_rate} - then: - - --learning-rate - - {inputValue: learning_rate} - - if: - cond: {isPresent: num_epochs} - then: - - --num-epochs - - {inputValue: num_epochs} - - if: - cond: {isPresent: batch_size} - then: - - --batch-size - - {inputValue: batch_size} - - if: - cond: {isPresent: metrics} - then: - - --metrics - - {inputValue: metrics} - - if: - cond: {isPresent: random_seed} - then: - - --random-seed - - {inputValue: random_seed} - - --model - - {outputPath: model} - - '----output-paths' - - {outputPath: final_loss} - - {outputPath: final_metrics} - - {outputPath: metrics_history} diff --git a/components/contrib/kfp/Run_component/_samples/sample_pipeline.py b/components/contrib/kfp/Run_component/_samples/sample_pipeline.py deleted file mode 100644 index b04f7310423..00000000000 --- a/components/contrib/kfp/Run_component/_samples/sample_pipeline.py +++ /dev/null @@ -1,20 +0,0 @@ -from kfp import components - -kfp_endpoint = None - - -run_component_or_pipeline_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/2d13b358690c761f64e5b59b70624de8f1f52a29/components/kfp/Run_component/component.yaml') - - -def my_pipeline(): - run_component_or_pipeline_op( - component_url='https://raw.githubusercontent.com/kubeflow/pipelines/68a367de3d1cc435637b0b4e78dcb42600fbbc37/components/basics/Calculate_hash/component.yaml', - arguments=dict( - data='Hello world', - ), - ) - - -if __name__ == '__main__': - import kfp - kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func(my_pipeline, arguments={}) diff --git a/components/contrib/kfp/Run_component/component.py b/components/contrib/kfp/Run_component/component.py deleted file mode 100644 index ab3ac3b58a1..00000000000 --- a/components/contrib/kfp/Run_component/component.py +++ /dev/null @@ -1,50 +0,0 @@ -from typing import NamedTuple - - -def run_component_or_pipeline( - component_url: 'Url', - arguments: dict, - endpoint: str = None, - wait_timeout_seconds: float = None, -) -> NamedTuple('Outputs', [ - ('run_id', str), - ('run_object', 'JsonObject'), # kfp.ApiRunDetails -]): - import json - import os - import kfp - from kfp_server_api import ApiClient - print('Loading component...') - op = kfp.components.load_component_from_url(component_url) - print('Loading component done.') - print('Submitting run...') - if not endpoint: - endpoint = 'http://' + os.environ['ML_PIPELINE_SERVICE_HOST'] + ':' + os.environ['ML_PIPELINE_SERVICE_PORT'] - create_run_result = 
kfp.Client(host=endpoint).create_run_from_pipeline_func(op, arguments=arguments) - run_id = str(create_run_result.run_id) - print('Submitted run: ' + run_id) - run_url = f'{endpoint.rstrip("/")}/#/runs/details/{run_id}' - print(run_url) - print('Waiting for the run to finish...') - run_object = create_run_result.wait_for_run_completion(wait_timeout_seconds) - print('Run has finished.') - # sanitize_for_serialization uses correct field names and properly converts datetime values - run_dict = ApiClient().sanitize_for_serialization(run_object) - return ( - run_id, - json.dumps(run_dict, indent=4), - ) - - -if __name__ == '__main__': - from kfp.components import create_component_from_func - run_component_or_pipeline_op = create_component_from_func( - run_component_or_pipeline, - base_image='python:3.9', - packages_to_install=['kfp==1.4.0'], - output_component_file='component.yaml', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/kfp/Run_component/component.yaml", - }, - ) diff --git a/components/contrib/kfp/Run_component/component.yaml b/components/contrib/kfp/Run_component/component.yaml deleted file mode 100644 index 3d0c99239d9..00000000000 --- a/components/contrib/kfp/Run_component/component.yaml +++ /dev/null @@ -1,121 +0,0 @@ -name: Run component or pipeline -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/kfp/Run_component/component.yaml' -inputs: -- {name: component_url, type: Url} -- {name: arguments, type: JsonObject} -- {name: endpoint, type: String, optional: true} -- {name: wait_timeout_seconds, type: Float, optional: true} -outputs: -- {name: run_id, type: String} -- {name: run_object, type: JsonObject} -implementation: - container: - image: python:3.9 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'kfp==1.4.0' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet - --no-warn-script-location 'kfp==1.4.0' --user) && "$0" "$@" - - sh - - -ec - - | - program_path=$(mktemp) - printf "%s" "$0" > "$program_path" - python3 -u "$program_path" "$@" - - | - def run_component_or_pipeline( - component_url, - arguments, - endpoint = None, - wait_timeout_seconds = None, - ): - import json - import os - import kfp - from kfp_server_api import ApiClient - print('Loading component...') - op = kfp.components.load_component_from_url(component_url) - print('Loading component done.') - print('Submitting run...') - if not endpoint: - endpoint = 'http://' + os.environ['ML_PIPELINE_SERVICE_HOST'] + ':' + os.environ['ML_PIPELINE_SERVICE_PORT'] - create_run_result = kfp.Client(host=endpoint).create_run_from_pipeline_func(op, arguments=arguments) - run_id = str(create_run_result.run_id) - print('Submitted run: ' + run_id) - run_url = f'{endpoint.rstrip("/")}/#/runs/details/{run_id}' - print(run_url) - print('Waiting for the run to finish...') - run_object = create_run_result.wait_for_run_completion(wait_timeout_seconds) - print('Run has finished.') - # sanitize_for_serialization uses correct field names and properly converts datetime values - run_dict = ApiClient().sanitize_for_serialization(run_object) - return ( - run_id, - json.dumps(run_dict, indent=4), - ) - - def _serialize_json(obj) -> str: - if isinstance(obj, str): - return obj - import json - def default_serializer(obj): - if hasattr(obj, 'to_struct'): - 
return obj.to_struct() - else: - raise TypeError("Object of type '%s' is not JSON serializable and does not have .to_struct() method." % obj.__class__.__name__) - return json.dumps(obj, default=default_serializer, sort_keys=True) - - def _serialize_str(str_value: str) -> str: - if not isinstance(str_value, str): - raise TypeError('Value "{}" has type "{}" instead of str.'.format(str(str_value), str(type(str_value)))) - return str_value - - import json - import argparse - _parser = argparse.ArgumentParser(prog='Run component or pipeline', description='') - _parser.add_argument("--component-url", dest="component_url", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--arguments", dest="arguments", type=json.loads, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--endpoint", dest="endpoint", type=str, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--wait-timeout-seconds", dest="wait_timeout_seconds", type=float, required=False, default=argparse.SUPPRESS) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=2) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = run_component_or_pipeline(**_parsed_args) - - _output_serializers = [ - _serialize_str, - _serialize_json, - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - --component-url - - {inputValue: component_url} - - --arguments - - {inputValue: arguments} - - if: - cond: {isPresent: endpoint} - then: - - --endpoint - - {inputValue: endpoint} - - if: - cond: {isPresent: wait_timeout_seconds} - then: - - --wait-timeout-seconds - - {inputValue: wait_timeout_seconds} - - '----output-paths' - - {outputPath: run_id} - - {outputPath: run_object} diff --git a/components/contrib/kubeflow/common/launch_crd.py b/components/contrib/kubeflow/common/launch_crd.py deleted file mode 100644 index 90390e7f08c..00000000000 --- a/components/contrib/kubeflow/common/launch_crd.py +++ /dev/null @@ -1,144 +0,0 @@ -# Copyright 2019 kubeflow.org. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import datetime -import json -import logging -import multiprocessing -import time - -from kubernetes import client as k8s_client -from kubernetes.client import rest - -logger = logging.getLogger(__name__) - -class K8sCR(object): - def __init__(self, group, plural, version, client): - self.group = group - self.plural = plural - self.version = version - self.client = k8s_client.CustomObjectsApi(client) - - def wait_for_condition(self, - namespace, - name, - expected_conditions=[], - timeout=datetime.timedelta(days=365), - polling_interval=datetime.timedelta(seconds=30), - status_callback=None): - """Waits until any of the specified conditions occur. - Args: - namespace: namespace for the CR. - name: Name of the CR. 
- expected_conditions: A list of conditions. Function waits until any of the - supplied conditions is reached. - timeout: How long to wait for the CR. - polling_interval: How often to poll for the status of the CR. - status_callback: (Optional): Callable. If supplied this callable is - invoked after we poll the CR. Callable takes a single argument which - is the CR. - """ - end_time = datetime.datetime.now() + timeout - while True: - try: - results = self.client.get_namespaced_custom_object( - self.group, self.version, namespace, self.plural, name) - except Exception as e: - logger.error("There was a problem waiting for %s/%s %s in namespace %s; Exception: %s", - self.group, self.plural, name, namespace, e) - raise - - if results: - if status_callback: - status_callback(results) - expected, condition = self.is_expected_conditions(results, expected_conditions) - if expected: - logger.info("%s/%s %s in namespace %s has reached the expected condition: %s.", - self.group, self.plural, name, namespace, condition) - return results - else: - if condition: - logger.info("Current condition of %s/%s %s in namespace %s is %s.", - self.group, self.plural, name, namespace, condition) - - if datetime.datetime.now() + polling_interval > end_time: - raise Exception( - "Timeout waiting for {0}/{1} {2} in namespace {3} to enter one of the " - "conditions {4}.".format(self.group, self.plural, name, namespace, expected_conditions)) - - time.sleep(polling_interval.seconds) - - def is_expected_conditions(self, inst, expected_conditions): - conditions = inst.get('status', {}).get("conditions") - if not conditions: - return False, "" - if conditions[-1]["type"] in expected_conditions and conditions[-1]["status"] == "True": - return True, conditions[-1]["type"] - else: - return False, conditions[-1]["type"] - - def create(self, spec): - """Create a CR. - Args: - spec: The spec for the CR. - """ - try: - # Create a Resource - namespace = spec["metadata"].get("namespace", "default") - logger.info("Creating %s/%s %s in namespace %s.", - self.group, self.plural, spec["metadata"]["name"], namespace) - api_response = self.client.create_namespaced_custom_object( - self.group, self.version, namespace, self.plural, spec) - logger.info("Created %s/%s %s in namespace %s.", - self.group, self.plural, spec["metadata"]["name"], namespace) - return api_response - except rest.ApiException as e: - self._log_and_raise_exception(e, "create") - - def delete(self, name, namespace): - try: - body = { - # Set garbage collection so that CR won't be deleted until all - # owned references are deleted. 
- "propagationPolicy": "Foreground", - } - logger.info("Deleteing %s/%s %s in namespace %s.", - self.group, self.plural, name, namespace) - api_response = self.client.delete_namespaced_custom_object( - group=self.group, - version=self.version, - namespace=namespace, - plural=self.plural, - name=name, - body=body) - logger.info("Deleted %s/%s %s in namespace %s.", - self.group, self.plural, name, namespace) - return api_response - except rest.ApiException as e: - self._log_and_raise_exception(e, "delete") - - def _log_and_raise_exception(self, ex, action): - message = "" - if ex.message: - message = ex.message - if ex.body: - try: - body = json.loads(ex.body) - message = body.get("message") - except ValueError: - logger.error("Exception when %s %s/%s: %s", action, self.group, self.plural, ex.body) - raise - - logger.error("Exception when %s %s/%s: %s", action, self.group, self.plural, ex.body) - raise ex - diff --git a/components/contrib/kubeflow/deployer/Dockerfile b/components/contrib/kubeflow/deployer/Dockerfile deleted file mode 100644 index f4a2d1a4d54..00000000000 --- a/components/contrib/kubeflow/deployer/Dockerfile +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -FROM debian - -RUN apt-get update -q && apt-get upgrade -y && \ - apt-get install -y -qq --no-install-recommends \ - apt-transport-https \ - ca-certificates \ - git \ - gnupg \ - lsb-release \ - unzip \ - wget && \ - wget --no-verbose -O /opt/ks_0.13.1_linux_amd64.tar.gz \ - https://github.com/ksonnet/ksonnet/releases/download/v0.13.1/ks_0.13.1_linux_amd64.tar.gz && \ - tar -C /opt -xzf /opt/ks_0.13.1_linux_amd64.tar.gz && \ - cp /opt/ks_0.13.1_linux_amd64/ks /bin/. && \ - rm -f /opt/ks_0.13.1_linux_amd64.tar.gz && \ - wget --no-verbose -O /bin/kubectl \ - https://storage.googleapis.com/kubernetes-release/release/v1.11.2/bin/linux/amd64/kubectl && \ - chmod u+x /bin/kubectl && \ - wget --no-verbose -O /opt/kubernetes_v1.11.2 \ - https://github.com/kubernetes/kubernetes/archive/v1.11.2.tar.gz && \ - mkdir -p /src && \ - tar -C /src -xzf /opt/kubernetes_v1.11.2 && \ - rm -rf /opt/kubernetes_v1.11.2 && \ - wget --no-verbose -O /opt/google-apt-key.gpg \ - https://packages.cloud.google.com/apt/doc/apt-key.gpg && \ - apt-key add /opt/google-apt-key.gpg && \ - export CLOUD_SDK_REPO="cloud-sdk-$(lsb_release -c -s)" && \ - echo "deb https://packages.cloud.google.com/apt $CLOUD_SDK_REPO main" >> \ - /etc/apt/sources.list.d/google-cloud-sdk.list && \ - apt-get update -q && \ - apt-get install -y -qq --no-install-recommends google-cloud-sdk && \ - gcloud config set component_manager/disable_update_check true - -ENV KUBEFLOW_VERSION v0.4.0 - -# Checkout the kubeflow packages at image build time so that we do not -# require calling in to the GitHub API at run time. 
-RUN cd /src && \ - mkdir -p github.com/kubeflow && \ - cd github.com/kubeflow && \ - git clone https://github.com/kubeflow/kubeflow && \ - cd kubeflow && \ - git checkout ${KUBEFLOW_VERSION} - -ADD ./src/deploy.sh /bin/. - -ENTRYPOINT ["/bin/deploy.sh"] diff --git a/components/contrib/kubeflow/deployer/component.yaml b/components/contrib/kubeflow/deployer/component.yaml deleted file mode 100644 index 7394226c2aa..00000000000 --- a/components/contrib/kubeflow/deployer/component.yaml +++ /dev/null @@ -1,23 +0,0 @@ -name: Kubeflow - Serve TF model -description: Serve TensorFlow model using Kubeflow TF-serving -inputs: - - {name: Model dir, type: GCSPath, description: 'Path of GCS directory containing exported Tensorflow model.'} # type: {GCSPath: {path_type: Directory}} - - {name: Cluster name, type: String, default: '', description: 'Kubernetes cluster name where the TS-serving service should be deployed. Uses the current cluster by default.'} - - {name: Namespace, type: String, default: 'kubeflow', description: 'Kubernetes namespace where the TS-serving service should be deployed.'} - - {name: Server name, type: String, default: 'model-server', description: 'TF-serving server name to use when deploying.'} - - {name: PVC name, type: String, default: '' , description: 'Optional PersistentVolumeClaim to use.'} - - {name: Service type, type: String, default: 'ClusterIP' , description: 'Optional Service type to use, two options: "ClusterIP" (default if not set) and "NodePort".'} -#outputs: -# - {name: Endppoint URI, type: Serving URI, description: 'URI of the deployed prediction service..'} -implementation: - container: - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:1.8.0-alpha.0 - command: [/bin/deploy.sh] - args: [ - --model-export-path, {inputValue: Model dir}, - --cluster-name, {inputValue: Cluster name}, - --namespace, {inputValue: Namespace}, - --server-name, {inputValue: Server name}, - --pvc-name, {inputValue: PVC name}, - --service-type, {inputValue: Service type}, - ] diff --git a/components/contrib/kubeflow/deployer/src/deploy.sh b/components/contrib/kubeflow/deployer/src/deploy.sh deleted file mode 100755 index 43f651ecfd3..00000000000 --- a/components/contrib/kubeflow/deployer/src/deploy.sh +++ /dev/null @@ -1,210 +0,0 @@ -#!/bin/bash -e - -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -x - -KUBERNETES_NAMESPACE="${KUBERNETES_NAMESPACE:-kubeflow}" -SERVER_NAME="${SERVER_NAME:-model-server}" - -while (($#)); do - case $1 in - "--model-export-path") - shift - MODEL_EXPORT_PATH="$1" - shift - ;; - "--cluster-name") - shift - CLUSTER_NAME="$1" - shift - ;; - "--namespace") - shift - KUBERNETES_NAMESPACE="$1" - shift - ;; - "--server-name") - shift - SERVER_NAME="$1" - shift - ;; - "--pvc-name") - shift - PVC_NAME="$1" - shift - ;; - "--service-type") - shift - SERVICE_TYPE="$1" - shift - ;; - *) - echo "Unknown argument: '$1'" - exit 1 - ;; - esac -done - -if [ -z "${MODEL_EXPORT_PATH}" ]; then - echo "You must specify a path to the saved model" - exit 1 -fi - -echo "Deploying the model '${MODEL_EXPORT_PATH}'" - -if [ -z "${CLUSTER_NAME}" ]; then - CLUSTER_NAME=$(wget -q -O- --header="Metadata-Flavor: Google" http://metadata.google.internal/computeMetadata/v1/instance/attributes/cluster-name) -fi - -# Ensure the server name is not more than 63 characters. -SERVER_NAME="${SERVER_NAME:0:63}" -# Trim any trailing hyphens from the server name. -while [[ "${SERVER_NAME:(-1)}" == "-" ]]; do SERVER_NAME="${SERVER_NAME::-1}"; done - -echo "Deploying ${SERVER_NAME} to the cluster ${CLUSTER_NAME}" - -# Connect kubectl to the local cluster -kubectl config set-cluster "${CLUSTER_NAME}" --server=https://kubernetes.default --certificate-authority=/var/run/secrets/kubernetes.io/serviceaccount/ca.crt -kubectl config set-credentials pipeline --token "$(cat /var/run/secrets/kubernetes.io/serviceaccount/token)" -kubectl config set-context kubeflow --cluster "${CLUSTER_NAME}" --user pipeline -kubectl config use-context kubeflow - -# Configure and deploy the TF serving app -cd /src/github.com/kubeflow/kubeflow -git checkout ${KUBEFLOW_VERSION} - -cd /opt -echo "Initializing KSonnet app..." -ks init tf-serving-app -cd tf-serving-app/ - -if [ -n "${KUBERNETES_NAMESPACE}" ]; then - echo "Setting Kubernetes namespace: ${KUBERNETES_NAMESPACE} ..." - ks env set default --namespace "${KUBERNETES_NAMESPACE}" -fi - -echo "Installing Kubeflow packages..." -ks registry add kubeflow /src/github.com/kubeflow/kubeflow/kubeflow -ks pkg install kubeflow/common@${KUBEFLOW_VERSION} -ks pkg install kubeflow/tf-serving@${KUBEFLOW_VERSION} - -echo "Generating the TF Serving config..." -ks generate tf-serving server --name="${SERVER_NAME}" -ks param set server modelPath "${MODEL_EXPORT_PATH}" - -# service type: ClusterIP or NodePort -if [ -n "${SERVICE_TYPE}" ];then - ks param set server serviceType "${SERVICE_TYPE}" -fi - -# support local storage to deploy tf-serving. -if [ -n "${PVC_NAME}" ];then - # TODO: Remove modelStorageType setting after the hard code nfs was removed at - # https://github.com/kubeflow/kubeflow/blob/v0.4-branch/kubeflow/tf-serving/tf-serving.libsonnet#L148-L151 - ks param set server modelStorageType nfs - ks param set server nfsPVC "${PVC_NAME}" -fi - -echo "Deploying the TF Serving service..." -ks apply default -c server - -# Wait for the deployment to have at least one available replica -echo "Waiting for the TF Serving deployment to show up..." -timeout="1000" -start_time=`date +%s` -while [[ $(kubectl get deploy --namespace "${KUBERNETES_NAMESPACE}" --selector=app="${SERVER_NAME}" 2>&1|wc -l) != "2" ]];do - current_time=`date +%s` - elapsed_time=$(expr $current_time + 1 - $start_time) - if [[ $elapsed_time -gt $timeout ]];then - echo "timeout" - exit 1 - fi - sleep 2 -done - -echo "Waiting for the valid workflow json..." 
-start_time=`date +%s` -exit_code="1" -while [[ $exit_code != "0" ]];do - kubectl get deploy --namespace "${KUBERNETES_NAMESPACE}" --selector=app="${SERVER_NAME}" --output=jsonpath='{.items[0].status.availableReplicas}' - exit_code=$? - current_time=`date +%s` - elapsed_time=$(expr $current_time + 1 - $start_time) - if [[ $elapsed_time -gt $timeout ]];then - echo "timeout" - exit 1 - fi - sleep 2 -done - -echo "Waiting for the TF Serving deployment to have at least one available replica..." -start_time=`date +%s` -while [[ $(kubectl get deploy --namespace "${KUBERNETES_NAMESPACE}" --selector=app="${SERVER_NAME}" --output=jsonpath='{.items[0].status.availableReplicas}') < "1" ]]; do - current_time=`date +%s` - elapsed_time=$(expr $current_time + 1 - $start_time) - if [[ $elapsed_time -gt $timeout ]];then - echo "timeout" - exit 1 - fi - sleep 5 -done - -echo "Obtaining the pod name..." -start_time=`date +%s` -pod_name="" -while [[ $pod_name == "" ]];do - pod_name=$(kubectl get pods --namespace "${KUBERNETES_NAMESPACE}" --selector=app="${SERVER_NAME}" --template '{{range .items}}{{.metadata.name}}{{"\n"}}{{end}}') - current_time=`date +%s` - elapsed_time=$(expr $current_time + 1 - $start_time) - if [[ $elapsed_time -gt $timeout ]];then - echo "timeout" - exit 1 - fi - sleep 2 -done -echo "Pod name is: " $pod_name - -# Wait for the pod container to start running -echo "Waiting for the TF Serving pod to start running..." -start_time=`date +%s` -exit_code="1" -while [[ $exit_code != "0" ]];do - kubectl get po ${pod_name} --namespace "${KUBERNETES_NAMESPACE}" -o jsonpath='{.status.containerStatuses[0].state.running}' - exit_code=$? - current_time=`date +%s` - elapsed_time=$(expr $current_time + 1 - $start_time) - if [[ $elapsed_time -gt $timeout ]];then - echo "timeout" - exit 1 - fi - sleep 2 -done - -start_time=`date +%s` -while [ -z "$(kubectl get po ${pod_name} --namespace "${KUBERNETES_NAMESPACE}" -o jsonpath='{.status.containerStatuses[0].state.running}')" ]; do - current_time=`date +%s` - elapsed_time=$(expr $current_time + 1 - $start_time) - if [[ $elapsed_time -gt $timeout ]];then - echo "timeout" - exit 1 - fi - sleep 5 -done - -# Wait a little while and then grab the logs of the running server -sleep 10 -echo "Logs from the TF Serving pod:" -kubectl logs ${pod_name} --namespace "${KUBERNETES_NAMESPACE}" diff --git a/components/contrib/kubeflow/dnntrainer/Dockerfile b/components/contrib/kubeflow/dnntrainer/Dockerfile deleted file mode 100644 index 16c5f36d46c..00000000000 --- a/components/contrib/kubeflow/dnntrainer/Dockerfile +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -ARG TF_TAG -FROM tensorflow/tensorflow:$TF_TAG - -COPY requirements.txt . 
-RUN python3 -m pip install -r \ - requirements.txt --quiet --no-cache-dir \ - && rm -f requirements.txt - -ADD build /ml - -WORKDIR /ml - -ENTRYPOINT ["python", "-m", "trainer.task"] diff --git a/components/contrib/kubeflow/dnntrainer/build_image.sh b/components/contrib/kubeflow/dnntrainer/build_image.sh deleted file mode 100755 index ff418c9ea7f..00000000000 --- a/components/contrib/kubeflow/dnntrainer/build_image.sh +++ /dev/null @@ -1,71 +0,0 @@ -#!/bin/bash -e -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -while getopts ":hp:t:i:b:l:" opt; do - case "${opt}" in - h) echo "-p: project name" - echo "-t: tag name" - echo "-i: image name. If provided, project name and tag name are not necessary" - echo "-b: tensorflow base image tag. Optional. The value can be tags listed under \ - https://hub.docker.com/r/tensorflow/tensorflow/tags. Defaults to '2.3.0'." - echo "-l: local image name. Optional. Defaults to 'ml-pipeline-kubeflow-tf-trainer'" - exit - ;; - p) PROJECT_ID=${OPTARG} - ;; - t) TAG_NAME=${OPTARG} - ;; - i) IMAGE_NAME=${OPTARG} - ;; - b) TF_BASE_TAG=${OPTARG} - ;; - l) LOCAL_IMAGE_NAME=${OPTARG} - ;; - \? ) echo "Usage: cmd [-p] project [-t] tag [-i] image [-b] base image tag [l] local image" - exit - ;; - esac -done - -set -x -if [ -z "${LOCAL_IMAGE_NAME}" ]; then - LOCAL_IMAGE_NAME=ml-pipeline-kubeflow-tf-trainer -fi - -if [ -z "${PROJECT_ID}" ]; then - PROJECT_ID=$(gcloud config config-helper --format "value(configuration.properties.core.project)") -fi - -if [ -z "${TAG_NAME}" ]; then - TAG_NAME=$(date +v%Y%m%d)-$(git describe --tags --always --dirty)-$(git diff | shasum -a256 | cut -c -6) -fi - -if [ -z "${TF_BASE_TAG}" ]; then - TF_BASE_TAG=2.3.0 -fi - -mkdir -p ./build -cp -R ./src/ ./build/ - -docker build --build-arg TF_TAG=${TF_BASE_TAG} -t ${LOCAL_IMAGE_NAME} . -if [ -z "${IMAGE_NAME}" ]; then - docker tag ${LOCAL_IMAGE_NAME} gcr.io/${PROJECT_ID}/${LOCAL_IMAGE_NAME}:${TAG_NAME} - docker push gcr.io/${PROJECT_ID}/${LOCAL_IMAGE_NAME}:${TAG_NAME} -else - docker tag ${LOCAL_IMAGE_NAME} "${IMAGE_NAME}" - docker push "${IMAGE_NAME}" -fi - -rm -rf ./build diff --git a/components/contrib/kubeflow/dnntrainer/component.yaml b/components/contrib/kubeflow/dnntrainer/component.yaml deleted file mode 100644 index 8af97b6f049..00000000000 --- a/components/contrib/kubeflow/dnntrainer/component.yaml +++ /dev/null @@ -1,34 +0,0 @@ -name: Train FC DNN using TF -description: Trains fully-connected neural network using Tensorflow -inputs: - - {name: Transformed data dir, type: GCSPath, description: 'GCS path containing tf-transformed training and eval data.'} # type: {GCSPath: {path_type: Directory}} - - {name: Schema, type: GCSPath, description: 'GCS json schema file path.'} # type: {GCSPath: {data_type: JSON}} - - {name: Learning rate, type: Float, default: '0.1', description: 'Learning rate for training.'} - - {name: Optimizer, type: String, default: 'Adagrad', description: 'Optimizer for training. Valid values are: Adam, SGD, Adagrad. 
If not provided, tf.estimator default will be used.'} - - {name: Hidden layer size, type: String, default: '100', description: 'Comma-separated hidden layer sizes. For example "200,100,50".'} - - {name: Steps, type: Integer, description: 'Maximum number of training steps to perform. If unspecified, will honor epochs.'} - #- {name: Epochs, type: Integer, default: '', description: 'Maximum number of training data epochs on which to train. If both "steps" and "epochs" are specified, the training job will run for "steps" or "epochs", whichever occurs first.'} - - {name: Target, type: String, description: 'Name of the column for prediction target.'} - - {name: Preprocessing module, type: GCSPath, default: '', description: 'GCS path to a python file defining "preprocess" and "get_feature_columns" functions.'} # type: {GCSPath: {data_type: Python}} - - {name: Training output dir, type: GCSPath, description: 'GCS or local directory.'} # type: {GCSPath: {path_type: Directory}} -outputs: - - {name: Training output dir, type: GCSPath, description: 'GCS or local directory.'} # type: {GCSPath: {path_type: Directory}} - - {name: MLPipeline UI metadata, type: UI metadata} -implementation: - container: - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:1.8.0-alpha.0 - command: [python2, -m, trainer.task] - args: [ - --transformed-data-dir, {inputValue: Transformed data dir}, - --schema, {inputValue: Schema}, - --learning-rate, {inputValue: Learning rate}, - --optimizer, {inputValue: Optimizer}, - --hidden-layer-size, {inputValue: Hidden layer size}, - --steps, {inputValue: Steps}, -# --epochs, {inputValue: Epochs}, - --target, {inputValue: Target}, - --preprocessing-module, {inputValue: Preprocessing module}, - --job-dir, {inputValue: Training output dir}, - --exported-model-dir-uri-output-path, {outputPath: Training output dir}, - --ui-metadata-output-path, {outputPath: MLPipeline UI metadata}, - ] diff --git a/components/contrib/kubeflow/dnntrainer/requirements.txt b/components/contrib/kubeflow/dnntrainer/requirements.txt deleted file mode 100644 index f8693cbf896..00000000000 --- a/components/contrib/kubeflow/dnntrainer/requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -pyyaml==3.12 -six==1.11.0 -tensorflow-transform==0.23.0 -tensorflow-model-analysis==0.23.0 diff --git a/components/contrib/kubeflow/dnntrainer/src/__init__.py b/components/contrib/kubeflow/dnntrainer/src/__init__.py deleted file mode 100644 index 244cba40b80..00000000000 --- a/components/contrib/kubeflow/dnntrainer/src/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- diff --git a/components/contrib/kubeflow/dnntrainer/src/setup.py b/components/contrib/kubeflow/dnntrainer/src/setup.py deleted file mode 100644 index cbd75c059fa..00000000000 --- a/components/contrib/kubeflow/dnntrainer/src/setup.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from setuptools import setup, find_packages - - -setup( - name='trainer', - version='1.0.0', - packages=find_packages(), - description='DNN Trainer', - author='Google', - keywords=[ - ], - license="Apache Software License", - long_description=""" - """, - install_requires=[ - 'tensorflow==1.15.4', - ], - package_data={ - }, - data_files=[], -) diff --git a/components/contrib/kubeflow/dnntrainer/src/trainer/__init__.py b/components/contrib/kubeflow/dnntrainer/src/trainer/__init__.py deleted file mode 100644 index 244cba40b80..00000000000 --- a/components/contrib/kubeflow/dnntrainer/src/trainer/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - diff --git a/components/contrib/kubeflow/dnntrainer/src/trainer/task.py b/components/contrib/kubeflow/dnntrainer/src/trainer/task.py deleted file mode 100644 index 76c62fdb198..00000000000 --- a/components/contrib/kubeflow/dnntrainer/src/trainer/task.py +++ /dev/null @@ -1,360 +0,0 @@ -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import argparse -import json -import os -from pathlib import Path -import tensorflow as tf -import tensorflow_transform as tft -import tensorflow_model_analysis as tfma - -from tensorflow.python.lib.io import file_io -from tensorflow_transform.beam.tft_beam_io import transform_fn_io -from tensorflow_transform.saved import input_fn_maker -from tensorflow_transform.saved import saved_transform_io -from tensorflow_transform.tf_metadata import dataset_metadata -from tensorflow_transform.tf_metadata import dataset_schema -from tensorflow_transform.tf_metadata import metadata_io - - -IMAGE_EMBEDDING_SIZE = 2048 -CLASSIFICATION_TARGET_TYPES = [tf.bool, tf.int32, tf.int64] -REGRESSION_TARGET_TYPES = [tf.float32, tf.float64] -TARGET_TYPES = CLASSIFICATION_TARGET_TYPES + REGRESSION_TARGET_TYPES - - -def parse_arguments(): - parser = argparse.ArgumentParser() - parser.add_argument('--job-dir', - type=str, - required=True, - help='GCS or local directory.') - parser.add_argument('--transformed-data-dir', - type=str, - required=True, - help='GCS path containing tf-transformed training and eval data.') - parser.add_argument('--schema', - type=str, - required=True, - help='GCS json schema file path.') - parser.add_argument('--target', - type=str, - required=True, - help='The name of the column to predict in training data.') - parser.add_argument('--learning-rate', - type=float, - default=0.1, - help='Learning rate for training.') - parser.add_argument('--optimizer', - choices=['Adam', 'SGD', 'Adagrad'], - default='Adagrad', - help='Optimizer for training. If not provided, ' - 'tf.estimator default will be used.') - parser.add_argument('--hidden-layer-size', - type=str, - default='100', - help='comma separated hidden layer sizes. For example "200,100,50".') - parser.add_argument('--steps', - type=int, - help='Maximum number of training steps to perform. If unspecified, will ' - 'honor epochs.') - parser.add_argument('--epochs', - type=int, - help='Maximum number of training data epochs on which to train. If ' - 'both "steps" and "epochs" are specified, the training ' - 'job will run for "steps" or "epochs", whichever occurs first.') - parser.add_argument('--preprocessing-module', - type=str, - required=False, - help=('GCS path to a python file defining ' - '"preprocess" and "get_feature_columns" functions.')) - parser.add_argument('--exported-model-dir-uri-output-path', - type=str, - default='/output.txt', - help='Local output path for the file containing exported model directory URI.') - parser.add_argument('--ui-metadata-output-path', - type=str, - default='/mlpipeline-ui-metadata.json', - help='Local output path for the file containing UI metadata JSON structure.') - - args = parser.parse_args() - args.hidden_layer_size = [int(x.strip()) for x in args.hidden_layer_size.split(',')] - return args - - -def is_classification(transformed_data_dir, target): - """Whether the scenario is classification (vs regression). - - Returns: - The number of classes if the target represents a classification - problem, or None if it does not. - """ - transformed_metadata = metadata_io.read_metadata( - os.path.join(transformed_data_dir, transform_fn_io.TRANSFORMED_METADATA_DIR)) - transformed_feature_spec = transformed_metadata.schema.as_feature_spec() - if target not in transformed_feature_spec: - raise ValueError('Cannot find target "%s" in transformed data.' 
% target) - - feature = transformed_feature_spec[target] - if (not isinstance(feature, tf.FixedLenFeature) or feature.shape != [] or - feature.dtype not in TARGET_TYPES): - raise ValueError('target "%s" is of invalid type.' % target) - - if feature.dtype in CLASSIFICATION_TARGET_TYPES: - if feature.dtype == tf.bool: - return 2 - return get_vocab_size(transformed_data_dir, target) - - return None - - -def make_tft_input_metadata(schema): - """Create tf-transform metadata from given schema.""" - tft_schema = {} - - for col_schema in schema: - col_type = col_schema['type'] - col_name = col_schema['name'] - if col_type == 'NUMBER': - tft_schema[col_name] = dataset_schema.ColumnSchema( - tf.float32, [], dataset_schema.FixedColumnRepresentation(default_value=0.0)) - elif col_type in ['CATEGORY', 'TEXT', 'IMAGE_URL', 'KEY']: - tft_schema[col_name] = dataset_schema.ColumnSchema( - tf.string, [], dataset_schema.FixedColumnRepresentation(default_value='')) - return dataset_metadata.DatasetMetadata(dataset_schema.Schema(tft_schema)) - - -def make_training_input_fn(transformed_data_dir, mode, batch_size, target_name, num_epochs=None): - """Creates an input function reading from transformed data. - Args: - transformed_data_dir: Directory to read transformed data and metadata from. - mode: 'train' or 'eval'. - batch_size: Batch size. - target_name: name of the target column. - num_epochs: number of training data epochs. - Returns: - The input function for training or eval. - """ - transformed_metadata = metadata_io.read_metadata( - os.path.join(transformed_data_dir, transform_fn_io.TRANSFORMED_METADATA_DIR)) - transformed_feature_spec = transformed_metadata.schema.as_feature_spec() - - def _input_fn(): - """Input function for training and eval.""" - epochs = 1 if mode == 'eval' else num_epochs - transformed_features = tf.contrib.learn.io.read_batch_features( - os.path.join(transformed_data_dir, mode + '-*'), - batch_size, transformed_feature_spec, tf.TFRecordReader, num_epochs=epochs) - - # Extract features and label from the transformed tensors. - transformed_labels = transformed_features.pop(target_name) - return transformed_features, transformed_labels - - return _input_fn - - -def make_serving_input_fn(transformed_data_dir, schema, target_name): - """Creates an input function reading from transformed data. - Args: - transformed_data_dir: Directory to read transformed data and metadata from. - schema: the raw data schema. - target_name: name of the target column. - Returns: - The input function for serving. 
- """ - raw_metadata = make_tft_input_metadata(schema) - raw_feature_spec = raw_metadata.schema.as_feature_spec() - - raw_keys = [x['name'] for x in schema] - raw_keys.remove(target_name) - serving_input_fn = input_fn_maker.build_csv_transforming_serving_input_receiver_fn( - raw_metadata=raw_metadata, - transform_savedmodel_dir=transformed_data_dir + '/transform_fn', - raw_keys=raw_keys) - - return serving_input_fn - - -def get_vocab_size(transformed_data_dir, feature_name): - """Get vocab size of a given text or category column.""" - vocab_file = os.path.join(transformed_data_dir, - transform_fn_io.TRANSFORM_FN_DIR, - 'assets', - 'vocab_' + feature_name) - with file_io.FileIO(vocab_file, 'r') as f: - return sum(1 for _ in f) - - -def build_feature_columns(schema, transformed_data_dir, target): - """Build feature columns that tf.estimator expects.""" - - feature_columns = [] - for entry in schema: - name = entry['name'] - datatype = entry['type'] - if name == target: - continue - - if datatype == 'NUMBER': - feature_columns.append(tf.feature_column.numeric_column(name, shape=())) - elif datatype == 'IMAGE_URL': - feature_columns.append(tf.feature_column.numeric_column(name, shape=(2048))) - elif datatype == 'CATEGORY': - vocab_size = get_vocab_size(transformed_data_dir, name) - category_column = tf.feature_column.categorical_column_with_identity(name, num_buckets=vocab_size) - indicator_column = tf.feature_column.indicator_column(category_column) - feature_columns.append(indicator_column) - elif datatype == 'TEXT': - vocab_size = get_vocab_size(transformed_data_dir, name) - indices_column = tf.feature_column.categorical_column_with_identity(name + '_indices', num_buckets=vocab_size + 1) - weighted_column = tf.feature_column.weighted_categorical_column(indices_column, name + '_weights') - indicator_column = tf.feature_column.indicator_column(weighted_column) - feature_columns.append(indicator_column) - - return feature_columns - - -def get_estimator(schema, transformed_data_dir, target_name, output_dir, hidden_units, - optimizer, learning_rate, feature_columns): - """Get proper tf.estimator (DNNClassifier or DNNRegressor).""" - optimizer = tf.train.AdagradOptimizer(learning_rate) - if optimizer == 'Adam': - optimizer = tf.train.AdamOptimizer(learning_rate) - elif optimizer == 'SGD': - optimizer = tf.train.GradientDescentOptimizer(learning_rate) - - # Set how often to run checkpointing in terms of steps. - config = tf.contrib.learn.RunConfig(save_checkpoints_steps=1000) - n_classes = is_classification(transformed_data_dir, target_name) - if n_classes: - estimator = tf.estimator.DNNClassifier( - feature_columns=feature_columns, - hidden_units=hidden_units, - n_classes=n_classes, - config=config, - model_dir=output_dir) - else: - estimator = tf.estimator.DNNRegressor( - feature_columns=feature_columns, - hidden_units=hidden_units, - config=config, - model_dir=output_dir, - optimizer=optimizer) - - return estimator - - -def eval_input_receiver_fn(tf_transform_dir, schema, target): - """Build everything needed for the tf-model-analysis to run the model. - Args: - tf_transform_dir: directory in which the tf-transform model was written - during the preprocessing step. - schema: the raw data schema. - target: name of the target column. - Returns: - EvalInputReceiver function, which contains: - - Tensorflow graph which parses raw untranformed features, applies the - tf-transform preprocessing operators. - - Set of raw, untransformed features. - - Label against which predictions will be compared. 
- """ - raw_metadata = make_tft_input_metadata(schema) - raw_feature_spec = raw_metadata.schema.as_feature_spec() - serialized_tf_example = tf.placeholder( - dtype=tf.string, shape=[None], name='input_example_tensor') - features = tf.parse_example(serialized_tf_example, raw_feature_spec) - _, transformed_features = ( - saved_transform_io.partially_apply_saved_transform( - os.path.join(tf_transform_dir, transform_fn_io.TRANSFORM_FN_DIR), - features)) - receiver_tensors = {'examples': serialized_tf_example} - return tfma.export.EvalInputReceiver( - features=transformed_features, - receiver_tensors=receiver_tensors, - labels=transformed_features[target]) - - -def main(): - # configure the TF_CONFIG such that the tensorflow recoginzes the MASTER in the yaml file as the chief. - # TODO: kubeflow is working on fixing the problem and this TF_CONFIG can be - # removed then. - - args = parse_arguments() - tf.logging.set_verbosity(tf.logging.INFO) - - schema = json.loads(file_io.read_file_to_string(args.schema)) - feature_columns = None - if args.preprocessing_module: - module_dir = os.path.abspath(os.path.dirname(__file__)) - preprocessing_module_path = os.path.join(module_dir, 'preprocessing.py') - with open(preprocessing_module_path, 'w+') as preprocessing_file: - preprocessing_file.write( - file_io.read_file_to_string(args.preprocessing_module)) - import preprocessing - feature_columns = preprocessing.get_feature_columns(args.transformed_data_dir) - else: - feature_columns = build_feature_columns(schema, args.transformed_data_dir, args.target) - - estimator = get_estimator(schema, args.transformed_data_dir, args.target, args.job_dir, - args.hidden_layer_size, args.optimizer, args.learning_rate, - feature_columns) - - # TODO: Expose batch size. - train_input_fn = make_training_input_fn( - args.transformed_data_dir, - 'train', - 32, - args.target, - num_epochs=args.epochs) - - eval_input_fn = make_training_input_fn( - args.transformed_data_dir, - 'eval', - 32, - args.target) - serving_input_fn = make_serving_input_fn( - args.transformed_data_dir, - schema, - args.target) - - exporter = tf.estimator.FinalExporter('export', serving_input_fn) - train_spec = tf.estimator.TrainSpec(input_fn=train_input_fn, max_steps=args.steps) - eval_spec = tf.estimator.EvalSpec(input_fn=eval_input_fn, exporters=[exporter]) - tf.estimator.train_and_evaluate(estimator, train_spec, eval_spec) - - eval_model_dir = os.path.join(args.job_dir, 'tfma_eval_model_dir') - tfma.export.export_eval_savedmodel( - estimator=estimator, - export_dir_base=eval_model_dir, - eval_input_receiver_fn=( - lambda: eval_input_receiver_fn( - args.transformed_data_dir, schema, args.target))) - - - metadata = { - 'outputs' : [{ - 'type': 'tensorboard', - 'source': args.job_dir, - }] - } - Path(args.ui_metadata_output_path).parent.mkdir(parents=True, exist_ok=True) - Path(args.ui_metadata_output_path).write_text(json.dumps(metadata)) - - Path(args.exported_model_dir_uri_output_path).parent.mkdir(parents=True, exist_ok=True) - Path(args.exported_model_dir_uri_output_path).write_text(args.job_dir) - -if __name__ == '__main__': - main() diff --git a/components/contrib/kubeflow/katib-launcher/Dockerfile b/components/contrib/kubeflow/katib-launcher/Dockerfile deleted file mode 100644 index 4a51196b9bb..00000000000 --- a/components/contrib/kubeflow/katib-launcher/Dockerfile +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2020 The Kubeflow Authors. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -FROM python:3.6 - -ENV APP_HOME /app -WORKDIR ${APP_HOME} - -ADD . ${APP_HOME} -RUN pip install --no-cache-dir -r requirements.txt - -ENTRYPOINT ["python", "src/launch_experiment.py"] diff --git a/components/contrib/kubeflow/katib-launcher/build_image.sh b/components/contrib/kubeflow/katib-launcher/build_image.sh deleted file mode 100755 index a66695bc353..00000000000 --- a/components/contrib/kubeflow/katib-launcher/build_image.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash -e -# Copyright 2020 The Kubeflow Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -IMAGE="docker.io/kubeflowkatib/kubeflow-pipelines-launcher" - -echo "Releasing image for the Katib Pipeline Launcher..." -echo -e "Image: ${IMAGE}\n" - -docker build . 
-f Dockerfile -t ${IMAGE} -docker push ${IMAGE} diff --git a/components/contrib/kubeflow/katib-launcher/component.yaml b/components/contrib/kubeflow/katib-launcher/component.yaml deleted file mode 100644 index 0ffba408833..00000000000 --- a/components/contrib/kubeflow/katib-launcher/component.yaml +++ /dev/null @@ -1,22 +0,0 @@ -name: Katib - Launch Experiment -description: Katib Experiment launcher -inputs: -- {name: Experiment Name, type: String, default: '', description: 'Experiment name'} -- {name: Experiment Namespace, type: String, default: anonymous, description: 'Experiment namespace'} -- {name: Experiment Spec, type: JsonObject, default: '{}', description: 'Experiment specification in dict format'} -- {name: Experiment Timeout Minutes, type: Integer, default: 1440, description: 'Time in minutes to wait for the Experiment to complete'} -- {name: Delete Finished Experiment, type: Bool, default: 'True', description: 'Whether to delete the Experiment after it is finished'} -outputs: -- {name: Best Parameter Set, type: JsonObject, description: 'The hyperparameter set of the best Experiment Trial'} -implementation: - container: - image: docker.io/kubeflowkatib/kubeflow-pipelines-launcher - command: [python, src/launch_experiment.py] - args: [ - --experiment-name, {inputValue: Experiment Name}, - --experiment-namespace, {inputValue: Experiment Namespace}, - --experiment-spec, {inputValue: Experiment Spec}, - --experiment-timeout-minutes, {inputValue: Experiment Timeout Minutes}, - --delete-after-done, {inputValue: Delete Finished Experiment}, - --output-file, {outputPath: Best Parameter Set}, - ] diff --git a/components/contrib/kubeflow/katib-launcher/requirements.txt b/components/contrib/kubeflow/katib-launcher/requirements.txt deleted file mode 100644 index f72df500265..00000000000 --- a/components/contrib/kubeflow/katib-launcher/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -kubernetes==10.0.1 -kubeflow-katib==0.10.1 diff --git a/components/contrib/kubeflow/katib-launcher/src/__init__.py b/components/contrib/kubeflow/katib-launcher/src/__init__.py deleted file mode 100644 index 96e694fac6f..00000000000 --- a/components/contrib/kubeflow/katib-launcher/src/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2020 The Kubeflow Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/components/contrib/kubeflow/katib-launcher/src/launch_experiment.py b/components/contrib/kubeflow/katib-launcher/src/launch_experiment.py deleted file mode 100644 index 142d8a34500..00000000000 --- a/components/contrib/kubeflow/katib-launcher/src/launch_experiment.py +++ /dev/null @@ -1,166 +0,0 @@ -# Copyright 2020 The Kubeflow Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
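As a usage sketch, the launcher component defined by the `component.yaml` above could be wired into a KFP v1 pipeline roughly as follows. The component path is the in-repo location being removed here, and the experiment spec is deliberately truncated (a real `V1beta1ExperimentSpec` also needs `parameters` and a `trialTemplate`), so treat both as illustrative.

```python
# Hypothetical pipeline wiring for the Katib launcher component above.
import json

import kfp.dsl as dsl
from kfp import components

katib_launcher_op = components.load_component_from_file(
    "components/contrib/kubeflow/katib-launcher/component.yaml")

@dsl.pipeline(name="katib-hpo", description="Launch a Katib Experiment from KFP.")
def katib_pipeline(experiment_name: str = "random-search"):
    experiment_spec = {
        "objective": {"type": "maximize", "objectiveMetricName": "accuracy"},
        "algorithm": {"algorithmName": "random"},
        "maxTrialCount": 12,
        # parameters and trialTemplate omitted for brevity
    }
    katib_launcher_op(
        experiment_name=experiment_name,
        experiment_namespace="anonymous",
        experiment_spec=json.dumps(experiment_spec),
        experiment_timeout_minutes=60,
        delete_finished_experiment=True,
    )
```

The best trial's hyperparameters are then available to downstream steps through the op's `Best Parameter Set` output.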
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse -import datetime -from distutils.util import strtobool -import json -import os -import logging -import time - -from kubernetes.client import V1ObjectMeta - -from kubeflow.katib import KatibClient -from kubeflow.katib import ApiClient -from kubeflow.katib import V1beta1Experiment - -logger = logging.getLogger() -logging.basicConfig(level=logging.INFO) - -FINISH_CONDITIONS = ["Succeeded", "Failed"] - - -class JSONObject(object): - """ This class is needed to deserialize input JSON. - Katib API client expects JSON under .data attribute. - """ - - def __init__(self, json): - self.data = json - - -def wait_experiment_finish(katib_client, experiment, timeout): - polling_interval = datetime.timedelta(seconds=30) - end_time = datetime.datetime.now() + datetime.timedelta(minutes=timeout) - experiment_name = experiment.metadata.name - experiment_namespace = experiment.metadata.namespace - while True: - current_status = None - try: - current_status = katib_client.get_experiment_status(name=experiment_name, namespace=experiment_namespace) - except Exception as e: - logger.info("Unable to get current status for the Experiment: {} in namespace: {}. Exception: {}".format( - experiment_name, experiment_namespace, e)) - # If Experiment has reached complete condition, exit the loop. - if current_status in FINISH_CONDITIONS: - logger.info("Experiment: {} in namespace: {} has reached the end condition: {}".format( - experiment_name, experiment_namespace, current_status)) - return - # Print the current condition. - logger.info("Current condition for Experiment: {} in namespace: {} is: {}".format( - experiment_name, experiment_namespace, current_status)) - # If timeout has been reached, rise an exception. - if datetime.datetime.now() > end_time: - raise Exception("Timout waiting for Experiment: {} in namespace: {} " - "to reach one of these conditions: {}".format( - experiment_name, experiment_namespace, FINISH_CONDITIONS)) - # Sleep for poll interval. 
- time.sleep(polling_interval.seconds) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='Katib Experiment launcher') - parser.add_argument('--experiment-name', type=str, - help='Experiment name') - parser.add_argument('--experiment-namespace', type=str, default='anonymous', - help='Experiment namespace') - parser.add_argument('--experiment-spec', type=str, default='', - help='Experiment specification') - parser.add_argument('--experiment-timeout-minutes', type=int, default=60*24, - help='Time in minutes to wait for the Experiment to complete') - parser.add_argument('--delete-after-done', type=strtobool, default=True, - help='Whether to delete the Experiment after it is finished') - - parser.add_argument('--output-file', type=str, default='/output.txt', - help='The file which stores the best hyperparameters of the Experiment') - - args = parser.parse_args() - - experiment_name = args.experiment_name - experiment_namespace = args.experiment_namespace - - logger.info("Creating Experiment: {} in namespace: {}".format(experiment_name, experiment_namespace)) - - # Create JSON object from experiment spec - experiment_spec = JSONObject(args.experiment_spec) - # Deserialize JSON to ExperimentSpec - experiment_spec = ApiClient().deserialize(experiment_spec, "V1beta1ExperimentSpec") - - # Create Experiment object. - experiment = V1beta1Experiment( - api_version="kubeflow.org/v1beta1", - kind="Experiment", - metadata=V1ObjectMeta( - name=experiment_name, - namespace=experiment_namespace - ), - spec=experiment_spec - ) - - # Create Katib client. - katib_client = KatibClient() - # Create Experiment in Kubernetes cluster. - output = katib_client.create_experiment(experiment, namespace=experiment_namespace) - - # Wait until Experiment is created. - end_time = datetime.datetime.now() + datetime.timedelta(minutes=args.experiment_timeout_minutes) - while True: - current_status = None - # Try to get Experiment status. - try: - current_status = katib_client.get_experiment_status(name=experiment_name, namespace=experiment_namespace) - except Exception: - logger.info("Waiting until Experiment is created...") - # If current status is set, exit the loop. - if current_status is not None: - break - # If timeout has been reached, rise an exception. - if datetime.datetime.now() > end_time: - raise Exception("Timout waiting for Experiment: {} in namespace: {} to be created".format( - experiment_name, experiment_namespace)) - time.sleep(1) - - logger.info("Experiment is created") - - # Wait for Experiment finish. - wait_experiment_finish(katib_client, experiment, args.experiment_timeout_minutes) - - # Check if Experiment is successful. - if katib_client.is_experiment_succeeded(name=experiment_name, namespace=experiment_namespace): - logger.info("Experiment: {} in namespace: {} is successful".format( - experiment_name, experiment_namespace)) - - optimal_hp = katib_client.get_optimal_hyperparameters( - name=experiment_name, namespace=experiment_namespace) - logger.info("Optimal hyperparameters:\n{}".format(optimal_hp)) - - # Create dir if it doesn't exist. - if not os.path.exists(os.path.dirname(args.output_file)): - os.makedirs(os.path.dirname(args.output_file)) - # Save HyperParameters to the file. - with open(args.output_file, 'w') as f: - f.write(json.dumps(optimal_hp)) - else: - logger.info("Experiment: {} in namespace: {} is failed".format( - experiment_name, experiment_namespace)) - # Print Experiment if it is failed. 
- experiment = katib_client.get_experiment(name=experiment_name, namespace=experiment_namespace) - logger.info(experiment) - - # Delete Experiment if it is needed. - if args.delete_after_done: - katib_client.delete_experiment(name=experiment_name, namespace=experiment_namespace) - logger.info("Experiment: {} in namespace: {} has been deleted".format( - experiment_name, experiment_namespace)) diff --git a/components/contrib/kubeflow/kfserving/Dockerfile b/components/contrib/kubeflow/kfserving/Dockerfile deleted file mode 100644 index 612c71deb00..00000000000 --- a/components/contrib/kubeflow/kfserving/Dockerfile +++ /dev/null @@ -1,13 +0,0 @@ -FROM python:3.6-slim - -COPY requirements.txt . -RUN python3 -m pip install -r \ - requirements.txt --quiet --no-cache-dir \ - && rm -f requirements.txt - -ENV APP_HOME /app -COPY src $APP_HOME -WORKDIR $APP_HOME - -ENTRYPOINT ["python"] -CMD ["kfservingdeployer.py"] diff --git a/components/contrib/kubeflow/kfserving/README.md b/components/contrib/kubeflow/kfserving/README.md deleted file mode 100644 index dd9a9f38067..00000000000 --- a/components/contrib/kubeflow/kfserving/README.md +++ /dev/null @@ -1,185 +0,0 @@ -# KFServing Component - -This is the Kubeflow Pipelines component for KFServing. This uses the [V1beta1 API](https://github.com/kubeflow/kfserving/blob/master/docs/apis/v1beta1/README.md), -so your cluster must have a KFServing version >= v0.5.0 in order to use this. - -If you are using KFServing version prior to v0.5.0, an older deprecated version of the KFServing Pipelines component must be used -and can be found at [this commit](https://github.com/kubeflow/pipelines/tree/65bed9b6d1d676ef2d541a970d3edc0aee12400d/components/kubeflow/kfserving). -Sample usage of this component can be found [here](https://github.com/kubeflow/kfserving/blob/master/docs/samples/pipelines/kfs-pipeline-v1alpha2.ipynb). - -## Usage - -Load the component with: - -```python -import kfp.dsl as dsl -import kfp -from kfp import components - -kfserving_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/kubeflow/kfserving/component.yaml') -``` - -**Note**: To use the previous version of this component which uses the v1alpha2 API and KFServing 0.4.1, then load the following YAML instead: - -```yaml -https://raw.githubusercontent.com/kubeflow/pipelines/65bed9b6d1d676ef2d541a970d3edc0aee12400d/components/kubeflow/kfserving/component.yaml -``` - - -### Arguments - -| Argument | Default | Description | -|----------|---------|-------------| -| action | `create` | Action to execute on KFServing. Available options are `create`, `update`, `apply`, and `delete`. Note: `apply` is equivalent to `update` if the resource exists and `create` if not. | -| model_name | | Name to give to the deployed model/InferenceService | -| model_uri | | Path of the S3 or GCS compatible directory containing the model. | -| canary_traffic_percent | `100` | The traffic split percentage between the candidate model and the last ready model | -| namespace | | Kubernetes namespace where the KFServing service is deployed. If no namespace is provided, `anonymous` will be used unless a namespace is provided in the `inferenceservice_yaml` argument. | -| framework | | Machine learning framework for model serving. Currently the supported frameworks are `tensorflow`, `pytorch`, `sklearn`, `xgboost`, `onnx`, `triton`, `pmml`, and `lightgbm`. | -| custom_model_spec | `{}` | Custom model runtime container spec in JSON. 
Sample spec: `{"image": "codait/max-object-detector", "port":5000, "name": "test-container"}` | -| inferenceservice_yaml | `{}` | Raw InferenceService serialized YAML for deployment. Use this if you need additional configurations for your InferenceService. | -| autoscaling_target | `0` | Autoscaling Target Number. If not 0, sets the following annotation on the InferenceService: `autoscaling.knative.dev/target` | -| service_account | | ServiceAccount to use to run the InferenceService pod. | -| enable_istio_sidecar | `True` | Whether to enable istio sidecar injection. | -| watch_timeouot | `300` | Timeout in seconds for watching until the InferenceService becomes ready. | -| min_replicas | `-1` | Minimum number of InferenceService replicas. Default of -1 just delegates to pod default of 1. | -| max_replicas | `-1` | Maximum number of InferenceService replicas. | - - -### Basic InferenceService Creation - -The following will use the KFServing component to deploy a TensorFlow model. - -```python -@dsl.pipeline( - name='KFServing Pipeline', - description='A pipeline for KFServing.' -) -def kfserving_pipeline(): - kfserving_op( - action='apply', - model_name='tf-sample', - model_uri='gs://kfserving-samples/models/tensorflow/flowers', - framework='tensorflow', - ) -kfp.Client().create_run_from_pipeline_func(kfserving_pipeline, arguments={}) -``` - -Sample op for deploying a PyTorch model: - -```python -kfserving_op( - action='apply', - model_name='pytorch-test', - model_uri='gs://kfserving-examples/models/torchserve/image_classifier', - framework='pytorch' -) -``` - -### Canary Rollout - -Ensure you have an initial model deployed with 100 percent traffic with something like: - -```python -kfserving_op( - action = 'apply', - model_name='tf-sample', - model_uri='gs://kfserving-samples/models/tensorflow/flowers', - framework='tensorflow', -) -``` - -Deploy the candidate model which will only get a portion of traffic: - -```python -kfserving_op( - action='apply', - model_name='tf-sample', - model_uri='gs://kfserving-samples/models/tensorflow/flowers-2', - framework='tensorflow', - canary_traffic_percent='10' -) -``` - -To promote the candidate model, you can either set `canary_traffic_percent` to `100` or simply remove it, then re-run the pipeline: - -```python -kfserving_op( - action='apply', - model_name='tf-sample', - model_uri='gs://kfserving-samples/models/tensorflow/flowers-2', - framework='tensorflow' -) -``` - -If you instead want to rollback the candidate model, then set `canary_traffic_percent` to `0`, then re-run the pipeline: - -```python -kfserving_op( - action='apply', - model_name='tf-sample', - model_uri='gs://kfserving-samples/models/tensorflow/flowers-2', - framework='tensorflow', - canary_traffic_percent='0' -) -``` - -### Deletion - -To delete a model, simply set the `action` to `'delete'` and pass in the InferenceService name: - -```python -kfserving_op( - action='delete', - model_name='tf-sample' -) -``` - -### Custom Runtime - -To pass in a custom model serving runtime, you can use the `custom_model_spec` argument. 
Currently, -the expected format for `custom_model_spec` coincides with: - -```json -{ - "image": "some_image", - "port": "port_number", - "name": "custom-container", - "env" : [{ "name": "some_name", "value": "some_value"}], - "resources": { "requests": {}, "limits": {}} -} -``` - -Sample deployment: - -```python -container_spec = '{ "image": "codait/max-object-detector", "port":5000, "name": "custom-container"}' -kfserving_op( - action='apply', - model_name='custom-simple', - custom_model_spec=container_spec -) -``` - -### Deploy using InferenceService YAML - -If you need more fine-grained configuration, there is the option to deploy using an InferenceService YAML file: - -```python -isvc_yaml = ''' -apiVersion: "serving.kubeflow.org/v1beta1" -kind: "InferenceService" -metadata: - name: "sklearn-iris" - namespace: "anonymous" -spec: - predictor: - sklearn: - storageUri: "gs://kfserving-samples/models/sklearn/iris" -''' -kfserving_op( - action='apply', - inferenceservice_yaml=isvc_yaml -) -``` - diff --git a/components/contrib/kubeflow/kfserving/component.yaml b/components/contrib/kubeflow/kfserving/component.yaml deleted file mode 100644 index 8e89552e161..00000000000 --- a/components/contrib/kubeflow/kfserving/component.yaml +++ /dev/null @@ -1,44 +0,0 @@ -name: Kubeflow - Serve Model using KFServing -description: Serve Models using Kubeflow KFServing -inputs: - - {name: Action, type: String, default: 'create', description: 'Action to execute on KFServing'} - - {name: Model Name, type: String, default: '', description: 'Name to give to the deployed model'} - - {name: Model URI, type: String, default: '', description: 'Path of the S3 or GCS compatible directory containing the model.'} - - {name: Canary Traffic Percent, type: String, default: '100', description: 'The traffic split percentage between the candidate model and the last ready model'} - - {name: Namespace, type: String, default: '', description: 'Kubernetes namespace where the KFServing service is deployed.'} - - {name: Framework, type: String, default: '', description: 'Machine Learning Framework for Model Serving.'} - - {name: Custom Model Spec, type: String, default: '{}', description: 'Custom model runtime container spec in JSON'} - - {name: Autoscaling Target, type: String, default: '0', description: 'Autoscaling Target Number'} - - {name: Service Account, type: String, default: '', description: 'ServiceAccount to use to run the InferenceService pod'} - - {name: Enable Istio Sidecar, type: Bool, default: 'True', description: 'Whether to enable istio sidecar injection'} - - {name: InferenceService YAML, type: String, default: '{}', description: 'Raw InferenceService serialized YAML for deployment'} - - {name: Watch Timeout, type: String, default: '300', description: "Timeout seconds for watching until InferenceService becomes ready."} - - {name: Min Replicas, type: String, default: '-1', description: 'Minimum number of InferenceService replicas'} - - {name: Max Replicas, type: String, default: '-1', description: 'Maximum number of InferenceService replicas'} - - {name: Request Timeout, type: String, default: '60', description: "Specifies the number of seconds to wait before timing out a request to the component."} - -outputs: - - {name: InferenceService Status, type: String, description: 'Status JSON output of InferenceService'} -implementation: - container: - image: quay.io/aipipeline/kfserving-component:v0.5.1 - command: ['python'] - args: [ - -u, kfservingdeployer.py, - --action, {inputValue: Action}, - --model-name, 
{inputValue: Model Name}, - --model-uri, {inputValue: Model URI}, - --canary-traffic-percent, {inputValue: Canary Traffic Percent}, - --namespace, {inputValue: Namespace}, - --framework, {inputValue: Framework}, - --custom-model-spec, {inputValue: Custom Model Spec}, - --autoscaling-target, {inputValue: Autoscaling Target}, - --service-account, {inputValue: Service Account}, - --enable-istio-sidecar, {inputValue: Enable Istio Sidecar}, - --output-path, {outputPath: InferenceService Status}, - --inferenceservice-yaml, {inputValue: InferenceService YAML}, - --watch-timeout, {inputValue: Watch Timeout}, - --min-replicas, {inputValue: Min Replicas}, - --max-replicas, {inputValue: Max Replicas}, - --request-timeout, {inputValue: Request Timeout} - ] diff --git a/components/contrib/kubeflow/kfserving/requirements.txt b/components/contrib/kubeflow/kfserving/requirements.txt deleted file mode 100644 index bcddd469afa..00000000000 --- a/components/contrib/kubeflow/kfserving/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -kubernetes==12.0.0 -kfserving==0.5.1 diff --git a/components/contrib/kubeflow/kfserving/src/kfservingdeployer.py b/components/contrib/kubeflow/kfserving/src/kfservingdeployer.py deleted file mode 100644 index 719927acafc..00000000000 --- a/components/contrib/kubeflow/kfserving/src/kfservingdeployer.py +++ /dev/null @@ -1,434 +0,0 @@ -# Copyright 2019 kubeflow.org. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse -from distutils.util import strtobool -import json -import os -import sys -import time -import yaml - -from kubernetes import client - -from kfserving import constants -from kfserving import KFServingClient -from kfserving import V1beta1InferenceService -from kfserving import V1beta1InferenceServiceSpec -from kfserving import V1beta1LightGBMSpec -from kfserving import V1beta1ONNXRuntimeSpec -from kfserving import V1beta1PMMLSpec -from kfserving import V1beta1PredictorSpec -from kfserving import V1beta1SKLearnSpec -from kfserving import V1beta1TFServingSpec -from kfserving import V1beta1TorchServeSpec -from kfserving import V1beta1TritonSpec -from kfserving import V1beta1XGBoostSpec -from kfserving.api.kf_serving_watch import isvc_watch - - -AVAILABLE_FRAMEWORKS = { - 'tensorflow': V1beta1TFServingSpec, - 'pytorch': V1beta1TorchServeSpec, - 'sklearn': V1beta1SKLearnSpec, - 'xgboost': V1beta1XGBoostSpec, - 'onnx': V1beta1ONNXRuntimeSpec, - 'triton': V1beta1TritonSpec, - 'pmml': V1beta1PMMLSpec, - 'lightgbm': V1beta1LightGBMSpec -} - - -def create_predictor_spec(framework, storage_uri, canary_traffic_percent, - service_account, min_replicas, max_replicas, containers, request_timeout): - """ - Create and return V1beta1PredictorSpec to be used in a V1beta1InferenceServiceSpec - object. 
- """ - - predictor_spec = V1beta1PredictorSpec( - service_account_name=service_account, - min_replicas=(min_replicas - if min_replicas >= 0 - else None - ), - max_replicas=(max_replicas - if max_replicas > 0 and max_replicas >= min_replicas - else None - ), - containers=(containers or None), - canary_traffic_percent=canary_traffic_percent, - timeout=request_timeout - ) - # If the containers field was set, then this is custom model serving. - if containers: - return predictor_spec - - if framework not in AVAILABLE_FRAMEWORKS: - raise ValueError("Error: No matching framework: " + framework) - - setattr( - predictor_spec, - framework, - AVAILABLE_FRAMEWORKS[framework](storage_uri=storage_uri) - ) - return predictor_spec - - -def create_custom_container_spec(custom_model_spec): - """ - Given a JSON container spec, return a V1Container object - representing the container. This is used for passing in - custom server images. The expected format for the input is: - - { "image": "test/containerimage", - "port":5000, - "name": "custom-container" } - """ - - env = ( - [ - client.V1EnvVar(name=i["name"], value=i["value"]) - for i in custom_model_spec["env"] - ] - if custom_model_spec.get("env", "") - else None - ) - ports = ( - [client.V1ContainerPort(container_port=int(custom_model_spec.get("port", "")), protocol="TCP")] - if custom_model_spec.get("port", "") - else None - ) - resources = ( - client.V1ResourceRequirements( - requests=(custom_model_spec["resources"]["requests"] - if custom_model_spec.get('resources', {}).get('requests') - else None - ), - limits=(custom_model_spec["resources"]["limits"] - if custom_model_spec.get('resources', {}).get('limits') - else None - ), - ) - if custom_model_spec.get("resources", {}) - else None - ) - return client.V1Container( - name=custom_model_spec.get("name", "custom-container"), - image=custom_model_spec["image"], - env=env, - ports=ports, - command=custom_model_spec.get("command", None), - args=custom_model_spec.get("args", None), - image_pull_policy=custom_model_spec.get("image_pull_policy", None), - working_dir=custom_model_spec.get("working_dir", None), - resources=resources - ) - - -def create_inference_service(metadata, predictor_spec): - """ - Build and return V1beta1InferenceService object. - """ - return V1beta1InferenceService( - api_version=constants.KFSERVING_V1BETA1, - kind=constants.KFSERVING_KIND, - metadata=metadata, - spec=V1beta1InferenceServiceSpec( - predictor=predictor_spec - ), - ) - - -def submit_api_request(kfs_client, action, name, isvc, namespace=None, - watch=False, timeout_seconds=300): - """ - Creates or updates a Kubernetes custom object. This code is borrowed from the - KFServingClient.create/patch methods as using those directly doesn't allow for - sending in dicts as the InferenceService object which is needed for supporting passing - in raw InferenceService serialized YAML. - """ - custom_obj_api = kfs_client.api_instance - args = [constants.KFSERVING_GROUP,constants.KFSERVING_V1BETA1_VERSION, - namespace, constants.KFSERVING_PLURAL] - if action == 'update': - outputs = custom_obj_api.patch_namespaced_custom_object(*args, name, isvc) - else: - outputs = custom_obj_api.create_namespaced_custom_object(*args, isvc) - - if watch: - # Sleep 3 to avoid status still be True within a very short time. 
- time.sleep(3) - isvc_watch( - name=outputs['metadata']['name'], - namespace=namespace, - timeout_seconds=timeout_seconds) - else: - return outputs - - -def perform_action(action, model_name, model_uri, canary_traffic_percent, namespace, - framework, custom_model_spec, service_account, inferenceservice_yaml, - request_timeout, autoscaling_target=0, enable_istio_sidecar=True, - watch_timeout=300, min_replicas=0, max_replicas=0): - """ - Perform the specified action. If the action is not 'delete' and `inferenceService_yaml` - was provided, the dict representation of the YAML will be sent directly to the - Kubernetes API. Otherwise, a V1beta1InferenceService object will be built using the - provided input and then sent for creation/update. - :return InferenceService JSON output - """ - kfs_client = KFServingClient() - - if inferenceservice_yaml: - # Overwrite name and namespace if exists - if namespace: - inferenceservice_yaml['metadata']['namespace'] = namespace - - if model_name: - inferenceservice_yaml['metadata']['name'] = model_name - else: - model_name = inferenceservice_yaml['metadata']['name'] - - kfsvc = inferenceservice_yaml - - elif action != 'delete': - # Create annotations - annotations = {} - if int(autoscaling_target) != 0: - annotations["autoscaling.knative.dev/target"] = str(autoscaling_target) - if not enable_istio_sidecar: - annotations["sidecar.istio.io/inject"] = 'false' - if not annotations: - annotations = None - metadata = client.V1ObjectMeta( - name=model_name, namespace=namespace, annotations=annotations - ) - - # If a custom model container spec was provided, build the V1Container - # object using it. - containers = [] - if custom_model_spec: - containers = [create_custom_container_spec(custom_model_spec)] - - # Build the V1beta1PredictorSpec. - predictor_spec = create_predictor_spec( - framework, model_uri, canary_traffic_percent, service_account, - min_replicas, max_replicas, containers, request_timeout - ) - - kfsvc = create_inference_service(metadata, predictor_spec) - - if action == "create": - submit_api_request(kfs_client, 'create', model_name, kfsvc, namespace, - watch=True, timeout_seconds=watch_timeout) - elif action == "update": - submit_api_request(kfs_client, 'update', model_name, kfsvc, namespace, - watch=True, timeout_seconds=watch_timeout) - elif action == "apply": - try: - submit_api_request(kfs_client, 'create', model_name, kfsvc, namespace, - watch=True, timeout_seconds=watch_timeout) - except Exception: - submit_api_request(kfs_client, 'update', model_name, kfsvc, namespace, - watch=True, timeout_seconds=watch_timeout) - elif action == "delete": - kfs_client.delete(model_name, namespace=namespace) - else: - raise ("Error: No matching action: " + action) - - model_status = kfs_client.get(model_name, namespace=namespace) - return model_status - - -def main(): - """ - This parses arguments passed in from the CLI and performs the corresponding action. 
- """ - parser = argparse.ArgumentParser() - parser.add_argument( - "--action", type=str, help="Action to execute on KFServing", default="create" - ) - parser.add_argument( - "--model-name", type=str, help="Name to give to the deployed model" - ) - parser.add_argument( - "--model-uri", - type=str, - help="Path of the S3, GCS or PVC directory containing the model", - ) - parser.add_argument( - "--canary-traffic-percent", - type=str, - help="The traffic split percentage between the candidate model and the last ready model", - default="100", - ) - parser.add_argument( - "--namespace", - type=str, - help="Kubernetes namespace where the KFServing service is deployed", - default="", - ) - parser.add_argument( - "--framework", - type=str, - help="Model serving framework to use. Available frameworks: " + - str(list(AVAILABLE_FRAMEWORKS.keys())), - default="" - ) - parser.add_argument( - "--custom-model-spec", - type=json.loads, - help="The container spec for a custom model runtime", - default="{}", - ) - parser.add_argument( - "--autoscaling-target", type=str, help="Autoscaling target number", default="0" - ) - parser.add_argument( - "--service-account", - type=str, - help="Service account containing s3 credentials", - default="", - ) - parser.add_argument( - "--enable-istio-sidecar", - type=strtobool, - help="Whether to inject istio sidecar", - default="True" - ) - parser.add_argument( - "--inferenceservice-yaml", - type=yaml.safe_load, - help="Raw InferenceService serialized YAML for deployment", - default="{}" - ) - parser.add_argument("--output-path", type=str, help="Path to store URI output") - parser.add_argument("--watch-timeout", - type=str, - help="Timeout seconds for watching until InferenceService becomes ready.", - default="300") - parser.add_argument( - "--min-replicas", type=str, help="Minimum number of replicas", default="-1" - ) - parser.add_argument( - "--max-replicas", type=str, help="Maximum number of replicas", default="-1" - ) - parser.add_argument("--request-timeout", - type=str, - help="Specifies the number of seconds to wait before timing out a request to the component.", - default="60") - - args = parser.parse_args() - - action = args.action.lower() - model_name = args.model_name - model_uri = args.model_uri - canary_traffic_percent = int(args.canary_traffic_percent) - namespace = args.namespace - framework = args.framework.lower() - output_path = args.output_path - custom_model_spec = args.custom_model_spec - autoscaling_target = int(args.autoscaling_target) - service_account = args.service_account - enable_istio_sidecar = args.enable_istio_sidecar - inferenceservice_yaml = args.inferenceservice_yaml - watch_timeout = int(args.watch_timeout) - min_replicas = int(args.min_replicas) - max_replicas = int(args.max_replicas) - request_timeout = int(args.request_timeout) - - # Default the namespace. - if not namespace: - namespace = 'anonymous' - # If no namespace was provided, but one is listed in the YAML, use that. - if inferenceservice_yaml and inferenceservice_yaml.get('metadata', {}).get('namespace'): - namespace = inferenceservice_yaml['metadata']['namespace'] - - # Only require model name when an Isvc YAML was not provided. - if not inferenceservice_yaml and not model_name: - parser.error('{} argument is required when performing "{}" action'.format( - 'model_name', action - )) - # If the action isn't a delete, require 'model-uri' and 'framework' only if an Isvc YAML - # or custom model container spec are not provided. 
- if action != 'delete': - if not inferenceservice_yaml and not custom_model_spec and not (model_uri and framework): - parser.error('Arguments for {} and {} are required when performing "{}" action'.format( - 'model_uri', 'framework', action - )) - - model_status = perform_action( - action=action, - model_name=model_name, - model_uri=model_uri, - canary_traffic_percent=canary_traffic_percent, - namespace=namespace, - framework=framework, - custom_model_spec=custom_model_spec, - autoscaling_target=autoscaling_target, - service_account=service_account, - enable_istio_sidecar=enable_istio_sidecar, - inferenceservice_yaml=inferenceservice_yaml, - request_timeout=request_timeout, - watch_timeout=watch_timeout, - min_replicas=min_replicas, - max_replicas=max_replicas - ) - - print(model_status) - - if action != 'delete': - # Check whether the model is ready - for condition in model_status["status"]["conditions"]: - if condition['type'] == 'Ready': - if condition['status'] == 'True': - print('Model is ready\n') - break - print('Model is timed out, please check the InferenceService events for more details.') - sys.exit(1) - try: - print( model_status["status"]["url"] + " is the Knative domain.") - print("Sample test commands: \n") - # model_status['status']['url'] is like http://flowers-sample.kubeflow.example.com/v1/models/flowers-sample - print("curl -v -X GET %s" % model_status["status"]["url"]) - print("\nIf the above URL is not accessible, it's recommended to setup Knative with a configured DNS.\n"\ - "https://knative.dev/docs/install/installing-istio/#configuring-dns") - except Exception: - print("Model is not ready, check the logs for the Knative URL status.") - sys.exit(1) - - if output_path: - try: - # Remove some less needed fields to reduce output size. - del model_status['metadata']['managedFields'] - del model_status['status']['conditions'] - if sys.getsizeof(model_status) > 3000: - del model_status['components']['predictor']['address']['url'] - del model_status['components']['predictor']['latestCreatedRevision'] - del model_status['components']['predictor']['latestReadyRevision'] - del model_status['components']['predictor']['latestRolledoutRevision'] - del model_status['components']['predictor']['url'] - del model_status['spec'] - except KeyError: - pass - - if not os.path.exists(os.path.dirname(output_path)): - os.makedirs(os.path.dirname(output_path)) - with open(output_path, "w") as report: - report.write(json.dumps(model_status, indent=4)) - - -if __name__ == "__main__": - main() diff --git a/components/contrib/kubeflow/launcher/Dockerfile b/components/contrib/kubeflow/launcher/Dockerfile deleted file mode 100644 index 14185028550..00000000000 --- a/components/contrib/kubeflow/launcher/Dockerfile +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -FROM python:3.6 - -COPY requirements.txt . 
-RUN python3 -m pip install -r \ - requirements.txt --quiet --no-cache-dir \ - && rm -f requirements.txt - -ADD build /ml - -ENTRYPOINT ["python", "/ml/launch_tfjob.py"] diff --git a/components/contrib/kubeflow/launcher/build_image.sh b/components/contrib/kubeflow/launcher/build_image.sh deleted file mode 100755 index b813fa33ec2..00000000000 --- a/components/contrib/kubeflow/launcher/build_image.sh +++ /dev/null @@ -1,56 +0,0 @@ -#!/bin/bash -e -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -while getopts ":hp:t:i:" opt; do - case "${opt}" in - h) echo "-p: project name" - echo "-t: tag name" - echo "-i: image name. If provided, project name and tag name are not necessary" - exit - ;; - p) PROJECT_ID=${OPTARG} - ;; - t) TAG_NAME=${OPTARG} - ;; - i) LAUNCHER_IMAGE_NAME=${OPTARG} - ;; - \? ) echo "Usage: cmd [-p] project [-t] tag [-i] image" - exit - ;; - esac -done - -mkdir -p ./build -cp -R ./src/ ./build/ -cp -R ../common/ ./build/ - -LOCAL_LAUNCHER_IMAGE_NAME=ml-pipeline-kubeflow-tfjob - -docker build -t ${LOCAL_LAUNCHER_IMAGE_NAME} . -if [ -z "${TAG_NAME}" ]; then - TAG_NAME=$(date +v%Y%m%d)-$(git describe --tags --always --dirty)-$(git diff | shasum -a256 | cut -c -6) -fi -if [ -z "${LAUNCHER_IMAGE_NAME}" ]; then - if [ -z "${PROJECT_ID}" ]; then - PROJECT_ID=$(gcloud config config-helper --format "value(configuration.properties.core.project)") - fi - docker tag ${LOCAL_LAUNCHER_IMAGE_NAME} gcr.io/${PROJECT_ID}/${LOCAL_LAUNCHER_IMAGE_NAME}:${TAG_NAME} - docker push gcr.io/${PROJECT_ID}/${LOCAL_LAUNCHER_IMAGE_NAME}:${TAG_NAME} -else - docker tag ${LOCAL_LAUNCHER_IMAGE_NAME} ${LAUNCHER_IMAGE_NAME}:${TAG_NAME} - docker push ${LAUNCHER_IMAGE_NAME}:${TAG_NAME} -fi - -rm -rf ./build diff --git a/components/contrib/kubeflow/launcher/component.yaml b/components/contrib/kubeflow/launcher/component.yaml deleted file mode 100644 index 1c184012901..00000000000 --- a/components/contrib/kubeflow/launcher/component.yaml +++ /dev/null @@ -1,35 +0,0 @@ -name: Kubeflow - Launch TFJob -description: Kubeflow TFJob launcher -inputs: -- {name: Name, type: String, description: 'TFJob name.'} -- {name: Namespace, type: String, default: kubeflow, description: 'TFJob namespace.'} -- {name: Version, type: String, default: v1, description: 'TFJob version.'} -- {name: ActiveDeadlineSeconds, type: Integer, default: -1, description: 'Specifies the duration (in seconds) since startTime during which the job can remain active before it is terminated. Must be a positive integer. 
This setting applies only to pods where restartPolicy is OnFailure or Always.'} -- {name: BackoffLimit, type: Integer, default: -1, description: 'Number of retries before marking this job as failed.'} -- {name: ttl Seconds After Finished, type: Integer, default: -1, description: 'Defines the TTL for cleaning up finished TFJobs.'} -- {name: CleanPodPolicy, type: String, default: Running, description: 'Defines the policy for cleaning up pods after the TFJob completes.'} -- {name: PS Spec, type: JsonObject, default: '{}', description: 'TFJob ps replicaSpecs.'} -- {name: Worker Spec, type: JsonObject, default: '{}', description: 'TFJob worker replicaSpecs.'} -- {name: Chief Spec, type: JsonObject, default: '{}', description: 'TFJob chief replicaSpecs.'} -- {name: Evaluator Spec, type: JsonObject, default: '{}', description: 'TFJob evaluator replicaSpecs.'} -- {name: Tfjob Timeout Minutes, type: Integer, default: 1440, description: 'Time in minutes to wait for the TFJob to complete.'} -- {name: Delete Finished Tfjob, type: Bool, default: 'True' , description: 'Whether to delete the tfjob after it is finished.'} -implementation: - container: - image: nikenano/launchernew:latest - command: [python, /ml/launch_tfjob.py] - args: [ - --name, {inputValue: Name}, - --namespace, {inputValue: Namespace}, - --version, {inputValue: Version}, - --activeDeadlineSeconds, {inputValue: ActiveDeadlineSeconds}, - --backoffLimit, {inputValue: BackoffLimit}, - --cleanPodPolicy, {inputValue: CleanPodPolicy}, - --ttlSecondsAfterFinished, {inputValue: ttl Seconds After Finished}, - --psSpec, {inputValue: PS Spec}, - --workerSpec, {inputValue: Worker Spec}, - --chiefSpec, {inputValue: Chief Spec}, - --evaluatorSpec, {inputValue: Evaluator Spec}, - --tfjobTimeoutMinutes, {inputValue: Tfjob Timeout Minutes}, - --deleteAfterDone, {inputValue: Delete Finished Tfjob}, - ] diff --git a/components/contrib/kubeflow/launcher/requirements.txt b/components/contrib/kubeflow/launcher/requirements.txt deleted file mode 100644 index ba8497137e8..00000000000 --- a/components/contrib/kubeflow/launcher/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -pyyaml -kubernetes diff --git a/components/contrib/kubeflow/launcher/sample.py b/components/contrib/kubeflow/launcher/sample.py deleted file mode 100644 index dbff3bf39fa..00000000000 --- a/components/contrib/kubeflow/launcher/sample.py +++ /dev/null @@ -1,99 +0,0 @@ -import json -import kfp.dsl as dsl -from kfp import components -from kfp.dsl.types import Integer -from typing import NamedTuple - - -def create_worker_spec(workerNum: int=0) -> NamedTuple( - 'CreatWorkerSpec', - [ - ('worker_spec', dict), - ]): - """ - Creates tf-job worker spec - """ - - worker = {} - - if workerNum > 0: - worker = { - "replicas": workerNum , - "restartPolicy": "OnFailure", - "template": { - "spec": { - "containers": [ - { - "command": [ - "python", - "/opt/model.py" - ], - "args": [ - "--tf-train-steps=60" - ], - "image": "liuhougangxa/tf-estimator-mnist", - "name": "tensorflow", - } - ] - } - } - } - from collections import namedtuple - worker_spec_output = namedtuple( - 'MyWorkerOutput', - ['worker_spec']) - return worker_spec_output(worker) - -worker_spec_op = components.func_to_container_op( - create_worker_spec, base_image='tensorflow/tensorflow:1.11.0-py3') - -@dsl.pipeline( - name="Launch kubeflow tfjob", - description="An example to launch tfjob." 
-) -def mnist_train(name: str="mnist", - namespace: str="kubeflow", - workerNum: int=3, - ttlSecondsAfterFinished: int=-1, - tfjobTimeoutMinutes: int=60, - deleteAfterDone =False): - tfjob_launcher_op = components.load_component_from_file("./component.yaml") - # tfjob_launcher_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/kubeflow/launcher/component.yaml') - - chief = { - "replicas": 1, - "restartPolicy": "OnFailure", - "template": { - "spec": { - "containers": [ - { - "command": [ - "python", - "/opt/model.py" - ], - "args": [ - "--tf-train-steps=60" - ], - "image": "liuhougangxa/tf-estimator-mnist", - "name": "tensorflow", - } - ] - } - } - } - - worker_spec_create = worker_spec_op(workerNum) - - tfjob_launcher_op( - name=name, - namespace=namespace, - ttl_seconds_after_finished=ttlSecondsAfterFinished, - worker_spec=worker_spec_create.outputs['worker_spec'], - chief_spec=chief, - tfjob_timeout_minutes=tfjobTimeoutMinutes, - delete_finished_tfjob=deleteAfterDone - ) - -if __name__ == "__main__": - import kfp.compiler as compiler - compiler.Compiler().compile(mnist_train, __file__ + ".tar.gz") \ No newline at end of file diff --git a/components/contrib/kubeflow/launcher/src/__init__.py b/components/contrib/kubeflow/launcher/src/__init__.py deleted file mode 100644 index ecb0d893104..00000000000 --- a/components/contrib/kubeflow/launcher/src/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/components/contrib/kubeflow/launcher/src/launch_tfjob.py b/components/contrib/kubeflow/launcher/src/launch_tfjob.py deleted file mode 100644 index 30666d8d143..00000000000 --- a/components/contrib/kubeflow/launcher/src/launch_tfjob.py +++ /dev/null @@ -1,136 +0,0 @@ -# Copyright 2019 kubeflow.org. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
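Before the launcher code, a brief sketch of the TFJob custom resource it assembles may help; the replica counts, container name and image mirror the `sample.py` pipeline above, and the remaining fields follow the `inst` dict built in `main()` below.

```python
# Illustrative TFJob manifest as assembled by the launcher below, assuming one
# chief and two workers running the sample image from sample.py above.
replica = {
    "replicas": 1,
    "restartPolicy": "OnFailure",
    "template": {"spec": {"containers": [{
        "name": "tensorflow",
        "image": "liuhougangxa/tf-estimator-mnist",
        "command": ["python", "/opt/model.py"],
        "args": ["--tf-train-steps=60"],
    }]}},
}

tfjob = {
    "apiVersion": "kubeflow.org/v1",
    "kind": "TFJob",
    "metadata": {"name": "mnist", "namespace": "kubeflow"},
    "spec": {
        "cleanPodPolicy": "Running",
        "tfReplicaSpecs": {
            "Chief": dict(replica),               # one chief
            "Worker": dict(replica, replicas=2),  # two workers
        },
    },
}
```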
- -import argparse -import datetime -from distutils.util import strtobool -import json -import os -import logging -import yaml -import launch_crd - -from kubernetes import client as k8s_client -from kubernetes import config - -def yamlOrJsonStr(str): - if str == "" or str == None: - return None - return yaml.safe_load(str) - -TFJobGroup = "kubeflow.org" -TFJobPlural = "tfjobs" - -class TFJob(launch_crd.K8sCR): - def __init__(self, version="v1", client=None): - super(TFJob, self).__init__(TFJobGroup, TFJobPlural, version, client) - - def is_expected_conditions(self, inst, expected_conditions): - conditions = inst.get('status', {}).get("conditions") - if not conditions: - return False, "" - if conditions[-1]["type"] in expected_conditions and conditions[-1]["status"] == "True": - return True, conditions[-1]["type"] - else: - return False, conditions[-1]["type"] - -def main(argv=None): - parser = argparse.ArgumentParser(description='Kubeflow TFJob launcher') - parser.add_argument('--name', type=str, - help='TFJob name.') - parser.add_argument('--namespace', type=str, - default='kubeflow', - help='TFJob namespace.') - parser.add_argument('--version', type=str, - default='v1', - help='TFJob version.') - parser.add_argument('--activeDeadlineSeconds', type=int, - default=-1, - help='Specifies the duration (in seconds) since startTime during which the job can remain active before it is terminated. Must be a positive integer. This setting applies only to pods where restartPolicy is OnFailure or Always.') - parser.add_argument('--backoffLimit', type=int, - default=-1, - help='Number of retries before marking this job as failed.') - parser.add_argument('--cleanPodPolicy', type=str, - default="Running", - help='Defines the policy for cleaning up pods after the TFJob completes.') - parser.add_argument('--ttlSecondsAfterFinished', type=int, - default=-1, - help='Defines the TTL for cleaning up finished TFJobs.') - parser.add_argument('--psSpec', type=yamlOrJsonStr, - default={}, - help='TFJob ps replicaSpecs.') - parser.add_argument('--workerSpec', type=yamlOrJsonStr, - default={}, - help='TFJob worker replicaSpecs.') - parser.add_argument('--chiefSpec', type=yamlOrJsonStr, - default={}, - help='TFJob chief replicaSpecs.') - parser.add_argument('--evaluatorSpec', type=yamlOrJsonStr, - default={}, - help='TFJob evaluator replicaSpecs.') - parser.add_argument('--deleteAfterDone', type=strtobool, - default=True, - help='When tfjob done, delete the tfjob automatically if it is True.') - parser.add_argument('--tfjobTimeoutMinutes', type=int, - default=60*24, - help='Time in minutes to wait for the TFJob to reach end') - - args = parser.parse_args() - - logging.getLogger().setLevel(logging.INFO) - - logging.info('Generating tfjob template.') - - config.load_incluster_config() - api_client = k8s_client.ApiClient() - tfjob = TFJob(version=args.version, client=api_client) - inst = { - "apiVersion": "%s/%s" % (TFJobGroup, args.version), - "kind": "TFJob", - "metadata": { - "name": args.name, - "namespace": args.namespace, - }, - "spec": { - "cleanPodPolicy": args.cleanPodPolicy, - "tfReplicaSpecs": { - }, - }, - } - if args.ttlSecondsAfterFinished >=0: - inst["spec"]["ttlSecondsAfterFinished"] = args.ttlSecondsAfterFinished - if args.backoffLimit >= 0: - inst["spec"]["backoffLimit"] = args.backoffLimit - if args.activeDeadlineSeconds >=0: - inst["spec"]["activeDeadlineSecond"] = args.activeDeadlineSeconds - if args.psSpec: - inst["spec"]["tfReplicaSpecs"]["PS"] = args.psSpec - if args.chiefSpec: - 
inst["spec"]["tfReplicaSpecs"]["Chief"] = args.chiefSpec - if args.workerSpec: - inst["spec"]["tfReplicaSpecs"]["Worker"] = args.workerSpec - if args.evaluatorSpec: - inst["spec"]["tfReplicaSpecs"]["Evaluator"] = args.evaluatorSpec - - create_response = tfjob.create(inst) - - expected_conditions = ["Succeeded", "Failed"] - tfjob.wait_for_condition( - args.namespace, args.name, expected_conditions, - timeout=datetime.timedelta(minutes=args.tfjobTimeoutMinutes)) - if args.deleteAfterDone: - tfjob.delete(args.name, args.namespace) - -if __name__== "__main__": - main() diff --git a/components/contrib/kubernetes/Apply_object/component.yaml b/components/contrib/kubernetes/Apply_object/component.yaml deleted file mode 100644 index 97c43ad1e8a..00000000000 --- a/components/contrib/kubernetes/Apply_object/component.yaml +++ /dev/null @@ -1,35 +0,0 @@ -name: Apply Kubernetes object -inputs: -- {name: Object, type: JsonObject} -outputs: -- {name: Name, type: String} -- {name: Kind, type: String} -- {name: Object, type: JsonObject} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/kubernetes/Apply_object/component.yaml' -implementation: - container: - image: bitnami/kubectl:1.17.17 - command: - - bash - - -exc - - | - object_path=$0 - output_name_path=$1 - output_kind_path=$2 - output_object_path=$3 - mkdir -p "$(dirname "$output_name_path")" - mkdir -p "$(dirname "$output_kind_path")" - mkdir -p "$(dirname "$output_object_path")" - - kubectl apply -f "$object_path" --output=json > "$output_object_path" - - < "$output_object_path" jq '.metadata.name' --raw-output > "$output_name_path" - < "$output_object_path" jq '.kind' --raw-output > "$output_kind_path" - - - {inputPath: Object} - - {outputPath: Name} - - {outputPath: Kind} - - {outputPath: Object} diff --git a/components/contrib/kubernetes/Create_PersistentVolumeClaim/component.yaml b/components/contrib/kubernetes/Create_PersistentVolumeClaim/component.yaml deleted file mode 100644 index bd218b5dcf3..00000000000 --- a/components/contrib/kubernetes/Create_PersistentVolumeClaim/component.yaml +++ /dev/null @@ -1,46 +0,0 @@ -name: Create PersistentVolumeClaim in Kubernetes -inputs: -- {name: Name, type: String} -- {name: Storage size, type: String, default: 1Gi} -- {name: Namespace, type: String, default: default} -outputs: -- {name: Name, type: String} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/kubernetes/Create_PersistentVolumeClaim/component.yaml' -implementation: - container: - image: bitnami/kubectl:1.17.17 - command: - - bash - - -exc - - | - name=$0 - storage_size=$1 - namespace=$2 - output_name_path=$3 - mkdir -p "$(dirname "$output_name_path")" - object_path=$(mktemp) - - cat <"$object_path" - apiVersion: v1 - kind: PersistentVolumeClaim - metadata: - name: $name - spec: - #storageClassName: standard - accessModes: - - ReadWriteOnce - resources: - requests: - storage: $storage_size - EOF - object_name=$(kubectl apply -f "$object_path" --namespace $namespace --output=name) - object_name=${object_name##persistentvolumeclaim/} - echo "$object_name" >"$output_name_path" - - - {inputValue: Name} - - {inputValue: Storage size} - - {inputValue: Namespace} - - {outputPath: Name} diff --git a/components/contrib/kubernetes/Create_object/component.yaml b/components/contrib/kubernetes/Create_object/component.yaml deleted file mode 100644 index 
1a7bcad65fc..00000000000 --- a/components/contrib/kubernetes/Create_object/component.yaml +++ /dev/null @@ -1,35 +0,0 @@ -name: Create Kubernetes object -inputs: -- {name: Object, type: JsonObject} -outputs: -- {name: Name, type: String} -- {name: Kind, type: String} -- {name: Object, type: JsonObject} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/kubernetes/Create_object/component.yaml' -implementation: - container: - image: bitnami/kubectl:1.17.17 - command: - - bash - - -exc - - | - object_path=$0 - output_name_path=$1 - output_kind_path=$2 - output_object_path=$3 - mkdir -p "$(dirname "$output_name_path")" - mkdir -p "$(dirname "$output_kind_path")" - mkdir -p "$(dirname "$output_object_path")" - - kubectl create -f "$object_path" --output=json > "$output_object_path" - - < "$output_object_path" jq '.metadata.name' --raw-output > "$output_name_path" - < "$output_object_path" jq '.kind' --raw-output > "$output_kind_path" - - - {inputPath: Object} - - {outputPath: Name} - - {outputPath: Kind} - - {outputPath: Object} diff --git a/components/contrib/kubernetes/Delete_object/component.yaml b/components/contrib/kubernetes/Delete_object/component.yaml deleted file mode 100644 index 28338134d1f..00000000000 --- a/components/contrib/kubernetes/Delete_object/component.yaml +++ /dev/null @@ -1,33 +0,0 @@ -name: Delete Kubernetes object -inputs: -- {name: Name, type: String} -- {name: Kind, type: String} -outputs: -- {name: Name, type: String} -- {name: Kind, type: String} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/kubernetes/Delete_object/component.yaml' -implementation: - container: - image: bitnami/kubectl:1.17.17 - command: - - bash - - -exc - - | - object_name=$0 - object_type=$1 - output_name_path=$2 - output_kind_path=$3 - mkdir -p "$(dirname "$output_name_path")" - mkdir -p "$(dirname "$output_kind_path")" - - typed_object_name=$(kubectl delete "$object_type" "$object_name" --output=name) - echo "${typed_object_name##*/}" >"$output_name_path" - echo "${typed_object_name%/*}" >"$output_kind_path" - - - {inputValue: Name} - - {inputValue: Kind} - - {outputPath: Name} - - {outputPath: Kind} diff --git a/components/contrib/kubernetes/Get_object/component.yaml b/components/contrib/kubernetes/Get_object/component.yaml deleted file mode 100644 index dd73cd190f6..00000000000 --- a/components/contrib/kubernetes/Get_object/component.yaml +++ /dev/null @@ -1,27 +0,0 @@ -name: Get Kubernetes object -inputs: -- {name: Name, type: String} -- {name: Kind, type: String} -outputs: -- {name: Object, type: JsonObject} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/kubernetes/Get_object/component.yaml' -implementation: - container: - image: bitnami/kubectl:1.17.17 - command: - - bash - - -exc - - | - object_name=$0 - object_type=$1 - output_object_path=$2 - mkdir -p "$(dirname "$output_object_path")" - - kubectl get "$object_type" "$object_name" --output=json >"$output_object_path" - - - {inputValue: Name} - - {inputValue: Kind} - - {outputPath: Object} diff --git a/components/contrib/local/base/Dockerfile b/components/contrib/local/base/Dockerfile deleted file mode 100644 index 7bfe0552fe6..00000000000 --- a/components/contrib/local/base/Dockerfile +++ /dev/null @@ -1,22 +0,0 @@ -# 
Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -FROM python:3.7 - -COPY requirements.txt . -RUN python3 -m pip install -r \ - requirements.txt --quiet --no-cache-dir \ - && rm -f requirements.txt - -ADD build /ml diff --git a/components/contrib/local/base/build_image.sh b/components/contrib/local/base/build_image.sh deleted file mode 100755 index fea9b56b761..00000000000 --- a/components/contrib/local/base/build_image.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash -e -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -mkdir -p ./build -cp -R "../confusion_matrix/src"/ ./build/ -cp -R "../roc/src"/ ./build/ - -docker build -t ml-pipeline-local-base . -rm -rf ./build diff --git a/components/contrib/local/base/requirements.txt b/components/contrib/local/base/requirements.txt deleted file mode 100644 index a2aa2280e81..00000000000 --- a/components/contrib/local/base/requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -pandas==0.24.2 -scikit-learn==0.21.2 -scipy==1.4.1 -tensorflow==2.2.0 diff --git a/components/contrib/local/confusion_matrix/Dockerfile b/components/contrib/local/confusion_matrix/Dockerfile deleted file mode 100644 index a80ac52656d..00000000000 --- a/components/contrib/local/confusion_matrix/Dockerfile +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -FROM ml-pipeline-local-base - -ENTRYPOINT ["python", "/ml/confusion_matrix.py"] diff --git a/components/contrib/local/confusion_matrix/build_image.sh b/components/contrib/local/confusion_matrix/build_image.sh deleted file mode 100755 index ab94f6133d9..00000000000 --- a/components/contrib/local/confusion_matrix/build_image.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -e -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# build base image -pushd ../base -./build_image.sh -popd - -../../build_image.sh -l ml-pipeline-local-confusion-matrix "$@" diff --git a/components/contrib/local/confusion_matrix/component.yaml b/components/contrib/local/confusion_matrix/component.yaml deleted file mode 100644 index 4897cb39111..00000000000 --- a/components/contrib/local/confusion_matrix/component.yaml +++ /dev/null @@ -1,20 +0,0 @@ -name: Confusion matrix -description: Calculates confusion matrix -inputs: - - {name: Predictions, type: GCSPath, description: 'GCS path of prediction file pattern.'} # type: {GCSPath: {data_type: CSV}} - - {name: Target lambda, type: String, default: '', description: 'Text of Python lambda function which computes target value. For example, "lambda x: x[''a''] + x[''b'']". If not set, the input must include a "target" column.'} - - {name: Output dir, type: GCSPath, description: 'GCS path of the output directory.'} # type: {GCSPath: {path_type: Directory}} -outputs: - - {name: MLPipeline UI metadata, type: UI metadata} - - {name: MLPipeline Metrics, type: Metrics} -implementation: - container: - image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:1.8.0-alpha.0 - command: [python2, /ml/confusion_matrix.py] - args: [ - --predictions, {inputValue: Predictions}, - --target_lambda, {inputValue: Target lambda}, - --output, {inputValue: Output dir}, - --ui-metadata-output-path, {outputPath: MLPipeline UI metadata}, - --metrics-output-path, {outputPath: MLPipeline Metrics}, - ] diff --git a/components/contrib/local/confusion_matrix/src/confusion_matrix.py b/components/contrib/local/confusion_matrix/src/confusion_matrix.py deleted file mode 100644 index 80b4c6e8202..00000000000 --- a/components/contrib/local/confusion_matrix/src/confusion_matrix.py +++ /dev/null @@ -1,113 +0,0 @@ -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -# A program to generate confusion matrix data out of prediction results. 
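To make the confusion-matrix `component.yaml` above concrete, here is a hedged sketch of how the component could be wired into a KFP v1 pipeline, in the same style as the sample pipelines elsewhere in this diff. The component path is the file being removed here; the GCS locations are placeholders, and the component expects a `schema.json` next to the prediction files.

```python
import kfp
from kfp import components

# Path as it appears in this diff (the component being removed).
confusion_matrix_op = components.load_component_from_file(
    'components/contrib/local/confusion_matrix/component.yaml'
)


def confusion_matrix_pipeline():
    # Placeholder GCS locations; the script reads 'schema.json' from the
    # predictions directory and writes 'confusion_matrix.csv' to the output dir.
    confusion_matrix_op(
        predictions='gs://my-bucket/predictions/part-*',
        output_dir='gs://my-bucket/confusion-matrix/',
    )


if __name__ == '__main__':
    # Placeholder endpoint, as in the other sample pipelines in this diff.
    kfp.Client(host=None).create_run_from_pipeline_func(confusion_matrix_pipeline, arguments={})
```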
-# Usage: -# python confusion_matrix.py \ -# --predictions=gs://bradley-playground/sfpd/predictions/part-* \ -# --output=gs://bradley-playground/sfpd/cm/ \ -# --target=resolution \ -# --analysis=gs://bradley-playground/sfpd/analysis \ - - -import argparse -import json -import os -import urlparse -import pandas as pd -from pathlib import Path -from sklearn.metrics import confusion_matrix, accuracy_score -from tensorflow.python.lib.io import file_io - - -def main(argv=None): - parser = argparse.ArgumentParser(description='ML Trainer') - parser.add_argument('--predictions', type=str, help='GCS path of prediction file pattern.') - parser.add_argument('--output', type=str, help='GCS path of the output directory.') - parser.add_argument('--target_lambda', type=str, - help='a lambda function as a string to compute target.' + - 'For example, "lambda x: x[\'a\'] + x[\'b\']"' + - 'If not set, the input must include a "target" column.') - parser.add_argument('--ui-metadata-output-path', - type=str, - default='/mlpipeline-ui-metadata.json', - help='Local output path for the file containing UI metadata JSON structure.') - parser.add_argument('--metrics-output-path', - type=str, - default='/mlpipeline-metrics.json', - help='Local output path for the file containing metrics JSON structure.') - - args = parser.parse_args() - - storage_service_scheme = urlparse.urlparse(args.output).scheme - on_cloud = True if storage_service_scheme else False - if not on_cloud and not os.path.exists(args.output): - os.makedirs(args.output) - - schema_file = os.path.join(os.path.dirname(args.predictions), 'schema.json') - schema = json.loads(file_io.read_file_to_string(schema_file)) - names = [x['name'] for x in schema] - dfs = [] - files = file_io.get_matching_files(args.predictions) - for file in files: - with file_io.FileIO(file, 'r') as f: - dfs.append(pd.read_csv(f, names=names)) - - df = pd.concat(dfs) - if args.target_lambda: - df['target'] = df.apply(eval(args.target_lambda), axis=1) - - vocab = list(df['target'].unique()) - cm = confusion_matrix(df['target'], df['predicted'], labels=vocab) - data = [] - for target_index, target_row in enumerate(cm): - for predicted_index, count in enumerate(target_row): - data.append((vocab[target_index], vocab[predicted_index], count)) - - df_cm = pd.DataFrame(data, columns=['target', 'predicted', 'count']) - cm_file = os.path.join(args.output, 'confusion_matrix.csv') - with file_io.FileIO(cm_file, 'w') as f: - df_cm.to_csv(f, columns=['target', 'predicted', 'count'], header=False, index=False) - - metadata = { - 'outputs' : [{ - 'type': 'confusion_matrix', - 'format': 'csv', - 'schema': [ - {'name': 'target', 'type': 'CATEGORY'}, - {'name': 'predicted', 'type': 'CATEGORY'}, - {'name': 'count', 'type': 'NUMBER'}, - ], - 'source': cm_file, - # Convert vocab to string because for bealean values we want "True|False" to match csv data. 
- 'labels': list(map(str, vocab)), - }] - } - Path(args.ui_metadata_output_path).parent.mkdir(parents=True, exist_ok=True) - Path(args.ui_metadata_output_path).write_text(json.dumps(metadata)) - - accuracy = accuracy_score(df['target'], df['predicted']) - metrics = { - 'metrics': [{ - 'name': 'accuracy-score', - 'numberValue': accuracy, - 'format': "PERCENTAGE", - }] - } - Path(args.metrics_output_path).parent.mkdir(parents=True, exist_ok=True) - Path(args.metrics_output_path).write_text(json.dumps(metrics)) - -if __name__== "__main__": - main() diff --git a/components/contrib/local/roc/Dockerfile b/components/contrib/local/roc/Dockerfile deleted file mode 100644 index 163d05eb55b..00000000000 --- a/components/contrib/local/roc/Dockerfile +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -FROM ml-pipeline-local-base - -ENTRYPOINT ["python", "/ml/roc.py"] diff --git a/components/contrib/local/roc/build_image.sh b/components/contrib/local/roc/build_image.sh deleted file mode 100755 index 3b1b4337406..00000000000 --- a/components/contrib/local/roc/build_image.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -e -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# build base image -pushd ../base -./build_image.sh -popd - -../../build_image.sh -l ml-pipeline-local-roc "$@" diff --git a/components/contrib/local/roc/component.yaml b/components/contrib/local/roc/component.yaml deleted file mode 100644 index 810692d2261..00000000000 --- a/components/contrib/local/roc/component.yaml +++ /dev/null @@ -1,24 +0,0 @@ -name: ROC curve -description: Calculates Receiver Operating Characteristic curve. See https://en.wikipedia.org/wiki/Receiver_operating_characteristic -inputs: - - {name: Predictions dir, type: GCSPath, description: 'GCS path of prediction file pattern.'} #TODO: Replace dir data + schema files # type: {GCSPath: {path_type: Directory}} - - {name: True class, type: String, default: 'true', description: 'The true class label for the sample. Default is "true".'} - - {name: True score column, type: String, default: 'true', description: 'The name of the column for positive probability.'} - - {name: Target lambda, type: String, default: '', description: 'Text of Python lambda function which returns boolean value indicating whether the classification result is correct.\nFor example, "lambda x: x[''a''] and x[''b'']". 
If missing, input must have a "target" column.'} - - {name: Output dir, type: GCSPath, description: 'GCS path of the output directory.'} #TODO: Replace dir with single file # type: {GCSPath: {path_type: Directory}} -outputs: - - {name: MLPipeline UI metadata, type: UI metadata} - - {name: MLPipeline Metrics, type: Metrics} -implementation: - container: - image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:1.8.0-alpha.0 - command: [python2, /ml/roc.py] - args: [ - --predictions, {inputValue: Predictions dir}, - --trueclass, {inputValue: True class}, - --true_score_column, {inputValue: True score column}, - --target_lambda, {inputValue: Target lambda}, - --output, {inputValue: Output dir}, - --ui-metadata-output-path, {outputPath: MLPipeline UI metadata}, - --metrics-output-path, {outputPath: MLPipeline Metrics}, - ] diff --git a/components/contrib/local/roc/src/roc.py b/components/contrib/local/roc/src/roc.py deleted file mode 100644 index 17e7844be22..00000000000 --- a/components/contrib/local/roc/src/roc.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -# A program to generate ROC data out of prediction results. -# Usage: -# python roc.py \ -# --predictions=gs://bradley-playground/sfpd/predictions/part-* \ -# --trueclass=ACTION \ -# --output=gs://bradley-playground/sfpd/roc/ \ - - -import argparse -import json -import os -import urlparse -import pandas as pd -from pathlib import Path -from sklearn.metrics import roc_curve, roc_auc_score -from tensorflow.python.lib.io import file_io - - -def main(argv=None): - parser = argparse.ArgumentParser(description='ML Trainer') - parser.add_argument('--predictions', type=str, help='GCS path of prediction file pattern.') - parser.add_argument('--trueclass', type=str, default='true', - help='The name of the class as true value. If missing, assuming it is ' + - 'binary classification and default to "true".') - parser.add_argument('--true_score_column', type=str, default='true', - help='The name of the column for positive prob. If missing, assuming it is ' + - 'binary classification and defaults to "true".') - parser.add_argument('--target_lambda', type=str, - help='a lambda function as a string to determine positive or negative.' + - 'For example, "lambda x: x[\'a\'] and x[\'b\']". 
If missing, ' + - 'input must have a "target" column.') - parser.add_argument('--output', type=str, help='GCS path of the output directory.') - parser.add_argument('--ui-metadata-output-path', - type=str, - default='/mlpipeline-ui-metadata.json', - help='Local output path for the file containing UI metadata JSON structure.') - parser.add_argument('--metrics-output-path', - type=str, - default='/mlpipeline-metrics.json', - help='Local output path for the file containing metrics JSON structure.') - args = parser.parse_args() - - storage_service_scheme = urlparse.urlparse(args.output).scheme - on_cloud = True if storage_service_scheme else False - if not on_cloud and not os.path.exists(args.output): - os.makedirs(args.output) - - schema_file = os.path.join(os.path.dirname(args.predictions), 'schema.json') - schema = json.loads(file_io.read_file_to_string(schema_file)) - names = [x['name'] for x in schema] - - if not args.target_lambda and 'target' not in names: - raise ValueError('There is no "target" column, and target_lambda is not provided.') - - if args.true_score_column not in names: - raise ValueError('Cannot find column name "%s"' % args.true_score_column) - - dfs = [] - files = file_io.get_matching_files(args.predictions) - for file in files: - with file_io.FileIO(file, 'r') as f: - dfs.append(pd.read_csv(f, names=names)) - - df = pd.concat(dfs) - if args.target_lambda: - df['target'] = df.apply(eval(args.target_lambda), axis=1) - else: - df['target'] = df['target'].apply(lambda x: 1 if x == args.trueclass else 0) - fpr, tpr, thresholds = roc_curve(df['target'], df[args.true_score_column]) - roc_auc = roc_auc_score(df['target'], df[args.true_score_column]) - df_roc = pd.DataFrame({'fpr': fpr, 'tpr': tpr, 'thresholds': thresholds}) - roc_file = os.path.join(args.output, 'roc.csv') - with file_io.FileIO(roc_file, 'w') as f: - df_roc.to_csv(f, columns=['fpr', 'tpr', 'thresholds'], header=False, index=False) - - metadata = { - 'outputs': [{ - 'type': 'roc', - 'format': 'csv', - 'schema': [ - {'name': 'fpr', 'type': 'NUMBER'}, - {'name': 'tpr', 'type': 'NUMBER'}, - {'name': 'thresholds', 'type': 'NUMBER'}, - ], - 'source': roc_file - }] - } - Path(args.ui_metadata_output_path).parent.mkdir(parents=True, exist_ok=True) - Path(args.ui_metadata_output_path).write_text(json.dumps(metadata)) - - metrics = { - 'metrics': [{ - 'name': 'roc-auc-score', - 'numberValue': roc_auc, - }] - } - Path(args.metrics_output_path).parent.mkdir(parents=True, exist_ok=True) - Path(args.metrics_output_path).write_text(json.dumps(metrics)) - -if __name__== "__main__": - main() diff --git a/components/contrib/ml_metrics/Aggregate_regression_metrics/component.py b/components/contrib/ml_metrics/Aggregate_regression_metrics/component.py deleted file mode 100644 index aae1b88a9bb..00000000000 --- a/components/contrib/ml_metrics/Aggregate_regression_metrics/component.py +++ /dev/null @@ -1,59 +0,0 @@ -from typing import NamedTuple -from kfp.components import create_component_from_func - - -def aggregate_regression_metrics( - metrics_1: dict, - metrics_2: dict = None, - metrics_3: dict = None, - metrics_4: dict = None, - metrics_5: dict = None, -) -> NamedTuple('Outputs', [ - ('number_of_items', int), - ('max_absolute_error', float), - ('mean_absolute_error', float), - ('mean_squared_error', float), - ('root_mean_squared_error', float), - ('metrics', dict), -]): - '''Calculates regression metrics. 
- - Annotations: - author: Alexey Volkov - ''' - import math - - metrics_dicts = [d for d in [metrics_1, metrics_2, metrics_3, metrics_4, metrics_5] if d is not None] - number_of_items = sum(metrics['number_of_items'] for metrics in metrics_dicts) - max_absolute_error = max(metrics['max_absolute_error'] for metrics in metrics_dicts) - mean_absolute_error = sum(metrics['mean_absolute_error'] * metrics['number_of_items'] for metrics in metrics_dicts) / number_of_items - mean_squared_error = sum(metrics['mean_squared_error'] * metrics['number_of_items'] for metrics in metrics_dicts) / number_of_items - root_mean_squared_error = math.sqrt(mean_squared_error) - metrics = dict( - number_of_items=number_of_items, - max_absolute_error=max_absolute_error, - mean_absolute_error=mean_absolute_error, - mean_squared_error=mean_squared_error, - root_mean_squared_error=root_mean_squared_error, - ) - - return ( - number_of_items, - max_absolute_error, - mean_absolute_error, - mean_squared_error, - root_mean_squared_error, - metrics, - ) - - -if __name__ == '__main__': - aggregate_regression_metrics_op = create_component_from_func( - aggregate_regression_metrics, - output_component_file='component.yaml', - base_image='python:3.7', - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/ml_metrics/Aggregate_regression_metrics/component.yaml", - }, - ) diff --git a/components/contrib/ml_metrics/Aggregate_regression_metrics/component.yaml b/components/contrib/ml_metrics/Aggregate_regression_metrics/component.yaml deleted file mode 100644 index 35853a96234..00000000000 --- a/components/contrib/ml_metrics/Aggregate_regression_metrics/component.yaml +++ /dev/null @@ -1,155 +0,0 @@ -name: Aggregate regression metrics -description: |- - Calculates regression metrics. - - Annotations: - author: Alexey Volkov -inputs: -- {name: metrics_1, type: JsonObject} -- {name: metrics_2, type: JsonObject, optional: true} -- {name: metrics_3, type: JsonObject, optional: true} -- {name: metrics_4, type: JsonObject, optional: true} -- {name: metrics_5, type: JsonObject, optional: true} -outputs: -- {name: number_of_items, type: Integer} -- {name: max_absolute_error, type: Float} -- {name: mean_absolute_error, type: Float} -- {name: mean_squared_error, type: Float} -- {name: root_mean_squared_error, type: Float} -- {name: metrics, type: JsonObject} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/ml_metrics/Aggregate_regression_metrics/component.yaml' -implementation: - container: - image: python:3.7 - command: - - python3 - - -u - - -c - - | - def aggregate_regression_metrics( - metrics_1, - metrics_2 = None, - metrics_3 = None, - metrics_4 = None, - metrics_5 = None, - ): - '''Calculates regression metrics. 
- - Annotations: - author: Alexey Volkov - ''' - import math - - metrics_dicts = [d for d in [metrics_1, metrics_2, metrics_3, metrics_4, metrics_5] if d is not None] - number_of_items = sum(metrics['number_of_items'] for metrics in metrics_dicts) - max_absolute_error = max(metrics['max_absolute_error'] for metrics in metrics_dicts) - mean_absolute_error = sum(metrics['mean_absolute_error'] * metrics['number_of_items'] for metrics in metrics_dicts) / number_of_items - mean_squared_error = sum(metrics['mean_squared_error'] * metrics['number_of_items'] for metrics in metrics_dicts) / number_of_items - root_mean_squared_error = math.sqrt(mean_squared_error) - metrics = dict( - number_of_items=number_of_items, - max_absolute_error=max_absolute_error, - mean_absolute_error=mean_absolute_error, - mean_squared_error=mean_squared_error, - root_mean_squared_error=root_mean_squared_error, - ) - - return ( - number_of_items, - max_absolute_error, - mean_absolute_error, - mean_squared_error, - root_mean_squared_error, - metrics, - ) - - def _serialize_json(obj) -> str: - if isinstance(obj, str): - return obj - import json - def default_serializer(obj): - if hasattr(obj, 'to_struct'): - return obj.to_struct() - else: - raise TypeError("Object of type '%s' is not JSON serializable and does not have .to_struct() method." % obj.__class__.__name__) - return json.dumps(obj, default=default_serializer, sort_keys=True) - - def _serialize_float(float_value: float) -> str: - if isinstance(float_value, str): - return float_value - if not isinstance(float_value, (float, int)): - raise TypeError('Value "{}" has type "{}" instead of float.'.format(str(float_value), str(type(float_value)))) - return str(float_value) - - def _serialize_int(int_value: int) -> str: - if isinstance(int_value, str): - return int_value - if not isinstance(int_value, int): - raise TypeError('Value "{}" has type "{}" instead of int.'.format(str(int_value), str(type(int_value)))) - return str(int_value) - - import json - import argparse - _parser = argparse.ArgumentParser(prog='Aggregate regression metrics', description='Calculates regression metrics.\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--metrics-1", dest="metrics_1", type=json.loads, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--metrics-2", dest="metrics_2", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--metrics-3", dest="metrics_3", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--metrics-4", dest="metrics_4", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("--metrics-5", dest="metrics_5", type=json.loads, required=False, default=argparse.SUPPRESS) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=6) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = aggregate_regression_metrics(**_parsed_args) - - _output_serializers = [ - _serialize_int, - _serialize_float, - _serialize_float, - _serialize_float, - _serialize_float, - _serialize_json, - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - --metrics-1 - - {inputValue: metrics_1} - - if: - cond: {isPresent: metrics_2} - then: - - --metrics-2 - - {inputValue: metrics_2} - - if: - cond: {isPresent: 
metrics_3} - then: - - --metrics-3 - - {inputValue: metrics_3} - - if: - cond: {isPresent: metrics_4} - then: - - --metrics-4 - - {inputValue: metrics_4} - - if: - cond: {isPresent: metrics_5} - then: - - --metrics-5 - - {inputValue: metrics_5} - - '----output-paths' - - {outputPath: number_of_items} - - {outputPath: max_absolute_error} - - {outputPath: mean_absolute_error} - - {outputPath: mean_squared_error} - - {outputPath: root_mean_squared_error} - - {outputPath: metrics} diff --git a/components/contrib/ml_metrics/Calculate_classification_metrics/_samples/sample_pipleine.py b/components/contrib/ml_metrics/Calculate_classification_metrics/_samples/sample_pipleine.py deleted file mode 100644 index 19cfe860452..00000000000 --- a/components/contrib/ml_metrics/Calculate_classification_metrics/_samples/sample_pipleine.py +++ /dev/null @@ -1,92 +0,0 @@ -from pathlib import Path - -import kfp -from kfp.components import ComponentStore, create_component_from_func, InputPath, OutputPath, load_component_from_file - -store = ComponentStore.default_store - -chicago_taxi_dataset_op = store.load_component('datasets/Chicago_Taxi_Trips') -xgboost_train_on_csv_op = store.load_component('XGBoost/Train') -xgboost_predict_on_csv_op = store.load_component('XGBoost/Predict') -pandas_transform_csv_op = store.load_component('pandas/Transform_DataFrame/in_CSV_format') -drop_header_op = store.load_component('tables/Remove_header') - - -def convert_values_to_int(text_path: InputPath('Text'), - output_path: OutputPath('Text')): - """Returns the number of values in a CSV column.""" - import numpy as np - - result = np.loadtxt(text_path) - - np.savetxt(output_path, result, fmt='%d') - - -convert_values_to_int_op = create_component_from_func( - func=convert_values_to_int, - base_image='python:3.7', - packages_to_install=['pandas==1.1'], -) - -calculate_classification_metrics_from_csv_op = load_component_from_file( - str(Path(__file__).parent.parent / 'from_CSV' / 'component.yaml') -) - - -def classification_metrics_pipeline(): - features = ['trip_seconds', 'trip_miles', 'pickup_community_area', 'dropoff_community_area', - 'fare', 'tolls', 'extras', 'trip_total'] - target = 'company' - - training_data_csv = chicago_taxi_dataset_op( - select=','.join([target] + features), - where='trip_start_timestamp >= "2019-01-01" AND trip_start_timestamp < "2019-02-01"', - limit=100, - ).output - - training_data_transformed_csv = pandas_transform_csv_op( - table=training_data_csv, - transform_code=f'''df["{target}"] = df["{target}"].astype('category').cat.codes''', - ).output - - # Training - model_trained_on_csv = xgboost_train_on_csv_op( - training_data=training_data_transformed_csv, - label_column=0, - booster_params={'num_class': 13}, - objective='multi:softmax', - num_iterations=50, - ).outputs['model'] - - # Predicting - predictions = xgboost_predict_on_csv_op( - data=training_data_csv, - model=model_trained_on_csv, - label_column=0, - ).output - - predictions_converted = convert_values_to_int_op( - text=predictions - ).output - - # Preparing the true values - true_values_table = pandas_transform_csv_op( - table=training_data_csv, - transform_code=f'df["{target}"] = df["{target}"].astype("category").cat.codes\n' - f'df = df[["{target}"]]' - ).output - - true_values = drop_header_op(true_values_table).output - - # Calculating the regression metrics - calculate_classification_metrics_from_csv_op( - true_values=true_values, - predicted_values=predictions_converted, - average='macro', - ) - - -if __name__ == '__main__': - 
kfp_endpoint = None - - kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func(classification_metrics_pipeline, arguments={}) diff --git a/components/contrib/ml_metrics/Calculate_classification_metrics/from_CSV/component.py b/components/contrib/ml_metrics/Calculate_classification_metrics/from_CSV/component.py deleted file mode 100644 index e9e1bc83bbc..00000000000 --- a/components/contrib/ml_metrics/Calculate_classification_metrics/from_CSV/component.py +++ /dev/null @@ -1,75 +0,0 @@ -from typing import NamedTuple - -from kfp.components import InputPath, create_component_from_func - - -def calculate_classification_metrics_from_csv( - true_values_path: InputPath(), - predicted_values_path: InputPath(), - sample_weights_path: InputPath() = None, - average: str = 'binary' -) -> NamedTuple('Outputs', [ - ('f1', float), - ('precision', float), - ('recall', float), - ('accuracy', float), -]): - """ - Calculates classification metrics. - - Annotations: - author: Anton Kiselev - """ - import numpy - from sklearn.metrics import f1_score, precision_score, recall_score, accuracy_score - - true_values = numpy.loadtxt(true_values_path, dtype=str) - predicted_values = numpy.loadtxt(predicted_values_path, dtype=str) - - if len(predicted_values.shape) != 1: - raise NotImplemented('Only single prediction values are supported.') - if len(true_values.shape) != 1: - raise NotImplemented('Only single true values are supported.') - - if predicted_values.shape != true_values.shape: - raise ValueError(f'Input shapes are different: {predicted_values.shape} != {true_values.shape}') - - sample_weights = None - if sample_weights_path is not None: - sample_weights = numpy.loadtxt(sample_weights_path, dtype=float) - - if len(sample_weights.shape) != 1: - raise NotImplemented('Only single sample weights are supported.') - - if sample_weights.shape != predicted_values.shape: - raise ValueError(f'Input shapes of sample weights and predictions are different: ' - f'{sample_weights.shape} != {predicted_values.shape}') - - f1 = f1_score(true_values, predicted_values, average=average, sample_weight=sample_weights) - precision = precision_score(true_values, predicted_values, average=average, sample_weight=sample_weights) - recall = recall_score(true_values, predicted_values, average=average, sample_weight=sample_weights) - accuracy = accuracy_score(true_values, predicted_values, normalize=average, sample_weight=sample_weights) - - metrics = dict( - f1=f1, - precision=precision, - recall=recall, - accuracy=accuracy - ) - - return ( - f1, - precision, - recall, - accuracy, - metrics, - ) - - -if __name__ == '__main__': - calculate_regression_metrics_from_csv_op = create_component_from_func( - calculate_classification_metrics_from_csv, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=['numpy==1.19.0', 'scikit-learn==0.23.2'] - ) diff --git a/components/contrib/ml_metrics/Calculate_classification_metrics/from_CSV/component.yaml b/components/contrib/ml_metrics/Calculate_classification_metrics/from_CSV/component.yaml deleted file mode 100644 index 7cbbfa32340..00000000000 --- a/components/contrib/ml_metrics/Calculate_classification_metrics/from_CSV/component.yaml +++ /dev/null @@ -1,96 +0,0 @@ -name: Calculate classification metrics from csv -description: |- - Calculates classification metrics. 
- - Annotations: - author: Anton Kiselev -inputs: -- {name: true_values} -- {name: predicted_values} -- {name: sample_weights, optional: true} -- {name: average, type: String, default: binary, optional: true} -outputs: -- {name: f1, type: Float} -- {name: precision, type: Float} -- {name: recall, type: Float} -- {name: accuracy, type: Float} -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'numpy==1.19.0' 'scikit-learn==0.23.2' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 - -m pip install --quiet --no-warn-script-location 'numpy==1.19.0' 'scikit-learn==0.23.2' - --user) && "$0" "$@" - - python3 - - -u - - -c - - "def calculate_classification_metrics_from_csv(\n true_values_path ,\n\ - \ predicted_values_path ,\n sample_weights_path = None,\n \ - \ average = 'binary'\n): \n \n \n \n \n\n \"\"\"\n\ - \ Calculates classification metrics.\n\n Annotations:\n author:\ - \ Anton Kiselev \n \"\"\"\n import numpy\n \ - \ from sklearn.metrics import f1_score, precision_score, recall_score, accuracy_score\n\ - \n true_values = numpy.loadtxt(true_values_path, dtype=str)\n predicted_values\ - \ = numpy.loadtxt(predicted_values_path, dtype=str)\n\n if len(predicted_values.shape)\ - \ != 1:\n raise NotImplemented('Only single prediction values are supported.')\n\ - \ if len(true_values.shape) != 1:\n raise NotImplemented('Only single\ - \ true values are supported.')\n\n if predicted_values.shape != true_values.shape:\n\ - \ raise ValueError(f'Input shapes are different: {predicted_values.shape}\ - \ != {true_values.shape}')\n\n sample_weights = None\n if sample_weights_path\ - \ is not None:\n sample_weights = numpy.loadtxt(sample_weights_path,\ - \ dtype=float)\n\n if len(sample_weights.shape) != 1:\n raise\ - \ NotImplemented('Only single sample weights are supported.')\n\n if\ - \ sample_weights.shape != predicted_values.shape:\n raise ValueError(f'Input\ - \ shapes of sample weights and predictions are different: '\n \ - \ f'{sample_weights.shape} != {predicted_values.shape}')\n\n \ - \ f1 = f1_score(true_values, predicted_values, average=average, sample_weight=sample_weights)\n\ - \ precision = precision_score(true_values, predicted_values, average=average,\ - \ sample_weight=sample_weights)\n recall = recall_score(true_values, predicted_values,\ - \ average=average, sample_weight=sample_weights)\n accuracy = accuracy_score(true_values,\ - \ predicted_values, normalize=average, sample_weight=sample_weights)\n\n \ - \ metrics = dict(\n f1=f1,\n precision=precision,\n recall=recall,\n\ - \ accuracy=accuracy\n )\n\n return (\n f1,\n precision,\n\ - \ recall,\n accuracy,\n metrics,\n )\n\ndef _serialize_float(float_value:\ - \ float) -> str:\n if isinstance(float_value, str):\n return float_value\n\ - \ if not isinstance(float_value, (float, int)):\n raise TypeError('Value\ - \ \"{}\" has type \"{}\" instead of float.'.format(str(float_value), str(type(float_value))))\n\ - \ return str(float_value)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Calculate\ - \ classification metrics from csv', description='Calculates classification metrics.\\\ - n\\n Annotations:\\n author: Anton Kiselev ')\n\ - _parser.add_argument(\"--true-values\", dest=\"true_values_path\", type=str,\ - \ required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--predicted-values\"\ - , dest=\"predicted_values_path\", type=str, required=True, default=argparse.SUPPRESS)\n\ - 
_parser.add_argument(\"--sample-weights\", dest=\"sample_weights_path\", type=str,\ - \ required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--average\"\ - , dest=\"average\", type=str, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"\ - ----output-paths\", dest=\"_output_paths\", type=str, nargs=4)\n_parsed_args\ - \ = vars(_parser.parse_args())\n_output_files = _parsed_args.pop(\"_output_paths\"\ - , [])\n\n_outputs = calculate_classification_metrics_from_csv(**_parsed_args)\n\ - \n_output_serializers = [\n _serialize_float,\n _serialize_float,\n \ - \ _serialize_float,\n _serialize_float,\n\n]\n\nimport os\nfor idx, output_file\ - \ in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n\ - \ except OSError:\n pass\n with open(output_file, 'w') as f:\n\ - \ f.write(_output_serializers[idx](_outputs[idx]))\n" - args: - - --true-values - - {inputPath: true_values} - - --predicted-values - - {inputPath: predicted_values} - - if: - cond: {isPresent: sample_weights} - then: - - --sample-weights - - {inputPath: sample_weights} - - if: - cond: {isPresent: average} - then: - - --average - - {inputValue: average} - - '----output-paths' - - {outputPath: f1} - - {outputPath: precision} - - {outputPath: recall} - - {outputPath: accuracy} diff --git a/components/contrib/ml_metrics/Calculate_regression_metrics/_samples/sample_pipleine.py b/components/contrib/ml_metrics/Calculate_regression_metrics/_samples/sample_pipleine.py deleted file mode 100644 index 10692a29ede..00000000000 --- a/components/contrib/ml_metrics/Calculate_regression_metrics/_samples/sample_pipleine.py +++ /dev/null @@ -1,52 +0,0 @@ -import kfp -from kfp import components - - -chicago_taxi_dataset_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e3337b8bdcd63636934954e592d4b32c95b49129/components/datasets/Chicago%20Taxi/component.yaml') -xgboost_train_on_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Train/component.yaml') -xgboost_predict_on_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/567c04c51ff00a1ee525b3458425b17adbe3df61/components/XGBoost/Predict/component.yaml') -pandas_transform_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/6162d55998b176b50267d351241100bb0ee715bc/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml') -drop_header_op = kfp.components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/02c9638287468c849632cf9f7885b51de4c66f86/components/tables/Remove_header/component.yaml') -calculate_regression_metrics_from_csv_op = kfp.components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/616542ac0f789914f4eb53438da713dd3004fba4/components/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml') - - -def regression_metrics_pipeline(): - training_data_csv = chicago_taxi_dataset_op( - where='trip_start_timestamp >= "2019-01-01" AND trip_start_timestamp < "2019-02-01"', - select='tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total', - limit=10000, - ).output - - # Training - model_trained_on_csv = xgboost_train_on_csv_op( - training_data=training_data_csv, - label_column=0, - objective='reg:squarederror', - num_iterations=200, - ).outputs['model'] - - # Predicting - predictions 
= xgboost_predict_on_csv_op( - data=training_data_csv, - model=model_trained_on_csv, - label_column=0, - ).output - - # Preparing the true values - true_values_table = pandas_transform_csv_op( - table=training_data_csv, - transform_code='''df = df[["tips"]]''', - ).output - - true_values = drop_header_op(true_values_table).output - - # Calculating the regression metrics - calculate_regression_metrics_from_csv_op( - true_values=true_values, - predicted_values=predictions, - ) - - -if __name__ == '__main__': - kfp_endpoint=None - kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func(regression_metrics_pipeline, arguments={}) diff --git a/components/contrib/ml_metrics/Calculate_regression_metrics/from_CSV/component.py b/components/contrib/ml_metrics/Calculate_regression_metrics/from_CSV/component.py deleted file mode 100644 index dcb57dce010..00000000000 --- a/components/contrib/ml_metrics/Calculate_regression_metrics/from_CSV/component.py +++ /dev/null @@ -1,70 +0,0 @@ -from typing import NamedTuple -from kfp.components import InputPath, OutputPath, create_component_from_func - -def calculate_regression_metrics_from_csv( - true_values_path: InputPath(), - predicted_values_path: InputPath(), -) -> NamedTuple('Outputs', [ - ('number_of_items', int), - ('max_absolute_error', float), - ('mean_absolute_error', float), - ('mean_squared_error', float), - ('root_mean_squared_error', float), - ('metrics', dict), -]): - '''Calculates regression metrics. - - Annotations: - author: Alexey Volkov - ''' - import math - import numpy - - true_values = numpy.loadtxt(true_values_path, dtype=numpy.float64) - predicted_values = numpy.loadtxt(predicted_values_path, dtype=numpy.float64) - - if len(predicted_values.shape) != 1: - raise NotImplemented('Only single prediction values are supported.') - if len(true_values.shape) != 1: - raise NotImplemented('Only single true values are supported.') - - if predicted_values.shape != true_values.shape: - raise ValueError('Input shapes are different: {} != {}'.format(predicted_values.shape, true_values.shape)) - - number_of_items = true_values.size - errors = (true_values - predicted_values) - abs_errors = numpy.abs(errors) - squared_errors = errors ** 2 - max_absolute_error = numpy.max(abs_errors) - mean_absolute_error = numpy.average(abs_errors) - mean_squared_error = numpy.average(squared_errors) - root_mean_squared_error = math.sqrt(mean_squared_error) - metrics = dict( - number_of_items=number_of_items, - max_absolute_error=max_absolute_error, - mean_absolute_error=mean_absolute_error, - mean_squared_error=mean_squared_error, - root_mean_squared_error=root_mean_squared_error, - ) - - return ( - number_of_items, - max_absolute_error, - mean_absolute_error, - mean_squared_error, - root_mean_squared_error, - metrics, - ) - - -if __name__ == '__main__': - calculate_regression_metrics_from_csv_op = create_component_from_func( - calculate_regression_metrics_from_csv, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=['numpy==1.19.0'], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml", - }, - ) diff --git a/components/contrib/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml b/components/contrib/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml deleted file mode 100644 index 602405cf850..00000000000 --- 
a/components/contrib/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml +++ /dev/null @@ -1,146 +0,0 @@ -name: Calculate regression metrics from csv -description: |- - Calculates regression metrics. - - Annotations: - author: Alexey Volkov -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/ml_metrics/Calculate_regression_metrics/from_CSV/component.yaml' -inputs: -- {name: true_values} -- {name: predicted_values} -outputs: -- {name: number_of_items, type: Integer} -- {name: max_absolute_error, type: Float} -- {name: mean_absolute_error, type: Float} -- {name: mean_squared_error, type: Float} -- {name: root_mean_squared_error, type: Float} -- {name: metrics, type: JsonObject} -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'numpy==1.19.0' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet - --no-warn-script-location 'numpy==1.19.0' --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def calculate_regression_metrics_from_csv( - true_values_path, - predicted_values_path, - ): - '''Calculates regression metrics. - - Annotations: - author: Alexey Volkov - ''' - import math - import numpy - - true_values = numpy.loadtxt(true_values_path, dtype=numpy.float64) - predicted_values = numpy.loadtxt(predicted_values_path, dtype=numpy.float64) - - if len(predicted_values.shape) != 1: - raise NotImplemented('Only single prediction values are supported.') - if len(true_values.shape) != 1: - raise NotImplemented('Only single true values are supported.') - - if predicted_values.shape != true_values.shape: - raise ValueError('Input shapes are different: {} != {}'.format(predicted_values.shape, true_values.shape)) - - number_of_items = true_values.size - errors = (true_values - predicted_values) - abs_errors = numpy.abs(errors) - squared_errors = errors ** 2 - max_absolute_error = numpy.max(abs_errors) - mean_absolute_error = numpy.average(abs_errors) - mean_squared_error = numpy.average(squared_errors) - root_mean_squared_error = math.sqrt(mean_squared_error) - metrics = dict( - number_of_items=number_of_items, - max_absolute_error=max_absolute_error, - mean_absolute_error=mean_absolute_error, - mean_squared_error=mean_squared_error, - root_mean_squared_error=root_mean_squared_error, - ) - - return ( - number_of_items, - max_absolute_error, - mean_absolute_error, - mean_squared_error, - root_mean_squared_error, - metrics, - ) - - def _serialize_json(obj) -> str: - if isinstance(obj, str): - return obj - import json - def default_serializer(obj): - if hasattr(obj, 'to_struct'): - return obj.to_struct() - else: - raise TypeError("Object of type '%s' is not JSON serializable and does not have .to_struct() method." 
% obj.__class__.__name__) - return json.dumps(obj, default=default_serializer, sort_keys=True) - - def _serialize_float(float_value: float) -> str: - if isinstance(float_value, str): - return float_value - if not isinstance(float_value, (float, int)): - raise TypeError('Value "{}" has type "{}" instead of float.'.format(str(float_value), str(type(float_value)))) - return str(float_value) - - def _serialize_int(int_value: int) -> str: - if isinstance(int_value, str): - return int_value - if not isinstance(int_value, int): - raise TypeError('Value "{}" has type "{}" instead of int.'.format(str(int_value), str(type(int_value)))) - return str(int_value) - - import argparse - _parser = argparse.ArgumentParser(prog='Calculate regression metrics from csv', description='Calculates regression metrics.\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--true-values", dest="true_values_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--predicted-values", dest="predicted_values_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=6) - _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) - - _outputs = calculate_regression_metrics_from_csv(**_parsed_args) - - _output_serializers = [ - _serialize_int, - _serialize_float, - _serialize_float, - _serialize_float, - _serialize_float, - _serialize_json, - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) - args: - - --true-values - - {inputPath: true_values} - - --predicted-values - - {inputPath: predicted_values} - - '----output-paths' - - {outputPath: number_of_items} - - {outputPath: max_absolute_error} - - {outputPath: mean_absolute_error} - - {outputPath: mean_squared_error} - - {outputPath: root_mean_squared_error} - - {outputPath: metrics} diff --git a/components/contrib/notebooks/Run_notebook_using_papermill/component.yaml b/components/contrib/notebooks/Run_notebook_using_papermill/component.yaml deleted file mode 100644 index d1b08c93d25..00000000000 --- a/components/contrib/notebooks/Run_notebook_using_papermill/component.yaml +++ /dev/null @@ -1,51 +0,0 @@ -name: Run notebook using papermill -description: | - Run Jupyter notebook using papermill. - The notebook will receive the parameter values passed to it as well as the INPUT_DATA_PATH and OUTPUT_DATA_PATH variables that will be set to the input data path (if provided) and directory for the optional output data. -inputs: -- {name: Notebook, type: JupyterNotebook, description: 'Notebook to execute.'} -- {name: Parameters, type: JsonObject, default: '{}', description: 'Map with notebook paramater values.'} -- {name: Packages to install, type: JsonArray, default: '', description: 'Python packages to install'} -- {name: Input data, optional: true, description: 'Optional data that can be passed to notebook. In notebook, the INPUT_DATA_PATH variable will point to the data (if passed).'} -outputs: -- {name: Notebook, type: JupyterNotebook, description: 'Executed notebook.'} -- {name: Output data, description: 'Directory with any output data. 
In notebook, the OUTPUT_DATA_PATH variable will point to this directory, so that the notebook can write output data there.'} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/notebooks/Run_notebook_using_papermill/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -exc - - | - input_notebook_path="$0" - output_notebook_path="$1" - arguments="$2" - packages_to_install="$3" - input_data_path="$4" - output_data_path="$5" - mkdir -p "$(dirname "$output_notebook_path")" - mkdir -p "$output_data_path" - - # Converting packages_to_install from JSON to command-line arguments - packages_to_install=$(echo "$packages_to_install" | sed -E -e 's/^\[//' -e 's/]$//' -e 's/",/" /g' -e "s/\"/'/g") - # Installing packages - sh -c "python3 -m pip install --upgrade --quiet jupyter papermill==2.2.0 ${packages_to_install}" - # Running the notebook using papermill - papermill --parameters_yaml "$arguments" --parameters INPUT_DATA_PATH "$input_data_path" --parameters OUTPUT_DATA_PATH "$output_data_path" "$input_notebook_path" "$output_notebook_path" - - - {inputPath: Notebook} - - {outputPath: Notebook} - - {inputValue: Parameters} - - if: - cond: {isPresent: Packages to install} - then: [{inputValue: Packages to install}] - else: "{}" - - if: - cond: {isPresent: Input data} - then: [{inputPath: Input data}] - else: "" - - {outputPath: Output data} diff --git a/components/contrib/notebooks/samples/sample_pipeline.py b/components/contrib/notebooks/samples/sample_pipeline.py deleted file mode 100644 index 3e73b1862ce..00000000000 --- a/components/contrib/notebooks/samples/sample_pipeline.py +++ /dev/null @@ -1,20 +0,0 @@ -kfp_endpoint = None - -import kfp -from kfp import components - -download_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/240543e483076ae718f82c6f280441daa2f041fd/components/web/Download/component.yaml') -run_notebook_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/4ebce5f643b6af5639053ea7eaed52b02bf7e928/components/notebooks/Run_notebook_using_papermill/component.yaml') - -def notebook_pipeline(): - notebook = download_op('https://raw.githubusercontent.com/kubeflow/pipelines/93fc34474bf989998cf19445149aca2847eee763/components/notebooks/samples/test_notebook.ipynb').output - - run_notebook_op( - notebook=notebook, - parameters={'param1': 'value 1'}, - input_data="Optional. Pass output of any component here. 
Can be a directory.", - packages_to_install=["matplotlib"], - ) - -if __name__ == '__main__': - pipelin_run = kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func(notebook_pipeline, arguments={}) diff --git a/components/contrib/notebooks/samples/test_notebook.ipynb b/components/contrib/notebooks/samples/test_notebook.ipynb deleted file mode 100644 index 794b7f9f147..00000000000 --- a/components/contrib/notebooks/samples/test_notebook.ipynb +++ /dev/null @@ -1,82 +0,0 @@ -{ - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.8" - } - }, - "nbformat": 4, - "nbformat_minor": 2, - "cells": [ - { - "source": [ - "# Parameters\n", - "INPUT_DATA_PATH = INPUT_DATA_PATH or \"\"\n", - "OUTPUT_DATA_PATH = OUTPUT_DATA_PATH or \"\"" - ], - "cell_type": "code", - "metadata": {}, - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# Show the parameter values\n", - "print('INPUT_DATA_PATH = ' + INPUT_DATA_PATH)\n", - "print('OUTPUT_DATA_PATH = ' + OUTPUT_DATA_PATH)\n", - "print('locals() = ' + str(locals()))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Checking the input data\n", - "import os\n", - "\n", - "if INPUT_DATA_PATH:\n", - " if os.path.isdir(INPUT_DATA_PATH):\n", - " print('os.listdir(INPUT_DATA_PATH):')\n", - " print(os.listdir(INPUT_DATA_PATH))\n", - " if os.path.isfile(INPUT_DATA_PATH):\n", - " print('os.stat(INPUT_DATA_PATH):')\n", - " print(os.stat(INPUT_DATA_PATH))\n", - "else:\n", - " print('INPUT_DATA_PATH is empty')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Writing some output data\n", - "from pathlib import Path\n", - "\n", - "(Path(OUTPUT_DATA_PATH) / 'output.txt').write_text(\"Hello world!\")" - ] - } - ] -} \ No newline at end of file diff --git a/components/contrib/nuclio/README.md b/components/contrib/nuclio/README.md deleted file mode 100644 index 81a3141e89f..00000000000 --- a/components/contrib/nuclio/README.md +++ /dev/null @@ -1,44 +0,0 @@ -# Nuclio (Serverless) Components - -[Nuclio](https://nuclio.io/) is a native and high performance serverless platform over Kubernetes -which automate the process of build, deployment, monitoring, and auto-scaling of micro-services. -Nuclio support variety of data and data-science related features (e.g. stream processing, -GPUs, volume/DB mounts, high concurrency, etc.) - -To install Nuclio over Kubernetes follow the [instruction in Github](https://github.com/nuclio/nuclio), -or this [interactive tutorial](https://www.katacoda.com/javajon/courses/kubernetes-serverless/nuclio). - -Nuclio functions can be used in the following ML pipline tasks: -* Data collectors, ETL, stream processing -* Data preparation and analysis -* Hyper parameter model training -* Real-time model serving -* Feature vector assembly (real-time data preparation) - -Read more on the use of Nuclio in [data-science here](https://towardsdatascience.com/serverless-can-it-simplify-data-science-projects-8821369bf3bd). 
-Nuclio functions can be generated automatically from 8 code languages, from Jupyter Notebooks, Zip, Git, Docker, etc. -The [nuclio-jupyter repo](https://github.com/nuclio/nuclio-jupyter) provide guidance and many examples. - -## Components - -There are currently 3 components in this package: -* [deploy](deploy/component.yaml) - Automatically build and deploy/re-deploy functions - from code/zip/notebooks/git/.. and/or override various deployment configurations such as - setting cpu/mem/gpu resources, scaling, environment variables, triggers, etc. -* [delete](delete/component.yaml) - Delete a function -* [invoker](invoker/component.yaml) - invoke a function and return the results/logs - -Additional components and examples will be added soon for parallel batch/stream processing - -## Examples - -**Deploy a function (from Github)** - -```python -nuclio_dep = kfp.components.load_component_from_file('deploy/component.yaml') - -def my_pipeline(): - new_func = nuclio_dep(url='git://github.com/nuclio/nuclio#master:/hack/examples/python/helloworld', name='myfunc', project='myproj', tag='0.11') - - ... -``` diff --git a/components/contrib/nuclio/delete/component.yaml b/components/contrib/nuclio/delete/component.yaml deleted file mode 100644 index 4f900e3035a..00000000000 --- a/components/contrib/nuclio/delete/component.yaml +++ /dev/null @@ -1,14 +0,0 @@ -name: nuclio delete -description: delete a nuclio function. -inputs: - - {name: Name, type: String, description: 'function name'} - - {name: Namespace, type: String, description: 'Kubernetes namespace', default: ''} - - {name: Dashboard, type: String, description: 'nuclio dashboard service url', default: 'http://nuclio-dashboard.nuclio:8070'} -implementation: - container: - image: nuclio/pydeploy - command: [ - python, -m, nuclio, del, {inputValue: Name}, - --dashboard-url, {inputValue: Dashboard}, - --namespace, {inputValue: Namespace}, - ] \ No newline at end of file diff --git a/components/contrib/nuclio/deploy/component.yaml b/components/contrib/nuclio/deploy/component.yaml deleted file mode 100644 index b442b43dd47..00000000000 --- a/components/contrib/nuclio/deploy/component.yaml +++ /dev/null @@ -1,28 +0,0 @@ -name: nuclio deploy -description: auto build and deploy nuclio function. 
-inputs: - - {name: Url, type: String, description: 'url/path to source code, zip archive, git path, or notebook'} - - {name: Name, type: String, description: 'function name'} - - {name: Project, type: String, description: 'project name', default: 'default'} - - {name: Tag, type: String, description: 'function version tag', default: ''} - - {name: Dashboard, type: String, description: 'nuclio dashboard service url', default: 'http://nuclio-dashboard.nuclio:8070'} - - {name: Spec, type: String, description: 'override function spec, Json {key: value, ..}', default: ''} - - {name: Env, type: String, description: 'override function env var, Json {key: value, ..}', default: ''} - - {name: Mount, type: String, description: 'volume mount, [vol-type:]:', default: ''} -outputs: - - {name: Endpoint, type: String, description: 'function endpoint url'} -implementation: - container: - image: nuclio/pydeploy - command: [ - python, -m, nuclio, deploy, {inputValue: Url}, - --dashboard-url, {inputValue: Dashboard}, - --name, {inputValue: Name}, - --project, {inputValue: Project}, - --tag, {inputValue: Tag}, - --env-json, {inputValue: Env}, - --spec-json, {inputValue: Spec}, - --mount, {inputValue: Mount}, - ] - fileOutputs: - Endpoint: /tmp/output \ No newline at end of file diff --git a/components/contrib/nuclio/invoker/component.yaml b/components/contrib/nuclio/invoker/component.yaml deleted file mode 100644 index 3f0df3e42f1..00000000000 --- a/components/contrib/nuclio/invoker/component.yaml +++ /dev/null @@ -1,21 +0,0 @@ -name: nuclio invoker -description: invoke nuclio function. -inputs: - - {name: Url, type: String, description: 'function URL/endpoint'} - - {name: Body, type: String, description: 'request body', default: ''} - - {name: Log level, type: String, description: 'log level', default: 'info'} - - {name: Method, type: String, description: 'HTTP method GET|POST|..', default: ''} -outputs: - - {name: output, type: String, description: 'function output'} -implementation: - container: - image: nuclio/invoker - command: [ - invoke, - -a, {inputValue: Url}, - -b, {inputValue: Body}, - -m, {inputValue: Method}, - -l, {inputValue: Log level}, - ] - fileOutputs: - output: /tmp/output \ No newline at end of file diff --git a/components/contrib/openvino/model_convert/README.md b/components/contrib/openvino/model_convert/README.md deleted file mode 100644 index 7c24c00b42b..00000000000 --- a/components/contrib/openvino/model_convert/README.md +++ /dev/null @@ -1,327 +0,0 @@ -# Model optimization component - -This component is executing model optimization process using OpenVINO Toolkit and generate as output the model in -Intermediate Representation format. 
- -Component takes the following arguments: -* model input GCS path -* model optimizer parameters -* model output GCS path - -```bash -usage: convert_model.py [-h] [--input_path INPUT_PATH] - [--mo_options MO_OPTIONS] [--output_path OUTPUT_PATH] - -Model converter to OpenVINO Intermediate Representation format - -optional arguments: - -h, --help show this help message and exit - --input_path INPUT_PATH - GCS path of input model file or folder - --mo_options MO_OPTIONS - OpenVINO Model Optimizer options - --output_path OUTPUT_PATH - GCS path of output folder - -``` - -## Component parameters - -It takes as input GCS path to the input model in any of the OpenVINO supported frameworks: -* Tensorflow -* Caffe -* MXNET -* Kaldi -* ONNX - -Input model path can be a folder or an individual file which will be copied to a component working directory - -Model optimizer options can include any of the parameters supported by OpenVINO toolkit model optimizer. - -Refer to OpenVINO [documentation](https://software.intel.com/en-us/articles/OpenVINO-ModelOptimizer) for details. -```bash -mo.py --help -usage: mo.py [-h] [--framework {tf,caffe,mxnet,kaldi,onnx}] - [--input_model INPUT_MODEL] [--model_name MODEL_NAME] - [--output_dir OUTPUT_DIR] [--input_shape INPUT_SHAPE] - [--scale SCALE] [--reverse_input_channels] - [--log_level {CRITICAL,ERROR,WARN,WARNING,INFO,DEBUG,NOTSET}] - [--input INPUT] [--output OUTPUT] [--mean_values MEAN_VALUES] - [--scale_values SCALE_VALUES] - [--data_type {FP16,FP32,half,float}] [--disable_fusing] - [--disable_resnet_optimization] - [--finegrain_fusing FINEGRAIN_FUSING] [--disable_gfusing] - [--move_to_preprocess] [--extensions EXTENSIONS] [--batch BATCH] - [--version] [--silent] - [--freeze_placeholder_with_value FREEZE_PLACEHOLDER_WITH_VALUE] - [--generate_deprecated_IR_V2] [--input_model_is_text] - [--input_checkpoint INPUT_CHECKPOINT] - [--input_meta_graph INPUT_META_GRAPH] - [--saved_model_dir SAVED_MODEL_DIR] - [--saved_model_tags SAVED_MODEL_TAGS] - [--offload_unsupported_operations_to_tf] - [--tensorflow_subgraph_patterns TENSORFLOW_SUBGRAPH_PATTERNS] - [--tensorflow_operation_patterns TENSORFLOW_OPERATION_PATTERNS] - [--tensorflow_custom_operations_config_update TENSORFLOW_CUSTOM_OPERATIONS_CONFIG_UPDATE] - [--tensorflow_use_custom_operations_config TENSORFLOW_USE_CUSTOM_OPERATIONS_CONFIG] - [--tensorflow_object_detection_api_pipeline_config TENSORFLOW_OBJECT_DETECTION_API_PIPELINE_CONFIG] - [--tensorboard_logdir TENSORBOARD_LOGDIR] - [--tensorflow_custom_layer_libraries TENSORFLOW_CUSTOM_LAYER_LIBRARIES] - [--disable_nhwc_to_nchw] [--input_proto INPUT_PROTO] [-k K] - [--mean_file MEAN_FILE] [--mean_file_offsets MEAN_FILE_OFFSETS] - [--disable_omitting_optional] [--enable_flattening_nested_params] - [--input_symbol INPUT_SYMBOL] [--nd_prefix_name ND_PREFIX_NAME] - [--pretrained_model_name PRETRAINED_MODEL_NAME] - [--save_params_from_nd] [--legacy_mxnet_model] [--counts COUNTS] - [--remove_output_softmax] - -optional arguments: - -h, --help show this help message and exit - --framework {tf,caffe,mxnet,kaldi,onnx} - Name of the framework used to train the input model. - -Framework-agnostic parameters: - --input_model INPUT_MODEL, -w INPUT_MODEL, -m INPUT_MODEL - Tensorflow*: a file with a pre-trained model (binary - or text .pb file after freezing). Caffe*: a model - proto file with model weights - --model_name MODEL_NAME, -n MODEL_NAME - Model_name parameter passed to the final create_ir - transform. 
This parameter is used to name a network in - a generated IR and output .xml/.bin files. - --output_dir OUTPUT_DIR, -o OUTPUT_DIR - Directory that stores the generated IR. By default, it - is the directory from where the Model Optimizer is - launched. - --input_shape INPUT_SHAPE - Input shape(s) that should be fed to an input node(s) - of the model. Shape is defined as a comma-separated - list of integer numbers enclosed in parentheses or - square brackets, for example [1,3,227,227] or - (1,227,227,3), where the order of dimensions depends - on the framework input layout of the model. For - example, [N,C,H,W] is used for Caffe* models and - [N,H,W,C] for TensorFlow* models. Model Optimizer - performs necessary transformations to convert the - shape to the layout required by Inference Engine - (N,C,H,W). The shape should not contain undefined - dimensions (? or -1) and should fit the dimensions - defined in the input operation of the graph. If there - are multiple inputs in the model, --input_shape should - contain definition of shape for each input separated - by a comma, for example: [1,3,227,227],[2,4] for a - model with two inputs with 4D and 2D shapes. - --scale SCALE, -s SCALE - All input values coming from original network inputs - will be divided by this value. When a list of inputs - is overridden by the --input parameter, this scale is - not applied for any input that does not match with the - original input of the model. - --reverse_input_channels - Switch the input channels order from RGB to BGR (or - vice versa). Applied to original inputs of the model - if and only if a number of channels equals 3. Applied - after application of --mean_values and --scale_values - options, so numbers in --mean_values and - --scale_values go in the order of channels used in the - original model. - --log_level {CRITICAL,ERROR,WARN,WARNING,INFO,DEBUG,NOTSET} - Logger level - --input INPUT The name of the input operation of the given model. - Usually this is a name of the input placeholder of the - model. - --output OUTPUT The name of the output operation of the model. For - TensorFlow*, do not add :0 to this name. - --mean_values MEAN_VALUES, -ms MEAN_VALUES - Mean values to be used for the input image per - channel. Values to be provided in the (R,G,B) or - [R,G,B] format. Can be defined for desired input of - the model, for example: "--mean_values - data[255,255,255],info[255,255,255]". The exact - meaning and order of channels depend on how the - original model was trained. - --scale_values SCALE_VALUES - Scale values to be used for the input image per - channel. Values are provided in the (R,G,B) or [R,G,B] - format. Can be defined for desired input of the model, - for example: "--scale_values - data[255,255,255],info[255,255,255]". The exact - meaning and order of channels depend on how the - original model was trained. - --data_type {FP16,FP32,half,float} - Data type for all intermediate tensors and weights. If - original model is in FP32 and --data_type=FP16 is - specified, all model weights and biases are quantized - to FP16. - --disable_fusing Turn off fusing of linear operations to Convolution - --disable_resnet_optimization - Turn off resnet optimization - --finegrain_fusing FINEGRAIN_FUSING - Regex for layers/operations that won't be fused. 
- Example: --finegrain_fusing Convolution1,.*Scale.* - --disable_gfusing Turn off fusing of grouped convolutions - --move_to_preprocess Move mean values to IR preprocess section - --extensions EXTENSIONS - Directory or a comma separated list of directories - with extensions. To disable all extensions including - those that are placed at the default location, pass an - empty string. - --batch BATCH, -b BATCH - Input batch size - --version Version of Model Optimizer - --silent Prevent any output messages except those that - correspond to log level equals ERROR, that can be set - with the following option: --log_level. By default, - log level is already ERROR. - --freeze_placeholder_with_value FREEZE_PLACEHOLDER_WITH_VALUE - Replaces input layer with constant node with provided - value, e.g.: "node_name->True" - --generate_deprecated_IR_V2 - Force to generate legacy/deprecated IR V2 to work with - previous versions of the Inference Engine. The - resulting IR may or may not be correctly loaded by - Inference Engine API (including the most recent and - old versions of Inference Engine) and provided as a - partially-validated backup option for specific - deployment scenarios. Use it at your own discretion. - By default, without this option, the Model Optimizer - generates IR V3. - -TensorFlow*-specific parameters: - --input_model_is_text - TensorFlow*: treat the input model file as a text - protobuf format. If not specified, the Model Optimizer - treats it as a binary file by default. - --input_checkpoint INPUT_CHECKPOINT - TensorFlow*: variables file to load. - --input_meta_graph INPUT_META_GRAPH - Tensorflow*: a file with a meta-graph of the model - before freezing - --saved_model_dir SAVED_MODEL_DIR - TensorFlow*: directory representing non frozen model - --saved_model_tags SAVED_MODEL_TAGS - Group of tag(s) of the MetaGraphDef to load, in string - format, separated by ','. For tag-set contains - multiple tags, all tags must be passed in. - --offload_unsupported_operations_to_tf - TensorFlow*: automatically offload unsupported - operations to TensorFlow* - --tensorflow_subgraph_patterns TENSORFLOW_SUBGRAPH_PATTERNS - TensorFlow*: a list of comma separated patterns that - will be applied to TensorFlow* node names to infer a - part of the graph using TensorFlow*. - --tensorflow_operation_patterns TENSORFLOW_OPERATION_PATTERNS - TensorFlow*: a list of comma separated patterns that - will be applied to TensorFlow* node type (ops) to - infer these operations using TensorFlow*. - --tensorflow_custom_operations_config_update TENSORFLOW_CUSTOM_OPERATIONS_CONFIG_UPDATE - TensorFlow*: update the configuration file with node - name patterns with input/output nodes information. - --tensorflow_use_custom_operations_config TENSORFLOW_USE_CUSTOM_OPERATIONS_CONFIG - TensorFlow*: use the configuration file with custom - operation description. - --tensorflow_object_detection_api_pipeline_config TENSORFLOW_OBJECT_DETECTION_API_PIPELINE_CONFIG - TensorFlow*: path to the pipeline configuration file - used to generate model created with help of Object - Detection API. - --tensorboard_logdir TENSORBOARD_LOGDIR - TensorFlow*: dump the input graph to a given directory - that should be used with TensorBoard. - --tensorflow_custom_layer_libraries TENSORFLOW_CUSTOM_LAYER_LIBRARIES - TensorFlow*: comma separated list of shared libraries - with TensorFlow* custom operations implementation. 
- --disable_nhwc_to_nchw - Disables default translation from NHWC to NCHW - -Caffe*-specific parameters: - --input_proto INPUT_PROTO, -d INPUT_PROTO - Deploy-ready prototxt file that contains a topology - structure and layer attributes - -k K Path to CustomLayersMapping.xml to register custom - layers - --mean_file MEAN_FILE, -mf MEAN_FILE - Mean image to be used for the input. Should be a - binaryproto file - --mean_file_offsets MEAN_FILE_OFFSETS, -mo MEAN_FILE_OFFSETS - Mean image offsets to be used for the input - binaryproto file. When the mean image is bigger than - the expected input, it is cropped. By default, centers - of the input image and the mean image are the same and - the mean image is cropped by dimensions of the input - image. The format to pass this option is the - following: "-mo (x,y)". In this case, the mean file is - cropped by dimensions of the input image with offset - (x,y) from the upper left corner of the mean image - --disable_omitting_optional - Disable omitting optional attributes to be used for - custom layers. Use this option if you want to transfer - all attributes of a custom layer to IR. Default - behavior is to transfer the attributes with default - values and the attributes defined by the user to IR. - --enable_flattening_nested_params - Enable flattening optional params to be used for - custom layers. Use this option if you want to transfer - attributes of a custom layer to IR with flattened - nested parameters. Default behavior is to transfer the - attributes without flattening nested parameters. - -Mxnet-specific parameters: - --input_symbol INPUT_SYMBOL - Symbol file (for example, model-symbol.json) that - contains a topology structure and layer attributes - --nd_prefix_name ND_PREFIX_NAME - Prefix name for args.nd and argx.nd files. - --pretrained_model_name PRETRAINED_MODEL_NAME - Name of a pretrained MXNet model without extension and - epoch number. This model will be merged with args.nd - and argx.nd files - --save_params_from_nd - Enable saving built parameters file from .nd files - --legacy_mxnet_model Enable MXNet loader to make a model compatible with - the latest MXNet version. Use only if your model was - trained with MXNet version lower than 1.0.0 - -Kaldi-specific parameters: - --counts COUNTS Path to the counts file - --remove_output_softmax - Removes the SoftMax layer that is the output layer -``` - -The output folder specify then should be uploaded the generated model file in IR format with .bin and .xml -extensions. - -The component also creates 3 files including the paths to generated model: -- `/tmp/output.txt` - GSC path to the folder including the generated model files. -- `/tmp/bin_path.txt` - GSC path to weights model file -- `/tmp/xml_path.txt` - GSC path to graph model file -They can be used as parameters to be passed to other jobs in ML pipelines. - -## Examples - -Input path - gs://tensorflow_model_path/resnet/1/saved_model.pb
-MO options - --saved_model_dir .
-Output path - gs://tensorflow_model_path/resnet/1 - -Input path - gs://tensorflow_model_path/resnet/1
-MO options - --saved_model_dir 1
-Output path - gs://tensorflow_model_path/resnet/dldt/1
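For reference, the second example above corresponds roughly to the following invocation of the converter's CLI (see the usage section earlier in this README). This is a minimal sketch, not part of the original component: it assumes it runs inside the component's container image with GCS credentials already configured, as described in the local testing section below, and the bucket paths are purely illustrative.

```bash
# Download the SavedModel folder from GCS, run the OpenVINO Model Optimizer on it,
# and upload the generated IR files (.xml and .bin) to the output GCS folder.
# Assumes the component container and GCS credentials are already set up.
convert_model.py \
  --input_path gs://tensorflow_model_path/resnet/1 \
  --mo_options "--saved_model_dir 1" \
  --output_path gs://tensorflow_model_path/resnet/dldt/1
```

As noted above, the GCS paths of the generated .bin and .xml files are also written to `/tmp/bin_path.txt` and `/tmp/xml_path.txt`, so downstream pipeline steps can consume them as parameters.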
- - -## Building docker image - -```bash -docker build --build-arg http_proxy=$http_proxy --build-arg https_proxy=$https_proxy . -``` - -## Starting and testing the component locally - -This component requires GCP authentication token in json format generated for the service account, -which has access to GCS location. In the example below it is in key.json in the current path. - -```bash -COMMAND="convert_model.py --mo_options \"--saved_model_dir .\" --input_path gs://tensorflow_model_path/resnet/1/saved_model.pb --output_path gs://tensorflow_model_path/resnet/1" -docker run --rm -it -v $(pwd)/key.json:/etc/credentials/gcp-key.json \ --e GOOGLE_APPLICATION_CREDENTIALS=/etc/credentials/gcp-key.json $COMMAND - -``` - diff --git a/components/contrib/openvino/model_convert/containers/Dockerfile b/components/contrib/openvino/model_convert/containers/Dockerfile deleted file mode 100644 index 0775e1c7272..00000000000 --- a/components/contrib/openvino/model_convert/containers/Dockerfile +++ /dev/null @@ -1,36 +0,0 @@ -FROM ubuntu:16.04 -RUN apt-get update && apt-get install -y --no-install-recommends \ - curl ca-certificates \ - python3-pip \ - python-dev \ - gcc \ - python-setuptools \ - python3-setuptools \ - libgfortran3 \ - unzip \ - vim && \ - apt-get clean -RUN curl -L -o 2018_R5.tar.gz https://github.com/opencv/dldt/archive/2018_R5.tar.gz && \ - tar -zxf 2018_R5.tar.gz && \ - rm 2018_R5.tar.gz && \ - rm -Rf dldt-2018_R5/inference-engine -WORKDIR dldt-2018_R5/model-optimizer -RUN pip3 install --upgrade pip setuptools -RUN pip3 install -r requirements.txt -RUN curl -L -o google-cloud-sdk.zip https://dl.google.com/dl/cloudsdk/release/google-cloud-sdk.zip && \ - unzip -qq google-cloud-sdk.zip -d tools && \ - rm google-cloud-sdk.zip && \ - tools/google-cloud-sdk/install.sh --usage-reporting=false \ - --path-update=true --bash-completion=false \ - --disable-installation-options && \ - tools/google-cloud-sdk/bin/gcloud -q components update \ - gcloud core gsutil && \ - tools/google-cloud-sdk/bin/gcloud config set component_manager/disable_update_check true && \ - touch tools/google-cloud-sdk/lib/third_party/google.py && \ - pip install -U crcmod -ENV PATH ${PATH}:/dldt-2018_R5/model-optimizer:/dldt-2018_R5/model-optimizer/tools/google-cloud-sdk/bin -COPY convert_model.py . 
-RUN chmod 755 *.py -WORKDIR input - - diff --git a/components/contrib/openvino/model_convert/containers/convert_model.py b/components/contrib/openvino/model_convert/containers/convert_model.py deleted file mode 100644 index 74a28c11531..00000000000 --- a/components/contrib/openvino/model_convert/containers/convert_model.py +++ /dev/null @@ -1,106 +0,0 @@ -#!/usr/bin/python3 - -import argparse -import subprocess -import re -import os - - -def is_insecure_path(path): - # check if the path do not include insecure characters - if not re.match(r"^gs:\/\/[\.\w\/-]*$", path): - is_insecure = True - else: - is_insecure = False - return is_insecure - - -def are_insecure_mo_options(all_options): - # check if all passed options do not include insecure characters - is_insecure = False - for option in all_options.split(): - if not re.match(r"^[\.\w:\/-]*$", option): - is_insecure = True - return is_insecure - - -def main(): - parser = argparse.ArgumentParser( - description='Model converter to OpenVINO IR format') - parser.add_argument( - '--input_path', type=str, help='GCS path of input model file or folder') - parser.add_argument( - '--mo_options', type=str, help='OpenVINO Model Optimizer options') - parser.add_argument( - '--output_path', type=str, help='GCS path of output folder') - args = parser.parse_args() - - # Validate parameters - - if is_insecure_path(args.input_path): - print("Invalid input format") - exit(1) - - if is_insecure_path(args.output_path): - print("Invalid output format") - exit(1) - - if are_insecure_mo_options(args.mo_options): - print("Invalid model optimizer options") - exit(1) - - # Initialize gsutil creds if needed - if "GOOGLE_APPLICATION_CREDENTIALS" in os.environ: - command = "gcloud auth activate-service-account " \ - "--key-file=${GOOGLE_APPLICATION_CREDENTIALS}" - print("auth command", command) - return_code = subprocess.call(command, shell=True) - print("return code", return_code) - - # Downloading input model or GCS folder with a model to current folder - command = "gsutil cp -r " + args.input_path + " ." 
- print("gsutil download command", command) - return_code = subprocess.call(command, shell=True) - print("return code", return_code) - if return_code: - exit(1) - - # Executing model optimization - command = "mo.py " + args.mo_options - print("Starting model optimization:", command) - output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, - universal_newlines=True) - print("Model optimization output",output.stdout) - XML = "" - BIN = "" - for line in output.stdout.splitlines(): - if "[ SUCCESS ] XML file" in line: - XML = line.split(":")[1].strip() - if "[ SUCCESS ] BIN file" in line: - BIN = line.split(":")[1].strip() - if XML == "" or BIN == "": - print("Error, model optimization failed") - exit(1) - - command = "gsutil cp " + XML + " " + os.path.join(args.output_path, os.path.split(XML)[1]) - print("gsutil upload command", command) - return_code = subprocess.call(command, shell=True) - print("return code", return_code) - command = "gsutil cp " + BIN + " " + os.path.join(args.output_path, os.path.split(BIN)[1]) - print("gsutil upload command", command) - return_code = subprocess.call(command, shell=True) - print("return code", return_code) - if return_code: - exit(1) - - with open('/tmp/output_path.txt', 'w') as f: - f.write(args.output_path) - with open('/tmp/bin_path.txt', 'w') as f: - f.write(os.path.join(args.output_path, os.path.split(BIN)[1])) - with open('/tmp/xml_path.txt', 'w') as f: - f.write(os.path.join(args.output_path, os.path.split(XML)[1])) - - print("Model successfully generated and uploaded to ", args.output_path) - -if __name__ == "__main__": - main() diff --git a/components/contrib/openvino/ovms-deployer/README.md b/components/contrib/openvino/ovms-deployer/README.md deleted file mode 100644 index 2bd5194fe63..00000000000 --- a/components/contrib/openvino/ovms-deployer/README.md +++ /dev/null @@ -1,32 +0,0 @@ -# Deployer of OpenVINO Model Server - -This component triggers deployment of [OpenVINO Model Server](https://github.com/IntelAI/OpenVINO-model-server) in Kubernetes. - -It applies the passed component parameters on jinja template and applied deployment and server records. - - - -```bash -./deploy.sh - --model-export-path - --cluster-name - --namespace - --server-name - --replicas - --batch-size - --model-version-policy - --log-level -``` - - -## building docker image - - -```bash -docker build --build-arg http_proxy=$http_proxy --build-arg https_proxy=$https_proxy . -``` - -## testing the image locally - - -``` \ No newline at end of file diff --git a/components/contrib/openvino/ovms-deployer/containers/Dockerfile b/components/contrib/openvino/ovms-deployer/containers/Dockerfile deleted file mode 100644 index bccd406f1e7..00000000000 --- a/components/contrib/openvino/ovms-deployer/containers/Dockerfile +++ /dev/null @@ -1,44 +0,0 @@ -FROM intelpython/intelpython3_core - -RUN apt-get update -q && apt-get upgrade -y && \ - apt-get install -y -qq --no-install-recommends \ - apt-transport-https \ - ca-certificates \ - git \ - gnupg \ - lsb-release \ - unzip \ - wget && \ - wget --no-verbose -O /opt/ks_0.12.0_linux_amd64.tar.gz \ - https://github.com/ksonnet/ksonnet/releases/download/v0.12.0/ks_0.12.0_linux_amd64.tar.gz && \ - tar -C /opt -xzf /opt/ks_0.12.0_linux_amd64.tar.gz && \ - cp /opt/ks_0.12.0_linux_amd64/ks /bin/. 
&& \ - rm -f /opt/ks_0.12.0_linux_amd64.tar.gz && \ - wget --no-verbose -O /bin/kubectl \ - https://storage.googleapis.com/kubernetes-release/release/v1.11.2/bin/linux/amd64/kubectl && \ - chmod u+x /bin/kubectl && \ - wget --no-verbose -O /opt/kubernetes_v1.11.2 \ - https://github.com/kubernetes/kubernetes/archive/v1.11.2.tar.gz && \ - mkdir -p /src && \ - tar -C /src -xzf /opt/kubernetes_v1.11.2 && \ - rm -rf /opt/kubernetes_v1.11.2 && \ - wget --no-verbose -O /opt/google-apt-key.gpg \ - https://packages.cloud.google.com/apt/doc/apt-key.gpg && \ - apt-key add /opt/google-apt-key.gpg && \ - export CLOUD_SDK_REPO="cloud-sdk-$(lsb_release -c -s)" && \ - echo "deb https://packages.cloud.google.com/apt $CLOUD_SDK_REPO main" >> \ - /etc/apt/sources.list.d/google-cloud-sdk.list && \ - apt-get update -q && \ - apt-get install -y -qq --no-install-recommends google-cloud-sdk && \ - gcloud config set component_manager/disable_update_check true - -RUN conda install -y opencv && conda clean -a -y -ADD requirements.txt /deploy/ -WORKDIR /deploy -RUN pip install -r requirements.txt -ADD apply_template.py deploy.sh evaluate.py ovms.j2 classes.py /deploy/ -ENTRYPOINT ["./deploy.sh"] - - - - diff --git a/components/contrib/openvino/ovms-deployer/containers/apply_template.py b/components/contrib/openvino/ovms-deployer/containers/apply_template.py deleted file mode 100644 index f3c5243a95e..00000000000 --- a/components/contrib/openvino/ovms-deployer/containers/apply_template.py +++ /dev/null @@ -1,14 +0,0 @@ -from jinja2 import Template -import os - - -f = open("ovms.j2","r") -ovms_template = f.read() -t = Template(ovms_template) -ovms_k8s = t.render(os.environ) -f.close -f = open("ovms.yaml", "w") -f.write(ovms_k8s) -f.close - -print(ovms_k8s) \ No newline at end of file diff --git a/components/contrib/openvino/ovms-deployer/containers/classes.py b/components/contrib/openvino/ovms-deployer/containers/classes.py deleted file mode 100644 index 9a27f0017d6..00000000000 --- a/components/contrib/openvino/ovms-deployer/containers/classes.py +++ /dev/null @@ -1,1000 +0,0 @@ -imagenet_classes = {0: 'tench, Tinca tinca', - 1: 'goldfish, Carassius auratus', - 2: 'great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias', - 3: 'tiger shark, Galeocerdo cuvieri', - 4: 'hammerhead, hammerhead shark', - 5: 'electric ray, crampfish, numbfish, torpedo', - 6: 'stingray', - 7: 'cock', - 8: 'hen', - 9: 'ostrich, Struthio camelus', - 10: 'brambling, Fringilla montifringilla', - 11: 'goldfinch, Carduelis carduelis', - 12: 'house finch, linnet, Carpodacus mexicanus', - 13: 'junco, snowbird', - 14: 'indigo bunting, indigo finch, indigo bird, Passerina cyanea', - 15: 'robin, American robin, Turdus migratorius', - 16: 'bulbul', - 17: 'jay', - 18: 'magpie', - 19: 'chickadee', - 20: 'water ouzel, dipper', - 21: 'kite', - 22: 'bald eagle, American eagle, Haliaeetus leucocephalus', - 23: 'vulture', - 24: 'great grey owl, great gray owl, Strix nebulosa', - 25: 'European fire salamander, Salamandra salamandra', - 26: 'common newt, Triturus vulgaris', - 27: 'eft', - 28: 'spotted salamander, Ambystoma maculatum', - 29: 'axolotl, mud puppy, Ambystoma mexicanum', - 30: 'bullfrog, Rana catesbeiana', - 31: 'tree frog, tree-frog', - 32: 'tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui', - 33: 'loggerhead, loggerhead turtle, Caretta caretta', - 34: 'leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea', - 35: 'mud turtle', - 36: 'terrapin', - 37: 'box turtle, box tortoise', - 38: 
'banded gecko', - 39: 'common iguana, iguana, Iguana iguana', - 40: 'American chameleon, anole, Anolis carolinensis', - 41: 'whiptail, whiptail lizard', - 42: 'agama', - 43: 'frilled lizard, Chlamydosaurus kingi', - 44: 'alligator lizard', - 45: 'Gila monster, Heloderma suspectum', - 46: 'green lizard, Lacerta viridis', - 47: 'African chameleon, Chamaeleo chamaeleon', - 48: 'Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis', - 49: 'African crocodile, Nile crocodile, Crocodylus niloticus', - 50: 'American alligator, Alligator mississipiensis', - 51: 'triceratops', - 52: 'thunder snake, worm snake, Carphophis amoenus', - 53: 'ringneck snake, ring-necked snake, ring snake', - 54: 'hognose snake, puff adder, sand viper', - 55: 'green snake, grass snake', - 56: 'king snake, kingsnake', - 57: 'garter snake, grass snake', - 58: 'water snake', - 59: 'vine snake', - 60: 'night snake, Hypsiglena torquata', - 61: 'boa constrictor, Constrictor constrictor', - 62: 'rock python, rock snake, Python sebae', - 63: 'Indian cobra, Naja naja', - 64: 'green mamba', - 65: 'sea snake', - 66: 'horned viper, cerastes, sand viper, horned asp, Cerastes cornutus', - 67: 'diamondback, diamondback rattlesnake, Crotalus adamanteus', - 68: 'sidewinder, horned rattlesnake, Crotalus cerastes', - 69: 'trilobite', - 70: 'harvestman, daddy longlegs, Phalangium opilio', - 71: 'scorpion', - 72: 'black and gold garden spider, Argiope aurantia', - 73: 'barn spider, Araneus cavaticus', - 74: 'garden spider, Aranea diademata', - 75: 'black widow, Latrodectus mactans', - 76: 'tarantula', - 77: 'wolf spider, hunting spider', - 78: 'tick', - 79: 'centipede', - 80: 'black grouse', - 81: 'ptarmigan', - 82: 'ruffed grouse, partridge, Bonasa umbellus', - 83: 'prairie chicken, prairie grouse, prairie fowl', - 84: 'peacock', - 85: 'quail', - 86: 'partridge', - 87: 'African grey, African gray, Psittacus erithacus', - 88: 'macaw', - 89: 'sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita', - 90: 'lorikeet', - 91: 'coucal', - 92: 'bee eater', - 93: 'hornbill', - 94: 'hummingbird', - 95: 'jacamar', - 96: 'toucan', - 97: 'drake', - 98: 'red-breasted merganser, Mergus serrator', - 99: 'goose', - 100: 'black swan, Cygnus atratus', - 101: 'tusker', - 102: 'echidna, spiny anteater, anteater', - 103: 'platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus', - 104: 'wallaby, brush kangaroo', - 105: 'koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus', - 106: 'wombat', - 107: 'jellyfish', - 108: 'sea anemone, anemone', - 109: 'brain coral', - 110: 'flatworm, platyhelminth', - 111: 'nematode, nematode worm, roundworm', - 112: 'conch', - 113: 'snail', - 114: 'slug', - 115: 'sea slug, nudibranch', - 116: 'chiton, coat-of-mail shell, sea cradle, polyplacophore', - 117: 'chambered nautilus, pearly nautilus, nautilus', - 118: 'Dungeness crab, Cancer magister', - 119: 'rock crab, Cancer irroratus', - 120: 'fiddler crab', - 121: 'king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica', - 122: 'American lobster, Northern lobster, Maine lobster, Homarus americanus', - 123: 'spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish', - 124: 'crayfish, crawfish, crawdad, crawdaddy', - 125: 'hermit crab', - 126: 'isopod', - 127: 'white stork, Ciconia ciconia', - 128: 'black stork, Ciconia nigra', - 129: 'spoonbill', - 130: 'flamingo', - 131: 'little blue heron, Egretta caerulea', - 132: 'American egret, great white heron, 
Egretta albus', - 133: 'bittern', - 134: 'crane', - 135: 'limpkin, Aramus pictus', - 136: 'European gallinule, Porphyrio porphyrio', - 137: 'American coot, marsh hen, mud hen, water hen, Fulica americana', - 138: 'bustard', - 139: 'ruddy turnstone, Arenaria interpres', - 140: 'red-backed sandpiper, dunlin, Erolia alpina', - 141: 'redshank, Tringa totanus', - 142: 'dowitcher', - 143: 'oystercatcher, oyster catcher', - 144: 'pelican', - 145: 'king penguin, Aptenodytes patagonica', - 146: 'albatross, mollymawk', - 147: 'grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus', - 148: 'killer whale, killer, orca, grampus, sea wolf, Orcinus orca', - 149: 'dugong, Dugong dugon', - 150: 'sea lion', - 151: 'Chihuahua', - 152: 'Japanese spaniel', - 153: 'Maltese dog, Maltese terrier, Maltese', - 154: 'Pekinese, Pekingese, Peke', - 155: 'Shih-Tzu', - 156: 'Blenheim spaniel', - 157: 'papillon', - 158: 'toy terrier', - 159: 'Rhodesian ridgeback', - 160: 'Afghan hound, Afghan', - 161: 'basset, basset hound', - 162: 'beagle', - 163: 'bloodhound, sleuthhound', - 164: 'bluetick', - 165: 'black-and-tan coonhound', - 166: 'Walker hound, Walker foxhound', - 167: 'English foxhound', - 168: 'redbone', - 169: 'borzoi, Russian wolfhound', - 170: 'Irish wolfhound', - 171: 'Italian greyhound', - 172: 'whippet', - 173: 'Ibizan hound, Ibizan Podenco', - 174: 'Norwegian elkhound, elkhound', - 175: 'otterhound, otter hound', - 176: 'Saluki, gazelle hound', - 177: 'Scottish deerhound, deerhound', - 178: 'Weimaraner', - 179: 'Staffordshire bullterrier, Staffordshire bull terrier', - 180: 'American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier', - 181: 'Bedlington terrier', - 182: 'Border terrier', - 183: 'Kerry blue terrier', - 184: 'Irish terrier', - 185: 'Norfolk terrier', - 186: 'Norwich terrier', - 187: 'Yorkshire terrier', - 188: 'wire-haired fox terrier', - 189: 'Lakeland terrier', - 190: 'Sealyham terrier, Sealyham', - 191: 'Airedale, Airedale terrier', - 192: 'cairn, cairn terrier', - 193: 'Australian terrier', - 194: 'Dandie Dinmont, Dandie Dinmont terrier', - 195: 'Boston bull, Boston terrier', - 196: 'miniature schnauzer', - 197: 'giant schnauzer', - 198: 'standard schnauzer', - 199: 'Scotch terrier, Scottish terrier, Scottie', - 200: 'Tibetan terrier, chrysanthemum dog', - 201: 'silky terrier, Sydney silky', - 202: 'soft-coated wheaten terrier', - 203: 'West Highland white terrier', - 204: 'Lhasa, Lhasa apso', - 205: 'flat-coated retriever', - 206: 'curly-coated retriever', - 207: 'golden retriever', - 208: 'Labrador retriever', - 209: 'Chesapeake Bay retriever', - 210: 'German short-haired pointer', - 211: 'vizsla, Hungarian pointer', - 212: 'English setter', - 213: 'Irish setter, red setter', - 214: 'Gordon setter', - 215: 'Brittany spaniel', - 216: 'clumber, clumber spaniel', - 217: 'English springer, English springer spaniel', - 218: 'Welsh springer spaniel', - 219: 'cocker spaniel, English cocker spaniel, cocker', - 220: 'Sussex spaniel', - 221: 'Irish water spaniel', - 222: 'kuvasz', - 223: 'schipperke', - 224: 'groenendael', - 225: 'malinois', - 226: 'briard', - 227: 'kelpie', - 228: 'komondor', - 229: 'Old English sheepdog, bobtail', - 230: 'Shetland sheepdog, Shetland sheep dog, Shetland', - 231: 'collie', - 232: 'Border collie', - 233: 'Bouvier des Flandres, Bouviers des Flandres', - 234: 'Rottweiler', - 235: 'German shepherd, German shepherd dog, German police dog, alsatian', - 236: 'Doberman, Doberman pinscher', - 237: 'miniature 
pinscher', - 238: 'Greater Swiss Mountain dog', - 239: 'Bernese mountain dog', - 240: 'Appenzeller', - 241: 'EntleBucher', - 242: 'boxer', - 243: 'bull mastiff', - 244: 'Tibetan mastiff', - 245: 'French bulldog', - 246: 'Great Dane', - 247: 'Saint Bernard, St Bernard', - 248: 'Eskimo dog, husky', - 249: 'malamute, malemute, Alaskan malamute', - 250: 'Siberian husky', - 251: 'dalmatian, coach dog, carriage dog', - 252: 'affenpinscher, monkey pinscher, monkey dog', - 253: 'basenji', - 254: 'pug, pug-dog', - 255: 'Leonberg', - 256: 'Newfoundland, Newfoundland dog', - 257: 'Great Pyrenees', - 258: 'Samoyed, Samoyede', - 259: 'Pomeranian', - 260: 'chow, chow chow', - 261: 'keeshond', - 262: 'Brabancon griffon', - 263: 'Pembroke, Pembroke Welsh corgi', - 264: 'Cardigan, Cardigan Welsh corgi', - 265: 'toy poodle', - 266: 'miniature poodle', - 267: 'standard poodle', - 268: 'Mexican hairless', - 269: 'timber wolf, grey wolf, gray wolf, Canis lupus', - 270: 'white wolf, Arctic wolf, Canis lupus tundrarum', - 271: 'red wolf, maned wolf, Canis rufus, Canis niger', - 272: 'coyote, prairie wolf, brush wolf, Canis latrans', - 273: 'dingo, warrigal, warragal, Canis dingo', - 274: 'dhole, Cuon alpinus', - 275: 'African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus', - 276: 'hyena, hyaena', - 277: 'red fox, Vulpes vulpes', - 278: 'kit fox, Vulpes macrotis', - 279: 'Arctic fox, white fox, Alopex lagopus', - 280: 'grey fox, gray fox, Urocyon cinereoargenteus', - 281: 'tabby, tabby cat', - 282: 'tiger cat', - 283: 'Persian cat', - 284: 'Siamese cat, Siamese', - 285: 'Egyptian cat', - 286: 'cougar, puma, catamount, mountain lion, painter, panther, Felis concolor', - 287: 'lynx, catamount', - 288: 'leopard, Panthera pardus', - 289: 'snow leopard, ounce, Panthera uncia', - 290: 'jaguar, panther, Panthera onca, Felis onca', - 291: 'lion, king of beasts, Panthera leo', - 292: 'tiger, Panthera tigris', - 293: 'cheetah, chetah, Acinonyx jubatus', - 294: 'brown bear, bruin, Ursus arctos', - 295: 'American black bear, black bear, Ursus americanus, Euarctos americanus', - 296: 'ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus', - 297: 'sloth bear, Melursus ursinus, Ursus ursinus', - 298: 'mongoose', - 299: 'meerkat, mierkat', - 300: 'tiger beetle', - 301: 'ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle', - 302: 'ground beetle, carabid beetle', - 303: 'long-horned beetle, longicorn, longicorn beetle', - 304: 'leaf beetle, chrysomelid', - 305: 'dung beetle', - 306: 'rhinoceros beetle', - 307: 'weevil', - 308: 'fly', - 309: 'bee', - 310: 'ant, emmet, pismire', - 311: 'grasshopper, hopper', - 312: 'cricket', - 313: 'walking stick, walkingstick, stick insect', - 314: 'cockroach, roach', - 315: 'mantis, mantid', - 316: 'cicada, cicala', - 317: 'leafhopper', - 318: 'lacewing, lacewing fly', - 319: "dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk", - 320: 'damselfly', - 321: 'admiral', - 322: 'ringlet, ringlet butterfly', - 323: 'monarch, monarch butterfly, milkweed butterfly, Danaus plexippus', - 324: 'cabbage butterfly', - 325: 'sulphur butterfly, sulfur butterfly', - 326: 'lycaenid, lycaenid butterfly', - 327: 'starfish, sea star', - 328: 'sea urchin', - 329: 'sea cucumber, holothurian', - 330: 'wood rabbit, cottontail, cottontail rabbit', - 331: 'hare', - 332: 'Angora, Angora rabbit', - 333: 'hamster', - 334: 'porcupine, hedgehog', - 335: 'fox squirrel, eastern fox squirrel, Sciurus niger', - 336: 'marmot', - 337: 
'beaver', - 338: 'guinea pig, Cavia cobaya', - 339: 'sorrel', - 340: 'zebra', - 341: 'hog, pig, grunter, squealer, Sus scrofa', - 342: 'wild boar, boar, Sus scrofa', - 343: 'warthog', - 344: 'hippopotamus, hippo, river horse, Hippopotamus amphibius', - 345: 'ox', - 346: 'water buffalo, water ox, Asiatic buffalo, Bubalus bubalis', - 347: 'bison', - 348: 'ram, tup', - 349: 'bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis', - 350: 'ibex, Capra ibex', - 351: 'hartebeest', - 352: 'impala, Aepyceros melampus', - 353: 'gazelle', - 354: 'Arabian camel, dromedary, Camelus dromedarius', - 355: 'llama', - 356: 'weasel', - 357: 'mink', - 358: 'polecat, fitch, foulmart, foumart, Mustela putorius', - 359: 'black-footed ferret, ferret, Mustela nigripes', - 360: 'otter', - 361: 'skunk, polecat, wood pussy', - 362: 'badger', - 363: 'armadillo', - 364: 'three-toed sloth, ai, Bradypus tridactylus', - 365: 'orangutan, orang, orangutang, Pongo pygmaeus', - 366: 'gorilla, Gorilla gorilla', - 367: 'chimpanzee, chimp, Pan troglodytes', - 368: 'gibbon, Hylobates lar', - 369: 'siamang, Hylobates syndactylus, Symphalangus syndactylus', - 370: 'guenon, guenon monkey', - 371: 'patas, hussar monkey, Erythrocebus patas', - 372: 'baboon', - 373: 'macaque', - 374: 'langur', - 375: 'colobus, colobus monkey', - 376: 'proboscis monkey, Nasalis larvatus', - 377: 'marmoset', - 378: 'capuchin, ringtail, Cebus capucinus', - 379: 'howler monkey, howler', - 380: 'titi, titi monkey', - 381: 'spider monkey, Ateles geoffroyi', - 382: 'squirrel monkey, Saimiri sciureus', - 383: 'Madagascar cat, ring-tailed lemur, Lemur catta', - 384: 'indri, indris, Indri indri, Indri brevicaudatus', - 385: 'Indian elephant, Elephas maximus', - 386: 'African elephant, Loxodonta africana', - 387: 'lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens', - 388: 'giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca', - 389: 'barracouta, snoek', - 390: 'eel', - 391: 'coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch', - 392: 'rock beauty, Holocanthus tricolor', - 393: 'anemone fish', - 394: 'sturgeon', - 395: 'gar, garfish, garpike, billfish, Lepisosteus osseus', - 396: 'lionfish', - 397: 'puffer, pufferfish, blowfish, globefish', - 398: 'abacus', - 399: 'abaya', - 400: "academic gown, academic robe, judge's robe", - 401: 'accordion, piano accordion, squeeze box', - 402: 'acoustic guitar', - 403: 'aircraft carrier, carrier, flattop, attack aircraft carrier', - 404: 'airliner', - 405: 'airship, dirigible', - 406: 'altar', - 407: 'ambulance', - 408: 'amphibian, amphibious vehicle', - 409: 'analog clock', - 410: 'apiary, bee house', - 411: 'apron', - 412: 'ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin', - 413: 'assault rifle, assault gun', - 414: 'backpack, back pack, knapsack, packsack, rucksack, haversack', - 415: 'bakery, bakeshop, bakehouse', - 416: 'balance beam, beam', - 417: 'balloon', - 418: 'ballpoint, ballpoint pen, ballpen, Biro', - 419: 'Band Aid', - 420: 'banjo', - 421: 'bannister, banister, balustrade, balusters, handrail', - 422: 'barbell', - 423: 'barber chair', - 424: 'barbershop', - 425: 'barn', - 426: 'barometer', - 427: 'barrel, cask', - 428: 'barrow, garden cart, lawn cart, wheelbarrow', - 429: 'baseball', - 430: 'basketball', - 431: 'bassinet', - 432: 'bassoon', - 433: 'bathing cap, swimming cap', - 434: 'bath towel', - 435: 'bathtub, bathing tub, bath, tub', - 436: 'beach wagon, station 
wagon, wagon, estate car, beach waggon, station waggon, waggon', - 437: 'beacon, lighthouse, beacon light, pharos', - 438: 'beaker', - 439: 'bearskin, busby, shako', - 440: 'beer bottle', - 441: 'beer glass', - 442: 'bell cote, bell cot', - 443: 'bib', - 444: 'bicycle-built-for-two, tandem bicycle, tandem', - 445: 'bikini, two-piece', - 446: 'binder, ring-binder', - 447: 'binoculars, field glasses, opera glasses', - 448: 'birdhouse', - 449: 'boathouse', - 450: 'bobsled, bobsleigh, bob', - 451: 'bolo tie, bolo, bola tie, bola', - 452: 'bonnet, poke bonnet', - 453: 'bookcase', - 454: 'bookshop, bookstore, bookstall', - 455: 'bottlecap', - 456: 'bow', - 457: 'bow tie, bow-tie, bowtie', - 458: 'brass, memorial tablet, plaque', - 459: 'brassiere, bra, bandeau', - 460: 'breakwater, groin, groyne, mole, bulwark, seawall, jetty', - 461: 'breastplate, aegis, egis', - 462: 'broom', - 463: 'bucket, pail', - 464: 'buckle', - 465: 'bulletproof vest', - 466: 'bullet train, bullet', - 467: 'butcher shop, meat market', - 468: 'cab, hack, taxi, taxicab', - 469: 'caldron, cauldron', - 470: 'candle, taper, wax light', - 471: 'cannon', - 472: 'canoe', - 473: 'can opener, tin opener', - 474: 'cardigan', - 475: 'car mirror', - 476: 'carousel, carrousel, merry-go-round, roundabout, whirligig', - 477: "carpenter's kit, tool kit", - 478: 'carton', - 479: 'car wheel', - 480: 'cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM', - 481: 'cassette', - 482: 'cassette player', - 483: 'castle', - 484: 'catamaran', - 485: 'CD player', - 486: 'cello, violoncello', - 487: 'cellular telephone, cellular phone, cellphone, cell, mobile phone', - 488: 'chain', - 489: 'chainlink fence', - 490: 'chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour', - 491: 'chain saw, chainsaw', - 492: 'chest', - 493: 'chiffonier, commode', - 494: 'chime, bell, gong', - 495: 'china cabinet, china closet', - 496: 'Christmas stocking', - 497: 'church, church building', - 498: 'cinema, movie theater, movie theatre, movie house, picture palace', - 499: 'cleaver, meat cleaver, chopper', - 500: 'cliff dwelling', - 501: 'cloak', - 502: 'clog, geta, patten, sabot', - 503: 'cocktail shaker', - 504: 'coffee mug', - 505: 'coffeepot', - 506: 'coil, spiral, volute, whorl, helix', - 507: 'combination lock', - 508: 'computer keyboard, keypad', - 509: 'confectionery, confectionary, candy store', - 510: 'container ship, containership, container vessel', - 511: 'convertible', - 512: 'corkscrew, bottle screw', - 513: 'cornet, horn, trumpet, trump', - 514: 'cowboy boot', - 515: 'cowboy hat, ten-gallon hat', - 516: 'cradle', - 517: 'crane', - 518: 'crash helmet', - 519: 'crate', - 520: 'crib, cot', - 521: 'Crock Pot', - 522: 'croquet ball', - 523: 'crutch', - 524: 'cuirass', - 525: 'dam, dike, dyke', - 526: 'desk', - 527: 'desktop computer', - 528: 'dial telephone, dial phone', - 529: 'diaper, nappy, napkin', - 530: 'digital clock', - 531: 'digital watch', - 532: 'dining table, board', - 533: 'dishrag, dishcloth', - 534: 'dishwasher, dish washer, dishwashing machine', - 535: 'disk brake, disc brake', - 536: 'dock, dockage, docking facility', - 537: 'dogsled, dog sled, dog sleigh', - 538: 'dome', - 539: 'doormat, welcome mat', - 540: 'drilling platform, offshore rig', - 541: 'drum, membranophone, tympan', - 542: 'drumstick', - 543: 'dumbbell', - 544: 'Dutch oven', - 545: 'electric fan, blower', - 546: 'electric guitar', - 547: 'electric locomotive', - 548: 'entertainment 
center', - 549: 'envelope', - 550: 'espresso maker', - 551: 'face powder', - 552: 'feather boa, boa', - 553: 'file, file cabinet, filing cabinet', - 554: 'fireboat', - 555: 'fire engine, fire truck', - 556: 'fire screen, fireguard', - 557: 'flagpole, flagstaff', - 558: 'flute, transverse flute', - 559: 'folding chair', - 560: 'football helmet', - 561: 'forklift', - 562: 'fountain', - 563: 'fountain pen', - 564: 'four-poster', - 565: 'freight car', - 566: 'French horn, horn', - 567: 'frying pan, frypan, skillet', - 568: 'fur coat', - 569: 'garbage truck, dustcart', - 570: 'gasmask, respirator, gas helmet', - 571: 'gas pump, gasoline pump, petrol pump, island dispenser', - 572: 'goblet', - 573: 'go-kart', - 574: 'golf ball', - 575: 'golfcart, golf cart', - 576: 'gondola', - 577: 'gong, tam-tam', - 578: 'gown', - 579: 'grand piano, grand', - 580: 'greenhouse, nursery, glasshouse', - 581: 'grille, radiator grille', - 582: 'grocery store, grocery, food market, market', - 583: 'guillotine', - 584: 'hair slide', - 585: 'hair spray', - 586: 'half track', - 587: 'hammer', - 588: 'hamper', - 589: 'hand blower, blow dryer, blow drier, hair dryer, hair drier', - 590: 'hand-held computer, hand-held microcomputer', - 591: 'handkerchief, hankie, hanky, hankey', - 592: 'hard disc, hard disk, fixed disk', - 593: 'harmonica, mouth organ, harp, mouth harp', - 594: 'harp', - 595: 'harvester, reaper', - 596: 'hatchet', - 597: 'holster', - 598: 'home theater, home theatre', - 599: 'honeycomb', - 600: 'hook, claw', - 601: 'hoopskirt, crinoline', - 602: 'horizontal bar, high bar', - 603: 'horse cart, horse-cart', - 604: 'hourglass', - 605: 'iPod', - 606: 'iron, smoothing iron', - 607: "jack-o'-lantern", - 608: 'jean, blue jean, denim', - 609: 'jeep, landrover', - 610: 'jersey, T-shirt, tee shirt', - 611: 'jigsaw puzzle', - 612: 'jinrikisha, ricksha, rickshaw', - 613: 'joystick', - 614: 'kimono', - 615: 'knee pad', - 616: 'knot', - 617: 'lab coat, laboratory coat', - 618: 'ladle', - 619: 'lampshade, lamp shade', - 620: 'laptop, laptop computer', - 621: 'lawn mower, mower', - 622: 'lens cap, lens cover', - 623: 'letter opener, paper knife, paperknife', - 624: 'library', - 625: 'lifeboat', - 626: 'lighter, light, igniter, ignitor', - 627: 'limousine, limo', - 628: 'liner, ocean liner', - 629: 'lipstick, lip rouge', - 630: 'Loafer', - 631: 'lotion', - 632: 'loudspeaker, speaker, speaker unit, loudspeaker system, speaker system', - 633: "loupe, jeweler's loupe", - 634: 'lumbermill, sawmill', - 635: 'magnetic compass', - 636: 'mailbag, postbag', - 637: 'mailbox, letter box', - 638: 'maillot', - 639: 'maillot, tank suit', - 640: 'manhole cover', - 641: 'maraca', - 642: 'marimba, xylophone', - 643: 'mask', - 644: 'matchstick', - 645: 'maypole', - 646: 'maze, labyrinth', - 647: 'measuring cup', - 648: 'medicine chest, medicine cabinet', - 649: 'megalith, megalithic structure', - 650: 'microphone, mike', - 651: 'microwave, microwave oven', - 652: 'military uniform', - 653: 'milk can', - 654: 'minibus', - 655: 'miniskirt, mini', - 656: 'minivan', - 657: 'missile', - 658: 'mitten', - 659: 'mixing bowl', - 660: 'mobile home, manufactured home', - 661: 'Model T', - 662: 'modem', - 663: 'monastery', - 664: 'monitor', - 665: 'moped', - 666: 'mortar', - 667: 'mortarboard', - 668: 'mosque', - 669: 'mosquito net', - 670: 'motor scooter, scooter', - 671: 'mountain bike, all-terrain bike, off-roader', - 672: 'mountain tent', - 673: 'mouse, computer mouse', - 674: 'mousetrap', - 675: 'moving van', - 676: 'muzzle', - 677: 'nail', - 
678: 'neck brace', - 679: 'necklace', - 680: 'nipple', - 681: 'notebook, notebook computer', - 682: 'obelisk', - 683: 'oboe, hautboy, hautbois', - 684: 'ocarina, sweet potato', - 685: 'odometer, hodometer, mileometer, milometer', - 686: 'oil filter', - 687: 'organ, pipe organ', - 688: 'oscilloscope, scope, cathode-ray oscilloscope, CRO', - 689: 'overskirt', - 690: 'oxcart', - 691: 'oxygen mask', - 692: 'packet', - 693: 'paddle, boat paddle', - 694: 'paddlewheel, paddle wheel', - 695: 'padlock', - 696: 'paintbrush', - 697: "pajama, pyjama, pj's, jammies", - 698: 'palace', - 699: 'panpipe, pandean pipe, syrinx', - 700: 'paper towel', - 701: 'parachute, chute', - 702: 'parallel bars, bars', - 703: 'park bench', - 704: 'parking meter', - 705: 'passenger car, coach, carriage', - 706: 'patio, terrace', - 707: 'pay-phone, pay-station', - 708: 'pedestal, plinth, footstall', - 709: 'pencil box, pencil case', - 710: 'pencil sharpener', - 711: 'perfume, essence', - 712: 'Petri dish', - 713: 'photocopier', - 714: 'pick, plectrum, plectron', - 715: 'pickelhaube', - 716: 'picket fence, paling', - 717: 'pickup, pickup truck', - 718: 'pier', - 719: 'piggy bank, penny bank', - 720: 'pill bottle', - 721: 'pillow', - 722: 'ping-pong ball', - 723: 'pinwheel', - 724: 'pirate, pirate ship', - 725: 'pitcher, ewer', - 726: "plane, carpenter's plane, woodworking plane", - 727: 'planetarium', - 728: 'plastic bag', - 729: 'plate rack', - 730: 'plow, plough', - 731: "plunger, plumber's helper", - 732: 'Polaroid camera, Polaroid Land camera', - 733: 'pole', - 734: 'police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria', - 735: 'poncho', - 736: 'pool table, billiard table, snooker table', - 737: 'pop bottle, soda bottle', - 738: 'pot, flowerpot', - 739: "potter's wheel", - 740: 'power drill', - 741: 'prayer rug, prayer mat', - 742: 'printer', - 743: 'prison, prison house', - 744: 'projectile, missile', - 745: 'projector', - 746: 'puck, hockey puck', - 747: 'punching bag, punch bag, punching ball, punchball', - 748: 'purse', - 749: 'quill, quill pen', - 750: 'quilt, comforter, comfort, puff', - 751: 'racer, race car, racing car', - 752: 'racket, racquet', - 753: 'radiator', - 754: 'radio, wireless', - 755: 'radio telescope, radio reflector', - 756: 'rain barrel', - 757: 'recreational vehicle, RV, R.V.', - 758: 'reel', - 759: 'reflex camera', - 760: 'refrigerator, icebox', - 761: 'remote control, remote', - 762: 'restaurant, eating house, eating place, eatery', - 763: 'revolver, six-gun, six-shooter', - 764: 'rifle', - 765: 'rocking chair, rocker', - 766: 'rotisserie', - 767: 'rubber eraser, rubber, pencil eraser', - 768: 'rugby ball', - 769: 'rule, ruler', - 770: 'running shoe', - 771: 'safe', - 772: 'safety pin', - 773: 'saltshaker, salt shaker', - 774: 'sandal', - 775: 'sarong', - 776: 'sax, saxophone', - 777: 'scabbard', - 778: 'scale, weighing machine', - 779: 'school bus', - 780: 'schooner', - 781: 'scoreboard', - 782: 'screen, CRT screen', - 783: 'screw', - 784: 'screwdriver', - 785: 'seat belt, seatbelt', - 786: 'sewing machine', - 787: 'shield, buckler', - 788: 'shoe shop, shoe-shop, shoe store', - 789: 'shoji', - 790: 'shopping basket', - 791: 'shopping cart', - 792: 'shovel', - 793: 'shower cap', - 794: 'shower curtain', - 795: 'ski', - 796: 'ski mask', - 797: 'sleeping bag', - 798: 'slide rule, slipstick', - 799: 'sliding door', - 800: 'slot, one-armed bandit', - 801: 'snorkel', - 802: 'snowmobile', - 803: 'snowplow, snowplough', - 804: 'soap dispenser', - 805: 'soccer ball', - 806: 'sock', 
- 807: 'solar dish, solar collector, solar furnace', - 808: 'sombrero', - 809: 'soup bowl', - 810: 'space bar', - 811: 'space heater', - 812: 'space shuttle', - 813: 'spatula', - 814: 'speedboat', - 815: "spider web, spider's web", - 816: 'spindle', - 817: 'sports car, sport car', - 818: 'spotlight, spot', - 819: 'stage', - 820: 'steam locomotive', - 821: 'steel arch bridge', - 822: 'steel drum', - 823: 'stethoscope', - 824: 'stole', - 825: 'stone wall', - 826: 'stopwatch, stop watch', - 827: 'stove', - 828: 'strainer', - 829: 'streetcar, tram, tramcar, trolley, trolley car', - 830: 'stretcher', - 831: 'studio couch, day bed', - 832: 'stupa, tope', - 833: 'submarine, pigboat, sub, U-boat', - 834: 'suit, suit of clothes', - 835: 'sundial', - 836: 'sunglass', - 837: 'sunglasses, dark glasses, shades', - 838: 'sunscreen, sunblock, sun blocker', - 839: 'suspension bridge', - 840: 'swab, swob, mop', - 841: 'sweatshirt', - 842: 'swimming trunks, bathing trunks', - 843: 'swing', - 844: 'switch, electric switch, electrical switch', - 845: 'syringe', - 846: 'table lamp', - 847: 'tank, army tank, armored combat vehicle, armoured combat vehicle', - 848: 'tape player', - 849: 'teapot', - 850: 'teddy, teddy bear', - 851: 'television, television system', - 852: 'tennis ball', - 853: 'thatch, thatched roof', - 854: 'theater curtain, theatre curtain', - 855: 'thimble', - 856: 'thresher, thrasher, threshing machine', - 857: 'throne', - 858: 'tile roof', - 859: 'toaster', - 860: 'tobacco shop, tobacconist shop, tobacconist', - 861: 'toilet seat', - 862: 'torch', - 863: 'totem pole', - 864: 'tow truck, tow car, wrecker', - 865: 'toyshop', - 866: 'tractor', - 867: 'trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi', - 868: 'tray', - 869: 'trench coat', - 870: 'tricycle, trike, velocipede', - 871: 'trimaran', - 872: 'tripod', - 873: 'triumphal arch', - 874: 'trolleybus, trolley coach, trackless trolley', - 875: 'trombone', - 876: 'tub, vat', - 877: 'turnstile', - 878: 'typewriter keyboard', - 879: 'umbrella', - 880: 'unicycle, monocycle', - 881: 'upright, upright piano', - 882: 'vacuum, vacuum cleaner', - 883: 'vase', - 884: 'vault', - 885: 'velvet', - 886: 'vending machine', - 887: 'vestment', - 888: 'viaduct', - 889: 'violin, fiddle', - 890: 'volleyball', - 891: 'waffle iron', - 892: 'wall clock', - 893: 'wallet, billfold, notecase, pocketbook', - 894: 'wardrobe, closet, press', - 895: 'warplane, military plane', - 896: 'washbasin, handbasin, washbowl, lavabo, wash-hand basin', - 897: 'washer, automatic washer, washing machine', - 898: 'water bottle', - 899: 'water jug', - 900: 'water tower', - 901: 'whiskey jug', - 902: 'whistle', - 903: 'wig', - 904: 'window screen', - 905: 'window shade', - 906: 'Windsor tie', - 907: 'wine bottle', - 908: 'wing', - 909: 'wok', - 910: 'wooden spoon', - 911: 'wool, woolen, woollen', - 912: 'worm fence, snake fence, snake-rail fence, Virginia fence', - 913: 'wreck', - 914: 'yawl', - 915: 'yurt', - 916: 'web site, website, internet site, site', - 917: 'comic book', - 918: 'crossword puzzle, crossword', - 919: 'street sign', - 920: 'traffic light, traffic signal, stoplight', - 921: 'book jacket, dust cover, dust jacket, dust wrapper', - 922: 'menu', - 923: 'plate', - 924: 'guacamole', - 925: 'consomme', - 926: 'hot pot, hotpot', - 927: 'trifle', - 928: 'ice cream, icecream', - 929: 'ice lolly, lolly, lollipop, popsicle', - 930: 'French loaf', - 931: 'bagel, beigel', - 932: 'pretzel', - 933: 'cheeseburger', - 934: 'hotdog, hot dog, red hot', - 935: 
'mashed potato', - 936: 'head cabbage', - 937: 'broccoli', - 938: 'cauliflower', - 939: 'zucchini, courgette', - 940: 'spaghetti squash', - 941: 'acorn squash', - 942: 'butternut squash', - 943: 'cucumber, cuke', - 944: 'artichoke, globe artichoke', - 945: 'bell pepper', - 946: 'cardoon', - 947: 'mushroom', - 948: 'Granny Smith', - 949: 'strawberry', - 950: 'orange', - 951: 'lemon', - 952: 'fig', - 953: 'pineapple, ananas', - 954: 'banana', - 955: 'jackfruit, jak, jack', - 956: 'custard apple', - 957: 'pomegranate', - 958: 'hay', - 959: 'carbonara', - 960: 'chocolate sauce, chocolate syrup', - 961: 'dough', - 962: 'meat loaf, meatloaf', - 963: 'pizza, pizza pie', - 964: 'potpie', - 965: 'burrito', - 966: 'red wine', - 967: 'espresso', - 968: 'cup', - 969: 'eggnog', - 970: 'alp', - 971: 'bubble', - 972: 'cliff, drop, drop-off', - 973: 'coral reef', - 974: 'geyser', - 975: 'lakeside, lakeshore', - 976: 'promontory, headland, head, foreland', - 977: 'sandbar, sand bar', - 978: 'seashore, coast, seacoast, sea-coast', - 979: 'valley, vale', - 980: 'volcano', - 981: 'ballplayer, baseball player', - 982: 'groom, bridegroom', - 983: 'scuba diver', - 984: 'rapeseed', - 985: 'daisy', - 986: "yellow lady's slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum", - 987: 'corn', - 988: 'acorn', - 989: 'hip, rose hip, rosehip', - 990: 'buckeye, horse chestnut, conker', - 991: 'coral fungus', - 992: 'agaric', - 993: 'gyromitra', - 994: 'stinkhorn, carrion fungus', - 995: 'earthstar', - 996: 'hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa', - 997: 'bolete', - 998: 'ear, spike, capitulum', - 999: 'toilet tissue, toilet paper, bathroom tissue'} diff --git a/components/contrib/openvino/ovms-deployer/containers/deploy.sh b/components/contrib/openvino/ovms-deployer/containers/deploy.sh deleted file mode 100755 index 361a9fb06ac..00000000000 --- a/components/contrib/openvino/ovms-deployer/containers/deploy.sh +++ /dev/null @@ -1,154 +0,0 @@ -#!/bin/bash -e - -set -x - -KUBERNETES_NAMESPACE="${KUBERNETES_NAMESPACE:-kubeflow}" -SERVER_NAME="${SERVER_NAME:-model-server}" -SERVER_ENDPOINT_OUTPUT_FILE="${SERVER_ENDPOINT_OUTPUT_FILE:-/tmp/server_endpoint/data}" - -while (($#)); do - case $1 in - "--model-export-path") - shift - export MODEL_EXPORT_PATH="$1" - shift - ;; - "--cluster-name") - shift - CLUSTER_NAME="$1" - shift - ;; - "--namespace") - shift - KUBERNETES_NAMESPACE="$1" - shift - ;; - "--server-name") - shift - SERVER_NAME="$1" - shift - ;; - "--replicas") - shift - export REPLICAS="$1" - shift - ;; - "--batch-size") - shift - export BATCH_SIZE="$1" - shift - ;; - "--model-version-policy") - shift - export MODEL_VERSION_POLICY="$1" - shift - ;; - "--log-level") - shift - export LOG_LEVEL="$1" - shift - ;; - "--server-endpoint-output-file") - shift - SERVER_ENDPOINT_OUTPUT_FILE = "$1" - shift - ;; - *) - echo "Unknown argument: '$1'" - exit 1 - ;; - esac -done - -if [ -z "${MODEL_EXPORT_PATH}" ]; then - echo "You must specify a path to the saved model" - exit 1 -fi - -echo "Deploying the model '${MODEL_EXPORT_PATH}'" - -if [ -z "${CLUSTER_NAME}" ]; then - CLUSTER_NAME=$(wget -q -O- --header="Metadata-Flavor: Google" http://metadata.google.internal/computeMetadata/v1/instance/attributes/cluster-name) -fi - -# Ensure the server name is not more than 63 characters. -export SERVER_NAME="${SERVER_NAME:0:63}" -# Trim any trailing hyphens from the server name. 
-while [[ "${SERVER_NAME:(-1)}" == "-" ]]; do SERVER_NAME="${SERVER_NAME::-1}"; done - -echo "Deploying ${SERVER_NAME} to the cluster ${CLUSTER_NAME}" - -# Connect kubectl to the local cluster -kubectl config set-cluster "${CLUSTER_NAME}" --server=https://kubernetes.default --certificate-authority=/var/run/secrets/kubernetes.io/serviceaccount/ca.crt -kubectl config set-credentials pipeline --token "$(cat /var/run/secrets/kubernetes.io/serviceaccount/token)" -kubectl config set-context kubeflow --cluster "${CLUSTER_NAME}" --user pipeline -kubectl config use-context kubeflow - -echo "Generating service and deployment yaml files" -python apply_template.py - -kubectl apply -f ovms.yaml - -sleep 10 -echo "Waiting for the TF Serving deployment to have at least one available replica..." -timeout="1000" -start_time=`date +%s` -while [[ $(kubectl get deploy --namespace "${KUBERNETES_NAMESPACE}" --selector=app="ovms-${SERVER_NAME}" --output=jsonpath='{.items[0].status.availableReplicas}') < "1" ]]; do - current_time=`date +%s` - elapsed_time=$(expr $current_time + 1 - $start_time) - if [[ $elapsed_time -gt $timeout ]];then - echo "timeout" - exit 1 - fi - sleep 5 -done - -echo "Obtaining the pod name..." -start_time=`date +%s` -pod_name="" -while [[ $pod_name == "" ]];do - pod_name=$(kubectl get pods --namespace "${KUBERNETES_NAMESPACE}" --selector=app="ovms-${SERVER_NAME}" --template '{{range .items}}{{.metadata.name}}{{"\n"}}{{end}}') - current_time=`date +%s` - elapsed_time=$(expr $current_time + 1 - $start_time) - if [[ $elapsed_time -gt $timeout ]];then - echo "timeout" - exit 1 - fi - sleep 2 -done -echo "Pod name is: " $pod_name - -# Wait for the pod container to start running -echo "Waiting for the TF Serving pod to start running..." -start_time=`date +%s` -exit_code="1" -while [[ $exit_code != "0" ]];do - kubectl get po ${pod_name} --namespace "${KUBERNETES_NAMESPACE}" -o jsonpath='{.status.containerStatuses[0].state.running}' - exit_code=$? 
- current_time=`date +%s` - elapsed_time=$(expr $current_time + 1 - $start_time) - if [[ $elapsed_time -gt $timeout ]];then - echo "timeout" - exit 1 - fi - sleep 2 -done - -start_time=`date +%s` -while [ -z "$(kubectl get po ${pod_name} --namespace "${KUBERNETES_NAMESPACE}" -o jsonpath='{.status.containerStatuses[0].state.running}')" ]; do - current_time=`date +%s` - elapsed_time=$(expr $current_time + 1 - $start_time) - if [[ $elapsed_time -gt $timeout ]];then - echo "timeout" - exit 1 - fi - sleep 5 -done - -# Wait a little while and then grab the logs of the running server -sleep 10 -echo "Logs from the TF Serving pod:" -kubectl logs ${pod_name} --namespace "${KUBERNETES_NAMESPACE}" - -mkdir -p "$(dirname "$SERVER_ENDPOINT_OUTPUT_FILE")" -echo "ovms-${SERVER_NAME}:80" > "$SERVER_ENDPOINT_OUTPUT_FILE" diff --git a/components/contrib/openvino/ovms-deployer/containers/evaluate.py b/components/contrib/openvino/ovms-deployer/containers/evaluate.py deleted file mode 100755 index ad934474439..00000000000 --- a/components/contrib/openvino/ovms-deployer/containers/evaluate.py +++ /dev/null @@ -1,141 +0,0 @@ -#!/usr/bin/env python -import grpc -import numpy as np -import tensorflow.contrib.util as tf_contrib_util -import datetime -import argparse -from tensorflow_serving.apis import predict_pb2 -from tensorflow_serving.apis import prediction_service_pb2_grpc -from urllib.parse import urlparse -import requests -import cv2 -import os -import json -import classes - - -def crop_resize(img,cropx,cropy): - y,x,c = img.shape - if y < cropy: - img = cv2.resize(img, (x, cropy)) - y = cropy - if x < cropx: - img = cv2.resize(img, (cropx,y)) - x = cropx - startx = x//2-(cropx//2) - starty = y//2-(cropy//2) - return img[starty:starty+cropy,startx:startx+cropx,:] - -def get_file_content(source_path): - parsed_path = urlparse(source_path) - if parsed_path.scheme == "http" or parsed_path.scheme == "https": - try: - response = requests.get(source_path, stream=True) - content = response.content - except requests.exceptions.RequestException as e: - print(e) - content = None - elif parsed_path.scheme == "": - if os.path.isfile(source_path): - with open(input_images) as f: - content = f.readlines() - f.close - else: - print("file " + source_path + "is not accessible") - content = None - return content - -def getJpeg(path, size, path_prefix): - print(os.path.join(path_prefix,path)) - content = get_file_content(os.path.join(path_prefix,path)) - - if content: - try: - img = np.frombuffer(content, dtype=np.uint8) - img = cv2.imdecode(img, cv2.IMREAD_COLOR) # BGR format - # retrived array has BGR format and 0-255 normalization - # add image preprocessing if needed by the model - img = crop_resize(img, size, size) - img = img.astype('float32') - img = img.transpose(2,0,1).reshape(1,3,size,size) - print(path, img.shape, "; data range:",np.amin(img),":",np.amax(img)) - except Exception as e: - print("Can not read the image file", e) - img = None - else: - print("Can not open ", os.path(path_prefix,path)) - img = None - return img - -parser = argparse.ArgumentParser(description='Sends requests to OVMS and TF Serving using images in numpy format') -parser.add_argument('--images_list', required=False, default='input_images.txt', help='Path to a file with a list of labeled images. It should include in every line a path to the image file and a numerical label separate by space.') -parser.add_argument('--grpc_endpoint',required=False, default='localhost:9000', help='Specify endpoint of grpc service. 
default:localhost:9000') -parser.add_argument('--input_name',required=False, default='input', help='Specify input tensor name. default: input') -parser.add_argument('--output_name',required=False, default='resnet_v1_50/predictions/Reshape_1', help='Specify output name. default: output') -parser.add_argument('--model_name', default='resnet', help='Define model name, must be same as is in service. default: resnet', - dest='model_name') -parser.add_argument('--size',required=False, default=224, type=int, help='The size of the image in the model') -parser.add_argument('--image_path_prefix',required=False, default="", type=str, help='Path prefix to be added to every image in the list') -args = vars(parser.parse_args()) - -channel = grpc.insecure_channel(args['grpc_endpoint']) -stub = prediction_service_pb2_grpc.PredictionServiceStub(channel) -input_images = args.get('images_list') -size = args.get('size') - -input_list_content = get_file_content(input_images) -if input_list_content is None: - print("Can not open input images file", input_images) - exit(1) -else: - lines = input_list_content.decode().split("\n") -print(lines) -print('Start processing:') -print('\tModel name: {}'.format(args.get('model_name'))) -print('\tImages list file: {}'.format(args.get('images_list'))) - -i = 0 -matched = 0 -processing_times = np.zeros((0),int) -imgs = np.zeros((0,3,size, size), np.dtype('=3.1.2,<4 -futures==3.1.1 -tensorflow-serving-api==1.13.0 diff --git a/components/contrib/openvino/predict/README.md b/components/contrib/openvino/predict/README.md deleted file mode 100644 index 72b48d9fd14..00000000000 --- a/components/contrib/openvino/predict/README.md +++ /dev/null @@ -1,64 +0,0 @@ -# Inference component with OpenVINO inference engine - -This component takes the following parameters: -* path to the model in Intermediate Representation format ( xml and bin files) -* numpy file with the input dataset. Input shape should fit to the used model definition. -* classification labels which can be used to calculate model accuracy -* input data can be scaled using parameters scale_div and scale_sub -* path to the folder where the inference results in numpy format should be uploaded - -In the component logs are included inference performance details. - -This component is tuned for classification models but can be considered as exemplary for arbitrary OpenVINO models. 
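For orientation, the flow this component's README describes (scale the numpy input with `scale_div`/`scale_sub`, run inference, compare top-1 predictions against the label file, and surface the result as a pipeline metric) can be sketched in plain numpy. This is only an illustrative sketch, not the component's actual implementation: `run_inference` is a hypothetical stand-in for the OpenVINO call, and only the accuracy metric is shown.

```python
import json
import numpy as np

def evaluate(imgs_path, labels_path, run_inference, scale_div=1.0, scale_sub=128.0):
    """Sketch of the component's flow: scale inputs, infer, report accuracy.

    `run_inference` is a hypothetical callable mapping a batch of images to
    class scores; the real component drives the OpenVINO inference engine.
    """
    imgs = np.load(imgs_path).astype(np.float32)
    labels = np.load(labels_path)
    imgs = (imgs / scale_div) - scale_sub            # input scaling flags listed above
    preds = np.argmax(run_inference(imgs), axis=1)   # top-1 class per sample
    accuracy = float(np.mean(preds == labels))
    # Results are surfaced to Kubeflow Pipelines through /mlpipeline-metrics.json
    metrics = {"metrics": [{"name": "accuracy-score",
                            "numberValue": accuracy,
                            "format": "PERCENTAGE"}]}
    with open("/mlpipeline-metrics.json", "w") as f:
        json.dump(metrics, f)
    return accuracy
```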
- -There are generated 2 metrics including inference latency and accuracy - -```bash -predict.py --help -usage: predict.py [-h] --model_bin MODEL_BIN --model_xml MODEL_XML - --input_numpy_file INPUT_NUMPY_FILE --label_numpy_file - LABEL_NUMPY_FILE --output_folder OUTPUT_FOLDER - [--batch_size BATCH_SIZE] [--scale_div SCALE_DIV] - [--scale_sub SCALE_SUB] - -Component executing inference operation - -optional arguments: - -h, --help show this help message and exit - --model_bin MODEL_BIN - GCS or local path to model weights file (.bin) - --model_xml MODEL_XML - GCS or local path to model graph (.xml) - --input_numpy_file INPUT_NUMPY_FILE - GCS or local path to input dataset numpy file - --label_numpy_file LABEL_NUMPY_FILE - GCS or local path to numpy file with labels - --output_folder OUTPUT_FOLDER - GCS or local path to results upload folder - --batch_size BATCH_SIZE - batch size to be used for inference - --scale_div SCALE_DIV - scale the np input by division of by the value - --scale_sub SCALE_SUB - scale the np input by substraction of the value -``` - - -## building docker image - - -```bash -docker build --build-arg http_proxy=$http_proxy --build-arg https_proxy=$https_proxy . -``` - -## testing the image locally - -```bash -COMMAND = python3 predict.py \ ---model_bin gs:///model.bin \ ---model_xml gs:///model.xml \ ---input_numpy_file gs:///datasets/imgs.npy \ ---output_folder gs:///outputs -docker run --rm -it -e GOOGLE_APPLICATION_CREDENTIALS=/etc/credentials/gcp_key.json \ --v ${PWD}/key.json:/etc/credentials/gcp_key.json $COMMAND -``` \ No newline at end of file diff --git a/components/contrib/openvino/predict/containers/Dockerfile b/components/contrib/openvino/predict/containers/Dockerfile deleted file mode 100644 index 4da7ecc1c04..00000000000 --- a/components/contrib/openvino/predict/containers/Dockerfile +++ /dev/null @@ -1,76 +0,0 @@ -FROM ubuntu:16.04 as DEV -RUN apt-get update && apt-get install -y \ - curl \ - ca-certificates \ - python3-pip \ - python-dev \ - libgfortran3 \ - vim \ - build-essential \ - cmake \ - curl \ - wget \ - libssl-dev \ - ca-certificates \ - git \ - libboost-regex-dev \ - gcc-multilib \ - g++-multilib \ - libgtk2.0-dev \ - pkg-config \ - unzip \ - automake \ - libtool \ - autoconf \ - libpng12-dev \ - libcairo2-dev \ - libpango1.0-dev \ - libglib2.0-dev \ - libgtk2.0-dev \ - libswscale-dev \ - libavcodec-dev \ - libavformat-dev \ - libgstreamer1.0-0 \ - gstreamer1.0-plugins-base \ - libusb-1.0-0-dev \ - libopenblas-dev -ARG DLDT_DIR=/dldt-2018_R5 -RUN git clone --depth=1 -b 2018_R5 https://github.com/opencv/dldt.git ${DLDT_DIR} && \ - cd ${DLDT_DIR} && git submodule init && git submodule update --recursive && \ - rm -Rf .git && rm -Rf model-optimizer - -WORKDIR ${DLDT_DIR} -RUN curl -L -o ${DLDT_DIR}/mklml_lnx_2019.0.1.20180928.tgz https://github.com/intel/mkl-dnn/releases/download/v0.17.2/mklml_lnx_2019.0.1.20180928.tgz && \ - tar -xzf ${DLDT_DIR}/mklml_lnx_2019.0.1.20180928.tgz && rm ${DLDT_DIR}/mklml_lnx_2019.0.1.20180928.tgz -WORKDIR ${DLDT_DIR}/inference-engine -RUN mkdir build && cd build && cmake -DGEMM=MKL -DMKLROOT=${DLDT_DIR}/mklml_lnx_2019.0.1.20180928 -DENABLE_MKL_DNN=ON -DCMAKE_BUILD_TYPE=Release .. -RUN cd build && make -j4 -RUN pip3 install cython numpy && mkdir ie_bridges/python/build && cd ie_bridges/python/build && \ - cmake -DInferenceEngine_DIR=${DLDT_DIR}/inference-engine/build -DPYTHON_EXECUTABLE=`which python3` -DPYTHON_LIBRARY=/usr/lib/x86_64-linux-gnu/libpython3.5m.so -DPYTHON_INCLUDE_DIR=/usr/include/python3.5m .. 
&& \ - make -j4 - -FROM ubuntu:16.04 as PROD - -RUN apt-get update && apt-get install -y --no-install-recommends \ - curl \ - ca-certificates \ - python3-pip \ - python3-dev \ - virtualenv \ - libgomp1 - -COPY --from=DEV /dldt-2018_R5/inference-engine/bin/intel64/Release/lib/*.so /usr/local/lib/ -COPY --from=DEV /dldt-2018_R5/inference-engine/ie_bridges/python/bin/intel64/Release/python_api/python3.5/openvino/ /usr/local/lib/openvino/ -COPY --from=DEV /dldt-2018_R5/mklml_lnx_2019.0.1.20180928/lib/lib*.so /usr/local/lib/ -ENV LD_LIBRARY_PATH=/usr/local/lib -ENV PYTHONPATH=/usr/local/lib -COPY requirements.txt . -RUN pip3 install setuptools wheel -RUN pip3 install -r requirements.txt -COPY predict.py classes.py ./ - - - - - - diff --git a/components/contrib/openvino/predict/containers/classes.py b/components/contrib/openvino/predict/containers/classes.py deleted file mode 100644 index c9d7e484bc3..00000000000 --- a/components/contrib/openvino/predict/containers/classes.py +++ /dev/null @@ -1,1016 +0,0 @@ -# -# Copyright (c) 2018 Intel Corporation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -imagenet_classes = {0: 'tench, Tinca tinca', - 1: 'goldfish, Carassius auratus', - 2: 'great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias', - 3: 'tiger shark, Galeocerdo cuvieri', - 4: 'hammerhead, hammerhead shark', - 5: 'electric ray, crampfish, numbfish, torpedo', - 6: 'stingray', - 7: 'cock', - 8: 'hen', - 9: 'ostrich, Struthio camelus', - 10: 'brambling, Fringilla montifringilla', - 11: 'goldfinch, Carduelis carduelis', - 12: 'house finch, linnet, Carpodacus mexicanus', - 13: 'junco, snowbird', - 14: 'indigo bunting, indigo finch, indigo bird, Passerina cyanea', - 15: 'robin, American robin, Turdus migratorius', - 16: 'bulbul', - 17: 'jay', - 18: 'magpie', - 19: 'chickadee', - 20: 'water ouzel, dipper', - 21: 'kite', - 22: 'bald eagle, American eagle, Haliaeetus leucocephalus', - 23: 'vulture', - 24: 'great grey owl, great gray owl, Strix nebulosa', - 25: 'European fire salamander, Salamandra salamandra', - 26: 'common newt, Triturus vulgaris', - 27: 'eft', - 28: 'spotted salamander, Ambystoma maculatum', - 29: 'axolotl, mud puppy, Ambystoma mexicanum', - 30: 'bullfrog, Rana catesbeiana', - 31: 'tree frog, tree-frog', - 32: 'tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui', - 33: 'loggerhead, loggerhead turtle, Caretta caretta', - 34: 'leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea', - 35: 'mud turtle', - 36: 'terrapin', - 37: 'box turtle, box tortoise', - 38: 'banded gecko', - 39: 'common iguana, iguana, Iguana iguana', - 40: 'American chameleon, anole, Anolis carolinensis', - 41: 'whiptail, whiptail lizard', - 42: 'agama', - 43: 'frilled lizard, Chlamydosaurus kingi', - 44: 'alligator lizard', - 45: 'Gila monster, Heloderma suspectum', - 46: 'green lizard, Lacerta viridis', - 47: 'African chameleon, Chamaeleo chamaeleon', - 48: 'Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis', - 49: 
'African crocodile, Nile crocodile, Crocodylus niloticus', - 50: 'American alligator, Alligator mississipiensis', - 51: 'triceratops', - 52: 'thunder snake, worm snake, Carphophis amoenus', - 53: 'ringneck snake, ring-necked snake, ring snake', - 54: 'hognose snake, puff adder, sand viper', - 55: 'green snake, grass snake', - 56: 'king snake, kingsnake', - 57: 'garter snake, grass snake', - 58: 'water snake', - 59: 'vine snake', - 60: 'night snake, Hypsiglena torquata', - 61: 'boa constrictor, Constrictor constrictor', - 62: 'rock python, rock snake, Python sebae', - 63: 'Indian cobra, Naja naja', - 64: 'green mamba', - 65: 'sea snake', - 66: 'horned viper, cerastes, sand viper, horned asp, Cerastes cornutus', - 67: 'diamondback, diamondback rattlesnake, Crotalus adamanteus', - 68: 'sidewinder, horned rattlesnake, Crotalus cerastes', - 69: 'trilobite', - 70: 'harvestman, daddy longlegs, Phalangium opilio', - 71: 'scorpion', - 72: 'black and gold garden spider, Argiope aurantia', - 73: 'barn spider, Araneus cavaticus', - 74: 'garden spider, Aranea diademata', - 75: 'black widow, Latrodectus mactans', - 76: 'tarantula', - 77: 'wolf spider, hunting spider', - 78: 'tick', - 79: 'centipede', - 80: 'black grouse', - 81: 'ptarmigan', - 82: 'ruffed grouse, partridge, Bonasa umbellus', - 83: 'prairie chicken, prairie grouse, prairie fowl', - 84: 'peacock', - 85: 'quail', - 86: 'partridge', - 87: 'African grey, African gray, Psittacus erithacus', - 88: 'macaw', - 89: 'sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita', - 90: 'lorikeet', - 91: 'coucal', - 92: 'bee eater', - 93: 'hornbill', - 94: 'hummingbird', - 95: 'jacamar', - 96: 'toucan', - 97: 'drake', - 98: 'red-breasted merganser, Mergus serrator', - 99: 'goose', - 100: 'black swan, Cygnus atratus', - 101: 'tusker', - 102: 'echidna, spiny anteater, anteater', - 103: 'platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus', - 104: 'wallaby, brush kangaroo', - 105: 'koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus', - 106: 'wombat', - 107: 'jellyfish', - 108: 'sea anemone, anemone', - 109: 'brain coral', - 110: 'flatworm, platyhelminth', - 111: 'nematode, nematode worm, roundworm', - 112: 'conch', - 113: 'snail', - 114: 'slug', - 115: 'sea slug, nudibranch', - 116: 'chiton, coat-of-mail shell, sea cradle, polyplacophore', - 117: 'chambered nautilus, pearly nautilus, nautilus', - 118: 'Dungeness crab, Cancer magister', - 119: 'rock crab, Cancer irroratus', - 120: 'fiddler crab', - 121: 'king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica', - 122: 'American lobster, Northern lobster, Maine lobster, Homarus americanus', - 123: 'spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish', - 124: 'crayfish, crawfish, crawdad, crawdaddy', - 125: 'hermit crab', - 126: 'isopod', - 127: 'white stork, Ciconia ciconia', - 128: 'black stork, Ciconia nigra', - 129: 'spoonbill', - 130: 'flamingo', - 131: 'little blue heron, Egretta caerulea', - 132: 'American egret, great white heron, Egretta albus', - 133: 'bittern', - 134: 'crane', - 135: 'limpkin, Aramus pictus', - 136: 'European gallinule, Porphyrio porphyrio', - 137: 'American coot, marsh hen, mud hen, water hen, Fulica americana', - 138: 'bustard', - 139: 'ruddy turnstone, Arenaria interpres', - 140: 'red-backed sandpiper, dunlin, Erolia alpina', - 141: 'redshank, Tringa totanus', - 142: 'dowitcher', - 143: 'oystercatcher, oyster catcher', - 144: 'pelican', - 145: 'king penguin, Aptenodytes 
patagonica', - 146: 'albatross, mollymawk', - 147: 'grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus', - 148: 'killer whale, killer, orca, grampus, sea wolf, Orcinus orca', - 149: 'dugong, Dugong dugon', - 150: 'sea lion', - 151: 'Chihuahua', - 152: 'Japanese spaniel', - 153: 'Maltese dog, Maltese terrier, Maltese', - 154: 'Pekinese, Pekingese, Peke', - 155: 'Shih-Tzu', - 156: 'Blenheim spaniel', - 157: 'papillon', - 158: 'toy terrier', - 159: 'Rhodesian ridgeback', - 160: 'Afghan hound, Afghan', - 161: 'basset, basset hound', - 162: 'beagle', - 163: 'bloodhound, sleuthhound', - 164: 'bluetick', - 165: 'black-and-tan coonhound', - 166: 'Walker hound, Walker foxhound', - 167: 'English foxhound', - 168: 'redbone', - 169: 'borzoi, Russian wolfhound', - 170: 'Irish wolfhound', - 171: 'Italian greyhound', - 172: 'whippet', - 173: 'Ibizan hound, Ibizan Podenco', - 174: 'Norwegian elkhound, elkhound', - 175: 'otterhound, otter hound', - 176: 'Saluki, gazelle hound', - 177: 'Scottish deerhound, deerhound', - 178: 'Weimaraner', - 179: 'Staffordshire bullterrier, Staffordshire bull terrier', - 180: 'American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier', - 181: 'Bedlington terrier', - 182: 'Border terrier', - 183: 'Kerry blue terrier', - 184: 'Irish terrier', - 185: 'Norfolk terrier', - 186: 'Norwich terrier', - 187: 'Yorkshire terrier', - 188: 'wire-haired fox terrier', - 189: 'Lakeland terrier', - 190: 'Sealyham terrier, Sealyham', - 191: 'Airedale, Airedale terrier', - 192: 'cairn, cairn terrier', - 193: 'Australian terrier', - 194: 'Dandie Dinmont, Dandie Dinmont terrier', - 195: 'Boston bull, Boston terrier', - 196: 'miniature schnauzer', - 197: 'giant schnauzer', - 198: 'standard schnauzer', - 199: 'Scotch terrier, Scottish terrier, Scottie', - 200: 'Tibetan terrier, chrysanthemum dog', - 201: 'silky terrier, Sydney silky', - 202: 'soft-coated wheaten terrier', - 203: 'West Highland white terrier', - 204: 'Lhasa, Lhasa apso', - 205: 'flat-coated retriever', - 206: 'curly-coated retriever', - 207: 'golden retriever', - 208: 'Labrador retriever', - 209: 'Chesapeake Bay retriever', - 210: 'German short-haired pointer', - 211: 'vizsla, Hungarian pointer', - 212: 'English setter', - 213: 'Irish setter, red setter', - 214: 'Gordon setter', - 215: 'Brittany spaniel', - 216: 'clumber, clumber spaniel', - 217: 'English springer, English springer spaniel', - 218: 'Welsh springer spaniel', - 219: 'cocker spaniel, English cocker spaniel, cocker', - 220: 'Sussex spaniel', - 221: 'Irish water spaniel', - 222: 'kuvasz', - 223: 'schipperke', - 224: 'groenendael', - 225: 'malinois', - 226: 'briard', - 227: 'kelpie', - 228: 'komondor', - 229: 'Old English sheepdog, bobtail', - 230: 'Shetland sheepdog, Shetland sheep dog, Shetland', - 231: 'collie', - 232: 'Border collie', - 233: 'Bouvier des Flandres, Bouviers des Flandres', - 234: 'Rottweiler', - 235: 'German shepherd, German shepherd dog, German police dog, alsatian', - 236: 'Doberman, Doberman pinscher', - 237: 'miniature pinscher', - 238: 'Greater Swiss Mountain dog', - 239: 'Bernese mountain dog', - 240: 'Appenzeller', - 241: 'EntleBucher', - 242: 'boxer', - 243: 'bull mastiff', - 244: 'Tibetan mastiff', - 245: 'French bulldog', - 246: 'Great Dane', - 247: 'Saint Bernard, St Bernard', - 248: 'Eskimo dog, husky', - 249: 'malamute, malemute, Alaskan malamute', - 250: 'Siberian husky', - 251: 'dalmatian, coach dog, carriage dog', - 252: 'affenpinscher, monkey pinscher, monkey dog', - 
253: 'basenji', - 254: 'pug, pug-dog', - 255: 'Leonberg', - 256: 'Newfoundland, Newfoundland dog', - 257: 'Great Pyrenees', - 258: 'Samoyed, Samoyede', - 259: 'Pomeranian', - 260: 'chow, chow chow', - 261: 'keeshond', - 262: 'Brabancon griffon', - 263: 'Pembroke, Pembroke Welsh corgi', - 264: 'Cardigan, Cardigan Welsh corgi', - 265: 'toy poodle', - 266: 'miniature poodle', - 267: 'standard poodle', - 268: 'Mexican hairless', - 269: 'timber wolf, grey wolf, gray wolf, Canis lupus', - 270: 'white wolf, Arctic wolf, Canis lupus tundrarum', - 271: 'red wolf, maned wolf, Canis rufus, Canis niger', - 272: 'coyote, prairie wolf, brush wolf, Canis latrans', - 273: 'dingo, warrigal, warragal, Canis dingo', - 274: 'dhole, Cuon alpinus', - 275: 'African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus', - 276: 'hyena, hyaena', - 277: 'red fox, Vulpes vulpes', - 278: 'kit fox, Vulpes macrotis', - 279: 'Arctic fox, white fox, Alopex lagopus', - 280: 'grey fox, gray fox, Urocyon cinereoargenteus', - 281: 'tabby, tabby cat', - 282: 'tiger cat', - 283: 'Persian cat', - 284: 'Siamese cat, Siamese', - 285: 'Egyptian cat', - 286: 'cougar, puma, catamount, mountain lion, painter, panther, Felis concolor', - 287: 'lynx, catamount', - 288: 'leopard, Panthera pardus', - 289: 'snow leopard, ounce, Panthera uncia', - 290: 'jaguar, panther, Panthera onca, Felis onca', - 291: 'lion, king of beasts, Panthera leo', - 292: 'tiger, Panthera tigris', - 293: 'cheetah, chetah, Acinonyx jubatus', - 294: 'brown bear, bruin, Ursus arctos', - 295: 'American black bear, black bear, Ursus americanus, Euarctos americanus', - 296: 'ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus', - 297: 'sloth bear, Melursus ursinus, Ursus ursinus', - 298: 'mongoose', - 299: 'meerkat, mierkat', - 300: 'tiger beetle', - 301: 'ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle', - 302: 'ground beetle, carabid beetle', - 303: 'long-horned beetle, longicorn, longicorn beetle', - 304: 'leaf beetle, chrysomelid', - 305: 'dung beetle', - 306: 'rhinoceros beetle', - 307: 'weevil', - 308: 'fly', - 309: 'bee', - 310: 'ant, emmet, pismire', - 311: 'grasshopper, hopper', - 312: 'cricket', - 313: 'walking stick, walkingstick, stick insect', - 314: 'cockroach, roach', - 315: 'mantis, mantid', - 316: 'cicada, cicala', - 317: 'leafhopper', - 318: 'lacewing, lacewing fly', - 319: "dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk", - 320: 'damselfly', - 321: 'admiral', - 322: 'ringlet, ringlet butterfly', - 323: 'monarch, monarch butterfly, milkweed butterfly, Danaus plexippus', - 324: 'cabbage butterfly', - 325: 'sulphur butterfly, sulfur butterfly', - 326: 'lycaenid, lycaenid butterfly', - 327: 'starfish, sea star', - 328: 'sea urchin', - 329: 'sea cucumber, holothurian', - 330: 'wood rabbit, cottontail, cottontail rabbit', - 331: 'hare', - 332: 'Angora, Angora rabbit', - 333: 'hamster', - 334: 'porcupine, hedgehog', - 335: 'fox squirrel, eastern fox squirrel, Sciurus niger', - 336: 'marmot', - 337: 'beaver', - 338: 'guinea pig, Cavia cobaya', - 339: 'sorrel', - 340: 'zebra', - 341: 'hog, pig, grunter, squealer, Sus scrofa', - 342: 'wild boar, boar, Sus scrofa', - 343: 'warthog', - 344: 'hippopotamus, hippo, river horse, Hippopotamus amphibius', - 345: 'ox', - 346: 'water buffalo, water ox, Asiatic buffalo, Bubalus bubalis', - 347: 'bison', - 348: 'ram, tup', - 349: 'bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis', 
- 350: 'ibex, Capra ibex', - 351: 'hartebeest', - 352: 'impala, Aepyceros melampus', - 353: 'gazelle', - 354: 'Arabian camel, dromedary, Camelus dromedarius', - 355: 'llama', - 356: 'weasel', - 357: 'mink', - 358: 'polecat, fitch, foulmart, foumart, Mustela putorius', - 359: 'black-footed ferret, ferret, Mustela nigripes', - 360: 'otter', - 361: 'skunk, polecat, wood pussy', - 362: 'badger', - 363: 'armadillo', - 364: 'three-toed sloth, ai, Bradypus tridactylus', - 365: 'orangutan, orang, orangutang, Pongo pygmaeus', - 366: 'gorilla, Gorilla gorilla', - 367: 'chimpanzee, chimp, Pan troglodytes', - 368: 'gibbon, Hylobates lar', - 369: 'siamang, Hylobates syndactylus, Symphalangus syndactylus', - 370: 'guenon, guenon monkey', - 371: 'patas, hussar monkey, Erythrocebus patas', - 372: 'baboon', - 373: 'macaque', - 374: 'langur', - 375: 'colobus, colobus monkey', - 376: 'proboscis monkey, Nasalis larvatus', - 377: 'marmoset', - 378: 'capuchin, ringtail, Cebus capucinus', - 379: 'howler monkey, howler', - 380: 'titi, titi monkey', - 381: 'spider monkey, Ateles geoffroyi', - 382: 'squirrel monkey, Saimiri sciureus', - 383: 'Madagascar cat, ring-tailed lemur, Lemur catta', - 384: 'indri, indris, Indri indri, Indri brevicaudatus', - 385: 'Indian elephant, Elephas maximus', - 386: 'African elephant, Loxodonta africana', - 387: 'lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens', - 388: 'giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca', - 389: 'barracouta, snoek', - 390: 'eel', - 391: 'coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch', - 392: 'rock beauty, Holocanthus tricolor', - 393: 'anemone fish', - 394: 'sturgeon', - 395: 'gar, garfish, garpike, billfish, Lepisosteus osseus', - 396: 'lionfish', - 397: 'puffer, pufferfish, blowfish, globefish', - 398: 'abacus', - 399: 'abaya', - 400: "academic gown, academic robe, judge's robe", - 401: 'accordion, piano accordion, squeeze box', - 402: 'acoustic guitar', - 403: 'aircraft carrier, carrier, flattop, attack aircraft carrier', - 404: 'airliner', - 405: 'airship, dirigible', - 406: 'altar', - 407: 'ambulance', - 408: 'amphibian, amphibious vehicle', - 409: 'analog clock', - 410: 'apiary, bee house', - 411: 'apron', - 412: 'ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin', - 413: 'assault rifle, assault gun', - 414: 'backpack, back pack, knapsack, packsack, rucksack, haversack', - 415: 'bakery, bakeshop, bakehouse', - 416: 'balance beam, beam', - 417: 'balloon', - 418: 'ballpoint, ballpoint pen, ballpen, Biro', - 419: 'Band Aid', - 420: 'banjo', - 421: 'bannister, banister, balustrade, balusters, handrail', - 422: 'barbell', - 423: 'barber chair', - 424: 'barbershop', - 425: 'barn', - 426: 'barometer', - 427: 'barrel, cask', - 428: 'barrow, garden cart, lawn cart, wheelbarrow', - 429: 'baseball', - 430: 'basketball', - 431: 'bassinet', - 432: 'bassoon', - 433: 'bathing cap, swimming cap', - 434: 'bath towel', - 435: 'bathtub, bathing tub, bath, tub', - 436: 'beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon', - 437: 'beacon, lighthouse, beacon light, pharos', - 438: 'beaker', - 439: 'bearskin, busby, shako', - 440: 'beer bottle', - 441: 'beer glass', - 442: 'bell cote, bell cot', - 443: 'bib', - 444: 'bicycle-built-for-two, tandem bicycle, tandem', - 445: 'bikini, two-piece', - 446: 'binder, ring-binder', - 447: 'binoculars, field glasses, opera glasses', - 448: 'birdhouse', - 449: 'boathouse', - 450: 
'bobsled, bobsleigh, bob', - 451: 'bolo tie, bolo, bola tie, bola', - 452: 'bonnet, poke bonnet', - 453: 'bookcase', - 454: 'bookshop, bookstore, bookstall', - 455: 'bottlecap', - 456: 'bow', - 457: 'bow tie, bow-tie, bowtie', - 458: 'brass, memorial tablet, plaque', - 459: 'brassiere, bra, bandeau', - 460: 'breakwater, groin, groyne, mole, bulwark, seawall, jetty', - 461: 'breastplate, aegis, egis', - 462: 'broom', - 463: 'bucket, pail', - 464: 'buckle', - 465: 'bulletproof vest', - 466: 'bullet train, bullet', - 467: 'butcher shop, meat market', - 468: 'cab, hack, taxi, taxicab', - 469: 'caldron, cauldron', - 470: 'candle, taper, wax light', - 471: 'cannon', - 472: 'canoe', - 473: 'can opener, tin opener', - 474: 'cardigan', - 475: 'car mirror', - 476: 'carousel, carrousel, merry-go-round, roundabout, whirligig', - 477: "carpenter's kit, tool kit", - 478: 'carton', - 479: 'car wheel', - 480: 'cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM', - 481: 'cassette', - 482: 'cassette player', - 483: 'castle', - 484: 'catamaran', - 485: 'CD player', - 486: 'cello, violoncello', - 487: 'cellular telephone, cellular phone, cellphone, cell, mobile phone', - 488: 'chain', - 489: 'chainlink fence', - 490: 'chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour', - 491: 'chain saw, chainsaw', - 492: 'chest', - 493: 'chiffonier, commode', - 494: 'chime, bell, gong', - 495: 'china cabinet, china closet', - 496: 'Christmas stocking', - 497: 'church, church building', - 498: 'cinema, movie theater, movie theatre, movie house, picture palace', - 499: 'cleaver, meat cleaver, chopper', - 500: 'cliff dwelling', - 501: 'cloak', - 502: 'clog, geta, patten, sabot', - 503: 'cocktail shaker', - 504: 'coffee mug', - 505: 'coffeepot', - 506: 'coil, spiral, volute, whorl, helix', - 507: 'combination lock', - 508: 'computer keyboard, keypad', - 509: 'confectionery, confectionary, candy store', - 510: 'container ship, containership, container vessel', - 511: 'convertible', - 512: 'corkscrew, bottle screw', - 513: 'cornet, horn, trumpet, trump', - 514: 'cowboy boot', - 515: 'cowboy hat, ten-gallon hat', - 516: 'cradle', - 517: 'crane', - 518: 'crash helmet', - 519: 'crate', - 520: 'crib, cot', - 521: 'Crock Pot', - 522: 'croquet ball', - 523: 'crutch', - 524: 'cuirass', - 525: 'dam, dike, dyke', - 526: 'desk', - 527: 'desktop computer', - 528: 'dial telephone, dial phone', - 529: 'diaper, nappy, napkin', - 530: 'digital clock', - 531: 'digital watch', - 532: 'dining table, board', - 533: 'dishrag, dishcloth', - 534: 'dishwasher, dish washer, dishwashing machine', - 535: 'disk brake, disc brake', - 536: 'dock, dockage, docking facility', - 537: 'dogsled, dog sled, dog sleigh', - 538: 'dome', - 539: 'doormat, welcome mat', - 540: 'drilling platform, offshore rig', - 541: 'drum, membranophone, tympan', - 542: 'drumstick', - 543: 'dumbbell', - 544: 'Dutch oven', - 545: 'electric fan, blower', - 546: 'electric guitar', - 547: 'electric locomotive', - 548: 'entertainment center', - 549: 'envelope', - 550: 'espresso maker', - 551: 'face powder', - 552: 'feather boa, boa', - 553: 'file, file cabinet, filing cabinet', - 554: 'fireboat', - 555: 'fire engine, fire truck', - 556: 'fire screen, fireguard', - 557: 'flagpole, flagstaff', - 558: 'flute, transverse flute', - 559: 'folding chair', - 560: 'football helmet', - 561: 'forklift', - 562: 'fountain', - 563: 'fountain pen', - 564: 'four-poster', - 565: 'freight car', - 566: 'French 
horn, horn', - 567: 'frying pan, frypan, skillet', - 568: 'fur coat', - 569: 'garbage truck, dustcart', - 570: 'gasmask, respirator, gas helmet', - 571: 'gas pump, gasoline pump, petrol pump, island dispenser', - 572: 'goblet', - 573: 'go-kart', - 574: 'golf ball', - 575: 'golfcart, golf cart', - 576: 'gondola', - 577: 'gong, tam-tam', - 578: 'gown', - 579: 'grand piano, grand', - 580: 'greenhouse, nursery, glasshouse', - 581: 'grille, radiator grille', - 582: 'grocery store, grocery, food market, market', - 583: 'guillotine', - 584: 'hair slide', - 585: 'hair spray', - 586: 'half track', - 587: 'hammer', - 588: 'hamper', - 589: 'hand blower, blow dryer, blow drier, hair dryer, hair drier', - 590: 'hand-held computer, hand-held microcomputer', - 591: 'handkerchief, hankie, hanky, hankey', - 592: 'hard disc, hard disk, fixed disk', - 593: 'harmonica, mouth organ, harp, mouth harp', - 594: 'harp', - 595: 'harvester, reaper', - 596: 'hatchet', - 597: 'holster', - 598: 'home theater, home theatre', - 599: 'honeycomb', - 600: 'hook, claw', - 601: 'hoopskirt, crinoline', - 602: 'horizontal bar, high bar', - 603: 'horse cart, horse-cart', - 604: 'hourglass', - 605: 'iPod', - 606: 'iron, smoothing iron', - 607: "jack-o'-lantern", - 608: 'jean, blue jean, denim', - 609: 'jeep, landrover', - 610: 'jersey, T-shirt, tee shirt', - 611: 'jigsaw puzzle', - 612: 'jinrikisha, ricksha, rickshaw', - 613: 'joystick', - 614: 'kimono', - 615: 'knee pad', - 616: 'knot', - 617: 'lab coat, laboratory coat', - 618: 'ladle', - 619: 'lampshade, lamp shade', - 620: 'laptop, laptop computer', - 621: 'lawn mower, mower', - 622: 'lens cap, lens cover', - 623: 'letter opener, paper knife, paperknife', - 624: 'library', - 625: 'lifeboat', - 626: 'lighter, light, igniter, ignitor', - 627: 'limousine, limo', - 628: 'liner, ocean liner', - 629: 'lipstick, lip rouge', - 630: 'Loafer', - 631: 'lotion', - 632: 'loudspeaker, speaker, speaker unit, loudspeaker system, speaker system', - 633: "loupe, jeweler's loupe", - 634: 'lumbermill, sawmill', - 635: 'magnetic compass', - 636: 'mailbag, postbag', - 637: 'mailbox, letter box', - 638: 'maillot', - 639: 'maillot, tank suit', - 640: 'manhole cover', - 641: 'maraca', - 642: 'marimba, xylophone', - 643: 'mask', - 644: 'matchstick', - 645: 'maypole', - 646: 'maze, labyrinth', - 647: 'measuring cup', - 648: 'medicine chest, medicine cabinet', - 649: 'megalith, megalithic structure', - 650: 'microphone, mike', - 651: 'microwave, microwave oven', - 652: 'military uniform', - 653: 'milk can', - 654: 'minibus', - 655: 'miniskirt, mini', - 656: 'minivan', - 657: 'missile', - 658: 'mitten', - 659: 'mixing bowl', - 660: 'mobile home, manufactured home', - 661: 'Model T', - 662: 'modem', - 663: 'monastery', - 664: 'monitor', - 665: 'moped', - 666: 'mortar', - 667: 'mortarboard', - 668: 'mosque', - 669: 'mosquito net', - 670: 'motor scooter, scooter', - 671: 'mountain bike, all-terrain bike, off-roader', - 672: 'mountain tent', - 673: 'mouse, computer mouse', - 674: 'mousetrap', - 675: 'moving van', - 676: 'muzzle', - 677: 'nail', - 678: 'neck brace', - 679: 'necklace', - 680: 'nipple', - 681: 'notebook, notebook computer', - 682: 'obelisk', - 683: 'oboe, hautboy, hautbois', - 684: 'ocarina, sweet potato', - 685: 'odometer, hodometer, mileometer, milometer', - 686: 'oil filter', - 687: 'organ, pipe organ', - 688: 'oscilloscope, scope, cathode-ray oscilloscope, CRO', - 689: 'overskirt', - 690: 'oxcart', - 691: 'oxygen mask', - 692: 'packet', - 693: 'paddle, boat paddle', - 694: 'paddlewheel, 
paddle wheel', - 695: 'padlock', - 696: 'paintbrush', - 697: "pajama, pyjama, pj's, jammies", - 698: 'palace', - 699: 'panpipe, pandean pipe, syrinx', - 700: 'paper towel', - 701: 'parachute, chute', - 702: 'parallel bars, bars', - 703: 'park bench', - 704: 'parking meter', - 705: 'passenger car, coach, carriage', - 706: 'patio, terrace', - 707: 'pay-phone, pay-station', - 708: 'pedestal, plinth, footstall', - 709: 'pencil box, pencil case', - 710: 'pencil sharpener', - 711: 'perfume, essence', - 712: 'Petri dish', - 713: 'photocopier', - 714: 'pick, plectrum, plectron', - 715: 'pickelhaube', - 716: 'picket fence, paling', - 717: 'pickup, pickup truck', - 718: 'pier', - 719: 'piggy bank, penny bank', - 720: 'pill bottle', - 721: 'pillow', - 722: 'ping-pong ball', - 723: 'pinwheel', - 724: 'pirate, pirate ship', - 725: 'pitcher, ewer', - 726: "plane, carpenter's plane, woodworking plane", - 727: 'planetarium', - 728: 'plastic bag', - 729: 'plate rack', - 730: 'plow, plough', - 731: "plunger, plumber's helper", - 732: 'Polaroid camera, Polaroid Land camera', - 733: 'pole', - 734: 'police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria', - 735: 'poncho', - 736: 'pool table, billiard table, snooker table', - 737: 'pop bottle, soda bottle', - 738: 'pot, flowerpot', - 739: "potter's wheel", - 740: 'power drill', - 741: 'prayer rug, prayer mat', - 742: 'printer', - 743: 'prison, prison house', - 744: 'projectile, missile', - 745: 'projector', - 746: 'puck, hockey puck', - 747: 'punching bag, punch bag, punching ball, punchball', - 748: 'purse', - 749: 'quill, quill pen', - 750: 'quilt, comforter, comfort, puff', - 751: 'racer, race car, racing car', - 752: 'racket, racquet', - 753: 'radiator', - 754: 'radio, wireless', - 755: 'radio telescope, radio reflector', - 756: 'rain barrel', - 757: 'recreational vehicle, RV, R.V.', - 758: 'reel', - 759: 'reflex camera', - 760: 'refrigerator, icebox', - 761: 'remote control, remote', - 762: 'restaurant, eating house, eating place, eatery', - 763: 'revolver, six-gun, six-shooter', - 764: 'rifle', - 765: 'rocking chair, rocker', - 766: 'rotisserie', - 767: 'rubber eraser, rubber, pencil eraser', - 768: 'rugby ball', - 769: 'rule, ruler', - 770: 'running shoe', - 771: 'safe', - 772: 'safety pin', - 773: 'saltshaker, salt shaker', - 774: 'sandal', - 775: 'sarong', - 776: 'sax, saxophone', - 777: 'scabbard', - 778: 'scale, weighing machine', - 779: 'school bus', - 780: 'schooner', - 781: 'scoreboard', - 782: 'screen, CRT screen', - 783: 'screw', - 784: 'screwdriver', - 785: 'seat belt, seatbelt', - 786: 'sewing machine', - 787: 'shield, buckler', - 788: 'shoe shop, shoe-shop, shoe store', - 789: 'shoji', - 790: 'shopping basket', - 791: 'shopping cart', - 792: 'shovel', - 793: 'shower cap', - 794: 'shower curtain', - 795: 'ski', - 796: 'ski mask', - 797: 'sleeping bag', - 798: 'slide rule, slipstick', - 799: 'sliding door', - 800: 'slot, one-armed bandit', - 801: 'snorkel', - 802: 'snowmobile', - 803: 'snowplow, snowplough', - 804: 'soap dispenser', - 805: 'soccer ball', - 806: 'sock', - 807: 'solar dish, solar collector, solar furnace', - 808: 'sombrero', - 809: 'soup bowl', - 810: 'space bar', - 811: 'space heater', - 812: 'space shuttle', - 813: 'spatula', - 814: 'speedboat', - 815: "spider web, spider's web", - 816: 'spindle', - 817: 'sports car, sport car', - 818: 'spotlight, spot', - 819: 'stage', - 820: 'steam locomotive', - 821: 'steel arch bridge', - 822: 'steel drum', - 823: 'stethoscope', - 824: 'stole', - 825: 'stone wall', - 826: 
'stopwatch, stop watch', - 827: 'stove', - 828: 'strainer', - 829: 'streetcar, tram, tramcar, trolley, trolley car', - 830: 'stretcher', - 831: 'studio couch, day bed', - 832: 'stupa, tope', - 833: 'submarine, pigboat, sub, U-boat', - 834: 'suit, suit of clothes', - 835: 'sundial', - 836: 'sunglass', - 837: 'sunglasses, dark glasses, shades', - 838: 'sunscreen, sunblock, sun blocker', - 839: 'suspension bridge', - 840: 'swab, swob, mop', - 841: 'sweatshirt', - 842: 'swimming trunks, bathing trunks', - 843: 'swing', - 844: 'switch, electric switch, electrical switch', - 845: 'syringe', - 846: 'table lamp', - 847: 'tank, army tank, armored combat vehicle, armoured combat vehicle', - 848: 'tape player', - 849: 'teapot', - 850: 'teddy, teddy bear', - 851: 'television, television system', - 852: 'tennis ball', - 853: 'thatch, thatched roof', - 854: 'theater curtain, theatre curtain', - 855: 'thimble', - 856: 'thresher, thrasher, threshing machine', - 857: 'throne', - 858: 'tile roof', - 859: 'toaster', - 860: 'tobacco shop, tobacconist shop, tobacconist', - 861: 'toilet seat', - 862: 'torch', - 863: 'totem pole', - 864: 'tow truck, tow car, wrecker', - 865: 'toyshop', - 866: 'tractor', - 867: 'trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi', - 868: 'tray', - 869: 'trench coat', - 870: 'tricycle, trike, velocipede', - 871: 'trimaran', - 872: 'tripod', - 873: 'triumphal arch', - 874: 'trolleybus, trolley coach, trackless trolley', - 875: 'trombone', - 876: 'tub, vat', - 877: 'turnstile', - 878: 'typewriter keyboard', - 879: 'umbrella', - 880: 'unicycle, monocycle', - 881: 'upright, upright piano', - 882: 'vacuum, vacuum cleaner', - 883: 'vase', - 884: 'vault', - 885: 'velvet', - 886: 'vending machine', - 887: 'vestment', - 888: 'viaduct', - 889: 'violin, fiddle', - 890: 'volleyball', - 891: 'waffle iron', - 892: 'wall clock', - 893: 'wallet, billfold, notecase, pocketbook', - 894: 'wardrobe, closet, press', - 895: 'warplane, military plane', - 896: 'washbasin, handbasin, washbowl, lavabo, wash-hand basin', - 897: 'washer, automatic washer, washing machine', - 898: 'water bottle', - 899: 'water jug', - 900: 'water tower', - 901: 'whiskey jug', - 902: 'whistle', - 903: 'wig', - 904: 'window screen', - 905: 'window shade', - 906: 'Windsor tie', - 907: 'wine bottle', - 908: 'wing', - 909: 'wok', - 910: 'wooden spoon', - 911: 'wool, woolen, woollen', - 912: 'worm fence, snake fence, snake-rail fence, Virginia fence', - 913: 'wreck', - 914: 'yawl', - 915: 'yurt', - 916: 'web site, website, internet site, site', - 917: 'comic book', - 918: 'crossword puzzle, crossword', - 919: 'street sign', - 920: 'traffic light, traffic signal, stoplight', - 921: 'book jacket, dust cover, dust jacket, dust wrapper', - 922: 'menu', - 923: 'plate', - 924: 'guacamole', - 925: 'consomme', - 926: 'hot pot, hotpot', - 927: 'trifle', - 928: 'ice cream, icecream', - 929: 'ice lolly, lolly, lollipop, popsicle', - 930: 'French loaf', - 931: 'bagel, beigel', - 932: 'pretzel', - 933: 'cheeseburger', - 934: 'hotdog, hot dog, red hot', - 935: 'mashed potato', - 936: 'head cabbage', - 937: 'broccoli', - 938: 'cauliflower', - 939: 'zucchini, courgette', - 940: 'spaghetti squash', - 941: 'acorn squash', - 942: 'butternut squash', - 943: 'cucumber, cuke', - 944: 'artichoke, globe artichoke', - 945: 'bell pepper', - 946: 'cardoon', - 947: 'mushroom', - 948: 'Granny Smith', - 949: 'strawberry', - 950: 'orange', - 951: 'lemon', - 952: 'fig', - 953: 'pineapple, ananas', - 954: 'banana', - 955: 'jackfruit, jak, 
jack', - 956: 'custard apple', - 957: 'pomegranate', - 958: 'hay', - 959: 'carbonara', - 960: 'chocolate sauce, chocolate syrup', - 961: 'dough', - 962: 'meat loaf, meatloaf', - 963: 'pizza, pizza pie', - 964: 'potpie', - 965: 'burrito', - 966: 'red wine', - 967: 'espresso', - 968: 'cup', - 969: 'eggnog', - 970: 'alp', - 971: 'bubble', - 972: 'cliff, drop, drop-off', - 973: 'coral reef', - 974: 'geyser', - 975: 'lakeside, lakeshore', - 976: 'promontory, headland, head, foreland', - 977: 'sandbar, sand bar', - 978: 'seashore, coast, seacoast, sea-coast', - 979: 'valley, vale', - 980: 'volcano', - 981: 'ballplayer, baseball player', - 982: 'groom, bridegroom', - 983: 'scuba diver', - 984: 'rapeseed', - 985: 'daisy', - 986: "yellow lady's slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum", - 987: 'corn', - 988: 'acorn', - 989: 'hip, rose hip, rosehip', - 990: 'buckeye, horse chestnut, conker', - 991: 'coral fungus', - 992: 'agaric', - 993: 'gyromitra', - 994: 'stinkhorn, carrion fungus', - 995: 'earthstar', - 996: 'hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa', - 997: 'bolete', - 998: 'ear, spike, capitulum', - 999: 'toilet tissue, toilet paper, bathroom tissue'} diff --git a/components/contrib/openvino/predict/containers/predict.py b/components/contrib/openvino/predict/containers/predict.py deleted file mode 100644 index 1ea24c2d391..00000000000 --- a/components/contrib/openvino/predict/containers/predict.py +++ /dev/null @@ -1,177 +0,0 @@ -from openvino.inference_engine import IENetwork, IEPlugin -import argparse -import numpy as np -from urllib.parse import urlparse -from google.cloud import storage -from google.auth import exceptions -import classes -import datetime -from shutil import copy -import os -import json - -def get_local_file(source_path): - parsed_path = urlparse(source_path) - if parsed_path.scheme == "gs": - bucket_name = parsed_path.netloc - file_path = parsed_path.path[1:] - file_name = os.path.split(parsed_path.path)[1] - try: - gs_client = storage.Client() - bucket = gs_client.get_bucket(bucket_name) - except exceptions.DefaultCredentialsError: - # if credentials fails, try to connect as anonymous user - gs_client = storage.Client.create_anonymous_client() - bucket = gs_client.bucket(bucket_name, user_project=None) - blob = bucket.blob(file_path) - blob.download_to_filename(file_name) - elif parsed_path.scheme == "": - # in case of local path just pass the input argument - if os.path.isfile(source_path): - file_name = source_path - else: - print("file " + source_path + "is not accessible") - file_name = "" - return file_name - - -def upload_file(source_file, target_folder): - parsed_path = urlparse(target_folder) - if parsed_path.scheme == "gs": - bucket_name = parsed_path.netloc - folder_path = parsed_path.path[1:] - try: - gs_client = storage.Client() - bucket = gs_client.get_bucket(bucket_name) - blob = bucket.blob(folder_path + "/" + source_file) - blob.upload_from_filename(source_file) - except Exception as er: - print(er) - return False - elif parsed_path.scheme == "": - if target_folder != ".": - copy(source_file, target_folder) - return True - - -def main(): - parser = argparse.ArgumentParser( - description='Component executing inference operation') - parser.add_argument('--model_bin', type=str, required=True, - help='GCS or local path to model weights file (.bin)') - parser.add_argument('--model_xml', type=str, required=True, - help='GCS or local path to model graph (.xml)') - 
parser.add_argument('--input_numpy_file', type=str, required=True, - help='GCS or local path to input dataset numpy file') - parser.add_argument('--label_numpy_file', type=str, required=True, - help='GCS or local path to numpy file with labels') - parser.add_argument('--output_folder', type=str, required=True, - help='GCS or local path to results upload folder') - parser.add_argument('--batch_size', type=int, default=1, - help='batch size to be used for inference') - parser.add_argument('--scale_div', type=float, default=1, - help='scale the np input by division of by the value') - parser.add_argument('--scale_sub', type=float, default=128, - help='scale the np input by substraction of the value') - args = parser.parse_args() - print(args) - - device = "CPU" - plugin_dir = None - - model_xml = get_local_file(args.model_xml) - print("model xml", model_xml) - if model_xml == "": - exit(1) - model_bin = get_local_file(args.model_bin) - print("model bin", model_bin) - if model_bin == "": - exit(1) - input_numpy_file = get_local_file(args.input_numpy_file) - print("input_numpy_file", input_numpy_file) - if input_numpy_file == "": - exit(1) - - label_numpy_file = get_local_file(args.label_numpy_file) - print("label_numpy_file", label_numpy_file) - if label_numpy_file == "": - exit(1) - - cpu_extension = "/usr/local/lib/libcpu_extension.so" - - plugin = IEPlugin(device=device, plugin_dirs=plugin_dir) - if cpu_extension and 'CPU' in device: - plugin.add_cpu_extension(cpu_extension) - - print("inference engine:", model_xml, model_bin, device) - - # Read IR - print("Reading IR...") - net = IENetwork(model=model_xml, weights=model_bin) - batch_size = args.batch_size - net.batch_size = batch_size - print("Model loaded. Batch size", batch_size) - - input_blob = next(iter(net.inputs)) - output_blob = next(iter(net.outputs)) - print(output_blob) - - print("Loading IR to the plugin...") - exec_net = plugin.load(network=net, num_requests=1) - - print("Loading input numpy") - imgs = np.load(input_numpy_file, mmap_mode='r', allow_pickle=False) - imgs = (imgs / args.scale_div) - args.scale_div - lbs = np.load(label_numpy_file, mmap_mode='r', allow_pickle=False) - - print("Loaded input data", imgs.shape, imgs.dtype, "Min value:", np.min(imgs), "Max value", np.max(imgs)) - - combined_results = {} # dictionary storing results for all model outputs - processing_times = np.zeros((0),int) - matched_count = 0 - total_executed = 0 - - for x in range(0, imgs.shape[0] - batch_size + 1, batch_size): - img = imgs[x:(x + batch_size)] - lb = lbs[x:(x + batch_size)] - start_time = datetime.datetime.now() - results = exec_net.infer(inputs={input_blob: img}) - end_time = datetime.datetime.now() - duration = (end_time - start_time).total_seconds() * 1000 - print("Inference duration:", duration, "ms") - processing_times = np.append(processing_times,np.array([int(duration)])) - output = list(results.keys())[0] # check only one output - nu = results[output] - for i in range(nu.shape[0]): - single_result = nu[[i],...] - ma = np.argmax(single_result) - total_executed += 1 - if ma == lb[i]: - matched_count += 1 - mark_message = "; Correct match." - else: - mark_message = "; Incorrect match. 
Should be {} {}".format(lb[i], classes.imagenet_classes[lb[i]] ) - print("\t",i, classes.imagenet_classes[ma],ma, mark_message) - if output in combined_results: - combined_results[output] = np.append(combined_results[output], - results[output], 0) - else: - combined_results[output] = results[output] - - filename = output.replace("/", "_") + ".npy" - np.save(filename, combined_results[output]) - upload_file(filename, args.output_folder) - print("Inference results uploaded to", filename) - print('Classification accuracy: {:.2f}'.format(100*matched_count/total_executed)) - print('Average time: {:.2f} ms; average speed: {:.2f} fps'.format(round(np.average(processing_times), 2),round(1000 * batch_size / np.average(processing_times), 2))) - - accuracy = matched_count/total_executed - latency = np.average(processing_times) - metrics = {'metrics': [{'name': 'accuracy-score','numberValue': accuracy,'format': "PERCENTAGE"}, - {'name': 'latency','numberValue': latency,'format': "RAW"}]} - - with open('/mlpipeline-metrics.json', 'w') as f: - json.dump(metrics, f) - -if __name__ == "__main__": - main() diff --git a/components/contrib/openvino/predict/containers/requirements.txt b/components/contrib/openvino/predict/containers/requirements.txt deleted file mode 100644 index 1f829b311c1..00000000000 --- a/components/contrib/openvino/predict/containers/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -numpy -google-cloud-storage - diff --git a/components/contrib/openvino/tf-slim/README.md b/components/contrib/openvino/tf-slim/README.md deleted file mode 100644 index 411c1521744..00000000000 --- a/components/contrib/openvino/tf-slim/README.md +++ /dev/null @@ -1,79 +0,0 @@ -# Slim models generator - -This component is automating implementation of [slim models](https://github.com/tensorflow/models/blob/master/research/slim). -It can create a graph from slim models zoo, load the variables pre-trained checkpoint and export the model in the form -of Tensorflow `frozen graph` and `saved model`. - -The results of the component can be saved in a local path or in GCS cloud storage. The can be used to other ML pipeline -components like OpenVINO model optimizer, OpenVINO predict or OpenVINO Model Server. - -## Building - -```bash -docker build --build-arg http_proxy=$http_proxy --build-arg https_proxy=$https_proxy . -``` - - -## Using the component - -```bash -python slim_model.py --help -usage: slim_model.py [-h] [--model_name MODEL_NAME] [--export_dir EXPORT_DIR] - [--batch_size BATCH_SIZE] - [--checkpoint_url CHECKPOINT_URL] - [--num_classes NUM_CLASSES] - -Slim model generator - -optional arguments: - -h, --help show this help message and exit - --model_name MODEL_NAME - --export_dir EXPORT_DIR - GCS or local path to save the generated model - --batch_size BATCH_SIZE - batch size to be used in the exported model - --checkpoint_url CHECKPOINT_URL - URL to the pretrained compressed checkpoint - --num_classes NUM_CLASSES - number of model classes -``` - -*Model name* can be any model defined in the slim repository. The naming convention needs to match the key name from -[net_factory.py]()https://github.com/tensorflow/models/blob/master/research/slim/nets/nets_factory.py#L39) - -*export dir* can be a local path in the container or it might be GCS path to store generated files: -- model graph file in pb format -- frozen graph including weights from the provided checkpoint -- event file which can be imported in tensorboard -- saved model which will be stored in subfolder called `1`. 
- -*batch size* represent the batch used in the exported models. It can be natural number to represent fixed batch size -or `-1` value can be set for dynamic batch size. - -*checkpoint_url* is the URL to a pre-trained checkpoint https://github.com/tensorflow/models/tree/master/research/slim#pre-trained-models -It must match the model specified in model_name parameter. - -*num classes* should include model specific number of classes in the outputs. For slim models it should be a value -of `1000` or `1001`. It must match the number of classes used in the requested model name. - - -## Examples - -``` -python slim_model.py --model_name mobilenet_v1_050 --export_dir /tmp/mobilnet ---batch_size 1 --num_classes=1001 \ ---checkpoint_url http://download.tensorflow.org/models/mobilenet_v1_2018_02_22/mobilenet_v1_0.5_160.tgz - -python slim_model.py --model_name resnet_v1_50 --export_dir gs:///resnet \ ---batch_size -1 --num_classes=1000 \ ---checkpoint_url http://download.tensorflow.org/models/resnet_v1_50_2016_08_28.tar.gz - -python slim_model.py --model_name inception_v4 --export_dir gs:///inception \ ---batch_size -1 --num_classes=1001 \ ---checkpoint_url http://download.tensorflow.org/models/inception_v4_2016_09_09.tar.gz - -python slim_model.py --model_name vgg_19 --export_dir /tmp/vgg \ ---batch_size 1 --num_classes=1000 \ ---checkpoint_url http://download.tensorflow.org/models/vgg_19_2016_08_28.tar.gz -``` - diff --git a/components/contrib/openvino/tf-slim/containers/Dockerfile b/components/contrib/openvino/tf-slim/containers/Dockerfile deleted file mode 100644 index 24952e9571f..00000000000 --- a/components/contrib/openvino/tf-slim/containers/Dockerfile +++ /dev/null @@ -1,74 +0,0 @@ -FROM intelpython/intelpython3_core as BUILD - -RUN apt-get update && apt-get install -y --no-install-recommends \ - openjdk-8-jdk \ - openjdk-8-jre-headless \ - build-essential \ - curl \ - git \ - libcurl3-dev \ - libfreetype6-dev \ - libhdf5-serial-dev \ - libpng-dev \ - libzmq3-dev \ - pkg-config \ - rsync \ - software-properties-common \ - unzip \ - zip \ - zlib1g-dev && \ - apt-get clean - -RUN git clone --depth 1 https://github.com/tensorflow/tensorflow - - -RUN conda create --name myenv -y -ENV PATH /opt/conda/envs/myenv/bin:$PATH - -# Set up Bazel. - - -# Running bazel inside a `docker build` command causes trouble, cf: -# https://github.com/bazelbuild/bazel/issues/134 -# The easiest solution is to set up a bazelrc file forcing --batch. -RUN echo "startup --batch" >>/etc/bazel.bazelrc -# Similarly, we need to workaround sandboxing issues: -# https://github.com/bazelbuild/bazel/issues/418 -RUN echo "build --spawn_strategy=standalone --genrule_strategy=standalone" \ - >>/etc/bazel.bazelrc -# Install the most recent bazel release. 
-ENV BAZEL_VERSION 0.19.2 -WORKDIR / -RUN mkdir /bazel && \ - cd /bazel && \ - curl -H "User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.133 Safari/537.36" -fSsL -O https://github.com/bazelbuild/bazel/releases/download/$BAZEL_VERSION/bazel-$BAZEL_VERSION-installer-linux-x86_64.sh && \ - curl -H "User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.133 Safari/537.36" -fSsL -o /bazel/LICENSE.txt https://raw.githubusercontent.com/bazelbuild/bazel/master/LICENSE && \ - chmod +x bazel-*.sh && \ - ./bazel-$BAZEL_VERSION-installer-linux-x86_64.sh && \ - cd / && \ - rm -f /bazel/bazel-$BAZEL_VERSION-installer-linux-x86_64.sh - -RUN cd tensorflow && bazel build tensorflow/tools/graph_transforms:summarize_graph - -FROM intelpython/intelpython3_core as PROD -RUN apt-get update && apt-get install -y --no-install-recommends \ - git && \ - apt-get clean - -WORKDIR /slim - -RUN git clone --depth 1 https://github.com/tensorflow/models && rm -Rf models/.git && \ - git clone --depth 1 https://github.com/tensorflow/tensorflow && rm -Rf tensorflow/.git - -RUN conda create --name myenv -y -ENV PATH /opt/conda/envs/myenv/bin:$PATH - -RUN pip install --no-cache-dir tensorflow validators google-cloud-storage -ENV PYTHONPATH=models/research/slim:tensorflow/python/tools - -COPY --from=BUILD /tensorflow/bazel-bin/tensorflow/tools/graph_transforms/summarize_graph summarize_graph -COPY --from=BUILD /root/.cache/bazel/_bazel_root/*/execroot/org_tensorflow/bazel-out/k8-opt/bin/_solib_k8/_U_S_Stensorflow_Stools_Sgraph_Utransforms_Csummarize_Ugraph___Utensorflow/libtensorflow_framework.so libtensorflow_framework.so -COPY slim_model.py . - - - diff --git a/components/contrib/openvino/tf-slim/containers/slim_model.py b/components/contrib/openvino/tf-slim/containers/slim_model.py deleted file mode 100644 index 569e3dff2f9..00000000000 --- a/components/contrib/openvino/tf-slim/containers/slim_model.py +++ /dev/null @@ -1,216 +0,0 @@ -import tensorflow as tf -from tensorflow.python.saved_model import signature_constants -from tensorflow.python.saved_model import tag_constants -from nets import nets_factory -from tensorflow.python.platform import gfile -import argparse -import validators -import os -import requests -import tarfile -from subprocess import Popen, PIPE -import shutil -import glob -import re -import json -from tensorflow.python.tools.freeze_graph import freeze_graph -from tensorflow.python.tools.saved_model_cli import _show_all -from urllib.parse import urlparse -from shutil import copyfile -from google.cloud import storage - - -def upload_to_gcs(src, dst): - parsed_path = urlparse(dst) - bucket_name = parsed_path.netloc - file_path = parsed_path.path[1:] - gs_client = storage.Client() - bucket = gs_client.get_bucket(bucket_name) - blob = bucket.blob(file_path) - blob.upload_from_filename(src) - - -def main(): - parser = argparse.ArgumentParser( - description='Slim model generator') - parser.add_argument('--model_name', type=str, - help='') - parser.add_argument('--export_dir', type=str, default="/tmp/export_dir", - help='GCS or local path to save graph files') - parser.add_argument('--saved_model_dir', type=str, - help='GCS or local path to save the generated model') - parser.add_argument('--batch_size', type=str, default=1, - help='batch size to be used in the exported model') - parser.add_argument('--checkpoint_url', type=str, - help='URL to the pretrained compressed checkpoint') - 
parser.add_argument('--num_classes', type=int, default=1000, - help='number of model classes') - args = parser.parse_args() - - MODEL = args.model_name - URL = args.checkpoint_url - if not validators.url(args.checkpoint_url): - print('use a valid URL parameter') - exit(1) - TMP_DIR = "/tmp/slim_tmp" - NUM_CLASSES = args.num_classes - BATCH_SIZE = args.batch_size - MODEL_FILE_NAME = URL.rsplit('/', 1)[-1] - EXPORT_DIR = args.export_dir - SAVED_MODEL_DIR = args.saved_model_dir - - tmp_graph_file = os.path.join(TMP_DIR, MODEL + '_graph.pb') - export_graph_file = os.path.join(EXPORT_DIR, MODEL + '_graph.pb') - frozen_file = os.path.join(EXPORT_DIR, 'frozen_graph_' + MODEL + '.pb') - - if not os.path.exists(TMP_DIR): - os.makedirs(TMP_DIR) - - if not os.path.exists(TMP_DIR + '/' + MODEL_FILE_NAME): - print("Downloading and decompressing the model checkpoint...") - response = requests.get(URL, stream=True) - with open(os.path.join(TMP_DIR, MODEL_FILE_NAME), 'wb') as output: - output.write(response.content) - tar = tarfile.open(os.path.join(TMP_DIR, MODEL_FILE_NAME)) - tar.extractall(path=TMP_DIR) - tar.close() - print("Model checkpoint downloaded and decompressed to:", TMP_DIR) - else: - print("Reusing existing model file ", - os.path.join(TMP_DIR, MODEL_FILE_NAME)) - - checkpoint = glob.glob(TMP_DIR + '/*.ckpt*') - print("checkpoint", checkpoint) - if len(checkpoint) > 0: - m = re.match(r"([\S]*.ckpt)", checkpoint[-1]) - print("checkpoint match", m) - checkpoint = m[0] - print(checkpoint) - else: - print("checkpoint file not detected in " + URL) - exit(1) - - print("Saving graph def file") - with tf.Graph().as_default() as graph: - - network_fn = nets_factory.get_network_fn(MODEL, - num_classes=NUM_CLASSES, - is_training=False) - image_size = network_fn.default_image_size - if BATCH_SIZE == "None" or BATCH_SIZE == "-1": - batchsize = None - else: - batchsize = BATCH_SIZE - placeholder = tf.placeholder(name='input', dtype=tf.float32, - shape=[batchsize, image_size, - image_size, 3]) - network_fn(placeholder) - graph_def = graph.as_graph_def() - - with gfile.GFile(tmp_graph_file, 'wb') as f: - f.write(graph_def.SerializeToString()) - if urlparse(EXPORT_DIR).scheme == 'gs': - upload_to_gcs(tmp_graph_file, export_graph_file) - elif urlparse(EXPORT_DIR).scheme == '': - if not os.path.exists(EXPORT_DIR): - os.makedirs(EXPORT_DIR) - copyfile(tmp_graph_file, export_graph_file) - else: - print("Invalid format of model export path") - print("Graph file saved to ", - os.path.join(EXPORT_DIR, MODEL + '_graph.pb')) - - print("Analysing graph") - p = Popen("./summarize_graph --in_graph=" + tmp_graph_file + - " --print_structure=false", shell=True, stdout=PIPE, stderr=PIPE) - summary, err = p.communicate() - inputs = [] - outputs = [] - for line in summary.split(b'\n'): - line_str = line.decode() - if re.match(r"Found [\d]* possible inputs", line_str) is not None: - print("in", line) - m = re.findall(r'name=[\S]*,', line.decode()) - for match in m: - print("match", match) - input = match[5:-1] - inputs.append(input) - print("inputs", inputs) - - if re.match(r"Found [\d]* possible outputs", line_str) is not None: - print("out", line) - m = re.findall(r'name=[\S]*,', line_str) - for match in m: - print("match", match) - output = match[5:-1] - outputs.append(output) - print("outputs", outputs) - - output_node_names = ",".join(outputs) - print("Creating freezed graph based on pretrained checkpoint") - freeze_graph(input_graph=tmp_graph_file, - input_checkpoint=checkpoint, - input_binary=True, - 
clear_devices=True, - input_saver='', - output_node_names=output_node_names, - restore_op_name="save/restore_all", - filename_tensor_name="save/Const:0", - output_graph=frozen_file, - initializer_nodes="") - if urlparse(SAVED_MODEL_DIR).scheme == '' and \ - os.path.exists(SAVED_MODEL_DIR): - shutil.rmtree(SAVED_MODEL_DIR) - - builder = tf.saved_model.builder.SavedModelBuilder(SAVED_MODEL_DIR) - - with tf.gfile.GFile(frozen_file, "rb") as f: - graph_def = tf.GraphDef() - graph_def.ParseFromString(f.read()) - - sigs = {} - - with tf.Session(graph=tf.Graph()) as sess: - tf.import_graph_def(graph_def, name="") - g = tf.get_default_graph() - inp_dic = {} - for inp in inputs: - inp_t = g.get_tensor_by_name(inp+":0") - inp_dic[inp] = inp_t - out_dic = {} - for out in outputs: - out_t = g.get_tensor_by_name(out+":0") - out_dic[out] = out_t - - sigs[signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY] = \ - tf.saved_model.signature_def_utils.predict_signature_def( - inp_dic, out_dic) - - builder.add_meta_graph_and_variables(sess, [tag_constants.SERVING], - signature_def_map=sigs) - print("Exporting saved model to:", SAVED_MODEL_DIR + ' ...') - builder.save() - - print("Saved model exported to:", SAVED_MODEL_DIR) - _show_all(SAVED_MODEL_DIR) - pb_visual_writer = tf.summary.FileWriter(SAVED_MODEL_DIR) - pb_visual_writer.add_graph(sess.graph) - print("Visualize the model by running: " - "tensorboard --logdir={}".format(EXPORT_DIR)) - with open('/tmp/saved_model_dir.txt', 'w') as f: - f.write(SAVED_MODEL_DIR) - with open('/tmp/export_dir.txt', 'w') as f: - f.write(EXPORT_DIR) - - artifacts = {"version": 1,"outputs": [ - { - "type": "tensorboard", - "source": SAVED_MODEL_DIR - } - ] - } - with open('/mlpipeline-ui-metadata.json', 'w') as f: - json.dump(artifacts, f) - -if __name__ == "__main__": - main() diff --git a/components/contrib/pandas/Transform_DataFrame/_samples/sample_pipeline.py b/components/contrib/pandas/Transform_DataFrame/_samples/sample_pipeline.py deleted file mode 100644 index da01b50506d..00000000000 --- a/components/contrib/pandas/Transform_DataFrame/_samples/sample_pipeline.py +++ /dev/null @@ -1,58 +0,0 @@ -import kfp -from kfp import components - - -chicago_taxi_dataset_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e3337b8bdcd63636934954e592d4b32c95b49129/components/datasets/Chicago%20Taxi/component.yaml') -convert_csv_to_apache_parquet_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/d737c448723b9f541a3543012b4414c17b2eab5c/components/_converters/ApacheParquet/from_CSV/component.yaml') -convert_apache_parquet_to_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/af3eaf64e87313795cad1add9bfd9fa1e86af6de/components/_converters/ApacheParquet/to_CSV/component.yaml') - -pandas_transform_parquet_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/6162d55998b176b50267d351241100bb0ee715bc/components/pandas/Transform_DataFrame/in_ApacheParquet_format/component.yaml') -pandas_transform_csv_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/6162d55998b176b50267d351241100bb0ee715bc/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml') - - -def pandas_transform_pipeline(): - training_data_in_csv = chicago_taxi_dataset_op( - where='trip_start_timestamp >= "2019-01-01" AND trip_start_timestamp < "2019-02-01"', - 
select='tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total', - limit=1000, - ).output - - training_data_in_parquet = convert_csv_to_apache_parquet_op(training_data_in_csv).output - - training_data_for_classification_in_parquet = pandas_transform_parquet_op( - table=training_data_in_parquet, - transform_code='''df.insert(0, "was_tipped", df["tips"] > 0); del df["tips"]''', - ).output - convert_apache_parquet_to_csv_op(training_data_for_classification_in_parquet) - - features_in_parquet = pandas_transform_parquet_op( - table=training_data_in_parquet, - transform_code='''df = df.drop(columns=["tips"])''', - ).output - convert_apache_parquet_to_csv_op(features_in_parquet) - - labels_in_parquet = pandas_transform_parquet_op( - table=training_data_in_parquet, - transform_code='''df = df[["tips"]]''', - ).output - convert_apache_parquet_to_csv_op(labels_in_parquet) - - training_data_for_classification_in_csv = pandas_transform_csv_op( - table=training_data_in_csv, - transform_code='''df.insert(0, "was_tipped", df["tips"] > 0); del df["tips"]''', - ).output - - features_in_csv = pandas_transform_csv_op( - table=training_data_in_csv, - transform_code='''df = df.drop(columns=["tips"])''', - ).output - - labels_in_csv = pandas_transform_csv_op( - table=training_data_in_csv, - transform_code='''df = df[["tips"]]''', - ).output - - -if __name__ == '__main__': - kfp_endpoint=None - kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func(pandas_transform_pipeline, arguments={}) diff --git a/components/contrib/pandas/Transform_DataFrame/in_ApacheParquet_format/component.py b/components/contrib/pandas/Transform_DataFrame/in_ApacheParquet_format/component.py deleted file mode 100644 index 4c44233f696..00000000000 --- a/components/contrib/pandas/Transform_DataFrame/in_ApacheParquet_format/component.py +++ /dev/null @@ -1,47 +0,0 @@ -from kfp.components import InputPath, OutputPath, create_component_from_func - -def Pandas_Transform_DataFrame_in_ApacheParquet_format( - table_path: InputPath('ApacheParquet'), - transformed_table_path: OutputPath('ApacheParquet'), - transform_code: 'PythonCode', -): - '''Transform DataFrame loaded from an ApacheParquet file. - - Inputs: - table: DataFrame to transform. - transform_code: Transformation code. Code is written in Python and can consist of multiple lines. - The DataFrame variable is called "df". - Examples: - - `df['prod'] = df['X'] * df['Y']` - - `df = df[['X', 'prod']]` - - `df.insert(0, "is_positive", df["X"] > 0)` - - Outputs: - transformed_table: Transformed DataFrame. - - Annotations: - author: Alexey Volkov - ''' - import pandas - - df = pandas.read_parquet(table_path) - # The namespace is needed so that the code can replace `df`. 
For example df = df[['X']] - namespace = locals() - exec(transform_code, namespace) - namespace['df'].to_parquet(transformed_table_path) - - -if __name__ == '__main__': - Pandas_Transform_DataFrame_in_ApacheParquet_format_op = create_component_from_func( - Pandas_Transform_DataFrame_in_ApacheParquet_format, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=[ - 'pandas==1.0.4', - 'pyarrow==0.14.1', - ], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/pandas/Transform_DataFrame/in_ApacheParquet_format/component.yaml", - }, - ) diff --git a/components/contrib/pandas/Transform_DataFrame/in_ApacheParquet_format/component.yaml b/components/contrib/pandas/Transform_DataFrame/in_ApacheParquet_format/component.yaml deleted file mode 100644 index 364f6273e4f..00000000000 --- a/components/contrib/pandas/Transform_DataFrame/in_ApacheParquet_format/component.yaml +++ /dev/null @@ -1,91 +0,0 @@ -name: Pandas Transform DataFrame in ApacheParquet format -description: |- - Transform DataFrame loaded from an ApacheParquet file. - - Inputs: - table: DataFrame to transform. - transform_code: Transformation code. Code is written in Python and can consist of multiple lines. - The DataFrame variable is called "df". - Examples: - - `df['prod'] = df['X'] * df['Y']` - - `df = df[['X', 'prod']]` - - `df.insert(0, "is_positive", df["X"] > 0)` - - Outputs: - transformed_table: Transformed DataFrame. - - Annotations: - author: Alexey Volkov -inputs: -- {name: table, type: ApacheParquet} -- {name: transform_code, type: PythonCode} -outputs: -- {name: transformed_table, type: ApacheParquet} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/pandas/Transform_DataFrame/in_ApacheParquet_format/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pandas==1.0.4' 'pyarrow==0.14.1' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 - -m pip install --quiet --no-warn-script-location 'pandas==1.0.4' 'pyarrow==0.14.1' - --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def Pandas_Transform_DataFrame_in_ApacheParquet_format( - table_path, - transformed_table_path, - transform_code, - ): - '''Transform DataFrame loaded from an ApacheParquet file. - - Inputs: - table: DataFrame to transform. - transform_code: Transformation code. Code is written in Python and can consist of multiple lines. - The DataFrame variable is called "df". - Examples: - - `df['prod'] = df['X'] * df['Y']` - - `df = df[['X', 'prod']]` - - `df.insert(0, "is_positive", df["X"] > 0)` - - Outputs: - transformed_table: Transformed DataFrame. - - Annotations: - author: Alexey Volkov - ''' - import pandas - - df = pandas.read_parquet(table_path) - # The namespace is needed so that the code can replace `df`. 
For example df = df[['X']] - namespace = locals() - exec(transform_code, namespace) - namespace['df'].to_parquet(transformed_table_path) - - import argparse - _parser = argparse.ArgumentParser(prog='Pandas Transform DataFrame in ApacheParquet format', description='Transform DataFrame loaded from an ApacheParquet file.\n\n Inputs:\n table: DataFrame to transform.\n transform_code: Transformation code. Code is written in Python and can consist of multiple lines.\n The DataFrame variable is called "df".\n Examples:\n - `df[\'prod\'] = df[\'X\'] * df[\'Y\']`\n - `df = df[[\'X\', \'prod\']]`\n - `df.insert(0, "is_positive", df["X"] > 0)`\n\n Outputs:\n transformed_table: Transformed DataFrame.\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--table", dest="table_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--transform-code", dest="transform_code", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--transformed-table", dest="transformed_table_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = Pandas_Transform_DataFrame_in_ApacheParquet_format(**_parsed_args) - args: - - --table - - {inputPath: table} - - --transform-code - - {inputValue: transform_code} - - --transformed-table - - {outputPath: transformed_table} diff --git a/components/contrib/pandas/Transform_DataFrame/in_CSV_format/component.py b/components/contrib/pandas/Transform_DataFrame/in_CSV_format/component.py deleted file mode 100644 index 666351731e9..00000000000 --- a/components/contrib/pandas/Transform_DataFrame/in_CSV_format/component.py +++ /dev/null @@ -1,51 +0,0 @@ -from kfp.components import InputPath, OutputPath, create_component_from_func - -def Pandas_Transform_DataFrame_in_CSV_format( - table_path: InputPath('CSV'), - transformed_table_path: OutputPath('CSV'), - transform_code: 'PythonCode', -): - '''Transform DataFrame loaded from a CSV file. - - Inputs: - table: Table to transform. - transform_code: Transformation code. Code is written in Python and can consist of multiple lines. - The DataFrame variable is called "df". - Examples: - - `df['prod'] = df['X'] * df['Y']` - - `df = df[['X', 'prod']]` - - `df.insert(0, "is_positive", df["X"] > 0)` - - Outputs: - transformed_table: Transformed table. - - Annotations: - author: Alexey Volkov - ''' - import pandas - - df = pandas.read_csv( - table_path, - ) - # The namespace is needed so that the code can replace `df`. 
For example df = df[['X']] - namespace = locals() - exec(transform_code, namespace) - namespace['df'].to_csv( - transformed_table_path, - index=False, - ) - - -if __name__ == '__main__': - Pandas_Transform_DataFrame_in_CSV_format_op = create_component_from_func( - Pandas_Transform_DataFrame_in_CSV_format, - output_component_file='component.yaml', - base_image='python:3.7', - packages_to_install=[ - 'pandas==1.0.4', - ], - annotations={ - "author": "Alexey Volkov ", - "canonical_location": "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml", - }, - ) diff --git a/components/contrib/pandas/Transform_DataFrame/in_CSV_format/component.yaml b/components/contrib/pandas/Transform_DataFrame/in_CSV_format/component.yaml deleted file mode 100644 index 8520bb3a573..00000000000 --- a/components/contrib/pandas/Transform_DataFrame/in_CSV_format/component.yaml +++ /dev/null @@ -1,95 +0,0 @@ -name: Pandas Transform DataFrame in CSV format -description: |- - Transform DataFrame loaded from a CSV file. - - Inputs: - table: Table to transform. - transform_code: Transformation code. Code is written in Python and can consist of multiple lines. - The DataFrame variable is called "df". - Examples: - - `df['prod'] = df['X'] * df['Y']` - - `df = df[['X', 'prod']]` - - `df.insert(0, "is_positive", df["X"] > 0)` - - Outputs: - transformed_table: Transformed table. - - Annotations: - author: Alexey Volkov -inputs: -- {name: table, type: CSV} -- {name: transform_code, type: PythonCode} -outputs: -- {name: transformed_table, type: CSV} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml' -implementation: - container: - image: python:3.7 - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pandas==1.0.4' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet - --no-warn-script-location 'pandas==1.0.4' --user) && "$0" "$@" - - python3 - - -u - - -c - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def Pandas_Transform_DataFrame_in_CSV_format( - table_path, - transformed_table_path, - transform_code, - ): - '''Transform DataFrame loaded from a CSV file. - - Inputs: - table: Table to transform. - transform_code: Transformation code. Code is written in Python and can consist of multiple lines. - The DataFrame variable is called "df". - Examples: - - `df['prod'] = df['X'] * df['Y']` - - `df = df[['X', 'prod']]` - - `df.insert(0, "is_positive", df["X"] > 0)` - - Outputs: - transformed_table: Transformed table. - - Annotations: - author: Alexey Volkov - ''' - import pandas - - df = pandas.read_csv( - table_path, - ) - # The namespace is needed so that the code can replace `df`. For example df = df[['X']] - namespace = locals() - exec(transform_code, namespace) - namespace['df'].to_csv( - transformed_table_path, - index=False, - ) - - import argparse - _parser = argparse.ArgumentParser(prog='Pandas Transform DataFrame in CSV format', description='Transform DataFrame loaded from a CSV file.\n\n Inputs:\n table: Table to transform.\n transform_code: Transformation code. 
Code is written in Python and can consist of multiple lines.\n The DataFrame variable is called "df".\n Examples:\n - `df[\'prod\'] = df[\'X\'] * df[\'Y\']`\n - `df = df[[\'X\', \'prod\']]`\n - `df.insert(0, "is_positive", df["X"] > 0)`\n\n Outputs:\n transformed_table: Transformed table.\n\n Annotations:\n author: Alexey Volkov ') - _parser.add_argument("--table", dest="table_path", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--transform-code", dest="transform_code", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--transformed-table", dest="transformed_table_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = Pandas_Transform_DataFrame_in_CSV_format(**_parsed_args) - args: - - --table - - {inputPath: table} - - --transform-code - - {inputValue: transform_code} - - --transformed-table - - {outputPath: transformed_table} diff --git a/components/contrib/presto/query/Dockerfile b/components/contrib/presto/query/Dockerfile deleted file mode 100644 index 85ebd0a9091..00000000000 --- a/components/contrib/presto/query/Dockerfile +++ /dev/null @@ -1,20 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -FROM python:3.7 - -COPY requirements.txt . -RUN python3 -m pip install -r \ - requirements.txt --quiet --no-cache-dir \ - && rm -f requirements.txt - -COPY ./src /pipelines/component/src diff --git a/components/contrib/presto/query/component.yaml b/components/contrib/presto/query/component.yaml deleted file mode 100644 index 927425c9bd6..00000000000 --- a/components/contrib/presto/query/component.yaml +++ /dev/null @@ -1,54 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: Presto Query -description: | - A Kubeflow Pipeline component to submit a query to Presto. -inputs: - - name: host - type: String - description: 'Presto Host.' - - name: catalog - type: String - description: 'The name of the catalog.' - - name: schema - type: String - description: 'The name of the schema.' - - name: query - type: String - description: 'The SQL query statements to be executed in Presto' - - name: user - type: String - description: 'The user of the Presto.' - - name: pwd - type: String - description: 'The password of the Presto.' - - name: output - description: 'The path or name of the emitted output.' -outputs: - - name: output - description: 'The path or name of the emitted output.' 
-implementation: - container: - image: docker.io/mkavi/kubeflow-pipeline-presto:latest - command: [ -python3, /pipelines/component/src/program.py, - --host, {inputValue: host}, - --catalog, {inputValue: catalog}, - --schema, {inputValue: schema}, - --query, {inputValue: query}, - --user, {inputValue: user}, - --pwd, {inputValue: pwd}, - --output, {inputValue: output} - ] - fileOutputs: - output: /output.txt diff --git a/components/contrib/presto/query/requirements.txt b/components/contrib/presto/query/requirements.txt deleted file mode 100644 index cedd5967052..00000000000 --- a/components/contrib/presto/query/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -pyhive[presto] diff --git a/components/contrib/presto/query/src/program.py b/components/contrib/presto/query/src/program.py deleted file mode 100644 index 319fee2009b..00000000000 --- a/components/contrib/presto/query/src/program.py +++ /dev/null @@ -1,75 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse -from pyhive import presto - - -def get_conn(host=None, catalog=None, schema=None, user=None, pwd=None): - conn = presto.connect( - host=host, - port=443, - protocol="https", - catalog=catalog, - schema=schema, - username=user, - password=pwd, - ) - - return conn - - -def query(conn, query): - cursor = conn.cursor() - cursor.execute(query) - cursor.fetchall() - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument("--host", type=str, help="Presto Host.") - parser.add_argument( - "--catalog", type=str, required=True, help="The name of the catalog." - ) - parser.add_argument( - "--schema", type=str, required=True, help="The name of the schema." - ) - parser.add_argument( - "--query", - type=str, - required=True, - help="The SQL query statements to be executed in Presto.", - ) - parser.add_argument( - "--user", type=str, required=True, help="The user of the Presto." - ) - parser.add_argument( - "--pwd", type=str, required=True, help="The password of the Presto." 
- ) - parser.add_argument( - "--output", - type=str, - required=True, - help="The path or name of the emitted output.", - ) - - args = parser.parse_args() - - conn = get_conn(args.host, args.catalog, args.schema, args.user, args.pwd) - query(conn, args.query) - - with open("/output.txt", "w+") as w: - w.write(args.output) - - -if __name__ == "__main__": - main() diff --git a/components/contrib/sample/C#_script/component.yaml b/components/contrib/sample/C#_script/component.yaml deleted file mode 100644 index d0ab6239f8c..00000000000 --- a/components/contrib/sample/C#_script/component.yaml +++ /dev/null @@ -1,40 +0,0 @@ -name: Filter text -inputs: -- {name: Text} -- {name: Pattern, default: '.*'} -outputs: -- {name: Filtered text} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/sample/C#_script/component.yaml' -implementation: - container: - image: mcr.microsoft.com/dotnet/sdk:5.0 - command: - - sh - - -ec - - | - dotnet tool install dotnet-script --tool-path /usr/bin - "$0" "$@" - - dotnet - - script - - eval - - | - string textPath = Args[0]; - string pattern = Args[1]; - string filteredTextPath = Args[2]; - - var regex = new System.Text.RegularExpressions.Regex(pattern); - Directory.CreateDirectory(Path.GetDirectoryName(filteredTextPath)); - using(var writer = new StreamWriter(filteredTextPath)) { - foreach (var line in File.ReadLines(textPath)) { - if (regex.IsMatch(line)) { - writer.WriteLine(line); - } - } - } - - -- - - {inputPath: Text} - - {inputValue: Pattern} - - {outputPath: Filtered text} diff --git a/components/contrib/sample/Python_script/component.yaml b/components/contrib/sample/Python_script/component.yaml deleted file mode 100644 index 8fbabccd613..00000000000 --- a/components/contrib/sample/Python_script/component.yaml +++ /dev/null @@ -1,44 +0,0 @@ -name: Filter text -inputs: -- {name: Text} -- {name: Pattern, default: '.*'} -outputs: -- {name: Filtered text} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/sample/Python_script/component.yaml' -implementation: - container: - image: python:3.8 - command: - - sh - - -ec - - | - # This is how additional packages can be installed dynamically - python3 -m pip install pip six - # Run the rest of the command after installing the packages. - "$0" "$@" - - python3 - - -u # Auto-flush. We want the logs to appear in the console immediately. - - -c # Inline scripts are easy, but have size limitaions and the error traces do not show source lines. 
- - | - import os - import re - import sys - - text_path = sys.argv[1] - pattern = sys.argv[2] - filtered_text_path = sys.argv[3] - - regex = re.compile(pattern) - - os.makedirs(os.path.dirname(filtered_text_path), exist_ok=True) - with open(text_path, 'r') as reader: - with open(filtered_text_path, 'w') as writer: - for line in reader: - if regex.search(line): - writer.write(line) - - {inputPath: Text} - - {inputValue: Pattern} - - {outputPath: Filtered text} diff --git a/components/contrib/sample/R_script/component.yaml b/components/contrib/sample/R_script/component.yaml deleted file mode 100644 index 6da23c092b5..00000000000 --- a/components/contrib/sample/R_script/component.yaml +++ /dev/null @@ -1,40 +0,0 @@ -name: Filter text -inputs: -- {name: Text} -- {name: Pattern, default: '.*'} -outputs: -- {name: Filtered text} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/sample/R_script/component.yaml' -implementation: - container: - image: r-base:4.0.2 - command: - - Rscript - - -e - - | - args <- commandArgs(trailingOnly = TRUE) - textPath <- args[1] - pattern <- args[2] - filteredTextPath <- args[3] - - dir.create(dirname(filteredTextPath), showWarnings = FALSE, recursive = TRUE) - - inputFile = file(textPath, "r") - outputFile = file(filteredTextPath, "w") - while ( TRUE ) { - lines = readLines(inputFile, n = 1) - if ( length(lines) == 0 ) { - break - } - if ( grepl(pattern = pattern, lines) ) { - writeLines(lines, outputFile) - } - } - close(outputFile) - close(inputFile) - - {inputPath: Text} - - {inputValue: Pattern} - - {outputPath: Filtered text} diff --git a/components/contrib/sample/Shell_script/component.yaml b/components/contrib/sample/Shell_script/component.yaml deleted file mode 100644 index 2092061f286..00000000000 --- a/components/contrib/sample/Shell_script/component.yaml +++ /dev/null @@ -1,26 +0,0 @@ -name: Filter text using shell and grep -inputs: -- {name: Text} -- {name: Pattern, default: '.*'} -outputs: -- {name: Filtered text} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/sample/Shell_script/component.yaml' -implementation: - container: - image: alpine - command: - - sh - - -ec - - | - text_path=$0 - pattern=$1 - filtered_text_path=$2 - mkdir -p "$(dirname "$filtered_text_path")" - - grep "$pattern" < "$text_path" > "$filtered_text_path" - - {inputPath: Text} - - {inputValue: Pattern} - - {outputPath: Filtered text} diff --git a/components/contrib/sample/keras/train_classifier/Dockerfile b/components/contrib/sample/keras/train_classifier/Dockerfile deleted file mode 100644 index 38e4ce21475..00000000000 --- a/components/contrib/sample/keras/train_classifier/Dockerfile +++ /dev/null @@ -1,8 +0,0 @@ -ARG BASE_IMAGE_TAG=1.12.0-py3 -FROM tensorflow/tensorflow:$BASE_IMAGE_TAG -COPY requirements.txt . 
-RUN python3 -m pip install -r \ - requirements.txt --quiet --no-cache-dir \ - && rm -f requirements.txt -COPY ./src /pipelines/component/src -ENTRYPOINT python3 /pipelines/component/src/train.py diff --git a/components/contrib/sample/keras/train_classifier/README.md b/components/contrib/sample/keras/train_classifier/README.md deleted file mode 100644 index 320a1f2c7e4..00000000000 --- a/components/contrib/sample/keras/train_classifier/README.md +++ /dev/null @@ -1,77 +0,0 @@ -# Keras - Train classifier -### Trains classifier using Keras sequential model - -## Inputs -|Name|Type|Default|Description| -|---|---|---|---| -|training_set_features_path|GcsPath: {data_type: TSV}||Local or GCS path to the training set features table.| -|training_set_labels_path|GcsPath: {data_type: TSV}||Local or GCS path to the training set labels (each label is a class index from 0 to num-classes - 1).| -|output_model_uri|GcsPath: {data_type: Keras model}||Local or GCS path specifying where to save the trained model. The model (topology + weights + optimizer state) is saved in HDF5 format and can be loaded back by calling keras.models.load_model| -|model_config|GcsPath: {data_type: Keras model config json}||JSON string containing the serialized model structure. Can be obtained by calling model.to_json() on a Keras model.| -|number_of_classes|Integer||Number of classifier classes.| -|number_of_epochs|Integer|100|Number of epochs to train the model. An epoch is an iteration over the entire `x` and `y` data provided.| -|batch_size|Integer|32|Number of samples per gradient update| - -## Outputs -|Name|Type|Default|Description| -|---|---|---|---| -|output_model_uri|GcsPath: {data_type: Keras model}||GCS path where the trained model has been saved. The model (topology + weights + optimizer state) is saved in HDF5 format and can be loaded back by calling keras.models.load_model| - -## Container image -gcr.io/ml-pipeline/components/sample/keras/train_classifier - -## Usage: - -```python -import os -from pathlib import Path -import requests - -import kfp - -component_url_prefix = 'https://raw.githubusercontent.com/kubeflow/pipelines/master/components/sample/keras/train_classifier/' -test_data_url_prefix = component_url_prefix + 'tests/testdata/' - -#Prepare input/output paths and data -input_data_gcs_dir = 'gs:////' -output_data_gcs_dir = 'gs:////' - -#Downloading the training set (to upload to GCS later) -training_set_features_local_path = os.path.join('.', 'training_set_features.tsv') -training_set_labels_local_path = os.path.join('.', 'training_set_labels.tsv') - -training_set_features_url = test_data_url_prefix + '/training_set_features.tsv' -training_set_labels_url = test_data_url_prefix + '/training_set_labels.tsv' - -Path(training_set_features_local_path).write_bytes(requests.get(training_set_features_url).content) -Path(training_set_labels_local_path).write_bytes(requests.get(training_set_labels_url).content) - -#Uploading the data to GCS where it can be read by the trainer -training_set_features_gcs_path = os.path.join(input_data_gcs_dir, 'training_set_features.tsv') -training_set_labels_gcs_path = os.path.join(input_data_gcs_dir, 'training_set_labels.tsv') - -gfile.Copy(training_set_features_local_path, training_set_features_gcs_path) -gfile.Copy(training_set_labels_local_path, training_set_labels_gcs_path) - -output_model_uri_template = os.path.join(output_data_gcs_dir, kfp.dsl.EXECUTION_ID_PLACEHOLDER, 'output_model_uri', 'data') - -xor_model_config = requests.get(test_data_url_prefix + 
'model_config.json').content - - -#Load the component -train_op = kfp.components.load_component_from_url(component_url_prefix + 'component.yaml') - -#Use the component as part of the pipeline -@kfp.dsl.pipeline(name='Test keras/train_classifier', description='Pipeline to test keras/train_classifier component') -def pipeline_to_test_keras_train_classifier(): - train_task = train_op( - training_set_features_path=training_set_features_gcs_path, - training_set_labels_path=training_set_labels_gcs_path, - output_model_uri=output_model_uri_template, - model_config=xor_model_config, - number_of_classes=2, - number_of_epochs=10, - batch_size=32, - ) - #Use train_task.outputs['output_model_uri'] to obtain the reference to the trained model URI that can be a passed to other pipeline tasks (e.g. for prediction or analysis) -``` diff --git a/components/contrib/sample/keras/train_classifier/build_image.sh b/components/contrib/sample/keras/train_classifier/build_image.sh deleted file mode 100755 index 88a3ee00833..00000000000 --- a/components/contrib/sample/keras/train_classifier/build_image.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash -e -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -image_name=gcr.io/ml-pipeline/sample/keras/train_classifier -image_tag=latest -full_image_name=${image_name}:${image_tag} -base_image_tag=1.12.0-py3 - -cd "$(dirname "$0")" - -docker build --build-arg BASE_IMAGE_TAG=$base_image_tag -t "$full_image_name" . -docker push "$full_image_name" - -#Output the strict image name (which contains the sha256 image digest) -#This name can be used by the subsequent steps to refer to the exact image that was built even if another image with the same name was pushed. -image_name_with_digest=$(docker inspect --format="{{index .RepoDigests 0}}" "$IMAGE_NAME") -strict_image_name_output_file=./versions/image_digests_for_tags/$image_tag -mkdir -p "$(dirname "$strict_image_name_output_file")" -echo $image_name_with_digest | tee "$strict_image_name_output_file" diff --git a/components/contrib/sample/keras/train_classifier/component.yaml b/components/contrib/sample/keras/train_classifier/component.yaml deleted file mode 100644 index 140e7a66875..00000000000 --- a/components/contrib/sample/keras/train_classifier/component.yaml +++ /dev/null @@ -1,31 +0,0 @@ -name: Keras - Train classifier -description: Trains classifier using Keras sequential model -inputs: - - {name: training_set_features_path, type: {GcsPath: {data_type: TSV}}, description: 'Local or GCS path to the training set features table.'} - - {name: training_set_labels_path, type: {GcsPath: {data_type: TSV}}, description: 'Local or GCS path to the training set labels (each label is a class index from 0 to num-classes - 1).'} - - {name: output_model_uri, type: {GcsPath: {data_type: Keras model}}, description: 'Local or GCS path specifying where to save the trained model. 
The model (topology + weights + optimizer state) is saved in HDF5 format and can be loaded back by calling keras.models.load_model'} #Remove GcsUri and move to outputs once artifact passing support is checked in. - - {name: model_config, type: {GcsPath: {data_type: Keras model config json}}, description: 'JSON string containing the serialized model structure. Can be obtained by calling model.to_json() on a Keras model.'} - - {name: number_of_classes, type: Integer, description: 'Number of classifier classes.'} - - {name: number_of_epochs, type: Integer, default: '100', description: 'Number of epochs to train the model. An epoch is an iteration over the entire `x` and `y` data provided.'} - - {name: batch_size, type: Integer, default: '32', description: 'Number of samples per gradient update.'} -outputs: - - {name: output_model_uri, type: {GcsPath: {data_type: Keras model}}, description: 'GCS path where the trained model has been saved. The model (topology + weights + optimizer state) is saved in HDF5 format and can be loaded back by calling keras.models.load_model'} #Remove GcsUri and make it a proper output once artifact passing support is checked in. -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/sample/keras/train_classifier/component.yaml' -implementation: - container: - image: gcr.io/ml-pipeline/sample/keras/train_classifier - command: [python3, /pipelines/component/src/train.py] - args: [ - --training-set-features-path, {inputValue: training_set_features_path}, - --training-set-labels-path, {inputValue: training_set_labels_path}, - --output-model-path, {inputValue: output_model_uri}, - --model-config-json, {inputValue: model_config}, - --num-classes, {inputValue: number_of_classes}, - --num-epochs, {inputValue: number_of_epochs}, - --batch-size, {inputValue: batch_size}, - - --output-model-path-file, {outputPath: output_model_uri}, - ] diff --git a/components/contrib/sample/keras/train_classifier/requirements.txt b/components/contrib/sample/keras/train_classifier/requirements.txt deleted file mode 100644 index 14348698da9..00000000000 --- a/components/contrib/sample/keras/train_classifier/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -keras diff --git a/components/contrib/sample/keras/train_classifier/run_tests.sh b/components/contrib/sample/keras/train_classifier/run_tests.sh deleted file mode 100755 index cb3743fbfb0..00000000000 --- a/components/contrib/sample/keras/train_classifier/run_tests.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -e -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -cd $(dirname $0) -python3 -m unittest discover --verbose --start-dir tests --top-level-directory=.. 
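For readers skimming this removal, a minimal sketch of how the deleted component's pieces fit together — the flags mirror the `component.yaml` arg mapping above and the data files are the `tests/testdata` fixtures removed in this PR; the local layout and the `/tmp` output paths are hypothetical:

```python
# Hypothetical local run of the removed trainer (src/train.py), outside KFP.
# Flags follow the component.yaml arg mapping; testdata paths are the fixtures deleted in this PR.
import subprocess
from pathlib import Path

model_config = Path("tests/testdata/model_config.json").read_text()

subprocess.run(
    [
        "python3", "src/train.py",
        "--training-set-features-path", "tests/testdata/training_set_features.tsv",
        "--training-set-labels-path", "tests/testdata/training_set_labels.tsv",
        "--output-model-path", "/tmp/keras_xor/output_model/data",
        "--model-config-json", model_config,
        "--num-classes", "2",
        "--num-epochs", "10",
        "--batch-size", "32",
        "--output-model-path-file", "/tmp/keras_xor/output_model_uri.txt",
    ],
    check=True,
)
```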
diff --git a/components/contrib/sample/keras/train_classifier/src/train.py b/components/contrib/sample/keras/train_classifier/src/train.py deleted file mode 100755 index 0d366806263..00000000000 --- a/components/contrib/sample/keras/train_classifier/src/train.py +++ /dev/null @@ -1,79 +0,0 @@ -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse -import json -import os -from pathlib import Path - -import keras -import numpy as np - -parser = argparse.ArgumentParser(description='Train classifier model using Keras') - -parser.add_argument('--training-set-features-path', type=str, help='Local or GCS path to the training set features table.') -parser.add_argument('--training-set-labels-path', type=str, help='Local or GCS path to the training set labels (each label is a class index from 0 to num-classes - 1).') -parser.add_argument('--output-model-path', type=str, help='Local or GCS path specifying where to save the trained model. The model (topology + weights + optimizer state) is saved in HDF5 format and can be loaded back by calling keras.models.load_model') -parser.add_argument('--model-config-json', type=str, help='JSON string containing the serialized model structure. Can be obtained by calling model.to_json() on a Keras model.') -parser.add_argument('--num-classes', type=int, help='Number of classifier classes.') -parser.add_argument('--num-epochs', type=int, default=100, help='Number of epochs to train the model. An epoch is an iteration over the entire `x` and `y` data provided.') -parser.add_argument('--batch-size', type=int, default=32, help='Number of samples per gradient update.') - -parser.add_argument('--output-model-path-file', type=str, help='Path to a local file containing the output model URI. Needed for data passing until the artifact support is checked in.') #TODO: Remove after the team agrees to let me check in artifact support. -args = parser.parse_args() - -# The data, split between train and test sets: -#(x_train, y_train), (x_test, y_test) = cifar10.load_data() -x_train = np.loadtxt(args.training_set_features_path) -y_train = np.loadtxt(args.training_set_labels_path) -print('x_train shape:', x_train.shape) -print(x_train.shape[0], 'train samples') - -# Convert class vectors to binary class matrices. 
-y_train = keras.utils.to_categorical(y_train, args.num_classes) - -model = keras.models.model_from_json(args.model_config_json) - -model.add(keras.layers.Activation('softmax')) - -# initiate RMSprop optimizer -opt = keras.optimizers.rmsprop(lr=0.0001, decay=1e-6) - -# Let's train the model using RMSprop -model.compile(loss='categorical_crossentropy', - optimizer=opt, - metrics=['accuracy']) - -x_train = x_train.astype('float32') -x_train /= 255 - -model.fit( - x_train, - y_train, - batch_size=args.batch_size, - epochs=args.num_epochs, - shuffle=True -) - -# Save model and weights -if not args.output_model_path.startswith('gs://'): - save_dir = os.path.dirname(args.output_model_path) - if not os.path.isdir(save_dir): - os.makedirs(save_dir) - -model.save(args.output_model_path) -print('Saved trained model at %s ' % args.output_model_path) - -Path(args.output_model_path_file).parent.mkdir(parents=True, exist_ok=True) -Path(args.output_model_path_file).write_text(args.output_model_path) diff --git a/components/contrib/sample/keras/train_classifier/tests/test_component.py b/components/contrib/sample/keras/train_classifier/tests/test_component.py deleted file mode 100644 index 082be17871b..00000000000 --- a/components/contrib/sample/keras/train_classifier/tests/test_component.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os -import subprocess -import tempfile -import unittest -from contextlib import contextmanager -from pathlib import Path - -import kfp.components as comp - -@contextmanager -def components_local_output_dir_context(output_dir: str): - old_dir = comp._components._outputs_dir - try: - comp._components._outputs_dir = output_dir - yield output_dir - finally: - comp._components._outputs_dir = old_dir - -class KerasTrainClassifierTestCase(unittest.TestCase): - def test_handle_training_xor(self): - tests_root = os.path.abspath(os.path.dirname(__file__)) - component_root = os.path.abspath(os.path.join(tests_root, '..')) - testdata_root = os.path.abspath(os.path.join(tests_root, 'testdata')) - - train_op = comp.load_component(os.path.join(component_root, 'component.yaml')) - - with tempfile.TemporaryDirectory() as temp_dir_name: - with components_local_output_dir_context(temp_dir_name): - train_task = train_op( - training_set_features_path=os.path.join(testdata_root, 'training_set_features.tsv'), - training_set_labels_path=os.path.join(testdata_root, 'training_set_labels.tsv'), - output_model_uri=os.path.join(temp_dir_name, 'outputs/output_model/data'), - model_config=Path(testdata_root).joinpath('model_config.json').read_text(), - number_of_classes=2, - number_of_epochs=10, - batch_size=32, - ) - - full_command = train_task.command + train_task.arguments - full_command[0] = 'python' - full_command[1] = os.path.join(component_root, 'src', 'train.py') - - process = subprocess.run(full_command) - - (output_model_uri_file, ) = (train_task.file_outputs['output-model-uri'], ) - output_model_uri = Path(output_model_uri_file).read_text() - - -if __name__ == '__main__': - unittest.main() diff --git a/components/contrib/sample/keras/train_classifier/tests/testdata/model_config.json b/components/contrib/sample/keras/train_classifier/tests/testdata/model_config.json deleted file mode 100644 index 5f65cdddde3..00000000000 --- a/components/contrib/sample/keras/train_classifier/tests/testdata/model_config.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "class_name": "Sequential", - "config": { - "name": "sequential_1", - "layers": [ - { - "class_name": "Dense", - "config": { - "name": "dense_1", - "trainable": true, - "units": 2, - "activation": "linear", - "use_bias": true, - "kernel_initializer": { - "class_name": "VarianceScaling", - "config": { - "scale": 1.0, - "mode": "fan_avg", - "distribution": "uniform", - "seed": null - } - }, - "bias_initializer": { - "class_name": "Zeros", - "config": {} - }, - "kernel_regularizer": null, - "bias_regularizer": null, - "activity_regularizer": null, - "kernel_constraint": null, - "bias_constraint": null - } - }, - { - "class_name": "Dense", - "config": { - "name": "dense_2", - "trainable": true, - "units": 2, - "activation": "linear", - "use_bias": true, - "kernel_initializer": { - "class_name": "VarianceScaling", - "config": { - "scale": 1.0, - "mode": "fan_avg", - "distribution": "uniform", - "seed": null - } - }, - "bias_initializer": { - "class_name": "Zeros", - "config": {} - }, - "kernel_regularizer": null, - "bias_regularizer": null, - "activity_regularizer": null, - "kernel_constraint": null, - "bias_constraint": null - } - } - ] - }, - "keras_version": "2.2.4", - "backend": "tensorflow" -} \ No newline at end of file diff --git a/components/contrib/sample/keras/train_classifier/tests/testdata/training_set_features.tsv b/components/contrib/sample/keras/train_classifier/tests/testdata/training_set_features.tsv deleted file mode 100644 index 
930b80544e6..00000000000 --- a/components/contrib/sample/keras/train_classifier/tests/testdata/training_set_features.tsv +++ /dev/null @@ -1,4 +0,0 @@ -0 0 -0 1 -1 0 -1 1 diff --git a/components/contrib/sample/keras/train_classifier/tests/testdata/training_set_labels.tsv b/components/contrib/sample/keras/train_classifier/tests/testdata/training_set_labels.tsv deleted file mode 100644 index f2599a746ac..00000000000 --- a/components/contrib/sample/keras/train_classifier/tests/testdata/training_set_labels.tsv +++ /dev/null @@ -1,4 +0,0 @@ -0 -1 -1 -0 diff --git a/components/contrib/tables/Remove_header/component.yaml b/components/contrib/tables/Remove_header/component.yaml deleted file mode 100644 index 36096e95b16..00000000000 --- a/components/contrib/tables/Remove_header/component.yaml +++ /dev/null @@ -1,21 +0,0 @@ -name: Remove header -description: Remove the header line from CSV and TSV data (unconditionally) -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/tables/Remove_header/component.yaml' -inputs: -- name: table -outputs: -- name: table -implementation: - container: - image: alpine - command: - - sh - - -exc - - | - mkdir -p "$(dirname "$1")" - tail -n +2 <"$0" >"$1" - - inputPath: table - - outputPath: table diff --git a/components/contrib/tensorflow/tensorboard/prepare_tensorboard/component.yaml b/components/contrib/tensorflow/tensorboard/prepare_tensorboard/component.yaml deleted file mode 100644 index a8ad9e455ff..00000000000 --- a/components/contrib/tensorflow/tensorboard/prepare_tensorboard/component.yaml +++ /dev/null @@ -1,44 +0,0 @@ -name: Create Tensorboard visualization -description: | - Pre-creates Tensorboard visualization for a given Log dir URI. - This way the Tensorboard can be viewed before the training completes. - The output Log dir URI should be passed to a trainer component that will write Tensorboard logs to that directory. -inputs: -- {name: Log dir URI, type: String } -- {name: Image, type: String, default: ''} -- {name: Pod Template Spec, type: String, default: 'null'} -outputs: -- {name: mlpipeline-ui-metadata, type: kfp.v1.ui-metadata} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/tensorflow/tensorboard/prepare_tensorboard/component.yaml' -implementation: - container: - image: alpine - command: - - sh - - -ex - - -c - - | - log_dir="$0" - output_metadata_path="$1" - pod_template_spec="$2" - image="$3" - - mkdir -p "$(dirname "$output_metadata_path")" - - echo ' - { - "outputs" : [{ - "type": "tensorboard", - "source": "'"$log_dir"'", - "image": "'"$image"'", - "pod_template_spec": '"$pod_template_spec"' - }] - } - ' >"$output_metadata_path" - - {inputValue: Log dir URI} - - {outputPath: mlpipeline-ui-metadata} - - {inputValue: Pod Template Spec} - - {inputValue: Image} diff --git a/components/contrib/web/Download/component-sdk-v2.yaml b/components/contrib/web/Download/component-sdk-v2.yaml deleted file mode 100644 index e460fc2f5c7..00000000000 --- a/components/contrib/web/Download/component-sdk-v2.yaml +++ /dev/null @@ -1,26 +0,0 @@ -name: Download data (KFP SDK v2) -description: Downloads data from the specified URL. (Updated for KFP SDK v2.) -inputs: -- {name: Url, type: String} -- {name: curl options, type: String, default: '--location', description: 'Additional options given to the curl program. 
See https://curl.haxx.se/docs/manpage.html'} -outputs: -- {name: Data} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/web/Download/component.yaml' -implementation: - container: - image: byrnedo/alpine-curl@sha256:548379d0a4a0c08b9e55d9d87a592b7d35d9ab3037f4936f5ccd09d0b625a342 - command: - - sh - - -exc - - | - url="$0" - output_path="$1" - curl_options="$2" - mkdir -p "$(dirname "$output_path")" - curl --get "$url" --output "$output_path" $curl_options - - inputValue: Url - - outputPath: Data - - inputValue: curl options diff --git a/components/contrib/web/Download/component.yaml b/components/contrib/web/Download/component.yaml deleted file mode 100644 index 71d58996e40..00000000000 --- a/components/contrib/web/Download/component.yaml +++ /dev/null @@ -1,27 +0,0 @@ -name: Download data -inputs: -- {name: Url, type: URI} -- {name: curl options, type: string, default: '--location', description: 'Additional options given to the curl program. See https://curl.haxx.se/docs/manpage.html'} -outputs: -- {name: Data} -metadata: - annotations: - author: Alexey Volkov - canonical_location: 'https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/web/Download/component.yaml' -implementation: - container: - # image: curlimages/curl # Sets a non-root user which cannot write to mounted volumes. See https://github.com/curl/curl-docker/issues/22 - image: byrnedo/alpine-curl@sha256:548379d0a4a0c08b9e55d9d87a592b7d35d9ab3037f4936f5ccd09d0b625a342 - command: - - sh - - -exc - - | - url="$0" - output_path="$1" - curl_options="$2" - - mkdir -p "$(dirname "$output_path")" - curl --get "$url" --output "$output_path" $curl_options - - inputValue: Url - - outputPath: Data - - inputValue: curl options diff --git a/components/google-cloud/Dockerfile b/components/google-cloud/Dockerfile index 07368294275..c8f397dbd45 100644 --- a/components/google-cloud/Dockerfile +++ b/components/google-cloud/Dockerfile @@ -43,7 +43,7 @@ RUN pip3 install -U "fsspec>=0.7.4" "gcsfs>=0.6.0" "pandas<=1.3.5" "scikit-learn RUN pip3 install -U google-cloud-notebooks # Install main package -RUN pip3 install "git+https://github.com/kubeflow/pipelines.git@google-cloud-pipeline-components-2.20.1#egg=google-cloud-pipeline-components&subdirectory=components/google-cloud" +RUN pip3 install "git+https://github.com/kubeflow/pipelines.git@google-cloud-pipeline-components-2.21.0#egg=google-cloud-pipeline-components&subdirectory=components/google-cloud" # Note that components can override the container entry point. ENTRYPOINT ["python3","-m","google_cloud_pipeline_components.container.v1.aiplatform.remote_runner"] diff --git a/components/google-cloud/README.md b/components/google-cloud/README.md index ffe894c0261..8a6ded33e15 100644 --- a/components/google-cloud/README.md +++ b/components/google-cloud/README.md @@ -18,10 +18,10 @@ Please see the [Google Cloud Pipeline Components API reference documentation](ht For details about previous and upcoming releases, please see the [release notes](https://github.com/kubeflow/pipelines/blob/master/components/google-cloud/RELEASE.md). ## Examples + * [Train an image classification model using Vertex AI AutoML](https://github.com/GoogleCloudPlatform/vertex-ai-samples/blob/master/notebooks/official/pipelines/google_cloud_pipeline_components_automl_images.ipynb). 
* [Train a classification model using tabular data and Vertex AI AutoML](https://github.com/GoogleCloudPlatform/vertex-ai-samples/blob/master/notebooks/official/pipelines/automl_tabular_classification_beans.ipynb). * [Train a linear regression model using tabular data and Vertex AI AutoML](https://github.com/GoogleCloudPlatform/vertex-ai-samples/blob/master/notebooks/official/pipelines/google_cloud_pipeline_components_automl_tabular.ipynb). -* [Train a text classification model using Vertex AI AutoML](https://github.com/GoogleCloudPlatform/vertex-ai-samples/blob/master/notebooks/official/pipelines/google_cloud_pipeline_components_automl_text.ipynb). * [Use the Google Cloud pipeline components to upload and deploy a model](https://github.com/GoogleCloudPlatform/vertex-ai-samples/blob/master/notebooks/official/pipelines/google_cloud_pipeline_components_model_train_upload_deploy.ipynb). ## Installation diff --git a/components/google-cloud/RELEASE.md b/components/google-cloud/RELEASE.md index ba2ad35598b..b3fff372bbd 100644 --- a/components/google-cloud/RELEASE.md +++ b/components/google-cloud/RELEASE.md @@ -1,8 +1,15 @@ ## Upcoming release +## Release 2.21.0 + +* Add psc_interface_config to v1 GCPC custom job components/utils +* Bump image for Structured Data pipelines. +* Apply latest GCPC image vulnerability resolutions (base OS and software updates). + ## Release 2.20.1 * Apply latest GCPC image vulnerability resolutions (base OS and software updates). +* Explicitly set default python version to 3.9 on Starry Net dsl components to avoid future breaking changes. ## Release 2.20.0 diff --git a/components/google-cloud/docs/source/versions.json b/components/google-cloud/docs/source/versions.json index 3991f2327a0..39084f4becc 100644 --- a/components/google-cloud/docs/source/versions.json +++ b/components/google-cloud/docs/source/versions.json @@ -1,4 +1,9 @@ [ + { + "version": "https://google-cloud-pipeline-components.readthedocs.io/en/google-cloud-pipeline-components-2.21.0", + "title": "2.21.0", + "aliases": [] + }, { "version": "https://google-cloud-pipeline-components.readthedocs.io/en/google-cloud-pipeline-components-2.20.1", "title": "2.20.1", diff --git a/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/get_training_artifacts/component.py b/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/get_training_artifacts/component.py index c60e8e039ba..e059cf7ec28 100644 --- a/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/get_training_artifacts/component.py +++ b/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/get_training_artifacts/component.py @@ -18,7 +18,9 @@ from kfp import dsl -@dsl.component(packages_to_install=['tensorflow==2.16.1']) +@dsl.component( + base_image='python:3.9', packages_to_install=['tensorflow==2.16.1'] +) def get_training_artifacts( docker_region: str, trainer_dir: dsl.InputPath(), diff --git a/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/maybe_set_tfrecord_args/component.py b/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/maybe_set_tfrecord_args/component.py index 325ecdc011a..5f504592185 100644 --- a/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/maybe_set_tfrecord_args/component.py +++ b/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/maybe_set_tfrecord_args/component.py @@ -18,7 
+18,7 @@ from kfp import dsl -@dsl.component +@dsl.component(base_image='python:3.9') def maybe_set_tfrecord_args( dataprep_previous_run_dir: str, static_covariates: List[str], diff --git a/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/set_dataprep_args/component.py b/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/set_dataprep_args/component.py index 34bd19c468c..9ce66e525e1 100644 --- a/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/set_dataprep_args/component.py +++ b/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/set_dataprep_args/component.py @@ -18,7 +18,7 @@ from kfp import dsl -@dsl.component +@dsl.component(base_image='python:3.9') def set_dataprep_args( model_blocks: List[str], ts_identifier_columns: List[str], diff --git a/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/set_eval_args/component.py b/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/set_eval_args/component.py index 498913edaa8..ee8d666f95c 100644 --- a/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/set_eval_args/component.py +++ b/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/set_eval_args/component.py @@ -18,7 +18,7 @@ from kfp import dsl -@dsl.component +@dsl.component(base_image='python:3.9') def set_eval_args( big_query_source: dsl.Input[dsl.Artifact], quantiles: List[float] ) -> NamedTuple( diff --git a/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/set_test_set/component.py b/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/set_test_set/component.py index e7f4da36eed..9a4bad35041 100644 --- a/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/set_test_set/component.py +++ b/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/set_test_set/component.py @@ -18,7 +18,9 @@ from kfp import dsl -@dsl.component(packages_to_install=['tensorflow==2.16.1']) +@dsl.component( + base_image='python:3.9', packages_to_install=['tensorflow==2.16.1'] +) def set_test_set( dataprep_dir: dsl.InputPath(), ) -> NamedTuple('TestSetArtifact', uri=str, artifact=dsl.Artifact): diff --git a/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/set_tfrecord_args/component.py b/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/set_tfrecord_args/component.py index 3c83742db6f..47d50ee622a 100644 --- a/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/set_tfrecord_args/component.py +++ b/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/set_tfrecord_args/component.py @@ -18,7 +18,7 @@ from kfp import dsl -@dsl.component +@dsl.component(base_image='python:3.9') def set_tfrecord_args( dataprep_dir: dsl.InputPath(), static_covariates: List[str], diff --git a/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/set_train_args/component.py b/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/set_train_args/component.py index c1ab69fbe26..66bf1b684e8 100644 --- a/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/set_train_args/component.py +++ 
b/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/set_train_args/component.py @@ -18,7 +18,7 @@ from kfp import dsl -@dsl.component +@dsl.component(base_image='python:3.9') def set_train_args( quantiles: List[float], model_blocks: List[str], diff --git a/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/upload_decomposition_plots/component.py b/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/upload_decomposition_plots/component.py index a085cd8445e..dc66280ef9b 100644 --- a/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/upload_decomposition_plots/component.py +++ b/components/google-cloud/google_cloud_pipeline_components/_implementation/starry_net/upload_decomposition_plots/component.py @@ -17,10 +17,11 @@ @dsl.component( + base_image='python:3.9', packages_to_install=[ 'google-cloud-aiplatform[tensorboard]==1.87.0', 'protobuf==3.20.*', - ] + ], ) def upload_decomposition_plots( project: str, diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/forecasting_ensemble.py b/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/forecasting_ensemble.py index bd8a2c84d6e..981289a3366 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/forecasting_ensemble.py +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/forecasting_ensemble.py @@ -72,7 +72,7 @@ def automl_forecasting_ensemble( # fmt: on job_id = dsl.PIPELINE_JOB_ID_PLACEHOLDER task_id = dsl.PIPELINE_TASK_ID_PLACEHOLDER - image_uri = 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625' + image_uri = 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525' display_name = f'automl-forecasting-ensemble-{job_id}-{task_id}' error_file_path = f'{root_dir}/{job_id}/{task_id}/error.pb' diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/forecasting_stage_1_tuner.py b/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/forecasting_stage_1_tuner.py index 7389e918d5a..4988aef92db 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/forecasting_stage_1_tuner.py +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/forecasting_stage_1_tuner.py @@ -99,14 +99,14 @@ def automl_forecasting_stage_1_tuner( ' 1, "machine_spec": {"machine_type": "n1-standard-8"},' ' "container_spec": {"image_uri":"' ), - 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625', + 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525', '", "args": ["forecasting_mp_l2l_stage_1_tuner', '", "--region=', location, '", "--transform_output_path=', transform_output.uri, '", "--training_docker_uri=', - 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625', + 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525', '", "--reduce_search_space_mode=', reduce_search_space_mode, f'", "--component_id={dsl.PIPELINE_TASK_ID_PLACEHOLDER}', diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/forecasting_stage_2_tuner.py 
b/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/forecasting_stage_2_tuner.py index b1d04d97cd6..a9b7693dc00 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/forecasting_stage_2_tuner.py +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/forecasting_stage_2_tuner.py @@ -97,14 +97,14 @@ def automl_forecasting_stage_2_tuner( ' 1, "machine_spec": {"machine_type": "n1-standard-8"},' ' "container_spec": {"image_uri":"' ), - 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625', + 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525', '", "args": ["forecasting_mp_l2l_stage_2_tuner', '", "--region=', location, '", "--transform_output_path=', transform_output.uri, '", "--training_docker_uri=', - 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625', + 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525', f'", "--component_id={dsl.PIPELINE_TASK_ID_PLACEHOLDER}', '", "--training_base_dir=', root_dir, diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/learn_to_learn_forecasting_pipeline.yaml b/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/learn_to_learn_forecasting_pipeline.yaml index 273e6e078e4..48370b9eebf 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/learn_to_learn_forecasting_pipeline.yaml +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/learn_to_learn_forecasting_pipeline.yaml @@ -5577,7 +5577,7 @@ deploymentSpec: - '{"display_name": "automl-forecasting-ensemble-{{$.pipeline_job_uuid}}-{{$.pipeline_task_uuid}}", "encryption_spec": {"kms_key_name": "{{$.inputs.parameters[''encryption_spec_key_name'']}}"}, "job_spec": {"worker_pool_specs": [{"replica_count": 1, "machine_spec": - {"machine_type": "n1-highmem-8"}, "container_spec": {"image_uri": "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + {"machine_type": "n1-highmem-8"}, "container_spec": {"image_uri": "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "args": ["forecasting_mp_ensemble", "--transform_output_path={{$.inputs.artifacts[''transform_output''].uri}}", "--error_file_path={{$.inputs.parameters[''root_dir'']}}/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb", "--metadata_path={{$.inputs.artifacts[''metadata''].uri}}", "--tuning_result_input_path={{$.inputs.artifacts[''tuning_result_input''].uri}}", @@ -5611,7 +5611,7 @@ deploymentSpec: - '{"display_name": "automl-forecasting-ensemble-{{$.pipeline_job_uuid}}-{{$.pipeline_task_uuid}}", "encryption_spec": {"kms_key_name": "{{$.inputs.parameters[''encryption_spec_key_name'']}}"}, "job_spec": {"worker_pool_specs": [{"replica_count": 1, "machine_spec": - {"machine_type": "n1-highmem-8"}, "container_spec": {"image_uri": "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + {"machine_type": "n1-highmem-8"}, "container_spec": {"image_uri": "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "args": ["forecasting_mp_ensemble", "--transform_output_path={{$.inputs.artifacts[''transform_output''].uri}}", 
"--error_file_path={{$.inputs.parameters[''root_dir'']}}/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb", "--metadata_path={{$.inputs.artifacts[''metadata''].uri}}", "--tuning_result_input_path={{$.inputs.artifacts[''tuning_result_input''].uri}}", @@ -5646,11 +5646,11 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "\", \"args\": [\"forecasting_mp_l2l_stage_1_tuner", "\", \"--region=", "{{$.inputs.parameters[''location'']}}", "\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", "\", \"--training_docker_uri=", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "\", \"--reduce_search_space_mode=", "{{$.inputs.parameters[''reduce_search_space_mode'']}}", "\", \"--component_id={{$.pipeline_task_uuid}}", "\", \"--training_base_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/train", @@ -5689,11 +5689,11 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "\", \"args\": [\"forecasting_mp_l2l_stage_2_tuner", "\", \"--region=", "{{$.inputs.parameters[''location'']}}", "\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", "\", \"--training_docker_uri=", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "\", \"--component_id={{$.pipeline_task_uuid}}", "\", \"--training_base_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/train", "\", \"--num_parallel_trial=", "{{$.inputs.parameters[''num_parallel_trials'']}}", @@ -5732,7 +5732,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"cancel_l2l_tuner\", \"--error_file_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb\", \"--cleanup_lro_job_infos=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/lro\"]}}]}}"]}' @@ -5797,7 +5797,7 @@ deploymentSpec: \ 'stage_2_single_run_max_secs',\n ],\n )(\n stage_1_deadline_hours,\n\ \ stage_1_single_run_max_secs,\n stage_2_deadline_hours,\n \ \ 
stage_2_single_run_max_secs,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-calculate-training-parameters-2: container: args: @@ -5853,7 +5853,7 @@ deploymentSpec: \ 'stage_2_single_run_max_secs',\n ],\n )(\n stage_1_deadline_hours,\n\ \ stage_1_single_run_max_secs,\n stage_2_deadline_hours,\n \ \ stage_2_single_run_max_secs,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-feature-attribution: container: args: @@ -6044,8 +6044,8 @@ deploymentSpec: "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp"]}' - '{"Concat": ["--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}"]}' - '{"Concat": ["--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}"]}' - - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 - - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 + - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 - '{"Concat": ["--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}"]}' - '{"Concat": ["--dataflow_subnetwork_fully_qualified=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}"]}' - '{"Concat": ["--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}"]}' @@ -6062,7 +6062,7 @@ deploymentSpec: - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - '{"Concat": ["--encryption_spec_key_name=", "{{$.inputs.parameters[''encryption_spec_key_name'']}}"]}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 30.0 @@ -6093,7 +6093,7 @@ deploymentSpec: \ = 'point'\n else:\n forecasting_type = 'quantile'\n\n return collections.namedtuple(\n\ \ 'Outputs',\n (\n 'forecasting_type',\n 'quantiles',\n\ \ ),\n )(forecasting_type, quantiles)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-finalize-eval-quantile-parameters-2: container: args: @@ -6121,7 +6121,7 @@ deploymentSpec: \ = 'point'\n else:\n forecasting_type = 'quantile'\n\n return collections.namedtuple(\n\ \ 'Outputs',\n (\n 'forecasting_type',\n 'quantiles',\n\ \ ),\n )(forecasting_type, quantiles)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-or-create-model-description: container: args: @@ -6150,7 +6150,7 @@ deploymentSpec: \ return f'{original_description} From: {pipeline_url}'\n\n # The pipeline\ \ url contains KFP placeholders injected at runtime.\n return f'Vertex\ \ forecasting model trained in the pipeline: {pipeline_url}'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: 
us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-or-create-model-description-2: container: args: @@ -6179,7 +6179,7 @@ deploymentSpec: \ return f'{original_description} From: {pipeline_url}'\n\n # The pipeline\ \ url contains KFP placeholders injected at runtime.\n return f'Vertex\ \ forecasting model trained in the pipeline: {pipeline_url}'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-prediction-image-uri: container: args: @@ -6202,14 +6202,14 @@ deploymentSpec: Returns the prediction image corresponding to the given model type.\"\"\"\ \n # Keys come from AutoMlTimeSeriesForecastingTrainSpec.\n # The URIs\ \ must be hardcoded without any breaks in the code so string\n # replacement\ - \ will work correctly.\n images = {\n 'l2l': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250129_0625',\n\ - \ 'seq2seq': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-seq2seq:20250129_0625',\n\ - \ 'tft': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-tft:20250129_0625',\n\ - \ 'tide': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-tide:20250129_0625',\n\ - \ }\n if model_type not in images:\n raise ValueError(\n f'Invalid\ - \ forecasting model type: {model_type}. Valid options are: '\n f'{images.keys()}.'\n\ - \ )\n return images[model_type]\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + \ will work correctly.\n images = {\n 'l2l': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'seq2seq': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'tft': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'tide': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n }\n if model_type not in images:\n raise ValueError(\n\ + \ f'Invalid forecasting model type: {model_type}. Valid options are:\ + \ '\n f'{images.keys()}.'\n )\n return images[model_type]\n\n" + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-prediction-image-uri-2: container: args: @@ -6232,14 +6232,14 @@ deploymentSpec: Returns the prediction image corresponding to the given model type.\"\"\"\ \n # Keys come from AutoMlTimeSeriesForecastingTrainSpec.\n # The URIs\ \ must be hardcoded without any breaks in the code so string\n # replacement\ - \ will work correctly.\n images = {\n 'l2l': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250129_0625',\n\ - \ 'seq2seq': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-seq2seq:20250129_0625',\n\ - \ 'tft': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-tft:20250129_0625',\n\ - \ 'tide': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-tide:20250129_0625',\n\ - \ }\n if model_type not in images:\n raise ValueError(\n f'Invalid\ - \ forecasting model type: {model_type}. 
Valid options are: '\n f'{images.keys()}.'\n\ - \ )\n return images[model_type]\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + \ will work correctly.\n images = {\n 'l2l': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'seq2seq': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'tft': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'tide': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n }\n if model_type not in images:\n raise ValueError(\n\ + \ f'Invalid forecasting model type: {model_type}. Valid options are:\ + \ '\n f'{images.keys()}.'\n )\n return images[model_type]\n\n" + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-predictions-column: container: args: @@ -6262,7 +6262,7 @@ deploymentSpec: \ str) -> str:\n \"\"\"Generates the BP output's target column name.\"\"\ \"\n if forecasting_type == 'quantile':\n return f'predicted_{target_column}.quantile_predictions'\n\ \ return f'predicted_{target_column}.value'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-predictions-column-2: container: args: @@ -6285,7 +6285,7 @@ deploymentSpec: \ str) -> str:\n \"\"\"Generates the BP output's target column name.\"\"\ \"\n if forecasting_type == 'quantile':\n return f'predicted_{target_column}.quantile_predictions'\n\ \ return f'predicted_{target_column}.value'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-importer: importer: artifactUri: @@ -6817,7 +6817,7 @@ deploymentSpec: \ 'model_display_name',\n 'transformations',\n ],\n\ \ )(\n data_source_csv_filenames,\n data_source_bigquery_table_path,\n\ \ model_display_name,\n transformations,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-split-materialized-data: container: args: @@ -6863,7 +6863,7 @@ deploymentSpec: \ 'w') as f:\n f.write(file_patterns[0])\n\n with tf.io.gfile.GFile(materialized_eval_split,\ \ 'w') as f:\n f.write(file_patterns[1])\n\n with tf.io.gfile.GFile(materialized_test_split,\ \ 'w') as f:\n f.write(file_patterns[2])\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 exec-string-not-empty: container: args: @@ -6887,7 +6887,7 @@ deploymentSpec: \n Returns:\n Boolean value. 
-> 'true' if empty, 'false' if not empty.\ \ We need to use str\n instead of bool due to a limitation in KFP compiler.\n\ \ \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-table-to-uri: container: args: @@ -6917,7 +6917,7 @@ deploymentSpec: \ if use_bq_prefix:\n bq_uri = 'bq://' + bq_uri\n outputs.append(bq_uri)\n\ \ return collections.namedtuple(\n 'Outputs',\n ['project_id',\ \ 'dataset_id', 'table_id', 'uri'],\n )(*outputs)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-table-to-uri-2: container: args: @@ -6947,7 +6947,7 @@ deploymentSpec: \ if use_bq_prefix:\n bq_uri = 'bq://' + bq_uri\n outputs.append(bq_uri)\n\ \ return collections.namedtuple(\n 'Outputs',\n ['project_id',\ \ 'dataset_id', 'table_id', 'uri'],\n )(*outputs)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-training-configurator-and-validator: container: args: @@ -6992,7 +6992,7 @@ deploymentSpec: ["--temporal_total_weight=", "{{$.inputs.parameters[''temporal_total_weight'']}}"]}}}' - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 pipelineInfo: description: The AutoML Forecasting pipeline. 
name: learn-to-learn-forecasting diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/sequence_to_sequence_forecasting_pipeline.yaml b/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/sequence_to_sequence_forecasting_pipeline.yaml index 7e29bf4aa29..5c54da5afc3 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/sequence_to_sequence_forecasting_pipeline.yaml +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/sequence_to_sequence_forecasting_pipeline.yaml @@ -5559,7 +5559,7 @@ deploymentSpec: - '{"display_name": "automl-forecasting-ensemble-{{$.pipeline_job_uuid}}-{{$.pipeline_task_uuid}}", "encryption_spec": {"kms_key_name": "{{$.inputs.parameters[''encryption_spec_key_name'']}}"}, "job_spec": {"worker_pool_specs": [{"replica_count": 1, "machine_spec": - {"machine_type": "n1-highmem-8"}, "container_spec": {"image_uri": "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + {"machine_type": "n1-highmem-8"}, "container_spec": {"image_uri": "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "args": ["forecasting_mp_ensemble", "--transform_output_path={{$.inputs.artifacts[''transform_output''].uri}}", "--error_file_path={{$.inputs.parameters[''root_dir'']}}/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb", "--metadata_path={{$.inputs.artifacts[''metadata''].uri}}", "--tuning_result_input_path={{$.inputs.artifacts[''tuning_result_input''].uri}}", @@ -5593,7 +5593,7 @@ deploymentSpec: - '{"display_name": "automl-forecasting-ensemble-{{$.pipeline_job_uuid}}-{{$.pipeline_task_uuid}}", "encryption_spec": {"kms_key_name": "{{$.inputs.parameters[''encryption_spec_key_name'']}}"}, "job_spec": {"worker_pool_specs": [{"replica_count": 1, "machine_spec": - {"machine_type": "n1-highmem-8"}, "container_spec": {"image_uri": "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + {"machine_type": "n1-highmem-8"}, "container_spec": {"image_uri": "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "args": ["forecasting_mp_ensemble", "--transform_output_path={{$.inputs.artifacts[''transform_output''].uri}}", "--error_file_path={{$.inputs.parameters[''root_dir'']}}/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb", "--metadata_path={{$.inputs.artifacts[''metadata''].uri}}", "--tuning_result_input_path={{$.inputs.artifacts[''tuning_result_input''].uri}}", @@ -5628,11 +5628,11 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "\", \"args\": [\"forecasting_mp_l2l_stage_1_tuner", "\", \"--region=", "{{$.inputs.parameters[''location'']}}", "\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", "\", \"--training_docker_uri=", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "\", 
\"--reduce_search_space_mode=", "{{$.inputs.parameters[''reduce_search_space_mode'']}}", "\", \"--component_id={{$.pipeline_task_uuid}}", "\", \"--training_base_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/train", @@ -5671,11 +5671,11 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "\", \"args\": [\"forecasting_mp_l2l_stage_2_tuner", "\", \"--region=", "{{$.inputs.parameters[''location'']}}", "\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", "\", \"--training_docker_uri=", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "\", \"--component_id={{$.pipeline_task_uuid}}", "\", \"--training_base_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/train", "\", \"--num_parallel_trial=", "{{$.inputs.parameters[''num_parallel_trials'']}}", @@ -5714,7 +5714,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"cancel_l2l_tuner\", \"--error_file_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb\", \"--cleanup_lro_job_infos=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/lro\"]}}]}}"]}' @@ -5779,7 +5779,7 @@ deploymentSpec: \ 'stage_2_single_run_max_secs',\n ],\n )(\n stage_1_deadline_hours,\n\ \ stage_1_single_run_max_secs,\n stage_2_deadline_hours,\n \ \ stage_2_single_run_max_secs,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-calculate-training-parameters-2: container: args: @@ -5835,7 +5835,7 @@ deploymentSpec: \ 'stage_2_single_run_max_secs',\n ],\n )(\n stage_1_deadline_hours,\n\ \ stage_1_single_run_max_secs,\n stage_2_deadline_hours,\n \ \ stage_2_single_run_max_secs,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-feature-attribution: container: args: @@ -6026,8 +6026,8 @@ deploymentSpec: "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp"]}' - '{"Concat": ["--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}"]}' - '{"Concat": ["--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}"]}' - - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 - - 
--feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 + - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 - '{"Concat": ["--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}"]}' - '{"Concat": ["--dataflow_subnetwork_fully_qualified=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}"]}' - '{"Concat": ["--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}"]}' @@ -6044,7 +6044,7 @@ deploymentSpec: - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - '{"Concat": ["--encryption_spec_key_name=", "{{$.inputs.parameters[''encryption_spec_key_name'']}}"]}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 30.0 @@ -6075,7 +6075,7 @@ deploymentSpec: \ = 'point'\n else:\n forecasting_type = 'quantile'\n\n return collections.namedtuple(\n\ \ 'Outputs',\n (\n 'forecasting_type',\n 'quantiles',\n\ \ ),\n )(forecasting_type, quantiles)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-finalize-eval-quantile-parameters-2: container: args: @@ -6103,7 +6103,7 @@ deploymentSpec: \ = 'point'\n else:\n forecasting_type = 'quantile'\n\n return collections.namedtuple(\n\ \ 'Outputs',\n (\n 'forecasting_type',\n 'quantiles',\n\ \ ),\n )(forecasting_type, quantiles)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-or-create-model-description: container: args: @@ -6132,7 +6132,7 @@ deploymentSpec: \ return f'{original_description} From: {pipeline_url}'\n\n # The pipeline\ \ url contains KFP placeholders injected at runtime.\n return f'Vertex\ \ forecasting model trained in the pipeline: {pipeline_url}'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-or-create-model-description-2: container: args: @@ -6161,7 +6161,7 @@ deploymentSpec: \ return f'{original_description} From: {pipeline_url}'\n\n # The pipeline\ \ url contains KFP placeholders injected at runtime.\n return f'Vertex\ \ forecasting model trained in the pipeline: {pipeline_url}'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-prediction-image-uri: container: args: @@ -6184,14 +6184,14 @@ deploymentSpec: Returns the prediction image corresponding to the given model type.\"\"\"\ \n # Keys come from AutoMlTimeSeriesForecastingTrainSpec.\n # The URIs\ \ must be hardcoded without any breaks in the code so string\n # replacement\ - \ will work correctly.\n images = {\n 'l2l': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250129_0625',\n\ - \ 'seq2seq': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-seq2seq:20250129_0625',\n\ - \ 'tft': 
'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-tft:20250129_0625',\n\ - \ 'tide': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-tide:20250129_0625',\n\ - \ }\n if model_type not in images:\n raise ValueError(\n f'Invalid\ - \ forecasting model type: {model_type}. Valid options are: '\n f'{images.keys()}.'\n\ - \ )\n return images[model_type]\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + \ will work correctly.\n images = {\n 'l2l': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'seq2seq': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'tft': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'tide': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n }\n if model_type not in images:\n raise ValueError(\n\ + \ f'Invalid forecasting model type: {model_type}. Valid options are:\ + \ '\n f'{images.keys()}.'\n )\n return images[model_type]\n\n" + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-prediction-image-uri-2: container: args: @@ -6214,14 +6214,14 @@ deploymentSpec: Returns the prediction image corresponding to the given model type.\"\"\"\ \n # Keys come from AutoMlTimeSeriesForecastingTrainSpec.\n # The URIs\ \ must be hardcoded without any breaks in the code so string\n # replacement\ - \ will work correctly.\n images = {\n 'l2l': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250129_0625',\n\ - \ 'seq2seq': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-seq2seq:20250129_0625',\n\ - \ 'tft': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-tft:20250129_0625',\n\ - \ 'tide': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-tide:20250129_0625',\n\ - \ }\n if model_type not in images:\n raise ValueError(\n f'Invalid\ - \ forecasting model type: {model_type}. Valid options are: '\n f'{images.keys()}.'\n\ - \ )\n return images[model_type]\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + \ will work correctly.\n images = {\n 'l2l': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'seq2seq': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'tft': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'tide': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n }\n if model_type not in images:\n raise ValueError(\n\ + \ f'Invalid forecasting model type: {model_type}. 
Valid options are:\ + \ '\n f'{images.keys()}.'\n )\n return images[model_type]\n\n" + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-predictions-column: container: args: @@ -6244,7 +6244,7 @@ deploymentSpec: \ str) -> str:\n \"\"\"Generates the BP output's target column name.\"\"\ \"\n if forecasting_type == 'quantile':\n return f'predicted_{target_column}.quantile_predictions'\n\ \ return f'predicted_{target_column}.value'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-predictions-column-2: container: args: @@ -6267,7 +6267,7 @@ deploymentSpec: \ str) -> str:\n \"\"\"Generates the BP output's target column name.\"\"\ \"\n if forecasting_type == 'quantile':\n return f'predicted_{target_column}.quantile_predictions'\n\ \ return f'predicted_{target_column}.value'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-importer: importer: artifactUri: @@ -6799,7 +6799,7 @@ deploymentSpec: \ 'model_display_name',\n 'transformations',\n ],\n\ \ )(\n data_source_csv_filenames,\n data_source_bigquery_table_path,\n\ \ model_display_name,\n transformations,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-split-materialized-data: container: args: @@ -6845,7 +6845,7 @@ deploymentSpec: \ 'w') as f:\n f.write(file_patterns[0])\n\n with tf.io.gfile.GFile(materialized_eval_split,\ \ 'w') as f:\n f.write(file_patterns[1])\n\n with tf.io.gfile.GFile(materialized_test_split,\ \ 'w') as f:\n f.write(file_patterns[2])\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 exec-string-not-empty: container: args: @@ -6869,7 +6869,7 @@ deploymentSpec: \n Returns:\n Boolean value. 
-> 'true' if empty, 'false' if not empty.\ \ We need to use str\n instead of bool due to a limitation in KFP compiler.\n\ \ \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-table-to-uri: container: args: @@ -6899,7 +6899,7 @@ deploymentSpec: \ if use_bq_prefix:\n bq_uri = 'bq://' + bq_uri\n outputs.append(bq_uri)\n\ \ return collections.namedtuple(\n 'Outputs',\n ['project_id',\ \ 'dataset_id', 'table_id', 'uri'],\n )(*outputs)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-table-to-uri-2: container: args: @@ -6929,7 +6929,7 @@ deploymentSpec: \ if use_bq_prefix:\n bq_uri = 'bq://' + bq_uri\n outputs.append(bq_uri)\n\ \ return collections.namedtuple(\n 'Outputs',\n ['project_id',\ \ 'dataset_id', 'table_id', 'uri'],\n )(*outputs)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-training-configurator-and-validator: container: args: @@ -6974,7 +6974,7 @@ deploymentSpec: ["--temporal_total_weight=", "{{$.inputs.parameters[''temporal_total_weight'']}}"]}}}' - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 pipelineInfo: description: The Sequence to Sequence (Seq2Seq) Forecasting pipeline. 
name: sequence-to-sequence-forecasting diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/temporal_fusion_transformer_forecasting_pipeline.yaml b/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/temporal_fusion_transformer_forecasting_pipeline.yaml index d226d7adc9f..7b22a542a05 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/temporal_fusion_transformer_forecasting_pipeline.yaml +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/temporal_fusion_transformer_forecasting_pipeline.yaml @@ -5552,7 +5552,7 @@ deploymentSpec: - '{"display_name": "automl-forecasting-ensemble-{{$.pipeline_job_uuid}}-{{$.pipeline_task_uuid}}", "encryption_spec": {"kms_key_name": "{{$.inputs.parameters[''encryption_spec_key_name'']}}"}, "job_spec": {"worker_pool_specs": [{"replica_count": 1, "machine_spec": - {"machine_type": "n1-highmem-8"}, "container_spec": {"image_uri": "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + {"machine_type": "n1-highmem-8"}, "container_spec": {"image_uri": "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "args": ["forecasting_mp_ensemble", "--transform_output_path={{$.inputs.artifacts[''transform_output''].uri}}", "--error_file_path={{$.inputs.parameters[''root_dir'']}}/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb", "--metadata_path={{$.inputs.artifacts[''metadata''].uri}}", "--tuning_result_input_path={{$.inputs.artifacts[''tuning_result_input''].uri}}", @@ -5586,7 +5586,7 @@ deploymentSpec: - '{"display_name": "automl-forecasting-ensemble-{{$.pipeline_job_uuid}}-{{$.pipeline_task_uuid}}", "encryption_spec": {"kms_key_name": "{{$.inputs.parameters[''encryption_spec_key_name'']}}"}, "job_spec": {"worker_pool_specs": [{"replica_count": 1, "machine_spec": - {"machine_type": "n1-highmem-8"}, "container_spec": {"image_uri": "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + {"machine_type": "n1-highmem-8"}, "container_spec": {"image_uri": "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "args": ["forecasting_mp_ensemble", "--transform_output_path={{$.inputs.artifacts[''transform_output''].uri}}", "--error_file_path={{$.inputs.parameters[''root_dir'']}}/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb", "--metadata_path={{$.inputs.artifacts[''metadata''].uri}}", "--tuning_result_input_path={{$.inputs.artifacts[''tuning_result_input''].uri}}", @@ -5621,11 +5621,11 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "\", \"args\": [\"forecasting_mp_l2l_stage_1_tuner", "\", \"--region=", "{{$.inputs.parameters[''location'']}}", "\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", "\", \"--training_docker_uri=", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + 
"us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "\", \"--reduce_search_space_mode=", "{{$.inputs.parameters[''reduce_search_space_mode'']}}", "\", \"--component_id={{$.pipeline_task_uuid}}", "\", \"--training_base_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/train", @@ -5664,11 +5664,11 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "\", \"args\": [\"forecasting_mp_l2l_stage_2_tuner", "\", \"--region=", "{{$.inputs.parameters[''location'']}}", "\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", "\", \"--training_docker_uri=", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "\", \"--component_id={{$.pipeline_task_uuid}}", "\", \"--training_base_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/train", "\", \"--num_parallel_trial=", "{{$.inputs.parameters[''num_parallel_trials'']}}", @@ -5707,7 +5707,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"cancel_l2l_tuner\", \"--error_file_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb\", \"--cleanup_lro_job_infos=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/lro\"]}}]}}"]}' @@ -5772,7 +5772,7 @@ deploymentSpec: \ 'stage_2_single_run_max_secs',\n ],\n )(\n stage_1_deadline_hours,\n\ \ stage_1_single_run_max_secs,\n stage_2_deadline_hours,\n \ \ stage_2_single_run_max_secs,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-calculate-training-parameters-2: container: args: @@ -5828,7 +5828,7 @@ deploymentSpec: \ 'stage_2_single_run_max_secs',\n ],\n )(\n stage_1_deadline_hours,\n\ \ stage_1_single_run_max_secs,\n stage_2_deadline_hours,\n \ \ stage_2_single_run_max_secs,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-feature-attribution: container: args: @@ -6019,8 +6019,8 @@ deploymentSpec: "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp"]}' - '{"Concat": ["--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}"]}' - '{"Concat": ["--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}"]}' - - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 - - 
--feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 + - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 - '{"Concat": ["--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}"]}' - '{"Concat": ["--dataflow_subnetwork_fully_qualified=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}"]}' - '{"Concat": ["--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}"]}' @@ -6037,7 +6037,7 @@ deploymentSpec: - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - '{"Concat": ["--encryption_spec_key_name=", "{{$.inputs.parameters[''encryption_spec_key_name'']}}"]}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 30.0 @@ -6068,7 +6068,7 @@ deploymentSpec: \ = 'point'\n else:\n forecasting_type = 'quantile'\n\n return collections.namedtuple(\n\ \ 'Outputs',\n (\n 'forecasting_type',\n 'quantiles',\n\ \ ),\n )(forecasting_type, quantiles)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-finalize-eval-quantile-parameters-2: container: args: @@ -6096,7 +6096,7 @@ deploymentSpec: \ = 'point'\n else:\n forecasting_type = 'quantile'\n\n return collections.namedtuple(\n\ \ 'Outputs',\n (\n 'forecasting_type',\n 'quantiles',\n\ \ ),\n )(forecasting_type, quantiles)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-or-create-model-description: container: args: @@ -6125,7 +6125,7 @@ deploymentSpec: \ return f'{original_description} From: {pipeline_url}'\n\n # The pipeline\ \ url contains KFP placeholders injected at runtime.\n return f'Vertex\ \ forecasting model trained in the pipeline: {pipeline_url}'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-or-create-model-description-2: container: args: @@ -6154,7 +6154,7 @@ deploymentSpec: \ return f'{original_description} From: {pipeline_url}'\n\n # The pipeline\ \ url contains KFP placeholders injected at runtime.\n return f'Vertex\ \ forecasting model trained in the pipeline: {pipeline_url}'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-prediction-image-uri: container: args: @@ -6177,14 +6177,14 @@ deploymentSpec: Returns the prediction image corresponding to the given model type.\"\"\"\ \n # Keys come from AutoMlTimeSeriesForecastingTrainSpec.\n # The URIs\ \ must be hardcoded without any breaks in the code so string\n # replacement\ - \ will work correctly.\n images = {\n 'l2l': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250129_0625',\n\ - \ 'seq2seq': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-seq2seq:20250129_0625',\n\ - \ 'tft': 
'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-tft:20250129_0625',\n\ - \ 'tide': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-tide:20250129_0625',\n\ - \ }\n if model_type not in images:\n raise ValueError(\n f'Invalid\ - \ forecasting model type: {model_type}. Valid options are: '\n f'{images.keys()}.'\n\ - \ )\n return images[model_type]\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + \ will work correctly.\n images = {\n 'l2l': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'seq2seq': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'tft': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'tide': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n }\n if model_type not in images:\n raise ValueError(\n\ + \ f'Invalid forecasting model type: {model_type}. Valid options are:\ + \ '\n f'{images.keys()}.'\n )\n return images[model_type]\n\n" + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-prediction-image-uri-2: container: args: @@ -6207,14 +6207,14 @@ deploymentSpec: Returns the prediction image corresponding to the given model type.\"\"\"\ \n # Keys come from AutoMlTimeSeriesForecastingTrainSpec.\n # The URIs\ \ must be hardcoded without any breaks in the code so string\n # replacement\ - \ will work correctly.\n images = {\n 'l2l': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250129_0625',\n\ - \ 'seq2seq': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-seq2seq:20250129_0625',\n\ - \ 'tft': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-tft:20250129_0625',\n\ - \ 'tide': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-tide:20250129_0625',\n\ - \ }\n if model_type not in images:\n raise ValueError(\n f'Invalid\ - \ forecasting model type: {model_type}. Valid options are: '\n f'{images.keys()}.'\n\ - \ )\n return images[model_type]\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + \ will work correctly.\n images = {\n 'l2l': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'seq2seq': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'tft': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'tide': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n }\n if model_type not in images:\n raise ValueError(\n\ + \ f'Invalid forecasting model type: {model_type}. 
Valid options are:\ + \ '\n f'{images.keys()}.'\n )\n return images[model_type]\n\n" + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-predictions-column: container: args: @@ -6237,7 +6237,7 @@ deploymentSpec: \ str) -> str:\n \"\"\"Generates the BP output's target column name.\"\"\ \"\n if forecasting_type == 'quantile':\n return f'predicted_{target_column}.quantile_predictions'\n\ \ return f'predicted_{target_column}.value'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-predictions-column-2: container: args: @@ -6260,7 +6260,7 @@ deploymentSpec: \ str) -> str:\n \"\"\"Generates the BP output's target column name.\"\"\ \"\n if forecasting_type == 'quantile':\n return f'predicted_{target_column}.quantile_predictions'\n\ \ return f'predicted_{target_column}.value'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-importer: importer: artifactUri: @@ -6792,7 +6792,7 @@ deploymentSpec: \ 'model_display_name',\n 'transformations',\n ],\n\ \ )(\n data_source_csv_filenames,\n data_source_bigquery_table_path,\n\ \ model_display_name,\n transformations,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-split-materialized-data: container: args: @@ -6838,7 +6838,7 @@ deploymentSpec: \ 'w') as f:\n f.write(file_patterns[0])\n\n with tf.io.gfile.GFile(materialized_eval_split,\ \ 'w') as f:\n f.write(file_patterns[1])\n\n with tf.io.gfile.GFile(materialized_test_split,\ \ 'w') as f:\n f.write(file_patterns[2])\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 exec-string-not-empty: container: args: @@ -6862,7 +6862,7 @@ deploymentSpec: \n Returns:\n Boolean value. 
-> 'true' if empty, 'false' if not empty.\ \ We need to use str\n instead of bool due to a limitation in KFP compiler.\n\ \ \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-table-to-uri: container: args: @@ -6892,7 +6892,7 @@ deploymentSpec: \ if use_bq_prefix:\n bq_uri = 'bq://' + bq_uri\n outputs.append(bq_uri)\n\ \ return collections.namedtuple(\n 'Outputs',\n ['project_id',\ \ 'dataset_id', 'table_id', 'uri'],\n )(*outputs)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-table-to-uri-2: container: args: @@ -6922,7 +6922,7 @@ deploymentSpec: \ if use_bq_prefix:\n bq_uri = 'bq://' + bq_uri\n outputs.append(bq_uri)\n\ \ return collections.namedtuple(\n 'Outputs',\n ['project_id',\ \ 'dataset_id', 'table_id', 'uri'],\n )(*outputs)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-training-configurator-and-validator: container: args: @@ -6967,7 +6967,7 @@ deploymentSpec: ["--temporal_total_weight=", "{{$.inputs.parameters[''temporal_total_weight'']}}"]}}}' - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 pipelineInfo: description: The Temporal Fusion Transformer (TFT) Forecasting pipeline. 
name: temporal-fusion-transformer-forecasting diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/time_series_dense_encoder_forecasting_pipeline.yaml b/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/time_series_dense_encoder_forecasting_pipeline.yaml index 2f271ccebaf..cd913e96c8f 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/time_series_dense_encoder_forecasting_pipeline.yaml +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/forecasting/time_series_dense_encoder_forecasting_pipeline.yaml @@ -5577,7 +5577,7 @@ deploymentSpec: - '{"display_name": "automl-forecasting-ensemble-{{$.pipeline_job_uuid}}-{{$.pipeline_task_uuid}}", "encryption_spec": {"kms_key_name": "{{$.inputs.parameters[''encryption_spec_key_name'']}}"}, "job_spec": {"worker_pool_specs": [{"replica_count": 1, "machine_spec": - {"machine_type": "n1-highmem-8"}, "container_spec": {"image_uri": "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + {"machine_type": "n1-highmem-8"}, "container_spec": {"image_uri": "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "args": ["forecasting_mp_ensemble", "--transform_output_path={{$.inputs.artifacts[''transform_output''].uri}}", "--error_file_path={{$.inputs.parameters[''root_dir'']}}/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb", "--metadata_path={{$.inputs.artifacts[''metadata''].uri}}", "--tuning_result_input_path={{$.inputs.artifacts[''tuning_result_input''].uri}}", @@ -5611,7 +5611,7 @@ deploymentSpec: - '{"display_name": "automl-forecasting-ensemble-{{$.pipeline_job_uuid}}-{{$.pipeline_task_uuid}}", "encryption_spec": {"kms_key_name": "{{$.inputs.parameters[''encryption_spec_key_name'']}}"}, "job_spec": {"worker_pool_specs": [{"replica_count": 1, "machine_spec": - {"machine_type": "n1-highmem-8"}, "container_spec": {"image_uri": "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + {"machine_type": "n1-highmem-8"}, "container_spec": {"image_uri": "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "args": ["forecasting_mp_ensemble", "--transform_output_path={{$.inputs.artifacts[''transform_output''].uri}}", "--error_file_path={{$.inputs.parameters[''root_dir'']}}/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb", "--metadata_path={{$.inputs.artifacts[''metadata''].uri}}", "--tuning_result_input_path={{$.inputs.artifacts[''tuning_result_input''].uri}}", @@ -5646,11 +5646,11 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "\", \"args\": [\"forecasting_mp_l2l_stage_1_tuner", "\", \"--region=", "{{$.inputs.parameters[''location'']}}", "\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", "\", \"--training_docker_uri=", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + 
"us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "\", \"--reduce_search_space_mode=", "{{$.inputs.parameters[''reduce_search_space_mode'']}}", "\", \"--component_id={{$.pipeline_task_uuid}}", "\", \"--training_base_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/train", @@ -5689,11 +5689,11 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "\", \"args\": [\"forecasting_mp_l2l_stage_2_tuner", "\", \"--region=", "{{$.inputs.parameters[''location'']}}", "\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", "\", \"--training_docker_uri=", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250129_0625", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/forecasting-training:20250827_0525", "\", \"--component_id={{$.pipeline_task_uuid}}", "\", \"--training_base_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/train", "\", \"--num_parallel_trial=", "{{$.inputs.parameters[''num_parallel_trials'']}}", @@ -5732,7 +5732,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"cancel_l2l_tuner\", \"--error_file_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb\", \"--cleanup_lro_job_infos=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/lro\"]}}]}}"]}' @@ -5797,7 +5797,7 @@ deploymentSpec: \ 'stage_2_single_run_max_secs',\n ],\n )(\n stage_1_deadline_hours,\n\ \ stage_1_single_run_max_secs,\n stage_2_deadline_hours,\n \ \ stage_2_single_run_max_secs,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-calculate-training-parameters-2: container: args: @@ -5853,7 +5853,7 @@ deploymentSpec: \ 'stage_2_single_run_max_secs',\n ],\n )(\n stage_1_deadline_hours,\n\ \ stage_1_single_run_max_secs,\n stage_2_deadline_hours,\n \ \ stage_2_single_run_max_secs,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-feature-attribution: container: args: @@ -6044,8 +6044,8 @@ deploymentSpec: "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp"]}' - '{"Concat": ["--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}"]}' - '{"Concat": ["--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}"]}' - - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 - - 
--feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 + - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 - '{"Concat": ["--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}"]}' - '{"Concat": ["--dataflow_subnetwork_fully_qualified=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}"]}' - '{"Concat": ["--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}"]}' @@ -6062,7 +6062,7 @@ deploymentSpec: - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - '{"Concat": ["--encryption_spec_key_name=", "{{$.inputs.parameters[''encryption_spec_key_name'']}}"]}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 30.0 @@ -6093,7 +6093,7 @@ deploymentSpec: \ = 'point'\n else:\n forecasting_type = 'quantile'\n\n return collections.namedtuple(\n\ \ 'Outputs',\n (\n 'forecasting_type',\n 'quantiles',\n\ \ ),\n )(forecasting_type, quantiles)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-finalize-eval-quantile-parameters-2: container: args: @@ -6121,7 +6121,7 @@ deploymentSpec: \ = 'point'\n else:\n forecasting_type = 'quantile'\n\n return collections.namedtuple(\n\ \ 'Outputs',\n (\n 'forecasting_type',\n 'quantiles',\n\ \ ),\n )(forecasting_type, quantiles)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-or-create-model-description: container: args: @@ -6150,7 +6150,7 @@ deploymentSpec: \ return f'{original_description} From: {pipeline_url}'\n\n # The pipeline\ \ url contains KFP placeholders injected at runtime.\n return f'Vertex\ \ forecasting model trained in the pipeline: {pipeline_url}'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-or-create-model-description-2: container: args: @@ -6179,7 +6179,7 @@ deploymentSpec: \ return f'{original_description} From: {pipeline_url}'\n\n # The pipeline\ \ url contains KFP placeholders injected at runtime.\n return f'Vertex\ \ forecasting model trained in the pipeline: {pipeline_url}'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-prediction-image-uri: container: args: @@ -6202,14 +6202,14 @@ deploymentSpec: Returns the prediction image corresponding to the given model type.\"\"\"\ \n # Keys come from AutoMlTimeSeriesForecastingTrainSpec.\n # The URIs\ \ must be hardcoded without any breaks in the code so string\n # replacement\ - \ will work correctly.\n images = {\n 'l2l': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250129_0625',\n\ - \ 'seq2seq': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-seq2seq:20250129_0625',\n\ - \ 'tft': 
'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-tft:20250129_0625',\n\ - \ 'tide': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-tide:20250129_0625',\n\ - \ }\n if model_type not in images:\n raise ValueError(\n f'Invalid\ - \ forecasting model type: {model_type}. Valid options are: '\n f'{images.keys()}.'\n\ - \ )\n return images[model_type]\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + \ will work correctly.\n images = {\n 'l2l': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'seq2seq': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'tft': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'tide': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n }\n if model_type not in images:\n raise ValueError(\n\ + \ f'Invalid forecasting model type: {model_type}. Valid options are:\ + \ '\n f'{images.keys()}.'\n )\n return images[model_type]\n\n" + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-prediction-image-uri-2: container: args: @@ -6232,14 +6232,14 @@ deploymentSpec: Returns the prediction image corresponding to the given model type.\"\"\"\ \n # Keys come from AutoMlTimeSeriesForecastingTrainSpec.\n # The URIs\ \ must be hardcoded without any breaks in the code so string\n # replacement\ - \ will work correctly.\n images = {\n 'l2l': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250129_0625',\n\ - \ 'seq2seq': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-seq2seq:20250129_0625',\n\ - \ 'tft': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-tft:20250129_0625',\n\ - \ 'tide': 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-tide:20250129_0625',\n\ - \ }\n if model_type not in images:\n raise ValueError(\n f'Invalid\ - \ forecasting model type: {model_type}. Valid options are: '\n f'{images.keys()}.'\n\ - \ )\n return images[model_type]\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + \ will work correctly.\n images = {\n 'l2l': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'seq2seq': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'tft': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n 'tide': (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/forecasting-prediction-server-l2l:20250827_0525'\n\ + \ ),\n }\n if model_type not in images:\n raise ValueError(\n\ + \ f'Invalid forecasting model type: {model_type}. 
Valid options are:\ + \ '\n f'{images.keys()}.'\n )\n return images[model_type]\n\n" + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-predictions-column: container: args: @@ -6262,7 +6262,7 @@ deploymentSpec: \ str) -> str:\n \"\"\"Generates the BP output's target column name.\"\"\ \"\n if forecasting_type == 'quantile':\n return f'predicted_{target_column}.quantile_predictions'\n\ \ return f'predicted_{target_column}.value'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-predictions-column-2: container: args: @@ -6285,7 +6285,7 @@ deploymentSpec: \ str) -> str:\n \"\"\"Generates the BP output's target column name.\"\"\ \"\n if forecasting_type == 'quantile':\n return f'predicted_{target_column}.quantile_predictions'\n\ \ return f'predicted_{target_column}.value'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-importer: importer: artifactUri: @@ -6817,7 +6817,7 @@ deploymentSpec: \ 'model_display_name',\n 'transformations',\n ],\n\ \ )(\n data_source_csv_filenames,\n data_source_bigquery_table_path,\n\ \ model_display_name,\n transformations,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-split-materialized-data: container: args: @@ -6863,7 +6863,7 @@ deploymentSpec: \ 'w') as f:\n f.write(file_patterns[0])\n\n with tf.io.gfile.GFile(materialized_eval_split,\ \ 'w') as f:\n f.write(file_patterns[1])\n\n with tf.io.gfile.GFile(materialized_test_split,\ \ 'w') as f:\n f.write(file_patterns[2])\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 exec-string-not-empty: container: args: @@ -6887,7 +6887,7 @@ deploymentSpec: \n Returns:\n Boolean value. 
-> 'true' if empty, 'false' if not empty.\ \ We need to use str\n instead of bool due to a limitation in KFP compiler.\n\ \ \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-table-to-uri: container: args: @@ -6917,7 +6917,7 @@ deploymentSpec: \ if use_bq_prefix:\n bq_uri = 'bq://' + bq_uri\n outputs.append(bq_uri)\n\ \ return collections.namedtuple(\n 'Outputs',\n ['project_id',\ \ 'dataset_id', 'table_id', 'uri'],\n )(*outputs)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-table-to-uri-2: container: args: @@ -6947,7 +6947,7 @@ deploymentSpec: \ if use_bq_prefix:\n bq_uri = 'bq://' + bq_uri\n outputs.append(bq_uri)\n\ \ return collections.namedtuple(\n 'Outputs',\n ['project_id',\ \ 'dataset_id', 'table_id', 'uri'],\n )(*outputs)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-training-configurator-and-validator: container: args: @@ -6992,7 +6992,7 @@ deploymentSpec: ["--temporal_total_weight=", "{{$.inputs.parameters[''temporal_total_weight'']}}"]}}}' - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 pipelineInfo: description: The Timeseries Dense Encoder (TiDE) Forecasting pipeline. 
name: time-series-dense-encoder-forecasting diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/auto_feature_engineering.py b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/auto_feature_engineering.py index 13dc0d277ef..4c60a8eb9a5 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/auto_feature_engineering.py +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/auto_feature_engineering.py @@ -65,7 +65,7 @@ def automated_feature_engineering( ' 1, "machine_spec": {"machine_type": "n1-standard-16"},' ' "container_spec": {"image_uri":"' ), - 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625', + 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525', '", "args": ["feature_engineering", "--project=', project, '", "--location=', location, '", "--data_source_bigquery_table_path=', data_source_bigquery_table_path, diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/automl_tabular_feature_selection_pipeline.yaml b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/automl_tabular_feature_selection_pipeline.yaml index 171dc6ef15b..46212a26530 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/automl_tabular_feature_selection_pipeline.yaml +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/automl_tabular_feature_selection_pipeline.yaml @@ -8622,9 +8622,9 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"l2l_cv_tuner\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", - "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", + "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"--component_id={{$.pipeline_task_uuid}}\", \"--training_base_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/train\", \"--num_parallel_trial=", "{{$.inputs.parameters[''num_parallel_trials'']}}", @@ -8665,9 +8665,9 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"l2l_cv_tuner\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", - "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", + "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"--component_id={{$.pipeline_task_uuid}}\", \"--training_base_dir=", 
"{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/train\", \"--num_parallel_trial=", "{{$.inputs.parameters[''num_parallel_trials'']}}", @@ -8708,7 +8708,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-highmem-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"ensemble\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", "\", \"--model_output_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/model\", \"--custom_model_output_path=", "{{$.inputs.parameters[''root_dir'']}}", @@ -8720,7 +8720,7 @@ deploymentSpec: "\", \"--tuning_result_input_path=", "{{$.inputs.artifacts[''tuning_result_input''].uri}}", "\", \"--instance_baseline_path=", "{{$.inputs.artifacts[''instance_baseline''].uri}}", "\", \"--warmup_data=", "{{$.inputs.artifacts[''warmup_data''].uri}}", "\", - \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625", + \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525", "\", \"--model_path=", "{{$.outputs.artifacts[''model''].uri}}", "\", \"--custom_model_path=", "{{$.outputs.artifacts[''model_without_custom_ops''].uri}}", "\", \"--explanation_metadata_path=", "{{$.outputs.parameters[''explanation_metadata''].output_file}}", ",", "{{$.outputs.artifacts[''explanation_metadata_artifact''].uri}}", @@ -8749,7 +8749,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-highmem-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"ensemble\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", "\", \"--model_output_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/model\", \"--custom_model_output_path=", "{{$.inputs.parameters[''root_dir'']}}", @@ -8761,7 +8761,7 @@ deploymentSpec: "\", \"--tuning_result_input_path=", "{{$.inputs.artifacts[''tuning_result_input''].uri}}", "\", \"--instance_baseline_path=", "{{$.inputs.artifacts[''instance_baseline''].uri}}", "\", \"--warmup_data=", "{{$.inputs.artifacts[''warmup_data''].uri}}", "\", - \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625", + \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525", "\", \"--model_path=", "{{$.outputs.artifacts[''model''].uri}}", "\", \"--custom_model_path=", "{{$.outputs.artifacts[''model_without_custom_ops''].uri}}", "\", \"--explanation_metadata_path=", "{{$.outputs.parameters[''explanation_metadata''].output_file}}", ",", "{{$.outputs.artifacts[''explanation_metadata_artifact''].uri}}", @@ -8790,7 +8790,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", 
"{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-highmem-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"ensemble\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", "\", \"--model_output_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/model\", \"--custom_model_output_path=", "{{$.inputs.parameters[''root_dir'']}}", @@ -8802,7 +8802,7 @@ deploymentSpec: "\", \"--tuning_result_input_path=", "{{$.inputs.artifacts[''tuning_result_input''].uri}}", "\", \"--instance_baseline_path=", "{{$.inputs.artifacts[''instance_baseline''].uri}}", "\", \"--warmup_data=", "{{$.inputs.artifacts[''warmup_data''].uri}}", "\", - \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625", + \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525", "\", \"--model_path=", "{{$.outputs.artifacts[''model''].uri}}", "\", \"--custom_model_path=", "{{$.outputs.artifacts[''model_without_custom_ops''].uri}}", "\", \"--explanation_metadata_path=", "{{$.outputs.parameters[''explanation_metadata''].output_file}}", ",", "{{$.outputs.artifacts[''explanation_metadata_artifact''].uri}}", @@ -8831,7 +8831,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"cancel_l2l_tuner\", \"--error_file_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb\", \"--cleanup_lro_job_infos=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/lro\"]}}]}}"]}' @@ -8846,7 +8846,7 @@ deploymentSpec: args: - --executor_input - '{{$}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 52.0 @@ -8855,7 +8855,7 @@ deploymentSpec: args: - --executor_input - '{{$}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 52.0 @@ -8864,7 +8864,7 @@ deploymentSpec: args: - --executor_input - '{{$}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 52.0 @@ -8884,9 +8884,9 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + 
"us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"l2l_stage_1_tuner\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", - "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", + "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"--feature_selection_result_path=", "{{$.inputs.artifacts[''feature_ranking''].uri}}", "\", \"--disable_early_stopping=", "{{$.inputs.parameters[''disable_early_stopping'']}}", "\", \"--tune_feature_selection_rate=", "{{$.inputs.parameters[''tune_feature_selection_rate'']}}", @@ -8931,9 +8931,9 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"l2l_stage_1_tuner\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", - "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", + "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"--feature_selection_result_path=", "{{$.inputs.artifacts[''feature_ranking''].uri}}", "\", \"--disable_early_stopping=", "{{$.inputs.parameters[''disable_early_stopping'']}}", "\", \"--tune_feature_selection_rate=", "{{$.inputs.parameters[''tune_feature_selection_rate'']}}", @@ -8978,7 +8978,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"transform\", \"--is_mp=true\", \"--transform_output_artifact_path=", "{{$.outputs.artifacts[''transform_output''].uri}}", "\", \"--transform_output_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/transform\", @@ -8999,7 +8999,7 @@ deploymentSpec: \"--dataflow_tmp_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp\", \"--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}", "\", \"--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}", - "\", \"--dataflow_worker_container_image=", "us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625", + "\", \"--dataflow_worker_container_image=", "us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525", "\", \"--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}", "\", \"--dataflow_subnetwork_fully_qualified=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}", "\", \"--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}", @@ -9030,7 +9030,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", 
"{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"transform\", \"--is_mp=true\", \"--transform_output_artifact_path=", "{{$.outputs.artifacts[''transform_output''].uri}}", "\", \"--transform_output_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/transform\", @@ -9051,7 +9051,7 @@ deploymentSpec: \"--dataflow_tmp_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp\", \"--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}", "\", \"--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}", - "\", \"--dataflow_worker_container_image=", "us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625", + "\", \"--dataflow_worker_container_image=", "us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525", "\", \"--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}", "\", \"--dataflow_subnetwork_fully_qualified=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}", "\", \"--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}", @@ -9087,7 +9087,7 @@ deploymentSpec: \ *\n\ndef _bool_identity(value: bool) -> str:\n \"\"\"Returns boolean\ \ value.\n\n Args:\n value: Boolean value to return\n\n Returns:\n\ \ Boolean value.\n \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-bool-identity-2: container: args: @@ -9109,7 +9109,7 @@ deploymentSpec: \ *\n\ndef _bool_identity(value: bool) -> str:\n \"\"\"Returns boolean\ \ value.\n\n Args:\n value: Boolean value to return\n\n Returns:\n\ \ Boolean value.\n \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-bool-identity-3: container: args: @@ -9131,7 +9131,7 @@ deploymentSpec: \ *\n\ndef _bool_identity(value: bool) -> str:\n \"\"\"Returns boolean\ \ value.\n\n Args:\n value: Boolean value to return\n\n Returns:\n\ \ Boolean value.\n \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-calculate-training-parameters: container: args: @@ -9223,7 +9223,7 @@ deploymentSpec: \ stage_1_single_run_max_secs,\n stage_2_deadline_hours,\n \ \ stage_2_single_run_max_secs,\n distill_stage_1_deadline_hours,\n\ \ reduce_search_space_mode,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-calculate-training-parameters-2: container: args: @@ -9315,7 +9315,7 @@ deploymentSpec: \ stage_1_single_run_max_secs,\n stage_2_deadline_hours,\n \ \ stage_2_single_run_max_secs,\n distill_stage_1_deadline_hours,\n\ \ reduce_search_space_mode,\n )\n\n" - image: 
us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-check-if-binary-classification: container: args: @@ -9343,7 +9343,7 @@ deploymentSpec: \ with open(example_gen_metadata, 'r') as f:\n metadata_path = f.read()\n\ \ metadata = json.loads(metadata_path)\n return str(metadata['objective']\ \ == 'binary_classification').lower()\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-feature-attribution: container: args: @@ -9536,7 +9536,7 @@ deploymentSpec: \ 'r') as f:\n split_0_content = f.read()\n with open(split_1, 'r')\ \ as f:\n split_1_content = f.read()\n with open(splits, 'w') as f:\n\ \ f.write(','.join([split_0_content, split_1_content]))\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-model-batch-explanation: container: args: @@ -10383,7 +10383,7 @@ deploymentSpec: \n train_spec['transformations'] = purged_transformation_list\n metadata['train_spec']\ \ = train_spec\n\n with open(output_metadata, 'w') as f:\n f.write(json.dumps(metadata))\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-read-input-uri: container: args: @@ -10411,7 +10411,7 @@ deploymentSpec: \ import json\n # pylint: enable=g-import-not-at-top,import-outside-toplevel,redefined-outer-name,reimported\n\ \ with open(split_uri, 'r') as f:\n data_source = json.loads(f.read())\n\ \ return data_source['tf_record_data_source']['file_patterns']\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-read-input-uri-2: container: args: @@ -10439,7 +10439,7 @@ deploymentSpec: \ import json\n # pylint: enable=g-import-not-at-top,import-outside-toplevel,redefined-outer-name,reimported\n\ \ with open(split_uri, 'r') as f:\n data_source = json.loads(f.read())\n\ \ return data_source['tf_record_data_source']['file_patterns']\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-string-not-empty: container: args: @@ -10463,7 +10463,7 @@ deploymentSpec: \n Returns:\n Boolean value. 
-> 'true' if empty, 'false' if not empty.\ \ We need to use str\n instead of bool due to a limitation in KFP compiler.\n\ \ \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-tabular-feature-ranking-and-selection: container: args: @@ -10480,7 +10480,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"feature_selection\", \"--data_source=", "{{$.inputs.artifacts[''data_source''].uri}}", "\", \"--target_column=", "{{$.inputs.parameters[''target_column_name'']}}", "\", \"--prediction_type=", "{{$.inputs.parameters[''prediction_type'']}}", @@ -10493,7 +10493,7 @@ deploymentSpec: \"--dataflow_staging_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_staging\", \"--dataflow_tmp_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp\", \"--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}", - "\", \"--dataflow_worker_container_image=", "us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625", + "\", \"--dataflow_worker_container_image=", "us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525", "\", \"--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}", "\", \"--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}", "\", \"--dataflow_subnetwork_fully_qualified=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}", @@ -10526,7 +10526,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"stats_generator\",", "\"--train_spec={\\\"prediction_type\\\": \\\"", "{{$.inputs.parameters[''prediction_type'']}}", "\\\", \\\"target_column\\\": \\\"", "{{$.inputs.parameters[''target_column_name'']}}", "\\\", \\\"optimization_objective\\\": @@ -10559,7 +10559,7 @@ deploymentSpec: \"--dataflow_staging_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_staging\", \"--dataflow_tmp_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp\", \"--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}", - "\", \"--dataflow_worker_container_image=", "us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625", + "\", \"--dataflow_worker_container_image=", "us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525", "\", \"--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}", "\", \"--dataflow_disk_size_gb=", 
"{{$.inputs.parameters[''dataflow_disk_size_gb'']}}", "\", \"--dataflow_kms_key=", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", @@ -10614,7 +10614,7 @@ deploymentSpec: \ f'{directory}/prediction.results-*',\n ],\n 'coder':\ \ 'PROTO_VALUE',\n },\n }\n with open(result, 'w') as f:\n f.write(json.dumps(data_source))\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-write-bp-result-path-2: container: args: @@ -10644,7 +10644,7 @@ deploymentSpec: \ f'{directory}/prediction.results-*',\n ],\n 'coder':\ \ 'PROTO_VALUE',\n },\n }\n with open(result, 'w') as f:\n f.write(json.dumps(data_source))\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 pipelineInfo: description: The AutoML Tabular pipeline. name: automl-tabular-feature-selection-pipeline diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/automl_tabular_v2_pipeline.yaml b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/automl_tabular_v2_pipeline.yaml index 3c2a628fe8b..292272d80bb 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/automl_tabular_v2_pipeline.yaml +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/automl_tabular_v2_pipeline.yaml @@ -9452,9 +9452,9 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"l2l_cv_tuner\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", - "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", + "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"--component_id={{$.pipeline_task_uuid}}\", \"--training_base_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/train\", \"--num_parallel_trial=", "{{$.inputs.parameters[''num_parallel_trials'']}}", @@ -9495,9 +9495,9 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"l2l_cv_tuner\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", - "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", + "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"--component_id={{$.pipeline_task_uuid}}\", \"--training_base_dir=", "{{$.inputs.parameters[''root_dir'']}}", 
"/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/train\", \"--num_parallel_trial=", "{{$.inputs.parameters[''num_parallel_trials'']}}", @@ -9538,7 +9538,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-highmem-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"ensemble\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", "\", \"--model_output_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/model\", \"--custom_model_output_path=", "{{$.inputs.parameters[''root_dir'']}}", @@ -9550,7 +9550,7 @@ deploymentSpec: "\", \"--tuning_result_input_path=", "{{$.inputs.artifacts[''tuning_result_input''].uri}}", "\", \"--instance_baseline_path=", "{{$.inputs.artifacts[''instance_baseline''].uri}}", "\", \"--warmup_data=", "{{$.inputs.artifacts[''warmup_data''].uri}}", "\", - \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625", + \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525", "\", \"--model_path=", "{{$.outputs.artifacts[''model''].uri}}", "\", \"--custom_model_path=", "{{$.outputs.artifacts[''model_without_custom_ops''].uri}}", "\", \"--explanation_metadata_path=", "{{$.outputs.parameters[''explanation_metadata''].output_file}}", ",", "{{$.outputs.artifacts[''explanation_metadata_artifact''].uri}}", @@ -9579,7 +9579,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-highmem-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"ensemble\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", "\", \"--model_output_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/model\", \"--custom_model_output_path=", "{{$.inputs.parameters[''root_dir'']}}", @@ -9591,7 +9591,7 @@ deploymentSpec: "\", \"--tuning_result_input_path=", "{{$.inputs.artifacts[''tuning_result_input''].uri}}", "\", \"--instance_baseline_path=", "{{$.inputs.artifacts[''instance_baseline''].uri}}", "\", \"--warmup_data=", "{{$.inputs.artifacts[''warmup_data''].uri}}", "\", - \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625", + \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525", "\", \"--model_path=", "{{$.outputs.artifacts[''model''].uri}}", "\", \"--custom_model_path=", "{{$.outputs.artifacts[''model_without_custom_ops''].uri}}", "\", \"--explanation_metadata_path=", "{{$.outputs.parameters[''explanation_metadata''].output_file}}", ",", "{{$.outputs.artifacts[''explanation_metadata_artifact''].uri}}", @@ -9620,7 +9620,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, 
\"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-highmem-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"ensemble\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", "\", \"--model_output_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/model\", \"--custom_model_output_path=", "{{$.inputs.parameters[''root_dir'']}}", @@ -9632,7 +9632,7 @@ deploymentSpec: "\", \"--tuning_result_input_path=", "{{$.inputs.artifacts[''tuning_result_input''].uri}}", "\", \"--instance_baseline_path=", "{{$.inputs.artifacts[''instance_baseline''].uri}}", "\", \"--warmup_data=", "{{$.inputs.artifacts[''warmup_data''].uri}}", "\", - \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625", + \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525", "\", \"--model_path=", "{{$.outputs.artifacts[''model''].uri}}", "\", \"--custom_model_path=", "{{$.outputs.artifacts[''model_without_custom_ops''].uri}}", "\", \"--explanation_metadata_path=", "{{$.outputs.parameters[''explanation_metadata''].output_file}}", ",", "{{$.outputs.artifacts[''explanation_metadata_artifact''].uri}}", @@ -9661,7 +9661,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"cancel_l2l_tuner\", \"--error_file_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb\", \"--cleanup_lro_job_infos=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/lro\"]}}]}}"]}' @@ -9676,7 +9676,7 @@ deploymentSpec: args: - --executor_input - '{{$}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 52.0 @@ -9685,7 +9685,7 @@ deploymentSpec: args: - --executor_input - '{{$}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 52.0 @@ -9694,7 +9694,7 @@ deploymentSpec: args: - --executor_input - '{{$}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 52.0 @@ -9714,9 +9714,9 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + 
"us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"l2l_stage_1_tuner\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", - "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", + "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"--feature_selection_result_path=", "{{$.inputs.artifacts[''feature_ranking''].uri}}", "\", \"--disable_early_stopping=", "{{$.inputs.parameters[''disable_early_stopping'']}}", "\", \"--tune_feature_selection_rate=", "{{$.inputs.parameters[''tune_feature_selection_rate'']}}", @@ -9761,9 +9761,9 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"l2l_stage_1_tuner\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", - "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", + "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"--feature_selection_result_path=", "{{$.inputs.artifacts[''feature_ranking''].uri}}", "\", \"--disable_early_stopping=", "{{$.inputs.parameters[''disable_early_stopping'']}}", "\", \"--tune_feature_selection_rate=", "{{$.inputs.parameters[''tune_feature_selection_rate'']}}", @@ -9813,7 +9813,7 @@ deploymentSpec: \ *\n\ndef _bool_identity(value: bool) -> str:\n \"\"\"Returns boolean\ \ value.\n\n Args:\n value: Boolean value to return\n\n Returns:\n\ \ Boolean value.\n \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-bool-identity-2: container: args: @@ -9835,7 +9835,7 @@ deploymentSpec: \ *\n\ndef _bool_identity(value: bool) -> str:\n \"\"\"Returns boolean\ \ value.\n\n Args:\n value: Boolean value to return\n\n Returns:\n\ \ Boolean value.\n \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-bool-identity-3: container: args: @@ -9857,7 +9857,7 @@ deploymentSpec: \ *\n\ndef _bool_identity(value: bool) -> str:\n \"\"\"Returns boolean\ \ value.\n\n Args:\n value: Boolean value to return\n\n Returns:\n\ \ Boolean value.\n \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-calculate-training-parameters: container: args: @@ -9949,7 +9949,7 @@ deploymentSpec: \ stage_1_single_run_max_secs,\n stage_2_deadline_hours,\n \ \ stage_2_single_run_max_secs,\n distill_stage_1_deadline_hours,\n\ \ reduce_search_space_mode,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-calculate-training-parameters-2: 
container: args: @@ -10041,7 +10041,7 @@ deploymentSpec: \ stage_1_single_run_max_secs,\n stage_2_deadline_hours,\n \ \ stage_2_single_run_max_secs,\n distill_stage_1_deadline_hours,\n\ \ reduce_search_space_mode,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-distillation-stage-feature-transform-engine: container: args: @@ -10075,14 +10075,14 @@ deploymentSpec: "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp"]}' - '{"Concat": ["--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}"]}' - '{"Concat": ["--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}"]}' - - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 + - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 - '{"Concat": ["--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}"]}' - '{"Concat": ["--dataflow_subnetwork_fully_qualified=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}"]}' - '{"Concat": ["--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}"]}' - '{"Concat": ["--dataflow_service_account=", "{{$.inputs.parameters[''dataflow_service_account'']}}"]}' - '{"Concat": ["--dataflow_kms_key=", "{{$.inputs.parameters[''encryption_spec_key_name'']}}"]}' - '{"Concat": ["--gcp_resources_path=", "{{$.outputs.parameters[''gcp_resources''].output_file}}"]}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 30.0 @@ -10329,8 +10329,8 @@ deploymentSpec: "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp"]}' - '{"Concat": ["--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}"]}' - '{"Concat": ["--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}"]}' - - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 - - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 + - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 - '{"Concat": ["--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}"]}' - '{"Concat": ["--dataflow_subnetwork_fully_qualified=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}"]}' - '{"Concat": ["--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}"]}' @@ -10347,7 +10347,7 @@ deploymentSpec: - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - '{"Concat": ["--encryption_spec_key_name=", "{{$.inputs.parameters[''encryption_spec_key_name'']}}"]}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 30.0 @@ -10382,7 +10382,7 @@ deploymentSpec: \ collections.namedtuple(\n 'Outputs',\n [\n 
'bigquery_destination_output_uri',\n\ \ ],\n )(\n f'{bigquery_staging_dataset_uri}.{table_prefix}{model_display_name}{curr_time}',\n\ \ )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-bigquery-destination-output-uri-2: container: args: @@ -10414,7 +10414,7 @@ deploymentSpec: \ collections.namedtuple(\n 'Outputs',\n [\n 'bigquery_destination_output_uri',\n\ \ ],\n )(\n f'{bigquery_staging_dataset_uri}.{table_prefix}{model_display_name}{curr_time}',\n\ \ )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-bp-bq-output-table: container: args: @@ -10442,7 +10442,7 @@ deploymentSpec: \n return collections.namedtuple(\n 'Outputs',\n [\n \ \ 'bq_output_table_uri',\n ],\n )(\n f\"{bp_job.metadata['bigqueryOutputDataset']}.{bp_job.metadata['bigqueryOutputTable']}\"\ ,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-bp-bq-output-table-2: container: args: @@ -10470,7 +10470,7 @@ deploymentSpec: \n return collections.namedtuple(\n 'Outputs',\n [\n \ \ 'bq_output_table_uri',\n ],\n )(\n f\"{bp_job.metadata['bigqueryOutputDataset']}.{bp_job.metadata['bigqueryOutputTable']}\"\ ,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-model-display-name: container: args: @@ -10497,7 +10497,7 @@ deploymentSpec: \n return collections.namedtuple(\n 'Outputs',\n [\n \ \ 'model_display_name',\n ],\n )(\n model_display_name,\n )\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-transform-config-path: container: args: @@ -10530,7 +10530,7 @@ deploymentSpec: \ )\n\n return collections.namedtuple(\n 'Outputs',\n [\n \ \ 'transform_config_path',\n ],\n )(\n transform_config_path,\n\ \ )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-importer: importer: artifactUri: @@ -10564,7 +10564,7 @@ deploymentSpec: \ 'r') as f:\n split_0_content = f.read()\n with open(split_1, 'r')\ \ as f:\n split_1_content = f.read()\n with open(splits, 'w') as f:\n\ \ f.write(','.join([split_0_content, split_1_content]))\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-model-batch-explanation: container: args: @@ -11409,7 +11409,7 @@ deploymentSpec: \ 'data_source_csv_filenames',\n 'data_source_bigquery_table_path',\n\ \ ],\n )(\n data_source_csv_filenames,\n data_source_bigquery_table_path,\n\ \ )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-split-materialized-data: container: args: @@ -11455,7 +11455,7 @@ deploymentSpec: \ 'w') as f:\n f.write(file_patterns[0])\n\n with tf.io.gfile.GFile(materialized_eval_split,\ \ 'w') as f:\n f.write(file_patterns[1])\n\n with tf.io.gfile.GFile(materialized_test_split,\ \ 'w') as f:\n f.write(file_patterns[2])\n\n" - image: 
us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 exec-split-materialized-data-2: container: args: @@ -11501,7 +11501,7 @@ deploymentSpec: \ 'w') as f:\n f.write(file_patterns[0])\n\n with tf.io.gfile.GFile(materialized_eval_split,\ \ 'w') as f:\n f.write(file_patterns[1])\n\n with tf.io.gfile.GFile(materialized_test_split,\ \ 'w') as f:\n f.write(file_patterns[2])\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 exec-string-not-empty: container: args: @@ -11525,7 +11525,7 @@ deploymentSpec: \n Returns:\n Boolean value. -> 'true' if empty, 'false' if not empty.\ \ We need to use str\n instead of bool due to a limitation in KFP compiler.\n\ \ \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-training-configurator-and-validator: container: args: @@ -11570,7 +11570,7 @@ deploymentSpec: ["--temporal_total_weight=", "{{$.inputs.parameters[''temporal_total_weight'']}}"]}}}' - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 exec-training-configurator-and-validator-2: container: args: @@ -11615,7 +11615,7 @@ deploymentSpec: ["--temporal_total_weight=", "{{$.inputs.parameters[''temporal_total_weight'']}}"]}}}' - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 pipelineInfo: description: The AutoML Tabular pipeline v2. 
name: automl-tabular-v2 diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/distillation_stage_feature_transform_engine.py b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/distillation_stage_feature_transform_engine.py index d5878514f1e..663336ef071 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/distillation_stage_feature_transform_engine.py +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/distillation_stage_feature_transform_engine.py @@ -77,7 +77,7 @@ def distillation_stage_feature_transform_engine( # fmt: on return dsl.ContainerSpec( - image='us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625', + image='us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525', command=[], args=[ 'distillation_stage_feature_transform_engine', @@ -185,7 +185,7 @@ def distillation_stage_feature_transform_engine( dataflow_machine_type, ] ), - '--dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625', + '--dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525', dsl.ConcatPlaceholder( items=[ '--dataflow_disk_size_gb=', diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/feature_selection.py b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/feature_selection.py index 6d9f1a301bd..a4eab718113 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/feature_selection.py +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/feature_selection.py @@ -100,7 +100,7 @@ def tabular_feature_ranking_and_selection( ' 1, "machine_spec": {"machine_type": "n1-standard-8"},' ' "container_spec": {"image_uri":"' ), - 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625', + 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525', '", "args": ["feature_selection", "--data_source=', data_source.uri, '", "--target_column=', @@ -137,7 +137,7 @@ def tabular_feature_ranking_and_selection( ), dataflow_max_num_workers, '", "--dataflow_worker_container_image=', - 'us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625', + 'us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525', '", "--dataflow_machine_type=', dataflow_machine_type, '", "--dataflow_disk_size_gb=', diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/feature_selection_pipeline.yaml b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/feature_selection_pipeline.yaml index c5a63f4e033..49fe4eb20e5 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/feature_selection_pipeline.yaml +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/feature_selection_pipeline.yaml @@ -983,8 +983,8 @@ deploymentSpec: "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp"]}' - '{"Concat": ["--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}"]}' - '{"Concat": ["--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}"]}' - - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 - - 
--feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 + - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 - '{"Concat": ["--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}"]}' - '{"Concat": ["--dataflow_subnetwork_fully_qualified=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}"]}' - '{"Concat": ["--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}"]}' @@ -1001,7 +1001,7 @@ deploymentSpec: - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - '{"Concat": ["--encryption_spec_key_name=", "{{$.inputs.parameters[''encryption_spec_key_name'']}}"]}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 30.0 @@ -1049,7 +1049,7 @@ deploymentSpec: ["--temporal_total_weight=", "{{$.inputs.parameters[''temporal_total_weight'']}}"]}}}' - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 pipelineInfo: description: Defines pipeline for feature transform engine component. 
name: feature-selection diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/feature_transform_engine.py b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/feature_transform_engine.py index bb054a7d3c1..e850c6c50e6 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/feature_transform_engine.py +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/feature_transform_engine.py @@ -308,7 +308,7 @@ def feature_transform_engine( # fmt: on return dsl.ContainerSpec( - image='us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625', + image='us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525', command=[], args=[ 'feature_transform_engine', @@ -637,8 +637,8 @@ def feature_transform_engine( dsl.ConcatPlaceholder( items=['--dataflow_machine_type=', dataflow_machine_type] ), - '--dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625', - '--feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625', + '--dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525', + '--feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525', dsl.ConcatPlaceholder( items=['--dataflow_disk_size_gb=', dataflow_disk_size_gb] ), diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/tabnet_hyperparameter_tuning_job.py b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/tabnet_hyperparameter_tuning_job.py index d8cea9d0e7a..992006fe1e4 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/tabnet_hyperparameter_tuning_job.py +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/tabnet_hyperparameter_tuning_job.py @@ -158,7 +158,7 @@ def tabnet_hyperparameter_tuning_job( ', "disk_spec": ', training_disk_spec, ', "container_spec": {"image_uri":"', - 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/tabnet-training:20250129_0625', + 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/tabnet-training:20250827_0525', '", "args": ["--target_column=', target_column, '", "--weight_column=', @@ -166,7 +166,7 @@ def tabnet_hyperparameter_tuning_job( '", "--model_type=', prediction_type, '", "--prediction_docker_uri=', - 'us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625', + 'us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525', '", "--prediction_docker_uri_artifact_path=', prediction_docker_uri_output, '", "--baseline_path=', diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/tabnet_hyperparameter_tuning_job_pipeline.yaml b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/tabnet_hyperparameter_tuning_job_pipeline.yaml index 3599306110d..0529e201074 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/tabnet_hyperparameter_tuning_job_pipeline.yaml +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/tabnet_hyperparameter_tuning_job_pipeline.yaml @@ -2826,7 +2826,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, 
\"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"cancel_l2l_tuner\", \"--error_file_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb\", \"--cleanup_lro_job_infos=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/lro\"]}}]}}"]}' @@ -2841,7 +2841,7 @@ deploymentSpec: args: - --executor_input - '{{$}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 52.0 @@ -2866,7 +2866,7 @@ deploymentSpec: \ *\n\ndef _bool_identity(value: bool) -> str:\n \"\"\"Returns boolean\ \ value.\n\n Args:\n value: Boolean value to return\n\n Returns:\n\ \ Boolean value.\n \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-feature-transform-engine: container: args: @@ -2951,8 +2951,8 @@ deploymentSpec: "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp"]}' - '{"Concat": ["--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}"]}' - '{"Concat": ["--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}"]}' - - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 - - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 + - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 - '{"Concat": ["--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}"]}' - '{"Concat": ["--dataflow_subnetwork_fully_qualified=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}"]}' - '{"Concat": ["--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}"]}' @@ -2969,7 +2969,7 @@ deploymentSpec: - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - '{"Concat": ["--encryption_spec_key_name=", "{{$.inputs.parameters[''encryption_spec_key_name'']}}"]}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 30.0 @@ -3037,7 +3037,7 @@ deploymentSpec: \ = {\n 'instanceSchemaUri': instance_schema_uri,\n 'predictionSchemaUri':\ \ prediction_schema_uri,\n }\n unmanaged_container_model.uri = os.path.join(\n\ \ trials_dir, 'trial_{}'.format(best_trial['id']), 'model'\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-model-display-name: container: args: @@ -3064,7 +3064,7 @@ deploymentSpec: \n return collections.namedtuple(\n 'Outputs',\n [\n \ \ 
'model_display_name',\n ],\n )(\n model_display_name,\n )\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-tabnet-study-spec-parameters: container: args: @@ -3580,7 +3580,7 @@ deploymentSpec: \ = ', '.join(extra_overrides)\n warnings.warn(\n f'The overrides\ \ {extra_override_str} were not found in the params and '\n 'will\ \ be ignored.'\n )\n\n return study_spec_parameters\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-model-batch-predict: container: args: @@ -3821,7 +3821,7 @@ deploymentSpec: \ 'training_disk_spec',\n 'eval_machine_spec',\n 'eval_replica_count',\n\ \ ],\n )(\n training_machine_spec,\n training_disk_spec,\n\ \ eval_machine_spec,\n eval_replica_count,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-set-optional-inputs: container: args: @@ -3869,7 +3869,7 @@ deploymentSpec: \ 'data_source_csv_filenames',\n 'data_source_bigquery_table_path',\n\ \ ],\n )(\n data_source_csv_filenames,\n data_source_bigquery_table_path,\n\ \ )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-split-materialized-data: container: args: @@ -3915,7 +3915,7 @@ deploymentSpec: \ 'w') as f:\n f.write(file_patterns[0])\n\n with tf.io.gfile.GFile(materialized_eval_split,\ \ 'w') as f:\n f.write(file_patterns[1])\n\n with tf.io.gfile.GFile(materialized_test_split,\ \ 'w') as f:\n f.write(file_patterns[2])\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 exec-tabnet-hyperparameter-tuning-job: container: args: @@ -3943,11 +3943,11 @@ deploymentSpec: ", \"trial_job_spec\": {\"worker_pool_specs\": [{\"replica_count\":\"", "1", "\", \"machine_spec\": ", "{{$.inputs.parameters[''training_machine_spec'']}}", ", \"disk_spec\": ", "{{$.inputs.parameters[''training_disk_spec'']}}", - ", \"container_spec\": {\"image_uri\":\"", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/tabnet-training:20250129_0625", + ", \"container_spec\": {\"image_uri\":\"", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/tabnet-training:20250827_0525", "\", \"args\": [\"--target_column=", "{{$.inputs.parameters[''target_column'']}}", "\", \"--weight_column=", "{{$.inputs.parameters[''weight_column'']}}", "\", \"--model_type=", "{{$.inputs.parameters[''prediction_type'']}}", "\", - \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625", + \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525", "\", \"--prediction_docker_uri_artifact_path=", "{{$.outputs.parameters[''prediction_docker_uri_output''].output_file}}", "\", \"--baseline_path=", "{{$.inputs.artifacts[''instance_baseline''].uri}}", "\", \"--metadata_path=", "{{$.inputs.artifacts[''metadata''].uri}}", "\", @@ -4016,7 +4016,7 @@ deploymentSpec: ["--temporal_total_weight=", "{{$.inputs.parameters[''temporal_total_weight'']}}"]}}}' - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", 
"{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 pipelineInfo: description: The TabNet built-in algorithm HyperparameterTuningJob pipeline. name: automl-tabular-tabnet-hyperparameter-tuning-job diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/tabnet_trainer.py b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/tabnet_trainer.py index f5e326c8885..8a3dbfaa1c5 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/tabnet_trainer.py +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/tabnet_trainer.py @@ -165,7 +165,7 @@ def tabnet_trainer( ', "disk_spec": ', training_disk_spec, ', "container_spec": {"image_uri":"', - 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/tabnet-training:20250129_0625', + 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/tabnet-training:20250827_0525', '", "args": ["--target_column=', target_column, '", "--weight_column=', @@ -173,7 +173,7 @@ def tabnet_trainer( '", "--model_type=', prediction_type, '", "--prediction_docker_uri=', - 'us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625', + 'us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525', '", "--baseline_path=', instance_baseline.uri, '", "--metadata_path=', diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/tabnet_trainer_pipeline.yaml b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/tabnet_trainer_pipeline.yaml index 6704b69d450..6d31dc00b27 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/tabnet_trainer_pipeline.yaml +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/tabnet_trainer_pipeline.yaml @@ -2875,7 +2875,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"cancel_l2l_tuner\", \"--error_file_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb\", \"--cleanup_lro_job_infos=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/lro\"]}}]}}"]}' @@ -2890,7 +2890,7 @@ deploymentSpec: args: - --executor_input - '{{$}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 52.0 @@ -2915,7 +2915,7 @@ deploymentSpec: \ *\n\ndef _bool_identity(value: bool) -> str:\n \"\"\"Returns boolean\ \ value.\n\n Args:\n value: Boolean value to return\n\n Returns:\n\ \ Boolean value.\n \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-feature-transform-engine: container: args: @@ -3000,8 
+3000,8 @@ deploymentSpec: "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp"]}' - '{"Concat": ["--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}"]}' - '{"Concat": ["--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}"]}' - - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 - - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 + - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 - '{"Concat": ["--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}"]}' - '{"Concat": ["--dataflow_subnetwork_fully_qualified=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}"]}' - '{"Concat": ["--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}"]}' @@ -3018,7 +3018,7 @@ deploymentSpec: - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - '{"Concat": ["--encryption_spec_key_name=", "{{$.inputs.parameters[''encryption_spec_key_name'']}}"]}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 30.0 @@ -3048,7 +3048,7 @@ deploymentSpec: \n return collections.namedtuple(\n 'Outputs',\n [\n \ \ 'model_display_name',\n ],\n )(\n model_display_name,\n )\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-model-batch-predict: container: args: @@ -3289,7 +3289,7 @@ deploymentSpec: \ 'training_disk_spec',\n 'eval_machine_spec',\n 'eval_replica_count',\n\ \ ],\n )(\n training_machine_spec,\n training_disk_spec,\n\ \ eval_machine_spec,\n eval_replica_count,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-set-optional-inputs: container: args: @@ -3337,7 +3337,7 @@ deploymentSpec: \ 'data_source_csv_filenames',\n 'data_source_bigquery_table_path',\n\ \ ],\n )(\n data_source_csv_filenames,\n data_source_bigquery_table_path,\n\ \ )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-split-materialized-data: container: args: @@ -3383,7 +3383,7 @@ deploymentSpec: \ 'w') as f:\n f.write(file_patterns[0])\n\n with tf.io.gfile.GFile(materialized_eval_split,\ \ 'w') as f:\n f.write(file_patterns[1])\n\n with tf.io.gfile.GFile(materialized_test_split,\ \ 'w') as f:\n f.write(file_patterns[2])\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 exec-tabnet-trainer: container: args: @@ -3401,11 +3401,11 @@ deploymentSpec: "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\":\"", "1", "\", \"machine_spec\": ", "{{$.inputs.parameters[''training_machine_spec'']}}", ", \"disk_spec\": ", "{{$.inputs.parameters[''training_disk_spec'']}}", - ", 
\"container_spec\": {\"image_uri\":\"", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/tabnet-training:20250129_0625", + ", \"container_spec\": {\"image_uri\":\"", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/tabnet-training:20250827_0525", "\", \"args\": [\"--target_column=", "{{$.inputs.parameters[''target_column'']}}", "\", \"--weight_column=", "{{$.inputs.parameters[''weight_column'']}}", "\", \"--model_type=", "{{$.inputs.parameters[''prediction_type'']}}", "\", - \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625", + \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525", "\", \"--baseline_path=", "{{$.inputs.artifacts[''instance_baseline''].uri}}", "\", \"--metadata_path=", "{{$.inputs.artifacts[''metadata''].uri}}", "\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", @@ -3492,7 +3492,7 @@ deploymentSpec: ["--temporal_total_weight=", "{{$.inputs.parameters[''temporal_total_weight'']}}"]}}}' - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 pipelineInfo: description: 'Train a model using the Tabular Workflow for TabNet pipelines. diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/wide_and_deep_hyperparameter_tuning_job.py b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/wide_and_deep_hyperparameter_tuning_job.py index e6214cd9b17..cc07bfe187d 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/wide_and_deep_hyperparameter_tuning_job.py +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/wide_and_deep_hyperparameter_tuning_job.py @@ -158,7 +158,7 @@ def wide_and_deep_hyperparameter_tuning_job( ', "disk_spec": ', training_disk_spec, ', "container_spec": {"image_uri":"', - 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/wide-and-deep-training:20250129_0625', + 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/wide-and-deep-training:20250827_0525', '", "args": ["--target_column=', target_column, '", "--weight_column=', @@ -166,7 +166,7 @@ def wide_and_deep_hyperparameter_tuning_job( '", "--model_type=', prediction_type, '", "--prediction_docker_uri=', - 'us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625', + 'us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525', '", "--prediction_docker_uri_artifact_path=', prediction_docker_uri_output, '", "--baseline_path=', diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/wide_and_deep_hyperparameter_tuning_job_pipeline.yaml b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/wide_and_deep_hyperparameter_tuning_job_pipeline.yaml index 6200e3a3526..25de5e78e75 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/wide_and_deep_hyperparameter_tuning_job_pipeline.yaml +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/wide_and_deep_hyperparameter_tuning_job_pipeline.yaml @@ -2632,7 +2632,7 @@ deploymentSpec: \"encryption_spec\": 
{\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"cancel_l2l_tuner\", \"--error_file_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb\", \"--cleanup_lro_job_infos=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/lro\"]}}]}}"]}' @@ -2647,7 +2647,7 @@ deploymentSpec: args: - --executor_input - '{{$}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 52.0 @@ -2672,7 +2672,7 @@ deploymentSpec: \ *\n\ndef _bool_identity(value: bool) -> str:\n \"\"\"Returns boolean\ \ value.\n\n Args:\n value: Boolean value to return\n\n Returns:\n\ \ Boolean value.\n \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-feature-transform-engine: container: args: @@ -2757,8 +2757,8 @@ deploymentSpec: "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp"]}' - '{"Concat": ["--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}"]}' - '{"Concat": ["--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}"]}' - - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 - - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 + - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 - '{"Concat": ["--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}"]}' - '{"Concat": ["--dataflow_subnetwork_fully_qualified=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}"]}' - '{"Concat": ["--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}"]}' @@ -2775,7 +2775,7 @@ deploymentSpec: - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - '{"Concat": ["--encryption_spec_key_name=", "{{$.inputs.parameters[''encryption_spec_key_name'']}}"]}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 30.0 @@ -2843,7 +2843,7 @@ deploymentSpec: \ = {\n 'instanceSchemaUri': instance_schema_uri,\n 'predictionSchemaUri':\ \ prediction_schema_uri,\n }\n unmanaged_container_model.uri = os.path.join(\n\ \ trials_dir, 'trial_{}'.format(best_trial['id']), 'model'\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-model-display-name: container: args: @@ -2870,7 +2870,7 @@ 
deploymentSpec: \n return collections.namedtuple(\n 'Outputs',\n [\n \ \ 'model_display_name',\n ],\n )(\n model_display_name,\n )\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-wide-and-deep-study-spec-parameters: container: args: @@ -3147,7 +3147,7 @@ deploymentSpec: \ 'training_disk_spec',\n 'eval_machine_spec',\n 'eval_replica_count',\n\ \ ],\n )(\n training_machine_spec,\n training_disk_spec,\n\ \ eval_machine_spec,\n eval_replica_count,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-set-optional-inputs: container: args: @@ -3195,7 +3195,7 @@ deploymentSpec: \ 'data_source_csv_filenames',\n 'data_source_bigquery_table_path',\n\ \ ],\n )(\n data_source_csv_filenames,\n data_source_bigquery_table_path,\n\ \ )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-split-materialized-data: container: args: @@ -3241,7 +3241,7 @@ deploymentSpec: \ 'w') as f:\n f.write(file_patterns[0])\n\n with tf.io.gfile.GFile(materialized_eval_split,\ \ 'w') as f:\n f.write(file_patterns[1])\n\n with tf.io.gfile.GFile(materialized_test_split,\ \ 'w') as f:\n f.write(file_patterns[2])\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 exec-training-configurator-and-validator: container: args: @@ -3286,7 +3286,7 @@ deploymentSpec: ["--temporal_total_weight=", "{{$.inputs.parameters[''temporal_total_weight'']}}"]}}}' - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 exec-wide-and-deep-hyperparameter-tuning-job: container: args: @@ -3314,11 +3314,11 @@ deploymentSpec: ", \"trial_job_spec\": {\"worker_pool_specs\": [{\"replica_count\":\"", "1", "\", \"machine_spec\": ", "{{$.inputs.parameters[''training_machine_spec'']}}", ", \"disk_spec\": ", "{{$.inputs.parameters[''training_disk_spec'']}}", - ", \"container_spec\": {\"image_uri\":\"", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/wide-and-deep-training:20250129_0625", + ", \"container_spec\": {\"image_uri\":\"", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/wide-and-deep-training:20250827_0525", "\", \"args\": [\"--target_column=", "{{$.inputs.parameters[''target_column'']}}", "\", \"--weight_column=", "{{$.inputs.parameters[''weight_column'']}}", "\", \"--model_type=", "{{$.inputs.parameters[''prediction_type'']}}", "\", - \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625", + \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525", "\", \"--prediction_docker_uri_artifact_path=", "{{$.outputs.parameters[''prediction_docker_uri_output''].output_file}}", "\", \"--baseline_path=", "{{$.inputs.artifacts[''instance_baseline''].uri}}", "\", \"--metadata_path=", "{{$.inputs.artifacts[''metadata''].uri}}", "\", diff --git 
a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/wide_and_deep_trainer.py b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/wide_and_deep_trainer.py index b024fe2c6be..3c3b246adeb 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/wide_and_deep_trainer.py +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/wide_and_deep_trainer.py @@ -161,7 +161,7 @@ def wide_and_deep_trainer( ', "disk_spec": ', training_disk_spec, ', "container_spec": {"image_uri":"', - 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/wide-and-deep-training:20250129_0625', + 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/wide-and-deep-training:20250827_0525', '", "args": ["--target_column=', target_column, '", "--weight_column=', @@ -169,7 +169,7 @@ def wide_and_deep_trainer( '", "--model_type=', prediction_type, '", "--prediction_docker_uri=', - 'us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625', + 'us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525', '", "--baseline_path=', instance_baseline.uri, '", "--metadata_path=', diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/wide_and_deep_trainer_pipeline.yaml b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/wide_and_deep_trainer_pipeline.yaml index f84b46aa7eb..f2d9847671d 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/wide_and_deep_trainer_pipeline.yaml +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/wide_and_deep_trainer_pipeline.yaml @@ -2674,7 +2674,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"cancel_l2l_tuner\", \"--error_file_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb\", \"--cleanup_lro_job_infos=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/lro\"]}}]}}"]}' @@ -2689,7 +2689,7 @@ deploymentSpec: args: - --executor_input - '{{$}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 52.0 @@ -2714,7 +2714,7 @@ deploymentSpec: \ *\n\ndef _bool_identity(value: bool) -> str:\n \"\"\"Returns boolean\ \ value.\n\n Args:\n value: Boolean value to return\n\n Returns:\n\ \ Boolean value.\n \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-feature-transform-engine: container: args: @@ -2799,8 +2799,8 @@ deploymentSpec: "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp"]}' - '{"Concat": ["--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}"]}' - '{"Concat": ["--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}"]}' - - 
--dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 - - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 + - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 - '{"Concat": ["--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}"]}' - '{"Concat": ["--dataflow_subnetwork_fully_qualified=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}"]}' - '{"Concat": ["--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}"]}' @@ -2817,7 +2817,7 @@ deploymentSpec: - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - '{"Concat": ["--encryption_spec_key_name=", "{{$.inputs.parameters[''encryption_spec_key_name'']}}"]}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 30.0 @@ -2847,7 +2847,7 @@ deploymentSpec: \n return collections.namedtuple(\n 'Outputs',\n [\n \ \ 'model_display_name',\n ],\n )(\n model_display_name,\n )\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-model-batch-predict: container: args: @@ -3040,7 +3040,7 @@ deploymentSpec: \ 'training_disk_spec',\n 'eval_machine_spec',\n 'eval_replica_count',\n\ \ ],\n )(\n training_machine_spec,\n training_disk_spec,\n\ \ eval_machine_spec,\n eval_replica_count,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-set-optional-inputs: container: args: @@ -3088,7 +3088,7 @@ deploymentSpec: \ 'data_source_csv_filenames',\n 'data_source_bigquery_table_path',\n\ \ ],\n )(\n data_source_csv_filenames,\n data_source_bigquery_table_path,\n\ \ )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-split-materialized-data: container: args: @@ -3134,7 +3134,7 @@ deploymentSpec: \ 'w') as f:\n f.write(file_patterns[0])\n\n with tf.io.gfile.GFile(materialized_eval_split,\ \ 'w') as f:\n f.write(file_patterns[1])\n\n with tf.io.gfile.GFile(materialized_test_split,\ \ 'w') as f:\n f.write(file_patterns[2])\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 exec-training-configurator-and-validator: container: args: @@ -3179,7 +3179,7 @@ deploymentSpec: ["--temporal_total_weight=", "{{$.inputs.parameters[''temporal_total_weight'']}}"]}}}' - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 exec-wide-and-deep-trainer: container: args: @@ -3197,11 +3197,11 
@@ deploymentSpec: "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\":\"", "1", "\", \"machine_spec\": ", "{{$.inputs.parameters[''training_machine_spec'']}}", ", \"disk_spec\": ", "{{$.inputs.parameters[''training_disk_spec'']}}", - ", \"container_spec\": {\"image_uri\":\"", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/wide-and-deep-training:20250129_0625", + ", \"container_spec\": {\"image_uri\":\"", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/wide-and-deep-training:20250827_0525", "\", \"args\": [\"--target_column=", "{{$.inputs.parameters[''target_column'']}}", "\", \"--weight_column=", "{{$.inputs.parameters[''weight_column'']}}", "\", \"--model_type=", "{{$.inputs.parameters[''prediction_type'']}}", "\", - \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625", + \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525", "\", \"--baseline_path=", "{{$.inputs.artifacts[''instance_baseline''].uri}}", "\", \"--metadata_path=", "{{$.inputs.artifacts[''metadata''].uri}}", "\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/xgboost_hyperparameter_tuning_job_pipeline.yaml b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/xgboost_hyperparameter_tuning_job_pipeline.yaml index 0e59dc04992..ee95c0ec448 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/xgboost_hyperparameter_tuning_job_pipeline.yaml +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/xgboost_hyperparameter_tuning_job_pipeline.yaml @@ -2620,7 +2620,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"cancel_l2l_tuner\", \"--error_file_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb\", \"--cleanup_lro_job_infos=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/lro\"]}}]}}"]}' @@ -2651,7 +2651,7 @@ deploymentSpec: \ *\n\ndef _bool_identity(value: bool) -> str:\n \"\"\"Returns boolean\ \ value.\n\n Args:\n value: Boolean value to return\n\n Returns:\n\ \ Boolean value.\n \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-feature-transform-engine: container: args: @@ -2736,8 +2736,8 @@ deploymentSpec: "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp"]}' - '{"Concat": ["--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}"]}' - '{"Concat": ["--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}"]}' - - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 - - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + - 
--dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 + - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 - '{"Concat": ["--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}"]}' - '{"Concat": ["--dataflow_subnetwork_fully_qualified=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}"]}' - '{"Concat": ["--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}"]}' @@ -2754,7 +2754,7 @@ deploymentSpec: - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - '{"Concat": ["--encryption_spec_key_name=", "{{$.inputs.parameters[''encryption_spec_key_name'']}}"]}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 30.0 @@ -2818,7 +2818,7 @@ deploymentSpec: \ return re.sub(r'^/gcs/', r'gs://', path)\n\n master_worker_pool_spec\ \ = {\n 'replica_count': 1,\n 'machine_spec': {\n 'machine_type':\ \ machine_type,\n },\n 'container_spec': {\n 'image_uri':\ - \ 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/xgboost-training:20250129_0625',\n\ + \ 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/xgboost-training:20250827_0525',\n\ \ 'args': [\n f'--job_dir={get_gcs_path(job_dir)}',\n\ \ f'--instance_schema_path={get_gcs_path(instance_schema_uri)}',\n\ \ f'--prediction_schema_path={get_gcs_path(prediction_schema_uri)}',\n\ @@ -2831,7 +2831,7 @@ deploymentSpec: \ f'--baseline_path={get_gcs_path(instance_baseline)}',\n \ \ f'--eval_metric={eval_metric}',\n f'--disable_default_eval_metric={disable_default_eval_metric}',\n\ \ f'--seed={seed}',\n f'--seed_per_iteration={seed_per_iteration}',\n\ - \ '--prediction_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/xgboost-prediction-server:20250129_0625',\n\ + \ '--prediction_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/xgboost-prediction-server:20250827_0525',\n\ \ ],\n },\n }\n\n # Add optional arguments if set\n if\ \ weight_column:\n master_worker_pool_spec['container_spec']['args'].append(\n\ \ f'--weight_column={weight_column}'\n )\n\n # Add accelerator_type\ @@ -2850,7 +2850,7 @@ deploymentSpec: \ ],\n )(\n worker_pool_specs_lst,\n get_gcs_path(instance_schema_uri),\n\ \ get_gcs_path(prediction_schema_uri),\n get_gcs_path(trials),\n\ \ get_gcs_path(prediction_docker_uri_output),\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-best-hyperparameter-tuning-job-trial: container: args: @@ -2915,7 +2915,7 @@ deploymentSpec: \ = {\n 'instanceSchemaUri': instance_schema_uri,\n 'predictionSchemaUri':\ \ prediction_schema_uri,\n }\n unmanaged_container_model.uri = os.path.join(\n\ \ trials_dir, 'trial_{}'.format(best_trial['id']), 'model'\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-model-display-name: container: args: @@ -2942,7 +2942,7 @@ deploymentSpec: \n return collections.namedtuple(\n 'Outputs',\n [\n \ \ 'model_display_name',\n ],\n )(\n model_display_name,\n )\n\ \n" - image: 
us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-prediction-type-for-xgboost: container: args: @@ -2971,7 +2971,7 @@ deploymentSpec: \ Must be one of'\n ' [reg:squarederror, reg:squaredlogerror, reg:logistic,\ \ reg:gamma,'\n ' reg:tweedie, reg:pseudohubererror, binary:logistic,'\n\ \ ' multi:softprob].'\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-xgboost-study-spec-parameters: container: args: @@ -3546,7 +3546,7 @@ deploymentSpec: \ 'data_source_csv_filenames',\n 'data_source_bigquery_table_path',\n\ \ ],\n )(\n data_source_csv_filenames,\n data_source_bigquery_table_path,\n\ \ )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-split-materialized-data: container: args: @@ -3592,7 +3592,7 @@ deploymentSpec: \ 'w') as f:\n f.write(file_patterns[0])\n\n with tf.io.gfile.GFile(materialized_eval_split,\ \ 'w') as f:\n f.write(file_patterns[1])\n\n with tf.io.gfile.GFile(materialized_test_split,\ \ 'w') as f:\n f.write(file_patterns[2])\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 exec-training-configurator-and-validator: container: args: @@ -3637,7 +3637,7 @@ deploymentSpec: ["--temporal_total_weight=", "{{$.inputs.parameters[''temporal_total_weight'']}}"]}}}' - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 exec-xgboost-hyperparameter-tuning-job: container: args: diff --git a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/xgboost_trainer_pipeline.yaml b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/xgboost_trainer_pipeline.yaml index a0640473259..8ee43211123 100644 --- a/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/xgboost_trainer_pipeline.yaml +++ b/components/google-cloud/google_cloud_pipeline_components/preview/automl/tabular/xgboost_trainer_pipeline.yaml @@ -2844,7 +2844,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"cancel_l2l_tuner\", \"--error_file_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb\", \"--cleanup_lro_job_infos=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/lro\"]}}]}}"]}' @@ -2875,7 +2875,7 @@ deploymentSpec: \ *\n\ndef _bool_identity(value: bool) -> str:\n \"\"\"Returns boolean\ \ value.\n\n Args:\n value: Boolean value to return\n\n Returns:\n\ \ Boolean value.\n \"\"\"\n return 'true' if 
value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-feature-transform-engine: container: args: @@ -2960,8 +2960,8 @@ deploymentSpec: "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp"]}' - '{"Concat": ["--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}"]}' - '{"Concat": ["--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}"]}' - - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 - - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 + - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 - '{"Concat": ["--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}"]}' - '{"Concat": ["--dataflow_subnetwork_fully_qualified=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}"]}' - '{"Concat": ["--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}"]}' @@ -2978,7 +2978,7 @@ deploymentSpec: - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - '{"Concat": ["--encryption_spec_key_name=", "{{$.inputs.parameters[''encryption_spec_key_name'']}}"]}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 30.0 @@ -3098,10 +3098,10 @@ deploymentSpec: \ worker pool specs.\n \"\"\"\n import copy\n import collections\n import\ \ os\n import re\n\n def get_gcs_path(path):\n return re.sub(r'/gcs/',\ \ 'gs://', path)\n\n formatted_job_dir = get_gcs_path(job_dir)\n prediction_docker_uri\ - \ = (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/xgboost-prediction-server:20250129_0625'\n\ + \ = (\n 'us-docker.pkg.dev/vertex-ai/automl-tabular/xgboost-prediction-server:20250827_0525'\n\ \ )\n master_worker_pool_spec = {\n 'replica_count': 1,\n 'machine_spec':\ \ {\n 'machine_type': machine_type,\n },\n 'container_spec':\ - \ {\n 'image_uri': 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/xgboost-training:20250129_0625',\n\ + \ {\n 'image_uri': 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/xgboost-training:20250827_0525',\n\ \ 'args': [\n f'--job_dir={formatted_job_dir}',\n\ \ f'--target_column={target_column}',\n f'--objective={objective}',\n\ \ f'--training_data_path={get_gcs_path(materialized_train_split)}',\n\ @@ -3159,7 +3159,7 @@ deploymentSpec: \ 'predictionSchemaUri': os.path.join(model_dir, 'prediction_schema.yaml'),\n\ \ }\n unmanaged_container_model.uri = model_dir\n\n return collections.namedtuple('Outputs',\ \ ['worker_pool_specs'])(\n worker_pool_specs_lst\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-model-display-name: container: args: @@ -3186,7 +3186,7 @@ deploymentSpec: \n return collections.namedtuple(\n 'Outputs',\n [\n \ \ 'model_display_name',\n ],\n )(\n model_display_name,\n )\n\ \n" - image: 
us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-prediction-type-for-xgboost: container: args: @@ -3215,7 +3215,7 @@ deploymentSpec: \ Must be one of'\n ' [reg:squarederror, reg:squaredlogerror, reg:logistic,\ \ reg:gamma,'\n ' reg:tweedie, reg:pseudohubererror, binary:logistic,'\n\ \ ' multi:softprob].'\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-model-batch-predict: container: args: @@ -3407,7 +3407,7 @@ deploymentSpec: \ 'data_source_csv_filenames',\n 'data_source_bigquery_table_path',\n\ \ ],\n )(\n data_source_csv_filenames,\n data_source_bigquery_table_path,\n\ \ )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-split-materialized-data: container: args: @@ -3453,7 +3453,7 @@ deploymentSpec: \ 'w') as f:\n f.write(file_patterns[0])\n\n with tf.io.gfile.GFile(materialized_eval_split,\ \ 'w') as f:\n f.write(file_patterns[1])\n\n with tf.io.gfile.GFile(materialized_test_split,\ \ 'w') as f:\n f.write(file_patterns[2])\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 exec-training-configurator-and-validator: container: args: @@ -3498,7 +3498,7 @@ deploymentSpec: ["--temporal_total_weight=", "{{$.inputs.parameters[''temporal_total_weight'']}}"]}}}' - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 exec-xgboost-trainer: container: args: diff --git a/components/google-cloud/google_cloud_pipeline_components/v1/automl/forecasting/bqml_arima_predict_pipeline.yaml b/components/google-cloud/google_cloud_pipeline_components/v1/automl/forecasting/bqml_arima_predict_pipeline.yaml index 4c43eb366d8..4586942024c 100644 --- a/components/google-cloud/google_cloud_pipeline_components/v1/automl/forecasting/bqml_arima_predict_pipeline.yaml +++ b/components/google-cloud/google_cloud_pipeline_components/v1/automl/forecasting/bqml_arima_predict_pipeline.yaml @@ -658,7 +658,7 @@ deploymentSpec: \ = client.create_dataset(dataset=dataset, exists_ok=exists_ok)\n return\ \ collections.namedtuple('Outputs', ['project_id', 'dataset_id'])(\n \ \ ref.project, ref.dataset_id)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-bigquery-create-dataset-2: container: args: @@ -693,7 +693,7 @@ deploymentSpec: \ = client.create_dataset(dataset=dataset, exists_ok=exists_ok)\n return\ \ collections.namedtuple('Outputs', ['project_id', 'dataset_id'])(\n \ \ ref.project, ref.dataset_id)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-bigquery-delete-dataset-with-prefix: container: args: @@ -727,7 +727,7 @@ deploymentSpec: \ if dataset.dataset_id.startswith(dataset_prefix):\n client.delete_dataset(\n\ \ dataset=dataset.dataset_id,\n 
delete_contents=delete_contents)\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-bigquery-query-job: container: args: @@ -788,7 +788,7 @@ deploymentSpec: \ 'datasetId': dataset_id,\n 'tableId': table_id,\n }\n\ \ if write_disposition:\n config['write_disposition'] = write_disposition\n\ \ return config\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-first-valid: container: args: @@ -812,7 +812,7 @@ deploymentSpec: \ import json\n # pylint: enable=g-import-not-at-top,import-outside-toplevel,redefined-outer-name,reimported\n\ \n for value in json.loads(values):\n if value:\n return value\n\ \ raise ValueError('No valid values.')\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-model-metadata: container: args: @@ -851,7 +851,7 @@ deploymentSpec: \ 'forecast_horizon',\n ],\n )(\n options.time_series_timestamp_column,\n\ \ options.time_series_id_column,\n options.time_series_data_column,\n\ \ options.horizon,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-table-location: container: args: @@ -887,7 +887,7 @@ deploymentSpec: \ if table.startswith('bq://'):\n table = table[len('bq://'):]\n elif\ \ table.startswith('bigquery://'):\n table = table[len('bigquery://'):]\n\ \ return client.get_table(table).location\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-load-table-from-uri: container: args: @@ -928,7 +928,7 @@ deploymentSpec: \ source_format=source_format)\n client.load_table_from_uri(\n source_uris=csv_list,\n\ \ destination=destination,\n project=project,\n location=location,\n\ \ job_config=job_config).result()\n return destination\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-maybe-replace-with-default: container: args: @@ -950,7 +950,7 @@ deploymentSpec: \ *\n\ndef maybe_replace_with_default(value: str, default: str = '') ->\ \ str:\n \"\"\"Replaces string with another value if it is a dash.\"\"\"\ \n return default if not value else value\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-validate-inputs: container: args: @@ -1046,7 +1046,7 @@ deploymentSpec: \ raise ValueError(\n 'Granularity unit should be one of the\ \ following: '\n f'{valid_data_granularity_units}, got: {data_granularity_unit}.')\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 pipelineInfo: description: Forecasts using a BQML ARIMA_PLUS model. 
name: automl-tabular-bqml-arima-prediction diff --git a/components/google-cloud/google_cloud_pipeline_components/v1/automl/forecasting/bqml_arima_train_pipeline.yaml b/components/google-cloud/google_cloud_pipeline_components/v1/automl/forecasting/bqml_arima_train_pipeline.yaml index 29aef1e5631..ba5b67bf72e 100644 --- a/components/google-cloud/google_cloud_pipeline_components/v1/automl/forecasting/bqml_arima_train_pipeline.yaml +++ b/components/google-cloud/google_cloud_pipeline_components/v1/automl/forecasting/bqml_arima_train_pipeline.yaml @@ -3399,7 +3399,7 @@ deploymentSpec: \ = client.create_dataset(dataset=dataset, exists_ok=exists_ok)\n return\ \ collections.namedtuple('Outputs', ['project_id', 'dataset_id'])(\n \ \ ref.project, ref.dataset_id)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-bigquery-create-dataset-2: container: args: @@ -3434,7 +3434,7 @@ deploymentSpec: \ = client.create_dataset(dataset=dataset, exists_ok=exists_ok)\n return\ \ collections.namedtuple('Outputs', ['project_id', 'dataset_id'])(\n \ \ ref.project, ref.dataset_id)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-bigquery-create-model-job: container: args: @@ -3494,7 +3494,7 @@ deploymentSpec: \ if dataset.dataset_id.startswith(dataset_prefix):\n client.delete_dataset(\n\ \ dataset=dataset.dataset_id,\n delete_contents=delete_contents)\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-bigquery-list-rows: container: args: @@ -3532,7 +3532,7 @@ deploymentSpec: \ metadata['datasetId'], metadata['tableId']]))\n result = []\n for row\ \ in rows:\n result.append({col: str(value) for col, value in dict(row).items()})\n\ \ return result\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-bigquery-list-rows-2: container: args: @@ -3570,7 +3570,7 @@ deploymentSpec: \ metadata['datasetId'], metadata['tableId']]))\n result = []\n for row\ \ in rows:\n result.append({col: str(value) for col, value in dict(row).items()})\n\ \ return result\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-bigquery-query-job: container: args: @@ -3739,7 +3739,7 @@ deploymentSpec: \ 'datasetId': dataset_id,\n 'tableId': table_id,\n }\n\ \ if write_disposition:\n config['write_disposition'] = write_disposition\n\ \ return config\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-build-job-configuration-query-2: container: args: @@ -3773,7 +3773,7 @@ deploymentSpec: \ 'datasetId': dataset_id,\n 'tableId': table_id,\n }\n\ \ if write_disposition:\n config['write_disposition'] = write_disposition\n\ \ return config\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-build-job-configuration-query-3: container: args: @@ -3807,7 +3807,7 @@ deploymentSpec: \ 'datasetId': dataset_id,\n 'tableId': table_id,\n }\n\ \ if write_disposition:\n 
config['write_disposition'] = write_disposition\n\ \ return config\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-build-job-configuration-query-4: container: args: @@ -3841,7 +3841,7 @@ deploymentSpec: \ 'datasetId': dataset_id,\n 'tableId': table_id,\n }\n\ \ if write_disposition:\n config['write_disposition'] = write_disposition\n\ \ return config\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-build-job-configuration-query-5: container: args: @@ -3875,7 +3875,7 @@ deploymentSpec: \ 'datasetId': dataset_id,\n 'tableId': table_id,\n }\n\ \ if write_disposition:\n config['write_disposition'] = write_disposition\n\ \ return config\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-build-job-configuration-query-6: container: args: @@ -3909,7 +3909,7 @@ deploymentSpec: \ 'datasetId': dataset_id,\n 'tableId': table_id,\n }\n\ \ if write_disposition:\n config['write_disposition'] = write_disposition\n\ \ return config\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-build-serialized-query-parameters: container: args: @@ -3980,7 +3980,7 @@ deploymentSpec: \ 'name': 'start_time',\n 'parameterType': {\n 'type':\ \ 'TIMESTAMP'\n },\n 'parameterValue': {\n 'value': start_time\n\ \ },\n })\n return query_parameters\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-build-serialized-query-parameters-2: container: args: @@ -4051,7 +4051,7 @@ deploymentSpec: \ 'name': 'start_time',\n 'parameterType': {\n 'type':\ \ 'TIMESTAMP'\n },\n 'parameterValue': {\n 'value': start_time\n\ \ },\n })\n return query_parameters\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-build-serialized-query-parameters-3: container: args: @@ -4122,7 +4122,7 @@ deploymentSpec: \ 'name': 'start_time',\n 'parameterType': {\n 'type':\ \ 'TIMESTAMP'\n },\n 'parameterValue': {\n 'value': start_time\n\ \ },\n })\n return query_parameters\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-cond: container: args: @@ -4144,7 +4144,7 @@ deploymentSpec: \ *\n\ndef cond(predicate: bool, true_str: str, false_str: str) -> str:\n\ \ \"\"\"Returns true_str if predicate is true, else false_str.\"\"\"\n\ \ return true_str if predicate else false_str\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-create-metrics-artifact: container: args: @@ -4170,7 +4170,7 @@ deploymentSpec: \ 'MAPE': 'meanAbsolutePercentageError',\n }\n metrics = {metric_name_map[k]:\ \ v for k, v in dict(metrics_rows[0]).items()}\n evaluation_metrics.metadata\ \ = metrics\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-feature-transform-engine: 
container: args: @@ -4255,8 +4255,8 @@ deploymentSpec: "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp"]}' - '{"Concat": ["--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}"]}' - '{"Concat": ["--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}"]}' - - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 - - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 + - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 - '{"Concat": ["--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}"]}' - '{"Concat": ["--dataflow_subnetwork_fully_qualified=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}"]}' - '{"Concat": ["--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}"]}' @@ -4273,7 +4273,7 @@ deploymentSpec: - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - '{"Concat": ["--encryption_spec_key_name=", "{{$.inputs.parameters[''encryption_spec_key_name'']}}"]}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 exec-get-fte-suffix: container: args: @@ -4301,7 +4301,7 @@ deploymentSpec: \ table.table_id.startswith(fte_table):\n return table.table_id[len(fte_table)\ \ + 1:]\n raise ValueError(\n f'No FTE output tables found in {bigquery_staging_full_dataset_id}.')\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-table-location: container: args: @@ -4337,7 +4337,7 @@ deploymentSpec: \ if table.startswith('bq://'):\n table = table[len('bq://'):]\n elif\ \ table.startswith('bigquery://'):\n table = table[len('bigquery://'):]\n\ \ return client.get_table(table).location\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-value: container: args: @@ -4358,7 +4358,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef get_value(d: Dict[str, str], key: str) -> str:\n return d[key]\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-window-query-priority: container: args: @@ -4382,7 +4382,7 @@ deploymentSpec: \ depending on the window number.\"\"\"\n if int(window['window_number'])\ \ <= max_interactive:\n return 'INTERACTIVE'\n else:\n return 'BATCH'\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-maybe-replace-with-default: container: args: @@ -4404,7 +4404,7 @@ deploymentSpec: \ *\n\ndef maybe_replace_with_default(value: str, default: str = '') ->\ \ str:\n \"\"\"Replaces string with another value if it is a dash.\"\"\"\ \n return default if not value else value\n\n" - image: 
us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-query-with-retry: container: args: @@ -4458,7 +4458,7 @@ deploymentSpec: \ 'Query failed with %s. Retrying after %d seconds.', e, wait_time)\n\ \ time.sleep(wait_time)\n retry_count += 1\n return destination_uri\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-query-with-retry-2: container: args: @@ -4512,7 +4512,7 @@ deploymentSpec: \ 'Query failed with %s. Retrying after %d seconds.', e, wait_time)\n\ \ time.sleep(wait_time)\n retry_count += 1\n return destination_uri\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-query-with-retry-3: container: args: @@ -4566,7 +4566,7 @@ deploymentSpec: \ 'Query failed with %s. Retrying after %d seconds.', e, wait_time)\n\ \ time.sleep(wait_time)\n retry_count += 1\n return destination_uri\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-table-to-uri: container: args: @@ -4596,7 +4596,7 @@ deploymentSpec: \ if use_bq_prefix:\n bq_uri = 'bq://' + bq_uri\n outputs.append(bq_uri)\n\ \ return collections.namedtuple(\n 'Outputs',\n ['project_id',\ \ 'dataset_id', 'table_id', 'uri'],\n )(*outputs)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-table-to-uri-2: container: args: @@ -4626,7 +4626,7 @@ deploymentSpec: \ if use_bq_prefix:\n bq_uri = 'bq://' + bq_uri\n outputs.append(bq_uri)\n\ \ return collections.namedtuple(\n 'Outputs',\n ['project_id',\ \ 'dataset_id', 'table_id', 'uri'],\n )(*outputs)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-validate-inputs: container: args: @@ -4722,7 +4722,7 @@ deploymentSpec: \ raise ValueError(\n 'Granularity unit should be one of the\ \ following: '\n f'{valid_data_granularity_units}, got: {data_granularity_unit}.')\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-wrapped-in-list: container: args: @@ -4743,7 +4743,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef wrapped_in_list(value: str) -> List[str]:\n \"\"\"Wraps a string\ \ in a list.\"\"\"\n return [value]\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 pipelineInfo: description: Trains a BQML ARIMA_PLUS model. 
name: automl-tabular-bqml-arima-train diff --git a/components/google-cloud/google_cloud_pipeline_components/v1/automl/forecasting/prophet_predict_pipeline.yaml b/components/google-cloud/google_cloud_pipeline_components/v1/automl/forecasting/prophet_predict_pipeline.yaml index d6e5b8d8f00..fd43d5186fe 100644 --- a/components/google-cloud/google_cloud_pipeline_components/v1/automl/forecasting/prophet_predict_pipeline.yaml +++ b/components/google-cloud/google_cloud_pipeline_components/v1/automl/forecasting/prophet_predict_pipeline.yaml @@ -1461,7 +1461,7 @@ deploymentSpec: \ = client.create_dataset(dataset=dataset, exists_ok=exists_ok)\n return\ \ collections.namedtuple('Outputs', ['project_id', 'dataset_id'])(\n \ \ ref.project, ref.dataset_id)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-bigquery-delete-dataset-with-prefix: container: args: @@ -1495,7 +1495,7 @@ deploymentSpec: \ if dataset.dataset_id.startswith(dataset_prefix):\n client.delete_dataset(\n\ \ dataset=dataset.dataset_id,\n delete_contents=delete_contents)\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-bigquery-query-job: container: args: @@ -1583,7 +1583,7 @@ deploymentSpec: \ 'datasetId': dataset_id,\n 'tableId': table_id,\n }\n\ \ if write_disposition:\n config['write_disposition'] = write_disposition\n\ \ return config\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-build-job-configuration-query-2: container: args: @@ -1617,7 +1617,7 @@ deploymentSpec: \ 'datasetId': dataset_id,\n 'tableId': table_id,\n }\n\ \ if write_disposition:\n config['write_disposition'] = write_disposition\n\ \ return config\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-first-valid: container: args: @@ -1641,7 +1641,7 @@ deploymentSpec: \ import json\n # pylint: enable=g-import-not-at-top,import-outside-toplevel,redefined-outer-name,reimported\n\ \n for value in json.loads(values):\n if value:\n return value\n\ \ raise ValueError('No valid values.')\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-table-location: container: args: @@ -1677,7 +1677,7 @@ deploymentSpec: \ if table.startswith('bq://'):\n table = table[len('bq://'):]\n elif\ \ table.startswith('bigquery://'):\n table = table[len('bigquery://'):]\n\ \ return client.get_table(table).location\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-table-location-2: container: args: @@ -1713,7 +1713,7 @@ deploymentSpec: \ if table.startswith('bq://'):\n table = table[len('bq://'):]\n elif\ \ table.startswith('bigquery://'):\n table = table[len('bigquery://'):]\n\ \ return client.get_table(table).location\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-load-table-from-uri: container: args: @@ -1754,7 +1754,7 @@ deploymentSpec: \ source_format=source_format)\n 
client.load_table_from_uri(\n source_uris=csv_list,\n\ \ destination=destination,\n project=project,\n location=location,\n\ \ job_config=job_config).result()\n return destination\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-make-vertex-model-artifact: container: args: @@ -1778,7 +1778,7 @@ deploymentSpec: Creates a google.VertexModel artifact.\"\"\"\n vertex_model.metadata =\ \ {'resourceName': model_resource_name}\n vertex_model.uri = (f'https://{location}-aiplatform.googleapis.com'\n\ \ f'/v1/{model_resource_name}')\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-maybe-replace-with-default: container: args: @@ -1800,7 +1800,7 @@ deploymentSpec: \ *\n\ndef maybe_replace_with_default(value: str, default: str = '') ->\ \ str:\n \"\"\"Replaces string with another value if it is a dash.\"\"\"\ \n return default if not value else value\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-model-batch-predict: container: args: @@ -1879,7 +1879,7 @@ deploymentSpec: \ if use_bq_prefix:\n bq_uri = 'bq://' + bq_uri\n outputs.append(bq_uri)\n\ \ return collections.namedtuple(\n 'Outputs',\n ['project_id',\ \ 'dataset_id', 'table_id', 'uri'],\n )(*outputs)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-table-to-uri-2: container: args: @@ -1909,7 +1909,7 @@ deploymentSpec: \ if use_bq_prefix:\n bq_uri = 'bq://' + bq_uri\n outputs.append(bq_uri)\n\ \ return collections.namedtuple(\n 'Outputs',\n ['project_id',\ \ 'dataset_id', 'table_id', 'uri'],\n )(*outputs)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-validate-inputs: container: args: @@ -2005,7 +2005,7 @@ deploymentSpec: \ raise ValueError(\n 'Granularity unit should be one of the\ \ following: '\n f'{valid_data_granularity_units}, got: {data_granularity_unit}.')\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 pipelineInfo: description: Creates a batch prediction using a Prophet model. 
name: prophet-predict diff --git a/components/google-cloud/google_cloud_pipeline_components/v1/automl/forecasting/prophet_trainer.py b/components/google-cloud/google_cloud_pipeline_components/v1/automl/forecasting/prophet_trainer.py index 3e18ac8402a..c58ee3dc8e3 100644 --- a/components/google-cloud/google_cloud_pipeline_components/v1/automl/forecasting/prophet_trainer.py +++ b/components/google-cloud/google_cloud_pipeline_components/v1/automl/forecasting/prophet_trainer.py @@ -108,17 +108,17 @@ def prophet_trainer( '"machine_spec": {"machine_type": "n1-standard-4"}, ', ( '"container_spec":' - ' {"image_uri":"us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", ' + ' {"image_uri":"us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", ' ), '"args": ["prophet_trainer", "', ( f'--job_name=dataflow-{dsl.PIPELINE_JOB_NAME_PLACEHOLDER}", "' ), ( - '--dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625", "' + '--dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525", "' ), ( - '--prediction_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/fte-prediction-server:20250129_0625", "' + '--prediction_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/fte-prediction-server:20250827_0525", "' ), '--artifacts_dir=', root_dir, diff --git a/components/google-cloud/google_cloud_pipeline_components/v1/automl/forecasting/prophet_trainer_pipeline.yaml b/components/google-cloud/google_cloud_pipeline_components/v1/automl/forecasting/prophet_trainer_pipeline.yaml index 7edd5701ec7..630bb5a5308 100644 --- a/components/google-cloud/google_cloud_pipeline_components/v1/automl/forecasting/prophet_trainer_pipeline.yaml +++ b/components/google-cloud/google_cloud_pipeline_components/v1/automl/forecasting/prophet_trainer_pipeline.yaml @@ -2021,7 +2021,7 @@ deploymentSpec: \ = client.create_dataset(dataset=dataset, exists_ok=exists_ok)\n return\ \ collections.namedtuple('Outputs', ['project_id', 'dataset_id'])(\n \ \ ref.project, ref.dataset_id)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-bigquery-delete-dataset-with-prefix: container: args: @@ -2055,7 +2055,7 @@ deploymentSpec: \ if dataset.dataset_id.startswith(dataset_prefix):\n client.delete_dataset(\n\ \ dataset=dataset.dataset_id,\n delete_contents=delete_contents)\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-bigquery-query-job: container: args: @@ -2116,7 +2116,7 @@ deploymentSpec: \ 'datasetId': dataset_id,\n 'tableId': table_id,\n }\n\ \ if write_disposition:\n config['write_disposition'] = write_disposition\n\ \ return config\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-feature-transform-engine: container: args: @@ -2201,8 +2201,8 @@ deploymentSpec: "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp"]}' - '{"Concat": ["--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}"]}' - '{"Concat": ["--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}"]}' - - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625 - - 
--feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + - --dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525 + - --feature_transform_engine_docker_uri=us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 - '{"Concat": ["--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}"]}' - '{"Concat": ["--dataflow_subnetwork_fully_qualified=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}"]}' - '{"Concat": ["--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}"]}' @@ -2219,7 +2219,7 @@ deploymentSpec: - '{"IfPresent": {"InputName": "group_temporal_total_weight", "Then": {"Concat": ["--group_temporal_total_weight=", "{{$.inputs.parameters[''group_temporal_total_weight'']}}"]}}}' - '{"Concat": ["--encryption_spec_key_name=", "{{$.inputs.parameters[''encryption_spec_key_name'']}}"]}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525 exec-get-fte-suffix: container: args: @@ -2247,7 +2247,7 @@ deploymentSpec: \ table.table_id.startswith(fte_table):\n return table.table_id[len(fte_table)\ \ + 1:]\n raise ValueError(\n f'No FTE output tables found in {bigquery_staging_full_dataset_id}.')\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-get-table-location: container: args: @@ -2283,7 +2283,7 @@ deploymentSpec: \ if table.startswith('bq://'):\n table = table[len('bq://'):]\n elif\ \ table.startswith('bigquery://'):\n table = table[len('bigquery://'):]\n\ \ return client.get_table(table).location\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-model-evaluation-regression: container: args: @@ -2394,10 +2394,10 @@ deploymentSpec: ", "\"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, ", "\"job_spec\": {\"worker_pool_specs\": [{\"replica_count\":\"1\", ", "\"machine_spec\": {\"machine_type\": \"n1-standard-4\"}, ", "\"container_spec\": - {\"image_uri\":\"us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625\", + {\"image_uri\":\"us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525\", ", "\"args\": [\"prophet_trainer\", \"", "--job_name=dataflow-{{$.pipeline_job_name}}\", - \"", "--dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625\", - \"", "--prediction_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/fte-prediction-server:20250129_0625\", + \"", "--dataflow_worker_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525\", + \"", "--prediction_container_image=us-docker.pkg.dev/vertex-ai/automl-tabular/fte-prediction-server:20250827_0525\", \"", "--artifacts_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/model/\", \"", "--evaluated_examples_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/eval/\", \"", "--region=", "{{$.inputs.parameters[''location'']}}", @@ -2458,7 +2458,7 @@ deploymentSpec: \ if use_bq_prefix:\n bq_uri = 'bq://' + bq_uri\n outputs.append(bq_uri)\n\ \ return 
collections.namedtuple(\n 'Outputs',\n ['project_id',\ \ 'dataset_id', 'table_id', 'uri'],\n )(*outputs)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-validate-inputs: container: args: @@ -2554,7 +2554,7 @@ deploymentSpec: \ raise ValueError(\n 'Granularity unit should be one of the\ \ following: '\n f'{valid_data_granularity_units}, got: {data_granularity_unit}.')\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-wrapped-in-list: container: args: @@ -2575,7 +2575,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef wrapped_in_list(value: str) -> List[str]:\n \"\"\"Wraps a string\ \ in a list.\"\"\"\n return [value]\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 pipelineInfo: description: Trains one Prophet model per time series. name: prophet-train diff --git a/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/automl_tabular_pipeline.yaml b/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/automl_tabular_pipeline.yaml index 2a159884376..bd6ef4c734c 100644 --- a/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/automl_tabular_pipeline.yaml +++ b/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/automl_tabular_pipeline.yaml @@ -8420,9 +8420,9 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"l2l_cv_tuner\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", - "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", + "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"--component_id={{$.pipeline_task_uuid}}\", \"--training_base_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/train\", \"--num_parallel_trial=", "{{$.inputs.parameters[''num_parallel_trials'']}}", @@ -8463,9 +8463,9 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"l2l_cv_tuner\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", - "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", + "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", 
\"--component_id={{$.pipeline_task_uuid}}\", \"--training_base_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/train\", \"--num_parallel_trial=", "{{$.inputs.parameters[''num_parallel_trials'']}}", @@ -8506,7 +8506,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-highmem-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"ensemble\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", "\", \"--model_output_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/model\", \"--custom_model_output_path=", "{{$.inputs.parameters[''root_dir'']}}", @@ -8518,7 +8518,7 @@ deploymentSpec: "\", \"--tuning_result_input_path=", "{{$.inputs.artifacts[''tuning_result_input''].uri}}", "\", \"--instance_baseline_path=", "{{$.inputs.artifacts[''instance_baseline''].uri}}", "\", \"--warmup_data=", "{{$.inputs.artifacts[''warmup_data''].uri}}", "\", - \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625", + \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525", "\", \"--model_path=", "{{$.outputs.artifacts[''model''].uri}}", "\", \"--custom_model_path=", "{{$.outputs.artifacts[''model_without_custom_ops''].uri}}", "\", \"--explanation_metadata_path=", "{{$.outputs.parameters[''explanation_metadata''].output_file}}", ",", "{{$.outputs.artifacts[''explanation_metadata_artifact''].uri}}", @@ -8547,7 +8547,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-highmem-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"ensemble\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", "\", \"--model_output_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/model\", \"--custom_model_output_path=", "{{$.inputs.parameters[''root_dir'']}}", @@ -8559,7 +8559,7 @@ deploymentSpec: "\", \"--tuning_result_input_path=", "{{$.inputs.artifacts[''tuning_result_input''].uri}}", "\", \"--instance_baseline_path=", "{{$.inputs.artifacts[''instance_baseline''].uri}}", "\", \"--warmup_data=", "{{$.inputs.artifacts[''warmup_data''].uri}}", "\", - \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625", + \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525", "\", \"--model_path=", "{{$.outputs.artifacts[''model''].uri}}", "\", \"--custom_model_path=", "{{$.outputs.artifacts[''model_without_custom_ops''].uri}}", "\", \"--explanation_metadata_path=", "{{$.outputs.parameters[''explanation_metadata''].output_file}}", ",", "{{$.outputs.artifacts[''explanation_metadata_artifact''].uri}}", @@ -8588,7 +8588,7 @@ deploymentSpec: 
\"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-highmem-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"ensemble\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", "\", \"--model_output_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/model\", \"--custom_model_output_path=", "{{$.inputs.parameters[''root_dir'']}}", @@ -8600,7 +8600,7 @@ deploymentSpec: "\", \"--tuning_result_input_path=", "{{$.inputs.artifacts[''tuning_result_input''].uri}}", "\", \"--instance_baseline_path=", "{{$.inputs.artifacts[''instance_baseline''].uri}}", "\", \"--warmup_data=", "{{$.inputs.artifacts[''warmup_data''].uri}}", "\", - \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625", + \"--prediction_docker_uri=", "us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525", "\", \"--model_path=", "{{$.outputs.artifacts[''model''].uri}}", "\", \"--custom_model_path=", "{{$.outputs.artifacts[''model_without_custom_ops''].uri}}", "\", \"--explanation_metadata_path=", "{{$.outputs.parameters[''explanation_metadata''].output_file}}", ",", "{{$.outputs.artifacts[''explanation_metadata_artifact''].uri}}", @@ -8629,7 +8629,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"cancel_l2l_tuner\", \"--error_file_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/error.pb\", \"--cleanup_lro_job_infos=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/lro\"]}}]}}"]}' @@ -8644,7 +8644,7 @@ deploymentSpec: args: - --executor_input - '{{$}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 52.0 @@ -8653,7 +8653,7 @@ deploymentSpec: args: - --executor_input - '{{$}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 52.0 @@ -8662,7 +8662,7 @@ deploymentSpec: args: - --executor_input - '{{$}}' - image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525 resources: cpuLimit: 8.0 memoryLimit: 52.0 @@ -8682,9 +8682,9 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - 
"us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"l2l_stage_1_tuner\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", - "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", + "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"--feature_selection_result_path=", "{{$.inputs.artifacts[''feature_ranking''].uri}}", "\", \"--disable_early_stopping=", "{{$.inputs.parameters[''disable_early_stopping'']}}", "\", \"--tune_feature_selection_rate=", "{{$.inputs.parameters[''tune_feature_selection_rate'']}}", @@ -8729,9 +8729,9 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"l2l_stage_1_tuner\", \"--transform_output_path=", "{{$.inputs.artifacts[''transform_output''].uri}}", - "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", + "\", \"--training_docker_uri=", "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"--feature_selection_result_path=", "{{$.inputs.artifacts[''feature_ranking''].uri}}", "\", \"--disable_early_stopping=", "{{$.inputs.parameters[''disable_early_stopping'']}}", "\", \"--tune_feature_selection_rate=", "{{$.inputs.parameters[''tune_feature_selection_rate'']}}", @@ -8776,7 +8776,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"transform\", \"--is_mp=true\", \"--transform_output_artifact_path=", "{{$.outputs.artifacts[''transform_output''].uri}}", "\", \"--transform_output_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/transform\", @@ -8797,7 +8797,7 @@ deploymentSpec: \"--dataflow_tmp_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp\", \"--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}", "\", \"--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}", - "\", \"--dataflow_worker_container_image=", "us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625", + "\", \"--dataflow_worker_container_image=", "us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525", "\", \"--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}", "\", \"--dataflow_subnetwork_fully_qualified=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}", "\", \"--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}", @@ -8828,7 +8828,7 @@ 
deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"transform\", \"--is_mp=true\", \"--transform_output_artifact_path=", "{{$.outputs.artifacts[''transform_output''].uri}}", "\", \"--transform_output_path=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/transform\", @@ -8849,7 +8849,7 @@ deploymentSpec: \"--dataflow_tmp_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp\", \"--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}", "\", \"--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}", - "\", \"--dataflow_worker_container_image=", "us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625", + "\", \"--dataflow_worker_container_image=", "us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525", "\", \"--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}", "\", \"--dataflow_subnetwork_fully_qualified=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}", "\", \"--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}", @@ -8885,7 +8885,7 @@ deploymentSpec: \ *\n\ndef _bool_identity(value: bool) -> str:\n \"\"\"Returns boolean\ \ value.\n\n Args:\n value: Boolean value to return\n\n Returns:\n\ \ Boolean value.\n \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-bool-identity-2: container: args: @@ -8907,7 +8907,7 @@ deploymentSpec: \ *\n\ndef _bool_identity(value: bool) -> str:\n \"\"\"Returns boolean\ \ value.\n\n Args:\n value: Boolean value to return\n\n Returns:\n\ \ Boolean value.\n \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-bool-identity-3: container: args: @@ -8929,7 +8929,7 @@ deploymentSpec: \ *\n\ndef _bool_identity(value: bool) -> str:\n \"\"\"Returns boolean\ \ value.\n\n Args:\n value: Boolean value to return\n\n Returns:\n\ \ Boolean value.\n \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-calculate-training-parameters: container: args: @@ -9021,7 +9021,7 @@ deploymentSpec: \ stage_1_single_run_max_secs,\n stage_2_deadline_hours,\n \ \ stage_2_single_run_max_secs,\n distill_stage_1_deadline_hours,\n\ \ reduce_search_space_mode,\n )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-calculate-training-parameters-2: container: args: @@ -9113,7 +9113,7 @@ deploymentSpec: \ stage_1_single_run_max_secs,\n stage_2_deadline_hours,\n \ \ stage_2_single_run_max_secs,\n distill_stage_1_deadline_hours,\n\ \ reduce_search_space_mode,\n 
)\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-feature-attribution: container: args: @@ -9299,7 +9299,7 @@ deploymentSpec: \n return collections.namedtuple(\n 'Outputs',\n [\n \ \ 'model_display_name',\n ],\n )(\n model_display_name,\n )\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-importer: importer: artifactUri: @@ -9333,7 +9333,7 @@ deploymentSpec: \ 'r') as f:\n split_0_content = f.read()\n with open(split_1, 'r')\ \ as f:\n split_1_content = f.read()\n with open(splits, 'w') as f:\n\ \ f.write(','.join([split_0_content, split_1_content]))\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-model-batch-explanation: container: args: @@ -10158,7 +10158,7 @@ deploymentSpec: \ import json\n # pylint: enable=g-import-not-at-top,import-outside-toplevel,redefined-outer-name,reimported\n\ \ with open(split_uri, 'r') as f:\n data_source = json.loads(f.read())\n\ \ return data_source['tf_record_data_source']['file_patterns']\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-read-input-uri-2: container: args: @@ -10186,7 +10186,7 @@ deploymentSpec: \ import json\n # pylint: enable=g-import-not-at-top,import-outside-toplevel,redefined-outer-name,reimported\n\ \ with open(split_uri, 'r') as f:\n data_source = json.loads(f.read())\n\ \ return data_source['tf_record_data_source']['file_patterns']\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-set-optional-inputs: container: args: @@ -10234,7 +10234,7 @@ deploymentSpec: \ 'data_source_csv_filenames',\n 'data_source_bigquery_table_path',\n\ \ ],\n )(\n data_source_csv_filenames,\n data_source_bigquery_table_path,\n\ \ )\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-string-not-empty: container: args: @@ -10258,7 +10258,7 @@ deploymentSpec: \n Returns:\n Boolean value. 
-> 'true' if empty, 'false' if not empty.\ \ We need to use str\n instead of bool due to a limitation in KFP compiler.\n\ \ \"\"\"\n return 'true' if value else 'false'\n\n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-tabular-stats-and-example-gen: container: args: @@ -10275,7 +10275,7 @@ deploymentSpec: \"encryption_spec\": {\"kms_key_name\":\"", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", "\"}, \"job_spec\": {\"worker_pool_specs\": [{\"replica_count\": 1, \"machine_spec\": {\"machine_type\": \"n1-standard-8\"}, \"container_spec\": {\"image_uri\":\"", - "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625", "\", + "us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525", "\", \"args\": [\"stats_generator\",", "\"--train_spec={\\\"prediction_type\\\": \\\"", "{{$.inputs.parameters[''prediction_type'']}}", "\\\", \\\"target_column\\\": \\\"", "{{$.inputs.parameters[''target_column_name'']}}", "\\\", \\\"optimization_objective\\\": @@ -10308,7 +10308,7 @@ deploymentSpec: \"--dataflow_staging_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_staging\", \"--dataflow_tmp_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp\", \"--dataflow_max_num_workers=", "{{$.inputs.parameters[''dataflow_max_num_workers'']}}", - "\", \"--dataflow_worker_container_image=", "us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625", + "\", \"--dataflow_worker_container_image=", "us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525", "\", \"--dataflow_machine_type=", "{{$.inputs.parameters[''dataflow_machine_type'']}}", "\", \"--dataflow_disk_size_gb=", "{{$.inputs.parameters[''dataflow_disk_size_gb'']}}", "\", \"--dataflow_kms_key=", "{{$.inputs.parameters[''encryption_spec_key_name'']}}", @@ -10363,7 +10363,7 @@ deploymentSpec: \ f'{directory}/prediction.results-*',\n ],\n 'coder':\ \ 'PROTO_VALUE',\n },\n }\n with open(result, 'w') as f:\n f.write(json.dumps(data_source))\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 exec-write-bp-result-path-2: container: args: @@ -10393,7 +10393,7 @@ deploymentSpec: \ f'{directory}/prediction.results-*',\n ],\n 'coder':\ \ 'PROTO_VALUE',\n },\n }\n with open(result, 'w') as f:\n f.write(json.dumps(data_source))\n\ \n" - image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625 + image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250827_0525 pipelineInfo: description: 'Complete AutoML Tables pipeline. 
diff --git a/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/cv_trainer.py b/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/cv_trainer.py index b2650510e06..51b013e3489 100644 --- a/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/cv_trainer.py +++ b/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/cv_trainer.py @@ -99,11 +99,11 @@ def automl_tabular_cv_trainer( ' 1, "machine_spec": {"machine_type": "n1-standard-8"},' ' "container_spec": {"image_uri":"' ), - 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625', + 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525', '", "args": ["l2l_cv_tuner", "--transform_output_path=', transform_output.uri, '", "--training_docker_uri=', - 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625', + 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525', ( f'", "--component_id={dsl.PIPELINE_TASK_ID_PLACEHOLDER}",' ' "--training_base_dir=' diff --git a/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/ensemble.py b/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/ensemble.py index 31363588898..9293e7821f8 100644 --- a/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/ensemble.py +++ b/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/ensemble.py @@ -106,7 +106,7 @@ def automl_tabular_ensemble( ' 1, "machine_spec": {"machine_type": "n1-highmem-8"},' ' "container_spec": {"image_uri":"' ), - 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625', + 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525', '", "args": ["ensemble", "--transform_output_path=', transform_output.uri, '", "--model_output_path=', @@ -137,7 +137,7 @@ def automl_tabular_ensemble( '", "--warmup_data=', warmup_data.uri, '", "--prediction_docker_uri=', - 'us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625', + 'us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525', '", "--model_path=', model.uri, '", "--custom_model_path=', diff --git a/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/finalizer.py b/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/finalizer.py index d91f9fd3e17..729f47e9cc5 100644 --- a/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/finalizer.py +++ b/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/finalizer.py @@ -72,7 +72,7 @@ def automl_tabular_finalizer( ' 1, "machine_spec": {"machine_type": "n1-standard-8"},' ' "container_spec": {"image_uri":"' ), - 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625', + 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525', '", "args": ["cancel_l2l_tuner", "--error_file_path=', root_dir, ( diff --git a/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/infra_validator.py b/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/infra_validator.py index f615f9db351..e7c8223fccb 100644 --- a/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/infra_validator.py +++ b/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/infra_validator.py @@ -32,7 +32,7 @@ def automl_tabular_infra_validator( # fmt: on 
return dsl.ContainerSpec( - image='us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250129_0625', + image='us-docker.pkg.dev/vertex-ai/automl-tabular/prediction-server:20250827_0525', command=[], args=['--executor_input', '{{$}}'], ) diff --git a/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/split_materialized_data.py b/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/split_materialized_data.py index 41761279d97..4d1dd717dd5 100644 --- a/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/split_materialized_data.py +++ b/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/split_materialized_data.py @@ -52,7 +52,7 @@ def split_materialized_data( # fmt: on return dsl.ContainerSpec( - image='us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625', + image='us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525', command=[ 'sh', '-ec', diff --git a/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/stage_1_tuner.py b/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/stage_1_tuner.py index 1be3c0fd690..1e197198205 100644 --- a/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/stage_1_tuner.py +++ b/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/stage_1_tuner.py @@ -109,11 +109,11 @@ def automl_tabular_stage_1_tuner( ' 1, "machine_spec": {"machine_type": "n1-standard-8"},' ' "container_spec": {"image_uri":"' ), - 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625', + 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525', '", "args": ["l2l_stage_1_tuner", "--transform_output_path=', transform_output.uri, '", "--training_docker_uri=', - 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625', + 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525', '", "--feature_selection_result_path=', feature_ranking.uri, '", "--disable_early_stopping=', diff --git a/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/stats_and_example_gen.py b/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/stats_and_example_gen.py index 5ab15aebf2b..42bbe436166 100644 --- a/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/stats_and_example_gen.py +++ b/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/stats_and_example_gen.py @@ -136,7 +136,7 @@ def tabular_stats_and_example_gen( ' 1, "machine_spec": {"machine_type": "n1-standard-8"},' ' "container_spec": {"image_uri":"' ), - 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625', + 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525', '", "args": ["stats_generator",', '"--train_spec={\\"prediction_type\\": \\"', prediction_type, @@ -215,7 +215,7 @@ def tabular_stats_and_example_gen( ), dataflow_max_num_workers, '", "--dataflow_worker_container_image=', - 'us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625', + 'us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525', '", "--dataflow_machine_type=', dataflow_machine_type, '", "--dataflow_disk_size_gb=', diff --git a/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/training_configurator_and_validator.py 
b/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/training_configurator_and_validator.py index 893b3a61edd..95c2dde8ed1 100644 --- a/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/training_configurator_and_validator.py +++ b/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/training_configurator_and_validator.py @@ -95,7 +95,7 @@ def training_configurator_and_validator( # fmt: on return dsl.ContainerSpec( - image='us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250129_0625', + image='us-docker.pkg.dev/vertex-ai/automl-tabular/feature-transform-engine:20250827_0525', command=[], args=[ 'training_configurator_and_validator', diff --git a/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/transform.py b/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/transform.py index b95b5ed4223..0873b397931 100644 --- a/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/transform.py +++ b/components/google-cloud/google_cloud_pipeline_components/v1/automl/tabular/transform.py @@ -108,7 +108,7 @@ def automl_tabular_transform( ' 1, "machine_spec": {"machine_type": "n1-standard-8"},' ' "container_spec": {"image_uri":"' ), - 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250129_0625', + 'us-docker.pkg.dev/vertex-ai-restricted/automl-tabular/training:20250827_0525', ( '", "args": ["transform", "--is_mp=true",' ' "--transform_output_artifact_path=' @@ -167,7 +167,7 @@ def automl_tabular_transform( '", "--dataflow_machine_type=', dataflow_machine_type, '", "--dataflow_worker_container_image=', - 'us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250129_0625', + 'us-docker.pkg.dev/vertex-ai/automl-tabular/dataflow-worker:20250827_0525', '", "--dataflow_disk_size_gb=', dataflow_disk_size_gb, '", "--dataflow_subnetwork_fully_qualified=', diff --git a/components/google-cloud/google_cloud_pipeline_components/v1/custom_job/component.py b/components/google-cloud/google_cloud_pipeline_components/v1/custom_job/component.py index 84cdb748574..2c94016375b 100644 --- a/components/google-cloud/google_cloud_pipeline_components/v1/custom_job/component.py +++ b/components/google-cloud/google_cloud_pipeline_components/v1/custom_job/component.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Dict, List +from typing import Dict, List, Union from google_cloud_pipeline_components import _placeholders from google_cloud_pipeline_components import utils @@ -40,6 +40,7 @@ def custom_training_job( project: str = _placeholders.PROJECT_ID_PLACEHOLDER, strategy: str = 'STANDARD', max_wait_duration: str = '86400s', + psc_interface_config: Dict[str, Union[str, List[Dict[str, str]]]] = {}, ): # fmt: off """Launch a Vertex AI [custom training job](https://cloud.google.com/vertex-ai/docs/training/create-custom-job) using the [CustomJob](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.customJobs) API. See [Create custom training jobs ](https://cloud.google.com/vertex-ai/docs/training/create-custom-job) for more information. @@ -62,6 +63,7 @@ def custom_training_job( project: Project to create the custom training job in. Defaults to the project in which the PipelineJob is run. strategy: The strategy to use for the custom training job. The default is 'STANDARD'. 
See [more information](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/CustomJobSpec#Strategy). max_wait_duration: The maximum time to wait for the custom training job to be scheduled only if the scheduling strategy is set to FLEX_START. If set to 0, the job will wait indefinitely. The default is 24 hours. See [more information](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/CustomJobSpec#Strategy). + psc_interface_config: Configuration for running the CustomJob with a Private Service Connect Interface (PSC-I). See [more information](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/CustomJobSpec#PscInterfaceConfig). Returns: gcp_resources: Serialized JSON of `gcp_resources` [proto](https://github.com/kubeflow/pipelines/tree/master/components/google-cloud/google_cloud_pipeline_components/proto) which tracks the CustomJob. @@ -91,6 +93,7 @@ def custom_training_job( 'output_uri_prefix': base_output_directory }, 'persistent_resource_id': persistent_resource_id, + 'psc_interface_config': psc_interface_config, }, 'labels': labels, 'encryption_spec': {'kms_key_name': encryption_spec_key_name}, diff --git a/components/google-cloud/google_cloud_pipeline_components/v1/custom_job/utils.py b/components/google-cloud/google_cloud_pipeline_components/v1/custom_job/utils.py index 7581ff39ac0..37f6a41ef29 100644 --- a/components/google-cloud/google_cloud_pipeline_components/v1/custom_job/utils.py +++ b/components/google-cloud/google_cloud_pipeline_components/v1/custom_job/utils.py @@ -15,7 +15,7 @@ import copy import textwrap -from typing import Callable, Dict, List, Optional +from typing import Callable, Dict, List, Optional, Union import warnings from google_cloud_pipeline_components import _placeholders @@ -76,6 +76,9 @@ def create_custom_training_job_from_component( reservation_affinity_type: Optional[str] = None, reservation_affinity_key: Optional[str] = None, reservation_affinity_values: Optional[List[str]] = None, + psc_interface_config: Optional[ + Dict[str, Union[str, List[Dict[str, str]]]] + ] = None, ) -> Callable: # fmt: off """Convert a KFP component into Vertex AI [custom training job](https://cloud.google.com/vertex-ai/docs/training/create-custom-job) using the [CustomJob](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.customJobs) API. @@ -109,6 +112,7 @@ def create_custom_training_job_from_component( reservation_affinity_type: The type of [reservation affinity](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/MachineSpec#reservationaffinity). Valid values are "NO_RESERVATION", "ANY_RESERVATION", "SPECIFIC_RESERVATION". reservation_affinity_key: Corresponds to the label key of a reservation resource. To target a SPECIFIC_RESERVATION by name, use compute.googleapis.com/reservation-name as the key and specify the name of your reservation as its value. reservation_affinity_values: Corresponds to the label values of a reservation resource. This must be the full resource name of the reservation. + psc_interface_config: Configuration for running the CustomJob with a Private Service Connect Interface (PSC-I). See [more information](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/CustomJobSpec#PscInterfaceConfig). Returns: A KFP component with CustomJob specification applied.
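The hunks above and below thread a new `psc_interface_config` passthrough into both `custom_training_job` and `create_custom_training_job_from_component`. The snippet that follows is an illustrative usage sketch only (not part of this diff): the dictionary key mirrors the linked `PscInterfaceConfig` REST message, and the project, region, and network-attachment names are placeholders.

```python
# Illustrative sketch only (not part of this diff). The dict is forwarded
# as-is into the CustomJob job_spec; the "network_attachment" key mirrors the
# PscInterfaceConfig REST message and the resource name is a placeholder.
from google_cloud_pipeline_components.v1.custom_job import utils
from kfp import dsl


@dsl.component
def train():
    print("training...")


# Wrap the component so it runs as a Vertex AI CustomJob attached to a
# Private Service Connect Interface.
train_on_psc = utils.create_custom_training_job_from_component(
    train,
    display_name="train-with-psc-interface",
    machine_type="n1-standard-8",
    psc_interface_config={
        "network_attachment": (
            "projects/example-project/regions/us-central1/"
            "networkAttachments/example-attachment"
        ),
    },
)
```

Inside a pipeline definition, `train_on_psc()` is then invoked like any other component, and the resulting CustomJob carries the `psc_interface_config` in its job spec.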
@@ -229,6 +233,7 @@ def create_custom_training_job_from_component( 'labels': labels or {}, 'encryption_spec_key_name': encryption_spec_key_name, 'persistent_resource_id': persistent_resource_id, + 'psc_interface_config': psc_interface_config or {}, } for param_name, default_value in custom_job_param_defaults.items(): diff --git a/components/google-cloud/google_cloud_pipeline_components/version.py b/components/google-cloud/google_cloud_pipeline_components/version.py index 5ef554754b3..ed75902ec6c 100644 --- a/components/google-cloud/google_cloud_pipeline_components/version.py +++ b/components/google-cloud/google_cloud_pipeline_components/version.py @@ -13,4 +13,4 @@ # limitations under the License. """Google Cloud Pipeline Components version.""" -__version__ = "2.20.1" +__version__ = "2.21.0" diff --git a/docs/Architecture.md b/docs/Architecture.md deleted file mode 100644 index 901369b6d6a..00000000000 --- a/docs/Architecture.md +++ /dev/null @@ -1,7 +0,0 @@ -# Kubeflow Pipelines Architecture - -Below is a detailed diagram of the Kubeflow Pipelines Architecture. - -![KubeFlow Pipelines Cluster Wide Architecture](../images/kfp-cluster-wide-architecture.png) - -The above diagram documents the various components in Kubeflow Pipelines and how they interact. This should be a useful starting point for any developers. diff --git a/docs/contributing/anatomy.md b/docs/contributing/anatomy.md new file mode 100644 index 00000000000..544df6fdc0e --- /dev/null +++ b/docs/contributing/anatomy.md @@ -0,0 +1,55 @@ +# Pipeline Anatomy + +This document clarifies the anatomy of a single-component pipeline. Complexity sometimes discourages scrutiny. My hope is that improved clarity will pave the way for consolidation and simplification. + +## Flowchart + +```mermaid +graph TD + +a(["entrypoint (DAG)"]) --> b[root-driver] +b --> c(["root (DAG)"]) +c --> d[component-name-driver] +d --> e[component-name] +``` + +## Table + +| Argo Workflows (AWF) Task Name | AWF Task Type | AWF Template Name | Driver Type | Pod | Responsibilities | +| ------------------------------ | ------------- | ------------------------- | ----------- | --- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| entrypoint | dag | entrypoint | N/A | | In Argo Workflows, the entrypoint DAG defines the primary template to be executed when a workflow is initiated. It acts as the starting point or "main" function of your workflow.

The entrypoint has two sequential tasks:
  1. root-driver
  2. driver | +| root-driver | container | system-dag-driver | ROOT_DAG | ✅ |
    1. Validates provided options
    2. Determines where artifacts will be stored
    3. Creates the execution record in MLMD
    | +| root | dag | root | N/A | | The root DAG has two sequential tasks:
    1. \<component-name\>-driver
    2. \<component-name\>
    | +| \<component-name\>-driver | container | system-container-driver | CONTAINER | ✅ |
    1. Resolves inputs from upstream tasks
    2. Evaluates conditions
    3. Prepares output destinations
    4. Makes caching decisions
    5. Creates execution record in MLMD
    6. Dynamically configures subsequent execution pod (including K8s-specific config) at runtime using podSpecPatch
    | +| \<component-name\> | container | system-container-executor | N/A | ✅ |
    1. Downloads input artifacts
    2. Executes end-user code
    3. Uploads output artifacts
    4. Records output metadata and execution status to MLMD
    5. Publishes logs
    | + +## Reference Pipeline + +```python +from kfp import dsl +from kfp import compiler + + +@dsl.component() +def hello_world(): + print("hello world") + + +@dsl.pipeline() +def pipeline_hello_world(): + task = hello_world() +``` + +## Pipeline Execution Flow on Argo Workflows Backend +![flow](pipeline-flow.png) + +## Table + +| Pipeline Execution Component Name | Responsibilities | +|-----------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| Workflow Controller | Workflow Controller is the central component in the Argo Workflows backend. It monitors and executes Argo Workflow Custom Resources (CRs) | +| MLMD Metadata* | MLMD Metadata is a storage system for tracking experiments | +| Argo Workflow CR | Argo Workflow CR is the representation of a pipeline at the Argo Workflows level. It contains information about all tasks and their dependencies within the pipeline. | +| API-server | API Server — from the pipeline execution perspective, it provides access to the previously executed pipeline cache (if it exists). | + +The MLMD implementation is [planned](https://github.com/kubeflow/pipelines/pull/12147) to be replaced in the future. \ No newline at end of file diff --git a/docs/contributing/pipeline-flow.png b/docs/contributing/pipeline-flow.png new file mode 100644 index 00000000000..9ce91e54319 Binary files /dev/null and b/docs/contributing/pipeline-flow.png differ diff --git a/docs/diagram/.gitignore b/docs/diagram/.gitignore new file mode 100644 index 00000000000..a72ab7ad503 --- /dev/null +++ b/docs/diagram/.gitignore @@ -0,0 +1,2 @@ +# Ignore generated SVG files +*.mmd.svg diff --git a/docs/diagram/Makefile b/docs/diagram/Makefile new file mode 100644 index 00000000000..f8211482147 --- /dev/null +++ b/docs/diagram/Makefile @@ -0,0 +1,13 @@ +# Generate SVGs from mermaid files (MMD) +# Requires: https://github.com/mermaid-js/mermaid-cli +# + +.PHONY: all clean + +all: $(patsubst %.mmd,%.mmd.svg,$(wildcard *.mmd)) + +%.mmd.svg: %.mmd + mmdc -i "$<" -o "$@" + +clean: + rm *.mmd.svg diff --git a/docs/diagram/kfp-db.mmd b/docs/diagram/kfp-db.mmd new file mode 100644 index 00000000000..c0e2120f658 --- /dev/null +++ b/docs/diagram/kfp-db.mmd @@ -0,0 +1,131 @@ +erDiagram + PIPELINES { + varchar UUID PK + varchar Name UK + varchar DisplayName + bigint CreatedAtInSec + varchar DefaultVersionId + longtext Description + varchar Namespace + longtext Parameters + varchar Status + } + PIPELINE_VERSIONS { + varchar UUID PK + varchar Name UK + varchar PipelineId FK + bigint CreatedAtInSec + longtext Description + longtext Parameters + varchar Status + varchar CodeSourceUrl + longtext PipelineSpec + longtext PipelineSpecURI + } + RUN_DETAILS { + varchar UUID PK + varchar ExperimentUUID + varchar PipelineId + varchar PipelineVersionId + varchar JobUUID + varchar DisplayName + varchar Name + varchar StorageState + varchar Namespace + varchar ServiceAccount + varchar Description + bigint CreatedAtInSec + bigint ScheduledAtInSec + bigint FinishedAtInSec + varchar Conditions + varchar PipelineName + longtext PipelineSpecManifest + longtext WorkflowSpecManifest + longtext Parameters + longtext RuntimeParameters + longtext PipelineRoot + longtext PipelineRuntimeManifest + longtext WorkflowRuntimeManifest + varchar State + longtext StateHistory + bigint PipelineContextId + bigint PipelineRunContextId + } + TASKS { + varchar UUID PK + varchar RunUUID FK + varchar 
ParentTaskUUID + varchar Namespace + varchar PipelineName + varchar MLMDExecutionID + bigint CreatedTimestamp + bigint FinishedTimestamp + varchar Fingerprint + varchar PodName + bigint StartedTimestamp + varchar Name + varchar State + longtext StateHistory + longtext MLMDInputs + longtext MLMDOutputs + longtext ChildrenPods + longtext Payload + } + JOBS { + varchar UUID PK + varchar PipelineId + varchar PipelineVersionId + varchar ExperimentUUID + varchar DisplayName + varchar Name + varchar Namespace + varchar PipelineName + longtext PipelineSpecManifest + longtext WorkflowSpecManifest + longtext Parameters + longtext RuntimeParameters + longtext PipelineRoot + varchar Conditions + varchar ServiceAccount + varchar Description + bigint MaxConcurrency + tinyint NoCatchup + bigint CreatedAtInSec + bigint UpdatedAtInSec + tinyint Enabled + bigint CronScheduleStartTimeInSec + bigint CronScheduleEndTimeInSec + varchar Schedule + bigint PeriodicScheduleStartTimeInSec + bigint PeriodicScheduleEndTimeInSec + bigint IntervalSecond + } + EXPERIMENTS { + varchar UUID PK + varchar Name UK + varchar Description + bigint CreatedAtInSec + bigint LastRunCreatedAtInSec + varchar Namespace + varchar StorageState + } + RESOURCE_REFERENCES { + varchar(255) ResourceUUID PK + varchar(255) ResourceType PK + varchar(255) ReferenceType PK + varchar(255) ReferenceUUID + varchar(255) ReferenceName + varchar(255) Relationship + longtext Payload + } + %% Relationships + EXPERIMENTS ||..o{ JOBS : "UUID = ExperimentUUID" + EXPERIMENTS ||..o{ RUN_DETAILS : "UUID = ExperimentUUID" + PIPELINES ||--|{ PIPELINE_VERSIONS : "UUID = PipelineId" + PIPELINES ||..o{ JOBS : "UUID = PipelineId" + PIPELINE_VERSIONS ||..o{ JOBS : "UUID = PipelineVersionId" + PIPELINES ||..o{ RUN_DETAILS : "UUID = PipelineId" + PIPELINE_VERSIONS ||..o{ RUN_DETAILS : "UUID = PipelineVersionId" + JOBS ||..o{ RUN_DETAILS : "UUID = JobUUID" + RUN_DETAILS ||--|{ TASKS : "UUID = RunUUID" + TASKS ||..o| TASKS : "UUID = ParentTaskUUID" diff --git a/docs/requirements.txt b/docs/requirements.txt deleted file mode 100644 index 0e1274a496e..00000000000 --- a/docs/requirements.txt +++ /dev/null @@ -1,7 +0,0 @@ -autodocsumm==0.2.9 -sdk/python -sphinx==5.0.2 -sphinx-click==4.3.0 -sphinx-immaterial==0.9.0 -sphinx-rtd-theme==1.0.0 -m2r2==0.3.2 diff --git a/docs/sdk/Architecture.md b/docs/sdk/Architecture.md new file mode 100644 index 00000000000..3b5f80c788e --- /dev/null +++ b/docs/sdk/Architecture.md @@ -0,0 +1,7 @@ +# Kubeflow Pipelines Architecture + +Below is a detailed diagram of the Kubeflow Pipelines Architecture. + +![KubeFlow Pipelines Cluster Wide Architecture](../../images/kfp-cluster-wide-architecture.png) + +The above diagram documents the various components in Kubeflow Pipelines and how they interact. This should be a useful starting point for any developers. 
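A side note on the `docs/contributing/anatomy.md` file added above: its reference pipeline only defines the component and the pipeline. The snippet below is a minimal sketch (not part of this diff) showing how that pipeline would be compiled locally into the pipeline spec that the backend later expands into the entrypoint, root-driver, root, driver, and executor tasks described in the anatomy table.

```python
# Minimal sketch, not part of this diff: compile the single-component
# reference pipeline from docs/contributing/anatomy.md. The resulting YAML is
# what the KFP backend turns into the Argo Workflow whose tasks (entrypoint,
# root-driver, root, <component>-driver, <component>) the anatomy doc lists.
from kfp import compiler, dsl


@dsl.component()
def hello_world():
    print("hello world")


@dsl.pipeline()
def pipeline_hello_world():
    hello_world()


if __name__ == "__main__":
    compiler.Compiler().compile(
        pipeline_func=pipeline_hello_world,
        package_path="pipeline_hello_world.yaml",
    )
```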
diff --git a/docs/Makefile b/docs/sdk/Makefile similarity index 100% rename from docs/Makefile rename to docs/sdk/Makefile diff --git a/docs/_static/custom.css b/docs/sdk/_static/custom.css similarity index 100% rename from docs/_static/custom.css rename to docs/sdk/_static/custom.css diff --git a/docs/_static/favicon.ico b/docs/sdk/_static/favicon.ico similarity index 100% rename from docs/_static/favicon.ico rename to docs/sdk/_static/favicon.ico diff --git a/docs/_static/kubeflow.png b/docs/sdk/_static/kubeflow.png similarity index 100% rename from docs/_static/kubeflow.png rename to docs/sdk/_static/kubeflow.png diff --git a/docs/build_docs_locally.sh b/docs/sdk/build_docs_locally.sh similarity index 93% rename from docs/build_docs_locally.sh rename to docs/sdk/build_docs_locally.sh index 74a31608e9a..123c6952500 100644 --- a/docs/build_docs_locally.sh +++ b/docs/sdk/build_docs_locally.sh @@ -14,8 +14,8 @@ # limitations under the License. # install requirements -pushd .. -pip install -r docs/requirements.txt +pushd ../.. +pip install -r docs/sdk/requirements.txt popd # build docs diff --git a/docs/conf.py b/docs/sdk/conf.py similarity index 98% rename from docs/conf.py rename to docs/sdk/conf.py index 390b3658863..2fa4766a229 100644 --- a/docs/conf.py +++ b/docs/sdk/conf.py @@ -237,7 +237,7 @@ # # -- Extension configuration ------------------------------------------------- readme_path = os.path.join( - os.path.abspath(os.path.dirname(os.path.dirname(__file__))), 'sdk', + os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__)))), 'sdk', 'python', 'README.md') diff --git a/docs/index.rst b/docs/sdk/index.rst similarity index 85% rename from docs/index.rst rename to docs/sdk/index.rst index 15e22879a1f..3e6ffaaad3d 100644 --- a/docs/index.rst +++ b/docs/sdk/index.rst @@ -1,9 +1,9 @@ Kubeflow Pipelines (KFP) ==================================== -.. mdinclude:: ../sdk/python/README.md +.. mdinclude:: ../../sdk/python/README.md -.. mdinclude:: Architecture.md +.. mdinclude:: ./Architecture.md .. 
toctree:: :caption: Contents diff --git a/docs/make.bat b/docs/sdk/make.bat similarity index 100% rename from docs/make.bat rename to docs/sdk/make.bat diff --git a/docs/sdk/requirements.txt b/docs/sdk/requirements.txt new file mode 100644 index 00000000000..9ba576ce5f5 --- /dev/null +++ b/docs/sdk/requirements.txt @@ -0,0 +1,7 @@ +autodocsumm==0.2.9 +sdk/python +sphinx==5.0.2 +sphinx-click==4.3.0 +sphinx-immaterial==0.12.5 +sphinx-rtd-theme==1.0.0 +m2r2==0.3.2 diff --git a/docs/source/cli.rst b/docs/sdk/source/cli.rst similarity index 100% rename from docs/source/cli.rst rename to docs/sdk/source/cli.rst diff --git a/docs/source/client.rst b/docs/sdk/source/client.rst similarity index 100% rename from docs/source/client.rst rename to docs/sdk/source/client.rst diff --git a/docs/source/compiler.rst b/docs/sdk/source/compiler.rst similarity index 100% rename from docs/source/compiler.rst rename to docs/sdk/source/compiler.rst diff --git a/docs/source/components.rst b/docs/sdk/source/components.rst similarity index 100% rename from docs/source/components.rst rename to docs/sdk/source/components.rst diff --git a/docs/source/dsl.rst b/docs/sdk/source/dsl.rst similarity index 100% rename from docs/source/dsl.rst rename to docs/sdk/source/dsl.rst diff --git a/docs/source/genai.rst b/docs/sdk/source/genai.rst similarity index 100% rename from docs/source/genai.rst rename to docs/sdk/source/genai.rst diff --git a/docs/source/installation.rst b/docs/sdk/source/installation.rst similarity index 100% rename from docs/source/installation.rst rename to docs/sdk/source/installation.rst diff --git a/docs/source/kfp.rst b/docs/sdk/source/kfp.rst similarity index 100% rename from docs/source/kfp.rst rename to docs/sdk/source/kfp.rst diff --git a/docs/source/local.rst b/docs/sdk/source/local.rst similarity index 100% rename from docs/source/local.rst rename to docs/sdk/source/local.rst diff --git a/docs/source/overview.rst b/docs/sdk/source/overview.rst similarity index 100% rename from docs/source/overview.rst rename to docs/sdk/source/overview.rst diff --git a/docs/source/quickstart.rst b/docs/sdk/source/quickstart.rst similarity index 100% rename from docs/source/quickstart.rst rename to docs/sdk/source/quickstart.rst diff --git a/docs/source/registry.rst b/docs/sdk/source/registry.rst similarity index 100% rename from docs/source/registry.rst rename to docs/sdk/source/registry.rst diff --git a/docs/versions.json b/docs/sdk/versions.json similarity index 97% rename from docs/versions.json rename to docs/sdk/versions.json index f545cf0fe81..9e0d916e817 100644 --- a/docs/versions.json +++ b/docs/sdk/versions.json @@ -1,13 +1,18 @@ [ { - "version": "https://kubeflow-pipelines.readthedocs.io/en/sdk-2.13.0/", - "title": "2.13.0", + "version": "https://kubeflow-pipelines.readthedocs.io/en/sdk-2.14.1/", + "title": "2.14.1", "aliases": [ "stable", "latest" ] }, - { + { + "version": "https://kubeflow-pipelines.readthedocs.io/en/sdk-2.13.0/", + "title": "2.13.0", + "aliases": [] + }, + { "version": "https://kubeflow-pipelines.readthedocs.io/en/sdk-2.12.2/", "title": "2.12.2", "aliases": [] diff --git a/frontend/.nvmrc b/frontend/.nvmrc index 517f38666b4..2c6984e9467 100644 --- a/frontend/.nvmrc +++ b/frontend/.nvmrc @@ -1 +1 @@ -v22.14.0 +v22.19.0 diff --git a/frontend/CONTRIBUTING.md b/frontend/CONTRIBUTING.md index 0e4d4bffd79..5d9ca83f4e4 100644 --- a/frontend/CONTRIBUTING.md +++ b/frontend/CONTRIBUTING.md @@ -11,15 +11,15 @@ are available at their corresponding GitHub repositories. 
### fnm ```bash -fnm install 22.14.0 -fnm use 22.14.0 +fnm install 22.19.0 +fnm use 22.19.0 ``` ### nvm ```bash -nvm install 22.14.0 -nvm use 22.14.0 +nvm install 22.19.0 +nvm use 22.19.0 ``` ## Manage dev environment with npm diff --git a/frontend/Dockerfile b/frontend/Dockerfile index 429d8315d7b..dc7d3940b9b 100644 --- a/frontend/Dockerfile +++ b/frontend/Dockerfile @@ -1,4 +1,5 @@ -FROM node:22.13 as build +ARG BASE_IMAGE=alpine +FROM node:22.19 as build ARG COMMIT_HASH ENV COMMIT_HASH=${COMMIT_HASH} @@ -21,7 +22,7 @@ RUN mkdir -p ./server/dist && \ echo ${DATE} > ./server/dist/BUILD_DATE && \ echo ${TAG_NAME} > ./server/dist/TAG_NAME -FROM node:22.13-alpine +FROM node:22.19-${BASE_IMAGE} COPY --from=build ./src/frontend/server /server COPY --from=build ./src/frontend/build /client diff --git a/frontend/README.md b/frontend/README.md index 528996c0f5e..811dc204919 100644 --- a/frontend/README.md +++ b/frontend/README.md @@ -55,11 +55,12 @@ kubectl -n kubeflow scale --replicas=0 deployment/ml-pipeline-ui You can confirm that the previous [http://127.0.0.1:3000] link no longer works. -Now navigate to the KFP frontend folder and install your NPM dependencies: +Now navigate to the KFP frontend folder, install and build your NPM dependencies: ```bash cd ${WORKING_DIRECTORY}/frontend npm ci +npm run build ``` Now run the following: diff --git a/frontend/mock-backend/mock-api-server.ts b/frontend/mock-backend/mock-api-server.ts index 84718ca88e4..68263776aae 100644 --- a/frontend/mock-backend/mock-api-server.ts +++ b/frontend/mock-backend/mock-api-server.ts @@ -13,7 +13,7 @@ // limitations under the License. import express from 'express'; -import proxy from 'http-proxy-middleware'; +import { createProxyMiddleware } from 'http-proxy-middleware'; import mockApiMiddleware from './mock-api-middleware'; const app = express(); @@ -38,14 +38,14 @@ export const HACK_FIX_HPM_PARTIAL_RESPONSE_HEADERS = { /** Proxy metadata requests to the Envoy instance which will handle routing to the metadata gRPC server */ app.all( '/ml_metadata.*', - proxy({ + createProxyMiddleware({ changeOrigin: true, onProxyReq: proxyReq => { console.log('Metadata proxied request: ', (proxyReq as any).path); }, headers: HACK_FIX_HPM_PARTIAL_RESPONSE_HEADERS, target: getAddress({ host: 'localhost', port: '9090' }), - }), + }) as any, ); mockApiMiddleware(app as any); diff --git a/frontend/mock-backend/proxy-middleware.ts b/frontend/mock-backend/proxy-middleware.ts index e19ee8f5980..acc9b61d232 100644 --- a/frontend/mock-backend/proxy-middleware.ts +++ b/frontend/mock-backend/proxy-middleware.ts @@ -13,7 +13,7 @@ // limitations under the License. 
import * as express from 'express'; -import proxy from 'http-proxy-middleware'; +import { createProxyMiddleware } from 'http-proxy-middleware'; import { URL, URLSearchParams } from 'url'; export function _extractUrlFromReferer(proxyPrefix: string, referer = ''): string { @@ -68,7 +68,7 @@ export default (app: express.Application, apisPrefix: string) => { app.all( proxyPrefix + '*', - proxy({ + createProxyMiddleware({ changeOrigin: true, logLevel: 'debug', target: 'http://127.0.0.1', @@ -80,6 +80,6 @@ export default (app: express.Application, apisPrefix: string) => { pathRewrite: (_: any, req: any) => { return _rewritePath(proxyPrefix, req.path, req.query); }, - }), + }) as any, ); }; diff --git a/frontend/package-lock.json b/frontend/package-lock.json index a23155d4ee5..a3ad4d8acc2 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -21,7 +21,7 @@ "dagre": "^0.8.2", "google-protobuf": "^3.11.2", "grpc-web": "^1.2.1", - "http-proxy-middleware": "^0.19.0", + "http-proxy-middleware": "^2.0.7", "immer": "^9.0.6", "js-yaml": "^3.14.1", "lodash": "^4.17.21", @@ -33,7 +33,7 @@ "pako": "^2.0.4", "portable-fetch": "^3.0.0", "proto3-json-serializer": "^0.1.6", - "protobufjs": "~6.11.2", + "protobufjs": "~6.11.4", "re-resizable": "^4.9.0", "react": "^16.12.0", "react-ace": "^7.0.2", @@ -70,7 +70,6 @@ "@types/enzyme-adapter-react-16": "^1.0.5", "@types/express": "^4.16.0", "@types/google-protobuf": "^3.7.2", - "@types/http-proxy-middleware": "^0.17.5", "@types/jest": "^27.5.1", "@types/js-yaml": "^3.12.3", "@types/lodash": ">=4.14.117", @@ -142,11 +141,6 @@ "ajv": ">=8" } }, - "node_modules/@apideck/better-ajv-errors/node_modules/json-schema": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", - "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==" - }, "node_modules/@babel/code-frame": { "version": "7.26.2", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", @@ -228,6 +222,7 @@ "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -270,9 +265,10 @@ } }, "node_modules/@babel/eslint-parser/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -377,6 +373,7 @@ "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -501,9 +498,10 @@ } }, "node_modules/@babel/helper-define-polyfill-provider/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": 
"https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -1960,11 +1958,18 @@ } }, "node_modules/@babel/plugin-transform-runtime/node_modules/core-js-compat/node_modules/semver": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz", - "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "license": "ISC", + "dependencies": { + "lru-cache": "^6.0.0" + }, "bin": { "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" } }, "node_modules/@babel/plugin-transform-runtime/node_modules/debug": { @@ -2021,9 +2026,10 @@ } }, "node_modules/@babel/plugin-transform-runtime/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -2324,9 +2330,10 @@ } }, "node_modules/@babel/preset-env/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -2560,27 +2567,6 @@ "react-scripts": "^5.0.0" } }, - "node_modules/@craco/craco/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@craco/craco/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "engines": { - "node": ">=8" - } - }, "node_modules/@craco/craco/node_modules/semver": { "version": "7.6.0", "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", @@ -2595,39 +2581,6 @@ "node": ">=10" } }, - "node_modules/@craco/craco/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@craco/craco/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "engines": { - "node": ">=8" - } - }, - "node_modules/@craco/craco/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/@cspotcode/source-map-support": { "version": "0.8.1", "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", @@ -3056,18 +3009,6 @@ "pump": "^3.0.0" } }, - "node_modules/@google-cloud/storage/node_modules/pumpify/node_modules/duplexify": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", - "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", - "dev": true, - "dependencies": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", - "stream-shift": "^1.0.0" - } - }, "node_modules/@google-cloud/storage/node_modules/readable-stream": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", @@ -3362,17 +3303,6 @@ "node": ">=8" } }, - "node_modules/@jest/core/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@jest/core/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -3409,17 +3339,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "node_modules/@jest/core/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@jest/core/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -3428,14 +3347,6 @@ "node": ">=8" } }, - "node_modules/@jest/core/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "engines": { - "node": ">=0.12.0" - } - }, "node_modules/@jest/core/node_modules/istanbul-lib-coverage": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz", @@ -3547,18 +3458,6 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, - "node_modules/@jest/core/node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": 
"sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/@jest/core/node_modules/pirates": { "version": "4.0.4", "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.4.tgz", @@ -3582,9 +3481,10 @@ } }, "node_modules/@jest/core/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -3608,17 +3508,6 @@ "node": ">=8" } }, - "node_modules/@jest/core/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, "node_modules/@jest/environment": { "version": "27.4.6", "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-27.4.6.tgz", @@ -4126,17 +4015,6 @@ "node": ">=8" } }, - "node_modules/@jest/reporters/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@jest/reporters/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -4173,17 +4051,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "node_modules/@jest/reporters/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@jest/reporters/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -4192,14 +4059,6 @@ "node": ">=8" } }, - "node_modules/@jest/reporters/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "engines": { - "node": ">=0.12.0" - } - }, "node_modules/@jest/reporters/node_modules/istanbul-lib-instrument": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.1.0.tgz", @@ -4323,18 +4182,6 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, - "node_modules/@jest/reporters/node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": 
"sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/@jest/reporters/node_modules/pirates": { "version": "4.0.4", "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.4.tgz", @@ -4344,9 +4191,10 @@ } }, "node_modules/@jest/reporters/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -4370,17 +4218,6 @@ "node": ">=8" } }, - "node_modules/@jest/reporters/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, "node_modules/@jest/source-map": { "version": "24.9.0", "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-24.9.0.tgz", @@ -4507,17 +4344,6 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/@jest/test-sequencer/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@jest/test-sequencer/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -4554,17 +4380,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "node_modules/@jest/test-sequencer/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@jest/test-sequencer/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -4573,14 +4388,6 @@ "node": ">=8" } }, - "node_modules/@jest/test-sequencer/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "engines": { - "node": ">=0.12.0" - } - }, "node_modules/@jest/test-sequencer/node_modules/jest-haste-map": { "version": "27.4.6", "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-27.4.6.tgz", @@ -4669,18 +4476,6 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, - "node_modules/@jest/test-sequencer/node_modules/micromatch": { - "version": "4.0.4", - "resolved": 
"https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/@jest/test-sequencer/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -4692,17 +4487,6 @@ "node": ">=8" } }, - "node_modules/@jest/test-sequencer/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, "node_modules/@jest/transform": { "version": "26.6.2", "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-26.6.2.tgz", @@ -4778,18 +4562,6 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/@jest/transform/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@jest/transform/node_modules/chalk": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.1.tgz", @@ -4824,18 +4596,6 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "node_modules/@jest/transform/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@jest/transform/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -4845,28 +4605,6 @@ "node": ">=8" } }, - "node_modules/@jest/transform/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/@jest/transform/node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dev": true, - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/@jest/transform/node_modules/source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", @@ -4888,18 +4626,6 @@ "node": ">=8" } }, - "node_modules/@jest/transform/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - 
"dev": true, - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, "node_modules/@jest/types": { "version": "24.9.0", "resolved": "https://registry.npmjs.org/@jest/types/-/types-24.9.0.tgz", @@ -4976,6 +4702,7 @@ "resolved": "https://registry.npmjs.org/@material-ui/core/-/core-3.9.4.tgz", "integrity": "sha512-r8QFLSexcYZbnqy/Hn4v8xzmAJV41yaodUVjmbGLi1iGDLG3+W941hEtEiBmxTRRqv2BdK3r4ijILcqKmDv/Sw==", "deprecated": "Material UI v4 doesn't receive active development since September 2021. See the guide https://mui.com/material-ui/migration/migration-v4/ to upgrade to v5.", + "license": "MIT", "dependencies": { "@babel/runtime": "^7.2.0", "@material-ui/system": "^3.0.0-alpha.0", @@ -5037,6 +4764,7 @@ "resolved": "https://registry.npmjs.org/@material-ui/system/-/system-3.0.0-alpha.2.tgz", "integrity": "sha512-odmxQ0peKpP7RQBQ8koly06YhsPzcoVib1vByVPBH4QhwqBXuYoqlCjt02846fYspAqkrWzjxnWUD311EBbxOA==", "deprecated": "You can now upgrade to @mui/system. See the guide: https://mui.com/guides/migration-v4/", + "license": "MIT", "dependencies": { "@babel/runtime": "^7.2.0", "deepmerge": "^3.0.0", @@ -5055,6 +4783,7 @@ "version": "3.0.0-alpha.3", "resolved": "https://registry.npmjs.org/@material-ui/utils/-/utils-3.0.0-alpha.3.tgz", "integrity": "sha512-rwMdMZptX0DivkqBuC+Jdq7BYTXwqKai5G5ejPpuEDKpWzi1Oxp+LygGw329FrKpuKeiqpcymlqJTjmy+quWng==", + "license": "MIT", "dependencies": { "@babel/runtime": "^7.2.0", "prop-types": "^15.6.0", @@ -5194,6 +4923,16 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true }, + "node_modules/@mdx-js/mdx/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, "node_modules/@mdx-js/react": { "version": "1.6.22", "resolved": "https://registry.npmjs.org/@mdx-js/react/-/react-1.6.22.tgz", @@ -5643,17 +5382,6 @@ "process": "^0.11.10" } }, - "node_modules/@storybook/addon-actions/node_modules/prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "dependencies": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "node_modules/@storybook/addon-actions/node_modules/regenerator-runtime": { "version": "0.13.9", "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", @@ -5937,17 +5665,6 @@ "node": ">=10.13.0" } }, - "node_modules/@storybook/addon-docs/node_modules/prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "dependencies": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "node_modules/@storybook/addon-docs/node_modules/regenerator-runtime": { "version": "0.13.9", "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", @@ -6086,17 +5803,6 @@ "process": "^0.11.10" } }, - "node_modules/@storybook/addon-links/node_modules/prop-types": { - "version": "15.7.2", - "resolved": 
"https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "dependencies": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "node_modules/@storybook/addon-links/node_modules/qs": { "version": "6.11.0", "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", @@ -6247,17 +5953,6 @@ "process": "^0.11.10" } }, - "node_modules/@storybook/addon-viewport/node_modules/prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "dependencies": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "node_modules/@storybook/addon-viewport/node_modules/regenerator-runtime": { "version": "0.13.9", "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", @@ -6523,10 +6218,11 @@ } }, "node_modules/@storybook/builder-webpack4/node_modules/@babel/helper-define-polyfill-provider/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -6883,10 +6579,11 @@ } }, "node_modules/@storybook/builder-webpack4/node_modules/css-loader/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -7172,21 +6869,6 @@ "url": "https://opencollective.com/webpack" } }, - "node_modules/@storybook/builder-webpack4/node_modules/postcss-loader/node_modules/semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/@storybook/builder-webpack4/node_modules/postcss-value-parser": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.1.0.tgz", @@ -7206,6 +6888,22 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/@storybook/builder-webpack4/node_modules/semver": { + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "dev": true, + "license": "ISC", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + 
} + }, "node_modules/@storybook/builder-webpack4/node_modules/serialize-javascript": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-4.0.0.tgz", @@ -7639,17 +7337,6 @@ "react": ">= 0.14.0" } }, - "node_modules/@storybook/components/node_modules/prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "dependencies": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "node_modules/@storybook/components/node_modules/regenerator-runtime": { "version": "0.13.9", "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", @@ -7854,10 +7541,11 @@ } }, "node_modules/@storybook/core-common/node_modules/@babel/helper-define-polyfill-provider/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -8093,18 +7781,6 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@storybook/core-common/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@storybook/core-common/node_modules/cacache": { "version": "12.0.4", "resolved": "https://registry.npmjs.org/cacache/-/cacache-12.0.4.tgz", @@ -8282,18 +7958,6 @@ "node": ">=4.0.0" } }, - "node_modules/@storybook/core-common/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@storybook/core-common/node_modules/find-up": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", @@ -8393,21 +8057,6 @@ "url": "https://opencollective.com/webpack" } }, - "node_modules/@storybook/core-common/node_modules/fork-ts-checker-webpack-plugin/node_modules/semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/@storybook/core-common/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -8417,15 +8066,6 @@ "node": ">=8" } }, - "node_modules/@storybook/core-common/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": 
"sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "engines": { - "node": ">=0.12.0" - } - }, "node_modules/@storybook/core-common/node_modules/is-wsl": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-1.1.0.tgz", @@ -8476,19 +8116,6 @@ "json5": "lib/cli.js" } }, - "node_modules/@storybook/core-common/node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dev": true, - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/@storybook/core-common/node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -8559,6 +8186,22 @@ "node": ">=8" } }, + "node_modules/@storybook/core-common/node_modules/semver": { + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "dev": true, + "license": "ISC", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/@storybook/core-common/node_modules/serialize-javascript": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-4.0.0.tgz", @@ -8623,18 +8266,6 @@ "node": ">= 4" } }, - "node_modules/@storybook/core-common/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, "node_modules/@storybook/core-common/node_modules/watchpack": { "version": "1.7.5", "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-1.7.5.tgz", @@ -8698,114 +8329,6 @@ } } }, - "node_modules/@storybook/core-common/node_modules/webpack/node_modules/braces": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", - "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", - "dev": true, - "dependencies": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/@storybook/core-common/node_modules/webpack/node_modules/braces/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/@storybook/core-common/node_modules/webpack/node_modules/fill-range": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", - "integrity": 
"sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", - "dev": true, - "dependencies": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/@storybook/core-common/node_modules/webpack/node_modules/fill-range/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/@storybook/core-common/node_modules/webpack/node_modules/is-number": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/@storybook/core-common/node_modules/webpack/node_modules/is-number/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/@storybook/core-common/node_modules/webpack/node_modules/micromatch": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", - "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", - "dev": true, - "dependencies": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - "fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/@storybook/core-common/node_modules/webpack/node_modules/schema-utils": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", @@ -8820,19 +8343,6 @@ "node": ">= 4" } }, - "node_modules/@storybook/core-common/node_modules/webpack/node_modules/to-regex-range": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", - "dev": true, - "dependencies": { - "is-number": "^3.0.0", - "repeat-string": "^1.6.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/@storybook/core-common/node_modules/yallist": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", @@ -9350,12 +8860,24 @@ } }, "node_modules/@storybook/core-server/node_modules/node-fetch": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz", - "integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==", + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + 
"integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", "dev": true, + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, "engines": { "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } } }, "node_modules/@storybook/core-server/node_modules/regenerator-runtime": { @@ -9440,6 +8962,13 @@ "webpack": "^4.0.0" } }, + "node_modules/@storybook/core-server/node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "dev": true, + "license": "MIT" + }, "node_modules/@storybook/core-server/node_modules/watchpack": { "version": "1.7.5", "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-1.7.5.tgz", @@ -9454,6 +8983,13 @@ "watchpack-chokidar2": "^2.0.1" } }, + "node_modules/@storybook/core-server/node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "dev": true, + "license": "BSD-2-Clause" + }, "node_modules/@storybook/core-server/node_modules/webpack": { "version": "4.47.0", "resolved": "https://registry.npmjs.org/webpack/-/webpack-4.47.0.tgz", @@ -9503,6 +9039,17 @@ } } }, + "node_modules/@storybook/core-server/node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dev": true, + "license": "MIT", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, "node_modules/@storybook/core-server/node_modules/yallist": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", @@ -10117,12 +9664,24 @@ } }, "node_modules/@storybook/manager-webpack4/node_modules/node-fetch": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz", - "integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==", + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", "dev": true, + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, "engines": { "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } } }, "node_modules/@storybook/manager-webpack4/node_modules/p-limit": { @@ -10183,10 +9742,11 @@ } }, "node_modules/@storybook/manager-webpack4/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -10218,6 +9778,13 @@ "figgy-pudding": 
"^3.5.1" } }, + "node_modules/@storybook/manager-webpack4/node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "dev": true, + "license": "MIT" + }, "node_modules/@storybook/manager-webpack4/node_modules/watchpack": { "version": "1.7.5", "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-1.7.5.tgz", @@ -10232,6 +9799,13 @@ "watchpack-chokidar2": "^2.0.1" } }, + "node_modules/@storybook/manager-webpack4/node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "dev": true, + "license": "BSD-2-Clause" + }, "node_modules/@storybook/manager-webpack4/node_modules/webpack": { "version": "4.47.0", "resolved": "https://registry.npmjs.org/webpack/-/webpack-4.47.0.tgz", @@ -10318,6 +9892,17 @@ "webpack": "^4.0.0" } }, + "node_modules/@storybook/manager-webpack4/node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dev": true, + "license": "MIT", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, "node_modules/@storybook/manager-webpack4/node_modules/yallist": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", @@ -10470,10 +10055,11 @@ } }, "node_modules/@storybook/preset-create-react-app/node_modules/semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "dev": true, + "license": "ISC", "dependencies": { "lru-cache": "^6.0.0" }, @@ -10559,18 +10145,6 @@ "webpack": ">= 4" } }, - "node_modules/@storybook/react-docgen-typescript-plugin/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@storybook/react-docgen-typescript-plugin/node_modules/debug": { "version": "4.3.2", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", @@ -10588,18 +10162,6 @@ } } }, - "node_modules/@storybook/react-docgen-typescript-plugin/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@storybook/react-docgen-typescript-plugin/node_modules/find-cache-dir": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.1.tgz", @@ -10617,15 +10179,6 @@ "url": 
"https://github.com/avajs/find-cache-dir?sponsor=1" } }, - "node_modules/@storybook/react-docgen-typescript-plugin/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "engines": { - "node": ">=0.12.0" - } - }, "node_modules/@storybook/react-docgen-typescript-plugin/node_modules/make-dir": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", @@ -10641,19 +10194,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/@storybook/react-docgen-typescript-plugin/node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dev": true, - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/@storybook/react-docgen-typescript-plugin/node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -10682,26 +10222,15 @@ } }, "node_modules/@storybook/react-docgen-typescript-plugin/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } }, - "node_modules/@storybook/react-docgen-typescript-plugin/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, "node_modules/@storybook/react-docgen-typescript-plugin/node_modules/tslib": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.0.tgz", @@ -11042,17 +10571,6 @@ "yallist": "^3.0.2" } }, - "node_modules/@storybook/react/node_modules/prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "dependencies": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "node_modules/@storybook/react/node_modules/regenerator-runtime": { "version": "0.13.9", "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", @@ -11522,20 +11040,6 @@ "string.prototype.matchall": "^4.0.6" } }, - "node_modules/@surma/rollup-plugin-off-main-thread/node_modules/ejs": { - "version": "3.1.6", - "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.6.tgz", - "integrity": "sha512-9lt9Zse4hPucPkoP7FHDF0LQAlGyF9JVpnClFLFH3aSSbxmyoqINRpp/9wePWJTUl4KOQwRL72Iw3InHPDkoGw==", - "dependencies": { - "jake": "^10.6.1" - }, - "bin": { - "ejs": "bin/cli.js" - }, - "engines": { - "node": ">=0.10.0" - } - }, 
"node_modules/@surma/rollup-plugin-off-main-thread/node_modules/es-abstract": { "version": "1.19.1", "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.19.1.tgz", @@ -11585,17 +11089,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/@surma/rollup-plugin-off-main-thread/node_modules/is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/@surma/rollup-plugin-off-main-thread/node_modules/is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -11625,14 +11118,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/@surma/rollup-plugin-off-main-thread/node_modules/object-inspect": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", - "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/@surma/rollup-plugin-off-main-thread/node_modules/string.prototype.matchall": { "version": "4.0.6", "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.6.tgz", @@ -11919,15 +11404,6 @@ "@types/yargs-parser": "*" } }, - "node_modules/@testing-library/dom/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, "node_modules/@testing-library/dom/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -12775,21 +12251,11 @@ "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==" }, "node_modules/@types/http-proxy": { - "version": "1.17.14", - "resolved": "https://registry.npmjs.org/@types/http-proxy/-/http-proxy-1.17.14.tgz", - "integrity": "sha512-SSrD0c1OQzlFX7pGu1eXxSEjemej64aaNPRhhVYUGqXh0BtldAAx37MG8btcumvpgKyZp1F5Gn3JkktdxiFv6w==", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/http-proxy-middleware": { - "version": "0.17.6", - "resolved": "https://registry.npmjs.org/@types/http-proxy-middleware/-/http-proxy-middleware-0.17.6.tgz", - "integrity": "sha512-NocuMc3omR+yySlkgZlNUDyJa9ENGuwX8Ev7Y9zO//H989drWp18Fn+oAgZZIPu+JWtNinIxENK2TZvo53o3tw==", - "dev": true, + "version": "1.17.16", + "resolved": "https://registry.npmjs.org/@types/http-proxy/-/http-proxy-1.17.16.tgz", + "integrity": "sha512-sdWoUajOB1cd0A8cRRQ1cfyWNbmFKLAqBB89Y8x5iYyG/mkJHc0YUH8pdWBy2omi9qtCpiIgGjuwO0dQST2l5w==", + "license": "MIT", "dependencies": { - "@types/connect": "*", - "@types/http-proxy": "*", "@types/node": "*" } }, @@ -12832,15 +12298,6 @@ "pretty-format": "^27.0.0" } }, - "node_modules/@types/jest/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, 
"node_modules/@types/jest/node_modules/ansi-styles": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", @@ -12893,6 +12350,7 @@ "version": "9.5.8", "resolved": "https://registry.npmjs.org/@types/jss/-/jss-9.5.8.tgz", "integrity": "sha512-bBbHvjhm42UKki+wZpR89j73ykSXg99/bhuKuYYePtpma3ZAnmeGnl0WxXiZhPGsIfzKwCUkpPC0jlrVMBfRxA==", + "license": "MIT", "dependencies": { "csstype": "^2.0.0", "indefinite-observable": "^1.0.1" @@ -12970,19 +12428,45 @@ } }, "node_modules/@types/node-fetch/node_modules/form-data": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", - "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "version": "2.5.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.4.tgz", + "integrity": "sha512-Y/3MmRiR8Nd+0CUtrbvcKtKzLWiUfpQ7DFVggH8PwmGt/0r7RSy32GuP4hpCJlQNEBusisSx1DLtD8uD386HJQ==", + "deprecated": "This version has an incorrect dependency; please use v2.5.5", "dev": true, + "license": "MIT", "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", - "mime-types": "^2.1.12" + "es-set-tostringtag": "^2.1.0", + "has-own": "^1.0.1", + "mime-types": "^2.1.35", + "safe-buffer": "^5.2.1" }, "engines": { - "node": ">= 6" + "node": ">= 0.12" } }, + "node_modules/@types/node-fetch/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, "node_modules/@types/node-forge": { "version": "1.3.11", "resolved": "https://registry.npmjs.org/@types/node-forge/-/node-forge-1.3.11.tgz", @@ -13393,9 +12877,10 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "node_modules/@typescript-eslint/eslint-plugin/node_modules/semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "license": "ISC", "dependencies": { "lru-cache": "^6.0.0" }, @@ -13604,26 +13089,16 @@ } } }, - "node_modules/@typescript-eslint/typescript-estree/node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/@typescript-eslint/typescript-estree/node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "node_modules/@typescript-eslint/typescript-estree/node_modules/semver": { - "version": "7.3.5", - "resolved": 
"https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "license": "ISC", "dependencies": { "lru-cache": "^6.0.0" }, @@ -14108,17 +13583,6 @@ "react": "^0.14 || ^15.0.0 || ^16.0.0-alpha" } }, - "node_modules/airbnb-prop-types/node_modules/prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "dependencies": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "node_modules/ajv": { "version": "8.12.0", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", @@ -14159,6 +13623,18 @@ } } }, + "node_modules/ajv-keywords": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3" + }, + "peerDependencies": { + "ajv": "^8.8.2" + } + }, "node_modules/alphanum-sort": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/alphanum-sort/-/alphanum-sort-1.0.2.tgz", @@ -14257,21 +13733,23 @@ } }, "node_modules/ansi-html": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/ansi-html/-/ansi-html-0.0.7.tgz", - "integrity": "sha1-gTWEAhliqenm/QOflA0S9WynhZ4=", + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/ansi-html/-/ansi-html-0.0.8.tgz", + "integrity": "sha512-QROYz1I1Kj+8bTYgx0IlMBpRSCIU+7GjbE0oH+KF7QKc+qSF8YAlIutN59Db17tXN70Ono9upT9Ht0iG93W7ug==", "devOptional": true, "engines": [ "node >= 0.8.0" ], + "license": "Apache-2.0", "bin": { "ansi-html": "bin/ansi-html" } }, "node_modules/ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "license": "MIT", "engines": { "node": ">=8" } @@ -14370,38 +13848,6 @@ "node": ">=6.0" } }, - "node_modules/arr-diff": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz", - "integrity": "sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/arr-flatten": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.1.0.tgz", - "integrity": "sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/arr-union": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/arr-union/-/arr-union-3.1.0.tgz", - "integrity": "sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/array-flatten": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-2.1.2.tgz", - 
"integrity": "sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ==", - "dev": true, - "optional": true, - "peer": true - }, "node_modules/array-includes": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.3.tgz", @@ -14465,17 +13911,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/array-includes/node_modules/is-callable": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.3.tgz", - "integrity": "sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ==", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/array-includes/node_modules/is-regex": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.3.tgz", @@ -14502,14 +13937,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/array-includes/node_modules/object-inspect": { - "version": "1.10.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.10.3.tgz", - "integrity": "sha512-e5mCJlSH7poANfC8z8S9s9S2IN5/4Zb3aZ33f5s8YqoazCFzNLloLU8r5VCG+G7WoqLvAAZoVMcy3tp/3X0Plw==", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/array-union": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", @@ -14527,14 +13954,6 @@ "node": ">=0.10.0" } }, - "node_modules/array-unique": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz", - "integrity": "sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg=", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/array.prototype.find": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/array.prototype.find/-/array.prototype.find-2.1.0.tgz", @@ -14645,18 +14064,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/array.prototype.flatmap/node_modules/is-callable": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.3.tgz", - "integrity": "sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/array.prototype.flatmap/node_modules/is-regex": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.3.tgz", @@ -14685,15 +14092,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/array.prototype.flatmap/node_modules/object-inspect": { - "version": "1.10.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.10.3.tgz", - "integrity": "sha512-e5mCJlSH7poANfC8z8S9s9S2IN5/4Zb3aZ33f5s8YqoazCFzNLloLU8r5VCG+G7WoqLvAAZoVMcy3tp/3X0Plw==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/array.prototype.map": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/array.prototype.map/-/array.prototype.map-1.0.3.tgz", @@ -14761,18 +14159,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/array.prototype.map/node_modules/is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", - "dev": true, - "engines": { - "node": ">= 
0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/array.prototype.map/node_modules/is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -14804,15 +14190,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/array.prototype.map/node_modules/object-inspect": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", - "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/arrify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", @@ -14879,14 +14256,6 @@ "inherits": "2.0.3" } }, - "node_modules/assign-symbols": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assign-symbols/-/assign-symbols-1.0.0.tgz", - "integrity": "sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c=", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/ast-types": { "version": "0.14.2", "resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.14.2.tgz", @@ -14910,17 +14279,6 @@ "integrity": "sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg==", "dev": true }, - "node_modules/async": { - "version": "2.6.3", - "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz", - "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "lodash": "^4.17.14" - } - }, "node_modules/async-each": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/async-each/-/async-each-1.0.6.tgz", @@ -14934,12 +14292,6 @@ ], "optional": true }, - "node_modules/async-limiter": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/async-limiter/-/async-limiter-1.0.0.tgz", - "integrity": "sha512-jp/uFnooOiO+L211eZOoSyzpOITMXx1rBITauYykG3BRYPu8h0UcxsPNB04RR5vo4Tyz3+ay17tR6JVf9qzYWg==", - "dev": true - }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -14953,17 +14305,6 @@ "node": ">= 4.0.0" } }, - "node_modules/atob": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz", - "integrity": "sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==", - "bin": { - "atob": "bin/atob.js" - }, - "engines": { - "node": ">= 4.5.0" - } - }, "node_modules/attr-accept": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/attr-accept/-/attr-accept-1.1.3.tgz", @@ -15087,6 +14428,22 @@ "browserslist": ">= 4.21.0" } }, + "node_modules/available-typed-arrays": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/aws-sign2": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", @@ -15219,17 +14576,6 @@ "node": ">=8" } }, - "node_modules/babel-jest/node_modules/braces": { - "version": "3.0.2", - "resolved": 
"https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/babel-jest/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -15266,17 +14612,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "node_modules/babel-jest/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/babel-jest/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -15285,14 +14620,6 @@ "node": ">=8" } }, - "node_modules/babel-jest/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "engines": { - "node": ">=0.12.0" - } - }, "node_modules/babel-jest/node_modules/istanbul-lib-coverage": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz", @@ -15404,18 +14731,6 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, - "node_modules/babel-jest/node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/babel-jest/node_modules/pirates": { "version": "4.0.4", "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.4.tgz", @@ -15425,9 +14740,10 @@ } }, "node_modules/babel-jest/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -15451,17 +14767,6 @@ "node": ">=8" } }, - "node_modules/babel-jest/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, "node_modules/babel-loader": { "version": "8.2.3", "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.2.3.tgz", @@ -15546,9 +14851,10 @@ } }, "node_modules/babel-loader/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": 
"sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -15729,9 +15035,10 @@ } }, "node_modules/babel-plugin-polyfill-corejs2/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -16980,11 +16287,18 @@ } }, "node_modules/babel-preset-react-app/node_modules/core-js-compat/node_modules/semver": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz", - "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "license": "ISC", + "dependencies": { + "lru-cache": "^6.0.0" + }, "bin": { "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" } }, "node_modules/babel-preset-react-app/node_modules/debug": { @@ -17041,9 +16355,10 @@ } }, "node_modules/babel-preset-react-app/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -17083,69 +16398,6 @@ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, - "node_modules/base": { - "version": "0.11.2", - "resolved": "https://registry.npmjs.org/base/-/base-0.11.2.tgz", - "integrity": "sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==", - "dependencies": { - "cache-base": "^1.0.1", - "class-utils": "^0.3.5", - "component-emitter": "^1.2.1", - "define-property": "^1.0.0", - "isobject": "^3.0.1", - "mixin-deep": "^1.2.0", - "pascalcase": "^0.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/base/node_modules/define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", - "dependencies": { - "is-descriptor": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/base/node_modules/is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", 
- "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/base/node_modules/is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/base/node_modules/is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dependencies": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/base64-js": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.0.tgz", @@ -17244,9 +16496,10 @@ "dev": true }, "node_modules/body-parser": { - "version": "1.20.2", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz", - "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==", + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", + "license": "MIT", "dependencies": { "bytes": "3.1.2", "content-type": "~1.0.5", @@ -17256,7 +16509,7 @@ "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", - "qs": "6.11.0", + "qs": "6.13.0", "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" @@ -17266,14 +16519,6 @@ "npm": "1.2.8000 || >= 1.4.16" } }, - "node_modules/body-parser/node_modules/bytes": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", - "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", - "engines": { - "node": ">= 0.8" - } - }, "node_modules/body-parser/node_modules/debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", @@ -17311,11 +16556,12 @@ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, "node_modules/body-parser/node_modules/qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "license": "BSD-3-Clause", "dependencies": { - "side-channel": "^1.0.4" + "side-channel": "^1.0.6" }, "engines": { "node": ">=0.6" @@ -17337,22 +16583,6 @@ "node": ">= 0.8" } }, - "node_modules/bonjour": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/bonjour/-/bonjour-3.5.0.tgz", - "integrity": "sha1-jokKGD2O6aI5OzhExpGkK897yfU=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "array-flatten": "^2.1.0", - "deep-equal": "^1.0.1", - "dns-equal": "^1.0.0", - "dns-txt": "^2.0.2", - "multicast-dns": "^6.0.1", - "multicast-dns-service-types": "^1.1.0" - } - }, "node_modules/bonjour-service": { "version": "1.2.1", 
"resolved": "https://registry.npmjs.org/bonjour-service/-/bonjour-service-1.2.1.tgz", @@ -17503,49 +16733,32 @@ "integrity": "sha1-SJb8ydVE7vRfS7dmDbMg07N5/lg=" }, "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "node_modules/braces": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", - "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", - "dependencies": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/braces/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "license": "MIT", "dependencies": { - "is-extendable": "^0.1.0" + "fill-range": "^7.1.1" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/brcast": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/brcast/-/brcast-3.0.2.tgz", - "integrity": "sha512-f5XwwFCCuvgqP2nMH/hJ74FqnGmb4X3D+NC//HphxJzzhsZvSZa+Hk/syB7j3ZHpPDLMoYU8oBgviRWfNvEfKA==" + "integrity": "sha512-f5XwwFCCuvgqP2nMH/hJ74FqnGmb4X3D+NC//HphxJzzhsZvSZa+Hk/syB7j3ZHpPDLMoYU8oBgviRWfNvEfKA==", + "license": "MIT" }, "node_modules/broadcast-channel": { "version": "3.6.0", @@ -17767,14 +16980,6 @@ "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==" }, - "node_modules/buffer-indexof": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/buffer-indexof/-/buffer-indexof-1.1.1.tgz", - "integrity": "sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g==", - "dev": true, - "optional": true, - "peer": true - }, "node_modules/buffer-xor": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz", @@ -17788,9 +16993,9 @@ "dev": true }, "node_modules/bytes": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", - "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", "engines": { "node": ">= 0.8" } @@ -17952,35 +17157,45 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/cache-base": { - "version": "1.0.1", - "resolved": 
"https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz", - "integrity": "sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==", + "node_modules/call-bind": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", + "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", + "license": "MIT", "dependencies": { - "collection-visit": "^1.0.0", - "component-emitter": "^1.2.1", - "get-value": "^2.0.6", - "has-value": "^1.0.0", - "isobject": "^3.0.1", - "set-value": "^2.0.0", - "to-object-path": "^0.3.0", - "union-value": "^1.0.0", - "unset-value": "^1.0.0" + "call-bind-apply-helpers": "^1.0.0", + "es-define-property": "^1.0.0", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.2" }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/call-bind": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", - "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", "dependencies": { - "es-define-property": "^1.0.0", "es-errors": "^1.3.0", - "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.4", - "set-function-length": "^1.2.1" + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" }, "engines": { "node": ">= 0.4" @@ -18145,7 +17360,8 @@ "node_modules/change-emitter": { "version": "0.1.6", "resolved": "https://registry.npmjs.org/change-emitter/-/change-emitter-0.1.6.tgz", - "integrity": "sha1-6LL+PX8at9aaMhma/5HqaTFAlRU=" + "integrity": "sha512-YXzt1cQ4a2jqazhcuSWEOc1K2q8g9H6eWNsyZgi640LDzRWVQ2eDe+Y/kVdftH+vYdPF2rgDb3dLdpxE1jvAxw==", + "license": "MIT" }, "node_modules/char-regex": { "version": "1.0.2", @@ -18287,58 +17503,6 @@ "fsevents": "~2.3.2" } }, - "node_modules/chokidar/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/chokidar/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/chokidar/node_modules/is-glob": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", - "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", 
- "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/chokidar/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/chokidar/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, "node_modules/chownr": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", @@ -18363,45 +17527,49 @@ "dev": true }, "node_modules/cipher-base": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz", - "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.6.tgz", + "integrity": "sha512-3Ek9H3X6pj5TgenXYtNWdaBon1tgYCaebd+XPg0keyjEbEfkD4KkmAxkQ/i1vYvxdcT5nscLBfq9VJRmCBcFSw==", "dev": true, "dependencies": { - "inherits": "^2.0.1", - "safe-buffer": "^5.0.1" + "inherits": "^2.0.4", + "safe-buffer": "^5.2.1" + }, + "engines": { + "node": ">= 0.10" } }, + "node_modules/cipher-base/node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/cipher-base/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, "node_modules/cjs-module-lexer": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz", "integrity": "sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA==" }, - "node_modules/class-utils": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/class-utils/-/class-utils-0.3.6.tgz", - "integrity": "sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==", - "dependencies": { - "arr-union": "^3.1.0", - "define-property": "^0.2.5", - "isobject": "^3.0.0", - "static-extend": "^0.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/class-utils/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/classcat": { "version": "5.0.3", "resolved": "https://registry.npmjs.org/classcat/-/classcat-5.0.3.tgz", @@ -18539,18 +17707,6 @@ "resolved": 
"https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz", "integrity": "sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg==" }, - "node_modules/collection-visit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz", - "integrity": "sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA=", - "dependencies": { - "map-visit": "^1.0.0", - "object-visit": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/color-convert": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", @@ -18628,11 +17784,6 @@ "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=" }, - "node_modules/component-emitter": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.2.1.tgz", - "integrity": "sha1-E3kY1teCg/ffemt8WmPhQOaUJeY=" - }, "node_modules/compressible": { "version": "2.0.15", "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.15.tgz", @@ -18646,16 +17797,16 @@ } }, "node_modules/compression": { - "version": "1.7.4", - "resolved": "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz", - "integrity": "sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==", + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/compression/-/compression-1.8.1.tgz", + "integrity": "sha512-9mAqGPHLakhCLeNyxPkK4xVo746zQ/czLH1Ky+vkitMnWfWZps8r0qXuwhwizagCRttsL4lfG4pIOvaWLpAP0w==", "dependencies": { - "accepts": "~1.3.5", - "bytes": "3.0.0", - "compressible": "~2.0.16", + "bytes": "3.1.2", + "compressible": "~2.0.18", "debug": "2.6.9", - "on-headers": "~1.0.2", - "safe-buffer": "5.1.2", + "negotiator": "~0.6.4", + "on-headers": "~1.1.0", + "safe-buffer": "5.2.1", "vary": "~1.1.2" }, "engines": { @@ -18681,6 +17832,33 @@ "ms": "2.0.0" } }, + "node_modules/compression/node_modules/negotiator": { + "version": "0.6.4", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz", + "integrity": "sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/compression/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, "node_modules/compute-scroll-into-view": { "version": "1.0.17", "resolved": "https://registry.npmjs.org/compute-scroll-into-view/-/compute-scroll-into-view-1.0.17.tgz", @@ -18712,17 +17890,6 @@ "resolved": "https://registry.npmjs.org/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz", "integrity": "sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA==" }, - "node_modules/connect-history-api-fallback": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz", - "integrity": "sha512-e54B99q/OUoH64zYYRf3HBP5z24G38h5D3qXu23JGRoigpX5Ss4r9ZnDk3g0Z8uQC2x2lPaJ+UlWBc1ZWBWdLg==", - "dev": 
true, - "optional": true, - "peer": true, - "engines": { - "node": ">=0.8" - } - }, "node_modules/console-browserify": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/console-browserify/-/console-browserify-1.2.0.tgz", @@ -18794,14 +17961,6 @@ "run-queue": "^1.0.0" } }, - "node_modules/copy-descriptor": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/copy-descriptor/-/copy-descriptor-0.1.1.tgz", - "integrity": "sha1-Z29us8OZl8LuGsOpJP1hJHSPV40=", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/copy-to-clipboard": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/copy-to-clipboard/-/copy-to-clipboard-3.3.1.tgz", @@ -18853,11 +18012,18 @@ } }, "node_modules/core-js-compat/node_modules/semver": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz", - "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "license": "ISC", + "dependencies": { + "lru-cache": "^6.0.0" + }, "bin": { "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" } }, "node_modules/core-js-pure": { @@ -19018,10 +18184,11 @@ } }, "node_modules/cp-file/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -19272,19 +18439,41 @@ "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==" }, "node_modules/cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dev": true, + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.5.tgz", + "integrity": "sha512-ZVJrKKYunU38/76t0RMOulHOnUcbU9GbpWKAOZ0mhjr7CX6FVrH+4FrAapSOekrgFQ3f/8gwMEuIft0aKq6Hug==", + "license": "MIT", "dependencies": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" }, "engines": { - "node": ">=4.8" + "node": ">= 8" + } + }, + "node_modules/cross-spawn/node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/cross-spawn/node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" } }, 
"node_modules/crypto-browserify": { @@ -19494,9 +18683,10 @@ "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==" }, "node_modules/css-loader/node_modules/semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "license": "ISC", "dependencies": { "lru-cache": "^6.0.0" }, @@ -19541,17 +18731,6 @@ } } }, - "node_modules/css-minimizer-webpack-plugin/node_modules/ajv-keywords": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", - "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", - "dependencies": { - "fast-deep-equal": "^3.1.3" - }, - "peerDependencies": { - "ajv": "^8.8.2" - } - }, "node_modules/css-minimizer-webpack-plugin/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -19694,7 +18873,8 @@ "node_modules/css-vendor": { "version": "0.3.8", "resolved": "https://registry.npmjs.org/css-vendor/-/css-vendor-0.3.8.tgz", - "integrity": "sha1-ZCHP0wNM5mT+dnOXL9ARn8KJQfo=", + "integrity": "sha512-Vx/Vl3zsHj32Z+WTNzGjd2iSbSIJTYHMmyGUT2nzCjj0Xk4qLfwpQ8nF6TQ5oo3Cf0s/An3DTc7LclH1BkAXbQ==", + "license": "MIT", "dependencies": { "is-in-browser": "^1.0.2" } @@ -20235,30 +19415,11 @@ "ms": "2.0.0" } }, - "node_modules/decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/decimal.js": { "version": "10.2.1", "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.2.1.tgz", "integrity": "sha512-KaL7+6Fw6i5A2XSnsbhm/6B+NuEA7TZ4vqxnd5tXz9sbKtrN9Srj8ab4vKVdK8YAqZO9P1kg45Y6YLoduPf+kw==" }, - "node_modules/decode-uri-component": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz", - "integrity": "sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==", - "engines": { - "node": ">=0.10" - } - }, "node_modules/dedent": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/dedent/-/dedent-0.7.0.tgz", @@ -20284,25 +19445,11 @@ "version": "3.3.0", "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-3.3.0.tgz", "integrity": "sha512-GRQOafGHwMHpjPx9iCvTgpu9NojZ49q794EEL94JVEw6VaeA8XTUyBKvAkOOjBX9oJNiV6G3P+T+tihFjo2TqA==", + "license": "MIT", "engines": { "node": ">=0.10.0" } }, - "node_modules/default-gateway": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/default-gateway/-/default-gateway-4.2.0.tgz", - "integrity": "sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "execa": "^1.0.0", - "ip-regex": "^2.1.0" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/define-data-property": { "version": "1.1.4", "resolved": 
"https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", @@ -20343,132 +19490,11 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/define-property": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-2.0.2.tgz", - "integrity": "sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ==", - "dependencies": { - "is-descriptor": "^1.0.2", - "isobject": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/define-property/node_modules/is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/define-property/node_modules/is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/define-property/node_modules/is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dependencies": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/defined": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/defined/-/defined-1.0.0.tgz", "integrity": "sha1-yY2bzvdWdBiOEQlpFRGZ45sfppM=" }, - "node_modules/del": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/del/-/del-4.1.1.tgz", - "integrity": "sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "@types/glob": "^7.1.1", - "globby": "^6.1.0", - "is-path-cwd": "^2.0.0", - "is-path-in-cwd": "^2.0.0", - "p-map": "^2.0.0", - "pify": "^4.0.1", - "rimraf": "^2.6.3" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/del/node_modules/array-union": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", - "integrity": "sha512-Dxr6QJj/RdU/hCaBjOfxW+q6lyuVE6JFWIrAUpuOOhoJJoQ99cUn3igRaHVB5P9WrgFVN0FfArM3x0cueOU8ng==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "array-uniq": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/del/node_modules/globby": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-6.1.0.tgz", - "integrity": "sha512-KVbFv2TQtbzCoxAnfD6JcHZTYCzyliEaaeM/gH8qQdkKr5s0OP9scEgvdcngyk7AVdY6YVW/TJHd+lQ/Df3Daw==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "array-union": "^1.0.1", - "glob": "^7.0.3", - "object-assign": "^4.0.1", - "pify": "^2.0.0", - "pinkie-promise": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/del/node_modules/globby/node_modules/pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": 
"sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/del/node_modules/p-map": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-2.1.0.tgz", - "integrity": "sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=6" - } - }, "node_modules/delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", @@ -20642,37 +19668,6 @@ "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==" }, - "node_modules/dns-equal": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/dns-equal/-/dns-equal-1.0.0.tgz", - "integrity": "sha1-s55/HabrCnW6nBcySzR1PEfgZU0=", - "dev": true, - "optional": true, - "peer": true - }, - "node_modules/dns-packet": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/dns-packet/-/dns-packet-1.3.4.tgz", - "integrity": "sha512-BQ6F4vycLXBvdrJZ6S3gZewt6rcrks9KBgM9vrhW+knGRqc8uEdT7fuCwloc7nny5xNoMJ17HGH0R/6fpo8ECA==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "ip": "^1.1.0", - "safe-buffer": "^5.0.1" - } - }, - "node_modules/dns-txt": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/dns-txt/-/dns-txt-2.0.2.tgz", - "integrity": "sha1-uR2Ab10nGI5Ks+fRB9iBocxGQrY=", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "buffer-indexof": "^1.0.0" - } - }, "node_modules/doctrine": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", @@ -20851,29 +19846,26 @@ "react": ">=16.12.0" } }, - "node_modules/downshift/node_modules/prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "dependencies": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, - "node_modules/downshift/node_modules/prop-types/node_modules/react-is": { - "version": "16.13.1", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", - "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", - "dev": true - }, "node_modules/downshift/node_modules/react-is": { "version": "17.0.2", "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", "dev": true }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/duplexer": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.1.tgz", @@ -20881,15 +19873,29 @@ "dev": true }, "node_modules/duplexify": { - "version": "3.6.1", - "resolved": 
"https://registry.npmjs.org/duplexify/-/duplexify-3.6.1.tgz", - "integrity": "sha512-vM58DwdnKmty+FSPzT14K9JXb90H+j5emaR4KYbr2KTIz00WHGbWOe5ghQTx233ZCLZtrGDALzKwcjEtSt35mA==", + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz", + "integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==", "dev": true, "dependencies": { - "end-of-stream": "^1.0.0", - "inherits": "^2.0.1", - "readable-stream": "^2.0.0", - "stream-shift": "^1.0.0" + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.2" + } + }, + "node_modules/duplexify/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" } }, "node_modules/dynamic-dedupe": { @@ -20925,11 +19931,16 @@ "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" }, "node_modules/ejs": { - "version": "2.7.4", - "resolved": "https://registry.npmjs.org/ejs/-/ejs-2.7.4.tgz", - "integrity": "sha512-7vmuyh5+kuUyJKePhQfRQBhXV5Ce+RnaeeQArKu1EAMpL3WbgMt5WG6uQZpEVvYSSsxMXRKOewtDk9RaTKXRlA==", - "dev": true, - "hasInstallScript": true, + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.7.tgz", + "integrity": "sha512-BIar7R6abbUxDA3bfXrO4DSgwo8I+fB5/1zgujl3HLLjwd6+9iOnrT+t3grn2qbk9vOgBubXOFwX2m9axoFaGw==", + "license": "Apache-2.0", + "dependencies": { + "jake": "^10.8.5" + }, + "bin": { + "ejs": "bin/cli.js" + }, "engines": { "node": ">=0.10.0" } @@ -21022,14 +20033,6 @@ "node": ">= 0.8" } }, - "node_modules/encoding": { - "version": "0.1.12", - "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.12.tgz", - "integrity": "sha1-U4tm8+5izRq1HsMjgp0flIDHS+s=", - "dependencies": { - "iconv-lite": "~0.4.13" - } - }, "node_modules/end-of-stream": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.1.tgz", @@ -21143,22 +20146,12 @@ "react-dom": "^16.0.0-0" } }, - "node_modules/enzyme-adapter-react-16/node_modules/prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "dependencies": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "node_modules/enzyme-adapter-react-16/node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver" } @@ -21180,22 +20173,12 @@ "react": "0.13.x || 0.14.x || ^15.0.0-0 || ^16.0.0-0" } }, - "node_modules/enzyme-adapter-utils/node_modules/prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": 
"sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "dependencies": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "node_modules/enzyme-adapter-utils/node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver" } @@ -21276,12 +20259,10 @@ "dev": true }, "node_modules/es-define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", - "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", - "dependencies": { - "get-intrinsic": "^1.2.4" - }, + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", "engines": { "node": ">= 0.4" } @@ -21339,6 +20320,33 @@ "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.4.1.tgz", "integrity": "sha512-cXLGjP0c4T3flZJKQSuziYoq7MlT+rnvfZjfp7h+I7K9BNX54kP9nyWvdbwjQ4u1iWbOL4u96fgeZLToQlZC7w==" }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/es-to-primitive": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.0.tgz", @@ -21802,17 +20810,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/eslint-plugin-import/node_modules/is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/eslint-plugin-import/node_modules/is-core-module": { "version": "2.8.1", "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.1.tgz", @@ -21824,17 +20821,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/eslint-plugin-import/node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": 
"sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/eslint-plugin-import/node_modules/is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -21875,14 +20861,6 @@ "json5": "lib/cli.js" } }, - "node_modules/eslint-plugin-import/node_modules/object-inspect": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", - "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/eslint-plugin-import/node_modules/object.values": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.5.tgz", @@ -22046,17 +21024,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/eslint-plugin-jsx-a11y/node_modules/is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/eslint-plugin-jsx-a11y/node_modules/is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -22086,14 +21053,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/eslint-plugin-jsx-a11y/node_modules/object-inspect": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", - "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/eslint-plugin-react": { "version": "7.28.0", "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.28.0.tgz", @@ -22234,17 +21193,6 @@ "node": ">=4.0" } }, - "node_modules/eslint-plugin-react/node_modules/is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/eslint-plugin-react/node_modules/is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -22274,14 +21222,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/eslint-plugin-react/node_modules/object-inspect": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", - "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/eslint-plugin-react/node_modules/object.entries": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.5.tgz", @@ -22327,16 +21267,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/eslint-plugin-react/node_modules/prop-types": { - "version": "15.8.1", - "resolved": 
"https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", - "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", - "dependencies": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.13.1" - } - }, "node_modules/eslint-plugin-react/node_modules/resolve": { "version": "2.0.0-next.3", "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.3.tgz", @@ -22350,9 +21280,10 @@ } }, "node_modules/eslint-plugin-react/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -22489,28 +21420,6 @@ "ajv": "^6.9.1" } }, - "node_modules/eslint-webpack-plugin/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/eslint-webpack-plugin/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/eslint-webpack-plugin/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -22519,14 +21428,6 @@ "node": ">=8" } }, - "node_modules/eslint-webpack-plugin/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "engines": { - "node": ">=0.12.0" - } - }, "node_modules/eslint-webpack-plugin/node_modules/jest-worker": { "version": "27.4.6", "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.4.6.tgz", @@ -22545,18 +21446,6 @@ "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" }, - "node_modules/eslint-webpack-plugin/node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/eslint-webpack-plugin/node_modules/schema-utils": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", @@ -22588,17 +21477,6 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, - "node_modules/eslint-webpack-plugin/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": 
"sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, "node_modules/eslint/node_modules/ajv": { "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", @@ -22614,14 +21492,6 @@ "url": "https://github.com/sponsors/epoberezkin" } }, - "node_modules/eslint/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, "node_modules/eslint/node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -22672,19 +21542,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "node_modules/eslint/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/eslint/node_modules/debug": { "version": "4.3.3", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", @@ -22723,17 +21580,6 @@ "node": ">=10.13.0" } }, - "node_modules/eslint/node_modules/glob-parent/node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/eslint/node_modules/globals": { "version": "13.12.0", "resolved": "https://registry.npmjs.org/globals/-/globals-13.12.0.tgz", @@ -22813,14 +21659,6 @@ "node": ">= 0.8.0" } }, - "node_modules/eslint/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "engines": { - "node": ">=8" - } - }, "node_modules/eslint/node_modules/prelude-ls": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", @@ -22830,9 +21668,10 @@ } }, "node_modules/eslint/node_modules/semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "license": "ISC", "dependencies": { "lru-cache": "^6.0.0" }, @@ -22843,25 +21682,6 @@ "node": ">=10" } }, - "node_modules/eslint/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dependencies": 
{ - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/eslint/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "engines": { - "node": ">=8" - } - }, "node_modules/eslint/node_modules/strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", @@ -22917,20 +21737,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/eslint/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/espree": { "version": "9.3.0", "resolved": "https://registry.npmjs.org/espree/-/espree-9.3.0.tgz", @@ -23054,16 +21860,6 @@ "node": ">=0.8.x" } }, - "node_modules/eventsource": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz", - "integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==", - "optional": true, - "peer": true, - "engines": { - "node": ">=12.0.0" - } - }, "node_modules/evp_bytestokey": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", @@ -23106,53 +21902,6 @@ "node": ">= 0.8.0" } }, - "node_modules/expand-brackets": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-2.1.4.tgz", - "integrity": "sha1-t3c14xXOMPa27/D4OwQVGiJEliI=", - "dependencies": { - "debug": "^2.3.3", - "define-property": "^0.2.5", - "extend-shallow": "^2.0.1", - "posix-character-classes": "^0.1.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/expand-brackets/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/expand-brackets/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/expand-brackets/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/expect": { "version": "27.4.6", "resolved": "https://registry.npmjs.org/expect/-/expect-27.4.6.tgz", @@ -23491,104 +22240,6 @@ "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" }, - "node_modules/extend-shallow": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz", 
- "integrity": "sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg=", - "dependencies": { - "assign-symbols": "^1.0.0", - "is-extendable": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/extend-shallow/node_modules/is-extendable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", - "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", - "dependencies": { - "is-plain-object": "^2.0.4" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/extglob": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/extglob/-/extglob-2.0.4.tgz", - "integrity": "sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw==", - "dependencies": { - "array-unique": "^0.3.2", - "define-property": "^1.0.0", - "expand-brackets": "^2.1.4", - "extend-shallow": "^2.0.1", - "fragment-cache": "^0.2.1", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/extglob/node_modules/define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", - "dependencies": { - "is-descriptor": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/extglob/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/extglob/node_modules/is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/extglob/node_modules/is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/extglob/node_modules/is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dependencies": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/extsprintf": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", @@ -23618,59 +22269,6 @@ "node": ">=8" } }, - "node_modules/fast-glob/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/fast-glob/node_modules/fill-range": { - "version": "7.0.1", - "resolved": 
"https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/fast-glob/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/fast-glob/node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/fast-glob/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, "node_modules/fast-json-parse": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/fast-json-parse/-/fast-json-parse-1.0.3.tgz", @@ -23870,11 +22468,12 @@ "optional": true }, "node_modules/filelist": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.2.tgz", - "integrity": "sha512-z7O0IS8Plc39rTCq6i6iHxk43duYOn8uFJiWSewIq0Bww1RNybVHSCjahmcC87ZqAm4OTvFzlzeGu3XAzG1ctQ==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz", + "integrity": "sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==", + "license": "Apache-2.0", "dependencies": { - "minimatch": "^3.0.4" + "minimatch": "^5.0.1" } }, "node_modules/filesize": { @@ -23887,28 +22486,15 @@ } }, "node_modules/fill-range": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", - "integrity": "sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=", - "dependencies": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/fill-range/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "license": "MIT", "dependencies": { - "is-extendable": "^0.1.0" + "to-regex-range": "^5.0.1" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/find-cache-dir": { @@ -24011,12 +22597,20 @@ } } }, - "node_modules/for-in": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz", - "integrity": "sha1-gQaNKVqBQuwKxybG4iAMMPttXoA=", + "node_modules/for-each": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", + "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", + 
"dev": true, + "license": "MIT", + "dependencies": { + "is-callable": "^1.2.7" + }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/foreground-child": { @@ -24032,65 +22626,6 @@ "node": ">=8.0.0" } }, - "node_modules/foreground-child/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/foreground-child/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/foreground-child/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/foreground-child/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/foreground-child/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/forever-agent": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", @@ -24118,17 +22653,14 @@ "yarn": ">=1.0.0" } }, - "node_modules/form-data": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", - "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" - }, - "engines": { - "node": ">= 0.12" + "node_modules/fork-ts-checker-webpack-plugin/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" } }, "node_modules/format": { @@ -24160,17 +22692,6 @@ "url": "https://github.com/sponsors/rawify" } }, - "node_modules/fragment-cache": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/fragment-cache/-/fragment-cache-0.2.1.tgz", - "integrity": "sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk=", - "dependencies": { - "map-cache": "^0.2.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/free-style": { "version": "2.6.1", 
"resolved": "https://registry.npmjs.org/free-style/-/free-style-2.6.1.tgz", @@ -24426,12 +22947,49 @@ } }, "node_modules/gaxios/node_modules/node-fetch": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", - "integrity": "sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==", + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", "dev": true, + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, "engines": { "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/gaxios/node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "dev": true, + "license": "MIT" + }, + "node_modules/gaxios/node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "dev": true, + "license": "BSD-2-Clause" + }, + "node_modules/gaxios/node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dev": true, + "license": "MIT", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" } }, "node_modules/gcp-metadata": { @@ -24497,18 +23055,6 @@ "node": ">=8" } }, - "node_modules/gcs-resumable-upload/node_modules/duplexify": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", - "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", - "dev": true, - "dependencies": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", - "stream-shift": "^1.0.0" - } - }, "node_modules/gcs-resumable-upload/node_modules/is-obj": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", @@ -24541,25 +23087,12 @@ "pump": "^3.0.0" } }, - "node_modules/gcs-resumable-upload/node_modules/readable-stream": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", - "integrity": "sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ==", - "dev": true, - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/gcs-resumable-upload/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -24590,15 
+23123,21 @@ } }, "node_modules/get-intrinsic": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", - "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", - "has-proto": "^1.0.1", - "has-symbols": "^1.0.3", - "hasown": "^2.0.0" + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" }, "engines": { "node": ">= 0.4" @@ -24620,6 +23159,19 @@ "node": ">=8.0.0" } }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/get-stream": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", @@ -24647,14 +23199,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/get-value": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz", - "integrity": "sha1-3BXKHGcjh8p2vTesCjlbogQqLCg=", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/getpass": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", @@ -24751,17 +23295,6 @@ "node": ">= 6" } }, - "node_modules/glob-parent/node_modules/is-glob": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", - "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/glob-promise": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/glob-promise/-/glob-promise-3.4.0.tgz", @@ -24950,11 +23483,12 @@ "integrity": "sha512-OVPzcSWIAJ+d5yiHyeaLrdufQtrvaBrF4JQg+z8ynTkbO3uFcujqXszTumqg1cGsAsjkWnI+M5B1xZ19yR4Wyg==" }, "node_modules/gopd": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", - "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", - "dependencies": { - "get-intrinsic": "^1.1.3" + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -25165,6 +23699,13 @@ "node": ">=0.10.0" } }, + "node_modules/has-own": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-own/-/has-own-1.0.1.tgz", + "integrity": "sha512-RDKhzgQTQfMaLvIFhjahU+2gGnRBK6dYOd5Gd9BzkmnBneOCRYjRC003RIMrdAbH52+l+CnMS4bBCXGer8tEhg==", + "deprecated": "This project is not maintained. 
Use Object.hasOwn() instead.", + "license": "MIT" + }, "node_modules/has-property-descriptors": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", @@ -25176,21 +23717,11 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/has-proto": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", - "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -25199,11 +23730,12 @@ } }, "node_modules/has-tostringtag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", - "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", "dependencies": { - "has-symbols": "^1.0.2" + "has-symbols": "^1.0.3" }, "engines": { "node": ">= 0.4" @@ -25218,42 +23750,6 @@ "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=", "dev": true }, - "node_modules/has-value": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-value/-/has-value-1.0.0.tgz", - "integrity": "sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc=", - "dependencies": { - "get-value": "^2.0.6", - "has-values": "^1.0.0", - "isobject": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/has-values": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-values/-/has-values-1.0.0.tgz", - "integrity": "sha1-lbC2P+whRmGab+V/51Yo1aOe/k8=", - "dependencies": { - "is-number": "^3.0.0", - "kind-of": "^4.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/has-values/node_modules/kind-of": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-4.0.0.tgz", - "integrity": "sha1-IIE989cSkosgc3hpGkUGb65y3Vc=", - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/hash-base": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.0.4.tgz", @@ -25814,17 +24310,27 @@ } }, "node_modules/http-proxy-middleware": { - "version": "0.19.1", - "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz", - "integrity": "sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q==", + "version": "2.0.9", + "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.9.tgz", + "integrity": "sha512-c1IyJYLYppU574+YI7R4QyX2ystMtVXZwIdzazUIPIJsHuWNd+mho2j+bKoHftndicGj9yh+xjd+l0yj7VeT1Q==", + "license": "MIT", "dependencies": { - "http-proxy": "^1.17.0", - "is-glob": 
"^4.0.0", - "lodash": "^4.17.11", - "micromatch": "^3.1.10" + "@types/http-proxy": "^1.17.8", + "http-proxy": "^1.18.1", + "is-glob": "^4.0.1", + "is-plain-obj": "^3.0.0", + "micromatch": "^4.0.2" }, "engines": { - "node": ">=4.0.0" + "node": ">=12.0.0" + }, + "peerDependencies": { + "@types/express": "^4.17.13" + }, + "peerDependenciesMeta": { + "@types/express": { + "optional": true + } } }, "node_modules/http-proxy/node_modules/eventemitter3": { @@ -25905,9 +24411,10 @@ } }, "node_modules/hyphenate-style-name": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/hyphenate-style-name/-/hyphenate-style-name-1.0.4.tgz", - "integrity": "sha512-ygGZLjmXfPHj+ZWh6LwbC37l43MhfztxetbFCoYTM2VjkIUpeHgSNn7QIyVFj7YQ1Wl9Cbw5sholVJPzWvC2MQ==" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/hyphenate-style-name/-/hyphenate-style-name-1.1.0.tgz", + "integrity": "sha512-WDC/ui2VVRrz3jOVi+XtjqkDjiVjTtFaAGiW37k6b+ohyQ5wYDOGkvCZa8+H0nx3gyvv0+BST9xuOgIyGQ00gw==", + "license": "BSD-3-Clause" }, "node_modules/iconv-lite": { "version": "0.4.24", @@ -26078,6 +24585,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/indefinite-observable/-/indefinite-observable-1.0.2.tgz", "integrity": "sha512-Mps0898zEduHyPhb7UCgNmfzlqNZknVmaFz5qzr0mm04YQ5FGLhAyK/dJ+NaRxGyR6juQXIxh5Ev0xx+qq0nYA==", + "license": "Apache-2.0", "dependencies": { "symbol-observable": "1.2.0" } @@ -26123,21 +24631,6 @@ "integrity": "sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==", "dev": true }, - "node_modules/internal-ip": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/internal-ip/-/internal-ip-4.3.0.tgz", - "integrity": "sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "default-gateway": "^4.2.0", - "ipaddr.js": "^1.9.0" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/internal-slot": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz", @@ -26174,17 +24667,6 @@ "integrity": "sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo=", "dev": true }, - "node_modules/ip-regex": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz", - "integrity": "sha512-58yWmlHpp7VYfcdTwMTvwMmqx/Elfxjd9RXTDyMsbL7lLWmhMylLEqiYVLKuLzOZqVgiWXD9MfR62Vv89VRxkw==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=4" - } - }, "node_modules/ipaddr.js": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", @@ -26202,28 +24684,6 @@ "node": ">=8" } }, - "node_modules/is-accessor-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", - "integrity": "sha1-qeEss66Nh2cn7u84Q/igiXtcmNY=", - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-accessor-descriptor/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/is-alphabetical": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-1.0.4.tgz", @@ -26297,17 +24757,16 @@ "node": ">= 0.4" } }, - "node_modules/is-buffer": { - "version": "1.1.6", - 
"resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", - "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" - }, "node_modules/is-callable": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.4.tgz", - "integrity": "sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA==", + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "license": "MIT", "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-ci": { @@ -26333,28 +24792,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-data-descriptor": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", - "integrity": "sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y=", - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-data-descriptor/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/is-date-object": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.1.tgz", @@ -26373,27 +24810,6 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/is-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.6.tgz", - "integrity": "sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==", - "dependencies": { - "is-accessor-descriptor": "^0.1.6", - "is-data-descriptor": "^0.1.4", - "kind-of": "^5.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-descriptor/node_modules/kind-of": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-5.1.0.tgz", - "integrity": "sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/is-docker": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", @@ -26418,14 +24834,6 @@ "is-window": "^1.0.2" } }, - "node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -26457,9 +24865,10 @@ } }, "node_modules/is-glob": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.0.tgz", - "integrity": "sha1-lSHHaEXMJhCoUgPd8ICpWML/q8A=", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "license": "MIT", "dependencies": { "is-extglob": "^2.1.1" }, @@ -26480,7 +24889,8 @@ "node_modules/is-in-browser": { "version": "1.1.3", "resolved": 
"https://registry.npmjs.org/is-in-browser/-/is-in-browser-1.1.3.tgz", - "integrity": "sha1-Vv9NtoOgeMYILrldrX3GLh0E+DU=" + "integrity": "sha512-FeXIBgG/CPGd/WUxuEyvgGTEfwiG9Z4EKGxjNMRqviiIIfsmgrpnHLffEDdwUHqNva1VEW91o3xBT/m8Elgl9g==", + "license": "MIT" }, "node_modules/is-lower-case": { "version": "1.1.3", @@ -26516,17 +24926,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-number": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=", - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/is-number-object": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.3.tgz", @@ -26536,17 +24935,6 @@ "node": ">= 0.4" } }, - "node_modules/is-number/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/is-obj": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz", @@ -26564,49 +24952,11 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-path-cwd": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-2.2.0.tgz", - "integrity": "sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/is-path-in-cwd": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz", - "integrity": "sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "is-path-inside": "^2.1.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/is-path-inside": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-2.1.0.tgz", - "integrity": "sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "path-is-inside": "^1.0.2" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/is-plain-obj": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-3.0.0.tgz", "integrity": "sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==", + "license": "MIT", "engines": { "node": ">=10" }, @@ -26678,6 +25028,7 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=", + "dev": true, "engines": { "node": ">=0.10.0" } @@ -26708,6 +25059,22 @@ "node": ">= 0.4" } }, + "node_modules/is-typed-array": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", + "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "which-typed-array": "^1.1.16" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-typedarray": { "version": "1.0.0", "resolved": 
"https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", @@ -26749,14 +25116,6 @@ "integrity": "sha1-LIlspT25feRdPDMTOmXYyfVjSA0=", "dev": true }, - "node_modules/is-windows": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", - "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/is-word-character": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-word-character/-/is-word-character-1.0.4.tgz", @@ -26805,6 +25164,48 @@ "whatwg-fetch": ">=0.10.0" } }, + "node_modules/isomorphic-fetch/node_modules/node-fetch": { + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/isomorphic-fetch/node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "license": "MIT" + }, + "node_modules/isomorphic-fetch/node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "license": "BSD-2-Clause" + }, + "node_modules/isomorphic-fetch/node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "license": "MIT", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, "node_modules/isstream": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", @@ -26834,10 +25235,11 @@ } }, "node_modules/istanbul-lib-instrument/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -26878,9 +25280,10 @@ } }, "node_modules/istanbul-lib-report/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -26971,26 +25374,27 @@ } }, "node_modules/jake": { - "version": "10.8.2", - "resolved": 
"https://registry.npmjs.org/jake/-/jake-10.8.2.tgz", - "integrity": "sha512-eLpKyrfG3mzvGE2Du8VoPbeSkRry093+tyNjdYaBbJS9v17knImYGNXQCUV0gLxQtF82m3E8iRb/wdSQZLoq7A==", + "version": "10.9.4", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.4.tgz", + "integrity": "sha512-wpHYzhxiVQL+IV05BLE2Xn34zW1S223hvjtqk0+gsPrwd/8JNLXJgZZM/iPFsYc1xyphF+6M6EvdE5E9MBGkDA==", + "license": "Apache-2.0", "dependencies": { - "async": "0.9.x", - "chalk": "^2.4.2", - "filelist": "^1.0.1", - "minimatch": "^3.0.4" + "async": "^3.2.6", + "filelist": "^1.0.4", + "picocolors": "^1.1.1" }, "bin": { "jake": "bin/cli.js" }, "engines": { - "node": "*" + "node": ">=10" } }, "node_modules/jake/node_modules/async": { - "version": "0.9.2", - "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz", - "integrity": "sha1-rqdNXmHB+JlhO/ZL2mbUx48v0X0=" + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", + "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==", + "license": "MIT" }, "node_modules/jest": { "version": "27.4.7", @@ -27105,19 +25509,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "node_modules/jest-changed-files/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/jest-changed-files/node_modules/execa": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", @@ -27189,25 +25580,6 @@ "node": ">=8" } }, - "node_modules/jest-changed-files/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/jest-changed-files/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "engines": { - "node": ">=8" - } - }, "node_modules/jest-changed-files/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -27219,20 +25591,6 @@ "node": ">=8" } }, - "node_modules/jest-changed-files/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/jest-circus": { "version": "27.4.6", "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-27.4.6.tgz", @@ -27323,14 +25681,6 @@ "@types/yargs-parser": "*" } }, - "node_modules/jest-circus/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, "node_modules/jest-circus/node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -27535,14 +25885,6 @@ "@types/yargs-parser": "*" } }, - "node_modules/jest-config/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, "node_modules/jest-config/node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -27557,17 +25899,6 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-config/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/jest-config/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -27612,17 +25943,6 @@ "node": ">=0.10.0" } }, - "node_modules/jest-config/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/jest-config/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -27631,14 +25951,6 @@ "node": ">=8" } }, - "node_modules/jest-config/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "engines": { - "node": ">=0.12.0" - } - }, "node_modules/jest-config/node_modules/jest-regex-util": { "version": "27.4.0", "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-27.4.0.tgz", @@ -27663,18 +25975,6 @@ "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-config/node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/jest-config/node_modules/pretty-format": { "version": "27.4.6", "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.4.6.tgz", @@ -27715,17 +26015,6 @@ "node": ">=8" } }, - "node_modules/jest-config/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dependencies": { - "is-number": "^7.0.0" - }, - 
"engines": { - "node": ">=8.0" - } - }, "node_modules/jest-diff": { "version": "27.4.6", "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-27.4.6.tgz", @@ -27740,14 +26029,6 @@ "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-diff/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, "node_modules/jest-diff/node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -27906,14 +26187,6 @@ "@types/yargs-parser": "*" } }, - "node_modules/jest-each/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, "node_modules/jest-each/node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -28116,18 +26389,6 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-environment-jsdom-sixteen/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/jest-environment-jsdom-sixteen/node_modules/chalk": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", @@ -28159,18 +26420,6 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "node_modules/jest-environment-jsdom-sixteen/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/jest-environment-jsdom-sixteen/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -28180,15 +26429,6 @@ "node": ">=8" } }, - "node_modules/jest-environment-jsdom-sixteen/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "engines": { - "node": ">=0.12.0" - } - }, "node_modules/jest-environment-jsdom-sixteen/node_modules/jest-message-util": { "version": "25.5.0", "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-25.5.0.tgz", @@ -28251,24 +26491,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/jest-environment-jsdom-sixteen/node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dev": true, - "dependencies": { - 
"braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/jest-environment-jsdom-sixteen/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -28285,18 +26513,6 @@ "node": ">=8" } }, - "node_modules/jest-environment-jsdom-sixteen/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, "node_modules/jest-environment-jsdom/node_modules/@jest/types": { "version": "27.4.2", "resolved": "https://registry.npmjs.org/@jest/types/-/types-27.4.2.tgz", @@ -28629,18 +26845,6 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-haste-map/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/jest-haste-map/node_modules/chalk": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.1.tgz", @@ -28675,18 +26879,6 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "node_modules/jest-haste-map/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/jest-haste-map/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -28696,28 +26888,6 @@ "node": ">=8" } }, - "node_modules/jest-haste-map/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/jest-haste-map/node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dev": true, - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/jest-haste-map/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -28730,18 +26900,6 @@ "node": ">=8" } }, - 
"node_modules/jest-haste-map/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, "node_modules/jest-jasmine2": { "version": "27.4.6", "resolved": "https://registry.npmjs.org/jest-jasmine2/-/jest-jasmine2-27.4.6.tgz", @@ -28843,14 +27001,6 @@ "@types/yargs-parser": "*" } }, - "node_modules/jest-jasmine2/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, "node_modules/jest-jasmine2/node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -28985,14 +27135,6 @@ "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-leak-detector/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, "node_modules/jest-leak-detector/node_modules/ansi-styles": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", @@ -29036,14 +27178,6 @@ "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-matcher-utils/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, "node_modules/jest-matcher-utils/node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -29192,25 +27326,6 @@ "@types/yargs-parser": "*" } }, - "node_modules/jest-message-util/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/jest-message-util/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/jest-message-util/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -29267,17 +27382,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "node_modules/jest-message-util/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dependencies": { - 
"to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/jest-message-util/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -29286,26 +27390,6 @@ "node": ">=8" } }, - "node_modules/jest-message-util/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/jest-message-util/node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/jest-message-util/node_modules/pretty-format": { "version": "27.4.6", "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.4.6.tgz", @@ -29354,17 +27438,6 @@ "node": ">=8" } }, - "node_modules/jest-message-util/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, "node_modules/jest-mock": { "version": "27.4.6", "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-27.4.6.tgz", @@ -29679,17 +27752,6 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-resolve/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/jest-resolve/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -29726,17 +27788,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "node_modules/jest-resolve/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/jest-resolve/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -29756,14 +27807,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/jest-resolve/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "engines": { - "node": ">=0.12.0" - } - }, "node_modules/jest-resolve/node_modules/jest-haste-map": { "version": "27.4.6", "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-27.4.6.tgz", @@ -29868,18 
+27911,6 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, - "node_modules/jest-resolve/node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/jest-resolve/node_modules/resolve": { "version": "1.21.0", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.21.0.tgz", @@ -29907,17 +27938,6 @@ "node": ">=8" } }, - "node_modules/jest-resolve/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, "node_modules/jest-runner": { "version": "27.4.6", "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-27.4.6.tgz", @@ -30065,17 +28085,6 @@ "node": ">=8" } }, - "node_modules/jest-runner/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/jest-runner/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -30112,17 +28121,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "node_modules/jest-runner/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/jest-runner/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -30131,14 +28129,6 @@ "node": ">=8" } }, - "node_modules/jest-runner/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "engines": { - "node": ">=0.12.0" - } - }, "node_modules/jest-runner/node_modules/istanbul-lib-coverage": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz", @@ -30250,18 +28240,6 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, - "node_modules/jest-runner/node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/jest-runner/node_modules/pirates": { "version": "4.0.4", "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.4.tgz", 
@@ -30271,9 +28249,10 @@ } }, "node_modules/jest-runner/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -30297,17 +28276,6 @@ "node": ">=8" } }, - "node_modules/jest-runner/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, "node_modules/jest-runtime": { "version": "27.4.6", "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-27.4.6.tgz", @@ -30468,17 +28436,6 @@ "node": ">=8" } }, - "node_modules/jest-runtime/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/jest-runtime/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -30515,19 +28472,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "node_modules/jest-runtime/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/jest-runtime/node_modules/execa": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", @@ -30550,17 +28494,6 @@ "url": "https://github.com/sindresorhus/execa?sponsor=1" } }, - "node_modules/jest-runtime/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/jest-runtime/node_modules/get-stream": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", @@ -30580,14 +28513,6 @@ "node": ">=8" } }, - "node_modules/jest-runtime/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "engines": { - "node": ">=0.12.0" - } - }, "node_modules/jest-runtime/node_modules/is-stream": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", @@ -30710,18 +28635,6 @@ "url": 
"https://github.com/chalk/supports-color?sponsor=1" } }, - "node_modules/jest-runtime/node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/jest-runtime/node_modules/npm-run-path": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", @@ -30750,32 +28663,14 @@ } }, "node_modules/jest-runtime/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", "bin": { "semver": "bin/semver.js" } }, - "node_modules/jest-runtime/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/jest-runtime/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "engines": { - "node": ">=8" - } - }, "node_modules/jest-runtime/node_modules/source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", @@ -30803,31 +28698,6 @@ "node": ">=8" } }, - "node_modules/jest-runtime/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/jest-runtime/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/jest-serializer": { "version": "26.6.2", "resolved": "https://registry.npmjs.org/jest-serializer/-/jest-serializer-26.6.2.tgz", @@ -30934,14 +28804,6 @@ "@types/yargs-parser": "*" } }, - "node_modules/jest-snapshot/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, "node_modules/jest-snapshot/node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -30971,17 +28833,6 @@ "node": ">=8" } }, - 
"node_modules/jest-snapshot/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/jest-snapshot/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -31018,17 +28869,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "node_modules/jest-snapshot/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/jest-snapshot/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -31037,14 +28877,6 @@ "node": ">=8" } }, - "node_modules/jest-snapshot/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "engines": { - "node": ">=0.12.0" - } - }, "node_modules/jest-snapshot/node_modules/istanbul-lib-coverage": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz", @@ -31069,9 +28901,10 @@ } }, "node_modules/jest-snapshot/node_modules/istanbul-lib-instrument/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -31164,18 +28997,6 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, - "node_modules/jest-snapshot/node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/jest-snapshot/node_modules/pirates": { "version": "4.0.4", "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.4.tgz", @@ -31214,9 +29035,10 @@ "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==" }, "node_modules/jest-snapshot/node_modules/semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": 
"sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "license": "ISC", "dependencies": { "lru-cache": "^6.0.0" }, @@ -31246,17 +29068,6 @@ "node": ">=8" } }, - "node_modules/jest-snapshot/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, "node_modules/jest-util": { "version": "26.6.2", "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-26.6.2.tgz", @@ -31323,18 +29134,6 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-util/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/jest-util/node_modules/chalk": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.1.tgz", @@ -31369,18 +29168,6 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "node_modules/jest-util/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/jest-util/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -31390,28 +29177,6 @@ "node": ">=8" } }, - "node_modules/jest-util/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/jest-util/node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dev": true, - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/jest-util/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -31424,18 +29189,6 @@ "node": ">=8" } }, - "node_modules/jest-util/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, "node_modules/jest-validate": { "version": "27.4.6", "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-27.4.6.tgz", @@ -31483,14 +29236,6 @@ "@types/yargs-parser": "*" } }, - 
"node_modules/jest-validate/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, "node_modules/jest-validate/node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -32218,16 +29963,21 @@ } }, "node_modules/jsdom/node_modules/form-data": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", - "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "version": "2.5.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.4.tgz", + "integrity": "sha512-Y/3MmRiR8Nd+0CUtrbvcKtKzLWiUfpQ7DFVggH8PwmGt/0r7RSy32GuP4hpCJlQNEBusisSx1DLtD8uD386HJQ==", + "deprecated": "This version has an incorrect dependency; please use v2.5.5", + "license": "MIT", "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", - "mime-types": "^2.1.12" + "es-set-tostringtag": "^2.1.0", + "has-own": "^1.0.1", + "mime-types": "^2.1.35", + "safe-buffer": "^5.2.1" }, "engines": { - "node": ">= 6" + "node": ">= 0.12" } }, "node_modules/jsdom/node_modules/http-proxy-agent": { @@ -32253,6 +30003,26 @@ "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==" }, + "node_modules/jsdom/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, "node_modules/jsdom/node_modules/tough-cookie": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz", @@ -32307,9 +30077,10 @@ "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==" }, "node_modules/json-schema": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", + "license": "(AFL-2.1 OR BSD-3-Clause)" }, "node_modules/json-schema-traverse": { "version": "1.0.0", @@ -32383,6 +30154,7 @@ "resolved": "https://registry.npmjs.org/jss/-/jss-9.8.7.tgz", "integrity": "sha512-awj3XRZYxbrmmrx9LUSj5pXSUfm12m8xzi/VKeqI1ZwWBtQ0kVPTs3vYs32t4rFw83CgFDukA8wKzOE9sMQnoQ==", "hasInstallScript": true, + "license": "MIT", "dependencies": { "is-in-browser": "^1.1.3", "symbol-observable": "^1.1.0", @@ -32396,6 +30168,7 @@ "version": "6.1.0", "resolved": "https://registry.npmjs.org/jss-camel-case/-/jss-camel-case-6.1.0.tgz", "integrity": "sha512-HPF2Q7wmNW1t79mCqSeU2vdd/vFFGpkazwvfHMOhPlMgXrJDzdj9viA2SaHk9ZbD5pfL63a8ylp4++irYbbzMQ==", + "license": "MIT", "dependencies": { "hyphenate-style-name": "^1.0.2" }, @@ -32407,6 
+30180,7 @@ "version": "8.0.2", "resolved": "https://registry.npmjs.org/jss-default-unit/-/jss-default-unit-8.0.2.tgz", "integrity": "sha512-WxNHrF/18CdoAGw2H0FqOEvJdREXVXLazn7PQYU7V6/BWkCV0GkmWsppNiExdw8dP4TU1ma1dT9zBNJ95feLmg==", + "license": "MIT", "peerDependencies": { "jss": "^9.4.0" } @@ -32415,6 +30189,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/jss-global/-/jss-global-3.0.0.tgz", "integrity": "sha512-wxYn7vL+TImyQYGAfdplg7yaxnPQ9RaXY/cIA8hawaVnmmWxDHzBK32u1y+RAvWboa3lW83ya3nVZ/C+jyjZ5Q==", + "license": "MIT", "peerDependencies": { "jss": "^9.0.0" } @@ -32423,6 +30198,7 @@ "version": "6.0.1", "resolved": "https://registry.npmjs.org/jss-nested/-/jss-nested-6.0.1.tgz", "integrity": "sha512-rn964TralHOZxoyEgeq3hXY8hyuCElnvQoVrQwKHVmu55VRDd6IqExAx9be5HgK0yN/+hQdgAXQl/GUrBbbSTA==", + "license": "MIT", "dependencies": { "warning": "^3.0.0" }, @@ -32433,7 +30209,8 @@ "node_modules/jss-nested/node_modules/warning": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/warning/-/warning-3.0.0.tgz", - "integrity": "sha1-MuU3fLVy3kqwR1O9+IIcAe1gW3w=", + "integrity": "sha512-jMBt6pUrKn5I+OGgtQ4YZLdhIeJmObddh6CsibPxyQ5yPZm1XExSyzC1LCNX7BzhxWgiHmizBWJTHJIjMjTQYQ==", + "license": "BSD-3-Clause", "dependencies": { "loose-envify": "^1.0.0" } @@ -32442,6 +30219,7 @@ "version": "6.0.0", "resolved": "https://registry.npmjs.org/jss-props-sort/-/jss-props-sort-6.0.0.tgz", "integrity": "sha512-E89UDcrphmI0LzmvYk25Hp4aE5ZBsXqMWlkFXS0EtPkunJkRr+WXdCNYbXbksIPnKlBenGB9OxzQY+mVc70S+g==", + "license": "MIT", "peerDependencies": { "jss": "^9.0.0" } @@ -32450,6 +30228,7 @@ "version": "7.0.0", "resolved": "https://registry.npmjs.org/jss-vendor-prefixer/-/jss-vendor-prefixer-7.0.0.tgz", "integrity": "sha512-Agd+FKmvsI0HLcYXkvy8GYOw3AAASBUpsmIRvVQheps+JWaN892uFOInTr0DRydwaD91vSSUCU4NssschvF7MA==", + "license": "MIT", "dependencies": { "css-vendor": "^0.3.8" }, @@ -32460,7 +30239,8 @@ "node_modules/jss/node_modules/warning": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/warning/-/warning-3.0.0.tgz", - "integrity": "sha1-MuU3fLVy3kqwR1O9+IIcAe1gW3w=", + "integrity": "sha512-jMBt6pUrKn5I+OGgtQ4YZLdhIeJmObddh6CsibPxyQ5yPZm1XExSyzC1LCNX7BzhxWgiHmizBWJTHJIjMjTQYQ==", + "license": "BSD-3-Clause", "dependencies": { "loose-envify": "^1.0.0" } @@ -32507,14 +30287,6 @@ "safe-buffer": "^5.0.1" } }, - "node_modules/killable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/killable/-/killable-1.0.1.tgz", - "integrity": "sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg==", - "dev": true, - "optional": true, - "peer": true - }, "node_modules/kind-of": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", @@ -32755,21 +30527,6 @@ "node": ">=0.8.6" } }, - "node_modules/loglevel": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.9.1.tgz", - "integrity": "sha512-hP3I3kCrDIMuRwAwHltphhDM1r8i55H33GgqjXbrisuJhF4kRhW1dNuxsRklp4bXl8DSdLaNLuiL4A/LWRfxvg==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">= 0.6.0" - }, - "funding": { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/loglevel" - } - }, "node_modules/lolex": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/lolex/-/lolex-5.1.2.tgz", @@ -32865,6 +30622,16 @@ "node": ">=6" } }, + "node_modules/make-dir/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": 
"sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, "node_modules/make-error": { "version": "1.3.6", "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", @@ -32878,31 +30645,12 @@ "tmpl": "1.0.x" } }, - "node_modules/map-cache": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz", - "integrity": "sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8=", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/map-or-similar": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/map-or-similar/-/map-or-similar-1.5.0.tgz", "integrity": "sha1-beJlMXSt+12e3DPGnT6Sobdvrwg=", "dev": true }, - "node_modules/map-visit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/map-visit/-/map-visit-1.0.0.tgz", - "integrity": "sha1-7Nyo8TFE5mDxtb1B8S80edmN+48=", - "dependencies": { - "object-visit": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/markdown-escapes": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/markdown-escapes/-/markdown-escapes-1.0.4.tgz", @@ -32937,6 +30685,15 @@ "remove-accents": "0.4.2" } }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/md5.js": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz", @@ -33086,26 +30843,16 @@ "dev": true }, "node_modules/micromatch": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", - "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", - "dependencies": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - "fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.2" + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" }, "engines": { - "node": ">=0.10.0" + "node": ">=8.6" } }, "node_modules/microseconds": { @@ -33205,17 +30952,6 @@ "webpack": "^5.0.0" } }, - "node_modules/mini-css-extract-plugin/node_modules/ajv-keywords": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", - "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", - "dependencies": { - "fast-deep-equal": "^3.1.3" - }, - "peerDependencies": { - "ajv": "^8.8.2" - } - }, "node_modules/mini-css-extract-plugin/node_modules/schema-utils": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", @@ -33246,9 +30982,10 @@ "dev": true }, "node_modules/minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": 
"sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.5.tgz", + "integrity": "sha512-tUpxzX0VAzJHjLu0xUfFv1gwVp9ba3IOuRAVH2EGuRW8a5emA2FlACLqiT/lDVtS1W+TGNwqz3sWaNyLgDJWuw==", + "license": "ISC", "dependencies": { "brace-expansion": "^1.1.7" }, @@ -33347,29 +31084,6 @@ "node": ">=4.0.0" } }, - "node_modules/mixin-deep": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz", - "integrity": "sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==", - "dependencies": { - "for-in": "^1.0.2", - "is-extendable": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/mixin-deep/node_modules/is-extendable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", - "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", - "dependencies": { - "is-plain-object": "^2.0.4" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/mkdirp": { "version": "0.5.5", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", @@ -33406,29 +31120,6 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" }, - "node_modules/multicast-dns": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/multicast-dns/-/multicast-dns-6.2.3.tgz", - "integrity": "sha512-ji6J5enbMyGRHIAkAOu3WdV8nggqviKCEKtXcOqfphZZtQrmHKycfynJ2V7eVPUA4NhJ6V7Wf4TmGbTwKE9B6g==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "dns-packet": "^1.3.1", - "thunky": "^1.0.2" - }, - "bin": { - "multicast-dns": "cli.js" - } - }, - "node_modules/multicast-dns-service-types": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz", - "integrity": "sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE=", - "dev": true, - "optional": true, - "peer": true - }, "node_modules/nan": { "version": "2.19.0", "resolved": "https://registry.npmjs.org/nan/-/nan-2.19.0.tgz", @@ -33462,27 +31153,6 @@ "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" } }, - "node_modules/nanomatch": { - "version": "1.2.13", - "resolved": "https://registry.npmjs.org/nanomatch/-/nanomatch-1.2.13.tgz", - "integrity": "sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==", - "dependencies": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "fragment-cache": "^0.2.1", - "is-windows": "^1.0.2", - "kind-of": "^6.0.2", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/native-url": { "version": "0.2.6", "resolved": "https://registry.npmjs.org/native-url/-/native-url-0.2.6.tgz", @@ -33516,6 +31186,16 @@ "nearleyc": "bin/nearleyc.js" } }, + "node_modules/nearley/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, "node_modules/negotiator": { "version": "0.6.2", "resolved": 
"https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz", @@ -33535,12 +31215,6 @@ "integrity": "sha512-AO81vsIO1k1sM4Zrd6Hu7regmJN1NSiAja10gc4bX3F0wd+9rQmcuHQaHVQCYIEC8iFXnE+mavh23GOt7wBgug==", "dev": true }, - "node_modules/nice-try": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", - "dev": true - }, "node_modules/no-case": { "version": "2.3.2", "resolved": "https://registry.npmjs.org/no-case/-/no-case-2.3.2.tgz", @@ -33562,26 +31236,6 @@ "node": ">= 0.10.5" } }, - "node_modules/node-fetch": { - "version": "1.7.3", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-1.7.3.tgz", - "integrity": "sha512-NhZ4CsKx7cYm2vSrBAr2PvFOe6sWDf0UYLRqA6svUYg7+/TSfVAu49jYC4BvQ4Sms9SZgdqGBgroqfDhJdTyKQ==", - "dependencies": { - "encoding": "^0.1.11", - "is-stream": "^1.0.1" - } - }, - "node_modules/node-forge": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", - "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">= 6.0.0" - } - }, "node_modules/node-int64": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", @@ -33695,11 +31349,15 @@ } }, "node_modules/nth-check": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-1.0.2.tgz", - "integrity": "sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.0.1.tgz", + "integrity": "sha512-it1vE95zF6dTT9lBsYbxvqh0Soy4SPowchj0UBGj/V6cTPnXXtQOPUbhZ6CmGzAD/rW22LQK6E96pcdJXk4A4w==", + "license": "BSD-2-Clause", "dependencies": { - "boolbase": "~1.0.0" + "boolbase": "^1.0.0" + }, + "funding": { + "url": "https://github.com/fb55/nth-check?sponsor=1" } }, "node_modules/num2fraction": { @@ -33738,41 +31396,6 @@ "node": ">=0.10.0" } }, - "node_modules/object-copy": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/object-copy/-/object-copy-0.1.0.tgz", - "integrity": "sha1-fn2Fi3gb18mRpBupde04EnVOmYw=", - "dependencies": { - "copy-descriptor": "^0.1.0", - "define-property": "^0.2.5", - "kind-of": "^3.0.3" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-copy/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-copy/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/object-hash": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.2.0.tgz", @@ -33782,10 +31405,16 @@ } }, "node_modules/object-inspect": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.6.0.tgz", - "integrity": "sha512-GJzfBZ6DgDAmnuaM3104jR4s1Myxr3Y3zfIyN4z3UdqN69oSRacNK8UhnobDdC+7J2AHCjGwxQubNJfE70SXXQ==", - "dev": true + 
"version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/object-is": { "version": "1.0.1", @@ -33804,17 +31433,6 @@ "node": ">= 0.4" } }, - "node_modules/object-visit": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/object-visit/-/object-visit-1.0.1.tgz", - "integrity": "sha1-95xEk68MU3e1n+OdOV5BBC3QRbs=", - "dependencies": { - "isobject": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/object.assign": { "version": "4.1.5", "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", @@ -33956,17 +31574,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/object.hasown/node_modules/is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/object.hasown/node_modules/is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -33996,25 +31603,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/object.hasown/node_modules/object-inspect": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", - "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object.pick": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz", - "integrity": "sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c=", - "dependencies": { - "isobject": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/object.values": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.0.tgz", @@ -34052,9 +31640,9 @@ } }, "node_modules/on-headers": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", - "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.1.0.tgz", + "integrity": "sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A==", "engines": { "node": ">= 0.8" } @@ -34106,31 +31694,6 @@ "opener": "bin/opener-bin.js" } }, - "node_modules/opn": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/opn/-/opn-5.5.0.tgz", - "integrity": "sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "is-wsl": "^1.1.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/opn/node_modules/is-wsl": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-1.1.0.tgz", - "integrity": "sha512-gfygJYZ2gLTDlmbWMI0CE2MwnFzSN/2SZfkMlItC4K/JBlsWVDB0bO6XhqcY13YXE7iMcAJnzTCJjPiTeJJ0Mw==", - "dev": true, - "optional": true, - "peer": true, - 
"engines": { - "node": ">=4" - } - }, "node_modules/optionator": { "version": "0.8.3", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", @@ -34265,20 +31828,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/p-retry": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-3.0.1.tgz", - "integrity": "sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "retry": "^0.12.0" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/p-timeout": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-3.2.0.tgz", @@ -34430,14 +31979,6 @@ "upper-case-first": "^1.1.0" } }, - "node_modules/pascalcase": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/pascalcase/-/pascalcase-0.1.1.tgz", - "integrity": "sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ=", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/path-browserify": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-0.0.1.tgz", @@ -34475,14 +32016,6 @@ "node": ">=0.10.0" } }, - "node_modules/path-is-inside": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", - "integrity": "sha512-DUWJr3+ULp4zXmol/SZkFf3JGsS9/SIv+Y3Rt93/UjPpDpklB5f1er4O3POIbUuUJ3FXgqte2Q7SrU6zAqwk8w==", - "dev": true, - "optional": true, - "peer": true - }, "node_modules/path-key": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", @@ -34498,9 +32031,10 @@ "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" }, "node_modules/path-to-regexp": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.7.0.tgz", - "integrity": "sha1-Wf3g9DW62suhA6hOnTvGTpa5k30=", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.9.0.tgz", + "integrity": "sha512-xIp7/apCFJuUHdDLWe8O1HIkb0kQrOMb/0u6FXQjemHn/ii5LrIzU6bdECnsiTF/GjZkMEKg1xdiZwNqDYlZ6g==", + "license": "MIT", "dependencies": { "isarray": "0.0.1" } @@ -34519,35 +32053,94 @@ } }, "node_modules/pbkdf2": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz", - "integrity": "sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.3.tgz", + "integrity": "sha512-wfRLBZ0feWRhCIkoMB6ete7czJcnNnqRpcoWQBLqatqXXmelSRqfdDK4F3u9T2s2cXas/hQJcryI/4lAL+XTlA==", "dev": true, + "license": "MIT", "dependencies": { - "create-hash": "^1.1.2", - "create-hmac": "^1.1.4", - "ripemd160": "^2.0.1", - "safe-buffer": "^5.0.1", - "sha.js": "^2.4.8" + "create-hash": "~1.1.3", + "create-hmac": "^1.1.7", + "ripemd160": "=2.0.1", + "safe-buffer": "^5.2.1", + "sha.js": "^2.4.11", + "to-buffer": "^1.2.0" }, "engines": { "node": ">=0.12" } }, + "node_modules/pbkdf2/node_modules/create-hash": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.1.3.tgz", + "integrity": "sha512-snRpch/kwQhcdlnZKYanNF1m0RDlrCdSKQaH87w1FCFPVPNCQ/Il9QJKAX2jVBZddRdaHBMC+zXa9Gw9tmkNUA==", + "dev": true, + "license": "MIT", + "dependencies": { + "cipher-base": "^1.0.1", + "inherits": "^2.0.1", + "ripemd160": "^2.0.0", + "sha.js": "^2.4.0" + } + }, + 
"node_modules/pbkdf2/node_modules/hash-base": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-2.0.2.tgz", + "integrity": "sha512-0TROgQ1/SxE6KmxWSvXHvRj90/Xo1JvZShofnYF+f6ZsGtR4eES7WfrQzPalmyagfKZCXpVnitiRebZulWsbiw==", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.1" + } + }, + "node_modules/pbkdf2/node_modules/ripemd160": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.1.tgz", + "integrity": "sha512-J7f4wutN8mdbV08MJnXibYpCOPHR+yzy+iQ/AsjMv2j8cLavQ8VGagDFUwwTAdF8FmRKVeNpbTTEwNHCW1g94w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hash-base": "^2.0.0", + "inherits": "^2.0.1" + } + }, + "node_modules/pbkdf2/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, "node_modules/performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" }, "node_modules/picocolors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "license": "ISC" }, "node_modules/picomatch": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz", - "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "license": "MIT", "engines": { "node": ">=8.6" }, @@ -34564,31 +32157,6 @@ "node": ">=6" } }, - "node_modules/pinkie": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", - "integrity": "sha512-MnUuEycAemtSaeFSjXKW/aroV7akBbY+Sv+RkyqFjgAe73F+MR0TBWKBRDkmfWq/HiFmdavfZ1G7h4SPZXaCSg==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/pinkie-promise": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", - "integrity": "sha512-0Gni6D4UcLTbv9c57DfxDGdr41XfgUjqWZu492f0cIGr16zDU06BWP/RAEvOuo7CQ0CNjHaLlM59YJJFm3NWlw==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "pinkie": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/pirates": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.1.tgz", @@ -34755,47 +32323,56 @@ "whatwg-fetch": ">=0.10.0" } }, - "node_modules/portfinder": { - "version": "1.0.28", - "resolved": "https://registry.npmjs.org/portfinder/-/portfinder-1.0.28.tgz", - "integrity": 
"sha512-Se+2isanIcEqf2XMHjyUKskczxbPH7dQnlMjXX6+dybayyHvAf/TCgyMRlzf/B6QDhAEFOGes0pzRo3by4AbMA==", - "dev": true, - "optional": true, - "peer": true, + "node_modules/portable-fetch/node_modules/node-fetch": { + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "license": "MIT", "dependencies": { - "async": "^2.6.2", - "debug": "^3.1.1", - "mkdirp": "^0.5.5" + "whatwg-url": "^5.0.0" }, "engines": { - "node": ">= 0.12.0" + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } } }, - "node_modules/portfinder/node_modules/debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "optional": true, - "peer": true, + "node_modules/portable-fetch/node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "license": "MIT" + }, + "node_modules/portable-fetch/node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "license": "BSD-2-Clause" + }, + "node_modules/portable-fetch/node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "license": "MIT", "dependencies": { - "ms": "^2.1.1" + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" } }, - "node_modules/portfinder/node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "node_modules/possible-typed-array-names": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz", + "integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==", "dev": true, - "optional": true, - "peer": true - }, - "node_modules/posix-character-classes": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz", - "integrity": "sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=", + "license": "MIT", "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" } }, "node_modules/postcss": { @@ -35378,9 +32955,10 @@ } }, "node_modules/postcss-loader/node_modules/semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "license": "ISC", "dependencies": { "lru-cache": "^6.0.0" }, @@ -36358,17 +33936,6 @@ "resolved": 
"https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz", "integrity": "sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==" }, - "node_modules/postcss-svgo/node_modules/nth-check": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.0.1.tgz", - "integrity": "sha512-it1vE95zF6dTT9lBsYbxvqh0Soy4SPowchj0UBGj/V6cTPnXXtQOPUbhZ6CmGzAD/rW22LQK6E96pcdJXk4A4w==", - "dependencies": { - "boolbase": "^1.0.0" - }, - "funding": { - "url": "https://github.com/fb55/nth-check?sponsor=1" - } - }, "node_modules/postcss-svgo/node_modules/postcss-value-parser": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", @@ -36707,18 +34274,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/promise.allsettled/node_modules/is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/promise.allsettled/node_modules/is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -36750,15 +34305,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/promise.allsettled/node_modules/object-inspect": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", - "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/promise.prototype.finally": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/promise.prototype.finally/-/promise.prototype.finally-3.1.2.tgz", @@ -36824,18 +34370,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/promise.prototype.finally/node_modules/is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/promise.prototype.finally/node_modules/is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -36867,15 +34401,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/promise.prototype.finally/node_modules/object-inspect": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", - "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/prompts": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.0.tgz", @@ -36889,12 +34414,14 @@ } }, "node_modules/prop-types": { - "version": "15.6.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.6.2.tgz", - "integrity": "sha512-3pboPvLiWD7dkI3qf3KbUe6hKFKa52w+AE0VCqECtf+QHAKgOL37tTaNCnuX1nAAQ4ZhyP+kYVKf8rLmJ/feDQ==", + "version": "15.8.1", + "resolved": 
"https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", + "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", + "license": "MIT", "dependencies": { - "loose-envify": "^1.3.1", - "object-assign": "^4.1.1" + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.13.1" } }, "node_modules/prop-types-exact": { @@ -36930,10 +34457,11 @@ } }, "node_modules/protobufjs": { - "version": "6.11.2", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.11.2.tgz", - "integrity": "sha512-4BQJoPooKJl2G9j3XftkIXjoC9C0Av2NOrWmbLWT1vH32GcSUHjM0Arra6UfTsVyfMAuFzaLucXn1sadxJydAw==", + "version": "6.11.4", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.11.4.tgz", + "integrity": "sha512-5kQWPaJHi1WoCpjTGszzQ32PG2F4+wRY6BmAT4Vfw56Q2FZ4YZzK20xUYQH4YkfehY1e6QSICrJquM6xXZNcrw==", "hasInstallScript": true, + "license": "BSD-3-Clause", "dependencies": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", @@ -37077,13 +34605,6 @@ "node": ">=0.4.x" } }, - "node_modules/querystringify": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", - "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==", - "optional": true, - "peer": true - }, "node_modules/queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", @@ -37187,14 +34708,6 @@ "node": ">= 0.8" } }, - "node_modules/raw-body/node_modules/bytes": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", - "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", - "engines": { - "node": ">= 0.8" - } - }, "node_modules/raw-body/node_modules/depd": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", @@ -37340,16 +34853,6 @@ "react-dom": "^0.13.0 || ^0.14.0 || ^15.0.1 || ^16.0.0" } }, - "node_modules/react-ace/node_modules/prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dependencies": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "node_modules/react-app-polyfill": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/react-app-polyfill/-/react-app-polyfill-3.0.0.tgz", @@ -37479,20 +34982,6 @@ "url": "https://tidelift.com/funding/github/npm/browserslist" } }, - "node_modules/react-dev-utils/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/react-dev-utils/node_modules/escape-string-regexp": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", @@ -37555,51 +35044,6 @@ "node": ">=8.9.0" } }, - "node_modules/react-dev-utils/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": 
"sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/react-dev-utils/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/react-dev-utils/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/react-dev-utils/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/react-docgen": { "version": "5.4.0", "resolved": "https://registry.npmjs.org/react-docgen/-/react-docgen-5.4.0.tgz", @@ -37653,18 +35097,6 @@ "webpack": ">= 4" } }, - "node_modules/react-docgen-typescript-plugin/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/react-docgen-typescript-plugin/node_modules/debug": { "version": "4.3.2", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", @@ -37682,18 +35114,6 @@ } } }, - "node_modules/react-docgen-typescript-plugin/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/react-docgen-typescript-plugin/node_modules/find-cache-dir": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.1.tgz", @@ -37711,15 +35131,6 @@ "url": "https://github.com/avajs/find-cache-dir?sponsor=1" } }, - "node_modules/react-docgen-typescript-plugin/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "engines": { - "node": ">=0.12.0" - } - }, "node_modules/react-docgen-typescript-plugin/node_modules/make-dir": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", @@ -37735,19 +35146,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/react-docgen-typescript-plugin/node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": 
"sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dev": true, - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/react-docgen-typescript-plugin/node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -37767,10 +35165,11 @@ } }, "node_modules/react-docgen-typescript-plugin/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -37784,18 +35183,6 @@ "node": ">=0.10.0" } }, - "node_modules/react-docgen-typescript-plugin/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, "node_modules/react-docgen-typescript-plugin/node_modules/tslib": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.0.tgz", @@ -38039,17 +35426,6 @@ "react-dom": "^16.6.0 || ^17.0.0" } }, - "node_modules/react-helmet-async/node_modules/prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "dependencies": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "node_modules/react-inspector": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/react-inspector/-/react-inspector-5.1.1.tgz", @@ -38170,16 +35546,6 @@ } } }, - "node_modules/react-redux/node_modules/prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dependencies": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "node_modules/react-refresh": { "version": "0.8.3", "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.8.3.tgz", @@ -38401,14 +35767,6 @@ "ansi-html": "bin/ansi-html" } }, - "node_modules/react-scripts/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, "node_modules/react-scripts/node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -38445,17 +35803,6 @@ "node": ">= 8.0.0" } }, - "node_modules/react-scripts/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", 
- "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/react-scripts/node_modules/browserslist": { "version": "4.19.1", "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.19.1.tgz", @@ -38579,19 +35926,6 @@ "node": ">= 6" } }, - "node_modules/react-scripts/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/react-scripts/node_modules/css-select": { "version": "4.2.1", "resolved": "https://registry.npmjs.org/css-select/-/css-select-4.2.1.tgz", @@ -38748,17 +36082,6 @@ "node": ">= 0.4.0" } }, - "node_modules/react-scripts/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/react-scripts/node_modules/find-up": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", @@ -38959,29 +36282,6 @@ "webpack": "^5.20.0" } }, - "node_modules/react-scripts/node_modules/http-proxy-middleware": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz", - "integrity": "sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw==", - "dependencies": { - "@types/http-proxy": "^1.17.8", - "http-proxy": "^1.18.1", - "is-glob": "^4.0.1", - "is-plain-obj": "^3.0.0", - "micromatch": "^4.0.2" - }, - "engines": { - "node": ">=12.0.0" - }, - "peerDependencies": { - "@types/express": "^4.17.13" - }, - "peerDependenciesMeta": { - "@types/express": { - "optional": true - } - } - }, "node_modules/react-scripts/node_modules/immer": { "version": "9.0.7", "resolved": "https://registry.npmjs.org/immer/-/immer-9.0.7.tgz", @@ -39010,25 +36310,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/react-scripts/node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/react-scripts/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "engines": { - "node": ">=0.12.0" - } - }, "node_modules/react-scripts/node_modules/is-stream": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", @@ -39094,18 +36375,6 @@ "tslib": "^2.0.3" } }, - "node_modules/react-scripts/node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - 
"engines": { - "node": ">=8.6" - } - }, "node_modules/react-scripts/node_modules/no-case": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", @@ -39139,17 +36408,6 @@ "node": ">=8" } }, - "node_modules/react-scripts/node_modules/nth-check": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.0.1.tgz", - "integrity": "sha512-it1vE95zF6dTT9lBsYbxvqh0Soy4SPowchj0UBGj/V6cTPnXXtQOPUbhZ6CmGzAD/rW22LQK6E96pcdJXk4A4w==", - "dependencies": { - "boolbase": "^1.0.0" - }, - "funding": { - "url": "https://github.com/fb55/nth-check?sponsor=1" - } - }, "node_modules/react-scripts/node_modules/open": { "version": "8.4.0", "resolved": "https://registry.npmjs.org/open/-/open-8.4.0.tgz", @@ -39411,9 +36669,10 @@ } }, "node_modules/react-scripts/node_modules/semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "license": "ISC", "dependencies": { "lru-cache": "^6.0.0" }, @@ -39432,25 +36691,6 @@ "randombytes": "^2.1.0" } }, - "node_modules/react-scripts/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/react-scripts/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "engines": { - "node": ">=8" - } - }, "node_modules/react-scripts/node_modules/shell-quote": { "version": "1.7.3", "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.3.tgz", @@ -39501,30 +36741,6 @@ "node": ">=6" } }, - "node_modules/react-scripts/node_modules/terser": { - "version": "5.10.0", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.10.0.tgz", - "integrity": "sha512-AMmF99DMfEDiRJfxfY5jj5wNH/bYO09cniSqhfoyxc8sFoYIgkJy86G04UoZU5VjlpnplVu0K6Tx6E9b5+DlHA==", - "dependencies": { - "commander": "^2.20.0", - "source-map": "~0.7.2", - "source-map-support": "~0.5.20" - }, - "bin": { - "terser": "bin/terser" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "acorn": "^8.5.0" - }, - "peerDependenciesMeta": { - "acorn": { - "optional": true - } - } - }, "node_modules/react-scripts/node_modules/terser-webpack-plugin": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.0.tgz", @@ -39566,48 +36782,11 @@ "node": ">=0.10.0" } }, - "node_modules/react-scripts/node_modules/terser/node_modules/commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" - }, - "node_modules/react-scripts/node_modules/terser/node_modules/source-map": { - "version": "0.7.3", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", - 
"integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==", - "engines": { - "node": ">= 8" - } - }, - "node_modules/react-scripts/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, "node_modules/react-scripts/node_modules/tslib": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz", "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==" }, - "node_modules/react-scripts/node_modules/type-fest": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz", - "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==", - "optional": true, - "peer": true, - "engines": { - "node": ">=12.20" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/react-scripts/node_modules/universalify": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", @@ -39616,77 +36795,6 @@ "node": ">= 10.0.0" } }, - "node_modules/react-scripts/node_modules/webpack-dev-middleware": { - "version": "5.3.3", - "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz", - "integrity": "sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA==", - "dependencies": { - "colorette": "^2.0.10", - "memfs": "^3.4.3", - "mime-types": "^2.1.31", - "range-parser": "^1.2.1", - "schema-utils": "^4.0.0" - }, - "engines": { - "node": ">= 12.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^4.0.0 || ^5.0.0" - } - }, - "node_modules/react-scripts/node_modules/webpack-dev-middleware/node_modules/ajv": { - "version": "8.12.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", - "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/react-scripts/node_modules/webpack-dev-middleware/node_modules/ajv-keywords": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", - "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", - "dependencies": { - "fast-deep-equal": "^3.1.3" - }, - "peerDependencies": { - "ajv": "^8.8.2" - } - }, - "node_modules/react-scripts/node_modules/webpack-dev-middleware/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" - }, - "node_modules/react-scripts/node_modules/webpack-dev-middleware/node_modules/schema-utils": { - "version": "4.2.0", - "resolved": 
"https://registry.npmjs.org/schema-utils/-/schema-utils-4.2.0.tgz", - "integrity": "sha512-L0jRsrPpjdckP3oPug3/VxNKt2trR8TcabrM6FOAAlvC/9Phcmm+cuAgTlxBqdBR1WJx7Naj9WHw+aOmheSVbw==", - "dependencies": { - "@types/json-schema": "^7.0.9", - "ajv": "^8.9.0", - "ajv-formats": "^2.1.1", - "ajv-keywords": "^5.1.0" - }, - "engines": { - "node": ">= 12.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, "node_modules/react-scripts/node_modules/webpack-dev-server": { "version": "4.15.1", "resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-4.15.1.tgz", @@ -39794,40 +36902,6 @@ "url": "https://opencollective.com/webpack" } }, - "node_modules/react-scripts/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/react-scripts/node_modules/ws": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.16.0.tgz", - "integrity": "sha512-HS0c//TP7Ina87TfiPUz1rQzMhHrl/SG2guqRcTOIUYD2q8uhUdNHZYJUaQ8aTGPzCh+c6oawMKW35nFl1dxyQ==", - "engines": { - "node": ">=10.0.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": ">=5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, "node_modules/react-sizeme": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/react-sizeme/-/react-sizeme-3.0.1.tgz", @@ -39907,6 +36981,7 @@ "version": "2.9.0", "resolved": "https://registry.npmjs.org/react-transition-group/-/react-transition-group-2.9.0.tgz", "integrity": "sha512-+HzNTCHpeQyl4MJ/bdE0u6XRMe9+XG/+aL4mCxVN4DnPBQ0/5bfHWPDuOZUzYdMj94daZaZdCCc1Dzt9R/xSSg==", + "license": "BSD-3-Clause", "dependencies": { "dom-helpers": "^3.4.0", "loose-envify": "^1.4.0", @@ -40047,6 +37122,16 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/read-pkg/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, "node_modules/read-pkg/node_modules/type-fest": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz", @@ -40085,6 +37170,7 @@ "version": "0.30.0", "resolved": "https://registry.npmjs.org/recompose/-/recompose-0.30.0.tgz", "integrity": "sha512-ZTrzzUDa9AqUIhRk4KmVFihH0rapdCSMFXjhHbNrjAWxBuUD/guYlyysMnuHjlZC/KRiOKRtB4jf96yYSkKE8w==", + "license": "MIT", "dependencies": { "@babel/runtime": "^7.0.0", "change-emitter": "^0.1.2", @@ -40100,7 +37186,8 @@ "node_modules/recompose/node_modules/hoist-non-react-statics": { "version": "2.5.5", "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-2.5.5.tgz", - "integrity": "sha512-rqcy4pJo55FTTLWt+bU8ukscqHeE/e9KWvsOW2b/a3afxQZhwkQdT1rPPCJ0rYXdj4vNcasY8zHTH+jF/qStxw==" + "integrity": "sha512-rqcy4pJo55FTTLWt+bU8ukscqHeE/e9KWvsOW2b/a3afxQZhwkQdT1rPPCJ0rYXdj4vNcasY8zHTH+jF/qStxw==", + "license": "BSD-3-Clause" }, "node_modules/recursive-readdir": { "version": "2.2.2", @@ -40186,18 +37273,6 @@ "@babel/runtime": "^7.8.4" } }, - 
"node_modules/regex-not": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/regex-not/-/regex-not-1.0.2.tgz", - "integrity": "sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==", - "dependencies": { - "extend-shallow": "^3.0.2", - "safe-regex": "^1.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/regex-parser": { "version": "2.2.11", "resolved": "https://registry.npmjs.org/regex-parser/-/regex-parser-2.2.11.tgz", @@ -40411,6 +37486,16 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true }, + "node_modules/remark-mdx/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, "node_modules/remark-parse": { "version": "8.0.3", "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-8.0.3.tgz", @@ -40565,18 +37650,6 @@ "url": "https://github.com/fb55/entities?sponsor=1" } }, - "node_modules/renderkid/node_modules/nth-check": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.0.0.tgz", - "integrity": "sha512-i4sc/Kj8htBrAiH1viZ0TgU8Y5XqCaV/FziYK6TBczxmeKm3AEFWqqF3195yKudrarqy7Zu80Ra5dobFjn9X/Q==", - "dev": true, - "dependencies": { - "boolbase": "^1.0.0" - }, - "funding": { - "url": "https://github.com/fb55/nth-check?sponsor=1" - } - }, "node_modules/renderkid/node_modules/strip-ansi": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", @@ -40589,18 +37662,11 @@ "node": ">=0.10.0" } }, - "node_modules/repeat-element": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.3.tgz", - "integrity": "sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/repeat-string": { "version": "1.6.1", "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=", + "dev": true, "engines": { "node": ">=0.10" } @@ -40669,6 +37735,44 @@ "request": "^2.34" } }, + "node_modules/request/node_modules/form-data": { + "version": "2.5.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.4.tgz", + "integrity": "sha512-Y/3MmRiR8Nd+0CUtrbvcKtKzLWiUfpQ7DFVggH8PwmGt/0r7RSy32GuP4hpCJlQNEBusisSx1DLtD8uD386HJQ==", + "deprecated": "This version has an incorrect dependency; please use v2.5.5", + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "has-own": "^1.0.1", + "mime-types": "^2.1.35", + "safe-buffer": "^5.2.1" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/request/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, "node_modules/request/node_modules/uuid": 
{ "version": "3.4.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", @@ -40694,14 +37798,6 @@ "node": ">=0.10.0" } }, - "node_modules/require-main-filename": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", - "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", - "dev": true, - "optional": true, - "peer": true - }, "node_modules/requires-port": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", @@ -40748,12 +37844,6 @@ "resolved": "https://registry.npmjs.org/resolve-pathname/-/resolve-pathname-2.2.0.tgz", "integrity": "sha512-bAFz9ld18RzJfddgrO2e/0S2O81710++chRMUxHjXOYKF6jTAMrUNZrEZ1PvV0zlhfjidm08iRPdTLPno1FuRg==" }, - "node_modules/resolve-url": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/resolve-url/-/resolve-url-0.2.1.tgz", - "integrity": "sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=", - "deprecated": "https://github.com/lydell/resolve-url#deprecated" - }, "node_modules/resolve-url-loader": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/resolve-url-loader/-/resolve-url-loader-4.0.0.tgz", @@ -40822,19 +37912,9 @@ "version": "0.1.15", "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", "integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==", - "engines": { - "node": ">=0.12" - } - }, - "node_modules/retry": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", - "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", "dev": true, - "optional": true, - "peer": true, "engines": { - "node": ">= 4" + "node": ">=0.12" } }, "node_modules/retry-request": { @@ -40943,38 +38023,6 @@ "randombytes": "^2.1.0" } }, - "node_modules/rollup-plugin-terser/node_modules/source-map": { - "version": "0.7.3", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", - "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==", - "engines": { - "node": ">= 8" - } - }, - "node_modules/rollup-plugin-terser/node_modules/terser": { - "version": "5.10.0", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.10.0.tgz", - "integrity": "sha512-AMmF99DMfEDiRJfxfY5jj5wNH/bYO09cniSqhfoyxc8sFoYIgkJy86G04UoZU5VjlpnplVu0K6Tx6E9b5+DlHA==", - "dependencies": { - "commander": "^2.20.0", - "source-map": "~0.7.2", - "source-map-support": "~0.5.20" - }, - "bin": { - "terser": "bin/terser" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "acorn": "^8.5.0" - }, - "peerDependenciesMeta": { - "acorn": { - "optional": true - } - } - }, "node_modules/rst-selector-parser": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/rst-selector-parser/-/rst-selector-parser-2.2.3.tgz", @@ -41040,14 +38088,6 @@ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, - "node_modules/safe-regex": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", - "integrity": "sha1-QKNmnzsHfR6UPURinhV91IAjvy4=", - "dependencies": { - "ret": "~0.1.10" - } - }, "node_modules/safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", @@ 
-41213,26 +38253,6 @@ "resolved": "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz", "integrity": "sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo=" }, - "node_modules/selfsigned": { - "version": "1.10.11", - "resolved": "https://registry.npmjs.org/selfsigned/-/selfsigned-1.10.11.tgz", - "integrity": "sha512-aVmbPOfViZqOZPgRBT0+3u4yZFHpmnIghLMlAcb5/xhp5ZtB/RVnKhz5vl2M32CLXAqR4kha9zfhNg0Lf/sxKA==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "node-forge": "^0.10.0" - } - }, - "node_modules/semver": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.6.0.tgz", - "integrity": "sha512-RS9R6R35NYgQn++fkDWaOmqGoj4Ek9gGs+DPxNUZKuwE183xjJroKvyo1IzVFeXvUrvmALy6FWD5xrdJT25gMg==", - "dev": true, - "bin": { - "semver": "bin/semver" - } - }, "node_modules/sentence-case": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/sentence-case/-/sentence-case-2.1.1.tgz", @@ -41327,31 +38347,6 @@ "node": ">= 0.4" } }, - "node_modules/set-value": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz", - "integrity": "sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==", - "dependencies": { - "extend-shallow": "^2.0.1", - "is-extendable": "^0.1.1", - "is-plain-object": "^2.0.3", - "split-string": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/set-value/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/setimmediate": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", @@ -41393,24 +38388,24 @@ "dev": true }, "node_modules/shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", - "dev": true, + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", "dependencies": { - "shebang-regex": "^1.0.0" + "shebang-regex": "^3.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", - "dev": true, + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/shell-quote": { @@ -41420,22 +38415,73 @@ "dev": true }, "node_modules/side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", 
"dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/side-channel/node_modules/object-inspect": { - "version": "1.10.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.10.3.tgz", - "integrity": "sha512-e5mCJlSH7poANfC8z8S9s9S2IN5/4Zb3aZ33f5s8YqoazCFzNLloLU8r5VCG+G7WoqLvAAZoVMcy3tp/3X0Plw==", + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -41473,135 +38519,6 @@ "integrity": "sha1-EMCI2LWOsHazIpu1oE4jLOEmQi0=", "dev": true }, - "node_modules/snapdragon": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz", - "integrity": "sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==", - "dependencies": { - "base": "^0.11.1", - "debug": "^2.2.0", - "define-property": "^0.2.5", - "extend-shallow": "^2.0.1", - "map-cache": "^0.2.2", - "source-map": "^0.5.6", - "source-map-resolve": "^0.5.0", - "use": "^3.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-node": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/snapdragon-node/-/snapdragon-node-2.1.1.tgz", - "integrity": "sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==", - "dependencies": { - "define-property": "^1.0.0", - "isobject": "^3.0.0", - "snapdragon-util": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-node/node_modules/define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", - "dependencies": { - "is-descriptor": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } 
- }, - "node_modules/snapdragon-node/node_modules/is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-node/node_modules/is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-node/node_modules/is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dependencies": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-util": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/snapdragon-util/-/snapdragon-util-3.0.1.tgz", - "integrity": "sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==", - "dependencies": { - "kind-of": "^3.2.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-util/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/snapdragon/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/snapshot-diff": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/snapshot-diff/-/snapshot-diff-0.6.1.tgz", @@ -41778,10 +38695,11 @@ } }, "node_modules/snapshot-diff/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -41817,50 +38735,6 @@ 
"websocket-driver": "^0.7.4" } }, - "node_modules/sockjs-client": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/sockjs-client/-/sockjs-client-1.6.1.tgz", - "integrity": "sha512-2g0tjOR+fRs0amxENLi/q5TiJTqY+WXFOzb5UwXndlK6TO3U/mirZznpx6w34HVMoc3g7cY24yC/ZMIYnDlfkw==", - "optional": true, - "peer": true, - "dependencies": { - "debug": "^3.2.7", - "eventsource": "^2.0.2", - "faye-websocket": "^0.11.4", - "inherits": "^2.0.4", - "url-parse": "^1.5.10" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://tidelift.com/funding/github/npm/sockjs-client" - } - }, - "node_modules/sockjs-client/node_modules/debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "optional": true, - "peer": true, - "dependencies": { - "ms": "^2.1.1" - } - }, - "node_modules/sockjs-client/node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "optional": true, - "peer": true - }, - "node_modules/sockjs-client/node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "optional": true, - "peer": true - }, "node_modules/source-list-map": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/source-list-map/-/source-list-map-2.0.1.tgz", @@ -41870,6 +38744,7 @@ "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "dev": true, "engines": { "node": ">=0.10.0" } @@ -41913,19 +38788,6 @@ "node": ">=0.10.0" } }, - "node_modules/source-map-resolve": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.5.2.tgz", - "integrity": "sha512-MjqsvNwyz1s0k81Goz/9vRBe9SZdB09Bdw+/zYyO+3CuPk6fouTaxscHkgtE8jKvf01kVfl8riHzERQ/kefaSA==", - "deprecated": "See https://github.com/lydell/source-map-resolve#deprecated", - "dependencies": { - "atob": "^2.1.1", - "decode-uri-component": "^0.2.0", - "resolve-url": "^0.2.1", - "source-map-url": "^0.4.0", - "urix": "^0.1.0" - } - }, "node_modules/source-map-support": { "version": "0.5.21", "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", @@ -42080,17 +38942,6 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, - "node_modules/split-string": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/split-string/-/split-string-3.1.0.tgz", - "integrity": "sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==", - "dependencies": { - "extend-shallow": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/sprintf-js": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", @@ -42162,29 +39013,6 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/static-extend": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/static-extend/-/static-extend-0.1.2.tgz", - "integrity": "sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY=", - 
"dependencies": { - "define-property": "^0.2.5", - "object-copy": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/static-extend/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/statuses": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", @@ -42269,9 +39097,9 @@ } }, "node_modules/stream-shift": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz", - "integrity": "sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI=", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz", + "integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==", "dev": true }, "node_modules/string_decoder": { @@ -42378,18 +39206,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/string.prototype.matchall/node_modules/is-callable": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.3.tgz", - "integrity": "sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/string.prototype.matchall/node_modules/is-regex": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.3.tgz", @@ -42418,15 +39234,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/string.prototype.matchall/node_modules/object-inspect": { - "version": "1.10.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.10.3.tgz", - "integrity": "sha512-e5mCJlSH7poANfC8z8S9s9S2IN5/4Zb3aZ33f5s8YqoazCFzNLloLU8r5VCG+G7WoqLvAAZoVMcy3tp/3X0Plw==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/string.prototype.padend": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/string.prototype.padend/-/string.prototype.padend-3.1.2.tgz", @@ -42492,18 +39299,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/string.prototype.padend/node_modules/is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/string.prototype.padend/node_modules/is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -42535,15 +39330,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/string.prototype.padend/node_modules/object-inspect": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", - "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/string.prototype.padstart": { "version": "3.1.2", "resolved": 
"https://registry.npmjs.org/string.prototype.padstart/-/string.prototype.padstart-3.1.2.tgz", @@ -42609,18 +39395,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/string.prototype.padstart/node_modules/is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/string.prototype.padstart/node_modules/is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -42652,15 +39426,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/string.prototype.padstart/node_modules/object-inspect": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", - "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/string.prototype.trim": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.0.tgz", @@ -43096,6 +39861,7 @@ "version": "1.2.0", "resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.2.0.tgz", "integrity": "sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ==", + "license": "MIT", "engines": { "node": ">=0.10.0" } @@ -43171,18 +39937,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/symbol.prototype.description/node_modules/is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/symbol.prototype.description/node_modules/is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -43214,15 +39968,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/symbol.prototype.description/node_modules/object-inspect": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", - "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/symbol.prototype.description/node_modules/object.getownpropertydescriptors": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.2.tgz", @@ -43297,17 +40042,6 @@ "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.1.tgz", "integrity": "sha512-e0hDa9H2Z9AwFkk2qDlwhoMYE4eToKarchkQHovNdLTCYMHZHeRjI71crOh+dio4K6u1IcwubQqo79Ga4CyAQA==" }, - "node_modules/tailwindcss/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, 
"node_modules/tailwindcss/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -43365,17 +40099,6 @@ "node": ">= 6" } }, - "node_modules/tailwindcss/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/tailwindcss/node_modules/glob-parent": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", @@ -43406,37 +40129,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/tailwindcss/node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/tailwindcss/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/tailwindcss/node_modules/micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - }, - "engines": { - "node": ">=8.6" - } - }, "node_modules/tailwindcss/node_modules/postcss-selector-parser": { "version": "6.0.8", "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.8.tgz", @@ -43481,17 +40173,6 @@ "node": ">=8" } }, - "node_modules/tailwindcss/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, "node_modules/tapable": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/tapable/-/tapable-1.1.3.tgz", @@ -43569,14 +40250,33 @@ } }, "node_modules/teeny-request/node_modules/node-fetch": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", - "integrity": "sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==", + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", "dev": true, + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, "engines": { "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } } }, + "node_modules/teeny-request/node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": 
"sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "dev": true, + "license": "MIT" + }, "node_modules/teeny-request/node_modules/uuid": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", @@ -43587,6 +40287,24 @@ "uuid": "bin/uuid" } }, + "node_modules/teeny-request/node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "dev": true, + "license": "BSD-2-Clause" + }, + "node_modules/teeny-request/node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dev": true, + "license": "MIT", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, "node_modules/telejson": { "version": "5.3.3", "resolved": "https://registry.npmjs.org/telejson/-/telejson-5.3.3.tgz", @@ -43728,20 +40446,21 @@ } }, "node_modules/terser": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/terser/-/terser-4.8.0.tgz", - "integrity": "sha512-EAPipTNeWsb/3wLPeup1tVPaXfIaU68xMnVdPafIL1TV05OhASArYyIfFvnvJCNrR2NIOvDVNNTFRa+Re2MWyw==", - "dev": true, + "version": "5.14.2", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.14.2.tgz", + "integrity": "sha512-oL0rGeM/WFQCUd0y2QrWxYnq7tfSuKBiqTjRPWrRgB46WD/kiwHwF8T23z78H6Q6kGCuuHcPB+KULHRdxvVGQA==", + "license": "BSD-2-Clause", "dependencies": { + "@jridgewell/source-map": "^0.3.2", + "acorn": "^8.5.0", "commander": "^2.20.0", - "source-map": "~0.6.1", - "source-map-support": "~0.5.12" + "source-map-support": "~0.5.20" }, "bin": { "terser": "bin/terser" }, "engines": { - "node": ">=6.0.0" + "node": ">=10" } }, "node_modules/terser-webpack-plugin": { @@ -43880,10 +40599,11 @@ } }, "node_modules/terser-webpack-plugin/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -43897,41 +40617,6 @@ "node": ">=0.10.0" } }, - "node_modules/terser-webpack-plugin/node_modules/terser": { - "version": "5.7.0", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.7.0.tgz", - "integrity": "sha512-HP5/9hp2UaZt5fYkuhNBR8YyRcT8juw8+uFbAme53iN9hblvKnLUTKkmwJG6ocWpIKf8UK4DoeWG4ty0J6S6/g==", - "dev": true, - "dependencies": { - "commander": "^2.20.0", - "source-map": "~0.7.2", - "source-map-support": "~0.5.19" - }, - "bin": { - "terser": "bin/terser" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/terser-webpack-plugin/node_modules/terser/node_modules/source-map": { - "version": "0.7.3", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", - "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==", - "dev": true, - "engines": { - "node": ">= 8" - } - }, - "node_modules/terser/node_modules/source-map": { - "version": "0.6.1", - 
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/test-exclude": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", @@ -44007,9 +40692,10 @@ } }, "node_modules/tmpl": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.4.tgz", - "integrity": "sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE=" + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", + "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", + "license": "BSD-3-Clause" }, "node_modules/to-arraybuffer": { "version": "1.0.1", @@ -44017,52 +40703,68 @@ "integrity": "sha512-okFlQcoGTi4LQBG/PgSYblw9VOyptsz2KJZqc6qtgGdes8VktzUQkj4BI2blit072iS8VODNcMA+tvnS9dnuMA==", "dev": true }, - "node_modules/to-object-path": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz", - "integrity": "sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68=", + "node_modules/to-buffer": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/to-buffer/-/to-buffer-1.2.1.tgz", + "integrity": "sha512-tB82LpAIWjhLYbqjx3X4zEeHN6M8CiuOEy2JY8SEQVdYRe3CCHOFaqrBW1doLDrfpWhplcW7BL+bO3/6S3pcDQ==", + "dev": true, + "license": "MIT", "dependencies": { - "kind-of": "^3.0.2" + "isarray": "^2.0.5", + "safe-buffer": "^5.2.1", + "typed-array-buffer": "^1.0.3" }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" } }, - "node_modules/to-object-path/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } + "node_modules/to-buffer/node_modules/isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "dev": true, + "license": "MIT" }, - "node_modules/to-regex": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/to-regex/-/to-regex-3.0.2.tgz", - "integrity": "sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==", + "node_modules/to-buffer/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "license": "MIT", "dependencies": { - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "regex-not": "^1.0.2", - "safe-regex": "^1.1.0" + "is-number": "^7.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=8.0" } }, - 
"node_modules/to-regex-range": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", - "dependencies": { - "is-number": "^3.0.0", - "repeat-string": "^1.6.1" - }, + "node_modules/to-regex-range/node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "license": "MIT", "engines": { - "node": ">=0.10.0" + "node": ">=0.12.0" } }, "node_modules/toggle-selection": { @@ -44432,20 +41134,6 @@ "node": ">=4" } }, - "node_modules/type-fest": { - "version": "0.13.1", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.13.1.tgz", - "integrity": "sha512-34R7HTnG0XIJcBSn5XhDd7nNFPRcXYRZrBB2O2jdKqYODldSzBAqzsWoZYYvduky73toYS/ESqxPvkDf/F0XMg==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/type-is": { "version": "1.6.18", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", @@ -44458,6 +41146,21 @@ "node": ">= 0.6" } }, + "node_modules/typed-array-buffer": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", + "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.14" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/typedarray": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", @@ -44658,20 +41361,6 @@ "node": ">=8" } }, - "node_modules/union-value": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz", - "integrity": "sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==", - "dependencies": { - "arr-union": "^3.1.0", - "get-value": "^2.0.6", - "is-extendable": "^0.1.1", - "set-value": "^2.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/unique-filename": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-1.1.1.tgz", @@ -44839,50 +41528,6 @@ "resolved": "https://registry.npmjs.org/unquote/-/unquote-1.1.1.tgz", "integrity": "sha1-j97XMk7G6IoP+LkF58CYzcCG1UQ=" }, - "node_modules/unset-value": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unset-value/-/unset-value-1.0.0.tgz", - "integrity": "sha1-g3aHP30jNRef+x5vw6jtDfyKtVk=", - "dependencies": { - "has-value": "^0.3.1", - "isobject": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/unset-value/node_modules/has-value": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/has-value/-/has-value-0.3.1.tgz", - "integrity": "sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8=", - "dependencies": { - "get-value": "^2.0.3", - "has-values": "^0.1.4", - "isobject": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/unset-value/node_modules/has-value/node_modules/isobject": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz", - "integrity": "sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=", - "dependencies": { - "isarray": "1.0.0" - }, - "engines": { - "node": 
">=0.10.0" - } - }, - "node_modules/unset-value/node_modules/has-values": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/has-values/-/has-values-0.1.4.tgz", - "integrity": "sha1-bWHeldkd/Km5oCCJrThL/49it3E=", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/upath": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/upath/-/upath-1.2.0.tgz", @@ -44915,12 +41560,6 @@ "punycode": "^2.1.0" } }, - "node_modules/urix": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/urix/-/urix-0.1.0.tgz", - "integrity": "sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI=", - "deprecated": "Please see https://github.com/lydell/urix#deprecated" - }, "node_modules/url": { "version": "0.11.0", "resolved": "https://registry.npmjs.org/url/-/url-0.11.0.tgz", @@ -45007,17 +41646,6 @@ "url": "https://opencollective.com/webpack" } }, - "node_modules/url-parse": { - "version": "1.5.10", - "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", - "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", - "optional": true, - "peer": true, - "dependencies": { - "querystringify": "^2.1.1", - "requires-port": "^1.0.0" - } - }, "node_modules/url/node_modules/punycode": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", @@ -45034,14 +41662,6 @@ "node": ">=0.4.x" } }, - "node_modules/use": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/use/-/use-3.1.1.tgz", - "integrity": "sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/use-composed-ref": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/use-composed-ref/-/use-composed-ref-1.1.0.tgz", @@ -45571,673 +42191,52 @@ "node": ">=0.4.0" } }, - "node_modules/webpack-bundle-analyzer/node_modules/ws": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.1.tgz", - "integrity": "sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA==", - "dev": true, - "dependencies": { - "async-limiter": "~1.0.0" - } - }, "node_modules/webpack-dev-middleware": { - "version": "3.7.3", - "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-3.7.3.tgz", - "integrity": "sha512-djelc/zGiz9nZj/U7PTBi2ViorGJXEWo/3ltkPbDyxCXhhEXkW0ce99falaok4TPj+AsxLiXJR0EBOb0zh9fKQ==", - "dev": true, + "version": "5.3.4", + "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.4.tgz", + "integrity": "sha512-BVdTqhhs+0IfoeAf7EoH5WE+exCmqGerHfDM0IL096Px60Tq2Mn9MAbnaGUe6HiMa41KMCYF19gyzZmBcq/o4Q==", + "license": "MIT", "dependencies": { - "memory-fs": "^0.4.1", - "mime": "^2.4.4", - "mkdirp": "^0.5.1", + "colorette": "^2.0.10", + "memfs": "^3.4.3", + "mime-types": "^2.1.31", "range-parser": "^1.2.1", - "webpack-log": "^2.0.0" - }, - "engines": { - "node": ">= 6" - }, - "peerDependencies": { - "webpack": "^4.0.0 || ^5.0.0" - } - }, - "node_modules/webpack-dev-middleware/node_modules/mime": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.5.2.tgz", - "integrity": "sha512-tqkh47FzKeCPD2PUiPB6pkbMzsCasjxAfC62/Wap5qrUWcb+sFasXUC5I3gYM5iBM8v/Qpn4UK0x+j0iHyFPDg==", - "dev": true, - "bin": { - "mime": "cli.js" + "schema-utils": "^4.0.0" }, "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/webpack-dev-server": { - "version": "3.11.3", - "resolved": 
"https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-3.11.3.tgz", - "integrity": "sha512-3x31rjbEQWKMNzacUZRE6wXvUFuGpH7vr0lIEbYpMAG9BOxi0928QU1BBswOAP3kg3H1O4hiS+sq4YyAn6ANnA==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "ansi-html-community": "0.0.8", - "bonjour": "^3.5.0", - "chokidar": "^2.1.8", - "compression": "^1.7.4", - "connect-history-api-fallback": "^1.6.0", - "debug": "^4.1.1", - "del": "^4.1.1", - "express": "^4.17.1", - "html-entities": "^1.3.1", - "http-proxy-middleware": "0.19.1", - "import-local": "^2.0.0", - "internal-ip": "^4.3.0", - "ip": "^1.1.5", - "is-absolute-url": "^3.0.3", - "killable": "^1.0.1", - "loglevel": "^1.6.8", - "opn": "^5.5.0", - "p-retry": "^3.0.1", - "portfinder": "^1.0.26", - "schema-utils": "^1.0.0", - "selfsigned": "^1.10.8", - "semver": "^6.3.0", - "serve-index": "^1.9.1", - "sockjs": "^0.3.21", - "sockjs-client": "^1.5.0", - "spdy": "^4.0.2", - "strip-ansi": "^3.0.1", - "supports-color": "^6.1.0", - "url": "^0.11.0", - "webpack-dev-middleware": "^3.7.2", - "webpack-log": "^2.0.0", - "ws": "^6.2.1", - "yargs": "^13.3.2" - }, - "bin": { - "webpack-dev-server": "bin/webpack-dev-server.js" + "node": ">= 12.13.0" }, - "engines": { - "node": ">= 6.11.5" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" }, "peerDependencies": { "webpack": "^4.0.0 || ^5.0.0" - }, - "peerDependenciesMeta": { - "webpack-cli": { - "optional": true - } - } - }, - "node_modules/webpack-dev-server/node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/webpack-dev-server/node_modules/ajv-keywords": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", - "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", - "dev": true, - "optional": true, - "peer": true, - "peerDependencies": { - "ajv": "^6.9.1" - } - }, - "node_modules/webpack-dev-server/node_modules/ansi-html-community": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz", - "integrity": "sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==", - "dev": true, - "engines": [ - "node >= 0.8.0" - ], - "optional": true, - "peer": true, - "bin": { - "ansi-html": "bin/ansi-html" - } - }, - "node_modules/webpack-dev-server/node_modules/ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/webpack-dev-server/node_modules/anymatch": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz", - "integrity": "sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==", - "dev": true, - "optional": true, - 
"peer": true, - "dependencies": { - "micromatch": "^3.1.4", - "normalize-path": "^2.1.1" - } - }, - "node_modules/webpack-dev-server/node_modules/anymatch/node_modules/normalize-path": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", - "integrity": "sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "remove-trailing-separator": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/webpack-dev-server/node_modules/binary-extensions": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.13.1.tgz", - "integrity": "sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/webpack-dev-server/node_modules/camelcase": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/chokidar": { - "version": "2.1.8", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", - "integrity": "sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==", - "deprecated": "Chokidar 2 does not receive security updates since 2019. Upgrade to chokidar 3 with 15x fewer dependencies", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "anymatch": "^2.0.0", - "async-each": "^1.0.1", - "braces": "^2.3.2", - "glob-parent": "^3.1.0", - "inherits": "^2.0.3", - "is-binary-path": "^1.0.0", - "is-glob": "^4.0.0", - "normalize-path": "^3.0.0", - "path-is-absolute": "^1.0.0", - "readdirp": "^2.2.1", - "upath": "^1.1.1" - }, - "optionalDependencies": { - "fsevents": "^1.2.7" - } - }, - "node_modules/webpack-dev-server/node_modules/cliui": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", - "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "string-width": "^3.1.0", - "strip-ansi": "^5.2.0", - "wrap-ansi": "^5.1.0" - } - }, - "node_modules/webpack-dev-server/node_modules/cliui/node_modules/ansi-regex": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", - "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/cliui/node_modules/strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "ansi-regex": "^4.1.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": 
"sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/webpack-dev-server/node_modules/emoji-regex": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", - "dev": true, - "optional": true, - "peer": true - }, - "node_modules/webpack-dev-server/node_modules/find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "locate-path": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/fsevents": { - "version": "1.2.13", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.13.tgz", - "integrity": "sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw==", - "deprecated": "The v1 package contains DANGEROUS / INSECURE binaries. Upgrade to safe fsevents v2", - "dev": true, - "hasInstallScript": true, - "optional": true, - "os": [ - "darwin" - ], - "peer": true, - "dependencies": { - "bindings": "^1.5.0", - "nan": "^2.12.1" - }, - "engines": { - "node": ">= 4.0" - } - }, - "node_modules/webpack-dev-server/node_modules/glob-parent": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", - "integrity": "sha512-E8Ak/2+dZY6fnzlR7+ueWvhsH1SjHr4jjss4YS/h4py44jY9MhK/VFdaZJAWDz6BbL21KeteKxFSFpq8OS5gVA==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "is-glob": "^3.1.0", - "path-dirname": "^1.0.0" - } - }, - "node_modules/webpack-dev-server/node_modules/glob-parent/node_modules/is-glob": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", - "integrity": "sha512-UFpDDrPgM6qpnFNI+rh/p3bUaq9hKLZN8bMUWzxmcnZVS3omf4IPK+BrewlnWjO1WmUsMYuSjKh4UJuV4+Lqmw==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "is-extglob": "^2.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/webpack-dev-server/node_modules/import-local": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/import-local/-/import-local-2.0.0.tgz", - "integrity": "sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "pkg-dir": "^3.0.0", - "resolve-cwd": "^2.0.0" - }, - "bin": { - "import-local-fixture": "fixtures/cli.js" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/is-binary-path": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-1.0.1.tgz", - "integrity": "sha512-9fRVlXc0uCxEDj1nQzaWONSpbTfx0FmJfzHF7pwlI8DkWGoHBBea4Pg5Ky0ojwwxQmnSifgbKkI06Qv0Ljgj+Q==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "binary-extensions": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/webpack-dev-server/node_modules/is-fullwidth-code-point": { - "version": "2.0.0", - 
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/webpack-dev-server/node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true, - "optional": true, - "peer": true - }, - "node_modules/webpack-dev-server/node_modules/locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true, - "optional": true, - "peer": true - }, - "node_modules/webpack-dev-server/node_modules/p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "p-limit": "^2.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/path-exists": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/webpack-dev-server/node_modules/readdirp": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz", - "integrity": "sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "graceful-fs": "^4.1.11", - "micromatch": "^3.1.10", - "readable-stream": "^2.0.2" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/webpack-dev-server/node_modules/resolve-cwd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-2.0.0.tgz", - "integrity": "sha512-ccu8zQTrzVr954472aUVPLEcB3YpKSYR3cg/3lo1okzobPBM+1INXBbBZlDbnI/hbEocnf8j0QVo43hQKrbchg==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "resolve-from": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/webpack-dev-server/node_modules/resolve-from": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", - "integrity": "sha512-GnlH6vxLymXJNMBo7XP1fJIzBFbdYt49CuTwmB/6N53t+kMPRMFKz783LlQ4tv28XoQfMWinAJX6WCGf2IlaIw==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/webpack-dev-server/node_modules/schema-utils": { - "version": 
"1.0.0", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", - "integrity": "sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "ajv": "^6.1.0", - "ajv-errors": "^1.0.0", - "ajv-keywords": "^3.1.0" - }, - "engines": { - "node": ">= 4" - } - }, - "node_modules/webpack-dev-server/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "optional": true, - "peer": true, - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/webpack-dev-server/node_modules/string-width": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", - "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/string-width/node_modules/ansi-regex": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", - "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/string-width/node_modules/strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "ansi-regex": "^4.1.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "ansi-regex": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" } }, - "node_modules/webpack-dev-server/node_modules/supports-color": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", - "integrity": "sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - "node": ">=6" - } + "node_modules/webpack-dev-middleware/node_modules/colorette": { + "version": "2.0.20", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", + "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", + "license": "MIT" }, - "node_modules/webpack-dev-server/node_modules/wrap-ansi": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", - "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", - "dev": true, - "optional": true, - "peer": true, 
+ "node_modules/webpack-dev-middleware/node_modules/schema-utils": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.2.tgz", + "integrity": "sha512-Gn/JaSk/Mt9gYubxTtSn/QCV4em9mpAPiR1rqy/Ocu19u/G9J5WWdNoUT4SiV6mFC3y6cxyFcFwdzPM3FgxGAQ==", + "license": "MIT", "dependencies": { - "ansi-styles": "^3.2.0", - "string-width": "^3.0.0", - "strip-ansi": "^5.0.0" + "@types/json-schema": "^7.0.9", + "ajv": "^8.9.0", + "ajv-formats": "^2.1.1", + "ajv-keywords": "^5.1.0" }, "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/wrap-ansi/node_modules/ansi-regex": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", - "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==", - "dev": true, - "optional": true, - "peer": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/wrap-ansi/node_modules/strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "ansi-regex": "^4.1.0" + "node": ">= 10.13.0" }, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-dev-server/node_modules/ws": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.2.tgz", - "integrity": "sha512-zmhltoSR8u1cnDsD43TX59mzoMZsLKqUweyYBAIvTngR3shc0W6aOZylZmq/7hqyVxPdi+5Ud2QInblgyE72fw==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "async-limiter": "~1.0.0" - } - }, - "node_modules/webpack-dev-server/node_modules/yargs": { - "version": "13.3.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", - "integrity": "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "cliui": "^5.0.0", - "find-up": "^3.0.0", - "get-caller-file": "^2.0.1", - "require-directory": "^2.1.1", - "require-main-filename": "^2.0.0", - "set-blocking": "^2.0.0", - "string-width": "^3.0.0", - "which-module": "^2.0.0", - "y18n": "^4.0.0", - "yargs-parser": "^13.1.2" - } - }, - "node_modules/webpack-dev-server/node_modules/yargs-parser": { - "version": "13.1.2", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", - "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", - "dev": true, - "optional": true, - "peer": true, - "dependencies": { - "camelcase": "^5.0.0", - "decamelize": "^1.2.0" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" } }, "node_modules/webpack-hot-middleware": { @@ -46273,38 +42272,6 @@ "node": ">=0.10.0" } }, - "node_modules/webpack-log": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/webpack-log/-/webpack-log-2.0.0.tgz", - "integrity": "sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg==", - "dev": true, - "dependencies": { - "ansi-colors": "^3.0.0", - "uuid": "^3.3.2" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/webpack-log/node_modules/ansi-colors": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-3.2.4.tgz", - "integrity": 
"sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack-log/node_modules/uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", - "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", - "dev": true, - "bin": { - "uuid": "bin/uuid" - } - }, "node_modules/webpack-manifest-plugin": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/webpack-manifest-plugin/-/webpack-manifest-plugin-4.0.2.tgz", @@ -46536,23 +42503,6 @@ "node": ">=6" } }, - "node_modules/webpack/node_modules/terser": { - "version": "5.29.2", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.29.2.tgz", - "integrity": "sha512-ZiGkhUBIM+7LwkNjXYJq8svgkd+QK3UUr0wJqY4MieaezBSAIPgbSPZyIx0idM6XWK5CMzSWa8MJIzmRcB8Caw==", - "dependencies": { - "@jridgewell/source-map": "^0.3.3", - "acorn": "^8.8.2", - "commander": "^2.20.0", - "source-map-support": "~0.5.20" - }, - "bin": { - "terser": "bin/terser" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/webpack/node_modules/terser-webpack-plugin": { "version": "5.3.10", "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.10.tgz", @@ -46751,13 +42701,27 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/which-module": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.1.tgz", - "integrity": "sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==", + "node_modules/which-typed-array": { + "version": "1.1.19", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", + "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", "dev": true, - "optional": true, - "peer": true + "license": "MIT", + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "for-each": "^0.3.5", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/wide-align": { "version": "1.1.3", @@ -47162,15 +43126,16 @@ } }, "node_modules/ws": { - "version": "7.5.6", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.6.tgz", - "integrity": "sha512-6GLgCqo2cy2A2rjCNFlxQS6ZljG/coZfZXclldI8FB/1G3CCI36Zd8xy2HrFVACi8tfk5XrgLQEk+P0Tnz9UcA==", + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", + "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==", + "license": "MIT", "engines": { - "node": ">=8.3.0" + "node": ">=10.0.0" }, "peerDependencies": { "bufferutil": "^4.0.1", - "utf-8-validate": "^5.0.2" + "utf-8-validate": ">=5.0.2" }, "peerDependenciesMeta": { "bufferutil": { @@ -47304,16 +43269,9 @@ "resolved": "https://registry.npmjs.org/@apideck/better-ajv-errors/-/better-ajv-errors-0.3.2.tgz", "integrity": "sha512-JdEazx7qiVqTBzzBl5rolRwl5cmhihjfIcpqRzIZjtT6b18liVmDn/VlWpqW4C/qP2hrFFMLRV1wlex8ZVBPTg==", "requires": { - "json-schema": "^0.4.0", + "json-schema": "0.4.0", 
"jsonpointer": "^5.0.0", "leven": "^3.1.0" - }, - "dependencies": { - "json-schema": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", - "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==" - } } }, "@babel/code-frame": { @@ -47403,9 +43361,9 @@ "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==" }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" } } }, @@ -47554,9 +43512,9 @@ } }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" } } }, @@ -48484,13 +44442,16 @@ "integrity": "sha512-qZEzVQ+5Qh6cROaTPFLNS4lkvQ6mBzE3R6A6EEpssj7Zr2egMHgsy4XapdifqJDGC9CBiNv7s+ejI96rLNQFdg==", "requires": { "browserslist": "^4.19.1", - "semver": "7.0.0" + "semver": "7.6.0" }, "dependencies": { "semver": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz", - "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==" + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "requires": { + "lru-cache": "^6.0.0" + } } } }, @@ -48531,9 +44492,9 @@ } }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" } } }, @@ -48759,9 +44720,9 @@ }, "dependencies": { "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" } } }, @@ -48921,27 +44882,12 @@ "autoprefixer": "^10.4.12", "cosmiconfig": "^7.0.1", "cosmiconfig-typescript-loader": "^1.0.0", - "cross-spawn": "^7.0.3", + "cross-spawn": "7.0.5", "lodash": "^4.17.21", - "semver": "^7.3.7", + "semver": "7.6.0", "webpack-merge": "^5.8.0" }, "dependencies": { - "cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "requires": { - 
"path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - } - }, - "path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" - }, "semver": { "version": "7.6.0", "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", @@ -48949,27 +44895,6 @@ "requires": { "lru-cache": "^6.0.0" } - }, - "shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "requires": { - "shebang-regex": "^3.0.0" - } - }, - "shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" - }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "requires": { - "isexe": "^2.0.0" - } } } }, @@ -49132,7 +45057,7 @@ "ignore": "^4.0.6", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", - "minimatch": "^3.0.4", + "minimatch": "3.0.5", "strip-json-comments": "^3.1.1" }, "dependencies": { @@ -49212,7 +45137,7 @@ "@google-cloud/projectify": "^1.0.0", "@google-cloud/promisify": "^1.0.0", "arrify": "^2.0.0", - "duplexify": "^3.6.0", + "duplexify": "4.1.3", "ent": "^2.2.0", "extend": "^3.0.2", "google-auth-library": "^5.5.0", @@ -49271,7 +45196,7 @@ "compressible": "^2.0.12", "concat-stream": "^2.0.0", "date-and-time": "^0.11.0", - "duplexify": "^3.5.0", + "duplexify": "4.1.3", "extend": "^3.0.2", "gaxios": "^2.0.1", "gcs-resumable-upload": "^2.2.4", @@ -49318,23 +45243,9 @@ "integrity": "sha512-m7KOje7jZxrmutanlkS1daj1dS6z6BgslzOXmcSEpIlCxM3VJH7lG5QLeck/6hgF6F4crFf01UtQmNsJfweTAw==", "dev": true, "requires": { - "duplexify": "^4.1.1", + "duplexify": "4.1.3", "inherits": "^2.0.3", "pump": "^3.0.0" - }, - "dependencies": { - "duplexify": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", - "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", - "dev": true, - "requires": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", - "stream-shift": "^1.0.0" - } - } } }, "readable-stream": { @@ -49372,7 +45283,7 @@ "requires": { "@humanwhocodes/object-schema": "^1.2.1", "debug": "^4.1.1", - "minimatch": "^3.0.4" + "minimatch": "3.0.5" }, "dependencies": { "debug": { @@ -49472,7 +45383,7 @@ "jest-util": "^27.4.2", "jest-validate": "^27.4.6", "jest-watcher": "^27.4.6", - "micromatch": "^4.0.4", + "micromatch": ">=4.0.8", "rimraf": "^3.0.0", "slash": "^3.0.0", "strip-ansi": "^6.0.0" @@ -49517,7 +45428,7 @@ "jest-haste-map": "^27.4.6", "jest-regex-util": "^27.4.0", "jest-util": "^27.4.2", - "micromatch": "^4.0.4", + "micromatch": ">=4.0.8", "pirates": "^4.0.4", "slash": "^3.0.0", "source-map": "^0.6.1", @@ -49572,14 +45483,6 @@ "test-exclude": "^6.0.0" } }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "requires": { - 
"fill-range": "^7.0.1" - } - }, "chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -49607,24 +45510,11 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "requires": { - "to-regex-range": "^5.0.1" - } - }, "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" - }, "istanbul-lib-coverage": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz", @@ -49658,7 +45548,7 @@ "jest-serializer": "^27.4.0", "jest-util": "^27.4.2", "jest-worker": "^27.4.6", - "micromatch": "^4.0.4", + "micromatch": ">=4.0.8", "walker": "^1.0.7" } }, @@ -49709,15 +45599,6 @@ } } }, - "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, "pirates": { "version": "4.0.4", "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.4.tgz", @@ -49732,9 +45613,9 @@ } }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" }, "source-map": { "version": "0.6.1", @@ -49748,14 +45629,6 @@ "requires": { "has-flag": "^4.0.0" } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "requires": { - "is-number": "^7.0.0" - } } } }, @@ -50101,7 +45974,7 @@ "jest-haste-map": "^27.4.6", "jest-regex-util": "^27.4.0", "jest-util": "^27.4.2", - "micromatch": "^4.0.4", + "micromatch": ">=4.0.8", "pirates": "^4.0.4", "slash": "^3.0.0", "source-map": "^0.6.1", @@ -50156,14 +46029,6 @@ "test-exclude": "^6.0.0" } }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "requires": { - "fill-range": "^7.0.1" - } - }, "chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -50191,24 +46056,11 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "requires": { - "to-regex-range": "^5.0.1" - } - }, "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" - }, "istanbul-lib-instrument": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.1.0.tgz", @@ -50253,7 +46105,7 @@ "jest-serializer": "^27.4.0", "jest-util": "^27.4.2", "jest-worker": "^27.4.6", - "micromatch": "^4.0.4", + "micromatch": ">=4.0.8", "walker": "^1.0.7" } }, @@ -50304,24 +46156,15 @@ } } }, - "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, "pirates": { "version": "4.0.4", "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.4.tgz", "integrity": "sha512-ZIrVPH+A52Dw84R0L3/VS9Op04PuQ2SEoJL6bkshmiTic/HldyW9Tf7oH5mhJZBK7NmDx27vSMrYEXPXclpDKw==" }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" }, "source-map": { "version": "0.6.1", @@ -50335,14 +46178,6 @@ "requires": { "has-flag": "^4.0.0" } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "requires": { - "is-number": "^7.0.0" - } } } }, @@ -50447,14 +46282,6 @@ "color-convert": "^2.0.1" } }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "requires": { - "fill-range": "^7.0.1" - } - }, "chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -50482,24 +46309,11 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "requires": { - "to-regex-range": "^5.0.1" - } - }, "has-flag": { "version": "4.0.0", "resolved": 
"https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" - }, "jest-haste-map": { "version": "27.4.6", "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-27.4.6.tgz", @@ -50516,7 +46330,7 @@ "jest-serializer": "^27.4.0", "jest-util": "^27.4.2", "jest-worker": "^27.4.6", - "micromatch": "^4.0.4", + "micromatch": ">=4.0.8", "walker": "^1.0.7" } }, @@ -50567,15 +46381,6 @@ } } }, - "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, "supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -50583,14 +46388,6 @@ "requires": { "has-flag": "^4.0.0" } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "requires": { - "is-number": "^7.0.0" - } } } }, @@ -50610,7 +46407,7 @@ "jest-haste-map": "^26.6.2", "jest-regex-util": "^26.0.0", "jest-util": "^26.6.2", - "micromatch": "^4.0.2", + "micromatch": ">=4.0.8", "pirates": "^4.0.1", "slash": "^3.0.0", "source-map": "^0.6.1", @@ -50657,15 +46454,6 @@ "color-convert": "^2.0.1" } }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "requires": { - "fill-range": "^7.0.1" - } - }, "chalk": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.1.tgz", @@ -50691,37 +46479,12 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "requires": { - "to-regex-range": "^5.0.1" - } - }, "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true - }, - "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dev": true, - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, "source-map": { "version": "0.6.1", "resolved": 
"https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", @@ -50736,15 +46499,6 @@ "requires": { "has-flag": "^4.0.0" } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "requires": { - "is-number": "^7.0.0" - } } } }, @@ -50969,6 +46723,12 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true + }, + "semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true } } }, @@ -51051,7 +46811,7 @@ "integrity": "sha512-br5Qwvh8D2OQqSXpd1g/xqXKnK0r+Jz6qVKBbWmpUcrbGOxUrf39V5oZ1876084CGn18uMdR5uvPqBv9UqtBjQ==", "dev": true, "requires": { - "ansi-html": "^0.0.7", + "ansi-html": "0.0.8", "error-stack-parser": "^2.0.6", "html-entities": "^1.2.1", "native-url": "^0.2.6", @@ -51279,17 +47039,6 @@ "process": "^0.11.10" } }, - "prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "requires": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "regenerator-runtime": { "version": "0.13.9", "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", @@ -51455,17 +47204,6 @@ "integrity": "sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q==", "dev": true }, - "prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "requires": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "regenerator-runtime": { "version": "0.13.9", "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", @@ -51546,17 +47284,6 @@ "process": "^0.11.10" } }, - "prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "requires": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "qs": { "version": "6.11.0", "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", @@ -51645,17 +47372,6 @@ "process": "^0.11.10" } }, - "prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "requires": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "regenerator-runtime": { "version": "0.13.9", "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", @@ -51848,7 +47564,7 @@ "url-loader": "^4.1.1", "util-deprecate": "^1.0.2", "webpack": "4", - "webpack-dev-middleware": 
"^3.7.3", + "webpack-dev-middleware": "5.3.4", "webpack-filter-warnings-plugin": "^1.2.1", "webpack-hot-middleware": "^2.25.0", "webpack-virtual-modules": "^0.2.2" @@ -51871,9 +47587,9 @@ }, "dependencies": { "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true } } @@ -52174,9 +47890,9 @@ }, "dependencies": { "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true } } @@ -52359,7 +48075,7 @@ "klona": "^2.0.4", "loader-utils": "^2.0.0", "schema-utils": "^3.0.0", - "semver": "^7.3.4" + "semver": "7.6.0" }, "dependencies": { "loader-utils": { @@ -52383,15 +48099,6 @@ "ajv": "^6.12.5", "ajv-keywords": "^3.5.2" } - }, - "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - } } } }, @@ -52411,6 +48118,15 @@ "path-parse": "^1.0.6" } }, + "semver": { + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "dev": true, + "requires": { + "lru-cache": "^6.0.0" + } + }, "serialize-javascript": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-4.0.0.tgz", @@ -52467,7 +48183,7 @@ "loader-runner": "^2.4.0", "loader-utils": "^1.2.3", "memory-fs": "^0.4.1", - "micromatch": "^3.1.10", + "micromatch": ">=4.0.8", "mkdirp": "^0.5.3", "neo-async": "^2.6.1", "node-libs-browser": "^2.2.1", @@ -52501,7 +48217,7 @@ "schema-utils": "^1.0.0", "serialize-javascript": "^4.0.0", "source-map": "^0.6.1", - "terser": "^4.1.2", + "terser": "5.14.2", "webpack-sources": "^1.4.0", "worker-farm": "^1.7.0" } @@ -52741,17 +48457,6 @@ "dev": true, "requires": {} }, - "prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "requires": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "regenerator-runtime": { "version": "0.13.9", "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", @@ -52875,7 +48580,7 @@ "interpret": "^2.2.0", "json5": "^2.1.3", "lazy-universal-dotenv": "^3.0.1", - "micromatch": "^4.0.2", + "micromatch": ">=4.0.8", "pkg-dir": "^5.0.0", "pretty-hrtime": "^1.0.3", "resolve-from": "^5.0.0", @@ -52901,9 +48606,9 @@ }, "dependencies": { "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": 
"sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true } } @@ -53113,15 +48818,6 @@ "core-js-compat": "^3.8.1" } }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "requires": { - "fill-range": "^7.0.1" - } - }, "cacache": { "version": "12.0.4", "resolved": "https://registry.npmjs.org/cacache/-/cacache-12.0.4.tgz", @@ -53261,15 +48957,6 @@ "estraverse": "^4.1.1" } }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "requires": { - "to-regex-range": "^5.0.1" - } - }, "find-up": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", @@ -53306,9 +48993,9 @@ "fs-extra": "^9.0.0", "glob": "^7.1.6", "memfs": "^3.1.2", - "minimatch": "^3.0.4", + "minimatch": "3.0.5", "schema-utils": "2.7.0", - "semver": "^7.3.2", + "semver": "7.6.0", "tapable": "^1.0.0" }, "dependencies": { @@ -53343,15 +49030,6 @@ "ajv": "^6.12.2", "ajv-keywords": "^3.4.1" } - }, - "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - } } } }, @@ -53361,12 +49039,6 @@ "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true - }, "is-wsl": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-1.1.0.tgz", @@ -53407,16 +49079,6 @@ } } }, - "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dev": true, - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, "ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -53466,6 +49128,15 @@ "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", "dev": true }, + "semver": { + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "dev": true, + "requires": { + "lru-cache": "^6.0.0" + } + }, "serialize-javascript": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-4.0.0.tgz", @@ -53502,7 +49173,7 @@ "schema-utils": "^1.0.0", "serialize-javascript": "^4.0.0", "source-map": "^0.6.1", - "terser": "^4.1.2", + "terser": "5.14.2", "webpack-sources": "^1.4.0", "worker-farm": "^1.7.0" }, @@ 
-53520,15 +49191,6 @@ } } }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "requires": { - "is-number": "^7.0.0" - } - }, "watchpack": { "version": "1.7.5", "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-1.7.5.tgz", @@ -53561,7 +49223,7 @@ "loader-runner": "^2.4.0", "loader-utils": "^1.2.3", "memory-fs": "^0.4.1", - "micromatch": "^3.1.10", + "micromatch": ">=4.0.8", "mkdirp": "^0.5.3", "neo-async": "^2.6.1", "node-libs-browser": "^2.2.1", @@ -53572,99 +49234,6 @@ "webpack-sources": "^1.4.1" }, "dependencies": { - "braces": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", - "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", - "dev": true, - "requires": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, - "fill-range": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", - "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", - "dev": true, - "requires": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, - "is-number": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", - "dev": true, - "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", - "dev": true, - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, - "micromatch": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", - "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", - "dev": true, - "requires": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - "fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.2" - } - }, 
"schema-utils": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", @@ -53675,16 +49244,6 @@ "ajv-errors": "^1.0.0", "ajv-keywords": "^3.1.0" } - }, - "to-regex-range": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", - "dev": true, - "requires": { - "is-number": "^3.0.0", - "repeat-string": "^1.6.1" - } } } }, @@ -53744,7 +49303,7 @@ "fs-extra": "^9.0.1", "globby": "^11.0.2", "ip": "^1.1.5", - "node-fetch": "^2.6.1", + "node-fetch": "2.6.7", "pretty-hrtime": "^1.0.3", "prompts": "^2.4.0", "regenerator-runtime": "^0.13.7", @@ -54107,10 +49666,13 @@ } }, "node-fetch": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz", - "integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==", - "dev": true + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "dev": true, + "requires": { + "whatwg-url": "^5.0.0" + } }, "regenerator-runtime": { "version": "0.13.9", @@ -54174,11 +49736,17 @@ "schema-utils": "^1.0.0", "serialize-javascript": "^4.0.0", "source-map": "^0.6.1", - "terser": "^4.1.2", + "terser": "5.14.2", "webpack-sources": "^1.4.0", "worker-farm": "^1.7.0" } }, + "tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "dev": true + }, "watchpack": { "version": "1.7.5", "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-1.7.5.tgz", @@ -54191,6 +49759,12 @@ "watchpack-chokidar2": "^2.0.1" } }, + "webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "dev": true + }, "webpack": { "version": "4.47.0", "resolved": "https://registry.npmjs.org/webpack/-/webpack-4.47.0.tgz", @@ -54211,7 +49785,7 @@ "loader-runner": "^2.4.0", "loader-utils": "^1.2.3", "memory-fs": "^0.4.1", - "micromatch": "^3.1.10", + "micromatch": ">=4.0.8", "mkdirp": "^0.5.3", "neo-async": "^2.6.1", "node-libs-browser": "^2.2.1", @@ -54222,6 +49796,16 @@ "webpack-sources": "^1.4.1" } }, + "whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dev": true, + "requires": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, "yallist": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", @@ -54310,7 +49894,7 @@ "find-up": "^5.0.0", "fs-extra": "^9.0.1", "html-webpack-plugin": "^4.0.0", - "node-fetch": "^2.6.1", + "node-fetch": "2.6.7", "pnp-webpack-plugin": "1.6.4", "read-pkg-up": "^7.0.1", "regenerator-runtime": "^0.13.7", @@ -54322,7 +49906,7 @@ "url-loader": "^4.1.1", "util-deprecate": "^1.0.2", "webpack": "4", - "webpack-dev-middleware": "^3.7.3", + "webpack-dev-middleware": "5.3.4", "webpack-virtual-modules": "^0.2.2" }, "dependencies": { @@ -54726,10 +50310,13 @@ } 
}, "node-fetch": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz", - "integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==", - "dev": true + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "dev": true, + "requires": { + "whatwg-url": "^5.0.0" + } }, "p-limit": { "version": "3.1.0", @@ -54774,9 +50361,9 @@ "dev": true }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true }, "serialize-javascript": { @@ -54803,6 +50390,12 @@ "figgy-pudding": "^3.5.1" } }, + "tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "dev": true + }, "watchpack": { "version": "1.7.5", "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-1.7.5.tgz", @@ -54815,6 +50408,12 @@ "watchpack-chokidar2": "^2.0.1" } }, + "webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "dev": true + }, "webpack": { "version": "4.47.0", "resolved": "https://registry.npmjs.org/webpack/-/webpack-4.47.0.tgz", @@ -54835,7 +50434,7 @@ "loader-runner": "^2.4.0", "loader-utils": "^1.2.3", "memory-fs": "^0.4.1", - "micromatch": "^3.1.10", + "micromatch": ">=4.0.8", "mkdirp": "^0.5.3", "neo-async": "^2.6.1", "node-libs-browser": "^2.2.1", @@ -54869,13 +50468,23 @@ "schema-utils": "^1.0.0", "serialize-javascript": "^4.0.0", "source-map": "^0.6.1", - "terser": "^4.1.2", + "terser": "5.14.2", "webpack-sources": "^1.4.0", "worker-farm": "^1.7.0" } } } }, + "whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dev": true, + "requires": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, "yallist": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", @@ -54983,13 +50592,13 @@ "babel-plugin-react-docgen": "^4.1.0", "pnp-webpack-plugin": "^1.6.4", "react-docgen-typescript-plugin": "^1.0.0", - "semver": "^7.3.5" + "semver": "7.6.0" }, "dependencies": { "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "dev": true, "requires": { "lru-cache": "^6.0.0" @@ -55319,17 +50928,6 @@ "yallist": "^3.0.2" } }, - "prop-types": { - "version": "15.7.2", - "resolved": 
"https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "requires": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "regenerator-runtime": { "version": "0.13.9", "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", @@ -55383,7 +50981,7 @@ "schema-utils": "^1.0.0", "serialize-javascript": "^4.0.0", "source-map": "^0.6.1", - "terser": "^4.1.2", + "terser": "5.14.2", "webpack-sources": "^1.4.0", "worker-farm": "^1.7.0" } @@ -55420,7 +51018,7 @@ "loader-runner": "^2.4.0", "loader-utils": "^1.2.3", "memory-fs": "^0.4.1", - "micromatch": "^3.1.10", + "micromatch": ">=4.0.8", "mkdirp": "^0.5.3", "neo-async": "^2.6.1", "node-libs-browser": "^2.2.1", @@ -55449,20 +51047,11 @@ "endent": "^2.0.1", "find-cache-dir": "^3.3.1", "flat-cache": "^3.0.4", - "micromatch": "^4.0.2", + "micromatch": ">=4.0.8", "react-docgen-typescript": "^2.0.0", "tslib": "^2.0.0" }, "dependencies": { - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "requires": { - "fill-range": "^7.0.1" - } - }, "debug": { "version": "4.3.2", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", @@ -55472,15 +51061,6 @@ "ms": "2.1.2" } }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "requires": { - "to-regex-range": "^5.0.1" - } - }, "find-cache-dir": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.1.tgz", @@ -55492,12 +51072,6 @@ "pkg-dir": "^4.1.0" } }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true - }, "make-dir": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", @@ -55507,16 +51081,6 @@ "semver": "^6.0.0" } }, - "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dev": true, - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, "ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -55540,20 +51104,11 @@ "requires": {} }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, 
- "requires": { - "is-number": "^7.0.0" - } - }, "tslib": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.0.tgz", @@ -55805,20 +51360,12 @@ "resolved": "https://registry.npmjs.org/@surma/rollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-2.2.3.tgz", "integrity": "sha512-lR8q/9W7hZpMWweNiAKU7NQerBnzQQLvi8qnTDU/fxItPhtZVMbPV3lbCwjhIlNBe9Bbr5V+KHshvWmVSG9cxQ==", "requires": { - "ejs": "^3.1.6", + "ejs": "3.1.7", "json5": "^2.2.0", "magic-string": "^0.25.0", "string.prototype.matchall": "^4.0.6" }, "dependencies": { - "ejs": { - "version": "3.1.6", - "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.6.tgz", - "integrity": "sha512-9lt9Zse4hPucPkoP7FHDF0LQAlGyF9JVpnClFLFH3aSSbxmyoqINRpp/9wePWJTUl4KOQwRL72Iw3InHPDkoGw==", - "requires": { - "jake": "^10.6.1" - } - }, "es-abstract": { "version": "1.19.1", "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.19.1.tgz", @@ -55856,11 +51403,6 @@ "is-symbol": "^1.0.2" } }, - "is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==" - }, "is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -55878,11 +51420,6 @@ "has-tostringtag": "^1.0.0" } }, - "object-inspect": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", - "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==" - }, "string.prototype.matchall": { "version": "4.0.6", "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.6.tgz", @@ -56063,12 +51600,6 @@ "@types/yargs-parser": "*" } }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, "chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -56127,7 +51658,7 @@ "dev": true, "requires": { "@jest/types": "^27.2.4", - "ansi-regex": "^5.0.1", + "ansi-regex": "5.0.1", "ansi-styles": "^5.0.0", "react-is": "^17.0.1" }, @@ -56819,21 +52350,10 @@ "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==" }, "@types/http-proxy": { - "version": "1.17.14", - "resolved": "https://registry.npmjs.org/@types/http-proxy/-/http-proxy-1.17.14.tgz", - "integrity": "sha512-SSrD0c1OQzlFX7pGu1eXxSEjemej64aaNPRhhVYUGqXh0BtldAAx37MG8btcumvpgKyZp1F5Gn3JkktdxiFv6w==", - "requires": { - "@types/node": "*" - } - }, - "@types/http-proxy-middleware": { - "version": "0.17.6", - "resolved": "https://registry.npmjs.org/@types/http-proxy-middleware/-/http-proxy-middleware-0.17.6.tgz", - "integrity": "sha512-NocuMc3omR+yySlkgZlNUDyJa9ENGuwX8Ev7Y9zO//H989drWp18Fn+oAgZZIPu+JWtNinIxENK2TZvo53o3tw==", - "dev": true, + "version": "1.17.16", + "resolved": "https://registry.npmjs.org/@types/http-proxy/-/http-proxy-1.17.16.tgz", + "integrity": "sha512-sdWoUajOB1cd0A8cRRQ1cfyWNbmFKLAqBB89Y8x5iYyG/mkJHc0YUH8pdWBy2omi9qtCpiIgGjuwO0dQST2l5w==", "requires": { - "@types/connect": "*", - "@types/http-proxy": "*", "@types/node": "*" } }, @@ -56876,12 +52396,6 @@ "pretty-format": "^27.0.0" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, "ansi-styles": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", @@ -57000,19 +52514,28 @@ "dev": true, "requires": { "@types/node": "*", - "form-data": "^3.0.0" + "form-data": "2.5.4" }, "dependencies": { "form-data": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", - "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "version": "2.5.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.4.tgz", + "integrity": "sha512-Y/3MmRiR8Nd+0CUtrbvcKtKzLWiUfpQ7DFVggH8PwmGt/0r7RSy32GuP4hpCJlQNEBusisSx1DLtD8uD386HJQ==", "dev": true, "requires": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", - "mime-types": "^2.1.12" + "es-set-tostringtag": "^2.1.0", + "has-own": "^1.0.1", + "mime-types": "^2.1.35", + "safe-buffer": "^5.2.1" } + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true } } }, @@ -57383,7 +52906,7 @@ "functional-red-black-tree": "^1.0.1", "ignore": "^5.1.8", "regexpp": "^3.2.0", - "semver": "^7.3.5", + "semver": "7.6.0", "tsutils": "^3.21.0" }, "dependencies": { @@ -57401,9 +52924,9 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "requires": { "lru-cache": "^6.0.0" } @@ -57509,7 +53032,7 @@ "debug": "^4.3.2", "globby": "^11.0.4", "is-glob": "^4.0.3", - "semver": "^7.3.5", + "semver": "7.6.0", "tsutils": "^3.21.0" }, "dependencies": { @@ -57521,23 +53044,15 @@ "ms": "2.1.2" } }, - "is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "requires": { - "is-extglob": "^2.1.1" - } - }, "ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "requires": { "lru-cache": "^6.0.0" } @@ -57969,19 +53484,6 @@ "prop-types": "^15.7.2", "prop-types-exact": "^1.2.0", "react-is": "^16.9.0" - }, - "dependencies": { - "prop-types": { - "version": "15.7.2", - "resolved": 
"https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "requires": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - } } }, "ajv": { @@ -58010,6 +53512,14 @@ "ajv": "^8.0.0" } }, + "ajv-keywords": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", + "requires": { + "fast-deep-equal": "^3.1.3" + } + }, "alphanum-sort": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/alphanum-sort/-/alphanum-sort-1.0.2.tgz", @@ -58085,15 +53595,15 @@ } }, "ansi-html": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/ansi-html/-/ansi-html-0.0.7.tgz", - "integrity": "sha1-gTWEAhliqenm/QOflA0S9WynhZ4=", + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/ansi-html/-/ansi-html-0.0.8.tgz", + "integrity": "sha512-QROYz1I1Kj+8bTYgx0IlMBpRSCIU+7GjbE0oH+KF7QKc+qSF8YAlIutN59Db17tXN70Ono9upT9Ht0iG93W7ug==", "devOptional": true }, "ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==" + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" }, "ansi-styles": { "version": "3.2.1", @@ -58173,29 +53683,6 @@ "@babel/runtime-corejs3": "^7.10.2" } }, - "arr-diff": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz", - "integrity": "sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA=" - }, - "arr-flatten": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.1.0.tgz", - "integrity": "sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==" - }, - "arr-union": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/arr-union/-/arr-union-3.1.0.tgz", - "integrity": "sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ=" - }, - "array-flatten": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-2.1.2.tgz", - "integrity": "sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ==", - "dev": true, - "optional": true, - "peer": true - }, "array-includes": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.3.tgz", @@ -58241,11 +53728,6 @@ "is-symbol": "^1.0.2" } }, - "is-callable": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.3.tgz", - "integrity": "sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ==" - }, "is-regex": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.3.tgz", @@ -58259,11 +53741,6 @@ "version": "1.0.6", "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.6.tgz", "integrity": "sha512-2gdzbKUuqtQ3lYNrUTQYoClPhm7oQu4UdpSZMp1/DGgkHBT8E2Z1l0yMdb6D4zNAxwDiMv8MdulKROJGNl0Q0w==" - }, - "object-inspect": { - "version": "1.10.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.10.3.tgz", - "integrity": 
"sha512-e5mCJlSH7poANfC8z8S9s9S2IN5/4Zb3aZ33f5s8YqoazCFzNLloLU8r5VCG+G7WoqLvAAZoVMcy3tp/3X0Plw==" } } }, @@ -58278,11 +53755,6 @@ "integrity": "sha1-r2rId6Jcx/dOBYiUdThY39sk/bY=", "dev": true }, - "array-unique": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz", - "integrity": "sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg=" - }, "array.prototype.find": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/array.prototype.find/-/array.prototype.find-2.1.0.tgz", @@ -58371,12 +53843,6 @@ "is-symbol": "^1.0.2" } }, - "is-callable": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.3.tgz", - "integrity": "sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ==", - "dev": true - }, "is-regex": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.3.tgz", @@ -58392,12 +53858,6 @@ "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.6.tgz", "integrity": "sha512-2gdzbKUuqtQ3lYNrUTQYoClPhm7oQu4UdpSZMp1/DGgkHBT8E2Z1l0yMdb6D4zNAxwDiMv8MdulKROJGNl0Q0w==", "dev": true - }, - "object-inspect": { - "version": "1.10.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.10.3.tgz", - "integrity": "sha512-e5mCJlSH7poANfC8z8S9s9S2IN5/4Zb3aZ33f5s8YqoazCFzNLloLU8r5VCG+G7WoqLvAAZoVMcy3tp/3X0Plw==", - "dev": true } } }, @@ -58450,12 +53910,6 @@ "is-symbol": "^1.0.2" } }, - "is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", - "dev": true - }, "is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -58474,12 +53928,6 @@ "requires": { "has-tostringtag": "^1.0.0" } - }, - "object-inspect": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", - "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==", - "dev": true } } }, @@ -58547,11 +53995,6 @@ "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" }, - "assign-symbols": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assign-symbols/-/assign-symbols-1.0.0.tgz", - "integrity": "sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c=" - }, "ast-types": { "version": "0.14.2", "resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.14.2.tgz", @@ -58574,17 +54017,6 @@ "resolved": "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.7.tgz", "integrity": "sha1-9wtzXGvKGlycItmCw+Oef+ujva0=" }, - "async": { - "version": "2.6.3", - "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz", - "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "lodash": "^4.17.14" - } - }, "async-each": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/async-each/-/async-each-1.0.6.tgz", @@ -58592,12 +54024,6 @@ "dev": true, "optional": true }, - "async-limiter": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/async-limiter/-/async-limiter-1.0.0.tgz", - "integrity": "sha512-jp/uFnooOiO+L211eZOoSyzpOITMXx1rBITauYykG3BRYPu8h0UcxsPNB04RR5vo4Tyz3+ay17tR6JVf9qzYWg==", - "dev": true - }, "asynckit": { 
"version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -58608,11 +54034,6 @@ "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==" }, - "atob": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz", - "integrity": "sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==" - }, "attr-accept": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/attr-accept/-/attr-accept-1.1.3.tgz", @@ -58673,6 +54094,15 @@ } } }, + "available-typed-arrays": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "dev": true, + "requires": { + "possible-typed-array-names": "^1.0.0" + } + }, "aws-sign2": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", @@ -58723,7 +54153,7 @@ "jest-haste-map": "^27.4.6", "jest-regex-util": "^27.4.0", "jest-util": "^27.4.2", - "micromatch": "^4.0.4", + "micromatch": ">=4.0.8", "pirates": "^4.0.4", "slash": "^3.0.0", "source-map": "^0.6.1", @@ -58778,14 +54208,6 @@ "test-exclude": "^6.0.0" } }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "requires": { - "fill-range": "^7.0.1" - } - }, "chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -58813,24 +54235,11 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "requires": { - "to-regex-range": "^5.0.1" - } - }, "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" - }, "istanbul-lib-coverage": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz", @@ -58864,7 +54273,7 @@ "jest-serializer": "^27.4.0", "jest-util": "^27.4.2", "jest-worker": "^27.4.6", - "micromatch": "^4.0.4", + "micromatch": ">=4.0.8", "walker": "^1.0.7" } }, @@ -58915,24 +54324,15 @@ } } }, - "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, "pirates": { "version": "4.0.4", "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.4.tgz", "integrity": 
"sha512-ZIrVPH+A52Dw84R0L3/VS9Op04PuQ2SEoJL6bkshmiTic/HldyW9Tf7oH5mhJZBK7NmDx27vSMrYEXPXclpDKw==" }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" }, "source-map": { "version": "0.6.1", @@ -58946,14 +54346,6 @@ "requires": { "has-flag": "^4.0.0" } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "requires": { - "is-number": "^7.0.0" - } } } }, @@ -59013,9 +54405,9 @@ } }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" } } }, @@ -59172,9 +54564,9 @@ }, "dependencies": { "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" } } }, @@ -60001,13 +55393,16 @@ "integrity": "sha512-qZEzVQ+5Qh6cROaTPFLNS4lkvQ6mBzE3R6A6EEpssj7Zr2egMHgsy4XapdifqJDGC9CBiNv7s+ejI96rLNQFdg==", "requires": { "browserslist": "^4.19.1", - "semver": "7.0.0" + "semver": "7.6.0" }, "dependencies": { "semver": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz", - "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==" + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "requires": { + "lru-cache": "^6.0.0" + } } } }, @@ -60048,9 +55443,9 @@ } }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" } } }, @@ -60086,56 +55481,6 @@ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, - "base": { - "version": "0.11.2", - "resolved": "https://registry.npmjs.org/base/-/base-0.11.2.tgz", - "integrity": "sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==", - "requires": { - "cache-base": "^1.0.1", - "class-utils": "^0.3.5", - 
"component-emitter": "^1.2.1", - "define-property": "^1.0.0", - "isobject": "^3.0.1", - "mixin-deep": "^1.2.0", - "pascalcase": "^0.1.1" - }, - "dependencies": { - "define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", - "requires": { - "is-descriptor": "^1.0.0" - } - }, - "is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "requires": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - } - } - } - }, "base64-js": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.0.tgz", @@ -60219,9 +55564,9 @@ "dev": true }, "body-parser": { - "version": "1.20.2", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz", - "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==", + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", "requires": { "bytes": "3.1.2", "content-type": "~1.0.5", @@ -60231,17 +55576,12 @@ "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", - "qs": "6.11.0", + "qs": "6.13.0", "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" }, "dependencies": { - "bytes": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", - "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==" - }, "debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", @@ -60273,11 +55613,11 @@ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, "qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", "requires": { - "side-channel": "^1.0.4" + "side-channel": "^1.0.6" } }, "setprototypeof": { @@ -60292,22 +55632,6 @@ } } }, - "bonjour": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/bonjour/-/bonjour-3.5.0.tgz", - "integrity": "sha1-jokKGD2O6aI5OzhExpGkK897yfU=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "array-flatten": "^2.1.0", - "deep-equal": "^1.0.1", - "dns-equal": 
"^1.0.0", - "dns-txt": "^2.0.2", - "multicast-dns": "^6.0.1", - "multicast-dns-service-types": "^1.1.0" - } - }, "bonjour-service": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/bonjour-service/-/bonjour-service-1.2.1.tgz", @@ -60426,39 +55750,20 @@ "integrity": "sha1-SJb8ydVE7vRfS7dmDbMg07N5/lg=" }, "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "requires": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "braces": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", - "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", - "requires": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "requires": { - "is-extendable": "^0.1.0" - } - } + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "requires": { + "fill-range": "^7.1.1" } }, "brcast": { @@ -60661,14 +55966,6 @@ "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==" }, - "buffer-indexof": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/buffer-indexof/-/buffer-indexof-1.1.1.tgz", - "integrity": "sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g==", - "dev": true, - "optional": true, - "peer": true - }, "buffer-xor": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz", @@ -60682,9 +55979,9 @@ "dev": true }, "bytes": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", - "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=" + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==" }, "c8": { "version": "7.8.0", @@ -60796,32 +56093,33 @@ } } }, - "cache-base": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz", - "integrity": "sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==", + "call-bind": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", + "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", "requires": { - "collection-visit": "^1.0.0", - "component-emitter": "^1.2.1", - "get-value": "^2.0.6", - 
"has-value": "^1.0.0", - "isobject": "^3.0.1", - "set-value": "^2.0.0", - "to-object-path": "^0.3.0", - "union-value": "^1.0.0", - "unset-value": "^1.0.0" + "call-bind-apply-helpers": "^1.0.0", + "es-define-property": "^1.0.0", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.2" } }, - "call-bind": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", - "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", + "call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", "requires": { - "es-define-property": "^1.0.0", "es-errors": "^1.3.0", - "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.4", - "set-function-length": "^1.2.1" + "function-bind": "^1.1.2" + } + }, + "call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "requires": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" } }, "call-me-maybe": { @@ -60941,7 +56239,7 @@ "change-emitter": { "version": "0.1.6", "resolved": "https://registry.npmjs.org/change-emitter/-/change-emitter-0.1.6.tgz", - "integrity": "sha1-6LL+PX8at9aaMhma/5HqaTFAlRU=" + "integrity": "sha512-YXzt1cQ4a2jqazhcuSWEOc1K2q8g9H6eWNsyZgi640LDzRWVQ2eDe+Y/kVdftH+vYdPF2rgDb3dLdpxE1jvAxw==" }, "char-regex": { "version": "1.0.2", @@ -61047,52 +56345,13 @@ "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", "requires": { "anymatch": "~3.1.2", - "braces": "~3.0.2", + "braces": "3.0.3", "fsevents": "~2.3.2", "glob-parent": "~5.1.2", "is-binary-path": "~2.1.0", "is-glob": "~4.0.1", "normalize-path": "~3.0.0", "readdirp": "~3.6.0" - }, - "dependencies": { - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "requires": { - "fill-range": "^7.0.1" - } - }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "requires": { - "to-regex-range": "^5.0.1" - } - }, - "is-glob": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", - "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", - "requires": { - "is-extglob": "^2.1.1" - } - }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "requires": { - "is-number": "^7.0.0" - } - } } }, "chownr": { @@ -61113,13 +56372,27 @@ "dev": true }, "cipher-base": { - "version": "1.0.4", - "resolved": 
"https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz", - "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.6.tgz", + "integrity": "sha512-3Ek9H3X6pj5TgenXYtNWdaBon1tgYCaebd+XPg0keyjEbEfkD4KkmAxkQ/i1vYvxdcT5nscLBfq9VJRmCBcFSw==", "dev": true, "requires": { - "inherits": "^2.0.1", - "safe-buffer": "^5.0.1" + "inherits": "^2.0.4", + "safe-buffer": "^5.2.1" + }, + "dependencies": { + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true + } } }, "cjs-module-lexer": { @@ -61127,27 +56400,6 @@ "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz", "integrity": "sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA==" }, - "class-utils": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/class-utils/-/class-utils-0.3.6.tgz", - "integrity": "sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==", - "requires": { - "arr-union": "^3.1.0", - "define-property": "^0.2.5", - "isobject": "^3.0.0", - "static-extend": "^0.1.1" - }, - "dependencies": { - "define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "requires": { - "is-descriptor": "^0.1.0" - } - } - } - }, "classcat": { "version": "5.0.3", "resolved": "https://registry.npmjs.org/classcat/-/classcat-5.0.3.tgz", @@ -61250,15 +56502,6 @@ "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz", "integrity": "sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg==" }, - "collection-visit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz", - "integrity": "sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA=", - "requires": { - "map-visit": "^1.0.0", - "object-visit": "^1.0.0" - } - }, "color-convert": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", @@ -61323,11 +56566,6 @@ "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=" }, - "component-emitter": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.2.1.tgz", - "integrity": "sha1-E3kY1teCg/ffemt8WmPhQOaUJeY=" - }, "compressible": { "version": "2.0.15", "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.15.tgz", @@ -61338,16 +56576,16 @@ } }, "compression": { - "version": "1.7.4", - "resolved": "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz", - "integrity": "sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==", + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/compression/-/compression-1.8.1.tgz", + "integrity": 
"sha512-9mAqGPHLakhCLeNyxPkK4xVo746zQ/czLH1Ky+vkitMnWfWZps8r0qXuwhwizagCRttsL4lfG4pIOvaWLpAP0w==", "requires": { - "accepts": "~1.3.5", - "bytes": "3.0.0", - "compressible": "~2.0.16", + "bytes": "3.1.2", + "compressible": "~2.0.18", "debug": "2.6.9", - "on-headers": "~1.0.2", - "safe-buffer": "5.1.2", + "negotiator": "~0.6.4", + "on-headers": "~1.1.0", + "safe-buffer": "5.2.1", "vary": "~1.1.2" }, "dependencies": { @@ -61366,6 +56604,16 @@ "requires": { "ms": "2.0.0" } + }, + "negotiator": { + "version": "0.6.4", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz", + "integrity": "sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==" + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" } } }, @@ -61397,14 +56645,6 @@ "resolved": "https://registry.npmjs.org/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz", "integrity": "sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA==" }, - "connect-history-api-fallback": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz", - "integrity": "sha512-e54B99q/OUoH64zYYRf3HBP5z24G38h5D3qXu23JGRoigpX5Ss4r9ZnDk3g0Z8uQC2x2lPaJ+UlWBc1ZWBWdLg==", - "dev": true, - "optional": true, - "peer": true - }, "console-browserify": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/console-browserify/-/console-browserify-1.2.0.tgz", @@ -61470,11 +56710,6 @@ "run-queue": "^1.0.0" } }, - "copy-descriptor": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/copy-descriptor/-/copy-descriptor-0.1.1.tgz", - "integrity": "sha1-Z29us8OZl8LuGsOpJP1hJHSPV40=" - }, "copy-to-clipboard": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/copy-to-clipboard/-/copy-to-clipboard-3.3.1.tgz", @@ -61495,7 +56730,7 @@ "integrity": "sha512-xGhzYMX6y7oEGQGAJmP2TmtBLvR4nZmRGEcFa3ubHOq5YEp51gGN9AovVa0AoujGZIq+Wm6dISiYyGNfdflYww==", "requires": { "browserslist": "^4.16.6", - "semver": "7.0.0" + "semver": "7.6.0" }, "dependencies": { "browserslist": { @@ -61511,9 +56746,12 @@ } }, "semver": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz", - "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==" + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "requires": { + "lru-cache": "^6.0.0" + } } } }, @@ -61618,9 +56856,9 @@ } }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true } } @@ -61683,7 +56921,7 @@ "glob-parent": "^3.1.0", "is-glob": "^4.0.0", "merge2": "^1.2.3", - "micromatch": "^3.1.10" + "micromatch": ">=4.0.8" } }, "glob-parent": { @@ -61835,16 +57073,28 @@ "integrity": 
"sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==" }, "cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dev": true, + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.5.tgz", + "integrity": "sha512-ZVJrKKYunU38/76t0RMOulHOnUcbU9GbpWKAOZ0mhjr7CX6FVrH+4FrAapSOekrgFQ3f/8gwMEuIft0aKq6Hug==", "requires": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "dependencies": { + "path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "requires": { + "isexe": "^2.0.0" + } + } } }, "crypto-browserify": { @@ -61934,7 +57184,7 @@ "postcss-modules-scope": "^3.0.0", "postcss-modules-values": "^4.0.0", "postcss-value-parser": "^4.1.0", - "semver": "^7.3.5" + "semver": "7.6.0" }, "dependencies": { "icss-utils": { @@ -61981,9 +57231,9 @@ "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==" }, "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "requires": { "lru-cache": "^6.0.0" } @@ -62003,14 +57253,6 @@ "source-map": "^0.6.1" }, "dependencies": { - "ajv-keywords": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", - "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", - "requires": { - "fast-deep-equal": "^3.1.3" - } - }, "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -62075,7 +57317,7 @@ "boolbase": "~1.0.0", "css-what": "2.1", "domutils": "1.5.1", - "nth-check": "~1.0.1" + "nth-check": "2.0.1" }, "dependencies": { "css-what": { @@ -62120,7 +57362,7 @@ "css-vendor": { "version": "0.3.8", "resolved": "https://registry.npmjs.org/css-vendor/-/css-vendor-0.3.8.tgz", - "integrity": "sha1-ZCHP0wNM5mT+dnOXL9ARn8KJQfo=", + "integrity": "sha512-Vx/Vl3zsHj32Z+WTNzGjd2iSbSIJTYHMmyGUT2nzCjj0Xk4qLfwpQ8nF6TQ5oo3Cf0s/An3DTc7LclH1BkAXbQ==", "requires": { "is-in-browser": "^1.0.2" } @@ -62601,24 +57843,11 @@ "ms": "2.0.0" } }, - "decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", - "dev": true, - "optional": true, - "peer": true - }, "decimal.js": { "version": "10.2.1", "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.2.1.tgz", 
"integrity": "sha512-KaL7+6Fw6i5A2XSnsbhm/6B+NuEA7TZ4vqxnd5tXz9sbKtrN9Srj8ab4vKVdK8YAqZO9P1kg45Y6YLoduPf+kw==" }, - "decode-uri-component": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz", - "integrity": "sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==" - }, "dedent": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/dedent/-/dedent-0.7.0.tgz", @@ -62645,18 +57874,6 @@ "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-3.3.0.tgz", "integrity": "sha512-GRQOafGHwMHpjPx9iCvTgpu9NojZ49q794EEL94JVEw6VaeA8XTUyBKvAkOOjBX9oJNiV6G3P+T+tihFjo2TqA==" }, - "default-gateway": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/default-gateway/-/default-gateway-4.2.0.tgz", - "integrity": "sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "execa": "^1.0.0", - "ip-regex": "^2.1.0" - } - }, "define-data-property": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", @@ -62682,111 +57899,11 @@ "object-keys": "^1.1.1" } }, - "define-property": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-2.0.2.tgz", - "integrity": "sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ==", - "requires": { - "is-descriptor": "^1.0.2", - "isobject": "^3.0.1" - }, - "dependencies": { - "is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "requires": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - } - } - } - }, "defined": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/defined/-/defined-1.0.0.tgz", "integrity": "sha1-yY2bzvdWdBiOEQlpFRGZ45sfppM=" }, - "del": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/del/-/del-4.1.1.tgz", - "integrity": "sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "@types/glob": "^7.1.1", - "globby": "^6.1.0", - "is-path-cwd": "^2.0.0", - "is-path-in-cwd": "^2.0.0", - "p-map": "^2.0.0", - "pify": "^4.0.1", - "rimraf": "^2.6.3" - }, - "dependencies": { - "array-union": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", - "integrity": "sha512-Dxr6QJj/RdU/hCaBjOfxW+q6lyuVE6JFWIrAUpuOOhoJJoQ99cUn3igRaHVB5P9WrgFVN0FfArM3x0cueOU8ng==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "array-uniq": "^1.0.1" - } - }, - 
"globby": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-6.1.0.tgz", - "integrity": "sha512-KVbFv2TQtbzCoxAnfD6JcHZTYCzyliEaaeM/gH8qQdkKr5s0OP9scEgvdcngyk7AVdY6YVW/TJHd+lQ/Df3Daw==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "array-union": "^1.0.1", - "glob": "^7.0.3", - "object-assign": "^4.0.1", - "pify": "^2.0.0", - "pinkie-promise": "^2.0.0" - }, - "dependencies": { - "pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", - "dev": true, - "optional": true, - "peer": true - } - } - }, - "p-map": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-2.1.0.tgz", - "integrity": "sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==", - "dev": true, - "optional": true, - "peer": true - } - } - }, "delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", @@ -62925,37 +58042,6 @@ "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==" }, - "dns-equal": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/dns-equal/-/dns-equal-1.0.0.tgz", - "integrity": "sha1-s55/HabrCnW6nBcySzR1PEfgZU0=", - "dev": true, - "optional": true, - "peer": true - }, - "dns-packet": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/dns-packet/-/dns-packet-1.3.4.tgz", - "integrity": "sha512-BQ6F4vycLXBvdrJZ6S3gZewt6rcrks9KBgM9vrhW+knGRqc8uEdT7fuCwloc7nny5xNoMJ17HGH0R/6fpo8ECA==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "ip": "^1.1.0", - "safe-buffer": "^5.0.1" - } - }, - "dns-txt": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/dns-txt/-/dns-txt-2.0.2.tgz", - "integrity": "sha1-uR2Ab10nGI5Ks+fRB9iBocxGQrY=", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "buffer-indexof": "^1.0.0" - } - }, "doctrine": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", @@ -63107,25 +58193,6 @@ "react-is": "^17.0.2" }, "dependencies": { - "prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "requires": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - }, - "dependencies": { - "react-is": { - "version": "16.13.1", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", - "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", - "dev": true - } - } - }, "react-is": { "version": "17.0.2", "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", @@ -63134,6 +58201,16 @@ } } }, + "dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "requires": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + } + }, "duplexer": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.1.tgz", @@ 
-63141,15 +58218,28 @@ "dev": true }, "duplexify": { - "version": "3.6.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.6.1.tgz", - "integrity": "sha512-vM58DwdnKmty+FSPzT14K9JXb90H+j5emaR4KYbr2KTIz00WHGbWOe5ghQTx233ZCLZtrGDALzKwcjEtSt35mA==", + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz", + "integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==", "dev": true, "requires": { - "end-of-stream": "^1.0.0", - "inherits": "^2.0.1", - "readable-stream": "^2.0.0", - "stream-shift": "^1.0.0" + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.2" + }, + "dependencies": { + "readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } } }, "dynamic-dedupe": { @@ -63185,10 +58275,12 @@ "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" }, "ejs": { - "version": "2.7.4", - "resolved": "https://registry.npmjs.org/ejs/-/ejs-2.7.4.tgz", - "integrity": "sha512-7vmuyh5+kuUyJKePhQfRQBhXV5Ce+RnaeeQArKu1EAMpL3WbgMt5WG6uQZpEVvYSSsxMXRKOewtDk9RaTKXRlA==", - "dev": true + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.7.tgz", + "integrity": "sha512-BIar7R6abbUxDA3bfXrO4DSgwo8I+fB5/1zgujl3HLLjwd6+9iOnrT+t3grn2qbk9vOgBubXOFwX2m9axoFaGw==", + "requires": { + "jake": "^10.8.5" + } }, "electron-to-chromium": { "version": "1.4.707", @@ -63264,14 +58356,6 @@ "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" }, - "encoding": { - "version": "0.1.12", - "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.12.tgz", - "integrity": "sha1-U4tm8+5izRq1HsMjgp0flIDHS+s=", - "requires": { - "iconv-lite": "~0.4.13" - } - }, "end-of-stream": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.1.tgz", @@ -63373,21 +58457,10 @@ "semver": "^5.7.0" }, "dependencies": { - "prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "requires": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", "dev": true } } @@ -63406,21 +58479,10 @@ "semver": "^5.7.0" }, "dependencies": { - "prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "requires": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - }, "semver": { - "version": "5.7.1", - 
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", "dev": true } } @@ -63489,12 +58551,9 @@ "dev": true }, "es-define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", - "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", - "requires": { - "get-intrinsic": "^1.2.4" - } + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==" }, "es-errors": { "version": "1.3.0", @@ -63539,6 +58598,25 @@ "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.4.1.tgz", "integrity": "sha512-cXLGjP0c4T3flZJKQSuziYoq7MlT+rnvfZjfp7h+I7K9BNX54kP9nyWvdbwjQ4u1iWbOL4u96fgeZLToQlZC7w==" }, + "es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "requires": { + "es-errors": "^1.3.0" + } + }, + "es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "requires": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + } + }, "es-to-primitive": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.0.tgz", @@ -63625,7 +58703,7 @@ "@humanwhocodes/config-array": "^0.9.2", "ajv": "^6.10.0", "chalk": "^4.0.0", - "cross-spawn": "^7.0.2", + "cross-spawn": "7.0.5", "debug": "^4.3.2", "doctrine": "^3.0.0", "enquirer": "^2.3.5", @@ -63649,12 +58727,12 @@ "json-stable-stringify-without-jsonify": "^1.0.1", "levn": "^0.4.1", "lodash.merge": "^4.6.2", - "minimatch": "^3.0.4", + "minimatch": "3.0.5", "natural-compare": "^1.4.0", "optionator": "^0.9.1", "progress": "^2.0.0", "regexpp": "^3.2.0", - "semver": "^7.2.1", + "semver": "7.6.0", "strip-ansi": "^6.0.1", "strip-json-comments": "^3.1.0", "text-table": "^0.2.0", @@ -63672,11 +58750,6 @@ "uri-js": "^4.2.2" } }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, "ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -63712,16 +58785,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "requires": { - "path-key": "^3.1.0", - 
"shebang-command": "^2.0.0", - "which": "^2.0.1" - } - }, "debug": { "version": "4.3.3", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", @@ -63741,16 +58804,6 @@ "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", "requires": { "is-glob": "^4.0.3" - }, - "dependencies": { - "is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "requires": { - "is-extglob": "^2.1.1" - } - } } }, "globals": { @@ -63811,43 +58864,25 @@ "word-wrap": "^1.2.3" } }, - "path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" - }, "prelude-ls": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==" }, "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "requires": { "lru-cache": "^6.0.0" } }, - "shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "requires": { - "shebang-regex": "^3.0.0" - } - }, - "shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" - }, "strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "requires": { - "ansi-regex": "^5.0.1" + "ansi-regex": "5.0.1" } }, "strip-json-comments": { @@ -63875,14 +58910,6 @@ "version": "0.20.2", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==" - }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "requires": { - "isexe": "^2.0.0" - } } } }, @@ -64039,7 +59066,7 @@ "has": "^1.0.3", "is-core-module": "^2.8.0", "is-glob": "^4.0.3", - "minimatch": "^3.0.4", + "minimatch": "3.0.5", "object.values": "^1.1.5", "resolve": "^1.20.0", "tsconfig-paths": "^3.12.0" @@ -64120,11 +59147,6 @@ "is-symbol": "^1.0.2" } }, - "is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==" - }, "is-core-module": { "version": "2.8.1", "resolved": 
"https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.1.tgz", @@ -64133,14 +59155,6 @@ "has": "^1.0.3" } }, - "is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "requires": { - "is-extglob": "^2.1.1" - } - }, "is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -64166,11 +59180,6 @@ "minimist": "^1.2.0" } }, - "object-inspect": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", - "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==" - }, "object.values": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.5.tgz", @@ -64228,7 +59237,7 @@ "has": "^1.0.3", "jsx-ast-utils": "^3.2.1", "language-tags": "^1.0.5", - "minimatch": "^3.0.4" + "minimatch": "3.0.5" }, "dependencies": { "array-includes": { @@ -64285,11 +59294,6 @@ "is-symbol": "^1.0.2" } }, - "is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==" - }, "is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -64306,11 +59310,6 @@ "requires": { "has-tostringtag": "^1.0.0" } - }, - "object-inspect": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", - "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==" } } }, @@ -64324,7 +59323,7 @@ "doctrine": "^2.1.0", "estraverse": "^5.3.0", "jsx-ast-utils": "^2.4.1 || ^3.0.0", - "minimatch": "^3.0.4", + "minimatch": "3.0.5", "object.entries": "^1.1.5", "object.fromentries": "^2.0.5", "object.hasown": "^1.1.0", @@ -64407,11 +59406,6 @@ "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==" }, - "is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==" - }, "is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -64429,11 +59423,6 @@ "has-tostringtag": "^1.0.0" } }, - "object-inspect": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", - "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==" - }, "object.entries": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.5.tgz", @@ -64464,16 +59453,6 @@ "es-abstract": "^1.19.1" } }, - "prop-types": { - "version": "15.8.1", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", - "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", - "requires": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.13.1" - } - }, "resolve": { "version": "2.0.0-next.3", "resolved": 
"https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.3.tgz", @@ -64484,9 +59463,9 @@ } }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" }, "string.prototype.matchall": { "version": "4.0.6", @@ -64562,7 +59541,7 @@ "requires": { "@types/eslint": "^7.28.2", "jest-worker": "^27.3.1", - "micromatch": "^4.0.4", + "micromatch": ">=4.0.8", "normalize-path": "^3.0.0", "schema-utils": "^3.1.1" }, @@ -64584,32 +59563,11 @@ "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", "requires": {} }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "requires": { - "fill-range": "^7.0.1" - } - }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "requires": { - "to-regex-range": "^5.0.1" - } - }, "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" - }, "jest-worker": { "version": "27.4.6", "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.4.6.tgz", @@ -64625,15 +59583,6 @@ "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" }, - "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, "schema-utils": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", @@ -64651,14 +59600,6 @@ "requires": { "has-flag": "^4.0.0" } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "requires": { - "is-number": "^7.0.0" - } } } }, @@ -64749,13 +59690,6 @@ "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==" }, - "eventsource": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz", - "integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==", - "optional": true, - "peer": true - }, 
"evp_bytestokey": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", @@ -64778,7 +59712,7 @@ "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", "dev": true, "requires": { - "cross-spawn": "^6.0.0", + "cross-spawn": "7.0.5", "get-stream": "^4.0.0", "is-stream": "^1.1.0", "npm-run-path": "^2.0.0", @@ -64792,46 +59726,6 @@ "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", "integrity": "sha1-BjJjj42HfMghB9MKD/8aF8uhzQw=" }, - "expand-brackets": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-2.1.4.tgz", - "integrity": "sha1-t3c14xXOMPa27/D4OwQVGiJEliI=", - "requires": { - "debug": "^2.3.3", - "define-property": "^0.2.5", - "extend-shallow": "^2.0.1", - "posix-character-classes": "^0.1.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "requires": { - "ms": "2.0.0" - } - }, - "define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "requires": { - "is-descriptor": "^0.1.0" - } - }, - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, "expect": { "version": "27.4.6", "resolved": "https://registry.npmjs.org/expect/-/expect-27.4.6.tgz", @@ -64923,7 +59817,7 @@ "requires": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.2", + "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", "cookie": "0.6.0", @@ -65098,84 +59992,6 @@ "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" }, - "extend-shallow": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz", - "integrity": "sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg=", - "requires": { - "assign-symbols": "^1.0.0", - "is-extendable": "^1.0.1" - }, - "dependencies": { - "is-extendable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", - "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", - "requires": { - "is-plain-object": "^2.0.4" - } - } - } - }, - "extglob": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/extglob/-/extglob-2.0.4.tgz", - "integrity": "sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw==", - "requires": { - "array-unique": "^0.3.2", - "define-property": "^1.0.0", - "expand-brackets": "^2.1.4", - "extend-shallow": "^2.0.1", - "fragment-cache": "^0.2.1", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "dependencies": { - "define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", - "requires": { - "is-descriptor": "^1.0.0" - } - }, - 
"extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "requires": { - "is-extendable": "^0.1.0" - } - }, - "is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "requires": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - } - } - } - }, "extsprintf": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", @@ -65195,48 +60011,8 @@ "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.0", "merge2": "^1.3.0", - "micromatch": "^4.0.2", + "micromatch": ">=4.0.8", "picomatch": "^2.2.1" - }, - "dependencies": { - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "requires": { - "fill-range": "^7.0.1" - } - }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "requires": { - "to-regex-range": "^5.0.1" - } - }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" - }, - "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "requires": { - "is-number": "^7.0.0" - } - } } }, "fast-json-parse": { @@ -65408,11 +60184,11 @@ "optional": true }, "filelist": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.2.tgz", - "integrity": "sha512-z7O0IS8Plc39rTCq6i6iHxk43duYOn8uFJiWSewIq0Bww1RNybVHSCjahmcC87ZqAm4OTvFzlzeGu3XAzG1ctQ==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz", + "integrity": "sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==", "requires": { - "minimatch": "^3.0.4" + "minimatch": "3.0.5" } }, "filesize": { @@ -65422,24 +60198,11 @@ "dev": true }, 
"fill-range": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", - "integrity": "sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "requires": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "requires": { - "is-extendable": "^0.1.0" - } - } + "to-regex-range": "^5.0.1" } }, "find-cache-dir": { @@ -65512,10 +60275,14 @@ "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==" }, - "for-in": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz", - "integrity": "sha1-gQaNKVqBQuwKxybG4iAMMPttXoA=" + "for-each": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", + "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", + "dev": true, + "requires": { + "is-callable": "^1.2.7" + } }, "foreground-child": { "version": "2.0.0", @@ -65523,51 +60290,8 @@ "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==", "dev": true, "requires": { - "cross-spawn": "^7.0.0", + "cross-spawn": "7.0.5", "signal-exit": "^3.0.2" - }, - "dependencies": { - "cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "requires": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - } - }, - "path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true - }, - "shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "requires": { - "shebang-regex": "^3.0.0" - } - }, - "shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true - }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - } } }, "forever-agent": { @@ -65583,21 +60307,19 @@ "requires": { "@babel/code-frame": "^7.5.5", "chalk": "^2.4.1", - "micromatch": "^3.1.10", - "minimatch": "^3.0.4", + "micromatch": ">=4.0.8", + "minimatch": "3.0.5", "semver": "^5.6.0", "tapable": "^1.0.0", "worker-rpc": "^0.1.0" - } - }, - 
"form-data": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", - "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", - "requires": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" + }, + "dependencies": { + "semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true + } } }, "format": { @@ -65616,14 +60338,6 @@ "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz", "integrity": "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==" }, - "fragment-cache": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/fragment-cache/-/fragment-cache-0.2.1.tgz", - "integrity": "sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk=", - "requires": { - "map-cache": "^0.2.2" - } - }, "free-style": { "version": "2.6.1", "resolved": "https://registry.npmjs.org/free-style/-/free-style-2.6.1.tgz", @@ -65802,7 +60516,7 @@ "extend": "^3.0.2", "https-proxy-agent": "^3.0.0", "is-stream": "^2.0.0", - "node-fetch": "^2.3.0" + "node-fetch": "2.6.7" }, "dependencies": { "agent-base": { @@ -65831,10 +60545,35 @@ "dev": true }, "node-fetch": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", - "integrity": "sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==", + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "dev": true, + "requires": { + "whatwg-url": "^5.0.0" + } + }, + "tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "dev": true + }, + "webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", "dev": true + }, + "whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dev": true, + "requires": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } } } }, @@ -65885,18 +60624,6 @@ "is-obj": "^2.0.0" } }, - "duplexify": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", - "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", - "dev": true, - "requires": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", - "stream-shift": "^1.0.0" - } - }, "is-obj": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", @@ -65918,26 +60645,15 @@ "integrity": "sha512-m7KOje7jZxrmutanlkS1daj1dS6z6BgslzOXmcSEpIlCxM3VJH7lG5QLeck/6hgF6F4crFf01UtQmNsJfweTAw==", "dev": true, "requires": { - "duplexify": "^4.1.1", + "duplexify": "4.1.3", "inherits": "^2.0.3", "pump": "^3.0.0" } }, - "readable-stream": { - "version": 
"3.4.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", - "integrity": "sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ==", - "dev": true, - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } - }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true }, "xdg-basedir": { @@ -65959,15 +60675,20 @@ "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" }, "get-intrinsic": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", - "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", "requires": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", - "has-proto": "^1.0.1", - "has-symbols": "^1.0.3", - "hasown": "^2.0.0" + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" } }, "get-own-enumerable-property-symbols": { @@ -65980,6 +60701,15 @@ "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==" }, + "get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "requires": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + } + }, "get-stream": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", @@ -65998,11 +60728,6 @@ "get-intrinsic": "^1.1.1" } }, - "get-value": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz", - "integrity": "sha1-3BXKHGcjh8p2vTesCjlbogQqLCg=" - }, "getpass": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", @@ -66036,14 +60761,13 @@ "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", - "minimatch": "^3.1.1", + "minimatch": "3.0.5", "once": "^1.3.0", "path-is-absolute": "^1.0.0" }, "dependencies": { "minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "version": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "requires": { "brace-expansion": "^1.1.7" @@ -66093,16 +60817,6 @@ "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "requires": { "is-glob": "^4.0.1" - }, - "dependencies": { - "is-glob": { - "version": 
"4.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", - "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", - "requires": { - "is-extglob": "^2.1.1" - } - } } }, "glob-promise": { @@ -66243,12 +60957,9 @@ "integrity": "sha512-OVPzcSWIAJ+d5yiHyeaLrdufQtrvaBrF4JQg+z8ynTkbO3uFcujqXszTumqg1cGsAsjkWnI+M5B1xZ19yR4Wyg==" }, "gopd": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", - "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", - "requires": { - "get-intrinsic": "^1.1.3" - } + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==" }, "graceful-fs": { "version": "4.2.11", @@ -66411,6 +61122,11 @@ } } }, + "has-own": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-own/-/has-own-1.0.1.tgz", + "integrity": "sha512-RDKhzgQTQfMaLvIFhjahU+2gGnRBK6dYOd5Gd9BzkmnBneOCRYjRC003RIMrdAbH52+l+CnMS4bBCXGer8tEhg==" + }, "has-property-descriptors": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", @@ -66419,22 +61135,17 @@ "es-define-property": "^1.0.0" } }, - "has-proto": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", - "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==" - }, "has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==" }, "has-tostringtag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", - "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", "requires": { - "has-symbols": "^1.0.2" + "has-symbols": "^1.0.3" } }, "has-unicode": { @@ -66443,35 +61154,6 @@ "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=", "dev": true }, - "has-value": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-value/-/has-value-1.0.0.tgz", - "integrity": "sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc=", - "requires": { - "get-value": "^2.0.6", - "has-values": "^1.0.0", - "isobject": "^3.0.0" - } - }, - "has-values": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-values/-/has-values-1.0.0.tgz", - "integrity": "sha1-lbC2P+whRmGab+V/51Yo1aOe/k8=", - "requires": { - "is-number": "^3.0.0", - "kind-of": "^4.0.0" - }, - "dependencies": { - "kind-of": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-4.0.0.tgz", - "integrity": "sha1-IIE989cSkosgc3hpGkUGb65y3Vc=", - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, "hash-base": { "version": "3.0.4", "resolved": 
"https://registry.npmjs.org/hash-base/-/hash-base-3.0.4.tgz", @@ -66725,7 +61407,7 @@ "he": "^1.2.0", "param-case": "^3.0.3", "relateurl": "^0.2.7", - "terser": "^4.6.3" + "terser": "5.14.2" }, "dependencies": { "camel-case": { @@ -66944,14 +61626,15 @@ } }, "http-proxy-middleware": { - "version": "0.19.1", - "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz", - "integrity": "sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q==", + "version": "2.0.9", + "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.9.tgz", + "integrity": "sha512-c1IyJYLYppU574+YI7R4QyX2ystMtVXZwIdzazUIPIJsHuWNd+mho2j+bKoHftndicGj9yh+xjd+l0yj7VeT1Q==", "requires": { - "http-proxy": "^1.17.0", - "is-glob": "^4.0.0", - "lodash": "^4.17.11", - "micromatch": "^3.1.10" + "@types/http-proxy": "^1.17.8", + "http-proxy": "^1.18.1", + "is-glob": "^4.0.1", + "is-plain-obj": "^3.0.0", + "micromatch": ">=4.0.8" } }, "http-signature": { @@ -67008,9 +61691,9 @@ "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==" }, "hyphenate-style-name": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/hyphenate-style-name/-/hyphenate-style-name-1.0.4.tgz", - "integrity": "sha512-ygGZLjmXfPHj+ZWh6LwbC37l43MhfztxetbFCoYTM2VjkIUpeHgSNn7QIyVFj7YQ1Wl9Cbw5sholVJPzWvC2MQ==" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/hyphenate-style-name/-/hyphenate-style-name-1.1.0.tgz", + "integrity": "sha512-WDC/ui2VVRrz3jOVi+XtjqkDjiVjTtFaAGiW37k6b+ohyQ5wYDOGkvCZa8+H0nx3gyvv0+BST9xuOgIyGQ00gw==" }, "iconv-lite": { "version": "0.4.24", @@ -67165,18 +61848,6 @@ "integrity": "sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==", "dev": true }, - "internal-ip": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/internal-ip/-/internal-ip-4.3.0.tgz", - "integrity": "sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "default-gateway": "^4.2.0", - "ipaddr.js": "^1.9.0" - } - }, "internal-slot": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz", @@ -67207,14 +61878,6 @@ "integrity": "sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo=", "dev": true }, - "ip-regex": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz", - "integrity": "sha512-58yWmlHpp7VYfcdTwMTvwMmqx/Elfxjd9RXTDyMsbL7lLWmhMylLEqiYVLKuLzOZqVgiWXD9MfR62Vv89VRxkw==", - "dev": true, - "optional": true, - "peer": true - }, "ipaddr.js": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", @@ -67226,24 +61889,6 @@ "integrity": "sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q==", "dev": true }, - "is-accessor-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", - "integrity": "sha1-qeEss66Nh2cn7u84Q/igiXtcmNY=", - "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, "is-alphabetical": { "version": "1.0.4", "resolved": 
"https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-1.0.4.tgz", @@ -67294,15 +61939,10 @@ "integrity": "sha1-mPiygDBoQhmpXzdc+9iM40Bd/5M=", "dev": true }, - "is-buffer": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", - "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" - }, "is-callable": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.4.tgz", - "integrity": "sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA==" + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==" }, "is-ci": { "version": "2.0.0", @@ -67321,24 +61961,6 @@ "has": "^1.0.3" } }, - "is-data-descriptor": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", - "integrity": "sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y=", - "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, "is-date-object": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.1.tgz", @@ -67350,23 +61972,6 @@ "integrity": "sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==", "dev": true }, - "is-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.6.tgz", - "integrity": "sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==", - "requires": { - "is-accessor-descriptor": "^0.1.6", - "is-data-descriptor": "^0.1.4", - "kind-of": "^5.0.0" - }, - "dependencies": { - "kind-of": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-5.1.0.tgz", - "integrity": "sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==" - } - } - }, "is-docker": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", @@ -67382,11 +61987,6 @@ "is-window": "^1.0.2" } }, - "is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=" - }, "is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -67409,9 +62009,9 @@ "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==" }, "is-glob": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.0.tgz", - "integrity": "sha1-lSHHaEXMJhCoUgPd8ICpWML/q8A=", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "requires": { "is-extglob": "^2.1.1" } @@ -67425,7 +62025,7 @@ "is-in-browser": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/is-in-browser/-/is-in-browser-1.1.3.tgz", - "integrity": "sha1-Vv9NtoOgeMYILrldrX3GLh0E+DU=" + "integrity": 
"sha512-FeXIBgG/CPGd/WUxuEyvgGTEfwiG9Z4EKGxjNMRqviiIIfsmgrpnHLffEDdwUHqNva1VEW91o3xBT/m8Elgl9g==" }, "is-lower-case": { "version": "1.1.3", @@ -67452,24 +62052,6 @@ "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.1.tgz", "integrity": "sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w==" }, - "is-number": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=", - "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, "is-number-object": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.3.tgz", @@ -67487,36 +62069,6 @@ "integrity": "sha512-2rRIahhZr2UWb45fIOuvZGpFtz0TyOZLf32KxBbSoUCeZR495zCKlWUKKUByk3geS2eAs7ZAABt0Y/Rx0GiQGA==", "dev": true }, - "is-path-cwd": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-2.2.0.tgz", - "integrity": "sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==", - "dev": true, - "optional": true, - "peer": true - }, - "is-path-in-cwd": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz", - "integrity": "sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "is-path-inside": "^2.1.0" - } - }, - "is-path-inside": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-2.1.0.tgz", - "integrity": "sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "path-is-inside": "^1.0.2" - } - }, "is-plain-obj": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-3.0.0.tgz", @@ -67567,7 +62119,8 @@ "is-stream": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=" + "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=", + "dev": true }, "is-string": { "version": "1.0.4", @@ -67589,6 +62142,15 @@ "has-symbols": "^1.0.0" } }, + "is-typed-array": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", + "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", + "dev": true, + "requires": { + "which-typed-array": "^1.1.16" + } + }, "is-typedarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", @@ -67623,11 +62185,6 @@ "integrity": "sha1-LIlspT25feRdPDMTOmXYyfVjSA0=", "dev": true }, - "is-windows": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", - "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==" - }, "is-word-character": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-word-character/-/is-word-character-1.0.4.tgz", @@ -67662,8 +62219,37 @@ "resolved": 
"https://registry.npmjs.org/isomorphic-fetch/-/isomorphic-fetch-2.2.1.tgz", "integrity": "sha1-YRrhrPFPXoH3KVB0coGf6XM1WKk=", "requires": { - "node-fetch": "^1.0.1", + "node-fetch": "2.6.7", "whatwg-fetch": ">=0.10.0" + }, + "dependencies": { + "node-fetch": { + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "requires": { + "whatwg-url": "^5.0.0" + } + }, + "tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + }, + "webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + }, + "whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "requires": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + } } }, "isstream": { @@ -67689,9 +62275,9 @@ }, "dependencies": { "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true } } @@ -67720,9 +62306,9 @@ } }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" }, "supports-color": { "version": "7.2.0", @@ -67791,20 +62377,19 @@ } }, "jake": { - "version": "10.8.2", - "resolved": "https://registry.npmjs.org/jake/-/jake-10.8.2.tgz", - "integrity": "sha512-eLpKyrfG3mzvGE2Du8VoPbeSkRry093+tyNjdYaBbJS9v17knImYGNXQCUV0gLxQtF82m3E8iRb/wdSQZLoq7A==", + "version": "10.9.4", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.4.tgz", + "integrity": "sha512-wpHYzhxiVQL+IV05BLE2Xn34zW1S223hvjtqk0+gsPrwd/8JNLXJgZZM/iPFsYc1xyphF+6M6EvdE5E9MBGkDA==", "requires": { - "async": "0.9.x", - "chalk": "^2.4.2", - "filelist": "^1.0.1", - "minimatch": "^3.0.4" + "async": "^3.2.6", + "filelist": "^1.0.4", + "picocolors": "^1.1.1" }, "dependencies": { "async": { - "version": "0.9.2", - "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz", - "integrity": "sha1-rqdNXmHB+JlhO/ZL2mbUx48v0X0=" + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", + "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==" } } }, @@ -68020,22 +62605,12 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "cross-spawn": { - 
"version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "requires": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - } - }, "execa": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", "requires": { - "cross-spawn": "^7.0.3", + "cross-spawn": "7.0.5", "get-stream": "^6.0.0", "human-signals": "^2.1.0", "is-stream": "^2.0.0", @@ -68074,19 +62649,6 @@ "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" }, - "shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "requires": { - "shebang-regex": "^3.0.0" - } - }, - "shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" - }, "supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -68094,14 +62656,6 @@ "requires": { "has-flag": "^4.0.0" } - }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "requires": { - "isexe": "^2.0.0" - } } } }, @@ -68183,11 +62737,6 @@ "@types/yargs-parser": "*" } }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, "ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -68310,7 +62859,7 @@ "jest-runner": "^27.4.6", "jest-util": "^27.4.2", "jest-validate": "^27.4.6", - "micromatch": "^4.0.4", + "micromatch": ">=4.0.8", "pretty-format": "^27.4.6", "slash": "^3.0.0" }, @@ -68343,11 +62892,6 @@ "@types/yargs-parser": "*" } }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, "ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -68356,14 +62900,6 @@ "color-convert": "^2.0.1" } }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "requires": { - "fill-range": "^7.0.1" - } - }, "chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -68396,24 +62932,11 @@ "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz", "integrity": "sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==" }, - "fill-range": { - 
"version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "requires": { - "to-regex-range": "^5.0.1" - } - }, "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" - }, "jest-regex-util": { "version": "27.4.0", "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-27.4.0.tgz", @@ -68432,15 +62955,6 @@ "picomatch": "^2.2.3" } }, - "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, "pretty-format": { "version": "27.4.6", "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.4.6.tgz", @@ -68470,14 +62984,6 @@ "requires": { "has-flag": "^4.0.0" } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "requires": { - "is-number": "^7.0.0" - } } } }, @@ -68492,11 +62998,6 @@ "pretty-format": "^27.4.6" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, "ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -68617,11 +63118,6 @@ "@types/yargs-parser": "*" } }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, "ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -68867,15 +63363,6 @@ "color-convert": "^2.0.1" } }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "requires": { - "fill-range": "^7.0.1" - } - }, "chalk": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", @@ -68901,27 +63388,12 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "requires": { - "to-regex-range": "^5.0.1" - } - }, "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": 
"sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true - }, "jest-message-util": { "version": "25.5.0", "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-25.5.0.tgz", @@ -68933,7 +63405,7 @@ "@types/stack-utils": "^1.0.1", "chalk": "^3.0.0", "graceful-fs": "^4.2.4", - "micromatch": "^4.0.2", + "micromatch": ">=4.0.8", "slash": "^3.0.0", "stack-utils": "^1.0.1" } @@ -68969,20 +63441,10 @@ "semver": "^6.0.0" } }, - "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dev": true, - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true }, "supports-color": { @@ -68993,15 +63455,6 @@ "requires": { "has-flag": "^4.0.0" } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "requires": { - "is-number": "^7.0.0" - } } } }, @@ -69131,7 +63584,7 @@ "jest-serializer": "^26.6.2", "jest-util": "^26.6.2", "jest-worker": "^26.6.2", - "micromatch": "^4.0.2", + "micromatch": ">=4.0.8", "sane": "^4.0.3", "walker": "^1.0.7" }, @@ -69176,15 +63629,6 @@ "color-convert": "^2.0.1" } }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "requires": { - "fill-range": "^7.0.1" - } - }, "chalk": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.1.tgz", @@ -69210,37 +63654,12 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "requires": { - "to-regex-range": "^5.0.1" - } - }, "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true - }, - "micromatch": { - "version": "4.0.4", - "resolved": 
"https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dev": true, - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, "supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -69249,15 +63668,6 @@ "requires": { "has-flag": "^4.0.0" } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "requires": { - "is-number": "^7.0.0" - } } } }, @@ -69347,11 +63757,6 @@ "@types/yargs-parser": "*" } }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, "ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -69451,11 +63856,6 @@ "pretty-format": "^27.4.6" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, "ansi-styles": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", @@ -69489,11 +63889,6 @@ "pretty-format": "^27.4.6" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, "ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -69571,7 +63966,7 @@ "@types/stack-utils": "^2.0.0", "chalk": "^4.0.0", "graceful-fs": "^4.2.4", - "micromatch": "^4.0.4", + "micromatch": ">=4.0.8", "pretty-format": "^27.4.6", "slash": "^3.0.0", "stack-utils": "^2.0.3" @@ -69610,19 +64005,6 @@ "@types/yargs-parser": "*" } }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "requires": { - "fill-range": "^7.0.1" - } - }, "chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -69663,33 +64045,11 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "requires": { - "to-regex-range": "^5.0.1" - } - }, "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": 
"sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" - }, - "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, "pretty-format": { "version": "27.4.6", "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.4.6.tgz", @@ -69726,14 +64086,6 @@ "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==" } } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "requires": { - "is-number": "^7.0.0" - } } } }, @@ -69885,14 +64237,6 @@ "color-convert": "^2.0.1" } }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "requires": { - "fill-range": "^7.0.1" - } - }, "chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -69920,14 +64264,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "requires": { - "to-regex-range": "^5.0.1" - } - }, "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -69941,11 +64277,6 @@ "has": "^1.0.3" } }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" - }, "jest-haste-map": { "version": "27.4.6", "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-27.4.6.tgz", @@ -69962,7 +64293,7 @@ "jest-serializer": "^27.4.0", "jest-util": "^27.4.2", "jest-worker": "^27.4.6", - "micromatch": "^4.0.4", + "micromatch": ">=4.0.8", "walker": "^1.0.7" } }, @@ -70019,15 +64350,6 @@ } } }, - "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, "resolve": { "version": "1.21.0", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.21.0.tgz", @@ -70045,14 +64367,6 @@ "requires": { "has-flag": "^4.0.0" } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": 
"sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "requires": { - "is-number": "^7.0.0" - } } } }, @@ -70212,7 +64526,7 @@ "jest-haste-map": "^27.4.6", "jest-regex-util": "^27.4.0", "jest-util": "^27.4.2", - "micromatch": "^4.0.4", + "micromatch": ">=4.0.8", "pirates": "^4.0.4", "slash": "^3.0.0", "source-map": "^0.6.1", @@ -70267,14 +64581,6 @@ "test-exclude": "^6.0.0" } }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "requires": { - "fill-range": "^7.0.1" - } - }, "chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -70302,24 +64608,11 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "requires": { - "to-regex-range": "^5.0.1" - } - }, "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" - }, "istanbul-lib-coverage": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz", @@ -70353,7 +64646,7 @@ "jest-serializer": "^27.4.0", "jest-util": "^27.4.2", "jest-worker": "^27.4.6", - "micromatch": "^4.0.4", + "micromatch": ">=4.0.8", "walker": "^1.0.7" } }, @@ -70404,24 +64697,15 @@ } } }, - "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, "pirates": { "version": "4.0.4", "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.4.tgz", "integrity": "sha512-ZIrVPH+A52Dw84R0L3/VS9Op04PuQ2SEoJL6bkshmiTic/HldyW9Tf7oH5mhJZBK7NmDx27vSMrYEXPXclpDKw==" }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" }, "source-map": { "version": "0.6.1", @@ -70435,14 +64719,6 @@ "requires": { "has-flag": "^4.0.0" } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "requires": { - "is-number": "^7.0.0" - } } } }, @@ -70524,7 +64800,7 @@ "jest-haste-map": "^27.4.6", "jest-regex-util": "^27.4.0", "jest-util": 
"^27.4.2", - "micromatch": "^4.0.4", + "micromatch": ">=4.0.8", "pirates": "^4.0.4", "slash": "^3.0.0", "source-map": "^0.6.1", @@ -70579,14 +64855,6 @@ "test-exclude": "^6.0.0" } }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "requires": { - "fill-range": "^7.0.1" - } - }, "chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -70614,22 +64882,12 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "requires": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - } - }, "execa": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", "requires": { - "cross-spawn": "^7.0.3", + "cross-spawn": "7.0.5", "get-stream": "^6.0.0", "human-signals": "^2.1.0", "is-stream": "^2.0.0", @@ -70640,14 +64898,6 @@ "strip-final-newline": "^2.0.0" } }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "requires": { - "to-regex-range": "^5.0.1" - } - }, "get-stream": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", @@ -70658,11 +64908,6 @@ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" - }, "is-stream": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", @@ -70701,7 +64946,7 @@ "jest-serializer": "^27.4.0", "jest-util": "^27.4.2", "jest-worker": "^27.4.6", - "micromatch": "^4.0.4", + "micromatch": ">=4.0.8", "walker": "^1.0.7" } }, @@ -70752,15 +64997,6 @@ } } }, - "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, "npm-run-path": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", @@ -70780,22 +65016,9 @@ "integrity": "sha512-ZIrVPH+A52Dw84R0L3/VS9Op04PuQ2SEoJL6bkshmiTic/HldyW9Tf7oH5mhJZBK7NmDx27vSMrYEXPXclpDKw==" }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" - }, - "shebang-command": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "requires": { - "shebang-regex": "^3.0.0" - } - }, - "shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" }, "source-map": { "version": "0.6.1", @@ -70814,22 +65037,6 @@ "requires": { "has-flag": "^4.0.0" } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "requires": { - "is-number": "^7.0.0" - } - }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "requires": { - "isexe": "^2.0.0" - } } } }, @@ -70869,7 +65076,7 @@ "jest-util": "^27.4.2", "natural-compare": "^1.4.0", "pretty-format": "^27.4.6", - "semver": "^7.3.2" + "semver": "7.6.0" }, "dependencies": { "@jest/transform": { @@ -70887,7 +65094,7 @@ "jest-haste-map": "^27.4.6", "jest-regex-util": "^27.4.0", "jest-util": "^27.4.2", - "micromatch": "^4.0.4", + "micromatch": ">=4.0.8", "pirates": "^4.0.4", "slash": "^3.0.0", "source-map": "^0.6.1", @@ -70927,11 +65134,6 @@ "@types/yargs-parser": "*" } }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, "ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -70952,14 +65154,6 @@ "test-exclude": "^6.0.0" } }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "requires": { - "fill-range": "^7.0.1" - } - }, "chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -70987,24 +65181,11 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "requires": { - "to-regex-range": "^5.0.1" - } - }, "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" - }, 
"istanbul-lib-coverage": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz", @@ -71023,9 +65204,9 @@ }, "dependencies": { "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" } } }, @@ -71045,7 +65226,7 @@ "jest-serializer": "^27.4.0", "jest-util": "^27.4.2", "jest-worker": "^27.4.6", - "micromatch": "^4.0.4", + "micromatch": ">=4.0.8", "walker": "^1.0.7" } }, @@ -71096,15 +65277,6 @@ } } }, - "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, "pirates": { "version": "4.0.4", "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.4.tgz", @@ -71133,9 +65305,9 @@ "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==" }, "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "requires": { "lru-cache": "^6.0.0" } @@ -71152,14 +65324,6 @@ "requires": { "has-flag": "^4.0.0" } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "requires": { - "is-number": "^7.0.0" - } } } }, @@ -71174,7 +65338,7 @@ "chalk": "^4.0.0", "graceful-fs": "^4.2.4", "is-ci": "^2.0.0", - "micromatch": "^4.0.2" + "micromatch": ">=4.0.8" }, "dependencies": { "@jest/types": { @@ -71217,15 +65381,6 @@ "color-convert": "^2.0.1" } }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "requires": { - "fill-range": "^7.0.1" - } - }, "chalk": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.1.tgz", @@ -71251,37 +65406,12 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "requires": { - "to-regex-range": "^5.0.1" - } - }, "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, - "is-number": { - "version": "7.0.0", - "resolved": 
"https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true - }, - "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dev": true, - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, "supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -71290,15 +65420,6 @@ "requires": { "has-flag": "^4.0.0" } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "requires": { - "is-number": "^7.0.0" - } } } }, @@ -71343,11 +65464,6 @@ "@types/yargs-parser": "*" } }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, "ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -71709,7 +65825,7 @@ "decimal.js": "^10.2.1", "domexception": "^2.0.1", "escodegen": "^2.0.0", - "form-data": "^3.0.0", + "form-data": "2.5.4", "html-encoding-sniffer": "^2.0.1", "http-proxy-agent": "^4.0.1", "https-proxy-agent": "^5.0.0", @@ -71725,7 +65841,7 @@ "whatwg-encoding": "^1.0.5", "whatwg-mimetype": "^2.3.0", "whatwg-url": "^8.5.0", - "ws": "^7.4.6", + "ws": "8.17.1", "xml-name-validator": "^3.0.0" }, "dependencies": { @@ -71746,13 +65862,16 @@ } }, "form-data": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", - "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "version": "2.5.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.4.tgz", + "integrity": "sha512-Y/3MmRiR8Nd+0CUtrbvcKtKzLWiUfpQ7DFVggH8PwmGt/0r7RSy32GuP4hpCJlQNEBusisSx1DLtD8uD386HJQ==", "requires": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", - "mime-types": "^2.1.12" + "es-set-tostringtag": "^2.1.0", + "has-own": "^1.0.1", + "mime-types": "^2.1.35", + "safe-buffer": "^5.2.1" } }, "http-proxy-agent": { @@ -71775,6 +65894,11 @@ "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==" }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + }, "tough-cookie": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz", @@ -71821,9 +65945,9 @@ "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==" }, "json-schema": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": 
"sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==" }, "json-schema-traverse": { "version": "1.0.0", @@ -71873,7 +65997,7 @@ "requires": { "assert-plus": "1.0.0", "extsprintf": "1.3.0", - "json-schema": "0.2.3", + "json-schema": "0.4.0", "verror": "1.10.0" } }, @@ -71890,7 +66014,7 @@ "warning": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/warning/-/warning-3.0.0.tgz", - "integrity": "sha1-MuU3fLVy3kqwR1O9+IIcAe1gW3w=", + "integrity": "sha512-jMBt6pUrKn5I+OGgtQ4YZLdhIeJmObddh6CsibPxyQ5yPZm1XExSyzC1LCNX7BzhxWgiHmizBWJTHJIjMjTQYQ==", "requires": { "loose-envify": "^1.0.0" } @@ -71928,7 +66052,7 @@ "warning": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/warning/-/warning-3.0.0.tgz", - "integrity": "sha1-MuU3fLVy3kqwR1O9+IIcAe1gW3w=", + "integrity": "sha512-jMBt6pUrKn5I+OGgtQ4YZLdhIeJmObddh6CsibPxyQ5yPZm1XExSyzC1LCNX7BzhxWgiHmizBWJTHJIjMjTQYQ==", "requires": { "loose-envify": "^1.0.0" } @@ -71985,14 +66109,6 @@ "safe-buffer": "^5.0.1" } }, - "killable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/killable/-/killable-1.0.1.tgz", - "integrity": "sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg==", - "dev": true, - "optional": true, - "peer": true - }, "kind-of": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", @@ -72193,14 +66309,6 @@ "integrity": "sha512-U7KCmLdqsGHBLeWqYlFA0V0Sl6P08EE1ZrmA9cxjUE0WVqT9qnyVDPz1kzpFEP0jdJuFnasWIfSd7fsaNXkpbg==", "dev": true }, - "loglevel": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.9.1.tgz", - "integrity": "sha512-hP3I3kCrDIMuRwAwHltphhDM1r8i55H33GgqjXbrisuJhF4kRhW1dNuxsRklp4bXl8DSdLaNLuiL4A/LWRfxvg==", - "dev": true, - "optional": true, - "peer": true - }, "lolex": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/lolex/-/lolex-5.1.2.tgz", @@ -72278,6 +66386,14 @@ "requires": { "pify": "^4.0.1", "semver": "^5.6.0" + }, + "dependencies": { + "semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true + } } }, "make-error": { @@ -72290,28 +66406,15 @@ "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.11.tgz", "integrity": "sha1-4BpckQnyr3lmDk6LlYd5AYT1qWw=", "requires": { - "tmpl": "1.0.x" + "tmpl": "1.0.5" } }, - "map-cache": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz", - "integrity": "sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8=" - }, "map-or-similar": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/map-or-similar/-/map-or-similar-1.5.0.tgz", "integrity": "sha1-beJlMXSt+12e3DPGnT6Sobdvrwg=", "dev": true }, - "map-visit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/map-visit/-/map-visit-1.0.0.tgz", - "integrity": "sha1-7Nyo8TFE5mDxtb1B8S80edmN+48=", - "requires": { - "object-visit": "^1.0.0" - } - }, "markdown-escapes": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/markdown-escapes/-/markdown-escapes-1.0.4.tgz", @@ -72336,6 +66439,11 @@ "remove-accents": "0.4.2" } }, + "math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==" + }, "md5.js": { "version": 
"1.3.5", "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz", @@ -72457,23 +66565,12 @@ "dev": true }, "micromatch": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", - "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", - "requires": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - "fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.2" + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "requires": { + "braces": "3.0.3", + "picomatch": "^2.3.1" } }, "microseconds": { @@ -72544,14 +66641,6 @@ "schema-utils": "^4.0.0" }, "dependencies": { - "ajv-keywords": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", - "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", - "requires": { - "fast-deep-equal": "^3.1.3" - } - }, "schema-utils": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", @@ -72577,9 +66666,9 @@ "dev": true }, "minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.5.tgz", + "integrity": "sha512-tUpxzX0VAzJHjLu0xUfFv1gwVp9ba3IOuRAVH2EGuRW8a5emA2FlACLqiT/lDVtS1W+TGNwqz3sWaNyLgDJWuw==", "requires": { "brace-expansion": "^1.1.7" } @@ -72642,7 +66731,7 @@ "dev": true, "requires": { "concat-stream": "^1.5.0", - "duplexify": "^3.4.2", + "duplexify": "4.1.3", "end-of-stream": "^1.1.0", "flush-write-stream": "^1.0.0", "from2": "^2.1.0", @@ -72653,25 +66742,6 @@ "through2": "^2.0.0" } }, - "mixin-deep": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz", - "integrity": "sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==", - "requires": { - "for-in": "^1.0.2", - "is-extendable": "^1.0.1" - }, - "dependencies": { - "is-extendable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", - "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", - "requires": { - "is-plain-object": "^2.0.4" - } - } - } - }, "mkdirp": { "version": "0.5.5", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", @@ -72705,26 +66775,6 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" }, - "multicast-dns": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/multicast-dns/-/multicast-dns-6.2.3.tgz", - "integrity": "sha512-ji6J5enbMyGRHIAkAOu3WdV8nggqviKCEKtXcOqfphZZtQrmHKycfynJ2V7eVPUA4NhJ6V7Wf4TmGbTwKE9B6g==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "dns-packet": "^1.3.1", - "thunky": "^1.0.2" - } - }, - "multicast-dns-service-types": { - "version": "1.1.0", - 
"resolved": "https://registry.npmjs.org/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz", - "integrity": "sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE=", - "dev": true, - "optional": true, - "peer": true - }, "nan": { "version": "2.19.0", "resolved": "https://registry.npmjs.org/nan/-/nan-2.19.0.tgz", @@ -72745,24 +66795,6 @@ "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.8.tgz", "integrity": "sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==" }, - "nanomatch": { - "version": "1.2.13", - "resolved": "https://registry.npmjs.org/nanomatch/-/nanomatch-1.2.13.tgz", - "integrity": "sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==", - "requires": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "fragment-cache": "^0.2.1", - "is-windows": "^1.0.2", - "kind-of": "^6.0.2", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - } - }, "native-url": { "version": "0.2.6", "resolved": "https://registry.npmjs.org/native-url/-/native-url-0.2.6.tgz", @@ -72788,6 +66820,14 @@ "railroad-diagrams": "^1.0.0", "randexp": "0.4.6", "semver": "^5.4.1" + }, + "dependencies": { + "semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true + } } }, "negotiator": { @@ -72806,12 +66846,6 @@ "integrity": "sha512-AO81vsIO1k1sM4Zrd6Hu7regmJN1NSiAja10gc4bX3F0wd+9rQmcuHQaHVQCYIEC8iFXnE+mavh23GOt7wBgug==", "dev": true }, - "nice-try": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", - "dev": true - }, "no-case": { "version": "2.3.2", "resolved": "https://registry.npmjs.org/no-case/-/no-case-2.3.2.tgz", @@ -72827,26 +66861,9 @@ "integrity": "sha1-X1Zl2TNRM1yqvvjxxVRRbPXx5OU=", "dev": true, "requires": { - "minimatch": "^3.0.2" - } - }, - "node-fetch": { - "version": "1.7.3", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-1.7.3.tgz", - "integrity": "sha512-NhZ4CsKx7cYm2vSrBAr2PvFOe6sWDf0UYLRqA6svUYg7+/TSfVAu49jYC4BvQ4Sms9SZgdqGBgroqfDhJdTyKQ==", - "requires": { - "encoding": "^0.1.11", - "is-stream": "^1.0.1" + "minimatch": "3.0.5" } }, - "node-forge": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", - "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==", - "dev": true, - "optional": true, - "peer": true - }, "node-int64": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", @@ -72944,11 +66961,11 @@ } }, "nth-check": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-1.0.2.tgz", - "integrity": "sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.0.1.tgz", + "integrity": "sha512-it1vE95zF6dTT9lBsYbxvqh0Soy4SPowchj0UBGj/V6cTPnXXtQOPUbhZ6CmGzAD/rW22LQK6E96pcdJXk4A4w==", "requires": { - "boolbase": "~1.0.0" + "boolbase": "^1.0.0" } }, "num2fraction": { @@ -72978,44 +66995,15 @@ "resolved": 
"https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" }, - "object-copy": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/object-copy/-/object-copy-0.1.0.tgz", - "integrity": "sha1-fn2Fi3gb18mRpBupde04EnVOmYw=", - "requires": { - "copy-descriptor": "^0.1.0", - "define-property": "^0.2.5", - "kind-of": "^3.0.3" - }, - "dependencies": { - "define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "requires": { - "is-descriptor": "^0.1.0" - } - }, - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, "object-hash": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.2.0.tgz", "integrity": "sha512-gScRMn0bS5fH+IuwyIFgnh9zBdo4DV+6GhygmWM9HyNJSgS0hScp1f5vjtm7oIIOiT9trXrShAkLFSc2IqKNgw==" }, "object-inspect": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.6.0.tgz", - "integrity": "sha512-GJzfBZ6DgDAmnuaM3104jR4s1Myxr3Y3zfIyN4z3UdqN69oSRacNK8UhnobDdC+7J2AHCjGwxQubNJfE70SXXQ==", - "dev": true + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==" }, "object-is": { "version": "1.0.1", @@ -73028,14 +67016,6 @@ "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==" }, - "object-visit": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/object-visit/-/object-visit-1.0.1.tgz", - "integrity": "sha1-95xEk68MU3e1n+OdOV5BBC3QRbs=", - "requires": { - "isobject": "^3.0.0" - } - }, "object.assign": { "version": "4.1.5", "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", @@ -73146,11 +67126,6 @@ "is-symbol": "^1.0.2" } }, - "is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==" - }, "is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -73167,22 +67142,9 @@ "requires": { "has-tostringtag": "^1.0.0" } - }, - "object-inspect": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", - "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==" } } }, - "object.pick": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz", - "integrity": "sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c=", - "requires": { - "isobject": "^3.0.1" - } - }, "object.values": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.0.tgz", @@ -73214,9 +67176,9 @@ } }, "on-headers": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", - "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==" + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/on-headers/-/on-headers-1.1.0.tgz", + "integrity": "sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A==" }, "once": { "version": "1.4.0", @@ -73250,27 +67212,6 @@ "integrity": "sha512-goYSy5c2UXE4Ra1xixabeVh1guIX/ZV/YokJksb6q2lubWu6UbvPQ20p542/sFIll1nl8JnCyK9oBaOcCWXwvA==", "dev": true }, - "opn": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/opn/-/opn-5.5.0.tgz", - "integrity": "sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "is-wsl": "^1.1.0" - }, - "dependencies": { - "is-wsl": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-1.1.0.tgz", - "integrity": "sha512-gfygJYZ2gLTDlmbWMI0CE2MwnFzSN/2SZfkMlItC4K/JBlsWVDB0bO6XhqcY13YXE7iMcAJnzTCJjPiTeJJ0Mw==", - "dev": true, - "optional": true, - "peer": true - } - } - }, "optionator": { "version": "0.8.3", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", @@ -73370,17 +67311,6 @@ "aggregate-error": "^3.0.0" } }, - "p-retry": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-3.0.1.tgz", - "integrity": "sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "retry": "^0.12.0" - } - }, "p-timeout": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-3.2.0.tgz", @@ -73495,11 +67425,6 @@ "upper-case-first": "^1.1.0" } }, - "pascalcase": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/pascalcase/-/pascalcase-0.1.1.tgz", - "integrity": "sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ=" - }, "path-browserify": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-0.0.1.tgz", @@ -73531,14 +67456,6 @@ "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" }, - "path-is-inside": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", - "integrity": "sha512-DUWJr3+ULp4zXmol/SZkFf3JGsS9/SIv+Y3Rt93/UjPpDpklB5f1er4O3POIbUuUJ3FXgqte2Q7SrU6zAqwk8w==", - "dev": true, - "optional": true, - "peer": true - }, "path-key": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", @@ -73551,9 +67468,9 @@ "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" }, "path-to-regexp": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.7.0.tgz", - "integrity": "sha1-Wf3g9DW62suhA6hOnTvGTpa5k30=", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.9.0.tgz", + "integrity": "sha512-xIp7/apCFJuUHdDLWe8O1HIkb0kQrOMb/0u6FXQjemHn/ii5LrIzU6bdECnsiTF/GjZkMEKg1xdiZwNqDYlZ6g==", "requires": { "isarray": "0.0.1" }, @@ -73571,16 +67488,56 @@ "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==" }, "pbkdf2": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz", - "integrity": "sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.3.tgz", + "integrity": 
"sha512-wfRLBZ0feWRhCIkoMB6ete7czJcnNnqRpcoWQBLqatqXXmelSRqfdDK4F3u9T2s2cXas/hQJcryI/4lAL+XTlA==", "dev": true, "requires": { - "create-hash": "^1.1.2", - "create-hmac": "^1.1.4", - "ripemd160": "^2.0.1", - "safe-buffer": "^5.0.1", - "sha.js": "^2.4.8" + "create-hash": "~1.1.3", + "create-hmac": "^1.1.7", + "ripemd160": "=2.0.1", + "safe-buffer": "^5.2.1", + "sha.js": "^2.4.11", + "to-buffer": "^1.2.0" + }, + "dependencies": { + "create-hash": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.1.3.tgz", + "integrity": "sha512-snRpch/kwQhcdlnZKYanNF1m0RDlrCdSKQaH87w1FCFPVPNCQ/Il9QJKAX2jVBZddRdaHBMC+zXa9Gw9tmkNUA==", + "dev": true, + "requires": { + "cipher-base": "^1.0.1", + "inherits": "^2.0.1", + "ripemd160": "^2.0.0", + "sha.js": "^2.4.0" + } + }, + "hash-base": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-2.0.2.tgz", + "integrity": "sha512-0TROgQ1/SxE6KmxWSvXHvRj90/Xo1JvZShofnYF+f6ZsGtR4eES7WfrQzPalmyagfKZCXpVnitiRebZulWsbiw==", + "dev": true, + "requires": { + "inherits": "^2.0.1" + } + }, + "ripemd160": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.1.tgz", + "integrity": "sha512-J7f4wutN8mdbV08MJnXibYpCOPHR+yzy+iQ/AsjMv2j8cLavQ8VGagDFUwwTAdF8FmRKVeNpbTTEwNHCW1g94w==", + "dev": true, + "requires": { + "hash-base": "^2.0.0", + "inherits": "^2.0.1" + } + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true + } } }, "performance-now": { @@ -73589,14 +67546,14 @@ "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" }, "picocolors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==" }, "picomatch": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz", - "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==" + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==" }, "pify": { "version": "4.0.1", @@ -73604,25 +67561,6 @@ "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", "dev": true }, - "pinkie": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", - "integrity": "sha512-MnUuEycAemtSaeFSjXKW/aroV7akBbY+Sv+RkyqFjgAe73F+MR0TBWKBRDkmfWq/HiFmdavfZ1G7h4SPZXaCSg==", - "dev": true, - "optional": true, - "peer": true - }, - "pinkie-promise": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", - "integrity": "sha512-0Gni6D4UcLTbv9c57DfxDGdr41XfgUjqWZu492f0cIGr16zDU06BWP/RAEvOuo7CQ0CNjHaLlM59YJJFm3NWlw==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "pinkie": "^2.0.0" - } - }, "pirates": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.1.tgz", 
@@ -73745,48 +67683,44 @@ "resolved": "https://registry.npmjs.org/portable-fetch/-/portable-fetch-3.0.0.tgz", "integrity": "sha1-PL9KptvFpXNLQcBBnJJzMTv9mtg=", "requires": { - "node-fetch": "^1.0.1", + "node-fetch": "2.6.7", "whatwg-fetch": ">=0.10.0" - } - }, - "portfinder": { - "version": "1.0.28", - "resolved": "https://registry.npmjs.org/portfinder/-/portfinder-1.0.28.tgz", - "integrity": "sha512-Se+2isanIcEqf2XMHjyUKskczxbPH7dQnlMjXX6+dybayyHvAf/TCgyMRlzf/B6QDhAEFOGes0pzRo3by4AbMA==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "async": "^2.6.2", - "debug": "^3.1.1", - "mkdirp": "^0.5.5" }, "dependencies": { - "debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "optional": true, - "peer": true, + "node-fetch": { + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", "requires": { - "ms": "^2.1.1" + "whatwg-url": "^5.0.0" } }, - "ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "dev": true, - "optional": true, - "peer": true + "tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + }, + "webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + }, + "whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "requires": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } } } }, - "posix-character-classes": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz", - "integrity": "sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=" + "possible-typed-array-names": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz", + "integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==", + "dev": true }, "postcss": { "version": "8.4.5", @@ -74180,7 +68114,7 @@ "requires": { "cosmiconfig": "^7.0.0", "klona": "^2.0.5", - "semver": "^7.3.5" + "semver": "7.6.0" }, "dependencies": { "klona": { @@ -74189,9 +68123,9 @@ "integrity": "sha512-pJiBpiXMbt7dkzXe8Ghj/u4FfXOOa98fPW+bihOJ4SjnoijweJrNThJfd3ifXpXhREjpoF2mZVH1GfS9LV3kHQ==" }, "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "requires": { "lru-cache": "^6.0.0" } @@ 
-74854,7 +68788,7 @@ "css-what": "^5.1.0", "domhandler": "^4.3.0", "domutils": "^2.8.0", - "nth-check": "^2.0.1" + "nth-check": "2.0.1" } }, "css-tree": { @@ -74909,14 +68843,6 @@ "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz", "integrity": "sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==" }, - "nth-check": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.0.1.tgz", - "integrity": "sha512-it1vE95zF6dTT9lBsYbxvqh0Soy4SPowchj0UBGj/V6cTPnXXtQOPUbhZ6CmGzAD/rW22LQK6E96pcdJXk4A4w==", - "requires": { - "boolbase": "^1.0.0" - } - }, "postcss-value-parser": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", @@ -74990,7 +68916,7 @@ "dev": true, "requires": { "@jest/types": "^26.6.2", - "ansi-regex": "^5.0.0", + "ansi-regex": "5.0.1", "ansi-styles": "^4.0.0", "react-is": "^17.0.1" }, @@ -75175,12 +69101,6 @@ "is-symbol": "^1.0.2" } }, - "is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", - "dev": true - }, "is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -75199,12 +69119,6 @@ "requires": { "has-tostringtag": "^1.0.0" } - }, - "object-inspect": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", - "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==", - "dev": true } } }, @@ -75255,12 +69169,6 @@ "is-symbol": "^1.0.2" } }, - "is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", - "dev": true - }, "is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -75279,12 +69187,6 @@ "requires": { "has-tostringtag": "^1.0.0" } - }, - "object-inspect": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", - "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==", - "dev": true } } }, @@ -75298,12 +69200,13 @@ } }, "prop-types": { - "version": "15.6.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.6.2.tgz", - "integrity": "sha512-3pboPvLiWD7dkI3qf3KbUe6hKFKa52w+AE0VCqECtf+QHAKgOL37tTaNCnuX1nAAQ4ZhyP+kYVKf8rLmJ/feDQ==", + "version": "15.8.1", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", + "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", "requires": { - "loose-envify": "^1.3.1", - "object-assign": "^4.1.1" + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.13.1" } }, "prop-types-exact": { @@ -75335,9 +69238,9 @@ } }, "protobufjs": { - "version": "6.11.2", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.11.2.tgz", - "integrity": "sha512-4BQJoPooKJl2G9j3XftkIXjoC9C0Av2NOrWmbLWT1vH32GcSUHjM0Arra6UfTsVyfMAuFzaLucXn1sadxJydAw==", + "version": "6.11.4", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.11.4.tgz", + "integrity": 
"sha512-5kQWPaJHi1WoCpjTGszzQ32PG2F4+wRY6BmAT4Vfw56Q2FZ4YZzK20xUYQH4YkfehY1e6QSICrJquM6xXZNcrw==", "requires": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", @@ -75419,7 +69322,7 @@ "integrity": "sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ==", "dev": true, "requires": { - "duplexify": "^3.6.0", + "duplexify": "4.1.3", "inherits": "^2.0.3", "pump": "^2.0.0" }, @@ -75463,13 +69366,6 @@ "integrity": "sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==", "dev": true }, - "querystringify": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", - "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==", - "optional": true, - "peer": true - }, "queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", @@ -75544,11 +69440,6 @@ "unpipe": "1.0.0" }, "dependencies": { - "bytes": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", - "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==" - }, "depd": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", @@ -75657,18 +69548,6 @@ "lodash.get": "^4.4.2", "lodash.isequal": "^4.5.0", "prop-types": "^15.7.2" - }, - "dependencies": { - "prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "requires": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - } } }, "react-app-polyfill": { @@ -75734,7 +69613,7 @@ "address": "1.1.2", "browserslist": "4.14.2", "chalk": "2.4.2", - "cross-spawn": "7.0.3", + "cross-spawn": "7.0.5", "detect-port-alt": "1.1.6", "escape-string-regexp": "2.0.0", "filesize": "6.1.0", @@ -75777,17 +69656,6 @@ "node-releases": "^1.1.61" } }, - "cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "requires": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - } - }, "escape-string-regexp": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", @@ -75830,36 +69698,6 @@ "emojis-list": "^3.0.0", "json5": "^2.1.2" } - }, - "path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true - }, - "shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "requires": { - "shebang-regex": "^3.0.0" - } - }, - "shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true - }, - "which": { - 
"version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } } } }, @@ -75898,21 +69736,12 @@ "endent": "^2.0.1", "find-cache-dir": "^3.3.1", "flat-cache": "^3.0.4", - "micromatch": "^4.0.2", + "micromatch": ">=4.0.8", "react-docgen-typescript": "^1.22.0", "tslib": "^2.0.0", "webpack-sources": "^2.2.0" }, "dependencies": { - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "requires": { - "fill-range": "^7.0.1" - } - }, "debug": { "version": "4.3.2", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", @@ -75922,15 +69751,6 @@ "ms": "2.1.2" } }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "requires": { - "to-regex-range": "^5.0.1" - } - }, "find-cache-dir": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.1.tgz", @@ -75942,12 +69762,6 @@ "pkg-dir": "^4.1.0" } }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true - }, "make-dir": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", @@ -75957,16 +69771,6 @@ "semver": "^6.0.0" } }, - "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "dev": true, - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, "ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -75983,9 +69787,9 @@ } }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true }, "source-map": { @@ -75994,15 +69798,6 @@ "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", "dev": true }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "requires": { - "is-number": "^7.0.0" - } - }, "tslib": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.0.tgz", @@ -76206,19 +70001,6 @@ "prop-types": "^15.7.2", "react-fast-compare": "^3.2.0", "shallowequal": "^1.1.0" - }, - "dependencies": { - "prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": 
"sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "dev": true, - "requires": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - } } }, "react-inspector": { @@ -76301,18 +70083,6 @@ "loose-envify": "^1.4.0", "prop-types": "^15.7.2", "react-is": "^16.13.1" - }, - "dependencies": { - "prop-types": { - "version": "15.7.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", - "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", - "requires": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.8.1" - } - } } }, "react-refresh": { @@ -76330,7 +70100,7 @@ "hoist-non-react-statics": "^2.5.0", "invariant": "^2.2.4", "loose-envify": "^1.3.1", - "path-to-regexp": "^1.7.0", + "path-to-regexp": "1.9.0", "prop-types": "^15.6.1", "warning": "^4.0.1" }, @@ -76405,7 +70175,7 @@ "resolve": "^1.20.0", "resolve-url-loader": "^4.0.0", "sass-loader": "^12.3.0", - "semver": "^7.3.5", + "semver": "7.6.0", "source-map-loader": "^3.0.0", "style-loader": "^3.3.1", "tailwindcss": "^3.0.2", @@ -76466,11 +70236,6 @@ "resolved": "https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz", "integrity": "sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==" }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, "ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -76496,14 +70261,6 @@ "tryer": "^1.0.1" } }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "requires": { - "fill-range": "^7.0.1" - } - }, "browserslist": { "version": "4.19.1", "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.19.1.tgz", @@ -76597,16 +70354,6 @@ } } }, - "cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "requires": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - } - }, "css-select": { "version": "4.2.1", "resolved": "https://registry.npmjs.org/css-select/-/css-select-4.2.1.tgz", @@ -76616,7 +70363,7 @@ "css-what": "^5.1.0", "domhandler": "^4.3.0", "domutils": "^2.8.0", - "nth-check": "^2.0.1" + "nth-check": "2.0.1" } }, "deepmerge": { @@ -76699,7 +70446,7 @@ "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", "requires": { - "cross-spawn": "^7.0.3", + "cross-spawn": "7.0.5", "get-stream": "^6.0.0", "human-signals": "^2.1.0", "is-stream": "^2.0.0", @@ -76715,14 +70462,6 @@ "resolved": "https://registry.npmjs.org/filesize/-/filesize-8.0.6.tgz", "integrity": "sha512-sHvRqTiwdmcuzqet7iVwsbwF6UrV3wIgDf2SHNdY1Hgl8PC45HZg/0xtdw6U2izIV4lccnrY9ftl6wZFNdjYMg==" }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": 
"sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "requires": { - "to-regex-range": "^5.0.1" - } - }, "find-up": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", @@ -76746,9 +70485,9 @@ "fs-extra": "^9.0.0", "glob": "^7.1.6", "memfs": "^3.1.2", - "minimatch": "^3.0.4", + "minimatch": "3.0.5", "schema-utils": "2.7.0", - "semver": "^7.3.2", + "semver": "7.6.0", "tapable": "^1.0.0" }, "dependencies": { @@ -76833,7 +70572,7 @@ "he": "^1.2.0", "param-case": "^3.0.4", "relateurl": "^0.2.7", - "terser": "^5.10.0" + "terser": "5.14.2" } }, "html-webpack-plugin": { @@ -76848,18 +70587,6 @@ "tapable": "^2.0.0" } }, - "http-proxy-middleware": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz", - "integrity": "sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw==", - "requires": { - "@types/http-proxy": "^1.17.8", - "http-proxy": "^1.18.1", - "is-glob": "^4.0.1", - "is-plain-obj": "^3.0.0", - "micromatch": "^4.0.2" - } - }, "immer": { "version": "9.0.7", "resolved": "https://registry.npmjs.org/immer/-/immer-9.0.7.tgz", @@ -76878,19 +70605,6 @@ "has": "^1.0.3" } }, - "is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "requires": { - "is-extglob": "^2.1.1" - } - }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" - }, "is-stream": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", @@ -76937,15 +70651,6 @@ "tslib": "^2.0.3" } }, - "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, "no-case": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", @@ -76973,14 +70678,6 @@ "path-key": "^3.0.0" } }, - "nth-check": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.0.1.tgz", - "integrity": "sha512-it1vE95zF6dTT9lBsYbxvqh0Soy4SPowchj0UBGj/V6cTPnXXtQOPUbhZ6CmGzAD/rW22LQK6E96pcdJXk4A4w==", - "requires": { - "boolbase": "^1.0.0" - } - }, "open": { "version": "8.4.0", "resolved": "https://registry.npmjs.org/open/-/open-8.4.0.tgz", @@ -77072,7 +70769,7 @@ "address": "^1.1.2", "browserslist": "^4.18.1", "chalk": "^4.1.2", - "cross-spawn": "^7.0.3", + "cross-spawn": "7.0.5", "detect-port-alt": "^1.1.6", "escape-string-regexp": "^4.0.0", "filesize": "^8.0.6", @@ -77175,9 +70872,9 @@ } }, "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "requires": { "lru-cache": "^6.0.0" } @@ -77190,19 +70887,6 @@ "randombytes": "^2.1.0" } }, - 
"shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "requires": { - "shebang-regex": "^3.0.0" - } - }, - "shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" - }, "shell-quote": { "version": "1.7.3", "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.3.tgz", @@ -77213,7 +70897,7 @@ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "requires": { - "ansi-regex": "^5.0.1" + "ansi-regex": "5.0.1" } }, "style-loader": { @@ -77235,28 +70919,6 @@ "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==" }, - "terser": { - "version": "5.10.0", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.10.0.tgz", - "integrity": "sha512-AMmF99DMfEDiRJfxfY5jj5wNH/bYO09cniSqhfoyxc8sFoYIgkJy86G04UoZU5VjlpnplVu0K6Tx6E9b5+DlHA==", - "requires": { - "commander": "^2.20.0", - "source-map": "~0.7.2", - "source-map-support": "~0.5.20" - }, - "dependencies": { - "commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" - }, - "source-map": { - "version": "0.7.3", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", - "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==" - } - } - }, "terser-webpack-plugin": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.0.tgz", @@ -77266,7 +70928,7 @@ "schema-utils": "^3.1.1", "serialize-javascript": "^6.0.0", "source-map": "^0.6.1", - "terser": "^5.7.2" + "terser": "5.14.2" }, "dependencies": { "source-map": { @@ -77276,80 +70938,16 @@ } } }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "requires": { - "is-number": "^7.0.0" - } - }, "tslib": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz", "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==" }, - "type-fest": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz", - "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==", - "optional": true, - "peer": true - }, "universalify": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==" }, - "webpack-dev-middleware": { - "version": "5.3.3", - "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz", - "integrity": 
"sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA==", - "requires": { - "colorette": "^2.0.10", - "memfs": "^3.4.3", - "mime-types": "^2.1.31", - "range-parser": "^1.2.1", - "schema-utils": "^4.0.0" - }, - "dependencies": { - "ajv": { - "version": "8.12.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", - "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", - "requires": { - "fast-deep-equal": "^3.1.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" - } - }, - "ajv-keywords": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", - "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", - "requires": { - "fast-deep-equal": "^3.1.3" - } - }, - "json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" - }, - "schema-utils": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.2.0.tgz", - "integrity": "sha512-L0jRsrPpjdckP3oPug3/VxNKt2trR8TcabrM6FOAAlvC/9Phcmm+cuAgTlxBqdBR1WJx7Naj9WHw+aOmheSVbw==", - "requires": { - "@types/json-schema": "^7.0.9", - "ajv": "^8.9.0", - "ajv-formats": "^2.1.1", - "ajv-keywords": "^5.1.0" - } - } - } - }, "webpack-dev-server": { "version": "4.15.1", "resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-4.15.1.tgz", @@ -77383,8 +70981,8 @@ "serve-index": "^1.9.1", "sockjs": "^0.3.24", "spdy": "^4.0.2", - "webpack-dev-middleware": "^5.3.1", - "ws": "^8.13.0" + "webpack-dev-middleware": "5.3.4", + "ws": "8.17.1" }, "dependencies": { "ajv": { @@ -77423,20 +71021,6 @@ } } } - }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "requires": { - "isexe": "^2.0.0" - } - }, - "ws": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.16.0.tgz", - "integrity": "sha512-HS0c//TP7Ina87TfiPUz1rQzMhHrl/SG2guqRcTOIUYD2q8uhUdNHZYJUaQ8aTGPzCh+c6oawMKW35nFl1dxyQ==", - "requires": {} } } }, @@ -77594,6 +71178,12 @@ "path-parse": "^1.0.6" } }, + "semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true + }, "type-fest": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz", @@ -77668,7 +71258,17 @@ "resolved": "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.2.tgz", "integrity": "sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg==", "requires": { - "minimatch": "3.0.4" + "minimatch": "3.0.5" + } + }, + "redent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", + "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", + "dev": true, + "requires": { + "indent-string": "^4.0.0", + "strip-indent": "^3.0.0" } }, "redent": { @@ -77732,15 +71332,6 @@ "@babel/runtime": "^7.8.4" } }, - 
"regex-not": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/regex-not/-/regex-not-1.0.2.tgz", - "integrity": "sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==", - "requires": { - "extend-shallow": "^3.0.2", - "safe-regex": "^1.1.0" - } - }, "regex-parser": { "version": "2.2.11", "resolved": "https://registry.npmjs.org/regex-parser/-/regex-parser-2.2.11.tgz", @@ -77897,6 +71488,12 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true + }, + "semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true } } }, @@ -77984,7 +71581,7 @@ "css-what": "^5.0.0", "domhandler": "^4.2.0", "domutils": "^2.6.0", - "nth-check": "^2.0.0" + "nth-check": "2.0.1" } }, "dom-serializer": { @@ -78021,15 +71618,6 @@ "integrity": "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==", "dev": true }, - "nth-check": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.0.0.tgz", - "integrity": "sha512-i4sc/Kj8htBrAiH1viZ0TgU8Y5XqCaV/FziYK6TBczxmeKm3AEFWqqF3195yKudrarqy7Zu80Ra5dobFjn9X/Q==", - "dev": true, - "requires": { - "boolbase": "^1.0.0" - } - }, "strip-ansi": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", @@ -78041,15 +71629,11 @@ } } }, - "repeat-element": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.3.tgz", - "integrity": "sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g==" - }, "repeat-string": { "version": "1.6.1", "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", - "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=" + "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=", + "dev": true }, "request": { "version": "2.88.2", @@ -78062,7 +71646,7 @@ "combined-stream": "~1.0.6", "extend": "~3.0.2", "forever-agent": "~0.6.1", - "form-data": "~2.3.2", + "form-data": "2.5.4", "har-validator": "~5.1.3", "http-signature": "~1.2.0", "is-typedarray": "~1.0.0", @@ -78078,6 +71662,24 @@ "uuid": "^3.3.2" }, "dependencies": { + "form-data": { + "version": "2.5.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.4.tgz", + "integrity": "sha512-Y/3MmRiR8Nd+0CUtrbvcKtKzLWiUfpQ7DFVggH8PwmGt/0r7RSy32GuP4hpCJlQNEBusisSx1DLtD8uD386HJQ==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "has-own": "^1.0.1", + "mime-types": "^2.1.35", + "safe-buffer": "^5.2.1" + } + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + }, "uuid": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", @@ -78115,14 +71717,6 @@ "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==" }, - "require-main-filename": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", - "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", - "dev": true, - "optional": true, - "peer": true - }, "requires-port": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", @@ -78162,11 +71756,6 @@ "resolved": "https://registry.npmjs.org/resolve-pathname/-/resolve-pathname-2.2.0.tgz", "integrity": "sha512-bAFz9ld18RzJfddgrO2e/0S2O81710++chRMUxHjXOYKF6jTAMrUNZrEZ1PvV0zlhfjidm08iRPdTLPno1FuRg==" }, - "resolve-url": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/resolve-url/-/resolve-url-0.2.1.tgz", - "integrity": "sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=" - }, "resolve-url-loader": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/resolve-url-loader/-/resolve-url-loader-4.0.0.tgz", @@ -78208,15 +71797,8 @@ "ret": { "version": "0.1.15", "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", - "integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==" - }, - "retry": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", - "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", - "dev": true, - "optional": true, - "peer": true + "integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==", + "dev": true }, "retry-request": { "version": "4.1.1", @@ -78294,7 +71876,7 @@ "@babel/code-frame": "^7.10.4", "jest-worker": "^26.2.1", "serialize-javascript": "^4.0.0", - "terser": "^5.0.0" + "terser": "5.14.2" }, "dependencies": { "serialize-javascript": { @@ -78304,21 +71886,6 @@ "requires": { "randombytes": "^2.1.0" } - }, - "source-map": { - "version": "0.7.3", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", - "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==" - }, - "terser": { - "version": "5.10.0", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.10.0.tgz", - "integrity": "sha512-AMmF99DMfEDiRJfxfY5jj5wNH/bYO09cniSqhfoyxc8sFoYIgkJy86G04UoZU5VjlpnplVu0K6Tx6E9b5+DlHA==", - "requires": { - "commander": "^2.20.0", - "source-map": "~0.7.2", - "source-map-support": "~0.5.20" - } } } }, @@ -78370,14 +71937,6 @@ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, - "safe-regex": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", - "integrity": "sha1-QKNmnzsHfR6UPURinhV91IAjvy4=", - "requires": { - "ret": "~0.1.10" - } - }, "safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", @@ -78395,7 +71954,7 @@ "exec-sh": "^0.3.2", "execa": "^1.0.0", "fb-watchman": "^2.0.0", - "micromatch": "^3.1.4", + "micromatch": ">=4.0.8", "minimist": "^1.1.1", "walker": "~1.0.5" }, @@ -78406,7 +71965,7 @@ "integrity": "sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==", "dev": true, "requires": { - "micromatch": "^3.1.4", + "micromatch": ">=4.0.8", "normalize-path": "^2.1.1" } }, @@ -78497,23 +72056,6 @@ "resolved": "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz", "integrity": 
"sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo=" }, - "selfsigned": { - "version": "1.10.11", - "resolved": "https://registry.npmjs.org/selfsigned/-/selfsigned-1.10.11.tgz", - "integrity": "sha512-aVmbPOfViZqOZPgRBT0+3u4yZFHpmnIghLMlAcb5/xhp5ZtB/RVnKhz5vl2M32CLXAqR4kha9zfhNg0Lf/sxKA==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "node-forge": "^0.10.0" - } - }, - "semver": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.6.0.tgz", - "integrity": "sha512-RS9R6R35NYgQn++fkDWaOmqGoj4Ek9gGs+DPxNUZKuwE183xjJroKvyo1IzVFeXvUrvmALy6FWD5xrdJT25gMg==", - "dev": true - }, "sentence-case": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/sentence-case/-/sentence-case-2.1.1.tgz", @@ -78603,27 +72145,6 @@ "has-property-descriptors": "^1.0.2" } }, - "set-value": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz", - "integrity": "sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==", - "requires": { - "extend-shallow": "^2.0.1", - "is-extendable": "^0.1.1", - "is-plain-object": "^2.0.3", - "split-string": "^3.0.1" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, "setimmediate": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", @@ -78659,19 +72180,17 @@ "dev": true }, "shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", - "dev": true, + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", "requires": { - "shebang-regex": "^1.0.0" + "shebang-regex": "^3.0.0" } }, "shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", - "dev": true + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" }, "shell-quote": { "version": "1.7.2", @@ -78680,20 +72199,47 @@ "dev": true }, "side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", "requires": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" - }, - "dependencies": { - "object-inspect": { - "version": "1.10.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.10.3.tgz", - "integrity": "sha512-e5mCJlSH7poANfC8z8S9s9S2IN5/4Zb3aZ33f5s8YqoazCFzNLloLU8r5VCG+G7WoqLvAAZoVMcy3tp/3X0Plw==" - } + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": 
"^1.0.2" + } + }, + "side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "requires": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + } + }, + "side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "requires": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + } + }, + "side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "requires": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" } }, "signal-exit": { @@ -78726,111 +72272,6 @@ "integrity": "sha1-EMCI2LWOsHazIpu1oE4jLOEmQi0=", "dev": true }, - "snapdragon": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz", - "integrity": "sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==", - "requires": { - "base": "^0.11.1", - "debug": "^2.2.0", - "define-property": "^0.2.5", - "extend-shallow": "^2.0.1", - "map-cache": "^0.2.2", - "source-map": "^0.5.6", - "source-map-resolve": "^0.5.0", - "use": "^3.1.0" - }, - "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "requires": { - "ms": "2.0.0" - } - }, - "define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "requires": { - "is-descriptor": "^0.1.0" - } - }, - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, - "snapdragon-node": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/snapdragon-node/-/snapdragon-node-2.1.1.tgz", - "integrity": "sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==", - "requires": { - "define-property": "^1.0.0", - "isobject": "^3.0.0", - "snapdragon-util": "^3.0.1" - }, - "dependencies": { - "define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", - "requires": { - "is-descriptor": "^1.0.0" - } - }, - "is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", 
- "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "requires": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - } - } - } - }, - "snapdragon-util": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/snapdragon-util/-/snapdragon-util-3.0.1.tgz", - "integrity": "sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==", - "requires": { - "kind-of": "^3.2.0" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, "snapshot-diff": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/snapshot-diff/-/snapshot-diff-0.6.1.tgz", @@ -78904,7 +72345,7 @@ "@jest/types": "^24.9.0", "@types/stack-utils": "^1.0.1", "chalk": "^2.0.1", - "micromatch": "^3.1.10", + "micromatch": ">=4.0.8", "slash": "^2.0.0", "stack-utils": "^1.0.1" } @@ -78971,9 +72412,9 @@ } }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true }, "slash": { @@ -79003,46 +72444,6 @@ "websocket-driver": "^0.7.4" } }, - "sockjs-client": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/sockjs-client/-/sockjs-client-1.6.1.tgz", - "integrity": "sha512-2g0tjOR+fRs0amxENLi/q5TiJTqY+WXFOzb5UwXndlK6TO3U/mirZznpx6w34HVMoc3g7cY24yC/ZMIYnDlfkw==", - "optional": true, - "peer": true, - "requires": { - "debug": "^3.2.7", - "eventsource": "^2.0.2", - "faye-websocket": "^0.11.4", - "inherits": "^2.0.4", - "url-parse": "^1.5.10" - }, - "dependencies": { - "debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "optional": true, - "peer": true, - "requires": { - "ms": "^2.1.1" - } - }, - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "optional": true, - "peer": true - }, - "ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "optional": true, - "peer": true - } - } - }, "source-list-map": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/source-list-map/-/source-list-map-2.0.1.tgz", @@ -79051,7 +72452,8 @@ "source-map": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=" + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "dev": 
true }, "source-map-js": { "version": "1.0.1", @@ -79078,18 +72480,6 @@ } } }, - "source-map-resolve": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.5.2.tgz", - "integrity": "sha512-MjqsvNwyz1s0k81Goz/9vRBe9SZdB09Bdw+/zYyO+3CuPk6fouTaxscHkgtE8jKvf01kVfl8riHzERQ/kefaSA==", - "requires": { - "atob": "^2.1.1", - "decode-uri-component": "^0.2.0", - "resolve-url": "^0.2.1", - "source-map-url": "^0.4.0", - "urix": "^0.1.0" - } - }, "source-map-support": { "version": "0.5.21", "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", @@ -79219,14 +72609,6 @@ } } }, - "split-string": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/split-string/-/split-string-3.1.0.tgz", - "integrity": "sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==", - "requires": { - "extend-shallow": "^3.0.0" - } - }, "sprintf-js": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", @@ -79279,25 +72661,6 @@ "integrity": "sha512-d/5Z4/2iiCnHw6Xzghyhb+GcmF89bxwgXG60wjIiZaxnymbyOmI8Hk4VqHXiVVp6u2ysaskFfXg3ekCj4WNftQ==", "dev": true }, - "static-extend": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/static-extend/-/static-extend-0.1.2.tgz", - "integrity": "sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY=", - "requires": { - "define-property": "^0.2.5", - "object-copy": "^0.1.0" - }, - "dependencies": { - "define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "requires": { - "is-descriptor": "^0.1.0" - } - } - } - }, "statuses": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", @@ -79371,9 +72734,9 @@ } }, "stream-shift": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz", - "integrity": "sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI=", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz", + "integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==", "dev": true }, "string_decoder": { @@ -79459,12 +72822,6 @@ "is-symbol": "^1.0.2" } }, - "is-callable": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.3.tgz", - "integrity": "sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ==", - "dev": true - }, "is-regex": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.3.tgz", @@ -79480,12 +72837,6 @@ "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.6.tgz", "integrity": "sha512-2gdzbKUuqtQ3lYNrUTQYoClPhm7oQu4UdpSZMp1/DGgkHBT8E2Z1l0yMdb6D4zNAxwDiMv8MdulKROJGNl0Q0w==", "dev": true - }, - "object-inspect": { - "version": "1.10.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.10.3.tgz", - "integrity": "sha512-e5mCJlSH7poANfC8z8S9s9S2IN5/4Zb3aZ33f5s8YqoazCFzNLloLU8r5VCG+G7WoqLvAAZoVMcy3tp/3X0Plw==", - "dev": true } } }, @@ -79536,12 +72887,6 @@ "is-symbol": "^1.0.2" } }, - "is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", - "dev": true - }, "is-regex": { "version": "1.1.4", 
"resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -79560,12 +72905,6 @@ "requires": { "has-tostringtag": "^1.0.0" } - }, - "object-inspect": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", - "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==", - "dev": true } } }, @@ -79616,12 +72955,6 @@ "is-symbol": "^1.0.2" } }, - "is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", - "dev": true - }, "is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -79640,12 +72973,6 @@ "requires": { "has-tostringtag": "^1.0.0" } - }, - "object-inspect": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", - "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==", - "dev": true } } }, @@ -79713,7 +73040,7 @@ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", "requires": { - "ansi-regex": "^5.0.0" + "ansi-regex": "5.0.1" } }, "strip-bom": { @@ -79910,7 +73237,7 @@ "boolbase": "^1.0.0", "css-what": "^3.2.1", "domutils": "^1.7.0", - "nth-check": "^1.0.2" + "nth-check": "2.0.1" } }, "css-what": { @@ -80018,12 +73345,6 @@ "is-symbol": "^1.0.2" } }, - "is-callable": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", - "dev": true - }, "is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -80043,12 +73364,6 @@ "has-tostringtag": "^1.0.0" } }, - "object-inspect": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", - "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==", - "dev": true - }, "object.getownpropertydescriptors": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.2.tgz", @@ -80102,14 +73417,6 @@ "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.1.tgz", "integrity": "sha512-e0hDa9H2Z9AwFkk2qDlwhoMYE4eToKarchkQHovNdLTCYMHZHeRjI71crOh+dio4K6u1IcwubQqo79Ga4CyAQA==" }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "requires": { - "fill-range": "^7.0.1" - } - }, "chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -80141,7 +73448,7 @@ "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", - "micromatch": "^4.0.4" + "micromatch": ">=4.0.8" }, "dependencies": { "glob-parent": { @@ -80154,14 +73461,6 @@ } } }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - 
"requires": { - "to-regex-range": "^5.0.1" - } - }, "glob-parent": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", @@ -80183,28 +73482,6 @@ "has": "^1.0.3" } }, - "is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "requires": { - "is-extglob": "^2.1.1" - } - }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" - }, - "micromatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", - "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", - "requires": { - "braces": "^3.0.1", - "picomatch": "^2.2.3" - } - }, "postcss-selector-parser": { "version": "6.0.8", "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.8.tgz", @@ -80236,14 +73513,6 @@ "requires": { "has-flag": "^4.0.0" } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "requires": { - "is-number": "^7.0.0" - } } } }, @@ -80283,7 +73552,7 @@ "requires": { "http-proxy-agent": "^2.1.0", "https-proxy-agent": "^3.0.0", - "node-fetch": "^2.2.0", + "node-fetch": "2.6.7", "stream-events": "^1.0.5", "uuid": "^3.3.2" }, @@ -80308,9 +73577,18 @@ } }, "node-fetch": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", - "integrity": "sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==", + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "dev": true, + "requires": { + "whatwg-url": "^5.0.0" + } + }, + "tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", "dev": true }, "uuid": { @@ -80318,6 +73596,22 @@ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", "dev": true + }, + "webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "dev": true + }, + "whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dev": true, + "requires": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } } } }, @@ -80418,22 +73712,14 @@ } }, "terser": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/terser/-/terser-4.8.0.tgz", - "integrity": 
"sha512-EAPipTNeWsb/3wLPeup1tVPaXfIaU68xMnVdPafIL1TV05OhASArYyIfFvnvJCNrR2NIOvDVNNTFRa+Re2MWyw==", - "dev": true, + "version": "5.14.2", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.14.2.tgz", + "integrity": "sha512-oL0rGeM/WFQCUd0y2QrWxYnq7tfSuKBiqTjRPWrRgB46WD/kiwHwF8T23z78H6Q6kGCuuHcPB+KULHRdxvVGQA==", "requires": { + "@jridgewell/source-map": "^0.3.2", + "acorn": "^8.5.0", "commander": "^2.20.0", - "source-map": "~0.6.1", - "source-map-support": "~0.5.12" - }, - "dependencies": { - "source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true - } + "source-map-support": "~0.5.20" } }, "terser-webpack-plugin": { @@ -80449,7 +73735,7 @@ "schema-utils": "^3.0.0", "serialize-javascript": "^5.0.1", "source-map": "^0.6.1", - "terser": "^5.3.4", + "terser": "5.14.2", "webpack-sources": "^1.4.3" }, "dependencies": { @@ -80528,9 +73814,9 @@ } }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true }, "source-map": { @@ -80538,25 +73824,6 @@ "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", "dev": true - }, - "terser": { - "version": "5.7.0", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.7.0.tgz", - "integrity": "sha512-HP5/9hp2UaZt5fYkuhNBR8YyRcT8juw8+uFbAme53iN9hblvKnLUTKkmwJG6ocWpIKf8UK4DoeWG4ty0J6S6/g==", - "dev": true, - "requires": { - "commander": "^2.20.0", - "source-map": "~0.7.2", - "source-map-support": "~0.5.19" - }, - "dependencies": { - "source-map": { - "version": "0.7.3", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", - "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==", - "dev": true - } - } } } }, @@ -80567,7 +73834,7 @@ "requires": { "@istanbuljs/schema": "^0.1.2", "glob": "^7.1.4", - "minimatch": "^3.0.4" + "minimatch": "3.0.5" } }, "text-table": { @@ -80626,9 +73893,9 @@ } }, "tmpl": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.4.tgz", - "integrity": "sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE=" + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", + "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==" }, "to-arraybuffer": { "version": "1.0.1", @@ -80636,42 +73903,44 @@ "integrity": "sha512-okFlQcoGTi4LQBG/PgSYblw9VOyptsz2KJZqc6qtgGdes8VktzUQkj4BI2blit072iS8VODNcMA+tvnS9dnuMA==", "dev": true }, - "to-object-path": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz", - "integrity": "sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68=", + "to-buffer": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/to-buffer/-/to-buffer-1.2.1.tgz", + "integrity": "sha512-tB82LpAIWjhLYbqjx3X4zEeHN6M8CiuOEy2JY8SEQVdYRe3CCHOFaqrBW1doLDrfpWhplcW7BL+bO3/6S3pcDQ==", + "dev": true, "requires": { - "kind-of": "^3.0.2" + 
"isarray": "^2.0.5", + "safe-buffer": "^5.2.1", + "typed-array-buffer": "^1.0.3" }, "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "requires": { - "is-buffer": "^1.1.5" - } + "isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "dev": true + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true } } }, - "to-regex": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/to-regex/-/to-regex-3.0.2.tgz", - "integrity": "sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==", - "requires": { - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "regex-not": "^1.0.2", - "safe-regex": "^1.1.0" - } - }, "to-regex-range": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", "requires": { - "is-number": "^3.0.0", - "repeat-string": "^1.6.1" + "is-number": "^7.0.0" + }, + "dependencies": { + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" + } } }, "toggle-selection": { @@ -80938,14 +74207,6 @@ "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==" }, - "type-fest": { - "version": "0.13.1", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.13.1.tgz", - "integrity": "sha512-34R7HTnG0XIJcBSn5XhDd7nNFPRcXYRZrBB2O2jdKqYODldSzBAqzsWoZYYvduky73toYS/ESqxPvkDf/F0XMg==", - "dev": true, - "optional": true, - "peer": true - }, "type-is": { "version": "1.6.18", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", @@ -80955,6 +74216,17 @@ "mime-types": "~2.1.24" } }, + "typed-array-buffer": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", + "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==", + "dev": true, + "requires": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.14" + } + }, "typedarray": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", @@ -81087,17 +74359,6 @@ } } }, - "union-value": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz", - "integrity": "sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==", - "requires": { - "arr-union": "^3.1.0", - "get-value": "^2.0.6", - "is-extendable": "^0.1.1", - "set-value": "^2.0.1" - } - }, "unique-filename": { "version": "1.1.1", "resolved": 
"https://registry.npmjs.org/unique-filename/-/unique-filename-1.1.1.tgz", @@ -81220,42 +74481,6 @@ "resolved": "https://registry.npmjs.org/unquote/-/unquote-1.1.1.tgz", "integrity": "sha1-j97XMk7G6IoP+LkF58CYzcCG1UQ=" }, - "unset-value": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unset-value/-/unset-value-1.0.0.tgz", - "integrity": "sha1-g3aHP30jNRef+x5vw6jtDfyKtVk=", - "requires": { - "has-value": "^0.3.1", - "isobject": "^3.0.0" - }, - "dependencies": { - "has-value": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/has-value/-/has-value-0.3.1.tgz", - "integrity": "sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8=", - "requires": { - "get-value": "^2.0.3", - "has-values": "^0.1.4", - "isobject": "^2.0.0" - }, - "dependencies": { - "isobject": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz", - "integrity": "sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=", - "requires": { - "isarray": "1.0.0" - } - } - } - }, - "has-values": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/has-values/-/has-values-0.1.4.tgz", - "integrity": "sha1-bWHeldkd/Km5oCCJrThL/49it3E=" - } - } - }, "upath": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/upath/-/upath-1.2.0.tgz", @@ -81284,11 +74509,6 @@ "punycode": "^2.1.0" } }, - "urix": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/urix/-/urix-0.1.0.tgz", - "integrity": "sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI=" - }, "url": { "version": "0.11.0", "resolved": "https://registry.npmjs.org/url/-/url-0.11.0.tgz", @@ -81362,22 +74582,6 @@ } } }, - "url-parse": { - "version": "1.5.10", - "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", - "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", - "optional": true, - "peer": true, - "requires": { - "querystringify": "^2.1.1", - "requires-port": "^1.0.0" - } - }, - "use": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/use/-/use-3.1.1.tgz", - "integrity": "sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==" - }, "use-composed-ref": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/use-composed-ref/-/use-composed-ref-1.1.0.tgz", @@ -81608,7 +74812,7 @@ "dev": true, "optional": true, "requires": { - "micromatch": "^3.1.4", + "micromatch": ">=4.0.8", "normalize-path": "^2.1.1" }, "dependencies": { @@ -81640,7 +74844,7 @@ "requires": { "anymatch": "^2.0.0", "async-each": "^1.0.1", - "braces": "^2.3.2", + "braces": "3.0.3", "fsevents": "^1.2.7", "glob-parent": "^3.1.0", "inherits": "^2.0.3", @@ -81704,7 +74908,7 @@ "optional": true, "requires": { "graceful-fs": "^4.1.11", - "micromatch": "^3.1.10", + "micromatch": ">=4.0.8", "readable-stream": "^2.0.2" } } @@ -81857,17 +75061,6 @@ "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==" }, - "terser": { - "version": "5.29.2", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.29.2.tgz", - "integrity": "sha512-ZiGkhUBIM+7LwkNjXYJq8svgkd+QK3UUr0wJqY4MieaezBSAIPgbSPZyIx0idM6XWK5CMzSWa8MJIzmRcB8Caw==", - "requires": { - "@jridgewell/source-map": "^0.3.3", - "acorn": "^8.8.2", - "commander": "^2.20.0", - "source-map-support": "~0.5.20" - } - }, "terser-webpack-plugin": { "version": "5.3.10", "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.10.tgz", @@ 
-81877,7 +75070,7 @@ "jest-worker": "^27.4.5", "schema-utils": "^3.1.1", "serialize-javascript": "^6.0.1", - "terser": "^5.26.0" + "terser": "5.14.2" } }, "update-browserslist-db": { @@ -81907,14 +75100,14 @@ "bfj": "^6.1.1", "chalk": "^2.4.1", "commander": "^2.18.0", - "ejs": "^2.6.1", + "ejs": "3.1.7", "express": "^4.16.3", "filesize": "^3.6.1", "gzip-size": "^5.0.0", "lodash": "^4.17.15", "mkdirp": "^0.5.1", "opener": "^1.5.1", - "ws": "^6.0.0" + "ws": "8.17.1" }, "dependencies": { "acorn": { @@ -81922,547 +75115,35 @@ "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.1.tgz", "integrity": "sha512-add7dgA5ppRPxCFJoAGfMDi7PIBXq1RtGo7BhbLaxwrXPOmw8gq48Y9ozT01hUKy9byMjlR20EJhu5zlkErEkg==", "dev": true - }, - "ws": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.1.tgz", - "integrity": "sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA==", - "dev": true, - "requires": { - "async-limiter": "~1.0.0" - } } } }, "webpack-dev-middleware": { - "version": "3.7.3", - "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-3.7.3.tgz", - "integrity": "sha512-djelc/zGiz9nZj/U7PTBi2ViorGJXEWo/3ltkPbDyxCXhhEXkW0ce99falaok4TPj+AsxLiXJR0EBOb0zh9fKQ==", - "dev": true, + "version": "5.3.4", + "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.4.tgz", + "integrity": "sha512-BVdTqhhs+0IfoeAf7EoH5WE+exCmqGerHfDM0IL096Px60Tq2Mn9MAbnaGUe6HiMa41KMCYF19gyzZmBcq/o4Q==", "requires": { - "memory-fs": "^0.4.1", - "mime": "^2.4.4", - "mkdirp": "^0.5.1", + "colorette": "^2.0.10", + "memfs": "^3.4.3", + "mime-types": "^2.1.31", "range-parser": "^1.2.1", - "webpack-log": "^2.0.0" - }, - "dependencies": { - "mime": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.5.2.tgz", - "integrity": "sha512-tqkh47FzKeCPD2PUiPB6pkbMzsCasjxAfC62/Wap5qrUWcb+sFasXUC5I3gYM5iBM8v/Qpn4UK0x+j0iHyFPDg==", - "dev": true - } - } - }, - "webpack-dev-server": { - "version": "3.11.3", - "resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-3.11.3.tgz", - "integrity": "sha512-3x31rjbEQWKMNzacUZRE6wXvUFuGpH7vr0lIEbYpMAG9BOxi0928QU1BBswOAP3kg3H1O4hiS+sq4YyAn6ANnA==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "ansi-html-community": "0.0.8", - "bonjour": "^3.5.0", - "chokidar": "^2.1.8", - "compression": "^1.7.4", - "connect-history-api-fallback": "^1.6.0", - "debug": "^4.1.1", - "del": "^4.1.1", - "express": "^4.17.1", - "html-entities": "^1.3.1", - "http-proxy-middleware": "0.19.1", - "import-local": "^2.0.0", - "internal-ip": "^4.3.0", - "ip": "^1.1.5", - "is-absolute-url": "^3.0.3", - "killable": "^1.0.1", - "loglevel": "^1.6.8", - "opn": "^5.5.0", - "p-retry": "^3.0.1", - "portfinder": "^1.0.26", - "schema-utils": "^1.0.0", - "selfsigned": "^1.10.8", - "semver": "^6.3.0", - "serve-index": "^1.9.1", - "sockjs": "^0.3.21", - "sockjs-client": "^1.5.0", - "spdy": "^4.0.2", - "strip-ansi": "^3.0.1", - "supports-color": "^6.1.0", - "url": "^0.11.0", - "webpack-dev-middleware": "^3.7.2", - "webpack-log": "^2.0.0", - "ws": "^6.2.1", - "yargs": "^13.3.2" + "schema-utils": "^4.0.0" }, "dependencies": { - "ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "fast-deep-equal": "^3.1.1", - 
"fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - } - }, - "ajv-keywords": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", - "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", - "dev": true, - "optional": true, - "peer": true, - "requires": {} - }, - "ansi-html-community": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz", - "integrity": "sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==", - "dev": true, - "optional": true, - "peer": true - }, - "ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA==", - "dev": true, - "optional": true, - "peer": true - }, - "anymatch": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz", - "integrity": "sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "micromatch": "^3.1.4", - "normalize-path": "^2.1.1" - }, - "dependencies": { - "normalize-path": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", - "integrity": "sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "remove-trailing-separator": "^1.0.1" - } - } - } - }, - "binary-extensions": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.13.1.tgz", - "integrity": "sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==", - "dev": true, - "optional": true, - "peer": true - }, - "camelcase": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", - "dev": true, - "optional": true, - "peer": true - }, - "chokidar": { - "version": "2.1.8", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", - "integrity": "sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "anymatch": "^2.0.0", - "async-each": "^1.0.1", - "braces": "^2.3.2", - "fsevents": "^1.2.7", - "glob-parent": "^3.1.0", - "inherits": "^2.0.3", - "is-binary-path": "^1.0.0", - "is-glob": "^4.0.0", - "normalize-path": "^3.0.0", - "path-is-absolute": "^1.0.0", - "readdirp": "^2.2.1", - "upath": "^1.1.1" - } - }, - "cliui": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", - "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "string-width": "^3.1.0", - "strip-ansi": "^5.2.0", - "wrap-ansi": "^5.1.0" - }, - "dependencies": { - "ansi-regex": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", - "integrity": 
"sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==", - "dev": true, - "optional": true, - "peer": true - }, - "strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "ansi-regex": "^4.1.0" - } - } - } - }, - "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "ms": "2.1.2" - } - }, - "emoji-regex": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", - "dev": true, - "optional": true, - "peer": true - }, - "find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "locate-path": "^3.0.0" - } - }, - "fsevents": { - "version": "1.2.13", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.13.tgz", - "integrity": "sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "bindings": "^1.5.0", - "nan": "^2.12.1" - } - }, - "glob-parent": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", - "integrity": "sha512-E8Ak/2+dZY6fnzlR7+ueWvhsH1SjHr4jjss4YS/h4py44jY9MhK/VFdaZJAWDz6BbL21KeteKxFSFpq8OS5gVA==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "is-glob": "^3.1.0", - "path-dirname": "^1.0.0" - }, - "dependencies": { - "is-glob": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", - "integrity": "sha512-UFpDDrPgM6qpnFNI+rh/p3bUaq9hKLZN8bMUWzxmcnZVS3omf4IPK+BrewlnWjO1WmUsMYuSjKh4UJuV4+Lqmw==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "is-extglob": "^2.1.0" - } - } - } - }, - "import-local": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/import-local/-/import-local-2.0.0.tgz", - "integrity": "sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "pkg-dir": "^3.0.0", - "resolve-cwd": "^2.0.0" - } - }, - "is-binary-path": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-1.0.1.tgz", - "integrity": "sha512-9fRVlXc0uCxEDj1nQzaWONSpbTfx0FmJfzHF7pwlI8DkWGoHBBea4Pg5Ky0ojwwxQmnSifgbKkI06Qv0Ljgj+Q==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "binary-extensions": "^1.0.0" - } - }, - "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", - "dev": true, - "optional": true, - "peer": true - }, - "json-schema-traverse": { - 
"version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true, - "optional": true, - "peer": true - }, - "locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" - } - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true, - "optional": true, - "peer": true - }, - "p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "p-limit": "^2.0.0" - } - }, - "path-exists": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==", - "dev": true, - "optional": true, - "peer": true - }, - "readdirp": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz", - "integrity": "sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "graceful-fs": "^4.1.11", - "micromatch": "^3.1.10", - "readable-stream": "^2.0.2" - } - }, - "resolve-cwd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-2.0.0.tgz", - "integrity": "sha512-ccu8zQTrzVr954472aUVPLEcB3YpKSYR3cg/3lo1okzobPBM+1INXBbBZlDbnI/hbEocnf8j0QVo43hQKrbchg==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "resolve-from": "^3.0.0" - } - }, - "resolve-from": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", - "integrity": "sha512-GnlH6vxLymXJNMBo7XP1fJIzBFbdYt49CuTwmB/6N53t+kMPRMFKz783LlQ4tv28XoQfMWinAJX6WCGf2IlaIw==", - "dev": true, - "optional": true, - "peer": true + "colorette": { + "version": "2.0.20", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", + "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==" }, "schema-utils": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", - "integrity": "sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "ajv": "^6.1.0", - "ajv-errors": "^1.0.0", - "ajv-keywords": "^3.1.0" - } - }, - "semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "optional": true, - "peer": true - }, - "string-width": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", 
- "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" - }, - "dependencies": { - "ansi-regex": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", - "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==", - "dev": true, - "optional": true, - "peer": true - }, - "strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "ansi-regex": "^4.1.0" - } - } - } - }, - "strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "ansi-regex": "^2.0.0" - } - }, - "supports-color": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", - "integrity": "sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "has-flag": "^3.0.0" - } - }, - "wrap-ansi": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", - "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "ansi-styles": "^3.2.0", - "string-width": "^3.0.0", - "strip-ansi": "^5.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", - "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==", - "dev": true, - "optional": true, - "peer": true - }, - "strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "ansi-regex": "^4.1.0" - } - } - } - }, - "ws": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.2.tgz", - "integrity": "sha512-zmhltoSR8u1cnDsD43TX59mzoMZsLKqUweyYBAIvTngR3shc0W6aOZylZmq/7hqyVxPdi+5Ud2QInblgyE72fw==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "async-limiter": "~1.0.0" - } - }, - "yargs": { - "version": "13.3.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", - "integrity": "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==", - "dev": true, - "optional": true, - "peer": true, - "requires": { - "cliui": "^5.0.0", - "find-up": "^3.0.0", - "get-caller-file": "^2.0.1", - "require-directory": "^2.1.1", - "require-main-filename": "^2.0.0", - "set-blocking": "^2.0.0", - "string-width": "^3.0.0", - "which-module": "^2.0.0", - "y18n": "^4.0.0", - "yargs-parser": "^13.1.2" - } - }, - "yargs-parser": { - "version": 
"13.1.2", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", - "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", - "dev": true, - "optional": true, - "peer": true, + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.2.tgz", + "integrity": "sha512-Gn/JaSk/Mt9gYubxTtSn/QCV4em9mpAPiR1rqy/Ocu19u/G9J5WWdNoUT4SiV6mFC3y6cxyFcFwdzPM3FgxGAQ==", "requires": { - "camelcase": "^5.0.0", - "decamelize": "^1.2.0" + "@types/json-schema": "^7.0.9", + "ajv": "^8.9.0", + "ajv-formats": "^2.1.1", + "ajv-keywords": "^5.1.0" } } } @@ -82473,7 +75154,7 @@ "integrity": "sha512-xs5dPOrGPCzuRXNi8F6rwhawWvQQkeli5Ro48PRuQh8pYPCPmNnltP9itiUPT4xI8oW+y0m59lyyeQk54s5VgA==", "devOptional": true, "requires": { - "ansi-html": "0.0.7", + "ansi-html": "0.0.8", "html-entities": "^1.2.0", "querystring": "^0.2.0", "strip-ansi": "^3.0.0" @@ -82496,30 +75177,6 @@ } } }, - "webpack-log": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/webpack-log/-/webpack-log-2.0.0.tgz", - "integrity": "sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg==", - "dev": true, - "requires": { - "ansi-colors": "^3.0.0", - "uuid": "^3.3.2" - }, - "dependencies": { - "ansi-colors": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-3.2.4.tgz", - "integrity": "sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA==", - "dev": true - }, - "uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", - "dev": true - } - } - }, "webpack-manifest-plugin": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/webpack-manifest-plugin/-/webpack-manifest-plugin-4.0.2.tgz", @@ -82676,13 +75333,20 @@ } } }, - "which-module": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.1.tgz", - "integrity": "sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==", + "which-typed-array": { + "version": "1.1.19", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", + "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", "dev": true, - "optional": true, - "peer": true + "requires": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "for-each": "^0.3.5", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2" + } }, "wide-align": { "version": "1.1.3", @@ -83050,9 +75714,9 @@ } }, "ws": { - "version": "7.5.6", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.6.tgz", - "integrity": "sha512-6GLgCqo2cy2A2rjCNFlxQS6ZljG/coZfZXclldI8FB/1G3CCI36Zd8xy2HrFVACi8tfk5XrgLQEk+P0Tnz9UcA==", + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", + "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==", "requires": {} }, "xml-name-validator": { diff --git a/frontend/package.json b/frontend/package.json index e9b058397f8..8e2a8d8f68d 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -15,7 +15,7 @@ "dagre": "^0.8.2", "google-protobuf": "^3.11.2", "grpc-web": "^1.2.1", - "http-proxy-middleware": "^0.19.0", + 
"http-proxy-middleware": "^2.0.7", "immer": "^9.0.6", "js-yaml": "^3.14.1", "lodash": "^4.17.21", @@ -27,7 +27,7 @@ "pako": "^2.0.4", "portable-fetch": "^3.0.0", "proto3-json-serializer": "^0.1.6", - "protobufjs": "~6.11.2", + "protobufjs": "~6.11.4", "re-resizable": "^4.9.0", "react": "^16.12.0", "react-ace": "^7.0.2", @@ -113,7 +113,6 @@ "@types/enzyme-adapter-react-16": "^1.0.5", "@types/express": "^4.16.0", "@types/google-protobuf": "^3.7.2", - "@types/http-proxy-middleware": "^0.17.5", "@types/jest": "^27.5.1", "@types/js-yaml": "^3.12.3", "@types/lodash": ">=4.14.117", @@ -156,7 +155,33 @@ "nanoid": "3.3.8", "express": { "path-to-regexp": "0.1.12" - } + }, + "react-router": { + "path-to-regexp": "1.9.0" + }, + "duplexify": "4.1.3", + "ejs": "3.1.7", + "form-data": "2.5.4", + "json-schema": "0.4.0", + "ansi-html": "0.0.8", + "body-parser": "1.20.3", + "braces": "3.0.3", + "minimatch": "3.0.5", + "tmpl": "1.0.5", + "cross-spawn": "7.0.5", + "react-dev-utils": { + "ansi-regex": "5.0.1" + }, + "node-fetch": "2.6.7", + "@testing-library/dom": { + "ansi-regex": "5.0.1" + }, + "nth-check": "2.0.1", + "terser": "5.14.2", + "ws": "8.17.1", + "webpack-dev-middleware": "5.3.4", + "semver@>=7.0.0 <7.5.2": "7.6.0", + "micromatch": ">=4.0.8" }, "homepage": "./", "jest": { diff --git a/frontend/server/app.ts b/frontend/server/app.ts index 50d8565772c..08a960512eb 100644 --- a/frontend/server/app.ts +++ b/frontend/server/app.ts @@ -14,7 +14,7 @@ import path from 'path'; import express from 'express'; import { Application, static as StaticHandler } from 'express'; -import proxy from 'http-proxy-middleware'; +import { createProxyMiddleware } from 'http-proxy-middleware'; import { UIConfigs } from './configs'; import { getAddress } from './utils'; @@ -101,7 +101,7 @@ function createUIServer(options: UIConfigs) { const registerHandler = getRegisterHandler(app, basePath); /** log to stdout */ - app.use((req, _, next) => { + app.use((req, res, next) => { console.info(req.method + ' ' + req.originalUrl); next(); }); @@ -180,7 +180,7 @@ function createUIServer(options: UIConfigs) { if (options.artifacts.streamLogsFromServerApi) { app.all( '/k8s/pod/logs', - proxy({ + createProxyMiddleware({ changeOrigin: true, onProxyReq: proxyReq => { console.log('Proxied log request: ', proxyReq.path); @@ -206,7 +206,7 @@ function createUIServer(options: UIConfigs) { if (options.artifacts.streamLogsFromServerApi) { app.all( '/k8s/pod/logs', - proxy({ + createProxyMiddleware({ changeOrigin: true, onProxyReq: proxyReq => { console.log('Proxied log request: ', proxyReq.path); @@ -247,7 +247,7 @@ function createUIServer(options: UIConfigs) { /** Proxy metadata requests to the Envoy instance which will handle routing to the metadata gRPC server */ app.all( '/ml_metadata.*', - proxy({ + createProxyMiddleware({ changeOrigin: true, onProxyReq: proxyReq => { console.log('Metadata proxied request: ', (proxyReq as any).path); @@ -282,7 +282,7 @@ function createUIServer(options: UIConfigs) { /** Proxy to ml-pipeline api server */ app.all( `/${apiVersion1Prefix}/*`, - proxy({ + createProxyMiddleware({ changeOrigin: true, onProxyReq: proxyReq => { console.log('Proxied request: ', proxyReq.path); @@ -293,7 +293,7 @@ function createUIServer(options: UIConfigs) { ); app.all( `/${apiVersion2Prefix}/*`, - proxy({ + createProxyMiddleware({ changeOrigin: true, onProxyReq: proxyReq => { console.log('Proxied request: ', proxyReq.path); @@ -305,7 +305,7 @@ function createUIServer(options: UIConfigs) { app.all( 
`${basePath}/${apiVersion1Prefix}/*`, - proxy({ + createProxyMiddleware({ changeOrigin: true, onProxyReq: proxyReq => { console.log('Proxied request: ', proxyReq.path); @@ -318,7 +318,7 @@ function createUIServer(options: UIConfigs) { ); app.all( `${basePath}/${apiVersion2Prefix}/*`, - proxy({ + createProxyMiddleware({ changeOrigin: true, onProxyReq: proxyReq => { console.log('Proxied request: ', proxyReq.path); diff --git a/frontend/server/handlers/artifacts.ts b/frontend/server/handlers/artifacts.ts index 2f04e232b66..a8c4d0ada6d 100644 --- a/frontend/server/handlers/artifacts.ts +++ b/frontend/server/handlers/artifacts.ts @@ -19,8 +19,9 @@ import { createMinioClient, getObjectStream } from '../minio-helper'; import * as serverInfo from '../helpers/server-info'; import { Handler, Request, Response } from 'express'; import { Storage } from '@google-cloud/storage'; -import proxy from 'http-proxy-middleware'; +import { createProxyMiddleware } from 'http-proxy-middleware'; import { HACK_FIX_HPM_PARTIAL_RESPONSE_HEADERS } from '../consts'; +import { URL } from 'url'; import * as fs from 'fs'; import { isAllowedDomain } from './domain-checker'; @@ -94,9 +95,9 @@ export function getArtifactsHandler({ }): Handler { const { aws, http, minio, allowedDomain } = artifactsConfigs; return async (req, res) => { - const source = useParameter ? req.params.source : req.query.source; - const bucket = useParameter ? req.params.bucket : req.query.bucket; - const key = useParameter ? req.params[0] : req.query.key; + const source = (useParameter ? req.params.source : req.query.source) as string | undefined; + const bucket = (useParameter ? req.params.bucket : req.query.bucket) as string | undefined; + const key = (useParameter ? req.params[0] : req.query.key) as string | undefined; const { peek = 0, providerInfo = '', @@ -436,7 +437,7 @@ export function getArtifactsProxyHandler({ if (!enabled) { return (req, res, next) => next(); } - return proxy( + return createProxyMiddleware( (_pathname, req) => { // only proxy requests with namespace query parameter return !!getNamespaceFromUrl(req.url || ''); diff --git a/frontend/server/handlers/pod-info.ts b/frontend/server/handlers/pod-info.ts index 3008014b95a..96544eb3613 100644 --- a/frontend/server/handlers/pod-info.ts +++ b/frontend/server/handlers/pod-info.ts @@ -29,8 +29,8 @@ export const podInfoHandler: Handler = async (req, res) => { res.status(422).send('podnamespace argument is required'); return; } - const podName = decodeURIComponent(podname); - const podNamespace = decodeURIComponent(podnamespace); + const podName = decodeURIComponent(podname as string); + const podNamespace = decodeURIComponent(podnamespace as string); const [pod, err] = await k8sHelper.getPod(podName, podNamespace); if (err) { @@ -52,8 +52,8 @@ export const podEventsHandler: Handler = async (req, res) => { res.status(422).send('podnamespace argument is required'); return; } - const podName = decodeURIComponent(podname); - const podNamespace = decodeURIComponent(podnamespace); + const podName = decodeURIComponent(podname as string); + const podNamespace = decodeURIComponent(podnamespace as string); const [eventList, err] = await k8sHelper.listPodEvents(podName, podNamespace); if (err) { diff --git a/frontend/server/handlers/pod-logs.ts b/frontend/server/handlers/pod-logs.ts index f4acbf4e1cc..cf2c3633f57 100644 --- a/frontend/server/handlers/pod-logs.ts +++ b/frontend/server/handlers/pod-logs.ts @@ -75,12 +75,12 @@ export function getPodLogsHandler( res.status(400).send('podname argument 
is required'); return; } - const podName = decodeURIComponent(req.query.podname); - const createdAt = decodeURIComponent(req.query.createdat); + const podName = decodeURIComponent(req.query.podname as string); + const createdAt = decodeURIComponent((req.query.createdat as string) || ''); // This is optional. // Note decodeURIComponent(undefined) === 'undefined', so I cannot pass the argument directly. - const podNamespace = decodeURIComponent(req.query.podnamespace || '') || undefined; + const podNamespace = decodeURIComponent((req.query.podnamespace as string) || '') || undefined; try { const stream = await getPodLogsStream(podName, createdAt, podNamespace); diff --git a/frontend/server/handlers/tensorboard.ts b/frontend/server/handlers/tensorboard.ts index 5891bfa7ea4..cffbef58727 100644 --- a/frontend/server/handlers/tensorboard.ts +++ b/frontend/server/handlers/tensorboard.ts @@ -42,7 +42,7 @@ export const getTensorboardHandlers = ( { verb: AuthorizeRequestVerb.GET, resources: AuthorizeRequestResources.VIEWERS, - namespace, + namespace: namespace as string, }, req, ); @@ -50,7 +50,7 @@ export const getTensorboardHandlers = ( res.status(401).send(authError.message); return; } - res.send(await k8sHelper.getTensorboardInstance(logdir, namespace)); + res.send(await k8sHelper.getTensorboardInstance(logdir as string, namespace as string)); } catch (err) { const details = await parseError(err); console.error(`Failed to list Tensorboard pods: ${details.message}`, details.additionalInfo); @@ -90,7 +90,7 @@ export const getTensorboardHandlers = ( let podTemplateSpec: any | undefined; if (podTemplateSpecRaw) { try { - podTemplateSpec = JSON.parse(podTemplateSpecRaw); + podTemplateSpec = JSON.parse(podTemplateSpecRaw as string); } catch (err) { res.status(400).send(`podtemplatespec is not valid JSON: ${err}`); return; @@ -102,7 +102,7 @@ export const getTensorboardHandlers = ( { verb: AuthorizeRequestVerb.CREATE, resources: AuthorizeRequestResources.VIEWERS, - namespace, + namespace: namespace as string, }, req, ); @@ -111,15 +111,15 @@ export const getTensorboardHandlers = ( return; } await k8sHelper.newTensorboardInstance( - logdir, - namespace, - image || tensorboardConfig.tfImageName, - tfversion, + logdir as string, + namespace as string, + (image || tensorboardConfig.tfImageName) as string, + (tfversion as string) || '', podTemplateSpec || tensorboardConfig.podTemplateSpec, ); const tensorboardAddress = await k8sHelper.waitForTensorboardInstance( - logdir, - namespace, + logdir as string, + namespace as string, 60 * 1000, ); res.send(tensorboardAddress); @@ -150,7 +150,7 @@ export const getTensorboardHandlers = ( { verb: AuthorizeRequestVerb.DELETE, resources: AuthorizeRequestResources.VIEWERS, - namespace, + namespace: namespace as string, }, req, ); @@ -158,7 +158,7 @@ export const getTensorboardHandlers = ( res.status(401).send(authError.message); return; } - await k8sHelper.deleteTensorboardInstance(logdir, namespace); + await k8sHelper.deleteTensorboardInstance(logdir as string, namespace as string); res.send('Tensorboard deleted.'); } catch (err) { const details = await parseError(err); diff --git a/frontend/server/integration-tests/artifact-proxy.test.ts b/frontend/server/integration-tests/artifact-proxy.test.ts index bd3387d614b..9c9c029b223 100644 --- a/frontend/server/integration-tests/artifact-proxy.test.ts +++ b/frontend/server/integration-tests/artifact-proxy.test.ts @@ -48,7 +48,7 @@ describe('/artifacts/get namespaced proxy', () => { const receivedUrls: string[] = []; const 
artifactService = express(); const response = `artifact service in ${namespace}`; - artifactService.all('/*', (req, res) => { + artifactService.use((req, res) => { receivedUrls.push(req.url); res.status(200).send(response); }); diff --git a/frontend/server/k8s-helper.ts b/frontend/server/k8s-helper.ts index 26e22f5ab02..190adb50529 100644 --- a/frontend/server/k8s-helper.ts +++ b/frontend/server/k8s-helper.ts @@ -17,7 +17,6 @@ import { CustomObjectsApi, KubeConfig, V1Pod, - V1EventList, V1ConfigMap, } from '@kubernetes/client-node'; import * as crypto from 'crypto-js'; @@ -296,10 +295,7 @@ export async function getConfigMap( // Golang style result type including an error. export type Result = [T, undefined] | [undefined, E]; -export async function listPodEvents( - podName: string, - podNamespace: string, -): Promise> { +export async function listPodEvents(podName: string, podNamespace: string): Promise> { try { const { body } = await k8sV1Client.listNamespacedEvent( podNamespace, diff --git a/frontend/server/minio-helper.test.ts b/frontend/server/minio-helper.test.ts index 554c81020e5..4eb00bceea6 100644 --- a/frontend/server/minio-helper.test.ts +++ b/frontend/server/minio-helper.test.ts @@ -14,7 +14,14 @@ import * as zlib from 'zlib'; import { PassThrough } from 'stream'; import { Client as MinioClient } from 'minio'; -import { createMinioClient, isTarball, maybeTarball, getObjectStream } from './minio-helper'; +import { + createMinioClient, + isTarball, + maybeTarball, + getObjectStream, + MinioClientOptionsWithOptionalSecrets, + Credentials, +} from './minio-helper'; const { fromNodeProviderChain } = require('@aws-sdk/credential-providers'); jest.mock('minio'); @@ -47,6 +54,34 @@ describe('minio-helper', () => { }); }); + it('Builds a client where credentials are resolved using a custom provider.', async () => { + const provider = async (): Promise => { + return { + accessKeyId: 'providedKey', + secretAccessKey: 'providedSecret', + sessionToken: 'providedToken', + }; + }; + + const client = await createMinioClient( + { + endPoint: 'minio.kubeflow:80', + }, + 's3', + '', + '', + provider, + ); + + expect(client).toBeInstanceOf(MinioClient); + expect(MockedMinioClient).toHaveBeenCalledWith({ + accessKey: 'providedKey', + endPoint: 'minio.kubeflow:80', + secretKey: 'providedSecret', + sessionToken: 'providedToken', + }); + }); + it('fallbacks to the provided configs if EC2 metadata is not available.', async () => { const client = await createMinioClient( { @@ -149,7 +184,7 @@ describe('minio-helper', () => { mockedMinioGetObject = minioClient.getObject as any; }); - it('unpacks a gzipped tarball', async done => { + it('unpacks a gzipped tarball', async () => { const objStream = new PassThrough(); objStream.end(tarGzBuffer); mockedMinioGetObject.mockResolvedValueOnce(Promise.resolve(objStream)); @@ -163,11 +198,10 @@ describe('minio-helper', () => { .toString() .trim(), ).toBe('hello world'); - done(); }); }); - it('unpacks a uncompressed tarball', async done => { + it('unpacks a uncompressed tarball', async () => { const objStream = new PassThrough(); objStream.end(tarBuffer); mockedMinioGetObject.mockResolvedValueOnce(Promise.resolve(objStream)); @@ -181,11 +215,10 @@ describe('minio-helper', () => { .toString() .trim(), ).toBe('hello world'); - done(); }); }); - it('returns the content as a stream', async done => { + it('returns the content as a stream', async () => { const objStream = new PassThrough(); objStream.end('hello world'); 
mockedMinioGetObject.mockResolvedValueOnce(Promise.resolve(objStream)); @@ -199,7 +232,6 @@ describe('minio-helper', () => { .toString() .trim(), ).toBe('hello world'); - done(); }); }); }); diff --git a/frontend/server/minio-helper.ts b/frontend/server/minio-helper.ts index 9295f6cbcf6..be44862c053 100644 --- a/frontend/server/minio-helper.ts +++ b/frontend/server/minio-helper.ts @@ -36,6 +36,12 @@ export interface MinioClientOptionsWithOptionalSecrets extends Partial Promise | Credentials, ) { + if (customCredentialProvider) { + try { + const creds = await customCredentialProvider(); + + if (creds && creds.accessKeyId && creds.secretAccessKey) { + return new MinioClient({ + ...config, + accessKey: creds.accessKeyId, + secretKey: creds.secretAccessKey, + sessionToken: creds.sessionToken, + }); + } else { + console.warn( + 'Custom credential resolver returned incomplete credentials, falling back to default chain', + ); + } + } catch (error) { + console.error('Custom credential resolver failed:', error); + console.warn('Falling back to default credential resolution chain'); + } + } + if (providerInfoString) { const providerInfo = parseJSONString(providerInfoString); if (!providerInfo) { @@ -82,7 +111,7 @@ export async function createMinioClient( // AWS S3 with credentials from provider chain if (isAWSS3Endpoint(config.endPoint)) { try { - const credentials = fromNodeProviderChain(); + const credentials = fromNodeProviderChain({ ignoreCache: true }); const awsCredentials = await credentials(); if (awsCredentials) { const { diff --git a/frontend/server/package-lock.json b/frontend/server/package-lock.json index 76959df2ccc..316feaa71ea 100644 --- a/frontend/server/package-lock.json +++ b/frontend/server/package-lock.json @@ -7,15 +7,17 @@ "dependencies": { "@aws-sdk/credential-providers": "^3.621.0", "@google-cloud/storage": "^2.5.0", - "@kubernetes/client-node": "^0.12.1", - "axios": ">=1.8.2", - "crypto-js": "^3.1.8", - "express": "^4.21.0", + "@kubernetes/client-node": "^0.16.3", + "axios": ">=1.12.0", + "crypto-js": "^4.2.0", + "express": "^5.1.0", + "form-data": "^2.5.4", "gunzip-maybe": "^1.4.1", - "http-proxy-middleware": "^0.18.0", + "http-proxy-middleware": "^2.0.7", + "js-yaml": "^4.1.0", "lodash": ">=4.17.21", "minio": "~8.0.3", - "node-fetch": "^2.6.7", + "node-fetch": "2.6.7", "peek-stream": "^1.1.3", "portable-fetch": "^3.0.0", "tar-stream": "^2.1.0" @@ -24,19 +26,31 @@ "@types/crypto-js": "^3.1.43", "@types/express": "^4.11.1", "@types/gunzip-maybe": "^1.4.0", - "@types/http-proxy-middleware": "^0.19.3", - "@types/jest": "^24.9.1", + "@types/jest": "^27.0.0", "@types/node": "^14.14.20", "@types/node-fetch": "^2.1.2", "@types/supertest": "^2.0.8", "@types/tar": "^4.0.3", "@types/tar-stream": "^1.6.1", - "jest": "^25.3.0", + "jest": "^27.0.0", "prettier": "1.19.1", "supertest": "^4.0.2", - "ts-jest": "^25.2.1", + "ts-jest": "^27.0.0", "tslint": "^5.20.1", - "typescript": "^3.6.4" + "typescript": "^4.9.5" + } + }, + "node_modules/@ampproject/remapping": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "dev": true, + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" } }, "node_modules/@aws-crypto/sha256-browser": { @@ -803,36 +817,49 @@ "integrity": 
"sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==" }, "node_modules/@babel/code-frame": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.8.3.tgz", - "integrity": "sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", + "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", "dev": true, "dependencies": { - "@babel/highlight": "^7.8.3" + "@babel/helper-validator-identifier": "^7.27.1", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.0.tgz", + "integrity": "sha512-60X7qkglvrap8mn1lh2ebxXdZYtUcpd7gsmy9kLaBJ4i/WdY8PqTSdxyA8qraikqKQK5C1KRBKXqznrVapyNaw==", + "dev": true, + "engines": { + "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.9.0", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.9.0.tgz", - "integrity": "sha512-kWc7L0fw1xwvI0zi8OKVBuxRVefwGOrKSQMvrQ3dW+bIIavBY3/NpXmpjMy7bQnLgwgzWQZ8TlM57YHpHNHz4w==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.8.3", - "@babel/generator": "^7.9.0", - "@babel/helper-module-transforms": "^7.9.0", - "@babel/helpers": "^7.9.0", - "@babel/parser": "^7.9.0", - "@babel/template": "^7.8.6", - "@babel/traverse": "^7.9.0", - "@babel/types": "^7.9.0", - "convert-source-map": "^1.7.0", + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.0.tgz", + "integrity": "sha512-UlLAnTPrFdNGoFtbSXwcGFQBtQZJCNjaN6hQNP3UPvuNXT1i82N26KL3dZeIpNalWywr9IuQuncaAfUaS1g6sQ==", + "dev": true, + "dependencies": { + "@ampproject/remapping": "^2.2.0", + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.0", + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-module-transforms": "^7.27.3", + "@babel/helpers": "^7.27.6", + "@babel/parser": "^7.28.0", + "@babel/template": "^7.27.2", + "@babel/traverse": "^7.28.0", + "@babel/types": "^7.28.0", + "convert-source-map": "^2.0.0", "debug": "^4.1.0", - "gensync": "^1.0.0-beta.1", - "json5": "^2.1.2", - "lodash": "^4.17.13", - "resolve": "^1.3.2", - "semver": "^5.4.1", - "source-map": "^0.5.0" + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" }, "engines": { "node": ">=6.9.0" @@ -842,211 +869,190 @@ "url": "https://opencollective.com/babel" } }, + "node_modules/@babel/core/node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true + }, "node_modules/@babel/core/node_modules/debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", - "deprecated": "Debug versions >=3.2.0 <3.2.7 || >=4 <4.3.1 have a low-severity ReDos regression when used in a Node.js environment. It is recommended you upgrade to 3.2.7 or 4.3.1. 
(https://github.com/visionmedia/debug/issues/797)", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", "dev": true, "dependencies": { - "ms": "^2.1.1" + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } } }, - "node_modules/@babel/core/node_modules/source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "node_modules/@babel/core/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, - "engines": { - "node": ">=0.10.0" + "bin": { + "semver": "bin/semver.js" } }, "node_modules/@babel/generator": { - "version": "7.9.5", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.9.5.tgz", - "integrity": "sha512-GbNIxVB3ZJe3tLeDm1HSn2AhuD/mVcyLDpgtLXa5tplmWrJdF/elxB56XNqCuD6szyNkDi6wuoKXln3QeBmCHQ==", + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.0.tgz", + "integrity": "sha512-lJjzvrbEeWrhB4P3QBsH7tey117PjLZnDbLiQEKjQ/fNJTjuq4HSqgFA+UNSwZT8D7dxxbnuSBMsa1lrWzKlQg==", "dev": true, "dependencies": { - "@babel/types": "^7.9.5", - "jsesc": "^2.5.1", - "lodash": "^4.17.13", - "source-map": "^0.5.0" - } - }, - "node_modules/@babel/generator/node_modules/source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/@babel/helper-environment-visitor": { - "version": "7.22.20", - "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz", - "integrity": "sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA==", - "dev": true, + "@babel/parser": "^7.28.0", + "@babel/types": "^7.28.0", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/helper-hoist-variables": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz", - "integrity": "sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==", + "node_modules/@babel/helper-compilation-targets": { + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", + "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", "dev": true, "dependencies": { - "@babel/types": "^7.22.5" + "@babel/compat-data": "^7.27.2", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/helper-hoist-variables/node_modules/@babel/helper-validator-identifier": { - "version": "7.22.20", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz", - "integrity": 
"sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==", + "node_modules/@babel/helper-compilation-targets/node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "dev": true, - "engines": { - "node": ">=6.9.0" + "dependencies": { + "yallist": "^3.0.2" } }, - "node_modules/@babel/helper-hoist-variables/node_modules/@babel/types": { - "version": "7.23.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.0.tgz", - "integrity": "sha512-0oIyUfKoI3mSqMvsxBdclDwxXKXAUA8v/apZbc+iSyARYou1o8ZGDxbUYyLFoW2arqS2jDGqJuZvv1d/io1axg==", + "node_modules/@babel/helper-compilation-targets/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.22.5", - "@babel/helper-validator-identifier": "^7.22.20", - "to-fast-properties": "^2.0.0" - }, - "engines": { - "node": ">=6.9.0" + "bin": { + "semver": "bin/semver.js" } }, - "node_modules/@babel/helper-member-expression-to-functions": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.8.3.tgz", - "integrity": "sha512-fO4Egq88utkQFjbPrSHGmGLFqmrshs11d46WI+WZDESt7Wu7wN2G2Iu+NMMZJFDOVRHAMIkB5SNh30NtwCA7RA==", + "node_modules/@babel/helper-compilation-targets/node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", "dev": true, - "dependencies": { - "@babel/types": "^7.8.3" + "engines": { + "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-imports": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.8.3.tgz", - "integrity": "sha512-R0Bx3jippsbAEtzkpZ/6FIiuzOURPcMjHp+Z6xPe6DtApDJx+w7UYyOLanZqO8+wKR9G10s/FmHXvxaMd9s6Kg==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", + "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", "dev": true, "dependencies": { - "@babel/types": "^7.8.3" + "@babel/traverse": "^7.27.1", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.9.0", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.9.0.tgz", - "integrity": "sha512-0FvKyu0gpPfIQ8EkxlrAydOWROdHpBmiCiRwLkUiBGhCUPRRbVD2/tm3sFr/c/GWFrQ/ffutGUAnx7V0FzT2wA==", - "dev": true, - "dependencies": { - "@babel/helper-module-imports": "^7.8.3", - "@babel/helper-replace-supers": "^7.8.6", - "@babel/helper-simple-access": "^7.8.3", - "@babel/helper-split-export-declaration": "^7.8.3", - "@babel/template": 
"^7.8.6", - "@babel/types": "^7.9.0", - "lodash": "^4.17.13" - } - }, - "node_modules/@babel/helper-optimise-call-expression": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.8.3.tgz", - "integrity": "sha512-Kag20n86cbO2AvHca6EJsvqAd82gc6VMGule4HwebwMlwkpXuVqrNRj6CkCV2sKxgi9MyAUnZVnZ6lJ1/vKhHQ==", + "version": "7.27.3", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.27.3.tgz", + "integrity": "sha512-dSOvYwvyLsWBeIRyOeHXp5vPj5l1I011r52FM1+r1jCERv+aFXYk4whgQccYEGYxK2H3ZAIA8nuPkQ0HaUo3qg==", "dev": true, "dependencies": { - "@babel/types": "^7.8.3" + "@babel/helper-module-imports": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1", + "@babel/traverse": "^7.27.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" } }, "node_modules/@babel/helper-plugin-utils": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.8.3.tgz", - "integrity": "sha512-j+fq49Xds2smCUNYmEHF9kGNkhbet6yVIBp4e6oeQpH1RUs/Ir06xUKzDjDkGcaaokPiTNs2JBWHjaE4csUkZQ==", - "dev": true - }, - "node_modules/@babel/helper-replace-supers": { - "version": "7.8.6", - "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.8.6.tgz", - "integrity": "sha512-PeMArdA4Sv/Wf4zXwBKPqVj7n9UF/xg6slNRtZW84FM7JpE1CbG8B612FyM4cxrf4fMAMGO0kR7voy1ForHHFA==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz", + "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==", "dev": true, - "dependencies": { - "@babel/helper-member-expression-to-functions": "^7.8.3", - "@babel/helper-optimise-call-expression": "^7.8.3", - "@babel/traverse": "^7.8.6", - "@babel/types": "^7.8.6" + "engines": { + "node": ">=6.9.0" } }, - "node_modules/@babel/helper-simple-access": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.8.3.tgz", - "integrity": "sha512-VNGUDjx5cCWg4vvCTR8qQ7YJYZ+HBjxOgXEl7ounz+4Sn7+LMD3CFrCTEU6/qXKbA2nKg21CwhhBzO0RpRbdCw==", + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", "dev": true, - "dependencies": { - "@babel/template": "^7.8.3", - "@babel/types": "^7.8.3" + "engines": { + "node": ">=6.9.0" } }, - "node_modules/@babel/helper-split-export-declaration": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.8.3.tgz", - "integrity": "sha512-3x3yOeyBhW851hroze7ElzdkeRXQYQbFIb7gLK1WQYsw2GWDay5gAJNw1sWJ0VFP6z5J1whqeXH/WCdCjZv6dA==", + "node_modules/@babel/helper-validator-identifier": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", + "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", "dev": true, - "dependencies": { - "@babel/types": "^7.8.3" + "engines": { + "node": ">=6.9.0" } }, - "node_modules/@babel/helper-string-parser": { - "version": "7.22.5", - "resolved": 
"https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz", - "integrity": "sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw==", + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", "dev": true, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/helper-validator-identifier": { - "version": "7.9.5", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.9.5.tgz", - "integrity": "sha512-/8arLKUFq882w4tWGj9JYzRpAlZgiWUJ+dtteNTDqrRBz9Iguck9Rn3ykuBDoUwh2TO4tSAJlrxDUOXWklJe4g==", - "dev": true - }, "node_modules/@babel/helpers": { - "version": "7.9.2", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.9.2.tgz", - "integrity": "sha512-JwLvzlXVPjO8eU9c/wF9/zOIN7X6h8DYf7mG4CiFRZRvZNKEF5dQ3H3V+ASkHoIB3mWhatgl5ONhyqHRI6MppA==", - "dev": true, - "dependencies": { - "@babel/template": "^7.8.3", - "@babel/traverse": "^7.9.0", - "@babel/types": "^7.9.0" - } - }, - "node_modules/@babel/highlight": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.8.3.tgz", - "integrity": "sha512-PX4y5xQUvy0fnEVHrYOarRPXVWafSjTW9T0Hab8gVIawpl2Sj0ORyrygANq+KjcNlSSTw0YCLSNA8OyZ1I4yEg==", + "version": "7.27.6", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.27.6.tgz", + "integrity": "sha512-muE8Tt8M22638HU31A3CgfSUciwz1fhATfoVai05aPXGor//CdWDCbnlY1yvBPo07njuVOCNGCSp/GTt12lIug==", "dev": true, "dependencies": { - "chalk": "^2.0.0", - "esutils": "^2.0.2", - "js-tokens": "^4.0.0" + "@babel/template": "^7.27.2", + "@babel/types": "^7.27.6" + }, + "engines": { + "node": ">=6.9.0" } }, "node_modules/@babel/parser": { - "version": "7.9.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.9.4.tgz", - "integrity": "sha512-bC49otXX6N0/VYhgOMh4gnP26E9xnDZK3TmbNpxYzzz9BQLBosQwfyOe9/cXUU3txYhTzLCbcqd5c8y/OmCjHA==", + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.0.tgz", + "integrity": "sha512-jVZGvOxOuNSsuQuLRTh13nU0AogFlw32w/MT+LV6D3sP5WdbW61E77RnkbaO2dUvmPAYrBDJXGn5gGS6tH4j8g==", "dev": true, + "dependencies": { + "@babel/types": "^7.28.0" + }, "bin": { "parser": "bin/babel-parser.js" }, @@ -1079,12 +1085,54 @@ } }, "node_modules/@babel/plugin-syntax-class-properties": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.8.3.tgz", - "integrity": "sha512-UcAyQWg2bAN647Q+O811tG9MrJ38Z10jjhQdKNAL8fsyPzE3cCN/uT+f55cFVY4aGO4jqJAvmqsuY3GQDwAoXg==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.12.13" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-static-block": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", + "integrity": 
"sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-attributes": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.27.1.tgz", + "integrity": "sha512-oFT0FrKHgF53f4vOsZGi2Hh3I35PfSmVs4IBFLFj4dnafP+hIWDLg3VyKmUHfLoLHlyxY4C7DGtmHuJgn+IGww==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-meta": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.8.3" + "@babel/helper-plugin-utils": "^7.10.4" }, "peerDependencies": { "@babel/core": "^7.0.0-0" @@ -1103,12 +1151,12 @@ } }, "node_modules/@babel/plugin-syntax-logical-assignment-operators": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.8.3.tgz", - "integrity": "sha512-Zpg2Sgc++37kuFl6ppq2Q7Awc6E6AIW671x5PY8E/f7MCIyPPGK/EoeZXvvY3P42exZ3Q4/t3YOzP/HiN79jDg==", + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.8.3" + "@babel/helper-plugin-utils": "^7.10.4" }, "peerDependencies": { "@babel/core": "^7.0.0-0" @@ -1127,12 +1175,12 @@ } }, "node_modules/@babel/plugin-syntax-numeric-separator": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.8.3.tgz", - "integrity": "sha512-H7dCMAdN83PcCmqmkHB5dtp+Xa9a6LKSvA2hiFBC/5alSHxM5VgWZXFqDi0YFe8XNGT6iCa+z4V4zSt/PdZ7Dw==", + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.8.3" + "@babel/helper-plugin-utils": "^7.10.4" }, "peerDependencies": { "@babel/core": "^7.0.0-0" @@ -1174,161 +1222,183 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/template": { - "version": "7.8.6", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.8.6.tgz", - "integrity": "sha512-zbMsPMy/v0PWFZEhQJ66bqjhH+z0JgMoBWuikXybgG3Gkd/3t5oQ1Rw2WQhnSrsOmsKXnZOx15tkC4qON/+JPg==", + "node_modules/@babel/plugin-syntax-private-property-in-object": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", + "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", "dev": true, 
"dependencies": { - "@babel/code-frame": "^7.8.3", - "@babel/parser": "^7.8.6", - "@babel/types": "^7.8.6" - } - }, - "node_modules/@babel/traverse": { - "version": "7.23.2", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.23.2.tgz", - "integrity": "sha512-azpe59SQ48qG6nu2CzcMLbxUudtN+dOM9kDbUqGq3HXUJRlo7i8fvPoxQUzYgLZ4cMVmuZgm8vvBpNeRhd6XSw==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.22.13", - "@babel/generator": "^7.23.0", - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-function-name": "^7.23.0", - "@babel/helper-hoist-variables": "^7.22.5", - "@babel/helper-split-export-declaration": "^7.22.6", - "@babel/parser": "^7.23.0", - "@babel/types": "^7.23.0", - "debug": "^4.1.0", - "globals": "^11.1.0" + "@babel/helper-plugin-utils": "^7.14.5" }, "engines": { "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/traverse/node_modules/@babel/code-frame": { - "version": "7.22.13", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.13.tgz", - "integrity": "sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w==", + "node_modules/@babel/plugin-syntax-top-level-await": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", "dev": true, "dependencies": { - "@babel/highlight": "^7.22.13", - "chalk": "^2.4.2" + "@babel/helper-plugin-utils": "^7.14.5" }, "engines": { "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/traverse/node_modules/@babel/generator": { - "version": "7.23.0", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.23.0.tgz", - "integrity": "sha512-lN85QRR+5IbYrMWM6Y4pE/noaQtg4pNiqeNGX60eqOfo6gtEj6uw/JagelB8vVztSd7R6M5n1+PQkDbHbBRU4g==", + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.27.1.tgz", + "integrity": "sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ==", "dev": true, "dependencies": { - "@babel/types": "^7.23.0", - "@jridgewell/gen-mapping": "^0.3.2", - "@jridgewell/trace-mapping": "^0.3.17", - "jsesc": "^2.5.1" + "@babel/helper-plugin-utils": "^7.27.1" }, "engines": { "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/traverse/node_modules/@babel/helper-function-name": { - "version": "7.23.0", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz", - "integrity": "sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw==", + "node_modules/@babel/template": { + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", + "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", "dev": true, "dependencies": { - "@babel/template": "^7.22.15", - "@babel/types": "^7.23.0" + "@babel/code-frame": "^7.27.1", + "@babel/parser": "^7.27.2", + "@babel/types": "^7.27.1" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/traverse/node_modules/@babel/helper-split-export-declaration": { - "version": "7.22.6", - 
"resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz", - "integrity": "sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g==", + "node_modules/@babel/traverse": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.0.tgz", + "integrity": "sha512-mGe7UK5wWyh0bKRfupsUchrQGqvDbZDbKJw+kcRGSmdHVYrv+ltd0pnpDTVpiTqnaBru9iEvA8pz8W46v0Amwg==", "dev": true, "dependencies": { - "@babel/types": "^7.22.5" + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.0", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.0", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.0", + "debug": "^4.3.1" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/traverse/node_modules/@babel/helper-validator-identifier": { - "version": "7.22.20", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz", - "integrity": "sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==", + "node_modules/@babel/traverse/node_modules/debug": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", "dev": true, + "dependencies": { + "ms": "^2.1.3" + }, "engines": { - "node": ">=6.9.0" + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } } }, - "node_modules/@babel/traverse/node_modules/@babel/highlight": { - "version": "7.22.20", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.22.20.tgz", - "integrity": "sha512-dkdMCN3py0+ksCgYmGG8jKeGA/8Tk+gJwSYYlFGxG5lmhfKNoAy004YpLxpS1W2J8m/EK2Ew+yOs9pVRwO89mg==", + "node_modules/@babel/types": { + "version": "7.28.1", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.1.tgz", + "integrity": "sha512-x0LvFTekgSX+83TI28Y9wYPUfzrnl2aT5+5QLnO6v7mSJYtEEevuDRN0F0uSHRk1G1IWZC43o00Y0xDDrpBGPQ==", "dev": true, "dependencies": { - "@babel/helper-validator-identifier": "^7.22.20", - "chalk": "^2.4.2", - "js-tokens": "^4.0.0" + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/traverse/node_modules/@babel/parser": { - "version": "7.23.0", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.0.tgz", - "integrity": "sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw==", - "dev": true, - "bin": { - "parser": "bin/babel-parser.js" + "node_modules/@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true + }, + "node_modules/@google-cloud/common": { + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.10.0.tgz", + "integrity": "sha512-XMbJYMh/ZSaZnbnrrOFfR/oQrb0SxG4qh6hDisWCoEbFcBHV0qHQo4uXfeMCzolx2Mfkh6VDaOGg+hyJsmxrlw==", + "license": "Apache-2.0", + "dependencies": { + "@google-cloud/projectify": "^2.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^7.14.0", + "retry-request": "^4.2.2", 
+ "teeny-request": "^7.0.0" }, "engines": { - "node": ">=6.0.0" + "node": ">=10" } }, - "node_modules/@babel/traverse/node_modules/@babel/template": { - "version": "7.22.15", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.15.tgz", - "integrity": "sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.22.13", - "@babel/parser": "^7.22.15", - "@babel/types": "^7.22.15" - }, + "node_modules/@google-cloud/common/node_modules/@google-cloud/promisify": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.4.tgz", + "integrity": "sha512-j8yRSSqswWi1QqUGKVEKOG03Q7qOoZP6/h2zN2YO+F5h2+DHU0bSrHCK9Y7lo2DI9fBd8qGAw795sf+3Jva4yA==", + "license": "Apache-2.0", "engines": { - "node": ">=6.9.0" + "node": ">=10" } }, - "node_modules/@babel/traverse/node_modules/@babel/types": { - "version": "7.23.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.0.tgz", - "integrity": "sha512-0oIyUfKoI3mSqMvsxBdclDwxXKXAUA8v/apZbc+iSyARYou1o8ZGDxbUYyLFoW2arqS2jDGqJuZvv1d/io1axg==", - "dev": true, + "node_modules/@google-cloud/common/node_modules/@tootallnate/once": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", + "engines": { + "node": ">= 10" + } + }, + "node_modules/@google-cloud/common/node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "license": "MIT", "dependencies": { - "@babel/helper-string-parser": "^7.22.5", - "@babel/helper-validator-identifier": "^7.22.20", - "to-fast-properties": "^2.0.0" + "debug": "4" }, "engines": { - "node": ">=6.9.0" + "node": ">= 6.0.0" } }, - "node_modules/@babel/traverse/node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dev": true, + "node_modules/@google-cloud/common/node_modules/arrify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", + "engines": { + "node": ">=8" + } + }, + "node_modules/@google-cloud/common/node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", "dependencies": { - "ms": "2.1.2" + "ms": "^2.1.3" }, "engines": { "node": ">=6.0" @@ -1339,66 +1409,55 @@ } } }, - "node_modules/@babel/types": { - "version": "7.9.5", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", - "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", - "dev": true, + "node_modules/@google-cloud/common/node_modules/http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": 
"sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", "dependencies": { - "@babel/helper-validator-identifier": "^7.9.5", - "lodash": "^4.17.13", - "to-fast-properties": "^2.0.0" + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" } }, - "node_modules/@bcoe/v8-coverage": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", - "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", - "dev": true - }, - "node_modules/@cnakazawa/watch": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@cnakazawa/watch/-/watch-1.0.4.tgz", - "integrity": "sha512-v9kIhKwjeZThiWrLmj0y17CWoyddASLj9O2yvbZkbvw/N3rWOYy9zkV66ursAoVr0mV15bL8g0c4QZUE6cdDoQ==", - "dev": true, + "node_modules/@google-cloud/common/node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "license": "MIT", "dependencies": { - "exec-sh": "^0.3.2", - "minimist": "^1.2.0" - }, - "bin": { - "watch": "cli.js" + "agent-base": "6", + "debug": "4" }, "engines": { - "node": ">=0.1.95" + "node": ">= 6" } }, - "node_modules/@google-cloud/common": { - "version": "0.32.1", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-0.32.1.tgz", - "integrity": "sha512-bLdPzFvvBMtVkwsoBtygE9oUm3yrNmPa71gvOgucYI/GqvNP2tb6RYsDHPq98kvignhcgHGDI5wyNgxaCo8bKQ==", + "node_modules/@google-cloud/common/node_modules/teeny-request": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.2.0.tgz", + "integrity": "sha512-SyY0pek1zWsi0LRVAALem+avzMLc33MKW/JLLakdP4s9+D7+jHcy5x6P+h94g2QNZsAqQNfX5lsbd3WSeJXrrw==", + "license": "Apache-2.0", "dependencies": { - "@google-cloud/projectify": "^0.3.3", - "@google-cloud/promisify": "^0.4.0", - "@types/request": "^2.48.1", - "arrify": "^2.0.0", - "duplexify": "^3.6.0", - "ent": "^2.2.0", - "extend": "^3.0.2", - "google-auth-library": "^3.1.1", - "pify": "^4.0.1", - "retry-request": "^4.0.0", - "teeny-request": "^3.11.3" + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": "^8.0.0" }, "engines": { - "node": ">=6.0.0" + "node": ">=10" } }, - "node_modules/@google-cloud/common/node_modules/arrify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", - "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", - "engines": { - "node": ">=8" + "node_modules/@google-cloud/common/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" } }, "node_modules/@google-cloud/paginator": { @@ -1413,10 +1472,14 @@ } }, "node_modules/@google-cloud/projectify": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-0.3.3.tgz", - "integrity": "sha512-7522YHQ4IhaafgSunsFF15nG0TGVmxgXidy9cITMe+256RgqfcrfWphiMufW+Ou4kqagW/u3yxwbzVEW3dk2Uw==" - }, + "version": "2.1.1", + "resolved": 
"https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.1.1.tgz", + "integrity": "sha512-+rssMZHnlh0twl122gXY4/aCrk0G1acBqkHFfYddtsqpYXGxA29nj9V5V9SfC+GyOG00l650f6lG9KL+EpFEWQ==", + "license": "Apache-2.0", + "engines": { + "node": ">=10" + } + }, "node_modules/@google-cloud/promisify": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-0.4.0.tgz", @@ -1453,14 +1516,24 @@ "node": ">=6.0.0" } }, + "node_modules/@google-cloud/storage/node_modules/async": { + "version": "2.6.4", + "resolved": "https://registry.npmjs.org/async/-/async-2.6.4.tgz", + "integrity": "sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==", + "license": "MIT", + "dependencies": { + "lodash": "^4.17.14" + } + }, "node_modules/@istanbuljs/load-nyc-config": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.0.0.tgz", - "integrity": "sha512-ZR0rq/f/E4f4XcgnDvtMWXCUJpi8eO0rssVhmztsZqLIEFA9UUP9zmpE0VxlM+kv/E1ul2I876Fwil2ayptDVg==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", "dev": true, "dependencies": { "camelcase": "^5.3.1", "find-up": "^4.1.0", + "get-package-type": "^0.1.0", "js-yaml": "^3.13.1", "resolve-from": "^5.0.0" }, @@ -1468,37 +1541,62 @@ "node": ">=8" } }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "license": "MIT", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, "node_modules/@istanbuljs/schema": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.2.tgz", - "integrity": "sha512-tsAQNx32a8CoFhjhijUIhI4kccIAgmGhy8LZMZgGfmXcpMbPRUqn5LWmgRttILi6yeGmBJd2xsPkFMs0PzgPCw==", + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", "dev": true, "engines": { "node": ">=8" } }, "node_modules/@jest/console": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/console/-/console-25.3.0.tgz", - "integrity": "sha512-LvSDNqpmZIZyweFaEQ6wKY7CbexPitlsLHGJtcooNECo0An/w49rFhjCJzu6efeb6+a3ee946xss1Jcd9r03UQ==", + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-27.5.1.tgz", + "integrity": "sha512-kZ/tNpS3NXn0mlXXXPNuDZnb4c0oZ20r4K5eemM2k30ZC3G0T02nXUvyhf5YdbXWHPEJLc9qGLxEZ216MdL+Zg==", "dev": true, "dependencies": { - "@jest/source-map": "^25.2.6", - "chalk": "^3.0.0", - "jest-util": "^25.3.0", + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "jest-message-util": "^27.5.1", + "jest-util": "^27.5.1", "slash": 
"^3.0.0" }, "engines": { - "node": ">= 8.3" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, "node_modules/@jest/console/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -1509,16 +1607,19 @@ } }, "node_modules/@jest/console/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, "node_modules/@jest/console/node_modules/color-convert": { @@ -1549,9 +1650,9 @@ } }, "node_modules/@jest/console/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { "has-flag": "^4.0.0" @@ -1561,75 +1662,58 @@ } }, "node_modules/@jest/core": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/core/-/core-25.3.0.tgz", - "integrity": "sha512-+D5a/tFf6pA/Gqft2DLBp/yeSRgXhlJ+Wpst0X/ZkfTRP54qDR3C61VfHwaex+GzZBiTcE9vQeoZ2v5T10+Mqw==", + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-27.5.1.tgz", + "integrity": "sha512-AK6/UTrvQD0Cd24NSqmIA6rKsu0tKIxfiCducZvqxYdmMisOYAsdItspT+fQDQYARPf8XgjAFZi0ogW2agH5nQ==", "dev": true, "dependencies": { - "@jest/console": "^25.3.0", - "@jest/reporters": "^25.3.0", - "@jest/test-result": "^25.3.0", - "@jest/transform": "^25.3.0", - "@jest/types": "^25.3.0", + "@jest/console": "^27.5.1", + "@jest/reporters": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/transform": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", "ansi-escapes": "^4.2.1", - "chalk": "^3.0.0", + "chalk": "^4.0.0", + "emittery": "^0.8.1", "exit": "^0.1.2", - "graceful-fs": "^4.2.3", - "jest-changed-files": "^25.3.0", - "jest-config": "^25.3.0", - "jest-haste-map": "^25.3.0", - "jest-message-util": "^25.3.0", - "jest-regex-util": "^25.2.6", - "jest-resolve": "^25.3.0", - "jest-resolve-dependencies": "^25.3.0", - "jest-runner": "^25.3.0", - "jest-runtime": "^25.3.0", - "jest-snapshot": "^25.3.0", - "jest-util": "^25.3.0", - "jest-validate": "^25.3.0", - "jest-watcher": "^25.3.0", - "micromatch": "^4.0.2", - "p-each-series": "^2.1.0", - "realpath-native": "^2.0.0", + "graceful-fs": "^4.2.9", + 
"jest-changed-files": "^27.5.1", + "jest-config": "^27.5.1", + "jest-haste-map": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-regex-util": "^27.5.1", + "jest-resolve": "^27.5.1", + "jest-resolve-dependencies": "^27.5.1", + "jest-runner": "^27.5.1", + "jest-runtime": "^27.5.1", + "jest-snapshot": "^27.5.1", + "jest-util": "^27.5.1", + "jest-validate": "^27.5.1", + "jest-watcher": "^27.5.1", + "micromatch": "^4.0.4", "rimraf": "^3.0.0", "slash": "^3.0.0", "strip-ansi": "^6.0.0" }, "engines": { - "node": ">= 8.3" - } - }, - "node_modules/@jest/core/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", - "dev": true, - "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" }, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/@jest/core/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", - "dev": true, - "dependencies": { - "@types/yargs-parser": "*" + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } } }, "node_modules/@jest/core/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -1639,29 +1723,20 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/@jest/core/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@jest/core/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, "node_modules/@jest/core/node_modules/color-convert": { @@ -1682,18 +1757,6 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "node_modules/@jest/core/node_modules/fill-range": { - "version": "7.0.1", - 
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@jest/core/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -1703,32 +1766,10 @@ "node": ">=8" } }, - "node_modules/@jest/core/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/@jest/core/node_modules/micromatch": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.2.tgz", - "integrity": "sha512-y7FpHSbMUMoyPbYUSzO6PaZ6FyRnQOpHuKwbo1G+Knck95XVU4QAiKdGEnj5wwoS7PlOgthX/09u5iFJ+aYf5Q==", - "dev": true, - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.0.5" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@jest/core/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { "has-flag": "^4.0.0" @@ -1737,63 +1778,102 @@ "node": ">=8" } }, - "node_modules/@jest/core/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "node_modules/@jest/environment": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-27.5.1.tgz", + "integrity": "sha512-/WQjhPJe3/ghaol/4Bq480JKXV/Rfw8nQdN7f41fM8VDHLcxKXou6QyXAh3EFr9/bVG3x74z1NWDkP87EiY8gA==", "dev": true, "dependencies": { - "is-number": "^7.0.0" + "@jest/fake-timers": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "jest-mock": "^27.5.1" }, "engines": { - "node": ">=8.0" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/@jest/environment": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-25.3.0.tgz", - "integrity": "sha512-vgooqwJTHLLak4fE+TaCGeYP7Tz1Y3CKOsNxR1sE0V3nx3KRUHn3NUnt+wbcfd5yQWKZQKAfW6wqbuwQLrXo3g==", + "node_modules/@jest/fake-timers": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-27.5.1.tgz", + "integrity": "sha512-/aPowoolwa07k7/oM3aASneNeBGCmGQsc3ugN4u6s4C/+s5M64MFo/+djTdiwcbQlRfFElGuDXWzaWj6QgKObQ==", "dev": true, "dependencies": { - "@jest/fake-timers": "^25.3.0", - "@jest/types": "^25.3.0", - "jest-mock": "^25.3.0" + "@jest/types": "^27.5.1", + "@sinonjs/fake-timers": "^8.0.1", + "@types/node": "*", + "jest-message-util": "^27.5.1", + "jest-mock": "^27.5.1", + "jest-util": "^27.5.1" }, "engines": { - "node": ">= 8.3" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - 
"node_modules/@jest/environment/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", + "node_modules/@jest/globals": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-27.5.1.tgz", + "integrity": "sha512-ZEJNB41OBQQgGzgyInAv0UUfDDj3upmHydjieSxFvTRuZElrx7tXg/uVQ5hYVEwiXs3+aMsAeEc9X7xiSKCm4Q==", "dev": true, "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "@jest/environment": "^27.5.1", + "@jest/types": "^27.5.1", + "expect": "^27.5.1" }, "engines": { - "node": ">= 8.3" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/@jest/environment/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", + "node_modules/@jest/reporters": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-27.5.1.tgz", + "integrity": "sha512-cPXh9hWIlVJMQkVk84aIvXuBB4uQQmFqZiacloFuGiP3ah1sbCxCosidXFDfqG8+6fO1oR2dTJTlsOy4VFmUfw==", "dev": true, "dependencies": { - "@types/yargs-parser": "*" + "@bcoe/v8-coverage": "^0.2.3", + "@jest/console": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/transform": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "collect-v8-coverage": "^1.0.0", + "exit": "^0.1.2", + "glob": "^7.1.2", + "graceful-fs": "^4.2.9", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-instrument": "^5.1.0", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.0", + "istanbul-reports": "^3.1.3", + "jest-haste-map": "^27.5.1", + "jest-resolve": "^27.5.1", + "jest-util": "^27.5.1", + "jest-worker": "^27.5.1", + "slash": "^3.0.0", + "source-map": "^0.6.0", + "string-length": "^4.0.1", + "terminal-link": "^2.0.0", + "v8-to-istanbul": "^8.1.0" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } } }, - "node_modules/@jest/environment/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "node_modules/@jest/reporters/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -1803,20 +1883,23 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/@jest/environment/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "node_modules/@jest/reporters/node_modules/chalk": { + "version": "4.1.2", + "resolved": 
"https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/@jest/environment/node_modules/color-convert": { + "node_modules/@jest/reporters/node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", @@ -1828,13 +1911,13 @@ "node": ">=7.0.0" } }, - "node_modules/@jest/environment/node_modules/color-name": { + "node_modules/@jest/reporters/node_modules/color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "node_modules/@jest/environment/node_modules/has-flag": { + "node_modules/@jest/reporters/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", @@ -1843,10 +1926,10 @@ "node": ">=8" } }, - "node_modules/@jest/environment/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "node_modules/@jest/reporters/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { "has-flag": "^4.0.0" @@ -1855,53 +1938,82 @@ "node": ">=8" } }, - "node_modules/@jest/fake-timers": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-25.3.0.tgz", - "integrity": "sha512-NHAj7WbsyR3qBJPpBwSwqaq2WluIvUQsyzpJTN7XDVk7VnlC/y1BAnaYZL3vbPIP8Nhm0Ae5DJe0KExr/SdMJQ==", + "node_modules/@jest/source-map": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-27.5.1.tgz", + "integrity": "sha512-y9NIHUYF3PJRlHk98NdC/N1gl88BL08aQQgu4k4ZopQkCw9t9cV8mtl3TV8b/YCB8XaVTFrmUTAJvjsntDireg==", "dev": true, "dependencies": { - "@jest/types": "^25.3.0", - "jest-message-util": "^25.3.0", - "jest-mock": "^25.3.0", - "jest-util": "^25.3.0", - "lolex": "^5.0.0" + "callsites": "^3.0.0", + "graceful-fs": "^4.2.9", + "source-map": "^0.6.0" }, "engines": { - "node": ">= 8.3" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/@jest/fake-timers/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", + "node_modules/@jest/test-result": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-27.5.1.tgz", + "integrity": "sha512-EW35l2RYFUcUQxFJz5Cv5MTOxlJIQs4I7gxzi2zVU7PJhOwfYq1MdC5nhSmYjX1gmMmLPvB3sIaC+BkcHRBfag==", "dev": true, 
"dependencies": { + "@jest/console": "^27.5.1", + "@jest/types": "^27.5.1", "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "collect-v8-coverage": "^1.0.0" }, "engines": { - "node": ">= 8.3" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/@jest/fake-timers/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", + "node_modules/@jest/test-sequencer": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-27.5.1.tgz", + "integrity": "sha512-LCheJF7WB2+9JuCS7VB/EmGIdQuhtqjRNI9A43idHv3E4KltCTsPsLxvdaubFHSYwY/fNjMWjl6vNRhDiN7vpQ==", "dev": true, "dependencies": { - "@types/yargs-parser": "*" + "@jest/test-result": "^27.5.1", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^27.5.1", + "jest-runtime": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@jest/transform": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-27.5.1.tgz", + "integrity": "sha512-ipON6WtYgl/1329g5AIJVbUuEh0wZVbdpGwC99Jw4LwuoBNS95MVphU6zOeD9pDkon+LLbFL7lOQRapbB8SCHw==", + "dev": true, + "dependencies": { + "@babel/core": "^7.1.0", + "@jest/types": "^27.5.1", + "babel-plugin-istanbul": "^6.1.1", + "chalk": "^4.0.0", + "convert-source-map": "^1.4.0", + "fast-json-stable-stringify": "^2.0.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^27.5.1", + "jest-regex-util": "^27.5.1", + "jest-util": "^27.5.1", + "micromatch": "^4.0.4", + "pirates": "^4.0.4", + "slash": "^3.0.0", + "source-map": "^0.6.1", + "write-file-atomic": "^3.0.0" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/@jest/fake-timers/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "node_modules/@jest/transform/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -1911,20 +2023,23 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/@jest/fake-timers/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "node_modules/@jest/transform/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/@jest/fake-timers/node_modules/color-convert": { + 
"node_modules/@jest/transform/node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", @@ -1936,13 +2051,13 @@ "node": ">=7.0.0" } }, - "node_modules/@jest/fake-timers/node_modules/color-name": { + "node_modules/@jest/transform/node_modules/color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "node_modules/@jest/fake-timers/node_modules/has-flag": { + "node_modules/@jest/transform/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", @@ -1951,10 +2066,10 @@ "node": ">=8" } }, - "node_modules/@jest/fake-timers/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "node_modules/@jest/transform/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { "has-flag": "^4.0.0" @@ -1963,74 +2078,28 @@ "node": ">=8" } }, - "node_modules/@jest/reporters": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-25.3.0.tgz", - "integrity": "sha512-1u0ZBygs0C9DhdYgLCrRfZfNKQa+9+J7Uo+Z9z0RWLHzgsxhoG32lrmMOtUw48yR6bLNELdvzormwUqSk4H4Vg==", - "dev": true, - "dependencies": { - "@bcoe/v8-coverage": "^0.2.3", - "@jest/console": "^25.3.0", - "@jest/test-result": "^25.3.0", - "@jest/transform": "^25.3.0", - "@jest/types": "^25.3.0", - "chalk": "^3.0.0", - "collect-v8-coverage": "^1.0.0", - "exit": "^0.1.2", - "glob": "^7.1.2", - "istanbul-lib-coverage": "^3.0.0", - "istanbul-lib-instrument": "^4.0.0", - "istanbul-lib-report": "^3.0.0", - "istanbul-lib-source-maps": "^4.0.0", - "istanbul-reports": "^3.0.2", - "jest-haste-map": "^25.3.0", - "jest-resolve": "^25.3.0", - "jest-util": "^25.3.0", - "jest-worker": "^25.2.6", - "slash": "^3.0.0", - "source-map": "^0.6.0", - "string-length": "^3.1.0", - "terminal-link": "^2.0.0", - "v8-to-istanbul": "^4.0.1" - }, - "engines": { - "node": ">= 8.3" - }, - "optionalDependencies": { - "node-notifier": "^6.0.0" - } - }, - "node_modules/@jest/reporters/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", + "node_modules/@jest/types": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-27.5.1.tgz", + "integrity": "sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw==", "dev": true, "dependencies": { "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^16.0.0", + 
"chalk": "^4.0.0" }, "engines": { - "node": ">= 8.3" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/@jest/reporters/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", - "dev": true, - "dependencies": { - "@types/yargs-parser": "*" - } - }, - "node_modules/@jest/reporters/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "node_modules/@jest/types/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -2040,20 +2109,23 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/@jest/reporters/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "node_modules/@jest/types/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/@jest/reporters/node_modules/color-convert": { + "node_modules/@jest/types/node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", @@ -2065,13 +2137,13 @@ "node": ">=7.0.0" } }, - "node_modules/@jest/reporters/node_modules/color-name": { + "node_modules/@jest/types/node_modules/color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "node_modules/@jest/reporters/node_modules/has-flag": { + "node_modules/@jest/types/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", @@ -2080,10 +2152,10 @@ "node": ">=8" } }, - "node_modules/@jest/reporters/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "node_modules/@jest/types/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": 
"sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { "has-flag": "^4.0.0" @@ -2092,531 +2164,286 @@ "node": ">=8" } }, - "node_modules/@jest/source-map": { - "version": "25.2.6", - "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-25.2.6.tgz", - "integrity": "sha512-VuIRZF8M2zxYFGTEhkNSvQkUKafQro4y+mwUxy5ewRqs5N/ynSFUODYp3fy1zCnbCMy1pz3k+u57uCqx8QRSQQ==", + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.12", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.12.tgz", + "integrity": "sha512-OuLGC46TjB5BbN1dH8JULVVZY4WTdkF7tV9Ys6wLL1rubZnCMstOhNHueU5bLCrnRuDhKPDM4g6sw4Bel5Gzqg==", "dev": true, "dependencies": { - "callsites": "^3.0.0", - "graceful-fs": "^4.2.3", - "source-map": "^0.6.0" - }, - "engines": { - "node": ">= 8.3" + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" } }, - "node_modules/@jest/test-result": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-25.3.0.tgz", - "integrity": "sha512-mqrGuiiPXl1ap09Mydg4O782F3ouDQfsKqtQzIjitpwv3t1cHDwCto21jThw6WRRE+dKcWQvLG70GpyLJICfGw==", + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", "dev": true, - "dependencies": { - "@jest/console": "^25.3.0", - "@jest/types": "^25.3.0", - "@types/istanbul-lib-coverage": "^2.0.0", - "collect-v8-coverage": "^1.0.0" - }, "engines": { - "node": ">= 8.3" + "node": ">=6.0.0" } }, - "node_modules/@jest/test-result/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", - "dev": true, - "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" - }, - "engines": { - "node": ">= 8.3" - } + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.4.tgz", + "integrity": "sha512-VT2+G1VQs/9oz078bLrYbecdZKs912zQlkelYpuf+SXF+QvZDYJlbx/LSx+meSAwdDFnF8FVXW92AVjjkVmgFw==", + "dev": true }, - "node_modules/@jest/test-result/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.29", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.29.tgz", + "integrity": "sha512-uw6guiW/gcAGPDhLmd77/6lW8QLeiV5RUTsAX46Db6oLhGaVj4lhnPwb184s1bkc8kdVg/+h988dro8GRDpmYQ==", "dev": true, "dependencies": { - "@types/yargs-parser": "*" + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" } }, - "node_modules/@jest/test-result/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", - "dev": true, - "dependencies": { - 
"@types/color-name": "^1.1.1", - "color-convert": "^2.0.1" - }, + "node_modules/@jsep-plugin/assignment": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@jsep-plugin/assignment/-/assignment-1.3.0.tgz", + "integrity": "sha512-VVgV+CXrhbMI3aSusQyclHkenWSAm95WaiKrMxRFam3JSUiIaQjoMIw2sEs/OX4XifnqeQUN4DYbJjlA8EfktQ==", + "license": "MIT", "engines": { - "node": ">=8" + "node": ">= 10.16.0" }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "peerDependencies": { + "jsep": "^0.4.0||^1.0.0" } }, - "node_modules/@jest/test-result/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, + "node_modules/@jsep-plugin/regex": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@jsep-plugin/regex/-/regex-1.0.4.tgz", + "integrity": "sha512-q7qL4Mgjs1vByCaTnDFcBnV9HS7GVPJX5vyVoCgZHNSC9rjwIlmbXG5sUuorR5ndfHAIlJ8pVStxvjXHbNvtUg==", + "license": "MIT", "engines": { - "node": ">=8" + "node": ">= 10.16.0" + }, + "peerDependencies": { + "jsep": "^0.4.0||^1.0.0" } }, - "node_modules/@jest/test-result/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, + "node_modules/@kubernetes/client-node": { + "version": "0.16.3", + "resolved": "https://registry.npmjs.org/@kubernetes/client-node/-/client-node-0.16.3.tgz", + "integrity": "sha512-L7IckuyuPfhd+/Urib8MRas9D6sfKEq8IaITYcaE6LlU+Y8MeD7MTbuW6Yb2WdeRuFN8HPSS47mxPnOUNYBXEg==", + "license": "Apache-2.0", "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" + "@types/js-yaml": "^4.0.1", + "@types/node": "^10.12.0", + "@types/request": "^2.47.1", + "@types/stream-buffers": "^3.0.3", + "@types/tar": "^4.0.3", + "@types/underscore": "^1.8.9", + "@types/ws": "^6.0.1", + "byline": "^5.0.0", + "execa": "5.0.0", + "isomorphic-ws": "^4.0.1", + "js-yaml": "^4.1.0", + "jsonpath-plus": "^0.19.0", + "openid-client": "^4.1.1", + "request": "^2.88.0", + "rfc4648": "^1.3.0", + "shelljs": "^0.8.5", + "stream-buffers": "^3.0.2", + "tar": "^6.1.11", + "tmp-promise": "^3.0.2", + "tslib": "^1.9.3", + "underscore": "^1.9.1", + "ws": "^7.3.1" } }, - "node_modules/@jest/test-result/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "node_modules/@jest/test-result/node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true, - "engines": { - "node": ">=8" - } + "node_modules/@kubernetes/client-node/node_modules/@types/node": { + "version": "10.17.60", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.60.tgz", + "integrity": "sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw==", + "license": "MIT" }, - "node_modules/@jest/test-result/node_modules/supports-color": { - "version": 
"7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", - "dev": true, + "node_modules/@kubernetes/client-node/node_modules/execa": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.0.0.tgz", + "integrity": "sha512-ov6w/2LCiuyO4RLYGdpFGjkcs0wMTgGE8PrkTHikeUy5iJekXyPIKUjifk5CsE0pt7sMCrMZ3YNqoCj6idQOnQ==", + "license": "MIT", "dependencies": { - "has-flag": "^4.0.0" + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" }, "engines": { - "node": ">=8" - } - }, - "node_modules/@jest/test-sequencer": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-25.3.0.tgz", - "integrity": "sha512-Xvns3xbji7JCvVcDGvqJ/pf4IpmohPODumoPEZJ0/VgC5gI4XaNVIBET2Dq5Czu6Gk3xFcmhtthh/MBOTljdNg==", - "dev": true, - "dependencies": { - "@jest/test-result": "^25.3.0", - "jest-haste-map": "^25.3.0", - "jest-runner": "^25.3.0", - "jest-runtime": "^25.3.0" + "node": ">=10" }, - "engines": { - "node": ">= 8.3" + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" } }, - "node_modules/@jest/transform": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-25.3.0.tgz", - "integrity": "sha512-W01p8kTDvvEX6kd0tJc7Y5VdYyFaKwNWy1HQz6Jqlhu48z/8Gxp+yFCDVj+H8Rc7ezl3Mg0hDaGuFVkmHOqirg==", - "dev": true, - "dependencies": { - "@babel/core": "^7.1.0", - "@jest/types": "^25.3.0", - "babel-plugin-istanbul": "^6.0.0", - "chalk": "^3.0.0", - "convert-source-map": "^1.4.0", - "fast-json-stable-stringify": "^2.0.0", - "graceful-fs": "^4.2.3", - "jest-haste-map": "^25.3.0", - "jest-regex-util": "^25.2.6", - "jest-util": "^25.3.0", - "micromatch": "^4.0.2", - "pirates": "^4.0.1", - "realpath-native": "^2.0.0", - "slash": "^3.0.0", - "source-map": "^0.6.1", - "write-file-atomic": "^3.0.0" - }, + "node_modules/@kubernetes/client-node/node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "license": "MIT", "engines": { - "node": ">= 8.3" - } - }, - "node_modules/@jest/transform/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", - "dev": true, - "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "node": ">=10" }, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/@jest/transform/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", - "dev": true, - "dependencies": { - "@types/yargs-parser": "*" + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/@jest/transform/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", - "dev": true, - "dependencies": { - "@types/color-name": "^1.1.1", - "color-convert": "^2.0.1" - }, + "node_modules/@kubernetes/client-node/node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "license": "MIT", "engines": { "node": ">=8" }, "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/@jest/transform/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, + "node_modules/@kubernetes/client-node/node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "license": "MIT", "dependencies": { - "fill-range": "^7.0.1" + "path-key": "^3.0.0" }, "engines": { "node": ">=8" } }, - "node_modules/@jest/transform/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, + "node_modules/@kubernetes/client-node/node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/@jest/transform/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "dependencies": { - "color-name": "~1.1.4" - }, + "node_modules/@kubernetes/client-node/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" + }, + "node_modules/@panva/asn1.js": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@panva/asn1.js/-/asn1.js-1.0.0.tgz", + "integrity": "sha512-UdkG3mLEqXgnlKsWanWcgb6dOjUzJ+XC5f+aWw30qrtjxeNUSfKX1cd5FBzOaXQumoe9nIqeZUvrRJS03HCCtw==", + "license": "MIT", "engines": { - "node": ">=7.0.0" + "node": ">=10.13.0" } }, - "node_modules/@jest/transform/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "node_modules/@jest/transform/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": 
"sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "dependencies": { - "to-regex-range": "^5.0.1" - }, + "node_modules/@sindresorhus/is": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-4.6.0.tgz", + "integrity": "sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==", + "license": "MIT", "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/is?sponsor=1" } }, - "node_modules/@jest/transform/node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "node_modules/@sinonjs/commons": { + "version": "1.8.6", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.6.tgz", + "integrity": "sha512-Ky+XkAkqPZSm3NLBeUng77EBQl3cmeJhITaGHdYH8kjVB+aun3S4XBRti2zt17mtt0mIUDiNxYeoJm6drVvBJQ==", "dev": true, - "engines": { - "node": ">=8" + "dependencies": { + "type-detect": "4.0.8" } }, - "node_modules/@jest/transform/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "node_modules/@sinonjs/fake-timers": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-8.1.0.tgz", + "integrity": "sha512-OAPJUAtgeINhh/TAlUID4QTs53Njm7xzddaVlEs/SXwgtiD1tW22zAB/W1wdqfrpmikgaWQ9Fw6Ws+hsiRm5Vg==", "dev": true, - "engines": { - "node": ">=0.12.0" + "dependencies": { + "@sinonjs/commons": "^1.7.0" } }, - "node_modules/@jest/transform/node_modules/micromatch": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.2.tgz", - "integrity": "sha512-y7FpHSbMUMoyPbYUSzO6PaZ6FyRnQOpHuKwbo1G+Knck95XVU4QAiKdGEnj5wwoS7PlOgthX/09u5iFJ+aYf5Q==", - "dev": true, + "node_modules/@smithy/abort-controller": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-3.1.1.tgz", + "integrity": "sha512-MBJBiidoe+0cTFhyxT8g+9g7CeVccLM0IOKKUMCNQ1CNMJ/eIfoo0RTfVrXOONEI1UCN1W+zkiHSbzUNE9dZtQ==", "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.0.5" + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" }, "engines": { - "node": ">=8" + "node": ">=16.0.0" } }, - "node_modules/@jest/transform/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", - "dev": true, + "node_modules/@smithy/abort-controller/node_modules/tslib": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", + "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==" + }, + "node_modules/@smithy/config-resolver": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-3.0.5.tgz", + "integrity": "sha512-SkW5LxfkSI1bUC74OtfBbdz+grQXYiPYolyu8VfpLIjEoN/sHVBlLeGXMQ1vX4ejkgfv6sxVbQJ32yF2cl1veA==", "dependencies": { - "has-flag": "^4.0.0" + "@smithy/node-config-provider": "^3.1.4", + "@smithy/types": "^3.3.0", + 
"@smithy/util-config-provider": "^3.0.0", + "@smithy/util-middleware": "^3.0.3", + "tslib": "^2.6.2" }, "engines": { - "node": ">=8" + "node": ">=16.0.0" } }, - "node_modules/@jest/transform/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, + "node_modules/@smithy/config-resolver/node_modules/tslib": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", + "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==" + }, + "node_modules/@smithy/core": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-2.3.1.tgz", + "integrity": "sha512-BC7VMXx/1BCmRPCVzzn4HGWAtsrb7/0758EtwOGFJQrlSwJBEjCcDLNZLFoL/68JexYa2s+KmgL/UfmXdG6v1w==", "dependencies": { - "is-number": "^7.0.0" + "@smithy/middleware-endpoint": "^3.1.0", + "@smithy/middleware-retry": "^3.0.13", + "@smithy/middleware-serde": "^3.0.3", + "@smithy/protocol-http": "^4.1.0", + "@smithy/smithy-client": "^3.1.11", + "@smithy/types": "^3.3.0", + "@smithy/util-middleware": "^3.0.3", + "tslib": "^2.6.2" }, "engines": { - "node": ">=8.0" + "node": ">=16.0.0" } }, - "node_modules/@jest/transform/node_modules/write-file-atomic": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", - "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", - "dev": true, - "dependencies": { - "imurmurhash": "^0.1.4", - "is-typedarray": "^1.0.0", - "signal-exit": "^3.0.2", - "typedarray-to-buffer": "^3.1.5" - } + "node_modules/@smithy/core/node_modules/tslib": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", + "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==" }, - "node_modules/@jest/types": { - "version": "24.9.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-24.9.0.tgz", - "integrity": "sha512-XKK7ze1apu5JWQ5eZjHITP66AX+QsLlbaJRBGYr8pNzwcAE2JVkwnf0yqjHTsDRcjR0mujy/NmZMXw5kl+kGBw==", - "dev": true, + "node_modules/@smithy/credential-provider-imds": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-3.2.0.tgz", + "integrity": "sha512-0SCIzgd8LYZ9EJxUjLXBmEKSZR/P/w6l7Rz/pab9culE/RWuqelAKGJvn5qUOl8BgX8Yj5HWM50A5hiB/RzsgA==", "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^13.0.0" + "@smithy/node-config-provider": "^3.1.4", + "@smithy/property-provider": "^3.1.3", + "@smithy/types": "^3.3.0", + "@smithy/url-parser": "^3.0.3", + "tslib": "^2.6.2" }, "engines": { - "node": ">= 6" - } - }, - "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz", - "integrity": "sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==", - "dev": true, - "dependencies": { - "@jridgewell/set-array": "^1.0.1", - "@jridgewell/sourcemap-codec": "^1.4.10", - "@jridgewell/trace-mapping": "^0.3.9" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.1", - "resolved": 
"https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz", - "integrity": "sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA==", - "dev": true, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/set-array": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz", - "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==", - "dev": true, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.4.15", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", - "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==", - "dev": true - }, - "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.19", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.19.tgz", - "integrity": "sha512-kf37QtfW+Hwx/buWGMPcR60iF9ziHa6r/CZJIHbmcm4+0qrXiVdxegAH0F6yddEVQ7zdkjcGCgCzUu+BcbhQxw==", - "dev": true, - "dependencies": { - "@jridgewell/resolve-uri": "^3.1.0", - "@jridgewell/sourcemap-codec": "^1.4.14" - } - }, - "node_modules/@kubernetes/client-node": { - "version": "0.12.3", - "resolved": "https://registry.npmjs.org/@kubernetes/client-node/-/client-node-0.12.3.tgz", - "integrity": "sha512-+Qq/WRbHIqL791JO6d3l/aupU922sriknCOsB63YjW9KVykX6/RTuPFDptfbWBVfPB3lRmhbjur95DIej1bAqA==", - "license": "Apache-2.0", - "dependencies": { - "@types/js-yaml": "^3.12.1", - "@types/node": "^10.12.0", - "@types/request": "^2.47.1", - "@types/stream-buffers": "^3.0.3", - "@types/tar": "^4.0.3", - "@types/underscore": "^1.8.9", - "@types/ws": "^6.0.1", - "byline": "^5.0.0", - "execa": "1.0.0", - "isomorphic-ws": "^4.0.1", - "js-yaml": "^3.13.1", - "jsonpath-plus": "^0.19.0", - "openid-client": "^4.1.1", - "request": "^2.88.0", - "rfc4648": "^1.3.0", - "shelljs": "^0.8.2", - "stream-buffers": "^3.0.2", - "tar": "^6.0.2", - "tmp-promise": "^3.0.2", - "tslib": "^1.9.3", - "underscore": "^1.9.1", - "ws": "^7.3.1" - } - }, - "node_modules/@kubernetes/client-node/node_modules/@types/node": { - "version": "10.17.60", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.60.tgz", - "integrity": "sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw==" - }, - "node_modules/@panva/asn1.js": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@panva/asn1.js/-/asn1.js-1.0.0.tgz", - "integrity": "sha512-UdkG3mLEqXgnlKsWanWcgb6dOjUzJ+XC5f+aWw30qrtjxeNUSfKX1cd5FBzOaXQumoe9nIqeZUvrRJS03HCCtw==", - "license": "MIT", - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/@sindresorhus/is": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-4.6.0.tgz", - "integrity": "sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sindresorhus/is?sponsor=1" - } - }, - "node_modules/@sinonjs/commons": { - "version": "1.7.2", - "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.7.2.tgz", - "integrity": "sha512-+DUO6pnp3udV/v2VfUWgaY5BIE1IfT7lLfeDzPVeMT1XKkaAp9LgSI9x5RtrFQoZ9Oi0PgXQQHPaoKu7dCjVxw==", - "dev": true, - "dependencies": { - "type-detect": "4.0.8" - } - }, - "node_modules/@smithy/abort-controller": { - 
"version": "3.1.1", - "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-3.1.1.tgz", - "integrity": "sha512-MBJBiidoe+0cTFhyxT8g+9g7CeVccLM0IOKKUMCNQ1CNMJ/eIfoo0RTfVrXOONEI1UCN1W+zkiHSbzUNE9dZtQ==", - "dependencies": { - "@smithy/types": "^3.3.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=16.0.0" - } - }, - "node_modules/@smithy/abort-controller/node_modules/tslib": { - "version": "2.6.3", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", - "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==" - }, - "node_modules/@smithy/config-resolver": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-3.0.5.tgz", - "integrity": "sha512-SkW5LxfkSI1bUC74OtfBbdz+grQXYiPYolyu8VfpLIjEoN/sHVBlLeGXMQ1vX4ejkgfv6sxVbQJ32yF2cl1veA==", - "dependencies": { - "@smithy/node-config-provider": "^3.1.4", - "@smithy/types": "^3.3.0", - "@smithy/util-config-provider": "^3.0.0", - "@smithy/util-middleware": "^3.0.3", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=16.0.0" - } - }, - "node_modules/@smithy/config-resolver/node_modules/tslib": { - "version": "2.6.3", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", - "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==" - }, - "node_modules/@smithy/core": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/@smithy/core/-/core-2.3.1.tgz", - "integrity": "sha512-BC7VMXx/1BCmRPCVzzn4HGWAtsrb7/0758EtwOGFJQrlSwJBEjCcDLNZLFoL/68JexYa2s+KmgL/UfmXdG6v1w==", - "dependencies": { - "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/middleware-retry": "^3.0.13", - "@smithy/middleware-serde": "^3.0.3", - "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.11", - "@smithy/types": "^3.3.0", - "@smithy/util-middleware": "^3.0.3", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=16.0.0" - } - }, - "node_modules/@smithy/core/node_modules/tslib": { - "version": "2.6.3", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", - "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==" - }, - "node_modules/@smithy/credential-provider-imds": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-3.2.0.tgz", - "integrity": "sha512-0SCIzgd8LYZ9EJxUjLXBmEKSZR/P/w6l7Rz/pab9culE/RWuqelAKGJvn5qUOl8BgX8Yj5HWM50A5hiB/RzsgA==", - "dependencies": { - "@smithy/node-config-provider": "^3.1.4", - "@smithy/property-provider": "^3.1.3", - "@smithy/types": "^3.3.0", - "@smithy/url-parser": "^3.0.3", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=16.0.0" + "node": ">=16.0.0" } }, "node_modules/@smithy/credential-provider-imds/node_modules/tslib": { @@ -3373,32 +3200,41 @@ "node": ">=10" } }, + "node_modules/@tootallnate/once": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", + "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, "node_modules/@types/babel__core": { - "version": "7.1.7", - "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.1.7.tgz", - "integrity": "sha512-RL62NqSFPCDK2FM1pSDH0scHpJvsXtZNiYlMB73DgPBaG1E38ZYVL+ei5EkWRbr+KC4YNiAUNBnRj+bgwpgjMw==", + "version": "7.20.5", + "resolved": 
"https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", "dev": true, "dependencies": { - "@babel/parser": "^7.1.0", - "@babel/types": "^7.0.0", + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", "@types/babel__generator": "*", "@types/babel__template": "*", "@types/babel__traverse": "*" } }, "node_modules/@types/babel__generator": { - "version": "7.6.1", - "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.1.tgz", - "integrity": "sha512-bBKm+2VPJcMRVwNhxKu8W+5/zT7pwNEqeokFOmbvVSqGzFneNxYcEBro9Ac7/N9tlsaPYnZLK8J1LWKkMsLAew==", + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", "dev": true, "dependencies": { "@babel/types": "^7.0.0" } }, "node_modules/@types/babel__template": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.0.2.tgz", - "integrity": "sha512-/K6zCpeW7Imzgab2bLkLEbz0+1JlFSrUMdw7KoIIu+IUdu51GWaBZpd3y1VXGVXzynvGa4DaIaxNZHiON3GXUg==", + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", "dev": true, "dependencies": { "@babel/parser": "^7.1.0", @@ -3406,19 +3242,19 @@ } }, "node_modules/@types/babel__traverse": { - "version": "7.0.10", - "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.0.10.tgz", - "integrity": "sha512-74fNdUGrWsgIB/V9kTO5FGHPWYY6Eqn+3Z7L6Hc4e/BxjYV7puvBqp5HwsVYYfLm6iURYBNCx4Ut37OF9yitCw==", + "version": "7.20.7", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.7.tgz", + "integrity": "sha512-dkO5fhS7+/oos4ciWxyEyjWe48zmG6wbCheo/G2ZnHx4fs3EU6YC6UM8rk56gAjNJ9P3MTH2jo5jb92/K6wbng==", "dev": true, "dependencies": { - "@babel/types": "^7.3.0" + "@babel/types": "^7.20.7" } }, "node_modules/@types/body-parser": { "version": "1.19.0", "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.0.tgz", "integrity": "sha512-W98JrE0j2K78swW4ukqMleo8R7h/pFETjM2DQ90MF6XK2i4LO4W3gQ71Lt4w3bfm2EvVSyWHplECvB5sK22yFQ==", - "dev": true, + "devOptional": true, "dependencies": { "@types/connect": "*", "@types/node": "*" @@ -3441,17 +3277,11 @@ "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.2.tgz", "integrity": "sha512-6ckxMjBBD8URvjB6J3NcnuAn5Pkl7t3TizAg+xdlzzQGSPSmBcXf8KoIH0ua/i+tio+ZRUHEXp0HEmvaR4kt0w==" }, - "node_modules/@types/color-name": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@types/color-name/-/color-name-1.1.1.tgz", - "integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==", - "dev": true - }, "node_modules/@types/connect": { "version": "3.4.33", "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.33.tgz", "integrity": "sha512-2+FrkXY4zllzTNfJth7jOqEHC+enpLeGslEhpnTAkg21GkRrWV4SsAtqchtT4YS9/nODBU2/ZfsBY2X4J/dX7A==", - "dev": true, + "devOptional": true, "dependencies": { "@types/node": "*" } @@ -3469,24 +3299,38 @@ "dev": true }, "node_modules/@types/express": { - "version": "4.17.2", - "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.2.tgz", - "integrity": 
"sha512-5mHFNyavtLoJmnusB8OKJ5bshSzw+qkMIBAobLrIM48HJvunFva9mOa6aBwh64lBFyNwBbs0xiEFuj4eU/NjCA==", - "dev": true, + "version": "4.17.23", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.23.tgz", + "integrity": "sha512-Crp6WY9aTYP3qPi2wGDo9iUe/rceX01UMhnF1jmwDcKCFM6cx7YhGP/Mpr3y9AASpfHixIG0E6azCcL5OcDHsQ==", + "devOptional": true, + "license": "MIT", "dependencies": { "@types/body-parser": "*", - "@types/express-serve-static-core": "*", + "@types/express-serve-static-core": "^4.17.33", + "@types/qs": "*", "@types/serve-static": "*" } }, "node_modules/@types/express-serve-static-core": { - "version": "4.17.2", - "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.2.tgz", - "integrity": "sha512-El9yMpctM6tORDAiBwZVLMcxoTMcqqRO9dVyYcn7ycLWbvR8klrDn8CAOwRfZujZtWD7yS/mshTdz43jMOejbg==", - "dev": true, + "version": "4.19.6", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.19.6.tgz", + "integrity": "sha512-N4LZ2xG7DatVqhCZzOGb1Yi5lMbXSZcmdLDe9EzSndPV2HpWYWzRbaerl2n27irrm94EPpprqa8KpskPT085+A==", + "devOptional": true, + "license": "MIT", "dependencies": { "@types/node": "*", - "@types/range-parser": "*" + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, + "node_modules/@types/graceful-fs": { + "version": "4.1.9", + "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz", + "integrity": "sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==", + "dev": true, + "dependencies": { + "@types/node": "*" } }, "node_modules/@types/gunzip-maybe": { @@ -3505,63 +3349,53 @@ "license": "MIT" }, "node_modules/@types/http-proxy": { - "version": "1.17.4", - "resolved": "https://registry.npmjs.org/@types/http-proxy/-/http-proxy-1.17.4.tgz", - "integrity": "sha512-IrSHl2u6AWXduUaDLqYpt45tLVCtYv7o4Z0s1KghBCDgIIS9oW5K1H8mZG/A2CfeLdEa7rTd1ACOiHBc1EMT2Q==", - "dev": true, - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/http-proxy-middleware": { - "version": "0.19.3", - "resolved": "https://registry.npmjs.org/@types/http-proxy-middleware/-/http-proxy-middleware-0.19.3.tgz", - "integrity": "sha512-lnBTx6HCOUeIJMLbI/LaL5EmdKLhczJY5oeXZpX/cXE4rRqb3RmV7VcMpiEfYkmTjipv3h7IAyIINe4plEv7cA==", - "dev": true, + "version": "1.17.16", + "resolved": "https://registry.npmjs.org/@types/http-proxy/-/http-proxy-1.17.16.tgz", + "integrity": "sha512-sdWoUajOB1cd0A8cRRQ1cfyWNbmFKLAqBB89Y8x5iYyG/mkJHc0YUH8pdWBy2omi9qtCpiIgGjuwO0dQST2l5w==", + "license": "MIT", "dependencies": { - "@types/connect": "*", - "@types/http-proxy": "*", "@types/node": "*" } }, "node_modules/@types/istanbul-lib-coverage": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.1.tgz", - "integrity": "sha512-hRJD2ahnnpLgsj6KWMYSrmXkM3rm2Dl1qkx6IOFD5FnuNPXJIG5L0dhgKXCYTRMGzU4n0wImQ/xfmRc4POUFlg==", + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", + "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", "dev": true }, "node_modules/@types/istanbul-lib-report": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", - "integrity": "sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==", + "version": 
"3.0.3", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz", + "integrity": "sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==", "dev": true, "dependencies": { "@types/istanbul-lib-coverage": "*" } }, "node_modules/@types/istanbul-reports": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-1.1.1.tgz", - "integrity": "sha512-UpYjBi8xefVChsCoBpKShdxTllC9pwISirfoZsUa2AAdQg/Jd2KQGtSbw+ya7GPo7x/wAPlH6JBhKhAsXUEZNA==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz", + "integrity": "sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==", "dev": true, "dependencies": { - "@types/istanbul-lib-coverage": "*", "@types/istanbul-lib-report": "*" } }, "node_modules/@types/jest": { - "version": "24.9.1", - "resolved": "https://registry.npmjs.org/@types/jest/-/jest-24.9.1.tgz", - "integrity": "sha512-Fb38HkXSVA4L8fGKEZ6le5bB8r6MRWlOCZbVuWZcmOMSCd2wCYOwN1ibj8daIoV9naq7aaOZjrLCoCMptKU/4Q==", + "version": "27.5.2", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-27.5.2.tgz", + "integrity": "sha512-mpT8LJJ4CMeeahobofYWIjFo0xonRS/HfxnVEPMPFSQdGUt1uHCnoPT7Zhb+sjDU2wz0oKV0OLUR0WzrHNgfeA==", "dev": true, "dependencies": { - "jest-diff": "^24.3.0" + "jest-matcher-utils": "^27.0.0", + "pretty-format": "^27.0.0" } }, "node_modules/@types/js-yaml": { - "version": "3.12.2", - "resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-3.12.2.tgz", - "integrity": "sha512-0CFu/g4mDSNkodVwWijdlr8jH7RoplRWNgovjFLEZeT+QEbbZXjBmCe3HwaWheAlCbHwomTwzZoSedeOycABug==" + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-4.0.9.tgz", + "integrity": "sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==", + "license": "MIT" }, "node_modules/@types/keyv": { "version": "3.1.4", @@ -3576,7 +3410,7 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/@types/mime/-/mime-2.0.1.tgz", "integrity": "sha512-FwI9gX75FgVBJ7ywgnq/P7tw+/o1GUbtP0KzbtusLigAOgIgNISRK0ZPl4qertvXSIE8YbsVJueQ90cDt9YYyw==", - "dev": true + "devOptional": true }, "node_modules/@types/minipass": { "version": "2.2.0", @@ -3616,16 +3450,23 @@ } }, "node_modules/@types/prettier": { - "version": "1.19.1", - "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-1.19.1.tgz", - "integrity": "sha512-5qOlnZscTn4xxM5MeGXAMOsIOIKIbh9e85zJWfBRVPlRMEVawzoPhINYbRGkBZCI8LxvBe7tJCdWiarA99OZfQ==", + "version": "2.7.3", + "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.7.3.tgz", + "integrity": "sha512-+68kP9yzs4LMp7VNh8gdzMSPZFL44MLGqiHWvttYJe+6qnuVr4Ek9wSBQoveqY/r+LwjCcU29kNVkidwim+kYA==", "dev": true }, + "node_modules/@types/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ==", + "devOptional": true, + "license": "MIT" + }, "node_modules/@types/range-parser": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.3.tgz", "integrity": "sha512-ewFXqrQHlFsgc09MK5jP5iR7vumV/BYayNC6PgJO2LPe8vrnNFyjQjSppfEngITi0qvfKtzFvgKymGheFM9UOA==", - "dev": true + "devOptional": true }, "node_modules/@types/request": { "version": "2.48.4", @@ -3638,11 +3479,6 @@ "form-data": "^2.5.0" } }, - 
"node_modules/@types/request/node_modules/@types/node": { - "version": "13.7.4", - "resolved": "https://registry.npmjs.org/@types/node/-/node-13.7.4.tgz", - "integrity": "sha512-oVeL12C6gQS/GAExndigSaLxTrKpQPxewx9bOcwfvJiJge4rr7wNaph4J+ns5hrmIV2as5qxqN8YKthn9qh0jw==" - }, "node_modules/@types/responselike": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/@types/responselike/-/responselike-1.0.3.tgz", @@ -3652,20 +3488,38 @@ "@types/node": "*" } }, + "node_modules/@types/send": { + "version": "0.17.5", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.5.tgz", + "integrity": "sha512-z6F2D3cOStZvuk2SaP6YrwkNO65iTZcwA2ZkSABegdkAh/lf+Aa/YQndZVfmEXT5vgAp6zv06VQ3ejSVjAny4w==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/send/node_modules/@types/mime": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", + "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", + "devOptional": true, + "license": "MIT" + }, "node_modules/@types/serve-static": { "version": "1.13.3", "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.3.tgz", "integrity": "sha512-oprSwp094zOglVrXdlo/4bAHtKTAxX6VT8FOZlBKrmyLbNvE1zxZyJ6yikMVtHIvwP45+ZQGJn+FdXGKTozq0g==", - "dev": true, + "devOptional": true, "dependencies": { "@types/express-serve-static-core": "*", "@types/mime": "*" } }, "node_modules/@types/stack-utils": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-1.0.1.tgz", - "integrity": "sha512-l42BggppR6zLmpfU6fq9HEa2oGPEI8yrSPL3GITjfRInppYFahObbIQOQK3UGxEnyQpltZLaPe75046NOZQikw==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz", + "integrity": "sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==", "dev": true }, "node_modules/@types/stream-buffers": { @@ -3720,31 +3574,33 @@ "integrity": "sha512-wHNBMnkoEBiRAd3s8KTKwIuO9biFtTf0LehITzBhSco+HQI0xkXZbLOD55SW3Aqw3oUkHstkm5SPv58yaAdFPQ==" }, "node_modules/@types/underscore": { - "version": "1.9.4", - "resolved": "https://registry.npmjs.org/@types/underscore/-/underscore-1.9.4.tgz", - "integrity": "sha512-CjHWEMECc2/UxOZh0kpiz3lEyX2Px3rQS9HzD20lxMvx571ivOBQKeLnqEjxUY0BMgp6WJWo/pQLRBwMW5v4WQ==" + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/@types/underscore/-/underscore-1.13.0.tgz", + "integrity": "sha512-L6LBgy1f0EFQZ+7uSA57+n2g/s4Qs5r06Vwrwn0/nuK1de+adz00NWaztRQ30aEqw5qOaWbPI8u2cGQ52lj6VA==", + "license": "MIT" }, "node_modules/@types/ws": { "version": "6.0.4", "resolved": "https://registry.npmjs.org/@types/ws/-/ws-6.0.4.tgz", "integrity": "sha512-PpPrX7SZW9re6+Ha8ojZG4Se8AZXgf0GK6zmfqEuCsY49LFDNXO3SByp44X3dFEqtB73lkCDAdUazhAjVPiNwg==", + "license": "MIT", "dependencies": { "@types/node": "*" } }, "node_modules/@types/yargs": { - "version": "13.0.8", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-13.0.8.tgz", - "integrity": "sha512-XAvHLwG7UQ+8M4caKIH0ZozIOYay5fQkAgyIXegXT9jPtdIGdhga+sUEdAr1CiG46aB+c64xQEYyEzlwWVTNzA==", + "version": "16.0.9", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.9.tgz", + "integrity": "sha512-tHhzvkFXZQeTECenFoRljLBYPZJ7jAVxqqtEI0qTLOmuultnFp4I9yKE17vTuhf7BkhCu7I4XuemPgikDVuYqA==", "dev": true, "dependencies": { "@types/yargs-parser": "*" } }, "node_modules/@types/yargs-parser": { - "version": 
"15.0.0", - "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-15.0.0.tgz", - "integrity": "sha512-FA/BWv8t8ZWJ+gEOnLLd8ygxH/2UFbAvgEonyfN6yWGLKc7zVjbpl2Y4CTjid9h2RfgPP6SEt6uHwEOply00yw==", + "version": "21.0.3", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", + "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", "dev": true }, "node_modules/@zxing/text-encoding": { @@ -3755,9 +3611,9 @@ "optional": true }, "node_modules/abab": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.3.tgz", - "integrity": "sha512-tsFzPpcttalNjFBCFMqsKYQcWxxen1pgJR56by//QwvJc4/OUS3kPOOttx2tSIfjsylB0pYu7f5D3K1RCxUnUg==", + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.6.tgz", + "integrity": "sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==", "deprecated": "Use your platform's native atob() and btoa() methods instead", "dev": true }, @@ -3773,21 +3629,34 @@ } }, "node_modules/accepts": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", - "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", + "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", + "license": "MIT", + "dependencies": { + "mime-types": "^3.0.0", + "negotiator": "^1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/accepts/node_modules/mime-types": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "license": "MIT", "dependencies": { - "mime-types": "~2.1.34", - "negotiator": "0.6.3" + "mime-db": "^1.54.0" }, "engines": { "node": ">= 0.6" } }, "node_modules/acorn": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.1.tgz", - "integrity": "sha512-add7dgA5ppRPxCFJoAGfMDi7PIBXq1RtGo7BhbLaxwrXPOmw8gq48Y9ozT01hUKy9byMjlR20EJhu5zlkErEkg==", + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", "dev": true, "bin": { "acorn": "bin/acorn" @@ -3797,19 +3666,19 @@ } }, "node_modules/acorn-globals": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-4.3.4.tgz", - "integrity": "sha512-clfQEh21R+D0leSbUdWf3OcfqyaCSAQ8Ryq00bofSekfr9W8u1jyYZo6ir0xu9Gtcf7BjcHJpnbZH7JOCpP60A==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-6.0.0.tgz", + "integrity": "sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg==", "dev": true, "dependencies": { - "acorn": "^6.0.1", - "acorn-walk": "^6.0.1" + "acorn": "^7.1.1", + "acorn-walk": "^7.1.1" } }, "node_modules/acorn-globals/node_modules/acorn": { - "version": "6.4.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.1.tgz", - "integrity": "sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA==", + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": 
"sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", "dev": true, "bin": { "acorn": "bin/acorn" @@ -3819,9 +3688,9 @@ } }, "node_modules/acorn-walk": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-6.2.0.tgz", - "integrity": "sha512-7evsyfH1cLOCdAzZAd43Cic04yKydNx0cF+7tiA19p1XnLLPU4dpCQOqpjqwokFe//vS0QqfqqjCS2JkiIs0cA==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-7.2.0.tgz", + "integrity": "sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==", "dev": true, "engines": { "node": ">=0.4.0" @@ -3863,12 +3732,12 @@ } }, "node_modules/ansi-escapes": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.1.tgz", - "integrity": "sha512-JWF7ocqNrp8u9oqpgV+wH5ftbt+cfvv+PTjOvKLT3AdYly/LmORARfEVT1iyjwN+4MqE5UmVKoAdIBqeoCHgLA==", + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", "dev": true, "dependencies": { - "type-fest": "^0.11.0" + "type-fest": "^0.21.3" }, "engines": { "node": ">=8" @@ -3878,12 +3747,12 @@ } }, "node_modules/ansi-regex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", - "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "dev": true, "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/ansi-styles": { @@ -3899,9 +3768,9 @@ } }, "node_modules/anymatch": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.1.tgz", - "integrity": "sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", "dev": true, "dependencies": { "normalize-path": "^3.0.0", @@ -3912,60 +3781,21 @@ } }, "node_modules/argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", - "dependencies": { - "sprintf-js": "~1.0.2" - } + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "license": "Python-2.0" }, - "node_modules/arr-diff": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz", - "integrity": "sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA=", - "engines": { - "node": ">=0.10.0" - } + "node_modules/array-flatten": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-3.0.0.tgz", + "integrity": "sha512-zPMVc3ZYlGLNk4mpK1NzP2wg0ml9t7fUgDsayR5Y5rSzxQilzR9FGu/EH2jQOcKSAeAfWeylyW8juy3OkWRvNA==", + "license": "MIT" }, - "node_modules/arr-flatten": { - "version": "1.1.0", - "resolved": 
"https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.1.0.tgz", - "integrity": "sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/arr-union": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/arr-union/-/arr-union-3.1.0.tgz", - "integrity": "sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/array-equal": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/array-equal/-/array-equal-1.0.0.tgz", - "integrity": "sha1-jCpe8kcv2ep0KwTHenUJO6J1fJM=", - "dev": true - }, - "node_modules/array-flatten": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", - "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" - }, - "node_modules/array-unique": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz", - "integrity": "sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/arrify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=", + "node_modules/arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=", "engines": { "node": ">=0.10.0" } @@ -3986,29 +3816,12 @@ "node": ">=0.8" } }, - "node_modules/assign-symbols": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assign-symbols/-/assign-symbols-1.0.0.tgz", - "integrity": "sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/astral-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-1.0.0.tgz", - "integrity": "sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/async": { - "version": "2.6.3", - "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz", - "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==", + "node_modules/async-retry": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz", + "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==", "dependencies": { - "lodash": "^4.17.14" + "retry": "0.13.1" } }, "node_modules/asynckit": { @@ -4016,17 +3829,6 @@ "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" }, - "node_modules/atob": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz", - "integrity": "sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==", - "bin": { - "atob": "bin/atob.js" - }, - "engines": { - "node": ">= 4.5.0" - } - }, "node_modules/available-typed-arrays": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", @@ -4056,23 +3858,26 @@ "integrity": "sha512-wMHVg2EOHaMRxbzgFJ9gtjOOCrI80OHLG14rxi28XwOW8ux6IiEbRCGGGqCtdAIg4FQCbW20k9RsT4y3gJlFug==" }, "node_modules/axios": { - "version": "1.8.2", - "resolved": 
"https://registry.npmjs.org/axios/-/axios-1.8.2.tgz", - "integrity": "sha512-ls4GYBm5aig9vWx8AWDSGLpnpDQRtWAfrjU+EuytuODrFBkqesN2RkOQCBzrA1RQNHw1SmRMSDDDSwzNAYQ6Rg==", + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz", + "integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==", "license": "MIT", "dependencies": { "follow-redirects": "^1.15.6", - "form-data": "^4.0.0", + "form-data": "^4.0.4", "proxy-from-env": "^1.1.0" } }, "node_modules/axios/node_modules/form-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", - "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "license": "MIT", "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", "mime-types": "^2.1.12" }, "engines": { @@ -4080,57 +3885,33 @@ } }, "node_modules/babel-jest": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-25.3.0.tgz", - "integrity": "sha512-qiXeX1Cmw4JZ5yQ4H57WpkO0MZ61Qj+YnsVUwAMnDV5ls+yHon11XjarDdgP7H8lTmiEi6biiZA8y3Tmvx6pCg==", - "dev": true, - "dependencies": { - "@jest/transform": "^25.3.0", - "@jest/types": "^25.3.0", - "@types/babel__core": "^7.1.7", - "babel-plugin-istanbul": "^6.0.0", - "babel-preset-jest": "^25.3.0", - "chalk": "^3.0.0", + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-27.5.1.tgz", + "integrity": "sha512-cdQ5dXjGRd0IBRATiQ4mZGlGlRE8kJpjPOixdNRdT+m3UcNqmYWN6rK6nvtXYfY3D76cb8s/O1Ss8ea24PIwcg==", + "dev": true, + "dependencies": { + "@jest/transform": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/babel__core": "^7.1.14", + "babel-plugin-istanbul": "^6.1.1", + "babel-preset-jest": "^27.5.1", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", "slash": "^3.0.0" }, "engines": { - "node": ">= 8.3" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" }, "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/babel-jest/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", - "dev": true, - "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" - }, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/babel-jest/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", - "dev": true, - "dependencies": { - "@types/yargs-parser": "*" + "@babel/core": "^7.8.0" } }, "node_modules/babel-jest/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": 
"sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -4141,16 +3922,19 @@ } }, "node_modules/babel-jest/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, "node_modules/babel-jest/node_modules/color-convert": { @@ -4181,9 +3965,9 @@ } }, "node_modules/babel-jest/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { "has-flag": "^4.0.0" @@ -4193,15 +3977,15 @@ } }, "node_modules/babel-plugin-istanbul": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.0.0.tgz", - "integrity": "sha512-AF55rZXpe7trmEylbaE1Gv54wn6rwU03aptvRoVIGP8YykoSxqdVLV1TfwflBCE/QtHmqtP8SWlTENqbK8GCSQ==", + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", + "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.0.0", "@istanbuljs/load-nyc-config": "^1.0.0", "@istanbuljs/schema": "^0.1.2", - "istanbul-lib-instrument": "^4.0.0", + "istanbul-lib-instrument": "^5.0.4", "test-exclude": "^6.0.0" }, "engines": { @@ -4209,49 +3993,57 @@ } }, "node_modules/babel-plugin-jest-hoist": { - "version": "25.2.6", - "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-25.2.6.tgz", - "integrity": "sha512-qE2xjMathybYxjiGFJg0mLFrz0qNp83aNZycWDY/SuHiZNq+vQfRQtuINqyXyue1ELd8Rd+1OhFSLjms8msMbw==", + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.5.1.tgz", + "integrity": "sha512-50wCwD5EMNW4aRpOwtqzyZHIewTYNxLA4nhB+09d8BIssfNfzBRhkBIHiaPv1Si226TQSvp8gxAJm2iY2qs2hQ==", "dev": true, "dependencies": { + "@babel/template": "^7.3.3", + "@babel/types": "^7.3.3", + "@types/babel__core": "^7.0.0", "@types/babel__traverse": "^7.0.6" }, "engines": { - "node": ">= 8.3" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, "node_modules/babel-preset-current-node-syntax": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-0.1.2.tgz", - "integrity": "sha512-u/8cS+dEiK1SFILbOC8/rUI3ml9lboKuuMvZ/4aQnQmhecQAgPw5ew066C1ObnEAUmlx7dv/s2z52psWEtLNiw==", + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.1.0.tgz", + "integrity": "sha512-ldYss8SbBlWva1bs28q78Ju5Zq1F+8BrqBZZ0VFhLBvhh6lCpC2o3gDJi/5DRLs9FgYZCnmPYIVFU4lRXCkyUw==", "dev": true, "dependencies": { "@babel/plugin-syntax-async-generators": "^7.8.4", "@babel/plugin-syntax-bigint": "^7.8.3", - "@babel/plugin-syntax-class-properties": "^7.8.3", + "@babel/plugin-syntax-class-properties": "^7.12.13", + "@babel/plugin-syntax-class-static-block": "^7.14.5", + "@babel/plugin-syntax-import-attributes": "^7.24.7", + "@babel/plugin-syntax-import-meta": "^7.10.4", "@babel/plugin-syntax-json-strings": "^7.8.3", - "@babel/plugin-syntax-logical-assignment-operators": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", - "@babel/plugin-syntax-numeric-separator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.10.4", "@babel/plugin-syntax-object-rest-spread": "^7.8.3", "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", - "@babel/plugin-syntax-optional-chaining": "^7.8.3" + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5", + "@babel/plugin-syntax-top-level-await": "^7.14.5" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "node_modules/babel-preset-jest": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-25.3.0.tgz", - "integrity": "sha512-tjdvLKNMwDI9r+QWz9sZUQGTq1dpoxjUqFUpEasAc7MOtHg9XuLT2fx0udFG+k1nvMV0WvHHVAN7VmCZ+1Zxbw==", + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-27.5.1.tgz", + "integrity": "sha512-Nptf2FzlPCWYuJg41HBqXVT8ym6bXOevuCTbhxlUpjwtysGaIWFvDEjp4y+G7fl13FgOdjs7P/DmErqH7da0Ag==", "dev": true, "dependencies": { - "babel-plugin-jest-hoist": "^25.2.6", - "babel-preset-current-node-syntax": "^0.1.2" + "babel-plugin-jest-hoist": "^27.5.1", + "babel-preset-current-node-syntax": "^1.0.0" }, "engines": { - "node": ">= 8.3" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" }, "peerDependencies": { "@babel/core": "^7.0.0" @@ -4262,71 +4054,6 @@ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" }, - "node_modules/base": { - "version": "0.11.2", - "resolved": "https://registry.npmjs.org/base/-/base-0.11.2.tgz", - "integrity": "sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==", - "dependencies": { - "cache-base": "^1.0.1", - "class-utils": "^0.3.5", - "component-emitter": "^1.2.1", - "define-property": "^1.0.0", - "isobject": "^3.0.1", - "mixin-deep": "^1.2.0", - "pascalcase": "^0.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/base/node_modules/define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", - "dependencies": { - "is-descriptor": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/base/node_modules/is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "deprecated": "Please upgrade to v1.0.1", - "dependencies": { - "kind-of": "^6.0.0" - }, - 
"engines": { - "node": ">=0.10.0" - } - }, - "node_modules/base/node_modules/is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "deprecated": "Please upgrade to v1.0.1", - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/base/node_modules/is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dependencies": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/base64-js": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.1.tgz", @@ -4341,9 +4068,10 @@ } }, "node_modules/bignumber.js": { - "version": "7.2.1", - "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz", - "integrity": "sha512-S4XzBk5sMB+Rcb/LNcpzXr57VRTxgAvaAEDAl1AwRx27j00hT84O6OkteE7u8UB3NuaaygCRrEpqox4uDOrbdQ==", + "version": "9.3.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.3.1.tgz", + "integrity": "sha512-Ko0uX15oIUS7wJ3Rb30Fs6SkVbLmPBAKdlm7q9+ak9bbIeFf0MwuBsQV6z7+X768/cHsfg+WlysDWJcmthjsjQ==", + "license": "MIT", "engines": { "node": "*" } @@ -4393,47 +4121,61 @@ } }, "node_modules/body-parser": { - "version": "1.20.3", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", - "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz", + "integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==", + "license": "MIT", "dependencies": { - "bytes": "3.1.2", - "content-type": "~1.0.5", - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "on-finished": "2.4.1", - "qs": "6.13.0", - "raw-body": "2.5.2", - "type-is": "~1.6.18", - "unpipe": "1.0.0" + "bytes": "^3.1.2", + "content-type": "^1.0.5", + "debug": "^4.4.0", + "http-errors": "^2.0.0", + "iconv-lite": "^0.6.3", + "on-finished": "^2.4.1", + "qs": "^6.14.0", + "raw-body": "^3.0.0", + "type-is": "^2.0.0" }, "engines": { - "node": ">= 0.8", - "npm": "1.2.8000 || >= 1.4.16" + "node": ">=18" } }, "node_modules/body-parser/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", "dependencies": { - "ms": "2.0.0" + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } } }, - "node_modules/body-parser/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": 
"sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + "node_modules/body-parser/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } }, "node_modules/body-parser/node_modules/qs": { - "version": "6.13.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", - "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", + "license": "BSD-3-Clause", "dependencies": { - "side-channel": "^1.0.6" + "side-channel": "^1.1.0" }, "engines": { "node": ">=0.6" @@ -4448,43 +4190,25 @@ "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==" }, "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "node_modules/braces": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", - "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", - "dependencies": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/braces/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "license": "MIT", "dependencies": { - "is-extendable": "^0.1.0" + "fill-range": "^7.1.1" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/browser-or-node": { @@ -4499,21 +4223,6 @@ "integrity": "sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==", "dev": true }, - "node_modules/browser-resolve": { - "version": "1.11.3", - "resolved": "https://registry.npmjs.org/browser-resolve/-/browser-resolve-1.11.3.tgz", - "integrity": "sha512-exDi1BYWB/6raKHmDTCicQfTkqwN5fioMFV4j8BsfMU4R2DK/QfZfK7kOVkmWCNANf0snkBzqGqAJBao9gZMdQ==", - "dev": true, - "dependencies": { - "resolve": "1.1.7" - } - }, - "node_modules/browser-resolve/node_modules/resolve": { - "version": "1.1.7", - "resolved": 
"https://registry.npmjs.org/resolve/-/resolve-1.1.7.tgz", - "integrity": "sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs=", - "dev": true - }, "node_modules/browserify-zlib": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.1.4.tgz", @@ -4522,6 +4231,38 @@ "pako": "~0.2.0" } }, + "node_modules/browserslist": { + "version": "4.25.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.25.1.tgz", + "integrity": "sha512-KGj0KoOMXLpSNkkEI6Z6mShmQy0bc1I+T7K9N81k4WWMrfz+6fQ6es80B/YLAeRoKvjYE1YSHHOW1qe9xIVzHw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "caniuse-lite": "^1.0.30001726", + "electron-to-chromium": "^1.5.173", + "node-releases": "^2.0.19", + "update-browserslist-db": "^1.1.3" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, "node_modules/bs-logger": { "version": "0.2.6", "resolved": "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz", @@ -4555,7 +4296,8 @@ "node_modules/buffer-equal-constant-time": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", - "integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=" + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==", + "license": "BSD-3-Clause" }, "node_modules/buffer-from": { "version": "1.1.1", @@ -4583,29 +4325,11 @@ "version": "3.1.2", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "license": "MIT", "engines": { "node": ">= 0.8" } }, - "node_modules/cache-base": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz", - "integrity": "sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==", - "dependencies": { - "collection-visit": "^1.0.0", - "component-emitter": "^1.2.1", - "get-value": "^2.0.6", - "has-value": "^1.0.0", - "isobject": "^3.0.1", - "set-value": "^2.0.0", - "to-object-path": "^0.3.0", - "union-value": "^1.0.0", - "unset-value": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/cacheable-lookup": { "version": "5.0.4", "resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz", @@ -4649,9 +4373,9 @@ } }, "node_modules/cacheable-request/node_modules/pump": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.2.tgz", - "integrity": "sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.3.tgz", + "integrity": "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==", "license": "MIT", "dependencies": { "end-of-stream": "^1.1.0", @@ -4723,17 +4447,25 @@ "node": ">=6" } }, - "node_modules/capture-exit": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/capture-exit/-/capture-exit-2.0.0.tgz", - "integrity": "sha512-PiT/hQmTonHhl/HFGN+Lx3JJUznrVYJ3+AQsnthneZbvW7x+f08Tk7yLJTLEOUvBTbduLeeBkxEaYXUOUrRq6g==", + 
"node_modules/caniuse-lite": { + "version": "1.0.30001727", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001727.tgz", + "integrity": "sha512-pB68nIHmbN6L/4C6MH1DokyR3bYqFwjaSs/sWDHGj4CTcFtQUQMuJftVwWkXq7mNWOybD3KhUv3oWHoGxgP14Q==", "dev": true, - "dependencies": { - "rsvp": "^4.8.4" - }, - "engines": { - "node": "6.* || 8.* || >= 10.*" - } + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ] }, "node_modules/caseless": { "version": "0.12.0", @@ -4754,6 +4486,15 @@ "node": ">=4" } }, + "node_modules/char-regex": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz", + "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", + "dev": true, + "engines": { + "node": ">=10" + } + }, "node_modules/chownr": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", @@ -4764,35 +4505,25 @@ } }, "node_modules/ci-info": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz", - "integrity": "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==", - "dev": true - }, - "node_modules/class-utils": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/class-utils/-/class-utils-0.3.6.tgz", - "integrity": "sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==", - "dependencies": { - "arr-union": "^3.1.0", - "define-property": "^0.2.5", - "isobject": "^3.0.0", - "static-extend": "^0.1.1" - }, + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", + "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node_modules/class-utils/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } + "node_modules/cjs-module-lexer": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.3.tgz", + "integrity": "sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==", + "dev": true }, "node_modules/clean-stack": { "version": "2.2.0", @@ -4804,14 +4535,14 @@ } }, "node_modules/cliui": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", - "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", "dev": true, "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.0", - "wrap-ansi": "^6.2.0" + "wrap-ansi": "^7.0.0" } }, "node_modules/clone-response": { @@ -4829,7 +4560,7 @@ "node_modules/co": { "version": "4.6.0", "resolved": 
"https://registry.npmjs.org/co/-/co-4.6.0.tgz", - "integrity": "sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ=", + "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==", "dev": true, "engines": { "iojs": ">= 1.0.0", @@ -4837,23 +4568,11 @@ } }, "node_modules/collect-v8-coverage": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz", - "integrity": "sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz", + "integrity": "sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q==", "dev": true }, - "node_modules/collection-visit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz", - "integrity": "sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA=", - "dependencies": { - "map-visit": "^1.0.0", - "object-visit": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/color-convert": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", @@ -4889,7 +4608,8 @@ "node_modules/component-emitter": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", - "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==" + "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==", + "dev": true }, "node_modules/compressible": { "version": "2.0.18", @@ -4935,25 +4655,36 @@ } }, "node_modules/configstore": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/configstore/-/configstore-4.0.0.tgz", - "integrity": "sha512-CmquAXFBocrzaSM8mtGPMM/HiWmyIpr4CcJl/rgY2uCObZ/S7cKU0silxslqJejl+t/T9HS8E0PUNQD81JGUEQ==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/configstore/-/configstore-5.0.1.tgz", + "integrity": "sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA==", + "license": "BSD-2-Clause", "dependencies": { - "dot-prop": "^4.1.0", + "dot-prop": "^5.2.0", "graceful-fs": "^4.1.2", - "make-dir": "^1.0.0", - "unique-string": "^1.0.0", - "write-file-atomic": "^2.0.0", - "xdg-basedir": "^3.0.0" + "make-dir": "^3.0.0", + "unique-string": "^2.0.0", + "write-file-atomic": "^3.0.0", + "xdg-basedir": "^4.0.0" }, "engines": { - "node": ">=6" + "node": ">=8" + } + }, + "node_modules/configstore/node_modules/xdg-basedir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz", + "integrity": "sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==", + "license": "MIT", + "engines": { + "node": ">=8" } }, "node_modules/content-disposition": { - "version": "0.5.4", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", - "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz", + "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==", + "license": "MIT", "dependencies": { "safe-buffer": "5.2.1" }, @@ -4978,37 +4709,41 @@ "type": 
"consulting", "url": "https://feross.org/support" } - ] + ], + "license": "MIT" }, "node_modules/content-type": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "license": "MIT", "engines": { "node": ">= 0.6" } }, "node_modules/convert-source-map": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.7.0.tgz", - "integrity": "sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA==", - "dev": true, - "dependencies": { - "safe-buffer": "~5.1.1" - } + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", + "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==", + "dev": true }, "node_modules/cookie": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", - "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", + "license": "MIT", "engines": { "node": ">= 0.6" } }, "node_modules/cookie-signature": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", - "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", + "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", + "license": "MIT", + "engines": { + "node": ">=6.6.0" + } }, "node_modules/cookiejar": { "version": "2.1.4", @@ -5016,56 +4751,47 @@ "integrity": "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==", "dev": true }, - "node_modules/copy-descriptor": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/copy-descriptor/-/copy-descriptor-0.1.1.tgz", - "integrity": "sha1-Z29us8OZl8LuGsOpJP1hJHSPV40=", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, "node_modules/cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.5.tgz", + "integrity": "sha512-ZVJrKKYunU38/76t0RMOulHOnUcbU9GbpWKAOZ0mhjr7CX6FVrH+4FrAapSOekrgFQ3f/8gwMEuIft0aKq6Hug==", + "license": "MIT", "dependencies": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" }, "engines": { - "node": ">=4.8" + "node": ">= 8" } }, - "node_modules/cross-spawn/node_modules/which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": 
"sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "which": "bin/which" + "node_modules/cross-spawn/node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", + "engines": { + "node": ">=8" } }, "node_modules/crypto-js": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/crypto-js/-/crypto-js-3.3.0.tgz", - "integrity": "sha512-DIT51nX0dCfKltpRiXV+/TVZq+Qq2NgF4644+K7Ttnla7zEzqc+kjJyiB96BHNyUTBxyjzRcZYpUdZa+QAqi6Q==" + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/crypto-js/-/crypto-js-4.2.0.tgz", + "integrity": "sha512-KALDyEYgpY+Rlob/iriUtjV6d5Eq+Y191A5g4UqLAi8CyGP9N1+FdVbkc1SxKc2r4YAYqG8JzO2KGL+AizD70Q==", + "license": "MIT" }, "node_modules/crypto-random-string": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-1.0.0.tgz", - "integrity": "sha1-ojD2T1aDEOFJgAmUB5DsmVRbyn4=", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz", + "integrity": "sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==", + "license": "MIT", "engines": { - "node": ">=4" + "node": ">=8" } }, "node_modules/cssom": { @@ -5075,9 +4801,9 @@ "dev": true }, "node_modules/cssstyle": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-2.2.0.tgz", - "integrity": "sha512-sEb3XFPx3jNnCAMtqrXPDeSgQr+jojtCeNf8cvMNMh1cG970+lljssvQDzPq6lmmJu2Vhqood/gtEomBiHOGnA==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-2.3.0.tgz", + "integrity": "sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A==", "dev": true, "dependencies": { "cssom": "~0.3.6" @@ -5104,20 +4830,24 @@ } }, "node_modules/data-urls": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-1.1.0.tgz", - "integrity": "sha512-YTWYI9se1P55u58gL5GkQHW4P6VJBJ5iBT+B5a7i2Tjadhv52paJG0qHX4A0OR6/t52odI64KP2YvFpkDOi3eQ==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-2.0.0.tgz", + "integrity": "sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ==", "dev": true, "dependencies": { - "abab": "^2.0.0", - "whatwg-mimetype": "^2.2.0", - "whatwg-url": "^7.0.0" + "abab": "^2.0.3", + "whatwg-mimetype": "^2.3.0", + "whatwg-url": "^8.0.0" + }, + "engines": { + "node": ">=10" } }, "node_modules/date-and-time": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/date-and-time/-/date-and-time-0.6.3.tgz", - "integrity": "sha512-lcWy3AXDRJOD7MplwZMmNSRM//kZtJaLz4n6D1P5z9wEmZGBKhJRBIr1Xs9KNQJmdXPblvgffynYji4iylUTcA==" + "version": "0.14.2", + "resolved": "https://registry.npmjs.org/date-and-time/-/date-and-time-0.14.2.tgz", + "integrity": "sha512-EFTCh9zRSEpGPmJaexg7HTuzZHh6cnJj1ui7IGCFNXzd2QdpsNh05Db5TF3xzJm30YN+A8/6xHSuRcQqoc3kFA==", + "license": "MIT" }, "node_modules/debug": { "version": "3.2.6", @@ -5128,14 +4858,11 @@ "ms": "^2.1.1" } }, - "node_modules/decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", - "dev": true, - "engines": { - "node": 
">=0.10.0" - } + "node_modules/decimal.js": { + "version": "10.6.0", + "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.6.0.tgz", + "integrity": "sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg==", + "dev": true }, "node_modules/decode-uri-component": { "version": "0.2.2", @@ -5172,16 +4899,16 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/deep-is": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", - "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=", + "node_modules/dedent": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-0.7.0.tgz", + "integrity": "sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA==", "dev": true }, "node_modules/deepmerge": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz", - "integrity": "sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", "dev": true, "engines": { "node": ">=0.10.0" @@ -5212,55 +4939,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/define-property": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-2.0.2.tgz", - "integrity": "sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ==", - "dependencies": { - "is-descriptor": "^1.0.2", - "isobject": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/define-property/node_modules/is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "deprecated": "Please upgrade to v1.0.1", - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/define-property/node_modules/is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "deprecated": "Please upgrade to v1.0.1", - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/define-property/node_modules/is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dependencies": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", @@ -5273,19 +4951,11 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "license": "MIT", "engines": { "node": ">= 0.8" } }, - "node_modules/destroy": 
{ - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", - "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", - "engines": { - "node": ">= 0.8", - "npm": "1.2.8000 || >= 1.4.16" - } - }, "node_modules/detect-newline": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", @@ -5305,33 +4975,46 @@ } }, "node_modules/diff-sequences": { - "version": "24.9.0", - "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-24.9.0.tgz", - "integrity": "sha512-Dj6Wk3tWyTE+Fo1rW8v0Xhwk80um6yFYKbuAxc9c3EZxIHFDYwbi34Uk42u1CdnIiVorvt4RmlSDjIPyzGC2ew==", + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.5.1.tgz", + "integrity": "sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ==", "dev": true, "engines": { - "node": ">= 6" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, "node_modules/domexception": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/domexception/-/domexception-1.0.1.tgz", - "integrity": "sha512-raigMkn7CJNNo6Ihro1fzG7wr3fHuYVytzquZKX5n0yizGsTcYgzdIUwj1X9pK0VvjeihV+XiclP+DjwbsSKug==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/domexception/-/domexception-2.0.1.tgz", + "integrity": "sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg==", "deprecated": "Use your platform's native DOMException instead", "dev": true, "dependencies": { - "webidl-conversions": "^4.0.2" + "webidl-conversions": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/domexception/node_modules/webidl-conversions": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-5.0.0.tgz", + "integrity": "sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA==", + "dev": true, + "engines": { + "node": ">=8" } }, "node_modules/dot-prop": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-4.2.1.tgz", - "integrity": "sha512-l0p4+mIuJIua0mhxGoh4a+iNL9bmeK5DvnSVQa6T0OhrVmaEa1XScX5Etc673FePCJOArq/4Pa2cLGODUWTPOQ==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", + "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", + "license": "MIT", "dependencies": { - "is-obj": "^1.0.0" + "is-obj": "^2.0.0" }, "engines": { - "node": ">=4" + "node": ">=8" } }, "node_modules/dunder-proto": { @@ -5349,14 +5032,27 @@ } }, "node_modules/duplexify": { - "version": "3.7.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz", - "integrity": "sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g==", + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz", + "integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==", + "dependencies": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.2" + } + }, + "node_modules/duplexify/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": 
"sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", "dependencies": { - "end-of-stream": "^1.0.0", - "inherits": "^2.0.1", - "readable-stream": "^2.0.0", - "stream-shift": "^1.0.0" + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" } }, "node_modules/ecc-jsbn": { @@ -5379,7 +5075,26 @@ "node_modules/ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "license": "MIT" + }, + "node_modules/electron-to-chromium": { + "version": "1.5.185", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.185.tgz", + "integrity": "sha512-dYOZfUk57hSMPePoIQ1fZWl1Fkj+OshhEVuPacNKWzC1efe56OsHY3l/jCfiAgIICOU3VgOIdoq7ahg7r7n6MQ==", + "dev": true + }, + "node_modules/emittery": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/emittery/-/emittery-0.8.1.tgz", + "integrity": "sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/emittery?sponsor=1" + } }, "node_modules/emoji-regex": { "version": "8.0.0", @@ -5391,18 +5106,11 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "license": "MIT", "engines": { "node": ">= 0.8" } }, - "node_modules/encoding": { - "version": "0.1.12", - "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.12.tgz", - "integrity": "sha1-U4tm8+5izRq1HsMjgp0flIDHS+s=", - "dependencies": { - "iconv-lite": "~0.4.13" - } - }, "node_modules/end-of-stream": { "version": "1.4.4", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", @@ -5416,6 +5124,15 @@ "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz", "integrity": "sha1-6WQhkyWiHQX0RGai9obtbOX13R0=" }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, "node_modules/es-define-property": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", @@ -5445,6 +5162,20 @@ "node": ">= 0.4" } }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/es6-promise": { "version": "4.2.8", "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", @@ -5458,10 +5189,20 @@ "es6-promise": "^4.0.3" } }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": 
"sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, "node_modules/escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT" }, "node_modules/escape-string-regexp": { "version": "1.0.5", @@ -5473,22 +5214,21 @@ } }, "node_modules/escodegen": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.14.1.tgz", - "integrity": "sha512-Bmt7NcRySdIfNPfU2ZoXDrrXsG9ZjvDxcAlMfDUgRBjLOWTuIACXPBFJH7Z+cLb40JeQco5toikyc9t9P8E9SQ==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.1.0.tgz", + "integrity": "sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==", "dev": true, "dependencies": { "esprima": "^4.0.1", - "estraverse": "^4.2.0", - "esutils": "^2.0.2", - "optionator": "^0.8.1" + "estraverse": "^5.2.0", + "esutils": "^2.0.2" }, "bin": { "escodegen": "bin/escodegen.js", "esgenerate": "bin/esgenerate.js" }, "engines": { - "node": ">=4.0" + "node": ">=6.0" }, "optionalDependencies": { "source-map": "~0.6.1" @@ -5498,6 +5238,7 @@ "version": "4.0.1", "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, "bin": { "esparse": "bin/esparse.js", "esvalidate": "bin/esvalidate.js" @@ -5507,9 +5248,9 @@ } }, "node_modules/estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", "dev": true, "engines": { "node": ">=4.0" @@ -5528,6 +5269,7 @@ "version": "1.8.1", "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "license": "MIT", "engines": { "node": ">= 0.6" } @@ -5545,399 +5287,120 @@ "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.0.tgz", "integrity": "sha512-qerSRB0p+UDEssxTtm6EDKcE7W4OaoisfIMl4CngyEhjpYglocpNg6UEqCvemdGhosAsg4sO2dXJOdyBifPGCg==" }, - "node_modules/exec-sh": { - "version": "0.3.4", - "resolved": "https://registry.npmjs.org/exec-sh/-/exec-sh-0.3.4.tgz", - "integrity": "sha512-sEFIkc61v75sWeOe72qyrqg2Qg0OuLESziUDk/O/z2qgS15y2gWVFrI6f2Qn/qw/0/NCfCEsmNA4zOjkwEZT1A==", - "dev": true - }, - "node_modules/execa": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", - "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", - "dependencies": { - "cross-spawn": "^6.0.0", - "get-stream": "^4.0.0", - "is-stream": "^1.1.0", - "npm-run-path": "^2.0.0", - "p-finally": "^1.0.0", - "signal-exit": "^3.0.0", - "strip-eof": "^1.0.0" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/exit": { "version": "0.1.2", "resolved": 
"https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", - "integrity": "sha1-BjJjj42HfMghB9MKD/8aF8uhzQw=", + "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==", "dev": true, "engines": { "node": ">= 0.8.0" } }, - "node_modules/expand-brackets": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-2.1.4.tgz", - "integrity": "sha1-t3c14xXOMPa27/D4OwQVGiJEliI=", + "node_modules/expect": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/expect/-/expect-27.5.1.tgz", + "integrity": "sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw==", + "dev": true, "dependencies": { - "debug": "^2.3.3", - "define-property": "^0.2.5", - "extend-shallow": "^2.0.1", - "posix-character-classes": "^0.1.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" + "@jest/types": "^27.5.1", + "jest-get-type": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/expand-brackets/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dependencies": { - "ms": "2.0.0" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/expand-brackets/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", + "node_modules/express": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz", + "integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==", + "license": "MIT", "dependencies": { - "is-descriptor": "^0.1.0" + "accepts": "^2.0.0", + "body-parser": "^2.2.0", + "content-disposition": "^1.0.0", + "content-type": "^1.0.5", + "cookie": "^0.7.1", + "cookie-signature": "^1.2.1", + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "finalhandler": "^2.1.0", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "merge-descriptors": "^2.0.0", + "mime-types": "^3.0.0", + "on-finished": "^2.4.1", + "once": "^1.4.0", + "parseurl": "^1.3.3", + "proxy-addr": "^2.0.7", + "qs": "^6.14.0", + "range-parser": "^1.2.1", + "router": "^2.2.0", + "send": "^1.1.0", + "serve-static": "^2.2.0", + "statuses": "^2.0.1", + "type-is": "^2.0.1", + "vary": "^1.1.2" + }, + "engines": { + "node": ">= 18" }, - "engines": { - "node": ">=0.10.0" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, - "node_modules/expand-brackets/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "node_modules/express/node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", "dependencies": { - "is-extendable": "^0.1.0" + "ms": "^2.1.3" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/expand-brackets/node_modules/ms": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" - }, - "node_modules/expect": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/expect/-/expect-25.3.0.tgz", - "integrity": "sha512-buboTXML2h/L0Kh44Ys2Cx49mX20ISc5KDirkxIs3Q9AJv0kazweUAbukegr+nHDOvFRKmxdojjIHCjqAceYfg==", - "dev": true, - "dependencies": { - "@jest/types": "^25.3.0", - "ansi-styles": "^4.0.0", - "jest-get-type": "^25.2.6", - "jest-matcher-utils": "^25.3.0", - "jest-message-util": "^25.3.0", - "jest-regex-util": "^25.2.6" + "node": ">=6.0" }, - "engines": { - "node": ">= 8.3" + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } } }, - "node_modules/expect/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", - "dev": true, + "node_modules/express/node_modules/mime-types": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "license": "MIT", "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "mime-db": "^1.54.0" }, "engines": { - "node": ">= 8.3" - } - }, - "node_modules/expect/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", - "dev": true, - "dependencies": { - "@types/yargs-parser": "*" + "node": ">= 0.6" } }, - "node_modules/expect/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", - "dev": true, + "node_modules/express/node_modules/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", + "license": "BSD-3-Clause", "dependencies": { - "@types/color-name": "^1.1.1", - "color-convert": "^2.0.1" + "side-channel": "^1.1.0" }, "engines": { - "node": ">=8" + "node": ">=0.6" }, "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/expect/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=8" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/expect/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/expect/node_modules/color-name": { - "version": "1.1.4", - "resolved": 
"https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "node_modules/expect/node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/expect/node_modules/jest-get-type": { - "version": "25.2.6", - "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-25.2.6.tgz", - "integrity": "sha512-DxjtyzOHjObRM+sM1knti6or+eOgcGU4xVSb2HNP1TqO4ahsT+rqZg+nyqHWJSvWgKC5cG3QjGFBqxLghiF/Ig==", - "dev": true, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/expect/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", - "dev": true, - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/express": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/express/-/express-4.21.0.tgz", - "integrity": "sha512-VqcNGcj/Id5ZT1LZ/cfihi3ttTn+NJmkli2eZADigjq29qTlWi/hAQ43t/VLPq8+UX06FCEx3ByOYet6ZFblng==", - "dependencies": { - "accepts": "~1.3.8", - "array-flatten": "1.1.1", - "body-parser": "1.20.3", - "content-disposition": "0.5.4", - "content-type": "~1.0.4", - "cookie": "0.6.0", - "cookie-signature": "1.0.6", - "debug": "2.6.9", - "depd": "2.0.0", - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "finalhandler": "1.3.1", - "fresh": "0.5.2", - "http-errors": "2.0.0", - "merge-descriptors": "1.0.3", - "methods": "~1.1.2", - "on-finished": "2.4.1", - "parseurl": "~1.3.3", - "path-to-regexp": "0.1.10", - "proxy-addr": "~2.0.7", - "qs": "6.13.0", - "range-parser": "~1.2.1", - "safe-buffer": "5.2.1", - "send": "0.19.0", - "serve-static": "1.16.2", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "type-is": "~1.6.18", - "utils-merge": "1.0.1", - "vary": "~1.1.2" - }, - "engines": { - "node": ">= 0.10.0" - } - }, - "node_modules/express/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/express/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" - }, - "node_modules/express/node_modules/qs": { - "version": "6.13.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", - "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", - "dependencies": { - "side-channel": "^1.0.6" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/express/node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "funding": [ - { - 
"type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/extend": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" - }, - "node_modules/extend-shallow": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz", - "integrity": "sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg=", - "dependencies": { - "assign-symbols": "^1.0.0", - "is-extendable": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/extend-shallow/node_modules/is-extendable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", - "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", - "dependencies": { - "is-plain-object": "^2.0.4" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/extglob": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/extglob/-/extglob-2.0.4.tgz", - "integrity": "sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw==", - "dependencies": { - "array-unique": "^0.3.2", - "define-property": "^1.0.0", - "expand-brackets": "^2.1.4", - "extend-shallow": "^2.0.1", - "fragment-cache": "^0.2.1", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/extglob/node_modules/define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", - "dependencies": { - "is-descriptor": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/extglob/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/extglob/node_modules/is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "deprecated": "Please upgrade to v1.0.1", - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/extglob/node_modules/is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "deprecated": "Please upgrade to v1.0.1", - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/extglob/node_modules/is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dependencies": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", 
- "kind-of": "^6.0.2" - }, - "engines": { - "node": ">=0.10.0" - } + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" }, "node_modules/extsprintf": { "version": "1.3.0", @@ -5957,12 +5420,6 @@ "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" }, - "node_modules/fast-levenshtein": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", - "dev": true - }, "node_modules/fast-text-encoding": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.0.tgz", @@ -5990,37 +5447,24 @@ } }, "node_modules/fb-watchman": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.1.tgz", - "integrity": "sha512-DkPJKQeY6kKwmuMretBhr7G6Vodr7bFwDYTXIkfG1gjvNpaxBTQV3PbXg6bR1c1UP4jPOX0jHUbbHANL9vRjVg==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", + "integrity": "sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", "dev": true, "dependencies": { "bser": "2.1.1" } }, "node_modules/fill-range": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", - "integrity": "sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=", - "dependencies": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/fill-range/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "license": "MIT", "dependencies": { - "is-extendable": "^0.1.0" + "to-regex-range": "^5.0.1" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/filter-obj": { @@ -6033,35 +5477,39 @@ } }, "node_modules/finalhandler": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", - "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", + "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==", + "license": "MIT", "dependencies": { - "debug": "2.6.9", - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "on-finished": "2.4.1", - "parseurl": "~1.3.3", - "statuses": "2.0.1", - "unpipe": "~1.0.0" + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "on-finished": "^2.4.1", + "parseurl": "^1.3.3", + "statuses": "^2.0.1" }, "engines": { "node": ">= 0.8" } }, "node_modules/finalhandler/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": 
"sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", "dependencies": { - "ms": "2.0.0" + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } } }, - "node_modules/finalhandler/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" - }, "node_modules/find-up": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", @@ -6103,14 +5551,6 @@ "is-callable": "^1.1.3" } }, - "node_modules/for-in": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz", - "integrity": "sha1-gQaNKVqBQuwKxybG4iAMMPttXoA=", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/forever-agent": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", @@ -6120,18 +5560,43 @@ } }, "node_modules/form-data": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", - "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "version": "2.5.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.4.tgz", + "integrity": "sha512-Y/3MmRiR8Nd+0CUtrbvcKtKzLWiUfpQ7DFVggH8PwmGt/0r7RSy32GuP4hpCJlQNEBusisSx1DLtD8uD386HJQ==", + "deprecated": "This version has an incorrect dependency; please use v2.5.5", + "license": "MIT", "dependencies": { "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "has-own": "^1.0.1", + "mime-types": "^2.1.35", + "safe-buffer": "^5.2.1" }, "engines": { "node": ">= 0.12" } }, + "node_modules/form-data/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, "node_modules/formidable": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/formidable/-/formidable-1.2.2.tgz", @@ -6150,23 +5615,13 @@ "node": ">= 0.6" } }, - "node_modules/fragment-cache": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/fragment-cache/-/fragment-cache-0.2.1.tgz", - "integrity": "sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk=", - "dependencies": { - "map-cache": "^0.2.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/fresh": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", + "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", + 
"license": "MIT", "engines": { - "node": ">= 0.6" + "node": ">= 0.8" } }, "node_modules/fs-constants": { @@ -6198,21 +5653,15 @@ "node": ">=8" } }, - "node_modules/fs-minipass/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "license": "ISC" - }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" }, "node_modules/fsevents": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.1.2.tgz", - "integrity": "sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA==", + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", "dev": true, "hasInstallScript": true, "optional": true, @@ -6232,1809 +5681,1009 @@ } }, "node_modules/gaxios": { - "version": "1.8.4", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-1.8.4.tgz", - "integrity": "sha512-BoENMnu1Gav18HcpV9IleMPZ9exM+AvUjrAOV4Mzs/vfz2Lu/ABv451iEXByKiMPn2M140uul1txXCg83sAENw==", + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.3.3.tgz", + "integrity": "sha512-gSaYYIO1Y3wUtdfHmjDUZ8LWaxJQpiavzbF5Kq53akSzvmVg0RfyOcFDbO1KJ/KCGRFz2qG+lS81F0nkr7cRJA==", + "license": "Apache-2.0", "dependencies": { "abort-controller": "^3.0.0", "extend": "^3.0.2", - "https-proxy-agent": "^2.2.1", - "node-fetch": "^2.3.0" + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.7" + }, + "engines": { + "node": ">=10" } }, - "node_modules/gcp-metadata": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-1.0.0.tgz", - "integrity": "sha512-Q6HrgfrCQeEircnNP3rCcEgiDv7eF9+1B+1MMgpE190+/+0mjQR8PxeOaRgxZWmdDAF9EIryHB9g1moPiw1SbQ==", + "node_modules/gaxios/node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "license": "MIT", "dependencies": { - "gaxios": "^1.0.2", - "json-bigint": "^0.3.0" + "debug": "4" }, "engines": { - "node": ">=6" + "node": ">= 6.0.0" } }, - "node_modules/gcs-resumable-upload": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/gcs-resumable-upload/-/gcs-resumable-upload-1.1.0.tgz", - "integrity": "sha512-uBz7uHqp44xjSDzG3kLbOYZDjxxR/UAGbB47A0cC907W6yd2LkcyFDTHg+bjivkHMwiJlKv4guVWcjPCk2zScg==", - "deprecated": "gcs-resumable-upload is deprecated. 
Support will end on 11/01/2023", + "node_modules/gaxios/node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", "dependencies": { - "abort-controller": "^2.0.2", - "configstore": "^4.0.0", - "gaxios": "^1.5.0", - "google-auth-library": "^3.0.0", - "pumpify": "^1.5.1", - "stream-events": "^1.0.4" - }, - "bin": { - "gcs-upload": "build/src/cli.js" + "ms": "^2.1.3" }, "engines": { - "node": ">=6" + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } } }, - "node_modules/gcs-resumable-upload/node_modules/abort-controller": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-2.0.3.tgz", - "integrity": "sha512-EPSq5wr2aFyAZ1PejJB32IX9Qd4Nwus+adnp7STYFM5/23nLPBazqZ1oor6ZqbH+4otaaGXTlC8RN5hq3C8w9Q==", + "node_modules/gaxios/node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "license": "MIT", "dependencies": { - "event-target-shim": "^5.0.0" + "agent-base": "6", + "debug": "4" }, "engines": { - "node": ">=6.5" + "node": ">= 6" } }, - "node_modules/gensync": { - "version": "1.0.0-beta.1", - "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.1.tgz", - "integrity": "sha512-r8EC6NO1sngH/zdD9fiRDLdcgnbayXah+mLgManTaIZJqEC1MZstmnox8KpnI2/fxQwrp5OpCOYWLp4rBl4Jcg==", - "dev": true, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", - "dev": true, - "engines": { - "node": "6.* || 8.* || >= 10.*" - } - }, - "node_modules/get-intrinsic": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.7.tgz", - "integrity": "sha512-VW6Pxhsrk0KAOqs3WEd0klDiF/+V7gQOpAvY1jVU/LHmaD/kQO4523aiJuikX/QAKYiW6x8Jh+RJej1almdtCA==", - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.1", - "es-define-property": "^1.0.1", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.0.0", - "function-bind": "^1.1.2", - "get-proto": "^1.0.0", - "gopd": "^1.2.0", - "has-symbols": "^1.1.0", - "hasown": "^2.0.2", - "math-intrinsics": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/get-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", - "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", - "license": "MIT", - "dependencies": { - "dunder-proto": "^1.0.1", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/get-stream": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", - "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", - "dependencies": { - "pump": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/get-stream/node_modules/pump": { - "version": "3.0.0", 
- "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "dependencies": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, - "node_modules/get-value": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz", - "integrity": "sha1-3BXKHGcjh8p2vTesCjlbogQqLCg=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/getpass": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", - "dependencies": { - "assert-plus": "^1.0.0" - } - }, - "node_modules/glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "deprecated": "Glob versions prior to v9 are no longer supported", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/google-auth-library": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-3.1.2.tgz", - "integrity": "sha512-cDQMzTotwyWMrg5jRO7q0A4TL/3GWBgO7I7q5xGKNiiFf9SmGY/OJ1YsLMgI2MVHHsEGyrqYnbnmV1AE+Z6DnQ==", - "dependencies": { - "base64-js": "^1.3.0", - "fast-text-encoding": "^1.0.0", - "gaxios": "^1.2.1", - "gcp-metadata": "^1.0.0", - "gtoken": "^2.3.2", - "https-proxy-agent": "^2.2.1", - "jws": "^3.1.5", - "lru-cache": "^5.0.0", - "semver": "^5.5.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/google-p12-pem": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-1.0.4.tgz", - "integrity": "sha512-SwLAUJqUfTB2iS+wFfSS/G9p7bt4eWcc2LyfvmUXe7cWp6p3mpxDo6LLI29MXdU6wvPcQ/up298X7GMC5ylAlA==", - "deprecated": "Package is no longer maintained", - "dependencies": { - "node-forge": "^0.8.0", - "pify": "^4.0.0" - }, - "bin": { - "gp12-pem": "build/src/bin/gp12-pem.js" - } - }, - "node_modules/gopd": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", - "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/got": { - "version": "11.8.6", - "resolved": "https://registry.npmjs.org/got/-/got-11.8.6.tgz", - "integrity": "sha512-6tfZ91bOr7bOXnK7PRDCGBLa1H4U080YHNaAQ2KsMGlLEzRbk44nsZF2E1IeRc3vtJHPVbKCYgdFbaGO2ljd8g==", - "license": "MIT", - "dependencies": { - "@sindresorhus/is": "^4.0.0", - "@szmarczak/http-timer": "^4.0.5", - "@types/cacheable-request": "^6.0.1", - "@types/responselike": "^1.0.0", - "cacheable-lookup": "^5.0.3", - "cacheable-request": "^7.0.2", - "decompress-response": "^6.0.0", - "http2-wrapper": "^1.0.0-beta.5.2", - "lowercase-keys": "^2.0.0", - "p-cancelable": "^2.0.0", - 
"responselike": "^2.0.0" - }, - "engines": { - "node": ">=10.19.0" - }, - "funding": { - "url": "https://github.com/sindresorhus/got?sponsor=1" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", - "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==" - }, - "node_modules/growly": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/growly/-/growly-1.3.0.tgz", - "integrity": "sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE=", - "dev": true, - "optional": true - }, - "node_modules/gtoken": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-2.3.3.tgz", - "integrity": "sha512-EaB49bu/TCoNeQjhCYKI/CurooBKkGxIqFHsWABW0b25fobBYVTMe84A8EBVVZhl8emiUdNypil9huMOTmyAnw==", - "dependencies": { - "gaxios": "^1.0.4", - "google-p12-pem": "^1.0.0", - "jws": "^3.1.5", - "mime": "^2.2.0", - "pify": "^4.0.0" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/gunzip-maybe": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/gunzip-maybe/-/gunzip-maybe-1.4.1.tgz", - "integrity": "sha512-qtutIKMthNJJgeHQS7kZ9FqDq59/Wn0G2HYCRNjpup7yKfVI6/eqwpmroyZGFoCYaG+sW6psNVb4zoLADHpp2g==", - "dependencies": { - "browserify-zlib": "^0.1.4", - "is-deflate": "^1.0.0", - "is-gzip": "^1.0.0", - "peek-stream": "^1.1.0", - "pumpify": "^1.3.3", - "through2": "^2.0.3" - }, - "bin": { - "gunzip-maybe": "bin.js" - } - }, - "node_modules/gunzip-maybe/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "dependencies": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - } - }, - "node_modules/har-schema": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", - "engines": { - "node": ">=4" - } - }, - "node_modules/har-validator": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz", - "integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==", - "deprecated": "this library is no longer supported", - "dependencies": { - "ajv": "^6.5.5", - "har-schema": "^2.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/has-property-descriptors": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", - "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", - "dependencies": { - "es-define-property": "^1.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-symbols": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", - "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-tostringtag": { - 
"version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", - "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", - "license": "MIT", - "dependencies": { - "has-symbols": "^1.0.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-value": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-value/-/has-value-1.0.0.tgz", - "integrity": "sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc=", - "dependencies": { - "get-value": "^2.0.6", - "has-values": "^1.0.0", - "isobject": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/has-values": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-values/-/has-values-1.0.0.tgz", - "integrity": "sha1-lbC2P+whRmGab+V/51Yo1aOe/k8=", - "dependencies": { - "is-number": "^3.0.0", - "kind-of": "^4.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/has-values/node_modules/kind-of": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-4.0.0.tgz", - "integrity": "sha1-IIE989cSkosgc3hpGkUGb65y3Vc=", - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/hash-stream-validation": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/hash-stream-validation/-/hash-stream-validation-0.2.2.tgz", - "integrity": "sha512-cMlva5CxWZOrlS/cY0C+9qAzesn5srhFA8IT1VPiHc9bWWBLkJfEUIZr7MWoi89oOOGmpg8ymchaOjiArsGu5A==", - "dependencies": { - "through2": "^2.0.0" - } - }, - "node_modules/hash-stream-validation/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "dependencies": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - } - }, - "node_modules/hasown": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", - "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", - "dependencies": { - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/html-encoding-sniffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-1.0.2.tgz", - "integrity": "sha512-71lZziiDnsuabfdYiUeWdCVyKuqwWi23L8YeIgV9jSSZHCtb6wB1BKWooH7L3tn4/FuZJMVWyNaIDr4RGmaSYw==", - "dev": true, - "dependencies": { - "whatwg-encoding": "^1.0.1" - } - }, - "node_modules/html-escaper": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", - "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", - "dev": true - }, - "node_modules/http-cache-semantics": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", - "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==", - "license": "BSD-2-Clause" - }, - "node_modules/http-errors": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", - "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", - "dependencies": { - "depd": "2.0.0", - 
"inherits": "2.0.4", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "toidentifier": "1.0.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/http-proxy": { - "version": "1.18.1", - "resolved": "https://registry.npmjs.org/http-proxy/-/http-proxy-1.18.1.tgz", - "integrity": "sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==", - "dependencies": { - "eventemitter3": "^4.0.0", - "follow-redirects": "^1.0.0", - "requires-port": "^1.0.0" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/http-proxy-middleware": { - "version": "0.18.0", - "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-0.18.0.tgz", - "integrity": "sha512-Fs25KVMPAIIcgjMZkVHJoKg9VcXcC1C8yb9JUgeDvVXY0S/zgVIhMb+qVswDIgtJe2DfckMSY2d6TuTEutlk6Q==", - "dependencies": { - "http-proxy": "^1.16.2", - "is-glob": "^4.0.0", - "lodash": "^4.17.5", - "micromatch": "^3.1.9" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/http-signature": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", - "dependencies": { - "assert-plus": "^1.0.0", - "jsprim": "^1.2.2", - "sshpk": "^1.7.0" - }, - "engines": { - "node": ">=0.8", - "npm": ">=1.3.7" - } - }, - "node_modules/http2-wrapper": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-1.0.3.tgz", - "integrity": "sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg==", - "license": "MIT", - "dependencies": { - "quick-lru": "^5.1.1", - "resolve-alpn": "^1.0.0" - }, - "engines": { - "node": ">=10.19.0" - } - }, - "node_modules/https-proxy-agent": { - "version": "2.2.4", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz", - "integrity": "sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==", - "dependencies": { - "agent-base": "^4.3.0", - "debug": "^3.1.0" - }, - "engines": { - "node": ">= 4.5.0" - } - }, - "node_modules/human-signals": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz", - "integrity": "sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw==", - "dev": true, - "engines": { - "node": ">=8.12.0" - } - }, - "node_modules/iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/import-local": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.0.2.tgz", - "integrity": "sha512-vjL3+w0oulAVZ0hBHnxa/Nm5TAurf9YLQJDhqRZyqb+VKGOB6LU8t9H1Nr5CIo16vh9XfJTOoHwU0B71S557gA==", - "dev": true, - "dependencies": { - "pkg-dir": "^4.2.0", - "resolve-cwd": "^3.0.0" - }, - "bin": { - "import-local-fixture": "fixtures/cli.js" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", - "engines": { - "node": ">=0.8.19" - } - }, - "node_modules/indent-string": { - "version": "4.0.0", - "resolved": 
"https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", - "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "node_modules/interpret": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.2.0.tgz", - "integrity": "sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw==", - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/ip-regex": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz", - "integrity": "sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/ipaddr.js": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", - "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/is-accessor-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", - "integrity": "sha1-qeEss66Nh2cn7u84Q/igiXtcmNY=", - "deprecated": "Please upgrade to v0.1.7", - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-accessor-descriptor/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-arguments": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.2.0.tgz", - "integrity": "sha512-7bVbi0huj/wrIAOzb8U1aszg9kdi3KN/CyU19CTI7tAoZYEZoL9yCDXpbXN+uPsuWnP02cyug1gleqq+TU+YCA==", - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "has-tostringtag": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-buffer": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", - "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" - }, - "node_modules/is-callable": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", - "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "node_modules/gaxios/node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": 
"sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-ci": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz", - "integrity": "sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==", - "dev": true, - "dependencies": { - "ci-info": "^2.0.0" - }, - "bin": { - "is-ci": "bin.js" - } - }, - "node_modules/is-data-descriptor": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", - "integrity": "sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y=", - "deprecated": "Please upgrade to v0.1.5", - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-data-descriptor/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-deflate": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-deflate/-/is-deflate-1.0.0.tgz", - "integrity": "sha1-yGKQHDwWH7CdrHzcfnhPgOmPLxQ=" - }, - "node_modules/is-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.6.tgz", - "integrity": "sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==", - "dependencies": { - "is-accessor-descriptor": "^0.1.6", - "is-data-descriptor": "^0.1.4", - "kind-of": "^5.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-descriptor/node_modules/kind-of": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-5.1.0.tgz", - "integrity": "sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, "engines": { "node": ">=8" - } - }, - "node_modules/is-generator-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz", - "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/is-generator-function": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.0.tgz", - "integrity": "sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ==", - "license": "MIT", - "dependencies": { - 
"call-bound": "^1.0.3", - "get-proto": "^1.0.0", - "has-tostringtag": "^1.0.2", - "safe-regex-test": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" }, "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-glob": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", - "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-gzip": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-gzip/-/is-gzip-1.0.0.tgz", - "integrity": "sha1-bKiwe5nHeZgCWQDlVc7Y7YCHmoM=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-number": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=", - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-number/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-obj": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz", - "integrity": "sha1-PkcprB9f3gJc19g6iW2rn09n2w8=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-plain-object": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", - "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", - "dependencies": { - "isobject": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/is-regex": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", - "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", - "license": "MIT", + "node_modules/gcp-metadata": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.3.1.tgz", + "integrity": "sha512-x850LS5N7V1F3UcV7PoupzGsyD6iVwTVvsh3tbXfkctZnBnjW5yu5z1/3k3SehF7TyoTIe78rJs02GMMy+LF+A==", + "license": "Apache-2.0", "dependencies": { - "call-bound": "^1.0.2", - "gopd": "^1.2.0", - "has-tostringtag": "^1.0.2", - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" + "gaxios": "^4.0.0", + "json-bigint": "^1.0.0" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=", "engines": { - "node": ">=0.10.0" + "node": ">=10" } }, - "node_modules/is-stream-ended": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/is-stream-ended/-/is-stream-ended-0.1.4.tgz", - "integrity": "sha512-xj0XPvmr7bQFTvirqnFr50o0hQIh6ZItDqloxt5aJrR4NQsYeSsyFQERYGCAzfindAcnKjINnwEEgLx4IqVzQw==" - }, - "node_modules/is-typed-array": { - "version": "1.1.15", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", - "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", + 
"node_modules/gcs-resumable-upload": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/gcs-resumable-upload/-/gcs-resumable-upload-3.6.0.tgz", + "integrity": "sha512-IyaNs4tx3Mp2UKn0CltRUiW/ZXYFlBNuK/V+ixs80chzVD+BJq3+8bfiganATFfCoMluAjokF9EswNJdVuOs8A==", + "deprecated": "gcs-resumable-upload is deprecated. Support will end on 11/01/2023", "license": "MIT", "dependencies": { - "which-typed-array": "^1.1.16" + "abort-controller": "^3.0.0", + "async-retry": "^1.3.3", + "configstore": "^5.0.0", + "extend": "^3.0.2", + "gaxios": "^4.0.0", + "google-auth-library": "^7.0.0", + "pumpify": "^2.0.0", + "stream-events": "^1.0.4" }, - "engines": { - "node": ">= 0.4" + "bin": { + "gcs-upload": "build/src/cli.js" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-typedarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" - }, - "node_modules/is-windows": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", - "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-wsl": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.1.1.tgz", - "integrity": "sha512-umZHcSrwlDHo2TGMXv0DZ8dIUGunZ2Iv68YZnrmCiBPkZ4aaOhtv7pXJKeki9k3qJ3RJr0cDyitcl5wEH3AYog==", - "dev": true, - "optional": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=" - }, - "node_modules/isobject": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", - "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/isomorphic-ws": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/isomorphic-ws/-/isomorphic-ws-4.0.1.tgz", - "integrity": "sha512-BhBvN2MBpWTaSHdWRb/bwdZJ1WaehQ2L1KngkCkfLUGF0mAWAT1sQUQacEmQ0jXkFw/czDXPNQSL5u2/Krsz1w==", - "peerDependencies": { - "ws": "*" - } - }, - "node_modules/isstream": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" - }, - "node_modules/istanbul-lib-coverage": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.0.0.tgz", - "integrity": "sha512-UiUIqxMgRDET6eR+o5HbfRYP1l0hqkWOs7vNxC/mggutCMUIhWMm8gAHb8tHlyfD3/l6rlgNA5cKdDzEAf6hEg==", - "dev": true, "engines": { - "node": ">=8" + "node": ">=10" } - }, - "node_modules/istanbul-lib-instrument": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.1.tgz", - "integrity": "sha512-imIchxnodll7pvQBYOqUu88EufLCU56LMeFPZZM/fJZ1irYcYdqroaV+ACK1Ila8ls09iEYArp+nqyC6lW1Vfg==", - "dev": true, - "dependencies": { - "@babel/core": "^7.7.5", - "@babel/parser": "^7.7.5", - "@babel/template": "^7.7.4", - "@babel/traverse": "^7.7.4", - "@istanbuljs/schema": "^0.1.2", - "istanbul-lib-coverage": "^3.0.0", - "semver": "^6.3.0" - }, - "engines": { - "node": 
">=8" + }, + "node_modules/gcs-resumable-upload/node_modules/pump": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.3.tgz", + "integrity": "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==", + "license": "MIT", + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" } }, - "node_modules/istanbul-lib-instrument/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "bin": { - "semver": "bin/semver.js" + "node_modules/gcs-resumable-upload/node_modules/pumpify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pumpify/-/pumpify-2.0.1.tgz", + "integrity": "sha512-m7KOje7jZxrmutanlkS1daj1dS6z6BgslzOXmcSEpIlCxM3VJH7lG5QLeck/6hgF6F4crFf01UtQmNsJfweTAw==", + "license": "MIT", + "dependencies": { + "duplexify": "^4.1.1", + "inherits": "^2.0.3", + "pump": "^3.0.0" } }, - "node_modules/istanbul-lib-report": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", - "integrity": "sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==", + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", "dev": true, - "dependencies": { - "istanbul-lib-coverage": "^3.0.0", - "make-dir": "^3.0.0", - "supports-color": "^7.1.0" - }, "engines": { - "node": ">=8" + "node": ">=6.9.0" } }, - "node_modules/istanbul-lib-report/node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", "dev": true, "engines": { - "node": ">=8" + "node": "6.* || 8.* || >= 10.*" } }, - "node_modules/istanbul-lib-report/node_modules/make-dir": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.0.2.tgz", - "integrity": "sha512-rYKABKutXa6vXTXhoV18cBE7PaewPXHe/Bdq4v+ZLMhxbWApkFFplT0LcbMW+6BbjnQXzZ/sAvSE/JdguApG5w==", - "dev": true, + "node_modules/get-intrinsic": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.7.tgz", + "integrity": "sha512-VW6Pxhsrk0KAOqs3WEd0klDiF/+V7gQOpAvY1jVU/LHmaD/kQO4523aiJuikX/QAKYiW6x8Jh+RJej1almdtCA==", + "license": "MIT", "dependencies": { - "semver": "^6.0.0" + "call-bind-apply-helpers": "^1.0.1", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "function-bind": "^1.1.2", + "get-proto": "^1.0.0", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" }, "engines": { - "node": ">=8" + "node": ">= 0.4" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/istanbul-lib-report/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - 
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "bin": { - "semver": "bin/semver.js" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/istanbul-lib-report/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "node_modules/get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", "dev": true, - "dependencies": { - "has-flag": "^4.0.0" - }, "engines": { - "node": ">=8" + "node": ">=8.0.0" } }, - "node_modules/istanbul-lib-source-maps": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.0.tgz", - "integrity": "sha512-c16LpFRkR8vQXyHZ5nLpY35JZtzj1PQY1iZmesUbf1FZHbIupcWfjgOXBY9YHkLEQ6puz1u4Dgj6qmU/DisrZg==", - "dev": true, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", "dependencies": { - "debug": "^4.1.1", - "istanbul-lib-coverage": "^3.0.0", - "source-map": "^0.6.1" + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" }, "engines": { - "node": ">=8" + "node": ">= 0.4" } }, - "node_modules/istanbul-lib-source-maps/node_modules/debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", - "deprecated": "Debug versions >=3.2.0 <3.2.7 || >=4 <4.3.1 have a low-severity ReDos regression when used in a Node.js environment. It is recommended you upgrade to 3.2.7 or 4.3.1. 
(https://github.com/visionmedia/debug/issues/797)", - "dev": true, + "node_modules/getpass": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", "dependencies": { - "ms": "^2.1.1" + "assert-plus": "^1.0.0" } }, - "node_modules/istanbul-reports": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.0.2.tgz", - "integrity": "sha512-9tZvz7AiR3PEDNGiV9vIouQ/EAcqMXFmkcA1CDFTwOB98OZVDL0PH9glHotf5Ugp6GCOTypfzGWI/OqjWNCRUw==", - "dev": true, + "node_modules/glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "deprecated": "Glob versions prior to v9 are no longer supported", "dependencies": { - "html-escaper": "^2.0.0", - "istanbul-lib-report": "^3.0.0" + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" }, "engines": { - "node": ">=8" + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/jest": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest/-/jest-25.3.0.tgz", - "integrity": "sha512-iKd5ShQSHzFT5IL/6h5RZJhApgqXSoPxhp5HEi94v6OAw9QkF8T7X+liEU2eEHJ1eMFYTHmeWLrpBWulsDpaUg==", - "dev": true, + "node_modules/google-auth-library": { + "version": "7.14.1", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.14.1.tgz", + "integrity": "sha512-5Rk7iLNDFhFeBYc3s8l1CqzbEBcdhwR193RlD4vSNFajIcINKI8W8P0JLmBpwymHqqWbX34pJDQu39cSy/6RsA==", + "license": "Apache-2.0", "dependencies": { - "@jest/core": "^25.3.0", - "import-local": "^3.0.2", - "jest-cli": "^25.3.0" - }, - "bin": { - "jest": "bin/jest.js" + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^4.0.0", + "gcp-metadata": "^4.2.0", + "gtoken": "^5.0.4", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" }, "engines": { - "node": ">= 8.3" + "node": ">=10" } }, - "node_modules/jest-changed-files": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-25.3.0.tgz", - "integrity": "sha512-eqd5hyLbUjIVvLlJ3vQ/MoPxsxfESVXG9gvU19XXjKzxr+dXmZIqCXiY0OiYaibwlHZBJl2Vebkc0ADEMzCXew==", - "dev": true, - "dependencies": { - "@jest/types": "^25.3.0", - "execa": "^3.2.0", - "throat": "^5.0.0" - }, + "node_modules/google-auth-library/node_modules/arrify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", + "license": "MIT", "engines": { - "node": ">= 8.3" + "node": ">=8" } }, - "node_modules/jest-changed-files/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", - "dev": true, + "node_modules/google-p12-pem": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.1.4.tgz", + "integrity": "sha512-HHuHmkLgwjdmVRngf5+gSmpkyaRI6QmOg77J8tkNBHhNEI62sGHyw4/+UkgyZEI7h84NbWprXDJ+sa3xOYFvTg==", + "deprecated": "Package is no longer maintained", + "license": "MIT", "dependencies": { - 
"@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "node-forge": "^1.3.1" + }, + "bin": { + "gp12-pem": "build/src/bin/gp12-pem.js" }, "engines": { - "node": ">= 8.3" + "node": ">=10" } }, - "node_modules/jest-changed-files/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", - "dev": true, - "dependencies": { - "@types/yargs-parser": "*" + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/jest-changed-files/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", - "dev": true, + "node_modules/got": { + "version": "11.8.6", + "resolved": "https://registry.npmjs.org/got/-/got-11.8.6.tgz", + "integrity": "sha512-6tfZ91bOr7bOXnK7PRDCGBLa1H4U080YHNaAQ2KsMGlLEzRbk44nsZF2E1IeRc3vtJHPVbKCYgdFbaGO2ljd8g==", + "license": "MIT", "dependencies": { - "@types/color-name": "^1.1.1", - "color-convert": "^2.0.1" + "@sindresorhus/is": "^4.0.0", + "@szmarczak/http-timer": "^4.0.5", + "@types/cacheable-request": "^6.0.1", + "@types/responselike": "^1.0.0", + "cacheable-lookup": "^5.0.3", + "cacheable-request": "^7.0.2", + "decompress-response": "^6.0.0", + "http2-wrapper": "^1.0.0-beta.5.2", + "lowercase-keys": "^2.0.0", + "p-cancelable": "^2.0.0", + "responselike": "^2.0.0" }, "engines": { - "node": ">=8" + "node": ">=10.19.0" }, "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "url": "https://github.com/sindresorhus/got?sponsor=1" } }, - "node_modules/jest-changed-files/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", - "dev": true, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" + }, + "node_modules/gtoken": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.3.2.tgz", + "integrity": "sha512-gkvEKREW7dXWF8NV8pVrKfW7WqReAmjjkMBh6lNCCGOM4ucS0r0YyXXl0r/9Yj8wcW/32ISkfc8h5mPTDbtifQ==", + "license": "MIT", "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" + "gaxios": "^4.0.0", + "google-p12-pem": "^3.1.3", + "jws": "^4.0.0" }, "engines": { - "node": ">=8" + "node": ">=10" } }, - "node_modules/jest-changed-files/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, + "node_modules/gunzip-maybe": { + "version": "1.4.1", + "resolved": 
"https://registry.npmjs.org/gunzip-maybe/-/gunzip-maybe-1.4.1.tgz", + "integrity": "sha512-qtutIKMthNJJgeHQS7kZ9FqDq59/Wn0G2HYCRNjpup7yKfVI6/eqwpmroyZGFoCYaG+sW6psNVb4zoLADHpp2g==", "dependencies": { - "color-name": "~1.1.4" + "browserify-zlib": "^0.1.4", + "is-deflate": "^1.0.0", + "is-gzip": "^1.0.0", + "peek-stream": "^1.1.0", + "pumpify": "^1.3.3", + "through2": "^2.0.3" }, - "engines": { - "node": ">=7.0.0" + "bin": { + "gunzip-maybe": "bin.js" } }, - "node_modules/jest-changed-files/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true + "node_modules/gunzip-maybe/node_modules/through2": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", + "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "dependencies": { + "readable-stream": "~2.3.6", + "xtend": "~4.0.1" + } }, - "node_modules/jest-changed-files/node_modules/cross-spawn": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.2.tgz", - "integrity": "sha512-PD6G8QG3S4FK/XCGFbEQrDqO2AnMMsy0meR7lerlIOHAAbkuavGU/pOqprrlvfTNjvowivTeBsjebAL0NSoMxw==", - "dev": true, + "node_modules/har-schema": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", + "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", + "engines": { + "node": ">=4" + } + }, + "node_modules/har-validator": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz", + "integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==", + "deprecated": "this library is no longer supported", "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" + "ajv": "^6.5.5", + "har-schema": "^2.0.0" }, "engines": { - "node": ">= 8" + "node": ">=6" + } + }, + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true, + "engines": { + "node": ">=4" } }, - "node_modules/jest-changed-files/node_modules/execa": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-3.4.0.tgz", - "integrity": "sha512-r9vdGQk4bmCuK1yKQu1KTwcT2zwfWdbdaXfCtAh+5nU/4fSX+JAb7vZGvI5naJrQlvONrEB20jeruESI69530g==", - "dev": true, + "node_modules/has-own": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-own/-/has-own-1.0.1.tgz", + "integrity": "sha512-RDKhzgQTQfMaLvIFhjahU+2gGnRBK6dYOd5Gd9BzkmnBneOCRYjRC003RIMrdAbH52+l+CnMS4bBCXGer8tEhg==", + "deprecated": "This project is not maintained. 
Use Object.hasOwn() instead.", + "license": "MIT" + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", "dependencies": { - "cross-spawn": "^7.0.0", - "get-stream": "^5.0.0", - "human-signals": "^1.1.1", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.0", - "onetime": "^5.1.0", - "p-finally": "^2.0.0", - "signal-exit": "^3.0.2", - "strip-final-newline": "^2.0.0" + "es-define-property": "^1.0.0" }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", "engines": { - "node": "^8.12.0 || >=9.7.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/jest-changed-files/node_modules/get-stream": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.1.0.tgz", - "integrity": "sha512-EXr1FOzrzTfGeL0gQdeFEvOMm2mzMOglyiOXSTpPC+iAjAKftbr3jpCMWynogwYnM+eSj9sHGc6wjIcDvYiygw==", - "dev": true, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", "dependencies": { - "pump": "^3.0.0" + "has-symbols": "^1.0.3" }, "engines": { - "node": ">=8" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/jest-changed-files/node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true, - "engines": { - "node": ">=8" + "node_modules/hash-stream-validation": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/hash-stream-validation/-/hash-stream-validation-0.2.2.tgz", + "integrity": "sha512-cMlva5CxWZOrlS/cY0C+9qAzesn5srhFA8IT1VPiHc9bWWBLkJfEUIZr7MWoi89oOOGmpg8ymchaOjiArsGu5A==", + "dependencies": { + "through2": "^2.0.0" } }, - "node_modules/jest-changed-files/node_modules/is-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz", - "integrity": "sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==", - "dev": true, - "engines": { - "node": ">=8" + "node_modules/hash-stream-validation/node_modules/through2": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", + "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "dependencies": { + "readable-stream": "~2.3.6", + "xtend": "~4.0.1" } }, - "node_modules/jest-changed-files/node_modules/npm-run-path": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", - "dev": true, + "node_modules/hasown": { + "version": 
"2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", "dependencies": { - "path-key": "^3.0.0" + "function-bind": "^1.1.2" }, "engines": { - "node": ">=8" + "node": ">= 0.4" } }, - "node_modules/jest-changed-files/node_modules/p-finally": { + "node_modules/html-encoding-sniffer": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-2.0.1.tgz", - "integrity": "sha512-vpm09aKwq6H9phqRQzecoDpD8TmVyGw70qmWlyq5onxY7tqyTTFVvxMykxQSQKILBSFlbXpypIw2T1Ml7+DDtw==", + "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz", + "integrity": "sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ==", "dev": true, + "dependencies": { + "whatwg-encoding": "^1.0.5" + }, "engines": { - "node": ">=8" + "node": ">=10" } }, - "node_modules/jest-changed-files/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "engines": { - "node": ">=8" - } + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true }, - "node_modules/jest-changed-files/node_modules/pump": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "dev": true, - "dependencies": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } + "node_modules/http-cache-semantics": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.2.0.tgz", + "integrity": "sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ==", + "license": "BSD-2-Clause" }, - "node_modules/jest-changed-files/node_modules/shebang-command": { + "node_modules/http-errors": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "license": "MIT", "dependencies": { - "shebang-regex": "^3.0.0" + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" }, "engines": { - "node": ">=8" + "node": ">= 0.8" } }, - "node_modules/jest-changed-files/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, + "node_modules/http-errors/node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + 
"license": "MIT", "engines": { - "node": ">=8" + "node": ">= 0.8" } }, - "node_modules/jest-changed-files/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", - "dev": true, + "node_modules/http-proxy": { + "version": "1.18.1", + "resolved": "https://registry.npmjs.org/http-proxy/-/http-proxy-1.18.1.tgz", + "integrity": "sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==", "dependencies": { - "has-flag": "^4.0.0" + "eventemitter3": "^4.0.0", + "follow-redirects": "^1.0.0", + "requires-port": "^1.0.0" }, "engines": { - "node": ">=8" + "node": ">=8.0.0" } }, - "node_modules/jest-config": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-25.3.0.tgz", - "integrity": "sha512-CmF1JnNWFmoCSPC4tnU52wnVBpuxHjilA40qH/03IHxIevkjUInSMwaDeE6ACfxMPTLidBGBCO3EbxvzPbo8wA==", + "node_modules/http-proxy-agent": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", + "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", "dev": true, "dependencies": { - "@babel/core": "^7.1.0", - "@jest/test-sequencer": "^25.3.0", - "@jest/types": "^25.3.0", - "babel-jest": "^25.3.0", - "chalk": "^3.0.0", - "deepmerge": "^4.2.2", - "glob": "^7.1.1", - "jest-environment-jsdom": "^25.3.0", - "jest-environment-node": "^25.3.0", - "jest-get-type": "^25.2.6", - "jest-jasmine2": "^25.3.0", - "jest-regex-util": "^25.2.6", - "jest-resolve": "^25.3.0", - "jest-util": "^25.3.0", - "jest-validate": "^25.3.0", - "micromatch": "^4.0.2", - "pretty-format": "^25.3.0", - "realpath-native": "^2.0.0" + "@tootallnate/once": "1", + "agent-base": "6", + "debug": "4" }, "engines": { - "node": ">= 8.3" + "node": ">= 6" } }, - "node_modules/jest-config/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", + "node_modules/http-proxy-agent/node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", "dev": true, "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "debug": "4" }, "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-config/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", - "dev": true, - "dependencies": { - "@types/yargs-parser": "*" - } - }, - "node_modules/jest-config/node_modules/ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", - "dev": true, - "engines": { - "node": ">=8" + "node": ">= 6.0.0" } }, - "node_modules/jest-config/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "node_modules/http-proxy-agent/node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", - "color-convert": "^2.0.1" + "ms": "^2.1.3" }, "engines": { - "node": ">=8" + "node": ">=6.0" }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } } }, - "node_modules/jest-config/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, + "node_modules/http-proxy-middleware": { + "version": "2.0.9", + "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.9.tgz", + "integrity": "sha512-c1IyJYLYppU574+YI7R4QyX2ystMtVXZwIdzazUIPIJsHuWNd+mho2j+bKoHftndicGj9yh+xjd+l0yj7VeT1Q==", + "license": "MIT", "dependencies": { - "fill-range": "^7.0.1" + "@types/http-proxy": "^1.17.8", + "http-proxy": "^1.18.1", + "is-glob": "^4.0.1", + "is-plain-obj": "^3.0.0", + "micromatch": "^4.0.2" }, "engines": { - "node": ">=8" + "node": ">=12.0.0" + }, + "peerDependencies": { + "@types/express": "^4.17.13" + }, + "peerDependenciesMeta": { + "@types/express": { + "optional": true + } } }, - "node_modules/jest-config/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", - "dev": true, + "node_modules/http-signature": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" + "assert-plus": "^1.0.0", + "jsprim": "^1.2.2", + "sshpk": "^1.7.0" }, "engines": { - "node": ">=8" + "node": ">=0.8", + "npm": ">=1.3.7" } }, - "node_modules/jest-config/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, + "node_modules/http2-wrapper": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-1.0.3.tgz", + "integrity": "sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg==", + "license": "MIT", "dependencies": { - "color-name": "~1.1.4" + "quick-lru": "^5.1.1", + "resolve-alpn": "^1.0.0" }, "engines": { - "node": ">=7.0.0" + "node": ">=10.19.0" } }, - "node_modules/jest-config/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "node_modules/jest-config/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", 
- "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, + "node_modules/https-proxy-agent": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz", + "integrity": "sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==", "dependencies": { - "to-regex-range": "^5.0.1" + "agent-base": "^4.3.0", + "debug": "^3.1.0" }, "engines": { - "node": ">=8" - } - }, - "node_modules/jest-config/node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true, - "engines": { - "node": ">=8" + "node": ">= 4.5.0" } }, - "node_modules/jest-config/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", "engines": { - "node": ">=0.12.0" + "node": ">=10.17.0" } }, - "node_modules/jest-config/node_modules/jest-get-type": { - "version": "25.2.6", - "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-25.2.6.tgz", - "integrity": "sha512-DxjtyzOHjObRM+sM1knti6or+eOgcGU4xVSb2HNP1TqO4ahsT+rqZg+nyqHWJSvWgKC5cG3QjGFBqxLghiF/Ig==", + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", "dev": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, "engines": { - "node": ">= 8.3" + "node": ">=0.10.0" } }, - "node_modules/jest-config/node_modules/micromatch": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.2.tgz", - "integrity": "sha512-y7FpHSbMUMoyPbYUSzO6PaZ6FyRnQOpHuKwbo1G+Knck95XVU4QAiKdGEnj5wwoS7PlOgthX/09u5iFJ+aYf5Q==", + "node_modules/import-local": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz", + "integrity": "sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==", "dev": true, "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.0.5" + "pkg-dir": "^4.2.0", + "resolve-cwd": "^3.0.0" + }, + "bin": { + "import-local-fixture": "fixtures/cli.js" }, "engines": { "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/jest-config/node_modules/pretty-format": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-25.3.0.tgz", - "integrity": "sha512-wToHwF8bkQknIcFkBqNfKu4+UZqnrLn/Vr+wwKQwwvPzkBfDDKp/qIabFqdgtoi5PEnM8LFByVsOrHoa3SpTVA==", - "dev": true, - "dependencies": { - "@jest/types": "^25.3.0", - "ansi-regex": "^5.0.0", - "ansi-styles": "^4.0.0", - "react-is": "^16.12.0" - }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", 
"engines": { - "node": ">= 8.3" + "node": ">=0.8.19" } }, - "node_modules/jest-config/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", - "dev": true, - "dependencies": { - "has-flag": "^4.0.0" - }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/jest-config/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", "dependencies": { - "is-number": "^7.0.0" - }, + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/interpret": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", + "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==", + "license": "MIT", "engines": { - "node": ">=8.0" + "node": ">= 0.10" } }, - "node_modules/jest-diff": { - "version": "24.9.0", - "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-24.9.0.tgz", - "integrity": "sha512-qMfrTs8AdJE2iqrTp0hzh7kTd2PQWrsFyj9tORoKmu32xjPjeE4NyjVRDz8ybYwqS2ik8N4hsIpiVTyFeo2lBQ==", - "dev": true, - "dependencies": { - "chalk": "^2.0.1", - "diff-sequences": "^24.9.0", - "jest-get-type": "^24.9.0", - "pretty-format": "^24.9.0" - }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", "engines": { - "node": ">= 6" + "node": ">= 0.10" } }, - "node_modules/jest-docblock": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-25.3.0.tgz", - "integrity": "sha512-aktF0kCar8+zxRHxQZwxMy70stc9R1mOmrLsT5VO3pIT0uzGRSDAXxSlz4NqQWpuLjPpuMhPRl7H+5FRsvIQAg==", - "dev": true, + "node_modules/is-arguments": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.2.0.tgz", + "integrity": "sha512-7bVbi0huj/wrIAOzb8U1aszg9kdi3KN/CyU19CTI7tAoZYEZoL9yCDXpbXN+uPsuWnP02cyug1gleqq+TU+YCA==", + "license": "MIT", "dependencies": { - "detect-newline": "^3.0.0" + "call-bound": "^1.0.2", + "has-tostringtag": "^1.0.2" }, "engines": { - "node": ">= 8.3" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/jest-each": { 
- "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-25.3.0.tgz", - "integrity": "sha512-aBfS4VOf/Qs95yUlX6d6WBv0szvOcTkTTyCIaLuQGj4bSHsT+Wd9dDngVHrCe5uytxpN8VM+NAloI6nbPjXfXw==", - "dev": true, - "dependencies": { - "@jest/types": "^25.3.0", - "chalk": "^3.0.0", - "jest-get-type": "^25.2.6", - "jest-util": "^25.3.0", - "pretty-format": "^25.3.0" - }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true + }, + "node_modules/is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "license": "MIT", "engines": { - "node": ">= 8.3" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/jest-each/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", - "dev": true, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "hasown": "^2.0.2" }, "engines": { - "node": ">= 8.3" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/jest-each/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", - "dev": true, - "dependencies": { - "@types/yargs-parser": "*" + "node_modules/is-deflate": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-deflate/-/is-deflate-1.0.0.tgz", + "integrity": "sha1-yGKQHDwWH7CdrHzcfnhPgOmPLxQ=" + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" } }, - "node_modules/jest-each/node_modules/ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true, "engines": { "node": ">=8" } }, - "node_modules/jest-each/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": 
"sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "node_modules/is-generator-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz", + "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/is-generator-function": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.0.tgz", + "integrity": "sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ==", + "license": "MIT", "dependencies": { - "@types/color-name": "^1.1.1", - "color-convert": "^2.0.1" + "call-bound": "^1.0.3", + "get-proto": "^1.0.0", + "has-tostringtag": "^1.0.2", + "safe-regex-test": "^1.1.0" }, "engines": { - "node": ">=8" + "node": ">= 0.4" }, "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/jest-each/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", - "dev": true, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "license": "MIT", "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" + "is-extglob": "^2.1.1" }, "engines": { - "node": ">=8" + "node": ">=0.10.0" } }, - "node_modules/jest-each/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "dependencies": { - "color-name": "~1.1.4" - }, + "node_modules/is-gzip": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-gzip/-/is-gzip-1.0.0.tgz", + "integrity": "sha1-bKiwe5nHeZgCWQDlVc7Y7YCHmoM=", "engines": { - "node": ">=7.0.0" + "node": ">=0.10.0" } }, - "node_modules/jest-each/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "node_modules/jest-each/node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true, + "node_modules/is-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", + "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", + "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/jest-each/node_modules/jest-get-type": { - "version": "25.2.6", - "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-25.2.6.tgz", - "integrity": "sha512-DxjtyzOHjObRM+sM1knti6or+eOgcGU4xVSb2HNP1TqO4ahsT+rqZg+nyqHWJSvWgKC5cG3QjGFBqxLghiF/Ig==", - "dev": true, + "node_modules/is-plain-obj": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-3.0.0.tgz", + "integrity": "sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==", + "license": "MIT", "engines": { - "node": ">= 8.3" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/jest-each/node_modules/pretty-format": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-25.3.0.tgz", - "integrity": "sha512-wToHwF8bkQknIcFkBqNfKu4+UZqnrLn/Vr+wwKQwwvPzkBfDDKp/qIabFqdgtoi5PEnM8LFByVsOrHoa3SpTVA==", - "dev": true, + "node_modules/is-potential-custom-element-name": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", + "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==", + "dev": true + }, + "node_modules/is-regex": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", + "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", + "license": "MIT", "dependencies": { - "@jest/types": "^25.3.0", - "ansi-regex": "^5.0.0", - "ansi-styles": "^4.0.0", - "react-is": "^16.12.0" + "call-bound": "^1.0.2", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" }, "engines": { - "node": ">= 8.3" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/jest-each/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", - "dev": true, + "node_modules/is-stream-ended": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/is-stream-ended/-/is-stream-ended-0.1.4.tgz", + "integrity": "sha512-xj0XPvmr7bQFTvirqnFr50o0hQIh6ZItDqloxt5aJrR4NQsYeSsyFQERYGCAzfindAcnKjINnwEEgLx4IqVzQw==" + }, + "node_modules/is-typed-array": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", + "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", + "license": "MIT", "dependencies": { - "has-flag": "^4.0.0" + "which-typed-array": "^1.1.16" }, "engines": { - "node": ">=8" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=" + }, + "node_modules/isomorphic-ws": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/isomorphic-ws/-/isomorphic-ws-4.0.1.tgz", + "integrity": "sha512-BhBvN2MBpWTaSHdWRb/bwdZJ1WaehQ2L1KngkCkfLUGF0mAWAT1sQUQacEmQ0jXkFw/czDXPNQSL5u2/Krsz1w==", + "license": "MIT", + "peerDependencies": { + "ws": "*" } }, - "node_modules/jest-environment-jsdom": { - "version": "25.3.0", - "resolved": 
"https://registry.npmjs.org/jest-environment-jsdom/-/jest-environment-jsdom-25.3.0.tgz", - "integrity": "sha512-jdE4bQN+k2QEZ9sWOxsqDJvMzbdFSCN/4tw8X0TQaCqyzKz58PyEf41oIr4WO7ERdp7WaJGBSUKF7imR3UW1lg==", + "node_modules/isstream": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", "dev": true, - "dependencies": { - "@jest/environment": "^25.3.0", - "@jest/fake-timers": "^25.3.0", - "@jest/types": "^25.3.0", - "jest-mock": "^25.3.0", - "jest-util": "^25.3.0", - "jsdom": "^15.2.1" - }, "engines": { - "node": ">= 8.3" + "node": ">=8" } }, - "node_modules/jest-environment-jsdom/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", + "node_modules/istanbul-lib-instrument": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", + "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", "dev": true, "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "@babel/core": "^7.12.3", + "@babel/parser": "^7.14.7", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^6.3.0" }, "engines": { - "node": ">= 8.3" + "node": ">=8" } }, - "node_modules/jest-environment-jsdom/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", + "node_modules/istanbul-lib-instrument/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, - "dependencies": { - "@types/yargs-parser": "*" + "bin": { + "semver": "bin/semver.js" } }, - "node_modules/jest-environment-jsdom/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "node_modules/istanbul-lib-report": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", + "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", - "color-convert": "^2.0.1" + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^4.0.0", + "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "node": ">=10" } }, - "node_modules/jest-environment-jsdom/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": 
"sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "node_modules/istanbul-lib-report/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, "engines": { "node": ">=8" } }, - "node_modules/jest-environment-jsdom/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "node_modules/istanbul-lib-report/node_modules/make-dir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", "dev": true, "dependencies": { - "color-name": "~1.1.4" + "semver": "^7.5.3" }, "engines": { - "node": ">=7.0.0" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/jest-environment-jsdom/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "node_modules/jest-environment-jsdom/node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "node_modules/istanbul-lib-report/node_modules/semver": { + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", "dev": true, + "bin": { + "semver": "bin/semver.js" + }, "engines": { - "node": ">=8" + "node": ">=10" } }, - "node_modules/jest-environment-jsdom/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "node_modules/istanbul-lib-report/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { "has-flag": "^4.0.0" @@ -8043,189 +6692,199 @@ "node": ">=8" } }, - "node_modules/jest-environment-node": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-25.3.0.tgz", - "integrity": "sha512-XO09S29Nx1NU7TiMPHMoDIkxoGBuKSTbE+sHp0gXbeLDXhIdhysUI25kOqFFSD9AuDgvPvxWCXrvNqiFsOH33g==", + "node_modules/istanbul-lib-source-maps": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", + "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", "dev": true, "dependencies": { - "@jest/environment": "^25.3.0", - "@jest/fake-timers": 
"^25.3.0", - "@jest/types": "^25.3.0", - "jest-mock": "^25.3.0", - "jest-util": "^25.3.0", - "semver": "^6.3.0" + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0", + "source-map": "^0.6.1" }, "engines": { - "node": ">= 8.3" + "node": ">=10" } }, - "node_modules/jest-environment-node/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", + "node_modules/istanbul-lib-source-maps/node_modules/debug": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", "dev": true, "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "ms": "^2.1.3" }, "engines": { - "node": ">= 8.3" + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } } }, - "node_modules/jest-environment-node/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", + "node_modules/istanbul-reports": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", + "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", "dev": true, "dependencies": { - "@types/yargs-parser": "*" + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" } }, - "node_modules/jest-environment-node/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "node_modules/jest": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest/-/jest-27.5.1.tgz", + "integrity": "sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", - "color-convert": "^2.0.1" + "@jest/core": "^27.5.1", + "import-local": "^3.0.2", + "jest-cli": "^27.5.1" + }, + "bin": { + "jest": "bin/jest.js" }, "engines": { - "node": ">=8" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } } }, - "node_modules/jest-environment-node/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "node_modules/jest-changed-files": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-27.5.1.tgz", + "integrity": "sha512-buBLMiByfWGCoMsLLzGUUSpAmIAGnbR2KJoMN10ziLhOLvP4e0SlypHnAel8iqQXTrcbmfEY9sSqae5sgUsTvw==", "dev": true, "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" + "@jest/types": "^27.5.1", + "execa": "^5.0.0", + "throat": 
"^6.0.1" }, "engines": { - "node": ">=8" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-environment-node/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "node_modules/jest-changed-files/node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", "dev": true, "dependencies": { - "color-name": "~1.1.4" + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" }, "engines": { - "node": ">=7.0.0" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" } }, - "node_modules/jest-environment-node/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "node_modules/jest-environment-node/node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "node_modules/jest-changed-files/node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", "dev": true, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/jest-environment-node/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "node_modules/jest-changed-files/node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", "dev": true, - "bin": { - "semver": "bin/semver.js" + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/jest-environment-node/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "node_modules/jest-changed-files/node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", "dev": true, "dependencies": { - "has-flag": "^4.0.0" + "path-key": "^3.0.0" }, "engines": { "node": ">=8" } }, - "node_modules/jest-get-type": { - "version": "24.9.0", - "resolved": 
"https://registry.npmjs.org/jest-get-type/-/jest-get-type-24.9.0.tgz", - "integrity": "sha512-lUseMzAley4LhIcpSP9Jf+fTrQ4a1yHQwLNeeVa2cEmbCGeoZAtYPOIv8JaxLD/sUpKxetKGP+gsHl8f8TSj8Q==", + "node_modules/jest-changed-files/node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", "dev": true, "engines": { - "node": ">= 6" + "node": ">=8" } }, - "node_modules/jest-haste-map": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-25.3.0.tgz", - "integrity": "sha512-LjXaRa+F8wwtSxo9G+hHD/Cp63PPQzvaBL9XCVoJD2rrcJO0Zr2+YYzAFWWYJ5GlPUkoaJFJtOuk0sL6MJY80A==", - "dev": true, - "dependencies": { - "@jest/types": "^25.3.0", - "anymatch": "^3.0.3", - "fb-watchman": "^2.0.0", - "graceful-fs": "^4.2.3", - "jest-serializer": "^25.2.6", - "jest-util": "^25.3.0", - "jest-worker": "^25.2.6", - "micromatch": "^4.0.2", - "sane": "^4.0.3", - "walker": "^1.0.7", - "which": "^2.0.2" - }, - "engines": { - "node": ">= 8.3" - }, - "optionalDependencies": { - "fsevents": "^2.1.2" - } + "node_modules/jest-changed-files/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true }, - "node_modules/jest-haste-map/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", + "node_modules/jest-circus": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-27.5.1.tgz", + "integrity": "sha512-D95R7x5UtlMA5iBYsOHFFbMD/GVA4R/Kdq15f7xYWUfWHBto9NYRsOvnSauTgdF+ogCpJ4tyKOXhUifxS65gdw==", "dev": true, "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "@jest/environment": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "co": "^4.6.0", + "dedent": "^0.7.0", + "expect": "^27.5.1", + "is-generator-fn": "^2.0.0", + "jest-each": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-runtime": "^27.5.1", + "jest-snapshot": "^27.5.1", + "jest-util": "^27.5.1", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3", + "throat": "^6.0.1" }, "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-haste-map/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", - "dev": true, - "dependencies": { - "@types/yargs-parser": "*" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-haste-map/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "node_modules/jest-circus/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -8235,32 +6894,23 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-haste-map/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/jest-haste-map/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "node_modules/jest-circus/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/jest-haste-map/node_modules/color-convert": { + "node_modules/jest-circus/node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", @@ -8272,25 +6922,13 @@ "node": ">=7.0.0" } }, - "node_modules/jest-haste-map/node_modules/color-name": { + "node_modules/jest-circus/node_modules/color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "node_modules/jest-haste-map/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/jest-haste-map/node_modules/has-flag": { + "node_modules/jest-circus/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", @@ -8299,32 +6937,10 @@ "node": ">=8" } }, - "node_modules/jest-haste-map/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/jest-haste-map/node_modules/micromatch": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.2.tgz", - "integrity": "sha512-y7FpHSbMUMoyPbYUSzO6PaZ6FyRnQOpHuKwbo1G+Knck95XVU4QAiKdGEnj5wwoS7PlOgthX/09u5iFJ+aYf5Q==", - "dev": true, - "dependencies": 
{ - "braces": "^3.0.1", - "picomatch": "^2.0.5" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/jest-haste-map/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "node_modules/jest-circus/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { "has-flag": "^4.0.0" @@ -8333,86 +6949,46 @@ "node": ">=8" } }, - "node_modules/jest-haste-map/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "node_modules/jest-cli": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-27.5.1.tgz", + "integrity": "sha512-Hc6HOOwYq4/74/c62dEE3r5elx8wjYqxY0r0G/nFrLDPMFRu6RA/u8qINOIkvhxG7mMQ5EJsOGfRpI8L6eFUVw==", "dev": true, "dependencies": { - "is-number": "^7.0.0" + "@jest/core": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/types": "^27.5.1", + "chalk": "^4.0.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "import-local": "^3.0.2", + "jest-config": "^27.5.1", + "jest-util": "^27.5.1", + "jest-validate": "^27.5.1", + "prompts": "^2.0.1", + "yargs": "^16.2.0" }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/jest-jasmine2": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-jasmine2/-/jest-jasmine2-25.3.0.tgz", - "integrity": "sha512-NCYOGE6+HNzYFSui52SefgpsnIzvxjn6KAgqw66BdRp37xpMD/4kujDHLNW5bS5i53os5TcMn6jYrzQRO8VPrQ==", - "dev": true, - "dependencies": { - "@babel/traverse": "^7.1.0", - "@jest/environment": "^25.3.0", - "@jest/source-map": "^25.2.6", - "@jest/test-result": "^25.3.0", - "@jest/types": "^25.3.0", - "chalk": "^3.0.0", - "co": "^4.6.0", - "expect": "^25.3.0", - "is-generator-fn": "^2.0.0", - "jest-each": "^25.3.0", - "jest-matcher-utils": "^25.3.0", - "jest-message-util": "^25.3.0", - "jest-runtime": "^25.3.0", - "jest-snapshot": "^25.3.0", - "jest-util": "^25.3.0", - "pretty-format": "^25.3.0", - "throat": "^5.0.0" + "bin": { + "jest": "bin/jest.js" }, "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-jasmine2/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", - "dev": true, - "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" }, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-jasmine2/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", - "dev": true, - "dependencies": { - "@types/yargs-parser": "*" - } - }, - "node_modules/jest-jasmine2/node_modules/ansi-regex": { - "version": "5.0.0", - "resolved": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", - "dev": true, - "engines": { - "node": ">=8" + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } } }, - "node_modules/jest-jasmine2/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "node_modules/jest-cli/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -8422,20 +6998,23 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-jasmine2/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "node_modules/jest-cli/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/jest-jasmine2/node_modules/color-convert": { + "node_modules/jest-cli/node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", @@ -8447,13 +7026,13 @@ "node": ">=7.0.0" } }, - "node_modules/jest-jasmine2/node_modules/color-name": { + "node_modules/jest-cli/node_modules/color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "node_modules/jest-jasmine2/node_modules/has-flag": { + "node_modules/jest-cli/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", @@ -8462,86 +7041,152 @@ "node": ">=8" } }, - "node_modules/jest-jasmine2/node_modules/pretty-format": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-25.3.0.tgz", - "integrity": "sha512-wToHwF8bkQknIcFkBqNfKu4+UZqnrLn/Vr+wwKQwwvPzkBfDDKp/qIabFqdgtoi5PEnM8LFByVsOrHoa3SpTVA==", + "node_modules/jest-cli/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { - "@jest/types": "^25.3.0", - "ansi-regex": "^5.0.0", - 
"ansi-styles": "^4.0.0", - "react-is": "^16.12.0" + "has-flag": "^4.0.0" }, "engines": { - "node": ">= 8.3" + "node": ">=8" } }, - "node_modules/jest-jasmine2/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "node_modules/jest-config": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-27.5.1.tgz", + "integrity": "sha512-5sAsjm6tGdsVbW9ahcChPAFCk4IlkQUknH5AvKjuLTSlcO/wCZKyFdn7Rg0EkC+OGgWODEy2hDpWB1PgzH0JNA==", "dev": true, "dependencies": { - "has-flag": "^4.0.0" + "@babel/core": "^7.8.0", + "@jest/test-sequencer": "^27.5.1", + "@jest/types": "^27.5.1", + "babel-jest": "^27.5.1", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "deepmerge": "^4.2.2", + "glob": "^7.1.1", + "graceful-fs": "^4.2.9", + "jest-circus": "^27.5.1", + "jest-environment-jsdom": "^27.5.1", + "jest-environment-node": "^27.5.1", + "jest-get-type": "^27.5.1", + "jest-jasmine2": "^27.5.1", + "jest-regex-util": "^27.5.1", + "jest-resolve": "^27.5.1", + "jest-runner": "^27.5.1", + "jest-util": "^27.5.1", + "jest-validate": "^27.5.1", + "micromatch": "^4.0.4", + "parse-json": "^5.2.0", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "strip-json-comments": "^3.1.1" }, "engines": { - "node": ">=8" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + }, + "peerDependencies": { + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "ts-node": { + "optional": true + } } }, - "node_modules/jest-leak-detector": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-25.3.0.tgz", - "integrity": "sha512-jk7k24dMIfk8LUSQQGN8PyOy9+J0NAfHZWiDmUDYVMctY8FLJQ1eQ8+PjMoN8PgwhLIggUqgYJnyRFvUz3jLRw==", + "node_modules/jest-config/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "jest-get-type": "^25.2.6", - "pretty-format": "^25.3.0" + "color-convert": "^2.0.1" }, "engines": { - "node": ">= 8.3" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-leak-detector/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", + "node_modules/jest-config/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" }, "engines": { - "node": ">= 8.3" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/jest-leak-detector/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": 
"sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", + "node_modules/jest-config/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "dependencies": { - "@types/yargs-parser": "*" + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" } }, - "node_modules/jest-leak-detector/node_modules/ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "node_modules/jest-config/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-config/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-config/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, "engines": { "node": ">=8" } }, - "node_modules/jest-leak-detector/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "node_modules/jest-diff": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-27.5.1.tgz", + "integrity": "sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "diff-sequences": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-diff/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -8551,20 +7196,23 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-leak-detector/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "node_modules/jest-diff/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": 
"^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/jest-leak-detector/node_modules/color-convert": { + "node_modules/jest-diff/node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", @@ -8576,13 +7224,13 @@ "node": ">=7.0.0" } }, - "node_modules/jest-leak-detector/node_modules/color-name": { + "node_modules/jest-diff/node_modules/color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "node_modules/jest-leak-detector/node_modules/has-flag": { + "node_modules/jest-diff/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", @@ -8591,34 +7239,10 @@ "node": ">=8" } }, - "node_modules/jest-leak-detector/node_modules/jest-get-type": { - "version": "25.2.6", - "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-25.2.6.tgz", - "integrity": "sha512-DxjtyzOHjObRM+sM1knti6or+eOgcGU4xVSb2HNP1TqO4ahsT+rqZg+nyqHWJSvWgKC5cG3QjGFBqxLghiF/Ig==", - "dev": true, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-leak-detector/node_modules/pretty-format": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-25.3.0.tgz", - "integrity": "sha512-wToHwF8bkQknIcFkBqNfKu4+UZqnrLn/Vr+wwKQwwvPzkBfDDKp/qIabFqdgtoi5PEnM8LFByVsOrHoa3SpTVA==", - "dev": true, - "dependencies": { - "@jest/types": "^25.3.0", - "ansi-regex": "^5.0.0", - "ansi-styles": "^4.0.0", - "react-is": "^16.12.0" - }, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-leak-detector/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "node_modules/jest-diff/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { "has-flag": "^4.0.0" @@ -8627,61 +7251,40 @@ "node": ">=8" } }, - "node_modules/jest-matcher-utils": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-25.3.0.tgz", - "integrity": "sha512-ZBUJ2fchNIZt+fyzkuCFBb8SKaU//Rln45augfUtbHaGyVxCO++ANARdBK9oPGXU3hEDgyy7UHnOP/qNOJXFUg==", + "node_modules/jest-docblock": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-27.5.1.tgz", + "integrity": "sha512-rl7hlABeTsRYxKiUfpHrQrG4e2obOiTQWfMEH3PxPjOtdsfLQO4ReWSZaQ7DETm4xu07rl4q/h4zcKXyU0/OzQ==", "dev": true, "dependencies": { - "chalk": "^3.0.0", - "jest-diff": "^25.3.0", - "jest-get-type": "^25.2.6", - "pretty-format": "^25.3.0" + "detect-newline": "^3.0.0" }, "engines": { - "node": ">= 8.3" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - 
"node_modules/jest-matcher-utils/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", + "node_modules/jest-each": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-27.5.1.tgz", + "integrity": "sha512-1Ff6p+FbhT/bXQnEouYy00bkNSY7OUpfIcmdl8vZ31A1UUaurOLPA8a8BbJOF2RDUElwJhmeaV7LnagI+5UwNQ==", "dev": true, "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "@jest/types": "^27.5.1", + "chalk": "^4.0.0", + "jest-get-type": "^27.5.1", + "jest-util": "^27.5.1", + "pretty-format": "^27.5.1" }, "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-matcher-utils/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", - "dev": true, - "dependencies": { - "@types/yargs-parser": "*" - } - }, - "node_modules/jest-matcher-utils/node_modules/ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", - "dev": true, - "engines": { - "node": ">=8" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-matcher-utils/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "node_modules/jest-each/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -8691,20 +7294,23 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-matcher-utils/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "node_modules/jest-each/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/jest-matcher-utils/node_modules/color-convert": { + "node_modules/jest-each/node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", @@ -8716,22 +7322,13 @@ "node": ">=7.0.0" } }, - "node_modules/jest-matcher-utils/node_modules/color-name": { + 
"node_modules/jest-each/node_modules/color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "node_modules/jest-matcher-utils/node_modules/diff-sequences": { - "version": "25.2.6", - "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-25.2.6.tgz", - "integrity": "sha512-Hq8o7+6GaZeoFjtpgvRBUknSXNeJiCx7V9Fr94ZMljNiCr9n9L8H8aJqgWOQiDDGdyn29fRNcDdRVJ5fdyihfg==", - "dev": true, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-matcher-utils/node_modules/has-flag": { + "node_modules/jest-each/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", @@ -8740,106 +7337,122 @@ "node": ">=8" } }, - "node_modules/jest-matcher-utils/node_modules/jest-diff": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-25.3.0.tgz", - "integrity": "sha512-vyvs6RPoVdiwARwY4kqFWd4PirPLm2dmmkNzKqo38uZOzJvLee87yzDjIZLmY1SjM3XR5DwsUH+cdQ12vgqi1w==", + "node_modules/jest-each/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { - "chalk": "^3.0.0", - "diff-sequences": "^25.2.6", - "jest-get-type": "^25.2.6", - "pretty-format": "^25.3.0" + "has-flag": "^4.0.0" }, "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-matcher-utils/node_modules/jest-get-type": { - "version": "25.2.6", - "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-25.2.6.tgz", - "integrity": "sha512-DxjtyzOHjObRM+sM1knti6or+eOgcGU4xVSb2HNP1TqO4ahsT+rqZg+nyqHWJSvWgKC5cG3QjGFBqxLghiF/Ig==", - "dev": true, - "engines": { - "node": ">= 8.3" + "node": ">=8" } }, - "node_modules/jest-matcher-utils/node_modules/pretty-format": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-25.3.0.tgz", - "integrity": "sha512-wToHwF8bkQknIcFkBqNfKu4+UZqnrLn/Vr+wwKQwwvPzkBfDDKp/qIabFqdgtoi5PEnM8LFByVsOrHoa3SpTVA==", + "node_modules/jest-environment-jsdom": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-environment-jsdom/-/jest-environment-jsdom-27.5.1.tgz", + "integrity": "sha512-TFBvkTC1Hnnnrka/fUb56atfDtJ9VMZ94JkjTbggl1PEpwrYtUBKMezB3inLmWqQsXYLcMwNoDQwoBTAvFfsfw==", "dev": true, "dependencies": { - "@jest/types": "^25.3.0", - "ansi-regex": "^5.0.0", - "ansi-styles": "^4.0.0", - "react-is": "^16.12.0" + "@jest/environment": "^27.5.1", + "@jest/fake-timers": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "jest-mock": "^27.5.1", + "jest-util": "^27.5.1", + "jsdom": "^16.6.0" }, "engines": { - "node": ">= 8.3" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-matcher-utils/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "node_modules/jest-environment-node": { + "version": "27.5.1", + "resolved": 
"https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-27.5.1.tgz", + "integrity": "sha512-Jt4ZUnxdOsTGwSRAfKEnE6BcwsSPNOijjwifq5sDFSA2kesnXTvNqKHYgM0hDq3549Uf/KzdXNYn4wMZJPlFLw==", "dev": true, "dependencies": { - "has-flag": "^4.0.0" + "@jest/environment": "^27.5.1", + "@jest/fake-timers": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "jest-mock": "^27.5.1", + "jest-util": "^27.5.1" }, "engines": { - "node": ">=8" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-message-util": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-25.3.0.tgz", - "integrity": "sha512-5QNy9Id4WxJbRITEbA1T1kem9bk7y2fD0updZMSTNHtbEDnYOGLDPAuFBhFgVmOZpv0n6OMdVkK+WhyXEPCcOw==", + "node_modules/jest-get-type": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz", + "integrity": "sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw==", "dev": true, - "dependencies": { - "@babel/code-frame": "^7.0.0", - "@jest/types": "^25.3.0", - "@types/stack-utils": "^1.0.1", - "chalk": "^3.0.0", - "micromatch": "^4.0.2", - "slash": "^3.0.0", - "stack-utils": "^1.0.1" - }, "engines": { - "node": ">= 8.3" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-message-util/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", + "node_modules/jest-haste-map": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-27.5.1.tgz", + "integrity": "sha512-7GgkZ4Fw4NFbMSDSpZwXeBiIbx+t/46nJ2QitkOjvwPYyZmqttu2TDSimMHP1EkPOi4xUZAN1doE5Vd25H4Jng==", "dev": true, "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "@jest/types": "^27.5.1", + "@types/graceful-fs": "^4.1.2", + "@types/node": "*", + "anymatch": "^3.0.3", + "fb-watchman": "^2.0.0", + "graceful-fs": "^4.2.9", + "jest-regex-util": "^27.5.1", + "jest-serializer": "^27.5.1", + "jest-util": "^27.5.1", + "jest-worker": "^27.5.1", + "micromatch": "^4.0.4", + "walker": "^1.0.7" }, "engines": { - "node": ">= 8.3" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.2" } }, - "node_modules/jest-message-util/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", + "node_modules/jest-jasmine2": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-jasmine2/-/jest-jasmine2-27.5.1.tgz", + "integrity": "sha512-jtq7VVyG8SqAorDpApwiJJImd0V2wv1xzdheGHRGyuT7gZm6gG47QEskOlzsN1PG/6WNaCo5pmwMHDf3AkG2pQ==", "dev": true, "dependencies": { - "@types/yargs-parser": "*" + "@jest/environment": "^27.5.1", + "@jest/source-map": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "co": "^4.6.0", + "expect": "^27.5.1", + "is-generator-fn": "^2.0.0", + "jest-each": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-runtime": "^27.5.1", + "jest-snapshot": "^27.5.1", + "jest-util": 
"^27.5.1", + "pretty-format": "^27.5.1", + "throat": "^6.0.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-message-util/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "node_modules/jest-jasmine2/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -8849,32 +7462,23 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-message-util/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/jest-message-util/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "node_modules/jest-jasmine2/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/jest-message-util/node_modules/color-convert": { + "node_modules/jest-jasmine2/node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", @@ -8886,25 +7490,13 @@ "node": ">=7.0.0" } }, - "node_modules/jest-message-util/node_modules/color-name": { + "node_modules/jest-jasmine2/node_modules/color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "node_modules/jest-message-util/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/jest-message-util/node_modules/has-flag": { + "node_modules/jest-jasmine2/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", @@ -8913,32 +7505,10 @@ "node": ">=8" } }, - "node_modules/jest-message-util/node_modules/is-number": { - "version": "7.0.0", - 
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/jest-message-util/node_modules/micromatch": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.2.tgz", - "integrity": "sha512-y7FpHSbMUMoyPbYUSzO6PaZ6FyRnQOpHuKwbo1G+Knck95XVU4QAiKdGEnj5wwoS7PlOgthX/09u5iFJ+aYf5Q==", - "dev": true, - "dependencies": { - "braces": "^3.0.1", - "picomatch": "^2.0.5" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/jest-message-util/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "node_modules/jest-jasmine2/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { "has-flag": "^4.0.0" @@ -8947,61 +7517,40 @@ "node": ">=8" } }, - "node_modules/jest-message-util/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/jest-mock": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-25.3.0.tgz", - "integrity": "sha512-yRn6GbuqB4j3aYu+Z1ezwRiZfp0o9om5uOcBovVtkcRLeBCNP5mT0ysdenUsxAHnQUgGwPOE1wwhtQYe6NKirQ==", + "node_modules/jest-leak-detector": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-27.5.1.tgz", + "integrity": "sha512-POXfWAMvfU6WMUXftV4HolnJfnPOGEu10fscNCA76KBpRRhcMN2c8d3iT2pxQS3HLbA+5X4sOUPzYO2NUyIlHQ==", "dev": true, "dependencies": { - "@jest/types": "^25.3.0" + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" }, "engines": { - "node": ">= 8.3" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-mock/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", + "node_modules/jest-matcher-utils": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz", + "integrity": "sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw==", "dev": true, "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "chalk": "^4.0.0", + "jest-diff": "^27.5.1", + "jest-get-type": "^27.5.1", + "pretty-format": "^27.5.1" }, "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-mock/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": 
"sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", - "dev": true, - "dependencies": { - "@types/yargs-parser": "*" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-mock/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "node_modules/jest-matcher-utils/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -9011,20 +7560,23 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-mock/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "node_modules/jest-matcher-utils/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/jest-mock/node_modules/color-convert": { + "node_modules/jest-matcher-utils/node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", @@ -9036,13 +7588,13 @@ "node": ">=7.0.0" } }, - "node_modules/jest-mock/node_modules/color-name": { + "node_modules/jest-matcher-utils/node_modules/color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "node_modules/jest-mock/node_modules/has-flag": { + "node_modules/jest-matcher-utils/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", @@ -9051,10 +7603,10 @@ "node": ">=8" } }, - "node_modules/jest-mock/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "node_modules/jest-matcher-utils/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { "has-flag": "^4.0.0" @@ -9063,94 +7615,32 @@ "node": ">=8" } }, - "node_modules/jest-pnp-resolver": { - "version": "1.2.1", - "resolved": 
"https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.1.tgz", - "integrity": "sha512-pgFw2tm54fzgYvc/OHrnysABEObZCUNFnhjoRjaVOCN8NYc032/gVjPaHD4Aq6ApkSieWtfKAFQtmDKAmhupnQ==", - "dev": true, - "engines": { - "node": ">=6" - }, - "peerDependencies": { - "jest-resolve": "*" - }, - "peerDependenciesMeta": { - "jest-resolve": { - "optional": true - } - } - }, - "node_modules/jest-regex-util": { - "version": "25.2.6", - "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-25.2.6.tgz", - "integrity": "sha512-KQqf7a0NrtCkYmZZzodPftn7fL1cq3GQAFVMn5Hg8uKx/fIenLEobNanUxb7abQ1sjADHBseG/2FGpsv/wr+Qw==", - "dev": true, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-resolve": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-25.3.0.tgz", - "integrity": "sha512-IHoQAAybulsJ+ZgWis+ekYKDAoFkVH5Nx/znpb41zRtpxj4fr2WNV9iDqavdSm8GIpMlsfZxbC/fV9DhW0q9VQ==", - "dev": true, - "dependencies": { - "@jest/types": "^25.3.0", - "browser-resolve": "^1.11.3", - "chalk": "^3.0.0", - "jest-pnp-resolver": "^1.2.1", - "realpath-native": "^2.0.0", - "resolve": "^1.15.1" - }, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-resolve-dependencies": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-25.3.0.tgz", - "integrity": "sha512-bDUlLYmHW+f7J7KgcY2lkq8EMRqKonRl0XoD4Wp5SJkgAxKJnsaIOlrrVNTfXYf+YOu3VCjm/Ac2hPF2nfsCIA==", - "dev": true, - "dependencies": { - "@jest/types": "^25.3.0", - "jest-regex-util": "^25.2.6", - "jest-snapshot": "^25.3.0" - }, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-resolve-dependencies/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", - "dev": true, - "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "node_modules/jest-message-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz", + "integrity": "sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^27.5.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^27.5.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" }, "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-resolve-dependencies/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", - "dev": true, - "dependencies": { - "@types/yargs-parser": "*" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-resolve-dependencies/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "node_modules/jest-message-util/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -9160,20 +7650,23 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-resolve-dependencies/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "node_modules/jest-message-util/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/jest-resolve-dependencies/node_modules/color-convert": { + "node_modules/jest-message-util/node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", @@ -9185,13 +7678,13 @@ "node": ">=7.0.0" } }, - "node_modules/jest-resolve-dependencies/node_modules/color-name": { + "node_modules/jest-message-util/node_modules/color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "node_modules/jest-resolve-dependencies/node_modules/has-flag": { + "node_modules/jest-message-util/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", @@ -9200,10 +7693,10 @@ "node": ">=8" } }, - "node_modules/jest-resolve-dependencies/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "node_modules/jest-message-util/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { "has-flag": "^4.0.0" @@ -9212,37 +7705,86 @@ "node": ">=8" } }, - "node_modules/jest-resolve/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", + "node_modules/jest-mock": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-27.5.1.tgz", + "integrity": "sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og==", "dev": true, "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - 
"@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "@jest/types": "^27.5.1", + "@types/node": "*" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-pnp-resolver": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz", + "integrity": "sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==", + "dev": true, + "engines": { + "node": ">=6" + }, + "peerDependencies": { + "jest-resolve": "*" + }, + "peerDependenciesMeta": { + "jest-resolve": { + "optional": true + } + } + }, + "node_modules/jest-regex-util": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-27.5.1.tgz", + "integrity": "sha512-4bfKq2zie+x16okqDXjXn9ql2B0dScQu+vcwe4TvFVhkVyuWLqpZrZtXxLLWoXYgn0E87I6r6GRYHF7wFZBUvg==", + "dev": true, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/jest-resolve": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-27.5.1.tgz", + "integrity": "sha512-FFDy8/9E6CV83IMbDpcjOhumAQPDyETnU2KZ1O98DwTnz8AOBsW/Xv3GySr1mOZdItLR+zDZ7I/UdTFbgSOVCw==", + "dev": true, + "dependencies": { + "@jest/types": "^27.5.1", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^27.5.1", + "jest-pnp-resolver": "^1.2.2", + "jest-util": "^27.5.1", + "jest-validate": "^27.5.1", + "resolve": "^1.20.0", + "resolve.exports": "^1.1.0", + "slash": "^3.0.0" }, "engines": { - "node": ">= 8.3" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-resolve/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", + "node_modules/jest-resolve-dependencies": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-27.5.1.tgz", + "integrity": "sha512-QQOOdY4PE39iawDn5rzbIePNigfe5B9Z91GDD1ae/xNDlu9kaat8QQ5EKnNmVWPV54hUdxCVwwj6YMgR2O7IOg==", "dev": true, "dependencies": { - "@types/yargs-parser": "*" + "@jest/types": "^27.5.1", + "jest-regex-util": "^27.5.1", + "jest-snapshot": "^27.5.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, "node_modules/jest-resolve/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -9253,16 +7795,19 @@ } }, "node_modules/jest-resolve/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { 
"ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, "node_modules/jest-resolve/node_modules/color-convert": { @@ -9293,9 +7838,9 @@ } }, "node_modules/jest-resolve/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { "has-flag": "^4.0.0" @@ -9305,66 +7850,43 @@ } }, "node_modules/jest-runner": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-25.3.0.tgz", - "integrity": "sha512-csDqSC9qGHYWDrzrElzEgFbteztFeZJmKhSgY5jlCIcN0+PhActzRNku0DA1Xa1HxGOb0/AfbP1EGJlP4fGPtA==", + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-27.5.1.tgz", + "integrity": "sha512-g4NPsM4mFCOwFKXO4p/H/kWGdJp9V8kURY2lX8Me2drgXqG7rrZAx5kv+5H7wtt/cdFIjhqYx1HrlqWHaOvDaQ==", "dev": true, "dependencies": { - "@jest/console": "^25.3.0", - "@jest/environment": "^25.3.0", - "@jest/test-result": "^25.3.0", - "@jest/types": "^25.3.0", - "chalk": "^3.0.0", - "exit": "^0.1.2", - "graceful-fs": "^4.2.3", - "jest-config": "^25.3.0", - "jest-docblock": "^25.3.0", - "jest-haste-map": "^25.3.0", - "jest-jasmine2": "^25.3.0", - "jest-leak-detector": "^25.3.0", - "jest-message-util": "^25.3.0", - "jest-resolve": "^25.3.0", - "jest-runtime": "^25.3.0", - "jest-util": "^25.3.0", - "jest-worker": "^25.2.6", + "@jest/console": "^27.5.1", + "@jest/environment": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/transform": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "emittery": "^0.8.1", + "graceful-fs": "^4.2.9", + "jest-docblock": "^27.5.1", + "jest-environment-jsdom": "^27.5.1", + "jest-environment-node": "^27.5.1", + "jest-haste-map": "^27.5.1", + "jest-leak-detector": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-resolve": "^27.5.1", + "jest-runtime": "^27.5.1", + "jest-util": "^27.5.1", + "jest-worker": "^27.5.1", "source-map-support": "^0.5.6", - "throat": "^5.0.0" - }, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-runner/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", - "dev": true, - "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "throat": "^6.0.1" }, "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-runner/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", - "dev": true, - "dependencies": { - "@types/yargs-parser": "*" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, "node_modules/jest-runner/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -9375,16 +7897,19 @@ } }, "node_modules/jest-runner/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, "node_modules/jest-runner/node_modules/color-convert": { @@ -9415,9 +7940,9 @@ } }, "node_modules/jest-runner/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { "has-flag": "^4.0.0" @@ -9427,75 +7952,44 @@ } }, "node_modules/jest-runtime": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-25.3.0.tgz", - "integrity": "sha512-gn5KYB1wxXRM3nfw8fVpthFu60vxQUCr+ShGq41+ZBFF3DRHZRKj3HDWVAVB4iTNBj2y04QeAo5cZ/boYaPg0w==", - "dev": true, - "dependencies": { - "@jest/console": "^25.3.0", - "@jest/environment": "^25.3.0", - "@jest/source-map": "^25.2.6", - "@jest/test-result": "^25.3.0", - "@jest/transform": "^25.3.0", - "@jest/types": "^25.3.0", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0", + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-27.5.1.tgz", + "integrity": "sha512-o7gxw3Gf+H2IGt8fv0RiyE1+r83FJBRruoA+FXrlHw6xEyBsU8ugA6IPfTdVyA0w8HClpbK+DGJxH59UrNMx8A==", + "dev": true, + "dependencies": { + "@jest/environment": "^27.5.1", + "@jest/fake-timers": "^27.5.1", + "@jest/globals": "^27.5.1", + "@jest/source-map": "^27.5.1", + "@jest/test-result": "^27.5.1", + "@jest/transform": "^27.5.1", + "@jest/types": "^27.5.1", + "chalk": "^4.0.0", + "cjs-module-lexer": "^1.0.0", "collect-v8-coverage": "^1.0.0", - "exit": "^0.1.2", + "execa": "^5.0.0", "glob": "^7.1.3", - "graceful-fs": "^4.2.3", - "jest-config": "^25.3.0", - "jest-haste-map": "^25.3.0", - "jest-message-util": "^25.3.0", - "jest-mock": "^25.3.0", - "jest-regex-util": "^25.2.6", - "jest-resolve": "^25.3.0", - "jest-snapshot": "^25.3.0", - "jest-util": "^25.3.0", - "jest-validate": "^25.3.0", - "realpath-native": "^2.0.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-mock": "^27.5.1", + "jest-regex-util": "^27.5.1", + "jest-resolve": "^27.5.1", + "jest-snapshot": "^27.5.1", + 
"jest-util": "^27.5.1", "slash": "^3.0.0", - "strip-bom": "^4.0.0", - "yargs": "^15.3.1" - }, - "bin": { - "jest-runtime": "bin/jest-runtime.js" - }, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-runtime/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", - "dev": true, - "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "strip-bom": "^4.0.0" }, "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-runtime/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", - "dev": true, - "dependencies": { - "@types/yargs-parser": "*" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, "node_modules/jest-runtime/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -9506,16 +8000,19 @@ } }, "node_modules/jest-runtime/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, "node_modules/jest-runtime/node_modules/color-convert": { @@ -9536,6 +8033,41 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, + "node_modules/jest-runtime/node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/jest-runtime/node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + 
"engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/jest-runtime/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -9545,92 +8077,109 @@ "node": ">=8" } }, - "node_modules/jest-runtime/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "node_modules/jest-runtime/node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-runtime/node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", "dev": true, "dependencies": { - "has-flag": "^4.0.0" + "path-key": "^3.0.0" }, "engines": { "node": ">=8" } }, - "node_modules/jest-serializer": { - "version": "25.2.6", - "resolved": "https://registry.npmjs.org/jest-serializer/-/jest-serializer-25.2.6.tgz", - "integrity": "sha512-RMVCfZsezQS2Ww4kB5HJTMaMJ0asmC0BHlnobQC6yEtxiFKIxohFA4QSXSabKwSggaNkqxn6Z2VwdFCjhUWuiQ==", + "node_modules/jest-runtime/node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", "dev": true, "engines": { - "node": ">= 8.3" + "node": ">=8" } }, - "node_modules/jest-snapshot": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-25.3.0.tgz", - "integrity": "sha512-GGpR6Oro2htJPKh5RX4PR1xwo5jCEjtvSPLW1IS7N85y+2bWKbiknHpJJRKSdGXghElb5hWaeQASJI4IiRayGg==", + "node_modules/jest-runtime/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/jest-runtime/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { - "@babel/types": "^7.0.0", - "@jest/types": "^25.3.0", - "@types/prettier": "^1.19.0", - "chalk": "^3.0.0", - "expect": "^25.3.0", - "jest-diff": "^25.3.0", - "jest-get-type": "^25.2.6", - "jest-matcher-utils": "^25.3.0", - "jest-message-util": "^25.3.0", - "jest-resolve": "^25.3.0", - "make-dir": "^3.0.0", - "natural-compare": "^1.4.0", - "pretty-format": "^25.3.0", - "semver": "^6.3.0" + "has-flag": "^4.0.0" }, "engines": { - "node": ">= 8.3" + "node": ">=8" } }, - "node_modules/jest-snapshot/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": 
"sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", + "node_modules/jest-serializer": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-serializer/-/jest-serializer-27.5.1.tgz", + "integrity": "sha512-jZCyo6iIxO1aqUxpuBlwTDMkzOAJS4a3eYz3YzgxxVQFwLeSA7Jfq5cbqCY+JLvTDrWirgusI/0KwxKMgrdf7w==", "dev": true, "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "@types/node": "*", + "graceful-fs": "^4.2.9" }, "engines": { - "node": ">= 8.3" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-snapshot/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", + "node_modules/jest-snapshot": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-27.5.1.tgz", + "integrity": "sha512-yYykXI5a0I31xX67mgeLw1DZ0bJB+gpq5IpSuCAoyDi0+BhgU/RIrL+RTzDmkNTchvDFWKP8lp+w/42Z3us5sA==", "dev": true, "dependencies": { - "@types/yargs-parser": "*" - } - }, - "node_modules/jest-snapshot/node_modules/ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", - "dev": true, + "@babel/core": "^7.7.2", + "@babel/generator": "^7.7.2", + "@babel/plugin-syntax-typescript": "^7.7.2", + "@babel/traverse": "^7.7.2", + "@babel/types": "^7.0.0", + "@jest/transform": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/babel__traverse": "^7.0.4", + "@types/prettier": "^2.1.5", + "babel-preset-current-node-syntax": "^1.0.0", + "chalk": "^4.0.0", + "expect": "^27.5.1", + "graceful-fs": "^4.2.9", + "jest-diff": "^27.5.1", + "jest-get-type": "^27.5.1", + "jest-haste-map": "^27.5.1", + "jest-matcher-utils": "^27.5.1", + "jest-message-util": "^27.5.1", + "jest-util": "^27.5.1", + "natural-compare": "^1.4.0", + "pretty-format": "^27.5.1", + "semver": "^7.3.2" + }, "engines": { - "node": ">=8" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, "node_modules/jest-snapshot/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -9641,16 +8190,19 @@ } }, "node_modules/jest-snapshot/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + 
"funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, "node_modules/jest-snapshot/node_modules/color-convert": { @@ -9661,101 +8213,41 @@ "dependencies": { "color-name": "~1.1.4" }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/jest-snapshot/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "node_modules/jest-snapshot/node_modules/diff-sequences": { - "version": "25.2.6", - "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-25.2.6.tgz", - "integrity": "sha512-Hq8o7+6GaZeoFjtpgvRBUknSXNeJiCx7V9Fr94ZMljNiCr9n9L8H8aJqgWOQiDDGdyn29fRNcDdRVJ5fdyihfg==", - "dev": true, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-snapshot/node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/jest-snapshot/node_modules/jest-diff": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-25.3.0.tgz", - "integrity": "sha512-vyvs6RPoVdiwARwY4kqFWd4PirPLm2dmmkNzKqo38uZOzJvLee87yzDjIZLmY1SjM3XR5DwsUH+cdQ12vgqi1w==", - "dev": true, - "dependencies": { - "chalk": "^3.0.0", - "diff-sequences": "^25.2.6", - "jest-get-type": "^25.2.6", - "pretty-format": "^25.3.0" - }, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-snapshot/node_modules/jest-get-type": { - "version": "25.2.6", - "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-25.2.6.tgz", - "integrity": "sha512-DxjtyzOHjObRM+sM1knti6or+eOgcGU4xVSb2HNP1TqO4ahsT+rqZg+nyqHWJSvWgKC5cG3QjGFBqxLghiF/Ig==", - "dev": true, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-snapshot/node_modules/make-dir": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.0.2.tgz", - "integrity": "sha512-rYKABKutXa6vXTXhoV18cBE7PaewPXHe/Bdq4v+ZLMhxbWApkFFplT0LcbMW+6BbjnQXzZ/sAvSE/JdguApG5w==", - "dev": true, - "dependencies": { - "semver": "^6.0.0" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "engines": { + "node": ">=7.0.0" } }, - "node_modules/jest-snapshot/node_modules/pretty-format": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-25.3.0.tgz", - "integrity": "sha512-wToHwF8bkQknIcFkBqNfKu4+UZqnrLn/Vr+wwKQwwvPzkBfDDKp/qIabFqdgtoi5PEnM8LFByVsOrHoa3SpTVA==", + "node_modules/jest-snapshot/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-snapshot/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "dependencies": { - "@jest/types": "^25.3.0", - "ansi-regex": "^5.0.0", - "ansi-styles": "^4.0.0", - "react-is": "^16.12.0" - }, "engines": { - "node": ">= 8.3" + "node": ">=8" } }, 
"node_modules/jest-snapshot/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", "dev": true, "bin": { "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" } }, "node_modules/jest-snapshot/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { "has-flag": "^4.0.0" @@ -9765,51 +8257,28 @@ } }, "node_modules/jest-util": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-25.3.0.tgz", - "integrity": "sha512-dc625P/KS/CpWTJJJxKc4bA3A6c+PJGBAqS8JTJqx4HqPoKNqXg/Ec8biL2Z1TabwK7E7Ilf0/ukSEXM1VwzNA==", + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz", + "integrity": "sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw==", "dev": true, "dependencies": { - "@jest/types": "^25.3.0", - "chalk": "^3.0.0", - "is-ci": "^2.0.0", - "make-dir": "^3.0.0" - }, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-util/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", - "dev": true, - "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "@jest/types": "^27.5.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" }, "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-util/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", - "dev": true, - "dependencies": { - "@types/yargs-parser": "*" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, "node_modules/jest-util/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -9820,16 +8289,19 @@ } }, "node_modules/jest-util/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - 
"integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, "node_modules/jest-util/node_modules/color-convert": { @@ -9859,34 +8331,10 @@ "node": ">=8" } }, - "node_modules/jest-util/node_modules/make-dir": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.0.2.tgz", - "integrity": "sha512-rYKABKutXa6vXTXhoV18cBE7PaewPXHe/Bdq4v+ZLMhxbWApkFFplT0LcbMW+6BbjnQXzZ/sAvSE/JdguApG5w==", - "dev": true, - "dependencies": { - "semver": "^6.0.0" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/jest-util/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - } - }, "node_modules/jest-util/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { "has-flag": "^4.0.0" @@ -9896,82 +8344,63 @@ } }, "node_modules/jest-validate": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-25.3.0.tgz", - "integrity": "sha512-3WuXgIZ4HXUvW6gk9twFFkT9j6zUorKnF2oEY8VEsHb7x5LGvVlN3WUsbqazVKuyXwvikO2zFJ/YTySMsMje2w==", + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-27.5.1.tgz", + "integrity": "sha512-thkNli0LYTmOI1tDB3FI1S1RTp/Bqyd9pTarJwL87OIBFuqEb5Apv5EaApEudYg4g86e3CT6kM0RowkhtEnCBQ==", "dev": true, "dependencies": { - "@jest/types": "^25.3.0", - "camelcase": "^5.3.1", - "chalk": "^3.0.0", - "jest-get-type": "^25.2.6", + "@jest/types": "^27.5.1", + "camelcase": "^6.2.0", + "chalk": "^4.0.0", + "jest-get-type": "^27.5.1", "leven": "^3.1.0", - "pretty-format": "^25.3.0" + "pretty-format": "^27.5.1" }, "engines": { - "node": ">= 8.3" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, - "node_modules/jest-validate/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", + "node_modules/jest-validate/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": 
"^3.0.0" + "color-convert": "^2.0.1" }, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-validate/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", - "dev": true, - "dependencies": { - "@types/yargs-parser": "*" - } - }, - "node_modules/jest-validate/node_modules/ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", - "dev": true, "engines": { "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/jest-validate/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "node_modules/jest-validate/node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", "dev": true, - "dependencies": { - "@types/color-name": "^1.1.1", - "color-convert": "^2.0.1" - }, "engines": { - "node": ">=8" + "node": ">=10" }, "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/jest-validate/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, "node_modules/jest-validate/node_modules/color-convert": { @@ -10001,34 +8430,10 @@ "node": ">=8" } }, - "node_modules/jest-validate/node_modules/jest-get-type": { - "version": "25.2.6", - "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-25.2.6.tgz", - "integrity": "sha512-DxjtyzOHjObRM+sM1knti6or+eOgcGU4xVSb2HNP1TqO4ahsT+rqZg+nyqHWJSvWgKC5cG3QjGFBqxLghiF/Ig==", - "dev": true, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-validate/node_modules/pretty-format": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-25.3.0.tgz", - "integrity": "sha512-wToHwF8bkQknIcFkBqNfKu4+UZqnrLn/Vr+wwKQwwvPzkBfDDKp/qIabFqdgtoi5PEnM8LFByVsOrHoa3SpTVA==", - "dev": true, - "dependencies": { - "@jest/types": "^25.3.0", - "ansi-regex": "^5.0.0", - "ansi-styles": "^4.0.0", - "react-is": "^16.12.0" - }, - "engines": { - "node": ">= 8.3" - } - }, "node_modules/jest-validate/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "version": "7.2.0", + "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { "has-flag": "^4.0.0" @@ -10038,53 +8443,29 @@ } }, "node_modules/jest-watcher": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-25.3.0.tgz", - "integrity": "sha512-dtFkfidFCS9Ucv8azOg2hkiY3sgJEHeTLtGFHS+jfBEE7eRtrO6+2r1BokyDkaG2FOD7485r/SgpC1MFAENfeA==", + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-27.5.1.tgz", + "integrity": "sha512-z676SuD6Z8o8qbmEGhoEUFOM1+jfEiL3DXHK/xgEiG2EyNYfFG60jluWcupY6dATjfEsKQuibReS1djInQnoVw==", "dev": true, "dependencies": { - "@jest/test-result": "^25.3.0", - "@jest/types": "^25.3.0", + "@jest/test-result": "^27.5.1", + "@jest/types": "^27.5.1", + "@types/node": "*", "ansi-escapes": "^4.2.1", - "chalk": "^3.0.0", - "jest-util": "^25.3.0", - "string-length": "^3.1.0" - }, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-watcher/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", - "dev": true, - "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" + "chalk": "^4.0.0", + "jest-util": "^27.5.1", + "string-length": "^4.0.1" }, "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest-watcher/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", - "dev": true, - "dependencies": { - "@types/yargs-parser": "*" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, "node_modules/jest-watcher/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -10095,16 +8476,19 @@ } }, "node_modules/jest-watcher/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, "node_modules/jest-watcher/node_modules/color-convert": { @@ -10135,9 +8519,9 @@ } }, "node_modules/jest-watcher/node_modules/supports-color": { - "version": "7.1.0", - "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { "has-flag": "^4.0.0" @@ -10147,16 +8531,17 @@ } }, "node_modules/jest-worker": { - "version": "25.2.6", - "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-25.2.6.tgz", - "integrity": "sha512-FJn9XDUSxcOR4cwDzRfL1z56rUofNTFs539FGASpd50RHdb6EVkhxQqktodW2mI49l+W3H+tFJDotCHUQF6dmA==", + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz", + "integrity": "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==", "dev": true, "dependencies": { + "@types/node": "*", "merge-stream": "^2.0.0", - "supports-color": "^7.0.0" + "supports-color": "^8.0.0" }, "engines": { - "node": ">= 8.3" + "node": ">= 10.13.0" } }, "node_modules/jest-worker/node_modules/has-flag": { @@ -10169,134 +8554,18 @@ } }, "node_modules/jest-worker/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", "dev": true, "dependencies": { "has-flag": "^4.0.0" }, "engines": { - "node": ">=8" - } - }, - "node_modules/jest/node_modules/@jest/types": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-25.3.0.tgz", - "integrity": "sha512-UkaDNewdqXAmCDbN2GlUM6amDKS78eCqiw/UmF5nE0mmLTd6moJkiZJML/X52Ke3LH7Swhw883IRXq8o9nWjVw==", - "dev": true, - "dependencies": { - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^1.1.1", - "@types/yargs": "^15.0.0", - "chalk": "^3.0.0" - }, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest/node_modules/@types/yargs": { - "version": "15.0.4", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", - "integrity": "sha512-9T1auFmbPZoxHz0enUFlUuKRy3it01R+hlggyVUMtnCTQRunsQYifnSGb8hET4Xo8yiC0o0r1paW3ud5+rbURg==", - "dev": true, - "dependencies": { - "@types/yargs-parser": "*" - } - }, - "node_modules/jest/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", - "dev": true, - "dependencies": { - "@types/color-name": "^1.1.1", - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" + "node": ">=10" }, "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/jest/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=8" - } - }, - 
"node_modules/jest/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/jest/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "node_modules/jest/node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/jest/node_modules/jest-cli": { - "version": "25.3.0", - "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-25.3.0.tgz", - "integrity": "sha512-XpNQPlW1tzpP7RGG8dxpkRegYDuLjzSiENu92+CYM87nEbmEPb3b4+yo8xcsHOnj0AG7DUt9b3uG8LuHI3MDzw==", - "dev": true, - "dependencies": { - "@jest/core": "^25.3.0", - "@jest/test-result": "^25.3.0", - "@jest/types": "^25.3.0", - "chalk": "^3.0.0", - "exit": "^0.1.2", - "import-local": "^3.0.2", - "is-ci": "^2.0.0", - "jest-config": "^25.3.0", - "jest-util": "^25.3.0", - "jest-validate": "^25.3.0", - "prompts": "^2.0.1", - "realpath-native": "^2.0.0", - "yargs": "^15.3.1" - }, - "bin": { - "jest": "bin/jest.js" - }, - "engines": { - "node": ">= 8.3" - } - }, - "node_modules/jest/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", - "dev": true, - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=8" + "url": "https://github.com/chalk/supports-color?sponsor=1" } }, "node_modules/jose": { @@ -10321,12 +8590,12 @@ "dev": true }, "node_modules/js-yaml": { - "version": "3.13.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", - "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "license": "MIT", "dependencies": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" + "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" @@ -10338,40 +8607,41 @@ "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" }, "node_modules/jsdom": { - "version": "15.2.1", - "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-15.2.1.tgz", - "integrity": "sha512-fAl1W0/7T2G5vURSyxBzrJ1LSdQn6Tr5UX/xD4PXDx/PDgwygedfW6El/KIj3xJ7FU61TTYnc/l/B7P49Eqt6g==", - "dev": true, - "dependencies": { - "abab": "^2.0.0", - "acorn": "^7.1.0", - "acorn-globals": "^4.3.2", - "array-equal": "^1.0.0", - "cssom": "^0.4.1", - "cssstyle": "^2.0.0", - "data-urls": "^1.1.0", - "domexception": "^1.0.1", - "escodegen": "^1.11.1", - "html-encoding-sniffer": "^1.0.2", + "version": "16.7.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-16.7.0.tgz", + "integrity": 
"sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw==", + "dev": true, + "dependencies": { + "abab": "^2.0.5", + "acorn": "^8.2.4", + "acorn-globals": "^6.0.0", + "cssom": "^0.4.4", + "cssstyle": "^2.3.0", + "data-urls": "^2.0.0", + "decimal.js": "^10.2.1", + "domexception": "^2.0.1", + "escodegen": "^2.0.0", + "form-data": "^3.0.0", + "html-encoding-sniffer": "^2.0.1", + "http-proxy-agent": "^4.0.1", + "https-proxy-agent": "^5.0.0", + "is-potential-custom-element-name": "^1.0.1", "nwsapi": "^2.2.0", - "parse5": "5.1.0", - "pn": "^1.1.0", - "request": "^2.88.0", - "request-promise-native": "^1.0.7", - "saxes": "^3.1.9", - "symbol-tree": "^3.2.2", - "tough-cookie": "^3.0.1", - "w3c-hr-time": "^1.0.1", - "w3c-xmlserializer": "^1.1.2", - "webidl-conversions": "^4.0.2", + "parse5": "6.0.1", + "saxes": "^5.0.1", + "symbol-tree": "^3.2.4", + "tough-cookie": "^4.0.0", + "w3c-hr-time": "^1.0.2", + "w3c-xmlserializer": "^2.0.0", + "webidl-conversions": "^6.1.0", "whatwg-encoding": "^1.0.5", "whatwg-mimetype": "^2.3.0", - "whatwg-url": "^7.0.0", - "ws": "^7.0.0", + "whatwg-url": "^8.5.0", + "ws": "^7.4.6", "xml-name-validator": "^3.0.0" }, "engines": { - "node": ">=8" + "node": ">=10" }, "peerDependencies": { "canvas": "^2.5.0" @@ -10382,38 +8652,106 @@ } } }, + "node_modules/jsdom/node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/jsdom/node_modules/debug": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/jsdom/node_modules/form-data": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.3.tgz", + "integrity": "sha512-q5YBMeWy6E2Un0nMGWMgI65MAKtaylxfNJGJxpGh45YDciZB4epbWpaAfImil6CPAPTYB4sh0URQNDRIZG5F2w==", + "dev": true, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "mime-types": "^2.1.35" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/jsdom/node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dev": true, + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/jsdom/node_modules/tough-cookie": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz", - "integrity": "sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg==", + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.4.tgz", + "integrity": "sha512-Loo5UUvLD9ScZ6jh8beX1T6sO1w2/MpCRpEP7V280GKMVUQ0Jzar2U3UJPsrdbziLEMMhu3Ujnq//rhiFuIeag==", "dev": true, "dependencies": { - "ip-regex": "^2.1.0", - "psl": "^1.1.28", - "punycode": "^2.1.1" + 
"psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.2.0", + "url-parse": "^1.5.3" }, "engines": { - "node": ">=6" + "node": ">=6" + } + }, + "node_modules/jsep": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/jsep/-/jsep-1.4.0.tgz", + "integrity": "sha512-B7qPcEVE3NVkmSJbaYxvv4cHkVW7DQsZz13pUMrfS8z8Q/BuShN+gcTXrUlPiGqM2/t/EEaI030bpxMqY8gMlw==", + "license": "MIT", + "engines": { + "node": ">= 10.16.0" } }, "node_modules/jsesc": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", "dev": true, "bin": { "jsesc": "bin/jsesc" }, "engines": { - "node": ">=4" + "node": ">=6" } }, "node_modules/json-bigint": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz", - "integrity": "sha1-DM2RLEuCcNBfBW+9E4FLU9OCWx4=", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "license": "MIT", "dependencies": { - "bignumber.js": "^7.0.0" + "bignumber.js": "^9.0.0" } }, "node_modules/json-buffer": { @@ -10422,10 +8760,17 @@ "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", "license": "MIT" }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, "node_modules/json-schema": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", + "license": "(AFL-2.1 OR BSD-3-Clause)" }, "node_modules/json-schema-traverse": { "version": "0.4.1", @@ -10450,12 +8795,21 @@ } }, "node_modules/jsonpath-plus": { - "version": "0.19.0", - "resolved": "https://registry.npmjs.org/jsonpath-plus/-/jsonpath-plus-0.19.0.tgz", - "integrity": "sha512-GSVwsrzW9LsA5lzsqe4CkuZ9wp+kxBb2GwNniaWzI2YFn5Ig42rSW8ZxVpWXaAfakXNrx5pgY5AbQq7kzX29kg==", + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/jsonpath-plus/-/jsonpath-plus-10.3.0.tgz", + "integrity": "sha512-8TNmfeTCk2Le33A3vRRwtuworG/L5RrgMvdjhKZxvyShO+mBu2fP50OWUjRLNtvw344DdDarFh9buFAZs5ujeA==", "license": "MIT", + "dependencies": { + "@jsep-plugin/assignment": "^1.3.0", + "@jsep-plugin/regex": "^1.0.4", + "jsep": "^1.4.0" + }, + "bin": { + "jsonpath": "bin/jsonpath-cli.js", + "jsonpath-plus": "bin/jsonpath-cli.js" + }, "engines": { - "node": ">=6.0" + "node": ">=18.0.0" } }, "node_modules/jsprim": { @@ -10473,21 +8827,23 @@ } }, "node_modules/jwa": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", - "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + 
"version": "2.0.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.1.tgz", + "integrity": "sha512-hRF04fqJIP8Abbkq5NKGN0Bbr3JxlQ+qhZufXVr0DvujKy93ZCbXZMHDL4EOtodSbCWxOqR8MS1tXA5hwqCXDg==", + "license": "MIT", "dependencies": { - "buffer-equal-constant-time": "1.0.1", + "buffer-equal-constant-time": "^1.0.1", "ecdsa-sig-formatter": "1.0.11", "safe-buffer": "^5.0.1" } }, "node_modules/jws": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", - "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", + "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", + "license": "MIT", "dependencies": { - "jwa": "^1.4.1", + "jwa": "^2.0.0", "safe-buffer": "^5.0.1" } }, @@ -10500,14 +8856,6 @@ "json-buffer": "3.0.1" } }, - "node_modules/kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/kleur": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", @@ -10526,18 +8874,11 @@ "node": ">=6" } }, - "node_modules/levn": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", - "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", - "dev": true, - "dependencies": { - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2" - }, - "engines": { - "node": ">= 0.8.0" - } + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true }, "node_modules/locate-path": { "version": "5.0.0", @@ -10559,24 +8900,9 @@ "node_modules/lodash.memoize": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", - "integrity": "sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4=", - "dev": true - }, - "node_modules/lodash.sortby": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", - "integrity": "sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=", + "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", "dev": true }, - "node_modules/lolex": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/lolex/-/lolex-5.1.2.tgz", - "integrity": "sha512-h4hmjAvHTmd+25JSwrtTIuwbKdwg5NzZVRMLn9saij4SZaepCrTCxPr35H/3bjwfMJtN+t3CX8672UIkglz28A==", - "dev": true, - "dependencies": { - "@sinonjs/commons": "^1.7.0" - } - }, "node_modules/lowercase-keys": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", @@ -10587,30 +8913,39 @@ } }, "node_modules/lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "license": "ISC", "dependencies": { - "yallist": 
"^3.0.2" + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" } }, "node_modules/make-dir": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz", - "integrity": "sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "license": "MIT", "dependencies": { - "pify": "^3.0.0" + "semver": "^6.0.0" }, "engines": { - "node": ">=4" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/make-dir/node_modules/pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", - "engines": { - "node": ">=4" + "node_modules/make-dir/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" } }, "node_modules/make-error": { @@ -10619,31 +8954,12 @@ "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==" }, "node_modules/makeerror": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.11.tgz", - "integrity": "sha1-4BpckQnyr3lmDk6LlYd5AYT1qWw=", + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", + "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", "dev": true, "dependencies": { - "tmpl": "1.0.x" - } - }, - "node_modules/map-cache": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz", - "integrity": "sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/map-visit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/map-visit/-/map-visit-1.0.0.tgz", - "integrity": "sha1-7Nyo8TFE5mDxtb1B8S80edmN+48=", - "dependencies": { - "object-visit": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" + "tmpl": "1.0.5" } }, "node_modules/math-intrinsics": { @@ -10656,17 +8972,22 @@ } }, "node_modules/media-typer": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", + "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", + "license": "MIT", "engines": { - "node": ">= 0.6" + "node": ">= 0.8" } }, "node_modules/merge-descriptors": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", - "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", + "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", + "license": "MIT", + "engines": { + "node": ">=18" + }, 
"funding": { "url": "https://github.com/sponsors/sindresorhus" } @@ -10674,8 +8995,7 @@ "node_modules/merge-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", - "dev": true + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==" }, "node_modules/methods": { "version": "1.1.2", @@ -10686,26 +9006,16 @@ } }, "node_modules/micromatch": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", - "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", - "dependencies": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - "fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.2" + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" }, "engines": { - "node": ">=0.10.0" + "node": ">=8.6" } }, "node_modules/mime": { @@ -10720,9 +9030,9 @@ } }, "node_modules/mime-db": { - "version": "1.43.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.43.0.tgz", - "integrity": "sha512-+5dsGEEovYbT8UY9yD7eE4XTc4UwJ1jBYlgaQQF38ENsKR3wj/8q8RFZrF9WIZpB2V1ArTVFUva8sAul1NzRzQ==", + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", "engines": { "node": ">= 0.6" } @@ -10888,35 +9198,6 @@ "node": ">=8" } }, - "node_modules/minizlib/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "license": "ISC" - }, - "node_modules/mixin-deep": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz", - "integrity": "sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==", - "dependencies": { - "for-in": "^1.0.2", - "is-extendable": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/mixin-deep/node_modules/is-extendable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", - "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", - "dependencies": { - "is-plain-object": "^2.0.4" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/mkdirp": { "version": "0.5.5", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", @@ -10930,54 +9211,30 @@ } }, "node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "node_modules/nanomatch": { - "version": "1.2.13", - "resolved": 
"https://registry.npmjs.org/nanomatch/-/nanomatch-1.2.13.tgz", - "integrity": "sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==", - "dependencies": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "fragment-cache": "^0.2.1", - "is-windows": "^1.0.2", - "kind-of": "^6.0.2", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, "node_modules/natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", "dev": true }, "node_modules/negotiator": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", - "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", + "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", + "license": "MIT", "engines": { "node": ">= 0.6" } }, - "node_modules/nice-try": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==" - }, "node_modules/node-fetch": { "version": "2.6.7", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "license": "MIT", "dependencies": { "whatwg-url": "^5.0.0" }, @@ -11013,64 +9270,25 @@ } }, "node_modules/node-forge": { - "version": "0.8.5", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.8.5.tgz", - "integrity": "sha512-vFMQIWt+J/7FLNyKouZ9TazT74PRV3wgv9UT4cRjC8BffxFbKXkgIWR42URCPSnHm/QDz6BOlb2Q0U4+VQT67Q==", + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz", + "integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==", + "license": "(BSD-3-Clause OR GPL-2.0)", "engines": { - "node": ">= 4.5.0" + "node": ">= 6.13.0" } }, "node_modules/node-int64": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", - "integrity": "sha1-h6kGXNs1XTGC2PlM4RGIuCXGijs=", + "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", "dev": true }, - "node_modules/node-modules-regexp": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz", - "integrity": "sha1-jZ2+KJZKSsVxLpExZCEHxx6Q7EA=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/node-notifier": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/node-notifier/-/node-notifier-6.0.0.tgz", - "integrity": "sha512-SVfQ/wMw+DesunOm5cKqr6yDcvUTDl/yc97ybGHMrteNEY6oekXpNpS3lZwgLlwz0FLgHoiW28ZpmBHUDg37cw==", - "dev": true, - 
"optional": true, - "dependencies": { - "growly": "^1.3.0", - "is-wsl": "^2.1.1", - "semver": "^6.3.0", - "shellwords": "^0.1.1", - "which": "^1.3.1" - } - }, - "node_modules/node-notifier/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "optional": true, - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/node-notifier/node_modules/which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "optional": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "which": "bin/which" - } + "node_modules/node-releases": { + "version": "2.0.19", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz", + "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==", + "dev": true }, "node_modules/normalize-path": { "version": "3.0.0", @@ -11093,21 +9311,10 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/npm-run-path": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", - "integrity": "sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=", - "dependencies": { - "path-key": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/nwsapi": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.0.tgz", - "integrity": "sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ==", + "version": "2.2.20", + "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.20.tgz", + "integrity": "sha512-/ieB+mDe4MrrKMT8z+mQL8klXydZWGR5Dowt4RAGKbJ3kIGEx3X4ljUo+6V73IXtUPWgfOlU5B9MlGxFO5T+cA==", "dev": true }, "node_modules/oauth-sign": { @@ -11118,41 +9325,6 @@ "node": "*" } }, - "node_modules/object-copy": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/object-copy/-/object-copy-0.1.0.tgz", - "integrity": "sha1-fn2Fi3gb18mRpBupde04EnVOmYw=", - "dependencies": { - "copy-descriptor": "^0.1.0", - "define-property": "^0.2.5", - "kind-of": "^3.0.3" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-copy/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-copy/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/object-hash": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.2.0.tgz", @@ -11163,9 +9335,10 @@ } }, "node_modules/object-inspect": { - "version": "1.13.2", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.2.tgz", - "integrity": "sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==", + "version": "1.13.4", + "resolved": 
"https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -11173,32 +9346,10 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/object-visit": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/object-visit/-/object-visit-1.0.1.tgz", - "integrity": "sha1-95xEk68MU3e1n+OdOV5BBC3QRbs=", - "dependencies": { - "isobject": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object.pick": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz", - "integrity": "sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c=", - "dependencies": { - "isobject": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/oidc-token-hash": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/oidc-token-hash/-/oidc-token-hash-5.1.0.tgz", - "integrity": "sha512-y0W+X7Ppo7oZX6eovsRkuzcSM40Bicg2JEJkDJ4irIt1wsYAP5MLSNv+QAogO8xivMffw/9OvV3um1pxXgt1uA==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/oidc-token-hash/-/oidc-token-hash-5.1.1.tgz", + "integrity": "sha512-D7EmwxJV6DsEB6vOFLrBM2OzsVgQzgPWyHlV2OOAVj772n+WTXpudC9e9u5BVKQnYwaD30Ivhi9b+4UeBcGu9g==", "license": "MIT", "engines": { "node": "^10.13.0 || >=12.0.0" @@ -11208,6 +9359,7 @@ "version": "2.4.1", "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", "dependencies": { "ee-first": "1.1.1" }, @@ -11224,14 +9376,17 @@ } }, "node_modules/onetime": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz", - "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", "dependencies": { "mimic-fn": "^2.1.0" }, "engines": { "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/openid-client": { @@ -11255,65 +9410,13 @@ "url": "https://github.com/sponsors/panva" } }, - "node_modules/openid-client/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/openid-client/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "license": "ISC" - }, - "node_modules/optionator": { - "version": "0.8.3", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", - "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", - "dev": true, - "dependencies": { - "deep-is": "~0.1.3", - "fast-levenshtein": "~2.0.6", - "levn": "~0.3.0", - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2", - "word-wrap": "~1.2.3" - }, - "engines": { - 
"node": ">= 0.8.0" - } - }, "node_modules/p-cancelable": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-2.1.1.tgz", "integrity": "sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/p-each-series": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/p-each-series/-/p-each-series-2.1.0.tgz", - "integrity": "sha512-ZuRs1miPT4HrjFa+9fRfOFXxGJfORgelKV9f9nNOWw2gl6gVsRaVDOQP0+MI0G0wGKns1Yacsu0GjOFbTK0JFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/p-finally": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", - "integrity": "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=", + "license": "MIT", "engines": { - "node": ">=4" + "node": ">=8" } }, "node_modules/p-limit": { @@ -11357,28 +9460,39 @@ "resolved": "https://registry.npmjs.org/pako/-/pako-0.2.9.tgz", "integrity": "sha1-8/dSL073gjSNqBYbrZ7P1Rv4OnU=" }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/parse5": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-5.1.0.tgz", - "integrity": "sha512-fxNG2sQjHvlVAYmzBZS9YlDp6PTSSDwa98vkD4QgVDDCAo84z5X1t5XyJQ62ImdLXx5NdIIfihey6xpum9/gRQ==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", + "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", "dev": true }, "node_modules/parseurl": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "license": "MIT", "engines": { "node": ">= 0.8" } }, - "node_modules/pascalcase": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/pascalcase/-/pascalcase-0.1.1.tgz", - "integrity": "sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ=", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", @@ -11396,14 +9510,6 @@ "node": ">=0.10.0" } }, - "node_modules/path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", - "engines": { - "node": ">=4" - } - }, "node_modules/path-parse": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", @@ -11439,11 +9545,17 @@ "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true + }, "node_modules/picomatch": { - "version": "2.2.2", - 
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", - "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", - "dev": true, + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "license": "MIT", "engines": { "node": ">=8.6" }, @@ -11451,22 +9563,11 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, - "node_modules/pify": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", - "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", - "engines": { - "node": ">=6" - } - }, "node_modules/pirates": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.1.tgz", - "integrity": "sha512-WuNqLTbMI3tmfef2TKxlQmAiLHKtFhlsCZnPIpuv2Ow0RDVO8lfy1Opf4NUzlMXLjPl+Men7AuVdX6TA+s+uGA==", + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", "dev": true, - "dependencies": { - "node-modules-regexp": "^1.0.0" - }, "engines": { "node": ">= 6" } @@ -11483,12 +9584,6 @@ "node": ">=8" } }, - "node_modules/pn": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/pn/-/pn-1.1.0.tgz", - "integrity": "sha512-2qHaIQr2VLRFoxe2nASzsV6ef4yOOH+Fi9FBOVH6cqeSgUnoyySPZkxzLuzd+RYOQTRpROA0ztTMqxROKSb/nA==", - "dev": true - }, "node_modules/portable-fetch": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/portable-fetch/-/portable-fetch-3.0.0.tgz", @@ -11498,23 +9593,6 @@ "whatwg-fetch": ">=0.10.0" } }, - "node_modules/portable-fetch/node_modules/node-fetch": { - "version": "1.7.3", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-1.7.3.tgz", - "integrity": "sha512-NhZ4CsKx7cYm2vSrBAr2PvFOe6sWDf0UYLRqA6svUYg7+/TSfVAu49jYC4BvQ4Sms9SZgdqGBgroqfDhJdTyKQ==", - "dependencies": { - "encoding": "^0.1.11", - "is-stream": "^1.0.1" - } - }, - "node_modules/posix-character-classes": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz", - "integrity": "sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/possible-typed-array-names": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", @@ -11524,15 +9602,6 @@ "node": ">= 0.4" } }, - "node_modules/prelude-ls": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", - "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", - "dev": true, - "engines": { - "node": ">= 0.8.0" - } - }, "node_modules/prettier": { "version": "1.19.1", "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz", @@ -11546,18 +9615,29 @@ } }, "node_modules/pretty-format": { - "version": "24.9.0", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-24.9.0.tgz", - "integrity": "sha512-00ZMZUiHaJrNfk33guavqgvfJS30sLYf0f8+Srklv0AMPodGGHcoHgksZ3OThYnIvOd+8yMCn0YiEOogjlgsnA==", + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.5.1.tgz", + "integrity": "sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==", "dev": true, 
"dependencies": { - "@jest/types": "^24.9.0", - "ansi-regex": "^4.0.0", - "ansi-styles": "^3.2.0", - "react-is": "^16.8.4" + "ansi-regex": "^5.0.1", + "ansi-styles": "^5.0.0", + "react-is": "^17.0.1" }, "engines": { - "node": ">= 6" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, "node_modules/process-nextick-args": { @@ -11566,13 +9646,13 @@ "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" }, "node_modules/prompts": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.3.2.tgz", - "integrity": "sha512-Q06uKs2CkNYVID0VqwfAl9mipo99zkBv/n2JtWY89Yxa3ZabWSrs0e2KTudKVa3peLUvYXMefDqIleLPVUBZMA==", + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", + "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", "dev": true, "dependencies": { "kleur": "^3.0.3", - "sisteransi": "^1.0.4" + "sisteransi": "^1.0.5" }, "engines": { "node": ">= 6" @@ -11653,6 +9733,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/querystringify": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", + "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==", + "dev": true + }, "node_modules/quick-lru": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", @@ -11669,28 +9755,46 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "license": "MIT", "engines": { "node": ">= 0.6" } }, "node_modules/raw-body": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", - "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.1.tgz", + "integrity": "sha512-9G8cA+tuMS75+6G/TzW8OtLzmBDMo8p1JRxN5AZ+LAp8uxGA8V8GZm4GQ4/N5QNQEnLmg6SS7wyuSmbKepiKqA==", + "license": "MIT", "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", - "iconv-lite": "0.4.24", + "iconv-lite": "0.7.0", "unpipe": "1.0.0" }, "engines": { - "node": ">= 0.8" + "node": ">= 0.10" + } + }, + "node_modules/raw-body/node_modules/iconv-lite": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.0.tgz", + "integrity": "sha512-cf6L2Ds3h57VVmkZe+Pn+5APsT7FpqJtEhhieDCvrE2MK5Qk9MyffgQyuxQTm6BChfeZNtcOLHp9IcWRVcIcBQ==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/react-is": { - "version": "16.12.0", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.12.0.tgz", - "integrity": 
"sha512-rPCkf/mWBtKc97aLL9/txD8DZdemK0vkA3JMLShjlJB3Pj3s+lpf1KaBzMfQrAmhMQB0n1cU/SUGgKKBCe837Q==", + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", + "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", "dev": true }, "node_modules/readable-stream": { @@ -11707,19 +9811,10 @@ "util-deprecate": "~1.0.1" } }, - "node_modules/realpath-native": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/realpath-native/-/realpath-native-2.0.0.tgz", - "integrity": "sha512-v1SEYUOXXdbBZK8ZuNgO4TBjamPsiSgcFr0aP+tEKpQZK8vooEUqV6nm6Cv502mX4NF2EfsnVqtNAHG+/6Ur1Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, "node_modules/rechoir": { "version": "0.6.2", "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", - "integrity": "sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q=", + "integrity": "sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==", "dependencies": { "resolve": "^1.1.6" }, @@ -11727,40 +9822,6 @@ "node": ">= 0.10" } }, - "node_modules/regex-not": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/regex-not/-/regex-not-1.0.2.tgz", - "integrity": "sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==", - "dependencies": { - "extend-shallow": "^3.0.2", - "safe-regex": "^1.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/remove-trailing-separator": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz", - "integrity": "sha1-wkvOKig62tW8P1jg1IJJuSN52O8=", - "dev": true - }, - "node_modules/repeat-element": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.3.tgz", - "integrity": "sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/repeat-string": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", - "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=", - "engines": { - "node": ">=0.10" - } - }, "node_modules/request": { "version": "2.88.2", "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz", @@ -11792,43 +9853,11 @@ "node": ">= 6" } }, - "node_modules/request-promise-core": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/request-promise-core/-/request-promise-core-1.1.3.tgz", - "integrity": "sha512-QIs2+ArIGQVp5ZYbWD5ZLCY29D5CfWizP8eWnm8FoGD1TX61veauETVQbrV60662V0oFBkrDOuaBI8XgtuyYAQ==", - "dev": true, - "dependencies": { - "lodash": "^4.17.15" - }, - "engines": { - "node": ">=0.10.0" - }, - "peerDependencies": { - "request": "^2.34" - } - }, - "node_modules/request-promise-native": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/request-promise-native/-/request-promise-native-1.0.8.tgz", - "integrity": "sha512-dapwLGqkHtwL5AEbfenuzjTYg35Jd6KPytsC2/TLkVMz8rm+tNt72MGUWT1RP/aYawMpN6HqbNGBQaRcBtjQMQ==", - "deprecated": "request-promise-native has been deprecated because it extends the now deprecated request package, see https://github.com/request/request/issues/3142", - "dev": true, - "dependencies": { - "request-promise-core": "1.1.3", - "stealthy-require": "^1.1.1", - "tough-cookie": "^2.3.3" - }, - "engines": { - "node": ">=0.12.0" - }, - "peerDependencies": { - "request": "^2.34" - } - }, 
"node_modules/request/node_modules/form-data": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "license": "MIT", "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.6", @@ -11838,32 +9867,54 @@ "node": ">= 0.12" } }, + "node_modules/request/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, "node_modules/require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", "dev": true, "engines": { "node": ">=0.10.0" } }, - "node_modules/require-main-filename": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", - "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", - "dev": true - }, "node_modules/requires-port": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", "integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8=" }, "node_modules/resolve": { - "version": "1.15.1", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz", - "integrity": "sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w==", + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", "dependencies": { - "path-parse": "^1.0.6" + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -11896,11 +9947,14 @@ "node": ">=8" } }, - "node_modules/resolve-url": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/resolve-url/-/resolve-url-0.2.1.tgz", - "integrity": "sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=", - "deprecated": "https://github.com/lydell/resolve-url#deprecated" + "node_modules/resolve.exports": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-1.1.1.tgz", + "integrity": "sha512-/NtpHNDN7jWhAaQ9BvBUYZ6YTXsRBgfqWFWP7BZBaoMJO/I3G5OFzvTuWNlZC3aPjins1F+TNrLKsGbH4rfsRQ==", + "dev": true, + "engines": { + "node": ">=10" + } }, "node_modules/responselike": { "version": "2.0.1", @@ -11914,33 +9968,43 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/ret": { - "version": "0.1.15", - "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", - "integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==", + "node_modules/retry": { + "version": "0.13.1", + "resolved": 
"https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", + "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==", + "license": "MIT", "engines": { - "node": ">=0.12" + "node": ">= 4" } }, "node_modules/retry-request": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-4.1.1.tgz", - "integrity": "sha512-BINDzVtLI2BDukjWmjAIRZ0oglnCAkpP2vQjM3jdLhmT62h0xnQgciPwBRDAvHqpkPT2Wo1XuUyLyn6nbGrZQQ==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-4.2.2.tgz", + "integrity": "sha512-xA93uxUD/rogV7BV59agW/JHPGXeREMWiZc9jhcwY4YdZ7QOtC7qbomYg0n4wyk2lJhggjvKvhNX8wln/Aldhg==", + "license": "MIT", "dependencies": { "debug": "^4.1.1", - "through2": "^3.0.1" + "extend": "^3.0.2" }, "engines": { "node": ">=8.10.0" } }, "node_modules/retry-request/node_modules/debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", - "deprecated": "Debug versions >=3.2.0 <3.2.7 || >=4 <4.3.1 have a low-severity ReDos regression when used in a Node.js environment. It is recommended you upgrade to 3.2.7 or 4.3.1. (https://github.com/visionmedia/debug/issues/797)", + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", "dependencies": { - "ms": "^2.1.1" + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } } }, "node_modules/rfc4648": { @@ -11965,28 +10029,44 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/rsvp": { - "version": "4.8.5", - "resolved": "https://registry.npmjs.org/rsvp/-/rsvp-4.8.5.tgz", - "integrity": "sha512-nfMOlASu9OnRJo1mbEk2cz0D56a1MBNrJ7orjRZQG10XDyuvwksKbuXNp6qa+kbn839HwjwhBzhFmdsaEAfauA==", - "dev": true, + "node_modules/router": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/router/-/router-1.3.7.tgz", + "integrity": "sha512-bYnD9Vv2287+g3AIll2kHITLtHV5+fldq6hVzaul9RbdGme77mvBY/1cO+ahsgstA2RI6DSg/j4W1TYHm4Lz4g==", + "license": "MIT", + "dependencies": { + "array-flatten": "3.0.0", + "debug": "2.6.9", + "methods": "~1.1.2", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.7", + "setprototypeof": "1.2.0", + "utils-merge": "1.0.1" + }, "engines": { - "node": "6.* || >= 7.*" + "node": ">= 0.8" + } + }, + "node_modules/router/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" } }, + "node_modules/router/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, "node_modules/safe-buffer": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, - "node_modules/safe-regex": { - "version": "1.1.0", - "resolved": 
"https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", - "integrity": "sha1-QKNmnzsHfR6UPURinhV91IAjvy4=", - "dependencies": { - "ret": "~0.1.10" - } - }, "node_modules/safe-regex-test": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz", @@ -12009,52 +10089,6 @@ "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, - "node_modules/sane": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/sane/-/sane-4.1.0.tgz", - "integrity": "sha512-hhbzAgTIX8O7SHfp2c8/kREfEn4qO/9q8C9beyY6+tvZ87EpoZ3i1RIEvp27YBswnNbY9mWd6paKVmKbAgLfZA==", - "deprecated": "some dependency vulnerabilities fixed, support for node < 10 dropped, and newer ECMAScript syntax/features added", - "dev": true, - "dependencies": { - "@cnakazawa/watch": "^1.0.3", - "anymatch": "^2.0.0", - "capture-exit": "^2.0.0", - "exec-sh": "^0.3.2", - "execa": "^1.0.0", - "fb-watchman": "^2.0.0", - "micromatch": "^3.1.4", - "minimist": "^1.1.1", - "walker": "~1.0.5" - }, - "bin": { - "sane": "src/cli.js" - }, - "engines": { - "node": "6.* || 8.* || >= 10.*" - } - }, - "node_modules/sane/node_modules/anymatch": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz", - "integrity": "sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==", - "dev": true, - "dependencies": { - "micromatch": "^3.1.4", - "normalize-path": "^2.1.1" - } - }, - "node_modules/sane/node_modules/normalize-path": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", - "integrity": "sha1-GrKLVW4Zg2Oowab35vogE3/mrtk=", - "dev": true, - "dependencies": { - "remove-trailing-separator": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/sax": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/sax/-/sax-1.4.1.tgz", @@ -12062,105 +10096,92 @@ "license": "ISC" }, "node_modules/saxes": { - "version": "3.1.11", - "resolved": "https://registry.npmjs.org/saxes/-/saxes-3.1.11.tgz", - "integrity": "sha512-Ydydq3zC+WYDJK1+gRxRapLIED9PWeSuuS41wqyoRmzvhhh9nc+QQrVMKJYzJFULazeGhzSV0QleN2wD3boh2g==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/saxes/-/saxes-5.0.1.tgz", + "integrity": "sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw==", "dev": true, "dependencies": { - "xmlchars": "^2.1.1" + "xmlchars": "^2.2.0" }, "engines": { - "node": ">=8" + "node": ">=10" } }, "node_modules/semver": { "version": "5.7.2", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, "bin": { "semver": "bin/semver" } }, "node_modules/send": { - "version": "0.19.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", - "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz", + "integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==", + "license": "MIT", "dependencies": { - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "fresh": "0.5.2", - 
"http-errors": "2.0.0", - "mime": "1.6.0", - "ms": "2.1.3", - "on-finished": "2.4.1", - "range-parser": "~1.2.1", - "statuses": "2.0.1" + "debug": "^4.3.5", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "mime-types": "^3.0.1", + "ms": "^2.1.3", + "on-finished": "^2.4.1", + "range-parser": "^1.2.1", + "statuses": "^2.0.1" }, "engines": { - "node": ">= 0.8.0" + "node": ">= 18" } }, "node_modules/send/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/send/node_modules/debug/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" - }, - "node_modules/send/node_modules/encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "ms": "^2.1.3" + }, "engines": { - "node": ">= 0.8" + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } } }, - "node_modules/send/node_modules/mime": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", - "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", - "bin": { - "mime": "cli.js" + "node_modules/send/node_modules/mime-types": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" }, "engines": { - "node": ">=4" + "node": ">= 0.6" } }, - "node_modules/send/node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" - }, "node_modules/serve-static": { - "version": "1.16.2", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", - "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz", + "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==", + "license": "MIT", "dependencies": { - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "parseurl": "~1.3.3", - "send": "0.19.0" + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "parseurl": "^1.3.3", + "send": "^1.2.0" }, "engines": { - "node": ">= 0.8.0" + "node": ">= 18" } }, - "node_modules/set-blocking": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", - "dev": true - }, "node_modules/set-function-length": { 
"version": "1.2.2", "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", @@ -12177,59 +10198,38 @@ "node": ">= 0.4" } }, - "node_modules/set-value": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz", - "integrity": "sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==", - "dependencies": { - "extend-shallow": "^2.0.1", - "is-extendable": "^0.1.1", - "is-plain-object": "^2.0.3", - "split-string": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/set-value/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/setprototypeof": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", - "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "license": "ISC" }, "node_modules/shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", "dependencies": { - "shebang-regex": "^1.0.0" + "shebang-regex": "^3.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/shelljs": { - "version": "0.8.3", - "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.3.tgz", - "integrity": "sha512-fc0BKlAWiLpwZljmOvAOTE/gXawtCoNrP5oaY7KIaQbbyHeQVg01pSEuEGvGh3HEdBU4baCD7wQBwADmM/7f7A==", + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz", + "integrity": "sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==", + "license": "BSD-3-Clause", "dependencies": { "glob": "^7.0.0", "interpret": "^1.0.0", @@ -12242,22 +10242,17 @@ "node": ">=4" } }, - "node_modules/shellwords": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/shellwords/-/shellwords-0.1.1.tgz", - "integrity": "sha512-vFwSUfQvqybiICwZY5+DAWIPLKsWO31Q91JSKl3UYv+K5c2QRPzn0qzec6QPu1Qc9eHYItiP3NdJqNVqetYAww==", - "dev": true, - "optional": true - }, "node_modules/side-channel": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", - "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.4", - "object-inspect": "^1.13.1" + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -12266,174 +10261,83 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/signal-exit": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", - "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=" - }, - "node_modules/sisteransi": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", - "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", - "dev": true - }, - "node_modules/slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/snakeize": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/snakeize/-/snakeize-0.1.0.tgz", - "integrity": "sha1-EMCI2LWOsHazIpu1oE4jLOEmQi0=" - }, - "node_modules/snapdragon": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz", - "integrity": "sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==", - "dependencies": { - "base": "^0.11.1", - "debug": "^2.2.0", - "define-property": "^0.2.5", - "extend-shallow": "^2.0.1", - "map-cache": "^0.2.2", - "source-map": "^0.5.6", - "source-map-resolve": "^0.5.0", - "use": "^3.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-node": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/snapdragon-node/-/snapdragon-node-2.1.1.tgz", - "integrity": "sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==", - "dependencies": { - "define-property": "^1.0.0", - "isobject": "^3.0.0", - "snapdragon-util": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-node/node_modules/define-property": { + "node_modules/side-channel-list": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", "dependencies": { - "is-descriptor": "^1.0.0" + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-node/node_modules/is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "deprecated": "Please upgrade to v1.0.1", - "dependencies": { - "kind-of": "^6.0.0" + "node": ">= 0.4" }, - "engines": { - "node": ">=0.10.0" + "funding": { + "url": 
"https://github.com/sponsors/ljharb" } }, - "node_modules/snapdragon-node/node_modules/is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "deprecated": "Please upgrade to v1.0.1", + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "license": "MIT", "dependencies": { - "kind-of": "^6.0.0" + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-node/node_modules/is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dependencies": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" + "node": ">= 0.4" }, - "engines": { - "node": ">=0.10.0" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/snapdragon-util": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/snapdragon-util/-/snapdragon-util-3.0.1.tgz", - "integrity": "sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==", + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "license": "MIT", "dependencies": { - "kind-of": "^3.2.0" + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-util/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dependencies": { - "is-buffer": "^1.1.5" + "node": ">= 0.4" }, - "engines": { - "node": ">=0.10.0" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/snapdragon/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dependencies": { - "ms": "2.0.0" - } + "node_modules/signal-exit": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", + "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=" }, - "node_modules/snapdragon/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } + "node_modules/sisteransi": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": 
"sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", + "dev": true }, - "node_modules/snapdragon/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dependencies": { - "is-extendable": "^0.1.0" - }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node_modules/snapdragon/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" - }, - "node_modules/snapdragon/node_modules/source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "engines": { - "node": ">=0.10.0" - } + "node_modules/snakeize": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/snakeize/-/snakeize-0.1.0.tgz", + "integrity": "sha1-EMCI2LWOsHazIpu1oE4jLOEmQi0=" }, "node_modules/source-map": { "version": "0.6.1", @@ -12444,35 +10348,16 @@ "node": ">=0.10.0" } }, - "node_modules/source-map-resolve": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.5.3.tgz", - "integrity": "sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==", - "deprecated": "See https://github.com/lydell/source-map-resolve#deprecated", - "dependencies": { - "atob": "^2.1.2", - "decode-uri-component": "^0.2.0", - "resolve-url": "^0.2.1", - "source-map-url": "^0.4.0", - "urix": "^0.1.0" - } - }, "node_modules/source-map-support": { - "version": "0.5.16", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.16.tgz", - "integrity": "sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ==", + "version": "0.5.21", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", "dev": true, "dependencies": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" } }, - "node_modules/source-map-url": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.0.tgz", - "integrity": "sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM=", - "deprecated": "See https://github.com/lydell/source-map-url#deprecated" - }, "node_modules/split-array-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/split-array-stream/-/split-array-stream-2.0.0.tgz", @@ -12490,21 +10375,12 @@ "node": ">=6" } }, - "node_modules/split-string": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/split-string/-/split-string-3.1.0.tgz", - "integrity": "sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==", - "dependencies": { - "extend-shallow": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/sprintf-js": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" + "integrity": 
"sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true, + "license": "BSD-3-Clause" }, "node_modules/sshpk": { "version": "1.16.1", @@ -12531,54 +10407,35 @@ } }, "node_modules/stack-utils": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-1.0.2.tgz", - "integrity": "sha512-MTX+MeG5U994cazkjd/9KNAapsHnibjMLnfXodlkXw76JEea0UiNzrqidzo1emMwk7w5Qhc9jd4Bn9TBb1MFwA==", + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", + "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==", "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/static-extend": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/static-extend/-/static-extend-0.1.2.tgz", - "integrity": "sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY=", "dependencies": { - "define-property": "^0.2.5", - "object-copy": "^0.1.0" + "escape-string-regexp": "^2.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=10" } }, - "node_modules/static-extend/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dependencies": { - "is-descriptor": "^0.1.0" - }, + "node_modules/stack-utils/node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/statuses": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", - "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", + "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", + "license": "MIT", "engines": { "node": ">= 0.8" } }, - "node_modules/stealthy-require": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/stealthy-require/-/stealthy-require-1.1.1.tgz", - "integrity": "sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/stream-buffers": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/stream-buffers/-/stream-buffers-3.0.3.tgz", @@ -12612,9 +10469,9 @@ } }, "node_modules/stream-shift": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz", - "integrity": "sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz", + "integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==" }, "node_modules/strict-uri-encode": { "version": "2.0.0", @@ -12634,91 +10491,73 @@ } }, "node_modules/string-length": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-length/-/string-length-3.1.0.tgz", - "integrity": "sha512-Ttp5YvkGm5v9Ijagtaz1BnN+k9ObpvS0eIBblPMp2YWL8FBmi9qblQ9fexc2k/CXFgrTIteU3jAw3payCnwSTA==", - "dev": true, - "dependencies": { - "astral-regex": "^1.0.0", - 
"strip-ansi": "^5.2.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/string-length/node_modules/strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", + "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==", "dev": true, "dependencies": { - "ansi-regex": "^4.1.0" + "char-regex": "^1.0.2", + "strip-ansi": "^6.0.0" }, "engines": { - "node": ">=6" + "node": ">=10" } }, "node_modules/string-width": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", - "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.0" + "strip-ansi": "^6.0.1" }, "engines": { "node": ">=8" } }, "node_modules/strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "dev": true, "dependencies": { - "ansi-regex": "^5.0.0" + "ansi-regex": "^5.0.1" }, "engines": { "node": ">=8" } }, - "node_modules/strip-ansi/node_modules/ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", - "dev": true, - "engines": { - "node": ">=8" - } - }, "node_modules/strip-bom": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", "dev": true, "engines": { - "node": ">=8" - } - }, - "node_modules/strip-eof": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", - "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=", - "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/strip-final-newline": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", - "dev": true, "engines": { "node": ">=6" } }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/strnum": { "version": "1.0.5", "resolved": 
"https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz", @@ -12789,9 +10628,9 @@ } }, "node_modules/supports-hyperlinks": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/supports-hyperlinks/-/supports-hyperlinks-2.1.0.tgz", - "integrity": "sha512-zoE5/e+dnEijk6ASB6/qrK+oYdm2do1hjoLWrqUC/8WEIW1gbxFcKuBof7sW8ArN6e+AYvsE8HBGiVRWL/F5CA==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/supports-hyperlinks/-/supports-hyperlinks-2.3.0.tgz", + "integrity": "sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA==", "dev": true, "dependencies": { "has-flag": "^4.0.0", @@ -12811,9 +10650,9 @@ } }, "node_modules/supports-hyperlinks/node_modules/supports-color": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", - "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "dependencies": { "has-flag": "^4.0.0" @@ -12822,6 +10661,17 @@ "node": ">=8" } }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/symbol-tree": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", @@ -12882,12 +10732,6 @@ "node": ">=10" } }, - "node_modules/tar/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "license": "ISC" - }, "node_modules/teeny-request": { "version": "3.11.3", "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-3.11.3.tgz", @@ -12929,9 +10773,9 @@ } }, "node_modules/throat": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/throat/-/throat-5.0.0.tgz", - "integrity": "sha512-fcwX4mndzpLQKBS1DVYhGAcYaYt7vsHNIvQV+WXMvnow5cgjPphq5CaayLaGsjRdSCKZFNGt7/GYAuXaNOiYCA==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/throat/-/throat-6.0.2.tgz", + "integrity": "sha512-WKexMoJj3vEuK0yFEapj8y64V0A6xcuPuK9Gt1d0R+dzCSJc0lHqQytAbSB4cDAK0dWh4T0E2ETkoLE2WZ41OQ==", "dev": true }, "node_modules/through2": { @@ -12943,9 +10787,9 @@ } }, "node_modules/tmp": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.3.tgz", - "integrity": "sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w==", + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.4.tgz", + "integrity": "sha512-UdiSoX6ypifLmrfQ/XfiawN6hkjSBpCjhKxxZcWlUUmoXLaCKQU0bx4HF/tdDK2uzRuchf1txGvrWBzYREssoQ==", "license": "MIT", "engines": { "node": ">=14.14" @@ -12966,67 +10810,32 @@ "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", "dev": true }, - "node_modules/to-fast-properties": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/to-object-path": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz", - "integrity": "sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68=", - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/to-object-path/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/to-regex": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/to-regex/-/to-regex-3.0.2.tgz", - "integrity": "sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==", + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "license": "MIT", "dependencies": { - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "regex-not": "^1.0.2", - "safe-regex": "^1.1.0" + "is-number": "^7.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=8.0" } }, - "node_modules/to-regex-range": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", - "dependencies": { - "is-number": "^3.0.0", - "repeat-string": "^1.6.1" - }, + "node_modules/to-regex-range/node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "license": "MIT", "engines": { - "node": ">=0.10.0" + "node": ">=0.12.0" } }, "node_modules/toidentifier": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "license": "MIT", "engines": { "node": ">=0.6" } @@ -13044,39 +10853,70 @@ } }, "node_modules/tr46": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-1.0.1.tgz", - "integrity": "sha1-qLE/1r/SSJUZZ0zN5VujaTtwbQk=", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-2.1.0.tgz", + "integrity": "sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw==", "dev": true, "dependencies": { - "punycode": "^2.1.0" + "punycode": "^2.1.1" + }, + "engines": { + "node": ">=8" } }, "node_modules/ts-jest": { - "version": "25.2.1", - "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-25.2.1.tgz", - "integrity": "sha512-TnntkEEjuXq/Gxpw7xToarmHbAafgCaAzOpnajnFC6jI7oo1trMzAHA04eWpc3MhV6+yvhE8uUBAmN+teRJh0A==", + "version": "27.1.5", + "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-27.1.5.tgz", + "integrity": "sha512-Xv6jBQPoBEvBq/5i2TeSG9tt/nqkbpcurrEG1b+2yfBrcJelOZF9Ml6dmyMh7bcW9JyFbRYpR5rxROSlBLTZHA==", "dev": true, "dependencies": { "bs-logger": "0.x", - "buffer-from": "1.x", "fast-json-stable-stringify": "2.x", + "jest-util": "^27.0.0", "json5": "2.x", "lodash.memoize": "4.x", 
"make-error": "1.x", - "mkdirp": "0.x", - "resolve": "1.x", - "semver": "^5.5", - "yargs-parser": "^16.1.0" + "semver": "7.x", + "yargs-parser": "20.x" }, "bin": { "ts-jest": "cli.js" }, "engines": { - "node": ">= 6" + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" }, "peerDependencies": { - "jest": ">=25 <26" + "@babel/core": ">=7.0.0-beta.0 <8", + "@types/jest": "^27.0.0", + "babel-jest": ">=27.0.0 <28", + "jest": "^27.0.0", + "typescript": ">=3.8 <5.0" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "@types/jest": { + "optional": true + }, + "babel-jest": { + "optional": true + }, + "esbuild": { + "optional": true + } + } + }, + "node_modules/ts-jest/node_modules/semver": { + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" } }, "node_modules/tslib": { @@ -13114,6 +10954,30 @@ "typescript": ">=2.3.0-dev || >=2.4.0-dev || >=2.5.0-dev || >=2.6.0-dev || >=2.7.0-dev || >=2.8.0-dev || >=2.9.0-dev || >=3.0.0-dev || >= 3.1.0-dev || >= 3.2.0-dev" } }, + "node_modules/tslint/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "license": "MIT", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/tslint/node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, "node_modules/tsutils": { "version": "2.29.0", "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-2.29.0.tgz", @@ -13142,18 +11006,6 @@ "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" }, - "node_modules/type-check": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", - "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", - "dev": true, - "dependencies": { - "prelude-ls": "~1.1.2" - }, - "engines": { - "node": ">= 0.8.0" - } - }, "node_modules/type-detect": { "version": "4.0.8", "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", @@ -13164,24 +11016,38 @@ } }, "node_modules/type-fest": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.11.0.tgz", - "integrity": "sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ==", + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", "dev": true, "engines": { - "node": ">=8" + "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/type-is": { - "version": "1.6.18", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", - "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + 
"version": "2.0.1", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", + "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", + "license": "MIT", + "dependencies": { + "content-type": "^1.0.5", + "media-typer": "^1.1.0", + "mime-types": "^3.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/type-is/node_modules/mime-types": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "license": "MIT", "dependencies": { - "media-typer": "0.3.0", - "mime-types": "~2.1.24" + "mime-db": "^1.54.0" }, "engines": { "node": ">= 0.6" @@ -13196,16 +11062,16 @@ "version": "3.1.5", "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", - "dev": true, "dependencies": { "is-typedarray": "^1.0.0" } }, "node_modules/typescript": { - "version": "3.8.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.8.2.tgz", - "integrity": "sha512-EgOVgL/4xfVrCMbhYKUQTdF37SQn4Iw73H5BgCrF1Abdun7Kwy/QZsE/ssAy0y4LxBbvua3PIbFsbRczWWnDdQ==", + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz", + "integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==", "dev": true, + "license": "Apache-2.0", "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -13215,85 +11081,69 @@ } }, "node_modules/underscore": { - "version": "1.9.2", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.9.2.tgz", - "integrity": "sha512-D39qtimx0c1fI3ya1Lnhk3E9nONswSKhnffBI0gME9C99fYOkNi04xs8K6pePLhvl1frbDemkaBQ5ikWllR2HQ==" + "version": "1.13.7", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.7.tgz", + "integrity": "sha512-GMXzWtsc57XAtguZgaQViUOzs0KTkk8ojr3/xAxXLITqf/3EMwxC0inyETfDFjH/Krbhuep0HNbbjI9i/q3F3g==", + "license": "MIT" }, - "node_modules/union-value": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz", - "integrity": "sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==", + "node_modules/unique-string": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz", + "integrity": "sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==", + "license": "MIT", "dependencies": { - "arr-union": "^3.1.0", - "get-value": "^2.0.6", - "is-extendable": "^0.1.1", - "set-value": "^2.0.1" + "crypto-random-string": "^2.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node_modules/unique-string": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-1.0.0.tgz", - "integrity": "sha1-nhBXzKhRq7kzmPizOuGHuZyuwRo=", - "dependencies": { - "crypto-random-string": "^1.0.0" - }, + "node_modules/universalify": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", + "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==", + "dev": true, "engines": { - "node": ">=4" + "node": ">= 4.0.0" } }, "node_modules/unpipe": { "version": "1.0.0", "resolved": 
"https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "license": "MIT", "engines": { "node": ">= 0.8" } }, - "node_modules/unset-value": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unset-value/-/unset-value-1.0.0.tgz", - "integrity": "sha1-g3aHP30jNRef+x5vw6jtDfyKtVk=", - "dependencies": { - "has-value": "^0.3.1", - "isobject": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/unset-value/node_modules/has-value": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/has-value/-/has-value-0.3.1.tgz", - "integrity": "sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8=", + "node_modules/update-browserslist-db": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz", + "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], "dependencies": { - "get-value": "^2.0.3", - "has-values": "^0.1.4", - "isobject": "^2.0.0" + "escalade": "^3.2.0", + "picocolors": "^1.1.1" }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/unset-value/node_modules/has-value/node_modules/isobject": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz", - "integrity": "sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=", - "dependencies": { - "isarray": "1.0.0" + "bin": { + "update-browserslist-db": "cli.js" }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/unset-value/node_modules/has-values": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/has-values/-/has-values-0.1.4.tgz", - "integrity": "sha1-bWHeldkd/Km5oCCJrThL/49it3E=", - "engines": { - "node": ">=0.10.0" + "peerDependencies": { + "browserslist": ">= 4.21.0" } }, "node_modules/uri-js": { @@ -13304,18 +11154,14 @@ "punycode": "^2.1.0" } }, - "node_modules/urix": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/urix/-/urix-0.1.0.tgz", - "integrity": "sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI=", - "deprecated": "Please see https://github.com/lydell/urix#deprecated" - }, - "node_modules/use": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/use/-/use-3.1.1.tgz", - "integrity": "sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==", - "engines": { - "node": ">=0.10.0" + "node_modules/url-parse": { + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", + "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", + "dev": true, + "dependencies": { + "querystringify": "^2.1.1", + "requires-port": "^1.0.0" } }, "node_modules/util": { @@ -13340,6 +11186,7 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", + "license": "MIT", "engines": { "node": ">= 0.4.0" } @@ -13354,9 +11201,9 @@ } }, "node_modules/v8-to-istanbul": { - "version": "4.1.3", - "resolved": 
"https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-4.1.3.tgz", - "integrity": "sha512-sAjOC+Kki6aJVbUOXJbcR0MnbfjvBzwKZazEJymA2IX49uoOdEdk+4fBq5cXgYgiyKtAyrrJNtBZdOeDIF+Fng==", + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz", + "integrity": "sha512-FGtKtv3xIpR6BYhvgH8MI/y78oT7d8Au3ww4QIxymrCtZEh5b8gCw2siywE+puhEmuWKDtmfrvF5UlB298ut3w==", "dev": true, "dependencies": { "@types/istanbul-lib-coverage": "^2.0.1", @@ -13364,13 +11211,13 @@ "source-map": "^0.7.3" }, "engines": { - "node": "8.x.x || >=10.10.0" + "node": ">=10.12.0" } }, "node_modules/v8-to-istanbul/node_modules/source-map": { - "version": "0.7.3", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", - "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==", + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", + "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", "dev": true, "engines": { "node": ">= 8" @@ -13408,23 +11255,24 @@ } }, "node_modules/w3c-xmlserializer": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-1.1.2.tgz", - "integrity": "sha512-p10l/ayESzrBMYWRID6xbuCKh2Fp77+sA0doRuGn4tTIMrrZVeqfpKjXHY+oDh3K4nLdPgNwMTVP6Vp4pvqbNg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz", + "integrity": "sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA==", "dev": true, "dependencies": { - "domexception": "^1.0.1", - "webidl-conversions": "^4.0.2", "xml-name-validator": "^3.0.0" + }, + "engines": { + "node": ">=10" } }, "node_modules/walker": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.7.tgz", - "integrity": "sha1-L3+bj9ENZ3JisYqITijRlhjgKPs=", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz", + "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", "dev": true, "dependencies": { - "makeerror": "1.0.x" + "makeerror": "1.0.12" } }, "node_modules/web-encoding": { @@ -13440,10 +11288,13 @@ } }, "node_modules/webidl-conversions": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-4.0.2.tgz", - "integrity": "sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==", - "dev": true + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-6.1.0.tgz", + "integrity": "sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w==", + "dev": true, + "engines": { + "node": ">=10.4" + } }, "node_modules/whatwg-encoding": { "version": "1.0.5", @@ -13466,21 +11317,23 @@ "dev": true }, "node_modules/whatwg-url": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-7.1.0.tgz", - "integrity": "sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==", + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-8.7.0.tgz", + "integrity": "sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg==", "dev": true, "dependencies": { - "lodash.sortby": "^4.7.0", - "tr46": "^1.0.1", - 
"webidl-conversions": "^4.0.2" + "lodash": "^4.7.0", + "tr46": "^2.1.0", + "webidl-conversions": "^6.1.0" + }, + "engines": { + "node": ">=10" } }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, "dependencies": { "isexe": "^2.0.0" }, @@ -13491,12 +11344,6 @@ "node": ">= 8" } }, - "node_modules/which-module": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", - "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", - "dev": true - }, "node_modules/which-typed-array": { "version": "1.1.18", "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.18.tgz", @@ -13517,19 +11364,10 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/word-wrap": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.4.tgz", - "integrity": "sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/wrap-ansi": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", - "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", "dev": true, "dependencies": { "ansi-styles": "^4.0.0", @@ -13537,16 +11375,18 @@ "strip-ansi": "^6.0.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, "node_modules/wrap-ansi/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "dependencies": { - "@types/color-name": "^1.1.1", "color-convert": "^2.0.1" }, "engines": { @@ -13580,13 +11420,15 @@ "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" }, "node_modules/write-file-atomic": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz", - "integrity": "sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", + "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", + "license": "ISC", "dependencies": { - "graceful-fs": "^4.1.11", "imurmurhash": "^0.1.4", - "signal-exit": "^3.0.2" + "is-typedarray": "^1.0.0", + "signal-exit": "^3.0.2", + "typedarray-to-buffer": "^3.1.5" } }, "node_modules/ws": { @@ -13661,59 +11503,45 @@ } }, "node_modules/y18n": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.1.tgz", - "integrity": "sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==", - "dev": true 
+ "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } }, "node_modules/yallist": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", - "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "license": "ISC" }, "node_modules/yargs": { - "version": "15.3.1", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.3.1.tgz", - "integrity": "sha512-92O1HWEjw27sBfgmXiixJWT5hRBp2eobqXicLtPBIDBhYB+1HpwZlXmbW2luivBJHBzki+7VyCLRtAkScbTBQA==", + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", "dev": true, "dependencies": { - "cliui": "^6.0.0", - "decamelize": "^1.2.0", - "find-up": "^4.1.0", - "get-caller-file": "^2.0.1", + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", - "require-main-filename": "^2.0.0", - "set-blocking": "^2.0.0", "string-width": "^4.2.0", - "which-module": "^2.0.0", - "y18n": "^4.0.0", - "yargs-parser": "^18.1.1" + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" }, "engines": { - "node": ">=8" + "node": ">=10" } }, "node_modules/yargs-parser": { - "version": "16.1.0", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-16.1.0.tgz", - "integrity": "sha512-H/V41UNZQPkUMIT5h5hiwg4QKIY1RPvoBV4XcjUbRM8Bk2oKqqyZ0DIEbTFZB0XjbtSPG8SAa/0DxCQmiRgzKg==", + "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", "dev": true, - "dependencies": { - "camelcase": "^5.0.0", - "decamelize": "^1.2.0" - } - }, - "node_modules/yargs/node_modules/yargs-parser": { - "version": "18.1.3", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", - "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", - "dev": true, - "dependencies": { - "camelcase": "^5.0.0", - "decamelize": "^1.2.0" - }, "engines": { - "node": ">=6" + "node": ">=10" } } } diff --git a/frontend/server/package.json b/frontend/server/package.json index 282d0c401bd..4dbe2b296d1 100644 --- a/frontend/server/package.json +++ b/frontend/server/package.json @@ -4,15 +4,17 @@ "dependencies": { "@aws-sdk/credential-providers": "^3.621.0", "@google-cloud/storage": "^2.5.0", - "@kubernetes/client-node": "^0.12.1", - "axios": ">=1.8.2", - "crypto-js": "^3.1.8", - "express": "^4.21.0", + "@kubernetes/client-node": "^0.16.3", + "axios": ">=1.12.0", + "crypto-js": "^4.2.0", + "express": "^5.1.0", + "form-data": "^2.5.4", "gunzip-maybe": "^1.4.1", - "http-proxy-middleware": "^0.18.0", + "http-proxy-middleware": "^2.0.7", + "js-yaml": "^4.1.0", "lodash": ">=4.17.21", "minio": "~8.0.3", - "node-fetch": "^2.6.7", + "node-fetch": "2.6.7", "peek-stream": "^1.1.3", "portable-fetch": "^3.0.0", "tar-stream": "^2.1.0" @@ -21,24 +23,35 @@ "@types/crypto-js": "^3.1.43", "@types/express": 
"^4.11.1", "@types/gunzip-maybe": "^1.4.0", - "@types/http-proxy-middleware": "^0.19.3", - "@types/jest": "^24.9.1", + "@types/jest": "^27.0.0", "@types/node": "^14.14.20", "@types/node-fetch": "^2.1.2", "@types/supertest": "^2.0.8", "@types/tar": "^4.0.3", "@types/tar-stream": "^1.6.1", - "jest": "^25.3.0", + "jest": "^27.0.0", "prettier": "1.19.1", "supertest": "^4.0.2", - "ts-jest": "^25.2.1", + "ts-jest": "^27.0.0", "tslint": "^5.20.1", - "typescript": "^3.6.4" + "typescript": "^4.9.5" }, "overrides": { - "express": { - "path-to-regexp": "0.1.12" - } + "path-to-regexp": "0.1.12", + "router": "1.3.7", + "node-fetch": "2.6.7", + "duplexify": "4.1.3", + "@google-cloud/storage": { + "async": "2.6.4", + "@google-cloud/common": "^3.1.0", + "gcs-resumable-upload": "^3.1.0" + }, + "cross-spawn": "7.0.5", + "braces": "3.0.3", + "json-schema": "0.4.0", + "jsonpath-plus": "10.3.0", + "date-and-time": "0.14.2", + "form-data@>=2.0.0 <2.5.4": "2.5.4" }, "scripts": { "build": "tsc --project .", diff --git a/frontend/server/proxy-middleware.ts b/frontend/server/proxy-middleware.ts index bdb05a3869f..9fcc01266e9 100644 --- a/frontend/server/proxy-middleware.ts +++ b/frontend/server/proxy-middleware.ts @@ -13,7 +13,7 @@ // limitations under the License. import express from 'express'; -import proxy from 'http-proxy-middleware'; +import { createProxyMiddleware } from 'http-proxy-middleware'; import { URL, URLSearchParams } from 'url'; import { HACK_FIX_HPM_PARTIAL_RESPONSE_HEADERS } from './consts'; @@ -69,7 +69,7 @@ export default (app: express.Application, apisPrefix: string) => { app.all( proxyPrefix + '*', - proxy({ + createProxyMiddleware({ changeOrigin: true, logLevel: process.env.NODE_ENV === 'test' ? 'warn' : 'debug', target: 'http://127.0.0.1', diff --git a/frontend/server/src/generated/apis/auth/api.ts b/frontend/server/src/generated/apis/auth/api.ts index 9aa3a371071..b1de028582c 100644 --- a/frontend/server/src/generated/apis/auth/api.ts +++ b/frontend/server/src/generated/apis/auth/api.ts @@ -35,7 +35,7 @@ export const COLLECTION_FORMATS = { * @interface FetchAPI */ export interface FetchAPI { - (url: string, init?: any): Promise; + (url: string, init?: any): Promise; } /** @@ -203,7 +203,7 @@ export const AuthServiceApiFetchParamCreator = function(configuration?: Configur options.query, ); // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; + delete (localVarUrlObj as any).search; localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); return { diff --git a/frontend/server/src/generated/apisv2beta1/auth/api.ts b/frontend/server/src/generated/apisv2beta1/auth/api.ts index 76967f8feda..832f04f64b2 100644 --- a/frontend/server/src/generated/apisv2beta1/auth/api.ts +++ b/frontend/server/src/generated/apisv2beta1/auth/api.ts @@ -203,7 +203,7 @@ export const AuthServiceApiFetchParamCreator = function(configuration?: Configur options.query, ); // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; + localVarUrlObj.search = null; localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); return { diff --git a/frontend/server/utils.ts b/frontend/server/utils.ts index 0ec4c702069..5423aaf23ce 100644 --- a/frontend/server/utils.ts +++ b/frontend/server/utils.ts @@ -13,7 +13,7 @@ // limitations under the License. 
import { readFileSync } from 'fs'; import { Transform, TransformOptions } from 'stream'; -import path from 'path'; +import { posix as path } from 'path'; /** get the server address from host, port, and schema (defaults to 'http'). */ export function getAddress({ diff --git a/frontend/server/workflow-helper.ts b/frontend/server/workflow-helper.ts index d55d9430b44..31fe998654d 100644 --- a/frontend/server/workflow-helper.ts +++ b/frontend/server/workflow-helper.ts @@ -181,7 +181,7 @@ export async function getKeyFormatFromArtifactRepositories( `artifact-repositories configmap in ${namespace} namespace is missing an artifact-repositories field.`, ); } - const artifactRepositoriesValue = JsYaml.safeLoad( + const artifactRepositoriesValue = JsYaml.load( artifactRepositories, ) as PartialArtifactRepositoriesValue; if ('s3' in artifactRepositoriesValue) { diff --git a/frontend/src/apisv2beta1/pipeline/api.ts b/frontend/src/apisv2beta1/pipeline/api.ts index b5ba6d54a26..8a2f4b249b6 100644 --- a/frontend/src/apisv2beta1/pipeline/api.ts +++ b/frontend/src/apisv2beta1/pipeline/api.ts @@ -447,10 +447,13 @@ export const PipelineServiceApiFetchParamCreator = function(configuration?: Conf * * @summary Deletes an empty pipeline by ID. Returns error if the pipeline has pipeline versions. * @param {string} pipeline_id Required input. ID of the pipeline to be deleted. + * @param {boolean} [cascade] Optional input. If true, deletes pipeline versions along with the pipeline. * @param {*} [options] Override http request option. * @throws {RequiredError} */ - deletePipeline(pipeline_id: string, options: any = {}): FetchArgs { + deletePipeline(pipeline_id: string, cascade?: boolean, options?: any): FetchArgs { + // Set default options if not provided + const requestOptions = options || {}; // verify required parameter 'pipeline_id' is not null or undefined if (pipeline_id === null || pipeline_id === undefined) { throw new RequiredError( @@ -463,10 +466,14 @@ export const PipelineServiceApiFetchParamCreator = function(configuration?: Conf encodeURIComponent(String(pipeline_id)), ); const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'DELETE' }, options); + const localVarRequestOptions = Object.assign({ method: 'DELETE' }, requestOptions); const localVarHeaderParameter = {} as any; const localVarQueryParameter = {} as any; + if (cascade !== undefined) { + localVarQueryParameter['cascade'] = cascade; + } + // authentication Bearer required if (configuration && configuration.apiKey) { const localVarApiKeyValue = @@ -480,11 +487,15 @@ export const PipelineServiceApiFetchParamCreator = function(configuration?: Conf {}, localVarUrlObj.query, localVarQueryParameter, - options.query, + requestOptions.query, ); // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + localVarRequestOptions.headers = Object.assign( + {}, + localVarHeaderParameter, + requestOptions.headers, + ); return { url: url.format(localVarUrlObj), @@ -918,15 +929,18 @@ export const PipelineServiceApiFp = function(configuration?: Configuration) { * * @summary Deletes an empty pipeline by ID. Returns error if the pipeline has pipeline versions. * @param {string} pipeline_id Required input. ID of the pipeline to be deleted. + * @param {boolean} [cascade] Optional input. If true, deletes pipeline versions along with the pipeline. 
* @param {*} [options] Override http request option. * @throws {RequiredError} */ deletePipeline( pipeline_id: string, + cascade?: boolean, options?: any, ): (fetch?: FetchAPI, basePath?: string) => Promise { const localVarFetchArgs = PipelineServiceApiFetchParamCreator(configuration).deletePipeline( pipeline_id, + cascade, options, ); return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { @@ -1153,14 +1167,16 @@ export const PipelineServiceApiFactory = function( * * @summary Deletes an empty pipeline by ID. Returns error if the pipeline has pipeline versions. * @param {string} pipeline_id Required input. ID of the pipeline to be deleted. + * @param {boolean} [cascade] Optional input. If true, deletes pipeline versions along with the pipeline. * @param {*} [options] Override http request option. * @throws {RequiredError} */ - deletePipeline(pipeline_id: string, options?: any) { - return PipelineServiceApiFp(configuration).deletePipeline(pipeline_id, options)( - fetch, - basePath, - ); + deletePipeline(pipeline_id: string, cascade?: boolean, options?: any) { + return PipelineServiceApiFp(configuration).deletePipeline( + pipeline_id, + cascade, + options, + )(fetch, basePath); }, /** * @@ -1319,15 +1335,17 @@ export class PipelineServiceApi extends BaseAPI { * * @summary Deletes an empty pipeline by ID. Returns error if the pipeline has pipeline versions. * @param {string} pipeline_id Required input. ID of the pipeline to be deleted. + * @param {boolean} [cascade] Optional input. If true, deletes pipeline versions along with the pipeline. * @param {*} [options] Override http request option. * @throws {RequiredError} * @memberof PipelineServiceApi */ - public deletePipeline(pipeline_id: string, options?: any) { - return PipelineServiceApiFp(this.configuration).deletePipeline(pipeline_id, options)( - this.fetch, - this.basePath, - ); + public deletePipeline(pipeline_id: string, cascade?: boolean, options?: any) { + return PipelineServiceApiFp(this.configuration).deletePipeline( + pipeline_id, + cascade, + options, + )(this.fetch, this.basePath); } /** diff --git a/frontend/src/components/NewRunParametersV2.tsx b/frontend/src/components/NewRunParametersV2.tsx index 1783dc4b101..00a2ae47525 100644 --- a/frontend/src/components/NewRunParametersV2.tsx +++ b/frontend/src/components/NewRunParametersV2.tsx @@ -242,7 +242,7 @@ function NewRunParametersV2(props: NewRunParametersProps) {
    Pipeline Root
    Pipeline Root represents an artifact repository, refer to{' '} - + Pipeline Root Documentation . diff --git a/frontend/src/components/SidePanel.tsx b/frontend/src/components/SidePanel.tsx index d10893f2032..ffdc0b55b1a 100644 --- a/frontend/src/components/SidePanel.tsx +++ b/frontend/src/components/SidePanel.tsx @@ -57,6 +57,20 @@ interface SidePanelProps { } class SidePanel extends React.Component { + public componentDidMount(): void { + document.addEventListener('keydown', this.handleKeyDown); + } + + public componentWillUnmount(): void { + document.removeEventListener('keydown', this.handleKeyDown); + } + + private handleKeyDown = (event: KeyboardEvent): void => { + if (event.key === 'Escape' && this.props.isOpen) { + this.props.onClose(); + } + }; + public render(): JSX.Element { const { isBusy, isOpen, onClose, title, defaultWidth } = this.props; return ( diff --git a/frontend/src/components/viewers/Tensorboard.test.tsx b/frontend/src/components/viewers/Tensorboard.test.tsx index 21afd2208ba..3aa4fb09eb0 100644 --- a/frontend/src/components/viewers/Tensorboard.test.tsx +++ b/frontend/src/components/viewers/Tensorboard.test.tsx @@ -186,8 +186,8 @@ describe.only('Tensorboard', () => {
    - - - + + - + +
    @@ -213,8 +213,8 @@ describe.only('Tensorboard', () => {
    - - - + + - + +
    @@ -248,10 +248,10 @@ describe.only('Tensorboard', () => {
    - + Stop Tensorboard - + Stop Tensorboard?
    @@ -261,10 +261,10 @@ describe.only('Tensorboard', () => {
    - + Cancel - +
    @@ -299,8 +299,8 @@ describe.only('Tensorboard', () => {
    - - - + + - + +
    diff --git a/frontend/src/lib/Buttons.ts b/frontend/src/lib/Buttons.ts index 44a1c4300ac..08bb6b14720 100644 --- a/frontend/src/lib/Buttons.ts +++ b/frontend/src/lib/Buttons.ts @@ -207,8 +207,8 @@ export default class Buttons { useCurrentResource: boolean, ): Buttons { this._map[ButtonKeys.DELETE_RUN] = { - action: () => { - this._dialogDeletePipelinesAndPipelineVersions( + action: async () => { + await this._dialogDeletePipelinesAndPipelineVersions( getSelectedIds(), getSelectedVersionIds(), callback, @@ -764,11 +764,11 @@ export default class Buttons { this._props.history.push(RoutePage.NEW_PIPELINE_VERSION + searchString); } - private _dialogDeletePipelinesAndPipelineVersions( + private async _dialogDeletePipelinesAndPipelineVersions( selectedIds: string[], selectedVersionIds: { [pipelineId: string]: string[] }, callback: (pipelineId: string | undefined, selectedIds: string[]) => void, - ): void { + ): Promise { const numVersionIds = this._deepCountDictionary(selectedVersionIds); const pipelineMessage = this._nouns(selectedIds.length, `pipeline`, `pipelines`); const pipelineVersionMessage = this._nouns( @@ -777,11 +777,133 @@ export default class Buttons { `pipeline versions`, ); const andMessage = pipelineMessage !== `` && pipelineVersionMessage !== `` ? ` and ` : ``; + + // If we're only deleting pipeline versions (no pipelines), show simple dialog + if (selectedIds.length === 0) { + this._props.updateDialog({ + buttons: [ + { + onClick: async () => + await this._deletePipelinesAndPipelineVersions( + false, + false, + selectedIds, + selectedVersionIds, + callback, + ), + text: 'Cancel', + }, + { + onClick: async () => + await this._deletePipelinesAndPipelineVersions( + true, + false, + selectedIds, + selectedVersionIds, + callback, + ), + text: 'Delete', + }, + ], + content: `Do you want to delete the selected ${pipelineVersionMessage}? This action cannot be undone.`, + onClose: async () => + await this._deletePipelinesAndPipelineVersions( + false, + false, + selectedIds, + selectedVersionIds, + callback, + ), + title: `Delete ${pipelineVersionMessage}?`, + }); + return; + } + + // Check if any selected pipelines have versions + let pipelinesWithVersions: string[] = []; + try { + await Promise.all( + selectedIds.map(async pipelineId => { + try { + const response = await Apis.pipelineServiceApiV2.listPipelineVersions( + pipelineId, + undefined, + 1, + ); + if (response.pipeline_versions && response.pipeline_versions.length > 0) { + pipelinesWithVersions.push(pipelineId); + } + } catch (err) { + // If we can't check versions, assume pipeline might have versions to be safe + pipelinesWithVersions.push(pipelineId); + } + }), + ); + } catch (err) { + // If checking fails, show error and abort + this._props.updateDialog({ + buttons: [{ text: 'Dismiss' }], + content: 'Failed to check pipeline versions. 
Please try again.', + title: 'Error', + }); + return; + } + + // If no pipelines have versions, show simple delete dialog + if (pipelinesWithVersions.length === 0) { + this._props.updateDialog({ + buttons: [ + { + onClick: async () => + await this._deletePipelinesAndPipelineVersions( + false, + false, + selectedIds, + selectedVersionIds, + callback, + ), + text: 'Cancel', + }, + { + onClick: async () => + await this._deletePipelinesAndPipelineVersions( + true, + false, // cascade = false is safe since no versions exist + selectedIds, + selectedVersionIds, + callback, + ), + text: 'Delete', + }, + ], + content: `Do you want to delete the selected ${pipelineMessage}${ + pipelineVersionMessage ? ` and ${pipelineVersionMessage}` : '' + }? This action cannot be undone.`, + onClose: async () => + await this._deletePipelinesAndPipelineVersions( + false, + false, + selectedIds, + selectedVersionIds, + callback, + ), + title: `Delete ` + pipelineMessage + andMessage + pipelineVersionMessage + `?`, + }); + return; + } + + // Some pipelines have versions - show cascade warning dialog + const pipelinesWithVersionsMessage = this._nouns( + pipelinesWithVersions.length, + 'pipeline', + 'pipelines', + ); this._props.updateDialog({ buttons: [ { onClick: async () => await this._deletePipelinesAndPipelineVersions( + false, false, selectedIds, selectedVersionIds, @@ -793,15 +915,24 @@ export default class Buttons { onClick: async () => await this._deletePipelinesAndPipelineVersions( true, + true, // cascade = true to delete pipelines with their versions selectedIds, selectedVersionIds, callback, ), - text: 'Delete', + text: 'Delete All', }, ], + content: `${pipelinesWithVersionsMessage} ${ + pipelinesWithVersions.length === 1 ? 'has' : 'have' + } existing versions. Deleting ${ + pipelinesWithVersions.length === 1 ? 'this pipeline' : 'these pipelines' + } will also delete all ${pipelinesWithVersions.length === 1 ? 'its' : 'their'} versions. ${ + pipelineVersionMessage ? `Additionally, ${pipelineVersionMessage} will be deleted. ` : '' + }This action cannot be undone.`, onClose: async () => await this._deletePipelinesAndPipelineVersions( + false, false, selectedIds, selectedVersionIds, @@ -813,6 +944,7 @@ export default class Buttons { private async _deletePipelinesAndPipelineVersions( confirmed: boolean, + cascade: boolean, selectedIds: string[], selectedVersionIds: { [pipelineId: string]: string[] }, callback: (pipelineId: string | undefined, selectedIds: string[]) => void, @@ -831,7 +963,7 @@ export default class Buttons { await Promise.all( selectedIds.map(async id => { try { - await Apis.pipelineServiceApiV2.deletePipeline(id); + await Apis.pipelineServiceApiV2.deletePipeline(id, cascade); } catch (err) { unsuccessfulIds.push(id); succeededfulIds.delete(id); @@ -842,11 +974,20 @@ export default class Buttons { ); // Remove successfully deleted pipelines from selectedVersionIds if exists. - const toBeDeletedVersionIds = Object.fromEntries( - Object.entries(selectedVersionIds).filter( - ([pipelineId, _]) => !succeededfulIds.has(pipelineId), - ), - ); + // If cascade is true, pipeline versions are already deleted with the pipeline, + // so we only need to delete versions from pipelines that weren't deleted. + const toBeDeletedVersionIds = cascade + ? 
Object.fromEntries( + Object.entries(selectedVersionIds).filter( + ([pipelineId, _]) => + !succeededfulIds.has(pipelineId) && !selectedIds.includes(pipelineId), + ), + ) + : Object.fromEntries( + Object.entries(selectedVersionIds).filter( + ([pipelineId, _]) => !succeededfulIds.has(pipelineId), + ), + ); // Delete pipeline versions. const unsuccessfulVersionIds: { [pipelineId: string]: string[] } = {}; diff --git a/frontend/src/lib/v2/DynamicFlow.test.ts b/frontend/src/lib/v2/DynamicFlow.test.ts index feaad8f3864..8d5a012b5cf 100644 --- a/frontend/src/lib/v2/DynamicFlow.test.ts +++ b/frontend/src/lib/v2/DynamicFlow.test.ts @@ -22,7 +22,7 @@ import { TASK_NAME_KEY, updateFlowElementsState, } from './DynamicFlow'; -import { convertFlowElements, NodeTypeNames } from './StaticFlow'; +import { convertFlowElements, getTaskKeyFromNodeKey, NodeTypeNames } from './StaticFlow'; import fs from 'fs'; import jsyaml from 'js-yaml'; @@ -156,7 +156,9 @@ describe('DynamicFlow', () => { const execution = new Execution(); execution.setId(1); - execution.getCustomPropertiesMap().set(TASK_NAME_KEY, new Value().setStringValue(label)); + execution + .getCustomPropertiesMap() + .set(TASK_NAME_KEY, new Value().setStringValue(getTaskKeyFromNodeKey(elem.id))); const nodeMlmdInfo = getNodeMlmdInfo(elem, [execution], [], []); expect(nodeMlmdInfo).toEqual({ execution }); }); diff --git a/frontend/src/lib/v2/DynamicFlow.ts b/frontend/src/lib/v2/DynamicFlow.ts index fc44ccd1996..c4f7fb02cac 100644 --- a/frontend/src/lib/v2/DynamicFlow.ts +++ b/frontend/src/lib/v2/DynamicFlow.ts @@ -32,7 +32,7 @@ import { PipelineFlowElement, TaskType, } from 'src/lib/v2/StaticFlow'; -import { getArtifactNameFromEvent, LinkedArtifact } from 'src/mlmd/MlmdUtils'; +import { getArtifactNameFromEvent, LinkedArtifact, ExecutionHelpers } from 'src/mlmd/MlmdUtils'; import { NodeMlmdInfo } from 'src/pages/RunDetailsV2'; import { Artifact, Event, Execution, Value } from 'src/third_party/mlmd'; @@ -274,6 +274,10 @@ export function updateFlowElementsState( if (executions) { (updatedElem.data as ExecutionFlowElementData).state = executions[0]?.getLastKnownState(); (updatedElem.data as ExecutionFlowElementData).mlmdId = executions[0]?.getId(); + // Use ExecutionHelpers.getName() which reads display_name from MLMD custom properties + (updatedElem.data as ExecutionFlowElementData).label = ExecutionHelpers.getName( + executions[0], + ); } } else if (NodeTypeNames.ARTIFACT === elem.type) { let linkedArtifact = artifactNodeKeyToArtifact.get(elem.id); @@ -309,9 +313,8 @@ export function updateFlowElementsState( } function getTaskLabelByPipelineFlowElement(elem: PipelineFlowElement) { - const taskLabel = elem.data?.label; - if (taskLabel === undefined) return getTaskKeyFromNodeKey(elem.id); - return taskLabel; + // Always use the original task name from the node ID for MLMD data lookups + return getTaskKeyFromNodeKey(elem.id); } function getExecutionsUnderDAG( diff --git a/frontend/src/pages/PipelineList.test.tsx b/frontend/src/pages/PipelineList.test.tsx index 511fe7025d0..94fcd0f579e 100644 --- a/frontend/src/pages/PipelineList.test.tsx +++ b/frontend/src/pages/PipelineList.test.tsx @@ -346,6 +346,7 @@ describe('PipelineList', () => { }); it('calls delete API for selected pipeline after delete dialog is confirmed', async () => { + listPipelineVersionsSpy.mockImplementation(() => ({ pipeline_versions: [] })); tree = await mountWithNPipelines(1); tree .find('.tableRow') @@ -358,10 +359,11 @@ describe('PipelineList', () => { const call = 
updateDialogSpy.mock.calls[0][0]; const confirmBtn = call.buttons.find((b: any) => b.text === 'Delete'); await confirmBtn.onClick(); - expect(deletePipelineSpy).toHaveBeenLastCalledWith('test-pipeline-id0'); + expect(deletePipelineSpy).toHaveBeenLastCalledWith('test-pipeline-id0', false); }); it('updates the selected indices after a pipeline is deleted', async () => { + listPipelineVersionsSpy.mockImplementation(() => ({ pipeline_versions: [] })); tree = await mountWithNPipelines(5); tree .find('.tableRow') @@ -380,6 +382,7 @@ describe('PipelineList', () => { }); it('updates the selected indices after multiple pipelines are deleted', async () => { + listPipelineVersionsSpy.mockImplementation(() => ({ pipeline_versions: [] })); tree = await mountWithNPipelines(5); tree .find('.tableRow') @@ -402,6 +405,7 @@ describe('PipelineList', () => { }); it('calls delete API for all selected pipelines after delete dialog is confirmed', async () => { + listPipelineVersionsSpy.mockImplementation(() => ({ pipeline_versions: [] })); tree = await mountWithNPipelines(5); tree .find('.tableRow') @@ -423,12 +427,13 @@ describe('PipelineList', () => { const confirmBtn = call.buttons.find((b: any) => b.text === 'Delete'); await confirmBtn.onClick(); expect(deletePipelineSpy).toHaveBeenCalledTimes(3); - expect(deletePipelineSpy).toHaveBeenCalledWith('test-pipeline-id0'); - expect(deletePipelineSpy).toHaveBeenCalledWith('test-pipeline-id1'); - expect(deletePipelineSpy).toHaveBeenCalledWith('test-pipeline-id4'); + expect(deletePipelineSpy).toHaveBeenCalledWith('test-pipeline-id0', false); + expect(deletePipelineSpy).toHaveBeenCalledWith('test-pipeline-id1', false); + expect(deletePipelineSpy).toHaveBeenCalledWith('test-pipeline-id4', false); }); it('shows snackbar confirmation after pipeline is deleted', async () => { + listPipelineVersionsSpy.mockImplementation(() => ({ pipeline_versions: [] })); tree = await mountWithNPipelines(1); tree .find('.tableRow') @@ -449,6 +454,7 @@ describe('PipelineList', () => { }); it('shows error dialog when pipeline deletion fails', async () => { + listPipelineVersionsSpy.mockImplementation(() => ({ pipeline_versions: [] })); tree = await mountWithNPipelines(1); tree .find('.tableRow') @@ -470,6 +476,7 @@ describe('PipelineList', () => { }); it('shows error dialog when multiple pipeline deletions fail', async () => { + listPipelineVersionsSpy.mockImplementation(() => ({ pipeline_versions: [] })); tree = await mountWithNPipelines(5); tree .find('.tableRow') @@ -522,16 +529,25 @@ describe('PipelineList', () => { it("delete a pipeline and some other pipeline's version together", async () => { deletePipelineSpy.mockImplementation(() => Promise.resolve()); deletePipelineVersionSpy.mockImplementation(() => Promise.resolve()); - listPipelineVersionsSpy.mockImplementation(() => ({ - pipeline_versions: [ - { - display_name: 'test-pipeline-id1_name', - name: 'test-pipeline-id1_name', - pipeline_id: 'test-pipeline-id1', - pipeline_version_id: 'test-pipeline-version-id1', - }, - ], - })); + + // Mock listPipelineVersions with different responses based on pipeline ID + listPipelineVersionsSpy.mockImplementation((pipelineId: string) => { + if (pipelineId === 'test-pipeline-id1') { + // Return versions for expansion functionality + return { + pipeline_versions: [ + { + display_name: 'test-pipeline-id1_name', + name: 'test-pipeline-id1_name', + pipeline_id: 'test-pipeline-id1', + pipeline_version_id: 'test-pipeline-version-id1', + }, + ], + }; + } + // Return empty for other pipelines so dialog 
shows "Delete" instead of "Delete All" + return { pipeline_versions: [] }; + }); tree = await mountWithNPipelines(2); tree @@ -569,7 +585,7 @@ describe('PipelineList', () => { await deletePipelineVersionSpy; expect(deletePipelineSpy).toHaveBeenCalledTimes(1); - expect(deletePipelineSpy).toHaveBeenCalledWith('test-pipeline-id0'); + expect(deletePipelineSpy).toHaveBeenCalledWith('test-pipeline-id0', false); expect(deletePipelineVersionSpy).toHaveBeenCalledTimes(1); expect(deletePipelineVersionSpy).toHaveBeenCalledWith( @@ -586,4 +602,126 @@ describe('PipelineList', () => { open: true, }); }); + + it('shows "Delete All" dialog when pipelines have versions and calls API with cascade=true', async () => { + listPipelineVersionsSpy.mockImplementation(() => ({ + pipeline_versions: [ + { + pipeline_version_id: 'test-version-id-1', + display_name: 'test version 1', + name: 'test-version-1', + }, + { + pipeline_version_id: 'test-version-id-2', + display_name: 'test version 2', + name: 'test-version-2', + }, + ], + })); + + deletePipelineSpy.mockImplementation(() => Promise.resolve()); + tree = await mountWithNPipelines(1); + tree + .find('.tableRow') + .at(0) + .simulate('click'); + + const deleteBtn = (tree.instance() as PipelineList).getInitialToolbarState().actions[ + ButtonKeys.DELETE_RUN + ]; + await deleteBtn!.action(); + await TestUtils.flushPromises(); + + const call = updateDialogSpy.mock.calls[0][0]; + + // Verify the dialog shows cascade warning and "Delete All" button + expect(call.title).toBe('Delete 1 pipeline?'); + expect(call.content).toContain('pipeline has existing versions'); + expect(call.content).toContain('Deleting this pipeline will also delete all its versions'); + expect(call.content).toContain('This action cannot be undone'); + + const confirmBtn = call.buttons.find((b: any) => b.text === 'Delete All'); + expect(confirmBtn).toBeDefined(); + + await confirmBtn.onClick(); + expect(deletePipelineSpy).toHaveBeenLastCalledWith('test-pipeline-id0', true); + expect(updateSnackbarSpy).toHaveBeenLastCalledWith({ + message: 'Deletion succeeded for 1 pipeline', + open: true, + }); + }); + + it('shows "Delete All" dialog for multiple pipelines with versions', async () => { + listPipelineVersionsSpy.mockImplementation(() => ({ + pipeline_versions: [ + { + pipeline_version_id: 'test-version-id-1', + display_name: 'test version 1', + name: 'test-version-1', + }, + ], + })); + + deletePipelineSpy.mockImplementation(() => Promise.resolve()); + tree = await mountWithNPipelines(3); + + // Select multiple pipelines + tree + .find('.tableRow') + .at(0) + .simulate('click'); + tree + .find('.tableRow') + .at(1) + .simulate('click'); + + const deleteBtn = (tree.instance() as PipelineList).getInitialToolbarState().actions[ + ButtonKeys.DELETE_RUN + ]; + await deleteBtn!.action(); + await TestUtils.flushPromises(); + + const call = updateDialogSpy.mock.calls[0][0]; + + // Verify the dialog shows cascade warning for multiple pipelines + expect(call.title).toBe('Delete 2 pipelines?'); + expect(call.content).toContain('pipelines have existing versions'); + expect(call.content).toContain('Deleting these pipelines will also delete all their versions'); + + const confirmBtn = call.buttons.find((b: any) => b.text === 'Delete All'); + expect(confirmBtn).toBeDefined(); + await confirmBtn.onClick(); + expect(deletePipelineSpy).toHaveBeenCalledTimes(2); + expect(deletePipelineSpy).toHaveBeenCalledWith('test-pipeline-id0', true); + expect(deletePipelineSpy).toHaveBeenCalledWith('test-pipeline-id1', true); + }); 
+ + it('does not call delete API when "Delete All" dialog is canceled', async () => { + listPipelineVersionsSpy.mockImplementation(() => ({ + pipeline_versions: [ + { + pipeline_version_id: 'test-version-id-1', + display_name: 'test version 1', + name: 'test-version-1', + }, + ], + })); + + tree = await mountWithNPipelines(1); + tree + .find('.tableRow') + .at(0) + .simulate('click'); + + const deleteBtn = (tree.instance() as PipelineList).getInitialToolbarState().actions[ + ButtonKeys.DELETE_RUN + ]; + await deleteBtn!.action(); + await TestUtils.flushPromises(); + + const call = updateDialogSpy.mock.calls[0][0]; + const cancelBtn = call.buttons.find((b: any) => b.text === 'Cancel'); + await cancelBtn.onClick(); + expect(deletePipelineSpy).not.toHaveBeenCalled(); + }); }); diff --git a/go.mod b/go.mod index 6960f5d6627..73a2495aa2b 100644 --- a/go.mod +++ b/go.mod @@ -1,139 +1,174 @@ module github.com/kubeflow/pipelines -go 1.23.1 - -toolchain go1.23.8 +go 1.24.6 require ( github.com/Masterminds/squirrel v0.0.0-20190107164353-fa735ea14f09 github.com/VividCortex/mysqlerr v0.0.0-20170204212430-6c6b55f8796f - github.com/argoproj/argo-workflows/v3 v3.5.14 - github.com/aws/aws-sdk-go v1.55.5 + github.com/argoproj/argo-workflows/v3 v3.6.7 + github.com/aws/aws-sdk-go v1.55.5 // indirect github.com/cenkalti/backoff v2.2.1+incompatible github.com/eapache/go-resiliency v1.3.0 - github.com/fsnotify/fsnotify v1.7.0 + github.com/fsnotify/fsnotify v1.8.0 github.com/go-openapi/errors v0.21.0 github.com/go-openapi/runtime v0.21.1 github.com/go-openapi/strfmt v0.21.10 - github.com/go-openapi/swag v0.22.6 + github.com/go-openapi/swag v0.23.0 github.com/go-openapi/validate v0.20.3 github.com/go-sql-driver/mysql v1.8.1 github.com/golang/glog v1.2.4 - github.com/golang/protobuf v1.5.4 + github.com/golang/protobuf v1.5.4 // indirect github.com/google/addlicense v0.0.0-20200906110928-a0294312aa76 - github.com/google/cel-go v0.18.1 - github.com/google/go-cmp v0.6.0 + github.com/google/cel-go v0.25.0 + github.com/google/go-cmp v0.7.0 github.com/google/uuid v1.6.0 github.com/gorilla/mux v1.8.0 - github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 - github.com/grpc-ecosystem/grpc-gateway v1.16.0 - github.com/jackc/pgx/v5 v5.5.4 - github.com/jinzhu/gorm v1.9.1 + github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.2 + // Should match GRPC_GATEWAY_VERSION in backend/api/Dockerfile + github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1 + github.com/jackc/pgx/v5 v5.6.0 github.com/kubeflow/pipelines/api v0.0.0-20250102152816-873e9dedd766 github.com/kubeflow/pipelines/kubernetes_platform v0.0.0-20240725205754-d911c8b73b49 github.com/kubeflow/pipelines/third_party/ml-metadata v0.0.0-20240416215826-da804407ad31 github.com/lestrrat-go/strftime v1.0.4 - github.com/mattn/go-sqlite3 v1.14.19 - github.com/minio/minio-go/v7 v7.0.65 + github.com/mattn/go-sqlite3 v1.14.22 + github.com/minio/minio-go/v7 v7.0.94 + github.com/onsi/ginkgo/v2 v2.21.0 + github.com/onsi/gomega v1.35.1 github.com/peterhellberg/duration v0.0.0-20191119133758-ec6baeebcd10 github.com/pkg/errors v0.9.1 - github.com/prometheus/client_golang v1.19.1 + github.com/prometheus/client_golang v1.21.1 github.com/prometheus/client_model v0.6.1 github.com/robfig/cron v1.2.0 github.com/sirupsen/logrus v1.9.3 - github.com/spf13/viper v1.18.2 + github.com/spf13/viper v1.20.0 github.com/stretchr/testify v1.10.0 go.uber.org/zap v1.27.0 gocloud.dev v0.40.0 - golang.org/x/net v0.38.0 - golang.org/x/oauth2 v0.22.0 - google.golang.org/genproto/googleapis/api 
v0.0.0-20240812133136-8ffd90a71988 - google.golang.org/genproto/googleapis/rpc v0.0.0-20240812133136-8ffd90a71988 - google.golang.org/grpc v1.65.0 - google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0 - google.golang.org/protobuf v1.34.2 + golang.org/x/net v0.40.0 + golang.org/x/oauth2 v0.30.0 + // Pre-generated Go code (.pb.go files) for a subset of the .proto files found in + // googleapis/googleapis both /api and /rpc should be released relatively close to + // the googleapis proto files used for kfp-pipeline-spec see GOOGLEAPIS_COMMIT in api/Makefile + // The tags are misleading, they are actual releases: + // https://pkg.go.dev/google.golang.org/genproto/googleapis/api?tab=versions + // https://pkg.go.dev/google.golang.org/genproto/googleapis/rpc?tab=versions + google.golang.org/genproto/googleapis/api v0.0.0-20250715232539-7130f93afb79 + google.golang.org/genproto/googleapis/rpc v0.0.0-20250715232539-7130f93afb79 + google.golang.org/grpc v1.73.0 + // These runtime protoc-gen-go-grpc & protobuf versions should be identical to the package versions + // used for the generated code (see PROTOC_GEN_GO_GRPC & PROTOBUF_GO in backend/api/Dockerfile) + google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.5.1 + google.golang.org/protobuf v1.36.6 gopkg.in/yaml.v3 v3.0.1 - k8s.io/api v0.30.1 - k8s.io/apimachinery v0.30.1 - k8s.io/client-go v0.30.1 - k8s.io/code-generator v0.30.1 - sigs.k8s.io/controller-runtime v0.18.6 + k8s.io/api v0.32.2 + k8s.io/apimachinery v0.32.2 + k8s.io/client-go v0.32.2 + k8s.io/code-generator v0.31.0 + sigs.k8s.io/controller-runtime v0.19.0 sigs.k8s.io/yaml v1.4.0 ) require ( - cloud.google.com/go v0.115.0 // indirect - cloud.google.com/go/auth v0.8.1 // indirect - cloud.google.com/go/auth/oauth2adapt v0.2.4 // indirect - cloud.google.com/go/compute/metadata v0.5.0 // indirect - cloud.google.com/go/iam v1.1.13 // indirect - cloud.google.com/go/storage v1.43.0 // indirect + github.com/aws/aws-sdk-go-v2 v1.36.3 + github.com/aws/aws-sdk-go-v2/config v1.29.9 + github.com/aws/aws-sdk-go-v2/credentials v1.17.62 + github.com/aws/aws-sdk-go-v2/service/s3 v1.58.3 + gorm.io/driver/mysql v1.6.0 + gorm.io/driver/postgres v1.6.0 + gorm.io/driver/sqlite v1.6.0 + gorm.io/gorm v1.30.1 +) + +require ( + github.com/antlr4-go/antlr/v4 v4.13.0 // indirect + github.com/go-ini/ini v1.67.0 // indirect + github.com/goccy/go-json v0.10.5 // indirect + github.com/minio/crc64nvme v1.0.1 // indirect + github.com/philhofer/fwd v1.1.3-0.20240916144458-20a13a1f6b7c // indirect + github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect + github.com/tinylib/msgp v1.3.0 // indirect + gopkg.in/evanphx/json-patch.v4 v4.12.0 // indirect + sigs.k8s.io/randfill v1.0.0 // indirect +) + +require ( + cel.dev/expr v0.23.1 // indirect + cloud.google.com/go v0.119.0 // indirect + cloud.google.com/go/auth v0.15.0 // indirect + cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect + cloud.google.com/go/compute/metadata v0.6.0 // indirect + cloud.google.com/go/iam v1.4.1 // indirect + cloud.google.com/go/monitoring v1.24.0 // indirect + cloud.google.com/go/storage v1.50.0 // indirect + dario.cat/mergo v1.0.1 // indirect filippo.io/edwards25519 v1.1.0 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.50.0 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.50.0 // indirect github.com/Masterminds/goutils v1.1.1 
// indirect - github.com/Masterminds/semver/v3 v3.2.0 // indirect - github.com/Masterminds/sprig/v3 v3.2.3 // indirect - github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230305170008-8188dc5388df // indirect - github.com/argoproj/pkg v0.13.7-0.20230901113346-235a5432ec98 // indirect + github.com/Masterminds/semver/v3 v3.3.0 // indirect + github.com/Masterminds/sprig/v3 v3.3.0 // indirect + github.com/argoproj/pkg v0.13.7-0.20240704113442-a69fd34a8117 // indirect github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect - github.com/aws/aws-sdk-go-v2 v1.30.3 // indirect github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.3 // indirect - github.com/aws/aws-sdk-go-v2/config v1.27.27 // indirect - github.com/aws/aws-sdk-go-v2/credentials v1.17.27 // indirect - github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.11 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30 // indirect github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.10 // indirect - github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.15 // indirect - github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.15 // indirect - github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 // indirect github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.15 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.3 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3 // indirect github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.17 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.17 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15 // indirect github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.15 // indirect - github.com/aws/aws-sdk-go-v2/service/s3 v1.58.3 // indirect - github.com/aws/aws-sdk-go-v2/service/sso v1.22.4 // indirect - github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.4 // indirect - github.com/aws/aws-sdk-go-v2/service/sts v1.30.3 // indirect - github.com/aws/smithy-go v1.20.3 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.25.1 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.29.1 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.33.17 // indirect + github.com/aws/smithy-go v1.22.2 // indirect github.com/beorn7/perks v1.0.1 // indirect + github.com/cenkalti/backoff/v4 v4.3.0 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f // indirect github.com/colinmarc/hdfs/v2 v2.4.0 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect - github.com/denisenkom/go-mssqldb v0.12.3 // indirect github.com/doublerebel/bellows v0.0.0-20160303004610-f177d92a03d3 // indirect github.com/dustin/go-humanize v1.0.1 // indirect github.com/emicklei/go-restful/v3 v3.11.0 // indirect - github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5 // indirect - github.com/evanphx/json-patch v5.8.0+incompatible // indirect + github.com/envoyproxy/go-control-plane/envoy v1.32.4 // indirect + github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect; indirec github.com/evanphx/json-patch/v5 v5.9.0 // indirect github.com/evilmonkeyinc/jsonpath v0.8.1 // indirect github.com/expr-lang/expr v1.17.0 // indirect 
github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/fxamacker/cbor/v2 v2.7.0 // indirect + github.com/go-jose/go-jose/v4 v4.0.5 // indirect github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-logr/zapr v1.3.0 // indirect github.com/go-openapi/analysis v0.22.0 // indirect - github.com/go-openapi/jsonpointer v0.20.2 // indirect - github.com/go-openapi/jsonreference v0.20.4 // indirect + github.com/go-openapi/jsonpointer v0.21.0 // indirect + github.com/go-openapi/jsonreference v0.21.0 // indirect github.com/go-openapi/loads v0.21.5 // indirect github.com/go-openapi/spec v0.20.14 // indirect + github.com/go-task/slim-sprig/v3 v3.0.0 // indirect + github.com/go-viper/mapstructure/v2 v2.4.0 // indirect github.com/gogo/protobuf v1.3.2 // indirect - github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect - github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect - github.com/google/gnostic-models v0.6.8 // indirect + github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect + github.com/google/gnostic-models v0.6.9 // indirect github.com/google/gofuzz v1.2.0 // indirect - github.com/google/pprof v0.0.0-20240711041743-f6c9dda6c6da // indirect - github.com/google/s2a-go v0.1.8 // indirect + github.com/google/pprof v0.0.0-20241029153458-d1b30febd7db // indirect + github.com/google/s2a-go v0.1.9 // indirect github.com/google/wire v0.6.0 // indirect - github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect - github.com/googleapis/gax-go/v2 v2.13.0 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect + github.com/googleapis/gax-go/v2 v2.14.1 // indirect github.com/gorilla/websocket v1.5.1 // indirect + github.com/grpc-ecosystem/grpc-gateway v1.16.0 // indirect github.com/hashicorp/go-uuid v1.0.3 // indirect - github.com/hashicorp/hcl v1.0.0 // indirect - github.com/huandu/xstrings v1.3.3 // indirect - github.com/imdario/mergo v0.3.16 // indirect + github.com/huandu/xstrings v1.5.0 // indirect github.com/jackc/pgpassfile v1.0.0 // indirect - github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect - github.com/jackc/puddle/v2 v2.2.1 // indirect + github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect + github.com/jackc/puddle/v2 v2.2.2 // indirect github.com/jcmturner/aescts/v2 v2.0.0 // indirect github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect github.com/jcmturner/gofork v1.7.6 // indirect @@ -145,73 +180,80 @@ require ( github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/josharian/intern v1.0.0 // indirect github.com/json-iterator/go v1.1.12 // indirect - github.com/klauspost/compress v1.17.0 // indirect - github.com/klauspost/cpuid/v2 v2.2.5 // indirect + github.com/klauspost/compress v1.18.0 // indirect + github.com/klauspost/cpuid/v2 v2.2.10 // indirect github.com/klauspost/pgzip v1.2.6 // indirect github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect - github.com/magiconair/properties v1.8.7 // indirect github.com/mailru/easyjson v0.7.7 // indirect github.com/minio/md5-simd v1.1.2 // indirect - github.com/minio/sha256-simd v1.0.1 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect - github.com/moby/spdystream v0.2.0 // indirect + github.com/moby/spdystream v0.5.0 // indirect 
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f // indirect github.com/oklog/ulid v1.3.1 // indirect github.com/opentracing/opentracing-go v1.2.0 // indirect - github.com/pelletier/go-toml/v2 v2.1.0 // indirect + github.com/pelletier/go-toml/v2 v2.2.3 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect - github.com/prometheus/common v0.55.0 // indirect + github.com/prometheus/common v0.62.0 // indirect github.com/prometheus/procfs v0.15.1 // indirect github.com/robfig/cron/v3 v3.0.1 // indirect - github.com/rs/xid v1.5.0 // indirect - github.com/sagikazarmark/locafero v0.4.0 // indirect - github.com/sagikazarmark/slog-shim v0.1.0 // indirect - github.com/shopspring/decimal v1.2.0 // indirect + github.com/rs/xid v1.6.0 // indirect + github.com/sagikazarmark/locafero v0.7.0 // indirect + github.com/shopspring/decimal v1.4.0 // indirect github.com/sourcegraph/conc v0.3.0 // indirect - github.com/spf13/afero v1.11.0 // indirect - github.com/spf13/cast v1.6.0 // indirect - github.com/spf13/pflag v1.0.5 // indirect + github.com/spf13/afero v1.12.0 // indirect + github.com/spf13/cast v1.7.1 // indirect + github.com/spf13/pflag v1.0.6 // indirect + github.com/spiffe/go-spiffe/v2 v2.5.0 // indirect github.com/stoewer/go-strcase v1.3.0 // indirect github.com/subosito/gotenv v1.6.0 // indirect github.com/valyala/bytebufferpool v1.0.0 // indirect github.com/valyala/fasttemplate v1.2.2 // indirect - go.mongodb.org/mongo-driver v1.14.0 // indirect + github.com/x448/float16 v0.8.4 // indirect + github.com/zeebo/errs v1.4.0 // indirect + go.mongodb.org/mongo-driver v1.13.1 // indirect go.opencensus.io v0.24.0 // indirect - go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.53.0 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.53.0 // indirect - go.opentelemetry.io/otel v1.32.0 // indirect - go.opentelemetry.io/otel/metric v1.32.0 // indirect - go.opentelemetry.io/otel/trace v1.32.0 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/contrib/detectors/gcp v1.35.0 // indirect + go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.59.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.59.0 // indirect + go.opentelemetry.io/contrib/instrumentation/runtime v0.48.0 // indirect + go.opentelemetry.io/otel v1.35.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.23.0 // indirect + go.opentelemetry.io/otel/exporters/prometheus v0.45.1 // indirect + go.opentelemetry.io/otel/metric v1.35.0 // indirect + go.opentelemetry.io/otel/sdk v1.35.0 // indirect + go.opentelemetry.io/otel/sdk/metric v1.35.0 // indirect + go.opentelemetry.io/otel/trace v1.35.0 // indirect + go.opentelemetry.io/proto/otlp v1.3.1 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/crypto v0.36.0 // indirect + golang.org/x/crypto v0.38.0 // indirect golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 // indirect - golang.org/x/mod v0.19.0 // indirect - golang.org/x/sync v0.12.0 // indirect - golang.org/x/sys v0.31.0 // indirect - golang.org/x/term v0.30.0 // indirect - golang.org/x/text v0.23.0 // indirect - golang.org/x/time v0.6.0 // indirect - golang.org/x/tools v0.23.0 // indirect + golang.org/x/mod 
v0.25.0 // indirect + golang.org/x/sync v0.15.0 // indirect + golang.org/x/sys v0.33.0 // indirect + golang.org/x/term v0.32.0 // indirect + golang.org/x/text v0.26.0 // indirect + golang.org/x/time v0.11.0 // indirect + golang.org/x/tools v0.33.0 // indirect golang.org/x/xerrors v0.0.0-20240716161551-93cc26a95ae9 // indirect gomodules.xyz/jsonpatch/v2 v2.4.0 // indirect - google.golang.org/api v0.191.0 // indirect - google.golang.org/genproto v0.0.0-20240812133136-8ffd90a71988 // indirect + google.golang.org/api v0.228.0 // indirect + google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb // indirect gopkg.in/inf.v0 v0.9.1 // indirect - gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect - k8s.io/apiextensions-apiserver v0.30.1 // indirect - k8s.io/gengo/v2 v2.0.0-20240228010128-51d4e06bde70 // indirect + k8s.io/apiextensions-apiserver v0.31.0 // indirect + k8s.io/gengo/v2 v2.0.0-20240826214909-a7b603a56eb7 // indirect k8s.io/klog/v2 v2.130.1 // indirect - k8s.io/kube-openapi v0.0.0-20240228011516-70dd3763d340 // indirect - k8s.io/utils v0.0.0-20230726121419-3b25d923346b // indirect - sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd // indirect - sigs.k8s.io/structured-merge-diff/v4 v4.4.1 // indirect + k8s.io/kube-openapi v0.0.0-20250318190949-c8a335a9a2ff // indirect + k8s.io/utils v0.0.0-20241210054802-24370beab758 // indirect + sigs.k8s.io/json v0.0.0-20241010143419-9aa6b5e7a4b3 // indirect + sigs.k8s.io/structured-merge-diff/v4 v4.6.0 // indirect ) // These dependencies are managed relative to project root diff --git a/go.sum b/go.sum index e65d64a6b4d..8a8673fd14f 100644 --- a/go.sum +++ b/go.sum @@ -1,25 +1,32 @@ +cel.dev/expr v0.23.1 h1:K4KOtPCJQjVggkARsjG9RWXP6O4R73aHeJMa/dmCQQg= +cel.dev/expr v0.23.1/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cloud.google.com/go v0.115.0 h1:CnFSK6Xo3lDYRoBKEcAtia6VSC837/ZkJuRduSFnr14= -cloud.google.com/go v0.115.0/go.mod h1:8jIM5vVgoAEoiVxQ/O4BFTfHqulPZgs/ufEzMcFMdWU= -cloud.google.com/go/auth v0.8.1 h1:QZW9FjC5lZzN864p13YxvAtGUlQ+KgRL+8Sg45Z6vxo= -cloud.google.com/go/auth v0.8.1/go.mod h1:qGVp/Y3kDRSDZ5gFD/XPUfYQ9xW1iI7q8RIRoCyBbJc= -cloud.google.com/go/auth/oauth2adapt v0.2.4 h1:0GWE/FUsXhf6C+jAkWgYm7X9tK8cuEIfy19DBn6B6bY= -cloud.google.com/go/auth/oauth2adapt v0.2.4/go.mod h1:jC/jOpwFP6JBxhB3P5Rr0a9HLMC/Pe3eaL4NmdvqPtc= -cloud.google.com/go/compute/metadata v0.5.0 h1:Zr0eK8JbFv6+Wi4ilXAR8FJ3wyNdpxHKJNPos6LTZOY= -cloud.google.com/go/compute/metadata v0.5.0/go.mod h1:aHnloV2TPI38yx4s9+wAZhHykWvVCfu7hQbF+9CWoiY= -cloud.google.com/go/iam v1.1.13 h1:7zWBXG9ERbMLrzQBRhFliAV+kjcRToDTgQT3CTwYyv4= -cloud.google.com/go/iam v1.1.13/go.mod h1:K8mY0uSXwEXS30KrnVb+j54LB/ntfZu1dr+4zFMNbus= -cloud.google.com/go/longrunning v0.5.12 h1:5LqSIdERr71CqfUsFlJdBpOkBH8FBCFD7P1nTWy3TYE= -cloud.google.com/go/longrunning v0.5.12/go.mod h1:S5hMV8CDJ6r50t2ubVJSKQVv5u0rmik5//KgLO3k4lU= -cloud.google.com/go/storage v1.43.0 h1:CcxnSohZwizt4LCzQHWvBf1/kvtHUn7gk9QERXPyXFs= -cloud.google.com/go/storage v1.43.0/go.mod h1:ajvxEa7WmZS1PxvKRq4bq0tFT3vMd502JwstCcYv0Q0= +cloud.google.com/go v0.119.0 h1:tw7OjErMzJKbbjaEHkrt60KQrK5Wus/boCZ7tm5/RNE= +cloud.google.com/go v0.119.0/go.mod h1:fwB8QLzTcNevxqi8dcpR+hoMIs3jBherGS9VUBDAW08= +cloud.google.com/go/auth v0.15.0 
h1:Ly0u4aA5vG/fsSsxu98qCQBemXtAtJf+95z9HK+cxps= +cloud.google.com/go/auth v0.15.0/go.mod h1:WJDGqZ1o9E9wKIL+IwStfyn/+s59zl4Bi+1KQNVXLZ8= +cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= +cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= +cloud.google.com/go/compute/metadata v0.6.0 h1:A6hENjEsCDtC1k8byVsgwvVcioamEHvZ4j01OwKxG9I= +cloud.google.com/go/compute/metadata v0.6.0/go.mod h1:FjyFAW1MW0C203CEOMDTu3Dk1FlqW3Rga40jzHL4hfg= +cloud.google.com/go/iam v1.4.1 h1:cFC25Nv+u5BkTR/BT1tXdoF2daiVbZ1RLx2eqfQ9RMM= +cloud.google.com/go/iam v1.4.1/go.mod h1:2vUEJpUG3Q9p2UdsyksaKpDzlwOrnMzS30isdReIcLM= +cloud.google.com/go/logging v1.13.0 h1:7j0HgAp0B94o1YRDqiqm26w4q1rDMH7XNRU34lJXHYc= +cloud.google.com/go/logging v1.13.0/go.mod h1:36CoKh6KA/M0PbhPKMq6/qety2DCAErbhXT62TuXALA= +cloud.google.com/go/longrunning v0.6.4 h1:3tyw9rO3E2XVXzSApn1gyEEnH2K9SynNQjMlBi3uHLg= +cloud.google.com/go/longrunning v0.6.4/go.mod h1:ttZpLCe6e7EXvn9OxpBRx7kZEB0efv8yBO6YnVMfhJs= +cloud.google.com/go/monitoring v1.24.0 h1:csSKiCJ+WVRgNkRzzz3BPoGjFhjPY23ZTcaenToJxMM= +cloud.google.com/go/monitoring v1.24.0/go.mod h1:Bd1PRK5bmQBQNnuGwHBfUamAV1ys9049oEPHnn4pcsc= +cloud.google.com/go/storage v1.50.0 h1:3TbVkzTooBvnZsk7WaAQfOsNrdoM8QHusXA1cpk6QJs= +cloud.google.com/go/storage v1.50.0/go.mod h1:l7XeiD//vx5lfqE3RavfmU9yvk5Pp0Zhcv482poyafY= +cloud.google.com/go/trace v1.11.3 h1:c+I4YFjxRQjvAhRmSsmjpASUKq88chOX854ied0K/pE= +cloud.google.com/go/trace v1.11.3/go.mod h1:pt7zCYiDSQjC9Y2oqCsh9jF4GStB/hmjrYLsxRR27q8= +dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s= +dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= -github.com/Azure/azure-sdk-for-go/sdk/azcore v0.19.0/go.mod h1:h6H6c8enJmmocHUbLiiGY6sx7f9i+X3m1CHdd5c6Rdw= -github.com/Azure/azure-sdk-for-go/sdk/azidentity v0.11.0/go.mod h1:HcM1YX14R7CJcghJGOYCgdezslRSVzqwLf/q+4Y2r/0= -github.com/Azure/azure-sdk-for-go/sdk/internal v0.7.0/go.mod h1:yqy467j36fJxcRV2TzfVZ1pCb5vxm4BtZPUdYWe/Xo8= github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI= github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0= github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA= @@ -28,12 +35,20 @@ github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxB github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc= github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0 h1:ErKg/3iS1AKcTkf3yixlZ54f9U1rljCkQyEXWUnIUxc= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0/go.mod h1:yAZHSGnqScoU556rBOVkwLze6WP5N+U11RHuWaGVxwY= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.50.0 h1:5IT7xOdq17MtcdtL/vtl6mGfzhaq4m4vpollPRmlsBQ= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.50.0/go.mod h1:ZV4VOm0/eHR06JLrXWe09068dHpr3TRpY9Uo7T+anuA= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.50.0 
h1:nNMpRpnkWDAaqcpxMJvxa/Ud98gjbYwayJY4/9bdjiU= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.50.0/go.mod h1:SZiPHWGOOk3bl8tkevxkoiwPgsIl6CwrWcbwjfHZpdM= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.50.0 h1:ig/FpDD2JofP/NExKQUbn7uOSZzJAQqogfqluZK4ed4= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.50.0/go.mod h1:otE2jQekW/PqXk1Awf5lmfokJx4uwuqcj1ab5SpGeW0= github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= -github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g= -github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= -github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA= -github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM= +github.com/Masterminds/semver/v3 v3.3.0 h1:B8LGeaivUe71a5qox1ICM/JLl0NqZSW5CHyL+hmvYS0= +github.com/Masterminds/semver/v3 v3.3.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= +github.com/Masterminds/sprig/v3 v3.3.0 h1:mQh0Yrg1XPo6vjYXgtf5OtijNAKJRNcTdOOGZe3tPhs= +github.com/Masterminds/sprig/v3 v3.3.0/go.mod h1:Zy1iXRYNqNLUolqCpL4uhk6SHUMAOSCzdgBfDb35Lz0= github.com/Masterminds/squirrel v0.0.0-20190107164353-fa735ea14f09 h1:enWVS77aJkLWVIUExiqF6A8eWTVzCXUKUvkST3/wyKI= github.com/Masterminds/squirrel v0.0.0-20190107164353-fa735ea14f09/go.mod h1:yaPeOnPG5ZRwL9oKdTsO/prlkPbXWZlRVMQ/gGlzIuA= github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ= @@ -48,12 +63,12 @@ github.com/VividCortex/mysqlerr v0.0.0-20170204212430-6c6b55f8796f/go.mod h1:f3H github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= -github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230305170008-8188dc5388df h1:7RFfzj4SSt6nnvCPbCqijJi1nWCd+TqAT3bYCStRC18= -github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230305170008-8188dc5388df/go.mod h1:pSwJ0fSY5KhvocuWSx4fz3BA8OrA1bQn+K1Eli3BRwM= -github.com/argoproj/argo-workflows/v3 v3.5.14 h1:x51bmtdfraXtxrd9QRpK3daM7txXe/GpGTcyRs6bsy4= -github.com/argoproj/argo-workflows/v3 v3.5.14/go.mod h1:AJV9jeUtPgeDcJkoIJUwG4P006RvT4o6UkKXOza3F88= -github.com/argoproj/pkg v0.13.7-0.20230901113346-235a5432ec98 h1:Y1wJVJePMad3LwH+OIX4cl9ND3251XUNxjgpxFRWmZs= -github.com/argoproj/pkg v0.13.7-0.20230901113346-235a5432ec98/go.mod h1:2NWmOLTf7x7egzrvRhJrqfVI6QM9eq/ONYgHcuNju9s= +github.com/antlr4-go/antlr/v4 v4.13.0 h1:lxCg3LAv+EUK6t1i0y1V6/SLeUi0eKEKdhQAlS8TVTI= +github.com/antlr4-go/antlr/v4 v4.13.0/go.mod h1:pfChB/xh/Unjila75QW7+VU4TSnWnnk9UTnmpPaOR2g= +github.com/argoproj/argo-workflows/v3 v3.6.7 h1:3vT0ygPVtZxSSUaZsCeI+Th/Bm9Cfc2J1FQOGe2iKoM= +github.com/argoproj/argo-workflows/v3 v3.6.7/go.mod h1:mQbGM8aGyFJwtm9bfmOxLu2SxJDsPosMPHiztR7YmOE= +github.com/argoproj/pkg v0.13.7-0.20240704113442-a69fd34a8117 h1:iOmb5RDUnQ80ZLaBYCbfgNxMJ7qC0boM267nlzMyFjo= +github.com/argoproj/pkg v0.13.7-0.20240704113442-a69fd34a8117/go.mod h1:mwXbiH0ojJzbstR8XV9Ha/dK4IHHTKfgkQi2Kz8Aq0Y= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 
h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= github.com/asaskevich/govalidator v0.0.0-20180720115003-f9ffefc3facf/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= @@ -64,51 +79,67 @@ github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef/go.mod h1:W github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so= github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= github.com/aws/aws-sdk-go v1.34.28/go.mod h1:H7NKnBqNVzoTJpGfLrQkkD+ytBA93eiDYi/+8rV9s48= -github.com/aws/aws-sdk-go v1.45.1/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= github.com/aws/aws-sdk-go v1.55.5 h1:KKUZBfBoyqy5d3swXyiC7Q76ic40rYcbqH7qjh59kzU= github.com/aws/aws-sdk-go v1.55.5/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU= -github.com/aws/aws-sdk-go-v2 v1.30.3 h1:jUeBtG0Ih+ZIFH0F4UkmL9w3cSpaMv9tYYDbzILP8dY= -github.com/aws/aws-sdk-go-v2 v1.30.3/go.mod h1:nIQjQVp5sfpQcTc9mPSr1B0PaWK5ByX9MOoDadSN4lc= +github.com/aws/aws-sdk-go-v2 v1.30.1/go.mod h1:nIQjQVp5sfpQcTc9mPSr1B0PaWK5ByX9MOoDadSN4lc= +github.com/aws/aws-sdk-go-v2 v1.36.3 h1:mJoei2CxPutQVxaATCzDUjcZEjVRdpsiiXi2o38yqWM= +github.com/aws/aws-sdk-go-v2 v1.36.3/go.mod h1:LLXuLpgzEbD766Z5ECcRmi8AzSwfZItDtmABVkRLGzg= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.3 h1:tW1/Rkad38LA15X4UQtjXZXNKsCgkshC3EbmcUmghTg= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.3/go.mod h1:UbnqO+zjqk3uIt9yCACHJ9IVNhyhOCnYk8yA19SAWrM= -github.com/aws/aws-sdk-go-v2/config v1.27.27 h1:HdqgGt1OAP0HkEDDShEl0oSYa9ZZBSOmKpdpsDMdO90= -github.com/aws/aws-sdk-go-v2/config v1.27.27/go.mod h1:MVYamCg76dFNINkZFu4n4RjDixhVr51HLj4ErWzrVwg= -github.com/aws/aws-sdk-go-v2/credentials v1.17.27 h1:2raNba6gr2IfA0eqqiP2XiQ0UVOpGPgDSi0I9iAP+UI= -github.com/aws/aws-sdk-go-v2/credentials v1.17.27/go.mod h1:gniiwbGahQByxan6YjQUMcW4Aov6bLC3m+evgcoN4r4= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.11 h1:KreluoV8FZDEtI6Co2xuNk/UqI9iwMrOx/87PBNIKqw= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.11/go.mod h1:SeSUYBLsMYFoRvHE0Tjvn7kbxaUhl75CJi1sbfhMxkU= +github.com/aws/aws-sdk-go-v2/config v1.27.23/go.mod h1:WMMYHqLCFu5LH05mFOF5tsq1PGEMfKbu083VKqLCd0o= +github.com/aws/aws-sdk-go-v2/config v1.29.9 h1:Kg+fAYNaJeGXp1vmjtidss8O2uXIsXwaRqsQJKXVr+0= +github.com/aws/aws-sdk-go-v2/config v1.29.9/go.mod h1:oU3jj2O53kgOU4TXq/yipt6ryiooYjlkqqVaZk7gY/U= +github.com/aws/aws-sdk-go-v2/credentials v1.17.23/go.mod h1:V/DvSURn6kKgcuKEk4qwSwb/fZ2d++FFARtWSbXnLqY= +github.com/aws/aws-sdk-go-v2/credentials v1.17.24/go.mod h1:Hld7tmnAkoBQdTMNYZGzztzKRdA4fCdn9L83LOoigac= +github.com/aws/aws-sdk-go-v2/credentials v1.17.62 h1:fvtQY3zFzYJ9CfixuAQ96IxDrBajbBWGqjNTCa79ocU= +github.com/aws/aws-sdk-go-v2/credentials v1.17.62/go.mod h1:ElETBxIQqcxej++Cs8GyPBbgMys5DgQPTwo7cUPDKt8= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.9/go.mod h1:WQr3MY7AxGNxaqAtsDWn+fBxmd4XvLkzeqQ8P1VM0/w= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30 h1:x793wxmUWVDhshP8WW2mlnXuFrO4cOd3HLBroh1paFw= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30/go.mod h1:Jpne2tDnYiFascUEs2AWHJL9Yp7A5ZVy3TNyxaAjD6M= github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.10 h1:zeN9UtUlA6FTx0vFSayxSX32HDw73Yb6Hh2izDSFxXY= github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.10/go.mod h1:3HKuexPDcwLWPaqpW2UR/9n8N/u/3CKcGAzSs8p8u8g= 
-github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.15 h1:SoNJ4RlFEQEbtDcCEt+QG56MY4fm4W8rYirAmq+/DdU= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.15/go.mod h1:U9ke74k1n2bf+RIgoX1SXFed1HLs51OgUSs+Ph0KJP8= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.15 h1:C6WHdGnTDIYETAm5iErQUiVNsclNx9qbJVPIt03B6bI= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.15/go.mod h1:ZQLZqhcu+JhSrA9/NXRm8SkDvsycE+JkV3WGY41e+IM= -github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.13/go.mod h1:+rdA6ZLpaSeM7tSg/B0IEDinCIBJGmW8rKDFkYpP04g= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34 h1:ZK5jHhnrioRkUNOc+hOgQKlUL5JeC3S6JgLxtQ+Rm0Q= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34/go.mod h1:p4VfIceZokChbA9FzMbRGz5OV+lekcVtHlPKEO0gSZY= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.13/go.mod h1:i+kbfa76PQbWw/ULoWnp51EYVWH4ENln76fLQE3lXT8= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34 h1:SZwFm17ZUNNg5Np0ioo/gq8Mn6u9w19Mri8DnJ15Jf0= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34/go.mod h1:dFZsC0BLo346mvKQLWmoJxT+Sjp+qcVR1tRVHQGOH9Q= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 h1:bIqFDwgGXXN1Kpp99pDOdKMTTb5d2KyU5X/BZxjOkRo= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3/go.mod h1:H5O/EsxDWyU+LP/V8i5sm8cxoZgc2fdNR9bxlOFrQTo= github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.15 h1:Z5r7SycxmSllHYmaAZPpmN8GviDrSGhMS6bldqtXZPw= github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.15/go.mod h1:CetW7bDE00QoGEmPUoZuRog07SGVAUVW6LFpNP0YfIg= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.3 h1:dT3MqvGhSoaIhRseqw2I0yH81l7wiR2vjs57O51EAm8= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.3/go.mod h1:GlAeCkHwugxdHaueRr4nhPuY+WW+gR8UjlcqzPr1SPI= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3 h1:eAh2A4b5IzM/lum78bZ590jy36+d/aFLgKF/4Vd1xPE= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3/go.mod h1:0yKJC/kb8sAnmlYa6Zs3QVYqaC8ug2AbnNChv5Ox3uA= github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.17 h1:YPYe6ZmvUfDDDELqEKtAd6bo8zxhkm+XEFEzQisqUIE= github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.17/go.mod h1:oBtcnYua/CgzCWYN7NZ5j7PotFDaFSUjCYVTtfyn7vw= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.17 h1:HGErhhrxZlQ044RiM+WdoZxp0p+EGM62y3L6pwA4olE= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.17/go.mod h1:RkZEx4l0EHYDJpWppMJ3nD9wZJAa8/0lq9aVC+r2UII= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.15/go.mod h1:9xWJ3Q/S6Ojusz1UIkfycgD1mGirJfLLKqq3LPT7WN8= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15 h1:dM9/92u2F1JbDaGooxTq18wmmFzbJRfXfVfy96/1CXM= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15/go.mod h1:SwFBy2vjtA0vZbjjaFtfN045boopadnoVPhu4Fv66vY= github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.15 h1:246A4lSTXWJw/rmlQI+TT2OcqeDMKBdyjEQrafMaQdA= github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.15/go.mod h1:haVfg3761/WF7YPuJOER2MP0k4UAXyHaLclKXB6usDg= github.com/aws/aws-sdk-go-v2/service/s3 v1.58.3 h1:hT8ZAZRIfqBqHbzKTII+CIiY8G2oC9OpLedkZ51DWl8= github.com/aws/aws-sdk-go-v2/service/s3 v1.58.3/go.mod h1:Lcxzg5rojyVPU/0eFwLtcyTaek/6Mtic5B1gJo7e/zE= -github.com/aws/aws-sdk-go-v2/service/sso v1.22.4 
h1:BXx0ZIxvrJdSgSvKTZ+yRBeSqqgPM89VPlulEcl37tM= -github.com/aws/aws-sdk-go-v2/service/sso v1.22.4/go.mod h1:ooyCOXjvJEsUw7x+ZDHeISPMhtwI3ZCB7ggFMcFfWLU= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.4 h1:yiwVzJW2ZxZTurVbYWA7QOrAaCYQR72t0wrSBfoesUE= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.4/go.mod h1:0oxfLkpz3rQ/CHlx5hB7H69YUpFiI1tql6Q6Ne+1bCw= -github.com/aws/aws-sdk-go-v2/service/sts v1.30.3 h1:ZsDKRLXGWHk8WdtyYMoGNO7bTudrvuKpDKgMVRlepGE= -github.com/aws/aws-sdk-go-v2/service/sts v1.30.3/go.mod h1:zwySh8fpFyXp9yOr/KVzxOl8SRqgf/IDw5aUt9UKFcQ= -github.com/aws/smithy-go v1.20.3 h1:ryHwveWzPV5BIof6fyDvor6V3iUL7nTfiTKXHiW05nE= +github.com/aws/aws-sdk-go-v2/service/sso v1.22.1/go.mod h1:/vWdhoIoYA5hYoPZ6fm7Sv4d8701PiG5VKe8/pPJL60= +github.com/aws/aws-sdk-go-v2/service/sso v1.25.1 h1:8JdC7Gr9NROg1Rusk25IcZeTO59zLxsKgE0gkh5O6h0= +github.com/aws/aws-sdk-go-v2/service/sso v1.25.1/go.mod h1:qs4a9T5EMLl/Cajiw2TcbNt2UNo/Hqlyp+GiuG4CFDI= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.1/go.mod h1:xyFHA4zGxgYkdD73VeezHt3vSKEG9EmFnGwoKlP00u4= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.2/go.mod h1:xyFHA4zGxgYkdD73VeezHt3vSKEG9EmFnGwoKlP00u4= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.29.1 h1:KwuLovgQPcdjNMfFt9OhUd9a2OwcOKhxfvF4glTzLuA= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.29.1/go.mod h1:MlYRNmYu/fGPoxBQVvBYr9nyr948aY/WLUvwBMBJubs= +github.com/aws/aws-sdk-go-v2/service/sts v1.30.1/go.mod h1:jiNR3JqT15Dm+QWq2SRgh0x0bCNSRP2L25+CqPNpJlQ= +github.com/aws/aws-sdk-go-v2/service/sts v1.33.17 h1:PZV5W8yk4OtH1JAuhV2PXwwO9v5G5Aoj+eMCn4T+1Kc= +github.com/aws/aws-sdk-go-v2/service/sts v1.33.17/go.mod h1:cQnB8CUnxbMU82JvlqjKR2HBOm3fe9pWorWBza6MBJ4= github.com/aws/smithy-go v1.20.3/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= +github.com/aws/smithy-go v1.22.2 h1:6D9hW43xKFrRx/tXXfAlIZc4JI+yQe6snnWcQyxSyLQ= +github.com/aws/smithy-go v1.22.2/go.mod h1:irrKGvNn1InZwb2d7fkIRNucdfwR8R+Ts3wxYa/cJHg= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= @@ -122,18 +153,17 @@ github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f h1:C5bqEmzEPLsHm9Mv73lSE9e9bKV23aB1vxOsmZrkl3k= +github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/colinmarc/hdfs/v2 v2.4.0 h1:v6R8oBx/Wu9fHpdPoJJjpGSUxo8NhHIwrwsfhFvU9W0= 
github.com/colinmarc/hdfs/v2 v2.4.0/go.mod h1:0NAO+/3knbMx6+5pCv+Hcbaz4xn/Zzbn9+WIib2rKVI= -github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/denisenkom/go-mssqldb v0.12.3 h1:pBSGx9Tq67pBOTLmxNuirNTeB8Vjmf886Kx+8Y+8shw= -github.com/denisenkom/go-mssqldb v0.12.3/go.mod h1:k0mtMFOnU+AihqFxPMiF05rtiDrorD1Vrm1KEz5hxDo= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= -github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ= github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM= @@ -153,9 +183,15 @@ github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1m github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/go-control-plane v0.13.4 h1:zEqyPVyku6IvWCFwux4x9RxkLOMUL+1vC9xUFv5l2/M= +github.com/envoyproxy/go-control-plane v0.13.4/go.mod h1:kDfuBlDVsSj2MjrLEtRWtHlsWIFcGyB2RMO44Dc5GZA= +github.com/envoyproxy/go-control-plane/envoy v1.32.4 h1:jb83lalDRZSpPWW2Z7Mck/8kXZ5CQAFYVjQcdVIr83A= +github.com/envoyproxy/go-control-plane/envoy v1.32.4/go.mod h1:Gzjc5k8JcJswLjAx1Zm+wSYE20UrLtt7JZMWiWQXQEw= +github.com/envoyproxy/go-control-plane/ratelimit v0.1.0 h1:/G9QYbddjL25KvtKTv3an9lx6VBE2cnb8wp1vEGNYGI= +github.com/envoyproxy/go-control-plane/ratelimit v0.1.0/go.mod h1:Wk+tMFAFbCXaJPzVVHnPgRKdUdwW/KdbRt94AzgRee4= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5 h1:Yzb9+7DPaBjB8zlTR87/ElzFsnQfuHnVUVqpZZIcV5Y= -github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5/go.mod h1:a2zkGnVExMxdzMo3M0Hi/3sEU+cWnZpSni0O6/Yb/P0= +github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfUKS7KJ7spH3d86P8= +github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU= github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/evanphx/json-patch v5.8.0+incompatible h1:1Av9pn2FyxPdvrWNQszj1g6D6YthSmvCfcN6SYclTJg= github.com/evanphx/json-patch v5.8.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= @@ -165,20 +201,23 @@ github.com/evilmonkeyinc/jsonpath v0.8.1 h1:W8K4t8u7aipkQE0hcTICGAdAN0Xph349Ltjg 
github.com/evilmonkeyinc/jsonpath v0.8.1/go.mod h1:EQhs0ZsoD4uD56ZJbO30gMTfHLQ6DEa0/5rT5Ymy42s= github.com/expr-lang/expr v1.17.0 h1:+vpszOyzKLQXC9VF+wA8cVA0tlA984/Wabc/1hF9Whg= github.com/expr-lang/expr v1.17.0/go.mod h1:8/vRC7+7HBzESEqt5kKpYXxrxkr31SaO8r40VO/1IT4= -github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= -github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= -github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= +github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M= +github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= +github.com/fxamacker/cbor/v2 v2.7.0 h1:iM5WgngdRBanHcxugY4JySA0nk1wZorNOpTgCMedv5E= +github.com/fxamacker/cbor/v2 v2.7.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ= github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/globalsign/mgo v0.0.0-20180905125535-1ca0a4f7cbcb/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= github.com/globalsign/mgo v0.0.0-20181015135952-eeefdecb41b8/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= -github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A= +github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= +github.com/go-jose/go-jose/v4 v4.0.5 h1:M6T8+mKZl/+fNNuFHvGIzDz7BTLQPIounk/b9dw3AaE= +github.com/go-jose/go-jose/v4 v4.0.5/go.mod h1:s3P1lRrkT8igV8D9OjyL4WRyHvjB6a4JSllnOrmmBOA= github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas= github.com/go-logr/logr v0.4.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= @@ -217,8 +256,8 @@ github.com/go-openapi/jsonpointer v0.18.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwds github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg= github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= -github.com/go-openapi/jsonpointer v0.20.2 h1:mQc3nmndL8ZBzStEo3JYF8wzmeWffDH4VbXz58sAx6Q= -github.com/go-openapi/jsonpointer v0.20.2/go.mod h1:bHen+N0u1KEO3YlmqOjTT9Adn1RfD91Ar825/PuiRVs= +github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ= +github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY= github.com/go-openapi/jsonreference v0.0.0-20160704190145-13c6e3589ad9/go.mod h1:W3Z9FmVs9qj+KR4zFKmDPGiLdk1D9Rlm7cyMvf57TTg= github.com/go-openapi/jsonreference v0.17.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I= 
github.com/go-openapi/jsonreference v0.18.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I= @@ -226,8 +265,8 @@ github.com/go-openapi/jsonreference v0.19.2/go.mod h1:jMjeRr2HHw6nAVajTXJ4eiUwoh github.com/go-openapi/jsonreference v0.19.3/go.mod h1:rjx6GuL8TTa9VaixXglHmQmIL98+wF9xc8zWvFonSJ8= github.com/go-openapi/jsonreference v0.19.5/go.mod h1:RdybgQwPxbL4UEjuAruzK1x3nE69AqPYEJeo/TWfEeg= github.com/go-openapi/jsonreference v0.19.6/go.mod h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns= -github.com/go-openapi/jsonreference v0.20.4 h1:bKlDxQxQJgwpUSgOENiMPzCTBVuc7vTdXSSgNeAhojU= -github.com/go-openapi/jsonreference v0.20.4/go.mod h1:5pZJyJP2MnYCpoeoMAql78cCHauHj0V9Lhc506VOpw4= +github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ= +github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4= github.com/go-openapi/loads v0.17.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU= github.com/go-openapi/loads v0.18.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU= github.com/go-openapi/loads v0.19.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU= @@ -287,8 +326,8 @@ github.com/go-openapi/swag v0.19.12/go.mod h1:eFdyEBkTdoAf/9RXBvj4cr1nH7GD8Kzo5H github.com/go-openapi/swag v0.19.13/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= github.com/go-openapi/swag v0.19.14/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= -github.com/go-openapi/swag v0.22.6 h1:dnqg1XfHXL9aBxSbktBqFR5CxVyVI+7fYWhAf1JOeTw= -github.com/go-openapi/swag v0.22.6/go.mod h1:Gl91UqO+btAM0plGGxHqJcQZ1ZTy6jbmridBTsDy8A0= +github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= +github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= github.com/go-openapi/validate v0.18.0/go.mod h1:Uh4HdOzKt19xGIGm1qHf/ofbX1YQ4Y+MYsct2VUrAJ4= github.com/go-openapi/validate v0.19.2/go.mod h1:1tRCw7m3jtI8eNWEEliiAqUIcBztB2KDnRCRMUi7GTA= github.com/go-openapi/validate v0.19.3/go.mod h1:90Vh6jjkTn+OT1Eefm0ZixWNFjhtOH7vS9k0lo6zwJo= @@ -302,8 +341,10 @@ github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LB github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI= -github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls= +github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= +github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8= +github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= +github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0= github.com/gobuffalo/depgen v0.0.0-20190329151759-d478694a28d3/go.mod h1:3STtPUQYuzV0gBVOY3vy6CfMm/ljR4pABfrTeHNLHUY= github.com/gobuffalo/depgen v0.1.0/go.mod h1:+ifsuy7fhi15RWncXQQKjWS9JPkdah5sZvtHc2RXGlg= @@ -328,22 +369,19 @@ github.com/gobuffalo/packd v0.1.0/go.mod 
h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWe github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ= github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0= github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= +github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= +github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= github.com/gogo/protobuf v1.2.2-0.20190723190241-65acae22fc9d/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= -github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= -github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA= -github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= -github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A= -github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v1.2.4 h1:CNNw5U8lSiiBk7druxtSHHTsRWcxKoac6kZKm2peBBc= github.com/golang/glog v1.2.4/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= -github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= +github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/protobuf v0.0.0-20161109072736-4bd1920723d7/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -367,10 +405,10 @@ github.com/google/addlicense v0.0.0-20200906110928-a0294312aa76 h1:JypWNzPMSgH5y github.com/google/addlicense v0.0.0-20200906110928-a0294312aa76/go.mod h1:EMjYTRimagHs1FwlIqKyX3wAM0u3rA+McvlIIWmSamA= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/cel-go v0.18.1 h1:V/lAXKq4C3BYLDy/ARzMtpkEEYfHQpZzVyzy69nEUjs= -github.com/google/cel-go v0.18.1/go.mod h1:PVAybmSnWkNMUZR/tEWFUiJ1Np4Hz0MHsZJcgC4zln4= -github.com/google/gnostic-models v0.6.8 h1:yo/ABAfM5IMRsS1VnXjTBvUb61tFIHozhlYvRgGre9I= -github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U= +github.com/google/cel-go v0.25.0 
h1:jsFw9Fhn+3y2kBbltZR4VEz5xKkcIFRPDnuEzAGv5GY= +github.com/google/cel-go v0.25.0/go.mod h1:hjEb6r5SuOSlhCHmFoLzu8HGCERvIsDAbxDAyNU/MmI= +github.com/google/gnostic-models v0.6.9 h1:MU/8wDLif2qCXZmzncUQ/BOfxWfthHi63KqpoNbWqVw= +github.com/google/gnostic-models v0.6.9/go.mod h1:CiWsm0s6BSQd1hRn8/QmxqB6BesYcbSZxsz9b0KuDBw= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= @@ -380,8 +418,9 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/go-replayers/grpcreplay v1.3.0 h1:1Keyy0m1sIpqstQmgz307zhiJ1pV4uIlFds5weTmxbo= github.com/google/go-replayers/grpcreplay v1.3.0/go.mod h1:v6NgKtkijC0d3e3RW8il6Sy5sqRVUwoQa4mHOGEy8DI= github.com/google/go-replayers/httpreplay v1.2.0 h1:VM1wEyyjaoU53BwrOnaf9VhAyQQEEioJvFYxYcLRKzk= @@ -394,24 +433,24 @@ github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXi github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc= github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20240711041743-f6c9dda6c6da h1:xRmpO92tb8y+Z85iUOMOicpCfaYcv7o3Cg3wKrIpg8g= -github.com/google/pprof v0.0.0-20240711041743-f6c9dda6c6da/go.mod h1:K1liHPHnj73Fdn/EKuT8nrFqBihUSKXoLYU0BuatOYo= -github.com/google/s2a-go v0.1.8 h1:zZDs9gcbt9ZPLV0ndSyQk6Kacx2g/X+SKYovpnz3SMM= -github.com/google/s2a-go v0.1.8/go.mod h1:6iNWHTpQ+nfNRN5E00MSdfDwVesa8hhS32PhPO8deJA= +github.com/google/pprof v0.0.0-20241029153458-d1b30febd7db h1:097atOisP2aRj7vFgYQBbFN4U4JNXUNYpxael3UzMyo= +github.com/google/pprof v0.0.0-20241029153458-d1b30febd7db/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144= +github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0= +github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM= github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/wire v0.6.0 h1:HBkoIh4BdSxoyo9PveV8giw7ZsaBOvzWKfcg/6MrVwI= github.com/google/wire v0.6.0/go.mod 
h1:F4QhpQ9EDIdJ1Mbop/NZBRB+5yrR6qg3BnctaoUk6NA= -github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= -github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= +github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4= +github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.13.0 h1:yitjD5f7jQHhyDsnhKEBU52NdvvdSeGzlAnDPT0hH1s= -github.com/googleapis/gax-go/v2 v2.13.0/go.mod h1:Z/fvTZXF8/uw7Xu5GuslPw+bplx6SS338j1Is2S+B7A= +github.com/googleapis/gax-go/v2 v2.14.1 h1:hb0FFeiPaQskmvakKu5EbCbpntQn48jyHuvrkurSS/Q= +github.com/googleapis/gax-go/v2 v2.14.1/go.mod h1:Hb/NubMaVM88SrNkvl8X/o8XWwDJEPqouaLeN2IUxoA= github.com/googleapis/gnostic v0.0.0-20170729233727-0c5108395e2d/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY= github.com/gophercloud/gophercloud v0.1.0/go.mod h1:vxM41WHh5uqHVBMZHzuwNOHh8XEoIEcSTewFxm1c5g8= github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= @@ -420,38 +459,34 @@ github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyC github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI= github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= -github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY= github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY= github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= -github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw= -github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= +github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.2 h1:sGm2vDRFUrQJO/Veii4h4zG2vvqG6uWNkBHSTqXOZk0= +github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.2/go.mod h1:wd1YpapPLivG6nQgbf7ZkG1hhSOXDhhn4MLTknx2aAc= github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1 h1:X5VWvz21y3gzm9Nw/kaUeku/1+uBhcekkmy4IkffJww= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1/go.mod h1:Zanoh4+gvIgluNqcfMVTJueD4wSS5hT7zTt4Mrutd90= github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= -github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= 
-github.com/huandu/xstrings v1.3.3 h1:/Gcsuc1x8JVbJ9/rlye4xZnVAbEkGauT8lbebqcQws4= -github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/huandu/xstrings v1.5.0 h1:2ag3IFq9ZDANvthTwTiqSSZLjDc+BedvHPAp5tJy2TI= +github.com/huandu/xstrings v1.5.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= -github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= -github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4= -github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= -github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk= -github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= -github.com/jackc/pgx/v5 v5.5.4 h1:Xp2aQS8uXButQdnCMWNmvx6UysWQQC+u1EoizjguY+8= -github.com/jackc/pgx/v5 v5.5.4/go.mod h1:ez9gk+OAat140fv9ErkZDYFWmXLfV+++K0uAOiwgm1A= -github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk= -github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx/v5 v5.6.0 h1:SWJzexBzPL5jb0GEsrPMLIsi/3jOo7RHlzTjcAeDrPY= +github.com/jackc/pgx/v5 v5.6.0/go.mod h1:DNZ/vlrUnhWCoFGxHAG8U2ljioxukquj7utPDgtQdTw= +github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= +github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8= github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs= github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo= @@ -464,8 +499,6 @@ github.com/jcmturner/gokrb5/v8 v8.4.4 h1:x1Sv4HaTpepFkXbt2IkL29DXRf8sOfZXo8eRKh6 github.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs= github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY= github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc= -github.com/jinzhu/gorm v1.9.1 h1:lDSDtsCt5AGGSKTs8AHlSDbbgif4G4+CKJ8ETBDVHTA= -github.com/jinzhu/gorm v1.9.1/go.mod h1:Vla75njaFJ8clLU1W44h34PjIkijhjHIYnZxMqCdxqo= github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= @@ -489,18 +522,18 @@ github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= 
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= -github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= -github.com/klauspost/compress v1.17.0 h1:Rnbp4K9EjcDuVuHtd0dgA4qNuv9yKDYKK1ulpJwgrqM= -github.com/klauspost/compress v1.17.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= +github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/klauspost/cpuid/v2 v2.2.3/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY= -github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= -github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= +github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= +github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE= +github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU= github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= @@ -511,6 +544,8 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kubeflow/pipelines/third_party/ml-metadata v0.0.0-20240416215826-da804407ad31 h1:t1G2SexX+SwtYiaFrwH1lzGRSiXYMjd2QDT9842Ytpc= github.com/kubeflow/pipelines/third_party/ml-metadata v0.0.0-20240416215826-da804407ad31/go.mod h1:gh5+EFvuVywvSOYxqT0N91VKuPtScUke/F66RT0NJ80= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 h1:SOEGU9fKiNWd/HOJuq6+3iTQz8KNCLtVX6idSoTLdUw= github.com/lann/builder v0.0.0-20180802200727-47ae307949d0/go.mod h1:dXGbAdH5GtBTC4WfIxhKZfyBF/HBFgRZSWwZ9g/He9o= github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 h1:P6pPBnrTSX3DEVR4fDembhRWSsG5rVo6hYhAB/ADZrk= @@ -521,8 +556,6 @@ github.com/lestrrat-go/strftime v1.0.4 h1:T1Rb9EPkAhgxKqbcMIPguPq8glqXTA1koF8n9B github.com/lestrrat-go/strftime v1.0.4/go.mod h1:E1nN3pCbtMSu1yjSVeyuRFVm/U0xoR76fd03sz+Qz4g= github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= -github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/mailru/easyjson 
v0.0.0-20160728113105-d5b7844b561a/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.0.0-20190312143242-1de009706dbe/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= @@ -534,16 +567,16 @@ github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0 github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= -github.com/mattn/go-sqlite3 v1.14.19 h1:fhGleo2h1p8tVChob4I9HpmVFIAkKGpiukdrgQbWfGI= -github.com/mattn/go-sqlite3 v1.14.19/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= +github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU= +github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= +github.com/minio/crc64nvme v1.0.1 h1:DHQPrYPdqK7jQG/Ls5CTBZWeex/2FMS3G5XGkycuFrY= +github.com/minio/crc64nvme v1.0.1/go.mod h1:eVfm2fAzLlxMdUGc0EEBGSMmPwmXD5XiNRpnu9J3bvg= github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34= github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM= -github.com/minio/minio-go/v7 v7.0.63/go.mod h1:Q6X7Qjb7WMhvG65qKf4gUgA5XaiSox74kR1uAEjxRS4= -github.com/minio/minio-go/v7 v7.0.65 h1:sOlB8T3nQK+TApTpuN3k4WD5KasvZIE3vVFzyyCa0go= -github.com/minio/minio-go/v7 v7.0.65/go.mod h1:R4WVUR6ZTedlCcGwZRauLMIKjgyaWxhs4Mqi/OMPmEc= -github.com/minio/sha256-simd v1.0.1 h1:6kaan5IFmwTNynnKKpDHe6FWHohJOHhCPchzK49dzMM= +github.com/minio/minio-go/v7 v7.0.66/go.mod h1:DHAgmyQEGdW3Cif0UooKOyrT3Vxs82zNdV6tkKhRtbs= +github.com/minio/minio-go/v7 v7.0.94 h1:1ZoksIKPyaSt64AVOyaQvhDOgVC3MfZsWM6mZXRUGtM= +github.com/minio/minio-go/v7 v7.0.94/go.mod h1:71t2CqDt3ThzESgZUlU1rBN54mksGGlkLcFgguDnnAc= github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8= -github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= @@ -553,11 +586,10 @@ github.com/mitchellh/mapstructure v1.4.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RR github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= -github.com/moby/spdystream v0.2.0 h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8= -github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c= +github.com/moby/spdystream v0.5.0 h1:7r0J1Si3QO/kjRitvSLVVFUjxMEb/YLj6S9FF62JBCU= +github.com/moby/spdystream v0.5.0/go.mod h1:xBAYlnt/ay+11ShkdFKNAG7LsyK/tmNBVvVOwrfMgdI= 
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -565,7 +597,6 @@ github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lN github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= -github.com/modocache/gover v0.0.0-20171022184752-b58185e213c5/go.mod h1:caMODM3PzxT8aQXRPkAt8xlV/e7d7w8GM5g0fa5F0D8= github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= @@ -578,39 +609,40 @@ github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= -github.com/onsi/ginkgo/v2 v2.17.1 h1:V++EzdbhI4ZV4ev0UTIj0PzhzOcReJFyJaLjtSF55M8= -github.com/onsi/ginkgo/v2 v2.17.1/go.mod h1:llBI3WDLL9Z6taip6f33H76YcWtJv+7R3HigUjbIBOs= +github.com/onsi/ginkgo/v2 v2.21.0 h1:7rg/4f3rB88pb5obDgNZrNHrQ4e6WpjonchcpuBRnZM= +github.com/onsi/ginkgo/v2 v2.21.0/go.mod h1:7Du3c42kxCUegi0IImZ1wUQzMBVecgIHjR1C+NkhLQo= github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= -github.com/onsi/gomega v1.32.0 h1:JRYU78fJ1LPxlckP6Txi/EYqJvjtMrDC04/MM5XRHPk= -github.com/onsi/gomega v1.32.0/go.mod h1:a4x4gW6Pz2yK1MAmvluYme5lvYTn61afQ2ETw/8n4Lg= -github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/onsi/gomega v1.35.1 h1:Cwbd75ZBPxFSuZ6T+rN/WCb/gOc6YgFBXLlZLhC7Ds4= +github.com/onsi/gomega v1.35.1/go.mod h1:PvZbdDc8J6XJEpDK4HCuRBm8a6Fzp9/DmhC9C7yFlog= github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs= github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo= github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE= -github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4= -github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= +github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M= +github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc= github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod 
h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= github.com/peterhellberg/duration v0.0.0-20191119133758-ec6baeebcd10 h1:Jf08dx6hxr6aNpHzUmYitsKGm6BmCFbwDGPb27/Boyc= github.com/peterhellberg/duration v0.0.0-20191119133758-ec6baeebcd10/go.mod h1:x5xjkH61fUOJVgCCDgqNzlJvdLXiYpmMzSuum2FBOaw= -github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA= +github.com/philhofer/fwd v1.1.3-0.20240916144458-20a13a1f6b7c h1:dAMKvw0MlJT1GshSTtih8C2gDs04w8dReiOGXrGLNoY= +github.com/philhofer/fwd v1.1.3-0.20240916144458-20a13a1f6b7c/go.mod h1:RqIHx9QI14HlwKwm98g9Re5prTQ6LdeRQn+gXJFxsJM= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/prometheus/client_golang v1.19.1 h1:wZWJDwK+NameRJuPGDhlnFgx8e8HN3XHQeLaYJFJBOE= -github.com/prometheus/client_golang v1.19.1/go.mod h1:mP78NwGzrVks5S2H6ab8+ZZGJLZUq1hoULYBAYBw1Ho= +github.com/prometheus/client_golang v1.21.1 h1:DOvXXTqVzvkIewV/CDPFdejpMCGeMcbGCQ8YOmu+Ibk= +github.com/prometheus/client_golang v1.21.1/go.mod h1:U9NM32ykUErtVBxdvD3zfi+EuFkkaBvMb09mIfe0Zgg= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E= github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY= -github.com/prometheus/common v0.55.0 h1:KEi6DK7lXW/m7Ig5i47x0vRzuBsHuvJdi5ee6Y3G1dc= -github.com/prometheus/common v0.55.0/go.mod h1:2SECS4xJG1kd8XF9IcM1gMX6510RAEL65zxzNImwdc8= +github.com/prometheus/common v0.62.0 h1:xasJaQlnWAeyHdUBeGjXmutelfJHWMRr+Fg4QszZ2Io= +github.com/prometheus/common v0.62.0/go.mod h1:vyBcEuLSvWos9B1+CyL7JZ2up+uFzXhkqml0W5zIY1I= github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc= github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk= github.com/robfig/cron v1.2.0 h1:ZjScXvvxeQ63Dbyxy76Fj3AT3Ut0aKsyd2/tl3DTMuQ= @@ -621,18 +653,17 @@ github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6L github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= -github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= -github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc= +github.com/rogpeppe/go-internal v1.13.1 
h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= +github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= +github.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU= +github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ= -github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4= -github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE= -github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= +github.com/sagikazarmark/locafero v0.7.0 h1:5MqpDsTGNDhY8sGp0Aowyf0qKsPrhewaLSsFaodPcyo= +github.com/sagikazarmark/locafero v0.7.0/go.mod h1:2za3Cg5rMaTMoG/2Ulr9AwtFaIppKXTRYnozin4aB5k= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= -github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ= -github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k= +github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= @@ -642,19 +673,21 @@ github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9yS github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= -github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= -github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY= -github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0= -github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= +github.com/spf13/afero v1.12.0 h1:UcOPyRBYczmFn6yvphxkn9ZEOY65cpwGKb5mL36mrqs= +github.com/spf13/afero v1.12.0/go.mod h1:ZTlWwG4/ahT8W7T0WQ5uYmjI9duaLQGy3Q2OAl4sk/4= +github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y= +github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= -github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0= +github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.18.2 
h1:LUXCnvUvSM6FXAsj6nnfc8Q2tp1dIgUfY9Kc8GsSOiQ= -github.com/spf13/viper v1.18.2/go.mod h1:EKmWIqdnk5lOcmR72yw6hS+8OPYcwD0jteitLMVB+yk= +github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o= +github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.20.0 h1:zrxIyR3RQIOsarIrgL8+sAvALXul9jeEPa06Y0Ph6vY= +github.com/spf13/viper v1.20.0/go.mod h1:P9Mdzt1zoHIG8m2eZQinpiBjo6kCmZSKBClNNqjJvu4= +github.com/spiffe/go-spiffe/v2 v2.5.0 h1:N2I01KCUkv1FAjZXJMwh95KK1ZIQLYbPfhaxw8WS0hE= +github.com/spiffe/go-spiffe/v2 v2.5.0/go.mod h1:P+NxobPc6wXhVtINNtFjNWGBTreew1GBUCwT2wPmb7g= github.com/stoewer/go-strcase v1.3.0 h1:g0eASXYtp+yvN9fK8sH94oCIk0fau9uV1/ZdJ0AVEzs= github.com/stoewer/go-strcase v1.3.0/go.mod h1:fAH5hQ5pehh+j3nZfvwdk2RgEgQjAoM8wodgtPmh1xo= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= @@ -662,6 +695,7 @@ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -673,25 +707,34 @@ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/tinylib/msgp v1.3.0 h1:ULuf7GPooDaIlbyvgAxBV/FI7ynli6LZ1/nVUNu+0ww= +github.com/tinylib/msgp v1.3.0/go.mod h1:ykjzy2wzgrlvpDCRc4LA8UXy6D8bzMSuAF3WD57Gok0= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo= github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= github.com/vektah/gqlparser v1.1.2/go.mod h1:1ycwN7Ij5njmMkPPAOaRFY4rET2Enx7IkVv3vaXspKw= +github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= +github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= +github.com/xdg-go/scram v1.1.2/go.mod 
h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4= github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM= +github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= github.com/xdg/stringprep v0.0.0-20180714160509-73f8eece6fdc/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM= +github.com/zeebo/errs v1.4.0/go.mod h1:sgbWHsvVuTPHcqJJGQ1WhI5KbWlHYz+2+2C/LSEtCw4= go.mongodb.org/mongo-driver v1.0.3/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM= go.mongodb.org/mongo-driver v1.1.1/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM= go.mongodb.org/mongo-driver v1.3.0/go.mod h1:MSWZXKOynuguX+JSvwP8i+58jYCXxbia8HS3gZBapIE= @@ -701,31 +744,44 @@ go.mongodb.org/mongo-driver v1.4.4/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4S go.mongodb.org/mongo-driver v1.4.6/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc= go.mongodb.org/mongo-driver v1.5.1/go.mod h1:gRXCHX4Jo7J0IJ1oDQyUxF7jfy19UfxniMS4xxMmUqw= go.mongodb.org/mongo-driver v1.7.3/go.mod h1:NqaYOwnXWr5Pm7AOpO5QFxKJ503nbMse/R79oO62zWg= -go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80= -go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= +go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk= +go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.53.0 h1:9G6E0TXzGFVfTnawRzrPl83iHOAV7L8NJiR8RSGYV1g= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.53.0/go.mod h1:azvtTADFQJA8mX80jIH/akaE7h+dbm/sVuaHqN13w74= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.53.0 h1:4K4tsIXefpVJtvA/8srF4V4y0akAoPHkIslgAkjixJA= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.53.0/go.mod h1:jjdQuTGVsXV4vSs+CJ2qYDeDPf9yIJV23qlIzBm73Vg= -go.opentelemetry.io/otel v1.32.0 h1:WnBN+Xjcteh0zdk01SVqV55d/m62NJLJdIyb4y/WO5U= -go.opentelemetry.io/otel v1.32.0/go.mod h1:00DCVSB0RQcnzlwyTfqtxSm+DRr9hpYrHjNGiBHVQIg= -go.opentelemetry.io/otel/metric v1.32.0 h1:xV2umtmNcThh2/a/aCP+h64Xx5wsj8qqnkYZktzNa0M= -go.opentelemetry.io/otel/metric v1.32.0/go.mod h1:jH7CIbbK6SH2V2wE16W05BHCtIDzauciCRLoc/SyMv8= -go.opentelemetry.io/otel/sdk v1.32.0 h1:RNxepc9vK59A8XsgZQouW8ue8Gkb4jpWtJm9ge5lEG4= -go.opentelemetry.io/otel/sdk v1.32.0/go.mod h1:LqgegDBjKMmb2GC6/PrTnteJG39I8/vJCAP9LlJXEjU= -go.opentelemetry.io/otel/trace v1.32.0 h1:WIC9mYrXf8TmY/EXuULKc8hR17vE+Hjv2cssQDe03fM= -go.opentelemetry.io/otel/trace v1.32.0/go.mod h1:+i4rkvCraA+tG6AzwloGaCtkx53Fa+L+V8e9a7YvhT8= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= 
+go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/contrib/detectors/gcp v1.35.0 h1:bGvFt68+KTiAKFlacHW6AhA56GF2rS0bdD3aJYEnmzA= +go.opentelemetry.io/contrib/detectors/gcp v1.35.0/go.mod h1:qGWP8/+ILwMRIUf9uIVLloR1uo5ZYAslM4O6OqUi1DA= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.59.0 h1:rgMkmiGfix9vFJDcDi1PK8WEQP4FLQwLDfhp5ZLpFeE= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.59.0/go.mod h1:ijPqXp5P6IRRByFVVg9DY8P5HkxkHE5ARIa+86aXPf4= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.59.0 h1:CV7UdSGJt/Ao6Gp4CXckLxVRRsRgDHoI8XjbL3PDl8s= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.59.0/go.mod h1:FRmFuRJfag1IZ2dPkHnEoSFVgTVPUd2qf5Vi69hLb8I= +go.opentelemetry.io/contrib/instrumentation/runtime v0.48.0 h1:dJlCKeq+zmO5Og4kgxqPvvJrzuD/mygs1g/NYM9dAsU= +go.opentelemetry.io/contrib/instrumentation/runtime v0.48.0/go.mod h1:p+hpBCpLHpuUrR0lHgnHbUnbCBll1IhrcMIlycC+xYs= +go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ= +go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.23.0 h1:97CpJflo7dJK4A4SLMNoP2loDEAiG0ifF6MnLhtSHUY= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.23.0/go.mod h1:YzC+4JHcK24PylBTZ78U0XJSYbhHY0uHYNqr+OlcLCs= +go.opentelemetry.io/otel/exporters/prometheus v0.45.1 h1:R/bW3afad6q6VGU+MFYpnEdo0stEARMCdhWu6+JI6aI= +go.opentelemetry.io/otel/exporters/prometheus v0.45.1/go.mod h1:wnHAfKRav5Dfp4iZhyWZ7SzQfT+rDZpEpYG7To+qJ1k= +go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.29.0 h1:WDdP9acbMYjbKIyJUhTvtzj601sVJOqgWdUxSdR/Ysc= +go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.29.0/go.mod h1:BLbf7zbNIONBLPwvFnwNHGj4zge8uTCM/UPIVW1Mq2I= +go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M= +go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE= +go.opentelemetry.io/otel/sdk v1.35.0 h1:iPctf8iprVySXSKJffSS79eOjl9pvxV9ZqOWT0QejKY= +go.opentelemetry.io/otel/sdk v1.35.0/go.mod h1:+ga1bZliga3DxJ3CQGg3updiaAJoNECOgJREo9KHGQg= +go.opentelemetry.io/otel/sdk/metric v1.35.0 h1:1RriWBmCKgkeHEhM7a2uMjMUfP7MsOF5JpUCaEqEI9o= +go.opentelemetry.io/otel/sdk/metric v1.35.0/go.mod h1:is6XYCUMpcKi+ZsOvfluY5YstFnhW0BidkR+gL+qN+w= +go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs= +go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= -go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.opentelemetry.io/proto/otlp v1.3.1 h1:TrMUixzpM0yuc/znrFTP9MMRh8trP93mkCiDVeXrui0= +go.opentelemetry.io/proto/otlp v1.3.1/go.mod h1:0X1WI4de4ZsLrrJNLAQbFeLCm3T7yBkR0XqQ7niQU+8= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= -go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= -go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= go.uber.org/zap v1.27.0 
h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= gocloud.dev v0.40.0 h1:f8LgP+4WDqOG/RXoUcyLpeIAGOcAbZrZbDQCUee10ng= @@ -742,16 +798,17 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20200220183623-bac4c82f6975/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20201016220609-9e8e0b390897/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= -golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/crypto v0.16.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= -golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34= -golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= +golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= +golang.org/x/crypto v0.24.0/go.mod h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5DM= +golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8= +golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8= golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY= @@ -765,8 +822,10 @@ golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91 golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/mod v0.19.0 h1:fEdghXQSo20giMthA7cd28ZC+jts4amQ3YMXiP5oMQ8= -golang.org/x/mod v0.19.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w= +golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -794,26 +853,26 @@ golang.org/x/net 
v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM= -golang.org/x/net v0.0.0-20210610132358-84b48f89b13b/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211216030914-fe4d6282115f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= -golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= -golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= -golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8= -golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= +golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE= +golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY= +golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.22.0 h1:BzDx2FehcG7jJwgWLELCdmLuxk2i+x9UDpSiss2u0ZA= -golang.org/x/oauth2 v0.22.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= +golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -826,8 +885,9 @@ golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.12.0 
h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw= -golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= +golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20170830134202-bb24a47a89ea/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -855,26 +915,29 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= -golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= +golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= -golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU= golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0= golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= -golang.org/x/term v0.30.0 h1:PQ39fJZ+mfadBm0y5WlL4vlM7Sx1Hgf13sMIY2+QS9Y= -golang.org/x/term v0.30.0/go.mod h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/term 
v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= +golang.org/x/term v0.21.0/go.mod h1:ooXLefLobQVslOqselCNF4SxFAaoS6KujMbsGzSDmX0= +golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg= +golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -884,18 +947,19 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= -golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= +golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= +golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= +golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.6.0 h1:eTDhh4ZXt5Qf0augr54TN6suAUudPcawVZeIAPU7D4U= -golang.org/x/time v0.6.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= +golang.org/x/time v0.11.0 h1:/bpjEDfN9tkoN/ryeYHnv5hcMlc8ncjMcM4XBk5NWV0= +golang.org/x/time v0.11.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -918,8 +982,9 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps= -golang.org/x/tools v0.23.0 h1:SGsXPZ+2l4JsgaCKkx+FQ9YZ5XEtA1GZYuoDjenLjvg= -golang.org/x/tools v0.23.0/go.mod h1:pnu6ufv6vQkll6szChhK3C3L/ruaIv5eBeztNG8wtsI= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= +golang.org/x/tools v0.33.0 h1:4qz2S3zmRxbGIhDIAgjxvFutSvH5EfnsYrRBj0UI0bc= +golang.org/x/tools 
v0.33.0/go.mod h1:CIJMaWEY88juyUfo7UbgPqbC8rU2OqfAV1h2Qp0oMYI= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -929,8 +994,8 @@ golang.org/x/xerrors v0.0.0-20240716161551-93cc26a95ae9/go.mod h1:NDW/Ps6MPRej6f gomodules.xyz/jsonpatch/v2 v2.4.0 h1:Ci3iUJyx9UeRx7CeFN8ARgGbkESwJK+KB9lLcWxY/Zw= gomodules.xyz/jsonpatch/v2 v2.4.0/go.mod h1:AH3dM2RI6uoBZxn3LVrfvJ3E0/9dG4cSrbuBJT4moAY= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= -google.golang.org/api v0.191.0 h1:cJcF09Z+4HAB2t5qTQM1ZtfL/PemsLFkcFG67qq2afk= -google.golang.org/api v0.191.0/go.mod h1:tD5dsFGxFza0hnQveGfVk9QQYKcfp+VzgRqyXFxE0+E= +google.golang.org/api v0.228.0 h1:X2DJ/uoWGnY5obVjewbp8icSL5U4FzuCfy9OjbLSnLs= +google.golang.org/api v0.228.0/go.mod h1:wNvRS1Pbe8r4+IfBIniV8fwCpGwTrYa+kMUDiC5z5a4= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -938,30 +1003,28 @@ google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoA google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20211221231510-d629cc9a93d5/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20240812133136-8ffd90a71988 h1:CT2Thj5AuPV9phrYMtzX11k+XkzMGfRAet42PmoTATM= -google.golang.org/genproto v0.0.0-20240812133136-8ffd90a71988/go.mod h1:7uvplUBj4RjHAxIZ//98LzOvrQ04JBkaixRmCMI29hc= -google.golang.org/genproto/googleapis/api v0.0.0-20240812133136-8ffd90a71988 h1:+/tmTy5zAieooKIXfzDm9KiA3Bv6JBwriRN9LY+yayk= -google.golang.org/genproto/googleapis/api v0.0.0-20240812133136-8ffd90a71988/go.mod h1:4+X6GvPs+25wZKbQq9qyAXrwIRExv7w0Ea6MgZLZiDM= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240812133136-8ffd90a71988 h1:V71AcdLZr2p8dC9dbOIMCpqi4EmRl8wUwnJzXXLmbmc= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240812133136-8ffd90a71988/go.mod h1:Ue6ibwXGpU+dqIcODieyLOcgj7z8+IcskoNIgZxtrFY= +google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb h1:ITgPrl429bc6+2ZraNSzMDk3I95nmQln2fuPstKwFDE= +google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb/go.mod h1:sAo5UzpjUwgFBCzupwhcLcxHVDK7vG5IqI30YnwX2eE= +google.golang.org/genproto/googleapis/api v0.0.0-20250715232539-7130f93afb79 h1:iOye66xuaAK0WnkPuhQPUFy8eJcmwUXqGGP3om6IxX8= +google.golang.org/genproto/googleapis/api v0.0.0-20250715232539-7130f93afb79/go.mod h1:HKJDgKsFUnv5VAGeQjz8kxcgDP0HoE0iZNp0OdZNlhE= 
+google.golang.org/genproto/googleapis/rpc v0.0.0-20250715232539-7130f93afb79 h1:1ZwqphdOdWYXsUHgMpU/101nCtf/kSp9hOrcvFsnl10= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250715232539-7130f93afb79/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.43.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= -google.golang.org/grpc v1.65.0 h1:bs/cUb4lp1G5iImFFd3u5ixQzweKizoZJAwBNLR42lc= -google.golang.org/grpc v1.65.0/go.mod h1:WgYC2ypjlB0EiQi6wdKixMqukr6lBc0Vo+oOgjrM5ZQ= -google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0 h1:M1YKkFIboKNieVO5DLUEVzQfGwJD30Nv2jfUgzb5UcE= -google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= +google.golang.org/grpc v1.73.0 h1:VIWSmpI2MegBtTuFt5/JWy2oXxtjJ/e89Z70ImfD2ok= +google.golang.org/grpc v1.73.0/go.mod h1:50sbHOUqWoCQGI8V2HQLJM0B+LMlIUjNSZmow7EVBQc= +google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.5.1 h1:F29+wU6Ee6qgu9TddPgooOdaqsxTMunOoj8KA5yuS5A= +google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.5.1/go.mod h1:5KF+wpkbTSbGcR9zteSqZV6fqFOWBl4Yde8En8MryZA= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -974,18 +1037,19 @@ google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlba google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= -google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= +google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= +google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod 
h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/evanphx/json-patch.v4 v4.12.0 h1:n6jtcsulIzXPJaxegRbvFNNrZDjbij7ny3gmSPG+6V4= +gopkg.in/evanphx/json-patch.v4 v4.12.0/go.mod h1:p8EYWUEYMpynmqDbY58zCKCFZw8pRWMG4EsWvDvM72M= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= -gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= @@ -1002,25 +1066,33 @@ gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gorm.io/driver/mysql v1.6.0 h1:eNbLmNTpPpTOVZi8MMxCi2aaIm0ZpInbORNXDwyLGvg= +gorm.io/driver/mysql v1.6.0/go.mod h1:D/oCC2GWK3M/dqoLxnOlaNKmXz8WNTfcS9y5ovaSqKo= +gorm.io/driver/postgres v1.6.0 h1:2dxzU8xJ+ivvqTRph34QX+WrRaJlmfyPqXmoGVjMBa4= +gorm.io/driver/postgres v1.6.0/go.mod h1:vUw0mrGgrTK+uPHEhAdV4sfFELrByKVGnaVRkXDhtWo= +gorm.io/driver/sqlite v1.6.0 h1:WHRRrIiulaPiPFmDcod6prc4l2VGVWHz80KspNsxSfQ= +gorm.io/driver/sqlite v1.6.0/go.mod h1:AO9V1qIQddBESngQUKWL9yoH93HIeA1X6V633rBwyT8= +gorm.io/gorm v1.30.1 h1:lSHg33jJTBxs2mgJRfRZeLDG+WZaHYCk3Wtfl6Ngzo4= +gorm.io/gorm v1.30.1/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= k8s.io/api v0.17.8/go.mod h1:N++Llhs8kCixMUoCaXXAyMMPbo8dDVnh+IQ36xZV2/0= -k8s.io/api v0.30.1 h1:kCm/6mADMdbAxmIh0LBjS54nQBE+U4KmbCfIkF5CpJY= -k8s.io/api v0.30.1/go.mod h1:ddbN2C0+0DIiPntan/bye3SW3PdwLa11/0yqwvuRrJM= -k8s.io/apiextensions-apiserver v0.30.1 h1:4fAJZ9985BmpJG6PkoxVRpXv9vmPUOVzl614xarePws= -k8s.io/apiextensions-apiserver v0.30.1/go.mod h1:R4GuSrlhgq43oRY9sF2IToFh7PVlF1JjfWdoG3pixk4= +k8s.io/api v0.32.2 h1:bZrMLEkgizC24G9eViHGOPbW+aRo9duEISRIJKfdJuw= +k8s.io/api v0.32.2/go.mod h1:hKlhk4x1sJyYnHENsrdCWw31FEmCijNGPJO5WzHiJ6Y= +k8s.io/apiextensions-apiserver v0.31.0 h1:fZgCVhGwsclj3qCw1buVXCV6khjRzKC5eCFt24kyLSk= +k8s.io/apiextensions-apiserver v0.31.0/go.mod h1:b9aMDEYaEe5sdK+1T0KU78ApR/5ZVp4i56VacZYEHxk= k8s.io/apimachinery v0.17.8/go.mod h1:Lg8zZ5iC/O8UjCqW6DNhcQG2m4TdjF9kwG3891OWbbA= -k8s.io/apimachinery v0.30.1 h1:ZQStsEfo4n65yAdlGTfP/uSHMQSoYzU/oeEbkmF7P2U= -k8s.io/apimachinery v0.30.1/go.mod h1:iexa2somDaxdnj7bha06bhb43Zpa6eWH8N8dbqVjTUc= +k8s.io/apimachinery v0.32.2 h1:yoQBR9ZGkA6Rgmhbp/yuT9/g+4lxtsGYwW6dR6BDPLQ= +k8s.io/apimachinery v0.32.2/go.mod h1:GpHVgxoKlTxClKcteaeuF1Ul/lDVb74KpZcxcmLDElE= k8s.io/client-go v0.17.8/go.mod h1:SJsDS64AAtt9VZyeaQMb4Ck5etCitZ/FwajWdzua5eY= -k8s.io/client-go v0.30.1 h1:uC/Ir6A3R46wdkgCV3vbLyNOYyCJ8oZnjtJGKfytl/Q= -k8s.io/client-go v0.30.1/go.mod h1:wrAqLNs2trwiCH/wxxmT/x3hKVH9PuV0GGW0oDoHVqc= -k8s.io/code-generator v0.30.1 
h1:ZsG++q5Vt0ScmKCeLhynUuWgcwFGg1Hl1AGfatqPJBI= -k8s.io/code-generator v0.30.1/go.mod h1:hFgxRsvOUg79mbpbVKfjJvRhVz1qLoe40yZDJ/hwRH4= +k8s.io/client-go v0.32.2 h1:4dYCD4Nz+9RApM2b/3BtVvBHw54QjMFUl1OLcJG5yOA= +k8s.io/client-go v0.32.2/go.mod h1:fpZ4oJXclZ3r2nDOv+Ux3XcJutfrwjKTCHz2H3sww94= +k8s.io/code-generator v0.31.0 h1:w607nrMi1KeDKB3/F/J4lIoOgAwc+gV9ZKew4XRfMp8= +k8s.io/code-generator v0.31.0/go.mod h1:84y4w3es8rOJOUUP1rLsIiGlO1JuEaPFXQPA9e/K6U0= k8s.io/gengo v0.0.0-20190128074634-0689ccc1d7d6/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= -k8s.io/gengo/v2 v2.0.0-20240228010128-51d4e06bde70 h1:NGrVE502P0s0/1hudf8zjgwki1X/TByhmAoILTarmzo= -k8s.io/gengo/v2 v2.0.0-20240228010128-51d4e06bde70/go.mod h1:VH3AT8AaQOqiGjMF9p0/IM1Dj+82ZwjfxUP1IxaHE+8= +k8s.io/gengo/v2 v2.0.0-20240826214909-a7b603a56eb7 h1:cErOOTkQ3JW19o4lo91fFurouhP8NcoBvb7CkvhZZpk= +k8s.io/gengo/v2 v2.0.0-20240826214909-a7b603a56eb7/go.mod h1:EJykeLsmFC60UQbYJezXkEsG2FLrt0GPNkU5iK5GWxU= k8s.io/klog v0.0.0-20181102134211-b9b56d5dfc92/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= k8s.io/klog v0.3.0/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I= @@ -1028,18 +1100,21 @@ k8s.io/klog/v2 v2.5.0/go.mod h1:hy9LJ/NvuK+iVyP4Ehqva4HxZG/oXyIS3n3Jmire4Ec= k8s.io/klog/v2 v2.130.1 h1:n9Xl7H1Xvksem4KFG4PYbdQCQxqc/tTUyrgXaOhHSzk= k8s.io/klog/v2 v2.130.1/go.mod h1:3Jpz1GvMt720eyJH1ckRHK1EDfpxISzJ7I9OYgaDtPE= k8s.io/kube-openapi v0.0.0-20200410145947-bcb3869e6f29/go.mod h1:F+5wygcW0wmRTnM3cOgIqGivxkwSWIWT5YdsDbeAOaU= -k8s.io/kube-openapi v0.0.0-20240228011516-70dd3763d340 h1:BZqlfIlq5YbRMFko6/PM7FjZpUb45WallggurYhKGag= -k8s.io/kube-openapi v0.0.0-20240228011516-70dd3763d340/go.mod h1:yD4MZYeKMBwQKVht279WycxKyM84kkAx2DPrTXaeb98= +k8s.io/kube-openapi v0.0.0-20250318190949-c8a335a9a2ff h1:/usPimJzUKKu+m+TE36gUyGcf03XZEP0ZIKgKj35LS4= +k8s.io/kube-openapi v0.0.0-20250318190949-c8a335a9a2ff/go.mod h1:5jIi+8yX4RIb8wk3XwBo5Pq2ccx4FP10ohkbSKCZoK8= k8s.io/utils v0.0.0-20191114184206-e782cd3c129f/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew= -k8s.io/utils v0.0.0-20230726121419-3b25d923346b h1:sgn3ZU783SCgtaSJjpcVVlRqd6GSnlTLKgpAAttJvpI= -k8s.io/utils v0.0.0-20230726121419-3b25d923346b/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= -sigs.k8s.io/controller-runtime v0.18.6 h1:UnEoLBLDpQwzJ2jYh6aTdiMhGjNDR7IdFn9YEqHIccc= -sigs.k8s.io/controller-runtime v0.18.6/go.mod h1:Dcsa9v8AEBWa3sQNJHsuWPT4ICv99irl5wj83NiC12U= -sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd h1:EDPBXCAspyGV4jQlpZSudPeMmr1bNJefnuqLsRAsHZo= -sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= +k8s.io/utils v0.0.0-20241210054802-24370beab758 h1:sdbE21q2nlQtFh65saZY+rRM6x6aJJI8IUa1AmH/qa0= +k8s.io/utils v0.0.0-20241210054802-24370beab758/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= +sigs.k8s.io/controller-runtime v0.19.0 h1:nWVM7aq+Il2ABxwiCizrVDSlmDcshi9llbaFbC0ji/Q= +sigs.k8s.io/controller-runtime v0.19.0/go.mod h1:iRmWllt8IlaLjvTTDLhRBXIEtkCK6hwVBJJsYS9Ajf4= +sigs.k8s.io/json v0.0.0-20241010143419-9aa6b5e7a4b3 h1:/Rv+M11QRah1itp8VhT6HoVx1Ray9eB4DBr+K+/sCJ8= +sigs.k8s.io/json v0.0.0-20241010143419-9aa6b5e7a4b3/go.mod h1:18nIHnGi6636UCz6m8i4DhaJ65T6EruyzmoQqI2BVDo= +sigs.k8s.io/randfill v0.0.0-20250304075658-069ef1bbf016/go.mod h1:XeLlZ/jmk4i1HRopwe7/aU3H5n1zNUcX6TM94b3QxOY= +sigs.k8s.io/randfill v1.0.0 h1:JfjMILfT8A6RbawdsK2JXGBR5AQVfd+9TbzrlneTyrU= +sigs.k8s.io/randfill v1.0.0/go.mod 
h1:XeLlZ/jmk4i1HRopwe7/aU3H5n1zNUcX6TM94b3QxOY= sigs.k8s.io/structured-merge-diff/v2 v2.0.1/go.mod h1:Wb7vfKAodbKgf6tn1Kl0VvGj7mRH6DGaRcixXEJXTsE= -sigs.k8s.io/structured-merge-diff/v4 v4.4.1 h1:150L+0vs/8DA78h1u02ooW1/fFq/Lwr+sGiqlzvrtq4= -sigs.k8s.io/structured-merge-diff/v4 v4.4.1/go.mod h1:N8hJocpFajUSSeSJ9bOZ77VzejKZaXsTtZo4/u7Io08= +sigs.k8s.io/structured-merge-diff/v4 v4.6.0 h1:IUA9nvMmnKWcj5jl84xn+T5MnlZKThmUW1TdblaLVAc= +sigs.k8s.io/structured-merge-diff/v4 v4.6.0/go.mod h1:dDy58f92j70zLsuZVuUX5Wp9vtxXpaZnkPGWeqDfCps= sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E= sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY= diff --git a/kubernetes_platform/Makefile b/kubernetes_platform/Makefile index 24620ce69a4..8a5387fe029 100644 --- a/kubernetes_platform/Makefile +++ b/kubernetes_platform/Makefile @@ -12,7 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -PREBUILT_REMOTE_IMAGE=ghcr.io/kubeflow/kfp-api-generator:1.1 +PREBUILT_REMOTE_IMAGE=ghcr.io/kubeflow/kfp-api-generator:master + +# Set USE_FIND_LINKS=true to use the local sdk and pipeline_spec dists for python builds rather than pulling from pypi +# Applies to the `make python` and `make python-dev` commands. +# e.g. make python USE_FIND_LINKS=true FIND_LINKS_PATH="path1 path2 ..." +FIND_LINKS_PATH := /go/src/github.com/kubeflow/pipelines/sdk/python/dist /go/src/github.com/kubeflow/pipelines/api/v2alpha1/python/dist .PHONY: all all: golang python @@ -34,15 +39,27 @@ golang: proto/*.proto clean-go: rm -rf kubernetes_platform/go +.PHONY: fetch-protos +fetch-protos: + make -C ../api fetch-protos -# Build and locally install Python package. +# Build Python package using pre-built image .PHONY: python -python: proto/kubernetes_executor_config.proto - python3 python/generate_proto.py && cd python && python3 setup.py bdist_wheel +python: proto/kubernetes_executor_config.proto fetch-protos + docker run --interactive --rm \ + --user $$(id -u):$$(id -g) \ + -e HOME=/tmp \ + $(if $(USE_FIND_LINKS),-e PIP_FIND_LINKS="$(FIND_LINKS_PATH)") \ + -v "$$(pwd)/..":"/go/src/github.com/kubeflow/pipelines":z \ + $(PREBUILT_REMOTE_IMAGE) \ + sh -c 'cd /go/src/github.com/kubeflow/pipelines/kubernetes_platform/python && \ + python3 -m pip install --user --break-system-packages -r requirements.txt && \ + python3 generate_proto.py && \ + python3 setup.py sdist bdist_wheel --dist-dir ./dist' -# Build and locally install Python package using editable mode for development. 
+# Build and install in editable mode using pre-built image .PHONY: python-dev -python-dev: proto/kubernetes_executor_config.proto +python-dev: proto/kubernetes_executor_config.proto fetch-protos python3 python/generate_proto.py && cd python && pip install -e .[dev] # Delete all generated Python packages diff --git a/kubernetes_platform/go.mod b/kubernetes_platform/go.mod index 777f5ec0e83..203cd1d43ba 100644 --- a/kubernetes_platform/go.mod +++ b/kubernetes_platform/go.mod @@ -1,16 +1,12 @@ module github.com/kubeflow/pipelines/kubernetes_platform -go 1.23 +go 1.24.6 require ( github.com/kubeflow/pipelines/api v0.0.0-00010101000000-000000000000 - google.golang.org/protobuf v1.33.0 + google.golang.org/protobuf v1.36.6 ) require google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect -replace ( - github.com/kubeflow/pipelines/api => ../api - github.com/mattn/go-sqlite3 => github.com/mattn/go-sqlite3 v1.14.18 - google.golang.org/grpc => google.golang.org/grpc v1.56.3 -) +replace github.com/kubeflow/pipelines/api => ../api diff --git a/kubernetes_platform/go.sum b/kubernetes_platform/go.sum index a518c05975e..bc85e803460 100644 --- a/kubernetes_platform/go.sum +++ b/kubernetes_platform/go.sum @@ -1,250 +1,6 @@ -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.110.0/go.mod h1:SJnCLqQ0FCFGSZMUNUf84MV3Aia54kn7pi8st7tMzaY= -cloud.google.com/go/accessapproval v1.6.0/go.mod h1:R0EiYnwV5fsRFiKZkPHr6mwyk2wxUJ30nL4j2pcFY2E= -cloud.google.com/go/accesscontextmanager v1.7.0/go.mod h1:CEGLewx8dwa33aDAZQujl7Dx+uYhS0eay198wB/VumQ= -cloud.google.com/go/aiplatform v1.37.0/go.mod h1:IU2Cv29Lv9oCn/9LkFiiuKfwrRTq+QQMbW+hPCxJGZw= -cloud.google.com/go/analytics v0.19.0/go.mod h1:k8liqf5/HCnOUkbawNtrWWc+UAzyDlW89doe8TtoDsE= -cloud.google.com/go/apigateway v1.5.0/go.mod h1:GpnZR3Q4rR7LVu5951qfXPJCHquZt02jf7xQx7kpqN8= -cloud.google.com/go/apigeeconnect v1.5.0/go.mod h1:KFaCqvBRU6idyhSNyn3vlHXc8VMDJdRmwDF6JyFRqZ8= -cloud.google.com/go/apigeeregistry v0.6.0/go.mod h1:BFNzW7yQVLZ3yj0TKcwzb8n25CFBri51GVGOEUcgQsc= -cloud.google.com/go/apikeys v0.6.0/go.mod h1:kbpXu5upyiAlGkKrJgQl8A0rKNNJ7dQ377pdroRSSi8= -cloud.google.com/go/appengine v1.7.1/go.mod h1:IHLToyb/3fKutRysUlFO0BPt5j7RiQ45nrzEJmKTo6E= -cloud.google.com/go/area120 v0.7.1/go.mod h1:j84i4E1RboTWjKtZVWXPqvK5VHQFJRF2c1Nm69pWm9k= -cloud.google.com/go/artifactregistry v1.13.0/go.mod h1:uy/LNfoOIivepGhooAUpL1i30Hgee3Cu0l4VTWHUC08= -cloud.google.com/go/asset v1.13.0/go.mod h1:WQAMyYek/b7NBpYq/K4KJWcRqzoalEsxz/t/dTk4THw= -cloud.google.com/go/assuredworkloads v1.10.0/go.mod h1:kwdUQuXcedVdsIaKgKTp9t0UJkE5+PAVNhdQm4ZVq2E= -cloud.google.com/go/automl v1.12.0/go.mod h1:tWDcHDp86aMIuHmyvjuKeeHEGq76lD7ZqfGLN6B0NuU= -cloud.google.com/go/baremetalsolution v0.5.0/go.mod h1:dXGxEkmR9BMwxhzBhV0AioD0ULBmuLZI8CdwalUxuss= -cloud.google.com/go/batch v0.7.0/go.mod h1:vLZN95s6teRUqRQ4s3RLDsH8PvboqBK+rn1oevL159g= -cloud.google.com/go/beyondcorp v0.5.0/go.mod h1:uFqj9X+dSfrheVp7ssLTaRHd2EHqSL4QZmH4e8WXGGU= -cloud.google.com/go/bigquery v1.50.0/go.mod h1:YrleYEh2pSEbgTBZYMJ5SuSr0ML3ypjRB1zgf7pvQLU= -cloud.google.com/go/billing v1.13.0/go.mod h1:7kB2W9Xf98hP9Sr12KfECgfGclsH3CQR0R08tnRlRbc= -cloud.google.com/go/binaryauthorization v1.5.0/go.mod h1:OSe4OU1nN/VswXKRBmciKpo9LulY41gch5c68htf3/Q= -cloud.google.com/go/certificatemanager v1.6.0/go.mod h1:3Hh64rCKjRAX8dXgRAyOcY5vQ/fE1sh8o+Mdd6KPgY8= -cloud.google.com/go/channel v1.12.0/go.mod 
h1:VkxCGKASi4Cq7TbXxlaBezonAYpp1GCnKMY6tnMQnLU= -cloud.google.com/go/cloudbuild v1.9.0/go.mod h1:qK1d7s4QlO0VwfYn5YuClDGg2hfmLZEb4wQGAbIgL1s= -cloud.google.com/go/clouddms v1.5.0/go.mod h1:QSxQnhikCLUw13iAbffF2CZxAER3xDGNHjsTAkQJcQA= -cloud.google.com/go/cloudtasks v1.10.0/go.mod h1:NDSoTLkZ3+vExFEWu2UJV1arUyzVDAiZtdWcsUyNwBs= -cloud.google.com/go/compute v1.19.0/go.mod h1:rikpw2y+UMidAe9tISo04EHNOIf42RLYF/q8Bs93scU= -cloud.google.com/go/contactcenterinsights v1.6.0/go.mod h1:IIDlT6CLcDoyv79kDv8iWxMSTZhLxSCofVV5W6YFM/w= -cloud.google.com/go/container v1.15.0/go.mod h1:ft+9S0WGjAyjDggg5S06DXj+fHJICWg8L7isCQe9pQA= -cloud.google.com/go/containeranalysis v0.9.0/go.mod h1:orbOANbwk5Ejoom+s+DUCTTJ7IBdBQJDcSylAx/on9s= -cloud.google.com/go/datacatalog v1.13.0/go.mod h1:E4Rj9a5ZtAxcQJlEBTLgMTphfP11/lNaAshpoBgemX8= -cloud.google.com/go/dataflow v0.8.0/go.mod h1:Rcf5YgTKPtQyYz8bLYhFoIV/vP39eL7fWNcSOyFfLJE= -cloud.google.com/go/dataform v0.7.0/go.mod h1:7NulqnVozfHvWUBpMDfKMUESr+85aJsC/2O0o3jWPDE= -cloud.google.com/go/datafusion v1.6.0/go.mod h1:WBsMF8F1RhSXvVM8rCV3AeyWVxcC2xY6vith3iw3S+8= -cloud.google.com/go/datalabeling v0.7.0/go.mod h1:WPQb1y08RJbmpM3ww0CSUAGweL0SxByuW2E+FU+wXcM= -cloud.google.com/go/dataplex v1.6.0/go.mod h1:bMsomC/aEJOSpHXdFKFGQ1b0TDPIeL28nJObeO1ppRs= -cloud.google.com/go/dataproc v1.12.0/go.mod h1:zrF3aX0uV3ikkMz6z4uBbIKyhRITnxvr4i3IjKsKrw4= -cloud.google.com/go/dataqna v0.7.0/go.mod h1:Lx9OcIIeqCrw1a6KdO3/5KMP1wAmTc0slZWwP12Qq3c= -cloud.google.com/go/datastore v1.11.0/go.mod h1:TvGxBIHCS50u8jzG+AW/ppf87v1of8nwzFNgEZU1D3c= -cloud.google.com/go/datastream v1.7.0/go.mod h1:uxVRMm2elUSPuh65IbZpzJNMbuzkcvu5CjMqVIUHrww= -cloud.google.com/go/deploy v1.8.0/go.mod h1:z3myEJnA/2wnB4sgjqdMfgxCA0EqC3RBTNcVPs93mtQ= -cloud.google.com/go/dialogflow v1.32.0/go.mod h1:jG9TRJl8CKrDhMEcvfcfFkkpp8ZhgPz3sBGmAUYJ2qE= -cloud.google.com/go/dlp v1.9.0/go.mod h1:qdgmqgTyReTz5/YNSSuueR8pl7hO0o9bQ39ZhtgkWp4= -cloud.google.com/go/documentai v1.18.0/go.mod h1:F6CK6iUH8J81FehpskRmhLq/3VlwQvb7TvwOceQ2tbs= -cloud.google.com/go/domains v0.8.0/go.mod h1:M9i3MMDzGFXsydri9/vW+EWz9sWb4I6WyHqdlAk0idE= -cloud.google.com/go/edgecontainer v1.0.0/go.mod h1:cttArqZpBB2q58W/upSG++ooo6EsblxDIolxa3jSjbY= -cloud.google.com/go/errorreporting v0.3.0/go.mod h1:xsP2yaAp+OAW4OIm60An2bbLpqIhKXdWR/tawvl7QzU= -cloud.google.com/go/essentialcontacts v1.5.0/go.mod h1:ay29Z4zODTuwliK7SnX8E86aUF2CTzdNtvv42niCX0M= -cloud.google.com/go/eventarc v1.11.0/go.mod h1:PyUjsUKPWoRBCHeOxZd/lbOOjahV41icXyUY5kSTvVY= -cloud.google.com/go/filestore v1.6.0/go.mod h1:di5unNuss/qfZTw2U9nhFqo8/ZDSc466dre85Kydllg= -cloud.google.com/go/firestore v1.9.0/go.mod h1:HMkjKHNTtRyZNiMzu7YAsLr9K3X2udY2AMwDaMEQiiE= -cloud.google.com/go/functions v1.13.0/go.mod h1:EU4O007sQm6Ef/PwRsI8N2umygGqPBS/IZQKBQBcJ3c= -cloud.google.com/go/gaming v1.9.0/go.mod h1:Fc7kEmCObylSWLO334NcO+O9QMDyz+TKC4v1D7X+Bc0= -cloud.google.com/go/gkebackup v0.4.0/go.mod h1:byAyBGUwYGEEww7xsbnUTBHIYcOPy/PgUWUtOeRm9Vg= -cloud.google.com/go/gkeconnect v0.7.0/go.mod h1:SNfmVqPkaEi3bF/B3CNZOAYPYdg7sU+obZ+QTky2Myw= -cloud.google.com/go/gkehub v0.12.0/go.mod h1:djiIwwzTTBrF5NaXCGv3mf7klpEMcST17VBTVVDcuaw= -cloud.google.com/go/gkemulticloud v0.5.0/go.mod h1:W0JDkiyi3Tqh0TJr//y19wyb1yf8llHVto2Htf2Ja3Y= -cloud.google.com/go/gsuiteaddons v1.5.0/go.mod h1:TFCClYLd64Eaa12sFVmUyG62tk4mdIsI7pAnSXRkcFo= -cloud.google.com/go/iam v0.13.0/go.mod h1:ljOg+rcNfzZ5d6f1nAUJ8ZIxOaZUVoS14bKCtaLZ/D0= -cloud.google.com/go/iap v1.7.1/go.mod h1:WapEwPc7ZxGt2jFGB/C/bm+hP0Y6NXzOYGjpPnmMS74= -cloud.google.com/go/ids 
v1.3.0/go.mod h1:JBdTYwANikFKaDP6LtW5JAi4gubs57SVNQjemdt6xV4= -cloud.google.com/go/iot v1.6.0/go.mod h1:IqdAsmE2cTYYNO1Fvjfzo9po179rAtJeVGUvkLN3rLE= -cloud.google.com/go/kms v1.10.1/go.mod h1:rIWk/TryCkR59GMC3YtHtXeLzd634lBbKenvyySAyYI= -cloud.google.com/go/language v1.9.0/go.mod h1:Ns15WooPM5Ad/5no/0n81yUetis74g3zrbeJBE+ptUY= -cloud.google.com/go/lifesciences v0.8.0/go.mod h1:lFxiEOMqII6XggGbOnKiyZ7IBwoIqA84ClvoezaA/bo= -cloud.google.com/go/logging v1.7.0/go.mod h1:3xjP2CjkM3ZkO73aj4ASA5wRPGGCRrPIAeNqVNkzY8M= -cloud.google.com/go/longrunning v0.4.1/go.mod h1:4iWDqhBZ70CvZ6BfETbvam3T8FMvLK+eFj0E6AaRQTo= -cloud.google.com/go/managedidentities v1.5.0/go.mod h1:+dWcZ0JlUmpuxpIDfyP5pP5y0bLdRwOS4Lp7gMni/LA= -cloud.google.com/go/maps v0.7.0/go.mod h1:3GnvVl3cqeSvgMcpRlQidXsPYuDGQ8naBis7MVzpXsY= -cloud.google.com/go/mediatranslation v0.7.0/go.mod h1:LCnB/gZr90ONOIQLgSXagp8XUW1ODs2UmUMvcgMfI2I= -cloud.google.com/go/memcache v1.9.0/go.mod h1:8oEyzXCu+zo9RzlEaEjHl4KkgjlNDaXbCQeQWlzNFJM= -cloud.google.com/go/metastore v1.10.0/go.mod h1:fPEnH3g4JJAk+gMRnrAnoqyv2lpUCqJPWOodSaf45Eo= -cloud.google.com/go/monitoring v1.13.0/go.mod h1:k2yMBAB1H9JT/QETjNkgdCGD9bPF712XiLTVr+cBrpw= -cloud.google.com/go/networkconnectivity v1.11.0/go.mod h1:iWmDD4QF16VCDLXUqvyspJjIEtBR/4zq5hwnY2X3scM= -cloud.google.com/go/networkmanagement v1.6.0/go.mod h1:5pKPqyXjB/sgtvB5xqOemumoQNB7y95Q7S+4rjSOPYY= -cloud.google.com/go/networksecurity v0.8.0/go.mod h1:B78DkqsxFG5zRSVuwYFRZ9Xz8IcQ5iECsNrPn74hKHU= -cloud.google.com/go/notebooks v1.8.0/go.mod h1:Lq6dYKOYOWUCTvw5t2q1gp1lAp0zxAxRycayS0iJcqQ= -cloud.google.com/go/optimization v1.3.1/go.mod h1:IvUSefKiwd1a5p0RgHDbWCIbDFgKuEdB+fPPuP0IDLI= -cloud.google.com/go/orchestration v1.6.0/go.mod h1:M62Bevp7pkxStDfFfTuCOaXgaaqRAga1yKyoMtEoWPQ= -cloud.google.com/go/orgpolicy v1.10.0/go.mod h1:w1fo8b7rRqlXlIJbVhOMPrwVljyuW5mqssvBtU18ONc= -cloud.google.com/go/osconfig v1.11.0/go.mod h1:aDICxrur2ogRd9zY5ytBLV89KEgT2MKB2L/n6x1ooPw= -cloud.google.com/go/oslogin v1.9.0/go.mod h1:HNavntnH8nzrn8JCTT5fj18FuJLFJc4NaZJtBnQtKFs= -cloud.google.com/go/phishingprotection v0.7.0/go.mod h1:8qJI4QKHoda/sb/7/YmMQ2omRLSLYSu9bU0EKCNI+Lk= -cloud.google.com/go/policytroubleshooter v1.6.0/go.mod h1:zYqaPTsmfvpjm5ULxAyD/lINQxJ0DDsnWOP/GZ7xzBc= -cloud.google.com/go/privatecatalog v0.8.0/go.mod h1:nQ6pfaegeDAq/Q5lrfCQzQLhubPiZhSaNhIgfJlnIXs= -cloud.google.com/go/pubsub v1.30.0/go.mod h1:qWi1OPS0B+b5L+Sg6Gmc9zD1Y+HaM0MdUr7LsupY1P4= -cloud.google.com/go/pubsublite v1.7.0/go.mod h1:8hVMwRXfDfvGm3fahVbtDbiLePT3gpoiJYJY+vxWxVM= -cloud.google.com/go/recaptchaenterprise/v2 v2.7.0/go.mod h1:19wVj/fs5RtYtynAPJdDTb69oW0vNHYDBTbB4NvMD9c= -cloud.google.com/go/recommendationengine v0.7.0/go.mod h1:1reUcE3GIu6MeBz/h5xZJqNLuuVjNg1lmWMPyjatzac= -cloud.google.com/go/recommender v1.9.0/go.mod h1:PnSsnZY7q+VL1uax2JWkt/UegHssxjUVVCrX52CuEmQ= -cloud.google.com/go/redis v1.11.0/go.mod h1:/X6eicana+BWcUda5PpwZC48o37SiFVTFSs0fWAJ7uQ= -cloud.google.com/go/resourcemanager v1.7.0/go.mod h1:HlD3m6+bwhzj9XCouqmeiGuni95NTrExfhoSrkC/3EI= -cloud.google.com/go/resourcesettings v1.5.0/go.mod h1:+xJF7QSG6undsQDfsCJyqWXyBwUoJLhetkRMDRnIoXA= -cloud.google.com/go/retail v1.12.0/go.mod h1:UMkelN/0Z8XvKymXFbD4EhFJlYKRx1FGhQkVPU5kF14= -cloud.google.com/go/run v0.9.0/go.mod h1:Wwu+/vvg8Y+JUApMwEDfVfhetv30hCG4ZwDR/IXl2Qg= -cloud.google.com/go/scheduler v1.9.0/go.mod h1:yexg5t+KSmqu+njTIh3b7oYPheFtBWGcbVUYF1GGMIc= -cloud.google.com/go/secretmanager v1.10.0/go.mod h1:MfnrdvKMPNra9aZtQFvBcvRU54hbPD8/HayQdlUgJpU= -cloud.google.com/go/security v1.13.0/go.mod 
h1:Q1Nvxl1PAgmeW0y3HTt54JYIvUdtcpYKVfIB8AOMZ+0= -cloud.google.com/go/securitycenter v1.19.0/go.mod h1:LVLmSg8ZkkyaNy4u7HCIshAngSQ8EcIRREP3xBnyfag= -cloud.google.com/go/servicecontrol v1.11.1/go.mod h1:aSnNNlwEFBY+PWGQ2DoM0JJ/QUXqV5/ZD9DOLB7SnUk= -cloud.google.com/go/servicedirectory v1.9.0/go.mod h1:29je5JjiygNYlmsGz8k6o+OZ8vd4f//bQLtvzkPPT/s= -cloud.google.com/go/servicemanagement v1.8.0/go.mod h1:MSS2TDlIEQD/fzsSGfCdJItQveu9NXnUniTrq/L8LK4= -cloud.google.com/go/serviceusage v1.6.0/go.mod h1:R5wwQcbOWsyuOfbP9tGdAnCAc6B9DRwPG1xtWMDeuPA= -cloud.google.com/go/shell v1.6.0/go.mod h1:oHO8QACS90luWgxP3N9iZVuEiSF84zNyLytb+qE2f9A= -cloud.google.com/go/spanner v1.45.0/go.mod h1:FIws5LowYz8YAE1J8fOS7DJup8ff7xJeetWEo5REA2M= -cloud.google.com/go/speech v1.15.0/go.mod h1:y6oH7GhqCaZANH7+Oe0BhgIogsNInLlz542tg3VqeYI= -cloud.google.com/go/storagetransfer v1.8.0/go.mod h1:JpegsHHU1eXg7lMHkvf+KE5XDJ7EQu0GwNJbbVGanEw= -cloud.google.com/go/talent v1.5.0/go.mod h1:G+ODMj9bsasAEJkQSzO2uHQWXHHXUomArjWQQYkqK6c= -cloud.google.com/go/texttospeech v1.6.0/go.mod h1:YmwmFT8pj1aBblQOI3TfKmwibnsfvhIBzPXcW4EBovc= -cloud.google.com/go/tpu v1.5.0/go.mod h1:8zVo1rYDFuW2l4yZVY0R0fb/v44xLh3llq7RuV61fPM= -cloud.google.com/go/trace v1.9.0/go.mod h1:lOQqpE5IaWY0Ixg7/r2SjixMuc6lfTFeO4QGM4dQWOk= -cloud.google.com/go/translate v1.7.0/go.mod h1:lMGRudH1pu7I3n3PETiOB2507gf3HnfLV8qlkHZEyos= -cloud.google.com/go/video v1.15.0/go.mod h1:SkgaXwT+lIIAKqWAJfktHT/RbgjSuY6DobxEp0C5yTQ= -cloud.google.com/go/videointelligence v1.10.0/go.mod h1:LHZngX1liVtUhZvi2uNS0VQuOzNi2TkY1OakiuoUOjU= -cloud.google.com/go/vision/v2 v2.7.0/go.mod h1:H89VysHy21avemp6xcf9b9JvZHVehWbET0uT/bcuY/0= -cloud.google.com/go/vmmigration v1.6.0/go.mod h1:bopQ/g4z+8qXzichC7GW1w2MjbErL54rk3/C843CjfY= -cloud.google.com/go/vmwareengine v0.3.0/go.mod h1:wvoyMvNWdIzxMYSpH/R7y2h5h3WFkx6d+1TIsP39WGY= -cloud.google.com/go/vpcaccess v1.6.0/go.mod h1:wX2ILaNhe7TlVa4vC5xce1bCnqE3AeH27RV31lnmZes= -cloud.google.com/go/webrisk v1.8.0/go.mod h1:oJPDuamzHXgUc+b8SiHRcVInZQuybnvEW72PqTc7sSg= -cloud.google.com/go/websecurityscanner v1.5.0/go.mod h1:Y6xdCPy81yi0SQnDY1xdNTNpfY1oAgXUlcfN3B3eSng= -cloud.google.com/go/workflows v1.10.0/go.mod h1:fZ8LmRmZQWacon9UCX1r/g/DfAXx5VcPALq2CxzdePw= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= -github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= -github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= -github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.4/go.mod 
h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= -github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= -github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= -github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= -github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= -github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= -github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= -github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= -github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= 
-golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= -golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/tools 
v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20211026145609-4688e4c4e024 h1:aePO4E0x+Urj9V5NQHjqOpaNG4oMeHQq0l2ob05z5tI= -google.golang.org/genproto v0.0.0-20211026145609-4688e4c4e024/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 h1:KpwkzHKEF7B9Zxg18WzOa7djJ+Ha5DzthMyZYQfEn2A= google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= -google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= -google.golang.org/grpc v1.56.3/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= -google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= -google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= -google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= -google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= -google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.0/go.mod 
h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ= -google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= +google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= diff --git a/kubernetes_platform/go/kubernetesplatform/kubernetes_executor_config.pb.go b/kubernetes_platform/go/kubernetesplatform/kubernetes_executor_config.pb.go index 7a2e183de91..b691e41c9b9 100644 --- a/kubernetes_platform/go/kubernetesplatform/kubernetes_executor_config.pb.go +++ b/kubernetes_platform/go/kubernetesplatform/kubernetes_executor_config.pb.go @@ -14,8 +14,8 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.33.0 -// protoc v3.20.3 +// protoc-gen-go v1.36.6 +// protoc v6.31.1 // source: kubernetes_executor_config.proto package kubernetesplatform @@ -27,6 +27,7 @@ import ( structpb "google.golang.org/protobuf/types/known/structpb" reflect "reflect" sync "sync" + unsafe "unsafe" ) const ( @@ -37,16 +38,13 @@ const ( ) type KubernetesExecutorConfig struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - SecretAsVolume []*SecretAsVolume `protobuf:"bytes,1,rep,name=secret_as_volume,json=secretAsVolume,proto3" json:"secret_as_volume,omitempty"` - SecretAsEnv []*SecretAsEnv `protobuf:"bytes,2,rep,name=secret_as_env,json=secretAsEnv,proto3" json:"secret_as_env,omitempty"` - PvcMount []*PvcMount `protobuf:"bytes,3,rep,name=pvc_mount,json=pvcMount,proto3" json:"pvc_mount,omitempty"` - NodeSelector *NodeSelector `protobuf:"bytes,4,opt,name=node_selector,json=nodeSelector,proto3" json:"node_selector,omitempty"` - PodMetadata *PodMetadata `protobuf:"bytes,5,opt,name=pod_metadata,json=podMetadata,proto3" json:"pod_metadata,omitempty"` - ImagePullSecret []*ImagePullSecret `protobuf:"bytes,6,rep,name=image_pull_secret,json=imagePullSecret,proto3" json:"image_pull_secret,omitempty"` + state protoimpl.MessageState `protogen:"open.v1"` + SecretAsVolume []*SecretAsVolume `protobuf:"bytes,1,rep,name=secret_as_volume,json=secretAsVolume,proto3" json:"secret_as_volume,omitempty"` + SecretAsEnv []*SecretAsEnv `protobuf:"bytes,2,rep,name=secret_as_env,json=secretAsEnv,proto3" json:"secret_as_env,omitempty"` + PvcMount []*PvcMount `protobuf:"bytes,3,rep,name=pvc_mount,json=pvcMount,proto3" json:"pvc_mount,omitempty"` + NodeSelector *NodeSelector `protobuf:"bytes,4,opt,name=node_selector,json=nodeSelector,proto3" json:"node_selector,omitempty"` + PodMetadata *PodMetadata `protobuf:"bytes,5,opt,name=pod_metadata,json=podMetadata,proto3" json:"pod_metadata,omitempty"` + ImagePullSecret []*ImagePullSecret `protobuf:"bytes,6,rep,name=image_pull_secret,json=imagePullSecret,proto3" json:"image_pull_secret,omitempty"` // One of Always, Never, IfNotPresent. 
ImagePullPolicy string `protobuf:"bytes,7,opt,name=image_pull_policy,json=imagePullPolicy,proto3" json:"image_pull_policy,omitempty"` ConfigMapAsVolume []*ConfigMapAsVolume `protobuf:"bytes,8,rep,name=config_map_as_volume,json=configMapAsVolume,proto3" json:"config_map_as_volume,omitempty"` @@ -59,15 +57,15 @@ type KubernetesExecutorConfig struct { PodAffinity []*PodAffinityTerm `protobuf:"bytes,15,rep,name=pod_affinity,json=podAffinity,proto3" json:"pod_affinity,omitempty"` EnabledSharedMemory *EnabledSharedMemory `protobuf:"bytes,16,opt,name=enabled_shared_memory,json=enabledSharedMemory,proto3" json:"enabled_shared_memory,omitempty"` EmptyDirMounts []*EmptyDirMount `protobuf:"bytes,17,rep,name=empty_dir_mounts,json=emptyDirMounts,proto3" json:"empty_dir_mounts,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *KubernetesExecutorConfig) Reset() { *x = KubernetesExecutorConfig{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *KubernetesExecutorConfig) String() string { @@ -78,7 +76,7 @@ func (*KubernetesExecutorConfig) ProtoMessage() {} func (x *KubernetesExecutorConfig) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -213,23 +211,20 @@ func (x *KubernetesExecutorConfig) GetEmptyDirMounts() []*EmptyDirMount { } type EnabledSharedMemory struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Name of the Shared Memory Volume. VolumeName string `protobuf:"bytes,1,opt,name=volume_name,json=volumeName,proto3" json:"volume_name,omitempty"` // Size of the Shared Memory. - Size string `protobuf:"bytes,2,opt,name=size,proto3" json:"size,omitempty"` + Size string `protobuf:"bytes,2,opt,name=size,proto3" json:"size,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *EnabledSharedMemory) Reset() { *x = EnabledSharedMemory{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *EnabledSharedMemory) String() string { @@ -240,7 +235,7 @@ func (*EnabledSharedMemory) ProtoMessage() {} func (x *EnabledSharedMemory) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -270,10 +265,7 @@ func (x *EnabledSharedMemory) GetSize() string { } type SecretAsVolume struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Deprecated, use secret_name_parameter instead. 
// // Deprecated: Marked as deprecated in kubernetes_executor_config.proto. @@ -284,15 +276,15 @@ type SecretAsVolume struct { Optional *bool `protobuf:"varint,3,opt,name=optional,proto3,oneof" json:"optional,omitempty"` // Name of the Secret. SecretNameParameter *pipelinespec.TaskInputsSpec_InputParameterSpec `protobuf:"bytes,4,opt,name=secret_name_parameter,json=secretNameParameter,proto3" json:"secret_name_parameter,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *SecretAsVolume) Reset() { *x = SecretAsVolume{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *SecretAsVolume) String() string { @@ -303,7 +295,7 @@ func (*SecretAsVolume) ProtoMessage() {} func (x *SecretAsVolume) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -348,26 +340,25 @@ func (x *SecretAsVolume) GetSecretNameParameter() *pipelinespec.TaskInputsSpec_I } type SecretAsEnv struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Deprecated, use secret_name_parameter instead. // // Deprecated: Marked as deprecated in kubernetes_executor_config.proto. SecretName string `protobuf:"bytes,1,opt,name=secret_name,json=secretName,proto3" json:"secret_name,omitempty"` KeyToEnv []*SecretAsEnv_SecretKeyToEnvMap `protobuf:"bytes,2,rep,name=key_to_env,json=keyToEnv,proto3" json:"key_to_env,omitempty"` + // An optional boolean value indicating whether the Secret must be defined. + Optional *bool `protobuf:"varint,3,opt,name=optional,proto3,oneof" json:"optional,omitempty"` // Name of the Secret. 
SecretNameParameter *pipelinespec.TaskInputsSpec_InputParameterSpec `protobuf:"bytes,4,opt,name=secret_name_parameter,json=secretNameParameter,proto3" json:"secret_name_parameter,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *SecretAsEnv) Reset() { *x = SecretAsEnv{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *SecretAsEnv) String() string { @@ -378,7 +369,7 @@ func (*SecretAsEnv) ProtoMessage() {} func (x *SecretAsEnv) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -408,6 +399,13 @@ func (x *SecretAsEnv) GetKeyToEnv() []*SecretAsEnv_SecretKeyToEnvMap { return nil } +func (x *SecretAsEnv) GetOptional() bool { + if x != nil && x.Optional != nil { + return *x.Optional + } + return false +} + func (x *SecretAsEnv) GetSecretNameParameter() *pipelinespec.TaskInputsSpec_InputParameterSpec { if x != nil { return x.SecretNameParameter @@ -416,13 +414,10 @@ func (x *SecretAsEnv) GetSecretNameParameter() *pipelinespec.TaskInputsSpec_Inpu } type PvcMount struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Deprecated, use pvc_name_parameter instead. // - // Types that are assignable to PvcReference: + // Types that are valid to be assigned to PvcReference: // // *PvcMount_TaskOutputParameter // *PvcMount_Constant @@ -432,15 +427,15 @@ type PvcMount struct { MountPath string `protobuf:"bytes,4,opt,name=mount_path,json=mountPath,proto3" json:"mount_path,omitempty"` // Name of the PVC. PvcNameParameter *pipelinespec.TaskInputsSpec_InputParameterSpec `protobuf:"bytes,5,opt,name=pvc_name_parameter,json=pvcNameParameter,proto3" json:"pvc_name_parameter,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PvcMount) Reset() { *x = PvcMount{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PvcMount) String() string { @@ -451,7 +446,7 @@ func (*PvcMount) ProtoMessage() {} func (x *PvcMount) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -466,33 +461,39 @@ func (*PvcMount) Descriptor() ([]byte, []int) { return file_kubernetes_executor_config_proto_rawDescGZIP(), []int{4} } -func (m *PvcMount) GetPvcReference() isPvcMount_PvcReference { - if m != nil { - return m.PvcReference +func (x *PvcMount) GetPvcReference() isPvcMount_PvcReference { + if x != nil { + return x.PvcReference } return nil } // Deprecated: Marked as deprecated in kubernetes_executor_config.proto. 
func (x *PvcMount) GetTaskOutputParameter() *pipelinespec.TaskInputsSpec_InputParameterSpec_TaskOutputParameterSpec { - if x, ok := x.GetPvcReference().(*PvcMount_TaskOutputParameter); ok { - return x.TaskOutputParameter + if x != nil { + if x, ok := x.PvcReference.(*PvcMount_TaskOutputParameter); ok { + return x.TaskOutputParameter + } } return nil } // Deprecated: Marked as deprecated in kubernetes_executor_config.proto. func (x *PvcMount) GetConstant() string { - if x, ok := x.GetPvcReference().(*PvcMount_Constant); ok { - return x.Constant + if x != nil { + if x, ok := x.PvcReference.(*PvcMount_Constant); ok { + return x.Constant + } } return "" } // Deprecated: Marked as deprecated in kubernetes_executor_config.proto. func (x *PvcMount) GetComponentInputParameter() string { - if x, ok := x.GetPvcReference().(*PvcMount_ComponentInputParameter); ok { - return x.ComponentInputParameter + if x != nil { + if x, ok := x.PvcReference.(*PvcMount_ComponentInputParameter); ok { + return x.ComponentInputParameter + } } return "" } @@ -543,11 +544,8 @@ func (*PvcMount_Constant) isPvcMount_PvcReference() {} func (*PvcMount_ComponentInputParameter) isPvcMount_PvcReference() {} type CreatePvc struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Name: + state protoimpl.MessageState `protogen:"open.v1"` + // Types that are valid to be assigned to Name: // // *CreatePvc_PvcName // *CreatePvc_PvcNameSuffix @@ -564,16 +562,16 @@ type CreatePvc struct { // Corresponds to PersistentVolumeClaim.spec.volumeName field. VolumeName string `protobuf:"bytes,7,opt,name=volume_name,json=volumeName,proto3" json:"volume_name,omitempty"` // Corresponds to PersistentVolumeClaim.metadata.annotations field. 
- Annotations *structpb.Struct `protobuf:"bytes,8,opt,name=annotations,proto3" json:"annotations,omitempty"` + Annotations *structpb.Struct `protobuf:"bytes,8,opt,name=annotations,proto3" json:"annotations,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *CreatePvc) Reset() { *x = CreatePvc{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *CreatePvc) String() string { @@ -584,7 +582,7 @@ func (*CreatePvc) ProtoMessage() {} func (x *CreatePvc) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -599,23 +597,27 @@ func (*CreatePvc) Descriptor() ([]byte, []int) { return file_kubernetes_executor_config_proto_rawDescGZIP(), []int{5} } -func (m *CreatePvc) GetName() isCreatePvc_Name { - if m != nil { - return m.Name +func (x *CreatePvc) GetName() isCreatePvc_Name { + if x != nil { + return x.Name } return nil } func (x *CreatePvc) GetPvcName() string { - if x, ok := x.GetName().(*CreatePvc_PvcName); ok { - return x.PvcName + if x != nil { + if x, ok := x.Name.(*CreatePvc_PvcName); ok { + return x.PvcName + } } return "" } func (x *CreatePvc) GetPvcNameSuffix() string { - if x, ok := x.GetName().(*CreatePvc_PvcNameSuffix); ok { - return x.PvcNameSuffix + if x != nil { + if x, ok := x.Name.(*CreatePvc_PvcNameSuffix); ok { + return x.PvcNameSuffix + } } return "" } @@ -682,28 +684,25 @@ func (*CreatePvc_PvcName) isCreatePvc_Name() {} func (*CreatePvc_PvcNameSuffix) isCreatePvc_Name() {} type DeletePvc struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Identifier for the PVC. // Used like TaskInputsSpec.ml_pipelines.TaskInputsSpec.InputParameterSpec.kind. 
// - // Types that are assignable to PvcReference: + // Types that are valid to be assigned to PvcReference: // // *DeletePvc_TaskOutputParameter // *DeletePvc_Constant // *DeletePvc_ComponentInputParameter - PvcReference isDeletePvc_PvcReference `protobuf_oneof:"pvc_reference"` + PvcReference isDeletePvc_PvcReference `protobuf_oneof:"pvc_reference"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *DeletePvc) Reset() { *x = DeletePvc{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *DeletePvc) String() string { @@ -714,7 +713,7 @@ func (*DeletePvc) ProtoMessage() {} func (x *DeletePvc) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -729,30 +728,36 @@ func (*DeletePvc) Descriptor() ([]byte, []int) { return file_kubernetes_executor_config_proto_rawDescGZIP(), []int{6} } -func (m *DeletePvc) GetPvcReference() isDeletePvc_PvcReference { - if m != nil { - return m.PvcReference +func (x *DeletePvc) GetPvcReference() isDeletePvc_PvcReference { + if x != nil { + return x.PvcReference } return nil } func (x *DeletePvc) GetTaskOutputParameter() *pipelinespec.TaskInputsSpec_InputParameterSpec_TaskOutputParameterSpec { - if x, ok := x.GetPvcReference().(*DeletePvc_TaskOutputParameter); ok { - return x.TaskOutputParameter + if x != nil { + if x, ok := x.PvcReference.(*DeletePvc_TaskOutputParameter); ok { + return x.TaskOutputParameter + } } return nil } func (x *DeletePvc) GetConstant() string { - if x, ok := x.GetPvcReference().(*DeletePvc_Constant); ok { - return x.Constant + if x != nil { + if x, ok := x.PvcReference.(*DeletePvc_Constant); ok { + return x.Constant + } } return "" } func (x *DeletePvc) GetComponentInputParameter() string { - if x, ok := x.GetPvcReference().(*DeletePvc_ComponentInputParameter); ok { - return x.ComponentInputParameter + if x != nil { + if x, ok := x.PvcReference.(*DeletePvc_ComponentInputParameter); ok { + return x.ComponentInputParameter + } } return "" } @@ -783,26 +788,23 @@ func (*DeletePvc_Constant) isDeletePvc_PvcReference() {} func (*DeletePvc_ComponentInputParameter) isDeletePvc_PvcReference() {} type NodeSelector struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // map of label key to label value // corresponds to Pod.spec.nodeSelector field https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/pod-v1/#scheduling - Labels map[string]string `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Labels map[string]string `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // Provide a JSON struct of node selector // Takes precedence over labels. 
// Example: {"disk-type": "ssd", "region": "us-west"} NodeSelectorJson *pipelinespec.TaskInputsSpec_InputParameterSpec `protobuf:"bytes,2,opt,name=node_selector_json,json=nodeSelectorJson,proto3" json:"node_selector_json,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *NodeSelector) Reset() { *x = NodeSelector{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *NodeSelector) String() string { @@ -813,7 +815,7 @@ func (*NodeSelector) ProtoMessage() {} func (x *NodeSelector) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -843,23 +845,20 @@ func (x *NodeSelector) GetNodeSelectorJson() *pipelinespec.TaskInputsSpec_InputP } type PodMetadata struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // values of metadata spec such as labels and annotations for the pod object // corresponds to Pod.metadata field https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/pod-v1/#Pod - Labels map[string]string `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - Annotations map[string]string `protobuf:"bytes,2,rep,name=annotations,proto3" json:"annotations,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Labels map[string]string `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Annotations map[string]string `protobuf:"bytes,2,rep,name=annotations,proto3" json:"annotations,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PodMetadata) Reset() { *x = PodMetadata{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PodMetadata) String() string { @@ -870,7 +869,7 @@ func (*PodMetadata) ProtoMessage() {} func (x *PodMetadata) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -900,10 +899,7 @@ func (x *PodMetadata) GetAnnotations() map[string]string { } type ConfigMapAsVolume struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Deprecated, use config_name_parameter instead. // // Deprecated: Marked as deprecated in kubernetes_executor_config.proto. 
@@ -914,15 +910,15 @@ type ConfigMapAsVolume struct { Optional *bool `protobuf:"varint,3,opt,name=optional,proto3,oneof" json:"optional,omitempty"` // Name of the ConfigMap. ConfigMapNameParameter *pipelinespec.TaskInputsSpec_InputParameterSpec `protobuf:"bytes,4,opt,name=config_map_name_parameter,json=configMapNameParameter,proto3" json:"config_map_name_parameter,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ConfigMapAsVolume) Reset() { *x = ConfigMapAsVolume{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[9] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ConfigMapAsVolume) String() string { @@ -933,7 +929,7 @@ func (*ConfigMapAsVolume) ProtoMessage() {} func (x *ConfigMapAsVolume) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[9] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -978,10 +974,7 @@ func (x *ConfigMapAsVolume) GetConfigMapNameParameter() *pipelinespec.TaskInputs } type ConfigMapAsEnv struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Deprecated, use config_name_parameter instead. // // Deprecated: Marked as deprecated in kubernetes_executor_config.proto. @@ -989,15 +982,17 @@ type ConfigMapAsEnv struct { KeyToEnv []*ConfigMapAsEnv_ConfigMapKeyToEnvMap `protobuf:"bytes,2,rep,name=key_to_env,json=keyToEnv,proto3" json:"key_to_env,omitempty"` // Name of the ConfigMap. ConfigMapNameParameter *pipelinespec.TaskInputsSpec_InputParameterSpec `protobuf:"bytes,3,opt,name=config_map_name_parameter,json=configMapNameParameter,proto3" json:"config_map_name_parameter,omitempty"` + // An optional boolean value indicating whether the ConfigMap must be defined. 
+ Optional *bool `protobuf:"varint,4,opt,name=optional,proto3,oneof" json:"optional,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ConfigMapAsEnv) Reset() { *x = ConfigMapAsEnv{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ConfigMapAsEnv) String() string { @@ -1008,7 +1003,7 @@ func (*ConfigMapAsEnv) ProtoMessage() {} func (x *ConfigMapAsEnv) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1045,11 +1040,15 @@ func (x *ConfigMapAsEnv) GetConfigMapNameParameter() *pipelinespec.TaskInputsSpe return nil } -type GenericEphemeralVolume struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields +func (x *ConfigMapAsEnv) GetOptional() bool { + if x != nil && x.Optional != nil { + return *x.Optional + } + return false +} +type GenericEphemeralVolume struct { + state protoimpl.MessageState `protogen:"open.v1"` // more details in https://kubernetes.io/docs/concepts/storage/ephemeral-volumes/#generic-ephemeral-volumes // Name of the ephemeral volume. VolumeName string `protobuf:"bytes,1,opt,name=volume_name,json=volumeName,proto3" json:"volume_name,omitempty"` @@ -1066,16 +1065,16 @@ type GenericEphemeralVolume struct { StorageClassName string `protobuf:"bytes,6,opt,name=storage_class_name,json=storageClassName,proto3" json:"storage_class_name,omitempty"` // Corresponds to ephemeral.volumeClaimTemplate.metadata. // This is not exactly a pod metadata but the fields are the same - Metadata *PodMetadata `protobuf:"bytes,7,opt,name=metadata,proto3" json:"metadata,omitempty"` + Metadata *PodMetadata `protobuf:"bytes,7,opt,name=metadata,proto3" json:"metadata,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *GenericEphemeralVolume) Reset() { *x = GenericEphemeralVolume{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[11] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *GenericEphemeralVolume) String() string { @@ -1086,7 +1085,7 @@ func (*GenericEphemeralVolume) ProtoMessage() {} func (x *GenericEphemeralVolume) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[11] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1151,24 +1150,21 @@ func (x *GenericEphemeralVolume) GetMetadata() *PodMetadata { } type ImagePullSecret struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Name of the image pull secret. // // Deprecated: Marked as deprecated in kubernetes_executor_config.proto. 
SecretName string `protobuf:"bytes,1,opt,name=secret_name,json=secretName,proto3" json:"secret_name,omitempty"` SecretNameParameter *pipelinespec.TaskInputsSpec_InputParameterSpec `protobuf:"bytes,2,opt,name=secret_name_parameter,json=secretNameParameter,proto3" json:"secret_name_parameter,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ImagePullSecret) Reset() { *x = ImagePullSecret{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[12] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ImagePullSecret) String() string { @@ -1179,7 +1175,7 @@ func (*ImagePullSecret) ProtoMessage() {} func (x *ImagePullSecret) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[12] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1210,23 +1206,20 @@ func (x *ImagePullSecret) GetSecretNameParameter() *pipelinespec.TaskInputsSpec_ } type FieldPathAsEnv struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Name of the environment variable Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // Value of the field path string - FieldPath string `protobuf:"bytes,2,opt,name=field_path,json=fieldPath,proto3" json:"field_path,omitempty"` + FieldPath string `protobuf:"bytes,2,opt,name=field_path,json=fieldPath,proto3" json:"field_path,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *FieldPathAsEnv) Reset() { *x = FieldPathAsEnv{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[13] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *FieldPathAsEnv) String() string { @@ -1237,7 +1230,7 @@ func (*FieldPathAsEnv) ProtoMessage() {} func (x *FieldPathAsEnv) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[13] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1267,15 +1260,12 @@ func (x *FieldPathAsEnv) GetFieldPath() string { } type Toleration struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` - Operator string `protobuf:"bytes,2,opt,name=operator,proto3" json:"operator,omitempty"` - Value string `protobuf:"bytes,3,opt,name=value,proto3" json:"value,omitempty"` - Effect string `protobuf:"bytes,4,opt,name=effect,proto3" json:"effect,omitempty"` - TolerationSeconds *int64 `protobuf:"varint,5,opt,name=toleration_seconds,json=tolerationSeconds,proto3,oneof" json:"toleration_seconds,omitempty"` + state protoimpl.MessageState `protogen:"open.v1"` + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + Operator string `protobuf:"bytes,2,opt,name=operator,proto3" 
json:"operator,omitempty"` + Value string `protobuf:"bytes,3,opt,name=value,proto3" json:"value,omitempty"` + Effect string `protobuf:"bytes,4,opt,name=effect,proto3" json:"effect,omitempty"` + TolerationSeconds *int64 `protobuf:"varint,5,opt,name=toleration_seconds,json=tolerationSeconds,proto3,oneof" json:"toleration_seconds,omitempty"` // Provide a json struct of the toleration // Takes precedence over key, operator, value, effect. // Example: {"key": "key1", "operator": "Equal", "value": "value1", "effect": "NoSchedule"} @@ -1283,15 +1273,15 @@ type Toleration struct { // Toleration structure: // https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.27/#toleration-v1-core TolerationJson *pipelinespec.TaskInputsSpec_InputParameterSpec `protobuf:"bytes,6,opt,name=toleration_json,json=tolerationJson,proto3" json:"toleration_json,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Toleration) Reset() { *x = Toleration{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[14] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *Toleration) String() string { @@ -1302,7 +1292,7 @@ func (*Toleration) ProtoMessage() {} func (x *Toleration) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[14] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1362,22 +1352,19 @@ func (x *Toleration) GetTolerationJson() *pipelinespec.TaskInputsSpec_InputParam // Matches https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.25/#labelselectorrequirement-v1-meta and // https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.25/#nodeselectorrequirement-v1-core type SelectorRequirement struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + Operator string `protobuf:"bytes,2,opt,name=operator,proto3" json:"operator,omitempty"` + Values []string `protobuf:"bytes,3,rep,name=values,proto3" json:"values,omitempty"` unknownFields protoimpl.UnknownFields - - Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` - Operator string `protobuf:"bytes,2,opt,name=operator,proto3" json:"operator,omitempty"` - Values []string `protobuf:"bytes,3,rep,name=values,proto3" json:"values,omitempty"` + sizeCache protoimpl.SizeCache } func (x *SelectorRequirement) Reset() { *x = SelectorRequirement{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[15] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *SelectorRequirement) String() string { @@ -1388,7 +1375,7 @@ func (*SelectorRequirement) ProtoMessage() {} func (x *SelectorRequirement) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[15] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ 
-1425,23 +1412,27 @@ func (x *SelectorRequirement) GetValues() []string { } type NodeAffinityTerm struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` MatchExpressions []*SelectorRequirement `protobuf:"bytes,1,rep,name=match_expressions,json=matchExpressions,proto3" json:"match_expressions,omitempty"` MatchFields []*SelectorRequirement `protobuf:"bytes,2,rep,name=match_fields,json=matchFields,proto3" json:"match_fields,omitempty"` //Setting the weight makes it use PreferredDuringSchedulingIgnoredDuringExecution rules instead of RequiredDuringSchedulingIgnoredDuringExecution rules Weight *int32 `protobuf:"varint,3,opt,name=weight,proto3,oneof" json:"weight,omitempty"` + // Provide a JSON struct of node affinity. Takes precedence over PreferredDuringSchedulingIgnoredDuringExecution rules/RequiredDuringSchedulingIgnoredDuringExecution rules. + // The JSON must follow Kubernetes + // NodeAffinity structure: + // https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.27/#nodeaffinity-v1-core + // Example: + // { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ ... ] } } + NodeAffinityJson *pipelinespec.TaskInputsSpec_InputParameterSpec `protobuf:"bytes,4,opt,name=node_affinity_json,json=nodeAffinityJson,proto3" json:"node_affinity_json,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *NodeAffinityTerm) Reset() { *x = NodeAffinityTerm{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[16] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *NodeAffinityTerm) String() string { @@ -1452,7 +1443,7 @@ func (*NodeAffinityTerm) ProtoMessage() {} func (x *NodeAffinityTerm) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[16] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1488,30 +1479,34 @@ func (x *NodeAffinityTerm) GetWeight() int32 { return 0 } -type PodAffinityTerm struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields +func (x *NodeAffinityTerm) GetNodeAffinityJson() *pipelinespec.TaskInputsSpec_InputParameterSpec { + if x != nil { + return x.NodeAffinityJson + } + return nil +} +type PodAffinityTerm struct { + state protoimpl.MessageState `protogen:"open.v1"` MatchPodExpressions []*SelectorRequirement `protobuf:"bytes,1,rep,name=match_pod_expressions,json=matchPodExpressions,proto3" json:"match_pod_expressions,omitempty"` - MatchPodLabels map[string]string `protobuf:"bytes,2,rep,name=match_pod_labels,json=matchPodLabels,proto3" json:"match_pod_labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + MatchPodLabels map[string]string `protobuf:"bytes,2,rep,name=match_pod_labels,json=matchPodLabels,proto3" json:"match_pod_labels,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` TopologyKey string `protobuf:"bytes,3,opt,name=topology_key,json=topologyKey,proto3" json:"topology_key,omitempty"` Namespaces []string `protobuf:"bytes,4,rep,name=namespaces,proto3" 
json:"namespaces,omitempty"` MatchNamespaceExpressions []*SelectorRequirement `protobuf:"bytes,5,rep,name=match_namespace_expressions,json=matchNamespaceExpressions,proto3" json:"match_namespace_expressions,omitempty"` - MatchNamespaceLabels map[string]string `protobuf:"bytes,6,rep,name=match_namespace_labels,json=matchNamespaceLabels,proto3" json:"match_namespace_labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + MatchNamespaceLabels map[string]string `protobuf:"bytes,6,rep,name=match_namespace_labels,json=matchNamespaceLabels,proto3" json:"match_namespace_labels,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` //Setting a weight makes it use PreferredDuringSchedulingIgnoredDuringExecution rules instead of RequiredDuringSchedulingIgnoredDuringExecution rules Weight *int32 `protobuf:"varint,7,opt,name=weight,proto3,oneof" json:"weight,omitempty"` //Flag indicating if it is a podaffinity or podantiaffinity - Anti *bool `protobuf:"varint,8,opt,name=anti,proto3,oneof" json:"anti,omitempty"` + Anti *bool `protobuf:"varint,8,opt,name=anti,proto3,oneof" json:"anti,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *PodAffinityTerm) Reset() { *x = PodAffinityTerm{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[17] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *PodAffinityTerm) String() string { @@ -1522,7 +1517,7 @@ func (*PodAffinityTerm) ProtoMessage() {} func (x *PodAffinityTerm) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[17] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1594,24 +1589,21 @@ func (x *PodAffinityTerm) GetAnti() bool { } type EmptyDirMount struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.25/#emptydirvolumesource-v1-core - VolumeName string `protobuf:"bytes,1,opt,name=volume_name,json=volumeName,proto3" json:"volume_name,omitempty"` - MountPath string `protobuf:"bytes,2,opt,name=mount_path,json=mountPath,proto3" json:"mount_path,omitempty"` - Medium *string `protobuf:"bytes,3,opt,name=medium,proto3,oneof" json:"medium,omitempty"` - SizeLimit *string `protobuf:"bytes,4,opt,name=size_limit,json=sizeLimit,proto3,oneof" json:"size_limit,omitempty"` + VolumeName string `protobuf:"bytes,1,opt,name=volume_name,json=volumeName,proto3" json:"volume_name,omitempty"` + MountPath string `protobuf:"bytes,2,opt,name=mount_path,json=mountPath,proto3" json:"mount_path,omitempty"` + Medium *string `protobuf:"bytes,3,opt,name=medium,proto3,oneof" json:"medium,omitempty"` + SizeLimit *string `protobuf:"bytes,4,opt,name=size_limit,json=sizeLimit,proto3,oneof" json:"size_limit,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *EmptyDirMount) Reset() { *x = EmptyDirMount{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[18] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - 
ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *EmptyDirMount) String() string { @@ -1622,7 +1614,7 @@ func (*EmptyDirMount) ProtoMessage() {} func (x *EmptyDirMount) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[18] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1666,23 +1658,20 @@ func (x *EmptyDirMount) GetSizeLimit() string { } type SecretAsEnv_SecretKeyToEnvMap struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Corresponds to a key of the Secret.data field. SecretKey string `protobuf:"bytes,1,opt,name=secret_key,json=secretKey,proto3" json:"secret_key,omitempty"` // Env var to which secret_key's data should be set. - EnvVar string `protobuf:"bytes,2,opt,name=env_var,json=envVar,proto3" json:"env_var,omitempty"` + EnvVar string `protobuf:"bytes,2,opt,name=env_var,json=envVar,proto3" json:"env_var,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *SecretAsEnv_SecretKeyToEnvMap) Reset() { *x = SecretAsEnv_SecretKeyToEnvMap{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[19] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *SecretAsEnv_SecretKeyToEnvMap) String() string { @@ -1693,7 +1682,7 @@ func (*SecretAsEnv_SecretKeyToEnvMap) ProtoMessage() {} func (x *SecretAsEnv_SecretKeyToEnvMap) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[19] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1723,23 +1712,20 @@ func (x *SecretAsEnv_SecretKeyToEnvMap) GetEnvVar() string { } type ConfigMapAsEnv_ConfigMapKeyToEnvMap struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - + state protoimpl.MessageState `protogen:"open.v1"` // Corresponds to a key of the ConfigMap. ConfigMapKey string `protobuf:"bytes,1,opt,name=config_map_key,json=configMapKey,proto3" json:"config_map_key,omitempty"` // Env var to which configmap_key's data should be set. 
- EnvVar string `protobuf:"bytes,2,opt,name=env_var,json=envVar,proto3" json:"env_var,omitempty"` + EnvVar string `protobuf:"bytes,2,opt,name=env_var,json=envVar,proto3" json:"env_var,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *ConfigMapAsEnv_ConfigMapKeyToEnvMap) Reset() { *x = ConfigMapAsEnv_ConfigMapKeyToEnvMap{} - if protoimpl.UnsafeEnabled { - mi := &file_kubernetes_executor_config_proto_msgTypes[23] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } + mi := &file_kubernetes_executor_config_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } func (x *ConfigMapAsEnv_ConfigMapKeyToEnvMap) String() string { @@ -1750,7 +1736,7 @@ func (*ConfigMapAsEnv_ConfigMapKeyToEnvMap) ProtoMessage() {} func (x *ConfigMapAsEnv_ConfigMapKeyToEnvMap) ProtoReflect() protoreflect.Message { mi := &file_kubernetes_executor_config_proto_msgTypes[23] - if protoimpl.UnsafeEnabled && x != nil { + if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) @@ -1781,405 +1767,189 @@ func (x *ConfigMapAsEnv_ConfigMapKeyToEnvMap) GetEnvVar() string { var File_kubernetes_executor_config_proto protoreflect.FileDescriptor -var file_kubernetes_executor_config_proto_rawDesc = []byte{ - 0x0a, 0x20, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x5f, 0x65, 0x78, 0x65, - 0x63, 0x75, 0x74, 0x6f, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x12, 0x0e, 0x6b, 0x66, 0x70, 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, - 0x65, 0x73, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2f, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x1a, 0x13, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xc7, 0x09, 0x0a, 0x18, 0x4b, 0x75, 0x62, 0x65, 0x72, 0x6e, - 0x65, 0x74, 0x65, 0x73, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x43, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x12, 0x48, 0x0a, 0x10, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x5f, 0x61, 0x73, 0x5f, - 0x76, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x6b, - 0x66, 0x70, 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x2e, 0x53, 0x65, - 0x63, 0x72, 0x65, 0x74, 0x41, 0x73, 0x56, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x52, 0x0e, 0x73, 0x65, - 0x63, 0x72, 0x65, 0x74, 0x41, 0x73, 0x56, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x12, 0x3f, 0x0a, 0x0d, - 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x5f, 0x61, 0x73, 0x5f, 0x65, 0x6e, 0x76, 0x18, 0x02, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x6b, 0x66, 0x70, 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, - 0x65, 0x74, 0x65, 0x73, 0x2e, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x41, 0x73, 0x45, 0x6e, 0x76, - 0x52, 0x0b, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x41, 0x73, 0x45, 0x6e, 0x76, 0x12, 0x35, 0x0a, - 0x09, 0x70, 0x76, 0x63, 0x5f, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x18, 0x2e, 0x6b, 0x66, 0x70, 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, - 0x73, 0x2e, 0x50, 0x76, 0x63, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x52, 0x08, 0x70, 0x76, 0x63, 0x4d, - 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x41, 0x0a, 0x0d, 0x6e, 0x6f, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x6c, - 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x6b, 0x66, - 0x70, 0x5f, 0x6b, 
0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x2e, 0x4e, 0x6f, 0x64, - 0x65, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x0c, 0x6e, 0x6f, 0x64, 0x65, 0x53, - 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x3e, 0x0a, 0x0c, 0x70, 0x6f, 0x64, 0x5f, 0x6d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, - 0x6b, 0x66, 0x70, 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x2e, 0x50, - 0x6f, 0x64, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x0b, 0x70, 0x6f, 0x64, 0x4d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x4b, 0x0a, 0x11, 0x69, 0x6d, 0x61, 0x67, 0x65, - 0x5f, 0x70, 0x75, 0x6c, 0x6c, 0x5f, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x18, 0x06, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x6b, 0x66, 0x70, 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, - 0x74, 0x65, 0x73, 0x2e, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x50, 0x75, 0x6c, 0x6c, 0x53, 0x65, 0x63, - 0x72, 0x65, 0x74, 0x52, 0x0f, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x50, 0x75, 0x6c, 0x6c, 0x53, 0x65, - 0x63, 0x72, 0x65, 0x74, 0x12, 0x2a, 0x0a, 0x11, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x70, 0x75, - 0x6c, 0x6c, 0x5f, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0f, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x50, 0x75, 0x6c, 0x6c, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, - 0x12, 0x52, 0x0a, 0x14, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x5f, 0x6d, 0x61, 0x70, 0x5f, 0x61, - 0x73, 0x5f, 0x76, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, - 0x2e, 0x6b, 0x66, 0x70, 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x2e, - 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x4d, 0x61, 0x70, 0x41, 0x73, 0x56, 0x6f, 0x6c, 0x75, 0x6d, - 0x65, 0x52, 0x11, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x4d, 0x61, 0x70, 0x41, 0x73, 0x56, 0x6f, - 0x6c, 0x75, 0x6d, 0x65, 0x12, 0x49, 0x0a, 0x11, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x5f, 0x6d, - 0x61, 0x70, 0x5f, 0x61, 0x73, 0x5f, 0x65, 0x6e, 0x76, 0x18, 0x09, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x1e, 0x2e, 0x6b, 0x66, 0x70, 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, - 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x4d, 0x61, 0x70, 0x41, 0x73, 0x45, 0x6e, 0x76, 0x52, - 0x0e, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x4d, 0x61, 0x70, 0x41, 0x73, 0x45, 0x6e, 0x76, 0x12, - 0x36, 0x0a, 0x17, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x64, 0x65, 0x61, 0x64, 0x6c, 0x69, - 0x6e, 0x65, 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x03, - 0x52, 0x15, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x44, 0x65, 0x61, 0x64, 0x6c, 0x69, 0x6e, 0x65, - 0x53, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x12, 0x49, 0x0a, 0x11, 0x66, 0x69, 0x65, 0x6c, 0x64, - 0x5f, 0x70, 0x61, 0x74, 0x68, 0x5f, 0x61, 0x73, 0x5f, 0x65, 0x6e, 0x76, 0x18, 0x0b, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x6b, 0x66, 0x70, 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, - 0x74, 0x65, 0x73, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x50, 0x61, 0x74, 0x68, 0x41, 0x73, 0x45, - 0x6e, 0x76, 0x52, 0x0e, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x50, 0x61, 0x74, 0x68, 0x41, 0x73, 0x45, - 0x6e, 0x76, 0x12, 0x3c, 0x0a, 0x0b, 0x74, 0x6f, 0x6c, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x18, 0x0c, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x6b, 0x66, 0x70, 0x5f, 0x6b, 0x75, - 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x2e, 0x54, 0x6f, 0x6c, 0x65, 0x72, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x52, 0x0b, 0x74, 0x6f, 0x6c, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x12, 0x60, 0x0a, 0x18, 0x67, 0x65, 0x6e, 
0x65, 0x72, 0x69, 0x63, 0x5f, 0x65, 0x70, 0x68, 0x65, - 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x5f, 0x76, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x18, 0x0d, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x6b, 0x66, 0x70, 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, - 0x74, 0x65, 0x73, 0x2e, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x45, 0x70, 0x68, 0x65, 0x6d, - 0x65, 0x72, 0x61, 0x6c, 0x56, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x52, 0x16, 0x67, 0x65, 0x6e, 0x65, - 0x72, 0x69, 0x63, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x56, 0x6f, 0x6c, 0x75, - 0x6d, 0x65, 0x12, 0x45, 0x0a, 0x0d, 0x6e, 0x6f, 0x64, 0x65, 0x5f, 0x61, 0x66, 0x66, 0x69, 0x6e, - 0x69, 0x74, 0x79, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x6b, 0x66, 0x70, 0x5f, - 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x41, - 0x66, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x79, 0x54, 0x65, 0x72, 0x6d, 0x52, 0x0c, 0x6e, 0x6f, 0x64, - 0x65, 0x41, 0x66, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x79, 0x12, 0x42, 0x0a, 0x0c, 0x70, 0x6f, 0x64, - 0x5f, 0x61, 0x66, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x79, 0x18, 0x0f, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x1f, 0x2e, 0x6b, 0x66, 0x70, 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, - 0x2e, 0x50, 0x6f, 0x64, 0x41, 0x66, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x79, 0x54, 0x65, 0x72, 0x6d, - 0x52, 0x0b, 0x70, 0x6f, 0x64, 0x41, 0x66, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x79, 0x12, 0x57, 0x0a, - 0x15, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x5f, 0x73, 0x68, 0x61, 0x72, 0x65, 0x64, 0x5f, - 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x6b, - 0x66, 0x70, 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x2e, 0x45, 0x6e, - 0x61, 0x62, 0x6c, 0x65, 0x64, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x4d, 0x65, 0x6d, 0x6f, 0x72, - 0x79, 0x52, 0x13, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, - 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x12, 0x47, 0x0a, 0x10, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x5f, - 0x64, 0x69, 0x72, 0x5f, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x73, 0x18, 0x11, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x1d, 0x2e, 0x6b, 0x66, 0x70, 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, - 0x73, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x44, 0x69, 0x72, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x52, - 0x0e, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x44, 0x69, 0x72, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x73, 0x22, - 0x4a, 0x0a, 0x13, 0x45, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, - 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x12, 0x1f, 0x0a, 0x0b, 0x76, 0x6f, 0x6c, 0x75, 0x6d, 0x65, - 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x76, 0x6f, 0x6c, - 0x75, 0x6d, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x22, 0xe7, 0x01, 0x0a, 0x0e, - 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x41, 0x73, 0x56, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x12, 0x23, - 0x0a, 0x0b, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0a, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x4e, - 0x61, 0x6d, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x70, 0x61, 0x74, - 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x50, 0x61, - 0x74, 0x68, 0x12, 0x1f, 0x0a, 0x08, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x08, 0x6f, 0x70, 0x74, 
0x69, 0x6f, 0x6e, 0x61, 0x6c, - 0x88, 0x01, 0x01, 0x12, 0x63, 0x0a, 0x15, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x5f, 0x6e, 0x61, - 0x6d, 0x65, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, - 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, - 0x70, 0x65, 0x63, 0x52, 0x13, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x50, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x6f, 0x70, 0x74, - 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x22, 0xb1, 0x02, 0x0a, 0x0b, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, - 0x41, 0x73, 0x45, 0x6e, 0x76, 0x12, 0x23, 0x0a, 0x0b, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x5f, - 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0a, - 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x4b, 0x0a, 0x0a, 0x6b, 0x65, - 0x79, 0x5f, 0x74, 0x6f, 0x5f, 0x65, 0x6e, 0x76, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2d, - 0x2e, 0x6b, 0x66, 0x70, 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x2e, - 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x41, 0x73, 0x45, 0x6e, 0x76, 0x2e, 0x53, 0x65, 0x63, 0x72, - 0x65, 0x74, 0x4b, 0x65, 0x79, 0x54, 0x6f, 0x45, 0x6e, 0x76, 0x4d, 0x61, 0x70, 0x52, 0x08, 0x6b, - 0x65, 0x79, 0x54, 0x6f, 0x45, 0x6e, 0x76, 0x12, 0x63, 0x0a, 0x15, 0x73, 0x65, 0x63, 0x72, 0x65, - 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, - 0x53, 0x70, 0x65, 0x63, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x52, 0x13, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x4e, - 0x61, 0x6d, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x1a, 0x4b, 0x0a, 0x11, - 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x4b, 0x65, 0x79, 0x54, 0x6f, 0x45, 0x6e, 0x76, 0x4d, 0x61, - 0x70, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x4b, 0x65, 0x79, - 0x12, 0x17, 0x0a, 0x07, 0x65, 0x6e, 0x76, 0x5f, 0x76, 0x61, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x06, 0x65, 0x6e, 0x76, 0x56, 0x61, 0x72, 0x22, 0x81, 0x03, 0x0a, 0x08, 0x50, 0x76, - 0x63, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x81, 0x01, 0x0a, 0x15, 0x74, 0x61, 0x73, 0x6b, 0x5f, - 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x47, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, - 0x53, 0x70, 0x65, 0x63, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x4f, 0x75, 0x74, 0x70, - 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x42, - 0x02, 0x18, 0x01, 0x48, 0x00, 0x52, 0x13, 0x74, 0x61, 0x73, 0x6b, 0x4f, 0x75, 0x74, 0x70, 0x75, - 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x12, 0x20, 0x0a, 0x08, 0x63, 
0x6f, - 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, - 0x48, 0x00, 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x12, 0x40, 0x0a, 0x19, - 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x42, - 0x02, 0x18, 0x01, 0x48, 0x00, 0x52, 0x17, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, - 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x12, 0x1d, - 0x0a, 0x0a, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x04, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x09, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x50, 0x61, 0x74, 0x68, 0x12, 0x5d, 0x0a, - 0x12, 0x70, 0x76, 0x63, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x49, 0x6e, 0x70, - 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x52, 0x10, 0x70, 0x76, 0x63, 0x4e, - 0x61, 0x6d, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x42, 0x0f, 0x0a, 0x0d, - 0x70, 0x76, 0x63, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x22, 0xcf, 0x02, - 0x0a, 0x09, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x76, 0x63, 0x12, 0x1b, 0x0a, 0x08, 0x70, - 0x76, 0x63, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, - 0x07, 0x70, 0x76, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x28, 0x0a, 0x0f, 0x70, 0x76, 0x63, 0x5f, - 0x6e, 0x61, 0x6d, 0x65, 0x5f, 0x73, 0x75, 0x66, 0x66, 0x69, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x48, 0x00, 0x52, 0x0d, 0x70, 0x76, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x53, 0x75, 0x66, 0x66, - 0x69, 0x78, 0x12, 0x21, 0x0a, 0x0c, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x6d, 0x6f, 0x64, - 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0b, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, - 0x4d, 0x6f, 0x64, 0x65, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x04, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x12, 0x32, 0x0a, 0x15, 0x64, 0x65, 0x66, - 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x63, 0x6c, 0x61, - 0x73, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x13, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, - 0x74, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x12, 0x2c, 0x0a, - 0x12, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x5f, 0x6e, - 0x61, 0x6d, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x73, 0x74, 0x6f, 0x72, 0x61, - 0x67, 0x65, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x76, - 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x0a, 0x76, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x39, 0x0a, 0x0b, - 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, 0x0b, 0x61, 0x6e, 0x6e, 0x6f, - 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x42, 0x06, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, - 0xf7, 0x01, 0x0a, 
0x09, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, 0x76, 0x63, 0x12, 0x7d, 0x0a, - 0x15, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x47, 0x2e, 0x6d, - 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x54, 0x61, 0x73, 0x6b, - 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x54, 0x61, - 0x73, 0x6b, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x53, 0x70, 0x65, 0x63, 0x48, 0x00, 0x52, 0x13, 0x74, 0x61, 0x73, 0x6b, 0x4f, 0x75, 0x74, - 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x12, 0x1c, 0x0a, 0x08, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, - 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x12, 0x3c, 0x0a, 0x19, 0x63, 0x6f, - 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, - 0x17, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x42, 0x0f, 0x0a, 0x0d, 0x70, 0x76, 0x63, 0x5f, - 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x22, 0xea, 0x01, 0x0a, 0x0c, 0x4e, 0x6f, - 0x64, 0x65, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x40, 0x0a, 0x06, 0x6c, 0x61, - 0x62, 0x65, 0x6c, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x6b, 0x66, 0x70, - 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x2e, 0x4e, 0x6f, 0x64, 0x65, - 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, - 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x5d, 0x0a, 0x12, - 0x6e, 0x6f, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x5f, 0x6a, 0x73, - 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x49, 0x6e, 0x70, 0x75, - 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, - 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x52, 0x10, 0x6e, 0x6f, 0x64, 0x65, 0x53, - 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x1a, 0x39, 0x0a, 0x0b, 0x4c, - 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, - 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x99, 0x02, 0x0a, 0x0b, 0x50, 0x6f, 0x64, 0x4d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x3f, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x6b, 0x66, 0x70, 0x5f, 0x6b, 0x75, 0x62, - 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x2e, 0x50, 0x6f, 0x64, 0x4d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, - 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x4e, 0x0a, 0x0b, 0x61, 0x6e, 0x6e, 0x6f, 0x74, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 
0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x6b, - 0x66, 0x70, 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x2e, 0x50, 0x6f, - 0x64, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0b, 0x61, 0x6e, 0x6e, 0x6f, - 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x39, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, 0x6c, - 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, - 0x38, 0x01, 0x1a, 0x3e, 0x0a, 0x10, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, - 0x38, 0x01, 0x22, 0xf8, 0x01, 0x0a, 0x11, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x4d, 0x61, 0x70, - 0x41, 0x73, 0x56, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x12, 0x2a, 0x0a, 0x0f, 0x63, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x5f, 0x6d, 0x61, 0x70, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0d, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x4d, 0x61, 0x70, - 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x70, 0x61, - 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x50, - 0x61, 0x74, 0x68, 0x12, 0x1f, 0x0a, 0x08, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x08, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x61, - 0x6c, 0x88, 0x01, 0x01, 0x12, 0x6a, 0x0a, 0x19, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x5f, 0x6d, - 0x61, 0x70, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x49, 0x6e, 0x70, 0x75, 0x74, - 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, - 0x65, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x52, 0x16, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x4d, 0x61, 0x70, 0x4e, 0x61, 0x6d, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x22, 0xd2, 0x02, - 0x0a, 0x0e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x4d, 0x61, 0x70, 0x41, 0x73, 0x45, 0x6e, 0x76, - 0x12, 0x2a, 0x0a, 0x0f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x5f, 0x6d, 0x61, 0x70, 0x5f, 0x6e, - 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0d, 0x63, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x4d, 0x61, 0x70, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x51, 0x0a, 0x0a, - 0x6b, 0x65, 0x79, 0x5f, 0x74, 0x6f, 0x5f, 0x65, 0x6e, 0x76, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x33, 0x2e, 0x6b, 0x66, 0x70, 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, - 0x73, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x4d, 0x61, 0x70, 0x41, 0x73, 0x45, 0x6e, 0x76, - 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x4d, 0x61, 0x70, 0x4b, 0x65, 0x79, 0x54, 0x6f, 0x45, - 0x6e, 0x76, 0x4d, 0x61, 0x70, 0x52, 0x08, 0x6b, 0x65, 0x79, 0x54, 
0x6f, 0x45, 0x6e, 0x76, 0x12, - 0x6a, 0x0a, 0x19, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x5f, 0x6d, 0x61, 0x70, 0x5f, 0x6e, 0x61, - 0x6d, 0x65, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, - 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, - 0x70, 0x65, 0x63, 0x52, 0x16, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x4d, 0x61, 0x70, 0x4e, 0x61, - 0x6d, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x1a, 0x55, 0x0a, 0x14, 0x43, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x4d, 0x61, 0x70, 0x4b, 0x65, 0x79, 0x54, 0x6f, 0x45, 0x6e, 0x76, - 0x4d, 0x61, 0x70, 0x12, 0x24, 0x0a, 0x0e, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x5f, 0x6d, 0x61, - 0x70, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x63, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x4d, 0x61, 0x70, 0x4b, 0x65, 0x79, 0x12, 0x17, 0x0a, 0x07, 0x65, 0x6e, 0x76, - 0x5f, 0x76, 0x61, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x65, 0x6e, 0x76, 0x56, - 0x61, 0x72, 0x22, 0xaa, 0x02, 0x0a, 0x16, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x45, 0x70, - 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x56, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x12, 0x1f, 0x0a, - 0x0b, 0x76, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x0a, 0x76, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1d, - 0x0a, 0x0a, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x09, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x50, 0x61, 0x74, 0x68, 0x12, 0x21, 0x0a, - 0x0c, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x73, 0x18, 0x03, 0x20, - 0x03, 0x28, 0x09, 0x52, 0x0b, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x4d, 0x6f, 0x64, 0x65, 0x73, - 0x12, 0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, - 0x73, 0x69, 0x7a, 0x65, 0x12, 0x32, 0x0a, 0x15, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, - 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x18, 0x05, 0x20, - 0x01, 0x28, 0x08, 0x52, 0x13, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x53, 0x74, 0x6f, 0x72, - 0x61, 0x67, 0x65, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x12, 0x2c, 0x0a, 0x12, 0x73, 0x74, 0x6f, 0x72, - 0x61, 0x67, 0x65, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x06, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x43, 0x6c, 0x61, - 0x73, 0x73, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x37, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, - 0x74, 0x61, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x6b, 0x66, 0x70, 0x5f, 0x6b, - 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x2e, 0x50, 0x6f, 0x64, 0x4d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, - 0x9b, 0x01, 0x0a, 0x0f, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x50, 0x75, 0x6c, 0x6c, 0x53, 0x65, 0x63, - 0x72, 0x65, 0x74, 0x12, 0x23, 0x0a, 0x0b, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x5f, 0x6e, 0x61, - 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0a, 0x73, 0x65, - 0x63, 0x72, 0x65, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x63, 0x0a, 0x15, 0x73, 0x65, 0x63, 0x72, - 0x65, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 
0x65, - 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x49, 0x6e, 0x70, 0x75, 0x74, - 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, - 0x65, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x52, 0x13, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, - 0x4e, 0x61, 0x6d, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x22, 0x43, 0x0a, - 0x0e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x50, 0x61, 0x74, 0x68, 0x41, 0x73, 0x45, 0x6e, 0x76, 0x12, - 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, - 0x61, 0x6d, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x70, 0x61, 0x74, - 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x50, 0x61, - 0x74, 0x68, 0x22, 0x8d, 0x02, 0x0a, 0x0a, 0x54, 0x6f, 0x6c, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, - 0x6b, 0x65, 0x79, 0x12, 0x1a, 0x0a, 0x08, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, - 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x12, 0x32, 0x0a, - 0x12, 0x74, 0x6f, 0x6c, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x63, 0x6f, - 0x6e, 0x64, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x11, 0x74, 0x6f, 0x6c, - 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x88, 0x01, - 0x01, 0x12, 0x58, 0x0a, 0x0f, 0x74, 0x6f, 0x6c, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, - 0x6a, 0x73, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x6d, 0x6c, 0x5f, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x49, 0x6e, - 0x70, 0x75, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x52, 0x0e, 0x74, 0x6f, 0x6c, - 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x73, 0x6f, 0x6e, 0x42, 0x15, 0x0a, 0x13, 0x5f, - 0x74, 0x6f, 0x6c, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, - 0x64, 0x73, 0x22, 0x5b, 0x0a, 0x13, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, - 0x71, 0x75, 0x69, 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x1a, 0x0a, 0x08, 0x6f, - 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6f, - 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x22, - 0xd4, 0x01, 0x0a, 0x10, 0x4e, 0x6f, 0x64, 0x65, 0x41, 0x66, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x79, - 0x54, 0x65, 0x72, 0x6d, 0x12, 0x50, 0x0a, 0x11, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x65, 0x78, - 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x23, 0x2e, 0x6b, 0x66, 0x70, 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, - 0x2e, 0x53, 0x65, 
0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, - 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x10, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x45, 0x78, 0x70, 0x72, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x46, 0x0a, 0x0c, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x5f, - 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x6b, - 0x66, 0x70, 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x2e, 0x53, 0x65, - 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x6d, 0x65, 0x6e, - 0x74, 0x52, 0x0b, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x12, 0x1b, - 0x0a, 0x06, 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, - 0x52, 0x06, 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x88, 0x01, 0x01, 0x42, 0x09, 0x0a, 0x07, 0x5f, - 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x22, 0xb8, 0x05, 0x0a, 0x0f, 0x50, 0x6f, 0x64, 0x41, 0x66, - 0x66, 0x69, 0x6e, 0x69, 0x74, 0x79, 0x54, 0x65, 0x72, 0x6d, 0x12, 0x57, 0x0a, 0x15, 0x6d, 0x61, - 0x74, 0x63, 0x68, 0x5f, 0x70, 0x6f, 0x64, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, - 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x6b, 0x66, 0x70, 0x5f, - 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x2e, 0x53, 0x65, 0x6c, 0x65, 0x63, - 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x13, - 0x6d, 0x61, 0x74, 0x63, 0x68, 0x50, 0x6f, 0x64, 0x45, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, - 0x6f, 0x6e, 0x73, 0x12, 0x5d, 0x0a, 0x10, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x70, 0x6f, 0x64, - 0x5f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x33, 0x2e, - 0x6b, 0x66, 0x70, 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x2e, 0x50, - 0x6f, 0x64, 0x41, 0x66, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x79, 0x54, 0x65, 0x72, 0x6d, 0x2e, 0x4d, - 0x61, 0x74, 0x63, 0x68, 0x50, 0x6f, 0x64, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x52, 0x0e, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x50, 0x6f, 0x64, 0x4c, 0x61, 0x62, 0x65, - 0x6c, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x74, 0x6f, 0x70, 0x6f, 0x6c, 0x6f, 0x67, 0x79, 0x5f, 0x6b, - 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x74, 0x6f, 0x70, 0x6f, 0x6c, 0x6f, - 0x67, 0x79, 0x4b, 0x65, 0x79, 0x12, 0x1e, 0x0a, 0x0a, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x6e, 0x61, 0x6d, 0x65, 0x73, - 0x70, 0x61, 0x63, 0x65, 0x73, 0x12, 0x63, 0x0a, 0x1b, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x6e, - 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x6b, 0x66, 0x70, - 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x2e, 0x53, 0x65, 0x6c, 0x65, - 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x52, - 0x19, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x45, - 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x6f, 0x0a, 0x16, 0x6d, 0x61, - 0x74, 0x63, 0x68, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6c, 0x61, - 0x62, 0x65, 0x6c, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x39, 0x2e, 0x6b, 0x66, 0x70, - 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x2e, 0x50, 0x6f, 0x64, 0x41, - 0x66, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x79, 
0x54, 0x65, 0x72, 0x6d, 0x2e, 0x4d, 0x61, 0x74, 0x63, - 0x68, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x14, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x4e, 0x61, 0x6d, 0x65, - 0x73, 0x70, 0x61, 0x63, 0x65, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x1b, 0x0a, 0x06, 0x77, - 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x06, 0x77, - 0x65, 0x69, 0x67, 0x68, 0x74, 0x88, 0x01, 0x01, 0x12, 0x17, 0x0a, 0x04, 0x61, 0x6e, 0x74, 0x69, - 0x18, 0x08, 0x20, 0x01, 0x28, 0x08, 0x48, 0x01, 0x52, 0x04, 0x61, 0x6e, 0x74, 0x69, 0x88, 0x01, - 0x01, 0x1a, 0x41, 0x0a, 0x13, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x50, 0x6f, 0x64, 0x4c, 0x61, 0x62, - 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x47, 0x0a, 0x19, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x4e, 0x61, 0x6d, - 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, - 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, - 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x09, 0x0a, - 0x07, 0x5f, 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x61, 0x6e, 0x74, - 0x69, 0x22, 0xaa, 0x01, 0x0a, 0x0d, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x44, 0x69, 0x72, 0x4d, 0x6f, - 0x75, 0x6e, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x76, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x5f, 0x6e, 0x61, - 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x76, 0x6f, 0x6c, 0x75, 0x6d, 0x65, - 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x70, 0x61, - 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x50, - 0x61, 0x74, 0x68, 0x12, 0x1b, 0x0a, 0x06, 0x6d, 0x65, 0x64, 0x69, 0x75, 0x6d, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x6d, 0x65, 0x64, 0x69, 0x75, 0x6d, 0x88, 0x01, 0x01, - 0x12, 0x22, 0x0a, 0x0a, 0x73, 0x69, 0x7a, 0x65, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x04, - 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x09, 0x73, 0x69, 0x7a, 0x65, 0x4c, 0x69, 0x6d, 0x69, - 0x74, 0x88, 0x01, 0x01, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x6d, 0x65, 0x64, 0x69, 0x75, 0x6d, 0x42, - 0x0d, 0x0a, 0x0b, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x42, 0x49, - 0x5a, 0x47, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, - 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, - 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x5f, 0x70, 0x6c, 0x61, 0x74, 0x66, - 0x6f, 0x72, 0x6d, 0x2f, 0x67, 0x6f, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, - 0x73, 0x70, 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, 0x6d, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x33, -} +const file_kubernetes_executor_config_proto_rawDesc = "" + + "\n" + + " kubernetes_executor_config.proto\x12\x0ekfp_kubernetes\x1a\x1cgoogle/protobuf/struct.proto\x1a\x13pipeline_spec.proto\"\xc7\t\n" + + "\x18KubernetesExecutorConfig\x12H\n" + + "\x10secret_as_volume\x18\x01 \x03(\v2\x1e.kfp_kubernetes.SecretAsVolumeR\x0esecretAsVolume\x12?\n" + + 
"\rsecret_as_env\x18\x02 \x03(\v2\x1b.kfp_kubernetes.SecretAsEnvR\vsecretAsEnv\x125\n" + + "\tpvc_mount\x18\x03 \x03(\v2\x18.kfp_kubernetes.PvcMountR\bpvcMount\x12A\n" + + "\rnode_selector\x18\x04 \x01(\v2\x1c.kfp_kubernetes.NodeSelectorR\fnodeSelector\x12>\n" + + "\fpod_metadata\x18\x05 \x01(\v2\x1b.kfp_kubernetes.PodMetadataR\vpodMetadata\x12K\n" + + "\x11image_pull_secret\x18\x06 \x03(\v2\x1f.kfp_kubernetes.ImagePullSecretR\x0fimagePullSecret\x12*\n" + + "\x11image_pull_policy\x18\a \x01(\tR\x0fimagePullPolicy\x12R\n" + + "\x14config_map_as_volume\x18\b \x03(\v2!.kfp_kubernetes.ConfigMapAsVolumeR\x11configMapAsVolume\x12I\n" + + "\x11config_map_as_env\x18\t \x03(\v2\x1e.kfp_kubernetes.ConfigMapAsEnvR\x0econfigMapAsEnv\x126\n" + + "\x17active_deadline_seconds\x18\n" + + " \x01(\x03R\x15activeDeadlineSeconds\x12I\n" + + "\x11field_path_as_env\x18\v \x03(\v2\x1e.kfp_kubernetes.FieldPathAsEnvR\x0efieldPathAsEnv\x12<\n" + + "\vtolerations\x18\f \x03(\v2\x1a.kfp_kubernetes.TolerationR\vtolerations\x12`\n" + + "\x18generic_ephemeral_volume\x18\r \x03(\v2&.kfp_kubernetes.GenericEphemeralVolumeR\x16genericEphemeralVolume\x12E\n" + + "\rnode_affinity\x18\x0e \x03(\v2 .kfp_kubernetes.NodeAffinityTermR\fnodeAffinity\x12B\n" + + "\fpod_affinity\x18\x0f \x03(\v2\x1f.kfp_kubernetes.PodAffinityTermR\vpodAffinity\x12W\n" + + "\x15enabled_shared_memory\x18\x10 \x01(\v2#.kfp_kubernetes.EnabledSharedMemoryR\x13enabledSharedMemory\x12G\n" + + "\x10empty_dir_mounts\x18\x11 \x03(\v2\x1d.kfp_kubernetes.EmptyDirMountR\x0eemptyDirMounts\"J\n" + + "\x13EnabledSharedMemory\x12\x1f\n" + + "\vvolume_name\x18\x01 \x01(\tR\n" + + "volumeName\x12\x12\n" + + "\x04size\x18\x02 \x01(\tR\x04size\"\xe7\x01\n" + + "\x0eSecretAsVolume\x12#\n" + + "\vsecret_name\x18\x01 \x01(\tB\x02\x18\x01R\n" + + "secretName\x12\x1d\n" + + "\n" + + "mount_path\x18\x02 \x01(\tR\tmountPath\x12\x1f\n" + + "\boptional\x18\x03 \x01(\bH\x00R\boptional\x88\x01\x01\x12c\n" + + "\x15secret_name_parameter\x18\x04 \x01(\v2/.ml_pipelines.TaskInputsSpec.InputParameterSpecR\x13secretNameParameterB\v\n" + + "\t_optional\"\xdf\x02\n" + + "\vSecretAsEnv\x12#\n" + + "\vsecret_name\x18\x01 \x01(\tB\x02\x18\x01R\n" + + "secretName\x12K\n" + + "\n" + + "key_to_env\x18\x02 \x03(\v2-.kfp_kubernetes.SecretAsEnv.SecretKeyToEnvMapR\bkeyToEnv\x12\x1f\n" + + "\boptional\x18\x03 \x01(\bH\x00R\boptional\x88\x01\x01\x12c\n" + + "\x15secret_name_parameter\x18\x04 \x01(\v2/.ml_pipelines.TaskInputsSpec.InputParameterSpecR\x13secretNameParameter\x1aK\n" + + "\x11SecretKeyToEnvMap\x12\x1d\n" + + "\n" + + "secret_key\x18\x01 \x01(\tR\tsecretKey\x12\x17\n" + + "\aenv_var\x18\x02 \x01(\tR\x06envVarB\v\n" + + "\t_optional\"\x81\x03\n" + + "\bPvcMount\x12\x81\x01\n" + + "\x15task_output_parameter\x18\x01 \x01(\v2G.ml_pipelines.TaskInputsSpec.InputParameterSpec.TaskOutputParameterSpecB\x02\x18\x01H\x00R\x13taskOutputParameter\x12 \n" + + "\bconstant\x18\x02 \x01(\tB\x02\x18\x01H\x00R\bconstant\x12@\n" + + "\x19component_input_parameter\x18\x03 \x01(\tB\x02\x18\x01H\x00R\x17componentInputParameter\x12\x1d\n" + + "\n" + + "mount_path\x18\x04 \x01(\tR\tmountPath\x12]\n" + + "\x12pvc_name_parameter\x18\x05 \x01(\v2/.ml_pipelines.TaskInputsSpec.InputParameterSpecR\x10pvcNameParameterB\x0f\n" + + "\rpvc_reference\"\xcf\x02\n" + + "\tCreatePvc\x12\x1b\n" + + "\bpvc_name\x18\x01 \x01(\tH\x00R\apvcName\x12(\n" + + "\x0fpvc_name_suffix\x18\x02 \x01(\tH\x00R\rpvcNameSuffix\x12!\n" + + "\faccess_modes\x18\x03 \x03(\tR\vaccessModes\x12\x12\n" + + "\x04size\x18\x04 \x01(\tR\x04size\x122\n" + + 
"\x15default_storage_class\x18\x05 \x01(\bR\x13defaultStorageClass\x12,\n" + + "\x12storage_class_name\x18\x06 \x01(\tR\x10storageClassName\x12\x1f\n" + + "\vvolume_name\x18\a \x01(\tR\n" + + "volumeName\x129\n" + + "\vannotations\x18\b \x01(\v2\x17.google.protobuf.StructR\vannotationsB\x06\n" + + "\x04name\"\xf7\x01\n" + + "\tDeletePvc\x12}\n" + + "\x15task_output_parameter\x18\x01 \x01(\v2G.ml_pipelines.TaskInputsSpec.InputParameterSpec.TaskOutputParameterSpecH\x00R\x13taskOutputParameter\x12\x1c\n" + + "\bconstant\x18\x02 \x01(\tH\x00R\bconstant\x12<\n" + + "\x19component_input_parameter\x18\x03 \x01(\tH\x00R\x17componentInputParameterB\x0f\n" + + "\rpvc_reference\"\xea\x01\n" + + "\fNodeSelector\x12@\n" + + "\x06labels\x18\x01 \x03(\v2(.kfp_kubernetes.NodeSelector.LabelsEntryR\x06labels\x12]\n" + + "\x12node_selector_json\x18\x02 \x01(\v2/.ml_pipelines.TaskInputsSpec.InputParameterSpecR\x10nodeSelectorJson\x1a9\n" + + "\vLabelsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + + "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\"\x99\x02\n" + + "\vPodMetadata\x12?\n" + + "\x06labels\x18\x01 \x03(\v2'.kfp_kubernetes.PodMetadata.LabelsEntryR\x06labels\x12N\n" + + "\vannotations\x18\x02 \x03(\v2,.kfp_kubernetes.PodMetadata.AnnotationsEntryR\vannotations\x1a9\n" + + "\vLabelsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + + "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\x1a>\n" + + "\x10AnnotationsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + + "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\"\xf8\x01\n" + + "\x11ConfigMapAsVolume\x12*\n" + + "\x0fconfig_map_name\x18\x01 \x01(\tB\x02\x18\x01R\rconfigMapName\x12\x1d\n" + + "\n" + + "mount_path\x18\x02 \x01(\tR\tmountPath\x12\x1f\n" + + "\boptional\x18\x03 \x01(\bH\x00R\boptional\x88\x01\x01\x12j\n" + + "\x19config_map_name_parameter\x18\x04 \x01(\v2/.ml_pipelines.TaskInputsSpec.InputParameterSpecR\x16configMapNameParameterB\v\n" + + "\t_optional\"\x80\x03\n" + + "\x0eConfigMapAsEnv\x12*\n" + + "\x0fconfig_map_name\x18\x01 \x01(\tB\x02\x18\x01R\rconfigMapName\x12Q\n" + + "\n" + + "key_to_env\x18\x02 \x03(\v23.kfp_kubernetes.ConfigMapAsEnv.ConfigMapKeyToEnvMapR\bkeyToEnv\x12j\n" + + "\x19config_map_name_parameter\x18\x03 \x01(\v2/.ml_pipelines.TaskInputsSpec.InputParameterSpecR\x16configMapNameParameter\x12\x1f\n" + + "\boptional\x18\x04 \x01(\bH\x00R\boptional\x88\x01\x01\x1aU\n" + + "\x14ConfigMapKeyToEnvMap\x12$\n" + + "\x0econfig_map_key\x18\x01 \x01(\tR\fconfigMapKey\x12\x17\n" + + "\aenv_var\x18\x02 \x01(\tR\x06envVarB\v\n" + + "\t_optional\"\xaa\x02\n" + + "\x16GenericEphemeralVolume\x12\x1f\n" + + "\vvolume_name\x18\x01 \x01(\tR\n" + + "volumeName\x12\x1d\n" + + "\n" + + "mount_path\x18\x02 \x01(\tR\tmountPath\x12!\n" + + "\faccess_modes\x18\x03 \x03(\tR\vaccessModes\x12\x12\n" + + "\x04size\x18\x04 \x01(\tR\x04size\x122\n" + + "\x15default_storage_class\x18\x05 \x01(\bR\x13defaultStorageClass\x12,\n" + + "\x12storage_class_name\x18\x06 \x01(\tR\x10storageClassName\x127\n" + + "\bmetadata\x18\a \x01(\v2\x1b.kfp_kubernetes.PodMetadataR\bmetadata\"\x9b\x01\n" + + "\x0fImagePullSecret\x12#\n" + + "\vsecret_name\x18\x01 \x01(\tB\x02\x18\x01R\n" + + "secretName\x12c\n" + + "\x15secret_name_parameter\x18\x02 \x01(\v2/.ml_pipelines.TaskInputsSpec.InputParameterSpecR\x13secretNameParameter\"C\n" + + "\x0eFieldPathAsEnv\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\x12\x1d\n" + + "\n" + + "field_path\x18\x02 \x01(\tR\tfieldPath\"\x8d\x02\n" + + "\n" + + "Toleration\x12\x10\n" + + 
"\x03key\x18\x01 \x01(\tR\x03key\x12\x1a\n" + + "\boperator\x18\x02 \x01(\tR\boperator\x12\x14\n" + + "\x05value\x18\x03 \x01(\tR\x05value\x12\x16\n" + + "\x06effect\x18\x04 \x01(\tR\x06effect\x122\n" + + "\x12toleration_seconds\x18\x05 \x01(\x03H\x00R\x11tolerationSeconds\x88\x01\x01\x12X\n" + + "\x0ftoleration_json\x18\x06 \x01(\v2/.ml_pipelines.TaskInputsSpec.InputParameterSpecR\x0etolerationJsonB\x15\n" + + "\x13_toleration_seconds\"[\n" + + "\x13SelectorRequirement\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12\x1a\n" + + "\boperator\x18\x02 \x01(\tR\boperator\x12\x16\n" + + "\x06values\x18\x03 \x03(\tR\x06values\"\xb3\x02\n" + + "\x10NodeAffinityTerm\x12P\n" + + "\x11match_expressions\x18\x01 \x03(\v2#.kfp_kubernetes.SelectorRequirementR\x10matchExpressions\x12F\n" + + "\fmatch_fields\x18\x02 \x03(\v2#.kfp_kubernetes.SelectorRequirementR\vmatchFields\x12\x1b\n" + + "\x06weight\x18\x03 \x01(\x05H\x00R\x06weight\x88\x01\x01\x12]\n" + + "\x12node_affinity_json\x18\x04 \x01(\v2/.ml_pipelines.TaskInputsSpec.InputParameterSpecR\x10nodeAffinityJsonB\t\n" + + "\a_weight\"\xb8\x05\n" + + "\x0fPodAffinityTerm\x12W\n" + + "\x15match_pod_expressions\x18\x01 \x03(\v2#.kfp_kubernetes.SelectorRequirementR\x13matchPodExpressions\x12]\n" + + "\x10match_pod_labels\x18\x02 \x03(\v23.kfp_kubernetes.PodAffinityTerm.MatchPodLabelsEntryR\x0ematchPodLabels\x12!\n" + + "\ftopology_key\x18\x03 \x01(\tR\vtopologyKey\x12\x1e\n" + + "\n" + + "namespaces\x18\x04 \x03(\tR\n" + + "namespaces\x12c\n" + + "\x1bmatch_namespace_expressions\x18\x05 \x03(\v2#.kfp_kubernetes.SelectorRequirementR\x19matchNamespaceExpressions\x12o\n" + + "\x16match_namespace_labels\x18\x06 \x03(\v29.kfp_kubernetes.PodAffinityTerm.MatchNamespaceLabelsEntryR\x14matchNamespaceLabels\x12\x1b\n" + + "\x06weight\x18\a \x01(\x05H\x00R\x06weight\x88\x01\x01\x12\x17\n" + + "\x04anti\x18\b \x01(\bH\x01R\x04anti\x88\x01\x01\x1aA\n" + + "\x13MatchPodLabelsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + + "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\x1aG\n" + + "\x19MatchNamespaceLabelsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + + "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01B\t\n" + + "\a_weightB\a\n" + + "\x05_anti\"\xaa\x01\n" + + "\rEmptyDirMount\x12\x1f\n" + + "\vvolume_name\x18\x01 \x01(\tR\n" + + "volumeName\x12\x1d\n" + + "\n" + + "mount_path\x18\x02 \x01(\tR\tmountPath\x12\x1b\n" + + "\x06medium\x18\x03 \x01(\tH\x00R\x06medium\x88\x01\x01\x12\"\n" + + "\n" + + "size_limit\x18\x04 \x01(\tH\x01R\tsizeLimit\x88\x01\x01B\t\n" + + "\a_mediumB\r\n" + + "\v_size_limitBIZGgithub.com/kubeflow/pipelines/kubernetes_platform/go/kubernetesplatformb\x06proto3" var ( file_kubernetes_executor_config_proto_rawDescOnce sync.Once - file_kubernetes_executor_config_proto_rawDescData = file_kubernetes_executor_config_proto_rawDesc + file_kubernetes_executor_config_proto_rawDescData []byte ) func file_kubernetes_executor_config_proto_rawDescGZIP() []byte { file_kubernetes_executor_config_proto_rawDescOnce.Do(func() { - file_kubernetes_executor_config_proto_rawDescData = protoimpl.X.CompressGZIP(file_kubernetes_executor_config_proto_rawDescData) + file_kubernetes_executor_config_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_kubernetes_executor_config_proto_rawDesc), len(file_kubernetes_executor_config_proto_rawDesc))) }) return file_kubernetes_executor_config_proto_rawDescData } var file_kubernetes_executor_config_proto_msgTypes = make([]protoimpl.MessageInfo, 26) -var 
file_kubernetes_executor_config_proto_goTypes = []interface{}{ +var file_kubernetes_executor_config_proto_goTypes = []any{ (*KubernetesExecutorConfig)(nil), // 0: kfp_kubernetes.KubernetesExecutorConfig (*EnabledSharedMemory)(nil), // 1: kfp_kubernetes.EnabledSharedMemory (*SecretAsVolume)(nil), // 2: kfp_kubernetes.SecretAsVolume @@ -2245,15 +2015,16 @@ var file_kubernetes_executor_config_proto_depIdxs = []int32{ 26, // 31: kfp_kubernetes.Toleration.toleration_json:type_name -> ml_pipelines.TaskInputsSpec.InputParameterSpec 15, // 32: kfp_kubernetes.NodeAffinityTerm.match_expressions:type_name -> kfp_kubernetes.SelectorRequirement 15, // 33: kfp_kubernetes.NodeAffinityTerm.match_fields:type_name -> kfp_kubernetes.SelectorRequirement - 15, // 34: kfp_kubernetes.PodAffinityTerm.match_pod_expressions:type_name -> kfp_kubernetes.SelectorRequirement - 24, // 35: kfp_kubernetes.PodAffinityTerm.match_pod_labels:type_name -> kfp_kubernetes.PodAffinityTerm.MatchPodLabelsEntry - 15, // 36: kfp_kubernetes.PodAffinityTerm.match_namespace_expressions:type_name -> kfp_kubernetes.SelectorRequirement - 25, // 37: kfp_kubernetes.PodAffinityTerm.match_namespace_labels:type_name -> kfp_kubernetes.PodAffinityTerm.MatchNamespaceLabelsEntry - 38, // [38:38] is the sub-list for method output_type - 38, // [38:38] is the sub-list for method input_type - 38, // [38:38] is the sub-list for extension type_name - 38, // [38:38] is the sub-list for extension extendee - 0, // [0:38] is the sub-list for field type_name + 26, // 34: kfp_kubernetes.NodeAffinityTerm.node_affinity_json:type_name -> ml_pipelines.TaskInputsSpec.InputParameterSpec + 15, // 35: kfp_kubernetes.PodAffinityTerm.match_pod_expressions:type_name -> kfp_kubernetes.SelectorRequirement + 24, // 36: kfp_kubernetes.PodAffinityTerm.match_pod_labels:type_name -> kfp_kubernetes.PodAffinityTerm.MatchPodLabelsEntry + 15, // 37: kfp_kubernetes.PodAffinityTerm.match_namespace_expressions:type_name -> kfp_kubernetes.SelectorRequirement + 25, // 38: kfp_kubernetes.PodAffinityTerm.match_namespace_labels:type_name -> kfp_kubernetes.PodAffinityTerm.MatchNamespaceLabelsEntry + 39, // [39:39] is the sub-list for method output_type + 39, // [39:39] is the sub-list for method input_type + 39, // [39:39] is the sub-list for extension type_name + 39, // [39:39] is the sub-list for extension extendee + 0, // [0:39] is the sub-list for field type_name } func init() { file_kubernetes_executor_config_proto_init() } @@ -2261,285 +2032,33 @@ func file_kubernetes_executor_config_proto_init() { if File_kubernetes_executor_config_proto != nil { return } - if !protoimpl.UnsafeEnabled { - file_kubernetes_executor_config_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*KubernetesExecutorConfig); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_kubernetes_executor_config_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*EnabledSharedMemory); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_kubernetes_executor_config_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SecretAsVolume); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_kubernetes_executor_config_proto_msgTypes[3].Exporter = func(v interface{}, i 
int) interface{} { - switch v := v.(*SecretAsEnv); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_kubernetes_executor_config_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PvcMount); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_kubernetes_executor_config_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreatePvc); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_kubernetes_executor_config_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeletePvc); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_kubernetes_executor_config_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*NodeSelector); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_kubernetes_executor_config_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PodMetadata); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_kubernetes_executor_config_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ConfigMapAsVolume); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_kubernetes_executor_config_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ConfigMapAsEnv); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_kubernetes_executor_config_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GenericEphemeralVolume); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_kubernetes_executor_config_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ImagePullSecret); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_kubernetes_executor_config_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FieldPathAsEnv); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_kubernetes_executor_config_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Toleration); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_kubernetes_executor_config_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SelectorRequirement); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_kubernetes_executor_config_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { - switch v := 
v.(*NodeAffinityTerm); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_kubernetes_executor_config_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PodAffinityTerm); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_kubernetes_executor_config_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*EmptyDirMount); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_kubernetes_executor_config_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SecretAsEnv_SecretKeyToEnvMap); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_kubernetes_executor_config_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ConfigMapAsEnv_ConfigMapKeyToEnvMap); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_kubernetes_executor_config_proto_msgTypes[2].OneofWrappers = []interface{}{} - file_kubernetes_executor_config_proto_msgTypes[4].OneofWrappers = []interface{}{ + file_kubernetes_executor_config_proto_msgTypes[2].OneofWrappers = []any{} + file_kubernetes_executor_config_proto_msgTypes[3].OneofWrappers = []any{} + file_kubernetes_executor_config_proto_msgTypes[4].OneofWrappers = []any{ (*PvcMount_TaskOutputParameter)(nil), (*PvcMount_Constant)(nil), (*PvcMount_ComponentInputParameter)(nil), } - file_kubernetes_executor_config_proto_msgTypes[5].OneofWrappers = []interface{}{ + file_kubernetes_executor_config_proto_msgTypes[5].OneofWrappers = []any{ (*CreatePvc_PvcName)(nil), (*CreatePvc_PvcNameSuffix)(nil), } - file_kubernetes_executor_config_proto_msgTypes[6].OneofWrappers = []interface{}{ + file_kubernetes_executor_config_proto_msgTypes[6].OneofWrappers = []any{ (*DeletePvc_TaskOutputParameter)(nil), (*DeletePvc_Constant)(nil), (*DeletePvc_ComponentInputParameter)(nil), } - file_kubernetes_executor_config_proto_msgTypes[9].OneofWrappers = []interface{}{} - file_kubernetes_executor_config_proto_msgTypes[14].OneofWrappers = []interface{}{} - file_kubernetes_executor_config_proto_msgTypes[16].OneofWrappers = []interface{}{} - file_kubernetes_executor_config_proto_msgTypes[17].OneofWrappers = []interface{}{} - file_kubernetes_executor_config_proto_msgTypes[18].OneofWrappers = []interface{}{} + file_kubernetes_executor_config_proto_msgTypes[9].OneofWrappers = []any{} + file_kubernetes_executor_config_proto_msgTypes[10].OneofWrappers = []any{} + file_kubernetes_executor_config_proto_msgTypes[14].OneofWrappers = []any{} + file_kubernetes_executor_config_proto_msgTypes[16].OneofWrappers = []any{} + file_kubernetes_executor_config_proto_msgTypes[17].OneofWrappers = []any{} + file_kubernetes_executor_config_proto_msgTypes[18].OneofWrappers = []any{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_kubernetes_executor_config_proto_rawDesc, + RawDescriptor: unsafe.Slice(unsafe.StringData(file_kubernetes_executor_config_proto_rawDesc), len(file_kubernetes_executor_config_proto_rawDesc)), NumEnums: 0, NumMessages: 26, NumExtensions: 0, @@ -2550,7 
+2069,6 @@ func file_kubernetes_executor_config_proto_init() { MessageInfos: file_kubernetes_executor_config_proto_msgTypes, }.Build() File_kubernetes_executor_config_proto = out.File - file_kubernetes_executor_config_proto_rawDesc = nil file_kubernetes_executor_config_proto_goTypes = nil file_kubernetes_executor_config_proto_depIdxs = nil } diff --git a/kubernetes_platform/proto/kubernetes_executor_config.proto b/kubernetes_platform/proto/kubernetes_executor_config.proto index c376486e0a8..43361395cc3 100644 --- a/kubernetes_platform/proto/kubernetes_executor_config.proto +++ b/kubernetes_platform/proto/kubernetes_executor_config.proto @@ -73,6 +73,8 @@ message SecretAsEnv { } repeated SecretKeyToEnvMap key_to_env = 2; + // An optional boolean value indicating whether the Secret must be defined. + optional bool optional = 3; // Name of the Secret. ml_pipelines.TaskInputsSpec.InputParameterSpec secret_name_parameter = 4; @@ -176,6 +178,8 @@ message ConfigMapAsEnv { // Name of the ConfigMap. ml_pipelines.TaskInputsSpec.InputParameterSpec config_map_name_parameter = 3; + // An optional boolean value indicating whether the ConfigMap must be defined. + optional bool optional = 4; } message GenericEphemeralVolume { @@ -241,7 +245,14 @@ message NodeAffinityTerm { repeated SelectorRequirement match_expressions = 1; repeated SelectorRequirement match_fields = 2; //Setting the weight makes it use PreferredDuringSchedulingIgnoredDuringExecution rules instead of RequiredDuringSchedulingIgnoredDuringExecution rules - optional int32 weight = 3; + optional int32 weight = 3; + // Provide a JSON struct of node affinity. Takes precedence over PreferredDuringSchedulingIgnoredDuringExecution rules/RequiredDuringSchedulingIgnoredDuringExecution rules. + // The JSON must follow Kubernetes + // NodeAffinity structure: + // https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.27/#nodeaffinity-v1-core + // Example: + // { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ ... ] } } + ml_pipelines.TaskInputsSpec.InputParameterSpec node_affinity_json = 4; } message PodAffinityTerm { diff --git a/kubernetes_platform/python/create_release_branch.sh b/kubernetes_platform/python/create_release_branch.sh index 710050b0564..b8a0b5b4e09 100755 --- a/kubernetes_platform/python/create_release_branch.sh +++ b/kubernetes_platform/python/create_release_branch.sh @@ -51,7 +51,6 @@ else git add $PKG_ROOT/docs/.readthedocs.yml git add $REPO_ROOT/.readthedocs.yml git add $REPO_ROOT/kubernetes_platform/.gitignore - git add $REPO_ROOT/*_pb2.py echo "Next steps:" echo "1. Inspect and commit the modified files." 
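The new SecretAsEnv.optional / ConfigMapAsEnv.optional fields and the NodeAffinityTerm.node_affinity_json field added above are surfaced through the kfp-kubernetes Python helpers changed later in this diff (use_secret_as_env, use_config_map_as_env, add_node_affinity, add_node_affinity_json). A minimal pipeline sketch of how they are intended to be used; the trainer component and the my-secret / my-config object names are placeholders, not part of this change:

from kfp import dsl
from kfp import kubernetes


@dsl.component
def trainer() -> str:
    return 'done'


@dsl.pipeline
def node_affinity_demo():
    task = trainer()

    # SecretAsEnv.optional / ConfigMapAsEnv.optional: the referenced object
    # may be absent when the Pod starts.
    kubernetes.use_secret_as_env(
        task,
        secret_name='my-secret',  # placeholder
        secret_key_to_env={'password': 'PASSWORD'},
        optional=True,
    )
    kubernetes.use_config_map_as_env(
        task,
        config_map_name='my-config',  # placeholder
        config_map_key_to_env={'foo': 'FOO'},
        optional=True,
    )

    # Structured NodeAffinityTerm built from match expressions; setting a
    # weight (1-100) makes the rule preferred rather than required.
    kubernetes.add_node_affinity(
        task,
        match_expressions=[{
            'key': 'disktype',
            'operator': 'In',
            'values': ['ssd'],
        }],
        weight=50,
    )

    # NodeAffinityTerm.node_affinity_json: a raw Kubernetes NodeAffinity
    # struct (or a pipeline parameter); per the proto comment above it takes
    # precedence over the structured rules.
    kubernetes.add_node_affinity_json(
        task,
        node_affinity_json={
            'requiredDuringSchedulingIgnoredDuringExecution': {
                'nodeSelectorTerms': [{
                    'matchExpressions': [{
                        'key': 'kubernetes.io/arch',
                        'operator': 'In',
                        'values': ['amd64'],
                    }],
                }],
            },
        },
    )

When node_affinity_json is given a dict, the new helper validates it against the kubernetes client's V1NodeAffinity model before storing it in the task's platform_config; a pipeline parameter is passed through as an InputParameterSpec, which is what the new proto field exists to carry.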
diff --git a/kubernetes_platform/python/docs/conf.py b/kubernetes_platform/python/docs/conf.py index e55255a3236..2d964b6a7df 100644 --- a/kubernetes_platform/python/docs/conf.py +++ b/kubernetes_platform/python/docs/conf.py @@ -138,12 +138,19 @@ def decorator(func): 'version_dropdown': True, 'version_info': [ + { + 'version': + 'https://kfp-kubernetes.readthedocs.io/en/kfp-kubernetes-2.14.0/', + 'title': + '2.14.0', + 'aliases': ['stable'], + }, { 'version': 'https://kfp-kubernetes.readthedocs.io/en/kfp-kubernetes-1.5.0/', 'title': '1.5.0', - 'aliases': ['stable'], + 'aliases': [], }, { 'version': diff --git a/kubernetes_platform/python/kfp/kubernetes/__init__.py b/kubernetes_platform/python/kfp/kubernetes/__init__.py index 5bb7a4575e4..5a924bd3e60 100644 --- a/kubernetes_platform/python/kfp/kubernetes/__init__.py +++ b/kubernetes_platform/python/kfp/kubernetes/__init__.py @@ -12,12 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '1.5.0' +__version__ = '2.14.0' __all__ = [ 'add_ephemeral_volume', 'add_node_selector', 'add_node_selector_json', + 'add_node_affinity', + 'add_node_affinity_json', 'add_pod_annotation', 'add_pod_label', 'add_toleration', @@ -44,6 +46,8 @@ from kfp.kubernetes.image import set_image_pull_secrets from kfp.kubernetes.node_selector import add_node_selector from kfp.kubernetes.node_selector import add_node_selector_json +from kfp.kubernetes.node_affinity import add_node_affinity +from kfp.kubernetes.node_affinity import add_node_affinity_json from kfp.kubernetes.pod_metadata import add_pod_annotation from kfp.kubernetes.pod_metadata import add_pod_label from kfp.kubernetes.secret import use_secret_as_env diff --git a/kubernetes_platform/python/kfp/kubernetes/common.py b/kubernetes_platform/python/kfp/kubernetes/common.py index 1288366c339..99fe1890418 100644 --- a/kubernetes_platform/python/kfp/kubernetes/common.py +++ b/kubernetes_platform/python/kfp/kubernetes/common.py @@ -21,6 +21,19 @@ from kfp.pipeline_spec import pipeline_spec_pb2 from kfp.kubernetes import kubernetes_executor_config_pb2 as pb +def camel_to_python_case(name: str) -> str: + import re + s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name) + return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower() + +def deserialize_dict_to_k8s_model_keys(obj): + if isinstance(obj, dict): + return {camel_to_python_case(k): deserialize_dict_to_k8s_model_keys(v) for k, v in obj.items()} + elif isinstance(obj, list): + return [deserialize_dict_to_k8s_model_keys(i) for i in obj] + else: + return obj + def get_existing_kubernetes_config_as_message( task: 'PipelineTask') -> pb.KubernetesExecutorConfig: cur_k8_config_dict = task.platform_config.get('kubernetes', {}) diff --git a/kubernetes_platform/python/kfp/kubernetes/config_map.py b/kubernetes_platform/python/kfp/kubernetes/config_map.py index dfa024af4cb..b4a18c43ee3 100644 --- a/kubernetes_platform/python/kfp/kubernetes/config_map.py +++ b/kubernetes_platform/python/kfp/kubernetes/config_map.py @@ -24,6 +24,7 @@ def use_config_map_as_env( task: PipelineTask, config_map_name: Union[pipeline_channel.PipelineParameterChannel, str], config_map_key_to_env: Dict[str, str], + optional: bool = False, ) -> PipelineTask: """Use a Kubernetes ConfigMap as an environment variable as described by the `Kubernetes documentation <https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/#define-container-environment-variables-using-configmap-data>`_.
@@ -32,6 +33,7 @@ def use_config_map_as_env( task: Pipeline task. config_map_name: Name of the ConfigMap. config_map_key_to_env: Dictionary of ConfigMap key to environment variable name. For example, ``{'foo': 'FOO'}`` sets the value of the ConfigMap's foo field to the environment variable ``FOO``. + optional: Optional field specifying whether the ConfigMap must be defined. Returns: Task object with updated ConfigMap configuration. @@ -45,7 +47,7 @@ def use_config_map_as_env( env_var=env_var, ) for config_map_key, env_var in config_map_key_to_env.items() ] - config_map_as_env = pb.ConfigMapAsEnv(key_to_env=key_to_env) + config_map_as_env = pb.ConfigMapAsEnv(key_to_env=key_to_env, optional=optional) config_map_name_parameter = common.parse_k8s_parameter_input(config_map_name, task) config_map_as_env.config_map_name_parameter.CopyFrom(config_map_name_parameter) diff --git a/kubernetes_platform/python/kfp/kubernetes/kubernetes_executor_config_pb2.py b/kubernetes_platform/python/kfp/kubernetes/kubernetes_executor_config_pb2.py index f77fc8d8c7b..5a692710f9a 100644 --- a/kubernetes_platform/python/kfp/kubernetes/kubernetes_executor_config_pb2.py +++ b/kubernetes_platform/python/kfp/kubernetes/kubernetes_executor_config_pb2.py @@ -1,11 +1,22 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE # source: kubernetes_executor_config.proto +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'kubernetes_executor_config.proto' +) # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -14,90 +25,90 @@ from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 from kfp.pipeline_spec import pipeline_spec_pb2 as pipeline__spec__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n kubernetes_executor_config.proto\x12\x0ekfp_kubernetes\x1a\x1cgoogle/protobuf/struct.proto\x1a\x13pipeline_spec.proto\"\xb4\x07\n\x18KubernetesExecutorConfig\x12\x38\n\x10secret_as_volume\x18\x01 \x03(\x0b\x32\x1e.kfp_kubernetes.SecretAsVolume\x12\x32\n\rsecret_as_env\x18\x02 \x03(\x0b\x32\x1b.kfp_kubernetes.SecretAsEnv\x12+\n\tpvc_mount\x18\x03 \x03(\x0b\x32\x18.kfp_kubernetes.PvcMount\x12\x33\n\rnode_selector\x18\x04 \x01(\x0b\x32\x1c.kfp_kubernetes.NodeSelector\x12\x31\n\x0cpod_metadata\x18\x05 \x01(\x0b\x32\x1b.kfp_kubernetes.PodMetadata\x12:\n\x11image_pull_secret\x18\x06 \x03(\x0b\x32\x1f.kfp_kubernetes.ImagePullSecret\x12\x19\n\x11image_pull_policy\x18\x07 \x01(\t\x12?\n\x14\x63onfig_map_as_volume\x18\x08 \x03(\x0b\x32!.kfp_kubernetes.ConfigMapAsVolume\x12\x39\n\x11\x63onfig_map_as_env\x18\t \x03(\x0b\x32\x1e.kfp_kubernetes.ConfigMapAsEnv\x12\x1f\n\x17\x61\x63tive_deadline_seconds\x18\n \x01(\x03\x12\x39\n\x11\x66ield_path_as_env\x18\x0b \x03(\x0b\x32\x1e.kfp_kubernetes.FieldPathAsEnv\x12/\n\x0btolerations\x18\x0c \x03(\x0b\x32\x1a.kfp_kubernetes.Toleration\x12H\n\x18generic_ephemeral_volume\x18\r \x03(\x0b\x32&.kfp_kubernetes.GenericEphemeralVolume\x12\x37\n\rnode_affinity\x18\x0e \x03(\x0b\x32 
.kfp_kubernetes.NodeAffinityTerm\x12\x35\n\x0cpod_affinity\x18\x0f \x03(\x0b\x32\x1f.kfp_kubernetes.PodAffinityTerm\x12\x42\n\x15\x65nabled_shared_memory\x18\x10 \x01(\x0b\x32#.kfp_kubernetes.EnabledSharedMemory\x12\x37\n\x10\x65mpty_dir_mounts\x18\x11 \x03(\x0b\x32\x1d.kfp_kubernetes.EmptyDirMount\"8\n\x13\x45nabledSharedMemory\x12\x13\n\x0bvolume_name\x18\x01 \x01(\t\x12\x0c\n\x04size\x18\x02 \x01(\t\"\xb1\x01\n\x0eSecretAsVolume\x12\x17\n\x0bsecret_name\x18\x01 \x01(\tB\x02\x18\x01\x12\x12\n\nmount_path\x18\x02 \x01(\t\x12\x15\n\x08optional\x18\x03 \x01(\x08H\x00\x88\x01\x01\x12N\n\x15secret_name_parameter\x18\x04 \x01(\x0b\x32/.ml_pipelines.TaskInputsSpec.InputParameterSpecB\x0b\n\t_optional\"\xf3\x01\n\x0bSecretAsEnv\x12\x17\n\x0bsecret_name\x18\x01 \x01(\tB\x02\x18\x01\x12\x41\n\nkey_to_env\x18\x02 \x03(\x0b\x32-.kfp_kubernetes.SecretAsEnv.SecretKeyToEnvMap\x12N\n\x15secret_name_parameter\x18\x04 \x01(\x0b\x32/.ml_pipelines.TaskInputsSpec.InputParameterSpec\x1a\x38\n\x11SecretKeyToEnvMap\x12\x12\n\nsecret_key\x18\x01 \x01(\t\x12\x0f\n\x07\x65nv_var\x18\x02 \x01(\t\"\xab\x02\n\x08PvcMount\x12l\n\x15task_output_parameter\x18\x01 \x01(\x0b\x32G.ml_pipelines.TaskInputsSpec.InputParameterSpec.TaskOutputParameterSpecB\x02\x18\x01H\x00\x12\x16\n\x08\x63onstant\x18\x02 \x01(\tB\x02\x18\x01H\x00\x12\'\n\x19\x63omponent_input_parameter\x18\x03 \x01(\tB\x02\x18\x01H\x00\x12\x12\n\nmount_path\x18\x04 \x01(\t\x12K\n\x12pvc_name_parameter\x18\x05 \x01(\x0b\x32/.ml_pipelines.TaskInputsSpec.InputParameterSpecB\x0f\n\rpvc_reference\"\xe4\x01\n\tCreatePvc\x12\x12\n\x08pvc_name\x18\x01 \x01(\tH\x00\x12\x19\n\x0fpvc_name_suffix\x18\x02 \x01(\tH\x00\x12\x14\n\x0c\x61\x63\x63\x65ss_modes\x18\x03 \x03(\t\x12\x0c\n\x04size\x18\x04 \x01(\t\x12\x1d\n\x15\x64\x65\x66\x61ult_storage_class\x18\x05 \x01(\x08\x12\x1a\n\x12storage_class_name\x18\x06 \x01(\t\x12\x13\n\x0bvolume_name\x18\x07 \x01(\t\x12,\n\x0b\x61nnotations\x18\x08 \x01(\x0b\x32\x17.google.protobuf.StructB\x06\n\x04name\"\xbf\x01\n\tDeletePvc\x12h\n\x15task_output_parameter\x18\x01 \x01(\x0b\x32G.ml_pipelines.TaskInputsSpec.InputParameterSpec.TaskOutputParameterSpecH\x00\x12\x12\n\x08\x63onstant\x18\x02 \x01(\tH\x00\x12#\n\x19\x63omponent_input_parameter\x18\x03 \x01(\tH\x00\x42\x0f\n\rpvc_reference\"\xc4\x01\n\x0cNodeSelector\x12\x38\n\x06labels\x18\x01 \x03(\x0b\x32(.kfp_kubernetes.NodeSelector.LabelsEntry\x12K\n\x12node_selector_json\x18\x02 \x01(\x0b\x32/.ml_pipelines.TaskInputsSpec.InputParameterSpec\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xec\x01\n\x0bPodMetadata\x12\x37\n\x06labels\x18\x01 \x03(\x0b\x32\'.kfp_kubernetes.PodMetadata.LabelsEntry\x12\x41\n\x0b\x61nnotations\x18\x02 \x03(\x0b\x32,.kfp_kubernetes.PodMetadata.AnnotationsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x32\n\x10\x41nnotationsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xbc\x01\n\x11\x43onfigMapAsVolume\x12\x1b\n\x0f\x63onfig_map_name\x18\x01 \x01(\tB\x02\x18\x01\x12\x12\n\nmount_path\x18\x02 \x01(\t\x12\x15\n\x08optional\x18\x03 \x01(\x08H\x00\x88\x01\x01\x12R\n\x19\x63onfig_map_name_parameter\x18\x04 \x01(\x0b\x32/.ml_pipelines.TaskInputsSpec.InputParameterSpecB\x0b\n\t_optional\"\x8b\x02\n\x0e\x43onfigMapAsEnv\x12\x1b\n\x0f\x63onfig_map_name\x18\x01 \x01(\tB\x02\x18\x01\x12G\n\nkey_to_env\x18\x02 
\x03(\x0b\x32\x33.kfp_kubernetes.ConfigMapAsEnv.ConfigMapKeyToEnvMap\x12R\n\x19\x63onfig_map_name_parameter\x18\x03 \x01(\x0b\x32/.ml_pipelines.TaskInputsSpec.InputParameterSpec\x1a?\n\x14\x43onfigMapKeyToEnvMap\x12\x16\n\x0e\x63onfig_map_key\x18\x01 \x01(\t\x12\x0f\n\x07\x65nv_var\x18\x02 \x01(\t\"\xcf\x01\n\x16GenericEphemeralVolume\x12\x13\n\x0bvolume_name\x18\x01 \x01(\t\x12\x12\n\nmount_path\x18\x02 \x01(\t\x12\x14\n\x0c\x61\x63\x63\x65ss_modes\x18\x03 \x03(\t\x12\x0c\n\x04size\x18\x04 \x01(\t\x12\x1d\n\x15\x64\x65\x66\x61ult_storage_class\x18\x05 \x01(\x08\x12\x1a\n\x12storage_class_name\x18\x06 \x01(\t\x12-\n\x08metadata\x18\x07 \x01(\x0b\x32\x1b.kfp_kubernetes.PodMetadata\"z\n\x0fImagePullSecret\x12\x17\n\x0bsecret_name\x18\x01 \x01(\tB\x02\x18\x01\x12N\n\x15secret_name_parameter\x18\x02 \x01(\x0b\x32/.ml_pipelines.TaskInputsSpec.InputParameterSpec\"2\n\x0e\x46ieldPathAsEnv\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\nfield_path\x18\x02 \x01(\t\"\xcc\x01\n\nToleration\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x10\n\x08operator\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\x12\x0e\n\x06\x65\x66\x66\x65\x63t\x18\x04 \x01(\t\x12\x1f\n\x12toleration_seconds\x18\x05 \x01(\x03H\x00\x88\x01\x01\x12H\n\x0ftoleration_json\x18\x06 \x01(\x0b\x32/.ml_pipelines.TaskInputsSpec.InputParameterSpecB\x15\n\x13_toleration_seconds\"D\n\x13SelectorRequirement\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x10\n\x08operator\x18\x02 \x01(\t\x12\x0e\n\x06values\x18\x03 \x03(\t\"\xad\x01\n\x10NodeAffinityTerm\x12>\n\x11match_expressions\x18\x01 \x03(\x0b\x32#.kfp_kubernetes.SelectorRequirement\x12\x39\n\x0cmatch_fields\x18\x02 \x03(\x0b\x32#.kfp_kubernetes.SelectorRequirement\x12\x13\n\x06weight\x18\x03 \x01(\x05H\x00\x88\x01\x01\x42\t\n\x07_weight\"\xa3\x04\n\x0fPodAffinityTerm\x12\x42\n\x15match_pod_expressions\x18\x01 \x03(\x0b\x32#.kfp_kubernetes.SelectorRequirement\x12M\n\x10match_pod_labels\x18\x02 \x03(\x0b\x32\x33.kfp_kubernetes.PodAffinityTerm.MatchPodLabelsEntry\x12\x14\n\x0ctopology_key\x18\x03 \x01(\t\x12\x12\n\nnamespaces\x18\x04 \x03(\t\x12H\n\x1bmatch_namespace_expressions\x18\x05 \x03(\x0b\x32#.kfp_kubernetes.SelectorRequirement\x12Y\n\x16match_namespace_labels\x18\x06 \x03(\x0b\x32\x39.kfp_kubernetes.PodAffinityTerm.MatchNamespaceLabelsEntry\x12\x13\n\x06weight\x18\x07 \x01(\x05H\x00\x88\x01\x01\x12\x11\n\x04\x61nti\x18\x08 \x01(\x08H\x01\x88\x01\x01\x1a\x35\n\x13MatchPodLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a;\n\x19MatchNamespaceLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07_weightB\x07\n\x05_anti\"\x80\x01\n\rEmptyDirMount\x12\x13\n\x0bvolume_name\x18\x01 \x01(\t\x12\x12\n\nmount_path\x18\x02 \x01(\t\x12\x13\n\x06medium\x18\x03 \x01(\tH\x00\x88\x01\x01\x12\x17\n\nsize_limit\x18\x04 \x01(\tH\x01\x88\x01\x01\x42\t\n\x07_mediumB\r\n\x0b_size_limitBIZGgithub.com/kubeflow/pipelines/kubernetes_platform/go/kubernetesplatformb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'kubernetes_executor_config_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n kubernetes_executor_config.proto\x12\x0ekfp_kubernetes\x1a\x1cgoogle/protobuf/struct.proto\x1a\x13pipeline_spec.proto\"\xb4\x07\n\x18KubernetesExecutorConfig\x12\x38\n\x10secret_as_volume\x18\x01 \x03(\x0b\x32\x1e.kfp_kubernetes.SecretAsVolume\x12\x32\n\rsecret_as_env\x18\x02 
\x03(\x0b\x32\x1b.kfp_kubernetes.SecretAsEnv\x12+\n\tpvc_mount\x18\x03 \x03(\x0b\x32\x18.kfp_kubernetes.PvcMount\x12\x33\n\rnode_selector\x18\x04 \x01(\x0b\x32\x1c.kfp_kubernetes.NodeSelector\x12\x31\n\x0cpod_metadata\x18\x05 \x01(\x0b\x32\x1b.kfp_kubernetes.PodMetadata\x12:\n\x11image_pull_secret\x18\x06 \x03(\x0b\x32\x1f.kfp_kubernetes.ImagePullSecret\x12\x19\n\x11image_pull_policy\x18\x07 \x01(\t\x12?\n\x14\x63onfig_map_as_volume\x18\x08 \x03(\x0b\x32!.kfp_kubernetes.ConfigMapAsVolume\x12\x39\n\x11\x63onfig_map_as_env\x18\t \x03(\x0b\x32\x1e.kfp_kubernetes.ConfigMapAsEnv\x12\x1f\n\x17\x61\x63tive_deadline_seconds\x18\n \x01(\x03\x12\x39\n\x11\x66ield_path_as_env\x18\x0b \x03(\x0b\x32\x1e.kfp_kubernetes.FieldPathAsEnv\x12/\n\x0btolerations\x18\x0c \x03(\x0b\x32\x1a.kfp_kubernetes.Toleration\x12H\n\x18generic_ephemeral_volume\x18\r \x03(\x0b\x32&.kfp_kubernetes.GenericEphemeralVolume\x12\x37\n\rnode_affinity\x18\x0e \x03(\x0b\x32 .kfp_kubernetes.NodeAffinityTerm\x12\x35\n\x0cpod_affinity\x18\x0f \x03(\x0b\x32\x1f.kfp_kubernetes.PodAffinityTerm\x12\x42\n\x15\x65nabled_shared_memory\x18\x10 \x01(\x0b\x32#.kfp_kubernetes.EnabledSharedMemory\x12\x37\n\x10\x65mpty_dir_mounts\x18\x11 \x03(\x0b\x32\x1d.kfp_kubernetes.EmptyDirMount\"8\n\x13\x45nabledSharedMemory\x12\x13\n\x0bvolume_name\x18\x01 \x01(\t\x12\x0c\n\x04size\x18\x02 \x01(\t\"\xb1\x01\n\x0eSecretAsVolume\x12\x17\n\x0bsecret_name\x18\x01 \x01(\tB\x02\x18\x01\x12\x12\n\nmount_path\x18\x02 \x01(\t\x12\x15\n\x08optional\x18\x03 \x01(\x08H\x00\x88\x01\x01\x12N\n\x15secret_name_parameter\x18\x04 \x01(\x0b\x32/.ml_pipelines.TaskInputsSpec.InputParameterSpecB\x0b\n\t_optional\"\x97\x02\n\x0bSecretAsEnv\x12\x17\n\x0bsecret_name\x18\x01 \x01(\tB\x02\x18\x01\x12\x41\n\nkey_to_env\x18\x02 \x03(\x0b\x32-.kfp_kubernetes.SecretAsEnv.SecretKeyToEnvMap\x12\x15\n\x08optional\x18\x03 \x01(\x08H\x00\x88\x01\x01\x12N\n\x15secret_name_parameter\x18\x04 \x01(\x0b\x32/.ml_pipelines.TaskInputsSpec.InputParameterSpec\x1a\x38\n\x11SecretKeyToEnvMap\x12\x12\n\nsecret_key\x18\x01 \x01(\t\x12\x0f\n\x07\x65nv_var\x18\x02 \x01(\tB\x0b\n\t_optional\"\xab\x02\n\x08PvcMount\x12l\n\x15task_output_parameter\x18\x01 \x01(\x0b\x32G.ml_pipelines.TaskInputsSpec.InputParameterSpec.TaskOutputParameterSpecB\x02\x18\x01H\x00\x12\x16\n\x08\x63onstant\x18\x02 \x01(\tB\x02\x18\x01H\x00\x12\'\n\x19\x63omponent_input_parameter\x18\x03 \x01(\tB\x02\x18\x01H\x00\x12\x12\n\nmount_path\x18\x04 \x01(\t\x12K\n\x12pvc_name_parameter\x18\x05 \x01(\x0b\x32/.ml_pipelines.TaskInputsSpec.InputParameterSpecB\x0f\n\rpvc_reference\"\xe4\x01\n\tCreatePvc\x12\x12\n\x08pvc_name\x18\x01 \x01(\tH\x00\x12\x19\n\x0fpvc_name_suffix\x18\x02 \x01(\tH\x00\x12\x14\n\x0c\x61\x63\x63\x65ss_modes\x18\x03 \x03(\t\x12\x0c\n\x04size\x18\x04 \x01(\t\x12\x1d\n\x15\x64\x65\x66\x61ult_storage_class\x18\x05 \x01(\x08\x12\x1a\n\x12storage_class_name\x18\x06 \x01(\t\x12\x13\n\x0bvolume_name\x18\x07 \x01(\t\x12,\n\x0b\x61nnotations\x18\x08 \x01(\x0b\x32\x17.google.protobuf.StructB\x06\n\x04name\"\xbf\x01\n\tDeletePvc\x12h\n\x15task_output_parameter\x18\x01 \x01(\x0b\x32G.ml_pipelines.TaskInputsSpec.InputParameterSpec.TaskOutputParameterSpecH\x00\x12\x12\n\x08\x63onstant\x18\x02 \x01(\tH\x00\x12#\n\x19\x63omponent_input_parameter\x18\x03 \x01(\tH\x00\x42\x0f\n\rpvc_reference\"\xc4\x01\n\x0cNodeSelector\x12\x38\n\x06labels\x18\x01 \x03(\x0b\x32(.kfp_kubernetes.NodeSelector.LabelsEntry\x12K\n\x12node_selector_json\x18\x02 
\x01(\x0b\x32/.ml_pipelines.TaskInputsSpec.InputParameterSpec\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xec\x01\n\x0bPodMetadata\x12\x37\n\x06labels\x18\x01 \x03(\x0b\x32\'.kfp_kubernetes.PodMetadata.LabelsEntry\x12\x41\n\x0b\x61nnotations\x18\x02 \x03(\x0b\x32,.kfp_kubernetes.PodMetadata.AnnotationsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x32\n\x10\x41nnotationsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xbc\x01\n\x11\x43onfigMapAsVolume\x12\x1b\n\x0f\x63onfig_map_name\x18\x01 \x01(\tB\x02\x18\x01\x12\x12\n\nmount_path\x18\x02 \x01(\t\x12\x15\n\x08optional\x18\x03 \x01(\x08H\x00\x88\x01\x01\x12R\n\x19\x63onfig_map_name_parameter\x18\x04 \x01(\x0b\x32/.ml_pipelines.TaskInputsSpec.InputParameterSpecB\x0b\n\t_optional\"\xaf\x02\n\x0e\x43onfigMapAsEnv\x12\x1b\n\x0f\x63onfig_map_name\x18\x01 \x01(\tB\x02\x18\x01\x12G\n\nkey_to_env\x18\x02 \x03(\x0b\x32\x33.kfp_kubernetes.ConfigMapAsEnv.ConfigMapKeyToEnvMap\x12R\n\x19\x63onfig_map_name_parameter\x18\x03 \x01(\x0b\x32/.ml_pipelines.TaskInputsSpec.InputParameterSpec\x12\x15\n\x08optional\x18\x04 \x01(\x08H\x00\x88\x01\x01\x1a?\n\x14\x43onfigMapKeyToEnvMap\x12\x16\n\x0e\x63onfig_map_key\x18\x01 \x01(\t\x12\x0f\n\x07\x65nv_var\x18\x02 \x01(\tB\x0b\n\t_optional\"\xcf\x01\n\x16GenericEphemeralVolume\x12\x13\n\x0bvolume_name\x18\x01 \x01(\t\x12\x12\n\nmount_path\x18\x02 \x01(\t\x12\x14\n\x0c\x61\x63\x63\x65ss_modes\x18\x03 \x03(\t\x12\x0c\n\x04size\x18\x04 \x01(\t\x12\x1d\n\x15\x64\x65\x66\x61ult_storage_class\x18\x05 \x01(\x08\x12\x1a\n\x12storage_class_name\x18\x06 \x01(\t\x12-\n\x08metadata\x18\x07 \x01(\x0b\x32\x1b.kfp_kubernetes.PodMetadata\"z\n\x0fImagePullSecret\x12\x17\n\x0bsecret_name\x18\x01 \x01(\tB\x02\x18\x01\x12N\n\x15secret_name_parameter\x18\x02 \x01(\x0b\x32/.ml_pipelines.TaskInputsSpec.InputParameterSpec\"2\n\x0e\x46ieldPathAsEnv\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\nfield_path\x18\x02 \x01(\t\"\xcc\x01\n\nToleration\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x10\n\x08operator\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\x12\x0e\n\x06\x65\x66\x66\x65\x63t\x18\x04 \x01(\t\x12\x1f\n\x12toleration_seconds\x18\x05 \x01(\x03H\x00\x88\x01\x01\x12H\n\x0ftoleration_json\x18\x06 \x01(\x0b\x32/.ml_pipelines.TaskInputsSpec.InputParameterSpecB\x15\n\x13_toleration_seconds\"D\n\x13SelectorRequirement\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x10\n\x08operator\x18\x02 \x01(\t\x12\x0e\n\x06values\x18\x03 \x03(\t\"\xfa\x01\n\x10NodeAffinityTerm\x12>\n\x11match_expressions\x18\x01 \x03(\x0b\x32#.kfp_kubernetes.SelectorRequirement\x12\x39\n\x0cmatch_fields\x18\x02 \x03(\x0b\x32#.kfp_kubernetes.SelectorRequirement\x12\x13\n\x06weight\x18\x03 \x01(\x05H\x00\x88\x01\x01\x12K\n\x12node_affinity_json\x18\x04 \x01(\x0b\x32/.ml_pipelines.TaskInputsSpec.InputParameterSpecB\t\n\x07_weight\"\xa3\x04\n\x0fPodAffinityTerm\x12\x42\n\x15match_pod_expressions\x18\x01 \x03(\x0b\x32#.kfp_kubernetes.SelectorRequirement\x12M\n\x10match_pod_labels\x18\x02 \x03(\x0b\x32\x33.kfp_kubernetes.PodAffinityTerm.MatchPodLabelsEntry\x12\x14\n\x0ctopology_key\x18\x03 \x01(\t\x12\x12\n\nnamespaces\x18\x04 \x03(\t\x12H\n\x1bmatch_namespace_expressions\x18\x05 \x03(\x0b\x32#.kfp_kubernetes.SelectorRequirement\x12Y\n\x16match_namespace_labels\x18\x06 \x03(\x0b\x32\x39.kfp_kubernetes.PodAffinityTerm.MatchNamespaceLabelsEntry\x12\x13\n\x06weight\x18\x07 \x01(\x05H\x00\x88\x01\x01\x12\x11\n\x04\x61nti\x18\x08 
\x01(\x08H\x01\x88\x01\x01\x1a\x35\n\x13MatchPodLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a;\n\x19MatchNamespaceLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07_weightB\x07\n\x05_anti\"\x80\x01\n\rEmptyDirMount\x12\x13\n\x0bvolume_name\x18\x01 \x01(\t\x12\x12\n\nmount_path\x18\x02 \x01(\t\x12\x13\n\x06medium\x18\x03 \x01(\tH\x00\x88\x01\x01\x12\x17\n\nsize_limit\x18\x04 \x01(\tH\x01\x88\x01\x01\x42\t\n\x07_mediumB\r\n\x0b_size_limitBIZGgithub.com/kubeflow/pipelines/kubernetes_platform/go/kubernetesplatformb\x06proto3') - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'ZGgithub.com/kubeflow/pipelines/kubernetes_platform/go/kubernetesplatform' - _SECRETASVOLUME.fields_by_name['secret_name']._options = None - _SECRETASVOLUME.fields_by_name['secret_name']._serialized_options = b'\030\001' - _SECRETASENV.fields_by_name['secret_name']._options = None - _SECRETASENV.fields_by_name['secret_name']._serialized_options = b'\030\001' - _PVCMOUNT.fields_by_name['task_output_parameter']._options = None - _PVCMOUNT.fields_by_name['task_output_parameter']._serialized_options = b'\030\001' - _PVCMOUNT.fields_by_name['constant']._options = None - _PVCMOUNT.fields_by_name['constant']._serialized_options = b'\030\001' - _PVCMOUNT.fields_by_name['component_input_parameter']._options = None - _PVCMOUNT.fields_by_name['component_input_parameter']._serialized_options = b'\030\001' - _NODESELECTOR_LABELSENTRY._options = None - _NODESELECTOR_LABELSENTRY._serialized_options = b'8\001' - _PODMETADATA_LABELSENTRY._options = None - _PODMETADATA_LABELSENTRY._serialized_options = b'8\001' - _PODMETADATA_ANNOTATIONSENTRY._options = None - _PODMETADATA_ANNOTATIONSENTRY._serialized_options = b'8\001' - _CONFIGMAPASVOLUME.fields_by_name['config_map_name']._options = None - _CONFIGMAPASVOLUME.fields_by_name['config_map_name']._serialized_options = b'\030\001' - _CONFIGMAPASENV.fields_by_name['config_map_name']._options = None - _CONFIGMAPASENV.fields_by_name['config_map_name']._serialized_options = b'\030\001' - _IMAGEPULLSECRET.fields_by_name['secret_name']._options = None - _IMAGEPULLSECRET.fields_by_name['secret_name']._serialized_options = b'\030\001' - _PODAFFINITYTERM_MATCHPODLABELSENTRY._options = None - _PODAFFINITYTERM_MATCHPODLABELSENTRY._serialized_options = b'8\001' - _PODAFFINITYTERM_MATCHNAMESPACELABELSENTRY._options = None - _PODAFFINITYTERM_MATCHNAMESPACELABELSENTRY._serialized_options = b'8\001' - _KUBERNETESEXECUTORCONFIG._serialized_start=104 - _KUBERNETESEXECUTORCONFIG._serialized_end=1052 - _ENABLEDSHAREDMEMORY._serialized_start=1054 - _ENABLEDSHAREDMEMORY._serialized_end=1110 - _SECRETASVOLUME._serialized_start=1113 - _SECRETASVOLUME._serialized_end=1290 - _SECRETASENV._serialized_start=1293 - _SECRETASENV._serialized_end=1536 - _SECRETASENV_SECRETKEYTOENVMAP._serialized_start=1480 - _SECRETASENV_SECRETKEYTOENVMAP._serialized_end=1536 - _PVCMOUNT._serialized_start=1539 - _PVCMOUNT._serialized_end=1838 - _CREATEPVC._serialized_start=1841 - _CREATEPVC._serialized_end=2069 - _DELETEPVC._serialized_start=2072 - _DELETEPVC._serialized_end=2263 - _NODESELECTOR._serialized_start=2266 - _NODESELECTOR._serialized_end=2462 - _NODESELECTOR_LABELSENTRY._serialized_start=2417 - _NODESELECTOR_LABELSENTRY._serialized_end=2462 - _PODMETADATA._serialized_start=2465 - _PODMETADATA._serialized_end=2701 - _PODMETADATA_LABELSENTRY._serialized_start=2417 - _PODMETADATA_LABELSENTRY._serialized_end=2462 - 
_PODMETADATA_ANNOTATIONSENTRY._serialized_start=2651 - _PODMETADATA_ANNOTATIONSENTRY._serialized_end=2701 - _CONFIGMAPASVOLUME._serialized_start=2704 - _CONFIGMAPASVOLUME._serialized_end=2892 - _CONFIGMAPASENV._serialized_start=2895 - _CONFIGMAPASENV._serialized_end=3162 - _CONFIGMAPASENV_CONFIGMAPKEYTOENVMAP._serialized_start=3099 - _CONFIGMAPASENV_CONFIGMAPKEYTOENVMAP._serialized_end=3162 - _GENERICEPHEMERALVOLUME._serialized_start=3165 - _GENERICEPHEMERALVOLUME._serialized_end=3372 - _IMAGEPULLSECRET._serialized_start=3374 - _IMAGEPULLSECRET._serialized_end=3496 - _FIELDPATHASENV._serialized_start=3498 - _FIELDPATHASENV._serialized_end=3548 - _TOLERATION._serialized_start=3551 - _TOLERATION._serialized_end=3755 - _SELECTORREQUIREMENT._serialized_start=3757 - _SELECTORREQUIREMENT._serialized_end=3825 - _NODEAFFINITYTERM._serialized_start=3828 - _NODEAFFINITYTERM._serialized_end=4001 - _PODAFFINITYTERM._serialized_start=4004 - _PODAFFINITYTERM._serialized_end=4551 - _PODAFFINITYTERM_MATCHPODLABELSENTRY._serialized_start=4417 - _PODAFFINITYTERM_MATCHPODLABELSENTRY._serialized_end=4470 - _PODAFFINITYTERM_MATCHNAMESPACELABELSENTRY._serialized_start=4472 - _PODAFFINITYTERM_MATCHNAMESPACELABELSENTRY._serialized_end=4531 - _EMPTYDIRMOUNT._serialized_start=4554 - _EMPTYDIRMOUNT._serialized_end=4682 +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'kubernetes_executor_config_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'ZGgithub.com/kubeflow/pipelines/kubernetes_platform/go/kubernetesplatform' + _globals['_SECRETASVOLUME'].fields_by_name['secret_name']._loaded_options = None + _globals['_SECRETASVOLUME'].fields_by_name['secret_name']._serialized_options = b'\030\001' + _globals['_SECRETASENV'].fields_by_name['secret_name']._loaded_options = None + _globals['_SECRETASENV'].fields_by_name['secret_name']._serialized_options = b'\030\001' + _globals['_PVCMOUNT'].fields_by_name['task_output_parameter']._loaded_options = None + _globals['_PVCMOUNT'].fields_by_name['task_output_parameter']._serialized_options = b'\030\001' + _globals['_PVCMOUNT'].fields_by_name['constant']._loaded_options = None + _globals['_PVCMOUNT'].fields_by_name['constant']._serialized_options = b'\030\001' + _globals['_PVCMOUNT'].fields_by_name['component_input_parameter']._loaded_options = None + _globals['_PVCMOUNT'].fields_by_name['component_input_parameter']._serialized_options = b'\030\001' + _globals['_NODESELECTOR_LABELSENTRY']._loaded_options = None + _globals['_NODESELECTOR_LABELSENTRY']._serialized_options = b'8\001' + _globals['_PODMETADATA_LABELSENTRY']._loaded_options = None + _globals['_PODMETADATA_LABELSENTRY']._serialized_options = b'8\001' + _globals['_PODMETADATA_ANNOTATIONSENTRY']._loaded_options = None + _globals['_PODMETADATA_ANNOTATIONSENTRY']._serialized_options = b'8\001' + _globals['_CONFIGMAPASVOLUME'].fields_by_name['config_map_name']._loaded_options = None + _globals['_CONFIGMAPASVOLUME'].fields_by_name['config_map_name']._serialized_options = b'\030\001' + _globals['_CONFIGMAPASENV'].fields_by_name['config_map_name']._loaded_options = None + _globals['_CONFIGMAPASENV'].fields_by_name['config_map_name']._serialized_options = b'\030\001' + _globals['_IMAGEPULLSECRET'].fields_by_name['secret_name']._loaded_options = None + _globals['_IMAGEPULLSECRET'].fields_by_name['secret_name']._serialized_options = 
b'\030\001' + _globals['_PODAFFINITYTERM_MATCHPODLABELSENTRY']._loaded_options = None + _globals['_PODAFFINITYTERM_MATCHPODLABELSENTRY']._serialized_options = b'8\001' + _globals['_PODAFFINITYTERM_MATCHNAMESPACELABELSENTRY']._loaded_options = None + _globals['_PODAFFINITYTERM_MATCHNAMESPACELABELSENTRY']._serialized_options = b'8\001' + _globals['_KUBERNETESEXECUTORCONFIG']._serialized_start=104 + _globals['_KUBERNETESEXECUTORCONFIG']._serialized_end=1052 + _globals['_ENABLEDSHAREDMEMORY']._serialized_start=1054 + _globals['_ENABLEDSHAREDMEMORY']._serialized_end=1110 + _globals['_SECRETASVOLUME']._serialized_start=1113 + _globals['_SECRETASVOLUME']._serialized_end=1290 + _globals['_SECRETASENV']._serialized_start=1293 + _globals['_SECRETASENV']._serialized_end=1572 + _globals['_SECRETASENV_SECRETKEYTOENVMAP']._serialized_start=1503 + _globals['_SECRETASENV_SECRETKEYTOENVMAP']._serialized_end=1559 + _globals['_PVCMOUNT']._serialized_start=1575 + _globals['_PVCMOUNT']._serialized_end=1874 + _globals['_CREATEPVC']._serialized_start=1877 + _globals['_CREATEPVC']._serialized_end=2105 + _globals['_DELETEPVC']._serialized_start=2108 + _globals['_DELETEPVC']._serialized_end=2299 + _globals['_NODESELECTOR']._serialized_start=2302 + _globals['_NODESELECTOR']._serialized_end=2498 + _globals['_NODESELECTOR_LABELSENTRY']._serialized_start=2453 + _globals['_NODESELECTOR_LABELSENTRY']._serialized_end=2498 + _globals['_PODMETADATA']._serialized_start=2501 + _globals['_PODMETADATA']._serialized_end=2737 + _globals['_PODMETADATA_LABELSENTRY']._serialized_start=2453 + _globals['_PODMETADATA_LABELSENTRY']._serialized_end=2498 + _globals['_PODMETADATA_ANNOTATIONSENTRY']._serialized_start=2687 + _globals['_PODMETADATA_ANNOTATIONSENTRY']._serialized_end=2737 + _globals['_CONFIGMAPASVOLUME']._serialized_start=2740 + _globals['_CONFIGMAPASVOLUME']._serialized_end=2928 + _globals['_CONFIGMAPASENV']._serialized_start=2931 + _globals['_CONFIGMAPASENV']._serialized_end=3234 + _globals['_CONFIGMAPASENV_CONFIGMAPKEYTOENVMAP']._serialized_start=3158 + _globals['_CONFIGMAPASENV_CONFIGMAPKEYTOENVMAP']._serialized_end=3221 + _globals['_GENERICEPHEMERALVOLUME']._serialized_start=3237 + _globals['_GENERICEPHEMERALVOLUME']._serialized_end=3444 + _globals['_IMAGEPULLSECRET']._serialized_start=3446 + _globals['_IMAGEPULLSECRET']._serialized_end=3568 + _globals['_FIELDPATHASENV']._serialized_start=3570 + _globals['_FIELDPATHASENV']._serialized_end=3620 + _globals['_TOLERATION']._serialized_start=3623 + _globals['_TOLERATION']._serialized_end=3827 + _globals['_SELECTORREQUIREMENT']._serialized_start=3829 + _globals['_SELECTORREQUIREMENT']._serialized_end=3897 + _globals['_NODEAFFINITYTERM']._serialized_start=3900 + _globals['_NODEAFFINITYTERM']._serialized_end=4150 + _globals['_PODAFFINITYTERM']._serialized_start=4153 + _globals['_PODAFFINITYTERM']._serialized_end=4700 + _globals['_PODAFFINITYTERM_MATCHPODLABELSENTRY']._serialized_start=4566 + _globals['_PODAFFINITYTERM_MATCHPODLABELSENTRY']._serialized_end=4619 + _globals['_PODAFFINITYTERM_MATCHNAMESPACELABELSENTRY']._serialized_start=4621 + _globals['_PODAFFINITYTERM_MATCHNAMESPACELABELSENTRY']._serialized_end=4680 + _globals['_EMPTYDIRMOUNT']._serialized_start=4703 + _globals['_EMPTYDIRMOUNT']._serialized_end=4831 # @@protoc_insertion_point(module_scope) diff --git a/kubernetes_platform/python/kfp/kubernetes/node_affinity.py b/kubernetes_platform/python/kfp/kubernetes/node_affinity.py new file mode 100644 index 00000000000..231abb51d26 --- /dev/null +++ 
b/kubernetes_platform/python/kfp/kubernetes/node_affinity.py
@@ -0,0 +1,137 @@
+# Copyright 2025 The Kubeflow Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import List, Optional, Union
+from google.protobuf import json_format
+from kfp.dsl import PipelineTask, pipeline_channel
+from kfp.kubernetes import common
+from kfp.kubernetes import kubernetes_executor_config_pb2 as pb
+from kubernetes import client
+
+
+def add_node_affinity(
+    task: PipelineTask,
+    match_expressions: Optional[List[dict]] = None,
+    match_fields: Optional[List[dict]] = None,
+    weight: Optional[int] = None,
+) -> PipelineTask:
+    """Add a constraint to the task Pod's `nodeAffinity
+    <https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/pod-v1/#nodeaffinity>`_.
+
+    Each constraint is specified as a match expression (key, operator, values) or match field,
+    corresponding to the PodSpec's ``nodeAffinity`` field.
+
+    Args:
+        task: Pipeline task.
+        match_expressions: List of dicts for matchExpressions (keys: key, operator, values).
+        match_fields: List of dicts for matchFields (keys: key, operator, values).
+        weight: If set, this affinity is preferred (K8s weight 1-100); otherwise required.
+
+    Returns:
+        Task object with added node affinity.
+    """
+    VALID_OPERATORS = {"In", "NotIn", "Exists", "DoesNotExist", "Gt", "Lt"}
+    msg = common.get_existing_kubernetes_config_as_message(task)
+    affinity_term = pb.NodeAffinityTerm()
+
+    _add_affinity_terms(affinity_term.match_expressions, match_expressions, "match_expression", VALID_OPERATORS)
+    _add_affinity_terms(affinity_term.match_fields, match_fields, "match_field", VALID_OPERATORS)
+
+    if weight is not None:
+        if not (1 <= weight <= 100):
+            raise ValueError(f"weight must be between 1 and 100, got {weight}.")
+        affinity_term.weight = weight
+    msg.node_affinity.append(affinity_term)
+    task.platform_config["kubernetes"] = json_format.MessageToDict(msg)
+    return task
+
+def validate_node_affinity(node_affinity_json: dict):
+    """
+    Validates a Python dictionary against the Kubernetes V1NodeAffinity model.
+
+    Args:
+        node_affinity_json: A dictionary representing a Kubernetes NodeAffinity object.
+
+    Returns:
+        None if the dictionary is a valid V1NodeAffinity.
+
+    Raises:
+        ValueError: If the dictionary does not conform to the V1NodeAffinity schema.
+    """
+    from kubernetes import client
+
+    try:
+        k8s_model_dict = common.deserialize_dict_to_k8s_model_keys(node_affinity_json)
+        client.V1NodeAffinity(**k8s_model_dict)
+    except (TypeError, ValueError) as e:
+        raise ValueError(f"Invalid V1NodeAffinity JSON: {e}")
+
+def add_node_affinity_json(
+    task: PipelineTask,
+    node_affinity_json: Union[pipeline_channel.PipelineParameterChannel, dict],
+) -> PipelineTask:
+    """Add a node affinity constraint to the task Pod's `nodeAffinity`
+    using a JSON struct or pipeline parameter.
+
+    This allows parameterized node affinity to be specified,
+    matching the Kubernetes NodeAffinity schema:
+    https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/pod-v1/#nodeaffinity
+
+    Args:
+        task: Pipeline task.
+        node_affinity_json: Dict or pipeline parameter for node affinity. Should match K8s NodeAffinity schema.
+
+    Returns:
+        Task object with added node affinity.
+    """
+
+    if isinstance(node_affinity_json, dict):
+        validate_node_affinity(node_affinity_json)
+    msg = common.get_existing_kubernetes_config_as_message(task)
+    # Remove any previous JSON-based node affinity terms
+    for i in range(len(msg.node_affinity) - 1, -1, -1):
+        if msg.node_affinity[i].HasField("node_affinity_json"):
+            del msg.node_affinity[i]
+    affinity_term = pb.NodeAffinityTerm()
+    input_param_spec = common.parse_k8s_parameter_input(node_affinity_json, task)
+    affinity_term.node_affinity_json.CopyFrom(input_param_spec)
+    msg.node_affinity.append(affinity_term)
+    task.platform_config["kubernetes"] = json_format.MessageToDict(msg)
+    return task
+
+def _add_affinity_terms(
+    affinity_list,
+    selector_terms: Optional[List[dict]],
+    term_kind: str,
+    valid_operators: set,
+):
+    for selector in selector_terms or []:
+        key = selector.get("key")
+        operator = selector.get("operator")
+
+        if not key:
+            raise ValueError(f"Each {term_kind} must have a non-empty 'key'.")
+        if not operator:
+            raise ValueError(f"Each {term_kind} for key '{key}' must have a non-empty 'operator'.")
+        if operator not in valid_operators:
+            raise ValueError(
+                f"Invalid operator '{operator}' for key '{key}' in {term_kind}. "
+                f"Must be one of {sorted(valid_operators)}."
+            )
+
+        affinity_list.add(
+            key=key,
+            operator=operator,
+            values=selector.get("values", []),
+        )
\ No newline at end of file
diff --git a/kubernetes_platform/python/kfp/kubernetes/secret.py b/kubernetes_platform/python/kfp/kubernetes/secret.py
index 72b83b5b06f..adce3e6cb67 100644
--- a/kubernetes_platform/python/kfp/kubernetes/secret.py
+++ b/kubernetes_platform/python/kfp/kubernetes/secret.py
@@ -26,6 +26,7 @@ def use_secret_as_env(
     task: PipelineTask,
     secret_name: Union[pipeline_channel.PipelineParameterChannel, str],
     secret_key_to_env: Dict[str, str],
+    optional: bool = False,
 ) -> PipelineTask:
     """Use a Kubernetes Secret as an environment variable as described by the `Kubernetes documentation https://kubernetes.io/docs/concepts/configuration/secret/#using-secrets-as-environment-variables `_.

@@ -34,6 +35,7 @@ def use_secret_as_env(
         task: Pipeline task.
         secret_name: Name of the Secret.
         secret_key_to_env: Dictionary of Secret data key to environment variable name. For example, ``{'password': 'PASSWORD'}`` sets the data of the Secret's password field to the environment variable ``PASSWORD``.
+        optional: Optional field specifying whether the Secret must be defined.

     Returns:
         Task object with updated secret configuration.
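For quick reference while reviewing, the snippet below is a minimal usage sketch of the helpers introduced above, not part of the patch. It assumes the kfp and kfp-kubernetes packages built from this change are installed; the component name and Secret name are hypothetical.

    from kfp import dsl
    from kfp import kubernetes


    @dsl.component
    def train():
        pass


    @dsl.pipeline(name='node-affinity-example')
    def my_pipeline():
        task = train()
        # Prefer nodes labelled disktype=ssd (weight 1-100); omitting weight
        # would make this a required scheduling constraint instead.
        kubernetes.add_node_affinity(
            task,
            match_expressions=[{'key': 'disktype', 'operator': 'In', 'values': ['ssd']}],
            weight=50,
        )
        # The new optional flag lets the task start even if the Secret is absent.
        kubernetes.use_secret_as_env(
            task,
            secret_name='training-creds',
            secret_key_to_env={'password': 'PASSWORD'},
            optional=True,
        )

This mirrors the unit and snapshot tests added later in this patch, which exercise both the required and preferred (weighted) forms of the affinity helpers.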
@@ -47,7 +49,7 @@ def use_secret_as_env( env_var=env_var, ) for secret_key, env_var in secret_key_to_env.items() ] - secret_as_env = pb.SecretAsEnv(key_to_env=key_to_env) + secret_as_env = pb.SecretAsEnv(key_to_env=key_to_env, optional=optional) secret_name_parameter = common.parse_k8s_parameter_input(secret_name, task) secret_as_env.secret_name_parameter.CopyFrom(secret_name_parameter) diff --git a/kubernetes_platform/python/release.sh b/kubernetes_platform/python/release.sh index 415751a28fa..3f90e9cd328 100755 --- a/kubernetes_platform/python/release.sh +++ b/kubernetes_platform/python/release.sh @@ -37,7 +37,7 @@ else TARGET_TAR_FILE=kfp-kubernetes-$KFP_KUBERNETES_VERSION.tar.gz pushd "$(dirname "$0")" dist_dir=$(mktemp -d) - python3 setup.py sdist --format=gztar --dist-dir "$dist_dir" + python3 setup.py sdist bdist_wheel --dist-dir "$dist_dir" cp "$dist_dir"/*.tar.gz $TARGET_TAR_FILE popd echo "Created package." diff --git a/kubernetes_platform/python/requirements-dev.txt b/kubernetes_platform/python/requirements-dev.txt new file mode 100644 index 00000000000..debfb23fe07 --- /dev/null +++ b/kubernetes_platform/python/requirements-dev.txt @@ -0,0 +1,8 @@ +docformatter==1.4 +isort==5.10.1 +mypy==0.941 +pre-commit==2.19.0 +pycln==2.1.1 +pytest==7.1.2 +pytest-xdist==2.5.0 +yapf==0.32.0 \ No newline at end of file diff --git a/kubernetes_platform/python/requirements.txt b/kubernetes_platform/python/requirements.txt new file mode 100644 index 00000000000..53e31f1962b --- /dev/null +++ b/kubernetes_platform/python/requirements.txt @@ -0,0 +1,5 @@ +# protobuf version should be identical to the one in kfp-pipeline-spec +# api/v2alpha1/python/requirements.txt +protobuf==6.31.1,<7.0 +kfp==2.14.3 +kfp-server-api==2.14.3 diff --git a/kubernetes_platform/python/setup.py b/kubernetes_platform/python/setup.py index ad56ad76e1d..b694fe4c90d 100644 --- a/kubernetes_platform/python/setup.py +++ b/kubernetes_platform/python/setup.py @@ -14,25 +14,21 @@ import os import re +from typing import List import setuptools NAME = 'kfp-kubernetes' -REQUIREMENTS = [ - 'protobuf>=4.21.1,<5', - 'kfp>=2.6.0,<3', -] -DEV_REQUIREMENTS = [ - 'docformatter==1.4', - 'isort==5.10.1', - 'mypy==0.941', - 'pre-commit==2.19.0', - 'pycln==2.1.1', - 'pytest==7.1.2', - 'pytest-xdist==2.5.0', - 'yapf==0.32.0', -] +def get_requirements(requirements_file: str) -> List[str]: + """Read requirements from requirements.in.""" + + file_path = os.path.join(os.path.dirname(__file__), requirements_file) + with open(file_path, 'r') as f: + lines = f.readlines() + lines = [line.strip() for line in lines] + lines = [line for line in lines if not line.startswith('#') and line] + return lines def find_version(*file_path_parts: str) -> str: """Get version from kfp.__init__.__version__.""" @@ -77,10 +73,10 @@ def read_readme() -> str: }, packages=setuptools.find_namespace_packages(include=['kfp.*']), python_requires='>=3.9.0', - install_requires=REQUIREMENTS, + install_requires=get_requirements('requirements.txt'), include_package_data=True, extras_require={ - 'dev': DEV_REQUIREMENTS, + 'dev': get_requirements('requirements-dev.txt'), }, license='Apache 2.0', ) diff --git a/kubernetes_platform/python/test/snapshot/data/create_mount_delete_existing_pvc.yaml b/kubernetes_platform/python/test/snapshot/data/create_mount_delete_existing_pvc.yaml index 088631632f9..18060a50157 100644 --- a/kubernetes_platform/python/test/snapshot/data/create_mount_delete_existing_pvc.yaml +++ 
b/kubernetes_platform/python/test/snapshot/data/create_mount_delete_existing_pvc.yaml @@ -86,7 +86,7 @@ deploymentSpec: - -c - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.11.0'\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.13.0'\ \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ $0\" \"$@\"\n" - sh @@ -161,7 +161,7 @@ root: taskInfo: name: deletepvc schemaVersion: 2.1.0 -sdkVersion: kfp-2.11.0 +sdkVersion: kfp-2.13.0 --- platforms: kubernetes: diff --git a/kubernetes_platform/python/test/snapshot/data/create_mount_delete_existing_pvc_from_task_output.yaml b/kubernetes_platform/python/test/snapshot/data/create_mount_delete_existing_pvc_from_task_output.yaml index 1ebd162adda..f14df06e469 100644 --- a/kubernetes_platform/python/test/snapshot/data/create_mount_delete_existing_pvc_from_task_output.yaml +++ b/kubernetes_platform/python/test/snapshot/data/create_mount_delete_existing_pvc_from_task_output.yaml @@ -92,7 +92,7 @@ deploymentSpec: - -c - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.11.0'\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.13.0'\ \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ $0\" \"$@\"\n" - sh @@ -126,7 +126,7 @@ deploymentSpec: - -c - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.11.0'\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.13.0'\ \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ $0\" \"$@\"\n" - sh @@ -140,7 +140,8 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef get_pvc_name() -> str:\n return 'static-pvc-name-cmdepfto'\n\n" + \ *\n\ndef get_pvc_name() -> str:\n return 'static-pvc-name-cmdepfto'\n\ + \n" image: python:3.9 pipelineInfo: name: my-pipeline @@ -205,7 +206,7 @@ root: taskInfo: name: get-pvc-name schemaVersion: 2.1.0 -sdkVersion: kfp-2.11.0 +sdkVersion: kfp-2.13.0 --- platforms: kubernetes: diff --git a/kubernetes_platform/python/test/snapshot/data/nodeaffinity.py b/kubernetes_platform/python/test/snapshot/data/nodeaffinity.py new file mode 100644 index 00000000000..7631dd3fb35 --- /dev/null +++ b/kubernetes_platform/python/test/snapshot/data/nodeaffinity.py @@ -0,0 +1,102 @@ +# Copyright 2025 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import kfp +from kfp import dsl +from kfp.kubernetes.node_affinity import add_node_affinity, add_node_affinity_json + +@dsl.component +def print_hello_with_json_affinity(): + pass + +@dsl.component +def print_hello_with_preferred_affinity(): + pass + +@dsl.component +def print_hello_with_empty_json(): + pass + +@dsl.pipeline(name="test-node-affinity") +def my_pipeline(): + + # Task 1: JSON-based node affinity with preferred scheduling + task1 = print_hello_with_json_affinity() + task1 = add_node_affinity_json( + task1, + { + "preferredDuringSchedulingIgnoredDuringExecution": [ + { + "weight": 100, + "preference": { + "matchExpressions": [ + { + "key": "disktype", + "operator": "In", + "values": ["ssd"] + } + ] + } + } + ] + } + ) + + # Task 2: Preferred scheduling with weight + task2 = print_hello_with_preferred_affinity() + task2 = add_node_affinity( + task2, + match_expressions=[ + {"key": "zone", "operator": "In", "values": ["us-west-1"]} + ], + weight=50 + ) + + # Task 3: Empty JSON (should not set any affinity) + task3 = print_hello_with_empty_json() + task3 = add_node_affinity_json(task3, {}) + + # Task 4: Complex JSON with multiple preferred terms + task4 = print_hello_with_json_affinity() + task4 = add_node_affinity_json( + task4, + { + "preferredDuringSchedulingIgnoredDuringExecution": [ + { + "weight": 100, + "preference": { + "matchExpressions": [ + { + "key": "zone", + "operator": "In", + "values": ["us-west-1"] + } + ] + } + }, + { + "weight": 50, + "preference": { + "matchExpressions": [ + { + "key": "instance-type", + "operator": "In", + "values": ["n1-standard-4"] + } + ] + } + } + ] + } + ) diff --git a/kubernetes_platform/python/test/snapshot/data/nodeaffinity.yaml b/kubernetes_platform/python/test/snapshot/data/nodeaffinity.yaml new file mode 100644 index 00000000000..db8e0218a4b --- /dev/null +++ b/kubernetes_platform/python/test/snapshot/data/nodeaffinity.yaml @@ -0,0 +1,211 @@ +# PIPELINE DEFINITION +# Name: test-node-affinity +components: + comp-print-hello-with-empty-json: + executorLabel: exec-print-hello-with-empty-json + comp-print-hello-with-json-affinity: + executorLabel: exec-print-hello-with-json-affinity + comp-print-hello-with-json-affinity-2: + executorLabel: exec-print-hello-with-json-affinity-2 + comp-print-hello-with-preferred-affinity: + executorLabel: exec-print-hello-with-preferred-affinity +deploymentSpec: + executors: + exec-print-hello-with-empty-json: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_hello_with_empty_json + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.13.0'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_hello_with_empty_json():\n pass\n\n" + image: python:3.9 + exec-print-hello-with-json-affinity: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_hello_with_json_affinity + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.13.0'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_hello_with_json_affinity():\n pass\n\n" + image: python:3.9 + exec-print-hello-with-json-affinity-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_hello_with_json_affinity + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.13.0'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_hello_with_json_affinity():\n pass\n\n" + image: python:3.9 + exec-print-hello-with-preferred-affinity: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_hello_with_preferred_affinity + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.13.0'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_hello_with_preferred_affinity():\n pass\n\n" + image: python:3.9 +pipelineInfo: + name: test-node-affinity +root: + dag: + tasks: + print-hello-with-empty-json: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-hello-with-empty-json + taskInfo: + name: print-hello-with-empty-json + print-hello-with-json-affinity: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-hello-with-json-affinity + taskInfo: + name: print-hello-with-json-affinity + print-hello-with-json-affinity-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-hello-with-json-affinity-2 + taskInfo: + name: print-hello-with-json-affinity-2 + print-hello-with-preferred-affinity: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-hello-with-preferred-affinity + taskInfo: + name: print-hello-with-preferred-affinity +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 +--- +platforms: + kubernetes: + deploymentSpec: + executors: + exec-print-hello-with-empty-json: + nodeAffinity: + - nodeAffinityJson: + runtimeValue: + constant: {} + exec-print-hello-with-json-affinity: + nodeAffinity: + - nodeAffinityJson: + runtimeValue: + constant: + preferredDuringSchedulingIgnoredDuringExecution: + - preference: + matchExpressions: + - key: disktype + operator: In + values: + - ssd + weight: 100.0 + exec-print-hello-with-json-affinity-2: + nodeAffinity: + - nodeAffinityJson: + runtimeValue: + constant: + preferredDuringSchedulingIgnoredDuringExecution: + - preference: + matchExpressions: + - key: zone + operator: In + values: + - us-west-1 + weight: 100.0 + - preference: + matchExpressions: + - key: instance-type + operator: In + values: + - n1-standard-4 + weight: 50.0 + exec-print-hello-with-preferred-affinity: + nodeAffinity: + - matchExpressions: + - key: zone + operator: In + values: + - us-west-1 + weight: 50.0 diff --git a/kubernetes_platform/python/test/snapshot/test_data_config.yaml b/kubernetes_platform/python/test/snapshot/test_data_config.yaml index 4ff7b681c1b..9704deeae46 100644 --- a/kubernetes_platform/python/test/snapshot/test_data_config.yaml +++ b/kubernetes_platform/python/test/snapshot/test_data_config.yaml @@ -12,17 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. 
- test_cases: -# Disabled while https://github.com/kubeflow/pipelines/issues/10918 is failing -# - module: create_mount_delete_dynamic_pvc -# name: my_pipeline + # Disabled while https://github.com/kubeflow/pipelines/issues/10918 is failing + # - module: create_mount_delete_dynamic_pvc + # name: my_pipeline - module: create_mount_delete_existing_pvc name: my_pipeline - module: create_mount_delete_existing_pvc_from_task_output name: my_pipeline - + - module: nodeaffinity + name: my_pipeline # Disabled while https://github.com/kubeflow/pipelines/issues/10918 is failing # - module: secret_as_env # name: my_pipeline diff --git a/kubernetes_platform/python/test/unit/test_config_map.py b/kubernetes_platform/python/test/unit/test_config_map.py index c2603ec3b3b..176f5c97311 100644 --- a/kubernetes_platform/python/test/unit/test_config_map.py +++ b/kubernetes_platform/python/test/unit/test_config_map.py @@ -205,7 +205,8 @@ def my_pipeline(): 'keyToEnv': [{ 'configMapKey': 'foo', 'envVar': 'CM_VAR' - }] + }], + 'optional': False }], 'configMapAsVolume': [{ 'configMapName': 'cm-name2', @@ -518,7 +519,102 @@ def my_pipeline(): 'configMapKey': 'bar', 'envVar': 'BAR' }, - ] + ], + 'optional': False + }] + } + } + } + } + } + } + + def test_use_one_optional_true(self): + + @dsl.pipeline + def my_pipeline(): + task = comp() + kubernetes.use_config_map_as_env( + task, + config_map_name='cm-name', + config_map_key_to_env={ + 'foo': 'FOO', + 'bar': 'BAR', + }, + optional=True, + ) + + assert json_format.MessageToDict(my_pipeline.platform_spec) == { + 'platforms': { + 'kubernetes': { + 'deploymentSpec': { + 'executors': { + 'exec-comp': { + 'configMapAsEnv': [{ + 'configMapName': 'cm-name', + 'configMapNameParameter': { + 'runtimeValue': { + 'constant': 'cm-name' + } + }, + 'keyToEnv': [ + { + 'configMapKey': 'foo', + 'envVar': 'FOO' + }, + { + 'configMapKey': 'bar', + 'envVar': 'BAR' + }, + ], + 'optional': True + }] + } + } + } + } + } + } + + def test_use_one_optional_false(self): + + @dsl.pipeline + def my_pipeline(): + task = comp() + kubernetes.use_config_map_as_env( + task, + config_map_name='cm-name', + config_map_key_to_env={ + 'foo': 'FOO', + 'bar': 'BAR', + }, + optional=False, + ) + + assert json_format.MessageToDict(my_pipeline.platform_spec) == { + 'platforms': { + 'kubernetes': { + 'deploymentSpec': { + 'executors': { + 'exec-comp': { + 'configMapAsEnv': [{ + 'configMapName': 'cm-name', + 'configMapNameParameter': { + 'runtimeValue': { + 'constant': 'cm-name' + } + }, + 'keyToEnv': [ + { + 'configMapKey': 'foo', + 'envVar': 'FOO' + }, + { + 'configMapKey': 'bar', + 'envVar': 'BAR' + }, + ], + 'optional': False }] } } @@ -561,7 +657,8 @@ def my_pipeline(): 'keyToEnv': [{ 'configMapKey': 'foo1', 'envVar': 'CM_VAR1' - }] + }], + 'optional': False }, { 'configMapName': 'cm-name2', @@ -573,7 +670,8 @@ def my_pipeline(): 'keyToEnv': [{ 'configMapKey': 'foo2', 'envVar': 'CM_VAR2' - }] + }], + 'optional': False }, ] } @@ -617,7 +715,8 @@ def my_pipeline(): 'keyToEnv': [{ 'configMapKey': 'foo', 'envVar': 'CM_VAR' - }] + }], + 'optional': False }], 'configMapAsVolume': [{ 'configMapName': 'cm-name2', @@ -681,7 +780,8 @@ def my_pipeline(): 'configMapKey': 'foo', 'envVar': 'CM_VAR' }, - ] + ], + 'optional': False }] } } @@ -719,7 +819,8 @@ def my_pipeline(cm_name_input_1: str): 'configMapKey': 'foo', 'envVar': 'CM_VAR' }, - ] + ], + 'optional': False }] } } @@ -772,7 +873,8 @@ def my_pipeline(cm_name_input_1: str, cm_name_input_2: str): 'configMapKey': 'foo', 'envVar': 'CM_VAR' }, - ] + ], + 'optional': 
False }, { 'configMapNameParameter': { @@ -783,7 +885,8 @@ def my_pipeline(cm_name_input_1: str, cm_name_input_2: str): 'configMapKey': 'foo', 'envVar': 'CM_VAR' }, - ] + ], + 'optional': False }, ] }, @@ -798,7 +901,8 @@ def my_pipeline(cm_name_input_1: str, cm_name_input_2: str): 'configMapKey': 'foo', 'envVar': 'CM_VAR' }, - ] + ], + 'optional': False }, ] } @@ -840,7 +944,8 @@ def my_pipeline(): 'configMapKey': 'foo', 'envVar': 'CM_VAR' }, - ] + ], + 'optional': False }] } } @@ -900,7 +1005,8 @@ def my_pipeline(): 'configMapKey': 'foo', 'envVar': 'CM_VAR' }, - ] + ], + 'optional': False }, { 'configMapNameParameter': { @@ -914,7 +1020,8 @@ def my_pipeline(): 'configMapKey': 'foo', 'envVar': 'CM_VAR' }, - ] + ], + 'optional': False } ] }, @@ -932,7 +1039,8 @@ def my_pipeline(): 'configMapKey': 'foo', 'envVar': 'CM_VAR' }, - ] + ], + 'optional': False } ] } diff --git a/kubernetes_platform/python/test/unit/test_node_affinity.py b/kubernetes_platform/python/test/unit/test_node_affinity.py new file mode 100644 index 00000000000..9a5d2577790 --- /dev/null +++ b/kubernetes_platform/python/test/unit/test_node_affinity.py @@ -0,0 +1,564 @@ +# Copyright 2025 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.protobuf import json_format +from kfp import dsl +from kfp import kubernetes + + +class TestNodeAffinity: + + def test_add_match_expressions(self): + """Test adding node affinity with matchExpressions (required scheduling).""" + @dsl.pipeline + def my_pipeline(): + task = comp() + kubernetes.add_node_affinity( + task, + match_expressions=[ + { + 'key': 'disktype', + 'operator': 'In', + 'values': ['ssd', 'nvme'] + } + ] + ) + + assert json_format.MessageToDict(my_pipeline.platform_spec) == { + 'platforms': { + 'kubernetes': { + 'deploymentSpec': { + 'executors': { + 'exec-comp': { + 'nodeAffinity': [{ + 'matchExpressions': [{ + 'key': 'disktype', + 'operator': 'In', + 'values': ['ssd', 'nvme'] + }] + }] + } + } + } + } + } + } + + def test_add_match_fields(self): + """Test adding node affinity with matchFields (required scheduling).""" + @dsl.pipeline + def my_pipeline(): + task = comp() + kubernetes.add_node_affinity( + task, + match_fields=[ + { + 'key': 'metadata.name', + 'operator': 'In', + 'values': ['node-1', 'node-2'] + } + ] + ) + + assert json_format.MessageToDict(my_pipeline.platform_spec) == { + 'platforms': { + 'kubernetes': { + 'deploymentSpec': { + 'executors': { + 'exec-comp': { + 'nodeAffinity': [{ + 'matchFields': [{ + 'key': 'metadata.name', + 'operator': 'In', + 'values': ['node-1', 'node-2'] + }] + }] + } + } + } + } + } + } + + def test_add_preferred_scheduling_with_weight(self): + """Test adding node affinity with weight (preferred scheduling).""" + @dsl.pipeline + def my_pipeline(): + task = comp() + kubernetes.add_node_affinity( + task, + match_expressions=[ + { + 'key': 'zone', + 'operator': 'In', + 'values': ['us-west-1'] + } + ], + weight=100 + ) + + assert json_format.MessageToDict(my_pipeline.platform_spec) == { + 'platforms': { + 
'kubernetes': { + 'deploymentSpec': { + 'executors': { + 'exec-comp': { + 'nodeAffinity': [{ + 'matchExpressions': [{ + 'key': 'zone', + 'operator': 'In', + 'values': ['us-west-1'] + }], + 'weight': 100 + }] + } + } + } + } + } + } + + def test_add_combined_match_expressions_and_fields(self): + """Test adding node affinity with both matchExpressions and matchFields.""" + @dsl.pipeline + def my_pipeline(): + task = comp() + kubernetes.add_node_affinity( + task, + match_expressions=[ + { + 'key': 'disktype', + 'operator': 'In', + 'values': ['ssd'] + }, + { + 'key': 'gpu', + 'operator': 'Exists' + } + ], + match_fields=[ + { + 'key': 'metadata.name', + 'operator': 'NotIn', + 'values': ['node-5', 'node-6'] + } + ] + ) + + assert json_format.MessageToDict(my_pipeline.platform_spec) == { + 'platforms': { + 'kubernetes': { + 'deploymentSpec': { + 'executors': { + 'exec-comp': { + 'nodeAffinity': [{ + 'matchExpressions': [ + { + 'key': 'disktype', + 'operator': 'In', + 'values': ['ssd'] + }, + { + 'key': 'gpu', + 'operator': 'Exists' + } + ], + 'matchFields': [{ + 'key': 'metadata.name', + 'operator': 'NotIn', + 'values': ['node-5', 'node-6'] + }] + }] + } + } + } + } + } + } + + def test_add_multiple_node_affinity_terms(self): + """Test adding multiple node affinity terms.""" + @dsl.pipeline + def my_pipeline(): + task = comp() + kubernetes.add_node_affinity( + task, + match_expressions=[ + { + 'key': 'disktype', + 'operator': 'In', + 'values': ['ssd'] + } + ] + ) + kubernetes.add_node_affinity( + task, + match_fields=[ + { + 'key': 'metadata.name', + 'operator': 'In', + 'values': ['node-1'] + } + ] + ) + + assert json_format.MessageToDict(my_pipeline.platform_spec) == { + 'platforms': { + 'kubernetes': { + 'deploymentSpec': { + 'executors': { + 'exec-comp': { + 'nodeAffinity': [ + { + 'matchExpressions': [{ + 'key': 'disktype', + 'operator': 'In', + 'values': ['ssd'] + }] + }, + { + 'matchFields': [{ + 'key': 'metadata.name', + 'operator': 'In', + 'values': ['node-1'] + }] + } + ] + } + } + } + } + } + } + + def test_respects_other_configuration(self): + """Test that node affinity respects other Kubernetes configurations.""" + @dsl.pipeline + def my_pipeline(): + task = comp() + kubernetes.use_secret_as_volume( + task, secret_name='my-secret', mount_path='/mnt/my_vol') + kubernetes.add_node_affinity( + task, + match_expressions=[ + { + 'key': 'disktype', + 'operator': 'In', + 'values': ['ssd'] + } + ] + ) + + assert json_format.MessageToDict(my_pipeline.platform_spec) == { + 'platforms': { + 'kubernetes': { + 'deploymentSpec': { + 'executors': { + 'exec-comp': { + 'nodeAffinity': [{ + 'matchExpressions': [{ + 'key': 'disktype', + 'operator': 'In', + 'values': ['ssd'] + }] + }], + 'secretAsVolume': [{ + 'secretName': 'my-secret', + 'secretNameParameter': {'runtimeValue': {'constant': 'my-secret'}}, + 'mountPath': '/mnt/my_vol', + 'optional': False + }] + } + } + } + } + } + } + + def test_invalid_match_expression_missing_key(self): + @dsl.pipeline + def my_pipeline(): + task = comp() + try: + kubernetes.add_node_affinity( + task, + match_expressions=[{'operator': 'In', 'values': ['ssd']}] + ) + except ValueError as e: + assert "non-empty 'key'" in str(e) + else: + assert False, 'Expected ValueError for missing key' + + def test_invalid_match_expression_missing_operator(self): + @dsl.pipeline + def my_pipeline(): + task = comp() + try: + kubernetes.add_node_affinity( + task, + match_expressions=[{'key': 'disktype', 'values': ['ssd']}] + ) + except ValueError as e: + assert "non-empty 'operator'" 
in str(e) + else: + assert False, 'Expected ValueError for missing operator' + + def test_invalid_match_expression_invalid_operator(self): + @dsl.pipeline + def my_pipeline(): + task = comp() + try: + kubernetes.add_node_affinity( + task, + match_expressions=[{'key': 'disktype', 'operator': 'INVALID', 'values': ['ssd']}] + ) + except ValueError as e: + assert "Invalid operator" in str(e) + else: + assert False, 'Expected ValueError for invalid operator' + + def test_invalid_weight_too_low(self): + @dsl.pipeline + def my_pipeline(): + task = comp() + try: + kubernetes.add_node_affinity( + task, + match_expressions=[{'key': 'disktype', 'operator': 'In', 'values': ['ssd']}], + weight=0 + ) + except ValueError as e: + assert "weight must be between 1 and 100" in str(e) + else: + assert False, 'Expected ValueError for weight < 1' + + def test_invalid_weight_too_high(self): + @dsl.pipeline + def my_pipeline(): + task = comp() + try: + kubernetes.add_node_affinity( + task, + match_expressions=[{'key': 'disktype', 'operator': 'In', 'values': ['ssd']}], + weight=101 + ) + except ValueError as e: + assert "weight must be between 1 and 100" in str(e) + else: + assert False, 'Expected ValueError for weight > 100' + + +class TestNodeAffinityJSON: + + def test_component_pipeline_input_required_scheduling(self): + """Test JSON-based node affinity with pipeline input for required scheduling.""" + @dsl.pipeline + def my_pipeline(affinity_input: dict): + task = comp() + kubernetes.add_node_affinity_json( + task, + node_affinity_json=affinity_input, + ) + + assert json_format.MessageToDict(my_pipeline.platform_spec) == { + 'platforms': { + 'kubernetes': { + 'deploymentSpec': { + 'executors': { + 'exec-comp': { + 'nodeAffinity': [{ + 'nodeAffinityJson': { + 'componentInputParameter': 'affinity_input' + } + }] + } + } + } + } + } + } + + def test_component_pipeline_input_multiple_tasks(self): + """Test JSON-based node affinity with multiple tasks and pipeline inputs.""" + @dsl.pipeline + def my_pipeline(affinity_input_1: dict, affinity_input_2: dict): + t1 = comp() + kubernetes.add_node_affinity_json( + t1, + node_affinity_json=affinity_input_1, + ) + + t2 = comp() + kubernetes.add_node_affinity_json( + t2, + node_affinity_json=affinity_input_2, + ) + + assert json_format.MessageToDict(my_pipeline.platform_spec) == { + 'platforms': { + 'kubernetes': { + 'deploymentSpec': { + 'executors': { + 'exec-comp': { + 'nodeAffinity': [{ + 'nodeAffinityJson': { + 'componentInputParameter': 'affinity_input_1' + } + }] + }, + 'exec-comp-2': { + 'nodeAffinity': [{ + 'nodeAffinityJson': { + 'componentInputParameter': 'affinity_input_2' + } + }] + } + } + } + } + } + } + + def test_component_upstream_input(self): + """Test JSON-based node affinity with upstream task input parameters.""" + @dsl.pipeline + def my_pipeline(): + upstream_task = comp_with_output() + task = comp() + kubernetes.add_node_affinity_json( + task, + node_affinity_json=upstream_task.output, + ) + assert json_format.MessageToDict(my_pipeline.platform_spec) == { + 'platforms': { + 'kubernetes': { + 'deploymentSpec': { + 'executors': { + 'exec-comp': { + 'nodeAffinity': [{ + 'nodeAffinityJson': { + 'taskOutputParameter': { + 'producerTask': 'comp-with-output', + 'outputParameterKey': 'Output' + } + } + }] + } + } + } + } + } + } + + def test_overwrite_previous_json(self): + """Test that applying node affinity JSON multiple times overwrites the previous.""" + @dsl.pipeline + def my_pipeline(affinity_input_1: dict, affinity_input_2: dict): + task = comp() + 
kubernetes.add_node_affinity_json( + task, + node_affinity_json=affinity_input_1, + ) + # This should overwrite the previous JSON + kubernetes.add_node_affinity_json( + task, + node_affinity_json=affinity_input_2, + ) + assert json_format.MessageToDict(my_pipeline.platform_spec) == { + 'platforms': { + 'kubernetes': { + 'deploymentSpec': { + 'executors': { + 'exec-comp': { + 'nodeAffinity': [{ + 'nodeAffinityJson': { + 'componentInputParameter': 'affinity_input_2' + } + }] + } + } + } + } + } + } + + def test_mixed_explicit_and_json(self): + """Test mixing explicit node affinity with JSON-based node affinity.""" + @dsl.pipeline + def my_pipeline(affinity_input: dict): + task = comp() + kubernetes.add_node_affinity( + task, + match_expressions=[ + { + 'key': 'disktype', + 'operator': 'In', + 'values': ['ssd'] + } + ] + ) + kubernetes.add_node_affinity_json( + task, + node_affinity_json=affinity_input, + ) + + assert json_format.MessageToDict(my_pipeline.platform_spec) == { + 'platforms': { + 'kubernetes': { + 'deploymentSpec': { + 'executors': { + 'exec-comp': { + 'nodeAffinity': [ + { + 'matchExpressions': [{ + 'key': 'disktype', + 'operator': 'In', + 'values': ['ssd'] + }] + }, + { + 'nodeAffinityJson': { + 'componentInputParameter': 'affinity_input' + } + } + ] + } + } + } + } + } + } + + def test_invalid_node_affinity_json(self): + @dsl.pipeline + def my_pipeline(): + task = comp() + # Missing required fields for V1NodeAffinity + invalid_json = {"foo": "bar"} + try: + kubernetes.add_node_affinity_json(task, node_affinity_json=invalid_json) + except ValueError as e: + assert "Invalid V1NodeAffinity JSON" in str(e) + else: + assert False, 'Expected ValueError for invalid node_affinity_json' + + +@dsl.component +def comp(): + pass + + +@dsl.component() +def comp_with_output() -> str: + return "test_output" diff --git a/kubernetes_platform/python/test/unit/test_secret.py b/kubernetes_platform/python/test/unit/test_secret.py index 9fd52b19e15..896cabf30ec 100644 --- a/kubernetes_platform/python/test/unit/test_secret.py +++ b/kubernetes_platform/python/test/unit/test_secret.py @@ -181,7 +181,8 @@ def my_pipeline(): 'keyToEnv': [{ 'secretKey': 'password', 'envVar': 'SECRET_VAR' - }] + }], + 'optional': False }], 'secretAsVolume': [{ 'secretName': 'secret-name2', @@ -477,7 +478,94 @@ def my_pipeline(): 'secretKey': 'password', 'envVar': 'PASSWORD' }, - ] + ], + 'optional': False + }] + } + } + } + } + } + } + + def test_use_one_optional_true(self): + + @dsl.pipeline + def my_pipeline(): + task = comp() + kubernetes.use_secret_as_env( + task, + secret_name='secret-name', + secret_key_to_env={ + 'username': 'USERNAME', + 'password': 'PASSWORD', + }, + optional=True, + ) + + assert json_format.MessageToDict(my_pipeline.platform_spec) == { + 'platforms': { + 'kubernetes': { + 'deploymentSpec': { + 'executors': { + 'exec-comp': { + 'secretAsEnv': [{ + 'secretName': 'secret-name', + 'secretNameParameter': {'runtimeValue': {'constant': 'secret-name'}}, + 'keyToEnv': [ + { + 'secretKey': 'username', + 'envVar': 'USERNAME' + }, + { + 'secretKey': 'password', + 'envVar': 'PASSWORD' + }, + ], + 'optional': True + }] + } + } + } + } + } + } + + def test_use_one_optional_false(self): + + @dsl.pipeline + def my_pipeline(): + task = comp() + kubernetes.use_secret_as_env( + task, + secret_name='secret-name', + secret_key_to_env={ + 'username': 'USERNAME', + 'password': 'PASSWORD', + }, + optional=False, + ) + + assert json_format.MessageToDict(my_pipeline.platform_spec) == { + 'platforms': { + 'kubernetes': { 
+ 'deploymentSpec': { + 'executors': { + 'exec-comp': { + 'secretAsEnv': [{ + 'secretName': 'secret-name', + 'secretNameParameter': {'runtimeValue': {'constant': 'secret-name'}}, + 'keyToEnv': [ + { + 'secretKey': 'username', + 'envVar': 'USERNAME' + }, + { + 'secretKey': 'password', + 'envVar': 'PASSWORD' + }, + ], + 'optional': False }] } } @@ -515,7 +603,8 @@ def my_pipeline(): 'keyToEnv': [{ 'secretKey': 'password1', 'envVar': 'SECRET_VAR1' - }] + }], + 'optional': False }, { 'secretName': 'secret-name2', @@ -523,7 +612,8 @@ def my_pipeline(): 'keyToEnv': [{ 'secretKey': 'password2', 'envVar': 'SECRET_VAR2' - }] + }], + 'optional': False }, ] } @@ -562,7 +652,8 @@ def my_pipeline(): 'keyToEnv': [{ 'secretKey': 'password', 'envVar': 'SECRET_VAR' - }] + }], + 'optional': False }], 'secretAsVolume': [{ 'secretName': 'secret-name2', @@ -610,7 +701,8 @@ def my_pipeline(): 'keyToEnv': [{ 'secretKey': 'password', 'envVar': 'SECRET_VAR' - }] + }], + 'optional': False }] } } @@ -648,7 +740,8 @@ def my_pipeline(secret_name_input1: str): 'secretKey': 'foo', 'envVar': 'CM_VAR' }, - ] + ], + 'optional': False }] } } @@ -701,7 +794,8 @@ def my_pipeline(secret_name_input1: str, secret_name_input2: str): 'secretKey': 'foo', 'envVar': 'CM_VAR' }, - ] + ], + 'optional': False }, { 'secretNameParameter': { @@ -712,7 +806,8 @@ def my_pipeline(secret_name_input1: str, secret_name_input2: str): 'secretKey': 'foo', 'envVar': 'CM_VAR' }, - ] + ], + 'optional': False }, ] }, @@ -727,7 +822,8 @@ def my_pipeline(secret_name_input1: str, secret_name_input2: str): 'secretKey': 'foo', 'envVar': 'CM_VAR' }, - ] + ], + 'optional': False }, ] } @@ -769,7 +865,8 @@ def my_pipeline(): 'secretKey': 'foo', 'envVar': 'CM_VAR' }, - ] + ], + 'optional': False }] } } @@ -829,7 +926,8 @@ def my_pipeline(): 'secretKey': 'foo', 'envVar': 'CM_VAR' }, - ] + ], + 'optional': False }, { 'secretNameParameter': { @@ -843,7 +941,8 @@ def my_pipeline(): 'secretKey': 'foo', 'envVar': 'CM_VAR' }, - ] + ], + 'optional': False } ] }, @@ -861,7 +960,8 @@ def my_pipeline(): 'secretKey': 'foo', 'envVar': 'CM_VAR' }, - ] + ], + 'optional': False } ] } diff --git a/kubernetes_platform/python/test/unit/test_volume.py b/kubernetes_platform/python/test/unit/test_volume.py index 4fe8dc23004..5a9403b5fde 100644 --- a/kubernetes_platform/python/test/unit/test_volume.py +++ b/kubernetes_platform/python/test/unit/test_volume.py @@ -123,7 +123,8 @@ def my_pipeline(): 'keyToEnv': [{ 'secretKey': 'password', 'envVar': 'SECRET_VAR' - }] + }], + 'optional': False }] } } diff --git a/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/application.yaml b/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/application.yaml index f1e18c4270a..ad1ed082b4b 100644 --- a/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/application.yaml +++ b/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/application.yaml @@ -12,7 +12,7 @@ metadata: spec: descriptor: type: Kubeflow Pipelines - version: 2.5.0 + version: 2.14.3 description: |- Reusable end-to-end ML workflow maintainers: diff --git a/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/argo.yaml b/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/argo.yaml index 104f7b283af..84c5e9e71f6 100644 --- a/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/argo.yaml +++ b/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/argo.yaml @@ -828,9 +828,9 @@ subjects: apiVersion: v1 data: # References: - # * 
https://github.com/argoproj/argo-workflows/blob/v3.5.14/config/config.go - # * https://github.com/argoproj/argo-workflows/blob/v3.5.14/docs/workflow-controller-configmap.md - # * https://github.com/argoproj/argo-workflows/blob/v3.5.14/docs/workflow-controller-configmap.yaml + # * https://github.com/argoproj/argo-workflows/blob/v3.6.7/config/config.go + # * https://github.com/argoproj/argo-workflows/blob/v3.6.7/docs/workflow-controller-configmap.md + # * https://github.com/argoproj/argo-workflows/blob/v3.6.7/docs/workflow-controller-configmap.yaml # Choice of an executor is deprecated in favor of emissary executor executor: | diff --git a/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/cache.yaml b/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/cache.yaml index b27de0c4d72..899ea78f4bb 100644 --- a/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/cache.yaml +++ b/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/cache.yaml @@ -190,7 +190,7 @@ data: mysql_driver: "mysql" mysql_host: "mysql" mysql_port: "3306" - cache_image: "registry.k8s.io/busybox" + cache_image: "ghcr.io/containerd/busybox" cache_node_restrictions: "false" --- apiVersion: apps/v1 diff --git a/manifests/gcp_marketplace/schema.yaml b/manifests/gcp_marketplace/schema.yaml index bd519632abc..c797bcd39c4 100644 --- a/manifests/gcp_marketplace/schema.yaml +++ b/manifests/gcp_marketplace/schema.yaml @@ -1,9 +1,9 @@ x-google-marketplace: schemaVersion: v2 applicationApiVersion: v1beta1 - publishedVersion: 2.5.0 + publishedVersion: 2.14.3 publishedVersionMetadata: - releaseNote: Based on 2.5.0 version. + releaseNote: Based on 2.14.3 version. releaseTypes: - Feature recommended: false diff --git a/manifests/kustomize/README.md b/manifests/kustomize/README.md index d0b7bfd1825..63cde27ddf2 100644 --- a/manifests/kustomize/README.md +++ b/manifests/kustomize/README.md @@ -1,6 +1,6 @@ # Install Kubeflow Pipelines Standalone using Kustomize Manifests -This folder contains [Kubeflow Pipelines Standalone](https://www.kubeflow.org/docs/components/pipelines/installation/standalone-deployment/) +This folder contains [Kubeflow Pipelines Standalone](https://www.kubeflow.org/docs/components/pipelines/installation/standalone-deployment/) Kustomize manifests. Kubeflow Pipelines Standalone is one option to install Kubeflow Pipelines. You can review all other options in @@ -40,20 +40,6 @@ Data: Application data are persisted in in-cluster PersistentVolumeClaim storage. -### (env/gcp) install on Google Cloud with Cloud Storage and Cloud SQL - -Cloud Storage and Cloud SQL are better for operating a production cluster. - -Refer to [Google Cloud Instructions](sample/README.md) for installation. - -### (env/aws) install on AWS with S3 and RDS MySQL - -S3 and RDS MySQL are better for operating a production cluster. - -Refer to [AWS Instructions](env/aws/README.md) for installation. - -Note: Community maintains a different opinionated installation manifests for AWS, refer to [e2fyi/kubeflow-aws](https://github.com/e2fyi/kubeflow-aws/tree/master/pipelines). - ## Uninstall If the installation is based on CloudSQL/GCS, after the uninstall, the data is still there, @@ -66,8 +52,6 @@ kubectl kustomize env/platform-agnostic | kubectl delete -f - # or kubectl kustomize env/dev | kubectl delete -f - # or -kubectl kustomize env/gcp | kubectl delete -f - -# or kubectl delete applications/pipeline -n kubeflow ### 2. 
cluster scoped diff --git a/manifests/kustomize/base/cache-deployer/kustomization.yaml b/manifests/kustomize/base/cache-deployer/kustomization.yaml index ca9e0e0d791..ec900058334 100644 --- a/manifests/kustomize/base/cache-deployer/kustomization.yaml +++ b/manifests/kustomize/base/cache-deployer/kustomization.yaml @@ -6,7 +6,7 @@ resources: - cache-deployer-deployment.yaml images: - name: ghcr.io/kubeflow/kfp-cache-deployer - newTag: 2.5.0 + newTag: 2.14.3 labels: - includeSelectors: true pairs: diff --git a/manifests/kustomize/base/cache/kustomization.yaml b/manifests/kustomize/base/cache/kustomization.yaml index ebcb7433ab8..675a3ad1d20 100644 --- a/manifests/kustomize/base/cache/kustomization.yaml +++ b/manifests/kustomize/base/cache/kustomization.yaml @@ -8,7 +8,7 @@ resources: - cache-service.yaml images: - name: ghcr.io/kubeflow/kfp-cache-server - newTag: 2.5.0 + newTag: 2.14.3 labels: - includeSelectors: true pairs: diff --git a/manifests/kustomize/base/installs/generic/pipeline-install-config.yaml b/manifests/kustomize/base/installs/generic/pipeline-install-config.yaml index 2593a12c82a..12bb9279ecd 100644 --- a/manifests/kustomize/base/installs/generic/pipeline-install-config.yaml +++ b/manifests/kustomize/base/installs/generic/pipeline-install-config.yaml @@ -11,7 +11,7 @@ data: until the changes take effect. A quick way to restart all deployments in a namespace: `kubectl rollout restart deployment -n `. appName: pipeline - appVersion: 2.5.0 + appVersion: 2.14.3 dbHost: mysql # relic to be removed after release dbPort: "3306" # relic to be removed after release dbType: mysql @@ -63,8 +63,8 @@ data: ## cacheImage is the image that the mutating webhook will use to patch ## cached steps with. Will be used to echo a message announcing that ## the cached step result will be used. If not set it will default to - ## 'registry.k8s.io/busybox' - cacheImage: "registry.k8s.io/busybox" + ## 'ghcr.io/containerd/busybox' + cacheImage: "ghcr.io/containerd/busybox" ## cacheNodeRestrictions the dummy container runing if output is cached ## will run with the same affinity and node selector as the default pipeline ## step. This is defaulted to 'false' to allow the pod to be scheduled on @@ -94,3 +94,11 @@ data: ## to be nanoseconds. ConMaxLifeTime: "120s" LOG_LEVEL: "info" + ## ARTIFACTS_PROXY_ENABLED: Controls whether the artifact proxy is enabled + ## to support accessing out-of-kubeflow scope buckets. The artifact proxy + ## has known security and architectural flaws, so it should only be enabled + ## if you specifically need to access unique pipeline root buckets per + ## namespace that are outside the kubeflow namespace's access. + ## Valid values are 'true' and 'false'. Defaults to 'false'. + ## For more information see: https://github.com/kubeflow/pipelines/issues/11987 + ARTIFACTS_PROXY_ENABLED: "false" diff --git a/manifests/kustomize/base/installs/generic/postgres/pipeline-install-config.yaml b/manifests/kustomize/base/installs/generic/postgres/pipeline-install-config.yaml index fbfa8da41a1..65da0bdc809 100644 --- a/manifests/kustomize/base/installs/generic/postgres/pipeline-install-config.yaml +++ b/manifests/kustomize/base/installs/generic/postgres/pipeline-install-config.yaml @@ -63,8 +63,8 @@ data: ## cacheImage is the image that the mutating webhook will use to patch ## cached steps with. Will be used to echo a message announcing that ## the cached step result will be used. 
If not set it will default to - ## 'registry.k8s.io/busybox' - cacheImage: "registry.k8s.io/busybox" + ## 'ghcr.io/containerd/busybox' + cacheImage: "ghcr.io/containerd/busybox" ## cacheNodeRestrictions the dummy container runing if output is cached ## will run with the same affinity and node selector as the default pipeline ## step. This is defaulted to 'false' to allow the pod to be scheduled on @@ -93,3 +93,12 @@ data: ## If this value doesn't include a unit abbreviation, the units will be assumed ## to be nanoseconds. ConMaxLifeTime: "120s" + LOG_LEVEL: "info" + ## ARTIFACTS_PROXY_ENABLED: Controls whether the artifact proxy is enabled + ## to support accessing out-of-kubeflow scope buckets. The artifact proxy + ## has known security and architectural flaws, so it should only be enabled + ## if you specifically need to access unique pipeline root buckets per + ## namespace that are outside the kubeflow namespace's access. + ## Valid values are 'true' and 'false'. Defaults to 'false'. + ## For more information see: https://github.com/kubeflow/pipelines/issues/11987 + ARTIFACTS_PROXY_ENABLED: "false" diff --git a/manifests/kustomize/base/installs/multi-user/pipelines-profile-controller/deployment.yaml b/manifests/kustomize/base/installs/multi-user/pipelines-profile-controller/deployment.yaml index faa73ccb336..4b7c2a79a5e 100644 --- a/manifests/kustomize/base/installs/multi-user/pipelines-profile-controller/deployment.yaml +++ b/manifests/kustomize/base/installs/multi-user/pipelines-profile-controller/deployment.yaml @@ -9,19 +9,21 @@ spec: labels: sidecar.istio.io/inject: "false" spec: + securityContext: + seccompProfile: + type: RuntimeDefault containers: - name: profile-controller securityContext: allowPrivilegeEscalation: false - seccompProfile: - type: RuntimeDefault runAsNonRoot: true runAsUser: 1000 runAsGroup: 0 capabilities: drop: - ALL - image: public.ecr.aws/docker/library/python:3.12 + # We just need an image with the python botocore library installed + image: docker.io/alpine/k8s:1.32.3 command: ["python", "/hooks/sync.py"] envFrom: - configMapRef: @@ -32,22 +34,32 @@ spec: configMapKeyRef: name: pipeline-install-config key: appVersion - - name: KFP_DEFAULT_PIPELINE_ROOT - valueFrom: - configMapKeyRef: - optional: true - name: pipeline-install-config - key: defaultPipelineRoot - - name: MINIO_ACCESS_KEY + - name: AWS_ENDPOINT_URL + value: http://seaweedfs:8111 + - name: AWS_REGION + value: us-east-1 + - name: AWS_ACCESS_KEY_ID valueFrom: secretKeyRef: - name: mlpipeline-minio-artifact key: accesskey - - name: MINIO_SECRET_KEY + name: mlpipeline-minio-artifact + - name: AWS_SECRET_ACCESS_KEY valueFrom: secretKeyRef: - name: mlpipeline-minio-artifact key: secretkey + name: mlpipeline-minio-artifact + - name: KFP_DEFAULT_PIPELINE_ROOT + valueFrom: + configMapKeyRef: + key: defaultPipelineRoot + name: pipeline-install-config + optional: true + - name: ARTIFACTS_PROXY_ENABLED + valueFrom: + configMapKeyRef: + key: ARTIFACTS_PROXY_ENABLED + name: pipeline-install-config + optional: true volumeMounts: - name: hooks mountPath: /hooks diff --git a/manifests/kustomize/base/installs/multi-user/pipelines-profile-controller/sync.py b/manifests/kustomize/base/installs/multi-user/pipelines-profile-controller/sync.py index 3d39e3187f9..554554a0254 100644 --- a/manifests/kustomize/base/installs/multi-user/pipelines-profile-controller/sync.py +++ b/manifests/kustomize/base/installs/multi-user/pipelines-profile-controller/sync.py @@ -17,6 +17,17 @@ import os import base64 +# From awscli 
installed in alpine/k8s image +import botocore.session + +S3_BUCKET_NAME = 'mlpipeline' + +session = botocore.session.get_session() +# To interact with seaweedfs user management. Region does not matter. +iam = session.create_client('iam', region_name='foobar') +# S3 client for lifecycle policy management +s3 = session.create_client('s3', region_name='foobar') + def main(): settings = get_settings_from_env() @@ -26,8 +37,7 @@ def main(): def get_settings_from_env(controller_port=None, visualization_server_image=None, frontend_image=None, - visualization_server_tag=None, frontend_tag=None, disable_istio_sidecar=None, - minio_access_key=None, minio_secret_key=None, kfp_default_pipeline_root=None): + visualization_server_tag=None, frontend_tag=None, disable_istio_sidecar=None): """ Returns a dict of settings from environment variables relevant to the controller @@ -74,62 +84,53 @@ def get_settings_from_env(controller_port=None, disable_istio_sidecar if disable_istio_sidecar is not None \ else os.environ.get("DISABLE_ISTIO_SIDECAR") == "true" - settings["minio_access_key"] = \ - minio_access_key or \ - base64.b64encode(bytes(os.environ.get("MINIO_ACCESS_KEY"), 'utf-8')).decode('utf-8') - - settings["minio_secret_key"] = \ - minio_secret_key or \ - base64.b64encode(bytes(os.environ.get("MINIO_SECRET_KEY"), 'utf-8')).decode('utf-8') - - # KFP_DEFAULT_PIPELINE_ROOT is optional - settings["kfp_default_pipeline_root"] = \ - kfp_default_pipeline_root or \ - os.environ.get("KFP_DEFAULT_PIPELINE_ROOT") - return settings def server_factory(visualization_server_image, visualization_server_tag, frontend_image, frontend_tag, - disable_istio_sidecar, minio_access_key, - minio_secret_key, kfp_default_pipeline_root=None, - url="", controller_port=8080): + disable_istio_sidecar, url="", controller_port=8080): """ Returns an HTTPServer populated with Handler with customized settings """ class Controller(BaseHTTPRequestHandler): + def upsert_lifecycle_policy(self, bucket_name): + """Configure TTL lifecycle policy for SeaweedFS using S3 API""" + lfc = { + "Rules": [ + { + "Status": "Enabled", + "Filter": {"Prefix": "private-artifacts/"}, + "Expiration": {"Days": 183}, + "ID": "private-artifacts", + }, + ] + } + print('upsert_lifecycle_policy:', lfc) + try: + api_response = s3.put_bucket_lifecycle_configuration( + Bucket=bucket_name, + LifecycleConfiguration=lfc + ) + print('Lifecycle policy configured successfully:', api_response) + except Exception as e: + print(f'Warning: Failed to configure lifecycle policy: {e}') + def sync(self, parent, attachments): # parent is a namespace namespace = parent.get("metadata", {}).get("name") + pipeline_enabled = parent.get("metadata", {}).get( "labels", {}).get("pipelines.kubeflow.org/enabled") if pipeline_enabled != "true": return {"status": {}, "attachments": []} - desired_configmap_count = 1 - desired_resources = [] - if kfp_default_pipeline_root: - desired_configmap_count = 2 - desired_resources += [{ - "apiVersion": "v1", - "kind": "ConfigMap", - "metadata": { - "name": "kfp-launcher", - "namespace": namespace, - }, - "data": { - "defaultPipelineRoot": kfp_default_pipeline_root, - }, - }] - - # Compute status based on observed state. 
desired_status = { "kubeflow-pipelines-ready": len(attachments["Secret.v1"]) == 1 and - len(attachments["ConfigMap.v1"]) == desired_configmap_count and + len(attachments["ConfigMap.v1"]) == 3 and len(attachments["Deployment.apps/v1"]) == 2 and len(attachments["Service.v1"]) == 2 and len(attachments["DestinationRule.networking.istio.io/v1alpha3"]) == 1 and @@ -138,242 +139,116 @@ def sync(self, parent, attachments): } # Generate the desired attachment object(s). - desired_resources += [ + desired_resources = [ { "apiVersion": "v1", "kind": "ConfigMap", "metadata": { - "name": "metadata-grpc-configmap", + "name": "kfp-launcher", "namespace": namespace, }, "data": { - "METADATA_GRPC_SERVICE_HOST": - "metadata-grpc-service.kubeflow", - "METADATA_GRPC_SERVICE_PORT": "8080", + "defaultPipelineRoot": f"minio://{S3_BUCKET_NAME}/private-artifacts/{namespace}/v2/artifacts", }, }, - # Visualization server related manifests below - { - "apiVersion": "apps/v1", - "kind": "Deployment", - "metadata": { - "labels": { - "app": "ml-pipeline-visualizationserver" - }, - "name": "ml-pipeline-visualizationserver", - "namespace": namespace, - }, - "spec": { - "selector": { - "matchLabels": { - "app": "ml-pipeline-visualizationserver" - }, - }, - "template": { - "metadata": { - "labels": { - "app": "ml-pipeline-visualizationserver" - }, - "annotations": disable_istio_sidecar and { - "sidecar.istio.io/inject": "false" - } or {}, - }, - "spec": { - "containers": [{ - "image": f"{visualization_server_image}:{visualization_server_tag}", - "imagePullPolicy": - "IfNotPresent", - "name": - "ml-pipeline-visualizationserver", - "ports": [{ - "containerPort": 8888 - }], - "resources": { - "requests": { - "cpu": "50m", - "memory": "200Mi" - }, - "limits": { - "cpu": "500m", - "memory": "1Gi" - }, - } - }], - "serviceAccountName": - "default-editor", - }, - }, - }, - }, - { - "apiVersion": "networking.istio.io/v1alpha3", - "kind": "DestinationRule", - "metadata": { - "name": "ml-pipeline-visualizationserver", - "namespace": namespace, - }, - "spec": { - "host": "ml-pipeline-visualizationserver", - "trafficPolicy": { - "tls": { - "mode": "ISTIO_MUTUAL" - } - } - } - }, - { - "apiVersion": "security.istio.io/v1beta1", - "kind": "AuthorizationPolicy", - "metadata": { - "name": "ml-pipeline-visualizationserver", - "namespace": namespace, - }, - "spec": { - "selector": { - "matchLabels": { - "app": "ml-pipeline-visualizationserver" - } - }, - "rules": [{ - "from": [{ - "source": { - "principals": ["cluster.local/ns/kubeflow/sa/ml-pipeline"] - } - }] - }] - } - }, { "apiVersion": "v1", - "kind": "Service", + "kind": "ConfigMap", "metadata": { - "name": "ml-pipeline-visualizationserver", + "name": "metadata-grpc-configmap", "namespace": namespace, }, - "spec": { - "ports": [{ - "name": "http", - "port": 8888, - "protocol": "TCP", - "targetPort": 8888, - }], - "selector": { - "app": "ml-pipeline-visualizationserver", - }, - }, - }, - # Artifact fetcher related resources below. 
- { - "apiVersion": "apps/v1", - "kind": "Deployment", - "metadata": { - "labels": { - "app": "ml-pipeline-ui-artifact" - }, - "name": "ml-pipeline-ui-artifact", - "namespace": namespace, + "data": { + "METADATA_GRPC_SERVICE_HOST": + "metadata-grpc-service.kubeflow", + "METADATA_GRPC_SERVICE_PORT": "8080", }, - "spec": { - "selector": { - "matchLabels": { - "app": "ml-pipeline-ui-artifact" - } - }, - "template": { - "metadata": { - "labels": { - "app": "ml-pipeline-ui-artifact" - }, - "annotations": disable_istio_sidecar and { - "sidecar.istio.io/inject": "false" - } or {}, - }, - "spec": { - "containers": [{ - "name": - "ml-pipeline-ui-artifact", - "image": f"{frontend_image}:{frontend_tag}", - "imagePullPolicy": - "IfNotPresent", - "ports": [{ - "containerPort": 3000 - }], - "env": [ - { - "name": "MINIO_ACCESS_KEY", - "valueFrom": { - "secretKeyRef": { - "key": "accesskey", - "name": "mlpipeline-minio-artifact" - } - } - }, - { - "name": "MINIO_SECRET_KEY", - "valueFrom": { - "secretKeyRef": { - "key": "secretkey", - "name": "mlpipeline-minio-artifact" - } - } - } - ], - "resources": { - "requests": { - "cpu": "10m", - "memory": "70Mi" - }, - "limits": { - "cpu": "100m", - "memory": "500Mi" - }, - } - }], - "serviceAccountName": - "default-editor" - } - } - } }, { "apiVersion": "v1", - "kind": "Service", + "kind": "ConfigMap", "metadata": { - "name": "ml-pipeline-ui-artifact", + "name": "artifact-repositories", "namespace": namespace, - "labels": { - "app": "ml-pipeline-ui-artifact" + "annotations": { + "workflows.argoproj.io/default-artifact-repository": "default-namespaced" } }, - "spec": { - "ports": [{ - "name": - "http", # name is required to let istio understand request protocol - "port": 80, - "protocol": "TCP", - "targetPort": 3000 - }], - "selector": { - "app": "ml-pipeline-ui-artifact" - } + "data": { + "default-namespaced": json.dumps({ + "archiveLogs": True, + "s3": { + "endpoint": "minio-service.kubeflow:9000", + "bucket": S3_BUCKET_NAME, + "keyFormat": f"private-artifacts/{namespace}/{{{{workflow.name}}}}/{{{{workflow.creationTimestamp.Y}}}}/{{{{workflow.creationTimestamp.m}}}}/{{{{workflow.creationTimestamp.d}}}}/{{{{pod.name}}}}", + "insecure": True, + "accessKeySecret": { + "name": "mlpipeline-minio-artifact", + "key": "accesskey", + }, + "secretKeySecret": { + "name": "mlpipeline-minio-artifact", + "key": "secretkey", + } + } + }) } }, ] print('Received request:\n', json.dumps(parent, sort_keys=True)) print('Desired resources except secrets:\n', json.dumps(desired_resources, sort_keys=True)) + # Moved after the print argument because this is sensitive data. - desired_resources.append({ - "apiVersion": "v1", - "kind": "Secret", - "metadata": { - "name": "mlpipeline-minio-artifact", - "namespace": namespace, - }, - "data": { - "accesskey": minio_access_key, - "secretkey": minio_secret_key, - }, - }) + + # Check if secret is already there when the controller made the request. If yes, then + # use it. Else create a new credentials on seaweedfs for the namespace. + if s3_secret := attachments["Secret.v1"].get(f"{namespace}/mlpipeline-minio-artifact"): + desired_resources.append(s3_secret) + print('Using existing secret') + else: + print('Creating new access key.') + s3_access_key = iam.create_access_key(UserName=namespace) + # Use the AWS IAM API of seaweedfs to manage access policies to bucket. + # This policy ensures that a user can only access artifacts from his own profile. 
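+            # Note: create_access_key and put_user_policy are standard botocore IAM
+            # calls; SeaweedFS serves an AWS-IAM-compatible API for them. The IAM
+            # endpoint and admin credentials are assumed to be supplied to this pod
+            # through its environment and are not part of this file.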
+ iam.put_user_policy( + UserName=namespace, + PolicyName=f"KubeflowProject{namespace}", + PolicyDocument=json.dumps( + { + "Version": "2012-10-17", + "Statement": [{ + "Effect": "Allow", + "Action": [ + "s3:Put*", + "s3:Get*", + "s3:List*" + ], + "Resource": [ + f"arn:aws:s3:::{S3_BUCKET_NAME}/artifacts/*", + f"arn:aws:s3:::{S3_BUCKET_NAME}/private-artifacts/{namespace}/*", + f"arn:aws:s3:::{S3_BUCKET_NAME}/private/{namespace}/*", + f"arn:aws:s3:::{S3_BUCKET_NAME}/shared/*", + ] + }] + }) + ) + + self.upsert_lifecycle_policy(S3_BUCKET_NAME) + + desired_resources.insert( + 0, + { + "apiVersion": "v1", + "kind": "Secret", + "metadata": { + "name": "mlpipeline-minio-artifact", + "namespace": namespace, + }, + "data": { + "accesskey": base64.b64encode(s3_access_key["AccessKey"]["AccessKeyId"].encode('utf-8')).decode("utf-8"), + "secretkey": base64.b64encode(s3_access_key["AccessKey"]["SecretAccessKey"].encode('utf-8')).decode("utf-8"), + }, + }) return {"status": desired_status, "attachments": desired_resources} @@ -392,4 +267,4 @@ def do_POST(self): if __name__ == "__main__": - main() + main() \ No newline at end of file diff --git a/manifests/kustomize/base/installs/multi-user/pipelines-ui/deployment-patch.yaml b/manifests/kustomize/base/installs/multi-user/pipelines-ui/deployment-patch.yaml index 0403cc36274..cfea4408040 100644 --- a/manifests/kustomize/base/installs/multi-user/pipelines-ui/deployment-patch.yaml +++ b/manifests/kustomize/base/installs/multi-user/pipelines-ui/deployment-patch.yaml @@ -21,7 +21,11 @@ spec: - name: ARTIFACTS_SERVICE_PROXY_PORT value: '80' - name: ARTIFACTS_SERVICE_PROXY_ENABLED - value: 'true' + valueFrom: + configMapKeyRef: + name: pipeline-install-config + key: ARTIFACTS_PROXY_ENABLED + optional: true - name: ENABLE_AUTHZ value: 'true' - name: KUBEFLOW_USERID_HEADER diff --git a/manifests/kustomize/base/installs/multi-user/view-edit-cluster-roles.yaml b/manifests/kustomize/base/installs/multi-user/view-edit-cluster-roles.yaml index ed2e2ba0e44..8c86b7971ae 100644 --- a/manifests/kustomize/base/installs/multi-user/view-edit-cluster-roles.yaml +++ b/manifests/kustomize/base/installs/multi-user/view-edit-cluster-roles.yaml @@ -44,7 +44,7 @@ rules: - pipelines.kubeflow.org resources: - pipelines - - pipelines/versions + - pipelineversions verbs: - create - delete @@ -112,7 +112,7 @@ rules: - pipelines.kubeflow.org resources: - pipelines - - pipelines/versions + - pipelineversions - experiments - jobs verbs: diff --git a/manifests/kustomize/base/metadata/base/kustomization.yaml b/manifests/kustomize/base/metadata/base/kustomization.yaml index 13633113803..24542036b39 100644 --- a/manifests/kustomize/base/metadata/base/kustomization.yaml +++ b/manifests/kustomize/base/metadata/base/kustomization.yaml @@ -10,4 +10,4 @@ resources: - metadata-grpc-sa.yaml images: - name: ghcr.io/kubeflow/kfp-metadata-envoy - newTag: 2.5.0 + newTag: 2.14.3 diff --git a/manifests/kustomize/base/metadata/base/metadata-grpc-deployment.yaml b/manifests/kustomize/base/metadata/base/metadata-grpc-deployment.yaml index a2f419ab648..d367b97dd7b 100644 --- a/manifests/kustomize/base/metadata/base/metadata-grpc-deployment.yaml +++ b/manifests/kustomize/base/metadata/base/metadata-grpc-deployment.yaml @@ -69,7 +69,8 @@ spec: "--mysql_config_port=$(MYSQL_PORT)", "--mysql_config_user=$(DBCONFIG_USER)", "--mysql_config_password=$(DBCONFIG_PASSWORD)", - "--enable_database_upgrade=true" + "--enable_database_upgrade=true", + "--grpc_channel_arguments=grpc.max_metadata_size=16384" ] ports: - name: 
grpc-api diff --git a/manifests/kustomize/base/pipeline/allow-same-namespace-networkpolicy.yaml b/manifests/kustomize/base/pipeline/allow-same-namespace-networkpolicy.yaml new file mode 100644 index 00000000000..91ba34c619c --- /dev/null +++ b/manifests/kustomize/base/pipeline/allow-same-namespace-networkpolicy.yaml @@ -0,0 +1,12 @@ +apiVersion: networking.k8s.io/v1 +kind: NetworkPolicy +metadata: + name: default-allow-same-namespace + namespace: kubeflow +spec: + podSelector: {} + ingress: + - from: + - podSelector: {} + policyTypes: + - Ingress diff --git a/manifests/kustomize/base/pipeline/kustomization.yaml b/manifests/kustomize/base/pipeline/kustomization.yaml index 775e323c655..e17b443a0d5 100644 --- a/manifests/kustomize/base/pipeline/kustomization.yaml +++ b/manifests/kustomize/base/pipeline/kustomization.yaml @@ -34,16 +34,17 @@ resources: - container-builder-sa.yaml - viewer-sa.yaml - kfp-launcher-configmap.yaml + - allow-same-namespace-networkpolicy.yaml images: - name: ghcr.io/kubeflow/kfp-api-server - newTag: 2.5.0 + newTag: 2.14.3 - name: ghcr.io/kubeflow/kfp-persistence-agent - newTag: 2.5.0 + newTag: 2.14.3 - name: ghcr.io/kubeflow/kfp-scheduled-workflow-controller - newTag: 2.5.0 + newTag: 2.14.3 - name: ghcr.io/kubeflow/kfp-frontend - newTag: 2.5.0 + newTag: 2.14.3 - name: ghcr.io/kubeflow/kfp-viewer-crd-controller - newTag: 2.5.0 + newTag: 2.14.3 - name: ghcr.io/kubeflow/kfp-visualization-server - newTag: 2.5.0 + newTag: 2.14.3 diff --git a/manifests/kustomize/base/pipeline/metadata-writer/kustomization.yaml b/manifests/kustomize/base/pipeline/metadata-writer/kustomization.yaml index 679a95a61be..18f17463ec0 100644 --- a/manifests/kustomize/base/pipeline/metadata-writer/kustomization.yaml +++ b/manifests/kustomize/base/pipeline/metadata-writer/kustomization.yaml @@ -7,4 +7,4 @@ resources: - metadata-writer-sa.yaml images: - name: ghcr.io/kubeflow/kfp-metadata-writer - newTag: 2.5.0 + newTag: 2.14.3 diff --git a/manifests/kustomize/base/pipeline/ml-pipeline-apiserver-deployment.yaml b/manifests/kustomize/base/pipeline/ml-pipeline-apiserver-deployment.yaml index 24b5905b2ae..cd9f3dd2962 100644 --- a/manifests/kustomize/base/pipeline/ml-pipeline-apiserver-deployment.yaml +++ b/manifests/kustomize/base/pipeline/ml-pipeline-apiserver-deployment.yaml @@ -44,6 +44,10 @@ spec: configMapKeyRef: name: pipeline-install-config key: bucketName + - name: OBJECTSTORECONFIG_HOST + value: "minio-service.kubeflow" + - name: OBJECTSTORECONFIG_PORT + value: "9000" # relic variables - name: DBCONFIG_USER valueFrom: @@ -119,9 +123,9 @@ spec: name: mlpipeline-minio-artifact key: secretkey - name: V2_DRIVER_IMAGE - value: ghcr.io/kubeflow/kfp-driver:2.5.0 + value: ghcr.io/kubeflow/kfp-driver:2.14.3 - name: V2_LAUNCHER_IMAGE - value: ghcr.io/kubeflow/kfp-launcher:2.5.0 + value: ghcr.io/kubeflow/kfp-launcher:2.14.3 image: ghcr.io/kubeflow/kfp-api-server:dummy imagePullPolicy: IfNotPresent name: ml-pipeline-api-server @@ -131,26 +135,16 @@ spec: - name: grpc containerPort: 8887 readinessProbe: - exec: - command: - - wget - - -q # quiet - - -S # show server response - - -O - - "-" # Redirect output to stdout - - http://localhost:8888/apis/v1beta1/healthz + httpGet: + path: /apis/v1beta1/healthz + port: 8888 initialDelaySeconds: 3 periodSeconds: 5 timeoutSeconds: 2 livenessProbe: - exec: - command: - - wget - - -q # quiet - - -S # show server response - - -O - - "-" # Redirect output to stdout - - http://localhost:8888/apis/v1beta1/healthz + httpGet: + path: /apis/v1beta1/healthz + port: 8888 
initialDelaySeconds: 3 periodSeconds: 5 timeoutSeconds: 2 @@ -158,14 +152,9 @@ spec: # liveness probe takes over to accomodate the occasional database # migration. startupProbe: - exec: - command: - - wget - - -q # quiet - - -S # show server response - - -O - - "-" # Redirect output to stdout - - http://localhost:8888/apis/v1beta1/healthz + httpGet: + path: /apis/v1beta1/healthz + port: 8888 failureThreshold: 12 periodSeconds: 5 timeoutSeconds: 2 diff --git a/manifests/kustomize/base/pipeline/ml-pipeline-ui-deployment.yaml b/manifests/kustomize/base/pipeline/ml-pipeline-ui-deployment.yaml index c11685ea5d8..4dc2f7fe9fc 100644 --- a/manifests/kustomize/base/pipeline/ml-pipeline-ui-deployment.yaml +++ b/manifests/kustomize/base/pipeline/ml-pipeline-ui-deployment.yaml @@ -67,27 +67,23 @@ spec: value: "true" - name: DISABLE_GKE_METADATA value: "true" + - name: ARTIFACTS_SERVICE_PROXY_ENABLED + valueFrom: + configMapKeyRef: + name: pipeline-install-config + key: ARTIFACTS_PROXY_ENABLED + optional: true readinessProbe: - exec: - command: - - wget - - -q # quiet - - -S # show server response - - -O - - "-" # Redirect output to stdout - - http://localhost:3000/apis/v1beta1/healthz + httpGet: + path: /apis/v1beta1/healthz + port: 3000 initialDelaySeconds: 3 periodSeconds: 5 timeoutSeconds: 2 livenessProbe: - exec: - command: - - wget - - -q # quiet - - -S # show server response - - -O - - "-" # Redirect output to stdout - - http://localhost:3000/apis/v1beta1/healthz + httpGet: + path: /apis/v1beta1/healthz + port: 3000 initialDelaySeconds: 3 periodSeconds: 5 timeoutSeconds: 2 diff --git a/manifests/kustomize/base/pipeline/ml-pipeline-visualization-deployment.yaml b/manifests/kustomize/base/pipeline/ml-pipeline-visualization-deployment.yaml index e3773c4cff9..9c31fc4737a 100644 --- a/manifests/kustomize/base/pipeline/ml-pipeline-visualization-deployment.yaml +++ b/manifests/kustomize/base/pipeline/ml-pipeline-visualization-deployment.yaml @@ -26,26 +26,16 @@ spec: - name: http containerPort: 8888 readinessProbe: - exec: - command: - - wget - - -q # quiet - - -S # show server response - - -O - - "-" # Redirect output to stdout - - http://localhost:8888/ + httpGet: + path: / + port: 8888 initialDelaySeconds: 3 periodSeconds: 5 timeoutSeconds: 2 livenessProbe: - exec: - command: - - wget - - -q # quiet - - -S # show server response - - -O - - "-" # Redirect output to stdout - - http://localhost:8888/ + httpGet: + path: / + port: 8888 initialDelaySeconds: 3 periodSeconds: 5 timeoutSeconds: 2 diff --git a/manifests/kustomize/base/pipeline/pipeline-runner-role.yaml b/manifests/kustomize/base/pipeline/pipeline-runner-role.yaml index e81fd91a53f..eba0ee9f2d6 100644 --- a/manifests/kustomize/base/pipeline/pipeline-runner-role.yaml +++ b/manifests/kustomize/base/pipeline/pipeline-runner-role.yaml @@ -78,3 +78,10 @@ rules: - seldondeployments verbs: - '*' +- apiGroups: + - argoproj.io + resources: + - workflowtaskresults + verbs: + - create + - patch diff --git a/manifests/kustomize/env/aws/README.md b/manifests/kustomize/env/aws/README.md deleted file mode 100644 index 978adf1f745..00000000000 --- a/manifests/kustomize/env/aws/README.md +++ /dev/null @@ -1,56 +0,0 @@ -# Sample installation - -1. Create an EKS cluster - -Run this command to create EKS cluster -``` -eksctl create cluster \ ---name AWS-KFP \ ---version 1.17 \ ---region us-west-2 \ ---nodegroup-name linux-nodes \ ---node-type m5.xlarge \ ---nodes 2 \ ---nodes-min 1 \ ---nodes-max 4 \ ---managed -``` - -2. Prepare S3 - -Create S3 bucket. 
[Console](https://console.aws.amazon.com/s3/home). - -Run this command to create S3 bucket by changing `` to your prefer s3 bucket name. - -``` -export S3_BUCKET= -export AWS_REGION=us-west-2 -aws s3 mb s3://$S3_BUCKET --region $AWS_REGION -``` - -3. Prepare RDS - -Follow this [doc](https://awslabs.github.io/kubeflow-manifests/docs/deployment/rds-s3/guide/) to set up AWS RDS instance. - -4. Customize your values -- Edit [params.env](params.env), [secret.env](secret.env) and [minio-artifact-secret-patch.env](minio-artifact-secret-patch.env) - -5. Install - -``` -kustomize build ../../cluster-scoped-resources | kubectl apply -f - -# If upper one action got failed, e.x. you used wrong value, try delete, fix and apply again -# kubectl delete -k ../../cluster-scoped-resources - -kubectl wait crd/applications.app.k8s.io --for condition=established --timeout=60s - -kustomize build ./ | kubectl apply -f - -# If upper one action got failed, e.x. you used wrong value, try delete, fix and apply again -# kubectl delete -k ./ - -kubectl wait applications/pipeline -n kubeflow --for condition=Ready --timeout=1800s - -kubectl port-forward -n kubeflow svc/ml-pipeline-ui 8080:80 -``` - -Now you can access via `localhost:8080` diff --git a/manifests/kustomize/env/aws/aws-configuration-pipeline-patch.yaml b/manifests/kustomize/env/aws/aws-configuration-pipeline-patch.yaml deleted file mode 100644 index 598d8c4d213..00000000000 --- a/manifests/kustomize/env/aws/aws-configuration-pipeline-patch.yaml +++ /dev/null @@ -1,32 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ml-pipeline -spec: - template: - metadata: - labels: - app: ml-pipeline - spec: - containers: - - env: - - name: OBJECTSTORECONFIG_SECURE - value: "true" - - name: OBJECTSTORECONFIG_BUCKETNAME - valueFrom: - configMapKeyRef: - name: pipeline-install-config - key: bucketName - - name: OBJECTSTORECONFIG_HOST - valueFrom: - configMapKeyRef: - name: pipeline-install-config - key: minioServiceHost - - name: OBJECTSTORECONFIG_REGION - valueFrom: - configMapKeyRef: - name: pipeline-install-config - key: minioServiceRegion - - name: OBJECTSTORECONFIG_PORT - value: "" - name: ml-pipeline-api-server diff --git a/manifests/kustomize/env/aws/aws-configuration-pipeline-ui-patch.yaml b/manifests/kustomize/env/aws/aws-configuration-pipeline-ui-patch.yaml deleted file mode 100644 index 2a4de3838e1..00000000000 --- a/manifests/kustomize/env/aws/aws-configuration-pipeline-ui-patch.yaml +++ /dev/null @@ -1,27 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ml-pipeline-ui -spec: - template: - metadata: - labels: - app: ml-pipeline-ui - spec: - volumes: - - name: config-volume - configMap: - name: ml-pipeline-ui-configmap - containers: - - name: ml-pipeline-ui - env: - - name: AWS_ACCESS_KEY_ID - valueFrom: - secretKeyRef: - name: mlpipeline-minio-artifact - key: accesskey - - name: AWS_SECRET_ACCESS_KEY - valueFrom: - secretKeyRef: - name: mlpipeline-minio-artifact - key: secretkey diff --git a/manifests/kustomize/env/aws/config b/manifests/kustomize/env/aws/config deleted file mode 100644 index ebf05538dc9..00000000000 --- a/manifests/kustomize/env/aws/config +++ /dev/null @@ -1,20 +0,0 @@ -{ -artifactRepository: -{ - s3: { - bucket: $(kfp-artifact-bucket-name), - keyPrefix: artifacts, - endpoint: s3.amazonaws.com, - insecure: true, - accessKeySecret: { - name: mlpipeline-minio-artifact, - key: accesskey - }, - secretKeySecret: { - name: mlpipeline-minio-artifact, - key: secretkey - } - }, - archiveLogs: true -} -} diff --git 
a/manifests/kustomize/env/aws/kustomization.yaml b/manifests/kustomize/env/aws/kustomization.yaml deleted file mode 100644 index 93a5bc5e8c1..00000000000 --- a/manifests/kustomize/env/aws/kustomization.yaml +++ /dev/null @@ -1,39 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -namespace: kubeflow -resources: -- ../../env/platform-agnostic -configMapGenerator: -- behavior: merge - envs: - - params.env - name: pipeline-install-config -- behavior: replace - files: - - config - name: workflow-controller-configmap -- behavior: replace - files: - - viewer-pod-template.json - name: ml-pipeline-ui-configmap -secretGenerator: -- behavior: merge - envs: - - secret.env - name: mysql-secret -- behavior: merge - envs: - - minio-artifact-secret-patch.env - name: mlpipeline-minio-artifact -generatorOptions: - disableNameSuffixHash: true -patches: -- path: aws-configuration-pipeline-patch.yaml -- path: aws-configuration-pipeline-ui-patch.yaml -# Identifier for application manager to apply ownerReference. -# The ownerReference ensures the resources get garbage collected -# when application is deleted. -labels: -- includeSelectors: true - pairs: - application-crd-id: kubeflow-pipelines diff --git a/manifests/kustomize/env/aws/minio-artifact-secret-patch.env b/manifests/kustomize/env/aws/minio-artifact-secret-patch.env deleted file mode 100644 index 3f11b74138c..00000000000 --- a/manifests/kustomize/env/aws/minio-artifact-secret-patch.env +++ /dev/null @@ -1,2 +0,0 @@ -accesskey=YOUR_AWS_ACCESS_ID -secretkey=YOUR_AWS_SECRET_KEY diff --git a/manifests/kustomize/env/aws/params.env b/manifests/kustomize/env/aws/params.env deleted file mode 100644 index 30e966592ca..00000000000 --- a/manifests/kustomize/env/aws/params.env +++ /dev/null @@ -1,5 +0,0 @@ -dbHost=YOUR_RDS_ENDPOINT - -bucketName=YOUR_S3_BUCKET_NAME -minioServiceHost=s3.amazonaws.com -minioServiceRegion=YOUR_AWS_REGION diff --git a/manifests/kustomize/env/aws/secret.env b/manifests/kustomize/env/aws/secret.env deleted file mode 100644 index cdd7b0a5fc4..00000000000 --- a/manifests/kustomize/env/aws/secret.env +++ /dev/null @@ -1,2 +0,0 @@ -username=YOUR_RDS_USERNAME -password=YOUR_RDS_PASSWORD diff --git a/manifests/kustomize/env/aws/viewer-pod-template.json b/manifests/kustomize/env/aws/viewer-pod-template.json deleted file mode 100644 index 5cce566794e..00000000000 --- a/manifests/kustomize/env/aws/viewer-pod-template.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "spec": { - "containers": [ - { - "env": [ - { - "name": "AWS_ACCESS_KEY_ID", - "valueFrom": { - "secretKeyRef": { - "name": "mlpipeline-minio-artifact", - "key": "accesskey" - } - } - }, - { - "name": "AWS_SECRET_ACCESS_KEY", - "valueFrom": { - "secretKeyRef": { - "name": "mlpipeline-minio-artifact", - "key": "secretkey" - } - } - }, - { - "name": "AWS_REGION", - "valueFrom": { - "configMapKeyRef": { - "name": "pipeline-install-config", - "key": "minioServiceRegion" - } - } - } - ] - } - ] - } -} \ No newline at end of file diff --git a/manifests/kustomize/env/azure/kustomization.yaml b/manifests/kustomize/env/azure/kustomization.yaml deleted file mode 100644 index 75efe953bd0..00000000000 --- a/manifests/kustomize/env/azure/kustomization.yaml +++ /dev/null @@ -1,29 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -namespace: kubeflow - -resources: -- ../../base/installs/generic -- ../../base/metadata/base -- ../../third-party/argo/installs/namespace -- minio-azure-gateway - -configMapGenerator: -- behavior: merge - envs: - - params.env - 
name: pipeline-install-config - -secretGenerator: -- behavior: merge - envs: - - mysql-secret.env - name: mysql-secret - -# Identifier for application manager to apply ownerReference. -# The ownerReference ensures the resources get garbage collected -# when application is deleted. -labels: -- includeSelectors: true - pairs: - application-crd-id: kubeflow-pipelines diff --git a/manifests/kustomize/env/azure/minio-azure-gateway/kustomization.yaml b/manifests/kustomize/env/azure/minio-azure-gateway/kustomization.yaml deleted file mode 100644 index 65a0957e278..00000000000 --- a/manifests/kustomize/env/azure/minio-azure-gateway/kustomization.yaml +++ /dev/null @@ -1,14 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization - -resources: -- minio-azure-gateway-deployment.yaml -- minio-azure-gateway-service.yaml - -secretGenerator: -- envs: - - minio-artifact-secret.env - name: mlpipeline-minio-artifact -generatorOptions: - # mlpipeline-minio-artifact needs to be referred by exact name - disableNameSuffixHash: true diff --git a/manifests/kustomize/env/azure/minio-azure-gateway/minio-artifact-secret.env b/manifests/kustomize/env/azure/minio-azure-gateway/minio-artifact-secret.env deleted file mode 100644 index 7d9d25d6f05..00000000000 --- a/manifests/kustomize/env/azure/minio-azure-gateway/minio-artifact-secret.env +++ /dev/null @@ -1,2 +0,0 @@ -accesskey=[STORAGEACCOUNTNAME] -secretkey=[STORAGEACCOUNTKEY] \ No newline at end of file diff --git a/manifests/kustomize/env/azure/minio-azure-gateway/minio-azure-gateway-deployment.yaml b/manifests/kustomize/env/azure/minio-azure-gateway/minio-azure-gateway-deployment.yaml deleted file mode 100644 index 56979c42e94..00000000000 --- a/manifests/kustomize/env/azure/minio-azure-gateway/minio-azure-gateway-deployment.yaml +++ /dev/null @@ -1,40 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: minio - labels: - app: minio -spec: - selector: - matchLabels: - app: minio - strategy: - type: Recreate - template: - metadata: - labels: - app: minio - spec: - containers: - - name: minio - image: gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance - args: - - gateway - - azure - env: - - name: MINIO_ACCESS_KEY - valueFrom: - secretKeyRef: - name: mlpipeline-minio-artifact - key: accesskey - - name: MINIO_SECRET_KEY - valueFrom: - secretKeyRef: - name: mlpipeline-minio-artifact - key: secretkey - ports: - - containerPort: 9000 - resources: - requests: - cpu: 20m - memory: 25Mi diff --git a/manifests/kustomize/env/azure/minio-azure-gateway/minio-azure-gateway-service.yaml b/manifests/kustomize/env/azure/minio-azure-gateway/minio-azure-gateway-service.yaml deleted file mode 100644 index 7dd18174965..00000000000 --- a/manifests/kustomize/env/azure/minio-azure-gateway/minio-azure-gateway-service.yaml +++ /dev/null @@ -1,11 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: minio-service -spec: - ports: - - port: 9000 - targetPort: 9000 - protocol: TCP - selector: - app: minio \ No newline at end of file diff --git a/manifests/kustomize/env/azure/mysql-secret.env b/manifests/kustomize/env/azure/mysql-secret.env deleted file mode 100644 index 93f6075227d..00000000000 --- a/manifests/kustomize/env/azure/mysql-secret.env +++ /dev/null @@ -1,2 +0,0 @@ -username=[SQLUSER]@[SQLSERVERNAME] -password=[SQLPASS] \ No newline at end of file diff --git a/manifests/kustomize/env/azure/params.env b/manifests/kustomize/env/azure/params.env deleted file mode 100644 index 44ed71784f1..00000000000 --- 
a/manifests/kustomize/env/azure/params.env +++ /dev/null @@ -1 +0,0 @@ -dbHost=[SQLSERVERNAME].mysql.database.azure.com diff --git a/manifests/kustomize/env/azure/readme.md b/manifests/kustomize/env/azure/readme.md deleted file mode 100644 index 5487d6aa02f..00000000000 --- a/manifests/kustomize/env/azure/readme.md +++ /dev/null @@ -1,15 +0,0 @@ -# KFP customizations for Azure - -This template provides a starting point to configure KFP to use an Azure hosted MySQL database, as well as an Azure Blob backed MinIO service. - -## MySQL - -1. [Create an Azure Database for MySQL](https://docs.microsoft.com/azure/mysql/quickstart-create-mysql-server-database-using-azure-portal). Ensure that it will allow connections from the Kubernetes cluster. - -2. Substitute the server name into [params.env](./params.env), and the username and password into [mysql-secret.env](./mysql-secret.env) - -## MinIO Gateway for Azure Blobstore - -1. [Create an Azure Storage account](https://docs.microsoft.com/azure/storage/common/storage-account-create). Ensure that it will allow connections from the Kubernetes cluster. - -2. Substitute the storage name and access key into [minio-artifact-secret.env](./minio-azure-gateway/minio-artifact-secret.env). diff --git a/manifests/kustomize/env/dev-kind/kustomization.yaml b/manifests/kustomize/env/dev-kind/kustomization.yaml index fd65eab5968..e0a113687e2 100644 --- a/manifests/kustomize/env/dev-kind/kustomization.yaml +++ b/manifests/kustomize/env/dev-kind/kustomization.yaml @@ -64,7 +64,7 @@ patches: - $patch: replace - port: 9000 protocol: TCP - targetPort: 9000 + targetPort: 8333 nodePort: 30900 type: NodePort - patch: |- @@ -164,3 +164,67 @@ patches: - ml-pipeline-reverse-proxy - ml-pipeline-reverse-proxy.$(kfp-namespace) - ml-pipeline-reverse-proxy.$(kfp-namespace).svc +- patch: |- + apiVersion: networking.k8s.io/v1 + kind: NetworkPolicy + metadata: + name: default-allow-same-namespace + namespace: kubeflow + spec: + podSelector: {} + ingress: + - from: + - podSelector: {} + - from: + - ipBlock: + cidr: 0.0.0.0/0 + ports: + - port: 3000 + protocol: TCP + - port: 3306 + protocol: TCP + - port: 8080 + protocol: TCP + - port: 9000 + protocol: TCP + - port: 8888 + protocol: TCP + policyTypes: + - Ingress +- patch: |- + apiVersion: networking.k8s.io/v1 + kind: NetworkPolicy + metadata: + name: seaweedfs + namespace: kubeflow + spec: + ingress: + - from: + - namespaceSelector: + matchExpressions: + - key: app.kubernetes.io/part-of + operator: In + values: + - kubeflow-profile + ports: + - port: 8333 + - from: + - namespaceSelector: + matchExpressions: + - key: kubernetes.io/metadata.name + operator: In + values: + - istio-system + - from: + - ipBlock: + cidr: 0.0.0.0/0 + ports: + - port: 8333 + podSelector: + matchExpressions: + - key: app + operator: In + values: + - seaweedfs + policyTypes: + - Ingress diff --git a/manifests/kustomize/env/dev/api-server-patch.yaml b/manifests/kustomize/env/dev/api-server-patch.yaml new file mode 100644 index 00000000000..1c590f556f0 --- /dev/null +++ b/manifests/kustomize/env/dev/api-server-patch.yaml @@ -0,0 +1,16 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: ml-pipeline + namespace: kubeflow +spec: + template: + spec: + containers: + - env: + - name: V2_DRIVER_IMAGE + value: ghcr.io/kubeflow/kfp-driver:master + - name: V2_LAUNCHER_IMAGE + value: ghcr.io/kubeflow/kfp-launcher:master + name: ml-pipeline-api-server + serviceAccountName: ml-pipeline diff --git a/manifests/kustomize/env/dev/kustomization.yaml 
b/manifests/kustomize/env/dev/kustomization.yaml index 5e467de6684..f2ed5ddabf1 100644 --- a/manifests/kustomize/env/dev/kustomization.yaml +++ b/manifests/kustomize/env/dev/kustomization.yaml @@ -8,6 +8,32 @@ resources: - ../platform-agnostic - ../gcp/inverse-proxy +images: +- name: ghcr.io/kubeflow/kfp-api-server + newTag: master +- name: ghcr.io/kubeflow/kfp-frontend + newTag: master +- name: ghcr.io/kubeflow/kfp-persistence-agent + newTag: master +- name: ghcr.io/kubeflow/kfp-scheduled-workflow-controller + newTag: master +- name: ghcr.io/kubeflow/kfp-cache-deployer + newTag: master +- name: ghcr.io/kubeflow/kfp-cache-server + newTag: master +- name: ghcr.io/kubeflow/kfp-metadata-envoy + newTag: master +- name: ghcr.io/kubeflow/kfp-metadata-writer + newTag: master +- name: ghcr.io/kubeflow/kfp-viewer-crd-controller + newTag: master +- name: ghcr.io/kubeflow/kfp-visualization-server + newTag: master +- name: ghcr.io/kubeflow/kfp-inverse-proxy-agent + newTag: master + +patches: + - path: api-server-patch.yaml # !!! If you want to customize the namespace, # please refer sample/cluster-scoped-resources to update the namespace for cluster-scoped-resources namespace: kubeflow diff --git a/manifests/kustomize/env/gcp/inverse-proxy/kustomization.yaml b/manifests/kustomize/env/gcp/inverse-proxy/kustomization.yaml index 66a96b64739..41ea944b4e9 100644 --- a/manifests/kustomize/env/gcp/inverse-proxy/kustomization.yaml +++ b/manifests/kustomize/env/gcp/inverse-proxy/kustomization.yaml @@ -2,7 +2,7 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization images: - name: ghcr.io/kubeflow/kfp-inverse-proxy-agent - newTag: 2.5.0 + newTag: 2.14.3 resources: - proxy-configmap.yaml - proxy-deployment.yaml diff --git a/manifests/kustomize/env/gcp/kustomization.yaml b/manifests/kustomize/env/gcp/kustomization.yaml index 99837f5769d..65fcea315b3 100644 --- a/manifests/kustomize/env/gcp/kustomization.yaml +++ b/manifests/kustomize/env/gcp/kustomization.yaml @@ -9,7 +9,6 @@ resources: - ../../base/metadata/base - ../../third-party/argo/installs/namespace - inverse-proxy -- minio-gcs-gateway - cloudsql-proxy diff --git a/manifests/kustomize/env/gcp/minio-gcs-gateway/kustomization.yaml b/manifests/kustomize/env/gcp/minio-gcs-gateway/kustomization.yaml deleted file mode 100644 index fa4d6df3e6b..00000000000 --- a/manifests/kustomize/env/gcp/minio-gcs-gateway/kustomization.yaml +++ /dev/null @@ -1,15 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization - -resources: -- minio-gcs-gateway-deployment.yaml -- minio-gcs-gateway-sa.yaml -- minio-gcs-gateway-service.yaml - -secretGenerator: -- envs: - - minio-artifact-secret.env - name: mlpipeline-minio-artifact -generatorOptions: - # mlpipeline-minio-artifact needs to be referred by exact name - disableNameSuffixHash: true diff --git a/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-artifact-secret.env b/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-artifact-secret.env deleted file mode 100644 index bc8613ce2a0..00000000000 --- a/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-artifact-secret.env +++ /dev/null @@ -1,2 +0,0 @@ -accesskey=minio -secretkey=minio123 diff --git a/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-gcs-gateway-deployment.yaml b/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-gcs-gateway-deployment.yaml deleted file mode 100644 index bb85c95eb0d..00000000000 --- a/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-gcs-gateway-deployment.yaml +++ /dev/null @@ -1,47 +0,0 @@ -apiVersion: apps/v1 
-kind: Deployment -metadata: - name: minio - labels: - app: minio -spec: - selector: - matchLabels: - app: minio - strategy: - type: Recreate - template: - metadata: - labels: - app: minio - spec: - serviceAccountName: kubeflow-pipelines-minio-gcs-gateway - containers: - - name: minio - image: gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance - args: - - gateway - - gcs - - $(GCP_PROJECT_ID) - env: - - name: GCP_PROJECT_ID - valueFrom: - configMapKeyRef: - name: pipeline-install-config - key: gcsProjectId - - name: MINIO_ACCESS_KEY - valueFrom: - secretKeyRef: - name: mlpipeline-minio-artifact - key: accesskey - - name: MINIO_SECRET_KEY - valueFrom: - secretKeyRef: - name: mlpipeline-minio-artifact - key: secretkey - resources: - requests: - cpu: 20m - memory: 25Mi - ports: - - containerPort: 9000 diff --git a/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-gcs-gateway-sa.yaml b/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-gcs-gateway-sa.yaml deleted file mode 100644 index 2aa4f937685..00000000000 --- a/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-gcs-gateway-sa.yaml +++ /dev/null @@ -1,4 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: - name: kubeflow-pipelines-minio-gcs-gateway diff --git a/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-gcs-gateway-service.yaml b/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-gcs-gateway-service.yaml deleted file mode 100644 index 7dd18174965..00000000000 --- a/manifests/kustomize/env/gcp/minio-gcs-gateway/minio-gcs-gateway-service.yaml +++ /dev/null @@ -1,11 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: minio-service -spec: - ports: - - port: 9000 - targetPort: 9000 - protocol: TCP - selector: - app: minio \ No newline at end of file diff --git a/manifests/kustomize/env/openshift/README.md b/manifests/kustomize/env/openshift/README.md index 53ff019302b..48e3a91ebd7 100644 --- a/manifests/kustomize/env/openshift/README.md +++ b/manifests/kustomize/env/openshift/README.md @@ -32,4 +32,11 @@ Access the route via: echo https://$(oc get routes -n kubeflow ml-pipeline-ui --template={{.spec.host}}) ``` +## Clean up +To delete the `kubeflow` Openshift Project: + +```bash +oc -n kubeflow delete -k . +``` + [oc]: https://docs.redhat.com/en/documentation/openshift_container_platform/latest/html/cli_tools/openshift-cli-oc diff --git a/manifests/kustomize/env/plain-multi-user/kustomization.yaml b/manifests/kustomize/env/plain-multi-user/kustomization.yaml index 8b6d13f5142..5f1016d0288 100644 --- a/manifests/kustomize/env/plain-multi-user/kustomization.yaml +++ b/manifests/kustomize/env/plain-multi-user/kustomization.yaml @@ -7,8 +7,7 @@ resources: - ../../base/metadata/options/istio - ../../third-party/mysql/base - ../../third-party/mysql/options/istio -- ../../third-party/minio/base -- ../../third-party/minio/options/istio +- ../../third-party/seaweedfs/istio - ../../third-party/metacontroller/base diff --git a/manifests/kustomize/env/plain/kustomization.yaml b/manifests/kustomize/env/plain/kustomization.yaml index a60ce3a8e7e..dcbc92514b3 100644 --- a/manifests/kustomize/env/plain/kustomization.yaml +++ b/manifests/kustomize/env/plain/kustomization.yaml @@ -4,7 +4,7 @@ kind: Kustomization resources: - ../../base/installs/generic - ../../base/metadata/base -- ../../third-party/minio/base +- ../../third-party/seaweedfs/base - ../../third-party/mysql/base # Identifier for application manager to apply ownerReference. 
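With env/plain and the multi-user overlays now defaulting to SeaweedFS, each profile namespace gets its own scoped credentials in the mlpipeline-minio-artifact Secret minted by the profile controller above. The sketch below is not part of this change; it shows one way to check such credentials against the in-cluster S3 endpoint using botocore, the same client library sync.py uses. The endpoint address, bucket name and key prefix come from this patch, while the namespace name and the environment variables holding the decoded Secret values are assumptions.

```python
import os
import botocore.session

# Hypothetical inputs: the profile namespace and the base64-decoded accesskey /
# secretkey fields from that namespace's mlpipeline-minio-artifact Secret
# (assumed to be provided via environment variables for this sketch).
namespace = os.environ.get("PROFILE_NAMESPACE", "kubeflow-user-example-com")
access_key = os.environ["PROFILE_ACCESS_KEY"]
secret_key = os.environ["PROFILE_SECRET_KEY"]

session = botocore.session.get_session()
s3 = session.create_client(
    "s3",
    region_name="foobar",  # region is ignored, as in sync.py
    endpoint_url="http://minio-service.kubeflow:9000",
    aws_access_key_id=access_key,
    aws_secret_access_key=secret_key,
)

# Listing inside this profile's own prefix should succeed under the
# per-namespace IAM policy created by the profile controller...
resp = s3.list_objects_v2(Bucket="mlpipeline",
                          Prefix=f"private-artifacts/{namespace}/")
print(resp.get("KeyCount", 0), "objects visible")

# ...while the same call against another profile's private-artifacts/ prefix
# should be denied.
```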
diff --git a/manifests/kustomize/env/platform-agnostic-minio/kustomization.yaml b/manifests/kustomize/env/platform-agnostic-minio/kustomization.yaml new file mode 100644 index 00000000000..d8ba71c4a4e --- /dev/null +++ b/manifests/kustomize/env/platform-agnostic-minio/kustomization.yaml @@ -0,0 +1,23 @@ +# Minio deployment manifests in KFP will be removed in 2.16.0, in the future end-users will need to provision their own minio deployments +# The default will be SeaweedFS (Gateway docs: https://github.com/kubeflow/pipelines/blob/master/manifests/kustomize/third-party/seaweedfs/README.md) +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: +- ../../base/installs/generic +- ../../base/metadata/base +- ../../third-party/argo/installs/namespace +- ../../third-party/minio/base +- ../../third-party/mysql/base + +# Identifier for application manager to apply ownerReference. +# The ownerReference ensures the resources get garbage collected +# when application is deleted. + +# !!! If you want to customize the namespace, +# please also update base/cache-deployer/cluster-scoped/cache-deployer-clusterrolebinding.yaml +namespace: kubeflow +labels: +- includeSelectors: true + pairs: + application-crd-id: kubeflow-pipelines diff --git a/manifests/kustomize/env/platform-agnostic-multi-user-legacy/kustomization.yaml b/manifests/kustomize/env/platform-agnostic-multi-user-legacy/kustomization.yaml index e600b13e732..b8deb06d39c 100644 --- a/manifests/kustomize/env/platform-agnostic-multi-user-legacy/kustomization.yaml +++ b/manifests/kustomize/env/platform-agnostic-multi-user-legacy/kustomization.yaml @@ -9,8 +9,7 @@ resources: - ../../third-party/argo/installs/cluster - ../../third-party/mysql/base - ../../third-party/mysql/options/istio -- ../../third-party/minio/base -- ../../third-party/minio/options/istio +- ../../third-party/seaweedfs/istio # !!! If you want to customize the namespace, diff --git a/manifests/kustomize/env/platform-agnostic-multi-user-minio/kustomization.yaml b/manifests/kustomize/env/platform-agnostic-multi-user-minio/kustomization.yaml new file mode 100644 index 00000000000..f7990f8466f --- /dev/null +++ b/manifests/kustomize/env/platform-agnostic-multi-user-minio/kustomization.yaml @@ -0,0 +1,63 @@ +# Minio deployment manifests in KFP will be removed in 2.16.0, in the future end-users will need to provision their own minio deployments +# The default will be SeaweedFS (Gateway docs: https://github.com/kubeflow/pipelines/blob/master/manifests/kustomize/third-party/seaweedfs/README.md) +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: +- ../../third-party/metacontroller/base +- ../../base/installs/multi-user +- ../../base/metadata/base +- ../../base/metadata/options/istio +- ../../third-party/argo/installs/cluster +- ../../third-party/mysql/base +- ../../third-party/mysql/options/istio +- ../../third-party/minio/base +- ../../third-party/minio/options/istio + + +# !!! If you want to customize the namespace, +# please also update base/cache-deployer/cluster-scoped/cache-deployer-clusterrolebinding.yaml +namespace: kubeflow + +# Identifier for application manager to apply ownerReference. +# The ownerReference ensures the resources get garbage collected +# when application is deleted. 
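+# Note: this overlay keeps the previous MinIO-based behaviour for users not yet
+# ready to switch to SeaweedFS: it swaps the profile controller's sync.py back to
+# the MinIO-compatible version and restores the MINIO_ACCESS_KEY /
+# MINIO_SECRET_KEY environment variables below.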
+labels: +- includeSelectors: true + pairs: + application-crd-id: kubeflow-pipelines + +configMapGenerator: +- name: kubeflow-pipelines-profile-controller-code + behavior: replace + files: + # swap sync.py with minio compatible sync.py + - sync.py=sync.py + +patches: +- target: + group: apps + version: v1 + kind: Deployment + name: kubeflow-pipelines-profile-controller + patch: | + apiVersion: apps/v1 + kind: Deployment + metadata: + name: kubeflow-pipelines-profile-controller + spec: + template: + spec: + containers: + - name: profile-controller + env: + - name: MINIO_ACCESS_KEY + valueFrom: + secretKeyRef: + name: mlpipeline-minio-artifact + key: accesskey + - name: MINIO_SECRET_KEY + valueFrom: + secretKeyRef: + name: mlpipeline-minio-artifact + key: secretkey diff --git a/manifests/kustomize/env/platform-agnostic-multi-user-minio/sync.py b/manifests/kustomize/env/platform-agnostic-multi-user-minio/sync.py new file mode 100644 index 00000000000..3d39e3187f9 --- /dev/null +++ b/manifests/kustomize/env/platform-agnostic-multi-user-minio/sync.py @@ -0,0 +1,395 @@ +# Copyright 2020-2021 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from http.server import BaseHTTPRequestHandler, HTTPServer +import json +import os +import base64 + + +def main(): + settings = get_settings_from_env() + server = server_factory(**settings) + server.serve_forever() + + +def get_settings_from_env(controller_port=None, + visualization_server_image=None, frontend_image=None, + visualization_server_tag=None, frontend_tag=None, disable_istio_sidecar=None, + minio_access_key=None, minio_secret_key=None, kfp_default_pipeline_root=None): + """ + Returns a dict of settings from environment variables relevant to the controller + + Environment settings can be overridden by passing them here as arguments. + + Settings are pulled from the all-caps version of the setting name. 
The + following defaults are used if those environment variables are not set + to enable backwards compatibility with previous versions of this script: + visualization_server_image: ghcr.io/kubeflow/kfp-visualization-server + visualization_server_tag: value of KFP_VERSION environment variable + frontend_image: ghcr.io/kubeflow/kfp-frontend + frontend_tag: value of KFP_VERSION environment variable + disable_istio_sidecar: Required (no default) + minio_access_key: Required (no default) + minio_secret_key: Required (no default) + """ + settings = dict() + settings["controller_port"] = \ + controller_port or \ + os.environ.get("CONTROLLER_PORT", "8080") + + settings["visualization_server_image"] = \ + visualization_server_image or \ + os.environ.get("VISUALIZATION_SERVER_IMAGE", "ghcr.io/kubeflow/kfp-visualization-server") + + settings["frontend_image"] = \ + frontend_image or \ + os.environ.get("FRONTEND_IMAGE", "ghcr.io/kubeflow/kfp-frontend") + + # Look for specific tags for each image first, falling back to + # previously used KFP_VERSION environment variable for backwards + # compatibility + settings["visualization_server_tag"] = \ + visualization_server_tag or \ + os.environ.get("VISUALIZATION_SERVER_TAG") or \ + os.environ["KFP_VERSION"] + + settings["frontend_tag"] = \ + frontend_tag or \ + os.environ.get("FRONTEND_TAG") or \ + os.environ["KFP_VERSION"] + + settings["disable_istio_sidecar"] = \ + disable_istio_sidecar if disable_istio_sidecar is not None \ + else os.environ.get("DISABLE_ISTIO_SIDECAR") == "true" + + settings["minio_access_key"] = \ + minio_access_key or \ + base64.b64encode(bytes(os.environ.get("MINIO_ACCESS_KEY"), 'utf-8')).decode('utf-8') + + settings["minio_secret_key"] = \ + minio_secret_key or \ + base64.b64encode(bytes(os.environ.get("MINIO_SECRET_KEY"), 'utf-8')).decode('utf-8') + + # KFP_DEFAULT_PIPELINE_ROOT is optional + settings["kfp_default_pipeline_root"] = \ + kfp_default_pipeline_root or \ + os.environ.get("KFP_DEFAULT_PIPELINE_ROOT") + + return settings + + +def server_factory(visualization_server_image, + visualization_server_tag, frontend_image, frontend_tag, + disable_istio_sidecar, minio_access_key, + minio_secret_key, kfp_default_pipeline_root=None, + url="", controller_port=8080): + """ + Returns an HTTPServer populated with Handler with customized settings + """ + class Controller(BaseHTTPRequestHandler): + def sync(self, parent, attachments): + # parent is a namespace + namespace = parent.get("metadata", {}).get("name") + pipeline_enabled = parent.get("metadata", {}).get( + "labels", {}).get("pipelines.kubeflow.org/enabled") + + if pipeline_enabled != "true": + return {"status": {}, "attachments": []} + + desired_configmap_count = 1 + desired_resources = [] + if kfp_default_pipeline_root: + desired_configmap_count = 2 + desired_resources += [{ + "apiVersion": "v1", + "kind": "ConfigMap", + "metadata": { + "name": "kfp-launcher", + "namespace": namespace, + }, + "data": { + "defaultPipelineRoot": kfp_default_pipeline_root, + }, + }] + + + # Compute status based on observed state. + desired_status = { + "kubeflow-pipelines-ready": + len(attachments["Secret.v1"]) == 1 and + len(attachments["ConfigMap.v1"]) == desired_configmap_count and + len(attachments["Deployment.apps/v1"]) == 2 and + len(attachments["Service.v1"]) == 2 and + len(attachments["DestinationRule.networking.istio.io/v1alpha3"]) == 1 and + len(attachments["AuthorizationPolicy.security.istio.io/v1beta1"]) == 1 and + "True" or "False" + } + + # Generate the desired attachment object(s). 
+ desired_resources += [ + { + "apiVersion": "v1", + "kind": "ConfigMap", + "metadata": { + "name": "metadata-grpc-configmap", + "namespace": namespace, + }, + "data": { + "METADATA_GRPC_SERVICE_HOST": + "metadata-grpc-service.kubeflow", + "METADATA_GRPC_SERVICE_PORT": "8080", + }, + }, + # Visualization server related manifests below + { + "apiVersion": "apps/v1", + "kind": "Deployment", + "metadata": { + "labels": { + "app": "ml-pipeline-visualizationserver" + }, + "name": "ml-pipeline-visualizationserver", + "namespace": namespace, + }, + "spec": { + "selector": { + "matchLabels": { + "app": "ml-pipeline-visualizationserver" + }, + }, + "template": { + "metadata": { + "labels": { + "app": "ml-pipeline-visualizationserver" + }, + "annotations": disable_istio_sidecar and { + "sidecar.istio.io/inject": "false" + } or {}, + }, + "spec": { + "containers": [{ + "image": f"{visualization_server_image}:{visualization_server_tag}", + "imagePullPolicy": + "IfNotPresent", + "name": + "ml-pipeline-visualizationserver", + "ports": [{ + "containerPort": 8888 + }], + "resources": { + "requests": { + "cpu": "50m", + "memory": "200Mi" + }, + "limits": { + "cpu": "500m", + "memory": "1Gi" + }, + } + }], + "serviceAccountName": + "default-editor", + }, + }, + }, + }, + { + "apiVersion": "networking.istio.io/v1alpha3", + "kind": "DestinationRule", + "metadata": { + "name": "ml-pipeline-visualizationserver", + "namespace": namespace, + }, + "spec": { + "host": "ml-pipeline-visualizationserver", + "trafficPolicy": { + "tls": { + "mode": "ISTIO_MUTUAL" + } + } + } + }, + { + "apiVersion": "security.istio.io/v1beta1", + "kind": "AuthorizationPolicy", + "metadata": { + "name": "ml-pipeline-visualizationserver", + "namespace": namespace, + }, + "spec": { + "selector": { + "matchLabels": { + "app": "ml-pipeline-visualizationserver" + } + }, + "rules": [{ + "from": [{ + "source": { + "principals": ["cluster.local/ns/kubeflow/sa/ml-pipeline"] + } + }] + }] + } + }, + { + "apiVersion": "v1", + "kind": "Service", + "metadata": { + "name": "ml-pipeline-visualizationserver", + "namespace": namespace, + }, + "spec": { + "ports": [{ + "name": "http", + "port": 8888, + "protocol": "TCP", + "targetPort": 8888, + }], + "selector": { + "app": "ml-pipeline-visualizationserver", + }, + }, + }, + # Artifact fetcher related resources below. 
+ { + "apiVersion": "apps/v1", + "kind": "Deployment", + "metadata": { + "labels": { + "app": "ml-pipeline-ui-artifact" + }, + "name": "ml-pipeline-ui-artifact", + "namespace": namespace, + }, + "spec": { + "selector": { + "matchLabels": { + "app": "ml-pipeline-ui-artifact" + } + }, + "template": { + "metadata": { + "labels": { + "app": "ml-pipeline-ui-artifact" + }, + "annotations": disable_istio_sidecar and { + "sidecar.istio.io/inject": "false" + } or {}, + }, + "spec": { + "containers": [{ + "name": + "ml-pipeline-ui-artifact", + "image": f"{frontend_image}:{frontend_tag}", + "imagePullPolicy": + "IfNotPresent", + "ports": [{ + "containerPort": 3000 + }], + "env": [ + { + "name": "MINIO_ACCESS_KEY", + "valueFrom": { + "secretKeyRef": { + "key": "accesskey", + "name": "mlpipeline-minio-artifact" + } + } + }, + { + "name": "MINIO_SECRET_KEY", + "valueFrom": { + "secretKeyRef": { + "key": "secretkey", + "name": "mlpipeline-minio-artifact" + } + } + } + ], + "resources": { + "requests": { + "cpu": "10m", + "memory": "70Mi" + }, + "limits": { + "cpu": "100m", + "memory": "500Mi" + }, + } + }], + "serviceAccountName": + "default-editor" + } + } + } + }, + { + "apiVersion": "v1", + "kind": "Service", + "metadata": { + "name": "ml-pipeline-ui-artifact", + "namespace": namespace, + "labels": { + "app": "ml-pipeline-ui-artifact" + } + }, + "spec": { + "ports": [{ + "name": + "http", # name is required to let istio understand request protocol + "port": 80, + "protocol": "TCP", + "targetPort": 3000 + }], + "selector": { + "app": "ml-pipeline-ui-artifact" + } + } + }, + ] + print('Received request:\n', json.dumps(parent, sort_keys=True)) + print('Desired resources except secrets:\n', json.dumps(desired_resources, sort_keys=True)) + # Moved after the print argument because this is sensitive data. + desired_resources.append({ + "apiVersion": "v1", + "kind": "Secret", + "metadata": { + "name": "mlpipeline-minio-artifact", + "namespace": namespace, + }, + "data": { + "accesskey": minio_access_key, + "secretkey": minio_secret_key, + }, + }) + + return {"status": desired_status, "attachments": desired_resources} + + def do_POST(self): + # Serve the sync() function as a JSON webhook. + observed = json.loads( + self.rfile.read(int(self.headers.get("content-length")))) + desired = self.sync(observed["object"], observed["attachments"]) + + self.send_response(200) + self.send_header("Content-type", "application/json") + self.end_headers() + self.wfile.write(bytes(json.dumps(desired), 'utf-8')) + + return HTTPServer((url, int(controller_port)), Controller) + + +if __name__ == "__main__": + main() diff --git a/manifests/kustomize/env/platform-agnostic-multi-user/kustomization.yaml b/manifests/kustomize/env/platform-agnostic-multi-user/kustomization.yaml index aa4d6c6670a..014f327cc2a 100644 --- a/manifests/kustomize/env/platform-agnostic-multi-user/kustomization.yaml +++ b/manifests/kustomize/env/platform-agnostic-multi-user/kustomization.yaml @@ -9,8 +9,7 @@ resources: - ../../third-party/argo/installs/cluster - ../../third-party/mysql/base - ../../third-party/mysql/options/istio -- ../../third-party/minio/base -- ../../third-party/minio/options/istio +- ../../third-party/seaweedfs/istio # !!! 
If you want to customize the namespace, diff --git a/manifests/kustomize/env/platform-agnostic-postgresql/kustomization.yaml b/manifests/kustomize/env/platform-agnostic-postgresql/kustomization.yaml index e6e4af38686..a456d1cd881 100644 --- a/manifests/kustomize/env/platform-agnostic-postgresql/kustomization.yaml +++ b/manifests/kustomize/env/platform-agnostic-postgresql/kustomization.yaml @@ -5,7 +5,7 @@ resources: - ../../base/installs/generic/postgres - ../../base/metadata/overlays/postgres - ../../third-party/argo/installs/namespace -- ../../third-party/minio/base +- ../../third-party/seaweedfs/base - ../../third-party/postgresql/base diff --git a/manifests/kustomize/env/platform-agnostic/kustomization.yaml b/manifests/kustomize/env/platform-agnostic/kustomization.yaml index 0a9d90b016e..e1e4cdd4779 100644 --- a/manifests/kustomize/env/platform-agnostic/kustomization.yaml +++ b/manifests/kustomize/env/platform-agnostic/kustomization.yaml @@ -5,7 +5,7 @@ resources: - ../../base/installs/generic - ../../base/metadata/base - ../../third-party/argo/installs/namespace -- ../../third-party/minio/base +- ../../third-party/seaweedfs/base - ../../third-party/mysql/base # Identifier for application manager to apply ownerReference. diff --git a/manifests/kustomize/hack/test.sh b/manifests/kustomize/hack/test.sh index 6df4293c585..532b907506e 100755 --- a/manifests/kustomize/hack/test.sh +++ b/manifests/kustomize/hack/test.sh @@ -35,8 +35,6 @@ kustomization_yamls=( "env/gcp" "env/platform-agnostic" "env/platform-agnostic-emissary" - "env/aws" - "env/azure" ) for path in "${kustomization_yamls[@]}" do diff --git a/manifests/kustomize/third-party/argo/.krmignore b/manifests/kustomize/third-party/argo/.krmignore deleted file mode 100644 index 045951300cf..00000000000 --- a/manifests/kustomize/third-party/argo/.krmignore +++ /dev/null @@ -1 +0,0 @@ -upstream diff --git a/manifests/kustomize/third-party/argo/Kptfile b/manifests/kustomize/third-party/argo/Kptfile deleted file mode 100644 index bf0baf9863a..00000000000 --- a/manifests/kustomize/third-party/argo/Kptfile +++ /dev/null @@ -1,6 +0,0 @@ -apiVersion: kpt.dev/v1 -kind: Kptfile -metadata: - name: argo -info: - description: argo Kptfile in order to ignore upstream/ folder. diff --git a/manifests/kustomize/third-party/argo/Makefile b/manifests/kustomize/third-party/argo/Makefile index c057af11932..03ba544a99f 100644 --- a/manifests/kustomize/third-party/argo/Makefile +++ b/manifests/kustomize/third-party/argo/Makefile @@ -1,12 +1,17 @@ REPO_ROOT=../../../.. +# Update all remote Git references to use the version specified in third_party/argo/VERSION file update: - rm -rf upstream - mkdir upstream - kpt pkg get "https://github.com/argoproj/argo-workflows.git/manifests@$$(cat $(REPO_ROOT)/third_party/argo/VERSION)" upstream/ - # Remove the pre-hydrated manifests which we do not use. - rm upstream/manifests/*.yaml - # Remove README.md which might be confusing here. - rm upstream/manifests/README.md - # Include argo license file - curl -Lo upstream/manifests/LICENSE "https://raw.githubusercontent.com/argoproj/argo-workflows/$$(cat $(REPO_ROOT)/third_party/argo/VERSION)/LICENSE" + @echo "Updating Argo Workflows version references..." 
+ @VERSION=$$(cat $(REPO_ROOT)/third_party/argo/VERSION); \ + echo "Using Argo Workflows version: $$VERSION"; \ + sed -i.bak -E "s|ref=v[0-9]+\.[0-9]+\.[0-9]+|ref=$$VERSION|g" base/kustomization.yaml && \ + sed -i.bak -E "s|quay.io/argoproj/argoexec:v[0-9]+\.[0-9]+\.[0-9]+|quay.io/argoproj/argoexec:$$VERSION|g" base/workflow-controller-deployment-patch.yaml && \ + sed -i.bak -E "s|quay.io/argoproj/workflow-controller:v[0-9]+\.[0-9]+\.[0-9]+|quay.io/argoproj/workflow-controller:$$VERSION|g" base/workflow-controller-deployment-patch.yaml && \ + sed -i.bak -E "s|https://github.com/argoproj/argo-workflows/blob/v[0-9]+\.[0-9]+\.[0-9]+|https://github.com/argoproj/argo-workflows/blob/$$VERSION|g" base/workflow-controller-configmap-patch.yaml && \ + sed -i.bak -E "s|ref=v[0-9]+\.[0-9]+\.[0-9]+|ref=$$VERSION|g" installs/namespace/kustomization.yaml && \ + sed -i.bak -E "s|ref=v[0-9]+\.[0-9]+\.[0-9]+|ref=$$VERSION|g" installs/namespace/cluster-scoped/kustomization.yaml && \ + sed -i.bak -E "s|ref=v[0-9]+\.[0-9]+\.[0-9]+|ref=$$VERSION|g" installs/cluster/kustomization.yaml && \ + rm -f base/kustomization.yaml.bak base/workflow-controller-deployment-patch.yaml.bak base/workflow-controller-configmap-patch.yaml.bak installs/namespace/kustomization.yaml.bak installs/namespace/cluster-scoped/kustomization.yaml.bak installs/cluster/kustomization.yaml.bak; \ + echo "Successfully updated all kustomization files to use $$VERSION" + diff --git a/manifests/kustomize/third-party/argo/README.md b/manifests/kustomize/third-party/argo/README.md index b5f82380158..68d28f88221 100644 --- a/manifests/kustomize/third-party/argo/README.md +++ b/manifests/kustomize/third-party/argo/README.md @@ -2,34 +2,26 @@ Kubeflow Pipelines uses [Argo Workflows](https://argoproj.github.io/argo-workflows/) as the underlying workflow execution engine. -This folder contains: +This folder contains preconfigured Argo Workflows installations used in Kubeflow Pipelines distributions that use **remote references** to the upstream Argo Workflows repository instead of local copies. -* `upstream/manifests` a mirror of argo workflows manifests upstream. It should never be edited here. Run `make update` to update it. -* `installs` a folder with preconfigured argo workflows installations used in Kubeflow Pipelines distributions. +## Remote References Implementation - Major differences from upstream argo manifests: +KFP uses remote Git references to Argo Workflows manifests instead of maintaining local copies. This approach: - * Argo server is not included. - * Argo workflow controller configmap is preconfigured to integrate with KFP. - * Images are configured to use KFP redistributed ones which comply with open source licenses. - * A default artifact repository config is added for in-cluster minio service. +* Eliminates licensing concerns about copying manifests +* Simplifies deployment and upgrade processes +* Ensures direct tracking of upstream changes -## Upgrade argo +All kustomization files reference manifests directly from the [Argo Workflows repository](https://github.com/argoproj/argo-workflows) using versioned Git references. -Refer to [third_party/argo/README.md](../../../../third_party/argo/README.md). - -### Upgrade argo manifests - -Requirement: +## Upgrade Argo Workflows -Use kpt version above 1.0.0-beta.6, refer to [kpt installation](https://kpt.dev/installation/) for downloading kpt. - -As one step of above, we need to upgrade argo manifests in this folder. 
+Refer to [third_party/argo/README.md](../../../../third_party/argo/README.md). -1. Run: +### Update Manifests - ```bash - make update - ``` +To upgrade to a new Argo version: - Note, argo version is pulled from [third_party/argo/VERSION](../../../../third_party/argo/VERSION). Edit the VERSION file first. +1. Update the version in [third_party/argo/VERSION](../../../../third_party/argo/VERSION) +2. Run `make update` to automatically update all remote Git references to the new version +3. Test the new configuration with your KFP deployment diff --git a/manifests/kustomize/third-party/argo/base/kustomization.yaml b/manifests/kustomize/third-party/argo/base/kustomization.yaml index f8351753c6c..466dedf742a 100644 --- a/manifests/kustomize/third-party/argo/base/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/base/kustomization.yaml @@ -2,7 +2,7 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization resources: -- ../upstream/manifests/base/workflow-controller +- https://github.com/argoproj/argo-workflows/manifests/base/workflow-controller?ref=v3.6.7 patches: - path: workflow-controller-deployment-patch.yaml diff --git a/manifests/kustomize/third-party/argo/base/workflow-controller-configmap-patch.yaml b/manifests/kustomize/third-party/argo/base/workflow-controller-configmap-patch.yaml index 782e2c6fbce..4cd84f880e4 100644 --- a/manifests/kustomize/third-party/argo/base/workflow-controller-configmap-patch.yaml +++ b/manifests/kustomize/third-party/argo/base/workflow-controller-configmap-patch.yaml @@ -4,9 +4,9 @@ metadata: name: workflow-controller-configmap data: # References: - # * https://github.com/argoproj/argo-workflows/blob/v3.5.14/config/config.go - # * https://github.com/argoproj/argo-workflows/blob/v3.5.14/docs/workflow-controller-configmap.md - # * https://github.com/argoproj/argo-workflows/blob/v3.5.14/docs/workflow-controller-configmap.yaml + # * https://github.com/argoproj/argo-workflows/blob/v3.6.7/config/config.go + # * https://github.com/argoproj/argo-workflows/blob/v3.6.7/docs/workflow-controller-configmap.md + # * https://github.com/argoproj/argo-workflows/blob/v3.6.7/docs/workflow-controller-configmap.yaml # In artifactRepository.s3.endpoint, $(kfp-namespace) is needed, because in multi-user mode, pipelines may run in other namespaces. artifactRepository: | @@ -25,8 +25,7 @@ data: # The following format looks like: # artifacts/my-workflow-abc123/2018/08/23/my-workflow-abc123-1234567890 # Adding date into the path greatly reduces the chance of {{pod.name}} collision. - keyFormat: "artifacts/{{workflow.name}}/{{workflow.creationTimestamp.Y}}/{{workflow.creationTimestamp.m}}/{{workflow.creationTimestamp.d}}/{{pod.name}}" - # insecure will disable TLS. 
Primarily used for minio installs not configured with TLS + keyFormat: "private-artifacts/{{workflow.namespace}}/{{workflow.name}}/{{workflow.creationTimestamp.Y}}/{{workflow.creationTimestamp.m}}/{{workflow.creationTimestamp.d}}/{{pod.name}}" insecure: true accessKeySecret: name: mlpipeline-minio-artifact diff --git a/manifests/kustomize/third-party/argo/base/workflow-controller-deployment-patch.yaml b/manifests/kustomize/third-party/argo/base/workflow-controller-deployment-patch.yaml index 4724bb46779..2ea35587b7b 100644 --- a/manifests/kustomize/third-party/argo/base/workflow-controller-deployment-patch.yaml +++ b/manifests/kustomize/third-party/argo/base/workflow-controller-deployment-patch.yaml @@ -5,18 +5,24 @@ metadata: spec: template: spec: + securityContext: + seccompProfile: + type: RuntimeDefault containers: - name: workflow-controller - image: quay.io/argoproj/workflow-controller:v3.5.14 + image: quay.io/argoproj/workflow-controller:v3.6.7 args: - --configmap - workflow-controller-configmap - --executor-image - - quay.io/argoproj/argoexec:v3.5.14 + - quay.io/argoproj/argoexec:v3.6.7 securityContext: - seccompProfile: - type: RuntimeDefault + readOnlyRootFilesystem: true runAsNonRoot: true + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL resources: requests: cpu: 100m diff --git a/manifests/kustomize/third-party/argo/installs/cluster/kustomization.yaml b/manifests/kustomize/third-party/argo/installs/cluster/kustomization.yaml index 3d53f0718d4..13a01a0f324 100644 --- a/manifests/kustomize/third-party/argo/installs/cluster/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/installs/cluster/kustomization.yaml @@ -5,8 +5,8 @@ kind: Kustomization # because people who deploy cluster-scoped resources should be the same as who deploys # namespaced resources. resources: -- ../../upstream/manifests/base/crds -- ../../upstream/manifests/cluster-install/workflow-controller-rbac +- https://github.com/argoproj/argo-workflows/manifests/base/crds?ref=v3.6.7 +- https://github.com/argoproj/argo-workflows/manifests/cluster-install/workflow-controller-rbac?ref=v3.6.7 - ../../base patches: diff --git a/manifests/kustomize/third-party/argo/installs/namespace/cluster-scoped/kustomization.yaml b/manifests/kustomize/third-party/argo/installs/namespace/cluster-scoped/kustomization.yaml index 91b2fb9358b..fcbd36a4c9a 100644 --- a/manifests/kustomize/third-party/argo/installs/namespace/cluster-scoped/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/installs/namespace/cluster-scoped/kustomization.yaml @@ -2,4 +2,4 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization resources: # Minimal CRDs omit schema validation, recommended for production cluster. -- ../../../upstream/manifests/base/crds/minimal +- https://github.com/argoproj/argo-workflows/manifests/base/crds/minimal?ref=v3.6.7 diff --git a/manifests/kustomize/third-party/argo/installs/namespace/kustomization.yaml b/manifests/kustomize/third-party/argo/installs/namespace/kustomization.yaml index 55eb28483ff..00a3786d996 100644 --- a/manifests/kustomize/third-party/argo/installs/namespace/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/installs/namespace/kustomization.yaml @@ -1,14 +1,14 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization -# This kustomization.yaml is built mirroring ../../upstream/manifests/namespace-install/kustomization.yaml. +# This kustomization.yaml mirrors the upstream Argo Workflows namespace-install configuration with remote references. 
# The differences: # * this does not include argo server. # * this separates cluster-scoped resources to its own folder. resources: - ../../base -- ../../upstream/manifests/namespace-install/workflow-controller-rbac +- https://github.com/argoproj/argo-workflows/manifests/namespace-install/workflow-controller-rbac?ref=v3.6.7 patches: - path: workflow-controller-deployment-patch.json target: diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/Kptfile b/manifests/kustomize/third-party/argo/upstream/manifests/Kptfile deleted file mode 100644 index 7859465eeaa..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/Kptfile +++ /dev/null @@ -1,18 +0,0 @@ -apiVersion: kpt.dev/v1 -kind: Kptfile -metadata: - name: manifests -upstream: - type: git - git: - repo: https://github.com/argoproj/argo-workflows - directory: /manifests - ref: v3.5.14 - updateStrategy: resource-merge -upstreamLock: - type: git - git: - repo: https://github.com/argoproj/argo-workflows - directory: /manifests - ref: v3.5.14 - commit: d94c214176716ece96974fd98ac5107c38d61344 diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/LICENSE b/manifests/kustomize/third-party/argo/upstream/manifests/LICENSE deleted file mode 100644 index 67e99b065bc..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2017-2018 The Argo Authors - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
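Editorial note (not part of this change): the `make update` target and the `?ref=` pins shown earlier replace the old kpt-based `upstream/manifests` mirror, so the only local source of truth for the Argo version is `third_party/argo/VERSION`. As an illustrative sketch, a check like the following could confirm that every kustomization under `manifests/kustomize/third-party/argo` pins that version and that the remote references still build; it assumes `kustomize` is installed and that network access to GitHub is available at build time.

```bash
#!/usr/bin/env bash
# Sketch only: verify that all Argo kustomizations pin the version in
# third_party/argo/VERSION and that the remote references still resolve.
set -euo pipefail

REPO_ROOT="$(git rev-parse --show-toplevel)"
VERSION="$(cat "$REPO_ROOT/third_party/argo/VERSION")"
ARGO_DIR="$REPO_ROOT/manifests/kustomize/third-party/argo"

# Any ?ref= or image tag that does not match the pinned version is stale.
if grep -rEn "ref=v[0-9]+\.[0-9]+\.[0-9]+" "$ARGO_DIR" | grep -v "ref=$VERSION"; then
  echo "Found stale Argo version references (expected $VERSION)" >&2
  exit 1
fi

# Remote bases are fetched at build time, so this needs network access.
kustomize build "$ARGO_DIR/installs/namespace" >/dev/null
echo "All kustomizations reference Argo $VERSION and build successfully."
```

Because kustomize fetches the remote bases on every build, a check like this exercises both the pinned version and GitHub reachability, which the deleted local mirror never required.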
diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-deployment.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-deployment.yaml deleted file mode 100644 index 861f98f005a..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-deployment.yaml +++ /dev/null @@ -1,48 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: # kpt-merge: /argo-server - name: argo-server - annotations: - internal.kpt.dev/upstream-identifier: 'apps|Deployment|default|argo-server' -spec: - selector: - matchLabels: - app: argo-server - template: - metadata: - labels: - app: argo-server - spec: - serviceAccountName: argo-server - containers: - - name: argo-server - image: quay.io/argoproj/argocli:latest - securityContext: - readOnlyRootFilesystem: true - runAsNonRoot: true - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - args: [server] - env: [] - ports: - - name: web - containerPort: 2746 - readinessProbe: - httpGet: - port: 2746 - scheme: HTTPS - path: / - initialDelaySeconds: 10 - periodSeconds: 20 - volumeMounts: - - mountPath: /tmp - name: tmp - volumes: - - name: tmp - emptyDir: {} - securityContext: - runAsNonRoot: true - nodeSelector: - kubernetes.io/os: linux diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-sa.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-sa.yaml deleted file mode 100644 index 478935f5e6e..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-sa.yaml +++ /dev/null @@ -1,6 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: # kpt-merge: /argo-server - name: argo-server - annotations: - internal.kpt.dev/upstream-identifier: '|ServiceAccount|default|argo-server' diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-service.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-service.yaml deleted file mode 100644 index c4da7e46f88..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-service.yaml +++ /dev/null @@ -1,13 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: # kpt-merge: /argo-server - name: argo-server - annotations: - internal.kpt.dev/upstream-identifier: '|Service|default|argo-server' -spec: - selector: - app: argo-server - ports: - - name: web - port: 2746 - targetPort: 2746 diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/kustomization.yaml deleted file mode 100644 index e17bc754ce5..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/kustomization.yaml +++ /dev/null @@ -1,6 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- argo-server-deployment.yaml -- argo-server-sa.yaml -- argo-server-service.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/README.md b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/README.md deleted file mode 100644 index bca5186fefe..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Full CRDs - -These CRDs have full schema validation. 
As a result, they are large and probably not suitable to be used in your cluster. diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_clusterworkflowtemplates.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_clusterworkflowtemplates.yaml deleted file mode 100644 index 817556270df..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_clusterworkflowtemplates.yaml +++ /dev/null @@ -1,19154 +0,0 @@ -# This is an auto-generated file. DO NOT EDIT -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: # kpt-merge: /clusterworkflowtemplates.argoproj.io - name: clusterworkflowtemplates.argoproj.io - annotations: - internal.kpt.dev/upstream-identifier: 'apiextensions.k8s.io|CustomResourceDefinition|default|clusterworkflowtemplates.argoproj.io' -spec: - group: argoproj.io - names: - kind: ClusterWorkflowTemplate - listKind: ClusterWorkflowTemplateList - plural: clusterworkflowtemplates - shortNames: - - clusterwftmpl - - cwft - singular: clusterworkflowtemplate - scope: Cluster - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - properties: - activeDeadlineSeconds: - format: int64 - type: integer - affinity: - properties: - nodeAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - preference: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - weight: - format: int32 - type: integer - required: - - preference - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - properties: - nodeSelectorTerms: - items: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - type: array - required: - - nodeSelectorTerms - type: object - type: object - podAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - 
weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - podAntiAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - type: object - archiveLogs: - type: boolean - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - 
properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: 
- key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - artifactGC: - properties: - forceFinalizerRemoval: - type: boolean - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - podSpecPatch: - type: string - serviceAccountName: - type: string - strategy: - enum: - - "" - - 
OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactRepositoryRef: - properties: - configMap: - type: string - key: - type: string - type: object - automountServiceAccountToken: - type: boolean - dnsConfig: - properties: - nameservers: - items: - type: string - type: array - options: - items: - properties: - name: - type: string - value: - type: string - type: object - type: array - searches: - items: - type: string - type: array - type: object - dnsPolicy: - type: string - entrypoint: - type: string - executor: - properties: - serviceAccountName: - type: string - type: object - hooks: - additionalProperties: - properties: - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - 
properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: 
string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - expression: - type: string - template: - type: string - templateRef: - properties: - clusterScope: - type: boolean - name: - type: string - template: - type: string - type: object - type: object - type: object - hostAliases: - items: - properties: - hostnames: - items: - type: string - type: array - ip: - type: string - type: object - type: array - hostNetwork: - type: boolean - imagePullSecrets: - items: - properties: - name: - type: string - type: object - type: array - metrics: - properties: - prometheus: - items: - properties: - counter: - properties: - value: - type: string - required: - - value - type: object - gauge: - properties: - operation: - type: string - realtime: - type: boolean - value: - type: string - required: - - realtime - - value - type: object - help: - type: string - histogram: - properties: - buckets: - items: - type: number - type: array - value: - type: string - required: - - buckets - - value - type: object - labels: - items: - properties: - key: - type: string - value: - type: string - required: - - key - - value - type: object - type: array - name: - type: string - when: - type: string - required: - - help - - name - type: object - type: array - required: - - prometheus - type: object - nodeSelector: - additionalProperties: - type: string - type: object - onExit: - type: string - parallelism: - format: int64 - type: integer - podDisruptionBudget: - properties: - maxUnavailable: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - minAvailable: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - selector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - type: object - podGC: - properties: - deleteDelayDuration: - type: string - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - strategy: - type: string - type: object - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - podPriority: - format: int32 - type: integer - 
podPriorityClassName: - type: string - podSpecPatch: - type: string - priority: - format: int32 - type: integer - retryStrategy: - properties: - affinity: - properties: - nodeAntiAffinity: - type: object - type: object - backoff: - properties: - duration: - type: string - factor: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - maxDuration: - type: string - type: object - expression: - type: string - limit: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - retryPolicy: - type: string - type: object - schedulerName: - type: string - securityContext: - properties: - fsGroup: - format: int64 - type: integer - fsGroupChangePolicy: - type: string - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - supplementalGroups: - items: - format: int64 - type: integer - type: array - sysctls: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - serviceAccountName: - type: string - shutdown: - type: string - suspend: - type: boolean - synchronization: - properties: - mutex: - properties: - name: - type: string - namespace: - type: string - type: object - semaphore: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - namespace: - type: string - type: object - type: object - templateDefaults: - properties: - activeDeadlineSeconds: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - affinity: - properties: - nodeAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - preference: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - weight: - format: int32 - type: integer - required: - - preference - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - properties: - nodeSelectorTerms: - items: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - type: array - required: - - nodeSelectorTerms - type: object - type: object - podAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - 
matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - podAntiAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: 
[… removed diff lines elided: the Argo Workflows `Workflow` CRD OpenAPI v3 template schema, flattened here by extraction — archiveLocation; artifact sources (artifactory, azure, gcs, git, hdfs, http, oss, raw, s3) with their secret/configMap key references; container, containerSet, and initContainers specs (env, envFrom, lifecycle, liveness/readiness/startup probes, ports, resources, securityContext, volumeDevices, volumeMounts); dag tasks (arguments, continueOn, dependencies, depends, hooks, onExit, when, withItems, withParam, withSequence); data source and transformation; executor; failFast; hostAliases; http templates; inputs and outputs (artifacts, parameters, exitCode, result); memoize; metadata; metrics (prometheus counter, gauge, histogram); nodeSelector; parallelism; plugin; podSpecPatch; priority; priorityClassName; and resource templates (action, failureCondition, flags, manifest, manifestFrom) …]
object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: 
int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - required: - - artifact - type: object - mergeStrategy: - type: string - setOwnerReference: - type: boolean - successCondition: - type: string - required: - - action - type: object - retryStrategy: - properties: - affinity: - properties: - nodeAntiAffinity: - type: object - type: object - backoff: - properties: - duration: - type: string - factor: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - maxDuration: - type: string - type: object - expression: - type: string - limit: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - retryPolicy: - type: string - type: object - schedulerName: - type: string - script: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - 
lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - 
format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - source: - type: string - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: 
string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - - source - type: object - securityContext: - properties: - fsGroup: - format: int64 - type: integer - fsGroupChangePolicy: - type: string - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - supplementalGroups: - items: - format: int64 - type: integer - type: array - sysctls: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - serviceAccountName: - type: string - sidecars: - items: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - 
type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - mirrorVolumeMounts: - type: boolean - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: 
- allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - type: array - steps: - items: - type: array - type: array - suspend: - properties: - duration: - type: string - type: object - synchronization: - properties: - mutex: - properties: - name: - type: string - namespace: - type: string - type: object - semaphore: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - namespace: - type: string - type: object - type: object - timeout: - type: string - tolerations: - items: - properties: - effect: - type: string - key: - type: string - operator: - type: string - tolerationSeconds: - format: int64 - type: integer - value: - type: string - type: object - type: array - volumes: - items: - properties: - awsElasticBlockStore: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - readOnly: - type: boolean - volumeID: - type: 
string - required: - - volumeID - type: object - azureDisk: - properties: - cachingMode: - type: string - diskName: - type: string - diskURI: - type: string - fsType: - type: string - kind: - type: string - readOnly: - type: boolean - required: - - diskName - - diskURI - type: object - azureFile: - properties: - readOnly: - type: boolean - secretName: - type: string - shareName: - type: string - required: - - secretName - - shareName - type: object - cephfs: - properties: - monitors: - items: - type: string - type: array - path: - type: string - readOnly: - type: boolean - secretFile: - type: string - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - monitors - type: object - cinder: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeID: - type: string - required: - - volumeID - type: object - configMap: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - csi: - properties: - driver: - type: string - fsType: - type: string - nodePublishSecretRef: - properties: - name: - type: string - type: object - readOnly: - type: boolean - volumeAttributes: - additionalProperties: - type: string - type: object - required: - - driver - type: object - downwardAPI: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - emptyDir: - properties: - medium: - type: string - sizeLimit: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - ephemeral: - properties: - volumeClaimTemplate: - properties: - metadata: - type: object - spec: - properties: - accessModes: - items: - type: string - type: array - dataSource: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - dataSourceRef: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - 
selector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - storageClassName: - type: string - volumeMode: - type: string - volumeName: - type: string - type: object - required: - - spec - type: object - type: object - fc: - properties: - fsType: - type: string - lun: - format: int32 - type: integer - readOnly: - type: boolean - targetWWNs: - items: - type: string - type: array - wwids: - items: - type: string - type: array - type: object - flexVolume: - properties: - driver: - type: string - fsType: - type: string - options: - additionalProperties: - type: string - type: object - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - required: - - driver - type: object - flocker: - properties: - datasetName: - type: string - datasetUUID: - type: string - type: object - gcePersistentDisk: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - pdName: - type: string - readOnly: - type: boolean - required: - - pdName - type: object - gitRepo: - properties: - directory: - type: string - repository: - type: string - revision: - type: string - required: - - repository - type: object - glusterfs: - properties: - endpoints: - type: string - path: - type: string - readOnly: - type: boolean - required: - - endpoints - - path - type: object - hostPath: - properties: - path: - type: string - type: - type: string - required: - - path - type: object - iscsi: - properties: - chapAuthDiscovery: - type: boolean - chapAuthSession: - type: boolean - fsType: - type: string - initiatorName: - type: string - iqn: - type: string - iscsiInterface: - type: string - lun: - format: int32 - type: integer - portals: - items: - type: string - type: array - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - targetPortal: - type: string - required: - - iqn - - lun - - targetPortal - type: object - name: - type: string - nfs: - properties: - path: - type: string - readOnly: - type: boolean - server: - type: string - required: - - path - - server - type: object - persistentVolumeClaim: - properties: - claimName: - type: string - readOnly: - type: boolean - required: - - claimName - type: object - photonPersistentDisk: - properties: - fsType: - type: string - pdID: - type: string - required: - - pdID - type: object - portworxVolume: - properties: - fsType: - type: string - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - projected: - properties: - defaultMode: - format: int32 - type: integer - sources: - items: - properties: - configMap: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - downwardAPI: - properties: - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: 
^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - secret: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - serviceAccountToken: - properties: - audience: - type: string - expirationSeconds: - format: int64 - type: integer - path: - type: string - required: - - path - type: object - type: object - type: array - type: object - quobyte: - properties: - group: - type: string - readOnly: - type: boolean - registry: - type: string - tenant: - type: string - user: - type: string - volume: - type: string - required: - - registry - - volume - type: object - rbd: - properties: - fsType: - type: string - image: - type: string - keyring: - type: string - monitors: - items: - type: string - type: array - pool: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - image - - monitors - type: object - scaleIO: - properties: - fsType: - type: string - gateway: - type: string - protectionDomain: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - sslEnabled: - type: boolean - storageMode: - type: string - storagePool: - type: string - system: - type: string - volumeName: - type: string - required: - - gateway - - secretRef - - system - type: object - secret: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - optional: - type: boolean - secretName: - type: string - type: object - storageos: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeName: - type: string - volumeNamespace: - type: string - type: object - vsphereVolume: - properties: - fsType: - type: string - storagePolicyID: - type: string - storagePolicyName: - type: string - volumePath: - type: string - required: - - volumePath - type: object - required: - - name - type: object - type: array - type: object - templates: - items: - properties: - activeDeadlineSeconds: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - affinity: - properties: - nodeAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - preference: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - weight: - format: int32 - type: integer - required: - - preference - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - properties: - nodeSelectorTerms: - items: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - 
type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - type: array - required: - - nodeSelectorTerms - type: object - type: object - podAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - podAntiAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - 
required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - type: object - archiveLocation: - properties: - archiveLogs: - type: boolean - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - 
type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - raw: - properties: - data: - type: string - required: - - data - type: object - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - type: object - automountServiceAccountToken: - type: boolean - container: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - 
- type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - 
type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - 
port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - image - type: object - containerSet: - properties: - containers: - items: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - dependencies: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - 
tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: 
string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - type: array - retryStrategy: - properties: - duration: - type: string - retries: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - retries - type: object - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - required: - - containers - type: object - daemon: - type: boolean - dag: - properties: - failFast: - type: boolean - target: - type: string - tasks: - items: - properties: - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - 
additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: 
- type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - continueOn: - properties: - error: - type: boolean - failed: - type: boolean - type: object - dependencies: - 
items: - type: string - type: array - depends: - type: string - hooks: - additionalProperties: - properties: - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - 
optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: 
- key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - expression: - type: string - template: - type: string - templateRef: - properties: - clusterScope: - type: boolean - name: - type: string - template: - type: string - type: object - type: object - type: object - inline: {} - name: - type: string - onExit: - type: string - template: - type: string - templateRef: - properties: - clusterScope: - type: boolean - name: - type: string - template: - type: string - type: object - when: - type: string - withItems: - items: - type: object - type: array - withParam: - type: string - withSequence: - properties: - count: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - end: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - format: - type: string - start: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - type: object - required: - - name - type: object - type: array - required: - - tasks - type: object - data: - properties: - source: - properties: - artifactPaths: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - 
type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - 
required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: object - transformation: - items: - properties: - expression: - type: string - required: - - expression - type: object - type: array - required: - - source - - transformation - type: object - executor: - properties: - serviceAccountName: - type: string - type: object - failFast: - type: boolean - hostAliases: - items: - properties: - hostnames: - items: - type: string - type: array - ip: - type: string - type: object - type: array - http: - properties: - body: - type: string - bodyFrom: - properties: - bytes: - format: byte - type: string - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - insecureSkipVerify: - type: boolean - method: - type: string - successCondition: - type: string - timeoutSeconds: - format: int64 - type: integer - url: - type: string - required: - - url - type: object - initContainers: - items: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - 
type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - mirrorVolumeMounts: - type: boolean - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: 
integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - 
required: - - name - type: object - type: array - inputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - 
type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - 
type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - memoize: - properties: - cache: - properties: - configMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - configMap - type: object - key: - type: string - maxAge: - type: string - required: - - cache - - key - - maxAge - type: object - metadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - metrics: - properties: - prometheus: - items: - properties: - counter: - properties: - value: - type: string - required: - - value - type: object - gauge: - properties: - operation: - type: string - realtime: - type: boolean - value: - type: string - required: - - realtime - - value - type: object - help: - type: string - histogram: - properties: - buckets: - items: - type: number - type: array - value: - type: string - required: - - buckets - - value - type: object - labels: - items: - properties: - key: - type: string - value: - type: string - required: - - key - - value - type: object - type: array - name: - type: string - when: - type: string - required: - - help - - name - type: object - type: array - required: - - prometheus - type: object - name: - type: string - nodeSelector: - additionalProperties: - type: string - type: object - outputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - 
type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: 
string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - exitCode: - type: string - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - result: - type: string - type: object - parallelism: - format: int64 - type: integer - plugin: - type: object - podSpecPatch: - type: string - priority: - format: int32 - type: integer - priorityClassName: - type: string - resource: - properties: - action: - type: string - failureCondition: - type: string - flags: - items: - type: string - type: array - manifest: - type: string - manifestFrom: - properties: - artifact: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - 
container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - 
type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - required: - - artifact - type: object - mergeStrategy: - type: string - setOwnerReference: - type: boolean - successCondition: - type: string - required: - - action - type: object - retryStrategy: - properties: - affinity: - properties: - nodeAntiAffinity: - type: object - type: object - backoff: - properties: - duration: - type: string - factor: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - maxDuration: - type: string - type: object - expression: - type: string - limit: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - retryPolicy: - type: string - type: object - schedulerName: - type: string - script: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: 
string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: 
int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - source: - type: string - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: 
boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - image - - source - type: object - securityContext: - properties: - fsGroup: - format: int64 - type: integer - fsGroupChangePolicy: - type: string - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - supplementalGroups: - items: - format: int64 - type: integer - type: array - sysctls: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - serviceAccountName: - type: string - sidecars: - items: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: 
integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - mirrorVolumeMounts: - type: boolean - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - 
pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - type: array - steps: - items: - type: array - type: array - suspend: - properties: - duration: - type: string - type: object - synchronization: - properties: - mutex: - properties: - name: - type: string - namespace: - type: string - type: object - semaphore: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - namespace: - 
type: string - type: object - type: object - timeout: - type: string - tolerations: - items: - properties: - effect: - type: string - key: - type: string - operator: - type: string - tolerationSeconds: - format: int64 - type: integer - value: - type: string - type: object - type: array - volumes: - items: - properties: - awsElasticBlockStore: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - azureDisk: - properties: - cachingMode: - type: string - diskName: - type: string - diskURI: - type: string - fsType: - type: string - kind: - type: string - readOnly: - type: boolean - required: - - diskName - - diskURI - type: object - azureFile: - properties: - readOnly: - type: boolean - secretName: - type: string - shareName: - type: string - required: - - secretName - - shareName - type: object - cephfs: - properties: - monitors: - items: - type: string - type: array - path: - type: string - readOnly: - type: boolean - secretFile: - type: string - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - monitors - type: object - cinder: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeID: - type: string - required: - - volumeID - type: object - configMap: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - csi: - properties: - driver: - type: string - fsType: - type: string - nodePublishSecretRef: - properties: - name: - type: string - type: object - readOnly: - type: boolean - volumeAttributes: - additionalProperties: - type: string - type: object - required: - - driver - type: object - downwardAPI: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - emptyDir: - properties: - medium: - type: string - sizeLimit: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - ephemeral: - properties: - volumeClaimTemplate: - properties: - metadata: - type: object - spec: - properties: - accessModes: - items: - type: string - type: array - dataSource: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - dataSourceRef: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: 
integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - selector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - storageClassName: - type: string - volumeMode: - type: string - volumeName: - type: string - type: object - required: - - spec - type: object - type: object - fc: - properties: - fsType: - type: string - lun: - format: int32 - type: integer - readOnly: - type: boolean - targetWWNs: - items: - type: string - type: array - wwids: - items: - type: string - type: array - type: object - flexVolume: - properties: - driver: - type: string - fsType: - type: string - options: - additionalProperties: - type: string - type: object - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - required: - - driver - type: object - flocker: - properties: - datasetName: - type: string - datasetUUID: - type: string - type: object - gcePersistentDisk: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - pdName: - type: string - readOnly: - type: boolean - required: - - pdName - type: object - gitRepo: - properties: - directory: - type: string - repository: - type: string - revision: - type: string - required: - - repository - type: object - glusterfs: - properties: - endpoints: - type: string - path: - type: string - readOnly: - type: boolean - required: - - endpoints - - path - type: object - hostPath: - properties: - path: - type: string - type: - type: string - required: - - path - type: object - iscsi: - properties: - chapAuthDiscovery: - type: boolean - chapAuthSession: - type: boolean - fsType: - type: string - initiatorName: - type: string - iqn: - type: string - iscsiInterface: - type: string - lun: - format: int32 - type: integer - portals: - items: - type: string - type: array - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - targetPortal: - type: string - required: - - iqn - - lun - - targetPortal - type: object - name: - type: string - nfs: - properties: - path: - type: string - readOnly: - type: boolean - server: - type: string - required: - - path - - server - type: object - persistentVolumeClaim: - properties: - claimName: - type: string - readOnly: - type: boolean - required: - - claimName - type: object - photonPersistentDisk: - properties: - fsType: - type: string - pdID: - type: string - required: - - pdID - type: object - portworxVolume: - properties: - fsType: - type: string - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - projected: - properties: - defaultMode: - format: int32 - type: integer - sources: - items: - properties: - configMap: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - 
type: boolean - type: object - downwardAPI: - properties: - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - secret: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - serviceAccountToken: - properties: - audience: - type: string - expirationSeconds: - format: int64 - type: integer - path: - type: string - required: - - path - type: object - type: object - type: array - type: object - quobyte: - properties: - group: - type: string - readOnly: - type: boolean - registry: - type: string - tenant: - type: string - user: - type: string - volume: - type: string - required: - - registry - - volume - type: object - rbd: - properties: - fsType: - type: string - image: - type: string - keyring: - type: string - monitors: - items: - type: string - type: array - pool: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - image - - monitors - type: object - scaleIO: - properties: - fsType: - type: string - gateway: - type: string - protectionDomain: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - sslEnabled: - type: boolean - storageMode: - type: string - storagePool: - type: string - system: - type: string - volumeName: - type: string - required: - - gateway - - secretRef - - system - type: object - secret: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - optional: - type: boolean - secretName: - type: string - type: object - storageos: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeName: - type: string - volumeNamespace: - type: string - type: object - vsphereVolume: - properties: - fsType: - type: string - storagePolicyID: - type: string - storagePolicyName: - type: string - volumePath: - type: string - required: - - volumePath - type: object - required: - - name - type: object - type: array - type: object - type: array - tolerations: - items: - properties: - effect: - type: string - key: - type: string - operator: - type: string - tolerationSeconds: - format: int64 - type: integer - value: - type: string - type: object - type: array - ttlStrategy: - properties: - secondsAfterCompletion: - format: int32 - type: integer - secondsAfterFailure: - format: int32 - type: integer - secondsAfterSuccess: - format: int32 - type: integer - type: object - volumeClaimGC: - properties: - strategy: - type: string - type: object - volumeClaimTemplates: - items: - properties: - apiVersion: - type: string - kind: 
- type: string - metadata: - type: object - spec: - properties: - accessModes: - items: - type: string - type: array - dataSource: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - dataSourceRef: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - selector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - storageClassName: - type: string - volumeMode: - type: string - volumeName: - type: string - type: object - status: - properties: - accessModes: - items: - type: string - type: array - allocatedResources: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - capacity: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - conditions: - items: - properties: - lastProbeTime: - format: date-time - type: string - lastTransitionTime: - format: date-time - type: string - message: - type: string - reason: - type: string - status: - type: string - type: - type: string - required: - - status - - type - type: object - type: array - phase: - type: string - resizeStatus: - type: string - type: object - type: object - type: array - volumes: - items: - properties: - awsElasticBlockStore: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - azureDisk: - properties: - cachingMode: - type: string - diskName: - type: string - diskURI: - type: string - fsType: - type: string - kind: - type: string - readOnly: - type: boolean - required: - - diskName - - diskURI - type: object - azureFile: - properties: - readOnly: - type: boolean - secretName: - type: string - shareName: - type: string - required: - - secretName - - shareName - type: object - cephfs: - properties: - monitors: - items: - type: string - type: array - path: - type: string - readOnly: - type: boolean - secretFile: - type: string - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - monitors - type: object - cinder: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeID: - type: string - required: - - volumeID - type: object - configMap: - properties: - 
defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - csi: - properties: - driver: - type: string - fsType: - type: string - nodePublishSecretRef: - properties: - name: - type: string - type: object - readOnly: - type: boolean - volumeAttributes: - additionalProperties: - type: string - type: object - required: - - driver - type: object - downwardAPI: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - emptyDir: - properties: - medium: - type: string - sizeLimit: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - ephemeral: - properties: - volumeClaimTemplate: - properties: - metadata: - type: object - spec: - properties: - accessModes: - items: - type: string - type: array - dataSource: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - dataSourceRef: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - selector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - storageClassName: - type: string - volumeMode: - type: string - volumeName: - type: string - type: object - required: - - spec - type: object - type: object - fc: - properties: - fsType: - type: string - lun: - format: int32 - type: integer - readOnly: - type: boolean - targetWWNs: - items: - type: string - type: array - wwids: - items: - type: string - type: array - type: object - flexVolume: - properties: - driver: - type: string - fsType: - type: string - options: - additionalProperties: - type: string - type: object - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - required: - - driver - type: object - 
flocker: - properties: - datasetName: - type: string - datasetUUID: - type: string - type: object - gcePersistentDisk: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - pdName: - type: string - readOnly: - type: boolean - required: - - pdName - type: object - gitRepo: - properties: - directory: - type: string - repository: - type: string - revision: - type: string - required: - - repository - type: object - glusterfs: - properties: - endpoints: - type: string - path: - type: string - readOnly: - type: boolean - required: - - endpoints - - path - type: object - hostPath: - properties: - path: - type: string - type: - type: string - required: - - path - type: object - iscsi: - properties: - chapAuthDiscovery: - type: boolean - chapAuthSession: - type: boolean - fsType: - type: string - initiatorName: - type: string - iqn: - type: string - iscsiInterface: - type: string - lun: - format: int32 - type: integer - portals: - items: - type: string - type: array - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - targetPortal: - type: string - required: - - iqn - - lun - - targetPortal - type: object - name: - type: string - nfs: - properties: - path: - type: string - readOnly: - type: boolean - server: - type: string - required: - - path - - server - type: object - persistentVolumeClaim: - properties: - claimName: - type: string - readOnly: - type: boolean - required: - - claimName - type: object - photonPersistentDisk: - properties: - fsType: - type: string - pdID: - type: string - required: - - pdID - type: object - portworxVolume: - properties: - fsType: - type: string - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - projected: - properties: - defaultMode: - format: int32 - type: integer - sources: - items: - properties: - configMap: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - downwardAPI: - properties: - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - secret: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - serviceAccountToken: - properties: - audience: - type: string - expirationSeconds: - format: int64 - type: integer - path: - type: string - required: - - path - type: object - type: object - type: array - type: object - quobyte: - properties: - group: - type: string - readOnly: - type: boolean - registry: - type: string - tenant: - type: string - user: - type: string - volume: - type: string - required: - - registry - - volume - type: object - rbd: - properties: - fsType: - type: 
string - image: - type: string - keyring: - type: string - monitors: - items: - type: string - type: array - pool: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - image - - monitors - type: object - scaleIO: - properties: - fsType: - type: string - gateway: - type: string - protectionDomain: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - sslEnabled: - type: boolean - storageMode: - type: string - storagePool: - type: string - system: - type: string - volumeName: - type: string - required: - - gateway - - secretRef - - system - type: object - secret: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - optional: - type: boolean - secretName: - type: string - type: object - storageos: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeName: - type: string - volumeNamespace: - type: string - type: object - vsphereVolume: - properties: - fsType: - type: string - storagePolicyID: - type: string - storagePolicyName: - type: string - volumePath: - type: string - required: - - volumePath - type: object - required: - - name - type: object - type: array - workflowMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - labelsFrom: - additionalProperties: - properties: - expression: - type: string - required: - - expression - type: object - type: object - type: object - workflowTemplateRef: - properties: - clusterScope: - type: boolean - name: - type: string - type: object - type: object - required: - - metadata - - spec - type: object - served: true - storage: true
diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_cronworkflows.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_cronworkflows.yaml
deleted file mode 100644
index 0949d08d44c..00000000000
--- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_cronworkflows.yaml
+++ /dev/null
@@ -1,19219 +0,0 @@
-# This is an auto-generated file.
DO NOT EDIT -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: # kpt-merge: /cronworkflows.argoproj.io - name: cronworkflows.argoproj.io - annotations: - internal.kpt.dev/upstream-identifier: 'apiextensions.k8s.io|CustomResourceDefinition|default|cronworkflows.argoproj.io' -spec: - group: argoproj.io - names: - kind: CronWorkflow - listKind: CronWorkflowList - plural: cronworkflows - shortNames: - - cwf - - cronwf - singular: cronworkflow - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - properties: - concurrencyPolicy: - type: string - failedJobsHistoryLimit: - format: int32 - type: integer - schedule: - type: string - startingDeadlineSeconds: - format: int64 - type: integer - successfulJobsHistoryLimit: - format: int32 - type: integer - suspend: - type: boolean - timezone: - type: string - workflowMetadata: - type: object - workflowSpec: - properties: - activeDeadlineSeconds: - format: int64 - type: integer - affinity: - properties: - nodeAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - preference: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - weight: - format: int32 - type: integer - required: - - preference - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - properties: - nodeSelectorTerms: - items: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - type: array - required: - - nodeSelectorTerms - type: object - type: object - podAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - 
operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - podAntiAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - type: object - archiveLogs: - type: boolean - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: 
- - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: 
object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - artifactGC: - properties: - forceFinalizerRemoval: - type: boolean - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - podSpecPatch: - type: string - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactRepositoryRef: - properties: - configMap: - type: string - key: - type: string - type: object - automountServiceAccountToken: - type: boolean - dnsConfig: - properties: - nameservers: - items: - type: 
string - type: array - options: - items: - properties: - name: - type: string - value: - type: string - type: object - type: array - searches: - items: - type: string - type: array - type: object - dnsPolicy: - type: string - entrypoint: - type: string - executor: - properties: - serviceAccountName: - type: string - type: object - hooks: - additionalProperties: - properties: - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: 
- auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - 
parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - expression: - type: string - template: - type: string - templateRef: - properties: - clusterScope: - type: boolean - name: - type: string - template: - type: string - type: object - type: object - type: object - hostAliases: - items: - properties: - hostnames: - items: - type: string - type: array - ip: - type: string - type: object - type: array - hostNetwork: - type: boolean - imagePullSecrets: - items: - properties: - name: - type: string - type: object - type: array - metrics: - properties: - prometheus: - items: - properties: - counter: - properties: - value: - type: string - required: - - value - type: object - gauge: - properties: - operation: - type: string - realtime: - type: boolean - value: - type: string - required: - - realtime - - value - type: object - help: - type: string - histogram: - properties: - buckets: - items: - type: number - type: array - value: - type: string - required: - - buckets - - value - type: object - labels: - items: - properties: - key: - type: string - value: - type: string - required: - - key - - value - type: object - type: array - name: - type: string - when: - type: string - required: - - help - - name - type: object - type: array - required: - - prometheus - type: object - nodeSelector: - additionalProperties: - type: string - type: object - onExit: - type: string - parallelism: - format: int64 - type: integer - podDisruptionBudget: - properties: - maxUnavailable: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - minAvailable: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - selector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - type: object - podGC: - properties: - deleteDelayDuration: - type: string - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - strategy: - type: string - type: object - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - podPriority: - format: int32 - type: integer - podPriorityClassName: - type: string - podSpecPatch: - type: string - priority: - format: int32 - type: integer - retryStrategy: - properties: - affinity: - properties: - nodeAntiAffinity: - type: object - type: object - backoff: - properties: - duration: - type: string - factor: - anyOf: - - type: 
integer - - type: string - x-kubernetes-int-or-string: true - maxDuration: - type: string - type: object - expression: - type: string - limit: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - retryPolicy: - type: string - type: object - schedulerName: - type: string - securityContext: - properties: - fsGroup: - format: int64 - type: integer - fsGroupChangePolicy: - type: string - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - supplementalGroups: - items: - format: int64 - type: integer - type: array - sysctls: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - serviceAccountName: - type: string - shutdown: - type: string - suspend: - type: boolean - synchronization: - properties: - mutex: - properties: - name: - type: string - namespace: - type: string - type: object - semaphore: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - namespace: - type: string - type: object - type: object - templateDefaults: - properties: - activeDeadlineSeconds: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - affinity: - properties: - nodeAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - preference: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - weight: - format: int32 - type: integer - required: - - preference - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - properties: - nodeSelectorTerms: - items: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - type: array - required: - - nodeSelectorTerms - type: object - type: object - podAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: 
- properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - podAntiAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - type: object - archiveLocation: - properties: - archiveLogs: - type: boolean - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: 
string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: 
object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - raw: - properties: - data: - type: string - required: - - data - type: object - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - type: object - automountServiceAccountToken: - type: boolean - container: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - 
command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: 
integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - 
required: - - name - type: object - containerSet: - properties: - containers: - items: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - dependencies: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: 
- format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - 
type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - type: array - retryStrategy: - properties: - duration: - type: string - retries: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - retries - type: object - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - required: - - containers - type: object - daemon: - type: boolean - dag: - properties: - failFast: - type: boolean - target: - type: string - tasks: - items: - properties: - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - 
container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - 
properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - continueOn: - properties: - error: - type: boolean - failed: - type: boolean - type: object - dependencies: - items: - type: string - type: array - depends: - type: string - hooks: - additionalProperties: - properties: - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - 
[collapsed diff hunk: removed lines of the auto-generated Argo Workflows CRD openAPIV3Schema. The flattened span covers DAG task inputs/outputs and parameters; artifact location types (artifactory, azure, gcs, git, hdfs, http, oss, raw, s3) with their secret key references and artifactGC settings; container, script, initContainer, and sidecar template fields (env, envFrom, lifecycle, liveness/readiness/startup probes, ports, resources, securityContext, volumeDevices, volumeMounts); and the data, executor, hostAliases, http, memoize, metadata, metrics, outputs, resource, retryStrategy, suspend, synchronization, tolerations, and volumes sections of the template schema. The original one-field-per-line YAML of this generated manifest is not reproduced here.]
volumeName: - type: string - type: object - required: - - spec - type: object - type: object - fc: - properties: - fsType: - type: string - lun: - format: int32 - type: integer - readOnly: - type: boolean - targetWWNs: - items: - type: string - type: array - wwids: - items: - type: string - type: array - type: object - flexVolume: - properties: - driver: - type: string - fsType: - type: string - options: - additionalProperties: - type: string - type: object - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - required: - - driver - type: object - flocker: - properties: - datasetName: - type: string - datasetUUID: - type: string - type: object - gcePersistentDisk: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - pdName: - type: string - readOnly: - type: boolean - required: - - pdName - type: object - gitRepo: - properties: - directory: - type: string - repository: - type: string - revision: - type: string - required: - - repository - type: object - glusterfs: - properties: - endpoints: - type: string - path: - type: string - readOnly: - type: boolean - required: - - endpoints - - path - type: object - hostPath: - properties: - path: - type: string - type: - type: string - required: - - path - type: object - iscsi: - properties: - chapAuthDiscovery: - type: boolean - chapAuthSession: - type: boolean - fsType: - type: string - initiatorName: - type: string - iqn: - type: string - iscsiInterface: - type: string - lun: - format: int32 - type: integer - portals: - items: - type: string - type: array - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - targetPortal: - type: string - required: - - iqn - - lun - - targetPortal - type: object - name: - type: string - nfs: - properties: - path: - type: string - readOnly: - type: boolean - server: - type: string - required: - - path - - server - type: object - persistentVolumeClaim: - properties: - claimName: - type: string - readOnly: - type: boolean - required: - - claimName - type: object - photonPersistentDisk: - properties: - fsType: - type: string - pdID: - type: string - required: - - pdID - type: object - portworxVolume: - properties: - fsType: - type: string - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - projected: - properties: - defaultMode: - format: int32 - type: integer - sources: - items: - properties: - configMap: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - downwardAPI: - properties: - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - secret: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - 
key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - serviceAccountToken: - properties: - audience: - type: string - expirationSeconds: - format: int64 - type: integer - path: - type: string - required: - - path - type: object - type: object - type: array - type: object - quobyte: - properties: - group: - type: string - readOnly: - type: boolean - registry: - type: string - tenant: - type: string - user: - type: string - volume: - type: string - required: - - registry - - volume - type: object - rbd: - properties: - fsType: - type: string - image: - type: string - keyring: - type: string - monitors: - items: - type: string - type: array - pool: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - image - - monitors - type: object - scaleIO: - properties: - fsType: - type: string - gateway: - type: string - protectionDomain: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - sslEnabled: - type: boolean - storageMode: - type: string - storagePool: - type: string - system: - type: string - volumeName: - type: string - required: - - gateway - - secretRef - - system - type: object - secret: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - optional: - type: boolean - secretName: - type: string - type: object - storageos: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeName: - type: string - volumeNamespace: - type: string - type: object - vsphereVolume: - properties: - fsType: - type: string - storagePolicyID: - type: string - storagePolicyName: - type: string - volumePath: - type: string - required: - - volumePath - type: object - required: - - name - type: object - type: array - type: object - templates: - items: - properties: - activeDeadlineSeconds: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - affinity: - properties: - nodeAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - preference: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - weight: - format: int32 - type: integer - required: - - preference - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - properties: - nodeSelectorTerms: - items: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - type: array - required: - - nodeSelectorTerms - type: object - type: object - podAffinity: - 
properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - podAntiAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: 
string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - type: object - archiveLocation: - properties: - archiveLogs: - type: boolean - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - 
clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - raw: - properties: - data: - type: string - required: - - data - type: object - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - type: object - automountServiceAccountToken: - type: boolean - container: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - 
required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - 
httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - 
volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - image - type: object - containerSet: - properties: - containers: - items: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - dependencies: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - 
type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - 
properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - type: array - retryStrategy: - properties: - duration: - type: string - retries: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - retries - type: object - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - required: - - containers - type: object - daemon: - type: boolean - dag: - properties: - failFast: - type: boolean - target: - type: string - tasks: - items: - properties: - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - 
usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: 
- key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - continueOn: - properties: - error: - type: boolean - failed: - type: boolean - type: object - dependencies: - items: - type: string - type: array - depends: - type: string - hooks: - additionalProperties: - properties: - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - 
podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - 
oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - 
[flattened diff hunk, continued: the remaining removed ("-") lines of an Argo Workflows CRD OpenAPI v3 schema (the per-template workflow spec). The extraction collapsed the indented YAML onto a few very long lines; the recoverable content of this block covers DAG task options (templateRef, withItems, withParam, withSequence), data sources and transformations, executor, failFast, hostAliases, http templates, the full Kubernetes Container schema for initContainers and sidecars (env, envFrom, lifecycle hooks, liveness/readiness/startup probes, ports, resources, securityContext, volumeDevices, volumeMounts), inputs and outputs with every artifact backend (artifactory, azure, gcs, git, hdfs, http, oss, raw, s3) and parameter valueFrom sources, memoize, pod metadata, prometheus metrics, nodeSelector, parallelism, plugin, podSpecPatch, priority, resource and script templates, retryStrategy, schedulerName, pod securityContext, serviceAccountName, steps, suspend, synchronization (mutex/semaphore), timeout, tolerations, the full Kubernetes Volume schema, ttlStrategy, volumeClaimGC, and the start of volumeClaimTemplates.]
dataSourceRef: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - selector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - storageClassName: - type: string - volumeMode: - type: string - volumeName: - type: string - type: object - status: - properties: - accessModes: - items: - type: string - type: array - allocatedResources: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - capacity: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - conditions: - items: - properties: - lastProbeTime: - format: date-time - type: string - lastTransitionTime: - format: date-time - type: string - message: - type: string - reason: - type: string - status: - type: string - type: - type: string - required: - - status - - type - type: object - type: array - phase: - type: string - resizeStatus: - type: string - type: object - type: object - type: array - volumes: - items: - properties: - awsElasticBlockStore: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - azureDisk: - properties: - cachingMode: - type: string - diskName: - type: string - diskURI: - type: string - fsType: - type: string - kind: - type: string - readOnly: - type: boolean - required: - - diskName - - diskURI - type: object - azureFile: - properties: - readOnly: - type: boolean - secretName: - type: string - shareName: - type: string - required: - - secretName - - shareName - type: object - cephfs: - properties: - monitors: - items: - type: string - type: array - path: - type: string - readOnly: - type: boolean - secretFile: - type: string - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - monitors - type: object - cinder: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeID: - type: string - required: - - volumeID - type: object - configMap: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - 
type: object - csi: - properties: - driver: - type: string - fsType: - type: string - nodePublishSecretRef: - properties: - name: - type: string - type: object - readOnly: - type: boolean - volumeAttributes: - additionalProperties: - type: string - type: object - required: - - driver - type: object - downwardAPI: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - emptyDir: - properties: - medium: - type: string - sizeLimit: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - ephemeral: - properties: - volumeClaimTemplate: - properties: - metadata: - type: object - spec: - properties: - accessModes: - items: - type: string - type: array - dataSource: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - dataSourceRef: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - selector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - storageClassName: - type: string - volumeMode: - type: string - volumeName: - type: string - type: object - required: - - spec - type: object - type: object - fc: - properties: - fsType: - type: string - lun: - format: int32 - type: integer - readOnly: - type: boolean - targetWWNs: - items: - type: string - type: array - wwids: - items: - type: string - type: array - type: object - flexVolume: - properties: - driver: - type: string - fsType: - type: string - options: - additionalProperties: - type: string - type: object - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - required: - - driver - type: object - flocker: - properties: - datasetName: - type: string - datasetUUID: - type: string - type: object - gcePersistentDisk: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - pdName: - type: string - readOnly: - type: boolean - required: - - pdName 
- type: object - gitRepo: - properties: - directory: - type: string - repository: - type: string - revision: - type: string - required: - - repository - type: object - glusterfs: - properties: - endpoints: - type: string - path: - type: string - readOnly: - type: boolean - required: - - endpoints - - path - type: object - hostPath: - properties: - path: - type: string - type: - type: string - required: - - path - type: object - iscsi: - properties: - chapAuthDiscovery: - type: boolean - chapAuthSession: - type: boolean - fsType: - type: string - initiatorName: - type: string - iqn: - type: string - iscsiInterface: - type: string - lun: - format: int32 - type: integer - portals: - items: - type: string - type: array - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - targetPortal: - type: string - required: - - iqn - - lun - - targetPortal - type: object - name: - type: string - nfs: - properties: - path: - type: string - readOnly: - type: boolean - server: - type: string - required: - - path - - server - type: object - persistentVolumeClaim: - properties: - claimName: - type: string - readOnly: - type: boolean - required: - - claimName - type: object - photonPersistentDisk: - properties: - fsType: - type: string - pdID: - type: string - required: - - pdID - type: object - portworxVolume: - properties: - fsType: - type: string - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - projected: - properties: - defaultMode: - format: int32 - type: integer - sources: - items: - properties: - configMap: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - downwardAPI: - properties: - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - secret: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - serviceAccountToken: - properties: - audience: - type: string - expirationSeconds: - format: int64 - type: integer - path: - type: string - required: - - path - type: object - type: object - type: array - type: object - quobyte: - properties: - group: - type: string - readOnly: - type: boolean - registry: - type: string - tenant: - type: string - user: - type: string - volume: - type: string - required: - - registry - - volume - type: object - rbd: - properties: - fsType: - type: string - image: - type: string - keyring: - type: string - monitors: - items: - type: string - type: array - pool: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - image - - 
monitors - type: object - scaleIO: - properties: - fsType: - type: string - gateway: - type: string - protectionDomain: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - sslEnabled: - type: boolean - storageMode: - type: string - storagePool: - type: string - system: - type: string - volumeName: - type: string - required: - - gateway - - secretRef - - system - type: object - secret: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - optional: - type: boolean - secretName: - type: string - type: object - storageos: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeName: - type: string - volumeNamespace: - type: string - type: object - vsphereVolume: - properties: - fsType: - type: string - storagePolicyID: - type: string - storagePolicyName: - type: string - volumePath: - type: string - required: - - volumePath - type: object - required: - - name - type: object - type: array - workflowMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - labelsFrom: - additionalProperties: - properties: - expression: - type: string - required: - - expression - type: object - type: object - type: object - workflowTemplateRef: - properties: - clusterScope: - type: boolean - name: - type: string - type: object - type: object - required: - - schedule - - workflowSpec - type: object - status: - properties: - active: - items: - properties: - apiVersion: - type: string - fieldPath: - type: string - kind: - type: string - name: - type: string - namespace: - type: string - resourceVersion: - type: string - uid: - type: string - type: object - type: array - conditions: - items: - properties: - message: - type: string - status: - type: string - type: - type: string - type: object - type: array - lastScheduledTime: - format: date-time - type: string - required: - - active - - conditions - - lastScheduledTime - type: object - required: - - metadata - - spec - type: object - served: true - storage: true diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflowartifactgctasks.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflowartifactgctasks.yaml deleted file mode 100644 index f3308c9e9d5..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflowartifactgctasks.yaml +++ /dev/null @@ -1,1014 +0,0 @@ -# This is an auto-generated file. 
DO NOT EDIT -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: # kpt-merge: /workflowartifactgctasks.argoproj.io - name: workflowartifactgctasks.argoproj.io - annotations: - internal.kpt.dev/upstream-identifier: 'apiextensions.k8s.io|CustomResourceDefinition|default|workflowartifactgctasks.argoproj.io' -spec: - group: argoproj.io - names: - kind: WorkflowArtifactGCTask - listKind: WorkflowArtifactGCTaskList - plural: workflowartifactgctasks - shortNames: - - wfat - singular: workflowartifactgctask - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - properties: - artifactsByNode: - additionalProperties: - properties: - archiveLocation: - properties: - archiveLogs: - type: boolean - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string 
- optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - raw: - properties: - data: - type: string - required: - - data - type: object - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - type: object - artifacts: - additionalProperties: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - 
serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - 
clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: object - type: object - type: object - type: object - status: - properties: - artifactResultsByNode: - additionalProperties: - properties: - artifactResults: - additionalProperties: - properties: - error: - type: string - name: - type: string - success: - type: boolean - required: - - name - type: object - type: object - type: object - type: object - type: object - required: - - metadata - - spec - type: object - served: true - storage: true - subresources: - status: {} diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workfloweventbindings.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workfloweventbindings.yaml deleted file mode 100644 index 4259d24d0bd..00000000000 --- 
a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workfloweventbindings.yaml +++ /dev/null @@ -1,619 +0,0 @@ -# This is an auto-generated file. DO NOT EDIT -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: # kpt-merge: /workfloweventbindings.argoproj.io - name: workfloweventbindings.argoproj.io - annotations: - internal.kpt.dev/upstream-identifier: 'apiextensions.k8s.io|CustomResourceDefinition|default|workfloweventbindings.argoproj.io' -spec: - group: argoproj.io - names: - kind: WorkflowEventBinding - listKind: WorkflowEventBindingList - plural: workfloweventbindings - shortNames: - - wfeb - singular: workfloweventbinding - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - properties: - event: - properties: - selector: - type: string - required: - - selector - type: object - submit: - properties: - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - 
hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean 
- kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - metadata: - type: object - workflowTemplateRef: - properties: - clusterScope: - type: boolean - name: - type: string - type: object - required: - - workflowTemplateRef - type: object - required: - - event - type: object - required: - - metadata - - spec - type: object - served: true - storage: true diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflows.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflows.yaml deleted file mode 100644 index b048baa5834..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflows.yaml +++ /dev/null @@ -1,49299 +0,0 @@ -# This is an auto-generated file. DO NOT EDIT -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: # kpt-merge: /workflows.argoproj.io - name: workflows.argoproj.io - annotations: - internal.kpt.dev/upstream-identifier: 'apiextensions.k8s.io|CustomResourceDefinition|default|workflows.argoproj.io' -spec: - group: argoproj.io - names: - kind: Workflow - listKind: WorkflowList - plural: workflows - shortNames: - - wf - singular: workflow - scope: Namespaced - versions: - - additionalPrinterColumns: - - description: Status of the workflow - jsonPath: .status.phase - name: Status - type: string - - description: When the workflow was started - format: date-time - jsonPath: .status.startedAt - name: Age - type: date - - description: Human readable message indicating details about why the workflow is in this condition. 
- jsonPath: .status.message - name: Message - type: string - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - properties: - activeDeadlineSeconds: - format: int64 - type: integer - affinity: - properties: - nodeAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - preference: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - weight: - format: int32 - type: integer - required: - - preference - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - properties: - nodeSelectorTerms: - items: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - type: array - required: - - nodeSelectorTerms - type: object - type: object - podAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - podAntiAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - 
podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - type: object - archiveLogs: - type: boolean - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - 
type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - 
type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - artifactGC: - properties: - forceFinalizerRemoval: - type: boolean - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - podSpecPatch: - type: string - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactRepositoryRef: - properties: - configMap: - type: string - key: - type: string - type: object - automountServiceAccountToken: - type: boolean - dnsConfig: - properties: - nameservers: - items: - type: string - type: array - options: - items: - properties: - name: - type: string - value: - type: string - type: object - type: array - searches: - items: - type: string - type: array - type: object - dnsPolicy: - type: string - entrypoint: - type: string - executor: - properties: - serviceAccountName: - type: string - type: object - hooks: - additionalProperties: - properties: - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - 
# OpenAPI v3 validation schema for the Argo workflow spec (Argo Workflows CRD), carried
# in this manifest. It defines: artifact location types (artifactory, azure, gcs, git,
# hdfs, http, oss, raw, s3) and their secret key references; workflow-level fields
# (hostAliases, hostNetwork, imagePullSecrets, metrics, nodeSelector, onExit, parallelism,
# podDisruptionBudget, podGC, podMetadata, podPriority, podSpecPatch, retryStrategy,
# schedulerName, securityContext, serviceAccountName, shutdown, suspend, synchronization,
# templateDefaults); and per-template fields (affinity, archiveLocation,
# automountServiceAccountToken, container, containerSet, daemon, dag, data, executor,
# failFast, http, initContainers) including container env, lifecycle hooks, liveness,
# readiness, and startup probes, ports, resources, securityContext, and volumeMounts.
type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - type: array - inputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: 
string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - 
markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - memoize: - properties: - cache: - properties: - configMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - configMap - type: object - key: - type: string - maxAge: - type: string - required: - - cache - - key - - maxAge - type: object - metadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - metrics: - properties: - prometheus: - items: - properties: - counter: - properties: - value: - type: string - required: - - value - type: object - gauge: - properties: - operation: - type: string - realtime: - type: boolean - value: - type: string - required: - - realtime - - value - type: object - help: - type: string - histogram: - properties: - buckets: - items: - type: number - type: array - value: - type: string - required: - - buckets - - value - type: object - labels: - items: - properties: - key: - type: string - value: - type: string - required: - - key - - value - type: object - type: array - name: - type: string - when: - type: string - required: - - help - - name - type: object - type: array - required: - - prometheus - type: object - name: 
- type: string - nodeSelector: - additionalProperties: - type: string - type: object - outputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean 
- required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - exitCode: - type: string - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - 
properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - result: - type: string - type: object - parallelism: - format: int64 - type: integer - plugin: - type: object - podSpecPatch: - type: string - priority: - format: int32 - type: integer - priorityClassName: - type: string - resource: - properties: - action: - type: string - failureCondition: - type: string - flags: - items: - type: string - type: array - manifest: - type: string - manifestFrom: - properties: - artifact: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - 
type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object 
- endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - required: - - artifact - type: object - mergeStrategy: - type: string - setOwnerReference: - type: boolean - successCondition: - type: string - required: - - action - type: object - retryStrategy: - properties: - affinity: - properties: - nodeAntiAffinity: - type: object - type: object - backoff: - properties: - duration: - type: string - factor: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - maxDuration: - type: string - type: object - expression: - type: string - limit: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - retryPolicy: - type: string - type: object - schedulerName: - type: string - script: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - 
required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: 
array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - source: - type: string - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - - source - type: object - securityContext: - properties: - fsGroup: - format: int64 - type: integer - fsGroupChangePolicy: - type: string - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - supplementalGroups: - items: - format: int64 - type: integer - type: array - sysctls: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - serviceAccountName: - type: string - sidecars: - items: - 
properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - 
tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - mirrorVolumeMounts: - type: boolean - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string 
- required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - type: array - steps: - items: - type: array - type: array - suspend: - properties: - duration: - type: string - type: object - synchronization: - properties: - mutex: - properties: - name: - type: string - namespace: - type: string - type: object - semaphore: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - namespace: - type: string - type: object - type: object - timeout: - type: string - tolerations: - items: - properties: - effect: - type: string - key: - type: string - operator: - type: string - tolerationSeconds: - format: int64 - type: integer - value: - type: string - type: object - type: array - volumes: - items: - properties: - awsElasticBlockStore: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - azureDisk: - properties: - cachingMode: - type: string - diskName: - type: string - diskURI: - type: string - fsType: - type: string - kind: - type: string - readOnly: - type: boolean - required: - - diskName - - diskURI - type: object - azureFile: - properties: - readOnly: - type: boolean - secretName: - type: string - shareName: - type: string - required: - - secretName - - shareName - type: object - cephfs: - properties: - monitors: - items: - type: string - type: array - path: - type: string - readOnly: - type: boolean - secretFile: - type: string - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - monitors - type: object - cinder: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeID: - type: string - required: - - volumeID - type: object - configMap: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: 
string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - csi: - properties: - driver: - type: string - fsType: - type: string - nodePublishSecretRef: - properties: - name: - type: string - type: object - readOnly: - type: boolean - volumeAttributes: - additionalProperties: - type: string - type: object - required: - - driver - type: object - downwardAPI: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - emptyDir: - properties: - medium: - type: string - sizeLimit: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - ephemeral: - properties: - volumeClaimTemplate: - properties: - metadata: - type: object - spec: - properties: - accessModes: - items: - type: string - type: array - dataSource: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - dataSourceRef: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - selector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - storageClassName: - type: string - volumeMode: - type: string - volumeName: - type: string - type: object - required: - - spec - type: object - type: object - fc: - properties: - fsType: - type: string - lun: - format: int32 - type: integer - readOnly: - type: boolean - targetWWNs: - items: - type: string - type: array - wwids: - items: - type: string - type: array - type: object - flexVolume: - properties: - driver: - type: string - fsType: - type: string - options: - additionalProperties: - type: string - type: object - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - required: - - driver - type: object - flocker: - properties: - datasetName: - type: string - datasetUUID: - type: string - type: object - gcePersistentDisk: - properties: - fsType: - type: string - 
partition: - format: int32 - type: integer - pdName: - type: string - readOnly: - type: boolean - required: - - pdName - type: object - gitRepo: - properties: - directory: - type: string - repository: - type: string - revision: - type: string - required: - - repository - type: object - glusterfs: - properties: - endpoints: - type: string - path: - type: string - readOnly: - type: boolean - required: - - endpoints - - path - type: object - hostPath: - properties: - path: - type: string - type: - type: string - required: - - path - type: object - iscsi: - properties: - chapAuthDiscovery: - type: boolean - chapAuthSession: - type: boolean - fsType: - type: string - initiatorName: - type: string - iqn: - type: string - iscsiInterface: - type: string - lun: - format: int32 - type: integer - portals: - items: - type: string - type: array - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - targetPortal: - type: string - required: - - iqn - - lun - - targetPortal - type: object - name: - type: string - nfs: - properties: - path: - type: string - readOnly: - type: boolean - server: - type: string - required: - - path - - server - type: object - persistentVolumeClaim: - properties: - claimName: - type: string - readOnly: - type: boolean - required: - - claimName - type: object - photonPersistentDisk: - properties: - fsType: - type: string - pdID: - type: string - required: - - pdID - type: object - portworxVolume: - properties: - fsType: - type: string - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - projected: - properties: - defaultMode: - format: int32 - type: integer - sources: - items: - properties: - configMap: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - downwardAPI: - properties: - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - secret: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - serviceAccountToken: - properties: - audience: - type: string - expirationSeconds: - format: int64 - type: integer - path: - type: string - required: - - path - type: object - type: object - type: array - type: object - quobyte: - properties: - group: - type: string - readOnly: - type: boolean - registry: - type: string - tenant: - type: string - user: - type: string - volume: - type: string - required: - - registry - - volume - type: object - rbd: - properties: - fsType: - type: string - image: - type: string - keyring: - type: string - monitors: - items: - type: string - type: array - pool: - type: string - readOnly: - type: boolean - 
secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - image - - monitors - type: object - scaleIO: - properties: - fsType: - type: string - gateway: - type: string - protectionDomain: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - sslEnabled: - type: boolean - storageMode: - type: string - storagePool: - type: string - system: - type: string - volumeName: - type: string - required: - - gateway - - secretRef - - system - type: object - secret: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - optional: - type: boolean - secretName: - type: string - type: object - storageos: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeName: - type: string - volumeNamespace: - type: string - type: object - vsphereVolume: - properties: - fsType: - type: string - storagePolicyID: - type: string - storagePolicyName: - type: string - volumePath: - type: string - required: - - volumePath - type: object - required: - - name - type: object - type: array - type: object - templates: - items: - properties: - activeDeadlineSeconds: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - affinity: - properties: - nodeAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - preference: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - weight: - format: int32 - type: integer - required: - - preference - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - properties: - nodeSelectorTerms: - items: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - type: array - required: - - nodeSelectorTerms - type: object - type: object - podAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - 
namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - podAntiAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - type: object - archiveLocation: - properties: - archiveLogs: - type: boolean - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: 
boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - oss: - properties: - accessKeySecret: - properties: - key: - type: 
string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - raw: - properties: - data: - type: string - required: - - data - type: object - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - type: object - automountServiceAccountToken: - type: boolean - container: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: 
integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - 
timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - image - type: object - containerSet: - properties: - containers: - items: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - dependencies: - items: - type: string - type: array - env: - items: - properties: - name: - type: 
string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - 
type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - 
value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - type: array - retryStrategy: - properties: - duration: - type: string - retries: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - retries - type: object - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - required: - - containers - type: object - daemon: - type: boolean - dag: - properties: - failFast: - type: boolean - target: - type: string - tasks: - items: - properties: - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: 
boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - 
format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - continueOn: - properties: - error: - type: boolean - failed: - type: boolean - type: object - dependencies: - items: - type: string - type: array - depends: - type: string - hooks: - additionalProperties: - properties: - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - 
name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - 
type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - expression: - type: string - template: - type: string - templateRef: - properties: - clusterScope: - type: boolean - name: - type: string - template: - type: string - type: object - type: object - type: object - inline: {} - name: - type: string - onExit: - type: string - template: - type: string - templateRef: - properties: - clusterScope: - type: boolean - name: - type: string - template: - type: string - type: object - when: - type: string - withItems: - items: - type: object - type: array - withParam: - type: string - withSequence: - properties: - count: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - end: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - format: - type: string - start: - anyOf: - - 
type: integer - - type: string - x-kubernetes-int-or-string: true - type: object - required: - - name - type: object - type: array - required: - - tasks - type: object - data: - properties: - source: - properties: - artifactPaths: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - 
usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: object - transformation: - items: - properties: - expression: - type: string - required: - - expression - type: object - type: array - required: - - source - - transformation - type: object - executor: - 
[… this portion of the hunk's `-` lines carries the OpenAPI v3 schema of an Argo Workflows CRD, with the
original one-line-per-field layout collapsed. It covers the template-level fields (failFast, hostAliases,
http, initContainers, sidecars, script, resource, retryStrategy, memoize, metadata, metrics, name,
nodeSelector, parallelism, plugin, podSpecPatch, priority, priorityClassName, schedulerName,
securityContext, serviceAccountName, steps, suspend, synchronization, timeout, tolerations, and volumes),
the inputs/outputs artifact and parameter schemas with artifact sources for artifactory, azure, gcs, git,
hdfs, http, oss, raw, and s3, and the workflow-level tolerations, ttlStrategy, volumeClaimGC,
volumeClaimTemplates, and volumes sections …]
properties: - name: - type: string - type: object - volumeName: - type: string - volumeNamespace: - type: string - type: object - vsphereVolume: - properties: - fsType: - type: string - storagePolicyID: - type: string - storagePolicyName: - type: string - volumePath: - type: string - required: - - volumePath - type: object - required: - - name - type: object - type: array - workflowMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - labelsFrom: - additionalProperties: - properties: - expression: - type: string - required: - - expression - type: object - type: object - type: object - workflowTemplateRef: - properties: - clusterScope: - type: boolean - name: - type: string - type: object - type: object - status: - properties: - artifactGCStatus: - properties: - notSpecified: - type: boolean - podsRecouped: - additionalProperties: - type: boolean - type: object - strategiesProcessed: - additionalProperties: - type: boolean - type: object - type: object - artifactRepositoryRef: - properties: - artifactRepository: - properties: - archiveLogs: - type: boolean - artifactory: - properties: - keyFormat: - type: string - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repoURL: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blobNameFormat: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - container - - endpoint - type: object - gcs: - properties: - bucket: - type: string - keyFormat: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - pathFormat: - type: string - type: object - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - keyFormat: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - type: object - s3: - properties: - accessKeySecret: - properties: - 
key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - keyFormat: - type: string - keyPrefix: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - type: object - configMap: - type: string - default: - type: boolean - key: - type: string - namespace: - type: string - type: object - compressedNodes: - type: string - conditions: - items: - properties: - message: - type: string - status: - type: string - type: - type: string - type: object - type: array - estimatedDuration: - type: integer - finishedAt: - format: date-time - type: string - message: - type: string - nodes: - additionalProperties: - properties: - boundaryID: - type: string - children: - items: - type: string - type: array - daemoned: - type: boolean - displayName: - type: string - estimatedDuration: - type: integer - finishedAt: - format: date-time - type: string - hostNodeName: - type: string - id: - type: string - inputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - 
type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - 
required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - memoizationStatus: - properties: - cacheName: - type: string - hit: - type: boolean - key: - type: string - required: - - cacheName - - hit - - key - type: object - message: - type: string - name: - type: string - nodeFlag: - properties: - hooked: - type: boolean - retried: - type: boolean - type: object - outboundNodes: - items: - type: string - type: array - outputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: 
object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: 
object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - exitCode: - type: string - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - result: - type: string - type: object - phase: - type: string - podIP: - type: string - progress: - type: string - resourcesDuration: - additionalProperties: - format: int64 - type: integer - type: object - startedAt: - format: date-time - type: string - synchronizationStatus: - properties: - waiting: - type: string - type: object - templateName: - type: string - templateRef: - properties: - clusterScope: - type: boolean - name: - type: string - template: - type: string - type: object - templateScope: - type: string - type: - type: string - required: - - id - - name - - type - type: object - type: object - offloadNodeStatusVersion: - type: string - outputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - 
type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: 
string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - exitCode: - type: string - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: 
string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - result: - type: string - type: object - persistentVolumeClaims: - items: - properties: - awsElasticBlockStore: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - azureDisk: - properties: - cachingMode: - type: string - diskName: - type: string - diskURI: - type: string - fsType: - type: string - kind: - type: string - readOnly: - type: boolean - required: - - diskName - - diskURI - type: object - azureFile: - properties: - readOnly: - type: boolean - secretName: - type: string - shareName: - type: string - required: - - secretName - - shareName - type: object - cephfs: - properties: - monitors: - items: - type: string - type: array - path: - type: string - readOnly: - type: boolean - secretFile: - type: string - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - monitors - type: object - cinder: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeID: - type: string - required: - - volumeID - type: object - configMap: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - csi: - properties: - driver: - type: string - fsType: - type: string - nodePublishSecretRef: - properties: - name: - type: string - type: object - readOnly: - type: boolean - volumeAttributes: - additionalProperties: - type: string - type: object - required: - - driver - type: object - downwardAPI: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - emptyDir: - properties: - medium: - type: string - sizeLimit: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - ephemeral: - properties: - volumeClaimTemplate: - properties: - metadata: - type: object - spec: - properties: - accessModes: - items: - type: string - type: array - dataSource: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - dataSourceRef: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: 
^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - selector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - storageClassName: - type: string - volumeMode: - type: string - volumeName: - type: string - type: object - required: - - spec - type: object - type: object - fc: - properties: - fsType: - type: string - lun: - format: int32 - type: integer - readOnly: - type: boolean - targetWWNs: - items: - type: string - type: array - wwids: - items: - type: string - type: array - type: object - flexVolume: - properties: - driver: - type: string - fsType: - type: string - options: - additionalProperties: - type: string - type: object - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - required: - - driver - type: object - flocker: - properties: - datasetName: - type: string - datasetUUID: - type: string - type: object - gcePersistentDisk: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - pdName: - type: string - readOnly: - type: boolean - required: - - pdName - type: object - gitRepo: - properties: - directory: - type: string - repository: - type: string - revision: - type: string - required: - - repository - type: object - glusterfs: - properties: - endpoints: - type: string - path: - type: string - readOnly: - type: boolean - required: - - endpoints - - path - type: object - hostPath: - properties: - path: - type: string - type: - type: string - required: - - path - type: object - iscsi: - properties: - chapAuthDiscovery: - type: boolean - chapAuthSession: - type: boolean - fsType: - type: string - initiatorName: - type: string - iqn: - type: string - iscsiInterface: - type: string - lun: - format: int32 - type: integer - portals: - items: - type: string - type: array - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - targetPortal: - type: string - required: - - iqn - - lun - - targetPortal - type: object - name: - type: string - nfs: - properties: - path: - type: string - readOnly: - type: boolean - server: - type: string - required: - - path - - server - type: object - persistentVolumeClaim: - properties: - claimName: - type: string - readOnly: - type: boolean - required: - - claimName - type: object - photonPersistentDisk: - properties: - fsType: - type: string - pdID: - type: string - required: - - pdID - type: object - portworxVolume: - properties: - fsType: - type: string - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - projected: - properties: - defaultMode: - format: int32 - type: integer - sources: - items: - properties: - configMap: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - 
downwardAPI: - properties: - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - secret: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - serviceAccountToken: - properties: - audience: - type: string - expirationSeconds: - format: int64 - type: integer - path: - type: string - required: - - path - type: object - type: object - type: array - type: object - quobyte: - properties: - group: - type: string - readOnly: - type: boolean - registry: - type: string - tenant: - type: string - user: - type: string - volume: - type: string - required: - - registry - - volume - type: object - rbd: - properties: - fsType: - type: string - image: - type: string - keyring: - type: string - monitors: - items: - type: string - type: array - pool: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - image - - monitors - type: object - scaleIO: - properties: - fsType: - type: string - gateway: - type: string - protectionDomain: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - sslEnabled: - type: boolean - storageMode: - type: string - storagePool: - type: string - system: - type: string - volumeName: - type: string - required: - - gateway - - secretRef - - system - type: object - secret: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - optional: - type: boolean - secretName: - type: string - type: object - storageos: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeName: - type: string - volumeNamespace: - type: string - type: object - vsphereVolume: - properties: - fsType: - type: string - storagePolicyID: - type: string - storagePolicyName: - type: string - volumePath: - type: string - required: - - volumePath - type: object - required: - - name - type: object - type: array - phase: - type: string - progress: - type: string - resourcesDuration: - additionalProperties: - format: int64 - type: integer - type: object - startedAt: - format: date-time - type: string - storedTemplates: - additionalProperties: - properties: - activeDeadlineSeconds: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - affinity: - properties: - nodeAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - preference: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: 
array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - weight: - format: int32 - type: integer - required: - - preference - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - properties: - nodeSelectorTerms: - items: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - type: array - required: - - nodeSelectorTerms - type: object - type: object - podAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - podAntiAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: 
array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - type: object - archiveLocation: - properties: - archiveLogs: - type: boolean - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: 
[flattened diff hunk: removal lines ("-") from what appears to be an Argo Workflows CRD OpenAPI v3 schema. The span covered the template-level properties — artifact backends (artifactory, azure, gcs, git, hdfs, http, oss, raw, s3), automountServiceAccountToken, container, containerSet, daemon, dag tasks (arguments, continueOn, dependencies/depends, hooks, withItems/withParam/withSequence), data, executor, failFast, hostAliases, http, initContainers, inputs, memoize, metadata, metrics, nodeSelector, outputs, parallelism, plugin, podSpecPatch, priority, priorityClassName, and resource — but the original YAML line breaks and indentation are not recoverable.]
optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - required: - - artifact - type: object - mergeStrategy: - type: string - setOwnerReference: - type: boolean - successCondition: - type: string - required: - - action - type: object - retryStrategy: - properties: - affinity: - properties: - nodeAntiAffinity: - type: object - type: object - backoff: - properties: - duration: - type: string - factor: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - maxDuration: - type: string - type: object - expression: - type: string - limit: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - retryPolicy: - type: string - type: object - schedulerName: - type: string - script: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - 
type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - 
format: int32 - type: integer - type: object - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - source: - type: string - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: 
- - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - - source - type: object - securityContext: - properties: - fsGroup: - format: int64 - type: integer - fsGroupChangePolicy: - type: string - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - supplementalGroups: - items: - format: int64 - type: integer - type: array - sysctls: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - serviceAccountName: - type: string - sidecars: - items: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: 
- type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - mirrorVolumeMounts: - type: boolean - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: 
true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - 
type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - type: array - steps: - items: - type: array - type: array - suspend: - properties: - duration: - type: string - type: object - synchronization: - properties: - mutex: - properties: - name: - type: string - namespace: - type: string - type: object - semaphore: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - namespace: - type: string - type: object - type: object - timeout: - type: string - tolerations: - items: - properties: - effect: - type: string - key: - type: string - operator: - type: string - tolerationSeconds: - format: int64 - type: integer - value: - type: string - type: object - type: array - volumes: - items: - properties: - awsElasticBlockStore: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - azureDisk: - properties: - cachingMode: - type: string - diskName: - type: string - diskURI: - type: string - fsType: - type: string - kind: - type: string - readOnly: - type: boolean - required: - - diskName - - diskURI - type: object - azureFile: - properties: - readOnly: - type: boolean - secretName: - type: string - shareName: - type: string - required: - - secretName - - shareName - type: object - cephfs: - properties: - monitors: - items: - type: string - type: array - path: - type: string - readOnly: - type: boolean - secretFile: - type: string - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - monitors - type: object - cinder: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeID: - type: string - required: - - volumeID - type: object - configMap: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - csi: - properties: - driver: - type: string - fsType: - type: string - nodePublishSecretRef: - properties: - name: - type: string - type: object - readOnly: - type: boolean - volumeAttributes: - additionalProperties: - type: string - type: object - required: - - driver - type: object - downwardAPI: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - emptyDir: - properties: - medium: - type: string - sizeLimit: - anyOf: - - type: integer - - type: string - pattern: 
^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - ephemeral: - properties: - volumeClaimTemplate: - properties: - metadata: - type: object - spec: - properties: - accessModes: - items: - type: string - type: array - dataSource: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - dataSourceRef: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - selector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - storageClassName: - type: string - volumeMode: - type: string - volumeName: - type: string - type: object - required: - - spec - type: object - type: object - fc: - properties: - fsType: - type: string - lun: - format: int32 - type: integer - readOnly: - type: boolean - targetWWNs: - items: - type: string - type: array - wwids: - items: - type: string - type: array - type: object - flexVolume: - properties: - driver: - type: string - fsType: - type: string - options: - additionalProperties: - type: string - type: object - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - required: - - driver - type: object - flocker: - properties: - datasetName: - type: string - datasetUUID: - type: string - type: object - gcePersistentDisk: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - pdName: - type: string - readOnly: - type: boolean - required: - - pdName - type: object - gitRepo: - properties: - directory: - type: string - repository: - type: string - revision: - type: string - required: - - repository - type: object - glusterfs: - properties: - endpoints: - type: string - path: - type: string - readOnly: - type: boolean - required: - - endpoints - - path - type: object - hostPath: - properties: - path: - type: string - type: - type: string - required: - - path - type: object - iscsi: - properties: - chapAuthDiscovery: - type: boolean - chapAuthSession: - type: boolean - fsType: - type: string - initiatorName: - type: string - iqn: - type: string - iscsiInterface: - type: string - lun: - format: int32 - type: integer - portals: - items: - type: string - type: array - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - targetPortal: - type: string - required: - - iqn - - lun - - targetPortal - type: object - name: - type: string - nfs: - properties: - path: - type: string - readOnly: - type: boolean - server: - type: string - required: - - path - - server - type: object - persistentVolumeClaim: - properties: - claimName: 
- type: string - readOnly: - type: boolean - required: - - claimName - type: object - photonPersistentDisk: - properties: - fsType: - type: string - pdID: - type: string - required: - - pdID - type: object - portworxVolume: - properties: - fsType: - type: string - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - projected: - properties: - defaultMode: - format: int32 - type: integer - sources: - items: - properties: - configMap: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - downwardAPI: - properties: - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - secret: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - serviceAccountToken: - properties: - audience: - type: string - expirationSeconds: - format: int64 - type: integer - path: - type: string - required: - - path - type: object - type: object - type: array - type: object - quobyte: - properties: - group: - type: string - readOnly: - type: boolean - registry: - type: string - tenant: - type: string - user: - type: string - volume: - type: string - required: - - registry - - volume - type: object - rbd: - properties: - fsType: - type: string - image: - type: string - keyring: - type: string - monitors: - items: - type: string - type: array - pool: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - image - - monitors - type: object - scaleIO: - properties: - fsType: - type: string - gateway: - type: string - protectionDomain: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - sslEnabled: - type: boolean - storageMode: - type: string - storagePool: - type: string - system: - type: string - volumeName: - type: string - required: - - gateway - - secretRef - - system - type: object - secret: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - optional: - type: boolean - secretName: - type: string - type: object - storageos: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeName: - type: string - volumeNamespace: - type: string - type: object - vsphereVolume: - properties: - fsType: - type: string - storagePolicyID: - type: string - storagePolicyName: - type: string - volumePath: - type: string - 
required: - - volumePath - type: object - required: - - name - type: object - type: array - type: object - type: object - storedWorkflowTemplateSpec: - properties: - activeDeadlineSeconds: - format: int64 - type: integer - affinity: - properties: - nodeAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - preference: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - weight: - format: int32 - type: integer - required: - - preference - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - properties: - nodeSelectorTerms: - items: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - type: array - required: - - nodeSelectorTerms - type: object - type: object - podAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - podAntiAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - 
matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - type: object - archiveLogs: - type: boolean - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - 
items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean 
- required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - artifactGC: - properties: - forceFinalizerRemoval: - type: boolean - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - podSpecPatch: - type: string - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactRepositoryRef: - properties: - configMap: - type: string - key: - type: string - type: object - automountServiceAccountToken: - type: boolean - dnsConfig: - properties: - nameservers: - items: - type: string - type: array - options: - items: - properties: - name: - type: string - value: - type: string - type: object - type: array - searches: - items: - type: string - type: array - type: object - dnsPolicy: - type: string - entrypoint: - type: string - executor: - properties: - serviceAccountName: - type: string - type: object - hooks: - additionalProperties: - properties: - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - 
serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - 
[Removed in this hunk: the Argo Workflows CRD OpenAPI v3 schema for workflow spec and template fields, including artifact locations (artifactory, azure, gcs, git, hdfs, http with basicAuth/clientCert/oauth2, oss, s3), arguments and parameters, hostAliases, imagePullSecrets, metrics, nodeSelector, podDisruptionBudget, podGC, podMetadata, retryStrategy, securityContext, synchronization, and templateDefaults (affinity, archiveLocation, automountServiceAccountToken), plus the container, containerSet, dag, data, http, and initContainers template definitions with their env, lifecycle, probes, ports, resources, securityContext, and volumeMounts sub-schemas.]
type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - type: array - inputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: 
string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - 
markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - memoize: - properties: - cache: - properties: - configMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - configMap - type: object - key: - type: string - maxAge: - type: string - required: - - cache - - key - - maxAge - type: object - metadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - metrics: - properties: - prometheus: - items: - properties: - counter: - properties: - value: - type: string - required: - - value - type: object - gauge: - properties: - operation: - type: string - realtime: - type: boolean - value: - type: string - required: - - realtime - - value - type: object - help: - type: string - histogram: - properties: - buckets: - items: - type: number - type: array - value: - type: string - required: - - buckets - - value - type: object - labels: - items: - properties: - key: - type: string - value: - type: string - required: - - key - - value - type: object - type: array - name: - type: string - when: - type: string - required: - - help - - name - type: object - type: array - required: - - prometheus - type: object - name: 
- type: string - nodeSelector: - additionalProperties: - type: string - type: object - outputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean 
- required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - exitCode: - type: string - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - 
properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - result: - type: string - type: object - parallelism: - format: int64 - type: integer - plugin: - type: object - podSpecPatch: - type: string - priority: - format: int32 - type: integer - priorityClassName: - type: string - resource: - properties: - action: - type: string - failureCondition: - type: string - flags: - items: - type: string - type: array - manifest: - type: string - manifestFrom: - properties: - artifact: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - 
type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object 
- endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - required: - - artifact - type: object - mergeStrategy: - type: string - setOwnerReference: - type: boolean - successCondition: - type: string - required: - - action - type: object - retryStrategy: - properties: - affinity: - properties: - nodeAntiAffinity: - type: object - type: object - backoff: - properties: - duration: - type: string - factor: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - maxDuration: - type: string - type: object - expression: - type: string - limit: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - retryPolicy: - type: string - type: object - schedulerName: - type: string - script: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - 
required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: 
array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - source: - type: string - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - - source - type: object - securityContext: - properties: - fsGroup: - format: int64 - type: integer - fsGroupChangePolicy: - type: string - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - supplementalGroups: - items: - format: int64 - type: integer - type: array - sysctls: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - serviceAccountName: - type: string - sidecars: - items: - 
properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - 
tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - mirrorVolumeMounts: - type: boolean - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string 
- required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - type: array - steps: - items: - type: array - type: array - suspend: - properties: - duration: - type: string - type: object - synchronization: - properties: - mutex: - properties: - name: - type: string - namespace: - type: string - type: object - semaphore: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - namespace: - type: string - type: object - type: object - timeout: - type: string - tolerations: - items: - properties: - effect: - type: string - key: - type: string - operator: - type: string - tolerationSeconds: - format: int64 - type: integer - value: - type: string - type: object - type: array - volumes: - items: - properties: - awsElasticBlockStore: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - azureDisk: - properties: - cachingMode: - type: string - diskName: - type: string - diskURI: - type: string - fsType: - type: string - kind: - type: string - readOnly: - type: boolean - required: - - diskName - - diskURI - type: object - azureFile: - properties: - readOnly: - type: boolean - secretName: - type: string - shareName: - type: string - required: - - secretName - - shareName - type: object - cephfs: - properties: - monitors: - items: - type: string - type: array - path: - type: string - readOnly: - type: boolean - secretFile: - type: string - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - monitors - type: object - cinder: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeID: - type: string - required: - - volumeID - type: object - configMap: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: 
string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - csi: - properties: - driver: - type: string - fsType: - type: string - nodePublishSecretRef: - properties: - name: - type: string - type: object - readOnly: - type: boolean - volumeAttributes: - additionalProperties: - type: string - type: object - required: - - driver - type: object - downwardAPI: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - emptyDir: - properties: - medium: - type: string - sizeLimit: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - ephemeral: - properties: - volumeClaimTemplate: - properties: - metadata: - type: object - spec: - properties: - accessModes: - items: - type: string - type: array - dataSource: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - dataSourceRef: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - selector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - storageClassName: - type: string - volumeMode: - type: string - volumeName: - type: string - type: object - required: - - spec - type: object - type: object - fc: - properties: - fsType: - type: string - lun: - format: int32 - type: integer - readOnly: - type: boolean - targetWWNs: - items: - type: string - type: array - wwids: - items: - type: string - type: array - type: object - flexVolume: - properties: - driver: - type: string - fsType: - type: string - options: - additionalProperties: - type: string - type: object - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - required: - - driver - type: object - flocker: - properties: - datasetName: - type: string - datasetUUID: - type: string - type: object - gcePersistentDisk: - properties: - fsType: - type: string - 
partition: - format: int32 - type: integer - pdName: - type: string - readOnly: - type: boolean - required: - - pdName - type: object - gitRepo: - properties: - directory: - type: string - repository: - type: string - revision: - type: string - required: - - repository - type: object - glusterfs: - properties: - endpoints: - type: string - path: - type: string - readOnly: - type: boolean - required: - - endpoints - - path - type: object - hostPath: - properties: - path: - type: string - type: - type: string - required: - - path - type: object - iscsi: - properties: - chapAuthDiscovery: - type: boolean - chapAuthSession: - type: boolean - fsType: - type: string - initiatorName: - type: string - iqn: - type: string - iscsiInterface: - type: string - lun: - format: int32 - type: integer - portals: - items: - type: string - type: array - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - targetPortal: - type: string - required: - - iqn - - lun - - targetPortal - type: object - name: - type: string - nfs: - properties: - path: - type: string - readOnly: - type: boolean - server: - type: string - required: - - path - - server - type: object - persistentVolumeClaim: - properties: - claimName: - type: string - readOnly: - type: boolean - required: - - claimName - type: object - photonPersistentDisk: - properties: - fsType: - type: string - pdID: - type: string - required: - - pdID - type: object - portworxVolume: - properties: - fsType: - type: string - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - projected: - properties: - defaultMode: - format: int32 - type: integer - sources: - items: - properties: - configMap: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - downwardAPI: - properties: - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - secret: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - serviceAccountToken: - properties: - audience: - type: string - expirationSeconds: - format: int64 - type: integer - path: - type: string - required: - - path - type: object - type: object - type: array - type: object - quobyte: - properties: - group: - type: string - readOnly: - type: boolean - registry: - type: string - tenant: - type: string - user: - type: string - volume: - type: string - required: - - registry - - volume - type: object - rbd: - properties: - fsType: - type: string - image: - type: string - keyring: - type: string - monitors: - items: - type: string - type: array - pool: - type: string - readOnly: - type: boolean - 
secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - image - - monitors - type: object - scaleIO: - properties: - fsType: - type: string - gateway: - type: string - protectionDomain: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - sslEnabled: - type: boolean - storageMode: - type: string - storagePool: - type: string - system: - type: string - volumeName: - type: string - required: - - gateway - - secretRef - - system - type: object - secret: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - optional: - type: boolean - secretName: - type: string - type: object - storageos: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeName: - type: string - volumeNamespace: - type: string - type: object - vsphereVolume: - properties: - fsType: - type: string - storagePolicyID: - type: string - storagePolicyName: - type: string - volumePath: - type: string - required: - - volumePath - type: object - required: - - name - type: object - type: array - type: object - templates: - items: - properties: - activeDeadlineSeconds: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - affinity: - properties: - nodeAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - preference: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - weight: - format: int32 - type: integer - required: - - preference - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - properties: - nodeSelectorTerms: - items: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - type: array - required: - - nodeSelectorTerms - type: object - type: object - podAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - 
- spec.templates[].affinity continued: podAntiAffinity with the same preferred/required pod-affinity-term structure (labelSelector, namespaceSelector, namespaces, topologyKey, weight).
- spec.templates[].archiveLocation: archiveLogs plus the artifact repository backends artifactory (url, username/password secrets), azure (accountKeySecret, blob, container, endpoint, useSDKCreds), gcs (bucket, key, serviceAccountKeySecret), git (repo, revision, branch, depth, fetch, ssh/username/password secrets), hdfs (addresses, path, Kerberos secrets), http (basicAuth/clientCert/oauth2 auth, headers, url) and the start of oss, each carrying its secret selectors (key, name, optional) and required fields.
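A hedged sketch of a single template entry (as it would sit under spec.templates) using the affinity subtree above to pin a step to amd64 nodes and spread its pods across hosts; the label and image are placeholders.

    - name: spread-out
      affinity:
        nodeAffinity:
          requiredDuringSchedulingIgnoredDuringExecution:
            nodeSelectorTerms:
              - matchExpressions:
                  - key: kubernetes.io/arch
                    operator: In
                    values: [amd64]
        podAntiAffinity:
          preferredDuringSchedulingIgnoredDuringExecution:
            - weight: 100
              podAffinityTerm:
                topologyKey: kubernetes.io/hostname
                labelSelector:
                  matchLabels:
                    app: my-workflow-step        # placeholder label assumed to be set on these pods
      container:
        image: alpine:3.20                       # placeholder image
        command: [sh, -c, "echo scheduled"]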
- spec.templates[].archiveLocation continued: oss (bucket, createBucketIfNotPresent, endpoint, key, lifecycleRule, securityToken, secret selectors), raw (data) and s3 (bucket, caSecret, createBucketIfNotPresent.objectLocking, encryptionOptions, endpoint, insecure, key, region, roleARN, access/secret key secrets, useSDKCreds).
- spec.templates[].automountServiceAccountToken.
- spec.templates[].container: the full core-v1 Container schema (args, command, env/envFrom with configMap/field/resource/secret refs, image, imagePullPolicy, lifecycle postStart/preStop, liveness/readiness/startup probes, ports, resources limits/requests, securityContext, stdin/tty, termination settings, volumeDevices, volumeMounts, workingDir).
- spec.templates[].containerSet begins: containers with args, command, dependencies and env.
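Continuing in the same sketch style, a template entry combining an S3 archiveLocation override with container resources and a securityContext; endpoint, bucket, Secret names and image are placeholders.

    - name: train
      archiveLocation:
        archiveLogs: true
        s3:
          endpoint: minio.example.svc:9000       # placeholder endpoint
          bucket: my-artifacts                   # placeholder bucket
          insecure: true
          key: "{{workflow.name}}/train"
          accessKeySecret:
            name: my-s3-credentials              # placeholder Secret
            key: accesskey
          secretKeySecret:
            name: my-s3-credentials
            key: secretkey
      container:
        image: python:3.12-slim                  # placeholder image
        command: [python, -c, "print('training')"]
        resources:
          requests: {cpu: 500m, memory: 512Mi}
          limits: {cpu: "1", memory: 1Gi}
        securityContext:
          runAsUser: 1000
          runAsNonRoot: true
          allowPrivilegeEscalation: false
          capabilities:
            drop: [ALL]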
- spec.templates[].containerSet continued: its containers repeat the container schema (env/envFrom, image, imagePullPolicy, lifecycle, liveness/readiness/startup probes, ports, resources, securityContext, stdin/tty, volumeDevices, volumeMounts, workingDir) and add dependencies; set-level retryStrategy (duration, retries) and volumeMounts; containers is required.
- spec.templates[].daemon.
- spec.templates[].dag begins: failFast, target, and tasks whose arguments.artifacts carry archive (none/tar/zip), archiveLogs, artifactGC (podMetadata, serviceAccountName, strategy) and the artifactory, azure and gcs backends.
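A sketch of a containerSet template entry using the dependencies and set-level retryStrategy fields above; the images and the shared emptyDir workspace are illustrative only.

    - name: build-and-test
      volumes:
        - name: workspace
          emptyDir: {}
      containerSet:
        retryStrategy:
          retries: "2"          # retry failed containers in the set up to two times
          duration: 30s
        volumeMounts:
          - name: workspace
            mountPath: /workspace
        containers:
          - name: build
            image: golang:1.23                   # placeholder image
            command: [sh, -c]
            args: ["echo built > /workspace/out.txt"]
          - name: test
            dependencies: [build]                # runs only after "build" succeeds
            image: golang:1.23
            command: [sh, -c]
            args: ["cat /workspace/out.txt"]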
- spec.templates[].dag.tasks continued: the remaining argument-artifact backends (git, hdfs, http, oss, raw, s3) plus globalName, mode, optional, path, recurseMode and subPath; arguments.parameters (default, description, enum, globalName, value, valueFrom with configMapKeyRef/default/event/expression/jqFilter/jsonPath/parameter/path/supplied); task-level continueOn (error, failed), dependencies, depends, hooks (the same arguments schema plus expression, template, templateRef), inline, name, onExit, template, templateRef (clusterScope, name, template), when, withItems, withParam and withSequence (count, end, format, start); tasks is required.
- spec.templates[].data begins: source.artifactPaths with the same archive/artifactGC/artifact-backend schema (artifactory, azure, gcs, git, hdfs, http ...).
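A sketch of a dag template entry exercising depends, arguments.parameters, continueOn and withItems; the referenced templates (prepare-data, train-model, send-notification, process-shard) are assumed to be defined elsewhere in the same manifest.

    - name: pipeline
      dag:
        failFast: true
        tasks:
          - name: prepare
            template: prepare-data
          - name: train
            depends: "prepare.Succeeded"
            template: train-model
            arguments:
              parameters:
                - name: learning-rate
                  value: "0.01"
          - name: notify
            depends: "train.Succeeded || train.Failed"
            template: send-notification
            continueOn:
              failed: true            # the DAG keeps going even if notify fails
          - name: fan-out
            depends: prepare
            template: process-shard
            arguments:
              parameters:
                - name: shard
                  value: "{{item}}"
            withItems: ["a", "b", "c"]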
- spec.templates[].data continued: source.artifactPaths finishes (http auth/headers/url, oss, raw, s3, subPath) and transformation is a list of expressions; source and transformation are required.
- spec.templates[].executor (serviceAccountName); failFast; hostAliases (hostnames, ip).
- spec.templates[].http: body, bodyFrom, headers (with secretKeyRef valueFrom), insecureSkipVerify, method, successCondition, timeoutSeconds and the required url.
- spec.templates[].initContainers: the container schema again (env/envFrom, image, imagePullPolicy, lifecycle, liveness/readiness probes, ports, resources) plus mirrorVolumeMounts, with securityContext beginning.
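Two more sketched template entries, one data template filtering S3 artifact paths and one http template; bucket, URL and the filter expression are chosen purely for illustration.

    - name: list-log-files
      data:
        source:
          artifactPaths:
            name: input-logs
            s3:
              bucket: my-bucket                  # placeholder bucket
              key: logs/
        transformation:
          - expression: "filter(data, {# endsWith '.log'})"

    - name: health-check
      http:
        url: https://example.com/healthz         # placeholder URL
        method: GET
        timeoutSeconds: 20
        successCondition: "response.statusCode == 200"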
- spec.templates[].initContainers continued: securityContext, startupProbe, stdin/tty, termination settings, volumeDevices, volumeMounts and workingDir.
- spec.templates[].inputs: artifacts with the same archive/artifactGC/artifact-backend schema (artifactory, azure, gcs, git, hdfs, http, oss, raw, s3) and parameters (default, description, enum, globalName, value, valueFrom).
- spec.templates[].memoize: cache.configMap, key and maxAge, all required.
- spec.templates[].metadata: annotations and labels.
- spec.templates[].metrics.prometheus begins: counter (value), gauge (operation, realtime, value), help ...
string - histogram: - properties: - buckets: - items: - type: number - type: array - value: - type: string - required: - - buckets - - value - type: object - labels: - items: - properties: - key: - type: string - value: - type: string - required: - - key - - value - type: object - type: array - name: - type: string - when: - type: string - required: - - help - - name - type: object - type: array - required: - - prometheus - type: object - name: - type: string - nodeSelector: - additionalProperties: - type: string - type: object - outputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: 
string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - 
required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - exitCode: - type: string - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - result: - type: string - type: object - parallelism: - format: int64 - type: integer - plugin: - type: object - podSpecPatch: - type: string - priority: - format: int32 - type: integer - priorityClassName: - type: string - resource: - properties: - action: - type: string - failureCondition: - type: string - flags: - items: - type: string - type: array - manifest: - type: string - manifestFrom: - properties: - artifact: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: 
boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - 
type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - required: - - artifact - type: object - mergeStrategy: - type: string - setOwnerReference: - type: boolean - successCondition: - type: string - required: - - action - type: object - retryStrategy: - properties: - affinity: - properties: - nodeAntiAffinity: - type: object - type: object - backoff: - properties: - duration: - type: string - factor: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - maxDuration: - type: string - type: object - expression: - type: string - limit: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - retryPolicy: - type: string - type: object - schedulerName: - type: string - script: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: 
object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - 
x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - source: - type: string - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - - source - type: object - securityContext: - properties: - fsGroup: - format: int64 - type: integer - fsGroupChangePolicy: - type: string - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - supplementalGroups: 
- items: - format: int64 - type: integer - type: array - sysctls: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - serviceAccountName: - type: string - sidecars: - items: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - 
items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - mirrorVolumeMounts: - type: boolean - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - 
windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - type: array - steps: - items: - type: array - type: array - suspend: - properties: - duration: - type: string - type: object - synchronization: - properties: - mutex: - properties: - name: - type: string - namespace: - type: string - type: object - semaphore: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - namespace: - type: string - type: object - type: object - timeout: - type: string - tolerations: - items: - properties: - effect: - type: string - key: - type: string - operator: - type: string - tolerationSeconds: - format: int64 - type: integer - value: - type: string - type: object - type: array - volumes: - items: - properties: - awsElasticBlockStore: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - azureDisk: - properties: - cachingMode: - type: string - diskName: - type: string - diskURI: - type: string - fsType: - type: string - kind: - type: string - readOnly: - type: boolean - required: - - diskName - - diskURI - type: object - azureFile: - properties: - readOnly: - type: boolean - secretName: - type: string - shareName: - type: string - required: - - secretName - - shareName - type: object - cephfs: - properties: - monitors: - items: - type: string - type: array - path: - type: string - readOnly: - type: boolean - secretFile: - type: string - secretRef: - properties: - name: - type: string - type: object - 
user: - type: string - required: - - monitors - type: object - cinder: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeID: - type: string - required: - - volumeID - type: object - configMap: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - csi: - properties: - driver: - type: string - fsType: - type: string - nodePublishSecretRef: - properties: - name: - type: string - type: object - readOnly: - type: boolean - volumeAttributes: - additionalProperties: - type: string - type: object - required: - - driver - type: object - downwardAPI: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - emptyDir: - properties: - medium: - type: string - sizeLimit: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - ephemeral: - properties: - volumeClaimTemplate: - properties: - metadata: - type: object - spec: - properties: - accessModes: - items: - type: string - type: array - dataSource: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - dataSourceRef: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - selector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - storageClassName: - type: string - volumeMode: - type: string - volumeName: - type: string - type: object - required: - - spec - type: object - type: object - fc: - properties: - fsType: - type: string - lun: - format: int32 - type: integer - readOnly: - type: boolean - targetWWNs: - items: - type: string - type: array - wwids: - items: - type: string - type: 
array - type: object - flexVolume: - properties: - driver: - type: string - fsType: - type: string - options: - additionalProperties: - type: string - type: object - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - required: - - driver - type: object - flocker: - properties: - datasetName: - type: string - datasetUUID: - type: string - type: object - gcePersistentDisk: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - pdName: - type: string - readOnly: - type: boolean - required: - - pdName - type: object - gitRepo: - properties: - directory: - type: string - repository: - type: string - revision: - type: string - required: - - repository - type: object - glusterfs: - properties: - endpoints: - type: string - path: - type: string - readOnly: - type: boolean - required: - - endpoints - - path - type: object - hostPath: - properties: - path: - type: string - type: - type: string - required: - - path - type: object - iscsi: - properties: - chapAuthDiscovery: - type: boolean - chapAuthSession: - type: boolean - fsType: - type: string - initiatorName: - type: string - iqn: - type: string - iscsiInterface: - type: string - lun: - format: int32 - type: integer - portals: - items: - type: string - type: array - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - targetPortal: - type: string - required: - - iqn - - lun - - targetPortal - type: object - name: - type: string - nfs: - properties: - path: - type: string - readOnly: - type: boolean - server: - type: string - required: - - path - - server - type: object - persistentVolumeClaim: - properties: - claimName: - type: string - readOnly: - type: boolean - required: - - claimName - type: object - photonPersistentDisk: - properties: - fsType: - type: string - pdID: - type: string - required: - - pdID - type: object - portworxVolume: - properties: - fsType: - type: string - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - projected: - properties: - defaultMode: - format: int32 - type: integer - sources: - items: - properties: - configMap: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - downwardAPI: - properties: - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - secret: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - serviceAccountToken: - properties: - audience: - type: string - expirationSeconds: - format: int64 - type: integer - path: - type: string - required: - - path - type: object - type: object 
- type: array - type: object - quobyte: - properties: - group: - type: string - readOnly: - type: boolean - registry: - type: string - tenant: - type: string - user: - type: string - volume: - type: string - required: - - registry - - volume - type: object - rbd: - properties: - fsType: - type: string - image: - type: string - keyring: - type: string - monitors: - items: - type: string - type: array - pool: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - image - - monitors - type: object - scaleIO: - properties: - fsType: - type: string - gateway: - type: string - protectionDomain: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - sslEnabled: - type: boolean - storageMode: - type: string - storagePool: - type: string - system: - type: string - volumeName: - type: string - required: - - gateway - - secretRef - - system - type: object - secret: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - optional: - type: boolean - secretName: - type: string - type: object - storageos: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeName: - type: string - volumeNamespace: - type: string - type: object - vsphereVolume: - properties: - fsType: - type: string - storagePolicyID: - type: string - storagePolicyName: - type: string - volumePath: - type: string - required: - - volumePath - type: object - required: - - name - type: object - type: array - type: object - type: array - tolerations: - items: - properties: - effect: - type: string - key: - type: string - operator: - type: string - tolerationSeconds: - format: int64 - type: integer - value: - type: string - type: object - type: array - ttlStrategy: - properties: - secondsAfterCompletion: - format: int32 - type: integer - secondsAfterFailure: - format: int32 - type: integer - secondsAfterSuccess: - format: int32 - type: integer - type: object - volumeClaimGC: - properties: - strategy: - type: string - type: object - volumeClaimTemplates: - items: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - properties: - accessModes: - items: - type: string - type: array - dataSource: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - dataSourceRef: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - selector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - 
key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - storageClassName: - type: string - volumeMode: - type: string - volumeName: - type: string - type: object - status: - properties: - accessModes: - items: - type: string - type: array - allocatedResources: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - capacity: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - conditions: - items: - properties: - lastProbeTime: - format: date-time - type: string - lastTransitionTime: - format: date-time - type: string - message: - type: string - reason: - type: string - status: - type: string - type: - type: string - required: - - status - - type - type: object - type: array - phase: - type: string - resizeStatus: - type: string - type: object - type: object - type: array - volumes: - items: - properties: - awsElasticBlockStore: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - azureDisk: - properties: - cachingMode: - type: string - diskName: - type: string - diskURI: - type: string - fsType: - type: string - kind: - type: string - readOnly: - type: boolean - required: - - diskName - - diskURI - type: object - azureFile: - properties: - readOnly: - type: boolean - secretName: - type: string - shareName: - type: string - required: - - secretName - - shareName - type: object - cephfs: - properties: - monitors: - items: - type: string - type: array - path: - type: string - readOnly: - type: boolean - secretFile: - type: string - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - monitors - type: object - cinder: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeID: - type: string - required: - - volumeID - type: object - configMap: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - csi: - properties: - driver: - type: string - fsType: - type: string - nodePublishSecretRef: - properties: - name: - type: string - type: object - readOnly: - type: boolean - volumeAttributes: - additionalProperties: - type: string - type: object - required: - - driver - type: object - downwardAPI: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - 
resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - emptyDir: - properties: - medium: - type: string - sizeLimit: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - ephemeral: - properties: - volumeClaimTemplate: - properties: - metadata: - type: object - spec: - properties: - accessModes: - items: - type: string - type: array - dataSource: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - dataSourceRef: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - selector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - storageClassName: - type: string - volumeMode: - type: string - volumeName: - type: string - type: object - required: - - spec - type: object - type: object - fc: - properties: - fsType: - type: string - lun: - format: int32 - type: integer - readOnly: - type: boolean - targetWWNs: - items: - type: string - type: array - wwids: - items: - type: string - type: array - type: object - flexVolume: - properties: - driver: - type: string - fsType: - type: string - options: - additionalProperties: - type: string - type: object - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - required: - - driver - type: object - flocker: - properties: - datasetName: - type: string - datasetUUID: - type: string - type: object - gcePersistentDisk: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - pdName: - type: string - readOnly: - type: boolean - required: - - pdName - type: object - gitRepo: - properties: - directory: - type: string - repository: - type: string - revision: - type: string - required: - - repository - type: object - glusterfs: - properties: - endpoints: - type: string - path: - type: string - readOnly: - type: boolean - required: - - endpoints - - path - type: object - hostPath: - properties: - path: - type: string - type: - type: string - required: - - path - type: object - iscsi: - properties: - chapAuthDiscovery: - type: boolean - chapAuthSession: - type: boolean - fsType: - type: string - initiatorName: - type: string - iqn: - type: string - iscsiInterface: - type: string - lun: - format: int32 - type: integer - portals: - items: - type: string - type: array - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - targetPortal: - type: string - required: - - iqn - - lun - 
- targetPortal - type: object - name: - type: string - nfs: - properties: - path: - type: string - readOnly: - type: boolean - server: - type: string - required: - - path - - server - type: object - persistentVolumeClaim: - properties: - claimName: - type: string - readOnly: - type: boolean - required: - - claimName - type: object - photonPersistentDisk: - properties: - fsType: - type: string - pdID: - type: string - required: - - pdID - type: object - portworxVolume: - properties: - fsType: - type: string - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - projected: - properties: - defaultMode: - format: int32 - type: integer - sources: - items: - properties: - configMap: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - downwardAPI: - properties: - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - secret: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - serviceAccountToken: - properties: - audience: - type: string - expirationSeconds: - format: int64 - type: integer - path: - type: string - required: - - path - type: object - type: object - type: array - type: object - quobyte: - properties: - group: - type: string - readOnly: - type: boolean - registry: - type: string - tenant: - type: string - user: - type: string - volume: - type: string - required: - - registry - - volume - type: object - rbd: - properties: - fsType: - type: string - image: - type: string - keyring: - type: string - monitors: - items: - type: string - type: array - pool: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - image - - monitors - type: object - scaleIO: - properties: - fsType: - type: string - gateway: - type: string - protectionDomain: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - sslEnabled: - type: boolean - storageMode: - type: string - storagePool: - type: string - system: - type: string - volumeName: - type: string - required: - - gateway - - secretRef - - system - type: object - secret: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - optional: - type: boolean - secretName: - type: string - type: object - storageos: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - 
type: object - volumeName: - type: string - volumeNamespace: - type: string - type: object - vsphereVolume: - properties: - fsType: - type: string - storagePolicyID: - type: string - storagePolicyName: - type: string - volumePath: - type: string - required: - - volumePath - type: object - required: - - name - type: object - type: array - workflowMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - labelsFrom: - additionalProperties: - properties: - expression: - type: string - required: - - expression - type: object - type: object - type: object - workflowTemplateRef: - properties: - clusterScope: - type: boolean - name: - type: string - type: object - type: object - synchronization: - properties: - mutex: - properties: - holding: - items: - properties: - holder: - type: string - mutex: - type: string - type: object - type: array - x-kubernetes-list-type: atomic - waiting: - items: - properties: - holder: - type: string - mutex: - type: string - type: object - type: array - x-kubernetes-list-type: atomic - type: object - semaphore: - properties: - holding: - items: - properties: - holders: - items: - type: string - type: array - x-kubernetes-list-type: atomic - semaphore: - type: string - type: object - type: array - waiting: - items: - properties: - holders: - items: - type: string - type: array - x-kubernetes-list-type: atomic - semaphore: - type: string - type: object - type: array - type: object - type: object - taskResultsCompletionStatus: - additionalProperties: - type: boolean - type: object - type: object - required: - - metadata - - spec - type: object - served: true - storage: true - subresources: {} diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflowtaskresults.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflowtaskresults.yaml deleted file mode 100644 index 4d3f86a6d89..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflowtaskresults.yaml +++ /dev/null @@ -1,600 +0,0 @@ -# This is an auto-generated file. 
DO NOT EDIT -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: # kpt-merge: /workflowtaskresults.argoproj.io - name: workflowtaskresults.argoproj.io - annotations: - internal.kpt.dev/upstream-identifier: 'apiextensions.k8s.io|CustomResourceDefinition|default|workflowtaskresults.argoproj.io' -spec: - group: argoproj.io - names: - kind: WorkflowTaskResult - listKind: WorkflowTaskResultList - plural: workflowtaskresults - singular: workflowtaskresult - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - message: - type: string - metadata: - type: object - outputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - 
properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: 
string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - exitCode: - type: string - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - result: - type: string - type: object - phase: - type: string - progress: - type: string - required: - - metadata - type: object - served: true - storage: true diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflowtasksets.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflowtasksets.yaml deleted file mode 100644 index ddaecf65665..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflowtasksets.yaml +++ /dev/null @@ -1,8803 +0,0 @@ -# This is an auto-generated file. DO NOT EDIT -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: # kpt-merge: /workflowtasksets.argoproj.io - name: workflowtasksets.argoproj.io - annotations: - internal.kpt.dev/upstream-identifier: 'apiextensions.k8s.io|CustomResourceDefinition|default|workflowtasksets.argoproj.io' -spec: - group: argoproj.io - names: - kind: WorkflowTaskSet - listKind: WorkflowTaskSetList - plural: workflowtasksets - shortNames: - - wfts - singular: workflowtaskset - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - properties: - tasks: - additionalProperties: - properties: - activeDeadlineSeconds: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - affinity: - properties: - nodeAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - preference: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - weight: - format: int32 - type: integer - required: - - preference - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - properties: - nodeSelectorTerms: - items: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: 
array - required: - - key - - operator - type: object - type: array - type: object - type: array - required: - - nodeSelectorTerms - type: object - type: object - podAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - podAntiAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - 
operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - type: object - archiveLocation: - properties: - archiveLogs: - type: boolean - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - 
type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - raw: - properties: - data: - type: string - required: - - data - type: object - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - type: object - automountServiceAccountToken: - type: boolean - container: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - 
resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object 
- failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer 
- type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - containerSet: - properties: - containers: - items: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - dependencies: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - 
properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - 
type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - type: array - retryStrategy: - properties: - duration: - type: string - retries: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - retries - type: object - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - required: - - containers - type: object - daemon: - type: boolean - dag: - properties: - failFast: - type: boolean - target: - type: string - tasks: - items: - properties: - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: 
- passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - 
type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - continueOn: - properties: - error: - type: boolean - failed: - type: boolean - type: object - dependencies: - items: - type: string - type: array - depends: - type: string - hooks: - additionalProperties: - properties: - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - 
[Diff hunk: removed lines from a vendored Argo Workflows CustomResourceDefinition openAPIV3Schema. The removed schema defines the Workflow template fields — dag tasks (including withItems, withParam, withSequence, and templateRef), data sources and transformations, executor, failFast, hostAliases, http, initContainers, inputs and outputs (artifact and parameter schemas for the artifactory, azure, gcs, git, hdfs, http, oss, raw, and s3 stores), memoize, metadata, metrics (prometheus counters, gauges, and histograms), nodeSelector, parallelism, plugin, podSpecPatch, priority, priorityClassName, resource (including manifestFrom), retryStrategy, schedulerName, script, securityContext, serviceAccountName, sidecars, steps, suspend, synchronization, timeout, tolerations, and volumes — together with their nested container, probe, lifecycle, and securityContext sub-schemas.]
- additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - selector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - storageClassName: - type: string - volumeMode: - type: string - volumeName: - type: string - type: object - required: - - spec - type: object - type: object - fc: - properties: - fsType: - type: string - lun: - format: int32 - type: integer - readOnly: - type: boolean - targetWWNs: - items: - type: string - type: array - wwids: - items: - type: string - type: array - type: object - flexVolume: - properties: - driver: - type: string - fsType: - type: string - options: - additionalProperties: - type: string - type: object - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - required: - - driver - type: object - flocker: - properties: - datasetName: - type: string - datasetUUID: - type: string - type: object - gcePersistentDisk: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - pdName: - type: string - readOnly: - type: boolean - required: - - pdName - type: object - gitRepo: - properties: - directory: - type: string - repository: - type: string - revision: - type: string - required: - - repository - type: object - glusterfs: - properties: - endpoints: - type: string - path: - type: string - readOnly: - type: boolean - required: - - endpoints - - path - type: object - hostPath: - properties: - path: - type: string - type: - type: string - required: - - path - type: object - iscsi: - properties: - chapAuthDiscovery: - type: boolean - chapAuthSession: - type: boolean - fsType: - type: string - initiatorName: - type: string - iqn: - type: string - iscsiInterface: - type: string - lun: - format: int32 - type: integer - portals: - items: - type: string - type: array - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - targetPortal: - type: string - required: - - iqn - - lun - - targetPortal - type: object - name: - type: string - nfs: - properties: - path: - type: string - readOnly: - type: boolean - server: - type: string - required: - - path - - server - type: object - persistentVolumeClaim: - properties: - claimName: - type: string - readOnly: - type: boolean - required: - - claimName - type: object - photonPersistentDisk: - properties: - fsType: - type: string - pdID: - type: string - required: - - pdID - type: object - portworxVolume: - properties: - fsType: - type: string - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - projected: - properties: - defaultMode: - format: int32 - type: integer - sources: - items: - properties: - configMap: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - downwardAPI: - properties: - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: 
object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - secret: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - serviceAccountToken: - properties: - audience: - type: string - expirationSeconds: - format: int64 - type: integer - path: - type: string - required: - - path - type: object - type: object - type: array - type: object - quobyte: - properties: - group: - type: string - readOnly: - type: boolean - registry: - type: string - tenant: - type: string - user: - type: string - volume: - type: string - required: - - registry - - volume - type: object - rbd: - properties: - fsType: - type: string - image: - type: string - keyring: - type: string - monitors: - items: - type: string - type: array - pool: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - image - - monitors - type: object - scaleIO: - properties: - fsType: - type: string - gateway: - type: string - protectionDomain: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - sslEnabled: - type: boolean - storageMode: - type: string - storagePool: - type: string - system: - type: string - volumeName: - type: string - required: - - gateway - - secretRef - - system - type: object - secret: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - optional: - type: boolean - secretName: - type: string - type: object - storageos: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeName: - type: string - volumeNamespace: - type: string - type: object - vsphereVolume: - properties: - fsType: - type: string - storagePolicyID: - type: string - storagePolicyName: - type: string - volumePath: - type: string - required: - - volumePath - type: object - required: - - name - type: object - type: array - type: object - type: object - type: object - status: - properties: - nodes: - additionalProperties: - properties: - message: - type: string - outputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - 
properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: 
- type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - exitCode: - type: string - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - result: - type: string - type: object - phase: - type: string - progress: - type: string - type: object - type: object - type: object - required: - - metadata - - spec - type: object - served: true - storage: true - subresources: - status: {} diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflowtemplates.yaml 
b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflowtemplates.yaml deleted file mode 100644 index c7d7e34b0da..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflowtemplates.yaml +++ /dev/null @@ -1,19153 +0,0 @@ -# This is an auto-generated file. DO NOT EDIT -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: # kpt-merge: /workflowtemplates.argoproj.io - name: workflowtemplates.argoproj.io - annotations: - internal.kpt.dev/upstream-identifier: 'apiextensions.k8s.io|CustomResourceDefinition|default|workflowtemplates.argoproj.io' -spec: - group: argoproj.io - names: - kind: WorkflowTemplate - listKind: WorkflowTemplateList - plural: workflowtemplates - shortNames: - - wftmpl - singular: workflowtemplate - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - properties: - activeDeadlineSeconds: - format: int64 - type: integer - affinity: - properties: - nodeAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - preference: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - weight: - format: int32 - type: integer - required: - - preference - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - properties: - nodeSelectorTerms: - items: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - type: array - required: - - nodeSelectorTerms - type: object - type: object - podAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - 
operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - podAntiAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - type: object - archiveLogs: - type: boolean - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: 
- - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: 
object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - artifactGC: - properties: - forceFinalizerRemoval: - type: boolean - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - podSpecPatch: - type: string - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactRepositoryRef: - properties: - configMap: - type: string - key: - type: string - type: object - automountServiceAccountToken: - type: boolean - dnsConfig: - properties: - nameservers: - items: - type: 
string - type: array - options: - items: - properties: - name: - type: string - value: - type: string - type: object - type: array - searches: - items: - type: string - type: array - type: object - dnsPolicy: - type: string - entrypoint: - type: string - executor: - properties: - serviceAccountName: - type: string - type: object - hooks: - additionalProperties: - properties: - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: 
- auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - 
parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - expression: - type: string - template: - type: string - templateRef: - properties: - clusterScope: - type: boolean - name: - type: string - template: - type: string - type: object - type: object - type: object - hostAliases: - items: - properties: - hostnames: - items: - type: string - type: array - ip: - type: string - type: object - type: array - hostNetwork: - type: boolean - imagePullSecrets: - items: - properties: - name: - type: string - type: object - type: array - metrics: - properties: - prometheus: - items: - properties: - counter: - properties: - value: - type: string - required: - - value - type: object - gauge: - properties: - operation: - type: string - realtime: - type: boolean - value: - type: string - required: - - realtime - - value - type: object - help: - type: string - histogram: - properties: - buckets: - items: - type: number - type: array - value: - type: string - required: - - buckets - - value - type: object - labels: - items: - properties: - key: - type: string - value: - type: string - required: - - key - - value - type: object - type: array - name: - type: string - when: - type: string - required: - - help - - name - type: object - type: array - required: - - prometheus - type: object - nodeSelector: - additionalProperties: - type: string - type: object - onExit: - type: string - parallelism: - format: int64 - type: integer - podDisruptionBudget: - properties: - maxUnavailable: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - minAvailable: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - selector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - type: object - podGC: - properties: - deleteDelayDuration: - type: string - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - strategy: - type: string - type: object - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - podPriority: - format: int32 - type: integer - podPriorityClassName: - type: string - podSpecPatch: - type: string - priority: - format: int32 - type: integer - retryStrategy: - properties: - affinity: - properties: - nodeAntiAffinity: - type: object - type: object - backoff: - properties: - duration: - type: string - factor: - anyOf: - - type: 
integer - - type: string - x-kubernetes-int-or-string: true - maxDuration: - type: string - type: object - expression: - type: string - limit: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - retryPolicy: - type: string - type: object - schedulerName: - type: string - securityContext: - properties: - fsGroup: - format: int64 - type: integer - fsGroupChangePolicy: - type: string - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - supplementalGroups: - items: - format: int64 - type: integer - type: array - sysctls: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - serviceAccountName: - type: string - shutdown: - type: string - suspend: - type: boolean - synchronization: - properties: - mutex: - properties: - name: - type: string - namespace: - type: string - type: object - semaphore: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - namespace: - type: string - type: object - type: object - templateDefaults: - properties: - activeDeadlineSeconds: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - affinity: - properties: - nodeAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - preference: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - weight: - format: int32 - type: integer - required: - - preference - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - properties: - nodeSelectorTerms: - items: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchFields: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - type: object - type: array - required: - - nodeSelectorTerms - type: object - type: object - podAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: 
- properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - podAntiAffinity: - properties: - preferredDuringSchedulingIgnoredDuringExecution: - items: - properties: - podAffinityTerm: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - weight: - format: int32 - type: integer - required: - - podAffinityTerm - - weight - type: object - type: array - requiredDuringSchedulingIgnoredDuringExecution: - items: - properties: - labelSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaceSelector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - namespaces: - items: - type: string - type: array - topologyKey: - type: string - required: - - topologyKey - type: object - type: array - type: object - type: object - archiveLocation: - properties: - archiveLogs: - type: boolean - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: 
string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: 
object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - raw: - properties: - data: - type: string - required: - - data - type: object - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - type: object - automountServiceAccountToken: - type: boolean - container: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - 
command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: 
integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - 
required: - - name - type: object - containerSet: - properties: - containers: - items: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - dependencies: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: 
- format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - 
type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - type: array - retryStrategy: - properties: - duration: - type: string - retries: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - retries - type: object - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - required: - - containers - type: object - daemon: - type: boolean - dag: - properties: - failFast: - type: boolean - target: - type: string - tasks: - items: - properties: - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - 
container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - 
properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - continueOn: - properties: - error: - type: boolean - failed: - type: boolean - type: object - dependencies: - items: - type: string - type: array - depends: - type: string - hooks: - additionalProperties: - properties: - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - 
key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - 
type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - expression: - type: string - template: - type: string - templateRef: - properties: - clusterScope: - type: boolean - name: - type: string - template: - type: string - type: object - type: object - type: object - inline: {} - name: - type: string - onExit: - type: string - template: - type: string - templateRef: - properties: - clusterScope: - type: boolean - name: - type: string - template: - type: string - type: object - when: - type: string - withItems: - items: - type: 
object - type: array - withParam: - type: string - withSequence: - properties: - count: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - end: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - format: - type: string - start: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - type: object - required: - - name - type: object - type: array - required: - - tasks - type: object - data: - properties: - source: - properties: - artifactPaths: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - 
krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - 
type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: object - transformation: - items: - properties: - expression: - type: string - required: - - expression - type: object - type: array - required: - - source - - transformation - type: object - executor: - properties: - serviceAccountName: - type: string - type: object - failFast: - type: boolean - hostAliases: - items: - properties: - hostnames: - items: - type: string - type: array - ip: - type: string - type: object - type: array - http: - properties: - body: - type: string - bodyFrom: - properties: - bytes: - format: byte - type: string - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - insecureSkipVerify: - type: boolean - method: - type: string - successCondition: - type: string - timeoutSeconds: - format: int64 - type: integer - url: - type: string - required: - - url - type: object - initContainers: - items: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - 
type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - mirrorVolumeMounts: - type: boolean - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - 
x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - type: array - inputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - 
type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: 
- properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - memoize: - properties: - cache: - properties: - configMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - configMap - type: object - key: - type: string - maxAge: - type: string - required: - - cache - - key - - maxAge - type: object - metadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - metrics: 
- properties: - prometheus: - items: - properties: - counter: - properties: - value: - type: string - required: - - value - type: object - gauge: - properties: - operation: - type: string - realtime: - type: boolean - value: - type: string - required: - - realtime - - value - type: object - help: - type: string - histogram: - properties: - buckets: - items: - type: number - type: array - value: - type: string - required: - - buckets - - value - type: object - labels: - items: - properties: - key: - type: string - value: - type: string - required: - - key - - value - type: object - type: array - name: - type: string - when: - type: string - required: - - help - - name - type: object - type: array - required: - - prometheus - type: object - name: - type: string - nodeSelector: - additionalProperties: - type: string - type: object - outputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - 
[Elided: the remainder of the auto-generated OpenAPI v3 schema removed from the vendored Argo Workflows CRD manifest in this diff. The removed block defines the artifact backends (artifactory, azure, gcs, git, hdfs, http with basicAuth/clientCert/oauth2, oss, raw, s3 with encryption options), parameters and their valueFrom sources, the resource/script/container/containerSet/dag template types, retryStrategy with backoff, sidecars, synchronization (mutex/semaphore), suspend, timeout, tolerations, node/pod affinity rules, securityContext, probes, archiveLocation, and the full set of Kubernetes volume types. A minimal illustrative Workflow that exercises a few of these fields follows.]
usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: 
- key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - continueOn: - properties: - error: - type: boolean - failed: - type: boolean - type: object - dependencies: - items: - type: string - type: array - depends: - type: string - hooks: - additionalProperties: - properties: - arguments: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - 
podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - 
oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - 
expression: - type: string - template: - type: string - templateRef: - properties: - clusterScope: - type: boolean - name: - type: string - template: - type: string - type: object - type: object - type: object - inline: {} - name: - type: string - onExit: - type: string - template: - type: string - templateRef: - properties: - clusterScope: - type: boolean - name: - type: string - template: - type: string - type: object - when: - type: string - withItems: - items: - type: object - type: array - withParam: - type: string - withSequence: - properties: - count: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - end: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - format: - type: string - start: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - type: object - required: - - name - type: object - type: array - required: - - tasks - type: object - data: - properties: - source: - properties: - artifactPaths: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - 
properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - 
kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: object - transformation: - items: - properties: - expression: - type: string - required: - - expression - type: object - type: array - required: - - source - - transformation - type: object - executor: - properties: - serviceAccountName: - type: string - type: object - failFast: - type: boolean - hostAliases: - items: - properties: - hostnames: - items: - type: string - type: array - ip: - type: string - type: object - type: array - http: - properties: - body: - type: string - bodyFrom: - properties: - bytes: - format: byte - type: string - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - insecureSkipVerify: - type: boolean - method: - type: string - successCondition: - type: string - timeoutSeconds: - format: int64 - type: integer - url: - type: string - required: - - url - type: object - initContainers: - items: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - 
tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - mirrorVolumeMounts: - type: boolean - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - 
limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - type: array - inputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - 
additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - 
properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - type: object - memoize: - properties: - cache: - 
properties: - configMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - configMap - type: object - key: - type: string - maxAge: - type: string - required: - - cache - - key - - maxAge - type: object - metadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - metrics: - properties: - prometheus: - items: - properties: - counter: - properties: - value: - type: string - required: - - value - type: object - gauge: - properties: - operation: - type: string - realtime: - type: boolean - value: - type: string - required: - - realtime - - value - type: object - help: - type: string - histogram: - properties: - buckets: - items: - type: number - type: array - value: - type: string - required: - - buckets - - value - type: object - labels: - items: - properties: - key: - type: string - value: - type: string - required: - - key - - value - type: object - type: array - name: - type: string - when: - type: string - required: - - help - - name - type: object - type: array - required: - - prometheus - type: object - name: - type: string - nodeSelector: - additionalProperties: - type: string - type: object - outputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object 
- usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - 
type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - exitCode: - type: string - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - result: - type: string - type: object - parallelism: - format: int64 - type: integer - plugin: - type: object - podSpecPatch: - type: string - priority: - format: int32 - type: integer - priorityClassName: - type: string - resource: - properties: - action: - type: string - failureCondition: - type: string - flags: - items: - type: string - type: array - manifest: - type: string - manifestFrom: - properties: - artifact: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: 
string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - 
markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - required: - - artifact - type: object - mergeStrategy: - type: string - setOwnerReference: - type: boolean - successCondition: - type: string - required: - - action - type: object - retryStrategy: - properties: - affinity: - properties: - nodeAntiAffinity: - type: object - type: object - backoff: - properties: - duration: - type: string - factor: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - maxDuration: - type: string - type: object - expression: - type: string - limit: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - retryPolicy: - type: string - type: object - schedulerName: - type: string - script: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: 
object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - 
x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - source: - type: string - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - 
type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - image - - source - type: object - securityContext: - properties: - fsGroup: - format: int64 - type: integer - fsGroupChangePolicy: - type: string - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - supplementalGroups: - items: - format: int64 - type: integer - type: array - sysctls: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - serviceAccountName: - type: string - sidecars: - items: - properties: - args: - items: - type: string - type: array - command: - items: - type: string - type: array - env: - items: - properties: - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - secretKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - required: - - name - type: object - type: array - envFrom: - items: - properties: - configMapRef: - properties: - name: - type: string - optional: - type: boolean - type: object - prefix: - type: string - secretRef: - properties: - name: - type: string - optional: - type: boolean - type: object - type: object - type: array - image: - type: string - imagePullPolicy: - type: string - lifecycle: - properties: - postStart: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - preStop: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: 
- - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - type: object - type: object - livenessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - mirrorVolumeMounts: - type: boolean - name: - type: string - ports: - items: - properties: - containerPort: - format: int32 - type: integer - hostIP: - type: string - hostPort: - format: int32 - type: integer - name: - type: string - protocol: - default: TCP - type: string - required: - - containerPort - type: object - type: array - x-kubernetes-list-map-keys: - - containerPort - - protocol - x-kubernetes-list-type: map - readinessProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: 
^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - securityContext: - properties: - allowPrivilegeEscalation: - type: boolean - capabilities: - properties: - add: - items: - type: string - type: array - drop: - items: - type: string - type: array - type: object - privileged: - type: boolean - procMount: - type: string - readOnlyRootFilesystem: - type: boolean - runAsGroup: - format: int64 - type: integer - runAsNonRoot: - type: boolean - runAsUser: - format: int64 - type: integer - seLinuxOptions: - properties: - level: - type: string - role: - type: string - type: - type: string - user: - type: string - type: object - seccompProfile: - properties: - localhostProfile: - type: string - type: - type: string - required: - - type - type: object - windowsOptions: - properties: - gmsaCredentialSpec: - type: string - gmsaCredentialSpecName: - type: string - hostProcess: - type: boolean - runAsUserName: - type: string - type: object - type: object - startupProbe: - properties: - exec: - properties: - command: - items: - type: string - type: array - type: object - failureThreshold: - format: int32 - type: integer - grpc: - properties: - port: - format: int32 - type: integer - service: - type: string - required: - - port - type: object - httpGet: - properties: - host: - type: string - httpHeaders: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - path: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - scheme: - type: string - required: - - port - type: object - initialDelaySeconds: - format: int32 - type: integer - periodSeconds: - format: int32 - type: integer - successThreshold: - format: int32 - type: integer - tcpSocket: - properties: - host: - type: string - port: - anyOf: - - type: integer - - type: string - x-kubernetes-int-or-string: true - required: - - port - type: object - terminationGracePeriodSeconds: - format: int64 - type: integer - timeoutSeconds: - format: int32 - type: integer - type: object - stdin: - type: boolean - stdinOnce: - type: boolean - terminationMessagePath: - type: string - terminationMessagePolicy: - type: string - tty: - type: boolean - volumeDevices: - items: - properties: - devicePath: - type: string - name: - type: string - required: - - devicePath - - name - type: object - type: array - volumeMounts: - items: - properties: - mountPath: - type: string - mountPropagation: - type: string - name: - type: string - readOnly: - type: boolean - subPath: - type: string - subPathExpr: - type: string - required: - - mountPath - - name - type: object - type: array - workingDir: - type: string - required: - - name - type: object - type: array - steps: - items: - type: array - type: array - suspend: - properties: - duration: - type: string - type: object - synchronization: - properties: - mutex: - properties: - name: - type: string - namespace: - type: string - type: object - semaphore: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - namespace: - type: string - type: object - type: object - timeout: - type: string - tolerations: - items: - properties: - effect: - type: string - key: - type: string - operator: - type: string - tolerationSeconds: - format: int64 - type: integer - value: - type: string - type: 
object - type: array - volumes: - items: - properties: - awsElasticBlockStore: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - azureDisk: - properties: - cachingMode: - type: string - diskName: - type: string - diskURI: - type: string - fsType: - type: string - kind: - type: string - readOnly: - type: boolean - required: - - diskName - - diskURI - type: object - azureFile: - properties: - readOnly: - type: boolean - secretName: - type: string - shareName: - type: string - required: - - secretName - - shareName - type: object - cephfs: - properties: - monitors: - items: - type: string - type: array - path: - type: string - readOnly: - type: boolean - secretFile: - type: string - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - monitors - type: object - cinder: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeID: - type: string - required: - - volumeID - type: object - configMap: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - csi: - properties: - driver: - type: string - fsType: - type: string - nodePublishSecretRef: - properties: - name: - type: string - type: object - readOnly: - type: boolean - volumeAttributes: - additionalProperties: - type: string - type: object - required: - - driver - type: object - downwardAPI: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - emptyDir: - properties: - medium: - type: string - sizeLimit: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - ephemeral: - properties: - volumeClaimTemplate: - properties: - metadata: - type: object - spec: - properties: - accessModes: - items: - type: string - type: array - dataSource: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - dataSourceRef: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - 
- type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - selector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - storageClassName: - type: string - volumeMode: - type: string - volumeName: - type: string - type: object - required: - - spec - type: object - type: object - fc: - properties: - fsType: - type: string - lun: - format: int32 - type: integer - readOnly: - type: boolean - targetWWNs: - items: - type: string - type: array - wwids: - items: - type: string - type: array - type: object - flexVolume: - properties: - driver: - type: string - fsType: - type: string - options: - additionalProperties: - type: string - type: object - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - required: - - driver - type: object - flocker: - properties: - datasetName: - type: string - datasetUUID: - type: string - type: object - gcePersistentDisk: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - pdName: - type: string - readOnly: - type: boolean - required: - - pdName - type: object - gitRepo: - properties: - directory: - type: string - repository: - type: string - revision: - type: string - required: - - repository - type: object - glusterfs: - properties: - endpoints: - type: string - path: - type: string - readOnly: - type: boolean - required: - - endpoints - - path - type: object - hostPath: - properties: - path: - type: string - type: - type: string - required: - - path - type: object - iscsi: - properties: - chapAuthDiscovery: - type: boolean - chapAuthSession: - type: boolean - fsType: - type: string - initiatorName: - type: string - iqn: - type: string - iscsiInterface: - type: string - lun: - format: int32 - type: integer - portals: - items: - type: string - type: array - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - targetPortal: - type: string - required: - - iqn - - lun - - targetPortal - type: object - name: - type: string - nfs: - properties: - path: - type: string - readOnly: - type: boolean - server: - type: string - required: - - path - - server - type: object - persistentVolumeClaim: - properties: - claimName: - type: string - readOnly: - type: boolean - required: - - claimName - type: object - photonPersistentDisk: - properties: - fsType: - type: string - pdID: - type: string - required: - - pdID - type: object - portworxVolume: - properties: - fsType: - type: string - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - projected: - properties: - defaultMode: - format: int32 - type: integer - sources: - items: - properties: - configMap: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - downwardAPI: - properties: - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - 
type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - secret: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - serviceAccountToken: - properties: - audience: - type: string - expirationSeconds: - format: int64 - type: integer - path: - type: string - required: - - path - type: object - type: object - type: array - type: object - quobyte: - properties: - group: - type: string - readOnly: - type: boolean - registry: - type: string - tenant: - type: string - user: - type: string - volume: - type: string - required: - - registry - - volume - type: object - rbd: - properties: - fsType: - type: string - image: - type: string - keyring: - type: string - monitors: - items: - type: string - type: array - pool: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - image - - monitors - type: object - scaleIO: - properties: - fsType: - type: string - gateway: - type: string - protectionDomain: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - sslEnabled: - type: boolean - storageMode: - type: string - storagePool: - type: string - system: - type: string - volumeName: - type: string - required: - - gateway - - secretRef - - system - type: object - secret: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - optional: - type: boolean - secretName: - type: string - type: object - storageos: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeName: - type: string - volumeNamespace: - type: string - type: object - vsphereVolume: - properties: - fsType: - type: string - storagePolicyID: - type: string - storagePolicyName: - type: string - volumePath: - type: string - required: - - volumePath - type: object - required: - - name - type: object - type: array - type: object - type: array - tolerations: - items: - properties: - effect: - type: string - key: - type: string - operator: - type: string - tolerationSeconds: - format: int64 - type: integer - value: - type: string - type: object - type: array - ttlStrategy: - properties: - secondsAfterCompletion: - format: int32 - type: integer - secondsAfterFailure: - format: int32 - type: integer - secondsAfterSuccess: - format: int32 - type: integer - type: object - volumeClaimGC: - properties: - strategy: - type: string - type: object - volumeClaimTemplates: - items: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - properties: - accessModes: - items: - type: string - type: array - dataSource: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - 
dataSourceRef: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - selector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - storageClassName: - type: string - volumeMode: - type: string - volumeName: - type: string - type: object - status: - properties: - accessModes: - items: - type: string - type: array - allocatedResources: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - capacity: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - conditions: - items: - properties: - lastProbeTime: - format: date-time - type: string - lastTransitionTime: - format: date-time - type: string - message: - type: string - reason: - type: string - status: - type: string - type: - type: string - required: - - status - - type - type: object - type: array - phase: - type: string - resizeStatus: - type: string - type: object - type: object - type: array - volumes: - items: - properties: - awsElasticBlockStore: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - azureDisk: - properties: - cachingMode: - type: string - diskName: - type: string - diskURI: - type: string - fsType: - type: string - kind: - type: string - readOnly: - type: boolean - required: - - diskName - - diskURI - type: object - azureFile: - properties: - readOnly: - type: boolean - secretName: - type: string - shareName: - type: string - required: - - secretName - - shareName - type: object - cephfs: - properties: - monitors: - items: - type: string - type: array - path: - type: string - readOnly: - type: boolean - secretFile: - type: string - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - monitors - type: object - cinder: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeID: - type: string - required: - - volumeID - type: object - configMap: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - 
type: object - csi: - properties: - driver: - type: string - fsType: - type: string - nodePublishSecretRef: - properties: - name: - type: string - type: object - readOnly: - type: boolean - volumeAttributes: - additionalProperties: - type: string - type: object - required: - - driver - type: object - downwardAPI: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - emptyDir: - properties: - medium: - type: string - sizeLimit: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - ephemeral: - properties: - volumeClaimTemplate: - properties: - metadata: - type: object - spec: - properties: - accessModes: - items: - type: string - type: array - dataSource: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - dataSourceRef: - properties: - apiGroup: - type: string - kind: - type: string - name: - type: string - required: - - kind - - name - type: object - resources: - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - selector: - properties: - matchExpressions: - items: - properties: - key: - type: string - operator: - type: string - values: - items: - type: string - type: array - required: - - key - - operator - type: object - type: array - matchLabels: - additionalProperties: - type: string - type: object - type: object - storageClassName: - type: string - volumeMode: - type: string - volumeName: - type: string - type: object - required: - - spec - type: object - type: object - fc: - properties: - fsType: - type: string - lun: - format: int32 - type: integer - readOnly: - type: boolean - targetWWNs: - items: - type: string - type: array - wwids: - items: - type: string - type: array - type: object - flexVolume: - properties: - driver: - type: string - fsType: - type: string - options: - additionalProperties: - type: string - type: object - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - required: - - driver - type: object - flocker: - properties: - datasetName: - type: string - datasetUUID: - type: string - type: object - gcePersistentDisk: - properties: - fsType: - type: string - partition: - format: int32 - type: integer - pdName: - type: string - readOnly: - type: boolean - required: - - pdName 
- type: object - gitRepo: - properties: - directory: - type: string - repository: - type: string - revision: - type: string - required: - - repository - type: object - glusterfs: - properties: - endpoints: - type: string - path: - type: string - readOnly: - type: boolean - required: - - endpoints - - path - type: object - hostPath: - properties: - path: - type: string - type: - type: string - required: - - path - type: object - iscsi: - properties: - chapAuthDiscovery: - type: boolean - chapAuthSession: - type: boolean - fsType: - type: string - initiatorName: - type: string - iqn: - type: string - iscsiInterface: - type: string - lun: - format: int32 - type: integer - portals: - items: - type: string - type: array - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - targetPortal: - type: string - required: - - iqn - - lun - - targetPortal - type: object - name: - type: string - nfs: - properties: - path: - type: string - readOnly: - type: boolean - server: - type: string - required: - - path - - server - type: object - persistentVolumeClaim: - properties: - claimName: - type: string - readOnly: - type: boolean - required: - - claimName - type: object - photonPersistentDisk: - properties: - fsType: - type: string - pdID: - type: string - required: - - pdID - type: object - portworxVolume: - properties: - fsType: - type: string - readOnly: - type: boolean - volumeID: - type: string - required: - - volumeID - type: object - projected: - properties: - defaultMode: - format: int32 - type: integer - sources: - items: - properties: - configMap: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - downwardAPI: - properties: - items: - items: - properties: - fieldRef: - properties: - apiVersion: - type: string - fieldPath: - type: string - required: - - fieldPath - type: object - mode: - format: int32 - type: integer - path: - type: string - resourceFieldRef: - properties: - containerName: - type: string - divisor: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - type: string - required: - - resource - type: object - required: - - path - type: object - type: array - type: object - secret: - properties: - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - name: - type: string - optional: - type: boolean - type: object - serviceAccountToken: - properties: - audience: - type: string - expirationSeconds: - format: int64 - type: integer - path: - type: string - required: - - path - type: object - type: object - type: array - type: object - quobyte: - properties: - group: - type: string - readOnly: - type: boolean - registry: - type: string - tenant: - type: string - user: - type: string - volume: - type: string - required: - - registry - - volume - type: object - rbd: - properties: - fsType: - type: string - image: - type: string - keyring: - type: string - monitors: - items: - type: string - type: array - pool: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - user: - type: string - required: - - image - - 
monitors - type: object - scaleIO: - properties: - fsType: - type: string - gateway: - type: string - protectionDomain: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - sslEnabled: - type: boolean - storageMode: - type: string - storagePool: - type: string - system: - type: string - volumeName: - type: string - required: - - gateway - - secretRef - - system - type: object - secret: - properties: - defaultMode: - format: int32 - type: integer - items: - items: - properties: - key: - type: string - mode: - format: int32 - type: integer - path: - type: string - required: - - key - - path - type: object - type: array - optional: - type: boolean - secretName: - type: string - type: object - storageos: - properties: - fsType: - type: string - readOnly: - type: boolean - secretRef: - properties: - name: - type: string - type: object - volumeName: - type: string - volumeNamespace: - type: string - type: object - vsphereVolume: - properties: - fsType: - type: string - storagePolicyID: - type: string - storagePolicyName: - type: string - volumePath: - type: string - required: - - volumePath - type: object - required: - - name - type: object - type: array - workflowMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - labelsFrom: - additionalProperties: - properties: - expression: - type: string - required: - - expression - type: object - type: object - type: object - workflowTemplateRef: - properties: - clusterScope: - type: boolean - name: - type: string - type: object - type: object - required: - - metadata - - spec - type: object - served: true - storage: true diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/kustomization.yaml deleted file mode 100644 index e6467d54a5d..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/kustomization.yaml +++ /dev/null @@ -1,11 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- argoproj.io_clusterworkflowtemplates.yaml -- argoproj.io_cronworkflows.yaml -- argoproj.io_workflows.yaml -- argoproj.io_workflowtemplates.yaml -- argoproj.io_workfloweventbindings.yaml -- argoproj.io_workflowtasksets.yaml -- argoproj.io_workflowtaskresults.yaml -- argoproj.io_workflowartifactgctasks.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/kustomization.yaml deleted file mode 100644 index 7f3724b739e..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/kustomization.yaml +++ /dev/null @@ -1,4 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- minimal diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/README.md b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/README.md deleted file mode 100644 index 55f48f08081..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Minimal CRDs - -These CRDs omit schema validation. 
diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_clusterworkflowtemplates.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_clusterworkflowtemplates.yaml deleted file mode 100644 index caddc6288a1..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_clusterworkflowtemplates.yaml +++ /dev/null @@ -1,38 +0,0 @@ -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: # kpt-merge: /clusterworkflowtemplates.argoproj.io - name: clusterworkflowtemplates.argoproj.io - annotations: - internal.kpt.dev/upstream-identifier: 'apiextensions.k8s.io|CustomResourceDefinition|default|clusterworkflowtemplates.argoproj.io' -spec: - group: argoproj.io - names: - kind: ClusterWorkflowTemplate - listKind: ClusterWorkflowTemplateList - plural: clusterworkflowtemplates - shortNames: - - clusterwftmpl - - cwft - singular: clusterworkflowtemplate - scope: Cluster - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - required: - - metadata - - spec - type: object - served: true - storage: true diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_cronworkflows.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_cronworkflows.yaml deleted file mode 100644 index 58f6a9d33c6..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_cronworkflows.yaml +++ /dev/null @@ -1,42 +0,0 @@ -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: # kpt-merge: /cronworkflows.argoproj.io - name: cronworkflows.argoproj.io - annotations: - internal.kpt.dev/upstream-identifier: 'apiextensions.k8s.io|CustomResourceDefinition|default|cronworkflows.argoproj.io' -spec: - group: argoproj.io - names: - kind: CronWorkflow - listKind: CronWorkflowList - plural: cronworkflows - shortNames: - - cwf - - cronwf - singular: cronworkflow - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - status: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - required: - - metadata - - spec - type: object - served: true - storage: true diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflowartifactgctasks.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflowartifactgctasks.yaml deleted file mode 100644 index f82ff035d5f..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflowartifactgctasks.yaml +++ /dev/null @@ -1,43 +0,0 @@ -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: # kpt-merge: /workflowartifactgctasks.argoproj.io - name: workflowartifactgctasks.argoproj.io - annotations: - internal.kpt.dev/upstream-identifier: 'apiextensions.k8s.io|CustomResourceDefinition|default|workflowartifactgctasks.argoproj.io' -spec: - group: argoproj.io - names: - kind: WorkflowArtifactGCTask - listKind: 
WorkflowArtifactGCTaskList - plural: workflowartifactgctasks - shortNames: - - wfat - singular: workflowartifactgctask - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - status: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - required: - - metadata - - spec - type: object - served: true - storage: true - subresources: - status: {} diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workfloweventbindings.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workfloweventbindings.yaml deleted file mode 100644 index 2e81dda97d5..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workfloweventbindings.yaml +++ /dev/null @@ -1,37 +0,0 @@ -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: # kpt-merge: /workfloweventbindings.argoproj.io - name: workfloweventbindings.argoproj.io - annotations: - internal.kpt.dev/upstream-identifier: 'apiextensions.k8s.io|CustomResourceDefinition|default|workfloweventbindings.argoproj.io' -spec: - group: argoproj.io - names: - kind: WorkflowEventBinding - listKind: WorkflowEventBindingList - plural: workfloweventbindings - shortNames: - - wfeb - singular: workfloweventbinding - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - required: - - metadata - - spec - type: object - served: true - storage: true diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflows.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflows.yaml deleted file mode 100644 index a85be3a2194..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflows.yaml +++ /dev/null @@ -1,56 +0,0 @@ -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: # kpt-merge: /workflows.argoproj.io - name: workflows.argoproj.io - annotations: - internal.kpt.dev/upstream-identifier: 'apiextensions.k8s.io|CustomResourceDefinition|default|workflows.argoproj.io' -spec: - group: argoproj.io - names: - kind: Workflow - listKind: WorkflowList - plural: workflows - shortNames: - - wf - singular: workflow - scope: Namespaced - versions: - - additionalPrinterColumns: - - description: Status of the workflow - jsonPath: .status.phase - name: Status - type: string - - description: When the workflow was started - format: date-time - jsonPath: .status.startedAt - name: Age - type: date - - description: Human readable message indicating details about why the workflow is in this condition. 
- jsonPath: .status.message - name: Message - type: string - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - status: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - required: - - metadata - - spec - type: object - served: true - storage: true - subresources: {} diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflowtaskresults.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflowtaskresults.yaml deleted file mode 100644 index 7b4327db9e0..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflowtaskresults.yaml +++ /dev/null @@ -1,599 +0,0 @@ -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: # kpt-merge: /workflowtaskresults.argoproj.io - name: workflowtaskresults.argoproj.io - annotations: - internal.kpt.dev/upstream-identifier: 'apiextensions.k8s.io|CustomResourceDefinition|default|workflowtaskresults.argoproj.io' -spec: - group: argoproj.io - names: - kind: WorkflowTaskResult - listKind: WorkflowTaskResultList - plural: workflowtaskresults - singular: workflowtaskresult - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - message: - type: string - metadata: - type: object - outputs: - properties: - artifacts: - items: - properties: - archive: - properties: - none: - type: object - tar: - properties: - compressionLevel: - format: int32 - type: integer - type: object - zip: - type: object - type: object - archiveLogs: - type: boolean - artifactGC: - properties: - podMetadata: - properties: - annotations: - additionalProperties: - type: string - type: object - labels: - additionalProperties: - type: string - type: object - type: object - serviceAccountName: - type: string - strategy: - enum: - - "" - - OnWorkflowCompletion - - OnWorkflowDeletion - - Never - type: string - type: object - artifactory: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - url: - type: string - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - url - type: object - azure: - properties: - accountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - blob: - type: string - container: - type: string - endpoint: - type: string - useSDKCreds: - type: boolean - required: - - blob - - container - - endpoint - type: object - deleted: - type: boolean - from: - type: string - fromExpression: - type: string - gcs: - properties: - bucket: - type: string - key: - type: string - serviceAccountKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - key - type: object - git: - properties: - branch: - type: string - depth: - format: int64 - type: integer - disableSubmodules: - type: boolean - fetch: - items: - type: string - type: array - insecureIgnoreHostKey: - type: boolean - passwordSecret: - properties: - key: - type: string - name: - type: 
string - optional: - type: boolean - required: - - key - type: object - repo: - type: string - revision: - type: string - singleBranch: - type: boolean - sshPrivateKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - required: - - repo - type: object - globalName: - type: string - hdfs: - properties: - addresses: - items: - type: string - type: array - force: - type: boolean - hdfsUser: - type: string - krbCCacheSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbConfigConfigMap: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbKeytabSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - krbRealm: - type: string - krbServicePrincipalName: - type: string - krbUsername: - type: string - path: - type: string - required: - - path - type: object - http: - properties: - auth: - properties: - basicAuth: - properties: - passwordSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - usernameSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - clientCert: - properties: - clientCertSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - oauth2: - properties: - clientIDSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - clientSecretSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - endpointParams: - items: - properties: - key: - type: string - value: - type: string - required: - - key - type: object - type: array - scopes: - items: - type: string - type: array - tokenURLSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - type: object - headers: - items: - properties: - name: - type: string - value: - type: string - required: - - name - - value - type: object - type: array - url: - type: string - required: - - url - type: object - mode: - format: int32 - type: integer - name: - type: string - optional: - type: boolean - oss: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - createBucketIfNotPresent: - type: boolean - endpoint: - type: string - key: - type: string - lifecycleRule: - properties: - markDeletionAfterDays: - format: int32 - type: integer - markInfrequentAccessAfterDays: - format: int32 - type: integer - type: object - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - securityToken: - type: string - useSDKCreds: - type: boolean - required: - - key - type: object - path: - 
type: string - raw: - properties: - data: - type: string - required: - - data - type: object - recurseMode: - type: boolean - s3: - properties: - accessKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - bucket: - type: string - caSecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - createBucketIfNotPresent: - properties: - objectLocking: - type: boolean - type: object - encryptionOptions: - properties: - enableEncryption: - type: boolean - kmsEncryptionContext: - type: string - kmsKeyId: - type: string - serverSideCustomerKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - type: object - endpoint: - type: string - insecure: - type: boolean - key: - type: string - region: - type: string - roleARN: - type: string - secretKeySecret: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - useSDKCreds: - type: boolean - type: object - subPath: - type: string - required: - - name - type: object - type: array - exitCode: - type: string - parameters: - items: - properties: - default: - type: string - description: - type: string - enum: - items: - type: string - type: array - globalName: - type: string - name: - type: string - value: - type: string - valueFrom: - properties: - configMapKeyRef: - properties: - key: - type: string - name: - type: string - optional: - type: boolean - required: - - key - type: object - default: - type: string - event: - type: string - expression: - type: string - jqFilter: - type: string - jsonPath: - type: string - parameter: - type: string - path: - type: string - supplied: - type: object - type: object - required: - - name - type: object - type: array - result: - type: string - type: object - phase: - type: string - progress: - type: string - required: - - metadata - type: object - served: true - storage: true diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflowtasksets.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflowtasksets.yaml deleted file mode 100644 index a74a9c15447..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflowtasksets.yaml +++ /dev/null @@ -1,43 +0,0 @@ -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: # kpt-merge: /workflowtasksets.argoproj.io - name: workflowtasksets.argoproj.io - annotations: - internal.kpt.dev/upstream-identifier: 'apiextensions.k8s.io|CustomResourceDefinition|default|workflowtasksets.argoproj.io' -spec: - group: argoproj.io - names: - kind: WorkflowTaskSet - listKind: WorkflowTaskSetList - plural: workflowtasksets - shortNames: - - wfts - singular: workflowtaskset - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - status: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - required: - - metadata - - spec - type: object - served: true - storage: true - subresources: - status: {} diff --git 
a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflowtemplates.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflowtemplates.yaml deleted file mode 100644 index 81acc2c16e5..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflowtemplates.yaml +++ /dev/null @@ -1,37 +0,0 @@ -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: # kpt-merge: /workflowtemplates.argoproj.io - name: workflowtemplates.argoproj.io - annotations: - internal.kpt.dev/upstream-identifier: 'apiextensions.k8s.io|CustomResourceDefinition|default|workflowtemplates.argoproj.io' -spec: - group: argoproj.io - names: - kind: WorkflowTemplate - listKind: WorkflowTemplateList - plural: workflowtemplates - shortNames: - - wftmpl - singular: workflowtemplate - scope: Namespaced - versions: - - name: v1alpha1 - schema: - openAPIV3Schema: - properties: - apiVersion: - type: string - kind: - type: string - metadata: - type: object - spec: - type: object - x-kubernetes-map-type: atomic - x-kubernetes-preserve-unknown-fields: true - required: - - metadata - - spec - type: object - served: true - storage: true diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/kustomization.yaml deleted file mode 100644 index e6467d54a5d..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/kustomization.yaml +++ /dev/null @@ -1,11 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- argoproj.io_clusterworkflowtemplates.yaml -- argoproj.io_cronworkflows.yaml -- argoproj.io_workflows.yaml -- argoproj.io_workflowtemplates.yaml -- argoproj.io_workfloweventbindings.yaml -- argoproj.io_workflowtasksets.yaml -- argoproj.io_workflowtaskresults.yaml -- argoproj.io_workflowartifactgctasks.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/kustomization.yaml deleted file mode 100644 index 02094eb9c9d..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/kustomization.yaml +++ /dev/null @@ -1,6 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- crds -- workflow-controller -- argo-server diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/kustomization.yaml deleted file mode 100644 index 10d00a4862a..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/kustomization.yaml +++ /dev/null @@ -1,7 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- workflow-controller-configmap.yaml -- workflow-controller-deployment.yaml -- workflow-controller-sa.yaml -- workflow-controller-priorityclass.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-configmap.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-configmap.yaml deleted file mode 100644 index 82a6988b328..00000000000 --- 
a/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-configmap.yaml +++ /dev/null @@ -1,6 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: # kpt-merge: /workflow-controller-configmap - name: workflow-controller-configmap - annotations: - internal.kpt.dev/upstream-identifier: '|ConfigMap|default|workflow-controller-configmap' diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-deployment.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-deployment.yaml deleted file mode 100644 index 8a3160b80c6..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-deployment.yaml +++ /dev/null @@ -1,53 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: # kpt-merge: /workflow-controller - name: workflow-controller - annotations: - internal.kpt.dev/upstream-identifier: 'apps|Deployment|default|workflow-controller' -spec: - selector: - matchLabels: - app: workflow-controller - template: - metadata: - labels: - app: workflow-controller - spec: - priorityClassName: workflow-controller - serviceAccountName: argo - containers: - - name: workflow-controller - image: quay.io/argoproj/workflow-controller:latest - securityContext: - readOnlyRootFilesystem: true - runAsNonRoot: true - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - command: - - workflow-controller - args: [] - env: - - name: LEADER_ELECTION_IDENTITY - valueFrom: - fieldRef: - apiVersion: v1 - fieldPath: metadata.name - ports: - - name: metrics - containerPort: 9090 - - containerPort: 6060 - livenessProbe: - httpGet: - port: 6060 - path: /healthz - # Require three failures to tolerate transient errors. 
- failureThreshold: 3 - initialDelaySeconds: 90 - periodSeconds: 60 - timeoutSeconds: 30 - securityContext: - runAsNonRoot: true - nodeSelector: - kubernetes.io/os: linux diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-priorityclass.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-priorityclass.yaml deleted file mode 100644 index 9d9364cab34..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-priorityclass.yaml +++ /dev/null @@ -1,7 +0,0 @@ -apiVersion: scheduling.k8s.io/v1 -kind: PriorityClass -metadata: # kpt-merge: /workflow-controller - name: workflow-controller - annotations: - internal.kpt.dev/upstream-identifier: 'scheduling.k8s.io|PriorityClass|default|workflow-controller' -value: 1000000 diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-sa.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-sa.yaml deleted file mode 100644 index b4dcc68be56..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-sa.yaml +++ /dev/null @@ -1,6 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: # kpt-merge: /argo - name: argo - annotations: - internal.kpt.dev/upstream-identifier: '|ServiceAccount|default|argo' diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/argo-server-clusterole.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/argo-server-clusterole.yaml deleted file mode 100644 index a7cedb459ce..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/argo-server-clusterole.yaml +++ /dev/null @@ -1,67 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: # kpt-merge: /argo-server-cluster-role - name: argo-server-cluster-role - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|ClusterRole|default|argo-server-cluster-role' -rules: - - apiGroups: - - "" - resources: - - configmaps - verbs: - - get - - watch - - list - - apiGroups: - - "" - resources: - - secrets - verbs: - - get - - create - - apiGroups: - - "" - resources: - - pods - - pods/exec - - pods/log - verbs: - - get - - list - - watch - - delete - - apiGroups: - - "" - resources: - - events - verbs: - - watch - - create - - patch - - apiGroups: - - "" - resources: - - serviceaccounts - verbs: - - get - - list - - watch - - apiGroups: - - argoproj.io - resources: - - eventsources - - sensors - - workflows - - workfloweventbindings - - workflowtemplates - - cronworkflows - - clusterworkflowtemplates - verbs: - - create - - get - - list - - watch - - update - - patch - - delete diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/argo-server-clusterolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/argo-server-clusterolebinding.yaml deleted file mode 100644 index 1e557d9ba35..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/argo-server-clusterolebinding.yaml +++ /dev/null @@ -1,13 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRoleBinding -metadata: # kpt-merge: /argo-server-binding - 
name: argo-server-binding - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|ClusterRoleBinding|default|argo-server-binding' -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: ClusterRole - name: argo-server-cluster-role -subjects: - - kind: ServiceAccount - name: argo-server diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/kustomization.yaml deleted file mode 100644 index 91d213a3943..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/kustomization.yaml +++ /dev/null @@ -1,5 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- argo-server-clusterole.yaml -- argo-server-clusterolebinding.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/kustomization.yaml deleted file mode 100644 index bfdc9ec83d7..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/kustomization.yaml +++ /dev/null @@ -1,7 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- ../base -- ./workflow-controller-rbac -- ./argo-server-rbac -namespace: argo diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/kustomization.yaml deleted file mode 100644 index ab7574c6ea5..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/kustomization.yaml +++ /dev/null @@ -1,8 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- workflow-aggregate-roles.yaml -- workflow-controller-clusterrole.yaml -- workflow-controller-clusterrolebinding.yaml -- workflow-controller-role.yaml -- workflow-controller-rolebinding.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-aggregate-roles.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-aggregate-roles.yaml deleted file mode 100644 index e8fce6b107f..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-aggregate-roles.yaml +++ /dev/null @@ -1,98 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: # kpt-merge: /argo-aggregate-to-view - name: argo-aggregate-to-view - labels: - rbac.authorization.k8s.io/aggregate-to-view: "true" - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|ClusterRole|default|argo-aggregate-to-view' -rules: -- apiGroups: - - argoproj.io - resources: - - workflows - - workflows/finalizers - - workfloweventbindings - - workfloweventbindings/finalizers - - workflowtemplates - - workflowtemplates/finalizers - - cronworkflows - - cronworkflows/finalizers - - clusterworkflowtemplates - - clusterworkflowtemplates/finalizers - - workflowtaskresults - - workflowtaskresults/finalizers - verbs: - - get - - list - - watch ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: # kpt-merge: /argo-aggregate-to-edit - name: argo-aggregate-to-edit - labels: - 
rbac.authorization.k8s.io/aggregate-to-edit: "true" - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|ClusterRole|default|argo-aggregate-to-edit' -rules: -- apiGroups: - - argoproj.io - resources: - - workflows - - workflows/finalizers - - workfloweventbindings - - workfloweventbindings/finalizers - - workflowtemplates - - workflowtemplates/finalizers - - cronworkflows - - cronworkflows/finalizers - - clusterworkflowtemplates - - clusterworkflowtemplates/finalizers - - workflowtaskresults - - workflowtaskresults/finalizers - verbs: - - create - - delete - - deletecollection - - get - - list - - patch - - update - - watch ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: # kpt-merge: /argo-aggregate-to-admin - name: argo-aggregate-to-admin - labels: - rbac.authorization.k8s.io/aggregate-to-admin: "true" - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|ClusterRole|default|argo-aggregate-to-admin' -rules: -- apiGroups: - - argoproj.io - resources: - - workflows - - workflows/finalizers - - workfloweventbindings - - workfloweventbindings/finalizers - - workflowtemplates - - workflowtemplates/finalizers - - cronworkflows - - cronworkflows/finalizers - - clusterworkflowtemplates - - clusterworkflowtemplates/finalizers - - workflowtasksets - - workflowtasksets/finalizers - - workflowtaskresults - - workflowtaskresults/finalizers - verbs: - - create - - delete - - deletecollection - - get - - list - - patch - - update - - watch diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrole.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrole.yaml deleted file mode 100644 index a65406782ff..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrole.yaml +++ /dev/null @@ -1,115 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: # kpt-merge: /argo-cluster-role - name: argo-cluster-role - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|ClusterRole|default|argo-cluster-role' -rules: -- apiGroups: - - "" - resources: - - pods - - pods/exec - verbs: - - create - - get - - list - - watch - - update - - patch - - delete -- apiGroups: - - "" - resources: - - configmaps - verbs: - - get - - watch - - list -- apiGroups: - - "" - resources: - - persistentvolumeclaims - - persistentvolumeclaims/finalizers - verbs: - - create - - update - - delete - - get -- apiGroups: - - argoproj.io - resources: - - workflows - - workflows/finalizers - - workflowtasksets - - workflowtasksets/finalizers - - workflowartifactgctasks - verbs: - - get - - list - - watch - - update - - patch - - delete - - create -- apiGroups: - - argoproj.io - resources: - - workflowtemplates - - workflowtemplates/finalizers - - clusterworkflowtemplates - - clusterworkflowtemplates/finalizers - verbs: - - get - - list - - watch -- apiGroups: - - argoproj.io - resources: - - workflowtaskresults - verbs: - - list - - watch - - deletecollection -- apiGroups: - - "" - resources: - - serviceaccounts - verbs: - - get - - list -- apiGroups: - - argoproj.io - resources: - - cronworkflows - - cronworkflows/finalizers - verbs: - - get - - list - - watch - - update - - patch - - delete -- apiGroups: - - "" - resources: - - events - verbs: - - create - - patch -- 
apiGroups: - - "policy" - resources: - - poddisruptionbudgets - verbs: - - create - - get - - delete -- apiGroups: - - "" - resources: - - secrets - verbs: - - get - resourceNames: - - argo-workflows-agent-ca-certificates diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrolebinding.yaml deleted file mode 100644 index eff03f75ce8..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrolebinding.yaml +++ /dev/null @@ -1,13 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRoleBinding -metadata: # kpt-merge: /argo-binding - name: argo-binding - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|ClusterRoleBinding|default|argo-binding' -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: ClusterRole - name: argo-cluster-role -subjects: -- kind: ServiceAccount - name: argo diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-role.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-role.yaml deleted file mode 100755 index 116b690d922..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-role.yaml +++ /dev/null @@ -1,21 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: # kpt-merge: /argo-role - name: argo-role - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|Role|default|argo-role' -rules: - - apiGroups: - - coordination.k8s.io - resources: - - leases - verbs: - - create - - get - - update - - apiGroups: - - "" - resources: - - secrets - verbs: - - get diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-rolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-rolebinding.yaml deleted file mode 100644 index 098511fbdd9..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-rolebinding.yaml +++ /dev/null @@ -1,13 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: # kpt-merge: /argo-binding - name: argo-binding - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|RoleBinding|default|argo-binding' -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: argo-role -subjects: - - kind: ServiceAccount - name: argo diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/argo-server-role.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/argo-server-role.yaml deleted file mode 100644 index 826b400898e..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/argo-server-role.yaml +++ /dev/null @@ -1,67 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: # kpt-merge: /argo-server-role - name: argo-server-role - annotations: - internal.kpt.dev/upstream-identifier: 
'rbac.authorization.k8s.io|Role|default|argo-server-role' -rules: - - apiGroups: - - "" - resources: - - configmaps - verbs: - - get - - watch - - list - - apiGroups: - - "" - resources: - - secrets - verbs: - - get - - create - - apiGroups: - - "" - resources: - - pods - - pods/exec - - pods/log - verbs: - - get - - list - - watch - - delete - - apiGroups: - - "" - resources: - - events - verbs: - - watch - - create - - patch - - apiGroups: - - "" - resources: - - serviceaccounts - verbs: - - get - - list - - watch - - apiGroups: - - argoproj.io - resources: - - eventsources - - sensors - - workflows - - workfloweventbindings - - workflowtemplates - - cronworkflows - - cronworkflows/finalizers - verbs: - - create - - get - - list - - watch - - update - - patch - - delete diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/argo-server-rolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/argo-server-rolebinding.yaml deleted file mode 100644 index 6ba7a39e880..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/argo-server-rolebinding.yaml +++ /dev/null @@ -1,13 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: # kpt-merge: /argo-server-binding - name: argo-server-binding - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|RoleBinding|default|argo-server-binding' -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: argo-server-role -subjects: -- kind: ServiceAccount - name: argo-server diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/kustomization.yaml deleted file mode 100644 index c1ca948941f..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/kustomization.yaml +++ /dev/null @@ -1,5 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- argo-server-role.yaml -- argo-server-rolebinding.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/kustomization.yaml deleted file mode 100644 index f45d0e3a50a..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/kustomization.yaml +++ /dev/null @@ -1,19 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- ../base -- ./argo-server-rbac -- ./workflow-controller-rbac -patches: -- path: ./overlays/workflow-controller-deployment.yaml - target: - group: apps - kind: Deployment - name: workflow-controller - version: v1 -- path: ./overlays/argo-server-deployment.yaml - target: - group: apps - kind: Deployment - name: argo-server - version: v1 diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/argo-server-deployment.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/argo-server-deployment.yaml deleted file mode 100644 index 90fd8a53dcb..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/argo-server-deployment.yaml +++ /dev/null @@ -1,3 +0,0 @@ -- op: add - path: /spec/template/spec/containers/0/args/- - value: --namespaced diff 
--git a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/workflow-controller-deployment.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/workflow-controller-deployment.yaml deleted file mode 100644 index 90fd8a53dcb..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/workflow-controller-deployment.yaml +++ /dev/null @@ -1,3 +0,0 @@ -- op: add - path: /spec/template/spec/containers/0/args/- - value: --namespaced diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/kustomization.yaml deleted file mode 100644 index 1f9553fe592..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/kustomization.yaml +++ /dev/null @@ -1,5 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- workflow-controller-role.yaml -- workflow-controller-rolebinding.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/workflow-controller-role.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/workflow-controller-role.yaml deleted file mode 100644 index 04d169892c7..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/workflow-controller-role.yaml +++ /dev/null @@ -1,119 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: # kpt-merge: /argo-role - name: argo-role - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|Role|default|argo-role' -rules: - - apiGroups: - - coordination.k8s.io - resources: - - leases - verbs: - - create - - get - - update - - apiGroups: - - "" - resources: - - pods - - pods/exec - verbs: - - create - - get - - list - - watch - - update - - patch - - delete - - apiGroups: - - "" - resources: - - configmaps - verbs: - - get - - watch - - list - - apiGroups: - - "" - resources: - - persistentvolumeclaims - - persistentvolumeclaims/finalizers - verbs: - - create - - update - - delete - - get - - apiGroups: - - argoproj.io - resources: - - workflows - - workflows/finalizers - - workflowtasksets - - workflowtasksets/finalizers - - workflowartifactgctasks - verbs: - - get - - list - - watch - - update - - patch - - delete - - create - - apiGroups: - - argoproj.io - resources: - - workflowtemplates - - workflowtemplates/finalizers - verbs: - - get - - list - - watch - - apiGroups: - - argoproj.io - resources: - - workflowtaskresults - verbs: - - list - - watch - - deletecollection - - apiGroups: - - "" - resources: - - serviceaccounts - verbs: - - get - - list - - apiGroups: - - "" - resources: - - secrets - verbs: - - get - - apiGroups: - - argoproj.io - resources: - - cronworkflows - - cronworkflows/finalizers - verbs: - - get - - list - - watch - - update - - patch - - delete - - apiGroups: - - "" - resources: - - events - verbs: - - create - - patch - - apiGroups: - - "policy" - resources: - - poddisruptionbudgets - verbs: - - create - - get - - delete diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/workflow-controller-rolebinding.yaml 
b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/workflow-controller-rolebinding.yaml deleted file mode 100644 index f66a5104dd9..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/workflow-controller-rolebinding.yaml +++ /dev/null @@ -1,13 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: # kpt-merge: /argo-binding - name: argo-binding - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|RoleBinding|default|argo-binding' -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: argo-role -subjects: -- kind: ServiceAccount - name: argo diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/agent-default-rolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/agent-default-rolebinding.yaml deleted file mode 100644 index 1e376fada0c..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/agent-default-rolebinding.yaml +++ /dev/null @@ -1,13 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: # kpt-merge: /agent-default - name: agent-default - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|RoleBinding|default|agent-default' -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: agent -subjects: - - kind: ServiceAccount - name: default diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/agent-role.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/agent-role.yaml deleted file mode 100644 index d9be185fe3b..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/agent-role.yaml +++ /dev/null @@ -1,25 +0,0 @@ -# https://argo-workflows.readthedocs.io/en/release-3.5/workflow-rbac/ -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: # kpt-merge: /agent - name: agent - annotations: - workflows.argoproj.io/description: | - This is the minimum recommended permissions needed if you want to use the agent, e.g. for HTTP or plugin templates. - - If <= v3.2 you must replace `workflowtasksets/status` with `patch workflowtasksets`. 
- internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|Role|default|agent' -rules: - - apiGroups: - - argoproj.io - resources: - - workflowtasksets - verbs: - - list - - watch - - apiGroups: - - argoproj.io - resources: - - workflowtasksets/status - verbs: - - patch diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/argo-server-sso-secret.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/argo-server-sso-secret.yaml deleted file mode 100644 index c4e6d5c9a4e..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/argo-server-sso-secret.yaml +++ /dev/null @@ -1,9 +0,0 @@ -kind: Secret -apiVersion: v1 -metadata: # kpt-merge: /argo-server-sso - name: argo-server-sso - annotations: - internal.kpt.dev/upstream-identifier: '|Secret|default|argo-server-sso' -stringData: - clientID: argo-server - clientSecret: ZXhhbXBsZS1hcHAtc2VjcmV0 diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/artifact-repositories-configmap.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/artifact-repositories-configmap.yaml deleted file mode 100644 index c0e4409400f..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/artifact-repositories-configmap.yaml +++ /dev/null @@ -1,35 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: # kpt-merge: /artifact-repositories - name: artifact-repositories - annotations: - # you'll want to change the default over time, e.g. when you move to new storage solution, - # so we recommend you version them from the outset by suffixing the version - workflows.argoproj.io/default-artifact-repository: default-v1 - internal.kpt.dev/upstream-identifier: '|ConfigMap|default|artifact-repositories' -data: - default-v1: | - archiveLogs: true - s3: - bucket: my-bucket - endpoint: minio:9000 - insecure: true - accessKeySecret: - name: my-minio-cred - key: accesskey - secretKeySecret: - name: my-minio-cred - key: secretkey - my-key: | - archiveLogs: true - s3: - bucket: my-bucket - endpoint: minio:9000 - insecure: true - accessKeySecret: - name: my-minio-cred - key: accesskey - secretKeySecret: - name: my-minio-cred - key: secretkey - empty: "" diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/artifactgc-default-rolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/artifactgc-default-rolebinding.yaml deleted file mode 100644 index 892fd4eb753..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/artifactgc-default-rolebinding.yaml +++ /dev/null @@ -1,13 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: # kpt-merge: /artifactgc-default - name: artifactgc-default - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|RoleBinding|default|artifactgc-default' -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: artifactgc -subjects: - - kind: ServiceAccount - name: default diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/artifactgc-role.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/artifactgc-role.yaml deleted file mode 100644 index 8052a2c607a..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/artifactgc-role.yaml +++ /dev/null @@ -1,23 +0,0 @@ -# 
https://argo-workflows.readthedocs.io/en/release-3.5/workflow-rbac/ -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: # kpt-merge: /artifactgc - name: artifactgc - annotations: - workflows.argoproj.io/description: | - This is the minimum recommended permissions needed if you want to use artifact GC. - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|Role|default|artifactgc' -rules: - - apiGroups: - - argoproj.io - resources: - - workflowartifactgctasks - verbs: - - list - - watch - - apiGroups: - - argoproj.io - resources: - - workflowartifactgctasks/status - verbs: - - patch diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/cluster-workflow-template-rbac.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/cluster-workflow-template-rbac.yaml deleted file mode 100644 index 74c53087d62..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/cluster-workflow-template-rbac.yaml +++ /dev/null @@ -1,66 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: # kpt-merge: /argo-server-clusterworkflowtemplate-role - name: argo-server-clusterworkflowtemplate-role - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|ClusterRole|default|argo-server-clusterworkflowtemplate-role' -rules: - - apiGroups: - - argoproj.io - resources: - - clusterworkflowtemplates - - clusterworkflowtemplates/finalizers - verbs: - - create - - delete - - watch - - get - - list - - watch ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: # kpt-merge: /argo-clusterworkflowtemplate-role - name: argo-clusterworkflowtemplate-role - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|ClusterRole|default|argo-clusterworkflowtemplate-role' -rules: - - apiGroups: - - argoproj.io - resources: - - clusterworkflowtemplates - - clusterworkflowtemplates/finalizers - verbs: - - get - - list - - watch ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRoleBinding -metadata: # kpt-merge: /argo-clusterworkflowtemplate-role-binding - name: argo-clusterworkflowtemplate-role-binding - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|ClusterRoleBinding|default|argo-clusterworkflowtemplate-role-binding' -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: ClusterRole - name: argo-clusterworkflowtemplate-role -subjects: - - kind: ServiceAccount - name: argo - namespace: argo ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRoleBinding -metadata: # kpt-merge: /argo-server-clusterworkflowtemplate-role-binding - name: argo-server-clusterworkflowtemplate-role-binding - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|ClusterRoleBinding|default|argo-server-clusterworkflowtemplate-role-binding' -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: ClusterRole - name: argo-server-clusterworkflowtemplate-role -subjects: - - kind: ServiceAccount - name: argo-server - namespace: argo diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/default.service-account-token-secret.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/default.service-account-token-secret.yaml deleted file mode 100644 index c41b7c1253e..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/default.service-account-token-secret.yaml +++ /dev/null @@ -1,8 +0,0 @@ 
-apiVersion: v1 -kind: Secret -metadata: # kpt-merge: /default.service-account-token - name: default.service-account-token - annotations: - kubernetes.io/service-account.name: default - internal.kpt.dev/upstream-identifier: '|Secret|default|default.service-account-token' -type: kubernetes.io/service-account-token diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor-default-rolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor-default-rolebinding.yaml deleted file mode 100644 index 8c06081beb8..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor-default-rolebinding.yaml +++ /dev/null @@ -1,13 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: # kpt-merge: /executor-default - name: executor-default - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|RoleBinding|default|executor-default' -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: executor -subjects: - - kind: ServiceAccount - name: default diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/docker/executor-role.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/docker/executor-role.yaml deleted file mode 100644 index 7fe881deec0..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/docker/executor-role.yaml +++ /dev/null @@ -1,19 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: # kpt-merge: /executor - name: executor - annotations: - workflows.argoproj.io/description: | - Recommended minimum permissions for the `docker` executor. - - This executor is superseded by the `emmisary` executor, so we do not recommend you use it anymore. - workflows.argoproj.io/version: "< 3.4.0" - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|Role|default|executor' -rules: - - apiGroups: - - argoproj.io - resources: - - workflowtaskresults - verbs: - - create - - patch diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/emissary/executor-role.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/emissary/executor-role.yaml deleted file mode 100644 index 99d1bfd462d..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/emissary/executor-role.yaml +++ /dev/null @@ -1,16 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: # kpt-merge: /executor - name: executor - annotations: - workflows.argoproj.io/description: | - Recomended minimum permissions for the `emissary` executor. 
- internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|Role|default|executor' -rules: - - apiGroups: - - argoproj.io - resources: - - workflowtaskresults - verbs: - - create - - patch diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/k8sapi/executor-role.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/k8sapi/executor-role.yaml deleted file mode 100644 index 75dbbab7311..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/k8sapi/executor-role.yaml +++ /dev/null @@ -1,38 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: # kpt-merge: /executor - name: executor - annotations: - workflows.argoproj.io/description: | - Recommended minimum permissions for `k8siapi` executor. - - This executor is superseded by the `emmisary` executor, so we do not recommend you use it anymore. - workflows.argoproj.io/version: "< 3.4.0" - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|Role|default|executor' -rules: - - apiGroups: - - argoproj.io - resources: - - workflowtaskresults - verbs: - - create - - patch - - apiGroups: - - "" - resources: - - pods - verbs: - - get - - watch - - apiGroups: - - "" - resources: - - pods/exec - verbs: - - create - - apiGroups: - - "" - resources: - - pods/log - verbs: - - get diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/kubelet/executor-role.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/kubelet/executor-role.yaml deleted file mode 100644 index f54cb57cf0e..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/kubelet/executor-role.yaml +++ /dev/null @@ -1,19 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: # kpt-merge: /executor - name: executor - annotations: - workflows.argoproj.io/description: | - Recommended minimum permissions for `kubelet` executor. - - This executor is superseded by the `emmisary` executor, so we do not recommend you use it anymore. - workflows.argoproj.io/version: "< 3.4.0" - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|Role|default|executor' -rules: - - apiGroups: - - argoproj.io - resources: - - workflowtaskresults - verbs: - - create - - patch diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/kubelet/kubelet-executor-clusterrole.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/kubelet/kubelet-executor-clusterrole.yaml deleted file mode 100644 index f8c54ae14c5..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/kubelet/kubelet-executor-clusterrole.yaml +++ /dev/null @@ -1,15 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: # kpt-merge: /kubelet-executor - name: kubelet-executor - annotations: - workflows.argoproj.io/version: "< 3.4.0" - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|ClusterRole|default|kubelet-executor' -rules: - # This allows the kubelet executor. 
- - apiGroups: - - "" - resources: - - nodes/proxy - verbs: - - get diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/kubelet/kubelet-executor-default-clusterrolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/kubelet/kubelet-executor-default-clusterrolebinding.yaml deleted file mode 100644 index 150c75739a7..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/kubelet/kubelet-executor-default-clusterrolebinding.yaml +++ /dev/null @@ -1,15 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRoleBinding -metadata: # kpt-merge: /kubelet-executor-default - name: kubelet-executor-default - annotations: - workflows.argoproj.io/version: "< 3.4.0" - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|ClusterRoleBinding|default|kubelet-executor-default' -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: ClusterRole - name: kubelet-executor -subjects: - - kind: ServiceAccount - name: default - namespace: argo diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/pns/executor-role.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/pns/executor-role.yaml deleted file mode 100644 index 895efb69549..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/executor/pns/executor-role.yaml +++ /dev/null @@ -1,29 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: # kpt-merge: /executor - name: executor - annotations: - workflows.argoproj.io/description: | - Recomended minimum permissions for `pns` executor. - workflows.argoproj.io/version: "< 3.4.0" - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|Role|default|executor' -rules: - - apiGroups: - - argoproj.io - resources: - - workflowtaskresults - verbs: - - create - - patch - - apiGroups: - - "" - resources: - - pods - verbs: - - watch - - apiGroups: - - "" - resources: - - pods/log - verbs: - - get diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/httpbin/httpbin-deploy.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/httpbin/httpbin-deploy.yaml deleted file mode 100644 index 03c5119593d..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/httpbin/httpbin-deploy.yaml +++ /dev/null @@ -1,36 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: # kpt-merge: /httpbin - name: httpbin - labels: - app: httpbin - annotations: - internal.kpt.dev/upstream-identifier: 'apps|Deployment|default|httpbin' -spec: - selector: - matchLabels: - app: httpbin - template: - metadata: - labels: - app: httpbin - spec: - automountServiceAccountToken: false - containers: - - name: main - image: kong/httpbin - ports: - - containerPort: 80 - name: api - readinessProbe: - httpGet: - path: /get - port: 80 - initialDelaySeconds: 5 - periodSeconds: 10 - livenessProbe: - httpGet: - path: /get - port: 80 - initialDelaySeconds: 5 - periodSeconds: 10 diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/httpbin/httpbin-service.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/httpbin/httpbin-service.yaml deleted file mode 100644 index 22094428ef4..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/httpbin/httpbin-service.yaml +++ /dev/null @@ -1,16 +0,0 @@ 
-apiVersion: v1 -kind: Service -metadata: # kpt-merge: /httpbin - name: httpbin - labels: - app: httpbin - annotations: - internal.kpt.dev/upstream-identifier: '|Service|default|httpbin' -spec: - selector: - app: httpbin - ports: - - port: 9100 - name: api - protocol: TCP - targetPort: 80 diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/httpbin/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/httpbin/kustomization.yaml deleted file mode 100644 index b5c1f840a8c..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/httpbin/kustomization.yaml +++ /dev/null @@ -1,6 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- httpbin-deploy.yaml -- httpbin-service.yaml -- my-httpbin-cred-secret.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/httpbin/my-httpbin-cred-secret.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/httpbin/my-httpbin-cred-secret.yaml deleted file mode 100644 index 30e09902816..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/httpbin/my-httpbin-cred-secret.yaml +++ /dev/null @@ -1,99 +0,0 @@ -apiVersion: v1 -stringData: - # for basic auth authentication - user: admin - pass: password - # for oauth2 authentication - clientID: admin - clientSecret: password - tokenURL: "http://httpbin:9100/response-headers?access_token=faketoken&token_type=Bearer" # this URL will return a body with the "access_token" field set and can simulate an oauth token flow - # for client cert authentication - cert.pem: | - -----BEGIN CERTIFICATE----- - MIIEmjCCAoICCQDQejieQSZTxzANBgkqhkiG9w0BAQsFADAPMQ0wCwYDVQQDDAR0 - ZXN0MB4XDTIyMDQyNTEzNDc0MloXDTMyMDQyMjEzNDc0MlowDzENMAsGA1UEAwwE - dGVzdDCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMHT/tfskuXizar1 - 5DDrSkaT1cuCdQhEO7b6haxfvfMJPY9sxaxR570bw5TWQzA0xdAeUzSCbRsvxw6b - fEyLD4NajdXtcKocYUUcLclzjgyogTDPqlzAfDVZD25ySOTZ150pQaBuIi6TgnqH - WdJEh9w5//5VZmKyMx49JZMW7ADb9qYxkKVPIan3aNEXOO4SxyjsSekUFefkZOld - /RVZ8nO8hnDQ7r5NXsIIWVh35A94CA8y6QpKL2qiEFW1fofRcr/Fe/Y/5ohBQ1Ur - NMcX87zm9kXX1y6wbp3wn5f1PUa1sCUPlxChmRmPPmr4yIqq0a8C1d71jOIbhkox - 7A30HsP1D3rdxU6eb7KBYb7kShZge1batHRogRe5uX6hGO8iHBV/GdDE6jszoGPU - ejhfwblr6AeR6ImrWmrJ4rAx/jNqcHPuktnMRlLsBzdhqRwelwgnN13O5ZYiEJg4 - X3YYp678kHnc58aOkhG2nM32cIGha4tkoGM/GpDnFAd0P0gyJVwKo2A2Wc4cMlzQ - 7dokXbkkzK6lrHJnJjiOfzjD5yMB1Q1zQXKGHB2hJSWAMTjJ9f6qQd3ZaarYPTLx - vc4WTu+547Sx81Vlnes2xTSgt6pyFSBppHpS7KkOxb+wRF2oIpgLA3mQmsq2c60+ - G8/ro91YAYN+cl+v7m1DyEpD9TW/AgMBAAEwDQYJKoZIhvcNAQELBQADggIBACO7 - 2hU2BSGU66FwpIOihgcaADH0SwokjrEJVXlnMv26JzG/Ja63gTNE5OyghufsJtUi - E7E1gOH+dH6lVOIEmQdgGZazGxye20diLlicBATa5W2IuaBzb8Bq7ap75jOB7/sH - Yh+ZV9w0CWgV7KgzJQsp6KPfpMUXn9aJkRkLlCToCj60tC1agw5wzQcokDhOMJaY - 49FFVoKtVYwN6DfXL5Qi4GUmg7NwMUQAOGD6BQ8VLdbSJoWSHvgR2z5SDIubpdyy - XDe2V6lusdka8jdRsFH+TUKyGubs3c5YVq80A8itavxPXBUM/OJCHhUA1VpL3rvz - VgANVV7XFn5fN5TdTOrgJa2LBjflYBC3KiLf1jiW68ZT2rLDrC0yVdHFY0UJG/du - kWWQpZTfdpGbZOl1rQcYQ3BREWkr5kAv8Sh3sPliibVRvyFzwAqpEUDbpCz/Z3kZ - mRPU1Ukz8gjr5FBwzNn4x/l+80kgM22qXLMgxf7cqSLxH+dylmIieLGU0s1k7BqK - Dw77DP1QZe4G6WwrdGooxSYSBn4joKV4TI9sbyd34HJQnkMch0ugz9dlpZyT1P8Y - 3xU8Qj1BIF8yoyRuzbOokd9cEjNC6N+Z4g5lLEKYM/j1f0r3tGEoZAu2p39UGLa8 - aszMnFjeymK5OCkMUhg/KNr4WK58pc/3uFMhy8bn - -----END CERTIFICATE----- - key.pem: | - -----BEGIN PRIVATE KEY----- - MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQDB0/7X7JLl4s2q - 
9eQw60pGk9XLgnUIRDu2+oWsX73zCT2PbMWsUee9G8OU1kMwNMXQHlM0gm0bL8cO - m3xMiw+DWo3V7XCqHGFFHC3Jc44MqIEwz6pcwHw1WQ9uckjk2dedKUGgbiIuk4J6 - h1nSRIfcOf/+VWZisjMePSWTFuwA2/amMZClTyGp92jRFzjuEsco7EnpFBXn5GTp - Xf0VWfJzvIZw0O6+TV7CCFlYd+QPeAgPMukKSi9qohBVtX6H0XK/xXv2P+aIQUNV - KzTHF/O85vZF19cusG6d8J+X9T1GtbAlD5cQoZkZjz5q+MiKqtGvAtXe9YziG4ZK - MewN9B7D9Q963cVOnm+ygWG+5EoWYHtW2rR0aIEXubl+oRjvIhwVfxnQxOo7M6Bj - 1Ho4X8G5a+gHkeiJq1pqyeKwMf4zanBz7pLZzEZS7Ac3YakcHpcIJzddzuWWIhCY - OF92GKeu/JB53OfGjpIRtpzN9nCBoWuLZKBjPxqQ5xQHdD9IMiVcCqNgNlnOHDJc - 0O3aJF25JMyupaxyZyY4jn84w+cjAdUNc0FyhhwdoSUlgDE4yfX+qkHd2Wmq2D0y - 8b3OFk7vueO0sfNVZZ3rNsU0oLeqchUgaaR6UuypDsW/sERdqCKYCwN5kJrKtnOt - PhvP66PdWAGDfnJfr+5tQ8hKQ/U1vwIDAQABAoICAQCL2aAIv4MGJ2zpq10oBryi - y8v4eHpkqobDcWK9ip8NGl+2em7t9HLWOZAWdboosAsCLL8wJeL/OKvRWFKJD9Tz - m4S3FAi0VKHCMaC/t4aIj5QXWd676Y41F7tQn1kE9kDh/oCBdrVnEbuVGM+wLQ4x - 0g9ovMmQ8K59ZPUVefZycEM4io6pF71cW0zfgHftHtNgLYzuhTWBCYPd9ZjDrRCI - fUArajS4Ti7OpSOB948vshVukfcfG4O21pQeo0NWT8MRpzXX6Sc2rJAehXwhIqEU - bTjIEAIMh/RoNNOR2rqJqFIdi3Ad6dsDXB1XJYXct39vXQZfRqCOC/oK0pZVQwxm - aMbb6VzMjE/paHcBLKorvSIEpuAkgesUkqJeMPxhVnVG6Tg5Xl0WM0pCh/mfir6i - gFGz/xXb0h8pj9Ksk6QpTOTqDf9JAHCuhp9hnuUR+wpnfKyOfOoDXfAyKjHR0bXz - XF9DhycErHDY4CWlhFiu8+qzrtR/sZ/AIp2MfjOzBZYoq7Zj2Z3yXDsvr5fpXUW8 - EU+ClnE/dgRBj5z1sKEQd471+R7PU3Q5apw3YlQZClsaaciTIeWOMOwBjxm9PbZL - CX9BzYaobVAy19ib+/7JgqNxsZ/3gL2xBQU1JoKeY2GnAyyyr8arLZaFR/CUGYyV - SWOdWwLxgThXIJofA3c5QQKCAQEA701sUqcRN8W/m6nCcav8I2EMU/4h18J3bk88 - NbK8vCsDvvFl/2EcjU/6iKcuxhNg1CjHD96H42FeXVXApkdnf2pF24nJHW8M18yH - uwPNzIDnLn9LSN6nJsgyo5LuVCXhf2C4UImv9P3Ae1meI/ApBJsad/bAY8MMHwtS - G/ph/yzhbAb2xF4oJwgOXBm0G2c9sfA0OlHSvYM/kvsQE6770BQ5S1ltrfIv++4J - qydiJ0Hq0RFM4aHCCi02cWp+43ALhh3EAPHN3ANpmV1IQKqyAeRFX1sqQuqpryQs - wHQxdF9FLCXHwaF8JOwheu9MTclUZdrkIRf2xac2qdFIszxCkQKCAQEAz1pHtm+f - HYJdOT3XKKgri4/OPZ7nzb1NcT34NbOPKmhRi38ZxutKrFTy+DCtpjUwF4YlE7oa - r13upHaxgxHMa9cpLN+69hmT2Pg2oMBvfEIkAiDKt4dcFQBDDKw98lpXNIStsCDp - nRcToI2TO1AMJNWCulAONov9vGggjS7mxt76cQ2QZH4k6W4yYDcC6g311vR+obA9 - MwJxZfuESw1CLzvE8Ua0esQnXQzpwECC05Q6oObeJ/44huQF7R2MP5iEmDLkgYjj - G5cmHAdD3u0Ceol3zFqF0YDxcfuglMvpmdBpjNj2rl093ufziy84iVTXJ50CRceS - e17et+3kKNF7TwKCAQBJpEHZjaA20ZwNg0hbQtns6Uip8GLpyuaGA8w7mi2KmpIk - iJUi6fenZR1sQEacngoGQCZCM/ENgEFR57nJcd/fzgyBav2BGVOSdVavrpP+gwyh - unqoihxWSvWKcQT20FF8qX8PCdAkTJKXYxTPanC1AiY7FKxQBw4L36f9BCh0JpOY - cuwtsewZVtlUbnSGmlbaE1l/OP7pYyKFUM25wPetKQwYrAScqxMpLC+9g/koq5hf - jjtilCzqhM9kR6mUxD5Hn5FZ2Q/IzSQKFjLN87mj62ON3Lg8r4pYY4GCGD+/2DGp - TFcUt2VE14XWFx4cMgDO93WM2ZsPaE3iJI2C2uCBAoIBADGmr5da4SICzmnfif7d - ThgMJlmRDHayhrHAIghR581Cz4v0smp0exwK92dA2MP85ngrkgNIRA2ME5HkLhtx - jp6gFeb959n4Q/Pnc8VIbym0+MRdr80Ep6MLvgJx2B+JTGpx/tk2+Fm6ZePDIudI - ArBrQ/NzKgQbv3V3BZxpB6/FQvkBQ3sczZ/r2Do70gHTt/Nx9kSnW/Az/I1sDcCe - +yMuT7lqsdrXz4kzh2GW0Pzy+JsAzV+MO2LphRXDRosP7Wg4f4kZCzDXH7QEdVcT - L83BzyLq5jJFiws9MrWOonBHfI7SgTc9coxGxIWmmAYif6anrRyibkwGapRmbYTs - rHcCggEATsKrZHJkZIfxVdw1uELZxDssxtSd3KS09xN2aypGPdSvWg2Di3NbQsNt - 4xSljnjWsYLZpxKYv3dUOOJIiIFGxVCPNCF1vL3ofqrBelXF3AAICa+ktzPQqBDI - eGPj1/h/HodY2pVHVyhZmFFsFy8We/wD64QRx4xI0w9xFAt0qmKVMoCsJmdrXGcO - kYqZnhkq3OyCswrk78OvFcB2Wnk7SWH2tYhBhOqFv8uPojaiRLOb/6xZaZOA9TPi - 0mpJScl+pVxs1UGShVH74lIvhPaPq0AHgK1y1yYphKc1A07l2z0+S1tSYOvdQY8k - NuJLvtwCMGDCxhdYm7OrJ0aUfZzP6w== - -----END PRIVATE KEY----- -kind: Secret -metadata: # kpt-merge: /my-httpbin-cred - name: my-httpbin-cred - labels: - app: httpbin - annotations: - internal.kpt.dev/upstream-identifier: '|Secret|default|my-httpbin-cred' -type: Opaque diff --git 
a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/kustomization.yaml deleted file mode 100644 index 8560cf9a139..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/kustomization.yaml +++ /dev/null @@ -1,24 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- ../../cluster-install -- minio -- httpbin -- webhooks -- default.service-account-token-secret.yaml -- argo-server-sso-secret.yaml -- executor/emissary/executor-role.yaml -- executor-default-rolebinding.yaml -- pod-manager-role.yaml -- pod-manager-default-rolebinding.yaml -- workflow-manager-role.yaml -- workflow-manager-default-rolebinding.yaml -- agent-role.yaml -- agent-default-rolebinding.yaml -- artifactgc-role.yaml -- artifactgc-default-rolebinding.yaml -- cluster-workflow-template-rbac.yaml -- artifact-repositories-configmap.yaml -patches: -- path: overlays/workflow-controller-configmap.yaml -- path: overlays/argo-server-deployment.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/memoizer-default-rolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/memoizer-default-rolebinding.yaml deleted file mode 100644 index 7fbe2bbb4c8..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/memoizer-default-rolebinding.yaml +++ /dev/null @@ -1,13 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: # kpt-merge: /memoizer-default - name: memoizer-default - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|RoleBinding|default|memoizer-default' -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: memoizer -subjects: - - kind: ServiceAccount - name: default diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/memoizer-role.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/memoizer-role.yaml deleted file mode 100644 index 7b71bf6e989..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/memoizer-role.yaml +++ /dev/null @@ -1,17 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: # kpt-merge: /memoizer - name: memoizer - annotations: - workflows.argoproj.io/description: | - Only needed if you are using ConfigMap-based cache for memoization. 
- internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|Role|default|memoizer' -rules: - - apiGroups: - - "" - resources: - - configmaps - verbs: - - create - - get - - update diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/kustomization.yaml deleted file mode 100644 index 6b8e240e748..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/kustomization.yaml +++ /dev/null @@ -1,6 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- minio-deploy.yaml -- minio-service.yaml -- my-minio-cred-secret.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/minio-deploy.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/minio-deploy.yaml deleted file mode 100644 index 6d3d79ca87e..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/minio-deploy.yaml +++ /dev/null @@ -1,48 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: # kpt-merge: /minio - name: minio - labels: - app: minio - annotations: - internal.kpt.dev/upstream-identifier: 'apps|Deployment|default|minio' -spec: - selector: - matchLabels: - app: minio - template: - metadata: - labels: - app: minio - spec: - automountServiceAccountToken: false - containers: - - name: main - image: minio/minio:RELEASE.2022-11-17T23-20-09Z - env: - - name: MINIO_ACCESS_KEY - value: admin - - name: MINIO_SECRET_KEY - value: password - ports: - - containerPort: 9000 - name: api - - containerPort: 9001 - name: dashboard - command: [minio, server, --console-address, ":9001", /data] - lifecycle: - postStart: - exec: - command: [mkdir, -p, /data/my-bucket] - readinessProbe: - httpGet: - path: /minio/health/ready - port: 9000 - initialDelaySeconds: 5 - periodSeconds: 10 - livenessProbe: - httpGet: - path: /minio/health/live - port: 9000 - initialDelaySeconds: 5 - periodSeconds: 10 diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/minio-service.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/minio-service.yaml deleted file mode 100644 index 37437d1ab10..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/minio-service.yaml +++ /dev/null @@ -1,20 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: # kpt-merge: /minio - name: minio - labels: - app: minio - annotations: - internal.kpt.dev/upstream-identifier: '|Service|default|minio' -spec: - selector: - app: minio - ports: - - port: 9000 - name: api - protocol: TCP - targetPort: 9000 - - port: 9001 - name: dashboard - protocol: TCP - targetPort: 9001 diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/my-minio-cred-secret.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/my-minio-cred-secret.yaml deleted file mode 100644 index a8b92a6ba1b..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/my-minio-cred-secret.yaml +++ /dev/null @@ -1,12 +0,0 @@ -apiVersion: v1 -stringData: - accesskey: admin - secretkey: password -kind: Secret -metadata: # kpt-merge: /my-minio-cred - name: my-minio-cred - labels: - app: minio - annotations: - internal.kpt.dev/upstream-identifier: '|Secret|default|my-minio-cred' -type: Opaque diff --git 
a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/overlays/argo-server-deployment.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/overlays/argo-server-deployment.yaml deleted file mode 100644 index f76326f1f7b..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/overlays/argo-server-deployment.yaml +++ /dev/null @@ -1,17 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: # kpt-merge: /argo-server - name: argo-server - annotations: - internal.kpt.dev/upstream-identifier: 'apps|Deployment|default|argo-server' -spec: - template: - spec: - containers: - - name: argo-server - args: - - server - - --auth-mode - - server - - --auth-mode - - client diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/overlays/workflow-controller-configmap.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/overlays/workflow-controller-configmap.yaml deleted file mode 100644 index 2a518382636..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/overlays/workflow-controller-configmap.yaml +++ /dev/null @@ -1,54 +0,0 @@ -apiVersion: v1 -data: - executor: | - resources: - requests: - cpu: 10m - memory: 64Mi - images: | - docker/whalesay:latest: - cmd: [cowsay] - artifactRepository: | - s3: - bucket: my-bucket - endpoint: minio:9000 - insecure: true - accessKeySecret: - name: my-minio-cred - key: accesskey - secretKeySecret: - name: my-minio-cred - key: secretkey - metricsConfig: | - enabled: true - path: /metrics - port: 9090 - namespaceParallelism: "10" - links: | - - name: Workflow Link - scope: workflow - url: http://logging-facility?namespace=${metadata.namespace}&workflowName=${metadata.name}&startedAt=${status.startedAt}&finishedAt=${status.finishedAt} - - name: Pod Link - scope: pod - url: http://logging-facility?namespace=${metadata.namespace}&podName=${metadata.name}&startedAt=${status.startedAt}&finishedAt=${status.finishedAt} - - name: Pod Logs Link - scope: pod-logs - url: http://logging-facility?namespace=${metadata.namespace}&podName=${metadata.name}&startedAt=${status.startedAt}&finishedAt=${status.finishedAt} - - name: Event Source Logs Link - scope: event-source-logs - url: http://logging-facility?namespace=${metadata.namespace}&podName=${metadata.name}&startedAt=${status.startedAt}&finishedAt=${status.finishedAt} - - name: Sensor Logs Link - scope: sensor-logs - url: http://logging-facility?namespace=${metadata.namespace}&podName=${metadata.name}&startedAt=${status.startedAt}&finishedAt=${status.finishedAt} - - name: Completed Workflows - scope: workflow-list - url: http://workflows?label=workflows.argoproj.io/completed=true - columns: | - - name: Workflow Completed - type: label - key: workflows.argoproj.io/completed -kind: ConfigMap -metadata: # kpt-merge: /workflow-controller-configmap - name: workflow-controller-configmap - annotations: - internal.kpt.dev/upstream-identifier: '|ConfigMap|default|workflow-controller-configmap' diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/pod-manager-default-rolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/pod-manager-default-rolebinding.yaml deleted file mode 100644 index 918bfa4b69c..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/pod-manager-default-rolebinding.yaml +++ /dev/null @@ -1,13 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 
-kind: RoleBinding -metadata: # kpt-merge: /pod-manager-default - name: pod-manager-default - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|RoleBinding|default|pod-manager-default' -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: pod-manager -subjects: - - kind: ServiceAccount - name: default diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/pod-manager-role.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/pod-manager-role.yaml deleted file mode 100644 index b360a0fc5ba..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/pod-manager-role.yaml +++ /dev/null @@ -1,18 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: # kpt-merge: /pod-manager - name: pod-manager - annotations: - workflows.argoproj.io/description: | - This is an example of the permissions you would need if you wanted to use a resource template to create and manage - other pods. The same pattern would be suitable for other resurces, e.g. a service - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|Role|default|pod-manager' -rules: - - apiGroups: - - "" - resources: - - pods - verbs: - - create - - get - - patch diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/kustomization.yaml deleted file mode 100644 index 17eb100d1ac..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/kustomization.yaml +++ /dev/null @@ -1,6 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- prometheus-deployment.yaml -- prometheus-config-cluster.yaml -- prometheus-service.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-config-cluster.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-config-cluster.yaml deleted file mode 100644 index 12fd006ba87..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-config-cluster.yaml +++ /dev/null @@ -1,14 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: # kpt-merge: /prometheus-config - name: prometheus-config - annotations: - internal.kpt.dev/upstream-identifier: '|ConfigMap|default|prometheus-config' -data: - prometheus.yaml: | - global: - scrape_interval: 15s - scrape_configs: - - job_name: 'argo' - static_configs: - - targets: ['workflow-controller-metrics:9090', 'argo-server:2746'] diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-deployment.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-deployment.yaml deleted file mode 100644 index 7aa9858041d..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-deployment.yaml +++ /dev/null @@ -1,37 +0,0 @@ -# This manifests creates a minimal Prometheus server to scrape and display the metrics emitted by the workflow -# controller. To open this server, create an external IP for the prometheus service or use kubectl port-forward, -# then open: -# -# localhost:9091/graph -# -# Note: this assumes the workflow-controller is emitting metrics in the default port (9090). 
This will need to -# be modified if the default is overridden. -apiVersion: apps/v1 -kind: Deployment -metadata: # kpt-merge: /prometheus - name: prometheus - annotations: - internal.kpt.dev/upstream-identifier: 'apps|Deployment|default|prometheus' -spec: - replicas: 1 - selector: - matchLabels: - app: prometheus - template: - metadata: - labels: - app: prometheus - name: prometheus - spec: - containers: - - name: prometheus - image: prom/prometheus - args: - - --config.file=/config/prometheus.yaml - volumeMounts: - - name: config - mountPath: /config - volumes: - - name: config - configMap: - name: prometheus-config diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-service.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-service.yaml deleted file mode 100644 index 018bdcf86c4..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-service.yaml +++ /dev/null @@ -1,12 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: # kpt-merge: /prometheus - name: prometheus - annotations: - internal.kpt.dev/upstream-identifier: '|Service|default|prometheus' -spec: - selector: - app: prometheus - ports: - - name: metrics - port: 9090 diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/argo-workflows-webhook-clients-secret.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/argo-workflows-webhook-clients-secret.yaml deleted file mode 100644 index 7de2408d10d..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/argo-workflows-webhook-clients-secret.yaml +++ /dev/null @@ -1,24 +0,0 @@ -kind: Secret -apiVersion: v1 -metadata: # kpt-merge: /argo-workflows-webhook-clients - name: argo-workflows-webhook-clients - annotations: - internal.kpt.dev/upstream-identifier: '|Secret|default|argo-workflows-webhook-clients' -# The data keys must be the name of a service account. -stringData: - # https://support.atlassian.com/bitbucket-cloud/docs/manage-webhooks/ - bitbucket.org: | - type: bitbucket - secret: "my-uuid" - # https://confluence.atlassian.com/bitbucketserver/managing-webhooks-in-bitbucket-server-938025878.html - bitbucketserver: | - type: bitbucketserver - secret: "shh!" - # https://developer.github.com/webhooks/securing/ - github.com: | - type: github - secret: "shh!" - # https://docs.gitlab.com/ee/user/project/integrations/webhooks.html - gitlab.com: |- - type: gitlab - secret: "shh!" 
diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/github.com-rolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/github.com-rolebinding.yaml deleted file mode 100644 index 93e3d44605e..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/github.com-rolebinding.yaml +++ /dev/null @@ -1,14 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: # kpt-merge: /github.com - name: github.com - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|RoleBinding|default|github.com' -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: submit-workflow-template -subjects: - - kind: ServiceAccount - name: github.com - namespace: argo diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/github.com-sa.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/github.com-sa.yaml deleted file mode 100644 index d3d3f66f37e..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/github.com-sa.yaml +++ /dev/null @@ -1,6 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: # kpt-merge: /github.com - name: github.com - annotations: - internal.kpt.dev/upstream-identifier: '|ServiceAccount|default|github.com' diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/github.com-secret.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/github.com-secret.yaml deleted file mode 100644 index 8f51e57bf52..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/github.com-secret.yaml +++ /dev/null @@ -1,8 +0,0 @@ -apiVersion: v1 -kind: Secret -metadata: # kpt-merge: /github.com.service-account-token - name: github.com.service-account-token - annotations: - kubernetes.io/service-account.name: github.com - internal.kpt.dev/upstream-identifier: '|Secret|default|github.com.service-account-token' -type: kubernetes.io/service-account-token diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/kustomization.yaml deleted file mode 100644 index 07699b46631..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/kustomization.yaml +++ /dev/null @@ -1,8 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- submit-workflow-template-role.yaml -- github.com-sa.yaml -- github.com-secret.yaml -- github.com-rolebinding.yaml -- argo-workflows-webhook-clients-secret.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/submit-workflow-template-role.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/submit-workflow-template-role.yaml deleted file mode 100644 index 1efa8aa4d5d..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/submit-workflow-template-role.yaml +++ /dev/null @@ -1,27 +0,0 @@ -# Just enough permissions to submit a workflow template. 
-# You could tighten this further (but perhaps impractically) by using `resourceNames` -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: # kpt-merge: /submit-workflow-template - name: submit-workflow-template - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|Role|default|submit-workflow-template' -rules: - - apiGroups: - - argoproj.io - resources: - - workfloweventbindings - verbs: - - list - - apiGroups: - - argoproj.io - resources: - - workflowtemplates - verbs: - - get - - apiGroups: - - argoproj.io - resources: - - workflows - verbs: - - create diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/workflow-default-rolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/workflow-default-rolebinding.yaml deleted file mode 100644 index 7d3489e5924..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/workflow-default-rolebinding.yaml +++ /dev/null @@ -1,13 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: # kpt-merge: /workflow-default-binding - name: workflow-default-binding - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|RoleBinding|default|workflow-default-binding' -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: workflow -subjects: - - kind: ServiceAccount - name: default diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/workflow-manager-default-rolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/workflow-manager-default-rolebinding.yaml deleted file mode 100644 index b7ee0062a71..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/workflow-manager-default-rolebinding.yaml +++ /dev/null @@ -1,13 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: # kpt-merge: /workflow-manager-default - name: workflow-manager-default - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|RoleBinding|default|workflow-manager-default' -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: workflow-manager -subjects: - - kind: ServiceAccount - name: default diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/workflow-manager-role.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/workflow-manager-role.yaml deleted file mode 100644 index a91263929b7..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/workflow-manager-role.yaml +++ /dev/null @@ -1,17 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: # kpt-merge: /workflow-manager - name: workflow-manager - annotations: - workflows.argoproj.io/description: | - This is an example of the permissions you would need if you wanted to use a resource template to create and manage - other workflows. The same pattern would be suitable for other resurces, e.g. 
a service - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|Role|default|workflow-manager' -rules: - - apiGroups: - - argoproj.io - resources: - - workflows - verbs: - - create - - get diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/minimal/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/minimal/kustomization.yaml deleted file mode 100644 index 8a924f2add3..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/minimal/kustomization.yaml +++ /dev/null @@ -1,6 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- ../base -patches: -- path: overlays/workflow-controller-configmap.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/minimal/overlays/workflow-controller-configmap.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/minimal/overlays/workflow-controller-configmap.yaml deleted file mode 100644 index f3030c62535..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/minimal/overlays/workflow-controller-configmap.yaml +++ /dev/null @@ -1,11 +0,0 @@ -apiVersion: v1 -data: - retentionPolicy: | - completed: 10 - failed: 3 - errored: 3 -kind: ConfigMap -metadata: # kpt-merge: /workflow-controller-configmap - name: workflow-controller-configmap - annotations: - internal.kpt.dev/upstream-identifier: '|ConfigMap|default|workflow-controller-configmap' diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/argo-mysql-config-secret.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/argo-mysql-config-secret.yaml deleted file mode 100644 index 923e7357e6b..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/argo-mysql-config-secret.yaml +++ /dev/null @@ -1,12 +0,0 @@ -apiVersion: v1 -stringData: - username: mysql - password: password -kind: Secret -metadata: # kpt-merge: /argo-mysql-config - name: argo-mysql-config - labels: - app: mysql - annotations: - internal.kpt.dev/upstream-identifier: '|Secret|default|argo-mysql-config' -type: Opaque diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/kustomization.yaml deleted file mode 100644 index b6979aec7dd..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/kustomization.yaml +++ /dev/null @@ -1,9 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- ../base -- argo-mysql-config-secret.yaml -- mysql-deployment.yaml -- mysql-service.yaml -patches: -- path: overlays/workflow-controller-configmap.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/mysql-deployment.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/mysql-deployment.yaml deleted file mode 100644 index c724cca3372..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/mysql-deployment.yaml +++ /dev/null @@ -1,40 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: # kpt-merge: /mysql - name: mysql - labels: - app: mysql - annotations: - internal.kpt.dev/upstream-identifier: 'apps|Deployment|default|mysql' -spec: - selector: - matchLabels: - app: mysql - template: - metadata: - name: mysql - labels: - app: mysql - spec: - 
automountServiceAccountToken: false - containers: - - name: main - image: mysql:8 - env: - - name: MYSQL_USER - value: mysql - - name: MYSQL_PASSWORD - value: password - - name: MYSQL_DATABASE - value: argo - - name: MYSQL_RANDOM_ROOT_PASSWORD - value: "yes" - ports: - - containerPort: 3306 - readinessProbe: - tcpSocket: - port: 3306 - initialDelaySeconds: 30 - periodSeconds: 10 - nodeSelector: - kubernetes.io/os: linux diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/mysql-service.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/mysql-service.yaml deleted file mode 100644 index 4a9c3cf0222..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/mysql-service.yaml +++ /dev/null @@ -1,15 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: # kpt-merge: /mysql - name: mysql - labels: - app: mysql - annotations: - internal.kpt.dev/upstream-identifier: '|Service|default|mysql' -spec: - selector: - app: mysql - ports: - - protocol: TCP - port: 3306 - targetPort: 3306 diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/overlays/workflow-controller-configmap.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/overlays/workflow-controller-configmap.yaml deleted file mode 100644 index d8ef893f33d..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/overlays/workflow-controller-configmap.yaml +++ /dev/null @@ -1,30 +0,0 @@ -apiVersion: v1 -data: - persistence: | - connectionPool: - maxIdleConns: 100 - maxOpenConns: 0 - connMaxLifetime: 0s - nodeStatusOffLoad: true - archive: true - archiveTTL: 7d - mysql: - host: mysql - port: 3306 - database: argo - tableName: argo_workflows - userNameSecret: - name: argo-mysql-config - key: username - passwordSecret: - name: argo-mysql-config - key: password - retentionPolicy: | - completed: 10 - failed: 3 - errored: 3 -kind: ConfigMap -metadata: # kpt-merge: /workflow-controller-configmap - name: workflow-controller-configmap - annotations: - internal.kpt.dev/upstream-identifier: '|ConfigMap|default|workflow-controller-configmap' diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/argo-postgres-config-secret.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/argo-postgres-config-secret.yaml deleted file mode 100644 index 26786751c4c..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/argo-postgres-config-secret.yaml +++ /dev/null @@ -1,12 +0,0 @@ -apiVersion: v1 -stringData: - username: postgres - password: password -kind: Secret -metadata: # kpt-merge: /argo-postgres-config - name: argo-postgres-config - labels: - app: postgres - annotations: - internal.kpt.dev/upstream-identifier: '|Secret|default|argo-postgres-config' -type: Opaque diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/kustomization.yaml deleted file mode 100644 index 669aa227346..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/kustomization.yaml +++ /dev/null @@ -1,9 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- ../base -- argo-postgres-config-secret.yaml -- postgres-deployment.yaml -- postgres-service.yaml -patches: -- path: 
overlays/workflow-controller-configmap.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/overlays/workflow-controller-configmap.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/overlays/workflow-controller-configmap.yaml deleted file mode 100644 index 593f0a96401..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/overlays/workflow-controller-configmap.yaml +++ /dev/null @@ -1,30 +0,0 @@ -apiVersion: v1 -data: - persistence: | - connectionPool: - maxIdleConns: 100 - maxOpenConns: 0 - connMaxLifetime: 0s - nodeStatusOffLoad: true - archive: true - archiveTTL: 7d - postgresql: - host: postgres - port: 5432 - database: postgres - tableName: argo_workflows - userNameSecret: - name: argo-postgres-config - key: username - passwordSecret: - name: argo-postgres-config - key: password - retentionPolicy: | - completed: 10 - failed: 3 - errored: 3 -kind: ConfigMap -metadata: # kpt-merge: /workflow-controller-configmap - name: workflow-controller-configmap - annotations: - internal.kpt.dev/upstream-identifier: '|ConfigMap|default|workflow-controller-configmap' diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/postgres-deployment.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/postgres-deployment.yaml deleted file mode 100644 index ad54bc0506a..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/postgres-deployment.yaml +++ /dev/null @@ -1,33 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: # kpt-merge: /postgres - name: postgres - labels: - app: postgres - annotations: - internal.kpt.dev/upstream-identifier: 'apps|Deployment|default|postgres' -spec: - selector: - matchLabels: - app: postgres - template: - metadata: - name: postgres - labels: - app: postgres - spec: - containers: - - name: main - image: postgres:12-alpine - env: - - name: POSTGRES_PASSWORD - value: password - ports: - - containerPort: 5432 - readinessProbe: - exec: - command: ["psql", "-U", "postgres", "-c", "SELECT 1"] - initialDelaySeconds: 15 - timeoutSeconds: 2 - nodeSelector: - kubernetes.io/os: linux diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/postgres-service.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/postgres-service.yaml deleted file mode 100644 index a7c913b1a3d..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/postgres-service.yaml +++ /dev/null @@ -1,15 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: # kpt-merge: /postgres - name: postgres - labels: - app: postgres - annotations: - internal.kpt.dev/upstream-identifier: '|Service|default|postgres' -spec: - selector: - app: postgres - ports: - - protocol: TCP - port: 5432 - targetPort: 5432 diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dev-svc.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dev-svc.yaml deleted file mode 100644 index 2b1a9c89eb6..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dev-svc.yaml +++ /dev/null @@ -1,12 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: # kpt-merge: /dex - name: dex - annotations: - internal.kpt.dev/upstream-identifier: '|Service|default|dex' -spec: - ports: - - name: http - port: 5556 - selector: - app: dex diff --git 
a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-cm.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-cm.yaml deleted file mode 100644 index 767890f48f7..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-cm.yaml +++ /dev/null @@ -1,36 +0,0 @@ -apiVersion: v1 -# this contain secret data, so do not use in production -data: - # https://github.com/dexidp/dex/blob/master/examples/config-dev.yaml - config.yaml: | - issuer: http://dex:5556/dex - storage: - type: sqlite3 - config: - file: ":memory:" - web: - http: 0.0.0.0:5556 - logger: - level: debug - staticClients: - - id: argo-server - redirectURIs: - - http://localhost:2746/oauth2/callback - - http://localhost:8080/oauth2/callback - name: Argo Server - secret: ZXhhbXBsZS1hcHAtc2VjcmV0 - connectors: - - type: mockCallback - id: mock - name: Example - enablePasswordDB: true - staticPasswords: - - email: admin@example.com - hash: $2a$10$2b2cU8CPhOTaGrs1HRQuAueS7JTT5ZHsHSzYiFPm1leZck7Mc8T4W - username: admin - userID: 08a8684b-db88-4b73-90a9-3cd1661f5466 -kind: ConfigMap -metadata: # kpt-merge: /dex - name: dex - annotations: - internal.kpt.dev/upstream-identifier: '|ConfigMap|default|dex' diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-deploy.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-deploy.yaml deleted file mode 100644 index 327d8468059..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-deploy.yaml +++ /dev/null @@ -1,35 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: # kpt-merge: /dex - labels: - app: dex - name: dex - annotations: - internal.kpt.dev/upstream-identifier: 'apps|Deployment|default|dex' -spec: - selector: - matchLabels: - app: dex - template: - metadata: - labels: - app: dex - spec: - serviceAccountName: dex - containers: - - name: dex - image: ghcr.io/dexidp/dex:v2.35.0 - args: - - dex - - serve - - /data/config.yaml - ports: - - name: http - containerPort: 5556 - volumeMounts: - - mountPath: /data - name: config - volumes: - - name: config - configMap: - name: dex diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-rb.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-rb.yaml deleted file mode 100644 index a30ccf299d2..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-rb.yaml +++ /dev/null @@ -1,13 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: # kpt-merge: /dex - name: dex - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|RoleBinding|default|dex' -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: dex -subjects: - - kind: ServiceAccount - name: dex diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-role.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-role.yaml deleted file mode 100644 index 45cf8782242..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-role.yaml +++ /dev/null @@ -1,16 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: # kpt-merge: /dex - name: dex - annotations: - internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|Role|default|dex' -rules: -- apiGroups: - - "" - resources: - - secrets 
- - configmaps - verbs: - - get - - list - - watch diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-sa.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-sa.yaml deleted file mode 100644 index c47359e0db4..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-sa.yaml +++ /dev/null @@ -1,6 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: # kpt-merge: /dex - name: dex - annotations: - internal.kpt.dev/upstream-identifier: '|ServiceAccount|default|dex' diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/kustomization.yaml deleted file mode 100644 index 045eb101972..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/kustomization.yaml +++ /dev/null @@ -1,13 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -labels: -- includeSelectors: true - pairs: - app.kubernetes.io/part-of: dex -resources: -- dex-cm.yaml -- dex-role.yaml -- dex-sa.yaml -- dex-rb.yaml -- dex-deploy.yaml -- dev-svc.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/kustomization.yaml deleted file mode 100644 index 12cf0b3e170..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/kustomization.yaml +++ /dev/null @@ -1,8 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- ../base -- dex -patches: -- path: overlays/workflow-controller-configmap.yaml -- path: overlays/argo-server-sa.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/overlays/argo-server-sa.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/overlays/argo-server-sa.yaml deleted file mode 100644 index 386593983ab..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/overlays/argo-server-sa.yaml +++ /dev/null @@ -1,8 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: # kpt-merge: /argo-server - name: argo-server - annotations: - workflows.argoproj.io/rbac-rule: "'authors' in groups && email == 'kilgore@kilgore.trout'" - workflows.argoproj.io/rbac-rule-precedence: "1" - internal.kpt.dev/upstream-identifier: '|ServiceAccount|default|argo-server' diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/overlays/workflow-controller-configmap.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/overlays/workflow-controller-configmap.yaml deleted file mode 100644 index bb879dcd479..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/overlays/workflow-controller-configmap.yaml +++ /dev/null @@ -1,23 +0,0 @@ -apiVersion: v1 -data: - sso: | - issuer: http://dex:5556/dex - issuerAlias: http://dex:5556/dex - clientId: - name: argo-server-sso - key: clientID - clientSecret: - name: argo-server-sso - key: clientSecret - redirectUrl: http://localhost:8080/oauth2/callback - scopes: - - groups - - email - - profile - rbac: - enabled: true -kind: ConfigMap -metadata: # kpt-merge: /workflow-controller-configmap - name: workflow-controller-configmap - annotations: - internal.kpt.dev/upstream-identifier: '|ConfigMap|default|workflow-controller-configmap' diff 
--git a/manifests/kustomize/third-party/mysql/base/mysql-deployment.yaml b/manifests/kustomize/third-party/mysql/base/mysql-deployment.yaml index 407961b9964..70ea2310b54 100644 --- a/manifests/kustomize/third-party/mysql/base/mysql-deployment.yaml +++ b/manifests/kustomize/third-party/mysql/base/mysql-deployment.yaml @@ -22,7 +22,7 @@ spec: # Ext4, Btrfs etc. volumes root directories have a lost+found directory that should not be treated as a database. # ignore-db-dir option has been deprecated in mysql v5.7.16. # - # If upgrading MySQL to v8.0 fails, try removing /var/lib/mysql/lost+found folder in + # If upgrading MySQL to v8.0 fails, try removing /var/lib/mysql/lost+found folder in # mysql-pv-claim (mysql-persistent-storage): # # kubectl exec -it -n kubeflow -- bash @@ -34,7 +34,7 @@ spec: - --datadir - /var/lib/mysql # MLMD workloads (metadata-grpc-deployment and metadata-writer) depend on mysql_native_password authentication plugin. - # mysql_native_password plugin implements native authentication; that is, authentication based on the password + # mysql_native_password plugin implements native authentication; that is, authentication based on the password # hashing method in use from before the introduction of pluggable authentication in MySQL 8.0. # # The mysql_native_password authentication plugin is deprecated as of MySQL 8.0.34, disabled by default diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/tests/__init__.py b/manifests/kustomize/third-party/seaweedfs/OWNERS similarity index 100% rename from samples/contrib/azure-samples/kfp-azure-databricks/tests/__init__.py rename to manifests/kustomize/third-party/seaweedfs/OWNERS diff --git a/manifests/kustomize/third-party/seaweedfs/README.md b/manifests/kustomize/third-party/seaweedfs/README.md new file mode 100644 index 00000000000..74e01cacf19 --- /dev/null +++ b/manifests/kustomize/third-party/seaweedfs/README.md @@ -0,0 +1,173 @@ +# SeaweedFS + +- [Official documentation](https://github.com/seaweedfs/seaweedfs/wiki) +- [Official repository](https://github.com/seaweedfs/seaweedfs) + +SeaweedFS is a simple and highly scalable distributed file system. It has an S3 interface which makes it usable as an object store for kubeflow. + +## Prerequisites + +- Kubernetes (any recent Version should work) +- You should have `kubectl` available and configured to talk to the desired cluster. +- `kustomize` +- If you installed kubeflow with minio, use the `istio` dir instead of `base` for the kustomize commands. + +## Compile manifests + +```bash +kubectl kustomize ./base/ +``` + +## Install SeaweedFS + +**WARNING** +This replaces the service `minio-service` and will redirect the traffic to seaweedfs. + +```bash +# Optional, but recommended to backup existing minio-service +kubectl get -n kubeflow svc minio-service -o=jsonpath='{.metadata.annotations.kubectl\.kubernetes\.io/last-applied-configuration}' > svc-minio-service-backup.json + +kubectl kustomize ./base/ | kubectl apply -f - +``` + +## Verify deployment + +Run + +```bash +./test.sh +``` + +With the ready check on the container it already verifies that the S3 starts correctly. +You can then use it with the endpoint at . +To create access keys open a shell on the pod and use `weed shell` to configure your instance. +Create a user with the command `s3.configure -user -access_key -secret-key -actions Read:/,Write::/ -apply` +Documentation for this can also be found [here](https://github.com/seaweedfs/seaweedfs/wiki/Amazon-S3-API). 
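+
+For example, the following creates such a user non-interactively from outside the pod (a sketch based on `test.sh` in this directory; the user name and keys are placeholders, substitute your own values):
+
+```bash
+# Create an S3 user with read/write/list permissions via weed shell on the SeaweedFS pod.
+kubectl -n kubeflow exec deployments/seaweedfs -c seaweedfs -- sh -c \
+  'echo "s3.configure -user example-user -access_key EXAMPLEKEY -secret_key EXAMPLESECRET -actions Read,Write,List -apply" | /usr/bin/weed shell'
+```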
+ +## Gateway to Remote Object Storage + +The Gateway to Remote Object Storage feature allows SeaweedFS to automatically synchronize local storage with remote cloud storage providers (AWS S3, Azure Blob Storage, Google Cloud Storage). This enables: + +- **Automatic Bucket Synchronization**: Local new buckets are automatically created in remote storage +- **Bidirectional Sync**: Changes in local storage are uploaded to remote storage +- **Automatic Cleanup**: Local deleted buckets are automatically deleted in remote storage +- **Multi-Cloud Support**: Connect to multiple cloud storage providers simultaneously + +### Configure Remote Storage + +Remote storage must be configured before using the gateway. Use the `weed shell` to configure remote storage connections: + +#### 1. Access SeaweedFS Shell + +```bash +kubectl exec -n kubeflow deployment/seaweedfs -it -- weed shell +``` + +#### 2. Configure Remote Storage + +**AWS S3 Configuration:** + +```bash +# Configure AWS S3 remote storage +remote.configure -name=aws1 -type=s3 -s3.access_key=YOUR_ACCESS_KEY -s3.secret_key=YOUR_SECRET_KEY -s3.region=us-east-1 -s3.endpoint=s3.amazonaws.com -s3.storage_class="STANDARD" +``` + +**Azure Blob Storage Configuration:** + +```bash +# Configure Azure Blob Storage +remote.configure -name=azure1 -type=azure -azure.account_name=YOUR_ACCOUNT_NAME -azure.account_key=YOUR_ACCOUNT_KEY +``` + +**Google Cloud Storage Configuration:** + +```bash +# Configure Google Cloud Storage +remote.configure -name=gcs1 -type=gcs -gcs.appCredentialsFile=/path/to/service-account-file.json +``` + +#### 3. View and Manage Configurations + +```bash +# List all remote storage configurations +remote.configure + +# Delete a configuration +remote.configure -delete -name=aws1 +``` + +### Setup Gateway to Remote Storage + +#### Step 1: Mount Existing Remote Buckets (Optional) + +If you have existing buckets in remote storage, mount them as local buckets: + +```bash +# In weed shell +remote.mount.buckets -remote=aws1 -apply +``` + +#### Step 2: Start the Remote Gateway + +The gateway process continuously monitors local changes and syncs them to remote storage. 
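+
+As a quick sanity check that the synchronization works (a sketch: `demo-sync` and `aws1` are example names, and the AWS CLI is assumed to be configured for the remote account), create a bucket locally once the gateway below is running and confirm it appears remotely:
+
+```bash
+# Create a bucket through the local SeaweedFS instance...
+kubectl exec -n kubeflow deployment/seaweedfs -- sh -c 'echo "s3.bucket.create --name demo-sync" | weed shell'
+# ...the running gateway should then create the matching bucket in the remote storage (aws1 here).
+aws s3 ls | grep demo-sync
+```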
+ +**Basic Gateway Setup:** + +```bash +# Start the gateway (run this in the SeaweedFS deployment) +kubectl exec -n kubeflow deployment/seaweedfs -- weed filer.remote.gateway -createBucketAt=aws1 +``` + +**Gateway with Random Suffix (for unique bucket names):** + +```bash +# Some cloud providers require globally unique bucket names +kubectl exec -n kubeflow deployment/seaweedfs -- weed filer.remote.gateway -createBucketAt=aws1 -createBucketWithRandomSuffix +``` + +#### Step 3(Optional): Cache Management + +Optimize performance by managing cache: + +```bash +# In weed shell + +# Cache all PDF files in all mounted buckets +remote.cache -include=*.pdf + +# Cache all PDF files in a specific bucket +remote.cache -dir=/buckets/some-bucket -include=*.pdf + +# Uncache files older than 1 hour and larger than 10KB +remote.uncache -minAge=3600 -minSize=10240 +``` + +### Troubleshooting + +**Common Issues:** + +- **Configuration not found**: Ensure remote storage is configured before starting gateway +- **Permission denied**: Check cloud storage credentials and permissions +- **Connection timeout**: Verify network connectivity to cloud storage +- **Bucket conflicts**: Use random suffix for globally unique bucket names + +**Debug Commands:** + +```bash +# Check remote configurations +kubectl exec -n kubeflow deployment/seaweedfs -- weed shell -c "remote.configure" + +# Check mounted buckets +kubectl exec -n kubeflow deployment/seaweedfs -- weed shell -c "remote.mount.buckets -remote=aws1" + +# Check gateway logs +kubectl logs -n kubeflow deployment/seaweedfs -f +``` + +## Uninstall SeaweedFS + +```bash +kubectl kustomize ./base/ | kubectl delete -f - +# Restore minio-service from backup +kubectl apply -f svc-minio-service-backup.json +``` diff --git a/manifests/kustomize/third-party/seaweedfs/UPDGRADE.md b/manifests/kustomize/third-party/seaweedfs/UPDGRADE.md new file mode 100644 index 00000000000..0193a918448 --- /dev/null +++ b/manifests/kustomize/third-party/seaweedfs/UPDGRADE.md @@ -0,0 +1,3 @@ +# Upgrade SeaweedFS + +Change the image tag in the Deployment to the desired version. You can find the available images [here](https://hub.docker.com/r/chrislusf/seaweedfs). 
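+
+If you prefer not to edit the Deployment manifest by hand, an image override via kustomize works as well (a sketch; `3.93` is an example tag, check Docker Hub for the version you actually want):
+
+```bash
+# Pin a new image tag in the seaweedfs kustomization, then re-apply the manifests
+# from this directory as in the install step.
+(cd base/seaweedfs && kustomize edit set image chrislusf/seaweedfs=chrislusf/seaweedfs:3.93)
+kubectl kustomize ./base/ | kubectl apply -f -
+```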
diff --git a/manifests/kustomize/third-party/seaweedfs/base/kustomization.yaml b/manifests/kustomize/third-party/seaweedfs/base/kustomization.yaml new file mode 100644 index 00000000000..9138ac90bb1 --- /dev/null +++ b/manifests/kustomize/third-party/seaweedfs/base/kustomization.yaml @@ -0,0 +1,6 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization +namespace: kubeflow + +resources: +- seaweedfs/ diff --git a/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/kustomization.yaml b/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/kustomization.yaml new file mode 100644 index 00000000000..9dbf9e742b3 --- /dev/null +++ b/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/kustomization.yaml @@ -0,0 +1,13 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization +namespace: kubeflow + +resources: +- seaweedfs-deployment.yaml +- seaweedfs-pvc.yaml +- seaweedfs-networkpolicy.yaml +- seaweedfs-create-admin-user-job.yaml +- seaweedfs-service.yaml +- seaweedfs-service-account.yaml +- minio-service.yaml +- mlpipeline-minio-artifact-secret.yaml diff --git a/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/minio-service.yaml b/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/minio-service.yaml new file mode 100644 index 00000000000..245153420cd --- /dev/null +++ b/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/minio-service.yaml @@ -0,0 +1,16 @@ +apiVersion: v1 +kind: Service +metadata: + name: minio-service + namespace: kubeflow + labels: + app: seaweedfs + component: minio-compatibility +spec: + ports: + - name: http + port: 9000 + protocol: TCP + targetPort: 8333 + selector: + app: seaweedfs diff --git a/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/mlpipeline-minio-artifact-secret.yaml b/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/mlpipeline-minio-artifact-secret.yaml new file mode 100644 index 00000000000..ac298d9b3d0 --- /dev/null +++ b/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/mlpipeline-minio-artifact-secret.yaml @@ -0,0 +1,7 @@ +kind: Secret +apiVersion: v1 +metadata: + name: mlpipeline-minio-artifact +stringData: + accesskey: minio + secretkey: minio123 diff --git a/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/seaweedfs-create-admin-user-job.yaml b/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/seaweedfs-create-admin-user-job.yaml new file mode 100644 index 00000000000..0fcbfe00e2e --- /dev/null +++ b/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/seaweedfs-create-admin-user-job.yaml @@ -0,0 +1,77 @@ +kind: Job +apiVersion: batch/v1 +metadata: + name: init-seaweedfs +spec: + template: + metadata: + name: init-seaweedfs + spec: + securityContext: + seccompProfile: + type: RuntimeDefault + restartPolicy: OnFailure + containers: + - name: init-seaweedfs + image: 'chrislusf/seaweedfs:3.85' + env: + - name: WEED_CLUSTER_DEFAULT + value: "sw" + - name: WEED_CLUSTER_SW_MASTER + value: "seaweedfs.kubeflow:9333" + envFrom: + - secretRef: + name: mlpipeline-minio-artifact + command: + - "/bin/sh" + - "-ec" + - | + wait_for_service() { + local url=$1 + local max_attempts=60 # 5 minutes total (5s * 60) + local attempt=1 + + echo "Waiting for service at $url..." + while [ $attempt -le $max_attempts ]; do + if wget -q --spider "$url" >/dev/null 2>&1; then + echo "Service at $url is up!" + return 0 + fi + echo "Attempt $attempt: Service not ready yet, retrying in 5s..." 
+ sleep 5 + attempt=$((attempt + 1)) + done + echo "Service at $url failed to become ready within 5 minutes" + exit 1 + } + wait_for_service "http://minio-service.kubeflow:9000/status" + echo "Creating S3 bucket..." + echo "s3.bucket.create --name mlpipeline" | /usr/bin/weed shell > /dev/null 2>&1 + if [ $? -eq 0 ]; then + echo "Bucket created successfully" + else + echo "Failed to create bucket or bucket already exists" + fi + echo "Configuring S3 credentials..." + echo "s3.configure -user kubeflow-admin \ + -access_key $accesskey \ + -secret_key $secretkey \ + -actions Admin \ + -apply" | /usr/bin/weed shell > /dev/null 2>&1 + if [ $? -eq 0 ]; then + echo "S3 credentials configured successfully" + else + echo "Failed to configure S3 credentials" + exit 1 + fi + securityContext: # Using restricted profile + allowPrivilegeEscalation: false + privileged: false + runAsNonRoot: true + # image defaults to root user + runAsUser: 1001 + runAsGroup: 1001 + capabilities: + drop: + - ALL + serviceAccountName: seaweedfs diff --git a/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/seaweedfs-deployment.yaml b/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/seaweedfs-deployment.yaml new file mode 100644 index 00000000000..635e8ac6e37 --- /dev/null +++ b/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/seaweedfs-deployment.yaml @@ -0,0 +1,72 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: seaweedfs + namespace: kubeflow + labels: + app: seaweedfs +spec: + selector: + matchLabels: + app: seaweedfs + strategy: + type: Recreate + # Single container setup not scalable + replicas: 1 + template: + metadata: + labels: + app: seaweedfs + application-crd-id: kubeflow-pipelines + spec: + securityContext: + seccompProfile: + type: RuntimeDefault + containers: + - name: seaweedfs + image: 'chrislusf/seaweedfs:3.92' + args: + - 'server' + - '-dir=/data' + - '-s3' + - '-iam' + - '-filer' + - '-master.volumePreallocate=false' + ports: + - containerPort: 8333 + - containerPort: 8111 + - containerPort: 9333 + - containerPort: 19333 + - containerPort: 8888 + readinessProbe: + httpGet: + path: /status + port: 8333 + scheme: HTTP + initialDelaySeconds: 15 + periodSeconds: 15 + successThreshold: 1 + failureThreshold: 100 + timeoutSeconds: 10 + securityContext: # Using restricted profile + allowPrivilegeEscalation: false + privileged: false + runAsNonRoot: true + # image defaults to root user + runAsUser: 1001 + runAsGroup: 1001 + capabilities: + drop: + - ALL + volumeMounts: + - mountPath: /data + name: data + resources: + requests: + cpu: 32m + memory: 128Mi + volumes: + - name: data + persistentVolumeClaim: + claimName: seaweedfs-pvc + serviceAccountName: seaweedfs diff --git a/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/seaweedfs-networkpolicy.yaml b/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/seaweedfs-networkpolicy.yaml new file mode 100644 index 00000000000..0375f83270a --- /dev/null +++ b/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/seaweedfs-networkpolicy.yaml @@ -0,0 +1,30 @@ +apiVersion: networking.k8s.io/v1 +kind: NetworkPolicy +metadata: + name: seaweedfs +spec: + ingress: + - from: + - namespaceSelector: + matchExpressions: + - key: app.kubernetes.io/part-of + operator: In + values: + - kubeflow-profile + ports: + - port: 8333 + - from: + - namespaceSelector: + matchExpressions: + - key: kubernetes.io/metadata.name + operator: In + values: + - istio-system + podSelector: + matchExpressions: + - key: app + operator: In + values: + - 
seaweedfs + policyTypes: + - Ingress diff --git a/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/seaweedfs-pvc.yaml b/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/seaweedfs-pvc.yaml new file mode 100644 index 00000000000..b0302f9cb7f --- /dev/null +++ b/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/seaweedfs-pvc.yaml @@ -0,0 +1,11 @@ +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: seaweedfs-pvc + namespace: kubeflow +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 20Gi diff --git a/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/seaweedfs-service-account.yaml b/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/seaweedfs-service-account.yaml new file mode 100644 index 00000000000..9e0b2176f25 --- /dev/null +++ b/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/seaweedfs-service-account.yaml @@ -0,0 +1,4 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: seaweedfs diff --git a/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/seaweedfs-service.yaml b/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/seaweedfs-service.yaml new file mode 100644 index 00000000000..0134c34f391 --- /dev/null +++ b/manifests/kustomize/third-party/seaweedfs/base/seaweedfs/seaweedfs-service.yaml @@ -0,0 +1,33 @@ +# Separate service for new ports of seaweedfs. If we add them to the existing minio-service there will be a problem +# with the mlpipeline api server because it relies on MINIO_SERVICE_SERVICE_PORT pointing to the S3 port. +# But with multiple ports on a service that is not really reliable. So we use the existing minio-service for +# backwards-compatibility, but everything new, seaweedfs related is here. +apiVersion: v1 +kind: Service +metadata: + name: seaweedfs + namespace: kubeflow +spec: + ports: + - name: http-iam + port: 8111 + protocol: TCP + targetPort: 8111 + - name: http-master + port: 9333 + protocol: TCP + targetPort: 9333 + - name: grpc-master + port: 19333 + protocol: TCP + targetPort: 19333 + - name: grpc-filer + port: 18888 + protocol: TCP + targetPort: 18888 + - name: http-filer + port: 8888 + protocol: TCP + targetPort: 8888 + selector: + app: seaweedfs diff --git a/manifests/kustomize/third-party/seaweedfs/istio/istio-authorization-policy.yaml b/manifests/kustomize/third-party/seaweedfs/istio/istio-authorization-policy.yaml new file mode 100644 index 00000000000..4c6ac2c1630 --- /dev/null +++ b/manifests/kustomize/third-party/seaweedfs/istio/istio-authorization-policy.yaml @@ -0,0 +1,32 @@ +apiVersion: security.istio.io/v1beta1 +kind: AuthorizationPolicy +metadata: + name: seaweedfs-service +spec: + action: ALLOW + selector: + matchLabels: + app: seaweedfs + rules: + - from: + - source: + principals: + - cluster.local/ns/kubeflow/sa/ml-pipeline + - from: + - source: + principals: + - cluster.local/ns/kubeflow/sa/ml-pipeline-ui + # Allow traffic to s3 endpoint from User Pipeline Pods, which don't have a sidecar. + # Also needed for traffic from seaweedfs init pod. Seaweedfs gives the client an ip to connect to. This can not be + # handled well by istio (AuthPolicy). Instead, access to the sensitive ports will be limited by the NetworkPolicy. 
+ - {} +--- +apiVersion: "networking.istio.io/v1alpha3" +kind: DestinationRule +metadata: + name: ml-pipeline-seaweedfs +spec: + host: seaweedfs.kubeflow.svc.cluster.local + trafficPolicy: + tls: + mode: ISTIO_MUTUAL diff --git a/manifests/kustomize/third-party/seaweedfs/istio/kustomization.yaml b/manifests/kustomize/third-party/seaweedfs/istio/kustomization.yaml new file mode 100644 index 00000000000..2dffd5d150b --- /dev/null +++ b/manifests/kustomize/third-party/seaweedfs/istio/kustomization.yaml @@ -0,0 +1,7 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization +namespace: kubeflow + +resources: +- ../base/ +- istio-authorization-policy.yaml diff --git a/manifests/kustomize/third-party/seaweedfs/test.sh b/manifests/kustomize/third-party/seaweedfs/test.sh new file mode 100644 index 00000000000..f55ca2135e1 --- /dev/null +++ b/manifests/kustomize/third-party/seaweedfs/test.sh @@ -0,0 +1,20 @@ +#!/usr/bin/env bash + +set -xe + +kubectl create ns kubeflow || echo "namespace kubeflow already exists" +kubectl get -n kubeflow svc minio-service -o=jsonpath='{.metadata.annotations.kubectl\.kubernetes\.io/last-applied-configuration}' > svc-minio-service-backup.json +kustomize build istio/ | kubectl apply --server-side -f - +kubectl -n kubeflow wait --for=condition=available --timeout=600s deploy/seaweedfs +kubectl -n kubeflow exec deployments/seaweedfs -c seaweedfs -- sh -c "echo \"s3.configure -user minio -access_key minio -secret_key minio123 -actions Read,Write,List -apply\" | /usr/bin/weed shell" + +kubectl -n kubeflow port-forward svc/minio-service 8333:9000 +echo "S3 endpoint available on localhost:8333" & + +function trap_handler { + kubectl -n kubeflow logs -l app=seaweedfs --tail=100 + kustomize build istio/ | kubectl delete -f - + kubectl apply -f svc-minio-service-backup.json +} + +trap trap_handler EXIT diff --git a/package-lock.json b/package-lock.json deleted file mode 100644 index adc4d7ef347..00000000000 --- a/package-lock.json +++ /dev/null @@ -1,2894 +0,0 @@ -{ - "name": "kubeflow-pipelines", - "version": "1.0.0-rc.2", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "kubeflow-pipelines", - "license": "Apache-2.0", - "devDependencies": { - "standard-version": "^8.0.0" - } - }, - "node_modules/@babel/code-frame": { - "version": "7.10.1", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.10.1.tgz", - "integrity": "sha512-IGhtTmpjGbYzcEDOw7DcQtbQSXcG9ftmAXtWTu9V936vDye4xjjekktFAtgZsWpzTj/X01jocB46mTywm/4SZw==", - "dev": true, - "dependencies": { - "@babel/highlight": "^7.10.1" - } - }, - "node_modules/@babel/helper-validator-identifier": { - "version": "7.10.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.10.1.tgz", - "integrity": "sha512-5vW/JXLALhczRCWP0PnFDMCJAchlBvM7f4uk/jXritBnIa6E1KmqmtrS3yn1LAnxFBypQ3eneLuXjsnfQsgILw==", - "dev": true - }, - "node_modules/@babel/highlight": { - "version": "7.10.1", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.10.1.tgz", - "integrity": "sha512-8rMof+gVP8mxYZApLF/JgNDAkdKa+aJt3ZYxF8z6+j/hpeXL7iMsKCPHa2jNMHu/qqBwzQF4OHNoYi8dMA/rYg==", - "dev": true, - "dependencies": { - "@babel/helper-validator-identifier": "^7.10.1", - "chalk": "^2.0.0", - "js-tokens": "^4.0.0" - } - }, - "node_modules/@types/color-name": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@types/color-name/-/color-name-1.1.1.tgz", - "integrity": 
"sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==", - "dev": true - }, - "node_modules/@types/minimist": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.0.tgz", - "integrity": "sha1-aaI6OtKcrwCX8G7aWbNh7i8GOfY=", - "dev": true - }, - "node_modules/@types/normalize-package-data": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz", - "integrity": "sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA==", - "dev": true - }, - "node_modules/add-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/add-stream/-/add-stream-1.0.0.tgz", - "integrity": "sha1-anmQQ3ynNtXhKI25K9MmbV9csqo=", - "dev": true - }, - "node_modules/ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/array-find-index": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/array-find-index/-/array-find-index-1.0.2.tgz", - "integrity": "sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/array-ify": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/array-ify/-/array-ify-1.0.0.tgz", - "integrity": "sha1-nlKHYrSpBmrRY6aWKjZEGOlibs4=", - "dev": true - }, - "node_modules/arrify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", - "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/balanced-match": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", - "dev": true - }, - "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/buffer-from": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", - "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==", - "dev": true - }, - "node_modules/camelcase": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.0.0.tgz", - "integrity": "sha512-8KMDF1Vz2gzOq54ONPJS65IvTUaB1cHJ2DMM7MbPmLZljDH1qpzzLsWdiN9pHh6qvkRVDTi/07+eNGch/oLU4w==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/camelcase-keys": { - "version": "6.2.2", - "resolved": 
"https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz", - "integrity": "sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==", - "dev": true, - "dependencies": { - "camelcase": "^5.3.1", - "map-obj": "^4.0.0", - "quick-lru": "^4.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/camelcase-keys/node_modules/camelcase": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, - "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/cliui": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", - "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", - "dev": true, - "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.0", - "wrap-ansi": "^6.2.0" - } - }, - "node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", - "dev": true - }, - "node_modules/commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", - "dev": true, - "optional": true - }, - "node_modules/compare-func": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/compare-func/-/compare-func-1.3.4.tgz", - "integrity": "sha512-sq2sWtrqKPkEXAC8tEJA1+BqAH9GbFkGBtUOqrUX57VSfwp8xyktctk+uLoRy5eccTdxzDcVIztlYDpKs3Jv1Q==", - "dev": true, - "dependencies": { - "array-ify": "^1.0.0", - "dot-prop": "^3.0.0" - } - }, - "node_modules/concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", - "dev": true - }, - "node_modules/concat-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-2.0.0.tgz", - "integrity": "sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==", - "dev": true, - "engines": [ - "node >= 6.0" - ], - "dependencies": { - "buffer-from": "^1.0.0", - "inherits": "^2.0.3", - "readable-stream": "^3.0.2", - "typedarray": "^0.0.6" - } - }, - "node_modules/conventional-changelog": { - "version": "3.1.18", - "resolved": "https://registry.npmjs.org/conventional-changelog/-/conventional-changelog-3.1.18.tgz", - "integrity": 
"sha512-aN6a3rjgV8qwAJj3sC/Lme2kvswWO7fFSGQc32gREcwIOsaiqBaO6f2p0NomFaPDnTqZ+mMZFLL3hlzvEnZ0mQ==", - "dev": true, - "dependencies": { - "conventional-changelog-angular": "^5.0.6", - "conventional-changelog-atom": "^2.0.3", - "conventional-changelog-codemirror": "^2.0.3", - "conventional-changelog-conventionalcommits": "^4.2.3", - "conventional-changelog-core": "^4.1.4", - "conventional-changelog-ember": "^2.0.4", - "conventional-changelog-eslint": "^3.0.4", - "conventional-changelog-express": "^2.0.1", - "conventional-changelog-jquery": "^3.0.6", - "conventional-changelog-jshint": "^2.0.3", - "conventional-changelog-preset-loader": "^2.3.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/conventional-changelog-angular": { - "version": "5.0.10", - "resolved": "https://registry.npmjs.org/conventional-changelog-angular/-/conventional-changelog-angular-5.0.10.tgz", - "integrity": "sha512-k7RPPRs0vp8+BtPsM9uDxRl6KcgqtCJmzRD1wRtgqmhQ96g8ifBGo9O/TZBG23jqlXS/rg8BKRDELxfnQQGiaA==", - "dev": true, - "dependencies": { - "compare-func": "^1.3.1", - "q": "^1.5.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-atom": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/conventional-changelog-atom/-/conventional-changelog-atom-2.0.7.tgz", - "integrity": "sha512-7dOREZwzB+tCEMjRTDfen0OHwd7vPUdmU0llTy1eloZgtOP4iSLVzYIQqfmdRZEty+3w5Jz+AbhfTJKoKw1JeQ==", - "dev": true, - "dependencies": { - "q": "^1.5.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-codemirror": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/conventional-changelog-codemirror/-/conventional-changelog-codemirror-2.0.7.tgz", - "integrity": "sha512-Oralk1kiagn3Gb5cR5BffenWjVu59t/viE6UMD/mQa1hISMPkMYhJIqX+CMeA1zXgVBO+YHQhhokEj99GP5xcg==", - "dev": true, - "dependencies": { - "q": "^1.5.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-config-spec": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/conventional-changelog-config-spec/-/conventional-changelog-config-spec-2.1.0.tgz", - "integrity": "sha512-IpVePh16EbbB02V+UA+HQnnPIohgXvJRxHcS5+Uwk4AT5LjzCZJm5sp/yqs5C6KZJ1jMsV4paEV13BN1pvDuxQ==", - "dev": true - }, - "node_modules/conventional-changelog-conventionalcommits": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/conventional-changelog-conventionalcommits/-/conventional-changelog-conventionalcommits-4.2.3.tgz", - "integrity": "sha512-atGa+R4vvEhb8N/8v3IoW59gCBJeeFiX6uIbPu876ENAmkMwsenyn0R21kdDHJFLQdy6zW4J6b4xN8KI3b9oww==", - "dev": true, - "dependencies": { - "compare-func": "^1.3.1", - "lodash": "^4.17.15", - "q": "^1.5.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/conventional-changelog-core": { - "version": "4.1.7", - "resolved": "https://registry.npmjs.org/conventional-changelog-core/-/conventional-changelog-core-4.1.7.tgz", - "integrity": "sha512-UBvSrQR2RdKbSQKh7RhueiiY4ZAIOW3+CSWdtKOwRv+KxIMNFKm1rOcGBFx0eA8AKhGkkmmacoTWJTqyz7Q0VA==", - "dev": true, - "dependencies": { - "add-stream": "^1.0.0", - "conventional-changelog-writer": "^4.0.16", - "conventional-commits-parser": "^3.1.0", - "dateformat": "^3.0.0", - "get-pkg-repo": "^1.0.0", - "git-raw-commits": "2.0.0", - "git-remote-origin-url": "^2.0.0", - "git-semver-tags": "^4.0.0", - "lodash": "^4.17.15", - "normalize-package-data": "^2.3.5", - "q": "^1.5.1", - "read-pkg": "^3.0.0", - "read-pkg-up": "^3.0.0", - "shelljs": "^0.8.3", - "through2": "^3.0.0" - }, - "engines": 
{ - "node": ">=10" - } - }, - "node_modules/conventional-changelog-core/node_modules/git-semver-tags": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/git-semver-tags/-/git-semver-tags-4.0.0.tgz", - "integrity": "sha512-LajaAWLYVBff+1NVircURJFL8TQ3EMIcLAfHisWYX/nPoMwnTYfWAznQDmMujlLqoD12VtLmoSrF1sQ5MhimEQ==", - "dev": true, - "dependencies": { - "meow": "^7.0.0", - "semver": "^6.0.0" - }, - "bin": { - "git-semver-tags": "cli.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-core/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/conventional-changelog-ember": { - "version": "2.0.8", - "resolved": "https://registry.npmjs.org/conventional-changelog-ember/-/conventional-changelog-ember-2.0.8.tgz", - "integrity": "sha512-JEMEcUAMg4Q9yxD341OgWlESQ4gLqMWMXIWWUqoQU8yvTJlKnrvcui3wk9JvnZQyONwM2g1MKRZuAjKxr8hAXA==", - "dev": true, - "dependencies": { - "q": "^1.5.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-eslint": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/conventional-changelog-eslint/-/conventional-changelog-eslint-3.0.8.tgz", - "integrity": "sha512-5rTRltgWG7TpU1PqgKHMA/2ivjhrB+E+S7OCTvj0zM/QGg4vmnVH67Vq/EzvSNYtejhWC+OwzvDrLk3tqPry8A==", - "dev": true, - "dependencies": { - "q": "^1.5.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-express": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/conventional-changelog-express/-/conventional-changelog-express-2.0.5.tgz", - "integrity": "sha512-pW2hsjKG+xNx/Qjof8wYlAX/P61hT5gQ/2rZ2NsTpG+PgV7Rc8RCfITvC/zN9K8fj0QmV6dWmUefCteD9baEAw==", - "dev": true, - "dependencies": { - "q": "^1.5.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-jquery": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/conventional-changelog-jquery/-/conventional-changelog-jquery-3.0.6.tgz", - "integrity": "sha512-gHAABCXUNA/HjnZEm+vxAfFPJkgtrZvCDIlCKfdPVXtCIo/Q0lN5VKpx8aR5p8KdVRQFF3OuTlvv5kv6iPuRqA==", - "dev": true, - "dependencies": { - "q": "^1.5.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/conventional-changelog-jshint": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/conventional-changelog-jshint/-/conventional-changelog-jshint-2.0.7.tgz", - "integrity": "sha512-qHA8rmwUnLiIxANJbz650+NVzqDIwNtc0TcpIa0+uekbmKHttidvQ1dGximU3vEDdoJVKFgR3TXFqYuZmYy9ZQ==", - "dev": true, - "dependencies": { - "compare-func": "^1.3.1", - "q": "^1.5.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-preset-loader": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/conventional-changelog-preset-loader/-/conventional-changelog-preset-loader-2.3.4.tgz", - "integrity": "sha512-GEKRWkrSAZeTq5+YjUZOYxdHq+ci4dNwHvpaBC3+ENalzFWuCWa9EZXSuZBpkr72sMdKB+1fyDV4takK1Lf58g==", - "dev": true, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-writer": { - "version": "4.0.16", - "resolved": "https://registry.npmjs.org/conventional-changelog-writer/-/conventional-changelog-writer-4.0.16.tgz", - "integrity": "sha512-jmU1sDJDZpm/dkuFxBeRXvyNcJQeKhGtVcFFkwTphUAzyYWcwz2j36Wcv+Mv2hU3tpvLMkysOPXJTLO55AUrYQ==", - "dev": 
true, - "dependencies": { - "compare-func": "^1.3.1", - "conventional-commits-filter": "^2.0.6", - "dateformat": "^3.0.0", - "handlebars": "^4.7.6", - "json-stringify-safe": "^5.0.1", - "lodash": "^4.17.15", - "meow": "^7.0.0", - "semver": "^6.0.0", - "split": "^1.0.0", - "through2": "^3.0.0" - }, - "bin": { - "conventional-changelog-writer": "cli.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-writer/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/conventional-commits-filter": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/conventional-commits-filter/-/conventional-commits-filter-2.0.6.tgz", - "integrity": "sha512-4g+sw8+KA50/Qwzfr0hL5k5NWxqtrOVw4DDk3/h6L85a9Gz0/Eqp3oP+CWCNfesBvZZZEFHF7OTEbRe+yYSyKw==", - "dev": true, - "dependencies": { - "lodash.ismatch": "^4.4.0", - "modify-values": "^1.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-commits-parser": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/conventional-commits-parser/-/conventional-commits-parser-3.1.0.tgz", - "integrity": "sha512-RSo5S0WIwXZiRxUGTPuYFbqvrR4vpJ1BDdTlthFgvHt5kEdnd1+pdvwWphWn57/oIl4V72NMmOocFqqJ8mFFhA==", - "dev": true, - "dependencies": { - "is-text-path": "^1.0.1", - "JSONStream": "^1.0.4", - "lodash": "^4.17.15", - "meow": "^7.0.0", - "split2": "^2.0.0", - "through2": "^3.0.0", - "trim-off-newlines": "^1.0.0" - }, - "bin": { - "conventional-commits-parser": "cli.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-recommended-bump": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/conventional-recommended-bump/-/conventional-recommended-bump-6.0.5.tgz", - "integrity": "sha512-srkferrB4kACPEbKYltZwX1CQZAEqbQkabKN444mavLRVMetzwJFJf23/+pwvtMsWbd+cc4HaleV1nHke0f8Rw==", - "dev": true, - "dependencies": { - "concat-stream": "^2.0.0", - "conventional-changelog-preset-loader": "^2.3.0", - "conventional-commits-filter": "^2.0.2", - "conventional-commits-parser": "^3.0.8", - "git-raw-commits": "2.0.0", - "git-semver-tags": "^3.0.1", - "meow": "^5.0.0", - "q": "^1.5.1" - }, - "bin": { - "conventional-recommended-bump": "cli.js" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/conventional-recommended-bump/node_modules/arrify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/conventional-recommended-bump/node_modules/camelcase": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-4.1.0.tgz", - "integrity": "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/conventional-recommended-bump/node_modules/camelcase-keys": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-4.2.0.tgz", - "integrity": "sha1-oqpfsa9oh1glnDLBQUJteJI7m3c=", - "dev": true, - "dependencies": { - "camelcase": "^4.1.0", - "map-obj": "^2.0.0", - "quick-lru": "^1.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/conventional-recommended-bump/node_modules/indent-string": { - "version": "3.2.0", - "resolved": 
"https://registry.npmjs.org/indent-string/-/indent-string-3.2.0.tgz", - "integrity": "sha1-Sl/W0nzDMvN+VBmlBNu4NxBckok=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/conventional-recommended-bump/node_modules/map-obj": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-2.0.0.tgz", - "integrity": "sha1-plzSkIepJZi4eRJXpSPgISIqwfk=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/conventional-recommended-bump/node_modules/meow": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/meow/-/meow-5.0.0.tgz", - "integrity": "sha512-CbTqYU17ABaLefO8vCU153ZZlprKYWDljcndKKDCFcYQITzWCXZAVk4QMFZPgvzrnUQ3uItnIE/LoUOwrT15Ig==", - "dev": true, - "dependencies": { - "camelcase-keys": "^4.0.0", - "decamelize-keys": "^1.0.0", - "loud-rejection": "^1.0.0", - "minimist-options": "^3.0.1", - "normalize-package-data": "^2.3.4", - "read-pkg-up": "^3.0.0", - "redent": "^2.0.0", - "trim-newlines": "^2.0.0", - "yargs-parser": "^10.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/conventional-recommended-bump/node_modules/minimist-options": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-3.0.2.tgz", - "integrity": "sha512-FyBrT/d0d4+uiZRbqznPXqw3IpZZG3gl3wKWiX784FycUKVwBt0uLBFkQrtE4tZOrgo78nZp2jnKz3L65T5LdQ==", - "dev": true, - "dependencies": { - "arrify": "^1.0.1", - "is-plain-obj": "^1.1.0" - }, - "engines": { - "node": ">= 4" - } - }, - "node_modules/conventional-recommended-bump/node_modules/quick-lru": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-1.1.0.tgz", - "integrity": "sha1-Q2CxfGETatOAeDl/8RQW4Ybc+7g=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/conventional-recommended-bump/node_modules/redent": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/redent/-/redent-2.0.0.tgz", - "integrity": "sha1-wbIAe0LVfrE4kHmzyDM2OdXhzKo=", - "dev": true, - "dependencies": { - "indent-string": "^3.0.0", - "strip-indent": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/conventional-recommended-bump/node_modules/strip-indent": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-2.0.0.tgz", - "integrity": "sha1-XvjbKV0B5u1sv3qrlpmNeCJSe2g=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/conventional-recommended-bump/node_modules/trim-newlines": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-2.0.0.tgz", - "integrity": "sha1-tAPQuRvlDDMd/EuC7s6yLD3hbSA=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/conventional-recommended-bump/node_modules/yargs-parser": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-10.1.0.tgz", - "integrity": "sha512-VCIyR1wJoEBZUqk5PA+oOBF6ypbwh5aNB3I50guxAL/quggdfs4TtNHQrSazFA3fYZ+tEqfs0zIGlv0c/rgjbQ==", - "dev": true, - "dependencies": { - "camelcase": "^4.1.0" - } - }, - "node_modules/core-util-is": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", - "dev": true - }, - "node_modules/currently-unhandled": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/currently-unhandled/-/currently-unhandled-0.4.1.tgz", - "integrity": "sha1-mI3zP+qxke95mmE2nddsF635V+o=", - "dev": true, - "dependencies": { - "array-find-index": "^1.0.1" 
- }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/dargs": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/dargs/-/dargs-4.1.0.tgz", - "integrity": "sha1-A6nbtLXC8Tm/FK5T8LiipqhvThc=", - "dev": true, - "dependencies": { - "number-is-nan": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/dateformat": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-3.0.3.tgz", - "integrity": "sha512-jyCETtSl3VMZMWeRo7iY1FL19ges1t55hMo5yaam4Jrsm5EPL89UQkoQRyiI+Yf4k8r2ZpdngkV8hr1lIdjb3Q==", - "dev": true, - "engines": { - "node": "*" - } - }, - "node_modules/decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/decamelize-keys": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/decamelize-keys/-/decamelize-keys-1.1.0.tgz", - "integrity": "sha1-0XGoeTMlKAfrPLYdwcFEXQeN8tk=", - "dev": true, - "dependencies": { - "decamelize": "^1.1.0", - "map-obj": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/decamelize-keys/node_modules/map-obj": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", - "integrity": "sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/detect-indent": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-6.0.0.tgz", - "integrity": "sha512-oSyFlqaTHCItVRGK5RmrmjB+CmaMOW7IaNA/kdxqhoa6d17j/5ce9O9eWXmV/KEdRwqpQA+Vqe8a8Bsybu4YnA==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/detect-newline": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", - "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/dot-prop": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-3.0.0.tgz", - "integrity": "sha1-G3CK8JSknJoOfbyteQq6U52sEXc=", - "dev": true, - "dependencies": { - "is-obj": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/dotgitignore": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/dotgitignore/-/dotgitignore-2.1.0.tgz", - "integrity": "sha512-sCm11ak2oY6DglEPpCB8TixLjWAxd3kJTs6UIcSasNYxXdFPV+YKlye92c8H4kKFqV5qYMIh7d+cYecEg0dIkA==", - "dev": true, - "dependencies": { - "find-up": "^3.0.0", - "minimatch": "^3.0.4" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/dotgitignore/node_modules/find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", - "dev": true, - "dependencies": { - "locate-path": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/dotgitignore/node_modules/locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", - "dev": true, - "dependencies": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - 
"node_modules/dotgitignore/node_modules/p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", - "dev": true, - "dependencies": { - "p-limit": "^2.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/dotgitignore/node_modules/path-exists": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true - }, - "node_modules/error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", - "dev": true, - "dependencies": { - "is-arrayish": "^0.2.1" - } - }, - "node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", - "dev": true, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/figures": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-3.1.0.tgz", - "integrity": "sha512-ravh8VRXqHuMvZt/d8GblBeqDMkdJMBdv/2KntFH+ra5MXkO7nxNKpzQ3n6QD/2da1kH0aWmNISdvhM7gl2gVg==", - "dev": true, - "dependencies": { - "escape-string-regexp": "^1.0.5" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/find-up": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", - "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", - "dev": true, - "dependencies": { - "locate-path": "^5.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/fs-access": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/fs-access/-/fs-access-1.0.1.tgz", - "integrity": "sha1-1qh/JiJxzv6+wwxVNAf7mV2od3o=", - "dev": true, - "dependencies": { - "null-check": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", - "dev": true - }, - "node_modules/get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", - "dev": true, - "engines": { - "node": "6.* || 8.* || >= 10.*" - } - }, - "node_modules/get-pkg-repo": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/get-pkg-repo/-/get-pkg-repo-1.4.0.tgz", - "integrity": "sha1-xztInAbYDMVTbCyFP54FIyBWly0=", - "dev": true, - "dependencies": { - "hosted-git-info": "^2.1.4", - "meow": "^3.3.0", - "normalize-package-data": "^2.3.0", - "parse-github-repo-url": "^1.3.0", - "through2": "^2.0.0" - }, - "bin": { - "get-pkg-repo": "cli.js" - } - }, - "node_modules/get-pkg-repo/node_modules/camelcase": { - "version": "2.1.1", - "resolved": 
"https://registry.npmjs.org/camelcase/-/camelcase-2.1.1.tgz", - "integrity": "sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/get-pkg-repo/node_modules/camelcase-keys": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-2.1.0.tgz", - "integrity": "sha1-MIvur/3ygRkFHvodkyITyRuPkuc=", - "dev": true, - "dependencies": { - "camelcase": "^2.0.0", - "map-obj": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/get-pkg-repo/node_modules/find-up": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz", - "integrity": "sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8=", - "dev": true, - "dependencies": { - "path-exists": "^2.0.0", - "pinkie-promise": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/get-pkg-repo/node_modules/indent-string": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-2.1.0.tgz", - "integrity": "sha1-ji1INIdCEhtKghi3oTfppSBJ3IA=", - "dev": true, - "dependencies": { - "repeating": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/get-pkg-repo/node_modules/map-obj": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", - "integrity": "sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/get-pkg-repo/node_modules/meow": { - "version": "3.7.0", - "resolved": "https://registry.npmjs.org/meow/-/meow-3.7.0.tgz", - "integrity": "sha1-cstmi0JSKCkKu/qFaJJYcwioAfs=", - "dev": true, - "dependencies": { - "camelcase-keys": "^2.0.0", - "decamelize": "^1.1.2", - "loud-rejection": "^1.0.0", - "map-obj": "^1.0.1", - "minimist": "^1.1.3", - "normalize-package-data": "^2.3.4", - "object-assign": "^4.0.1", - "read-pkg-up": "^1.0.1", - "redent": "^1.0.0", - "trim-newlines": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/get-pkg-repo/node_modules/path-exists": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-2.1.0.tgz", - "integrity": "sha1-D+tsZPD8UY2adU3V77YscCJ2H0s=", - "dev": true, - "dependencies": { - "pinkie-promise": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/get-pkg-repo/node_modules/read-pkg": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-1.1.0.tgz", - "integrity": "sha1-9f+qXs0pyzHAR0vKfXVra7KePyg=", - "dev": true, - "dependencies": { - "load-json-file": "^1.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/get-pkg-repo/node_modules/read-pkg-up": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-1.0.1.tgz", - "integrity": "sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI=", - "dev": true, - "dependencies": { - "find-up": "^1.0.0", - "read-pkg": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/get-pkg-repo/node_modules/readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "dev": true, - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - 
"util-deprecate": "~1.0.1" - } - }, - "node_modules/get-pkg-repo/node_modules/redent": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/redent/-/redent-1.0.0.tgz", - "integrity": "sha1-z5Fqsf1fHxbfsggi3W7H9zDCr94=", - "dev": true, - "dependencies": { - "indent-string": "^2.1.0", - "strip-indent": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/get-pkg-repo/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - }, - "node_modules/get-pkg-repo/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, - "node_modules/get-pkg-repo/node_modules/strip-indent": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-1.0.1.tgz", - "integrity": "sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI=", - "dev": true, - "dependencies": { - "get-stdin": "^4.0.1" - }, - "bin": { - "strip-indent": "cli.js" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/get-pkg-repo/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "dev": true, - "dependencies": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - } - }, - "node_modules/get-pkg-repo/node_modules/trim-newlines": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-1.0.0.tgz", - "integrity": "sha1-WIeWa7WCpFA6QetST301ARgVphM=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/get-stdin": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-4.0.1.tgz", - "integrity": "sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/git-raw-commits": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/git-raw-commits/-/git-raw-commits-2.0.0.tgz", - "integrity": "sha512-w4jFEJFgKXMQJ0H0ikBk2S+4KP2VEjhCvLCNqbNRQC8BgGWgLKNCO7a9K9LI+TVT7Gfoloje502sEnctibffgg==", - "dev": true, - "dependencies": { - "dargs": "^4.0.1", - "lodash.template": "^4.0.2", - "meow": "^4.0.0", - "split2": "^2.0.0", - "through2": "^2.0.0" - }, - "bin": { - "git-raw-commits": "cli.js" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/git-raw-commits/node_modules/arrify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/git-raw-commits/node_modules/camelcase": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-4.1.0.tgz", - "integrity": "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/git-raw-commits/node_modules/camelcase-keys": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-4.2.0.tgz", - "integrity": "sha1-oqpfsa9oh1glnDLBQUJteJI7m3c=", - "dev": true, - "dependencies": { 
- "camelcase": "^4.1.0", - "map-obj": "^2.0.0", - "quick-lru": "^1.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/git-raw-commits/node_modules/indent-string": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-3.2.0.tgz", - "integrity": "sha1-Sl/W0nzDMvN+VBmlBNu4NxBckok=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/git-raw-commits/node_modules/map-obj": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-2.0.0.tgz", - "integrity": "sha1-plzSkIepJZi4eRJXpSPgISIqwfk=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/git-raw-commits/node_modules/meow": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/meow/-/meow-4.0.1.tgz", - "integrity": "sha512-xcSBHD5Z86zaOc+781KrupuHAzeGXSLtiAOmBsiLDiPSaYSB6hdew2ng9EBAnZ62jagG9MHAOdxpDi/lWBFJ/A==", - "dev": true, - "dependencies": { - "camelcase-keys": "^4.0.0", - "decamelize-keys": "^1.0.0", - "loud-rejection": "^1.0.0", - "minimist": "^1.1.3", - "minimist-options": "^3.0.1", - "normalize-package-data": "^2.3.4", - "read-pkg-up": "^3.0.0", - "redent": "^2.0.0", - "trim-newlines": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/git-raw-commits/node_modules/minimist-options": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-3.0.2.tgz", - "integrity": "sha512-FyBrT/d0d4+uiZRbqznPXqw3IpZZG3gl3wKWiX784FycUKVwBt0uLBFkQrtE4tZOrgo78nZp2jnKz3L65T5LdQ==", - "dev": true, - "dependencies": { - "arrify": "^1.0.1", - "is-plain-obj": "^1.1.0" - }, - "engines": { - "node": ">= 4" - } - }, - "node_modules/git-raw-commits/node_modules/quick-lru": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-1.1.0.tgz", - "integrity": "sha1-Q2CxfGETatOAeDl/8RQW4Ybc+7g=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/git-raw-commits/node_modules/readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "dev": true, - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/git-raw-commits/node_modules/redent": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/redent/-/redent-2.0.0.tgz", - "integrity": "sha1-wbIAe0LVfrE4kHmzyDM2OdXhzKo=", - "dev": true, - "dependencies": { - "indent-string": "^3.0.0", - "strip-indent": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/git-raw-commits/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - }, - "node_modules/git-raw-commits/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, - "node_modules/git-raw-commits/node_modules/strip-indent": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/strip-indent/-/strip-indent-2.0.0.tgz", - "integrity": "sha1-XvjbKV0B5u1sv3qrlpmNeCJSe2g=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/git-raw-commits/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "dev": true, - "dependencies": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - } - }, - "node_modules/git-raw-commits/node_modules/trim-newlines": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-2.0.0.tgz", - "integrity": "sha1-tAPQuRvlDDMd/EuC7s6yLD3hbSA=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/git-remote-origin-url": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/git-remote-origin-url/-/git-remote-origin-url-2.0.0.tgz", - "integrity": "sha1-UoJlna4hBxRaERJhEq0yFuxfpl8=", - "dev": true, - "dependencies": { - "gitconfiglocal": "^1.0.0", - "pify": "^2.3.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/git-semver-tags": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/git-semver-tags/-/git-semver-tags-3.0.1.tgz", - "integrity": "sha512-Hzd1MOHXouITfCasrpVJbRDg9uvW7LfABk3GQmXYZByerBDrfrEMP9HXpNT7RxAbieiocP6u+xq20DkvjwxnCA==", - "dev": true, - "dependencies": { - "meow": "^5.0.0", - "semver": "^6.0.0" - }, - "bin": { - "git-semver-tags": "cli.js" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/git-semver-tags/node_modules/arrify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/git-semver-tags/node_modules/camelcase": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-4.1.0.tgz", - "integrity": "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/git-semver-tags/node_modules/camelcase-keys": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-4.2.0.tgz", - "integrity": "sha1-oqpfsa9oh1glnDLBQUJteJI7m3c=", - "dev": true, - "dependencies": { - "camelcase": "^4.1.0", - "map-obj": "^2.0.0", - "quick-lru": "^1.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/git-semver-tags/node_modules/indent-string": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-3.2.0.tgz", - "integrity": "sha1-Sl/W0nzDMvN+VBmlBNu4NxBckok=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/git-semver-tags/node_modules/map-obj": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-2.0.0.tgz", - "integrity": "sha1-plzSkIepJZi4eRJXpSPgISIqwfk=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/git-semver-tags/node_modules/meow": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/meow/-/meow-5.0.0.tgz", - "integrity": "sha512-CbTqYU17ABaLefO8vCU153ZZlprKYWDljcndKKDCFcYQITzWCXZAVk4QMFZPgvzrnUQ3uItnIE/LoUOwrT15Ig==", - "dev": true, - "dependencies": { - "camelcase-keys": "^4.0.0", - "decamelize-keys": "^1.0.0", - "loud-rejection": "^1.0.0", - "minimist-options": "^3.0.1", - "normalize-package-data": "^2.3.4", - "read-pkg-up": "^3.0.0", - "redent": "^2.0.0", - "trim-newlines": "^2.0.0", 
- "yargs-parser": "^10.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/git-semver-tags/node_modules/minimist-options": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-3.0.2.tgz", - "integrity": "sha512-FyBrT/d0d4+uiZRbqznPXqw3IpZZG3gl3wKWiX784FycUKVwBt0uLBFkQrtE4tZOrgo78nZp2jnKz3L65T5LdQ==", - "dev": true, - "dependencies": { - "arrify": "^1.0.1", - "is-plain-obj": "^1.1.0" - }, - "engines": { - "node": ">= 4" - } - }, - "node_modules/git-semver-tags/node_modules/quick-lru": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-1.1.0.tgz", - "integrity": "sha1-Q2CxfGETatOAeDl/8RQW4Ybc+7g=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/git-semver-tags/node_modules/redent": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/redent/-/redent-2.0.0.tgz", - "integrity": "sha1-wbIAe0LVfrE4kHmzyDM2OdXhzKo=", - "dev": true, - "dependencies": { - "indent-string": "^3.0.0", - "strip-indent": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/git-semver-tags/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/git-semver-tags/node_modules/strip-indent": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-2.0.0.tgz", - "integrity": "sha1-XvjbKV0B5u1sv3qrlpmNeCJSe2g=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/git-semver-tags/node_modules/trim-newlines": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-2.0.0.tgz", - "integrity": "sha1-tAPQuRvlDDMd/EuC7s6yLD3hbSA=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/git-semver-tags/node_modules/yargs-parser": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-10.1.0.tgz", - "integrity": "sha512-VCIyR1wJoEBZUqk5PA+oOBF6ypbwh5aNB3I50guxAL/quggdfs4TtNHQrSazFA3fYZ+tEqfs0zIGlv0c/rgjbQ==", - "dev": true, - "dependencies": { - "camelcase": "^4.1.0" - } - }, - "node_modules/gitconfiglocal": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/gitconfiglocal/-/gitconfiglocal-1.0.0.tgz", - "integrity": "sha1-QdBF84UaXqiPA/JMocYXgRRGS5s=", - "dev": true, - "dependencies": { - "ini": "^1.3.2" - } - }, - "node_modules/glob": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "deprecated": "Glob versions prior to v9 are no longer supported", - "dev": true, - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.4", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.4.tgz", - "integrity": "sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw==", - "dev": true - }, - "node_modules/handlebars": { - "version": "4.7.6", - "resolved": 
"https://registry.npmjs.org/handlebars/-/handlebars-4.7.6.tgz", - "integrity": "sha512-1f2BACcBfiwAfStCKZNrUCgqNZkGsAT7UM3kkYtXuLo0KnaVfjKOyf7PRzB6++aK9STyT1Pd2ZCPe3EGOXleXA==", - "dev": true, - "dependencies": { - "minimist": "^1.2.5", - "neo-async": "^2.6.0", - "source-map": "^0.6.1", - "wordwrap": "^1.0.0" - }, - "bin": { - "handlebars": "bin/handlebars" - }, - "engines": { - "node": ">=0.4.7" - }, - "optionalDependencies": { - "uglify-js": "^3.1.4" - } - }, - "node_modules/hard-rejection": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz", - "integrity": "sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/hosted-git-info": { - "version": "2.8.8", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", - "integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==", - "dev": true - }, - "node_modules/indent-string": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", - "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", - "dev": true, - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true - }, - "node_modules/ini": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", - "integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==", - "deprecated": "Please update to ini >=1.3.6 to avoid a prototype pollution issue", - "dev": true, - "engines": { - "node": "*" - } - }, - "node_modules/interpret": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", - "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==", - "dev": true, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/is-arrayish": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", - "dev": true - }, - "node_modules/is-finite": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-finite/-/is-finite-1.1.0.tgz", - "integrity": "sha512-cdyMtqX/BOqqNBBiKlIVkytNHm49MtMlYyn1zxzvJKWmFMlGzm+ry5BBfYyeY9YmNKbRSo/o7OX9w9ale0wg3w==", - "dev": true, - "engines": { - "node": ">=0.10.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-obj": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz", - "integrity": "sha1-PkcprB9f3gJc19g6iW2rn09n2w8=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-plain-obj": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", - "integrity": "sha1-caUMhCnfync8kqOQpKA7OfzVHT4=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-text-path": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-text-path/-/is-text-path-1.0.1.tgz", - "integrity": "sha1-Thqg+1G/vLPpJogAE5cgLBd1tm4=", - "dev": true, - "dependencies": { - "text-extensions": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-utf8": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-utf8/-/is-utf8-0.2.1.tgz", - "integrity": "sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI=", - "dev": true - }, - "node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", - "dev": true - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "dev": true - }, - "node_modules/json-parse-better-errors": { - "version": 
"1.0.2", - "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", - "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", - "dev": true - }, - "node_modules/json-stringify-safe": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=", - "dev": true - }, - "node_modules/jsonparse": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz", - "integrity": "sha1-P02uSpH6wxX3EGL4UhzCOfE2YoA=", - "dev": true, - "engines": [ - "node >= 0.2.0" - ] - }, - "node_modules/JSONStream": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz", - "integrity": "sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==", - "dev": true, - "dependencies": { - "jsonparse": "^1.2.0", - "through": ">=2.2.7 <3" - }, - "bin": { - "JSONStream": "bin.js" - }, - "engines": { - "node": "*" - } - }, - "node_modules/kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/lines-and-columns": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.1.6.tgz", - "integrity": "sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA=", - "dev": true - }, - "node_modules/load-json-file": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz", - "integrity": "sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA=", - "dev": true, - "dependencies": { - "graceful-fs": "^4.1.2", - "parse-json": "^2.2.0", - "pify": "^2.0.0", - "pinkie-promise": "^2.0.0", - "strip-bom": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/load-json-file/node_modules/parse-json": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", - "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", - "dev": true, - "dependencies": { - "error-ex": "^1.2.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/locate-path": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", - "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", - "dev": true, - "dependencies": { - "p-locate": "^4.1.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/lodash": { - "version": "4.17.15", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", - "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", - "dev": true - }, - "node_modules/lodash._reinterpolate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz", - "integrity": "sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0=", - "dev": true - }, - "node_modules/lodash.ismatch": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/lodash.ismatch/-/lodash.ismatch-4.4.0.tgz", - "integrity": "sha1-dWy1FQyjum8RCFp4hJZF8Yj4Xzc=", - "dev": true - }, - "node_modules/lodash.template": { - "version": "4.5.0", - "resolved": 
"https://registry.npmjs.org/lodash.template/-/lodash.template-4.5.0.tgz", - "integrity": "sha512-84vYFxIkmidUiFxidA/KjjH9pAycqW+h980j7Fuz5qxRtO9pgB7MDFTdys1N7A5mcucRiDyEq4fusljItR1T/A==", - "dev": true, - "dependencies": { - "lodash._reinterpolate": "^3.0.0", - "lodash.templatesettings": "^4.0.0" - } - }, - "node_modules/lodash.templatesettings": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/lodash.templatesettings/-/lodash.templatesettings-4.2.0.tgz", - "integrity": "sha512-stgLz+i3Aa9mZgnjr/O+v9ruKZsPsndy7qPZOchbqk2cnTU1ZaldKK+v7m54WoKIyxiuMZTKT2H81F8BeAc3ZQ==", - "dev": true, - "dependencies": { - "lodash._reinterpolate": "^3.0.0" - } - }, - "node_modules/loud-rejection": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/loud-rejection/-/loud-rejection-1.6.0.tgz", - "integrity": "sha1-W0b4AUft7leIcPCG0Eghz5mOVR8=", - "dev": true, - "dependencies": { - "currently-unhandled": "^0.4.1", - "signal-exit": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/map-obj": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.1.0.tgz", - "integrity": "sha512-glc9y00wgtwcDmp7GaE/0b0OnxpNJsVf3ael/An6Fe2Q51LLwN1er6sdomLRzz5h0+yMpiYLhWYF5R7HeqVd4g==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/meow": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/meow/-/meow-7.0.1.tgz", - "integrity": "sha512-tBKIQqVrAHqwit0vfuFPY3LlzJYkEOFyKa3bPgxzNl6q/RtN8KQ+ALYEASYuFayzSAsjlhXj/JZ10rH85Q6TUw==", - "dev": true, - "dependencies": { - "@types/minimist": "^1.2.0", - "arrify": "^2.0.1", - "camelcase": "^6.0.0", - "camelcase-keys": "^6.2.2", - "decamelize-keys": "^1.1.0", - "hard-rejection": "^2.1.0", - "minimist-options": "^4.0.2", - "normalize-package-data": "^2.5.0", - "read-pkg-up": "^7.0.1", - "redent": "^3.0.0", - "trim-newlines": "^3.0.0", - "type-fest": "^0.13.1", - "yargs-parser": "^18.1.3" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/meow/node_modules/read-pkg": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", - "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==", - "dev": true, - "dependencies": { - "@types/normalize-package-data": "^2.4.0", - "normalize-package-data": "^2.5.0", - "parse-json": "^5.0.0", - "type-fest": "^0.6.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/meow/node_modules/read-pkg-up": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz", - "integrity": "sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==", - "dev": true, - "dependencies": { - "find-up": "^4.1.0", - "read-pkg": "^5.2.0", - "type-fest": "^0.8.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/meow/node_modules/read-pkg-up/node_modules/type-fest": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", - "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/meow/node_modules/read-pkg/node_modules/type-fest": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz", - "integrity": 
"sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/min-indent": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", - "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", - "dev": true, - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", - "dev": true - }, - "node_modules/minimist-options": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz", - "integrity": "sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==", - "dev": true, - "dependencies": { - "arrify": "^1.0.1", - "is-plain-obj": "^1.1.0", - "kind-of": "^6.0.3" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/minimist-options/node_modules/arrify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/modify-values": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/modify-values/-/modify-values-1.0.1.tgz", - "integrity": "sha512-xV2bxeN6F7oYjZWTe/YPAy6MN2M+sL4u/Rlm2AHCIVGfo2p1yGmBHQ6vHehl4bRTZBdHu3TSkWdYgkwpYzAGSw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/neo-async": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.1.tgz", - "integrity": "sha512-iyam8fBuCUpWeKPGpaNMetEocMt364qkCsfL9JuhjXX6dRnguRVOfk2GZaDpPjcOKiiXCPINZC1GczQ7iTq3Zw==", - "dev": true - }, - "node_modules/normalize-package-data": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", - "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", - "dev": true, - "dependencies": { - "hosted-git-info": "^2.1.4", - "resolve": "^1.10.0", - "semver": "2 || 3 || 4 || 5", - "validate-npm-package-license": "^3.0.1" - } - }, - "node_modules/normalize-package-data/node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true, - "bin": { - "semver": "bin/semver" - } - }, - "node_modules/null-check": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/null-check/-/null-check-1.0.0.tgz", - "integrity": "sha1-l33/1xdgErnsMNKjnbXPcqBDnt0=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/number-is-nan": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", - "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", - "dev": true, - 
"engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "dev": true, - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", - "dev": true, - "dependencies": { - "p-try": "^2.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-locate": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", - "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", - "dev": true, - "dependencies": { - "p-limit": "^2.2.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/parse-github-repo-url": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/parse-github-repo-url/-/parse-github-repo-url-1.4.1.tgz", - "integrity": "sha1-nn2LslKmy2ukJZUGC3v23z28H1A=", - "dev": true - }, - "node_modules/parse-json": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.0.0.tgz", - "integrity": "sha512-OOY5b7PAEFV0E2Fir1KOkxchnZNCdowAJgQ5NuxjpBKTRP3pQhwkrkxqQjeoKJ+fO7bCpmIZaogI4eZGDMEGOw==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.0.0", - "error-ex": "^1.3.1", - "json-parse-better-errors": "^1.0.1", - "lines-and-columns": "^1.1.6" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/path-exists": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/path-parse": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", - "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==", - "dev": true - }, - "node_modules/path-type": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-1.1.0.tgz", - "integrity": "sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE=", - "dev": true, - "dependencies": { - "graceful-fs": "^4.1.2", - "pify": "^2.0.0", - "pinkie-promise": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": 
"sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/pinkie": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", - "integrity": "sha1-clVrgM+g1IqXToDnckjoDtT3+HA=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/pinkie-promise": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", - "integrity": "sha1-ITXW36ejWMBprJsXh3YogihFD/o=", - "dev": true, - "dependencies": { - "pinkie": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/process-nextick-args": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", - "dev": true - }, - "node_modules/q": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/q/-/q-1.5.1.tgz", - "integrity": "sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc=", - "deprecated": "You or someone you depend on is using Q, the JavaScript Promise library that gave JavaScript developers strong feelings about promises. They can almost certainly migrate to the native JavaScript promise now. Thank you literally everyone for joining me in this bet against the odds. Be excellent to each other.\n\n(For a CapTP with native promises, see @endo/eventual-send and @endo/captp)", - "dev": true, - "engines": { - "node": ">=0.6.0", - "teleport": ">=0.2.0" - } - }, - "node_modules/quick-lru": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", - "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/read-pkg": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", - "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", - "dev": true, - "dependencies": { - "load-json-file": "^4.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/read-pkg-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-3.0.0.tgz", - "integrity": "sha1-PtSWaF26D4/hGNBpHcUfSh/5bwc=", - "dev": true, - "dependencies": { - "find-up": "^2.0.0", - "read-pkg": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/read-pkg-up/node_modules/find-up": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", - "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", - "dev": true, - "dependencies": { - "locate-path": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/read-pkg-up/node_modules/locate-path": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", - "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", - "dev": true, - "dependencies": { - "p-locate": "^2.0.0", - "path-exists": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/read-pkg-up/node_modules/p-limit": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", - "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", - "dev": true, - "dependencies": { - "p-try": "^1.0.0" - }, - "engines": { - "node": ">=4" - } - }, - 
"node_modules/read-pkg-up/node_modules/p-locate": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", - "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", - "dev": true, - "dependencies": { - "p-limit": "^1.1.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/read-pkg-up/node_modules/p-try": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/read-pkg-up/node_modules/path-exists": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/read-pkg/node_modules/load-json-file": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", - "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", - "dev": true, - "dependencies": { - "graceful-fs": "^4.1.2", - "parse-json": "^4.0.0", - "pify": "^3.0.0", - "strip-bom": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/read-pkg/node_modules/parse-json": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", - "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", - "dev": true, - "dependencies": { - "error-ex": "^1.3.1", - "json-parse-better-errors": "^1.0.1" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/read-pkg/node_modules/path-type": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", - "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", - "dev": true, - "dependencies": { - "pify": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/read-pkg/node_modules/pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/read-pkg/node_modules/strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "dev": true, - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/rechoir": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", - "integrity": "sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q=", - "dev": true, - "dependencies": { - "resolve": "^1.1.6" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/redent": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", - "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", - "dev": true, - "dependencies": { - "indent-string": "^4.0.0", - "strip-indent": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/repeating": { - "version": "2.0.1", - 
"resolved": "https://registry.npmjs.org/repeating/-/repeating-2.0.1.tgz", - "integrity": "sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo=", - "dev": true, - "dependencies": { - "is-finite": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/require-directory": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/require-main-filename": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", - "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", - "dev": true - }, - "node_modules/resolve": { - "version": "1.17.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.17.0.tgz", - "integrity": "sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w==", - "dev": true, - "dependencies": { - "path-parse": "^1.0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/semver": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.1.1.tgz", - "integrity": "sha512-WfuG+fl6eh3eZ2qAf6goB7nhiCd7NPXhmyFxigB/TOkQyeLP8w8GsVehvtGNtnNmyboz4TgeK40B1Kbql/8c5A==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/set-blocking": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", - "dev": true - }, - "node_modules/shelljs": { - "version": "0.8.4", - "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.4.tgz", - "integrity": "sha512-7gk3UZ9kOfPLIAbslLzyWeGiEqx9e3rxwZM0KE6EL8GlGwjym9Mrlx5/p33bWTu9YG6vcS4MBxYZDHYr5lr8BQ==", - "dev": true, - "dependencies": { - "glob": "^7.0.0", - "interpret": "^1.0.0", - "rechoir": "^0.6.2" - }, - "bin": { - "shjs": "bin/shjs" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/signal-exit": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", - "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==", - "dev": true - }, - "node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/spdx-correct": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz", - "integrity": "sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==", - "dev": true, - "dependencies": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": 
"^3.0.0" - } - }, - "node_modules/spdx-exceptions": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", - "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==", - "dev": true - }, - "node_modules/spdx-expression-parse": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", - "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", - "dev": true, - "dependencies": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-license-ids": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz", - "integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==", - "dev": true - }, - "node_modules/split": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", - "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", - "dev": true, - "dependencies": { - "through": "2" - }, - "engines": { - "node": "*" - } - }, - "node_modules/split2": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/split2/-/split2-2.2.0.tgz", - "integrity": "sha512-RAb22TG39LhI31MbreBgIuKiIKhVsawfTgEGqKHTK87aG+ul/PB8Sqoi3I7kVdRWiCfrKxK3uo4/YUkpNvhPbw==", - "dev": true, - "dependencies": { - "through2": "^2.0.2" - } - }, - "node_modules/split2/node_modules/readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "dev": true, - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/split2/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - }, - "node_modules/split2/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, - "node_modules/split2/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "dev": true, - "dependencies": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - } - }, - "node_modules/standard-version": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/standard-version/-/standard-version-8.0.0.tgz", - "integrity": "sha512-cS/U9yhYPHfyokFce6e/H3U8MaKwZKSGzH25J776sChrae/doDQjsl3vCQ0hW1MSzdrUTb7pir4ApjnbDt/TAg==", - "deprecated": "standard-version is deprecated. 
If you're a GitHub user, I recommend https://github.com/googleapis/release-please as an alternative.", - "dev": true, - "dependencies": { - "chalk": "2.4.2", - "conventional-changelog": "3.1.18", - "conventional-changelog-config-spec": "2.1.0", - "conventional-changelog-conventionalcommits": "4.2.3", - "conventional-recommended-bump": "6.0.5", - "detect-indent": "6.0.0", - "detect-newline": "3.1.0", - "dotgitignore": "2.1.0", - "figures": "3.1.0", - "find-up": "4.1.0", - "fs-access": "1.0.1", - "git-semver-tags": "3.0.1", - "semver": "7.1.1", - "stringify-package": "1.0.1", - "yargs": "15.3.1" - }, - "bin": { - "standard-version": "bin/cli.js" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "dev": true, - "dependencies": { - "safe-buffer": "~5.2.0" - } - }, - "node_modules/string-width": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", - "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", - "dev": true, - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/stringify-package": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/stringify-package/-/stringify-package-1.0.1.tgz", - "integrity": "sha512-sa4DUQsYciMP1xhKWGuFM04fB0LG/9DlluZoSVywUMRNvzid6XucHK0/90xGxRoHrAaROrcHK1aPKaijCtSrhg==", - "deprecated": "This module is not used anymore, and has been replaced by @npmcli/package-json", - "dev": true - }, - "node_modules/strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-bom": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz", - "integrity": "sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4=", - "dev": true, - "dependencies": { - "is-utf8": "^0.2.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/strip-indent": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", - "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", - "dev": true, - "dependencies": { - "min-indent": "^1.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/text-extensions": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/text-extensions/-/text-extensions-1.9.0.tgz", - "integrity": "sha512-wiBrwC1EhBelW12Zy26JeOUkQ5mRu+5o8rpsJk5+2t+Y5vE7e842qtZDQ2g1NpX/29HdyFeJ4nSIhI47ENSxlQ==", - "dev": true, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/through": { - "version": "2.3.8", - 
"resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", - "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=", - "dev": true - }, - "node_modules/through2": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.1.tgz", - "integrity": "sha512-M96dvTalPT3YbYLaKaCuwu+j06D/8Jfib0o/PxbVt6Amhv3dUAtW6rTV1jPgJSBG83I/e04Y6xkVdVhSRhi0ww==", - "dev": true, - "dependencies": { - "readable-stream": "2 || 3" - } - }, - "node_modules/trim-newlines": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.0.tgz", - "integrity": "sha512-C4+gOpvmxaSMKuEf9Qc134F1ZuOHVXKRbtEflf4NTtuuJDEIJ9p5PXsalL8SkeRw+qit1Mo+yuvMPAKwWg/1hA==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/trim-off-newlines": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/trim-off-newlines/-/trim-off-newlines-1.0.1.tgz", - "integrity": "sha1-n5up2e+odkw4dpi8v+sshI8RrbM=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/type-fest": { - "version": "0.13.1", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.13.1.tgz", - "integrity": "sha512-34R7HTnG0XIJcBSn5XhDd7nNFPRcXYRZrBB2O2jdKqYODldSzBAqzsWoZYYvduky73toYS/ESqxPvkDf/F0XMg==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/typedarray": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", - "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=", - "dev": true - }, - "node_modules/uglify-js": { - "version": "3.9.4", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.9.4.tgz", - "integrity": "sha512-8RZBJq5smLOa7KslsNsVcSH+KOXf1uDU8yqLeNuVKwmT0T3FA0ZoXlinQfRad7SDcbZZRZE4ov+2v71EnxNyCA==", - "dev": true, - "optional": true, - "dependencies": { - "commander": "~2.20.3" - }, - "bin": { - "uglifyjs": "bin/uglifyjs" - }, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", - "dev": true - }, - "node_modules/validate-npm-package-license": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", - "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", - "dev": true, - "dependencies": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" - } - }, - "node_modules/which-module": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", - "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", - "dev": true - }, - "node_modules/wordwrap": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", - "integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=", - "dev": true - }, - "node_modules/wrap-ansi": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", - "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi/node_modules/ansi-styles": { - "version": "4.2.1", - "resolved": 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", - "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", - "dev": true, - "dependencies": { - "@types/color-name": "^1.1.1", - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/wrap-ansi/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/wrap-ansi/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "dev": true - }, - "node_modules/xtend": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", - "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", - "dev": true, - "engines": { - "node": ">=0.4" - } - }, - "node_modules/y18n": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", - "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==", - "dev": true - }, - "node_modules/yargs": { - "version": "15.3.1", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.3.1.tgz", - "integrity": "sha512-92O1HWEjw27sBfgmXiixJWT5hRBp2eobqXicLtPBIDBhYB+1HpwZlXmbW2luivBJHBzki+7VyCLRtAkScbTBQA==", - "dev": true, - "dependencies": { - "cliui": "^6.0.0", - "decamelize": "^1.2.0", - "find-up": "^4.1.0", - "get-caller-file": "^2.0.1", - "require-directory": "^2.1.1", - "require-main-filename": "^2.0.0", - "set-blocking": "^2.0.0", - "string-width": "^4.2.0", - "which-module": "^2.0.0", - "y18n": "^4.0.0", - "yargs-parser": "^18.1.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/yargs-parser": { - "version": "18.1.3", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", - "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", - "dev": true, - "dependencies": { - "camelcase": "^5.0.0", - "decamelize": "^1.2.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/yargs-parser/node_modules/camelcase": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", - "dev": true, - "engines": { - "node": ">=6" - } - } - } -} diff --git a/package.json b/package.json deleted file mode 100644 index df6e7ab2a67..00000000000 --- a/package.json +++ /dev/null @@ -1,72 +0,0 @@ -{ - "name": "kubeflow-pipelines", - "private": true, - "description": "kubeflow pipelines package that only helps with release tooling", - "scripts": { - "changelog": "standard-version -t ''" - }, - "standard-version": { - "header": "# Changelog\n", - "types": [ - { - "type": "feat", 
- "section": "Features" - }, - { - "type": "fix", - "section": "Bug Fixes" - }, - { - "type": "perf", - "section": "Performance" - }, - { - "type": "", - "section": "Other Pull Requests" - }, - { - "type": "chore", - "hidden": true - }, - { - "type": "docs", - "hidden": true - }, - { - "type": "style", - "hidden": true - }, - { - "type": "refactor", - "hidden": true - }, - { - "type": "test", - "hidden": true - } - ], - "issuePrefixes": [ - "never-match-an-issue" - ], - "skip": { - "bump": true, - "tag": true, - "commit": true - }, - "packageFiles": [ - { - "filename": "./VERSION", - "type": "plain-text" - } - ] - }, - "author": "google", - "license": "Apache-2.0", - "repository": { - "type": "git", - "url": "https://github.com/kubeflow/pipelines.git" - }, - "devDependencies": { - "standard-version": "^8.0.0" - } -} diff --git a/proposals/11551-kubernetes-native-api/TestPlan.md b/proposals/11551-kubernetes-native-api/TestPlan.md new file mode 100644 index 00000000000..bc9e7ef0529 --- /dev/null +++ b/proposals/11551-kubernetes-native-api/TestPlan.md @@ -0,0 +1,54 @@ +## Feature Summary +Please refer [this](https://github.com/kubeflow/pipelines/blob/master/proposals/11551-kubernetes-native-api/README.md) for more details + +## Test Sections +### Migration of existing pipelines +#### With the migration script +Create few pipelines in DB mode and migrate it via the migration script + +| **Test Case** | **Test Steps** | **Expected Result** | +|--------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| (KFP DB mode) Create a pipeline and a pipeline version and run migrate script | | All (except ID) Pipeline and pipeline version fields under spec should match exactly where original ID is stored in a field called `pipelines.kubeflow.org/original-id` | +| (KFP DB mode) Create a pipeline and 2 pipeline versions with same spec and run migrate script | | All (except ID) Pipeline and pipeline versions fields under spec should match exactly where original ID is stored in a field called `pipelines.kubeflow.org/original-id` | +| (KFP DB mode) Create a pipeline and 2 pipeline versions with different specs and run migrate script | | All (except ID) Pipeline and pipeline versions fields under spec should match exactly where original ID is stored in a field called `pipelines.kubeflow.org/original-id` | +| (KFP DB mode) Create 2 pipelines and 1 pipeline version per pipeline spec and run migrate script | | All (except ID) Pipeline and pipeline versions fields under spec should match exactly where original ID is stored in a field called `pipelines.kubeflow.org/original-id` | +| (KFP DB mode) Create 2 pipeline and 2 pipeline versions with different specs and run migrate script | | All (except ID) Pipeline and pipeline versions fields under spec should match exactly where original ID is stored in a field called `pipelines.kubeflow.org/original-id` | +| (KFP DB mode) Create all pipelines and 1 pipeline version per pipeline spec under "data/pipeline_files/valid" and run migrate script | Run [Pipeline Upload API Tests](https://github.com/nsingla/kfp_pipelines/tree/pipeline_run_tests) without clean up and run migration | Confirm that the 
migration script does not error out and is able to migrate all pipelines and their versions | +| (K8s Mode) Create a pipeline in DB mode and switch to k8s mode and try creating the same pipeline without migration | | A new pipeline with the same name will be created, but what happens to existing runs associated with it, and what happens when we run the migration script? | +| (K8s Mode) Create a pipeline run in an experiment in DB mode and while the run is in progress switch to k8s mode | | The run should continue after migration | +| (K8s Mode) Create a pipeline recurring run in an experiment in DB mode and switch to k8s mode | | The run should still exist with the same cron settings | +| Create a pipeline run associated with an experiment in the **DB Mode**, migrate it and then rerun the run | | The pipeline run should be allowed and the experiment association (to the specific experiment) should stay in place | +| --------------After migration-------------------- | +| (K8s Mode) Try creating an existing pipeline and pipeline version | | The pipeline should not get created with a duplicate name | +| (K8s Mode) Run an existing pipeline and get run details and match | | | +| (K8s Mode) Run all migrated pipelines and match the run details | | | +| (K8s Mode) Create an experiment and a run | | | + +### New Pipelines +Create pipelines and pipeline versions and run pipelines with default and custom experiments. + +| **Test Case** | **Test Steps** | **Expected Result** | +|---------------|----------------|---------------------| +| (K8s mode) Create pipeline (via CR) and a pipeline version using CRs | Match the pipeline and pipeline version | Pipeline and Pipeline version creation should be successful | +| (K8s mode) Create pipeline (via CR) and 2 pipeline versions with same spec using CRs | Match the pipeline and pipeline versions | Pipeline and Pipeline versions creation should be successful | +| (K8s mode) Create pipeline (via CR) and 2 pipeline versions with different specs using CRs | Match the pipelines and pipeline versions | Pipeline and Pipeline versions creation should be successful | +| (K8s mode) Create pipeline and a pipeline version (via CR) with a different **pipelineName** than the one in pipelineSpecs | | Should this be allowed?
Will webhooks throw a validation error | +| (K8s mode) Create pipeline run (via API) of the pipeline created via CR | Get Run details and validate | Run should be successful | +| (K8s mode) Create pipeline (via API) and a pipeline version (via API) | Match the pipeline and pipeline version | Pipeline and Pipeline version creation should be successful | +| (K8s mode) Create pipeline run (via API) of the pipeline created via API in the above test | Get Run details and validate | Run should be successful | +| (K8s mode) Create an experiment and a pipeline run (via API) | Get Experiment and Run details and validate | Run should be associated to the correct experiment and should succeed | +| (K8s mode) Create pipeline (via UI) and a pipeline version using CRs | Match the pipeline and pipeline version | Pipeline and Pipeline version creation should be successful | +| (K8s mode) Create pipeline (via UI) and 2 pipeline versions with same spec using CRs | Match the pipeline and pipeline versions | Pipeline and Pipeline versions creation should be successful | +| (K8s mode) Create pipeline (via UI) and 2 pipeline versions with different specs using CRs | Match the pipelines and pipeline versions | Pipeline and Pipeline versions creation should be successful | +| (K8s mode) Create pipeline run (via UI) | Get Run details and validate | Run should be successful | +| (K8s mode) Create an experiment and a pipeline run (via UI) | Get Experiment and Run details and validate | Run should be associated to the correct experiment and should succeed | +| (KFP DB mode) Create all pipelines and 1 pipeline version per pipeline spec under "data/pipeline_files/valid" and run migrate script | Use pipelines defined in [Pipeline Upload API Tests](https://github.com/nsingla/kfp_pipelines/tree/pipeline_run_tests) | Confirm that you are able to create all pipelines and pipeline versions | + +### Cluster Config (in Kubeflow Mode) +| **Test Case** | **Test Steps** | **Expected Result** | +|-----------------------------------------------------------------------------------|-------------------------------------------------------|----------------------------------------------------------------------------------------------------| +| (K8s mode) Create pipeline and a pipeline version using CRs | Match the pipeline and pipeline version (DB vs K8s) | Pipeline and Pipeline version creation should be successful | +| (K8s mode) Create pipeline and 2 pipeline versions with same spec using CRs | Match the pipeline and pipeline versions (DB vs K8s) | Pipeline and Pipeline versions creation should be successful | +| (K8s mode) Create pipeline and 2 pipeline versions with different specs using CRs | Match the pipelines and pipeline versions (DB vs K8s) | Pipeline and Pipeline versions creation should be successful | +| (K8s mode) Create pipeline run | Get Run details and validate | Pipeline run should succeed | +| (K8s mode) Create an experiment and a pipeline run | Get Experiment and Run details and validate | Pipeline run should be correctly associated to the created experiment and run should be successful | \ No newline at end of file diff --git a/proposals/11875-pipeline-workspace/README.md b/proposals/11875-pipeline-workspace/README.md index 1c355aabc92..e4f856ddc40 100644 --- a/proposals/11875-pipeline-workspace/README.md +++ b/proposals/11875-pipeline-workspace/README.md @@ -509,7 +509,7 @@ kind: Workflow spec: volumeClaimTemplates: - metadata: - name: kfp-workspace-46f1d52e-c72b-42fc-88ae-789edf7c33fd # The suffix is the run ID + 
name: kfp-workspace spec: accessModes: - ReadWriteMany @@ -564,8 +564,9 @@ When an input artifact is from a workspace, an input parameter is `dsl.WORKSPACE parameter is a component output result that is a path to the workspace (e.g. starts with `/kfp-workspace`), the driver should set the volume mount of the KFP workspace in the `Pod` spec patch. -Lastly, the Driver should disallow user mounted volumes in or under `/kfp-workspace` as this could lead to confusing -behavior. +Lastly, the Driver should disallow user mounted volumes in or under `/kfp-workspace` as this could lead to confusing behavior. The validation should check for: +1. Volume mount paths that conflict with the workspace mount path (`/kfp-workspace`) +2. Volume mount names that conflict with the workspace volume name (`kfp-workspace`) #### Launcher diff --git a/proposals/12020-model-registry-integration/README.md b/proposals/12020-model-registry-integration/README.md new file mode 100644 index 00000000000..eeec1c26803 --- /dev/null +++ b/proposals/12020-model-registry-integration/README.md @@ -0,0 +1,399 @@ +# KEP-12020: Model Registry Integration for Kubeflow Pipelines + + +- [Summary](#summary) +- [Motivation](#motivation) + - [Current State and Limitations](#current-state-and-limitations) + - [Integration Benefits](#integration-benefits) + - [Current Workarounds](#current-workarounds) + - [Goals](#goals) + - [Non-Goals](#non-goals) +- [Proposal](#proposal) + - [SDK User Experience](#sdk-user-experience) + - [API Reference](#api-reference) + - [Backend Translation](#backend-translation) + - [Cross-Reference Metadata](#cross-reference-metadata) + - [Configuration Management](#configuration-management) + - [User Stories](#user-stories) + - [Story 1](#story-1) + - [Story 2](#story-2) + - [Risks and Mitigations](#risks-and-mitigations) +- [Design Details](#design-details) + - [Architecture Overview](#architecture-overview) + - [Implementation Notes](#implementation-notes) + - [Metadata Handling](#metadata-handling) + - [Launcher Processing](#launcher-processing) + - [API Design](#api-design) + - [Model Registry Request Structure](#model-registry-request-structure) + - [Security Considerations](#security-considerations) +- [Test Plan](#test-plan) + - [Unit Tests](#unit-tests) + - [Configuration Tests](#configuration-tests) + - [SDK Tests](#sdk-tests) + - [Integration Tests](#integration-tests) + - [Successful Scenarios](#successful-scenarios) + - [Error Scenarios](#error-scenarios) + - [Graduation Criteria](#graduation-criteria) +- [Implementation History](#implementation-history) +- [Drawbacks](#drawbacks) +- [Alternatives](#alternatives) + - [Alternative 1: Direct SDK Integration](#alternative-1-direct-sdk-integration) + + +## Summary + +The [Kubeflow Model Registry](https://www.kubeflow.org/docs/components/model-registry/) serves as the centralized +metadata store for the Kubeflow ecosystem, providing comprehensive model cataloging, versioning, and discovery +capabilities. This proposal introduces seamless integration between Kubeflow Pipelines (KFP) and Model Registry, +enabling automated model registration as part of pipeline execution workflows. + +The integration abstracts away connection details, authentication mechanisms, and Model Registry-specific APIs, +providing data scientists with a simple interface for model registration through the KFP SDK. This enhancement bridges +the gap between KFP's artifact storage and Model Registry's cataloging capabilities, creating a unified model management +experience. 
+ +## Motivation + +### Current State and Limitations + +Kubeflow Pipelines currently maintains its own artifact store for pipeline outputs, which provides basic storage +functionality but lacks advanced cataloging and versioning features. Models can only be added to this store through: + +1. **Pipeline execution outputs** (e.g., `dsl.Output[dsl.Model]`) +2. **Importer components** for external model registration + +This approach has several limitations: + +- **No centralized cataloging**: Models are scattered across pipeline runs without unified discovery +- **Limited versioning**: No structured version management or lineage tracking +- **Tool-specific isolation**: Models created in KFP are not discoverable by other Kubeflow components +- **Manual registration overhead**: Users must manually register models outside of pipeline workflows + +### Integration Benefits + +By enabling seamless Model Registry integration, users will experience: + +- **Centralized model catalog**: Single source of truth for all models across the Kubeflow ecosystem +- **Versioning**: Structured version management with lineage tracking +- **Cross-tool discovery**: Models become discoverable by other Kubeflow components (e.g. KServe) +- **Simplified workflows**: One-line model registration within pipeline components +- **Enhanced governance**: Better model lifecycle management and compliance tracking + +### Current Workarounds + +Without this integration, users must implement complex workarounds: + +1. Add Model Registry infrastructure details in the pipeline (e.g. URLs) through hardcoded values, pipeline input + parameters, or mounting a Kubernetes `ConfigMap`/`Secret`. +1. Mount a Kubernetes `Secret` to access the token. +1. Know the KFP standards for registering models in Model Registry (e.g. `model_source_kind="kfp"`). + +### Goals + +1. **Seamless SDK Integration**: Provide a simple, API for model registration within KFP components +2. **Infrastructure Abstraction**: Hide Model Registry connection details and authentication from user code +3. **Standardized Metadata**: Automatically populate KFP-specific metadata for proper model lineage tracking +4. **Error Resilience**: Provide configurable error handling to prevent pipeline failures due to registration issues +5. **Multi-tenancy Support**: Enable namespace-specific configuration for isolated deployments + +### Non-Goals + +1. Implement KFP-specific RBAC controls over Model Registry APIs, such as model allowlists for version registration. 
+ +## Proposal + +### SDK User Experience + +The proposed integration introduces a `register()` method on KFP Model artifacts, providing a clean interface for model +registration: + +```python +@dsl.component() +def train_model( + model: dsl.Output[dsl.Model], +): + # Training logic + with open(model.path, "r") as model_file: + print("Training the model...") + + # Set model metadata + model.name = "my-model-v1.0.0" + model.metadata["training_epochs"] = 100 + model.metadata["accuracy"] = 0.95 + + # Register model with Model Registry + model.register( + model_name="sentiment-classifier", + description="BERT-based sentiment classification model", + model_format_name="vLLM", + model_format_version=None, + owner="ml-team", + author="data-scientist@company.com", + continue_on_error=True, # Default: True + ) +``` + +#### API Reference + +The `model.register()` method accepts the following parameters: + +| Parameter | Type | Required | Default | Description | +| ---------------------- | ---- | -------- | -------------------- | ---------------------------------------------------- | +| `model_name` | str | Yes | - | Name of the model in Model Registry | +| `description` | str | No | "" | Human-readable model description | +| `model_format_name` | str | No | None | Model format (e.g., "PyTorch", "TensorFlow", "ONNX") | +| `model_format_version` | str | No | None | Version of the model format | +| `owner` | str | No | "Kubeflow Pipelines" | Model owner/team | +| `author` | str | No | "Kubeflow Pipelines" | Model author | +| `continue_on_error` | bool | No | True | Whether to fail pipeline on registration error | + +#### Backend Translation + +The SDK call translates to the following Model Registry API invocation: + +```python +# Equivalent Model Registry client call +registered_model = registry.register_model( + name="sentiment-classifier", + description="BERT-based sentiment classification model", + owner="ml-team", + author="data-scientist@company.com", + version="my-model-v1.0.0", # model.name + uri="s3://kfp-artifacts/run-123/model", # model.uri + metadata={ + "training_epochs": 100, + "accuracy": 0.95 + }, # model.metadata + model_format_name="vLLM", + model_format_version=None, + model_source_id="b6a9dde3-1647-463f-aeb8-5800089c84e8", # Pipeline run ID + model_source_name="sentiment-training-pipeline", # Pipeline run name + model_source_class="pipelinerun", # KFP-specific identifier + model_source_kind="kfp", # KFP-specific identifier + model_source_group="ml-team", # Pipeline namespace +) +``` + +#### Cross-Reference Metadata + +After successful registration, KFP adds metadata to the model artifact for UI cross-referencing. This is a list/array +since multiple pipeline runs could register the same model. + +```python +model.metadata["registered_models"] = [{ + "modelName": "sentiment-classifier", + "versionName": "my-model-v1.0.0", + "versionID": 42, + "modelID": 15, + "modelRegistryURL": "https://model-registry.example.com:8443/models/15/versions/42", +}] +``` + +The KFP UI should prominently display the model versions and link to the Model Registry UI when viewing the model's +details. + +### Configuration Management + +Extend the existing `kfp-launcher` ConfigMap to include Model Registry configuration: + +```yaml +apiVersion: v1 +kind: ConfigMap +metadata: + name: kfp-launcher + namespace: kubeflow +data: + # Existing configuration... 
+ pipelineRoot: "s3://kfp-artifacts" + + # New Model Registry configuration + modelRegistry: | + url: https://model-registry.example.com:8443 + tokenSecretRef: # Follows the same key names for existing configurations + secretName: model-registry-auth + secretNamespace: model-registry-system # Defaults to current namespace + tokenKey: token + caConfigMapRef: # Optional TLS certificate + configMapName: model-registry-ca-bundle + configMapNamespace: model-registry-system + key: ca-bundle.crt + timeout: 30s # HTTP timeout for registration requests + retryAttempts: 3 # Number of retry attempts on failure +``` + +### User Stories + +#### Story 1 + +As a data scientist, I would like to register my model to Model Registry in a pipeline without knowing the underlying +Model Registry infrastructure and APIs, so that I can easily track model versions, share models with my team, and +maintain a centralized model catalog as part of my automated ML workflows. + +#### Story 2 + +As a data scientist, I would like to access a centralized model catalog that is independent of the specific tool used to +create the models, enabling easy discovery and simplified model management across different ML workflows. + +### Risks and Mitigations + +The Model Registry API doesn't have granular RBAC so the `pipeline-runner` service account has full access to the Model +Registry API. + +## Design Details + +### Architecture Overview + +``` +┌─────────────────┠┌──────────────────┠┌─────────────────┠+│ KFP Pipeline │ │ KFP Launcher │ │ Model Registry │ +│ │ │ │ │ │ +│ model.register()│───▶│ Extract metadata │───▶│ API Server │ +│ │ │ Register model │ │ │ +└─────────────────┘ └──────────────────┘ └─────────────────┘ + ▲ + │ + ┌───────────────────┠+ │ kfp-launcher │ + │ ConfigMap │ + │ │ + └───────────────────┘ +``` + +### Implementation Notes + +#### Metadata Handling + +The `model.register()` call sets a special metadata field that the launcher processes: + +```python +# SDK sets this metadata field +model.metadata["_kfp_model_registry_request"] = json.dumps({ + "model_name": "sentiment-classifier", + "description": "BERT-based sentiment classification model", + "model_format_name": "vLLM", + "model_format_version": None, + "owner": "ml-team", + "author": "data-scientist@company.com", + "continue_on_error": True +}) +``` + +#### Launcher Processing + +In `backend/src/v2/component/launcher_v2.go`, here is some sample code to illustrate the potential flow: + +```go +// Before uploadOutputArtifacts +var modelRegistryRequests []ModelRegistryRequest +for _, artifact := range outputArtifacts { + if request, exists := artifact.Metadata["_kfp_model_registry_request"]; exists { + var req ModelRegistryRequest + if err := json.Unmarshal([]byte(request), &req); err == nil { + modelRegistryRequests = append(modelRegistryRequests, req) + } + // Remove from metadata to avoid MLMD storage + delete(artifact.Metadata, "_kfp_model_registry_request") + } +} + +// After uploadOutputArtifacts +for _, req := range modelRegistryRequests { + if err := registerModelInRegistry(req, artifact); err != nil { + if !req.ContinueOnError { + return fmt.Errorf("model registration failed: %w", err) + } + log.Warnf("Model registration failed (continuing): %v", err) + } +} +``` + +### API Design + +#### Model Registry Request Structure + +```go +type ModelRegistryRequest struct { + ModelName string `json:"model_name"` + Description string `json:"description,omitempty"` + ModelFormatName string `json:"model_format_name,omitempty"` + ModelFormatVersion string 
`json:"model_format_version,omitempty"` + Owner string `json:"owner,omitempty"` + Author string `json:"author,omitempty"` + ContinueOnError bool `json:"continue_on_error,omitempty"` +} + +type ModelRegistryConfig struct { + URL string `json:"url"` + TokenSecretRef TokenSecretReference `json:"tokenSecretRef"` + CAConfigMapRef *CAConfigMapReference `json:"caConfigMapRef,omitempty"` + Timeout string `json:"timeout,omitempty"` + RetryAttempts int `json:"retryAttempts,omitempty"` + TLSVerify *bool `json:"tlsVerify,omitempty"` +} + +type TokenSecretReference struct { + SecretName string `json:"secretName"` + SecretNamespace string `json:"secretNamespace,omitempty"` + TokenKey string `json:"tokenKey"` +} +``` + +### Security Considerations + +1. **Token Management**: Authentication tokens stored in Kubernetes secrets +2. **Network Security**: TLS certificate validation for Model Registry connections +3. **Namespace Isolation**: Configuration scoped to individual namespaces +4. **Audit Logging**: All registration attempts are logged +5. **Input Validation**: Limits the fields that can be set by a user + +## Test Plan + +### Unit Tests + +#### Configuration Tests + +- Valid configuration parsing +- Invalid configuration error handling +- Default value application + +#### SDK Tests + +- `model.register()` method validation +- Parameter validation and defaults +- Metadata serialization +- Error handling in SDK + +### Integration Tests + +#### Successful Scenarios + +- Model registration with minimal configuration +- Model registration with full metadata + +#### Error Scenarios + +- Model Registry API unavailable +- Invalid authentication token +- Invalid model metadata +- Duplicate model version registration + +### Graduation Criteria + +N/A + +## Implementation History + +- Initial proposal: 2025-06-27 + +## Drawbacks + +1. **Configuration Overhead**: Requires per-namespace configuration, though this enables proper multi-tenancy +2. **Dependency on Model Registry**: Creates dependency on external Model Registry service availability +3. **API Coupling**: Tight coupling to Model Registry API version and structure + +## Alternatives + +### Alternative 1: Direct SDK Integration + +Instead of launcher-based registration, implement direct Model Registry client integration in the SDK. This would expose +infrastructure details to user code and require authentication handling in components. diff --git a/proposals/12147-mlmd-removal/README.md b/proposals/12147-mlmd-removal/README.md new file mode 100644 index 00000000000..dc59fc4c399 --- /dev/null +++ b/proposals/12147-mlmd-removal/README.md @@ -0,0 +1,62 @@ +# Kubeflow Pipelines ML-Metadata (MLMD) Removal Proposal + +This proposal outlines a comprehensive plan to remove ML-Metadata (MLMD) from Kubeflow Pipelines (KFP) v2 and replace its functionality with native KFP API server capabilities. The goal is to simplify the architecture, reduce dependencies, improve maintainability, and provide better integration with existing KFP infrastructure. + +## Motivation + +KFP v2 currently uses ML-Metadata (MLMD) to track pipeline executions, store artifacts and metadata, manage execution parameters and metrics, and provide lineage tracking and caching capabilities. 
+ +However, MLMD introduces significant operational and architectural challenges: + +**Operational Complexity:** +- Additional services to deploy and maintain +- Complex KFP-MLMD integration points +- Difficult debugging and troubleshooting +- Lack of control over a key component of KFP + +**Technical Limitations:** +- Separate database schema misaligned with KFP's native structure +- Limited control over metadata storage and querying +- Lack of multi-tenancy support for artifacts +- Blocks MySQL upgrades beyond 8.x and PostgreSQL support +- MLMD project is in maintenance mode and no longer actively maintained, leading to stagnated development and bug fixes + +Removing MLMD and implementing native metadata management will eliminate these pain points while maintaining full functionality, resulting in a simpler, more maintainable system. + +## User Stories + +- As a Data Scientist, I want to track my pipeline executions and their states without relying on external services, so I can have a simpler and more reliable system. +- As an ML Engineer, I want to store and retrieve input/output artifacts and their metadata directly through KFP API, so I can have better control and visibility over my pipeline data. +- As a KFP Admin, I want to reduce the number of services I need to maintain, so I can improve system reliability and reduce operational overhead. +- As a KFP Admin, I want to ensure that artifact endpoints are properly secured and authenticated, so I can maintain data privacy and prevent unauthorized access. +- As a Developer, I want to have a simplified architecture with fewer integration points, so I can more easily debug and troubleshoot pipeline issues. + +## Risks and Mitigations + +1. Data Migration Risk + - Risk: Loss or corruption of existing MLMD data during migration + - Mitigation: Implement robust data migration tools with validation, and backups before migration +2. Feature Parity + - Risk: Missing or incomplete implementation of current MLMD functionality + - Mitigation: Comprehensive feature audit, thorough testing plan, and regression testing + +## Design Details + +The design details can be found [here](design-details.md). + +## Delivery Plan + +* Add the proto files, tables, and gorm model changes +* Add API Server Logic +* Start adding all _post_ server logic in Driver and Launcher (alongside MLMD) +* Update the UI to start reading from the API server, removing MLMD logic from frontend +* Update resolve/input logic to use Task details and remove all MLMD invocations from the backend +* Remove MLMD writer and associated CI +* Remove MLMD logic from Driver/Launcher, and add Migration logic +* Remove MLMD from manifests + +## Conclusion + +Removing MLMD from KFP will significantly simplify the architecture while maintaining full functionality. The proposed approach provides a clear migration path with minimal risk and substantial long-term benefits. The phased implementation ensures careful validation at each step while allowing for early feedback and course correction. + +This migration will result in a more maintainable, performant, and operationally simple KFP deployment that better serves the needs of ML practitioners and platform operators. 
diff --git a/proposals/12147-mlmd-removal/design-details.md b/proposals/12147-mlmd-removal/design-details.md new file mode 100644 index 00000000000..85a56ca4e50 --- /dev/null +++ b/proposals/12147-mlmd-removal/design-details.md @@ -0,0 +1,479 @@ +## Design Details + +### New KFP Database Schema + +See [schema_changes.sql] for the database schema additions and changes. + +Note that a `Task` is a db model for a task node type as viewed in the Run Graph of the UI. + +Note also that we will be dropping the `Task` table that exists today and recreating it. This is because it is rarely used within KFP, and where it is used, it is unnecessary (i.e. caching). This will require a migration strategy, addressed later in the proposal. + +[schema_changes.sql]: ./schema_changes.sql + +### KFP Server API + +The KFP APIServer will now handle Artifacts, DAGs, input resolution, and other responsibilities previously managed by MLMD. + +The Artifact service changes are detailed in [artifacts.proto]. The Driver and Launcher will introduce a +`v2beta1.ArtifactServiceClient` to interact with this API. + +For the Driver and Launcher, a `v2beta1.RunServiceClient` obtained via `NewRunServiceClient()` in `backend/api/v2beta1` +will replace the MLMD client. + +The additions to the RunService client are documented in [runs.proto]. + +An example of the updated run response format can be found in [runs.json]. + +[artifacts.proto]: ./protos/artifacts.proto +[runs.proto]: ./protos/runs.proto +[runs.json]: ./protos/runs.json + +### MLMD Client replacement + +This section explains how we will replace the MLMD client with new KFP Server API functionality in Driver/Launcher code. + +The key changes and replacements are outlined below. +```go +package metadata + +type DAG struct { + Execution *Execution +} +type Execution struct { + execution *pb.Execution + pipeline *Pipeline +} +// A pipeline context contains: create/update time, namespace, pipeline_root +// The pipelineCtx represents a Pipeline (not PipelineVersion) and is created once per pipeline +// This struct is primarily used for creating execution Associations and can most likely be discarded +type Pipeline struct { + pipelineCtx *pb.Context + pipelineRunCtx *pb.Context +} + +// These can be replaced with Artifacts from KFP artifacts.proto +type InputArtifact struct { + Artifact *pb.Artifact +} +type OutputArtifact struct { + Name string + Artifact *pb.Artifact + Schema string +} + +// GetPipeline actually gets or creates a context if it doesn't already exist for this pipeline and pipelineRun (one context each) +func (c *Client) GetPipeline(ctx context.Context, pipelineName, runID, namespace, runResource, pipelineRoot, storeSessionInfo string) (*Pipeline, error) +func (c *Client) GetDAG(ctx context.Context, executionID int64) (*DAG, error) +func (c *Client) PublishExecution(ctx context.Context, execution *Execution, outputParameters map[string]*structpb.Value, outputArtifacts []*OutputArtifact, state pb.Execution_State) error +func (c *Client) CreateExecution(ctx context.Context, pipeline *Pipeline, config *ExecutionConfig) (*Execution, error) +// Creates execution, updating it with pod and status info +func (c *Client) PrePublishExecution(ctx context.Context, execution *Execution, config *ExecutionConfig) (*Execution, error) +func (c *Client) UpdateDAGExecutionsState(ctx context.Context, dag *DAG, pipeline *Pipeline) error +func (c *Client) PutDAGExecutionState(ctx context.Context, executionID int64, state pb.Execution_State) error +func (c *Client) 
GetExecutions(ctx context.Context, ids []int64) ([]*pb.Execution, error) +func (c *Client) GetExecution(ctx context.Context, id int64) (*Execution, error) +func (c *Client) GetPipelineFromExecution(ctx context.Context, id int64) (*Pipeline, error) +func (c *Client) GetExecutionsInDAG(ctx context.Context, dag *DAG, pipeline *Pipeline, filter bool) (executionsMap map[string]*Execution, err error) + +func (c *Client) GetEventsByArtifactIDs(ctx context.Context, artifactIds []int64) ([]*pb.Event, error) +func (c *Client) GetArtifactName(ctx context.Context, artifactId int64) (string, error) // Not used +func (c *Client) GetArtifacts(ctx context.Context, ids []int64) ([]*pb.Artifact, error) +func (c *Client) GetOutputArtifactsByExecutionId(ctx context.Context, executionId int64) (map[string]*OutputArtifact, error) +func (c *Client) GetInputArtifactsByExecutionID(ctx context.Context, executionID int64) (inputs map[string]*pipelinespec.ArtifactList, err error) +func (c *Client) RecordArtifact(ctx context.Context, outputName, schema string, runtimeArtifact *pipelinespec.RuntimeArtifact, state pb.Artifact_State, bucketConfig *objectstore.Config) (*OutputArtifact, error) +func (c *Client) GetOrInsertArtifactType(ctx context.Context, schema string) (typeID int64, err error) +func (c *Client) FindMatchedArtifact(ctx context.Context, artifactToMatch *pb.Artifact, pipelineContextId int64) (matchedArtifact *pb.Artifact, err error) +``` + +These will be replaced by calls to v2beta1.RunService instead: + +```go +package run_client + +// Replaces GetPipeline, additionally we will need to pass experiment ID to the Driver/Launcher +// It also replaces GetPipelineFromExecution, since Tasks have a RunID +func (c *RunServerClient) GetRun(ctx, runID, experimentID) (*apiv2beta1.Run, error) +// Replaces GetDAG (filter on task type), GetExecutions, GetExecution +func (c *RunServerClient) GetTask(ctx, taskID) (*apiv2beta1.PipelineTaskDetail, error) // uses GetTask() in RunsAPI +func (c *RunServerClient) GetTasks(ctx, taskID) ([]*apiv2beta1.PipelineTaskDetail, error) // uses ListTasks() in RunsAPI + +// Replaces PublishExecution, CreateExecution, PrePublishExecution, UpdateDAGExecutionsState +func (c *RunServerClient) CreateTask(ctx context.Context, task apiv2beta1.PipelineTaskDetail) (*apiv2beta1.PipelineTaskDetail, error) +func (c *RunServerClient) UpdateTask(ctx context.Context, task apiv2beta1.PipelineTaskDetail) (*apiv2beta1.PipelineTaskDetail, error) + +// Replaces GetExecutionsInDAG +// Queries Run API's ListTasks() with run_id field +func (c *RunServerClient) GetChildTasks(ctx context.Context, task apiv2beta1.PipelineTaskDetail) (map[string]*apiv2beta1.PipelineTaskDetail, error) +``` + +In a similar manner, the v2beta1 ArtifactService can be used to implement the following: + +* `GetEventsByArtifactIDs` -> `GetArtifactTasks`, queries `ListArtifactTasks` +* `GetArtifacts` -> `ListArtifacts` +* `RecordArtifact` -> `CreateArtifact` +* `GetOutputArtifactsByExecutionId` -> `GetOutArtifactsByTaskID`, queries `ListArtifactTasks` and `ListArtifacts` +* `GetInputArtifactsByExecutionID` -> `GetInputArtifactsByTaskID`, queries `ListArtifactTasks` and `ListArtifacts` +* `GetOrInsertArtifactType` -> Use a combination of `GetArtifact` and `UpdateArtifact` +* `FindMatchedArtifact` -> Use `ListArtifacts` with `uri` filter + +### Driver changes + +Various portions of the Driver require adjustments to transition away from MLMD. The key change involves transitioning from creating Executions to creating Tasks. 
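+
+As a rough illustration of the shape this takes, the sketch below shows a container driver recording its work as a task through the proposed RunService rather than as an MLMD execution. The generated v2beta1 client does not exist yet, so placeholder types are used, and all names here are assumptions based on this proposal rather than final API surface:
+
+```go
+package driver
+
+import "context"
+
+// Placeholder stand-ins for the proposed v2beta1 PipelineTaskDetail message and
+// RunService client; field and method names mirror this proposal but are assumptions.
+type PipelineTaskDetail struct {
+	TaskID       string
+	RunID        string
+	ParentTaskID string
+	Name         string
+	Type         string // e.g. "RUNTIME", "LOOP", "CONDITION", ...
+}
+
+type RunServiceClient interface {
+	CreateTask(ctx context.Context, task *PipelineTaskDetail) (*PipelineTaskDetail, error)
+}
+
+// createRuntimeTask sketches the replacement for metadata.Client.CreateExecution:
+// the container driver records a RUNTIME task under its parent DAG task, and the
+// returned task_id is what gets passed onward instead of an execution_id.
+func createRuntimeTask(ctx context.Context, rc RunServiceClient, runID, parentTaskID, taskName string) (*PipelineTaskDetail, error) {
+	return rc.CreateTask(ctx, &PipelineTaskDetail{
+		RunID:        runID,
+		ParentTaskID: parentTaskID,
+		Name:         taskName,
+		Type:         "RUNTIME",
+	})
+}
+```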
+
+In the pipeline, the `ROOT_DAG` Driver currently passes an `execution_id` flag to subsequent drivers. This needs to be
+updated to pass `parent_task_id` instead. Downstream Driver tasks will continue to propagate their corresponding tasks as `parent_task_id` as well.
+
+Multiple control flows involving execution creation and management exist. These are detailed in the sections below.
+
+#### Control Flows
+
+The KFP Driver component creates and manages different types of executions during pipeline execution. These executions follow two main patterns:
+
+**DagExecution**
+
+Manages pipeline control flow. Has two subtypes:
+
+- **RootDag** (runs once per pipeline)
+ - Creates the Pipeline and PipelineRun context in MLMD
+ - Stores pipeline runtime input information
+
+- **Dag** (runs for each task group)
+ - Handles conditional logic (Condition, ConditionBranch, Loop, LoopIteration)
+ - Resolves conditional expressions
+ - Processes inputs for task groups
+ - Calculates iteration counts for loops
+
+**ContainerExecution**
+ - Runs before every Launcher/executor pod
+ - Handles caching decisions and input resolution
+ - Generates the pod specification for the executor
+ - Stores cache fingerprint in the task store
+ - Downloads and uploads artifacts
+
+Each execution type has distinct responsibilities and interacts with MLMD differently based on its role in the pipeline workflow.
+
+##### ContainerExecution
+
+Container Drivers will now create a task of type `Runtime`. When creating the PodSpecPatch, the Driver will pass the `--task_id` flag instead of the `execution_id` flag.
+
+##### DagExecution — Loops
+
+Loops in KFP today require two types of dags: either a dag that has an `iteration_count` or a dag that has an `iteration_index`.
+We'll refer to these as `Loop` and `LoopIteration` respectively. A `Loop` is a task grouping of components that will run within this loop. It tracks the total count of iterations via `iteration_count`. A `LoopIteration` is a dag that tracks the current iteration for a given loop via `iteration_index`.
+
+Each of these results in a `DagExecution`. Recall that the components that run within a `LoopIteration` will continue to have their regular `Runtime` tasks.
+
+Each of these loop types is used to resolve inputs/outputs and will need to be logged as Tasks into the Tasks table. The Tasks will be logged as `Loop` and `LoopIteration` respectively, leveraging the `RunServer` and `ArtifactServer` for input resolution.
+
+##### DagExecution — Exit handler
+
+Any task under the `dsl.ExitHandler` group falls within a Dag execution. These tasks will now be grouped under a task of type `ExitHandler`.
+
+##### DagExecution — DSL If/Else/ElseIf
+
+When working with Conditions in KFP, new nodes are introduced in the Pipeline Graph; they are prefixed with `condition-`
+or `condition-branches-`.
+
+1. **`Condition`** - Represents a conditional task group (i.e., per If/Else/ElseIf); in KFP it is represented by a DAG Driver that outputs a condition parameter which determines whether the underlying dag or components should execute.
+
+2. **`ConditionBranch`** - Represents the branches that stem from a conditional statement (i.e., an If/Else wrapper).
+
+Each of these results in a new dag execution. Instead of these executions, we will be switching to creating Tasks of types `Condition` and `ConditionBranch` respectively, leveraging the RunServer and ArtifactServer for input resolution.
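+
+To summarize how a DAG driver might pick the task type it records for its task group, here is a minimal sketch. It uses local stand-in types rather than the not-yet-generated v2beta1 enum, and the detection inputs (iteration fields, task group name, exit handler flag) are assumptions about what the driver has available:
+
+```go
+package driver
+
+import "strings"
+
+// taskType mirrors the TaskType values proposed for PipelineTaskDetail.
+type taskType string
+
+const (
+	taskTypeDAG             taskType = "DAG"
+	taskTypeLoop            taskType = "LOOP"
+	taskTypeLoopIteration   taskType = "LOOP_ITERATION"
+	taskTypeCondition       taskType = "CONDITION"
+	taskTypeConditionBranch taskType = "CONDITION_BRANCH"
+	taskTypeExitHandler     taskType = "EXIT_HANDLER"
+)
+
+// dagDriverInput captures the pieces of driver input assumed to be available here.
+type dagDriverInput struct {
+	TaskName       string // IR task group name, e.g. "condition-2" or "condition-branches-1"
+	IterationCount *int64 // set for a Loop dag
+	IterationIndex *int64 // set for a LoopIteration dag
+	IsExitHandler  bool   // assumed flag; exit handler groups are otherwise only detectable by name
+}
+
+// dagTaskType picks the task type a DAG driver would record for its task group.
+func dagTaskType(in dagDriverInput) taskType {
+	switch {
+	case in.IterationCount != nil:
+		return taskTypeLoop
+	case in.IterationIndex != nil:
+		return taskTypeLoopIteration
+	case in.IsExitHandler:
+		return taskTypeExitHandler
+	case strings.HasPrefix(in.TaskName, "condition-branches-"):
+		return taskTypeConditionBranch
+	case strings.HasPrefix(in.TaskName, "condition-"):
+		return taskTypeCondition
+	default:
+		return taskTypeDAG
+	}
+}
+```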
+
+##### Caching
+
+###### Caching explained
+To understand how caching should be handled in a post-MLMD world, let's first review how caching in KFP works.
+
+Caching has two parts: cache fingerprint creation, which happens in the Launcher, and cache hit detection, which happens in Container Drivers.
+
+1. At the end of the Launcher's `Execute()` procedure, there is a call to `l.clientManager.CacheClient().CreateExecutionCache(ctx, task)` which stores a `Task` with a `cache_fingerprint`. Underneath, this uses the `TaskServiceClient.CreateTaskV1` API, meaning this is execution data stored in the Task database table.
+
+2. When the Container Driver runs, it does the following:
+
+```go
+if !opts.CacheDisabled {
+ fingerPrint, cachedMLMDExecutionID, err := getFingerPrintsAndID(execution, &opts, cacheClient)
+ if err != nil {
+ return execution, err
+ }
+ ecfg.CachedMLMDExecutionID = cachedMLMDExecutionID
+ ecfg.FingerPrint = fingerPrint
+}
+createdExecution, err := mlmd.CreateExecution(ctx, pipeline, ecfg)
+```
+
+The call to `getFingerPrintsAndID` makes a subsequent call to `TaskServiceClient.ListTasksV1` and fetches the execution ID for the Task with the fingerprint stored in the Launcher step. If such an execution ID is found, we assume there was a cache hit, and we don't run the next Launcher.
+
+Notice also that we store `ecfg.FingerPrint = fingerPrint` in the MLMD execution as well; this means the container execution also has the `cache_fingerprint` found in the task table.
+
+###### Caching post-MLMD removal
+
+Much of the logic flow will stay the same, but instead of calls to `TaskServiceClient`'s v1 API, the v2 `RunService` API will be used.
+
+When the Launcher finishes running `Execute()`, it `defers` an `UpdateDAGExecutionsState()` call; this can be replaced with an `UpdateTask` call using the v2 `RunServerClient`, providing the `cache_fingerprint` for this `Runtime` task. The fingerprint should only be provided upon a successful launcher execution.
+
+In the Driver, `getFingerPrintsAndID` will be updated to leverage `ListTasks` and its `filter` field to search by `cache_fingerprint` to detect a hit, much like how it uses `ListTasksV1` today. Note that, unlike how the Driver works today, the `cache_fingerprint` should not be stored for an upcoming task that will be created in the Driver; it should instead be updated by the Launcher once an execution successfully completes.
+
+**Migration Note**
+
+Caching needs special consideration for migration. Since the `tasks` table will be dropped and re-populated using data from MLMD, caching will need to be handled carefully. When converting a `ContainerExecution` to a `Runtime` task, we will need to only store the fingerprint if the execution has a `COMPLETE` status. This avoids storing fingerprints that may be present on an execution whose executor pod never ran to success.
+
+### Launcher changes
+
+Like the Driver component, the Launcher will need to establish new client connections to access the Runs API via RunServerClient and the Artifacts API via ArtifactServiceClient.
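+
+As a rough sketch of how the Launcher might use the new Runs client to report its result (tying in the caching behaviour described above), consider the following. The client type is a placeholder for the not-yet-generated v2beta1 client, and all names are assumptions based on this proposal:
+
+```go
+package component
+
+import "context"
+
+// Placeholder stand-ins for the proposed v2beta1 task message and RunService client.
+type PipelineTaskDetail struct {
+	TaskID           string
+	Status           string // e.g. "SUCCEEDED", "FAILED"
+	CacheFingerprint string
+}
+
+type RunServiceClient interface {
+	UpdateTask(ctx context.Context, task *PipelineTaskDetail) (*PipelineTaskDetail, error)
+}
+
+// reportLauncherResult sketches the replacement for the deferred
+// UpdateDAGExecutionsState call: the launcher updates its own Runtime task and
+// attaches the cache fingerprint only when the execution succeeded.
+func reportLauncherResult(ctx context.Context, rc RunServiceClient, taskID, fingerprint string, execErr error) error {
+	task := &PipelineTaskDetail{TaskID: taskID, Status: "SUCCEEDED"}
+	if execErr != nil {
+		task.Status = "FAILED"
+	} else {
+		// Only a successful execution should publish a fingerprint for cache hits.
+		task.CacheFingerprint = fingerprint
+	}
+	_, err := rc.UpdateTask(ctx, task)
+	return err
+}
+```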
+
+The following flags in the launcher will need to be removed:
+
+```text
+--execution_id
+--mlmd_server_address
+--mlmd_server_port
+```
+
+And replaced with:
+
+```text
+--task_id # note that unlike the Driver we call this `task_id` instead of `parent_task_id`
+--kfp_server_address
+--kfp_server_port
+```
+
+This is a good opportunity to also replace the endpoints used in `cacheDefaultEndpoint` to use this address/port value, instead of relying on hardcoded defaults.
+
+Other changes that will be required in the Launcher are mentioned elsewhere in the proposal (see the [Caching](#caching) and [Metrics](#metrics) sections).
+
+### Nested Pipelines
+
+There is no direct way to infer whether a Driver run is for a Nested execution. To accommodate this, there is a generic `DAG` task type provided to fit such cases.
+Alternatively, we could provide an SDK update to declare a task type in a field on a `ComponentSpec` `dag` field.
+
+### StoreSessionInfo
+
+Currently, Artifact object storage credential info is stored as a custom property called `store_session_info`. Storing such system-level info as a custom property is an anti-pattern and should be avoided. In this effort, we will not port over this functionality, and we will instead remove the following from `root_dag.go`:
+
+```go
+storeSessionInfo, err = cfg.GetStoreSessionInfo(pipelineRoot)
+```
+
+And use it directly in `launcher_v2.go`, replacing:
+
+```go
+storeSessionInfo, err := objectstore.GetSessionInfoFromString(execution.GetPipeline().GetStoreSessionInfo())
+```
+
+Removing this property from the Artifact will also require Frontend changes: the server will now need to parse the Launcher config itself, instead of the client code sending this as part of the call to:
+
+```typescript
+ // Apis.ts
+ public static readFile({
+ path,
+ providerInfo,
+ namespace,
+ peek,
+ }: { }
+```
+
+The server in [artifacts.ts] will instead need to build this object similar to how the root Driver does it in `cfg.GetStoreSessionInfo(pipelineRoot)`.
+
+[artifacts.ts]: https://github.com/kubeflow/pipelines/blob/2c91fb797ed5e95bb51ae80c4daa2c6b9334b51b/frontend/server/handlers/artifacts.ts#L102
+
+### Metrics
+
+Metrics in KFP today are stored as Artifacts; they have the following Artifact Types:
+
+* system.Metrics - Regular Key and NumberValue pair
+* system.ClassificationMetrics - Key and JSON pair
+* system.SlicedClassificationMetrics - Key and JSON pair
+
+The values for these Metrics Artifacts are stored as `CustomProperties`; unlike other Artifacts, they are not stored in the object store. Therefore, it is questionable whether they should be treated as Artifacts to begin with. Instead of porting this behavior, we'll leverage the Metrics table in KFP, which is currently unused.
+
+We will log the Metrics in this table when such artifact types are encountered in the Launcher. These can be addressed in `launcher_v2.go` when `uploadOutputArtifact` is called. During this invocation we can check for an artifact's type via:
+
+```go
+ schemaTitle := runtimeArtifact.Type.GetSchemaTitle()
+ switch schemaTitle {
+ case "system.Metrics": // Handles Metric type, do something similar for ClassificationMetrics & SlicedClassificationMetrics
+ err := LogMetric(...)
+ ...
+ case "system.Artifact":
+ err := RecordArtifact()
+ ...
+```
+
+In the executor Input we can refrain from storing a URI, since this does not apply to Metrics.
+
+The Python SDK will continue to interpret Metrics as artifacts; this maintains backwards compatibility.
 The Driver will need to ensure that, when it is creating the Artifacts list during the call to `resolveInputs -> resolveInputArtifact -> resolveUpstreamArtifacts() -> artifact.ToRuntimeArtifact()`, Metrics are converted to output Artifacts. The updated pseudocode in `resolveUpstreamArtifacts` will be something like:
+
+```go
+package driver
+
+func resolveUpstreamArtifacts(cfg resolveUpstreamOutputsConfig) (*pipelinespec.ArtifactList, error) {
+ for {
+ ...
+ } else {
+ // use the Component *pipelinespec.ComponentSpec.ComponentInputsSpec from Options in driver.go to determine
+ // artifact schema type,
+ schemaTitle := determineArtifactSchema(ComponentInputSpec, TaskSpec)
+ switch schemaTitle {
+ case "system.Metrics": // Handles Metric type, do something similar for ClassificationMetrics & SlicedClassificationMetrics
+ // GetOutputMetricsByTaskID can fetch the Task via GetTask (if we don't already have the task),
+ // and can parse the `output_metrics` to return map[string]*OutputArtifact or just the *OutputArtifact
+ outputs, err := GetOutputMetricsByTaskID(cfg.ctx, taskID)
+ case "system.Artifact":
+ outputs, err := GetOutArtifactsByTaskID(cfg.ctx, taskID)
+ }
+}
+```
+
+### Frontend Changes
+
+There are three primary pages in the UI where MLMD data is used: the Run Details, Artifacts, and Executions pages.
+
+The Executions page can be removed entirely since all information will be present in the run task nodes in the run graph. The Frontend will need to be updated to ensure all relevant information will continue to be surfaced in a Task Node's details sidebar.
+
+On the Run Details page, MLMD data is fetched in `RuntimeNodeDetailsV2.tsx` via:
+
+```typescript
+const context = await getKfpV2RunContext(runId);
+const executions = await getExecutionsFromContext(context);
+const artifacts = await getArtifactsFromContext(context);
+const events = await getEventsByExecutions(executions);
+```
+
+These can be replaced by the following new implementations:
+
+```typescript
+// context no longer needed, use the Run object, which is often readily available wherever context is required
+const tasks = run.run_details.task_details; // run is a V2beta1Run
+const artifacts = await fetchArtifactsFromTasks(tasks); // the information is now available in the task.
+// a separate call for this may not be needed, as the required info may already be present in `tasks`
+const events = await getArtifactEventsByTasks(tasks); // uses ListArtifactEvents()
+```
+
+The `Visualization` Nav in `RuntimeNodeDetailsV2.tsx` will also need to be updated to take `Metrics` fetched from the `Task` response object, instead of `Artifacts` from MLMD.
+
+The Artifact Node in the UI should also no longer display an `Artifact URI` for metrics, as this is not applicable.
+
+`CompareV2.tsx` also makes various calls to MLMD, much like `RuntimeNodeDetailsV2`:
+
+```typescript
+ Promise.all(
+ runIds.map(async runId => {
+ const context = await getKfpV2RunContext(runId);
+ const executions = await getExecutionsFromContext(context);
+ const artifacts = await getArtifactsFromContext(context);
+ const events = await getEventsByExecutions(executions);
+ return {
+ executions,
+ artifacts,
+ events,
+ } as MlmdPackage;
+ }),
+ )
+```
+
+This and the underlying code will also need to be updated to leverage Tasks retrieved via the Runs API server.
+
+#### Run Reporting
+
+The Persistence Agent calls the KFP API Server's [report_server.go] for updating the Run metadata in the DB. This includes updates to the Task metadata in the DB as well.
+
+Because we are relying on the Driver/Launcher to create/update tasks, we will no longer require the Persistence Agent to report on task details, and we will need to get rid of this portion of the code.
+
+This is the key piece of code from `report_server.go`:
+
+```go
+_, err = s.reportTasksFromExecution(newExecSpec, runId)
+```
+[report_server.go]: ../../backend/src/apiserver/server/report_server.go
+
+### Task States
+
+We will handle Task states as follows:
+
+RuntimeStates
+* Tasks will always be in a subset of the [RuntimeStates](../../backend/api/v2beta1/run.proto)
+* When the parent run is in a terminal state, don't allow updates to the underlying tasks associated with that run
+ * Terminal States are: `SUCCEEDED`, `FAILED`, `CANCELED`
+
+StorageStates
+* Tasks don't need a storage state
+* If a Run is deleted, all tasks should be deleted
+
+### Auth Considerations
+
+The Driver/Launcher will be introducing a new `RunServerClient` and `ArtifactServerClient` using the `v2beta1` API. All calls to these endpoints must be protected via SubjectAccessReview. The new server implementations can simply use `resourceManager.IsAuthorized(ctx, resourceAttributes)`, which is the standard everywhere else in KFP. All tasks/artifacts/metrics endpoints will be doing SAR on the `run` resource. If a user makes a REST request with a `verb` that matches their permissions on the `Run` KFP resource, they will be authorized to perform that action.
+
+For example, if a user issues a `ListArtifactRequest`, they require the `list` verb on the `Run` resource for that particular namespace.
+
+A few more notes:
+* the Driver/Launcher communicates with the KFP API Server via the CacheClient. This has no auth mechanism today and will need to be updated.
+* the Driver/Launcher will provide the Pipeline Runner's Service Account token in the auth header for authorization.
+ * As such, the Pipeline Runner SA will need the appropriate namespace-level access to such resources for the Driver & Launcher to communicate with the API Server.
+
+### Manifests
+
+The following changes will need to be made:
+
+* For Driver/Launcher authentication purposes, the Pipeline Runner SA RBAC will need to be updated accordingly to support basic verbs on the Artifact resource.
+* Envoy manifests will be removed
+* MLMD manifests will need to be removed
+* Any configmaps, env vars, or such fields referencing MLMD will need to be removed
+
+### Migration
+
+This change will come with some drastic changes to the DB schema, namely to the `Tasks` table. We will be dropping this table entirely. The only usage this table sees is described in the [caching](#caching) section. As noted there, all information that is relevant already exists in MLMD.
+
+To accommodate the transition, the KFP release containing this change will provide a migration script for users to apply to their DB. MLMD will be required so that the script may use the MLMD client. The script will do the following:
+
+* Drop the Tasks table and recreate it
+* Drop the Metrics table (it is not used at all)
+* Scan MLMD executions, converting them to their Task counterparts.
+ * When encountering ContainerExecutions with `cache_fingerprints`, the fingerprint should only be stored if the execution has a `COMPLETE` state.
+ * To detect exit handler dags, the execution name will need to be parsed for the `exit-handler-*` prefix, as there's no other declarative way to determine this type.
+* Scan all `Artifacts` and recreate them in the KFP artifact table.
+* In the case of metrics, artifacts will need to be logged to the `Metrics` table instead of `Artifacts`. +* Validation Step + +Due to the nature of the change, we will require users to opt in to this upgrade by running this script. If the API Server detects the new fields are not present, KFP will assume the migration script has not been executed, and thus the server will fail to start up, logging a meaningful message to the user. + +#### Migration Alternative + +An alternative to this migration strategy is to have the KFP server perform the migration. We can enable opt-in by having a one time API Server config option `mlmdMigrate=true`. + +The benefit of this approach is a more seamless migration that's automated. However, if a user wants to have more granular control of their migration, they may prefer the script method which they can adjust as needed. + +There is also the option of doing a hybrid approach at the cost of more overhead. + +### Testing + +1. Unit Tests +- Driver/Launcher unit tests will need to be updated to use KFP server instead of MLMD +- API server unit tests for new task and artifact endpoints +- Frontend unit tests for updated components using task data instead of MLMD + +2. Integration Tests +- Existing integration tests will verify no regressions +- With the change to metrics handling, we may need more rigorous testing if current sample pipelines are insufficient +- Similarly, more testing around data passing might be required + +3. Migration Tests (manual but accompanied by supporting evidence) +- Testing of a migration script +- Previous and post upgrade mysql dumps should be provided +- Verify old cached pipelines continue to be cached post-upgrade +- Verify recurring runs pre-upgrade and post-upgrade continue to work +- Test running old and new pipelines before and after migration + +4. Security Tests (requires multi-user mode) +- Test authentication/authorization for new API endpoints +- Verify proper RBAC enforcement for task/artifact operations +- Test multi-tenant isolation of tasks and artifacts + +5. Frontend Verification Tests +- Verify frontend reporting of metrics in the "Artifact Info" and "Visualization" navs in run details +- Verify the frontend comparison UI, confirming artifacts and metrics are fetched accordingly + +6. Performance Testing +- Monitor and assess changes in CI times +- Load testing on Kubernetes clusters before/after mlmd removal \ No newline at end of file diff --git a/proposals/12147-mlmd-removal/protos/artifacts.proto b/proposals/12147-mlmd-removal/protos/artifacts.proto new file mode 100644 index 00000000000..120ed3cac4e --- /dev/null +++ b/proposals/12147-mlmd-removal/protos/artifacts.proto @@ -0,0 +1,297 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +option go_package = "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"; +package kubeflow.pipelines.backend.api.v2beta1; + +import "google/api/annotations.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/struct.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; + +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { + schemes: [1, 2], // http + https + responses: { + key: "default"; + value: { + schema: { + json_schema: { + ref: ".google.rpc.Status"; + } + } + } + } + // Use bearer token for authorizing access to artifact service. + // Kubernetes client library(https://kubernetes.io/docs/reference/using-api/client-libraries/) + // uses bearer token as default for authorization. The section below + // ensures security definition object is generated in the swagger definition. + // For more details see https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#securityDefinitionsObject + security_definitions: { + security: { + key: "Bearer"; + value: { + type: TYPE_API_KEY; + in: IN_HEADER; + name: "Authorization"; + } + } + } +}; + +service ArtifactService { + // Finds all artifacts within the specified namespace. + rpc ListArtifacts(ListArtifactRequest) returns (ListArtifactResponse) { + option (google.api.http) = { + get: "/apis/v2beta1/artifacts" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "list_artifacts" + summary: "Finds all artifacts within the specified namespace." + tags: "ArtifactService" + }; + } + + // Finds a specific Artifact by ID. + rpc GetArtifact(GetArtifactRequest) returns (Artifact) { + option (google.api.http) = { + get: "/apis/v2beta1/artifacts/{artifact_id}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "get_artifact" + summary: "Finds a specific Artifact by ID." + tags: "ArtifactService" + }; + } + + rpc ListArtifactTasks(ListArtifactTasksRequest) returns (ListArtifactTasksResponse) { + option (google.api.http) = { + get: "/apis/v2beta1/artifact_tasks" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "list_artifact_tasks" + summary: "Lists artifact-task relationships." + tags: "ArtifactService" + }; + } + + // Creates an artifact-task relationship. + // While we always create an artifact-task link when an artifact is created, + // In the case of Importer, we only create a link (and not an artifact) + // if Reimport = false. + rpc CreateArtifactTask(CreateArtifactTaskRequest) returns (ArtifactTask) { + option (google.api.http) = { + post: "/apis/v2beta1/artifact_tasks" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "create_artifact_task" + summary: "Creates an artifact-task relationship." + tags: "ArtifactService" + }; + } + + // Creates a new artifact. + rpc CreateArtifact(CreateArtifactRequest) returns (Artifact) { + option (google.api.http) = { + post: "/apis/v2beta1/artifacts" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "create_artifact" + summary: "Creates a new artifact." + tags: "ArtifactService" + }; + } +} + +message CreateArtifactRequest { + // Required. The artifact to create. + Artifact artifact = 1; + + // An artifact is always created in the context of a + // run. 
+ string run_id = 2; + string task_id = 3; + ArtifactTaskType type = 4; + string producer_task_name = 5; + string producer_key = 6; +} + +message GetArtifactRequest { + // Required. The ID of the artifact to be retrieved. + string artifact_id = 1; +} + +message ListArtifactRequest { + // Optional input. Namespace for the artifacts. + string namespace = 1; + + // A page token to request the results page. + string page_token = 2; + + // The number of artifacts to be listed per page. If there are more artifacts + // than this number, the response message will contain a valid value in the + // nextPageToken field. + int32 page_size = 3; + + // Sorting order in form of "field_name", "field_name asc" or "field_name desc". + // Ascending by default. + string sort_by = 4; + + // A url-encoded, JSON-serialized filter protocol buffer (see + // [filter.proto](https://github.com/kubeflow/artifacts/blob/master/backend/api/filter.proto)). + string filter = 5; +} + +message ListArtifactResponse { + // The list of artifacts returned. + repeated Artifact artifacts = 1; + + // The total number of artifacts available. This field is not always populated. + int32 total_size = 2; + + // A token to retrieve the next page of results, or empty if there are no + // more results in the list. + string next_page_token = 3; +} + +// The fields here work the same as previous backend api calls +message ListArtifactTasksRequest { + // Optional, filter artifact task by a set of task_ids + // We can also likely just rely on filter for this and omit this field + repeated string task_ids = 1; + // Optional, filter artifact task by a set of run_ids + repeated string run_ids = 2; + // Optional, filter artifact task by a set of artifact_ids + // We can also likely just rely on filter for this and omit this field + repeated string artifact_ids = 3; + + // Optional. Only list artifact tasks that have artifacts of this type. + ArtifactTaskType type = 4; + + string page_token = 5; + int32 page_size = 6; + string sort_by = 7; + string filter = 8; +} + +message ListArtifactTasksResponse { + repeated ArtifactTask artifact_tasks = 1; + int32 total_size = 2; + string next_page_token = 3; +} + +// Request to create an artifact-task relationship +message CreateArtifactTaskRequest { + // Required. The artifact-task relationship to create. + ArtifactTask artifact_task = 1; +} + +// Describes the I/O relationship between +// this Artifact and Task +enum ArtifactTaskType { + INPUT = 0; + OUTPUT = 1; +} + +message ArtifactTask { + // Output only. The unique server generated id of the ArtifactTask. + string id = 1; + string artifact_id = 2; + string run_id = 3; + string task_id = 4; + ArtifactTaskType type = 5; + + // The task that produced this artifact + // For example in the case of a pipeline channel + // that is an output artifact you might have as + // input something like the following in the IR: + // taskOutputArtifact: + // outputArtifactKey: output_dataset + // producerTask: create-dataset + // These fields are used to track this lineage. + // + // For outputs, the producer task is the component name + // of the task that produced the artifact. + string producer_task_name = 6; + // The key is often the parameter name used + // as input/output on the component, but + // can also take on the value of other values. 
+ // For example: + // * "param-#" when using parameters in a ParallelFor + // * "Output" when using Pythonic Artifacts + // + // For outputs, the key is the name of the parameter + // in the component spec (found in OutputDefinitions) + // used to output the artifact. + string producer_key = 7; + + // The parameter name for the input/output artifact + // This maybe the same as the Artifact name if the + // artifact name is not specified. It is used to + // resolve artifact pipeline channels. + string artifact_key = 8; +} + +// Note to be confused with RuntimeArtifact in pipelinespec +message Artifact { + // Output only. The unique server generated id of the artifact. + // Note: Updated id name to be consistent with other api naming patterns (with prefix) + string artifact_id = 1; + + // Required. The client provided name of the artifact. + // Note: it seems in MLMD when name was set, it had to be unique for that type_id + // this restriction is removed here + // If this is a "Metric" artifact, the name of the metric + // is treated as the Key in its K/V pair. + string name = 2; + + string description = 3; + + enum ArtifactType { + // default; treated as "not set" + // reject if unset. + TYPE_UNSPECIFIED = 0; + + Artifact = 1; + Model = 2; + Dataset = 3; + HTML = 4; + Markdown = 5; + + Metric = 6; + ClassificationMetric = 7; + SlicedClassificationMetric = 8; + } + // Required. The name of an ArtifactType. E.g. Dataset + ArtifactType type = 4; + + // The uniform resource identifier of the physical artifact. + // May be empty if there is no physical artifact. + optional string uri = 5; + + // Optional. User provided custom properties which are not defined by its type. + map metadata = 6; + + // Used primarily for metrics + optional double number_value = 7; + + // Output only. Create time of the artifact in millisecond since epoch. + // Note: The type and name is updated from mlmd artifact to be consistent with other backend apis. + google.protobuf.Timestamp created_at = 8; + + string namespace = 9; +} diff --git a/proposals/12147-mlmd-removal/protos/runs.json b/proposals/12147-mlmd-removal/protos/runs.json new file mode 100644 index 00000000000..bf33f227bd4 --- /dev/null +++ b/proposals/12147-mlmd-removal/protos/runs.json @@ -0,0 +1,144 @@ +{ + "experiment_id": "f7344db6-de5d-4e68-816b-98b4f0d1ca7f", + "run_id": "9e68aca5-3afa-4028-8777-f697d858053f", + "namespace": "some_namespace", + "display_name": "mypipeline-run", + "storage_state": "AVAILABLE", + "pipeline_version_reference": {}, + "pipeline_root": "minio://mlpipeline/v2/artifacts/pipeline/f7344db6-de5d-4e68-816b-98b4f0d1ca7f", + "runtime_config": {}, + "service_account": "pipeline-runner", + "created_at": "2025-08-08T15:15:41Z", + "scheduled_at": "2025-08-08T15:15:41Z", + "finished_at": "2025-08-08T15:18:24Z", + "state": "SUCCEEDED", + // These are new: + "tasks": [ + { + "task_id": "task_id_1", + // This should match the component task names created during sdk compilation + // UI can use this to look up matching tasks in the UI. + // In the case of task_groups this would take on names like: "condition-branches-1, for-loop-1, etc." 
+ "run_id": "run_id", + "name": "task_name", + "display_name": "train-model", + "create_time": "2025-08-08T15:15:41Z", + "start_time": "2025-08-08T15:17:36Z", + "end_time": "2025-08-08T15:18:24Z", + "status": "SUCCEEDED", + "status_metadata": { + "custom_message": "this is a custom message" + }, + "state_history": [], + // in non Runtime types, we expect only one pod + "pods": [ + { + "name": "runtime_driver_pod", + "uid" : "some_uid_a", + "type": "DRIVER" + }, + { + "name": "runtime_executor_pod", + "uid" : "some_uid_a", + "type": "EXECUTOR" + } + ], + "inputs": { + "artifacts": [ + { + // Parameter name is only applicable on a + // runtime task type + "parameter_name": "some_param", + "producer_task_name": "create_metric", + "producer_key": "my_metric_out_param_name", + "value": { + "artifact_id": "1", + "name": "my_metric", + "type": "Metric", + "custom_properties": {}, + "number_value": 23.21, + "created_at": "2025-08-08T15:15:41Z" + } + }, + { + "name": "input_dataset", + "producer_task_name": "create_dataset", + "producer_key": "my_dataset_out_param_name", + "value": { + "artifact_id": "5", + "name": "my_dataset", + "type": "Model", + "uri": "minio://mlpipeline/v2/artifacts/pipeline/9e68aca5-3afa-4028-8777-f697d858053f/input_dataset", + "custom_properties": { + "my_data": ["some", "data"], + "more_data": { "can_be": "anythingJSON"} + }, + "created_at": "2025-08-08T15:15:41Z" + } + } + ], + "parameters": [ + { + "name": "min_max_scaler", + "value": "false" + }, + { + "value": "this", + "producer": { + // For tasks, we may get PipelineChannels as input parameters + // PipelineChannel can be of the following forms: + // * pipelinechannel--output-msg-output_artifact + // * pipelinechannel--output-msg-Output + "task_name": "output-msg", + // We call it ID because it's not always a parameter "name" so producer_parameter_name would be misleading + // (e.g. in the case of loops pipelinechannel--loop-item-param-1), if we can infer the name of the iteration + // param then "name" would be better. + "key": "a_msg" + } + } + ] + }, + // Same structure as inputs + "outputs": { + "artifacts": [], + // At first this is not intuitive, but there are output parameter pipeline channels, like in the case of dsl.collected. + // This is defined in the pipeline spec, though we don't surface these pipeline channels today in the runtime executions. + // I.e., it is not visible in the RunDetails graph details. It is not in scope of this proposal to add this feature but + // the api should support it regardless. + "parameters": [] + }, + "child_tasks": [ + { + "task_id": "pipeline-j9t66-382940577", + "name": "child_task_name", + "pods": [ + { + "name": "child_task_pod1", + "uid" : "some_uid_b", + "type": "EXECUTOR" + }, + { + "name": "child_task_pod2", + "uid" : "some_uid_b", + "type": "DRIVER" + } + ] + } + ], + "type": "LOOP", + // Included for LoopIteration + "iteration_index": 2, + // Included for LoopCount, iteration_index & iteration_count are mutually exclusive + "iteration_count": 2, + "cache_fingerprint": "0d32871640a827e4abaec95747b8780602f38f2f66d447ee70af3a7310d5849e", + } + ], + + // We have 2 paths here: + // 1) As it's not used in KFP and half implemented, we can remove this outright. But it is part of the Runs object api, which is widely used. 
+ // 2) We can deprecate it, but we intend to drop the "tasks" table, so it can just be something like: + "run_details": { + "task_details": [ ] // Same as "tasks" at root level + }, + "state_history": [] +} diff --git a/proposals/12147-mlmd-removal/protos/runs.proto b/proposals/12147-mlmd-removal/protos/runs.proto new file mode 100644 index 00000000000..f91d6282b30 --- /dev/null +++ b/proposals/12147-mlmd-removal/protos/runs.proto @@ -0,0 +1,243 @@ +syntax = "proto3"; +import "artifacts.proto"; + +// All of the following services must comply to the RBAC of `runs` resource on that particular namespace +// For example, user can call "CreateTask" if they have the permission verb "update" on the Runs resource +// in the target namespace. +// Note: We do "update" instead of "create" for "CreateTask" because creating a "task" is an implicit update +// to its parent Run. A user that can only "create" a run should not have access to "update" for that +// the run, without explicitly having that verb. +service RunService { + rpc CreateTask(CreateTaskRequest) returns (PipelineTaskDetail) { + option (google.api.http) = { + post: "/apis/v2beta1/tasks" + body: "task" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "create_task" + summary: "Creates a new task." + tags: "RunService" + }; + } + + rpc UpdateTask(UpdateTaskRequest) returns (PipelineTaskDetail) { + option (google.api.http) = { + patch: "/apis/v2beta1/tasks/{task_id}" + body: "task" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "update_task" + summary: "Updates an existing task." + tags: "RunService" + }; + } + + rpc GetTask(GetTaskRequest) returns (PipelineTaskDetail) { + option (google.api.http) = { + get: "/apis/v2beta1/tasks/{task_id}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "get_task" + summary: "Gets a specific task by ID." + tags: "RunService" + }; + } + + rpc ListTasks(ListTasksRequest) returns (ListTasksResponse) { + option (google.api.http) = { + get: "/apis/v2beta1/tasks" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "list_tasks" + summary: "Lists tasks with optional filtering." + tags: "RunService" + }; + } +} + +message CreateTaskRequest { + PipelineTaskDetail task = 1; +} + +message UpdateTaskRequest { + string task_id = 1; + PipelineTaskDetail task = 2; +} + +message GetTaskRequest { + string task_id = 1; +} + +message ListTasksRequest { + // Required. Must specify either parent_id or run_id to filter tasks. + oneof parent_filter { + // List all tasks with this parent task. + string parent_id = 1; + // List all tasks for this run. + string run_id = 2; + } + + int32 page_size = 3; + string page_token = 4; + string filter = 5; + string order_by = 6; +} + +message ListTasksResponse { + repeated PipelineTaskDetail tasks = 1; + string next_page_token = 2; + int32 total_size = 3; +} + +// Runtime information of a task execution. +message PipelineTaskDetail { + string name = 1; + // User specified name of a task that is defined in + // [Pipeline.spec][]. + string display_name = 2; + + // System-generated ID of a task. + string task_id = 3; + + // ID of the parent run. + string run_id = 4; + + // Name of the corresponding pod assigned by the orchestration engine. + // Also known as node_id. 
+ enum TaskPodType { + DRIVER = 0; + EXECUTOR = 1; + } + message TaskPod { + string name = 1; + string uid = 2; + string type = 3; + } + repeated TaskPod pods = 5; + + string cache_fingerprint = 6; + + // Creation time of a task. + google.protobuf.Timestamp create_time = 7; + + // Starting time of a task. + google.protobuf.Timestamp start_time = 8; + + // Completion time of a task. + google.protobuf.Timestamp end_time = 9; + + // Runtime state of a Task + RuntimeState status = 10; + + // Custom status metadata, this can be used to provide + // additional status info for a given task during runtime + map status_metadata = 11; + + // A sequence of task statuses. This field keeps a record + // of state transitions. + repeated RuntimeStatus state_history = 12; + + enum TaskType { + // Root task replaces Root Execution, it is the top ancestor task to all tasks in the pipeline run + ROOT = 0; + RUNTIME = 1; + CONDITION_BRANCH = 2; + CONDITION = 3; + LOOP = 4; + LOOP_ITERATION = 5; + EXIT_HANDLER = 6; + // Generic DAG task type for types like Nested Pipelines + // where there is no declarative way to detect this within + // a driver. + DAG = 7; + } + TaskType type = 13; + + message TypeAttributes { + // Optional. Applies to type LOOP_ITERATION + int64 iteration_index = 1; + // Optional. Applies to type LOOP + int64 iteration_count = 2; + } + + TypeAttributes type_attributes = 14; + + // The error that occurred during task execution. + // Only populated when the task is in FAILED or CANCELED state. + google.rpc.Status error = 15; + + // ID of the parent task if the task is within a component scope. + // Empty if the task is at the root level. + string parent_task_id = 16; + + // A dependent task that requires this one to succeed. + // Represented by either task_id or pod_name. + message ChildTask { + // System-generated ID of a task. + string task_id = 1; + + string name = 2; + + // Name of the corresponding pod assigned by the orchestration engine. + // Also known as node_id. + repeated TaskPod pods = 3; + } + + // Sequence of dependent tasks. + repeated ChildTask child_tasks = 17; + + message InputOutputs { + + message IOProducer { + string task_name = 1; + // This would be the equivalent of output_parameter_key from the upstream task + // when it's a parameter input, or output_artifact_key when it is an Artifact. + string key = 2; + } + + message Parameter { + string value = 1; + // Optional, this is only included on Runtime Tasks when the parameter name is known. + optional string name = 2; + // Not all Parameters have task producers, + // For example they can also be Runtime Constants. + // Whereas in the case of a PipelineChannel, they + // do have a producer. + optional IOProducer producer = 3; + } + message IOArtifact { + // Optional, this is only included on Runtime Tasks when the parameter name is known. + string parameter_name = 1; + + Artifact value = 2; + + // All IO artifacts have a producer, so the following + // fields are required. In the case of importer + // where the artifact is set to reimport = true + // the name & key are importer-[0-9]+ and "artifact" + IOProducer producer = 3; + } + repeated Parameter parameters = 1; + // Output Only. To create Artifacts for a task are created + // via ArtifactTasks. + repeated IOArtifact artifacts = 2; + } + + InputOutputs inputs = 18; + InputOutputs outputs = 19; +} + + +message Run { + // ... 
+ // This will be added + // output only + string pipeline_id = 18; + // output only + string pipeline_version_id = 19; + repeated PipelineTaskDetail tasks = 20; + + + // Either remove or deprecate this + RunDetails run_details = 15; +} diff --git a/proposals/12147-mlmd-removal/schema_changes.sql b/proposals/12147-mlmd-removal/schema_changes.sql new file mode 100644 index 00000000000..2f823969d66 --- /dev/null +++ b/proposals/12147-mlmd-removal/schema_changes.sql @@ -0,0 +1,83 @@ +CREATE TABLE `artifacts` +( + `UUID` varchar(191) NOT NULL, + `Namespace` varchar(63) NOT NULL, -- enables multi-tenancy on artifacts + `Type` varchar(64) DEFAULT NULL, -- examples: Artifact, Model, Dataset + -- URI is immutable, reject in API server call if update artifact attempts to change the URI for a pre-existing artifact + `Uri` text, + `Name` varchar(128) DEFAULT NULL, + `Description` varchar(128) DEFAULT NULL, + `CreatedAtInSec` bigint NOT NULL DEFAULT '0', + `LastUpdateInSec` bigint NOT NULL DEFAULT '0', + `Metadata` JSON DEFAULT NULL, -- equivalent to mlmd custom properties + `NumberValue` bigint DEFAULT NULL, -- used for metrics + + PRIMARY KEY (`UUID`), + KEY idx_type_namespace (`Namespace`, `Type`), + KEY idx_created_timestamp (`CreatedAtInSec`), + KEY idx_last_update_timestamp (`LastUpdateInSec`) +); + +-- Analogous to an mlmd Event, except it is specific to artifacts <-> tasks (instead of executions) +CREATE TABLE `artifact_tasks` +( + `UUID` varchar(191) NOT NULL, + `ArtifactID` varchar(191) NOT NULL, + `TaskID` varchar(191) NOT NULL, + -- 0 for INPUT, 1 for OUTPUT + `Type` int NOT NULL, + + `RunUUID` varchar(191) NOT NULL, + `ProducerTaskName` varchar(128) NOT NULL, + `ProducerKey` varchar(128) NOT NULL, + `ArtifactKey` varchar(128) NOT NULL, + + PRIMARY KEY (`UUID`), + UNIQUE KEY `UniqueLink` (`ArtifactID`,`TaskID`,`Type`), + KEY `idx_link_task_id` (`TaskID`), + KEY `idx_link_artifact_id` (`ArtifactID`), + KEY `idx_created_timestamp` (`CreatedAtInSec`), + + KEY `idx_run_uuid` (`RunUUID`), + CONSTRAINT `fk_artifact_tasks_run_details` FOREIGN KEY (`RunUUID`) REFERENCES `run_details` (`UUID`) ON DELETE CASCADE ON UPDATE CASCADE, + CONSTRAINT fk_artifact_tasks_tasks FOREIGN KEY (TaskID) REFERENCES tasks (UUID) ON DELETE CASCADE ON UPDATE CASCADE, + CONSTRAINT fk_artifact_tasks_artifacts FOREIGN KEY (ArtifactID) REFERENCES artifacts (UUID) ON DELETE CASCADE ON UPDATE CASCADE +); + +CREATE TABLE `tasks` +( + `UUID` varchar(191) NOT NULL, + `Namespace` varchar(63) NOT NULL, -- updated to 63 (max namespace size in k8s) + -- This is used for searching for cached_fingerprints today + -- likely to prevent caching across pipelines + `PipelineName` varchar(128) NOT NULL, + `RunUUID` varchar(191) NOT NULL, + `Pods` json NOT NULL, -- This is broken today and will need to be fixed + `CreatedAtInSec` bigint NOT NULL, + `StartedInSec` bigint DEFAULT '0', + `FinishedInSec` bigint DEFAULT '0', + `Fingerprint` varchar(255) NOT NULL, + `Name` varchar(128) DEFAULT NULL, + `DisplayName` varchar(128) DEFAULT NULL, + `ParentTaskUUID` varchar(191) DEFAULT NULL, + `Status` varchar(64) DEFAULT NULL, + `StatusMetadata` json DEFAULT NULL, + `InputParameters` json, + `OutputParameters` json, + -- Corresponds to the executions created for each driver pod, which result in a Node on the Run Graph. + -- E.g values are: Runtime, Condition, Loop, etc. 
+ `Type` varchar(64) NOT NULL, + -- All type-specific attributes (Runtime.DisplayName, Loop.IterationIndex/Count) + `TypeAttrs` json NOT NULL, + + PRIMARY KEY (`UUID`), + KEY idx_task_type (`Type`), + KEY idx_pipeline_name (`PipelineName`), + KEY idx_parent_run (`RunUUID`, `ParentTaskUUID`), + KEY idx_parent_task_uuid (`ParentTaskUUID`), + KEY idx_created_timestamp (`CreatedAtInSec`), + KEY idx_started_timestamp (`StartedInSec`), + KEY idx_finished_timestamp (`FinishedInSec`), + CONSTRAINT `fk_tasks_parent_task` FOREIGN KEY (`ParentTaskUUID`) REFERENCES tasks (`UUID`) ON DELETE CASCADE ON UPDATE CASCADE, + CONSTRAINT `tasks_RunUUID_run_details_UUID_foreign` FOREIGN KEY (`RunUUID`) REFERENCES `run_details` (`UUID`) ON DELETE CASCADE ON UPDATE CASCADE +); diff --git a/proposals/12147-mlmd-removal/test_plan.md b/proposals/12147-mlmd-removal/test_plan.md new file mode 100644 index 00000000000..e4165f859b3 --- /dev/null +++ b/proposals/12147-mlmd-removal/test_plan.md @@ -0,0 +1,994 @@ +# Test Plan: KFP MLMD Removal + +## Document Information +- **Feature**: Complete MLMD (ML Metadata) Removal +- **Version**: v2.0 (Major Refactoring) +- **Date Created**: 2025-10-06 +- **Repository**: kubeflow/pipelines +- **Branch**: Current development branch +- **Total Files Changed**: 235+ files + +--- + +## Table of Contents + +1. [Overview](#overview) +2. [Summary of Changes](#summary-of-changes) +3. [Impact Analysis](#impact-analysis) +4. [Test Environment Requirements](#test-environment-requirements) +5. [Test Categories](#test-categories) + - [Unit Tests](#unit-tests) + - [Integration Tests](#integration-tests) + - [API Contract Tests](#api-contract-tests) + - [Storage Layer Tests](#storage-layer-tests) + - [Driver and Launcher Tests](#driver-and-launcher-tests) + - [Conditional Logic Tests](#conditional-logic-tests) + - [Security Tests](#security-tests) + - [Performance Tests](#performance-tests) + - [Regression Tests](#regression-tests) + - [Migration Tests](#migration-tests) +6. [Success Criteria](#success-criteria) +7. [Risk Assessment](#risk-assessment) + +--- + +## Overview + +This test plan covers comprehensive testing for the major refactoring of Kubeflow Pipelines (KFP) that includes: + +1. **Complete MLMD Removal**: Removing all ML Metadata (MLMD) client dependencies from the driver, launcher, and backend +2. **New Storage Architecture**: Introduction of artifact_store, task_store, and run_store without MLMD +3. **OneOf Conditional Branching**: Implementation of new conditional logic for pipeline branching +4. **Parameter/Artifact Resolution**: Complete rework of input/output resolution logic +5. 
**API Changes**: Updates to v2beta1 protobuf definitions and generated clients + +### Architecture Changes + +**Before**: +- MLMD client for artifact and execution tracking +- MLMD-based context management (Pipeline, PipelineRun) +- MLMD executions for all task types +- Custom property storage in MLMD + +**After**: +- KFP v2beta1 API Server for all metadata operations +- Direct database storage via artifact_store, task_store, run_store +- Task-based execution tracking with explicit types +- Dedicated metrics table (no longer stored as artifacts) + +--- + +## Summary of Changes + +### Core Components Modified + +#### API Layer (66 files) +- **v2alpha1 protobuf**: Added oneOf support to pipeline_spec.proto +- **v2beta1 protobuf**: New artifact.proto service, extensive run.proto updates + - **ArtifactService**: New service with ListArtifacts, GetArtifact, CreateArtifact, ListArtifactTasks, CreateArtifactTask + - **RunService**: Enhanced with CreateTask, UpdateTask, GetTask, ListTasks endpoints + - **Artifact types**: TYPE_UNSPECIFIED, Artifact, Model, Dataset, HTML, Markdown, Metric, ClassificationMetric, SlicedClassificationMetric + - **Task types**: ROOT_DAG, DAG, RUNTIME, LOOP, LOOP_ITERATION, CONDITION, CONDITION_BRANCH, EXITHANDLER +- **Generated clients**: Complete regeneration of Go and Python HTTP clients +- **New models**: IOParameter, IOArtifact, IOProducer, ArtifactTask, PipelineTaskDetail enhancements + +#### Backend Storage (20 files) +- **New stores**: artifact_store.go, artifact_task_store.go (with comprehensive tests) +- **Updated stores**: task_store.go, run_store.go (MLMD removal) +- **Database schema**: New tables for artifacts and artifact_tasks +- **Client manager**: Removed MLMD client initialization + +#### Backend API Server (15 files) +- **New server**: artifact_server.go (handles artifact CRUD and queries) +- **Updated servers**: run_server.go (added task endpoints), report_server.go (removed MLMD reporting) +- **Converters**: api_converter.go refactored for new task models +- **Utilities**: list_request_util.go for unified listing logic + +#### V2 Driver (40+ files) +- **Complete refactoring**: driver.go, container.go, dag.go, root_dag.go +- **New resolver package**: artifacts.go, parameters.go, resolve.go, util.go +- **Removed**: resolve.go (old monolithic resolver - 1099 lines deleted) +- **Test data**: 6 new test pipelines for oneOf, nested dags, loops, parameters + +#### V2 Compiler (10 files) +- **Argo compiler**: Updates for oneOf and conditional branches +- **DAG handling**: New logic for ConditionBranch task types +- **Test data**: New test cases for multiple parallel loops + +#### Metadata Client (5 files) +- **v2/metadata/client.go**: Removed MLMD-specific methods, added KFP API client integration +- **Fake client**: Updated for testing without MLMD + +### New Features Implemented + +#### OneOf Conditional Branching +- Enables mutually exclusive outputs from conditional branches +- Example: `dsl.OneOf(output_a, output_b, output_c)` returns the output from the executed branch +- Compiler generates ConditionBranch and Condition task types +- Driver resolves oneOf outputs through artifact selectors + +#### Nested Naming Conflict Resolution +- Handles deeply nested pipeline components with same names +- Uses scope-based resolution (e.g., `pipeline-b/pipeline-c/task-a`) +- Test case: `nested_naming_conflicts.py` with 3 levels of nesting + +#### Parameter Iterator and Collection +- Supports iterating over parameter lists from task outputs +- Collects outputs 
from loop iterations +- Example: `dsl.Collected()` gathers all iteration outputs into a list + +#### Loop Artifact Passing +- Artifacts from loop iterations can be collected +- Supports both raw iterators and input parameter iterators +- Proper parent-child task relationships maintained + +--- + +## Impact Analysis + +### Criticality: CRITICAL + +This is a **breaking architectural change** that affects: + +1. **All pipeline executions** - Every pipeline run now uses new task and artifact storage +2. **Client compatibility** - Generated clients have breaking API changes +3. **Database schema** - Requires migration script for existing deployments +4. **Authentication** - New RBAC requirements for Driver/Launcher +5. **Frontend** - UI must transition from MLMD queries to Task API queries +6. **Caching** - Cache mechanism completely reworked +7. **Metrics** - Metrics no longer stored as artifacts (moved to dedicated table) + +### Components with High Risk + +| Component | Risk Level | Reason | Mitigation | +|-----------|-----------|---------|------------| +| Task Storage | **CRITICAL** | Complete replacement of MLMD-based execution tracking | Comprehensive storage layer tests + migration validation | +| Artifact Storage | **CRITICAL** | New artifact and artifact_task tables | Full CRUD test coverage + artifact linking tests | +| Driver | **CRITICAL** | 1099 lines deleted, complete resolver rewrite | Extensive unit tests + integration tests with real pipelines | +| Caching | **HIGH** | Different fingerprint storage and lookup mechanism | Cache hit/miss tests + performance benchmarks | +| Parameter/Artifact Resolution | **HIGH** | Complete rewrite of input resolution logic | Resolution tests for all parameter/artifact types | +| OneOf Logic | **HIGH** | New feature with complex conditional evaluation | Edge case testing + integration tests | +| Authentication | **MEDIUM** | New API endpoints need RBAC | Security tests with multi-tenant scenarios | +| Frontend | **MEDIUM** | UI must query new APIs | UI integration tests (manual verification) | + +--- + +## Test Environment Requirements + +### Development Environment +- **Go Version**: 1.21+ +- **Python Version**: 3.9+ +- **Database**: MySQL 8.0 or compatible +- **Container Runtime**: Docker or Podman +- **Kubernetes**: Kind cluster (v1.29.2 or v1.31.0) + +### Test Cluster Configurations + +#### 1. Local Development Cluster +```yaml +Kubernetes: Kind v1.29.2 +KFP Mode: Standalone +Database: MySQL 8.0 +Storage: MinIO (local S3-compatible) +RBAC: Disabled (for quick iteration) +Purpose: Unit test validation, quick smoke tests +``` + +#### 2. Standard KFP Cluster +```yaml +Kubernetes: Kind v1.31.0 +KFP Mode: Standalone with RBAC +Database: MySQL 8.0 +Storage: MinIO +RBAC: Enabled +Multi-tenancy: Single namespace +Purpose: Main integration testing, regression tests +``` + +#### 3. Multi-Tenant Cluster +```yaml +Kubernetes: Kind v1.31.0 +KFP Mode: Multi-tenant +Database: MySQL 8.0 +Storage: MinIO with namespace isolation +RBAC: Enabled with ServiceAccount tokens +Multi-tenancy: Multiple namespaces +Purpose: Security testing, RBAC validation +``` + +#### 4. 
Performance Testing Cluster +```yaml +Kubernetes: Kind v1.31.0 (or GKE/EKS for realistic perf) +KFP Mode: Standalone +Database: MySQL 8.0 (optimized) +Storage: MinIO (or cloud storage) +Resources: Higher CPU/memory allocation +Purpose: Load testing, performance regression +``` + +### Required Services +- **KFP API Server**: Running with new v2beta1 endpoints +- **MySQL Database**: With migrated schema (artifacts, artifact_tasks, tasks tables) +- **Object Storage**: MinIO or S3-compatible service +- **Kubernetes Cluster**: For end-to-end pipeline execution + +### Test Data Requirements +- **Sample Pipelines**: 10+ test pipelines covering various scenarios +- **Migration Data**: Sample MLMD database dump for migration testing +- **Performance Baselines**: Pre-refactor performance metrics for comparison + +--- + +## Test Categories + +## Unit Tests + +### Storage Layer Tests + +#### Artifact Store Tests +**Location**: `backend/src/apiserver/storage/artifact_store_test.go` + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Create artifact with valid data |
    1. Initialize artifact store with test DB
    2. Create artifact with name, type, uri, metadata
    3. Verify artifact is persisted
    |
    1. Artifact UUID is generated
    2. Artifact is stored in DB
    3. CreatedAtInSec and LastUpdateInSec are set
    | Yes | +| Get artifact by UUID |
    1. Create an artifact
    2. Retrieve artifact by UUID
    3. Verify all fields match
    |
    1. Artifact is retrieved successfully
    2. All fields (name, type, uri, metadata) match original
    | Yes | +| List artifacts with filtering |
    1. Create multiple artifacts in different namespaces
    2. List artifacts with namespace filter
    3. Verify only matching artifacts are returned
    |
    1. Only artifacts in specified namespace are returned
    2. Pagination works correctly
    3. Total count is accurate
    | Yes | +| Create artifact with invalid metadata |
    1. Attempt to create artifact with malformed JSON metadata
    2. Verify error is returned
    |
    1. Error is returned with clear message
    2. No artifact is created in DB
    | Yes | +| List artifacts with pagination |
    1. Create 50 artifacts
    2. List with page_size=10
    3. Verify pagination token works
    |
    1. 10 artifacts returned per page
    2. next_page_token allows fetching next page
    3. All 50 artifacts can be retrieved
    | Yes | + +#### Artifact Task Store Tests +**Location**: `backend/src/apiserver/storage/artifact_task_store_test.go` + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Create artifact task linking artifact to task |
    1. Create an artifact and a task
    2. Create artifact_task entry linking them
    3. Verify relationship is stored
    |
    1. ArtifactTask UUID is generated
    2. Foreign keys to artifact and task are valid
    3. IOType is correctly stored
    | Yes | +| List artifact tasks by task ID |
    1. Create task with multiple input/output artifacts
    2. Create artifact_tasks for each
    3. List artifact_tasks by task_id
    |
    1. All artifact_tasks for task are returned
    2. Input and output artifacts are distinguishable
    | Yes | +| Batch create artifact tasks |
    1. Prepare list of artifact_task entries
    2. Call BatchCreateArtifactTasks
    3. Verify all are created
    |
    1. All artifact_tasks are created in single transaction
    2. Transaction rolls back if any entry fails
    | Yes | +| Create artifact task with iteration index |
    1. Create loop task
    2. Create artifact_task with iteration_index=2
    3. Verify iteration_index is stored
    |
    1. Iteration index is correctly stored
    2. Can query by iteration index
    | Yes | +| Filter artifact tasks by IO type |
    1. Create tasks with INPUT, OUTPUT, ITERATOR_OUTPUT types
    2. Query with io_type filter
    |
    1. Only matching io_type artifact_tasks returned
    | Yes | + +#### Task Store Tests +**Location**: `backend/src/apiserver/storage/task_store_test.go` + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Create runtime task |
    1. Initialize task store
    2. Create task with type=RUNTIME
    3. Verify task is persisted with all fields
    |
    1. Task UUID generated
    2. Task type, status, timestamps correctly stored
    3. Input/output parameters serialized correctly
    | Yes | +| Create DAG task with parent reference |
    1. Create parent task
    2. Create child DAG task with parent_task_uuid
    3. Verify parent-child relationship
    |
    1. ParentTaskUUID is stored correctly
    2. Can query child tasks by parent ID
    | Yes | +| Update task status and outputs |
    1. Create task with RUNNING status
    2. Update status to SUCCEEDED with output parameters
    3. Verify update persisted
    |
    1. Status updated to SUCCEEDED
    2. Output parameters stored correctly
    3. FinishedInSec timestamp set
    | Yes | +| List tasks by parent ID |
    1. Create parent task with 5 child tasks
    2. Call ListTasks with parent_id filter
    |
    1. All 5 child tasks returned
    2. No tasks from other parents returned
    | Yes | +| Store and retrieve cache fingerprint |
    1. Create task with cache fingerprint
    2. Query by fingerprint
    3. Verify cache hit detection works
    |
    1. Fingerprint stored correctly
    2. Can find task by fingerprint
    | Yes | +| Create task with loop iteration index |
    1. Create LOOP_ITERATION task with iteration_index=3
    2. Verify iteration index stored in TypeAttrs
    |
    1. TypeAttrs JSON contains iteration_index
    2. Can query tasks by iteration index
    | Yes | +| Get child tasks for DAG |
    1. Create DAG task
    2. Create 3 child runtime tasks
    3. Call GetChildTasks
    |
    1. All 3 child tasks returned
    2. Tasks ordered by creation time
    | Yes | + +#### Run Store Tests +**Location**: `backend/src/apiserver/storage/run_store_test.go` + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Create run with runtime config |
    1. Create run with parameter values
    2. Verify run stored with tasks relationship
    |
    1. Run UUID generated
    2. Runtime config stored correctly
    3. Can retrieve run with tasks
    | Yes | +| Update run state |
    1. Create run in RUNNING state
    2. Update to SUCCEEDED state
    3. Verify state change persisted
    |
    1. State updated correctly
    2. FinishedAtInSec set
    3. State history updated
    | Yes | +| List runs with filtering |
    1. Create multiple runs in different namespaces
    2. Query with namespace filter
    |
    1. Only matching runs returned
    2. Pagination works
    | Yes | + +### API Server Tests + +#### Artifact Server Tests +**Location**: `backend/src/apiserver/server/artifact_server_test.go` + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| CreateArtifact API endpoint |
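
Where a quick manual probe of these endpoints is useful, requests can be issued through the regenerated Python HTTP client. The sketch below is illustrative only: `ArtifactServiceApi` and its `create_artifact` / `list_artifacts` method and parameter names are assumptions derived from the new `artifact.proto` service, not confirmed generated signatures.

```python
import kfp_server_api

# NOTE: the ArtifactServiceApi class and its method/field names below are
# assumptions based on artifact.proto; verify against the regenerated client.
config = kfp_server_api.Configuration(host="http://localhost:8888")
api_client = kfp_server_api.ApiClient(config)
artifact_api = kfp_server_api.ArtifactServiceApi(api_client)  # hypothetical class

created = artifact_api.create_artifact(body={
    "name": "training-dataset",
    "type": "Dataset",
    "uri": "s3://mlpipeline/datasets/training",
    "namespace": "kubeflow",
    "metadata": {"rows": 1000},
})
print("created:", created)

listed = artifact_api.list_artifacts(namespace="kubeflow", page_size=10)
print("artifacts in namespace:", listed)
```
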
    1. Call CreateArtifact gRPC endpoint
    2. Verify artifact created in DB
    3. Check response contains UUID
    |
    1. Artifact created successfully
    2. Response contains all artifact fields
    | Yes | +| GetArtifact API endpoint |
    1. Create artifact
    2. Call GetArtifact with UUID
    3. Verify response matches
    |
    1. Artifact retrieved successfully
    2. All fields match original
    | Yes | +| ListArtifacts with filtering |
    1. Create artifacts with different types
    2. Call ListArtifacts with type filter
    |
    1. Only matching artifacts returned
    2. Pagination works correctly
    | Yes | +| CreateArtifactTask API endpoint |
    1. Create artifact and task
    2. Call CreateArtifactTask to link them
    |
    1. ArtifactTask created successfully
    2. Link can be queried
    | Yes | +| BatchCreateArtifactTasks API endpoint |
    1. Prepare multiple artifact_task requests
    2. Call BatchCreateArtifactTasks
    |
    1. All tasks created atomically
    2. Failure rolls back all
    | Yes | + +#### Run Server Task Endpoints Tests +**Location**: `backend/src/apiserver/server/run_server_tasks_test.go` + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| CreateTask API endpoint |
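
A similar illustrative sketch applies to the task endpoints. The `create_task` / `update_task` / `list_tasks` methods and their request shapes are assumptions based on the run.proto additions, not confirmed client signatures.

```python
import kfp_server_api

# NOTE: the task endpoints are new in this proposal; method and field names
# here are assumptions, adjust to the regenerated client.
config = kfp_server_api.Configuration(host="http://localhost:8888")
run_api = kfp_server_api.RunServiceApi(kfp_server_api.ApiClient(config))

task = run_api.create_task(
    run_id="<run-uuid>",
    body={"type": "RUNTIME", "display_name": "train", "state": "RUNNING"},
)
run_api.update_task(
    run_id="<run-uuid>",
    task_id=task.task_id,
    body={"state": "SUCCEEDED"},
)
children = run_api.list_tasks(run_id="<run-uuid>", parent_id="<parent-task-uuid>")
print(children)
```
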
    1. Call CreateTask gRPC endpoint
    2. Verify task created in DB
    |
    1. Task created with UUID
    2. Response contains task details
    | Yes | +| GetTask API endpoint |
    1. Create task
    2. Call GetTask with task_id
    |
    1. Task retrieved successfully
    2. Includes inputs, outputs, status
    | Yes | +| UpdateTask API endpoint |
    1. Create task
    2. Call UpdateTask to set status=SUCCEEDED
    |
    1. Task updated successfully
    2. Timestamps updated
    | Yes | +| ListTasks with parent filter |
    1. Create parent task with children
    2. Call ListTasks with parent_id filter
    |
    1. Only child tasks returned
    2. Correct ordering
    | Yes | + +#### API Converter Tests +**Location**: `backend/src/apiserver/server/api_converter_test.go` + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Convert Task model to API PipelineTaskDetail |
    1. Create task model with all fields
    2. Call ToApiTaskDetail converter
    3. Verify all fields mapped correctly
    |
    1. All fields converted
    2. JSON fields deserialized properly
    3. Enums mapped correctly
    | Yes | +| Convert Artifact model to API Artifact |
    1. Create artifact model
    2. Call ToApiArtifact converter
    |
    1. All fields converted correctly
    2. Metadata JSON deserialized
    | Yes | +| Convert API request to Task model |
    1. Create API CreateTaskRequest
    2. Convert to storage model
    |
    1. Fields mapped correctly
    2. Validation applied
    | Yes | + +### Driver Tests + +#### Parameter Resolution Tests +**Location**: `backend/src/v2/driver/resolver/parameters.go` (with tests in `dag_test.go`) + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Resolve component input parameters |
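
The parameter-resolution cases in this section correspond to pipelines of roughly the following shape. This is a minimal KFP SDK sketch with arbitrary component names, not one of the checked-in test pipelines.

```python
from kfp import dsl


@dsl.component
def produce(prefix: str) -> str:
    return f"{prefix}-value"


@dsl.component
def consume(text: str, count: int):
    print(text * count)


@dsl.pipeline(name="parameter-resolution-example")
def pipeline(prefix: str = "run", count: int = 2):
    # COMPONENT_INPUT: 'prefix' comes from the pipeline's runtime config.
    producer = produce(prefix=prefix)
    # TASK_OUTPUT: 'text' is resolved from the upstream task's output parameter.
    consume(text=producer.output, count=count)
```
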
    1. Create runtime config with parameters
    2. Run driver to resolve component inputs
    3. Verify ExecutorInput contains parameters
    |
    1. All component input parameters resolved
    2. Types preserved (string, number, bool, list, struct)
    3. IOType set to COMPONENT_INPUT
    | Yes | +| Resolve task output parameter reference |
    1. Create upstream task with output parameter
    2. Create downstream task referencing it
    3. Run driver to resolve input
    |
    1. Parameter value fetched from upstream task
    2. IOType set to TASK_OUTPUT
    3. Value matches upstream output
    | Yes | +| Resolve parameter iterator |
    1. Create task with parameter iterator (list)
    2. Run driver for loop task
    3. Verify iteration count calculated
    |
    1. Iteration count matches list length
    2. Each iteration gets correct parameter value
    3. IOType set to ITERATOR_INPUT
    | Yes | +| Resolve parameter from constant |
    1. Create task with constant parameter value
    2. Run driver
    |
    1. Parameter resolved to constant value
    2. IOType set to RUNTIME_VALUE_INPUT
    | Yes | +| Handle null parameter value |
    1. Create task with null parameter
    2. Run driver
    |
    1. Null value handled correctly
    2. No error thrown
    | Yes | + +#### Artifact Resolution Tests +**Location**: `backend/src/v2/driver/resolver/artifacts.go` (with tests in `dag_test.go`) + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Resolve task output artifact reference |
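
For artifact resolution, the equivalent minimal sketch passes an output artifact from one component to another; component names are illustrative rather than taken from the test data.

```python
from kfp import dsl
from kfp.dsl import Dataset, Input, Output


@dsl.component
def make_dataset(out_data: Output[Dataset]):
    with open(out_data.path, "w") as f:
        f.write("a,b\n1,2\n")


@dsl.component
def use_dataset(in_data: Input[Dataset]):
    with open(in_data.path) as f:
        print(f.read())


@dsl.pipeline(name="artifact-resolution-example")
def pipeline():
    # The driver resolves 'in_data' from the upstream task's output artifact
    # (artifact_tasks lookup, IOType TASK_OUTPUT in the new storage model).
    producer = make_dataset()
    use_dataset(in_data=producer.outputs["out_data"])
```
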
    1. Create upstream task with output artifact
    2. Create downstream task referencing it
    3. Run driver to resolve input artifact
    |
    1. Artifact fetched from artifact_tasks table
    2. URI, type, metadata populated
    3. IOType set to TASK_OUTPUT
    | Yes | +| Resolve artifact iterator |
    1. Create task with artifact iterator (list)
    2. Run driver for loop
    3. Verify iteration count
    |
    1. Iteration count matches artifact list length
    2. Each iteration gets single artifact
    | Yes | +| Resolve dsl.Collected() artifacts |
    1. Create loop with output artifacts
    2. Create downstream task using dsl.Collected()
    3. Run driver
    |
    1. All loop iteration artifacts collected
    2. Artifact list passed to downstream task
    3. IOType set to ITERATOR_OUTPUT
    | Yes | +| Resolve nested pipeline artifact output |
    1. Create nested pipeline with output artifact
    2. Reference artifact in parent pipeline
    3. Run driver
    |
    1. Artifact from nested pipeline resolved
    2. Correct artifact retrieved via scope resolution
    | Yes | + +#### Container Driver Tests +**Location**: `backend/src/v2/driver/container_test.go` + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Container driver creates runtime task |
    1. Run container driver
    2. Verify task created with type=RUNTIME
    3. Check ExecutorInput generated
    |
    1. Task created in task_store
    2. ExecutorInput contains resolved inputs
    3. PodSpec generated correctly
    | Yes | +| Container driver handles caching |
    1. Run task twice with same inputs
    2. Verify second run detects cache hit
    |
    1. First run stores fingerprint
    2. Second run finds fingerprint
    3. Cached task ID returned, no pod created
    | Yes | +| Container driver resolves all input types |
    1. Create task with parameters and artifacts
    2. Run container driver
    |
    1. All parameters resolved
    2. All artifacts resolved
    3. ExecutorInput complete
    | Yes | + +#### DAG Driver Tests +**Location**: `backend/src/v2/driver/dag_test.go` + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| DAG driver creates DAG task |
    1. Run DAG driver for pipeline task
    2. Verify task created with type=DAG
    |
    1. DAG task created
    2. Parent-child relationships established
    | Yes | +| Loop DAG driver calculates iteration count |
    1. Run loop DAG with list input
    2. Verify iteration count calculated
    |
    1. Iteration count matches list length
    2. Loop task type=LOOP
    | Yes | +| Loop iteration driver sets iteration index |
    1. Run loop iteration driver for index 2
    2. Verify iteration_index stored
    |
    1. Task type=LOOP_ITERATION
    2. TypeAttrs contains iteration_index=2
    | Yes | +| Condition driver evaluates condition |
    1. Run condition driver with expression
    2. Verify condition evaluated
    |
    1. Condition result stored
    2. Task type=CONDITION
    | Yes | +| ConditionBranch driver for If/Elif/Else |
    1. Run condition branch driver
    2. Verify branch task created
    |
    1. Task type=CONDITION_BRANCH
    2. Child conditions grouped correctly
    | Yes | + +#### Root DAG Driver Tests +**Location**: `backend/src/v2/driver/root_dag_test.go` + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Root DAG creates root task |
    1. Run root DAG driver
    2. Verify root task created
    |
    1. Root task type=ROOT_DAG
    2. Runtime config stored
    3. Run associated with task
    | Yes | +| Root DAG stores runtime parameters |
    1. Run with runtime parameter values
    2. Verify parameters stored in root task
    |
    1. All runtime parameters stored
    2. Can be retrieved by child tasks
    | Yes | + +### Launcher Tests + +#### Launcher V2 Tests +**Location**: `backend/src/v2/component/launcher_v2.go` (test file needs creation) + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Launcher updates task with output parameters |
    1. Run launcher that outputs parameters
    2. Verify task updated with output values
    |
    1. Task outputs contain parameters
    2. UpdateTask API called successfully
    | Partial | +| Launcher creates output artifacts |
    1. Run launcher that outputs artifacts
    2. Verify artifacts created via CreateArtifact API
    |
    1. Artifacts created with correct URI
    2. ArtifactTasks link artifacts to task
    | Partial | +| Launcher updates cache fingerprint on success |
    1. Run launcher to completion
    2. Verify fingerprint stored in task
    |
    1. Task updated with fingerprint
    2. Fingerprint only stored on success
    | Partial | +| Launcher uploads artifacts to object store |
    1. Run launcher with artifact outputs
    2. Verify artifacts uploaded to S3/MinIO
    |
    1. Artifacts uploaded to correct URI
    2. Artifact metadata contains URI
    | Partial | + +### Compiler Tests + +#### Argo Compiler Tests +**Location**: `backend/src/v2/compiler/argocompiler/argo_test.go` + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Compile pipeline with oneOf |
    1. Compile pipeline using dsl.OneOf
    2. Verify Argo workflow generated correctly
    |
    1. ConditionBranch tasks created
    2. Artifact selectors configured for oneOf
    | Yes | +| Compile nested pipelines |
    1. Compile multi-level nested pipeline
    2. Verify DAG tasks created for each level
    |
    1. Correct number of DAG tasks
    2. Parent-child relationships correct
    | Yes | +| Compile loop with parameter iterator |
    1. Compile pipeline with for-loop over parameters
    2. Verify loop tasks generated
    |
    1. LOOP and LOOP_ITERATION tasks in spec
    2. Iterator configuration correct
    | Yes | + +--- + +## Integration Tests + +### End-to-End Pipeline Execution Tests + +#### Basic Pipeline Execution +**Test Data**: `backend/src/v2/driver/test_data/componentInput_level_1_test.py.yaml` + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Execute simple 2-task pipeline |
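
A minimal two-task pipeline and submission call of the kind these end-to-end cases exercise is sketched below. The API server host is an assumption; adjust it to the cluster under test.

```python
import kfp
from kfp import dsl


@dsl.component
def say(msg: str) -> str:
    print(msg)
    return msg


@dsl.component
def shout(msg: str):
    print(msg.upper())


@dsl.pipeline(name="two-task-smoke-test")
def two_task_pipeline(msg: str = "hello"):
    first = say(msg=msg)
    shout(msg=first.output)


if __name__ == "__main__":
    # Assumes a port-forwarded API server; change the host for your deployment.
    client = kfp.Client(host="http://localhost:8888")
    client.create_run_from_pipeline_func(two_task_pipeline, arguments={"msg": "hello"})
```
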
    1. Submit pipeline with 2 sequential tasks
    2. Monitor execution via GetRun API
    3. Verify both tasks complete successfully
    |
    1. Pipeline completes with SUCCEEDED state
    2. Both tasks visible in run graph
    3. Task outputs stored correctly
    | Yes | +| Execute pipeline with artifact passing |
    1. Submit pipeline where task B consumes artifact from task A
    2. Verify artifact resolution
    |
    1. Artifact created by task A
    2. Task B receives artifact as input
    3. Artifact metadata correct
    | Yes | +| Execute pipeline with parameter passing |
    1. Submit pipeline with parameter flow
    2. Verify parameter values propagated
    |
    1. Output parameters from task A
    2. Input parameters to task B match
    | Yes | + +#### Loop Pipeline Execution +**Test Data**: `backend/src/v2/driver/test_data/loop_collected_raw_Iterator.py.yaml` + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Execute for-loop with raw list iterator |
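
The loop cases exercise `dsl.ParallelFor` together with `dsl.Collected`, roughly as in this sketch (component names are illustrative):

```python
from typing import List

from kfp import dsl


@dsl.component
def double(x: int) -> int:
    return x * 2


@dsl.component
def total(xs: List[int]) -> int:
    return sum(xs)


@dsl.pipeline(name="loop-collected-example")
def pipeline():
    # Raw list iterator: the backend should record a LOOP task with
    # iteration_count=3 and one LOOP_ITERATION task per element.
    with dsl.ParallelFor(items=[1, 2, 3]) as item:
        doubled = double(x=item)
    # dsl.Collected gathers every iteration's output for the downstream task.
    total(xs=dsl.Collected(doubled.output))
```
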
    1. Submit pipeline with for-loop over [1,2,3]
    2. Verify 3 iterations execute
    3. Check artifacts collected
    |
    1. LOOP task created
    2. 3 LOOP_ITERATION tasks created
    3. Each iteration completes
    4. Artifacts collected correctly
    | Yes | +| Execute for-loop with parameter iterator |
    1. Submit pipeline with for-loop over task output list
    2. Verify iterations match output length
    |
    1. Iteration count calculated correctly
    2. Each iteration gets correct parameter value
    3. Parameters collected via dsl.Collected()
    | Yes | +| Execute nested for-loops |
    1. Submit pipeline with loop inside loop
    2. Verify correct nesting and iteration counts
    |
    1. Outer loop creates inner loop for each iteration
    2. Total iterations = outer × inner
    3. Parent-child relationships correct
    | Partial | + +#### Conditional Pipeline Execution +**Test Data**: `backend/src/v2/driver/test_data/oneof_simple.yaml` + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Execute If/Elif/Else pipeline with If branch taken |
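
The conditional cases exercise `dsl.If` / `dsl.Elif` / `dsl.Else` with `dsl.OneOf`, along the lines of this sketch (component names and thresholds are arbitrary):

```python
from kfp import dsl


@dsl.component
def classify(score: int) -> str:
    return "high" if score > 7 else "low"


@dsl.component
def label(text: str) -> str:
    return text


@dsl.component
def report(result: str):
    print("selected branch output:", result)


@dsl.pipeline(name="oneof-example")
def pipeline(score: int = 5):
    branch_input = classify(score=score)
    with dsl.If(branch_input.output == "high"):
        a = label(text="took-if-branch")
    with dsl.Elif(branch_input.output == "low"):
        b = label(text="took-elif-branch")
    with dsl.Else():
        c = label(text="took-else-branch")
    # dsl.OneOf resolves to the output of whichever branch actually ran.
    report(result=dsl.OneOf(a.output, b.output, c.output))
```
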
    1. Submit pipeline with condition evaluating to true for If
    2. Verify only If branch executes
    |
    1. CONDITION_BRANCH task created
    2. CONDITION task for If created and succeeds
    3. Elif and Else conditions not executed
    4. Correct branch artifact output
    | Yes | +| Execute If/Elif/Else pipeline with Elif branch taken |
    1. Submit pipeline with condition true for Elif
    2. Verify only Elif branch executes
    |
    1. If condition fails
    2. Elif condition succeeds
    3. Elif branch tasks execute
    4. OneOf resolves to Elif output
    | Yes | +| Execute If/Elif/Else with Else branch taken |
    1. Submit pipeline with all conditions false
    2. Verify Else branch executes
    |
    1. If and Elif fail
    2. Else branch executes as default
    | Yes | +| Execute pipeline with dsl.OneOf() output |
    1. Submit pipeline using dsl.OneOf to select branch output
    2. Verify downstream task receives correct output
    |
    1. OneOf resolves to executed branch's output
    2. Downstream task receives artifact
    3. IOType = ONEOF_OUTPUT
    | Yes | + +#### Nested Pipeline Execution +**Test Data**: `backend/src/v2/driver/test_data/nested_naming_conflicts.py.yaml` + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Execute 3-level nested pipeline |
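
A rough sketch of the nested-pipeline shape being exercised, using pipelines as components. Names are illustrative and do not reproduce `nested_naming_conflicts.py` exactly.

```python
from kfp import dsl


@dsl.component
def produce(name: str) -> str:
    return f"data-from-{name}"


@dsl.component
def consume(text: str):
    print(text)


@dsl.pipeline(name="pipeline-c")
def pipeline_c() -> str:
    return produce(name="pipeline-c").output


@dsl.pipeline(name="pipeline-b")
def pipeline_b() -> str:
    # Using a pipeline as a component creates a nested DAG task.
    return pipeline_c().output


@dsl.pipeline(name="pipeline-a")
def pipeline_a():
    inner = pipeline_b()
    consume(text=inner.output)
```
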
    1. Submit pipeline with pipeline_a → pipeline_b → pipeline_c
    2. Verify all DAG tasks created
    3. Check artifact passing through levels
    |
    1. 3 DAG tasks created (pipeline_b, pipeline_c, and their nesting)
    2. Artifacts passed correctly between levels
    3. Naming conflicts resolved via scope
    | Yes | +| Verify nested naming conflict resolution |
    1. Submit nested pipeline with same task names in different levels
    2. Verify correct task is resolved for artifact inputs
    |
    1. Task "a" in pipeline_c gets artifact from task "b" in pipeline_c, not pipeline_b
    2. Scope-based resolution works correctly
    | Yes | + +### API Integration Tests + +#### Artifact Service Integration +**Location**: New test file needed + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Create artifact via API and verify in DB |
    1. Call CreateArtifact gRPC endpoint
    2. Query DB directly to verify storage
    |
    1. Artifact exists in DB
    2. All fields match API request
    | No | +| Create artifact task and query via ListArtifactTasks |
    1. Create artifact_task linking artifact to task
    2. Call ListArtifactTasks API
    |
    1. ArtifactTask returned
    2. Correct artifact and task IDs
    | No | +| Batch create artifacts and verify atomicity |
    1. Create batch with 1 invalid artifact
    2. Verify entire batch fails
    |
    1. No artifacts created
    2. Transaction rolled back
    | No | + +#### Run Service Task Integration +**Location**: New test file needed + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Create task via API and retrieve via GetTask |
    1. Call CreateTask API
    2. Call GetTask with returned task_id
    |
    1. Task retrieved successfully
    2. All fields match
    | No | +| Update task status and verify state transition |
    1. Create task with RUNNING status
    2. Update to SUCCEEDED
    3. Verify state history
    |
    1. Status updated
    2. Timestamps set correctly
    | No | +| List tasks with complex filters |
    1. Create tasks with various attributes
    2. Query with filters (parent_id, status, type)
    |
    1. Only matching tasks returned
    2. Pagination works
    | No | + +--- + +## API Contract Tests + +### Protobuf Validation Tests + +#### v2beta1 Artifact Proto Tests +**Location**: `backend/api/v2beta1/artifact.proto` + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| CreateArtifact request validation |
    1. Create request with all required fields
    2. Validate proto serialization
    |
    1. Proto serializes correctly
    2. All fields accessible
    | Yes | +| CreateArtifact request with missing required fields |
    1. Create request without namespace
    2. Attempt to process
    |
    1. Validation error returned
    | Yes | +| ArtifactTask proto with iteration_index |
    1. Create ArtifactTask with iteration_index
    2. Verify field serialization
    |
    1. iteration_index field present
    2. Can be null for non-loop tasks
    | Yes | + +#### v2beta1 Run Proto Tests +**Location**: `backend/api/v2beta1/run.proto` + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| PipelineTaskDetail proto completeness |
    1. Create PipelineTaskDetail with all task types
    2. Verify all fields serialize
    |
    1. All task types (RUNTIME, DAG, LOOP, etc.) supported
    2. Inputs/outputs correctly structured
    | Yes | +| IOParameter proto with all value types |
    1. Create IOParameter with string, number, bool, list, struct
    2. Verify serialization
    |
    1. All value types supported
    2. JSON structs handled correctly
    | Yes | +| IOArtifact proto with metadata |
    1. Create IOArtifact with custom metadata
    2. Verify metadata JSON
    |
    1. Metadata field accepts arbitrary JSON
    | Yes | + +### Generated Client Tests + +#### Go HTTP Client Tests +**Location**: `backend/api/v2beta1/go_http_client/` + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| ArtifactServiceClient CreateArtifact call |
    1. Initialize client
    2. Call CreateArtifact method
    3. Verify HTTP request formed correctly
    |
    1. HTTP POST to /v2beta1/artifacts
    2. Request body matches proto
    | Partial | +| RunServiceClient GetTask call |
    1. Call GetTask with task_id
    2. Verify HTTP GET request
    |
    1. HTTP GET to /v2beta1/runs/{run_id}/tasks/{task_id}
    | Partial | + +#### Python HTTP Client Tests +**Location**: `backend/api/v2beta1/python_http_client/test/` + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| ArtifactServiceApi create_artifact call |
    1. Initialize Python client
    2. Call create_artifact
    3. Verify request
    |
    1. Correct HTTP request formed
    2. Response deserialized to V2beta1Artifact
    | Yes | +| RunServiceApi list_tasks call |
    1. Call list_tasks with filters
    2. Verify query parameters
    |
    1. Query params include filters
    2. Response is V2beta1ListTasksResponse
    | Yes | + +--- + +## Conditional Logic Tests + +### OneOf Implementation Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| OneOf with single output artifact |
    1. Submit pipeline with If/Else using dsl.OneOf for artifact
    2. Verify only executed branch output is returned
    |
    1. OneOf artifact selector matches executed branch
    2. Downstream task receives correct artifact
    3. IOType = ONEOF_OUTPUT
    | Yes | +| OneOf with multiple possible outputs |
    1. Submit pipeline with If/Elif/Elif/Else with dsl.OneOf
    2. Verify correct output selected
    |
    1. Only one output artifact created
    2. Matches executed branch
    | Yes | +| OneOf with nested conditions |
    1. Submit pipeline with nested If inside If with dsl.OneOf
    2. Verify resolution through nesting
    |
    1. Correct nested branch output selected
    2. Artifact resolution correct
    | Partial | +| Multiple OneOf in same pipeline |
    1. Submit pipeline with 2 separate dsl.OneOf outputs
    2. Verify both resolve independently
    |
    1. Each OneOf resolves to correct branch
    2. No interference between them
    | Partial | + +### Condition Evaluation Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| String equality condition |
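
String-equality and numeric-comparison conditions map onto `dsl.If` expressions such as the following sketch (values arbitrary):

```python
from kfp import dsl


@dsl.component
def get_score() -> int:
    return 12


@dsl.component
def get_label() -> str:
    return "test"


@dsl.component
def act(msg: str):
    print(msg)


@dsl.pipeline(name="condition-evaluation-example")
def pipeline():
    score = get_score()
    label = get_label()
    # Numeric comparison condition.
    with dsl.If(score.output > 10):
        act(msg="score above threshold")
    # String equality condition.
    with dsl.If(label.output == "test"):
        act(msg="label matched")
```
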
    1. Create condition: "value" == task.output
    2. Run with matching and non-matching values
    |
    1. Condition correctly evaluates true/false
    2. Branch execution matches condition
    | Yes | +| Numeric comparison condition |
    1. Create condition: task.output > 10
    2. Run with values above and below threshold
    |
    1. Numeric comparison works
    2. Correct branch executes
    | Yes | +| Boolean condition |
    1. Create condition based on bool output
    2. Run with true and false values
    |
    1. Boolean condition evaluated correctly
    | Yes | +| Complex condition with AND/OR |
    1. Create condition: (a > 5) AND (b == "test")
    2. Test various combinations
    |
    1. Complex expressions evaluate correctly
    | Partial | + +### Conditional Artifact Passing Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Pass artifact from If branch to downstream task |
    1. If branch creates artifact
    2. Downstream task consumes via dsl.OneOf
    |
    1. Artifact passed correctly
    2. Type and metadata preserved
    | Yes | +| Pass multiple artifacts from conditional branch |
    1. Branch creates 2 artifacts
    2. Downstream tasks consume both
    |
    1. Both artifacts passed
    2. Correct artifact resolution
    | Partial | +| Conditional artifact with different types in branches |
    1. If branch outputs Dataset, Else outputs Model
    2. Downstream task accepts Artifact (base type)
    |
    1. Type compatibility validated
    2. Artifact passed correctly
    | Partial | + +--- + +## Security Tests + +### Authentication Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Driver authenticates with Pipeline Runner SA token |
    1. Configure Pipeline Runner SA
    2. Run driver
    3. Verify auth header contains SA token
    |
    1. Driver sends Authorization header
    2. Token is valid SA token
    | No | +| Launcher authenticates with Pipeline Runner SA token |
    1. Run launcher
    2. Verify API calls include auth token
    |
    1. Launcher authenticated correctly
    | No | +| API server rejects unauthenticated requests |
    1. Call CreateArtifact without auth header
    2. Verify rejection
    |
    1. 401 Unauthorized returned
    | No | + +### Authorization Tests (RBAC) + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| User with run RBAC can access artifacts in run |
    1. Grant user RBAC on run resource
    2. User calls ListArtifacts for run's artifacts
    |
    1. User can access artifacts
    2. No explicit artifact RBAC needed
    | No | +| User without run RBAC cannot access run's artifacts |
    1. User has no RBAC on run
    2. User calls GetArtifact for run's artifact
    |
    1. 403 Forbidden returned
    | No | +| Pipeline Runner SA has namespace-level artifact access |
    1. Configure Pipeline Runner SA RBAC
    2. Driver/Launcher creates artifacts
    |
    1. Artifacts created successfully
    2. SA has sufficient permissions
    | No | +| Cross-namespace artifact access denied |
    1. User tries to access artifact from different namespace
    2. Verify denial
    |
    1. 403 Forbidden
    2. Namespace isolation enforced
    | No | + +### Multi-Tenant Isolation Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Artifacts isolated by namespace |
    1. Create artifacts in namespace-a and namespace-b
    2. List artifacts in namespace-a
    |
    1. Only namespace-a artifacts returned
    2. No cross-namespace leakage
    | No | +| Tasks isolated by namespace |
    1. Create tasks in multiple namespaces
    2. Query tasks with namespace filter
    |
    1. Only matching namespace tasks returned
    | No | +| Run isolation by namespace and experiment |
    1. Create runs in different namespaces
    2. Verify isolation
    |
    1. Runs properly isolated
    | No | + +--- + +## Performance Tests + +### Baseline Performance Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Measure task creation latency |
    1. Create 100 tasks sequentially
    2. Measure avg latency per CreateTask call
    3. Compare to baseline
    |
    1. Avg latency < 100ms
    2. No regression vs. MLMD-based system
    | Partial | +| Measure artifact creation latency |
    1. Create 100 artifacts
    2. Measure avg latency
    |
    1. Avg latency < 50ms
    | Partial | +| Measure task update latency |
    1. Update 100 tasks
    2. Measure latency
    |
    1. Avg latency < 100ms
    | Partial | + +### Load Testing + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Concurrent pipeline executions |
    1. Submit 50 pipelines concurrently
    2. Monitor API server CPU/memory
    3. Verify all complete successfully
    |
    1. All pipelines complete
    2. No API server crashes
    3. Resource usage within limits
    | No | +| High-frequency task updates |
    1. Simulate 100 launcher pods updating tasks concurrently
    2. Monitor DB connections and latency
    |
    1. All updates succeed
    2. DB connection pool not exhausted
    3. No deadlocks
    | No | +| Large artifact metadata |
    1. Create artifacts with 10KB metadata JSON
    2. Verify no performance degradation
    |
    1. Large metadata handled
    2. No significant latency increase
    | No | + +### Database Performance Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Task query performance with large task count |
    1. Create run with 1000 tasks
    2. Query ListTasks for run
    3. Measure query time
    |
    1. Query completes in < 1 second
    2. Pagination works efficiently
    | No | +| Artifact query performance with joins |
    1. Create 1000 artifacts linked to tasks
    2. Query ListArtifactTasks
    3. Measure time
    |
    1. Query optimized with proper indexes
    2. Completes in < 1 second
    | No | + +### Cache Performance Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Cache hit detection speed |
    1. Run pipeline twice with caching
    2. Measure fingerprint lookup time on 2nd run
    |
    1. Fingerprint lookup < 50ms
    2. Cache hit detected correctly
    | Partial | +| Cache with large fingerprint dataset |
    1. Create 10,000 cached tasks
    2. Attempt cache lookup
    |
    1. Lookup remains fast
    2. Index on fingerprint column effective
    | No | + +--- + +## Regression Tests + +### Full E2E Regression Suite + +#### Standard RHOAI Cluster Testing + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Execute all KFP sample pipelines |
    1. Deploy KFP to standard cluster
    2. Submit all official sample pipelines
    3. Verify all complete successfully
    |
    1. All samples pass
    2. Outputs match expected
    3. No crashes or errors
    | Partial | +| Execute pipelines with all component types |
    1. Run pipelines using container, importer, condition, loop components
    2. Verify all work correctly
    |
    1. All component types supported
    2. Backward compatibility maintained
    | Partial | +| Recurring runs continue to work |
    1. Create recurring run schedule
    2. Wait for multiple executions
    3. Verify each execution succeeds
    |
    1. Scheduled runs execute
    2. Each run completes successfully
    | Partial | + +#### Caching Regression Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Previously cached pipelines still use cache |
    1. Migrate DB with cached tasks
    2. Run same pipeline post-migration
    3. Verify cache hit
    |
    1. Cache fingerprint found
    2. Task reused, not re-executed
    | No | +| New cache entries work correctly |
    1. Run new pipeline twice with caching enabled
    2. Verify cache hit on 2nd run
    |
    1. Fingerprint stored correctly
    2. Cache mechanism functional
    | Partial | + +#### Metrics Regression Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| System metrics logged to metrics table |
    1. Run pipeline outputting system.Metrics
    2. Verify metrics stored in metrics table
    |
    1. Metrics in metrics table, not artifacts table
    2. Values correct
    | Partial | +| Classification metrics stored correctly |
    1. Output system.ClassificationMetrics
    2. Verify JSON storage
    |
    1. JSON metrics stored
    2. Can be retrieved and displayed
    | Partial | + +#### Artifact Regression Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Artifact download from object store works |
    1. Pipeline creates artifact
    2. Use UI to download artifact
    |
    1. Pre-signed URL generated
    2. Artifact downloaded correctly
    | No | +| Artifact visualization in UI |
    1. Create pipeline with visualizations
    2. View in UI
    |
    1. Visualizations render
    2. Data correct
    | No | + +--- + +## Migration Tests + +### Database Migration Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Migration script runs without errors |
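
A rough sketch of the count-based validation described in this section, assuming direct SQL access to both databases. The MLMD table names (`Execution`, `Artifact`), the KFP database and table names, and the connection details are assumptions to adapt to the actual environment.

```python
import pymysql

# NOTE: table names, database names, and credentials below are assumptions;
# adapt them to the real pre-migration (MLMD) and post-migration (KFP) DBs.


def count(conn, query: str) -> int:
    with conn.cursor() as cur:
        cur.execute(query)
        return cur.fetchone()[0]


mlmd = pymysql.connect(host="localhost", user="root", password="", database="metadb")
kfp_db = pymysql.connect(host="localhost", user="root", password="", database="mlpipeline")

executions = count(mlmd, "SELECT COUNT(*) FROM Execution")
mlmd_artifacts = count(mlmd, "SELECT COUNT(*) FROM Artifact")
tasks = count(kfp_db, "SELECT COUNT(*) FROM tasks")
artifacts = count(kfp_db, "SELECT COUNT(*) FROM artifacts")
metrics = count(kfp_db, "SELECT COUNT(*) FROM metrics")

# Every MLMD execution should map to at least one task; metric artifacts move
# to the metrics table while all other artifacts stay in the artifacts table.
assert tasks >= executions, "fewer tasks than MLMD executions after migration"
assert artifacts + metrics >= mlmd_artifacts, "artifact rows lost during migration"
```
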
    1. Prepare MLMD database dump
    2. Run migration script
    3. Verify completion without errors
    |
    1. Script completes successfully
    2. All MLMD executions converted to tasks
    3. All MLMD artifacts converted to KFP artifacts
    | No | +| Task table dropped and recreated |
    1. Check task table schema before migration
    2. Run migration
    3. Verify new schema
    |
    1. Old task table dropped
    2. New schema with correct columns
    | No | +| MLMD executions converted to tasks |
    1. Count MLMD executions before migration
    2. Run migration
    3. Count tasks after migration
    |
    1. Task count >= execution count
    2. All execution types mapped correctly (Container→RUNTIME, DAG→DAG, etc.)
    | No | +| MLMD artifacts converted to KFP artifacts |
    1. Count MLMD artifacts
    2. Run migration
    3. Count KFP artifacts
    |
    1. Non-metric artifacts migrated to artifacts table
    2. Metric artifacts migrated to metrics table
    | No | +| Cache fingerprints migrated correctly |
    1. Identify MLMD executions that have cache fingerprints
    2. Migrate
    3. Verify fingerprints in tasks
    |
    1. Only COMPLETE executions have fingerprints
    2. Fingerprints match MLMD values
    | No | +| Artifact relationships preserved |
    1. Check MLMD events for artifact-execution links
    2. Migrate
    3. Verify artifact_tasks table
    |
    1. All artifact-task relationships preserved
    2. Input/output types correct
    | No | + +### Post-Migration Validation Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| API server starts successfully post-migration |
    1. Complete migration
    2. Start API server
    3. Verify no startup errors
    |
    1. Server starts without errors
    2. Health check passes
    | No | +| Existing runs visible in UI |
    1. Migrate DB with completed runs
    2. Open UI
    3. Verify runs listed
    |
    1. All pre-migration runs visible
    2. Run details accessible
    | No | +| Can execute new pipelines post-migration |
    1. Complete migration
    2. Submit new pipeline
    3. Verify execution
    |
    1. New pipeline executes successfully
    2. Uses new task/artifact storage
    | No | +| Pre-migration artifacts accessible |
    1. Migrate DB
    2. Access artifact from old run
    3. Download artifact
    |
    1. Artifact metadata retrieved
    2. Artifact downloadable
    | No | + +### Rollback Testing + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Database backup can be restored |
    1. Backup DB before migration
    2. Run migration
    3. Restore backup
    4. Verify MLMD-based system works
    |
    1. Backup restores successfully
    2. Old system functional
    | No | + +--- + +## Task Type-Specific Tests + +Based on the proposal design document, the new system defines specific task types to replace MLMD executions. Each task type requires specific testing. + +### Task Type Definitions from Proposal + +| Task Type | Purpose | MLMD Equivalent | +|-----------|---------|-----------------| +| ROOT_DAG | Root pipeline execution | Pipeline context | +| DAG | Task group execution | DAG execution | +| RUNTIME | Container executor execution | Container execution | +| LOOP | For-loop group | Loop DAG execution | +| LOOP_ITERATION | Single loop iteration | Loop iteration DAG execution | +| CONDITION | Conditional evaluation | Condition DAG execution | +| CONDITION_BRANCH | If/Elif/Else branch grouping | Condition branch DAG execution | +| EXITHANDLER | Exit handler task grouping | Exit handler DAG execution | + +### ROOT_DAG Task Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Root DAG stores pipeline runtime config |
    1. Submit pipeline with runtime parameters
    2. Verify ROOT_DAG task created
    3. Check runtime config stored in task
    |
    1. ROOT_DAG task type set correctly
    2. Runtime config JSON stored in task inputs
    3. Can be retrieved by child tasks
    | Yes | +| Root DAG associates with run |
    1. Create ROOT_DAG task
    2. Verify run_id association
    3. Check parent_task_uuid is null
    |
    1. Task linked to run correctly
    2. No parent task (root level)
    | Yes | +| Root DAG passes parent_task_id to child drivers (replacing execution_id) |
    1. ROOT_DAG driver creates task
    2. Child drivers receive parent_task_id
    |
    1. Child drivers get parent_task_id instead of execution_id
    | Yes | + +### RUNTIME Task Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Container driver creates RUNTIME task |
    1. Run container driver
    2. Verify RUNTIME task created
    3. Check executor inputs populated
    |
    1. Task type set to RUNTIME
    2. ExecutorInput contains resolved parameters/artifacts
    3. PodSpec generated with --task_id flag
    | Yes | +| RUNTIME task stores cache fingerprint on success |
    1. Complete launcher execution successfully
    2. Verify task updated with fingerprint
    |
    1. Fingerprint stored only on successful completion
    2. Can be found for cache hit detection
    | Yes | +| RUNTIME task updates with output artifacts |
    1. Launcher creates output artifacts
    2. Verify task updated with outputs
    |
    1. Task outputs contain artifact references
    2. ArtifactTasks created linking artifacts to task
    | Yes | + +### LOOP and LOOP_ITERATION Task Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| LOOP task calculates iteration count |
    1. Create for-loop with list input [1,2,3,4,5]
    2. Verify LOOP task created
    3. Check iteration_count in TypeAttrs
    |
    1. Task type set to LOOP
    2. TypeAttrs JSON contains iteration_count=5
    3. Child LOOP_ITERATION tasks created
    | Yes | +| LOOP_ITERATION task sets iteration index |
    1. Run loop iteration driver for index 2
    2. Verify LOOP_ITERATION task created
    3. Check iteration_index stored
    |
    1. Task type set to LOOP_ITERATION
    2. TypeAttrs contains iteration_index=2
    3. Parent task is LOOP task
    | Yes | +| Loop artifact collection works |
    1. Each LOOP_ITERATION creates output artifact
    2. Downstream task uses dsl.Collected()
    3. Verify all artifacts collected
    |
    1. All iteration artifacts linked to downstream task
    2. IOType set to ITERATOR_OUTPUT
    | Yes | + +### CONDITION and CONDITION_BRANCH Task Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| CONDITION task evaluates condition expression |
    1. Create If condition with expression "param > 5"
    2. Run condition driver with param=10
    3. Verify condition result
    |
    1. Task type set to CONDITION
    2. Condition result stored in task outputs
    3. Child tasks execute based on result
    | Yes | +| CONDITION_BRANCH task groups If/Elif/Else |
    1. Create If/Elif/Else structure
    2. Verify CONDITION_BRANCH task created
    3. Check child CONDITION tasks
    |
    1. Task type set to CONDITION_BRANCH
    2. Multiple child CONDITION tasks created
    3. OneOf resolution works correctly
    | Yes | +| Nested conditions create proper task hierarchy |
    1. Create nested If inside If
    2. Verify task parent-child relationships
    |
    1. Outer CONDITION_BRANCH contains inner CONDITION_BRANCH
    2. Proper task nesting preserved
    | Partial | + +### EXITHANDLER Task Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Exit handler tasks grouped under EXITHANDLER |
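
Exit-handler grouping corresponds to pipelines like this minimal `dsl.ExitHandler` sketch (component names illustrative):

```python
from kfp import dsl


@dsl.component
def cleanup():
    print("always runs, even if the pipeline fails")


@dsl.component
def train():
    print("training step")


@dsl.pipeline(name="exit-handler-example")
def pipeline():
    exit_task = cleanup()
    # Every task created inside this block should be grouped under an
    # EXITHANDLER task in the new task model.
    with dsl.ExitHandler(exit_task=exit_task):
        train()
```
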
    1. Create pipeline with dsl.ExitHandler
    2. Verify EXITHANDLER task created
    3. Check exit handler tasks are children
    |
    1. Task type set to EXITHANDLER
    2. Exit handler components are child tasks
    3. Executes regardless of pipeline success/failure
    | Partial | +| Exit handler execution name parsing |
    1. Migration script encounters execution with name "exit-handler-*"
    2. Verify converted to EXITHANDLER task
    |
    1. Execution name parsed correctly
    2. Task type set to EXITHANDLER
    | No | + +--- + +## Metrics Handling Tests + +The proposal specifies that metrics are no longer stored as artifacts but in a dedicated metrics table. This requires comprehensive testing. + +### Metrics Storage Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| system.Metrics stored in metrics table |
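
The metrics-storage cases are driven by components that emit `system.Metrics` outputs, for example this minimal sketch:

```python
from kfp import dsl
from kfp.dsl import Metrics, Output


@dsl.component
def evaluate(accuracy: float, metrics: Output[Metrics]):
    # With MLMD removed, these key/value pairs should land in the dedicated
    # metrics table rather than being stored as an artifact with a URI.
    metrics.log_metric("accuracy", accuracy)
    metrics.log_metric("f1", 0.87)


@dsl.pipeline(name="metrics-example")
def pipeline(accuracy: float = 0.92):
    evaluate(accuracy=accuracy)
```
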
    1. Create component that outputs system.Metrics
    2. Run pipeline
    3. Verify metrics stored in metrics table, not artifacts
    |
    1. No artifact created for metrics
    2. Entry created in metrics table
    3. Key-value pairs stored correctly
    | Yes | +| system.ClassificationMetrics stored as JSON |
    1. Output system.ClassificationMetrics
    2. Verify JSON storage in metrics table
    |
    1. Classification metrics stored as JSON
    2. Can be retrieved for UI display
    | Yes | +| system.SlicedClassificationMetrics stored as JSON |
    1. Output system.SlicedClassificationMetrics
    2. Verify JSON storage
    |
    1. Sliced metrics stored as JSON
    2. Structure preserved
    | Yes | +| Metrics have no URI |
    1. Create metrics
    2. Verify the URI field is not populated
    3. Check UI doesn't show download link
    |
    1. Metrics have no URI
    2. UI shows metrics data inline
    3. No artifact download option
    | Partial | + +### Metrics Resolution Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Driver resolves metrics as input artifacts |
    1. Task A outputs system.Metrics
    2. Task B consumes metrics as input
    3. Verify resolution works
    |
    1. Driver queries metrics table
    2. Converts metrics to RuntimeArtifact format
    3. Downstream task receives metrics
    | Yes | +| GetOutputMetricsByTaskID function works |
    1. Create task with metrics output
    2. Call GetOutputMetricsByTaskID
    3. Verify metrics returned
    |
    1. Function queries task's output_metrics
    2. Returns map[string]*OutputArtifact
    | Yes | +| Metrics schema type determination |
    1. Driver determines artifact schema type from ComponentInputSpec
    2. Verify metrics vs artifacts handled correctly
    |
    1. SchemaTitle correctly identified
    2. Metrics routed to metrics table
    3. Artifacts routed to artifacts table
    | Yes | + +### Metrics Migration Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| MLMD metrics artifacts migrated to metrics table |
    1. MLMD DB contains system.Metrics artifacts
    2. Run migration script
    3. Verify metrics moved to metrics table
    |
    1. Metrics artifacts not in artifacts table
    2. Entries created in metrics table
    3. Custom properties converted to JSON
    | No | +| Non-metrics artifacts remain in artifacts table |
    1. MLMD DB contains Model/Dataset artifacts
    2. Run migration
    3. Verify only metrics moved
    |
    1. Model/Dataset artifacts in artifacts table
    2. Only metrics moved to metrics table
    | No | + +--- + +## Authentication and Authorization Tests (Enhanced) + +Based on the proposal's specific RBAC requirements for Driver/Launcher and new API endpoints. + +### Driver/Launcher Authentication Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Driver uses Pipeline Runner SA token for RunService |
    1. Configure Pipeline Runner SA with tokens
    2. Run driver
    3. Verify RunService calls include auth header
    |
    1. Authorization header contains SA token
    2. RunService accepts authenticated requests
    | No | +| Driver uses Pipeline Runner SA token for ArtifactService |
    1. Driver creates artifacts
    2. Verify ArtifactService calls authenticated
    |
    1. CreateArtifact calls include auth header
    2. ArtifactService validates SA token
    | No | +| Launcher uses Pipeline Runner SA token for all APIs |
    1. Launcher updates task status
    2. Launcher creates artifacts
    3. Verify both calls authenticated
    |
    1. UpdateTask includes auth header
    2. CreateArtifact includes auth header
    | No | +| Pipeline Runner SA has required RBAC permissions |
    1. Configure minimal RBAC for Pipeline Runner SA
    2. Attempt pipeline execution
    3. Verify success
    |
    1. SA can create/update tasks
    2. SA can create artifacts
    3. SA has namespace-level access
    | No | + +### SubjectAccessReview Integration Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| RunService endpoints use SubjectAccessReview |
    1. Call CreateTask with valid token but no RBAC
    2. Verify resourceManager.IsAuthorized called
    3. Check 403 returned
    |
    1. SubjectAccessReview performed
    2. Request denied due to insufficient RBAC
    | No | +| ArtifactService endpoints use SubjectAccessReview |
    1. Call CreateArtifact without proper RBAC
    2. Verify authorization check
    |
    1. Authorization checked against "artifacts" resource
    2. 403 Forbidden returned
    | No | + +### Artifact Access Control Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Artifacts in runs accessible with run RBAC |
    1. User has RBAC on run resource
    2. User calls GetArtifact for artifact in that run
    3. Verify access granted
    |
    1. Access granted based on run RBAC
    2. No separate artifact RBAC needed
    | No | +| Get/List artifacts requires artifacts resource RBAC |
    1. User calls ListArtifacts globally
    2. Verify requires "artifacts" resource permission
    |
    1. RBAC checked against "artifacts" resource
    2. Only artifacts user has access to returned
    | No | +| Reimport=false artifacts require originating namespace RBAC |
    1. Artifact with Reimport=false from namespace-a
    2. User in namespace-b tries to access
    3. Verify denied
    |
    1. Access denied due to originating namespace
    2. RBAC checked against artifact's namespace
    | No | + +### Pre-signed URL Download Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| UI artifact download uses pre-signed URLs |
    1. User views artifact in UI
    2. Click download link
    3. Verify pre-signed URL generated
    |
    1. KFP server generates pre-signed URL
    2. No direct UI download from object store
    3. RBAC enforced for URL generation
    | No | +| Pre-signed URL authorization via RBAC |
    1. User without artifact access tries download
    2. Verify URL generation denied
    |
    1. URL generation requires artifact access
    2. 403 returned if no access
    | No | + +--- + +## Frontend Changes Tests + +The proposal specifies specific UI changes to transition from MLMD queries to Task API queries. + +### UI Data Fetching Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Run Details page uses Task API instead of MLMD |
    1. Open run details page
    2. Verify API calls made
    3. Check data displayed correctly
    |
    1. No MLMD API calls (getKfpV2RunContext, getExecutionsFromContext)
    2. Uses run.run_details.task_details from Run object
    3. Task information displayed correctly
    | No | +| RuntimeNodeDetailsV2 uses tasks instead of executions |
    1. Click on task node in run graph
    2. Verify task details sidebar
    3. Check all information present
    |
    1. Task details fetched from tasks field
    2. All execution info available as task info
    3. No missing information
    | No | +| Artifact fetching uses fetchArtifactsFromTasks |
    1. View artifacts in run details
    2. Verify artifact API calls
    |
    1. Artifacts fetched via Task relationship
    2. ListArtifactTasks API used
    3. All artifacts displayed
    | No | +| Execution page removed |
    1. Verify execution page no longer accessible
    2. All execution info available in task nodes
    |
    1. Execution page returns 404 or redirects
    2. Task node details contain all needed info
    | No | + +### Metrics Visualization Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Metrics fetched from Task response instead of Artifacts |
    1. View task with metrics output
    2. Check Visualization nav
    3. Verify metrics displayed
    |
    1. Metrics data from task.output_metrics
    2. No MLMD artifact query for metrics
    3. Visualization renders correctly
    | No | +| Metrics artifacts show no URI in UI |
    1. View metrics in artifact list
    2. Verify no download link
    |
    1. Metrics show as data, not downloadable files
    2. No URI displayed
    | No | +| Classification metrics JSON displayed correctly |
    1. View ClassificationMetrics in UI
    2. Verify JSON data rendered
    |
    1. JSON structure preserved
    2. Data displayed in appropriate format
    | No | + +### Compare UI Tests + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| CompareV2 uses Task API instead of MLMD |
    1. Compare multiple runs
    2. Verify API calls made
    3. Check comparison data correct
    |
    1. No MLMD calls (getKfpV2RunContext, getExecutionsFromContext)
    2. Uses Task API for all run data
    3. Comparison functionality preserved
    | No | + +### Store Session Info Tests + +The proposal removes store_session_info as a custom property and handles it directly in launcher. + +| Test Case Summary | Test Steps | Expected Result | Automated? | +|-------------------|------------|-----------------|------------| +| Artifact download works without store_session_info property |
    1. Create artifact without store_session_info
    2. Download via UI
    3. Verify download succeeds
    |
    1. Server builds session info from launcher config
    2. Download works correctly
    3. No dependency on custom property
    | No | +| Server builds session info from pipeline root |
    1. Server handles artifact download request
    2. Verify cfg.GetStoreSessionInfo(pipelineRoot) called
    |
    1. Session info built server-side
    2. Same logic as root driver
    | No | +| Launcher uses session info directly |
    1. Launcher uploads artifacts
    2. Verify session info used from launcher config
    3. Check no custom property stored
    |
    1. Session info used directly in launcher
    2. No store_session_info custom property
    | Partial | + +--- + +### Continuous Integration + +**Automated Test Execution**: +- Unit tests run on every commit (via GitHub Actions) +- Integration tests run on PR merge to master +- Nightly full regression suite +- Performance tests run weekly + +**GitHub Workflow Integration**: +- `.github/workflows/kfp-sdk-tests.yml` - SDK tests +- `.github/workflows/api-server-tests.yml` - API server tests +- `.github/workflows/e2e-test.yml` - End-to-end tests +- New workflow needed: `.github/workflows/mlmd-removal-tests.yml` - Specialized tests for this refactoring + +### Manual Testing Requirements + +**Manual Verification Needed**: +1. **UI Testing**: Frontend changes cannot be fully automated + - Verify run details page shows tasks correctly + - Check artifact visualization + - Validate metrics display + - Test artifact download links + +2. **Migration Testing**: Requires real MLMD database + - Migration script execution + - Post-migration validation + - Rollback testing + +3. **Performance Testing**: Realistic load testing + - Deploy to larger cluster (GKE/EKS) + - Run at scale (100+ concurrent pipelines) + - Monitor resource usage + +4. **Security Testing**: Multi-user scenarios + - Multiple users with different RBAC roles + - Cross-namespace access attempts + - Token expiration scenarios + +--- + +## Success Criteria + +### Functional Success Criteria + +1. **All Unit Tests Pass**: 100% of new unit tests pass, 80%+ code coverage +2. **All Integration Tests Pass**: End-to-end pipelines execute successfully +3. **New Features Work**: OneOf, nested pipelines, parameter iterators function correctly +4. **No Regressions**: All existing sample pipelines continue to work +5. **Migration Validated**: Migration script successfully converts MLMD data to new schema + +### Performance Success Criteria + +1. **No Significant Latency Regression**: Task creation/update latency within 20% of baseline +2. **Scalability Maintained**: Can handle 100+ concurrent pipeline executions +3. **Database Performance**: Queries complete in < 1 second for typical workloads +4. **Cache Performance**: Cache hit detection < 50ms + +### Security Success Criteria + +1. **RBAC Enforced**: All API endpoints protected by RBAC +2. **Namespace Isolation**: No cross-namespace data leakage +3. **Authentication Required**: No unauthenticated access possible +4. **Audit Trail**: All operations logged appropriately + +### Operational Success Criteria + +1. **API Server Stable**: No crashes during testing +2. **Database Integrity**: No data corruption +3. **Migration Reversible**: Can rollback migration if needed +4. 
**Documentation Complete**: Migration guide, API docs, release notes + +--- + +## Risk Assessment + +### Critical Risks + +| Risk | Probability | Impact | Mitigation | +|------|------------|--------|------------| +| Migration script fails on customer data | Medium | Critical | Extensive testing with diverse MLMD databases; provide dry-run mode | +| Performance regression unacceptable | Low | High | Early performance testing; optimization before release | +| Data loss during migration | Low | Critical | Mandatory backup before migration; validation script | +| Incompatibility with existing pipelines | Medium | High | Extensive regression testing; backward compatibility checks | +| Security vulnerabilities in new APIs | Low | Critical | Security audit; penetration testing | + +### Medium Risks + +| Risk | Probability | Impact | Mitigation | +|------|------------|--------|------------| +| UI doesn't display all information | Medium | Medium | Manual UI testing; user acceptance testing | +| Cache mechanism doesn't work as expected | Low | Medium | Dedicated cache tests; performance validation | +| Generated clients have bugs | Low | Medium | Client integration tests; sample code validation | +| Metrics display broken in UI | Medium | Medium | Manual verification; automated screenshot tests if possible | + +### Low Risks + +| Risk | Probability | Impact | Mitigation | +|------|------------|--------|------------| +| Documentation incomplete | Medium | Low | Documentation review process | +| Edge cases not covered | Low | Low | Comprehensive test case design | + +--- + +## Test Data and Resources + +### Test Pipelines + +1. **componentInput_level_1_test.py**: Component inputs and runtime constants +2. **loop_collected_raw_Iterator.py**: Loop with raw list iterator +3. **loop_collected_InputParameter_Iterator.py**: Loop with parameter iterator +4. **nested_naming_conflicts.py**: 3-level nested pipelines with naming conflicts +5. **oneof_simple.py**: OneOf with If/Elif/Else branches +6. **taskOutputArtifact_test.py**: Artifact output from task +7. **taskOutputParameter_test.py**: Parameter output from task + +### Required Infrastructure + +- **MySQL Test Database**: Fresh instance for each test run +- **MinIO Server**: Local S3-compatible storage +- **Kind Clusters**: Multiple clusters for different test scenarios +- **GitHub Actions Runners**: For CI/CD test execution + +### Test Artifacts + +- **Performance Baselines**: JSON file with latency/throughput metrics +- **MLMD Database Dumps**: Sample databases for migration testing +- **Expected Outputs**: Golden files for regression testing + +--- + +## Appendix + +### Test Environment Setup + +#### Local Development Setup + +```bash +# 1. Set up database +docker run -d --name kfp-mysql -e MYSQL_ROOT_PASSWORD=root -e MYSQL_DATABASE=kfp -p 3306:3306 mysql:8.0 + +# 2. Set up MinIO +docker run -d --name kfp-minio -p 9000:9000 -p 9001:9001 \ + -e MINIO_ROOT_USER=minio -e MINIO_ROOT_PASSWORD=minio123 \ + minio/minio server /data --console-address ":9001" + +# 3. Create Kind cluster +kind create cluster --name kfp-test --config=test-cluster-config.yaml + +# 4. Deploy KFP +kubectl apply -k manifests/kustomize/env/platform-agnostic + +# 5. Run tests +cd backend/src/apiserver/storage +go test -v ./... 
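+
+# 6. (Optional) Forward the KFP UI port for manual UI verification
+#    Hedged example: assumes the default "kubeflow" namespace and the standard
+#    "ml-pipeline-ui" service exposing port 80
+kubectl -n kubeflow port-forward svc/ml-pipeline-ui 8080:80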
+``` + +#### CI Test Cluster Config + +```yaml +# test-cluster-config.yaml +kind: Cluster +apiVersion: kind.x-k8s.io/v1alpha4 +nodes: + - role: control-plane + - role: worker + - role: worker +``` + +### Glossary + +- **MLMD**: ML Metadata - the metadata store being removed +- **OneOf**: Conditional branching construct that selects one output from multiple possible branches +- **DAG**: Directed Acyclic Graph - represents pipeline structure +- **IOType**: Input/Output Type - categorizes parameter/artifact sources (e.g., COMPONENT_INPUT, TASK_OUTPUT) +- **Artifact Task**: Join table linking artifacts to tasks with metadata +- **Cache Fingerprint**: Hash of task inputs used for cache hit detection +- **Iteration Index**: Index of current loop iteration (0-based) + +--- + +## Document Revision History + +| Version | Date | Author | Changes | +|---------|------|--------|---------| +| 1.0 | 2025-10-06 | KFP Test Team | Initial test plan creation | + +--- + +**End of Test Plan** diff --git a/proposals/12238-notebook-component/README.md b/proposals/12238-notebook-component/README.md new file mode 100644 index 00000000000..2ed1e7f3ee1 --- /dev/null +++ b/proposals/12238-notebook-component/README.md @@ -0,0 +1,299 @@ +# KEP-12238: Jupyter Notebook Components + + + +- [Summary](#summary) +- [Motivation](#motivation) + - [Goals](#goals) + - [Non-Goals](#non-goals) +- [Proposal](#proposal) + - [Baseline Feature For Embedded Assets](#baseline-feature-for-embedded-assets) + - [SDK User Experience](#sdk-user-experience) + - [Example](#example) + - [notebook_component Decorator Arguments](#notebook_component-decorator-arguments) + - [Behavior Notes](#behavior-notes) + - [User Stories](#user-stories) + - [Notes/Constraints/Caveats](#notesconstraintscaveats) + - [Risks and Mitigations](#risks-and-mitigations) +- [Design Details](#design-details) + - [Security Considerations](#security-considerations) + - [Test Plan](#test-plan) + - [Unit Tests](#unit-tests) + - [Integration tests](#integration-tests) + - [Graduation Criteria](#graduation-criteria) +- [Implementation History](#implementation-history) +- [Drawbacks](#drawbacks) +- [Alternatives](#alternatives) + + +## Summary + +This proposal introduces a first-class `@dsl.notebook_component` decorator that lets users build Kubeflow Pipelines +(KFP) components directly from Jupyter notebooks. The decorator embeds a `.ipynb` file into the component and executes +it at runtime via `nbclient`, with parameters injected as a prepended cell. This provides a simple path for +notebook-centric workflows to run in KFP without requiring separate packaging or bespoke wrappers. + +## Motivation + +Many users begin experimentation and development in Jupyter notebooks. Turning those notebooks into pipeline components +currently requires boilerplate: exporting to Python, writing a wrapper function, or managing custom container images. A +native notebook component: + +- Reduces friction to productionize notebook code in pipelines +- Preserves the notebook as the source of truth while allowing parameterization +- Avoids extra build steps by embedding notebook content into the component + +### Goals + +1. Enable defining a component from a `.ipynb` notebook with a single decorator. +2. Support parameter injection into the notebook at execution time. +3. Use the existing Python executor to keep compatibility with existing inputs/outputs concepts. + +### Non-Goals + +1. Notebook validation or linting beyond JSON and structural checks. +2. 
Adding a new pipeline IR or backend executor. This reuses the Python executor. + +## Proposal + +### Baseline Feature For Embedded Assets + +This KEP establishes a baseline, generic capability for Python-function components to embed arbitrary files or +directories directly into a lightweight component. The goal is to support cases where a component needs a small amount +of read-only assets (configs, scripts, models, notebooks, etc.) without requiring a custom image. + +- Add a new decorator argument to `@dsl.component`: + - `embedded_artifact_path: Optional[str]` — path to a file or directory on the authoring machine to embed within the + component. +- Add a new SDK type: `dsl.EmbeddedInput[T]` — a runtime-only input annotation that resolves to an artifact instance of + type `T` (e.g. `dsl.EmbeddedInput[dsl.Dataset]`) whose `path` points to the extracted embedded artifact root. + - If a directory is embedded, `.path` points to the extracted directory. + - If a single file is embedded, `.path` points to that file. + +Example: + +```python +@dsl.component( + embedded_artifact_path="assets/config_dir", +) +def my_component(cfg: dsl.EmbeddedInput[dsl.Dataset], param: int): + # cfg.path is a directory when a directory is embedded; a file path if a file is embedded + print(cfg.path) +``` + +Execution model (lightweight components): + +- At compile time, the file/dir is archived (tar + gzip) and base64-embedded into the ephemeral module. +- At runtime, the embedded artifact is made available by extracting to a temporary location and the `EmbeddedInput[...]` + parameter is injected as an artifact with `.path` pointing to the extracted file/dir in a temporary directory. The + extracted root is also added to `sys.path` for Python module resolution if it's a directory. +- `sys.path` precedence: the embedded path/zip is prepended to `sys.path` (before existing entries) to ensure + deterministic use of embedded modules when names overlap. + +Relationship to `@dsl.notebook_component`: + +- The notebook component leverages the same embedding pattern, specializing the runtime helper to execute `.ipynb` + content via `nbclient`. If `notebook_path` is a directory, the `.ipynb` file in the directory is executed to allow for + utility functions to be embedded. Pipeline compilation fails if there is more than one `.ipynb` file. 
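+
+For illustration, here is a minimal, self-contained sketch of the embed/extract round trip described in the execution
+model above. It uses only the Python standard library; the helper names (`embed_path`, `extract_embedded`) are
+hypothetical and are not part of the proposed SDK surface.
+
+```python
+import base64
+import io
+import os
+import sys
+import tarfile
+import tempfile
+
+
+def embed_path(path: str) -> str:
+    """Compile time: archive a file or directory (tar + gzip) and return it base64-encoded."""
+    buf = io.BytesIO()
+    with tarfile.open(fileobj=buf, mode="w:gz") as tar:
+        tar.add(path, arcname=os.path.basename(path))
+    return base64.b64encode(buf.getvalue()).decode("ascii")
+
+
+def extract_embedded(payload: str) -> str:
+    """Runtime: decode and extract the embedded archive to a temp dir and return the extracted root."""
+    root = tempfile.mkdtemp(prefix="kfp_embedded_")
+    with tarfile.open(fileobj=io.BytesIO(base64.b64decode(payload)), mode="r:gz") as tar:
+        tar.extractall(root)
+    extracted = os.path.join(root, os.listdir(root)[0])
+    if os.path.isdir(extracted):
+        # Prepend so embedded modules take precedence over same-named installed modules.
+        sys.path.insert(0, extracted)
+    return extracted
+```
+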
+ +### SDK User Experience + +#### Example + +```python +from kfp import dsl + + +@dsl.notebook_component( + notebook_path="train.ipynb", + packages_to_install=["pandas", "scikit-learn", "nbclient"], +) +def train_from_notebook(dataset_uri: str, model: dsl.Output[dsl.Model]): + dsl.run_notebook( + dataset_uri=dataset_uri, + output_model_path=model.path, + ) + + +@dsl.pipeline(name="nb-pipeline") + +def pipeline(dataset_uri: str = "s3://bucket/dataset.csv"): + train_from_notebook(dataset_uri=dataset_uri) +``` + +#### Complex Example + +A mixed pipeline with a Python preprocessor, a notebook training step, and a notebook evaluation step: + +```python +from kfp import dsl + + +@dsl.component( + base_image="python:3.11-slim", + packages_to_install=["pandas==2.2.2"], +) +def preprocess(text: str, cleaned_text: dsl.Output[dsl.Dataset]): + """Cleans whitespace from input text and writes to cleaned_text.""" + import re + + cleaned = re.sub(r"\s+", " ", text).strip() + with open(cleaned_text.path, "w", encoding="utf-8") as f: + f.write(cleaned) + + +@dsl.notebook_component( + notebook_path="dev-files/nb_train.ipynb", + base_image="registry.access.redhat.com/ubi9/python-311:latest", +) +def train_model( + cleaned_text: dsl.Input[dsl.Dataset], + model: dsl.Output[dsl.Model], +): + """Trains a model from cleaned text and writes model.""" + import shutil + + # Read the dataset for the notebook since it expects a string + with open(cleaned_text.path, "r", encoding="utf-8") as f: + cleaned_text_str = f.read() + + # Execute the embedded notebook with kwargs injected as variables + dsl.run_notebook(cleaned_text=cleaned_text_str) + + # Translate notebook outputs into KFP outputs + nb_model_dir = "/tmp/kfp_nb_outputs/model_dir" + shutil.copytree(nb_model_dir, model.path, dirs_exist_ok=True) + + +@dsl.notebook_component( + notebook_path="dev-files/nb_eval.ipynb", + base_image="registry.access.redhat.com/ubi9/python-311:latest", +) +def evaluate_model( + model: dsl.Input[dsl.Model], + metrics_json: dsl.Output[dsl.Metrics], +): + """Evaluates a model and writes metrics JSON output.""" + import json + # Execute the notebook with the model artifact path + dsl.run_notebook(model=model.path) + + # Copy notebook-generated metrics into output parameter + with open("/tmp/kfp_nb_outputs/metrics.json", "r", encoding="utf-8") as f: + metrics_dict = json.load(f) + + for metric_name, metric_value in metrics_dict.items(): + if isinstance(metric_value, (int, float)): + metrics_json.log_metric(metric_name, metric_value) + + +@dsl.pipeline(name="three-step-nb-mix") +def pipeline(text: str = "Hello world"): + p = preprocess(text=text).set_caching_options(False) + t = train_model(cleaned_text=p.output).set_caching_options(False) + evaluate_model(model=t.output).set_caching_options(False) +``` + +#### notebook_component Decorator Arguments + +Only differences from the standard Python executor component: + +- `notebook_path: str` – New parameter specifying the `.ipynb` file to embed and execute (required). +- `packages_to_install: Optional[List[str]]` – Same as Python executor except when `None` (default here) it installs a + slimmer default runtime of `nbclient>=0.10,<1`; `[]` installs nothing; a non-empty list installs exactly the provided + packages. + +All other decorator arguments and behaviors are identical to the Python executor. + +#### Behavior Notes + +- The notebook JSON is compressed using gzip and base64-encoded before embedding into the ephemeral Python module used + by the Python executor. 
This reduces command line length and allows for larger notebooks. +- At runtime, `dsl.run_notebook(**kwargs)` is bound to a helper that: + 1. Decompresses and parses the embedded notebook into memory + 2. Injects parameters following Papermill semantics: + - If the notebook contains a code cell tagged with `parameters`, a new code cell tagged `injected-parameters` is + inserted immediately after it to override defaults. + - If no `parameters` cell exists, the `injected-parameters` cell is inserted at the top of the notebook. + 3. Executes via `nbclient.NotebookClient` + 4. Streams cell outputs (stdout/stderr and `text/plain` displays) + +For the baseline bundling feature: + +- Files/directories are archived (tar+gz) and base64-embedded similarly; by default they are extracted at import time to + satisfy the `EmbeddedInput[...]` contract of providing a real filesystem `.path` and to support non-Python assets. +- For Python import resolution, the embedded path (extracted root or zip archive) is prepended into `sys.path` if it's a + directory. +- `dsl.EmbeddedInput[T]` is not part of the component interface; it is injected at runtime and provides an artifact with + `.path` set to the extracted file/dir. + +### User Stories + +1. As a data scientist, I can take an existing exploratory notebook and run it as a KFP component with parameters, + without rewriting it into a Python script. +2. As a platform user, I can standardize execution images and dependency sources while still allowing teams to embed + notebooks into components. + +### Notes/Constraints/Caveats + +- Embedded content increases the size of the generated command; extremely large notebooks may hit container argument + length limits, though gzip compression typically reduces notebook size significantly. +- Notebooks must be valid JSON and include a `cells` array; otherwise creation fails with a clear error. +- The SDK warns when embedded artifacts or notebooks exceed 1MB to flag potential issues. The backend has a configurable + maximum pipeline spec size; if exceeded, the error recommends moving content to a container image or object store. + +### Risks and Mitigations + +- **Dependency drift/conflicts**: Installing packages at runtime can introduce variability. + - Mitigation: Encourage providing a `base_image` with pinned deps or using `packages_to_install` with exact versions. +- **Command length/performance**: Large embedded notebooks may slow compilation or exceed limits. + - Mitigation: Automatic gzip compression reduces notebook size; warn on large files (>1MB original); recommend + refactoring or pre-building images for very large notebooks. + +## Design Details + +### Security Considerations + +This feature does not introduce additional security risks beyond those inherent to executing notebooks. It relies on the +`nbclient` package within the execution environment (installed automatically unless overridden). + +### Test Plan + +#### Unit Tests + +- Verify `packages_to_install` behavior for `None`, `[]`, and non-empty lists. +- Ensure helper source is generated and injected correctly; binding of `dsl.run_notebook`. +- Test notebook compression and decompression round-trip correctness. +- Large-notebook warning logic. +- KFP local executing a pipeline with embedded artifacts. +- KFP local executing a pipeline with notebooks. +- Large notebook over 1 MB. +- Failure path: invalid notebook JSON; notebook cell raises execution error. + +#### Integration tests + +- Execute a pipeline that leverages `embedded_artifact_path`. 
+- Execute a pipeline with two parameterized notebooks that writes to an output artifact. + - One notebook should have no `parameters` cell. + - The other notebook should have a `parameters` cell with some overrides. + +### Graduation Criteria + +N/A + +## Implementation History + +- Initial proposal: 2025-09-10 + +## Drawbacks + +- Embedded notebooks can bloat the command payload and slow compilation/execution for large files, though gzip + compression typically helps. +- Notebooks are less modular than Python modules for code reuse and testing. + +## Alternatives + +1. Use `@dsl.component` with manual `nbconvert` calls inside the function. This requires boilerplate and manual + packaging of the notebook. +2. Pre-build a container image containing the notebook and its dependencies, then use `@dsl.container_component`. This + improves reproducibility but increases operational overhead. diff --git a/proposals/README.md b/proposals/README.md index 4f891659793..287bcac3b35 100644 --- a/proposals/README.md +++ b/proposals/README.md @@ -4,3 +4,12 @@ Kubeflow uses the KEP process to document large scale changes to the project. Details on the process (including the KEP template, recommendations, etc.) can be found at [kubeflow/community/proposals](https://github.com/kubeflow/community/blob/master/proposals/README.md) + +Additionally, please address the following questions and topics in your proposal: + +* Test plan +* Migration strategy (if applicable) +* Frontend Considerations + * If this is a backend or SDK related proposal, address any impacts to Frontend +* KFP Local Considerations + * Address whether this proposal requires changes to the KFP local experience. These are often proposals that introduce SDK features requiring changes to the KFP remote backend. diff --git a/proposals/support_different_argo_wf_versions_testplan/different_aw_test_plan.md b/proposals/support_different_argo_wf_versions_testplan/different_aw_test_plan.md new file mode 100644 index 00000000000..3a704265db0 --- /dev/null +++ b/proposals/support_different_argo_wf_versions_testplan/different_aw_test_plan.md @@ -0,0 +1,47 @@ +# Test Plan to verify multiple supported argo versions + +## Compatibility Matrix Tests +Based on the compatability matrix as defined in [Test Environments](#test-environments) + +### Test Environments +| Environment | Argo Version | KFP Version | Purpose | +|-------------|---------------|-------------|---------------------------| +| Env-1 | Latest(3.7.x) | None | Future Upgrade | +| Env-2 | 3.6.x | Current | N version compatibility | +| Env-3 | 3.5.y | Previous | N-1 version compatibility | + + +### 1. Current Version (N) Compatibility + +| Test Case ID | TC-CM-001 | +|-----------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| **Test Case Summary** | Validate compatibility with current supported Argo version | +| **Test Steps** |
    1. Install current supported Argo version (e.g., 3.6.x)
    2. Configure KFP with this Argo version
    3. Execute pipeline run
    4. Verify all features work correctly (Pipeline Runs and Scheduled Pipeline Runs work)
    5. Document any limitations
    | +| **Expected Results** |
    • Full compatibility with current version
    • All pipeline features operational
    • No breaking changes or issues
    • Performance within acceptable range
    • | + +### 2. Previous Version (N-1) Compatibility + +| Test Case ID | TC-CM-002 | +|-----------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| **Test Case Summary** | Validate compatibility with previous supported Argo version | +| **Test Steps** |
      1. Install previous supported Argo version (e.g., 3.5.y)
      2. Configure KFP with this Argo version
      3. Execute pipeline run
      4. Document compatibility differences
      5. Verify core functionality maintained
      | +| **Expected Results** |
      • Core functionality works with N-1 version
      • Any limitations clearly documented
      • No critical failures or data loss
      • Upgrade path available
      • | + +### 2.1 Z-Stream Version Compatibility + +| Test Case ID | TC-CM-002a | +|-----------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| **Test Case Summary** | Validate compatibility with z-stream (patch) versions of Argo | +| **Test Steps** |
        1. Test current KFP with multiple z-stream versions of the same minor Argo release
          • Example: Test KFP with Argo v3.4.16, v3.4.17, v3.4.18
        2. Execute standard pipeline test suite for each z-stream version
        3. Document any breaking changes in patch versions
        4. Verify backward and forward compatibility within minor version
        | +| **Expected Results** |
        • Z-stream versions maintain compatibility
        • No breaking changes in patch releases
        • Smooth operation across patch versions
        • Clear documentation of any exceptions
        • | + +### 3. Version Matrix Validation + +| Test Case ID | TC-CM-003 | +|-----------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| **Test Case Summary** | Systematically validate compatibility matrix | +| **Test Steps** |
          1. For each version in the compatibility matrix:
            • Deploy specific Argo version
            • Configure KFP
            • Execute standard test suite
            • Document results and issues
            • Update compatibility matrix
          2. Identify unsupported combinations
          | +| **Expected Results** |
          • Compatibility matrix accurately reflects reality
          • All supported versions documented
          • Unsupported combinations identified
          • Clear guidance for version selection
          • | + +## Regression Testing +Verify that for each PR, integration tests and KFP Sample Pipeline tests are successful \ No newline at end of file diff --git a/proposals/tests-refactor/End2End-DuringPR.png b/proposals/tests-refactor/End2End-DuringPR.png new file mode 100644 index 00000000000..a3b0f58f96c Binary files /dev/null and b/proposals/tests-refactor/End2End-DuringPR.png differ diff --git a/proposals/tests-refactor/End2End-Full Regression.png b/proposals/tests-refactor/End2End-Full Regression.png new file mode 100644 index 00000000000..d2c3fec366c Binary files /dev/null and b/proposals/tests-refactor/End2End-Full Regression.png differ diff --git a/proposals/tests-refactor/KFP PipelineUpload API Workflow.png b/proposals/tests-refactor/KFP PipelineUpload API Workflow.png new file mode 100644 index 00000000000..7827f873358 Binary files /dev/null and b/proposals/tests-refactor/KFP PipelineUpload API Workflow.png differ diff --git a/proposals/tests-refactor/Overall Test Strategy.png b/proposals/tests-refactor/Overall Test Strategy.png new file mode 100644 index 00000000000..8c3eebc56de Binary files /dev/null and b/proposals/tests-refactor/Overall Test Strategy.png differ diff --git a/proposals/tests-refactor/PipelineUpload API Test Strategy.png b/proposals/tests-refactor/PipelineUpload API Test Strategy.png new file mode 100644 index 00000000000..114edbc66a6 Binary files /dev/null and b/proposals/tests-refactor/PipelineUpload API Test Strategy.png differ diff --git a/proposals/tests-refactor/README.md b/proposals/tests-refactor/README.md new file mode 100644 index 00000000000..61787647526 --- /dev/null +++ b/proposals/tests-refactor/README.md @@ -0,0 +1,219 @@ +# Revolutionizing Kubeflow Pipeline Testing: A 2025 Proposal + + +* [Revolutionizing Kubeflow Pipeline Testing: A 2025 Proposal](#revolutionizing-kubeflow-pipeline-testing-a-2025-proposal) + * [Summary](#summary) + * [Goals](#goals) + * [Non-Goals](#non-goals) +* [Proposal](#proposal) + * [Current Testing Process](#current-testing-process) + * [Proposed Changes](#proposed-changes) + * [Testing Strategy](#testing-strategy) + * [Server API Tests](#server-api-tests) + * [SDK Tests](#sdk-tests) + * [Targeted Tests](#targeted-tests) + * [Compilation Tests](#compilation-tests) + * [API Tests](#api-tests) + * [Semi Exploratory Tests](#semi-exploratory-tests) + * [Full Exploratory Tests](#full-exploratory-tests) + * [End to End Tests](#end-to-end-tests) + * [Critical Regression Testing](#critical-regression-testing) + * [Full Regression Testing](#full-regression-testing) + * [Test Architecture Changes](#test-architecture-changes) + * [Test Code Architecture](#test-code-architecture) + * [Benefits of Proposed Changes](#benefits-of-proposed-changes) + * [Next Steps](#next-steps) + * [POC](#poc) + * [Server API Tests:](#server-api-tests-1) + * [SDK Compilation Tests:](#sdk-compilation-tests) + + +## Summary +To boost the release efficiency of the Kubeflow Pipelines project and increase confidence in our Pull Requests (PRs), we must prioritize tests that verify functional changes at the service/component level. We aim to enhance test coverage transparency, broaden coverage beyond basic positive scenarios, and implement multi-tiered testing throughout the Software Development Life Cycle (SDLC). We also need user-friendly test reports for quick coverage assessment and simplified debugging (eliminating the need to sift through log files). Our tests should be structured and grouped logically for easy understanding. 
+ +This proposal outlines changes to our testing approach, emphasizing improved test efficacy, reduced testing time, enhanced confidence, and more targeted functional testing. It also explores leveraging existing data for real-world end-to-end testing scenarios and developing a scalable testing framework for broader use and integration. +## Goals +1. Comprehensive test coverage for Kubeflow Pipelines APIs. +2. Main list of pipeline files - A repository of diverse valid and invalid pipeline files (organized in a single dedicated directory) sourced from existing tests, customer contributions, user scenarios. +3. A standardized test framework for all contributors: Ginkgo + Gomega for all tests except SDK Unit/Component tests, which will use Pytest and front end Javascript tests. +4. Refactoring of existing v2 tests, to be conducted in phases with specific goals for each phase. +5. Cleanup and reorganization of test code to eliminate redundancy. +6. Documentation improvements: + 1. A new Test Process strategy document will be added to the CONTRIBUTING guide. + 2. Test code documentation with examples of creating new test cases. + 3. All Tests should be environment agnostic, i.e. these tests should be able to run in any type of cluster Kind, Minikube, Cloud and on any namespace + 4. Test code guidelines will be added (such as Ginkgo description, logging etc.) +7. Current Linting styles will be kept intact and Python SDK tests will be run through pylint to make sure it passes +8. Test Coverage reporting will be produced as part of either a new or existing workflow +9. Max CI workflow time will be kept to 1 hr, any workflow taking > 1 hr will be split into multiple workflows + +## Non-Goals +1. We will not initially cover components that users can not directly interact with . For e.g. “Launcher/Executorâ€, "Driver", we will still add indirect coverage for this pipeline service/component but until we have a direct way to interact with this, direct functional coverage will be out of scope. +2. Stable third-party dependencies, such as Argo Workflows, will not be included in this initiative. +3. Front End Tests will not be refactored +4. [Semi Exploratory E2E tests](#semi-exploratory-tests) will not be considered in this effort +5. AI-generated pipeline specs (using Gemini, ChatGPT, or Cursor) will not be included as part of this + +# Proposal +This section describes the proposed changes to our current testing processes and supporting test architecture. These changes aim to improve the quality, reliability, and speed of our testing, leading to better product outcomes. + +## Current Testing Process +Our current testing process includes: +* **Unit Testing**: Good coverage, as reflected in test coverage reports. +* **Integration Testing**: API tests - Focuses on basic positive scenarios with minimal verification steps. Test code lacks readability and standardization. +* **End-to-End Testing**: Sample Tests and SDK Execution Tests - Covers a few basic pipelines but lacks component state verification, complicating debugging. + +## Proposed Changes +The following table outlines the proposed changes to the testing process. 
+ +| **Area** | **Current Process** | **Proposed Process** | **Justification** | +|-----------|----------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------| +| Automation | Limited functional coverage, indirect testing, inconsistent code, unreadable/unmaintainable code | Increase functional coverage, standardize framework, readable/maintainable code, negative test coverage, organize documentation | Improved testing, higher confidence, easier coverage expansion with a standardized framework, maintainable code | +| Testing Process | Lack of transparency, disorganized tests, no test plan reviews | Test plans/requirements included in design reviews, merge strategy with sign-off | No post-merge surprises, testable code, quality-first approach | | | | +| Feedback Loop | Hard to debug test failures, no test reports and failures do not correspond to any severity | Better logging & code doc, improved reporting for easier debugging | Easier to debug failures, easily digestible reporting by devs and non-developers as well | | | | + +## Testing Strategy +Before we get into the test strategy, let's revisit the project architecture as described [here](https://github.com/kubeflow/pipelines/blob/master/docs/sdk/Architecture.md). If we have to describe the workflow in short, it would that, user interact with KFP via APIs, data is persisted in DB, and based on the endpoint, we invoke different components, like Argo Workflows which schedules pods, that runs a specific action, persists data in DB + +### Server API Tests +Validation output of the service/components that a specific endpoint interacts with provides direct comprehensive functional coverage. To visualize this, please see below: +![Overall Test Strategy.png](Overall%20Test%20Strategy.png) + +And to further explain it, lets take an example of PipelineUploadAPI, the workflow for this API is as follows: +![KFP PipelineUpload API Workflow.png](KFP%20PipelineUpload%20API%20Workflow.png) + +And the proposed testing strategy for this would be: +![PipelineUpload API Test Strategy.png](PipelineUpload%20API%20Test%20Strategy.png) + +### SDK Tests +Lightweight SDK tests focused on compiled output validation, with opportunities to leverage Pytest parameterized tests and other techniques for increased coverage. In order to achieve this, we will have define DSL components as independent objects that can be referenced in the tests for compilation. And with this approach, we can achieve multiple different type of SDK test coverage: +#### Targeted Tests +##### Compilation Tests +![SDK testing - Targeted.png](SDK%20testing%20-%20Targeted.png) + + +Using Pytest’s parameterized feature, we can define components as parameters to a compile function, whose output is then validated against a known yaml file specified by an expected parameter. + +**Pros:** +* Code reusage - for components +* Targeted testing +* Easy expansion of tests +* No need to spin up a KFP server + +**Cons:** +* As a developer, we need to come up with combinations of components making up a pipeline +* Expected valid YAML files needs to be generated +* However we already have a lot of DSL Components defined that are used in different tests. And we can leverage existing passing tests to generate valid YAML for us as well. 
+ +##### API Tests + +~~**Option 1:** + +![SDK API Option 1.png](SDK%20API%20Option%201.png) + +Add a proxy to capture all Http calls to the API server, and validate if the request is valid or not. This way, we won’t need a KFP service running in a cluster, and we can verify SDK tests in a standalone environment~~ + + +**Option 2:** + +![SDK API Option 2.png](SDK%20API%20Option%202.png) + +Do not add a proxy but instead let the API server validate the request and we just validate the response from the API service to confirm if SDK made the right request or not. + +**NOTE:** The suggestion here is choose only 1 option, and I personally align towards **Option 2** as it has less overhead from implementation and maintenance pov and this also means that we won’t have to recreate the validation logic in the test code. + + +#### Semi Exploratory Tests +Leveraging existing independent components, and using graphical representation of a make up of a pipeline, we can use tools like Graphwalker to perform semi exploratory tests as shown below: + +![SDK testing - Semi Exploratory.png](SDK%20testing%20-%20Semi%20Exploratory.png) + +**Pros:** +* Provides some auto exploratory testing +* Semi Self Managed - Since there is no expected outcome, and the validation is against KFP API, the tests can be as random as possible +* Increases our test coverage beyond known use cases + +**Cons:** +* Developers will have to create connection points between different components +* Time and cluster resource utilization may be high here, so we will need to limit the number of paths to traverse and time these tests will take + +**NOTE**: Sometime components make assumptions about environments. A common example would be expecting environment variables to be present, and those are (today) defined at @pipeline decorator scope. Nested pipelines may be able to help here. + +#### Full Exploratory Tests +Using an AI tool, we can compile a pipeline and validate it against the Pipeline Upload API (so basically, leverage Semi Exploratory tests but use an AI tool to generate a YAML): +![SDK testing - Full Exploratory.png](SDK%20testing%20-%20Full%20Exploratory.png) + +**Pros**: +* Fully self managed tests +* Provides much broader coverage of edge/non edge cases that we may not have covered in targeted/semi targeted tests + +**Cons** +* We may have to build a custom SDG - out of the box models may not work for us +* Potentially duplicate coverage +* Fail on edge cases that are of trivial priority + +### End to End Tests +#### Critical Regression Testing +Using a subset of the main list of pipeline files (already used for API tests and SDK tests), we can run full end to end tests that will confirm the integration of different components that make up KFP. +![End2End-DuringPR.png](End2End-DuringPR.png) + +#### Full Regression Testing +Develop a simple application to run pipeline files from a directory (local or remote), run them in parallel, and verify successful execution. 
+![End2End-Full Regression.png](End2End-Full%20Regression.png) + +**Pros**: +* An ad hoc way for us to load the system +* Full end to end testing +* Lot of indirect testing of components without really worrying about the internals +* Easy to expand - all it will take is to generate a new pipeline file +* Flexible testing app for other products that integrates with KFP +* Easy integration with AI tools to perform some exploratory testing + +**Cons**: +* Failed pipelines may get hard to debug +* Resource utilization will be high +* Some pipelines may not be possible due to resource constraints +* Long running tests (even after parallelization) +* Parallel tests can cause storage to fill up on the GitHub workers, since we clean up only after the pipeline run finishes. So we may need to explore running tests in parallel on separate GitHub workers/workflows. + +## Test Architecture Changes +* Implement all tests in Go using Ginkgo + Gomega, providing BDD-style, readable, and organized test scripts with multi-format output and parallel execution. +* Use Pytest for SDK tests +* Sample tests will be renamed to End to End Tests and labeled with “Critical†label. These tests will be a subset of full regression tests covering critical use cases, and will run with every PR to confirm regression in integration. These tests will also be converted to Ginkgo tests. +* Full Regression (Lightweight End-to-end) Tests will run on a schedule May be start with every merge to master and then change frequency as we gain confidence and as the list of tests grow, to daily, weekly or per release. Go.mod file will be added to these tests to allow reuse of this code at other places. +* Front End Tests will stay the same + +### Test Code Architecture + +Utilize Ginkgo for API tests, aligning with the primarily Go codebase. And a test should follow the following pattern: +![Test Code Architecture.png](Test%20Code%20Architecture.png) + +## Benefits of Proposed Changes + +The proposed changes will result in the following benefits: + +* Increased testing efficiency and speed. +* Improved test coverage and quality. +* Reduced risk of release failures. +* Enhanced team collaboration and communication. +* Tests become self-documenting if we use Ginkgo + +## Next Steps +We recommend the following next steps: + +* Conduct a detailed analysis of the proposed changes. +* Develop an implementation plan with timelines and resources. +* Begin pilot testing the new processes and architecture. + +## POC +#### Server API Tests: +A POC of new test architecture using Ginkgo + Gomega and testing PipelineUpload API tests is available [here](https://github.com/kubeflow/pipelines/pull/11956) + +### SDK Compilation Tests: +[WIP] A simple POC for SDK compilation tests that uses dsl components as pytest parameters, [here](https://github.com/kubeflow/pipelines/pull/11983) + +## Miscellaneous +Going forward, we can start to measure the success of this effort by measuring following metrics: +1. Measuring number of NEW issues reported after every release is a great measure of success/confidence +2. Git workflow history - but we can measure trends of post merge failures +3. 
Number of new automated tests per release diff --git a/proposals/tests-refactor/SDK API Option 1.png b/proposals/tests-refactor/SDK API Option 1.png new file mode 100644 index 00000000000..2078078fa75 Binary files /dev/null and b/proposals/tests-refactor/SDK API Option 1.png differ diff --git a/proposals/tests-refactor/SDK API Option 2.png b/proposals/tests-refactor/SDK API Option 2.png new file mode 100644 index 00000000000..36b9cd1e8d2 Binary files /dev/null and b/proposals/tests-refactor/SDK API Option 2.png differ diff --git a/proposals/tests-refactor/SDK testing - Full Exploratory.png b/proposals/tests-refactor/SDK testing - Full Exploratory.png new file mode 100644 index 00000000000..a48e008487a Binary files /dev/null and b/proposals/tests-refactor/SDK testing - Full Exploratory.png differ diff --git a/proposals/tests-refactor/SDK testing - Semi Exploratory.png b/proposals/tests-refactor/SDK testing - Semi Exploratory.png new file mode 100644 index 00000000000..2af062a7fc4 Binary files /dev/null and b/proposals/tests-refactor/SDK testing - Semi Exploratory.png differ diff --git a/proposals/tests-refactor/SDK testing - Targeted.png b/proposals/tests-refactor/SDK testing - Targeted.png new file mode 100644 index 00000000000..dc95c8bb70d Binary files /dev/null and b/proposals/tests-refactor/SDK testing - Targeted.png differ diff --git a/proposals/tests-refactor/Test Code Architecture.png b/proposals/tests-refactor/Test Code Architecture.png new file mode 100644 index 00000000000..e267b8dcb28 Binary files /dev/null and b/proposals/tests-refactor/Test Code Architecture.png differ diff --git a/samples/README.md b/samples/README.md index cbee13e4847..d714435d7a6 100644 --- a/samples/README.md +++ b/samples/README.md @@ -12,34 +12,6 @@ The core samples will also include intermediate samples that are more complex than basic samples such as flip coins but simpler than TFX samples. It serves to demonstrate a set of the outstanding features and offers users the next level KFP experience. -**Contrib samples** are not tested by KFP and could potentially be moved to -the core samples if the samples are of good quality and tests are covered and it demonstrates certain KFP functionality. -Another reason to put some samples in this directory is that some samples require certain -platform support that is hard to support in our test infra. - -In the Core directory, each sample will be in a separate directory. -In the Contrib directory, there is an intermediate directory for each contributor, -e.g. ibm and arena, within which each sample is in a separate directory. -An example of the resulting structure is as follows: -``` -pipelines/samples/ -Core/ - dsl_static_type_checking/ - dsl_static_type_checking.ipynb - xgboost_training_cm/ - xgboost_training_cm.py - condition/ - condition.py - recursion/ - recursion.py -Contrib/ - IBM/ - ffdl-seldon/ - ffdl_pipeline.ipynb - ffdl_pipeline.py - README.md -``` - # Run Samples ## Compile the pipeline specification diff --git a/samples/contrib/arena-samples/README.md b/samples/contrib/arena-samples/README.md deleted file mode 100644 index ef49ee6c884..00000000000 --- a/samples/contrib/arena-samples/README.md +++ /dev/null @@ -1,25 +0,0 @@ -# Arena demo - -There are a series of examples about how to build deeplearning models with [Arena](https://github.com/kubeflow/arena). These demos will show how to run a pipeline standalone Job, MPI Job, TFJob(PS mode) and TensorFlow Estimator Job. - -## Setup - -1. 
Install the arena - -``` -kubectl create -f https://raw.githubusercontent.com/kubeflow/pipelines/eb830cd73ca148e5a1a6485a9374c2dc068314bc/samples/arena-samples/arena.yaml -``` - -2. Add addtional RBAC role to service account `pipeline-runner` - -``` -kubectl create -f https://raw.githubusercontent.com/kubeflow/pipelines/eb830cd73ca148e5a1a6485a9374c2dc068314bc/samples/arena-samples/arena_launcher_rbac.yaml -``` - -## Demos - -- [Standalone Job](standalonejob/README.md) -- [MPI Job](mpi/README.md) -- [TensorFlow Estimator Job]() -- [TFJob]() - diff --git a/samples/contrib/arena-samples/arena.yaml b/samples/contrib/arena-samples/arena.yaml deleted file mode 100644 index 5beecfafff8..00000000000 --- a/samples/contrib/arena-samples/arena.yaml +++ /dev/null @@ -1,12 +0,0 @@ -apiVersion: v1 -kind: Pod -metadata: - name: arena-installer - namespace: kube-system -spec: - restartPolicy: Never - serviceAccountName: admin - hostNetwork: true - containers: - - name: arena - image: registry.cn-beijing.aliyuncs.com/acs/arena:0.2.0-f6b6188 \ No newline at end of file diff --git a/samples/contrib/arena-samples/arena_launcher_rbac.yaml b/samples/contrib/arena-samples/arena_launcher_rbac.yaml deleted file mode 100644 index e5e5aaf3cdc..00000000000 --- a/samples/contrib/arena-samples/arena_launcher_rbac.yaml +++ /dev/null @@ -1,34 +0,0 @@ ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: - name: arena - namespace: kubeflow -rules: -- apiGroups: - - "" - resources: - - configmaps - verbs: - - '*' -- apiGroups: - - "" - resources: - - services/proxy - verbs: - - get - ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: - name: pipeline-runner-arena-role - namespace: kubeflow -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: arena -subjects: -- kind: ServiceAccount - name: pipeline-runner - namespace: kubeflow \ No newline at end of file diff --git a/samples/contrib/arena-samples/mpi/README.md b/samples/contrib/arena-samples/mpi/README.md deleted file mode 100644 index 1d2c61d077d..00000000000 --- a/samples/contrib/arena-samples/mpi/README.md +++ /dev/null @@ -1,95 +0,0 @@ -# Run a MPI Job - -The `mpirun.py` sample creates a pipeline runs allreduce-style distributed training. - -## Requirements - -- [Install arena](https://github.com/kubeflow/arena/blob/master/docs/installation/README.md) - -- This sample requires to create distributed storage. In this sample, we use NFS as example. - -1.You need to create `/data` in the NFS Server - -``` -# mkdir -p /nfs -# mount -t nfs -o vers=4.0 NFS_SERVER_IP:/ /nfs -# mkdir -p /data -# cd / -# umount /nfs -``` - -2\.Create Persistent Volume. Moidfy `NFS_SERVER_IP` to yours. - -``` -# cat nfs-pv.yaml -apiVersion: v1 -kind: PersistentVolume -metadata: - name: user-susan - labels: - user-susan: pipelines -spec: - persistentVolumeReclaimPolicy: Retain - capacity: - storage: 10Gi - accessModes: - - ReadWriteMany - nfs: - server: NFS_SERVER_IP - path: "/data" - - # kubectl create -f nfs-pv.yaml -``` - -3\.Create Persistent Volume Claim. 
- -``` -# cat nfs-pvc.yaml -apiVersion: v1 -kind: PersistentVolumeClaim -metadata: - name: user-susan - annotations: - description: "this is the mnist demo" - owner: Tom -spec: - accessModes: - - ReadWriteMany - resources: - requests: - storage: 5Gi - selector: - matchLabels: - user-susan: pipelines -# kubectl create -f nfs-pvc.yaml -``` - -> Notice: suggest to add `description` and `owner` - -## Instructions - -### 1.With command line to compile the python code to p - -First, install the necessary Python Packages -```shell -pip3 install http://kubeflow.oss-cn-beijing.aliyuncs.com/kfp/0.1.16/kfp.tar.gz --upgrade -pip3 install http://kubeflow.oss-cn-beijing.aliyuncs.com/kfp-arena/kfp-arena-0.6.tar.gz --upgrade -``` - -Then run [mpi_run.py](mpi_run.py) with different parameters. - -``` -dsl-compile --py mpi_run.py --output mpi_run.py.tar.gz -``` - -Then, submit `[mpi_run.py.tar.gz](mpi_run.py.tar.gz)` to the kubeflow pipeline UI. - -![](choose_pipelines.jpg) - -You can use the mpirun pipeline definition to submit run, and choose the different parameters. - -![](submit_run.jpg) - -### 2.Check the result of the MPI Run pipeline - -![](demo.jpg) diff --git a/samples/contrib/arena-samples/mpi/choose_pipelines.jpg b/samples/contrib/arena-samples/mpi/choose_pipelines.jpg deleted file mode 100644 index 0efdb8c2dba..00000000000 Binary files a/samples/contrib/arena-samples/mpi/choose_pipelines.jpg and /dev/null differ diff --git a/samples/contrib/arena-samples/mpi/demo.jpg b/samples/contrib/arena-samples/mpi/demo.jpg deleted file mode 100644 index a237daefe38..00000000000 Binary files a/samples/contrib/arena-samples/mpi/demo.jpg and /dev/null differ diff --git a/samples/contrib/arena-samples/mpi/mpi_run.py b/samples/contrib/arena-samples/mpi/mpi_run.py deleted file mode 100644 index 946245f5d33..00000000000 --- a/samples/contrib/arena-samples/mpi/mpi_run.py +++ /dev/null @@ -1,48 +0,0 @@ -import kfp -import arena -import kfp.dsl as dsl -import argparse - -FLAGS = None - -@dsl.pipeline( - name='pipeline to run mpi job', - description='shows how to run mpi job.' 
-) -def mpirun_pipeline(image="uber/horovod:0.13.11-tf1.10.0-torch0.4.0-py3.5", - batch_size="64", - optimizer='momentum', - sync_source='https://github.com/tensorflow/benchmarks.git', - git_sync_branch='cnn_tf_v1.9_compatible', - data='user-susan:/training', - gpus=1, - workers=1, - cpu_limit='2', - metric='images/sec', - memory_limit='10Gi'): - """A pipeline for end to end machine learning workflow.""" - - env = ['NCCL_DEBUG=INFO','GIT_SYNC_BRANCH={0}'.format(git_sync_branch)] - - train=arena.mpi_job_op( - name="all-reduce", - image=image, - env=env, - data=[data], - workers=workers, - sync_source=sync_source, - gpus=gpus, - cpu_limit=cpu_limit, - memory_limit=memory_limit, - metrics=[metric], - command=""" - mpirun python code/benchmarks/scripts/tf_cnn_benchmarks/tf_cnn_benchmarks.py --model resnet101 \ - --batch_size {0} --variable_update horovod --optimizer {1}\ - --summary_verbosity=3 --save_summaries_steps=10 - """.format(batch_size, optimizer) - ) - - -if __name__ == '__main__': - import kfp.compiler as compiler - compiler.Compiler().compile(mpirun_pipeline, __file__ + '.tar.gz') diff --git a/samples/contrib/arena-samples/mpi/submit_run.jpg b/samples/contrib/arena-samples/mpi/submit_run.jpg deleted file mode 100644 index 092c62d4320..00000000000 Binary files a/samples/contrib/arena-samples/mpi/submit_run.jpg and /dev/null differ diff --git a/samples/contrib/arena-samples/mpi/upload_pipelines.jpg b/samples/contrib/arena-samples/mpi/upload_pipelines.jpg deleted file mode 100644 index 3eebad8b18e..00000000000 Binary files a/samples/contrib/arena-samples/mpi/upload_pipelines.jpg and /dev/null differ diff --git a/samples/contrib/arena-samples/standalonejob/README.md b/samples/contrib/arena-samples/standalonejob/README.md deleted file mode 100644 index 63583b026b9..00000000000 --- a/samples/contrib/arena-samples/standalonejob/README.md +++ /dev/null @@ -1,98 +0,0 @@ -# Run a Standalone Job - -The `standalone_pipeline.py` sample creates a pipeline runs preparing dataset, ML code, training and exporting the model. - -## Requirements - -- [Install arena](https://github.com/kubeflow/arena/blob/master/docs/installation/README.md) - -- This sample requires to create distributed storage. In this sample, we use NFS as example. - -1.You need to create `/data` in the NFS Server - -``` -# mkdir -p /nfs -# mount -t nfs -o vers=4.0 NFS_SERVER_IP:/ /nfs -# mkdir -p /data -# cd / -# umount /nfs -``` - -2\.Create Persistent Volume. Moidfy `NFS_SERVER_IP` to yours. - -``` -# cat nfs-pv.yaml -apiVersion: v1 -kind: PersistentVolume -metadata: - name: user-susan - labels: - user-susan: pipelines -spec: - persistentVolumeReclaimPolicy: Retain - capacity: - storage: 10Gi - accessModes: - - ReadWriteMany - nfs: - server: NFS_SERVER_IP - path: "/data" - - # kubectl create -f nfs-pv.yaml -``` - -3\.Create Persistent Volume Claim. 
- -``` -# cat nfs-pvc.yaml -apiVersion: v1 -kind: PersistentVolumeClaim -metadata: - name: user-susan - annotations: - description: "this is the mnist demo" - owner: Tom -spec: - accessModes: - - ReadWriteMany - resources: - requests: - storage: 5Gi - selector: - matchLabels: - user-susan: pipelines -# kubectl create -f nfs-pvc.yaml -``` - -> Notice: suggest to add `description` and `owner` - -## Instructions - -### 1.With command line - -First, install the necessary Python Packages -```shell -pip3 install http://kubeflow.oss-cn-beijing.aliyuncs.com/kfp/0.1.14/kfp.tar.gz --upgrade -pip3 install http://kubeflow.oss-cn-beijing.aliyuncs.com/kfp-arena/kfp-arena-0.5.tar.gz --upgrade -``` - -Then run [standalone_pipeline.py](standalone_pipeline.py) with different parameters. - -``` -python3 standalone_pipeline.py --learning_rate 0.0001 --dropout 0.8 --model_version 2 -``` - -``` -python3 standalone_pipeline.py --learning_rate 0.0005 --dropout 0.8 --model_version 3 -``` - -### 2.With Jupyter Notebook -Run `jupyter notebook` to start running your jupyter server and load the notebook `standalone_pipeline.ipynb` - - -### 3.Compare the result in pipelines dashboard - - -![](demo.jpg) - - diff --git a/samples/contrib/arena-samples/standalonejob/demo.jpg b/samples/contrib/arena-samples/standalonejob/demo.jpg deleted file mode 100644 index 88e1c3347a0..00000000000 Binary files a/samples/contrib/arena-samples/standalonejob/demo.jpg and /dev/null differ diff --git a/samples/contrib/arena-samples/standalonejob/standalone_pipeline.ipynb b/samples/contrib/arena-samples/standalonejob/standalone_pipeline.ipynb deleted file mode 100644 index 353001f2ecd..00000000000 --- a/samples/contrib/arena-samples/standalonejob/standalone_pipeline.ipynb +++ /dev/null @@ -1,347 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Arena Kubeflow Pipeline Notebook demo\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Prepare data volume\n", - "\n", - "You should prepare data volume `user-susan` by following [docs](https://github.com/kubeflow/arena/blob/master/docs/userguide/4-tfjob-distributed-data.md). \n", - "\n", - "And run `arena data list` to check if it's created." - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "NAME ACCESSMODE DESCRIPTION OWNER AGE\r\n", - "katib-mysql ReadWriteOnce 49d\r\n", - "minio-pv-claim ReadWriteOnce 49d\r\n", - "mysql-pv-claim ReadWriteOnce 49d\r\n", - "user-susan ReadWriteMany 49d\r\n" - ] - } - ], - "source": [ - "! arena data list" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Define the necessary environment variables and install the KubeFlow Pipeline SDK\n", - "We assume this notebook kernel has access to Python's site-packages and is in Python3.\n", - "\n", - "**Please fill in the below environment variables with you own settings.**\n", - "\n", - "- **KFP_PACKAGE**: The latest release of kubeflow pipeline platform library.\n", - "- **KUBEFLOW_PIPELINE_LINK**: The link to access the KubeFlow pipeline API.\n", - "- **MOUNT**: The mount configuration to map data above into the training job. 
The format is 'data:/directory'\n", - "- **GPUs**: The number of the GPUs for training.\n" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "KFP_SERVICE=\"ml-pipeline.kubeflow.svc.cluster.local:8888\"\n", - "KFP_PACKAGE = 'http://kubeflow.oss-cn-beijing.aliyuncs.com/kfp/0.1.14/kfp.tar.gz'\n", - "KFP_ARENA_PACKAGE = 'http://kubeflow.oss-cn-beijing.aliyuncs.com/kfp-arena/kfp-arena-0.3.tar.gz'\n", - "KUBEFLOW_PIPELINE_LINK = ''\n", - "MOUNT=\"['user-susan:/training']\"\n", - "GPUs=1" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Install the necessary python packages\n", - "\n", - "Note: Please change pip3 to the package manager that's used for this Notebook Kernel." - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Collecting http://kubeflow.oss-cn-beijing.aliyuncs.com/kfp/v0.4.0/kfp.tar.gz\n", - "\u001b[?25l Downloading http://kubeflow.oss-cn-beijing.aliyuncs.com/kfp/v0.4.0/kfp.tar.gz (133kB)\n", - "\u001b[K 100% |████████████████████████████████| 143kB 2.0MB/s ta 0:00:01\n", - "\u001b[?25hRequirement already satisfied, skipping upgrade: urllib3>=1.15 in /opt/conda/lib/python3.6/site-packages (from kfp==0.1) (1.22)\n", - "Requirement already satisfied, skipping upgrade: six>=1.10 in /opt/conda/lib/python3.6/site-packages (from kfp==0.1) (1.11.0)\n", - "Requirement already satisfied, skipping upgrade: certifi in /opt/conda/lib/python3.6/site-packages (from kfp==0.1) (2018.11.29)\n", - "Requirement already satisfied, skipping upgrade: python-dateutil in /opt/conda/lib/python3.6/site-packages (from kfp==0.1) (2.7.5)\n", - "Requirement already satisfied, skipping upgrade: PyYAML in /opt/conda/lib/python3.6/site-packages (from kfp==0.1) (3.13)\n", - "Requirement already satisfied, skipping upgrade: google-cloud-storage==1.13.0 in /opt/conda/lib/python3.6/site-packages (from kfp==0.1) (1.13.0)\n", - "Requirement already satisfied, skipping upgrade: kubernetes==8.0.0 in /opt/conda/lib/python3.6/site-packages (from kfp==0.1) (8.0.0)\n", - "Requirement already satisfied, skipping upgrade: PyJWT==1.6.4 in /opt/conda/lib/python3.6/site-packages (from kfp==0.1) (1.6.4)\n", - "Requirement already satisfied, skipping upgrade: cryptography==2.4.2 in /opt/conda/lib/python3.6/site-packages (from kfp==0.1) (2.4.2)\n", - "Requirement already satisfied, skipping upgrade: google-auth==1.6.1 in /opt/conda/lib/python3.6/site-packages (from kfp==0.1) (1.6.1)\n", - "Requirement already satisfied, skipping upgrade: requests_toolbelt==0.8.0 in /opt/conda/lib/python3.6/site-packages (from kfp==0.1) (0.8.0)\n", - "Requirement already satisfied, skipping upgrade: google-cloud-core<0.29dev,>=0.28.0 in /opt/conda/lib/python3.6/site-packages (from google-cloud-storage==1.13.0->kfp==0.1) (0.28.1)\n", - "Requirement already satisfied, skipping upgrade: google-resumable-media>=0.3.1 in /opt/conda/lib/python3.6/site-packages (from google-cloud-storage==1.13.0->kfp==0.1) (0.3.1)\n", - "Requirement already satisfied, skipping upgrade: google-api-core<2.0.0dev,>=0.1.1 in /opt/conda/lib/python3.6/site-packages (from google-cloud-storage==1.13.0->kfp==0.1) (1.6.0)\n", - "Requirement already satisfied, skipping upgrade: adal>=1.0.2 in /opt/conda/lib/python3.6/site-packages (from kubernetes==8.0.0->kfp==0.1) (1.2.1)\n", - "Requirement already satisfied, skipping upgrade: requests-oauthlib in /opt/conda/lib/python3.6/site-packages 
(from kubernetes==8.0.0->kfp==0.1) (1.0.0)\n", - "Requirement already satisfied, skipping upgrade: websocket-client!=0.40.0,!=0.41.*,!=0.42.*,>=0.32.0 in /opt/conda/lib/python3.6/site-packages (from kubernetes==8.0.0->kfp==0.1) (0.54.0)\n", - "Requirement already satisfied, skipping upgrade: requests in /opt/conda/lib/python3.6/site-packages (from kubernetes==8.0.0->kfp==0.1) (2.18.4)\n", - "Requirement already satisfied, skipping upgrade: setuptools>=21.0.0 in /opt/conda/lib/python3.6/site-packages (from kubernetes==8.0.0->kfp==0.1) (38.4.0)\n", - "Requirement already satisfied, skipping upgrade: cffi!=1.11.3,>=1.7 in /opt/conda/lib/python3.6/site-packages (from cryptography==2.4.2->kfp==0.1) (1.11.4)\n", - "Requirement already satisfied, skipping upgrade: idna>=2.1 in /opt/conda/lib/python3.6/site-packages (from cryptography==2.4.2->kfp==0.1) (2.6)\n", - "Requirement already satisfied, skipping upgrade: asn1crypto>=0.21.0 in /opt/conda/lib/python3.6/site-packages (from cryptography==2.4.2->kfp==0.1) (0.24.0)\n", - "Requirement already satisfied, skipping upgrade: pyasn1-modules>=0.2.1 in /opt/conda/lib/python3.6/site-packages (from google-auth==1.6.1->kfp==0.1) (0.2.2)\n", - "Requirement already satisfied, skipping upgrade: cachetools>=2.0.0 in /opt/conda/lib/python3.6/site-packages (from google-auth==1.6.1->kfp==0.1) (3.0.0)\n", - "Requirement already satisfied, skipping upgrade: rsa>=3.1.4 in /opt/conda/lib/python3.6/site-packages (from google-auth==1.6.1->kfp==0.1) (4.0)\n", - "Requirement already satisfied, skipping upgrade: protobuf>=3.4.0 in /opt/conda/lib/python3.6/site-packages (from google-api-core<2.0.0dev,>=0.1.1->google-cloud-storage==1.13.0->kfp==0.1) (3.6.1)\n", - "Requirement already satisfied, skipping upgrade: pytz in /opt/conda/lib/python3.6/site-packages (from google-api-core<2.0.0dev,>=0.1.1->google-cloud-storage==1.13.0->kfp==0.1) (2018.7)\n", - "Requirement already satisfied, skipping upgrade: googleapis-common-protos!=1.5.4,<2.0dev,>=1.5.3 in /opt/conda/lib/python3.6/site-packages (from google-api-core<2.0.0dev,>=0.1.1->google-cloud-storage==1.13.0->kfp==0.1) (1.5.5)\n", - "Requirement already satisfied, skipping upgrade: oauthlib>=0.6.2 in /opt/conda/lib/python3.6/site-packages (from requests-oauthlib->kubernetes==8.0.0->kfp==0.1) (2.1.0)\n", - "Requirement already satisfied, skipping upgrade: chardet<3.1.0,>=3.0.2 in /opt/conda/lib/python3.6/site-packages (from requests->kubernetes==8.0.0->kfp==0.1) (3.0.4)\n", - "Requirement already satisfied, skipping upgrade: pycparser in /opt/conda/lib/python3.6/site-packages (from cffi!=1.11.3,>=1.7->cryptography==2.4.2->kfp==0.1) (2.18)\n", - "Requirement already satisfied, skipping upgrade: pyasn1<0.5.0,>=0.4.1 in /opt/conda/lib/python3.6/site-packages (from pyasn1-modules>=0.2.1->google-auth==1.6.1->kfp==0.1) (0.4.4)\n", - "Building wheels for collected packages: kfp\n", - " Running setup.py bdist_wheel for kfp ... 
\u001b[?25ldone\n", - "\u001b[?25h Stored in directory: /tmp/pip-ephem-wheel-cache-1wm5ld15/wheels/f0/e0/47/2f1e28c1a54da10332867d1f9cc25bfb916c0a4b8ea47029db\n", - "Successfully built kfp\n", - "Installing collected packages: kfp\n", - " Found existing installation: kfp 0.1\n", - " Uninstalling kfp-0.1:\n", - " Successfully uninstalled kfp-0.1\n", - "Successfully installed kfp-0.1\n", - "\u001b[33mYou are using pip version 18.1, however version 19.0.3 is available.\n", - "You should consider upgrading via the 'pip install --upgrade pip' command.\u001b[0m\n" - ] - } - ], - "source": [ - "!pip3 install $KFP_PACKAGE --upgrade" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Note: Install arena's python package" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Collecting http://kubeflow.oss-cn-beijing.aliyuncs.com/kip-arena/kfp-arena-0.1.tar.gz\n", - " Downloading http://kubeflow.oss-cn-beijing.aliyuncs.com/kip-arena/kfp-arena-0.1.tar.gz\n", - "Requirement already satisfied, skipping upgrade: kfp>=0.1 in /opt/conda/lib/python3.6/site-packages (from kfp-arena==0.1) (0.1)\n", - "Requirement already satisfied, skipping upgrade: python-dateutil in /opt/conda/lib/python3.6/site-packages (from kfp>=0.1->kfp-arena==0.1) (2.7.5)\n", - "Requirement already satisfied, skipping upgrade: urllib3>=1.15 in /opt/conda/lib/python3.6/site-packages (from kfp>=0.1->kfp-arena==0.1) (1.22)\n", - "Requirement already satisfied, skipping upgrade: six>=1.10 in /opt/conda/lib/python3.6/site-packages (from kfp>=0.1->kfp-arena==0.1) (1.11.0)\n", - "Requirement already satisfied, skipping upgrade: requests-toolbelt==0.8.0 in /opt/conda/lib/python3.6/site-packages (from kfp>=0.1->kfp-arena==0.1) (0.8.0)\n", - "Requirement already satisfied, skipping upgrade: google-auth==1.6.1 in /opt/conda/lib/python3.6/site-packages (from kfp>=0.1->kfp-arena==0.1) (1.6.1)\n", - "Requirement already satisfied, skipping upgrade: PyYAML in /opt/conda/lib/python3.6/site-packages (from kfp>=0.1->kfp-arena==0.1) (3.13)\n", - "Requirement already satisfied, skipping upgrade: kubernetes==8.0.0 in /opt/conda/lib/python3.6/site-packages (from kfp>=0.1->kfp-arena==0.1) (8.0.0)\n", - "Requirement already satisfied, skipping upgrade: certifi in /opt/conda/lib/python3.6/site-packages (from kfp>=0.1->kfp-arena==0.1) (2018.11.29)\n", - "Requirement already satisfied, skipping upgrade: google-cloud-storage==1.13.0 in /opt/conda/lib/python3.6/site-packages (from kfp>=0.1->kfp-arena==0.1) (1.13.0)\n", - "Requirement already satisfied, skipping upgrade: cryptography==2.4.2 in /opt/conda/lib/python3.6/site-packages (from kfp>=0.1->kfp-arena==0.1) (2.4.2)\n", - "Requirement already satisfied, skipping upgrade: PyJWT==1.6.4 in /opt/conda/lib/python3.6/site-packages (from kfp>=0.1->kfp-arena==0.1) (1.6.4)\n", - "Requirement already satisfied, skipping upgrade: requests<3.0.0,>=2.0.1 in /opt/conda/lib/python3.6/site-packages (from requests-toolbelt==0.8.0->kfp>=0.1->kfp-arena==0.1) (2.18.4)\n", - "Requirement already satisfied, skipping upgrade: cachetools>=2.0.0 in /opt/conda/lib/python3.6/site-packages (from google-auth==1.6.1->kfp>=0.1->kfp-arena==0.1) (3.0.0)\n", - "Requirement already satisfied, skipping upgrade: rsa>=3.1.4 in /opt/conda/lib/python3.6/site-packages (from google-auth==1.6.1->kfp>=0.1->kfp-arena==0.1) (4.0)\n", - "Requirement already satisfied, skipping upgrade: pyasn1-modules>=0.2.1 in 
/opt/conda/lib/python3.6/site-packages (from google-auth==1.6.1->kfp>=0.1->kfp-arena==0.1) (0.2.2)\n", - "Requirement already satisfied, skipping upgrade: websocket-client!=0.40.0,!=0.41.*,!=0.42.*,>=0.32.0 in /opt/conda/lib/python3.6/site-packages (from kubernetes==8.0.0->kfp>=0.1->kfp-arena==0.1) (0.54.0)\n", - "Requirement already satisfied, skipping upgrade: requests-oauthlib in /opt/conda/lib/python3.6/site-packages (from kubernetes==8.0.0->kfp>=0.1->kfp-arena==0.1) (1.0.0)\n", - "Requirement already satisfied, skipping upgrade: setuptools>=21.0.0 in /opt/conda/lib/python3.6/site-packages (from kubernetes==8.0.0->kfp>=0.1->kfp-arena==0.1) (38.4.0)\n", - "Requirement already satisfied, skipping upgrade: adal>=1.0.2 in /opt/conda/lib/python3.6/site-packages (from kubernetes==8.0.0->kfp>=0.1->kfp-arena==0.1) (1.2.1)\n", - "Requirement already satisfied, skipping upgrade: google-resumable-media>=0.3.1 in /opt/conda/lib/python3.6/site-packages (from google-cloud-storage==1.13.0->kfp>=0.1->kfp-arena==0.1) (0.3.1)\n", - "Requirement already satisfied, skipping upgrade: google-api-core<2.0.0dev,>=0.1.1 in /opt/conda/lib/python3.6/site-packages (from google-cloud-storage==1.13.0->kfp>=0.1->kfp-arena==0.1) (1.6.0)\n", - "Requirement already satisfied, skipping upgrade: google-cloud-core<0.29dev,>=0.28.0 in /opt/conda/lib/python3.6/site-packages (from google-cloud-storage==1.13.0->kfp>=0.1->kfp-arena==0.1) (0.28.1)\n", - "Requirement already satisfied, skipping upgrade: cffi!=1.11.3,>=1.7 in /opt/conda/lib/python3.6/site-packages (from cryptography==2.4.2->kfp>=0.1->kfp-arena==0.1) (1.11.4)\n", - "Requirement already satisfied, skipping upgrade: asn1crypto>=0.21.0 in /opt/conda/lib/python3.6/site-packages (from cryptography==2.4.2->kfp>=0.1->kfp-arena==0.1) (0.24.0)\n", - "Requirement already satisfied, skipping upgrade: idna>=2.1 in /opt/conda/lib/python3.6/site-packages (from cryptography==2.4.2->kfp>=0.1->kfp-arena==0.1) (2.6)\n", - "Requirement already satisfied, skipping upgrade: chardet<3.1.0,>=3.0.2 in /opt/conda/lib/python3.6/site-packages (from requests<3.0.0,>=2.0.1->requests-toolbelt==0.8.0->kfp>=0.1->kfp-arena==0.1) (3.0.4)\n", - "Requirement already satisfied, skipping upgrade: pyasn1>=0.1.3 in /opt/conda/lib/python3.6/site-packages (from rsa>=3.1.4->google-auth==1.6.1->kfp>=0.1->kfp-arena==0.1) (0.4.4)\n", - "Requirement already satisfied, skipping upgrade: oauthlib>=0.6.2 in /opt/conda/lib/python3.6/site-packages (from requests-oauthlib->kubernetes==8.0.0->kfp>=0.1->kfp-arena==0.1) (2.1.0)\n", - "Requirement already satisfied, skipping upgrade: protobuf>=3.4.0 in /opt/conda/lib/python3.6/site-packages (from google-api-core<2.0.0dev,>=0.1.1->google-cloud-storage==1.13.0->kfp>=0.1->kfp-arena==0.1) (3.6.1)\n", - "Requirement already satisfied, skipping upgrade: pytz in /opt/conda/lib/python3.6/site-packages (from google-api-core<2.0.0dev,>=0.1.1->google-cloud-storage==1.13.0->kfp>=0.1->kfp-arena==0.1) (2018.7)\n", - "Requirement already satisfied, skipping upgrade: googleapis-common-protos!=1.5.4,<2.0dev,>=1.5.3 in /opt/conda/lib/python3.6/site-packages (from google-api-core<2.0.0dev,>=0.1.1->google-cloud-storage==1.13.0->kfp>=0.1->kfp-arena==0.1) (1.5.5)\n", - "Requirement already satisfied, skipping upgrade: pycparser in /opt/conda/lib/python3.6/site-packages (from cffi!=1.11.3,>=1.7->cryptography==2.4.2->kfp>=0.1->kfp-arena==0.1) (2.18)\n", - "Building wheels for collected packages: kfp-arena\n", - " Running setup.py bdist_wheel for kfp-arena ... 
\u001b[?25ldone\n", - "\u001b[?25h Stored in directory: /home/jovyan/.cache/pip/wheels/6a/d3/d5/f99c7966cacbcbad2922bf614c88c523c869c16d26e549a087\n", - "Successfully built kfp-arena\n", - "Installing collected packages: kfp-arena\n", - "Successfully installed kfp-arena-0.1\n", - "\u001b[33mYou are using pip version 18.1, however version 19.0.3 is available.\n", - "You should consider upgrading via the 'pip install --upgrade pip' command.\u001b[0m\n" - ] - } - ], - "source": [ - "!pip3 install $KFP_ARENA_PACKAGE --upgrade" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 2. Define pipeline tasks using the kfp library. " - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [], - "source": [ - "import arena\n", - "import kfp.dsl as dsl\n", - "\n", - "@dsl.pipeline(\n", - " name='pipeline to run jobs',\n", - " description='shows how to run pipeline jobs.'\n", - ")\n", - "def sample_pipeline(learning_rate='0.01',\n", - " dropout='0.9',\n", - " model_version='1'):\n", - " \"\"\"A pipeline for end to end machine learning workflow.\"\"\"\n", - "\n", - " # 1. prepare data\n", - " prepare_data = arena.StandaloneOp(\n", - " name=\"prepare-data\",\n", - "\timage=\"byrnedo/alpine-curl\",\n", - " data=MOUNT,\n", - "\tcommand=\"mkdir -p /training/dataset/mnist && \\\n", - " cd /training/dataset/mnist && \\\n", - " curl -O https://code.aliyun.com/xiaozhou/tensorflow-sample-code/raw/master/data/t10k-images-idx3-ubyte.gz && \\\n", - " curl -O https://code.aliyun.com/xiaozhou/tensorflow-sample-code/raw/master/data/t10k-labels-idx1-ubyte.gz && \\\n", - " curl -O https://code.aliyun.com/xiaozhou/tensorflow-sample-code/raw/master/data/train-images-idx3-ubyte.gz && \\\n", - " curl -O https://code.aliyun.com/xiaozhou/tensorflow-sample-code/raw/master/data/train-labels-idx1-ubyte.gz\")\n", - " # 2. prepare source code\n", - " prepare_code = arena.StandaloneOp(\n", - " name=\"source-code\",\n", - " image=\"alpine/git\",\n", - " data=MOUNT,\n", - " command=\"mkdir -p /training/models/ && \\\n", - " cd /training/models/ && \\\n", - " if [ ! -d /training/models/tensorflow-sample-code ]; then https://github.com/cheyang/tensorflow-sample-code.git; else echo no need download;fi\")\n", - "\n", - " # 3. train the models\n", - " train = arena.StandaloneOp(\n", - " name=\"train\",\n", - " image=\"tensorflow/tensorflow:1.11.0-gpu-py3\",\n", - " gpus=GPUs,\n", - " data=MOUNT,\n", - " command=\"echo %s; \\\n", - " echo %s; \\\n", - " python /training/models/tensorflow-sample-code/tfjob/docker/mnist/main.py --max_steps 500 --data_dir /training/dataset/mnist --log_dir /training/output/mnist\" % (prepare_data.output, prepare_code.output),\n", - " metric_name=\"Train-accuracy\",\n", - " metric_unit=\"PERCENTAGE\",\n", - " )\n", - " # 4. 
export the model\n", - " export_model = arena.StandaloneOp(\n", - " name=\"export-model\",\n", - " image=\"tensorflow/tensorflow:1.11.0-py3\",\n", - " data=MOUNT,\n", - " command=\"echo %s; \\\n", - " python /training/models/tensorflow-sample-code/tfjob/docker/mnist/export_model.py --model_version=%s --checkpoint_step=400 --checkpoint_path=/training/output/mnist /training/output/models\" % (train.output,model_version))\n" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "Run link here" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The above run link is assuming you ran this cell on JupyterHub that is deployed on the same cluster. The actual run link is /#/runs/details/cac8aef4-4aaa-11e9-8264-00163e13f33e\n" - ] - } - ], - "source": [ - "learning_rate = \"0.001\"\n", - "dropout = \"0.8\"\n", - "model_verison = \"1\"\n", - "\n", - "arguments = {\n", - " 'learning_rate': learning_rate,\n", - " 'dropout': dropout,\n", - " 'model_version': model_version,\n", - "}\n", - "\n", - "import kfp\n", - "client = kfp.Client(host=KUBEFLOW_PIPELINE_LINK)\n", - "run = client.create_run_from_pipeline_func(sample_pipeline, arguments=arguments).run_info\n", - "\n", - "print('The above run link is assuming you ran this cell on JupyterHub that is deployed on the same cluster. ' +\n", - " 'The actual run link is ' + KUBEFLOW_PIPELINE_LINK + '/#/runs/details/' + run.id)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.8" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/samples/contrib/arena-samples/standalonejob/standalone_pipeline.py b/samples/contrib/arena-samples/standalonejob/standalone_pipeline.py deleted file mode 100644 index a317dbacd71..00000000000 --- a/samples/contrib/arena-samples/standalonejob/standalone_pipeline.py +++ /dev/null @@ -1,91 +0,0 @@ -import kfp -import arena -import kfp.dsl as dsl -import argparse - -FLAGS = None - -@dsl.pipeline( - name='pipeline to run jobs', - description='shows how to run pipeline jobs.' -) -def sample_pipeline(learning_rate='0.01', - dropout='0.9', - model_version='1', - commit='f097575656f927d86d99dd64931042e1a9003cb2'): - """A pipeline for end to end machine learning workflow.""" - data=["user-susan:/training"] - gpus=1 - - # 1. prepare data - prepare_data = arena.standalone_job_op( - name="prepare-data", - image="byrnedo/alpine-curl", - data=data, - command="mkdir -p /training/dataset/mnist && \ - cd /training/dataset/mnist && \ - curl -O https://code.aliyun.com/xiaozhou/tensorflow-sample-code/raw/master/data/t10k-images-idx3-ubyte.gz && \ - curl -O https://code.aliyun.com/xiaozhou/tensorflow-sample-code/raw/master/data/t10k-labels-idx1-ubyte.gz && \ - curl -O https://code.aliyun.com/xiaozhou/tensorflow-sample-code/raw/master/data/train-images-idx3-ubyte.gz && \ - curl -O https://code.aliyun.com/xiaozhou/tensorflow-sample-code/raw/master/data/train-labels-idx1-ubyte.gz") - - # 2. 
download source code and train the models - train = arena.standalone_job_op( - name="train", - image="tensorflow/tensorflow:1.11.0-gpu-py3", - sync_source="https://code.aliyun.com/xiaozhou/tensorflow-sample-code.git", - env=["GIT_SYNC_REV=%s" % (commit)], - gpus=gpus, - data=data, - command='''echo prepare_step_name=%s and prepare_wf_name=%s && \ - python code/tensorflow-sample-code/tfjob/docker/mnist/main.py --max_steps 500 \ - --data_dir /training/dataset/mnist \ - --log_dir /training/output/mnist \ - --learning_rate %s --dropout %s''' % ( - prepare_data.outputs['step'], - prepare_data.outputs['workflow'], - learning_rate, - dropout), - metrics=["Train-accuracy:PERCENTAGE"]) - # 3. export the model - export_model = arena.standalone_job_op( - name="export-model", - image="tensorflow/tensorflow:1.11.0-py3", - sync_source="https://code.aliyun.com/xiaozhou/tensorflow-sample-code.git", - env=["GIT_SYNC_REV=%s" % (commit)], - data=data, - command="echo train_step_name=%s and train_wf_name=%s && \ - python code/tensorflow-sample-code/tfjob/docker/mnist/export_model.py \ - --model_version=%s \ - --checkpoint_path=/training/output/mnist \ - /training/output/models" % (train.outputs['step'], train.outputs['workflow'], model_version)) - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('--model_version', type=str, - default="1", - help='model version.') - parser.add_argument('--dropout', type=str, default="0.9", - help='Keep probability for training dropout.') - parser.add_argument('--learning_rate', type=str, default="0.001", - help='Initial learning rate.') - parser.add_argument('--commit', type=str, default="f097575656f927d86d99dd64931042e1a9003cb2", - help='commit id.') - FLAGS, unparsed = parser.parse_known_args() - - model_version = FLAGS.model_version - dropout = FLAGS.dropout - learning_rate = FLAGS.learning_rate - commit = FLAGS.commit - - arguments = { - 'learning_rate': learning_rate, - 'dropout': dropout, - 'model_version': model_version, - 'commit': commit, - } - - KFP_SERVICE="ml-pipeline.kubeflow.svc.cluster.local:8888" - client = kfp.Client(host=KFP_SERVICE) - - client.create_run_from_pipeline_func(sample_pipeline, arguments=arguments) diff --git a/samples/contrib/aws-samples/README.md b/samples/contrib/aws-samples/README.md deleted file mode 100644 index 866999b74e6..00000000000 --- a/samples/contrib/aws-samples/README.md +++ /dev/null @@ -1,49 +0,0 @@ -# Sample AWS SageMaker Kubeflow Pipelines - -This folder contains many example pipelines which use [AWS SageMaker Components for KFP](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker). The following sections explain the setup needed to run these pipelines. Once you are done with the setup, [simple_train_pipeline](https://github.com/kubeflow/pipelines/tree/master/samples/contrib/aws-samples/simple_train_pipeline) is a good place to start if you have never used these components before. - - - -## Prerequisites - - You need a cluster with Kubeflow Pipelines installed and permissions configured. Kubeflow Pipelines offers two installation options. Select the option that applies to your use case: - -
**Full Kubeflow on AWS Deployment**

1. To use other Kubeflow components in addition to Kubeflow Pipelines, install the [AWS Distribution of Kubeflow](https://awslabs.github.io/kubeflow-manifests/docs/deployment/).

2. Configure permissions to access SageMaker services by following the guide on [Kubeflow on AWS documentation](https://awslabs.github.io/kubeflow-manifests/docs/amazon-sagemaker-integration/sagemaker-components-for-kubeflow-pipelines/).

**Standalone Kubeflow Pipelines Deployment**

1. Install Kubeflow Pipelines standalone by following the documentation on [SageMaker developer guide](https://docs.aws.amazon.com/sagemaker/latest/dg/setup.html#kubeflow-pipelines-standalone).

2. Configure permissions to access SageMaker services by following the guide on [SageMaker developer guide](https://docs.aws.amazon.com/sagemaker/latest/dg/setup.html#configure-permissions-for-pipeline).
## Inputs to the pipeline - -### SageMaker execution role -**Note:** Ignore this section if you plan to run the [titanic-survival-prediction](https://github.com/kubeflow/pipelines/tree/master/samples/contrib/aws-samples/titanic-survival-prediction) example - -This role is used by the SageMaker jobs created by KFP to access S3 buckets and other AWS resources. -Run these commands to create the sagemaker-execution-role. -Note down the Role ARN; you need to give this Role ARN as an input to the pipeline. - -``` -TRUST="{ \"Version\": \"2012-10-17\", \"Statement\": [ { \"Effect\": \"Allow\", \"Principal\": { \"Service\": \"sagemaker.amazonaws.com\" }, \"Action\": \"sts:AssumeRole\" } ] }" -aws iam create-role --role-name kfp-example-sagemaker-execution-role --assume-role-policy-document "$TRUST" -aws iam attach-role-policy --role-name kfp-example-sagemaker-execution-role --policy-arn arn:aws:iam::aws:policy/AmazonSageMakerFullAccess -aws iam attach-role-policy --role-name kfp-example-sagemaker-execution-role --policy-arn arn:aws:iam::aws:policy/AmazonS3FullAccess -aws iam get-role --role-name kfp-example-sagemaker-execution-role --output text --query 'Role.Arn' - -# note down the Role ARN or export it to an env variable. -export SAGEMAKER_EXECUTION_ROLE_ARN=$(aws iam get-role --role-name kfp-example-sagemaker-execution-role --output text --query 'Role.Arn') -echo $SAGEMAKER_EXECUTION_ROLE_ARN -``` \ No newline at end of file diff --git a/samples/contrib/aws-samples/ground_truth_pipeline_demo/README.md b/samples/contrib/aws-samples/ground_truth_pipeline_demo/README.md deleted file mode 100644 index 6e2be753a18..00000000000 --- a/samples/contrib/aws-samples/ground_truth_pipeline_demo/README.md +++ /dev/null @@ -1,75 +0,0 @@ -The `mini-image-classification-pipeline.py` sample runs a pipeline to demonstrate usage of the create workteam, Ground Truth, and train components. - -This sample is based on [this example](https://github.com/awslabs/amazon-sagemaker-examples/blob/master/ground_truth_labeling_jobs/from_unlabeled_data_to_deployed_machine_learning_model_ground_truth_demo_image_classification/from_unlabeled_data_to_deployed_machine_learning_model_ground_truth_demo_image_classification.ipynb). - -The sample goes through the workflow of creating a private workteam, creating data labeling jobs for that team, and running a training job using the newly labeled data. - -## Prerequisites - -Make sure you have the setup explained in this [README.md](https://github.com/kubeflow/pipelines/blob/master/samples/contrib/aws-samples/README.md) -(This pipeline does not use the MNIST dataset. Follow the instructions below to get the sample dataset.) - -## Prep the dataset, label categories, and UI template - -For this demo, you will be using a very small subset of the [Google Open Images dataset](https://storage.googleapis.com/openimages/web/index.html). - -Run the following to download `openimgs-annotations.csv`: -```bash -wget --no-verbose https://storage.googleapis.com/openimages/2018_04/test/test-annotations-human-imagelabels-boxable.csv -O openimgs-annotations.csv -``` -Create an S3 bucket and run [this Python script](https://github.com/kubeflow/pipelines/tree/master/samples/contrib/aws-samples/ground_truth_pipeline_demo/prep_inputs.py) to get the images and generate `train.manifest`, `validation.manifest`, `class_labels.json`, and `instructions.template`.
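If you have not created that bucket yet, the sketch below shows one possible way to do it with `boto3` before running the prep script. This is not part of the original sample: it assumes `boto3` is installed and AWS credentials are configured, and the bucket name is only a placeholder.

```python
# Minimal sketch (not part of the original sample): create the S3 bucket that
# prep_inputs.py will upload the images, manifests, and templates to.
import boto3

region = boto3.session.Session().region_name or "us-east-1"
bucket = "my-kfp-ground-truth-demo-bucket"  # placeholder, must be globally unique

s3 = boto3.client("s3", region_name=region)
if region == "us-east-1":
    # us-east-1 does not accept a LocationConstraint.
    s3.create_bucket(Bucket=bucket)
else:
    s3.create_bucket(
        Bucket=bucket,
        CreateBucketConfiguration={"LocationConstraint": region},
    )
print("Created bucket:", bucket)
```

Afterwards, set `BUCKET` at the top of `prep_inputs.py` to the same name before running it.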
## Amazon Cognito user groups - -From Amazon Cognito, note down the Pool ID, User Group Name, and App client ID. -You need this information to fill in the arguments user_pool, user_groups, and client_id. - -[Official doc for Amazon Cognito](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-getting-started.html) - -For this demo, you can create a new user pool (if you don't have one already). - -AWS console -> Amazon SageMaker -> Ground Truth, Labeling workforces -> Private -> Create Private Team -> name it "KFP-ground-truth-demo-pool" and use your email address -> Create Private team -> Click on the radio button and from the summary note down the "Amazon Cognito user pool", "App client" and "Labeling portal sign-in URL" -> click on the team name that you created and note down "Amazon Cognito user group" - -Use the info that you noted down to fill in the pipeline arguments: -user_pool = Amazon Cognito user pool -user_groups = Amazon Cognito user group -client_id = App client - -> Note: Once you start a run of the pipeline, you will receive the Ground Truth labeling jobs at the "Labeling portal sign-in URL" link - - -## Compiling the pipeline template - -Follow the guide to [building a pipeline](https://www.kubeflow.org/docs/guides/pipelines/build-pipeline/) to install the Kubeflow Pipelines SDK, then run the following command to compile the sample Python into a workflow specification. The specification takes the form of a YAML file compressed into a `.tar.gz` file. - - -```bash -dsl-compile --py mini-image-classification-pipeline.py --output mini-image-classification-pipeline.tar.gz -``` - -## Deploying the pipeline - -Open the Kubeflow Pipelines UI. Create a new pipeline, and then upload the compiled specification (`.tar.gz` file) as a new pipeline template. - -The pipeline requires several arguments - replace `role_arn`, the Amazon Cognito information, and the S3 input paths with your settings, and run the pipeline. - -> Note: team_name, ground_truth_train_job_name and ground_truth_validation_job_name must be unique; the pipeline will fail if the names already exist - -If you are a new worker, you will receive an email with a link to the labeling portal and login information after the create workteam component completes. -During the execution of the two Ground Truth components (one for training data, one for validation data), the labeling jobs will appear in the portal and you will need to complete these jobs. - -After the pipeline finishes, you may delete the user pool/user group and the S3 bucket.
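As an alternative to uploading the compiled `.tar.gz` through the UI described above, the sketch below submits the pipeline with the KFP SDK client used elsewhere in these samples. It is not part of the original sample: it assumes you run it from this sample directory inside a checkout of the repository (the pipeline file loads the component YAMLs by relative path), and the host address and argument values are placeholders; the remaining Ground Truth and S3 arguments fall back to the defaults in `mini-image-classification-pipeline.py`, which you would also override with your own settings.

```python
# Minimal sketch (not part of the original sample): submit the Ground Truth
# pipeline programmatically instead of uploading it through the UI.
import importlib.util
import kfp

# Load the pipeline function from the dash-separated file name.
spec = importlib.util.spec_from_file_location(
    "mini_image_classification_pipeline", "mini-image-classification-pipeline.py"
)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)

client = kfp.Client(host="http://localhost:8888")  # placeholder KFP endpoint
client.create_run_from_pipeline_func(
    module.ground_truth_test,
    arguments={
        "role_arn": "<SageMaker execution role ARN>",
        "user_pool": "<Amazon Cognito user pool>",
        "user_groups": "<Amazon Cognito user group>",
        "client_id": "<App client>",
    },
)
```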
- - - -## Components source - -Create Workteam: - [source code](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/workteam/src) - -Ground Truth Labeling: - [source code](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/ground_truth/src) - -Training: - [source code](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/train/src) diff --git a/samples/contrib/aws-samples/ground_truth_pipeline_demo/mini-image-classification-pipeline.py b/samples/contrib/aws-samples/ground_truth_pipeline_demo/mini-image-classification-pipeline.py deleted file mode 100644 index ab3685af51e..00000000000 --- a/samples/contrib/aws-samples/ground_truth_pipeline_demo/mini-image-classification-pipeline.py +++ /dev/null @@ -1,156 +0,0 @@ -#!/usr/bin/env python3 - -import kfp -import json -import copy -from kfp import components -from kfp import dsl -from kfp.aws import use_aws_secret - -sagemaker_workteam_op = components.load_component_from_file( - "../../../../components/aws/sagemaker/workteam/component.yaml" -) -sagemaker_gt_op = components.load_component_from_file( - "../../../../components/aws/sagemaker/ground_truth/component.yaml" -) -sagemaker_train_op = components.load_component_from_file( - "../../../../components/aws/sagemaker/train/component.yaml" -) - -channelObjList = [] - -channelObj = { - "ChannelName": "", - "DataSource": { - "S3DataSource": { - "S3Uri": "", - "S3DataType": "AugmentedManifestFile", - "S3DataDistributionType": "FullyReplicated", - "AttributeNames": ["source-ref", "category"], - } - }, - "ContentType": "application/x-recordio", - "CompressionType": "None", - "RecordWrapperType": "RecordIO", -} - - -@dsl.pipeline( - name="Ground Truth image classification test pipeline", - description="SageMaker Ground Truth job test", -) -def ground_truth_test( - region="us-west-2", - team_name="ground-truth-demo-team", - team_description="Team for mini image classification labeling job", - user_pool="", - user_groups="", - client_id="", - ground_truth_train_job_name="mini-image-classification-demo-train", - ground_truth_validation_job_name="mini-image-classification-demo-validation", - ground_truth_label_attribute_name="category", - ground_truth_train_manifest_location="s3://your-bucket-name/mini-image-classification/ground-truth-demo/train.manifest", - ground_truth_validation_manifest_location="s3://your-bucket-name/mini-image-classification/ground-truth-demo/validation.manifest", - ground_truth_output_location="s3://your-bucket-name/mini-image-classification/ground-truth-demo/output", - ground_truth_task_type="image classification", - ground_truth_worker_type="private", - ground_truth_label_category_config="s3://your-bucket-name/mini-image-classification/ground-truth-demo/class_labels.json", - ground_truth_ui_template="s3://your-bucket-name/mini-image-classification/ground-truth-demo/instructions.template", - ground_truth_title="Mini image classification", - ground_truth_description="Test for Ground Truth KFP component", - ground_truth_num_workers_per_object=1, - ground_truth_time_limit=30, - ground_truth_task_availibility=3600, - ground_truth_max_concurrent_tasks=20, - training_algorithm_name="image classification", - training_input_mode="Pipe", - training_hyperparameters={ - "num_classes": "2", - "num_training_samples": "14", - "mini_batch_size": "2", - }, - training_output_location="s3://your-bucket-name/mini-image-classification/training-output", - training_instance_type="ml.m5.2xlarge", - training_instance_count=1, - 
training_volume_size=50, - training_max_run_time=3600, - role_arn="", -): - - workteam = sagemaker_workteam_op( - region=region, - team_name=team_name, - description=team_description, - user_pool=user_pool, - user_groups=user_groups, - client_id=client_id, - ) - - ground_truth_train = sagemaker_gt_op( - region=region, - role=role_arn, - job_name=ground_truth_train_job_name, - label_attribute_name=ground_truth_label_attribute_name, - manifest_location=ground_truth_train_manifest_location, - output_location=ground_truth_output_location, - task_type=ground_truth_task_type, - worker_type=ground_truth_worker_type, - workteam_arn=workteam.output, - label_category_config=ground_truth_label_category_config, - ui_template=ground_truth_ui_template, - title=ground_truth_title, - description=ground_truth_description, - num_workers_per_object=ground_truth_num_workers_per_object, - time_limit=ground_truth_time_limit, - task_availibility=ground_truth_task_availibility, - max_concurrent_tasks=ground_truth_max_concurrent_tasks, - ) - - ground_truth_validation = sagemaker_gt_op( - region=region, - role=role_arn, - job_name=ground_truth_validation_job_name, - label_attribute_name=ground_truth_label_attribute_name, - manifest_location=ground_truth_validation_manifest_location, - output_location=ground_truth_output_location, - task_type=ground_truth_task_type, - worker_type=ground_truth_worker_type, - workteam_arn=workteam.output, - label_category_config=ground_truth_label_category_config, - ui_template=ground_truth_ui_template, - title=ground_truth_title, - description=ground_truth_description, - num_workers_per_object=ground_truth_num_workers_per_object, - time_limit=ground_truth_time_limit, - task_availibility=ground_truth_task_availibility, - max_concurrent_tasks=ground_truth_max_concurrent_tasks, - ) - - channelObj["ChannelName"] = "train" - channelObj["DataSource"]["S3DataSource"]["S3Uri"] = str( - ground_truth_train.outputs["output_manifest_location"] - ) - channelObjList.append(copy.deepcopy(channelObj)) - channelObj["ChannelName"] = "validation" - channelObj["DataSource"]["S3DataSource"]["S3Uri"] = str( - ground_truth_validation.outputs["output_manifest_location"] - ) - channelObjList.append(copy.deepcopy(channelObj)) - - training = sagemaker_train_op( - region=region, - algorithm_name=training_algorithm_name, - training_input_mode=training_input_mode, - hyperparameters=training_hyperparameters, - channels=json.dumps(channelObjList), - instance_type=training_instance_type, - instance_count=training_instance_count, - volume_size=training_volume_size, - max_run_time=training_max_run_time, - model_artifact_path=training_output_location, - role=role_arn, - ) - - -if __name__ == "__main__": - kfp.compiler.Compiler().compile(ground_truth_test, __file__ + ".zip") diff --git a/samples/contrib/aws-samples/ground_truth_pipeline_demo/prep_inputs.py b/samples/contrib/aws-samples/ground_truth_pipeline_demo/prep_inputs.py deleted file mode 100644 index c706e6edf77..00000000000 --- a/samples/contrib/aws-samples/ground_truth_pipeline_demo/prep_inputs.py +++ /dev/null @@ -1,134 +0,0 @@ -# Source: https://github.com/awslabs/amazon-sagemaker-examples/blob/master/ground_truth_labeling_jobs/from_unlabeled_data_to_deployed_machine_ -# learning_model_ground_truth_demo_image_classification/from_unlabeled_data_to_deployed_machine_learning_model_ground_truth_demo_image_ -# classification.ipynb - -import itertools -import json -import numpy as np -import boto3 - -BUCKET = "" -EXP_NAME = 
"mini-image-classification/ground-truth-demo" - -# Make sure the bucket is in the same region as this notebook. -region = boto3.session.Session().region_name -s3 = boto3.client("s3") -bucket_region = s3.head_bucket(Bucket=BUCKET)["ResponseMetadata"]["HTTPHeaders"][ - "x-amz-bucket-region" -] -assert ( - bucket_region == region -), "You S3 bucket {} and this notebook need to be in the same region.".format(BUCKET) - -# Process the Open Images annotations. -with open("openimgs-annotations.csv", "r") as f: - all_labels = [line.strip().split(",") for line in f.readlines()] - -# Extract image ids in each of our desired classes. -ims = {} -ims["Musical Instrument"] = [ - label[0] for label in all_labels if (label[2] == "/m/04szw" and label[3] == "1") -][:500] -ims["Fruit"] = [ - label[0] for label in all_labels if (label[2] == "/m/02xwb" and label[3] == "1") -][:371] -ims["Fruit"].remove( - "02a54f6864478101" -) # This image contains personal information, let's remove it from our dataset. -num_classes = len(ims) - -# If running the short version of the demo, reduce each class count 50 times. -for key in ims.keys(): - ims[key] = set(ims[key][: int(len(ims[key]) / 50)]) - -# Copy the images to our local bucket. -print("Copying images to bucket") -s3 = boto3.client("s3") -for img_id, img in enumerate(itertools.chain.from_iterable(ims.values())): - copy_source = {"Bucket": "open-images-dataset", "Key": "test/{}.jpg".format(img)} - s3.copy(copy_source, BUCKET, "{}/images/{}.jpg".format(EXP_NAME, img)) - -# Create and upload the input manifests. -input_data_paths = [ - "s3://{}/{}/images/{}.jpg".format(BUCKET, EXP_NAME, img) - for img in itertools.chain.from_iterable(ims.values()) -] - -# Shuffle input paths in place. -np.random.shuffle(input_data_paths) - -dataset_size = len(input_data_paths) -train_test_split_index = round(dataset_size * 0.8) - -print("Number of training samples: " + str(train_test_split_index)) -print("Number of validation samples: " + str(dataset_size - train_test_split_index)) - -train_data_paths = input_data_paths[:train_test_split_index] -validation_data_paths = input_data_paths[train_test_split_index:] - -with open("train.manifest", "w") as f: - for img_path in train_data_paths: - f.write('{"source-ref": "' + img_path + '"}\n') - -with open("validation.manifest", "w") as f: - for img_path in validation_data_paths: - f.write('{"source-ref": "' + img_path + '"}\n') - -s3.upload_file("train.manifest", BUCKET, EXP_NAME + "/" + "train.manifest") -s3.upload_file("validation.manifest", BUCKET, EXP_NAME + "/" + "validation.manifest") -print("Uploaded manifests at s3://{}/{}".format(BUCKET, EXP_NAME)) - -# Specify categories -CLASS_LIST = list(ims.keys()) -print("Label space is {}".format(CLASS_LIST)) - -json_body = {"labels": [{"label": label} for label in CLASS_LIST]} -with open("class_labels.json", "w") as f: - json.dump(json_body, f) - -s3.upload_file("class_labels.json", BUCKET, EXP_NAME + "/class_labels.json") - -# Create UI template -img_examples = [ - "https://s3.amazonaws.com/open-images-dataset/test/{}".format(img_id) - for img_id in ["0634825fc1dcc96b.jpg", "0415b6a36f3381ed.jpg"] -] - - -def make_template(test_template=False, save_fname="instructions.template"): - template = r""" - - - - - - -

      Dear Annotator, please tell me whether what you can see in the image. Thank you!
      {}
      Example "Musical Instrument".
      {}
      Example "Fruit".
      {categories_str}
            """.format( - *img_examples, - categories_str=str(CLASS_LIST) - if test_template - else "{{ task.input.labels | to_json | escape }}" - ) - - with open(save_fname, "w") as f: - f.write(template) - - -make_template(test_template=True, save_fname="instructions.html") -make_template(test_template=False, save_fname="instructions.template") -s3.upload_file("instructions.template", BUCKET, EXP_NAME + "/instructions.template") diff --git a/samples/contrib/aws-samples/hosting_model_monitor_pipeline/README.md b/samples/contrib/aws-samples/hosting_model_monitor_pipeline/README.md deleted file mode 100644 index 7c009d73848..00000000000 --- a/samples/contrib/aws-samples/hosting_model_monitor_pipeline/README.md +++ /dev/null @@ -1,105 +0,0 @@ -# Host a Model and Create a SageMaker Model Monitor - -This sample demonstrates a Kubeflow pipeline that -- Hosts a machine learning model in Amazon SageMaker -- Monitors a live endpoint for violations against constraints - -## Prerequisites -Follow the steps in [Sample AWS SageMaker Kubeflow Pipelines](../README.md#inputs-to-the-pipeline) - -### Install required packages -Run the following commands to install the script dependencies: - -``` -pip install -r requirements.txt -``` -### Create an IAM Role -Follow [SageMaker execution role](../README.md#inputs-to-the-pipeline) and create an IAM role for SageMaker execution. - -### Create an S3 Bucket -To setup an endpoint and create a monitoring schedule, we need an S3 bucket to store model and baseline data. Run the following commands to create an S3 bucket. Specify the value for `SAGEMAKER_REGION` as the region you want to create your SageMaker resources. For ease of use in the samples (using the default values of the pipeline), we suggest using `us-east-1` as the region. - -``` -export SAGEMAKER_REGION=us-east-1 -export S3_BUCKET_NAME="kfp-sm-data-bucket-${SAGEMAKER_REGION}-$RANDOM" - -if [[ $SAGEMAKER_REGION == "us-east-1" ]]; then - aws s3api create-bucket --bucket ${S3_BUCKET_NAME} --region ${SAGEMAKER_REGION} -else - aws s3api create-bucket --bucket ${S3_BUCKET_NAME} --region ${SAGEMAKER_REGION} \ - --create-bucket-configuration LocationConstraint=${SAGEMAKER_REGION} -fi - -echo ${S3_BUCKET_NAME} -``` - -### Copy data to bucket -Fill the S3 bucket you just created with [sample data](./model-monitor/) which contains: -- A pre-trained model -- Baselining constraints and statistics generated by a ProcessingJob - -1. Clone this repository to use the pipelines and sample data. - ``` - git clone https://github.com/kubeflow/pipelines.git - cd samples/contrib/aws-samples/hosting_model_monitor_pipeline - ``` -1. Download the sample model from the SageMaker sample bucket: - ``` - aws s3 cp s3://sagemaker-sample-files/models/xgb-churn/xgb-churn-prediction-model.tar.gz model-monitor - ``` -1. Run the following command to upload the sample data to your S3 bucket: - ``` - aws s3 cp model-monitor s3://$S3_BUCKET_NAME/model-monitor --recursive - ``` - -After going through above steps, make sure you have the following environment variables set: -- `S3_BUCKET_NAME`: The name of the S3 bucket you created. -- `SAGEMAKER_EXECUTION_ROLE_ARN`: The ARN of the IAM role you created. -- `SAGEMAKER_REGION`: The region where you want to run the pipeline. - -## Compile and run the pipelines -1. To compile the pipeline run: `python hosting_model_monitor_pipeline.py`. This will create a `tar.gz` file. 
After the compilation completes, you will see a message like this: - ``` - =================Pipeline compiled================= - Name prefix: 2023-05-11-10-45-32 - To delete the resources created by this pipeline, run the following commands: - export NAME_PREFIX=2023-05-11-10-45-32 - export NAMESPACE= # Change it to your Kubeflow name space - kubectl delete MonitoringSchedule $NAME_PREFIX-monitoring-schedule -n $NAMESPACE - kubectl delete DataQualityJobDefinition $NAME_PREFIX-data-qual-job-defi -n $NAMESPACE - kubectl delete Endpoint $NAME_PREFIX-endpoint -n $NAMESPACE - kubectl delete EndpointConfig $NAME_PREFIX-endpointcfg -n $NAMESPACE - kubectl delete Model $NAME_PREFIX-model -n $NAMESPACE - ``` - You can use the commands in the message to delete the resources created by this pipeline after you have finished running the pipeline. -1. In the Kubeflow Pipelines UI, upload this compiled pipeline specification (the *.tar.gz* file) and click Create run. -1. Once the pipeline completes, you can see the outputs under 'Output parameters' in the component's Input/Output section. - -## Delete resources created by pipeline - -Export the following environment variables: -``` -export NAMESPACE= -export NAME_PREFIX= -``` -If you are using the standalone installation, the namespace is `kubeflow`. If you are using the full Kubeflow installation, you can find your Kubeflow NAMESPACE in the top bar of the Kubeflow central dashboard. - -You can find the NAME_PREFIX in the component's `sagemaker_resource_name` output parameter, or in the command line output from when you compiled the pipeline. - -To delete the custom resources created by this pipeline, run the following commands: - -``` -kubectl delete MonitoringSchedule $NAME_PREFIX-monitoring-schedule -n $NAMESPACE -kubectl delete DataQualityJobDefinition $NAME_PREFIX-data-qual-job-defi -n $NAMESPACE -kubectl delete Endpoint $NAME_PREFIX-endpoint -n $NAMESPACE -kubectl delete EndpointConfig $NAME_PREFIX-endpointcfg -n $NAMESPACE -kubectl delete Model $NAME_PREFIX-model -n $NAMESPACE -``` - -To delete the S3 bucket (including the sample data uploaded to it), run the following command: -``` -aws s3 rb s3://$S3_BUCKET_NAME --region $SAGEMAKER_REGION --force -``` - -## Reference -[Sample Notebook - Introduction to Amazon SageMaker Model Monitor](https://sagemaker-examples.readthedocs.io/en/latest/sagemaker_model_monitor/introduction/SageMaker-ModelMonitoring.html) \ No newline at end of file diff --git a/samples/contrib/aws-samples/hosting_model_monitor_pipeline/hosting_model_monitor_pipeline.py b/samples/contrib/aws-samples/hosting_model_monitor_pipeline/hosting_model_monitor_pipeline.py deleted file mode 100644 index 7109ed9abe3..00000000000 --- a/samples/contrib/aws-samples/hosting_model_monitor_pipeline/hosting_model_monitor_pipeline.py +++ /dev/null @@ -1,195 +0,0 @@ -#!/usr/bin/env python3 - -import kfp -import sagemaker -import os -from kfp import components -from kfp import dsl -from datetime import datetime - -sagemaker_Model_op = components.load_component_from_url( - "https://raw.githubusercontent.com/kubeflow/pipelines/master/components/aws/sagemaker/Modelv2/component.yaml" -) - -sagemaker_EndpointConfig_op = components.load_component_from_url( - "https://raw.githubusercontent.com/kubeflow/pipelines/master/components/aws/sagemaker/EndpointConfig/component.yaml" -) - -sagemaker_Endpoint_op = components.load_component_from_url( - "https://raw.githubusercontent.com/kubeflow/pipelines/master/components/aws/sagemaker/Endpoint/component.yaml" -) - -sagemaker_DataQualityJobDefinition_op =
components.load_component_from_url( - "https://raw.githubusercontent.com/kubeflow/pipelines/master/components/aws/sagemaker/DataQualityJobDefinition/component.yaml" -) - -sagemaker_MonitoringSchedule_op = components.load_component_from_url( - "https://raw.githubusercontent.com/kubeflow/pipelines/master/components/aws/sagemaker/MonitoringSchedule/component.yaml" -) - -# Fetch environment variables -SAGEMAKER_EXECUTION_ROLE_ARN = os.getenv("SAGEMAKER_EXECUTION_ROLE_ARN", "") -S3_BUCKET_NAME = os.getenv("S3_BUCKET_NAME", "") -SAGEMAKER_REGION = os.getenv("SAGEMAKER_REGION", "us-east-1") - -name_prefix = datetime.now().strftime("%Y-%m-%d-%H-%M-%S") - -# Parameters for Model -xgboost_image = sagemaker.image_uris.retrieve( - framework="xgboost", region=SAGEMAKER_REGION, version="0.90-1" -) -primary_container = { - "containerHostname": "xgboost", - "environment": {"my_env_key": "my_env_value"}, - "image": xgboost_image, - "mode": "SingleModel", - "modelDataURL": f"s3://{S3_BUCKET_NAME}/model-monitor/xgb-churn-prediction-model.tar.gz", -} - -# Parameters for EndpointConfig -data_capture_config = { - "enableCapture": True, - "captureOptions": [{"captureMode": "Input"}, {"captureMode": "Output"}], - "initialSamplingPercentage": 100, - "destinationS3URI": f"s3://{S3_BUCKET_NAME}/model-monitor/datacapture", -} - -# Parameters for DataQualityJobDefinition -model_monitor_image = sagemaker.image_uris.retrieve( - framework="model-monitor", region=SAGEMAKER_REGION -) -data_quality_app_specification = { - "imageURI": model_monitor_image, -} - -data_quality_baseline_config = { - "constraintsResource": { - "s3URI": f"s3://{S3_BUCKET_NAME}/model-monitor/baselining/data_quality/constraints.json", - }, - "statisticsResource": { - "s3URI": f"s3://{S3_BUCKET_NAME}/model-monitor/baselining/data_quality/statistics.json", - }, -} - -data_quality_job_output_config = { - "monitoringOutputs": [ - { - "s3Output": { - "localPath": "/opt/ml/processing/output", - "s3URI": f"s3://{S3_BUCKET_NAME}/model-monitor/reports/data-quality-job-definition/", - "s3UploadMode": "Continuous", - } - } - ] -} - -job_resources = { - "clusterConfig": { - "instanceCount": 1, - "instanceType": "ml.m5.large", - "volumeSizeInGB": 20, - } -} - -stopping_condition = {"maxRuntimeInSeconds": 1800} - - -@dsl.pipeline( - name="Hosting_Model_Monitoring", description="SageMaker Hosting and Model Monitor" -) -def Hosting_Model_Monitoring( - region=SAGEMAKER_REGION, - execution_role_arn=SAGEMAKER_EXECUTION_ROLE_ARN, - primary_container=primary_container, - data_capture_config=data_capture_config, - data_quality_app_specification=data_quality_app_specification, - data_quality_baseline_config=data_quality_baseline_config, - data_quality_job_output_config=data_quality_job_output_config, - job_resources=job_resources, - stopping_condition=stopping_condition, - model_name=name_prefix + "-model", - endpoint_config_name=name_prefix + "-endpointcfg", - endpoint_name=name_prefix + "-endpoint", - job_definition_name=name_prefix + "-data-qual-job-defi", - monitoring_schedule_name=name_prefix + "-monitoring-schedule", -): - Model = sagemaker_Model_op( - region=region, - execution_role_arn=execution_role_arn, - model_name=model_name, - primary_container=primary_container, - ) - - production_variants_ = [ - { - "initialInstanceCount": 1, - "initialVariantWeight": 1, - "instanceType": "ml.m5.large", - "modelName": Model.outputs["sagemaker_resource_name"], - "variantName": "instanceVariant", - "volumeSizeInGB": 10, - } - ] - - EndpointConfig = 
sagemaker_EndpointConfig_op( - region=region, - endpoint_config_name=endpoint_config_name, - data_capture_config=data_capture_config, - production_variants=production_variants_, - ) - - Endpoint = sagemaker_Endpoint_op( - region=region, - endpoint_config_name=EndpointConfig.outputs["sagemaker_resource_name"], - endpoint_name=endpoint_name, - ) - - data_quality_job_input = { - "endpointInput": { - "endpointName": Endpoint.outputs["sagemaker_resource_name"], - "localPath": "/opt/ml/processing/input/endpoint", - "s3DataDistributionType": "FullyReplicated", - "s3InputMode": "File", - } - } - - DataQualityJobDefinition = sagemaker_DataQualityJobDefinition_op( - region=region, - data_quality_app_specification=data_quality_app_specification, - data_quality_baseline_config=data_quality_baseline_config, - data_quality_job_input=data_quality_job_input, - data_quality_job_output_config=data_quality_job_output_config, - job_definition_name=job_definition_name, - job_resources=job_resources, - role_arn=SAGEMAKER_EXECUTION_ROLE_ARN, - stopping_condition=stopping_condition, - ) - - monitoring_schedule_config = { - "monitoringType": "DataQuality", - "scheduleConfig": {"scheduleExpression": "cron(0 * ? * * *)"}, - "monitoringJobDefinitionName": DataQualityJobDefinition.outputs[ - "sagemaker_resource_name" - ], - } - - MonitoringSchedule = sagemaker_MonitoringSchedule_op( - region=region, - monitoring_schedule_config=monitoring_schedule_config, - monitoring_schedule_name=monitoring_schedule_name, - ) - - -kfp.compiler.Compiler().compile(Hosting_Model_Monitoring, __file__ + ".tar.gz") -print("=================Pipeline compiled=================") -print("Name prefix: ", name_prefix) -print( - f"""To delete the resources created by this pipeline, run the following commands: - export NAMESPACE= # Change it to your Kubeflow name space - export NAME_PREFIX={name_prefix} - kubectl delete MonitoringSchedule $NAME_PREFIX-monitoring-schedule -n $NAMESPACE - kubectl delete DataQualityJobDefinition $NAME_PREFIX-data-qual-job-defi -n $NAMESPACE - kubectl delete Endpoint $NAME_PREFIX-endpoint -n $NAMESPACE - kubectl delete EndpointConfig $NAME_PREFIX-endpointcfg -n $NAMESPACE - kubectl delete Model $NAME_PREFIX-model -n $NAMESPACE""" -) diff --git a/samples/contrib/aws-samples/hosting_model_monitor_pipeline/model-monitor/baselining/data_quality/constraints.json b/samples/contrib/aws-samples/hosting_model_monitor_pipeline/model-monitor/baselining/data_quality/constraints.json deleted file mode 100644 index 12169c2fcb5..00000000000 --- a/samples/contrib/aws-samples/hosting_model_monitor_pipeline/model-monitor/baselining/data_quality/constraints.json +++ /dev/null @@ -1,505 +0,0 @@ -{ - "version" : 0.0, - "features" : [ { - "name" : "Churn", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "Account Length", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "VMail Message", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "Day Mins", - "inferred_type" : "Fractional", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "Day Calls", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "Eve Mins", - "inferred_type" : "Fractional", - "completeness" : 1.0, - "num_constraints" 
: { - "is_non_negative" : true - } - }, { - "name" : "Eve Calls", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "Night Mins", - "inferred_type" : "Fractional", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "Night Calls", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "Intl Mins", - "inferred_type" : "Fractional", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "Intl Calls", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "CustServ Calls", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_AK", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_AL", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_AR", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_AZ", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_CA", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_CO", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_CT", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_DC", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_DE", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_FL", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_GA", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_HI", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_IA", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_ID", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_IL", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_IN", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_KS", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_KY", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_LA", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - 
"is_non_negative" : true - } - }, { - "name" : "State_MA", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_MD", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_ME", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_MI", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_MN", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_MO", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_MS", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_MT", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_NC", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_ND", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_NE", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_NH", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_NJ", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_NM", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_NV", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_NY", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_OH", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_OK", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_OR", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_PA", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_RI", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_SC", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_SD", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_TN", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_TX", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } 
- }, { - "name" : "State_UT", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_VA", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_VT", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_WA", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_WI", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_WV", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "State_WY", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "Area Code_408", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "Area Code_415", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "Area Code_510", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "Int'l Plan_no", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "Int'l Plan_yes", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "VMail Plan_no", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - }, { - "name" : "VMail Plan_yes", - "inferred_type" : "Integral", - "completeness" : 1.0, - "num_constraints" : { - "is_non_negative" : true - } - } ], - "monitoring_config" : { - "evaluate_constraints" : "Enabled", - "emit_metrics" : "Enabled", - "datatype_check_threshold" : 1.0, - "domain_content_threshold" : 1.0, - "distribution_constraints" : { - "perform_comparison" : "Enabled", - "comparison_threshold" : 0.1, - "comparison_method" : "Robust" - } - } -} \ No newline at end of file diff --git a/samples/contrib/aws-samples/hosting_model_monitor_pipeline/model-monitor/baselining/data_quality/statistics.json b/samples/contrib/aws-samples/hosting_model_monitor_pipeline/model-monitor/baselining/data_quality/statistics.json deleted file mode 100644 index d21582be2ba..00000000000 --- a/samples/contrib/aws-samples/hosting_model_monitor_pipeline/model-monitor/baselining/data_quality/statistics.json +++ /dev/null @@ -1,4627 +0,0 @@ -{ - "version" : 0.0, - "dataset" : { - "item_count" : 2333 - }, - "features" : [ { - "name" : "Churn", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.1393056150878697, - "sum" : 325.0, - "std_dev" : 0.34626515951342846, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2008.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - 
"lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 325.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "Account Length", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - 
"num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 101.2768966995285, - "sum" : 236279.0, - "std_dev" : 39.552442167470566, - "min" : 1.0, - "max" : 243.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 1.0, - "upper_bound" : 25.2, - "count" : 70.0 - }, { - "lower_bound" : 25.2, - "upper_bound" : 49.4, - "count" : 150.0 - }, { - "lower_bound" : 49.4, - "upper_bound" : 73.6, - "count" : 353.0 - }, { - "lower_bound" : 73.6, - "upper_bound" : 97.8, - "count" : 518.0 - }, { - "lower_bound" : 97.8, - "upper_bound" : 122.0, - "count" : 538.0 - }, { - "lower_bound" : 122.0, - "upper_bound" : 146.2, - "count" : 401.0 - }, { - "lower_bound" : 146.2, - "upper_bound" : 170.4, - "count" : 208.0 - }, { - "lower_bound" : 170.4, - "upper_bound" : 194.6, - "count" : 72.0 - }, { - "lower_bound" : 194.6, - "upper_bound" : 218.8, - "count" : 19.0 - }, { - "lower_bound" : 218.8, - "upper_bound" : 243.0, - "count" : 4.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 119.0, 100.0, 111.0, 181.0, 95.0, 104.0, 70.0, 120.0, 88.0, 111.0, 33.0, 106.0, 54.0, 87.0, 94.0, 135.0, 107.0, 159.0, 106.0, 136.0, 116.0, 115.0, 103.0, 95.0, 115.0, 143.0, 48.0, 94.0, 153.0, 94.0, 107.0, 91.0, 141.0, 58.0, 49.0, 41.0, 137.0, 111.0, 71.0, 43.0, 97.0, 3.0, 124.0, 86.0, 87.0, 83.0, 67.0, 46.0, 129.0, 90.0, 97.0, 87.0, 141.0, 136.0, 88.0, 170.0, 44.0, 121.0, 111.0, 105.0, 112.0, 73.0, 147.0, 66.0, 136.0, 119.0, 135.0, 102.0, 169.0, 60.0, 73.0, 83.0, 90.0, 148.0, 59.0, 152.0, 136.0, 112.0, 122.0, 44.0, 122.0, 89.0, 176.0, 64.0, 112.0, 133.0, 52.0, 91.0, 127.0, 153.0, 117.0, 163.0, 76.0, 80.0, 136.0, 91.0, 143.0, 125.0, 126.0, 87.0, 119.0, 13.0, 138.0, 159.0, 111.0, 46.0, 68.0, 107.0, 70.0, 215.0, 22.0, 122.0, 73.0, 75.0, 87.0, 148.0, 105.0, 182.0, 139.0, 105.0, 166.0, 60.0, 76.0, 28.0, 94.0, 146.0, 101.0, 132.0, 93.0, 105.0, 100.0, 134.0, 63.0, 126.0, 166.0, 160.0, 162.0, 70.0, 116.0, 75.0, 74.0, 115.0, 42.0, 132.0, 171.0, 135.0, 99.0, 27.0, 139.0, 76.0, 123.0, 54.0, 70.0, 163.0, 96.0, 62.0, 115.0, 97.0, 137.0, 82.0, 118.0, 64.0, 186.0, 117.0, 117.0, 116.0, 164.0, 103.0, 137.0, 97.0, 144.0, 96.0, 183.0, 42.0, 100.0, 131.0, 88.0, 91.0, 104.0, 63.0, 159.0, 147.0, 123.0, 100.0, 105.0, 163.0, 90.0, 125.0, 64.0, 113.0, 101.0, 123.0, 212.0, 73.0, 44.0, 96.0, 74.0, 77.0, 120.0, 122.0, 87.0, 52.0, 48.0, 61.0, 141.0, 170.0, 17.0, 162.0, 85.0, 160.0, 29.0, 91.0, 96.0, 104.0, 95.0, 84.0, 157.0, 165.0, 57.0, 95.0, 51.0, 97.0, 13.0, 50.0, 46.0, 121.0, 68.0, 72.0, 82.0, 38.0, 41.0, 96.0, 129.0, 31.0, 122.0, 51.0, 109.0, 161.0, 72.0, 65.0, 129.0, 137.0, 48.0, 134.0, 125.0, 153.0, 103.0, 45.0, 80.0, 57.0, 94.0, 59.0, 72.0, 62.0, 155.0, 96.0, 77.0, 58.0, 134.0, 24.0, 158.0, 89.0, 138.0, 61.0, 123.0, 87.0, 74.0, 37.0, 105.0, 56.0, 64.0, 202.0, 91.0, 120.0, 89.0, 95.0, 92.0, 45.0, 106.0, 125.0, 129.0, 159.0, 99.0 ], [ 1.0, 1.0, 1.0, 1.0, 3.0, 3.0, 5.0, 6.0, 7.0, 9.0, 9.0, 10.0, 11.0, 13.0, 13.0, 13.0, 15.0, 16.0, 16.0, 16.0, 17.0, 19.0, 19.0, 20.0, 21.0, 21.0, 21.0, 22.0, 23.0, 24.0, 24.0, 25.0, 26.0, 27.0, 27.0, 28.0, 28.0, 29.0, 30.0, 31.0, 31.0, 32.0, 32.0, 32.0, 33.0, 33.0, 34.0, 35.0, 35.0, 35.0, 36.0, 36.0, 36.0, 36.0, 37.0, 37.0, 37.0, 38.0, 38.0, 39.0, 39.0, 39.0, 40.0, 40.0, 40.0, 40.0, 41.0, 41.0, 41.0, 41.0, 42.0, 42.0, 43.0, 43.0, 43.0, 44.0, 44.0, 44.0, 45.0, 45.0, 45.0, 45.0, 46.0, 46.0, 46.0, 46.0, 47.0, 47.0, 47.0, 48.0, 48.0, 48.0, 48.0, 49.0, 49.0, 50.0, 50.0, 51.0, 51.0, 51.0, 51.0, 52.0, 52.0, 52.0, 52.0, 52.0, 53.0, 53.0, 53.0, 53.0, 54.0, 54.0, 54.0, 54.0, 
54.0, 55.0, 55.0, 55.0, 55.0, 55.0, 55.0, 55.0, 56.0, 56.0, 56.0, 56.0, 57.0, 57.0, 57.0, 57.0, 57.0, 57.0, 58.0, 58.0, 58.0, 58.0, 58.0, 59.0, 59.0, 59.0, 59.0, 59.0, 60.0, 60.0, 60.0, 60.0, 60.0, 60.0, 60.0, 61.0, 61.0, 61.0, 61.0, 61.0, 61.0, 62.0, 62.0, 62.0, 62.0, 62.0, 62.0, 62.0, 63.0, 63.0, 63.0, 63.0, 63.0, 63.0, 63.0, 63.0, 64.0, 64.0, 64.0, 64.0, 64.0, 64.0, 64.0, 64.0, 65.0, 65.0, 65.0, 65.0, 65.0, 65.0, 65.0, 65.0, 66.0, 66.0, 66.0, 66.0, 66.0, 66.0, 66.0, 67.0, 67.0, 67.0, 67.0, 67.0, 67.0, 67.0, 67.0, 67.0, 67.0, 67.0, 68.0, 68.0, 68.0, 68.0, 68.0, 68.0, 68.0, 68.0, 68.0, 68.0, 69.0, 69.0, 69.0, 69.0, 69.0, 69.0, 70.0, 70.0, 70.0, 70.0, 70.0, 70.0, 70.0, 70.0, 71.0, 71.0, 71.0, 71.0, 71.0, 71.0, 71.0, 72.0, 72.0, 72.0, 72.0, 72.0, 73.0, 73.0, 73.0, 73.0, 73.0, 73.0, 73.0, 73.0, 73.0, 73.0, 74.0, 74.0, 74.0, 74.0, 74.0, 74.0, 74.0, 74.0, 74.0, 74.0, 75.0, 75.0, 75.0, 75.0, 75.0, 75.0, 75.0, 75.0, 75.0, 75.0, 76.0, 76.0, 76.0, 76.0, 76.0, 76.0, 76.0, 76.0, 77.0, 77.0, 77.0, 77.0, 77.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 81.0, 81.0, 81.0, 81.0, 81.0, 81.0, 81.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 
117.0, 117.0, 117.0, 117.0, 117.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 122.0, 122.0, 122.0, 122.0, 122.0, 122.0, 122.0, 122.0, 122.0, 122.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 124.0, 124.0, 124.0, 124.0, 124.0, 124.0, 124.0, 124.0, 124.0, 124.0, 125.0, 125.0, 125.0, 125.0, 125.0, 125.0, 125.0, 125.0, 126.0, 126.0, 126.0, 126.0, 126.0, 126.0, 126.0, 127.0, 127.0, 127.0, 127.0, 127.0, 127.0, 127.0, 127.0, 127.0, 127.0, 127.0, 127.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 129.0, 129.0, 129.0, 129.0, 129.0, 129.0, 129.0, 130.0, 130.0, 130.0, 130.0, 130.0, 130.0, 130.0, 131.0, 131.0, 131.0, 131.0, 131.0, 132.0, 132.0, 132.0, 132.0, 132.0, 132.0, 132.0, 132.0, 132.0, 133.0, 133.0, 133.0, 133.0, 133.0, 133.0, 134.0, 134.0, 134.0, 134.0, 135.0, 135.0, 135.0, 135.0, 136.0, 136.0, 136.0, 136.0, 136.0, 136.0, 136.0, 137.0, 137.0, 137.0, 137.0, 137.0, 137.0, 137.0, 138.0, 138.0, 138.0, 138.0, 138.0, 138.0, 138.0, 139.0, 139.0, 139.0, 139.0, 139.0, 139.0, 139.0, 140.0, 140.0, 140.0, 140.0, 140.0, 141.0, 141.0, 141.0, 141.0, 141.0, 141.0, 141.0, 141.0, 142.0, 142.0, 142.0, 142.0, 142.0, 143.0, 143.0, 143.0, 143.0, 143.0, 144.0, 144.0, 144.0, 144.0, 144.0, 144.0, 145.0, 145.0, 145.0, 145.0, 145.0, 145.0, 146.0, 146.0, 146.0, 146.0, 146.0, 146.0, 147.0, 147.0, 147.0, 147.0, 147.0, 148.0, 148.0, 148.0, 148.0, 148.0, 148.0, 148.0, 148.0, 148.0, 148.0, 149.0, 149.0, 149.0, 149.0, 149.0, 149.0, 149.0, 150.0, 150.0, 150.0, 150.0, 151.0, 151.0, 151.0, 151.0, 151.0, 152.0, 152.0, 153.0, 153.0, 153.0, 154.0, 154.0, 154.0, 154.0, 155.0, 155.0, 155.0, 155.0, 155.0, 155.0, 156.0, 156.0, 156.0, 157.0, 157.0, 157.0, 157.0, 158.0, 158.0, 158.0, 159.0, 159.0, 159.0, 160.0, 160.0, 160.0, 160.0, 161.0, 161.0, 161.0, 161.0, 161.0, 162.0, 162.0, 163.0, 163.0, 163.0, 164.0, 164.0, 165.0, 165.0, 165.0, 166.0, 166.0, 166.0, 166.0, 167.0, 168.0, 168.0, 169.0, 169.0, 170.0, 170.0, 172.0, 172.0, 172.0, 173.0, 173.0, 174.0, 174.0, 174.0, 176.0, 176.0, 177.0, 177.0, 177.0, 179.0, 179.0, 180.0, 180.0, 181.0, 181.0, 182.0, 183.0, 183.0, 184.0, 184.0, 185.0, 185.0, 189.0, 189.0, 190.0, 190.0, 192.0, 193.0, 193.0, 195.0, 197.0, 201.0, 204.0, 205.0, 209.0, 210.0, 217.0, 224.0, 225.0 ] ] - } - } - } - } - }, { - "name" : "VMail Message", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 8.214316330904415, - "sum" : 19164.0, - "std_dev" : 13.776907846587017, - "min" : 0.0, - "max" : 51.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 5.1, - "count" : 1684.0 - }, { - "lower_bound" : 5.1, - "upper_bound" : 10.2, - "count" : 2.0 - }, { - "lower_bound" : 10.2, - "upper_bound" : 15.3, - "count" : 15.0 - }, { - "lower_bound" : 15.3, - "upper_bound" : 20.4, - "count" : 52.0 - }, { - "lower_bound" : 20.4, - "upper_bound" : 25.5, - "count" : 127.0 - }, { - "lower_bound" : 25.5, - "upper_bound" : 30.6, - "count" : 171.0 - }, { - "lower_bound" : 30.6, - "upper_bound" : 35.7, - "count" : 135.0 - }, { - "lower_bound" : 35.7, - "upper_bound" : 40.8, - "count" : 106.0 - }, { - "lower_bound" : 40.8, - "upper_bound" : 45.9, - "count" : 32.0 - }, { - "lower_bound" : 45.9, - "upper_bound" : 51.0, - "count" : 9.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" 
: 2048.0 - }, - "data" : [ [ 19.0, 0.0, 0.0, 40.0, 36.0, 0.0, 0.0, 24.0, 0.0, 0.0, 35.0, 0.0, 0.0, 0.0, 0.0, 41.0, 0.0, 0.0, 0.0, 24.0, 0.0, 33.0, 0.0, 37.0, 0.0, 0.0, 43.0, 0.0, 31.0, 28.0, 0.0, 37.0, 0.0, 0.0, 28.0, 34.0, 0.0, 0.0, 0.0, 35.0, 0.0, 36.0, 0.0, 29.0, 0.0, 30.0, 35.0, 0.0, 33.0, 0.0, 0.0, 0.0, 37.0, 0.0, 0.0, 0.0, 0.0, 24.0, 0.0, 24.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 26.0, 0.0, 0.0, 29.0, 20.0, 33.0, 0.0, 0.0, 0.0, 23.0, 0.0, 0.0, 22.0, 0.0, 32.0, 0.0, 34.0, 0.0, 22.0, 0.0, 0.0, 0.0, 0.0, 27.0, 0.0, 0.0, 29.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 23.0, 0.0, 28.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 19.0, 16.0, 0.0, 0.0, 0.0, 38.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 19.0, 46.0, 0.0, 16.0, 0.0, 0.0, 0.0, 22.0, 0.0, 0.0, 0.0, 0.0, 0.0, 39.0, 0.0, 25.0, 33.0, 0.0, 0.0, 0.0, 19.0, 0.0, 36.0, 0.0, 26.0, 0.0, 0.0, 35.0, 30.0, 0.0, 50.0, 28.0, 0.0, 26.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 49.0, 15.0, 0.0, 0.0, 27.0, 0.0, 23.0, 0.0, 0.0, 0.0, 0.0, 21.0, 0.0, 0.0, 0.0, 0.0, 26.0, 33.0, 0.0, 0.0, 0.0, 0.0, 24.0, 36.0, 40.0, 0.0, 0.0, 35.0, 0.0, 0.0, 0.0, 37.0, 0.0, 0.0, 0.0, 27.0, 0.0, 0.0, 0.0, 30.0, 0.0, 28.0, 0.0, 0.0, 0.0, 0.0, 35.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 28.0, 0.0, 0.0, 0.0, 38.0, 0.0, 29.0, 31.0, 0.0, 34.0, 0.0, 0.0, 0.0, 24.0, 0.0, 38.0, 0.0, 0.0, 0.0, 0.0, 0.0, 30.0, 21.0, 0.0, 30.0, 32.0, 0.0, 0.0, 0.0, 17.0, 27.0, 0.0, 0.0, 0.0, 0.0, 31.0, 0.0, 31.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 33.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 9.0, 12.0, 13.0, 14.0, 14.0, 14.0, 15.0, 15.0, 16.0, 16.0, 16.0, 16.0, 17.0, 17.0, 17.0, 18.0, 18.0, 18.0, 19.0, 19.0, 19.0, 19.0, 19.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 23.0, 23.0, 23.0, 23.0, 23.0, 23.0, 23.0, 23.0, 23.0, 23.0, 23.0, 23.0, 23.0, 24.0, 24.0, 24.0, 24.0, 24.0, 24.0, 24.0, 24.0, 24.0, 24.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 29.0, 29.0, 29.0, 29.0, 29.0, 29.0, 29.0, 29.0, 29.0, 29.0, 29.0, 29.0, 29.0, 29.0, 29.0, 29.0, 29.0, 30.0, 30.0, 30.0, 30.0, 30.0, 30.0, 30.0, 30.0, 30.0, 30.0, 30.0, 30.0, 30.0, 30.0, 30.0, 31.0, 31.0, 31.0, 31.0, 31.0, 31.0, 31.0, 31.0, 31.0, 31.0, 31.0, 31.0, 31.0, 31.0, 31.0, 31.0, 31.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 32.0, 33.0, 33.0, 33.0, 33.0, 33.0, 33.0, 33.0, 33.0, 33.0, 33.0, 33.0, 34.0, 34.0, 34.0, 34.0, 34.0, 34.0, 34.0, 34.0, 34.0, 35.0, 35.0, 35.0, 35.0, 35.0, 35.0, 35.0, 36.0, 36.0, 36.0, 36.0, 36.0, 36.0, 36.0, 36.0, 36.0, 36.0, 36.0, 36.0, 37.0, 37.0, 37.0, 37.0, 37.0, 37.0, 37.0, 37.0, 37.0, 38.0, 38.0, 38.0, 38.0, 38.0, 38.0, 38.0, 38.0, 38.0, 39.0, 39.0, 39.0, 39.0, 39.0, 39.0, 39.0, 39.0, 39.0, 39.0, 40.0, 40.0, 40.0, 40.0, 40.0, 40.0, 41.0, 41.0, 41.0, 42.0, 42.0, 42.0, 42.0, 42.0, 43.0, 43.0, 43.0, 44.0, 44.0, 45.0, 45.0, 46.0, 47.0, 50.0 ] ] - } - } - } - } - }, { - "name" : "Day Mins", - "inferred_type" : "Fractional", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 
0 - }, - "mean" : 180.22648949849963, - "sum" : 420468.3999999996, - "std_dev" : 53.987178959901556, - "min" : 0.0, - "max" : 350.8, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 35.08, - "count" : 14.0 - }, { - "lower_bound" : 35.08, - "upper_bound" : 70.16, - "count" : 48.0 - }, { - "lower_bound" : 70.16, - "upper_bound" : 105.24000000000001, - "count" : 130.0 - }, { - "lower_bound" : 105.24000000000001, - "upper_bound" : 140.32, - "count" : 318.0 - }, { - "lower_bound" : 140.32, - "upper_bound" : 175.4, - "count" : 565.0 - }, { - "lower_bound" : 175.4, - "upper_bound" : 210.48000000000002, - "count" : 587.0 - }, { - "lower_bound" : 210.48000000000002, - "upper_bound" : 245.56, - "count" : 423.0 - }, { - "lower_bound" : 245.56, - "upper_bound" : 280.64, - "count" : 180.0 - }, { - "lower_bound" : 280.64, - "upper_bound" : 315.72, - "count" : 58.0 - }, { - "lower_bound" : 315.72, - "upper_bound" : 350.8, - "count" : 10.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 178.1, 160.3, 197.1, 105.2, 283.1, 113.6, 232.1, 212.7, 73.3, 176.9, 161.9, 128.6, 190.5, 223.2, 157.9, 173.1, 273.5, 275.8, 119.2, 174.6, 133.3, 145.0, 150.6, 220.2, 109.7, 155.4, 172.0, 235.6, 218.5, 92.7, 90.7, 162.3, 146.5, 210.1, 214.4, 194.4, 237.3, 255.9, 197.9, 200.2, 120.8, 118.1, 131.8, 225.4, 205.2, 272.5, 181.1, 122.2, 119.6, 109.6, 112.7, 136.3, 185.4, 199.6, 218.2, 259.9, 143.2, 218.2, 249.8, 274.7, 167.8, 182.3, 157.0, 207.7, 250.2, 81.9, 246.8, 103.1, 147.2, 252.7, 192.2, 226.4, 145.5, 178.3, 133.1, 214.6, 203.9, 185.4, 140.0, 240.3, 134.2, 141.1, 47.4, 200.4, 167.6, 221.1, 165.5, 175.3, 146.7, 167.7, 184.5, 202.9, 273.3, 194.8, 187.7, 133.7, 209.1, 260.8, 211.6, 156.8, 109.2, 303.2, 240.8, 167.4, 110.4, 90.4, 162.1, 212.1, 214.8, 83.6, 182.1, 170.5, 198.2, 143.2, 184.5, 185.2, 156.5, 69.1, 139.0, 101.4, 274.3, 220.6, 107.3, 121.7, 163.5, 176.6, 118.9, 240.1, 179.3, 246.4, 177.1, 258.8, 211.8, 226.2, 220.7, 166.4, 115.1, 213.4, 155.7, 214.1, 200.4, 133.3, 155.4, 195.1, 189.8, 197.1, 217.2, 236.7, 192.8, 224.4, 114.4, 206.9, 134.7, 219.6, 183.3, 281.0, 147.9, 144.2, 175.3, 199.3, 294.9, 97.2, 74.3, 161.6, 181.5, 118.0, 238.8, 246.5, 186.5, 202.3, 201.1, 145.8, 116.7, 303.9, 107.2, 211.8, 113.7, 149.0, 118.5, 214.9, 113.9, 225.2, 172.2, 221.7, 150.0, 160.0, 142.4, 182.3, 219.6, 92.6, 238.0, 224.0, 226.0, 224.4, 204.6, 175.8, 193.7, 169.4, 252.1, 173.6, 169.1, 170.9, 230.9, 105.0, 215.6, 285.7, 198.5, 220.6, 96.7, 97.5, 235.0, 109.8, 197.7, 264.0, 129.5, 159.0, 276.2, 207.7, 234.5, 167.6, 276.7, 146.0, 220.4, 131.7, 250.3, 68.7, 207.6, 118.2, 208.8, 137.8, 209.9, 179.5, 216.0, 210.5, 234.1, 181.5, 222.5, 240.4, 109.1, 158.1, 193.0, 205.9, 198.0, 244.1, 240.7, 122.5, 111.8, 155.7, 236.6, 149.3, 181.3, 151.8, 139.9, 248.7, 61.6, 247.6, 185.9, 178.1, 80.3, 235.6, 172.4, 178.7, 225.2, 187.5, 204.4, 134.2, 262.3, 191.1, 109.6, 197.0, 228.6, 115.4, 147.2, 198.8, 129.2, 238.1, 208.0, 211.3, 194.8, 143.2, 143.7, 198.8, 179.1 ], [ 0.0, 2.6, 7.9, 17.6, 19.5, 27.0, 34.0, 37.8, 40.9, 45.0, 47.8, 49.9, 51.1, 51.8, 54.7, 55.3, 55.6, 57.5, 58.4, 58.9, 60.0, 61.3, 61.9, 62.4, 62.8, 62.9, 64.9, 67.4, 68.5, 70.7, 70.9, 72.5, 72.8, 75.8, 77.6, 78.6, 80.3, 81.3, 81.7, 82.3, 82.5, 82.6, 83.2, 83.8, 84.8, 85.7, 85.9, 86.0, 86.3, 87.2, 87.7, 88.5, 89.5, 89.7, 90.0, 91.5, 92.3, 92.8, 93.4, 93.8, 94.7, 95.0, 95.5, 95.9, 96.3, 96.8, 97.5, 98.0, 98.2, 98.4, 99.4, 99.9, 100.8, 101.1, 101.7, 102.1, 102.6, 102.8, 103.2, 103.4, 103.5, 103.7, 104.6, 104.7, 
104.9, 105.0, 105.3, 105.8, 105.9, 106.4, 106.7, 107.5, 107.8, 108.3, 108.6, 109.1, 109.4, 109.5, 110.1, 110.5, 111.1, 111.6, 111.9, 112.8, 113.0, 113.3, 114.3, 114.3, 114.8, 115.0, 115.4, 115.4, 115.5, 115.5, 115.6, 115.8, 115.9, 116.2, 116.9, 117.5, 117.6, 117.9, 118.1, 118.4, 119.0, 119.2, 119.3, 119.7, 120.5, 120.7, 120.9, 121.1, 121.7, 122.0, 122.9, 123.1, 123.2, 123.7, 123.7, 124.0, 124.1, 124.3, 124.3, 124.4, 124.5, 124.7, 124.8, 125.0, 125.2, 125.4, 125.5, 125.7, 126.0, 126.1, 126.3, 126.7, 126.9, 127.3, 127.8, 128.2, 128.3, 128.5, 128.7, 128.8, 129.0, 129.3, 129.4, 129.5, 129.7, 129.9, 130.1, 130.2, 130.5, 130.9, 131.1, 131.6, 131.9, 132.0, 132.1, 132.4, 133.1, 133.3, 133.3, 133.5, 133.8, 134.0, 134.2, 134.3, 134.4, 134.7, 134.8, 134.9, 135.0, 135.1, 135.2, 135.4, 135.8, 135.9, 136.1, 136.1, 136.4, 136.7, 136.8, 137.0, 137.1, 137.4, 137.5, 137.9, 138.1, 138.3, 138.5, 138.7, 138.9, 139.0, 139.2, 139.3, 139.4, 139.6, 139.7, 139.8, 140.1, 140.1, 140.5, 140.7, 141.1, 141.2, 141.4, 141.6, 141.7, 142.0, 142.1, 142.3, 142.3, 142.4, 142.5, 142.5, 142.8, 142.9, 143.3, 143.3, 143.4, 143.5, 143.6, 143.7, 143.7, 143.9, 144.0, 144.2, 144.4, 144.5, 144.6, 144.8, 144.9, 145.0, 145.3, 145.5, 145.6, 145.9, 146.2, 146.3, 146.3, 146.3, 146.4, 146.4, 146.5, 146.6, 146.7, 146.8, 147.0, 147.0, 147.1, 147.2, 147.5, 147.7, 147.9, 148.1, 148.2, 148.2, 148.4, 148.5, 148.5, 148.6, 148.7, 149.0, 149.2, 149.3, 149.4, 149.4, 149.7, 149.7, 149.8, 149.9, 150.0, 150.1, 150.4, 150.5, 150.6, 150.7, 151.0, 151.1, 151.5, 151.5, 151.7, 151.8, 152.0, 152.1, 152.4, 152.6, 152.9, 153.1, 153.2, 153.2, 153.5, 153.5, 153.5, 153.6, 153.7, 153.8, 154.0, 154.0, 154.0, 154.1, 154.2, 154.3, 154.4, 154.5, 154.5, 154.6, 154.6, 154.7, 154.8, 155.0, 155.0, 155.2, 155.3, 155.3, 155.5, 156.0, 156.1, 156.2, 156.4, 156.6, 156.7, 157.0, 157.1, 157.2, 157.3, 157.4, 157.6, 157.6, 157.7, 157.8, 158.0, 158.0, 158.4, 158.6, 158.6, 158.7, 158.7, 158.8, 159.0, 159.1, 159.3, 159.5, 159.7, 159.7, 159.8, 159.9, 160.0, 160.1, 160.1, 160.2, 160.4, 160.4, 160.6, 160.9, 161.1, 161.3, 161.5, 161.5, 161.7, 161.9, 162.0, 162.1, 162.3, 162.4, 162.6, 162.8, 163.0, 163.3, 163.5, 163.6, 163.7, 163.8, 164.1, 164.2, 164.5, 164.8, 164.9, 165.0, 165.3, 165.4, 165.4, 165.8, 165.9, 166.0, 166.1, 166.2, 166.5, 166.5, 166.6, 166.8, 166.9, 166.9, 167.1, 167.3, 167.4, 167.5, 167.8, 167.8, 167.9, 168.0, 168.3, 168.4, 168.6, 168.6, 168.6, 168.8, 169.2, 169.2, 169.3, 169.3, 169.5, 169.5, 169.6, 169.7, 169.8, 169.9, 169.9, 170.2, 170.5, 170.5, 170.6, 170.7, 170.7, 170.9, 171.2, 171.2, 171.5, 171.6, 171.7, 171.7, 171.8, 172.1, 172.3, 172.5, 172.7, 172.8, 173.0, 173.0, 173.1, 173.2, 173.5, 173.9, 174.1, 174.3, 174.4, 174.4, 174.5, 174.5, 174.7, 174.7, 174.8, 174.9, 175.2, 175.3, 175.4, 175.4, 175.5, 175.5, 175.7, 175.7, 175.8, 175.9, 176.0, 176.2, 176.2, 176.3, 176.4, 176.6, 176.8, 176.9, 177.2, 177.2, 177.3, 177.5, 177.9, 178.2, 178.3, 178.4, 178.7, 178.8, 178.8, 179.1, 179.2, 179.2, 179.3, 179.4, 179.4, 179.5, 179.9, 180.0, 180.0, 180.5, 180.7, 180.7, 180.9, 181.1, 181.4, 181.5, 181.5, 181.6, 181.8, 182.0, 182.1, 182.1, 182.3, 182.6, 182.8, 183.0, 183.1, 183.1, 183.2, 183.3, 183.4, 183.4, 183.6, 183.8, 183.9, 184.0, 184.1, 184.2, 184.5, 184.6, 184.8, 185.0, 185.0, 185.1, 185.3, 185.3, 185.6, 186.0, 186.0, 186.1, 186.2, 186.4, 186.7, 186.8, 187.1, 187.2, 187.3, 187.4, 187.5, 187.7, 187.8, 187.8, 187.9, 188.0, 188.5, 188.8, 188.9, 189.1, 189.2, 189.3, 189.3, 189.5, 189.6, 189.8, 189.8, 190.0, 190.1, 190.2, 190.3, 190.3, 190.4, 190.4, 190.5, 190.7, 190.8, 190.9, 191.0, 
191.1, 191.3, 191.3, 191.4, 191.4, 191.9, 191.9, 192.0, 192.1, 192.3, 192.3, 192.6, 192.6, 193.0, 193.0, 193.2, 193.3, 193.4, 193.6, 193.7, 193.8, 193.9, 194.0, 194.2, 194.3, 194.4, 194.6, 194.8, 194.8, 194.9, 195.0, 195.3, 195.4, 195.5, 195.7, 195.9, 196.0, 196.1, 196.1, 196.4, 196.5, 196.6, 196.6, 196.7, 196.8, 197.0, 197.1, 197.2, 197.3, 197.4, 197.6, 197.7, 197.8, 197.9, 198.1, 198.2, 198.3, 198.4, 198.4, 198.6, 198.7, 198.8, 198.9, 199.1, 199.2, 199.3, 199.6, 200.0, 200.2, 200.3, 200.3, 200.6, 201.1, 201.3, 201.3, 201.4, 201.5, 201.8, 201.9, 201.9, 202.0, 202.1, 202.4, 202.6, 202.7, 202.9, 203.1, 203.2, 203.3, 203.3, 203.4, 203.4, 203.5, 203.5, 203.7, 203.9, 204.0, 204.2, 204.4, 204.5, 204.5, 204.7, 204.9, 205.0, 205.1, 205.2, 205.2, 205.4, 205.7, 205.7, 205.9, 206.0, 206.2, 206.2, 206.2, 206.3, 206.5, 206.7, 206.9, 206.9, 207.0, 207.0, 207.2, 207.3, 207.6, 207.6, 207.7, 207.8, 207.9, 208.0, 208.3, 208.7, 208.8, 208.9, 209.1, 209.2, 209.4, 209.4, 209.7, 209.8, 209.9, 210.2, 210.3, 210.5, 210.6, 210.7, 210.8, 211.0, 211.1, 211.1, 211.2, 211.3, 211.7, 211.9, 212.0, 212.1, 212.4, 212.8, 213.0, 213.0, 213.1, 213.4, 213.5, 213.6, 213.8, 214.0, 214.1, 214.2, 214.2, 214.3, 214.3, 214.6, 214.7, 214.9, 215.1, 215.4, 215.5, 215.6, 215.8, 215.9, 215.9, 216.0, 216.0, 216.2, 216.4, 216.7, 216.8, 216.9, 217.1, 217.2, 217.2, 217.5, 217.8, 218.0, 218.5, 218.7, 218.8, 218.9, 219.2, 219.4, 219.9, 220.0, 220.1, 220.2, 220.3, 220.7, 220.8, 220.9, 221.0, 221.1, 221.1, 221.3, 221.4, 221.6, 221.8, 221.9, 222.1, 222.3, 222.4, 222.6, 222.7, 222.8, 223.0, 223.2, 223.5, 223.9, 224.0, 224.2, 224.5, 224.7, 224.9, 225.0, 225.2, 225.3, 225.5, 225.9, 226.2, 226.5, 226.9, 227.1, 227.2, 227.4, 227.4, 227.5, 227.8, 228.1, 228.4, 228.7, 228.9, 229.3, 229.6, 229.9, 230.1, 230.2, 230.2, 230.4, 230.6, 230.7, 230.9, 231.0, 231.3, 231.8, 231.9, 232.1, 232.4, 232.5, 232.6, 232.8, 233.2, 233.5, 233.8, 234.2, 234.4, 234.5, 234.8, 234.9, 235.1, 235.5, 235.6, 235.7, 236.2, 236.2, 236.4, 236.5, 236.8, 236.8, 237.1, 237.7, 237.8, 238.0, 238.9, 239.1, 239.2, 239.2, 239.5, 239.7, 239.8, 239.8, 239.9, 239.9, 240.2, 240.3, 240.8, 241.1, 241.2, 241.7, 241.8, 241.9, 242.2, 242.4, 242.5, 242.9, 243.0, 243.1, 243.4, 243.4, 243.7, 243.9, 244.3, 244.8, 244.9, 245.0, 245.2, 245.3, 245.5, 245.7, 245.8, 246.2, 246.4, 246.8, 247.2, 247.5, 247.8, 248.6, 248.9, 249.4, 249.6, 249.9, 250.9, 251.4, 251.5, 251.6, 252.3, 252.4, 252.9, 253.0, 253.2, 253.4, 254.1, 254.4, 254.7, 254.9, 255.8, 256.0, 256.5, 256.7, 257.2, 257.7, 258.0, 259.4, 259.8, 260.5, 261.3, 261.4, 261.7, 261.9, 262.2, 262.9, 263.8, 264.4, 265.1, 265.9, 266.0, 266.3, 266.7, 267.4, 268.4, 268.7, 269.0, 269.8, 270.3, 270.7, 271.1, 271.2, 271.5, 271.6, 271.8, 272.4, 272.6, 272.7, 273.2, 273.6, 274.0, 274.6, 275.2, 276.5, 277.0, 277.9, 278.5, 279.1, 279.8, 280.0, 280.4, 281.1, 281.3, 282.5, 283.2, 283.9, 285.7, 286.4, 287.1, 287.4, 288.0, 288.5, 289.5, 290.4, 291.8, 293.3, 295.0, 298.1, 299.5, 301.5, 305.1, 308.0, 309.9, 312.0, 313.2, 314.1, 315.6, 322.3, 322.4, 326.3, 335.5, 345.3 ] ] - } - } - } - } - }, { - "name" : "Day Calls", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 100.25932276039434, - "sum" : 233905.0, - "std_dev" : 20.165008436664074, - "min" : 0.0, - "max" : 165.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 16.5, - "count" : 2.0 - }, { - "lower_bound" : 16.5, - "upper_bound" : 33.0, - "count" : 0.0 - }, { - "lower_bound" : 33.0, - 
"upper_bound" : 49.5, - "count" : 14.0 - }, { - "lower_bound" : 49.5, - "upper_bound" : 66.0, - "count" : 80.0 - }, { - "lower_bound" : 66.0, - "upper_bound" : 82.5, - "count" : 344.0 - }, { - "lower_bound" : 82.5, - "upper_bound" : 99.0, - "count" : 636.0 - }, { - "lower_bound" : 99.0, - "upper_bound" : 115.5, - "count" : 737.0 - }, { - "lower_bound" : 115.5, - "upper_bound" : 132.0, - "count" : 377.0 - }, { - "lower_bound" : 132.0, - "upper_bound" : 148.5, - "count" : 127.0 - }, { - "lower_bound" : 148.5, - "upper_bound" : 165.0, - "count" : 16.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 110.0, 138.0, 117.0, 61.0, 112.0, 87.0, 122.0, 73.0, 86.0, 128.0, 85.0, 83.0, 108.0, 109.0, 105.0, 85.0, 104.0, 103.0, 142.0, 76.0, 94.0, 72.0, 125.0, 109.0, 148.0, 112.0, 111.0, 131.0, 130.0, 107.0, 90.0, 107.0, 121.0, 126.0, 78.0, 63.0, 103.0, 97.0, 108.0, 105.0, 96.0, 117.0, 82.0, 79.0, 106.0, 105.0, 59.0, 67.0, 104.0, 88.0, 119.0, 97.0, 87.0, 89.0, 76.0, 68.0, 77.0, 88.0, 109.0, 99.0, 88.0, 115.0, 79.0, 85.0, 121.0, 75.0, 129.0, 70.0, 115.0, 97.0, 86.0, 117.0, 92.0, 98.0, 114.0, 108.0, 106.0, 114.0, 101.0, 146.0, 85.0, 92.0, 125.0, 80.0, 100.0, 137.0, 78.0, 96.0, 91.0, 104.0, 97.0, 100.0, 66.0, 116.0, 84.0, 75.0, 127.0, 81.0, 70.0, 93.0, 96.0, 133.0, 104.0, 68.0, 103.0, 108.0, 86.0, 95.0, 87.0, 148.0, 94.0, 94.0, 107.0, 92.0, 81.0, 87.0, 102.0, 114.0, 110.0, 48.0, 110.0, 57.0, 140.0, 48.0, 136.0, 88.0, 112.0, 115.0, 93.0, 83.0, 88.0, 85.0, 84.0, 88.0, 106.0, 117.0, 89.0, 86.0, 104.0, 62.0, 87.0, 110.0, 127.0, 100.0, 122.0, 113.0, 112.0, 110.0, 104.0, 121.0, 91.0, 143.0, 96.0, 99.0, 115.0, 66.0, 109.0, 91.0, 96.0, 112.0, 106.0, 80.0, 107.0, 104.0, 95.0, 103.0, 100.0, 47.0, 94.0, 97.0, 99.0, 108.0, 92.0, 106.0, 98.0, 115.0, 67.0, 115.0, 92.0, 86.0, 102.0, 111.0, 92.0, 100.0, 106.0, 104.0, 126.0, 64.0, 126.0, 85.0, 88.0, 99.0, 127.0, 90.0, 117.0, 96.0, 91.0, 102.0, 110.0, 110.0, 105.0, 71.0, 92.0, 78.0, 113.0, 44.0, 123.0, 117.0, 97.0, 113.0, 101.0, 100.0, 68.0, 108.0, 106.0, 80.0, 95.0, 109.0, 130.0, 96.0, 121.0, 121.0, 100.0, 108.0, 100.0, 95.0, 68.0, 106.0, 101.0, 86.0, 105.0, 125.0, 85.0, 101.0, 101.0, 108.0, 74.0, 112.0, 97.0, 104.0, 99.0, 88.0, 70.0, 99.0, 82.0, 145.0, 85.0, 110.0, 69.0, 100.0, 135.0, 98.0, 117.0, 109.0, 103.0, 95.0, 95.0, 111.0, 94.0, 132.0, 114.0, 81.0, 116.0, 124.0, 88.0, 80.0, 114.0, 69.0, 108.0, 110.0, 88.0, 137.0, 121.0, 56.0, 71.0, 65.0, 125.0, 87.0, 133.0, 80.0, 114.0, 107.0, 93.0 ], [ 0.0, 35.0, 40.0, 44.0, 45.0, 49.0, 51.0, 52.0, 53.0, 54.0, 54.0, 55.0, 55.0, 55.0, 56.0, 56.0, 57.0, 57.0, 58.0, 58.0, 59.0, 59.0, 60.0, 61.0, 61.0, 61.0, 61.0, 61.0, 61.0, 62.0, 62.0, 62.0, 63.0, 63.0, 63.0, 63.0, 64.0, 65.0, 65.0, 65.0, 65.0, 65.0, 66.0, 66.0, 66.0, 67.0, 67.0, 67.0, 67.0, 67.0, 67.0, 67.0, 67.0, 67.0, 68.0, 68.0, 68.0, 68.0, 69.0, 69.0, 69.0, 69.0, 70.0, 70.0, 70.0, 70.0, 70.0, 70.0, 70.0, 71.0, 71.0, 71.0, 71.0, 71.0, 71.0, 71.0, 71.0, 72.0, 72.0, 72.0, 72.0, 72.0, 72.0, 72.0, 73.0, 73.0, 73.0, 73.0, 73.0, 73.0, 73.0, 73.0, 74.0, 74.0, 74.0, 74.0, 74.0, 74.0, 74.0, 74.0, 75.0, 75.0, 75.0, 75.0, 75.0, 75.0, 76.0, 76.0, 76.0, 76.0, 76.0, 76.0, 76.0, 76.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 81.0, 81.0, 81.0, 81.0, 81.0, 81.0, 81.0, 81.0, 81.0, 
81.0, 81.0, 81.0, 81.0, 81.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 
112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 122.0, 122.0, 122.0, 122.0, 122.0, 122.0, 122.0, 122.0, 122.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 124.0, 124.0, 124.0, 124.0, 124.0, 124.0, 124.0, 124.0, 124.0, 124.0, 125.0, 125.0, 125.0, 125.0, 125.0, 125.0, 125.0, 125.0, 126.0, 126.0, 126.0, 126.0, 126.0, 126.0, 126.0, 126.0, 126.0, 126.0, 126.0, 127.0, 127.0, 127.0, 127.0, 127.0, 127.0, 127.0, 127.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 129.0, 129.0, 129.0, 129.0, 129.0, 129.0, 129.0, 130.0, 130.0, 130.0, 130.0, 130.0, 130.0, 130.0, 130.0, 131.0, 131.0, 131.0, 131.0, 132.0, 132.0, 132.0, 132.0, 132.0, 132.0, 133.0, 133.0, 133.0, 133.0, 133.0, 133.0, 134.0, 134.0, 134.0, 134.0, 134.0, 134.0, 135.0, 135.0, 135.0, 136.0, 136.0, 137.0, 137.0, 137.0, 137.0, 137.0, 138.0, 138.0, 138.0, 138.0, 138.0, 139.0, 139.0, 139.0, 140.0, 140.0, 140.0, 140.0, 140.0, 140.0, 141.0, 141.0, 141.0, 141.0, 142.0, 142.0, 143.0, 143.0, 145.0, 145.0, 145.0, 146.0, 147.0, 147.0, 149.0, 150.0, 151.0, 151.0, 151.0, 156.0, 158.0, 163.0 ] ] - } - } - } - } - }, { - "name" : "Eve Mins", - "inferred_type" : "Fractional", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 200.0501071581652, - "sum" : 466716.8999999994, - "std_dev" : 50.01592824933489, - "min" : 31.2, - "max" : 361.8, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 31.2, - "upper_bound" : 64.26, - "count" : 7.0 - }, { - "lower_bound" : 64.26, - "upper_bound" : 97.32000000000001, - "count" : 43.0 - }, { - "lower_bound" : 97.32000000000001, - "upper_bound" : 130.38, - "count" : 135.0 - }, { - "lower_bound" : 130.38, - "upper_bound" : 163.44, - "count" : 360.0 - }, { - "lower_bound" : 163.44, - "upper_bound" : 196.5, - "count" : 555.0 - }, { - "lower_bound" : 196.5, - "upper_bound" : 229.56, - "count" : 587.0 - }, { - "lower_bound" : 229.56, - "upper_bound" : 262.62, - "count" : 404.0 - }, { - "lower_bound" : 262.62, - "upper_bound" : 295.68, - "count" : 178.0 - }, { - "lower_bound" : 295.68, - "upper_bound" : 328.74, - "count" : 49.0 - }, { - "lower_bound" : 328.74, - "upper_bound" : 361.8, - "count" : 15.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 212.8, 221.3, 227.8, 341.3, 286.2, 158.6, 292.3, 257.5, 161.4, 102.8, 151.2, 134.0, 259.7, 127.5, 155.0, 203.9, 183.8, 189.5, 228.4, 176.6, 247.8, 194.5, 169.1, 185.3, 223.8, 290.9, 200.2, 194.8, 134.2, 127.8, 207.5, 233.9, 169.9, 248.9, 235.2, 254.9, 176.7, 204.1, 
181.5, 244.4, 169.8, 221.5, 284.3, 187.1, 99.5, 253.0, 215.9, 167.2, 278.7, 137.6, 217.7, 172.2, 178.5, 211.4, 169.3, 245.0, 169.8, 348.5, 242.4, 193.5, 247.9, 199.2, 103.1, 196.7, 267.1, 253.8, 187.8, 275.0, 161.9, 221.1, 168.6, 234.7, 217.7, 282.6, 221.2, 96.6, 187.6, 191.4, 196.4, 164.6, 227.3, 249.1, 167.8, 131.1, 154.5, 264.9, 205.5, 262.3, 203.5, 246.8, 351.6, 178.6, 263.6, 209.9, 221.0, 195.3, 106.1, 163.7, 216.9, 215.8, 153.1, 170.5, 144.5, 143.8, 137.3, 276.2, 155.0, 150.1, 131.0, 120.9, 164.6, 173.7, 139.1, 209.1, 172.0, 170.4, 140.2, 230.3, 132.9, 159.1, 52.9, 211.1, 238.2, 125.8, 143.7, 162.7, 228.3, 180.4, 188.8, 256.2, 163.7, 129.5, 230.9, 140.3, 177.8, 317.0, 196.8, 204.7, 185.4, 200.9, 309.2, 185.7, 164.1, 148.8, 173.7, 259.4, 246.7, 231.9, 234.4, 147.9, 216.6, 127.8, 235.9, 210.4, 201.4, 160.6, 228.4, 226.7, 241.3, 193.4, 165.7, 186.2, 177.3, 196.3, 205.1, 167.2, 230.0, 195.5, 178.0, 69.2, 303.5, 192.2, 213.8, 232.2, 86.8, 260.5, 165.1, 245.3, 177.8, 198.2, 145.3, 184.9, 162.6, 236.1, 293.8, 189.4, 126.2, 139.8, 303.3, 177.6, 209.6, 210.7, 304.6, 159.5, 205.2, 206.6, 246.1, 184.9, 226.1, 91.7, 169.9, 201.4, 167.6, 180.6, 200.6, 167.5, 270.6, 155.2, 193.8, 268.1, 183.3, 189.6, 250.5, 132.2, 248.9, 167.9, 165.8, 164.8, 195.2, 176.0, 203.7, 203.0, 211.2, 216.5, 260.6, 209.2, 251.6, 167.2, 213.7, 286.3, 121.9, 162.3, 186.9, 250.5, 200.2, 196.9, 169.7, 201.8, 115.7, 322.2, 224.8, 209.3, 273.7, 246.9, 269.4, 273.3, 239.6, 260.3, 197.5, 200.2, 182.4, 209.9, 223.6, 220.0, 255.1, 256.3, 212.0, 236.7, 199.9, 115.9, 256.6, 233.7, 173.4, 146.6, 137.5, 165.0, 198.9, 129.2, 249.3, 222.8, 248.5, 178.7, 175.2, 230.1, 214.1, 187.2, 198.9, 165.7, 213.4, 88.1, 297.8, 195.5, 238.3 ], [ 31.2, 58.9, 60.8, 65.2, 66.5, 71.0, 73.2, 75.3, 77.1, 78.3, 79.3, 80.6, 82.2, 83.9, 87.6, 88.6, 89.7, 90.0, 90.5, 92.0, 93.7, 95.1, 98.3, 101.3, 102.2, 102.6, 103.4, 105.5, 105.7, 106.2, 106.8, 107.9, 108.2, 109.9, 110.2, 110.8, 112.5, 113.2, 113.3, 114.3, 114.5, 114.7, 115.0, 115.7, 116.5, 116.6, 117.0, 117.9, 118.0, 118.5, 118.7, 118.9, 119.3, 119.6, 120.0, 120.3, 120.4, 120.5, 120.7, 121.0, 121.6, 122.2, 122.8, 123.0, 123.4, 123.5, 123.5, 123.9, 123.9, 124.4, 126.0, 126.9, 127.3, 127.8, 128.7, 128.9, 129.1, 129.3, 129.4, 129.8, 130.1, 130.2, 130.7, 131.1, 131.4, 131.7, 131.8, 132.3, 132.5, 132.9, 133.0, 133.4, 133.9, 134.1, 134.3, 134.5, 134.7, 134.9, 135.0, 135.2, 136.0, 136.1, 136.4, 136.7, 136.9, 137.3, 137.8, 138.1, 138.3, 138.5, 138.7, 138.9, 139.1, 139.5, 139.6, 140.2, 140.9, 140.9, 141.2, 141.4, 141.6, 141.8, 141.9, 142.0, 142.1, 142.3, 142.6, 142.6, 142.7, 143.1, 143.4, 143.7, 143.8, 144.1, 144.3, 144.4, 145.0, 145.1, 145.5, 145.9, 146.4, 146.7, 146.9, 147.0, 147.4, 147.7, 148.0, 148.2, 148.3, 148.5, 148.7, 149.1, 149.3, 149.5, 149.6, 149.9, 150.0, 150.0, 150.1, 150.6, 150.8, 151.3, 151.4, 151.7, 152.0, 152.1, 152.3, 152.5, 152.5, 152.7, 152.7, 152.8, 152.9, 153.1, 153.1, 153.2, 153.3, 153.6, 153.7, 153.8, 154.0, 154.2, 154.5, 154.6, 154.7, 154.9, 154.9, 155.1, 155.4, 155.5, 155.6, 155.8, 156.0, 156.0, 156.1, 156.3, 156.4, 156.9, 157.0, 157.3, 157.5, 157.6, 157.6, 158.0, 158.2, 158.4, 158.6, 158.8, 159.2, 159.4, 159.5, 159.6, 159.6, 159.7, 159.9, 160.1, 160.1, 160.3, 160.5, 160.6, 160.6, 160.7, 160.8, 160.9, 161.1, 161.4, 161.7, 161.7, 161.7, 161.8, 161.9, 161.9, 162.1, 162.3, 162.3, 162.5, 162.5, 162.6, 162.8, 163.0, 163.1, 163.2, 163.3, 163.4, 163.6, 163.7, 164.0, 164.2, 164.5, 164.5, 164.5, 164.7, 164.9, 165.1, 165.2, 165.4, 165.6, 165.8, 165.8, 165.9, 165.9, 166.4, 166.6, 166.7, 166.8, 167.0, 
167.1, 167.2, 167.2, 167.3, 167.5, 167.6, 167.7, 167.7, 167.7, 167.8, 167.9, 167.9, 168.0, 168.2, 168.3, 168.4, 168.5, 168.6, 168.7, 169.0, 169.1, 169.3, 169.5, 169.6, 169.8, 169.9, 169.9, 170.0, 170.0, 170.2, 170.5, 170.5, 170.7, 170.8, 170.9, 171.2, 171.4, 171.6, 171.7, 171.8, 171.9, 172.2, 172.7, 172.8, 173.1, 173.3, 173.3, 173.4, 173.5, 173.7, 174.0, 174.4, 174.5, 174.6, 174.8, 174.9, 175.1, 175.2, 175.4, 175.7, 175.8, 175.9, 176.0, 176.1, 176.2, 176.4, 176.6, 176.6, 176.7, 177.0, 177.2, 177.5, 177.6, 177.8, 177.9, 178.2, 178.3, 178.4, 178.6, 178.8, 178.9, 179.1, 179.3, 179.3, 179.5, 179.7, 179.9, 180.0, 180.0, 180.0, 180.2, 180.3, 180.4, 180.5, 180.5, 180.6, 180.8, 181.0, 181.2, 181.4, 181.6, 181.6, 181.7, 182.0, 182.0, 182.2, 182.2, 182.5, 182.9, 182.9, 183.0, 183.1, 183.4, 183.5, 183.6, 183.6, 183.9, 184.0, 184.1, 184.2, 184.3, 184.5, 184.6, 184.8, 185.0, 185.4, 185.5, 185.5, 185.7, 185.8, 185.9, 186.0, 186.4, 186.6, 186.6, 186.7, 186.8, 187.0, 187.0, 187.1, 187.2, 187.2, 187.3, 187.4, 187.5, 187.5, 187.7, 187.8, 188.0, 188.2, 188.2, 188.4, 188.5, 188.5, 188.6, 188.8, 188.8, 189.0, 189.1, 189.3, 189.3, 189.4, 189.6, 189.6, 189.7, 189.8, 190.0, 190.0, 190.2, 190.3, 190.4, 190.6, 190.7, 190.8, 190.9, 191.1, 191.3, 191.6, 191.8, 191.9, 192.0, 192.2, 192.3, 192.4, 192.6, 192.7, 192.8, 193.0, 193.0, 193.1, 193.3, 193.6, 193.8, 193.9, 194.0, 194.1, 194.4, 194.4, 194.5, 194.6, 194.6, 194.7, 194.8, 194.9, 195.2, 195.5, 195.5, 195.6, 195.7, 195.7, 195.7, 195.8, 195.9, 196.0, 196.2, 196.3, 196.5, 196.6, 196.7, 196.7, 196.8, 197.0, 197.0, 197.3, 197.3, 197.4, 197.4, 197.5, 197.6, 197.6, 197.7, 197.8, 198.0, 198.3, 198.4, 198.5, 198.6, 198.6, 198.9, 199.4, 199.5, 199.6, 199.7, 199.7, 199.9, 200.0, 200.2, 200.3, 200.5, 200.6, 200.7, 200.7, 200.9, 201.0, 201.0, 201.0, 201.1, 201.3, 201.6, 202.2, 202.3, 202.4, 202.4, 202.5, 202.6, 202.6, 202.7, 202.8, 202.9, 203.0, 203.2, 203.4, 203.4, 203.6, 203.7, 203.8, 203.9, 203.9, 204.0, 204.1, 204.2, 204.3, 204.5, 204.6, 204.7, 204.9, 204.9, 205.0, 205.1, 205.2, 205.2, 205.5, 205.5, 205.7, 205.9, 206.0, 206.0, 206.2, 206.4, 206.5, 206.7, 206.8, 206.9, 207.0, 207.0, 207.1, 207.3, 207.5, 207.6, 207.7, 207.9, 207.9, 208.0, 208.0, 208.2, 208.2, 208.4, 208.5, 208.6, 208.8, 208.9, 209.0, 209.1, 209.1, 209.3, 209.4, 209.4, 209.4, 209.5, 209.5, 209.6, 209.8, 209.8, 209.9, 210.0, 210.1, 210.2, 210.2, 210.4, 210.5, 210.6, 210.7, 210.8, 210.9, 211.0, 211.1, 211.2, 211.3, 211.4, 211.5, 211.5, 211.6, 211.6, 211.7, 211.8, 211.8, 212.0, 212.1, 212.2, 212.3, 212.5, 212.7, 213.0, 213.2, 213.3, 213.4, 213.6, 213.7, 213.7, 213.9, 213.9, 214.1, 214.2, 214.2, 214.3, 214.5, 214.7, 214.8, 215.1, 215.2, 215.3, 215.5, 215.5, 215.6, 215.7, 215.8, 216.2, 216.3, 216.3, 216.4, 216.5, 216.5, 216.6, 216.7, 216.8, 216.9, 217.0, 217.1, 217.2, 217.4, 217.5, 217.7, 218.2, 218.6, 218.8, 218.9, 219.1, 219.1, 219.2, 219.3, 219.5, 219.6, 219.6, 219.7, 219.9, 220.0, 220.2, 220.4, 220.5, 220.6, 220.6, 220.9, 220.9, 221.1, 221.2, 221.4, 221.6, 221.8, 221.9, 222.0, 222.1, 222.2, 222.3, 222.4, 222.7, 223.0, 223.0, 223.1, 223.2, 223.3, 223.5, 223.5, 223.5, 223.6, 224.1, 224.2, 224.3, 224.4, 224.6, 224.7, 224.9, 224.9, 225.0, 225.1, 225.1, 225.1, 225.2, 225.3, 225.5, 225.9, 225.9, 226.1, 226.2, 226.6, 226.7, 226.8, 227.2, 227.4, 227.8, 228.4, 228.6, 228.9, 229.0, 229.4, 229.4, 229.7, 229.9, 230.0, 230.1, 230.1, 230.3, 230.5, 230.7, 230.9, 230.9, 231.3, 231.4, 231.6, 231.7, 231.8, 231.8, 232.1, 232.3, 232.4, 232.6, 232.7, 232.9, 232.9, 233.2, 233.2, 233.5, 233.6, 233.7, 233.7, 233.8, 234.1, 234.5, 
234.9, 235.0, 235.3, 235.3, 235.5, 235.8, 235.9, 236.0, 236.0, 236.2, 236.3, 236.5, 236.7, 236.8, 237.0, 237.2, 237.4, 237.7, 237.9, 238.1, 238.3, 238.6, 238.7, 239.5, 239.7, 240.1, 240.1, 240.2, 240.6, 240.7, 240.7, 240.8, 241.0, 241.4, 241.5, 241.6, 241.9, 242.0, 242.2, 242.2, 242.3, 242.6, 242.8, 242.8, 243.1, 243.2, 243.3, 243.5, 243.7, 243.9, 244.0, 244.3, 244.7, 244.8, 245.1, 245.4, 245.6, 246.0, 246.1, 246.3, 246.5, 246.5, 246.5, 246.6, 246.7, 247.0, 247.2, 247.3, 247.6, 247.7, 247.8, 248.1, 248.2, 248.5, 248.6, 248.7, 248.7, 248.8, 249.0, 249.1, 249.3, 249.5, 249.6, 249.7, 249.8, 249.8, 249.9, 250.0, 250.2, 250.7, 250.7, 250.8, 251.1, 251.3, 251.7, 251.8, 252.2, 252.4, 252.7, 253.0, 253.4, 253.4, 253.6, 253.6, 253.9, 254.1, 254.2, 254.5, 255.1, 255.6, 255.7, 255.9, 255.9, 256.1, 256.1, 256.6, 256.6, 256.7, 256.8, 256.8, 256.9, 257.2, 257.2, 257.5, 257.7, 258.0, 258.2, 258.4, 258.7, 258.8, 259.2, 259.3, 259.6, 259.8, 260.0, 260.2, 260.7, 260.9, 261.5, 261.5, 261.6, 262.0, 262.2, 262.3, 262.6, 263.0, 263.4, 263.7, 264.1, 264.4, 264.5, 264.7, 264.7, 265.0, 265.3, 265.5, 265.7, 265.8, 266.3, 266.5, 266.9, 267.1, 267.4, 267.8, 268.3, 268.5, 268.5, 269.1, 269.5, 269.8, 270.5, 270.8, 271.0, 271.7, 272.8, 273.0, 273.0, 273.6, 273.8, 274.0, 274.4, 274.6, 274.8, 275.0, 275.4, 275.6, 276.0, 276.3, 276.4, 276.4, 276.8, 277.1, 277.5, 278.0, 278.2, 278.5, 279.3, 280.1, 280.8, 281.1, 281.7, 282.8, 283.2, 283.3, 284.5, 285.2, 285.8, 285.9, 286.0, 286.7, 287.3, 287.4, 287.7, 288.0, 288.7, 289.4, 289.8, 290.0, 290.9, 291.3, 292.5, 292.7, 292.8, 293.6, 294.0, 295.7, 298.0, 298.6, 300.9, 301.3, 301.5, 302.6, 303.4, 303.8, 304.9, 307.2, 308.7, 313.2, 313.7, 317.2, 317.8, 318.8, 319.3, 322.3, 327.0, 328.2, 329.3, 330.6, 332.8, 337.1, 347.3, 354.2 ] ] - } - } - } - } - }, { - "name" : "Eve Calls", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 99.57393913416202, - "sum" : 232306.0, - "std_dev" : 19.67557809959058, - "min" : 12.0, - "max" : 170.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 12.0, - "upper_bound" : 27.8, - "count" : 2.0 - }, { - "lower_bound" : 27.8, - "upper_bound" : 43.6, - "count" : 2.0 - }, { - "lower_bound" : 43.6, - "upper_bound" : 59.4, - "count" : 44.0 - }, { - "lower_bound" : 59.4, - "upper_bound" : 75.2, - "count" : 195.0 - }, { - "lower_bound" : 75.2, - "upper_bound" : 91.0, - "count" : 530.0 - }, { - "lower_bound" : 91.0, - "upper_bound" : 106.8, - "count" : 708.0 - }, { - "lower_bound" : 106.8, - "upper_bound" : 122.6, - "count" : 576.0 - }, { - "lower_bound" : 122.6, - "upper_bound" : 138.4, - "count" : 215.0 - }, { - "lower_bound" : 138.4, - "upper_bound" : 154.2, - "count" : 56.0 - }, { - "lower_bound" : 154.2, - "upper_bound" : 170.0, - "count" : 5.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 100.0, 92.0, 128.0, 79.0, 86.0, 98.0, 112.0, 103.0, 82.0, 56.0, 82.0, 114.0, 108.0, 86.0, 101.0, 107.0, 68.0, 108.0, 139.0, 114.0, 126.0, 157.0, 126.0, 99.0, 87.0, 92.0, 64.0, 107.0, 103.0, 86.0, 109.0, 115.0, 125.0, 108.0, 100.0, 110.0, 84.0, 129.0, 109.0, 88.0, 101.0, 125.0, 119.0, 112.0, 122.0, 83.0, 116.0, 62.0, 88.0, 108.0, 109.0, 108.0, 128.0, 96.0, 60.0, 122.0, 114.0, 108.0, 106.0, 118.0, 81.0, 97.0, 94.0, 112.0, 118.0, 114.0, 121.0, 129.0, 123.0, 121.0, 116.0, 97.0, 114.0, 110.0, 82.0, 82.0, 99.0, 119.0, 77.0, 83.0, 132.0, 126.0, 90.0, 84.0, 90.0, 99.0, 89.0, 122.0, 78.0, 91.0, 80.0, 46.0, 121.0, 93.0, 147.0, 87.0, 
80.0, 112.0, 80.0, 68.0, 80.0, 86.0, 92.0, 74.0, 102.0, 77.0, 86.0, 88.0, 114.0, 91.0, 59.0, 109.0, 123.0, 142.0, 103.0, 96.0, 134.0, 109.0, 93.0, 119.0, 109.0, 115.0, 133.0, 112.0, 111.0, 66.0, 97.0, 91.0, 65.0, 101.0, 108.0, 114.0, 137.0, 114.0, 118.0, 129.0, 111.0, 77.0, 118.0, 111.0, 105.0, 111.0, 45.0, 95.0, 85.0, 95.0, 89.0, 92.0, 96.0, 97.0, 123.0, 72.0, 90.0, 99.0, 87.0, 108.0, 117.0, 137.0, 146.0, 120.0, 115.0, 90.0, 116.0, 119.0, 88.0, 106.0, 121.0, 84.0, 106.0, 84.0, 74.0, 89.0, 112.0, 54.0, 122.0, 102.0, 127.0, 105.0, 109.0, 89.0, 146.0, 98.0, 76.0, 70.0, 123.0, 64.0, 118.0, 121.0, 100.0, 92.0, 84.0, 80.0, 83.0, 88.0, 94.0, 84.0, 96.0, 144.0, 103.0, 84.0, 102.0, 80.0, 121.0, 100.0, 81.0, 144.0, 74.0, 121.0, 95.0, 69.0, 79.0, 104.0, 53.0, 75.0, 90.0, 128.0, 119.0, 94.0, 116.0, 89.0, 99.0, 141.0, 79.0, 103.0, 90.0, 69.0, 123.0, 136.0, 87.0, 76.0, 105.0, 139.0, 114.0, 86.0, 121.0, 87.0, 75.0, 102.0, 96.0, 81.0, 87.0, 86.0, 121.0, 111.0, 85.0, 103.0, 102.0, 103.0, 68.0, 110.0, 108.0, 92.0, 96.0, 118.0, 110.0, 150.0, 98.0, 109.0, 124.0, 129.0, 69.0, 74.0, 88.0, 103.0, 111.0, 71.0, 96.0, 113.0, 119.0, 102.0, 109.0, 70.0, 87.0, 73.0, 68.0, 98.0, 76.0, 97.0, 73.0, 94.0, 98.0, 91.0, 102.0 ], [ 12.0, 42.0, 44.0, 48.0, 48.0, 48.0, 50.0, 52.0, 52.0, 53.0, 54.0, 56.0, 56.0, 57.0, 58.0, 58.0, 58.0, 58.0, 59.0, 59.0, 59.0, 60.0, 60.0, 60.0, 60.0, 60.0, 61.0, 61.0, 61.0, 62.0, 62.0, 63.0, 63.0, 63.0, 63.0, 63.0, 64.0, 64.0, 64.0, 65.0, 65.0, 65.0, 65.0, 65.0, 66.0, 66.0, 66.0, 66.0, 66.0, 67.0, 67.0, 67.0, 67.0, 67.0, 67.0, 67.0, 67.0, 68.0, 68.0, 68.0, 69.0, 69.0, 69.0, 69.0, 70.0, 70.0, 70.0, 70.0, 70.0, 71.0, 71.0, 71.0, 71.0, 71.0, 71.0, 71.0, 71.0, 72.0, 72.0, 72.0, 72.0, 72.0, 72.0, 72.0, 72.0, 73.0, 73.0, 73.0, 73.0, 73.0, 73.0, 73.0, 73.0, 74.0, 74.0, 74.0, 74.0, 74.0, 74.0, 74.0, 74.0, 75.0, 75.0, 75.0, 75.0, 75.0, 76.0, 76.0, 76.0, 76.0, 76.0, 76.0, 76.0, 76.0, 76.0, 76.0, 76.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 81.0, 81.0, 81.0, 81.0, 81.0, 81.0, 81.0, 81.0, 81.0, 81.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 
94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 122.0, 122.0, 122.0, 122.0, 122.0, 122.0, 122.0, 122.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 
124.0, 124.0, 124.0, 124.0, 124.0, 125.0, 125.0, 125.0, 125.0, 125.0, 125.0, 125.0, 125.0, 125.0, 125.0, 126.0, 126.0, 126.0, 126.0, 126.0, 126.0, 126.0, 126.0, 126.0, 126.0, 126.0, 127.0, 127.0, 127.0, 127.0, 127.0, 127.0, 127.0, 127.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 129.0, 129.0, 129.0, 129.0, 130.0, 130.0, 130.0, 130.0, 130.0, 130.0, 131.0, 131.0, 131.0, 131.0, 131.0, 131.0, 131.0, 131.0, 132.0, 132.0, 133.0, 133.0, 133.0, 133.0, 134.0, 134.0, 134.0, 134.0, 135.0, 135.0, 136.0, 136.0, 136.0, 136.0, 136.0, 136.0, 137.0, 137.0, 137.0, 137.0, 138.0, 138.0, 138.0, 138.0, 139.0, 140.0, 140.0, 140.0, 141.0, 141.0, 141.0, 142.0, 142.0, 143.0, 143.0, 143.0, 144.0, 144.0, 146.0, 147.0, 147.0, 148.0, 149.0, 150.0, 151.0, 152.0, 154.0, 155.0, 164.0 ] ] - } - } - } - } - }, { - "name" : "Night Mins", - "inferred_type" : "Fractional", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 201.3885983711959, - "sum" : 469839.6000000001, - "std_dev" : 50.6279607960497, - "min" : 23.2, - "max" : 395.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 23.2, - "upper_bound" : 60.379999999999995, - "count" : 9.0 - }, { - "lower_bound" : 60.379999999999995, - "upper_bound" : 97.56, - "count" : 35.0 - }, { - "lower_bound" : 97.56, - "upper_bound" : 134.74, - "count" : 171.0 - }, { - "lower_bound" : 134.74, - "upper_bound" : 171.92, - "count" : 463.0 - }, { - "lower_bound" : 171.92, - "upper_bound" : 209.1, - "count" : 623.0 - }, { - "lower_bound" : 209.1, - "upper_bound" : 246.28, - "count" : 590.0 - }, { - "lower_bound" : 246.28, - "upper_bound" : 283.46, - "count" : 325.0 - }, { - "lower_bound" : 283.46, - "upper_bound" : 320.64, - "count" : 98.0 - }, { - "lower_bound" : 320.64, - "upper_bound" : 357.82, - "count" : 15.0 - }, { - "lower_bound" : 357.82, - "upper_bound" : 395.0, - "count" : 4.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 226.3, 150.4, 214.0, 165.7, 261.7, 187.7, 201.2, 227.8, 239.6, 213.7, 191.0, 210.6, 141.5, 289.3, 189.6, 122.2, 153.8, 223.9, 197.9, 214.4, 219.0, 242.3, 221.2, 205.1, 240.3, 228.4, 233.1, 170.6, 118.9, 225.6, 169.4, 277.4, 238.8, 158.6, 206.2, 160.2, 263.4, 171.3, 281.4, 207.2, 194.1, 103.9, 305.5, 281.1, 189.5, 180.8, 216.3, 194.8, 263.4, 159.7, 152.1, 137.5, 218.3, 72.4, 141.1, 134.4, 215.8, 212.6, 231.8, 299.6, 155.1, 120.2, 211.8, 261.7, 151.0, 213.1, 154.5, 141.1, 142.1, 109.9, 139.8, 133.6, 146.9, 181.0, 131.6, 170.7, 101.7, 144.0, 120.1, 240.7, 122.4, 136.0, 163.1, 230.7, 281.4, 168.9, 213.6, 143.9, 203.4, 203.9, 215.8, 203.8, 165.2, 194.1, 145.7, 280.5, 179.6, 271.7, 153.5, 223.3, 240.0, 227.6, 125.7, 140.1, 189.6, 146.5, 189.7, 219.8, 216.9, 226.6, 128.8, 248.6, 199.1, 173.0, 183.4, 165.1, 227.4, 256.7, 272.0, 259.2, 246.1, 249.0, 271.8, 261.6, 253.4, 215.5, 180.1, 133.4, 253.2, 169.0, 242.7, 193.6, 217.1, 208.9, 206.1, 160.4, 212.4, 256.6, 192.7, 246.8, 152.1, 161.5, 157.7, 224.5, 257.1, 134.7, 226.1, 164.7, 203.2, 183.8, 250.6, 199.2, 260.2, 242.7, 177.4, 77.9, 299.7, 144.6, 211.4, 254.4, 189.2, 189.0, 296.3, 294.8, 204.0, 205.7, 206.3, 200.5, 215.6, 257.6, 224.0, 165.1, 214.3, 147.1, 156.2, 144.2, 141.5, 260.0, 255.7, 170.8, 195.2, 143.2, 250.3, 192.7, 250.7, 229.9, 274.2, 171.6, 154.5, 159.8, 233.0, 231.9, 181.2, 192.8, 164.6, 178.0, 138.0, 234.3, 155.6, 211.7, 244.9, 159.0, 270.0, 174.1, 153.8, 260.0, 209.9, 186.7, 171.7, 255.3, 139.8, 206.7, 181.2, 177.7, 268.0, 167.6, 151.6, 54.5, 268.8, 250.9, 246.2, 151.8, 259.3, 
196.1, 195.0, 197.4, 191.6, 214.2, 175.1, 167.0, 253.7, 264.5, 210.7, 241.6, 237.4, 187.2, 264.3, 206.1, 295.8, 210.0, 197.6, 289.9, 217.9, 200.0, 187.1, 197.8, 268.3, 192.2, 209.5, 231.7, 180.6, 266.9, 240.8, 265.7, 225.9, 158.6, 282.3, 264.0, 170.8, 185.4, 235.3, 131.9, 145.8, 225.7, 226.0, 173.1, 165.9, 207.5, 321.2, 225.3, 167.1, 185.7, 136.3, 119.8, 214.9, 190.0, 76.4, 265.9, 190.8, 233.2, 212.6, 213.3, 165.7 ], [ 23.2, 45.0, 53.3, 56.6, 61.4, 65.8, 71.1, 75.8, 77.3, 79.3, 80.2, 82.4, 88.2, 89.7, 91.6, 94.3, 94.9, 95.3, 96.4, 97.4, 98.6, 99.3, 100.9, 102.0, 102.1, 103.7, 104.5, 104.7, 104.8, 105.4, 105.6, 107.3, 107.9, 108.1, 108.9, 109.6, 109.6, 109.6, 110.1, 110.4, 111.2, 111.6, 111.7, 112.9, 113.5, 114.2, 114.3, 114.5, 115.7, 116.1, 116.4, 117.0, 117.8, 117.9, 118.0, 118.3, 119.1, 119.1, 119.4, 120.0, 121.0, 121.1, 122.0, 122.3, 122.6, 123.0, 123.4, 124.0, 125.6, 126.3, 126.9, 127.1, 127.4, 127.7, 127.9, 128.2, 128.4, 128.7, 128.9, 129.1, 129.6, 129.6, 129.7, 129.9, 130.6, 130.9, 132.0, 132.5, 132.6, 132.9, 133.1, 133.4, 133.7, 134.0, 134.2, 134.3, 134.6, 134.9, 135.0, 135.0, 135.9, 136.1, 136.2, 136.3, 136.6, 136.7, 137.4, 138.2, 138.4, 138.7, 139.2, 139.3, 139.4, 140.0, 140.5, 140.5, 141.1, 141.5, 141.6, 141.9, 142.4, 142.6, 143.1, 143.2, 143.3, 143.6, 143.7, 143.9, 144.1, 144.2, 144.9, 145.2, 145.5, 145.7, 146.2, 146.4, 146.7, 146.8, 146.9, 147.1, 147.4, 147.7, 147.8, 148.0, 148.1, 148.2, 148.4, 148.6, 148.7, 149.3, 149.5, 149.5, 149.9, 150.0, 150.0, 150.2, 150.3, 150.4, 150.6, 150.9, 151.1, 151.2, 151.5, 151.9, 152.2, 152.3, 152.4, 152.5, 152.7, 152.8, 152.9, 153.0, 153.2, 153.4, 153.5, 153.7, 153.9, 153.9, 154.0, 154.1, 154.2, 154.4, 154.7, 154.8, 154.9, 154.9, 155.3, 155.6, 155.7, 156.3, 156.6, 156.7, 156.9, 157.0, 157.4, 157.4, 157.6, 157.8, 157.9, 158.1, 158.4, 158.5, 158.6, 158.6, 158.7, 158.8, 158.9, 159.0, 159.0, 159.4, 159.6, 159.8, 160.2, 160.5, 160.6, 160.8, 161.1, 161.3, 161.5, 161.9, 162.1, 162.2, 162.4, 162.6, 162.6, 162.8, 162.9, 163.0, 163.1, 163.2, 163.2, 163.4, 163.6, 163.7, 164.0, 164.3, 164.3, 164.4, 164.5, 164.7, 164.8, 165.1, 165.4, 165.4, 165.6, 165.7, 165.8, 165.9, 166.3, 166.4, 166.6, 166.7, 166.8, 167.0, 167.1, 167.2, 167.3, 167.5, 167.6, 167.7, 167.9, 168.2, 168.3, 168.4, 168.5, 168.6, 168.7, 168.8, 168.8, 168.9, 169.0, 169.2, 169.3, 169.4, 169.4, 169.5, 169.6, 169.7, 169.7, 169.9, 170.1, 170.2, 170.2, 170.3, 170.5, 170.5, 170.6, 170.8, 170.9, 170.9, 171.0, 171.0, 171.4, 171.5, 171.7, 172.0, 172.4, 172.5, 172.7, 172.7, 172.8, 172.9, 172.9, 173.1, 173.2, 173.4, 173.6, 173.8, 173.9, 174.0, 174.1, 174.3, 174.4, 174.7, 174.9, 175.0, 175.1, 175.3, 175.4, 175.8, 175.8, 175.9, 175.9, 176.0, 176.0, 176.1, 176.3, 176.4, 176.6, 176.9, 177.1, 177.3, 177.4, 177.5, 177.7, 177.9, 178.1, 178.2, 178.3, 178.4, 178.5, 178.6, 178.7, 179.1, 179.3, 179.3, 179.4, 179.5, 179.6, 179.7, 179.7, 179.8, 179.8, 180.0, 180.1, 180.2, 180.5, 180.5, 180.6, 180.6, 180.9, 180.9, 181.1, 181.2, 181.2, 181.4, 181.5, 181.5, 181.7, 181.8, 182.1, 182.1, 182.2, 182.2, 182.3, 182.4, 182.4, 182.5, 182.5, 182.7, 183.4, 183.5, 183.5, 183.9, 184.0, 184.2, 184.3, 184.4, 184.5, 184.6, 184.8, 184.9, 185.2, 185.3, 185.4, 185.5, 185.6, 185.7, 185.8, 185.9, 186.1, 186.2, 186.4, 186.5, 186.6, 186.7, 186.9, 187.2, 187.3, 187.4, 187.5, 187.9, 187.9, 188.1, 188.2, 188.2, 188.2, 188.3, 188.4, 188.7, 188.8, 188.9, 189.0, 189.1, 189.3, 189.3, 189.4, 189.4, 189.6, 189.6, 189.8, 190.1, 190.4, 190.5, 190.5, 190.6, 190.6, 190.7, 190.8, 190.9, 191.0, 191.4, 191.4, 191.4, 191.6, 191.9, 192.0, 192.0, 192.1, 192.4, 
192.5, 192.6, 192.7, 193.0, 193.0, 193.1, 193.4, 193.4, 193.6, 193.6, 193.7, 193.8, 193.8, 194.0, 194.1, 194.3, 194.3, 194.3, 194.4, 194.7, 194.7, 195.0, 195.1, 195.3, 195.4, 195.5, 195.6, 195.9, 196.0, 196.2, 196.2, 196.3, 196.8, 196.8, 196.9, 197.1, 197.3, 197.4, 197.4, 197.4, 197.6, 197.7, 197.8, 197.9, 198.0, 198.4, 198.5, 198.5, 198.7, 198.9, 198.9, 199.4, 199.5, 199.7, 199.8, 200.0, 200.1, 200.4, 200.4, 200.7, 200.8, 200.9, 201.0, 201.1, 201.4, 201.4, 201.6, 201.7, 201.8, 202.0, 202.0, 202.5, 202.7, 203.0, 203.0, 203.1, 203.1, 203.3, 203.5, 203.6, 203.7, 203.7, 203.8, 203.9, 204.0, 204.0, 204.1, 204.3, 204.4, 204.6, 204.6, 204.7, 204.8, 205.0, 205.0, 205.1, 205.5, 205.7, 205.7, 205.9, 206.1, 206.1, 206.1, 206.2, 206.4, 206.4, 206.6, 206.7, 206.9, 207.1, 207.2, 207.4, 207.8, 207.9, 208.0, 208.0, 208.2, 208.3, 208.4, 208.4, 208.7, 208.8, 209.0, 209.3, 209.5, 209.6, 210.0, 210.0, 210.1, 210.1, 210.2, 210.5, 210.5, 210.5, 210.6, 210.6, 210.7, 210.8, 210.8, 211.3, 211.4, 211.7, 211.9, 212.1, 212.4, 212.5, 212.6, 212.7, 212.8, 212.8, 212.9, 213.1, 213.3, 213.5, 213.7, 213.7, 213.8, 213.9, 214.0, 214.0, 214.1, 214.2, 214.2, 214.5, 214.5, 214.6, 214.6, 214.6, 214.7, 214.7, 214.7, 214.8, 215.3, 215.4, 215.5, 215.6, 215.7, 215.8, 215.9, 216.0, 216.1, 216.4, 216.5, 216.6, 216.8, 217.0, 217.0, 217.2, 217.4, 217.5, 217.5, 217.6, 217.8, 218.0, 218.3, 218.4, 218.5, 218.7, 218.8, 219.3, 219.4, 219.6, 219.7, 220.0, 220.0, 220.2, 220.2, 220.3, 220.4, 220.6, 220.8, 220.9, 221.0, 221.3, 221.5, 221.6, 221.6, 221.7, 221.7, 222.1, 222.2, 222.4, 222.6, 222.7, 222.8, 223.0, 223.1, 223.5, 223.5, 223.7, 223.8, 224.0, 224.2, 224.2, 224.3, 224.6, 224.7, 224.8, 224.9, 225.1, 225.2, 225.5, 225.6, 225.6, 226.0, 226.1, 226.5, 226.5, 226.7, 226.7, 226.9, 227.0, 227.1, 227.2, 227.3, 227.4, 227.5, 227.6, 227.7, 228.0, 228.1, 228.2, 228.3, 228.4, 228.4, 228.5, 228.6, 228.6, 228.7, 228.9, 229.0, 229.2, 229.4, 229.5, 229.5, 229.6, 229.8, 229.9, 229.9, 230.1, 230.1, 230.1, 230.5, 230.6, 230.7, 230.8, 231.1, 231.2, 231.4, 231.5, 231.5, 231.5, 231.9, 232.1, 232.5, 232.7, 232.8, 233.0, 233.1, 233.2, 233.3, 233.4, 233.6, 233.7, 234.0, 234.5, 234.7, 234.8, 234.9, 234.9, 235.0, 235.3, 235.4, 235.5, 235.7, 236.0, 236.0, 236.1, 236.3, 236.4, 236.5, 236.7, 236.9, 237.0, 237.3, 237.6, 237.7, 238.2, 238.4, 238.4, 238.6, 238.9, 239.2, 239.4, 239.5, 239.7, 239.8, 239.9, 239.9, 240.1, 240.2, 240.3, 240.3, 240.4, 240.6, 240.7, 241.0, 241.1, 241.2, 241.3, 241.4, 241.5, 242.0, 242.6, 242.7, 242.8, 242.9, 243.1, 243.2, 243.3, 243.3, 243.5, 243.6, 244.0, 244.3, 244.3, 244.4, 244.7, 244.7, 244.9, 245.0, 245.0, 245.1, 245.3, 245.3, 245.5, 245.5, 245.7, 245.9, 245.9, 246.0, 246.1, 246.5, 246.8, 247.0, 247.1, 247.2, 247.5, 247.8, 247.9, 248.1, 248.3, 249.0, 249.4, 249.4, 249.7, 249.7, 249.9, 250.1, 250.5, 250.6, 250.8, 251.4, 251.6, 251.9, 252.1, 252.3, 252.5, 252.9, 253.0, 253.2, 253.4, 253.5, 253.9, 254.0, 254.2, 254.4, 254.7, 254.8, 254.9, 255.1, 255.2, 255.3, 255.6, 255.7, 255.8, 255.9, 256.1, 256.3, 256.5, 256.6, 256.7, 256.7, 257.0, 257.5, 257.9, 258.2, 258.2, 258.3, 258.6, 259.0, 259.0, 259.2, 259.6, 259.8, 259.9, 260.1, 260.4, 260.6, 260.8, 261.3, 261.6, 261.8, 262.0, 262.2, 262.4, 262.7, 262.9, 263.2, 263.3, 263.6, 263.7, 263.9, 263.9, 264.1, 264.2, 264.7, 264.8, 265.0, 265.3, 265.5, 266.0, 266.7, 267.4, 267.6, 268.1, 268.2, 268.2, 268.5, 268.7, 269.0, 269.2, 269.3, 269.7, 269.8, 270.0, 270.1, 270.2, 270.6, 270.9, 271.5, 271.8, 271.8, 271.9, 272.1, 272.8, 273.1, 273.4, 274.0, 274.7, 275.0, 275.8, 276.6, 276.9, 277.6, 277.8, 278.4, 
279.1, 279.2, 279.6, 279.8, 279.8, 280.2, 280.5, 280.8, 281.1, 281.5, 281.9, 282.5, 282.8, 283.2, 284.4, 284.6, 285.1, 285.3, 285.4, 285.9, 286.5, 286.7, 286.9, 287.6, 287.7, 288.0, 288.8, 289.2, 289.3, 289.5, 289.6, 290.0, 291.0, 291.6, 291.8, 292.1, 293.3, 293.9, 294.8, 297.1, 298.9, 299.0, 300.0, 302.0, 302.8, 304.2, 304.3, 305.4, 306.2, 306.6, 307.6, 309.2, 310.5, 311.1, 311.6, 312.1, 312.8, 314.1, 316.7, 320.7, 323.0, 326.4, 329.2, 333.5, 345.8, 352.2, 364.9, 381.9 ] ] - } - } - } - } - }, { - "name" : "Night Calls", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 100.22717531075868, - "sum" : 233830.0, - "std_dev" : 19.28202933863294, - "min" : 42.0, - "max" : 175.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 42.0, - "upper_bound" : 55.3, - "count" : 20.0 - }, { - "lower_bound" : 55.3, - "upper_bound" : 68.6, - "count" : 101.0 - }, { - "lower_bound" : 68.6, - "upper_bound" : 81.9, - "count" : 281.0 - }, { - "lower_bound" : 81.9, - "upper_bound" : 95.2, - "count" : 540.0 - }, { - "lower_bound" : 95.2, - "upper_bound" : 108.5, - "count" : 604.0 - }, { - "lower_bound" : 108.5, - "upper_bound" : 121.8, - "count" : 493.0 - }, { - "lower_bound" : 121.8, - "upper_bound" : 135.1, - "count" : 212.0 - }, { - "lower_bound" : 135.1, - "upper_bound" : 148.4, - "count" : 66.0 - }, { - "lower_bound" : 148.4, - "upper_bound" : 161.7, - "count" : 15.0 - }, { - "lower_bound" : 161.7, - "upper_bound" : 175.0, - "count" : 1.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 123.0, 120.0, 101.0, 97.0, 129.0, 87.0, 112.0, 119.0, 76.0, 84.0, 131.0, 113.0, 111.0, 83.0, 84.0, 78.0, 67.0, 93.0, 61.0, 91.0, 78.0, 138.0, 104.0, 82.0, 96.0, 91.0, 96.0, 93.0, 105.0, 86.0, 96.0, 94.0, 112.0, 88.0, 107.0, 115.0, 81.0, 84.0, 56.0, 97.0, 63.0, 89.0, 101.0, 112.0, 75.0, 123.0, 106.0, 98.0, 175.0, 121.0, 76.0, 101.0, 107.0, 84.0, 99.0, 121.0, 77.0, 118.0, 78.0, 109.0, 108.0, 113.0, 96.0, 83.0, 114.0, 125.0, 109.0, 92.0, 103.0, 100.0, 87.0, 82.0, 123.0, 98.0, 103.0, 145.0, 107.0, 78.0, 133.0, 106.0, 96.0, 73.0, 107.0, 67.0, 107.0, 108.0, 124.0, 76.0, 110.0, 117.0, 90.0, 116.0, 84.0, 100.0, 110.0, 89.0, 90.0, 117.0, 60.0, 77.0, 102.0, 80.0, 98.0, 111.0, 105.0, 111.0, 87.0, 111.0, 104.0, 110.0, 102.0, 75.0, 139.0, 96.0, 96.0, 104.0, 111.0, 96.0, 120.0, 53.0, 119.0, 129.0, 116.0, 122.0, 82.0, 98.0, 111.0, 122.0, 88.0, 151.0, 72.0, 106.0, 99.0, 110.0, 102.0, 121.0, 98.0, 101.0, 116.0, 126.0, 118.0, 113.0, 128.0, 117.0, 84.0, 135.0, 89.0, 85.0, 101.0, 74.0, 102.0, 120.0, 113.0, 88.0, 84.0, 74.0, 90.0, 72.0, 109.0, 117.0, 63.0, 92.0, 90.0, 111.0, 82.0, 102.0, 66.0, 96.0, 100.0, 64.0, 119.0, 96.0, 112.0, 76.0, 117.0, 96.0, 142.0, 94.0, 98.0, 139.0, 137.0, 146.0, 101.0, 91.0, 65.0, 118.0, 71.0, 96.0, 65.0, 72.0, 95.0, 75.0, 132.0, 74.0, 84.0, 105.0, 92.0, 89.0, 83.0, 103.0, 106.0, 124.0, 87.0, 115.0, 107.0, 97.0, 130.0, 89.0, 88.0, 62.0, 106.0, 85.0, 67.0, 91.0, 115.0, 101.0, 79.0, 91.0, 94.0, 113.0, 88.0, 120.0, 112.0, 126.0, 104.0, 42.0, 100.0, 106.0, 86.0, 77.0, 104.0, 133.0, 109.0, 125.0, 89.0, 119.0, 94.0, 112.0, 84.0, 96.0, 91.0, 84.0, 71.0, 133.0, 74.0, 71.0, 81.0, 98.0, 102.0, 101.0, 103.0, 86.0, 93.0, 78.0, 96.0, 72.0, 81.0, 118.0, 117.0, 136.0, 104.0, 120.0, 99.0, 129.0, 100.0, 102.0, 90.0, 117.0, 101.0, 91.0, 124.0, 113.0, 80.0, 81.0, 100.0, 115.0, 97.0, 72.0, 92.0, 135.0, 86.0, 120.0, 96.0 ], [ 44.0, 49.0, 50.0, 51.0, 52.0, 53.0, 53.0, 54.0, 55.0, 57.0, 57.0, 
57.0, 58.0, 58.0, 59.0, 59.0, 59.0, 60.0, 60.0, 60.0, 61.0, 61.0, 61.0, 62.0, 62.0, 63.0, 63.0, 63.0, 63.0, 64.0, 64.0, 64.0, 64.0, 65.0, 65.0, 65.0, 66.0, 66.0, 66.0, 66.0, 67.0, 67.0, 67.0, 67.0, 67.0, 68.0, 68.0, 68.0, 68.0, 68.0, 68.0, 68.0, 68.0, 69.0, 69.0, 69.0, 69.0, 69.0, 70.0, 70.0, 70.0, 70.0, 71.0, 71.0, 71.0, 71.0, 71.0, 71.0, 71.0, 71.0, 72.0, 72.0, 72.0, 72.0, 72.0, 72.0, 72.0, 73.0, 73.0, 73.0, 73.0, 73.0, 73.0, 73.0, 73.0, 73.0, 74.0, 74.0, 74.0, 74.0, 74.0, 74.0, 74.0, 74.0, 74.0, 74.0, 74.0, 74.0, 74.0, 75.0, 75.0, 75.0, 75.0, 75.0, 75.0, 75.0, 76.0, 76.0, 76.0, 76.0, 76.0, 76.0, 76.0, 76.0, 76.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 77.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 78.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 79.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 80.0, 81.0, 81.0, 81.0, 81.0, 81.0, 81.0, 81.0, 81.0, 81.0, 81.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 82.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 83.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 84.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 85.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 86.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 87.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 88.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 89.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 90.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 91.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 92.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 93.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 94.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 95.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 96.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 97.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 98.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 99.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 101.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 102.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 103.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 104.0, 
104.0, 104.0, 104.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 105.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 106.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 107.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 108.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 109.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 110.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 111.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 112.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 113.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 114.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 115.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 116.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 117.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 118.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 119.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 120.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 121.0, 122.0, 122.0, 122.0, 122.0, 122.0, 122.0, 122.0, 122.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 123.0, 124.0, 124.0, 124.0, 124.0, 124.0, 124.0, 124.0, 124.0, 125.0, 125.0, 125.0, 125.0, 125.0, 125.0, 125.0, 125.0, 125.0, 126.0, 126.0, 126.0, 126.0, 126.0, 127.0, 127.0, 127.0, 127.0, 127.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 128.0, 129.0, 129.0, 129.0, 129.0, 129.0, 129.0, 129.0, 130.0, 130.0, 130.0, 130.0, 130.0, 130.0, 131.0, 131.0, 131.0, 131.0, 131.0, 131.0, 131.0, 131.0, 131.0, 132.0, 132.0, 132.0, 132.0, 132.0, 133.0, 133.0, 133.0, 134.0, 134.0, 134.0, 134.0, 135.0, 135.0, 136.0, 136.0, 136.0, 136.0, 137.0, 137.0, 137.0, 137.0, 138.0, 138.0, 138.0, 138.0, 139.0, 139.0, 140.0, 140.0, 140.0, 141.0, 142.0, 142.0, 143.0, 144.0, 144.0, 145.0, 145.0, 146.0, 147.0, 148.0, 148.0, 151.0, 151.0, 153.0, 154.0, 155.0, 156.0, 157.0 ] ] - } - } - } - } - }, { - "name" : "Intl Mins", - "inferred_type" : "Fractional", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 10.253064723531933, - "sum" : 23920.4, - "std_dev" : 2.7787657409289586, - "min" : 0.0, - "max" : 18.4, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 1.8399999999999999, - "count" : 15.0 - }, { - "lower_bound" : 1.8399999999999999, - "upper_bound" : 
3.6799999999999997, - "count" : 15.0 - }, { - "lower_bound" : 3.6799999999999997, - "upper_bound" : 5.52, - "count" : 79.0 - }, { - "lower_bound" : 5.52, - "upper_bound" : 7.359999999999999, - "count" : 226.0 - }, { - "lower_bound" : 7.359999999999999, - "upper_bound" : 9.2, - "count" : 427.0 - }, { - "lower_bound" : 9.2, - "upper_bound" : 11.04, - "count" : 644.0 - }, { - "lower_bound" : 11.04, - "upper_bound" : 12.879999999999999, - "count" : 529.0 - }, { - "lower_bound" : 12.879999999999999, - "upper_bound" : 14.719999999999999, - "count" : 296.0 - }, { - "lower_bound" : 14.719999999999999, - "upper_bound" : 16.56, - "count" : 82.0 - }, { - "lower_bound" : 16.56, - "upper_bound" : 18.4, - "count" : 20.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 10.0, 11.2, 9.3, 6.3, 11.3, 10.5, 0.0, 9.7, 8.2, 10.5, 8.5, 11.4, 9.7, 14.5, 8.0, 14.6, 11.0, 7.4, 8.4, 8.8, 11.3, 14.2, 10.4, 4.1, 15.4, 13.9, 8.0, 8.6, 9.4, 9.9, 5.6, 9.2, 8.2, 14.4, 8.0, 17.2, 14.2, 12.3, 6.7, 11.6, 11.9, 11.9, 11.3, 12.9, 13.4, 8.7, 16.9, 9.7, 5.9, 11.0, 6.5, 7.1, 8.0, 11.0, 8.0, 8.4, 7.6, 7.5, 11.6, 10.8, 11.9, 18.0, 7.1, 6.8, 13.0, 8.9, 12.6, 11.2, 7.2, 12.4, 9.4, 10.8, 10.9, 11.4, 6.8, 7.9, 10.5, 10.0, 9.7, 10.6, 8.5, 10.8, 10.5, 7.6, 17.3, 15.4, 12.2, 5.6, 13.7, 7.5, 8.7, 12.8, 12.0, 12.8, 10.0, 5.9, 14.0, 17.0, 7.8, 7.6, 9.8, 11.5, 11.6, 10.3, 7.7, 12.7, 11.0, 7.7, 9.4, 10.7, 12.7, 11.3, 8.8, 11.9, 13.7, 9.5, 12.2, 6.5, 12.1, 12.2, 10.9, 6.8, 10.0, 8.3, 12.6, 14.6, 8.2, 8.0, 12.1, 3.8, 7.4, 10.9, 10.7, 6.4, 12.4, 10.0, 11.4, 5.7, 8.2, 9.2, 10.0, 5.6, 9.0, 6.7, 10.3, 14.6, 15.8, 12.7, 13.0, 6.7, 11.0, 9.2, 7.6, 13.8, 10.4, 0.0, 9.6, 13.8, 7.8, 7.0, 9.8, 10.4, 14.5, 13.8, 14.7, 11.8, 13.2, 11.7, 12.1, 6.7, 13.2, 9.9, 9.7, 5.8, 9.7, 10.8, 10.8, 8.3, 12.1, 8.2, 11.8, 9.9, 8.7, 8.0, 10.3, 10.4, 4.6, 11.6, 9.7, 14.4, 10.0, 2.1, 12.6, 13.0, 10.7, 11.1, 14.6, 2.0, 13.8, 9.7, 9.9, 4.1, 7.6, 10.2, 12.4, 8.7, 8.1, 10.5, 9.7, 13.2, 5.7, 11.1, 10.5, 10.6, 11.9, 12.3, 2.2, 7.9, 11.4, 13.4, 8.3, 13.3, 13.6, 11.0, 13.3, 11.4, 10.9, 12.2, 12.4, 14.1, 9.6, 6.6, 4.9, 11.5, 13.1, 10.3, 9.0, 16.1, 8.3, 8.9, 10.3, 14.5, 7.6, 7.2, 10.1, 8.0, 6.9, 11.0, 9.5, 11.9, 6.7, 11.9, 12.7, 13.2, 12.4, 10.8, 11.3, 8.4, 16.6, 16.2, 0.0, 9.1, 11.7, 6.4, 10.0, 10.7, 6.6, 12.9, 8.3, 10.6, 9.0, 6.0, 13.3, 9.9, 10.3, 11.8, 8.6, 13.3, 11.5, 8.8, 11.4, 16.5, 10.6 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 1.1, 2.0, 2.5, 2.9, 3.3, 3.5, 3.6, 3.7, 3.8, 3.8, 4.1, 4.2, 4.2, 4.3, 4.3, 4.4, 4.5, 4.6, 4.7, 4.7, 4.7, 4.8, 4.8, 4.9, 5.0, 5.0, 5.0, 5.1, 5.1, 5.1, 5.2, 5.3, 5.3, 5.3, 5.3, 5.3, 5.4, 5.4, 5.4, 5.4, 5.5, 5.5, 5.5, 5.5, 5.6, 5.6, 5.6, 5.7, 5.7, 5.8, 5.8, 5.8, 5.8, 5.9, 5.9, 5.9, 5.9, 5.9, 5.9, 5.9, 6.0, 6.0, 6.0, 6.1, 6.1, 6.1, 6.1, 6.2, 6.2, 6.3, 6.3, 6.3, 6.3, 6.3, 6.3, 6.4, 6.4, 6.4, 6.4, 6.4, 6.4, 6.4, 6.5, 6.5, 6.5, 6.5, 6.5, 6.5, 6.6, 6.6, 6.6, 6.6, 6.6, 6.6, 6.6, 6.7, 6.7, 6.7, 6.7, 6.7, 6.8, 6.8, 6.8, 6.8, 6.8, 6.8, 6.9, 6.9, 6.9, 6.9, 6.9, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.1, 7.1, 7.1, 7.1, 7.1, 7.1, 7.1, 7.2, 7.2, 7.2, 7.2, 7.2, 7.2, 7.2, 7.3, 7.3, 7.3, 7.3, 7.3, 7.3, 7.3, 7.3, 7.3, 7.4, 7.4, 7.4, 7.4, 7.4, 7.4, 7.4, 7.5, 7.5, 7.5, 7.5, 7.5, 7.5, 7.5, 7.5, 7.5, 7.5, 7.5, 7.5, 7.6, 7.6, 7.6, 7.6, 7.6, 7.6, 7.7, 7.7, 7.7, 7.7, 7.7, 7.7, 7.7, 7.7, 7.7, 7.7, 7.8, 7.8, 7.8, 7.8, 7.8, 7.8, 7.8, 7.8, 7.8, 7.8, 7.8, 7.8, 7.9, 7.9, 7.9, 7.9, 7.9, 7.9, 7.9, 7.9, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.1, 8.1, 8.1, 8.1, 8.1, 8.1, 8.2, 8.2, 8.2, 8.2, 8.2, 8.2, 8.2, 8.2, 8.2, 8.2, 8.2, 8.3, 8.3, 8.3, 8.3, 8.3, 8.3, 
8.3, 8.3, 8.3, 8.4, 8.4, 8.4, 8.4, 8.4, 8.4, 8.4, 8.4, 8.4, 8.4, 8.4, 8.4, 8.4, 8.4, 8.5, 8.5, 8.5, 8.5, 8.5, 8.5, 8.5, 8.5, 8.5, 8.5, 8.6, 8.6, 8.6, 8.6, 8.6, 8.6, 8.6, 8.6, 8.6, 8.6, 8.7, 8.7, 8.7, 8.7, 8.7, 8.7, 8.7, 8.7, 8.7, 8.7, 8.8, 8.8, 8.8, 8.8, 8.8, 8.8, 8.8, 8.8, 8.8, 8.8, 8.8, 8.8, 8.9, 8.9, 8.9, 8.9, 8.9, 8.9, 8.9, 8.9, 8.9, 8.9, 8.9, 8.9, 8.9, 8.9, 8.9, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.1, 9.1, 9.1, 9.1, 9.1, 9.1, 9.1, 9.1, 9.1, 9.1, 9.1, 9.1, 9.1, 9.1, 9.1, 9.2, 9.2, 9.2, 9.2, 9.2, 9.2, 9.2, 9.2, 9.2, 9.2, 9.2, 9.2, 9.2, 9.2, 9.3, 9.3, 9.3, 9.3, 9.3, 9.3, 9.3, 9.3, 9.3, 9.3, 9.3, 9.4, 9.4, 9.4, 9.4, 9.4, 9.4, 9.4, 9.4, 9.4, 9.4, 9.4, 9.4, 9.4, 9.4, 9.4, 9.4, 9.5, 9.5, 9.5, 9.5, 9.5, 9.5, 9.5, 9.5, 9.5, 9.5, 9.5, 9.5, 9.5, 9.5, 9.5, 9.5, 9.6, 9.6, 9.6, 9.6, 9.6, 9.6, 9.6, 9.6, 9.6, 9.6, 9.6, 9.6, 9.7, 9.7, 9.7, 9.7, 9.7, 9.7, 9.7, 9.7, 9.7, 9.7, 9.7, 9.7, 9.8, 9.8, 9.8, 9.8, 9.8, 9.8, 9.8, 9.8, 9.8, 9.8, 9.8, 9.8, 9.8, 9.8, 9.8, 9.8, 9.9, 9.9, 9.9, 9.9, 9.9, 9.9, 9.9, 9.9, 9.9, 9.9, 9.9, 9.9, 9.9, 9.9, 9.9, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.1, 10.1, 10.1, 10.1, 10.1, 10.1, 10.1, 10.1, 10.1, 10.1, 10.1, 10.1, 10.1, 10.1, 10.1, 10.1, 10.1, 10.2, 10.2, 10.2, 10.2, 10.2, 10.2, 10.2, 10.2, 10.2, 10.2, 10.2, 10.2, 10.2, 10.2, 10.2, 10.2, 10.2, 10.2, 10.2, 10.2, 10.2, 10.2, 10.3, 10.3, 10.3, 10.3, 10.3, 10.3, 10.3, 10.3, 10.3, 10.3, 10.3, 10.3, 10.4, 10.4, 10.4, 10.4, 10.4, 10.4, 10.4, 10.4, 10.4, 10.5, 10.5, 10.5, 10.5, 10.5, 10.5, 10.5, 10.5, 10.5, 10.5, 10.5, 10.5, 10.5, 10.5, 10.5, 10.5, 10.6, 10.6, 10.6, 10.6, 10.6, 10.6, 10.6, 10.6, 10.6, 10.6, 10.6, 10.6, 10.6, 10.6, 10.6, 10.6, 10.6, 10.6, 10.6, 10.7, 10.7, 10.7, 10.7, 10.7, 10.7, 10.7, 10.7, 10.7, 10.7, 10.7, 10.8, 10.8, 10.8, 10.8, 10.8, 10.8, 10.8, 10.8, 10.8, 10.8, 10.8, 10.8, 10.9, 10.9, 10.9, 10.9, 10.9, 10.9, 10.9, 10.9, 10.9, 10.9, 10.9, 10.9, 10.9, 10.9, 10.9, 10.9, 10.9, 10.9, 11.0, 11.0, 11.0, 11.0, 11.0, 11.0, 11.0, 11.0, 11.0, 11.0, 11.0, 11.0, 11.0, 11.0, 11.0, 11.0, 11.0, 11.1, 11.1, 11.1, 11.1, 11.1, 11.1, 11.1, 11.1, 11.1, 11.1, 11.1, 11.1, 11.1, 11.2, 11.2, 11.2, 11.2, 11.2, 11.2, 11.2, 11.2, 11.2, 11.2, 11.2, 11.2, 11.2, 11.2, 11.2, 11.3, 11.3, 11.3, 11.3, 11.3, 11.3, 11.3, 11.3, 11.3, 11.3, 11.3, 11.3, 11.3, 11.3, 11.3, 11.3, 11.3, 11.3, 11.4, 11.4, 11.4, 11.4, 11.4, 11.4, 11.4, 11.4, 11.4, 11.4, 11.4, 11.4, 11.4, 11.4, 11.4, 11.5, 11.5, 11.5, 11.5, 11.5, 11.5, 11.5, 11.5, 11.5, 11.5, 11.5, 11.5, 11.5, 11.5, 11.5, 11.5, 11.5, 11.5, 11.6, 11.6, 11.6, 11.6, 11.6, 11.6, 11.6, 11.6, 11.6, 11.6, 11.6, 11.6, 11.6, 11.6, 11.6, 11.7, 11.7, 11.7, 11.7, 11.7, 11.7, 11.7, 11.7, 11.7, 11.7, 11.7, 11.7, 11.8, 11.8, 11.8, 11.8, 11.8, 11.8, 11.8, 11.8, 11.8, 11.8, 11.8, 11.8, 11.8, 11.8, 11.8, 11.9, 11.9, 11.9, 11.9, 11.9, 11.9, 11.9, 11.9, 11.9, 11.9, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.1, 12.1, 12.1, 12.1, 12.1, 12.1, 12.1, 12.1, 12.1, 12.1, 12.1, 12.1, 12.2, 12.2, 12.2, 12.2, 12.2, 12.2, 12.2, 12.2, 12.2, 12.2, 12.2, 12.3, 12.3, 12.3, 12.3, 12.3, 12.3, 12.3, 12.3, 12.3, 12.3, 12.3, 12.3, 12.4, 12.4, 12.4, 12.4, 12.4, 12.4, 12.4, 12.4, 12.4, 12.5, 12.5, 12.5, 12.5, 12.5, 12.5, 12.5, 12.5, 12.5, 12.5, 12.5, 12.6, 12.6, 12.6, 12.6, 12.6, 12.6, 12.6, 12.6, 12.6, 12.6, 12.6, 12.7, 12.7, 12.7, 12.7, 12.7, 12.7, 12.7, 12.7, 12.7, 12.8, 12.8, 12.8, 12.8, 12.8, 12.8, 12.8, 12.8, 12.8, 12.8, 12.8, 12.9, 12.9, 12.9, 12.9, 12.9, 12.9, 12.9, 12.9, 12.9, 12.9, 
12.9, 12.9, 13.0, 13.0, 13.0, 13.0, 13.0, 13.0, 13.0, 13.0, 13.0, 13.1, 13.1, 13.1, 13.1, 13.1, 13.1, 13.1, 13.1, 13.1, 13.1, 13.1, 13.2, 13.2, 13.2, 13.2, 13.2, 13.2, 13.2, 13.2, 13.2, 13.2, 13.3, 13.3, 13.3, 13.3, 13.3, 13.3, 13.3, 13.3, 13.3, 13.3, 13.3, 13.3, 13.3, 13.4, 13.4, 13.4, 13.4, 13.4, 13.4, 13.4, 13.4, 13.4, 13.4, 13.5, 13.5, 13.5, 13.5, 13.5, 13.5, 13.5, 13.5, 13.6, 13.6, 13.6, 13.6, 13.7, 13.7, 13.7, 13.7, 13.7, 13.8, 13.8, 13.8, 13.8, 13.8, 13.8, 13.8, 13.9, 13.9, 13.9, 13.9, 13.9, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.1, 14.1, 14.1, 14.1, 14.1, 14.1, 14.2, 14.2, 14.2, 14.2, 14.3, 14.3, 14.3, 14.3, 14.3, 14.4, 14.4, 14.4, 14.4, 14.5, 14.5, 14.5, 14.6, 14.6, 14.6, 14.7, 14.7, 14.7, 14.7, 14.8, 14.8, 14.8, 14.8, 14.9, 14.9, 15.0, 15.0, 15.0, 15.1, 15.1, 15.1, 15.2, 15.2, 15.3, 15.3, 15.4, 15.4, 15.5, 15.5, 15.6, 15.6, 15.6, 15.6, 15.7, 15.7, 15.8, 15.8, 15.9, 15.9, 16.0, 16.1, 16.2, 16.3, 16.4, 16.4, 16.5, 16.5, 16.7, 16.7, 16.9, 17.3, 17.5, 17.6, 18.2 ] ] - } - } - } - } - }, { - "name" : "Intl Calls", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 4.4950707243891985, - "sum" : 10487.0, - "std_dev" : 2.4881367315548273, - "min" : 0.0, - "max" : 20.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 2.0, - "count" : 124.0 - }, { - "lower_bound" : 2.0, - "upper_bound" : 4.0, - "count" : 805.0 - }, { - "lower_bound" : 4.0, - "upper_bound" : 6.0, - "count" : 765.0 - }, { - "lower_bound" : 6.0, - "upper_bound" : 8.0, - "count" : 383.0 - }, { - "lower_bound" : 8.0, - "upper_bound" : 10.0, - "count" : 157.0 - }, { - "lower_bound" : 10.0, - "upper_bound" : 12.0, - "count" : 62.0 - }, { - "lower_bound" : 12.0, - "upper_bound" : 14.0, - "count" : 25.0 - }, { - "lower_bound" : 14.0, - "upper_bound" : 16.0, - "count" : 6.0 - }, { - "lower_bound" : 16.0, - "upper_bound" : 18.0, - "count" : 4.0 - }, { - "lower_bound" : 18.0, - "upper_bound" : 20.0, - "count" : 2.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 6.0, 2.0, 11.0, 3.0, 3.0, 6.0, 0.0, 13.0, 3.0, 2.0, 2.0, 2.0, 2.0, 4.0, 5.0, 15.0, 9.0, 5.0, 9.0, 5.0, 5.0, 3.0, 8.0, 2.0, 8.0, 5.0, 5.0, 4.0, 6.0, 4.0, 5.0, 4.0, 5.0, 2.0, 13.0, 9.0, 4.0, 5.0, 5.0, 4.0, 3.0, 6.0, 2.0, 3.0, 3.0, 6.0, 4.0, 6.0, 2.0, 5.0, 5.0, 5.0, 3.0, 4.0, 1.0, 3.0, 4.0, 7.0, 4.0, 3.0, 4.0, 5.0, 6.0, 3.0, 2.0, 1.0, 5.0, 5.0, 6.0, 4.0, 6.0, 7.0, 2.0, 4.0, 3.0, 5.0, 6.0, 11.0, 4.0, 2.0, 2.0, 2.0, 8.0, 5.0, 3.0, 4.0, 6.0, 11.0, 3.0, 11.0, 4.0, 3.0, 7.0, 3.0, 4.0, 2.0, 6.0, 6.0, 1.0, 6.0, 5.0, 3.0, 1.0, 3.0, 6.0, 2.0, 9.0, 2.0, 3.0, 9.0, 4.0, 2.0, 1.0, 9.0, 3.0, 13.0, 2.0, 4.0, 1.0, 2.0, 5.0, 3.0, 3.0, 2.0, 5.0, 6.0, 5.0, 6.0, 5.0, 4.0, 2.0, 7.0, 9.0, 2.0, 9.0, 2.0, 3.0, 4.0, 2.0, 6.0, 2.0, 4.0, 3.0, 2.0, 1.0, 5.0, 7.0, 6.0, 3.0, 2.0, 3.0, 1.0, 6.0, 8.0, 15.0, 0.0, 9.0, 4.0, 2.0, 10.0, 5.0, 6.0, 3.0, 2.0, 9.0, 2.0, 8.0, 4.0, 4.0, 3.0, 2.0, 2.0, 6.0, 3.0, 4.0, 7.0, 3.0, 3.0, 4.0, 5.0, 9.0, 1.0, 4.0, 4.0, 7.0, 7.0, 4.0, 7.0, 5.0, 4.0, 5.0, 5.0, 4.0, 2.0, 5.0, 2.0, 3.0, 7.0, 3.0, 7.0, 10.0, 5.0, 4.0, 2.0, 6.0, 4.0, 10.0, 11.0, 3.0, 4.0, 7.0, 3.0, 3.0, 8.0, 3.0, 5.0, 4.0, 3.0, 4.0, 6.0, 3.0, 2.0, 8.0, 5.0, 2.0, 4.0, 6.0, 3.0, 6.0, 3.0, 4.0, 2.0, 10.0, 2.0, 9.0, 2.0, 3.0, 6.0, 6.0, 6.0, 8.0, 4.0, 4.0, 2.0, 3.0, 3.0, 4.0, 1.0, 10.0, 3.0, 2.0, 5.0, 4.0, 2.0, 5.0, 6.0, 4.0, 2.0, 3.0, 2.0, 0.0, 4.0, 4.0, 6.0, 4.0, 5.0, 5.0, 1.0, 4.0, 6.0, 1.0, 3.0, 3.0, 3.0, 4.0, 4.0, 6.0, 6.0, 7.0, 7.0, 8.0, 7.0, 1.0 ], 
[ 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 
5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 11.0, 11.0, 11.0, 11.0, 11.0, 11.0, 11.0, 11.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 13.0, 13.0, 13.0, 13.0, 13.0, 14.0, 15.0, 16.0, 17.0, 19.0 ] ] - } - } - } - } - }, { - "name" : "CustServ Calls", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 1.5803686240891557, - "sum" : 3687.0, - "std_dev" : 1.307464947005547, - "min" : 0.0, - "max" : 8.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.8, - "count" : 482.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 1.6, - "count" : 804.0 - }, { - "lower_bound" : 1.6, - "upper_bound" : 2.4, - "count" : 551.0 - }, { - "lower_bound" : 2.4, - "upper_bound" : 3.2, - "count" : 312.0 - }, { - "lower_bound" : 3.2, - "upper_bound" : 4.0, - "count" : 0.0 - }, { - "lower_bound" : 4.0, - "upper_bound" : 4.8, - "count" : 115.0 - }, { - "lower_bound" : 4.8, - "upper_bound" : 5.6, - "count" : 45.0 - }, { - "lower_bound" : 5.6, - "upper_bound" : 6.4, - "count" : 17.0 - }, { - "lower_bound" : 6.4, - "upper_bound" : 7.2, - "count" : 6.0 - }, { - "lower_bound" : 7.2, - "upper_bound" : 8.0, - "count" : 1.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 1.0, 0.0, 0.0, 2.0, 3.0, 2.0, 3.0, 2.0, 4.0, 4.0, 1.0, 0.0, 2.0, 3.0, 4.0, 0.0, 2.0, 2.0, 2.0, 2.0, 5.0, 2.0, 8.0, 0.0, 3.0, 1.0, 1.0, 1.0, 0.0, 3.0, 2.0, 0.0, 0.0, 4.0, 3.0, 2.0, 0.0, 3.0, 3.0, 3.0, 4.0, 2.0, 1.0, 1.0, 1.0, 3.0, 0.0, 1.0, 2.0, 2.0, 1.0, 0.0, 4.0, 3.0, 1.0, 3.0, 1.0, 3.0, 0.0, 3.0, 0.0, 1.0, 0.0, 1.0, 1.0, 2.0, 1.0, 1.0, 3.0, 2.0, 1.0, 1.0, 3.0, 1.0, 1.0, 1.0, 3.0, 2.0, 4.0, 1.0, 2.0, 2.0, 2.0, 1.0, 2.0, 2.0, 0.0, 1.0, 1.0, 1.0, 1.0, 5.0, 1.0, 0.0, 3.0, 0.0, 0.0, 1.0, 1.0, 1.0, 2.0, 0.0, 0.0, 0.0, 2.0, 1.0, 5.0, 3.0, 3.0, 0.0, 3.0, 1.0, 2.0, 1.0, 3.0, 1.0, 2.0, 0.0, 0.0, 1.0, 0.0, 1.0, 4.0, 6.0, 1.0, 1.0, 2.0, 3.0, 1.0, 0.0, 0.0, 2.0, 3.0, 0.0, 1.0, 1.0, 2.0, 1.0, 3.0, 0.0, 1.0, 2.0, 0.0, 0.0, 0.0, 2.0, 3.0, 1.0, 3.0, 2.0, 0.0, 3.0, 3.0, 2.0, 3.0, 1.0, 3.0, 3.0, 0.0, 0.0, 3.0, 2.0, 
[... elided: the remainder of the preceding feature's KLL sketch "data" array, followed by the deleted "numerical_statistics" entries for the one-hot State indicator columns State_AK, State_AL, State_AR, State_AZ, State_CA, State_CO, State_CT, State_DC, and State_DE from the removed model-monitoring statistics JSON file. Each entry has "inferred_type" : "Integral", num_present 2333, num_missing 0, min 0.0, max 1.0, a mean/sum/std_dev triple (sums: AK 34, AL 50, AR 44, AZ 48, CA 23, CO 49, CT 50, DC 37, DE 44), and a 10-bucket KLL "distribution" whose sketch uses parameters c = 0.64, k = 2048.0; the raw sketch arrays are omitted here, and the State_DE entry is cut off mid-array. ...]
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_FL", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.018431204457779682, - "sum" : 43.0, - "std_dev" : 0.1345046287679914, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2290.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 43.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_GA", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.015430775825117874, - "sum" : 36.0, - "std_dev" : 0.12325853715890364, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2298.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 35.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_HI", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.016716673810544362, - "sum" : 39.0, - "std_dev" : 0.12820774792209796, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2294.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 39.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_IA", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.014144877839691384, - "sum" : 33.0, - "std_dev" : 0.11808810384874349, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2300.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 33.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_ID", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.022288898414059153, - "sum" : 52.0, - "std_dev" : 0.1476214869913827, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2282.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 51.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_IL", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.016288041148735534, - "sum" : 38.0, - "std_dev" : 0.12658096564757523, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2295.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 
- }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 38.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_IN", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 
0.020145735105015, - "sum" : 47.0, - "std_dev" : 0.14049869914733581, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2286.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 47.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
[Deleted baseline statistics file content (continued): "numerical_statistics" entries with KLL-sketch distributions for the one-hot state indicator features State_KS (mean 0.0219, sum 51.0, std_dev 0.1462), State_KY (mean 0.0189, sum 44.0, std_dev 0.1360), State_LA (mean 0.0176, sum 41.0, std_dev 0.1314), State_MA (mean 0.0154, sum 36.0, std_dev 0.1233), State_MD (mean 0.0206, sum 48.0, std_dev 0.1420), State_ME (mean 0.0176, sum 41.0, std_dev 0.1314), State_MI (mean 0.0184, sum 43.0, std_dev 0.1345), State_MN (mean 0.0270, sum 63.0, std_dev 0.1621), and State_MO (mean 0.0210, sum 49.0, std_dev 0.1434). Each entry has inferred_type "Integral", num_present 2333, num_missing 0, min 0.0, max 1.0, a ten-bucket [0.0, 1.0] distribution, and raw KLL sketch parameters (c 0.64, k 2048.0) with data arrays of 0.0/1.0 values; the sketch data arrays are omitted here.]
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_MS", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.02100300042863266, - "sum" : 49.0, - "std_dev" : 0.14339412261884207, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2284.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 49.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_MT", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.02100300042863266, - "sum" : 49.0, - "std_dev" : 0.14339412261884218, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2285.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 48.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_NC", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.017145306472353194, - "sum" : 40.0, - "std_dev" : 0.12981273026295337, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2293.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 
- }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 40.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_ND", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - 
"num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.017145306472353194, - "sum" : 40.0, - "std_dev" : 0.12981273026295312, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2293.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 40.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_NE", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.020574367766823833, - "sum" : 48.0, - "std_dev" : 0.14195444043008779, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2286.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 47.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_NH", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.018002571795970854, - "sum" : 42.0, - "std_dev" : 0.13296044225521278, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2291.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 42.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_NJ", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.020145735105015, - "sum" : 47.0, - "std_dev" : 0.1404986991473358, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2286.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 47.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_NM", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.020574367766823833, - "sum" : 48.0, - "std_dev" : 0.1419544404300877, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2286.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 47.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_NV", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.022288898414059153, - "sum" : 52.0, - "std_dev" : 0.14762148699138244, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2281.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - 
"upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 52.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_NY", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.02486069438491213, - "sum" : 58.0, - "std_dev" : 0.15570048252851415, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - 
"upper_bound" : 0.1, - "count" : 2275.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 58.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_OH", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.020574367766823833, - "sum" : 48.0, - "std_dev" : 0.14195444043008773, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2286.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 47.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_OK", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.016716673810544362, - "sum" : 39.0, - "std_dev" : 0.12820774792209788, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2294.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 39.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_OR", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.022288898414059153, - "sum" : 52.0, - "std_dev" : 0.1476214869913825, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2282.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 51.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_PA", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.012430347192456065, - "sum" : 29.0, - "std_dev" : 0.1107963612269422, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2304.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 29.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_RI", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.01928846978139734, - "sum" : 45.0, - "std_dev" : 0.13753699398667063, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2289.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 44.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
-            [elided: remainder of the KLL sketch "data" arrays and the deleted per-feature "numerical_statistics" blocks for the one-hot state indicator columns State_SC, State_SD, State_TN, State_TX, State_UT, State_VA, State_VT, and State_WA. Each block reports "inferred_type" : "Integral", "num_present" : 2333, "num_missing" : 0, "min" : 0.0, "max" : 1.0, sketch parameters "c" : 0.64, "k" : 2048.0, and a 10-bucket KLL distribution concentrated in the [0.0, 0.1] and [0.9, 1.0] buckets. Recoverable summary values: State_SC mean 0.0159, sum 37; State_SD mean 0.0184, sum 43; State_TN mean 0.0159, sum 37; State_TX mean 0.0219, sum 51; State_UT mean 0.0236, sum 55; State_VA mean 0.0231, sum 54; State_VT mean 0.0240, sum 56; State_WA mean 0.0201, sum 47. The raw 0.0/1.0 sketch array values are omitted here.]
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_WI", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.021431633090441493, - "sum" : 50.0, - "std_dev" : 0.1448182246594614, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2283.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 50.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_WV", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.034719245606515216, - "sum" : 81.0, - "std_dev" : 0.18306780053037638, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2252.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 81.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "State_WY", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.02271753107586798, - "sum" : 53.0, - "std_dev" : 0.14900149280354524, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2280.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 53.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "Area Code_408", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.24303471924560652, - "sum" : 567.0, - "std_dev" : 0.4289158944208243, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 1766.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 567.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "Area Code_415", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.5023574796399486, - "sum" : 1172.0, - "std_dev" : 0.4999944422588591, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 1162.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 1171.0 - } ], - "sketch" : { - "parameters" : { - "c" : 
0.64, - "k" : 2048.0 - }, - "data" : [ [ 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "Area Code_510", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.2546078011144449, - "sum" : 594.0, - "std_dev" : 0.43564052695555344, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 1740.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 
0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 593.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 
1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "Int'l Plan_no", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.8984140591513073, - "sum" : 2096.0, - "std_dev" : 0.3021030245968729, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 237.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 2096.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "Int'l Plan_yes", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.10158594084869267, - "sum" : 237.0, - "std_dev" : 0.30210302459687277, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 2096.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 237.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "VMail Plan_no", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.7213887698242606, - "sum" : 1683.0, - "std_dev" : 0.4483157510011219, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 651.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 1682.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 
0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - }, { - "name" : "VMail Plan_yes", - "inferred_type" : "Integral", - "numerical_statistics" : { - "common" : { - "num_present" : 2333, - "num_missing" : 0 - }, - "mean" : 0.2786112301757394, - "sum" : 650.0, - "std_dev" : 0.448315751001122, - "min" : 0.0, - "max" : 1.0, - "distribution" : { - "kll" : { - "buckets" : [ { - "lower_bound" : 0.0, - "upper_bound" : 0.1, - "count" : 1684.0 - }, { - "lower_bound" : 0.1, - "upper_bound" : 0.2, - "count" : 0.0 - }, { - "lower_bound" : 0.2, - "upper_bound" : 0.3, - "count" : 0.0 - }, { - "lower_bound" : 0.3, - "upper_bound" : 0.4, - "count" : 0.0 - }, { - "lower_bound" : 0.4, - "upper_bound" : 0.5, - "count" : 0.0 - }, { - "lower_bound" : 0.5, - "upper_bound" : 0.6, - "count" : 0.0 - }, { - "lower_bound" : 0.6, - "upper_bound" : 0.7, - "count" : 0.0 - }, { - "lower_bound" : 0.7, - "upper_bound" : 0.8, - "count" : 0.0 - }, { - "lower_bound" : 0.8, - "upper_bound" : 0.9, - "count" : 0.0 - }, { - "lower_bound" : 0.9, - "upper_bound" : 1.0, - "count" : 649.0 - } ], - "sketch" : { - "parameters" : { - "c" : 0.64, - "k" : 2048.0 - }, - "data" : [ [ 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0 ], [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] ] - } - } - } - } - } ] -} \ No newline at end of file diff --git a/samples/contrib/aws-samples/hosting_model_monitor_pipeline/requirements.txt b/samples/contrib/aws-samples/hosting_model_monitor_pipeline/requirements.txt deleted file mode 100644 index 75e747b0daa..00000000000 --- a/samples/contrib/aws-samples/hosting_model_monitor_pipeline/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -sagemaker==2.237.3 -kfp==1.8.13 \ No newline at end of file diff --git a/samples/contrib/aws-samples/mnist-kmeans-sagemaker/README.md b/samples/contrib/aws-samples/mnist-kmeans-sagemaker/README.md deleted file mode 100644 index 7727c08cfe4..00000000000 --- a/samples/contrib/aws-samples/mnist-kmeans-sagemaker/README.md +++ /dev/null @@ -1,100 +0,0 @@ -# MNIST Classification with KMeans - -The `mnist-classification-pipeline.py` sample pipeline shows how to create an end to end ML workflow to train and deploy a model on SageMaker. We will train a classification model using Kmeans algorithm with MNIST dataset on SageMaker. Additionally, this sample also demonstrates how to use SageMaker components v1 and v2 together in a Kubeflow pipeline workflow. 
This example was taken from an existing [SageMaker example](https://github.com/awslabs/amazon-sagemaker-examples/blob/master/sagemaker-python-sdk/1P_kmeans_highlevel/kmeans_mnist.ipynb) and modified to work with the Amazon SageMaker Components for Kubeflow Pipelines. - -## Prerequisites - -1. Make sure you have completed all the prerequisites mentioned in this [README.md](https://github.com/kubeflow/pipelines/blob/master/samples/contrib/aws-samples/README.md). - -### Sample MNIST dataset - -1. Clone this repository to use the pipelines and sample scripts. - ``` - git clone https://github.com/kubeflow/pipelines.git - cd pipelines/samples/contrib/aws-samples/mnist-kmeans-sagemaker - ``` -The following commands will copy the data extraction pre-processing script to an S3 bucket which we will use to store artifacts for the pipeline. - -2. [Create a bucket](https://docs.aws.amazon.com/AmazonS3/latest/gsg/CreatingABucket.html) in the `us-east-1` region. -For the purposes of this demonstration, all resources will be created in the us-east-1 region. - - Specify your S3_BUCKET_NAME - ``` - export S3_BUCKET_NAME= - ``` - ``` - export SAGEMAKER_REGION=us-east-1 - if [[ $SAGEMAKER_REGION == "us-east-1" ]]; then - aws s3api create-bucket --bucket ${S3_BUCKET_NAME} --region ${SAGEMAKER_REGION} - else - aws s3api create-bucket --bucket ${S3_BUCKET_NAME} --region ${SAGEMAKER_REGION} \ - --create-bucket-configuration LocationConstraint=${SAGEMAKER_REGION} - fi - - echo ${S3_BUCKET_NAME} - ``` -3. Upload the `mnist-kmeans-sagemaker/kmeans_preprocessing.py` file to your bucket with the prefix `mnist_kmeans_example/processing_code/kmeans_preprocessing.py`. - ``` - aws s3 cp kmeans_preprocessing.py s3://${S3_BUCKET_NAME}/mnist_kmeans_example/processing_code/kmeans_preprocessing.py - ``` - -## Compile and run the pipelines - -1. To compile the pipeline run: `python mnist-classification-pipeline.py`. This will create a `tar.gz` file. -2. In the Kubeflow Pipelines UI, upload this compiled pipeline specification (the *.tar.gz* file) and click on create run. -3. Provide the SageMaker execution `role_arn` and the `bucket_name` you created as pipeline inputs. -4. Once the pipeline completes, you can go to `batch_transform_output` to check your batch prediction results. -You will also have a model endpoint in service. Refer to the [Prediction section](#Prediction) below to run predictions against your deployed model endpoint. - -## Prediction - -1. Find your endpoint name by: - - Checking the `sagemaker_resource_name` field under Output artifacts of the Endpoint component in the pipeline run. - ``` - export ENDPOINT_NAME= - ``` -2. Set up AWS credentials with `sagemaker:InvokeEndpoint` access. [Sample commands](https://sagemaker.readthedocs.io/en/v1.60.2/kubernetes/using_amazon_sagemaker_components.html#configure-permissions-to-run-predictions) -3. Run the script below to invoke the endpoint: - ``` - python invoke_endpoint.py $ENDPOINT_NAME - ``` - -## Cleaning up the endpoint - -You can find the model/endpoint configuration name in the `sagemaker_resource_name` field under Output artifacts of the EndpointConfig/Model component in the pipeline run. - -``` -export ENDPOINT_CONFIG_NAME= -export MODEL_NAME= -``` -To delete all the endpoint resources use: - -Note: The namespace for the standard kubeflow installation is "kubeflow". For multi-tenant installations the namespace is located at the left in the navigation bar. 
- -``` -export MY_KUBEFLOW_NAMESPACE= - -kubectl delete endpoint $ENDPOINT_NAME -n $MY_KUBEFLOW_NAMESPACE -kubectl delete endpointconfig $ENDPOINT_CONFIG_NAME -n $MY_KUBEFLOW_NAMESPACE -kubectl delete model $MODEL_NAME -n $MY_KUBEFLOW_NAMESPACE -``` - -## Components source - -Hyperparameter Tuning: - [source code](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/hyperparameter_tuning/src) - -Training: - [source code](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/TrainingJob/src) - - -Endpoint: - [source code](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/Endpoint/src) - -Endpoint Config: - [source code](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/EndpointConfig/src) - -Model: - [source code](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/Modelv2/src) - -Batch Transformation: - [source code](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/batch_transform/src) diff --git a/samples/contrib/aws-samples/mnist-kmeans-sagemaker/invoke_endpoint.py b/samples/contrib/aws-samples/mnist-kmeans-sagemaker/invoke_endpoint.py deleted file mode 100644 index 1b09c92b4b0..00000000000 --- a/samples/contrib/aws-samples/mnist-kmeans-sagemaker/invoke_endpoint.py +++ /dev/null @@ -1,40 +0,0 @@ -import json -import io -import boto3 -import pickle -import gzip -import numpy -import sys - - -if len(sys.argv) < 2: - print("Must pass your endpoint name") - exit(1) - -ENDPOINT_NAME = sys.argv[1] -# Simple function to create a csv from numpy array -def np2csv(arr): - csv = io.BytesIO() - numpy.savetxt(csv, arr, delimiter=",", fmt="%g") - return csv.getvalue().decode().rstrip() - - -# Prepare input for the model -# Load the dataset -s3 = boto3.client("s3") -s3.download_file( - "sagemaker-sample-files", "datasets/image/MNIST/mnist.pkl.gz", "mnist.pkl.gz" -) - -with gzip.open("mnist.pkl.gz", "rb") as f: - train_set, _, _ = pickle.load(f, encoding="latin1") - -payload = np2csv(train_set[0][30:31]) - -# Run prediction against the endpoint created by the pipeline -runtime = boto3.Session(region_name="us-east-1").client("sagemaker-runtime") -response = runtime.invoke_endpoint( - EndpointName=ENDPOINT_NAME, ContentType="text/csv", Body=payload -) -result = json.loads(response["Body"].read().decode()) -print(result) diff --git a/samples/contrib/aws-samples/mnist-kmeans-sagemaker/kmeans_preprocessing.py b/samples/contrib/aws-samples/mnist-kmeans-sagemaker/kmeans_preprocessing.py deleted file mode 100644 index 426bc0d86b5..00000000000 --- a/samples/contrib/aws-samples/mnist-kmeans-sagemaker/kmeans_preprocessing.py +++ /dev/null @@ -1,29 +0,0 @@ -import pickle -import gzip -import numpy -import io -from sagemaker.amazon.common import write_numpy_to_dense_tensor - -print("Extracting MNIST data set") -# Load the dataset -with gzip.open("/opt/ml/processing/input/mnist.pkl.gz", "rb") as f: - train_set, valid_set, test_set = pickle.load(f, encoding="latin1") - -# process the data -# Convert the training data into the format required by the SageMaker KMeans algorithm -print("Writing training data") -with open("/opt/ml/processing/output_train/train_data", "wb") as train_file: - write_numpy_to_dense_tensor(train_file, train_set[0], train_set[1]) - -print("Writing test data") -with open("/opt/ml/processing/output_test/test_data", "wb") as test_file: - write_numpy_to_dense_tensor(test_file, test_set[0], test_set[1]) - -print("Writing validation data") -# Convert the valid 
data into the format required by the SageMaker KMeans algorithm -numpy.savetxt( - "/opt/ml/processing/output_valid/valid-data.csv", - valid_set[0], - delimiter=",", - fmt="%g", -) diff --git a/samples/contrib/aws-samples/mnist-kmeans-sagemaker/mnist-classification-pipeline.py b/samples/contrib/aws-samples/mnist-kmeans-sagemaker/mnist-classification-pipeline.py deleted file mode 100644 index f7280b9d47a..00000000000 --- a/samples/contrib/aws-samples/mnist-kmeans-sagemaker/mnist-classification-pipeline.py +++ /dev/null @@ -1,243 +0,0 @@ -#!/usr/bin/env python3 - - -import ast -import json -import random -import kfp -from kfp import components -from kfp import dsl - -sagemaker_hpo_op = components.load_component_from_file( - "../../../../components/aws/sagemaker/hyperparameter_tuning/component.yaml" -) -sagemaker_process_op = components.load_component_from_file( - "../../../../components/aws/sagemaker/process/component.yaml" -) -sagemaker_train_op = components.load_component_from_file( - "../../../../components/aws/sagemaker/TrainingJob/component.yaml" -) -sagemaker_Model_op = components.load_component_from_file("../../../../components/aws/sagemaker/Modelv2/component.yaml") -sagemaker_EndpointConfig_op = components.load_component_from_file( - "../../../../components/aws/sagemaker/EndpointConfig/component.yaml" -) -sagemaker_Endpoint_op = components.load_component_from_file( - "../../../../components/aws/sagemaker/Endpoint/component.yaml" -) -sagemaker_batch_transform_op = components.load_component_from_file( - "../../../../components/aws/sagemaker/batch_transform/component.yaml" -) - - -def processing_input(input_name, s3_uri, local_path): - return { - "InputName": input_name, - "S3Input": { - "S3Uri": s3_uri, - "LocalPath": local_path, - "S3DataType": "S3Prefix", - "S3InputMode": "File", - }, - } - - -def processing_output(output_name, s3_uri, local_path): - return { - "OutputName": output_name, - "S3Output": { - "S3Uri": s3_uri, - "LocalPath": local_path, - "S3UploadMode": "EndOfJob", - }, - } - - -def training_input(input_name, s3_uri): - return { - "ChannelName": input_name, - "DataSource": {"S3DataSource": {"S3Uri": s3_uri, "S3DataType": "S3Prefix"}}, - } - - -def model_input(train_image, model_artifact_url): - return { - "containerHostname": "mnist-kmeans", - "image": train_image, - "mode": "SingleModel", - "modelDataURL": model_artifact_url, - } - - -def get_production_variants(model_name): - return [ - { - "initialInstanceCount": 1, - "initialVariantWeight": 1, - "instanceType": "ml.t2.medium", - "modelName": model_name, - "variantName": "mnist-kmeans-sample", - "volumeSizeInGB": 5, - } -] - - -@dsl.pipeline( - name="MNIST Classification pipeline", - description="MNIST Classification using KMEANS in SageMaker", -) -def mnist_classification(role_arn="", bucket_name=""): - # Common component inputs - region = "us-east-1" - instance_type = "ml.m5.2xlarge" - train_image = "382416733822.dkr.ecr.us-east-1.amazonaws.com/kmeans:1" - - # Training input and output location based on bucket name - hpo_channels = [ - training_input("train", f"s3://{bucket_name}/mnist_kmeans_example/train_data"), - training_input("test", f"s3://{bucket_name}/mnist_kmeans_example/test_data"), - ] - train_output_location = f"s3://{bucket_name}/mnist_kmeans_example/output" - - process = sagemaker_process_op( - role=role_arn, - region=region, - image="763104351884.dkr.ecr.us-east-1.amazonaws.com/pytorch-training:1.5.0-cpu-py36-ubuntu16.04", - instance_type=instance_type, - container_entrypoint=[ - "python", - 
"/opt/ml/processing/code/kmeans_preprocessing.py", - ], - input_config=[ - processing_input( - "mnist_tar", - "s3://sagemaker-sample-files/datasets/image/MNIST/mnist.pkl.gz", - "/opt/ml/processing/input", - ), - processing_input( - "source_code", - f"s3://{bucket_name}/mnist_kmeans_example/processing_code/kmeans_preprocessing.py", - "/opt/ml/processing/code", - ), - ], - output_config=[ - processing_output( - "train_data", - f"s3://{bucket_name}/mnist_kmeans_example/", - "/opt/ml/processing/output_train/", - ), - processing_output( - "test_data", - f"s3://{bucket_name}/mnist_kmeans_example/", - "/opt/ml/processing/output_test/", - ), - processing_output( - "valid_data", - f"s3://{bucket_name}/mnist_kmeans_example/input/", - "/opt/ml/processing/output_valid/", - ), - ], - ) - - hpo = sagemaker_hpo_op( - region=region, - image=train_image, - metric_name="test:msd", - metric_type="Minimize", - static_parameters={"k": "10", "feature_dim": "784"}, - integer_parameters=[ - {"Name": "mini_batch_size", "MinValue": "500", "MaxValue": "600"}, - {"Name": "extra_center_factor", "MinValue": "10", "MaxValue": "20"}, - ], - categorical_parameters=[ - {"Name": "init_method", "Values": ["random", "kmeans++"]} - ], - channels=hpo_channels, - output_location=train_output_location, - instance_type=instance_type, - max_num_jobs=3, - max_parallel_jobs=2, - role=role_arn, - ).after(process) - - trainingJobName = "sample-mnist-v2-trainingjob" + str(random.randint(0, 999999)) - - training = sagemaker_train_op( - region=region, - algorithm_specification={ - "trainingImage": train_image, - "trainingInputMode": "File", - }, - hyper_parameters=hpo.outputs["best_hyperparameters"], - input_data_config=[ - { - "channelName": "train", - "dataSource": { - "s3DataSource": { - "s3DataType": "S3Prefix", - "s3URI": f"s3://{bucket_name}/mnist_kmeans_example/train_data", - "s3DataDistributionType": "FullyReplicated", - }, - }, - "compressionType": "None", - "RecordWrapperType": "None", - "InputMode": "File", - } - ], - output_data_config={"s3OutputPath": f"s3://{bucket_name}"}, - resource_config={ - "instanceCount": 1, - "instanceType": "ml.m4.xlarge", - "volumeSizeInGB": 5, - }, - role_arn=role_arn, - training_job_name=trainingJobName, - stopping_condition={"maxRuntimeInSeconds": 3600}, - ) - - def get_s3_model_artifact(model_artifacts) -> str: - import ast - - model_artifacts = ast.literal_eval(model_artifacts) - return model_artifacts["s3ModelArtifacts"] - - get_s3_model_artifact_op = kfp.components.create_component_from_func( - get_s3_model_artifact, output_component_file="get_s3_model_artifact.yaml" - ) - model_artifact_url = get_s3_model_artifact_op( - training.outputs["model_artifacts"] - ).output - - Model = sagemaker_Model_op( - region=region, - execution_role_arn=role_arn, - primary_container=model_input(train_image, model_artifact_url), - ) - model_name = Model.outputs["sagemaker_resource_name"] - EndpointConfig = sagemaker_EndpointConfig_op( - region=region, - production_variants=get_production_variants(model_name), - ) - - endpoint_config_name = EndpointConfig.outputs["sagemaker_resource_name"] - - Endpoint = sagemaker_Endpoint_op( - region=region, - endpoint_config_name=endpoint_config_name, - ) - - - sagemaker_batch_transform_op( - region=region, - model_name=model_name, - instance_type=instance_type, - batch_strategy="MultiRecord", - input_location=f"s3://{bucket_name}/mnist_kmeans_example/input", - content_type="text/csv", - split_type="Line", - output_location=f"s3://{bucket_name}/mnist_kmeans_example/output", 
- ) - - -if __name__ == "__main__": - kfp.compiler.Compiler().compile(mnist_classification, __file__ + ".tar.gz") - print("#####################Pipeline compiled########################") diff --git a/samples/contrib/aws-samples/rlestimator_pipeline/README.md b/samples/contrib/aws-samples/rlestimator_pipeline/README.md deleted file mode 100644 index 5baa623a544..00000000000 --- a/samples/contrib/aws-samples/rlestimator_pipeline/README.md +++ /dev/null @@ -1,72 +0,0 @@ -The two examples in this directory each run a different type of RLEstimator Reinforcement Learning job as a SageMaker training job. - -## Examples - -Each example is based on a notebook from the [AWS SageMaker Examples](https://github.com/aws/amazon-sagemaker-examples) repo. -(It should be noted that all of these examples are available by default on all SageMaker Notebook Instances.) - -The `rlestimator_pipeline_custom_image` pipeline example is based on the -[`rl_unity_ray`](https://github.com/aws/amazon-sagemaker-examples/blob/master/reinforcement_learning/rl_unity_ray/rl_unity_ray.ipynb) notebook. - -The `rlestimator_pipeline_toolkit_image` pipeline example is based on the -[`rl_news_vendor_ray_custom`](https://github.com/aws/amazon-sagemaker-examples/blob/master/reinforcement_learning/rl_resource_allocation_ray_customEnv/rl_news_vendor_ray_custom.ipynb) notebook. - -## Prerequisites - -To run these examples you will need to create a number of resources that will then be used as inputs for the pipeline component. - -rlestimator_pipeline_custom_image required inputs: -``` -output_bucket_name = -input_bucket_name = -input_key = -job_name_prefix = -image_uri = -assume_role = -``` - -rl_news_vendor_ray_custom required inputs: -``` -output_bucket_name = -input_bucket_name = -input_key = -job_name_prefix = -role = -``` - -You could go to the bother of creating all of these resources individually, but it might be easier to run each of the notebooks -mentioned above, and then use the resources that are created by the notebooks. The input bucket and output bucket -will be created under a name like 'sagemaker-us-east-1-520713654638', depending on your region and account number. Within -these buckets a key will be created for each of your training job runs. After you have executed all cells in each of the notebooks, -a key will be created for each completed training job, and any custom code required for the training job will be placed -there as a .tar.gz file. The full S3 URI of that .tar.gz file can be used as the source_dir input for these pipeline components. - - -## Compiling the pipeline template - -Follow the guide to [building a pipeline](https://www.kubeflow.org/docs/guides/pipelines/build-pipeline/) to install the Kubeflow Pipelines SDK, then run the following commands to compile the sample Python into workflow specifications. Each specification takes the form of a YAML file compressed into a `.tar.gz` file. - -```bash -dsl-compile --py rlestimator_pipeline_custom_image.py --output rlestimator_pipeline_custom_image.tar.gz -dsl-compile --py rlestimator_pipeline_toolkit_image.py --output rlestimator_pipeline_toolkit_image.tar.gz -``` - -## Deploying the pipeline - -Open the Kubeflow pipelines UI. Create a new pipeline, and then upload the compiled specification (`.tar.gz` file) as a new pipeline template. - -Once the pipeline run is done, you can go to the S3 path specified in `output` to check your prediction results. There are three columns: `PassengerId`, `prediction`, and `Survived` (ground truth value) - -``` -... 
-4,1,1 -5,0,0 -6,0,0 -7,0,0 -... -``` - -## Components source - -RLEstimator Training Job: - [source code](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/rlestimator/src) diff --git a/samples/contrib/aws-samples/rlestimator_pipeline/rlestimator_pipeline_custom_image.py b/samples/contrib/aws-samples/rlestimator_pipeline/rlestimator_pipeline_custom_image.py deleted file mode 100644 index c3334baaeac..00000000000 --- a/samples/contrib/aws-samples/rlestimator_pipeline/rlestimator_pipeline_custom_image.py +++ /dev/null @@ -1,76 +0,0 @@ -#!/usr/bin/env python3 - -# Uncomment the apply(use_aws_secret()) below if you are not using OIDC -# more info : https://github.com/kubeflow/pipelines/tree/master/samples/contrib/aws-samples/README.md - -import kfp -import os -from kfp import components -from kfp import dsl -from kfp.aws import use_aws_secret -from sagemaker.rl import RLEstimator, RLToolkit - - -cur_file_dir = os.path.dirname(__file__) -components_dir = os.path.join(cur_file_dir, "../../../../components/aws/sagemaker/") - -sagemaker_rlestimator_op = components.load_component_from_file( - components_dir + "/rlestimator/component.yaml" -) - -output_bucket_name = "kf-pipelines-rlestimator-output" -input_bucket_name = "kf-pipelines-rlestimator-input" -input_key = "sourcedir.tar.gz" -job_name_prefix = "rlestimator-pipeline-custom-image" -image_uri = "your_sagemaker_image_name" -role = "your_sagemaker_role_name" -security_groups = ["sg-0490601e83f220e82"] -subnets = [ - "subnet-0efc73526db16a4a4", - "subnet-0b8af626f39e7d462", -] - -# You need to specify your own metric_definitions if using a custom image_uri -metric_definitions = RLEstimator.default_metric_definitions(RLToolkit.RAY) - - -@dsl.pipeline( - name="RLEstimator Custom Docker Image", - description="RLEstimator training job where we provide a reference to a Docker image containing our training code", -) -def rlestimator_training_custom_pipeline( - region="us-east-1", - entry_point="train-unity.py", - source_dir="s3://{}/{}".format(input_bucket_name, input_key), - image_uri=image_uri, - assume_role=role, - instance_type="ml.c5.2xlarge", - instance_count=1, - output_path="s3://{}/".format(output_bucket_name), - base_job_name=job_name_prefix, - metric_definitions=metric_definitions, - hyperparameters={}, - vpc_security_group_ids=security_groups, - vpc_subnets=subnets, -): - rlestimator_training_custom = sagemaker_rlestimator_op( - region=region, - entry_point=entry_point, - source_dir=source_dir, - image=image_uri, - role=assume_role, - model_artifact_path=output_path, - job_name=base_job_name, - metric_definitions=metric_definitions, - instance_type=instance_type, - instance_count=instance_count, - hyperparameters=hyperparameters, - vpc_security_group_ids=vpc_security_group_ids, - vpc_subnets=vpc_subnets, - ) # .apply(use_aws_secret('aws-secret', 'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY')) - - -if __name__ == "__main__": - kfp.compiler.Compiler().compile( - rlestimator_training_custom_pipeline, __file__ + ".zip" - ) diff --git a/samples/contrib/aws-samples/rlestimator_pipeline/rlestimator_pipeline_toolkit_image.py b/samples/contrib/aws-samples/rlestimator_pipeline/rlestimator_pipeline_toolkit_image.py deleted file mode 100644 index 0973da1c002..00000000000 --- a/samples/contrib/aws-samples/rlestimator_pipeline/rlestimator_pipeline_toolkit_image.py +++ /dev/null @@ -1,93 +0,0 @@ -#!/usr/bin/env python3 - -# Uncomment the apply(use_aws_secret()) below if you are not using OIDC -# more info : 
https://github.com/kubeflow/pipelines/tree/master/samples/contrib/aws-samples/README.md - -import kfp -import os -from kfp import components -from kfp import dsl -from kfp.aws import use_aws_secret - - -cur_file_dir = os.path.dirname(__file__) -components_dir = os.path.join(cur_file_dir, "../../../../components/aws/sagemaker/") - -sagemaker_rlestimator_op = components.load_component_from_file( - components_dir + "/rlestimator/component.yaml" -) - -metric_definitions = [ - { - "Name": "episode_reward_mean", - "Regex": "episode_reward_mean: ([-+]?[0-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?)", - }, - { - "Name": "episode_reward_max", - "Regex": "episode_reward_max: ([-+]?[0-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?)", - }, - { - "Name": "episode_len_mean", - "Regex": "episode_len_mean: ([-+]?[0-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?)", - }, - {"Name": "entropy", "Regex": "entropy: ([-+]?[0-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?)"}, - { - "Name": "episode_reward_min", - "Regex": "episode_reward_min: ([-+]?[0-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?)", - }, - {"Name": "vf_loss", "Regex": "vf_loss: ([-+]?[0-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?)"}, - { - "Name": "policy_loss", - "Regex": "policy_loss: ([-+]?[0-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?)", - }, -] - -output_bucket_name = "your_sagemaker_bucket_name" -input_bucket_name = "your_sagemaker_bucket_name" -input_key = "rl-newsvendor-2020-11-11-10-43-30-556/source/sourcedir.tar.gz" -job_name_prefix = "rlestimator-pipeline-toolkit-image" -role = "your_sagemaker_role_name" - - -@dsl.pipeline( - name="RLEstimator Toolkit & Framework Pipeline", - description="RLEstimator training job where the AWS Docker image is auto-selected based on the Toolkit and Framework we define", -) -def rlestimator_training_toolkit_pipeline( - region="us-east-1", - entry_point="train_news_vendor.py", - source_dir="s3://{}/{}".format(input_bucket_name, input_key), - toolkit="ray", - toolkit_version="0.8.5", - framework="tensorflow", - assume_role=role, - instance_type="ml.c5.2xlarge", - instance_count=1, - output_path="s3://{}/".format(output_bucket_name), - base_job_name=job_name_prefix, - metric_definitions=metric_definitions, - max_run=300, - hyperparameters={}, -): - rlestimator_training_toolkit = sagemaker_rlestimator_op( - region=region, - entry_point=entry_point, - source_dir=source_dir, - toolkit=toolkit, - toolkit_version=toolkit_version, - framework=framework, - role=assume_role, - instance_type=instance_type, - instance_count=instance_count, - model_artifact_path=output_path, - job_name=base_job_name, - metric_definitions=metric_definitions, - max_run=max_run, - hyperparameters=hyperparameters, - ) # .apply(use_aws_secret('aws-secret', 'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY')) - - -if __name__ == "__main__": - kfp.compiler.Compiler().compile( - rlestimator_training_toolkit_pipeline, __file__ + ".zip" - ) diff --git a/samples/contrib/aws-samples/robomaker_simulation/README.md b/samples/contrib/aws-samples/robomaker_simulation/README.md deleted file mode 100644 index 18a103b1235..00000000000 --- a/samples/contrib/aws-samples/robomaker_simulation/README.md +++ /dev/null @@ -1,87 +0,0 @@ -Examples for creating a simulation application, running a simulation job, running a simulation job batch, and deleting a simulation application. - -## Examples - -The examples are based on a notebook from the [AWS SageMaker Examples](https://github.com/aws/amazon-sagemaker-examples) repo. 
- -The simulation jobs that are launched by these examples are based on the -[`rl_objecttracker_robomaker_coach_gazebo`](https://github.com/aws/amazon-sagemaker-examples/tree/3de42334720a7197ea1f15395b66c44cf5ef7fd4/reinforcement_learning/rl_objecttracker_robomaker_coach_gazebo) notebook. -This is an older notebook example, but you can still download it from GitHub and upload it directly to Jupyter Lab in SageMaker. - - -## Prerequisites - -To run these examples you will need to create a number of resources that will then be used as inputs for the pipeline component. -Some of the inputs are used to create the RoboMaker Simulation Application and some are used as inputs for the RoboMaker -Simulation Job. - -Required inputs for the simulation job example: -``` -role = -region = -app_name = -sources = -simulation_software_suite = -rendering_engine = -robot_software_suite = -timeout_in_secs = , -max_concurrency = , -simulation_job_requests = , -sim_app_arn=robomaker_create_sim_app.outputs["arn"] -sim_app_arn = -``` - -You could go to the bother of creating all of these resources individually, but it might be easier to run the notebook -mentioned above, and then use the resources that are created by that notebook. The notebook should create the output_bucket, -output_key, vpc configs, launch config, etc., and you can use those as the inputs for this example. - -## Compiling the pipeline template - -Follow the guide to [building a pipeline](https://www.kubeflow.org/docs/guides/pipelines/build-pipeline/) to install the Kubeflow Pipelines SDK, then run the following commands to compile the sample Python into workflow specifications. Each specification takes the form of a YAML file compressed into a `.tar.gz` file. - -```bash -dsl-compile --py rlestimator_pipeline_custom_image.py --output rlestimator_pipeline_custom_image.tar.gz -dsl-compile --py rlestimator_pipeline_toolkit_image.py --output rlestimator_pipeline_toolkit_image.tar.gz -dsl-compile --py sagemaker_robomaker_rl_job.py --output sagemaker_robomaker_rl_job.tar.gz -``` - -## Deploying the pipeline - -Open the Kubeflow pipelines UI. Create a new pipeline, and then upload the compiled specification (`.tar.gz` file) as a new pipeline template. - -Once the pipeline run is done, you can go to the S3 path specified in `output` to check your prediction results. There are three columns: `PassengerId`, `prediction`, and `Survived` (ground truth value) - -``` -... -4,1,1 -5,0,0 -6,0,0 -7,0,0 -... 
-``` - -## Components source - -RoboMaker Create Simulation Application: - [source code](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/create_simulation_application/src) diff --git a/samples/contrib/aws-samples/robomaker_simulation/robomaker_simulation_job_app.py b/samples/contrib/aws-samples/robomaker_simulation/robomaker_simulation_job_app.py deleted file mode 100644 index f3b4f0a843c..00000000000 --- a/samples/contrib/aws-samples/robomaker_simulation/robomaker_simulation_job_app.py +++ /dev/null @@ -1,128 +0,0 @@ -#!/usr/bin/env python3 - -# Uncomment the apply(use_aws_secret()) below if you are not using OIDC -# more info : https://github.com/kubeflow/pipelines/tree/master/samples/contrib/aws-samples/README.md - -import kfp -import os -from kfp import components -from kfp import dsl -import random -import string -from kfp.aws import use_aws_secret - - -cur_file_dir = os.path.dirname(__file__) -components_dir = os.path.join(cur_file_dir, "../../../../components/aws/sagemaker/") - -robomaker_create_sim_app_op = components.load_component_from_file( - components_dir + "/create_simulation_app/component.yaml" -) - -robomaker_sim_job_op = components.load_component_from_file( - components_dir + "/simulation_job/component.yaml" -) - -robomaker_delete_sim_app_op = components.load_component_from_file( - components_dir + "/delete_simulation_app/component.yaml" -) - -launch_config = { - "packageName": "object_tracker_simulation", - "launchFile": "evaluation.launch", - "environmentVariables": { - "MODEL_S3_BUCKET": "your_sagemaker_bucket_name", - "MODEL_S3_PREFIX": "rl-object-tracker-sagemaker-201116-051751", - "ROS_AWS_REGION": "us-east-1", - "MARKOV_PRESET_FILE": "object_tracker.py", - "NUMBER_OF_ROLLOUT_WORKERS": "1", - }, - "streamUI": True, -} - -simulation_app_name = "robomaker-pipeline-simulation-application" -sources_bucket = "your_sagemaker_bucket_name" -sources_key = "object-tracker/simulation_ws.tar.gz" -sources_architecture = "X86_64" -simulation_software_name = "Gazebo" -simulation_software_version = "7" -robot_software_name = "ROS" -robot_software_version = "Kinetic" -rendering_engine_name = "OGRE" -rendering_engine_version = "1.x" -role = "your_sagemaker_role_name" -output_bucket = "kf-pipelines-robomaker-output" -output_key = "test-output-key" -security_groups = ["sg-0490601e83f220e82"] -subnets = [ - "subnet-0efc73526db16a4a4", - "subnet-0b8af626f39e7d462", -] - - -@dsl.pipeline( - name="RoboMaker Simulation Job Pipeline", - description="RoboMaker simulation job and simulation application created via pipeline components", -) -def robomaker_simulation_job_app_pipeline( - region="us-east-1", - role=role, - name=simulation_app_name - + "".join(random.choice(string.ascii_lowercase) for i in range(10)), - sources=[ - { - "s3Bucket": sources_bucket, - "s3Key": sources_key, - "architecture": sources_architecture, - } - ], - simulation_software_name=simulation_software_name, - simulation_software_version=simulation_software_version, - robot_software_name=robot_software_name, - robot_software_version=robot_software_version, - rendering_engine_name=rendering_engine_name, - rendering_engine_version=rendering_engine_version, - output_bucket=output_bucket, - output_path=output_key, - sim_app_launch_config=launch_config, - vpc_security_group_ids=security_groups, - vpc_subnets=subnets, -): - robomaker_create_sim_app = robomaker_create_sim_app_op( - region=region, - app_name=name, - sources=sources, - simulation_software_name=simulation_software_name, - 
simulation_software_version=simulation_software_version, - robot_software_name=robot_software_name, - robot_software_version=robot_software_version, - rendering_engine_name=rendering_engine_name, - rendering_engine_version=rendering_engine_version, - ) - # .apply(use_aws_secret('aws-secret', 'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY')) - - robomaker_simulation_job = robomaker_sim_job_op( - region=region, - role=role, - output_bucket=output_bucket, - output_path=output_path, - max_run=300, - failure_behavior="Fail", - sim_app_arn=robomaker_create_sim_app.outputs["arn"], - sim_app_launch_config=sim_app_launch_config, - vpc_security_group_ids=vpc_security_group_ids, - vpc_subnets=vpc_subnets, - use_public_ip="True", - ).after(robomaker_create_sim_app) - # .apply(use_aws_secret('aws-secret', 'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY')) - - robomaker_delete_sim_app = robomaker_delete_sim_app_op( - region=region, arn=robomaker_create_sim_app.outputs["arn"], - ).after(robomaker_simulation_job, robomaker_create_sim_app) - # .apply(use_aws_secret('aws-secret', 'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY')) - - -if __name__ == "__main__": - kfp.compiler.Compiler().compile( - robomaker_simulation_job_app_pipeline, __file__ + ".zip" - ) diff --git a/samples/contrib/aws-samples/robomaker_simulation/robomaker_simulation_job_batch_app.py b/samples/contrib/aws-samples/robomaker_simulation/robomaker_simulation_job_batch_app.py deleted file mode 100644 index 9bddb175bfa..00000000000 --- a/samples/contrib/aws-samples/robomaker_simulation/robomaker_simulation_job_batch_app.py +++ /dev/null @@ -1,136 +0,0 @@ -#!/usr/bin/env python3 - -# Uncomment the apply(use_aws_secret()) below if you are not using OIDC -# more info : https://github.com/kubeflow/pipelines/tree/master/samples/contrib/aws-samples/README.md - -import kfp -import os -from kfp import components -from kfp import dsl -import random -import string -from kfp.aws import use_aws_secret - - -cur_file_dir = os.path.dirname(__file__) -components_dir = os.path.join(cur_file_dir, "../../../../components/aws/sagemaker/") - -robomaker_create_sim_app_op = components.load_component_from_file( - components_dir + "/create_simulation_app/component.yaml" -) - -robomaker_sim_job_batch_op = components.load_component_from_file( - components_dir + "/simulation_job_batch/component.yaml" -) - -robomaker_delete_sim_app_op = components.load_component_from_file( - components_dir + "/delete_simulation_app/component.yaml" -) - -simulation_app_name = "robomaker-pipeline-simulation-batch-application" -sources_bucket = "your_sagemaker_bucket_name" -sources_key = "object-tracker/simulation_ws.tar.gz" -sources_architecture = "X86_64" -simulation_software_name = "Gazebo" -simulation_software_version = "7" -robot_software_name = "ROS" -robot_software_version = "Kinetic" -rendering_engine_name = "OGRE" -rendering_engine_version = "1.x" -role = "your_sagemaker_role_name" - -job_requests = [ - { - "outputLocation": { - "s3Bucket": "kf-pipelines-robomaker-output", - "s3Prefix": "test-output-key", - }, - "loggingConfig": {"recordAllRosTopics": True}, - "maxJobDurationInSeconds": 900, - "iamRole": "your_sagemaker_role_name", - "failureBehavior": "Fail", - "simulationApplications": [ - { - "application": "test-arn", - "launchConfig": { - "packageName": "object_tracker_simulation", - "launchFile": "evaluation.launch", - "environmentVariables": { - "MODEL_S3_BUCKET": "your_sagemaker_bucket_name", - "MODEL_S3_PREFIX": "rl-object-tracker-sagemaker-201116-051751", - "ROS_AWS_REGION": 
"us-east-1", - "MARKOV_PRESET_FILE": "object_tracker.py", - "NUMBER_OF_ROLLOUT_WORKERS": "1", - }, - "streamUI": True, - }, - } - ], - "vpcConfig": { - "subnets": ["subnet-0efc73526db16a4a4", "subnet-0b8af626f39e7d462",], - "securityGroups": ["sg-0490601e83f220e82"], - "assignPublicIp": True, - }, - } -] - - -@dsl.pipeline( - name="RoboMaker Job Batch Pipeline", - description="RoboMaker simulation job batch is launched via a pipeline component", -) -def robomaker_simulation_job_batch_app_pipeline( - region="us-east-1", - role=role, - name=simulation_app_name - + "".join(random.choice(string.ascii_lowercase) for i in range(10)), - sources=[ - { - "s3Bucket": sources_bucket, - "s3Key": sources_key, - "architecture": sources_architecture, - } - ], - simulation_software_name=simulation_software_name, - simulation_software_version=simulation_software_version, - robot_software_name=robot_software_name, - robot_software_version=robot_software_version, - rendering_engine_name=rendering_engine_name, - rendering_engine_version=rendering_engine_version, - timeout_in_secs="900", - max_concurrency="3", - simulation_job_requests=job_requests, -): - robomaker_create_sim_app = robomaker_create_sim_app_op( - region=region, - app_name=name, - sources=sources, - simulation_software_name=simulation_software_name, - simulation_software_version=simulation_software_version, - robot_software_name=robot_software_name, - robot_software_version=robot_software_version, - rendering_engine_name=rendering_engine_name, - rendering_engine_version=rendering_engine_version, - ) - # .apply(use_aws_secret('aws-secret', 'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY')) - - robomaker_simulation_batch_job = robomaker_sim_job_batch_op( - region=region, - role=role, - timeout_in_secs=timeout_in_secs, - max_concurrency=max_concurrency, - simulation_job_requests=simulation_job_requests, - sim_app_arn=robomaker_create_sim_app.outputs["arn"], - ).after(robomaker_create_sim_app) - # .apply(use_aws_secret('aws-secret', 'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY')) - - robomaker_delete_sim_app = robomaker_delete_sim_app_op( - region=region, arn=robomaker_create_sim_app.outputs["arn"], - ).after(robomaker_simulation_batch_job, robomaker_create_sim_app) - # .apply(use_aws_secret('aws-secret', 'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY')) - - -if __name__ == "__main__": - kfp.compiler.Compiler().compile( - robomaker_simulation_job_batch_app_pipeline, __file__ + ".zip" - ) diff --git a/samples/contrib/aws-samples/robomaker_simulation/sagemaker_robomaker_rl_job.py b/samples/contrib/aws-samples/robomaker_simulation/sagemaker_robomaker_rl_job.py deleted file mode 100644 index cb1d440e116..00000000000 --- a/samples/contrib/aws-samples/robomaker_simulation/sagemaker_robomaker_rl_job.py +++ /dev/null @@ -1,196 +0,0 @@ -#!/usr/bin/env python3 - -# Uncomment the apply(use_aws_secret()) below if you are not using OIDC -# more info : https://github.com/kubeflow/pipelines/tree/master/samples/contrib/aws-samples/README.md - -import kfp -import os -from kfp import components -from kfp import dsl -import random -import string -from kfp.aws import use_aws_secret - -cur_file_dir = os.path.dirname(__file__) -components_dir = os.path.join(cur_file_dir, "../../../../components/aws/sagemaker/") - -robomaker_create_sim_app_op = components.load_component_from_file( - components_dir + "/create_simulation_app/component.yaml" -) - -robomaker_sim_job_op = components.load_component_from_file( - components_dir + "/simulation_job/component.yaml" -) - 
-robomaker_delete_sim_app_op = components.load_component_from_file( - components_dir + "/delete_simulation_app/component.yaml" -) - -sagemaker_rlestimator_op = components.load_component_from_file( - components_dir + "/rlestimator/component.yaml" -) - -metric_definitions = [ - {"Name": "reward-training", "Regex": "^Training>.*Total reward=(.*?),"}, - {"Name": "ppo-surrogate-loss", "Regex": "^Policy training>.*Surrogate loss=(.*?),"}, - {"Name": "ppo-entropy", "Regex": "^Policy training>.*Entropy=(.*?),"}, - {"Name": "reward-testing", "Regex": "^Testing>.*Total reward=(.*?),"}, -] - -# Simulation Application Inputs -region = "us-east-1" -simulation_software_name = "Gazebo" -simulation_software_version = "7" -robot_software_name = "ROS" -robot_software_version = "Kinetic" -rendering_engine_name = "OGRE" -rendering_engine_version = "1.x" -simulation_app_name = "robomaker-pipeline-objecttracker-sim-app" + "".join( - random.choice(string.ascii_lowercase) for i in range(10) -) -sources_bucket = "your_sagemaker_bucket_name" -sources_key = "object-tracker/simulation_ws.tar.gz" -sources_architecture = "X86_64" -sources = [ - { - "s3Bucket": sources_bucket, - "s3Key": sources_key, - "architecture": sources_architecture, - } -] - -# RLEstimator Inputs -entry_point = "training_worker.py" -rl_sources_key = "rl-object-tracker-sagemaker-201123-042019/source/sourcedir.tar.gz" -source_dir = "s3://{}/{}".format(sources_bucket, rl_sources_key) -rl_output_path = "s3://{}/".format(sources_bucket) -train_instance_type = "ml.c5.2xlarge" -train_instance_count = 1 -toolkit = "coach" -toolkit_version = "0.11" -framework = "tensorflow" -job_name = "rl-kf-pipeline-objecttracker" + "".join( - random.choice(string.ascii_lowercase) for i in range(10) -) -max_run = 300 -s3_prefix = "rl-object-tracker-sagemaker-201123-042019" -hyperparameters = { - "s3_bucket": sources_bucket, - "s3_prefix": s3_prefix, - "aws_region": "us-east-1", - "RLCOACH_PRESET": "object_tracker", -} -role = "your_sagemaker_role_name" -security_groups = ["sg-0490601e83f220e82"] -subnets = [ - "subnet-0efc73526db16a4a4", - "subnet-0b8af626f39e7d462", -] - -# Simulation Job Inputs -output_bucket = "kf-pipelines-robomaker-output" -output_key = "test-output-key" - - -@dsl.pipeline( - name="SageMaker & RoboMaker pipeline", - description="SageMaker & RoboMaker Reinforcement Learning job where the jobs work together to train an RL model", -) -def sagemaker_robomaker_rl_job( - region=region, - role=role, - name=simulation_app_name, - sources=sources, - simulation_software_name=simulation_software_name, - simulation_software_version=simulation_software_version, - robot_software_name=robot_software_name, - robot_software_version=robot_software_version, - rendering_engine_name=rendering_engine_name, - rendering_engine_version=rendering_engine_version, - output_bucket=output_bucket, - robomaker_output_path=output_key, - vpc_security_group_ids=security_groups, - vpc_subnets=subnets, - entry_point=entry_point, - source_dir=source_dir, - toolkit=toolkit, - toolkit_version=toolkit_version, - framework=framework, - assume_role=role, - instance_type=train_instance_type, - instance_count=train_instance_count, - output_path=rl_output_path, - job_name=job_name, - metric_definitions=metric_definitions, - max_run=max_run, - hyperparameters=hyperparameters, - sources_bucket=sources_bucket, - s3_prefix=s3_prefix, -): - robomaker_create_sim_app = robomaker_create_sim_app_op( - region=region, - app_name=name, - sources=sources, - 
simulation_software_name=simulation_software_name, - simulation_software_version=simulation_software_version, - robot_software_name=robot_software_name, - robot_software_version=robot_software_version, - rendering_engine_name=rendering_engine_name, - rendering_engine_version=rendering_engine_version, - ) - # .apply(use_aws_secret('aws-secret', 'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY')) - - rlestimator_training_toolkit_coach = sagemaker_rlestimator_op( - region=region, - entry_point=entry_point, - source_dir=source_dir, - toolkit=toolkit, - toolkit_version=toolkit_version, - framework=framework, - role=assume_role, - instance_type=instance_type, - instance_count=instance_count, - model_artifact_path=output_path, - job_name=job_name, - max_run=max_run, - hyperparameters=hyperparameters, - metric_definitions=metric_definitions, - vpc_subnets=vpc_subnets, - vpc_security_group_ids=vpc_security_group_ids, - ) - # .apply(use_aws_secret('aws-secret', 'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY')) - - robomaker_simulation_job = robomaker_sim_job_op( - region=region, - role=role, - output_bucket=output_bucket, - output_path=robomaker_output_path, - max_run=3800, - failure_behavior="Continue", - sim_app_arn=robomaker_create_sim_app.outputs["arn"], - sim_app_launch_config={ - "packageName": "object_tracker_simulation", - "launchFile": "evaluation.launch", - "environmentVariables": { - "MODEL_S3_BUCKET": sources_bucket, - "MODEL_S3_PREFIX": s3_prefix, - "ROS_AWS_REGION": region, - "NUMBER_OF_ROLLOUT_WORKERS": "1", - "MARKOV_PRESET_FILE": "object_tracker.py", - }, - "streamUI": True, - }, - vpc_security_group_ids=vpc_security_group_ids, - vpc_subnets=vpc_subnets, - use_public_ip="True", - ) - # .apply(use_aws_secret('aws-secret', 'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY')) - - robomaker_delete_sim_app = robomaker_delete_sim_app_op( - region=region, arn=robomaker_create_sim_app.outputs["arn"], - ).after(robomaker_simulation_job, robomaker_create_sim_app) - # .apply(use_aws_secret('aws-secret', 'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY')) - - -if __name__ == "__main__": - kfp.compiler.Compiler().compile(sagemaker_robomaker_rl_job, __file__ + ".zip") diff --git a/samples/contrib/aws-samples/sagemaker_debugger_demo/README.md b/samples/contrib/aws-samples/sagemaker_debugger_demo/README.md deleted file mode 100644 index b3d0d0e359c..00000000000 --- a/samples/contrib/aws-samples/sagemaker_debugger_demo/README.md +++ /dev/null @@ -1,56 +0,0 @@ -# Sample Pipeline for Training Component with Debugger - -The `sagemaker-debugger-demo.py` sample creates a pipeline consisting of only a training component. In that component we are using the XGBoost algorithm but with poor hyperparameter choices. By enabling debugger rules and hooks, we can quickly learn that the model produced has issues. - -## Prerequisites - -This pipeline uses the exact same setup as [simple_training_pipeline](https://github.com/kubeflow/pipelines/tree/master/samples/contrib/aws-samples/simple_train_pipeline). For the purposes of this demonstration, all resources will be created in the `us-east-1` region. - -## Steps -1. Compile the pipeline: - `dsl-compile --py debugger-training-pipeline.py --output debugger-training-pipeline.tar.gz` -2. In the Kubeflow UI, upload this compiled pipeline specification (the .tar.gz file), fill in the necessary run parameters, and click create run. -3. Once the pipeline has finished running, you can view the results of each debugger rule under 'Logs'. 
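
If you prefer to launch the run without the Kubeflow UI, the compiled package can also be submitted with the KFP SDK client. The following is a minimal sketch; the host address and the parameter values are assumptions, so adjust them for your deployment:

```python
import kfp

# Assumed KFP endpoint; replace with the address of your Kubeflow Pipelines API.
client = kfp.Client(host="http://localhost:8080/pipeline")

# Submit the package compiled in step 1, passing example run parameters
# (the pipeline accepts `role_arn` and `bucket_name`).
client.create_run_from_pipeline_package(
    pipeline_file="debugger-training-pipeline.tar.gz",
    arguments={
        "role_arn": "arn:aws:iam::999999999999:role/SageMakerExecutorKFP",  # example role ARN
        "bucket_name": "my-bucket",
    },
)
```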
- -Inputs format to `debug_hook_config` and `debug_rule_config` : -```buildoutcfg -debug_hook_config = { - "S3OutputPath": "s3:///path/for/data/emission/", - "LocalPath": "/local/path/for/data/emission/", - "CollectionConfigurations": [ - { - "CollectionName": "losses", - "CollectionParameters": { - "start_step": "25", - "end_step": "150" - } - }, { - "CollectionName": "gradient", - "CollectionParameters": { - "start_step": "5", - "end_step": "100" - } - } - ], - "HookParameters": { - "save_interval": "10" - } -} - -debug_rule_config = { - "RuleConfigurationName": "rule_name" - "RuleEvaluatorImage": "503895931360.dkr.ecr.us-east-1.amazonaws.com/sagemaker-debugger-rules:latest" - "RuleParameters": { - "rule_to_invoke": "VanishingGradient", - "threshold": "0.01" - } -} -``` - -# Resources -* [Amazon SageMaker Debugger](https://docs.aws.amazon.com/sagemaker/latest/dg/train-debugger.html) -* [Available Frameworks to Use Debugger](https://docs.aws.amazon.com/sagemaker/latest/dg/train-debugger.html#debugger-supported-aws-containers) -* [Debugger Built-In Rules](https://docs.aws.amazon.com/sagemaker/latest/dg/debugger-built-in-rules.html) -* [Debugger Custom Rules](https://docs.aws.amazon.com/sagemaker/latest/dg/debugger-custom-rules.html) -* [Debugger API Examples](https://docs.aws.amazon.com/sagemaker/latest/dg/debugger-createtrainingjob-api.html) - diff --git a/samples/contrib/aws-samples/sagemaker_debugger_demo/debugger-training-pipeline.py b/samples/contrib/aws-samples/sagemaker_debugger_demo/debugger-training-pipeline.py deleted file mode 100644 index 6ca5f2fc47e..00000000000 --- a/samples/contrib/aws-samples/sagemaker_debugger_demo/debugger-training-pipeline.py +++ /dev/null @@ -1,110 +0,0 @@ -#!/usr/bin/env python3 - -import kfp -import json -import os -import copy -from kfp import components -from kfp import dsl - - -cur_file_dir = os.path.dirname(__file__) -components_dir = os.path.join(cur_file_dir, "../../../../components/aws/sagemaker/") - -sagemaker_train_op = components.load_component_from_file( - components_dir + "/train/component.yaml" -) - - -def training_input(input_name, s3_uri, content_type): - return { - "ChannelName": input_name, - "DataSource": {"S3DataSource": {"S3Uri": s3_uri, "S3DataType": "S3Prefix"}}, - "ContentType": content_type, - } - - -def training_debug_hook(s3_uri, collection_dict): - return { - "S3OutputPath": s3_uri, - "CollectionConfigurations": format_collection_config(collection_dict), - } - - -def format_collection_config(collection_dict): - output = [] - for key, val in collection_dict.items(): - output.append({"CollectionName": key, "CollectionParameters": val}) - return output - - -def training_debug_rules(rule_name, parameters): - return { - "RuleConfigurationName": rule_name, - "RuleEvaluatorImage": "503895931360.dkr.ecr.us-east-1.amazonaws.com/sagemaker-debugger-rules:latest", - "RuleParameters": parameters, - } - - -collections = { - "feature_importance": {"save_interval": "5"}, - "losses": {"save_interval": "10"}, - "average_shap": {"save_interval": "5"}, - "metrics": {"save_interval": "3"}, -} - - -bad_hyperparameters = { - "max_depth": "5", - "eta": "0", - "gamma": "4", - "min_child_weight": "6", - "silent": "0", - "subsample": "0.7", - "num_round": "50", -} - - -@dsl.pipeline( - name="XGBoost Training Pipeline with bad hyperparameters", - description="SageMaker training job test with debugger", -) -def training(role_arn="", bucket_name="my-bucket"): - train_channels = [ - training_input( - "train", - 
f"s3://{bucket_name}/mnist_kmeans_example/input/valid_data.csv", - "text/csv", - ) - ] - train_debug_rules = [ - training_debug_rules( - "LossNotDecreasing", - {"rule_to_invoke": "LossNotDecreasing", "tensor_regex": ".*"}, - ), - training_debug_rules( - "Overtraining", - { - "rule_to_invoke": "Overtraining", - "patience_train": "10", - "patience_validation": "20", - }, - ), - ] - training = sagemaker_train_op( - region="us-east-1", - image="683313688378.dkr.ecr.us-east-1.amazonaws.com/sagemaker-xgboost:0.90-2-cpu-py3", - hyperparameters=bad_hyperparameters, - channels=train_channels, - instance_type="ml.m5.2xlarge", - model_artifact_path=f"s3://{bucket_name}/mnist_kmeans_example/output/model", - debug_hook_config=training_debug_hook( - f"s3://{bucket_name}/mnist_kmeans_example/hook_config", collections - ), - debug_rule_config=train_debug_rules, - role=role_arn, - ) - - -if __name__ == "__main__": - kfp.compiler.Compiler().compile(training, __file__ + ".zip") diff --git a/samples/contrib/aws-samples/simple_train_pipeline/README.md b/samples/contrib/aws-samples/simple_train_pipeline/README.md deleted file mode 100644 index aa009a6977a..00000000000 --- a/samples/contrib/aws-samples/simple_train_pipeline/README.md +++ /dev/null @@ -1,125 +0,0 @@ -# Simple pipeline for the training component - -An example pipeline with only one [training component](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/train). - - -## Prerequisites - -Make sure you have set up your EKS cluster as described in this [README.md](https://github.com/kubeflow/pipelines/blob/master/samples/contrib/aws-samples/README.md). - -Use the following python script to copy `train_data`, `test_data`, and `valid_data.csv` to your bucket. -[Create a bucket](https://docs.aws.amazon.com/AmazonS3/latest/gsg/CreatingABucket.html) in `us-east-1` region if you don't have one already. -For the purposes of this demonstration, all resources will be created in the `us-east-1` region. 
- - -Create a new file named `s3_sample_data_creator.py` with the following content: -``` -import pickle, gzip, numpy, urllib.request, json -from urllib.parse import urlparse - -################################################################### -# This is the only thing that you need to change to run this code -# Give the name of your S3 bucket -bucket = '' - -# If you are gonna use the default values of the pipeline then -# give a bucket name which is in us-east-1 region -################################################################### - - -# Load the dataset -s3 = boto3.client("s3") -s3.download_file( - "sagemaker-sample-files", "datasets/image/MNIST/mnist.pkl.gz", "mnist.pkl.gz") - -with gzip.open("mnist.pkl.gz", "rb") as f: - train_set, valid_set, test_set = pickle.load(f, encoding="latin1") - - - -# Upload dataset to S3 -from sagemaker.amazon.common import write_numpy_to_dense_tensor -import io -import boto3 - -train_data_key = 'mnist_kmeans_example/train_data' -test_data_key = 'mnist_kmeans_example/test_data' -train_data_location = 's3://{}/{}'.format(bucket, train_data_key) -test_data_location = 's3://{}/{}'.format(bucket, test_data_key) -print('training data will be uploaded to: {}'.format(train_data_location)) -print('training data will be uploaded to: {}'.format(test_data_location)) - -# Convert the training data into the format required by the SageMaker KMeans algorithm -buf = io.BytesIO() -write_numpy_to_dense_tensor(buf, train_set[0], train_set[1]) -buf.seek(0) - -boto3.resource('s3').Bucket(bucket).Object(train_data_key).upload_fileobj(buf) - -# Convert the test data into the format required by the SageMaker KMeans algorithm -write_numpy_to_dense_tensor(buf, test_set[0], test_set[1]) -buf.seek(0) - -boto3.resource('s3').Bucket(bucket).Object(test_data_key).upload_fileobj(buf) - -# Convert the valid data into the format required by the SageMaker KMeans algorithm -numpy.savetxt('valid-data.csv', valid_set[0], delimiter=',', fmt='%g') -s3_client = boto3.client('s3') -input_key = "{}/valid_data.csv".format("mnist_kmeans_example/input") -s3_client.upload_file('valid-data.csv', bucket, input_key) -``` -Run this file with the follow command: `python3 s3_sample_data_creator.py` - - -## Steps -1. Compile the pipeline: - `dsl-compile --py training-pipeline.py --output training-pipeline.tar.gz` -2. In the Kubeflow UI, upload this compiled pipeline specification (the .tar.gz file) and click on create run. -3. Once the pipeline completes, you can see the outputs under 'Output parameters' in the Training component's Input/Output section. 
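
Before running the pipeline, note that `s3_sample_data_creator.py` as shown above calls `boto3.client("s3")` before `boto3` is imported (the `import boto3` only appears later, in the upload section), so the download step fails with a `NameError`. A minimal fix, keeping the rest of the script unchanged, is to group all imports at the top of the file, for example:

```python
# Corrected import header for s3_sample_data_creator.py: every module the script
# uses is imported up front, so boto3 is available before the MNIST download.
import gzip
import io
import json
import pickle
import urllib.request

import boto3
import numpy
from sagemaker.amazon.common import write_numpy_to_dense_tensor

bucket = ''  # your S3 bucket name (in us-east-1 if you use the default pipeline values)
```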
- -Example inputs to this pipeline : -```buildoutcfg -region : us-east-1 -endpoint_url : -image : 382416733822.dkr.ecr.us-east-1.amazonaws.com/kmeans:1 -training_input_mode : File -hyperparameters : {"k": "10", "feature_dim": "784"} -channels : In this JSON, along with other parameters you need to pass the S3 Uri where you have data - - [ - { - "ChannelName": "train", - "DataSource": { - "S3DataSource": { - "S3Uri": "s3:///mnist_kmeans_example/train_data", - "S3DataType": "S3Prefix", - "S3DataDistributionType": "FullyReplicated" - } - }, - "ContentType": "", - "CompressionType": "None", - "RecordWrapperType": "None", - "InputMode": "File" - } - ] - -instance_type : ml.m5.2xlarge -instance_count : 1 -volume_size : 50 -max_run_time : 3600 -model_artifact_path : This is where the output model will be stored - s3:///mnist_kmeans_example/output -output_encryption_key : -network_isolation : True -traffic_encryption : False -spot_instance : False -max_wait_time : 3600 -checkpoint_config : {} -role : Paste the role ARN that you noted down - (The IAM role with Full SageMaker permissions and S3 access) - Example role input-> arn:aws:iam::999999999999:role/SageMakerExecutorKFP -``` - - -# Resources -* [Using Amazon built-in algorithms](https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-algo-docker-registry-paths.html) diff --git a/samples/contrib/aws-samples/simple_train_pipeline/training-pipeline.py b/samples/contrib/aws-samples/simple_train_pipeline/training-pipeline.py deleted file mode 100644 index 241c5e4f94a..00000000000 --- a/samples/contrib/aws-samples/simple_train_pipeline/training-pipeline.py +++ /dev/null @@ -1,89 +0,0 @@ -#!/usr/bin/env python3 - -# Uncomment the apply(use_aws_secret()) below if you are not using OIDC -# more info : https://github.com/kubeflow/pipelines/tree/master/samples/contrib/aws-samples/README.md - -import kfp -import json -import os -import copy -from kfp import components -from kfp import dsl -from kfp.aws import use_aws_secret - - -cur_file_dir = os.path.dirname(__file__) -components_dir = os.path.join(cur_file_dir, "../../../../components/aws/sagemaker/") - -sagemaker_train_op = components.load_component_from_file( - components_dir + "/train/component.yaml" -) - -channelObjList = [] - -channelObj = { - "ChannelName": "", - "DataSource": { - "S3DataSource": { - "S3Uri": "", - "S3DataType": "S3Prefix", - "S3DataDistributionType": "FullyReplicated", - } - }, - "CompressionType": "None", - "RecordWrapperType": "None", - "InputMode": "File", -} - -channelObj["ChannelName"] = "train" -channelObj["DataSource"]["S3DataSource"][ - "S3Uri" -] = "s3://kubeflow-pipeline-data/mnist_kmeans_example/train_data" -channelObjList.append(copy.deepcopy(channelObj)) - - -@dsl.pipeline(name="Training pipeline", description="SageMaker training job test") -def training( - region="us-east-1", - endpoint_url="", - image="382416733822.dkr.ecr.us-east-1.amazonaws.com/kmeans:1", - training_input_mode="File", - hyperparameters={"k": "10", "feature_dim": "784"}, - channels=channelObjList, - instance_type="ml.m5.2xlarge", - instance_count=1, - volume_size=50, - max_run_time=3600, - model_artifact_path="s3://kubeflow-pipeline-data/mnist_kmeans_example/output", - output_encryption_key="", - network_isolation=True, - traffic_encryption=False, - spot_instance=False, - max_wait_time=3600, - checkpoint_config={}, - role="", -): - training = sagemaker_train_op( - region=region, - endpoint_url=endpoint_url, - image=image, - training_input_mode=training_input_mode, - 
hyperparameters=hyperparameters, - channels=channels, - instance_type=instance_type, - instance_count=instance_count, - volume_size=volume_size, - max_run_time=max_run_time, - model_artifact_path=model_artifact_path, - output_encryption_key=output_encryption_key, - network_isolation=network_isolation, - traffic_encryption=traffic_encryption, - spot_instance=spot_instance, - max_wait_time=max_wait_time, - checkpoint_config=checkpoint_config, - role=role, - ) # .apply(use_aws_secret('aws-secret', 'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY')) - - -if __name__ == "__main__": - kfp.compiler.Compiler().compile(training, __file__ + ".zip") diff --git a/samples/contrib/aws-samples/titanic-survival-prediction/README.md b/samples/contrib/aws-samples/titanic-survival-prediction/README.md deleted file mode 100644 index 1599cc8fed7..00000000000 --- a/samples/contrib/aws-samples/titanic-survival-prediction/README.md +++ /dev/null @@ -1,73 +0,0 @@ -The `titanic-survival-prediction.py` sample runs a Spark ML pipeline to train a classfication model using random forest on AWS Elastic Map Reduce(EMR). - -## The dataset - -Check Kaggle [Titanic: Machine Learning from Disaster](https://www.kaggle.com/c/titanic) for more details about this problem. 70% training dataset is used to train model and rest 30% for validation. - -Please upload training dataset [train.csv](https://www.kaggle.com/c/titanic/data) to your s3 bucket. - -## Spark ML Job - -Please check [aws-emr-titanic-ml-example](https://github.com/Jeffwan/aws-emr-titanic-ml-example) for example spark project. - -To get jar file, you can clone that project and run - -``` -sbt clean package - -# copy this jar to your s3 bucket. main class is `com.amazonaws.emr.titanic.Titanic` -ls target/scala-2.11/titanic-survivors-prediction_2.11-1.0.jar -``` - -## EMR permission - -This pipeline use aws-secret to get access to EMR services, please make sure you have a `aws-secret` in the kubeflow namespace and attach `AmazonElasticMapReduceFullAccess` policy. - -```yaml -apiVersion: v1 -kind: Secret -metadata: - name: aws-secret - namespace: kubeflow -type: Opaque -data: - AWS_ACCESS_KEY_ID: YOUR_BASE64_ACCESS_KEY - AWS_SECRET_ACCESS_KEY: YOUR_BASE64_SECRET_ACCESS -``` - -> Note: To get base64 string, try `echo -n $AWS_ACCESS_KEY_ID | base64` - - -## Compiling the pipeline template - -Follow the guide to [building a pipeline](https://www.kubeflow.org/docs/guides/pipelines/build-pipeline/) to install the Kubeflow Pipelines SDK, then run the following command to compile the sample Python into a workflow specification. The specification takes the form of a YAML file compressed into a `.tar.gz` file. - -```bash -dsl-compile --py titanic-survival-prediction.py --output titanic-survival-prediction.tar.gz -``` - -## Deploying the pipeline - -Open the Kubeflow pipelines UI. Create a new pipeline, and then upload the compiled specification (`.tar.gz` file) as a new pipeline template. - -Once the pipeline done, you can go to the S3 path specified in `output` to check your prediction results. There're three columes, `PassengerId`, `prediction`, `Survived` (Ground True value) - -``` -... -4,1,1 -5,0,0 -6,0,0 -7,0,0 -... 
-``` - -## Components source - -Create Cluster: - [source code](https://github.com/kubeflow/pipelines/tree/master/components/aws/emr/create_cluster/src) - -Submit Spark Job: - [source code](https://github.com/kubeflow/pipelines/tree/master/components/aws/emr/submit_spark_job/src) - -Delete Cluster: - [source code](https://github.com/kubeflow/pipelines/tree/master/components/aws/emr/delete_cluster/src) diff --git a/samples/contrib/aws-samples/titanic-survival-prediction/titanic-survival-prediction.py b/samples/contrib/aws-samples/titanic-survival-prediction/titanic-survival-prediction.py deleted file mode 100644 index cca0fbf59da..00000000000 --- a/samples/contrib/aws-samples/titanic-survival-prediction/titanic-survival-prediction.py +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env python3 - -import kfp -from kfp import components -from kfp import dsl -from kfp import gcp -from kfp.aws import use_aws_secret - -emr_create_cluster_op = components.load_component_from_file( - "../../../../components/aws/emr/create_cluster/component.yaml" -) -emr_submit_spark_job_op = components.load_component_from_file( - "../../../../components/aws/emr/submit_spark_job/component.yaml" -) -emr_delete_cluster_op = components.load_component_from_file( - "../../../../components/aws/emr/delete_cluster/component.yaml" -) - - -@dsl.pipeline( - name="Titanic Suvival Prediction Pipeline", - description="Predict survival on the Titanic", -) -def titanic_suvival_prediction( - region="us-west-2", - log_s3_uri="s3://kubeflow-pipeline-data/emr/titanic/logs", - cluster_name="emr-cluster", - job_name="spark-ml-trainner", - input="s3://kubeflow-pipeline-data/emr/titanic/train.csv", - output="s3://kubeflow-pipeline-data/emr/titanic/output", - jar_path="s3://kubeflow-pipeline-data/emr/titanic/titanic-survivors-prediction_2.11-1.0.jar", - main_class="com.amazonaws.emr.titanic.Titanic", - instance_type="m4.xlarge", - instance_count="3", -): - - create_cluster = emr_create_cluster_op( - region=region, - name=cluster_name, - instance_type=instance_type, - instance_count=instance_count, - log_s3_uri=log_s3_uri, - ).apply(use_aws_secret("aws-secret", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY")) - - training_and_prediction = emr_submit_spark_job_op( - region=region, - jobflow_id=create_cluster.output, - job_name=job_name, - jar_path=jar_path, - main_class=main_class, - input=input, - output=output, - ).apply(use_aws_secret("aws-secret", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY")) - - delete_cluster = emr_delete_cluster_op( - region=region, - jobflow_id=create_cluster.output, - dependent=training_and_prediction.outputs["job_id"], - ).apply(use_aws_secret("aws-secret", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY")) - - -if __name__ == "__main__": - kfp.compiler.Compiler().compile(titanic_suvival_prediction, __file__ + ".zip") diff --git a/samples/contrib/azure-samples/databricks-pipelines/.gitignore b/samples/contrib/azure-samples/databricks-pipelines/.gitignore deleted file mode 100644 index d9fc0b95f21..00000000000 --- a/samples/contrib/azure-samples/databricks-pipelines/.gitignore +++ /dev/null @@ -1,130 +0,0 @@ -#### joe made this: http://goel.io/joe - -#### python #### -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -pip-wheel-metadata/ -share/python-wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these 
files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.nox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -.hypothesis/ -.pytest_cache/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py -db.sqlite3 -db.sqlite3-journal - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# IPython -profile_default/ -ipython_config.py - -# pyenv -.python-version - -# pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. -# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not -# install all needed dependencies. -#Pipfile.lock - -# celery beat schedule file -celerybeat-schedule - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ -.dmypy.json -dmypy.json - -# Pyre type checker -.pyre/ - -*.tar.gz - diff --git a/samples/contrib/azure-samples/databricks-pipelines/README.md b/samples/contrib/azure-samples/databricks-pipelines/README.md deleted file mode 100644 index 40e9770b8e6..00000000000 --- a/samples/contrib/azure-samples/databricks-pipelines/README.md +++ /dev/null @@ -1,55 +0,0 @@ -# Introduction to Azure Databricks pipeline samples - -This folder contains several [Kubeflow Pipeline](https://www.kubeflow.org/docs/pipelines/) samples -which show how to manipulate [Databricks](https://azure.microsoft.com/services/databricks/) -resources using the [Azure Databricks for Kubeflow Pipelines]( -../kfp-azure-databricks/) package. - -## Setup - -1) [Create an Azure Databricks workspace]( - https://docs.microsoft.com/en-us/azure/databricks/getting-started/try-databricks?toc=https%3A%2F%2Fdocs.microsoft.com%2Fen-us%2Fazure%2Fazure-databricks%2FTOC.json&bc=https%3A%2F%2Fdocs.microsoft.com%2Fen-us%2Fazure%2Fbread%2Ftoc.json#--step-2-create-an-azure-databricks-workspace) -2) [Deploy the Azure Databricks Operator for Kubernetes]( - https://github.com/microsoft/azure-databricks-operator/blob/master/docs/deploy.md) -3) Some samples reference 'sparkpi.jar' library. This library can be found here: [Create and run a -jar job](https://docs.databricks.com/dev-tools/api/latest/examples.html#create-and-run-a-jar-job). -Upload it to [Databricks File System]( -https://docs.microsoft.com/en-us/azure/databricks/data/databricks-file-system) using e.g. [DBFS -CLI](https://docs.microsoft.com/en-us/azure/databricks/dev-tools/databricks-cli#dbfs-cli). -4) Some samples that use CreateSecretScopeOp reference a secret in Kubernetes. This secret must be -created before running these pipelines. 
For example: -```bash -kubectl create secret generic -n kubeflow mysecret --from-literal=username=alex -``` -5) [Install the Kubeflow Pipelines SDK](https://www.kubeflow.org/docs/pipelines/sdk/install-sdk/) -6) Install Azure Databricks for Kubeflow Pipelines package: -``` -pip install -e "git+https://github.com/kubeflow/pipelines#egg=kfp-azure-databricks&subdirectory=samples/contrib/azure-samples/kfp-azure-databricks" --upgrade -``` -To uninstall Azure Databricks for Kubeflow Pipelines package use: -``` -pip uninstall kfp-azure-databricks -``` - -## Testing the pipelines - -Install the requirements: -```bash -pip install --upgrade -r requirements.txt -``` -Compile a pipeline with any of the following commands: -```bash -dsl-compile --py databricks_run_pipeline.py --output databricks_run_pipeline.py.tar.gz -# Or -python3 databricks_run_pipeline.py -# Or -python3 pipeline_cli.py compile databricks_run_pipeline.py -``` -Then run the compiled pipeline in Kubeflow: -```bash -python3 pipeline_cli.py run databricks_run_pipeline.py.tar.gz http://localhost:8080/pipeline '{"run_name":"test-run","parameter":"10"}' -``` -Or compile and run a pipeline in Kubeflow with a single command: -```bash -python3 pipeline_cli.py compile_run databricks_run_pipeline.py http://localhost:8080/pipeline '{"run_name":"test-run","parameter":"10"}' -``` \ No newline at end of file diff --git a/samples/contrib/azure-samples/databricks-pipelines/databricks_cluster_pipeline.py b/samples/contrib/azure-samples/databricks-pipelines/databricks_cluster_pipeline.py deleted file mode 100644 index 1edda0b497b..00000000000 --- a/samples/contrib/azure-samples/databricks-pipelines/databricks_cluster_pipeline.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Create a cluster in Databricks. Then submit a one-time Run to that cluster.""" -import kfp.dsl as dsl -import kfp.compiler as compiler -import databricks - -def create_cluster(cluster_name): - return databricks.CreateClusterOp( - name="createcluster", - cluster_name=cluster_name, - spark_version="5.3.x-scala2.11", - node_type_id="Standard_D3_v2", - spark_conf={ - "spark.speculation": "true" - }, - num_workers=2 - ) - -def submit_run(run_name, cluster_id, parameter): - return databricks.SubmitRunOp( - name="submitrun", - run_name=run_name, - existing_cluster_id=cluster_id, - libraries=[{"jar": "dbfs:/docs/sparkpi.jar"}], - spark_jar_task={ - "main_class_name": "org.apache.spark.examples.SparkPi", - "parameters": [parameter] - } - ) - -def delete_run(run_name): - return databricks.DeleteRunOp( - name="deleterun", - run_name=run_name - ) - -def delete_cluster(cluster_name): - return databricks.DeleteClusterOp( - name="deletecluster", - cluster_name=cluster_name - ) - -@dsl.pipeline( - name="DatabricksCluster", - description="A toy pipeline that computes an approximation to pi with Azure Databricks." 
-) -def calc_pipeline(cluster_name="test-cluster", run_name="test-run", parameter="10"): - create_cluster_task = create_cluster(cluster_name) - submit_run_task = submit_run(run_name, create_cluster_task.outputs["cluster_id"], parameter) - delete_run_task = delete_run(run_name) - delete_run_task.after(submit_run_task) - delete_cluster_task = delete_cluster(cluster_name) - delete_cluster_task.after(delete_run_task) - -if __name__ == "__main__": - compiler.Compiler()._create_and_write_workflow( - pipeline_func=calc_pipeline, - package_path=__file__ + ".tar.gz") diff --git a/samples/contrib/azure-samples/databricks-pipelines/databricks_job_pipeline.py b/samples/contrib/azure-samples/databricks-pipelines/databricks_job_pipeline.py deleted file mode 100644 index 404bacd8805..00000000000 --- a/samples/contrib/azure-samples/databricks-pipelines/databricks_job_pipeline.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Submit a Job with implicit cluster creation to Databricks. Then submit a Run for that Job.""" -import kfp.dsl as dsl -import kfp.compiler as compiler -import databricks - -def create_job(job_name): - return databricks.CreateJobOp( - name="createjob", - job_name=job_name, - new_cluster={ - "spark_version":"5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - libraries=[{"jar": "dbfs:/docs/sparkpi.jar"}], - spark_jar_task={ - "main_class_name": "org.apache.spark.examples.SparkPi" - } - ) - -def submit_run(run_name, job_name, parameter): - return databricks.SubmitRunOp( - name="submitrun", - run_name=run_name, - job_name=job_name, - jar_params=[parameter] - ) - -def delete_run(run_name): - return databricks.DeleteRunOp( - name="deleterun", - run_name=run_name - ) - -def delete_job(job_name): - return databricks.DeleteJobOp( - name="deletejob", - job_name=job_name - ) - -@dsl.pipeline( - name="DatabricksJob", - description="A toy pipeline that computes an approximation to pi with Azure Databricks." -) -def calc_pipeline(job_name="test-job", run_name="test-job-run", parameter="10"): - create_job_task = create_job(job_name) - submit_run_task = submit_run(run_name, job_name, parameter) - submit_run_task.after(create_job_task) - delete_run_task = delete_run(run_name) - delete_run_task.after(submit_run_task) - delete_job_task = delete_job(job_name) - delete_job_task.after(delete_run_task) - -if __name__ == "__main__": - compiler.Compiler()._create_and_write_workflow( - pipeline_func=calc_pipeline, - package_path=__file__ + ".tar.gz") diff --git a/samples/contrib/azure-samples/databricks-pipelines/databricks_notebook_pipeline.py b/samples/contrib/azure-samples/databricks-pipelines/databricks_notebook_pipeline.py deleted file mode 100644 index 3de44eb7dbf..00000000000 --- a/samples/contrib/azure-samples/databricks-pipelines/databricks_notebook_pipeline.py +++ /dev/null @@ -1,167 +0,0 @@ -"""Import a notebook into a Databricks workspace and submit a job run to execute it in a cluster. -Notebook will accept some parameters and access a file in DBFS and some secrets in a secret scope. 
-""" -from pathlib import Path -import base64 -import kfp.dsl as dsl -import kfp.compiler as compiler -import databricks - -def create_dbfsblock(block_name): - return databricks.CreateDbfsBlockOp( - name="createdbfsblock", - block_name=block_name, - data="QWxlamFuZHJvIENhbXBvcyBNYWdlbmNpbw==", - path="/data/foo.txt" - ) - -def create_secretscope(scope_name): - return databricks.CreateSecretScopeOp( - name="createsecretscope", - scope_name=scope_name, - initial_manage_principal="users", - secrets=[ - { - "key": "string-secret", - "string_value": "helloworld" - }, - { - "key": "byte-secret", - "byte_value": "aGVsbG93b3JsZA==" - }, - { - "key": "ref-secret", - "value_from": { - "secret_key_ref": { - "name": "mysecret", - "key": "username" - } - } - } - ] - ) - -def import_workspace_item(item_name, user): - current_path = Path(__file__).parent - notebook_file_name = current_path.joinpath("notebooks", "ScalaExampleNotebook") - notebook = open(notebook_file_name).read().encode("utf-8") - notebook_base64 = base64.b64encode(notebook) - return databricks.ImportWorkspaceItemOp( - name="importworkspaceitem", - item_name=item_name, - content=notebook_base64, - path=f"/Users/{user}/ScalaExampleNotebook", - language="SCALA", - file_format="SOURCE" - ) - -def create_cluster(cluster_name): - return databricks.CreateClusterOp( - name="createcluster", - cluster_name=cluster_name, - spark_version="5.3.x-scala2.11", - node_type_id="Standard_D3_v2", - spark_conf={ - "spark.speculation": "true" - }, - num_workers=2 - ) - -def create_job(job_name, cluster_id, user): - return databricks.CreateJobOp( - name="createjob", - job_name=job_name, - existing_cluster_id=cluster_id, - notebook_task={ - "notebook_path": f"/Users/{user}/ScalaExampleNotebook" - } - ) - -def submit_run(run_name, job_name, parameter1, parameter2): - return databricks.SubmitRunOp( - name="submitrun", - run_name=run_name, - job_name=job_name, - notebook_params={ - "param1": parameter1, - "param2": parameter2 - } - ) - -def delete_run(run_name): - return databricks.DeleteRunOp( - name="deleterun", - run_name=run_name - ) - -def delete_job(job_name): - return databricks.DeleteJobOp( - name="deletejob", - job_name=job_name - ) - -def delete_cluster(cluster_name): - return databricks.DeleteClusterOp( - name="deletecluster", - cluster_name=cluster_name - ) - -def delete_workspace_item(item_name): - return databricks.DeleteWorkspaceItemOp( - name="deleteworkspaceitem", - item_name=item_name - ) - -def delete_secretscope(scope_name): - return databricks.DeleteSecretScopeOp( - name="deletesecretscope", - scope_name=scope_name - ) - -def delete_dbfsblock(block_name): - return databricks.DeleteDbfsBlockOp( - name="deletedbfsblock", - block_name=block_name - ) - -@dsl.pipeline( - name="Databrick", - description="A toy pipeline that runs a sample notebook in a Databricks cluster." 
-) -def calc_pipeline( - dbfsblock_name="test-block", - secretescope_name="test-scope", - workspaceitem_name="test-item", - cluster_name="test-cluster", - job_name="test-job", - run_name="test-run", - user="user@foo.com", - parameter1="38", - parameter2="43"): - create_dbfsblock_task = create_dbfsblock(dbfsblock_name) - create_secretscope_task = create_secretscope(secretescope_name) - import_workspace_item_task = import_workspace_item(workspaceitem_name, user) - create_cluster_task = create_cluster(cluster_name) - create_job_task = create_job(job_name, create_cluster_task.outputs["cluster_id"], user) - submit_run_task = submit_run(run_name, job_name, parameter1, parameter2) - submit_run_task.after(create_dbfsblock_task) - submit_run_task.after(create_secretscope_task) - submit_run_task.after(import_workspace_item_task) - submit_run_task.after(create_job_task) - delete_run_task = delete_run(run_name) - delete_run_task.after(submit_run_task) - delete_job_task = delete_job(job_name) - delete_job_task.after(delete_run_task) - delete_cluster_task = delete_cluster(cluster_name) - delete_cluster_task.after(delete_job_task) - delete_workspace_item_task = delete_workspace_item(workspaceitem_name) - delete_workspace_item_task.after(submit_run_task) - delete_secretscope_task = delete_secretscope(secretescope_name) - delete_secretscope_task.after(submit_run_task) - delete_dbfsblock_task = delete_dbfsblock(dbfsblock_name) - delete_dbfsblock_task.after(submit_run_task) - -if __name__ == "__main__": - compiler.Compiler()._create_and_write_workflow( - pipeline_func=calc_pipeline, - package_path=__file__ + ".tar.gz") diff --git a/samples/contrib/azure-samples/databricks-pipelines/databricks_run_pipeline.py b/samples/contrib/azure-samples/databricks-pipelines/databricks_run_pipeline.py deleted file mode 100644 index 70a478e0d36..00000000000 --- a/samples/contrib/azure-samples/databricks-pipelines/databricks_run_pipeline.py +++ /dev/null @@ -1,40 +0,0 @@ -"""Submit a one-time Run with implicit cluster creation to Databricks.""" -import kfp.dsl as dsl -import kfp.compiler as compiler -import databricks - -def submit_run(run_name, parameter): - return databricks.SubmitRunOp( - name="submitrun", - run_name=run_name, - new_cluster={ - "spark_version":"5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - libraries=[{"jar": "dbfs:/docs/sparkpi.jar"}], - spark_jar_task={ - "main_class_name": "org.apache.spark.examples.SparkPi", - "parameters": [parameter] - } - ) - -def delete_run(run_name): - return databricks.DeleteRunOp( - name="deleterun", - run_name=run_name - ) - -@dsl.pipeline( - name="DatabricksRun", - description="A toy pipeline that computes an approximation to pi with Azure Databricks." 
-) -def calc_pipeline(run_name="test-run", parameter="10"): - submit_run_task = submit_run(run_name, parameter) - delete_run_task = delete_run(run_name) - delete_run_task.after(submit_run_task) - -if __name__ == "__main__": - compiler.Compiler()._create_and_write_workflow( - pipeline_func=calc_pipeline, - package_path=__file__ + ".tar.gz") diff --git a/samples/contrib/azure-samples/databricks-pipelines/databricks_secretscope_pipeline.py b/samples/contrib/azure-samples/databricks-pipelines/databricks_secretscope_pipeline.py deleted file mode 100644 index b38c2b40af2..00000000000 --- a/samples/contrib/azure-samples/databricks-pipelines/databricks_secretscope_pipeline.py +++ /dev/null @@ -1,75 +0,0 @@ -"""Create a new secret scope in Databricks.""" -import kfp.dsl as dsl -import kfp.compiler as compiler -import databricks - -def create_secretscope( - scope_name, - string_secret, - byte_secret, - ref_secret_name, - ref_secret_key, - principal_name): - return databricks.CreateSecretScopeOp( - name="createsecretscope", - scope_name=scope_name, - initial_manage_principal="users", - secrets=[ - { - "key": "string-secret", - "string_value": string_secret - }, - { - "key": "byte-secret", - "byte_value": byte_secret - }, - { - "key": "ref-secret", - "value_from": { - "secret_key_ref": { - "name": ref_secret_name, - "key": ref_secret_key - } - } - } - ], - acls=[ - { - "principal": principal_name, - "permission": "READ" - } - ] - ) - -def delete_secretscope(scope_name): - return databricks.DeleteSecretScopeOp( - name="deletesecretscope", - scope_name=scope_name - ) - -@dsl.pipeline( - name="DatabricksSecretScope", - description="A toy pipeline that sets some secrets and acls in an Azure Databricks Secret Scope." -) -def calc_pipeline( - scope_name="test-secretscope", - string_secret="helloworld", - byte_secret="aGVsbG93b3JsZA==", - ref_secret_name="mysecret", - ref_secret_key="username", - principal_name="user@foo.com" - ): - create_secretscope_task = create_secretscope( - scope_name, - string_secret, - byte_secret, - ref_secret_name, - ref_secret_key, - principal_name) - delete_secretscope_task = delete_secretscope(scope_name) - delete_secretscope_task.after(create_secretscope_task) - -if __name__ == "__main__": - compiler.Compiler()._create_and_write_workflow( - pipeline_func=calc_pipeline, - package_path=__file__ + ".tar.gz") diff --git a/samples/contrib/azure-samples/databricks-pipelines/databricks_workspaceitem_pipeline.py b/samples/contrib/azure-samples/databricks-pipelines/databricks_workspaceitem_pipeline.py deleted file mode 100644 index 7972dca5710..00000000000 --- a/samples/contrib/azure-samples/databricks-pipelines/databricks_workspaceitem_pipeline.py +++ /dev/null @@ -1,34 +0,0 @@ -"""Import an item into a Databricks Workspace.""" -import kfp.dsl as dsl -import kfp.compiler as compiler -import databricks - -def import_workspace_item(item_name, user): - return databricks.ImportWorkspaceItemOp( - name="importworkspaceitem", - item_name=item_name, - content="cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK", - path=f"/Users/{user}/ScalaExampleNotebook", - language="SCALA", - file_format="SOURCE" - ) - -def delete_workspace_item(item_name): - return databricks.DeleteWorkspaceItemOp( - name="deleteworkspaceitem", - item_name=item_name - ) - -@dsl.pipeline( - name="DatabricksWorkspaceItem", - description="A toy pipeline that imports some source code into a Databricks Workspace." 
-) -def calc_pipeline(item_name="test-item", user="user@foo.com"): - import_workspace_item_task = import_workspace_item(item_name, user) - delete_workspace_item_task = delete_workspace_item(item_name) - delete_workspace_item_task.after(import_workspace_item_task) - -if __name__ == "__main__": - compiler.Compiler()._create_and_write_workflow( - pipeline_func=calc_pipeline, - package_path=__file__ + ".tar.gz") diff --git a/samples/contrib/azure-samples/databricks-pipelines/notebooks/ScalaExampleNotebook b/samples/contrib/azure-samples/databricks-pipelines/notebooks/ScalaExampleNotebook deleted file mode 100644 index 2d852eccff9..00000000000 --- a/samples/contrib/azure-samples/databricks-pipelines/notebooks/ScalaExampleNotebook +++ /dev/null @@ -1,23 +0,0 @@ -val param1 = dbutils.widgets.get("param1") -val param2 = dbutils.widgets.get("param2") -var output = s"param1 = $param1, param2 = $param2" - -val txt = dbutils.fs.head("/data/foo.txt") -output = s"$output, foo.txt exists and contains '$txt'" - -val string_secret = dbutils.secrets.get(scope = "test-scope", key = "string-secret") -if (string_secret == "helloworld") { - output = s"$output, string-secret is correct" -} - -val byte_secret = dbutils.secrets.get(scope = "test-scope", key = "byte-secret") -if (byte_secret == "helloworld") { - output = s"$output, byte-secret is correct" -} - -val ref_secret = dbutils.secrets.get(scope = "test-scope", key = "ref-secret") -if (ref_secret == "alex") { - output = s"$output, ref-secret is correct" -} - -dbutils.notebook.exit(output) \ No newline at end of file diff --git a/samples/contrib/azure-samples/databricks-pipelines/pipeline_cli.py b/samples/contrib/azure-samples/databricks-pipelines/pipeline_cli.py deleted file mode 100644 index 2b3abca9712..00000000000 --- a/samples/contrib/azure-samples/databricks-pipelines/pipeline_cli.py +++ /dev/null @@ -1,85 +0,0 @@ -import os -import sys -import logging -import kfp -import fire - -logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO) - -class MyCLI: - """ - CLI for Kubeflow Pipelines. - - This CLI allows us to compile and run our pipelines in Kubeflow Pipelines without accessing - Kubeflow portal. - """ - - @staticmethod - def compile( - path - ): - """ - Compile a Kubeflow pipeline. - - Args: - path: Path to the pipeline (e.g. databricks_run_pipeline.py) - """ - - logging.info("Compiling '%s'...", path) - compiled_path = f"{path}.tar.gz" - result = os.system(f"dsl-compile --py {path} --output {compiled_path}") - if result != 0: - logging.error("Failed to compile '%s' with error code %i.", path, result) - sys.exit(result) - - return compiled_path - - @staticmethod - def run( - path, - host, - params={} - ): - """ - Run a compiled pipeline in Kubeflow. - - Args: - path: Path to the compiled pipeline (e.g. databricks_run_pipeline.py.tar.gz) - host: Host name to use to talk to Kubeflow Pipelines (e.g. http://localhost:8080/pipeline) - params: Pipeline parameters (e.g. '{\"run_name\":\"test-run\",\"parameter\":\"10\"}') - """ - - logging.info("Running '%s' in '%s'...", path, host) - client = kfp.Client(f"{host}") - try: - result = client.create_run_from_pipeline_package( - pipeline_file=path, - arguments=params - ) - logging.info("View run: %s/#/runs/details/%s", - host, - result.run_id) - except Exception as ex: - logging.error("Failed to run '{%s}' with error:\n{%s}", path, ex) - sys.exit(1) - - @staticmethod - def compile_run( - path, - host, - params={} - ): - """ - Compile and run a Kubeflow pipeline. 
- - Args: - path: Path to the pipeline (e.g. databricks_run_pipeline.py) - host: Host name to use to talk to Kubeflow Pipelines (e.g. http://localhost:8080/pipeline) - params: Pipeline parameters (e.g. '{\"run_name\":\"test-run\",\"parameter\":\"10\"}') - """ - - compiled_path = MyCLI.compile(path) - MyCLI.run(compiled_path, host, params) - -if __name__ == '__main__': - fire.Fire(MyCLI()) diff --git a/samples/contrib/azure-samples/databricks-pipelines/requirements.txt b/samples/contrib/azure-samples/databricks-pipelines/requirements.txt deleted file mode 100644 index 96aedcf195a..00000000000 --- a/samples/contrib/azure-samples/databricks-pipelines/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -https://storage.googleapis.com/ml-pipeline/release/latest/kfp.tar.gz --e "git+https://github.com/kubeflow/pipelines#egg=kfp-azure-databricks&subdirectory=samples/contrib/azure-samples/kfp-azure-databricks" -fire>=0.2.1 diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/README.md b/samples/contrib/azure-samples/kfp-azure-databricks/README.md deleted file mode 100644 index adddec2dbb3..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/README.md +++ /dev/null @@ -1,184 +0,0 @@ -# Introduction to Azure Databricks for Kubeflow Pipelines - -Azure Databricks Package provides a set of [Kubeflow Pipeline](https://www.kubeflow.org/docs/pipelines/) -Tasks (Ops) which let us manipulate [Databricks](https://azure.microsoft.com/services/databricks/) -resources using the [Azure Databricks Operator for Kubernetes]( -https://github.com/microsoft/azure-databricks-operator). This makes the user experience much nicer, -and less error prone, than using the [ResourceOp]( -https://www.kubeflow.org/docs/pipelines/sdk/manipulate-resources/#resourceop) to manipulate -these Databricks resources. - -## Supported Ops - -- CreateClusterOp, to create a cluster in Databricks. -- DeleteClusterOp, to delete a cluster created with CreateClusterOp. -- CreateJobOp, to create a Spark job in Databricks. -- DeleteJobOp, to delete a job created with CreateJobOp. -- SubmitRunOp, to submit a job run in Databricks. -- DeleteRunOp, to delete a run submitted with SubmitRunOp. -- CreateSecretScopeOp, to create a secret scope in Databricks. -- DeleteSecretScopeOp, to delete a secret scope created with CreateSecretScopeOp. -- ImportWorkspaceItemOp, to import an item into a Databricks Workspace. -- DeleteWorkspaceItemOp, to delete an item imported with ImportWorkspaceItemOp. -- CreateDbfsBlockOp, to create Dbfs Block in Databricks. -- DeleteDbfsBlockOp, to delete Dbfs Block created with CreateDbfsBlockOp. - -For each of these there are two ways a Kubeflow user can create the Ops: -1) By passing the complete Databricks spec for the Op within a Python Dictionary. -2) By using named parameters. 
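
For example, the same cluster creation task can be written either way. The sketch below mirrors the examples in the `CreateClusterOp` docstring; the cluster settings shown are illustrative only:

```python
import kfp.dsl as dsl
import databricks


@dsl.pipeline(name="CreateClusterExamples")
def create_cluster_examples(cluster_name="test-cluster"):
    # Option 1: pass the complete Databricks cluster spec as a Python dictionary.
    databricks.CreateClusterOp(
        name="createcluster-from-spec",
        cluster_name=cluster_name,
        spec={
            "spark_version": "5.3.x-scala2.11",
            "node_type_id": "Standard_D3_v2",
            "num_workers": 2,
        },
    )

    # Option 2: pass the same settings as named parameters.
    databricks.CreateClusterOp(
        name="createcluster-from-params",
        cluster_name=cluster_name,
        spark_version="5.3.x-scala2.11",
        node_type_id="Standard_D3_v2",
        num_workers=2,
    )
```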
- -## Setup - -1) [Create an Azure Databricks workspace]( - https://docs.microsoft.com/en-us/azure/databricks/getting-started/try-databricks?toc=https%3A%2F%2Fdocs.microsoft.com%2Fen-us%2Fazure%2Fazure-databricks%2FTOC.json&bc=https%3A%2F%2Fdocs.microsoft.com%2Fen-us%2Fazure%2Fbread%2Ftoc.json#--step-2-create-an-azure-databricks-workspace) -2) [Deploy the Azure Databricks Operator for Kubernetes]( - https://github.com/microsoft/azure-databricks-operator/blob/master/docs/deploy.md) -3) [Install the Kubeflow Pipelines SDK](https://www.kubeflow.org/docs/pipelines/sdk/install-sdk/) -4) Install Databricks Package: -``` -pip install -e "git+https://github.com/kubeflow/pipelines#egg=kfp-azure-databricks&subdirectory=samples/contrib/azure-samples/kfp-azure-databricks" --upgrade -``` -To uninstall Databricks Package use: -``` -pip uninstall kfp-azure-databricks -``` - -## Example - -The following sample pipeline will submit a one-time job run with implicit cluster creation to Azure -Databricks: - -```python -import kfp.dsl as dsl -import databricks - -@dsl.pipeline( - name="DatabricksRun", - description="A toy pipeline that computes an approximation to pi with Databricks." -) -def calc_pipeline(run_name="test-run", parameter="10"): - submit_run_task = databricks.SubmitRunOp( - name="submitrun", - run_name=run_name, - new_cluster={ - "spark_version": "5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - libraries=[{"jar": "dbfs:/docs/sparkpi.jar"}], - spark_jar_task={ - "main_class_name": "org.apache.spark.examples.SparkPi", - "parameters": [parameter] - } - ) - - delete_run_task = databricks.DeleteRunOp( - name="deleterun", - run_name=run_name - ) - delete_run_task.after(submit_run_task) -``` - -This sample is based on the following article: [Create a spark-submit job]( -https://docs.databricks.com/dev-tools/api/latest/examples.html#create-and-run-a-jar-job), which -points to the library *sparkpi.jar*. You may upload the library to [Databricks -File System](https://docs.microsoft.com/en-us/azure/databricks/data/databricks-file-system) using -[DBFS CLI](https://docs.microsoft.com/en-us/azure/databricks/dev-tools/databricks-cli#dbfs-cli). - -## Example using ResourceOp - -This sample pipeline shows the code that would be required to submit a one-time job run with -implicit cluster creation to Azure Databricks, but using ResourceOp instead of this package: - -```python -import kfp.dsl as dsl -import kfp.compiler as compiler - -@dsl.pipeline( - name="DatabricksRun", - description="A toy pipeline that computes an approximation to pi with Databricks." 
-) -def calc_pipeline(run_name="test-run", parameter="10"): - submit_run_task = dsl.ResourceOp( - name="submitrun", - k8s_resource={ - "apiVersion": "databricks.microsoft.com/v1alpha1", - "kind": "Run", - "metadata": { - "name":run_name, - }, - "spec":{ - "run_name": run_name, - "new_cluster": { - "spark_version": "5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - "libraries": [{"jar": "dbfs:/docs/sparkpi.jar"}], - "spark_jar_task": { - "main_class_name": "com.databricks.ComputeModels", - "parameters": [parameter] - } - }, - }, - action="create", - success_condition="status.metadata.state.life_cycle_state in (TERMINATED, SKIPPED, INTERNAL_ERROR)", - attribute_outputs={ - "name": "{.metadata.name}", - "job_id": "{.status.metadata.job_id}", - "number_in_job": "{.status.metadata.number_in_job}", - "run_id": "{.status.metadata.run_id}", - "run_name": "{.status.metadata.run_name}", - "life_cycle_state": "{.status.metadata.state.life_cycle_state}", - "result_state": "{.status.metadata.state.result_state}", - "notebook_output_result": "{.status.notebook_output.result}", - "notebook_output_truncated": "{.status.notebook_output.truncated}", - "error": "{.status.error}" - } - ) - - delete_run_task = dsl.ResourceOp( - name="deleterun", - k8s_resource={ - "apiVersion": "databricks.microsoft.com/v1alpha1", - "kind": "Run", - "metadata": { - "name": run_name - } - }, - action="delete" - ) - delete_run_task.after(submit_run_task) -``` - -## Additional examples - -More sample pipelines can be found in folder -[samples/contrib/azure-samples/databricks-pipelines](../databricks-pipelines/) and in the tests of -this package: [samples/contrib/azure-samples/kfp-azure-databricks/tests](./tests/). - -## Additional information -- [Kubeflow Pipelines](https://www.kubeflow.org/docs/pipelines/) -- [Azure Databricks documentation](https://docs.microsoft.com/azure/azure-databricks/) -- [Azure Databricks Operator for Kubernetes](https://github.com/microsoft/azure-databricks-operator) -- [Golang SDK for DataBricks REST API 2.0 and Azure DataBricks REST API 2.0]( - https://github.com/xinsnake/databricks-sdk-golang), used by Azure Databricks Operator. 
-- [Databricks REST API 2.0](https://docs.databricks.com/dev-tools/api/latest/index.html) -- [Azure Databricks REST API 2.0]( - https://docs.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/) - -The following articles provide information on the supported spec fields for the supported Databricks -Ops: -- Cluster Ops: [Azure Databricks Cluster API]( - https://docs.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/clusters) -- Job Ops: [Azure Databricks Jobs API]( - https://docs.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/jobs) -- Run Ops: [Azure Databricks Jobs API - Runs Submit]( - https://docs.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/jobs#--runs-submit) -- Secret Scope Ops: [Azure Databricks Secrets API]( - https://docs.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/secrets) -- Workspace Item Ops: [Azure Databricks Workspace API]( - https://docs.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/workspace) -- DbfsBlock Ops: [Azure Databricks DBFS API]( - https://docs.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/dbfs) - diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/databricks/__init__.py b/samples/contrib/azure-samples/kfp-azure-databricks/databricks/__init__.py deleted file mode 100644 index f9190cc6b94..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/databricks/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from ._job_op import CreateJobOp, DeleteJobOp -from ._run_op import SubmitRunOp, DeleteRunOp -from ._cluster_op import CreateClusterOp, DeleteClusterOp -from ._secretscope_op import CreateSecretScopeOp, DeleteSecretScopeOp -from ._workspaceitem_op import ImportWorkspaceItemOp, DeleteWorkspaceItemOp -from ._dbfsblocks_op import CreateDbfsBlockOp, DeleteDbfsBlockOp -__version__ = "0.2.0" diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/databricks/_cluster_op.py b/samples/contrib/azure-samples/kfp-azure-databricks/databricks/_cluster_op.py deleted file mode 100644 index fcf758e5e43..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/databricks/_cluster_op.py +++ /dev/null @@ -1,274 +0,0 @@ -import json -from kfp.dsl import ResourceOp - -class CreateClusterOp(ResourceOp): - """Represents an Op which will be translated into a Databricks Cluster creation resource - template. 
- - Examples: - - import databricks - - databricks.CreateClusterOp( - name="createcluster", - cluster_name="test-cluster", - spec={ - "spark_version":"5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "spark_conf": { - "spark.speculation": "true" - }, - "num_workers": 2 - } - ) - - databricks.CreateClusterOp( - name="createcluster", - cluster_name="test-cluster", - spark_version="5.3.x-scala2.11", - node_type_id="Standard_D3_v2", - spark_conf={ - "spark.speculation": "true" - }, - num_workers=2 - ) - - databricks.CreateClusterOp( - name="createcluster", - cluster_name="test-cluster", - spark_version="5.3.x-scala2.11", - node_type_id="Standard_D3_v2", - autoscale={ - "min_workers": 2, - "max_workers": 50 - } - ) - """ - - def __init__(self, - name: str = None, - k8s_name: str = None, - cluster_name: str = None, - spec: {} = None, - num_workers: int = None, - autoscale: {} = None, - spark_version: str = None, - spark_conf: {} = None, - node_type_id: str = None, - driver_node_type_id: str = None, - custom_tags: {} = None, - cluster_log_conf: {} = None, - init_scripts: {} = None, - spark_env_vars: {} = None, - autotermination_minutes: int = None, - instance_pool_id: str = None): - """Create a new instance of CreateClusterOp. - - Args: - - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, cluster_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - cluster_name: Cluster name requested by the user. - spec: Full specification of the Databricks cluster to create. - num_workers: Number of worker nodes that this cluster should have. - autoscale: Parameters needed in order to automatically scale clusters up and down based - on load. - spark_version: The runtime version of the cluster. - spark_conf: An object containing a set of optional, user-specified Spark configuration - key-value pairs. - node_type_id: This field encodes, through a single value, the resources available to - each of the Spark nodes in this cluster. - driver_node_type_id: The node type of the Spark driver. - custom_tags: Additional tags for cluster resources. - cluster_log_conf: The configuration for delivering Spark logs to a long-term storage - destination. - init_scripts: The configuration for storing init scripts. - spark_env_vars: An object containing a set of optional, user-specified environment - variable key-value pairs. - autotermination_minutes: Automatically terminates the cluster after it is inactive for - this time in minutes. If not set, this cluster will not be automatically terminated. - instance_pool_id: The optional ID of the instance pool to which the cluster belongs. - - Raises: - - ValueError: If no k8s resource name or Cluster name are provided. 
- """ - - if not spec: - spec = {} - - if cluster_name: - spec["cluster_name"] = cluster_name - if num_workers: - spec["num_workers"] = num_workers - if autoscale: - spec["autoscale"] = autoscale - if spark_version: - spec["spark_version"] = spark_version - if spark_conf: - spec["spark_conf"] = spark_conf - if node_type_id: - spec["node_type_id"] = node_type_id - if driver_node_type_id: - spec["driver_node_type_id"] = driver_node_type_id - if custom_tags: - spec["custom_tags"] = custom_tags - if cluster_log_conf: - spec["cluster_log_conf"] = cluster_log_conf - if init_scripts: - spec["init_scripts"] = init_scripts - if spark_env_vars: - spec["spark_env_vars"] = spark_env_vars - if autotermination_minutes: - spec["autotermination_minutes"] = autotermination_minutes - if instance_pool_id: - spec["instance_pool_id"] = instance_pool_id - - if not k8s_name and "cluster_name" in spec: - k8s_name = spec["cluster_name"] - elif not k8s_name: - raise ValueError("You need to provide a k8s_name or a cluster_name.") - - super().__init__( - k8s_resource={ - "apiVersion": "databricks.microsoft.com/v1alpha1", - "kind": "Dcluster", - "metadata": { - "name": k8s_name, - }, - "spec": spec, - }, - action="create", - success_condition="status.cluster_info.state in (RUNNING, TERMINATED, UNKNOWN)", - attribute_outputs={ - "name": "{.metadata.name}", - "cluster_id": "{.status.cluster_info.cluster_id}", - "cluster_name": "{.status.cluster_info.cluster_name}", - "state": "{.status.cluster_info.state}" - }, - name=name) - - @classmethod - def from_json_spec(cls, - name: str = None, - k8s_name: str = None, - cluster_name: str = None, - json_spec: str = None): - """Create a new instance of CreateClusterOp from a json specification. - - Args: - - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, cluster_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - cluster_name: Cluster name requested by the user. - json_spec: Full specification of the Databricks cluster to create in json format. - """ - - spec = json.loads(json_spec) - return cls(name=name, k8s_name=k8s_name, cluster_name=cluster_name, spec=spec) - - @classmethod - def from_file_name(cls, - name: str = None, - k8s_name: str = None, - cluster_name: str = None, - file_name: str = None): - """Create a new instance of CreateClusterOp from a file with a json specification. - - Args: - - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, cluster_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - cluster_name: Cluster name requested by the user. - json_spec_file_name: Name of the file containing the full specification of the - Databricks cluster to create in json format. - - Raises: - - ValueError: if the file name doesn't exist. 
- """ - - with open(file_name) as json_file: - spec = json.loads(json_file.read()) - return cls(name=name, k8s_name=k8s_name, cluster_name=cluster_name, spec=spec) - - @property - def resource(self): - """`Resource` object that represents the `resource` property in - `io.argoproj.workflow.v1alpha1.Template`. - """ - return self._resource - -class DeleteClusterOp(ResourceOp): - """Represents an Op which will be translated into a Databricks Cluster deletion resource - template. - - Example: - - import databricks - - databricks.DeleteClusterOp( - name="deletecluster", - cluster_name="test-cluster" - ) - """ - - def __init__(self, - name: str = None, - k8s_name: str = None, - cluster_name: str = None): - """Create a new instance of DeleteClusterOp. - - Args: - - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, cluster_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - cluster_name: The name of the cluster. - If k8s_name is provided, this will be ignored. - - Raises: - - ValueError: If no k8s resource name or Cluster name are provided. - """ - - k8s_name = k8s_name or cluster_name - if not k8s_name: - raise ValueError("You need to provide a k8s_name or a cluster_name.") - - super().__init__( - k8s_resource={ - "apiVersion": "databricks.microsoft.com/v1alpha1", - "kind": "Dcluster", - "metadata": { - "name": k8s_name - } - }, - action="delete", - name=name) - - @property - def resource(self): - """`Resource` object that represents the `resource` property in - `io.argoproj.workflow.v1alpha1.Template`. - """ - return self._resource diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/databricks/_dbfsblocks_op.py b/samples/contrib/azure-samples/kfp-azure-databricks/databricks/_dbfsblocks_op.py deleted file mode 100644 index b1c44dfc8eb..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/databricks/_dbfsblocks_op.py +++ /dev/null @@ -1,209 +0,0 @@ -import json -from kfp.dsl import ResourceOp - -class CreateDbfsBlockOp(ResourceOp): - """Represents an Op which will be translated into a Databricks DbfsBlock Create - resource template. - - Example: - - import databricks - - databricks.CreateDbfsBlockOp( - name="createdbfsblock", - block_name="test-item", - spec={ - "data": "cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK", - "path": "/data/foo.txt", - } - ) - - databricks.CreateDbfsBlockOp( - name="createdbfsblock", - block_name="test-item", - data="cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK", - path="/data/foo.txt", - ) - """ - - def __init__(self, - name: str = None, - k8s_name: str = None, - block_name: str = None, - spec: {} = None, - path: str = None, - data: str = None): - """Create a new instance of CreateDbfsBlockOp. - - Args: - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, block_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. 
- block_name: A name for the DbfsBlock. - If k8s_name is provided, this will be ignored. - data: The base64-encoded content. - This has a limit of 10 MB. - path: The absolute path of the file. - Importing directory is only support for DBC format. - - Raises: - - ValueError: If no k8s resource name or DbfsBlock name are provided. - """ - - if not spec: - spec = {} - if path: - spec["path"] = path - if data: - spec["data"] = data - - k8s_name = k8s_name or block_name - if not k8s_name: - raise ValueError("You need to provide a k8s_name or a block_name.") - - super().__init__( - k8s_resource={ - "apiVersion": "databricks.microsoft.com/v1alpha1", - "kind": "DbfsBlock", - "metadata": { - "name": k8s_name - }, - "spec": spec - }, - action="create", - success_condition="status.file_hash", - attribute_outputs={ - "name": "{.metadata.name}", - "file_info_path": "{.status.file_info.path}", - "file_info_is_dir": "{.status.file_info.is_dir}", - "file_info_file_size": "{.status.file_info.file_size}", - "file_hash": "{.status.file_hash}" - }, - name=name) - - @classmethod - def from_json_spec(cls, - name: str = None, - k8s_name: str = None, - block_name: str = None, - json_spec: str = None): - """Create a new instance of CreateDbfsBlockOp from a json specification. - - Args: - - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, block_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - block_name: A name for the DbfsBlock Item. - If k8s_name is provided, this will be ignored. - json_spec: Full specification of the DbfsBlock Item to import in json format. - """ - - spec = json.loads(json_spec) - return cls(name=name, k8s_name=k8s_name, block_name=block_name, spec=spec) - - @classmethod - def from_file_name(cls, - name: str = None, - k8s_name: str = None, - block_name: str = None, - file_name: str = None): - """Create a new instance of CreateDbfsBlockOp from a file with json specification. - - Args: - - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, block_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - block_name: A name for the DbfsBlock Item. - If k8s_name is provided, this will be ignored. - json_spec: Name of the file containing the full specification of the DbfsBlock Item to - import in json format. - - Raises: - - ValueError: if the file name doesn't exist. - """ - - with open(file_name) as json_file: - spec = json.loads(json_file.read()) - return cls(name=name, k8s_name=k8s_name, block_name=block_name, spec=spec) - - @property - def resource(self): - """`Resource` object that represents the `resource` property in - `io.argoproj.workflow.v1alpha1.Template`. - """ - return self._resource - -class DeleteDbfsBlockOp(ResourceOp): - """Represents an Op which will be translated into a Databricks DbfsBlock Item deletion - resource template. 
- - Example: - - import databricks - - databricks.DeleteDbfsBlockOp( - name="deletedbfsblock", - block_name="test-item" - ) - """ - - def __init__(self, - name: str = None, - k8s_name: str = None, - block_name: str = None): - """Create a new instance of DeleteDbfsBlockOp. - - Args: - - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, block_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - block_name: The name of the DbfsBlock Item. - If k8s_name is provided, this will be ignored. - - Raises: - - ValueError: If no k8s resource name or Dbfs Item name are provided. - """ - - k8s_name = k8s_name or block_name - if not k8s_name: - raise ValueError("You need to provide a k8s_name or a block_name.") - - super().__init__( - k8s_resource={ - "apiVersion": "databricks.microsoft.com/v1alpha1", - "kind": "DbfsBlock", - "metadata": { - "name": k8s_name - } - }, - action="delete", - name=name) - - @property - def resource(self): - """`Resource` object that represents the `resource` property in - `io.argoproj.workflow.v1alpha1.Template`. - """ - return self._resource diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/databricks/_job_op.py b/samples/contrib/azure-samples/kfp-azure-databricks/databricks/_job_op.py deleted file mode 100644 index 62fb0d2392b..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/databricks/_job_op.py +++ /dev/null @@ -1,312 +0,0 @@ -import json -from kfp.dsl import ResourceOp - -class CreateJobOp(ResourceOp): - """Represents an Op which will be translated into a Databricks Job creation - resource template. - - Example: - - import databricks - - databricks.CreateJobOp( - name="createjob", - job_name="test-job", - spec={ - "new_cluster" : { - "spark_version":"5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - "libraries" : [ - { - "jar": 'dbfs:/my-jar.jar' - }, - { - "maven": { - "coordinates": 'org.jsoup:jsoup:1.7.2' - } - } - ], - "timeout_seconds" : 3600, - "max_retries": 1, - "schedule":{ - "quartz_cron_expression": "0 15 22 ? * *", - "timezone_id": "America/Los_Angeles", - }, - "spark_jar_task": { - "main_class_name": "com.databricks.ComputeModels", - }, - } - ) - - databricks.CreateJobOp( - name="createjob", - job_name="test-job", - new_cluster={ - "spark_version":"5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - libraries=[ - { - "jar": 'dbfs:/my-jar.jar' - }, - { - "maven": { - "coordinates": 'org.jsoup:jsoup:1.7.2' - } - } - ], - timeout_seconds=3600, - max_retries=1, - schedule={ - "quartz_cron_expression": "0 15 22 ? 
* *", - "timezone_id": "America/Los_Angeles", - }, - spark_jar_task={ - "main_class_name": "com.databricks.ComputeModels", - } - ) - """ - - def __init__(self, - name: str = None, - k8s_name: str = None, - job_name: str = None, - spec: {} = None, - existing_cluster_id: str = None, - new_cluster: {} = None, - libraries: {} = None, - spark_jar_task: {} = None, - spark_python_task: {} = None, - spark_submit_task: {} = None, - notebook_task: {} = None, - timeout_seconds: int = None, - max_retries: int = None, - min_retry_interval_millis: int = None, - retry_on_timeout: bool = None, - schedule: {} = None, - max_concurrent_runs: int = None, - email_notifications: {} = None): - - """Create a new instance of CreateJobOp. - - Args: - - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, job_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - job_name: A name for the Job. - spec: Full specification of the Job to create. - existing_cluster_id: The ID of an existing cluster that will be used for all runs of - this job. - new_cluster: A description of a cluster that will be created for each run - libraries: An optional list of libraries to be installed on the cluster that will - execute the job. - spark_jar_task: Indicates that this job should run a JAR. - spark_python_task: Indicates that this job should run a Python file. - spark_submit_task: Indicates that this job should run spark submit script. - notebook_task: Indicates that this job should run a notebook. - timeout_seconds: An optional timeout applied to each run of this job. - The default behavior is to have no timeout. - max_retries: An optional maximum number of times to retry an unsuccessful run. - The value -1 means to retry indefinitely and the value 0 means to never retry. - The default behavior is to never retry. - min_retry_interval_millis: An optional minimal interval in milliseconds between the - start of the failed run and the subsequent retry run. - The default behavior is that unsuccessful runs are immediately retried. - retry_on_timeout: An optional policy to specify whether to retry a job when it times - out. - The default behavior is to not retry on timeout. - schedule: An optional periodic schedule for this job. - The default behavior is that the job runs when triggered by clicking Run Now in the - Jobs UI or sending an API request to runNow. - max_concurrent_runs: An optional maximum allowed number of concurrent runs of the job. - email_notifications: An optional set of email addresses notified when runs of this job - begin and complete and when this job is deleted. - - Raises: - ValueError: If no k8s resource name or Job name are provided. 
- """ - - if not spec: - spec = {} - - if job_name: - spec["name"] = job_name - if new_cluster: - spec["new_cluster"] = new_cluster - if existing_cluster_id: - spec["existing_cluster_id"] = existing_cluster_id - if spark_jar_task: - spec["spark_jar_task"] = spark_jar_task - if spark_python_task: - spec["spark_python_task"] = spark_python_task - if spark_submit_task: - spec["spark_submit_task"] = spark_submit_task - if notebook_task: - spec["notebook_task"] = notebook_task - if libraries: - spec["libraries"] = libraries - if timeout_seconds: - spec["timeout_seconds"] = timeout_seconds - if max_retries: - spec["max_retries"] = max_retries - if min_retry_interval_millis: - spec["min_retry_interval_millis"] = min_retry_interval_millis - if retry_on_timeout: - spec["retry_on_timeout"] = retry_on_timeout - if schedule: - spec["schedule"] = schedule - if max_concurrent_runs: - spec["max_concurrent_runs"] = max_concurrent_runs - if email_notifications: - spec["email_notifications"] = email_notifications - - if not k8s_name and "name" in spec: - k8s_name = spec["name"] - elif not k8s_name: - raise ValueError("You need to provide a k8s_name or a job_name.") - - super().__init__( - k8s_resource={ - "apiVersion": "databricks.microsoft.com/v1alpha1", - "kind": "Djob", - "metadata": { - "name": k8s_name - }, - "spec": spec - }, - action="create", - success_condition="status.job_status.job_id > 0", - attribute_outputs={ - "name": "{.metadata.name}", - "job_id": "{.status.job_status.job_id}", - "job_name": "{.status.job_status.settings.name}" - }, - name=name) - - @classmethod - def from_json_spec(cls, - name: str = None, - k8s_name: str = None, - job_name: str = None, - json_spec: str = None): - """Create a new instance of CreateJobOp from a json specification. - - Args: - - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, job_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - job_name: A name for the Job. - json_spec: Full specification of the Job to create in json format. - """ - - spec = json.loads(json_spec) - return cls(name=name, k8s_name=k8s_name, job_name=job_name, spec=spec) - - @classmethod - def from_file_name(cls, - name: str = None, - k8s_name: str = None, - job_name: str = None, - file_name: str = None): - """Create a new instance of CreateJobOp from a file with json specification. - - Args: - - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, job_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - job_name: A name for the Job. - json_spec_file_name: Name of the file containing the full specification of the Job to - create in json format. - - Raises: - - ValueError: if the file name doesn't exist. 
- """ - - with open(file_name) as json_file: - spec = json.loads(json_file.read()) - return cls(name=name, k8s_name=k8s_name, job_name=job_name, spec=spec) - - @property - def resource(self): - """`Resource` object that represents the `resource` property in - `io.argoproj.workflow.v1alpha1.Template`. - """ - return self._resource - -class DeleteJobOp(ResourceOp): - """Represents an Op which will be translated into a Databricks Spark Job deletion - resource template. - - Example: - - import databricks - - databricks.DeleteJobOp( - name = "deletejob", - job_name = "test-job" - ) - """ - - def __init__(self, - name: str = None, - k8s_name: str = None, - job_name: str = None): - """Create a new instance of DeleteJobOp. - - Args: - - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, job_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - job_name: the name of the Job. - If k8s_name is provided, this will be ignored. - - Raises: - - ValueError: If no k8s resource name or Job name are provided. - """ - - k8s_name = k8s_name or job_name - if not k8s_name: - raise ValueError("You need to provide a k8s_name or a job_name.") - - super().__init__( - k8s_resource={ - "apiVersion": "databricks.microsoft.com/v1alpha1", - "kind": "Djob", - "metadata": { - "name": k8s_name - } - }, - action="delete", - name=name) - - @property - def resource(self): - """`Resource` object that represents the `resource` property in - `io.argoproj.workflow.v1alpha1.Template`. - """ - return self._resource diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/databricks/_run_op.py b/samples/contrib/azure-samples/kfp-azure-databricks/databricks/_run_op.py deleted file mode 100644 index 085be3992b6..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/databricks/_run_op.py +++ /dev/null @@ -1,314 +0,0 @@ -import json -from kfp.dsl import ResourceOp - -class SubmitRunOp(ResourceOp): - """Represents an Op which will be translated into a Databricks Run submission - resource template. 
- - Examples: - - import databricks - - databricks.SubmitRunOp( - name="submitrun", - run_name="test-run", - spec={ - "new_cluster": { - "spark_version":"5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - "spark_submit_task": { - "parameters": [ - "--class", - "org.apache.spark.examples.SparkPi", - "dbfs:/docs/sparkpi.jar", - "10" - ] - } - } - ) - - databricks.SubmitRunOp( - name="submitrun", - run_name="test-run", - new_cluster={ - "spark_version":"5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - libraries=[ - { - "jar": "dbfs:/my-jar.jar" - }, - { - "maven": { - "coordinates": "org.jsoup:jsoup:1.7.2" - } - } - ], - spark_jar_task={ - "main_class_name": "com.databricks.ComputeModels" - } - ) - - databricks.SubmitRunOp( - name="submitrun", - run_name="test-run", - existing_cluster_id="cluster-id", - spark_python_task={ - "python_file": "dbfs:/docs/pi.py", - "parameters": ["10"] - } - ) - - databricks.SubmitRunOp( - name="submitrun", - run_name="test-run", - job_name="test-job", - jar_params=["param1", "param2"] - ) - - databricks.SubmitRunOp.from_json_spec( - name="submitrun", - run_name="test-run", - json_spec=_JSON_SPEC - ) - - databricks.SubmitRunOp.from_file_name( - name="submitrun", - run_name="test-run", - file_name="run_spec.json" - ) - """ - - def __init__(self, - name: str = None, - k8s_name: str = None, - run_name: str = None, - spec: {} = None, - job_name: str = None, - jar_params: {} = None, - python_params: {} = None, - spark_submit_params: {} = None, - notebook_params: {} = None, - existing_cluster_id: str = None, - new_cluster: {} = None, - libraries: {} = None, - spark_jar_task: {} = None, - spark_python_task: {} = None, - spark_submit_task: {} = None, - notebook_task: {} = None, - timeout_seconds: int = None): - """Create a new instance of SubmitRunOp. - - Args: - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, run_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - run_name: A name for the Run. - spec: Full specification of the Run to submit. - job_name: The name of an existing Job to run. - jar_params: A list of parameters for jobs with JAR task. - python_params: A list of parameters for jobs with Python tasks. - spark_submit_params: A list of parameters for jobs with spark submit task. - notebook_params: A map from keys to values for jobs with notebook task. - existing_cluster_id: The ID of an existing cluster that will be used for all runs of - this job. - new_cluster: A description of a cluster that will be created for each run - libraries: An optional list of libraries to be installed on the cluster that will - execute the job. - spark_jar_task: Indicates that this job should run a JAR. - spark_python_task: Indicates that this job should run a Python file. - spark_submit_task: Indicates that this job should run spark submit script. - notebook_task: Indicates that this job should run a notebook. - timeout_seconds: An optional timeout applied to each run of this job. - The default behavior is to have no timeout. - - Raises: - - ValueError: If no k8s resource name or Run name are provided. 
- """ - - if not spec: - spec = {} - - if run_name: - spec["run_name"] = run_name - if job_name: - spec["job_name"] = job_name - if jar_params: - spec["jar_params"] = jar_params - if python_params: - spec["python_params"] = python_params - if spark_submit_params: - spec["spark_submit_params"] = spark_submit_params - if notebook_params: - spec["notebook_params"] = notebook_params - if new_cluster: - spec["new_cluster"] = new_cluster - if existing_cluster_id: - spec["existing_cluster_id"] = existing_cluster_id - if spark_jar_task: - spec["spark_jar_task"] = spark_jar_task - if spark_python_task: - spec["spark_python_task"] = spark_python_task - if spark_submit_task: - spec["spark_submit_task"] = spark_submit_task - if notebook_task: - spec["notebook_task"] = notebook_task - if libraries: - spec["libraries"] = libraries - if timeout_seconds: - spec["timeout_seconds"] = timeout_seconds - - if not k8s_name and "run_name" in spec: - k8s_name = spec["run_name"] - elif not k8s_name: - raise ValueError("You need to provide a k8s_name or a run_name.") - - super().__init__( - k8s_resource={ - "apiVersion": "databricks.microsoft.com/v1alpha1", - "kind": "Run", - "metadata": { - "name": k8s_name - }, - "spec": spec - }, - action="create", - success_condition=("status.metadata.state.life_cycle_state in " - "(TERMINATED, SKIPPED, INTERNAL_ERROR)"), - attribute_outputs={ - "name": "{.metadata.name}", - "job_id": "{.status.metadata.job_id}", - "number_in_job": "{.status.metadata.number_in_job}", - "run_id": "{.status.metadata.run_id}", - "run_name": "{.status.metadata.run_name}", - "life_cycle_state": "{.status.metadata.state.life_cycle_state}", - "result_state": "{.status.metadata.state.result_state}", - "notebook_output_result": "{.status.notebook_output.result}", - "notebook_output_truncated": "{.status.notebook_output.truncated}", - "error": "{.status.error}" - }, - name=name) - - @classmethod - def from_json_spec(cls, - name: str = None, - k8s_name: str = None, - run_name: str = None, - json_spec: str = None): - """Create a new instance of SubmitRunOp from a json specification. - - Args: - - name: The name of the Op. - It does not have to be unique within a pipeline - because the pipeline will generates a unique new name in case of conflicts. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, run_name will be used as the resource name. - run_name: A name for the Run. - json_spec: Full specification of the Run to submit in json format. - """ - - spec = json.loads(json_spec) - return cls(name=name, k8s_name=k8s_name, run_name=run_name, spec=spec) - - @classmethod - def from_file_name(cls, - name: str = None, - k8s_name: str = None, - run_name: str = None, - file_name: str = None): - """Create a new instance of SubmitRunOp from a file with a json specification. - - Args: - - name: The name of the Op. - It does not have to be unique within a pipeline - because the pipeline will generates a unique new name in case of conflicts. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, run_name will be used as the resource name. - run_name: A name for the Run. - json_spec_file_name: Name of the file containing the full specification of the Run - to submit in json format. - - Raises: - - ValueError: if the file name doesn't exist. 
- """ - - with open(file_name) as json_file: - spec = json.loads(json_file.read()) - return cls(name=name, k8s_name=k8s_name, run_name=run_name, spec=spec) - - @property - def resource(self): - """`Resource` object that represents the `resource` property in - `io.argoproj.workflow.v1alpha1.Template`. - """ - return self._resource - -class DeleteRunOp(ResourceOp): - """Represents an Op which will be translated into a Databricks Run deletion resource - template. - - Example: - - import databricks - - databricks.DeleteRunOp( - name="deleterun", - run_name="test-run" - ) - """ - - def __init__(self, - name: str = None, - k8s_name: str = None, - run_name: str = None): - """Create a new instance of DeleteRunOp. - - Args: - - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, run_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - run_name: The name of the Run. - If k8s_name is provided, this will be ignored. - - Raises: - - ValueError: If no k8s resource name or Run name are provided. - """ - - k8s_name = k8s_name or run_name - if not k8s_name: - raise ValueError("You need to provide a k8s_name or a run_name.") - - super().__init__( - k8s_resource={ - "apiVersion": "databricks.microsoft.com/v1alpha1", - "kind": "Run", - "metadata": { - "name": k8s_name - } - }, - action="delete", - name=name) - - @property - def resource(self): - """`Resource` object that represents the `resource` property in - `io.argoproj.workflow.v1alpha1.Template`. - """ - return self._resource diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/databricks/_secretscope_op.py b/samples/contrib/azure-samples/kfp-azure-databricks/databricks/_secretscope_op.py deleted file mode 100644 index fc3f091d06c..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/databricks/_secretscope_op.py +++ /dev/null @@ -1,261 +0,0 @@ -import json -from kfp.dsl import ResourceOp - -class CreateSecretScopeOp(ResourceOp): - """Represents an Op which will be translated into a Databricks Secret Scope creation - resource template. 
- - Example: - - import databricks - - databricks.CreateSecretScopeOp( - name="createsecretscope", - scope_name="test-secretscope", - spec={ - "initial_manage_permission": "users", - "secrets": [ - { - "key": "string-secret", - "string_value": "helloworld" - }, - { - "key": "byte-secret", - "byte_value": "aGVsbG93b3JsZA==" - }, - { - "key": "ref-secret", - "value_from": { - "secret_key_ref": { - "name": "mysecret", - "key": "username" - } - } - } - ], - "acls": [ - { - "principal": "user@foo.com", - "permission": "READ" - } - ] - } - ) - - databricks.CreateSecretScopeOp( - name="createsecretscope", - scope_name="test-secretscope", - initial_manage_permission="users", - secrets=[ - { - "key": "string-secret", - "string_value": "helloworld" - }, - { - "key": "byte-secret", - "byte_value": "aGVsbG93b3JsZA==" - }, - { - "key": "ref-secret", - "value_from": { - "secret_key_ref": { - "name": "mysecret", - "key": "username" - } - } - } - ], - acls=[ - { - "principal": "user@foo.com", - "permission": "READ" - } - ] - ) - """ - - def __init__(self, - name: str = None, - k8s_name: str = None, - scope_name: str = None, - spec: {} = None, - initial_manage_principal: str = None, - secrets: {} = None, - acls: {} = None): - """Create a new instance of CreateSecretScopeOp. - - Args: - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, scope_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - scope_name: A name for the Secret Scope. - If k8s_name is provided, this will be ignored. - spec: Full specification of the Secret Scope to create. - initial_manage_principal: The principal that is initially granted MANAGE permission to - the created scope. - secrets: Secrets that will be stored at this scope. - acls: ACLs that will be set on this scope. - - Raises: - - ValueError: If no k8s resource name or Secret Scope name are provided. - """ - - if not spec: - spec = {} - - if initial_manage_principal: - spec["initial_manage_permission"] = initial_manage_principal - if secrets: - spec["secrets"] = secrets - if acls: - spec["acls"] = acls - - k8s_name = k8s_name or scope_name - if not k8s_name: - raise ValueError("You need to provide a k8s_name or a scope_name.") - - super().__init__( - k8s_resource={ - "apiVersion": "databricks.microsoft.com/v1alpha1", - "kind": "SecretScope", - "metadata": { - "name": k8s_name - }, - "spec": spec - }, - action="create", - success_condition="status.secretscope.name", - attribute_outputs={ - "name": "{.metadata.name}", - "secretscope_name": "{.status.secretscope.name}", - "secretscope_backend_type": "{.status.secretscope.backend_type}", - "secret_in_cluster_available": "{.status.secretinclusteravailable}" - }, - name=name) - - @classmethod - def from_json_spec(cls, - name: str = None, - k8s_name: str = None, - scope_name: str = None, - json_spec: str = None): - """Create a new instance of CreateSecretScopeOp from a json specification. - - Args: - - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. 
- If no k8s_name is provided, scope_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - scope_name: A name for the Secret Scope. - If k8s_name is provided, this will be ignored. - json_spec: Full specification of the Secret Scope to create in json format. - """ - - spec = json.loads(json_spec) - return cls(name=name, k8s_name=k8s_name, scope_name=scope_name, spec=spec) - - @classmethod - def from_file_name(cls, - name: str = None, - k8s_name: str = None, - scope_name: str = None, - file_name: str = None): - """Create a new instance of CreateSecretScopeOp from a file with json specification. - - Args: - - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, scope_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - scope_name: A name for the Secret Scope. - If k8s_name is provided, this will be ignored. - json_spec_file_name: Name of the file containing the full specification of the Secret - Scope to create in json format. - - Raises: - - ValueError: if the file name doesn't exist. - """ - - with open(file_name) as json_file: - spec = json.loads(json_file.read()) - return cls(name=name, k8s_name=k8s_name, scope_name=scope_name, spec=spec) - - @property - def resource(self): - """`Resource` object that represents the `resource` property in - `io.argoproj.workflow.v1alpha1.Template`. - """ - return self._resource - -class DeleteSecretScopeOp(ResourceOp): - """Represents an Op which will be translated into a Databricks Secret Scope deletion - resource template. - - Example: - - import databricks - - databricks.DeleteSecretScopeOp( - name = "deletesecretscope", - scope_name = "test-secretscope" - ) - """ - - def __init__(self, - name: str = None, - k8s_name: str = None, - scope_name: str = None): - """Create a new instance of DeleteSecretScopeOp. - - Args: - - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, scope_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - scope_name: The name of the Secret Scope. - If k8s_name is provided, this will be ignored. - - Raises: - - ValueError: If no k8s resource name or Secret Scope name are provided. - """ - - k8s_name = k8s_name or scope_name - if not k8s_name: - raise ValueError("You need to provide a k8s_name or a scope_name.") - - super().__init__( - k8s_resource={ - "apiVersion": "databricks.microsoft.com/v1alpha1", - "kind": "SecretScope", - "metadata": { - "name": k8s_name - } - }, - action="delete", - name=name) - - @property - def resource(self): - """`Resource` object that represents the `resource` property in - `io.argoproj.workflow.v1alpha1.Template`. 
- """ - return self._resource \ No newline at end of file diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/databricks/_workspaceitem_op.py b/samples/contrib/azure-samples/kfp-azure-databricks/databricks/_workspaceitem_op.py deleted file mode 100644 index ce9f40e2bc6..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/databricks/_workspaceitem_op.py +++ /dev/null @@ -1,226 +0,0 @@ -import json -from kfp.dsl import ResourceOp - -class ImportWorkspaceItemOp(ResourceOp): - """Represents an Op which will be translated into a Databricks Workspace Item import - resource template. - - Example: - - import databricks - - databricks.ImportWorkspaceItemOp( - name="importworkspaceitem", - item_name="test-item", - spec={ - "content": "cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK", - "path": "/Users/user@foo.com/ScalaExampleNotebook", - "language": "SCALA", - "format": "SOURCE" - } - ) - - databricks.ImportWorkspaceItemOp( - name="importworkspaceitem", - item_name="test-item", - content="cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK", - path="/Users/user@foo.com/ScalaExampleNotebook", - language="SCALA", - file_format="SOURCE" - ) - """ - - def __init__(self, - name: str = None, - k8s_name: str = None, - item_name: str = None, - spec: {} = None, - content: str = None, - path: str = None, - language: str = None, - file_format: str = None): - """Create a new instance of ImportWorkspaceItemOp. - - Args: - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, item_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - item_name: A name for the Workspace Item. - If k8s_name is provided, this will be ignored. - spec: Full specification of the Workspace Item to import. - content: The base64-encoded content. - This has a limit of 10 MB. - path: The absolute path of the notebook or directory. - Importing directory is only support for DBC format. - language: The language. - If format is set to SOURCE, this field is required; otherwise, it will be ignored. - file_format: This specifies the format of the file to be imported. - By default, this is SOURCE. However it may be one of: SOURCE, HTML, JUPYTER, DBC. - The value is case sensitive. - - Raises: - - ValueError: If no k8s resource name or Workspace Item name are provided. 
- """ - - if not spec: - spec = {} - - if content: - spec["content"] = content - if path: - spec["path"] = path - if language: - spec["language"] = language - if file_format: - spec["format"] = file_format - - k8s_name = k8s_name or item_name - if not k8s_name: - raise ValueError("You need to provide a k8s_name or a item_name.") - - super().__init__( - k8s_resource={ - "apiVersion": "databricks.microsoft.com/v1alpha1", - "kind": "WorkspaceItem", - "metadata": { - "name": k8s_name - }, - "spec": spec - }, - action="create", - success_condition="status.object_hash", - attribute_outputs={ - "name": "{.metadata.name}", - "object_hash": "{.status.object_hash}", - "object_language": "{.status.object_info.language}", - "object_type": "{.status.object_info.object_type}", - "object_path": "{.status.object_info.path}" - }, - name=name) - - @classmethod - def from_json_spec(cls, - name: str = None, - k8s_name: str = None, - item_name: str = None, - json_spec: str = None): - """Create a new instance of ImportWorkspaceItemOp from a json specification. - - Args: - - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, item_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - item_name: A name for the Workspace Item. - If k8s_name is provided, this will be ignored. - json_spec: Full specification of the Workspace Item to import in json format. - """ - - spec = json.loads(json_spec) - return cls(name=name, k8s_name=k8s_name, item_name=item_name, spec=spec) - - @classmethod - def from_file_name(cls, - name: str = None, - k8s_name: str = None, - item_name: str = None, - file_name: str = None): - """Create a new instance of ImportWorkspaceItemOp from a file with json specification. - - Args: - - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, item_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - item_name: A name for the Workspace Item. - If k8s_name is provided, this will be ignored. - json_spec: Name of the file containing the full specification of the Workspace Item to - import in json format. - - Raises: - - ValueError: if the file name doesn't exist. - """ - - with open(file_name) as json_file: - spec = json.loads(json_file.read()) - return cls(name=name, k8s_name=k8s_name, item_name=item_name, spec=spec) - - @property - def resource(self): - """`Resource` object that represents the `resource` property in - `io.argoproj.workflow.v1alpha1.Template`. - """ - return self._resource - -class DeleteWorkspaceItemOp(ResourceOp): - """Represents an Op which will be translated into a Databricks Workspace Item deletion - resource template. 
- - Example: - - import databricks - - databricks.DeleteWorkspaceItemOp( - name="deleteworkspaceitem", - item_name="test-item" - ) - """ - - def __init__(self, - name: str = None, - k8s_name: str = None, - item_name: str = None): - """Create a new instance of DeleteWorkspaceItemOp. - - Args: - - name: The name of the pipeline Op. - It does not have to be unique within a pipeline - because the pipeline will generate a new unique name in case of a conflict. - k8s_name = The name of the k8s resource which will be submitted to the cluster. - If no k8s_name is provided, item_name will be used as the resource name. - This name is DNS-1123 subdomain name and must consist of lower case alphanumeric - characters, '-' or '.', and must start and end with an alphanumeric character. - item_name: The name of the Workspace Item. - If k8s_name is provided, this will be ignored. - - Raises: - - ValueError: If no k8s resource name or Workspace Item name are provided. - """ - - k8s_name = k8s_name or item_name - if not k8s_name: - raise ValueError("You need to provide a k8s_name or a item_name.") - - super().__init__( - k8s_resource={ - "apiVersion": "databricks.microsoft.com/v1alpha1", - "kind": "WorkspaceItem", - "metadata": { - "name": k8s_name - } - }, - action="delete", - name=name) - - @property - def resource(self): - """`Resource` object that represents the `resource` property in - `io.argoproj.workflow.v1alpha1.Template`. - """ - return self._resource diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/setup.py b/samples/contrib/azure-samples/kfp-azure-databricks/setup.py deleted file mode 100644 index dbe6907f18c..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/setup.py +++ /dev/null @@ -1,10 +0,0 @@ -from setuptools import setup -import databricks - -setup( - name='kfp-azure-databricks', - version=databricks.__version__, - description='Python package to manage Azure Databricks on Kubeflow Pipelines using Azure Databricks operator for Kubernetes', - url='https://github.com/kubeflow/pipelines/tree/master/samples/contrib/azure-samples/kfp-azure-databricks', - packages=['databricks'] -) diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/tests/cluster_spec.json b/samples/contrib/azure-samples/kfp-azure-databricks/tests/cluster_spec.json deleted file mode 100644 index 1536b33b0ee..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/tests/cluster_spec.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "spark_version": "5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "spark_conf": { - "spark.speculation": "true" - }, - "num_workers": 2 -} diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/tests/dbfsblock_spec.json b/samples/contrib/azure-samples/kfp-azure-databricks/tests/dbfsblock_spec.json deleted file mode 100644 index 010e6e8999c..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/tests/dbfsblock_spec.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "data": "cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK", - "path": "/data/foo.txt" -} diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/tests/job_spec.json b/samples/contrib/azure-samples/kfp-azure-databricks/tests/job_spec.json deleted file mode 100644 index 9138280aa56..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/tests/job_spec.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "new_cluster": { - "spark_version": "5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - "libraries": [ - { - "jar": "dbfs:/my-jar.jar" - }, - { - "maven": 
{ - "coordinates": "org.jsoup:jsoup:1.7.2" - } - } - ], - "timeout_seconds": 3600, - "max_retries": 1, - "schedule": { - "quartz_cron_expression": "0 15 22 ? * *", - "timezone_id": "America/Los_Angeles" - }, - "spark_jar_task": { - "main_class_name": "com.databricks.ComputeModels" - } -} diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/tests/run_spec.json b/samples/contrib/azure-samples/kfp-azure-databricks/tests/run_spec.json deleted file mode 100644 index 0e88c923352..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/tests/run_spec.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "new_cluster": { - "spark_version": "5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - "spark_submit_task": { - "parameters": [ - "--class", - "org.apache.spark.examples.SparkPi", - "dbfs:/docs/sparkpi.jar", - "10" - ] - } -} diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/tests/run_tests.sh b/samples/contrib/azure-samples/kfp-azure-databricks/tests/run_tests.sh deleted file mode 100644 index 43658708e06..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/tests/run_tests.sh +++ /dev/null @@ -1,4 +0,0 @@ -pip install --upgrade .. -pip install coverage -coverage run -m unittest discover --verbose -coverage report \ No newline at end of file diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/tests/secretscope_spec.json b/samples/contrib/azure-samples/kfp-azure-databricks/tests/secretscope_spec.json deleted file mode 100644 index c32be65f2cf..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/tests/secretscope_spec.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "initial_manage_permission": "users", - "secrets": [ - { - "key": "string-secret", - "string_value": "helloworld" - }, - { - "key": "byte-secret", - "byte_value": "aGVsbG93b3JsZA==" - }, - { - "key": "ref-secret", - "value_from": { - "secret_key_ref": { - "name": "mysecret", - "key": "username" - } - } - } - ], - "acls": [ - { - "principal": "user@foo.com", - "permission": "READ" - } - ] -} diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/tests/test_cluster_op.py b/samples/contrib/azure-samples/kfp-azure-databricks/tests/test_cluster_op.py deleted file mode 100644 index 974a9865a1a..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/tests/test_cluster_op.py +++ /dev/null @@ -1,262 +0,0 @@ -import unittest -from pathlib import Path -import kfp -from kfp.dsl import PipelineParam -from databricks import CreateClusterOp, DeleteClusterOp - -class TestCreateClusterOp(unittest.TestCase): - - def test_databricks_create_cluster_without_k8s_or_cluster_name(self): - def my_pipeline(): - CreateClusterOp( - name="createcluster", - spark_version="5.3.x-scala2.11", - node_type_id="Standard_D3_v2", - spark_conf={ - "spark.speculation": "true" - }, - num_workers=2 - ) - - self.assertRaises(ValueError, lambda: kfp.compiler.Compiler()._create_workflow(my_pipeline)) - - def test_databricks_create_cluster(self): - def my_pipeline(): - cluster_name = "test-cluster" - spark_version = "5.3.x-scala2.11" - node_type_id = "Standard_D3_v2" - spark_conf = { - "spark.speculation": "true" - } - num_workers = 2 - - expected_spec = { - "cluster_name": cluster_name, - "spark_version": spark_version, - "node_type_id": node_type_id, - "spark_conf": spark_conf, - "num_workers": num_workers - } - - res = CreateClusterOp( - name="createcluster", - cluster_name=cluster_name, - spark_version=spark_version, - node_type_id=node_type_id, - 
spark_conf=spark_conf, - num_workers=num_workers - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_create_autoscaling_cluster(self): - def my_pipeline(): - cluster_name = "test-cluster" - spark_version = "5.3.x-scala2.11" - node_type_id = "Standard_D3_v2" - autoscale = { - "min_workers": 2, - "max_workers": 50 - } - - expected_spec = { - "cluster_name": cluster_name, - "spark_version":spark_version, - "node_type_id": node_type_id, - "autoscale": autoscale - } - - res = CreateClusterOp( - name="createcluster", - cluster_name=cluster_name, - spark_version=spark_version, - node_type_id=node_type_id, - autoscale=autoscale - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_create_cluster_with_spec(self): - def my_pipeline(): - spec = { - "spark_version": "5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "spark_conf": { - "spark.speculation": "true" - }, - "num_workers": 2 - } - - res = CreateClusterOp( - name="createcluster", - cluster_name="test-cluster", - spec=spec - ) - - self.assert_res(res, spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_create_cluster_with_spec_and_extra_args(self): - def my_pipeline(): - spec = { - "spark_version":"5.3.x-scala2.11", - "spark_conf": { - "spark.speculation": "true" - } - } - - expected_spec = { - "cluster_name": "test-cluster", - "spark_version": "5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "spark_conf": { - "spark.speculation": "true" - }, - "num_workers": 2 - } - - res = CreateClusterOp( - name="createcluster", - cluster_name="test-cluster", - spec=spec, - node_type_id="Standard_D3_v2", - num_workers=2 - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_create_cluster_with_json_spec(self): - def my_pipeline(): - cluster_name = "test-cluster" - json_spec = """ - { - "spark_version": "5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "spark_conf": { - "spark.speculation": "true" - }, - "num_workers": 2 - } - """ - - expected_spec = { - "cluster_name": cluster_name, - "spark_version": "5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "spark_conf": { - "spark.speculation": "true" - }, - "num_workers": 2 - } - - res = CreateClusterOp.from_json_spec( - name="createcluster", - cluster_name=cluster_name, - json_spec=json_spec - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_create_cluster_with_json_file_spec(self): - def my_pipeline(): - cluster_name = "test-cluster" - current_path = Path(__file__).parent - json_spec_file_name = current_path.joinpath("cluster_spec.json") - - expected_spec = { - "cluster_name": cluster_name, - "spark_version": "5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "spark_conf": { - "spark.speculation": "true" - }, - "num_workers": 2 - } - - res = CreateClusterOp.from_file_name( - name="createcluster", - cluster_name=cluster_name, - file_name=json_spec_file_name - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def assert_res(self, res, expected_spec): - self.assertEqual(res.name, "createcluster") - self.assertEqual(res.resource.action, "create") - self.assertEqual( - res.resource.success_condition, - "status.cluster_info.state in (RUNNING, TERMINATED, UNKNOWN)" - ) - 
self.assertEqual(res.resource.failure_condition, None) - self.assertEqual(res.resource.manifest, None) - expected_attribute_outputs = { - "name": "{.metadata.name}", - "cluster_id": "{.status.cluster_info.cluster_id}", - "cluster_name": "{.status.cluster_info.cluster_name}", - "state": "{.status.cluster_info.state}", - "manifest": "{}" - } - self.assertEqual(res.attribute_outputs, expected_attribute_outputs) - expected_outputs = { - "name": PipelineParam(name="name", op_name=res.name), - "cluster_id": PipelineParam(name="cluster_id", op_name=res.name), - "cluster_name": PipelineParam(name="cluster_name", op_name=res.name), - "state": PipelineParam(name="state", op_name=res.name), - "manifest": PipelineParam(name="manifest", op_name=res.name) - } - self.assertEqual(res.outputs, expected_outputs) - self.assertEqual( - res.output, - PipelineParam(name="name", op_name=res.name) - ) - self.assertEqual(res.dependent_names, []) - self.assertEqual(res.k8s_resource["kind"], "Dcluster") - self.assertEqual(res.k8s_resource["metadata"]["name"], "test-cluster") - self.assertEqual(res.k8s_resource["spec"], expected_spec) - -class TestDeleteClusterOp(unittest.TestCase): - - def test_databricks_delete_cluster_without_k8s_or_cluster_name(self): - def my_pipeline(): - DeleteClusterOp( - name="deletecluster" - ) - - self.assertRaises(ValueError, lambda: kfp.compiler.Compiler()._create_workflow(my_pipeline)) - - def test_databricks_delete_cluster(self): - def my_pipeline(): - - res = DeleteClusterOp( - name="deletecluster", - cluster_name="test-cluster" - ) - - self.assertEqual(res.name, "deletecluster") - self.assertEqual(res.resource.action, "delete") - self.assertEqual(res.resource.success_condition, None) - self.assertEqual(res.resource.failure_condition, None) - self.assertEqual(res.resource.manifest, None) - self.assertEqual(res.attribute_outputs, {}) - self.assertEqual(res.outputs, {}) - self.assertEqual(res.output, None) - self.assertEqual(res.dependent_names, []) - self.assertEqual(res.k8s_resource["kind"], "Dcluster") - self.assertEqual(res.k8s_resource["metadata"]["name"], "test-cluster") - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - -if __name__ == '__main__': - unittest.main() diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/tests/test_dbfsblock_op.py b/samples/contrib/azure-samples/kfp-azure-databricks/tests/test_dbfsblock_op.py deleted file mode 100644 index c714fb155ad..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/tests/test_dbfsblock_op.py +++ /dev/null @@ -1,196 +0,0 @@ -import unittest -from pathlib import Path -import kfp -from kfp.dsl import PipelineParam -from databricks import CreateDbfsBlockOp, DeleteDbfsBlockOp - -class TestCreateDbfsBlockOp(unittest.TestCase): - - def test_databricks_createdbfsblock_without_k8s_or_block_name(self): - def my_pipeline(): - CreateDbfsBlockOp( - name="createdbfsitem", - data="cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK", - path="/Users/user@foo.com/ScalaExampleNotebook" - ) - - self.assertRaises(ValueError, lambda: kfp.compiler.Compiler()._create_workflow(my_pipeline)) - - def test_databricks_createdbfsblock(self): - def my_pipeline(): - block_name = "createdbfsitem" - data = "cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK" - path = "/Users/user@foo.com/ScalaExampleNotebook" - - - expected_spec = { - "path": path, - "data": data - } - - res = CreateDbfsBlockOp( - name="createdbfsitem", - block_name=block_name, - path=path, - data=data - ) - - self.assert_res(res, expected_spec) - - 
kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_createdbfsblock_with_spec(self): - def my_pipeline(): - block_name = "createdbfsitem" - spec = { - "data": "cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK", - "path": "/Users/user@foo.com/ScalaExampleNotebook" - } - - res = CreateDbfsBlockOp( - name="createdbfsitem", - block_name=block_name, - spec=spec - ) - - self.assert_res(res, spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_createdbfsblock_with_spec_and_extra_args(self): - def my_pipeline(): - block_name = "createdbfsitem" - data = "cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK" - spec = { - "path": "/Users/user@foo.com/ScalaExampleNotebook" - } - - expected_spec = { - "data": data, - "path": "/Users/user@foo.com/ScalaExampleNotebook" - } - - res = CreateDbfsBlockOp( - name="createdbfsitem", - block_name=block_name, - data=data, - spec=spec - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_createdbfsblock_with_json_spec(self): - def my_pipeline(): - block_name = "createdbfsitem" - json_spec = """ - { - "data": "cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK", - "path": "/Users/user@foo.com/ScalaExampleNotebook" - } - """ - - expected_spec = { - "data": "cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK", - "path": "/Users/user@foo.com/ScalaExampleNotebook" - } - - res = CreateDbfsBlockOp.from_json_spec( - name="createdbfsitem", - block_name=block_name, - json_spec=json_spec - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_createdbfsblock_with_json_file_spec(self): - def my_pipeline(): - block_name = "createdbfsitem" - current_path = Path(__file__).parent - json_spec_file_name = current_path.joinpath("dbfsblock_spec.json") - - expected_spec = { - "data": "cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK", - "path": "/data/foo.txt" - } - - res = CreateDbfsBlockOp.from_file_name( - name="createdbfsitem", - block_name=block_name, - file_name=json_spec_file_name - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def assert_res(self, res, expected_spec): - self.assertEqual(res.name, "createdbfsitem") - self.assertEqual(res.resource.action, "create") - self.assertEqual(res.resource.success_condition, "status.file_hash") - self.assertEqual(res.resource.failure_condition, None) - self.assertEqual(res.resource.manifest, None) - expected_attribute_outputs = { - "name": "{.metadata.name}", - "file_info_path": "{.status.file_info.path}", - "file_info_is_dir": "{.status.file_info.is_dir}", - "file_info_file_size": "{.status.file_info.file_size}", - "file_hash": "{.status.file_hash}", - "manifest": "{}" - } - self.assertEqual(res.attribute_outputs, expected_attribute_outputs) - expected_outputs = { - "name": PipelineParam(name="name", op_name=res.name), - "file_info_path": PipelineParam(name="file_info_path", op_name=res.name), - "file_info_is_dir": PipelineParam(name="file_info_is_dir", op_name=res.name), - "file_info_file_size": PipelineParam(name="file_info_file_size", op_name=res.name), - "file_hash": PipelineParam(name="file_hash", op_name=res.name), - "manifest": PipelineParam(name="manifest", op_name=res.name) - } - self.assertEqual(res.outputs, expected_outputs) - self.assertEqual( - res.output, - PipelineParam(name="name", op_name=res.name) - ) - self.assertEqual(res.dependent_names, []) - self.assertEqual(res.k8s_resource["kind"], "DbfsBlock") - 
self.assertEqual(res.k8s_resource["metadata"]["name"], "createdbfsitem") - self.assertEqual(res.k8s_resource["spec"], expected_spec) - -class TestDeletedbfsblockOp(unittest.TestCase): - - def test_databricks_delete_dbfsblock_without_k8s_or_block_name(self): - def my_pipeline(): - DeleteDbfsBlockOp( - name="deletedbfsblock" - ) - - self.assertRaises(ValueError, lambda: kfp.compiler.Compiler()._create_workflow(my_pipeline)) - - def test_databricks_delete_dbfsblock(self): - def my_pipeline(): - - res = DeleteDbfsBlockOp( - name="deletedbfsblock", - block_name="createdbfsitem" - ) - - self.assertEqual(res.name, "deletedbfsblock") - self.assertEqual(res.resource.action, "delete") - self.assertEqual(res.resource.success_condition, None) - self.assertEqual(res.resource.failure_condition, None) - self.assertEqual(res.resource.manifest, None) - self.assertEqual(res.attribute_outputs, {}) - self.assertEqual(res.outputs, {}) - self.assertEqual(res.output, None) - self.assertEqual(res.dependent_names, []) - self.assertEqual(res.k8s_resource["kind"], "DbfsBlock") - self.assertEqual(res.k8s_resource["metadata"]["name"], "createdbfsitem") - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - -if __name__ == '__main__': - unittest.main() \ No newline at end of file diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/tests/test_job_op.py b/samples/contrib/azure-samples/kfp-azure-databricks/tests/test_job_op.py deleted file mode 100644 index 61f786633b7..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/tests/test_job_op.py +++ /dev/null @@ -1,494 +0,0 @@ -import unittest -from pathlib import Path -import kfp -from kfp.dsl import PipelineParam -from databricks import CreateJobOp, DeleteJobOp - -class TestCreateJobOp(unittest.TestCase): - - def test_databricks_create_job_without_k8s_or_job_name(self): - def my_pipeline(): - CreateJobOp( - name="createjob", - new_cluster={ - "spark_version": "5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - libraries=[ - { - "jar": "dbfs:/my-jar.jar" - }, - { - "maven": { - "coordinates": "org.jsoup:jsoup:1.7.2" - } - } - ], - timeout_seconds=3600, - max_retries=1, - schedule={ - "quartz_cron_expression": "0 15 22 ? * *", - "timezone_id": "America/Los_Angeles" - }, - spark_jar_task={ - "main_class_name": "com.databricks.ComputeModels" - } - ) - - self.assertRaises(ValueError, lambda: kfp.compiler.Compiler()._create_workflow(my_pipeline)) - - def test_databricks_create_job_with_new_cluster_and_spark_jar_task(self): - def my_pipeline(): - job_name = "test-job" - new_cluster = { - "spark_version": "5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - } - libraries = [ - { - "jar": "dbfs:/my-jar.jar" - }, - { - "maven": { - "coordinates": "org.jsoup:jsoup:1.7.2" - } - } - ] - timeout_seconds = 3600 - max_retries = 1 - schedule = { - "quartz_cron_expression": "0 15 22 ? 
* *", - "timezone_id": "America/Los_Angeles" - } - spark_jar_task = { - "main_class_name": "com.databricks.ComputeModels" - } - - expected_spec = { - "name": job_name, - "new_cluster": new_cluster, - "libraries": libraries, - "timeout_seconds": timeout_seconds, - "max_retries": max_retries, - "schedule": schedule, - "spark_jar_task": spark_jar_task - } - - res = CreateJobOp( - name="createjob", - job_name=job_name, - new_cluster=new_cluster, - libraries=libraries, - timeout_seconds=timeout_seconds, - max_retries=max_retries, - schedule=schedule, - spark_jar_task=spark_jar_task - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_create_job_with_new_cluster_and_spark_python_task(self): - def my_pipeline(): - job_name = "test-job" - new_cluster = { - "spark_version": "5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - } - timeout_seconds = 3600 - max_retries = 3 - min_retry_interval_millis = 3600 - retry_on_timeout = True - schedule = { - "quartz_cron_expression": "0 15 22 ? * *", - "timezone_id": "America/Los_Angeles" - } - spark_python_task = { - "python_file": "dbfs:/docs/pi.py", - "parameters": [ - "10" - ] - } - - expected_spec = { - "name": job_name, - "new_cluster": new_cluster, - "timeout_seconds": timeout_seconds, - "max_retries": max_retries, - "min_retry_interval_millis": min_retry_interval_millis, - "retry_on_timeout": retry_on_timeout, - "schedule": schedule, - "spark_python_task": spark_python_task - } - - res = CreateJobOp( - name="createjob", - job_name=job_name, - new_cluster=new_cluster, - timeout_seconds=timeout_seconds, - max_retries=max_retries, - min_retry_interval_millis=min_retry_interval_millis, - retry_on_timeout=retry_on_timeout, - schedule=schedule, - spark_python_task=spark_python_task - ) - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_create_job_with_new_cluster_and_spark_submit_task(self): - def my_pipeline(): - job_name = "test-job" - new_cluster = { - "spark_version": "5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - } - schedule = { - "quartz_cron_expression": "0 15 22 ? * *", - "timezone_id": "America/Los_Angeles" - } - spark_submit_task = { - "parameters": [ - "--class", - "org.apache.spark.examples.SparkPi", - "dbfs:/docs/sparkpi.jar", - "10" - ] - } - - expected_spec = { - "name": job_name, - "new_cluster": new_cluster, - "schedule": schedule, - "spark_submit_task": spark_submit_task - } - - res = CreateJobOp( - name="createjob", - job_name=job_name, - new_cluster=new_cluster, - schedule=schedule, - spark_submit_task=spark_submit_task - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_create_job_with_existing_cluster_and_notebook_task(self): - def my_pipeline(): - job_name = "test-job" - existing_cluster_id = "1201-my-cluster" - schedule = { - "quartz_cron_expression": "0 15 22 ? 
* *", - "timezone_id": "America/Los_Angeles" - } - notebook_task = { - "notebook_path": "/Users/donald@duck.com/my-notebook" - } - timeout_seconds = 120 - - expected_spec = { - "name": job_name, - "existing_cluster_id": existing_cluster_id, - "schedule": schedule, - "notebook_task": notebook_task, - "timeout_seconds": timeout_seconds - } - - res = CreateJobOp( - name="createjob", - job_name=job_name, - existing_cluster_id=existing_cluster_id, - schedule=schedule, - notebook_task=notebook_task, - timeout_seconds=timeout_seconds - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_create_job_with_spec(self): - def my_pipeline(): - spec = { - "name": "test-job", - "new_cluster": { - "spark_version": "5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - "libraries": [ - { - "jar": "dbfs:/my-jar.jar" - }, - { - "maven": { - "coordinates": "org.jsoup:jsoup:1.7.2" - } - } - ], - "timeout_seconds": 3600, - "max_retries": 1, - "schedule": { - "quartz_cron_expression": "0 15 22 ? * *", - "timezone_id": "America/Los_Angeles" - }, - "spark_jar_task": { - "main_class_name": "com.databricks.ComputeModels" - } - } - - res = CreateJobOp( - name="createjob", - spec=spec - ) - - self.assert_res(res, spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_create_job_with_spec_and_extra_args(self): - def my_pipeline(): - spark_submit_task = { - "parameters": [ - "--class", - "org.apache.spark.examples.SparkPi", - "dbfs:/docs/sparkpi.jar", - "10" - ] - } - spec = { - "name": "test-job", - "new_cluster": { - "spark_version":"5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - "schedule": { - "quartz_cron_expression": "0 15 22 ? * *", - "timezone_id": "America/Los_Angeles" - }, - "spark_submit_task": { - "parameters": [ - "--class" - ] - } - } - - expected_spec = { - "name": "test-job", - "new_cluster": { - "spark_version": "5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - "schedule": { - "quartz_cron_expression": "0 15 22 ? * *", - "timezone_id": "America/Los_Angeles" - }, - "spark_submit_task": { - "parameters": [ - "--class", - "org.apache.spark.examples.SparkPi", - "dbfs:/docs/sparkpi.jar", - "10" - ] - } - } - - res = CreateJobOp( - name="createjob", - spec=spec, - spark_submit_task=spark_submit_task - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_create_job_with_json_spec(self): - def my_pipeline(): - job_name = "test-job" - json_spec = """ - { - "new_cluster": { - "spark_version": "5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - "libraries": [ - { - "jar": "dbfs:/my-jar.jar" - }, - { - "maven": { - "coordinates": "org.jsoup:jsoup:1.7.2" - } - } - ], - "timeout_seconds": 3600, - "max_retries": 1, - "schedule": { - "quartz_cron_expression": "0 15 22 ? * *", - "timezone_id": "America/Los_Angeles" - }, - "spark_jar_task": { - "main_class_name": "com.databricks.ComputeModels" - } - } - """ - - expected_spec = { - "name": job_name, - "new_cluster": { - "spark_version": "5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - "libraries": [ - { - "jar": "dbfs:/my-jar.jar" - }, - { - "maven": { - "coordinates": "org.jsoup:jsoup:1.7.2" - } - } - ], - "timeout_seconds": 3600, - "max_retries": 1, - "schedule": { - "quartz_cron_expression": "0 15 22 ? 
* *", - "timezone_id": "America/Los_Angeles" - }, - "spark_jar_task": { - "main_class_name": "com.databricks.ComputeModels" - } - } - - res = CreateJobOp.from_json_spec( - name="createjob", - job_name=job_name, - json_spec=json_spec - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_create_job_with_json_file_spec(self): - def my_pipeline(): - job_name = "test-job" - current_path = Path(__file__).parent - json_spec_file_name = current_path.joinpath("job_spec.json") - - expected_spec = { - "name": job_name, - "new_cluster": { - "spark_version": "5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - "libraries": [ - { - "jar": "dbfs:/my-jar.jar" - }, - { - "maven": { - "coordinates": "org.jsoup:jsoup:1.7.2" - } - } - ], - "timeout_seconds": 3600, - "max_retries": 1, - "schedule": { - "quartz_cron_expression": "0 15 22 ? * *", - "timezone_id": "America/Los_Angeles" - }, - "spark_jar_task": { - "main_class_name": "com.databricks.ComputeModels" - } - } - - res = CreateJobOp.from_file_name( - name="createjob", - job_name=job_name, - file_name=json_spec_file_name - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def assert_res(self, res, expected_spec): - self.assertEqual(res.name, "createjob") - self.assertEqual(res.resource.action, "create") - self.assertEqual( - res.resource.success_condition, - "status.job_status.job_id > 0" - ) - self.assertEqual(res.resource.failure_condition, None) - self.assertEqual(res.resource.manifest, None) - expected_attribute_outputs = { - "name": "{.metadata.name}", - "job_id": "{.status.job_status.job_id}", - "job_name": "{.status.job_status.settings.name}", - "manifest": "{}" - } - self.assertEqual(res.attribute_outputs, expected_attribute_outputs) - expected_outputs = { - "name": PipelineParam(name="name", op_name=res.name), - "job_id": PipelineParam(name="job_id", op_name=res.name), - "job_name": PipelineParam(name="job_name", op_name=res.name), - "manifest": PipelineParam(name="manifest", op_name=res.name) - } - self.assertEqual(res.outputs, expected_outputs) - self.assertEqual( - res.output, - PipelineParam(name="name", op_name=res.name) - ) - self.assertEqual(res.dependent_names, []) - self.assertEqual(res.k8s_resource["kind"], "Djob") - self.assertEqual(res.k8s_resource["metadata"]["name"], "test-job") - self.assertEqual(res.k8s_resource["spec"], expected_spec) - -class TestDeleteJobOp(unittest.TestCase): - - def test_databricks_delete_job_without_k8s_or_job_name(self): - def my_pipeline(): - DeleteJobOp( - name="deletejob" - ) - - self.assertRaises(ValueError, lambda: kfp.compiler.Compiler()._create_workflow(my_pipeline)) - - def test_databricks_delete_job(self): - def my_pipeline(): - - res = DeleteJobOp( - name="deletejob", - job_name="test-job" - ) - - self.assertEqual(res.name, "deletejob") - self.assertEqual(res.resource.action, "delete") - self.assertEqual(res.resource.success_condition, None) - self.assertEqual(res.resource.failure_condition, None) - self.assertEqual(res.resource.manifest, None) - self.assertEqual(res.attribute_outputs, {}) - self.assertEqual(res.outputs, {}) - self.assertEqual(res.output, None) - self.assertEqual(res.dependent_names, []) - self.assertEqual(res.k8s_resource["kind"], "Djob") - self.assertEqual(res.k8s_resource["metadata"]["name"], "test-job") - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - -if __name__ == '__main__': - unittest.main() diff --git 
a/samples/contrib/azure-samples/kfp-azure-databricks/tests/test_run_op.py b/samples/contrib/azure-samples/kfp-azure-databricks/tests/test_run_op.py deleted file mode 100644 index 53f93786b8e..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/tests/test_run_op.py +++ /dev/null @@ -1,494 +0,0 @@ -import unittest -from pathlib import Path -import kfp -from kfp.dsl import PipelineParam -from databricks import SubmitRunOp, DeleteRunOp - -class TestSubmitRunOp(unittest.TestCase): - - def test_databricks_submit_run_without_k8s_or_run_name(self): - def my_pipeline(): - SubmitRunOp( - name="submitrun", - new_cluster={ - "spark_version":"5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - libraries=[ - {"jar": "dbfs:/my-jar.jar"}, - {"maven": {"coordinates": "org.jsoup:jsoup:1.7.2"}} - ], - spark_jar_task={ - "main_class_name": "com.databricks.ComputeModels" - } - ) - - self.assertRaises(ValueError, lambda: kfp.compiler.Compiler()._create_workflow(my_pipeline)) - - def test_databricks_submit_run_with_job_name_and_jar_params(self): - def my_pipeline(): - run_name = "test-run" - job_name = "test-job" - jar_params = ["param1", "param2"] - - expected_spec = { - "run_name": run_name, - "job_name": job_name, - "jar_params": jar_params - } - - res = SubmitRunOp( - name="submitrun", - run_name=run_name, - job_name=job_name, - jar_params=jar_params - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_submit_run_with_job_name_and_python_params(self): - def my_pipeline(): - run_name = "test-run" - job_name = "test-job" - python_params = ["john doe", "35"] - - expected_spec = { - "run_name": run_name, - "job_name": job_name, - "python_params": python_params - } - - res = SubmitRunOp( - name="submitrun", - run_name=run_name, - job_name=job_name, - python_params=python_params - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_submit_run_with_job_name_and_spark_submit_params(self): - def my_pipeline(): - run_name = "test-run" - job_name = "test-job" - spark_submit_params = ["--class", "org.apache.spark.examples.SparkPi"] - - expected_spec = { - "run_name": run_name, - "job_name": job_name, - "spark_submit_params": spark_submit_params - } - - res = SubmitRunOp( - name="submitrun", - run_name=run_name, - job_name=job_name, - spark_submit_params=spark_submit_params - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_submit_run_with_job_name_and_notebook_params(self): - def my_pipeline(): - run_name = "test-run" - job_name = "test-job" - notebook_params = { - "dry-run": "true", - "oldest-time-to-consider": "1457570074236" - } - - expected_spec = { - "run_name": run_name, - "job_name": job_name, - "notebook_params": notebook_params - } - - res = SubmitRunOp( - name="submitrun", - run_name=run_name, - job_name=job_name, - notebook_params=notebook_params - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_submit_run_with_new_cluster_and_spark_jar_task(self): - def my_pipeline(): - run_name = "test-run" - new_cluster = { - "spark_version": "5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - } - libraries = [ - {"jar": "dbfs:/my-jar.jar"}, - {"maven": {"coordinates": "org.jsoup:jsoup:1.7.2"}} - ] - spark_jar_task = { - "main_class_name": 
"com.databricks.ComputeModels" - } - - expected_spec = { - "run_name": run_name, - "new_cluster": new_cluster, - "libraries": libraries, - "spark_jar_task": spark_jar_task - } - - res = SubmitRunOp( - name="submitrun", - run_name=run_name, - new_cluster=new_cluster, - libraries=libraries, - spark_jar_task=spark_jar_task - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_submit_run_with_new_cluster_and_spark_python_task(self): - def my_pipeline(): - run_name = "test-run" - new_cluster = { - "spark_version":"5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - } - spark_python_task = { - "python_file": "dbfs:/docs/pi.py", - "parameters": [ - "10" - ] - } - - expected_spec = { - "run_name": run_name, - "new_cluster": new_cluster, - "spark_python_task": spark_python_task - } - - res = SubmitRunOp( - name="submitrun", - run_name=run_name, - new_cluster=new_cluster, - spark_python_task=spark_python_task - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_submit_run_with_new_cluster_and_spark_submit_task(self): - def my_pipeline(): - run_name = "test-run" - new_cluster = { - "spark_version":"5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - } - spark_submit_task = { - "parameters": [ - "--class", - "org.apache.spark.examples.SparkPi", - "dbfs:/docs/sparkpi.jar", - "10" - ] - } - - expected_spec = { - "run_name": run_name, - "new_cluster": new_cluster, - "spark_submit_task":spark_submit_task - } - - res = SubmitRunOp( - name="submitrun", - run_name=run_name, - new_cluster=new_cluster, - spark_submit_task=spark_submit_task - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_submit_run_with_existing_cluster_and_notebook_task(self): - def my_pipeline(): - run_name = "test-run" - existing_cluster_id = "1201-my-cluster" - notebook_task = { - "notebook_path": "/Users/donald@duck.com/my-notebook" - } - timeout_seconds = 120 - - expected_spec = { - "run_name": run_name, - "existing_cluster_id": existing_cluster_id, - "notebook_task": notebook_task, - "timeout_seconds": timeout_seconds - } - - res = SubmitRunOp( - name="submitrun", - run_name=run_name, - existing_cluster_id=existing_cluster_id, - notebook_task=notebook_task, - timeout_seconds=timeout_seconds - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_submit_run_with_spec(self): - def my_pipeline(): - spec = { - "run_name": "test-run", - "new_cluster": { - "spark_version":"5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - "spark_submit_task": { - "parameters": [ - "--class", - "org.apache.spark.examples.SparkPi", - "dbfs:/docs/sparkpi.jar", - "10" - ] - } - } - - res = SubmitRunOp( - name="submitrun", - spec=spec - ) - - self.assert_res(res, spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_submit_run_with_spec_and_extra_args(self): - def my_pipeline(): - spark_submit_task = { - "parameters": [ - "--class", - "org.apache.spark.examples.SparkPi", - "dbfs:/docs/sparkpi.jar", - "10" - ] - } - spec = { - "run_name": "test-run", - "new_cluster": { - "spark_version":"5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - "spark_submit_task": { - "parameters": [ - "--class" - ] - } - } - - expected_spec = { - "run_name": 
"test-run", - "new_cluster": { - "spark_version":"5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - "spark_submit_task": { - "parameters": [ - "--class", - "org.apache.spark.examples.SparkPi", - "dbfs:/docs/sparkpi.jar", - "10" - ] - } - } - - res = SubmitRunOp( - name="submitrun", - spec=spec, - spark_submit_task=spark_submit_task - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_submit_run_with_json_spec(self): - def my_pipeline(): - run_name = "test-run" - json_spec = """ - { - "new_cluster": { - "spark_version":"5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - "spark_submit_task": { - "parameters": [ - "--class", - "org.apache.spark.examples.SparkPi", - "dbfs:/docs/sparkpi.jar", - "10" - ] - } - } - """ - - expected_spec = { - "run_name": run_name, - "new_cluster": { - "spark_version":"5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - "spark_submit_task": { - "parameters": [ - "--class", - "org.apache.spark.examples.SparkPi", - "dbfs:/docs/sparkpi.jar", - "10" - ] - } - } - - res = SubmitRunOp.from_json_spec( - name="submitrun", - run_name=run_name, - json_spec=json_spec - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_submit_run_with_json_file_spec(self): - def my_pipeline(): - run_name = "test-run" - current_path = Path(__file__).parent - json_spec_file_name = current_path.joinpath("run_spec.json") - - expected_spec = { - "run_name": run_name, - "new_cluster": { - "spark_version":"5.3.x-scala2.11", - "node_type_id": "Standard_D3_v2", - "num_workers": 2 - }, - "spark_submit_task": { - "parameters": [ - "--class", - "org.apache.spark.examples.SparkPi", - "dbfs:/docs/sparkpi.jar", - "10" - ] - } - } - - res = SubmitRunOp.from_file_name( - name="submitrun", - run_name=run_name, - file_name=json_spec_file_name - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def assert_res(self, res, expected_spec): - self.assertEqual(res.name, "submitrun") - self.assertEqual(res.resource.action, "create") - self.assertEqual( - res.resource.success_condition, - "status.metadata.state.life_cycle_state in (TERMINATED, SKIPPED, INTERNAL_ERROR)" - ) - self.assertEqual(res.resource.failure_condition, None) - self.assertEqual(res.resource.manifest, None) - expected_attribute_outputs = { - "name": "{.metadata.name}", - "job_id": "{.status.metadata.job_id}", - "number_in_job": "{.status.metadata.number_in_job}", - "run_id": "{.status.metadata.run_id}", - "run_name": "{.status.metadata.run_name}", - "life_cycle_state": "{.status.metadata.state.life_cycle_state}", - "result_state": "{.status.metadata.state.result_state}", - "notebook_output_result": "{.status.notebook_output.result}", - "notebook_output_truncated": "{.status.notebook_output.truncated}", - "error": "{.status.error}", - "manifest": "{}" - } - self.assertEqual(res.attribute_outputs, expected_attribute_outputs) - expected_outputs = { - "name": PipelineParam(name="name", op_name=res.name), - "job_id": PipelineParam(name="job_id", op_name=res.name), - "number_in_job": PipelineParam(name="number_in_job", op_name=res.name), - "run_id": PipelineParam(name="run_id", op_name=res.name), - "run_name": PipelineParam(name="run_name", op_name=res.name), - "life_cycle_state": PipelineParam(name="life_cycle_state", op_name=res.name), - "result_state": 
PipelineParam(name="result_state", op_name=res.name), - "notebook_output_result": PipelineParam(name="notebook_output_result", - op_name=res.name), - "notebook_output_truncated": PipelineParam(name="notebook_output_truncated", - op_name=res.name), - "error": PipelineParam(name="error", op_name=res.name), - "manifest": PipelineParam(name="manifest", op_name=res.name) - } - self.assertEqual(res.outputs, expected_outputs) - self.assertEqual( - res.output, - PipelineParam(name="name", op_name=res.name) - ) - self.assertEqual(res.dependent_names, []) - self.assertEqual(res.k8s_resource["kind"], "Run") - self.assertEqual(res.k8s_resource["metadata"]["name"], "test-run") - self.assertEqual(res.k8s_resource["spec"], expected_spec) - -class TestDeleteRunOp(unittest.TestCase): - - def test_databricks_delete_run_without_k8s_or_run_name(self): - def my_pipeline(): - DeleteRunOp( - name="deleterun" - ) - - self.assertRaises(ValueError, lambda: kfp.compiler.Compiler()._create_workflow(my_pipeline)) - - def test_databricks_delete_run(self): - def my_pipeline(): - - res = DeleteRunOp( - name="deleterun", - run_name="test-run" - ) - - self.assertEqual(res.name, "deleterun") - self.assertEqual(res.resource.action, "delete") - self.assertEqual(res.resource.success_condition, None) - self.assertEqual(res.resource.failure_condition, None) - self.assertEqual(res.resource.manifest, None) - self.assertEqual(res.attribute_outputs, {}) - self.assertEqual(res.outputs, {}) - self.assertEqual(res.output, None) - self.assertEqual(res.dependent_names, []) - self.assertEqual(res.k8s_resource["kind"], "Run") - self.assertEqual(res.k8s_resource["metadata"]["name"], "test-run") - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - -if __name__ == '__main__': - unittest.main() diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/tests/test_secretscope_op.py b/samples/contrib/azure-samples/kfp-azure-databricks/tests/test_secretscope_op.py deleted file mode 100644 index 7d4fdefa712..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/tests/test_secretscope_op.py +++ /dev/null @@ -1,388 +0,0 @@ -import unittest -from pathlib import Path -import kfp -from kfp.dsl import PipelineParam -from databricks import CreateSecretScopeOp, DeleteSecretScopeOp - -class TestCreateSecretScopeOp(unittest.TestCase): - - def test_databricks_create_secretscope_without_k8s_or_scope_name(self): - def my_pipeline(): - CreateSecretScopeOp( - name="createsecretscope", - initial_manage_principal="users", - secrets=[ - { - "key": "string-secret", - "string_value": "helloworld" - }, - { - "key": "byte-secret", - "byte_value": "aGVsbG93b3JsZA==" - }, - { - "key": "ref-secret", - "value_from": { - "secret_key_ref": { - "name": "mysecret", - "key": "username" - } - } - } - ], - acls=[ - { - "principal": "user@foo.com", - "permission": "READ" - } - ] - ) - - self.assertRaises(ValueError, lambda: kfp.compiler.Compiler()._create_workflow(my_pipeline)) - - def test_databricks_create_secretscope(self): - def my_pipeline(): - scope_name = "test-secretscope" - initial_manage_principal = "users" - secrets = [ - { - "key": "string-secret", - "string_value": "helloworld" - }, - { - "key": "byte-secret", - "byte_value": "aGVsbG93b3JsZA==" - }, - { - "key": "ref-secret", - "value_from": { - "secret_key_ref": { - "name": "mysecret", - "key": "username" - } - } - } - ] - acls = [ - { - "principal": "user@foo.com", - "permission": "READ" - } - ] - - expected_spec = { - "initial_manage_permission": initial_manage_principal, - "secrets": 
secrets, - "acls": acls - } - - res = CreateSecretScopeOp( - name="createsecretscope", - scope_name=scope_name, - initial_manage_principal=initial_manage_principal, - secrets=secrets, - acls=acls - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_create_secretscope_with_spec(self): - def my_pipeline(): - scope_name = "test-secretscope" - spec = { - "initial_manage_permission": "users", - "secrets": [ - { - "key": "string-secret", - "string_value": "helloworld" - }, - { - "key": "byte-secret", - "byte_value": "aGVsbG93b3JsZA==" - }, - { - "key": "ref-secret", - "value_from": { - "secret_key_ref": { - "name": "mysecret", - "key": "username" - } - } - } - ], - "acls": [ - { - "principal": "user@foo.com", - "permission": "READ" - } - ] - } - - res = CreateSecretScopeOp( - name="createsecretscope", - scope_name=scope_name, - spec=spec - ) - - self.assert_res(res, spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_create_secretscope_with_spec_and_extra_args(self): - def my_pipeline(): - scope_name = "test-secretscope" - acls = [ - { - "principal": "user@foo.com", - "permission": "READ" - } - ] - spec = { - "initial_manage_permission": "users", - "secrets": [ - { - "key": "string-secret", - "string_value": "helloworld" - }, - { - "key": "byte-secret", - "byte_value": "aGVsbG93b3JsZA==" - }, - { - "key": "ref-secret", - "value_from": { - "secret_key_ref": { - "name": "mysecret", - "key": "username" - } - } - } - ] - } - - expected_spec = { - "initial_manage_permission": "users", - "secrets": [ - { - "key": "string-secret", - "string_value": "helloworld" - }, - { - "key": "byte-secret", - "byte_value": "aGVsbG93b3JsZA==" - }, - { - "key": "ref-secret", - "value_from": { - "secret_key_ref": { - "name": "mysecret", - "key": "username" - } - } - } - ], - "acls": acls - } - - res = CreateSecretScopeOp( - name="createsecretscope", - scope_name=scope_name, - spec=spec, - acls=acls - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_create_secretscope_with_json_spec(self): - def my_pipeline(): - scope_name = "test-secretscope" - json_spec = """ - { - "initial_manage_permission": "users", - "secrets": [ - { - "key": "string-secret", - "string_value": "helloworld" - }, - { - "key": "byte-secret", - "byte_value": "aGVsbG93b3JsZA==" - }, - { - "key": "ref-secret", - "value_from": { - "secret_key_ref": { - "name": "mysecret", - "key": "username" - } - } - } - ], - "acls": [ - { - "principal": "user@foo.com", - "permission": "READ" - } - ] - } - """ - - expected_spec = { - "initial_manage_permission": "users", - "secrets": [ - { - "key": "string-secret", - "string_value": "helloworld" - }, - { - "key": "byte-secret", - "byte_value": "aGVsbG93b3JsZA==" - }, - { - "key": "ref-secret", - "value_from": { - "secret_key_ref": { - "name": "mysecret", - "key": "username" - } - } - } - ], - "acls": [ - { - "principal": "user@foo.com", - "permission": "READ" - } - ] - } - - res = CreateSecretScopeOp.from_json_spec( - name="createsecretscope", - scope_name=scope_name, - json_spec=json_spec - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_create_secretscope_with_json_file_spec(self): - def my_pipeline(): - scope_name = "test-secretscope" - current_path = Path(__file__).parent - json_spec_file_name = current_path.joinpath("secretscope_spec.json") - - expected_spec = 
{ - "initial_manage_permission": "users", - "secrets": [ - { - "key": "string-secret", - "string_value": "helloworld" - }, - { - "key": "byte-secret", - "byte_value": "aGVsbG93b3JsZA==" - }, - { - "key": "ref-secret", - "value_from": { - "secret_key_ref": { - "name": "mysecret", - "key": "username" - } - } - } - ], - "acls": [ - { - "principal": "user@foo.com", - "permission": "READ" - } - ] - } - - res = CreateSecretScopeOp.from_file_name( - name="createsecretscope", - scope_name=scope_name, - file_name=json_spec_file_name - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def assert_res(self, res, expected_spec): - self.assertEqual(res.name, "createsecretscope") - self.assertEqual(res.resource.action, "create") - self.assertEqual( - res.resource.success_condition, - "status.secretscope.name" - ) - self.assertEqual(res.resource.failure_condition, None) - self.assertEqual(res.resource.manifest, None) - expected_attribute_outputs = { - "name": "{.metadata.name}", - "secretscope_name": "{.status.secretscope.name}", - "secretscope_backend_type": "{.status.secretscope.backend_type}", - "secret_in_cluster_available": "{.status.secretinclusteravailable}", - "manifest": "{}" - } - self.assertEqual(res.attribute_outputs, expected_attribute_outputs) - expected_outputs = { - "name": PipelineParam(name="name", op_name=res.name), - "secretscope_name": - PipelineParam(name="secretscope_name", op_name=res.name), - "secretscope_backend_type": - PipelineParam(name="secretscope_backend_type", op_name=res.name), - "secret_in_cluster_available": - PipelineParam(name="secret_in_cluster_available", op_name=res.name), - "manifest": PipelineParam(name="manifest", op_name=res.name) - } - self.assertEqual(res.outputs, expected_outputs) - self.assertEqual( - res.output, - PipelineParam(name="name", op_name=res.name) - ) - self.assertEqual(res.dependent_names, []) - self.assertEqual(res.k8s_resource["kind"], "SecretScope") - self.assertEqual(res.k8s_resource["metadata"]["name"], "test-secretscope") - self.assertEqual(res.k8s_resource["spec"], expected_spec) - -class TestDeleteSecretScopeOp(unittest.TestCase): - - def test_databricks_delete_secretscope_without_k8s_or_scope_name(self): - def my_pipeline(): - DeleteSecretScopeOp( - name="deletesecretscope" - ) - - self.assertRaises(ValueError, lambda: kfp.compiler.Compiler()._create_workflow(my_pipeline)) - - def test_databricks_delete_secretscope(self): - def my_pipeline(): - - res = DeleteSecretScopeOp( - name="deletesecretscope", - scope_name="test-secretscope" - ) - - self.assertEqual(res.name, "deletesecretscope") - self.assertEqual(res.resource.action, "delete") - self.assertEqual(res.resource.success_condition, None) - self.assertEqual(res.resource.failure_condition, None) - self.assertEqual(res.resource.manifest, None) - self.assertEqual(res.attribute_outputs, {}) - self.assertEqual(res.outputs, {}) - self.assertEqual(res.output, None) - self.assertEqual(res.dependent_names, []) - self.assertEqual(res.k8s_resource["kind"], "SecretScope") - self.assertEqual(res.k8s_resource["metadata"]["name"], "test-secretscope") - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - -if __name__ == '__main__': - unittest.main() \ No newline at end of file diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/tests/test_workspaceitem_op.py b/samples/contrib/azure-samples/kfp-azure-databricks/tests/test_workspaceitem_op.py deleted file mode 100644 index 1141b2c4fc3..00000000000 --- 
a/samples/contrib/azure-samples/kfp-azure-databricks/tests/test_workspaceitem_op.py +++ /dev/null @@ -1,215 +0,0 @@ -import unittest -from pathlib import Path -import kfp -from kfp.dsl import PipelineParam -from databricks import ImportWorkspaceItemOp, DeleteWorkspaceItemOp - -class TestImportWorkspaceItemOp(unittest.TestCase): - - def test_databricks_import_workspaceitem_without_k8s_or_item_name(self): - def my_pipeline(): - ImportWorkspaceItemOp( - name="importworkspaceitem", - content="cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK", - path="/Users/user@foo.com/ScalaExampleNotebook", - language="SCALA", - file_format="SOURCE" - ) - - self.assertRaises(ValueError, lambda: kfp.compiler.Compiler()._create_workflow(my_pipeline)) - - def test_databricks_import_workspaceitem(self): - def my_pipeline(): - item_name = "test-item" - content = "cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK" - path = "/Users/user@foo.com/ScalaExampleNotebook" - language = "SCALA" - file_format = "SOURCE" - - expected_spec = { - "content": content, - "path": path, - "language": language, - "format": file_format - } - - res = ImportWorkspaceItemOp( - name="importworkspaceitem", - item_name=item_name, - content=content, - path=path, - language=language, - file_format=file_format - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_import_workspaceitem_with_spec(self): - def my_pipeline(): - item_name = "test-item" - spec = { - "content": "cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK", - "path": "/Users/user@foo.com/ScalaExampleNotebook", - "language": "SCALA", - "format": "SOURCE" - } - - res = ImportWorkspaceItemOp( - name="importworkspaceitem", - item_name=item_name, - spec=spec - ) - - self.assert_res(res, spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_import_workspaceitem_with_spec_and_extra_args(self): - def my_pipeline(): - item_name = "test-item" - content = "cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK" - spec = { - "path": "/Users/user@foo.com/ScalaExampleNotebook", - "language": "SCALA", - "format": "SOURCE" - } - - expected_spec = { - "content": "cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK", - "path": "/Users/user@foo.com/ScalaExampleNotebook", - "language": "SCALA", - "format": "SOURCE" - } - - res = ImportWorkspaceItemOp( - name="importworkspaceitem", - item_name=item_name, - spec=spec, - content=content - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_import_workspaceitem_with_json_spec(self): - def my_pipeline(): - item_name = "test-item" - json_spec = """ - { - "content": "cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK", - "path": "/Users/user@foo.com/ScalaExampleNotebook", - "language": "SCALA", - "format": "SOURCE" - } - """ - - expected_spec = { - "content": "cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK", - "path": "/Users/user@foo.com/ScalaExampleNotebook", - "language": "SCALA", - "format": "SOURCE" - } - - res = ImportWorkspaceItemOp.from_json_spec( - name="importworkspaceitem", - item_name=item_name, - json_spec=json_spec - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def test_databricks_import_workspaceitem_with_json_file_spec(self): - def my_pipeline(): - item_name = "test-item" - current_path = Path(__file__).parent - json_spec_file_name = current_path.joinpath("workspaceitem_spec.json") - - expected_spec = { - "content": "cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK", - "path": "/Users/user@foo.com/ScalaExampleNotebook", - "language": "SCALA", 
- "format": "SOURCE" - } - - res = ImportWorkspaceItemOp.from_file_name( - name="importworkspaceitem", - item_name=item_name, - file_name=json_spec_file_name - ) - - self.assert_res(res, expected_spec) - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - - def assert_res(self, res, expected_spec): - self.assertEqual(res.name, "importworkspaceitem") - self.assertEqual(res.resource.action, "create") - self.assertEqual(res.resource.success_condition, "status.object_hash") - self.assertEqual(res.resource.failure_condition, None) - self.assertEqual(res.resource.manifest, None) - expected_attribute_outputs = { - "name": "{.metadata.name}", - "object_hash": "{.status.object_hash}", - "object_language": "{.status.object_info.language}", - "object_type": "{.status.object_info.object_type}", - "object_path": "{.status.object_info.path}", - "manifest": "{}" - } - self.assertEqual(res.attribute_outputs, expected_attribute_outputs) - expected_outputs = { - "name": PipelineParam(name="name", op_name=res.name), - "object_hash": PipelineParam(name="object_hash", op_name=res.name), - "object_language": PipelineParam(name="object_language", op_name=res.name), - "object_type": PipelineParam(name="object_type", op_name=res.name), - "object_path": PipelineParam(name="object_path", op_name=res.name), - "manifest": PipelineParam(name="manifest", op_name=res.name) - } - self.assertEqual(res.outputs, expected_outputs) - self.assertEqual( - res.output, - PipelineParam(name="name", op_name=res.name) - ) - self.assertEqual(res.dependent_names, []) - self.assertEqual(res.k8s_resource["kind"], "WorkspaceItem") - self.assertEqual(res.k8s_resource["metadata"]["name"], "test-item") - self.assertEqual(res.k8s_resource["spec"], expected_spec) - -class TestDeleteWorkspaceItemOp(unittest.TestCase): - - def test_databricks_delete_workspaceitem_without_k8s_or_item_name(self): - def my_pipeline(): - DeleteWorkspaceItemOp( - name="deleteworkspaceitem" - ) - - self.assertRaises(ValueError, lambda: kfp.compiler.Compiler()._create_workflow(my_pipeline)) - - def test_databricks_delete_workspaceitem(self): - def my_pipeline(): - - res = DeleteWorkspaceItemOp( - name="deleteworkspaceitem", - item_name="test-item" - ) - - self.assertEqual(res.name, "deleteworkspaceitem") - self.assertEqual(res.resource.action, "delete") - self.assertEqual(res.resource.success_condition, None) - self.assertEqual(res.resource.failure_condition, None) - self.assertEqual(res.resource.manifest, None) - self.assertEqual(res.attribute_outputs, {}) - self.assertEqual(res.outputs, {}) - self.assertEqual(res.output, None) - self.assertEqual(res.dependent_names, []) - self.assertEqual(res.k8s_resource["kind"], "WorkspaceItem") - self.assertEqual(res.k8s_resource["metadata"]["name"], "test-item") - - kfp.compiler.Compiler()._create_workflow(my_pipeline) - -if __name__ == '__main__': - unittest.main() \ No newline at end of file diff --git a/samples/contrib/azure-samples/kfp-azure-databricks/tests/workspaceitem_spec.json b/samples/contrib/azure-samples/kfp-azure-databricks/tests/workspaceitem_spec.json deleted file mode 100644 index 97c9ed49564..00000000000 --- a/samples/contrib/azure-samples/kfp-azure-databricks/tests/workspaceitem_spec.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "content": "cHJpbnQoImhlbGxvLCB3b3JsZCIpCgoK", - "path": "/Users/user@foo.com/ScalaExampleNotebook", - "language": "SCALA", - "format": "SOURCE" -} diff --git a/samples/contrib/e2e-outlier-drift-explainer/README.md b/samples/contrib/e2e-outlier-drift-explainer/README.md deleted file 
mode 100644 index 8771eb14af2..00000000000 --- a/samples/contrib/e2e-outlier-drift-explainer/README.md +++ /dev/null @@ -1,54 +0,0 @@ -# Build, Train and Deploy with Drift, Outlier Detectors and Explainers enabled on KFServing and Seldon - -These examples show how to build a model and then deploy with [KFServing](https://github.com/kubeflow/kfserving) or [Seldon Core](https://github.com/SeldonIO/seldon-core) with model explainers, drift detectors and outlier detectors. The pipelines are built using [Kale](https://github.com/kubeflow-kale/kale). - -## Examples - - * [Seldon](./seldon/README.md) - * [KFServing](./kfserving/README.md) - - -## GCP Setup - -For a GCP cluster we need a RWX Persistent Volume for the shared data Kale needs. To set this up on GCP, update and run the script `gcp-create-rwx-pv.sh` after setting the values for your project, Filestore name and Zone: - -``` -PROJECT=seldon-demos -FS=pipeline-data -ZONE=europe-west1-b - -gcloud beta filestore instances create ${FS} --project=${PROJECT} --zone=${ZONE} --tier=STANDARD --file-share=name="volumes",capacity=1TB --network=name="default",reserved-ip-range="10.0.0.0/29" - -FSADDR=$(gcloud beta filestore instances describe ${FS} --project=${PROJECT} --zone=${ZONE} --format="value(networks.ipAddresses[0])") - -helm install nfs-cp stable/nfs-client-provisioner --set nfs.server=${FSADDR} --set nfs.path=/volumes --namespace=kubeflow - -kubectl rollout status deploy/nfs-cp-nfs-client-provisioner -n kubeflow -``` - -If you build the pipeline Python DSL using Kale from the notebook, you will at present need to modify the created Python file and change the Kale `VolumeOp` by adding a `storage_class` for the NFS PV, for example: - -``` -marshal_vop = dsl.VolumeOp( - name="kale-marshal-volume", - resource_name="kale-marshal-pvc", - storage_class="nfs-client", - modes=dsl.VOLUME_MODE_RWM, - size="1Gi") -``` - -## Tested on - -If you have tested these pipelines successfully please add a PR to extend the table below. 
- -| K8S | Kubeflow | Knative Eventing | Seldon | KFServing | Kale | Notes -| ----| ------- | ---------------- | ------ | --------- | ---- | ----- | -| GKE 1.14.10 | 1.0 | 0.11 | 1.2.1 | 0.3.0 | 0.5.0 | GCP Setup above, Kale storage_class fix | - - - - - - - - diff --git a/samples/contrib/e2e-outlier-drift-explainer/gcp-create-rwx-pv.sh b/samples/contrib/e2e-outlier-drift-explainer/gcp-create-rwx-pv.sh deleted file mode 100644 index 80561c4033d..00000000000 --- a/samples/contrib/e2e-outlier-drift-explainer/gcp-create-rwx-pv.sh +++ /dev/null @@ -1,11 +0,0 @@ -PROJECT=seldon-demos -FS=pipeline-data -ZONE=europe-west1-b - -gcloud beta filestore instances create ${FS} --project=${PROJECT} --zone=${ZONE} --tier=STANDARD --file-share=name="volumes",capacity=1TB --network=name="default",reserved-ip-range="10.0.0.0/29" - -FSADDR=$(gcloud beta filestore instances describe ${FS} --project=${PROJECT} --zone=${ZONE} --format="value(networks.ipAddresses[0])") - -helm install nfs-cp stable/nfs-client-provisioner --set nfs.server=${FSADDR} --set nfs.path=/volumes --namespace=kubeflow - -kubectl rollout status deploy/nfs-cp-nfs-client-provisioner -n kubeflow diff --git a/samples/contrib/e2e-outlier-drift-explainer/kfserving/README.md b/samples/contrib/e2e-outlier-drift-explainer/kfserving/README.md deleted file mode 100644 index 325a865979f..00000000000 --- a/samples/contrib/e2e-outlier-drift-explainer/kfserving/README.md +++ /dev/null @@ -1,79 +0,0 @@ -# KFServing End to End Deployment Examples - - * [CIFAR10 Image Classifier with Explainer, Outlier Detector and Drift Detector](#cifar10-image-classification-model) - * [Income Classifier with Explainer and Outlier Detector](#income-classification-model) - -## Running Pipelines - - * [See "tested on" section](../README.md#tested-on) - -## Running in Kubeflow Jupyter Lab - - 1. Create a new JupyterLab server using the image `seldonio/jupyter-lab-alibi-kale:0.11` - - -## CIFAR10 Image Classification Model - - * Tensorflow CIFAR10 Image Model with deployed explainer, outlier detector and drift detector - * [Kale annotated Jupyter Notebook](./kfserving_e2e_cifar10.ipynb) - * [GCP Pipeline](./kfserving_e2e_cifar10.kale.nfs.py) - * Assumes `storage_class="nfs-client"` - * [Default Pipeline](./kfserving_e2e_cifar10.kale.default.py) - * Assumes a ReadWriteMany PVC will succeed on your cluster. - -![pipeline](cifar10-pipeline.png) - - -### Model Deployment and Predictions - -Deploy a trained Tensorflow model and call the KFServing API to get predictions. - -![cifar10 prediction](cifar10-prediction.png) - -### Build and test a Model Explainer - -Train an Anchor Images Explainer using [Alibi](https://github.com/SeldonIO/alibi) and test. - -![cifar10 explainer](cifar10-explainer.png) - -### Outlier Detector Deployment and Test - -Deploy a trained outlier detector using [Alibi-Detect](https://github.com/SeldonIO/alibi-detect) and send corrupted images to the model to check they are marked as outliers. - -![cifar10 outlier](cifar10-outlier.png) - -### Drift Detector Deployment and Test - -Create a drift detector using [Alibi-Detect](https://github.com/SeldonIO/alibi-detect) and deploy and test using motion blurred images. - -![cifar10 drift](cifar10-drift.png) - - -## Income Classification Model - -This example illustrates a simple tabular data use case built on [demographic features from a 1996 US census](https://archive.ics.uci.edu/ml/datasets/census+income) to create a binary classifier and attach explanations and outlier detection.. 
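Both the CIFAR10 example above and the income example listed below deploy the trained model as a KFServing `InferenceService` and then query it for predictions and explanations from the notebooks. As a rough, hedged sketch of what such a call looks like (the ingress address, host header, model name and payload here are placeholders, not values taken from these samples), the model is reached over KFServing's V1 HTTP data plane, with the attached Alibi explainer available via the `:explain` verb:

```python
# Illustrative sketch only: the ingress address, Host header, model name and
# payload are placeholders, not the actual values used by these notebooks.
import requests

CLUSTER_IP = "localhost:8080"                   # istio-ingressgateway address (assumption)
SERVICE_HOSTNAME = "income.admin.example.com"   # Host header of the InferenceService (assumption)
MODEL = "income"

headers = {"Host": SERVICE_HOSTNAME}
payload = {"instances": [[39, 7, 1, 1, 1, 1, 4, 1, 2174, 0, 40, 9]]}  # one tabular row, illustrative

# Prediction via the V1 HTTP API exposed by KFServing.
pred = requests.post(
    f"http://{CLUSTER_IP}/v1/models/{MODEL}:predict",
    json=payload,
    headers=headers,
)
print(pred.json())

# The Alibi explainer attached to the same InferenceService answers on :explain.
expl = requests.post(
    f"http://{CLUSTER_IP}/v1/models/{MODEL}:explain",
    json=payload,
    headers=headers,
)
print(expl.json())
```

The notebooks linked in the bullet list that follows perform the equivalent requests, so treat this snippet purely as orientation for the screenshots and outputs shown in this README.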
- - * KFServing Income Classification Model with deployed explainer and outlier detector - * [Kale annotated Jupyter Notebook](./kfserving_e2e_adult.ipynb) - * [Pipeline](./kfserving_e2e_adult.kale.nfs.py) - * Assumes `storage_class="nfs-client"` - * [Pipeline](./kfserving_e2e_adult.kale.default.py) - * Assumes a ReadWriteMany PVC will succeed on your cluster - -![income pipeline](income-pipeline.png) - -### Model Predictions Explanations - -``` -Prediction: <=50K -Anchor: Marital Status = Separated AND Sex = Female -Precision: 0.98 -Coverage: 0.11 -``` - -## Outlier Detection - -``` -{'data': {'instance_score': None, 'feature_score': None, 'is_outlier': [1]}, 'meta': {'name': 'IForest', 'detector_type': 'offline', 'data_type': 'tabular'}} -Outlier True -``` diff --git a/samples/contrib/e2e-outlier-drift-explainer/kfserving/cifar10-drift.png b/samples/contrib/e2e-outlier-drift-explainer/kfserving/cifar10-drift.png deleted file mode 100644 index 795ebacf3c6..00000000000 Binary files a/samples/contrib/e2e-outlier-drift-explainer/kfserving/cifar10-drift.png and /dev/null differ diff --git a/samples/contrib/e2e-outlier-drift-explainer/kfserving/cifar10-explainer.png b/samples/contrib/e2e-outlier-drift-explainer/kfserving/cifar10-explainer.png deleted file mode 100644 index f41749badd0..00000000000 Binary files a/samples/contrib/e2e-outlier-drift-explainer/kfserving/cifar10-explainer.png and /dev/null differ diff --git a/samples/contrib/e2e-outlier-drift-explainer/kfserving/cifar10-outlier.png b/samples/contrib/e2e-outlier-drift-explainer/kfserving/cifar10-outlier.png deleted file mode 100644 index 2ff490f87c7..00000000000 Binary files a/samples/contrib/e2e-outlier-drift-explainer/kfserving/cifar10-outlier.png and /dev/null differ diff --git a/samples/contrib/e2e-outlier-drift-explainer/kfserving/cifar10-pipeline.png b/samples/contrib/e2e-outlier-drift-explainer/kfserving/cifar10-pipeline.png deleted file mode 100644 index 0154df0e113..00000000000 Binary files a/samples/contrib/e2e-outlier-drift-explainer/kfserving/cifar10-pipeline.png and /dev/null differ diff --git a/samples/contrib/e2e-outlier-drift-explainer/kfserving/cifar10-prediction.png b/samples/contrib/e2e-outlier-drift-explainer/kfserving/cifar10-prediction.png deleted file mode 100644 index 41dc87f11b5..00000000000 Binary files a/samples/contrib/e2e-outlier-drift-explainer/kfserving/cifar10-prediction.png and /dev/null differ diff --git a/samples/contrib/e2e-outlier-drift-explainer/kfserving/income-pipeline.png b/samples/contrib/e2e-outlier-drift-explainer/kfserving/income-pipeline.png deleted file mode 100644 index 8ad541608b5..00000000000 Binary files a/samples/contrib/e2e-outlier-drift-explainer/kfserving/income-pipeline.png and /dev/null differ diff --git a/samples/contrib/e2e-outlier-drift-explainer/kfserving/kfserving_e2e_adult.ipynb b/samples/contrib/e2e-outlier-drift-explainer/kfserving/kfserving_e2e_adult.ipynb deleted file mode 100644 index af343e0a189..00000000000 --- a/samples/contrib/e2e-outlier-drift-explainer/kfserving/kfserving_e2e_adult.ipynb +++ /dev/null @@ -1,1185 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "# End to End Machine Learning Pipeline for Income Prediction\n", - "\n", - "We use [demographic features from the 1996 US census](https://archive.ics.uci.edu/ml/datasets/census+income) to build an end to end machine learning pipeline. 
The pipeline is also annotated so it can be run as a [Kubeflow Pipeline](https://www.kubeflow.org/docs/pipelines/overview/pipelines-overview/) using the [Kale](https://github.com/kubeflow-kale/kale) pipeline generator.\n", - "\n", - "The notebook/pipeline stages are:\n", - "\n", - " 1. Setup \n", - " * Imports\n", - " * pipeline-parameters\n", - " * minio client test\n", - " 1. Train a simple sklearn model and push to minio\n", - " 1. Prepare an Anchors explainer for the model and push to minio\n", - " 1. Test Explainer\n", - " 1. Train an isolation forest outlier detector for the model and push to minio\n", - " 1. Deploy a KFServing model and test\n", - " 1. Deploy an outlier detector and test\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "imports" - ] - }, - "outputs": [], - "source": [ - "import numpy as np\n", - "from sklearn.ensemble import RandomForestClassifier\n", - "from sklearn.compose import ColumnTransformer\n", - "from sklearn.pipeline import Pipeline\n", - "from sklearn.impute import SimpleImputer\n", - "from sklearn.metrics import accuracy_score\n", - "from sklearn.preprocessing import StandardScaler, OneHotEncoder\n", - "from alibi.explainers import AnchorTabular\n", - "from alibi.datasets import fetch_adult\n", - "from minio import Minio\n", - "from minio.error import ResponseError\n", - "from joblib import dump, load\n", - "import dill\n", - "import time\n", - "import json\n", - "from subprocess import run, Popen, PIPE\n", - "from alibi_detect.utils.data import create_outlier_batch" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "pipeline-parameters" - ] - }, - "outputs": [], - "source": [ - "MINIO_HOST=\"minio-service.kubeflow:9000\"\n", - "MINIO_ACCESS_KEY=\"minio\"\n", - "MINIO_SECRET_KEY=\"minio123\"\n", - "MINIO_MODEL_BUCKET=\"seldon\"\n", - "INCOME_MODEL_PATH=\"sklearn/income/model\"\n", - "EXPLAINER_MODEL_PATH=\"sklearn/income/explainer\"\n", - "OUTLIER_MODEL_PATH=\"sklearn/income/outlier\"\n", - "DEPLOY_NAMESPACE=\"admin\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "functions" - ] - }, - "outputs": [], - "source": [ - "def get_minio():\n", - " return Minio(MINIO_HOST,\n", - " access_key=MINIO_ACCESS_KEY,\n", - " secret_key=MINIO_SECRET_KEY,\n", - " secure=False)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:setup" - ] - }, - "outputs": [], - "source": [ - "minioClient = get_minio()\n", - "buckets = minioClient.list_buckets()\n", - "for bucket in buckets:\n", - " print(bucket.name, bucket.creation_date)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "if not minioClient.bucket_exists(MINIO_MODEL_BUCKET):\n", - " minioClient.make_bucket(MINIO_MODEL_BUCKET)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Train Model" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:build_model", - "prev:setup" - ] - }, - "outputs": [], - "source": [ - "adult = fetch_adult()\n", - "adult.keys()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:" - ] - }, - "outputs": [], - "source": [ - "data = adult.data\n", - "target = adult.target\n", - "feature_names = adult.feature_names\n", - "category_map = adult.category_map" - ] - }, - { - "cell_type": "markdown", -
"metadata": { - "tags": [] - }, - "source": [ - "Note that for your own datasets you can use our utility function [gen_category_map](../api/alibi.utils.data.rst) to create the category map:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "from alibi.utils.data import gen_category_map" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Define shuffled training and test set" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "np.random.seed(0)\n", - "data_perm = np.random.permutation(np.c_[data, target])\n", - "data = data_perm[:,:-1]\n", - "target = data_perm[:,-1]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "idx = 30000\n", - "X_train,Y_train = data[:idx,:], target[:idx]\n", - "X_test, Y_test = data[idx+1:,:], target[idx+1:]" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "### Create feature transformation pipeline\n", - "Create feature pre-processor. Needs to have 'fit' and 'transform' methods. Different types of pre-processing can be applied to all or part of the features. In the example below we will standardize ordinal features and apply one-hot-encoding to categorical features.\n", - "\n", - "Ordinal features:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "ordinal_features = [x for x in range(len(feature_names)) if x not in list(category_map.keys())]\n", - "ordinal_transformer = Pipeline(steps=[('imputer', SimpleImputer(strategy='median')),\n", - " ('scaler', StandardScaler())])" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Categorical features:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "categorical_features = list(category_map.keys())\n", - "categorical_transformer = Pipeline(steps=[('imputer', SimpleImputer(strategy='median')),\n", - " ('onehot', OneHotEncoder(handle_unknown='ignore'))])" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Combine and fit:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "preprocessor = ColumnTransformer(transformers=[('num', ordinal_transformer, ordinal_features),\n", - " ('cat', categorical_transformer, categorical_features)])" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "### Train Random Forest model\n", - "\n", - "Fit on pre-processed (imputing, OHE, standardizing) data." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "np.random.seed(0)\n", - "clf = RandomForestClassifier(n_estimators=50)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "model=Pipeline(steps=[(\"preprocess\",preprocessor),(\"model\",clf)])\n", - "model.fit(X_train,Y_train)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Define predict function" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:" - ] - }, - "outputs": [], - "source": [ - "def predict_fn(x):\n", - " return model.predict(x)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:", - "prev:build_model" - ] - }, - "outputs": [], - "source": [ - "#predict_fn = lambda x: clf.predict(preprocessor.transform(x))\n", - "print('Train accuracy: ', accuracy_score(Y_train, predict_fn(X_train)))\n", - "print('Test accuracy: ', accuracy_score(Y_test, predict_fn(X_test)))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "dump(model, 'model.joblib') " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "print(get_minio().fput_object(MINIO_MODEL_BUCKET, f\"{INCOME_MODEL_PATH}/model.joblib\", 'model.joblib'))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Train Explainer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:train_explainer", - "prev:build_model" - ] - }, - "outputs": [], - "source": [ - "model.predict(X_train)\n", - "explainer = AnchorTabular(predict_fn, feature_names, categorical_names=category_map)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Discretize the ordinal features into quartiles" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "explainer.fit(X_train, disc_perc=[25, 50, 75])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "with open(\"explainer.dill\", \"wb\") as dill_file:\n", - " dill.dump(explainer, dill_file) \n", - " dill_file.close()\n", - "print(get_minio().fput_object(MINIO_MODEL_BUCKET, f\"{EXPLAINER_MODEL_PATH}/explainer.dill\", 'explainer.dill'))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Get Explanation\n", - "\n", - "Below, we get an anchor for the prediction of the first observation in the test set. An anchor is a sufficient condition - that is, when the anchor holds, the prediction should be the same as the prediction for this instance." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:explain", - "prev:train_explainer" - ] - }, - "outputs": [], - "source": [ - "model.predict(X_train)\n", - "idx = 0\n", - "class_names = adult.target_names\n", - "print('Prediction: ', class_names[explainer.predict_fn(X_test[idx].reshape(1, -1))[0]])" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "We set the precision threshold to 0.95. 
This means that predictions on observations where the anchor holds will be the same as the prediction on the explained instance at least 95% of the time." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "explanation = explainer.explain(X_test[idx], threshold=0.95)\n", - "print('Anchor: %s' % (' AND '.join(explanation['names'])))\n", - "print('Precision: %.2f' % explanation['precision'])\n", - "print('Coverage: %.2f' % explanation['coverage'])" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Train Outlier Detector" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:build_outlier", - "prev:build_model" - ] - }, - "outputs": [], - "source": [ - "from alibi_detect.od import IForest\n", - "\n", - "od = IForest(\n", - " threshold=0.,\n", - " n_estimators=200,\n", - ")\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "od.fit(X_train)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "np.random.seed(0)\n", - "perc_outlier = 5\n", - "threshold_batch = create_outlier_batch(X_train, Y_train, n_samples=1000, perc_outlier=perc_outlier)\n", - "X_threshold, y_threshold = threshold_batch.data.astype('float'), threshold_batch.target\n", - "#X_threshold = (X_threshold - mean) / stdev\n", - "print('{}% outliers'.format(100 * y_threshold.mean()))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "od.infer_threshold(X_threshold, threshold_perc=100-perc_outlier)\n", - "print('New threshold: {}'.format(od.threshold))\n", - "threshold = od.threshold" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "X_outlier = [[300, 4, 4, 2, 1, 4, 4, 0, 0, 0, 600, 9]]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "od.predict(\n", - " X_outlier\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "from alibi_detect.utils.saving import save_detector, load_detector\n", - "from os import listdir\n", - "from os.path import isfile, join\n", - "\n", - "filepath=\"ifoutlier\"\n", - "save_detector(od, filepath) \n", - "onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))]\n", - "for filename in onlyfiles:\n", - " print(filename)\n", - " print(get_minio().fput_object(MINIO_MODEL_BUCKET, f\"{OUTLIER_MODEL_PATH}/{filename}\", join(filepath, filename)))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Deploy KFServing Model" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:deploy_kfserving", - "prev:train_explainer" - ] - }, - "outputs": [], - "source": [ - "secret=f\"\"\"apiVersion: v1\n", - "kind: Secret\n", - "metadata:\n", - " name: income-kf-secret\n", - " namespace: {DEPLOY_NAMESPACE}\n", - " annotations:\n", - " serving.kubeflow.org/s3-endpoint: {MINIO_HOST} # replace with your s3 endpoint\n", - " serving.kubeflow.org/s3-usehttps: \"0\" # by default 1, for testing with minio you need to set to 0\n", - "type: Opaque\n", - "stringData:\n", - " awsAccessKeyID: 
{MINIO_ACCESS_KEY}\n", - " awsSecretAccessKey: {MINIO_SECRET_KEY}\n", - "\"\"\"\n", - "with open(\"secret.yaml\",\"w\") as f:\n", - " f.write(secret)\n", - "run(\"kubectl apply -f secret.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "secret = f\"\"\"apiVersion: v1\n", - "kind: Secret\n", - "metadata:\n", - " name: seldon-init-container-secret\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "type: Opaque\n", - "stringData:\n", - " AWS_ACCESS_KEY_ID: {MINIO_ACCESS_KEY}\n", - " AWS_SECRET_ACCESS_KEY: {MINIO_SECRET_KEY}\n", - " AWS_ENDPOINT_URL: http://{MINIO_HOST}\n", - " USE_SSL: \"false\"\n", - "\"\"\"\n", - "with open(\"secret.yaml\",\"w\") as f:\n", - " f.write(secret)\n", - "run(\"cat secret.yaml | kubectl apply -f -\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "sa = f\"\"\"apiVersion: v1\n", - "kind: ServiceAccount\n", - "metadata:\n", - " name: minio-kf-sa\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "secrets:\n", - " - name: income-kf-secret\n", - "\"\"\"\n", - "with open(\"sa.yaml\",\"w\") as f:\n", - " f.write(sa)\n", - "run(\"kubectl apply -f sa.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "from kubernetes import client\n", - "from kfserving import KFServingClient\n", - "from kfserving import constants\n", - "from kfserving import utils\n", - "from kfserving import V1alpha2EndpointSpec\n", - "from kfserving import V1alpha2PredictorSpec\n", - "from kfserving import V1alpha2ExplainerSpec\n", - "from kfserving import V1alpha2AlibiExplainerSpec\n", - "from kfserving import V1alpha2SKLearnSpec\n", - "from kfserving import V1alpha2InferenceServiceSpec\n", - "from kfserving import V1alpha2InferenceService\n", - "from kfserving import V1alpha2Logger\n", - "from kubernetes.client import V1ResourceRequirements\n", - "\n", - "api_version = constants.KFSERVING_GROUP + '/' + constants.KFSERVING_VERSION\n", - "default_endpoint_spec = V1alpha2EndpointSpec(\n", - " predictor=V1alpha2PredictorSpec(\n", - " service_account_name='minio-kf-sa',\n", - " sklearn=V1alpha2SKLearnSpec(\n", - " storage_uri='s3://'+MINIO_MODEL_BUCKET+'/'+ INCOME_MODEL_PATH,\n", - " resources=V1ResourceRequirements(\n", - " requests={'cpu':'100m','memory':'1Gi'},\n", - " limits={'cpu':'100m', 'memory':'1Gi'})),\n", - " logger=V1alpha2Logger(\n", - " mode='all'\n", - " )),\n", - " explainer=V1alpha2ExplainerSpec(\n", - " service_account_name='minio-kf-sa',\n", - " alibi=V1alpha2AlibiExplainerSpec(\n", - " type='AnchorTabular',\n", - " storage_uri='s3://'+MINIO_MODEL_BUCKET+'/'+ EXPLAINER_MODEL_PATH,\n", - " resources=V1ResourceRequirements(\n", - " requests={'cpu':'100m','memory':'1Gi'},\n", - " limits={'cpu':'100m', 'memory':'1Gi'}))))\n", - " \n", - "isvc = V1alpha2InferenceService(api_version=api_version,\n", - " kind=constants.KFSERVING_KIND,\n", - " metadata=client.V1ObjectMeta(\n", - " name='kf-income', namespace=DEPLOY_NAMESPACE),\n", - " spec=V1alpha2InferenceServiceSpec(default=default_endpoint_spec))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "KFServing = KFServingClient()\n", - "KFServing.create(isvc)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - 
"KFServing.get('kf-cifar10', namespace=DEPLOY_NAMESPACE, watch=True, timeout_seconds=120)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Test Model and explainer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:test_model", - "prev:deploy_kfserving" - ] - }, - "outputs": [], - "source": [ - "payload='{\"instances\": [[53,4,0,2,8,4,4,0,0,0,60,9]]}'\n", - "cmd=f\"\"\"curl -v -d '{payload}' \\\n", - " -H \"Host: kf-income.admin.example.com\" \\\n", - " -H \"Content-Type: application/json\" \\\n", - " http://kfserving-ingressgateway.istio-system/v1/models/kf-income:predict\n", - "\"\"\"\n", - "ret = Popen(cmd, shell=True,stdout=PIPE)\n", - "raw = ret.stdout.read().decode(\"utf-8\")\n", - "print(raw)\n", - "res=json.loads(raw)\n", - "arr=np.array(res[\"predictions\"])\n", - "if arr[0] > 0:\n", - " print(\"Prediction: High Income\")\n", - "else:\n", - " print(\"Prediction: Low Income\")" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Make an explanation request" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "payload='{\"instances\": [[53,4,0,2,8,4,4,0,0,0,60,9]]}'\n", - "cmd=f\"\"\"curl -v -d '{payload}' \\\n", - " -H \"Host: kf-income.admin.example.com\" \\\n", - " -H \"Content-Type: application/json\" \\\n", - " http://kfserving-ingressgateway.istio-system/v1/models/kf-income:explain\n", - "\"\"\"\n", - "ret = Popen(cmd, shell=True,stdout=PIPE)\n", - "raw = ret.stdout.read().decode(\"utf-8\")\n", - "res=json.loads(raw)\n", - "print(res[\"names\"])" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Deploy Outier Detector" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:deploy_outlier", - "prev:build_outlier", - "prev:test_model" - ] - }, - "outputs": [], - "source": [ - "outlier_yaml=f\"\"\"apiVersion: serving.knative.dev/v1\n", - "kind: Service\n", - "metadata:\n", - " name: income-outlier\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " template:\n", - " metadata:\n", - " annotations:\n", - " autoscaling.knative.dev/minScale: \"1\"\n", - " spec:\n", - " containers:\n", - " - image: seldonio/alibi-detect-server:1.2.2-dev_alibidetect\n", - " imagePullPolicy: IfNotPresent\n", - " args:\n", - " - --model_name\n", - " - adultod\n", - " - --http_port\n", - " - '8080'\n", - " - --protocol\n", - " - tensorflow.http\n", - " - --storage_uri\n", - " - s3://{MINIO_MODEL_BUCKET}/{OUTLIER_MODEL_PATH}\n", - " - --reply_url\n", - " - http://default-broker \n", - " - --event_type\n", - " - org.kubeflow.serving.inference.outlier\n", - " - --event_source\n", - " - org.kubeflow.serving.incomeod\n", - " - OutlierDetector\n", - " envFrom:\n", - " - secretRef:\n", - " name: seldon-init-container-secret\n", - "\"\"\"\n", - "with open(\"outlier.yaml\",\"w\") as f:\n", - " f.write(outlier_yaml)\n", - "run(\"kubectl apply -f outlier.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "trigger_outlier_yaml=f\"\"\"apiVersion: eventing.knative.dev/v1alpha1\n", - "kind: Trigger\n", - "metadata:\n", - " name: income-outlier-trigger\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " filter:\n", - " sourceAndType:\n", - " type: org.kubeflow.serving.inference.request\n", - " subscriber:\n", - " 
ref:\n", - " apiVersion: serving.knative.dev/v1alpha1\n", - " kind: Service\n", - " name: income-outlier\n", - "\"\"\"\n", - "with open(\"outlier_trigger.yaml\",\"w\") as f:\n", - " f.write(trigger_outlier_yaml)\n", - "run(\"kubectl apply -f outlier_trigger.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "run(f\"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l serving.knative.dev/service=income-outlier -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})\", shell=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Deploy KNative Eventing Event Display" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:deploy_event_display", - "prev:deploy_outlier" - ] - }, - "outputs": [], - "source": [ - "event_display=f\"\"\"apiVersion: apps/v1\n", - "kind: Deployment\n", - "metadata:\n", - " name: event-display\n", - " namespace: {DEPLOY_NAMESPACE} \n", - "spec:\n", - " replicas: 1\n", - " selector:\n", - " matchLabels: &labels\n", - " app: event-display\n", - " template:\n", - " metadata:\n", - " labels: *labels\n", - " spec:\n", - " containers:\n", - " - name: helloworld-go\n", - " # Source code: https://github.com/knative/eventing-contrib/tree/master/cmd/event_display\n", - " image: gcr.io/knative-releases/knative.dev/eventing-contrib/cmd/event_display@sha256:f4628e97a836c77ed38bd3b6fd3d0b06de4d5e7db6704772fe674d48b20bd477\n", - "---\n", - "kind: Service\n", - "apiVersion: v1\n", - "metadata:\n", - " name: event-display\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " selector:\n", - " app: event-display\n", - " ports:\n", - " - protocol: TCP\n", - " port: 80\n", - " targetPort: 8080\n", - "---\n", - "apiVersion: eventing.knative.dev/v1alpha1\n", - "kind: Trigger\n", - "metadata:\n", - " name: income-outlier-display\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " broker: default\n", - " filter:\n", - " attributes:\n", - " type: org.kubeflow.serving.inference.outlier\n", - " subscriber:\n", - " ref:\n", - " apiVersion: v1\n", - " kind: Service\n", - " name: event-display\n", - "\"\"\"\n", - "with open(\"event_display.yaml\",\"w\") as f:\n", - " f.write(event_display)\n", - "run(\"kubectl apply -f event_display.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "run(f\"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/event-display -n {DEPLOY_NAMESPACE}\", shell=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Test Outlier Detection" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:test_outliers", - "prev:deploy_event_display" - ] - }, - "outputs": [], - "source": [ - "def predict():\n", - " payload='{\"instances\": [[300, 4, 4, 2, 1, 4, 4, 0, 0, 0, 600, 9]]}'\n", - " cmd=f\"\"\"curl -v -d '{payload}' \\\n", - " -H \"Host: kf-income.admin.example.com\" \\\n", - " -H \"Content-Type: application/json\" \\\n", - " http://kfserving-ingressgateway.istio-system/v1/models/kf-income:predict\n", - " \"\"\"\n", - " ret = Popen(cmd, shell=True,stdout=PIPE)\n", - " raw = ret.stdout.read().decode(\"utf-8\")\n", - " print(raw)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - 
"def get_outlier_event_display_logs():\n", - " cmd=f\"kubectl logs $(kubectl get pod -l app=event-display -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE}) -n {DEPLOY_NAMESPACE}\"\n", - " ret = Popen(cmd, shell=True,stdout=PIPE)\n", - " res = ret.stdout.read().decode(\"utf-8\").split(\"\\n\")\n", - " data= []\n", - " for i in range(0,len(res)):\n", - " if res[i] == 'Data,':\n", - " j = json.loads(json.loads(res[i+1]))\n", - " if \"is_outlier\"in j[\"data\"].keys():\n", - " data.append(j)\n", - " if len(data) > 0:\n", - " return data[-1]\n", - " else:\n", - " return None\n", - "j = None\n", - "while j is None:\n", - " predict()\n", - " print(\"Waiting for outlier logs, sleeping\")\n", - " time.sleep(2)\n", - " j = get_outlier_event_display_logs()\n", - " \n", - "print(j)\n", - "print(\"Outlier\",j[\"data\"][\"is_outlier\"]==[1])" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Clean Up Resources" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "skip" - ] - }, - "outputs": [], - "source": [ - "run(f\"kubectl delete inferenceservice kf-income -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete ksvc income-outlier -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete sa minio-kf-sa -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete secret seldon-init-container-secret -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete secret income-kf-secret -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete deployment event-display -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete svc event-display -n {DEPLOY_NAMESPACE}\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "kubeflow_notebook": { - "docker_image": "seldonio/jupyter-lab-alibi-kale:0.11", - "experiment": { - "id": "new", - "name": "kfserving-e2e-adult" - }, - "experiment_name": "kfserving-e2e-adult", - "katib_metadata": { - "algorithm": { - "algorithmName": "grid" - }, - "maxFailedTrialCount": 3, - "maxTrialCount": 12, - "objective": { - "objectiveMetricName": "", - "type": "minimize" - }, - "parallelTrialCount": 3, - "parameters": [] - }, - "katib_run": false, - "pipeline_description": "KFServing e2e adult", - "pipeline_name": "kfserving-e2e-adult", - "volumes": [] - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.8" - }, - "varInspector": { - "cols": { - "lenName": 16, - "lenType": 16, - "lenVar": 40 - }, - "kernels_config": { - "python": { - "delete_cmd_postfix": "", - "delete_cmd_prefix": "del ", - "library": "var_list.py", - "varRefreshCmd": "print(var_dic_list())" - }, - "r": { - "delete_cmd_postfix": ") ", - "delete_cmd_prefix": "rm(", - "library": "var_list.r", - "varRefreshCmd": "cat(var_dic_list()) " - } - }, - "types_to_exclude": [ - "module", - "function", - "builtin_function_or_method", - "instance", - "_Feature" - ], - "window_display": false - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/samples/contrib/e2e-outlier-drift-explainer/kfserving/kfserving_e2e_adult.kale.default.py 
b/samples/contrib/e2e-outlier-drift-explainer/kfserving/kfserving_e2e_adult.kale.default.py deleted file mode 100644 index c2a8773b38a..00000000000 --- a/samples/contrib/e2e-outlier-drift-explainer/kfserving/kfserving_e2e_adult.kale.default.py +++ /dev/null @@ -1,1425 +0,0 @@ -import kfp.dsl as dsl -import json -import kfp.components as comp -from collections import OrderedDict -from kubernetes import client as k8s_client - - -def setup(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - minioClient = get_minio() - buckets = minioClient.list_buckets() - for bucket in buckets: - print(bucket.name, bucket.creation_date) - ''' - - block4 = ''' - if not minioClient.bucket_exists(MINIO_MODEL_BUCKET): - minioClient.make_bucket(MINIO_MODEL_BUCKET) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/setup.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('setup') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def build_model(INCOME_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - INCOME_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(INCOME_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import 
create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - adult = fetch_adult() - adult.keys() - ''' - - block4 = ''' - data = adult.data - target = adult.target - feature_names = adult.feature_names - category_map = adult.category_map - ''' - - block5 = ''' - from alibi.utils.data import gen_category_map - ''' - - block6 = ''' - np.random.seed(0) - data_perm = np.random.permutation(np.c_[data, target]) - data = data_perm[:,:-1] - target = data_perm[:,-1] - ''' - - block7 = ''' - idx = 30000 - X_train,Y_train = data[:idx,:], target[:idx] - X_test, Y_test = data[idx+1:,:], target[idx+1:] - ''' - - block8 = ''' - ordinal_features = [x for x in range(len(feature_names)) if x not in list(category_map.keys())] - ordinal_transformer = Pipeline(steps=[('imputer', SimpleImputer(strategy='median')), - ('scaler', StandardScaler())]) - ''' - - block9 = ''' - categorical_features = list(category_map.keys()) - categorical_transformer = Pipeline(steps=[('imputer', SimpleImputer(strategy='median')), - ('onehot', OneHotEncoder(handle_unknown='ignore'))]) - ''' - - block10 = ''' - preprocessor = ColumnTransformer(transformers=[('num', ordinal_transformer, ordinal_features), - ('cat', categorical_transformer, categorical_features)]) - ''' - - block11 = ''' - np.random.seed(0) - clf = RandomForestClassifier(n_estimators=50) - ''' - - block12 = ''' - model=Pipeline(steps=[("preprocess",preprocessor),("model",clf)]) - model.fit(X_train,Y_train) - ''' - - block13 = ''' - def predict_fn(x): - return model.predict(x) - ''' - - block14 = ''' - #predict_fn = lambda x: clf.predict(preprocessor.transform(x)) - print('Train accuracy: ', accuracy_score(Y_train, predict_fn(X_train))) - print('Test accuracy: ', accuracy_score(Y_test, predict_fn(X_test))) - ''' - - block15 = ''' - dump(model, 'model.joblib') - ''' - - block16 = ''' - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{INCOME_MODEL_PATH}/model.joblib", 'model.joblib')) - ''' - - data_saving_block = ''' - # -----------------------DATA SAVING START--------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.save(X_test, "X_test") - _kale_marshal_utils.save(X_train, "X_train") - _kale_marshal_utils.save(Y_train, "Y_train") - _kale_marshal_utils.save(adult, "adult") - _kale_marshal_utils.save(category_map, "category_map") - _kale_marshal_utils.save(feature_names, "feature_names") - _kale_marshal_utils.save(model, "model") - _kale_marshal_utils.save(predict_fn, "predict_fn") - # -----------------------DATA SAVING END----------------------------------- - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - block9, - block10, - block11, - block12, - block13, - block14, - block15, - block16, - data_saving_block) - html_artifact = _kale_run_code(blocks) - with open("/build_model.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('build_model') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def build_outlier(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str, OUTLIER_MODEL_PATH: str): - 
pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - OUTLIER_MODEL_PATH = "{}" - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_train = _kale_marshal_utils.load("X_train") - Y_train = _kale_marshal_utils.load("Y_train") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - from alibi_detect.od import IForest - - od = IForest( - threshold=0., - n_estimators=200, - ) - ''' - - block4 = ''' - od.fit(X_train) - ''' - - block5 = ''' - np.random.seed(0) - perc_outlier = 5 - threshold_batch = create_outlier_batch(X_train, Y_train, n_samples=1000, perc_outlier=perc_outlier) - X_threshold, y_threshold = threshold_batch.data.astype('float'), threshold_batch.target - #X_threshold = (X_threshold - mean) / stdev - print('{}% outliers'.format(100 * y_threshold.mean())) - ''' - - block6 = ''' - od.infer_threshold(X_threshold, threshold_perc=100-perc_outlier) - print('New threshold: {}'.format(od.threshold)) - threshold = od.threshold - ''' - - block7 = ''' - X_outlier = [[300, 4, 4, 2, 1, 4, 4, 0, 0, 0, 600, 9]] - ''' - - block8 = ''' - od.predict( - X_outlier - ) - ''' - - block9 = ''' - from alibi_detect.utils.saving import save_detector, load_detector - from os import listdir - from os.path import isfile, join - - filepath="ifoutlier" - save_detector(od, filepath) - onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{OUTLIER_MODEL_PATH}/{filename}", join(filepath, filename))) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - block9, - ) - html_artifact = _kale_run_code(blocks) - with open("/build_outlier.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('build_outlier') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def train_explainer(EXPLAINER_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, 
MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - EXPLAINER_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(EXPLAINER_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_train = _kale_marshal_utils.load("X_train") - category_map = _kale_marshal_utils.load("category_map") - feature_names = _kale_marshal_utils.load("feature_names") - model = _kale_marshal_utils.load("model") - predict_fn = _kale_marshal_utils.load("predict_fn") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - model.predict(X_train) - explainer = AnchorTabular(predict_fn, feature_names, categorical_names=category_map) - ''' - - block4 = ''' - explainer.fit(X_train, disc_perc=[25, 50, 75]) - ''' - - block5 = ''' - with open("explainer.dill", "wb") as dill_file: - dill.dump(explainer, dill_file) - dill_file.close() - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{EXPLAINER_MODEL_PATH}/explainer.dill", 'explainer.dill')) - ''' - - data_saving_block = ''' - # -----------------------DATA SAVING START--------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.save(X_train, "X_train") - _kale_marshal_utils.save(explainer, "explainer") - _kale_marshal_utils.save(model, "model") - # -----------------------DATA SAVING END----------------------------------- - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - block5, - data_saving_block) - html_artifact = _kale_run_code(blocks) - with open("/train_explainer.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('train_explainer') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_kfserving(DEPLOY_NAMESPACE: str, EXPLAINER_MODEL_PATH: str, INCOME_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - EXPLAINER_MODEL_PATH = 
"{}" - INCOME_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, EXPLAINER_MODEL_PATH, INCOME_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - secret=f"""apiVersion: v1 - kind: Secret - metadata: - name: income-kf-secret - namespace: {DEPLOY_NAMESPACE} - annotations: - serving.kubeflow.org/s3-endpoint: {MINIO_HOST} # replace with your s3 endpoint - serving.kubeflow.org/s3-usehttps: "0" # by default 1, for testing with minio you need to set to 0 - type: Opaque - stringData: - awsAccessKeyID: {MINIO_ACCESS_KEY} - awsSecretAccessKey: {MINIO_SECRET_KEY} - """ - with open("secret.yaml","w") as f: - f.write(secret) - run("kubectl apply -f secret.yaml", shell=True) - ''' - - block4 = ''' - secret = f"""apiVersion: v1 - kind: Secret - metadata: - name: seldon-init-container-secret - namespace: {DEPLOY_NAMESPACE} - type: Opaque - stringData: - AWS_ACCESS_KEY_ID: {MINIO_ACCESS_KEY} - AWS_SECRET_ACCESS_KEY: {MINIO_SECRET_KEY} - AWS_ENDPOINT_URL: http://{MINIO_HOST} - USE_SSL: "false" - """ - with open("secret.yaml","w") as f: - f.write(secret) - run("cat secret.yaml | kubectl apply -f -", shell=True) - ''' - - block5 = ''' - sa = f"""apiVersion: v1 - kind: ServiceAccount - metadata: - name: minio-kf-sa - namespace: {DEPLOY_NAMESPACE} - secrets: - - name: income-kf-secret - """ - with open("sa.yaml","w") as f: - f.write(sa) - run("kubectl apply -f sa.yaml", shell=True) - ''' - - block6 = ''' - from kubernetes import client - from kfserving import KFServingClient - from kfserving import constants - from kfserving import utils - from kfserving import V1alpha2EndpointSpec - from kfserving import V1alpha2PredictorSpec - from kfserving import V1alpha2ExplainerSpec - from kfserving import V1alpha2AlibiExplainerSpec - from kfserving import V1alpha2SKLearnSpec - from kfserving import V1alpha2InferenceServiceSpec - from kfserving import V1alpha2InferenceService - from kfserving import V1alpha2Logger - from kubernetes.client import V1ResourceRequirements - - api_version = constants.KFSERVING_GROUP + '/' + constants.KFSERVING_VERSION - default_endpoint_spec = V1alpha2EndpointSpec( - predictor=V1alpha2PredictorSpec( - service_account_name='minio-kf-sa', - sklearn=V1alpha2SKLearnSpec( - storage_uri='s3://'+MINIO_MODEL_BUCKET+'/'+ INCOME_MODEL_PATH, - resources=V1ResourceRequirements( - requests={'cpu':'100m','memory':'1Gi'}, - limits={'cpu':'100m', 'memory':'1Gi'})), - logger=V1alpha2Logger( - mode='all' - )), - explainer=V1alpha2ExplainerSpec( - 
service_account_name='minio-kf-sa', - alibi=V1alpha2AlibiExplainerSpec( - type='AnchorTabular', - storage_uri='s3://'+MINIO_MODEL_BUCKET+'/'+ EXPLAINER_MODEL_PATH, - resources=V1ResourceRequirements( - requests={'cpu':'100m','memory':'1Gi'}, - limits={'cpu':'100m', 'memory':'1Gi'})))) - - isvc = V1alpha2InferenceService(api_version=api_version, - kind=constants.KFSERVING_KIND, - metadata=client.V1ObjectMeta( - name='kf-income', namespace=DEPLOY_NAMESPACE), - spec=V1alpha2InferenceServiceSpec(default=default_endpoint_spec)) - ''' - - block7 = ''' - KFServing = KFServingClient() - KFServing.create(isvc) - ''' - - block8 = ''' - KFServing.get('kf-cifar10', namespace=DEPLOY_NAMESPACE, watch=True, timeout_seconds=120) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_kfserving.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_kfserving') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def test_model(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - payload='{"instances": [[53,4,0,2,8,4,4,0,0,0,60,9]]}' - cmd=f"""curl -v -d '{payload}' \\ - -H "Host: kf-income.admin.example.com" \\ - -H "Content-Type: application/json" \\ - http://kfserving-ingressgateway.istio-system/v1/models/kf-income:predict - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - print(raw) - res=json.loads(raw) - arr=np.array(res["predictions"]) - if arr[0] > 0: - print("Prediction: High Income") - else: - print("Prediction: Low Income") - ''' - - block4 = ''' - payload='{"instances": [[53,4,0,2,8,4,4,0,0,0,60,9]]}' - cmd=f"""curl -v -d '{payload}' \\ - -H "Host: kf-income.admin.example.com" \\ - -H "Content-Type: application/json" \\ - http://kfserving-ingressgateway.istio-system/v1/models/kf-income:explain - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - res=json.loads(raw) - print(res["names"]) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as 
_kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/test_model.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('test_model') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_outlier(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str, OUTLIER_MODEL_PATH: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - OUTLIER_MODEL_PATH = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - outlier_yaml=f"""apiVersion: serving.knative.dev/v1 - kind: Service - metadata: - name: income-outlier - namespace: {DEPLOY_NAMESPACE} - spec: - template: - metadata: - annotations: - autoscaling.knative.dev/minScale: "1" - spec: - containers: - - image: seldonio/alibi-detect-server:1.2.2-dev_alibidetect - imagePullPolicy: IfNotPresent - args: - - --model_name - - adultod - - --http_port - - '8080' - - --protocol - - tensorflow.http - - --storage_uri - - s3://{MINIO_MODEL_BUCKET}/{OUTLIER_MODEL_PATH} - - --reply_url - - http://default-broker - - --event_type - - org.kubeflow.serving.inference.outlier - - --event_source - - org.kubeflow.serving.incomeod - - OutlierDetector - envFrom: - - secretRef: - name: seldon-init-container-secret - """ - with open("outlier.yaml","w") as f: - f.write(outlier_yaml) - run("kubectl apply -f outlier.yaml", shell=True) - ''' - - block4 = ''' - trigger_outlier_yaml=f"""apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: income-outlier-trigger - namespace: {DEPLOY_NAMESPACE} - spec: - filter: - sourceAndType: - type: org.kubeflow.serving.inference.request - subscriber: - ref: - apiVersion: serving.knative.dev/v1alpha1 - kind: Service - name: income-outlier - """ - with open("outlier_trigger.yaml","w") as f: - f.write(trigger_outlier_yaml) - run("kubectl apply -f outlier_trigger.yaml", shell=True) - ''' - - block5 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l serving.knative.dev/service=income-outlier -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - 
block1, - block2, - block3, - block4, - block5, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_outlier.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_outlier') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_event_display(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - event_display=f"""apiVersion: apps/v1 - kind: Deployment - metadata: - name: event-display - namespace: {DEPLOY_NAMESPACE} - spec: - replicas: 1 - selector: - matchLabels: &labels - app: event-display - template: - metadata: - labels: *labels - spec: - containers: - - name: helloworld-go - # Source code: https://github.com/knative/eventing-contrib/tree/master/cmd/event_display - image: gcr.io/knative-releases/knative.dev/eventing-contrib/cmd/event_display@sha256:f4628e97a836c77ed38bd3b6fd3d0b06de4d5e7db6704772fe674d48b20bd477 - --- - kind: Service - apiVersion: v1 - metadata: - name: event-display - namespace: {DEPLOY_NAMESPACE} - spec: - selector: - app: event-display - ports: - - protocol: TCP - port: 80 - targetPort: 8080 - --- - apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: income-outlier-display - namespace: {DEPLOY_NAMESPACE} - spec: - broker: default - filter: - attributes: - type: org.kubeflow.serving.inference.outlier - subscriber: - ref: - apiVersion: v1 - kind: Service - name: event-display - """ - with open("event_display.yaml","w") as f: - f.write(event_display) - run("kubectl apply -f event_display.yaml", shell=True) - ''' - - block4 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/event-display -n {DEPLOY_NAMESPACE}", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_event_display.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_event_display') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def test_outliers(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = 
"{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - def predict(): - payload='{"instances": [[300, 4, 4, 2, 1, 4, 4, 0, 0, 0, 600, 9]]}' - cmd=f"""curl -v -d '{payload}' \\ - -H "Host: kf-income.admin.example.com" \\ - -H "Content-Type: application/json" \\ - http://kfserving-ingressgateway.istio-system/v1/models/kf-income:predict - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - print(raw) - ''' - - block4 = ''' - def get_outlier_event_display_logs(): - cmd=f"kubectl logs $(kubectl get pod -l app=event-display -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE}) -n {DEPLOY_NAMESPACE}" - ret = Popen(cmd, shell=True,stdout=PIPE) - res = ret.stdout.read().decode("utf-8").split("\\n") - data= [] - for i in range(0,len(res)): - if res[i] == 'Data,': - j = json.loads(json.loads(res[i+1])) - if "is_outlier"in j["data"].keys(): - data.append(j) - if len(data) > 0: - return data[-1] - else: - return None - j = None - while j is None: - predict() - print("Waiting for outlier logs, sleeping") - time.sleep(2) - j = get_outlier_event_display_logs() - - print(j) - print("Outlier",j["data"]["is_outlier"]==[1]) - ''' - - block5 = ''' - - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - ) - html_artifact = _kale_run_code(blocks) - with open("/test_outliers.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('test_outliers') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def explain(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_test = _kale_marshal_utils.load("X_test") - X_train = _kale_marshal_utils.load("X_train") - adult = _kale_marshal_utils.load("adult") - explainer = _kale_marshal_utils.load("explainer") - model = _kale_marshal_utils.load("model") - # 
-----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - model.predict(X_train) - idx = 0 - class_names = adult.target_names - print('Prediction: ', class_names[explainer.predict_fn(X_test[idx].reshape(1, -1))[0]]) - ''' - - block4 = ''' - explanation = explainer.explain(X_test[idx], threshold=0.95) - print('Anchor: %s' % (' AND '.join(explanation['names']))) - print('Precision: %.2f' % explanation['precision']) - print('Coverage: %.2f' % explanation['coverage']) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/explain.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('explain') - - _kale_mlmd_utils.call("mark_execution_complete") - - -setup_op = comp.func_to_container_op( - setup, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -build_model_op = comp.func_to_container_op( - build_model, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -build_outlier_op = comp.func_to_container_op( - build_outlier, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -train_explainer_op = comp.func_to_container_op( - train_explainer, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_kfserving_op = comp.func_to_container_op( - deploy_kfserving, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -test_model_op = comp.func_to_container_op( - test_model, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_outlier_op = comp.func_to_container_op( - deploy_outlier, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_event_display_op = comp.func_to_container_op( - deploy_event_display, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -test_outliers_op = comp.func_to_container_op( - test_outliers, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -explain_op = comp.func_to_container_op( - explain, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -@dsl.pipeline( - name='kfserving-e2e-adult-sfmg7', - description='KFServing e2e adult' -) -def auto_generated_pipeline(DEPLOY_NAMESPACE='admin', EXPLAINER_MODEL_PATH='sklearn/income/explainer', INCOME_MODEL_PATH='sklearn/income/model', MINIO_ACCESS_KEY='minio', MINIO_HOST='minio-service.kubeflow:9000', MINIO_MODEL_BUCKET='seldon', MINIO_SECRET_KEY='minio123', OUTLIER_MODEL_PATH='sklearn/income/outlier'): - pvolumes_dict = OrderedDict() - volume_step_names = [] - volume_name_parameters = [] - - marshal_vop = dsl.VolumeOp( - 
name="kale-marshal-volume", - resource_name="kale-marshal-pvc", - modes=dsl.VOLUME_MODE_RWM, - size="1Gi" - ) - volume_step_names.append(marshal_vop.name) - volume_name_parameters.append(marshal_vop.outputs["name"].full_name) - pvolumes_dict['/marshal'] = marshal_vop.volume - - volume_step_names.sort() - volume_name_parameters.sort() - - setup_task = setup_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after() - setup_task.container.working_dir = "/home/jovyan" - setup_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'setup': '/setup.html'}) - setup_task.output_artifact_paths.update(output_artifacts) - setup_task.add_pod_label("pipelines.kubeflow.org/metadata_written", "true") - dep_names = setup_task.dependent_names + volume_step_names - setup_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - setup_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - build_model_task = build_model_op(INCOME_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(setup_task) - build_model_task.container.working_dir = "/home/jovyan" - build_model_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'build_model': '/build_model.html'}) - build_model_task.output_artifact_paths.update(output_artifacts) - build_model_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = build_model_task.dependent_names + volume_step_names - build_model_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - build_model_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - build_outlier_task = build_outlier_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH)\ - .add_pvolumes(pvolumes_dict)\ - .after(build_model_task) - build_outlier_task.container.working_dir = "/home/jovyan" - build_outlier_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'build_outlier': '/build_outlier.html'}) - build_outlier_task.output_artifact_paths.update(output_artifacts) - build_outlier_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = build_outlier_task.dependent_names + volume_step_names - build_outlier_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - build_outlier_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - train_explainer_task = train_explainer_op(EXPLAINER_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(build_model_task) - train_explainer_task.container.working_dir = "/home/jovyan" - train_explainer_task.container.set_security_context( - 
k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'train_explainer': '/train_explainer.html'}) - train_explainer_task.output_artifact_paths.update(output_artifacts) - train_explainer_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = train_explainer_task.dependent_names + volume_step_names - train_explainer_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - train_explainer_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_kfserving_task = deploy_kfserving_op(DEPLOY_NAMESPACE, EXPLAINER_MODEL_PATH, INCOME_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_explainer_task) - deploy_kfserving_task.container.working_dir = "/home/jovyan" - deploy_kfserving_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'deploy_kfserving': '/deploy_kfserving.html'}) - deploy_kfserving_task.output_artifact_paths.update(output_artifacts) - deploy_kfserving_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_kfserving_task.dependent_names + volume_step_names - deploy_kfserving_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_kfserving_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - test_model_task = test_model_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_kfserving_task) - test_model_task.container.working_dir = "/home/jovyan" - test_model_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'test_model': '/test_model.html'}) - test_model_task.output_artifact_paths.update(output_artifacts) - test_model_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = test_model_task.dependent_names + volume_step_names - test_model_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - test_model_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_outlier_task = deploy_outlier_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH)\ - .add_pvolumes(pvolumes_dict)\ - .after(build_outlier_task, test_model_task) - deploy_outlier_task.container.working_dir = "/home/jovyan" - deploy_outlier_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'deploy_outlier': '/deploy_outlier.html'}) - deploy_outlier_task.output_artifact_paths.update(output_artifacts) - deploy_outlier_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_outlier_task.dependent_names 
+ volume_step_names - deploy_outlier_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_outlier_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_event_display_task = deploy_event_display_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_outlier_task) - deploy_event_display_task.container.working_dir = "/home/jovyan" - deploy_event_display_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'deploy_event_display': '/deploy_event_display.html'}) - deploy_event_display_task.output_artifact_paths.update(output_artifacts) - deploy_event_display_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_event_display_task.dependent_names + volume_step_names - deploy_event_display_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_event_display_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - test_outliers_task = test_outliers_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_event_display_task) - test_outliers_task.container.working_dir = "/home/jovyan" - test_outliers_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'test_outliers': '/test_outliers.html'}) - test_outliers_task.output_artifact_paths.update(output_artifacts) - test_outliers_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = test_outliers_task.dependent_names + volume_step_names - test_outliers_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - test_outliers_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - explain_task = explain_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_explainer_task) - explain_task.container.working_dir = "/home/jovyan" - explain_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'explain': '/explain.html'}) - explain_task.output_artifact_paths.update(output_artifacts) - explain_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = explain_task.dependent_names + volume_step_names - explain_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - explain_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - -if __name__ == "__main__": - pipeline_func = auto_generated_pipeline - pipeline_filename = pipeline_func.__name__ + '.pipeline.tar.gz' - import kfp.compiler as compiler - compiler.Compiler().compile(pipeline_func, pipeline_filename) - - # Get or create an experiment 
and submit a pipeline run - import kfp - client = kfp.Client() - experiment = client.create_experiment('kfserving-e2e-adult') - - # Submit a pipeline run - from kale.utils.kfp_utils import generate_run_name - run_name = generate_run_name('kfserving-e2e-adult-sfmg7') - run_result = client.run_pipeline( - experiment.id, run_name, pipeline_filename, {}) diff --git a/samples/contrib/e2e-outlier-drift-explainer/kfserving/kfserving_e2e_adult.kale.gcp.py b/samples/contrib/e2e-outlier-drift-explainer/kfserving/kfserving_e2e_adult.kale.gcp.py deleted file mode 100644 index d91913b3264..00000000000 --- a/samples/contrib/e2e-outlier-drift-explainer/kfserving/kfserving_e2e_adult.kale.gcp.py +++ /dev/null @@ -1,1426 +0,0 @@ -import kfp.dsl as dsl -import json -import kfp.components as comp -from collections import OrderedDict -from kubernetes import client as k8s_client - - -def setup(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - minioClient = get_minio() - buckets = minioClient.list_buckets() - for bucket in buckets: - print(bucket.name, bucket.creation_date) - ''' - - block4 = ''' - if not minioClient.bucket_exists(MINIO_MODEL_BUCKET): - minioClient.make_bucket(MINIO_MODEL_BUCKET) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/setup.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('setup') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def build_model(INCOME_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - INCOME_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(INCOME_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import 
SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - adult = fetch_adult() - adult.keys() - ''' - - block4 = ''' - data = adult.data - target = adult.target - feature_names = adult.feature_names - category_map = adult.category_map - ''' - - block5 = ''' - from alibi.utils.data import gen_category_map - ''' - - block6 = ''' - np.random.seed(0) - data_perm = np.random.permutation(np.c_[data, target]) - data = data_perm[:,:-1] - target = data_perm[:,-1] - ''' - - block7 = ''' - idx = 30000 - X_train,Y_train = data[:idx,:], target[:idx] - X_test, Y_test = data[idx+1:,:], target[idx+1:] - ''' - - block8 = ''' - ordinal_features = [x for x in range(len(feature_names)) if x not in list(category_map.keys())] - ordinal_transformer = Pipeline(steps=[('imputer', SimpleImputer(strategy='median')), - ('scaler', StandardScaler())]) - ''' - - block9 = ''' - categorical_features = list(category_map.keys()) - categorical_transformer = Pipeline(steps=[('imputer', SimpleImputer(strategy='median')), - ('onehot', OneHotEncoder(handle_unknown='ignore'))]) - ''' - - block10 = ''' - preprocessor = ColumnTransformer(transformers=[('num', ordinal_transformer, ordinal_features), - ('cat', categorical_transformer, categorical_features)]) - ''' - - block11 = ''' - np.random.seed(0) - clf = RandomForestClassifier(n_estimators=50) - ''' - - block12 = ''' - model=Pipeline(steps=[("preprocess",preprocessor),("model",clf)]) - model.fit(X_train,Y_train) - ''' - - block13 = ''' - def predict_fn(x): - return model.predict(x) - ''' - - block14 = ''' - #predict_fn = lambda x: clf.predict(preprocessor.transform(x)) - print('Train accuracy: ', accuracy_score(Y_train, predict_fn(X_train))) - print('Test accuracy: ', accuracy_score(Y_test, predict_fn(X_test))) - ''' - - block15 = ''' - dump(model, 'model.joblib') - ''' - - block16 = ''' - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{INCOME_MODEL_PATH}/model.joblib", 'model.joblib')) - ''' - - data_saving_block = ''' - # -----------------------DATA SAVING START--------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.save(X_test, "X_test") - _kale_marshal_utils.save(X_train, "X_train") - _kale_marshal_utils.save(Y_train, "Y_train") - _kale_marshal_utils.save(adult, "adult") - _kale_marshal_utils.save(category_map, "category_map") - _kale_marshal_utils.save(feature_names, "feature_names") - _kale_marshal_utils.save(model, "model") - _kale_marshal_utils.save(predict_fn, "predict_fn") - # -----------------------DATA SAVING END----------------------------------- - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - block9, - block10, - 
block11, - block12, - block13, - block14, - block15, - block16, - data_saving_block) - html_artifact = _kale_run_code(blocks) - with open("/build_model.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('build_model') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def build_outlier(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str, OUTLIER_MODEL_PATH: str): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - OUTLIER_MODEL_PATH = "{}" - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_train = _kale_marshal_utils.load("X_train") - Y_train = _kale_marshal_utils.load("Y_train") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - from alibi_detect.od import IForest - - od = IForest( - threshold=0., - n_estimators=200, - ) - ''' - - block4 = ''' - od.fit(X_train) - ''' - - block5 = ''' - np.random.seed(0) - perc_outlier = 5 - threshold_batch = create_outlier_batch(X_train, Y_train, n_samples=1000, perc_outlier=perc_outlier) - X_threshold, y_threshold = threshold_batch.data.astype('float'), threshold_batch.target - #X_threshold = (X_threshold - mean) / stdev - print('{}% outliers'.format(100 * y_threshold.mean())) - ''' - - block6 = ''' - od.infer_threshold(X_threshold, threshold_perc=100-perc_outlier) - print('New threshold: {}'.format(od.threshold)) - threshold = od.threshold - ''' - - block7 = ''' - X_outlier = [[300, 4, 4, 2, 1, 4, 4, 0, 0, 0, 600, 9]] - ''' - - block8 = ''' - od.predict( - X_outlier - ) - ''' - - block9 = ''' - from alibi_detect.utils.saving import save_detector, load_detector - from os import listdir - from os.path import isfile, join - - filepath="ifoutlier" - save_detector(od, filepath) - onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{OUTLIER_MODEL_PATH}/{filename}", join(filepath, filename))) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, 
data_loading_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - block9, - ) - html_artifact = _kale_run_code(blocks) - with open("/build_outlier.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('build_outlier') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def train_explainer(EXPLAINER_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - EXPLAINER_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(EXPLAINER_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_train = _kale_marshal_utils.load("X_train") - category_map = _kale_marshal_utils.load("category_map") - feature_names = _kale_marshal_utils.load("feature_names") - model = _kale_marshal_utils.load("model") - predict_fn = _kale_marshal_utils.load("predict_fn") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - model.predict(X_train) - explainer = AnchorTabular(predict_fn, feature_names, categorical_names=category_map) - ''' - - block4 = ''' - explainer.fit(X_train, disc_perc=[25, 50, 75]) - ''' - - block5 = ''' - with open("explainer.dill", "wb") as dill_file: - dill.dump(explainer, dill_file) - dill_file.close() - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{EXPLAINER_MODEL_PATH}/explainer.dill", 'explainer.dill')) - ''' - - data_saving_block = ''' - # -----------------------DATA SAVING START--------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.save(X_train, "X_train") - _kale_marshal_utils.save(explainer, "explainer") - _kale_marshal_utils.save(model, "model") - # -----------------------DATA SAVING END----------------------------------- - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - block5, - data_saving_block) - html_artifact = _kale_run_code(blocks) - with 
open("/train_explainer.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('train_explainer') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_kfserving(DEPLOY_NAMESPACE: str, EXPLAINER_MODEL_PATH: str, INCOME_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - EXPLAINER_MODEL_PATH = "{}" - INCOME_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, EXPLAINER_MODEL_PATH, INCOME_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - secret=f"""apiVersion: v1 - kind: Secret - metadata: - name: income-kf-secret - namespace: {DEPLOY_NAMESPACE} - annotations: - serving.kubeflow.org/s3-endpoint: {MINIO_HOST} # replace with your s3 endpoint - serving.kubeflow.org/s3-usehttps: "0" # by default 1, for testing with minio you need to set to 0 - type: Opaque - stringData: - awsAccessKeyID: {MINIO_ACCESS_KEY} - awsSecretAccessKey: {MINIO_SECRET_KEY} - """ - with open("secret.yaml","w") as f: - f.write(secret) - run("kubectl apply -f secret.yaml", shell=True) - ''' - - block4 = ''' - secret = f"""apiVersion: v1 - kind: Secret - metadata: - name: seldon-init-container-secret - namespace: {DEPLOY_NAMESPACE} - type: Opaque - stringData: - AWS_ACCESS_KEY_ID: {MINIO_ACCESS_KEY} - AWS_SECRET_ACCESS_KEY: {MINIO_SECRET_KEY} - AWS_ENDPOINT_URL: http://{MINIO_HOST} - USE_SSL: "false" - """ - with open("secret.yaml","w") as f: - f.write(secret) - run("cat secret.yaml | kubectl apply -f -", shell=True) - ''' - - block5 = ''' - sa = f"""apiVersion: v1 - kind: ServiceAccount - metadata: - name: minio-kf-sa - namespace: {DEPLOY_NAMESPACE} - secrets: - - name: income-kf-secret - """ - with open("sa.yaml","w") as f: - f.write(sa) - run("kubectl apply -f sa.yaml", shell=True) - ''' - - block6 = ''' - from kubernetes import client - from kfserving import KFServingClient - from kfserving import constants - from kfserving import utils - from kfserving import V1alpha2EndpointSpec - from kfserving import V1alpha2PredictorSpec - from kfserving import V1alpha2ExplainerSpec - from kfserving import V1alpha2AlibiExplainerSpec - from kfserving import V1alpha2SKLearnSpec - from kfserving import V1alpha2InferenceServiceSpec - from kfserving import V1alpha2InferenceService - from kfserving import V1alpha2Logger - from kubernetes.client import V1ResourceRequirements - - api_version = constants.KFSERVING_GROUP + '/' + constants.KFSERVING_VERSION - 
default_endpoint_spec = V1alpha2EndpointSpec( - predictor=V1alpha2PredictorSpec( - service_account_name='minio-kf-sa', - sklearn=V1alpha2SKLearnSpec( - storage_uri='s3://'+MINIO_MODEL_BUCKET+'/'+ INCOME_MODEL_PATH, - resources=V1ResourceRequirements( - requests={'cpu':'100m','memory':'1Gi'}, - limits={'cpu':'100m', 'memory':'1Gi'})), - logger=V1alpha2Logger( - mode='all' - )), - explainer=V1alpha2ExplainerSpec( - service_account_name='minio-kf-sa', - alibi=V1alpha2AlibiExplainerSpec( - type='AnchorTabular', - storage_uri='s3://'+MINIO_MODEL_BUCKET+'/'+ EXPLAINER_MODEL_PATH, - resources=V1ResourceRequirements( - requests={'cpu':'100m','memory':'1Gi'}, - limits={'cpu':'100m', 'memory':'1Gi'})))) - - isvc = V1alpha2InferenceService(api_version=api_version, - kind=constants.KFSERVING_KIND, - metadata=client.V1ObjectMeta( - name='kf-income', namespace=DEPLOY_NAMESPACE), - spec=V1alpha2InferenceServiceSpec(default=default_endpoint_spec)) - ''' - - block7 = ''' - KFServing = KFServingClient() - KFServing.create(isvc) - ''' - - block8 = ''' - KFServing.get('kf-cifar10', namespace=DEPLOY_NAMESPACE, watch=True, timeout_seconds=120) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_kfserving.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_kfserving') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def test_model(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - payload='{"instances": [[53,4,0,2,8,4,4,0,0,0,60,9]]}' - cmd=f"""curl -v -d '{payload}' \\ - -H "Host: kf-income.admin.example.com" \\ - -H "Content-Type: application/json" \\ - http://kfserving-ingressgateway.istio-system/v1/models/kf-income:predict - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - print(raw) - res=json.loads(raw) - arr=np.array(res["predictions"]) - if arr[0] > 0: - print("Prediction: High Income") - else: - print("Prediction: Low Income") - ''' - - block4 = ''' - payload='{"instances": [[53,4,0,2,8,4,4,0,0,0,60,9]]}' - cmd=f"""curl -v -d '{payload}' \\ - -H "Host: kf-income.admin.example.com" \\ - -H "Content-Type: 
application/json" \\ - http://kfserving-ingressgateway.istio-system/v1/models/kf-income:explain - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - res=json.loads(raw) - print(res["names"]) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/test_model.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('test_model') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_outlier(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str, OUTLIER_MODEL_PATH: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - OUTLIER_MODEL_PATH = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - outlier_yaml=f"""apiVersion: serving.knative.dev/v1 - kind: Service - metadata: - name: income-outlier - namespace: {DEPLOY_NAMESPACE} - spec: - template: - metadata: - annotations: - autoscaling.knative.dev/minScale: "1" - spec: - containers: - - image: seldonio/alibi-detect-server:1.2.2-dev_alibidetect - imagePullPolicy: IfNotPresent - args: - - --model_name - - adultod - - --http_port - - '8080' - - --protocol - - tensorflow.http - - --storage_uri - - s3://{MINIO_MODEL_BUCKET}/{OUTLIER_MODEL_PATH} - - --reply_url - - http://default-broker - - --event_type - - org.kubeflow.serving.inference.outlier - - --event_source - - org.kubeflow.serving.incomeod - - OutlierDetector - envFrom: - - secretRef: - name: seldon-init-container-secret - """ - with open("outlier.yaml","w") as f: - f.write(outlier_yaml) - run("kubectl apply -f outlier.yaml", shell=True) - ''' - - block4 = ''' - trigger_outlier_yaml=f"""apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: income-outlier-trigger - namespace: {DEPLOY_NAMESPACE} - spec: - filter: - sourceAndType: - type: org.kubeflow.serving.inference.request - subscriber: - ref: - apiVersion: serving.knative.dev/v1alpha1 - kind: Service - name: income-outlier - """ - with open("outlier_trigger.yaml","w") as f: - f.write(trigger_outlier_yaml) - run("kubectl apply -f outlier_trigger.yaml", shell=True) - ''' - - block5 = ''' - run(f"kubectl rollout status -n 
{DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l serving.knative.dev/service=income-outlier -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_outlier.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_outlier') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_event_display(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - event_display=f"""apiVersion: apps/v1 - kind: Deployment - metadata: - name: event-display - namespace: {DEPLOY_NAMESPACE} - spec: - replicas: 1 - selector: - matchLabels: &labels - app: event-display - template: - metadata: - labels: *labels - spec: - containers: - - name: helloworld-go - # Source code: https://github.com/knative/eventing-contrib/tree/master/cmd/event_display - image: gcr.io/knative-releases/knative.dev/eventing-contrib/cmd/event_display@sha256:f4628e97a836c77ed38bd3b6fd3d0b06de4d5e7db6704772fe674d48b20bd477 - --- - kind: Service - apiVersion: v1 - metadata: - name: event-display - namespace: {DEPLOY_NAMESPACE} - spec: - selector: - app: event-display - ports: - - protocol: TCP - port: 80 - targetPort: 8080 - --- - apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: income-outlier-display - namespace: {DEPLOY_NAMESPACE} - spec: - broker: default - filter: - attributes: - type: org.kubeflow.serving.inference.outlier - subscriber: - ref: - apiVersion: v1 - kind: Service - name: event-display - """ - with open("event_display.yaml","w") as f: - f.write(event_display) - run("kubectl apply -f event_display.yaml", shell=True) - ''' - - block4 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/event-display -n {DEPLOY_NAMESPACE}", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - ) - html_artifact 
= _kale_run_code(blocks) - with open("/deploy_event_display.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_event_display') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def test_outliers(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - def predict(): - payload='{"instances": [[300, 4, 4, 2, 1, 4, 4, 0, 0, 0, 600, 9]]}' - cmd=f"""curl -v -d '{payload}' \\ - -H "Host: kf-income.admin.example.com" \\ - -H "Content-Type: application/json" \\ - http://kfserving-ingressgateway.istio-system/v1/models/kf-income:predict - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - print(raw) - ''' - - block4 = ''' - def get_outlier_event_display_logs(): - cmd=f"kubectl logs $(kubectl get pod -l app=event-display -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE}) -n {DEPLOY_NAMESPACE}" - ret = Popen(cmd, shell=True,stdout=PIPE) - res = ret.stdout.read().decode("utf-8").split("\\n") - data= [] - for i in range(0,len(res)): - if res[i] == 'Data,': - j = json.loads(json.loads(res[i+1])) - if "is_outlier"in j["data"].keys(): - data.append(j) - if len(data) > 0: - return data[-1] - else: - return None - j = None - while j is None: - predict() - print("Waiting for outlier logs, sleeping") - time.sleep(2) - j = get_outlier_event_display_logs() - - print(j) - print("Outlier",j["data"]["is_outlier"]==[1]) - ''' - - block5 = ''' - - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - ) - html_artifact = _kale_run_code(blocks) - with open("/test_outliers.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('test_outliers') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def explain(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - 
from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_test = _kale_marshal_utils.load("X_test") - X_train = _kale_marshal_utils.load("X_train") - adult = _kale_marshal_utils.load("adult") - explainer = _kale_marshal_utils.load("explainer") - model = _kale_marshal_utils.load("model") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - model.predict(X_train) - idx = 0 - class_names = adult.target_names - print('Prediction: ', class_names[explainer.predict_fn(X_test[idx].reshape(1, -1))[0]]) - ''' - - block4 = ''' - explanation = explainer.explain(X_test[idx], threshold=0.95) - print('Anchor: %s' % (' AND '.join(explanation['names']))) - print('Precision: %.2f' % explanation['precision']) - print('Coverage: %.2f' % explanation['coverage']) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/explain.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('explain') - - _kale_mlmd_utils.call("mark_execution_complete") - - -setup_op = comp.func_to_container_op( - setup, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -build_model_op = comp.func_to_container_op( - build_model, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -build_outlier_op = comp.func_to_container_op( - build_outlier, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -train_explainer_op = comp.func_to_container_op( - train_explainer, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_kfserving_op = comp.func_to_container_op( - deploy_kfserving, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -test_model_op = comp.func_to_container_op( - test_model, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_outlier_op = comp.func_to_container_op( - deploy_outlier, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_event_display_op = comp.func_to_container_op( - deploy_event_display, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -test_outliers_op = comp.func_to_container_op( - test_outliers, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -explain_op = comp.func_to_container_op( - explain, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -@dsl.pipeline( - name='kfserving-e2e-adult-sfmg7', - description='KFServing e2e adult' -) -def 
auto_generated_pipeline(DEPLOY_NAMESPACE='admin', EXPLAINER_MODEL_PATH='sklearn/income/explainer', INCOME_MODEL_PATH='sklearn/income/model', MINIO_ACCESS_KEY='minio', MINIO_HOST='minio-service.kubeflow:9000', MINIO_MODEL_BUCKET='seldon', MINIO_SECRET_KEY='minio123', OUTLIER_MODEL_PATH='sklearn/income/outlier'): - pvolumes_dict = OrderedDict() - volume_step_names = [] - volume_name_parameters = [] - - marshal_vop = dsl.VolumeOp( - name="kale-marshal-volume", - resource_name="kale-marshal-pvc", - storage_class="nfs-client", - modes=dsl.VOLUME_MODE_RWM, - size="1Gi" - ) - volume_step_names.append(marshal_vop.name) - volume_name_parameters.append(marshal_vop.outputs["name"].full_name) - pvolumes_dict['/marshal'] = marshal_vop.volume - - volume_step_names.sort() - volume_name_parameters.sort() - - setup_task = setup_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after() - setup_task.container.working_dir = "/home/jovyan" - setup_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'setup': '/setup.html'}) - setup_task.output_artifact_paths.update(output_artifacts) - setup_task.add_pod_label("pipelines.kubeflow.org/metadata_written", "true") - dep_names = setup_task.dependent_names + volume_step_names - setup_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - setup_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - build_model_task = build_model_op(INCOME_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(setup_task) - build_model_task.container.working_dir = "/home/jovyan" - build_model_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'build_model': '/build_model.html'}) - build_model_task.output_artifact_paths.update(output_artifacts) - build_model_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = build_model_task.dependent_names + volume_step_names - build_model_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - build_model_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - build_outlier_task = build_outlier_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH)\ - .add_pvolumes(pvolumes_dict)\ - .after(build_model_task) - build_outlier_task.container.working_dir = "/home/jovyan" - build_outlier_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'build_outlier': '/build_outlier.html'}) - build_outlier_task.output_artifact_paths.update(output_artifacts) - build_outlier_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = build_outlier_task.dependent_names + volume_step_names - build_outlier_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if 
volume_name_parameters: - build_outlier_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - train_explainer_task = train_explainer_op(EXPLAINER_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(build_model_task) - train_explainer_task.container.working_dir = "/home/jovyan" - train_explainer_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'train_explainer': '/train_explainer.html'}) - train_explainer_task.output_artifact_paths.update(output_artifacts) - train_explainer_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = train_explainer_task.dependent_names + volume_step_names - train_explainer_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - train_explainer_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_kfserving_task = deploy_kfserving_op(DEPLOY_NAMESPACE, EXPLAINER_MODEL_PATH, INCOME_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_explainer_task) - deploy_kfserving_task.container.working_dir = "/home/jovyan" - deploy_kfserving_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'deploy_kfserving': '/deploy_kfserving.html'}) - deploy_kfserving_task.output_artifact_paths.update(output_artifacts) - deploy_kfserving_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_kfserving_task.dependent_names + volume_step_names - deploy_kfserving_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_kfserving_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - test_model_task = test_model_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_kfserving_task) - test_model_task.container.working_dir = "/home/jovyan" - test_model_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'test_model': '/test_model.html'}) - test_model_task.output_artifact_paths.update(output_artifacts) - test_model_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = test_model_task.dependent_names + volume_step_names - test_model_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - test_model_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_outlier_task = deploy_outlier_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH)\ - .add_pvolumes(pvolumes_dict)\ - .after(build_outlier_task, test_model_task) - deploy_outlier_task.container.working_dir = "/home/jovyan" - 
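    # Each task in this pipeline gets the same Kale boilerplate: working_dir
    # points at the notebook image's home (/home/jovyan), the security context
    # runs the step as root (presumably so it can write to the shared /marshal
    # volume), and the mlpipeline-ui-metadata.json output artifact is what the
    # KFP UI reads to render the step's HTML report.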
deploy_outlier_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'deploy_outlier': '/deploy_outlier.html'}) - deploy_outlier_task.output_artifact_paths.update(output_artifacts) - deploy_outlier_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_outlier_task.dependent_names + volume_step_names - deploy_outlier_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_outlier_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_event_display_task = deploy_event_display_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_outlier_task) - deploy_event_display_task.container.working_dir = "/home/jovyan" - deploy_event_display_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'deploy_event_display': '/deploy_event_display.html'}) - deploy_event_display_task.output_artifact_paths.update(output_artifacts) - deploy_event_display_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_event_display_task.dependent_names + volume_step_names - deploy_event_display_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_event_display_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - test_outliers_task = test_outliers_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_event_display_task) - test_outliers_task.container.working_dir = "/home/jovyan" - test_outliers_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'test_outliers': '/test_outliers.html'}) - test_outliers_task.output_artifact_paths.update(output_artifacts) - test_outliers_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = test_outliers_task.dependent_names + volume_step_names - test_outliers_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - test_outliers_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - explain_task = explain_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_explainer_task) - explain_task.container.working_dir = "/home/jovyan" - explain_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'explain': '/explain.html'}) - explain_task.output_artifact_paths.update(output_artifacts) - explain_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = explain_task.dependent_names + volume_step_names - 
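    # The "kubeflow-kale.org/dependent-templates" and "volume-name-parameters"
    # pod annotations appear to be Kale bookkeeping: they record which upstream
    # templates (including the marshal VolumeOp) each step depends on, so the
    # notebook's dependency graph can be recovered from the compiled pipeline.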
explain_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - explain_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - -if __name__ == "__main__": - pipeline_func = auto_generated_pipeline - pipeline_filename = pipeline_func.__name__ + '.pipeline.tar.gz' - import kfp.compiler as compiler - compiler.Compiler().compile(pipeline_func, pipeline_filename) - - # Get or create an experiment and submit a pipeline run - import kfp - client = kfp.Client() - experiment = client.create_experiment('kfserving-e2e-adult') - - # Submit a pipeline run - from kale.utils.kfp_utils import generate_run_name - run_name = generate_run_name('kfserving-e2e-adult-sfmg7') - run_result = client.run_pipeline( - experiment.id, run_name, pipeline_filename, {}) diff --git a/samples/contrib/e2e-outlier-drift-explainer/kfserving/kfserving_e2e_cifar10.ipynb b/samples/contrib/e2e-outlier-drift-explainer/kfserving/kfserving_e2e_cifar10.ipynb deleted file mode 100644 index c08df49745d..00000000000 --- a/samples/contrib/e2e-outlier-drift-explainer/kfserving/kfserving_e2e_cifar10.ipynb +++ /dev/null @@ -1,1502 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "# KFServing Deployment Pipeline For CIFAR10 Image Recognition\n", - "\n", - "In this example (which is also a Kale Kubeflow pipeline) we create a deployment pipeline for a pretrained CIFAR10 image model. \n", - "The following steps will be run:\n", - "\n", - " * Set up Minio client\n", - " * Download and test the CIFAR10 model and train an Anchor Images explainer on it. Save both model and explainer to Minio.\n", - " * Deploy model and explainer using KFServing and test\n", - " * Train outlier detector\n", - " * Train drift detector\n", - " * Deploy Knative eventing display to show asynchronous results from outlier and drift detectors. \n", - " * Deploy outlier detector and test\n", - " * Deploy drift detector and test\n", - " \n", - " ### Setup\n", - " \n", - " You will need a Kubeflow cluster >= 1.0 with \n", - " \n", - " * Knative eventing \n", - " * Seldon >= 1.2.2\n", - " * KFServing >= 0.3.0\n", - " \n", - " ### Kubeflow Jupyter Notebook Server\n", - " \n", - " To run this notebook inside Kubeflow, create a Jupyter notebook server using the image `seldonio/jupyter-lab-alibi-kale:0.11`.\n", - " \n", - " ### GCP Setup\n", - " \n", - " On GCP, if you use Kale to save this notebook as a pipeline, you will need to set the storage_class of the `VolumeOp` to `nfs-client` if you have followed the steps to create an NFS RWX PV on GCP, 
e.g.:\n", - " \n", - " ```\n", - " marshal_vop = dsl.VolumeOp(\n", - " name=\"kale-marshal-volume\",\n", - " resource_name=\"kale-marshal-pvc\",\n", - " storage_class=\"nfs-client\",\n", - " modes=dsl.VOLUME_MODE_RWM,\n", - " size=\"1Gi\"\n", - " )\n", - " ```\n", - " \n", - "\n", - " " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "imports" - ] - }, - "outputs": [], - "source": [ - "import numpy as np\n", - "from sklearn.ensemble import RandomForestClassifier\n", - "from sklearn.compose import ColumnTransformer\n", - "from sklearn.pipeline import Pipeline\n", - "from sklearn.impute import SimpleImputer\n", - "from sklearn.metrics import accuracy_score\n", - "from sklearn.preprocessing import StandardScaler, OneHotEncoder\n", - "from alibi.explainers import AnchorImage\n", - "from alibi.datasets import fetch_adult\n", - "from minio import Minio\n", - "from minio.error import ResponseError\n", - "from joblib import dump, load\n", - "import dill\n", - "from subprocess import run, Popen, PIPE\n", - "from alibi_detect.utils.data import create_outlier_batch\n", - "from alibi_detect.utils.fetching import fetch_tf_model\n", - "import json\n", - "import logging\n", - "import matplotlib.pyplot as plt\n", - "import tensorflow as tf\n", - "tf.keras.backend.clear_session()\n", - "from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer\n", - "from tqdm import tqdm\n", - "\n", - "from alibi_detect.models.losses import elbo\n", - "from alibi_detect.od import OutlierVAE\n", - "from alibi_detect.utils.fetching import fetch_detector\n", - "from alibi_detect.utils.perturbation import apply_mask\n", - "from alibi_detect.utils.saving import save_detector, load_detector\n", - "from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image\n", - "import time\n", - "\n", - "logger = tf.get_logger()\n", - "logger.setLevel(logging.ERROR)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Setup Pipeline Parameters\n", - "\n", - "The following global variables can be set. These will be used as pipeline parameters." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "pipeline-parameters" - ] - }, - "outputs": [], - "source": [ - "MINIO_HOST=\"minio-service.kubeflow:9000\"\n", - "MINIO_ACCESS_KEY=\"minio\"\n", - "MINIO_SECRET_KEY=\"minio123\"\n", - "MINIO_MODEL_BUCKET=\"seldon\"\n", - "CIFAR10_MODEL_PATH=\"tfserving/cifar10/model\"\n", - "EXPLAINER_MODEL_PATH=\"tfserving/cifar10/explainer\"\n", - "OUTLIER_MODEL_PATH=\"tfserving/cifar10/outlier\"\n", - "DRIFT_MODEL_PATH=\"tfserving/cifar10/drift\"\n", - "DEPLOY_NAMESPACE=\"admin\"\n", - "TRAIN_OUTLIER_DETECTOR=False\n", - "TRAIN_DRIFT_DETECTOR=False" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "functions" - ] - }, - "outputs": [], - "source": [ - "def get_minio():\n", - " return Minio(MINIO_HOST,\n", - " access_key=MINIO_ACCESS_KEY,\n", - " secret_key=MINIO_SECRET_KEY,\n", - " secure=False)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:setup" - ] - }, - "outputs": [], - "source": [ - "minioClient = get_minio()\n", - "buckets = minioClient.list_buckets()\n", - "for bucket in buckets:\n", - " print(bucket.name, bucket.creation_date)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "if not minioClient.bucket_exists(MINIO_MODEL_BUCKET):\n", - " minioClient.make_bucket(MINIO_MODEL_BUCKET)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Test and save Model\n", - "\n", - "For simplicity we will use a pretrained Resnet32 CIFAR10 tensorflow model" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:train_model_and_explainer", - "prev:setup" - ] - }, - "outputs": [], - "source": [ - "model = fetch_tf_model('cifar10', 'resnet32')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "train, test = tf.keras.datasets.cifar10.load_data()\n", - "X_train, y_train = train\n", - "X_test, y_test = test\n", - "\n", - "X_train = X_train.astype('float32') / 255\n", - "X_test = X_test.astype('float32') / 255\n", - "print(X_train.shape, y_train.shape, X_test.shape, y_test.shape)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer',\n", - " 'dog', 'frog', 'horse', 'ship', 'truck']" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Test model locally." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "idx = 1\n", - "X = X_test[idx].reshape(1, 32, 32, 3)\n", - "plt.imshow(X.reshape(32, 32, 3))\n", - "plt.axis('off')\n", - "plt.show()\n", - "print(\"class:\",class_names[y_test[idx][0]])\n", - "print(\"prediction:\",class_names[model.predict(X_test[idx:idx+1])[0].argmax()])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "modelfilepath=\"resnet\"\n", - "tf.saved_model.save(model, modelfilepath)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "from os import listdir\n", - "from os.path import isfile, join\n", - "\n", - "model_filepath=\"resnet\"\n", - "print(get_minio().fput_object(MINIO_MODEL_BUCKET, f\"{CIFAR10_MODEL_PATH}/1/saved_model.pb\", modelfilepath+\"/saved_model.pb\"))\n", - "variable_filepath = modelfilepath+\"/variables\"\n", - "onlyfiles = [f for f in listdir(variable_filepath) if isfile(join(variable_filepath, f))]\n", - "for filename in onlyfiles:\n", - " print(filename)\n", - " print(get_minio().fput_object(MINIO_MODEL_BUCKET, f\"{CIFAR10_MODEL_PATH}/1/variables/{filename}\", join(variable_filepath, filename)))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Train Explainer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:" - ] - }, - "outputs": [], - "source": [ - "def predict_fn(x):\n", - " return model.predict(x)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:" - ] - }, - "outputs": [], - "source": [ - "\n", - "image_shape = (32, 32, 3)\n", - "segmentation_fn = 'slic'\n", - "kwargs = {'n_segments': 5, 'compactness': 20, 'sigma': .5}\n", - "explainer = AnchorImage(predict_fn, image_shape, segmentation_fn=segmentation_fn, \n", - " segmentation_kwargs=kwargs, images_background=None)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "idx=0\n", - "image = X_test[0]\n", - "np.random.seed(0)\n", - "explanation = explainer.explain(image, threshold=.95, p_sample=.5, tau=0.25)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "X = X_test[idx].reshape(1, 32, 32, 3)\n", - "plt.imshow(X.reshape(32, 32, 3))\n", - "plt.axis('off')\n", - "plt.show()\n", - "print(\"class:\",class_names[y_test[idx][0]])\n", - "print(\"prediction:\",class_names[model.predict(X_test[idx:idx+1])[0].argmax()])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "plt.imshow(explanation[\"anchor\"])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "with open(\"explainer.dill\", \"wb\") as dill_file:\n", - " dill.dump(explainer, dill_file) \n", - " dill_file.close()\n", - "print(get_minio().fput_object(MINIO_MODEL_BUCKET, f\"{EXPLAINER_MODEL_PATH}/explainer.dill\", 'explainer.dill'))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Train Outlier Detector\n", - "\n", - "For further details and extended notebook see [Alibi-Detect Documentation](https://docs.seldon.io/projects/alibi-detect/en/stable/). 
These steps were derived from [Alibi-Detect CIFAR10 Example](https://docs.seldon.io/projects/alibi-detect/en/stable/examples/od_vae_cifar10.html)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:train_outlier_detector", - "prev:train_model_and_explainer" - ] - }, - "outputs": [], - "source": [ - "import logging\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import tensorflow as tf\n", - "tf.keras.backend.clear_session()\n", - "from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer\n", - "from tqdm import tqdm\n", - "\n", - "from alibi_detect.models.losses import elbo\n", - "from alibi_detect.od import OutlierVAE\n", - "from alibi_detect.utils.fetching import fetch_detector\n", - "from alibi_detect.utils.perturbation import apply_mask\n", - "from alibi_detect.utils.saving import save_detector, load_detector\n", - "from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image\n", - "\n", - "logger = tf.get_logger()\n", - "logger.setLevel(logging.ERROR)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "if TRAIN_OUTLIER_DETECTOR:\n", - " latent_dim = 1024\n", - " \n", - " encoder_net = tf.keras.Sequential(\n", - " [\n", - " InputLayer(input_shape=(32, 32, 3)),\n", - " Conv2D(64, 4, strides=2, padding='same', activation=tf.nn.relu),\n", - " Conv2D(128, 4, strides=2, padding='same', activation=tf.nn.relu),\n", - " Conv2D(512, 4, strides=2, padding='same', activation=tf.nn.relu)\n", - " ])\n", - "\n", - " decoder_net = tf.keras.Sequential(\n", - " [\n", - " InputLayer(input_shape=(latent_dim,)),\n", - " Dense(4*4*128),\n", - " Reshape(target_shape=(4, 4, 128)),\n", - " Conv2DTranspose(256, 4, strides=2, padding='same', activation=tf.nn.relu),\n", - " Conv2DTranspose(64, 4, strides=2, padding='same', activation=tf.nn.relu),\n", - " Conv2DTranspose(3, 4, strides=2, padding='same', activation='sigmoid')\n", - " ])\n", - " \n", - " # initialize outlier detector\n", - " od = OutlierVAE(threshold=.015, # threshold for outlier score\n", - " score_type='mse', # use MSE of reconstruction error for outlier detection\n", - " encoder_net=encoder_net, # can also pass VAE model instead\n", - " decoder_net=decoder_net, # of separate encoder and decoder\n", - " latent_dim=latent_dim,\n", - " samples=2)\n", - " # train\n", - " od.fit(X_train, \n", - " loss_fn=elbo,\n", - " cov_elbo=dict(sim=.05),\n", - " epochs=50,\n", - " verbose=True)\n", - "else:\n", - " od = load_detector(\"/home/models/samples/od/cifar10\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "idx = 8\n", - "X = X_train[idx].reshape(1, 32, 32, 3)\n", - "X_recon = od.vae(X)\n", - "plt.imshow(X.reshape(32, 32, 3))\n", - "plt.axis('off')\n", - "plt.show()\n", - "plt.imshow(X_recon.numpy().reshape(32, 32, 3))\n", - "plt.axis('off')\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "X = X_train[:500]\n", - "print(X.shape)\n", - "od_preds = od.predict(X,\n", - " outlier_type='instance', # use 'feature' or 'instance' level\n", - " return_feature_score=True, # scores used to determine outliers\n", - " return_instance_score=True)\n", - "print(list(od_preds['data'].keys()))\n", - "target = np.zeros(X.shape[0],).astype(int) # all normal CIFAR10 training 
instances\n", - "labels = ['normal', 'outlier']\n", - "plot_instance_score(od_preds, target, labels, od.threshold)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "from alibi_detect.utils.saving import save_detector, load_detector\n", - "from os import listdir\n", - "from os.path import isfile, join\n", - "\n", - "filepath=\"cifar10outlier\"\n", - "save_detector(od, filepath) \n", - "onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))]\n", - "for filename in onlyfiles:\n", - " print(filename)\n", - " print(get_minio().fput_object(MINIO_MODEL_BUCKET, f\"{OUTLIER_MODEL_PATH}/{filename}\", join(filepath, filename)))\n", - "filepath=\"cifar10outlier/model\"\n", - "onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))]\n", - "for filename in onlyfiles:\n", - " print(filename)\n", - " print(get_minio().fput_object(MINIO_MODEL_BUCKET, f\"{OUTLIER_MODEL_PATH}/model/{filename}\", join(filepath, filename)))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Train a Drift Detector" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:train_drift_detector", - "prev:train_model_and_explainer" - ] - }, - "outputs": [], - "source": [ - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import os\n", - "import tensorflow as tf\n", - "from tensorflow.keras.layers import Conv2D, Dense, Flatten, InputLayer, Reshape\n", - "\n", - "from alibi_detect.cd import KSDrift\n", - "from alibi_detect.cd.preprocess import uae, hidden_output\n", - "from alibi_detect.models.resnet import scale_by_instance\n", - "from alibi_detect.utils.fetching import fetch_tf_model, fetch_detector\n", - "from alibi_detect.utils.prediction import predict_batch\n", - "from alibi_detect.utils.saving import save_detector, load_detector\n", - "from alibi_detect.datasets import fetch_cifar10c, corruption_types_cifar10c" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "tf.random.set_seed(0)\n", - "\n", - "if True:\n", - " np.random.seed(0)\n", - " n_test = X_test.shape[0]\n", - " idx = np.random.choice(n_test, size=n_test // 2, replace=False)\n", - " idx_h0 = np.delete(np.arange(n_test), idx, axis=0)\n", - " X_ref,y_ref = X_test[idx], y_test[idx]\n", - " X_h0, y_h0 = X_test[idx_h0], y_test[idx_h0]\n", - " print(X_ref.shape, X_h0.shape)\n", - " # define encoder\n", - " encoding_dim = 32\n", - " encoder_net = tf.keras.Sequential(\n", - " [\n", - " InputLayer(input_shape=(32, 32, 3)),\n", - " Conv2D(64, 4, strides=2, padding='same', activation=tf.nn.relu),\n", - " Conv2D(128, 4, strides=2, padding='same', activation=tf.nn.relu),\n", - " Conv2D(512, 4, strides=2, padding='same', activation=tf.nn.relu),\n", - " Flatten(),\n", - " Dense(encoding_dim,)\n", - " ]\n", - " )\n", - "\n", - " # initialise drift detector\n", - " p_val = .05\n", - " cd = KSDrift(\n", - " p_val=p_val, # p-value for K-S test\n", - " X_ref=X_ref, # test against original test set\n", - " preprocess_fn=uae, # UAE for dimensionality reduction\n", - " preprocess_kwargs={'encoder_net': encoder_net, 'batch_size': 128},\n", - " alternative='two-sided' # other options: 'less', 'greater'\n", - " )\n", - "else:\n", - " cd = load_detector(\"/home/models/samples/cd/cifar10\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], 
- "source": [ - "from alibi_detect.utils.saving import save_detector, load_detector\n", - "from os import listdir\n", - "from os.path import isfile, join\n", - "\n", - "filepath=\"cifar10Drift\"\n", - "save_detector(cd, filepath) \n", - "onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))]\n", - "for filename in onlyfiles:\n", - " print(filename)\n", - " print(get_minio().fput_object(MINIO_MODEL_BUCKET, f\"{DRIFT_MODEL_PATH}/{filename}\", join(filepath, filename)))\n", - "filepath=\"cifar10Drift/model\"\n", - "onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))]\n", - "for filename in onlyfiles:\n", - " print(filename)\n", - " print(get_minio().fput_object(MINIO_MODEL_BUCKET, f\"{DRIFT_MODEL_PATH}/model/{filename}\", join(filepath, filename)))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Deploy KFServing Model" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:deploy_model", - "prev:train_model_and_explainer" - ] - }, - "outputs": [], - "source": [ - "secret=f\"\"\"apiVersion: v1\n", - "kind: Secret\n", - "metadata:\n", - " name: cifar10-kf-secret\n", - " namespace: {DEPLOY_NAMESPACE}\n", - " annotations:\n", - " serving.kubeflow.org/s3-endpoint: {MINIO_HOST} # replace with your s3 endpoint\n", - " serving.kubeflow.org/s3-usehttps: \"0\" # by default 1, for testing with minio you need to set to 0\n", - "type: Opaque\n", - "stringData:\n", - " awsAccessKeyID: {MINIO_ACCESS_KEY}\n", - " awsSecretAccessKey: {MINIO_SECRET_KEY}\n", - "\"\"\"\n", - "with open(\"secret.yaml\",\"w\") as f:\n", - " f.write(secret)\n", - "run(\"kubectl apply -f secret.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "secret = f\"\"\"apiVersion: v1\n", - "kind: Secret\n", - "metadata:\n", - " name: seldon-init-container-secret\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "type: Opaque\n", - "stringData:\n", - " AWS_ACCESS_KEY_ID: {MINIO_ACCESS_KEY}\n", - " AWS_SECRET_ACCESS_KEY: {MINIO_SECRET_KEY}\n", - " AWS_ENDPOINT_URL: http://{MINIO_HOST}\n", - " USE_SSL: \"false\"\n", - "\"\"\"\n", - "with open(\"secret.yaml\",\"w\") as f:\n", - " f.write(secret)\n", - "run(\"cat secret.yaml | kubectl apply -f -\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "sa = f\"\"\"apiVersion: v1\n", - "kind: ServiceAccount\n", - "metadata:\n", - " name: minio-kf-sa\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "secrets:\n", - " - name: cifar10-kf-secret\n", - "\"\"\"\n", - "with open(\"sa.yaml\",\"w\") as f:\n", - " f.write(sa)\n", - "run(\"kubectl apply -f sa.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "from kubernetes import client\n", - "from kfserving import KFServingClient\n", - "from kfserving import constants\n", - "from kfserving import utils\n", - "from kfserving import V1alpha2EndpointSpec\n", - "from kfserving import V1alpha2PredictorSpec\n", - "from kfserving import V1alpha2ExplainerSpec\n", - "from kfserving import V1alpha2AlibiExplainerSpec\n", - "from kfserving import V1alpha2TensorflowSpec\n", - "from kfserving import V1alpha2InferenceServiceSpec\n", - "from kfserving import V1alpha2InferenceService\n", - "from kfserving import V1alpha2Logger\n", - "from kubernetes.client import 
V1ResourceRequirements\n", - "\n", - "api_version = constants.KFSERVING_GROUP + '/' + constants.KFSERVING_VERSION\n", - "default_endpoint_spec = V1alpha2EndpointSpec(\n", - " predictor=V1alpha2PredictorSpec(\n", - " service_account_name='minio-kf-sa',\n", - " tensorflow=V1alpha2TensorflowSpec(\n", - " storage_uri='s3://'+MINIO_MODEL_BUCKET+'/'+ CIFAR10_MODEL_PATH,\n", - " resources=V1ResourceRequirements(\n", - " requests={'cpu':'100m','memory':'1Gi'},\n", - " limits={'cpu':'100m', 'memory':'1Gi'})),\n", - " logger=V1alpha2Logger(\n", - " mode='all'\n", - " )),\n", - " explainer=V1alpha2ExplainerSpec(\n", - " service_account_name='minio-kf-sa',\n", - " alibi=V1alpha2AlibiExplainerSpec(\n", - " type='AnchorImages',\n", - " storage_uri='s3://'+MINIO_MODEL_BUCKET+'/'+ EXPLAINER_MODEL_PATH,\n", - " resources=V1ResourceRequirements(\n", - " requests={'cpu':'100m','memory':'1Gi'},\n", - " limits={'cpu':'100m', 'memory':'1Gi'}))))\n", - " \n", - "isvc = V1alpha2InferenceService(api_version=api_version,\n", - " kind=constants.KFSERVING_KIND,\n", - " metadata=client.V1ObjectMeta(\n", - " name='kf-cifar10', namespace=DEPLOY_NAMESPACE),\n", - " spec=V1alpha2InferenceServiceSpec(default=default_endpoint_spec))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "KFServing = KFServingClient()\n", - "KFServing.create(isvc)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "KFServing.get('kf-cifar10', namespace=DEPLOY_NAMESPACE, watch=True, timeout_seconds=240)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Test Model and Explainer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:test_model_and_explainer", - "prev:deploy_model" - ] - }, - "outputs": [], - "source": [ - "def test_model():\n", - " idx=10\n", - " test_example=X_test[idx:idx+1].tolist()\n", - " payload='{\"instances\":'+f\"{test_example}\"+' }'\n", - " cmd=f\"\"\"curl -v -d '{payload}' \\\n", - " -H \"Host: kf-cifar10.admin.example.com\" \\\n", - " -H \"Content-Type: application/json\" \\\n", - " http://kfserving-ingressgateway.istio-system/v1/models/kf-cifar10:predict\n", - " \"\"\"\n", - " ret = Popen(cmd, shell=True,stdout=PIPE)\n", - " raw = ret.stdout.read().decode(\"utf-8\")\n", - " print(raw)\n", - " res=json.loads(raw)\n", - " arr=np.array(res[\"predictions\"])\n", - " X = X_test[idx].reshape(1, 32, 32, 3)\n", - " plt.imshow(X.reshape(32, 32, 3))\n", - " plt.axis('off')\n", - " plt.show()\n", - " print(\"class:\",class_names[y_test[idx][0]])\n", - " print(\"prediction:\",class_names[arr[0].argmax()])\n", - "\n", - "ok = False\n", - "while not ok:\n", - " try:\n", - " test_model()\n", - " ok = True\n", - " except:\n", - " print(\"Failed calling model, sleeping\")\n", - " time.sleep(2)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Make an explanation request" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "idx=1\n", - "test_example=X_test[idx:idx+1].tolist()\n", - "payload='{\"instances\":'+f\"{test_example}\"+' }'\n", - "cmd=f\"\"\"curl -v -d '{payload}' \\\n", - " -H \"Host: kf-cifar10.admin.example.com\" \\\n", - " -H \"Content-Type: application/json\" \\\n", - " http://kfserving-ingressgateway.istio-system/v1/models/kf-cifar10:explain\n", - 
"\"\"\"\n", - "ret = Popen(cmd, shell=True,stdout=PIPE)\n", - "raw = ret.stdout.read().decode(\"utf-8\")\n", - "res=json.loads(raw)\n", - "plt.imshow(np.array(explanation[\"anchor\"]))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Deploy KNative Eventing Event Display" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:deploy_event_display", - "prev:train_drift_detector", - "prev:train_outlier_detector", - "prev:test_model_and_explainer" - ] - }, - "outputs": [], - "source": [ - "event_display=f\"\"\"apiVersion: apps/v1\n", - "kind: Deployment\n", - "metadata:\n", - " name: event-display\n", - " namespace: {DEPLOY_NAMESPACE} \n", - "spec:\n", - " replicas: 1\n", - " selector:\n", - " matchLabels: &labels\n", - " app: event-display\n", - " template:\n", - " metadata:\n", - " labels: *labels\n", - " spec:\n", - " containers:\n", - " - name: helloworld-go\n", - " # Source code: https://github.com/knative/eventing-contrib/tree/master/cmd/event_display\n", - " image: gcr.io/knative-releases/knative.dev/eventing-contrib/cmd/event_display@sha256:f4628e97a836c77ed38bd3b6fd3d0b06de4d5e7db6704772fe674d48b20bd477\n", - "---\n", - "kind: Service\n", - "apiVersion: v1\n", - "metadata:\n", - " name: event-display\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " selector:\n", - " app: event-display\n", - " ports:\n", - " - protocol: TCP\n", - " port: 80\n", - " targetPort: 8080\n", - "---\n", - "apiVersion: eventing.knative.dev/v1alpha1\n", - "kind: Trigger\n", - "metadata:\n", - " name: cifar10-outlier-display\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " broker: default\n", - " filter:\n", - " attributes:\n", - " type: org.kubeflow.serving.inference.outlier\n", - " subscriber:\n", - " ref:\n", - " apiVersion: v1\n", - " kind: Service\n", - " name: event-display\n", - "---\n", - "apiVersion: eventing.knative.dev/v1alpha1\n", - "kind: Trigger\n", - "metadata:\n", - " name: cifar10-drift-display\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " broker: default\n", - " filter:\n", - " attributes:\n", - " type: org.kubeflow.serving.inference.drift\n", - " subscriber:\n", - " ref:\n", - " apiVersion: v1\n", - " kind: Service\n", - " name: event-display\n", - "\"\"\"\n", - "with open(\"event_display.yaml\",\"w\") as f:\n", - " f.write(event_display)\n", - "run(\"kubectl apply -f event_display.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "run(f\"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/event-display -n {DEPLOY_NAMESPACE}\", shell=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Deploy KFServing Outlier Detector" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:deploy_outlier_detector", - "prev:deploy_event_display" - ] - }, - "outputs": [], - "source": [ - "outlier_yaml=f\"\"\"apiVersion: serving.knative.dev/v1\n", - "kind: Service\n", - "metadata:\n", - " name: cifar10-outlier\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " template:\n", - " metadata:\n", - " annotations:\n", - " autoscaling.knative.dev/minScale: \"1\"\n", - " spec:\n", - " containers:\n", - " - image: seldonio/alibi-detect-server:1.2.1\n", - " imagePullPolicy: IfNotPresent\n", - " args:\n", - " - --model_name\n", - " - cifar10od\n", - " - --protocol\n", - " - tensorflow.http\n", - " - 
--storage_uri\n", - " - s3://{MINIO_MODEL_BUCKET}/{OUTLIER_MODEL_PATH}\n", - " - --reply_url\n", - " - http://default-broker \n", - " - --event_type\n", - " - org.kubeflow.serving.inference.outlier\n", - " - --event_source\n", - " - org.kubeflow.serving.cifar10od\n", - " - OutlierDetector\n", - " envFrom:\n", - " - secretRef:\n", - " name: seldon-init-container-secret\n", - "\"\"\"\n", - "with open(\"outlier.yaml\",\"w\") as f:\n", - " f.write(outlier_yaml)\n", - "run(\"kubectl apply -f outlier.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "trigger_outlier_yaml=f\"\"\"apiVersion: eventing.knative.dev/v1alpha1\n", - "kind: Trigger\n", - "metadata:\n", - " name: cifar10-outlier-trigger\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " filter:\n", - " sourceAndType:\n", - " type: org.kubeflow.serving.inference.request\n", - " subscriber:\n", - " ref:\n", - " apiVersion: serving.knative.dev/v1\n", - " kind: Service\n", - " name: cifar10-outlier\n", - "\"\"\"\n", - "with open(\"outlier_trigger.yaml\",\"w\") as f:\n", - " f.write(trigger_outlier_yaml)\n", - "run(\"kubectl apply -f outlier_trigger.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "run(f\"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l serving.knative.dev/service=cifar10-outlier -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})\", shell=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Test KFServing Outlier Detection" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:test_oulier_detection", - "prev:deploy_outlier_detector" - ] - }, - "outputs": [], - "source": [ - "idx = 1\n", - "X = X_train[idx:idx+1]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "np.random.seed(0) \n", - "X_mask, mask = apply_mask(X.reshape(1, 32, 32, 3),\n", - " mask_size=(10,10),\n", - " n_masks=1,\n", - " channels=[0,1,2],\n", - " mask_type='normal',\n", - " noise_distr=(0,1),\n", - " clip_rng=(0,1))\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "def predict():\n", - " test_example=X_mask.tolist()\n", - " payload='{\"instances\":'+f\"{test_example}\"+' }'\n", - " cmd=f\"\"\"curl -v -d '{payload}' \\\n", - " -H \"Host: kf-cifar10.admin.example.com\" \\\n", - " -H \"Content-Type: application/json\" \\\n", - " http://kfserving-ingressgateway.istio-system/v1/models/kf-cifar10:predict\n", - " \"\"\"\n", - " ret = Popen(cmd, shell=True,stdout=PIPE)\n", - " raw = ret.stdout.read().decode(\"utf-8\")\n", - " print(raw)\n", - " res=json.loads(raw)\n", - " arr=np.array(res[\"predictions\"])\n", - " plt.imshow(X_mask.reshape(32, 32, 3))\n", - " plt.axis('off')\n", - " plt.show()\n", - " print(\"class:\",class_names[y_train[idx][0]])\n", - " print(\"prediction:\",class_names[arr[0].argmax()])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "def get_outlier_event_display_logs():\n", - " cmd=f\"kubectl logs $(kubectl get pod -l app=event-display -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE}) -n {DEPLOY_NAMESPACE}\"\n", - " ret = Popen(cmd, 
shell=True,stdout=PIPE)\n", - " res = ret.stdout.read().decode(\"utf-8\").split(\"\\n\")\n", - " data= []\n", - " for i in range(0,len(res)):\n", - " if res[i] == 'Data,':\n", - " j = json.loads(json.loads(res[i+1]))\n", - " if \"is_outlier\"in j[\"data\"].keys():\n", - " data.append(j)\n", - " if len(data) > 0:\n", - " return data[-1]\n", - " else:\n", - " return None\n", - "j = None\n", - "while j is None:\n", - " predict()\n", - " print(\"Waiting for outlier logs, sleeping\")\n", - " time.sleep(2)\n", - " j = get_outlier_event_display_logs()\n", - " \n", - "print(j)\n", - "print(\"Outlier\",j[\"data\"][\"is_outlier\"]==[1])" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Deploy KFServing Drift Detector" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:deploy_drift_detector", - "prev:test_oulier_detection" - ] - }, - "outputs": [], - "source": [ - "drift_yaml=f\"\"\"apiVersion: serving.knative.dev/v1\n", - "kind: Service\n", - "metadata:\n", - " name: cifar10-drift\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " template:\n", - " metadata:\n", - " annotations:\n", - " autoscaling.knative.dev/minScale: \"1\"\n", - " spec:\n", - " containers:\n", - " - image: seldonio/alibi-detect-server:1.2.2-dev\n", - " imagePullPolicy: IfNotPresent\n", - " args:\n", - " - --model_name\n", - " - cifar10cd\n", - " - --protocol\n", - " - tensorflow.http\n", - " - --storage_uri\n", - " - s3://{MINIO_MODEL_BUCKET}/{DRIFT_MODEL_PATH}\n", - " - --reply_url\n", - " - http://default-broker\n", - " - --event_type\n", - " - org.kubeflow.serving.inference.drift\n", - " - --event_source\n", - " - org.kubeflow.serving.cifar10cd\n", - " - DriftDetector\n", - " - --drift_batch_size\n", - " - '500'\n", - " envFrom:\n", - " - secretRef:\n", - " name: seldon-init-container-secret\n", - "\"\"\"\n", - "with open(\"drift.yaml\",\"w\") as f:\n", - " f.write(drift_yaml)\n", - "run(\"kubectl apply -f drift.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "trigger_outlier_yaml=f\"\"\"apiVersion: eventing.knative.dev/v1alpha1\n", - "kind: Trigger\n", - "metadata:\n", - " name: cifar10-drift-trigger\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " filter:\n", - " sourceAndType:\n", - " type: org.kubeflow.serving.inference.request\n", - " subscriber:\n", - " ref:\n", - " apiVersion: serving.knative.dev/v1\n", - " kind: Service\n", - " name: cifar10-drift\n", - "\"\"\"\n", - "with open(\"outlier_trigger.yaml\",\"w\") as f:\n", - " f.write(trigger_outlier_yaml)\n", - "run(\"kubectl apply -f outlier_trigger.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "run(f\"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l serving.knative.dev/service=cifar10-drift -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})\", shell=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Test KFServing Drift Detector" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:test_drift_detector", - "prev:deploy_drift_detector" - ] - }, - "outputs": [], - "source": [ - "def show(X):\n", - " plt.imshow(X.reshape(32, 32, 3))\n", - " plt.axis('off')\n", - " plt.show()" - ] - }, - { - "cell_type": "code", - 
"execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "from alibi_detect.datasets import fetch_cifar10c, corruption_types_cifar10c\n", - "corruption = ['motion_blur']\n", - "X_corr, y_corr = fetch_cifar10c(corruption=corruption, severity=5, return_X_y=True)\n", - "X_corr = X_corr.astype('float32') / 255" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "show(X_corr[0])\n", - "show(X_corr[1])\n", - "show(X_corr[2])\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "def predict(X):\n", - " test_example=X.tolist()\n", - " payload='{\"instances\":'+f\"{test_example}\"+' }'\n", - " with open(\"payload.json\",\"w\") as f:\n", - " f.write(payload)\n", - " cmd=f\"\"\"curl -d @./payload.json \\\n", - " -H \"Host: kf-cifar10.admin.example.com\" \\\n", - " -H \"Content-Type: application/json\" \\\n", - " http://kfserving-ingressgateway.istio-system/v1/models/kf-cifar10:predict\n", - " \"\"\"\n", - " run(cmd, shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "def get_drift_event_display_logs():\n", - " cmd=f\"kubectl logs $(kubectl get pod -l app=event-display -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE}) -n {DEPLOY_NAMESPACE}\"\n", - " ret = Popen(cmd, shell=True,stdout=PIPE)\n", - " res = ret.stdout.read().decode(\"utf-8\").split(\"\\n\")\n", - " data= []\n", - " for i in range(0,len(res)):\n", - " if res[i] == 'Data,':\n", - " j = json.loads(json.loads(res[i+1]))\n", - " if \"is_drift\" in j[\"data\"].keys():\n", - " data.append(j)\n", - " if len(data) > 0:\n", - " return data[-1]\n", - " else:\n", - " return None\n", - "j = None\n", - "for i in range(0,1000,50):\n", - " X = X_corr[i:i+50]\n", - " predict(X)\n", - " print(\"Waiting for drift logs, sleeping\")\n", - " time.sleep(2)\n", - " j = get_drift_event_display_logs()\n", - " if j is not None:\n", - " break\n", - " \n", - "print(j)\n", - "print(\"Drift\",j[\"data\"][\"is_drift\"]==1)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Clean up" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "skip" - ] - }, - "outputs": [], - "source": [ - "run(f\"kubectl delete inferenceservice kf-cifar10 -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete ksvc cifar10-outlier -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete ksvc cifar10-drift -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete trigger --all -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete sa minio-kf-sa -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete secret seldon-init-container-secret -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete secret cifar10-kf-secret -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete deployment event-display -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete svc event-display -n {DEPLOY_NAMESPACE}\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "kubeflow_notebook": { - "docker_image": "seldonio/jupyter-lab-alibi-kale:0.11", - 
"experiment": { - "id": "new", - "name": "kfserving-e2e-cifar10" - }, - "experiment_name": "kfserving-e2e-cifar10", - "katib_metadata": { - "algorithm": { - "algorithmName": "grid" - }, - "maxFailedTrialCount": 3, - "maxTrialCount": 12, - "objective": { - "objectiveMetricName": "", - "type": "minimize" - }, - "parallelTrialCount": 3, - "parameters": [] - }, - "katib_run": false, - "pipeline_description": "KFServing CIFAR10 Example", - "pipeline_name": "kfserving-e2e-cifar10", - "volumes": [] - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.8" - }, - "varInspector": { - "cols": { - "lenName": 16, - "lenType": 16, - "lenVar": 40 - }, - "kernels_config": { - "python": { - "delete_cmd_postfix": "", - "delete_cmd_prefix": "del ", - "library": "var_list.py", - "varRefreshCmd": "print(var_dic_list())" - }, - "r": { - "delete_cmd_postfix": ") ", - "delete_cmd_prefix": "rm(", - "library": "var_list.r", - "varRefreshCmd": "cat(var_dic_list()) " - } - }, - "types_to_exclude": [ - "module", - "function", - "builtin_function_or_method", - "instance", - "_Feature" - ], - "window_display": false - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/samples/contrib/e2e-outlier-drift-explainer/kfserving/kfserving_e2e_cifar10.kale.default.py b/samples/contrib/e2e-outlier-drift-explainer/kfserving/kfserving_e2e_cifar10.kale.default.py deleted file mode 100644 index cc8d91d13d5..00000000000 --- a/samples/contrib/e2e-outlier-drift-explainer/kfserving/kfserving_e2e_cifar10.kale.default.py +++ /dev/null @@ -1,2000 +0,0 @@ -import kfp.dsl as dsl -import json -import kfp.components as comp -from collections import OrderedDict -from kubernetes import client as k8s_client - - -def setup(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import 
plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - minioClient = get_minio() - buckets = minioClient.list_buckets() - for bucket in buckets: - print(bucket.name, bucket.creation_date) - ''' - - block4 = ''' - if not minioClient.bucket_exists(MINIO_MODEL_BUCKET): - minioClient.make_bucket(MINIO_MODEL_BUCKET) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/setup.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('setup') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def train_model_and_explainer(CIFAR10_MODEL_PATH: str, EXPLAINER_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - CIFAR10_MODEL_PATH = "{}" - EXPLAINER_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(CIFAR10_MODEL_PATH, EXPLAINER_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - model = fetch_tf_model('cifar10', 'resnet32') - ''' - - block4 = ''' - train, test = tf.keras.datasets.cifar10.load_data() - X_train, y_train = train - X_test, y_test = test - - X_train = X_train.astype('float32') / 255 - X_test = X_test.astype('float32') / 255 - print(X_train.shape, y_train.shape, X_test.shape, y_test.shape) - ''' - - block5 = ''' - class_names = ['airplane', 
'automobile', 'bird', 'cat', 'deer', - 'dog', 'frog', 'horse', 'ship', 'truck'] - ''' - - block6 = ''' - idx = 1 - X = X_test[idx].reshape(1, 32, 32, 3) - plt.imshow(X.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - print("class:",class_names[y_test[idx][0]]) - print("prediction:",class_names[model.predict(X_test[idx:idx+1])[0].argmax()]) - ''' - - block7 = ''' - modelfilepath="resnet" - tf.saved_model.save(model, modelfilepath) - ''' - - block8 = ''' - from os import listdir - from os.path import isfile, join - - model_filepath="resnet" - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{CIFAR10_MODEL_PATH}/1/saved_model.pb", modelfilepath+"/saved_model.pb")) - variable_filepath = modelfilepath+"/variables" - onlyfiles = [f for f in listdir(variable_filepath) if isfile(join(variable_filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{CIFAR10_MODEL_PATH}/1/variables/{filename}", join(variable_filepath, filename))) - ''' - - block9 = ''' - def predict_fn(x): - return model.predict(x) - ''' - - block10 = ''' - - image_shape = (32, 32, 3) - segmentation_fn = 'slic' - kwargs = {'n_segments': 5, 'compactness': 20, 'sigma': .5} - explainer = AnchorImage(predict_fn, image_shape, segmentation_fn=segmentation_fn, - segmentation_kwargs=kwargs, images_background=None) - ''' - - block11 = ''' - idx=0 - image = X_test[0] - np.random.seed(0) - explanation = explainer.explain(image, threshold=.95, p_sample=.5, tau=0.25) - ''' - - block12 = ''' - X = X_test[idx].reshape(1, 32, 32, 3) - plt.imshow(X.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - print("class:",class_names[y_test[idx][0]]) - print("prediction:",class_names[model.predict(X_test[idx:idx+1])[0].argmax()]) - ''' - - block13 = ''' - plt.imshow(explanation["anchor"]) - ''' - - block14 = ''' - with open("explainer.dill", "wb") as dill_file: - dill.dump(explainer, dill_file) - dill_file.close() - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{EXPLAINER_MODEL_PATH}/explainer.dill", 'explainer.dill')) - ''' - - data_saving_block = ''' - # -----------------------DATA SAVING START--------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.save(X_test, "X_test") - _kale_marshal_utils.save(X_train, "X_train") - _kale_marshal_utils.save(class_names, "class_names") - _kale_marshal_utils.save(explanation, "explanation") - _kale_marshal_utils.save(y_test, "y_test") - _kale_marshal_utils.save(y_train, "y_train") - # -----------------------DATA SAVING END----------------------------------- - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - block9, - block10, - block11, - block12, - block13, - block14, - data_saving_block) - html_artifact = _kale_run_code(blocks) - with open("/train_model_and_explainer.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('train_model_and_explainer') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_model(CIFAR10_MODEL_PATH: str, DEPLOY_NAMESPACE: str, EXPLAINER_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - CIFAR10_MODEL_PATH = "{}" - 
DEPLOY_NAMESPACE = "{}" - EXPLAINER_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(CIFAR10_MODEL_PATH, DEPLOY_NAMESPACE, EXPLAINER_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - secret=f"""apiVersion: v1 - kind: Secret - metadata: - name: cifar10-kf-secret - namespace: {DEPLOY_NAMESPACE} - annotations: - serving.kubeflow.org/s3-endpoint: {MINIO_HOST} # replace with your s3 endpoint - serving.kubeflow.org/s3-usehttps: "0" # by default 1, for testing with minio you need to set to 0 - type: Opaque - stringData: - awsAccessKeyID: {MINIO_ACCESS_KEY} - awsSecretAccessKey: {MINIO_SECRET_KEY} - """ - with open("secret.yaml","w") as f: - f.write(secret) - run("kubectl apply -f secret.yaml", shell=True) - ''' - - block4 = ''' - secret = f"""apiVersion: v1 - kind: Secret - metadata: - name: seldon-init-container-secret - namespace: {DEPLOY_NAMESPACE} - type: Opaque - stringData: - AWS_ACCESS_KEY_ID: {MINIO_ACCESS_KEY} - AWS_SECRET_ACCESS_KEY: {MINIO_SECRET_KEY} - AWS_ENDPOINT_URL: http://{MINIO_HOST} - USE_SSL: "false" - """ - with open("secret.yaml","w") as f: - f.write(secret) - run("cat secret.yaml | kubectl apply -f -", shell=True) - ''' - - block5 = ''' - sa = f"""apiVersion: v1 - kind: ServiceAccount - metadata: - name: minio-kf-sa - namespace: {DEPLOY_NAMESPACE} - secrets: - - name: cifar10-kf-secret - """ - with open("sa.yaml","w") as f: - f.write(sa) - run("kubectl apply -f sa.yaml", shell=True) - ''' - - block6 = ''' - from kubernetes import client - from kfserving import KFServingClient - from kfserving import constants - from kfserving import utils - from kfserving import V1alpha2EndpointSpec - from kfserving import V1alpha2PredictorSpec - from kfserving import V1alpha2ExplainerSpec - from kfserving import V1alpha2AlibiExplainerSpec - from kfserving import 
V1alpha2TensorflowSpec - from kfserving import V1alpha2InferenceServiceSpec - from kfserving import V1alpha2InferenceService - from kfserving import V1alpha2Logger - from kubernetes.client import V1ResourceRequirements - - api_version = constants.KFSERVING_GROUP + '/' + constants.KFSERVING_VERSION - default_endpoint_spec = V1alpha2EndpointSpec( - predictor=V1alpha2PredictorSpec( - service_account_name='minio-kf-sa', - tensorflow=V1alpha2TensorflowSpec( - storage_uri='s3://'+MINIO_MODEL_BUCKET+'/'+ CIFAR10_MODEL_PATH, - resources=V1ResourceRequirements( - requests={'cpu':'100m','memory':'1Gi'}, - limits={'cpu':'100m', 'memory':'1Gi'})), - logger=V1alpha2Logger( - mode='all' - )), - explainer=V1alpha2ExplainerSpec( - service_account_name='minio-kf-sa', - alibi=V1alpha2AlibiExplainerSpec( - type='AnchorImages', - storage_uri='s3://'+MINIO_MODEL_BUCKET+'/'+ EXPLAINER_MODEL_PATH, - resources=V1ResourceRequirements( - requests={'cpu':'100m','memory':'1Gi'}, - limits={'cpu':'100m', 'memory':'1Gi'})))) - - isvc = V1alpha2InferenceService(api_version=api_version, - kind=constants.KFSERVING_KIND, - metadata=client.V1ObjectMeta( - name='kf-cifar10', namespace=DEPLOY_NAMESPACE), - spec=V1alpha2InferenceServiceSpec(default=default_endpoint_spec)) - ''' - - block7 = ''' - KFServing = KFServingClient() - KFServing.create(isvc) - ''' - - block8 = ''' - KFServing.get('kf-cifar10', namespace=DEPLOY_NAMESPACE, watch=True, timeout_seconds=240) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_model.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_model') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def test_model_and_explainer(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_test = _kale_marshal_utils.load("X_test") - class_names = _kale_marshal_utils.load("class_names") - explanation = _kale_marshal_utils.load("explanation") - y_test = _kale_marshal_utils.load("y_test") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from 
alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - def test_model(): - idx=10 - test_example=X_test[idx:idx+1].tolist() - payload='{"instances":'+f"{test_example}"+' }' - cmd=f"""curl -v -d '{payload}' \\ - -H "Host: kf-cifar10.admin.example.com" \\ - -H "Content-Type: application/json" \\ - http://kfserving-ingressgateway.istio-system/v1/models/kf-cifar10:predict - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - print(raw) - res=json.loads(raw) - arr=np.array(res["predictions"]) - X = X_test[idx].reshape(1, 32, 32, 3) - plt.imshow(X.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - print("class:",class_names[y_test[idx][0]]) - print("prediction:",class_names[arr[0].argmax()]) - - ok = False - while not ok: - try: - test_model() - ok = True - except: - print("Failed calling model, sleeping") - time.sleep(2) - ''' - - block4 = ''' - idx=1 - test_example=X_test[idx:idx+1].tolist() - payload='{"instances":'+f"{test_example}"+' }' - cmd=f"""curl -v -d '{payload}' \\ - -H "Host: kf-cifar10.admin.example.com" \\ - -H "Content-Type: application/json" \\ - http://kfserving-ingressgateway.istio-system/v1/models/kf-cifar10:explain - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - res=json.loads(raw) - plt.imshow(np.array(explanation["anchor"])) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/test_model_and_explainer.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('test_model_and_explainer') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def train_drift_detector(DRIFT_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DRIFT_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DRIFT_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_test = _kale_marshal_utils.load("X_test") 
- y_test = _kale_marshal_utils.load("y_test") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - import matplotlib.pyplot as plt - import numpy as np - import os - import tensorflow as tf - from tensorflow.keras.layers import Conv2D, Dense, Flatten, InputLayer, Reshape - - from alibi_detect.cd import KSDrift - from alibi_detect.cd.preprocess import uae, hidden_output - from alibi_detect.models.resnet import scale_by_instance - from alibi_detect.utils.fetching import fetch_tf_model, fetch_detector - from alibi_detect.utils.prediction import predict_batch - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.datasets import fetch_cifar10c, corruption_types_cifar10c - ''' - - block4 = ''' - tf.random.set_seed(0) - - if True: - np.random.seed(0) - n_test = X_test.shape[0] - idx = np.random.choice(n_test, size=n_test // 2, replace=False) - idx_h0 = np.delete(np.arange(n_test), idx, axis=0) - X_ref,y_ref = X_test[idx], y_test[idx] - X_h0, y_h0 = X_test[idx_h0], y_test[idx_h0] - print(X_ref.shape, X_h0.shape) - # define encoder - encoding_dim = 32 - encoder_net = tf.keras.Sequential( - [ - InputLayer(input_shape=(32, 32, 3)), - Conv2D(64, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2D(128, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2D(512, 4, strides=2, padding='same', activation=tf.nn.relu), - Flatten(), - Dense(encoding_dim,) - ] - ) - - # initialise drift detector - p_val = .05 - cd = KSDrift( - p_val=p_val, # p-value for K-S test - X_ref=X_ref, # test against original test set - preprocess_fn=uae, # UAE for dimensionality reduction - preprocess_kwargs={'encoder_net': encoder_net, 'batch_size': 128}, - alternative='two-sided' # other options: 'less', 'greater' - ) - else: - cd = load_detector("/home/models/samples/cd/cifar10") - ''' - - block5 = ''' - from alibi_detect.utils.saving import save_detector, load_detector - from os import listdir - from os.path import isfile, join - 
- filepath="cifar10Drift" - save_detector(cd, filepath) - onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{DRIFT_MODEL_PATH}/{filename}", join(filepath, filename))) - filepath="cifar10Drift/model" - onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{DRIFT_MODEL_PATH}/model/{filename}", join(filepath, filename))) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - block5, - ) - html_artifact = _kale_run_code(blocks) - with open("/train_drift_detector.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('train_drift_detector') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def train_outlier_detector(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str, OUTLIER_MODEL_PATH: str, TRAIN_OUTLIER_DETECTOR: bool): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - OUTLIER_MODEL_PATH = "{}" - TRAIN_OUTLIER_DETECTOR = {} - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH, TRAIN_OUTLIER_DETECTOR) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_train = _kale_marshal_utils.load("X_train") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - 
access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - import logging - import matplotlib.pyplot as plt - import numpy as np - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block4 = ''' - if TRAIN_OUTLIER_DETECTOR: - latent_dim = 1024 - - encoder_net = tf.keras.Sequential( - [ - InputLayer(input_shape=(32, 32, 3)), - Conv2D(64, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2D(128, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2D(512, 4, strides=2, padding='same', activation=tf.nn.relu) - ]) - - decoder_net = tf.keras.Sequential( - [ - InputLayer(input_shape=(latent_dim,)), - Dense(4*4*128), - Reshape(target_shape=(4, 4, 128)), - Conv2DTranspose(256, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2DTranspose(64, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2DTranspose(3, 4, strides=2, padding='same', activation='sigmoid') - ]) - - # initialize outlier detector - od = OutlierVAE(threshold=.015, # threshold for outlier score - score_type='mse', # use MSE of reconstruction error for outlier detection - encoder_net=encoder_net, # can also pass VAE model instead - decoder_net=decoder_net, # of separate encoder and decoder - latent_dim=latent_dim, - samples=2) - # train - od.fit(X_train, - loss_fn=elbo, - cov_elbo=dict(sim=.05), - epochs=50, - verbose=True) - else: - od = load_detector("/home/models/samples/od/cifar10") - ''' - - block5 = ''' - idx = 8 - X = X_train[idx].reshape(1, 32, 32, 3) - X_recon = od.vae(X) - plt.imshow(X.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - plt.imshow(X_recon.numpy().reshape(32, 32, 3)) - plt.axis('off') - plt.show() - ''' - - block6 = ''' - X = X_train[:500] - print(X.shape) - od_preds = od.predict(X, - outlier_type='instance', # use 'feature' or 'instance' level - return_feature_score=True, # scores used to determine outliers - return_instance_score=True) - print(list(od_preds['data'].keys())) - target = np.zeros(X.shape[0],).astype(int) # all normal CIFAR10 training instances - labels = ['normal', 'outlier'] - plot_instance_score(od_preds, target, labels, od.threshold) - ''' - - block7 = ''' - from alibi_detect.utils.saving import save_detector, load_detector - from os import listdir - from os.path import isfile, join - - filepath="cifar10outlier" - save_detector(od, filepath) - onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{OUTLIER_MODEL_PATH}/{filename}", join(filepath, filename))) - filepath="cifar10outlier/model" - onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{OUTLIER_MODEL_PATH}/model/{filename}", join(filepath, filename))) - ''' - - data_saving_block = ''' - # -----------------------DATA SAVING START--------------------------------- - from 
kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.save(X_train, "X_train") - # -----------------------DATA SAVING END----------------------------------- - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - data_saving_block) - html_artifact = _kale_run_code(blocks) - with open("/train_outlier_detector.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('train_outlier_detector') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_event_display(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - event_display=f"""apiVersion: apps/v1 - kind: Deployment - metadata: - name: event-display - namespace: {DEPLOY_NAMESPACE} - spec: - replicas: 1 - selector: - matchLabels: &labels - app: event-display - template: - metadata: - labels: *labels - spec: - containers: - - name: helloworld-go - # Source code: https://github.com/knative/eventing-contrib/tree/master/cmd/event_display - image: gcr.io/knative-releases/knative.dev/eventing-contrib/cmd/event_display@sha256:f4628e97a836c77ed38bd3b6fd3d0b06de4d5e7db6704772fe674d48b20bd477 - --- - kind: Service - apiVersion: v1 - metadata: - name: event-display - namespace: {DEPLOY_NAMESPACE} - spec: - selector: - app: event-display - ports: - - protocol: TCP - port: 80 - targetPort: 8080 - --- - apiVersion: 
eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: cifar10-outlier-display - namespace: {DEPLOY_NAMESPACE} - spec: - broker: default - filter: - attributes: - type: org.kubeflow.serving.inference.outlier - subscriber: - ref: - apiVersion: v1 - kind: Service - name: event-display - --- - apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: cifar10-drift-display - namespace: {DEPLOY_NAMESPACE} - spec: - broker: default - filter: - attributes: - type: org.kubeflow.serving.inference.drift - subscriber: - ref: - apiVersion: v1 - kind: Service - name: event-display - """ - with open("event_display.yaml","w") as f: - f.write(event_display) - run("kubectl apply -f event_display.yaml", shell=True) - ''' - - block4 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/event-display -n {DEPLOY_NAMESPACE}", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_event_display.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_event_display') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_outlier_detector(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str, OUTLIER_MODEL_PATH: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - OUTLIER_MODEL_PATH = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - outlier_yaml=f"""apiVersion: serving.knative.dev/v1 - 
kind: Service - metadata: - name: cifar10-outlier - namespace: {DEPLOY_NAMESPACE} - spec: - template: - metadata: - annotations: - autoscaling.knative.dev/minScale: "1" - spec: - containers: - - image: seldonio/alibi-detect-server:1.2.1 - imagePullPolicy: IfNotPresent - args: - - --model_name - - cifar10od - - --protocol - - tensorflow.http - - --storage_uri - - s3://{MINIO_MODEL_BUCKET}/{OUTLIER_MODEL_PATH} - - --reply_url - - http://default-broker - - --event_type - - org.kubeflow.serving.inference.outlier - - --event_source - - org.kubeflow.serving.cifar10od - - OutlierDetector - envFrom: - - secretRef: - name: seldon-init-container-secret - """ - with open("outlier.yaml","w") as f: - f.write(outlier_yaml) - run("kubectl apply -f outlier.yaml", shell=True) - ''' - - block4 = ''' - trigger_outlier_yaml=f"""apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: cifar10-outlier-trigger - namespace: {DEPLOY_NAMESPACE} - spec: - filter: - sourceAndType: - type: org.kubeflow.serving.inference.request - subscriber: - ref: - apiVersion: serving.knative.dev/v1 - kind: Service - name: cifar10-outlier - """ - with open("outlier_trigger.yaml","w") as f: - f.write(trigger_outlier_yaml) - run("kubectl apply -f outlier_trigger.yaml", shell=True) - ''' - - block5 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l serving.knative.dev/service=cifar10-outlier -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_outlier_detector.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_outlier_detector') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def test_oulier_detection(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_train = _kale_marshal_utils.load("X_train") - class_names = _kale_marshal_utils.load("class_names") - y_train = _kale_marshal_utils.load("y_train") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from 
alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - idx = 1 - X = X_train[idx:idx+1] - ''' - - block4 = ''' - np.random.seed(0) - X_mask, mask = apply_mask(X.reshape(1, 32, 32, 3), - mask_size=(10,10), - n_masks=1, - channels=[0,1,2], - mask_type='normal', - noise_distr=(0,1), - clip_rng=(0,1)) - ''' - - block5 = ''' - def predict(): - test_example=X_mask.tolist() - payload='{"instances":'+f"{test_example}"+' }' - cmd=f"""curl -v -d '{payload}' \\ - -H "Host: kf-cifar10.admin.example.com" \\ - -H "Content-Type: application/json" \\ - http://kfserving-ingressgateway.istio-system/v1/models/kf-cifar10:predict - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - print(raw) - res=json.loads(raw) - arr=np.array(res["predictions"]) - plt.imshow(X_mask.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - print("class:",class_names[y_train[idx][0]]) - print("prediction:",class_names[arr[0].argmax()]) - ''' - - block6 = ''' - def get_outlier_event_display_logs(): - cmd=f"kubectl logs $(kubectl get pod -l app=event-display -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE}) -n {DEPLOY_NAMESPACE}" - ret = Popen(cmd, shell=True,stdout=PIPE) - res = ret.stdout.read().decode("utf-8").split("\\n") - data= [] - for i in range(0,len(res)): - if res[i] == 'Data,': - j = json.loads(json.loads(res[i+1])) - if "is_outlier"in j["data"].keys(): - data.append(j) - if len(data) > 0: - return data[-1] - else: - return None - j = None - while j is None: - predict() - print("Waiting for outlier logs, sleeping") - time.sleep(2) - j = get_outlier_event_display_logs() - - print(j) - print("Outlier",j["data"]["is_outlier"]==[1]) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - block5, - block6, - ) - html_artifact = _kale_run_code(blocks) - with open("/test_oulier_detection.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('test_oulier_detection') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_drift_detector(DEPLOY_NAMESPACE: str, DRIFT_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - DRIFT_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, DRIFT_MODEL_PATH, 
MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - drift_yaml=f"""apiVersion: serving.knative.dev/v1 - kind: Service - metadata: - name: cifar10-drift - namespace: {DEPLOY_NAMESPACE} - spec: - template: - metadata: - annotations: - autoscaling.knative.dev/minScale: "1" - spec: - containers: - - image: seldonio/alibi-detect-server:1.2.2-dev - imagePullPolicy: IfNotPresent - args: - - --model_name - - cifar10cd - - --protocol - - tensorflow.http - - --storage_uri - - s3://{MINIO_MODEL_BUCKET}/{DRIFT_MODEL_PATH} - - --reply_url - - http://default-broker - - --event_type - - org.kubeflow.serving.inference.drift - - --event_source - - org.kubeflow.serving.cifar10cd - - DriftDetector - - --drift_batch_size - - '500' - envFrom: - - secretRef: - name: seldon-init-container-secret - """ - with open("drift.yaml","w") as f: - f.write(drift_yaml) - run("kubectl apply -f drift.yaml", shell=True) - ''' - - block4 = ''' - trigger_outlier_yaml=f"""apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: cifar10-drift-trigger - namespace: {DEPLOY_NAMESPACE} - spec: - filter: - sourceAndType: - type: org.kubeflow.serving.inference.request - subscriber: - ref: - apiVersion: serving.knative.dev/v1 - kind: Service - name: cifar10-drift - """ - with open("outlier_trigger.yaml","w") as f: - f.write(trigger_outlier_yaml) - run("kubectl apply -f outlier_trigger.yaml", shell=True) - ''' - - block5 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l serving.knative.dev/service=cifar10-drift -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - 
block3, - block4, - block5, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_drift_detector.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_drift_detector') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def test_drift_detector(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - def show(X): - plt.imshow(X.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - ''' - - block4 = ''' - from alibi_detect.datasets import fetch_cifar10c, corruption_types_cifar10c - corruption = ['motion_blur'] - X_corr, y_corr = fetch_cifar10c(corruption=corruption, severity=5, return_X_y=True) - X_corr = X_corr.astype('float32') / 255 - ''' - - block5 = ''' - show(X_corr[0]) - show(X_corr[1]) - show(X_corr[2]) - ''' - - block6 = ''' - def predict(X): - test_example=X.tolist() - payload='{"instances":'+f"{test_example}"+' }' - with open("payload.json","w") as f: - f.write(payload) - cmd=f"""curl -d @./payload.json \\ - -H "Host: kf-cifar10.admin.example.com" \\ - -H "Content-Type: application/json" \\ - http://kfserving-ingressgateway.istio-system/v1/models/kf-cifar10:predict - """ - run(cmd, shell=True) - ''' - - block7 = ''' - def get_drift_event_display_logs(): - cmd=f"kubectl logs $(kubectl get pod -l app=event-display -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE}) -n {DEPLOY_NAMESPACE}" - ret = Popen(cmd, shell=True,stdout=PIPE) - res = ret.stdout.read().decode("utf-8").split("\\n") - data= [] - for i in range(0,len(res)): - if res[i] == 'Data,': - j = json.loads(json.loads(res[i+1])) - if "is_drift" in j["data"].keys(): - data.append(j) - if len(data) > 0: - return data[-1] - else: - 
return None - j = None - for i in range(0,1000,50): - X = X_corr[i:i+50] - predict(X) - print("Waiting for drift logs, sleeping") - time.sleep(2) - j = get_drift_event_display_logs() - if j is not None: - break - - print(j) - print("Drift",j["data"]["is_drift"]==1) - ''' - - block8 = ''' - - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - ) - html_artifact = _kale_run_code(blocks) - with open("/test_drift_detector.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('test_drift_detector') - - _kale_mlmd_utils.call("mark_execution_complete") - - -setup_op = comp.func_to_container_op( - setup, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -train_model_and_explainer_op = comp.func_to_container_op( - train_model_and_explainer, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_model_op = comp.func_to_container_op( - deploy_model, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -test_model_and_explainer_op = comp.func_to_container_op( - test_model_and_explainer, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -train_drift_detector_op = comp.func_to_container_op( - train_drift_detector, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -train_outlier_detector_op = comp.func_to_container_op( - train_outlier_detector, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_event_display_op = comp.func_to_container_op( - deploy_event_display, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_outlier_detector_op = comp.func_to_container_op( - deploy_outlier_detector, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -test_oulier_detection_op = comp.func_to_container_op( - test_oulier_detection, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_drift_detector_op = comp.func_to_container_op( - deploy_drift_detector, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -test_drift_detector_op = comp.func_to_container_op( - test_drift_detector, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -@dsl.pipeline( - name='kfserving-e2e-cifar10-m0cya', - description='KFServing CIFAR10 Example' -) -def auto_generated_pipeline(CIFAR10_MODEL_PATH='tfserving/cifar10/model', DEPLOY_NAMESPACE='admin', DRIFT_MODEL_PATH='tfserving/cifar10/drift', EXPLAINER_MODEL_PATH='tfserving/cifar10/explainer', MINIO_ACCESS_KEY='minio', MINIO_HOST='minio-service.kubeflow:9000', MINIO_MODEL_BUCKET='seldon', MINIO_SECRET_KEY='minio123', OUTLIER_MODEL_PATH='tfserving/cifar10/outlier', TRAIN_DRIFT_DETECTOR='False', TRAIN_OUTLIER_DETECTOR='False'): - pvolumes_dict = OrderedDict() - volume_step_names = [] - volume_name_parameters = [] - - marshal_vop = dsl.VolumeOp( - name="kale-marshal-volume", - resource_name="kale-marshal-pvc", - modes=dsl.VOLUME_MODE_RWM, - size="1Gi" - ) - volume_step_names.append(marshal_vop.name) - volume_name_parameters.append(marshal_vop.outputs["name"].full_name) - pvolumes_dict['/marshal'] = marshal_vop.volume - - volume_step_names.sort() - volume_name_parameters.sort() - - setup_task = setup_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after() - setup_task.container.working_dir = "/home/jovyan" - setup_task.container.set_security_context( - 
k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'setup': '/setup.html'}) - setup_task.output_artifact_paths.update(output_artifacts) - setup_task.add_pod_label("pipelines.kubeflow.org/metadata_written", "true") - dep_names = setup_task.dependent_names + volume_step_names - setup_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - setup_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - train_model_and_explainer_task = train_model_and_explainer_op(CIFAR10_MODEL_PATH, EXPLAINER_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(setup_task) - train_model_and_explainer_task.container.working_dir = "/home/jovyan" - train_model_and_explainer_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'train_model_and_explainer': '/train_model_and_explainer.html'}) - train_model_and_explainer_task.output_artifact_paths.update( - output_artifacts) - train_model_and_explainer_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = train_model_and_explainer_task.dependent_names + volume_step_names - train_model_and_explainer_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - train_model_and_explainer_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_model_task = deploy_model_op(CIFAR10_MODEL_PATH, DEPLOY_NAMESPACE, EXPLAINER_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_model_and_explainer_task) - deploy_model_task.container.working_dir = "/home/jovyan" - deploy_model_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'deploy_model': '/deploy_model.html'}) - deploy_model_task.output_artifact_paths.update(output_artifacts) - deploy_model_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_model_task.dependent_names + volume_step_names - deploy_model_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_model_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - test_model_and_explainer_task = test_model_and_explainer_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_model_task) - test_model_and_explainer_task.container.working_dir = "/home/jovyan" - test_model_and_explainer_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'test_model_and_explainer': '/test_model_and_explainer.html'}) - test_model_and_explainer_task.output_artifact_paths.update( - output_artifacts) - 
test_model_and_explainer_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = test_model_and_explainer_task.dependent_names + volume_step_names - test_model_and_explainer_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - test_model_and_explainer_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - train_drift_detector_task = train_drift_detector_op(DRIFT_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_model_and_explainer_task) - train_drift_detector_task.container.working_dir = "/home/jovyan" - train_drift_detector_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'train_drift_detector': '/train_drift_detector.html'}) - train_drift_detector_task.output_artifact_paths.update(output_artifacts) - train_drift_detector_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = train_drift_detector_task.dependent_names + volume_step_names - train_drift_detector_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - train_drift_detector_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - train_outlier_detector_task = train_outlier_detector_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH, TRAIN_OUTLIER_DETECTOR)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_model_and_explainer_task) - train_outlier_detector_task.container.working_dir = "/home/jovyan" - train_outlier_detector_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'train_outlier_detector': '/train_outlier_detector.html'}) - train_outlier_detector_task.output_artifact_paths.update(output_artifacts) - train_outlier_detector_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = train_outlier_detector_task.dependent_names + volume_step_names - train_outlier_detector_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - train_outlier_detector_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_event_display_task = deploy_event_display_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_drift_detector_task, train_outlier_detector_task, test_model_and_explainer_task) - deploy_event_display_task.container.working_dir = "/home/jovyan" - deploy_event_display_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'deploy_event_display': '/deploy_event_display.html'}) - deploy_event_display_task.output_artifact_paths.update(output_artifacts) - deploy_event_display_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = 
deploy_event_display_task.dependent_names + volume_step_names - deploy_event_display_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_event_display_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_outlier_detector_task = deploy_outlier_detector_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_event_display_task) - deploy_outlier_detector_task.container.working_dir = "/home/jovyan" - deploy_outlier_detector_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'deploy_outlier_detector': '/deploy_outlier_detector.html'}) - deploy_outlier_detector_task.output_artifact_paths.update(output_artifacts) - deploy_outlier_detector_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_outlier_detector_task.dependent_names + volume_step_names - deploy_outlier_detector_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_outlier_detector_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - test_oulier_detection_task = test_oulier_detection_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_outlier_detector_task) - test_oulier_detection_task.container.working_dir = "/home/jovyan" - test_oulier_detection_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'test_oulier_detection': '/test_oulier_detection.html'}) - test_oulier_detection_task.output_artifact_paths.update(output_artifacts) - test_oulier_detection_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = test_oulier_detection_task.dependent_names + volume_step_names - test_oulier_detection_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - test_oulier_detection_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_drift_detector_task = deploy_drift_detector_op(DEPLOY_NAMESPACE, DRIFT_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(test_oulier_detection_task) - deploy_drift_detector_task.container.working_dir = "/home/jovyan" - deploy_drift_detector_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'deploy_drift_detector': '/deploy_drift_detector.html'}) - deploy_drift_detector_task.output_artifact_paths.update(output_artifacts) - deploy_drift_detector_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_drift_detector_task.dependent_names + volume_step_names - deploy_drift_detector_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", 
json.dumps(dep_names))
-    if volume_name_parameters:
-        deploy_drift_detector_task.add_pod_annotation(
-            "kubeflow-kale.org/volume-name-parameters",
-            json.dumps(volume_name_parameters))
-
-    test_drift_detector_task = test_drift_detector_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\
-        .add_pvolumes(pvolumes_dict)\
-        .after(deploy_drift_detector_task)
-    test_drift_detector_task.container.working_dir = "/home/jovyan"
-    test_drift_detector_task.container.set_security_context(
-        k8s_client.V1SecurityContext(run_as_user=0))
-    output_artifacts = {}
-    output_artifacts.update(
-        {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'})
-    output_artifacts.update(
-        {'test_drift_detector': '/test_drift_detector.html'})
-    test_drift_detector_task.output_artifact_paths.update(output_artifacts)
-    test_drift_detector_task.add_pod_label(
-        "pipelines.kubeflow.org/metadata_written", "true")
-    dep_names = test_drift_detector_task.dependent_names + volume_step_names
-    test_drift_detector_task.add_pod_annotation(
-        "kubeflow-kale.org/dependent-templates", json.dumps(dep_names))
-    if volume_name_parameters:
-        test_drift_detector_task.add_pod_annotation(
-            "kubeflow-kale.org/volume-name-parameters",
-            json.dumps(volume_name_parameters))
-
-
-if __name__ == "__main__":
-    pipeline_func = auto_generated_pipeline
-    pipeline_filename = pipeline_func.__name__ + '.pipeline.tar.gz'
-    import kfp.compiler as compiler
-    compiler.Compiler().compile(pipeline_func, pipeline_filename)
-
-    # Get or create an experiment and submit a pipeline run
-    import kfp
-    client = kfp.Client()
-    experiment = client.create_experiment('kfserving-e2e-cifar10')
-
-    # Submit a pipeline run
-    from kale.utils.kfp_utils import generate_run_name
-    run_name = generate_run_name('kfserving-e2e-cifar10-m0cya')
-    run_result = client.run_pipeline(
-        experiment.id, run_name, pipeline_filename, {})
diff --git a/samples/contrib/e2e-outlier-drift-explainer/kfserving/kfserving_e2e_cifar10.kale.nfs.py b/samples/contrib/e2e-outlier-drift-explainer/kfserving/kfserving_e2e_cifar10.kale.nfs.py
deleted file mode 100644
index c84cea29433..00000000000
--- a/samples/contrib/e2e-outlier-drift-explainer/kfserving/kfserving_e2e_cifar10.kale.nfs.py
+++ /dev/null
@@ -1,2001 +0,0 @@
-import kfp.dsl as dsl
-import json
-import kfp.components as comp
-from collections import OrderedDict
-from kubernetes import client as k8s_client
-
-
-def setup(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str):
-    pipeline_parameters_block = '''
-    MINIO_ACCESS_KEY = "{}"
-    MINIO_HOST = "{}"
-    MINIO_MODEL_BUCKET = "{}"
-    MINIO_SECRET_KEY = "{}"
-    '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)
-
-    from kale.utils import mlmd_utils as _kale_mlmd_utils
-    _kale_mlmd_utils.init_metadata()
-
-    block1 = '''
-    import numpy as np
-    from sklearn.ensemble import RandomForestClassifier
-    from sklearn.compose import ColumnTransformer
-    from sklearn.pipeline import Pipeline
-    from sklearn.impute import SimpleImputer
-    from sklearn.metrics import accuracy_score
-    from sklearn.preprocessing import StandardScaler, OneHotEncoder
-    from alibi.explainers import AnchorImage
-    from alibi.datasets import fetch_adult
-    from minio import Minio
-    from minio.error import ResponseError
-    from joblib import dump, load
-    import dill
-    from subprocess import run, Popen, PIPE
-    from alibi_detect.utils.data import create_outlier_batch
-    from alibi_detect.utils.fetching import fetch_tf_model
-    import json
-    import logging
-    import 
matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - minioClient = get_minio() - buckets = minioClient.list_buckets() - for bucket in buckets: - print(bucket.name, bucket.creation_date) - ''' - - block4 = ''' - if not minioClient.bucket_exists(MINIO_MODEL_BUCKET): - minioClient.make_bucket(MINIO_MODEL_BUCKET) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/setup.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('setup') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def train_model_and_explainer(CIFAR10_MODEL_PATH: str, EXPLAINER_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - CIFAR10_MODEL_PATH = "{}" - EXPLAINER_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(CIFAR10_MODEL_PATH, EXPLAINER_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def 
get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - model = fetch_tf_model('cifar10', 'resnet32') - ''' - - block4 = ''' - train, test = tf.keras.datasets.cifar10.load_data() - X_train, y_train = train - X_test, y_test = test - - X_train = X_train.astype('float32') / 255 - X_test = X_test.astype('float32') / 255 - print(X_train.shape, y_train.shape, X_test.shape, y_test.shape) - ''' - - block5 = ''' - class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer', - 'dog', 'frog', 'horse', 'ship', 'truck'] - ''' - - block6 = ''' - idx = 1 - X = X_test[idx].reshape(1, 32, 32, 3) - plt.imshow(X.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - print("class:",class_names[y_test[idx][0]]) - print("prediction:",class_names[model.predict(X_test[idx:idx+1])[0].argmax()]) - ''' - - block7 = ''' - modelfilepath="resnet" - tf.saved_model.save(model, modelfilepath) - ''' - - block8 = ''' - from os import listdir - from os.path import isfile, join - - model_filepath="resnet" - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{CIFAR10_MODEL_PATH}/1/saved_model.pb", modelfilepath+"/saved_model.pb")) - variable_filepath = modelfilepath+"/variables" - onlyfiles = [f for f in listdir(variable_filepath) if isfile(join(variable_filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{CIFAR10_MODEL_PATH}/1/variables/{filename}", join(variable_filepath, filename))) - ''' - - block9 = ''' - def predict_fn(x): - return model.predict(x) - ''' - - block10 = ''' - - image_shape = (32, 32, 3) - segmentation_fn = 'slic' - kwargs = {'n_segments': 5, 'compactness': 20, 'sigma': .5} - explainer = AnchorImage(predict_fn, image_shape, segmentation_fn=segmentation_fn, - segmentation_kwargs=kwargs, images_background=None) - ''' - - block11 = ''' - idx=0 - image = X_test[0] - np.random.seed(0) - explanation = explainer.explain(image, threshold=.95, p_sample=.5, tau=0.25) - ''' - - block12 = ''' - X = X_test[idx].reshape(1, 32, 32, 3) - plt.imshow(X.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - print("class:",class_names[y_test[idx][0]]) - print("prediction:",class_names[model.predict(X_test[idx:idx+1])[0].argmax()]) - ''' - - block13 = ''' - plt.imshow(explanation["anchor"]) - ''' - - block14 = ''' - with open("explainer.dill", "wb") as dill_file: - dill.dump(explainer, dill_file) - dill_file.close() - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{EXPLAINER_MODEL_PATH}/explainer.dill", 'explainer.dill')) - ''' - - data_saving_block = ''' - # -----------------------DATA SAVING START--------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.save(X_test, "X_test") - _kale_marshal_utils.save(X_train, "X_train") - _kale_marshal_utils.save(class_names, "class_names") - _kale_marshal_utils.save(explanation, "explanation") - _kale_marshal_utils.save(y_test, "y_test") - _kale_marshal_utils.save(y_train, "y_train") - # -----------------------DATA SAVING END----------------------------------- - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - block9, - block10, - block11, - block12, - 
block13, - block14, - data_saving_block) - html_artifact = _kale_run_code(blocks) - with open("/train_model_and_explainer.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('train_model_and_explainer') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_model(CIFAR10_MODEL_PATH: str, DEPLOY_NAMESPACE: str, EXPLAINER_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - CIFAR10_MODEL_PATH = "{}" - DEPLOY_NAMESPACE = "{}" - EXPLAINER_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(CIFAR10_MODEL_PATH, DEPLOY_NAMESPACE, EXPLAINER_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - secret=f"""apiVersion: v1 - kind: Secret - metadata: - name: cifar10-kf-secret - namespace: {DEPLOY_NAMESPACE} - annotations: - serving.kubeflow.org/s3-endpoint: {MINIO_HOST} # replace with your s3 endpoint - serving.kubeflow.org/s3-usehttps: "0" # by default 1, for testing with minio you need to set to 0 - type: Opaque - stringData: - awsAccessKeyID: {MINIO_ACCESS_KEY} - awsSecretAccessKey: {MINIO_SECRET_KEY} - """ - with open("secret.yaml","w") as f: - f.write(secret) - run("kubectl apply -f secret.yaml", shell=True) - ''' - - block4 = ''' - secret = f"""apiVersion: v1 - kind: Secret - metadata: - name: seldon-init-container-secret - namespace: {DEPLOY_NAMESPACE} - type: Opaque - stringData: - AWS_ACCESS_KEY_ID: {MINIO_ACCESS_KEY} - AWS_SECRET_ACCESS_KEY: {MINIO_SECRET_KEY} - AWS_ENDPOINT_URL: http://{MINIO_HOST} - USE_SSL: "false" - """ - with open("secret.yaml","w") as f: - f.write(secret) - run("cat secret.yaml | kubectl apply -f -", shell=True) - ''' - - block5 = ''' - sa = f"""apiVersion: v1 - kind: ServiceAccount - metadata: - name: minio-kf-sa - namespace: 
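The deployment step writes the S3 credential Secret and the ServiceAccount as f-string YAML and pipes them through `kubectl apply`. As an alternative sketch only (an assumption, not what the sample does), the same Secret can be created with the Kubernetes Python client that the sample already imports; the names and values shown are the pipeline's defaults.

```python
# Hedged sketch: create the KFServing S3-credentials Secret via the Kubernetes Python
# client instead of templating YAML and shelling out. Values are the sample defaults.
from kubernetes import client, config

config.load_incluster_config()  # use config.load_kube_config() when running outside the cluster

secret = client.V1Secret(
    metadata=client.V1ObjectMeta(
        name="cifar10-kf-secret",
        namespace="admin",
        annotations={
            "serving.kubeflow.org/s3-endpoint": "minio-service.kubeflow:9000",
            "serving.kubeflow.org/s3-usehttps": "0",  # MinIO in this sample is plain HTTP
        },
    ),
    type="Opaque",
    string_data={"awsAccessKeyID": "minio", "awsSecretAccessKey": "minio123"},
)
client.CoreV1Api().create_namespaced_secret(namespace="admin", body=secret)
```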
{DEPLOY_NAMESPACE} - secrets: - - name: cifar10-kf-secret - """ - with open("sa.yaml","w") as f: - f.write(sa) - run("kubectl apply -f sa.yaml", shell=True) - ''' - - block6 = ''' - from kubernetes import client - from kfserving import KFServingClient - from kfserving import constants - from kfserving import utils - from kfserving import V1alpha2EndpointSpec - from kfserving import V1alpha2PredictorSpec - from kfserving import V1alpha2ExplainerSpec - from kfserving import V1alpha2AlibiExplainerSpec - from kfserving import V1alpha2TensorflowSpec - from kfserving import V1alpha2InferenceServiceSpec - from kfserving import V1alpha2InferenceService - from kfserving import V1alpha2Logger - from kubernetes.client import V1ResourceRequirements - - api_version = constants.KFSERVING_GROUP + '/' + constants.KFSERVING_VERSION - default_endpoint_spec = V1alpha2EndpointSpec( - predictor=V1alpha2PredictorSpec( - service_account_name='minio-kf-sa', - tensorflow=V1alpha2TensorflowSpec( - storage_uri='s3://'+MINIO_MODEL_BUCKET+'/'+ CIFAR10_MODEL_PATH, - resources=V1ResourceRequirements( - requests={'cpu':'100m','memory':'1Gi'}, - limits={'cpu':'100m', 'memory':'1Gi'})), - logger=V1alpha2Logger( - mode='all' - )), - explainer=V1alpha2ExplainerSpec( - service_account_name='minio-kf-sa', - alibi=V1alpha2AlibiExplainerSpec( - type='AnchorImages', - storage_uri='s3://'+MINIO_MODEL_BUCKET+'/'+ EXPLAINER_MODEL_PATH, - resources=V1ResourceRequirements( - requests={'cpu':'100m','memory':'1Gi'}, - limits={'cpu':'100m', 'memory':'1Gi'})))) - - isvc = V1alpha2InferenceService(api_version=api_version, - kind=constants.KFSERVING_KIND, - metadata=client.V1ObjectMeta( - name='kf-cifar10', namespace=DEPLOY_NAMESPACE), - spec=V1alpha2InferenceServiceSpec(default=default_endpoint_spec)) - ''' - - block7 = ''' - KFServing = KFServingClient() - KFServing.create(isvc) - ''' - - block8 = ''' - KFServing.get('kf-cifar10', namespace=DEPLOY_NAMESPACE, watch=True, timeout_seconds=240) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_model.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_model') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def test_model_and_explainer(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_test = _kale_marshal_utils.load("X_test") - class_names = _kale_marshal_utils.load("class_names") - explanation = _kale_marshal_utils.load("explanation") - y_test = _kale_marshal_utils.load("y_test") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import 
ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - def test_model(): - idx=10 - test_example=X_test[idx:idx+1].tolist() - payload='{"instances":'+f"{test_example}"+' }' - cmd=f"""curl -v -d '{payload}' \\ - -H "Host: kf-cifar10.admin.example.com" \\ - -H "Content-Type: application/json" \\ - http://kfserving-ingressgateway.istio-system/v1/models/kf-cifar10:predict - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - print(raw) - res=json.loads(raw) - arr=np.array(res["predictions"]) - X = X_test[idx].reshape(1, 32, 32, 3) - plt.imshow(X.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - print("class:",class_names[y_test[idx][0]]) - print("prediction:",class_names[arr[0].argmax()]) - - ok = False - while not ok: - try: - test_model() - ok = True - except: - print("Failed calling model, sleeping") - time.sleep(2) - ''' - - block4 = ''' - idx=1 - test_example=X_test[idx:idx+1].tolist() - payload='{"instances":'+f"{test_example}"+' }' - cmd=f"""curl -v -d '{payload}' \\ - -H "Host: kf-cifar10.admin.example.com" \\ - -H "Content-Type: application/json" \\ - http://kfserving-ingressgateway.istio-system/v1/models/kf-cifar10:explain - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - res=json.loads(raw) - plt.imshow(np.array(explanation["anchor"])) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/test_model_and_explainer.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('test_model_and_explainer') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def train_drift_detector(DRIFT_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DRIFT_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = 
"{}" - '''.format(DRIFT_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_test = _kale_marshal_utils.load("X_test") - y_test = _kale_marshal_utils.load("y_test") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - import matplotlib.pyplot as plt - import numpy as np - import os - import tensorflow as tf - from tensorflow.keras.layers import Conv2D, Dense, Flatten, InputLayer, Reshape - - from alibi_detect.cd import KSDrift - from alibi_detect.cd.preprocess import uae, hidden_output - from alibi_detect.models.resnet import scale_by_instance - from alibi_detect.utils.fetching import fetch_tf_model, fetch_detector - from alibi_detect.utils.prediction import predict_batch - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.datasets import fetch_cifar10c, corruption_types_cifar10c - ''' - - block4 = ''' - tf.random.set_seed(0) - - if True: - np.random.seed(0) - n_test = X_test.shape[0] - idx = np.random.choice(n_test, size=n_test // 2, replace=False) - idx_h0 = np.delete(np.arange(n_test), idx, axis=0) - X_ref,y_ref = X_test[idx], y_test[idx] - X_h0, y_h0 = X_test[idx_h0], y_test[idx_h0] - print(X_ref.shape, X_h0.shape) - # define encoder - encoding_dim = 32 - encoder_net = tf.keras.Sequential( - [ - InputLayer(input_shape=(32, 32, 3)), - Conv2D(64, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2D(128, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2D(512, 4, strides=2, padding='same', activation=tf.nn.relu), - Flatten(), - Dense(encoding_dim,) - ] - ) - - # initialise drift detector - 
p_val = .05 - cd = KSDrift( - p_val=p_val, # p-value for K-S test - X_ref=X_ref, # test against original test set - preprocess_fn=uae, # UAE for dimensionality reduction - preprocess_kwargs={'encoder_net': encoder_net, 'batch_size': 128}, - alternative='two-sided' # other options: 'less', 'greater' - ) - else: - cd = load_detector("/home/models/samples/cd/cifar10") - ''' - - block5 = ''' - from alibi_detect.utils.saving import save_detector, load_detector - from os import listdir - from os.path import isfile, join - - filepath="cifar10Drift" - save_detector(cd, filepath) - onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{DRIFT_MODEL_PATH}/{filename}", join(filepath, filename))) - filepath="cifar10Drift/model" - onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{DRIFT_MODEL_PATH}/model/{filename}", join(filepath, filename))) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - block5, - ) - html_artifact = _kale_run_code(blocks) - with open("/train_drift_detector.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('train_drift_detector') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def train_outlier_detector(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str, OUTLIER_MODEL_PATH: str, TRAIN_OUTLIER_DETECTOR: bool): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - OUTLIER_MODEL_PATH = "{}" - TRAIN_OUTLIER_DETECTOR = {} - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH, TRAIN_OUTLIER_DETECTOR) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_train = _kale_marshal_utils.load("X_train") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - 
- from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - import logging - import matplotlib.pyplot as plt - import numpy as np - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block4 = ''' - if TRAIN_OUTLIER_DETECTOR: - latent_dim = 1024 - - encoder_net = tf.keras.Sequential( - [ - InputLayer(input_shape=(32, 32, 3)), - Conv2D(64, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2D(128, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2D(512, 4, strides=2, padding='same', activation=tf.nn.relu) - ]) - - decoder_net = tf.keras.Sequential( - [ - InputLayer(input_shape=(latent_dim,)), - Dense(4*4*128), - Reshape(target_shape=(4, 4, 128)), - Conv2DTranspose(256, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2DTranspose(64, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2DTranspose(3, 4, strides=2, padding='same', activation='sigmoid') - ]) - - # initialize outlier detector - od = OutlierVAE(threshold=.015, # threshold for outlier score - score_type='mse', # use MSE of reconstruction error for outlier detection - encoder_net=encoder_net, # can also pass VAE model instead - decoder_net=decoder_net, # of separate encoder and decoder - latent_dim=latent_dim, - samples=2) - # train - od.fit(X_train, - loss_fn=elbo, - cov_elbo=dict(sim=.05), - epochs=50, - verbose=True) - else: - od = load_detector("/home/models/samples/od/cifar10") - ''' - - block5 = ''' - idx = 8 - X = X_train[idx].reshape(1, 32, 32, 3) - X_recon = od.vae(X) - plt.imshow(X.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - plt.imshow(X_recon.numpy().reshape(32, 32, 3)) - plt.axis('off') - plt.show() - ''' - - block6 = ''' - X = X_train[:500] - print(X.shape) - od_preds = od.predict(X, - outlier_type='instance', # use 'feature' or 'instance' level - return_feature_score=True, # scores used to determine outliers - return_instance_score=True) - print(list(od_preds['data'].keys())) - target = np.zeros(X.shape[0],).astype(int) # all normal CIFAR10 training instances - labels = ['normal', 'outlier'] - plot_instance_score(od_preds, target, labels, od.threshold) - ''' - - block7 = ''' - from alibi_detect.utils.saving import save_detector, load_detector - from os import listdir - from os.path import isfile, join - - filepath="cifar10outlier" - save_detector(od, filepath) - onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))] - for filename in onlyfiles: - print(filename) - 
print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{OUTLIER_MODEL_PATH}/{filename}", join(filepath, filename))) - filepath="cifar10outlier/model" - onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{OUTLIER_MODEL_PATH}/model/{filename}", join(filepath, filename))) - ''' - - data_saving_block = ''' - # -----------------------DATA SAVING START--------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.save(X_train, "X_train") - # -----------------------DATA SAVING END----------------------------------- - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - data_saving_block) - html_artifact = _kale_run_code(blocks) - with open("/train_outlier_detector.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('train_outlier_detector') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_event_display(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - event_display=f"""apiVersion: apps/v1 - kind: Deployment - metadata: - name: event-display - namespace: {DEPLOY_NAMESPACE} - spec: - replicas: 1 - selector: - matchLabels: &labels - app: event-display - template: - metadata: - labels: *labels - spec: - containers: 
- - name: helloworld-go - # Source code: https://github.com/knative/eventing-contrib/tree/master/cmd/event_display - image: gcr.io/knative-releases/knative.dev/eventing-contrib/cmd/event_display@sha256:f4628e97a836c77ed38bd3b6fd3d0b06de4d5e7db6704772fe674d48b20bd477 - --- - kind: Service - apiVersion: v1 - metadata: - name: event-display - namespace: {DEPLOY_NAMESPACE} - spec: - selector: - app: event-display - ports: - - protocol: TCP - port: 80 - targetPort: 8080 - --- - apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: cifar10-outlier-display - namespace: {DEPLOY_NAMESPACE} - spec: - broker: default - filter: - attributes: - type: org.kubeflow.serving.inference.outlier - subscriber: - ref: - apiVersion: v1 - kind: Service - name: event-display - --- - apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: cifar10-drift-display - namespace: {DEPLOY_NAMESPACE} - spec: - broker: default - filter: - attributes: - type: org.kubeflow.serving.inference.drift - subscriber: - ref: - apiVersion: v1 - kind: Service - name: event-display - """ - with open("event_display.yaml","w") as f: - f.write(event_display) - run("kubectl apply -f event_display.yaml", shell=True) - ''' - - block4 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/event-display -n {DEPLOY_NAMESPACE}", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_event_display.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_event_display') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_outlier_detector(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str, OUTLIER_MODEL_PATH: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - OUTLIER_MODEL_PATH = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from 
alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - outlier_yaml=f"""apiVersion: serving.knative.dev/v1 - kind: Service - metadata: - name: cifar10-outlier - namespace: {DEPLOY_NAMESPACE} - spec: - template: - metadata: - annotations: - autoscaling.knative.dev/minScale: "1" - spec: - containers: - - image: seldonio/alibi-detect-server:1.2.1 - imagePullPolicy: IfNotPresent - args: - - --model_name - - cifar10od - - --protocol - - tensorflow.http - - --storage_uri - - s3://{MINIO_MODEL_BUCKET}/{OUTLIER_MODEL_PATH} - - --reply_url - - http://default-broker - - --event_type - - org.kubeflow.serving.inference.outlier - - --event_source - - org.kubeflow.serving.cifar10od - - OutlierDetector - envFrom: - - secretRef: - name: seldon-init-container-secret - """ - with open("outlier.yaml","w") as f: - f.write(outlier_yaml) - run("kubectl apply -f outlier.yaml", shell=True) - ''' - - block4 = ''' - trigger_outlier_yaml=f"""apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: cifar10-outlier-trigger - namespace: {DEPLOY_NAMESPACE} - spec: - filter: - sourceAndType: - type: org.kubeflow.serving.inference.request - subscriber: - ref: - apiVersion: serving.knative.dev/v1 - kind: Service - name: cifar10-outlier - """ - with open("outlier_trigger.yaml","w") as f: - f.write(trigger_outlier_yaml) - run("kubectl apply -f outlier_trigger.yaml", shell=True) - ''' - - block5 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l serving.knative.dev/service=cifar10-outlier -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_outlier_detector.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_outlier_detector') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def test_oulier_detection(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_train = _kale_marshal_utils.load("X_train") - class_names = _kale_marshal_utils.load("class_names") - y_train = _kale_marshal_utils.load("y_train") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import 
RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - idx = 1 - X = X_train[idx:idx+1] - ''' - - block4 = ''' - np.random.seed(0) - X_mask, mask = apply_mask(X.reshape(1, 32, 32, 3), - mask_size=(10,10), - n_masks=1, - channels=[0,1,2], - mask_type='normal', - noise_distr=(0,1), - clip_rng=(0,1)) - ''' - - block5 = ''' - def predict(): - test_example=X_mask.tolist() - payload='{"instances":'+f"{test_example}"+' }' - cmd=f"""curl -v -d '{payload}' \\ - -H "Host: kf-cifar10.admin.example.com" \\ - -H "Content-Type: application/json" \\ - http://kfserving-ingressgateway.istio-system/v1/models/kf-cifar10:predict - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - print(raw) - res=json.loads(raw) - arr=np.array(res["predictions"]) - plt.imshow(X_mask.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - print("class:",class_names[y_train[idx][0]]) - print("prediction:",class_names[arr[0].argmax()]) - ''' - - block6 = ''' - def get_outlier_event_display_logs(): - cmd=f"kubectl logs $(kubectl get pod -l app=event-display -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE}) -n {DEPLOY_NAMESPACE}" - ret = Popen(cmd, shell=True,stdout=PIPE) - res = ret.stdout.read().decode("utf-8").split("\\n") - data= [] - for i in range(0,len(res)): - if res[i] == 'Data,': - j = json.loads(json.loads(res[i+1])) - if "is_outlier"in j["data"].keys(): - data.append(j) - if len(data) > 0: - return data[-1] - else: - return None - j = None - while j is None: - predict() - print("Waiting for outlier logs, sleeping") - time.sleep(2) - j = get_outlier_event_display_logs() - - print(j) - print("Outlier",j["data"]["is_outlier"]==[1]) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - block5, - block6, - ) - html_artifact = _kale_run_code(blocks) - with open("/test_oulier_detection.html", "w") as f: - f.write(html_artifact) - 
_kale_update_uimetadata('test_oulier_detection') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_drift_detector(DEPLOY_NAMESPACE: str, DRIFT_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - DRIFT_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, DRIFT_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - drift_yaml=f"""apiVersion: serving.knative.dev/v1 - kind: Service - metadata: - name: cifar10-drift - namespace: {DEPLOY_NAMESPACE} - spec: - template: - metadata: - annotations: - autoscaling.knative.dev/minScale: "1" - spec: - containers: - - image: seldonio/alibi-detect-server:1.2.2-dev - imagePullPolicy: IfNotPresent - args: - - --model_name - - cifar10cd - - --protocol - - tensorflow.http - - --storage_uri - - s3://{MINIO_MODEL_BUCKET}/{DRIFT_MODEL_PATH} - - --reply_url - - http://default-broker - - --event_type - - org.kubeflow.serving.inference.drift - - --event_source - - org.kubeflow.serving.cifar10cd - - DriftDetector - - --drift_batch_size - - '500' - envFrom: - - secretRef: - name: seldon-init-container-secret - """ - with open("drift.yaml","w") as f: - f.write(drift_yaml) - run("kubectl apply -f drift.yaml", shell=True) - ''' - - block4 = ''' - trigger_outlier_yaml=f"""apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: cifar10-drift-trigger - namespace: {DEPLOY_NAMESPACE} - spec: - filter: - sourceAndType: - type: org.kubeflow.serving.inference.request - subscriber: - ref: - apiVersion: serving.knative.dev/v1 - kind: Service - name: cifar10-drift - """ - with open("outlier_trigger.yaml","w") as f: - f.write(trigger_outlier_yaml) - run("kubectl apply -f outlier_trigger.yaml", 
shell=True) - ''' - - block5 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l serving.knative.dev/service=cifar10-drift -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_drift_detector.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_drift_detector') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def test_drift_detector(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - def show(X): - plt.imshow(X.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - ''' - - block4 = ''' - from alibi_detect.datasets import fetch_cifar10c, corruption_types_cifar10c - corruption = ['motion_blur'] - X_corr, y_corr = fetch_cifar10c(corruption=corruption, severity=5, return_X_y=True) - X_corr = X_corr.astype('float32') / 255 - ''' - - block5 = ''' - show(X_corr[0]) - show(X_corr[1]) - show(X_corr[2]) - ''' - - block6 = ''' - def predict(X): - test_example=X.tolist() - payload='{"instances":'+f"{test_example}"+' }' - with open("payload.json","w") as f: - f.write(payload) - cmd=f"""curl -d @./payload.json \\ - -H "Host: kf-cifar10.admin.example.com" \\ - -H "Content-Type: application/json" \\ - http://kfserving-ingressgateway.istio-system/v1/models/kf-cifar10:predict - """ - run(cmd, shell=True) - ''' - 
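The prediction helpers in these test steps build the JSON body by string concatenation and shell out to curl. As a hedged alternative sketch (it assumes the `requests` package is available in the notebook image, which the sample itself does not use), the same TensorFlow-Serving-style request can be sent directly from Python, making explicit the `Host` header that routes the call through the Istio ingress gateway.

```python
# Hedged sketch: call the KFServing predict endpoint with `requests` instead of curl.
# Gateway URL, Host header and model name are taken from the sample; `requests` is an assumption.
import numpy as np
import requests

GATEWAY = "http://kfserving-ingressgateway.istio-system"
MODEL_HOST = "kf-cifar10.admin.example.com"  # Knative/Istio routes on this Host header

def predict_batch_http(x: np.ndarray) -> np.ndarray:
    """POST a batch of images as {"instances": [...]} and return the predictions array."""
    resp = requests.post(
        f"{GATEWAY}/v1/models/kf-cifar10:predict",
        json={"instances": x.tolist()},
        headers={"Host": MODEL_HOST},
        timeout=60,
    )
    resp.raise_for_status()
    return np.array(resp.json()["predictions"])

# Example usage (hypothetical): preds = predict_batch_http(X_corr[:50]); print(preds.argmax(axis=1))
```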
- block7 = ''' - def get_drift_event_display_logs(): - cmd=f"kubectl logs $(kubectl get pod -l app=event-display -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE}) -n {DEPLOY_NAMESPACE}" - ret = Popen(cmd, shell=True,stdout=PIPE) - res = ret.stdout.read().decode("utf-8").split("\\n") - data= [] - for i in range(0,len(res)): - if res[i] == 'Data,': - j = json.loads(json.loads(res[i+1])) - if "is_drift" in j["data"].keys(): - data.append(j) - if len(data) > 0: - return data[-1] - else: - return None - j = None - for i in range(0,1000,50): - X = X_corr[i:i+50] - predict(X) - print("Waiting for drift logs, sleeping") - time.sleep(2) - j = get_drift_event_display_logs() - if j is not None: - break - - print(j) - print("Drift",j["data"]["is_drift"]==1) - ''' - - block8 = ''' - - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - ) - html_artifact = _kale_run_code(blocks) - with open("/test_drift_detector.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('test_drift_detector') - - _kale_mlmd_utils.call("mark_execution_complete") - - -setup_op = comp.func_to_container_op( - setup, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -train_model_and_explainer_op = comp.func_to_container_op( - train_model_and_explainer, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_model_op = comp.func_to_container_op( - deploy_model, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -test_model_and_explainer_op = comp.func_to_container_op( - test_model_and_explainer, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -train_drift_detector_op = comp.func_to_container_op( - train_drift_detector, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -train_outlier_detector_op = comp.func_to_container_op( - train_outlier_detector, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_event_display_op = comp.func_to_container_op( - deploy_event_display, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_outlier_detector_op = comp.func_to_container_op( - deploy_outlier_detector, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -test_oulier_detection_op = comp.func_to_container_op( - test_oulier_detection, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_drift_detector_op = comp.func_to_container_op( - deploy_drift_detector, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -test_drift_detector_op = comp.func_to_container_op( - test_drift_detector, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -@dsl.pipeline( - name='kfserving-e2e-cifar10-m0cya', - description='KFServing CIFAR10 Example' -) -def auto_generated_pipeline(CIFAR10_MODEL_PATH='tfserving/cifar10/model', DEPLOY_NAMESPACE='admin', DRIFT_MODEL_PATH='tfserving/cifar10/drift', EXPLAINER_MODEL_PATH='tfserving/cifar10/explainer', MINIO_ACCESS_KEY='minio', MINIO_HOST='minio-service.kubeflow:9000', MINIO_MODEL_BUCKET='seldon', MINIO_SECRET_KEY='minio123', OUTLIER_MODEL_PATH='tfserving/cifar10/outlier', TRAIN_DRIFT_DETECTOR='False', TRAIN_OUTLIER_DETECTOR='False'): - pvolumes_dict = OrderedDict() - volume_step_names = [] - volume_name_parameters = [] - - marshal_vop = dsl.VolumeOp( - name="kale-marshal-volume", - resource_name="kale-marshal-pvc", - storage_class="nfs-client", - 
modes=dsl.VOLUME_MODE_RWM, - size="1Gi" - ) - volume_step_names.append(marshal_vop.name) - volume_name_parameters.append(marshal_vop.outputs["name"].full_name) - pvolumes_dict['/marshal'] = marshal_vop.volume - - volume_step_names.sort() - volume_name_parameters.sort() - - setup_task = setup_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after() - setup_task.container.working_dir = "/home/jovyan" - setup_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'setup': '/setup.html'}) - setup_task.output_artifact_paths.update(output_artifacts) - setup_task.add_pod_label("pipelines.kubeflow.org/metadata_written", "true") - dep_names = setup_task.dependent_names + volume_step_names - setup_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - setup_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - train_model_and_explainer_task = train_model_and_explainer_op(CIFAR10_MODEL_PATH, EXPLAINER_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(setup_task) - train_model_and_explainer_task.container.working_dir = "/home/jovyan" - train_model_and_explainer_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'train_model_and_explainer': '/train_model_and_explainer.html'}) - train_model_and_explainer_task.output_artifact_paths.update( - output_artifacts) - train_model_and_explainer_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = train_model_and_explainer_task.dependent_names + volume_step_names - train_model_and_explainer_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - train_model_and_explainer_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_model_task = deploy_model_op(CIFAR10_MODEL_PATH, DEPLOY_NAMESPACE, EXPLAINER_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_model_and_explainer_task) - deploy_model_task.container.working_dir = "/home/jovyan" - deploy_model_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'deploy_model': '/deploy_model.html'}) - deploy_model_task.output_artifact_paths.update(output_artifacts) - deploy_model_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_model_task.dependent_names + volume_step_names - deploy_model_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_model_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - test_model_and_explainer_task = test_model_and_explainer_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - 
.after(deploy_model_task) - test_model_and_explainer_task.container.working_dir = "/home/jovyan" - test_model_and_explainer_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'test_model_and_explainer': '/test_model_and_explainer.html'}) - test_model_and_explainer_task.output_artifact_paths.update( - output_artifacts) - test_model_and_explainer_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = test_model_and_explainer_task.dependent_names + volume_step_names - test_model_and_explainer_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - test_model_and_explainer_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - train_drift_detector_task = train_drift_detector_op(DRIFT_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_model_and_explainer_task) - train_drift_detector_task.container.working_dir = "/home/jovyan" - train_drift_detector_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'train_drift_detector': '/train_drift_detector.html'}) - train_drift_detector_task.output_artifact_paths.update(output_artifacts) - train_drift_detector_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = train_drift_detector_task.dependent_names + volume_step_names - train_drift_detector_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - train_drift_detector_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - train_outlier_detector_task = train_outlier_detector_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH, TRAIN_OUTLIER_DETECTOR)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_model_and_explainer_task) - train_outlier_detector_task.container.working_dir = "/home/jovyan" - train_outlier_detector_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'train_outlier_detector': '/train_outlier_detector.html'}) - train_outlier_detector_task.output_artifact_paths.update(output_artifacts) - train_outlier_detector_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = train_outlier_detector_task.dependent_names + volume_step_names - train_outlier_detector_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - train_outlier_detector_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_event_display_task = deploy_event_display_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_drift_detector_task, train_outlier_detector_task, test_model_and_explainer_task) - deploy_event_display_task.container.working_dir = "/home/jovyan" - 
deploy_event_display_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'deploy_event_display': '/deploy_event_display.html'}) - deploy_event_display_task.output_artifact_paths.update(output_artifacts) - deploy_event_display_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_event_display_task.dependent_names + volume_step_names - deploy_event_display_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_event_display_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_outlier_detector_task = deploy_outlier_detector_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_event_display_task) - deploy_outlier_detector_task.container.working_dir = "/home/jovyan" - deploy_outlier_detector_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'deploy_outlier_detector': '/deploy_outlier_detector.html'}) - deploy_outlier_detector_task.output_artifact_paths.update(output_artifacts) - deploy_outlier_detector_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_outlier_detector_task.dependent_names + volume_step_names - deploy_outlier_detector_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_outlier_detector_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - test_oulier_detection_task = test_oulier_detection_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_outlier_detector_task) - test_oulier_detection_task.container.working_dir = "/home/jovyan" - test_oulier_detection_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'test_oulier_detection': '/test_oulier_detection.html'}) - test_oulier_detection_task.output_artifact_paths.update(output_artifacts) - test_oulier_detection_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = test_oulier_detection_task.dependent_names + volume_step_names - test_oulier_detection_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - test_oulier_detection_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_drift_detector_task = deploy_drift_detector_op(DEPLOY_NAMESPACE, DRIFT_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(test_oulier_detection_task) - deploy_drift_detector_task.container.working_dir = "/home/jovyan" - deploy_drift_detector_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - 
{'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'deploy_drift_detector': '/deploy_drift_detector.html'}) - deploy_drift_detector_task.output_artifact_paths.update(output_artifacts) - deploy_drift_detector_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_drift_detector_task.dependent_names + volume_step_names - deploy_drift_detector_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_drift_detector_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - test_drift_detector_task = test_drift_detector_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_drift_detector_task) - test_drift_detector_task.container.working_dir = "/home/jovyan" - test_drift_detector_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'test_drift_detector': '/test_drift_detector.html'}) - test_drift_detector_task.output_artifact_paths.update(output_artifacts) - test_drift_detector_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = test_drift_detector_task.dependent_names + volume_step_names - test_drift_detector_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - test_drift_detector_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - -if __name__ == "__main__": - pipeline_func = auto_generated_pipeline - pipeline_filename = pipeline_func.__name__ + '.pipeline.tar.gz' - import kfp.compiler as compiler - compiler.Compiler().compile(pipeline_func, pipeline_filename) - - # Get or create an experiment and submit a pipeline run - import kfp - client = kfp.Client() - experiment = client.create_experiment('kfserving-e2e-cifar10') - - # Submit a pipeline run - from kale.utils.kfp_utils import generate_run_name - run_name = generate_run_name('kfserving-e2e-cifar10-m0cya') - run_result = client.run_pipeline( - experiment.id, run_name, pipeline_filename, {}) diff --git a/samples/contrib/e2e-outlier-drift-explainer/seldon/README.md b/samples/contrib/e2e-outlier-drift-explainer/seldon/README.md deleted file mode 100644 index c7b8ec1adf8..00000000000 --- a/samples/contrib/e2e-outlier-drift-explainer/seldon/README.md +++ /dev/null @@ -1,79 +0,0 @@ -# Seldon End to End Deployment Examples - - * [CIFAR10 Image Classifier with Explainer, Outlier detector and drift detector](#cifar10-image-classification-model) - * [Income Classifier with Explainer and Outlier Detector](#income-classification-model) - -## Running Pipelines - - * [See "tested on" section](../README.md#tested-on) - -## Running in Kubeflow Jupyter Lab - - 1. 
Create a new JupyterLab server using the image `seldonio/jupyter-lab-alibi-kale:0.11` - - -## CIFAR10 Image Classification Model - - * Seldon CIFAR10 Image Model with deployed explainer, outlier detector and drift detector - * [Kale annotated Jupyter Notebook](./seldon_e2e_cifar10.ipynb) - * [Pipeline](./seldon_e2e_cifar10.kale.nfs.py) - * Assumes `storage_class="nfs-client"` - * [Pipeline](./seldon_e2e_cifar10.kale.default.py) - * Assumes a ReadWriteMany PVC will succeed on your cluster - -![pipeline](cifar10-pipeline.png) - - -### Model Deployment and Predictions - -Deploy a trained Tensorflow model and call the Seldon API to get predictions. - -![cifar10 prediction](cifar10-prediction.png) - -### Build and test a Model Explainer - -Train an Anchor Images Explainer using [Alibi](https://github.com/SeldonIO/alibi) and test. - -![cifar10 explainer](cifar10-explainer.png) - -### Outlier Detector Deployment and Test - -Deploy a trained outlier detector using [Alibi-Detect](https://github.com/SeldonIO/alibi-detect) and send corrupted images to the model to check they are marked as outliers. - -![cifar10 outlier](cifar10-outlier.png) - -### Drift Detector Deployment and Test - -Create a drift detector using [Alibi-Detect](https://github.com/SeldonIO/alibi-detect) and deploy and test using motion blurred images. - -![cifar10 drift](cifar10-drift.png) - - -## Income Classification Model - -This example illustrates a simple tabular data use case built on [demographic features from a 1996 US census](https://archive.ics.uci.edu/ml/datasets/census+income) to create a binary classifier and attach explanations and outlier detection.. - - * Seldon Income Classification Model with deployed explainer and outlier detector - * [Kale annotated Jupyter Notebook](./seldon_e2e_adult.ipynb) - * [Pipeline](./seldon_e2e_adult.kale.nfs.py) - * Assumes `storage_class="nfs-client"` - * [Pipeline](./seldon_e2e_adult.kale.default.py) - * Assumes a ReadWriteMany PVC will succeed on your cluster - -![income pipeline](income-pipeline.png) - -### Model Predictions Explanations - -``` -Prediction: <=50K -Anchor: Marital Status = Separated AND Sex = Female -Precision: 0.98 -Coverage: 0.11 -``` - -## Outlier Detection - -``` -{'data': {'instance_score': None, 'feature_score': None, 'is_outlier': [1]}, 'meta': {'name': 'IForest', 'detector_type': 'offline', 'data_type': 'tabular'}} -Outlier True -``` diff --git a/samples/contrib/e2e-outlier-drift-explainer/seldon/cifar10-drift.png b/samples/contrib/e2e-outlier-drift-explainer/seldon/cifar10-drift.png deleted file mode 100644 index 795ebacf3c6..00000000000 Binary files a/samples/contrib/e2e-outlier-drift-explainer/seldon/cifar10-drift.png and /dev/null differ diff --git a/samples/contrib/e2e-outlier-drift-explainer/seldon/cifar10-explainer.png b/samples/contrib/e2e-outlier-drift-explainer/seldon/cifar10-explainer.png deleted file mode 100644 index f41749badd0..00000000000 Binary files a/samples/contrib/e2e-outlier-drift-explainer/seldon/cifar10-explainer.png and /dev/null differ diff --git a/samples/contrib/e2e-outlier-drift-explainer/seldon/cifar10-outlier.png b/samples/contrib/e2e-outlier-drift-explainer/seldon/cifar10-outlier.png deleted file mode 100644 index 2ff490f87c7..00000000000 Binary files a/samples/contrib/e2e-outlier-drift-explainer/seldon/cifar10-outlier.png and /dev/null differ diff --git a/samples/contrib/e2e-outlier-drift-explainer/seldon/cifar10-pipeline.png b/samples/contrib/e2e-outlier-drift-explainer/seldon/cifar10-pipeline.png deleted file mode 100644 
index dcb54d9bc80..00000000000 Binary files a/samples/contrib/e2e-outlier-drift-explainer/seldon/cifar10-pipeline.png and /dev/null differ diff --git a/samples/contrib/e2e-outlier-drift-explainer/seldon/cifar10-prediction.png b/samples/contrib/e2e-outlier-drift-explainer/seldon/cifar10-prediction.png deleted file mode 100644 index 41dc87f11b5..00000000000 Binary files a/samples/contrib/e2e-outlier-drift-explainer/seldon/cifar10-prediction.png and /dev/null differ diff --git a/samples/contrib/e2e-outlier-drift-explainer/seldon/income-pipeline.png b/samples/contrib/e2e-outlier-drift-explainer/seldon/income-pipeline.png deleted file mode 100644 index 709b1c4f46e..00000000000 Binary files a/samples/contrib/e2e-outlier-drift-explainer/seldon/income-pipeline.png and /dev/null differ diff --git a/samples/contrib/e2e-outlier-drift-explainer/seldon/seldon_e2e_adult.ipynb b/samples/contrib/e2e-outlier-drift-explainer/seldon/seldon_e2e_adult.ipynb deleted file mode 100644 index 5b91eb6a070..00000000000 --- a/samples/contrib/e2e-outlier-drift-explainer/seldon/seldon_e2e_adult.ipynb +++ /dev/null @@ -1,1135 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "# End to End Machine Learning Pipeline for Income Prediction\n", - "\n", - "We use [demographic features from the 1996 US census](https://archive.ics.uci.edu/ml/datasets/census+income) to build an end to end machine learning pipeline. The pipeline is also annotated so it can be run as a [Kubeflow Pipeline](https://www.kubeflow.org/docs/pipelines/overview/pipelines-overview/) using the [Kale](https://github.com/kubeflow-kale/kale) pipeline generator.\n", - "\n", - "The notebook/pipeline stages are:\n", - "\n", - " 1. Setup \n", - " * Imports\n", - " * pipeline-parameters\n", - " * minio client test\n", - " 1. Train a simple sklearn model and push to minio\n", - " 1. Prepare an Anchors explainer for model and push to minio\n", - " 1. Test Explainer\n", - " 1. Train an isolation forest outlier detector for model and push to minio\n", - " 1. Deploy a Seldon model and test\n", - " 1. Deploy a KfServing model and test\n", - " 1. 
Deploy an outlier detector \n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "imports" - ] - }, - "outputs": [], - "source": [ - "import numpy as np\n", - "from sklearn.ensemble import RandomForestClassifier\n", - "from sklearn.compose import ColumnTransformer\n", - "from sklearn.pipeline import Pipeline\n", - "from sklearn.impute import SimpleImputer\n", - "from sklearn.metrics import accuracy_score\n", - "from sklearn.preprocessing import StandardScaler, OneHotEncoder\n", - "from alibi.explainers import AnchorTabular\n", - "from alibi.datasets import fetch_adult\n", - "from minio import Minio\n", - "from minio.error import ResponseError\n", - "from joblib import dump, load\n", - "import dill\n", - "import time\n", - "import json\n", - "from subprocess import run, Popen, PIPE\n", - "from alibi_detect.utils.data import create_outlier_batch" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "pipeline-parameters" - ] - }, - "outputs": [], - "source": [ - "MINIO_HOST=\"minio-service.kubeflow:9000\"\n", - "MINIO_ACCESS_KEY=\"minio\"\n", - "MINIO_SECRET_KEY=\"minio123\"\n", - "MINIO_MODEL_BUCKET=\"seldon\"\n", - "INCOME_MODEL_PATH=\"sklearn/income/model\"\n", - "EXPLAINER_MODEL_PATH=\"sklearn/income/explainer\"\n", - "OUTLIER_MODEL_PATH=\"sklearn/income/outlier\"\n", - "DEPLOY_NAMESPACE=\"admin\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "functions" - ] - }, - "outputs": [], - "source": [ - "def get_minio():\n", - " return Minio(MINIO_HOST,\n", - " access_key=MINIO_ACCESS_KEY,\n", - " secret_key=MINIO_SECRET_KEY,\n", - " secure=False)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:setup" - ] - }, - "outputs": [], - "source": [ - "minioClient = get_minio()\n", - "buckets = minioClient.list_buckets()\n", - "for bucket in buckets:\n", - " print(bucket.name, bucket.creation_date)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "if not minioClient.bucket_exists(MINIO_MODEL_BUCKET):\n", - " minioClient.make_bucket(MINIO_MODEL_BUCKET)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Train Model" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:build_model", - "prev:setup" - ] - }, - "outputs": [], - "source": [ - "adult = fetch_adult()\n", - "adult.keys()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:" - ] - }, - "outputs": [], - "source": [ - "data = adult.data\n", - "target = adult.target\n", - "feature_names = adult.feature_names\n", - "category_map = adult.category_map" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Note that for your own datasets you can use our utility function [gen_category_map](../api/alibi.utils.data.rst) to create the category map:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "from alibi.utils.data import gen_category_map" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Define shuffled training and test set" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "np.random.seed(0)\n", - "data_perm = 
np.random.permutation(np.c_[data, target])\n", - "data = data_perm[:,:-1]\n", - "target = data_perm[:,-1]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "idx = 30000\n", - "X_train,Y_train = data[:idx,:], target[:idx]\n", - "X_test, Y_test = data[idx+1:,:], target[idx+1:]" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "### Create feature transformation pipeline\n", - "Create feature pre-processor. Needs to have 'fit' and 'transform' methods. Different types of pre-processing can be applied to all or part of the features. In the example below we will standardize ordinal features and apply one-hot-encoding to categorical features.\n", - "\n", - "Ordinal features:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "ordinal_features = [x for x in range(len(feature_names)) if x not in list(category_map.keys())]\n", - "ordinal_transformer = Pipeline(steps=[('imputer', SimpleImputer(strategy='median')),\n", - " ('scaler', StandardScaler())])" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Categorical features:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "categorical_features = list(category_map.keys())\n", - "categorical_transformer = Pipeline(steps=[('imputer', SimpleImputer(strategy='median')),\n", - " ('onehot', OneHotEncoder(handle_unknown='ignore'))])" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Combine and fit:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "preprocessor = ColumnTransformer(transformers=[('num', ordinal_transformer, ordinal_features),\n", - " ('cat', categorical_transformer, categorical_features)])" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "### Train Random Forest model\n", - "\n", - "Fit on pre-processed (imputing, OHE, standardizing) data." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "np.random.seed(0)\n", - "clf = RandomForestClassifier(n_estimators=50)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "model=Pipeline(steps=[(\"preprocess\",preprocessor),(\"model\",clf)])\n", - "model.fit(X_train,Y_train)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Define predict function" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:" - ] - }, - "outputs": [], - "source": [ - "def predict_fn(x):\n", - " return model.predict(x)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:", - "prev:build_model" - ] - }, - "outputs": [], - "source": [ - "#predict_fn = lambda x: clf.predict(preprocessor.transform(x))\n", - "print('Train accuracy: ', accuracy_score(Y_train, predict_fn(X_train)))\n", - "print('Test accuracy: ', accuracy_score(Y_test, predict_fn(X_test)))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "dump(model, 'model.joblib') " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "print(get_minio().fput_object(MINIO_MODEL_BUCKET, f\"{INCOME_MODEL_PATH}/model.joblib\", 'model.joblib'))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Train Explainer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:train_explainer", - "prev:build_model" - ] - }, - "outputs": [], - "source": [ - "model.predict(X_train)\n", - "explainer = AnchorTabular(predict_fn, feature_names, categorical_names=category_map)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Discretize the ordinal features into quartiles" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "explainer.fit(X_train, disc_perc=[25, 50, 75])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "with open(\"explainer.dill\", \"wb\") as dill_file:\n", - " dill.dump(explainer, dill_file) \n", - " dill_file.close()\n", - "print(get_minio().fput_object(MINIO_MODEL_BUCKET, f\"{EXPLAINER_MODEL_PATH}/explainer.dill\", 'explainer.dill'))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Get Explanation\n", - "\n", - "Below, we get an anchor for the prediction of the first observation in the test set. An anchor is a sufficient condition - that is, when the anchor holds, the prediction should be the same as the prediction for this instance." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:explain", - "prev:train_explainer" - ] - }, - "outputs": [], - "source": [ - "model.predict(X_train)\n", - "idx = 0\n", - "class_names = adult.target_names\n", - "print('Prediction: ', class_names[explainer.predict_fn(X_test[idx].reshape(1, -1))[0]])" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "We set the precision threshold to 0.95. 
This means that predictions on observations where the anchor holds will be the same as the prediction on the explained instance at least 95% of the time." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "explanation = explainer.explain(X_test[idx], threshold=0.95)\n", - "print('Anchor: %s' % (' AND '.join(explanation['names'])))\n", - "print('Precision: %.2f' % explanation['precision'])\n", - "print('Coverage: %.2f' % explanation['coverage'])" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Train Outlier Detector" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:build_outlier", - "prev:build_model" - ] - }, - "outputs": [], - "source": [ - "from alibi_detect.od import IForest\n", - "\n", - "od = IForest(\n", - " threshold=0.,\n", - " n_estimators=200,\n", - ")\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "od.fit(X_train)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "np.random.seed(0)\n", - "perc_outlier = 5\n", - "threshold_batch = create_outlier_batch(X_train, Y_train, n_samples=1000, perc_outlier=perc_outlier)\n", - "X_threshold, y_threshold = threshold_batch.data.astype('float'), threshold_batch.target\n", - "#X_threshold = (X_threshold - mean) / stdev\n", - "print('{}% outliers'.format(100 * y_threshold.mean()))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "od.infer_threshold(X_threshold, threshold_perc=100-perc_outlier)\n", - "print('New threshold: {}'.format(od.threshold))\n", - "threshold = od.threshold" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "X_outlier = [[300, 4, 4, 2, 1, 4, 4, 0, 0, 0, 600, 9]]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "od.predict(\n", - " X_outlier\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "from alibi_detect.utils.saving import save_detector, load_detector\n", - "from os import listdir\n", - "from os.path import isfile, join\n", - "\n", - "filepath=\"ifoutlier\"\n", - "save_detector(od, filepath) \n", - "onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))]\n", - "for filename in onlyfiles:\n", - " print(filename)\n", - " print(get_minio().fput_object(MINIO_MODEL_BUCKET, f\"{OUTLIER_MODEL_PATH}/{filename}\", join(filepath, filename)))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Deploy Seldon Core Model" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:deploy_seldon", - "prev:train_explainer" - ] - }, - "outputs": [], - "source": [ - "secret = f\"\"\"apiVersion: v1\n", - "kind: Secret\n", - "metadata:\n", - " name: seldon-init-container-secret\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "type: Opaque\n", - "stringData:\n", - " AWS_ACCESS_KEY_ID: {MINIO_ACCESS_KEY}\n", - " AWS_SECRET_ACCESS_KEY: {MINIO_SECRET_KEY}\n", - " AWS_ENDPOINT_URL: http://{MINIO_HOST}\n", - " USE_SSL: \"false\"\n", - "\"\"\"\n", - "with open(\"secret.yaml\",\"w\") as 
f:\n", - " f.write(secret)\n", - "run(\"cat secret.yaml | kubectl apply -f -\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "sa = f\"\"\"apiVersion: v1\n", - "kind: ServiceAccount\n", - "metadata:\n", - " name: minio-sa\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "secrets:\n", - " - name: seldon-init-container-secret\n", - "\"\"\"\n", - "with open(\"sa.yaml\",\"w\") as f:\n", - " f.write(sa)\n", - "run(\"kubectl apply -f sa.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "model_yaml=f\"\"\"apiVersion: machinelearning.seldon.io/v1\n", - "kind: SeldonDeployment\n", - "metadata:\n", - " name: income-classifier\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " predictors:\n", - " - componentSpecs:\n", - " graph:\n", - " implementation: SKLEARN_SERVER\n", - " modelUri: s3://{MINIO_MODEL_BUCKET}/{INCOME_MODEL_PATH}\n", - " envSecretRefName: seldon-init-container-secret\n", - " name: classifier\n", - " logger:\n", - " mode: all\n", - " explainer:\n", - " type: AnchorTabular\n", - " modelUri: s3://{MINIO_MODEL_BUCKET}/{EXPLAINER_MODEL_PATH}\n", - " envSecretRefName: seldon-init-container-secret\n", - " name: default\n", - " replicas: 1\n", - "\"\"\"\n", - "with open(\"model.yaml\",\"w\") as f:\n", - " f.write(model_yaml)\n", - "run(\"kubectl apply -f model.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "run(f\"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l seldon-deployment-id=income-classifier -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "run(f\"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l seldon-deployment-id=income-classifier -o jsonpath='{{.items[1].metadata.name}}' -n {DEPLOY_NAMESPACE})\", shell=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Make a prediction request" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:test_model", - "prev:deploy_seldon" - ] - }, - "outputs": [], - "source": [ - "payload='{\"data\": {\"ndarray\": [[53,4,0,2,8,4,4,0,0,0,60,9]]}}'\n", - "cmd=f\"\"\"curl -d '{payload}' \\\n", - " http://income-classifier-default.{DEPLOY_NAMESPACE}:8000/api/v1.0/predictions \\\n", - " -H \"Content-Type: application/json\"\n", - "\"\"\"\n", - "ret = Popen(cmd, shell=True,stdout=PIPE)\n", - "raw = ret.stdout.read().decode(\"utf-8\")\n", - "print(raw)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Make an explanation request" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "payload='{\"data\": {\"ndarray\": [[53,4,0,2,8,4,4,0,0,0,60,9]]}}'\n", - "cmd=f\"\"\"curl -d '{payload}' \\\n", - " http://income-classifier-default-explainer.{DEPLOY_NAMESPACE}:9000/api/v1.0/explain \\\n", - " -H \"Content-Type: application/json\"\n", - "\"\"\"\n", - "ret = Popen(cmd, shell=True,stdout=PIPE)\n", - "raw = ret.stdout.read().decode(\"utf-8\")\n", - "print(raw)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": 
[ - "## Deploy Outier Detector" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:deploy_outlier", - "prev:build_outlier", - "prev:test_model" - ] - }, - "outputs": [], - "source": [ - "outlier_yaml=f\"\"\"apiVersion: serving.knative.dev/v1\n", - "kind: Service\n", - "metadata:\n", - " name: income-outlier\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " template:\n", - " metadata:\n", - " annotations:\n", - " autoscaling.knative.dev/minScale: \"1\"\n", - " spec:\n", - " containers:\n", - " - image: seldonio/alibi-detect-server:1.2.2-dev_alibidetect\n", - " imagePullPolicy: IfNotPresent\n", - " args:\n", - " - --model_name\n", - " - adultod\n", - " - --http_port\n", - " - '8080'\n", - " - --protocol\n", - " - seldon.http\n", - " - --storage_uri\n", - " - s3://{MINIO_MODEL_BUCKET}/{OUTLIER_MODEL_PATH}\n", - " - --reply_url\n", - " - http://default-broker \n", - " - --event_type\n", - " - io.seldon.serving.inference.outlier\n", - " - --event_source\n", - " - io.seldon.serving.incomeod\n", - " - OutlierDetector\n", - " envFrom:\n", - " - secretRef:\n", - " name: seldon-init-container-secret\n", - "\"\"\"\n", - "with open(\"outlier.yaml\",\"w\") as f:\n", - " f.write(outlier_yaml)\n", - "run(\"kubectl apply -f outlier.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "trigger_outlier_yaml=f\"\"\"apiVersion: eventing.knative.dev/v1alpha1\n", - "kind: Trigger\n", - "metadata:\n", - " name: income-outlier-trigger\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " filter:\n", - " sourceAndType:\n", - " type: io.seldon.serving.inference.request\n", - " subscriber:\n", - " ref:\n", - " apiVersion: serving.knative.dev/v1alpha1\n", - " kind: Service\n", - " name: income-outlier\n", - "\"\"\"\n", - "with open(\"outlier_trigger.yaml\",\"w\") as f:\n", - " f.write(trigger_outlier_yaml)\n", - "run(\"kubectl apply -f outlier_trigger.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "run(f\"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l serving.knative.dev/service=income-outlier -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})\", shell=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Deploy KNative Eventing Event Display" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:deploy_event_display", - "prev:deploy_outlier" - ] - }, - "outputs": [], - "source": [ - "event_display=f\"\"\"apiVersion: apps/v1\n", - "kind: Deployment\n", - "metadata:\n", - " name: event-display\n", - " namespace: {DEPLOY_NAMESPACE} \n", - "spec:\n", - " replicas: 1\n", - " selector:\n", - " matchLabels: &labels\n", - " app: event-display\n", - " template:\n", - " metadata:\n", - " labels: *labels\n", - " spec:\n", - " containers:\n", - " - name: helloworld-go\n", - " # Source code: https://github.com/knative/eventing-contrib/tree/master/cmd/event_display\n", - " image: gcr.io/knative-releases/knative.dev/eventing-contrib/cmd/event_display@sha256:f4628e97a836c77ed38bd3b6fd3d0b06de4d5e7db6704772fe674d48b20bd477\n", - "---\n", - "kind: Service\n", - "apiVersion: v1\n", - "metadata:\n", - " name: event-display\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " selector:\n", - " app: event-display\n", - " ports:\n", - " - 
protocol: TCP\n", - " port: 80\n", - " targetPort: 8080\n", - "---\n", - "apiVersion: eventing.knative.dev/v1alpha1\n", - "kind: Trigger\n", - "metadata:\n", - " name: income-outlier-display\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " broker: default\n", - " filter:\n", - " attributes:\n", - " type: io.seldon.serving.inference.outlier\n", - " subscriber:\n", - " ref:\n", - " apiVersion: v1\n", - " kind: Service\n", - " name: event-display\n", - "\"\"\"\n", - "with open(\"event_display.yaml\",\"w\") as f:\n", - " f.write(event_display)\n", - "run(\"kubectl apply -f event_display.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "run(f\"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/event-display -n {DEPLOY_NAMESPACE}\", shell=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Test Outlier Detection" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:test_outliers", - "prev:deploy_event_display" - ] - }, - "outputs": [], - "source": [ - "def predict():\n", - " payload='{\"data\": {\"ndarray\": [[300, 4, 4, 2, 1, 4, 4, 0, 0, 0, 600, 9]]}}'\n", - " cmd=f\"\"\"curl -d '{payload}' \\\n", - " http://income-classifier-default.{DEPLOY_NAMESPACE}:8000/api/v1.0/predictions \\\n", - " -H \"Content-Type: application/json\"\n", - " \"\"\"\n", - " ret = Popen(cmd, shell=True,stdout=PIPE)\n", - " raw = ret.stdout.read().decode(\"utf-8\")\n", - " print(raw)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "def get_outlier_event_display_logs():\n", - " cmd=f\"kubectl logs $(kubectl get pod -l app=event-display -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE}) -n {DEPLOY_NAMESPACE}\"\n", - " ret = Popen(cmd, shell=True,stdout=PIPE)\n", - " res = ret.stdout.read().decode(\"utf-8\").split(\"\\n\")\n", - " data= []\n", - " for i in range(0,len(res)):\n", - " if res[i] == 'Data,':\n", - " j = json.loads(json.loads(res[i+1]))\n", - " if \"is_outlier\"in j[\"data\"].keys():\n", - " data.append(j)\n", - " if len(data) > 0:\n", - " return data[-1]\n", - " else:\n", - " return None\n", - "j = None\n", - "while j is None:\n", - " predict()\n", - " print(\"Waiting for outlier logs, sleeping\")\n", - " time.sleep(2)\n", - " j = get_outlier_event_display_logs()\n", - " \n", - "print(j)\n", - "print(\"Outlier\",j[\"data\"][\"is_outlier\"]==[1])" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Clean Up Resources" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "skip" - ] - }, - "outputs": [], - "source": [ - "run(f\"kubectl delete sdep income-classifier -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete ksvc income-outlier -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete sa minio-sa -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete secret seldon-init-container-secret -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete deployment event-display -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete svc event-display -n {DEPLOY_NAMESPACE}\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - 
"language": "python", - "name": "python3" - }, - "kubeflow_notebook": { - "docker_image": "seldonio/jupyter-lab-alibi-kale:0.11", - "experiment": { - "id": "new", - "name": "seldon-e2e-adult" - }, - "experiment_name": "seldon-e2e-adult", - "katib_metadata": { - "algorithm": { - "algorithmName": "grid" - }, - "maxFailedTrialCount": 3, - "maxTrialCount": 12, - "objective": { - "objectiveMetricName": "", - "type": "minimize" - }, - "parallelTrialCount": 3, - "parameters": [] - }, - "katib_run": false, - "pipeline_description": "Seldon e2e adult", - "pipeline_name": "seldon-e2e-adult", - "volumes": [] - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.8" - }, - "varInspector": { - "cols": { - "lenName": 16, - "lenType": 16, - "lenVar": 40 - }, - "kernels_config": { - "python": { - "delete_cmd_postfix": "", - "delete_cmd_prefix": "del ", - "library": "var_list.py", - "varRefreshCmd": "print(var_dic_list())" - }, - "r": { - "delete_cmd_postfix": ") ", - "delete_cmd_prefix": "rm(", - "library": "var_list.r", - "varRefreshCmd": "cat(var_dic_list()) " - } - }, - "types_to_exclude": [ - "module", - "function", - "builtin_function_or_method", - "instance", - "_Feature" - ], - "window_display": false - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/samples/contrib/e2e-outlier-drift-explainer/seldon/seldon_e2e_adult.kale.default.py b/samples/contrib/e2e-outlier-drift-explainer/seldon/seldon_e2e_adult.kale.default.py deleted file mode 100644 index 74bd6192208..00000000000 --- a/samples/contrib/e2e-outlier-drift-explainer/seldon/seldon_e2e_adult.kale.default.py +++ /dev/null @@ -1,1380 +0,0 @@ -import kfp.dsl as dsl -import json -import kfp.components as comp -from collections import OrderedDict -from kubernetes import client as k8s_client - - -def setup(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - minioClient = get_minio() - buckets = minioClient.list_buckets() - for bucket in buckets: - print(bucket.name, bucket.creation_date) - ''' - - block4 = ''' - if not minioClient.bucket_exists(MINIO_MODEL_BUCKET): - minioClient.make_bucket(MINIO_MODEL_BUCKET) - ''' - - # run the code blocks inside a jupyter kernel - from 
kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/setup.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('setup') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def build_model(INCOME_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - INCOME_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(INCOME_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - adult = fetch_adult() - adult.keys() - ''' - - block4 = ''' - data = adult.data - target = adult.target - feature_names = adult.feature_names - category_map = adult.category_map - ''' - - block5 = ''' - from alibi.utils.data import gen_category_map - ''' - - block6 = ''' - np.random.seed(0) - data_perm = np.random.permutation(np.c_[data, target]) - data = data_perm[:,:-1] - target = data_perm[:,-1] - ''' - - block7 = ''' - idx = 30000 - X_train,Y_train = data[:idx,:], target[:idx] - X_test, Y_test = data[idx+1:,:], target[idx+1:] - ''' - - block8 = ''' - ordinal_features = [x for x in range(len(feature_names)) if x not in list(category_map.keys())] - ordinal_transformer = Pipeline(steps=[('imputer', SimpleImputer(strategy='median')), - ('scaler', StandardScaler())]) - ''' - - block9 = ''' - categorical_features = list(category_map.keys()) - categorical_transformer = Pipeline(steps=[('imputer', SimpleImputer(strategy='median')), - ('onehot', OneHotEncoder(handle_unknown='ignore'))]) - ''' - - block10 = ''' - preprocessor = ColumnTransformer(transformers=[('num', ordinal_transformer, ordinal_features), - ('cat', categorical_transformer, categorical_features)]) - ''' - - block11 = ''' - np.random.seed(0) - clf = RandomForestClassifier(n_estimators=50) - ''' - - block12 = ''' - model=Pipeline(steps=[("preprocess",preprocessor),("model",clf)]) - model.fit(X_train,Y_train) - ''' - - block13 = ''' - def predict_fn(x): - return model.predict(x) - ''' - - block14 = ''' - #predict_fn = lambda x: clf.predict(preprocessor.transform(x)) - print('Train accuracy: ', accuracy_score(Y_train, predict_fn(X_train))) - print('Test accuracy: ', accuracy_score(Y_test, predict_fn(X_test))) - ''' - - block15 = ''' - dump(model, 'model.joblib') - ''' - - block16 = ''' - 
print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{INCOME_MODEL_PATH}/model.joblib", 'model.joblib')) - ''' - - data_saving_block = ''' - # -----------------------DATA SAVING START--------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.save(X_test, "X_test") - _kale_marshal_utils.save(X_train, "X_train") - _kale_marshal_utils.save(Y_train, "Y_train") - _kale_marshal_utils.save(adult, "adult") - _kale_marshal_utils.save(category_map, "category_map") - _kale_marshal_utils.save(feature_names, "feature_names") - _kale_marshal_utils.save(model, "model") - _kale_marshal_utils.save(predict_fn, "predict_fn") - # -----------------------DATA SAVING END----------------------------------- - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - block9, - block10, - block11, - block12, - block13, - block14, - block15, - block16, - data_saving_block) - html_artifact = _kale_run_code(blocks) - with open("/build_model.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('build_model') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def build_outlier(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str, OUTLIER_MODEL_PATH: str): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - OUTLIER_MODEL_PATH = "{}" - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_train = _kale_marshal_utils.load("X_train") - Y_train = _kale_marshal_utils.load("Y_train") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - from alibi_detect.od import IForest - - od = IForest( - threshold=0., - n_estimators=200, - ) - ''' - - block4 = ''' - od.fit(X_train) - ''' - - block5 = ''' - np.random.seed(0) - perc_outlier = 5 - threshold_batch = create_outlier_batch(X_train, Y_train, n_samples=1000, perc_outlier=perc_outlier) - X_threshold, y_threshold = 
threshold_batch.data.astype('float'), threshold_batch.target - #X_threshold = (X_threshold - mean) / stdev - print('{}% outliers'.format(100 * y_threshold.mean())) - ''' - - block6 = ''' - od.infer_threshold(X_threshold, threshold_perc=100-perc_outlier) - print('New threshold: {}'.format(od.threshold)) - threshold = od.threshold - ''' - - block7 = ''' - X_outlier = [[300, 4, 4, 2, 1, 4, 4, 0, 0, 0, 600, 9]] - ''' - - block8 = ''' - od.predict( - X_outlier - ) - ''' - - block9 = ''' - from alibi_detect.utils.saving import save_detector, load_detector - from os import listdir - from os.path import isfile, join - - filepath="ifoutlier" - save_detector(od, filepath) - onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{OUTLIER_MODEL_PATH}/{filename}", join(filepath, filename))) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - block9, - ) - html_artifact = _kale_run_code(blocks) - with open("/build_outlier.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('build_outlier') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def train_explainer(EXPLAINER_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - EXPLAINER_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(EXPLAINER_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_train = _kale_marshal_utils.load("X_train") - category_map = _kale_marshal_utils.load("category_map") - feature_names = _kale_marshal_utils.load("feature_names") - model = _kale_marshal_utils.load("model") - predict_fn = _kale_marshal_utils.load("predict_fn") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - model.predict(X_train) - explainer = AnchorTabular(predict_fn, feature_names, categorical_names=category_map) - ''' - - 
block4 = ''' - explainer.fit(X_train, disc_perc=[25, 50, 75]) - ''' - - block5 = ''' - with open("explainer.dill", "wb") as dill_file: - dill.dump(explainer, dill_file) - dill_file.close() - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{EXPLAINER_MODEL_PATH}/explainer.dill", 'explainer.dill')) - ''' - - data_saving_block = ''' - # -----------------------DATA SAVING START--------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.save(X_train, "X_train") - _kale_marshal_utils.save(explainer, "explainer") - _kale_marshal_utils.save(model, "model") - # -----------------------DATA SAVING END----------------------------------- - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - block5, - data_saving_block) - html_artifact = _kale_run_code(blocks) - with open("/train_explainer.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('train_explainer') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_seldon(DEPLOY_NAMESPACE: str, EXPLAINER_MODEL_PATH: str, INCOME_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - EXPLAINER_MODEL_PATH = "{}" - INCOME_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, EXPLAINER_MODEL_PATH, INCOME_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - secret = f"""apiVersion: v1 - kind: Secret - metadata: - name: seldon-init-container-secret - namespace: {DEPLOY_NAMESPACE} - type: Opaque - stringData: - AWS_ACCESS_KEY_ID: {MINIO_ACCESS_KEY} - AWS_SECRET_ACCESS_KEY: {MINIO_SECRET_KEY} - AWS_ENDPOINT_URL: http://{MINIO_HOST} - USE_SSL: "false" - """ - with open("secret.yaml","w") as f: - f.write(secret) - run("cat secret.yaml | kubectl apply -f -", shell=True) - ''' - - block4 = ''' - sa = f"""apiVersion: v1 - kind: ServiceAccount - metadata: - name: minio-sa - namespace: {DEPLOY_NAMESPACE} - secrets: - - name: seldon-init-container-secret - """ - with open("sa.yaml","w") as f: - f.write(sa) - run("kubectl apply -f sa.yaml", shell=True) - ''' - - block5 = ''' - model_yaml=f"""apiVersion: machinelearning.seldon.io/v1 - kind: 
SeldonDeployment - metadata: - name: income-classifier - namespace: {DEPLOY_NAMESPACE} - spec: - predictors: - - componentSpecs: - graph: - implementation: SKLEARN_SERVER - modelUri: s3://{MINIO_MODEL_BUCKET}/{INCOME_MODEL_PATH} - envSecretRefName: seldon-init-container-secret - name: classifier - logger: - mode: all - explainer: - type: AnchorTabular - modelUri: s3://{MINIO_MODEL_BUCKET}/{EXPLAINER_MODEL_PATH} - envSecretRefName: seldon-init-container-secret - name: default - replicas: 1 - """ - with open("model.yaml","w") as f: - f.write(model_yaml) - run("kubectl apply -f model.yaml", shell=True) - ''' - - block6 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l seldon-deployment-id=income-classifier -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})", shell=True) - ''' - - block7 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l seldon-deployment-id=income-classifier -o jsonpath='{{.items[1].metadata.name}}' -n {DEPLOY_NAMESPACE})", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_seldon.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_seldon') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def test_model(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - payload='{"data": {"ndarray": [[53,4,0,2,8,4,4,0,0,0,60,9]]}}' - cmd=f"""curl -d '{payload}' \\ - http://income-classifier-default.{DEPLOY_NAMESPACE}:8000/api/v1.0/predictions \\ - -H "Content-Type: application/json" - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - print(raw) - ''' - - block4 = ''' - payload='{"data": {"ndarray": [[53,4,0,2,8,4,4,0,0,0,60,9]]}}' - cmd=f"""curl -d '{payload}' \\ - http://income-classifier-default-explainer.{DEPLOY_NAMESPACE}:9000/api/v1.0/explain \\ - -H "Content-Type: application/json" - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - print(raw) - ''' - - # run the code blocks inside a jupyter 
kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/test_model.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('test_model') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_outlier(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str, OUTLIER_MODEL_PATH: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - OUTLIER_MODEL_PATH = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - outlier_yaml=f"""apiVersion: serving.knative.dev/v1 - kind: Service - metadata: - name: income-outlier - namespace: {DEPLOY_NAMESPACE} - spec: - template: - metadata: - annotations: - autoscaling.knative.dev/minScale: "1" - spec: - containers: - - image: seldonio/alibi-detect-server:1.2.2-dev_alibidetect - imagePullPolicy: IfNotPresent - args: - - --model_name - - adultod - - --http_port - - '8080' - - --protocol - - seldon.http - - --storage_uri - - s3://{MINIO_MODEL_BUCKET}/{OUTLIER_MODEL_PATH} - - --reply_url - - http://default-broker - - --event_type - - io.seldon.serving.inference.outlier - - --event_source - - io.seldon.serving.incomeod - - OutlierDetector - envFrom: - - secretRef: - name: seldon-init-container-secret - """ - with open("outlier.yaml","w") as f: - f.write(outlier_yaml) - run("kubectl apply -f outlier.yaml", shell=True) - ''' - - block4 = ''' - trigger_outlier_yaml=f"""apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: income-outlier-trigger - namespace: {DEPLOY_NAMESPACE} - spec: - filter: - sourceAndType: - type: io.seldon.serving.inference.request - subscriber: - ref: - apiVersion: serving.knative.dev/v1alpha1 - kind: Service - name: income-outlier - """ - with open("outlier_trigger.yaml","w") as f: - f.write(trigger_outlier_yaml) - run("kubectl apply -f outlier_trigger.yaml", shell=True) - ''' - - block5 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l serving.knative.dev/service=income-outlier -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from 
kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_outlier.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_outlier') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_event_display(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - event_display=f"""apiVersion: apps/v1 - kind: Deployment - metadata: - name: event-display - namespace: {DEPLOY_NAMESPACE} - spec: - replicas: 1 - selector: - matchLabels: &labels - app: event-display - template: - metadata: - labels: *labels - spec: - containers: - - name: helloworld-go - # Source code: https://github.com/knative/eventing-contrib/tree/master/cmd/event_display - image: gcr.io/knative-releases/knative.dev/eventing-contrib/cmd/event_display@sha256:f4628e97a836c77ed38bd3b6fd3d0b06de4d5e7db6704772fe674d48b20bd477 - --- - kind: Service - apiVersion: v1 - metadata: - name: event-display - namespace: {DEPLOY_NAMESPACE} - spec: - selector: - app: event-display - ports: - - protocol: TCP - port: 80 - targetPort: 8080 - --- - apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: income-outlier-display - namespace: {DEPLOY_NAMESPACE} - spec: - broker: default - filter: - attributes: - type: io.seldon.serving.inference.outlier - subscriber: - ref: - apiVersion: v1 - kind: Service - name: event-display - """ - with open("event_display.yaml","w") as f: - f.write(event_display) - run("kubectl apply -f event_display.yaml", shell=True) - ''' - - block4 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/event-display -n {DEPLOY_NAMESPACE}", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_event_display.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_event_display') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def test_outliers(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, 
MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - def predict(): - payload='{"data": {"ndarray": [[300, 4, 4, 2, 1, 4, 4, 0, 0, 0, 600, 9]]}}' - cmd=f"""curl -d '{payload}' \\ - http://income-classifier-default.{DEPLOY_NAMESPACE}:8000/api/v1.0/predictions \\ - -H "Content-Type: application/json" - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - print(raw) - ''' - - block4 = ''' - def get_outlier_event_display_logs(): - cmd=f"kubectl logs $(kubectl get pod -l app=event-display -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE}) -n {DEPLOY_NAMESPACE}" - ret = Popen(cmd, shell=True,stdout=PIPE) - res = ret.stdout.read().decode("utf-8").split("\\n") - data= [] - for i in range(0,len(res)): - if res[i] == 'Data,': - j = json.loads(json.loads(res[i+1])) - if "is_outlier"in j["data"].keys(): - data.append(j) - if len(data) > 0: - return data[-1] - else: - return None - j = None - while j is None: - predict() - print("Waiting for outlier logs, sleeping") - time.sleep(2) - j = get_outlier_event_display_logs() - - print(j) - print("Outlier",j["data"]["is_outlier"]==[1]) - ''' - - block5 = ''' - - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - ) - html_artifact = _kale_run_code(blocks) - with open("/test_outliers.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('test_outliers') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def explain(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_test = _kale_marshal_utils.load("X_test") - X_train = _kale_marshal_utils.load("X_train") - adult = _kale_marshal_utils.load("adult") - explainer = 
_kale_marshal_utils.load("explainer") - model = _kale_marshal_utils.load("model") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - model.predict(X_train) - idx = 0 - class_names = adult.target_names - print('Prediction: ', class_names[explainer.predict_fn(X_test[idx].reshape(1, -1))[0]]) - ''' - - block4 = ''' - explanation = explainer.explain(X_test[idx], threshold=0.95) - print('Anchor: %s' % (' AND '.join(explanation['names']))) - print('Precision: %.2f' % explanation['precision']) - print('Coverage: %.2f' % explanation['coverage']) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/explain.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('explain') - - _kale_mlmd_utils.call("mark_execution_complete") - - -setup_op = comp.func_to_container_op( - setup, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -build_model_op = comp.func_to_container_op( - build_model, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -build_outlier_op = comp.func_to_container_op( - build_outlier, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -train_explainer_op = comp.func_to_container_op( - train_explainer, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_seldon_op = comp.func_to_container_op( - deploy_seldon, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -test_model_op = comp.func_to_container_op( - test_model, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_outlier_op = comp.func_to_container_op( - deploy_outlier, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_event_display_op = comp.func_to_container_op( - deploy_event_display, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -test_outliers_op = comp.func_to_container_op( - test_outliers, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -explain_op = comp.func_to_container_op( - explain, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -@dsl.pipeline( - name='seldon-e2e-adult-ttonn', - description='Seldon e2e adult' -) -def auto_generated_pipeline(DEPLOY_NAMESPACE='admin', EXPLAINER_MODEL_PATH='sklearn/income/explainer', INCOME_MODEL_PATH='sklearn/income/model', MINIO_ACCESS_KEY='minio', MINIO_HOST='minio-service.kubeflow:9000', MINIO_MODEL_BUCKET='seldon', MINIO_SECRET_KEY='minio123', OUTLIER_MODEL_PATH='sklearn/income/outlier'): - pvolumes_dict = OrderedDict() - volume_step_names = [] - 
volume_name_parameters = [] - - marshal_vop = dsl.VolumeOp( - name="kale-marshal-volume", - resource_name="kale-marshal-pvc", - modes=dsl.VOLUME_MODE_RWM, - size="1Gi" - ) - volume_step_names.append(marshal_vop.name) - volume_name_parameters.append(marshal_vop.outputs["name"].full_name) - pvolumes_dict['/marshal'] = marshal_vop.volume - - volume_step_names.sort() - volume_name_parameters.sort() - - setup_task = setup_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after() - setup_task.container.working_dir = "/home/jovyan" - setup_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'setup': '/setup.html'}) - setup_task.output_artifact_paths.update(output_artifacts) - setup_task.add_pod_label("pipelines.kubeflow.org/metadata_written", "true") - dep_names = setup_task.dependent_names + volume_step_names - setup_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - setup_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - build_model_task = build_model_op(INCOME_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(setup_task) - build_model_task.container.working_dir = "/home/jovyan" - build_model_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'build_model': '/build_model.html'}) - build_model_task.output_artifact_paths.update(output_artifacts) - build_model_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = build_model_task.dependent_names + volume_step_names - build_model_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - build_model_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - build_outlier_task = build_outlier_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH)\ - .add_pvolumes(pvolumes_dict)\ - .after(build_model_task) - build_outlier_task.container.working_dir = "/home/jovyan" - build_outlier_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'build_outlier': '/build_outlier.html'}) - build_outlier_task.output_artifact_paths.update(output_artifacts) - build_outlier_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = build_outlier_task.dependent_names + volume_step_names - build_outlier_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - build_outlier_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - train_explainer_task = train_explainer_op(EXPLAINER_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(build_model_task) - train_explainer_task.container.working_dir = "/home/jovyan" 
- train_explainer_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'train_explainer': '/train_explainer.html'}) - train_explainer_task.output_artifact_paths.update(output_artifacts) - train_explainer_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = train_explainer_task.dependent_names + volume_step_names - train_explainer_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - train_explainer_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_seldon_task = deploy_seldon_op(DEPLOY_NAMESPACE, EXPLAINER_MODEL_PATH, INCOME_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_explainer_task) - deploy_seldon_task.container.working_dir = "/home/jovyan" - deploy_seldon_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'deploy_seldon': '/deploy_seldon.html'}) - deploy_seldon_task.output_artifact_paths.update(output_artifacts) - deploy_seldon_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_seldon_task.dependent_names + volume_step_names - deploy_seldon_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_seldon_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - test_model_task = test_model_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_seldon_task) - test_model_task.container.working_dir = "/home/jovyan" - test_model_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'test_model': '/test_model.html'}) - test_model_task.output_artifact_paths.update(output_artifacts) - test_model_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = test_model_task.dependent_names + volume_step_names - test_model_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - test_model_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_outlier_task = deploy_outlier_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH)\ - .add_pvolumes(pvolumes_dict)\ - .after(build_outlier_task, test_model_task) - deploy_outlier_task.container.working_dir = "/home/jovyan" - deploy_outlier_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'deploy_outlier': '/deploy_outlier.html'}) - deploy_outlier_task.output_artifact_paths.update(output_artifacts) - deploy_outlier_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names 
= deploy_outlier_task.dependent_names + volume_step_names - deploy_outlier_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_outlier_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_event_display_task = deploy_event_display_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_outlier_task) - deploy_event_display_task.container.working_dir = "/home/jovyan" - deploy_event_display_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'deploy_event_display': '/deploy_event_display.html'}) - deploy_event_display_task.output_artifact_paths.update(output_artifacts) - deploy_event_display_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_event_display_task.dependent_names + volume_step_names - deploy_event_display_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_event_display_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - test_outliers_task = test_outliers_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_event_display_task) - test_outliers_task.container.working_dir = "/home/jovyan" - test_outliers_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'test_outliers': '/test_outliers.html'}) - test_outliers_task.output_artifact_paths.update(output_artifacts) - test_outliers_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = test_outliers_task.dependent_names + volume_step_names - test_outliers_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - test_outliers_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - explain_task = explain_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_explainer_task) - explain_task.container.working_dir = "/home/jovyan" - explain_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'explain': '/explain.html'}) - explain_task.output_artifact_paths.update(output_artifacts) - explain_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = explain_task.dependent_names + volume_step_names - explain_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - explain_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - -if __name__ == "__main__": - pipeline_func = auto_generated_pipeline - pipeline_filename = pipeline_func.__name__ + '.pipeline.tar.gz' - import kfp.compiler as compiler - compiler.Compiler().compile(pipeline_func, 
pipeline_filename) - - # Get or create an experiment and submit a pipeline run - import kfp - client = kfp.Client() - experiment = client.create_experiment('seldon-e2e-adult') - - # Submit a pipeline run - from kale.utils.kfp_utils import generate_run_name - run_name = generate_run_name('seldon-e2e-adult-ttonn') - run_result = client.run_pipeline( - experiment.id, run_name, pipeline_filename, {}) diff --git a/samples/contrib/e2e-outlier-drift-explainer/seldon/seldon_e2e_adult.kale.nfs.py b/samples/contrib/e2e-outlier-drift-explainer/seldon/seldon_e2e_adult.kale.nfs.py deleted file mode 100644 index fff32667b41..00000000000 --- a/samples/contrib/e2e-outlier-drift-explainer/seldon/seldon_e2e_adult.kale.nfs.py +++ /dev/null @@ -1,1381 +0,0 @@ -import kfp.dsl as dsl -import json -import kfp.components as comp -from collections import OrderedDict -from kubernetes import client as k8s_client - - -def setup(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - minioClient = get_minio() - buckets = minioClient.list_buckets() - for bucket in buckets: - print(bucket.name, bucket.creation_date) - ''' - - block4 = ''' - if not minioClient.bucket_exists(MINIO_MODEL_BUCKET): - minioClient.make_bucket(MINIO_MODEL_BUCKET) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/setup.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('setup') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def build_model(INCOME_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - INCOME_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(INCOME_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - 
from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - adult = fetch_adult() - adult.keys() - ''' - - block4 = ''' - data = adult.data - target = adult.target - feature_names = adult.feature_names - category_map = adult.category_map - ''' - - block5 = ''' - from alibi.utils.data import gen_category_map - ''' - - block6 = ''' - np.random.seed(0) - data_perm = np.random.permutation(np.c_[data, target]) - data = data_perm[:,:-1] - target = data_perm[:,-1] - ''' - - block7 = ''' - idx = 30000 - X_train,Y_train = data[:idx,:], target[:idx] - X_test, Y_test = data[idx+1:,:], target[idx+1:] - ''' - - block8 = ''' - ordinal_features = [x for x in range(len(feature_names)) if x not in list(category_map.keys())] - ordinal_transformer = Pipeline(steps=[('imputer', SimpleImputer(strategy='median')), - ('scaler', StandardScaler())]) - ''' - - block9 = ''' - categorical_features = list(category_map.keys()) - categorical_transformer = Pipeline(steps=[('imputer', SimpleImputer(strategy='median')), - ('onehot', OneHotEncoder(handle_unknown='ignore'))]) - ''' - - block10 = ''' - preprocessor = ColumnTransformer(transformers=[('num', ordinal_transformer, ordinal_features), - ('cat', categorical_transformer, categorical_features)]) - ''' - - block11 = ''' - np.random.seed(0) - clf = RandomForestClassifier(n_estimators=50) - ''' - - block12 = ''' - model=Pipeline(steps=[("preprocess",preprocessor),("model",clf)]) - model.fit(X_train,Y_train) - ''' - - block13 = ''' - def predict_fn(x): - return model.predict(x) - ''' - - block14 = ''' - #predict_fn = lambda x: clf.predict(preprocessor.transform(x)) - print('Train accuracy: ', accuracy_score(Y_train, predict_fn(X_train))) - print('Test accuracy: ', accuracy_score(Y_test, predict_fn(X_test))) - ''' - - block15 = ''' - dump(model, 'model.joblib') - ''' - - block16 = ''' - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{INCOME_MODEL_PATH}/model.joblib", 'model.joblib')) - ''' - - data_saving_block = ''' - # -----------------------DATA SAVING START--------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.save(X_test, "X_test") - _kale_marshal_utils.save(X_train, "X_train") - _kale_marshal_utils.save(Y_train, "Y_train") - _kale_marshal_utils.save(adult, "adult") - _kale_marshal_utils.save(category_map, "category_map") - _kale_marshal_utils.save(feature_names, "feature_names") - _kale_marshal_utils.save(model, "model") - _kale_marshal_utils.save(predict_fn, "predict_fn") - # -----------------------DATA SAVING END----------------------------------- - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - 
block9, - block10, - block11, - block12, - block13, - block14, - block15, - block16, - data_saving_block) - html_artifact = _kale_run_code(blocks) - with open("/build_model.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('build_model') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def build_outlier(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str, OUTLIER_MODEL_PATH: str): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - OUTLIER_MODEL_PATH = "{}" - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_train = _kale_marshal_utils.load("X_train") - Y_train = _kale_marshal_utils.load("Y_train") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - from alibi_detect.od import IForest - - od = IForest( - threshold=0., - n_estimators=200, - ) - ''' - - block4 = ''' - od.fit(X_train) - ''' - - block5 = ''' - np.random.seed(0) - perc_outlier = 5 - threshold_batch = create_outlier_batch(X_train, Y_train, n_samples=1000, perc_outlier=perc_outlier) - X_threshold, y_threshold = threshold_batch.data.astype('float'), threshold_batch.target - #X_threshold = (X_threshold - mean) / stdev - print('{}% outliers'.format(100 * y_threshold.mean())) - ''' - - block6 = ''' - od.infer_threshold(X_threshold, threshold_perc=100-perc_outlier) - print('New threshold: {}'.format(od.threshold)) - threshold = od.threshold - ''' - - block7 = ''' - X_outlier = [[300, 4, 4, 2, 1, 4, 4, 0, 0, 0, 600, 9]] - ''' - - block8 = ''' - od.predict( - X_outlier - ) - ''' - - block9 = ''' - from alibi_detect.utils.saving import save_detector, load_detector - from os import listdir - from os.path import isfile, join - - filepath="ifoutlier" - save_detector(od, filepath) - onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{OUTLIER_MODEL_PATH}/{filename}", join(filepath, filename))) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = 
(pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - block9, - ) - html_artifact = _kale_run_code(blocks) - with open("/build_outlier.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('build_outlier') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def train_explainer(EXPLAINER_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - EXPLAINER_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(EXPLAINER_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_train = _kale_marshal_utils.load("X_train") - category_map = _kale_marshal_utils.load("category_map") - feature_names = _kale_marshal_utils.load("feature_names") - model = _kale_marshal_utils.load("model") - predict_fn = _kale_marshal_utils.load("predict_fn") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - model.predict(X_train) - explainer = AnchorTabular(predict_fn, feature_names, categorical_names=category_map) - ''' - - block4 = ''' - explainer.fit(X_train, disc_perc=[25, 50, 75]) - ''' - - block5 = ''' - with open("explainer.dill", "wb") as dill_file: - dill.dump(explainer, dill_file) - dill_file.close() - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{EXPLAINER_MODEL_PATH}/explainer.dill", 'explainer.dill')) - ''' - - data_saving_block = ''' - # -----------------------DATA SAVING START--------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.save(X_train, "X_train") - _kale_marshal_utils.save(explainer, "explainer") - _kale_marshal_utils.save(model, "model") - # -----------------------DATA SAVING END----------------------------------- - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - block5, - data_saving_block) - html_artifact = _kale_run_code(blocks) - 
with open("/train_explainer.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('train_explainer') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_seldon(DEPLOY_NAMESPACE: str, EXPLAINER_MODEL_PATH: str, INCOME_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - EXPLAINER_MODEL_PATH = "{}" - INCOME_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, EXPLAINER_MODEL_PATH, INCOME_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - secret = f"""apiVersion: v1 - kind: Secret - metadata: - name: seldon-init-container-secret - namespace: {DEPLOY_NAMESPACE} - type: Opaque - stringData: - AWS_ACCESS_KEY_ID: {MINIO_ACCESS_KEY} - AWS_SECRET_ACCESS_KEY: {MINIO_SECRET_KEY} - AWS_ENDPOINT_URL: http://{MINIO_HOST} - USE_SSL: "false" - """ - with open("secret.yaml","w") as f: - f.write(secret) - run("cat secret.yaml | kubectl apply -f -", shell=True) - ''' - - block4 = ''' - sa = f"""apiVersion: v1 - kind: ServiceAccount - metadata: - name: minio-sa - namespace: {DEPLOY_NAMESPACE} - secrets: - - name: seldon-init-container-secret - """ - with open("sa.yaml","w") as f: - f.write(sa) - run("kubectl apply -f sa.yaml", shell=True) - ''' - - block5 = ''' - model_yaml=f"""apiVersion: machinelearning.seldon.io/v1 - kind: SeldonDeployment - metadata: - name: income-classifier - namespace: {DEPLOY_NAMESPACE} - spec: - predictors: - - componentSpecs: - graph: - implementation: SKLEARN_SERVER - modelUri: s3://{MINIO_MODEL_BUCKET}/{INCOME_MODEL_PATH} - envSecretRefName: seldon-init-container-secret - name: classifier - logger: - mode: all - explainer: - type: AnchorTabular - modelUri: s3://{MINIO_MODEL_BUCKET}/{EXPLAINER_MODEL_PATH} - envSecretRefName: seldon-init-container-secret - name: default - replicas: 1 - """ - with open("model.yaml","w") as f: - f.write(model_yaml) - run("kubectl apply -f model.yaml", shell=True) - ''' - - block6 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l seldon-deployment-id=income-classifier -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})", shell=True) - ''' - - block7 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l seldon-deployment-id=income-classifier -o jsonpath='{{.items[1].metadata.name}}' -n {DEPLOY_NAMESPACE})", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from 
kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_seldon.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_seldon') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def test_model(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - payload='{"data": {"ndarray": [[53,4,0,2,8,4,4,0,0,0,60,9]]}}' - cmd=f"""curl -d '{payload}' \\ - http://income-classifier-default.{DEPLOY_NAMESPACE}:8000/api/v1.0/predictions \\ - -H "Content-Type: application/json" - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - print(raw) - ''' - - block4 = ''' - payload='{"data": {"ndarray": [[53,4,0,2,8,4,4,0,0,0,60,9]]}}' - cmd=f"""curl -d '{payload}' \\ - http://income-classifier-default-explainer.{DEPLOY_NAMESPACE}:9000/api/v1.0/explain \\ - -H "Content-Type: application/json" - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - print(raw) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/test_model.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('test_model') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_outlier(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str, OUTLIER_MODEL_PATH: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - OUTLIER_MODEL_PATH = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import 
ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - outlier_yaml=f"""apiVersion: serving.knative.dev/v1 - kind: Service - metadata: - name: income-outlier - namespace: {DEPLOY_NAMESPACE} - spec: - template: - metadata: - annotations: - autoscaling.knative.dev/minScale: "1" - spec: - containers: - - image: seldonio/alibi-detect-server:1.2.2-dev_alibidetect - imagePullPolicy: IfNotPresent - args: - - --model_name - - adultod - - --http_port - - '8080' - - --protocol - - seldon.http - - --storage_uri - - s3://{MINIO_MODEL_BUCKET}/{OUTLIER_MODEL_PATH} - - --reply_url - - http://default-broker - - --event_type - - io.seldon.serving.inference.outlier - - --event_source - - io.seldon.serving.incomeod - - OutlierDetector - envFrom: - - secretRef: - name: seldon-init-container-secret - """ - with open("outlier.yaml","w") as f: - f.write(outlier_yaml) - run("kubectl apply -f outlier.yaml", shell=True) - ''' - - block4 = ''' - trigger_outlier_yaml=f"""apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: income-outlier-trigger - namespace: {DEPLOY_NAMESPACE} - spec: - filter: - sourceAndType: - type: io.seldon.serving.inference.request - subscriber: - ref: - apiVersion: serving.knative.dev/v1alpha1 - kind: Service - name: income-outlier - """ - with open("outlier_trigger.yaml","w") as f: - f.write(trigger_outlier_yaml) - run("kubectl apply -f outlier_trigger.yaml", shell=True) - ''' - - block5 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l serving.knative.dev/service=income-outlier -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_outlier.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_outlier') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_event_display(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, 
OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - event_display=f"""apiVersion: apps/v1 - kind: Deployment - metadata: - name: event-display - namespace: {DEPLOY_NAMESPACE} - spec: - replicas: 1 - selector: - matchLabels: &labels - app: event-display - template: - metadata: - labels: *labels - spec: - containers: - - name: helloworld-go - # Source code: https://github.com/knative/eventing-contrib/tree/master/cmd/event_display - image: gcr.io/knative-releases/knative.dev/eventing-contrib/cmd/event_display@sha256:f4628e97a836c77ed38bd3b6fd3d0b06de4d5e7db6704772fe674d48b20bd477 - --- - kind: Service - apiVersion: v1 - metadata: - name: event-display - namespace: {DEPLOY_NAMESPACE} - spec: - selector: - app: event-display - ports: - - protocol: TCP - port: 80 - targetPort: 8080 - --- - apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: income-outlier-display - namespace: {DEPLOY_NAMESPACE} - spec: - broker: default - filter: - attributes: - type: io.seldon.serving.inference.outlier - subscriber: - ref: - apiVersion: v1 - kind: Service - name: event-display - """ - with open("event_display.yaml","w") as f: - f.write(event_display) - run("kubectl apply -f event_display.yaml", shell=True) - ''' - - block4 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/event-display -n {DEPLOY_NAMESPACE}", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_event_display.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_event_display') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def test_outliers(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - 
secure=False) - ''' - - block3 = ''' - def predict(): - payload='{"data": {"ndarray": [[300, 4, 4, 2, 1, 4, 4, 0, 0, 0, 600, 9]]}}' - cmd=f"""curl -d '{payload}' \\ - http://income-classifier-default.{DEPLOY_NAMESPACE}:8000/api/v1.0/predictions \\ - -H "Content-Type: application/json" - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - print(raw) - ''' - - block4 = ''' - def get_outlier_event_display_logs(): - cmd=f"kubectl logs $(kubectl get pod -l app=event-display -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE}) -n {DEPLOY_NAMESPACE}" - ret = Popen(cmd, shell=True,stdout=PIPE) - res = ret.stdout.read().decode("utf-8").split("\\n") - data= [] - for i in range(0,len(res)): - if res[i] == 'Data,': - j = json.loads(json.loads(res[i+1])) - if "is_outlier"in j["data"].keys(): - data.append(j) - if len(data) > 0: - return data[-1] - else: - return None - j = None - while j is None: - predict() - print("Waiting for outlier logs, sleeping") - time.sleep(2) - j = get_outlier_event_display_logs() - - print(j) - print("Outlier",j["data"]["is_outlier"]==[1]) - ''' - - block5 = ''' - - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - ) - html_artifact = _kale_run_code(blocks) - with open("/test_outliers.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('test_outliers') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def explain(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_test = _kale_marshal_utils.load("X_test") - X_train = _kale_marshal_utils.load("X_train") - adult = _kale_marshal_utils.load("adult") - explainer = _kale_marshal_utils.load("explainer") - model = _kale_marshal_utils.load("model") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorTabular - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - import time - import json - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - model.predict(X_train) - idx = 0 - class_names = adult.target_names - print('Prediction: ', 
class_names[explainer.predict_fn(X_test[idx].reshape(1, -1))[0]]) - ''' - - block4 = ''' - explanation = explainer.explain(X_test[idx], threshold=0.95) - print('Anchor: %s' % (' AND '.join(explanation['names']))) - print('Precision: %.2f' % explanation['precision']) - print('Coverage: %.2f' % explanation['coverage']) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/explain.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('explain') - - _kale_mlmd_utils.call("mark_execution_complete") - - -setup_op = comp.func_to_container_op( - setup, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -build_model_op = comp.func_to_container_op( - build_model, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -build_outlier_op = comp.func_to_container_op( - build_outlier, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -train_explainer_op = comp.func_to_container_op( - train_explainer, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_seldon_op = comp.func_to_container_op( - deploy_seldon, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -test_model_op = comp.func_to_container_op( - test_model, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_outlier_op = comp.func_to_container_op( - deploy_outlier, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_event_display_op = comp.func_to_container_op( - deploy_event_display, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -test_outliers_op = comp.func_to_container_op( - test_outliers, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -explain_op = comp.func_to_container_op( - explain, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -@dsl.pipeline( - name='seldon-e2e-adult-ttonn', - description='Seldon e2e adult' -) -def auto_generated_pipeline(DEPLOY_NAMESPACE='admin', EXPLAINER_MODEL_PATH='sklearn/income/explainer', INCOME_MODEL_PATH='sklearn/income/model', MINIO_ACCESS_KEY='minio', MINIO_HOST='minio-service.kubeflow:9000', MINIO_MODEL_BUCKET='seldon', MINIO_SECRET_KEY='minio123', OUTLIER_MODEL_PATH='sklearn/income/outlier'): - pvolumes_dict = OrderedDict() - volume_step_names = [] - volume_name_parameters = [] - - marshal_vop = dsl.VolumeOp( - name="kale-marshal-volume", - resource_name="kale-marshal-pvc", - storage_class="nfs-client", - modes=dsl.VOLUME_MODE_RWM, - size="1Gi" - ) - volume_step_names.append(marshal_vop.name) - volume_name_parameters.append(marshal_vop.outputs["name"].full_name) - pvolumes_dict['/marshal'] = marshal_vop.volume - - volume_step_names.sort() - volume_name_parameters.sort() - - setup_task = setup_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after() - setup_task.container.working_dir = "/home/jovyan" - setup_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'setup': '/setup.html'}) - setup_task.output_artifact_paths.update(output_artifacts) - setup_task.add_pod_label("pipelines.kubeflow.org/metadata_written", "true") - dep_names = setup_task.dependent_names + volume_step_names - 
setup_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - setup_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - build_model_task = build_model_op(INCOME_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(setup_task) - build_model_task.container.working_dir = "/home/jovyan" - build_model_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'build_model': '/build_model.html'}) - build_model_task.output_artifact_paths.update(output_artifacts) - build_model_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = build_model_task.dependent_names + volume_step_names - build_model_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - build_model_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - build_outlier_task = build_outlier_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH)\ - .add_pvolumes(pvolumes_dict)\ - .after(build_model_task) - build_outlier_task.container.working_dir = "/home/jovyan" - build_outlier_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'build_outlier': '/build_outlier.html'}) - build_outlier_task.output_artifact_paths.update(output_artifacts) - build_outlier_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = build_outlier_task.dependent_names + volume_step_names - build_outlier_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - build_outlier_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - train_explainer_task = train_explainer_op(EXPLAINER_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(build_model_task) - train_explainer_task.container.working_dir = "/home/jovyan" - train_explainer_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'train_explainer': '/train_explainer.html'}) - train_explainer_task.output_artifact_paths.update(output_artifacts) - train_explainer_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = train_explainer_task.dependent_names + volume_step_names - train_explainer_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - train_explainer_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_seldon_task = deploy_seldon_op(DEPLOY_NAMESPACE, EXPLAINER_MODEL_PATH, INCOME_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_explainer_task) - 
deploy_seldon_task.container.working_dir = "/home/jovyan" - deploy_seldon_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'deploy_seldon': '/deploy_seldon.html'}) - deploy_seldon_task.output_artifact_paths.update(output_artifacts) - deploy_seldon_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_seldon_task.dependent_names + volume_step_names - deploy_seldon_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_seldon_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - test_model_task = test_model_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_seldon_task) - test_model_task.container.working_dir = "/home/jovyan" - test_model_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'test_model': '/test_model.html'}) - test_model_task.output_artifact_paths.update(output_artifacts) - test_model_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = test_model_task.dependent_names + volume_step_names - test_model_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - test_model_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_outlier_task = deploy_outlier_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH)\ - .add_pvolumes(pvolumes_dict)\ - .after(build_outlier_task, test_model_task) - deploy_outlier_task.container.working_dir = "/home/jovyan" - deploy_outlier_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'deploy_outlier': '/deploy_outlier.html'}) - deploy_outlier_task.output_artifact_paths.update(output_artifacts) - deploy_outlier_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_outlier_task.dependent_names + volume_step_names - deploy_outlier_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_outlier_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_event_display_task = deploy_event_display_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_outlier_task) - deploy_event_display_task.container.working_dir = "/home/jovyan" - deploy_event_display_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'deploy_event_display': '/deploy_event_display.html'}) - deploy_event_display_task.output_artifact_paths.update(output_artifacts) - deploy_event_display_task.add_pod_label( - 
"pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_event_display_task.dependent_names + volume_step_names - deploy_event_display_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_event_display_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - test_outliers_task = test_outliers_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_event_display_task) - test_outliers_task.container.working_dir = "/home/jovyan" - test_outliers_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'test_outliers': '/test_outliers.html'}) - test_outliers_task.output_artifact_paths.update(output_artifacts) - test_outliers_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = test_outliers_task.dependent_names + volume_step_names - test_outliers_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - test_outliers_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - explain_task = explain_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_explainer_task) - explain_task.container.working_dir = "/home/jovyan" - explain_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'explain': '/explain.html'}) - explain_task.output_artifact_paths.update(output_artifacts) - explain_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = explain_task.dependent_names + volume_step_names - explain_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - explain_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - -if __name__ == "__main__": - pipeline_func = auto_generated_pipeline - pipeline_filename = pipeline_func.__name__ + '.pipeline.tar.gz' - import kfp.compiler as compiler - compiler.Compiler().compile(pipeline_func, pipeline_filename) - - # Get or create an experiment and submit a pipeline run - import kfp - client = kfp.Client() - experiment = client.create_experiment('seldon-e2e-adult') - - # Submit a pipeline run - from kale.utils.kfp_utils import generate_run_name - run_name = generate_run_name('seldon-e2e-adult-ttonn') - run_result = client.run_pipeline( - experiment.id, run_name, pipeline_filename, {}) diff --git a/samples/contrib/e2e-outlier-drift-explainer/seldon/seldon_e2e_cifar10.ipynb b/samples/contrib/e2e-outlier-drift-explainer/seldon/seldon_e2e_cifar10.ipynb deleted file mode 100644 index aae0677209f..00000000000 --- a/samples/contrib/e2e-outlier-drift-explainer/seldon/seldon_e2e_cifar10.ipynb +++ /dev/null @@ -1,1449 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "# Seldon Deployment Pipeline For CIFAR10 Image Recognition\n", - "\n", - "In this example (and Kale Kubeflow pipeline) we create a deployment pipeline for a pretrained CIFAR10 
image model. \n", - "The following steps will be run:\n", - "\n", - " * Setup Minio client\n", - " * Download and test CIFAR10 model and train an Anchors Images explainer on it. Save both model and explainer to Minio.\n", - " * Deploy model and explainer using Seldon and test\n", - " * Train outlier detector\n", - " * Train drift detector\n", - " * Deploy knative eventing display to show asynchronous results from outlier and drift detectors. \n", - " * Deploy outlier detector and test\n", - " * deploy drift detector and test\n", - " \n", - " ### Setup\n", - " \n", - " You will need a kubeflow cluster >= 1.0 with \n", - " \n", - " * Knative eventing \n", - " * Seldon >= 1.2.1\n", - " \n", - " ### Kubeflow Jupyter Notebook Server\n", - " \n", - " To run this notebook inside kubeflow. Create a Jupyter notebook server using the image `seldonio/jupyter-lab-alibi-kale:0.11`\n", - " \n", - " ### GCP Setup\n", - " \n", - " On GCP If you use Kale to save this notebook as a pipeline you will need to add the storage_class of the `VolumeOp` to `nfs-client` if you have followed the steps to create a NFS RWX PV on GCP. e.g.:\n", - " \n", - " ```\n", - " marshal_vop = dsl.VolumeOp(\n", - " name=\"kale-marshal-volume\",\n", - " resource_name=\"kale-marshal-pvc\",\n", - " storage_class=\"nfs-client\",\n", - " modes=dsl.VOLUME_MODE_RWM,\n", - " size=\"1Gi\"\n", - " )\n", - " ```\n", - " \n", - "\n", - " " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "imports" - ] - }, - "outputs": [], - "source": [ - "import numpy as np\n", - "from sklearn.ensemble import RandomForestClassifier\n", - "from sklearn.compose import ColumnTransformer\n", - "from sklearn.pipeline import Pipeline\n", - "from sklearn.impute import SimpleImputer\n", - "from sklearn.metrics import accuracy_score\n", - "from sklearn.preprocessing import StandardScaler, OneHotEncoder\n", - "from alibi.explainers import AnchorImage\n", - "from alibi.datasets import fetch_adult\n", - "from minio import Minio\n", - "from minio.error import ResponseError\n", - "from joblib import dump, load\n", - "import dill\n", - "from subprocess import run, Popen, PIPE\n", - "from alibi_detect.utils.data import create_outlier_batch\n", - "from alibi_detect.utils.fetching import fetch_tf_model\n", - "import json\n", - "import logging\n", - "import matplotlib.pyplot as plt\n", - "import tensorflow as tf\n", - "tf.keras.backend.clear_session()\n", - "from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer\n", - "from tqdm import tqdm\n", - "\n", - "from alibi_detect.models.losses import elbo\n", - "from alibi_detect.od import OutlierVAE\n", - "from alibi_detect.utils.fetching import fetch_detector\n", - "from alibi_detect.utils.perturbation import apply_mask\n", - "from alibi_detect.utils.saving import save_detector, load_detector\n", - "from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image\n", - "import time\n", - "\n", - "logger = tf.get_logger()\n", - "logger.setLevel(logging.ERROR)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Setup Pipeline Paramers\n", - "\n", - "The following global variables can be set. These will be used as Pipeline parameters." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "pipeline-parameters" - ] - }, - "outputs": [], - "source": [ - "MINIO_HOST=\"minio-service.kubeflow:9000\"\n", - "MINIO_ACCESS_KEY=\"minio\"\n", - "MINIO_SECRET_KEY=\"minio123\"\n", - "MINIO_MODEL_BUCKET=\"seldon\"\n", - "CIFAR10_MODEL_PATH=\"tfserving/cifar10/model\"\n", - "EXPLAINER_MODEL_PATH=\"tfserving/cifar10/explainer\"\n", - "OUTLIER_MODEL_PATH=\"tfserving/cifar10/outlier\"\n", - "DRIFT_MODEL_PATH=\"tfserving/cifar10/drift\"\n", - "DEPLOY_NAMESPACE=\"admin\"\n", - "TRAIN_OUTLIER_DETECTOR=False\n", - "TRAIN_DRIFT_DETECTOR=False" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "functions" - ] - }, - "outputs": [], - "source": [ - "def get_minio():\n", - " return Minio(MINIO_HOST,\n", - " access_key=MINIO_ACCESS_KEY,\n", - " secret_key=MINIO_SECRET_KEY,\n", - " secure=False)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:setup" - ] - }, - "outputs": [], - "source": [ - "minioClient = get_minio()\n", - "buckets = minioClient.list_buckets()\n", - "for bucket in buckets:\n", - " print(bucket.name, bucket.creation_date)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "if not minioClient.bucket_exists(MINIO_MODEL_BUCKET):\n", - " minioClient.make_bucket(MINIO_MODEL_BUCKET)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Test and Save Model\n", - "\n", - "For simplicity, we will use a pretrained ResNet32 CIFAR10 TensorFlow model." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:train_model_and_explainer", - "prev:setup" - ] - }, - "outputs": [], - "source": [ - "model = fetch_tf_model('cifar10', 'resnet32')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "train, test = tf.keras.datasets.cifar10.load_data()\n", - "X_train, y_train = train\n", - "X_test, y_test = test\n", - "\n", - "X_train = X_train.astype('float32') / 255\n", - "X_test = X_test.astype('float32') / 255\n", - "print(X_train.shape, y_train.shape, X_test.shape, y_test.shape)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer',\n", - " 'dog', 'frog', 'horse', 'ship', 'truck']" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Test model locally."
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "idx = 1\n", - "X = X_test[idx].reshape(1, 32, 32, 3)\n", - "plt.imshow(X.reshape(32, 32, 3))\n", - "plt.axis('off')\n", - "plt.show()\n", - "print(\"class:\",class_names[y_test[idx][0]])\n", - "print(\"prediction:\",class_names[model.predict(X_test[idx:idx+1])[0].argmax()])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "modelfilepath=\"resnet\"\n", - "tf.saved_model.save(model, modelfilepath)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "from os import listdir\n", - "from os.path import isfile, join\n", - "\n", - "model_filepath=\"resnet\"\n", - "print(get_minio().fput_object(MINIO_MODEL_BUCKET, f\"{CIFAR10_MODEL_PATH}/1/saved_model.pb\", modelfilepath+\"/saved_model.pb\"))\n", - "variable_filepath = modelfilepath+\"/variables\"\n", - "onlyfiles = [f for f in listdir(variable_filepath) if isfile(join(variable_filepath, f))]\n", - "for filename in onlyfiles:\n", - " print(filename)\n", - " print(get_minio().fput_object(MINIO_MODEL_BUCKET, f\"{CIFAR10_MODEL_PATH}/1/variables/{filename}\", join(variable_filepath, filename)))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Train Explainer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:" - ] - }, - "outputs": [], - "source": [ - "def predict_fn(x):\n", - " return model.predict(x)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:" - ] - }, - "outputs": [], - "source": [ - "\n", - "image_shape = (32, 32, 3)\n", - "segmentation_fn = 'slic'\n", - "kwargs = {'n_segments': 5, 'compactness': 20, 'sigma': .5}\n", - "explainer = AnchorImage(predict_fn, image_shape, segmentation_fn=segmentation_fn, \n", - " segmentation_kwargs=kwargs, images_background=None)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "idx=0\n", - "image = X_test[0]\n", - "np.random.seed(0)\n", - "explanation = explainer.explain(image, threshold=.95, p_sample=.5, tau=0.25)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "X = X_test[idx].reshape(1, 32, 32, 3)\n", - "plt.imshow(X.reshape(32, 32, 3))\n", - "plt.axis('off')\n", - "plt.show()\n", - "print(\"class:\",class_names[y_test[idx][0]])\n", - "print(\"prediction:\",class_names[model.predict(X_test[idx:idx+1])[0].argmax()])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "plt.imshow(explanation[\"anchor\"])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "with open(\"explainer.dill\", \"wb\") as dill_file:\n", - " dill.dump(explainer, dill_file) \n", - " dill_file.close()\n", - "print(get_minio().fput_object(MINIO_MODEL_BUCKET, f\"{EXPLAINER_MODEL_PATH}/explainer.dill\", 'explainer.dill'))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Train Outlier Detector\n", - "\n", - "For further details and extended notebook see [Alibi-Detect Documentation](https://docs.seldon.io/projects/alibi-detect/en/stable/). 
These steps were derived from [Alibi-Detect CIFAR10 Example](https://docs.seldon.io/projects/alibi-detect/en/stable/examples/od_vae_cifar10.html)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:train_outlier_detector", - "prev:train_model_and_explainer" - ] - }, - "outputs": [], - "source": [ - "import logging\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import tensorflow as tf\n", - "tf.keras.backend.clear_session()\n", - "from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer\n", - "from tqdm import tqdm\n", - "\n", - "from alibi_detect.models.losses import elbo\n", - "from alibi_detect.od import OutlierVAE\n", - "from alibi_detect.utils.fetching import fetch_detector\n", - "from alibi_detect.utils.perturbation import apply_mask\n", - "from alibi_detect.utils.saving import save_detector, load_detector\n", - "from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image\n", - "\n", - "logger = tf.get_logger()\n", - "logger.setLevel(logging.ERROR)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "if TRAIN_OUTLIER_DETECTOR:\n", - " latent_dim = 1024\n", - " \n", - " encoder_net = tf.keras.Sequential(\n", - " [\n", - " InputLayer(input_shape=(32, 32, 3)),\n", - " Conv2D(64, 4, strides=2, padding='same', activation=tf.nn.relu),\n", - " Conv2D(128, 4, strides=2, padding='same', activation=tf.nn.relu),\n", - " Conv2D(512, 4, strides=2, padding='same', activation=tf.nn.relu)\n", - " ])\n", - "\n", - " decoder_net = tf.keras.Sequential(\n", - " [\n", - " InputLayer(input_shape=(latent_dim,)),\n", - " Dense(4*4*128),\n", - " Reshape(target_shape=(4, 4, 128)),\n", - " Conv2DTranspose(256, 4, strides=2, padding='same', activation=tf.nn.relu),\n", - " Conv2DTranspose(64, 4, strides=2, padding='same', activation=tf.nn.relu),\n", - " Conv2DTranspose(3, 4, strides=2, padding='same', activation='sigmoid')\n", - " ])\n", - " \n", - " # initialize outlier detector\n", - " od = OutlierVAE(threshold=.015, # threshold for outlier score\n", - " score_type='mse', # use MSE of reconstruction error for outlier detection\n", - " encoder_net=encoder_net, # can also pass VAE model instead\n", - " decoder_net=decoder_net, # of separate encoder and decoder\n", - " latent_dim=latent_dim,\n", - " samples=2)\n", - " # train\n", - " od.fit(X_train, \n", - " loss_fn=elbo,\n", - " cov_elbo=dict(sim=.05),\n", - " epochs=50,\n", - " verbose=True)\n", - "else:\n", - " od = load_detector(\"/home/models/samples/od/cifar10\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "idx = 8\n", - "X = X_train[idx].reshape(1, 32, 32, 3)\n", - "X_recon = od.vae(X)\n", - "plt.imshow(X.reshape(32, 32, 3))\n", - "plt.axis('off')\n", - "plt.show()\n", - "plt.imshow(X_recon.numpy().reshape(32, 32, 3))\n", - "plt.axis('off')\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "X = X_train[:500]\n", - "print(X.shape)\n", - "od_preds = od.predict(X,\n", - " outlier_type='instance', # use 'feature' or 'instance' level\n", - " return_feature_score=True, # scores used to determine outliers\n", - " return_instance_score=True)\n", - "print(list(od_preds['data'].keys()))\n", - "target = np.zeros(X.shape[0],).astype(int) # all normal CIFAR10 training 
instances\n", - "labels = ['normal', 'outlier']\n", - "plot_instance_score(od_preds, target, labels, od.threshold)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "from alibi_detect.utils.saving import save_detector, load_detector\n", - "from os import listdir\n", - "from os.path import isfile, join\n", - "\n", - "filepath=\"cifar10outlier\"\n", - "save_detector(od, filepath) \n", - "onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))]\n", - "for filename in onlyfiles:\n", - " print(filename)\n", - " print(get_minio().fput_object(MINIO_MODEL_BUCKET, f\"{OUTLIER_MODEL_PATH}/{filename}\", join(filepath, filename)))\n", - "filepath=\"cifar10outlier/model\"\n", - "onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))]\n", - "for filename in onlyfiles:\n", - " print(filename)\n", - " print(get_minio().fput_object(MINIO_MODEL_BUCKET, f\"{OUTLIER_MODEL_PATH}/model/{filename}\", join(filepath, filename)))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Train a Drift Detector" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:train_drift_detector", - "prev:train_model_and_explainer" - ] - }, - "outputs": [], - "source": [ - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import os\n", - "import tensorflow as tf\n", - "from tensorflow.keras.layers import Conv2D, Dense, Flatten, InputLayer, Reshape\n", - "\n", - "from alibi_detect.cd import KSDrift\n", - "from alibi_detect.cd.preprocess import uae, hidden_output\n", - "from alibi_detect.models.resnet import scale_by_instance\n", - "from alibi_detect.utils.fetching import fetch_tf_model, fetch_detector\n", - "from alibi_detect.utils.prediction import predict_batch\n", - "from alibi_detect.utils.saving import save_detector, load_detector\n", - "from alibi_detect.datasets import fetch_cifar10c, corruption_types_cifar10c" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "tf.random.set_seed(0)\n", - "\n", - "if True:\n", - " np.random.seed(0)\n", - " n_test = X_test.shape[0]\n", - " idx = np.random.choice(n_test, size=n_test // 2, replace=False)\n", - " idx_h0 = np.delete(np.arange(n_test), idx, axis=0)\n", - " X_ref,y_ref = X_test[idx], y_test[idx]\n", - " X_h0, y_h0 = X_test[idx_h0], y_test[idx_h0]\n", - " print(X_ref.shape, X_h0.shape)\n", - " # define encoder\n", - " encoding_dim = 32\n", - " encoder_net = tf.keras.Sequential(\n", - " [\n", - " InputLayer(input_shape=(32, 32, 3)),\n", - " Conv2D(64, 4, strides=2, padding='same', activation=tf.nn.relu),\n", - " Conv2D(128, 4, strides=2, padding='same', activation=tf.nn.relu),\n", - " Conv2D(512, 4, strides=2, padding='same', activation=tf.nn.relu),\n", - " Flatten(),\n", - " Dense(encoding_dim,)\n", - " ]\n", - " )\n", - "\n", - " # initialise drift detector\n", - " p_val = .05\n", - " cd = KSDrift(\n", - " p_val=p_val, # p-value for K-S test\n", - " X_ref=X_ref, # test against original test set\n", - " preprocess_fn=uae, # UAE for dimensionality reduction\n", - " preprocess_kwargs={'encoder_net': encoder_net, 'batch_size': 128},\n", - " alternative='two-sided' # other options: 'less', 'greater'\n", - " )\n", - "else:\n", - " cd = load_detector(\"/home/models/samples/cd/cifar10\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], 
- "source": [ - "from alibi_detect.utils.saving import save_detector, load_detector\n", - "from os import listdir\n", - "from os.path import isfile, join\n", - "\n", - "filepath=\"cifar10Drift\"\n", - "save_detector(cd, filepath) \n", - "onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))]\n", - "for filename in onlyfiles:\n", - " print(filename)\n", - " print(get_minio().fput_object(MINIO_MODEL_BUCKET, f\"{DRIFT_MODEL_PATH}/{filename}\", join(filepath, filename)))\n", - "filepath=\"cifar10Drift/model\"\n", - "onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))]\n", - "for filename in onlyfiles:\n", - " print(filename)\n", - " print(get_minio().fput_object(MINIO_MODEL_BUCKET, f\"{DRIFT_MODEL_PATH}/model/{filename}\", join(filepath, filename)))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Deploy Seldon Core Model" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:deploy_model", - "prev:train_model_and_explainer" - ] - }, - "outputs": [], - "source": [ - "secret = f\"\"\"apiVersion: v1\n", - "kind: Secret\n", - "metadata:\n", - " name: seldon-init-container-secret\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "type: Opaque\n", - "stringData:\n", - " AWS_ACCESS_KEY_ID: {MINIO_ACCESS_KEY}\n", - " AWS_SECRET_ACCESS_KEY: {MINIO_SECRET_KEY}\n", - " AWS_ENDPOINT_URL: http://{MINIO_HOST}\n", - " USE_SSL: \"false\"\n", - "\"\"\"\n", - "with open(\"secret.yaml\",\"w\") as f:\n", - " f.write(secret)\n", - "run(\"cat secret.yaml | kubectl apply -f -\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "sa = f\"\"\"apiVersion: v1\n", - "kind: ServiceAccount\n", - "metadata:\n", - " name: minio-sa\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "secrets:\n", - " - name: seldon-init-container-secret\n", - "\"\"\"\n", - "with open(\"sa.yaml\",\"w\") as f:\n", - " f.write(sa)\n", - "run(\"kubectl apply -f sa.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "model_yaml=f\"\"\"apiVersion: machinelearning.seldon.io/v1\n", - "kind: SeldonDeployment\n", - "metadata:\n", - " name: cifar10-classifier\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " protocol: tensorflow\n", - " predictors:\n", - " - componentSpecs:\n", - " graph:\n", - " implementation: TENSORFLOW_SERVER\n", - " modelUri: s3://{MINIO_MODEL_BUCKET}/{CIFAR10_MODEL_PATH}\n", - " envSecretRefName: seldon-init-container-secret\n", - " name: classifier\n", - " logger:\n", - " mode: all\n", - " explainer:\n", - " type: AnchorImages\n", - " name: default\n", - " replicas: 1\n", - "\"\"\"\n", - "with open(\"model.yaml\",\"w\") as f:\n", - " f.write(model_yaml)\n", - "run(\"kubectl apply -f model.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "run(f\"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l seldon-deployment-id=cifar10-classifier -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "run(f\"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l seldon-deployment-id=cifar10-classifier -o 
jsonpath='{{.items[1].metadata.name}}' -n {DEPLOY_NAMESPACE})\", shell=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Make a prediction request" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "def test_model():\n", - " idx=10\n", - " test_example=X_test[idx:idx+1].tolist()\n", - " payload='{\"instances\":'+f\"{test_example}\"+' }'\n", - " cmd=f\"\"\"curl -d '{payload}' \\\n", - " http://cifar10-classifier-default.{DEPLOY_NAMESPACE}:8000/v1/models/classifier/:predict \\\n", - " -H \"Content-Type: application/json\"\n", - " \"\"\"\n", - " ret = Popen(cmd, shell=True,stdout=PIPE)\n", - " raw = ret.stdout.read().decode(\"utf-8\")\n", - " print(raw)\n", - " res=json.loads(raw)\n", - " arr=np.array(res[\"predictions\"])\n", - " X = X_test[idx].reshape(1, 32, 32, 3)\n", - " plt.imshow(X.reshape(32, 32, 3))\n", - " plt.axis('off')\n", - " plt.show()\n", - " print(\"class:\",class_names[y_test[idx][0]])\n", - " print(\"prediction:\",class_names[arr[0].argmax()])\n", - "\n", - "ok = False\n", - "while not ok:\n", - " try:\n", - " test_model()\n", - " ok = True\n", - " except:\n", - " print(\"Failed calling model, sleeping\")\n", - " time.sleep(2)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "Make an explanation request" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "idx=10\n", - "test_example=X_test[idx:idx+1].tolist()\n", - "payload='{\"instances\":'+f\"{test_example}\"+' }'\n", - "cmd=f\"\"\"curl -d '{payload}' \\\n", - " http://cifar10-classifier-default-explainer.{DEPLOY_NAMESPACE}:9000/v1/models/cifar10-classifier/:explain \\\n", - " -H \"Content-Type: application/json\"\n", - "\"\"\"\n", - "ret = Popen(cmd, shell=True,stdout=PIPE)\n", - "raw = ret.stdout.read().decode(\"utf-8\")\n", - "print(raw)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Deploy KNative Eventing Event Display" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:deploy_event_display", - "prev:train_drift_detector", - "prev:train_outlier_detector", - "prev:deploy_model" - ] - }, - "outputs": [], - "source": [ - "event_display=f\"\"\"apiVersion: apps/v1\n", - "kind: Deployment\n", - "metadata:\n", - " name: event-display\n", - " namespace: {DEPLOY_NAMESPACE} \n", - "spec:\n", - " replicas: 1\n", - " selector:\n", - " matchLabels: &labels\n", - " app: event-display\n", - " template:\n", - " metadata:\n", - " labels: *labels\n", - " spec:\n", - " containers:\n", - " - name: helloworld-go\n", - " # Source code: https://github.com/knative/eventing-contrib/tree/master/cmd/event_display\n", - " image: gcr.io/knative-releases/knative.dev/eventing-contrib/cmd/event_display@sha256:f4628e97a836c77ed38bd3b6fd3d0b06de4d5e7db6704772fe674d48b20bd477\n", - "---\n", - "kind: Service\n", - "apiVersion: v1\n", - "metadata:\n", - " name: event-display\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " selector:\n", - " app: event-display\n", - " ports:\n", - " - protocol: TCP\n", - " port: 80\n", - " targetPort: 8080\n", - "---\n", - "apiVersion: eventing.knative.dev/v1alpha1\n", - "kind: Trigger\n", - "metadata:\n", - " name: cifar10-outlier-display\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " broker: default\n", - " filter:\n", - " attributes:\n", - " type: 
io.seldon.serving.inference.outlier\n", - " subscriber:\n", - " ref:\n", - " apiVersion: v1\n", - " kind: Service\n", - " name: event-display\n", - "---\n", - "apiVersion: eventing.knative.dev/v1alpha1\n", - "kind: Trigger\n", - "metadata:\n", - " name: cifar10-drift-display\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " broker: default\n", - " filter:\n", - " attributes:\n", - " type: io.seldon.serving.inference.drift\n", - " subscriber:\n", - " ref:\n", - " apiVersion: v1\n", - " kind: Service\n", - " name: event-display\n", - "\"\"\"\n", - "with open(\"event_display.yaml\",\"w\") as f:\n", - " f.write(event_display)\n", - "run(\"kubectl apply -f event_display.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "run(f\"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/event-display -n {DEPLOY_NAMESPACE}\", shell=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Deploy Seldon Outlier Detector" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:deploy_outlier_detector", - "prev:deploy_event_display" - ] - }, - "outputs": [], - "source": [ - "outlier_yaml=f\"\"\"apiVersion: serving.knative.dev/v1\n", - "kind: Service\n", - "metadata:\n", - " name: cifar10-outlier\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " template:\n", - " metadata:\n", - " annotations:\n", - " autoscaling.knative.dev/minScale: \"1\"\n", - " spec:\n", - " containers:\n", - " - image: seldonio/alibi-detect-server:1.2.1\n", - " imagePullPolicy: IfNotPresent\n", - " args:\n", - " - --model_name\n", - " - cifar10od\n", - " - --protocol\n", - " - tensorflow.http\n", - " - --storage_uri\n", - " - s3://{MINIO_MODEL_BUCKET}/{OUTLIER_MODEL_PATH}\n", - " - --reply_url\n", - " - http://default-broker \n", - " - --event_type\n", - " - io.seldon.serving.inference.outlier\n", - " - --event_source\n", - " - io.seldon.serving.cifar10od\n", - " - OutlierDetector\n", - " envFrom:\n", - " - secretRef:\n", - " name: seldon-init-container-secret\n", - "\"\"\"\n", - "with open(\"outlier.yaml\",\"w\") as f:\n", - " f.write(outlier_yaml)\n", - "run(\"kubectl apply -f outlier.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "trigger_outlier_yaml=f\"\"\"apiVersion: eventing.knative.dev/v1alpha1\n", - "kind: Trigger\n", - "metadata:\n", - " name: cifar10-outlier-trigger\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " filter:\n", - " sourceAndType:\n", - " type: io.seldon.serving.inference.request\n", - " subscriber:\n", - " ref:\n", - " apiVersion: serving.knative.dev/v1\n", - " kind: Service\n", - " name: cifar10-outlier\n", - "\"\"\"\n", - "with open(\"outlier_trigger.yaml\",\"w\") as f:\n", - " f.write(trigger_outlier_yaml)\n", - "run(\"kubectl apply -f outlier_trigger.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "run(f\"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l serving.knative.dev/service=cifar10-outlier -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})\", shell=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Test Seldon Outlier Detection" - ] - }, - { - "cell_type": "code", - "execution_count": null, - 
"metadata": { - "tags": [ - "block:test_oulier_detection", - "prev:deploy_outlier_detector" - ] - }, - "outputs": [], - "source": [ - "idx = 1\n", - "X = X_train[idx:idx+1]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "np.random.seed(0) \n", - "X_mask, mask = apply_mask(X.reshape(1, 32, 32, 3),\n", - " mask_size=(10,10),\n", - " n_masks=1,\n", - " channels=[0,1,2],\n", - " mask_type='normal',\n", - " noise_distr=(0,1),\n", - " clip_rng=(0,1))\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "def predict():\n", - " test_example=X_mask.tolist()\n", - " payload='{\"instances\":'+f\"{test_example}\"+' }'\n", - " cmd=f\"\"\"curl -d '{payload}' \\\n", - " http://cifar10-classifier-default.{DEPLOY_NAMESPACE}:8000/v1/models/classifier/:predict \\\n", - " -H \"Content-Type: application/json\"\n", - " \"\"\"\n", - " ret = Popen(cmd, shell=True,stdout=PIPE)\n", - " raw = ret.stdout.read().decode(\"utf-8\")\n", - " print(raw)\n", - " res=json.loads(raw)\n", - " arr=np.array(res[\"predictions\"])\n", - " plt.imshow(X_mask.reshape(32, 32, 3))\n", - " plt.axis('off')\n", - " plt.show()\n", - " print(\"class:\",class_names[y_train[idx][0]])\n", - " print(\"prediction:\",class_names[arr[0].argmax()])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "def get_outlier_event_display_logs():\n", - " cmd=f\"kubectl logs $(kubectl get pod -l app=event-display -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE}) -n {DEPLOY_NAMESPACE}\"\n", - " ret = Popen(cmd, shell=True,stdout=PIPE)\n", - " res = ret.stdout.read().decode(\"utf-8\").split(\"\\n\")\n", - " data= []\n", - " for i in range(0,len(res)):\n", - " if res[i] == 'Data,':\n", - " j = json.loads(json.loads(res[i+1]))\n", - " if \"is_outlier\"in j[\"data\"].keys():\n", - " data.append(j)\n", - " if len(data) > 0:\n", - " return data[-1]\n", - " else:\n", - " return None\n", - "j = None\n", - "while j is None:\n", - " predict()\n", - " print(\"Waiting for outlier logs, sleeping\")\n", - " time.sleep(2)\n", - " j = get_outlier_event_display_logs()\n", - " \n", - "print(j)\n", - "print(\"Outlier\",j[\"data\"][\"is_outlier\"]==[1])" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Deploy Seldon Drift Detector" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:deploy_drift_detector", - "prev:test_oulier_detection" - ] - }, - "outputs": [], - "source": [ - "drift_yaml=f\"\"\"apiVersion: serving.knative.dev/v1\n", - "kind: Service\n", - "metadata:\n", - " name: cifar10-drift\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " template:\n", - " metadata:\n", - " annotations:\n", - " autoscaling.knative.dev/minScale: \"1\"\n", - " spec:\n", - " containers:\n", - " - image: seldonio/alibi-detect-server:1.2.2-dev\n", - " imagePullPolicy: IfNotPresent\n", - " args:\n", - " - --model_name\n", - " - cifar10cd\n", - " - --protocol\n", - " - tensorflow.http\n", - " - --storage_uri\n", - " - s3://{MINIO_MODEL_BUCKET}/{DRIFT_MODEL_PATH}\n", - " - --reply_url\n", - " - http://default-broker\n", - " - --event_type\n", - " - io.seldon.serving.inference.drift\n", - " - --event_source\n", - " - io.seldon.serving.cifar10cd\n", - " - DriftDetector\n", - " - --drift_batch_size\n", - " - '500'\n", - " envFrom:\n", - " - 
secretRef:\n", - " name: seldon-init-container-secret\n", - "\"\"\"\n", - "with open(\"drift.yaml\",\"w\") as f:\n", - " f.write(drift_yaml)\n", - "run(\"kubectl apply -f drift.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "trigger_outlier_yaml=f\"\"\"apiVersion: eventing.knative.dev/v1alpha1\n", - "kind: Trigger\n", - "metadata:\n", - " name: cifar10-drift-trigger\n", - " namespace: {DEPLOY_NAMESPACE}\n", - "spec:\n", - " filter:\n", - " sourceAndType:\n", - " type: io.seldon.serving.inference.request\n", - " subscriber:\n", - " ref:\n", - " apiVersion: serving.knative.dev/v1\n", - " kind: Service\n", - " name: cifar10-drift\n", - "\"\"\"\n", - "with open(\"outlier_trigger.yaml\",\"w\") as f:\n", - " f.write(trigger_outlier_yaml)\n", - "run(\"kubectl apply -f outlier_trigger.yaml\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "run(f\"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l serving.knative.dev/service=cifar10-drift -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})\", shell=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Test Seldon Drift Detector" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "block:test_drift_detector", - "prev:deploy_drift_detector" - ] - }, - "outputs": [], - "source": [ - "def show(X):\n", - " plt.imshow(X.reshape(32, 32, 3))\n", - " plt.axis('off')\n", - " plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "from alibi_detect.datasets import fetch_cifar10c, corruption_types_cifar10c\n", - "corruption = ['motion_blur']\n", - "X_corr, y_corr = fetch_cifar10c(corruption=corruption, severity=5, return_X_y=True)\n", - "X_corr = X_corr.astype('float32') / 255" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "show(X_corr[0])\n", - "show(X_corr[1])\n", - "show(X_corr[2])\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "def predict(X):\n", - " test_example=X.tolist()\n", - " payload='{\"instances\":'+f\"{test_example}\"+' }'\n", - " with open(\"payload.json\",\"w\") as f:\n", - " f.write(payload)\n", - " cmd=f\"\"\"curl -d @./payload.json \\\n", - " http://cifar10-classifier-default.{DEPLOY_NAMESPACE}:8000/v1/models/classifier/:predict \\\n", - " -H \"Content-Type: application/json\"\n", - " \"\"\"\n", - " run(cmd, shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "def get_drift_event_display_logs():\n", - " cmd=f\"kubectl logs $(kubectl get pod -l app=event-display -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE}) -n {DEPLOY_NAMESPACE}\"\n", - " ret = Popen(cmd, shell=True,stdout=PIPE)\n", - " res = ret.stdout.read().decode(\"utf-8\").split(\"\\n\")\n", - " data= []\n", - " for i in range(0,len(res)):\n", - " if res[i] == 'Data,':\n", - " j = json.loads(json.loads(res[i+1]))\n", - " if \"is_drift\"in j[\"data\"].keys():\n", - " data.append(j)\n", - " if len(data) > 0:\n", - " return data[-1]\n", - " else:\n", - " return None\n", - "j = None\n", - "for i in 
range(0,1000,50):\n", - " X = X_corr[i:i+50]\n", - " predict(X)\n", - " print(\"Waiting for drift logs, sleeping\")\n", - " time.sleep(2)\n", - " j = get_drift_event_display_logs()\n", - " if j is not None:\n", - " break\n", - " \n", - "print(j)\n", - "print(\"Drift\",j[\"data\"][\"is_drift\"]==1)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Clean up" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "skip" - ] - }, - "outputs": [], - "source": [ - "run(f\"kubectl delete sdep cifar10-classifier -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete ksvc cifar10-outlier -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete ksvc cifar10-drift -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete trigger --all -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete sa minio-sa -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete secret seldon-init-container-secret -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete deployment event-display -n {DEPLOY_NAMESPACE}\", shell=True)\n", - "run(f\"kubectl delete svc event-display -n {DEPLOY_NAMESPACE}\", shell=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "kubeflow_notebook": { - "docker_image": "seldonio/jupyter-lab-alibi-kale:0.11", - "experiment": { - "id": "new", - "name": "seldon-e2e-cifar10" - }, - "experiment_name": "seldon-e2e-cifar10", - "katib_metadata": { - "algorithm": { - "algorithmName": "grid" - }, - "maxFailedTrialCount": 3, - "maxTrialCount": 12, - "objective": { - "objectiveMetricName": "", - "type": "minimize" - }, - "parallelTrialCount": 3, - "parameters": [] - }, - "katib_run": false, - "pipeline_description": "Seldon CIFAR10 Example", - "pipeline_name": "seldon-e2e-cifar10", - "volumes": [] - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.8" - }, - "varInspector": { - "cols": { - "lenName": 16, - "lenType": 16, - "lenVar": 40 - }, - "kernels_config": { - "python": { - "delete_cmd_postfix": "", - "delete_cmd_prefix": "del ", - "library": "var_list.py", - "varRefreshCmd": "print(var_dic_list())" - }, - "r": { - "delete_cmd_postfix": ") ", - "delete_cmd_prefix": "rm(", - "library": "var_list.r", - "varRefreshCmd": "cat(var_dic_list()) " - } - }, - "types_to_exclude": [ - "module", - "function", - "builtin_function_or_method", - "instance", - "_Feature" - ], - "window_display": false - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/samples/contrib/e2e-outlier-drift-explainer/seldon/seldon_e2e_cifar10.kale.default.py b/samples/contrib/e2e-outlier-drift-explainer/seldon/seldon_e2e_cifar10.kale.default.py deleted file mode 100644 index 98bcbc92461..00000000000 --- a/samples/contrib/e2e-outlier-drift-explainer/seldon/seldon_e2e_cifar10.kale.default.py +++ /dev/null @@ -1,1857 +0,0 @@ -import kfp.dsl as dsl -import json -import kfp.components as comp -from collections import OrderedDict -from kubernetes import client as k8s_client - - -def setup(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - 
MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - minioClient = get_minio() - buckets = minioClient.list_buckets() - for bucket in buckets: - print(bucket.name, bucket.creation_date) - ''' - - block4 = ''' - if not minioClient.bucket_exists(MINIO_MODEL_BUCKET): - minioClient.make_bucket(MINIO_MODEL_BUCKET) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/setup.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('setup') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def train_model_and_explainer(CIFAR10_MODEL_PATH: str, EXPLAINER_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - CIFAR10_MODEL_PATH = "{}" - EXPLAINER_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(CIFAR10_MODEL_PATH, EXPLAINER_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import 
fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - model = fetch_tf_model('cifar10', 'resnet32') - ''' - - block4 = ''' - train, test = tf.keras.datasets.cifar10.load_data() - X_train, y_train = train - X_test, y_test = test - - X_train = X_train.astype('float32') / 255 - X_test = X_test.astype('float32') / 255 - print(X_train.shape, y_train.shape, X_test.shape, y_test.shape) - ''' - - block5 = ''' - class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer', - 'dog', 'frog', 'horse', 'ship', 'truck'] - ''' - - block6 = ''' - idx = 1 - X = X_test[idx].reshape(1, 32, 32, 3) - plt.imshow(X.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - print("class:",class_names[y_test[idx][0]]) - print("prediction:",class_names[model.predict(X_test[idx:idx+1])[0].argmax()]) - ''' - - block7 = ''' - modelfilepath="resnet" - tf.saved_model.save(model, modelfilepath) - ''' - - block8 = ''' - from os import listdir - from os.path import isfile, join - - model_filepath="resnet" - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{CIFAR10_MODEL_PATH}/1/saved_model.pb", modelfilepath+"/saved_model.pb")) - variable_filepath = modelfilepath+"/variables" - onlyfiles = [f for f in listdir(variable_filepath) if isfile(join(variable_filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{CIFAR10_MODEL_PATH}/1/variables/{filename}", join(variable_filepath, filename))) - ''' - - block9 = ''' - def predict_fn(x): - return model.predict(x) - ''' - - block10 = ''' - - image_shape = (32, 32, 3) - segmentation_fn = 'slic' - kwargs = {'n_segments': 5, 'compactness': 20, 'sigma': .5} - explainer = AnchorImage(predict_fn, image_shape, segmentation_fn=segmentation_fn, - segmentation_kwargs=kwargs, images_background=None) - ''' - - block11 = ''' - idx=0 - image = X_test[0] - np.random.seed(0) - explanation = explainer.explain(image, threshold=.95, p_sample=.5, tau=0.25) - ''' - - block12 = ''' - X = X_test[idx].reshape(1, 32, 32, 3) - plt.imshow(X.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - print("class:",class_names[y_test[idx][0]]) - print("prediction:",class_names[model.predict(X_test[idx:idx+1])[0].argmax()]) - ''' - - block13 = ''' - plt.imshow(explanation["anchor"]) - ''' - - block14 = ''' - with open("explainer.dill", "wb") as dill_file: - dill.dump(explainer, dill_file) - dill_file.close() - print(get_minio().fput_object(MINIO_MODEL_BUCKET, 
f"{EXPLAINER_MODEL_PATH}/explainer.dill", 'explainer.dill')) - ''' - - data_saving_block = ''' - # -----------------------DATA SAVING START--------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.save(X_test, "X_test") - _kale_marshal_utils.save(X_train, "X_train") - _kale_marshal_utils.save(class_names, "class_names") - _kale_marshal_utils.save(y_test, "y_test") - _kale_marshal_utils.save(y_train, "y_train") - # -----------------------DATA SAVING END----------------------------------- - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - block9, - block10, - block11, - block12, - block13, - block14, - data_saving_block) - html_artifact = _kale_run_code(blocks) - with open("/train_model_and_explainer.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('train_model_and_explainer') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_model(CIFAR10_MODEL_PATH: str, DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - CIFAR10_MODEL_PATH = "{}" - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(CIFAR10_MODEL_PATH, DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_test = _kale_marshal_utils.load("X_test") - class_names = _kale_marshal_utils.load("class_names") - y_test = _kale_marshal_utils.load("y_test") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from 
alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - secret = f"""apiVersion: v1 - kind: Secret - metadata: - name: seldon-init-container-secret - namespace: {DEPLOY_NAMESPACE} - type: Opaque - stringData: - AWS_ACCESS_KEY_ID: {MINIO_ACCESS_KEY} - AWS_SECRET_ACCESS_KEY: {MINIO_SECRET_KEY} - AWS_ENDPOINT_URL: http://{MINIO_HOST} - USE_SSL: "false" - """ - with open("secret.yaml","w") as f: - f.write(secret) - run("cat secret.yaml | kubectl apply -f -", shell=True) - ''' - - block4 = ''' - sa = f"""apiVersion: v1 - kind: ServiceAccount - metadata: - name: minio-sa - namespace: {DEPLOY_NAMESPACE} - secrets: - - name: seldon-init-container-secret - """ - with open("sa.yaml","w") as f: - f.write(sa) - run("kubectl apply -f sa.yaml", shell=True) - ''' - - block5 = ''' - model_yaml=f"""apiVersion: machinelearning.seldon.io/v1 - kind: SeldonDeployment - metadata: - name: cifar10-classifier - namespace: {DEPLOY_NAMESPACE} - spec: - protocol: tensorflow - predictors: - - componentSpecs: - graph: - implementation: TENSORFLOW_SERVER - modelUri: s3://{MINIO_MODEL_BUCKET}/{CIFAR10_MODEL_PATH} - envSecretRefName: seldon-init-container-secret - name: classifier - logger: - mode: all - explainer: - type: AnchorImages - name: default - replicas: 1 - """ - with open("model.yaml","w") as f: - f.write(model_yaml) - run("kubectl apply -f model.yaml", shell=True) - ''' - - block6 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l seldon-deployment-id=cifar10-classifier -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})", shell=True) - ''' - - block7 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l seldon-deployment-id=cifar10-classifier -o jsonpath='{{.items[1].metadata.name}}' -n {DEPLOY_NAMESPACE})", shell=True) - ''' - - block8 = ''' - def test_model(): - idx=10 - test_example=X_test[idx:idx+1].tolist() - payload='{"instances":'+f"{test_example}"+' }' - cmd=f"""curl -d '{payload}' \\ - http://cifar10-classifier-default.{DEPLOY_NAMESPACE}:8000/v1/models/classifier/:predict \\ - -H "Content-Type: application/json" - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - print(raw) - res=json.loads(raw) - arr=np.array(res["predictions"]) - X = X_test[idx].reshape(1, 32, 32, 3) - plt.imshow(X.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - print("class:",class_names[y_test[idx][0]]) - print("prediction:",class_names[arr[0].argmax()]) - - ok = False - while not ok: - try: - test_model() - ok = True - except: - print("Failed calling model, sleeping") - time.sleep(2) - ''' - - block9 = ''' - idx=10 - test_example=X_test[idx:idx+1].tolist() - payload='{"instances":'+f"{test_example}"+' }' - cmd=f"""curl -d '{payload}' \\ - http://cifar10-classifier-default-explainer.{DEPLOY_NAMESPACE}:9000/v1/models/cifar10-classifier/:explain \\ - -H "Content-Type: application/json" - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - print(raw) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, 
data_loading_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - block9, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_model.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_model') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def train_drift_detector(DRIFT_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DRIFT_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DRIFT_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_test = _kale_marshal_utils.load("X_test") - y_test = _kale_marshal_utils.load("y_test") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - import matplotlib.pyplot as plt - import numpy as np - import os - import tensorflow as tf - from tensorflow.keras.layers import Conv2D, Dense, Flatten, InputLayer, Reshape - - from alibi_detect.cd import KSDrift - from alibi_detect.cd.preprocess import uae, hidden_output - from alibi_detect.models.resnet import scale_by_instance - from alibi_detect.utils.fetching import fetch_tf_model, fetch_detector - from alibi_detect.utils.prediction import predict_batch - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.datasets import fetch_cifar10c, corruption_types_cifar10c - ''' - - block4 = ''' - tf.random.set_seed(0) - - if True: - np.random.seed(0) - n_test = X_test.shape[0] - idx = 
np.random.choice(n_test, size=n_test // 2, replace=False) - idx_h0 = np.delete(np.arange(n_test), idx, axis=0) - X_ref,y_ref = X_test[idx], y_test[idx] - X_h0, y_h0 = X_test[idx_h0], y_test[idx_h0] - print(X_ref.shape, X_h0.shape) - # define encoder - encoding_dim = 32 - encoder_net = tf.keras.Sequential( - [ - InputLayer(input_shape=(32, 32, 3)), - Conv2D(64, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2D(128, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2D(512, 4, strides=2, padding='same', activation=tf.nn.relu), - Flatten(), - Dense(encoding_dim,) - ] - ) - - # initialise drift detector - p_val = .05 - cd = KSDrift( - p_val=p_val, # p-value for K-S test - X_ref=X_ref, # test against original test set - preprocess_fn=uae, # UAE for dimensionality reduction - preprocess_kwargs={'encoder_net': encoder_net, 'batch_size': 128}, - alternative='two-sided' # other options: 'less', 'greater' - ) - else: - cd = load_detector("/home/models/samples/cd/cifar10") - ''' - - block5 = ''' - from alibi_detect.utils.saving import save_detector, load_detector - from os import listdir - from os.path import isfile, join - - filepath="cifar10Drift" - save_detector(cd, filepath) - onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{DRIFT_MODEL_PATH}/{filename}", join(filepath, filename))) - filepath="cifar10Drift/model" - onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{DRIFT_MODEL_PATH}/model/{filename}", join(filepath, filename))) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - block5, - ) - html_artifact = _kale_run_code(blocks) - with open("/train_drift_detector.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('train_drift_detector') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def train_outlier_detector(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str, OUTLIER_MODEL_PATH: str, TRAIN_OUTLIER_DETECTOR: bool): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - OUTLIER_MODEL_PATH = "{}" - TRAIN_OUTLIER_DETECTOR = {} - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH, TRAIN_OUTLIER_DETECTOR) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_train = _kale_marshal_utils.load("X_train") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import 
StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - import logging - import matplotlib.pyplot as plt - import numpy as np - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block4 = ''' - if TRAIN_OUTLIER_DETECTOR: - latent_dim = 1024 - - encoder_net = tf.keras.Sequential( - [ - InputLayer(input_shape=(32, 32, 3)), - Conv2D(64, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2D(128, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2D(512, 4, strides=2, padding='same', activation=tf.nn.relu) - ]) - - decoder_net = tf.keras.Sequential( - [ - InputLayer(input_shape=(latent_dim,)), - Dense(4*4*128), - Reshape(target_shape=(4, 4, 128)), - Conv2DTranspose(256, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2DTranspose(64, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2DTranspose(3, 4, strides=2, padding='same', activation='sigmoid') - ]) - - # initialize outlier detector - od = OutlierVAE(threshold=.015, # threshold for outlier score - score_type='mse', # use MSE of reconstruction error for outlier detection - encoder_net=encoder_net, # can also pass VAE model instead - decoder_net=decoder_net, # of separate encoder and decoder - latent_dim=latent_dim, - samples=2) - # train - od.fit(X_train, - loss_fn=elbo, - cov_elbo=dict(sim=.05), - epochs=50, - verbose=True) - else: - od = load_detector("/home/models/samples/od/cifar10") - ''' - - block5 = ''' - idx = 8 - X = X_train[idx].reshape(1, 32, 32, 3) - X_recon = od.vae(X) - plt.imshow(X.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - plt.imshow(X_recon.numpy().reshape(32, 32, 3)) - plt.axis('off') - plt.show() - ''' - - block6 = ''' - X = X_train[:500] - print(X.shape) - od_preds = od.predict(X, - outlier_type='instance', # use 'feature' or 'instance' level - 
return_feature_score=True, # scores used to determine outliers - return_instance_score=True) - print(list(od_preds['data'].keys())) - target = np.zeros(X.shape[0],).astype(int) # all normal CIFAR10 training instances - labels = ['normal', 'outlier'] - plot_instance_score(od_preds, target, labels, od.threshold) - ''' - - block7 = ''' - from alibi_detect.utils.saving import save_detector, load_detector - from os import listdir - from os.path import isfile, join - - filepath="cifar10outlier" - save_detector(od, filepath) - onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{OUTLIER_MODEL_PATH}/{filename}", join(filepath, filename))) - filepath="cifar10outlier/model" - onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{OUTLIER_MODEL_PATH}/model/{filename}", join(filepath, filename))) - ''' - - data_saving_block = ''' - # -----------------------DATA SAVING START--------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.save(X_train, "X_train") - # -----------------------DATA SAVING END----------------------------------- - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - data_saving_block) - html_artifact = _kale_run_code(blocks) - with open("/train_outlier_detector.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('train_outlier_detector') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_event_display(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from 
alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - event_display=f"""apiVersion: apps/v1 - kind: Deployment - metadata: - name: event-display - namespace: {DEPLOY_NAMESPACE} - spec: - replicas: 1 - selector: - matchLabels: &labels - app: event-display - template: - metadata: - labels: *labels - spec: - containers: - - name: helloworld-go - # Source code: https://github.com/knative/eventing-contrib/tree/master/cmd/event_display - image: gcr.io/knative-releases/knative.dev/eventing-contrib/cmd/event_display@sha256:f4628e97a836c77ed38bd3b6fd3d0b06de4d5e7db6704772fe674d48b20bd477 - --- - kind: Service - apiVersion: v1 - metadata: - name: event-display - namespace: {DEPLOY_NAMESPACE} - spec: - selector: - app: event-display - ports: - - protocol: TCP - port: 80 - targetPort: 8080 - --- - apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: cifar10-outlier-display - namespace: {DEPLOY_NAMESPACE} - spec: - broker: default - filter: - attributes: - type: io.seldon.serving.inference.outlier - subscriber: - ref: - apiVersion: v1 - kind: Service - name: event-display - --- - apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: cifar10-drift-display - namespace: {DEPLOY_NAMESPACE} - spec: - broker: default - filter: - attributes: - type: io.seldon.serving.inference.drift - subscriber: - ref: - apiVersion: v1 - kind: Service - name: event-display - """ - with open("event_display.yaml","w") as f: - f.write(event_display) - run("kubectl apply -f event_display.yaml", shell=True) - ''' - - block4 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/event-display -n {DEPLOY_NAMESPACE}", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_event_display.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_event_display') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_outlier_detector(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str, OUTLIER_MODEL_PATH: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - OUTLIER_MODEL_PATH = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from 
minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - outlier_yaml=f"""apiVersion: serving.knative.dev/v1 - kind: Service - metadata: - name: cifar10-outlier - namespace: {DEPLOY_NAMESPACE} - spec: - template: - metadata: - annotations: - autoscaling.knative.dev/minScale: "1" - spec: - containers: - - image: seldonio/alibi-detect-server:1.2.1 - imagePullPolicy: IfNotPresent - args: - - --model_name - - cifar10od - - --protocol - - tensorflow.http - - --storage_uri - - s3://{MINIO_MODEL_BUCKET}/{OUTLIER_MODEL_PATH} - - --reply_url - - http://default-broker - - --event_type - - io.seldon.serving.inference.outlier - - --event_source - - io.seldon.serving.cifar10od - - OutlierDetector - envFrom: - - secretRef: - name: seldon-init-container-secret - """ - with open("outlier.yaml","w") as f: - f.write(outlier_yaml) - run("kubectl apply -f outlier.yaml", shell=True) - ''' - - block4 = ''' - trigger_outlier_yaml=f"""apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: cifar10-outlier-trigger - namespace: {DEPLOY_NAMESPACE} - spec: - filter: - sourceAndType: - type: io.seldon.serving.inference.request - subscriber: - ref: - apiVersion: serving.knative.dev/v1 - kind: Service - name: cifar10-outlier - """ - with open("outlier_trigger.yaml","w") as f: - f.write(trigger_outlier_yaml) - run("kubectl apply -f outlier_trigger.yaml", shell=True) - ''' - - block5 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l serving.knative.dev/service=cifar10-outlier -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_outlier_detector.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_outlier_detector') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def test_oulier_detection(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as 
_kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_train = _kale_marshal_utils.load("X_train") - class_names = _kale_marshal_utils.load("class_names") - y_train = _kale_marshal_utils.load("y_train") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - idx = 1 - X = X_train[idx:idx+1] - ''' - - block4 = ''' - np.random.seed(0) - X_mask, mask = apply_mask(X.reshape(1, 32, 32, 3), - mask_size=(10,10), - n_masks=1, - channels=[0,1,2], - mask_type='normal', - noise_distr=(0,1), - clip_rng=(0,1)) - ''' - - block5 = ''' - def predict(): - test_example=X_mask.tolist() - payload='{"instances":'+f"{test_example}"+' }' - cmd=f"""curl -d '{payload}' \\ - http://cifar10-classifier-default.{DEPLOY_NAMESPACE}:8000/v1/models/classifier/:predict \\ - -H "Content-Type: application/json" - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - print(raw) - res=json.loads(raw) - arr=np.array(res["predictions"]) - plt.imshow(X_mask.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - print("class:",class_names[y_train[idx][0]]) - print("prediction:",class_names[arr[0].argmax()]) - ''' - - block6 = ''' - def get_outlier_event_display_logs(): - cmd=f"kubectl logs $(kubectl get pod -l app=event-display -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE}) -n {DEPLOY_NAMESPACE}" - ret = Popen(cmd, shell=True,stdout=PIPE) - res = ret.stdout.read().decode("utf-8").split("\\n") - data= [] - for i in range(0,len(res)): - if res[i] == 'Data,': - j = json.loads(json.loads(res[i+1])) - if "is_outlier"in j["data"].keys(): - data.append(j) - if len(data) > 0: - return data[-1] - else: - return None - j = None - while j is None: - predict() - print("Waiting 
for outlier logs, sleeping") - time.sleep(2) - j = get_outlier_event_display_logs() - - print(j) - print("Outlier",j["data"]["is_outlier"]==[1]) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - block5, - block6, - ) - html_artifact = _kale_run_code(blocks) - with open("/test_oulier_detection.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('test_oulier_detection') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_drift_detector(DEPLOY_NAMESPACE: str, DRIFT_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - DRIFT_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, DRIFT_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - drift_yaml=f"""apiVersion: serving.knative.dev/v1 - kind: Service - metadata: - name: cifar10-drift - namespace: {DEPLOY_NAMESPACE} - spec: - template: - metadata: - annotations: - autoscaling.knative.dev/minScale: "1" - spec: - containers: - - image: seldonio/alibi-detect-server:1.2.2-dev - imagePullPolicy: IfNotPresent - args: - - --model_name - - cifar10cd - - --protocol - - tensorflow.http - - --storage_uri - - s3://{MINIO_MODEL_BUCKET}/{DRIFT_MODEL_PATH} - - --reply_url - - http://default-broker - - --event_type - - io.seldon.serving.inference.drift - - --event_source - - io.seldon.serving.cifar10cd - - DriftDetector - - --drift_batch_size - - '500' - envFrom: - - secretRef: - name: seldon-init-container-secret - """ - with 
open("drift.yaml","w") as f: - f.write(drift_yaml) - run("kubectl apply -f drift.yaml", shell=True) - ''' - - block4 = ''' - trigger_outlier_yaml=f"""apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: cifar10-drift-trigger - namespace: {DEPLOY_NAMESPACE} - spec: - filter: - sourceAndType: - type: io.seldon.serving.inference.request - subscriber: - ref: - apiVersion: serving.knative.dev/v1 - kind: Service - name: cifar10-drift - """ - with open("outlier_trigger.yaml","w") as f: - f.write(trigger_outlier_yaml) - run("kubectl apply -f outlier_trigger.yaml", shell=True) - ''' - - block5 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l serving.knative.dev/service=cifar10-drift -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_drift_detector.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_drift_detector') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def test_drift_detector(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - def show(X): - plt.imshow(X.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - ''' - - block4 = ''' - from alibi_detect.datasets import fetch_cifar10c, corruption_types_cifar10c - corruption = ['motion_blur'] - X_corr, y_corr = 
fetch_cifar10c(corruption=corruption, severity=5, return_X_y=True) - X_corr = X_corr.astype('float32') / 255 - ''' - - block5 = ''' - show(X_corr[0]) - show(X_corr[1]) - show(X_corr[2]) - ''' - - block6 = ''' - def predict(X): - test_example=X.tolist() - payload='{"instances":'+f"{test_example}"+' }' - with open("payload.json","w") as f: - f.write(payload) - cmd=f"""curl -d @./payload.json \\ - http://cifar10-classifier-default.{DEPLOY_NAMESPACE}:8000/v1/models/classifier/:predict \\ - -H "Content-Type: application/json" - """ - run(cmd, shell=True) - ''' - - block7 = ''' - def get_drift_event_display_logs(): - cmd=f"kubectl logs $(kubectl get pod -l app=event-display -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE}) -n {DEPLOY_NAMESPACE}" - ret = Popen(cmd, shell=True,stdout=PIPE) - res = ret.stdout.read().decode("utf-8").split("\\n") - data= [] - for i in range(0,len(res)): - if res[i] == 'Data,': - j = json.loads(json.loads(res[i+1])) - if "is_drift"in j["data"].keys(): - data.append(j) - if len(data) > 0: - return data[-1] - else: - return None - j = None - for i in range(0,1000,50): - X = X_corr[i:i+50] - predict(X) - print("Waiting for drift logs, sleeping") - time.sleep(2) - j = get_drift_event_display_logs() - if j is not None: - break - - print(j) - print("Drift",j["data"]["is_drift"]==1) - ''' - - block8 = ''' - - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - ) - html_artifact = _kale_run_code(blocks) - with open("/test_drift_detector.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('test_drift_detector') - - _kale_mlmd_utils.call("mark_execution_complete") - - -setup_op = comp.func_to_container_op( - setup, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -train_model_and_explainer_op = comp.func_to_container_op( - train_model_and_explainer, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_model_op = comp.func_to_container_op( - deploy_model, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -train_drift_detector_op = comp.func_to_container_op( - train_drift_detector, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -train_outlier_detector_op = comp.func_to_container_op( - train_outlier_detector, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_event_display_op = comp.func_to_container_op( - deploy_event_display, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_outlier_detector_op = comp.func_to_container_op( - deploy_outlier_detector, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -test_oulier_detection_op = comp.func_to_container_op( - test_oulier_detection, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_drift_detector_op = comp.func_to_container_op( - deploy_drift_detector, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -test_drift_detector_op = comp.func_to_container_op( - test_drift_detector, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -@dsl.pipeline( - name='seldon-e2e-cifar10-glv9p', - description='Seldon CIFAR10 Example' -) -def auto_generated_pipeline(CIFAR10_MODEL_PATH='tfserving/cifar10/model', DEPLOY_NAMESPACE='admin', DRIFT_MODEL_PATH='tfserving/cifar10/drift', EXPLAINER_MODEL_PATH='tfserving/cifar10/explainer', MINIO_ACCESS_KEY='minio', 
MINIO_HOST='minio-service.kubeflow:9000', MINIO_MODEL_BUCKET='seldon', MINIO_SECRET_KEY='minio123', OUTLIER_MODEL_PATH='tfserving/cifar10/outlier', TRAIN_DRIFT_DETECTOR='False', TRAIN_OUTLIER_DETECTOR='False'): - pvolumes_dict = OrderedDict() - volume_step_names = [] - volume_name_parameters = [] - - marshal_vop = dsl.VolumeOp( - name="kale-marshal-volume", - resource_name="kale-marshal-pvc", - modes=dsl.VOLUME_MODE_RWM, - size="1Gi" - ) - volume_step_names.append(marshal_vop.name) - volume_name_parameters.append(marshal_vop.outputs["name"].full_name) - pvolumes_dict['/marshal'] = marshal_vop.volume - - volume_step_names.sort() - volume_name_parameters.sort() - - setup_task = setup_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after() - setup_task.container.working_dir = "/home/jovyan" - setup_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'setup': '/setup.html'}) - setup_task.output_artifact_paths.update(output_artifacts) - setup_task.add_pod_label("pipelines.kubeflow.org/metadata_written", "true") - dep_names = setup_task.dependent_names + volume_step_names - setup_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - setup_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - train_model_and_explainer_task = train_model_and_explainer_op(CIFAR10_MODEL_PATH, EXPLAINER_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(setup_task) - train_model_and_explainer_task.container.working_dir = "/home/jovyan" - train_model_and_explainer_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'train_model_and_explainer': '/train_model_and_explainer.html'}) - train_model_and_explainer_task.output_artifact_paths.update( - output_artifacts) - train_model_and_explainer_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = train_model_and_explainer_task.dependent_names + volume_step_names - train_model_and_explainer_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - train_model_and_explainer_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_model_task = deploy_model_op(CIFAR10_MODEL_PATH, DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_model_and_explainer_task) - deploy_model_task.container.working_dir = "/home/jovyan" - deploy_model_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'deploy_model': '/deploy_model.html'}) - deploy_model_task.output_artifact_paths.update(output_artifacts) - deploy_model_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_model_task.dependent_names + volume_step_names - deploy_model_task.add_pod_annotation( - 
"kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_model_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - train_drift_detector_task = train_drift_detector_op(DRIFT_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_model_and_explainer_task) - train_drift_detector_task.container.working_dir = "/home/jovyan" - train_drift_detector_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'train_drift_detector': '/train_drift_detector.html'}) - train_drift_detector_task.output_artifact_paths.update(output_artifacts) - train_drift_detector_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = train_drift_detector_task.dependent_names + volume_step_names - train_drift_detector_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - train_drift_detector_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - train_outlier_detector_task = train_outlier_detector_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH, TRAIN_OUTLIER_DETECTOR)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_model_and_explainer_task) - train_outlier_detector_task.container.working_dir = "/home/jovyan" - train_outlier_detector_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'train_outlier_detector': '/train_outlier_detector.html'}) - train_outlier_detector_task.output_artifact_paths.update(output_artifacts) - train_outlier_detector_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = train_outlier_detector_task.dependent_names + volume_step_names - train_outlier_detector_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - train_outlier_detector_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_event_display_task = deploy_event_display_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_drift_detector_task, train_outlier_detector_task, deploy_model_task) - deploy_event_display_task.container.working_dir = "/home/jovyan" - deploy_event_display_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'deploy_event_display': '/deploy_event_display.html'}) - deploy_event_display_task.output_artifact_paths.update(output_artifacts) - deploy_event_display_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_event_display_task.dependent_names + volume_step_names - deploy_event_display_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_event_display_task.add_pod_annotation( - 
"kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_outlier_detector_task = deploy_outlier_detector_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_event_display_task) - deploy_outlier_detector_task.container.working_dir = "/home/jovyan" - deploy_outlier_detector_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'deploy_outlier_detector': '/deploy_outlier_detector.html'}) - deploy_outlier_detector_task.output_artifact_paths.update(output_artifacts) - deploy_outlier_detector_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_outlier_detector_task.dependent_names + volume_step_names - deploy_outlier_detector_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_outlier_detector_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - test_oulier_detection_task = test_oulier_detection_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_outlier_detector_task) - test_oulier_detection_task.container.working_dir = "/home/jovyan" - test_oulier_detection_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'test_oulier_detection': '/test_oulier_detection.html'}) - test_oulier_detection_task.output_artifact_paths.update(output_artifacts) - test_oulier_detection_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = test_oulier_detection_task.dependent_names + volume_step_names - test_oulier_detection_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - test_oulier_detection_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_drift_detector_task = deploy_drift_detector_op(DEPLOY_NAMESPACE, DRIFT_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(test_oulier_detection_task) - deploy_drift_detector_task.container.working_dir = "/home/jovyan" - deploy_drift_detector_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'deploy_drift_detector': '/deploy_drift_detector.html'}) - deploy_drift_detector_task.output_artifact_paths.update(output_artifacts) - deploy_drift_detector_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_drift_detector_task.dependent_names + volume_step_names - deploy_drift_detector_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_drift_detector_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - test_drift_detector_task = test_drift_detector_op(DEPLOY_NAMESPACE, 
MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_drift_detector_task) - test_drift_detector_task.container.working_dir = "/home/jovyan" - test_drift_detector_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'test_drift_detector': '/test_drift_detector.html'}) - test_drift_detector_task.output_artifact_paths.update(output_artifacts) - test_drift_detector_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = test_drift_detector_task.dependent_names + volume_step_names - test_drift_detector_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - test_drift_detector_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - -if __name__ == "__main__": - pipeline_func = auto_generated_pipeline - pipeline_filename = pipeline_func.__name__ + '.pipeline.tar.gz' - import kfp.compiler as compiler - compiler.Compiler().compile(pipeline_func, pipeline_filename) - - # Get or create an experiment and submit a pipeline run - import kfp - client = kfp.Client() - experiment = client.create_experiment('seldon-e2e-cifar10') - - # Submit a pipeline run - from kale.utils.kfp_utils import generate_run_name - run_name = generate_run_name('seldon-e2e-cifar10-glv9p') - run_result = client.run_pipeline( - experiment.id, run_name, pipeline_filename, {}) diff --git a/samples/contrib/e2e-outlier-drift-explainer/seldon/seldon_e2e_cifar10.kale.nfs.py b/samples/contrib/e2e-outlier-drift-explainer/seldon/seldon_e2e_cifar10.kale.nfs.py deleted file mode 100644 index 62a241e42f7..00000000000 --- a/samples/contrib/e2e-outlier-drift-explainer/seldon/seldon_e2e_cifar10.kale.nfs.py +++ /dev/null @@ -1,1858 +0,0 @@ -import kfp.dsl as dsl -import json -import kfp.components as comp -from collections import OrderedDict -from kubernetes import client as k8s_client - - -def setup(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od 
import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - minioClient = get_minio() - buckets = minioClient.list_buckets() - for bucket in buckets: - print(bucket.name, bucket.creation_date) - ''' - - block4 = ''' - if not minioClient.bucket_exists(MINIO_MODEL_BUCKET): - minioClient.make_bucket(MINIO_MODEL_BUCKET) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/setup.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('setup') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def train_model_and_explainer(CIFAR10_MODEL_PATH: str, EXPLAINER_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - CIFAR10_MODEL_PATH = "{}" - EXPLAINER_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(CIFAR10_MODEL_PATH, EXPLAINER_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - model = fetch_tf_model('cifar10', 'resnet32') - ''' - - block4 = ''' - train, test = tf.keras.datasets.cifar10.load_data() - X_train, y_train = 
train - X_test, y_test = test - - X_train = X_train.astype('float32') / 255 - X_test = X_test.astype('float32') / 255 - print(X_train.shape, y_train.shape, X_test.shape, y_test.shape) - ''' - - block5 = ''' - class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer', - 'dog', 'frog', 'horse', 'ship', 'truck'] - ''' - - block6 = ''' - idx = 1 - X = X_test[idx].reshape(1, 32, 32, 3) - plt.imshow(X.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - print("class:",class_names[y_test[idx][0]]) - print("prediction:",class_names[model.predict(X_test[idx:idx+1])[0].argmax()]) - ''' - - block7 = ''' - modelfilepath="resnet" - tf.saved_model.save(model, modelfilepath) - ''' - - block8 = ''' - from os import listdir - from os.path import isfile, join - - model_filepath="resnet" - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{CIFAR10_MODEL_PATH}/1/saved_model.pb", modelfilepath+"/saved_model.pb")) - variable_filepath = modelfilepath+"/variables" - onlyfiles = [f for f in listdir(variable_filepath) if isfile(join(variable_filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{CIFAR10_MODEL_PATH}/1/variables/{filename}", join(variable_filepath, filename))) - ''' - - block9 = ''' - def predict_fn(x): - return model.predict(x) - ''' - - block10 = ''' - - image_shape = (32, 32, 3) - segmentation_fn = 'slic' - kwargs = {'n_segments': 5, 'compactness': 20, 'sigma': .5} - explainer = AnchorImage(predict_fn, image_shape, segmentation_fn=segmentation_fn, - segmentation_kwargs=kwargs, images_background=None) - ''' - - block11 = ''' - idx=0 - image = X_test[0] - np.random.seed(0) - explanation = explainer.explain(image, threshold=.95, p_sample=.5, tau=0.25) - ''' - - block12 = ''' - X = X_test[idx].reshape(1, 32, 32, 3) - plt.imshow(X.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - print("class:",class_names[y_test[idx][0]]) - print("prediction:",class_names[model.predict(X_test[idx:idx+1])[0].argmax()]) - ''' - - block13 = ''' - plt.imshow(explanation["anchor"]) - ''' - - block14 = ''' - with open("explainer.dill", "wb") as dill_file: - dill.dump(explainer, dill_file) - dill_file.close() - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{EXPLAINER_MODEL_PATH}/explainer.dill", 'explainer.dill')) - ''' - - data_saving_block = ''' - # -----------------------DATA SAVING START--------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.save(X_test, "X_test") - _kale_marshal_utils.save(X_train, "X_train") - _kale_marshal_utils.save(class_names, "class_names") - _kale_marshal_utils.save(y_test, "y_test") - _kale_marshal_utils.save(y_train, "y_train") - # -----------------------DATA SAVING END----------------------------------- - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - block9, - block10, - block11, - block12, - block13, - block14, - data_saving_block) - html_artifact = _kale_run_code(blocks) - with open("/train_model_and_explainer.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('train_model_and_explainer') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_model(CIFAR10_MODEL_PATH: str, DEPLOY_NAMESPACE: str, 
MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - CIFAR10_MODEL_PATH = "{}" - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(CIFAR10_MODEL_PATH, DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_test = _kale_marshal_utils.load("X_test") - class_names = _kale_marshal_utils.load("class_names") - y_test = _kale_marshal_utils.load("y_test") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - secret = f"""apiVersion: v1 - kind: Secret - metadata: - name: seldon-init-container-secret - namespace: {DEPLOY_NAMESPACE} - type: Opaque - stringData: - AWS_ACCESS_KEY_ID: {MINIO_ACCESS_KEY} - AWS_SECRET_ACCESS_KEY: {MINIO_SECRET_KEY} - AWS_ENDPOINT_URL: http://{MINIO_HOST} - USE_SSL: "false" - """ - with open("secret.yaml","w") as f: - f.write(secret) - run("cat secret.yaml | kubectl apply -f -", shell=True) - ''' - - block4 = ''' - sa = f"""apiVersion: v1 - kind: ServiceAccount - metadata: - name: minio-sa - namespace: {DEPLOY_NAMESPACE} - secrets: - - name: seldon-init-container-secret - """ - with open("sa.yaml","w") as f: - f.write(sa) - run("kubectl apply -f sa.yaml", shell=True) - ''' - - block5 = ''' - model_yaml=f"""apiVersion: machinelearning.seldon.io/v1 - kind: SeldonDeployment - metadata: - name: cifar10-classifier - namespace: {DEPLOY_NAMESPACE} - spec: - protocol: tensorflow - predictors: - - componentSpecs: - graph: - implementation: TENSORFLOW_SERVER - modelUri: 
s3://{MINIO_MODEL_BUCKET}/{CIFAR10_MODEL_PATH} - envSecretRefName: seldon-init-container-secret - name: classifier - logger: - mode: all - explainer: - type: AnchorImages - name: default - replicas: 1 - """ - with open("model.yaml","w") as f: - f.write(model_yaml) - run("kubectl apply -f model.yaml", shell=True) - ''' - - block6 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l seldon-deployment-id=cifar10-classifier -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})", shell=True) - ''' - - block7 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l seldon-deployment-id=cifar10-classifier -o jsonpath='{{.items[1].metadata.name}}' -n {DEPLOY_NAMESPACE})", shell=True) - ''' - - block8 = ''' - def test_model(): - idx=10 - test_example=X_test[idx:idx+1].tolist() - payload='{"instances":'+f"{test_example}"+' }' - cmd=f"""curl -d '{payload}' \\ - http://cifar10-classifier-default.{DEPLOY_NAMESPACE}:8000/v1/models/classifier/:predict \\ - -H "Content-Type: application/json" - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - print(raw) - res=json.loads(raw) - arr=np.array(res["predictions"]) - X = X_test[idx].reshape(1, 32, 32, 3) - plt.imshow(X.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - print("class:",class_names[y_test[idx][0]]) - print("prediction:",class_names[arr[0].argmax()]) - - ok = False - while not ok: - try: - test_model() - ok = True - except: - print("Failed calling model, sleeping") - time.sleep(2) - ''' - - block9 = ''' - idx=10 - test_example=X_test[idx:idx+1].tolist() - payload='{"instances":'+f"{test_example}"+' }' - cmd=f"""curl -d '{payload}' \\ - http://cifar10-classifier-default-explainer.{DEPLOY_NAMESPACE}:9000/v1/models/cifar10-classifier/:explain \\ - -H "Content-Type: application/json" - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - print(raw) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - block9, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_model.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_model') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def train_drift_detector(DRIFT_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DRIFT_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DRIFT_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_test = _kale_marshal_utils.load("X_test") - y_test = _kale_marshal_utils.load("y_test") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble 
import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - import matplotlib.pyplot as plt - import numpy as np - import os - import tensorflow as tf - from tensorflow.keras.layers import Conv2D, Dense, Flatten, InputLayer, Reshape - - from alibi_detect.cd import KSDrift - from alibi_detect.cd.preprocess import uae, hidden_output - from alibi_detect.models.resnet import scale_by_instance - from alibi_detect.utils.fetching import fetch_tf_model, fetch_detector - from alibi_detect.utils.prediction import predict_batch - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.datasets import fetch_cifar10c, corruption_types_cifar10c - ''' - - block4 = ''' - tf.random.set_seed(0) - - if True: - np.random.seed(0) - n_test = X_test.shape[0] - idx = np.random.choice(n_test, size=n_test // 2, replace=False) - idx_h0 = np.delete(np.arange(n_test), idx, axis=0) - X_ref,y_ref = X_test[idx], y_test[idx] - X_h0, y_h0 = X_test[idx_h0], y_test[idx_h0] - print(X_ref.shape, X_h0.shape) - # define encoder - encoding_dim = 32 - encoder_net = tf.keras.Sequential( - [ - InputLayer(input_shape=(32, 32, 3)), - Conv2D(64, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2D(128, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2D(512, 4, strides=2, padding='same', activation=tf.nn.relu), - Flatten(), - Dense(encoding_dim,) - ] - ) - - # initialise drift detector - p_val = .05 - cd = KSDrift( - p_val=p_val, # p-value for K-S test - X_ref=X_ref, # test against original test set - preprocess_fn=uae, # UAE for dimensionality reduction - preprocess_kwargs={'encoder_net': encoder_net, 'batch_size': 128}, - alternative='two-sided' # other options: 'less', 'greater' - ) - else: - cd = load_detector("/home/models/samples/cd/cifar10") - ''' - - block5 = ''' - from alibi_detect.utils.saving import save_detector, load_detector - from os import listdir - from os.path import isfile, join - - filepath="cifar10Drift" - save_detector(cd, filepath) - onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))] - for filename in onlyfiles: - print(filename) - 
print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{DRIFT_MODEL_PATH}/{filename}", join(filepath, filename))) - filepath="cifar10Drift/model" - onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{DRIFT_MODEL_PATH}/model/{filename}", join(filepath, filename))) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - block5, - ) - html_artifact = _kale_run_code(blocks) - with open("/train_drift_detector.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('train_drift_detector') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def train_outlier_detector(MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str, OUTLIER_MODEL_PATH: str, TRAIN_OUTLIER_DETECTOR: bool): - pipeline_parameters_block = ''' - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - OUTLIER_MODEL_PATH = "{}" - TRAIN_OUTLIER_DETECTOR = {} - '''.format(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH, TRAIN_OUTLIER_DETECTOR) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_train = _kale_marshal_utils.load("X_train") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - import logging - import matplotlib.pyplot as plt - import numpy as np - import tensorflow 
as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block4 = ''' - if TRAIN_OUTLIER_DETECTOR: - latent_dim = 1024 - - encoder_net = tf.keras.Sequential( - [ - InputLayer(input_shape=(32, 32, 3)), - Conv2D(64, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2D(128, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2D(512, 4, strides=2, padding='same', activation=tf.nn.relu) - ]) - - decoder_net = tf.keras.Sequential( - [ - InputLayer(input_shape=(latent_dim,)), - Dense(4*4*128), - Reshape(target_shape=(4, 4, 128)), - Conv2DTranspose(256, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2DTranspose(64, 4, strides=2, padding='same', activation=tf.nn.relu), - Conv2DTranspose(3, 4, strides=2, padding='same', activation='sigmoid') - ]) - - # initialize outlier detector - od = OutlierVAE(threshold=.015, # threshold for outlier score - score_type='mse', # use MSE of reconstruction error for outlier detection - encoder_net=encoder_net, # can also pass VAE model instead - decoder_net=decoder_net, # of separate encoder and decoder - latent_dim=latent_dim, - samples=2) - # train - od.fit(X_train, - loss_fn=elbo, - cov_elbo=dict(sim=.05), - epochs=50, - verbose=True) - else: - od = load_detector("/home/models/samples/od/cifar10") - ''' - - block5 = ''' - idx = 8 - X = X_train[idx].reshape(1, 32, 32, 3) - X_recon = od.vae(X) - plt.imshow(X.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - plt.imshow(X_recon.numpy().reshape(32, 32, 3)) - plt.axis('off') - plt.show() - ''' - - block6 = ''' - X = X_train[:500] - print(X.shape) - od_preds = od.predict(X, - outlier_type='instance', # use 'feature' or 'instance' level - return_feature_score=True, # scores used to determine outliers - return_instance_score=True) - print(list(od_preds['data'].keys())) - target = np.zeros(X.shape[0],).astype(int) # all normal CIFAR10 training instances - labels = ['normal', 'outlier'] - plot_instance_score(od_preds, target, labels, od.threshold) - ''' - - block7 = ''' - from alibi_detect.utils.saving import save_detector, load_detector - from os import listdir - from os.path import isfile, join - - filepath="cifar10outlier" - save_detector(od, filepath) - onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{OUTLIER_MODEL_PATH}/{filename}", join(filepath, filename))) - filepath="cifar10outlier/model" - onlyfiles = [f for f in listdir(filepath) if isfile(join(filepath, f))] - for filename in onlyfiles: - print(filename) - print(get_minio().fput_object(MINIO_MODEL_BUCKET, f"{OUTLIER_MODEL_PATH}/model/{filename}", join(filepath, filename))) - ''' - - data_saving_block = ''' - # -----------------------DATA SAVING START--------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.save(X_train, "X_train") - # -----------------------DATA 
SAVING END----------------------------------- - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - data_saving_block) - html_artifact = _kale_run_code(blocks) - with open("/train_outlier_detector.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('train_outlier_detector') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_event_display(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - event_display=f"""apiVersion: apps/v1 - kind: Deployment - metadata: - name: event-display - namespace: {DEPLOY_NAMESPACE} - spec: - replicas: 1 - selector: - matchLabels: &labels - app: event-display - template: - metadata: - labels: *labels - spec: - containers: - - name: helloworld-go - # Source code: https://github.com/knative/eventing-contrib/tree/master/cmd/event_display - image: gcr.io/knative-releases/knative.dev/eventing-contrib/cmd/event_display@sha256:f4628e97a836c77ed38bd3b6fd3d0b06de4d5e7db6704772fe674d48b20bd477 - --- - kind: Service - apiVersion: v1 - metadata: - name: event-display - namespace: {DEPLOY_NAMESPACE} - spec: - selector: - app: event-display - ports: - - protocol: TCP - port: 80 - targetPort: 8080 - --- - apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: cifar10-outlier-display - namespace: {DEPLOY_NAMESPACE} - spec: - broker: default - filter: - attributes: - type: 
io.seldon.serving.inference.outlier - subscriber: - ref: - apiVersion: v1 - kind: Service - name: event-display - --- - apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: cifar10-drift-display - namespace: {DEPLOY_NAMESPACE} - spec: - broker: default - filter: - attributes: - type: io.seldon.serving.inference.drift - subscriber: - ref: - apiVersion: v1 - kind: Service - name: event-display - """ - with open("event_display.yaml","w") as f: - f.write(event_display) - run("kubectl apply -f event_display.yaml", shell=True) - ''' - - block4 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/event-display -n {DEPLOY_NAMESPACE}", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_event_display.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_event_display') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_outlier_detector(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str, OUTLIER_MODEL_PATH: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - OUTLIER_MODEL_PATH = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - outlier_yaml=f"""apiVersion: serving.knative.dev/v1 - kind: Service - metadata: - name: cifar10-outlier - namespace: {DEPLOY_NAMESPACE} - spec: - template: - metadata: - annotations: - autoscaling.knative.dev/minScale: "1" - spec: - 
containers: - - image: seldonio/alibi-detect-server:1.2.1 - imagePullPolicy: IfNotPresent - args: - - --model_name - - cifar10od - - --protocol - - tensorflow.http - - --storage_uri - - s3://{MINIO_MODEL_BUCKET}/{OUTLIER_MODEL_PATH} - - --reply_url - - http://default-broker - - --event_type - - io.seldon.serving.inference.outlier - - --event_source - - io.seldon.serving.cifar10od - - OutlierDetector - envFrom: - - secretRef: - name: seldon-init-container-secret - """ - with open("outlier.yaml","w") as f: - f.write(outlier_yaml) - run("kubectl apply -f outlier.yaml", shell=True) - ''' - - block4 = ''' - trigger_outlier_yaml=f"""apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: cifar10-outlier-trigger - namespace: {DEPLOY_NAMESPACE} - spec: - filter: - sourceAndType: - type: io.seldon.serving.inference.request - subscriber: - ref: - apiVersion: serving.knative.dev/v1 - kind: Service - name: cifar10-outlier - """ - with open("outlier_trigger.yaml","w") as f: - f.write(trigger_outlier_yaml) - run("kubectl apply -f outlier_trigger.yaml", shell=True) - ''' - - block5 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l serving.knative.dev/service=cifar10-outlier -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_outlier_detector.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_outlier_detector') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def test_oulier_detection(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - data_loading_block = ''' - # -----------------------DATA LOADING START-------------------------------- - from kale.marshal import utils as _kale_marshal_utils - _kale_marshal_utils.set_kale_data_directory("/marshal") - _kale_marshal_utils.set_kale_directory_file_names() - X_train = _kale_marshal_utils.load("X_train") - class_names = _kale_marshal_utils.load("class_names") - y_train = _kale_marshal_utils.load("y_train") - # -----------------------DATA LOADING END---------------------------------- - ''' - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as 
tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - idx = 1 - X = X_train[idx:idx+1] - ''' - - block4 = ''' - np.random.seed(0) - X_mask, mask = apply_mask(X.reshape(1, 32, 32, 3), - mask_size=(10,10), - n_masks=1, - channels=[0,1,2], - mask_type='normal', - noise_distr=(0,1), - clip_rng=(0,1)) - ''' - - block5 = ''' - def predict(): - test_example=X_mask.tolist() - payload='{"instances":'+f"{test_example}"+' }' - cmd=f"""curl -d '{payload}' \\ - http://cifar10-classifier-default.{DEPLOY_NAMESPACE}:8000/v1/models/classifier/:predict \\ - -H "Content-Type: application/json" - """ - ret = Popen(cmd, shell=True,stdout=PIPE) - raw = ret.stdout.read().decode("utf-8") - print(raw) - res=json.loads(raw) - arr=np.array(res["predictions"]) - plt.imshow(X_mask.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - print("class:",class_names[y_train[idx][0]]) - print("prediction:",class_names[arr[0].argmax()]) - ''' - - block6 = ''' - def get_outlier_event_display_logs(): - cmd=f"kubectl logs $(kubectl get pod -l app=event-display -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE}) -n {DEPLOY_NAMESPACE}" - ret = Popen(cmd, shell=True,stdout=PIPE) - res = ret.stdout.read().decode("utf-8").split("\\n") - data= [] - for i in range(0,len(res)): - if res[i] == 'Data,': - j = json.loads(json.loads(res[i+1])) - if "is_outlier"in j["data"].keys(): - data.append(j) - if len(data) > 0: - return data[-1] - else: - return None - j = None - while j is None: - predict() - print("Waiting for outlier logs, sleeping") - time.sleep(2) - j = get_outlier_event_display_logs() - - print(j) - print("Outlier",j["data"]["is_outlier"]==[1]) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, data_loading_block, - block1, - block2, - block3, - block4, - block5, - block6, - ) - html_artifact = _kale_run_code(blocks) - with open("/test_oulier_detection.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('test_oulier_detection') - - _kale_mlmd_utils.call("mark_execution_complete") - - -def deploy_drift_detector(DEPLOY_NAMESPACE: str, DRIFT_MODEL_PATH: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_MODEL_BUCKET: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - DRIFT_MODEL_PATH = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_MODEL_BUCKET = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, DRIFT_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import 
RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - drift_yaml=f"""apiVersion: serving.knative.dev/v1 - kind: Service - metadata: - name: cifar10-drift - namespace: {DEPLOY_NAMESPACE} - spec: - template: - metadata: - annotations: - autoscaling.knative.dev/minScale: "1" - spec: - containers: - - image: seldonio/alibi-detect-server:1.2.2-dev - imagePullPolicy: IfNotPresent - args: - - --model_name - - cifar10cd - - --protocol - - tensorflow.http - - --storage_uri - - s3://{MINIO_MODEL_BUCKET}/{DRIFT_MODEL_PATH} - - --reply_url - - http://default-broker - - --event_type - - io.seldon.serving.inference.drift - - --event_source - - io.seldon.serving.cifar10cd - - DriftDetector - - --drift_batch_size - - '500' - envFrom: - - secretRef: - name: seldon-init-container-secret - """ - with open("drift.yaml","w") as f: - f.write(drift_yaml) - run("kubectl apply -f drift.yaml", shell=True) - ''' - - block4 = ''' - trigger_outlier_yaml=f"""apiVersion: eventing.knative.dev/v1alpha1 - kind: Trigger - metadata: - name: cifar10-drift-trigger - namespace: {DEPLOY_NAMESPACE} - spec: - filter: - sourceAndType: - type: io.seldon.serving.inference.request - subscriber: - ref: - apiVersion: serving.knative.dev/v1 - kind: Service - name: cifar10-drift - """ - with open("outlier_trigger.yaml","w") as f: - f.write(trigger_outlier_yaml) - run("kubectl apply -f outlier_trigger.yaml", shell=True) - ''' - - block5 = ''' - run(f"kubectl rollout status -n {DEPLOY_NAMESPACE} deploy/$(kubectl get deploy -l serving.knative.dev/service=cifar10-drift -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE})", shell=True) - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - ) - html_artifact = _kale_run_code(blocks) - with open("/deploy_drift_detector.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('deploy_drift_detector') - - 
_kale_mlmd_utils.call("mark_execution_complete") - - -def test_drift_detector(DEPLOY_NAMESPACE: str, MINIO_ACCESS_KEY: str, MINIO_HOST: str, MINIO_SECRET_KEY: str): - pipeline_parameters_block = ''' - DEPLOY_NAMESPACE = "{}" - MINIO_ACCESS_KEY = "{}" - MINIO_HOST = "{}" - MINIO_SECRET_KEY = "{}" - '''.format(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY) - - from kale.utils import mlmd_utils as _kale_mlmd_utils - _kale_mlmd_utils.init_metadata() - - block1 = ''' - import numpy as np - from sklearn.ensemble import RandomForestClassifier - from sklearn.compose import ColumnTransformer - from sklearn.pipeline import Pipeline - from sklearn.impute import SimpleImputer - from sklearn.metrics import accuracy_score - from sklearn.preprocessing import StandardScaler, OneHotEncoder - from alibi.explainers import AnchorImage - from alibi.datasets import fetch_adult - from minio import Minio - from minio.error import ResponseError - from joblib import dump, load - import dill - from subprocess import run, Popen, PIPE - from alibi_detect.utils.data import create_outlier_batch - from alibi_detect.utils.fetching import fetch_tf_model - import json - import logging - import matplotlib.pyplot as plt - import tensorflow as tf - tf.keras.backend.clear_session() - from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Layer, Reshape, InputLayer - from tqdm import tqdm - - from alibi_detect.models.losses import elbo - from alibi_detect.od import OutlierVAE - from alibi_detect.utils.fetching import fetch_detector - from alibi_detect.utils.perturbation import apply_mask - from alibi_detect.utils.saving import save_detector, load_detector - from alibi_detect.utils.visualize import plot_instance_score, plot_feature_outlier_image - import time - - logger = tf.get_logger() - logger.setLevel(logging.ERROR) - ''' - - block2 = ''' - def get_minio(): - return Minio(MINIO_HOST, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - ''' - - block3 = ''' - def show(X): - plt.imshow(X.reshape(32, 32, 3)) - plt.axis('off') - plt.show() - ''' - - block4 = ''' - from alibi_detect.datasets import fetch_cifar10c, corruption_types_cifar10c - corruption = ['motion_blur'] - X_corr, y_corr = fetch_cifar10c(corruption=corruption, severity=5, return_X_y=True) - X_corr = X_corr.astype('float32') / 255 - ''' - - block5 = ''' - show(X_corr[0]) - show(X_corr[1]) - show(X_corr[2]) - ''' - - block6 = ''' - def predict(X): - test_example=X.tolist() - payload='{"instances":'+f"{test_example}"+' }' - with open("payload.json","w") as f: - f.write(payload) - cmd=f"""curl -d @./payload.json \\ - http://cifar10-classifier-default.{DEPLOY_NAMESPACE}:8000/v1/models/classifier/:predict \\ - -H "Content-Type: application/json" - """ - run(cmd, shell=True) - ''' - - block7 = ''' - def get_drift_event_display_logs(): - cmd=f"kubectl logs $(kubectl get pod -l app=event-display -o jsonpath='{{.items[0].metadata.name}}' -n {DEPLOY_NAMESPACE}) -n {DEPLOY_NAMESPACE}" - ret = Popen(cmd, shell=True,stdout=PIPE) - res = ret.stdout.read().decode("utf-8").split("\\n") - data= [] - for i in range(0,len(res)): - if res[i] == 'Data,': - j = json.loads(json.loads(res[i+1])) - if "is_drift"in j["data"].keys(): - data.append(j) - if len(data) > 0: - return data[-1] - else: - return None - j = None - for i in range(0,1000,50): - X = X_corr[i:i+50] - predict(X) - print("Waiting for drift logs, sleeping") - time.sleep(2) - j = get_drift_event_display_logs() - if j is not None: - break - - print(j) - 
print("Drift",j["data"]["is_drift"]==1) - ''' - - block8 = ''' - - ''' - - # run the code blocks inside a jupyter kernel - from kale.utils.jupyter_utils import run_code as _kale_run_code - from kale.utils.kfp_utils import \ - update_uimetadata as _kale_update_uimetadata - blocks = (pipeline_parameters_block, - block1, - block2, - block3, - block4, - block5, - block6, - block7, - block8, - ) - html_artifact = _kale_run_code(blocks) - with open("/test_drift_detector.html", "w") as f: - f.write(html_artifact) - _kale_update_uimetadata('test_drift_detector') - - _kale_mlmd_utils.call("mark_execution_complete") - - -setup_op = comp.func_to_container_op( - setup, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -train_model_and_explainer_op = comp.func_to_container_op( - train_model_and_explainer, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_model_op = comp.func_to_container_op( - deploy_model, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -train_drift_detector_op = comp.func_to_container_op( - train_drift_detector, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -train_outlier_detector_op = comp.func_to_container_op( - train_outlier_detector, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_event_display_op = comp.func_to_container_op( - deploy_event_display, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_outlier_detector_op = comp.func_to_container_op( - deploy_outlier_detector, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -test_oulier_detection_op = comp.func_to_container_op( - test_oulier_detection, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -deploy_drift_detector_op = comp.func_to_container_op( - deploy_drift_detector, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -test_drift_detector_op = comp.func_to_container_op( - test_drift_detector, base_image='seldonio/jupyter-lab-alibi-kale:0.11') - - -@dsl.pipeline( - name='seldon-e2e-cifar10-glv9p', - description='Seldon CIFAR10 Example' -) -def auto_generated_pipeline(CIFAR10_MODEL_PATH='tfserving/cifar10/model', DEPLOY_NAMESPACE='admin', DRIFT_MODEL_PATH='tfserving/cifar10/drift', EXPLAINER_MODEL_PATH='tfserving/cifar10/explainer', MINIO_ACCESS_KEY='minio', MINIO_HOST='minio-service.kubeflow:9000', MINIO_MODEL_BUCKET='seldon', MINIO_SECRET_KEY='minio123', OUTLIER_MODEL_PATH='tfserving/cifar10/outlier', TRAIN_DRIFT_DETECTOR='False', TRAIN_OUTLIER_DETECTOR='False'): - pvolumes_dict = OrderedDict() - volume_step_names = [] - volume_name_parameters = [] - - marshal_vop = dsl.VolumeOp( - name="kale-marshal-volume", - resource_name="kale-marshal-pvc", - storage_class="nfs-client", - modes=dsl.VOLUME_MODE_RWM, - size="1Gi" - ) - volume_step_names.append(marshal_vop.name) - volume_name_parameters.append(marshal_vop.outputs["name"].full_name) - pvolumes_dict['/marshal'] = marshal_vop.volume - - volume_step_names.sort() - volume_name_parameters.sort() - - setup_task = setup_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after() - setup_task.container.working_dir = "/home/jovyan" - setup_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'setup': '/setup.html'}) - setup_task.output_artifact_paths.update(output_artifacts) - setup_task.add_pod_label("pipelines.kubeflow.org/metadata_written", "true") - dep_names = 
setup_task.dependent_names + volume_step_names - setup_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - setup_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - train_model_and_explainer_task = train_model_and_explainer_op(CIFAR10_MODEL_PATH, EXPLAINER_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(setup_task) - train_model_and_explainer_task.container.working_dir = "/home/jovyan" - train_model_and_explainer_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'train_model_and_explainer': '/train_model_and_explainer.html'}) - train_model_and_explainer_task.output_artifact_paths.update( - output_artifacts) - train_model_and_explainer_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = train_model_and_explainer_task.dependent_names + volume_step_names - train_model_and_explainer_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - train_model_and_explainer_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_model_task = deploy_model_op(CIFAR10_MODEL_PATH, DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_model_and_explainer_task) - deploy_model_task.container.working_dir = "/home/jovyan" - deploy_model_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update({'deploy_model': '/deploy_model.html'}) - deploy_model_task.output_artifact_paths.update(output_artifacts) - deploy_model_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_model_task.dependent_names + volume_step_names - deploy_model_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_model_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - train_drift_detector_task = train_drift_detector_op(DRIFT_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_model_and_explainer_task) - train_drift_detector_task.container.working_dir = "/home/jovyan" - train_drift_detector_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'train_drift_detector': '/train_drift_detector.html'}) - train_drift_detector_task.output_artifact_paths.update(output_artifacts) - train_drift_detector_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = train_drift_detector_task.dependent_names + volume_step_names - train_drift_detector_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - train_drift_detector_task.add_pod_annotation( - 
"kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - train_outlier_detector_task = train_outlier_detector_op(MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH, TRAIN_OUTLIER_DETECTOR)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_model_and_explainer_task) - train_outlier_detector_task.container.working_dir = "/home/jovyan" - train_outlier_detector_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'train_outlier_detector': '/train_outlier_detector.html'}) - train_outlier_detector_task.output_artifact_paths.update(output_artifacts) - train_outlier_detector_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = train_outlier_detector_task.dependent_names + volume_step_names - train_outlier_detector_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - train_outlier_detector_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_event_display_task = deploy_event_display_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(train_drift_detector_task, train_outlier_detector_task, deploy_model_task) - deploy_event_display_task.container.working_dir = "/home/jovyan" - deploy_event_display_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'deploy_event_display': '/deploy_event_display.html'}) - deploy_event_display_task.output_artifact_paths.update(output_artifacts) - deploy_event_display_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_event_display_task.dependent_names + volume_step_names - deploy_event_display_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_event_display_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_outlier_detector_task = deploy_outlier_detector_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY, OUTLIER_MODEL_PATH)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_event_display_task) - deploy_outlier_detector_task.container.working_dir = "/home/jovyan" - deploy_outlier_detector_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'deploy_outlier_detector': '/deploy_outlier_detector.html'}) - deploy_outlier_detector_task.output_artifact_paths.update(output_artifacts) - deploy_outlier_detector_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_outlier_detector_task.dependent_names + volume_step_names - deploy_outlier_detector_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_outlier_detector_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - 
test_oulier_detection_task = test_oulier_detection_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_outlier_detector_task) - test_oulier_detection_task.container.working_dir = "/home/jovyan" - test_oulier_detection_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'test_oulier_detection': '/test_oulier_detection.html'}) - test_oulier_detection_task.output_artifact_paths.update(output_artifacts) - test_oulier_detection_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = test_oulier_detection_task.dependent_names + volume_step_names - test_oulier_detection_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - test_oulier_detection_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - deploy_drift_detector_task = deploy_drift_detector_op(DEPLOY_NAMESPACE, DRIFT_MODEL_PATH, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_MODEL_BUCKET, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(test_oulier_detection_task) - deploy_drift_detector_task.container.working_dir = "/home/jovyan" - deploy_drift_detector_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'deploy_drift_detector': '/deploy_drift_detector.html'}) - deploy_drift_detector_task.output_artifact_paths.update(output_artifacts) - deploy_drift_detector_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = deploy_drift_detector_task.dependent_names + volume_step_names - deploy_drift_detector_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - deploy_drift_detector_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - test_drift_detector_task = test_drift_detector_op(DEPLOY_NAMESPACE, MINIO_ACCESS_KEY, MINIO_HOST, MINIO_SECRET_KEY)\ - .add_pvolumes(pvolumes_dict)\ - .after(deploy_drift_detector_task) - test_drift_detector_task.container.working_dir = "/home/jovyan" - test_drift_detector_task.container.set_security_context( - k8s_client.V1SecurityContext(run_as_user=0)) - output_artifacts = {} - output_artifacts.update( - {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'}) - output_artifacts.update( - {'test_drift_detector': '/test_drift_detector.html'}) - test_drift_detector_task.output_artifact_paths.update(output_artifacts) - test_drift_detector_task.add_pod_label( - "pipelines.kubeflow.org/metadata_written", "true") - dep_names = test_drift_detector_task.dependent_names + volume_step_names - test_drift_detector_task.add_pod_annotation( - "kubeflow-kale.org/dependent-templates", json.dumps(dep_names)) - if volume_name_parameters: - test_drift_detector_task.add_pod_annotation( - "kubeflow-kale.org/volume-name-parameters", - json.dumps(volume_name_parameters)) - - -if __name__ == "__main__": - pipeline_func = auto_generated_pipeline - pipeline_filename = pipeline_func.__name__ + '.pipeline.tar.gz' - import kfp.compiler as compiler - compiler.Compiler().compile(pipeline_func, pipeline_filename) - - # Get 
or create an experiment and submit a pipeline run - import kfp - client = kfp.Client() - experiment = client.create_experiment('seldon-e2e-cifar10') - - # Submit a pipeline run - from kale.utils.kfp_utils import generate_run_name - run_name = generate_run_name('seldon-e2e-cifar10-glv9p') - run_result = client.run_pipeline( - experiment.id, run_name, pipeline_filename, {}) diff --git a/samples/contrib/ibm-samples/ffdl-seldon/README.md b/samples/contrib/ibm-samples/ffdl-seldon/README.md deleted file mode 100644 index 9236cef03a1..00000000000 --- a/samples/contrib/ibm-samples/ffdl-seldon/README.md +++ /dev/null @@ -1,61 +0,0 @@ -# Simple IBM OSS demo - -This simple IBM OSS demo will demonstrate how to train a model using [Fabric for Deep Learning](https://github.com/IBM/FfDL) and then deploy it with [Seldon](https://github.com/SeldonIO/seldon-core). - -## Prerequisites -1. Install [Fabric for Deep Learning](https://github.com/IBM/FfDL) and [Seldon](https://github.com/SeldonIO/seldon-core) on the same Kubernetes cluster as KubeFlow Pipeline. -2. Create two S3 Object Storage buckets, then store the training data, model definition file, and FfDL manifest file in the training bucket. - * The training data for this demo is from the [UTKface's aligned & cropped faces dataset](https://susanqq.github.io/UTKFace/). We will be using the data binary `UTKFace.tar.gz`. - - * The model definition file needs to be packaged as `gender-classification.zip`. - with the following commands - ```shell - zip -j source/gender-classification source/model-source-code/gender_classification.py - ``` - Then upload the model definition file and FfDL manifest file in the source directory. They are named `gender-classification.zip` and `manifest.yml`. - -3. Fill in the necessary credentials at [credentials/creds.ini](credentials/creds.ini) and upload it to one of your GitHub private repositories. The details of each parameter are defined below. - -## Instructions - -### 1. With Command line -First, install the necessary Python Packages -```shell -pip3 install ai_pipeline_params -``` - -In this repository, run the following commands to create the argo files using the Kubeflow pipeline SDK. -```shell -dsl-compile --py ffdl_pipeline.py --output ffdl-pipeline.tar.gz -``` - -Then, submit `ffdl-pipeline.tar.gz` to the kubeflow pipeline UI. From there you can create different experiments and runs using the ffdl pipeline definition. - -### 2. With Jupyter Notebook -Run `jupyter notebook` to start running your jupyter server and load the notebook `ffdl_pipeline.ipynb` and follow the instructions. - - -## Pipeline Parameters -- **config-file-url**: GitHub raw content link to the pipeline credentials file -- **github-token**: GitHub Token that can access your private repository -- **model-def-file-path**: Model definition path in the training bucket -- **manifest-file-path**: FfDL manifest path in the training bucket -- **model-deployment-name**: Seldon Model deployment name -- **model-class-name**: PyTorch model class name -- **model-class-file**: Model file that contains the PyTorch model class - -## Credentials needed to be stored in GitHub -- **s3_url**: S3 Object storage endpoint for your FfDL training job. -- **s3_access_key_id**: S3 Object storage access key id -- **s3_secret_access_key**: S3 Object storage secret access key -- **training_bucket**: S3 Bucket for the training job data location. -- **result_bucket**: S3 Bucket for the training job result location. -- **ffdl_rest**: RESTAPI endpoint for FfDL. 
-- **k8s_public_nodeport_ip**: IP of the host machine. It will be used to generate a web service endpoint for the served model. - -## Naming convention for Training Model Files - -Since libraries such as Spark and OpenCV are huge to put inside the serving containers, users who use libraries which are not from the standard PyTorch container from FfDL should consider defining their PyTorch model class file in a separate Python file. This is because when python tries to load the model class from users' training files, python interpreter will need to read and import all the -modules in memory within the same file in order to properly construct the module dependencies. - -Therefore, by default users should consider naming their model class as `ModelClass` and put the model class code in a file call `model_class.py`. However, users can choose not to follow the naming convention as long as they provide the model class and file name as part of the pipeline parameters. diff --git a/samples/contrib/ibm-samples/ffdl-seldon/credentials/creds.ini b/samples/contrib/ibm-samples/ffdl-seldon/credentials/creds.ini deleted file mode 100644 index fa27de9c681..00000000000 --- a/samples/contrib/ibm-samples/ffdl-seldon/credentials/creds.ini +++ /dev/null @@ -1,8 +0,0 @@ -[CREDENTIALS] -s3_url = https://s3-api.us-geo.objectstorage.softlayer.net -s3_access_key_id = S3AccessKeyID -s3_secret_access_key = S3SecretAccessKey -training_bucket = data-bucket -result_bucket = result-bucket -ffdl_rest = http://xxx.xx.xxx.xxx:xxxxx -k8s_public_nodeport_ip = xxx.xx.xxx.xxx diff --git a/samples/contrib/ibm-samples/ffdl-seldon/credentials/github-creds.ini b/samples/contrib/ibm-samples/ffdl-seldon/credentials/github-creds.ini deleted file mode 100644 index f3d97f7dd03..00000000000 --- a/samples/contrib/ibm-samples/ffdl-seldon/credentials/github-creds.ini +++ /dev/null @@ -1,3 +0,0 @@ -[CREDENTIALS] -config_file_url = https://raw.githubusercontent.com/user/repository/branch/creds.ini -github_token = GithubToken diff --git a/samples/contrib/ibm-samples/ffdl-seldon/ffdl_pipeline.ipynb b/samples/contrib/ibm-samples/ffdl-seldon/ffdl_pipeline.ipynb deleted file mode 100644 index 44f41334dd3..00000000000 --- a/samples/contrib/ibm-samples/ffdl-seldon/ffdl_pipeline.ipynb +++ /dev/null @@ -1,216 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Train and deploy with FfDL and Seldon demo\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "##### A simple IBM OSS pipeline demonstrates how to train a model using Fabric for Deep Learning and then deploy it with Seldon.\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Model for this pipeline\n", - "\n", - "We will be training a PyTorch model that can classify the gender of a human face image. This PyTorch model is a simple convolutional neural network (CNN) with 3 convolutional layers and 2 fully connected layers using the [UTKFace](https://susanqq.github.io/UTKFace/) dataset. We will be training for 5 epochs for the purpose of this demo.\n", - "\n", - "
            " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Define the necessary environment variables and install the KubeFlow Pipeline SDK\n", - "We assume this notebook kernel has access to Python's site-packages and is in Python3.\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "config_file_url = ''\n", - "github_token = ''" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Install the necessary python packages\n", - "\n", - "Note: Please change pip to the package manager that's used for this Notebook Kernel." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "!pip install kfp --upgrade\n", - "!pip install ai_pipeline_params --upgrade" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Import the KubeFlow Pipeline library and define the client and experiment " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp\n", - "from kfp import compiler\n", - "import kfp\n", - "from kfp import components\n", - "from kfp import dsl\n", - "from kfp import notebook\n", - "\n", - "# Run client with KUBEFLOW_PIPELINE_LINK if this notebook server\n", - "# is running on localhost without enterprise gateway.\n", - "\n", - "# KUBEFLOW_PIPELINE_LINK = ''\n", - "# client = kfp.Client(KUBEFLOW_PIPELINE_LINK)\n", - "\n", - "client = kfp.Client()\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 2. Define pipeline tasks using the kfp library. " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# define secret name that contains the credentials for this pipeline, and load components\n", - "secret_name = 'kfp-creds'\n", - "configuration_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/eb830cd73ca148e5a1a6485a9374c2dc068314bc/components/ibm-components/commons/config/component.yaml')\n", - "train_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/eb830cd73ca148e5a1a6485a9374c2dc068314bc/components/ibm-components/ffdl/train/component.yaml')\n", - "serve_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/eb830cd73ca148e5a1a6485a9374c2dc068314bc/components/ibm-components/ffdl/serve/component.yaml')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.dsl as dsl\n", - "import ai_pipeline_params as params\n", - "\n", - "\n", - "# create pipeline\n", - "@dsl.pipeline(\n", - " name='FfDL pipeline',\n", - " description='A pipeline for machine learning workflow using Fabric for Deep Learning and Seldon.'\n", - ")\n", - "def ffdlPipeline(\n", - " GITHUB_TOKEN=github_token,\n", - " CONFIG_FILE_URL=config_file_url,\n", - " model_def_file_path='gender-classification.zip',\n", - " manifest_file_path='manifest.yml',\n", - " model_deployment_name='gender-classifier',\n", - " model_class_name='ThreeLayerCNN',\n", - " model_class_file='gender_classification.py'\n", - "):\n", - " \"\"\"A pipeline for end to end machine learning workflow.\"\"\"\n", - "\n", - " get_configuration = configuration_op(\n", - " token = GITHUB_TOKEN,\n", - " url = CONFIG_FILE_URL,\n", - " name = secret_name\n", - " )\n", - "\n", - " train = train_op(\n", - " 
model_def_file_path,\n", - " manifest_file_path\n", - " ).apply(params.use_ai_pipeline_params(secret_name))\n", - "\n", - " serve = serve_op(\n", - " train.output, \n", - " model_deployment_name, \n", - " model_class_name, \n", - " model_class_file\n", - " ).apply(params.use_ai_pipeline_params(secret_name))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Below are the default parameters for the above pipeline, \n", - "# you can customize these parameters for each pipeline run.\n", - "\n", - "parameters={'config-file-url': config_file_url,\n", - " 'github-token': github_token,\n", - " 'model-def-file-path': 'gender-classification.zip',\n", - " 'manifest-file-path': 'manifest.yml',\n", - " 'model-deployment-name': 'gender-classifier',\n", - " 'model-class-name': 'ThreeLayerCNN',\n", - " 'model-class-file': 'gender_classification.py'}\n", - "\n", - "\n", - "run = client.create_run_from_pipeline_func(ffdlPipeline, arguments=parameters).run_info\n", - "\n", - "import IPython\n", - "html = ('
<a href=\"%s/#/runs/details/%s\" target=\"_blank\">Run link here</a>
            '\n", - " % (client._get_url_prefix(), run.id))\n", - "IPython.display.HTML(html)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.8" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/samples/contrib/ibm-samples/ffdl-seldon/ffdl_pipeline.py b/samples/contrib/ibm-samples/ffdl-seldon/ffdl_pipeline.py deleted file mode 100644 index ba6c97e34bc..00000000000 --- a/samples/contrib/ibm-samples/ffdl-seldon/ffdl_pipeline.py +++ /dev/null @@ -1,51 +0,0 @@ -import kfp -from kfp import components -from kfp import dsl -import ai_pipeline_params as params - -# generate default secret name -secret_name = 'kfp-creds' - -configuration_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/eb830cd73ca148e5a1a6485a9374c2dc068314bc/components/ibm-components/commons/config/component.yaml') -train_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/eb830cd73ca148e5a1a6485a9374c2dc068314bc/components/ibm-components/ffdl/train/component.yaml') -serve_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/eb830cd73ca148e5a1a6485a9374c2dc068314bc/components/ibm-components/ffdl/serve/component.yaml') - -# create pipeline -@dsl.pipeline( - name='FfDL pipeline', - description='A pipeline for machine learning workflow using Fabric for Deep Learning and Seldon.' -) - -def ffdlPipeline( - GITHUB_TOKEN='', - CONFIG_FILE_URL='https://raw.githubusercontent.com/user/repository/branch/creds.ini', - model_def_file_path='gender-classification.zip', - manifest_file_path='manifest.yml', - model_deployment_name='gender-classifier', - model_class_name='ThreeLayerCNN', - model_class_file='gender_classification.py' -): - """A pipeline for end to end machine learning workflow.""" - - create_secrets = configuration_op( - token = GITHUB_TOKEN, - url = CONFIG_FILE_URL, - name = secret_name - ) - - train = train_op( - model_def_file_path, - manifest_file_path - ).apply(params.use_ai_pipeline_params(secret_name)) - - serve = serve_op( - train.output, - model_deployment_name, - model_class_name, - model_class_file - ).apply(params.use_ai_pipeline_params(secret_name)) - - -if __name__ == '__main__': - import kfp.compiler as compiler - compiler.Compiler().compile(ffdlPipeline, __file__ + '.tar.gz') diff --git a/samples/contrib/ibm-samples/ffdl-seldon/source/README.md b/samples/contrib/ibm-samples/ffdl-seldon/source/README.md deleted file mode 100644 index 4ff4d472056..00000000000 --- a/samples/contrib/ibm-samples/ffdl-seldon/source/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# Source Code for IBM OSS pipeline demo using PyTorch. - -This dictretory contains the source code for the PyTorch Gender Classification demo using FfDL and Seldon on KubeFlow Pipeline. - -- [manifest.yml](manifest.yml): Manifest file for FfDL training. -- [model-source-code](model-source-code): Model training source code for the demo -- [seldon-pytorch-serving-image](seldon-pytorch-serving-image): Seldon Serving image for the demo. 
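Both the deleted notebook and `ffdl_pipeline.py` above drive the same FfDL pipeline. A minimal follow-up sketch, assuming the KFP v1 SDK these samples target and with `ffdlPipeline` and `parameters` defined as in the notebook, shows how the submitted run can be polled to completion instead of only rendering a link:

```python
import kfp

client = kfp.Client()  # or kfp.Client(KUBEFLOW_PIPELINE_LINK) when running outside the cluster

# Submit the pipeline function defined above and keep the result handle.
result = client.create_run_from_pipeline_func(ffdlPipeline, arguments=parameters)

# Block until the run finishes (timeout in seconds), then print its final state.
run_detail = client.wait_for_run_completion(result.run_id, timeout=3600)
print(run_detail.run.status)
```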
diff --git a/samples/contrib/ibm-samples/ffdl-seldon/source/manifest.yml b/samples/contrib/ibm-samples/ffdl-seldon/source/manifest.yml deleted file mode 100644 index 3bebff29d42..00000000000 --- a/samples/contrib/ibm-samples/ffdl-seldon/source/manifest.yml +++ /dev/null @@ -1,52 +0,0 @@ -# name: Replace with any name for your model -# description: Replace with any description for your model -# version: Replace with any version of your model -# gpus: Replace with the number of gpus to be used in -# training your model -# cpus: Replace with the number of cpus to be used in -# training your model -# learners: Replace with the number of learner nodes to be used -# memory: Replace with the amount of memory to be -# dedicated to training your model -name: Pytorch gender_classification model -description: Pytorch gender_classification model -version: "1.0" -gpus: 0 -cpus: 2 -learners: 1 -memory: 4Gb - -# Object stores that allow the system to retrieve training data. -# id: The data_store id -# type: The type of data_store -# training_data: container: Replace with the name of the bucket at which -# you stored the fashion MNIST dataset -# training_results: container: Replace with the name of the bucket where -# the resulting model should be saved to -# connection: type: The type of connection -# connection: auth_url: Replace with your Cloud Object Storage Endpoint url -# for IBM Cloud -# connection: user_name: Replace with the access_key_id found in the service -# credentials tab on IBM Cloud -# connection: password: Replace with the secret_access_key found in service -# credentials tab on IBM Cloud -data_stores: - - id: test-datastore - type: mount_cos - training_data: - container: gender-data - training_results: - container: gender-result - connection: - # Update the object storage credentials below - auth_url: http://s3-api.us-geo.objectstorage.softlayer.net - user_name: xxxxxx - password: xxxxxxxxx - -# name: The name of the Deep Learning framework that will be used -# version: The version of the framework to be used -# command: The command to initiate training -framework: - name: pytorch - version: "latest" - command: tar -xzvf $DATA_DIR/UTKFace.tar.gz -C / --owner root --group root --no-same-owner 2>&1 > dummy.log; pip install torchsummary Pillow pandas; python -u gender_classification.py --data_dir /UTKFace/ --result_path $RESULT_DIR/model.pt --label_dir $RESULT_DIR diff --git a/samples/contrib/ibm-samples/ffdl-seldon/source/model-source-code/gender_classification.py b/samples/contrib/ibm-samples/ffdl-seldon/source/model-source-code/gender_classification.py deleted file mode 100644 index cbf2b7fc84c..00000000000 --- a/samples/contrib/ibm-samples/ffdl-seldon/source/model-source-code/gender_classification.py +++ /dev/null @@ -1,195 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
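# This script loads UTKFace face images, keeps two race groups, trains the
# ThreeLayerCNN defined below for binary gender classification, reports test-set
# accuracy, saves the model state_dict to --result_path, and writes the label and
# protected-attribute arrays used by later fairness checks to --label_dir.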
- - -import glob -import PIL -from PIL import Image -import numpy as np -import argparse - -import torch -import torch.utils.data -from torch.autograd import Variable -import torch.nn as nn -from torchsummary import summary -import time - -import pandas as pd - -np.random.seed(99) -torch.manual_seed(99) - - -class ThreeLayerCNN(torch.nn.Module): - """ - Input: 128x128 face image (eye aligned). - Output: 1-D tensor with 2 elements. Used for binary classification. - Parameters: - Number of conv layers: 3 - Number of fully connected layers: 2 - """ - def __init__(self): - super(ThreeLayerCNN,self).__init__() - self.conv1 = torch.nn.Conv2d(3,6,5) - self.pool = torch.nn.MaxPool2d(2,2) - self.conv2 = torch.nn.Conv2d(6,16,5) - self.conv3 = torch.nn.Conv2d(16,16,6) - self.fc1 = torch.nn.Linear(16*4*4,120) - self.fc2 = torch.nn.Linear(120,2) - - - def forward(self, x): - x = self.pool(torch.nn.functional.relu(self.conv1(x))) - x = self.pool(torch.nn.functional.relu(self.conv2(x))) - x = self.pool(torch.nn.functional.relu(self.conv3(x))) - x = x.view(-1,16*4*4) - x = torch.nn.functional.relu(self.fc1(x)) - x = self.fc2(x) - return x - - -if __name__ == "__main__": - - parser = argparse.ArgumentParser() - parser.add_argument('--data_dir', type=str, help='Dataset directory path') - parser.add_argument('--result_path', type=str, help='Result model path') - parser.add_argument('--label_dir', type=str, help='Label directory path') - args = parser.parse_args() - - image_dir = args.data_dir - result_dir = args.result_path - label_dir = args.label_dir - - """ Load and Process Images """ - - races_to_consider = [0,4] - unprivileged_groups = [{'race': 4.0}] - privileged_groups = [{'race': 0.0}] - favorable_label = 0.0 - unfavorable_label = 1.0 - - img_size = 64 - - protected_race = [] - outcome_gender = [] - feature_image = [] - feature_age = [] - - for i, image_path in enumerate(glob.glob(image_dir + "*.jpg")): - try: - age, gender, race = image_path.split('/')[-1].split("_")[:3] - age = int(age) - gender = int(gender) - race = int(race) - - if race in races_to_consider: - protected_race.append(race) - outcome_gender.append(gender) - feature_image.append(np.array(Image.open(image_path).resize((img_size, img_size)))) - feature_age.append(age) - except: - print("Missing: " + image_path) - - feature_image_mat = np.array(feature_image) - outcome_gender_mat = np.array(outcome_gender) - protected_race_mat = np.array(protected_race) - age_mat = np.array(feature_age) - - """ Split the dataset into train and test """ - - feature_image_mat_normed = 2.0 *feature_image_mat.astype('float32')/256.0 - 1.0 - - N = len(feature_image_mat_normed) - ids = np.random.permutation(N) - train_size=int(0.7 * N) - X_train = feature_image_mat_normed[ids[0:train_size]] - y_train = outcome_gender_mat[ids[0:train_size]] - X_test = feature_image_mat_normed[ids[train_size:]] - y_test = outcome_gender_mat[ids[train_size:]] - - p_train = protected_race_mat[ids[0:train_size]] - p_test = protected_race_mat[ids[train_size:]] - - age_train = age_mat[ids[0:train_size]] - age_test = age_mat[ids[train_size:]] - - batch_size = 64 - - X_train = X_train.transpose(0,3,1,2) - X_test = X_test.transpose(0,3,1,2) - - train = torch.utils.data.TensorDataset(Variable(torch.FloatTensor(X_train.astype('float32'))), Variable(torch.LongTensor(y_train.astype('float32')))) - train_loader = torch.utils.data.DataLoader(train, batch_size=batch_size, shuffle=True) - test = torch.utils.data.TensorDataset(Variable(torch.FloatTensor(X_test.astype('float32'))), 
Variable(torch.LongTensor(y_test.astype('float32')))) - test_loader = torch.utils.data.DataLoader(test, batch_size=batch_size, shuffle=False) - - device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu') - model = ThreeLayerCNN().to(device) - summary(model, (3,img_size,img_size)) - - - """ Training the network """ - - num_epochs = 5 - learning_rate = 0.001 - print_freq = 100 - - # Specify the loss and the optimizer - criterion = nn.CrossEntropyLoss() - optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate) - - # Start training the model - num_batches = len(train_loader) - for epoch in range(num_epochs): - for idx, (images, labels) in enumerate(train_loader): - images = images.to(device) - labels = labels.to(device) - - outputs = model(images) - loss = criterion(outputs, labels) - optimizer.zero_grad() - loss.backward() - optimizer.step() - - if (idx+1) % print_freq == 0: - print ('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}' .format(epoch+1, num_epochs, idx+1, num_batches, loss.item())) - - # Run model on test set in eval mode. - model.eval() - correct = 0 - y_pred = [] - with torch.no_grad(): - for images, labels in test_loader: - images = images.to(device) - labels = labels.to(device) - outputs = model(images) - _, predicted = torch.max(outputs.data, 1) - correct += predicted.eq(labels.data.view_as(predicted)).sum().item() - y_pred += predicted.tolist() - print('Test_set accuracy: ' + str(100. * correct / len(test_loader.dataset)) + '%') - # convert y_pred to np array - y_pred = np.array(y_pred) - - # Save the entire model to enable automated serving - torch.save(model.state_dict(), result_dir) - print("Model saved at " + result_dir) - - # Save labels and protected features for fairness check. - np.savetxt(label_dir + '/y_train.out', y_train) - np.savetxt(label_dir + '/p_train.out', p_train) - np.savetxt(label_dir + '/y_test.out', y_test) - np.savetxt(label_dir + '/p_test.out', p_test) - np.savetxt(label_dir + '/y_pred.out', y_pred) - np.save(label_dir + '/x_test', X_test) - - print("Labels stored at directory " + label_dir) diff --git a/samples/contrib/ibm-samples/ffdl-seldon/source/seldon-pytorch-serving-image/.s2i/environment b/samples/contrib/ibm-samples/ffdl-seldon/source/seldon-pytorch-serving-image/.s2i/environment deleted file mode 100644 index b7a2700355e..00000000000 --- a/samples/contrib/ibm-samples/ffdl-seldon/source/seldon-pytorch-serving-image/.s2i/environment +++ /dev/null @@ -1,4 +0,0 @@ -MODEL_NAME=Serving -API_TYPE=REST -SERVICE_TYPE=MODEL -PERSISTENCE=0 diff --git a/samples/contrib/ibm-samples/ffdl-seldon/source/seldon-pytorch-serving-image/README.md b/samples/contrib/ibm-samples/ffdl-seldon/source/seldon-pytorch-serving-image/README.md deleted file mode 100644 index 115ead493a2..00000000000 --- a/samples/contrib/ibm-samples/ffdl-seldon/source/seldon-pytorch-serving-image/README.md +++ /dev/null @@ -1,16 +0,0 @@ -# PyTorch Gender Classification Seldon Serving container - -# Wrap the Runtime Scorer -You can skip this step if you are happy to use the already packaged image ```aipipeline/seldon-pytorch:0.1``` from DockerHub. - -The runtime MNIST scorer is contained within a standalone [python class Serving.py](./Serving.py). This needs to be packaged in a Docker container to run within Seldon. For this we use [Redhat's Source-to-image](https://github.com/openshift/source-to-image). - - * Install [S2I](https://github.com/openshift/source-to-image#installation) - * From this `seldon-pytorch-serving-image` folder, run the following s2i build. 
You will need to change **aipipeline** to your DockerHub repo. - ```shell - s2i build . seldonio/seldon-core-s2i-python2:0.4 aipipeline/seldon-pytorch:0.1 - ``` - * Push image to DockerHub or your Docker registry that's accessible from the KubeFlow Pipeline cluster. - ```shell - docker push aipipeline/seldon-pytorch:0.1 - ``` diff --git a/samples/contrib/ibm-samples/ffdl-seldon/source/seldon-pytorch-serving-image/Serving.py b/samples/contrib/ibm-samples/ffdl-seldon/source/seldon-pytorch-serving-image/Serving.py deleted file mode 100644 index 06870391989..00000000000 --- a/samples/contrib/ibm-samples/ffdl-seldon/source/seldon-pytorch-serving-image/Serving.py +++ /dev/null @@ -1,76 +0,0 @@ -# -# Copyright 2017-2018 IBM Corporation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import torch -import boto3 -import botocore -import os -import zipfile -import importlib - - -class Serving(object): - def __init__(self): - training_id = os.environ.get("TRAINING_ID") - endpoint_url = os.environ.get("BUCKET_ENDPOINT_URL") - bucket_name = os.environ.get("BUCKET_NAME") - bucket_key = os.environ.get("BUCKET_KEY") - bucket_secret = os.environ.get("BUCKET_SECRET") - model_file_name = os.environ.get("MODEL_FILE_NAME") - model_class_name = os.environ.get("MODEL_CLASS_NAME") - model_class_file = os.environ.get("MODEL_CLASS_FILE") - - - # Uncomment the below print statement for debugging purpose. - # print("Training id:{} endpoint URL:{} key:{} secret:{}".format(training_id,endpoint_url,bucket_key,bucket_secret)) - - # Define S3 resource and download the model files - client = boto3.resource( - 's3', - endpoint_url=endpoint_url, - aws_access_key_id=bucket_key, - aws_secret_access_key=bucket_secret, - ) - - KEY = training_id + '/' + model_file_name - model_files = training_id + '/_submitted_code/model.zip' - - try: - client.Bucket(bucket_name).download_file(KEY, 'model.pt') - client.Bucket(bucket_name).download_file(model_files, 'model.zip') - except botocore.exceptions.ClientError as e: - if e.response['Error']['Code'] == "404": - print("The object does not exist.") - else: - raise - - zip_ref = zipfile.ZipFile('model.zip', 'r') - zip_ref.extractall('model_files') - zip_ref.close() - - modulename = 'model_files.' + model_class_file.split('.')[0].replace('-', '_') - - ''' - We required users to define where the model class is located or follow - some naming convention we have provided. 
- ''' - model_class = getattr(importlib.import_module(modulename), model_class_name) - self.model = model_class() - self.model.load_state_dict(torch.load("model.pt")) - self.model.eval() - - def predict(self, X, feature_names): - return self.model(X) diff --git a/samples/contrib/ibm-samples/ffdl-seldon/source/seldon-pytorch-serving-image/requirements.txt b/samples/contrib/ibm-samples/ffdl-seldon/source/seldon-pytorch-serving-image/requirements.txt deleted file mode 100644 index e989d6811a9..00000000000 --- a/samples/contrib/ibm-samples/ffdl-seldon/source/seldon-pytorch-serving-image/requirements.txt +++ /dev/null @@ -1,8 +0,0 @@ -torch==2.6.0 -torchvision==0.2.1 -boto3==1.9.83 -pandas -numpy -pyyaml -torchsummary -numpy diff --git a/samples/contrib/ibm-samples/openscale/README.md b/samples/contrib/ibm-samples/openscale/README.md deleted file mode 100644 index 1cfe2a0a836..00000000000 --- a/samples/contrib/ibm-samples/openscale/README.md +++ /dev/null @@ -1,113 +0,0 @@ -# Watson OpenScale Example - -This simple OpenScale pipeline will demonstrate how to train a model using IBM Spark Service, store and deploy it with Watson Machine Learning, and then use Watson OpenScale for fairness and quality monitoring. - -## Prerequisites -This pipeline requires the user to have provisioned OpenScale, Spark, and Machine Learning Service on Watson, a cloud object store set up and the service credentials configured in the creds.ini file. - -To provision your own OpenScale, Spark, Watson Machine Learning services and cloud object store, following are the required steps. - -1. IBM Watson Machine Learning service instance - -To create a Watson Machine Learning service, go to [IBM Cloud](https://cloud.ibm.com/), login with IBM account id first. From the `Catalog` page, click on `AI` tab on the left side to go to this [page](https://cloud.ibm.com/catalog?category=ai). Then click on the [`Machine Learning`](https://cloud.ibm.com/catalog/services/machine-learning) link and follow the instructions to create the service. - -Once the service is created, from the service's `Dashboard`, follow the instructions to generate `service credentials`. Refer to IBM Cloud [documents](https://cloud.ibm.com/docs) for help if needed. Collect the `url`, `apikey`, and `instance_id` info from the service credentials as these will be required to access the service. - -2. IBM Watson OpenScale service instance - -The IBM Watson OpenScale service will help us monitor the quality and fairness status for the deployed models. From the `Catalog` page, click on `AI` tab on the left side to go to this [page](https://cloud.ibm.com/catalog?category=ai). Then click on the [`Watson OpenScale`](https://cloud.ibm.com/catalog/services/watson-openscale) link and follow the instructions to create the service. - -Once the service is created, click on service's `Launch Application` and click on configuration (the 4th icon on the left side) from the new pop up link. Collect the `Datamart ID` which is the GUID for Watson OpenScale. - -In addition, collect the IBM Cloud API Key from this [page](https://cloud.ibm.com/iam#/apikeys) to enable service binding for OpenScale service. - -3. IBM Spark service instance - -The IBM Spark service will provide several spark executors to help train our example model. From the `Catalog` page, click on `Web and Application` tab on the left side to go to this [page](https://cloud.ibm.com/catalog?category=app_services). 
Then click on the [`Apache Spark`](https://cloud.ibm.com/catalog/services/apache-spark) link and follow the instructions to create the service. - -Once the service is created, from the service's `Service credentials` on the left side, follow the instructions to generate `service credentials`. Refer to IBM Cloud [documents](https://cloud.ibm.com/docs) for help if needed. -Collect the `tenant_secret`, `tenant_id`, `cluster_master_url`, and `instance_id` info from the service credentials as these will be required to access the service. - -4. A cloud object store - -Watson Machine Learning service loads datasets from cloud object store and stores model outputs and other artifacts to cloud object store. Users can use any cloud object store they already preserve. Users can also create a cloud object store with `IBM Cloud Object Storage` service by following this [link](https://console.bluemix.net/catalog/services/cloud-object-storage). - -Collect the `endpoint`, `access_key_id` and `secret_access_key` fields from the service credentials for the cloud object store. Create the service credentials first if not exist. To ensure generating HMAC credentials, specify the following in the `Add Inline Configuration Parameters` field: `{"HMAC":true}`. - -Create a bucket for storing the train datasets and model source codes. - -Then, upload all the files in the `source` folder to the created bucket. - -5. Set up access credentials - -This pipeline sample reads the credentials from a file hosted in a github repo. Refer to `creds.ini` file and input user's specific credentials. Then upload the file to a github repo the user has access. - -To access the credentials file, the user should provide a github access token and the link to the raw content of the file. Modify the `GITHUB_TOKEN` and `CONFIG_FILE_URL` variables in the below code block and run the Python code to create a Kubernetes secret using the KubeFlow pipeline. - -```python -import kfp.dsl as dsl -import kfp.components as components -import kfp -secret_name = 'aios-creds' -configuration_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/commons/config/component.yaml') - -@dsl.pipeline( - name='create secret', - description='' -) -def secret_pipeline( - GITHUB_TOKEN='', - CONFIG_FILE_URL='https://raw.githubusercontent.com/user/repository/branch/creds.ini', -): - get_configuration = configuration_op( - token=GITHUB_TOKEN, - url=CONFIG_FILE_URL, - name=secret_name - ) - -kfp.Client().create_run_from_pipeline_func(secret_pipeline, arguments={}) -``` - -## Instructions - -First, install the necessary Python Packages -```shell -pip3 install ai_pipeline_params -``` - -In this repository, run the following commands to create the argo files using the Kubeflow pipeline SDK. -```shell -dsl-compile --py openscale.py --output openscale.tar.gz -``` - -Then, submit `openscale.tar.gz` to the kubeflow pipeline UI. From there you can create different experiments and runs with the OpenScale pipeline. - -## Pipeline Parameters -- **bucket-name**: Object Storage bucket that has Spark training files and OpenScale manifest -- **training-data-link**: Link to a public data source if the data is not being preprocessed. -- **postgres-schema-name**: PostgreSQL schema name for storing model payload metrics -- **label-name**: Model label name in the dataset. -- **problem-type**: Model output type. 
Possible options are `BINARY_CLASSIFICATION`, `MULTICLASS_CLASSIFICATION`, and `REGRESSION` -- **threshold**: Model threshold that is recommended for the OpenScale service to monitor. -- **aios-manifest-path**: Manifest files path in the object storage that defines the fairness definition and model schema. -- **model-file-path**: Model file path in the object storage for the spark service to execute. -- **spark-entrypoint**: Entrypoint command to execute the model training using spark service. -- **model-name**: Model name for storing the trained model in Watson Machine Learning service. -- **deployment-name**: Deployment name for deploying the stored model in Watson Machine Learning service. - -## Credentials needed to be stored in the creds.ini -- **aios_guid**: GUID of the OpenScale service -- **cloud_api_key**: IBM Cloud API Key -- **postgres_uri**: PostgreSQL URI for storing model payload. Leave it with the empty string `""` if you wish to use the default database that comes with the OpenScale service. -- **spark_tenant_id**: Spark tenant ID from the IBM Apache Spark service. -- **spark_tenant_secret**: Spark tenant secret from the IBM Apache Spark service. -- **spark_cluster_master_url**: Spark cluster master URL from the IBM Apache Spark service. -- **spark_instance_id**: Spark instance ID from the IBM Apache Spark service. -- **cos_endpoint**: Object Storage endpoint. -- **cos_access_key**: Object Storage access key ID -- **cos_secret_key**: Object Storage secret access key. -- **wml_url**: URL endpoint from the Watson Machine Learning service. -- **wml_username**: Username from the Watson Machine Learning service. -- **wml_password**: Password from the Watson Machine Learning service. -- **wml_instance_id**: Instance ID from the Watson Machine Learning service. -- **wml_apikey**: API Key from the Watson Machine Learning service. 
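As an alternative to uploading `openscale.tar.gz` through the Kubeflow Pipelines UI, the compiled archive can also be submitted with the KFP v1 SDK. A minimal sketch, assuming a reachable Pipelines endpoint; the experiment name and parameter values below are placeholders, and any parameters left out fall back to the defaults in `openscale.py`:

```python
import kfp

client = kfp.Client()  # e.g. kfp.Client(host='http://<kfp-endpoint>') from outside the cluster

experiment = client.create_experiment('openscale-demo')  # placeholder experiment name
run = client.run_pipeline(
    experiment.id,
    job_name='openscale-run',
    pipeline_package_path='openscale.tar.gz',
    params={
        'bucket-name': '<your-cos-bucket>',  # placeholder bucket name
        'label-name': 'Risk',
        'problem-type': 'BINARY_CLASSIFICATION',
        'threshold': '0.7',
    },
)
print(run.id)
```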
diff --git a/samples/contrib/ibm-samples/openscale/credentials/creds.ini b/samples/contrib/ibm-samples/openscale/credentials/creds.ini deleted file mode 100644 index ab0fa0ca7f5..00000000000 --- a/samples/contrib/ibm-samples/openscale/credentials/creds.ini +++ /dev/null @@ -1,17 +0,0 @@ -[CREDENTIALS] -aios_guid = OpenscaleGuid -cloud_api_key = IBMCloudAPIKey -postgres_uri = postgreSQLURI - -spark_tenant_id = SparkTenantId -spark_tenant_secret = SparkTenantSecret -spark_cluster_master_url = https://spark.bluemix.net -spark_instance_id = SparkInstanceId - -cos_endpoint = ObjectStoreEndpointUrl -cos_access_key = ObjectStoreAccessKeyID -cos_secret_key = ObjectStoreSecretAccessKey - -wml_url = https://us-south.ml.cloud.ibm.com -wml_instance_id = WMLInstanceId -wml_apikey = WMLAPIKey diff --git a/samples/contrib/ibm-samples/openscale/openscale.py b/samples/contrib/ibm-samples/openscale/openscale.py deleted file mode 100644 index da451cec0b4..00000000000 --- a/samples/contrib/ibm-samples/openscale/openscale.py +++ /dev/null @@ -1,83 +0,0 @@ -import kfp.dsl as dsl -import kfp.components as components -import ai_pipeline_params as params - -secret_name = 'aios-creds' - -preprocess_spark_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/spark/data_preprocess_spark/component.yaml') -train_spark_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/spark/train_spark/component.yaml') -store_spark_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/spark/store_spark_model/component.yaml') -deploy_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/watson/deploy/component.yaml') -subscribe_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/watson/manage/subscribe/component.yaml') -fairness_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/watson/manage/monitor_fairness/component.yaml') -quality_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/watson/manage/monitor_quality/component.yaml') - - -@dsl.pipeline( - name='Watson OpenScale Pipeline', - description='A pipeline for end to end Spark machine learning workflow and model monitoring.' 
-) -def aiosPipeline( - BUCKET_NAME='', - TRAINING_DATA_LINK='https://raw.githubusercontent.com/emartensibm/german-credit/master/german_credit_data_biased_training.csv', - POSTGRES_SCHEMA_NAME='data_mart_credit', - LABEL_NAME='Risk', - PROBLEM_TYPE='BINARY_CLASSIFICATION', - THRESHOLD='0.7', - AIOS_MANIFEST_PATH='aios.json', - MODEL_FILE_PATH='model.py', - SPARK_ENTRYPOINT='python model.py', - MODEL_NAME='Spark German Risk Model - Final', - DEPLOYMENT_NAME='Spark German Risk Deployment - Final' -): - - """A pipeline for Spark machine learning workflow with OpenScale.""" - - data_preprocess_spark = preprocess_spark_op( - bucket_name=BUCKET_NAME, - data_url=TRAINING_DATA_LINK - ).apply(params.use_ai_pipeline_params(secret_name)) - train_spark = train_spark_op( - bucket_name=BUCKET_NAME, - data_filename=data_preprocess_spark.output, - model_filename=MODEL_FILE_PATH, - spark_entrypoint=SPARK_ENTRYPOINT - ).apply(params.use_ai_pipeline_params(secret_name)) - store_spark_model = store_spark_op( - bucket_name=BUCKET_NAME, - aios_manifest_path=AIOS_MANIFEST_PATH, - problem_type=PROBLEM_TYPE, - model_name=MODEL_NAME, - deployment_name=DEPLOYMENT_NAME, - model_filepath=train_spark.outputs['model_filepath'], - train_data_filepath=train_spark.outputs['train_data_filepath'] - ).apply(params.use_ai_pipeline_params(secret_name)) - deploy = deploy_op( - model_uid=store_spark_model.output, - model_name=MODEL_NAME, - deployment_name=DEPLOYMENT_NAME - ).apply(params.use_ai_pipeline_params(secret_name)) - subscribe = subscribe_op( - model_uid=deploy.outputs['model_uid'], - model_name=MODEL_NAME, - aios_schema=POSTGRES_SCHEMA_NAME, - label_column=LABEL_NAME, - aios_manifest_path=AIOS_MANIFEST_PATH, - bucket_name=BUCKET_NAME, - problem_type=PROBLEM_TYPE - ).apply(params.use_ai_pipeline_params(secret_name)) - monitor_quality = quality_op( - model_name=subscribe.output, - quality_threshold=THRESHOLD - ).apply(params.use_ai_pipeline_params(secret_name)) - monitor_fairness = fairness_op( - model_name=subscribe.output, - aios_manifest_path=AIOS_MANIFEST_PATH, - cos_bucket_name=BUCKET_NAME, - data_filename=data_preprocess_spark.output - ).apply(params.use_ai_pipeline_params(secret_name)) - - -if __name__ == '__main__': - import kfp.compiler as compiler - compiler.Compiler().compile(aiosPipeline, __file__ + '.tar.gz') diff --git a/samples/contrib/ibm-samples/openscale/source/aios.json b/samples/contrib/ibm-samples/openscale/source/aios.json deleted file mode 100644 index 6113f246eaa..00000000000 --- a/samples/contrib/ibm-samples/openscale/source/aios.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "fairness_favourable_classes": ["No Risk"], - "fairness_unfavourable_classes": ["Risk"], - "fairness_features": [ - { - "feature_name": "Sex", - "majority": ["male"], - "minority": ["female"], - "threshold": 0.95 - }, - { - "feature_name": "Age", - "majority": [[26,75]], - "minority": [[18,25]], - "threshold": 0.95 - } - ], - "model_schema": [ - {"metadata": {"measure": "discrete","modeling_role": "feature"}, "name": "CheckingStatus", "nullable": true, "type": "string"}, - {"metadata": {"modeling_role": "feature"}, "name": "LoanDuration", "nullable": true, "type": "integer"}, - {"metadata": {"measure": "discrete", "modeling_role": "feature"}, "name": "CreditHistory", "nullable": true, "type": "string"}, - {"metadata": {"measure": "discrete", "modeling_role": "feature"}, "name": "LoanPurpose", "nullable": true, "type": "string"}, - {"metadata": {"modeling_role": "feature"}, "name": "LoanAmount", "nullable": true, "type": "integer"}, 
- {"metadata": {"measure": "discrete", "modeling_role": "feature"}, "name": "ExistingSavings", "nullable": true, "type": "string"}, - {"metadata": {"measure": "discrete", "modeling_role": "feature"}, "name": "EmploymentDuration", "nullable": true, "type": "string"}, - {"metadata": {"modeling_role": "feature"}, "name": "InstallmentPercent", "nullable": true, "type": "integer"}, - {"metadata": {"measure": "discrete", "modeling_role": "feature"}, "name": "Sex","nullable": true,"type": "string"}, - {"metadata": {"measure": "discrete", "modeling_role": "feature"},"name": "OthersOnLoan","nullable": true,"type": "string"}, - {"metadata": {"modeling_role": "feature"},"name": "CurrentResidenceDuration","nullable": true,"type": "integer"}, - {"metadata": {"measure": "discrete", "modeling_role": "feature"},"name": "OwnsProperty","nullable": true,"type": "string"}, - {"metadata": {"modeling_role": "feature"},"name": "Age","nullable": true,"type": "integer"}, - {"metadata": {"measure": "discrete", "modeling_role": "feature"},"name": "InstallmentPlans","nullable": true,"type": "string"}, - {"metadata": {"measure": "discrete", "modeling_role": "feature"},"name": "Housing","nullable": true,"type": "string"}, - {"metadata": {"modeling_role": "feature"},"name": "ExistingCreditsCount","nullable": true,"type": "integer"}, - {"metadata": {"measure": "discrete", "modeling_role": "feature"},"name": "Job","nullable": true,"type": "string"}, - {"metadata": {"modeling_role": "feature"},"name": "Dependents","nullable": true,"type": "integer"}, - {"metadata": {"measure": "discrete", "modeling_role": "feature"},"name": "Telephone","nullable": true,"type": "string"}, - {"metadata": {"measure": "discrete", "modeling_role": "feature"},"name": "ForeignWorker","nullable": true,"type": "string"}, - {"metadata": {"modeling_role": "probability"},"name": "probability","nullable": true,"type": {"containsNull": true, "elementType": "double", "type": "array"}}, - {"metadata": {"modeling_role": "prediction"},"name": "prediction","nullable": true,"type": "double"}, - {"metadata": {"modeling_role": "decoded-target"},"name": "predictedLabel","nullable": true,"type": "string"}, - {"metadata": {"modeling_role": "debiased-prediction"},"name": "debiased_prediction","nullable": true,"type": "double"}, - {"metadata": {"modeling_role": "debiased-probability"},"name": "debiased_probability","nullable": true,"type": {"containsNull": true,"elementType": "double","type": "array"}} - ] -} diff --git a/samples/contrib/ibm-samples/openscale/source/model.py b/samples/contrib/ibm-samples/openscale/source/model.py deleted file mode 100644 index be46c8077f5..00000000000 --- a/samples/contrib/ibm-samples/openscale/source/model.py +++ /dev/null @@ -1,75 +0,0 @@ -import pyspark -from pyspark.sql import SparkSession -from pyspark.ml.feature import OneHotEncoder, StringIndexer, IndexToString, VectorAssembler -from pyspark.ml.evaluation import BinaryClassificationEvaluator -from pyspark.ml import Pipeline, Model -from pyspark.ml.classification import RandomForestClassifier -import json - -''' Read data with Spark SQL ''' -spark = SparkSession.builder.getOrCreate() -df_data = spark.read.csv(path="german_credit_data_biased_training.csv", sep=",", header=True, inferSchema=True) -df_data.head() - -spark_df = df_data -(train_data, test_data) = spark_df.randomSplit([0.8, 0.2], 24) - -print("Number of records for training: " + str(train_data.count())) -print("Number of records for evaluation: " + str(test_data.count())) - -spark_df.printSchema() - 
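# Feature engineering for the German credit data: index each categorical column into
# a numeric *_IX column, index the Risk label, and assemble the indexed plus numeric
# columns into a single 'features' vector; IndexToString later maps predictions back
# to the Risk / No Risk labels.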
-si_CheckingStatus = StringIndexer(inputCol = 'CheckingStatus', outputCol = 'CheckingStatus_IX') -si_CreditHistory = StringIndexer(inputCol = 'CreditHistory', outputCol = 'CreditHistory_IX') -si_LoanPurpose = StringIndexer(inputCol = 'LoanPurpose', outputCol = 'LoanPurpose_IX') -si_ExistingSavings = StringIndexer(inputCol = 'ExistingSavings', outputCol = 'ExistingSavings_IX') -si_EmploymentDuration = StringIndexer(inputCol = 'EmploymentDuration', outputCol = 'EmploymentDuration_IX') -si_Sex = StringIndexer(inputCol = 'Sex', outputCol = 'Sex_IX') -si_OthersOnLoan = StringIndexer(inputCol = 'OthersOnLoan', outputCol = 'OthersOnLoan_IX') -si_OwnsProperty = StringIndexer(inputCol = 'OwnsProperty', outputCol = 'OwnsProperty_IX') -si_InstallmentPlans = StringIndexer(inputCol = 'InstallmentPlans', outputCol = 'InstallmentPlans_IX') -si_Housing = StringIndexer(inputCol = 'Housing', outputCol = 'Housing_IX') -si_Job = StringIndexer(inputCol = 'Job', outputCol = 'Job_IX') -si_Telephone = StringIndexer(inputCol = 'Telephone', outputCol = 'Telephone_IX') -si_ForeignWorker = StringIndexer(inputCol = 'ForeignWorker', outputCol = 'ForeignWorker_IX') - -si_Label = StringIndexer(inputCol="Risk", outputCol="label").fit(spark_df) -label_converter = IndexToString(inputCol="prediction", outputCol="predictedLabel", labels=si_Label.labels) -va_features = VectorAssembler(inputCols=["CheckingStatus_IX", "CreditHistory_IX", "LoanPurpose_IX", "ExistingSavings_IX", "EmploymentDuration_IX", "Sex_IX", \ - "OthersOnLoan_IX", "OwnsProperty_IX", "InstallmentPlans_IX", "Housing_IX", "Job_IX", "Telephone_IX", "ForeignWorker_IX", \ - "LoanDuration", "LoanAmount", "InstallmentPercent", "CurrentResidenceDuration", "LoanDuration", "Age", "ExistingCreditsCount", \ - "Dependents"], outputCol="features") - -''' Train Model with RF classifier ''' -classifier = RandomForestClassifier(featuresCol="features") - -pipeline = Pipeline(stages=[si_CheckingStatus, si_CreditHistory, si_EmploymentDuration, si_ExistingSavings, si_ForeignWorker, si_Housing, si_InstallmentPlans, si_Job, si_LoanPurpose, si_OthersOnLoan,\ - si_OwnsProperty, si_Sex, si_Telephone, si_Label, va_features, classifier, label_converter]) -model = pipeline.fit(train_data) - -predictions = model.transform(test_data) -evaluatorDT = BinaryClassificationEvaluator(rawPredictionCol="prediction") -area_under_curve = evaluatorDT.evaluate(predictions) - -# default evaluation is areaUnderROC -print("areaUnderROC = %g" % area_under_curve) -print(model) -print(predictions) - -# Persistent model, pipeline, and training data -model.write().overwrite().save('model') -train_data.write.option("header", "true").mode("overwrite").csv('train_data') - -evaluation_metrics = { - 'metrics': [ - { - "name": "areaUnderROC", - "value": area_under_curve, - "threshold": 0.7 - } - ] -} - -with open('evaluation.json', 'w') as f: - json.dump(evaluation_metrics, f, indent=2) -f.close() diff --git a/samples/contrib/ibm-samples/watson/README.md b/samples/contrib/ibm-samples/watson/README.md deleted file mode 100644 index ce5cbeb76e0..00000000000 --- a/samples/contrib/ibm-samples/watson/README.md +++ /dev/null @@ -1,73 +0,0 @@ -The `Watson Train and Serve` sample pipeline runs training, storing and deploying a Tensorflow model with MNIST handwriting recognition using [IBM Watson Studio](https://www.ibm.com/cloud/watson-studio) and [IBM Watson Machine Learning](https://www.ibm.com/cloud/machine-learning) service. 
- -# Requirements - -This sample requires the user to have provisioned a machine learning service on Watson, a cloud object store set up and the service credentials configured in the creds.ini file. - -To provision your own Watson Machine Learning services and cloud object store, following are the required steps. - -* IBM Watson Machine Learning service instance - -To create a machine learning service, go to [IBM Cloud](https://console.bluemix.net), login with IBM account id first. From the `Catalog` page, click on `AI` tab on the left side to go to this [page](https://console.bluemix.net/catalog/?category=ai). Then click on the [`Machine Learning`](https://console.bluemix.net/catalog/services/machine-learning) link and follow the instructions to create the service. - -Once the service is created, from service's `Dashboard`, follow the instruction to generate `service credentials`. Refer to IBM Cloud [documents](https://console.bluemix.net/docs/) for help if needed. Collect the `url`, `apikey`, and `instance_id` info from the service credentials as these will be required to access the service. - -* A cloud object store - -Watson Machine Learning service loads datasets from cloud object store and stores model outputs and other artifacts to cloud object store. Users can use any cloud object store they already preserve. Users can also create a cloud object store with `IBM Cloud Object Storage` service by following this [link](https://console.bluemix.net/catalog/services/cloud-object-storage). - -Collect the `access_key_id` and `secret_access_key` fields from the service credentials for the cloud object store. Create the service credentials first if not existed. To ensure generating HMAC credentials, specify the following in the `Add Inline Configuration Parameters` field: `{"HMAC":true}`. - -Collect the `endpoint` info from the endpoint section in the cloud object store service. - -Create two buckets, one for storing the train datasets and model source codes, and one for storing the model outputs. - -* Set up access credentials - -This pipeline sample reads the credentials from a file hosted in a github repo. Refer to `creds.ini` file and input user's specific credentials. Then upload the file to a github repo the user has access. - -Note: make sure the `cos_endpoint` value in the `creds.ini` file must have at least a scheme and hostname. - -To access the credentials file, the user should provide a github access token and the link to the raw content of the file. Modify the `GITHUB_TOKEN` and `CONFIG_FILE_URL` variables in the `watson_train_serve_pipeline.py` file with the user's access token and link. - -# The datasets - -This pipeline sample uses the [MNIST](http://yann.lecun.com/exdb/mnist) datasets, including [train-images-idx3-ubyte.gz](http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz), [train-labels-idx1-ubyte.gz](http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz), [t10k-images-idx3-ubyte.gz](http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz), and [t10k-labels-idx1-ubyte.gz](http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz). - -If users are using their own cloud object store instances, download these datasets and upload to the input bucket created above on the cloud object store. - -# Upload model code and datasets - -Once the user has the model train source code ready, compress all files into one `zip` format file. 
- -For example, run following command to compress the sample model train codes - -```command line -pushd source/model-source-code -zip -j tf-model tf-model/convolutional_network.py tf-model/input_data.py -popd -``` - -This should create a `tf-model.zip` file. - -Upload the model train code, together with the train datasets, to the input bucket created above in the cloud object store. - -# Upload model scoring payload - -At the end of the deploy stage of this sample, `tf-mnist-test-payload.json` is used as the scoring payload to test the deployment. Upload this file to the input bucket in the cloud object store. - -# Compling the pipeline template - -First, install the necessary Python package for setting up the access to the Watson Machine Learning service and cloud object store, with following command - -```command line -pip3 install ai_pipeline_params -``` - -Then follow the guide to [building a pipeline](https://www.kubeflow.org/docs/pipelines/build-pipeline/) to install the Kubeflow Pipelines SDK, and run the following command to compile the sample `watson_train_serve_pipeline.py` file into a workflow specification. - -``` -dsl-compile --py watson_train_serve_pipeline.py --output wml-pipeline.tar.gz -``` - -Then, submit `wml-pipeline.tar.gz` to the kubeflow pipeline UI. From there you can create different experiments and runs using the Watson pipeline definition. diff --git a/samples/contrib/ibm-samples/watson/credentials/creds.ini b/samples/contrib/ibm-samples/watson/credentials/creds.ini deleted file mode 100644 index 0b574a6816f..00000000000 --- a/samples/contrib/ibm-samples/watson/credentials/creds.ini +++ /dev/null @@ -1,13 +0,0 @@ -[CREDENTIALS] -wml_url=https://us-south.ml.cloud.ibm.com -wml_instance_id=WMLInstanceId -wml_apikey=WMLAPIKey - -wml_data_source_type=s3 - -cos_endpoint = ObjectStoreEndpointUrl -cos_access_key = ObjectStoreAccessKeyID -cos_secret_key = ObjectStoreSecretAccessKey -cos_input_bucket = data-bucket -cos_output_bucket = result-bucket - diff --git a/samples/contrib/ibm-samples/watson/credentials/github-creds.ini b/samples/contrib/ibm-samples/watson/credentials/github-creds.ini deleted file mode 100644 index f3d97f7dd03..00000000000 --- a/samples/contrib/ibm-samples/watson/credentials/github-creds.ini +++ /dev/null @@ -1,3 +0,0 @@ -[CREDENTIALS] -config_file_url = https://raw.githubusercontent.com/user/repository/branch/creds.ini -github_token = GithubToken diff --git a/samples/contrib/ibm-samples/watson/source/model-source-code/tf-mnist-test-payload.json b/samples/contrib/ibm-samples/watson/source/model-source-code/tf-mnist-test-payload.json deleted file mode 100644 index 597c8edd5bf..00000000000 --- a/samples/contrib/ibm-samples/watson/source/model-source-code/tf-mnist-test-payload.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "modelId": "", - "deploymentId": "", - "payload": { - "values": [ - [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0313725508749485, 0.48235297203063965, 0.6352941393852234, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3294117748737335, 0.40392160415649414, 0.7921569347381592, 0.8823530077934265, 0.3333333432674408, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.07058823853731155, 0.1568627506494522, 0.6705882549285889, 0.874509871006012, 0.9411765336990356, 0.9254902601242065, 0.29019609093666077, 0.22745099663734436, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.19215688109397888, 0.250980406999588, 0.5254902243614197, 0.9411765336990356, 1.0, 0.9921569228172302, 0.6313725709915161, 0.22352942824363708, 0.05490196496248245, 0.1411764770746231, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.38431376218795776, 0.8313726186752319, 0.8352941870689392, 0.9529412388801575, 0.9921569228172302, 0.9921569228172302, 0.9921569228172302, 0.7529412508010864, 0.2666666805744171, 0.08235294371843338, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6274510025978088, 0.9019608497619629, 0.9058824181556702, 0.9019608497619629, 0.6235294342041016, 0.3137255012989044, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1411764770746231, 0.3294117748737335, 0.4745098352432251, 0.10588236153125763, 0.10588236153125763, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.16862745583057404, 0.4941176772117615, 0.3686274588108063, 0.22352942824363708, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3333333432674408, 0.7921569347381592, 0.1882353127002716, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.05490196496248245, 0.9294118285179138, 0.9529412388801575, 0.13725490868091583, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1411764770746231, 0.7058823704719543, 1.0, 0.8235294818878174, 0.7568628191947937, 0.14509804546833038, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.125490203499794, 0.24705883860588074, 0.24705883860588074, 0.7176470756530762, 0.5568627715110779, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5843137502670288, 0.8000000715255737, 0.03529411926865578, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.9960784912109375, 0.8823530077934265, 0.05490196496248245, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.9960784912109375, 0.3686274588108063, 0.01568627543747425, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.8627451658248901, 0.04313725605607033, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08627451211214066, 0.6784313917160034, 0.9960784912109375, 0.24705883860588074, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.007843137718737125, 0.4901961088180542, 0.9921569228172302, 0.8784314393997192, 0.125490203499794, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08627451211214066, 0.9921569228172302, 0.7843137979507446, 0.1411764770746231, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.01568627543747425, 0.5098039507865906, 0.027450982481241226, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], - [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3607843220233917, 0.9921569228172302, 0.5529412031173706, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.07450980693101883, 0.7686275243759155, 0.988235354423523, 0.9450981020927429, 0.0941176563501358, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5764706134796143, 0.8823530077934265, 0.3803921937942505, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.22352942824363708, 0.988235354423523, 0.988235354423523, 0.9921569228172302, 0.10588236153125763, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.125490203499794, 0.9921569228172302, 0.988235354423523, 0.7647059559822083, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.22352942824363708, 0.988235354423523, 0.988235354423523, 0.6980392336845398, 0.03529411926865578, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5490196347236633, 0.9921569228172302, 0.988235354423523, 0.40000003576278687, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.22352942824363708, 0.988235354423523, 0.988235354423523, 0.5490196347236633, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.062745101749897, 0.7960785031318665, 0.9921569228172302, 0.988235354423523, 0.21568629145622253, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4705882668495178, 0.9921569228172302, 0.9921569228172302, 0.5529412031173706, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.11372549831867218, 0.9921569228172302, 1.0, 0.8431373238563538, 0.12156863510608673, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.7725490927696228, 0.988235354423523, 0.988235354423523, 0.5490196347236633, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.11372549831867218, 0.988235354423523, 
0.9921569228172302, 0.16470588743686676, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.7725490927696228, 0.988235354423523, 0.988235354423523, 0.12156863510608673, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.05098039656877518, 0.30980393290519714, 0.988235354423523, 0.9921569228172302, 0.10588236153125763, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.22352942824363708, 0.917647123336792, 0.988235354423523, 0.9254902601242065, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.22352942824363708, 0.988235354423523, 0.988235354423523, 0.6980392336845398, 0.03529411926865578, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3333333432674408, 0.988235354423523, 0.988235354423523, 0.7411764860153198, 0.5529412031173706, 0.5490196347236633, 0.5490196347236633, 0.5490196347236633, 0.5490196347236633, 0.30980393290519714, 0.1882353127002716, 0.6470588445663452, 0.988235354423523, 0.988235354423523, 0.5490196347236633, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.33725491166114807, 0.9921569228172302, 0.9921569228172302, 0.9921569228172302, 1.0, 0.9921569228172302, 0.9921569228172302, 0.9921569228172302, 0.9921569228172302, 1.0, 0.9921569228172302, 0.9921569228172302, 0.9921569228172302, 0.9921569228172302, 0.5529412031173706, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.22352942824363708, 0.9137255549430847, 0.988235354423523, 0.988235354423523, 0.9921569228172302, 0.988235354423523, 0.988235354423523, 0.9490196704864502, 0.8392157554626465, 0.8431373238563538, 0.9529412388801575, 0.988235354423523, 0.988235354423523, 0.988235354423523, 0.5490196347236633, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.29411765933036804, 0.7647059559822083, 0.7647059559822083, 0.2196078598499298, 0.21568629145622253, 0.21568629145622253, 0.19215688109397888, 0.12156863510608673, 0.12156863510608673, 0.19607844948768616, 0.8196079134941101, 0.988235354423523, 0.8627451658248901, 0.12156863510608673, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.2862745225429535, 0.917647123336792, 0.988235354423523, 0.4392157196998596, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.8823530077934265, 0.988235354423523, 0.988235354423523, 0.4392157196998596, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.8862745761871338, 0.9921569228172302, 0.9921569228172302, 0.4392157196998596, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3960784673690796, 0.9764706492424011, 0.988235354423523, 0.9490196704864502, 0.29019609093666077, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4431372880935669, 0.988235354423523, 0.988235354423523, 0.9647059440612793, 0.3450980484485626, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.14901961386203766, 0.917647123336792, 0.988235354423523, 0.6039215922355652, 0.38823533058166504, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.572549045085907, 0.988235354423523, 0.3294117748737335, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] - ] - } -} diff --git a/samples/contrib/ibm-samples/watson/source/model-source-code/tf-model/convolutional_network.py b/samples/contrib/ibm-samples/watson/source/model-source-code/tf-model/convolutional_network.py deleted file mode 100644 index 4b1d78233c5..00000000000 --- a/samples/contrib/ibm-samples/watson/source/model-source-code/tf-model/convolutional_network.py +++ /dev/null @@ -1,216 +0,0 @@ -''' -A Convolutional Network implementation example using TensorFlow library. -This example is using the MNIST database of handwritten digits -(http://yann.lecun.com/exdb/mnist/) - -Author: Aymeric Damien -Project: https://github.com/aymericdamien/TensorFlow-Examples/ -''' - -import tensorflow as tf -import input_data -import sys -import os -import itertools -import re -import time -from random import randint - -train_images_file = "" -train_labels_file = "" -test_images_file = "" -test_labels_file = "" - -# Parameters -learning_rate = 0.001 -training_iters = 200000 -batch_size = 128 -display_step = 10 - -model_path = os.environ["RESULT_DIR"]+"/model" - -# This helps distinguish instances when the training job is restarted. -instance_id = randint(0,9999) - -def main(argv): - - if len(argv) < 12: - sys.exit("Not enough arguments provided.") - - global train_images_file, train_labels_file, test_images_file, test_labels_file, learning_rate, training_iters - - i = 1 - while i <= 12: - arg = str(argv[i]) - if arg == "--trainImagesFile": - train_images_file = str(argv[i+1]) - elif arg == "--trainLabelsFile": - train_labels_file = str(argv[i+1]) - elif arg == "--testImagesFile": - test_images_file = str(argv[i+1]) - elif arg == "--testLabelsFile": - test_labels_file = str(argv[i+1]) - elif arg == "--learningRate": - learning_rate = float(argv[i+1]) - elif arg =="--trainingIters": - training_iters = int(argv[i+1]) - i += 2 - -if __name__ == "__main__": - main(sys.argv) - -# Import MINST data -mnist = input_data.read_data_sets(train_images_file, - train_labels_file, test_images_file, test_labels_file, one_hot=True) - -# Network Parameters -n_input = 784 # MNIST data input (img shape: 28*28) -n_classes = 10 # MNIST total classes (0-9 digits) -dropout = 0.75 # Dropout, probability to keep units - -# tf Graph input -x = tf.placeholder(tf.float32, [None, n_input], name="x_input") -y = tf.placeholder(tf.float32, [None, n_classes]) - -# Create some wrappers for simplicity -def conv2d(x, W, b, strides=1): - # Conv2D wrapper, with bias and relu activation - x = tf.nn.conv2d(x, W, strides=[1, strides, strides, 1], padding='SAME') - x = tf.nn.bias_add(x, b) - return tf.nn.relu(x) - - -def maxpool2d(x, k=2): - # MaxPool2D wrapper - return tf.nn.max_pool(x, ksize=[1, k, k, 1], strides=[1, k, k, 1], - padding='SAME') - - -# Create model -def conv_net(x, weights, biases, dropout): - # Reshape input picture - x = tf.reshape(x, shape=[-1, 28, 28, 1]) - - # Convolution Layer - conv1 = conv2d(x, weights['wc1'], biases['bc1']) - # Max Pooling (down-sampling) - conv1 = maxpool2d(conv1, k=2) - - # Convolution Layer - conv2 = conv2d(conv1, weights['wc2'], biases['bc2']) - # Max Pooling (down-sampling) - conv2 = maxpool2d(conv2, k=2) - - # Fully connected layer - # Reshape conv2 output to fit fully 
connected layer input - fc1 = tf.reshape(conv2, [-1, weights['wd1'].get_shape().as_list()[0]]) - fc1 = tf.add(tf.matmul(fc1, weights['wd1']), biases['bd1']) - fc1 = tf.nn.relu(fc1) - # Apply Dropout - fc1 = tf.nn.dropout(fc1, dropout) - - # Output, class prediction - out = tf.add(tf.matmul(fc1, weights['out']), biases['out']) - return out - -# Store layers weight & bias -weights = { - # 5x5 conv, 1 input, 32 outputs - 'wc1': tf.Variable(tf.random_normal([5, 5, 1, 32])), - # 5x5 conv, 32 inputs, 64 outputs - 'wc2': tf.Variable(tf.random_normal([5, 5, 32, 64])), - # fully connected, 7*7*64 inputs, 1024 outputs - 'wd1': tf.Variable(tf.random_normal([7*7*64, 1024])), - # 1024 inputs, 10 outputs (class prediction) - 'out': tf.Variable(tf.random_normal([1024, n_classes])) -} - -biases = { - 'bc1': tf.Variable(tf.random_normal([32])), - 'bc2': tf.Variable(tf.random_normal([64])), - 'bd1': tf.Variable(tf.random_normal([1024])), - 'out': tf.Variable(tf.random_normal([n_classes])) -} - -# Construct model -pred = conv_net(x, weights, biases, dropout) - -# Define loss and optimizer -cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=y)) -optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost) - -predictor = tf.argmax(pred, 1, name="predictor") - -# Evaluate model -correct_pred = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1)) -accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32)) - -# Initializing the variables -init = tf.global_variables_initializer() - -# Launch the graph -with tf.Session() as sess: - sess.run(init) - - step = 1 - # Keep training until reach max iterations - while step * batch_size < training_iters: - batch_x, batch_y = mnist.train.next_batch(batch_size) - # Run optimization op (backprop) - sess.run(optimizer, feed_dict={x: batch_x, y: batch_y}) - if step % display_step == 0: - - # Calculate batch loss and accuracy - loss, acc = sess.run([cost, accuracy], feed_dict={x: batch_x, - y: batch_y}) - print("Time " + "{:.4f}".format(time.time()) + \ - ", instance " + str(instance_id) + \ - ", Iter " + str(step * batch_size) + \ - ", Minibatch Loss= " + "{:.6f}".format(loss) + \ - ", Training Accuracy= " + "{:.5f}".format(acc)) - sys.stdout.flush() - step += 1 - print("Optimization Finished!") - - classification_inputs = tf.saved_model.utils.build_tensor_info(x) - classification_outputs_classes = tf.saved_model.utils.build_tensor_info(predictor) - - classification_signature = ( - tf.saved_model.signature_def_utils.build_signature_def( - inputs={ - tf.saved_model.signature_constants.CLASSIFY_INPUTS: - classification_inputs - }, - outputs={ - tf.saved_model.signature_constants.CLASSIFY_OUTPUT_CLASSES: - classification_outputs_classes - }, - method_name=tf.saved_model.signature_constants.CLASSIFY_METHOD_NAME)) - - print("classification_signature content:") - print(classification_signature) - - # Calculate accuracy for 256 mnist test images - print("Testing Accuracy:", \ - sess.run(accuracy, feed_dict={x: mnist.test.images[:256], - y: mnist.test.labels[:256]})) - - - builder = tf.saved_model.builder.SavedModelBuilder(model_path) - legacy_init_op = tf.group(tf.tables_initializer(), name='legacy_init_op') - builder.add_meta_graph_and_variables( - sess, [tf.saved_model.tag_constants.SERVING], - signature_def_map={ - 'predict_images': classification_signature, - }, - legacy_init_op=legacy_init_op) - - save_path = str(builder.save()) - - # save_path = saver.save(sess, model_path) - print("Model saved in file: %s" % save_path) - - 
os.system("(cd $RESULT_DIR/model;tar cvfz ../saved_model.tar.gz .)") - print(str(os.listdir(os.environ["RESULT_DIR"]))) - print(os.environ["RESULT_DIR"]) - sys.stdout.flush() diff --git a/samples/contrib/ibm-samples/watson/source/model-source-code/tf-model/input_data.py b/samples/contrib/ibm-samples/watson/source/model-source-code/tf-model/input_data.py deleted file mode 100644 index 9c0427473e0..00000000000 --- a/samples/contrib/ibm-samples/watson/source/model-source-code/tf-model/input_data.py +++ /dev/null @@ -1,148 +0,0 @@ - -#!/usr/bin/env python - -"""Functions for downloading and reading MNIST data.""" -import gzip -import os -from six.moves import xrange -from six.moves.urllib.request import urlretrieve -import numpy - -def _read32(bytestream): - dt = numpy.dtype(numpy.uint32).newbyteorder('>') - return numpy.frombuffer(bytestream.read(4), dtype=dt)[0] - - -def extract_images(filename): - """Extract the images into a 4D uint8 numpy array [index, y, x, depth].""" - print('Extracting', filename) - with gzip.open(filename) as bytestream: - magic = _read32(bytestream) - if magic != 2051: - raise ValueError( - 'Invalid magic number %d in MNIST image file: %s' % - (magic, filename)) - num_images = _read32(bytestream) - rows = _read32(bytestream) - cols = _read32(bytestream) - buf = bytestream.read(rows * cols * num_images) - data = numpy.frombuffer(buf, dtype=numpy.uint8) - data = data.reshape(num_images, rows, cols, 1) - return data - - -def dense_to_one_hot(labels_dense, num_classes=10): - """Convert class labels from scalars to one-hot vectors.""" - num_labels = labels_dense.shape[0] - index_offset = numpy.arange(num_labels) * num_classes - labels_one_hot = numpy.zeros((num_labels, num_classes)) - labels_one_hot.flat[index_offset + labels_dense.ravel()] = 1 - return labels_one_hot - - -def extract_labels(filename, one_hot=False): - """Extract the labels into a 1D uint8 numpy array [index].""" - print('Extracting', filename) - with gzip.open(filename) as bytestream: - magic = _read32(bytestream) - if magic != 2049: - raise ValueError( - 'Invalid magic number %d in MNIST label file: %s' % - (magic, filename)) - num_items = _read32(bytestream) - buf = bytestream.read(num_items) - labels = numpy.frombuffer(buf, dtype=numpy.uint8) - if one_hot: - return dense_to_one_hot(labels) - return labels - - -class DataSet(object): - def __init__(self, images, labels, fake_data=False): - if fake_data: - self._num_examples = 10000 - else: - assert images.shape[0] == labels.shape[0], ( - "images.shape: %s labels.shape: %s" % (images.shape, - labels.shape)) - self._num_examples = images.shape[0] - # Convert shape from [num examples, rows, columns, depth] - # to [num examples, rows*columns] (assuming depth == 1) - assert images.shape[3] == 1 - images = images.reshape(images.shape[0], - images.shape[1] * images.shape[2]) - # Convert from [0, 255] -> [0.0, 1.0]. 
- images = images.astype(numpy.float32) - images = numpy.multiply(images, 1.0 / 255.0) - self._images = images - self._labels = labels - self._epochs_completed = 0 - self._index_in_epoch = 0 - - @property - def images(self): - return self._images - - @property - def labels(self): - return self._labels - - @property - def num_examples(self): - return self._num_examples - - @property - def epochs_completed(self): - return self._epochs_completed - - def next_batch(self, batch_size, fake_data=False): - """Return the next `batch_size` examples from this data set.""" - if fake_data: - fake_image = [1.0 for _ in xrange(784)] - fake_label = 0 - return [fake_image for _ in xrange(batch_size)], [ - fake_label for _ in xrange(batch_size)] - start = self._index_in_epoch - self._index_in_epoch += batch_size - if self._index_in_epoch > self._num_examples: - # Finished epoch - self._epochs_completed += 1 - # Shuffle the data - perm = numpy.arange(self._num_examples) - numpy.random.shuffle(perm) - self._images = self._images[perm] - self._labels = self._labels[perm] - # Start next epoch - start = 0 - self._index_in_epoch = batch_size - assert batch_size <= self._num_examples - end = self._index_in_epoch - return self._images[start:end], self._labels[start:end] - - -def read_data_sets(train_images_file, train_labels_file, test_images_file, test_labels_file, fake_data=False, one_hot=False): - class DataSets(object): - pass - data_sets = DataSets() - if fake_data: - data_sets.train = DataSet([], [], fake_data=True) - data_sets.validation = DataSet([], [], fake_data=True) - data_sets.test = DataSet([], [], fake_data=True) - return data_sets - TRAIN_IMAGES = train_images_file - TRAIN_LABELS = train_labels_file - TEST_IMAGES = test_images_file - TEST_LABELS = test_labels_file - VALIDATION_SIZE = 5000 - train_images = extract_images(TRAIN_IMAGES) - train_labels = extract_labels(TRAIN_LABELS, one_hot=one_hot) - test_images = extract_images(TEST_IMAGES) - test_labels = extract_labels(TEST_LABELS, one_hot=one_hot) - validation_images = train_images[:VALIDATION_SIZE] - validation_labels = train_labels[:VALIDATION_SIZE] - train_images = train_images[VALIDATION_SIZE:] - train_labels = train_labels[VALIDATION_SIZE:] - data_sets.train = DataSet(train_images, train_labels) - data_sets.validation = DataSet(validation_images, validation_labels) - data_sets.test = DataSet(test_images, test_labels) - return data_sets diff --git a/samples/contrib/ibm-samples/watson/watson_train_serve_pipeline.py b/samples/contrib/ibm-samples/watson/watson_train_serve_pipeline.py deleted file mode 100644 index 896bd02f411..00000000000 --- a/samples/contrib/ibm-samples/watson/watson_train_serve_pipeline.py +++ /dev/null @@ -1,103 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -# generate default secret name -import os -import kfp -from kfp import components -from kfp import dsl - -secret_name = 'kfp-creds' -configuration_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/commons/config/component.yaml') -train_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/watson/train/component.yaml') -store_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/watson/store/component.yaml') -deploy_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/watson/deploy/component.yaml') - -# Helper function for secret mount and image pull policy -def use_ai_pipeline_params(secret_name, secret_volume_mount_path='/app/secrets', image_pull_policy='IfNotPresent'): - def _use_ai_pipeline_params(task): - from kubernetes import client as k8s_client - task = task.add_volume(k8s_client.V1Volume(name=secret_name, # secret_name as volume name - secret=k8s_client.V1SecretVolumeSource(secret_name=secret_name))) - task.container.add_volume_mount(k8s_client.V1VolumeMount(mount_path=secret_volume_mount_path, - name=secret_name)) - task.container.set_image_pull_policy(image_pull_policy) - return task - return _use_ai_pipeline_params - - -# create pipelines - -@dsl.pipeline( - name='KFP on WML training', - description='Kubeflow pipelines running on WML performing tensorflow image recognition.' -) -def kfp_wml_pipeline( - GITHUB_TOKEN='', - CONFIG_FILE_URL='https://raw.githubusercontent.com/user/repository/branch/creds.ini', - train_code='tf-model.zip', - execution_command='\'python3 convolutional_network.py --trainImagesFile ${DATA_DIR}/train-images-idx3-ubyte.gz --trainLabelsFile ${DATA_DIR}/train-labels-idx1-ubyte.gz --testImagesFile ${DATA_DIR}/t10k-images-idx3-ubyte.gz --testLabelsFile ${DATA_DIR}/t10k-labels-idx1-ubyte.gz --learningRate 0.001 --trainingIters 20000\'', - framework='tensorflow', - framework_version='1.15', - runtime = 'python', - runtime_version='3.6', - run_definition = 'wml-tensorflow-definition', - run_name = 'wml-tensorflow-run', - model_name='wml-tensorflow-mnist', - scoring_payload='tf-mnist-test-payload.json', - compute_name='k80', - compute_nodes='1' -): - # op1 - this operation will create the credentials as secrets to be used by other operations - get_configuration = configuration_op( - token=GITHUB_TOKEN, - url=CONFIG_FILE_URL, - name=secret_name - ) - - # op2 - this operation trains the model with the model codes and data saved in the cloud object store - wml_train = train_op( - config=get_configuration.output, - train_code=train_code, - execution_command=execution_command, - framework=framework, - framework_version=framework_version, - runtime=runtime, - runtime_version=runtime_version, - run_definition=run_definition, - run_name=run_name, - compute_name=compute_name, - compute_nodes=compute_nodes - ).apply(use_ai_pipeline_params(secret_name, image_pull_policy='Always')) - - # op3 - this operation stores the model trained above - wml_store = store_op( - wml_train.outputs['run_uid'], - model_name, - framework=framework, - framework_version=framework_version, - runtime_version=runtime_version - ).apply(use_ai_pipeline_params(secret_name, image_pull_policy='Always')) - - # op4 - this operation deploys the model to a web service and run scoring with the payload in the 
cloud object store - wml_deploy = deploy_op( - wml_store.output, - model_name, - scoring_payload - ).apply(use_ai_pipeline_params(secret_name, image_pull_policy='Always')) - -if __name__ == '__main__': - # compile the pipeline - import kfp.compiler as compiler - pipeline_filename = kfp_wml_pipeline.__name__ + '.zip' - compiler.Compiler().compile(kfp_wml_pipeline, pipeline_filename) diff --git a/samples/contrib/image-captioning-gcp/Image Captioning TF 2.0.ipynb b/samples/contrib/image-captioning-gcp/Image Captioning TF 2.0.ipynb deleted file mode 100644 index db02802207e..00000000000 --- a/samples/contrib/image-captioning-gcp/Image Captioning TF 2.0.ipynb +++ /dev/null @@ -1,953 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Image Captioning with Attention in Tensorflow 2.0" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This notebook modifies the [Image Captioning with Attention Tensorflow 2.0 notebook](https://colab.sandbox.google.com/github/tensorflow/docs/blob/master/site/en/r2/tutorials/text/image_captioning.ipynb)\n", - "to work with kubeflow pipelines. This pipeline creates a model that can caption an image." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Before running notebook:\n", - "Make sure you completed the setup instructions in the README (including creating the base image).\n", - "\n", - "### Install Kubeflow pipelines\n", - "Install the `kfp` package if you haven't already." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "!pip3 install kfp --upgrade" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Activate service account credentials\n", - "This allows for using `gsutil` from the notebook to upload the dataset to GCS." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "!gcloud auth activate-service-account --key-file=${GOOGLE_APPLICATION_CREDENTIALS}" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Download dataset and upload to GCS " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "First, you have to download the [MS COCO dataset](http://cocodataset.org/#download). This sample uses both the 2014 train images and 2014 train/val annotations. The following cells download a small subset (<1000 imgs) of the dataset and the annotations to the GCS bucket specified below with `GCS_DATASET_PATH`." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Location to download dataset and put onto GCS (should be associated\n", - "# with Kubeflow project)\n", - "GCS_BUCKET = 'gs://[YOUR-BUCKET-NAME]'\n", - "GCS_DATASET_PATH = GCS_BUCKET + '/ms-coco'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Download images\n", - "Downloads images to `${GCS_DATASET_PATH}/train2014/train2014`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Download images (use -x to ignore ~99% of images)\n", - "!gsutil -m rsync -x \".*0\\.jpg|.*1\\.jpg|.*2\\.jpg|.*3\\.jpg|.*4\\.jpg|.*5\\.jpg|.*6\\.jpg|.*7\\.jpg|.*8\\.jpg|.*09\\.jpg|.*19\\.jpg|.*29\\.jpg|.*39\\.jpg|.*49\\.jpg|.*59\\.jpg|.*69\\.jpg|.*79\\.jpg|.*89\\.jpg\" gs://images.cocodataset.org/train2014 {GCS_DATASET_PATH}/train2014/train2014\n", - "\n", - "# To download the entire dataset uncomment and use the following command instead\n", - "# !gsutil -m rsync gs://images.cocodataset.org/train2014 {GCS_DATASET_PATH}/train2014/train2014" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Download annotations\n", - "For some reason MS COCO blocks using `gsutil` with the annotations (GitHub issue [here](https://github.com/cocodataset/cocoapi/issues/216)). You can work around this by downloading it locally, and then uploading it to GCS." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Download to local, upload to GCS, then delete local download\n", - "!wget http://images.cocodataset.org/annotations/annotations_trainval2014.zip\n", - "!unzip annotations_trainval2014.zip -d annotations_trainval2014\n", - "!gsutil -m cp -r annotations_trainval2014 {GCS_DATASET_PATH}\n", - "!rm -r annotations_trainval2014\n", - "!rm annotations_trainval2014.zip" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Setup project info and imports" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Kubeflow project settings\n", - "PROJECT_NAME = '[YOUR-PROJECT-NAME]' \n", - "PIPELINE_STORAGE_PATH = GCS_BUCKET + '/ms-coco/components' # path to save pipeline component images\n", - "BASE_IMAGE = 'gcr.io/%s/img-cap:latest' % PROJECT_NAME # using image created in README instructions\n", - "\n", - "# Target images for creating components\n", - "PREPROCESS_IMG = 'gcr.io/%s/ms-coco/preprocess:latest' % PROJECT_NAME\n", - "TOKENIZE_IMG = 'gcr.io/%s/ms-coco/tokenize:latest' % PROJECT_NAME\n", - "TRAIN_IMG = 'gcr.io/%s/ms-coco/train:latest' % PROJECT_NAME\n", - "PREDICT_IMG = 'gcr.io/%s/ms-coco/predict:latest' % PROJECT_NAME" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp\n", - "import kfp.dsl as dsl\n", - "from kfp import compiler" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Create pipeline components" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Data preprocessing component\n", - "This component takes `num_examples` images from `dataset_path` and feeds them through the deep CNN inceptionV3 (without the head). The model outputs a tensor of shape `(64 x 2048)` that represents (2048) features obtained after dividing the image into an 8x8 (64) grid. The resulting model outputs are stored in `OUTPUT_DIR`." 
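If it is not obvious where the `(64 x 2048)` shape comes from, the following standalone check (not part of the pipeline component; `weights=None` is used only to skip downloading the ImageNet weights) shows that InceptionV3 without its classification head maps a 299x299 image to an 8x8 grid of 2048-dimensional features, which the component then flattens to 64 feature vectors per image:

```
import tensorflow as tf

# Standalone shape check: InceptionV3 without its head yields an 8x8x2048
# feature map for a 299x299 RGB input; flattening the spatial grid gives
# the (64, 2048) tensor per image that the preprocessing component stores.
backbone = tf.keras.applications.InceptionV3(include_top=False, weights=None)
features = backbone(tf.zeros([1, 299, 299, 3]))
print(features.shape)  # (1, 8, 8, 2048)
flat = tf.reshape(features, (features.shape[0], -1, features.shape[3]))
print(flat.shape)      # (1, 64, 2048)
```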
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "@dsl.python_component(\n", - " name='img_data_preprocessing',\n", - " description='preprocesses images with inceptionV3',\n", - " base_image=BASE_IMAGE\n", - ")\n", - "def preprocess(dataset_path: str, num_examples: int, OUTPUT_DIR: str, \n", - " batch_size: int) -> str:\n", - " import json\n", - " import numpy as np\n", - " import tensorflow as tf\n", - " from tensorflow.python.lib.io import file_io\n", - " from sklearn.utils import shuffle\n", - " \n", - " if OUTPUT_DIR == 'default':\n", - " OUTPUT_DIR = dataset_path + '/preprocess/'\n", - " \n", - " annotation_file = dataset_path + '/annotations_trainval2014/annotations/captions_train2014.json'\n", - " PATH = dataset_path + '/train2014/train2014/'\n", - " files_downloaded = tf.io.gfile.listdir(PATH)\n", - " \n", - " # Read the json file (CHANGE open() TO file_io.FileIO to use GCS)\n", - " with file_io.FileIO(annotation_file, 'r') as f:\n", - " annotations = json.load(f)\n", - "\n", - " # Store captions and image names in vectors\n", - " all_captions = []\n", - " all_img_name_vector = []\n", - " \n", - " print('Determining which images are in storage...')\n", - " for annot in annotations['annotations']:\n", - " caption = ' ' + annot['caption'] + ' '\n", - " image_id = annot['image_id']\n", - " img_name = 'COCO_train2014_' + '%012d.jpg' % (image_id)\n", - " full_coco_image_path = PATH + img_name\n", - " \n", - " if img_name in files_downloaded: # Only have subset\n", - " all_img_name_vector.append(full_coco_image_path)\n", - " all_captions.append(caption)\n", - "\n", - " # Shuffle captions and image_names together\n", - " train_captions, img_name_vector = shuffle(all_captions,\n", - " all_img_name_vector,\n", - " random_state=1)\n", - "\n", - " # Select the first num_examples captions/imgs from the shuffled set\n", - " train_captions = train_captions[:num_examples]\n", - " img_name_vector = img_name_vector[:num_examples]\n", - " \n", - "\n", - " \n", - " # Preprocess the images before feeding into inceptionV3\n", - " def load_image(image_path):\n", - " img = tf.io.read_file(image_path)\n", - " img = tf.image.decode_jpeg(img, channels=3)\n", - " img = tf.image.resize(img, (299, 299))\n", - " img = tf.keras.applications.inception_v3.preprocess_input(img)\n", - " return img, image_path\n", - " \n", - " # Create model for processing images \n", - " image_model = tf.keras.applications.InceptionV3(include_top=False,\n", - " weights='imagenet')\n", - " new_input = image_model.input\n", - " hidden_layer = image_model.layers[-1].output\n", - " image_features_extract_model = tf.keras.Model(new_input, hidden_layer)\n", - " \n", - " # Save extracted features in GCS\n", - " print('Extracting features from images...')\n", - " \n", - " # Get unique images\n", - " encode_train = sorted(set(img_name_vector))\n", - " \n", - " image_dataset = tf.data.Dataset.from_tensor_slices(encode_train)\n", - " image_dataset = image_dataset.map(\n", - " load_image, num_parallel_calls=tf.data.experimental.AUTOTUNE).batch(batch_size)\n", - " \n", - " for img, path in image_dataset:\n", - " batch_features = image_features_extract_model(img)\n", - " batch_features = tf.reshape(batch_features,\n", - " (batch_features.shape[0], -1, batch_features.shape[3]))\n", - "\n", - " for bf, p in zip(batch_features, path):\n", - " path_of_feature = p.numpy().decode(\"utf-8\")\n", - " \n", - " # Save to a different location and as numpy array\n", - " path_of_feature = 
path_of_feature.replace('.jpg', '.npy')\n", - " path_of_feature = path_of_feature.replace(PATH, OUTPUT_DIR)\n", - " np.save(file_io.FileIO(path_of_feature, 'w'), bf.numpy())\n", - " \n", - " # Create array for locations of preprocessed images\n", - " preprocessed_imgs = [img.replace('.jpg', '.npy') for img in img_name_vector]\n", - " preprocessed_imgs = [img.replace(PATH, OUTPUT_DIR) for img in preprocessed_imgs]\n", - " \n", - " # Save train_captions and preprocessed_imgs to file\n", - " train_cap_path = OUTPUT_DIR + 'train_captions.npy' # array of captions\n", - " preprocessed_imgs_path = OUTPUT_DIR + 'preprocessed_imgs.py'# array of paths to preprocessed images\n", - " \n", - " train_captions = np.array(train_captions)\n", - " np.save(file_io.FileIO(train_cap_path, 'w'), train_captions)\n", - " \n", - " preprocessed_imgs = np.array(preprocessed_imgs)\n", - " np.save(file_io.FileIO(preprocessed_imgs_path, 'w'), preprocessed_imgs)\n", - " \n", - " return (train_cap_path, preprocessed_imgs_path)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "preprocessing_img_op = compiler.build_python_component(\n", - " component_func=preprocess,\n", - " staging_gcs_path=PIPELINE_STORAGE_PATH,\n", - " base_image=BASE_IMAGE,\n", - " dependency=[kfp.compiler.VersionedDependency(name='scikit-learn', version='0.21.2')],\n", - " target_image=PREPROCESS_IMG)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Tokenizing component" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This component takes the training captions from the previous step and tokenizes them to convert them into numerical values so that they can be fed into the model as input. It outputs the tokenized captions in `OUTPUT_DIR`." 
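As a toy illustration of what this step produces (the two captions below are made up, not taken from MS COCO), the Keras `Tokenizer` assigns each word an integer id and the resulting sequences are padded to a common length:

```
import tensorflow as tf

# Toy illustration of the tokenizing step: words -> integer ids -> padded sequences.
captions = ["a dog rides a skateboard", "a man riding a horse on the beach"]
tokenizer = tf.keras.preprocessing.text.Tokenizer(num_words=5000, oov_token="<unk>")
tokenizer.fit_on_texts(captions)
seqs = tokenizer.texts_to_sequences(captions)
padded = tf.keras.preprocessing.sequence.pad_sequences(seqs, padding="post")
print(tokenizer.word_index)  # e.g. {'<unk>': 1, 'a': 2, ...}
print(padded)                # integer ids, zero-padded to the longest caption
```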
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "@dsl.python_component(\n", - " name='tokenize_captions',\n", - " description='Tokenize captions to create training data',\n", - " base_image=BASE_IMAGE\n", - ")\n", - "def tokenize_captions(dataset_path: str, preprocess_output: str, OUTPUT_DIR: str,\n", - " top_k: int) -> str:\n", - " import pickle\n", - " import tensorflow as tf\n", - " import numpy as np\n", - " from tensorflow.python.lib.io import file_io\n", - " from io import BytesIO\n", - " from ast import literal_eval as make_tuple\n", - " \n", - " # Convert output from string to tuple and unpack\n", - " preprocess_output = make_tuple(preprocess_output)\n", - " train_caption_path = preprocess_output[0]\n", - " \n", - " if OUTPUT_DIR == 'default':\n", - " OUTPUT_DIR = dataset_path + '/tokenize/'\n", - " \n", - " tokenizer = tf.keras.preprocessing.text.Tokenizer(num_words=top_k,\n", - " oov_token=\"\",\n", - " filters='!\"#$%&()*+.,-/:;=?@[\\]^_`{|}~ ')\n", - " f = BytesIO(file_io.read_file_to_string(train_caption_path, \n", - " binary_mode=True))\n", - " train_captions = np.load(f)\n", - " \n", - " # Tokenize captions\n", - " tokenizer.fit_on_texts(train_captions)\n", - " train_seqs = tokenizer.texts_to_sequences(train_captions)\n", - " tokenizer.word_index[''] = 0\n", - " tokenizer.index_word[0] = ''\n", - " \n", - " cap_vector = tf.keras.preprocessing.sequence.pad_sequences(train_seqs, padding='post')\n", - " \n", - " # Find the maximum length of any caption in our dataset\n", - " def calc_max_length(tensor):\n", - " return max(len(t) for t in tensor)\n", - " \n", - " max_length = calc_max_length(train_seqs)\n", - " \n", - " # Save tokenizer\n", - " tokenizer_file_path = OUTPUT_DIR + 'tokenizer.pickle'\n", - " with file_io.FileIO(tokenizer_file_path, 'wb') as output:\n", - " pickle.dump(tokenizer, output, protocol=pickle.HIGHEST_PROTOCOL)\n", - " \n", - " # Save train_seqs\n", - " cap_vector_file_path = OUTPUT_DIR + 'cap_vector.npy'\n", - " np.save(file_io.FileIO(cap_vector_file_path, 'w'), cap_vector)\n", - " \n", - " return str(max_length), tokenizer_file_path, cap_vector_file_path" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "tokenize_captions_op = compiler.build_python_component(\n", - " component_func=tokenize_captions,\n", - " staging_gcs_path=PIPELINE_STORAGE_PATH,\n", - " base_image=BASE_IMAGE,\n", - " target_image=TOKENIZE_IMG)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Component for training model (and saving it)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This component trains the model by creating a `tf.data.Dataset` from the captions and preprocessed images. The trained model is saved in `train_output_dir/checkpoints/`. The training loss is plotted in tensorboard. There are various parameters of the model(s) that can be tuned, but it uses the values from the original notebook by default. 
" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "@dsl.python_component(\n", - " name='model_training',\n", - " description='Trains image captioning model',\n", - " base_image=BASE_IMAGE\n", - ")\n", - "def train_model(dataset_path: str, preprocess_output: str, \n", - " tokenizing_output: str, train_output_dir: str, valid_output_dir: str, \n", - " batch_size: int, embedding_dim: int, units: int, EPOCHS: int)-> str:\n", - " import json\n", - " import time\n", - " import pickle\n", - " import models\n", - " import numpy as np\n", - " import tensorflow as tf\n", - " from io import BytesIO\n", - " from datetime import datetime\n", - " from sklearn.model_selection import train_test_split\n", - " from tensorflow.python.lib.io import file_io\n", - " from ast import literal_eval as make_tuple\n", - " \n", - " # Convert output from string to tuple and unpack\n", - " preprocess_output = make_tuple(preprocess_output)\n", - " tokenizing_output = make_tuple(tokenizing_output)\n", - " \n", - " # Unpack tuples\n", - " preprocessed_imgs_path = preprocess_output[1]\n", - " tokenizer_path = tokenizing_output[1]\n", - " cap_vector_file_path = tokenizing_output[2]\n", - " \n", - " if valid_output_dir == 'default':\n", - " valid_output_dir = dataset_path + '/valid/'\n", - " \n", - " if train_output_dir == 'default':\n", - " train_output_dir = dataset_path + '/train/'\n", - " \n", - " # load img_name_vector\n", - " f = BytesIO(file_io.read_file_to_string(preprocessed_imgs_path, binary_mode=True))\n", - " img_name_vector = np.load(f)\n", - " \n", - " # Load cap_vector\n", - " f = BytesIO(file_io.read_file_to_string(cap_vector_file_path, binary_mode=True))\n", - " cap_vector = np.load(f)\n", - " \n", - " # Load tokenizer\n", - " with file_io.FileIO(tokenizer_path, 'rb') as src:\n", - " tokenizer = pickle.load(src)\n", - " \n", - " # Split data into training and testing\n", - " img_name_train, img_name_val, cap_train, cap_val = train_test_split(\n", - " img_name_vector,\n", - " cap_vector,\n", - " test_size=0.2,\n", - " random_state=0)\n", - " \n", - " # Create tf.data dataset for training\n", - " BUFFER_SIZE = 1000 # common size used for shuffling dataset\n", - " vocab_size = len(tokenizer.word_index) + 1\n", - " num_steps = len(img_name_train) // batch_size\n", - " \n", - " # Shape of the vector extracted from InceptionV3 is (64, 2048)\n", - " features_shape = 2048\n", - " \n", - " # Load the numpy files\n", - " def map_func(img_name, cap):\n", - " f = BytesIO(file_io.read_file_to_string(img_name.decode('utf-8'), binary_mode=True))\n", - " img_tensor = np.load(f)\n", - " return img_tensor, cap\n", - " \n", - " dataset = tf.data.Dataset.from_tensor_slices((img_name_train, cap_train))\n", - "\n", - " # Use map to load the numpy files in parallel\n", - " dataset = dataset.map(lambda item1, item2: tf.numpy_function(\n", - " map_func, [item1, item2], [tf.float32, tf.int32]),\n", - " num_parallel_calls=tf.data.experimental.AUTOTUNE)\n", - "\n", - " # Shuffle and batch\n", - " dataset = dataset.shuffle(BUFFER_SIZE).batch(batch_size)\n", - " dataset = dataset.prefetch(buffer_size=tf.data.experimental.AUTOTUNE)\n", - " \n", - " # get models from models.py\n", - " encoder = models.CNN_Encoder(embedding_dim)\n", - " decoder = models.RNN_Decoder(embedding_dim, units, vocab_size)\n", - " \n", - " optimizer = tf.keras.optimizers.Adam()\n", - " loss_object = tf.keras.losses.SparseCategoricalCrossentropy(\n", - " from_logits=True, reduction='none')\n", - " \n", 
- " # Create loss function\n", - " def loss_function(real, pred):\n", - " mask = tf.math.logical_not(tf.math.equal(real, 0))\n", - " loss_ = loss_object(real, pred)\n", - "\n", - " mask = tf.cast(mask, dtype=loss_.dtype)\n", - " loss_ *= mask\n", - "\n", - " return tf.reduce_mean(loss_)\n", - " \n", - " # Create check point for training model\n", - " ckpt = tf.train.Checkpoint(encoder=encoder,\n", - " decoder=decoder,\n", - " optimizer = optimizer)\n", - " ckpt_manager = tf.train.CheckpointManager(ckpt, train_output_dir + 'checkpoints/', max_to_keep=5)\n", - " start_epoch = 0\n", - " if ckpt_manager.latest_checkpoint:\n", - " start_epoch = int(ckpt_manager.latest_checkpoint.split('-')[-1])\n", - " \n", - " # Create training step\n", - " loss_plot = []\n", - " @tf.function\n", - " def train_step(img_tensor, target):\n", - " loss = 0\n", - "\n", - " # initializing the hidden state for each batch\n", - " # because the captions are not related from image to image\n", - " hidden = decoder.reset_state(batch_size=target.shape[0])\n", - "\n", - " dec_input = tf.expand_dims([tokenizer.word_index['']] * batch_size, 1)\n", - "\n", - " with tf.GradientTape() as tape:\n", - " features = encoder(img_tensor)\n", - "\n", - " for i in range(1, target.shape[1]):\n", - " # passing the features through the decoder\n", - " predictions, hidden, _ = decoder(dec_input, features, hidden)\n", - "\n", - " loss += loss_function(target[:, i], predictions)\n", - "\n", - " # using teacher forcing\n", - " dec_input = tf.expand_dims(target[:, i], 1)\n", - "\n", - " total_loss = (loss / int(target.shape[1]))\n", - "\n", - " trainable_variables = encoder.trainable_variables + decoder.trainable_variables\n", - "\n", - " gradients = tape.gradient(loss, trainable_variables)\n", - "\n", - " optimizer.apply_gradients(zip(gradients, trainable_variables))\n", - "\n", - " return loss, total_loss\n", - " \n", - " # Create summary writers and loss for plotting loss in tensorboard\n", - " tensorboard_dir = train_output_dir + 'logs/' + datetime.now().strftime(\"%Y%m%d-%H%M%S\")\n", - " train_summary_writer = tf.summary.create_file_writer(tensorboard_dir)\n", - " train_loss = tf.keras.metrics.Mean('train_loss', dtype=tf.float32)\n", - " \n", - " # Train model\n", - " path_to_most_recent_ckpt = None\n", - " for epoch in range(start_epoch, EPOCHS):\n", - " start = time.time()\n", - " total_loss = 0\n", - "\n", - " for (batch, (img_tensor, target)) in enumerate(dataset):\n", - " batch_loss, t_loss = train_step(img_tensor, target)\n", - " total_loss += t_loss\n", - " train_loss(t_loss)\n", - " if batch % 100 == 0:\n", - " print ('Epoch {} Batch {} Loss {:.4f}'.format(\n", - " epoch + 1, batch, batch_loss.numpy() / int(target.shape[1])))\n", - " \n", - " \n", - " \n", - " # Storing the epoch end loss value to plot in tensorboard\n", - " with train_summary_writer.as_default():\n", - " tf.summary.scalar('loss per epoch', train_loss.result(), step=epoch)\n", - " \n", - " train_loss.reset_states()\n", - " \n", - " if epoch % 5 == 0:\n", - " path_to_most_recent_ckpt = ckpt_manager.save()\n", - "\n", - " print ('Epoch {} Loss {:.6f}'.format(epoch + 1,\n", - " total_loss/num_steps))\n", - " print ('Time taken for 1 epoch {} sec\\n'.format(time.time() - start))\n", - " \n", - " # Add plot of loss in tensorboard\n", - " metadata ={\n", - " 'outputs': [{\n", - " 'type': 'tensorboard',\n", - " 'source': tensorboard_dir,\n", - " }]\n", - " }\n", - " with open('/mlpipeline-ui-metadata.json', 'w') as f:\n", - " json.dump(metadata, f)\n", - " \n", - " # 
Save validation data to use for predictions\n", - " val_cap_path = valid_output_dir + 'captions.npy'\n", - " np.save(file_io.FileIO(val_cap_path, 'w'), cap_val)\n", - " \n", - " val_img_path = valid_output_dir + 'images.npy'\n", - " np.save(file_io.FileIO(val_img_path, 'w'), img_name_val)\n", - " \n", - " return path_to_most_recent_ckpt, val_cap_path, val_img_path" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "model_train_op = compiler.build_python_component(\n", - " component_func=train_model,\n", - " staging_gcs_path=PIPELINE_STORAGE_PATH,\n", - " base_image=BASE_IMAGE,\n", - " dependency=[kfp.compiler.VersionedDependency(name='scikit-learn', version='0.21.2')],\n", - " target_image=TRAIN_IMG)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Component for model prediction" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This component uses the model to predict on a new image. It prints the predicted and real caption in the logs and outputs the first 10 attention images with captions in tensorboard. (Currently Kubeflow [only supports up to 10 outputs](https://github.com/kubeflow/pipelines/issues/1641) Tensorboard)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "@dsl.python_component(\n", - " name='model_predictions',\n", - " description='Predicts on images in validation set',\n", - " base_image=BASE_IMAGE\n", - ")\n", - "def predict(dataset_path: str, tokenizing_output: str, \n", - " model_train_output: str, preprocess_output_dir: str, \n", - " valid_output_dir: str, embedding_dim: int, units: int):\n", - " import pickle\n", - " import json\n", - " import models\n", - " import matplotlib.pyplot as plt\n", - " import numpy as np\n", - " import tensorflow as tf\n", - " from datetime import datetime\n", - " from io import BytesIO\n", - " from tensorflow.python.lib.io import file_io\n", - " from ast import literal_eval as make_tuple\n", - " \n", - " tokenizing_output = make_tuple(tokenizing_output)\n", - " model_train_output = make_tuple(model_train_output)\n", - " \n", - " # Unpack tuples\n", - " max_length = int(tokenizing_output[0])\n", - " tokenizer_path = tokenizing_output[1]\n", - " model_path = model_train_output[0]\n", - " val_cap_path = model_train_output[1]\n", - " val_img_path = model_train_output[2]\n", - " \n", - " if preprocess_output_dir == 'default':\n", - " preprocess_output_dir = dataset_path + '/preprocess/'\n", - " \n", - " if valid_output_dir == 'default':\n", - " valid_output_dir = dataset_path + '/valid/'\n", - " \n", - " tensorboard_dir = valid_output_dir + 'logs' + datetime.now().strftime(\"%Y%m%d-%H%M%S\")\n", - " summary_writer = tf.summary.create_file_writer(tensorboard_dir)\n", - "\n", - " # Load tokenizer, model, test_captions, and test_imgs\n", - " \n", - " # Load tokenizer\n", - " with file_io.FileIO(tokenizer_path, 'rb') as src:\n", - " tokenizer = pickle.load(src)\n", - " \n", - " vocab_size = len(tokenizer.word_index) + 1\n", - " \n", - " # Shape of the vector extracted from InceptionV3 is (64, 2048)\n", - " attention_features_shape = 64\n", - " features_shape = 2048\n", - " \n", - " encoder = models.CNN_Encoder(embedding_dim)\n", - " decoder = models.RNN_Decoder(embedding_dim, units, vocab_size)\n", - " \n", - " # Load model from checkpoint (encoder, decoder)\n", - " optimizer = tf.keras.optimizers.Adam()\n", - " ckpt = 
tf.train.Checkpoint(encoder=encoder,\n", - " decoder=decoder, optimizer=optimizer)\n", - " ckpt.restore(model_path).expect_partial()\n", - " \n", - " # Load test captions\n", - " f = BytesIO(file_io.read_file_to_string(val_cap_path, \n", - " binary_mode=True))\n", - " cap_val = np.load(f)\n", - " \n", - " # load test images\n", - " f = BytesIO(file_io.read_file_to_string(val_img_path, \n", - " binary_mode=True))\n", - " img_name_val = np.load(f)\n", - " \n", - " # To get original image locations, replace .npy extension with .jpg and \n", - " # replace preprocessed path with path original images\n", - " PATH = dataset_path + '/train2014/train2014/'\n", - " img_name_val = [img.replace('.npy', '.jpg') for img in img_name_val]\n", - " img_name_val = [img.replace(preprocess_output_dir, PATH) for img in img_name_val]\n", - " \n", - " image_model = tf.keras.applications.InceptionV3(include_top=False,\n", - " weights='imagenet')\n", - " new_input = image_model.input\n", - " hidden_layer = image_model.layers[-1].output\n", - "\n", - " image_features_extract_model = tf.keras.Model(new_input, hidden_layer)\n", - " \n", - " # Preprocess the images using InceptionV3\n", - " def load_image(image_path):\n", - " img = tf.io.read_file(image_path)\n", - " img = tf.image.decode_jpeg(img, channels=3)\n", - " img = tf.image.resize(img, (299, 299))\n", - " img = tf.keras.applications.inception_v3.preprocess_input(img)\n", - " return img, image_path\n", - " \n", - " # Run predictions\n", - " def evaluate(image):\n", - " attention_plot = np.zeros((max_length, attention_features_shape))\n", - "\n", - " hidden = decoder.reset_state(batch_size=1)\n", - "\n", - " temp_input = tf.expand_dims(load_image(image)[0], 0)\n", - " img_tensor_val = image_features_extract_model(temp_input)\n", - " img_tensor_val = tf.reshape(img_tensor_val, (img_tensor_val.shape[0], -1, img_tensor_val.shape[3]))\n", - "\n", - " features = encoder(img_tensor_val)\n", - "\n", - " dec_input = tf.expand_dims([tokenizer.word_index['']], 0)\n", - " result = []\n", - "\n", - " for i in range(max_length):\n", - " predictions, hidden, attention_weights = decoder(dec_input, features, hidden)\n", - "\n", - " attention_plot[i] = tf.reshape(attention_weights, (-1, )).numpy()\n", - "\n", - " predicted_id = tf.argmax(predictions[0]).numpy()\n", - " result.append(tokenizer.index_word[predicted_id])\n", - "\n", - " if tokenizer.index_word[predicted_id] == '':\n", - " return result, attention_plot\n", - "\n", - " dec_input = tf.expand_dims([predicted_id], 0)\n", - "\n", - " attention_plot = attention_plot[:len(result), :]\n", - " return result, attention_plot\n", - " \n", - " # Modified to plot images on tensorboard\n", - " def plot_attention(image, result, attention_plot):\n", - " img = tf.io.read_file(image)\n", - " img = tf.image.decode_jpeg(img, channels=3)\n", - " temp_image = np.array(img.numpy())\n", - " \n", - " len_result = len(result)\n", - " for l in range(min(len_result, 10)): # Tensorboard only supports 10 imgs\n", - " temp_att = np.resize(attention_plot[l], (8, 8))\n", - " plt.title(result[l])\n", - " img = plt.imshow(temp_image)\n", - " plt.imshow(temp_att, cmap='gray', alpha=0.6, extent=img.get_extent())\n", - " \n", - " # Save plt to image to access in tensorboard\n", - " buf = BytesIO()\n", - " plt.savefig(buf, format='png')\n", - " buf.seek(0)\n", - " \n", - " final_im = tf.image.decode_png(buf.getvalue(), channels=4)\n", - " final_im = tf.expand_dims(final_im, 0)\n", - " with summary_writer.as_default():\n", - " 
tf.summary.image(\"attention\", final_im, step=l)\n", - " \n", - " # Select a random image to caption from validation set\n", - " rid = np.random.randint(0, len(img_name_val))\n", - " image = img_name_val[rid]\n", - " real_caption = ' '.join([tokenizer.index_word[i] for i in cap_val[rid] if i not in [0]])\n", - " result, attention_plot = evaluate(image)\n", - " print ('Image:', image)\n", - " print ('Real Caption:', real_caption)\n", - " print ('Prediction Caption:', ' '.join(result))\n", - " plot_attention(image, result, attention_plot)\n", - " \n", - " # Plot attention images on tensorboard\n", - " metadata = {\n", - " 'outputs': [{\n", - " 'type': 'tensorboard',\n", - " 'source': tensorboard_dir,\n", - " }]\n", - " }\n", - " with open('/mlpipeline-ui-metadata.json', 'w') as f:\n", - " json.dump(metadata, f)\n", - " " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "predict_op = compiler.build_python_component(\n", - " component_func=predict,\n", - " staging_gcs_path=PIPELINE_STORAGE_PATH,\n", - " base_image=BASE_IMAGE,\n", - " dependency=[kfp.compiler.VersionedDependency(name='matplotlib', version='3.1.0')],\n", - " target_image=PREDICT_IMG)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Create and run pipeline\n", - "### Create pipeline\n", - "The pipeline parameters are specified below in the `caption pipeline` function signature. Using the value `'default'` for the output directories saves them in a subdirectory of `GCS_DATASET_PATH`.\n", - "\n", - "### Requirements\n", - "* The pipeline can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", - "* Read/write permissions for the storage buckets." 
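A quick way to confirm the second requirement from inside the notebook is to write and read back a small probe object; this is only a sketch that reuses the `GCS_DATASET_PATH` defined earlier and the same `file_io` API the components use:

```
from tensorflow.python.lib.io import file_io

# Sketch: verify read/write access to the bucket before launching the pipeline.
probe = GCS_DATASET_PATH + '/.kfp_write_probe'
with file_io.FileIO(probe, 'w') as f:
    f.write('ok')
print(file_io.read_file_to_string(probe))  # should print: ok
file_io.delete_file(probe)
```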
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "@dsl.pipeline(\n", - " name='Image Captioning Pipeline',\n", - " description='A pipeline that trains a model to caption images'\n", - ")\n", - "def caption_pipeline(\n", - " dataset_path=GCS_DATASET_PATH,\n", - " num_examples=30000,\n", - " epochs=20,\n", - " training_batch_size=64,\n", - " hidden_state_size=512,\n", - " vocab_size=5000,\n", - " embedding_dim=256,\n", - " preprocessing_batch_size=16,\n", - " preprocessing_output_dir='default',\n", - " tokenizing_output_dir='default',\n", - " training_output_dir='default',\n", - " validation_output_dir='default',\n", - " ): \n", - " \n", - " preprocessing_img_task = preprocessing_img_op(\n", - " dataset_path, \n", - " output_dir=preprocessing_output_dir,\n", - " batch_size=preprocessing_batch_size, \n", - " num_examples=num_examples)\n", - " \n", - " tokenize_captions_task = tokenize_captions_op(\n", - " dataset_path, \n", - " preprocessing_img_task.output, \n", - " output_dir=tokenizing_output_dir, \n", - " top_k=vocab_size)\n", - " \n", - " model_train_task = model_train_op(\n", - " dataset_path, \n", - " preprocessing_img_task.output,\n", - " tokenize_captions_task.output,\n", - " train_output_dir=training_output_dir, \n", - " valid_output_dir=validation_output_dir,\n", - " batch_size=training_batch_size, \n", - " embedding_dim=embedding_dim, \n", - " units=hidden_state_size, \n", - " epochs=epochs)\n", - " \n", - " predict_task = predict_op(\n", - " dataset_path,\n", - " tokenize_captions_task.output, \n", - " model_train_task.output,\n", - " preprocess_output_dir=preprocessing_output_dir,\n", - " valid_output_dir=validation_output_dir,\n", - " embedding_dim=embedding_dim,\n", - " units=hidden_state_size)\n", - "\n", - " # The pipeline should be able to authenticate to GCP.\n", - " # Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", - " #\n", - " # For example, you may uncomment the following lines to use GSA keys.\n", - " # from kfp.gcp import use_gcp_secret\n", - " # kfp.dsl.get_pipeline_conf().add_op_transformer(use_gcp_secret('user-gcp-sa'))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Run pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Test run to make sure all parts of the pipeline are working properly\n", - "arguments = {\n", - " 'dataset_path': GCS_DATASET_PATH, \n", - " 'num_examples': 100, # Small test to make sure pipeline functions properly\n", - " 'training_batch_size': 16, # has to be smaller since only training on 80/100 examples \n", - "}\n", - "\n", - "kfp.Client().create_run_from_pipeline_func(pipeline, arguments=arguments)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Model checkpoints are saved at `training_output_dir`, which is `[GCS_DATASET_PATH]/train/checkpoints/` by default." 
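To sanity-check a finished run, one option is to list that checkpoint directory and resolve the newest checkpoint. This is only a sketch and assumes `GCS_DATASET_PATH` from earlier in the notebook is still defined and the default output layout was used:

```
import tensorflow as tf

# Sketch: inspect the checkpoints written by the training component,
# assuming the default output layout described above.
ckpt_dir = GCS_DATASET_PATH + '/train/checkpoints/'
print(tf.io.gfile.listdir(ckpt_dir))         # checkpoint index and data files
print(tf.train.latest_checkpoint(ckpt_dir))  # path to the newest checkpoint
```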
- ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.3" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} \ No newline at end of file diff --git a/samples/contrib/image-captioning-gcp/README.md b/samples/contrib/image-captioning-gcp/README.md deleted file mode 100644 index 66afd87e032..00000000000 --- a/samples/contrib/image-captioning-gcp/README.md +++ /dev/null @@ -1,51 +0,0 @@ -# Image Captioning TF 2.0 - -## Overview -This notebook is an example of how to convert an existing Tensorflow notebook into a Kubeflow pipeline using jupyter notebook. Specifically, this notebook takes an example tensorflow notebook, [image captioning with attention](https://colab.sandbox.google.com/github/tensorflow/docs/blob/master/site/en/r2/tutorials/text/image_captioning.ipynb), and creates a kubeflow pipeline. This pipeline produces a model that can generate captions for images. - -### Example generated captions -The following example captions were created when using `num_examples = 30000`. - -![bus-output](https://user-images.githubusercontent.com/17008638/61419442-17989a00-a8b3-11e9-9ab3-a5a304ff96d0.PNG) - -![sandwich-output](https://user-images.githubusercontent.com/17008638/61419487-44e54800-a8b3-11e9-9b7f-68ccc970c10d.PNG) - -## Setup - -### Setup notebook server -This pipeline requires you to [setup a notebook server](https://www.kubeflow.org/docs/notebooks/setup/) in the Kubeflow UI. After you are setup, upload this notebook and then run it in the notebook server. - -### Create a GCS bucket -This pipeline requires a GCS bucket. If you haven't already, [create a GCS bucket](https://cloud.google.com/storage/docs/creating-buckets) to run the notebook. Make sure to create the storage bucket in the same project that you are running Kubeflow on to have the proper permissions by default. You can also create a GCS bucket by running `gsutil mb -p gs://`. - -### Upload the notebook in the Kubeflow UI -In order to run this pipeline, make sure to upload the notebook to your notebook server in the Kubeflow UI. You can clone this repo in the Jupyter notebook server by connecting to the notebook server and then selecting New > Terminal. In the terminal type `git clone https://github.com/kubeflow/pipelines.git`. - -### Create base image -In order to run this pipeline, you need to first build the docker base image and upload it to a container registry. This can be done locally with the following commands: - -``` -git clone https://github.com/kubeflow/pipelines.git -cd pipelines/samples/contrib/image-captioning-gcp/src -docker build -t img-cap . -docker tag img-cap gcr.io/[PROJECT-ID]/img-cap:latest -docker push gcr.io/[PROJECT ID]/img-cap:latest -``` - -### Download dataset -To download the dataset, run the first few cells in the notebook. - -## Artifacts -Below are some screenshots of the final pipeline and the model outputs. - -![pipeline-screenshot](https://user-images.githubusercontent.com/17008638/61160416-41694f80-a4b4-11e9-9317-5a92f625c173.png) - -![attention-screenshot](https://user-images.githubusercontent.com/17008638/61160441-59d96a00-a4b4-11e9-809b-f3df7cbe0dae.PNG) - -## Steps taken to convert the original TF 2.0 notebook -1. 
Componentize notebook to run in different steps, and not linearly. -2. Store the dataset in GCS to make it easily accessible in Kubeflow. -3. Use `file_io.FileIO()` instead of `open()` when loading files from GCS. -4. To pass multiple outputs downstream, pass them as a tuple of strings. Kubeflow converts this tuple to a string when you pass it downstream. So, you have to convert it from a string back to a tuple in the downstream component to get the multiple outputs. -5. To pass many numpy arrays to downstream components, first save them on GCS. Put the paths to the saved numpy files in a new array, and then save that array on GCS as well. Pass the path to this array to the downstream components. -6. Use `tf.io.read_file` and then `tf.image.decode_jpeg` instead of `PIL.Image` to be compatible with GCS. diff --git a/samples/contrib/image-captioning-gcp/src/Dockerfile b/samples/contrib/image-captioning-gcp/src/Dockerfile deleted file mode 100644 index 58ae295df92..00000000000 --- a/samples/contrib/image-captioning-gcp/src/Dockerfile +++ /dev/null @@ -1,2 +0,0 @@ -FROM tensorflow/tensorflow:2.0.0b0-py3 -ADD models.py /ml/ diff --git a/samples/contrib/image-captioning-gcp/src/models.py b/samples/contrib/image-captioning-gcp/src/models.py deleted file mode 100644 index 9941da1b6a6..00000000000 --- a/samples/contrib/image-captioning-gcp/src/models.py +++ /dev/null @@ -1,102 +0,0 @@ -# Copyright 2019 The Kubeflow Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""This file contains the models used in the image captioning pipeline""" - -import tensorflow as tf - - -class BahdanauAttention(tf.keras.Model): - def __init__(self, units): - super(BahdanauAttention, self).__init__() - self.W1 = tf.keras.layers.Dense(units) - self.W2 = tf.keras.layers.Dense(units) - self.V = tf.keras.layers.Dense(1) - - def call(self, features, hidden): - # features(CNN_encoder output) shape == (batch_size, 64, embedding_dim) - - # hidden shape == (batch_size, hidden_size) - # hidden_with_time_axis shape == (batch_size, 1, hidden_size) - hidden_with_time_axis = tf.expand_dims(hidden, 1) - - # score shape == (batch_size, 64, hidden_size) - score = tf.nn.tanh(self.W1(features) + self.W2(hidden_with_time_axis)) - - # attention_weights shape == (batch_size, 64, 1) - # you get 1 at the last axis because you are applying score to self.V - attention_weights = tf.nn.softmax(self.V(score), axis=1) - - # context_vector shape after sum == (batch_size, hidden_size) - context_vector = attention_weights * features - context_vector = tf.reduce_sum(context_vector, axis=1) - - return context_vector, attention_weights - -# CNN Encoder model -class CNN_Encoder(tf.keras.Model): - # Since you have already extracted the features and dumped it using pickle - # This encoder passes those features through a Fully connected layer - def __init__(self, embedding_dim): - super(CNN_Encoder, self).__init__() - # shape after fc == (batch_size, 64, embedding_dim) - self.fc = tf.keras.layers.Dense(embedding_dim) - - def call(self, x): - x = self.fc(x) - x = tf.nn.relu(x) - return x - -# RNN Decoder model -class RNN_Decoder(tf.keras.Model): - def __init__(self, embedding_dim, units, vocab_size): - super(RNN_Decoder, self).__init__() - self.units = units - - self.embedding = tf.keras.layers.Embedding(vocab_size, embedding_dim) - self.gru = tf.keras.layers.GRU(self.units, - return_sequences=True, - return_state=True, - recurrent_initializer='glorot_uniform') - self.fc1 = tf.keras.layers.Dense(self.units) - self.fc2 = tf.keras.layers.Dense(vocab_size) - - self.attention = BahdanauAttention(self.units) - - def call(self, x, features, hidden): - # defining attention as a separate model - context_vector, attention_weights = self.attention(features, hidden) - - # x shape after passing through embedding == (batch_size, 1, embedding_dim) - x = self.embedding(x) - - # x shape after concatenation == (batch_size, 1, embedding_dim + hidden_size) - x = tf.concat([tf.expand_dims(context_vector, 1), x], axis=-1) - - # passing the concatenated vector to the GRU - output, state = self.gru(x) - - # shape == (batch_size, max_length, hidden_size) - x = self.fc1(output) - - # x shape == (batch_size * max_length, hidden_size) - x = tf.reshape(x, (-1, x.shape[2])) - - # output shape == (batch_size * max_length, vocab) - x = self.fc2(x) - - return x, state, attention_weights - - def reset_state(self, batch_size): - return tf.zeros((batch_size, self.units)) diff --git a/samples/contrib/intel-oneapi-samples/README.md b/samples/contrib/intel-oneapi-samples/README.md deleted file mode 100644 index 451a1f2787c..00000000000 --- a/samples/contrib/intel-oneapi-samples/README.md +++ /dev/null @@ -1,164 +0,0 @@ -

-[Image: Intel Logo]
            - -# Intel® Optimized XGBoost daal4py Kubeflow Pipeline - -This example demonstrates how to optimize an XGBoost Kubeflow Pipeline using a sample -dataset to predict the probability of loan default. -The reference solution enables the use of the -[Intel® Optimization for XGBoost*](https://www.intel.com/content/www/us/en/developer/tools/oneapi/optimization-for-xgboost.html), -[Intel® oneAPI Data Analytics Library (Intel® oneDAL)](https://www.intel.com/content/www/us/en/developer/tools/oneapi/onedal.html), -and [Intel® Extension for Scikit-Learn*](https://www.intel.com/content/www/us/en/developer/tools/oneapi/scikit-learn.html) -to accelerate an end-to-end training and inference XGBoost pipeline. - -## Table of Contents -- [System Requirements](#system-requirements) -- [Overview](#pipeline-overview) -- [Pipeline Optimizations](#pipeline-optimizations) -- [Pipeline Parameters](#pipeline-parameters) -- [Pipeline Results](#pipeline-results) - -## System Requirements - -- Before running the code for the pipeline, please ensure you have downloaded and installed -[Kubeflow Pipelines SDK](https://v1-5-branch.kubeflow.org/docs/components/pipelines/sdk-v2/) -v2.0.1 or above. -- To attain the most performance benefits from the Intel software optimizations, deploy the -pipeline on a 3rd or 4th Generation [Intel® Xeon® Processor](https://www.intel.com/content/www/us/en/products/details/processors/xeon.html). - -## Pipeline Overview - -This pipeline is derived from the -[Loan Default Risk Prediction AI Reference Kit](https://github.com/oneapi-src/loan-default-risk-prediction). -The code has been enhanced through refactoring to achieve better modularity and suitability for -Kubeflow Pipelines. The credit risk data set used in the pipeline is obtained from -[Kaggle](https://www.kaggle.com/datasets/laotse/credit-risk-dataset)* -and synthetically augmented for testing and benchmarking purposes. Below is a graph of the full -XGBoost daal4py Kubeflow Pipeline. - -

-[Image: Intel XGBoost daal4py Pipeline]

            - -The pipeline consists of the following seven components: -- **Load data**: This component loads the dataset (`credit_risk_dataset.csv`) from the URL specified -in the pipeline run parameters and performs synthetic data augmentation. -- **Create training and test sets**: This component splits the data into training and test sets of an -approximately 75:25 split for model evaluation. -- **Preprocess features**: This component transforms the categorical features of the training and -test sets by using one-hot encoding, imputes missing values, and power-transforms numerical features. -- **Train XGBoost model**: This component trains an XGBoost model using the accelerations provided by -the Intel Optimizations for XGBoost. -- **Convert XGBoost model to daal4py**: This component converts the XGBoost model to an -inference-optimized daal4py classifier. -- **daal4py Inference**: This component computes predictions using the inference-optimized daal4py -classifier and evaluates model performance. It returns a summary of the precision, recall, and F1 -score for each class, as well as the area under the curve (AUC) and accuracy score of the model. -- **Plot ROC Curve**: This component performs model validation on the test data and generates a -graph of the receiver operating characteristic (ROC) curve. - -[Back to Table of Contents](#table-of-contents) - -## Pipeline Optimizations - -#### Enable the Intel Optimization for XGBoost - -The [XGBoost optimizations](https://www.intel.com/content/www/us/en/developer/tools/oneapi/optimization-for-xgboost.html) -for training and inference on CPUs are upstreamed into the open source XGBoost framework. -Ensure you are using the latest version of XGBoost to access the most Intel optimizations. -The following code sample is implemented in the `train_xgboost_model` component. - -``` -dtrain = xgb.DMatrix(X_train.values, y_train.values) - -# define model parameters -params = { - "objective": "binary:logistic", - "eval_metric": "logloss", - "nthread": 4, # num_cpu - "tree_method": "hist", - "learning_rate": 0.02, - "max_depth": 10, - "min_child_weight": 6, - "n_jobs": 4, # num_cpu, - "verbosity": 1} - -# train XGBoost model -clf = xgb.train(params = params, - dtrain = dtrain, - num_boost_round = 500) -``` - -#### Convert the Trained XGBoost Model to daal4py - -[daal4py](https://www.intel.com/content/www/us/en/developer/articles/guide/a-daal4py-introduction-and-getting-started-guide.html) -is the Python API of the oneAPI Data Analytics Library, oneDAL. daal4py helps to further -optimize model prediction, or inference, on CPUs. The following code demonstrates how to -convert a trained XGBoost model into daal4py format and calculate the predicted -classification results, implemented in the `convert_xgboost_to_daal4py` and `daal4py_inference` -components. - -``` -# convert XGBoost model to daal4py -daal_model = d4p.get_gbt_model_from_xgboost(clf) - - -# compute class labels and probabilities -daal_prediction = d4p.gbt_classification_prediction( - nClasses = 2, - resultsToEvaluate = "computeClassLabels|computeClassProbabilities" -).compute(X_test, daal_model) -``` - -#### Enable the Intel Extension for Scikit-Learn -The [Intel Extension for Scikit-Learn](https://www.intel.com/content/www/us/en/developer/tools/oneapi/scikit-learn.html) -provides CPU accelerations for many scikit-learn libraries. Below is an example -using the scikit-learn extension to accelerate the computation of the ROC curve. 
-The following code is implemented in the `plot_roc_curve` component. - -``` -# call patch_sklearn() before importing scikit-learn libraries -from sklearnex import patch_sklearn -patch_sklearn() -from sklearn.metrics import roc_curve - - -# calculate the ROC curve using the CPU-accelerated version -fpr, tpr, thresholds = roc_curve( - y_true = prediction_data['y_test'], - y_score = prediction_data['y_prob'], - pos_label = 1) -``` - -[Back to Table of Contents](#table-of-contents) - -## Pipeline Parameters - -The XGBoost daal4py Kubeflow Pipeline consists of the following two parameters: -- `data_url`: The sample dataset can be downloaded from -[Kaggle](https://www.kaggle.com/datasets/laotse/credit-risk-dataset) -and hosted on a public URL of your choice. -- `data_size`: The recommended data size for the pipeline is 1 million. - -## Pipeline Results - -When the Pipeline tasks `daal4py-inference` and `plot-roc-curve` are finished running, -click on the Visualization tab of the `metrics` and `roc_curve_daal4py` artifacts to -view the model performance results. You should see a similar graph of the receiver -operating characteristic (ROC) curve as the one below. - -

-[Image: ROC Curve]
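If you prefer to kick off the run programmatically rather than through the Kubeflow Pipelines UI, the sketch below shows one possible way to compile and submit the pipeline with the two values described in [Pipeline Parameters](#pipeline-parameters). It is illustrative only: the client host and the dataset URL are placeholders, and the import assumes the `intel_xgboost_daal4py_pipeline.py` script from this directory is on your Python path.

```
# Illustrative sketch: compile and submit the pipeline with the two
# documented parameters. The host and data_url below are placeholder values.
import kfp
from kfp import compiler

# Assumes intel_xgboost_daal4py_pipeline.py is importable from the current directory.
from intel_xgboost_daal4py_pipeline import intel_xgboost_daal4py_pipeline

# Optionally compile to YAML so the pipeline can also be uploaded via the UI.
compiler.Compiler().compile(
    pipeline_func=intel_xgboost_daal4py_pipeline,
    package_path="intel-xgboost-daal4py-pipeline.yaml",
)

# Point the client at your KFP API endpoint (placeholder shown here).
client = kfp.Client(host="http://localhost:8888")

run = client.create_run_from_pipeline_func(
    intel_xgboost_daal4py_pipeline,
    arguments={
        "data_url": "https://example.com/credit_risk_dataset.csv",  # hypothetical hosting URL
        "data_size": 1000000,  # recommended size, see Pipeline Parameters
    },
)
print("Run ID:", run.run_id)
```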

            - -[Back to Table of Contents](#table-of-contents) - -## Next Steps - -Thanks for checking out this tutorial! If you would like to implement this reference solution -on a cloud service provider like AWS, Azure, or GCP, you can view the full deployment steps, -as well as additional Intel® Optimized Cloud Modules -[here](https://www.intel.com/content/www/us/en/developer/topic-technology/cloud-optimization.html). - -[Back to Table of Contents](#table-of-contents) \ No newline at end of file diff --git a/samples/contrib/intel-oneapi-samples/assets/intel-xgb-d4p-pipeline-roc-curve.png b/samples/contrib/intel-oneapi-samples/assets/intel-xgb-d4p-pipeline-roc-curve.png deleted file mode 100644 index 73d0b530ac6..00000000000 Binary files a/samples/contrib/intel-oneapi-samples/assets/intel-xgb-d4p-pipeline-roc-curve.png and /dev/null differ diff --git a/samples/contrib/intel-oneapi-samples/assets/intel-xgb-d4p-pipeline.png b/samples/contrib/intel-oneapi-samples/assets/intel-xgb-d4p-pipeline.png deleted file mode 100644 index d826cb2a76a..00000000000 Binary files a/samples/contrib/intel-oneapi-samples/assets/intel-xgb-d4p-pipeline.png and /dev/null differ diff --git a/samples/contrib/intel-oneapi-samples/assets/logo-classicblue-800px.png b/samples/contrib/intel-oneapi-samples/assets/logo-classicblue-800px.png deleted file mode 100644 index c9f1c817222..00000000000 Binary files a/samples/contrib/intel-oneapi-samples/assets/logo-classicblue-800px.png and /dev/null differ diff --git a/samples/contrib/intel-oneapi-samples/intel_xgboost_daal4py_pipeline.py b/samples/contrib/intel-oneapi-samples/intel_xgboost_daal4py_pipeline.py deleted file mode 100644 index 377722136c9..00000000000 --- a/samples/contrib/intel-oneapi-samples/intel_xgboost_daal4py_pipeline.py +++ /dev/null @@ -1,502 +0,0 @@ -from kfp import dsl -from kfp import compiler -from kfp.dsl import (Input, Output, Dataset, Model, Metrics, ClassificationMetrics) - -@dsl.component( - base_image="python:3.10", - packages_to_install=["numpy", "pandas", "loguru"]) -def load_data( - data_url: str, - data_size: int, - credit_risk_dataset: Output[Dataset]): - - ''' - Downloads credit_risk_dataset.csv file and generates - additional synthetic data for benchmarking and testing purposes. 
- - Input Parameters - ---------------- - data_url : str - url where the dataset is hosted - data_size : int - size of final dataset desired, default 1M rows - - Output Artifacts - ---------------- - credit_risk_dataset : Dataset - data that has been synthetically augmented or loaded from URL provided - ''' - - import numpy as np - import pandas as pd - from loguru import logger - - logger.info("Loading csv from {}", data_url) - data = pd.read_csv(data_url) - logger.info("Done!") - - # number of rows to generate - if data_size < data.shape[0]: - pass - else: - logger.info("Generating {:,} rows of data...", data_size) - repeats = data_size // len(data) - data = data.loc[np.repeat(data.index.values, repeats + 1)] - data = data.iloc[:data_size] - - # perturbing all int/float columns - person_age = data["person_age"].values + np.random.randint( - -1, 1, size=len(data) - ) - person_income = data["person_income"].values + np.random.normal( - 0, 10, size=len(data) - ) - person_emp_length = data[ - "person_emp_length" - ].values + np.random.randint(-1, 1, size=len(data)) - loan_amnt = data["loan_amnt"].values + np.random.normal( - 0, 5, size=len(data) - ) - loan_int_rate = data["loan_int_rate"].values + np.random.normal( - 0, 0.2, size=len(data) - ) - loan_percent_income = data["loan_percent_income"].values + ( - np.random.randint(0, 100, size=len(data)) / 1000 - ) - cb_person_cred_hist_length = data[ - "cb_person_cred_hist_length" - ].values + np.random.randint(0, 2, size=len(data)) - - # perturbing all binary columns - perturb_idx = np.random.rand(len(data)) > 0.1 - random_values = np.random.choice( - data["person_home_ownership"].unique(), len(data) - ) - person_home_ownership = np.where( - perturb_idx, data["person_home_ownership"], random_values - ) - perturb_idx = np.random.rand(len(data)) > 0.1 - random_values = np.random.choice( - data["loan_intent"].unique(), len(data) - ) - loan_intent = np.where(perturb_idx, data["loan_intent"], random_values) - perturb_idx = np.random.rand(len(data)) > 0.1 - random_values = np.random.choice( - data["loan_grade"].unique(), len(data) - ) - loan_grade = np.where(perturb_idx, data["loan_grade"], random_values) - perturb_idx = np.random.rand(len(data)) > 0.1 - random_values = np.random.choice( - data["cb_person_default_on_file"].unique(), len(data) - ) - cb_person_default_on_file = np.where( - perturb_idx, data["cb_person_default_on_file"], random_values - ) - data = pd.DataFrame( - list( - zip( - person_age, - person_income, - person_home_ownership, - person_emp_length, - loan_intent, - loan_grade, - loan_amnt, - loan_int_rate, - data["loan_status"].values, - loan_percent_income, - cb_person_default_on_file, - cb_person_cred_hist_length, - ) - ), - columns = data.columns, - ) - - data = data.drop_duplicates() - assert len(data) == data_size - data.reset_index(drop = True) - - data.to_csv(credit_risk_dataset.path, index = None) - -@dsl.component( - base_image="python:3.10", - packages_to_install=["pandas", "scikit-learn", "loguru"]) -def create_train_test_set( - data: Input[Dataset], - x_train_data: Output[Dataset], - y_train_data: Output[Dataset], - x_test_data: Output[Dataset], - y_test_data: Output[Dataset]): - - ''' - Creates 75:25 split of input dataset for model evaluation. 
- - Input Artifacts - --------------- - data : Dataset - dataset that has been synthetically augmented by the load_data() function - - Output Artifacts - ---------------- - x_train_data : Dataset - training features, 75% of original dataset - y_train_data : Dataset - training labels of target variable, loan_status - x_test_data : Dataset - test features, 25% of original dataset - y_test_data : Dataset - test labels of target variable, loan_status - ''' - - import pandas as pd - from loguru import logger - from sklearn.model_selection import train_test_split - - data = pd.read_csv(data.path) - - logger.info("Creating training and test sets...") - train, test = train_test_split(data, test_size = 0.25, random_state = 0) - - X_train = train.drop(["loan_status"], axis = 1) - y_train = train["loan_status"] - - X_test = test.drop(["loan_status"], axis = 1) - y_test = test["loan_status"] - - logger.info("Training and test sets created.\n" \ - "X_train size: {}, y_train size: {}\n" \ - "X_test size: {}, y_test size: {}", - X_train.shape, y_train.shape, X_test.shape, y_test.shape) - - X_train.to_csv(x_train_data.path, index = False) - y_train.to_csv(y_train_data.path, index = False, header = None) - X_test.to_csv(x_test_data.path, index = False) - y_test.to_csv(y_test_data.path, index = False, header = None) - -@dsl.component( - base_image="python:3.10", - packages_to_install=["pandas", "scikit-learn"]) -def preprocess_features( - x_train: Input[Dataset], - x_test: Input[Dataset], - x_train_processed: Output[Dataset], - x_test_processed: Output[Dataset]): - - ''' - Performs data preprocessing of training and test features. - - Input Artifacts - --------------- - x_train : Dataset - original unprocessed training features - x_test : Dataset - original unprocessed test features - - Output Artifacts - ---------------- - x_train_processed : Dataset - processed and scaled training features - x_test_processed : Dataset - processed and scaled test features - ''' - - import pandas as pd - from sklearn.compose import ColumnTransformer - from sklearn.impute import SimpleImputer - from sklearn.pipeline import Pipeline - from sklearn.preprocessing import OneHotEncoder, PowerTransformer - - X_train = pd.read_csv(x_train.path) - X_test = pd.read_csv(x_test.path) - - # data processing pipeline - num_imputer = Pipeline(steps=[("imputer", SimpleImputer(strategy = "median"))]) - pow_transformer = PowerTransformer() - cat_transformer = OneHotEncoder(handle_unknown = "ignore") - preprocessor = ColumnTransformer( - transformers = [ - ( - "num", - num_imputer, - [ - "loan_int_rate", - "person_emp_length", - "cb_person_cred_hist_length", - ], - ), - ( - "pow", - pow_transformer, - ["person_age", "person_income", "loan_amnt", "loan_percent_income"], - ), - ( - "cat", - cat_transformer, - [ - "person_home_ownership", - "loan_intent", - "loan_grade", - "cb_person_default_on_file", - ], - ), - ], - remainder="passthrough", - ) - - preprocess = Pipeline(steps = [("preprocessor", preprocessor)]) - - X_train = pd.DataFrame(preprocess.fit_transform(X_train)) - X_test = pd.DataFrame(preprocess.transform(X_test)) - - X_train.to_csv(x_train_processed.path, index = False, header = None) - X_test.to_csv(x_test_processed.path, index = False, header = None) - -@dsl.component( - base_image="python:3.10", - packages_to_install=["pandas", "xgboost", "joblib", "loguru"]) -def train_xgboost_model( - x_train: Input[Dataset], - y_train: Input[Dataset], - xgb_model: Output[Model]): - - ''' - Trains an XGBoost classification model. 
- - Input Artifacts - --------------- - x_train : Dataset - processed and scaled training features - y_train : Dataset - training labels of target variable, loan_status - - Output Artifacts - ---------------- - xgb_model : Model - trained XGBoost model - ''' - - import joblib - import pandas as pd - import xgboost as xgb - from loguru import logger - - X_train = pd.read_csv(x_train.path, header = None) - y_train = pd.read_csv(y_train.path, header = None) - - dtrain = xgb.DMatrix(X_train.values, y_train.values) - - # define model parameters - params = { - "objective": "binary:logistic", - "eval_metric": "logloss", - "nthread": 4, # num_cpu - "tree_method": "hist", - "learning_rate": 0.02, - "max_depth": 10, - "min_child_weight": 6, - "n_jobs": 4, # num_cpu, - "verbosity": 1 - } - - # train XGBoost model - logger.info("Training XGBoost model...") - clf = xgb.train(params = params, - dtrain = dtrain, - num_boost_round = 500) - - with open(xgb_model.path, "wb") as file_writer: - joblib.dump(clf, file_writer) - -@dsl.component( - base_image="python:3.10", - packages_to_install=["daal4py", "joblib", "loguru"]) -def convert_xgboost_to_daal4py( - xgb_model: Input[Model], - daal4py_model: Output[Model]): - - ''' - Converts XGBoost model to inference-optimized daal4py classifier. - - Input Artifacts - --------------- - xgb_model : Model - trained XGBoost classifier - - Output Artifacts - ---------------- - daal4py_model : Model - inference-optimized daal4py classifier - ''' - - import daal4py as d4p - import joblib - from loguru import logger - - with open(xgb_model.path, "rb") as file_reader: - clf = joblib.load(file_reader) - - logger.info("Converting XGBoost model to Daal4py...") - daal_model = d4p.get_gbt_model_from_xgboost(clf) - logger.info("Done!") - - with open(daal4py_model.path, "wb") as file_writer: - joblib.dump(daal_model, file_writer) - -@dsl.component( - base_image="python:3.10", - packages_to_install=["daal4py", "pandas", "scikit-learn", - "scikit-learn-intelex", "joblib"]) -def daal4py_inference( - x_test: Input[Dataset], - y_test: Input[Dataset], - daal4py_model: Input[Model], - prediction_data: Output[Dataset], - report: Output[Dataset], - metrics: Output[Metrics] -): - - ''' - Computes predictions using the inference-optimized daal4py classifier - and evaluates model performance. 
- - Input Artifacts - --------------- - x_test : Dataset - processed and scaled test features - y_test : Dataset - test labels of target variable, loan_status - daal4py_model : Model - inference-optimized daal4py classifier - - Output Artifacts - ---------------- - prediction_data : Dataset - dataset containing true test labels and predicted probabilities - report : Dataset - summary of the precision, recall, F1 score for each class - metrics : Metrics - scalar classification metrics containing the model's AUC and accuracy - ''' - - import daal4py as d4p - import joblib - import pandas as pd - - from sklearnex import patch_sklearn - patch_sklearn() - from sklearn.metrics import roc_auc_score, accuracy_score, classification_report - - X_test = pd.read_csv(x_test.path, header = None) - y_test = pd.read_csv(y_test.path, header = None) - - with open(daal4py_model.path, "rb") as file_reader: - daal_model = joblib.load(file_reader) - - daal_prediction = d4p.gbt_classification_prediction( - nClasses = 2, - resultsToEvaluate = "computeClassLabels|computeClassProbabilities" - ).compute(X_test, daal_model) - - y_pred = daal_prediction.prediction - y_prob = daal_prediction.probabilities[:,1] - - results = classification_report( - y_test, y_pred, - target_names = ["Non-Default", "Default"], - output_dict = True - ) - results = pd.DataFrame(results).transpose() - results.to_csv(report.path) - - auc = roc_auc_score(y_test, y_prob) - metrics.log_metric('AUC', auc) - - accuracy = (accuracy_score(y_test, y_pred)*100) - metrics.log_metric('Accuracy', accuracy) - - predictions = pd.DataFrame({'y_test': y_test.values.flatten(), - 'y_prob': y_prob}) - predictions.to_csv(prediction_data.path, index = False) - -@dsl.component( - base_image="python:3.10", - packages_to_install=["numpy", "pandas", "scikit-learn", - "scikit-learn-intelex"]) -def plot_roc_curve( - predictions: Input[Dataset], - class_metrics: Output[ClassificationMetrics] -): - - ''' - Function to plot Receiver Operating Characteristic (ROC) curve. 
- - Input Artifacts - --------------- - predictions : Dataset - dataset containing true test labels and predicted probabilities - - Output Artifacts - ---------------- - class_metrics : ClassificationMetrics - classification metrics containing fpr, tpr, and thresholds - ''' - - import pandas as pd - from numpy import inf - - from sklearnex import patch_sklearn - patch_sklearn() - from sklearn.metrics import roc_curve - - prediction_data = pd.read_csv(predictions.path) - - fpr, tpr, thresholds = roc_curve( - y_true = prediction_data['y_test'], - y_score = prediction_data['y_prob'], - pos_label = 1) - thresholds[thresholds == inf] = 0 - - class_metrics.log_roc_curve(fpr, tpr, thresholds) - -@dsl.pipeline -def intel_xgboost_daal4py_pipeline( - data_url: str, - data_size: int): - - load_data_op = load_data( - data_url = data_url, data_size = data_size - ) - - create_train_test_set_op = create_train_test_set( - data = load_data_op.outputs['credit_risk_dataset'] - ) - - preprocess_features_op = preprocess_features( - x_train = create_train_test_set_op.outputs['x_train_data'], - x_test = create_train_test_set_op.outputs['x_test_data'] - ) - - train_xgboost_model_op = train_xgboost_model( - x_train = preprocess_features_op.outputs['x_train_processed'], - y_train = create_train_test_set_op.outputs['y_train_data'] - ) - - convert_xgboost_to_daal4py_op = convert_xgboost_to_daal4py( - xgb_model = train_xgboost_model_op.outputs['xgb_model'] - ) - - daal4py_inference_op = daal4py_inference( - x_test = preprocess_features_op.outputs['x_test_processed'], - y_test = create_train_test_set_op.outputs['y_test_data'], - daal4py_model = convert_xgboost_to_daal4py_op.outputs['daal4py_model'] - ) - - plot_roc_curve_op = plot_roc_curve( - predictions = daal4py_inference_op.outputs['prediction_data'] - ) - -if __name__ == '__main__': - # Compiling the pipeline - compiler.Compiler().compile( - pipeline_func = intel_xgboost_daal4py_pipeline, - package_path = 'intel-xgboost-daal4py-pipeline.yaml') \ No newline at end of file diff --git a/samples/contrib/kubeflow-e2e-mnist/kubeflow-e2e-mnist-kfpv2.ipynb b/samples/contrib/kubeflow-e2e-mnist/kubeflow-e2e-mnist-kfpv2.ipynb deleted file mode 100644 index c818f16ea2f..00000000000 --- a/samples/contrib/kubeflow-e2e-mnist/kubeflow-e2e-mnist-kfpv2.ipynb +++ /dev/null @@ -1,767 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Kubeflow Pipelines e2e mnist example (in KFPv2 syntax)\n", - "\n", - "**Note**: KFPv2 is currently in Alpha phase, the syntax and usage below are subject to change.\n", - "\n", - "In this notebook you will create e2e mnist Kubeflow Pipeline to perform:\n", - "- Hyperparameter tuning using Katib\n", - "- Distributive training with the best hyperparameters using TFJob\n", - "- Serve the trained model using KServe\n", - "\n", - "We use KFPv2 syntax to define pipeline template, for detail about KFPv2, refer to:\n", - "\n", - "- [KFPv2 documentation](https://www.kubeflow.org/docs/components/pipelines/v2/)\n", - "\n", - "Reference documentation:\n", - "\n", - "- https://www.kubeflow.org/docs/components/training/tftraining/\n", - "- https://www.kubeflow.org/docs/components/katib/\n", - "- https://www.kubeflow.org/docs/external-add-ons/kserve/\n", - "\n", - "**Note**: This Pipeline runs in the multi-user mode. 
Follow [full Kubeflow connection guide](https://www.kubeflow.org/docs/components/pipelines/v1/sdk/connect-api/) to give your Notebook access to Kubeflow Pipelines.\n", - "\n", - "**Note**: Due to [issue #8300](https://github.com/kubeflow/pipelines/issues/8300), connecting to Kubeflow Pipelines using PodDefaults is not working for now. You can still use the Kubeflow Dashboard to upload pipeline and create run." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Install required packages (Kubeflow Pipelines and Katib SDK).\n", - "!python3 -m pip install --no-cache-dir --force-reinstall --pre kfp\n", - "!pip install kubeflow-katib==0.12.0" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Make sure KFP SDK version is at least 2.0.0-beta.4\n", - "!python3 -c \"import kfp; print('KFP SDK version: {}'.format(kfp.__version__))\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Create Persistent Volume Claim to store Model\n", - "\n", - "[dsl.VolumeOp](https://kubeflow-pipelines.readthedocs.io/en/latest/_modules/kfp/dsl/_volume_op.html) is currently not available in KFPv2. For now, you can create Persistent Volume Claim using `kubectl` from commandline. In this example, we assume your project is `kubeflow-user-project`, make sure you have access to namespace `kubeflow-user-project` in `kubectl` kubeconfig. \n", - "\n", - "**Note**: Run the following command in your terminal, replace `` with your Kubeflow project name:\n", - "\n", - "```\n", - "cat <'\n", - "spec:\n", - " accessModes:\n", - " - ReadWriteOnce\n", - " resources:\n", - " requests:\n", - " storage: 1Gi\n", - "EOF\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "## Define the Pipelines tasks\n", - "\n", - "To run this Pipeline, you should define:\n", - "1. Katib hyperparameter tuning\n", - "2. TFJob training\n", - "3. KServe inference\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Import necessary modules" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp\n", - "import kfp.dsl as dsl\n", - "from kfp import components\n", - "from kfp.dsl import Output, Artifact\n", - "\n", - "from kubeflow.katib import ApiClient\n", - "from kubeflow.katib import V1beta1ExperimentSpec\n", - "from kubeflow.katib import V1beta1AlgorithmSpec\n", - "from kubeflow.katib import V1beta1ObjectiveSpec\n", - "from kubeflow.katib import V1beta1ParameterSpec\n", - "from kubeflow.katib import V1beta1FeasibleSpace\n", - "from kubeflow.katib import V1beta1TrialTemplate\n", - "from kubeflow.katib import V1beta1TrialParameterSpec\n", - "\n", - "from typing import Dict, List, NamedTuple" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "### Step 1. Katib hyperparameter tuning task\n", - "\n", - "Create the Kubeflow Pipelines task for the Katib hyperparameter tuning. This Experiment uses \"random\" algorithm and TFJob for the Trial's worker.\n", - "\n", - "The Katib Experiment is similar to this example: https://github.com/kubeflow/katib/blob/master/examples/v1beta1/kubeflow-training-operator/tfjob-mnist-with-summaries.yaml." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# You should define the Experiment name, namespace and number of training steps in the arguments.\n", - "@dsl.component(\n", - " base_image='python:3.8',\n", - " packages_to_install=['kubeflow-katib==0.12.0']\n", - ")\n", - "def create_katib_experiment_task(experiment_name: str, experiment_namespace: str, training_steps: str\n", - " ) -> NamedTuple('Outputs', [('experiment_spec_json', Dict[str, str])]):\n", - " \n", - " from kubeflow.katib import ApiClient\n", - " from kubeflow.katib import V1beta1ExperimentSpec\n", - " from kubeflow.katib import V1beta1AlgorithmSpec\n", - " from kubeflow.katib import V1beta1ObjectiveSpec\n", - " from kubeflow.katib import V1beta1ParameterSpec\n", - " from kubeflow.katib import V1beta1FeasibleSpace\n", - " from kubeflow.katib import V1beta1TrialTemplate\n", - " from kubeflow.katib import V1beta1TrialParameterSpec\n", - " \n", - " # Trial count specification.\n", - " max_trial_count = 5\n", - " max_failed_trial_count = 3\n", - " parallel_trial_count = 2\n", - "\n", - " # Objective specification.\n", - " objective = V1beta1ObjectiveSpec(\n", - " type=\"minimize\",\n", - " goal=0.001,\n", - " objective_metric_name=\"loss\"\n", - " )\n", - "\n", - " # Algorithm specification.\n", - " algorithm = V1beta1AlgorithmSpec(\n", - " algorithm_name=\"random\",\n", - " )\n", - "\n", - " # Experiment search space.\n", - " # In this example we tune learning rate and batch size.\n", - " parameters = [\n", - " V1beta1ParameterSpec(\n", - " name=\"learning_rate\",\n", - " parameter_type=\"double\",\n", - " feasible_space=V1beta1FeasibleSpace(\n", - " min=\"0.01\",\n", - " max=\"0.05\"\n", - " ),\n", - " ),\n", - " V1beta1ParameterSpec(\n", - " name=\"batch_size\",\n", - " parameter_type=\"int\",\n", - " feasible_space=V1beta1FeasibleSpace(\n", - " min=\"80\",\n", - " max=\"100\"\n", - " ),\n", - " )\n", - " ]\n", - "\n", - " # Experiment Trial template.\n", - " # TODO (andreyvelich): Use community image for the mnist example.\n", - " trial_spec = {\n", - " \"apiVersion\": \"kubeflow.org/v1\",\n", - " \"kind\": \"TFJob\",\n", - " \"spec\": {\n", - " \"tfReplicaSpecs\": {\n", - " \"Chief\": {\n", - " \"replicas\": 1,\n", - " \"restartPolicy\": \"OnFailure\",\n", - " \"template\": {\n", - " \"metadata\": {\n", - " \"annotations\": {\n", - " \"sidecar.istio.io/inject\": \"false\"\n", - " }\n", - " },\n", - " \"spec\": {\n", - " \"containers\": [\n", - " {\n", - " \"name\": \"tensorflow\",\n", - " \"image\": \"docker.io/liuhougangxa/tf-estimator-mnist\",\n", - " \"command\": [\n", - " \"python\",\n", - " \"/opt/model.py\",\n", - " \"--tf-train-steps=\" + str(training_steps),\n", - " \"--tf-learning-rate=${trialParameters.learningRate}\",\n", - " \"--tf-batch-size=${trialParameters.batchSize}\"\n", - " ]\n", - " }\n", - " ]\n", - " }\n", - " }\n", - " },\n", - " \"Worker\": {\n", - " \"replicas\": 1,\n", - " \"restartPolicy\": \"OnFailure\",\n", - " \"template\": {\n", - " \"metadata\": {\n", - " \"annotations\": {\n", - " \"sidecar.istio.io/inject\": \"false\"\n", - " }\n", - " },\n", - " \"spec\": {\n", - " \"containers\": [\n", - " {\n", - " \"name\": \"tensorflow\",\n", - " \"image\": \"docker.io/liuhougangxa/tf-estimator-mnist\",\n", - " \"command\": [\n", - " \"python\",\n", - " \"/opt/model.py\",\n", - " \"--tf-train-steps=\" + str(training_steps),\n", - " \"--tf-learning-rate=${trialParameters.learningRate}\",\n", - " 
\"--tf-batch-size=${trialParameters.batchSize}\"\n", - " ]\n", - " }\n", - " ]\n", - " }\n", - " }\n", - " }\n", - " }\n", - " }\n", - " }\n", - "\n", - " # Configure parameters for the Trial template.\n", - " trial_template = V1beta1TrialTemplate(\n", - " primary_container_name=\"tensorflow\",\n", - " trial_parameters=[\n", - " V1beta1TrialParameterSpec(\n", - " name=\"learningRate\",\n", - " description=\"Learning rate for the training model\",\n", - " reference=\"learning_rate\"\n", - " ),\n", - " V1beta1TrialParameterSpec(\n", - " name=\"batchSize\",\n", - " description=\"Batch size for the model\",\n", - " reference=\"batch_size\"\n", - " ),\n", - " ],\n", - " trial_spec=trial_spec\n", - " )\n", - "\n", - " # Create an Experiment from the above parameters.\n", - " experiment_spec = V1beta1ExperimentSpec(\n", - " max_trial_count=max_trial_count,\n", - " max_failed_trial_count=max_failed_trial_count,\n", - " parallel_trial_count=parallel_trial_count,\n", - " objective=objective,\n", - " algorithm=algorithm,\n", - " parameters=parameters,\n", - " trial_template=trial_template\n", - " )\n", - " \n", - " # Convert experiment_spec to Dict type.\n", - " experiment_spec_json = ApiClient().sanitize_for_serialization(experiment_spec)\n", - " output = NamedTuple('Outputs', [('experiment_spec_json', Dict[str, str])])\n", - " return output(experiment_spec_json)\n", - " " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "@dsl.component\n", - "def convert_experiment_spec_to_str(experiment_spec_json: Dict[str, str])-> NamedTuple('Outputs', [('experiment_spec_str_output', str)]):\n", - " import json\n", - " output = NamedTuple('Outputs', [('experiment_spec_str_output', str)])\n", - " return output(json.dumps(experiment_spec_json))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# This container component is katib launcher, its API is same as the following yaml file in KFPv1.\n", - "# https://raw.githubusercontent.com/kubeflow/pipelines/master/components/kubeflow/katib-launcher/component.yaml\n", - "from kfp.dsl import Input, Output, Artifact, container_component, ContainerSpec\n", - "\n", - "@dsl.container_component\n", - "def create_dataset(experiment_name: str, experiment_namespace: str, experiment_timeout_minutes: int, \n", - " experiment_spec_json: str, parameter_set: Output[Artifact]):\n", - " return ContainerSpec(\n", - " image='docker.io/kubeflowkatib/kubeflow-pipelines-launcher',\n", - " command=['python', 'src/launch_experiment.py'],\n", - " args=[\n", - " '--experiment-name', experiment_name,\n", - " '--experiment-namespace', experiment_namespace,\n", - " '--experiment-spec', experiment_spec_json,\n", - " '--experiment-timeout-minutes',experiment_timeout_minutes,\n", - " '--delete-after-done', 'False',\n", - " '--output-file', parameter_set.path,\n", - " ])" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "### Step 2. TFJob training task\n", - "\n", - "Create the Kubeflow Pipelines task for the TFJob training. In this example TFJob runs the Chief and Worker with 1 replica.\n", - "\n", - "Learn more about TFJob replica specifications in the Kubeflow docs: https://www.kubeflow.org/docs/components/training/tftraining/#what-is-tfjob." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# This function converts Katib Experiment HP results to args.\n", - "@dsl.component\n", - "def convert_katib_results(katib_results: Input[Artifact]) -> str:\n", - " import json\n", - " import pprint\n", - " katib_results_str = ''\n", - " with open(katib_results.path, 'r') as f:\n", - " katib_results_str = f.read()\n", - " katib_results_json = json.loads(katib_results_str)\n", - " print(\"Katib results:\")\n", - " pprint.pprint(katib_results_json)\n", - " best_hps = []\n", - " for pa in katib_results_json[\"currentOptimalTrial\"][\"parameterAssignments\"]:\n", - " if pa[\"name\"] == \"learning_rate\":\n", - " best_hps.append(\"--tf-learning-rate=\" + pa[\"value\"])\n", - " elif pa[\"name\"] == \"batch_size\":\n", - " best_hps.append(\"--tf-batch-size=\" + pa[\"value\"])\n", - " print(\"Best Hyperparameters: {}\".format(best_hps))\n", - " return \" \".join(best_hps)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# You should define the TFJob name, namespace, number of training steps, output of Katib and model volume tasks in the arguments.\n", - "@dsl.component\n", - "def create_tfjob_task(tfjob_name: str, tfjob_namespace: str, training_steps: str, best_hps: str, model_volume_name: str,\n", - " ) -> NamedTuple('Outputs', [('chief_spec', Dict[str, str]), ('worker_spec', Dict[str, str])]):\n", - " # Get parameters from the Katib Experiment.\n", - " # Parameters are in the format \"--tf-learning-rate=0.01 --tf-batch-size=100\"\n", - "\n", - " # Create the TFJob Chief and Worker specification with the best Hyperparameters.\n", - " # TODO (andreyvelich): Use community image for the mnist example.\n", - " tfjob_chief_spec = {\n", - " \"replicas\": 1,\n", - " \"restartPolicy\": \"OnFailure\",\n", - " \"template\": {\n", - " \"metadata\": {\n", - " \"annotations\": {\n", - " \"sidecar.istio.io/inject\": \"false\"\n", - " }\n", - " },\n", - " \"spec\": {\n", - " \"containers\": [\n", - " {\n", - " \"name\": \"tensorflow\",\n", - " \"image\": \"docker.io/liuhougangxa/tf-estimator-mnist\",\n", - " \"command\": [\n", - " \"sh\",\n", - " \"-c\"\n", - " ],\n", - " \"args\": [\n", - " \"python /opt/model.py --tf-export-dir=/mnt/export --tf-train-steps={} {}\".format(training_steps, best_hps)\n", - " ],\n", - " \"volumeMounts\": [\n", - " {\n", - " \"mountPath\": \"/mnt/export\",\n", - " \"name\": \"model-volume\"\n", - " }\n", - " ]\n", - " }\n", - " ],\n", - " \"volumes\": [\n", - " {\n", - " \"name\": \"model-volume\",\n", - " \"persistentVolumeClaim\": {\n", - " \"claimName\": model_volume_name\n", - " }\n", - " }\n", - " ]\n", - " }\n", - " }\n", - " }\n", - "\n", - " tfjob_worker_spec = {\n", - " \"replicas\": 1,\n", - " \"restartPolicy\": \"OnFailure\",\n", - " \"template\": {\n", - " \"metadata\": {\n", - " \"annotations\": {\n", - " \"sidecar.istio.io/inject\": \"false\"\n", - " }\n", - " },\n", - " \"spec\": {\n", - " \"containers\": [\n", - " {\n", - " \"name\": \"tensorflow\",\n", - " \"image\": \"docker.io/liuhougangxa/tf-estimator-mnist\",\n", - " \"command\": [\n", - " \"sh\",\n", - " \"-c\",\n", - " ],\n", - " \"args\": [\n", - " \"python /opt/model.py --tf-export-dir=/mnt/export --tf-train-steps={} {}\".format(training_steps, best_hps) \n", - " ],\n", - " }\n", - " ],\n", - " }\n", - " }\n", - " }\n", - " \n", - " output = NamedTuple('Outputs', [('chief_spec', Dict[str, str]), ('worker_spec', Dict[str, 
str])])\n", - " return output(tfjob_chief_spec, tfjob_worker_spec)\n", - " " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# This container component is TFJob launcher, its API is same as the following yaml file in KFPv1.\n", - "# https://raw.githubusercontent.com/kubeflow/pipelines/master/components/kubeflow/launcher/component.yaml\n", - "from kfp.dsl import Input, Output, Artifact, container_component, ContainerSpec\n", - "\n", - "@dsl.container_component\n", - "def tfjob_launcher(tfjob_name: str, tfjob_namespace: str,\n", - " worker_spec: Dict[str, str],\n", - " chief_spec: Dict[str, str],\n", - " tfjob_timeout_minutes: int):\n", - " return ContainerSpec(\n", - " image='nikenano/launchernew:latest',\n", - " command=['python', '/ml/launch_tfjob.py'],\n", - " args=[\n", - " '--name', tfjob_name,\n", - " '--namespace', tfjob_namespace,\n", - " '--workerSpec', worker_spec,\n", - " '--chiefSpec', chief_spec,\n", - " '--tfjobTimeoutMinutes', tfjob_timeout_minutes,\n", - " '--deleteAfterDone', 'False',\n", - " ])" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "tags": [] - }, - "source": [ - "### Step 3. KServe inference\n", - "\n", - "Create the Kubeflow Pipelines task for the KServe inference." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "@dsl.component\n", - "def create_serving_task(model_name: str, model_namespace: str, model_volume_name: str\n", - " ) -> NamedTuple('Outputs', [('inferenceservice_yaml', Dict[str, str])]):\n", - "\n", - " api_version = 'serving.kserve.io/v1beta1'\n", - " inference_service = {\n", - " \"apiVersion\": api_version,\n", - " \"kind\": \"InferenceService\",\n", - " \"metadata\": {\n", - " \"name\": model_name,\n", - " \"namespace\": model_namespace,\n", - " \"annotations\": {\n", - " \"sidecar.istio.io/inject\": \"false\"\n", - " }\n", - " },\n", - " \"spec\":{\n", - " \"predictor\":{\n", - " \"tensorflow\": {\n", - " \"storageUri\": \"pvc://{}/\".format(model_volume_name)\n", - " }\n", - " }\n", - " }\n", - " }\n", - "\n", - " output = NamedTuple('Outputs', [('inferenceservice_yaml', Dict[str, str])])\n", - " return output(inference_service)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# This container component is KServe launcher, its API is same as the following yaml file in KFPv1.\n", - "# https://raw.githubusercontent.com/kubeflow/pipelines/master/components/kserve/component.yaml\n", - "from kfp.dsl import Input, Output, Artifact, container_component, ContainerSpec\n", - "\n", - "@dsl.container_component\n", - "def serving_launcher(action: str, inferenceservice_yaml: Dict[str, str]):\n", - " return ContainerSpec(\n", - " image='quay.io/aipipeline/kserve-component:v0.7.0',\n", - " command=['python', 'kservedeployer.py' ],\n", - " args=[\n", - " '--action', action,\n", - " '--inferenceservice-yaml', inferenceservice_yaml,\n", - " ])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "@dsl.component\n", - "def convert_inference_service_to_artifact(inferenceservice_yaml: Dict[str, str], inferenceservice_artifact: Output[Artifact]):\n", - " import json\n", - " with open(inferenceservice_artifact.path, 'w') as f:\n", - " f.write(json.dumps(inferenceservice_yaml))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - 
"@dsl.pipeline\n", - "def serving_pipeline(model_name: str, model_namespace: str, model_volume_name: str) -> Artifact:\n", - " create_serving_task_op = create_serving_task(model_name=model_name, model_namespace=model_namespace, model_volume_name=model_volume_name)\n", - " convert_inference_service_to_artifact_op = convert_inference_service_to_artifact(\n", - " inferenceservice_yaml=create_serving_task_op.outputs['inferenceservice_yaml'])\n", - " serving_launcher_op = serving_launcher(\n", - " action='apply', \n", - " inferenceservice_yaml=create_serving_task_op.outputs['inferenceservice_yaml']\n", - " ).after(convert_inference_service_to_artifact_op)\n", - " \n", - " return convert_inference_service_to_artifact_op.outputs['inferenceservice_artifact']" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Run the Kubeflow Pipeline\n", - "\n", - "You should create the Kubeflow Pipeline from the above tasks." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "name=\"mnist-e2e\"\n", - "namespace=\"kubeflow-user-project\"\n", - "training_steps=\"200\"\n", - "model_volume_name=\"workflow1-model-volume\"\n", - "\n", - "\n", - "@dsl.pipeline(\n", - " name=\"End to End Pipeline\",\n", - " description=\"An end to end mnist example including hyperparameter tuning, train and inference\"\n", - ")\n", - "def mnist_pipeline(name: str =name, namespace: str = namespace, training_steps: str =training_steps, model_volume_name: str = model_volume_name):\n", - " # Run the hyperparameter tuning with Katib.\n", - " katib_op = create_katib_experiment_task(experiment_name=name, experiment_namespace=namespace, training_steps=training_steps)\n", - " convert_experiment_spec_to_str_op = convert_experiment_spec_to_str(experiment_spec_json=katib_op.outputs['experiment_spec_json'])\n", - " create_dataset_op = create_dataset(experiment_name=name, experiment_namespace=namespace, experiment_timeout_minutes=60,\n", - " experiment_spec_json=convert_experiment_spec_to_str_op.outputs['experiment_spec_str_output'])\n", - " \n", - " # Run the distributive training with TFJob.\n", - " convert_katib_results_op = convert_katib_results(katib_results=create_dataset_op.outputs['parameter_set'])\n", - " tfjob_op = create_tfjob_task(tfjob_name= name,\n", - " tfjob_namespace=namespace,\n", - " training_steps= training_steps, \n", - " best_hps=convert_katib_results_op.output,\n", - " model_volume_name=model_volume_name)\n", - " tfjob_launcher_op = tfjob_launcher(tfjob_name= name,\n", - " tfjob_namespace=namespace,\n", - " chief_spec=tfjob_op.outputs['chief_spec'],\n", - " worker_spec=tfjob_op.outputs['worker_spec'],\n", - " tfjob_timeout_minutes=60).after(convert_katib_results_op)\n", - "\n", - " # Create the KServe inference.\n", - " serving_pipeline_op = serving_pipeline(model_name=name, model_namespace=namespace, model_volume_name=model_volume_name).after(tfjob_launcher_op)\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from kfp import compiler\n", - "\n", - "OUTPUT_PACKAGE_PATH = 'mnist_pipeline.yaml'\n", - "\n", - "compiler.Compiler().compile(\n", - " pipeline_func=mnist_pipeline, \n", - " package_path=OUTPUT_PACKAGE_PATH,\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Due to [issue #8300](https://github.com/kubeflow/pipelines/issues/8300), \n", - "# connecting to Kubeflow Pipelines using 
PodDefaults is not working for now. You can still use the Kubeflow Dashboard to upload pipeline and create run.\n", - "\n", - "# Run the Kubeflow Pipeline in the user's namespace.\n", - "# kfp_client=kfp.Client()\n", - "# run_id = kfp_client.create_run_from_pipeline_func(mnist_pipeline, namespace=namespace, arguments={}).run_id\n", - "# print(\"Run ID: \", run_id)" - ] - }, - { - "attachments": { - "pipeline_run_detail_mnist_v2.png": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAACj4AAAhcCAYAAADnvRD2AAAAAXNSR0IArs4c6QAAIABJREFUeF7s3QeQFFW7xvGXpICIgJKzIEH8EAFRcpCoBAlLRmDJWTIsSUByzlFYJMclSY6rgiAZVmAFyUgOIoLCwq0z3F2mp3tmemZz8+8q6ytnTp/wOz29Vt3nvifO8+fPnwsXAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggMArJXDu3Dn5888/w9acJUsWSZcuncbAsY36Mnv27JIqVSpbO/V/bg4ODpbbt2+btnvvvfckZcqUmvYnT56Uu3fvGvaRNWtWSZs2re27Q4cOyePHj8PaFSxYUF577TXTY586dUru3LmjaR83blz5+OOPJV68eKb7edUaztzxUAZu+NurZfetlETafPaG7d6AA4+k/ZK/wvrxyZdQxjd8K+zfD59/IjWm3pH/npkbKnPyeLKhcwpJljiu7YZr90KkwLe3wm5OniiOnBj84lm1vyZu+VtGbHkY9lHXz96QLpWShP37jO0PZdBG8+ut9sHrMrVJMreTdrd+xw6u3g2Rj4e8XI/bAUQk6etx5OSQl2v+oN8NufvoZSzkQN93JG0y7bN+8NwTqTrl5e/io/QJZH3nFGaGo42dwMWLF+Xy5cthn2TIkEEyZcqkM3ry5ImcOHFCHj16ZNrvo48+kkSJEtnaq/ft6dOnw+5NkyaNvPvuu077MtNezeXw4cNhfSRJkkTy5s2r63PPnj2az4oUKRL272fPnpXr16+H/XuuXLkkRYoXz9F///0nBw4cCPsuYcKEkj9/fl3/V69elfPnz4d9njlzZkmfPr2EhITIvn37dO0TJEggceLEsfVvdBUuXNj2fej1999/y7Fjx5xaha4nvHtkemNjQcPfrz2VUqO1f+Mn1k4qNQu9eB6dXc2/uycbT/4b9rV6Hx8blErixnH/t8Cxz1/O/Cc1pxv/N4LR+Oo9eKB/Snnj9Tim/y4s2fNIuq56+ffpq0IJZVjtF3+fLtwKkSLDX76L074ZVw4M0P53TOg8jlx4Il9M0v53hvou1Rtx5dA3KcXucQyb+o37IVJh7B258dDkHz8R2dXtbXkvTfxY8ARF7hTjEHyMXGB6RwABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBGKigJngowqbHD16VBM2VGtRgRUVXFGXJ+HHjBkzivrH8XIM3dh/H5HBx/v370tQUJBmeBWqUeEaLtcCe4L/k6/m3pNHT8zVVnotrsjoWtpwjJng36HzT6SmifCjY+hRzT6igo+qr6nbHsqQTe7Dj4UzJ5D5rZJLotdehqucSZpZv+O9/Vf9Jd/tcR6Qy5oinly5FxIWFiX4GH2/ZLPBRzVDT4J177//viRL9jJYaybIaK9gpn1MDz6q9dy4cUPOnDnjdINViP3ZM21wzDH4qG5Wfai+jC77IGd49ij6nsKIH3lQwF8y42ftO+j0tyklScIXgXNn17YTj6Wx/33N1ytbJ5dPs7/mNgRv1Kf6G+Qz0334Ub0DV7VNIbnTvwgFmv27EFHBRzVmqeG35PdbIZplfFvlTWlaMrFTr+v3Q6SiyfCjf+O3pNz/Xvw32Kt+EXx81Z8A1o8AAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAq+kgGPwUVUMU5XDHC+jClmqYqOq3Bh6qfCjCv2oCpKOwRPVRlUPU5XP7MM7juM8fPhQ/vjjD3nw4EHYVyrIoipMvvPOO7bPVEUy+yppqlKjqvhl9lLVIlXVSPvLPsRptp9Xtd39R89k+S+PZMquf5xWplKhkzoFE0rHckkkRRJtMGbNoUfSdtHLiloNP04oI+q8rPgY6nr0whPpG/BADl1+Ykj95f9el29rJZXkb2j7V1WzPhrsviqXY8VHvwpJpF25F1Up7S8VhPl24wNNxcTQ79U6O5V5Q1qWecNWwczMZXb99n2FPBOZtPVvGbX1ZYXK0O+LvfuazGmWTEoOvyV/PngR+HIMPn404KZmr44MeEdSvum64qMKc67oQMVHM3tq3+bSpUui/gm9nFV8DP1eVSlU1Q1v3TKu6vn222/b3puhlR5D71NVa1X12tBLVepVFXudXWbaOwYfkyZNKh988IGuS08qPtoHNlWI8Ndffw3rL3HixJIvXz5d/44VH+2D76rxzZs3RVWWdPw7o/pTFSaPHz9uC5WGXkbBR3WvGkdV57Tvx6jKpbd75OmzE5PbO1aNLZ/rdZnb3H2F2ychzyVH7xuaCr6h73xv3oXKaP/Z/6TXigdy+uZTQ7K6+RNK7ypJ5B27d5zZvwurfn0kHZYaV3y8fCdEPhn68neqgvd7+rz47xKja+X+R9Jx2cu+1P8jwPFB7sOiKqTZe8UD2XLqZaVM+/7VO//bGm9S6dEOheBjTH57MDcEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAgFgmoEIkKMP7777+2SpAqlPj666/rgjuulqSqTKqwiTp62pNjrM0wqZCRCryEXipYlDNnTjO30sZO4PlzkYu3QkSFQdQ/6no7SRxJnyK+5EoX3/AoT28AVd/Bfz6Vm3+9CPVlfDue5MkYX95K5LrSmDdjObvn2XORoEtP5Pztp/LgkcibiUQypogv/8uYQOJF3TTk7sNncvLKU7l0O0TSJIsrH2VJIEmj0CEiTelLK/D06VPbezP0uGb13nzjjTc8CnW/Sqbqb4QKaqoguwrHKyv1d8abS5mr/tT9qi9nF3vkjW7k3XPuRoicvf5Ebj14Lgnii6RPEU/ypI8vb1rknXjrQYjtfX/17ou/feqdnztdfEn1lja0HnnCsadngo+xZ6+YKQIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIICAlwIquLJ//37N3aqimapsxoUAAggggAACCCAQuwQIPsau/WK2CCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAJeCFy5ckUuXLgQdqezo1a96JpbEEAAAQQQQAABBKJYgOBjFIMzHAIIIIAAAggggAACCCCAAAI
rkW/n000/C3hfq+Oc///xTpk6dLosWLzX8bW3Z/EOEhDJV58q8cBHtkdkqpLd82WKX1Q3VPNu376irjKj6LF2qpHTp+rXkef99XR9qL+bOnSf+8+br1qaejYkTxpmqqmhmP2zvYy8qPqr3dYWKlQ1DhOpI7wH9+9iOUrcPQKpjw/3nzpOJk6YYTm3OdzOldOlSZqdta+dJ8LF+/Uay95d9hv2PGa0/xt3dRFRlVnUst7OqpM+fPxef2vXk4MFDmq727gmMsGfT3Rz5HgEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAW8ELBN8VIt/1cKP4a3kaOaBiepqj2pO4Q0+/vPPP1Lp88q6Y0ILFSooixbO11X5clW9UQWaChYoYAtM3rp9WwIDAw2PH7W3jI3BRzV/Z4HR0LWpMFPatGnk6dMQuXv3rmzfvlNXwc6+rZngE8HHmB98fPTokZQpU84w3Kr2Wx0NnzdvXkmeIrncvXNXjhw9Kps3bzV8vbirQGh07LJRRwsX+EuRIkVsX23fvkOat2ht5nVmuk2a1Klk796fdO2fPHkitevU01WxtG+owobx4sWV/fsPuBzv+3lzpHjxYqbnZKZhnbr1deO6q16r+nVW9TV0zFy5csinn3wib7/ztty9e09Onw6Wn3/eYzgl9f7btXOrYVVPM2tw1sab4KPq6+TJU/L5F1VdDp0vX15JmjSpBJ8Odvqcqw5UCFdV9zRzzLv9gJ4EH8uWq2QY1AyP3YQJY6VqlcqGXezZu1caNGis+U79rVSVUrkQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBCIyQKWCj4q6Fct/GjmyGtvH8DoCD2quYY3+Kj6cHbktbOjQh2DKWbNVECqcpXKMnv2nLBbYmvw8enTpzJmzDiZPmOW2eUbtjNz9G/ojQQfY37wUe3VpUuXpWGjr9yGfl09ODWqV5ORI4e7PF74t99OyheVq7l9/iI7+OjsN6wmpioC+vo2dxl+dLeAfn39bFUrI/oyqtxa+NNPZNEifZqVyvMAACAASURBVFVGx7GDg4OlRcvW4dpjFRT3nztHsmbNEtFL86riY+gkdgcGSpMm+oq2nkxSHbs+fdoUSZAggSe32dpGd/Bx/PgxUq1qFcN5G1WYnDNnlq3aJxcCCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIxWcBywUeFbRR+HDlypHTv3j0m74XXc4uM8GN0hR4VQkQEH1U/RhXC1OcbN6yTXLlyarxv3bolfn36ydat203vgzoqdfKk8bJz504ZNHho2H2xNfgYuoD163+QDh07m3awbzhwYH9p1LCB6YpoBB9jR/BR7fGNGzelcZOmcupUsMfPRv36dW2V8uyPFHbWyffzF8iAAYNcjhGdwUc1sb///lv69O0va9eu98hCvRumTpkoxYoV9eg+s43VsdW169TXHVu8bOki+fjjgm67UaHOrl26644od3ujiBQtWkQmTRovyZMlM9Pc4zbeVnwMHejYsePSpWsPr6opdu36te249ddee83jeasbYmrw8cCBg7Zjru0vVbFUPS9x48b1aq3chAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAQFQJWDL4qPDsw49WDj2GPigRGX6MztCjWs/GjZukbbuOmt/Avl9+llSpUnr0u3B25PWXX1aVcWNH6/pSoaFVq1ZL9x69XI6jwkutWjWXZr5NJWHChBIQsNoWqAm9nAUfO33dRROU8uY40Z69/GTZshVhY5mt5uYRnIhcu3ZdVq9eI1OnzXB6nLV9n+3btxGfWjVtR4J7cpUsVUZTYc7Z3njSp1HbpcuWS69efTRfHTt6UNRembmWLFkqvf36aZoG7t4hGTNmMHO7GB0nu23rJsmWTRt8VFX3KlTUHkm7ZPEC+eSTQqbGUY3On78gpcuU07Q3Cr7du3dPPsqv7dddJUJ11PPWrdtk0uQppgKQKkTVq2d3KViwgOn5q4bKQYWJnR2nvGb1Ssmb93+2PlWbho0itnqiq4qP9gvZu/cXGTlqtNvqj6q/xo0bSoP69SVNmtQeWXjaeN++/VK3XkPNbSqUqI7WNhNmCwkJkd27A2XO3HlO/e07V303a9ZUShQvZirY6ul6QtuPHTdeJk2aGna7qi65e9cOj7pTx7YvWLhIFixYaKqypapS2qyZr7z/fm6PxnFs3LlLN1m9em3Yx67e/dVr1HL7PHk6GaOKj+rv3VeNfXV7vGjh91K48KeeDkF7BBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEolzAssFHJanCj+qyaqVHx6clvOHHwqk+lGYf+MiHKXNF+YMYkwZUgbAzZ87KuXPn5fczv8uDvx7I05AQyZgxo+TOlVOKFy9mCzy+Cte///5rOzb8woWLcuXKFbl0+bKEPH0qGTNllPTp00vGDBlsobzEiRO/Chys8f8FVDjuyNGjcuH8Bbl85apcvHhR/vrrL3nrrbckderUku3dLPLpp59K2rRpw2Wmgmrq2VMVFlWQWfWfI8d7kihRonD1G9E3q/WrAOb5Cxfk6tU/RQVE06VLJ+nTp5OsWbJIkSKFo/Sd0biJrwQG/qRZ5sgRw8THp6ZHS//99zNy4sQJuXL1qu24c3XFjx/f9rtPmy6t7X2YI0cOj/qMCY1V6E9VgDx67Jhcv3bdtr7EiRJJ/AQJJF3atJI5cyZbBUv1vFn1Wr58pfTo2VuzPLXmBfP9rbpk1oUAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAgggYDEBSwcfLbZXppfjTQAyuqs8ml4cDRFAAAEEXAocOXJUqtfw0bXZsX2LZM2aBb1XXMCoKqwiCVi1TPLly/eK67B8BBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAIHYIkDwMbbslBfzPHrzlBy5cVIO3jhhu3vvjaO2/1WVHUMvKjx6AcstCCCAQAwX6D9goMyfv1Azy3z58sqypYslQYIEMXz2TC+yBFQ10tp16umO027UqIEMGjggsoalXwQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBCJcgOBjhJPSIQIIIIAAAtEroI4Jr1HTR06dCtZMpEuXTtKhfbvonRyjR5vA5ClTZcyY8Zrxs2XLJuvWropxR8hHGxIDI4AAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAgjECgGCj7Fim5gkAggggAACngkEBwdLhYqVNTd17NBOOnfu5FlHtLaMwMSJk2Xc+Ima9WzetF5y5MhhmTWyEAQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQACBV0OA4OOrsc+sEgEEEEDgFRRYtnyF9OzpJ2+++aZMmDBWSpcq+QoqsGR7gZ27dkunTl3kwYMHMnz4EKlT2wcgBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABB
BBAAAEEYp0AwcdYt2VMGAEEEEAAAXMCz58/l61bt8tHH30oKVOmNHcTrSwvcPPmTVEBSJ9aNSVOnDiWXy8LRAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBCwngDBR+vtKStCAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAwLICBB8tu7UsDAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAHrCRB8tN6esiIEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEELCtA8NGyW8vCEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEELCeAMFH6+0pK0IAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEDAsgIEHy27tSwMAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAesJEHy03p6yIgQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQsK0Dw0bJby8IQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQsJ4AwUfr7SkrQgABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQMCyAgQfLbu1LAwBBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAAB6wkQfLTenrIiBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBCwrQPDRslvLwhBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBCwnkCcO3fuPLfeslgRAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAghYUYDgoxV3lTUhgAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAgggYFEBjrq26MayLAQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQSsKEDw0Yq7ypoQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQsKgAwUeLbizLQgABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQMCKAgQfrbirrAkBBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABiwoQfLToxrIsBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBKwoQPDRirvKmhBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBCwqADBR4tuLMtCAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAwIoCBB+tuKusCQEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAGLChB8tOjGsiwEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEErChA8NGKu8qaEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEELCoAMFHi24sy0IAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEDAigIEH624q6wJAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAYsKEHy06MayLAQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQSsKEDw0Yq7ypoQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQsKgAwUeLbizLQgABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQMCKAgQfrbirrAkBBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABiwoQfLToxrIsBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBKwoQPDRirvKmhBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBCwqADBR4tuLMtCAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAwIoCBB+tuKusCQEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAGLChB8tOjGsiwEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEErChA8NGKu8qaEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEELCoAMFHi24sy0IAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEDAigIEH624q6wJAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAYsKEHy06MayLAQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQSsKEDw0Yq7ypoQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQsKgAwUeLbizLQgABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQMCKAgQfrbirrAkBBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABiwoQfLToxrIsBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBKwoQPDRirvKmhBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBCwqADBR4tuLMtCAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAwIoCBB+tuKusCQEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAGLChB8tOjGsiwEEEAAAQQQQAABBBBA
AAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEErChA8NGKu8qaEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEELCoAMFHi24sy0IAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEDAigIEH624q6wJAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAYsKEHy06MayLAQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQSsKEDw0Yq7ypoQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQsKgAwUeLbizLQgABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQMCKAgQfrbirrAkBBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABiwoQfLToxrIsBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBKwoQPDRirvKmhBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBCwqADBR4tuLMtCAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAwIoCBB+tuKusCQEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAGLChB8tOjGsiwEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEErChA8NGKu8qaEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEELCoAMFHi24sy0IAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEDAigIEH624q6wJAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAYsKEHy06MayLAQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQSsKEDw0Yq7ypoQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQsKgAwUeLbizLQgABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQMCKAgQfrbirrAkBBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABiwoQfLToxrIsBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBKwoQPDRirvKmhBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBCwqADBR4tuLMtCAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAwIoCBB+tuKusCQEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAGLChB8tOjGsiwEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEErChA8NGKu8qaEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEELCoAMFHi24sy0IAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEDAigIEH624q6wJAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAYsKEHy06MayLAQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQSsKPBKBx9DnonsPvmv/Pz7f3LuVoicvx0iceOKZHgrnryXOp6U/9/rkj/LaxIvrnW2/vKdEPn17H9hC8qSMr58lCWBdRYYS1Zy8NARefT4keFsEyRIICmSJ5fkyZJJ8uTJJZ6VHkARORH0m6xZt0Hu//WXfF6hnJQqWTzM4ebNW3L699/D/j11qtTyXvZ3Y8muxvxpnjn7h1y7ft1wonEljqRPn04yZMjg8pn789o12/6dPHlaShQvIp9XLC+JEiWKUYt39xy5egZj1EJcTObx48fy+5k/JPj33+XkqWB5+M8/kiZ1KsmQPp18VrqkJEuWLLYshXkigAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIeC7ySwUcVeJyx46FM2vlQ/vr3uUu0tG/GlXnNkkueDPE9xo2JNwQceCTtl/wVNjWffAllfMO3YuJULT2nDl93lwd//+12jSoEWaJYESldqoQt0BTbr4cPH0q7Tt00y/Dr2VVyvJfd9tnPe/bKrDnfh33/aaGPpXVL39i+7Bgz/5mz/WXPL/vczidrlszyRaUKUrDAR7q2vfsNlD//vBb2ebUqX0j1apXd9hmVDVw9R+6ewaicp7djXbx0WYaOGCMq/Ojsqvx5BalV40tvh+A+BBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAgRgu8csHHa/dCpPHse3Li2lOPNsavQhJp/dkbsb76I8FHj7Y90hqbDT7aT0BV1qtZvVqkVYDcuftHCfrtZNiQ1atVkfTp0kaowY8/75Hv5s7X9KkqPjZpVN/2GcHHCOXWdWY2+Bh6Y4N6PlLuszJh/dy+fUe69uyj6fftFClkzMghkTtxD3t39Ry5ewY9HCrKmwf9dkrGTpgsISEhbseuWL6s1K1d0207GiCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAArFN4JUKPqrQY7ERt+XRE9dVHp1tYrsSicWv6puxbY818yX4GDO2z5vgo5p55v9j7z7ArCru/wF/6U2ko6iABUVNrFiJvfea2I0klqixd+y9995/+jfR2Bv2XhCxxAaKFTsgShEB6f9nDtnL3t2F3YUFzrLveZ488d4zZ86cd84dbvnsTNcucVrvE6JhwwY1fiHX33hrvPXOu4V6D9q/V/Rcd+0aPc/PP/8Sx510alGd/zzkwFizx+rZc4KPNcpdrrLqBh9TBTvtuF3stP22WV3Tpk2LI44+oWi20o3W/1P02m+fudvwatY+q/uosnuwmqeap8UnTJgQhx5xbJVCjyUNO+PUkyLN4GkjQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECCxIAnUm+DhpyrTY8tIR8enw4pkeG9eP2K1H09h+1aaxeJuGMWLs1Bjw/aR4+dO
J8eygCYW+/uOiDeOhw9tGiyb1anX/Cz7mo/vKBh9XXGH56Nx5iZQsi+E//xLf//BDpNn1KprVbcP1/xR/mwtBs3kRfEz6jzz+RDze56ns2tZao0f848C/RYMG04Ocgo9z9/4sG3xs375d9Fh9+nLWEydMiP5vvxPjxo0v14irL784Fl54euj7/fc/jNvuuCsLP6ZA3T8PPjBSPXnaKruPZnUP5uk6yrblzf5vx4233F709Oabbhzr9VwnmjZtGu/89/24/8GHi/aXnlE1z9embQQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgACB6gjUmeDjxX3GxFUvjyuyadaoXjx/TLtYskPFs+c999HvcdBdo+MPnRrF/f9sE80aVz/0OHVaRP1KDhs/cdps1V22oydOnhaNG876ZJUFHydPmRYN6teLetW/1Orcd3W+bNng48knHhvLLdutyGXipEnxyKN94smnny3nddzRh8cf/7BipY6pjsaNGlVaLhWYk+BjSUCzJMBY2QknT56cBR+bNGlSVLSywFpl9do/a4Gywccdt982dt5xu8JBaUbH9z/4MK669saiivbbd6/YeMP1i8qNGz8+WjRvXi3yyZOnzNFspVOnTs1mnazsPqvKfTSze7CyC0r3bf369aPebAySVW3/zNrw8COPx6N9nizsXnTRReLCc88sKv7CCy/HXffcGyv9ccXYZqstYoXlu1d2SUX9mtpYme+sKqzOmFNRPdUdS6p8cQoSIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECC5RAnQg+pkBg95N/iolTZ/RdZaHHkpIjfpsazZrUi1S+ou2Kp36LH0ZNyXa1aV4/TtmxZbwwYEI8+M74ePbTidmy2ku1bRCvnNg+/jepXaQltx959/foP3hSvDl4Yvw6YVqkmSe7tm0QPbo0iiO2WCi6ti8fxvxX33Hx/reTCs24aLdW8fbgiXF3v3Hx4mcTY+T46fWsvHijOHSj5rH5yk3LhS7LBh97rd0sTtxuobjuubHxwqcT45Nh02fE7N6hYeyyetM4ZNMW0aD+AnXP5+JiqhJ8LGnoN998G2ecc0FRu1OY6cTjjip3LSlY9mb//vHMcy/Gj0OGFmaMbLXwwrHOOmvFFptuHO3atS0cN3bs2Lj73vuzxx9+OLBoCeM0m99iiy2a7WvdqlX8Zdedi873zrvvxcCPP4lBn30eQ4YMzfY1b94sFuvUKf7Uc51IM1OmgFjZLc1IN2r06MLT++61RzZbXdqqEljLRQfW0kZUFnwsuaxTTj87fvhxSOEqt9xis9hzt12zJGHFXQAAIABJREFUx2+88WYMHDSosG+zTTYuLKX88SeDom+/Nwv7ttlqy2jYoEE8/ezz8dHAjyMtM522NEPk5ptsHBtuuF40LRN+LUs7ZOjQLPz7/vsfFe7PRo0aRdcunWPLLTaNHqutWu4+q+w+mtU9+M2338Wzz79QaMaG668XnZdYPB55/Mn48KMBhXs9hQ7TUvDbbb1lhfd5SQWz0/6Z3V6PPfFUPPTwY4XdLRdaKK66/KJy5x8zZky0bDl9hs6y25133R0TJ03Mnl6kY8fYYbtt4uVXXou33vlvfPrZ59mYkV6PaRba7bbZMpZeaslZ3u2//TY2nn/x5Xjt9TfilxEjsrIpONmhQ/vYZKP1Y4P1/lR4fVdUUQqypnHkmedeiM+/+Cp+//33rFhJH2++2caxxuqrzVEYs5a+XDWbAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQmIVAnQg+PvT2+Dj83l+LGF45rl10W7ThHN8cfzztpyxwmLYUjjxr+5ZxwkPF50r7vr6oYzRqUC/e+Gxi7H3ryKIQZkWNOH7zFnHUlgsV7drl6hHRv1Tw8YKdWkbvR8bM9Br2WbNpXLR7q6L9ZYOPe6/RNF79fFJ8N3p6eLPslgKQTxzdtkZmpJxj7AWoguoEH9Nl3/fAw+VmfkyBpxRoLNkmTpwY55x/cXz3/Q8zlUqBpDNP650FudKWltM+9sRTKpVNAatrrrwkK5fOc8PNt8V77384y+PatG4daSbLFIAqvR182NGFcFN6vvR1VBZYq7ShCsxSoKrBx4svuypSiLFk23ijDWK/ffbMHl5z/U3x7n/fL+w7+MC/xzprr5k9TktIp1lKS7Y0o2QKLU6aNCOwXbqBKTx41mm9y838WVImnSedb1ZbOvc/Dvhb0QyMld1H1bkHU/iv7xv9Y+SoURU2o0vnznH6KSdWOJPl7LZ/Ztf7wYcD4oqrryva3XOdtWPvvXar8uybvQ44pHB8x44dotsyS8cb/frPlPgfB/491v1f/5YtNHTYT3HamefOtH9T+RRyPeeMU6JZs2YVnuOOO/8VL7/Wd5Z9nO6T3scfHa1aFf975uVOgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQI1F2BOhF8PO6e0XHPu9NnkUrbn5ZqHPf9s02N9Hrp4OOsKkzBx3v6jZ9lULHs8fce2DrW6z5jKeCywceqXMDRm7SI47aZEaAsG3ycnTqqcowysxaobvAxLR970CFHFFV69BH/jFVW/mP2XJql7YKLL48vvvyqUvo0k9p5Z50WKfRU3eDj6F9/jbPPvagws1tlJ1t8sU5x7lmnFYXSqhM6W2etNePgg/5e2Wnsr6JAVYKPo0aNjqOOO6moxj13+3M2u2LaqhN8rEqz0oyCp5x0XLkZ/QYM/DguveKaqlQRm22yUeyz1+6FsjUZfKxKA3b/yy6x9ZabFxWdk/bP7JwpdHzsCacUzcxaUnatNdeIDdbvGSsu332WM1CWDj5W5dpSmWOPOjxbOrv0NmLkyDj5tLOLQswzqy8FrU875cRo3KhRUZE0++RLr7xWpWak8PVll5xfro4qHawQAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgsMAJ1Ing45+vGRH9vpkx49h5O7SMXhs0L9eZQ0ZNiQ9LlZtZb7dqUT/W6dY42z2r4GNa4nromKnZctcp+Nj/i4mx722jCrM9rr5Eo9h+5SbRunn9eHrAhHjpswlFM0F2alk/3jmjQ6EZswo+dm3TIIaMnlLhTJJX/Hnh2G2d6bNtzSr4mNo7eETFMz9+c1HHaNig4uW+F7hXxTy4oOoGH1OTjj6ud9Gsc3/de4/YZOMNs9ZeduU18dGAj4tavtyy3WKdtdbIQoqvvvZGUVgqLUl95WUXxbixY+O0M8/Ljhs3fnxhaez0OAUkS5YhXnLJLln4acKECXH9TbdGmnkubWkGyfXX6xnLLbN0fP/jkOj35lvlZsYrO2Oc4OM8uMFmcoqywccUlktLkqdt8uTJMXTYsHj08Sdi3LjxRTWcf/bpsdhinbLnZjf4mO657D4rU3d6bo0eq8VhhxxUOOeXXw3OZi8tvaXj11/vT9GxQ/tIocKyM47uu/cesen/Xg9zK/iYZi8sWa67dNvSa+WWG66usfbP6g75/Isv48JLrih6rZZ12mmHbSMt0d
2kgmXEZxV8nNn1pfrPPfPUWOJ/M8Wm5a17n3pm0ZiSxoI1e6weyy+/XHz9zbfZ0tcpkF2yrbrKynHU4TNmm6xoFttUtl3bttm9mELWZbftt90qdt15x/n3AnJmAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAIDcCdSL4uMZZw2PImKkF9Dv2axWbr9S0XCc8/Pb4OKzMktgV9VSbZvViwDkds10VBR//ulbTOGfXhQtBwW9+nhJd2zfIyn8/Ykr0um1U/GX1pvGPTVsUVf/F0Mmx4aW/FD93fsfCMtMVBR83WbZxXLNvqyw8OXVaxGuDJsRetxUvydqxRf1476zpAcqKgo9p/yOHtY2uHRrEb79PjaueGRvXvzauqB19T2wfS3aYfg22OReYneBj2XDjtttsEX/ZZef47PMv4vyLLitqVFpiOgUfS7axY8fGORdcEkOHDis8d8g/9o+111yj8Pj6G2+Nt955t/D4oP17Rc91167wYp946tl4/Y1+ceyRh2VL2ZbeytazxWabxF57/KVQRPBxzu+f2a2hbPCxKvWkmf5S6LVkq27wMc3Ud8pJx8aiiy6aVTF06NA478LLys1aeMUlF0SbNq2zMmede2EM/vqbwjnTcsxpVsh69WaEr9P9d+vt/69QJi37npZNT1tNBx9TGC+9ptq1a5sFN+978OF4ucxMhddddWm0aDF9TJ/T9lfWLykUeMf/+/csl5tPHiced1QhsFpSZ0XBx5123C6223rLaNiwYRY67PPUM0VLlqdj01iQxoS0PfxonywgW7Kl4OeVl11YtNz2N99+F2efd1FR+PGqyy7MlqtOhocecUzRZabZYdMstiXjyahRo+KMsy8oBCCX775s7Lzj9tF9uWUr47GfAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQqAMCdSL4uN75PxfNZHj9ngvHjj2mzz5Weqtq8DEd88Oli2SHlg0+rtKpYTx5bHEQrKL76NfxU2PAd5Pjkx8nR5sW9aJ7p0ax9CINYo/rR8Y7382YnfLpI9vGSp2nLw9aNviYZox8+PA25WZi7PvpxNjtlpFFp33x2HbRvVPDCoOPTx7eNlbpOmMJ0hR+7H7q8KLj796/dWy4woxlt+vAa2OuXuLsBB/PveCSoqWsUwhox+23iZtvuyPe6Ne/0N60rOwBf/trufZ/MGBgPPTwY4Xn02x/h/5j/8Lj6gQf00HTpk2LIUOHxeDBg+PXMb/FYp06RefOi8fw4T9ny26XbCmAmUJjJZvg41y9tWZZeXWDj2kp9cMPPTgaNpwReq5O8DEF4i4+/+xCoLGkcSNHjooTTj49Jk2aMdbtufufY8vNN4207+jjexddR5oNskOZgG0qcMElVxQttXzZRedl4cSaDj6efcYp0aXzEoU2jR49Oo48tng58DNOPSmWWrJrjbS/qndImvnyjrvurnAWypI6Ljz3jELoND1XNvi41RabxR677VrulPfe/1A89cxzhefTjI633XRt9rjs+JXqWHftNcvVcf+Dj8SAjz8pPH/A3/aN9f7UM155rW/8353/Kqo7BSdTSLb0lmbXfOW117PZK8sGrKtqpBwBAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECCwYArUieDj324dFc8OmlDowWM2aRHHblMcsEg7Z7UMdOnuLz2DYtng4637tIqtVy0/m2TJ8SPHTo39bxsV/b+dEfiZ1a1134Ft4k/dpy+rXTb4eO0eC8fOa5QPcKayq50xPH4aW36Wy7LXmJa3fv3k9uWasPv1I+P1ryYWnq/suhbMl8fcu6rZCT6WDQwe9Pf9omfPdeKU08+OH34cUu3GppBWCmuVbNUJPqZg2e13/numy+2Wbkyaye28s08vPCX4WO2uqrEDqhN83GjD9WO/ffYsmmUxNaQ6wcd11l4zDj7w7xW2v2xbNk7n23evLCh36eUzlo2uzsWXzHRak8HHjh07ZOHNsttJp55ZNIPqSccfHct3X65G2l+da05lv/v+h3j62eei7xszAtAldZR9nZcNPl5+yfnRtk2bcqesKIB63dWXRZPGTeKAgw+rbhOz8jtuv23svON28Z/7Hoynn32+UEdaojwtVW4jQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECFRVoE4EH895ZEzc+PqMpZvTUtUfnt0x6s9YNTXzmjh5Wgz6cXI5u+cGTIjLXxxbeD4tL33XP6YHRcoGH186tl0s16lhhf5pKeudrhsRI8dPq2r/xKyCj7NafvqQO0bFYwNmhD0v2rll7POn5uXCnVuv0CRu3X/68rKlt3/836joM3DG8YKPVe6yKhWsbvAxLQ980qlnFdXd+4RjsmVfywYJq9SAiCgb6Kpq8PFfd98bz7/4clVPE4KPVaaa6wXLhg3TLHpLL7nk9PPWqxddOi+e3VMpLJeWPa5oq07wMQUZU6Cxou2lV16LO++6u7ArzS6Zljp+9fU34vY77poti6MOOyRWXXXlGp3xcbVVV44jDzukXHvSMs5fDf668HxJ8LEm2j9bFx8Rv/wyIq669sb49rvviqq48tILonXr6eN86eBjmpHzlhtmHjI98JAjimblPP+cM6Jxo0Zx3EmnzlYTN9tko9hnr93j2htujnfefa9Qx7577R6bbrLRbNXpIAIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQKBuCtSJ4OMH30yKba4ZUdTDZ267UBy4cYsq9Xqvm0fFc5/NCAEetkHz6L1Dy+zYssHHd05tH51az1gWtvQJDr1jVDxaKoyY9nXv0DBW69IwW/L6gyHlQ5ezCj7ef1Cb6Lnc9Nkgy27bXPZLUX3X7bFw7LRGs3LBx7+s2jSu3KdVueMFH6t0a8x2oeoEH9OS0meee2F88823hfOlwNKN114RafnZo4/rHSNHjSrsW3TRRWKhFpXf2ynctveeuxWOq0rw8aefhmdLFJfeUltW6L5cNGjYID797PMYN2580X7Bx9m+TWr8wLLBx5IZ+KpzouoEH7fZaovY7c87V1j9/Q8+HE889WxhX8911o6DDugVb7/z37juxlsKz6d7PN2rVdlS0DIt9V6TMz6us9aacfBB5WetPOvcC2Pw198UmlUSfKyJ9s/sWtNY8MmgT+PZ51+KocOGxflnnx7169cvKj5+/Pg47Kjji2ZjPeHYI2PFFZbPypWd8fHm66+Kxo3L/zsyceLEOOjQI4vqvuKSC6Jho4bZUtelt27LLF2V7olNNlw/m6X2jjv/FS+/1rdwzOzch1U6oUIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIDAAitQJ4KPqfc2uvDn+PznKUUdWZXw44NvjY8j7vu16Lh/7986NlqhSfZcVYOPU6ZGdDtpWEycsfp03Llfq9hspRnLYk+ZErH+hT/HNyNntHNWwcdD128ep+w4PYBZehszfmosf9rwoucePqRNrLVMY8HHnLyUqxp8nDx5ctx7/0Px3AsvF
bW8ZOa09OT5F10Wn33+RWH/Qfv3ip7rrl3hlY4dOzZazCQUWTb4WNFsfc88+0Lcc98DhbrTjIGXXHBO0XLI/fq/HTfdcnuhjOBjTm66iJjXwccUwr3w3DMrBCi7VPT2224Vu+68YxYmTKHCkq3lQgvFNVdeUmEdEyZMiAYNGkbDhsVh8/kZfKyJ9ld0sT/+OCTOvfCSomDxzGbUPPKYE2P0rzP+3TrkH/vH2muukVVbNviYZtlMs22W3T74cEBccfV1RU/fdtN10aBB/XJ1lJ5RsvQBKaj5+++/R7NmzYrq6fPkM/HAQ4/MdIwoXTh5Ltm1S7kl1/PzqtISAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAYH4I1Jng44sDJ8S+/zdjVrwS7GM3bRGHbNYimjUqXvd61LipkZbI/s9/fy/ql7W7NIqHjmhbeK6qwcevh0+JP130c1Fd31zUMRo2mHHewT9NifUuLi4zq+Bjquy2fVvFVqvMCE+Onzgtdrp6RAwYWjx75Bfnd4xmjesJPs6PV1kF55xV8DGFHYf9NDyb4fGeex+IMb/9VlRDmgHvkgvPibZtpi+3XjaM2Lx5s7jo/LMjBcZKby+/8lrccdfdsdIfV4w/77JTdO3SuWh/2VBcKnfsUYdnZSZOmpQtcXvVtTfEe+9/WDiuJKxWuqLrb7ot3nr7ncJTgo85uenmQ/AxXXla6joF9Epvd/7rnnjp5VeLniuZMXHKlCnZ8u2TJk0q7E/363bbbFlUfuy4cXHG2efHb7+NjZ133C423miD7B5N2/wMPtZE+yu6Y9Jr8J9HHFvkkmZbPe+s07Jl60u29z/8KK68+vqiKk4/5cRYeqnpS5qXDT6mOs454+RYdNFFC8cMHTosTjvrvKJzdencOc4+4+SszMWXXRUffzKoUH757svGiccdXRROTKHHG26+PRsLNt14w9hhu62jVavpswt/+933cfpZ5xW1cc/d/xxbbr5p0XMPP9onHn38iWws23H7bWL99XpGkybT/+jARoAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECNRtgToTfEzdfMFjY+LaV8eV6/HG9SM2X6FJLNWuQYyZMC0++mFyDPhxUtHsjCUHvXFi++jaYcbsYlUNPk6cPC2WOumnonNfsFPL2Ltn82hQP+Krn6bEPreMLJrtMRWuLPiYymz3hybRs1vj+OnXqXHvO+NjyJhS00pGxK6rNImr922dnfvhd8bHYf+ZMROYpa7nzwBQNviYwowlS9aWDnxV1LqjDjskVl115cKuNIvjP488rqho06ZN4y+77pSFnYYN+yk+/GhgvPFm/6Iyf++1b2ywXs/Cc32efDoeeOjRojIptJhmdRww8JO47aZr474HHo4nn56xPHGb1q3jjFNPjNatW8fUqVPjsSeeikce7VOujvPOnrE8dgq1pVngSrarLr8oWi28cPawssDa/OmtBees83rGxxK5rl27xJo9Vssevv3ue0XLtqfnUlj3uqsuKwTn/nPfg/H0s88Xwa++2qqxycYbRJPGjeOTTz+Ll156tWiJ99J1VHYf1cQ9OLOlrlOj57T9M7vjHn7k8Xi0z5PldvdYfdVo2bJlfPDBR0UmqWAaW66/+rJCYLBs8DGVSeHHdddeM1I/pcB1mrW17Di0/9/2jfX/NH28GDDw47j0imuK2pFm99x15x2ifbt2MXjw1/HGm2/FF19+VVTm/HPOiMU6TQ9Y9j7trBgyZGjR/j+uuEKs0WO1mDhpYrz/wYCicGUqmNp31mm9F5wXpCshQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIEZlugTgUf03LTh945KvoMnDBbYJftsnDs0bN4yc6qBh/TCTe56Jf4dHjxTIwpdNmiSb0YOX5ahW2qSvBxVhfTtU2DeOaYttGyWf2smODjbHV9jR9UNvhY1RPMbGnbTz/9LC645IqqVpMFDS++4Oyi2dM+/eyLuODiy2Zaxx233hAfDhgYl195bbkyKXQ2YcLESLPdld3M+FjlbpnrBedX8HFWF5aCeWlGwtIzkKb76LIrry0XfJtVPb323Ss22nD9rMj8Dj7Oaftndp0pMHzqmefGzz//UuV75S+77hzbbr1FoXxFwcfKKltrzTXikIP+XjSjY9nlqiurI4UzDz/0H4Vio0aNitPOPK/cjLazqueMU0+KpZbsWtmp7CdAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgTqgECdCj6W9OddfcfFSQ+PqXL3pvDg//29dXTv1LDcMdUJPn764+TY5PJZB1ZSEHJiqQkbZxV87Niifvw0tnh2x9INTPufO65ttG85Y4ZKwccqd/tcLVjd4GOaTe2wgw+MJZZYfKbt+mTQp3HRpVdW2u60bOzJJx0bnUotbVtyUNmlrEtXloKPabvi6uvigw8HzPQ8KchWOgAp+Fhpl8yzAvM6+JjutbJLtZe92N4nHBPdl1u2nEG6h9Ksgum+rmzbaovNYo/ddi0Um9/Bx9SQOWn/rK43Le192pnnlpvZsaJj9t1z99h0042KdpUOPpbMNDurWWZXWL57HHf0EdEgTU1cZnusz5Px0COPV9Y90W2ZpeP4Y44ot0z1iJEj49Qzzolx48ZXWsdB+/eKnuuuXWk5BQgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAIG6IVAng4+pa78fMSXuen1c3NZvfIyfVPFsiynwuMtqTeOoLVtEwwb1KrwjVjtjeFH48P0z2keHUkHDsge9/dXEOPzfv8Z3o4tnxkuBx1v/2joeeGd8PDZgxoyUjxzaJtZcunFWzS5Xj4j+304qVPnoP9vEjS+Oi6c+KT+D5bpdG8WVe7eKJdrOCD2mAx/97/g49O4ZS13vs2bTuGj3VuWu7Yi7RsWDH8yo99Z9WsXWqzatG6+KeXCVlQUfU2CsTZs2seyyy8RGG6wXnWcReCzd3E8/+zzuuOvuckvIpjIp5LTuOmvGXrvvli0tXNE2cdKkePSxPvH0sy8UhRfTcteXXnhudsiECRPivgcfiRdefLlcFSuusHzs/ued44xzLijsK7s8bdllhq+76tJo0aJFVv6Nfv3j5tvuKBy7/nrrxv69/joPeqRunOKW2++Ivm/MWPL8z7vsFNtts2W1Lv76G2+Nt955t3DM4f/8R/RYbdXs8SOPP1G01HmabXDS5Enllj9PZdMy6fv//a+Rljee2ZbCg3f/5/54+dXXK5xNNN2Xe+/xl1ht1VWKqqjsPqqJe7DsUten9j4+C/iV3ma3/ZV1yMSJE+O5F16ORx7rU25J6nRscjnkoP1jmaWXKldV2Rkf0+v6wkuvqHAWyT/1XDv+9td9omHD8qH/kopfeuW1uP/BhysMLzZt2jS7v7bdesui2SJLN2rEiJFx+x13xYCPP6nwspfttkzstcdfzPRY2U1hPwECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQqGMCdTb4WNLPaWXer4ZP
jh9GTolho6ZG08YRKyzWKJbu2GCmYcc5vUfSOQcNmRyfDZ0UMS1ixcUbRbdFG0YFE2oVnaps8PHBg9vEOt0ax7gJ0+K9ryfFFz9NjsVa148VFm9ULvA4p212fO0SGDp0WPwwZEj8+uuYaNiwQbRv1y6WWmrJaNqkSZUuZNq0aZECSRMmTsxCVI0bNSp33KhRo+Pb77+PEb+MiNZtWke3pZeOhRaaHmC01U2BssHHFHpL4coUAPzm2+9i8FdfR7PmzSLNApoCsVXdUtDvq8Ffx88//xyTJk+JhVu2jE6LLhKLLdapqlXM13Jzq/0pqDxkyNDsf79PmJDN4rpk187lZlYsffFlg4//d8v1WShx9OjRkZa7HzNmTLRv3z4LTVb19ZzGi6+/+TaGDvspxo8bF82aN4+OHdpnYcX69cvPFFlRZ4z+9dfsHknjTtpSMLZz58WjbZs287XvnJwAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBDIp0CdDz7ms1sqbtXMgo+16Rq0lQCBBVdgZsHHBfeKa9+VzSz4WPuuRIsJECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgACBuiwg+FiLel/wsRZ1lqYSqIMCgo/573TBx/z3kRYSIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAEClQsIPlZulJsSgo+56QoNIUCgAgHBx/zfFoKP+e8jLSRAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQqFxB8rNwoNyUEH3PTFRpCgEAFAoKP+b8tBB/z30daSIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECFQuIPhYuVFuSpQNPj55eNtYpWuj3LRPQwgQqNsCZYOPu+y8Q+yw7dZ1GyVnVy/4mLMO0RwCBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIEBgtgQEH2eLbf4cNHHytJgydca5mzWuN38a4qwECBCoQGDKlCkxefLkwp7GjRtHvXrGqTzdLBMmTCg0p0GDBtGwYcM8NU9bCBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQJVEhB8rBKTQgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgEAeBAQf89AL2kCAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAhUSUDwsUpMChEgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQJ5EBB8zEMvaAMBAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBQJQHBxyoxKUSAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAjkQUDwMQ+9oA0ECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIBAlQQEH6vEpBABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECCQBwHBxzz0gjY04YoNAAAgAElEQVQQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECVRIQfKwSk0IECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIBAHgQWqODjjS+MjXOe+i0PrrW+DadtvVAcvGmLWn8dLoAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIEFiyBBSb4KPRY8zem8GPNm6qRAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBOZMYIEIPgo9ztlNMKujhR/nnq2aCRAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQKD6ArU++Cj0WP1Or+4Rwo/VFVOeAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBOaWQK0OPgo9zq3bony9wo/zztqZCBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQGDmArU2+Cj0OO9va+HHeW/ujAQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBQLFBv6NCh06AQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBGqDgOBjbeglbSRAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQygVq71LX+I0CAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBOqegOBj3etzV0yAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBGqtgOBjre06DSdAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAnVPQPCx7vW5KyZAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABArVWQPCx1nadhhMgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAgbonIPhY9/rcFRMgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAgVorIPhYa7tOwwkQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAQN0TEHyse33uigkQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAQK0VEHystV2n4QQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAoO4JCD7WvT53xQQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAoNYKCD7W2q7TcAIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgUPcEBB/rXp+7YgIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgUGsFBB9rbddpOAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQqHsCgo91r89dMQECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQqLUCgo+1tus0nAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQI1D0Bwce61+eumAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQI1FoBwcda23UaToAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIE6p6A4GPd63NXTIAAAQIECBA
gQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIEaq2A4GOt7ToNJ0CAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECdU9A8LHu9bkrJkCAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECtVZA8LHWdp2GEyBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgACBuicg+Fj3+twVEyBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgACBWisg+Fhru07DCRAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIBA3RMQfKx7fe6KCRAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIBArRUQfKy1XafhBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECg7gkIPta9PnfFBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECg1goIPtbartNwAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBQ9wQEH+ten7tiAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBQawUEH2tt12k4AQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBCoewKCj3Wvzyu84jvuuKPo+V69epEhQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQK5ExB8zF2XzJ8GCT7OH3dnJUCAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAIHqCQg+Vs9rgS0t+LjAdq0LI0CAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAwAIlIPi4QHXn7F+M4OPs2zmSAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBOadgODjvLPO9ZkEH3PdPRpHgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAv8TEHx0K2QCgo9uBAIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBCoDQKCj7Whl+ZBGwUf5wGyUxAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIDAHAsIPs4x4YJRgeDjgtGProIAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQILukCug49vvvlmvPrqq1kftGzZMg455JAFvT/m2/UJPs43eicmQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAgWoI5Dr4ePvtt0ffvn2zy+nevXuccMIJ1bg0RasjUNeDj/37vxUjRozIyJZeeqnsfrMRIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAQP4Ech18PPXUU2PIkCGZ2mabbRZ77rln/gQXkBbV9eDjPvv2ir5938h68+ijj4wjDv/nAtKzLoMAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQILlkBug4+TJk2Kgw8+uKDdq1evWH/99Rcs/RxdjeCj4GOObkdNIUCAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAwEwFcht8/PHHH+O0004rNLx3797RrVu3anXlqFGj4ueff44GDRrE4osvHo0bNy53/JQpU2LatGnZ8/Xq1cvKlt7SvlSmZKtfv36k/5XeUkgztTcd26lTp3J1lJQtfa5UNtWdjhszZkx07do1mjdvXqg27Rs9enSMHDkyGjVqNMt6q4Uyk8LzIviYrr/EMl1/+l+69i+//DI6duwYiy22WLnWjR8/Pr76anA0bdo0llyy60xtK7qsZPftt99Fhw7tM7/Uv2W31HfJeq+9/xrvvvvfbPfB/zgwm/UxlU/2NgIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBDIj0Bug4/vvvtuXH/99QWpq666KhZaaKFK5VKI7e23345nn302Bg8eXFR++eWXjzRzZIcOHQrPn3XWWfHtt99mj7fccsvYbbfdio5Jdd14442F54477rhYYYUVssfffPNN3HPPPfH5558XHdOzZ8/Ye++9s7Be6e3444+PESNGZE9ttNFG0b9//0jBvrQdcMABse6662YhvHTOJ554Ir7//vui41dZZZX461//Gq1bt67UoboF5kXwcfc99oq33nona9r5550d7/73vXjwwYezx9tss1Vcd+3VhWY/99zzcfkVV8agQZ8VXcoGG6wXp55yciy7bMUh2BSsvOWW2+LOO/9fDB32U+HYli1bxrbbbBUnnnh8wS+V7bbs9L6c2fbF559UK2xZXXflCRAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQKB6ArkNPj7++OPxyCOPZFfTrFmzuPbaayu9sqlTp8a9994bzz///EzLLrzwwnHyyScXwo+333579O3bNyu/2mqrxWGHHVY4NoUQzzzzzEIAsUuXLnH66adnMwG++uqrceedd870PMsss0wcffTRWdvTlgKOpesue+Cxxx4bK664Yjz88MPRp0+fmdab6jvppJNiiSWWqNSjOgXmdfCxbNtKgo8pjHjuuefHHXfeNcvmX3HFpbHTjjsUlfnll1/i8MOPin5v9p/psYsu0jFuufWm+OMf/pDNPin4WJ27RFkCBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAjMf4HcBh/TbI9p1se0rbTSSnHUUUdVqlV2lsg//OEPsd5662VLRv/nP/8pHL/11lvHn//85+zxM888E/fdd1/232m55QsuuKBQbuDAgXH55ZcXHqcg4x//+Mf4+OOP47LLLis8n0KIq6++egwfPjz69etXeH7ffffNZnZMW5od8uyzzy53DSnImEKR55xzTvb/559/fqHMqquuGmn2yLQc9AMPPFCYHXKDDTaI/fbbr1KP6hSYn8HHNBvjhhuuH9dcfWU8+tjjcdRRxxaavvzyy8UWm28eY377LR544KHMomR7+aXnsiXCS7ZTTjkt7r7n3sLjNDvkOmuvHV9+9VVhZsm0s0uXJeK5Z5+Ohg0bxr/vvidSYPbMM88p4jrzzNOyJc333mvPckubV8dVWQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBCoWYHcBh9LLwu93Xbbxc4771zplaclqdMy0WlLsycec8wx2eyMaUtLRz/00EPZf5cOUn744YeRltEu2W6++ebC0sYXX3xxfPrpp9
muNINj7969Y9KkSXHKKacUlqzu0aNHHHTQQVmILm0vvPBC3H333dl/pxkizzjjjOy/33zzzbjlllsK50lt2HPPPWORRRYpPFd6tsfu3bvHCSecUNhXesntFJa85pprCtdWKUwVCsyP4OMpp5wUu+/2l0jBxzS7Zgp+rrPu+oVw4yabbJQtf12yZHhaknz33fcqLGFdennsgQM/ju2236lwpcccc2Qcftg/C49fffW12K/X/oXH55xzZuyz916Fx/vs2yv69n0je3z00UfGEYfPOLYKfIoQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAwDwSyGXwseyy0AceeGCss846lZKkpY7HjRuXBQJbtWqVBepKtjfeeCNuu+227GHbtm3jkksuyf572LBh2dLXJdull14abdq0ia+++irOO++8wvMlS1GXriftvPrqq6NFixaFcr/++mu2xHXJlsKOaebABx98MJ588snC8zfccEM0bty46Jr+/e9/x4svvpg9l673gAMOKIQbk8mgQYOygGD63yqrrFIIW1YKU4UC8yP4OPirz4pa9ka/frH33jNmsnzu2aeiW7dlisrcd/8DceKJM/prwEfvZf5XX3NdXHHF9ABr6vf/vtu/nM/RxxwXjzzyWFamR4/V44H7Z8wCKvhYhZtEEQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECORAIJfBx6+//jpb+rlkO/3004uWNK7M7ccff8xmahw1alSkIGIKCr722muFw0rPpjhlypRsxsaSLc3q2K1btyi91Payyy4bJ510Ulbk9ttvj759+xbK9+rVK/vvkpklJ0yYUJjxMT1/0003ZQG8K6+8Mj766KPCcSUhzNLXUjZUmc6bApBpdsh27dpVdtlztH9eBx/TctOvvDw95Fmy3XPPf+LkU07PHqYZNp9/7qly1zRs2E+xzrrrFZ5/+qnHI/Xn4UccFX36TA+WHnDA3+OUk6f3V+ntueeej4P+cWjhqS+/GFRYxlrwcY5uHwcTIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIEBgngnkMvjYr1+/uPXWWwsI1113XWG541nJfPPNN1kw8fvvv58l4GabbZYtM12ynXvuuTF48ODsYQpBdu7cOU477bTC/rTkdArXpQDl4Ycfni3JXNWtZGbHww47rHBcmslx3XXXLVdFCmGmwOX7779fbl86/+67716tAGhV25jKzevgY0XLSZ9/wYVxyy23Z81Oy1zfduvNFRp1W3aFwvM333R9bL75ZrHZ5lvHl19+mT1/2qknx9//Pj2QWnpLwdMddty18FT/N/tGx44dsseCj9W5W5QlQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIDA/BPIZfDxgQceiKeemj7bX+llqWfFlGZ4vPjii4uK9OjRIzp16hRNmjTJlpou2fbbb7/YYIMNCo/vvPPOePXVV7PHu+yySwwfPrwwQ2Tp2SEnTpwYhxxySJV7q1mzZnHttddms06WXv76jDPOiC5dulRYTwo/vvXWW/HEE0/EkCFDypWp6rLfVW7k/wrO6+DjiSccFwcfPGOmzdSMiy66JG686ZasRZtvvmncfNMN5S4j+ZQOPt5y8w2x2WabxtbbbBeDBk1fOvuM00+NXr3+Wu7YssHHt/r3jQ4dBB+re68oT4AAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAgfkpkMvg4+WXXx4DBw7MXNZcc804+OCDKzVKoccUfkxbChUeeeSR0bp16+xx2cBiyQyOJZW+8MILheWpl1pqqcLsj2n/iSeeGMstt1xWNM34mGZrLNlSPan8zLa0/HWjRo3is88+i4suuqhQLM3qmMKYlW3Dhg2LAQMGxMMPP1yYLbJjx45xwQUXVHZotffnIfj4n//cG71Pnj7TZnWXuj7yqGPiscf6ZMda6rra3e8AAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQI1BqBXAYfSy8LnWZg3HbbbWcJ+ttvv2VBx5Lt2GOPjRVXXLHwuOzS2ZdddlkhFJkKffzxx5GeK7ulOlJdpbc0c2OawTFte++9d2yyySbljktLYafZHku2NJtkmlUybRUFF1OgMoUbJ02aFCksueWWW0arVq0Kx6eZH0899dTC4xTybNeuXY3eZHkIPr7Rr1/svfd+het67tmnolu3ZYqu8777H4gTTzy58NzAAe9H8+bN45prr4vLL78qe75ly5bx7jtvZqHTor475rh45JHHsqfWWmuNuPc/dxd2W+q6Rm8nlREgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQGCuCeQu+Fh2WegUglxttdVmCZBmRjz55BlhuN69e0e3bt2yY0aNGlUuvHjrrbdmAcOSbcSIEXH88ceXO0eqM808WHq7/fbbo2/fvtlTCy+8cKRlq0tmlpw6dWo8/fTT2bLapdtwzz33xPPPP58dM7MZLM8999zCTJO9evWK9ddfv3DaQYMGxSWXXFJ4fPXVV0eLFi1q9KbIQ/AxBUY32WTzGDrsp+zaNt5ow7juuqsLIdJvvvkm9thj78L+HXbYLq668vKsbJrtc6utty+YHHH4P+Ooo44o9PNLL78Sf//7gYX9F5x/Tuyxx+6Fxwcf8s945pnnssd//vMuccnFF9aor8oIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAoGYEchd8LLssdLrMWS0nve+++2ZLWx9++OGF5aBTIHGzzTbLZmZMIcUUqCvZllhiiTjrrLOK9MouYZ12rrTSSnHUUUeVU/7xxx/jtNOmL8ectnSutdZaK/vvDz/8MH76aXpoL834eOWVV0bDhg3jwgsvjM8//zx7ftddd41tttmmXL1PPvlkFpgs2bbffvtYbLHF4ocffog+faYv4Zy2imahrIlbIQ/Bx3QdTz31dBz6zyMKl9SlyxKx1VZbxm+/jY3HH38ixowZU9j36isvRufOSxQen3nm2XHn//tX4XGa1XGttdaMr7/+Jvr0ebLwfAqzPv3U41nflGylZ4xMz+211x7ZTJKHH3Zo1sc2AgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIEMiHQO6Cj6WXha4KUQoVdujQIZ544ol46KGHKj0khShLLxtdckCa8THN/FiypTIzC1w+++yzce+9987yXCXLbadZIA88cMZMgymgueqqq5Y7NgX6rrvuukJAsqLK27ZtGyeccEJ2vTW95SX4OGXKlLjiyqvjuutumOUlXn/9NbH1VlsWlRk1anQce9zx8eKLL8/02BSkvPmmG6J79+5FZcous12y881+r8cii3SsaW71ESBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgMBsCuQu+HjffffFM
888U+XLueWWW6J+/fpZ+RRIfOyxx4pmeNxoo42yoOD999+flUkzMV577bXl6t9///0Lz6VgYgoozmobOHBgPPzww4XlqUvKpiWqt9hii2y2xrSNHj06jjnmmEJV5513Xiy66KIVVj158uR49NFH4/XXX89mqyzZUps33XTT2GSTTaJVq1ZVtqlOwXkRfNxrr32j35v9s2b17n1CHHTgATNt4uuv940rr7om3n33v0Vlttxy8zjpxBNiySW7VnhsCk7+61//jv/ce18MGvRZocyii3TMZo485pijomXLlhUee//9D8YJJ/Yu2if4WJ27SFkCBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAjMfYHcBR/n9JLTstVDhw6NSZMmZYHHFBpMz5Vs9erVK3eKsuHE008/Pbp2rThYV/bgiRMnZudr0qRJtGvXrmj55Dm5llGjRkX6X2p/+/bto0GDBnNSXaXHzovgY6WNqKBACoOmpaqbNm0aiy3WqRByrUpd48aNi++++y7rl2RYlS0FJ7/99ruYOHFCdv+kWTZtBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIJAfgQUu+Fhd2hSKvOmmm+Ltt9/ODu3Ro0cceuih1a2m1pfPa/Cx1sO6AAIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBCoUYE6G3z89ttvs2WoX3nllXjvvfcKqOecc05hmeoalc55ZYKPOe8gzSNAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgACBTKDOBh979+4dP/30U9FtsP3228dOO+1UJ28Nwcc62e0umgABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABArVOoM4GH/fff/+iztpss81i9913j/r169e6TqyJBgs+1oSiOggQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIEBgbgvUyeDjtGnT4rXXXotffvklFl544Vh22WWjS5cuc9s61/ULPua6ezSOAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBP4nUCeDj3q/vIDgo7uCAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBGqDgOBjbeiledBGwcd5gOwUBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIDDHAoKPc0y4YFQg+Lhg9KOrIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAwIIuIPi4oPdwFa9P8LGKUIoRIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAwHwVEHycr/xOToAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECFRHQPCxOlrKEiBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAvNVQPBxvvI7OQECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIFAdAcHH6mgpS4AAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECMxXAcHH+crv5AQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgEB1BAQfq6OlLAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIDBfBQQf5yu/kxMgQIAAAQIECPnLI30AACAASURBVBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQLVERB8rI6WsgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgMB8FRB8nK/8Tk6AAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAhUR0DwsTpayhIgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQLzVUDwcb7yOzkBAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBQHQHBx+poKUuAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAjMVwHBx/nK7+QECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIBAdQQEH6ujpSwBAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECAwXwUEH+crv5MTIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAEC1REQfKyOlrIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIDAfBUQfJyv/E5OgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIVEdA8LE6WsoSIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAEC81VA8HG+8js5AQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgUB0BwcfqaClLgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIzFcBwcf5yu/kBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAQHUEBB+ro6UsAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgMF8FBB/nK7+TEyBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAtUREHysjpayBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAwHwVEHycr/xOToAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECFRHQPCxOlrKEiBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAvNVQPBxvvI7OQECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIFAdAcHH6mgpS4AAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECMxXgXpDhw6dNl9b4OQECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAgSoKCD5WEUoxAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAYP4L1BsxYoQZH+d/P2gBAQIECBAgQIAAAQIECBAg
QIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgUAUBwccqIClCgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQI5ENA8DEf/aAVBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAQBUE6n399dfTRo8eEwsv3DL7X7169apw2Lwt8tNPw2P8+Amx6KIdokmTJvP25FU425QpU2Lo0J+ykp06LRL169evwlHztsjYseNixIhR0bx502jTpnUu2/jLLyMjtbNduzbRvHmzXN6LP/44NCZNmhRLLLFYNGjQYN52YhXONmHChBg+/Jdo1KhhdOjQPpf9/Ouvv0Yac1q1ahktWxpzqtCt5YpMnjwlhg2rLWNOs2jbtnUuX8/GnNm5+4qPKRlzGjduFO3btzPmzCZp3t/nTJ48OYYNG15L3uc0izZtWuXyXpw+5oyNdu3a5vZ9zg8/DI3Jk/P/PifPY87o0dPf57Ru7X3ObA6JUTvGnLExYsTo7LWc9zGnffu20axZPj9blYw5nTsvnstxuza8z0ljzq+/1pbPVh2jSZPGszs0zLXjZow59aJTp465vBd/+21sjBw5Olq0SGNOvj9bTR9zmuby89/0MWdyLLFEp1x+n/P77xPi559/iby/z5k+5iwcLVsulMt+Tt+V/P77xOz13Lhxfsec9P37oosac2Z3cP/llxHZd8jpewhjzuwpGnNmz63sUXkfcyZNmv59Tv36tWHMaZ59tsrj75M//zwixo0z5szJq6Y2jDmjRk3/bNW6tfc5s9vXxpzZlSs+Lu9jzrRpEem38ilT0merxXL5GXrGmNM40mfUPOY2ateYs0j2OTVvW8prDBv2c614n7PQQs2jdet8v8/p0KFdNG2av+9zpk2bFj/+OCznY87vkcbu9B2EMWf2R4oZn61mb8yp9/LLr0/74IOBsdJKK8Qf/7h8NGzYcPZbM5eOfPbZl+OHH4bEtttuGu3bd8gGsDxt48aNj8cffzYipsWOO24dTZvmL5z51VffRL9+70SXLovH2muvnssv//r2fTu++OKrWG+9tWPJJTvn8ovoBx98PNKbkT333CX7oTNv208//RwvvPBaFircdNP1cxkU/vDDjyPvY84zz7yUvXHfdtvNsgBp3r5wmTHmROy441a5HnO6dl0i1lprtZyOOW/FF18MjvXXXydSO/MYZn7ggccj/aC91167ZKGFvG0psPfCC69nXwhtssl6uRxz0niTxp2VV14x/vCH7rl8n/P00y/FkCFpzNk80oeLvI056QelPn2ezdq1ww75HHO+/PLrePPNd7PXcl7HnNdffyu+/HJwbLDBOtGlSz7HnPvvfyz7kjevY0764PPii69noY+NN/5TrsecVVZZMVZcMa9jzovZlwXbb7959mNx/sacsdGnz3PZZ77tt8/zmPNOLLlkl1hzzfQ+J39f/r3+ev9IY2O+x5xH49dffzPmzMEbvPffHxAfffRJrLLKH2LFFZfL5fucp556MYYMSWPOFtmXf3kbc1Ko8Ikn0phTP7bffstcfrZKn1n6938312POa69NH3M22mid6Nw5r+9zHo0xY8bGnnvunMvPVukPql966fVo27ZNbLRRz1y+z3nvveljzmqr/TFWWGHZnI45L8SQIT/FDjtsmf1hdV7HnAYNpo85efwD/88/nz7mLL1011hzzVWjUaP8vc959dU346uvvo6NN+4ZSyyxeO6+z0mBgAceSGPOuP+NOU3n4F/7uXNoyZiT/jBvww3XzeW9+N57H/1vzFkpt2POk08+H0OHDo8dd9wyG7/zNuaMGfNbpDam7zzTe7F8jjlfRf/+/41llukaa6yR1zGnX6Tf16aPOel9Tr4mPkmBgAceeCx++21c7LFHep+TvzEnfR546aW+2eeBvI45//3vhzFgwKBYffWVYvnl8/k+54knns/CzOk3oTxONpG+U3zyyReiUaMGsd12+RxzPvvsq3jrrTTmLBlrrLFKLt/nvPJKvxg8OI05f/rf+5x8jTlTp06N9Fv52LHjY/fdd8rlmJO+93z55b7RoUPb2GCDfL7PKRlzevRYKbp3z+uY81w25uy009a5/CPMkjGnceOG2W9reXyf89lnX8Zbb70X3botFT16rJzTMeeNGDz429h44/Wic+f8hZnTmJN+K0/ZiPyOOUPj5Zff+N+Yk77Pyd8fYZaMOWussXIst1y3XH6fk34TSrmDnXfeZraCwvXeeee9aelH2K5dO2dfaOQx+PjOO+9ns9ittdbq/5utMF/BxzQTRAoVphdez55r5fKL8vTBYuDAQVmIa4UVlsvlj3Opfd9/PyQLpqSZM/MYQnr11X6RfhhJbzjzGEIaNWp0pIGrefPm2RfRefxHPn1JkMac9EPxUkt1yemY814MHz4iCwnncUaN6WPO25G+1Fh33XyPOR07dsi+KMhjIGDgwE/i+++Hxh/+sHw2G0Rex5z041wKFebxS6uSMadFi+ax6qor5fLNXPoBNv0gkucx5+2338v+GmjttXvk8q/b019IvvlmGnMievZcM5f/tqSw+scffxp5HnMGDPgk0kxD6Q+N0uwueRxzXnnljeyL8ryOOWm2q/fe+zBatGgRq66a3ufk7wPkjDGnayy1VOdcvs95++3/xs8/j8zxmPN7vPnmO9kvvOuum/8xJwU/8hgImDHmrJCtnJDXMSeF69MXa3l8n5NWTHj//Y9yPeakQFz6QWSppbpmfzyYx+9zSsacddbpMVtfWs2duMeMWseP/z36909jTr1Yd901cvk+J71/+OSTT2ORRTrG8st3y+WYk8Jw6f1Y+qPq1M68BQJSj6cvodNsTXkfcxZaqEWssko+3+eUjDlLL71k9gdHeRxz0g/Zaab19HpOM1PmLYRUO8acIfHJJ59ln1m6d8/7mLNiLLJIep+Tr0BAyZgzfvz47DvkNHNK3rYRI0ZG+gOKvI856XvkFE4x5szeHZR+IH7rrXcjon6su26PXL7PSb8HDRqU7zEn/UF1+n0t/VF1GnPyNnNY+o0gBaXSmLPRRul3q/yNOenf5g8++CibMXrllfP5Pufzz7/Kgh/dui2Z/cFyHt/npJBwGr/TdyVp8pO8vc9JY07644n0Gkmf//L4++T33/8YgwZ9nuv3OdPHnKGx8sp/yPWY8/vvv2d/sJXH9zkzxpyWmWMev0OeMeYslU1alc8x591sNdH0m1AeV41Nn+/TuJjnMee7736MTz/9PMu9LLfcMrn8PufDDwdmkySk31s6dszfaqIpf5X+8C3PY076fffDDwdkr5M05uRx9YkUwv366+9i2WWXirT6Uh7HnDS5zciRsz/m1Bs+fPi0tNRL+iEkjxeYPtKl9qWlXdNNkrfZHlP70geL1MYpU6bmMvSY2pgGhTTlb7169XMZQEptTEuGJ8d0H+bxh7nUxmSY7sU8fngsuRdTG9M9mccPFaX72Zgze19YlR5z0us6r/1szJn9/i19pDFnzh1L/m2pDWNO+gCety+satuYkz7k5jG
A5H3OnL+Wi9/nRC6/sKpt73PyPOak5ZGmTfM+Z05eOdM/n07x2WoOENNnqumfrYw5c8D4v+9zJmefW/L6Pqe2jDl5fp8zY8xplMsAUu36PseYM+djzpTsvaIxZ/YkS77PMebMnl/JUXn/Pif1cxq7vc+Zs36e/ruV9zlzomjMmRO9GcemMSd9/stjAKnk90ljzpz3tTFnzg1ry5gzPRPRKHdB69Lvc4w5c3Y/1p7frfL+PiffmYjaMuak+zH9rpa3P+4w5szZOFP66BljTsNo2LBBzVVcgzXN6fuceiNGjJhWg+1RFQECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIEBgrgkIPs41WhUTIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECNS0g+FjTouojQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIE5pqA4ONco1UxAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgUNMCgo81Lao+AgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAYK4JCD7ONVoVEyBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAjUtIPhY06LqI0CAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBOaagODjXKNVMQECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIFDTAoKPNS2qPgIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQGCuCQg+zjVaFRMgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQI1LSD4WNOi6iNAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgTmmoDg41yjVTEBAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBQ0wKCjzUtqj4CBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIEBgrgkIPs41WhUTIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECNS0g+FjTouojQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIE5pqA4ONco1UxAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgUNMCgo81Lao+AgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAYK4JCD7ONVoVEyBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAjUtIPhY06LqI0CAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBOaagODjXKNVMQECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIFDTAoKPNS2qPgIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQGCuCQg+zjVaFRMgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQI1LSD4WNOi6iNAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgT+P3v3AWdHWe9//Ld9s33TEwgJ6SQhgRAgoYaEpnRERFBRL169KvaKf0TkqsDVK16u9V5ERYpcSFQgSAskIaGnkEYIJKSQspuyvZf/6/ess5k9ZXfmnDnnPGf3M/fGkN2ZZ555PzPPmfI9zyRMgOBjwmgpGAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEghYg+Bi0KOUhgAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggEDCBAg+JoyWghFAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAIGgBgo9Bi1IeAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggkTIDgY8JoKRgBBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBIIWIPgYtCjlIYAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIBAwgQIPiaMloIRQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQCBoAYKPQYtSHgIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIJEyA4GPCaCkYAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQSCFiD4GLQo5SGAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAQMIECD4mjJaCEUAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAgaAGCj0GLUh4CCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCRMgOBjwmgpGAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEghYg+Bi0KOUhgAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggEDCBDLWr9/Y+f77e2X06JHmT2ZmZsJWFmvBmze/LVVVNTJjxlQpKiqUjIyMWItKyHItLS2yYcNm6ewUmTVrumRnZydkPfEUeuDAQdm+facMHlwu48aNkaysrHiKS8iy27btkMrKgzJhwlgZMmSwde2sG71mzXppbGySk08+QXJychLiEE+htbV1smXLOzJoUL5MnTrJynbevXuv7Nmjfc4oGT16hKV9zhapqqqV44+fKoWF9vU5zc0tsnHjZrOrzJxpZ5+jx/J779nd57z77g45cOCATJhwrAwZUk6fE0PnU1NTK2+//a4UFOTLlCl29zlHHTVKRo2ys8/ZtGmLVFdrn3OcFBYWWLcvHulzMmTmzGlWnuekR5/znuj52MSJx5rzMdvOZ7ULWL16vTQ12Xuec6TPGSRTpky09Dxnj+zZs0/oc2L4UPnnIs3NzbJhw1vmGLG9z9FrlrFjj7ZyX3z33ffMtdWkSTb3OW9KU1OztddW6dDn7Nq1R/
bu1T5ntIwaNdzKa6tNm96S6uo6mTnzOCkosPE8x/4+p6LigOzYscvcJ7G1z3nnna7znEmTxsvgwWWWnue8KXpeO2fOLCvv5zh9TmHhIJk82c7zHKfPOfro0TJypJ19zsaNb0lNjfY506SgYJB1++KR85xM0y/aeA/Z6XOGDh0sxxxj53nOO+9slwMHDsnkyeOlvJw+J5Yz75qaGnn77W3mHsTkyROsPJ/dtet92bNnv4wZQ58TSxvrMnqurf2iPvPTe0529jmV5jxn6NAh9DkxNnRnZ6d5bmXzeU51dY1s3Wp3n7Nz5/uyb5/2OUfJiBHDrLy20mdCNTX1MmvWNBk0yL7znPTpc3aLnueMHTvGynbeunW7HDx4SKZMmSBlZaXWnc86fU5LS6ucdNJMK6+t0qXP2bt3vxxzjL19jmZfamu1z5lucge2Pc9w+hzNvGiGyObznGHDhprPFxvzOfr5fPDg4bToc/R+jo3t3NXnvCtFRUXmvpiN7bxz527Zu7dCxo49SoYPt/M8x+lzTjhhuuTn++9zMpYuXd6pJ8V6Q0j/2BjmWrLkWdFw5sUXn2/lCWdDQ6MsXrxERDrliisuMjfWbJv0wdeLL75ibpLPmzdH8vLybKuirFjxsgnQnHnmXBk/fpxkZ9sXzvzLX/4qhw9Xy8c+dpUJ4do27d9fIU8/vUzKykrkvPPmS36+fe28Zs0GWbt2vTlR0hsuNvc5l1xygQwfPtS6i5/6+gb561+fNLvfFVd80Mo+Rx98rVxpd5+zfPnL5kTkrLPmyfjxY608EXnoob9KVVW1fPzjV5kQrm3TkT6nVM4772xL+5z1ov2OnijZ2uc88cQz8v77++Syyy4QvQCy7UsoTp+jF7ZXXPEBc2PNtkkffK1c+ar5csfcuSdZeZ6zfPlL5sHS/PmnybHHHmNln/Pgg4tFL9I+/vEPmwdgtk379lXIM88sMw83zz33LCv7HA2Prl2rfc4M8wUKG89zHn/8GRPOtLXPqaurl7/97R+SmZkhl19uZ5+jN4RWrnzNHMtz5862ss9ZtmyV6M1yu/ucRaJBn499zO4+R0NcCxfa2ue8KWvXbpQTT5xhbvLa3OdcfvmFMnSonufY9UVW/fLg3//+lDn/0jraeJ6j5w+rVr0m48cfI6eequc5ubZ9RMsLL6wSPR8755zTrf2y7YMPap9TZ+7n2Hieow+9nnlmuQwZ0nWeY+N9u9Wru/qc2bOPl+nTp1jZ5zz22NOill19zhDrrq2cPkcfhOi5mJ19zrumz5kwYZyccoqe59jY56wUve+0YMHpJrRg44Ml7XP0QfF1133Iyj5HA4XPPrvMBD8WLDjTymurN95YJ+vWbbK6z9FzCL1O1euWYcOGWBcI0HNtrWNOTrZceqmdfY4O4vDSS6+nSZ9zhpVfQtEQkt7Pqa+vl2uvtbXP2SfPPrvc9DkLF55p5XnOG2+slXXrNpsg17Rpk608z+nqc/abZ0J6nmNbCOlIn5Pzzz4n37rrFqfP0S+mn3LKiZKba+N5zovmPEePFRu/hNLR0SkPPaR9ToNce+2VVp7n6LOW555bbj6bFyw4w8o+5/XX18qbb26SOXNOkOOO0z7HvkG19P6sPgO88sqLrBywSr/I8/e/Py25uTmiz/M1nGnb9NZbW+Xll98wYTgdVMvGPuf5518UzRGde+6ZMmaMfV986+joEH1Wrnmsj370Ckv7nL3y3HMrTKZE74vZeD/ntdfWyPr1m81+OHWqrX3Ok7J/f6VceeXFMQ1YlbFly9bOyspK02HZ+LBdO6gdO3aam5PHHjvWypGQWlvbZNu296Sjo10mTpxg5YeTPsTWUfYKC4vkqKNGWnlDSB/AHjp02IyQU1paYt3NSd0X9SGndqx6EqIfpLZNWrcdO3abumnI1cbUu448U1FRaS5ybe5zdFQSHX3UxlFJWltbRUdI1Q/7iRPHW93n6LcbdDRhG29CH+lzRktpabHVfY6tN1u6+pxd5mTd3j7ngOiIFcOGDf7nw3b7RrbW85yuPmeclaOS0OcE82nfs8/R8xy7gh+6lfoFFB3Z2tbzHL2hpt9Ms7nP0f6mslL7nCFWPmzXdtYRmfXaytY+R78xvn273ec5+jaCvXv3SlFRsRnB3MbzHP3i4OHDVaKjYQDUIQAAIABJREFUcpWU2N3n2HqeQ58TzOef0+dMnDjOylFJ9A0e+nYMm6+t9ItQOrJnuvQ5ej/Htoew7vMc2/scvUGuo37YeD8nPc5zdpiRkPRhto2jkmifs23bTtGQitbRxgecTp9TXFxs3ppg93nOUVJSovdz7Ly20lFojjtukpXhGQ1I6ehmdvc5lWYEc5uvrfS6RQOudvc5O8xJnb7xxsY+RweZ2Ldvn9jc5+gbrKqqqsxITdrn2Haeo58p+tzK5j5Hv+Coo7h29Tn63Mq+QU/0mVVXnzPUPLuy7Yvpehzb3ufoqKNax/Toc0rMWxNsPM/ZvXuPGYBHrwnoc2K7L+H0OTo4kPbddl5bpU+fo6E9tbTt809H09f7OSIZovecbGxnvTerX8zT+yT61gT6HP/HtHOeo58xU6dOtPLaqmefY+t5ToVUVh4yoz3qWzBtPs+Jtc/JOHjwYGd7e7vZOBs3UHd/vQGtddQOy7ZO1Tk8tX76LQcbLx61jtopaB3Vz8ZO9Ug7d0hWlr37Ylc7d1jZqTrtrPXT9rbxA95pZ60jfY7/D3f3EvQ58fn17HOyrLxJrnWkzwmmnelz4nekz4nfsOt8Vs9z6HNi1dTzG85zYtU7shzXVvEbpte1FX1OrC3e1c4d5s0OXFvFqsj9nNjljixJnxOEItdWQSjqOQTXVvFLcm0VvyHXVvEbHrm2EisDSNxDjr+N3c8KeG4Vn6fT52hYz+7nkzy3iqelOc+JR69r2XS4b5dO11b0ObHvk0fu53CeE7ui/fdz6HPiad3QZwUd5pqA85zYTOlzYnMLXSre51YZhw4d6gymKpSCAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIJFaA4GNifSkdAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQCFCD4GCAmRSGAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAQGIFCD4m1pfSEUAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAgQAGCjwFiUhQCCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCRWgOBjYn0pHQEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEAhQg+BggJkUhgAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggEBiBQg+JtaX0hFAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAA
AEEEEAAAQQQQAABBBBAIEABgo8BYlIUAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggkVoDgY2J9KR0BBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBAIUIPgYICZFIYAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIBAYgUIPibWl9IRQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQCBAAYKPAWJSFAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIJFaA4GNifSkdAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQCFCD4GCAmRSGAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAQGIFCD4m1pfSEUAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAgQAGCjwFiUhQCCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCRWgOBjYn0pHQEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEAhQg+BggJkUhgAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggEBiBQg+JtaX0hFAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAIEABgo8BYlIUAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggkVoDgY2J9KR0BBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBAIUIPgYICZFIYAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIBAYgUIPibWl9IRQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQCBAAYKPAWJSFAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIJFaA4GNifSkdAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQCFCD4GCAmRSGAAAIIIIAAAggggAACNgncf//98sgjj8hnP/tZufDCC22qGnVBAAEEEEAAAQQQQAABywT27Nkj3/3ud2Xs2LHywx/+0LLaUR0EEEAAAQQQQAABBBBAAAEEegoQfGSPQAABBBBAAAEEEEAAAQT6qcDkyZPlwIEDMnfuXFmyZEk/3Uo2CwEEEEAAAQQQQAABBIIQ+MUvfiG33nqrKWrFihUyffr0IIqlDAQQQAABBBBAAAEEEEAAAQQSIkDwMSGsFIoAAggggAACCCCAAAIIpF7gJz/5ifzyl7+UH/zgB3LDDTekvkLUAAEEEEAAAQQQQAABBKwVWLNmjXzyk5+U4447Th566CFr60nFEEAAAQQQQAABBBBAAAEEEFABgo/sBwgggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAgggkDYCBB/TpqmoKAIIIIAAAgggkDiBzs7OxBVOyQgggAACCCCQUIGMjIyElk/hCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAgjYJkDw0bYWoT4IIIAAAggggEDAAoQaAwalOAQQQAABBNJMgGBkmjUY1UUAAQQQQAABBBBAAAEEEEAAAQQQQAABBBDoU4DgY59EzIAAAggggAACCKSXAEHH9GovaosAAggggEAqBAhDpkKddSKAAAIIIIAAAggggAACCCCAAAIIIIAAAggEJUDwMShJykEAAQQQQAABBJIoQLgxidisCgEEEEAAgX4sQACyHzcum4YAAggggAACCCCAAAIIIIAAAggggAACCPRjAYKP/bhx2TQEEEAAAQQQ6H8CBB77X5uyRQgggAACCNgiQAjSlpagHggggAACCCCAAAIIIIAAAggggAACCCCAAAJ9CRB87EuI3yOAAAIIIIAAAikWIOyY4gZg9QgggAACCCAghCLZCRBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQRsEiD4aFNrUBcEEEAAAQQQQMAlQOCR3QEBBBBAAAEEbBMgAGlbi1AfBBBAAAEEEEAAAQQQQAABBBBAAAEEEEBgYApkrFr1aueWLVtl8uQJ5k9WVpZ1EitWvCwVFZVyzjlnSHl5mXWjDDQ1Nctzzy03buedd7bk5uZaZ7hr1/uyevV6OfrokTJr1gzJzs62ro6rV78pO3bskjlzTpCjjholmZmZ1tXxH/9YKrW1tXLppRdKXl6edfU7ePCwrFr1mhQXF8npp58sOTk51tXxrbe2iv6ZMmWSTJ48nj4nhhZqbGySpUtXiEiGnHfeWVb2OTt3vi9r1mifM0pmzZpuaZ+zTnbs2E2fE8M+6Cxy8OAhWbXqdSkpKZLTTrOzz9m8eavoec7UqZNk0iQ7+5zly1+WyspKWbDgTCkrK+0+z4kWetTPR+3f9bNcz9s0fJDIAILWo7W11TS7jec4Wq+Ojg5pbW2TrKxMK/sbrWNbW5u0t3eYtsvMzIjjyEvcoi0trdLZ2WHlOY5utbMv6v5u4zmO1rG9vV3a2tolOzvLynMcraMezx0dnZKbm5PQviPWPZU+J1a5nsulR5/TIp2dInl5sV0/17TUSGtnWzBg0UrRCuqUYWe/7fSN8Z4HZEjX/2VKppgt7RDpbO2Uzo5/bn8EH6/rXLbsJTlw4IAsXHiWlJaWWNfvNDQ0yvPPv2juPyxceKaV5zp6n2Tt2o0yZsxomTlzmpXnOq+/vlZ27twtp556kowaNcLK+zlLljwnDQ31csklFyT1XEc/16IdL3v2H+4+upzz2cyMDMnOzTbHpHsaUl4kebk97/HU1DVKXX2Tp36wqDBfSooG9Zi3uaVVDh6u87R8bm62VO7bI1u2vCvTpk2RCRPGmXMd9zb0VdDoEeVhsxw4XCstLd76ci8Gej6rlrl5OZKZ0fO+YhAGQ8uLw7bBj0FpUZ688MJKY7dgwRmmzwnaoLd28GLgnM9mZmWY/sa9L+p+EK9BEPvB+jc3iN53mjt3jowcOVzqGpo9HwuR9iM/x0IkQzUP3Q+am/U8p1Py8nM9GfrZDyIZ+ukP1KC6qlpeeeUNKSsrk7lzZ4t+5HvtD7waRNsXo+1HoQbmfLatXbJysiQ7q+ezjCAMQvvUWPYDPZ4PHDhkngmVlBTL3oqqvrpC83uvBr0V5sVA90HtF/UTJScvp8e+mKxjoa/94L33dsq6
dZvkmGOOkuOPP06qahs9fy54MejN0KuBuYZu7zCfz1mZR56fJutY6Gs/0HZ+8sml0tjYIBdffL40t3ZY1ydGO8+J+VjI0CcjYj7ry8sKJTcn/uedmza9JW+/vU2mT58q48ePtfKejtPnnH/+2VJcXGzdtVVdXb288MIqycnJlnPOOd3Kayunzxk79miZMWOqlddWr722RvSZ/rx5c2TEiOHWXVvp8fyPfzwvjY2NcvHF5yX12srTh6yIyZS88spqkyk59dTZVt5H3rjxLdm6lT7Ha5tGmi8d+pzt23fIm29uFpv7nFdfXS27d++RefNOlhEjhlna5yyVpqYmc55j4zNK7XNefnm1DBlSLqeccqLVfc6MGcfJscceY+V5jt6f1dzB+efPj+k8J+PZZ1/ofOONdSaYcsIJM6zcWR577CnZtWuPXHbZhVbeQK2vb5BHHnnMPLT58IcvlsLCwnj66YQsqx+ey5e/JOPGjZEzzjjVyhMRPSHWQNz8+afJxInHWnnCef/9j8rhw1XyiU9cbW5m2Dbt318hejO/vLxULrjgHBk0qOdNbRvqqwFX7XO0v9F+x8YPqL///SnzIX/55R8wN1BtC+E6fY6251VX2d3nHHvsGDn9dFv7nJXy1lvvWN3n/PnPj0hVVbVcf/1HTKDZtmnfvgp58snnZPDgUjn/fFv7nHXy+utvyuzZM2TmTFv7nH/I7t17u/ucaA9Gtb/U0L2tgS/b9k/qgwACCCDQvwWqmqukub2lf29kCrdOz0dyJUekXaSjpSNqTfoKQP7tb0/K++/vkyuvvEiGDx9q3bVVbW2dLFr0hKmX1rGwsCCF6pFXvWXLO7JixSsyceJYcyPaxi9hPv/8CtmyZZsJcumDYhu/bHvfff8nNTW15n5Ooq+t9AsGDU3N0tjUIk3NrTJ8cInk5YV/MXXX3oOe97dIZdTUNkh1XaOnMkqLBklJcc/9u7m5VSoO1XhaXgNCO7dvM1+qnjNnpsyYMc18icLPNowZNSRsXRUHa0TDRl4mGwyGDykJq6ofg9LCPNPn6Jdkrrjig6bPGWgGQewHK1e+bMIpGljXhzYNjS2ej4VI+5GfYyHSsaQ7hdf9QI+lSPuRn/0gkqGf/kANDhw4KE899bwMHTrYhPYyM7M89wf9xSC0X45lP/jrX5fInj375UMfukiGDRsq77sC7b31a7bsB0EYhG6n32NBv7C8cuWrMnHiOBPyqa5r9vy5EMSxYINBqKHf/kCDj3oPWc9rP/7xD0unZA74PlFN9ZiOdP4V7djU4MfatRvMIAkaCtDzHNumxYufkL1795tnQkOHajjFri/o1dTUyKJFS8y9a322ZuO11ebNb5s+RwdImDv3JCuvrXSgJX2mf+65Z8u4cceY80abJg0+/vnPj0p9fb187GNXJfzaKpZt1+e7Tz/9grkHoV/CHDQoP5ZiErqM0+ecfPKJJoRr4zOfI33OJeY8p6/7PwkFi1B4dXWNLF68xOQMNENkc5+jg89pCNfG+znPPrtM3nlnu5x33nyTI7JtkLyuPucR0S8uX3fdh6zsczTH9swz2ucMM9eoNvY5GgZft26DnHLKbJk+fYqVfc6iRY/L3r0V8uEPx9bnZOzdu7dTdxQ90LQRbOu0tB/TD08dVbGkpMR8U8S2SQ+4uro6aWvrMIEzGw1bWlpMh6CdVVFRoZV11G+H6Eh2+sFkY8ev+52O9qjf2B0yZLCVhvpNWA3F6aTBTBv3xebmZrMvpkOfoyOS2PjAJp36HPXTY9rGfZE+J/5PU6fP0UGQbPyWaddNrq4+Jz8/T/LzbT/PKY7Y5+hFb0FBgXUXHPHvQZSAAAIIIIBA7AIEH2O387tkXkauSJtIR6v/AKRzP8f2aysdldk98rZfo0TO79zPSY9rq8KYR3FNpKGWraFHtUzk/ZzW1napa2yS+vomcY+XGm9ISOtvQ+hPRyvsurbK/+e1lfewl25DEIG30ACDn7BXEOHPeIOPR40oN/eQ3X2On4CLDftBvAZB7Aft7W1mxA8dfEBHjvazHxB87OpPsrIyzD1k536OjrzqNQgdb5/Wn0J/OtKQ3ncqLS01wRS/ob/Qzz4//UF/Cf2FnudUHqol+OjjSwHOftB1ntNqRhqqrWsc8MFHfRPH3soqyc7KksKCPCku7PuesD6D1ucFXec5eVY+z9A+R+up1y22heG0P3OeW7W3d0pZmX2j/Xc9K2gx7WzztZWeb7vPcxJ9nRRL+e4+x8Znf0eeW2WYkJSNdXT6HM3m6PNyG+vo9DmafbEtDJdufY7mm/QZn43t7PQ5mh+ycbCqI/dzus5zbDTUt9E1NOi1FX1OLJ8pzjLx9jkZhw4d6ux6laK+JjGeqiRuWedVjzbuyM5WU8dg2r+3VwAFs4b4SqGd4/NLl+OFdh4Y7axbSZ8Tf1tzvMRnqH69GerFkN5wY0IAAQQQQACBngIEH5O7R+jrd3MkRzoao4cfnRq5791wrhhMO6WL40C9b9fe0SG9vWY2WsjHa0BG9yJbAm+h19B+tiGIwFu6Bx/VIPR49hN0smU/CO3ZUrEfuPdFgo8SU/jTvS/GMtphrPtBfwo+hh7PXo+F/mQQxH7gdvTTJ/aX8GeoYSwGbsOB1CfmZmfLiGGlYSfcVTX1Ulvf1P3zrMxMKS4aZAKQvU08K4j/2iVdrlt0S22/drG9fulgmA51pJ3j63foc+Lzc+c22Bfjs0yXfdHmdo43t2GCj/E1I0sjgAACCCCAAAII+BFwToKjLdP1zaDII0D6WQ/zIoAAAggg0F8FCD6mpmV19Mf2pnbJ6Oz7m7O230xLjSBr7W8CdQ3NUl1bL/p6696mSOEMrwEZLZfAGwbsB11HWLzHAiM+RjYk+Nj1WlxGvcSA/SC9joVoIeK9FVXS1t4edmqWl5MjpSWDRJdjQgABBBBAAAEEEOg/AgQf+09bsiUIIIAAAgggkAYCXkKPJSX66qmsNNgaqogAAggggEBqBAg+psZd10r4MXX2rNkeAX15zuHqOqlvbPZUqUhhq+qaBk/L6kz6msbQVxrqq/qamts8lZGflx32GnJ9DWR9g7f6Z2VnSVFBXti6/GxDaUlB2PIaHG1vCw8mRNooDNgPgjgWIu1Hfo6FSMeS1svrsRDtWPJzLEQ6lvz0BxhEPpbYD0QwwED7s7TYDzK63uSk905DR3FsaW2T/Qeqez0/Kisp7HP0R08nWMyEAAIIIIAAAgggYIUAwUcrmoFKIIAAAggggEB/F+gr8Ohsv470mJPDN4/7+/7A9iGAAAIIxCdA8DE+v3iX1vBjR1OHiId3iDDyY7zaLG+jQE1to1TX9R1cLBiUK4PycmVQfq7Vr9Oz0Zg6IYAAAggggAACfgX0Fdf6quu+pqKCfCkvLexrNn6PAAIIIIAAAgggkAYCBB/ToJGoIgIIIIAAAgikr4DXwKNuYUFBgeTn56fvxlJzBBBAAAEEkiRA8DFJ0L2sxoQfGzs8VYTwoycmZkozgf2V1dLSFj7iYlZWpujDdP2Tmdn3a+HTbLOpLgIIIIAAAgggYLWAjvq
oo3LX1Tf1Ws+C/FwZUl5s9bZQOQQQQAABBBBAAIG+BQg+9m3EHAgggAACCCCAgG8BP4FHLVxHedTRHpkQQAABBBBAoG8Bgo99GyV6juyMLMlszZTONg/DPoow2l2iG4Tyky7Q0dEplQdreoQfS4sGSXFRgWSQd0x6e7BCBBBAAAEEEEDALdDa2i41dQ3S0NQSESY3O1uGDimWrMxM4BBAAAEEEEAAAQTSWIDgYxo3HlVHAAEEEEAAATsF/IYedStKSkokOzvbzg2iVggggAACCFgmQPDRjgbxM+qj1piRH+1oN2oRnICOKFR5qEays7KkrKRA8nJzgiuckhBAAAEEEEAAAQTiFqhvaJZD1XU9ytHQ47AhJYzOHbcuBSCAAAIIIIAAAqkXIPiY+jagBggggAACCCDQjwRiCT0y2mM/2gHYFAQQQACBpAgQfEwKc58ryZAMyWnLkc42b6+81gIJP/bJygwWCug5frR9V0cTysnJsrDWVAkBBBBAAAEEEEBABZpb2uRwVZ20trcLoUf2CQQQQAABBBBAoH8JEHzsX+3J1iCAAAIIIIBACgViCT1qdYuKiiQ3NzeFNWfVCCCAAAIIpJcAwUd72svvqI9OzQlA2tOG1KR3gdq6RmlobJHyskLJzWGEdvYXBBBAAAEEEEAgHQXa2zukqrZeykuKGOkxHRuQOiOAAAIIIIAAAlEECD6yayCAAAIIIIAAAgEIxBp61If+5eXlAdSAIhBAAAEEEBg4AgQf7WnrnIxskSYR6YytTgQgY3NjqeQI6GiO+w5UmZVlZmZIWXGhFBbkJWflrAUBBBBAAAEEEEAAAQQQQAABBBBAAAEEehUg+MgOggACCCCAAAIIxCkQa+hRV6sjPeqIj0wIIIAAAggg4F2A4KN3q2TMmdeWKx0+XncdWifCj8loJdYRi8CBw7XS2NTSY1ENPpaVFEpmRkYsRbIMAggggAACCCCAgGUCbW3tUlXbIPm5OVJUmG9Z7agOAggggAACCCCAQG8CBB/ZPxBAAAEEEEAAgTgE4gk96moHDRpk/jAhgAACCCCAgHcBgo/erZIxZ57kSkdTR1yrIvwYFx8LJ0CgqblFKg/VhpWs++qIoaWSk52VgLVSJAIIIIAAAggggEAyBWpqG6S2oUk6OjolOytTRg3nzTzJ9GddCCCAAAIIIIBAvAIEH+MVZHkEEEAAAQQQGLAC8YYeFa64uFhycnIGrCEbjgACCCCAQCwCBB9jUUvcMnkZudLRSPAxccKUnAqBykM10tTcGrZqHe2xmJGAUtEkrBMBBBBAAAEEEAhMoKW1TQ4erpO29vYeZXKuFxgxBSGAAAIIIIAAAkkRIPiYFGZWggACCCCAAAL9TSCI0KOalJaWSlYWo8X0t/2D7UEAAQQQSKwAwcfE+votPTcjVzrjDD7qOhn10a888ydKQB+E7z9QHVZ8bna2jBhWmqjVUi4CCCCAAAIIIIBAkgQ430sSNKtBAAEEEEAAAQQSLEDwMcHAFI8AAggggAAC/U8gqNCjllNWVkbwsf/tImwRAggggECCBQg+JhjYZ/G5GTnS0dARd3CR4KNPeGZPmEB1TYPU1DeGlT+krEgKBuUlbL0UjAACCCCAAAIIIJA8gUPVdVLf0By2wuGDiyUvLzd5FWFNCCCAAAIIIIAAAjELEHyMmY4FEUAAAQQQQGCgCgQRfNQy9E95eTnBx4G6I7HdCCCAAAIxCxB8jJkuIQtq8LG9vl0yMzPjLp/wY9yEFBCAwN6KqrDXHmZnZcmo4WUBlE4RCCCAAAIIIIAAAjYINLe0SsXBmrCqFBXmS3lJoQ1VpA4IIIAAAggggAACfQgQfGQXQQABBBBAAAEEfAgEGXrU1RJ89IHPrAgggAACCPxTgOCjXbuCE3zUWmlwMd7wYrzL26VDbdJNoKWlTfYfDH/NdWnRICkpLki3zaG+CCCAAAIIIIAAAr0IVByolubWth5z8IUXdhkEEEAAAQQQQCB9BAg+pk9bUVMEEEAAAQQQsEAg3uCjM9KjsykEHy1oVKqAAAIIIJB2AgQf7Woyd/BRa0b40a72oTb+BGrrm6Sqpj5soRFDSyU3J9tfYcyNAAIIIIAAAgggYLVATV2jVNc2hNVx5LAyycnOsrruVA4BBBBAAAEEEEBAhOAjewECCCCAAAIIIOBRIIjQo67KXQ7BR4/4zIYAAggggIBLgOCjXbtDpOCj1jDekRvjXd4uJWqTLgIHq2qlobGlR3WzszJl1PDydNkE6okAAggggAACCCDgUSDa664HlxVJ4aA8j6UwGwIIIIAAAggggECqBAg+pkqe9SKAAAIIIIBA2gnEE3x0L0vwMe2angojgAACCFgmQPDRrgaJFnzUWsYbXox3ebukqE26CLS0tklra7u0t7dLU0ubGe2nvLQwXapPPRFAAAFyVkalAAAgAElEQVQEEEAAAQR8COzed6jHF9V10ZLCQVJaUuCjFGZFAAEEEEAAAQQQSIUAwcdUqLNOBBBAAAEEEEg7gaBCj7rhBB/TrvmpMAIIIICAZQIEH+1qkN6Cj1rTeMOL8S5vlxa1QQABBBBAAAEEEEAAAZsE9ldWS0tbW48qFeTnypDyYpuqSV0QQAABBBBAAAEEIggQfGS3QAABBBBAAAEE+hBIVOhRV8urrtn9EEAAAQQQ8C9A8NG/WSKXCA0+6rpCw4rxhhfjXT6R20/ZCCCAAAIIIIAAAgggkL4CBw7XSkdHZ/cG5OZkSU52thQW8Krr9G1Vao4AAggggAACA0WA4ONAaWm2EwEEEEAAAQRiFog1+BhpudCfEXyMuVlYEAEEEEBgAAsQfLSr8XMycqSjvr1HpSIFFeMJL8azrF1a1AYBBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAgCAGCj0EoUgYCCCCAAAII9FuBWEOPChK6bKR/Dx48WLKysvqtHxuGAAIIIIBAIgQIPiZCNfYyNfjYXtfmaZTHeAKM8Swb+9axJAIIIIAAAggggAACCCCAAAIIIIAAAggggICNAgQfbWwV6oQAAggggAAC1gjEGnzsa7RH5/cEH61paiqCAAIIIJBGAgQf7WosJ/iotXKHExn10a52ojbeBJqbW8NmzMnJkszMTG8FMBcCCCCAAAIIIIAAAggggAACCCCAAAIIJEWA4GNSmFkJAggggAACCKSrQCzBx2jLuH+u/61/hgwZktQRH1fvapHXdjWnVXN89rTitKovlUUAAQQQSLwAwcfEG/tZgwYf22pbu0OPTuAx2giNsY7cGOtyfraFeRHYtfdgGMKwwcWSn5cLDgIIIIAAAggggAACCCCAAAIIIIAAAghYJEDw0aLGoCoIIIAAAgggYJdAkKFH3TKnPPffyQo+/nZVrfx4Ra1dwD5qs/jaoTJ7DA+bfZAxKwIIINCvBQg+2tW8ocFHrZ2GFHsLKsYaYox1ObvEqI3NAgQfbW4d6oYAAggggAACCCCAAAIIIIAAAggggMARAYKP7A0IIIAAAggggEAUgUQEH0NHfUxW8HHsHXvSup0XjM2Te68ZktbbQOURQA
ABBIITIPgYnGUQJUUKPmq5fb0aOJYQYyzLBLGNlDFwBAg+Dpy2ZksRQAABBBBAAAEVOFxTL3X1TT0wcrKyZOTwMoAQQAABBBBAAAEELBcg+Gh5A1E9BBBAAAEEEEiNQNChR92Kjo6OHhuTzFddE3xMzX7EWhFAAAEEEiOQrsHHtyu3doNsqdwiGys3yfRh07p/NmXYFJk8bFJi0BJYarTgI6M+JhCdohMmQPAxYbQUjAACCCCAAAIIWClA8NHKZqFSCCCAAAIIIICAJwGCj56YmAkBBBBAAAEEBppA0MFHLS+0TIKP3vcqRnz0bsWcCCCAwEAQSKfgo4YdF21aLOsrNnhumuOHzzCByEumXex5mVTOGC34qHVi1MdUtgzrjkWA4GMsaiyDAAIIIIAAAgikrwDBx/RtO2qOAAIIIIAAAggQfGQfQAABBBBAAAEEQgSCDj1q8aGjPerPCD563/UIPnq3Yk4EEEBgIAikQ/AxlsBjpLa7ZvrV1gcgNfjYWtMSMeTIqI8D4YjsX9tI8LF/tSdbgwACCCCAAAII9CVA8LEvIX6PAAIIIIAAAgjYK0Dw0d62oWYIIIAAAgggkAKBWEKPWs3elos22qP+fOjQoZKVlZXwLY30quvF1w6V2WNyE75uvyv41EMHZemO5h6LEXz0q8j8CCCAQP8WsDn4+Nimx+WhjQ8H3gA2ByCd4GOkkKP+TCfn70gwvf0uGmQsywTeKBTYLwUIPvbLZmWjEEAAAQQQQACBqAIEH9k5EEAAAQQQQACB9BUg+Ji+bUfNEUAAAQQQQCABArEEH/sKPWo1Q+dxRoAk+BjeiAQfE7BjUyQCCCDQzwRsDT4mKvToNJ+t4Ucn+Kj1DA0/Jir46Kyrn+3abI4FAgQfLWgEqoAAAggggAACCCRRgOBjErFZFQIIIIAAAgggELAAwceAQSkOAQQQQAABBNJXIJbQo26t3+CjewRIgo/h+wvBx/Q9hqg5AgggkCwBG4OPty+7U9ZXbEgKwS3zb5bJwyYlZV1eVhIafNRlIgUeGfXRiybzpFqA4GOqW4D1I4AAAggggAACyRUg+Jhcb9aGAAIIIIAAAggEKUDwMUhNykIAAQQQQACBtBaIJfjoJfSoKM58oX8TfAzfZQg+pvVhROURQACBpAjYFHx8u3KrLNq0OGmhRwfYpvBjqoKPasErr5NyyA2olRB8HFDNzcYigAACCCCAAAJC8JGdAAEEEEAAAQQQSF8Bgo/p23bUHAEEEEAAAQQCFiD4GDBojMURfIwRjsUQQACBASRgU/AxmSM9hjbx/R/+kxWtHin4qBULfe2187NIlY41wBjrclbAUQkrBWpqG8LqNWhQnuRkZ1lZXyqFAAIIIIAAAgggEJ8Awcf4/FgaAQQQQAABBBBIpQDBx1Tqs24EEEAAAQQQsErAb/DR62iPupHu11s7/9a/bRzx8berasPa5eQxeTJ7TG7YzyPN66dRP3tacdjsBB/9CDIvAgggMDAFbAk+PrbpcXlo48Mpa4Tjh8+Q75z9rZSt31mxBh9bqpvDgo5+go9aViwhxliWSTkYFUAAAQQQQAABBBBAAAFrBAg+WtMUVAQBBBBAAAEEEPAtkPH++3s6m5qaJT8/z/yx8YZxbW2dtLa2SUlJkWRnZ/veyEQv0NHRITU1dWY1paXFVho2N7dIY2Oj5ORkS0FBgZV1rK9vkJaWFikoGCS5ublW1rG6ukba2zukrKxEMjMzE71r+S5fj5P6+npTt+LiIisNGxubhD7Hd9P2WCA9+pxm0bbOyckxx7SNny1H+pwCyc3NsbKO6dHnNEhmZka/6XOSHXzU9Q0ePMTsg4mext6xJ2wVi68dGhZmXL2rRa544EBc8/rZlh3fHh02O8FHP4IDa95169bJoUOHzLnOqaeeKvn5+dYCbNq0SZYvXy5HH320nHPOOVJYWNhd12XLlol+nut5uW4HU/oItLe3y6pVq0T3xRNOOEFOO+00K68L0kc09praEHxMdejR0btm+tVyybSLY8cMYMlowUc95450Pyfa9UEs1w2xLONssnNtlZEhUlJi6/2c9Lq2yssL/6JMALtY3EU411bl5aVWXvu1traKHi9ZWVlSVFRoZR2d+zmDBuWLtnM8x17cDRqlgJqaWmlra7f2HrLeU9T73Lb3OQ0NTeYa1db7OXV19aLHTGGh3s+xu8+x9x4yfU4Q/RB9TvyKzc32n+fQ58TfzulwnlN5sEaaWlp7bGxOVpaMHF4WP0BAJRzpc4ol28KRyPV+SW1tvfXnObY/t0qXPkevpUtLbX1Wng7nOY3S1NQi6XBtpdkXvU61bUqXPkevrfLycmTQIDufldPnxL9np8N5jubE+nufk7Fs2crON9/cJDNmTJXp06dYGSx85pkXZM+e/fKBDyyQYcOGWndjTXeUxx9/RsdykksvvVDy8vLiP0ICLmH79h3y8sur5ZhjjpJTTjnRhJFsm1566XV5553tcvrpp8jYsUdb+SG6ePETUlVVI9dcc4U5GbFtqqg4IM8/v9IEcBcsOMPKm3/r128W7XOOP/44mTZtstV9zgc/uFCGDh1iXZ/T0NAoTzzxtI6HIpdeegF9TowHotPnnHHGKXLMMXb2OYsWPS7V1bXy0Y9eYWW4p6vPeVHKykrlnHNOt7LP0f5G+52ZM4+T447ru8/xE3zsa97Q3+vNAPfkjABZUFBoHrQneiL4mGhhyk+GwH//93/LmjVrzKpuvvlmGTduXDJW63sddXV18uUvf7l7uUsvvVQuu+yy7n//y7/8S/d/33PPPb7LZ4HUCaxfv17uuuuu7gp84xvfkOOOOy51FRrAa7Yh+Hjd/33Cmha4Zf7NMnnYpJTVxwk+agXcXxJ89tllsm9fpVx00bkyeHB597VVb4Epv2Eqv/O7kfQG75NPPmvqfPHF51t5bfXuu+/Jq6+ulnHjjpE5c2ZZeT9n5cpX5d13d8jZZ58qY8YcbWUg/NFHHzMPYq+55nIrr63276+QF15YJYMHl8nZZ59m5bXVunUbZcOGt2TWrGkydeokK+/n/OMfS02fc8kl5xvLePqHRHSoTp+jDw4vuug8K/ucd955T157bbUce+xYOemkmVb2OS+++Ips27ZTzj57rowZc5TFfU6DXHPNZVb2Ofv2VciyZXb3OWvXbjB9zoknzpApUyZa3OdUmPuz5eX29TkatH7yyefMYBgf/OC5lvY52+XVV9fI+PH29jkrVrwi+nzt7LPnWdnn6P3FRx993HyB4iMfsbPP2bu3q88ZOrRczjprnpXnOWvWbJCNG9+S2bNnyOTJep5jX8hHj+f9+yvlsssuNPfjbTvP0QGC/vEPu/ucrVu3yWuvrZUJE8bJ7NnHW3mes2LFy7J9+06ZP/80Ofro0dad5+gzjkWLnhB9Tnn11drn2JeJ2Lt3vyxb9pIMG1YuZ55pa5+zXjZu3GL2Qz3PsTFYuGTJc1JRYXOfUyt6/adf2PrABxZaeZ7z9tvb5PXX18rEiePkxBPt7HOWL39Z3ntvp3nGe9RRo+hzYrgRoDm25cu1zxksZ
54518rznNWr18umTVvkpJOOl8mTbe1znhXNHVx++QdMsN7veU7GypWvdG7evEUmT55gOlYbA3F6Qqwnc/ohryEk20baa2pqkmee0VFbOuX88882iW3bpp07d8vq1W/KqFEj5IQTjjffzLZteuONteZk7qSTZpkb5TZeWCxZ8qwZ3fOSS87rMWqPLZYHDhySlStfMSOvnXbaKVaecG7e/LboH/qc2Pca/UaaPkDs7BQ577yzrOxzduzYLWvWvCmjR4+UWbNmWNnn6MmmnszNmXOCuWll54WF9jm1csklF5iRDGybnD5HR2SeN8/uPkfPcbTf6es8p68wo7sN/LzmWpcLDT46/y4sLDL9dqIngo+JFu4f5esohVu3bjUbo6PZjR071qoNsyH4qKM4Hj582Lice+65Ec8Jt23bJj/60Y+67dTyxhtv7P43wUerditflXnqqafk4YePvNb42muvlYULF/oqg5mDEUhF8PHtyq7+cUvlFtlYuUnWV2wIZmMCKCXVr7x2Bx/dr7fWG3+VlQfNDdTBgwebkcKdKahRH/3eCHNz6wPi555bYeq1cKFeW9n3BccdO3aK3qDUh14zZ0638tpKQ1J6DXjyySeaetp4baVfWNY3ZGjA1cZrK725+9JLr5mbu/PmzbHyoY2GAd56a6v5EuvEiXptZd9beV544UWpqDgoCxacacLW7j4ngK427iKO9DmZsnDhmVb2OXqfZM2a9TJmTFefY+OIihoG1z7nlFNmy9FHj7K2z2loaDABV/qc2A6drj7nnX/2OeOt7HOef36FVFQcMsfzkCFHvuAR2xYHv5SGrZcufVGysjJNv2jjeY4GCjXkSp8Te/vr/cknnnhW7O5zKkUHISgtLZV5806y8jxnw4bNsmWL9jlTZOJEO/ucpUtXSGXlITn33K7znHiug2Lf46IveaTPyTIDs9jc5+ggQccfP83K85xXXlkt+kz/1FNnmxCSbddWmoVYsuQZ85ZJPc/Rt9nYNmlYT/scDQjPnWt7nzNVJk481srzHKfP0efQNn7Bw+lzNE9yzjl29jnbtu2Qdes2mMF3dEAoG6+tXnnlDdPnzJ07xzzTt6/P6RDN59jc5+gXWV9++Q1znGjfbeMgeTpA0NtvvyPTp0+VCRPs7HOee265HDhw2GRfYulzMiorKzt1+E3difsKA6Tqg0vrp69G0m8N2BZ6VBO9sGhpaZWOjnYrA0haRw1XqKNONh5sWq+2tjbTzrof2hh61Drqq7i1jjbesHK3swbibPyWjdZRh56mz4mvN+3qc1rMcW1j0Lpnn5Nh5YM5d5+j3way7UTO2UPoc+I7Vvz2OX5Cj87nf7QahpbljO7ont8JPg4dOjQpIxcQfIx/fxoIJfz1r3+Vxx57zGzqDTfcIPPmzbNqs20IPv74xz+Wd99917jcfvvtMmzYsDAjPb5vu+022blzp/ndl770JZk1a1b3fAQfrdqtfFXm4MGDcsstt5ibLXoepu1cXl7uqwxmDkYgmcFHDTwu2rTYqqBjJMVUjvoYLfio1/n62tn8/PywAJINwccj11adVj6Yc85n1VFH/LfxS6xHrq1azU18rq1i6+PS4b7dkfs52Ql/MOf32sxRd+4h64N228IAzjWk1lFfeW1jGODINXSb6bNtfDCXTn2O9t02hgGO3LfTz5ZOa58VJLPPia3nFnOPu+u5Vfh5TqxlBrmc9qX0OfGLdj23St15jpfPM72HbH+fY/fzyXTqc2w+z+l6bmX3tZX225znxNc30ufE5+ecz2q/nZWV+GurWGubDtdW9Dmxtu6R5VJ9nuNlC+hzvCj1Po+Tw8rO7r99TsahQ4c646eiBAQQQAABBBBAIL0F/Dxc8zvaY2jw0f1vDU0l4wGxn+Dj3StrwxrzxtOLZfaYniNGr97VIpHm9bMn3HvNkLDZP/XQQVm6o7nHzxeMzZNI8/pZF/P2LUDwsW8jL8FH52GiBh81FKejGrgngo99O9s8R3Nzs+zatUuOOeYYawMBNvsFVbdkBB+DDjzqqIxep1hGk0zlqI+hwUfdTuchbejfjoEtr7v22ibMN3AEdu09GLaxwwYXS76Fb09J11bxc+2VrttIvRFAAAEEEEiGgJdgZDLqwToQQAABBBBAAAEEEEiVAMHHVMmzXgQQQAABBBCwSsDPwzc/wUdnXvcy7uCjbSM+2tAoBB9T1woEH/u29xp87K0kgo99OzMHAn0JJDr4+Nimx+WhjUdea95Xfbz8/v4P/8nLbGae6/7vE57ndWZMt+Cj1tuGUR99Q7NAvxcg+BhsE/u5zgp2zZSGAAIIIIDAwBMgCDnw2pwtRgABBBBAAAEEBroAwceBvgew/QgggAACCCAgfh/GRZs/0s9Dg4+h/yb4GL4D2hh81JH7du/eLfqaWx0WXttt+PDhMnHixIgjdurw+01NTWbjioqKJDMzU6qqqmTTpk2mDB0BcPz48XL00UeHAeh6dH2VlZXmd0OGDJGjjjpKxo0b5+mVffoK3rfeesssX1tba9alI4uOHDlSRowYEfGIr6mpMT9/8skn5emnnzb/fd1118mcOXO65y8sLIy4rbpdb7/9ttku3WZdn65nypQpgb/G3eurrtVAX8fhTCUlJT2226+Rbpe2qU4///nPu19h/f/+3/8z7RNpPe59INQuUvCxvr5eNmzYIAcOHDDrctpd95NEP7jw24ZaV30NlE7R9gv9nc6j8zqTM6/b02kb/dnGjRuloqLCvEZ68ODBMmrUqKjHWKQdWV8TunXrVtm/f7/ZH/Py8kw5xx57bNR9X8vxe7y696/QfUuPOe3nc3JyzKuwddq+fbt5PXp1dbU5PnSkyEmTJoW163vvvSfbtm0zfUVxcbGMHj3aHPfqxhQukMjg4+3L7kzIa60THXxUpVS97trLiI9av9D+jOAjR7eNAgQfY2sVv9dUsa2FpRBAAAEEEEDAq0Ci7yV4rUc6zFdb3ySNTV33fZwpOytTBpcVpUP1qSMCCCCAAAIIIDCgBQg+DujmZ+MRQAABBBBAQAX8PqQj+JjY/cam4KMG+h555BETWoo0afjxiiuukFNOOaXHr3/1q1/JG2+8YX7205/+VFauXCmLFy/uMc+ZZ54pn/zkJ7t/poHHv/zlLyYcGWnSkOT1119vApORJt0vNbj46KOPRm2g008/Xa688kopKyvrnke38Y477uizUb/zne+YsJYz1dXVmXUtX7484rIa+rr22mvltNNO67NsrzN4CT6q389+9rPuIrXOWnfnWI/FyD0SZm91/d3vftcdDu2trqHBx2eeeUYeeuihiEVrSO6jH/2oTJ482SuT5/liaUMNM6rnoUOHzHo+8IEPyFVXXRVxnQ8++KA8++yz5ne6P+h+piE+DYzu3bvX/Pyee+6RV199VX77299GLEOPsauvvlpOPPHEXrdrzZo18vDDD5vgZKRp1qxZ5ngLDSrqvH6P12gjdmpg+dZbbzWrP/fcc+Xyyy83ZUc6pnW//OpXv2rCmQ0NDaL7y5YtW8Kqrm6f//znZdq0aZ7bdaDMmKjgY6JC
j9ouyQg+XjP9arlk2sVJ3w0iBR+1Evqw1f3A1Wvw0VnWz4bwYNePFvP2JkDw0fv+4fc6ynvJzIkAAggggAACQQlwnty35OGaeqmr7/oCszPlZGXJyOFH7p/1XQpzIIAAAggggAACCKRCgOBjKtRZJwIIIIAAAghYI+D3YV0sr7nWjXW/3tr5t/7NiI/hu4ItwcfnnntOHnjgAU/7qoYfL774SNDEHXqbN2+evPTSS2HluIOPOtLdf/7nf/a5Lg1B3XTTTWYkOPeko1Def//9UUOI7nm1jNtuu03Ky8vNj0ODgtEq4Q4+6miSWt9oITN3GZ/5zGdk7ty5fW6blxn6Cj7qSJf/8R//0V3UhAkTTLhMtzkeo0QFH7VeCxculMcff7zPzdewXZAh0njaMDQs++1vfzssmBnaFl/5ylfk+OOPN9vpDj5qmPePf/xjn9sfeoy5F3jiiSdk0aJFfZahIdJvfetb3SMxOgv4PV6jBR937NghP/zhD02xp556qhm9M1poWuc5+eST5eMf/7gZRVRHhextuvnmm83oj0xHBBIRfEzE663dbZaM4KN7fRqC1CkZQchYg49aP0Z95Mi2TYDgY+8t4vf6ybb2pT4IIIAAAggg0FOAYKQIwUeOCgQQQAABBBBAIH0FCD6mb9tRcwQQQAABBBAIQMDvgzuCjwGg91GEDcHHtWvXyt13391dUx0lTsN7M2bMEH2dro7M9sorr3SPeqcz3njjjXLCCSeYZdxBKvfm6qh1+tpqDUjp63cvu+wy81peDTM6k4bhPvShD5mQk75SV9elI+I5IUOty/e+9z0TmnUm98h6+jMd2XH69OkyZswYM5rc+vXre4TrdPS4r33tayZssmfPHlm2bJkpSgNtOmqdTjrC5NSpU7vX8cEPftC8plcDhD/4wQ+6R+zTGTT0qWVmZ2ebV/WqnwbfnEnDh2oX79Rb8FGd7rzzzu5VuEOP+sN4jHR7Nm/ebMrW0Tu1XXTS9nRedZ2fn29G+HMeGHgd8dGpsLartps66quXtc10H3PWpfNpYFD3m3inINpQRzB1Apta9x//+MfdgUKts+7TzivUzzvvPLnmmmu6q+0OPrr3e2f7dT/SMLAGhp0ydD4d+fCkk07qsfmho0Xqfqv7o44Uqa+71v3C3Wa6T2sIU19F7Ux+jlddxkvw0b1d8+fPl5kzZ5ofrVq1SlasWBGxCTUkrdunoWQ9jjTM7Ey6X3z961+Pt+n71fJBBx8THXpU/GQHH90NnuiRIL0GH7VOXkd9jOUBbCzL9KsDg40JRIDgYzij32umQBqCQhBAAAEEEEAgaQID/Tya4GPSdjVWhAACCCCAAAIIBC5A8DFwUgpEAAEEEEAAgXQS8PsQL5bXXKtHuoz4uHpXi9y9sjasCW88vVhmj8nt8fNo8/pp/3uvGRI2e6qDjxra+uY3v9kdONMA1Xe/+92wV+TqfDq6mxNI1Pl+9KMfSWZmZljwcfDgwSa0psHB0ElHKHRCgjoi3Te+8Q3zOmD3pOvS1zc7o8JpuMt5pa7Op+FGHTXv9ddfl09/+tMmQBc6vffee2akR2eKNIKce2TDG264QTSIFTotWbKkx+u0tR5aH/ek+/uf//xneeGFF7p/rCNERtp+P/tLtDDh1q1b5fbbb+8uSsOBGhLTEKkzBWWkAT9nFD9d57BhwyJugp/g46hRo8w+Ftru+/btk3//93/vsS/qv7OysvywJaQNNTyp+7sTlHWPYHrfffd1t71u2/e//33JzT3Sf4QGHyOFebXS1dXVZvud12pre+rx4rRrfX296GiTTjhUA4b6evVQHw33ars584W+njs0+Njb8ar18hN81JBy6OvpQ0O4Wqa+gv6iiy7q0Vah+/Vdd90lxcXFcbV9f1o46ODjdf/3iYTzpDL4qBuXyPCjO/io63I/ONXPRfdE8DHhuxoriFOA4OMRQL/XSvoZrF9g0ONe/4S+7j7OpmFxBBBAAAEEEOhDQD+79Qu7+nd7e7v58qr+2+s0UAOQBB+97iHMhwACCCCAAAII2CdA8NG+NqFGCCCAAAIIIJBEAT8P82Id7VE3J/Qmo1OWba+61jDjFQ8cCGuBxdcOjRh8jDSvn+bb8e2er2zWZVMdfNTR4X7/+9+bzYgWyHK2ce/evSbQqK/wXbBggflbbxK7g1Qa0tKQ4YgRI8JoNNimwShnuuOOO3qM5OheoLa21oxU50yhgUPdpzSEGWk9zjLuUfoivTq5r+Cj3jTXYKYzCt8XvvAFmT17dsQm13k1rKkj7umkr/TVYJozebnxHvqwPFKYUEOIGmpzJg096miWBQUFYfUKwijo4KPuY7fccouUlZVFdAwNrOqrmqdMmRKzY5BtqIFC3bedSUc91YCjtnu0/VR/7g4+6vGhoU8dCTXSpK/k1n3dCS26A7nuY1VDw3osadgi0qQjdv70pz81v9J1/uIXv+gOSHo9Xp1yvQYfQ/d5Z/nQUV51pNgvfvGLEV/3q8FPJ/CsAdKxY8dG3L6B+MMgg4/JGO1R2yjVwUetw/HDZ8h3zv5W4LtMIoKPWkm/D179zh84BAX2CwGCj11f2vIyabhRP/t1JGX9DOYY9KLGPAgggAACCCRXwAlAtra2mjdMeJkG2mc6wUcvewXzIIAAAggggAACdgoQfLSzXagVAggggAACCCRBwOsDPacqQQUf3eUQfLQv+MkUuY4AACAASURBVOgegfGjH/2onHvuub3ujXrT2D2anc7sDlKdddZZcv3110cs48knn5RHHnnE/K63+ZyFf/e735nXH+sUrW56I1tf8at/NCypI8Tpq3P1lczvvPOO6MiLOoWOeqc/6yv4qMv/5Cc/MctrYE/L6u1m+PLly81IlDppuEuDcTqFhhWjAevIe9oezhQafNRjSYNhzhRtxMzQ8uMxCjr4qCMULly4sNd97A9/+EP365F1f9S2j9UxqDZ0Krx06dIer2TWUKETUoy2be7g4wUXXCBXX311r9v/xBNPyKJFi8w8Gi52AsAasNy0aZP5+Wc+8xnzOvpokz7o+fKXv9xdN/eIp16PV6dsr8HHn//852EjxWoZWpd//dd/7a5qtNFVdQZ32+vxo8cRU5dAkMHHZIz2qHW2IfhojqMEhB/jCT5qnaJ9lvh94Op3fo4nBCIJDPTgo5drJA055ufnh50Ds0chgAACCCCAgN0C+iXU5uZm88frF1Lt3qJgakfwMRhHSkEAAQQQQAABBFIhQPAxFeqsEwEEEEAAAQSsEPDyUM9dUYKP4a+67o8jPurIa05w66abbpIJEyb43l/dQareQk06sqSOWudMGibsbXJGWtR5QgNjWmcNGj722GPd9Q8tS7fFeU2zO0DnzNdX8HHVqlVyzz33xFRffeWxE1IMIviooboHHnigxyZqKLG3ES+DMAo6+KghQB2lsrfpxRdflHvvvdfMMn36dDOipU6xOAbVhk59tV/U0RPXr1/fYxM0oKhBw0ghJHfw8fOf/7ycdNJJvW6/hhudUSTdYVj3saoF+Dl+3CFCr8erU0kvwcfQ0G7oBrrrrq+
gHz06PASuy7hHafVi5buzSuMFggo+vl25VW594bakSNgSfNSNDfq116HBR12Hc/xHetUtr7tOyi7HSmIUGKjBRy/XRhp41C856AiPTAgggAACCCCQ3gJNTU3m/pGXcwD3+X16b3Xk2hN87I+tyjYhgAACCCCAwEARIPg4UFqa7UQAAQQQQACBMAGvN/acBWMNPupy7mUZ8fFIU9j2qmv9truOHOdMv/71r2MaycYdpOot2OYeXdLvITpv3jzRUKVOeqP6Rz/6keirt71OsQQflyxZIo8++qjXVfSYTx+Sq4tO1dXV3SNX9laYjlap2+lMbtdIy5155pnyyU9+MmKRQRkFHXz8zW9+02d4YOfOneZ1zzoNHz68e9TNWByDakM3clVVlXz961/v4a6jgZaWlkZsC3fwsbfXuzsL19fXy5e+9KXusv7nf/7HjEzx2c9+NqZ9URf61Kc+JWeccYZZ3uvx6qzMS/Bx1qxZPeocWtGvfvWr3a+M/+Uvf2lGzYo0EXyM3sRBBR+T9Zpr3RKbgo9an1vm3yyTh02K+ThyL6jBx+aqph5hZ4KPgdBSSAoEKg7WhK21tHiQ5OX2v7Cfn+uhgoKCqJ9XKWgmVokAAggggAACAQjouUBDQ4MZAdLL1F9HWCf46KX1mQcBBBBAAAEEELBTgOCjne1CrRBAAAEEEEAgCQJ+HvRpdaLNH+nnoUFH59+h89r4quu7V9aG6d94erHMHhM+4mOkef003b3XDAmb/VMPHZSlO3recF0wNk8izetnXV7mDQ0+9hZI6q280Fcyjxs3LuLs7uCjjpA3cuRIL9U082iZzqt97777blm7dm33svoa6zlz5pjgmb6K+9ChQ/Lmm2/K008/3T1PvMFHHcFxxowZnuubl5cnV1xxhef5I80YKfjofrWyLhPtdcBBGQUdfPzVr34latPb9N5774mOCqiTe+TMWDDdwceg2tA9IqNTp29961syZcqUiFV0Bx/7GqVTC9CApzPKpf77f//3f83rot3Bx/nz5/cZIHVX5tRTT+0eadPr8eosT/Axlj0v+GWCCj7evuxOWV+xIZAK6iuke5u+c/a3PK9H69XbFESdg3zldaKCj2rg9+Gq3/k9NwozItCPBPxcB+koj4WFhZKVldWPBNgUBBBAAAEEEHAL6L0j/dKh13OE/nbOTfCR4wEBBBBAAAEEEEhfAYKP6dt21BwBBBBAAAEE4hTwejPPWY3X4GPofO4RH0MDkcOGDRN9mJjoaewde8JWsfjaoWFhxkTXw0v5qQw+av3cI7F9+9vflsmTJ3updo95vAap/vCHP8iKFSvMsldddZVoYNHvFDoantY/WiBRX4X9xz/+0awiluDjSy+9ZEJnOk2bNi1slD+/dfc7f2jwUV8RrSMNPvjgg92vDNcgpIbp3K89DtIo6ODjd7/7XZk4cWKvFC+88ILcd999Zh4NyH7lK1/xS9c9f9BtqLYaZHS/hl1Xpv76anMNSoRO7uCjhhdPOeWUXrdHX6N91113mXncr5B2H6teHKOtxOvx6ixP8DHm3S/QBYMKPl73f58IrF5Bvz46WsWCHKUyqFEfIwUftf7u11y7H456fdW1U4afRupvD2H9bDvzIuBFwM81kH45I9JnuZf1MA8CCCCAAAIIpJeAfsGwtrbWfNEwdIp2jt1fzr0JPqbXvkptEUAAAQQQQAABtwDBR/YHBBBAAAEEEBiwAn4e+sX6mmvFJfjobxdLdfDxv/7rv2TdunWm0l7CiPqaX30ls3sUHK9Bqqeeekoefvhhs64JEybITTfd1CuWc/PZva4tW7bInXd2jQx2zDHHyC233BK1DA0tavBNp1iCj++++64JFTqTbqcGDXub2traAgv3ul2d0KOuPzR8d+KJJ8oXvvCF7lHCgjRyBx97G62wt33AHZy7+uqr5YILLujV0N1uOq8uE+sUdBvqa6dffvnl7n04MzNTtm7dav6tI5K6Xx3v1NkdfFywYIFcd911vW6O+3XP2rZf/OIXzfz6Ou2NGzea/7700kvlsssu67Wc1tbWiKNCej1encIJPsa69wW7nI3BR93CRIcfgww9BllfJ/ioZYYGHN2vvHb2AoKPwR4PlIaAFwE/1z5anp5j9XWe52W9zIMAAggggAAC6SOg5wsaftTr59Cpt5BjfwlApk9LUVMEEEAAAQQQQACB7nvNhw4d6oQDAQQQQAABBBAYiAJ+Hv4FEXyMNBIkIz6G73mpDj6+9tpr8pvf/Ka7YrfeeqscffTREQ+Rffv2yfe+9z0zCt2FF14oZ599tgn5eQ1SVVZWyne+853usnt7PbCGHn/729/K9u3bzSujdZQ8Xddbb70l+spsnfTVwlpGpGn//v09gpV9BR8vuugiufLKK3sUpa8C1xEWndH99Pc6X7TpmWeekYceesjU95xzzol7xCC3q7qPHz++e9XuUQH1hxpOO+2008zvgzRyBx915EUdgTHS5DX4qIECDQJGe825Bvs04OdModvtt+8Osg1fffVVs086k9po8NG9T2vw0XkluzOfO/ioP9P5J02aFHFTdu7cKXoMOtPnPvc5Ofnkk80/NXCpwUuddITJ73//+1JeXh6xHH1dto5AqX2uhiSnTp3aPZ/X49VZwKbgo76OLCcnJ+KrgPV32kdom4ROGkjWB1Pp/NpSW4OPap2o8GPQoUeta1Cvu/YbfNR1ew0/+n2I6nd+v/0o8yOQjgJ+rnt0+wg9pmMrU2cEEEAAAQSCE9D7PpHCj5HO4521ch4enD8lIYAAAggggAACCHgXYMRH71bMiQACCCCAAAL9TMDPA8BYg4/Ocu5RHx1G/RnBx/CdKtXBRw0L6SuunXCfPvjVURS1rdxTY2Oj/PCHP5SKigrzYw0//uQnP/EVfNTlQl/fHClMp/vKn/70J9FXVTvTbbfdJqNHjxat77/92791/1xDZToaonvS0OPtt9/e43XEkYKP7ldhjxo1ygTJcnNzzailGpjTkNRzzz0nDzzwQHfxGiLTP6E3uN2vVNaZ3UHEWLuSvgJq7leH6zp0JMwhQ4YEaqShWA3H6qSjFV577bVm2zWYqgEzx8Fr8FHLiRba05CrhvWcSUf01DaJ92FCEG146NAh+eY3v9ldNx21UT10cu9H7nZwZg4NPurPIwWMdb/V/VyPNcdJ92N97aZOTU1Noq+4do5VPQY1+BvpWNXldu/ebZYbPny4WZ/u26HH4M033yzjxo3rdRe1Ifiox+Sf//xn0deg67F6ww039Kj33/72N/n73/9u+qVPfepT5tX0zrR06VJZtGiRCUzq/usESWM9LlO1nM3BRzUJOvyYiNCj03b3f/hPcTcjwce4CSkAgYQI+LnecSqQn58vBQUFCakPhSKAAAIIIIBAegjoOYR+gTDSa6+dLejvr79Oj5ailggggAACCCCAAAIEH9kHEEAAAQQQQGDACvh5EBht3kijOLpBowUfnZ8TfAzf/VIdfNQahb4OWH82Z84cmTlzpgkBbtq0Sd58883uQJb+3h1Y7Cug595qvZGso/g54S79nb7O97
jjjjOj173zzjuyZs2a7oCl/v6rX/2qzJgxo7uYu+66S3TEQ2fSkRj11dn6eiId7fCVV17pUb7OFyn4qOvS8KYzaehT16Oviv70pz9tRjfUm94/+9nPzM+cSdelvxszZoy8//77pi7O6451nosvvtiM+hjv1JdrQ0ODsXSCcDqqn45QqYHEoIyefPJJeeSRR7o3RYNlGjR94403RF+TXlhYaH7nJ/joFKYjI+qonRoy1f3LeeW683sn7BqvY7xtqPX7+c9/bo4DnbTO3/jGN7pHFtRjREepdP9e28EZXTBS8NHZ73U/0ocnOtLl66+/3mNTdR16XLinzZs3y09/+tMe+6weq5MnTzbl6H6q5bjDk7p+DcQ6U1/7Vai3DcHH0D7qhBNOkBtvvNFUNXQkWR2x1hk1U18L/6Uvfal7kzR0q37pOPJjUMHH25fdKesrNsR7WEVcPqjwYyJDj1pxgo8JaX4KTWOBioM1YbUvLS6QvNzstNoqP9c6zobpSMH62cCEAAIIIIAAAgjovYOqqqo+ISIFIOP9wmafK2UGBBBAAAEEEEAAAQT+KUDwkV0BAQQQQAABBAasgJ+HgQMl+Lh6V4vcvbI2bJ+48fRimT2ma3Q0Z4o2r58d6t5rjoSPnOVsCD5qXdauXSt33323p83REdXOOOOM7nn9Bql27NhhgnlOYK+3lUZ6dfDBgwdNeMkZfTLa8jq6mzNaY6Tgoy73q1/9yoT4Qid3sFPrqSMfusOP0dY5f/58+djHPhb3KIVavhfX0Fde6zYvXLhQgjLSAN1NN90Usa38Bh915EFtB/cImtEce3uttqedNGSmeNowdMRIfcX1iBEjeqxBvd2vXb/66qvlggsuMPO4g4967Nx77719bkLoMeZeIHR00WiFaZBX205HSnVPXvYr9/w2BB/12NMRTZ1p+vTp8rWvfc38c9++fSYA7Ey6nzmB5rq6Ovnyl7/c/Ts10b5Hgy7pNqVD8FFN4w0/Jjr0qHVMZPBRy3e/bt39AJRXXafbUTdw6rtr78GwjR02uFjy83qeC9su4udaR7dFj0kNPaZjGN72tqB+CCCAAAIIpKuAhh/1S7W9jfzo3rbezvfT1YB6I4AAAggggAACCNgtQPDR7vahdggggAACCCCQIAG/DwIHUvDxigcOhKkvvnZoxOBjpHn9NNmOb/cMIOmytgQftS4aIHrsscfk5ZdfjrhZOqLgZZddZkaXc0+//vWvu0er8/LqXF1WR358/PHHRV9DG2nSUdsuv/xyMxpkpElHcrvnnnvCRgnUeTX4pK/D1nCT1kenaMFHDfbpNj/11FM9VqOvNdbtdaa2tjZ59tlnzXyRApsaqLrooovk/PPPD+wBulfXP/7xjz1eC66jE+qD/KCMdES9Bx98MMxaA3S63Tr1VtcvfvGLZgRCZyS+V1991bzK3D3qp+Os7a0jeIaG9fwcZ9HmjaUNdbQHHb3RmTTUes4550RcxcqVK+X3v/999+/uuOMOGTp0aI/go44MuXfvXrPv6uuzQycdTVK3f+LEib1usoaH9dXOGliONOmIiHqs6uvCQyev+5WzXLTgo75K+5ZbbjGzzZo1q8fIiqHr1OPJ2d5f/vKXoq8VjTQtXrzY9As66UiNWq5OOuqmhpR1NFjdtz/3uc+ZkTed6b777jOvwdb9UUdrnT17dvfvnNdg6w+uv/56Oeuss4LYnZJeRlDBx2QEC2MNPyajbtpwtgcftY5+R4zxO3/Sd2BWaLVAfwg++r3W0QbRUavz8vKsbhsqhwACCCCAAALJF9DziqamJtG3bHiZnHPxdDonb2pukZaWth6bp1/gKiqMfK3uxYF5EEAAAQQQQAABBJIjQPAxOc6sBQEEEEAAAQQsE/D7MJDg48AMPjq7rYYS9RXOGlTSwJG+glpfUz5y5MjA92y9kbxz587udRUVFZmw2FFHHeUp+NHS0iK7du2S/fv3m/k19Dh+/HhPy7o3RkNxhw8fNje3y8rKpLi4OOK26nwaOtMR/pqbm02AS1//PHbsWGtHkQvKSG3USPsH3eZo4TUvO4k66uuL1VHL03bXsKPuZ4mekt2G7hEf9bXpun/p6BHbt283r2nWAKh66iiSo0aN8rX56rdnz57uESmccvQY6o+Thnl1v4s0Opc66s9zc8NHKNN9V/uHdA64BBV8fLtyq9z6wm0J3z38hh+TFXo8fvgM+c7Z34p7+3MycqS5qsmUE/qAM9qIj5HmjfZw1O9DU7/zxw1AAf1KIN2Dj36vc7TxeMV1v9qF2RgEEEAAAQQCF9DzC70fpm8R0HsIXiY9J0+X8/LDNfVSV991PeNMOVlZMnJ4mZdNZR4EEEAAAQQQQACBFAoQfEwhPqtGAAEEEEAAgdQJ+Hkg2Nu8ob+L9m/9uft3zn9rqCkZrxgde8eeMGw/ozj6mddPq9o+4qOfbWFeBBDwJhAp+OhtSeZC4IhAUMFHLfH2ZXfK+ooNCef1Gn5MVuhRN9hrnfrCIfjYlxC/TyeBdAw++rm2idQWOnpwMq5J0mk/oK4IIIAAAgggcETAfa6hX9jVL9N5ndIhAEnw0WtrMh8CCCCAAAIIIGCfAMFH+9qEGiGAAAIIIIBAEgT8PByMNfjoXk6/Fe2eCD52aRB8TMLOzioQsEyA4KNlDZKm1Qky+JisUR+Vuq+gYTJDj17q43X30OBj0+FGM6ILIz56VWM+WwXSLfjo57omknlOTk7Ukb1tbSPqhQACCCCAAALJF3Cfc/h59bVTU/dI8Mmvfe9rJPhoW4tQHwQQQAABBBBAwLsAwUfvVsyJAAIIIIAAAv1IwM8DwoEWfHxtV3NYS3/2tPDXHK/e1SKR5vWzm0Qq91MPHZSlO3rWYcHYPLn3miF+imZeBBCwVIDgo6UNk2bVCjL4qJuerFEfdV3Rwo/JDj1qXe7/8J8CaXmCj4EwUoglAukUfPRzTRONt7i4WDT8yIQAAggggAACCPQmEHre0Z/CjwQf2fcRQAABBBBAAIH0FSD4mL5tR80RQAABBBBAIA4BPw8Jo80b6eeRXmet1UyXER/jIA1sUYKPgVFSEAJWChB8tLJZ0q5SQQcfkznqo2KHhh9TEXrsa/RJPzsFwUc/Wsxru0C6BB/9XM9EM9eRl8rKymxvEuqHAAIIIIAAApYIhJ5/NDY2iv7xOjmjw4eOEu91+UTNR/AxUbKUiwACCCCAAAIIJF6A4GPijVkDAggggAACCFgo4OdBIcHH5DYgwcfkerM2BJItQPAx2eL9c31BBx9VKdnhQyd4mOz1OntEUKM9anm9BR/dr78OfcDZ17+duvp9MOp3/v55lLBVsQqkQ/DRz7VMbw75+flSUFAQKxXLIYAAAggggMAAE9Avdoeea9fV1UlLS4tnCRvDjwQfPTcfMyKAAAIIIIAAAtYJEHy0rkmoEAIIIIAAAggkQ8DPw0KvwcfQ+dz/ZsRH761K8NG7FXMikI4CixcvlurqasnKypKPfOQjkpubm46bQZ1TLJCI4KNuUrJDiMcPnyHrKzYkXfOW+TfL5GGTAlsvwcfAKCnIAgHbg
49+rmN649RySkpKeM21BfscVUAAAQQQQCBdBPT8Qf+4w4/675qaGmlvb/e1Ge4vSPlaMAEzE3xMACpFIoAAAggggAACSRIg+JgkaFaDAAIIIIAAAnYJ+HlgSPAxuW1H8DG53qwNAQQQSEeBRAUf1SLZ4cdk+wf5imun7gQfk92KrC+RAgMh+OiEFoYOHZpISspGAAEEEEAAgX4m4JxD6Ga5w4864qOO/Oh3siX8SPDRb8sxPwIIIIAAAgggYI8AwUd72oKaIIAAAggggEASBWwIPmodhg8fLtnZ2Qnf8rF37Albx+Jrh8rsMfaNtEbwMeG7AytAAAEE0l4gkcFHxemv4cegR3p0diSvwcfQB6S86jrtD0U2IMkCfq5holXNCSzoNUhZWVmSt4DVIYAAAggggEC6C7jfauM+n6+vr5fm5mbfm2dD+JHgo+9mYwEEEEAAAQQQQMAaAYKP1jQFFUEAAQQQQACBZAr4eWiYqBEfUx18TKZ3vOtaMDZP7r1mSLzFsDwCCCCAQD8RSHTw0WHqLwFIfaX2ldOuCPT11u5dieBjPzmw2AzrBfxcw0TaGCeooOUUFBRIYWGh9dtMBRFAAAEEEEDALgF9pbU78Oj8t55nVFVV+a5spLJ8FxLnAgQf4wRkcQQQQAABBBBAIIUCBB9TiM+qEUAAAQQQQCB1An4eGhJ8TF07OWsm+Jj6NqAGCCCAgE0CyQo+6jZr+FGnhzY+bBOBp7okOvDoVILgo6fmYCYE4hLwc/0SaUXu0Zm0rKKiIhk0aFBcdWJhBBBAAAEEEBh4AnpOoecSkQKLDQ0N0tTU5Bsl1eHH1rZ26Wjv6FnvjAzJy038W3p8Y7EAAggggAACCCCAQA8Bgo/sEAgggAACCCAwIAX8PDgk+Jj6XYTgY+rbgBoggAACNgkkM/jo3m4nBOn8bGPlJptYTF2mD5smU4ZNSdjojpE2mOCjdbsBFeqHAn6uX0I333m9tf7cKae0tFRyc3P7oRSbhAACCCCAAAKJFIgUfNT1aXhRR4Osrq6OafVO+NEdgoypIBZCAAEEEEAAAQQQGFACBB8HVHOzsQgggAACCCDgCPh5cEjwMfX7zU1nFstnTytOfUWoAQIIIICAFQKpCj5asfEWVoLgo4WNQpX6lYCfa5fQDXeHHvV3TlllZWWSk5PTr5zYGAQQQAABBBBIvEC04KOuWUOLtbW10tra6rsiqR710XeFWQABBBBAAAEEEEDACgGCj1Y0A5VAAAEEEEAAgWQL+Hl42B+Cj79dVSs/XlGbbObA1rfj26MDK4uCEEAAAQTSX4Dgo11tSPDRrvagNvEJ1NQ2hBUwaFCe5GRnxVdwjEv7uW4JXUW00KPOV15eLtnZvL4xxmZhMQQQQAABBAasgBN8VIBIozM2NzeLvvLa7xRaVqJHfuzo7JSOjq4vhXR26t/mGyKif4VOpi76/xkimRkZkql/6/8wIYAAAggggAACCKRcgOBjypuACiCAAAIIIIBAKgT8PEDsD8FHNV69q0Ve29WcCu6Y13nymDyZPYZX8MUMyIIIIIBAPxUg+GhXwxJ8tKs9qE18Arv2HgwrYNjgYsnPS805qZ/rltCKazDBPbnLIvgY337C0ggggAACCAxUAXfwUQ1CA4p6vnH48OGIoci+zBIZfuzo6JT2jq6wo4Yeg5g0BJmRKZKVmWH+MCGAAAIIIIAAAggkX4DgY/LNWSMCCCCAAAIIWCDg5wFifwk+WsBOFRBAAAEEEAhEgOBjIIyBFeIEH7XAzMzMHuXqw8tor63z+mDT72gvfucPDIKC+oWATcFHP9csofi9hR51XoKP/WJ3ZSMQQAABBBBIukBfwUetUHV1tbS3t5u6+T0393qN4GXDOzpF2ts7pKO9U3p+HcTL0v7m0dijjgKZlUUI0p8ccyOAAAIIIIAAAvEJEHyMz4+lEUAAAQQQQCBNBfw8RCT4mKaNTLURQAABBPqtAMFHu5qW4KNd7UFt4hOwJfjo53oldItDQ4/6e3d5+t+DBw/mVdfx7SosjQACCCCAwIAU8BJ8rK+vl6ampu7Qo5/wY6R5/SyvjdLe3iltGnYMaGRHvw2tXwXLysqU7GxGgfRrx/wIIIAAAggg8P/ZOw/wOKqr/R/tarWqVrMs4V6wAVeaA6aY3kJPQoCQL5VU/klISEIS0tuXhJLkS0gvJCENQoDQQjc2xhgDtjHGGOOCjZtkyeq9/J93zKxHszM7ZWd37+6+l0cP1s4t5/7u3as7c985hwS8EqDw0Ssx5icBEiABEiABEsgJAl4OEil8zIkhZydIgARIgARyiACFj2oNJoWPao0HrUmOQLYLH61EjyCi39Po/6fwMbl5wtIkQAIkQAIkkK8EzMJHcDALE/v6+qSjo2OUN3gv4kW/Xh8zLXg0zwnIHiGAjFAAma9fF/abBEiABEiABEggDQQKNm7cNLJv3z6pra2VsWNr4kISpcEGxybeeGO7dHZ2y7RpU6SkpNizW3THBpLMMDAwKFu2bBO8ODRz5jQJh8NJ1hh88ba2dtm1a4+MGVMhhxxSr+Q4w77W1laZMGG8ZqeXm6DgiVnX+NprW6S/v08OP3ymkm/Fd3f3yPbtb0o0WiSTJ09Uci42Ne2TffualV5ztm3bLl1d3TJ9+hQpLlZxzRmQLVve0CbpoYdyzfH7/d+5c4+0tam+5myW/v5+hdecbtm+facUFxfJpElqrjmNjfukublZxo6tldra0fscVYSPCDFXUlLidyqzHAmQAAmQAAnkJQEKH9Uadjvh4xtv7JCenj6ZNm2yFBdHtft8u7DXVgemei+9Ph9wmx977S1btmsh6WbMmKrkPXRra5vs3r1XKivHSEPDOCWf5+zcuVsLZzhx4nipqFDzec7GjZtlYMDdvVUmhI94BrFjB+6totrzHLdz2LwS4B4n0UtbxmsUPqq1jtIaEiABEiABEsgWAnoIa6O95r3L4OCgdt6HjYIVlQAAIABJREFUz7u7u+XNN3dr57vYL7o5Q/Xq9XF4eEQGBv15eNTEiYWFUhgOaXvt2D0LLoyMCOoeGRmWwaFhGRocluER70GzUVVhYUgKw/49QG7d+obgDHDGjGnanlG11NfXL1u3bteEnjhbczPO6e7D/v2tsmdPo1RVVUp9fZ2S91ZvvrlLcKY/adIEqago931fkCq2uJ/AWfng4IAcdtihSp6Vd3Z2yY4du6S0tFjjiO+1aqmxsUmam1tk3Lg6qampVm6cwQvn0Fi/Z86cLtGoimtOn2zduoNrTpKTG9+Vjo52mThR7TVnaGhAZs3imuN3uPfubZTm5v3a3z6V15yenm459FB/a07Bk08uG1mz5mWZN+8I7ScSifjllbJy//3vE4KHqOeff5aMGzdWuT9Q2Gjee+9/sQOWSy55u7Z5Vy3hj9Py5c/J5MkTZNGiY6WoqEg1E+Xpp1fKpk1b5KSTjlN2U3znnfdKa2u7vOc975SyslLlGGKj9OijS7UDkbPOWqzkRmTt2vWCNWf+/Nkyd+7hSq45Dz30hOzatVsuuOAsqatTb83Bgch//vOw
Nv8uueQ8JdeczZu3yTPPrJIpUybK8ccfo/Sac/LJx2sHsSreiN9xx73aTe5VV71TSkvVXXOqqsbImWequeZgvcG6s2DBHJkz57BRa44qwseysnJt3WYiARIgARIgARJwT4DCR/es0pHTTvj48MNPyu7djXLhhWdrL9uqJnzEgch99z0soVBYLr74HO3FN9XSpk1bZcWK52X69MnytrcdpeS91dKlK+T117fJqaeeoN0Dqnhv9c9/3iMdHZ3a85zS0sQvHWVC+IgD2MceWyq1tdVy2mknaS+0ek1OokfUR+GjV6rMTwIkQAIkQAIkYCbgRvgIr5AtLS1a0T17muTJJ5/W7gdOPfVE16I9t+LHgUGIEkdcD1Q4FJZoNCLRokLtWXFhoTdnNkNDwwKHOP0Dg9LX36/9220KFRRIJFIg+L/XdP/9j2iiPZxDY8/o90UZr+26zd/e3iH33/+oRCKF2v2fivdWGze+LitXvqg5FDn22AVK3lstWfKM5mzp9NNP0kR7qt1bQQiMcyucU1555aWO91Zu50+Q+eBo6Yknnpa6ulrtHlVF0d4LL6yVdes2yDHHLJAjjlDT2dJ99z0iEEtdeunblRRK6WtOUVGhXHCB2msOHKdhrFXU5yxZslwTuZ5xxkma+FG9NWdY7rjjP5oI94or1Fxz4GgJ+xzo2E45ZZGia84aWbfuVe1vn6oO3qB9gdbp0kvPl5qaKs/7nIJ169aP4O2BhoZ6mTjxEOUmM/7QbdiwUVpa2jSRlIqeAOEhAH+choeHZP78ub4eTgb5B92qLnjYw6KFt1jwpk1hYWGqm/RcP+zbu7dJZsyYonnmUvENjNWr12mbuYULj1Ry0cJD/I0bN0k0WiyHH36okqJCiJjx9sD48fUyYYLaa868eYcr6a0Cb869/DLWnGGZP38O1xzPq82BArh53Lt3H9ccn/xQDDcWr732utJrDt4mxj5n/PgGmTChYdQ+RxXhY1VVtZIPCJKYGixKAiRAAiRAAiknQOFjyhF7asBO+Pjqq5u0lwfxom15eZlywkeE4MO9FdK8ebOVfAgND+aISgAv4dOmTVLyec7rr28VRHfAAaLZy7qniZTCzC+++JLmIcfN85xMCB9xb4WDWLxMPWvWDF/Pc5yEj+b7H3p8TOGEY9UkQAIkQAIkkMMErISP6K5RiId9B6LwIHV0dGlOT/DyyaxZ07X9rBvRnl0e/XMvXh7xolNpSVRKios0YV6QCTx6+/o1T/cQQ7pJkXBICj2Gv16/foO0tnZoTkX0eys3baUrT29vn8BGjM/cuUcoem/VJNu27dDuWfDClopn5fiuNDU1a98VeOVS7awc322clff09GoCGhVFhXAm8tprm99ac/zdW6X6e4NIahBowgvuIYeMU1Kfg2clbW0dmlMROINys26nmpuxfq45wdA+uObM0MRmqq45GO9jjpmv9JpTVlYiM2equua8Kbt27ZVJk8Zr0WRUE7hiNie75hTs27dvBC7H0TkVO4hOYtMIG6GCVm1RhX34Iw8b8ZaPnzeyg1mWEtcCgRRsBD8VN3L6OMNG2KfaoqrTxTyEjSpu5PS5CBuRVPTeCrswF7NjzRmSoqKIkmsOOIIhWKr4dog+zlxzkl/dueYkz/DAmjOkubs373NUET6OGzdO2b/NyY8AayABEiABEiCB1BCg8DE1XP3Waid8xF4MP7g/1Z/nqBTqWn+ew3srvyN/oNyBZ2K58zwnE8JHzEX9eY6f53aJRI/6Pbp5lCl8TG7eszQJkAAJkAAJ5CsB7Puw93DyyLhv3z4NEfJiv20+n3Rz3muXZ2hIZGDIOeQ0znjKSoulJE2hoeH9sau7V7p7eh2nRzhUIEUR9yF4s+WsnPdWjkOfMEOu3VslR8NfaeO9Fc/K/THU7/OzQZ/DNcf/GOfi85zkaPgrzTXHHzdzqWT3OQUtLS3u/X8HYzNrIQESIAESIAESIIGME6DwMeNDQANIgARIgARIwDcBCh99o0tJQV34aA5ljcYShbc2H2Q6eXVxa7ybQ1S3dTFf/hHIhPBRp+zlHsU4Ml69PSI/hI+qHgTm36xjj0mABEiABEggewjgYFoXMibaz+vCR6t7Av0zp15b7esHBkdkaDjx0TYEj+VlJVIcLXJqIiXXwaijs8dRAHkg9HVIQt4jX6fEblZKAiRAAiRAAiRAAtlIgMLHbBw12kwCJEACJEACJJA0AS+HinZ5zZ8n+h0PBI1JP5ykx8ekh5IVkAAJkAAJ5CEBCh/VGnQKH9UaD1qTHIFsEz56FT2CDu5NamtrKXxMbqqwNAmQAAmQAAnkJYFEwkcA0cWKTU1No7xC2r0klQiiWfjYPzAiwyP2okdEk6usKJWSkmIlxgahrxHqu69/wNYeaB7h+TFE9aMSY0YjSIAESIAESIAEso8AhY/ZN2a0mARIgARIgARIIAACFD4GAJFVkAAJkAAJkECGCFD4mCHwNs1S+KjWeNCa5Aj09cUfTEciYcFBeqqTl3sU3RbzC1ZGG61EkfpnFD6mejRZPwmQAAmQAAnkJgGj8BE9tAt5DeGj8bqV8NGuvJGcXn//wLAkcvRYWlIsYyrKlBQQdnX3SFt7V8IJAfEjwl8zkQAJkAAJkAAJkAAJeCNA4aM3XsxNAiRAAiRAAiSQIwS8HCrS42OODDq7QQIkQAIkkDMEKHxUaygpfFRrPGhNdhLwcn+i99DJ2yPyWXmlx2cUPmbnPKHVJEACJEACJJBpAmbhI+yxCnmtCx/163qeROGx7fqG8NaJRI9VleUC4aPKaWBwSFrbOmRgYNDWzCg9P6o8hLSNBEiABEiABEhAUQIUPio6MDSLBEiABEiABEggtQS8HCymUvhYV1fHEHOpHWrWTgIkQAIkkIMEKHxUa1ApfFRrPGhNdhLwcn+i99DJ2yPyGevV/03hY3bOEVpNAiRAAiRAAioQcCN8hJ379u0bZa7uPdvOQ6Rd3xKJHsPhsNRUVUgkUqgCGlc2tLZ2SHdvn2VeLex1UUhCBfT86AomM5EACZAACZAACZAAXsJpaWkZIQkSIAESIAESIAESyDcCXg4WUyl8rKmpkeJitd9Izre5wf6SAAmQAAmoT4DCR7XGKCIR6W3t0Ty9WHlwMX5m92/0yOoQNNHndhTs6lGLGq0hgdEEvNyfoKQbb496Pqv/0+MjZyAJkAAJkAAJkIAfAhA+4kcXMup1mPfgZuGj8V7BrfgxkeixKFIo1VVjJBwO+elGRsu0d3RJZ1ePpQ3oTVFRWKh9zOgQsXESIAESIAESIIEsIkDhYxYNFk0lARIgARIgARIIjoCXg8VUCR/Rm4qKCikvLw+uY6yJBEiABEiABPKAAIWPag1yZKhQejt6KXxUa1hoTRYR8HJvoneLwscsGmCaSgIkQAIkQAI5REAXPppfenISPgJBIq+PuG6sY2hoRAaHrcFFiyJSUz3G9sWpbMDd0dkt+LFK8PgYLco+QWc2cKeNJEACJEACJEACuUeAwsfcG1P2iARIgARIgARIwAUBL4eLqRQ+wttjdXW1C4uZhQRIgARIgAR
IQCdA4aNacyHUVyADPQOWwkejJxgrb5DGntDjo1rjmq/W9PUNxHU9EgnHeTUKko+XexO061b0aM5rbIceH4McQdZFAiRAAiRAAvlDQBc+oseJvD42NTUl9AafyOvj8IgIvD1aJXh6rK2pzGrRo96v9o5u6eyyFj8WhgskUkjxY/58s9hTEiABEiABEiABvwQofPRLjuVIgARIgARIgASymoCXw8VEeY3XzPmMvw8Px7+ijOt4yNfQ0JDVLGk8CZAACZAACaSbAIWP6SZu315hQaH0tfaKjMQffJoPQyl8VGfcaIk9gR27m+Mu1tVUSHG0KGXYvNybwIhkhY8oP3bsWIlEIinrEysmARIgARIgARLITQJuhY+NjY2Wwkj9niDRS08DAyNi5ewxHA7L2JrKrAxvbTcbWts6pbun1/JyNBKSUKggNycSe0UCJEACJEACJEACARGg8DEgkKyGBEiABEiABEgguwj4OVy06mEisaMb4SPy1NTUCDw/MpEACZAACZAACbgjQOGjO07pyFUkEelp7dGaMnt8MX9G4WM6RoRtJEsg3cJHP/clbj3Sg4X+Apb5hS0KH5OdKSxPAiRAAiRAAvlJwCh8TBTuGsJH83UQM35mJX4cHBKBx0erNLa2SuDxMddSc0ub9PXHex1nyOtcG2n2hwRIgARIgARIIBUEKHxMBVXWSQIkQAIkQAIkoDwBPweMVp0KQvgI0SPEj0wkQAIkQAIkQALuCFD46I5TqnNpvkd6RQZ7B7WmKHxMNXHWnw4CuS581O9fKHxMx2xiGyRAAiRAAiSQewS8Ch9BwChwNHp8NAsfRxDiesiaWVVluZSW5OaL42DatK9VhgHAlArDIYkU0utj7n2T2CMSIAESIAESIIGgCFD4GBRJ1kMCJEACJEACJJB1BLyIH916VfES7loPUYf/19XVSVFR6sLnZd3g0GASIAESIAESSECAwkc1pkdRQZF0t3TFvLZYeXS0OuTUrXfyAGmXL1Hv7ULmqUGMVmQDgXQKH73cj+jsdA+OViyt7kXMYbEpfMyGWUgbSYAESIAESEBdAmbhIyy1Cl+te3w0XjfnNe/dBwfFMsR1aXFUqqoq1IUSgGU9PX2yv63DsqbiorAUUPsYAGVWQQIkQAIkQAIkkIsEKHzMxVFln0iABEiABEiABFwR8HLQmErhI4yNRqNSW1vrym5mIgESIAESIIF8J0DhY+ZnQLggLMM9QzFvj05h7IyHnLr1iUSRxh56ETN6yZt5irRARQIqCx/NIkYjP6v7FeOLVnpeCh9VnHW5YVNfX5/s379f+vv7pby8XPvhy31qjS3ESuFwWC2jstyaXGeq/83g/irLJ2rA5g8ODgrmvlXIauNcMQofjfcC5nsA/ffhYZHB4XhjQwUhGVdXZeldPuCuZby61tYO6e7ti7MjHCqQokgo4/bRABIgARIgARIgARJQkQCFjyqOCm0iARIgARIgARJICwEVhI/oqH4gWVlZqR0OMZEACZAACZAACSQmQOFj5mdIZKhQetp7Rnl38eLx0a23R/TUy2G7l7yZp0gLVCSgsvDRq7dH8DWXofAxM7Nuy5YtsmTJEtvGQ6GQjBkzRqqqqqS6uloOOeQQaWhoCMTYDRs2yIoVK2J1zZ49W44//vik64bwZe3atbJs2TJ5/fXXpaenJ67OcePGycKFC2X+/Ply6KGHJt0mK/BHoLm5We68805ZtWqVNh6XX365Ns+Y/BOA6Otf//qXPPLIIzJjxgy57LLLZObMmf4rzEDJpqYmue+++2Itv+Md79DWID0tXbpU7r77biksLJSLL75YTjrppAxYySZVJGAlfNT37MkIH+28PeZyiGvz+GJtaWzaL/EBr0WiRSEJ0e2jil8J2kQCJEACJEACJJBhAhQ+ZngA2DwJkAAJkAAJkEDmCKgmfASJsWPHat4fmUiABEiABEiABOwJUPiY2dlRJBHpau2WgpHRYe3MokMIeYzJS9hru3JOPafw0YkQrzsRUFX4mMjbI/pkFeYan9sJH+Ftnt74nGZDcNdfeOEF+cUvfuGpQoipTj31VDnqqKOkpKTEU1lj5ptvvlleeeWV2EcQWN50002+Pf9BlAGx1wMPPGApdrQz9LDDDtPEYdOmTfPdFxb0R+Dvf/+7PPbYY7HC5513nrzrXe/yVxlLaQTM3+mJEyfKt771rayi88Ybb8i3v/3tmM3f//73pb6+Xvsd3ls///nPj+rPj3/8Y02gzUQCuvARJIz7fbMH+L17947yCon8Vl7iUYedt8eiSKGMrT0oyM0H+h2dPdLR2RXXVXp9zIfRZx9JgARIgARIgAT8EKDw0Q81liEBEiABEiABEsgJAkEIHwHCWI/dgSPyWXloMYefwwPAuro6iUQiOcGYnSABEiABEiCBVBCg8DEVVN3VWVQQka79XSLDow8uzSJH1GY+CDW2QI+P7ngzV/oJZKPw0W2Ya+O9C4WP6Z1bfoSPuoUQGl1zzTW+PCbu27dPrr/++rjOfvGLXxQIEb2m1tZW+c1vfiMbN270WjSW/4orrpCzzjrLd3kW9E7gu9/9rmzdujVWEJ4Jv/SlL3mviCViBOAJ8f777x9F5NZbb5Xi4uKsoZRI+Lh+/Xq55ZZbRvXF77qRNUBoqGsCdsJH8/7fi/DRzttjTdUYKS4ucm1brmTc09hi+Ry5uCgsdPqYK6PMfpAACZAACZAACQRFgMLHoEiyHhIgARIgARIggawjkArhIyDYCSETCR/1cigLkQAOIun5MeumFA0mARIgARJIEwEKH9ME2tQMPD12t3XLyNCB4Gu6sNHKc4vxOv7tJHS089To1YOj1/yZIclWVSaQC8JH/X7EykskPsN9CV62osfH9M1Es/Dx7LPP1sLH6qm3t1d27twp27dvt/WiCG+J55xzTtx6mqgXDz74oNx1111xWU488UT50Ic+5AkARFI33nhjnH0IZ33mmWfK1KlTpaamRrNv165dsmPHDk1sh/DK5oRQy2DAlB4CTz31lPz5z3+ONfb+979fFi9enJ7Gc7SVN998U77xjW/Eeofw8R/5yEeyqreJhI8Qtl177bWx73uynmKzCgyNdSRgFD6a7wOMLz5B+Gi+J7D6HXcWQ8OjPcUjXz56e9Thd3b1SHtHvNfHSDgkhYUFjmPEDCRAAiRAAiRAAiSQTwQofMyn0WZfSYAESIAESIAERhFIh/ARDRrbcfL6aDycrK6ulvLyco4aCZAACZAACZCAiQCFj+mdEjjQLBwKHxA9Do/EQtTpIkMrb4/mQ1AKH9M7ZmzNPwFVhY9W9xF6L628zhvFj0YaFD76nxvJlDQLH+E1MRwOW1bZ3t4ur776qiZU6+npGZXnve99r5x22mmuTMGcue666wT1WaWf//znrkNod3R0yNe//vVRdUEIdfXVV8vs2bMTijH37Nkjv/zlLwVCMSSE7f7qV78qDQ0NrvrBTMkTQHjydevWyWuvvaZ5+pw7d67vUOfJW5M7NUDYu2bNGu3F1YULF7r+PqlCIJHwETY2NzfLypUrNXOPO+44rZ9MJA
ACQQsfh4cLZFjixXzVlRVSUhLNS+jDwyOyp7E5ru+hggKJFsWLRPMSEjtNAiRAAiRAAiRAAm8RoPCRU4EESIAESIAESCBvCXgRPgKSXf5E4a3N5dx4fdTzoF4cClVVVTH0dd7OUnacBEiABEjAigCFj+mbFwht3d/TL70dvXEeHil8TN84sKX0EWjv6I5rDIfukUJrkVoylnm5H7ETPiYKc211D4N6UIYeH5MZOe9lvQgf9dq7u7vl3nvvlccee2xUg/AyN3nyZEcjEI76Rz/6USwfRJO333577PcPfvCDctJJJznWA9Hcz372M004p6cFCxZoHiPdvqjX39+veZ5cvny5fOUrX5Hx48c7tssMJEACqSXgJHxMbeusPZsJQPiIH+OLTvp9gfEz3eOj1QtRxpei4O3xgD/5gylUEJKG+pqkMXV198jephaBB0W7VF9XI/hxSqgHP4mS27qc2sL11rZO6e7pjcsajYQkFKLXRzcMmYcESIAESIAESCA/CFD4mB/jzF6SAAmQAAmQAAlYEPBy0IjiQQgf7cLNmcPR6b/rB5xlZWXaoVJxcTHHkgRIgARIgATyngCFj6mdAgVSIIVSKIN9A9LTfuCQUD+wtBI7mj0+Gg8+dUuNh5tW4agZ6jq1Y8ra1SPg9V4kCOGj7u0RNCh8TO+c8CN81C1ctmyZ3HbbbTGDEVr6e9/7XkyMbtcTlEFZJLxQ99Of/lQLVb1p0ybts2nTpmmeF53S008/LX/84x9j2WbMmCFf/OIXR4XqdqpDvw4BJEOsu6XFfCSQWgIUPqaWby7X7lb4CI+/5nsI430F/j0yIjI0Eu/BsKy0RCrHlCWFEaLHzdt2OtbhVqzoRviIxmZMnSCwP9nU1z8gzS1tcdUUhkMSYbjrZPGyPAmQAAmQAAmQQA4RoPAxhwaTXSEBEiABEiABEvBGwOthY6L8xmuJPEBaCR9htfEg0+jx0Zgf/y4sLNQOrXBYhB/8bicUOPAA0fzOtDdGzE0CJEACJEACKhLoGOiQ/pEBFU3LOpsgcsR/ISmQkSGRocEh6e/uk8H+A15ckMwHlmbvLlZhrBMJHZ3CXhshJtrnmGF7yZt1A0WDc46A1326W+Gj+X7DfJ+i10PhY3qnVDLCR4zhTTfdpIW/1hOEhwhZbJcQIvv//b//F7t8wQUXyKWXXirPPPOM/P73v499/p3vfCeh90W0/bWvfU12796tlcG9KMpUV1enDCA8TMJbJfo7f/58gdDS7/re1NSkearcvn27vP7661o/DjnkEKmvr5ejjz5a+0GfnBJsWrt2rWAcd+3apdWHUN+oa8qUKbJo0aKEXjj1MminoqJC5syZo3lLQ30IJbxjxw7t3h51vfOd7xS0t23btphZCE3txrsmyj3//POx5wBgh+865gPsR0KIdfTbGGrdyj6MPUJjQ/iKkM5gN3HiRK2fU6dOlcWLF7uOTIH+6fWgLSSEOp81a5bGbtKkSdpnzz33nPZsBON91FFHBSaS3bdvnzb+SNFoVKsbCbbALvQTAi0k9BHjcPLJJ2v/TpRWrVqljRUSxhRjq6dUtWm0J9l56SR8BJeWlgPe7TDuZk+t+njh+hFHHCGVlZXS0dEhK1as0EKA43uCNH36dI3l2972Nm3uuE2YdxifnTt3yptvvhmbN7ADobcPP/xwhmx3CzPgfH6EjzDB/HIUfh8eOfBjTmNrqqSoqDApy90KFYMWPpaXlcj0KROSsl0vvLdxvwwNH1hn9MRw14GgZSUkQAIkQAIkQAI5RIDCxxwaTHaFBEiABEiABEjAOwEvB45uhY+wIpEQ0k24a+OBpTG//rlZEGn+XSdhJcK0C4dnR88LI7t2vY8MS5AACZAACZBAbhLwK95IVM5JeAiSifLoHhutBI56WTuxo9nbozG/PoJOQkd6e8zNuc5eJSbgZY9t9/KU+b5D/93uXkQPc418FD6md4YmI3yEpQgXihDReoIo6wMf+IBtJ+wEjnaCSLuKIHz64Q9/GLt85ZVXyplnnplSePfdd5/cc889sTbe//73ayI7r+nZZ5+V3/72t47FIBDVhXBWmcEA9egCMLsK582bJ1dffbWlQPHxxx+Xv/3tb1pRCFY/8pGPyE9+8pOYmMtYJ8YZf1u/+93vxj5+97vfLeecc45jXyAWhVdPPX3sYx/TxGYQ+EHAqqdvf/vbMmHCQVGO2b5PfvKTmpdPiNfsEkSf6AdEgnYJ8w31YP4nSu95z3vkjDPOkA9/+MOxbF/4whc0YVsQ6cUXX5Rbb71Vq6qmpkbzmHr33XfLI488krB6zAuEdC8tLbXMh7mDPiKZ52mq2tQNCWJeOgkfMe/xPUJauHChfPzjHx/FwThe119/vbS3t8svf/nLhEwvvPBCgRAbQl+7tH//fu07BwF0ooSx/NSnPpVQdBzE/GEd8QR04SOuGO8j8LtR3AjBNK5bCR71vMPDBTIso4WPKNMwzjn0tNPYbHljZyzENcSNECTaJbceGuFF0ioZw2kHKXy0C3ddHA2bqDnR4HUSIAESIAESIAESyF0CFD7m7tiyZyRAAiRAAiRAAi4IeD1wtKsykZdHlDFet/PW4tbro16fXqdRDGm+ZmWv3UGoF2GnC7TMQgIkQAIkQAIkEBABN6JHp1DSuilmASM+N4oXrQ4v7USPxs+NXTV/TuFjQBOB1eQMAS/3IOi03f2D+T7DKq/5PkT/ncLH9E6nZIWPsPYvf/mLLFmyJGb4r371K1uPe9///vdl8+bNWl54avvGN74RKwePjxBGIukhsI3e/4xk/vSnP8nSpUtjHyFcthvPg37pYn5CyKSLyazsd1P3X//6V3niiSfcZNXy2Ikr4dHu17/+tet6IMT6/Oc/r3mUNCajsBAhxru6uqSxsdGyXggf4akRY6Z7uUO9P/rRjxw9XxqFahjbW265RfOa6EX4CM988CSoe/lM1Hm08YMf/MByTkC8dvPNN7uqB21AEAfRq55SJXyEp07wXb16tatxRWj56667TsaOHRuX363wMcg2YURQ8zJI4ePxxx8fE0k6gb3kkku08bZKmPOYNxBRuk2f+cxnNO+wTOkjYCV8ROvmewaz8NGcB78PjYTjDC8tjkpV1UEPqn57ZhY+QvyYqmRsK0jhY09vv+xvjf8+FEVCEg7Fe8pMVf9YLwmQAAmQAAmQAAmoTIDCR5VHh7aRAAmQAAmQAAmknICXQ0cvwsBEQkg7jy1WXhwBwMqbo1nsiHxWniH+I8oqAAAgAElEQVR1gG776ZTP6XrKB4wNkAAJkAAJkEAeEXDyDul0XUeVyMuK8ZqVKBJ12Ikhcc2rt0krm+nxMY8mNbuqEfC6p3Yb5trqnsB4P2C8r6DwMb2TMQjhI4R8EPTpCZ4YrcRYEJp89atfjeUzi/peeeUVTVikp2uvvVbgrdAqGQWUCC370Y9+NOXg4OkQIW71dMwxxwg8ELpNEHzCbmM69dRTtVC84AVh1csvvywIU2xMEBbW1tbGPjKLwnAB3hpPOeUUzWMqQhlv2LBB84poFGlB2PjlL395VAheo/DR3
A+IGo888kjNq+f69es1z54Q5i1btkxuu+22WHZ41ENoaLuEEMMYSz3p4c3xuxfho7F+iP4gUIN3yN7eXk1Me9ddd40yAR5A4QnUmLBmYY4Zw7Pj+vnnn6+FhIa3P4Sefumll+Ly6PWkSvho5gevjieddJLmCbK7u1uz6eGHHx6VzWpMkcGt8DHINoOcl0EKH419PP300+XYY4+V4uJi7XuC+W/23mj+vqE8BM+Y/8bvE8YFcxAeRnEdni7xnTMLc+HpFHmZ0kPAi/AR+3zjS1bG+40RPMu0ED5WVZZLaUlx0p3JBeHj0PCw7G08EHLemArDBRIpDCXNiBWQAAmQAAmQAAmQQC4QoPAxF0aRfSABEiABEiABEvBNwOuho11+Lx4fYayV+NHswVHPh/+bRY16h/XPreqzK6OX9dp3I+RkyvoeLBYkARIgARIggTwh4FbQaIUjkXdFK0+MZuGjXmciQaRZSGlVxvyZ29+NffLCwUvePJlG7KZPAjt2N8eVrKupkOJokc8a44t52UvbvTRlvFcw7u8TeXen8DGwIfRcURDCR7NgURfImY1BCN/7778/9rHZSyO8+cEroS4ugkDpE5/4hGWfjMKuRF7aPANJUGDt2rVamFuInOBREGK+Qw891HUTDzzwgPz73/+O5f/6179uGY4ZeZAXCeJKhJPWhaRghHDQusdF5IGAFCHGzX9vYCdEXNu3b4+1qYdu1j+wEj6ibxD3GUNFo10keOA0hyV3Cm9uFsZC/Kl7nvQjfISnUIgtIV4zpubmZs0bpdErJ8Ibw7Oknp566in585//HPsdfYUYt6GhIW4czd8NPUM6hI92nj4x7kYvqbDpsssuk3PPPXeU/X6Ej8m0GfS8TIXwUQ+vbgSFvz34vj344IOxj9/xjndoQlhj+vvf/y6PPfZY7COEuIeo1ji39L99t99++ygPuBCwYjyY0kNgYGBA8wyLZCdqxFqpC1SNXoVHCR9HEOY6XrxXV1slkYh9OHS3vcwF4SP6urdpf4y33nd4e4TXRyYSIAESIAESIAESIAERCh85C0iABEiABEiABPKegNeDRytgVnUE4fVRf6ir/98ojtTtsPMIaSxrzGu2362YM+8nCgGQAAmQAAmQQIYJuAl5bTTRyRujnYdHu9DWqNuujH7N+H/zv938rtvvVcjoNX+Gh5LNK0wgV4WP5hem4MnNLCZReFiy3rQghI/wnAYhmp7gBRGCPWOCEAVhX3VRmp2o0SyO/PGPfywIx2tM8PB3zTXXxD768Ic/LCeccILjWPz85z+XPXv2OObTM8CTpDnsbX9/v1bH+PHjNe+AXpI5zLVZlKfXhftghPyGd0WzIG/dunXyk5/8JNas0XuilS3wtgihoR6+Gt+v//3f/41ltRI+QggIT4KJkrkvYAsRoTmhL1/72tdiIiN4Vfzc5z4Xy+ZV+Ig2vvOd70h1dbWleQh/jjDoeoKXTnjkQ4ItEOUaQ3nDtqlTp9p21VwfMqZa+HjppZcKxtUuwSOlcQytwsJ7FT4m22bQ8zJo4SO+xxBIWyV8p7F+6YLrRYsWydVXXx3L2tnZqa1deoJ3VYQYNwrmzGvdb37zG3n++edjH8PLaFVVlZflgnl9EjAKH+3uG4zCR4gjjXt1XSw5YiN8PKR+rBQEEMU5V4SPLa3t0tvbP2q0QgUFEi2i8NHnFGYxEiABEiABEiCBHCNA4WOODSi7QwIkQAIkQAIk4J1AEMJHtOrF66OVgNFYh9mzi1ncaOfJRa/Drn4rO+0+M5L0wsj7CLAECZAACZAACZBAIgJOoj434aPtwl2jXWP5VIoezW1Z/a5zcOqzmZfX/JxxJGBHIJuFj+aQ2MY9PIWPmZ3zQQgf29raRonZEHYaokFjMgujPv3pT8uCBQviOm8Wwl111VWC8LTG1NXVJSivJytPblZUjWIwN9S9hrJ2qtMsMrziiisE4Zi9/J343e9+JytWrNCaguANIkgnAabZ4+K3vvUtmThxolaH2SYrb3dW/TIL0z74wQ9qYZnNacuWLfK9730v9vGnPvUpLXy2nrwKH63mg7FNiFJvuOGG2EcQrM2fP1/7HWHKIYTU03vf+1457bTTnIZNIOpcvXp1LJ9Z+Pjss89qIbvdpMrKSjnjjDNiWV988UW59dZbY79jzsHLqdOcePrpp+WPf/xjrBzEpBCV6smL8DGINoOel0ELHyFEtBMqgtnvf/97TWyMhO8GviN6MrP+5je/KZMmTUo43PCyaqzjfe97nxaKnin1BNwIH2GFLoI3Cx/1+42RkZAMy2iFI+ZQfZ216Nprz3JF+Nje0S2dXd2jug9qxdGwVyTMTwIkQAIkQAIkQAI5SYDCx5wcVnaKBEiABEiABEjACwEvor5Eeb0IH2Gfk/gReczhqs1l7DxNJhJGGts2c3Ji4XTdC3fmJQESIAESIAESSEzAq4dHvbZE4a6RxxzyWv/M6f9mgaRVe3Z5jHUbe23XRycxgpmc1/yceyRgR0Al4WOiMNdWe3rzvYOxjxQ+ZnbOByF83Lx5s+ZVUE9WHvF+9atfyapVq7QsToI9iNMgUkOCtz6jWE2fX0aPbAgFfc455ziC9Cp8hEAPQr2gklkMhXohskKYYojzysrKHJsy9gFlL7/8cscyO3bskDvuuCOWz+iR0yx8/PKXv+w6fDeEXXoYbXiIhKdIc/rLX/4SC/sLz5033XTTKAGaV+GjXXhwvV147zOGRzd6A12yZInAHj0ZBaCJIJoZmef3LbfcIuvXr3ccB2RAmG5jqGqz8BGeBGfPnu1Yl9nr6UUXXSQXX3xxrJwX4WMQbQY9L4MUPrr5Ht9///0Cb7NImKfwNKsnhLeHuFVP4OWU8HfNWAcEzgiNzZR6AhA+Dg4Oxu4prLw5wgon4ePwSEhGTMLHaFFEamsqA+lErggfu7p7pK29K45JCYWPgcwTVkICJEACJEACJJD9BCh8zP4xZA9IgARIgARIgASSJOBVzOclNLRTuGvd9ET5rA4wrTxCGjE4iTCR1+tBqh1mr/ySHC4WJwESIAESIIGcI+BFtOckFPTi/REg7bw9Gq+ZhZJuxY1uBJh2g+mFibkfOTdB2KG0EsgW4aPVHtxO+GjlPZ6hrtM6rSQI4SM8pcFjmp7g4c8Yohnhlq+99trY9Xnz5gm8HdolCM3gpVBPVmI3iMfefPNNLQs8QsIToFOCOA0htxMlCAR3796tZTnvvPPkXe96l1O1nq7fe++98p///MeyDERx8IKJMOC6R0ZjRrPYzVPDhsxGr4lmUZ9d+G2rtszjbhYSIqw5BHF6svIm6VX4+Otf/9rRwyXEjnqCQBahi5HMYdSdvADqdZhDS6dS+PiLX/xCotGoq6G98cYb5dVXX9XynnzyyfKBD3wgVs6L8DHZNlMxL4MUProRRhu/B2bhI0TdEHcnk4L2HpuMLble1qvwUb+XMHqgx7+thI8lxVGprqrIdYSe+tfT2y/7W9vjyhQXhQMJCe7JGGYmARIgARIgARIgAQUJUPio4KDQJBIgARIgARIggfQS8CrcC8rroxdR
pJ13yEReXXSKicJeI49T/52up3e02BoJkIAXAgjpBo8rOARPdPDtpU7mJQESyAwBJyFgIsGjk1gSPbITNybyDqmX04kkEkSa81r9blWPW9pOfNzWw3wkkK3Cx0QvRhmv6fcPFD6md64HIXz897//LQ888EDMcIQGhldHPT355JNy++23++7YWWedFbdfhEDv+eef1+q08iTopzHMQYTQhmAPyegt0E99dmUgVgOzRGIqeLp8//vfLzNnzoxV09TUJF/60peSNuWSSy6RCy+8UKvHKPjCmGHs3Caz4M0sFDULI+Htsbp6dJhYL8JHt/YZhY/GMbzttttk2bJlWvdmzJghX/nKV1x11SzgNAsf0QeIe90kiBqnTp0ay2r2+GgUEDvV94c//EGWL1+uZYOXSKMnQi/Cx2TbTMW8DFL46OZ7nOh78NnPflba2+OFXU7jY7x+2GGHyRe/+EUvRZjXJ4FUCh9LS4qlqrLcp2W5Wayvf0CaW9riOhctCkmoYHSo8NwkwF6RAAmQAAmQAAmQQGICFD5yhpAACZAACZAACeQ9Aa/CvmSEj4BtF4bajRDSykujsZxRCGk3sF48Vprr8Moq7ycXAZBAhgnAk83+/fvl6KOPljvvvDPD1rB5EiABLwTcCPn8hsKGHaFQaJQ5bjw/okBQ3h7NdRmNcdN3M0s/ZbyMB/PmDwFVhI9eX17yInxE3vr6eikqKsqfgc1wT5MVPvb19WkhjltaWrSeWAl8jN4Z/XQXgjeEEzbOixUrVsjvfve7WHUISY2QtskkhD694YYbYlVAGAeBXCoS5vorr7wijz76qKxbt862ife85z1yxhlnaNdbW1tHidvmzJkj06dP92weRHKzZs3SyiUjfET5f/zjH1ofkDBOCO8biUS0340hy+083qVT+Gi01ezVLxFE87ywCuXueRDeKmAWPmJOu903GMW/Rx111Cjvml6Ej8m2mYp5qZLwEeHfGxsbtRGDMP+4447zPNwod8IJJ3guxwLeCZiFj6hB/04Z7zHg2dfo7dHs8XFoJBzXeFlpiVSOKfNuVA6X6O8flH0trXE9jEZCEgpR+JjDQ8+ukQAJkAAJkAAJuCRA4aNLUMxGAiRAAiRAAiSQuwT8iPm8iAedwk77FULalcNIObWpj6aXfriZAX5YuqmXeUiABPwRgMcVhPr7wAc+KBdffJG/SliKBEggZQTcHrobDXDjvdFssNuQ08bDSL2OoISOTja46WOigfDDMmUDy4qzmkAuCh+NL0fh3xQ+pn+KJit8vOuuu+TBBx+MGW4MLYwPzQImvz385Cc/KRDP6am/v18+97nPxbwzTps2TSBQCofjxSpu20SY64cffjiW/Sc/+YlUVKQ+rCn6snXrVk0ACQ9+Zu9yX/va1zQvgQjT/dGPfjRm39lnny2XX3652+5Z5ktW+Ihw4xC26gmCO4jwdu7cKQhRrid4I4Tg0pzSKXzE2GKM9XTzzTdLVVWVI79nn31Wfvvb38bypVL46NYmGAPBsR6W/cwzz5Qrr7xy1DjonkvhOXTx4sWxa2axZbJtpmJeqiR8BB+IlJHgiRWCXiZ1CQQhfETvRqRQRkzdpPAxftz7BwZlXzOFj+p+I2gZCZAACZAACZBApglQ+JjpEWD7JEACJEACJEACShDwKthLxuujG8+OOhQ34kY7bzBWNnrtp/YgcsT8GFKJIaMRJEACLgg888yB0IRIJ5xwrIsSzEICJKAqAT/CvkThr/V+2oW4xvVEYkW/14ztWrEOqp+qjiPtUp9AqoWPbvfWQXp8pPAx8/MuGeGjlajRHOb6n//8pzzyyCOxjn7/+9+PC3dsRWFwcFALDauLt+bNmyfXXnvtqKxm0eU555wj7373u31BNfcF3hQhrEx3gggSYcPvv//+WNPGUN8Q3eneNWtqauTGG29MysRkhY9oHGOqh+1esGCBFi78X//6lzz00EOabbDzhz/8YZxHZ1xLp/Bx9erVo0J5w07Y65T+/ve/y2OPPRbLlkrh4//8z//Iqaee6mSSmL1QXnHFFYJ5oicvHh+DaDPoeamS8PEvf/mLLFmyJMb2Rz/6kdTW1jqOETNkhoAufETruodHszdHXDN7fNTvL/S8VsLH0tISqaLHx1EDy1DXmZnnbJUESIAESIAESCB7CFD4mD1jRUtJgARIgARIgARSSMDtAaRughfhI8o4eWB0I3D0Wg/yW4W+9tpXK+xB1JHC4WTVJEACbxFYseKFGItFiw567yEgEiABNQn4Ef1Z9cRO8OjkdTGd3h39eK60G7WguKk5K2hVuglkq/DRvO83CieNe3d6fEz3jDrQnl/h44YNG+Q3v/nNKO+EZs9yZq+M8PgHz39uk1lwBpEfRHR6gmdEeBs0ekh0K+Iy2rB//36Bd0d4L9RTkOI2vU6IOBGi+5lnnpGPfexjUldXZ4vCKCacOXOmfOlLX9Ly3n333aNEkfCqOGXKlIRIu7q65A9/+IMsWrRI88Zo9IoZhPDR7BHxe9/7nuYVTxetmkV5RmPTKXzs7e2Va665JtY8uELcagyhbgYJcRT6o/cF14OcG2bviwjB/c1vflMqKyttxxQeFuGBctWqVbE8sLGhoSH2uxfhYxBtBj0vVRI+wtsjvD7qyRh+3m6Q8Lftz3/+s+Yh8uSTT9bCwDOlh4AX4SMsgjjSShg5ImEZkdGhmkuLo1JVlXovwOkhFUwrvb390tLaHldZcVFYChjpOhjIrIUESIAESIAESCCrCVD4mNXDR+NJgARIgARIgASCJOBVzOdF/OhF+Ig+uRVCmvO6+V1n5tRfp+tBsmddJEACqSGwYsVBj4+LFtHjY2oos1YSSC+BRCI/L2LCZDw2uhVIgowbr5NGgl5FjF7zp3e02Fo2Emhsjj9YrqwokWhRJJDuuN1jI5+X+w2j8NF8L2EWQTLUdSBD6akSr8JHiAzvvPNOTbxnTMcdd5x85CMfGbW2mkVduH788ce7tg9eBCEA1NNll10m55577qjy5jy4CIHfVVdd5UpshP7/8Y9/HCVsg1DpAx/4QJyd8EK5du1aLeztkUceqYVudhtaGwI6hCbW08SJE+UrX/mKRKNRSx4Qeb766qvaNaNg1FwPBFUQRaI+qwSR3K9//WtN4Io0btw4zY6ysjLt9yCEj2aBK4R0RjHqj3/8Y8FnVimdwke0/6c//UmWLl0aMwVC0E984hOW4whB7Le//e240OOpFD7CMIwlxtROLGcWBM+YMUObS8bkRfgYRJtBz0uVhI/4Dn3+858fNQ+c1rJHH31U/vGPf2hDgnGEt9pDDz3U9drHjP4JpFL4GI1GpLbaXpTs3+rsLdnd0yutbZ1xHaDwMXvHlJaTAAmQAAmQAAkES4DCx2B5sjYSIAESIAESIIEsJuD2EFLvopeDSJRJRvxoLu+1Lqv2jUPltu9u82XxNKDpJJBTBCh8zKnhZGfyjIAXQV9QgkcgTiRqNF/3k9+qDn1ovfQ5mTJ5NpXYXcUIuN1Pp0r4qAsk6+vrE3pgUwxb1ptjFj5ef/31o0RgfX19snfvXi207o4
dO2Tjxo1xfYbw6rOf/WycUOv//u//NKGgnsxhsJ3gYa4h3LUxtDPCzJrX5CeeeEL++te/jqoOYqMzzjhDTjjhBMGcMiYImSCsQvja5cuXj7o2efJk+fKXv2w5B81e7SCuPP300526oV1Hm/CCuH379lj+hQsXCrxkGgVuEFdCmGfsz3nnnSfvete7YuXMwjeU//jHPy6HH364FBYWxvJh3G6//XZNqKknCE8h2tJTEMJH1HXHHXfIww8/HMcC/D/84Q/bMkq38LG1tVUTfho9OEL8eMopp2jCNLDEvINHU4Q4bmxsjLM91cJHNHjYYYfJhz70IRk7dmysfXisfPDBB7VQ6MYE0SO+g8bkVfgYRJtBzkuVhI9gY14n8Rk8P2J+G7+/8K6K8TF+F+Cl9jvf+Y4UFxe7WiuYKTkCqRQ+Yn0dN7YqOQNzrHRHZ7fgx5xKouEc6ym7QwIkQAIkQAIkQAL+CFD46I8bS5EACZAACZAACeQgAbeHkMauexE/WuVNJGB0sscunJ0b+5zqdrruZfiDrMtLu8xLAiQgwlDXnAUkoA4BP6I+O+ud6nIrhDTnQxi6RMlLqGzUk2pvj3ZtqDPqtIQE4gm43RtT+Jhbs8dK0OOlh29/+9vlkksuifOYB2958JKmpxNPPFETcnlNEHrdddddsWLwhIcQxeZk9i5pvA5hEsIAQxCDvydNTU2jhG96XnhW/OQnP2npaQ/z/lOf+tSocghjCzGj22TlnRJl4WGyoqJCtm3bNkqkqNcLsWdtbW2sGXhYhCdCeNkzp3nz5ml5cc0sUoXXRdire3tE2aCEj2YBo26X3Xjp19MtfES7a9askZ/97GeWw4YxhTjSKIyEl0yjADIdwkfduDlz5khVVZVgzF9++eW4eXvBBRfIpZdeGtcXP8LHZNsMcl6qJnwEm9tuu02WLVsWx3ratGlauPm2tjZZvXp13PUbbrhBpk+f7naZYL4kCfgRPqJJ/d5Av+cYkZDgx5gKpEAOaTi4Fidpak4Ub23tkO7evlF9AbUohY85Mb7sBAmQAAmQAAmQQPIECl54YfXIli1vyLRpk2XKlEmuw0Yk37T7Gl54Ya00N++X4447WsaMqbB8cO6+tuBz4o3cZ599QUZGRE48caFEIsGEvgnS0j17GuWVVzbKuHFj5YgjZik5zrBv587dMm/eETJuXJ32gEy1tGzZSunq6pbTTz9RybfiW1vbZc2adVJWVipHHTVv1NvHqrDEerN1K9acKTJ16iQlxzkb1hyIGAoKRE44Qc01Z/fuRtmwYaPU19fJ4YfPVHTNeVV27twj8+bN1tZGNdecZ99ac05SdM1pkzVrXpby8lI58ki115zp06do+xw34+z2IFJfV70IH1HGnP+FF16Slpb98ra3HSUVFeVxy7WTPeYwdlZt2H2W6HPdkN7ePnn++bXa/uu4445S8vu8Z0+TvPbaZqmvr5WZM2e4Gud0/13cuHGT7NrVKHPnHi5jx9Yot58FD3gG7O7ulsWLFym5n21r65B1617RDhDnzz9Cybm4bdsOeeONN7U9zuTJE5QYZ9yn6On444+R1atflv37W7U1B3tGJ/FUur8rWHOwFwuFCmThwqOU3M/u3t0kmzZxzUl2bqxYsUp6enrl5JOPV3TNaZd16zZIeXmZdo/qNsRmslzsylt9V7du3S7bt++UadMmysSJExLaaCdAdPLuaLTHq+gRZV98ca20tLQJ1h+wRB1+QnV74eJlDDAHV658Uds7LFp0jJJzcdeuPfLqq5vkkEPGyaxZh2Z8LlrxffnlDbJ7915ZsGC25jXLzZ7byzgFkXfp0hXamnPaaZl/nmO1v8ff5rVrX9buBxYsmKv9/aPwMYiRV6cOv8LHBQsWyNlnn615GbRK8HgGL4B6uu6667SQzV4TvBYaw/guXrxY85JolZD3V7/61Sivim7bO//88+Wiiy5KuMeDANEoJkRI7auvvtptE1o+eF+8+eabXZWBUBECNrM3PxTu7OzUQlgbvTkmqhTiLNQFEZ0xBSV8RJ1mPhARwtNdor+vmRA+wtb169fLLbfc4jgOYP/pT39aPvOZz8TyplL4+LGPfUwbVzcJgmMIH634ehE+BtVmkPNSReEjvLbC6+tDDz3kZng0ATU84Vp9f11VwEy+CHgVPu7f366toxUVldrznINnqAUyLPFeC8fVVUthmN4M9cFp3Ncq8FRsTKGCAokWqXWG2t3dI889t1oKC8Paeb6KZ+U4g9648XU55JB6mTVrhpL3VngOsWcP7q3mas+QVbu3wj3KsmXPCrQRp5xygpLnVtCUrFu3XrDHmz9/tpLPFjdt2ipvvLFdZs6cLpMmTVBunLHePPfci4L1G9oXFZ8hZ8Oa8+abu+W117DmNMisWdOVXHNeemmDFnngyCOx5tQqd1YA5y/Q5/T398mpp56o5N+W5uYWeemlV6SyUuU1Z4u88cYO7fx00qTxSq45eD7b2tomJ574Nl9rTsHjjz818uKLL2kPJ+fPnytFReqJ9u6//1F5881dctFF50hDwzjlBgIHxHfddSD0wTvecb42EKql11/fqm1Epk6dKCec8DaJRqOqmShPPbVCW/xx2D5jxlQlNyJ///u/Zf/+Nvmf/7nMUpySaah79zbKf//7pFRVjZGzzz5NSkrUC+3w4ovrZPXql+TII+fIvHlzlF5zLr74XE24p9qNBcS3//73gTXnne88X0pL1VtzsGl/+mmsOZM0caaaa84zsnHjZu3mbMaMKUquOX/727+1P/Lve9+7tYNi1dLBNadSzjnnVCXDyWCPg3XnqKPmaiJXN/scJ6Gh1TjYlXHj4fGBBx7VhP8XXXSu1NXVamuOUxhrNza4adtcj1UZ7HPuvfdh7YbnoovOTrjm+GEXxLzesmWbLF++SqZOnaw9WHMzzkG066WOp59+TjZt2iKLFx+n2ZlpAY2V7Xfcca9AXHjVVe9Q8m9LY+M+efTRp6SmpkpOP/0kJf+2rFmzXtauXa/dW82de4T2sDfTyRzq+qGHHpddu/bKhReereQDVKw5//nPw5rw8cILz7H0BJRppps3b5Vnnnlee3lQ1Yf5eCCEe8BTTjlepkzBmqPWgQjG8I477pG2tk5l15y9e5vksceWSm1tlZx2WmrXHL8CZAiZX3ppvfZwEsJ6Y9hN/Xvi1/Oj1fcskUjSTlh5//2PCIR7l1zydm3NSfT3zw8HP2WMfevo6JR77nlI239deul5Sv79w8sd2EfgngUCUhXvrZYsWS6vvbZFExXiJUcV/v6Z5/Bf/3qXtLd3aM9zMn1vZbVnhnD04YeflNraGjnrrMXavVUi4aPV/YLVC1HgYPQSjzwMdZ2ZnUQi73e6RTichYgNngQnTZokCNFcXV2d0GB4Fty6dauWBwKgn/70p77vNX7wgx/Ipk2bYnXBW5/dOou5hLxPPfWUPPvsswlthF3wRAkBp9Gjol0hiB5/97vfaaG3weOjH/2oIDS21wSxHzxZrlixwrboMcccI+973/ukvGVAGVIAACAASURBVDz+JUS9EIRYCIuNuv
Rw4OYKMXYIlX3qqadaCgBQ/k9/+pNWDCF5b7zxRq/dieVftWqVJjzVE+xHCOlEySxshSfLCRMmxIr4sc8YWhv/Rjhiq9Tc3CxPPvmkpZANcwPMLrzwQm3Nu+aaa2JVIPw6QlEHkcyeSiF6xPz429/+Fpvz5nYgpLv44osF3iDtkhfhY1BtBjkvd+7cKV//+tdj3bvppptGrTl/+MMfYmHqsR4h1LsxGefAJz7xCTn22GMTDpcXAfC6deu0OWP2qGpsAGsKfpzWySDmEOsYTcBK+Igc+ktO+t8O3WMuHLM88cQyqasbq+0XD4YutxY+VldWSElJ8meYe5tapKu7RzO+vq5GykpLUjaUxrbQDtoLImFt3L23Oa6qcKhAiiJq3ee3t7fLPff8VxOl4GytNIW8/bLFC2XPPLNKDj10uvaCv4r3Vviu4HnOmWcuVtJhFe4lcG6Fc8qrrnpnxu+trOYCzlrwDBlrzhlnnCzFxcmvJ37nnF25VatWa8+Qjz32SJkz5zAlxVz33POgYP1+5zsvUFIQp685RUVFmoZIxTVnw4ZNghe/IXCFEwIV15zHH18qmzdvkzPPPEWmTJno+14y6O+IXp++5kDo+p73vEPJNQc6NjxDhpOl009Xc82BkBniTDiamD17lrJrDhxrvetdF/pyHlOwbdsbIxBVwJMiVKjJPjROxaRuatqn/QFtaKiXaLRIORvx8AOHIoODQzJhQoNyCwLGBAeIzc2t2h/32tpq5YRcsBFvt+MhNB7yqvjmAGzEWzbwEDBp0kQlH+TjDZt9+1q08VVRsAeGGGN9zcFDQRxqq5b0NQdvfRUVRTXPiiolfc3B/8eP55rjd2yyac2ZPFm9zSa462sODrBV9Zp5cM0ZI5WV7rxG+xHvJeP1sampWdvnwIsPHgzpe7EgxI8YJy+iTPP3CWWHhoYF6yLWHLyAoqJgD/zwnYbgv7q6StF9Tpt0dGCfU62JKlTcc+NBRl9fr6PXML/rbrLl+vr6BW/PYQ7qIuFk6wy6PNactrZ27d5KFU/1eKCrJ7yMsG9fs3ZvUF+Pfc7BNSdoFn7rw1oDkSv+j70Y1xx/JLEmQtDFNccfvwP7HHXXHP1vCMYY91Z4lgMvcW68N3oNPa0TdKrbzhvkwX1OveNhg5+/jX7KGGcFPKccWHOGZfx4ddcceAfHATHE/6q9mAeesA/zEd4BcNiQ7Lj4/+bal4SwsLe3V1S4t7Lan2PNwZ4bAmb93orCx1TMBNaZCgJdXV1aiGKIAhF6G+sU1gGElUb4Yog4va4L2AviMBOeE72WNfcRdkHghf/je4UDx/Hjx2uiShyUuk04dEOYbITwhm2oq7KyUhNzwtOjivtWt31LRz6EaMYY4L4Y44vnwnV1dbFDN7MI74c//KHmRTiIZCV81F8YgQBy+/btmm2Ya2gTc2PixIlJNZ2uNnN9XkI4hx8IaJHwPAXiYYS9VtEZQFKTJosKexU+Ij/OrfAMAvtF4wtbw1IY13PsJ6vGJO8EYMsbO6Wz66DwMSgxotVQGdsqLyuR6VMOisuTGdq+/gFpbmmLq6KoMCThsFoHVwMDg9p+FuuSus9zuqSlpVW7Z1H1GTK8FcLjtKr3Vtj/HHiG3Kd5KlRx/4NoMnj+GYkUauJHFe+hsZdEFEc4MsKeOdn9bjLrjF3ZxsYmgdgM3gqhz1EtHVhzmmR4eESzUcWXvjs7u7Rzq2xYc3DegudOqs1FrjnBfPNwZoU1p7pa/TUH2hcv9+mxZ9fNzc0jBx64FSgpQIKh+tvI+kOTYIY32FqwkYOdKv6B1xnqD1ZV/APPcQ5mPhofiqs8zrDTKcRZMET81XJwzQkrJ3rUe8Q1x9/YGkvxb0swDLPhb4vXNceP8FH/O2ZH1apO/TPjXDSXD0r86Mc+oy1Wa45fTsnPvPgasuXvHzhyP+t/BmTPOI9o91WqPCQwCx/598//HNRLZs9c5JqTzGhnepzdrCEHvs8H15xEZfwKHsHQTtQYe7Bk8aaY8UUO/P3DsxKv9jmNnxtGTnXgOu+t3FBKnCfX/rY0NrfHdbiyolSiRfEH837o2e2jdU+MRg/wbl9iMoskjeXo8dHPKLEMCZBAEATgjRQebCEkdErwzAlPn3rCv4P6W59IhOhkl9/rmWjTr60sRwJeCRiFj8ZzHrPHRwiL9e+x/n/sc4xnVyMSlhEZLeDTXgQZW+XVrLj8uSB8bO/ols6u7ri+FRepeXbFe6ukpy01EckjHOU5n2fl/oHq9/mqal8OPs8RJUWPsC/TzxbdjH6uPc9x0+eg82TPOKt1bmUeh2TXnIKWlhaoHplIgARIgARIgARIgATeIuBH1JeoTCLho9XmzviZH/GjflNlNaBu+uYmTzomiyp2pKOvbIMEUkUA4TT0tGjRwlQ1w3pJIK8IBHUInyw0JzvsrjuV0+1yI5Z0kwf1ObXpdN2KlZ8yyTJn+fwgsGN3fDjBupoKKQ7Iy4WbPa7xJSk3e3oKH/NjbrKXJJAtBCD6u++++zRPivCGecMNNyTcC8Ab5PXXX6950kQ6/PDD5Qtf+EJg3c2ECDETbQYGjBWRgAMBJ+Gjvv/XhY9mQeRoIVJIhiU+ZPO4sdVJR13LBeFj075WGRgcHDUioBWNhjlPSYAESIAESIAESIAE3iJA4SOnAgmQAAmQAAmQAAmYCLg5jLSCFoT40a1I0q2NXm1KNBnctskJRQIkoA4Bs8dHdSyjJSRAAm4JeBX4JSt41A8qjfYlK3AM2tujlY1ueTIfCTgRyGXho+6FBwzq6+t9hQ9y4sfrJEACJPDXv/5VnnjiiRiI008/Xa644grLaFkQUP3jH/+QJUuWxPJfddVVgjJBpUyIEDPRZlC8WA8JOBFIVvg4KhpYQUiGR+KFj2MqygQho5NJ2S58HBwcksZ9++MQIMQ1Ql0zkQAJkAAJkAAJkAAJHCBA4SNnAgmQAAmQAAmQAAmYCPgV+PkRGSYjdPRqp1N+p+vJTJRU1p2MXSxLArlOgMLHXB9h9i9bCHgVL3rpl1PdTtfNbTmFttYeJlmEt7b73Kl9p+t2LPyW88KWefOTQDYKH43hrDFqdqGuKXzMzznNXpNAugm0trbKddddN6rZGTNmyIUXXijTp0/Xwl+3tLQIvMHdeeedsnv37lhe5IP3xyDDKmZChJiJNtM9zmwvfwkEKXzEnn5oJN57YRDhrrNd+NjR2S34MaeiSEjCodHhwfN3NrLnJEACJEACJEACJEDhI+cACZAACZAACZAACcQRSEakF5T40XxgqRvpN/S1uZNu++g2H6cRCZCAmgQofFRzXGgVCfgh4EXo5yWvbksqvDo62eF0PRGnZMr64c8y+UOAwsf8GWv2lARIIHUE1qxZIz/72c88N/Dd735XDjnkEM/lEhXIhAgxE20GCo2VkUACAn6Ej6hOD3E9yuOj1k5YhiVeyFdbUynRoojvsch24eOexv0yPDw0qv+gVMww177nBAuSAAmQAAmQAAnkJgF6fMzNcWWvSIAESIAESIAEkiTgV/DnVM7uupfP3XqJ9ILAyW63dQVVj
9v2mI8ESCAxAQofOUNIQG0CQYn3kqknCO+NbkWT5tHwa7ffcmrPBlqnCgEKH1UZCdpBAiSQ7QQ2bNggt956q/T09Dh2Zc6cOYIQ1/X19Y55vWbIhAgxE2165cL8JOCXQPDCx5AMS3zo5pLiqFRXVfg1U4zCx0SV1NfVCH6c0t6mFsGPU3JbX6J6enp6ZX9bZ1yWwnCBRBjm2mkIeJ0ESIAESIAESCDPCFD4mGcDzu6SAAmQAAmQAAm4I5CMgM+prBeRI6z1InR0attd7w/kCrIuL+0yLwmQQHAEli9/LlbZiSe+LbiKWRMJkEDGCAQp+vMiWPQikAQcJzudrtsB9lsuYwPGhrOOQC4JH7GfN4e91n+HuKioqCjrxocGkwAJZBeB/v5+WblypTzxxBOyffv2UcaPGzdOJk+eLAsXLpRjjjnGce/gt+d79uyRJ598Mlb83e9+d6ChtK3sykSbfvmwHAl4JRC08BH7++GRkIxYeH2sG1stkcL4UNhubO7q7pHN23Y6ZnUrVEyn8LGpuVUGBgbjbI8WhSRUwDDXjoPKDCRAAiRAAiRAAnlFgMLHvBpudpYESIAESIAESMALgWSEf05lgxA/oi9e6/HS/0R5nfoXVDushwRIIDkCzzxzQPhYXl4u8+fPTq4yliYBEkgbgVSK+xLV7UUICRhexZA6QL/981subQPHhnKCAIWPOTGM7AQJkICCBPAcobOzU4aGhqS0tJTiawXHiCaRgBsCqRA+jowUpMTroxuxYpDCR7d1JeLc09Mn+9s64rJA8AjhIxMJkAAJkAAJkAAJkMBoAhQ+ckaQAAmQAAmQAAmQgA2BZMR9bsr6ES0GWYYDTwIkkPsEdI+PFRVlMn/+nNzvMHtIAiRgS8Cr4BEV+RE2OokTna4nGsJkynJqkIBbAhQ+uiXFfCRAAiRAAiRAAvlIwIvwEXxCoQNiPf3/2NMb9/X6v4dGrD071lZXSjQa8Y0anh87u3psy5eXlUhZaYlj/YnqQR1Ibupxamhv035NIG5ORZGQhEP09ujEj9dJgARIgARIgATyjwCFj/k35uwxCZAACZAACZCABwJuBIx21bkpG7SQ0alNp+se0DArCZBAFhCg8DELBokmkkAKCTgJBYMWNvptzw0Cp7rd1ME8JOCGAIWPbigxDwmQAAmQAAmQQL4SSJXw0c7rYyRSKHW1VXmBu6OzW/BjTvT2mBfDz06SAAmQAAmQAAn4JEDho09wLEYCJEACJEACJJAfBJIVCropnyhPKq7pI+fGtvwYZfaSBHKXwMFQ1/T4mLujzJ6RwGgCTgLBZK778RqpW+fUbqJxTKYs5wcJeCWQq8JH7P31HzCpr69nmFmvk4P5SYAESIAESIAEJFXCR6AdHgnJiMR7NawoL5OKcmevjNk8PP0Dg7KvudWyC9FISEL09pjNw0vbSYAESIAESIAEUkiAwscUwmXVJEACJEACJEACuUEgWYGgm/J+BY4g7FS/03XjKHnJmxujy16QQG4TeOaZVVoHEep63rzZud1Z9o4E8oyAVzGgU/5krjuVxdC4yWM3hMmUzbNpwe5mAQE3+209j1vv8MPDw6N6bixv/rf+O4WPWTBZaCIJkAAJkAAJKEgglcJHO6+PwDC2plKKivyHvFYQ5SiTmppbZWBgMM5MhLdGmGsmEiABEiABEiABEiABawIUPnJmkAAJkAAJkAAJkIALAm4OKO2qcVvWKV+y12GfUx0uUARSh5t2mIcESCB5AhQ+Js+QNZBApggEIfZzU4dTnmSvg59THYkYJ1M2U2PHdkkgEQE3+3EvwkejF0e9XQofOQdJgARIgARIgARSRSCVwscDzy5DMmzh9TEcDktdbaWEQrknAmxt65Tunt64IYPvy2hRWArinWCmanhZLwmQAAmQAAmQAAlkHQEKH7NuyGgwCZAACZAACZBApgi4OaQM6pDTrh63NgSdL1PM2S4JkEByBJ57brUWhgoeH+fPn5NcZSxNAiSgNAEvAkE3eZ3yOF3XYbnNZwc32fJKDxqNy0sCbvbpqRY+ov6GhgaGus7LGchOkwAJkAAJkEByBFItfITKb2jYWtxYHC2SmuoxyXVAsdKdXT3S3tFlaVWkMCSFYaoeFRsymkMCJEACJEACJKAYAQofFRsQmkMCJEACJEACJKA2ATcHlYl64KZ8UHlgh5u67OxNpqzao0jrSCB/CKxaBeHjoJSXl1L4mD/Dzp7mMIFkRIBeyjrldbquD4HbfHZDlmz5HJ4K7FoWE3Czx6bwMYsHmKaTAAmQAAmQQI4TSLXwEfcACHk9NGIt+CstKZaqyvKcoNzT0yf72zos+8IQ1zkxxOwECZAACZAACZBAGghQ+JgGyGyCBEiABEiABEggtwi4OaxM1GO35d3kc5PHaIvX/MmOXLrbS9ZelieBXCOwatWamMfHefNm51r32B8SyDoC6RbyeWnPTV43efRB8ZLXaiCTLZ91k4MG5w0BN/vjVAsfAbu+vp4eH/Nm1rGjJEACJEACJBAcgXQIH3EvMDQsMmwjfiwrLZHKMWXBdSoDNfX09sn+VmvRI/xdRqPhDFjFJkmABEiABEiABEgg+whQ+Jh9Y0aLSYAESIAESIAEMkzAzWGlk4le6nCb120+o21+yjj1jddJgATUIcBQ1+qMBS0hgVQT8CsUdFvObT7000teKy7Jlk81a9af2wTaO7rjOlhSEpVIYTCHz2723xQ+5vYcY+9IgARIgARIIJsJpEv4CEZDQwUybAOrtLRYqsZkp+fHRJ4e0d1oJCShEENcZ/P3hLaTAAmQAAmQAAmkjwCFj+ljzZZIgARIgARIgARyiICbA0un7nqtw21+t/mCts+pPl4nARJIPwEKH9PPnC2SQCoJBCkIdFuX23x6v73mN/NKtnwq+bPu/CCwY3dzXEfraiqkOFoUCAA3e3UKHwNBzUpIgARIgARIgARSQCCdwseRt8SP+L9VKimOSlVlhRRkkUaws6tH2ju6bEemqDAk4XAWdSgFc4xVkgAJkAAJkAAJkIAXAhQ+eqHFvCRAAiRAAiRAAiTwFgE3B5ZuYXmty0t+L3nd2us2Xybbdmsj85FArhNAqOv+/gEZM6ZMGOo610eb/VORgEoiPq+2pDq/1Xh5bVPFMadN2U+AwsfsH0P2gARIgARIgARIIHUE0il8PNCLAhkcErETP0YKC6WqqiIw79ypIyfS2tYp3T29tk1ECkNSSNFjKoeAdZMACZAACZAACeQgAQofc3BQ2SUSIAESIAESIIH0EAhS2OenLj9lQMZvufRQZSskQAJBEoDHR134OH/+nCCrZl0kQAKKE/ArIvRTzk8ZM74g6lB8SGhelhDIVeEj8A8PD8fuBerr66WoKBgvllkytDSTBEiABEiABEggAALpFj7iPmF4BGGv7cWP6FZVZbmUlhQH0MPgq+gfGJS2tk4ZGBy0rTwSDklhIT09Bk+fNZIACZAACZAACeQ6AQofc32E2T8SIAESIAESIIGUEghaROi3Pr/ljHCCqCOlsFk5CZCAZwIUPnpGxgIkkHUEghAM+q3D
bzkz5KDqybrBo8FKEqDwUclhoVEkQAIkQAIkQAKKEMiE8BFdHxmRhJ4fkae4uEjGlJdJYWFYEVoi7R3d0tnVndAeenpUZrhoCAmQAAmQAAmQQBYSoPAxCweNJpMACZAACZAACahFIGjBYDL1JVPWDdVU1+/GBuYhARJwT4DCR/esmJMEVCKQLiFgMu0kU9bIOqh6VBo/2pLdBCh8zO7xo/UkQAIkQAIkQAKpJZAp4SN6BfEjPD8OO3SxvKxUystKJBTKnAfFnp5eae/skSEYnCAVFYYkzPDWqZ20rJ0ESIAESIAESCCnCVD4mNPDy86RAAmQAAmQAAmki0AqBIHJ1pls+XSxYzskQAKpI7By5WoZGOiXiooyWbBgbuoaYs0kQAJZQyBZoWGy5c2ggq4vawaChipLIJeEj4CM8NZ6YqhrZacdDSMBEiABEiCBrCGQauFjKBQaxcLqfmEQ4seRxMgKpEDKykqkrDQq4XD6PEB29/RKV1dvwrDWsBy9jERCGRVnZs2ko6EkQAIkQAIkQAIkkIAAhY+cHiRAAiRAAiRAAiQQEIFUCA2DrjPo+gJCx2pIgARSRED3+HhA+DgnRa2wWhIgAVUJBCkqDLIunVcq6lR1LGhX9hDIF+HjuHHjJBqNZs/A0FISIAESIAESIAElCKggfASIoeEDP25SSXFUSkqiUhwtcpPdc56BwSHp6ekTiB6NL53YVRQOFUhRZLTA03OjLEACJEACJEACJEACJKARoPCRE4EESIAESIAESIAEAiSQKmFhqurVu57q+gNEzKpIgAQ8EKDw0QMsZiWBLCaQSgFhKupORZ1ZPHw0XTEC+SJ8HDt2rJSUlChGn+aQAAmQAAmQAAmoTqC/vz8Wvhn7en1vb/73rl27tK7oHhz1/xvz4br53sCNx0e9HLw+Dg2NOHp/1JmGCkJSHI1INBqRoqKIb0+Qw8Mj0j8wIH39A9LXNyCDg4Ouhg2BtwsLQ1LI0NaueDETCZAACZAACZAACbghQOGjG0rMQwIkQAIkQAIkQAIeCKRaRJjq+t12VRU73NrLfCSQjwQofMzHUWefs52ACqLAVNqQyrqzfexpvxoE8kX4WF1dLeXl5WpApxUkQAIkQAIkQAJZQ6Cvry/m1TDTwkdAgw2DQyOaANIh+nUcYwghI0VhKQwXSjgcknAopNWn37OgxpFhCCuHtfqHhgZlYGBYBofcCR2NDcLLY6QQ9WfNUNNQEiABEiABEiABEsgKAhQ+ZsUw0UgSIAESIAESIIFsI5AOUWA62sg27rSXBEhgNIGVK1+U/v4BQajrI4+cSzwkQAIkYEsgHYLEdLTBISaBZAlko/ARfTbeGxj/bQy3iH/r1yoqKqSqqipZXCxPAiRAAiRAAiSQZwR6e3tj+wlVhI8H9kJyQAAJN5AKpVABBI8FEgpR8ajQsNAUEiABEiABEiCBHCJA4WMODSa7QgIkQAIkQAIkoBaBdAoT09mWWpRpDQmQQCICFD5yfpAACTgRSJcYMV3tOPWX10nAiQDCFZpTJBKOhWl0Ku903c2+Xc9jl9dK5OhV+BiNRmXcuHFO5vI6CZAACZAACZAACYwi0NPTE/tdJeGjbpQugEQ46kxKICF0jIQpeOTXhwRIgARIgARIgARSTYDCx1QTZv0kQAIkQAIkQAJ5TcDNwWYqAGWq3VT0hXWSAAn4J7By5WoZGOiXiopyWbBgjv+KWJIESCCnCKRbhJju9nJqsNiZnCPgZp+eDuEjvpcTJ07MOb7sEAmQAAmQAAmQQOoIDA0NSX9/f6wBK+GjvvfftWuXli8UCo36v7EMLpjvFfT8eiN29xL654nuNeABEgLIdDmBhE/HcLhACsMHw2WnbjRYMwmQAAmQAAmQAAmQgLafbGlpyeQLLxwFEiABEiABEiABEsgLAm4OOFMNQgUbUt1H1k8CJDCawHPPrY6FuqbwkbODBPKPgAqCQxVsyL+RZ49VJuBmT54O4SPCXjc0NAg8PzKRAAmQAAmQAAmQgBsCAwMDMjg4GMtqFCnq/1ZJ+HiwTwVaCOwhCCHhEjLAhFDW0HaGQ/TuGCBWVkUCJEACJEACJEACrglQ+OgaFTOSAAmQAAmQAAmQQPIE3Bx0Jt+K/xpUt89/z1iSBPKTAIWP+Tnu7HX+EFBZVKiybfkzQ9hTFQm42W87CR/RL3Meu1DX+Fy/BrGjXhb/rqyslKqqKhUx0SYSIAESIAESIAEFCfT29sb2FWbPjSoLH833JpoXyGHRRJDQQWr7JQfe8Oao1VMgEsIPhI4F8Oyo4EDRJBIgARIgARIgARLIIwIUPubRYLOrJEACJEACJEACahBwc9iphqWjrchWu1VkSZtIIF0EKHxMF2m2QwLBEMgFsWAu9CGY0WQtJGBNwM2eOl3Cx0gkIuPHj+dQkQAJkAAJkAAJkIAjAbw0AeGjMcS03b+xl9m9e7dWZyZDXeudcnuPojuD1GWQBVA5aoJHRzzMQAIkQAIkQAIkQAIkkCECFD5mCDybJQESIAESIAESIAE3h56kRAIkQALJEFi58sVYqOsjj5ybTFUsSwIkQAIJCbg9TCRGElCdQF/fQJyJkUg4dmifrP1u7gG8CB9hj9Gro/67bqfxmv5v4//r6+uluLg42W6xPAmQAAmQAAmQQI4TQJhr/LgRPg4NDcnevXs1IhA+Gr1Dmj1Fmu8jjOGzUd7uPsNohxN63qs4EeJ1EiABEiABEiABEsheAhQ+Zu/Y0XISIAESIAESIIEcIeDm8DNHuspukAAJpJkAhY9pBs7mSCBPCfAgMU8HPke7vWN3c1zP6moqpDhaFEiP3ez90yl8LC0tlbq6ukD6xkpIgARIgARIgARyl0BPT4/2soUb4SMEkk1NTRoMCh9zd06wZyRAAiRAAiRAAiSgAgEKH1UYBdpAAiRAAiRAAiRAAm8RcHMQSlgkQAIk4JYAhY9uSTEfCZCAHwIUPPqhxjKqE1BJ+AhWdvcHxs/NHh+N5Zw8PiJvQ0ODRKNR1YeG9pEACZAACZAACWSIQH9/vwwODmqtuxE+QiS5f/9+LT+FjxkaNDZLAiRAAiRAAiRAAnlCgMLHPBlodpMESIAESIAESCD7CFAEmX1jRotJQDUCFD6qNiK0hwSynwDFjtk/huxBYgK5JnxEb4eHh7VOW4W6xucIdY2Q10wkQAIkQAIkQAIkYCaA/UN3d/eocNXIYxe+Gp93dHRoP0hehI/mMNh6O1ajEmSo6/3tXdLZ1TuqmUg4LA3jqjghSIAESIAESIAESIAEFCdA4aPiA0TzSIAESIAESIAESIACSM4BEiABvwQofPRLjuVIgATMBCh45JzIFwKpFj6Co9P+3uzN0Yq9VR67cmbho26D8fOxY8dKeXl5vgwz+0kCJEACJEACJOCSQF9fnyB0tS5gRDGz6BDX9IRrzc3NgnJIqRQ+ur1HccpH4aPLycBsJEACJEACJEACJKAggYKmpn0jw8NDEgqFJRw+uDFVyVa4T8eDu8LCwthmWiX7YNvg4JBmUiRSqJJpMVvwIHNoaFhCoQIJh8NK2jg0NKS
LVdUJuvPEaaWxsDH1ulXQ9/MobGBgUraMeT1933ZVGnlsdOPCBvPnmW7Js2RLZsOE8I8+tXn75NTl48LBcdtlFcsYZCxIZS5LcB+wxZ3h4WG699Uajx5y2thbZsuUCo8ec9evPkxUrlhs55jz55HOi9wBvusnsMUezENdee4WRY87+/e/L9u27ZPnypbJ+/RqDx5wP5fLLL5GFCzuMG3P0Oo3mczSPdeutHzd6zJk/v1UuueR8I8ecnTt3y75978mGDWvl7LOXGTrmPCtdXd2xx5yyxx57avqVV96UzZvXyebN640cFO6//xHrl7xenCzEBdSgAwK9UP7jH99vLfbpT98q9fX1Qavk/X3dkZ955gU566ylcuWVW4wcFJ56aptoCPfqqy+XVavONnLw/+53fyzd3b3yW7/161bQx7TX0aPHrZPc1tYmuemm66S2tta0Ksprr20XHXPOP3+9bNq0zsgx5+c/f0QOHTosn/zkzUaeWKRlzHn66RdkxYqlcsUVpo45z8vbb78j11xzuaxcaeaY8//+34+lp6dXPv/53zAyGKdjzkMPPS5tbc1y003XGjrmvCmvvLJdLrhgvWzcmMyYk23w0Z7p0R6gKyurrCAALwQQQAABBBAIL0DwMbxVPpZ0Bh91e3b48Wc/e1g+/PCo3HXXLbJgQXukC6j5CD/29w/IT37yoJSXz7PqWF9flw+uSNvQL/E8++xLsnLlcrnssouNvJ7z5JPPyp4971k3OPWmTb6/bBsm+Ph//++PZGCgXz73uV+XhobZXzqyQ4/aMQQfI+2eLIwAAggggAACDgE7+Kg/SiL8GDb4OHOcMyif+9zdRl5DPnToiDz66JPS0THf+kJUbW2NcfuNhs30y0YXXbRZNIhUVWXepCJbtz4oR44cs+5Dt7drOKXMKMe+vj65776HrMCH3lsz8dxq9+691uQxOkGChnBNnODmsceelnfe2W99Vs46a5lUVJj1pCg9d9L7Vnqf8jd/89OGjzntcsMNHzN6zNGJWTRwbeKYc999M2PO3XffJh0d7caFrXt7+2Tr1oes+xtR7gAAIABJREFUnIFmiMwec86RSy+9wNgxZ9++/VaoXnNEpj2dzh5z9Asen/2smWOO5th++cunZMGCDrn++quMHHNeeuk1efPNnXLJJRfI2rXnGjrmPCBHjhyPPeaUHTlyZFpnD6uurpKamhrjBi09atNfnqOjo1bQLN8XT8McNeoHTk8q9JtcLS3NRhqOjY2LfvtCBysd+PNxAyGMnXMZ3Q81rV1XV2vkwK91HRgYkJGRMdHEtomGExOTMjQ0aLGa+I0vrZd+lmfGnGqpqak20pExJ+qnd+7yjDnZG6ZjzJmwvk1cSmNOEqFH9XKW09zcIpWV5s3EnMxeTCkIIIAAAgjkRoDgY25c45bqFXzUsoaGhmVkZFSam+Ndz8n1effp6zlT0tISfkbKuE5x1hsbG7POoc2+njPTz3V1ddb1xXy/wgQf9XrO6OiYtLXNvp5jr2v/n+BjvnuP7SGAAAIIIFA8Au7go7YsTHgxk0CY9fXLPHrM6D7OMUV2YsK+hlxmhaRyfYwfp932fSu9Z6X3yk18DQ4OWvcn9bzFtGCKetnnVvr/KLP959PaPrfSY369F23ivqj38oeHR6Whoc7IiWO0vxhzst9rdcwZGhqR2lrGnLiajDlx5Wavl54xZ1x0FlcTx+20HOfMjDk1Vj7HxFe2xzllJ0+enNab7ybuJE5w0+toBxhMdkxLHTHMbqihn7Pzs9dmzMnekX2xdAy1paaP3UnVL5vgo3Nd599bW1uNvFiV/R5MCQgggAACCOROgOBj7mzjlOwXfLTLyuZYLJt1w7QlLectpXTMHabf7GXChB51Wa9+dq5L8DGKOssigAACCCCAgJeAV/BRlwsTXvQTDbsu9zOy3ydNN7SPaXN9fpSNJOdW2eidXpd9MXvHtOyLJn+eGXOy3w9PX4sokzKzJumd1TjGnOz7mjEne8Nsxxwr+JhMNSgFAQQQQAABBBAoXoGkQo+nT3ZmrAg+Fu8+Q8sQQAABBHInQPAxd7ZxSs5l8FHrY/rNgDhmrJO9QNjQo9eW3OsSfMy+PygBAQQQQACBUhfwCz6qS9gAo5dhNuuWep/QfgQQQAABBBBAAIHiFyD4WPx9TAsRQAABBBBAIEuBXIUetVoEH7PsHFZHAAEEEChJAYKPZnW7O/iotXOHFbMNL2a7vlli1CZbgVyEHrVOPOo6255hfQQQQAABBEpXwBl8VAV3YDGbAGM265Zuj9ByBBBAAAEEEEAAgVIQIPhYCr1MGxFAAAEEEEAgK4G4wUev9dw/I/iYVdewMgIIIIBAiQoQfDSr4yvLKmVqcHJWpbyCitmEF7NZ1ywtapOtQJKhR62LszyCj9n2DusjgAACCCBQugJBwUeViRtgjLte6fYGLUcAAQQQQAABBBAoFQGCj6XS07QTAQQQQAABBGIJxA096sbc63r9u62tTcrLy2PVjZUQQAABBBAoVQGCj2b1vAYfJwcmQs3ymE2AMZt1zRKjNnEFchl61DrpcXl7e3vc6rEeAggggAACCJSwgDv4qBRegcW4Ica465Vwl9B0BBBAAAEEEEAAgRIQIPhYAp1MExFAAAEEEEAgvkDc4GPQbI/2+wQf4/cNayKAAAIIlK4AwUez+t4OPmqtnOFEZn00q5/SXpukQ4/q4SxTj891xkeCj2nfU6g/AggggAAChRHQ4OPExOwvAxF8LExfsFUEEEAAAQQQQACB0hEg+Fg6fU1LEUAAAQQQQCCGQJzgo986zp/r3/XP/Pnz8zrj42sHx+RXB0djSBRuld+7vLFwG2fLCCCAAAJGChB8NKtbNPg40T9+KvRoBx79ZmiMO3Nj3PXM0qI2cQVyGXy0j9N1xseOjo64VWQ9BBBAAAEEEChhgePHj5/6UoV93Oo3S2Pc2RvjrlfC3ULTEUAAAQQQQAABBIpcgOBjkXcwzUMAAQQQQACB+AJJhh61FnZ5zv/nK/j4v7b1y98/2x8fo8Brbr23XS5YWlXgWrB5BBBAAAFTBAg+mtITM/VwBx/1Z3qzN1NQMW6IMe56ZolRm6gCuQg9ah20XOcxv4YJFixYELV6LI8AAggggAACCIgz+GgfD2cKKsYJMcZZh65BAAEEEEAAAQQQQKCYBQg+FnPv0jYEEEAAAQQQyEogF8FH96yP+Qo+Lv/G4awsCr3ydcur5X/fM7/Q1WD7CCCAAAKGCBB8NKQjPqqGV/BR3wq6MRsnxBhnHbO0qE0cgVwFHycnJ2dVh+BjnN5hHQQQQAABBBBQAXfwUX+ms0lnegUdL3utG2cdeggBBBBAAAEEEEAAgWIVIPhYrD1LuxBAAAEEEEAgK4GkQ49aGfcN23w+6prgY1a7AysjgAACCBgmkNbg497Ofack93Tukbc6d8m6jrWzdG9be6th2sHV8Qs+MutjsB1LhBOIG3wMWs8dfNR9duHCheEqleVSe/bssUro7OzMsiSRK6+8MusyKAABBBBAAAEEshM4duzYrJmktTSCj9mZs
jYCCCCAAAIIIIAAAkECBB+DhHgfAQQQQAABBEpSIOngo5bnLpPgY/hdixkfw1uxJAIIIFAKAmkKPmrY8b5dW2XH8Z2hu2bDgvVWIDItIUi/4KM2OGhGmjgzOMZZJzQ+CxopEBRg9Kt0pvXcoUctI1/BRw097tq1KzHrjo4Owo+JaVIQAggggAAC8QS8go9aEuHHeJ6shQACCCCAAAIIIIBAGAGCj2GUWAYBBBBAAAEESkog6dCj4nnddCX4GH63IvgY3oolEUAAgVIQSEPwMU7g0avv7ln368YHIDX4ON435hlyZNbHUvhE5raNuQg9apl+x/xnnHFGThv03HPPJTLLo7uShB9z2m0UjgACCCCAQKDA0aNHreML95d09N+ZvgwU9EUhrw3HWSewASyAAAIIIIAAAggggEAKBQg+prDTqDICCCCAAAII5E4gTuhRa5NpPb/ZHvXn7e3tgd/8TqK1Xo+63npvu1ywtCqJ4hMt4ws/OCFPvD86q0yCj4kSUxgCCCCQegGTg4/373pAfvDWfyVubHIA0g4+eoUc7Ru/mWZpjDODY5x1Eu8UCsyLQL6Cj7odDRHkMviY9EyP7g4g/JiXXZKNIIAAAggg4CmgwUf7eMK5gH3cSviRHQcBBBBAAAEEEEAAgeQFCD4mb0qJCCCAAAIIIJBigTjBx6DQo3K4l7Fv4BJ8nLuzEHxM8QeIqiOAAAJ5EjA1+Jir0KPNamr40Q4+aj3d4cdcBR/tbeVpl2MzBRSIE3zMtI79nvP4XP+uj76uqKjIafAxV7M9OruH8GMBd1Y2jQACCCBQ0gIafJyYmLC+SOEMOYYJPipc1Fkcoy5f0p1D4xFAAAEEEEAAAQSKVoDgY9F2LQ1DAAEEEEAAgagCcUKPuo2owUfnDJAEH+f2EsHHqHsuyyOAAAKlJ2Bi8PF/PP0/ZcfxnXnpjP9+zd/I6o5VedlWmI24g4+6jlfgkVkfw2iyjFMg6dCjlu0VfNSf6Z9cBx+3bt2alw7W8OOaNWtyti09h+GFAAIIIIAAArMFnMFH55eBnMfAzPrIXoMAAggggAACCCCAQLICBB+T9aQ0BBBAAAEEEEixQJzgY5jQo5LYy7n/T/Bx7g5D8DHFHyKqjgACCORJwKTg497OfXLfrq15Cz3axCaFHwsVfFQLHnmdpw9dgTaTdPDRWZ7zuFz/XkzBx3x019q1a+Xcc8/Nx6bYBgIIIIAAAqkQcAYftcJ2yDFs8NG5TtgGM+tjWCmWQwABBBBAAAEEEChWAYKPxdqztAsBBBBAAAEEIgsQfIxMlpMVCD7mhJVCEUAAgaISMCn4mM+ZHt2d+N27/48R/eoVfNSKuR97bf/Mq9JxA4xx1zMCjkoECkQNPgYt7xV8tH9G8DGwO+YsQPgxuhlrIIAAAggUr4A7+Kgt1WCi+3iVWR+Ldx+gZQgggAACCCCAAAL5FyD4mH9ztogAAggggAAChgpEDT6Gne1Rm+t8vLX9b/2/iTM+/q9t/XN66OKl1XLB0qo5P/daNkr3/t7ljXMWJ/gYRZBlEUAAgdIUMCX4eP+uB+QHb/1XwTphw4L18pdX/3nBtm9vWIOPY72jc4KOUYKPWlacEGOcdQoORgVCCQSFGL0KybSO+z17lke7HIKPobplzkKEH+O5sRYCCCCAQPEJeAUf9VjVK+iYZPiRWR+Lb1+iRQgggAACCCCAAALhBQg+hrdiSQQQQAABBBAocoF8Bx91ex0dHVJeXp5z2eXfODxnG1vvbZ8TZnzt4Jjc+b2urJaN0pj3/+LMOYsTfIwiyLII5F9g165d8swzz8iSJUvk2muvlfr6+vxXgi2WvIAJwcdChx7tneCedb8ut629taD7hF/wUSvldSPWL6wYJ8QYZ52CYrHx0AJRg49RQo9aCV3eefxP8DF018xZ8M4774y/MmsiUEQCXV1d8tJLL8mHH34oR44ckc7OTqmtrZXFixdb5/4bN24UDQvn4xpAEbHSFARSI6Cf+4mJiTmfca/PPMHH1HQrFUUAAQQQQAABBBAwXIDgo+EdRPUQQAABBBBAIH8CUYKPQcu63/eaYYbgowjBx/zt32wJgSQEBgYG5I//+I9PFXX77bfLHXfckUTRlIFAJAETgo+f/dFvRapzLhf+79f8jazuWJXLTWQs2w4+6kLum7hRgo+6ftQgY9TlC4bEhiML5Dr4ODk5OatO+u/Kyko544wzItc17Apbt24Nu2iqliP4mKruorI5EDh48KDcf//98uqrrwaW3tbWJjfffLNcffXVBCADtVgAgXQJ+AUfvR537XXcbLc2zgyOcdZJly61RQABBBBAAAEEEEDAW4DgI3sGAggggAACCCDwkUBQmNEJFeUx17qe+8at/W9mfGTGRz6A3gLbt2+X/fv3W29eddVVojcIeRVe4L333pOvf/3rpyqyefNm+fKXv1z4ihW4BseOHZMXX3zRqsWyZcvk/PPPL3CNin/zhQg+7u3cZ8Hu6dwjb3Xukh3HdxoDXehHXjuDj+7HW3s97lrhkpr1keCjMbthohWJGnr0Ot52VijoMde6LMHH+F2ox2rt7e3xC2BNBFIs8MILL8h3vvOdyC3YsGGD/O7v/m5eZk/XGdv37Zs5jtHj9+XLl0eub6FWSHPdC2XGdgsncPjwYWvGRw0hOoOIUYOP2oKoQcaoyxdOiS0jgAACCCCAAAIIIJCsAMHHZD0pDQEEEEAAAQRSKhAl9KhNjBJ81GX9ZoAk+EjwMaUfmZxX+4c//KE8+uij1na+9KUvyQUXXJDzbbKBYAENjnzta1+TDz74wFr4j/7oj2TTpk3BKxb5EnpD9lvf+pbVSg1//PZv/3aRt7jwzctn8FEDj/ft2mpU0NGrBwo56yPBx8J/JoqtBlGDj1Efc63H5s519O/6R2d8XLRoUc44i3XGR4KPOdtlKNhwAf1MP/DAA7Nqec0118iVV15pzR6rj7nWUHVPT4/s2LFDHnnkETl+/Pip5fXLXV/5ylekpaUlpy396U9/as1Iqa/f+Z3fkcsuuyyn20uy8DTXPUkHykqHQKbgo7bA/YUdHnedjn6llggggAACCCCAAAJmCxB8NLt/qB0CCCCAAAII5EkgSvAxSuhRq+8OPjr/bWLw8V+e75+j/uUrGuWCpVWzfv7awTHxWjZKl/3ve+bPWfwLPzghT7w/Ouvn1y2vFq9lo2yLZdMlQPDR3P7SYIgGH1tbW6W5udnciuaxZgQf84j90abyEXxMOvCoszKGfcWZTbKQsz66g4/aTvvGrvv/tkGmmRqjzuIYdfmw/cByhRPIZfDRPpYn+Jhc/xJ8TM6SktIj8MYbb8i//Mu/nKrw2WefLX/wB3+QcaZ6HX/0y13/9V//dWq9NWvWyJ/+6Z/m9LHXaQ4Pprnu6dmbqWlSAs7go5ZpBxvt/3sdsxJ+TEqfchBAAAEEEEAAAQRKVYDgY6n2PO1GAAEEEEAAgVkCuQo+2uU6y3cGH/WRcBUVFTnvjeXfODxnG1vvbZ8TZsx5RUJsgOBjCKQSWITgYwl0chE1keBj/jsz18HH+3c9
ID9463QoIYkWfvfu/xO6mM/+6LdCL2svmLbgo9abx11H7uaSWCFq6FFR/Nbx+rk7+GgfmzPjY/zdi+BjfDvWTKfAyZMn5W//9m9leHjYasBFF11kPbY67Lm989hR17/rrrvklltuyRlGmsODaa57zjqUgo0VCAo+eh3/Enw0tjupGAIIIIAAAggggEBKBAg+pqSjqCYCCCCAAAII5E4gSuhRa+G3vNfP3cFH978JPs7tVxODjzq73aFDh+TEiRMyMTEh2m8LFiyQlStXhpqZY3x8XPbs2WM91kwfc6aPPJs/f76cc8451v/9XvpYtMHBQettXUcfv6g/27dvnzXj3sDAgFUXfYzaWWedJVVVp2fldK6r6+n6Qa+hoSGrffqqq6ubc+NuZGRE3n77benq6pLe3l5pamqyZjTRWUrq6+t9i9cbgmqgF/QbGhqs5cbGxuTAgQOyf/9+qa6uFn0knH4++vtnZhz90Y9+JNu2bbP+/oUvfEE2btx4qnzdVnl5eVBzQr8ftV1qpFb6CrK12+5cVoMV2nf6qqmpOdVvBw8elHfffVe6u7utn+mMsKtXr4706Dvdv/bu3Wvtq9ounZFx4cKFcu655/reiPXaz7R/9HF8x44ds9q4ePFiWbt27am+07L15e4Lr77u7OyU3bt3W3XS9uojRNevXz+nPvq+vZzeND7zzDNl6dKl1j4e5qWfUf1c6Pb0pZ8trbd+NvzCVdl8xpz7q36+//3f/93a7pYtW+See+45VWXdv/VPEi91P3r0qDUe6Xii/trOdevWhfqMJ1EHU8rIZfDxfzz9P3PyWOtcBx+1bwr1uOswMz5q/dyfRYKPpnyizKpH1OBjlMdcO4/X7fWc/+dR1/H2BYKP8dxYK70CP/7xj+Xhhx+2GqDnhRqCDHO+5WyxPvbaOfPjv/7rv84qQ88D9VhVX3oOlSkcpecW9ljW2Nh46vdtX1+ftb7WVWea1NdnP/tZK6hpv5zH8/Y2nec4en6hQU09Vtf66DnKsmXLrON0v5ces+r5hL7s81i/ZZ3nS85z0Kh1T+/eRM2LScAdfNRjXf3j/PzyuOti6nHaggACCCCAAAIIIGCCAMFHE3qBOiCAAAIIIIBAQQUIPhaUf87GTQo+aoBMb2ppGM3rpTe57rzzTrnkkks839cbQ3pD66GHHjo1G4h7wQsvvFA+9alPWeE09+uJJ56Q7373u9aP9bFpS5YskX/6p3+yAk/ul9bl937v96yQl76cs4hoQPGb3/xmxrCg3pj6/d///VPFfvWrXz11M0vboTfKHnzwQd923HjjjfLpT3/acxtf/OIXrXK1jl//+tfl+9//vmjb7JfeDNMbfTt37pR//Md/DNwh9cbi8uXLA5cLWiBuuzSQ+R//8R9W8Vr3r33ta9Zjn90vDeD95V/+5akfq8/NN98szsfiaUDuiiuukH/7t3+z+szrpWGCu+++O2O4VG92/uQnP5FnnnnGswyt57333iuXX3554H6modLvfOc7s/ra7iNdWfvq9ddft8r5m7/5m1P7nP7b2dd///d/b9XJvins3LDuk3/xF39hhXb1Jq0GXe2bse4KqtH111/ve7NXQ4A6Q6ifn35uPv/5z8uKFSsC2x7lM6Y3dL/0pS8F7WbW5/sTn/hE4HJBC6i57nf2zELu5W+66SZrPNIb1aXwylXwMVehR+2TfAQf71n363Lb2lvzvgt4BR+1EvbNXrtCYYOP9rpRGsLjrqNomb1sPoOPzpnYmfEx/n5B8DG+HWumT0DPm/TR1PYx2Z/92Z9ZXwSL+tIx5+/+7u+sL7ToS7/sdeWVV54q5q//+q/lyJEj1r+/9a1vZfwy1B/+4R+eqs+3v/1t60s3ei77jW98I7Baer6yatUq6wtof/Inf2Itv2nTJtEy77vvPs9jeV1Gv1il5xd6/Ox+aaBTz4P1peexes7r9/rP//xPefbZZ623bcuwdf/KV77ieYwf2GgWQCBHAhp81C99Or+oqaHHTMFHrYpfsDlT4NmvCXHWyREHxSKAAAIIIIAAAgggkBcBgo95YWYjCCCAAAIIIGCqQFKhR22fuyy/x1s7l2XGx7l7hinBx8cff1y+973vhdp1NWx0662zgyYaRNMwm86QGOb13/4/e+cBfldR5v8JKQTSSQNCSAKElkoSIASkF+lFRRBEd1ddu6vuY1mfXXfVtaCrq67rsqz6dy1rpUuXGgIBktAhCYQklPSQkN7I//meOL9MTs65p9xz7p1z7+f4+JDfPVPe+bxz586c+Z53Pv3pIAqee7k2qHz9HSd6svkkwlOkPEUl1MaVTa/NOUWFi7seffRRc8011wS3R4wYYbTRpmvTpk3m2muv7RC61WqLojZeeeWVu0X0csVwEonecsstuxRjRXWuILBWPUUIH+tplzYpJUB99tlnAzMVCVH+czcYlEZiU0UC1KXonhL6aQPEbacEoxLs2Q3PuHZLNCofKsJK+JLA8rvf/W6kIDacVkfwTZ48ObafHXfccebhhx/erY6swkdFAp0wYYK5++67Y12pNmmz8sYbb9xFCBuVQZuqEj+GL/lAbU+6ZP8//MM/BN+Nor5jjRQ+upvHtdqqTWttGBcZETWJbbPulyF8LON4a5dPI4SPbn0SQepqhBAyr/BR9hH1sVnfIj/rzSp6VCvyHHNt87l59UKExOPh34oiSV1//fVFFudNWQgfvXEFhjSAgF5G0YtAujTnvfrqq2N/y5LMkeBPwj9dWi9ovmqveoWP4eO042yxwkdFWNSaRpfWN4oqbgWJtdoRtY6tV/iY1naEj0k9jPuNJlC08FH2ZxUyZk3faEbUBwEIQAACEIAABCAAgaIJIHwsmijlQQACEIAABCBQKQIIH3cejeyL43wQPoYFeIpOJ7GYhInaIJeYbfr06WblypUd2D7xiU+Y8ePHB3+HhXH6TFFAdF9R53RMmDZzHn/88V2EjF/5yleCo3ntFSW+lC0nnniiGTNmTFDPU089tUsUDokbJZDTdfPNN5sbbrgh+PfRRx9tPvzhD8e6WRt2VqT3gQ98wEgAp+sXv/iFue+++zryqRxFKJQdOppZwk5XKCeBpoSg7mWFj+5nyi8eEvLp+ONTTz01OKb4oYceCpJJ0GYjnKhNOh7ZXhIL1joiPE1frrddOvJNm5FWWKoj49QGe7nROvXZN7/5zQ7RYpzAU31EYkFF7ZQQUpuqOm7aXoqmogiLOgbaXhK4KjqnZaXP5QNtVirdvHnzAqGlK8ANb07GiXy1+XrEEUcER5srAoztV2kiPro+UH9Ru3TstuxQFNWoS6I9pdXReapT6Wx0UwkXv/3tb+9y/J+O4XY3h5VG0RXFT35Rf5ag15ahPqfNUffo7Hq+Y4rkYdsie8VZl+xQO+wlsa9Y5r10HPzXvva1juxHHXVUEDVHxwuqbRoD3E3pd7/73UbfkVa/ihY+li16lD8aLXx0+0DZkSDTCh9lU9qoj3kiOObJ0+rflaq1r0zhY3jer7oQPhbTQxA+FsORUqpBQNH8FdVc1yWXXGLOPffc3IbraOlPfvK
THfkVdd3+ltUrfJQA6/777w/KVgRFrbV0aU3hRqhUZHLN013ho9sgrcO0BlSe1atXB1H67ZrNpnPXOvqsXuFjWtvFXnN8Lgj4QsAKHyU+dOel7ho+aj5cS6yYVciYNb0v7LADAhCAAAQgAAEIQAACeQkgfMxLjnwQgAAEIAABCLQEgaKEj1HlVDHi48xXNpsfPrRmN99+4vheZsLQXUWScWmzdIyfXdZ/t+TNFj5KMKWIaVbQpqh0X/ziF3fbUNF9CRWtqMoe46yHzPfee6/55S9/2dG2qIiQuilxmzaJbF3aVNJxZzZaW1iUJUGV6lRkEfeaMWOG0ZFm9rJHoUmMpSiD9tIx0lEbQ4sXLw4EYfayx6PNnTs3sM9eEjBGHZXsRotU2k996lNm7Nixu+Rz7VVkyEsvvTQ4gi3u0tHF9uhjHbMmsVdRV1HtCnOXOE0+DAvywlEWo4SPUYJRtVcMxMJeOvL67W9/e8ff7sarPlT/CR83p7FI/dEVsCpKojY4dUWJ/9TnDznkkEjkWYSPl19+uTn99NNr9lfdlLhV/caNVLh+/Xrzuc99ruP74YqLlUdCSCvolFjy7//+73c7DlzfLX0fJB7UJTZiZK96v2O2HDcqjcQf73//+4vqruauu+4yv/nNb4LytOmsI+3D4i4dQ6ij6HVfm9fi0epX0cLHK35/VenImil8VOPKFD+6wkfV5fbR8OYrwsfSu1qlK8gqfKyVPnwvPF9XhEf7mf5rj7om4mP2LoTwMTszclSXwM9//nPzwAMPBA3QvEwvudRzucdUK6p8r169guLqFT66NullOL0Up8t9yc1NEyV8lNhRpwgoGq57vfjii+Yb3/hGx0fuy3f6sF7hY1bb6+FPXggUSSCv8FE2FHXcNcLHIj1KWRCAAAQgAAEIQAACVSCA8LEKXsJGCEAAAhCAAARKI5BF+Fgrba1jrmV83Marb0ddS8x48a+X78b7+vcMiBQ+RqXN4qwFn9/12FnlbbbwUdErfvrTnwbNiIoQ57ZPUfa0IaXoi4r2p/+qL0hsaKNBxgnabDkLFiwIxIz2knhLUfZ0hUVZtY7ykpjLRvGwx5WpjB/84AfmySefDMqLOy5Yxy7a46fPOuusQJSo6yc/+YmZNm1a8G+J1yRii7t0XPFNN90U3J40aZL5yEc+0pHUjfioyHeyL+lhfFrhY1qBhFtfUe1SA3U0nY22J7GZBIMSFUpcqUuRM7W56F5h4aMieF511VWxR+QpquBtt93W0Scl5FN7JNhQf9Empa6PfexjQWTFqEtplc9G9Xzve99rJECN6me1ylH6tMLHOJGefPbZz362w25RUbkmAAAgAElEQVR9zyQa7dGjx26mu33T7b9hse63vvWtXSI5ugUpWqU2bO3likOL+I6p3LTCR40PaX53JAyz4jD53kaWHDdu3C5RgWybVKYi8PTt2zf2O9pqN4oUPjYi2qP4N1v4KBvGDBptvnDS5wrvDmUIH2Vk1giOWdMXDoIC6yaQ9nfdVlSP8FFRk91xFOFjfvchfMzPjpzVI6C5vqLT69K679BDD62rEV/+8peDF+J0ufPUZgsftbbRS0h6+S7q0hpTa017ff3rXzeDBw8O/kT4WFeXIHOFCbz22mtG8wvNSd1nEHrBz52nRs1ZET5W2PGYDgEIQAACEIAABCDQVAIIH5uKn8ohAAEIQAACEGgmgTTiE9e+ooSPbjkIH/0TPrpR5KKi1YX77ObNm023bjujYb700ktGmz72uuaaa3Y5mjiqz//qV78yOhpZlxstzhVlKaKkG1UjXI4becSNMKhjcL///e8HyRWN8Ktf/eouD9wliFOkPRt10kYtVLtc8WL4+LJw/YpsoGOY7XXttdd2POh3hY+usLPW9z+N8FECBbU1zfWd73zH9OvXzxTZLtUrbjpu2YoPJeJz/62+EN4sDAsf4yJx2naFbdYm6IgRI4wbaUX1ahO2luhHkWnUT3TpmHFFUNTl9jOVI3tqXWmFj+EIjW6Z7oZxrQiJOkZdR/7pckW5rhhQwtH3ve99NW3+7//+7+B4el3u97qI75jKTCt8dNtdy2B99yQg1uV+h/W3xNQSrao/t/NVpPCxEdEe5SsfhI+yowzxYz3CR9kUN3ZlFTJmTd/O3yFf216U8DEp2qPm45oD2YuIj/X1CISP9fEjd7UI/PjHPzaPP/54YPSnP/1pM3r06LoaoNMG7Etz7pqr2cLHWnN52+Crr76648Wqd7/73ebMM88MbiF8rKtLkLnCBFzho/sym0SNSVHQET5W2PGYDgEIQAACEIAABCDQVAIIH5uKn8ohAAEIQAACEGgmAYSP6aM4tlPER/eoMQnaFKEwy+UKtY488sggsl3S5R4VLUGbNrl0uaIsCZ0UpS/uciMuukdSh6MCWtGcLceN1HHYYYcFUT10hY9rjjoiO2yLFfzpc/eYNlf4aI/hTmJSlvCxyHbZNoSPerOfxx0X7QofkwSttiwJKCWq1WWP1FM0TkWvtFeSj1z/SAQrkWu4n6U5pjmt8FEbof37736cvepUVFVFV9WlY6FVb9T1zDPPdAgx3aijbn7ly9J2V0BZxHdM9ZcpfNR3WBEtrf8tJ0WY1Ua7ogwNHTo0c2S8pO+g7/eLEj7OWTbX/Mt9X21Ic30RPqqxRR97HRY+qg4rQnQ3fS1ojrtuSJerZCWNEj6qHluXjcar/3bp0sUMGTKkNHaKZNyKF8LHVvQqbYoj4EZjrzWPTUNQ87wPfehDHUklqrQv1TVb+Jhm3eYeoe2uVxE+pvE+aVqRAMLHVvQqbYIABCAAAQhAAAIQ8J0AwkffPYR9EIAABCAAAQiURqBRwsfw0aZEfNzpUt+Oug5HEHQ3ntJ2xFtvvdX88Y9/DJJfcskl5txzz03MumzZsuD4Z12KDihhmS5XlHXZZZeZM844I7asm2++2WjjSZcrfNTfrk06kvuKK67oKOeHP/yhkRBP10c/+lEzceLE4N9z5swJxFZ5Lxs50tpjy1H0vjQRudIIH/VdEqM0Igm1W2KGItvlsnHt1ednn322eec73xmJzxU+polWqELcDVbbF1y/ZvVTXD9zI7XElZlW+OiKMsNluUeESxwskXDUpWMEFSVRlyt8dCOzZm27e/x4Ud+xtMLHmTNnmuXLlyeafNRRR5mBAwd2pJNoVVEu77zzzsi8EtBK0CnhiY5Ra4erKOFjo465lk98Ej7Kni+f/I/m0IEjC+kuEj5uWrUx8gg/hI+FIG6bQtL8prsw4tInRXxE+Fhsl0L4WCxPSvObgCL1K2K/rilTpgRrr7zXggULzFe+8pUguzs/19/NFD6miQIvG92X6MaNG2c++clPBm1B+Ji3R5Cv6gRc4aPaYqM4pon46KYPc4iLBhnFK0vaqvPGfghAAAIQgAAEIAABCIgAwkf6AQQgAAEIQAACbUugKOFjVDnuZ67wMZzWx6Ouf/jQmt36xCeO72UmDN15nLMSzHxls4lKm6VD/eyy3aPB/dVvVph7FmzapZhTh+1potJmqStN2rDw8Uc/+pHp3r17mqwdad
wjeC+44AJz4YUXJuZfvHix+dKXvhSkK0P4+MYbbxgdMW0v2y73c9WrKI0SB+qaO3eu0VFr1qYTTjghsR1uAgk+e/XqFXzkbgbWEsO5+V0h4cc+9jEzYcKETPXHJS6yXbYORWpRRJTZs2d3VHv44Yebz3zmM5EiNFf4mCbCogp1edijml3hoyI4Zjlmb8899zQXX3xxYG8W8Z/S+yZ8VOTDfffdN3X/GD58uJk8eXLmttcSF6cVPqY2MiahvrN33HGHmTp1asfx9G5SRW3VhnPWcateu5qRvyjh4zfvv9o8vfSZQpqgI6RrXV84aUdE3TSX7Kp1FWFzkUdelyV8FIM0YnmXVdb0afxBmsYRyCJ8rJUW4WPjfKaaED42lje1NZeAhE3/9E//1GHE97//fdOzZ89cRklAKSGlrvALUVUQPs6aNavjpb3x48cbHY+tK4vw0Y3krmO/tY5yLzeq5Ac+8AGjl5i4IOArgVdffdXo+UA48rnEiGleBiriuGuEj772DuyCAAQgAAEIQAACECiLAMLHsshSLgQgAAEIQAAC3hMoS/gYLjdO+KjPFdHLCs3KBDbsW6/vVnzU8dVl2pC27GYKH2Xjpz/9aWOPBP785z8fHCOb5XrkkUfMtddeG2QZOXJkRyTHWmW4RxbraG0dsa0riyCtlihLZf3nf/6nmTFjRlDuBz/4wUD45Yo0zz//fHPRRRd1mBk+EjpP9EtbmE/CxyLbZdsnIZo298JXXJROV/iYNprKv/zLv5iFCxcGVXzkIx8xkyZNMnmOVY/qh1n6mfL7IHx0I0YqsqYibOa5srTdB+GjbaN+PxYtWmQUEfP+++8P/m2vM8880yhyZ6tfRQkfr/j9VYWhKvr46DjDioxSWVTUxyjho+x3N3hdQWLao65tGVmchPAxCy3/0jZC+Gjn5rYu98hrjrrO1ycQPubjRq7qEvj6179uXnrppaAB5513XscLRVla5Eb9Vz4JHUeMGNFRhCt8vPrqq03//ru/NKjEa9euNZ/61Kc68mndp5ec3CuNeFBrYK2F7VWrTptGJx3oZSxd7skCrvDxb//2b80xxxwTi+bLX/6ykVhMF8LHLD2ItD4SUF/eunVrR6RHOxd2Iz+6dofnrQgfffQqNkEAAhCAAAQgAAEI+E4A4aPvHsI+CEAAAhCAAARKI5BF+FgrbZTQ0TUa4WM2FzZb+PiDH/wgOLJLVxpB1apVq4LIhvZ4Wfe4MpWRRjD485//3DzwwANBne6GUVGiLJXrHhmsqHCKAClh58qVK4N6Fd3RPVp38+bNgcDOXlGbUGHPbtmyxXTt2nU3h/skfCyyXWroK6+8Yv75n/+5o806jvyuu+7q+Fv3hg4dugsTV/ioG9/5zndMv379Yr8oGzZsMB//+Mc77ivCzLBhw4LNVm262kuCREXurHVpEyYsts7Sz1S2D8JHV2zqioXj2q6oG7rCx0BnaXuzhY/qB1H+Vdu04WyPulcaHWHf6uIvH4WP6mNlix+LFD3K3qKiPlrho8oMCxzdiDf2O4rwMdvcqJ1SFyF8TIr26Aofw/N0/U4ccMABpSG//vrrSyu7mQUjfGwmfepuBoFHH33UXHPNNZnWSq6dmpNr/fXyyy8HHx944IFGAkD3csWVejFOc96o65lnnjHf+973Om4VJXy0L1vV4vu1r32tow3vec97zGmnnRYkv/HGG81NN90U/Ptd73qXefvb3x5ZzPr16zuiRCoBwsdm9GbqLJKAD8JHtYeoj0V6lbIgAAEIQAACEIAABHwngPDRdw9hHwQgAAEIQAACpRFotPAxSiBJxMfd3dts4eNjjz1m/uu//qvDMEXai9sAt0dU77PPPsFmzkknnRQIq/7xH/+xIwKbjix773vfG/vg2T0iV5W6m1pFibJUrsRREjvaaJZXXHGF0dFqusaNGxccjxu+XEGm0nz0ox+NjVD64osvmm984xvBUYdi4R49XK/w8ZJLLjE6Oruoq6h2SUT5la98pcPXJ598cuDr//u//zN33313YK6OoJZQsVu3nUfFh4WPRx11VCAyDYvylF/iDUU3fOihh4LyBg0aFIgdJRrSvc9+9rMdPk3iJEHmb37zmyAizSmnnGJ69OgRlJmlnym9D8LHcIScz33uc0aC3qhLfV8b09pYVtsVccaKP7O0Pa3wMWrjOm/fle3qL4rOqvFG41FUtB+l+9CHPtRRjQTc1r/6cNOmTbtF/rGJa91TH3f7bt52lJHPV+Gj2lqW+LFo0aNsbZbwUXWnFT9mFfFmTV9G/6TM/AQaIXx0Iz26wkd9rt8HhI/Z/YfwMTszclSbgMYOzYk1T9OlF080nx81alRiw9atW2d0vLPNqwxRa043wrkrKnQr0BzwRz/6UceLe7qXJHzUukrrhvAVjvioNa6iTvbp0yeyTVOnTjU/+9nPOu65L3O5kendI7DDBbknEOhekvAxzvZE6CSAQIMIIHxsEGiqgQAEIAABCEAAAhCAgEOg02uvvb5948aNpnv37qZ79z29jArx5ptrzNat20yvXj1N165dvHOgHozKRl19+vT2kqE2s9av3xjw69Fjby9tXLduvdm0abPp0WOvYHPNx82K1avfDEQDffv28fKtuS1btho9PNMbffq++Mhww4aNpipjTu/ePRty/GzWQa1KY063bl3M3nsz5mT1sU3PmJOX3M58SWNOI4SPtg53U9VaqM8kYGmEqISjrtP3Jwl9FAnRCgS1iaXoG240RJWm6GsSvS1dujQoXBtDEv5pw9w97lr3FMVRm1Xh3+Y5c+aYb33rWx3GKYrHF7/4xY50RYmybAW33367+f3vf78bDB2NNnbs2N0+D0evHDNmTLChFz46TQ/33Qgl2vx+//vf31FeHuHjnXfeaX77298GZejIty984QsBW/0O6rsTJRJM6+Wi2vWHP/whEKTp0pHVEiSqv2iuI/GrjaZ51llnmUsvvbTDvLDwUTckkL3qqqt26yNuHUp35ZVXBqJFe7l9RJ9dcMEFwf/Dfc3dfFQ6+WTKlClBMVn6mdL7IHwM26G//+7v/s6oj7qX+sr//u//dkRU1b2vfvWrZv/998/c9lrCx9deey0QuNpL3+sBAwYEf0ZF2UzbVzUefeYznwnGG10SVWqMCP9uWOGxLfd//ud/gj6gcUz/VsRXsfnABz5gevbsGSTTGvEnP/mJmTFjhhk5cmRwz9qs75m4Pfjgg0Gd6i9lCoHS8nDT+Sx8lJ1Fix/LED1anr961//mccEueeIiPmqdr+9A7969dltb+SB83LbtLbNmjZ7ndDJ9+vTycg1tn+d069bV7L33Xl7auHbtOrN585bgedOee+4U+ufpWGUJH1evXmO2bdsaPM+xl51T2Pk6wsc8HtuRB+FjfnbkrC4BHTGtOb9dN6ol55xzThD1sG/fvrs1TM+Un3766eDlM7tOUKL3ve99wVogfLmiQK0xFF0xXK77wpXNHyV81OkCevlLl/tilsY/jX1aW4WFj0qr+Z/WYeGo4+H1zKRJk3Y5LSA8N9XLWkceeeQuTdQcVLa6V5TwM
Y3t1e1FWN5qBPRsRCdhuM8rtFeT9qhr8chz3PWqVW8GKO181reIj2KidYG49OzZw8v5rNbcGzduMnvt1T145uXj3tqOvXKtrXqbLl06e/f10e/cmjVrA3Za//nIsEprq549927InkGejqQxR/OHfv36eOlnjTlr164Pvif+jzl7BWtoH78vVp+z43kOY06e7wpjTh5qu+exY07fvr291OfoeZgi2fs8z1m/fkOwF6B1nd9jztZAb5dn76/T/fc/tP2pp54zo0cfbkaNOsxLkc9dd91nXn99iTn77FPNwIEDvBv8NSG+5ZY7g2/hBRe8PTaSRjFf7XylvPzyAvPIIzPN0KH7m2OPnRB5BGG+kovL9fDDj5sXX3zZHH/8MWbYsANydejirIku6brr/mQkRLrssosSjxEs25ao8pcuXW7uvXdqMCCceuoJXk6Kn376eaMxZ8yYI8yRRx7q9ZhzzjmnmQED+ns35ujH6U9/0pjTyVxwwVlejzkHHjjEHHPMUV6OOdOmPW5eeullc8IJx5gDD/R1zLnFaIPu8ssvDl5Q8O2yY442D0855XgvxxyNN08//ZwZO/ZIc8QRu485RQgf0xxzLd+FhY8239579wjG7bIvhI/ZCIePEFZubeZIHCjfKUrjU0891SFG0v2w6Oraa68NBJD2UqQ+RbvQEcXaVNKml8pxL1cspc+zCNJqibJsHTqWW5tO7qUHporOEbeYcI8pUz4JPMXikEMOMatXrw7aMGvWrI4iDz/88ICFe+R1HuFjOBKmFmWjR482OsrtE5/4RGx0v7Serrdds2fPNldffXVHdYqmecQRR3T8HRa1upt9UcJHZVQfmThxYrDBuGjRokCQpv/aSxENVY7rKz1Y/rd/+zcje+wlAa1EbjpiW4I89bW5c+d23D/vvPOCyIf2ytLPlMcX4aP635e+9KVdvoeKnik/6Ohwbbiqb1pxsmz/9Kc/HfSjPG2v9R0LH6Gu8mWLojTqv+94xzvSds3d0k2bNi0QKNpL38Fjjz02aKfGEvU1bQjbSxvn2kDXFRY7u9GCwmLYs88+27zzne8M8oW/f5MnTzYf/OAHc7ehjIxFCR+/ef/V5umlz5RhYmHixzJFj2p4mcLHu+9+wCxevMycf/7pwfcyfAy2Cz7uQX/WDYAs6SXYu+22u4MHp+edd6aXa6uXXppvHn10phk+/EAzadI4L9dWDz30qJGdJ5002QwdekDuB9FFiB7Vp6KOur7hhtuCl5YvvfSCjnULwsfihj6Ej8WxpKRqEdBcXZG23fmmWqA5vV4e6dWrVyBSUbTy6dOn7yKSVDodA62XpKJ+u8JrN62H9IKTIutLXKWTChYuXLgbsCjhY1iIaNdWWkP89V//dbB2iBI+qnCl1frv0EMPDV641/zaXXvo/r/+67/uEhlSaxQJNV371E6VoXpmzpwZrFHCV5TwsZbtinjursGq1XuwthUJRAkf9f22a3j9u9Z8WEyyCh/1jOqGG24NRD7vetf5wTNk34SPixcvNffdN80MGNDXnHjiFC+fIc+a9Yx59tkXzIQJo82hhx7i5b7Vbbf92SxZsizYh/ZRcCbRo2xUAJ5zzjndy7XV3LnzzGOPPWEOPni4mTBhjJdrqwcffMS8/PJCc/LJU8wBB+zv3ff5rbe2m+uuu8Von1JrKx/3rRYtWmLuv/9hM2BAP3Piicd5OuY8bZ59dnbQDw877BAvNRG33vpns3TpMnPhhW8PXiDM8qylEb/xWt/ffvs9Ri+Jnn32aV6POYccMtwcdZSfY84DDzxi5s9fGOzxDhmyn4djzltG+pwdY86FQSA/3y7p2B544GEzcOA+5m1vm+zlmDNzpvYiZ5uJE8cE85w8wsKyud96691GuoOLLjo7V7DBTg89NH37Cy/MMSNHHhQMrO4GZdnGpy1fHWXJkqXmpJOmBCIk3ybtiuhy110PBJvgZ5xxopeCuIULXzWzZj0VPBgZP3503W/gp/VdlnQzZjwZDKwTJ44NHpT7+IXTpF0/pOedd8YuR8dlaWeZaVesWGm04dCrVw9z3HHHeDn4P//8HPPCC3PNoYceFAysPo45mhDrgeVJJx1vBgzYx7sxRxHstIFozHZz+uknBW8h+nbZMWe//fY148b5OuY8YebPf8VMnDjODB06xMsxRwsLRaDRJqyip/h2LV++Y8xRdNQpU472cmGhMUf/1xzn0EMP3m3M8UH4qMhb2gwp+0orfJz5ymbzw4d2RLJ2r08c38tMGLprBJ+4tFna8rPL+u+WvNlHXVuD4sRpUe37q7/6K3PCCSfscktvUClimit+jGMj8aGiLg4fPnyXJFkEaWmEjypcx3hrk8xeEjpJ8BR3adPqd7/7XcfRzbX8q809HTkcjgiSR/io7+f3vve9IFJd+Kp1rHHa/ldPu/Tik44kt9Fd7BHX4brF7Y477gg+lo8VaVDfebdvKRro66+/bl544YWapkvM+PGPfzwoJ3zJDvnV3YCMK0y2Kmqk+7AqSz9Tub4IH2WLonf++7//+26byFHtl3BPAj73ytL2pO9YOPqmrUeRf+oRPu7YyLrB3HLLLYndW8LjD3/4wx2/K3/605/Mdddd15FPkUe16awrfDzhGWecYS677LLgnjaixdVe4Sg+iYY0IEEVhI/CUG/kx7JFj7KxTOHj1KmPmGXLVpqTTz7e7LNP313WVj5EfFTkmXvumRqMiaed9jYv11YLFrxiZs162hxwwH5m7NhRXj5AfeyxWWbBglfN0UePDzbn8j7PKVP4ePvtd5u1azcEGyJ2DY3wsbjBGuFjcSwpqXoEtDbQkc96aSntpfWS5vaau9W6br31VvPHP/6xZhrN7SQO1Mt7uqKEj/bzKBvtC3yu8FHrOh3bbaPbxxmgdujFLEXoD1/hF8GiytCLXXpZ6+677w5uRwkfa9kefvksLX/SQaAsAlb4qD1MO9etJXyUHeE5cR7ho4QfijQksZm+l77toUo48/DDM4wiNU2ePNHLZ8gSPc6e/WLw4vwhhxzk5emDCnqybNkKc9ppJwZrK99ESHqpTDZqLXDKKSd4ubZSkKAnn3w22A9ScJZGnMKUdbzRS2/aXzvmmIlmyJB9c6+tstabNr2eUUmcIhHSueeeHpz45tu1c8zpYyZPnuDlmPPMM8+bOXNeMkceeZg5+OARno45DwbPc04//UTTr5+/Y45OiNIzJx/3yu2YoyBBo0f7OeZMnz7TvPLKq+bYYyeZ/fcf7N2Yo+c20udozXXOOWcEJ6H4dknH9sgjM4LviQLQhU9L88HenWPO4YH438dTlu+550Ej3YG0L3le8Oi0bNmy7Tb0uo8CJHUE2acjfKXg9W3CLvv0I68Qpvri+TioykbZJo6KEFfvsUNlfTn19qv8LGV+3ofkZdlmy1UkF9noowDJ9fP27cZL0aNslMiAMae+nsqYUx8/m5sxp36OO39bjJcTuaQxJ4vo0f7eR1GrN+KjjhVtxBwsi/Dx4l8v362p179nQKTwMSptlt614PM7jpt1L1+Ej7JJ0dokdooTL2qj6sILLwwi
YAwUdOEQQQQAABBBDISAGCj+HqdoKP4eovWosAAggkWyCVwcfIYznfckHOtZz79Z9OtZxO9qH63v7c8jkyq3xW0p7uGK1BBB99dxMrIIAAAggggAACCCCAAAIIIIAAAggggAACCAQUIPgYEJDVEUAAAQQQQCCcAgQfw9Vv/8fqsfK/rRgbrkbTWgQQQACBpAkMV/AxaQcU8g0TfAx5B9J8BBBAAAEEEEAAAQQQQAABBBBAAAEEEEAghAIEH0PYaTQZAQQQQAABBIILZFrw8f/d1yH/d11HcLhh2kLT/z5xmPbMbhFAAAEELAoQfLTVKwQfbfUHrUEAAQQQQAABBBBAAAEEEEAAAQQQQAABBDJBgOBjJvQyx4gAAggggAACvxHItOCjAhy7/lwOX38WqrPh9coCWVSZH6o201gEEEAAgeQLEHxMvrGfPRB89KPFsggggAACCCCAAAIIIIAAAggggAACCCCAAAKJEMi6efNWb2fnUxk5skBGjBghWVlZidhuQrfR3t4hL150SWHhWMnLy03othOxsZ6eHmlr63uCUnFxoUnDZ8+eyZMnnZKXlyejR48y2cZHjx7L8+fPnfYVFBQkomsSvo3W1jbp7u6RceOKJDs7O+HbD7pBfZ88evRIcnKyZezYsSb7WetNX80ZISNGFJhsIzUn6JkoojXn8eNOyc8PR83Jz883eS7arzkvRGt3Tk6OjB07xqThUDXHSvCxrKxM9BzkDwEEEEAAAQS8CxB89G6ViiXd4KPuK/JauaPjsXR3d0lRUaEzZtS/yHmngXNQg81J+Z2r8rq8Xt/r9Z9OhWkbva6XClN3H+61VUFBnowaZX0+Z7RzDWjRkWur4Getzis+ffrM+HxOu7x40S1FRWMlN9feHDI1J/h5qFvon0Om5sQr+uKF/fkcak68vdu/Xnd3t7S3PwrJOCdfRo0aaXIMEVlzCgpszt9ZH+c8f/5CHj+2PYdMzUlkzclyxmIWrwn6r62oOUF6nJoTRK9vXbfmjBo1wslEWHy/tLe711aFkpvbN59j6a9/nEPNCdIvOm/34sVzGTNGr63sjXP0Pm5bW7toHqu42GY+JzzjnKfOeDtda07Wrl17e+vrT8u8ebNl7txZJieFfvppp9y6dVc2bHhLNByQnW0rnNnZ2Slbtvwovb0imze/ZzK0d+VKkxw4cFQqKyfJsmWLnACktb/9+4/IxYtXZOXKpTJ5csWvN0UstfOLL751iusnn3wkI0eOtNQ0py3Nzfdkx449zg2bdetWicUL8YaGM1Jff0pqaubInDkzTdec999fL2VlpeYGnDog/vbbH/WWoWza9DvTNaeqqkKWLl1osubs23dELl26IqtWLRVtp3sj1tIb+4svtkhra4d89tnHzpcTrP25NUcHm+vWrTQ5KNYxTkPD6ag1x0rwcfToMU7d5g8BBBBAAAEEvAsQfPRulYolBws+/vzzbrlzp1k2bnxHxo0rdq6tLAUf9Qbst9/+7Hx5cOPGd01eW126dFUOHjwmU6ZUyZIl801eW+3de0i0nWvXviGVlRUmvyj6979/Izqh/+mnH5m8trp7t1l27NgnJSXF8uabK0xeW508eUp0TmfBgrkye/YMk/M533+/3ak5H374rpSUjDM3n6Phme+++9m5cfjBB++YrDk6N3vo0HGZOrVKFi+2WXP27Dkoly83ydq1y525botfTteao/2tc8gW53P0fbJz5z4pLS2WtWtt1pwTJxqlsfGsLFw4V2bNsllztm7dLlq/dX7WHeekYtzldR8dHY9k69ZtIak5k2Xx4lqT4xzrNUfnN//xjy3y+PET+dOfNpusObdvN8uuXeGoOYsWzZOZM6ebHOfo+/nu3RbZtOk95+Es1oJSbs3RBxi9//7bJsc5Fy5ckcOHj8u0aa/JokU1JmtOXd0BuXLlmnNNUFEx0dw4p6enV/S+le2ac1d27dovZWXjZM2a5SavrY4fb5BTp84552FfzbEXLPzuu23S3NziZF/0HqC1mqNfYtXrP/3y5YYN603WnPPnL8uRIydk+vTXZOFCmzVn9+4DcvWq1pyVUlHxqsGa0yOaz9FsxB//uMnkOEdzbLt375fy8hJZvfoNkzXn2LF6OX36vCxe3FdzLGYidK5EcwcffbQhri+nZ+3bd7D3zJnzMmPGVJk1a7rJD3l9wzU3NzsfThpCsjaZ8fTpU/n55zrp7e2Rt99e63zz2drftWs35fjxenn11Vdk/vx5JgNxx46dlKtXrzsf8jpRbvENpxcWOnjXyUl9MqW1v/v3H8jevYdl7NjRsmLF6yY/5M+cuSBnz56XmTOnOf9ZDOHqh5MO5tasWSFlZSXmao4+wW7btjoR6ZX169cYrTk3RAfulmvO0aNac645E/k6UU7N8V/R7t17IPv2HZbCwtGyfLnVmnNezp69ILNmTZMZM16uOVaCj2PGjHWeas0fAggggAACCHgXIPjo3SoVSw4WfNQbxS0t952bNm4IyVLwUSdOt2+vcybw33prtclrq6am66Lhj0mTXpXa2jkmJ1CPHDkuTU03ZMmShc5EucVrK51A1RCShnD1yZnW/vR9ol8I1uuC5csXm5zPOX36nJw7d9EJPU6fPtXkr/Ls3LlXWlruybp1q6W01F7wUW8Q6xeWdW5bv7BscQ65qemanDhxynTN0cDCtWv2a46G63UO2WLN0RtKBw4ccW4ovfGG1ZpzVs6duxSKmqNjCIthaz0Hd+zYa7rm6NysBut1/KAPSrD4pCG35rz++kKnNlob5+j8pt630s+YDz5422jNaXEezFJUVCRvvLHI5Djn1Kmzcv681pyZMn36FKPjnD3OtdVbb61xvixjLYSkY20di+l7RAM0Fsc5V682ycmTp6WycqLMm1dtsuYcOnRMrl+/KUuXLpKJEyeYrDkaiOvsfCLvv6/jHIsPCeqrORrW04dBWfyVSbfmVFfPlGnTbNYcvW65d+++cx/a4hc8tOboOEef9L9u3QqTgTh9MJk+DEpzL/oQOovjnP6as1gmTnzFXM3RJz3ql42ePu2UDRt0nGOx5jTL/v3HnM9mfRiUxZrT2HhGLly4LHPmzHLC/xZ/IWPHjjrR3MH69Wvj+oJHVktLS6/+tIEORCwGkHQiUtunP+GrP4trLfSo7dMLC22j/lyJxYGctlGLgrZRnxBn8SmA2sauri6nn/U8tPjNBm2j/hS3ttPihNXL/Swmi6q2UR89Tc0JdotFa44+Nlnf19Sc+MFjs5EAACAASURBVC3dmqPfBrI2YeUeldYcrYsWg9bpUHOsBB/Ly8vNjsHif4exJgIIIIAAAskVIPiYXF+/Wx8s+Khjbr0GLCgY8euvd1gKPvZdWz0XfWqF1Wsr9VNHdbM4SR45n8O1ld93Tv/y/fN2YZjPyTUZBnh5Drm/5sTfK4lfk5qTGNOwzOcwhxysv/vnkO3XHB1DWAsgufetGOcEOw/7xzkvnHGY5Tlkak6wvqbmBPMLU83R+y36y5JcW8Xf59wrj9/OXVNrtv6Xk8M4J15N99pK/7fFJ6zrcfV9toSh5
oRhnNNtMvQYlnvlbs3RwKPF0GPkfE6811ZZDx486I23oLAeAggggAACCCAQVgErwcfx48ebHWiGtW9pNwIIIIBA+gsQfLTVx4MFH7WVQ/289cCQwGChAb9hAr/L29KkNQgggAACCCCAAAIIIIAAAggggAACCCCAAAJeBAg+elFiGQQQQAABBBBIOwGCj2nXpRwQAggggEAGCRB8tNXZBB9t9QetQQABBBBAAAEEEEAAAQQQQAABBBBAAAEEMkGA4GMm9DLHiAACCCCAAAK/ESD4yEmBAAIIIIBAeAUIPtrqO4KPtvqD1iCAAAIIIIAAAggggAACCCCAAAIIIIAAApkgQPAxE3qZY0QAAQQQQACB3wgQfOSkQAABBBBAILwCBB9t9Z0bfBz4s9baSn7q2lZf0RoEEEAAAQQQQAABBBBAAAEEEEAAAQQQQCBdBAg+pktPchwIIIAAAggg4EuA4KMvLhZGAAEEEEDAlADBR1PdIQQfbfUHrUEAAQQQQAABBBBAAAEEEEAAAQQQQAABBDJBgOBjJvQyx4gAAggggAACvxEg+MhJgQACCCCAQHgFCD7a6juCj7b6g9YggAACCCCAAAIIIIAAAggggAACCCCAAAKZIEDwMRN6mWNEAAEEEEAAgd8IEHzkpEAAAQQQQCC8AgQfbfUdwUdb/UFrEEAAAQQQQAABBBBAAAEEEEAAAQQQQACBTBAg+JgJvcwxIoAAAggggMBvBKwEH8vLyyUvL48eQgABBBBAAAEfAgQffWClYFGCjylAZhcIIIAAAggggAACCCCAAAIIIIAAAggggAACLwkQfOSEQAABBBBAAIGMFLASfCwpKZERI0ZkZB9w0AgggAACCMQrQPAxXrnkrJcvedLZ2ilZWVnOf5F/A/8t8vVoy0Zr4cDlYh2F3+VjbY/XEUAAAQQQQAABBBBAAAEEEEAAAQQQQAABBOwJEHy01ye0CAEEEEAAAQRSIGAh+KiHOXbsWBkzZkwKjphdIIAAAgggkD4CBB9t9WVed6487XhK8NFWt9AaBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAgrQUIPqZ193JwCCCAAAIIIDCYgJXgoz7tcdy4cXQUAggggAACCPgQIPjoAysFi2Y/y5IXnS+iBh+zs7N/bUGsJzwO9qRGv09w9Lt8CojYBQIIIIAAAggggAACCCCAAAIIIIAAAggggECCBQg+JhiUzSGAAAIIIIBAOAQSEXzUI43czsBtRv7/PT09v4HR1/XG/IQJE8KBRisRQAABBBAwIkDw0UhHiEhuVq48a30q0isSGXJ0W0jw0U5f0RIEEEAAAQQQQAABBBBAAAEEEEAAAQQQQCCdBAg+plNvciwIIIAAAggg4FnAT/BRNzrY8kOFHb0EH3WZkpIS0Sc/8ocAAggggAAC3gQIPnpzSsVS+ZInna2dzq4IPqZCnH0ggAACCCCAAAIIIIAAAggggAACCCCAAAIIqADBR84DBBBAAAEEEMhIAUvBRw09aviRPwQQQAABBBDwJkDw0ZtTspfKkiyRp73S9bTL2RXBx2SLs30EEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABV4DgI+cCAggggAACCGSsgJ/wYzxPfFTYoZ76qK+5/5WXl0t+fn7G9gUHjgACCCCAgB8Bgo9+tJK3rD7t8cnDJ5KVlfXrf5F7c//910morKyXGqOvD1w+WmsHLjfUEflZNnkybBkBBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAg2QIEH5MtzPYRQAABBBBAwKyAleCjAhUUFEhpaalZKxqGAAIIIICAJQGCj8PfGzlZOdL9pEu6n3U7jRkYcoz2b0MFHYcKLPoJM/pZdvgVaQECCCCAAAIIIIAAAggggAACCCCAAAIIIIBAvAIEH+OVYz0EEEAAAQQQCL2AheCjIrpPfSwqKpIxY8aE3pUDQAABBBBAINkCBB+TLRx7+3ndudLZ3ukEHvXPb/DR69Me3W3HblHfEgQfvUqxHAIIIIAAAggggAACCCCAAAIIIIAAAgggEG4Bgo/h7j9ajwACCCCAAAIBBKwFH/VQysrKnKc/8ocAAggggAACgwsQfBzes0N/4vpx62PJ6u37iWv9ixZ8zM7OfqmhkaFEgo/D24fsHQEEEEAAAQQQQAABBBBAAAEEEEAAAQQQCLsAwcew9yDtRwABBBBAAIG4BRIRfNSdR25n4DYj//+enp7ftNV92qO7nIYAysvLJS8vL+7jYkUEEEAAAQTSXYDg4/D1cH5Wnjx++Fik5+Ww48CQo7Yw8t9iBR35qevh61P2jAACCCCAAAIIIIAAAggggAACCCCAAAIIhFGA4GMYe402I4AAAggggEBCBJIRfNSGDRaEHCr46K6n62pIoLS0lCc/JqSX2QgCCCCAQDoKEHwcnl51nvTY9liku2//brAx2tMeI1/X/zve4KPfn672u/zwSLJXBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAgaACBB+DCrI+AggggAACCIRWIBXBR8Xx89RH9wmQut64ceNkzJgxofWl4QgggAACCCRLgOBjsmSjb1fDhDldOdLZ9sQZ17hBRzdkGO1pjwPDkAQfU9tn7A0BBBBAAAEEEEAAAQQQQAABBBBAAAEEEEh3AYKP6d7DHB8CCCCAAAIIDCrgJ/ioGxls+aF+3nrgel6e+uguo9sdOXKkFBUVSX5+Pj2JAAIIIIAAAr8IEHxM3amQl5UnL548l6ePnv7mCY8EH1PXD+wJAQQQQAABBBBAAAEEEEAAAQQQQAABBBBA4GUBgo+cEQgggAACCCCQsQLDEXyMfKKjCx/5b+7/7bbNDUGOHj3aefrjiBEjMra/OHAEEEAAAQRcAYKPyT0XNNCY25srXc9eSGd7p7OzoZ7yOPCJj24gMvIpj4P93+6RDPYT1X5/utrv8smVZOsIIIAAAggggAACCCCAAAIIIIAAAggggAACyRIg+JgsWbaLAAIIIIAAAuYFEhV81AON3NZQT4CMFnzU9SOfBBn5xMeBocicnBznKZAFBQWSl5cnubm5ThCBPwQQQAABBDJJoONFhzzvfZFJh5y0Y82S/v+RbpHurm55/uSZdD3v+nWMMTD0GPkz1gN/0lobmqifuXa35fXgGRN5lWI5BBBAAAEEEEAAAQQQQAABBBBAAAEEEEAg/AIEH8PfhxwBAggggAACCAQQ8BN+HGrZWGHHyCZ6+bnryMBj5PKRT4R09znwKZGxQpjRjsPPsXnh9uPqZXssgwACCCCAQLoIxBvOG2q9ga8NFkaMNIxcxn1iY7SAo64zVPBx4NMeI5d39xetfQPbEq1//Vr5XT5dzimOAwEEEEAAAQQQQAABBBBAAAEEEEAAAQQQyEQBgo+Z2OscMwIIIIAAAgj8KuAnoOcnHDhUEDJa8FEb5PWpj7rsUD+PPdhTJd2DHiwY6ef4OIUQQAABBBBAIHUCXkKPsX5K2m3twKc16r9HhhcjQ5D6mt+nPUauE7nPSK1YQcjB1oslTvAxlhCvI4AAAggggAACCCCAAAIIIIAA
AggggAAC6SNA8DF9+pIjQQABBBBAAIE4BIYj+DhYMDFamFEPaeC/R/5bZPujPRnSJfF6nLGWi/V6HF3AKggggAACCCAQRcBLiM/LMrppd7lo/zvy36KFInX9wcKQkdt2DyHW0yajtXmw4/B6fJH75mRCAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQyAwBgo+Z0c8cJQIIIIAAAggMIuA3yDfY8kM94VF3He31wdaJFnQcGGp0D8f992hhysHWcdf1e+yRhEHW5WREAAEEEEAAgegCfoN+0bYy1NMUB4YSBwYdoz01MtYTHyPbMDBYqa/Ferqjl6dZejlfEmHnZT8sgwACCCCAAAIIIIAAAggggAACCCCAAAIIIGBDgOCjjX6gFQgggAACCCAwjAJ+Qnxeg496OEOFIb0+9TFyO9ECke7r7r4G+98ub7T2+zmmYewmdo0AAggggEDGC/gNCUYLHQ4MNw4WVhz4JEgXf7CnQurriQw++g0y+l0+408mABBAAAEEEEAAAQQQQAABBBBAAAEEEEAAgZALEHwMeQfSfAQQQAABBBAILpCI4KO2ws9THwcGFN2jiPz3yO0N9jPYg+13sO1HW36wf4uU9WMUvEfYAgIIIIAAAghECsQK9Xn5+ejBfu5a9zNUGNJLUDJyG9GeGjnUsfAz15zrCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAvEIEHyMR411EEAAAQQQQCCtBPyE+oZa1k/wUQFjhR91mYE/Vx3riY7udgeGJgd2WLxPefRjlVYnCQeDAAIIIIDAMAj4fcKj20QvPy892DKDBSSj/Ux2tP0NFXz0EtAc7Bhi8ccKh8Zan9cRQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEwiVA8DFc/UVrEUAAAQQQQCAJAn7DfH5Cg7F+7to9nKGWGxh+1HUG/lS239BltG0MpPXj4mfZJHQhm0QAAQQQQCDUAn5Ce7GekBgrXDhUKHKwcKPX0KN2gp/gY7zBzmid7ccw1CcLjUcAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBwBAg+ciIggAACCCCAQMYL+A3tJeqpj35CkYM9HTJaKHJghw71s9e6bKzjj/V6xp9AACBgWODf//3f5T/+4z/k3/7t3+Szzz4z3FKahgACsQRiBfuGCjzGCks6E0RZWVFDiwP/3V3Wbe9gQcdYAcyB24k8/ljHGs0qnnVimfM6AggggAACCCCAAAIIIIAAAggggAACCCCAgF0Bgo92+4aWIYAAAggggECKBPwG+4IEH/WQBvsZai9ByIFPehy4vcgg5GB8fp5YOXAbfq1S1IXsBgEEBhFYsmSJPHz4UBYtWiSff/45TgggECIBL0G+IE9MzM7OfkljYIDRS6Ax1tMjI3fgJQjpLu/l2Ad2ZTzrhOh0oKkIIIAAAggggAACCCCAAAIIIIAAAggggAACAwQIPnJKIIAAAggggEDGC8QT5vMTHvTzM9SDhSK1kwa+5nXZgetGdrif4/ByosRj6WW7LIMAAvEJ/Pf//v/Jli3fyF/+8lf56KPN8W2EtRBAIGkC8YT1vDy9cWCDhwoo6rLu6wP/d+RrA/9vP6/FWjdWe710QDyWXrbLMggggAACCCCAAAIIIIAAAggggAACCCCAAAI2BQg+2uwXWoUAAggggAACKRbwG9gL8tRHL092dA/fS7hxsJ+yjtZGv8ep7YhnnRR3H7tDAIFBBPbtO+K8UlRUKHPnzsQJAQRCLBBPsM/LUxYH+4lrpYr3iY6xgpbRtu12TaKOM8RdTdMRQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEPAgQfPSAxCIIIIAAAgggkP4CfsN9foKPqhfvUx8HrutnO7putJ++9nusg/V+oraT/mcXR4jA8Ans33/U2bkGH+fMmTF8DWHPCCDgSSCe0F+0DQ8WeIwVSBzs5611H15fG7hstP9/sH8b6t+HAkyUm6dOYiEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABEwJZZ8+e721puSfl5aVSVlYq2dnZJhoW2YgrV5rk0aMnMm3aZBk5cuRvnjgw3A1+8eKFXLx41WnGzJlTJScnZ7ib9Jv9t7a2y61bt6WwcKxMnDjBZD/fvHlbHj5slYqKSVJUNNZcPyvquXMX5dmz5zJnzkzJzc0118+PHz+Ra9duSEFBgUyeXGHyXGxuvifhqTmvyciRI8ydi9ScxLz1bty4La2trVJZOcmpjRZvllJzgvd1f80pk7Kykpiff36DfH7Cj4MFFq9evS6PHj12xjlav91z0cuTHl0hv2HIwdYbKK7b7erqFh2LZWWJTJ36WkzD4L3mfwttbW1y+3aLjB07RiZOfMXk+/nmzTvS1tYukyZNMFtzLly4LM+fP5fZs2eYHEM8edIp16/flBEjRkhl5UST52JLy325d++Bc21VWjrOxLm4f3//Ex91DNvUdEMeP34sU6e+XHP8v/OSs8aLF12/1pxp06aY7OfW1ja5c6dFCgvHyKuvWq05t6WtrYOaE+A0tVxz3LGC1pz79/tqTklJX83x8zPY8T7NUVm9hiCvXLkmep06ffoU59rK/Rtq7O13XO53+cjTQj/3Ll3ScU6WzJgxxeTn38OHOs654wTYteZYnLe7ceOWaG2sqpokY8favLY6e/aC6GdMdfUMo/M5j+XaNR3nFEhVldX5nBbRujN+fLkzzrF4Ll6+3PRrzVHLIPUhwEfIoKu6NSc7O8upixbnkHVu9vbtu6ZrzvXrt0SvAW3XnIui83dWa47OQei11ciRBVJZabPm3L3b7FxbhaHm6Bgicj4nGfUjnm3qfQyti2GoOcXFRTJhwniTny3Wa47O3ekcsuVxTn/N0fmcSSY//9ya88or5c61lcVxjl63PHnyxLlu0bkxa3/9NSdbpk9/zWQ/P3jQKnfu3BXbNeemM4es1wQ6121tPOvWnK6uLmcO2eK98nDVnPFSUlJstOZcFZ0bmzFjqnOdau3v2bNncvnyNcnJyZZp06g58faPzkO0t1Nz4vXT9To6HonOi+m8p9Zui2OI/nGO/ZqjeTu9tvL7l7VjR13viRONUlNT7fyXl5fndxtJX/7777eLBlQ2bnxHxo8vM3eyaNH/6qut0tsr8vHH7780mZ90HI870AHxvn2HnAmh5cuXSH5+vsc1U7fYnj0H5fz5y7J69TLnRqzFyb+//e0r0RDpn//8exk9enTqcDzuqbm5RX76aZcUFRXJO++siasoeNxV3IudOHFKtObMnz9H5s2bbbLmbN26XTSI++GH70h5ub2aozcOv/76e73dKB99tMFozbkq+/YddgK4b7yx2HTNWbPmDZkypcp0zfnLX/4go0aNivt9l6wV3ZqjEwVvv2215jSK1h2vNSeZwUfth2gBxe+/3yG3bt1xxjkazoyczIj3J7Gj7Wuwf3PPj8GOvbOzU7Zs+clp1wcfvGPyIlcvcA8ePCKTJ1fK668vMPnZsn//Ybl4sUlWrVrq1EaLFz//+Me30tHRIZ988pHJCVQNMu/YsUfGjSuStWtXmBz
n1NefkYaG01JbO0eqq/WLMsP/haj+4GOR88THn37a4QSFN2xY73zxTUPNlv702urbb39y3iMffPC2yXNRbx4ePHhUXnutUpYssVlzdBym14C2a84WZ2LIas25e7dFdu7cKyUlRbJmzUopKEj9NXSsGxz19aedmjN//lyprp4VteZ4/anryDowVKhRl/MTmNRrKw3QbNr0Oykt1S+h9BUdPwHNWDUqltNQ6+sNka+//sG5Fti8+Xcma86FC1dExxE6T7J06UKT11a7d++XixevyJtvrjT7Jcz//M8vnZqj8zkWr63u3GmWn3/e7QQK33prlclxzrFj9aJ1Z+HCGpkzZ5bk5dn7QrCOIbTmbN783i/jHFsDHT0Hv/nmR6dea120GFrQL0Pp+NFyzdm1a59cunRV1q1baTYorDXn0aNH8tlnNmuOvk+2basLRc1ZtKjWubayWHO2bPlRtH5v3rzhN/M5scYvqXi9vf2RaBst15zz5y/JgQNHncCCzudYvG+1c+c+uXzZbs3R+bz/+q+vnC9V99Wckak4vXzt49atu7J9e53zPtHaHc/NbF87jGNhd5yzeHGtzJ5ts+boGEKDC3pPSK+tglwHxUEUc5X29g5nDjkvL0c+/NDmOOfcuUvOfI4GM3U+x2bN2euE1vWawGJQuKenV/ReuX6p+tNPrdacO7J9+x7nS6JvvmlzDvno0ZPS0HBGFi+eb/aLMt9884Po3JhmX9wv28YsBClcwK05+fm5snHjuyavrfSLCQcPHnO+9LZkyXzTNWf9+lXOw8ms5XP0F+3+9revneD/J598bHKcow890ftW48eXmr1vdfToCWloOOuch1ZD6zo/q7mDjz/+wAmE+x3nZDU2nu7VBKp+m2rSpFfNncxaH8+cOS/6Dfd582aZ/Oa4flu3sfGs9PR0S23tXJNFS78dqU9r0nCKBnwsfgNDB3J6Q3vq1CqzTx/VwJ6GzrQoWLw40wnU8+cvOm2bNWu6yeCHBgr1vwkTXnGeQGPtAzQMNUe/OXfqlNacHidYYfHiLFw1Z7KnJwGmcLz+666oOcHV3ZqjT8fRJx7Hqjl+g49+w4TRtn/mzAXniccaBh8z5uVQfaz2DPwZ62jLD7aNWNt2X9dvjGvN0f8/3sB6rH0F7emHDx/KlSvXnUCchh8thgqbmq47T8iZMmVyXIP2oEZe1tcb2Z2dT2XRohqTYwidxL948bIUFIww+5R1nczXp3LptZWVp3IN/KlrHSvqF3k0sDCw5ng5T5K9jD4Z5/Tpc7/UnGqT1y0PHjwUfVqv5Zpz9eo15wk5GloYN87/REGy+1m3H5aao6EU/XZ7rDGEFzO/EzZDbVO3pRNrfTVHxzkvPwnQa7Bw4HKxPkP9/FS2tl+fsqc1xx3nDPVUyqECkbEsvPhHW0afEKDjHP2bN6/a5LWVjh+uXm2ScePGOYFri/M5GkDSdmpoweqTAI8fb3CeVmF1Pkdv2ly4cMm5WTNz5jSTYzGdP9a6o3M5Os5JRF2M97072Ho6hnBrjsUn5ISj5twTHUfYrzn3RJ8Obrnm6LWVBmgsziHrU130AQT6VBKrNUefsqdfErVdc85Ka2uH80ARvbZK5FgvEfWxv+ZkOWMxi3PIej+oqemalJSUOF+esDvOueeEFiw+CVDn3HQOOQw1Z9QovbayOc7przmvyquvjjc6zrFdc54+fSanT+u1FTUnSA3XL5Tdu3dfpk+favJJgGGoOfrETP0yjwbBdT7H4kO/9MnbOo9cUfGqM49s8dpK50r012T0PvTo0aPMjXPCUXNaRO8LlZaWOg8nszjO0Zqj8zn6lD2dQ441LxikvsWzrltztL/1vpXFayu35owePdKp3RZrjj7ZU7/8VlExUSZMKDdac85IW9ujuGtO1r1793r1Jwz1MbAWi6q+Abq7u52fWczPzzNXVN3Ag7ZRwwcWLx61jdo2baNefFssqm4/axu1fdaKqluI9dHd2kaLRdU9F7WN+mexqLrnIjUnno/2/nX0Q56aE8yQmhPcr7/mdOtzDNOq5vgN6fn5uWvXLbIH3HGOPr1AP6dj/Wz1wN6L9pPYiQxAuu8XHUtYHUO4dVH9rI5n3bGYGlq7GeKeU3ou6n9Wx7NuP2t7LZ+L7jjHynhWn56hf/oTqfpTe3ouahvdmpOYT4TEbUX7Wduo/1kdz1JzEtPf1Bx/jtE+O/o+W3qc+RytOX5/PnpgCDHW55Pf0GP/XIS+n/s///y2cyipWG2Opcy1VSwhb6+772fmc7x5RVtKz0Xmc+L3ixzPMocczJE55GB+7trMIQd3dK9buG8VvyXjnPjtBs7bcd8qmGV4xjldzryi1blF7pUHOw/7r0+5Vx5UknFOUMG+3Ab3yoM5Ms4J5jfwnhDzOfF7Ms6J327gmFs/X/T+ZDzzvVkPHjzoTUxT2AoCCCCAAAIIIBB+Ab/BRz1iP09V9BJK9Bt+jNYGL/sZ2FvxHHvkNoKuH/6zhyNAwJ5A/09dF8qcOTPtNZAWIYBAXJM5kWzxBAljhRijdctQP4Ht5Se1dZvxtHWoUySeiTBOOQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAIH0ECD4mB79yFEggAACCCCAQIIE4gnvBX3qozY9VtjRS7v8hB3j3V6CmNkMAgikSGDfvsPOnvSJj3PnzkrRXtkNAggkU8BL2M/rT11rO/1uz0vI0csyA428tCMR6ySzb9g2AggggAACCCCAAAIIIIAAAggggAACCCCAQOoECD6mzpo9IYAAAggggEAIBLwEAqMdhuXwo7bXb/tidVW8TrG2y+sIIJBYATf4WFysT3wk+JhYXbaGQPIEEh0CjCeI6B5drKdDet02T3tM3vnClhFAAAEEEEAAAQQQQAABBBBAAAEEEEAAgUwUIPiYib3OMSOAAAIIIIDAkALxhPr8Bgu9/jx2rCdBDnYgXrcfbf14jp9TCgEEbArs23fIaVhRURFPfLTZRbQKgbgFYoUj/Tz1MVojvAQavSyj2463rUPhxNpm3LCsiAACCCCAAAIIIIAAAggggAACCCCAAAIIIBAKAYKPoegmGokAAggggAACqRSIN/iXiPCj15+r9tpGv20aytnrPlPZV+wLAQSGFuCnrjlDEAi/gN+AX9DAo4rFespjtGUG+7eh/j3Wa0P1nl+X8J8JHAECCCCAAAIIIIAAAggggAACCCCAAAIIIIBApADBR84HBBBAAAEEEEBggEC8Ab94QoZBgo5+2xlr+VivBzlRkrntIO1iXQTSXcANPlZWTpLKyonpfrgcHwJmBZIZ0ou17VivD0SLN/So2/H6BMiXJqaysuLqN7/HFddOWAkBBBBAAAEEEEAAAQQQQAABBBBAAAEEEEDArADBR7NdQ8MQQAABBBBAYLgEgoT0EhV+1GP3EoqMt61e1/O63HD1FftFAIHBBdrbO6Sx8ayzQEXFRKmqmgQXAgiEWMBP0M/Psi6J19CinydKxmpHrNeH6q4g64b4NKDpCCCAAAIIIIAAAggggAAC8GnaBwAAIABJREFUCCCAAAIIIIAAAr
8IEHzkVEAAAQQQQAABBKIIxBv4i7XeYK/7+XcvgUi/nRqr3V63l6jteN0fyyGAwOACkcFHnvjImYKATYFEhfeCbCcRQUavocmBvRBvu+Ndz+ZZQKsQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEE4hEg+BiPGusggAACCCCAQNoLBAnwxVrXT8hRof0EHWPt20/HJXJbfvbLsgggkBgBDT42NJxxNqbBR574mBhXtoLAcAskMvTnJ7DoJyCpRrHaGev1wZzjXW+4+439I4AAAggggAACCCCAAAIIIIAAAggggAACCCRWgOBjYj3ZGgIIIIAAAgikkUCQ4F+sdRMRflRqv9tJVPfEOr5E7YftIIBA/AJ9T3zsCz5WVBB8jF+SNRFIvUAyw31DbdtPEFJV/IYhXcl4jy/e9VLfg+wRAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAINkCBB+TLcz2EUAAAQQQQCC0AkHCfV7WjSe0mMh1QtsxNBwBBDwJtLW1S2PjWWfZefNmS1FRoaf1WAgBBNJTwG/gURXiCTbGCifGen0o/SDrpmevclQIIIAAAggggAACCCCAAAIIIIAAAggggEDmChB8zNy+58gRQAABBBBAwIOAlwDjYJvxsm6ig4yx9hnrdQ8kLIIAAiERuHbtply/ftNp7bx51VJUNDYkLaeZCCCQCAEvIcFEBxtj7TPW60Mdd5B1E+HJNhBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQRsCRB8tNUftAYBBBBAAAEEjAkEDQp6WX+oZZLxmkvspW3GuoPmIICADwENPbrBx7lzCT76oGNRBEIr4CUcGGuZeJ4MqWBBthsLPNa2Y63P6wgggAACCCCAAAIIIIAAAggggAACCCCAAALpJ0DwMf36lCNCAAEEEEAAgQQLBA0Ielk/3oCjHmqs7cd6PZLLz7IJZmZzCCCQYIHr129FPPFxthQW8sTHBBOzOQSGVcBvGDDW8kFej7WuQnlZZjDQIOsOayexcwQQQAABBBBAAAEEEEAAAQQQQAABBBBAAIGkCRB8TBotG0YAAQQQQACBdBIIEgj0um6s5YK+rv0Raxte+iwR2/CyH5ZBAIFgAi8HH6ulsHBMsA2yNgIIpFQgaNjP6/qxlgv6uqLF2sZQsEHWTWmHsTMEEEAAAQQQQAABBBBAAAEEEEAAAQQQQACBlAoQfEwpNztDAAEEEEAAgTALBA38eVk/1jKxXnd9E71cmPuNtiOQqQKNjeekra3NOfyVK5dmKgPHjUBGCPgNB3pZPtYysV534b0uN1hHBV0/I04ADhIBBBBAAAEEEEAAAQQQQAABBBBAAAEEEMhAAYKPGdjpHDICCCCAAAIIxC/gNVA42B68rJ+oZbQNXrYVpK3xS7ImAggkW0CDj+3t7c5uVqx4Pdm7Y/sIIJACgSAhQD/rxlo21usuhdflBqMLun4KuoRdIIAAAggggAACCCCAAAIIIIAAAggggAACCAyTAMHHYYJntwgggAACCCAQXoEgYUI9aq/re1nOyzKR0n6XD9pLqd5f0PayPgLpJHDq1Plfn/hI8DGdepZjCbNAKoN8fvflZXkvy7j942fZaH0adP0wnye0HQEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQCC2AMHH2EYsgQACCCCAAAIIvCSQiDCfn214XdbrcpEHE886nA4IIBAOAfenrouKimTevFnhaDStRACBuASChAS9rut1OT0AP8tGO+Cg68eFyEoIIIAAAggggAACCCCAAAIIIIAAAggggAACoRIg+Biq7qKxCCCAAAIIIGBFIBGBQb/b8Lq81+ViWSZqO7H2w+sIIJAcgb17DzkbJviYHF+2isBwCCQqEOhnO36WVRO/yw90DLr+cPQL+0QAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBIvQDBx9Sbs0cEEEAAAQQQSAOBRIYC/W7Lz/J+lk1Ft1hrTyqOmX0gMFwCe/cednZdXFwoc+fyxMfh6gf2m9kClkJ8ftuS7OWjnRl+95nZZxdHjwACCCCAAAIIIIAAAggggAACCCCAAAIIZLYAwcfM7n+OHgEEEEAAAQQCCCQyxBfPtuJZRw833vUCULEqAgikWKCtrUMaGs44ey0u5qeuU8zP7hAwIRBviDCe9eJZZyBSIrZhAp5GIIAAAggggAACCCCAAAIIIIAAAggggAACCKREgOBjSpjZCQIIIIAAAgikq0CiQ4Txbi/e9SL7JRHbSNd+5rgQCJtAZPBx8uRJUlk5KWyHQHsRQMCjQCICg/FuI971Bh5aorbjkYzFEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBNJAgOBjGnQih4AAAggggAACwyuQ6MBgkO0FWdePYqr246dNLIsAAv0C7e2PpL7+tPMPBB85MxAIn0AqgoBB9xF0fbdXErWd8PUyLUYAAQQQQAABBBBAAAEEEEAAAQQQQAABBBAIIkDwMYge6yKAAAIIIIAAAr8IJCMIGHSbQdencxFAILwCfU987As+1tTMkaKiseE9GFqOAAIJE0hEyDAR24g8oERvL2FYbAgBBBBAAAEEEEAAAQQQQAABBBBAAAEEEEDAtADBR9PdQ+MQQAABBBBAIEwCyQgaJnqbid5emPqHtiKQSQLXrt2Qa9duOYdcU1NN8DGTOp9jRSBCIJGhwkRuy21iMrbJCYAAAggggAACCCCAAAIIIIAAAggggAACCCCQGQIEHzOjnzlKBBBAAAEEEEiRQLKChcnarsuS7O2niJ/dIIDALwKNjeektbXN+f8IPnJaIJD+AskMECZj28nYZvr3MkeIAAIIIIAAAggggAACCCCAAAIIIIAAAgggEClA8JHzAQEEEEAAAQQQSLBAskOEyd6+Vw4r7fDaXpZDIJMEIoOPq1YtzaRD51gRCL2AhVBgMtuQzG2HvvM5AAQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAHPAgQfPVOxIAIIIIAAAggg4F0gFaHAVOzD+xGzJAIIWBJoaDjrPPGxuLhIampmW2oabUEAAaMCqQgkpmIfRnlpFgIIIIAAAggggAACCCCAAAIIIIAAAggggECCBQg+JhiUzSGAAAIIIIAAAq5AKoOJqdwXPYwAAvYF6uoOOo0k+Gi/r2ghAsMtkKowYqr2M9ye7B8BBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAgNQIEH1PjzF4QQAABBBBAIEMFhiuQOFz7zdBu5rARMCXQ1tYhDQ2nnTZp8HHePJ74aKqDaAwCBgRSHUJM9f4MENMEBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAgSQLEHxMMjCbRwABBBBAAAEEVMBCENFCGzgbEEAg+QJ9wcczzo6qqiZKVVVF8nfKHhBAwKSAhcChhTaY7BwahQACCCCAAAIIIIAAAggggAACCCCAAAIIIBBIgOBjID5WRgABBBBAAAEE/AtYDiBabpt/adZAIDMF2tsfSX193xMfa2vnSGHhmMyE4KgRyAABy6FCy23LgFODQ0QAAQQQQAABBBBAAAEEEEAAA
QQQQAABBNJegOBj2ncxB4gAAggggAACVgXCFjIMW3ut9jvtQiDZAtev35SmppvObgg+Jlub7SOQWIF0CAumwzEktlfZGgIIIIAAAggggAACCCCAAAIIIIAAAggggEAyBAg+JkOVbSKAAAIIIIAAAj4ECBT6wGJRBBCIKdDQcFZaW9uc5VavXhZzeRZAAAEEEiFA4DERimwDAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAwKsAwUevUiyHAAIIIIAAAggkUYDwYxJx2TQCGSZA8DHDOpzDRcCAAKFHA51AExBAAAEEEEAAAQQQQAABBBBAAAEEEEAAgQwTIPiYYR3O4SKAAAIIIICAbQECkLb7h9YhEAaBurqDTjOLi4ukpmZ2GJpMGxFAIKQCBB5D2nE0GwEEEEAAAQQQQAABBBBAAAEEEEAAAQQQSAOBrCNHjvdeunRVpk6dLK+9Vik5OTnmDuvIkRNy795DWb58sRQWjhVrE+vPnj2T/fuPiEivrFy5TPLy8swZ3r59V06fPievvFIu1dUzTfbzqVNn5ebNO1JTU+20Mzs725zj7t375fHjJ7J+/WrJz8831z79ScPjx+tl9OjRsmhRreTm5ppr4+XLTXLp0hWZOvU14zXngSxfvsR4zRFZuXIpNSfOs9ytObW11TJ+PDUnHka35owZM1oWLrRZc3SMc/nyVZk2bYpTcyx+thw+fFzu338oK1YskbFjXx7nWAhBPn36TA4cOCJZWSIrVtisObdu3ZEzZ87LhAnjZfbsGSbHOY2NZ0XbWVs7R8aPLzN5Lu7atV+ePLE7znn4sG+cM3bsGFm4sMbkOKev5jTJtGmvyeTJFcNyLra1dUh9/WmnrEcLPvbXnNcdS2vXVk+fPpUDB4467Vqx4nWT4xxqTjyjht+us2vXPnnypNPstdXDh61y/HiD+Zpz5YrWnClOzUnVOMdP3Th06Jg8eNAadZyTmDMp2FY6O5/KwYO2a47Ok5w9e15efXW8zJplc5zT0HBGdN5p/vw5Ul5uc5yzc+c+0f5ev36VyfkcfZ+cONEghYVjZMECm+OcixeviNacGTOmSFWVzWur/ppjc5zTX3OynbpocQ755s3bcvbsBWpOsI8X0Zqj49q33rJacx7KiRONzrznggXzTF5bXbhwRa5etV1zDh48Jjpm1OsWi9dWOtbWuqhjRJ3ntlhzbty4LefOac15RWbNmj4s19Cx3u56fX3nTrMsWDBXyspKUzbmjtUu93WdO9T5HMs1R+c9T55slKKisTJ/vtWac1muXr0mM2ZMlaqq1F1bee1nXc6tOXpPSOfj/VyX+dlPvMv21Zyjkp2d69zPt1xzJk58RWbOtFtzbt9uloULbdcczUasW7fS5LVVf80plPnz5xod5/TVnJkzp0ll5SRzny2RNWfVqqVO7sBazdG8xuHDxyQnJ1feeMNqzbnlXFtNmvSq09cWc1g6zumrOfOkrKzE3LnY09Mju3cfkOfPn8ubb64wWXPu3Xsg9fWnpLi4UGprbdac8+cvS1OT7Zqj94Q0d7Bq1TIZPXqU75qTtW3brt6jR+tlwYI5TkdYDHNt2fKT3LhxSzZt+p1zQztVk/leB3c6mPvHP7aIXmD84Q8bnY6w9qeTk1oUpkypcEILBQUF1proXJydO3dR1q5d7twsthja+1//6wvnDffXv/6rM5lh7e/u3Wb5/vsdTmF99911MnLkCGtNdG4e9tWcub/UHHtB4S1bfhSddNm8+T2TIVwdzH3xxbdO3/7+9x+YrDk6OVlXpzWn0pn8s1lz9sm5c5dk7doVMm3aZNM157/9tz86kxnW/rTmbN26Q8aNK5J3333TZM05dqxejh1rcAbtNTVzJD/fXs355psfRW8uDVVzhjMA+fjxY/nii++cQebHH79vtOZcFn3C3dSplbJ8uY5z7H05QQM+58/31Rz9wpHFcc7//J//kPb2dvnrX23WHL3RoOOckpIieecduzVHxzpac+bNG56aExl81KCt3liI/Pv66x/k9u07snnzBpMh3EeP+mpOdnaW/P7378uoUfaurfS9vGfPIZk6tUqWL9dxjsWas1d0QuPNN1fKlClac+x9wdF6zdFJvx9+2C4lJePk3XfXyogR9q6tjh07KcePN8qiRTUyd2510sc58Uxya83RsPDHH28wGYjr6Hgk//znd87ks7bRbs3Rcc5rzg1Ei9dWO3dqzbkk69atMltz/sf/+Ltof+t8jsVrKw2O/vDDDiktLZF33lljsuYcPXril5oz33mitMWb2V999b0zztHrFoshXLfm6OfyRx/ZrDk6N7t37yFnblZvIFqsOTt27JELFy7LW2+tlilTqkzeQLRec/Sz+ccfd5quOfowDA1nLl48X+bNs1pztjrBf52fLSvT4H+Wqam79vYO+fLLrc78Q1/NGWmqfdoYDQPs23fY+SLPG28sMllztm+vE72/tn79GpMPctA5Q7220mvpv/zlX0yOc3Te86efdjnB0bffXm1ynHPkyHE5ceKULF68QObNm2V0nLNVbt26K3/4wwfOOCee67NkFgGdU/zyy+8dO53ntlxzpk+fIsuWUXPiOR80hKT3yvU+5Z///AfTNae8vNT5sq3F+Rz9YvrJk6dkyZIFMneuzZrz5ZffOYE4zb6opdWao/f8Nm2yWXP0YR368DQN1S9dutDkOGfbtt1y8eJVeeedtcP2IIehapFbc/SBHZ99ZrPmaI7t5593O/da9Bp1xAh7OSz9MpSGXF9/faHMmTPT5Djnn//8Vu7caZF/+ZeNzpjRb83Jampq6m1t7XBCXHpTzO8G4vlQ9LvOvXv3nQ9QDT3m5xc4Txyy9Nfd3S3Nzfekq6tb9FsiFtPaGs7Ubx/qTTm9cWMtPKr9qU/x6ejocNoXT4o3FeeEhnz029n67QuL/azfsNFvsmj/Wn2alE646A15fYqBxSe46nn0cs3JN1cX3Zqj//vVVydITo69p6P215wCKSkpNl1zSkvHOTc4LX7+UXOCV/a+mtPu1Jt0qDnDEYAMR815Ig8etDkXFHZrTqtzs91yzdFgoT4hwOo4R7/Vp9+e0zGYTrZYHM/qWLa1dXhrzrVrN6Sp6aZTQKMFH1ta7ktn5xN55ZVXTAb29JqqpeWe2B7nPHGuXbTmjBtndZzj1pwS52aDxXEONSf4OCdVNSfI+aM1RycndT7HYnimq6tLtI3d3T3Ok4ZsXlvpOKfV+ZIRNSf+943WnGfPnkpFhe35HD0HLQb2VN69ttL544FPqo+/ZxK7po4hdD7C6hxyGGqOzsHrHDI1J9i5Sc0J5tdXc9qdOeRw1Byr11Z94xy9YTxhgs1xTn/NGel8sdridb6Owx490vkc29dWem+oomKi2ftWOp+Tl5fzS0jY3v0Mak7wuu2Oc6g5wSy15ugcss596ngsyPV4sJZEX1vvT9y92yLhqDm5Jp/UG5ZxjmZfOjs7nbkSiw9OC1PN0bnZ4mKdQzYWchKRBw8eSkfHY2pOgIKp9bBvnBOGmlNo8kn1yh+05mTdv3+/Vz+k9IPT2oene35p
+3SglJ2dYy706LZR29fbKyYnybWNauiGJSxePEa2MQznosXQY+S5qP83/Rz/J1R/zck2WxepOfH3b+RnS1g+/6g58fe3+/kXhs8WrdvxjMVSEYak5sR/Dr48nu11Lm7j6efgLYi9hb5+7jU5Sc54Nnb/uUs0NJx1nlCuf6tXL/vNiu44x/pnC9dW3vs82pJ9/UzNCaKYqdfQif6MouYEOQv71g3LuUjNCdbXYelnrqGD9bOuzbVVcEPGOYkx5F5BYhz1PW392ioM9wpEskyGAdyxGOOcYO8XxjnB/F6eW6TmBNEMy7lIzQnSy33r6udzGD7/wnDfinFO/OdjeGqO5rDiuz8Zv473Nblv5d1qsCXDdK883pqT9eDBg97gVGwBAQQQQAABBBBAYLgFUhGCHO5jZP8IIDC0QGPjWedphPoXLfiIHwIIIDBQINFhR4QRQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEiFAMHHVCizDwQQQAABBBBAYJgFCEUOcwewewRSJLBnzyFnT8XFhTJv3uwU7ZXdIICAdQHCjdZ7iPYhgAACCCCAAAIIIIAAAggggAACCCCAAAII+BUg+OhXjOURQAABBBBAAIGQCxCCDHkH0nwEBhFob++Q+vozzqtVVROlqqoCKwQQyFABgo4Z2vEcNgIIIIAAAggggAACCCCAAAIIIIAAAgggkEECBB8zqLM5VAQQQAABBBBAIJECBCgTqcm2EAgucO3aTdH/9K+2tloKC8cG3yhbQACBlAsQWkw5OTtEAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQCKEAwccQdhpNRgABBBBAAAEELAoQhLTYK7QpkwQaG89Ka2u7c8irVi3NpEPnWBEItQBBx1B3H41HAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQGCYBgo/DBM9uEUAAAQQQQAABBBBAAIFECjQ0nHGCj8XFhVJTU53ITbMtBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBEwJEHw01R00BgEEEEAAAQQQQAABBBCIT6Cu7qCzIsHH+PxYCwEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAIDwCBB/D01e0FAEEEEAAAQQQQAABBBCIKtDW1i719Wec16qqJsnkyRVIIYAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIJC2AgQf07ZrOTAEEEAAAQQQQAABBBDIFAH3Z671eGtrq6WoqDBTDp3jRAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQyEABgo8Z2OkcMgIIIIAAAggggAACCKSXQGTwcfXqZel1cBwNAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAgMECD5ySiCAAAIIIIAAAggggAACIReoqzvoHEFxcaHU1FSH/GhoPgIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAJDCxB85AxBAAEEEEAAAQQQQAABBEIs0NbWLvX1Z5wjIPgY4o6k6QgggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAp4FCD56pmJBBBBAAAEEEEAAAQQQQMCeQGTwsba2WoqKCu01khYhgAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAgggkEABgo8JxGRTCCCAAAIIIIAAAggggECqBRoazkhra7uzW4KPqdZnfwgggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAsMhQPBxONTZJwIIIIAAAggggAACCCCQIIG6uoPOlviZ6wSBshkEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAHzAgQfzXcRDUQAAQQQQAABBBBAAAEEogtE/sw1wUfOEgQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAgUwRIPiYKT3NcSKAAAIIIIAAAggggEDaCUQGH/mZ67TrXg4IAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEBgEAGCj5waCCCAAAIIIIAAAggggEBIBRoazkhra7vTeoKPIe1Emo0AAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIOBbgOCjbzJWQAABBBBAAAEEEEAAAQRsCNTVHXQaws9c2+gPWoEAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIJAaAYKPqXFmLwgggAACCCCAAAIIIIBAQgUif+aa4GNCadkYAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIICAcQGCj8Y7iOYhgAACCCCAAAIIIIAAAtEEIoOP/Mw15wgCCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggEAmCRB8zKTe5lgRQAABBBBAAAEEEEAgbQQaGs5Ia2u7czwEH9OmWzkQBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABDwIEHz0gsQgCCCCAAAIIIIAAAgggYE3ADT7yM9fWeob2IIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIJFuA4GOyhdk+AggggAACCCCAAAIIIJBggcifuSb4mGBcNocAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIGBegOCj+S6igQgggAACCCCAAAIIIIDAywKRwUd+5pqzAwEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAINMECD5mWo9zvAgggAACCCCAAAIIIBB6AfdnrvVACD6Gvjs5AAQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAZ8CBB99grE4AggggAACCCCAAAIIIDDcApHBx9Wrlw13c9g/AggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAikVIPiYUm52hgACCCCAAAIIIIAAAggEE4j8mevi4kKpqakOtkHWRgABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQCBkAgQfQ9ZhNBcBBBBAAAEEEEAAAQQyWyAy+MjPXGf2ucDRI4AAAggggAACCCCAAAIIIIAAAggggAACCCCAAAKZKkDwMVN7nuNGAAEEEEAAAQQQQACBUApE/sw1wcdQdiGNRgABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQCChA8DEgIKsjgAACCCCAAAIIIIAAAqkUiAw+rl69LJW7Zl8IIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIImBAg+GiiG2gEAggggAACCCCAAAIIIOBNoK7uoLNgcXGh1NRUe1uJpRBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBIIwGCj2nUmRwKAggggAACCCCAAAIIpLdAW1u71NefcQ6yqmqSTJ5ckd4HzNEhgAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAgggEEWA4COnBQIIIIAAAggggAACCCAQEoHIn7mura2WoqLCkLScZiKAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCQOAGCj4mzZEsIIIAAAggggAACCCCAQFIFIoO
Pq1cvS+q+2DgCCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACVgUIPlrtGdqFAAIIIIAAAggggAACCAwQqKs76PxLcXGh1NRU44MAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIBARgpktbS09HZ3d0tOTo7zn8W/Fy9eSE9Pr+Tl5Ul2dpa5Jvb29oq2Uf+0jVlZ9trY09MjXV1dkp2dLbm5ueYMtUHavu7uHsnLy3XCuuIJAAAgAElEQVTaafHv+fMXov2dn2+1n3ulq+uFcw7quWjxT+tNeGqOzXORmpOYM5uaE9zR/Wyh5gSzDM84Rz9bchnnxNnd1Jw44SJWo+aItLW1S339GUcl3uCjW3OsjmfDMM7Ra5bubq6tgr6r7V9b9V1DM84J1tPhqTl2xznUnGDnoLs2NSe4Y5jmc+yPc6g5Qc5Irq2C6PWtG4Zrq66ubunpCcd9K2pO/Ock45z47SLXfP78ufT2iuH7VlxbJaKnubYKrkjNCW6oW6DmBHdknBPcsH8OmWurIJrhuLYKyzgn27mHavEvE2pO1u7de3v159LmzZst1dUzTYbifv55l9y+3SzvvbdOyspKzd1w7+x8Kt9995NzYfHhh+9KQUGBufP56tVrcvDgMamsnCSvv77AZCjuwIGjcunSVVm5colUVVWaDD9++eV30tbWIX/602YZMWKEuX5uabkvO3fukaKiInnzzRWSn59vro2NjWekr+ZUS3X1DNM1Z8OGt6S0tMRczXnypFO2bv3Z6duNG63WnCY5ePC4VFVVyJIl8w3XnCuycuVSp50WA9duzfnkk49MfrY0N9+TXbv2mq45Wm+07uhT0XScY/FLHj/9tFPu3GmR999fLyUl48zWHA1+fPDBOybPxStXmuTQoeMyeXKFLF5ss+bs339ELl++KqtWLZXKSps155///Fba2h7Jp59arTktsmvXPikuLpK1a22Oc7Tm6H+1tcmpOZHBR91HUVGh77Ge1pzbt1vkgw9s1pzHj5/I999vc2qh1Zpz+XKTHD6sNadSFi+uNTnO0Zqj11b6c+h6DWhxnKM1p739kdgd57TIzp37ZNw4uzWnvv60NDaelfnz58js2TNMjnN+/FHHOc2yceM7Mm5csblxjtacrVu3SU5Otrz//tsmxzlac3ScM2WK1pz5Jq+h9+07LNrO1avfkMrKiSZrzhdffCuPHj125nMsztvdvdssu3btl5KSYlmzRsc59r7M6tacBQvmyKxZ1BzfgzAR5xz8/vvtkpubIxs2rDd5Lur44fDhEzJlSpUzzrH4Bfq9ew/JlSvXZM2aN6SiwmrN2SKPHj0xX3NKS/tqjsUv0J88eUpOnTonCxbMlVmzppsc5/zwww65e7fFuSek16nWHojR0fFItI2Wa87Fi1flyJETMnVqlSxaZL3mLJeKilfNjXM0+KHXVo8fd8of/7jJ5GeLXg/s3r1fSkvHyZo1y03WnBMnGuX06fOycOFcmTnTds3ZtOl3znyQ1ZqjoY/33nvL5Ll48eIVOXLkpEybViULF9qsOXv2HBK9p7927XKZNMlezdEHVX35ZThqTllZiXONanGc019z5snMmdNMjnP0ukXvAVqtOe3tHaJzTlpz9NrKYibiwoUrcvSo1pzJsnBhjclrqz17DsrVq9edXMnEiRPMjXP0C1t6r/zJk6fyr//6ocnPltu370pd3QEpLy+RVats1pzjxxvlzBkd59TIzJlTTdacrVu3S0vLPdm8+T0pLBzre5yTtWPHnt4TJxqkpma2E0SyWBS0sN68eVs2bHhbJkwoN/eG0xDSV199LyK9smnTezJ69Kh45r6Suo5OQOukkN70Wr58scmioIX1woXLTghp6tTJJov/559/Ja2t7fKnP30shYVjktpn8Wy8ublFfvpptxQVjZX169fIyJH2wpk6aaUDOg0haeDacs3RG1+vvGK55ogz4LRYc3SiXG9+VVVNkjfesF5zljmTaxYn8//2t6+cp3t98snHMnas3ZqjTx17663VJmuO1hutO301R8c59m4g6s32W7fuOAGf8ePLzI1zNBDw9dc/OINMncy3XHM0+Lhs2SKT4xy98NHJtVWrljk3Eak5/kc6Ggj4+ec650mH1mtObe0cmTtXxzmJrTkaqtSxqP7FG3z87rufRS/GrdYcDQR8882PzpP+9QseFmuOvpc1WBiGmqMTvK+9Vmmy5vzXf30lOkmpwUeL4xy35mjwcd26VSbHOcePN4gGkZJVc/xX6t+u4dYcDT6Wl9sb5/TVnB8kOzvHCWdarDk6UX7gwBHnvbx06UKT4xy9ka3XgHojW0PhGrCw9qc1p6NDa87HMmbMaGvNcwLC27bVOcHHN99cabLmHDvWIA0Np2X+/LkyZ86shI9zEtEp3377069ha4s1R0NIW7b85IStdSxms+ZcFv1yuuWaoyFh/VKZ7ZrzpXR0PHbGORZrjl4PbN+uNWec4ZpT7zztfuHCeTJnzkyToQV9P2v93rTpXSkr03GOrV8C66s5Pzo3NnWcM2qUvftW589fch7YofMk+sAOi19O0C9gXrrU5ISQtDZa+1K1Bh91DrnvCx42a86tW3dlx4465yETGqyw+FCRY8dOSn392ZDUnN85DwmyVnP0+l7HYnotoOMcizXn3LlLcujQMecetD6ww2LN0S9g6j19fa/ovJO1mqPBR71XrvcM/vjHzSbHOXqvZceOPaGoOYsW1TgPCbIYztT5Wf2Cx+bNfTXHWtjarTkafHz/fa05IxNxSZnQbZw7d9EZ50yfPsX5UpnNmrNXLl++JuvWrXAeEmSv5vTI559//esXPCxeW9282VdzystLnfGixXHO0aMnpKHhrPOF6tmzp5usOXofWrNOmzdvEA2u+605Wc3Nzb36cy86ELEYQHIfmayP8B45cqS5MIC2Ty8s9LHO+ohQixNW2kb9ORrtZx0IWyyq2kbtY21jQUG+yRtz2sZnz5457Rwzxl4ASdunqXf355sshh61jfrI5DDUHG2jDpIsPh2HmpOYcWdYao6eixbDANScxJyH7jiHmhPMk3FOMD93bR3n2K85fT9rYH+ck5uUMEBk8FGf5BfPn163UHPiketfh5oTzI+akxi/l6+tklNzEtFSak5wxXDVnAKTocf++Zwukzfm+q+tMnucE/zd0veze2EY5+j72mIYIHxzyNSceN83fXPI1Jx4/dz1qDlBBd37Vs+dOXj7962oOfH2uFtzdH2LYYDIaysN0FgMIL08hzzKXOhR2+fet7I/zqHmxPtejpzPefHC/rUVNSdYT/ePc6g58Ur2zeeEo+aMGFFgLvRIzYn3zPvtem4+JwzjHM3b+Q096hFnPXjwoDdxZGwJAQQQQAABBBBAAAEEEEAgGQJ1dQedzepTL/VJtvwhgAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAgggkKkCBB8ztec5bgQQQAABBBBAAAEEEAiNQFtbu/NTb/pXVTXJ+bkb/hBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBDIVA
GCj5na8xw3AggggAACCCCAAAIIhEagqemGXLt202lvbW21FBUVhqbtNBQBBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQACBRAsQfEy0KNtDAAEEEEAAAQQQQAABBBIs0NBwRlpb252tEnxMMC6bQwABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQCJ0AwcfQdRkNRgABBBBAAAEEEEAAgUwTiAw+rl69LNMOn+NFAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBA4CUBgo+cEAgggAACCCCAAAIIIICAcYG6uoNOC4uLC6Wmptp4a2keAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAskVIPiYXF+2jgACCCCAAAIIIIAAAggEEmhra5f6+jPONqqqJsnkyRWBtsfKCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCIRdgOBj2HuQ9iOAAAIIIIAAAggggEBaC0QGH2trq6WoqDCtj5eDQwABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQCCWAMHHWEK8jgACCCCAAAIIIIAAAggMo0BT0w25du2m0wKCj8PYEewaAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEDAjADBRzNdQUMQQAABBBBAAAEEEEAAgd8KNDSckdbWdueF1auXQYQAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIBAxgsQfMz4UwAABBBAAAEEEEAAAQQQsCzgBh+LiwulpqbaclNpGwIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIpESD4mBJmdoIAAggggAACCCCAAAIIxCdQV3fQWZHgY3x+rIUAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIJB+AgQf069POSIEEEAAAQQQQAABBBBIE4G2tnaprz/jHA3BxzTpVA4DAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAgsADBx8CEbAABBBBAAAEEEEAAAQQQSI5AZPCxqmqSTJ5ckZwdsVUEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEQiRA8DFEnUVTEUAAAQQQQAABBBBAILMEIoOPtbXVUlRUmFkAHC0CCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACUQQIPnJaIIAAAggggAACCCCAAAJGBZqabsi1azed1hF8NNpJNAsBBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQCDlAgQfU07ODhFAAAEEEEAAAQQQQAABbwINDWektbXdWXj16mXeVmIpBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBNJcgOBjmncwh4cAAggggAACCCCAAALhFSD4GN6+o+UIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAALJEyD4mDxbtowAAggggAACCCCAAAIIBBKoqzvorF9cXCg1NdWBtsXKCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCKSLAMHHdOlJjgMBBBBAAAEEEEAAAQTSToDgY9p1KQeEAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCQAAGCjwlAZBMIIIAAAggggAACCCCAQDIECD4mQ5VtIoAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIhF2A4GPYe5D2I4AAAggggAACCCCAQFoKtLW1S339GefYqqomyeTJFWl5nBwUAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAn4FCD76FWN5BBBAAAEEEEAAAQQQQCAFAgQfU4DMLhBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBEIpQPAxlN1GoxFAAAEEEEAAAQQQQCDdBSKDj7W11VJUVJjuh8zxIYAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIOBJgOCjJyYWQgABBBBAAAEEEEAAAQRSK9DUdEOuXbvp7JTgY2rt2RsCCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggIBtAYKPtvuH1iGAAAIIIIAAAggggECGChB8zNCO57ARQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQRiChB8jEnEAggggAACCCCAAAIIIIBA6gUaGs5Ia2u7s+PVq5elvgHsEQEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAGjAgQfjXYMzUIAAQQQQAABBBBAAIHMFiD4mNn9z9EjgAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggMLkDwkbMDAQQQQAABBBBAAAEEEDAoQPDRYKfQJAQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAARMCBB9NdAONQAABBBBAAAEEEEAAAQReFnCDj8XFhVJTUw0PAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAgj8IkDwkVMBAQQQQAABBBBAAAEEEDAoUFd30GkVwUeDnUOTEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEhlWA4OOw8rNzBBBAAAEEEEAAAQQQQCC6AMFHzgwEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEogsQfOTMQAABBBBAAAEEEEAAAQQMChB8NNgpNAkBBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQMCEAMFHE91AIxBAAAEEEEAAAQQQQACBlwUIPnJGIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIBBdgOAjZwYCCCCAAAIIIIAAAgggYEygra1d6uvPOK2qqpokkydXGGshzUEAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEBg+AQIPg6fPXtGAAEEEEAAAQQQQAABBKIKEHzkxEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEBgcAGCj5wdCCCAAAIIIIAAAggggIAxAYKPxjqE5iCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCJgSyLpy5Wpva2ubFBUVSVHRWMnKyjLVQG3M3bvN0tn5VF599RUpKCgw176urm65ffuuiPTKpEmvSnZ2trk2Pnr0WB48eCijRo2UkpJxJtt4794Defz4sZSWlsjo0aNMnos3btyWFy+eS1VVheTk5Jjr52fPnsndu/ckPz9Pxo8vM9nPehNfa05xcZEUFlJz4jmJ+muOyKRJE0z2s9ac+/cfOu/lkpJik210a05ZWYmMGmW15tySrq4XUllJzYnnvaLrhKHm3LnTLE+fWh7ndP0yzsmi5sR7IooINScA3i+rPn36TJqbbY9zWlvbpa0tMeOcZAUfqTnBz8VwjHPuy+PHT6SsrNS5BrR4nX/jho5zuqSycpLJa6tMqznB3xnRtxCWmqPvkYkTbV9bjRkzSsaNs3lt1dJyX548oeYEeR+5NaegIE/Ky23O5+hcjo5Pxo0rkrFjb
c7nWK85L150yZ07d53PZas1p6Ojbw6ZmhPkHS1y/fot6e62PM55Ks3N94WaE6yf+2vOBCkoyA+2sSSs3V9zsmXixFdMzs92dDySBw9aZcyY0c4cssXrFnecU15eKiNH2ru26u0V0WsryzVH7++qo+Wa8/Bhm7S32x7n3L7dLM+ePXXGEPn5FmvOC9G6mJUVjpqjY1qL9/Pt15xe0XvlPT1dUlFhcz6HmpOYQYVmXzR3YL3m6PtYM0QW38+R4xz7NadMRo4cYW4s1tvr1pxuqaiYaHIO2a05I0bkO3PxFs/F/nFOsYwdO8ZcP2vVClpzsnbu3NN78uQpmTdvtsybVy15ebmJqYYJ3MoPP+yQmzfvyMaN66WszN7k35MnnfLNNz+IXmB89NF7MmLEiAQefWI2dflyk+zff9gJ7C1btsjkoHjv3sNy8eJlWbVqmbz2WqXJwvX3v3/jTPJ++unHTlDK2l9zc4ts21YnRUWFsn79apNB4fr606I1p6am2qk7ubmWa87bv9QcW4Fwt+aIZMnmzVpz7AXCw1FzDsmFC1dkzZplMnmy1ZrztbS2dsif//x7Z2LN2p9bczTI/NZbq0zWHK03J0+elvnz58jcubNM1pzvv98ut27dlY0b3xGdRLU2yavBmS1bfnTatWmTzZpz6dJV2b//iDN+WLp0oclxzp49h5xxzpo1y2XyZJth5s8//1ra2uzWHP0y1Pbte5wvT1itOSdOnJL6+lMyf/7cwDUnWcHHrVu3OxeRH374jnMhbq/mPJYtW34KRc2ZMqVKXn99gdGac1AuXrwia9cuN/ulrc8//0ra2x/JZ5/ZHOdozdm2bY9zA3bdupUmxzknTjSKXl8tWDBP5syZaXKcs3XrNtEbdB9++K7oF46s1RwNMmvNycnJkg8/tDnO0ffygQNHpa/m6Dgnz9plgdTVHZRLl67Im28uN/ulrb6a81g+++xjk9dWepNYxzmlpePkzTdXmKw5x483SkPDaVm4sEaqq2eYrDnffbfNCRZu2vQ754vVVmtObm62bNz4O5PzOTpPcvDgUZk6dbIsWaLjHHs1Z/fuA3L58lWzNUfvEWjN0c8YnUO2OJ8TjprTIA0NZ4zXnJ+dkI/OlWj9tlZz9Gb7t9/+LFpzPvzwdyY/Wy5cuCwHDx5zao5eW+XlWaw5+0XnutetW2Ey5KOBAJ3PefToiXz66Ucma47OQezYsde5HtBrVIsPuDl2rF4aG8/KokU1Mnu2zXGOvp/1OlXvCekDbqzVnPb2DtGxWF5ejmzc+K7Jfj5//pIcOnRcpk17TZYsmW+y5uzatV+uXNGas9L5oqi1AE1PT6/8/e9fO1+2/eQT2zWnvLzEmYu3XHMWL66RWbOs1pyf5O7dFvnoow3OlzCt1pz8/Fz54IN3TPbzuXOX5PDh4zJ9+hRZvLjWaM3ZJ1euXJN161b9Eiy09YC3np4e0XyOZiP+9CetOfZyWLdu3ZGdO/eJ7ZpzUhobz8mSJbUyc+Z0k/M5eh9aH3wSb83JOvr/s3cf0HKc9fnHf5KuunTVe69WtyTbcsO927hiIHROAiEkQCAQkmNKDhCCKQZO4FAC5EDyB0Ix4I6NLduSbMm2LFnNVjPqvV/1cnX/53nFXM8dbZvZ3bvv7H7nHB1Jd2feeefzzr53ZvbZ9315cZMeFAwfPsw9RPUxhLRo0RLbtWuvu/HxsWNV2l0PoZuaTtvFF1/gZceqG/BXX13lPtzUw0kfbyBVvy1bttmkSefYwIH9vQw+zpv3gntodcUVl3jZsWqUoVdeWeZGdTn33CleftNUF+zqc0aMGO4CKj72OS+/vMSNzKUPlfQNDN8u5t7oc5rs4ovP97LP0Yebr722yo1Ucc45Y718UJ6GPkcfIKrP0QdfPl7MacQPfeBOn1PcZ88vv/yK7d69zwX2FOjyrc/R6DP64EsPUv3tc3bYa6+tdn3OhAljPb3OWem+yDN58gQbMKCfl9c5c+cucH2OHqz5+EUefSttyRJd53Tx9jpHH4boOmfkyBE2cuTQoq5zwsHHadMmui+2lGJZuPAVNyqz/32O2cUXn+fldY7C6itXrnYjrOs6x8d7qxUrVpoeupzpc3Rv5ddDK53L6nP0oFzXOT73ORrBfNo0P++tgj5n1KgRLlTv473VG33OTOvZs97D65xj7sN2LRdd5Gufs91WrlzjeZ/zmvsij75Q3b+/rnP863MUlNKolP72OfttyZLl1rVrV5s2bbKXz3P0ZSM90xk9eqQNHz7E0z5nse3Zs9996VvXTr7dW2kkiBdfXOTqpTr6+CGsrh/U5wwY0Nd9IOLjdc7y5a+5L/Koz9G9lW+BAP1OmTNnvpvBSgEfH69zNMLe0qXL3Sh7U6fS5yS9z9IH2bK88MLzvJxN7ejRo/bCC4utbVt/+xx9HrRqlfqc/jZ+/Bj6nAQno57X6d7K7z5nn/uSaLdu3WzatElefnlQXzZav36jC8QpbObjvdVLLy2yvXsPuPsWH2dTUyhFoUL/+5y1NnBgPxs3zs8+R6F/fZFn6tRJXs7sF/Q5msFKoUI/r3PO9Dka1UyOPo6Q2rLP0fMc/2aZ1H2Lnsf73ufo+YOec/t4b6XRUVevXmMDBw6wceNGe3mdoz5H91YaPMbH2ScUfFQ+R59Tnulz/BsMas+eve6L6frdrIG/fOxz9GWjDRs22dixI23oUJ/7nP3uc+gks4y02bVrV5Oml9K0vT5eyOk6X/XT1K46SXTB5NuiX/Kqo954Pnaq8lLdNLWB/Hx8YKU6NjY2Okedhz5OI606njx50tXTxws51U/nouqobxb7OL1GuJ3pc5L3ZGnqc/Qw38fRAehzkp9/4S3pc0rjGFznqN/27YO54HcL1znFtzXXOcUb1lqfs2HDZtu4cYuDK2XwkT6n+HMxXfdW7b0MIHFvVfx5mLZ7K65zkrc5fU5yu/CWPM8p3jG4nk3D8xz6nOTtfabPOenChP4/Q+Y6J2lL19q9VVKnfNtxb5VPKP/raepz1Cf6GLROw72V2lnvFy0+hgG4t8r/Xi1kjfR8buX3dY7eK7rups8p5KzLvA59TnK78JZc5xTvyHVO8YZvXOec9jL0qPqlq8+p8zJoLcdi+5w2e/fubSrNKUcpCCCAAAIIIIAAAggggAACpRAoV/CxFHWjDAQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQqLUDwsdItwP4RQAABBBBAAAEEEECgZgXCAce4CKUcCTLuvlkfAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAgUoKEHyspD77RgABBBBAAAEEEEAAgZoWKCb4eNllF9a0HQePAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCBQuwIEH2u37TlyBBBAAAEEEEAAAQQQ8EAgSfhx+PAhNmLEUA9qTxUQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQaH0Bgo+tb84eEUAAAQQQQAABBBBAAIEWAsuWvWb79zcUpELosSAmVkIAAQQQQAABBBBAAAEEEEAA
AQQQQAABBBBAAAEEEKhiAYKPVdy4HBoCCCCAAAIIIIAAAgikQ+DAgQZbuvS1vJUl9JiXiBUQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQRqQIDgYw00MoeIAAIIIIAAAggggAAC/gvkCz/27FlvU6dO9P9AqCECCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACZRYg+FhmYIpHAAEEEEAAAQQQQAABBAoV2LBhs23cuCXj6pdddmGhxbAeAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAlUtQPCxqpuXg0MAAQQQQAABBBBAAIG0CWQKPzLFddpakfoigAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAgiUU4DgYzl1KRsBBBBAAAEEEEAAAQQQSCDw0kuv2LFjx92WAwb0s/HjRycohU0QQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQqE4Bgo/V2a4cFQIIIIAAAggggAACCOmK0cAAACAASURBVKRYYN68BfZv//Y5mzx5iv3nf34rxUdC1RFAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBAovQDBx9KbUiICCCCAAAIIIIAAAgggUJTAl7/8ZbvvvvtcGYsWLbKRI0cWVR4bI4AAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIFBNAgQfq6k1ORYEEEAAAQQQQAABBBCoCoEFCxbYhz70Ibvgggvsxz/+cVUcEweBAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAQKkECD6WSpJyEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEECg7AIEH8tOzA4QQAABBBBAAAH/BU6fPu1/JakhAggggAACHgm0bdvWo9pQFQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEakuA4GNttTdHiwACCCCAAAI1KkCwsUYbnsNGAAEEEKiYAMHIitGzYwQQQAABBBBAAAEEEEAAAQQQQAABBBBAAIEaECD4WAONzCEigAACCCCAQG0JEHKsrfbmaBFAAAEE0iFAEDId7UQtEUAAAQQQQAABBBBAAAEEEEAAAQQQQAABBNIhQPAxHe1ELRFAAAEEEEAAgbMECDhyUiCAAAIIIJBOAUKQ6Ww3ao0AAggggAACCCCAAAIIIIAAAggggAACCCDgjwDBR3/agpoggAACCCCAAAJ5BQg75iViBQQQQAABBFIlQAgyVc1FZRFAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQ8ESD46ElDUA0EEEAAAQQQQCCbAGFHzg0EEEAAAQRqU4BQZG22O0eNAAIIIIAAAggggAACCCCAAAIIIIAAAgggkF+A4GN+I9ZAAAEEEEAAAQQqIkDgsSLs7BQBBBBAAAHvBAhAetckVAgBBBBAAAEEEEAAAQQQQAABBBBAAAEEEECgwgIEHyvcAOweAQQQQAABBBDIJJAt9NiuXTtr37691dXVmf7dpk0b94cFAQQQQAABBNIl0NTUZPp9f+rUKTt58qT7k2sh/Jiu9qW2CCCAAAIIIIAAAggggAACCCCAAAIIIIAAAuUVIPhYXl9KRwABBBBAAAEEYglkCzx27NjROnXq5AKPLAgggAACCCBQfQIKQh4/ftyOHTtmjY2NWQ+QAGT1tT1HhAACCCCAAAIIIIAAAggggAACCCCAAAIIIBBfgOBjfDO2QAABBBBAAAEESi6QLfDYoUMH69KlixvdkQUBBBBAAAEEakNAAcgjR464ESGzLQQga+Nc4CgRQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEMgsQfOTMQAABBBBAAAEEKiSQK8ygKnXt2tWN8siCAAIIIIAAArUnoBEgDx8+7EaBzLcQgswnxOsIIIAAAggggAACCCCAAAIIIIAAAggggAAC1SZA8LHaWpTjQQABBBBAAAHvBfIFHhVe6N69O9Nae9+SVBABBBBAAIHyCxw9etSN/ljIQgCyECXWQQABBBBAAAEEEEAAAQQQQAABBBBAAAEEEKgGAYKP1dCKHAMCCCCAAAIIpEagkNBjfX09U1unpkWpKAIIIIAAAuUXIPxYfmP2gAACCCCAAAIIIIAAAggggAACCCCAAAIIIJAuAYKP6WovaosAAggggAACKRXIF3gMDqtHjx6M9JjSNqbaCCCAAAIIlFNA014fO3asoF0w8mNBTKyEAAIIIIAAAggggAACCCCAAAIIIIAAAgggkGIBgo8pbjyqjgACCCCAAAL+CxQaeNSRdO3a1Tp16uT/QVFDBBBAAAEEEKiIwIEDB+zUqVMF7ZvwY0FMrIQAAggggAACCCCAAAIIIIAAAggggAACCCCQUgGCjyltOKqNAAIIIIAAAn4LxAk86kjat29vmuKaBQEEEEAAAQQQyCbQ2Nho+/fvLxiI8GPBVKyIAAIIIIAAAggggAACCCCAAAIIIIAAAgggkDIBgo8pazCqiwACCCCAAAL+C8QNPeqImOLa/3alhggggAACCPggEGfKa9WX8KMPrUYdEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBEotQPCx1KKUhwACCCCAAAI1LZAk9NihQwfr3r17Tbtx8AgggAACCCBQmEDcUR9VKuHHwmxZCwEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQCA9AgQf09NW1BQBBBBAAAEEPBdIEnrUIWmKa011zYIAAggggAACCBQicPDgQTtx4kQhqzavQ/gxFhcrI4AAAggggAACCCCAAAIIIIAAAggggAACCHguQPDR8waieggggAACCCCQDoGkoUeFEHr16pWOg6SWCCCAAAIIIOCFwPHjx+3QoUOJ6kIAMhEbGyGAAAIIIIAAAggggAACCCCAAAIIIIAAAgh4JkDw0bMGoToIIIAAAgggkD6BpKFHHSnTXKevvakxAggggAAClRbQtce+ffuKqgYByKL42BgBBBBAAAEEEEAAAQQQQAABBBBAAAEEEECgwgIEHyvcAOweAQQQQAABBNItUEzoUUfepUsX69y5c7oRqD0CCCCAAAIItLrA/v37rbGxMfF+CT4mpmNDBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQ8ECD560AhUAQEEEEAAAQTSKVBs6FFHXV9fb+3bt08nALVGAAEEEEAAgYoJNDQ02MmTJ4vaP+HHovjYGAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQKCCAm2WLl3etHnzNhsyZKANGTLIfHzo/eqrK23fvoM2bdpE69atq7Vp06aCZGfv+sSJE7Z06Qr3wvTpU62urs6r+qkyu3btsT//eYP17t3TRo8eYe3atfOujq+/vt527txtY8eOsj59enl5Li5atNSOHDlqF14408uQSkPDQVu1ao0bOWzixPFetvOmTVttyxb1OYNcv+Njn7NixUrbv9/fPuf48RO2bJn6nDY2ffoUr/scvZdHjRru5bm4du1627Vrt40bpz6nt3e/W9RJL1q0xI4
cOeZ1n7Ny5Rrr0qWzTZgwzstzMehzhg4dZIMHl7bPKUXoUe3csWMnd33DggACCCCAAAIIxBHYtGmzLV36qg0bNtgGDRqQ6N6q3Pdjy5e/ZgcOHLRzz51sXbt28e6a+/jx47Zs2avWpk1bV0cfn+foOcm6dRutb9/eNnLkMC/vrdasWefurc45Z4z17t3Lu3bW++rll5fYsWPHbdasGZ4+z2mwlSvXuveJ7q18fG63ceMW27p1e1F9Tpw+Lsm6QZ8zffpkN7K+b8+Qgz5Hfe+0ab72Obts3bpN1q9fbxsxwtc+58/uWbfffc4rduzYCW/7nAMHGmzVqrXWrVsXO+ccX/uczbZ16w6v+5xly14zPY/X81k9G/Otz9HvPV3ntGuXhj6nj40YMdTL33+rV//Zdu9WnzPWfb7mWzs3NTW5Z8jHjp20WbOme3md80af09U5+nids2HDZtu2bYcNHz7EBg7sn+jeKsm1S5xt0tPntLNp0yZ5eW+1Y8dOW79+s/Xr53Of87rt3r3XJkwYa716+dvnHD9+0i64wO8+p3v3rjZ+PH1OnH4mvG7Q58yYoeucLkmLKdt2x44dM9VRv1N873P69+9jw4f7ep0T9DnjrFevHt5e5/jc5+zf32CrV6+17t272fjxYzy9ztnk7q30XHHAgH5eXufoGffBg4dsxoyp1rlzp9jnYpvZs+c0LVq0zM49d5LrFHwccejRR580hTNvvfV6LxtCQbjf//5R0w3GXXfd4m5yfVsUKpw79wUbOXKoXXzx+daxY0ffqmhz5y6wVatet8svv8hGjx5pdXX+hTP/7//+YPv3H7B3v/tuL0Mqumh/4olnrUePerv++iutUyf/2nnx4uW2ePEymz59kk2d6mef88gjT7pw5m233WD9+/f1rvM/fPiI/eEPj7n38J133uxln7N27TqbN+9Fr/ucOXMW2OrVr9sVV1xso0aN8LrPec977rauXf0LxqnPefzxZ61Xr3q77jpf+5xlpn5HH3xNnTqxZNc5pQo9qpwOHTq6fpsFAQQQQAABBBCII6DAwoMP/tFmzpxikydPSHydU87w48MPP+GCUnfccZP17at7K7++yHro0GF3b6UH5XfccaP7EqFviz5sf/75F92XWC+88Dzr2LGDb1W0Z5553tau/bNdddWbvA1n/uIXv3MPUPU8R+FC3xZ90P6nPz3rvgh8zTWXe/k8R18GfuWV5TZz5jSbPPmcxH1OOe0feugJ27bN3z5H5+ADD/zRPX+4/XZf+5zX7fnnX7IxY0barFkzPe1znjM9d/K9zzl06JC9611+9jn60OvJJ591ofqrr77Myz5HgfUlS1Z43ec8+ODjtn37DrvjjptdiMa3QJxCmaqjvthx++03eHmdo+vZ+fMXet7nzDN9gV7vFR/Dmfpc8pe//L3puvZd73qLl9c5uh948sk5Xvc5Cxe+4r5Udt5559qkSeO9vM4J+hx9JtS3r799Tvv2de6zNR/vrfRFowULFroBeC64YIaX1zlPPz3P9Jn+Nddc5mVQ6vTpJvu///u96XPKd77zLi/7nC1btttTT81xv5uvvvpNXmYigj7n/POnu4GM9L7xbdF9y/btO+0tb7nFy8FjNAvKgw8+YR06tHcZIj/7nDW2YMHLNm7caBcU7tDBv+c5Tz89115/fYNde+1lNmyYf+FMfYaqfM6RI0fsHe/wtc/ZZk89NddlSq666lIv+5yXXlrsgsI6DydM8LXPecy2b9+VuM9ps3r12iZ9K1sP1nShVM4H3kk77I0bN9vBgwdt5MgRXn5z7uTJU7Z+/UZrbGy0MWNGefnLSZ2/HmgoODN48AAvk8Z6yLtv3z4bNGiQC4D49oGIzt/XX1/nRl/TN218DAkrhLtp0xZXN31zwMfwqL6p5Hufo2/3qc9RGM7Hb+uG+xzdoPk4KknQ52gUO40+4+O3OIM+Z/DgQW6qYfqc+L+lgz5HNxbDhg3x8lzUN7I1EkSpr3NKEXxUGfrTo0dPLz9kiH9GsAUCCCCAAAIItKaAvoSyZs1a9yxHI5gX8zynmG1zHfOGDZtc2Exfbkzybd1ye544cdI9z9EHxgr5+HhvpRFydO+ib45r9Bkf7630Yfb+/fttyJDBVl/f3bvgh84jhaSOHvX3eY4+ONy8eav70EYfNvj4PEf3Vbt36xlyHxdcKFe/Ucz7nj6nGL0z26rP0TPk+nr6nGI0FQbXaHsa2czHZ8hBn9OxY3sbOtTXPme3G2WvFNc5xbRlIdc5uobo1Cn+qCTlqldQrmYqW79+k9fXORohR9c59DnJz4amJn1ulZY+p4MNHapnyP4NeqJR1vfsUZ/T19sZ6TZs2GgHDx52n0Nr0BPfwtZv9DlmY8ZooAn/glwa2EZ9jj6D1ohXvt5b7du334143L27f/dWunfWZ+XpuM7p4D638rGd6XOS/94LttTsiLr/06IvitLnJDPVQFDqG33uc/Q858SJ424EVz/vrQ67Qfw6depgQ4b4fZ3Tr19fN1OLj89z9HxWX+Q5c52je6t453SbPXv2NCmwp4Pz8QB1OAoFNDaedhfDvl3IBdwy1C97HztV1VF1Ux3l5+Mv+KCd1dZt27bzMoCkOspQdfSxUw3aWfVLw7lInxOvs46uTZ9TnF9a+pwzv/8aU9DnmJcPrFq2c+muc0oZelQd9UDN19/Nxb/TKAEBBBBAAAEEyiWgL2vpj+6tdC1R7DOdYrfPdJw8zym+9dP0PEfnoc/P7dLxPMffe6vgXOR5TnHva57nFOcXvs+nz0luqffzmWfI/vY5wZdF6XOSt3PweUYaPitIw+dW9DnJz8Wgz1EJvj4Dpc9J3r7Blm/8buGz8mI0g3ORPie5Itc5ye3CW/r+PIc+p3TtrLb2v89p8nLwObUC1zmlOxf1vER5uyTPFtvs3bu3qTRVoRQEEEAAAQQQQKD6BYoNPgYPLwIpgo/Vf85whAgggAACCJRDQFNoHj58uLnoUgQDyhF+LMexUyYCCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAgQfOQcQQAABBBBAAIECBUoRetSuwuUQfCwQn9UQQAABBBBAoIVApuCjVig2vFjs9jQTAggggAACCCCAAAIIIIAAAggggAACCCCAAAKtIUDwsTWU2QcCCCCAAAIIVIVAMcHH8LYEH6vidOAgEEAAAQQQqKhAtuCjKlVseLHY7SsKw84RQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEakKA4GNNNDMHiQACCCCAAALFCpQq9Kh6EHwstjXYHgEEEEAAAQRyBR+lU2x4sdjtaSEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBMopQPCxnLqUjQACCCCAAAJVIVCu0KNwmOq6Kk4RDgIBBBBAAIFWF4gGH1WBaFixmPBiMdu2OgY7RAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEKg5AYKPNdfkHDACCCCAAAIIxBVIGnzMtF30ZwQf47YG6yOAAAIIIICABA4ePGhHjhxpgZEprFhMgLGYbWklBBBAAAEEEEAAAQQQQA
ABBBBAAAEEEEAAAQTKKUDwsZy6lI0AAggggAACqRdIGnrUgUe3jf6/qanJ+vXrZ+3atUu9EweAAAIIIIAAAq0r0NDQ4IKPbdq0abFjRn1s3XZgbwgggAACCCCAAAIIIIAAAggggAACCCCAAAKVESD4WBl39ooAAggggAACKRFIGnzMN9qjQo9aCD6m5ESgmggggAACCHgmEAQfVa1w+JFRHz1rKKqDAAIIIIAAAggggAACCCCAAAIIIIAAAgggUBYBgo9lYaVQBBBAAAEEEKgWgSTBx2zbhH+u4KP+P2DAAEZ8rJaTheNAAAEEEECgFQUUfDx06JAFQccg/Jhteuqk01Yn3a4VKdgVAggggAACCCCAAAIIIIAAAggggAACCCCAQA0KEHyswUbnkBFAAAEEEECgMIEkoUeVnC/4GIz2SPCxsHZgLQQQQAABBBA4W6C1go/aM+FHzkAEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAAB3wQIPvrWItQHAQQQQAABBLwRSBJ8zLWNXgtCjzpIgo/eNDUVQQABBBBAIHUC0eCjDkCjPuYKKSYNMCbdLnWoVBgBBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAgNQIEH1PTVFQUAQQQQAABBFpToNShR9W9sbGxxSG0ZvDx9K7F1rTzZbf/pp0vFEXZdsrfW9t+M4oqg40RQAABBBBAoDiBTMFHldiuXbucBScNMSbdrrijZGsEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBDILEHzkzEAAAQQQQAABBDIIlDr4GB3tUbtsreBj44ofW+Pyr5W0neuu/hXhx5KKUhgCCCCAAALxBLIFH1VKrvBj0gBj0u3iHRVrI4AAAggggAACCCCAAAIIIIAAAggggAACCCBQmADBx8KcWAsBBBBAAAEEakig1KFH0UVHe9TPWiP4WI7QY3AqEH6soTcFh4oAAggg4J2Ago/6U1dXd1bdGPXRu+aiQggggAACCCCAAAIIIIAAAggggAACCCCAAAIlFiD4WGJQikMAAQQQQACBdAskCT3qiHNtl2m0x6amJheGHDRoUN4pKZOKanrrU7PfnnTzgrYj/FgQEyshgAACCCBQcoEg+KiQY5s2bVqUr//nGqEx6eiNSbcr+cFTIAIIIIAAAggggAACCCCAAAIIIIAAAggggEDNCxB8rPlTAAAEEEAAAQQQCAskCT7m2yZT8PHUqVNut+UMPpZztMewGeFH3kMIIIAAAgi0vkAQfNSeo+HHIAhZ6vAjwcfWb2f2iAACCCCAAAIIIIAAAggggAACCCCAAAIIIJBZgOAjZwYCCCCAAAIIIPAXgXwBxmxQ+UZ71HYa4TFYtH6wTTmDj6ee/qCd3vlsq7Svwo/lWtr2m1GuoikXAQQQQACB1AqEg48KJIZDieUKPgqL8GNqTxkqjgACCCCAAAIIIIAAAggggAACCCCAAAIIVJUAwceqak4OBgEEEEAAAQSKEUgSfCwk9Kg6BcFH/a0/1RZ8LMa9kG3b9r/C6q76USGrsg4CCCCAAAI1IXDgwAE7ePCgO9YgjBj8HZ76mlEfa+J04CARQAABBBBAAAEEEEAAAQQQQAABBBBAAIGaEyD4WHNNzgEjgAACCCCAQDaB1gg+Bvsg+JjsPOzw9tXJNmQrBBBAAAEEqkwgU/BRgcfgT/hws4Ufk47emHS7KmsCDgcBBBBAAAEEEEAAAQQQQAABBBBAAAEEEECgggIEHyuIz64RQAABBBBAwC+BuMHHQkd71FEGIz0GIz8SfEzW9u2mfNraTf5Aso3ZCgEEEEAAgSoS2L9/vx06dMgdUTiIqH+HR3yMvh4lSBJiTLJNFdFzKAgggAACCCCAAAIIIIAAAggggAACCCCAAAIeCBB89KARqAICCCCAAAII+CFQ7uBjuHz9WyHIwYMHW7t27coCcOrpD9rpnc+WpexKFlp39a+sbb8ZlawC+0YAAQQQQKDiAgo+aqprhRzDQcTo/1VRpruueHNRAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAIESCxB8LDEoxSGAAAIIIIBAegXiBB/zrRt9PQg6BjrB/wcNGmR1dXVlQavW4COjPpbldKFQBCoqsGPHDluwYIGrw/Dhw23GDMLNxTbInDlzbN++fa6Ya6+91rp27VpskWzvmYCCjw0NDS74GP0SRaYvVZQy/MiIj56dDFQHAQQQQAABBBBAAAEEEEAAAQQQQAABBBCoQQGCjzXY6BwyAggggAACCGQWyBdmDG8VZ5prbdfY2Nhip/p/MOIjwcd4ZyTBx3herI1AGgReffVVu++++1xVL7vsMnv/+9+fhmp7Xcf/+I//sNdff93V8d5777V+/fp5XV8qF18gV/Cx3NNdE3yM315sgQACCCCAAAIIIIAAAggggAACCCCAAAIIIFBaAYKPpfWkNAQQQAABBBBIqUCc0KMOMU7wUQHH6PoEH5OfKAQfk9uxJQK+ChB8LH3LEHwsvalvJQbBR9VLQcRwGDFT8DFYL9NxxA0yxl3fNzvqgwACCCCAAAIIIIAAAggggAACCCCAAAIIIJB+AYKP6W9DjgABBBBAAAEESiAQJ/gYJ/SoqkWDj9o+KGPw4MFMdR2z/Qg+xgRjdQRSIEDwsfSNRPCx9Ka+lZgv+Kj6ahrs8MJ01761IvVBAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQSCpA8DGpHNshgAACCCCAQFUJlCv4qNCjlnD5BB+LO3UIPhbnVytb63126NAhd7idOnWyDh06uH9v2rTJTf+7b98+9zNN/zt+/Hjr2bNnQTTabuXKlW77Y8eOWd++fV0ZQ4YMsfr6+qxlHDx40IWg27dvb507d3Z9wurVq119Tp48aQMHDrQpU6Y011MFHT161O1r165dpu179Ojh9qV1BwwYkHVf2k5lKuDUrVs3t57KeO2112zPnj3OY9CgQW5/dXV1LcrR68F6ek3h7GHDhrnjLGTZvHmzbdy40e1PS58+fZzNyJEjzwpgyUPHpWXVqlX2gx/8wP37wgsvtL/6q79q3l3Hjh1Nf6KL/OWze/duO3DggPPv3bu3TZgwwbp27RrLR/XWedHQ0OCOdeLEiQWfE7lcdHxbt241la86qg3Vfqpju3bt8pLGPUatf+LECVfut771LdcWWj772c+6tgiWXOdq3kqxgjcC0eCjQo5B0DEIOEaDj6p8tvBj3FEc467vDRwVQQABBBBAAAEEEEAAAQQQQAABBBBAAAEEEKgKAYKPVdGMHAQCCCCAAAIIFCMQJ/So/WRbP9PPo8FH/T88AiQjPsZvOYKP8c1qcYtXXnnFvvOd77hDV4ju0ksvte9///umkQUzLZdddpm99a1vzRqYU6Dsf//3f23BggVZOd/ylrfYtdde2yK8qJUVPvvCF77gttPrV111lX33u9+1bdu2tShLI/Qp0Kg+4rHHHrP7778/6750PHfddVfGcN7f/M3fuO369+9vKlPlqLzoovDbv/zLv7ggnvqv3/zmN/bEE09k3KcMr7nmmqyBKQX7fvWrX2X1H
Tp0qL3vfe+z0aNHN5cv03/4h3/Ie3rK9eabb25er7Gx0dXzkUceceHQTMv1119vd999d8ZwYdjn85//vP30pz+1hQsXtihG61xyySV565ZtBdXx0UcftT/84Q8ZV1H49R3veIc7LzMtSY9R+3vooYfy1vtHP/pR1rbMuzEreCOg4KMCtQo3BiHE6N8EH71pLiqCAAIIIIAAAggggAACCCCAAAIIIIAAAgggUGIBgo8lBqU4BBBAAAEEEEifQGsGH4N96W+FmzQSWnTEtVIJnnr6g3Z657OlKs6bcgg+etMUXlckHHxUCE6BR4Xzci0KCv7TP/2TG1UxvChYpBDlunXr8h7zmDFjXJgwPJrfhg0b7Itf/KLbdvr06W50w0yBPYUUNSrfz3/+c5szZ07efSk896Uvfcl69erVYt0g2KfRD2fOnGlPPvlk1rJ0zJ/5zGfsgQcesNmzZ+fc5zvf+U4XfowuK1assG9+85sF1feee+5xo0hqSRJ8PH78uCm0t3jx4rz7u/LKK+3d7373WSNNhn3kvWbNmrPKKib4qDpqtMVM5UZ3dMstt7gAa3gp5hgJPuY9LapqhUzBx2DUx/BojEx3XVXNzsEggAACCCCAAAIIIIAAAggggAACCCCAAAII/EWA4COnAgIIIIAAAgjUtECpQo9CjJYVjPYYvBaM9hj+P8HH+Kcfwcf4ZrW4RTj4GD5+TTGsMKCmXlYQUgG6ZcuWNa+ikQk/97nPNQeS9b5VWG/nzp3N6yispoCjRmfcu3evPfvssy1GDIyOUBgOMDNKIwAAIABJREFUPobromCipnVWCHLt2rUudKlRAsNBRY0IOHnyZDfd9JEjR1xdH3744eZiJk2a5LYLB5uCYF94XypHx62pljU99G9/+9uMp8W4cePcKITDhw93U0hrveDYFbT8+te/7qbqDpYdO3Y4n2DRazp++eq4FPJ88cUXm8vQKJMKWmo6aU3HHdRD+1KbaVEZ4ZEQZ82a5by1aNTNZ555pnl/F1xwgVtX5WracB3b/Pnzm19/85vfbHfeeWeLY83ko32qLE2RrSnIb7vtNjcVeJJFo1Fq9Mtg0Sifmjpb7a1zbt68ec4lWDQN9ahRo5r/X8wxylBTlWt57rnnmgO2M2bMaJ7qWlOd33HHHWcFQpMcK9tUViAIPqoW4bC1Qo+5go9an+muK9t27B0BBBBAAAEEEEAAAQQQQAABBBBAAAEEEECgeAGCj8UbUgICCCCAAAIIpFigNYOP4X1pGlMtBB/jnzwEH+Ob1eIWmYKPmUJwsokG1TTl9Y033tjMtnXrVvve975nChl96lOfcqG+6PLUU0/ZL37xi+Yf/9d//VdzEClT8FFTR1933XVnlaNw489+9jMXpPzrv/7rjFMhr1+/3o30GCwKaobrFA32aUplhe/Cy8svv+yOKbwoYPmP//iPLQJUqs+nP/3p5gDdRz/6UTdqZbAoCKmwoRaFJeWj8GB4UQDyvvvuax4xU+HSYOrvYD2NyKl1tGja8fe///1n2WgExXvvvbf559lGZVTQ8oc//GHzejqmadOmtdguXPh5551nH/rQhzJOi53kvfOf//mftmTJErfpe97zHtPIk+HlxIkT9u1vf9uFH2+66SZ3HiiQqqVUx6iyNILo66+/7sqVW3Qk0yTHxjZ+Cezbt88aGhpcpXIFH/V6oaM+ZgtE5jryJNv4JUltEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBNIoQPAxja1GnRFAAAEEEECgZAKlCj5mKic64iPBx9I0G8HH0jhWeynR4OPll19u733ve7OOcqeRBx977DHHotEDFcILh3k0/bACfD179sxIp/f7V77yleagmUJnGhFSSzT4qBCiwojZFpWlURaD7TOt9/vf/7555MdoADAcfNQohgr1RUNP6o8++clPNoemdMz//u//flZoUfsO7ys83fX27dvd6I3B8tWvftWN5JhpOXjwoH384x9vfknBRwUgg6WQ4ONPfvITe/75590m+Qw1dfeDDz7o1j3//PPtwx/+cPO+wj4aaVHBzg4dOpTsLaFzR8ejJVvAVeeT2iA8eqbWL9UxqiyCjyVrUm8LCgcf1V8F73P9HQ5C6gAIPnrbjFQMAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAIKEAwceEcGyGAAIIIIAAAtUhECf4mGvdfMFHjfAYDkIy4mPy84fgY3K7WtoyGnz81re+5QKN2RaNwhcOx0WnHw62U/hx165dboprbaOR+hSGVOBPQbtgGurwKIPR4GOho+9pKug9e/a4PwoOdu/e3Xr16uWmLNbU2N/85jddtTRq4N133918aOFgX3SExvDxa/sVK1a4H2UbZVGvaeroH//4x269G264wd72tre5fysoGkxVrWDp+973vpynmEbBfOGFF9w60VEo8wUfo+2Tz1CjdGokzGD50Y9+1BxkDft88IMftIsuuqikb41wiFbBxre//e1uGm1NMZ1rKeUxaj8EH0varF4Wli34qMrW1dW1qHOhwUdtFHcEx7jre4lJpRBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQRSJ0DwMXVNRoURQAABBBBAoFQCcUKP2mec4GM45KhtT5061VxtvRaUxVTX8VuT4GN8s1rcIhx87N+/vxuNMd8SDopplMRZs2Y1b7J79257/PHHbfbs2RmLUcBNAcht27a51//+7//eNIWylnDwUeFLhTBzLQpXzpkzxx566KHmKaaj648ZM6Z5dMno6IfhYN/XvvY1F5TMtPz3f/+3Pffcc+4lTS2t8GOmZfny5c11Du8rvL22yxUs1evBlLz6dzhAqf/nCz7u2LHD7rnnnubq5dtXdH+aWlrBUS1hn/DInPnOj0JfVxhNoUu1Y3i5+OKLbdKkSTZhwgTr3bv3WcWV8hhVOMHHQlssvesRfExv21FzBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAgeIFCD4Wb0gJCCCAAAIIIJBSAYKP6Ww4go/pbLfWrnU4+FjIaISqX3ikvvAUxRpdsZDgZPgYswUfp06d2mLK56iLwnJf/vKXmwOUhbjlCj5q6uRsy09/+lObO3eue1nTXiuUl2nRqJDB6JLhfX3961+3lStXFlLFs9ZRCPADH/hA88/zBR9Xr15tmko76aJpvAcNGuQ2DwcfNZJldCS8pPsIb7dlyxY3RfjixYszFqfw480332yTJ09ufr2Ux6hCCT6WoiX9LiMcfNR5HPxRrTXVdfjcznSeZxupMckIjkm28VuX2iGAAAIIIIAAAggggAACCCCAAAIIIIAAAgj4LkDw0fcWon4IIIAAAgggUDaBSgUftd9gREhGfIzfvAQf45vV4hbh4GOuaZzDNr/61a/siSeecD8KpmI+duyYfepTn2oevU8jDd566602fvx469atmx0+fNhNff3888/byy+/3FxctuDjueeeax/72MeyNsl3vvMdU92DRdNYn3/++W5KbU2FrCm2ly5d2lxPredD8FGBzoEDBxZ8qo0cObLFFNP5go9r1qwxTW+tRaNr
vQJZF1ZkddwlfDXd6gYJj+yPW+GoDU4Jq2Q0Od2n610CXANeiXrU3DlkePHjXn69ixozVmGqTTa0Nn8NNAkIYe+/Tp02A5WD+XyspK0z4dn9Qz6HWVqFztoy1btpg+0vL0etUQoM5Wa9Om9duzZ495PehrTNvdpUsX079BNr0m9u7da9qpbe7atatpp7bXb9PXsS4/rf2mAS+16devX6TXldprYFf7VvvTWbLbW7dM2uHXTtu+H1XwcePBTfL4zG9mtXnphh9zEXrUhtsefNQ6ekOSfh0Wdn+/8vg+AggggAACCCCAAAIIIIAAAggggAACCCCAgH0CBB/t6xNqhAACCCCAAAI5ECD4KBImzBhm3zDdV4jBxzA+7IsAAgggkFggTsFHbUHY8GOuQo9DOw+RR6/5UsaXWdOiplJ1rNKU4w0dJpvxMdG+yQKLYYOMYffPGIACEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBHIuQPAx5+ScEAEEEEAAAQRsEAgTfEy1LzM+NpwdMkz/EnwMo8W+CCCAAAKOQFTBRy3v27OekFUHVmcdN2j4MVehR21w0Dr54RB89BPi+wgggAACCCCAAAIIIIAAAggggAACCCCAAAJRCxB8jFqU8hBAAAEEEEAgFgK5CD66z3H+/Pl6Ls73dEnUbC7p65y013f2NOiXMLM4htk3zAVA8DGMFvsigAACCDgCUQYfc7HctVNvv6BhLkOPWie/+gS94jT4WHn0jJntkRkfg6qxHwIIIIAAAggggAACCCCAAAIIIIAAAggggEAmAgQfM9HjWAQQQAABBBCIrQDBx+RLXS/aWdWgXz81oazB15burJZE+4a5KBKV+9FnDsuM7fXrcF2vZvKbKR3CFM2+CCCAAAJ5LBBl8FGZcjXro54rWdgw16FHrcsf3vdUJFcJwcdIGCkEAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAIIQAwccQWOyKAAIIIIAAAvkjEEXwMVEZ7q/FccZHG3qY4KMNvUAdEEAAAbsFog4+5nLWR5X1hh8bI/QY1WyP2h6Cj3a/XqgdAggggAACCCCAAAIIIIAAAggggAACCCCQjwIEH/OxV2kTAggggAACCPgKEHxMPOOjL1wOdiD4mANkToEAAgjEXCDq4KNy5Dp86AQPc31ep+ujmu1Ry0sVfHQvf+1dBtvv305dvfv5Xb5h9/crj+8jgAACCCCAAAIIIIAAAggggAACCCCAAAII2CdA8NG+PqFGCCCAAAIIIJADgWwEH71lMuNjeh1J8DE9N45CAAEECkkgG8FH9ct1CHFo5yGy6sDqnHfd1yY9JgM69Y/svAQfI6OkIAQQQAABBBBAAAEEEEAAAQQQQAABBBBAAIGAAgQfA0KxGwIIIIAAAgjklwDBR2Z8zK8rmtYggAAChSWQreCjKuY6/JjrnotyiWun7gQfc92LnA8BBBBAAAEEEEAAAQQQQAABBBBAAAEEEECA4CPXAAIIIIAAAggUpIANwUetQ+fOnaWkpCTrfdDrO3sanOOFBzvKqB6lWT932BMw42NYMfZHAAEECk8gm8FH1czX8GPUMz06V17Q4KPu716GmqWuC++1S4sRQAABBBBAAAEEEEAAAQQQQAABBBBAAIGoBAg+RiVJOQgggAACCCAQKwGCj7HqLrmuVzP5zZQO8ao0tUUAAQQQyJpAtoOPTsXzJQCpS2rfO+ieSJe3dncuwcesXeoUjAACCCCAAAIIIIAAAggggAACCCCAAAIIIJBEgOAjlwYCCCCAAAIIFKQAwcd4dTvBx3j1F7VFAAEEsi2Qq+CjtkPDj7o9s+bZbDcr8vKzHXh0KkzwMfKuo0AEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABHwGCj1wiCCCAAAIIIFCQAgQf49XtBB/j1V/UFgEEEMi2QC6Dj+62bDy4STYc3FD3pTUH12a7qaHLH9xpkFza6dKsze6YqEIEH0N3EwcggAACCCCAAAIIIIAAAggggAACCCCAAAIIZChA8DFDQA5HAAEEEEAAgXgKEHyMV7995aoy+dSEsnhVmtoigAACCGRNoLGCj1lrUMwLJvgY8w6k+ggggAACCCCAAAIIIIAAAggggAACCCCAQAwFiubMmX9h3bqNMnBgfxkwoJ8UFxdb14xZs+bJ/v0H5Prrr5L27dtJUVGRVXU8c6ZS3nhjlqnTzTdfK6WlpVbVTyuzY8cuWbp0pVx8cVcZOXKYlJSUWFfHJUuWy7ZtO2Ts2FHSvXs3adKkiXV1fOWV6XLixCm5555bpFmzZtbV79ChIzJnzkJp06a1XHnlFdK0aVPr6qjjje1jzsyZc+XAgYNyww1XS7t2ba0dc3QovOkmO8ec7dt3ybJlK6V794tkxIihVo45ixcvl3ff1TFntKmnjWPOyy9Pl5Mn7R9zystby8SJto85A2TAgL717nNsCT62bl0mbdpkP9D3P3NPyn/OPmndz4UgFWK2xyBK7IMAAggUlgDBR7v6O1nwcfbseXLw4BHz3qq8vI15b+V+puN9vpPseU/Y50BB9z99ukLefHO2FBc3kcmTr7HyeY6+Z1m2bLX07HmxDB8+2Mr3VosWLRV9Dzhu3Bjp1q2rle+tpk59Q7S/7777Ziuf5xw4cEjmzVskbduWy4QJl1v5PGfNmvWyfv1mGTz4Munf/xIrnyG/9dY7cvDgYbnxxkl1Y45No+V7Y06xTJ58tcVjzirp1au7DBtm55izcOFS86x7/PjL5aKLulg75lRUVMhdd9k+5rSVCRPGWD3mDBlymfTrZ/eYc9NNk6RNm9r7HJu2U6dOy4wZ70hJSbG5F7Pxc6tt27bL8uWrpVevHjJs2CAr73MWLKgdc/Tns41jjj7f1GfIdo85B2XevMXms5bx4+0cc1avXicbNmyRoUMvk7597RxzZsyYbd5b6efQ+iyZMSf8iBunMWfixLHStWtn6+5znDHnzJkKufNOW+9znDGnnYwfP9rK+5zaMWezDB06SPr27W3leytnzLnllmulrMy+MefkydOi7/+aNi0xGSIb73O2bt0uK1aslt69e8rQoQOtvM+ZP3+J7Ny5W2wdc86fPy+az6msrJQ77rjJyuc5mmNTR82xjRtn55izatU62bhxs3mP36dPLyvHnDfffFsOHToq6Y45RdOnz7ywePEKGTFisIwYMcTKQeHFF1+XXbv2mAcFNr6x0IdWf/7zSyJyQe6//w5p3bpV+LutLB+xadNW0QDpJZd0lyuvHGfloDBz5hzzAPWaayaYB6g2hjN///u/yLFjx+VDH3ogJyGVsJfFvn0H5JVX3pR27drIzTdfJy1atAhbRNb3X7p0hSxevFJGjhwsw4fbOua8Jrt27bV+zNFnaffff7u0amXzmNPDhHBtDAo7Y86kSRPMA1Sbx5wPf/j9UlbWOuuvz7AncMac9u3LTQjX5jFn1Kgh5obO/ebHluBjy5atzAecudiW7qyWRTurcnGqyM5xeY9mMqqHfb9UElkDKQgBBBBAIC0Bgo9psWXtoGTBx6lTp8mePfvkrrtuk
S5dOpkPbWwKPuovGT333FTzwO/ee2+18r2VPid555355sMQ/cDdxvdW+oHIxo1b5LrrrjIPUG18b/XUU8+aXyrT5zk2vrfS18lrr82QDh3am9BeixbNs/Z6TbfgxYuXydKlq2TMmOEyZMhAK58h//Wvr8ju3fvkvvtul86dO1r3QfGJEyfl+edfNq+Re+7RMadlut2RtePWr98k77yzwDwn0XCKjWOOfiCyceNWE+S65JKe1o45p06dkg9+0M4xR18nr78+Qzp27GCC/zaOORqq1+D/mDEj/j7m2PcL/s6Yo89nO3XS+xy7go/OmKOBgLvvtnPMWbduk8yZw5iTyaCuzzd/97s/iwZdP/jB91l5n6OftUyb9pbVY46G6nXMufxye8ecF154Wfbs2W8+E7JzzDkhzz//igmZ3X33LVbe5+ikLDp5TP/+fUw4xcb7nOnTZ8mmTdvMz2cNS2l43aZNQ0j6Wfnp06fl4YdtHXP2yLRpM6VTp47mftHG+xxnzBk7dqT5xbLSUvvuc55/fqrs3XtA3ve+O4ylbWHr48dPyAsvvGLs9JmTje+t4jDm6ORumzdvM88hevfuYV0g7r0xp0Iefvh+K+9zdu6sHXO6dOko119v55ijv8ijv2xk85ijz2c1d5DumFO0b9++C5qQ1SBA8+b2PVTTH+ZnzpyRqqpqEyi08eGpvrHQ8OO5c+es/C0bNaypqRGdmVIf5ts48Gsdq6qqpLKyytyA2JjK1zpqP1dXV5uAim0/4LV+eg1WVJwx98D6erGxjvparqqyf8yprKw2Pzxte1OhfXv+/AXzG5yMOZm93YvLmKOvmXbtGHPS7W3tZ/1TWtpMmjevP1OvLcFHfVBl4wy96ZpzHAIIIIAAArkQIPiYC+Xg53CCj3qEeyZ1fd5UVVUj5eVldV+3KfioD1D1PXQc3lvpe9OWLe0LSWmf67McvefWX4Sy8QOb2uc5p821aPd7qwoRKbL4eU7y91bBR4vs7qnPkPV5jq6EYuOqRvEZc86YZ/A2jzk67rRsafeYU11dY+0z5LNnz5nPXOx+hlz7WYE+y7ExmKJ2eg+hz+1sH3POnz9n5UxN7s+tascc+yZxqL3PqTQ/Wxhz0v8ZXjvmxOM+R8cbW8ec2vucKvM5tL33ORWi9zs2zg733pij9zlNLR9zqsxn+bZ+ZqDvrey+zzn79/scu99bxeM+hzEn/Z9+Tj6HMScTQ+d5ThzGnKKiJmbstjOfE5f3VjrmtDEr84Tdio4cOWI+97dsFv4G7dBK2niRuCtqex1tr59aUsewL+HE+9vuyJhTKP3MuB1FT9v+eo7D2J1szLEl+Ni5c2crf7EjiuuXMhBAAAEEEMiWAMHHbMmmV26y4KNTmjsMaVPw0amf7ffcttcvDu8J4lLH9F6BHBWn57NxuBYZc6J5TcXBMZqWZq+UOBhSx8z7H8PMDePwsyWaVma3FK7FaHxtd7S9fnF5PcfBMZorOnulxMGQOmbe/xhmbhiXcTGalmavlHy/Fk3wMXt8lIwAAggggAACCNgpQPDRzn6hVggggAACCAQRIPgYRCl3+6QKPmrQMVnY0fsLrsl+4TXsL8KG3T93UpwJAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAICoBgo9RSVIOAggggAACCMRKgOBjrLqLyiKAAAIIIFBPgOCjXRcEwUe7+oPaIIAAAggggAACCCCAAAIIIIAAAggggAAChSBA8LEQepk2IoAAAggggEADAYKPXBQIIIAAAgjEV4Dgo119R/DRrv6gNggggAACCCCAAAIIIIAAAggggAACCCCAQCEIEHwshF6mjQgggAACCCDQQIDgIxcFAggggAAC8RUg+GhX3xF8tKs/qA0CCCCAAAIIIIAAAggggAACCCCAAAIIIFAIAgQfC6GXaSMCCCCAAAIINBAg+MhFgQACCCCAQHwFCD7a1XcEH+3qD2qDAAIIIIAAAggggAACCCCAAAIIIIAAAggUggDBx0LoZdqIAAIIIIAAAg0ECD5yUSCAAAIIIBBfAYKPdvWdE3wsKioS/ePevF9zfz/Rvola5t3Pr/Vh9/crj+8jgAACCCCAAAIIIIAAAggggAACCCCAAAII2CdA8NG+PqFGCCCAAAIIIJADAYKPOUDmFAgggAACCGRJgOBjlmDTLJbgY5pwHIYAAggggAACCCCAAAIIIIAAAggggAACCCCQtgDBx7TpOBABBBBAAAEE4ixA8DHOvUfdEUAAAQQKXYDgo11XAMFHu/qD2iCAAAIIIIAAAggggAACCCCAAAIIIIAAAoUgQPCxEHqZNiKAAAIIIIBAAwGCj1wUCCCAAAIIxFeA4KNdfUfw0a7+oDYIIIAAAggggAACCCCAAAIIIIAAAggggEAhCBB8LIRepo0IIIAAAggg0EDAluBjp06dpGnTpvQQAggggAACCIQQIPgYAisHuxJ8zAEyp0AAAQQQQAABBBBAAAEEEEAAAQQQQAABBBCoJ0DwkQsCAQQQQAABBApSwJbgY/v27aV58+YF2Qc0GgEEEEAAgXQFCD6mK5ed40qlqZw5dkaKiorMH/fm/Zr7+4n2TVRD735+rQi7v195fB8BBBBAAAEEEEAAAQQQQAABBBBAAAEEEEDAPgGCj/b1CTVCAAEEEEAAgRwI2BB81GaWlZVJ69atc9BiToEAAggggED+CBB8tKsvm54rkcqTlQQf7eoWaoMAAggggAACCCCAAAIIIIAAAggggAACCOS1AMHHvO5eGocAAggggAACyQRsCT7qbI/t2rWjoxBAAAEEEEAghADBxxBYOdi1qLJIzlbWJAw+NmnSpK4GfjM8JpupMewMjmH3zwERp0AAAQQQQAABBBBAAAEEEEAAAQQQQAABBBCIWIDgY8SgFIcAAggggAAC8RCIIvioLXWX4y3T/e/z5883gNHv6wfzXbt2jQcatUQAAQQQQMASAYKPlnSEiJQUlUjVsUqRCyLukKNTQ4KP9vQVNUEAAQQQQAABBBBAAAEEEEAAAQQQQAABBPJJgOBjPvUmbUEAAQQQQACBwAJhgo9aaLL9U4UdgwQfdZ/27duLzvzIhgACCCCAAALBBAg+BnPKxV6lF5rKmeNnzKkIPuZCnHMggAACCCCAAAIIIIAAAggggAACCCCAAAIIqADBR64DBBBAAAEEEChIAZuCjxp61PAjGwIIIIAAAggEEyD4GMwp23sVSZFI5QU5W3nWnIrgY7bFKR8BBBBAAAEEEEAAAQQQQAABBBBAAAEEEEDAESD4yLWAAAIIIIAAAgUrECb8mM6MjwqbatZH/Z7zp1OnTlJaWlqwfUHDEUAAAQQQCCNA8DGMVvb2LZWmUnG0QoqKiur+uM/mfL3uIVRRUb3K6Pe9+yeqrXe/VC0Ks2/2ZCgZAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAINsCBB+zLUz5CCCAAAIIIGCtgC3BRwVq1qyZdOjQwVorKoYAAggggIBNAgQfG783iouK5VzFWTlXdc5UxhtyTPS1VEHHVIHFMGHGMPs2viI1QAABBBBA
AAEEEEAAAQQQQAABBBBAAAEEEEhXgOBjunIchwACCCCAAAKxF7Ah+KiIzqyP5eXl0rp169i70gAEEEAAAQSyLUDwMdvC/uWXnCuRyhNnTOBRt7DBx6CzPTpl+9eodg+Cj0Gl2A8BBBBAAAEEEEAAAQQQQAABBBBAAAEEEIi3AMHHePcftUcAAQQQQACBDARsCz5qU3TWx+bNm2fQKg5FAAEEEEAg/wUIPjZuH+sS16ePnpYi/S9F8LFJkyb1KuoOJRJ8bNw+5OwIIIAAAggggAACCCCAAAIIIIAAAggggEDcBQg+xr0HqT8CCCCAAAIIpC0QRfBRT+4ux1um+9/nz59vUFdntkdnPw0BdOzYUUpLS9NuFwcigAACCCCQ7wIEHxuvh0uLSuX00VMi5+vP8ugNOWoN3V/zCzqy1HXj9SlnRgABBBBAAAEEEEAAAQQQQAABBBBAAAEE4ihA8DGOvUadEUAAAQQQQCASgWwEH7ViyYKQqYKPznF6rIYE2rdvz8yPkfQyhSCAAAII5KMAwcfG6dWm0lQqjp02oUfdnGBjomWu3d/Xv6cbfAy7dHXY/RtHkrMigAACCCCAAAIIIIAAAggggAACCCCAAAIIZCpA8DFTQY5HAAEEEEAAgdgK5CL4qDhhZn10ZoDU49q2bStlZWWx9aXiCCCAAAIIZEuA4GO2ZBOXq2HC4nPFUnGsQm9sTIjRHXZMNNujNwxJ8DG3fcbZEEAAAQQQQAABBBBAAAEEEEAAAQQQQACBfBcg+JjvPUz7EEAAAQQQQCCpQJjgoxaSbP9Uy1t7jwsy66Ozj5bbokULKS8vZ+lrrmMEEEAAAQRcAgQfc3c5lBY1laqKaqk6VVlvhkd32JHgY+76gzMhgAACCCCAAAIIIIAAAggggAACCCCAAAII1AoQfORKQAABBBBAAIGCFWiM4KN7RkcH3v015+9O3ZwQZKtWrUT/aBCSDQEEEEAAgUIXIPiY3StAZ2csuVAiNZXVUnmysvYBUopZHr3BR2d2R/csj8n+7rQk2RLVYZeuDrt/diUpHQEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQCBbAgQfsyVLuQgggAACCCBgvUBUwUdtqLusVDNAJgo+6vHumSDdMz56Q5HFxcUm/FhaWmr+lJSUmCACGwIIIIAAAoUkcKL6pJyVs4XU5Ky2tUiKpIkUyYVzF+RszTmprqiSczXn6u4xvKFH9zLW3iWttaJRLXPtlBW08dwTBZViPwQQQAABBBBAAAEEEEAAAQQQQAABBBBAIP4CBB/j34e0AAEEEEAAAQQyEAgTfky1r1/Y0V3FIMtduwOP7v3dM0I65/TOEukXwkzUjjBtC8IdxjVIeeyDAAIIIIBAPghkEsxLdaz3e8nCiG5D9z7OjI2JAo56TJjZHt37O+dLVD9vXRL1b1ivsPvnwzVFGxBAAAEEEEAAAQQQQAABBBBAAAEEEEAAgUIVIPhYqD1PuxFAAAEEEEDACIQJ6IUJB6YKQiYKPmpdgs766NQ7VfAxaF1ThSTdl0gYJy4tBBBAAAEEEIhWIEjo0W8paadG3tka9evupardIUj9XtjZHt3HuM/pFvELQiY7zk+V4KOfEN9HAAEEEEAAAQQQQAABBBBAAAEEEEAAAQTyR4DgY/70JS1BAAEEEEAAgTQEwgT6goYJtRp+M0Amm3XRG2Z0ynJ/3f01dzmJZoZ0SIK2028/v++n0QUcggACCCCAAAIJBIKE+ILso0U7+yX6v/triUKRenyyMKS7bKcJfrNNJqpzsnYEbZ/73FxMCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAoUhQPCxMPqZViKAAAIIIIBAEoGwQb5k+6cKOuqpE30/2THuZa6dfbyhRqc5ztfdx3i/l+j8yb4W9EIJ6xa0XPZDAAEEEECgkAXCBv0SWaWaTdEbSvQGHRPNGhk29Kh1SjX7ZNDZHr3l+F0XUdj5nYPvI4AAAggggAACCCCAAAIIIIAAAggggAACCNgjQPDRnr6gJggggAACCCDQSAJhQnxBg4/alChmfXSXkygQ6Xw/0YyQfnVwuMO0qZG6iNMigAACCCCAgCdQ6AUJMpOiX/DRKdMvEOmdOdJ9nP49iuBj2CBj2P25oBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQTiLUDwMd79R+0RQAABBBBAIAKBKIKPWo0wsz56g4pOMxItde2Unep7boZkAUnvObzHpKIMYxRBl1AEAggggAACCLgEUoX6gi4TnWy5az2NN6joXf7aqUqypbDdZaQKPXrPlejf7nOFuQgIPobRYl8EEEAAAQQQQAABBBBAAAEEEEAAAQQQQCD+AgQf49+HtAABBBBAAAEEMhQIE+pLtW+Y4KNW2S/8qPt4l7hONrOjm8C77HWiOqc7y2MYqwy7hcMRQAABBBAoeIF0Ao+K5rectHfmR/cxyQKSiWaLdDooWdjRrx6J6pqozCAXAsHHIErsgwACCCCAAAIIIIAAAggggAACCCCAAAII5I8Awcf86UtaggACCCCAAAJpCoQN84UJDfotd+1UOdV+3vCjHuMNNzpfC1Kee58wQc5UvGEN0+wqDkMAAQQQQCAvBcKE9vxmePRb8jpVGDFZuDFo6FE7J4plrr3lBOn0MIZBymMfBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAbsFCD7a3T/UDgHihqebAAAgAElEQVQEEEAAAQRyIBA2tBcmLBg0+KjNdO/r/Xuy2SEThSK9ZMmOdfbza7/f93PQRZwCAQTSFPjxj38sv/71r+VLX/qSPPTQQ2mWwmEIIGCDgF+wL1Xg0S8sqe0LGnp09nVMgs726D0u0b8TlRnU3s8naDnshwACCCCAAAIIIIAAAggggAACCCCAAAIIIBAPAYKP8egnaokAAggggAACWRQIG+zLJPiozUgVcHQ3M9F+fjM9uoOQycjCzFiZqIywXlnsOopGAAEfgTFjxsjRo0dl9OjR8uyzz+KFAAIxEggS5Et3KWxlaNKkST0Nb4AxSKDRb/ZI9wn8ZqL029ev64J4+ZXB9xFAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQTiI1C0a9fuC2fOVEqLFs3NHxsfFJ84cUKqq89KeXmZNG3a1DpdDRgcO3bC1Ktt2zYNPjywocJVVVVy6lSFlJY2ldatW1nZz6dOnRatp9avtLTUyjoePXpczp07K+3bt7Oyn2tqauTkyVNSXFwsbdqUWWlYUXFGGHMyGxUYczLzc47WMaeyskrKymwec47JuXPnGHMy6PKgY046Qb4w4cFUsz6eOHFSamrOmnG7pKQ4aShSGZLNAun9XpB/O6x+7dAxR+uom9bRG5Lwdk86lhl0sTm0urpGtK/1PsfW+9nTp89IdXW1tGrVUpo2LbHyZ7Tec587d17ati23sn5nz56V06crzDVo6/2s/lzRP82bN5Nmzey4n33qqd/J1KlT5YMf/KDcccft5l7x7NlzUlbW2ow5tm3nz1+QkydPSlGRSFmZnfez+lquqKi0fMypEH1voGNOSQl
jTjrXuf5srqjI7piT7vMX5zh9X1VZWSktW7Yw76FT/YxOFVBUH+f73v+7v+f9e5DvHT9+0ryHLi9vY65F95ZJaDNRn6bjqffaWkcdc2z9+ec8z9GfK/qaTqed6bwGwhyjP1t0bLT7eY6+tzov7du39b2fDdP2qPbV+9lTp06Zn822/vxz3lvpmKP3OjZei8ePnzDvrfT5rHfMiaqvMimHMScTvfeO1TGnqqo6Bs9zGHMy6fHaMeeMtGzZkjEnTcj3xpwiMy7aOG7r+2d9n2/7fQ5jTpoX4d8Pi8N9jl6Hte+tGHPS7W3GnHTl6h/33n1OazM22rgdPWr7e6tq0c//bH5vVTvmVJnnOby3Su8qZ8xJz817lO1jzoULIseOxWXMKTGfudh4z10IY07RrFlzLqxcuUaGDBkogwdfauVDoTfemCm7d++XW2+9Tjp16mjdxaJvwKdOnSb6wrvrrpulWbNm0Yw0EZaybdt2mT9/ifTocbFcccUoKwOk8+Ytlk2btsmVV46VXr26m/Cebdvzz0+VY8dOygc+cLe0aNHCturJgQOH5K233jEfKl177ZVW3hSvWrVOVqxYI8OGDZRBg+wcc6ZNmyl79uiYc7106tTBujFHH/y9/PI0c/3deaedY87WrdtlwYIl0rNndxk7dqSVY87cuYtk8+ZtctVVV5h62jzmPPjgPdK8eXPGnDQEVq5cK/pn2LBBMmjQgJT3OWEDe5nM+ug+9o03ZsnevTrm3GA+iPVuyWaH1P28S1inClg65YZtp94Qv/76TGnSpEhuvvlaS+9zdsjixSukZ8+LZdSooVaOOQsWLBUdGydMGCPdu3ezcsz5299eM6G4+++/3doxZ/bs+dKuXZlMnDjeyvuc1avXy5o1G2TIkMvk0kv7WvHeau7cxXXDil5/M2bomHPIvJ71l3lseyOuY47ei2mAy9YxR1/LS5astHrMmT9/sWzbtlMmTLj872NO/Zn90viRGvkhto85+/cflHfeWSDt29eOORosbMwt0WtV31utXbvRjDmXXdYv4c+WILMsJlviWtub7oyOznGvv/6WqOXtt09uMOYEWX47qHm6Y5l+GPLKK9OluLiJ3H77jVbe52zZ8q7ofcQll/SUMWOGW3mfM2fOQtF6XnPNOOnRo7uVwcK//OUlc5/zgQ/Y+d5q//4D8tZbc837gUmTJjT6mJPotbd8+RpZvXqdjBgxWC67rL8V9zneer722gzZt++A3HHHjVbe5zhjjn4Ie9ttk60cc/Q5ycKFy6RPn54yerSdY47+fNb7sWuuGW+edfv9cl7QnyVR7qdjjvb3lCl3W/neSl8nM2fOlQ4d2so119g65qwWfX81cuQQufTSflaOOa++OkN0/L7zzpukXbu21r230p97r776pgl+2Drm6OdBixbpmNNLRo8eZuV9jj6H2LZth7Vjjj7ne+65qWbikylT7rJyzNm794DMmmX3mLNs2SrzPMfuMedN897K1jFHf3Fe78X0F771ObeNn5Vv2rRVFi1aLn379pJRo+wec/Q9gT5Dtu0+R39h+fnnXxL9Jf/3v9/WMWe/zJo1Tzp2bCdXX934z3MS3WM6Y86oUUNkwAB9b2VfJkKflWjuQLMvNv6iqDPmlJaWyC232DnmbNy4VRYv1jGnt7WfW7399jx5992dMmnSROne/SILx5zzovkcnYTggQfutPI+RzMlb78912RKrrpqnJXPc5YuXWmeIY8ePVT699f3VvaNOS+/PF0OHkx/zCmaO3fhhQ0bNkq/fn1kwIC+1r6xOHDgoFx11Xjp2LG9dS84TeS/+ebbJnxw/fVXm5mGbNt27twt+kP0oos6y7BhQ6z8oFhfcNu375SRI4eah1Y2hpD0pl1noLn11slmpgXbtsOHj4qGuXQGu/Hjx1j5xmL9+k2yYcMm6d+/j/Tvb/eYozfEHTrYN+bozC4zZsw2l991111l5ZizY0ftmNOtWxcZNmyw1WOOhqS6d2fMSWc8O3z4iGigxvYxR8cdvcfRcSfVzNFhA4Fhgo/ukKI3gKgf2hw8eNjcEOsHnfrBfaoAo1+40fv9REtfh2mr/razvvnRTe/FvPc5YcpK5zoLcszu3Xtk5cp10rVrZ/OLPI0dTklU5xUrVouOjXqfc9FFXay8z9H72ZMnT5uwmY1h6yNHjpkPYXXM0eCHnQ9Qt4g+0NDxRgMqNsxWP2/ekrpLcvz40eZe8dChw3LllVdI27Y661VRkJdZzvapHXPmm9nXdMzR33q2bdu1a49o4MzmMWf58lWyc+ceq8ec6dNnmQ/n7B1zjsrChctzOuaEDe9t3LhF9IMbHXP69Old76FVssBj0JkfMwk9uo+dPXueHDx4xAS52rXTlRPeG3NsCD7qL5XpLw9qXfSXB218nqPPSTRw1r17Vxk6dJCV9zlLliyX7dt3yejRI8yDchuf52jw4/Tp0+ZDWJ3Jx7ZN3w/oLyzrDOvjxo2y8j5n3boNsmHDFhNA0ufI+qG2bZuGKvRBub6ebf0Fj5kz55hn2/rBkt1jzkUydOhAK8cc/fBwx45dMmbMSLn44q6WjzmTzUw+tm36IbaG6nWFLZ0kwcb3VmvXbhC919Ggdd++l1g55mh49NAhHXOuqnueY1Nf6y+V6ZijP5c14GrjmPPuuztkxYq10qPHRWZyFhuf52gwUz9fu/zykdKtm31jjj6X08+tdLZ6DX7YO+YskfLycrniipGWjjnrzfMcu8ecOeZ5jn4mZGPYWgP/ei+mY46+/7Px2eJ7Y0438wuEjDnhf2rFY8w5aO5zNKynE7PYeJ+zZs168zxn4MABJhRn42z1eg9RO+ZcLe3a2bdCVDzGnO2yYsU66dHD3jFn4cKlos+6x44dZeXnVvr55muvvSWVlWfk5puvt/Q+p3bM0Z/Nl18+gjEn/I8Wc8TMme/IoUNHTN4unbB10cGDBy/oEiD62+02fDCXyEGX6NKl7fQHk22/2eAEGbSOumSOjW8etY46KGgdRYqsDCBpHbWP9VrUZSptfEiuddSlm3RpQBvfPNbvZ7FyUNU66tTTcRhztI76QTtjTno/nRhz0nPzHlU75py18oO5fB1zwob4woQfkwUWa3/+1Zhx2x0ACDLTo3PNhA1DJjvOew065Wod9eefjQEk515MXy86Ztt6P1v786/GdynSaEaP9Epx7rltnNXafc+ts6zbutSL/vzTa7GkpKk1vzWns6o7m/5ijHPPbesyKjruaB31NWPjQ3LGnPTGl1Tv8+0fcy5Y9d7Kfa+QaMwJEyZMdzZH7U/3sanKeW/MaR4o9OgtO8gVFzYw6i5Txxzbn+foeKiO2k4bP5jjeU6Qq9R/n/feQ9v+PKdGiotLrAwgqbK+nvV5jj6fzWRs8O+x9PbQMUeX+9T+tvUZMmNOen2b+HkOz5Az0XTeQzPmpK/ojDkXLpy39r0VY076/es+ks+tMndkzMnckDEnc0PeW0VjGKf3Vhp4tDH0GJ/3VtVmEhFbnyFznxPNa5r7nMwdnfucfB5zio4cOXIhcypKQAABBBBAAAEE4i+Qze
Cj6oQNKEY582Oi87t7LEzbw+wb/6uCFiAQbwFv8DHeraH2CBSWQJjQTlSBRxVOFWr0fj+d/ROV4fRsmDZnckxhXUm0FgEEEEAAAQQQQAABBBBAAAEEEEAAAQQQyE8Bgo/52a+0CgEEEEAAAQTSEEgn0JfJrI9axTDhRm+TvMtYJ6pLsvr5tdXv+2F4oywrzHnZFwEERLxLXWOCAAKNJ5BOqC9Zbf3KChqE9O7nN+N9mKWyte7JltxOt12JjvOzaLwe58wIIIAAAggggAACCCCAAAIIIIAAAggggAAC2RQg+JhNXcpGAAEEEEAAgdgJhA3phQk+Kkamsz56QRMtiR1lANI5X1iX2HU8FUYgTwXmz19S17Jx40bnaStpFgL5LRA02Jdqv2QBRL/ZHd2yYUOPemy2g49BbfL7CqF1CCCAAAIIIIAAAggggAACCCCAAAIIIIBAYQoQfCzMfqfVCCCAAAIIIJBEIJ2AX5hZFYOEEv3CkYmqHuQYv7b5fd/vosn0eL/y+T4CCIQXYKnr8GYcgUCuBKIK7YUNPGr7/EKMXgO//YMGHNOpa6r+iMowV33OeRBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQSiEyD4GJ0lJSGAAAIIIIBAHgikE97LdNZHZfMLLgapV5BQpdNF6ZaXB11MExAoKIG5cxfVtXfChMsLqu00FoF8FQgS9gu61LUa+ZXnF3pMVEbQIKS7j/zqkag/0zkmX68L2oUAAggggAACCCCAAAIIIIAAAggggAACCBSaAMHHQutx2osAAggggAACKQWCBAITFWBz+FHrG7Z+fpdJuk5+5fJ9BBCIVoDgY7SelIZArgTSCfSFnU0xyDnSCT2qUdjgY5C6eO3TOSZX/cd5EEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBLIvQPAx+8acAQEEEEAAAQRiJpBOqC9ssDDo8th+M0Emow1afqLj02l/zLqY6iJQMAJz5y6sa+uECWMLpt00FIFCEPAL/oWZ9TGRV65Cj3puv7YEqV8h9DltRAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEHhPgOAjVwMCCCCAAAIIIOARSDf4F0X4Mehy1WHqmEkI0k0T5pxcVAggYIcAMz7a0Q/UAoFMBMKGAjMNPGpdoww9JirP7RG2fc6x6R6XSV9wLAIIIIAAAggggAACCCCAAAIIIIAAAggggIA9AgQf7ekLaoIAAggggAAClgikG/ALG3zU5mYSdAxbT7/9/b6fSfdks+xM6sWxCOS7AMHHfO9h2hcXgWyG9PzK9vu+1zDd0KOWE3aJ62THBOnXsO0KUib7IIAAAggggAACCCCAAAIIIIAAAggggAACCMRHgOBjfPqKmiKAAAIIIIBAjgQyCelFFX7UpmYSivSjCtrGoPv5nY/vI4BA4wgQfGwcd86KQDYEwgT9wuzr1DVoaDHMjJJ+9fD7firHTI7NRv9QJgIIIIAAAggggAACCCCAAAIIIIAAAggggEBuBQg+5tabsyGAAAIIIIBATATSDfz5HRd22elshh/dXeFX76DdFlU5Qc/HfgggkFqA4CNXCAJ2C0QV3suknCiCjEFDk97eSLfe6R5n99VA7RBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQTCCBB8DKPFvggggAACCCBQMAKZBPj8js1W+FE7x+/cYTowyrLCnJd9EUAgOoE5cxbWFTZx4tjoCqYkBBBoNIEoQ39hAothApKK41dPv+8nA073uEbrME6MAAIIIIAAAggggAACCCCAAAIIIIAAAgggkBUBgo9ZYaVQBBBAAAEEEMgHgUyCf37HRhF+VOOw5UTVL37ti+o8lIMAApkJODM+tm7dSoYNG5RZYRyNAAJZFchVoC/KAGPYshzAdNua7nFZ7TgKRwABBBBAAAEEEEAAAQQQQAABBBBAAAEEEGgUAYKPjcLOSRFAAAEEEEAgDgKZhPuCHJtOaDHKY+LQB9QRAQQyE3BmfCwr0+Dj4MwK42gEEIi1QKrQYNgAYzplOXiZhBczOTbWnUflEUAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBoIEHzkokAAAQQQQAABBFIIBAkwJjs8yLFRBxn9zun3fS4GBBDILwGCj/nVn7QGgTACQUKCYQOPen5Cj2F6gX0RQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEsiVA8DFbspSLAAIIIIAAAnkhkGlQMMjxqfZJ93uK73duv+/nRQfSCAQKXGDu3IVGoHapa2Z8LPDLgebnuUCQoKMSZCu46Hd+v++n6p5Mjs3zbqd5CCCAAAIIIIAAAggggAACCCCAAAIIIIBAwQoQfCzYrqfhCCCAAAIIIBBUINOAYJDjbQk4BqlrUDf2QwCBxheYO3eRqYQudT106KDGrxA1QACBSATSCQJmEnjUSmfzeD+UdNrrVybfRwABBBBAAAEEEEAAAQQQQAABBBBAAAEEEIi3AMHHePcftUcAAQQQQACBHAlkEggMeqzffpl+X6n8ygjCGUUZQc7DPgggkLkAwcfMDSkBgVwLRBXyC1qO336Zfl/9/MpIZZzJsbnuO86HAAIIIIAAAggggAACCCCAAAIIIIAAAgggkDsBgo+5s+ZMCCCAAAIIIBBzgUwDf0GO99vH7/sOcdT7xbzrqD4CBSuwcOEyqampMTM+stR1wV4GNLwABMKGA4Ps77eP3/cd9qD7JeumTI8vgO6niQgggAACCCCAAAIIIIAAAggggAACCCCAQEEKEHwsyG6n0QgggAACCCCQrkDQQGGy8oMe77ef3/fd5w+zbxTHpWvLcQggEL3AokXL/x58bClDhw6O/gSUiAACORXIJAQY9Ngg+wXZR2GC7pcMMdPjc9o5nAwBBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAgpwIEH3PKzckQQAABBBBAIB8E0g0SOm0Pc7zfvn7f93qH3T/T/sr1+TKtL8cjkG8C7wUfW8nQoYPyrXm0B4HYCeQyyBf2XEH2D7KP0ylh9k3UkZkeH7uLgwojgAACCCCAAAIIIIAAAggggAACCCCAAAIIhBIg+BiKi50RQAABBBBAAAGRKMJ8YcoIum/Q/dx9mM4xXAMIIBAfAZa6jk9fUVMEMhHIJCQY9Nig+2k7wuybqN2ZHp+JJccigAACCCCAAAIIIIAAAggggAACCCCAAAIIxEOA4GM8+olaIoAAAggggIBlAlEEBsOWEXT/oPv5kUZVjt95+D4CCGRPgOBj9mwpGYHGEIgqEBimnDD7qknY/b2OmR7fGP3CORFAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQRyL0DwMffmnBEBBBBAAAEE8kAgylBg2LLC7B9m31x0i231yUWbOQcCjSmgS11XV9dImzYsdd2Y/cC5C1fAphBf2Lpke/9EV0XYcxbulUXLEUAAAQQQQAABBBBAAAEEEEAAAQQQQAABBAg+cg0ggAACCCCAAAJpCkQZ4kunrHSO0aame1yaTByGAAKNKKAzPjrBx2HDBjdiTTg1AgjkWiDdEGE6x6VzjNcji
jJybcz5EEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBpPgOBj49lzZgQQQAABBBDIA4GoQ4Tplpfuce4uiKKMPOhSmoBAXgkQfMyr7qQxCCQUiCIwmG4Z6R7nbUhU5XCJIIAAAggggAACCCCAAAIIIIAAAggggAACCBSOAMHHwulrWooAAggggAACWRKIOjCYSXmZHBuGJ1fnCVMn9kUAgYYCBB+5KhCIp0AugoCZniPT452eiaqcePY0tUYAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBIV4DgY7pyHIcAAggggAACCLgEshEEzLTMTI+ngxFAIP4CCxYsk5qaaikrayXDhw+Jf4NoAQIIZCQQRcgwijLcjYi6vIyAOBgBBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAgNgIEH2PTVVQUAQQQQAABBGwXyEbQMOoyoy7P9j6hfggUuoAz42Nt8HFwoXPQfgQKTiDKUGGUZTkdkY0yC66TaTACCCCAAAIIIIAAAggggAACCCCAAAIIIFCgAgQfC7TjaTYCCCCAAAIIZEcgW8HCbJXrKGS7/OxoUyoCCPgJEHz0E+L7COSHQDYDhNkoOxtl5kdP0goEEEAAAQQQQAABBBBAAAEEEEAAAQQQQACBoAIEH4NKsR8CCCCAAAIIIBBQINshwmyXH7CZYks9gtaX/RAoRAGCj4XY67Q57gI2hAKzWYdslh33vqf+CCCAAAIIIIAAAggggAACCCCAAAIIIIAAAsEFCD4Gt2JPBBBAAAEEEEAgsEAuQoG5OEfgBrMjAghYKbBgwVKprq4RXep6xIghVtaRSiGAgB0CuQgk5uIcdmhSCwQQQAABBBBAAAEEEEAAAQQQQAABBBBAAIFsCxB8zLYw5SOAAAIIIIBAwQrkMpiYy3MVbIfScARiKEDwMYadRpURyLFArsKIuTpPjvk4HQIIIIAAAggggAACCCCAAAIIIIAAAggggEAjCRB8bCR4TosAAggggAAChSHQWIHExjpvYfQqrUQgPgILFiyTmppqKStrLcOHD45PxakpAghkVSDXIcRcny+reBSOAAIIIIAAAggggAACCCCAAAIIIIAAAgggYIUAwUcruoFKIIAAAggggEC+C9gSRLSlHvne37QPAVsEFi5cVrfUNcFHW3qFeiCQOwEbAoc21CF34pwJAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAIFcCBB9zJc15EEAAAQQQQACBvwvYHD60uW5cQAggEF6A4GN4M45AIE4CNocKba5bnPqYuiKAAAIIIIAAAggggAACCCCAAAIIIIAAAggkFiD4yJWBAAIIIIAAAgg0kkA+hAzzoQ2N1P2cFoGcCBB8zAkzJ0EgMoF8CAvmQxsi61AKQgABBBBAAAEEEEAAAQQQQAABBBBAAAEEEMiaQNH69RsvHDhwUDp37igdO3aQJk2aZO1k6Ra8bdt2OXnytPTr11tatGghtj1Er6mpkc2bt5nmDRjQV4qLi9NtataOO3bsuOzevVfatCmTiy++yMp+3rVrjxw9elx69Ogm5eVtrOtn7Zz16zdJVVW1DB58qZSUlGStv9It+PTpCtm+fac0a9ZMevfuYeW1qOPNgQOHrB5ztm7dLqdO6ZhzibRo0dy6a7G6uka2bInLmNNGLr64q+VjzsVSXl5mXT8z5qQ7EtY/Lk5jTt++ep9j75hTVCTSv7+d9zl6/7Bnz15z/9Ctm51jzs6de0Tvx3r2vNjcj9l2PxunMad58+bSq1d3i+9zDkuXLh2lQ4f2Vvz8W7Bgad1S1yNGDBHnPqd//0tELW27Fqurq2XLlndNvfr372NlPzPmRPMz2vb3Vvp+YMeOXeZ1YuuYs3//QTl40J4xJ9F4oq9nfZ9q+5gjUiQDBtg65hyTPXv2SXl5uXTr1sWKny3eUWDnzt1y7NgJq+9z1q3bJPr8btCgAVY+z3HGHH0/0LOnnfc5+/cfMGNO586dpGNHO+5zvNfie2NOH2nevJl19zn6THHrVtvvc2rHnLZty+Wii+wcc3bs2C3Hj5+QXr0ulrIyO99bMeZkfr/ojDldunSy5r0VY07m/eot4ciRY7J3L2NOJrL6C8n63qqm5iz3ORlA7tt3QA4d0vdWnaVDh3ZW3nM79zn6vkXfp9q2VVVVmWdORUVNzPs/Gz8rP3LkqOzdu5/7nAwunjiMOSdPnhJ9j2rze6s4jDmbN78rFRUVJvui761s25wxR7NN+nm+zWNOu3ZtpWvXzlb+bNFnn8ePnzTPPsvKWlv3HtoZc86ePScDB/a38nlO7ZizS1q2bCk9elxs5bXojDl6HernVrZ9JqTji+btKirOpD3mFL311uwLy5atlmHDBsrQoQOladOmto1b8uqrM0xo7/bbJ5uwlG3hTO2Av/3tVblwQeSee241P0ht27Zs2S5z5iwwg9a4cWOkWbNS26oo77yzQDZu3CpXXXWF9OnTy8pB4dln/2Ye5j/00L3SqlUr6ww14DNt2ixp27aNTJ58jQlA2rYtX75Gli9fJcOGDZahQy+zdMx5U3bv3id33DFZOnWyb8zRDw5ffPE10Q/n7r77FkvHnHdlzpyFJoA7btxoKS21b8yZPXuBbNq0Va6+epxccklPq8echx++z9ww2bY5Y067duVyww1XWzrmrJbly1fL8OGDZcgQO8ecV15503ywdOedk6VDB/t+CUXHnL/97TVp0qRI7rrLzjFHb4jnzl0kvXv3lHHjRlk65sw3Y84110yw9pcT/vSnv8nx48fl4Yfvt3LM0YDPG2/Mkvbt28r1119l5ZizbNkqWbFijWjAUH9Rxob3Vt7g48svTzcfLN15503SsaPe5xRZ9eNFgx8vvvi6FBc3kTvvvNnK+xxnzNH7hyuusHPMefvteeZhgc1jzjPP/FVOnDgptt7n6AOh6dPfjtGYo/c5jffLeckemL388hvmgyV9Pdf+si1jTthBV+8f5s1bLJIu+DgAACAASURBVJdc0kuuuGKklfc5zpgzadJEa4PCOubow2h9nmPjeytnzNEP2q+77kor73OWLl0pK1eulZEjh8qgQXqf03hjTrLX0dSpb8i+ffvlrrtqxxzbHubrNfjSS9OkpKTY3IvZGFrQZ7Pz5y82z2bHjrVzzJk1a675RZlrr51obVBYx5xTp07Jgw/aOeboz+Y335xtPvS67rqJVo85o0YNk4EDB1g65kwTHb/1WYkGwhlzwt7liGzcuEXmz18i+svAl18+wsr7nJkz55rQuq1jjgYC9HmOvpeuHXNahO+ILB+xZ89+mTFjtnmdqKONn1stWbJCVq1aJzrm2PqLMnoPoaFw/UzIxtCCvr/Xe7GmTYvljjvsvM/ZsGGz6HMyu8ecOSZAqu8JbAzQnD9/QfSz8tOnT8sHPmDrmLNPZsx4Rzp16iCTJk2weswZPXq4tWGul156XfbtOyj33nurtG/fzrr7HGfMKS0tkdtvv9HK91bOmKPBzDFjhlt6nzNHNEd0ww1XSvfu9oX2zp8/b8YczWNNmXKPlfc5mmN76605Vo85ixcvl1Wr
1svllw+Xyy6zM0CqnwnpZ4DpjjlFq1evvaCdoelOnSHHxjS0/raShs0GDepv5W9x6qwka9ZsEH3haXjUxoDP4cNH5N13d5gZAjSIZONshTqzp/7muIYWbP3Ncf0gWwMgo0cPs/JGSR+gbtq0xdRNfwPDhg/bve+ZNdxTO+Z0MbNV2D3mDLDytxucMefChfMyZIidY86hQ0dk+/Z4jDkaWrBlVi7v64UxJ/Onbs6YozNV6J84jDmZtzraEuI05rRt29Z82G7nfc4OM+OxfoBo62+OO2OOrW/Cnfsc/Q1TnX3Uxvsc/cUJnX3UpjHHG3xcv36jHDt2UgYPHiCtW9v3W5zOmCNyQQYPvszK91Y6C4TOsm7zmKMPyfW9lc1jjv5igj60snfMOWkC63aPOXvNL0805pjj9wH/unUbzW+Oaxi8detW1j0o1xkC1q7dKPqBsf6SjI3Pc/S1rGOOzhBg632OM+b07dvLfCBi2y8s692xjjlnzlTKqFFDrXyeox/abN6sY05zM+Oxnfc5OubslW7dLjLPkW18bxWfMUdkyJBLrR5z9LWss9Xb+N7qvTGnt/kFBcac8M8AnDFHJ3Do18/OMWfXrr2yd6/tY86Gv9/nXGb1fY5eIbXvreyb9MS5z7F5zNGgtT7r1qCUjWOO3sfq85zKykoZOVI/t7JvAoITJ06YX8yze8zZY35JtFu3btK1aydL73Nqxxz9TKhVq5bWvbeqrKwSvRezeczRZ7M6u1kcxhxdBVPfA9p2n/PemFNlfiHK7jGnhZkJ0Mb3VroKpo45F1/cTXR2azvfW20wz5A1+2LrmLN27QYzy6w+c7LxPscZc/TzIA0y2/jeyrnP0deKTnJj65ijz+9GjIjDmKPvrez7JVFdkW7fPrvHnLVr18vx46fSHnOKDh06dOHcuXPmIrZxUNUbJK2f/tEfTH4P1cO/zY/miLNnz5rgo40PybWFWjc1VD8bB1Wnjuqo9bNtUHWuEq2fOtr4G2laR73h1Prp/228kXNfi7aPOTplst4k2TjmOP1s85ijddTXi81jjvOzhTEn/Z+DcRlz9PWsM4fF8T5HjRt7i8OYE4f7nLiMOfb/bDmndzzc54QYGLzBR9vfWzHmhOjcFLsy5mTuWHs/y5iTSDLMeyS9Fnlvldn1yH1OZn5xep4ThzEnDu+tGHMye80w5mTmx5gTjd97nxXY/zwnHmPOBSvDAHH53Cou763OnTtvZQDJ+dxKPyvQFaxsDAMw5kQzdr/3PIcxJxNRxpxM9GqPfe95jjDmZMAZn+c5jDkZdHNdDsv+z8ptv8/RZ8iMOZlei5m8tyo6cuRI43+inokAxyKAAAIIIIAAAnkuYEMAMs+JaR4CeSvgDT7mbUNpGAIIZF0gTOAx65XhBAgggAACCCCAAAIIIIAAAggggAACCCCAAAIFL0DwseAvAQAQQAABBBBAIC4CBCDj0lPUEwF7BAg+2tMX1ASBuAoQeIxrz1FvBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAgvwUIPuZ3/9I6BBBAAAEEEMhzAcKQed7BNA+BDAUIPmYIyOEIFJgAIccC63CaiwACCCCAAAIIIIAAAggggAACCCCAAAIIxFiA4GOMO4+qI4AAAggggAACbgFCkFwPCCDgFViwYJlUV1dLmzatZfjwwQAhgAACDQQIO3JRIIAAAggggAACCCCAAAIIIIAAAggggAACCMRRgOBjHHuNOiOAAAIIIIAAAiEFCEWGBGN3BPJEYOFCDT7WSFlZK4KPedKnNAOBdAQIN6ajxjEIIIAAAggggAACCCCAAAIIIIAAAggggAACNgsQfLS5d6gbAggggAACCCCQJQGCkFmCpVgELBMg+GhZh1AdBHIkQNAxR9CcBgEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQKDRBAg+Nho9J0YAAQQQQAABBOItQHgy3v1H7QtDgOBjYfQzrcwvAUKL+dWftAYBBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAgOwIEH7PjSqkIIIAAAggggEDBCRCELLgup8ExECD4GINOoooFL0DQseAvAQAQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEE0hAg+JgGGocggAACCCCAAAIIIIAAAnEQWLBgqVRX10hZWSsZMWJIHKpMHRFAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBDwFSD46EvEDggggAACCCCAAAIIIIBAPAUIPsaz36g1AggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIBAagGCj1whCCCAAAIIIIAAAggggECeChB8zNOOpVkIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAQIELEHws8AuA5iOAAAIIIIAAAggggED+ChB8zN++pWUIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAQCELEHws5N6n7QgggAACCCCAAAIIIJDXAgQf87p7aRwCCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAgggULACBB8LtutpOAIIIIAAAggggAACCOS7AMHHfO9h2ocAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIFKYAwcfC7HdajQACCCCAAAIIIIAAAgUgQPCxADqZJiKAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACBShA8LEAO50mI4AAAggggAACCCCAQGEIEHwsjH6mlQgggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIBAoQkQfCy0Hqe9CCCAAAIIIIAAAgggUDACBB8LpqtpKAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCBQUAIEHwuqu2ksAggggAACCCCAAAIIFJIAwcdC6m3aigACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggUjgDBx8Lpa1qKAAIIIIAAAggggAACBSZA8LHAOpzmIoAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIFIkDwsUA6mmYigAACCCCAAAIIIIBA4QkQfCy8PqfFCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggEAhCBB8LIRepo0IIIAAAggggAACCCBQkAIEHwuy22k0AggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIJD3AgQf876LaSACCCCAAAIIIIAAAggUqgDBx0LtedqNAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCOS3AMHH/O5fWocAAggggAACCCCAAAIFLEDwsYA7n6YjgAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAnksQPAxjzuXpiGAAAIIIIAAAggggEBhCxB8LOz+p/UIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAQL4KEHzM156lXQgggAACCCCAAAIIIFDwAgQfC/4SAAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQyEsBgo952a00CgEEEEAAAQQQQAABBBAQIfjIVYAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAI
IIJCPAgQf87FXaRMCCCCAAAIIIIAAAgggIAQfuQgQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQTyU4DgY372K61CAAEEEEAAAQQQQAABBJjxkWsAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAgLwUIPuZlt9IoBBBAAAEEEEAAAQQQQIAZH7kGEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEE8lOA4GN+9iutQgABBBBAAAEEEEAAAQSY8ZFrAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAIC8FCD7mZbfSKAQQQAABBBBAAAEEEECAGR+5BhBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBPJTgOBjfvYrrUIAAQQQQAABBBBAAAEEmPGRawABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQCAvBQg+5mW30igEEEAAAQQQQAABBBBAgBkfuQYQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQTyU4DgY372K61CAAEEEEAAAQQQQAABBJjxkWsAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAgLwUIPuZlt9IoBBBAAAEEEEAAAQQQQIAZH7kGEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEE8lOA4GN+9iutQgABBBBAAAEEEEAAAQSY8ZFrAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAIC8FCD7mZbfSKAQQQAABBBBAAAEEEECAGR+5BhBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBPJToGjx4mUXNm/eJn369JJLLukpxcXF1rV00aJlcujQURk/frSUl7eRoqIiq+pYVVUlc+cuMnW68sorpGnTplbVTyuzd+9+WbNmvXTp0kkGDhwgJSUl1tVx9ep1snv3Phk2bJCpZ5MmTayr46xZc+X06Qq5/vqrpFmzZtbV79ix47JkyQpp3bq1jB49zMp+3rLlXdE/ffv2lt69e1g85hyRCRMulzZtyqwbcyorq2TePLvHnD179snatRukS5fOMmjQACv7OQ5jzsyZtWPO5MlXS2lpqXVjztGjx2Xp0hVSVtZaRo2ye8zp16+39Opl65iz1Nzn2D7m6P3XxIljrbzPccacrl07m/scG+9nV61aJ1rP4cMHS+fOHa28z7F/zDkmS5eutHrM2bz5Xdm69V3p1+8S6dWruxXX4oIFS6W6ukbKylrJiBFDZOHCpXL48FGZOPFyKSuz8T6nUubNW2zuv2wdc/Q9y7p1G4QxJ7Nbk5kz50hFxRm54QY773OOHDkmy5atlDZtWsvIkXbe5+iznK1bt1s15niviv/P3n1Ax1Vd+x/f6u62LMkY9967DS5gMKZjG2wIzZRF8h4keemdJOTPIxUIKS/JS30sUnk8MBiwwRTbGDfce+8FN7lKcpOL9F/7iCuPRiNpuvbMfG8WKyDdcu7n3Nm6c+c352gNOnbMbs05c+asLFpkveYckI0bt8jll7eSXr1s3+cMGtRXCgqs3udozTkrN9ww2uR7q0s1p6kMHtzf5POcrVt3ys6du6V7d73PaW/yfvZSzbnS3TNae4Z8qeaku3sxi8+QP/74gGzapDXnMunVq7uJ+1n/vy1r1myQ/fsPyeDBtmuO9rc+Q7b4PEffD6xatVaaN28qgwZZrTk7ZOfOPdScCG659V578eLlrl7rMyebNWe/u89p27a19Oxps+asXr3Bfb42ZEhfyc+3d59TXl4u+rmV7ZpzTFatWuc+39XnEhY/n9yyZYfs2rVHevToLB062LzPWbRouRw/fsJ9Dt2kSWNz9zmnT592s46kp2fIqFHDTNacvXv3u/uctm0vl549u5m8z1m9ev0nNae/e29l7X7Wqzn6OeXYsVcbvc+pqDktWjSTgQOt1pztsmvXXunRo4t06NDO5Hsr6zVHPztdsmS5ZGRkysiRtmtOu3aXS48e9mtOfn6euWuxrKxM5s79SCzXnCNHjsnq1VpzmrvP/yze52zevF12794rPXt2kfbtbdYc/UxIs06jRw+Xxo1Dv89JmzXrw/Lly9fIoEF9ZMCAvib/QE2f/r58/PF+uf32m92HS9YCcfoG8tVXp4v+sb/rrvHSuHGjCN6OxmZT/UBk7txF0qlTO/cBosXQnr450xfdtdeOdKE4i0XhxRdfcy+4hx662z1AtbYcOlQoM2Z8ILm5zeSmm66Thg0bWGuiCyysWLFW9AORippjLyg8bdp7sm/fAbnjjltMhnD1Zu7VV98SzYDfeec4kzVn69YdMm/eYuncub17sGa75oySrl07mq45Dz98tws0W1sOHiyUd96ZLbm5LeSmm8aYrjn6gUj//tSccK6hU6dOuZqj91+TJt1muOYski5dOsjIkTZrjgZ8tmzZLmPGXOW+cGTxPudf/3pVioqK5OGH73UPUK0tXs1p2bKF3Hij1Zqz2t3nDBnSX/r162PiPsc/+Pjmm+/K/v0HZOLE20yGcE+ePCWvvfaWZGRU1JxGjey9t9LX8vz5i91rWR+sWbzPSYSa889/TpHi4hJ5+OF7TNYc/XDz3Xc/kJYtc+Wmm66VBg3svbfSL72tXKk1Z4D069fbRM3x/9vxxhvvyIEDB93r2WIgrqTkpEyd+rb7wGvSpFtN1hx9TrJggfWaM1/0w+LrrrtaOnfW+xx7X6rWmqP9rc9zLN7neDUnLy9XbrzRas1ZJStXrnM1p3//3iY/zNaao182uvNO2zVHXyMTJ1qtOdtkwYIl7tnsiBFDTd7nfPDBfNHnTmPHjpZOnTqYrTknT56UBx+0WXP0dfLee3MkP7+l+xKKxfucZctWudDC0KEDpV+/XkZrzgwXwr3rrtskP18HcrA1YIfea7/++gz3/GHixFtM3uds2rTVDSrStWtnGTFiiMmaM3v2PNHwv355QmuOtS/b6ueS+jxH30s/+OCnTN7n6Gct77//oWigQh0t1hwdgEcDZ4lRc8Z9EsK1VnOK5fXX35GsrEy54w69z2lo7dGibNy41Q0qol9YHj7ces25xuTgMRpC0s/K9TODBx6wXXMKCvLcl1Bs1pwVsmrVBrniikHSt29Pk/c5r7/+thw4UOiyL2ppLYRbXFxRczRncPvtep9jseZskYULl7mA65VXDjZ5nzNr1lzRwRz0OYSVgRx8/3h4NUfD9ZMn32XyPkdzbDNnznWfteh71AYN7A2epoNh6Jd59DrUAassfiFq6tS35ODBw/KpT41394yh1py03bv3lOsbIH3oZ3FkM72wjx49JqdOnZHLLst3wcxQTzLWd1YXL16UI0eOyoULF923Ya0FM/X8z5w5IzoyV05OtguoWGxjUVGxewit7dM/Ttb6WR0LCw+7b861a9fG3BtcbV9p6Tk3moY+ZLH4oZK2UftYa46OOmRxlCH/mmPxg+zEqjk5n9QcW2/CtZ81xKwPhOzXnFLRbwNZe6jmW3M0nGLxm0DUnOjcASVCzdEvoehrWmt2bm5zk/
c52r6SklOioT279zmFcuaM1hyr9zmloqMhUXNCe237Bx8r3luddl/usHqfc/jwUbl4scyNbmbxfQs1J7RrsKa1CwupOZFKlpSUSHHxSdPvrfRbz/qaqag59kYw1+c4+jxHH6Ra/KKtXiNezdEHp/rtcYt10bvPyctrIQ0bWn2eozXnnOH3Volwn+PVnCYmR1PU14u+nitqTiujNeeCaF2k5kT2F/BSzcl1X8C0+QyZmhNZL4t7fqzPkXXwAYsjuFJzIu3hiu31Q+wTJ4pdKKXiPsfmM2R9nqNfTrBcc86ePedGsbP5DNn+fY7WHH1vpSP+W685+r7F4mjCFy4kwn2O/Zqjn+Xrlyfy8lpSc8L8U6MzdVY8Q85wX/Kw+B6amhNm5/ps5tUc/QKA1ZlEvfscvX9o3tzmfY6OJKyfletrRe/HrL230v7VfE5p6Xk3Qrjt+xzrNafE5QGt3ufoZ0KaaWvd+rKwvtyfdvTo0XK9YPQitnYhe7VL21dWVu7e9Fhtoz6w0sXiH09tlxrqP4nQRvvXYpnJouq9XhLlWrTfz+UuWGF1SZR+Fkkz+cDKty7avxapOZG8Dr2/f/b7mZoTjX7mPicSxYr7Rf37YvHNI/c54fetf/DRe2/FfU74pry3Ct/Od0tqTuSOiXOfw9+WSHr7Us3hvVWkjtznRCJ46dmi/fdW1JxIepqaE4nepW2t3+dwPxvNfuZ5TiSa1JxI9C5tq/c4amn1eQ41Jzr9zPOcyB2pOZEb6h6oOZE7Js7zHO5zIultak4ketznREevYi+pUHPSjh07VpGGY0EAAQQQQAABBBBAAAEEEEgqAf/gY1KdHCeDAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCQsgIEH1O26zlxBBBAAAEEEEAAAQQQSHYBgo/J3sOcHwIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCQmgIEH1Oz3zlrBBBAAAEEEEAAAQQQSAEBgo8p0MmcIgIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCQggIEH1Ow0zllBBBAAAEEEEAAAQQQSA0Bgo+p0c+cJQIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCQagIEH1OtxzlfBBBAAAEEEEAAAQQQSBkBgo8p09WcKAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCQUgIEH1OquzlZBBBAAAEEEEAAAQQQSCUBgo+p1NucKwIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCQOgIEH1OnrzlTBBBAAAEEEEAAAQQQSDEBgo8p1uGcLgIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCQIgIEH1OkozlNBBBAAAEEEEAAAQQQSD0Bgo+p1+ecMQIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCQCgIEH1OhlzlHBBBAAAEEEEAAAQQQSEkBgo8p2e2cNAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCQ9AIEH5O+izlBBBBAAAEEEEAAAQQQSFUBgo+p2vOcNwIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCQ3AIEH5O7fzk7BBBAAAEEEEAAAQQQSGEBgo8p3PmcOgIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCQxAIEH5O4czk1BBBAAAEEEEAAAQQQSG0Bgo+p3f+cPQIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCQrAIEH5O1ZzkvBBBAAAEEEEAAAQQQSHkBgo8pfwkAgAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggkpQDBx6TsVk4KAQQQQAABBBBAAAEEEBAh+MhVgAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAgggkIwCBB+TsVc5JwQQQAABBBBAAAEEEEBACD5yESCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCSnAMHH5OxXzgoBBBBAAAEEEEAAAQQQYMRHrgEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAIGkFCD4mJTdykkhgAACCCCAAAIIIIAAAoz4yDWAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCQnAIEH5OzXzkrBBBAAAEEEEAAAQQQQIARH7kGEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEklKA4GNSdisnhQACCCCAAAIIIIAAAggw4iPXAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAQHIKEHxMzn7lrBBAAAEEEEAAAQQQQAABRnzkGkAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEhKAYKPSdmtnBQCCCCAAAIIIIAAAgggwIiPXAMIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAALJKUDwMTn7lbNCAAEEEEAAAQQQQAABBBjxkWsAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAgKQUIPiZlt3JSCCCAAAIIIIAAAggggAAjPnINIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIJKcAwcfk7FfOCgEEEEAAAQQQQAABBBBgxEeuAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAgaQUIPiYlN3KSSGAAAIIIIAAAggggAACjPjINYAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIJCcAgQfk7NfOSsEEEAAAQQQQAABBBBAgBEfuQYQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQSSUoDgY1J2KyeFAAIIIIAAAggggAACCDDiI9cAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIBAcgoQfEzOfuWsEEAAAQQQQAABBBBAAAFGfOQaQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQSEoBgo9J2a2cFAIIIIAAAggggAACCCDAiI9cAwgggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAskpQPAxOfuVs0IAAQQQQAABBBBAAAEEGPGRawABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQCApBQg+JmW3clIIIIAAAggggAACCCCAACM+cg0ggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggkpwDBx+TsV84KAQQQQAABBBBAAAEEqHSopgAAIABJREFUEGDER64BBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQACBpBQg+JiU3cpJIYAAAggggAACCCCAAAIi8+YtdgxNmzaWQYP6QYIAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIBAUggQfEyKbuQkEEAAAQQQQAABBBBAAIHqAgQfuSoQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQSSUYDgYzL2KueEAAIIIIAAAggggAACCAgjPnIRIIAAAggggAACCCCAAAIIIIAAAgggg
AACCCCAAAIIJKcAwcfk7FfOCgEEEEAAAQQQQAABBBBgqmuuAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAgaQUSCssPFx+8eIFycjIkMzMTJMnee7ceSkvL5OsrCxJT08318by8nI5d+6ca1d2drakpaWZa+PFixflwoULzk8dLS7nz1+QsrKL7jrU69HiUlqq/Vxutp/Lysrk/Pnz7hrUfrZ4Lep1qNcjNSf8K5yaE76d75b6WtHXDDUnfM9LNUf/tmRSc8Kk5D4nTDifzS7d52S4a9HiQs2JvFeoOeEZ+k91Tc0Jz9F3K2pO5Ia6h8R5b8V9TiQ9rs9K9P2L/ec5aZKdbfM9NDUnkivw0rbUnMgdLz3PyZTMTJvP7S7VnGxJT7f3fPbS8xxqTiRX5KX3VlmSkWHvswLucyLp3UvbUnMid6TmRG6oe0iMmlPqTtbq55O+z3P0ntviQs2JvFeoOZEb+tYcq++hK+5zqDmR9nYi1Ryrf1uoOZFehRXbe/c5tmtOouRz0t2zRYtLKtSctLlzF5SvXbtR+vbtJX369DAZfpw580M5cKBQbrnlOsnPzzMXrDhz5qy8/fb77mH+hAk3S05OjrnredeuPbJ48Qpp376tXHHFIJPhx0WLlsv27btk1Khh0rFje5Mh19dff1uKikrk3nvvkAYNGpjr58OHj8icOQukWbNmct11V7k3utaWdes2itacfv16u5pjMeSqNWf//kK57baxkpfX0lzNOX36jMyYMdN17fjxN5msOTt37pYlS1ZKhw5tZdgwuzVn27adcvXVV0qHDu3M1pwTJ0rk/vsnmuznwsKKmtOiRXMZM2aUyZqj9Ub/GTCgt/TubbPmvP/+HHefc9ttN0heXq7JmvP22zPdB4fjxt1o8lrUmrN48Urp2LGdDBs20OR9zkcfLXP3OaNHXynt29usOVOnviVFRScN15zDMmfOQsnNbS7XXmuz5qxZs1H0XsdSzfEPPno1Z9y4G6RlS3s159Sp0zJjxizTNWfHjor7nE6d2snQoTZrzsKFS0XbOXr0cPce0OIXCF97bbqUlJyS++6zep+TCDVng6xbt0kGDuwtvXrZvM957705cvBgoYwff6Pk5rYwd59z8uQpeeed2S44o/diFp/nXKo57T+pOfa+5EHNifzJy6FDhfLhhx9Jy5Yt5JprRpp8b7VmTUXNGTSoj/Ts2d3k85xEqTkaHL311utN1hx9z7J06Srp3LmDDB06wORnBQsWLJGdO/fINdeMkHbt2pi9zzl58rR7hmzxb4tXc/QZxOjRWnPsfUC3evV6Wb9+s+ma8+67H8ihQ4dlwoSb3LMxa4MQlJScFG2j5ZqzbdsuWbZslXTp0kGGDLFZc+bPXyL6+ZrVmqOfS+rznFOnzsg999xusubo+4G5cz9yzz2t1pxVq9bJhg1bZPDgvtKjRzeT9zmJUXNmu+eyt9wy1uS1qJ8HLVu22njNWSy7du2Va68dKW3bXm7uPqesrFxefz0xak5+fksZPXqEyc8KLtWcftKjR1eTNUeflehngLfffrM0b97M3H1OcfFJee892zVn69adsnz5KunataMMHmzzPkc/O9i9e6/7jLdNm9YGa06ZaD7n9OmzcvfdE0z+bTlw4JDMm7dICgpaytVX26w5K1eulY0bt8rgwXZrzowZs0WzTnfccbPLOoX63iptzpz55fomsm/fntKvXy+TD9b0Zm7fvoNy661j5bLLCsy94DSENG3au5XBx8aNG0X+xDPKe9AH5R99tNQ9DBoxYqjJoqAPrfSmc+TIK9xNp8URSKdMmSZFRcVyzz13SNOmTaLcS5HvrrDwsMyaNU+aNWsqY8eOloYN7YUz9UH56tXrXNhaw48WH6zZrzmnZdq09yqDj5Zrjn7QPnz4ENM1Z9SoK9wDfZs1503R4ON9990hTZrYrTn6pue66642WXP0Hkf/6d+/t6s7FmuOvoHcv/+Q3Hbb9dKqVb65+xwNIU2f/p67ydTgo8Waox/OLVq0zIWYr7xysMmaM3/+Ynefc/XVw6VTp/Yma84rr7zpvuChIaQmTRpHfmMS5T3oh3OzZ8//JGx9lcmas2rVelmzRmtOH/f+ykLN8YKP2h0agtM3kPpmfNy466WgwGLNOSXTpr1fGXy0WHP0wzmtORq2tlpztN+3b7ddc15++Q3RD2PvvdduzZk1a74LW48ZY7XmrBN9fzVgQB/p08dGzfEv/Rpkrqg5N5isORp8nD79fcnIqLjPadTI3vMcvX/QL4rq/YN+kdVigEYf8FZ8wWOE+yKrxdEAK2rOKRdCsnifo4EAvc/R4KN+4GDxy7YrV66TtWs3yMCBfd2Xyizc5/jXnLffniUHDx5yYev8fL3PsTXq46Wak+7qosWaox/OLV683D0n0S+VWaw5c+cukh07drmQsL4HtFZzystFXnnlDdH+1mfIlmtOXl4L96UymzVnrfsiq+2aM7PyCx4Wa47ea7/11kzJzKz4gofNmrPDDdhhu+Z85L5UpjVH3wNaG8hBg4/6PEfD1hp8tFhz9P3ABx8scMFHDXNZrDkrVqz55Ase/aR37+4mg1L6etZnYzoYhg4SZO0+p7i4RPReTP8uV9SchlF+Mhj57rZs2SFLlqyQLl06ui94WLzP0S9D6Zf89bWi9znWao4GH6dMeVP0M4O777Zdc/Lzc13ttlxzBg/uL716dTNac97/5AseN4uGSEMNIUX+iq19D17NycrSL5XZrDmbN2+XpUtXGq85C92XysaMGekG7LBXc8pE8zmax/rUpyaYvM/Zv/+gG7BDXyf6RRmbNWe1rFu3WSzXHP0cWsPWOtBgODUnrbCwsFyHMNWL2OLocFrSdJoSbWPDhg3NhQG0fd5QuhcuXDQZBtA26tRI3hTIFm/kvKF0tY16HVoMIGkbdfhunZLb4ptHbZ83ZYDeeFoMPWobdShdnWJR3/xQc8K77UuUmqP9rG++LdccbWNODjUnvCuxouZUTJmaCDUn0+QHc9znhHv1Vd1O73OoOZFbJsJ9DjUn9H72Dz7y3ip0Q/8tqDmRGybKeyutOSLlJh9YVX1vxX1OuFel995KX9cWwwDe85zEuc/JMRdA8q6NxLjP0embhJoT7gs6gZ4hU3Mi6ORPpmOreJ5DzQlXsuJ5DjUnXD9vOzXUa1HDPRZHWOc+J9IertheP7PSfm7QIMdcGID7nOj0se97q6ysTJMBpKrPkBuZCz1q+y7VnDKTocdEe29FzQn/9Z1I9znWa47+/dMvpVsLPVJzwn99+G/JfU7klpdqTpq7X7S4ePmcZK45aceOHSu3iE+bEEAAAQQQQAABBBBAAAEEIhPwDz5Gtje2RgABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQMCGAMFHG/1AKxBAAAEEEEAAAQQQQACBqAsQfIw6KTtEAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBAwIEDw0UAn0AQEEEAAAQQQQAABBBBAIBYCBB9joco+EUAAAQQQQAABBBBAAAEEEEAAAQQQQAAB
BBBAAAEE6luA4GN99wDHRwABBBBAAAEEEEAAAQSiIFBUVFxtL2vWbKz82YABvav9vnnzZlE4MrtAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAIL4CBB/j683REEAAAQQQQAABBBBAAIGoC/iO7BjqzkePHh7qJqyPAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAQL0KEHysV34OjgACCCCAAAIIIIAAAghELrB27UY5caL6iI917blDh7bSsWO7ulbj9wgggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAgiYEiD4aKo7aAwCCCCAAAIIIIAAAgggEJ5AqKM+tmjRTPr3rz79dXhHZysEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEE4idA8DF+1hwJAQQQQAABBBBAAAEEEIiZQFFRsaxZszHo/Q8Y0FuaN28W9PqsiAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggIAVAYKPVnqCdiCAAAIIIIAAAggggAACEQrs3v2x7Nmzr869MMV1nUSsgAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAgggYFiA4KPhzqFpCCCAAAIIIIAAAggggECoAnWFHwk9hirK+ggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAtYECD5a6xHagwACCCCAAAIIIIAAAghEKLB27UY5caK42l4IPUYIy+YIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAImBAg+mugGGoEAAggggAACCCCAAAIIRFdg3rzFVXbYokUz6d+/d3QPwt4QQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQqAcBgo/1gM4hEUAAAQQQQAABBBBAAIFYCxQVFcuaNRsrDzNgQG9p3rxZrA/L/hFAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBCIuQDBx5gTcwAEEEAAAQQQQAABBBBAoH4Enn32OZk69TX59Kc/I4899u/10wiOigACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggECUBQg+RhmU3SGAAAIIIIAAAggggAACVgR69OghR44ckeHDh8uMGTOsNIt2IIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIBCRAMHHiPjYGAEEEEAAAQQQQAABBBCwK/Dcc8/Jn//8Z/ne974njzzyiN2G0jIEEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEQhAg+BgCFqsigAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAggggED9ChB8rF9/jo4AAggggAACCJgQKC8vN9EOGoEAAggggAACoQukpaWFvhFbIIAAAggggAACCCCAAAIIIIAAAggggAACCCCQwAIEHxO482g6AggggAACCCAQjAChxmCUWAcBBBBAAIHkFSAYmbx9y5khgAACCCCAAAIIIIAAAggggAACCCCAAAKpKkDwMVV7nvNGAAEEEEAAgaQVIOiYtF3LiSGAAAIIIBA1AcKQUaNkRwgggAACCCCAAAIIIIAAAggggAACCCCAAAL1IEDwsR7QOSQCCCCAAAIIIBCpAOHGSAXZHgEEEEAAAQRUgAAk1wECCCCAAAIIIIAAAggggAACCCCAAAIIIIBAIgoQfEzEXqPNCCCAAAIIIJCyAgQeU7brOXEEEEAAAQRiLkAIMubEHAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAgSgIEH6MEyW4QQAABBBBAAIFYCRB2jJUs+0UAAQQQQACBYAUIRQYrxXoIIIAAAggggAACCCCAAAIIIIAAAggggAAC8RAg+BgPZY6BAAIIIIAAAgiEIUDgMQw0NkEAAQQQQACBmAoQgIwpLztHAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQCFKA4GOQUKyGAAIIIIAAAgjEU6Cm0GN6erpkZ2dLRkaGZGZmioYPCCDEs2c4FgIIIICABYGi0iI5V37eQlMSvg1pUvG/DEl3/y9lIuXny6W8rLzGc+PeI+G7nRNAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQSXoDgY8J3ISeAAAIIIIAAAskkUFPgMScnR/QfDTuyIIAAAgggkOoCJ0pPSOnFc6nOELPz1wBkdlqWC0GWlZbVeBwCkDHrAnaMAAIIIIAAAggggAACCCCAAAIIIIAAAgggUIcAwUcuEQQQQAABBBBAwIBATYFHHd2xYcOGboRHFgQQQAABBBCoECD4GL8rITstW9IuiJSdJwAZP3WOhAACCCCAAAIIIIAAAggggAACCCCAAAIIIFCXAMHHuoT4PQIIIIAAAgggECOBmsKO3uEaN27sRnlkQQABBBBAAIGqAgQf43tF6MiO2ZIlZWd1Huzaj80okPHtG46GAAIIIIAAAggggAACCCCAAAIIIIAAAgikqgDBx1Ttec4bAQQQQAABBOpNoK7AY3p6ujRp0oRpreuthzgwAggggIB1AYKP9dNDOWnZcrH0oqSVpdXZAAKQdRKxAgIIIIAAAggggAACCCCAAAIIIIAAAggggEAEAgQfI8BjUwQQQAABBBBAIFSBYEKPTZs2ZWrrUGFZHwEEEEAgpQQIPtZfd7vw49mLklZO+LH+eoEjI4AAAggggAACCCCAAAIIIIAAAggggAACCBB85BpAAAEEEEAAAQTiIFBX4NFrQrNmzRjpMQ79wSEQQAABBBJbgOBj/fafhh+DmfZaW8nIj/XbVxwdAQQQQAABBBBAAAEEEEAAAQQQQAABBBBIVgGCj8nas5wXAggggAACCJgQCDbwqI1t1KiRNGjQwES7aQQCCCCAAAKWBQg+1n/v5Mgn4ccgmkL4MQgkVkEAAQQQQAABBBBAAAEEEEAAAQQQQAABBBAISYDgY0hcrIwAAggggAACCAQnEErgUfeYlZUlOsU1CwIIIIAAAgjULUDwsW6jWK+RkZYhmeczpOxCWVCHIvwYFBMrIYAAAggggAACCCCAAAIIIIAAAggggAACCAQpQPAxSChWQwABBBBAAAEEghUINfSo+2WK62B1WQ8BBBBAAAERgo82rgI35fWZ4IKP2mLCjzb6jVYggAACCCCAAAIIIIAAAggggAACCCCAAALJIEDwMRl6kXNAAAEEEEAAATMC4YQes7OzpUmTJmbOgYYggAACCCBgXYDgo40eSpM0ybqQJeVBjvqorSb8aKPvaAUCCCCAAAIIIIAAAggggAACCCCAAAIIIJDoAgQfE70HaT8CCCCAAAIImBEIJ/SojdcprnWqaxYEEEAAAQQQCE6A4GNwTvFYK9RRH702EYCMR+9wDAQQQAABBBBAAAEEEEAAAQQQQAABBBBAIHkFCD4mb99yZggggAACCCAQR4FwQ4/6oX9ubm4cW8qhEEAAAQQQSHwBgo92+jAzLVPSzopIeXhtIgAZnhtbIYAAAggggAACCCCAAAIIIIAAAggggAACqS6QtmPHrvKioiJ
p3ryZ+8fiA+eDBwvlzJkz0qZNa9GpIK218cKFi7J//wF3LbVr10bS09PNXVcnT56So0ePSaNGjSQvL9dkGw8fPiqnTp2S/Pw8ady4kbl+1k7du3e/nD9/Xjp2bCcZGRnm+vns2VI5dOiwZGdnyWWXFZjs5xMniqWo6IQ0b948IWpOTk6OuX6+cOGC7N9/UCeJk3btLjfZz9Sc6Fw21JzIHU+cKJKK+5wW0rx505j9bQk39KhnePr0GWnXrm3kJ8seEEAAAQQQSCEBgo+2OjvnYraUnS+r1qgDBw6Jvk9t27bieU5NS3095zl//oIcOHDQ3SO2bWvzvVVJyUk5duy4e07SsqXt5zkFBfnSqFHDmN1zR3LV79mzTy5evCAdOlh9nnNWDh06Ijk52dKqVb7J9/nHj1e8t8rNbSHNmsXuvVUk/VxRc866Z8gWn+ckVs1pLC1btjB5LR4+fEROnTot1JzwXy1nzpyVwkJqTviCFVvarznnXRvT0tLdvZjFz60u3efYrTn6Wjl9+pQUFBRwnxPmiyYxas4JKSoqNn2fs3//ISkt1fucy909o7VFPzul5kTeKxU1x+59jn4Oop9blZVdkPbtbb638mpOgwbZ7n7R4t+/48crao7eb+tsYPX1TKS2K9arOfqspLbnOZFf9eHtwas52r/6/s9iPxcXn5Tjx49LkyZ6n5Nrsp+9+5xWrQqkYUN7z3Mu1ZyL0r59W5P5nESoOceOnZDiYus156CUluoz5PBqTtqcOfPLV61aJ/3795Z+/XpLVlZmeNUlhlu9++4Hsm/fQRk37nqTf6A0sDBt2rtSXi4yceIt0qBBgxhqhLfrHTt2y8KFS6VDh7YyYsRQk3+gFixYIlu37pDRo4dLp04dTBauKVOmiQb3Jk+e5EKk1pbCwsMya9Y8Fyi8/vrRJh/yrlmzQbTmDBjQR/r16yWZmXZrzvjxN0h+vt4Up5nqan3TM23ae65Nd9xhu+ZoSHj48CHGa84I6dSpvdGa86acOFEiDzxwp7vhtLZozZk5c57k5jaXsWOvNllzVq9eL/rPwIF9pW/fnjGpOZGEHrVP9UObnj17WOte2oMAAggggIBpAYKPtronR7Kl7Gz14OM778wSfVg+YcLNkp/fstaHvPXxoF+/sDV9+vuSkZEmEyboeyt7X3zbtm2nLFq0XDp37iBXXDHI5HurefMWyfbtu+Taa0eaDRa+/PIbUlJyyj3PsfjeSr/0PXv2fPfB13XXXWXyvdXKletk7doNMnhwP+ndu0dM3ltFWtnefnuWHDx4SG6//WbJy6u95kR6rHC292pOZma6jB9/s8mao89mFy9eIV26dJRhw7TmZIVzqjHdZu7cRbJjxy4ZM2aUyQ+/9DOCV155Q7S/77/fds3RARLU0WJQeMWKtbJu3UYZPLi/9O7d3WTNeeutmXLoUKHcfvstbrCJ+riXqe3FpqFCbWOi1By9z8nKsldzPvzwI9m5c/cnNUdDPrYGPtHnkq+88qacPHla7r9/osn7HA3DffDBAvd+QO8XbdacNbJu3SYZMqS/9Opltea87wY+0c+ELAZoiotLRO/FsrIyZPz4m0z285YtO2TJkhXStWsnGTZsoOmao+8JdLAEazWnrKxcpkx5030J5b77bNYcfQYxZ05i1JyhQ/tLz542a44+K9GaM2nSrS4Ubu0+p6LmzHSv4/HjbzRZczZv3i5Ll66Ubt06y9ChA0zWnDlzFsrOnXtk7Nir3HsrawHSsrIy0XyO5rHuvfcOk/c5OliVOhYUtJRrrrF6n7Na1q3bLLZrznvuC8Hh1py0FStWlWsoTr/xrMEPiyGkFSvWuNEKhw4d5MIV1gqrJk8XL14p5eVlLlRo8aZdH6Bu2rRF8vLypFevbiYL68aNW9zImb169ZTWrQtMhpA0nKkPrbRoNWxoL+Cq3w7RUGGjRg1kwIB+Jr/1tWvXbtmxY4907NjejZxpueboA94WLag54Tyt9mqOjuDas6fVmrPZher79Okpl13WytwbSHX3ao4+ELIYqtcg+OrVWnMayoABfU3WHH0wqTftsao5kYYetZ/1YYG+VlgQQAABBBBAIHgBgo/BW8VjzZy0bCk7Uz34uGLFajda4bBhg+sc8b8+nvXoyHBLlqx0o+kPHz7Y5PMc/aB48+at7ovAPXp0Nfk8Z8OGTS7g2rdvL7OzT8yfv9g9KL/mmhEm31vpaIr6ha2mTRtJ//59TAZc9fnxrl17XAhXPxCx+Dxn+fJVcvToCbnyyiExHfE/3LqqI0HoB19a76680mbN0Q9ttmzZ5kYe7d7dZs1Zv36TG1FKB3GwOkLqpZqjz3Pshep1lKHVqzdI06aN3YAYFkfx0UD97t1aczq6EaUyM+3NvrRs2SrR0VP0S98WR8JNlJqzefM2d//QvXsXk/c569dvlAMHCs3WHH02qc+Qdda80aNt1hx9neiAGNSccO8gKrazXnP0XlvbqKEZDTJb/KxcPw/S+xz9DLpbN9s1R/8+WxytMDFqznFXc5o1a+Jqt+X7nC5dOrmAq837nJVy7FiRjBgxxOSolIlRcw7Ili3bpXXrVi78aPELHvpFI/1M32rN0eCjDu5m+T7n6NHj7kui+n5AB/2yWHP0S9W7d+91wX+rNUefleizsXBrTtrhw4fLddpUnbbX4gMrvZnT9ul00nqRWBt5Tdunf+S1jfrCs3gjVxGqKHNt1AdrFouqtvHixYuujXodWpxGWtuowyZrOy0GkLxrUduo3yy2ONS9bz9brznaz1pz6uPDt7re/lJz6hIK7vfUnOCcaltLr8VUrjnRCD2qb7Nmzczeg0V+lbAHBBBAAAEEYiNA8DE2ruHuNTstW8oDBB+95zn6/jSY91bBrBNuGwNtlyjvrXR6XH0eZv95TpbJL5Ql0vMcbavFh+SJ9DxHnyEHW3OiWU+C2Rc1JxilutfxnudoTbQ2IonX+opnyGUmQ4++z5CpOXVfb7WtEep9TmRHC31rak7oZoG2oOZE7ug9Q6bmRGZJzYnMT7fWz8oT5b0V9znh97eXiUiMmpNpMvSodtSc8K9Bb0tqTuSGl57n8N4qEk3vfjYjI3lrTtqxY8fKI0FiWwQQQAABBBBAINUEohV61P20aNHCbOA/1fqV80UAAQQQSBwBgo+2+io7LUvKTpcFFW6sreXxDj7aUqQ1CCCAAAIIIIAAAggggAACCCCAAAIIIIAAAqEIEHwMRYt1EUAAAQQQQACBT0Z7jhRCQ4/6T25uLsHHSDHZHgEEEEAg5QQIPtrqcg0+Xjx1MSojbxF+tNW3tAYBBBBAAAEEEEAAAQQQQAABBBBAAAEEELAqQPDRas/QLgQQQAABBBAwKRCN0R690KOeIMFHk91MoxBAAAEEjAsQfLTVQV7wUVulwcVIw4uRbm9Lh9YggAACCCCAAAIIIIAAAggggAACCCCAAAIIxEKA4GMsVNknAggggAACCCStQKTBR9/QoyIRfEzaS4UTQwABBBCIoQDBxxjihrFr3+Cjbk74MQxENkH+8ZdGAAAgAElEQVQAAQQQQAABBB
BAAAEEEEAAAQQQQAABBBAISYDgY0hcrIwAAggggAACqSwQjdCj+vnuh+BjKl9RnDsCCCCAQLgCBB/DlYvNdoGCj3qkSEdujHT72Jwte0UAAQQQQAABBBBAAAEEEEAAAQQQQAABBBCwIEDw0UIv0AYEEEAAAQQQSAiBSIKPvtsSfEyI7qaRCCCAAAKGBQg+2uqcmoKP2spIw4uRbm9LitYggAACCCCAAAIIIIAAAggggAACCCCAAAIIREuA4GO0JNkPAggggAACCCS1QLRCj4pE8DGpLxVODgEEEEAgDgIEH+OAHMIhags+6m4iDS9Gun0Ip8KqCCCAAAIIIIAAAggggAACCCCAAAIIIIAAAgkiQPAxQTqKZiKAAAIIIIBA/QnEKvSoZ8RU1/XXrxwZAQQQQCBxBQg+2uo7/+Cjts4/rBhpeDHS7W2J0RoEEEAAAQQQQAABBBBAAAEEEEAAAQQQQACBSAUIPkYqyPYIIIAAAgggkPQC4QYfA23n/zOCj0l/+XCCCCCAAAIxECD4GAPUCHaZlZYlZacuVtlDoKBiJOHFSLaN4NTYFAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQMCoAMFHox1DsxBAAAEEEEDAhkC4oUdtvf+2gf67ZcuWkpGRYeNkaQUCCCCAAAIJIkDw0VZHafDx4skLQY3yGEmAMZJtbYnRGgQQQAABBBBAAAEEEEAAAQQQQAABBBBAAIFIBQg+RirI9ggggAACCCCQ1ALhBh/rGu3R+z3Bx6S+fDg5BBBAAIEYCRB8jBFsmLv1go+6uW84kVEfwwRlMwQQQAABBBBAAAEEEEAAAQQQQAABBBBAAIE6BQg+1knECggggAACCCCQygLhBB9r2sb35/rv+k9eXl7cRnxcsfec68qle0sTqks/O6ppQrWXxiKAAAIIxF6A4GPsjUM5ggYfL5Scrww9eoHHmkZoDHfkxnC3C+VcWBcBBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAgMQQIPiZGP9FKBBBAAAEEEKgHgWiGHrX53v58/z9ewcc/LSyRn84rqQfF6Bxy6uR8GdI+Ozo7Yy8IIIAAAgkvQPDRVhf6Bx+1dRpSrC2oGG6IMdztbInRGgQQQAABBBBAAAEEEEAAAQQQQAABBBBAAIFIBQg+RirI9ggggAACCCCQtAKxCD76j/oYr+Bjx2f2J3Q/je2YIy/cl5fQ50DjEUAAAQSiJ0DwMXqW0dhToOCj7jc9Pb3W3YcTYgxnm2icI/tAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQsCVA8NFWf9AaBBBAAAEEEDAiEO3Qo55WWVlZlbOL51TXBB+NXFg0AwEEEEAgKgKJGnzccnirO//NhzfL+sMb3L/3LehTxWRCn/FRMYrnTmoKPjLqYzx7gWMhgAACCCCAAAIIIIAAAggggAACCCCAAAKpJUDwMbX6m7NFAAEEEEAAgSAFoh181P3575PgY5CdISKM+Bi8FWsigAACqSCQSMFHDTu+tmGqrC1cF3TX9G/VzwUiEyUEWVPwUU+YUR+D7nZWRAABBBBAAAEEEEAAAQQQQAABBBBAAAEEEAhBgOBjCFisigACCCCAAAKpIRDt0KOq+Y/2qD8j+Bj89UTwMXgr1kQAAQRSQSARgo/hBB4D9d19fe8xH4DU4OP54nMBQ46M+pgKr0jOEQEEEEAAAQQQQAABBBBAAAEEEEAAAQQQiL8Awcf4m3NEBBBAAAEEEDAsEE7oUU+ntu1qGu1Rf56fny8ZGRkxFwk01fXUyfkypH12zI8d6gE+/dJRmb27tMpmBB9DVWR9BBBAILkFLAcfp22YLi+tfznqHWA5AOkFHwOFHPVnunj/Hwimtt/VBBnONlHvFHaIAAIIIIAAAggggAACCCCAAAIIIIAAAgggUG8CBB/rjZ4DI4AAAggggIBFgXCCj3WFHvU8/dfxRoAk+Fj9KiD4aPGVQZsQQAABWwJWg4+xCj16+lbDj17wUdvpH36MVfDRO5atK5PWIIAAAggggAACCCCAAAIIIIAAAggggAACCMRLgOBjvKQ5DgIIIIAAAgiYFwgn9KgnFWrw0XcESIKP1S8Lgo/mXyo0EAEEEKh3AYvBx6c/fFbWFq6Li82TY34gPQq6x+VYwRzEP/io2wQKPDLqYzCarIMAAggggAACCCCAAAIIIIAAAggggAACCCAQjADBx2CUWAcBBBBAAAEEUkIgnOBjMKFHxfPW8/9/go/VLy2CjynxcuMkEUAAgYgELAUftxzeKq9tmBq30KMHZyn8WF/BR7VgyuuIXkpsjAACCCCAAAIIIIAAAggggAACCCCAAAIIJKwAwceE7ToajgACCCCAAALRFiD4GG3R8PZH8DE8N7ZCAAEEUknAUvDxgVcerjf6f93993o7tu+BAwUf9ff+0157PwvU6HADjOFuZwKORiCAAAIIIIAAAggggAACCCCAAAIIIIAAAgiELUDwMWw6NkQAAQQQQACBZBMINfgY7GiP6uQ7vbX33/r/Fkd8/NPCkmpde0X7HBnSPrvazwOtG8p18dlRTautTvAxFEHWTQWBkpIS2b59u2zbtk2KioqkdevW0rNnT+nWrZtcvHhRFi5cKKtXr5ZBgwbJqFGjJD09PeosH374oZSVlUmjRo1k+PDhUd9/suwQp/j1pJXg47QN0+Wl9S/H78T9jtS/VT95/Npv19vxvQNr8PFcUWm1oGMowUfdVzghxnC2qXcwGoAAAggggAACCCCAAAIIIIAAAggggAACCCAQsQDBx4gJ2QECCCCAAAIIJItAvIOPeryCggLJyMiIOWHHZ/ZXO8bUyfnVwowr9p6TSS8eiWjdUE5m93faVFud4GMogqyb7AK7du2S5557Ts6cOVPlVK+66ir5zGc+I2vXrpVf//rXlb/75je/Kb179446y7/9279V7vP555+P+v6TZYc4xa8nLQQf6zv06Gnf1/cemdBnfPzwAxyppuCjrhoojF1TWDGcEGM429QrFgdHAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQiIoAwceoMLITBBBAAAEEEEgGgVCCj3Wt6/97HSnNd/FGgCT4SPAxGV47Vs7h0KFDsmjRItecDh06yODBg600rUo75s6dK8ePH3c/u+GGG6Rx48YB27lnzx556qmnAv7OCz6+++678vLLl0abmzx5slx//fVRP28Lgb4LFy7IW2+95c6tQYMGcvPNN0f9PCPdoQWnSM8hUba3EHyszymu/fvpyTE/kB4F3eut+7zgozbAP+gYSvBRtw81yBjq+vWGxIERQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEoipA8DGqnOwMAQQQQAABBBJZoK4wo++5hTLNtW7nH3z0/pvgI8HHRH7NWGv7hg0b5Be/+IVr1ujRo+WRRx6x1kTXnp/+9Kdu6mpdnn76aTfya6BlypQpMmPGDPerZs2aycSJE6VPnz5y9OhR0RqkIzvqvz/55JNuRMiGDRvKj370I8nNzY36eVsI9J09e1a+8IUvVHr86le/ivp5RrpDC
06RnkOibF8fwccth7c6ntc2THX/v7ZwnRmu+p7y2jf46D+9daDprhUuWqM+Enw0cxnSEAQQQAABBBBAAAEEEEAAAQQQQAABBBBAIK4CBB/jys3BEEAAAQQQQMCqQCihRz2HUIKP3uiOvudO8LFCg6murb4iErNdyRZ81EDjxx9/7Drj0UcflREjRgTsmNLSUtm7d68b5TI7OzsmnWch0EfwMSZdm7A7jWfwUQOPGna0FHQM1HH1OeojwceEfSnRcAQQQAABBBBAAAEEEEAAAQQQQAABBBBAIGEFCD4mbNfRcAQQQAABBBCIpkAowcdQQo/aRv/go+9/Wxzx8bcLSqrRfumqpjKkfdVA1Yq95yTQuqH0ywv35VVb/dMvHZXZu0ur/HxsxxwJtG4ox2Ld5BdIpuDjxYsX5bHHHqvstN/97nduRMf6Wgg+BidvwSm4lib+WvEIPkY78KijMga7hBOyrM9RH/2Dj3qe3kiM/v/vGdQ2UmOooziGun6w/cB6CCCAAAIIIIAAAggggAACCCCAAAIIIIAAAnYFCD7a7RtahgACCCCAAAJxFIhV8NHbr+/+fYOP+fn5kpmZGfMz7fjM/mrHmDo5v1qYMeYNCeIABB+DQApxlePHj8vWrVvdtMg6JXKLFi0kLy9PevbsKQ0aNAi4t5KSEhfazcrKcoE7HaV0y5YtbmTB8+fPS+vWraVfv37VRhg8ceKEW0+PpSP0NW/eXC677DJ3rGCudQ38bdu2Tfbt2ydFRUWSk5MjrVq1ctNB64iG/uEWbaO2VZfNmzfLH//4R/fvw4cPl/vuu6/y3HQ/+o//om3ctGmTHDlyxB1Pp5Ru2bKl9OrVSxo3bhyidM2r63HOnTvnVtApmvfs2eP+/YknnnB94b+o9ze+8Q33Y/XX6bG9RQ2aNm1a+d/FxcWV/67tr205ePCg7Nq1y/WPtkfPVX179OghGRkZNW4aKNB36tQpWbdunbPTfel5tG3bVrp06VLjFLbhgOpx9LrQkS0ff/zxgCZ6bTVq1Kja7vXa1/7V/9c+0Jqr15K2sy4rPeahQ4fcqJt6PepomjqNePfu3WucnjzY4KNvn3mvsXBsUnmbWAcfp22YLi+tfzmqxP+6++9B7++BVx4Oel1vxUQLPmq7me465G5mAwQQQAABBBBAAAEEEEAAAQQQQAABBBBAAIFPBAg+cikggAACCCCAQMoLhBJ6VKya1g/0c//go/9/E3ysfvkRfIzeS1LDba+88oosXbo04E41UHfzzTfLrbfeWiWUqKG8p556ym1zww03yHXXXSc64uCBAweq7EfDeBpq1OXkyZPy6quvyty5c2s81uTJk2XUqFE1nqCG6DS4qOHMQEvXrl1F99GpU6fKX/tOf1yb3F133SW33XZb5SoaanvvvffkrbfeqvF4N910k3zqU5+qNRAYbG+9/vrrMm3atGBXr3O9559/vnKdYMJ22qdTpkyR9evXB9y3hgAnTJjg+jpQEMn/GO+//7689NJLAfelAdX777/fhSmjseh1tn379lp3pUHVb33rW1Wui3/84x+yaNGiGrfTa0Kv70DTg2vQ8Q9/+EO1a97b2aBBg+Shhx5yIWLfJZi+mD59ukydOrVys4kTJzp7ltAEYhl8fPrDZ2MyrXWsg48qWF/TXQcz4qO2z7++EHwM7bpnbQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAIFLAgQfuRoQQAABBBBAIOUFCD7augQIPkanP3TUxd/85jc1hvp8jzJw4ED53Oc+VxkA2717t/zwhz90q2jAS0dSDBRG9IKPhw8fll/+8pdSWFhYZ+MfffRRGTFiRLX1NDD5t7/9rc7tdYWvfOUrMmDAALduOMFHHTnwL3/5i6xcubLO440ZM0YefPDBiEcwrM/g4+LFi+XPf/5zneeqKwwdOlQ+85nPVBsJ1Av0aVj2+uuvFw3v1bXoNrUFXeva3vt9qMFHHbnzt7/9rezcubPOQ2iY9jvf+U6VcOvy5cvl97//fZ3bqsWTTz5ZZfTHuoKPb7/9tgsIe4sGL3VkUqbprZO72gqxCj7GKvSoJxCP4ON9fe+RngU9pUdB99BRI9giUPBRd6fXtu/1HWzw0ds2lCbxOgpFi3URQAABBBBAAAEEEEAAAQQQQAABBBBAAIHEFyD4mPh9yBkggAACCCCAQAQC0Qo9ahP891XT9Na+6zLiY/XOI/gYwQX9yaY60uO3v/3tKjvSAFr//v3dCHU6lfSqVauqjKKnIz/ec889bhvf4KPvTnRaZJ1CWkOQuo+vf/3rbjro//zP/6wyMt748eOlT58+bhTJHTt2uGPpdMPe8rWvfc1Nk+0t/kEznRZbj+ON7KgBNh2t0AtfauDsZz/7mZvuWafd1t/polMu67F00XWuuuqqymNceeWVoiE3XXQkwDlz5lT+7oorrnDr6qiHOpW3tvWjjz6q/L2ez6RJkyLqGG3Xxo0b3T4WLFhQeS6DBw+ucarr2bNnVx5TA3Lekp6eLvfee2/lf9cWttMA7DPPPFO5rrqMHDnS9Y/2nfbjihUrqoQE9Vrx3adu7P/f+jP1Ujfdl051vXbtWtGQpW9IVqfy7ty5c0R27777rhw7dswdw3dEUV+Tjh07upCl1t3vfe97VUK448aNc32vo5Pqfj788ENZtmxZZZt8RwPVIO03v/nNynNo166dXHPNNdKtWzfRKbd1yvg333yzclt9TX31q18Nqi/0PF5++dLUyYQeI7osJBbBx1hMb+17lvEIPvoeT0OQukzoMz4y7CC2Djf4qLtm1McggFkFAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAoJoAwUcuCgQQQAABBBBIaQGCj9nm+p/gY+Rd8otf/EI2bNhQuaPvf//70qVLlyo71mtfp0v2Dfg9/fTTbvS6QMFHHZXuxhtvrNY4/xHsdIpsDYv5Lnqsf/7zn1XChjpCZPPmzd1qOu30O++8I6+99poLmekIixkZGVX2oaEzDVhqcE2XQO3Rc9Zz12X06NHyyCOPVGuvBtf0PL2lplEJlyxZIn/6058q1/MdZTLSHvIdwdAz99+nmjz22GPuxxpW1KnGa1pqCj7qyJaPP/64FBcXu001qPiDH/xANMBaV//8x3/8hxv90dfJd5vLL79cvvvd77rwpO9y8OBB+fGPf1wZHGzVqpX7b//+DMfQd3RPPZdf/epXAXezf/9+N2LjiRMnXIjRd2p0b4NZs2bJiy++WLm9joipbdSQrbbX89L+ycnJqXKc1atXu9FUNdCpoVgdFTWQk+905P5Tg48dO9ZN284IdeFcCRXbRDv4GOvQo7Y53sFHX10NQcYyABls8FHbFOyoj+G8PsLZJvyrkC0RQAABBBBAAAEEEEAAAQQQQAABBBBAAAEE6lOA4GN96nNsBBBAAAEEEKh3gWgFHwPtJxFHfFyx95z8dkFJtX750lVNZUj7qiHJmtYNpVNfuC+v2uoEH0MRrL6uBs806OgtOhqdjkoXaNFgnIbwNGR3yy23iI6KmJ2dXS34qCPT3X///dV2odtpsMwL
1n3hC1+QIUOGBDyWrquhRJ02W5eHHnpIdBpp3+XQoUOiQbmagit1BRvr+r0eS8NoCxcudIet6by8Nr3xxhuVo/sNGzZMPv/5z1c2t6ysLKiO0tEZ/Zd4BR99p7jWoKCOhKjB1kCL9o9ONa6jUeqiIyTq+t7iG67UfekUzzp6aKBl165d8qMf/ajyVzr6qI7iqYvWxWDqrv/0uLptsMFHXVevbR15sqY2aht01NDt27e7dnnTtusImPpzXTRwqtesf/BRf3f8+HHJzc2tdvqBQqj+IUu97h944AEJdG0EdVGxkhOIdvDxgVcejrlsfQYf9eRiGX70DT7qsXzruP+1TvAx5pcaB0AAAQQQQAABBBBAAAEEEEAAAQQQQAABBFJCgOBjSnQzJ4kAAggggAACNQkEE8Dxtq1t3dqmudbt/UNS3vrWprrWMOOkF49U45o6OT9g8DHQuqFcbbu/06ba6gQfQxGsvq7vCIw6Et2XvvSlWneo0wdnZWVVCan4j/hY06iEviExDcPpKI61jbal0xRruE6XmtqmrxWdqltHdtRwWYMGDVzALC8vz43I98UvftFtr6NK6uiSvktdwUc9V9/wYk3n5e1TRw7UERK95S9/+YsLq2kbH3300aA66rnnnqsWkItX8FH7Y/369a6dOo25Tmde26LeGmT1Fl8f30CfjlR4/fXX17qvv/71rzJv3jy3jm/A1H+655p2oiMiajjQdwkl+Ohtp+HHw4cPV06VraOMahhSa69OWT19+nS3qjeip44s+uUvf7nysBp4ve2220Sn0g5m8Q8+6rTaf//73ys31RFNNfRL6DEYzdrXiWbwMR6jPerZ1HfwUdvQv1U/efzab0feAX57iEXwUQ8R6giOoa4fdQh2iAACCCCAAAIIIIAAAggggAACCCCAAAIIIBA3AYKPcaPmQAgggAACCCBgTSCU0KO2PVrBR9/9EHwk+Bjt14Vv4CyYsFug4/sGH2ubUlhHTvSdzlfXrW3xRobUdXSqZG9KYf1vHXFwxYoVoqMsHjhwIOBuevXqJZs2bXK/05EhvZH5vJXrCj7qiJK+oxjW1V7dr2+bf/3rX0vTpk0TJvj4ta99rbL9Oi11t27d6rzcvvWtb1VOJ/71r39d+vbt67bxDfQ98cQTbprn2pb58+fLCy+84FbRfei+dIlX8PHIkSPuWLNnzw7YTB3NUQOQ3rXmO7X3lClTZMaMGVW269Chgwvr6jWojjVN3e3rpFOt6+vRW/SY//Vf/xWVab/r7MgUWCGawcd4jPaoXWIh+KjtiEX4MZLgo7appsBiqEHGUNdPgZcKp4gAAggggAACCCCAAAIIIIAAAggggAACCCStAMHHpO1aTgwBBBBAAAEE6hIg+Bj8KI6M+FjX1WTn9z//+c8rw4Hf+c53pEePHiE3zjf4qNNk63TZgRbf0SVDPYiGwH73u9+5zTT0+Oc//1mWLVsW9G7CCT5u2bJFnnnmmaCP4b+iBjU1sKm1Q6cvDma6ax25MDMzs8qu4jHi44ULF+Szn/1s5XH/8Ic/uGnM61p8g7Of/vSn5eqrr3ab+Ab6/vjHP7pRQmtb9uzZUzkip29f6TTY2g91LTrCojc9trdusCM++o5EWtdxvN/7Bh/VTkdqnDp1qpsu23/Ra1enqx43bpybDtt38XUKdGz9/ahRo4JtFuvVIhCt4OOWw1vlqTmXpmaPJbqV4KOeY7TDj/7BRz2GF0IMNHU9013H8kpj3wgggAACCCCAAAIIIIAAAggggAACCCCAQGoIpK1eva7844/3S9u2l0u7dpebnHJr/fpNcvx4kQwY0EeaNm0S8lRHse7K0tJzsmaNTiFXLoMHD6j2wWasjx/M/g8fPiLbt++Wli1zpWvXjiZHGdm2bacUFh6R7t07S15eS5PX4vLlq+X06TMyYsTQOj/sDaZfor1OcXGJbNq01X0A27t3d5PX4t69+0VrjtYbrTsWp/lbt26jnDhRLAMH9pUmTRobrDmln9ScNBk8uL/JftbX8o4duyUvL1e6dLFbcw4dOiI9enSR/PyW5vpZ68Py5avk9OmzpmvOxo1bpVEjyzVnn3z88QFp166NtG3bulrNiVfwUY/jeyzff9cR5xo1ahTtPwnV9tfxmf3VfhZKmDGUdUM5Gaa6DkUruHV/8YtfiI58qIuO3qcj1IW6+AYfBw4cWGXqX999+QYfNRDYr1+/oA+Vk5MjkyZNcuv7jwKo0wtrYFCnt9ZwjE57vWPHDnnnnXcqRzAMJ/i4detW0embddH7JS/UF2yjNeimIz5GusQj+Khh0scee6yyqf/93//tpg2va9ERPHUkT118Q3q+gb7f//73ov1X26IBxx/9qCJM5j+6Z11tqOn3wQQfdR2drtsLLGqNnTBhggsAN2nSRHQqa536Ws9x+fLllYfyDT56P9R96XTdOmpkYWFhtWbpvr/97W+78/OWQMFHvdZ8A5TPPvusu7ZZIhOIVvAxXtNc69laCj5qe54c8wPpUdA9so74ZGsNPpaeOFvlnlrr9/r1m0Xfpw4a1M/dM/qGIX0PHK0RH3WfoYz6WFpa8d4qLS3dtdE/qB4VnAh3Ulh4WHbs2OPes3Tu3MHk85wtW3aIPnfq2bObew8YSh9EyBP05suWrZIzZ0plxIghJp/nFBUVy6ZN26RJk4bSq1cPk/28Z8/Hsm/fQWnfvq20aXOZyec5a9duFLUcPFhrTiNz16JXc/RZ2MCBtmtOQUGedOrU3uS1uGXLdjl8+Kj06tXNPeu2WXNWytmz52T4cOs1p5H06tXdZD/v3v2x7N9/UDp0aCuXX26z5qxZs8Hd51itOWfPVtznZGTYrTmHDhXKzp17xXLN2bx5uxw5Yrfm6DNGfYZMzQn6tjDgirt375V9+w5Jx46JUHP6V3lvFdmZR29rfYaidVFnyNDP1iy+t/JqTqtWedKxo837HK/m6Ge8ubktzN3neDWntPS8XHnlYJPvrfTz3c2bt0nTpo2kZ0+r9zlacw66++3WrVuZfG/l3ecMGdLfPUe3ds995sxZWbt2g2RmZsiAAdZrTr507NjO5D23vlaOHDnmciXUnPD+Jp44USRau5s2beyei9U0U1R4e4/OVhX3ObZrzurV66Wk5KSEW3PSZs+eW75ixVoZOLC3Kwp1jdwRHdrQ9vL22zNdaGHChJvksssKzBV/DcJNnfqWlJeL3HnnOHfDaW3Zvn2XzJu3SDp1aicjR15R5weV9dH+uXMXiT64uuaaEdKlSyf3h8ra8tJLr4sWrwcf/JQLxFlb9Kb93XfnSIsWzeWmm64N6sPteJ/DypXrZOXKtTJoUB/p37+PyZrz1lszZd++A3L77TdLq1b55mrOqVOnZerUt91N5qRJt5msORpknj9/sbtpHzlymNGa85HoB3TXXjtSOnfuaLTmTHUh3Ice+pQ0bmy35uTmNpcbbxwjDRrUHoKJd73R42m9qag5/aR//97Vak60go+B9uMfdPT+23/dhg0bSfPmtU8PHA27UIKPv11QUu2QX7qqqQxpX3W0uBV7z0mgdUNp7wv
3VQ//fPqlozJ7d2mV3YztmCOB1g3lWKmy7t/+9jeZO3euO90777zTjUoX6hJs8PGjjz6S//mf/3G779Onj3zjG98I9VAuFKzTIHtTSo8fP74yEOm/s/3798sPfvAD9+Nwgo/+U10HOwpiyCdVxwbxCD5qE3ynutaAnv8Iiv7N9O8LDRD27t3breYb6Atm2uw5c+bIP/7xD7dtbaOGhmIbTPDR95rUh3I//OEPpWXLlgEP85e//EUWLUdqzoAAACAASURBVFrkfhco+Oi70fHjx2Xjxo1u/fXr9UtvFUu7du3k//2//1f5QMM/+KgB3rvuusuNfumFJzWMrK8Vi18ACqU/6nvdaAUfn/7wWVlbuC4qp6OjKNa2PH7tt4M+jrartiUabY7mqI81BR9nzJglBw4clNtvv0Xy8/Mqr/tgR3xUg1Afroeyvj5Qe+ONd1wgYOLEW6uN4hp0h8VwRX3PsnDhEveFMg3Q1BU8j2FTatz1nDkLZdu2HXLddVebDUq9+OJr7gHqgw/eZfK91YEDh+T99z90wdHrr7/G5HurFStWy6pV62XIkAHSt29Pk89zpk17V9RSX88FBfkh149Yv368mqPPPO+44xajNWe7LFy4VLp27eQ+zLZZcxaIPneyXnNOnjwpDzxgs+bs339IZs780IXqx44dbbLm6AAE+uGX5Zrz5psVNUefz2pwL5R7kFjXG92/hjK1jRo+uuOOm03WHP2w/aOPln1Sc/Q+p+5ZCuJh53uMOXPmy7Ztu9xrxWJoQd/H/+//TpWTJ099UnNi/+XqUPtAQ8wzZ841XXP0SzIa8hk6dKD06dPD5H2Ovm85eLBQ7rzzNvfeymrNycrKdJ+t+c+QEep1E4v19YtGixYtk27dOssVV+h9jr2a88EHFTXnhhtGS4cO9oJSZWXl8tJLU90XiydP1vscezVHwz2zZs11f5vHjr3a5P2s1pzVqzfIFVcMlN69bdecu+4a5wassldziuXNN9+T7OwslyGyWXO2ykcfLXcD8AwbNshkzZk9e54bPM1uzSkTzeecPn1a7r//TpM1R3Ns6qjPIMaOvcpkzVm6dKWsWbNRrrhi0Cc1p+oMZbH4mxvqPt94Y4YcPHhYwq05aVu3bivXb0i2bNmiykPoUBsSy/X37t3nHk7qty98RwiI5TFD2bdOhbZr1143RZ8+FLL4LRZ9k6tvwjWsp98csJg01hv248dPuG9wagDE2h9QvSZ0BLszZ05Ljx7dTL7x0W836GiK+gC1fft2JoNcR48ec9/K1m9k68M1ix/4ejVHQ3sWv8Vy/vwF0WS+7ZpTLAcOFCZIzWktzZo1lfT0tFBKf1zWtV9zzrgvJmRlZbjgh8XAel01J1bBR//9+o746B+IbNEiVxo2rHsUtkgvumCDj5EeJxrbE3yMTPH999+Xl156ye0kmDCijkSn912+owEGG3zcvn27aIjPW3Tq6roeMui9s+/9sj4o+/KXvxzUPubPny8vvPCCWzec4OO5c+fk85//fOWxghkR8/z581G/7/MNPuq/X3bZZdU63XfERt9pwQNdHb5hOx2x0bc/Vq5c6f4zmBCsjoT4+OOPV27/3HPPSW5urvtv32Pcc889cvPNN9d6oWogVkOIuui6uk2kSzDBx1dffVV0JFJdagvRai3+0pe+VDkSo3/wUR+m1DQa79q1a+XXv/515enoyJZt2rSp5qShx8mTJ7vXl/9rRX9+/fXXR0qS0ttHK/j4wCsPR83xvr73yIQ+46O2v5p2FM1RKqM16mOg4KO2X79Qph/CdurUwd3vhDrio+4j1GcDoayvNV5HlCorK3PBQpvPcyreW+kMKPplYIvPc/R5k35JtE2bivdWofRBzF8wnxxAvxCsf0cqnufYe8CrX6rW5zk5OVnSrl1bk/2so13pSBD6oZcGNC0+z9FRKTVs1rlzJxfksnYtnjt3XrSN+oFxly4dTNYcHTFTn9EmQs3RWR10NHZr/axl51LN6Rr19xLRqJt6r6vPczTwoTNkWPzbos+P9ZmO5Zqj9xCnTtmvOfreR0eNtnifQ82J/BWtA7Ls2LFLSkvPSvfuNu9zdCAHfV+QCDVHP7PSz64s3ufYrznnZM+efe4LztZrjr5nadVK31ulR/4ijPIeKt5bnXAz5lm8z9H+1c+tqDmRdbze5+h7Kw1nakbHZs3Z6wKuOliVfhnK2j23PufX91b6dzAxak6+yXtu/XJCUVGRtG3bxuTMu5dqTql0767vrew9z0mM+5wjn9Sc/KStOWlHjx4t1w/VtKBaLKr6Z0MfQOs/+gbcWlH1/qypob7wLL551DZq29RQF4sPMnz7OT09w2QAyWvjxYtlJouqbz/rH3mLASSvjdScyG6IdetEqDn6WklLo+ZE0ttat7WvLY7GnCw1J5TgY23rBgo6+vZ9bcHHgoKCuNw/EHyM5NWYWNvqtNAa6POWf//3f5eRI0cGPAl9QPCzn/1M9uzZI7feeqvceOON0rx5cwk2+Kh1Skeu80ZrrCtc54UydYrr6667zo245Btm00bWNAqjTg3485//XHbu3OnOpa7gY4cOHeTJJ5+sdt6+I2LqNN4aeKvpHn7btm3OZ/To0XLLLbdI69ato3Ix+AYfv/rVr7oREf2XaAQfV61aJb/97W/drjU8qaM+qkugRcM/Oh22hvp06du3rxuJ01t8g4+6ryeeeKJGDx0R8Ze//GXltt///velS5cuEdv5Xyu/+c1vqo3aNW3aNHn99dfdsfQ60/BjoEWnudYpu73FCz7q9aVTry9dutQFcvUaCbT4Tin/la98RQYMGOBW83XS8Kfv+1ffUKau++Mf/7jKNNkRA6XYDiwGH7ULYh1+jGboUdsbrVEfveCj7tP/uY0GfPQDJd9nTlZGfNT2JsJ7q0R5nmP9uZ06Wn9vpdek/ed2tp8h67MIfSbGM+Twbgz0vWuiPM+xX3PKzT9DpuaE9zrxtqp4bkfNiUSRmhOJ3qVt9TqsuM+xFwbQVibC55MV16L9z8qpOZG9Zi7VnDSToUc9u0TIRHh//6g54V+P1Jzw7Xy3TITnORXvrag5kfS49rM+W6TmhK/o/W2xngmM5D4n7dixY+XhE7ElAggggAACCCCQuALxDj4GCkgSfKx+/TDiY+SvKd8pfHVvvsEsb+96PWowy5vqV3/+k5/8xIXZgg0+6jazZs2SF198sbLRt99+u+g//h82+05BrCtrQGzUqFFuOw00btq0yf37/fffLzfccEMVBA28aYDPW0d/GSj4uG/fPjftsLc888wzkp+f7/7TG2nS99z05xo61FEg/afx+/jjj6sEJzX8+Mgjj0TeOSLyxz/+0QXrdPEdFdB7yK520Qg+6jnr1ODeFMsaWPze975XOTqhdzJ6LA2ceqND6s810KojhnqL/xTOzZo1c9beiJDeehoc1ECft2jQUteLRvhAr1kN8nrLZz/7Wbnyyiur9O+WLVtE+10XneJaw69NmjSp0m++gVDvF17wUR2WLVtWub73mvDdQUlJieh03zpaqi56fh07dnT/XtPom/o7DR
pre7z+UBsNkFoNuETlYo/hTqwGH/WUYxV+jHbo0dXAVv0klCm4a+rSmoKP+toPdpTHmupEqPUj1PVjeJmyawQQQAABBBBAAAEEEEAAAQQQQAABBBBAAIEYChB8jCEuu0YAAQQQQAAB2wLxCD56x/Ad9dFT0Z8RfKx+jRB8jPx1o1P8aeDNG4lR99izZ08ZPHiwG9Fx69atLtzl+/tx48a56ZB1CSX4qKE5Hf1u8+bNlQ3v2rWrCxS2b99eNIyoowjqMb3FfwpiHWHv5Zdfrvz9Nddc4wJtGhTbsWOHLFmypDIs5q0UKPjoP5W1rqvnfPDgQff/d911l9v8jTfekDfffLPyeBqQGzZsmHTr1s1N7bBhw4YqIcBevXqJjswYrVGiZsyYIVOmTKly/M6dO4uOQuiNYhiN4KMeQEfzfOqpp6pcVIMGDXIjOur05hpU9L8WbrrpJrn33nurbOMffPR+OWLECHdt6bcG16xZI6tXr66yne800JFf2eKuNe0fb+nevbsLDup0fRoq1JErP/e5z1WxveOOO1yt1am89Vr0DTZ6K3rBR71WdIRKb9GwqF4bej3rcXQU0AULFlS+dvQ61KCnF16sLfio+/QPhtY2KmU0vJJ5H5aDj+oe7fBjLEKP3vXxr7v/HvGlQvAxYkJ2gAACCCCAAAIIIIAAAggggAACCCCAAAIIIBCiAMHHEMFYHQEEEEAAAQSSRyAawcdgprlWMf/go7cdwcfq1xPBx+i8xvbv3+9GSfRGl6ttrxo0fPDBByvDW6EEH3W/GqDUUQx9w481HW/MmDHuWL4jcmlg8fnnnw8YSPPdz0MPPST/+Mc/3I8CBR/15/4jUHrb33bbbZXBRw0VatBy5syZdWLrqHw6RbQG4KK16EiBOvKib/DU23e0g4+6Xx0p83e/+13lCIW1nYeOtnn33XdXm/7bC/Spu67jO8pnTfuraRrvSBz9R+z09uU7tbmer44iWtvSrl07F2bUEKouXvBR/13Dm9oPdS066qWeozfao65fV/BR15k6dapMnz69cve+I0bWdUx+f0kgWsHHpz98VtYWrosJbbTCj7EMPeqJE3yMSfezUwQQQAABBBBAAAEEEEAAAQQQQAABBBBAAIEYCxB8jDEwu0cAAQQQQAABuwIEH/NlSPvsKh20Yu85+e2Ckmqd9qWrmga9big9/sJ9edVWJ/gYimDt6+ooeG+//XZluMt/bQ2xTZgwQUaOHFkliOg7zfPAgQPly1/+cp2N0mmVNUioozcGCvRpcFBHldTRBANN7athRA2EeUE03wPqthqW1NEFv/jFL7oAX03BRx15cP78+S7Y6E1FrPuaOHGiO1ffRUdY1JEf9XwDLToNtW6jAbdoLzr64P/+7/9WGyFRA4p6vr4jPuqIlLUF+YIJ2x05ckTeeustmTt3bsBT0REntX90ZMxAi+eugUEdQVJH4fz73/8eMEyp+9DRQ9u0aRNtNrc/DdhqAPbAgQOV+9eRHx9//PHK/967d6/86U9/qrKO90sNPD766KOyYsUK+etf/+p+7Bt81P/etWuXvP76626EyECLjgL58MMPS+PGjav8Opi+0KDvD3/4w8q26QipGoRlCU0gEYKPekaRhh9jHXrUNsYy+Kj7T09Pr+xc39C7/5TUTHUd2muAtRFAAAEEEEAAAQQQQAABBBBAAAEEEEAAgVQXIPiY6lcA548AAggggECKCoQSelSimtZPthEfNfg46cUj1a6KqZMDhyQDrRvKJbX7O9WDUQQfQxEMbt2zZ8+6KY+PHTvmpgTOzc0VDdNpiC1QCDG4vQZeSwOQOirf0aNHpbS01E2nrMfSUfEyMzPr3LUG/nS6YZ0iW9udn5/vpqDOzq4a0q1rR/ra1PPVfWgb8vKqh2y917YeS4OIp06dch6ejX+ora5jhvN7bd/x48ddjdHjaltDXYIJ23n71GnQNeipNto/ei1oiDSckKL29fbt211fa/ubNGni9qMj2cZjKSkpcSFb7TM9ZqBrWc/Xu/ZzcnLc9OutW7cOunmHDh1yo6ZqH+n127ZtW7e97oulfgWiFXzccnirPDXnRzE9mXDDj/EIPeqJWw8+ahtrCkXW1HGhrh/TC4CdI4AAAggggAACCCCAAAIIIIAAAggggAACCMREgOBjTFjZKQIIIIAAAghYFyD4KBJKmDGUdUPpe4KPoWixLgKBBUIJPmKIQLIIJFLwUc1DDT/GK/TYv1U/efzab0d8WWSlZUnpibNuP/6hw5pGfAy0LqM+RtwV7AABBBBAAAEEEEAAAQQQQAABBBBAAAEEEEgZAYKPKdPVnCgCCCCAAAII+AqEEnysbV1GfKw+OmQoVxrBx1C0WBeB6gI6cuO3vvWtyl88//zzMCGQEgLRCj4q1tMfPitrC9fF3C3Y8GO8Qo96wsG2qS4cgo91CfF7BBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAgWgLEHyMtij7QwABBBBAAIGEEIhH8NH3GGVlZVVcvN/p9KzBTP8bKWrHZ/ZX20UooziGsm4obSX4GIoW6yJQIaD1Q6cE11HUZs6cKf/3f//nft65c2d54oknYEIgJQSiGXyMx3TXXqfUFTSMZ+hR21RXe4K9mDT4ePb4GTfaIyM+BqvGeggggAACCCCAAAIIIIAAAggggAACCCCAAAKRCBB8jESPbRFAAAEEEEAgYQUIPtY81fXSvaXV+vWzo5pW+9mKveck0LqhXBSB9vvpl47K7N1V2zC2Y468cF9eKLtmXQSSVqCkpES++93vSlZWlhQXF1ee5/jx42XSpElJe96cGAK+AtEMPup+4zXqox6rprBhvEOP2pZ/3f33qFxYBB+jwshOEEAAAQQQQAABBBBAAAEEEEAAAQQQQAABBEIQIPgYAharIoAAAggggEDyCEQj+BhoH74/S8QRHy30MMFHC71AGywLaPDxq1/9apUmNmvWTH76059Kw4YNLTedtiEQNYFoBx/jOeqjIviHH+sj9Bit0R71fAg+Ru3SZkcIIIAAAggggAACCCCAAAIIIIAAAggggAACQQoQfAwSitUQQAABBBBAILkECD4GHvHRQi8TfLTQC7TBssCZM2fkJz/5iZw4cUIKCgqkd+/eoqM9NmrUyHKzaRsCURWIdvBRGxfv8KEXPIz3cb2OiNZoj7q/2oKPvtNf+0+DXdd/e231X6+uiynU9evaH79HAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQsCdA8NFen9AiBBBAAAEEEIiDQCyCj/77ZMTH8DqS4GN4bmyFAAIIpJJALIKP6hfvEGL/Vv1kbeG6uHfdk2N+ID0KukftuAQfo0bJjhBAAAEEEEAAAQQQQAABBBBAAAEEEEAAAQSCFCD4GCQUqyGAAAIIIIBAcgkQfGTEx+S6ojkbBBBAILUEYhV8VMV4hx/j3XPRnOLaazvBx3j3IsdDAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQIPjINYAAAggggAACKSlgIfiobWjVqpVkZmbGvA86PrO/2jGmTs6XIe2zY37sUA/AiI+hi
rE+AgggkHoCsQw+qmayhh+jPdKjd+UFG3zU9X2noWaq69R77XLGCCCAAAIIIIAAAggggAACCCCAAAIIIIBAtAQIPkZLkv0ggAACCCCAQEIJEHxMqO6SsR1z5IX78hKr0bQWAQQQQCBmArEOPnoNT5YApE6pfWefSVGd3tq3cwk+xuxSZ8cIIIAAAggggAACCCCAAAIIIIAAAggggAACNQgQfOTSQAABBBBAAIGUFCD4mFjdTvAxsfqL1iKAAAKxFohX8FHPQ8OPury0/uVYn1bU9x/rwKPXYIKPUe86dogAAggggAACCCCAAAIIIIAAAggggAACCCBQhwDBRy4RBBBAAAEEEEhJAYKPidXtBB8Tq79oLQIIIBBrgXgGH33PZcvhrbL58ObKH60/vCHWpxry/vsW9JGeBT1jNrpjoAYRfAy5m9gAAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAIEIBgo8RArI5AggggAACCCSmAMHHxOq3741uKp8d1TSxGk1rEUAAAQRiJlBfwceYnVCC75jgY4J3IM1HAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQSUIDgYwJ2Gk1GAAEEEEAAgcgFUi34+KeFJfLTeSWRw9XDHhjtsR7QOSQCCCBgXIDgo60OIvhoqz9oDQIIIIAAAggggAACCCCAAAIIIIAAAgggkAoCBB9ToZc5RwQQQAABBBCoJpBqwUcFWLH3nCzdW5pQV8MV7XNkSPvshGozjUUAAQQQiL0AwcfYG4dyBIKPoWixLgIIIIAAAggggAACCCCAAAIIIIAAAggggEA0BAg+RkORfSCAAAIIIIBAwgmkYvAx4TqJBiOAAAIIIFCDAMFHW5eGF3zUVqWnp1dpXFpamug/3lLTv+vvfX/nu5Oafl6TQqjr29KkNQgggAACCCCAAAIIIIAAAggggAACCCCAAALBCBB8DEaJdRBAAAEEEEAg6QQIPiZdl3JCCCCAAAIpJEDw0VZnE3y01R+0BgEEEEAAAQQQQAABBBBAAAEEEEAAAQQQSAUBgo+p0MucIwIIIIAAAghUEyD4yEWBAAIIIIBA4goQfLTVdwQfbfUHrUEAAQQQQAABBBBAAAEEEEAAAQQQQAABBFJBgOBjKvQy54gAAggggAAC1QQIPnJRIIAAAgggkLgCBB9t9R3BR1v9QWsQQAABBP4/e/cRHMed5fv+wJGgg6H33oIgRSOJpOjkKEMZSmq10bR6+saLeBF383YKLWbVyxt9exaznYjZzMy9d263XEuiKJGiE70RvffeW5BwJMyLk1AKpWIBVZlZmXWy6osJhXqENP/8nKyDf2b9KgsBBBBAAAEEEEAAAQQQQAABBBAoBAGCj4VQZY4RAQQQQAABBJ4QIPjISYEAAggggEB8BQg+2qodwUdb9WA0CCCAAAIIIIAAAggggAACCCCAAAIIIIBAIQgQfCyEKnOMCCCAAAIIIPCEAMFHTgoEEEAAAQTiK0Dw0VbtCD7aqgejQQABBBBAAAEEEEAAAQQQQAABBBBAAAEECkGA4GMhVJljRAABBBBAAIEnBAg+clIggAACCCAQXwGCj7ZqR/DRVj0YDQIIIIAAAggggAACCCCAAAIIIIAAAgggUAgCBB8LococIwIIIIAAAgg8IUDwkZMCAQQQQACB+AoQfLRVOzf4WFRUJPpP4k/yf0v8faplUx1Z8nLpjt7r8um2x+8RQAABBBBAAAEEEEAAAQQQQAABBBBAAAEE7AkQfLRXE0aEAAIIIIAAAhEIEHyMAJldIIAAAgggEJIAwceQYH1uluCjTzhWQwABBBBAAAEEEEAAAQQQQAABBBBAAAEEEPAtQPDRNx0rIoAAAggggECcBQg+xrl6jB0BBBBAoNAFCD7aOgMIPtqqB6NBAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQKQYDgYyFUmWNEAAEEEEAAgScECD5yUiCAAAIIIBBfAYKPtmpH8NFWPRgNAggggAACCCCAAAIIIIAAAggggAACCCBQCAIEHwuhyhwjAggggAACCDwhYCX4OGjQICkrK6NCCCCAAAIIIOBBgOCjB6wIFiX4GAEyu0AAAQQQQAABBBBAAAEEEEAAAQQQQAABBBD4hQDBR04IBBBAAAEEEChIASvBx/79+0t5eXlB1oCDRgABBBBAwK8AwUe/cuGs10PKpPFeoxQVFTn/JP4k/7fE36daNtUIk5dLdxRel0+3PX6PAAIIIIAAAggggAACCCCAAAIIIIAAAgggYE+A4KO9mjAiBBBAAAEEEIhAwELwUQ+zX79+0rdv3wiOmF0ggAACCCCQPwIEH23Vsqy1VJoeNBF8tFUWRoMAAggggAACCCCAAAIIIIAAAggggAACCOS1AMHHvC4vB4cAAggggAACXQlYCT7q0x6rq6spFAIIIIAAAgh4ECD46AErgkWLmoqkpelxyuBjcXHxzyNI94THrp7U6PUJjl6Xj4CIXSCAAAIIIIAAAggggAACCCCAAAIIIIAAAghkWYDgY5ZB2RwCCCCAAAIIxEMgG8FHPdLE7SRvM/H/b2trewJGf69vzA8dOjQeaIwSAQQQQAABIwIEH40UQkRKi0ql+V6TSLtIYsjRHSHBRzu1YiQIIIAAAggggAACCCCAAAIIIIAAAggggEA+CRB8zKdqciwIIIAAAgggkLGAl+CjbrSr5bsLO2YSfNRl9ImPvXr1ynjsLIgAAggggEChCxB8tHMG9JAyabzX6AyI4KOdujASBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAg3wWKtmzZ3n7kyHGZNm2STJkySUpKSswd84YNW+X69Zvy8suLpX//aufJSJZ+GhubZM2ajRqJkNdee1F69OhhaXjOWM6fvyR79uyXESOGyZw5M6W0tNTcGHfv3idnz16QZ5+dLaNGjUj5hkmuB71y5fdSV/dA3ntvufTs2TPXw3li/7du3ZEtW3ZIRUVfWbRovpSVlZkb49GjJ0R7Tk3NZJk8eaLpnrNs2RKprq4y3HNEXnvtBdM9Z+TI4TJ79gyjPWevnD17UebNmyM6zlRv0ub6BbRy5Rqpq3sYg57TTxYtmme05xyXI0dOSE3NFJk8ecIveo6V4OPGjdukX7++8sYbr+T6lGP/CCCAAAIIxEaA4KONUhVJkUhTu7Q0tTgDSp5T//DDNrl5844sW7ZYKisrn/h9uq++do/S632gTJevr2+Q77//QUpKiuWVV543eW117twF2bv3oIwePUKeeqrW5LXVzp175Pz5i7JgwTMyfPhQk9dWX321WrTe7777usn7OTdu3JJt23Y5r5OFC58xeW11+PAxOXbspEyfPlUmTRpv8n7O+vWb5caN2/Lqq89LZWWFufs5bs8pLS2RZcuWmu45Y8aMlJkzp5vsOTt27JELFy7Kc889I8OG2e05DQ2N8s47rxntOTdl69Zdzn1PdbR4D/nQoaNy/PgpmT59mkyaNM5kz1m3bpPcvKk95wWTPefhw3pZu3aTWO45Z8+el717D8nYsXHoOc/KsGFDzM1z9P7m11+vkXj0nGp5
7rmnTfec2tppMnGi7Z6j70NXVPQzN89xe05ZWam8/PISk/Oczp4zSmbOrDE5z9m+/Ue5ePGSPPec7Z7T2NgoK1bYnudopmTBAps95+DBjnmOnocTJow1Oc9Zu/YH0dzB66+/KP362es5Dx48lHXrNjt/UzRDZDGfc+bMedm3T+c5tnvOhQuXnPd4hw4dbG6eo9+kp/mcpqZGefttmz3n+vUbsm3bbhkwoL/Mnz/X5DynEHpO0fffb2jfvXu/zJo1XWbNqjXZFL788ju5dOmK8wfU4oWF3rT629++coKPv/71W9KnTx8b70AkjOLkyTOiwYpx40bKwoXzpbzcXmhvw4YtcuzYKVm69DnnZobFcOZ//ucncu/effnHf/yNc2Fh7efatRvyzTdrpbq6Ql599UXp3dve08M0gLt79wGZPXu686aNxYlIZ895XYYNs/dHvqPnfOlc2L7//pvGe84oZ7JkMSjs9pznn3/OuZlBz/He0Tp6zvfSv3+Vc5PX4hMLO3rOfpkzctmnOQAAIABJREFUZ4bzpk1yz/ESfvTzxEdV7e6pjx03J1fL5ctX5aOP/j/p29feHML7mcEaCCCAAAIIhC9A8DF840z2oE97bLjb4FybuP8krrdy5Wq5dOmavPvuchk8eOATN/PDCD5mGnrUceqN8k8//doZ13vvvSF9+vTO5LAjXUbfDNm0abvzZoiGUyxeW2nw4/jx0/LSS4tl/PgxJq+t/v3f/+rUW+/n6IeOrP1cuXJNvv12nXOjXEO4vXqVWxui7N69V/bsOShPP/2UaCjA4v2cL774Ri5fvia/+tWbTs+x9gFH/TD1Z5+tdF4j2hct9hwNt27evMO5T6JvFFvsOfom7IkTZ5xQxbhxo+k5PrqF3oP47rv1MnBgf1m2zGbP2bVrj+zZc0ieeUZ7To306GHvA/6ff75Srly5/lPPGSTFxbYe2BGHnnP06EnnQQ70HB8v5J9W0XuL//EffxMNnf3hD782Oc+5dOmqrF6tPWeAE/y3OM/RD/JoCPeZZ2b9NM+x23P0PaFBgyz2nDr57LNvnMDHO++8bnKeow9m2bJlp0ycOF4WLJhrcp7z/fcb5eTJs85rZexYnefYemCVhpD0vfL6+nr58EOrPeeKfPfdBud6QOeLFnuOfpBHA3H6MCj9YJnFec5nn30tV6/ecLIvgwYNNBe2vn+/Tj7//BvHbsUKqz3nuGzevMt5KMv8+XNM9pw1azbIqVPnnPsQY8aMMtxzGuTDD983Oc+5ePGKrF69QYYMGSgvvUTP8Tur1fuzmjvw23OKrl271t7U1CQ9evQ0GYZTGH2iYnNzs/Tt29fci80NNDQ0NEhLS6vJT9noGB8/bhH99IXezLd4U03HqDVuamp2JiAWb57qGLXOzc2PpKqq0twfeB1fa2ur88k+/dHXi7GHozrjevTokcSl5+ibIRafgqs3M+z3nMdO79aLst697b15SM/xO+345Xr50HMsBB/177P+bdE3OQcPHpyd4rAVBBBAAAEE8lyA4GPuC1xSVCKtDS3S2tzqDCZV8FGv83Weox8c1ABSd0HH7gKLXsKMXpbVN230GrqtrdXkEww67ue411alJj/c+Mtrq14m37DRMeoHCPV+BPdz/PcOfS03Nzc5b9ZYDMN13EPuuLayej+HnuP//Etcs/MeMj3Hr2gc7ud0zCGa6Tl+iywi9JwAeAmr6ntWei7qQyYsPh01TvMcnafr+5Ne5uvZqWL6rdBz0hulW4Kek04os9/Hp+c8lqoqe09YV2XNa+h1AT0ns3Ouq6U6r636Od+SYe2HnpOditBzgju2tGgOq4meE5BSe05TU8c9ZD89p+jOnTvt+qa/xYlmok08xqg3+gNWtMBXb2+3bxiHc9H6aRQHw3iM0f7rxf65aN8wDuei/Tp3Pc+xEHxUP71I0x/9ajsNrvODAAIIIIAAAt0LEHzM/RlS2loqTXUdbyboT6rgY+Lv3GXckWf6tMfk9dIduZ/7W3G4F5HuuHP9+zgYcm0V/CyJg2E8xmj/XkTwsyXcLdBzwvW1svV4vJ55b83K+RLmOOg5Yera2TY9Jzu1iINjdo40vK3Qc7Jja90xDq8Vxpidc9H6Vqy/VtQvDuei/Trn93WLE3y0XgTGhwACCCCAAAIIhCFgJfio43DHMmDAACkvt/fVdmH4s00EEEAAAQT8ChB89CuXnfX0K67r79ZLkf5fN8HH5K+ZTQwlWgo+ZkeFrSCAAAIIIIAAAggggAACCCCAAAIIIIAAAghEKUDwMUpt9oUAAggggAACpgSyEXzUA0rcTvI2E/9/98mOiQhu6DFxuUGDBkmPHj1MWTEYBBBAAAEELAkQfMxdNXoUdYQepe2XT3lMDjnqCBP/W7qgYy6+6jp3iuwZAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAIKgAwceggqyPAAIIIIAAArEVCCP4qBhdBSG7Cz666+m6GhLo378/T36M7ZnFwBFAAAEEwhYg+Bi2cOrtl0mZNNzrCD3qjxtsTPU114m/1//tN/jo9aurvS6fG0n2igACCCCAAAIIIIAAAggggAACCCCAAAIIIBBUgOBjUEHWRwABBBBAAIHYCkQRfFQcL099TPza66qqKunXr19sfRk4AggggAACYQkQfAxLNvV2NUxY0loiDfcadGLjhBgTw46pnvaYHIYk+BhtzdgbAggggAACCCCAAAIIIIAAAggggAACCCCQ7wIEH/O9whwfAggggAACCHQp4CX4qBvpavnuvt46eb1MnvroLqPbLS8vFw1A8tXXnMgIIIAAAgh0ChB8jO5s0K+2bm54JM0Pm37xhMfEsCPBx+jqwZ4QQAABBBBAAAEEEEAAAQQQQAABBBBAAAEEOgQIPnImIIAAAggggEDBCuQi+Jj4REcXPvG/uf/bHZsbguzdu7f07dtXevXqVbD14sARQAABBBBwBQg+hnsu6NMZS9tL5XHTI2l60NRxA6mbpzwmBx/dpzsmPuWxq//tHklXX1Ht9aurvS4friRbRwABBBBAAAEEEEAAAQQQQAABBBBAAAEEEAhLgOBjWLJsFwEEEEAAAQTMC2Qr+KgHmrit7p4AmSr4qOsnPgky8YmPyaFIDRZoCFKfAFlWVub8wxv85k81BogAAgggkGWBukcPpEVasrzVwtxckXPYRVIsRdLe2i6tj1uluaHZ+XdigDHxq6u7+t+uYLa+5toZWVHHCDP58bJsJttjGQQQQAABBBBAAAEEEEAAAQQQQAABBBBAAAG7AgQf7daGkSGAAAIIIIBABAJewo/dLZsu7Jh4KJl83XVi4DFx+cQnQrr7TH5KZLoQZqrj8HJsmZTFi2sm22MZBBBAAAEE8kEgSDCvu3WTf5ccPFS77pZxn9iY/FTHroKPfM11PpyNHAMCCCCAAAIIIIAAAggggAACCCCAAAIIIBBvAYKP8a4fo0cAAQQQQACBgAJeAnpewoHdBSFTBR/1MDJ96qMu293XY3f1VEmXqqtgpJfjC8jO6ggggAACCCDgQSCT0GO6r5J2d5f8tEb976m
CjH6f9qjby9YTH70GRb0u76EELIoAAggggAACCCCAAAIIIIAAAggggAACCCBgTIDgo7GCMBwEEEAAAQQQiFYgF8HHroKJqcKMqpH83xP/W+L4Uz0Z0tXM9DjTLZfu99FWj70hgAACCCCQvwKZhPgyWUaFEkOMyf9/dwFHVzfxiZDu+snbdJdN97TJVGPu6jgyPb7EfefvGcGRIYAAAggggAACCCCAAAIIIIAAAggggAACCCQKEHzkfEAAAQQQQACBghbwGuTravnunvCowKl+39U6qYKOyaFGt2juf08VpuxqHXddr8eeeKIEWbegTzgOHgEEEEAAgW4EvAb9Um0q1Vdau8ulehJjcvAxcVn9315Dj7pOd0+f7G58ycfjxcPLspyECCCAAAIIIIAAAggggAACCCCAAAIIIIAAAvEXIPgY/xpyBAgggAACCCAQUMBLiC/T4KMOqbswZKZPfUzcTqpApPt7d19d/dslSjV+L8cUkJrVEUAAAQQQQCCAQCZfeZ24+VQhw+RQYqonNyZ/HXYm6+h+u9pWujGlIvEaZPS6fIAysCoCCCCAAAIIIIAAAggggAACCCCAAAIIIICAAQGCjwaKwBAQQAABBBBAILcC2Qg+6hF4eepjckDRFUj874nb6+prsLvab1fbT7V8V/8tsSpejHJbTfaOAAIIIIBA/gl4DTyqQFdPVkwXTgwz9NjduJKr5jXI6HX5/DtLOCIEEEAAAQQQQAABBBBAAAEEEEAAAQQQQKCwBAg+Fla9OVoEEEAAAQQQSCHgJdTX3bJego86jHThR10m+euq0z3R0d1ucmgy+bD9PuXRixUnGwIIIIAAAggEE/ATeNQ9pvs66eSvvE5cJzkYme6rsJP35+VrrlON1RXzGmT0unywyrA2AggggAACCCCAAAIIIIAAAggggAACCCCAQK4FCD7mugLsHwEEEEAAAQRyLuA1zOclNJju667dg+9uueTwo66T6quyuws7dvUV116CnN0VyqthzovOABBAAAEEEDAk4CW019WyqZ7k6B5ipmHE5ECk19Cj7i/TfSUvm1wOLybptmWo1AwFAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAIEsCBB+zBMlmEEAAAQQQQCC+Al5De17CgpkGH1Wvq+Bi8tdcJ0qnCkUmV6K7r71O3m+qKnr1ie+ZwMgRyD+Bf/mXf5F/+7d/k48//lg+/PDD/DtAjgiBAhJIFwRM9fvuwpBKlxxSTBVa7O7pkKm24Zaku/F0t0zyNjMtcTqfTLfDcggggAACCCCAAAIIIIAAAggggAACCCCAAALxECD4GI86MUoEEEAAAQQQCFHAa7AvSPBRD6O7gGPiYaZaLt2THhODkF2ReXliZaptePUKsXRsGgEE0gg888wzcufOHZk7d6789a9/xQsBBGIkkEmQz+9XYStDcXHxLzQyCUHqCpk+0THd120nbytxMJkce3Ip/awTo9OBoSKAAAIIIIAAAggggAACCCCAAAIIIIAAAggkCRB85JRAAAEEEEAAgYIX8BPk8xIe7O6pj4rvNwjp5autvYw3eUxeThA/ll62z7IIIOBN4N///T9k5cqv5fe//4O8/fab3lZmaQQQCFXAb1Av3Vddpxp0uhBi8pMhuwo3dredTPfhji9IaDOTYwy1eGwcAQQQQAABBBBAAAEEEEAAAQQQQAABBBBAIOcCBB9zXgIGgAACCCCAAAIWBLwG9oI89TEbX3+tZslfYZ0uYJm4jhdzrzZets2yCCAQrsDWrbudHVRWVsj06ZPD3RlbRwCB0ASyGZJMFVDMdtAxXQhSofwEOLsC9usTWsHYMAIIIIAAAggggAACCCCAAAIIIIAAAggggEDoAgQfQydmBwgggAACCCAQBwGv4T4vwUc9/nShxEyf3uhlO7rfVF997fVYu6tfNrcVh/OEMSIQN4Ft2350hqzBx5qaSXEbPuNFoKAEshneS7Ut/W/pAonZ+hrrdPvRwhJ8LKjTm4NFAAEEEEAAAQQQQAABBBBAAAEEEEAAAQSyLkDwMeukbBABBBBAAAEE4irgNcTnJfzoNbCYaRBSrb1u261PuuNN9/u41plxI1BIAtu2dT7xsaaGJz4WUu051vwUSBeO9BImzNbXVqcLOXYVwuyqQumOMXk9r8vn55nBUSGAAAIIIIAAAggggAACCCCAAAIIIIAAAoUnQPCx8GrOESOAAAIIIIBAFwJeg35ego+6S68BRb9fiZ1qX139N5fCy7F7WZaTDQEEcivQGXys5ImPuS0Fe0fAs4CXQF+2Ao86yO6e+pj8ez/Lp9qGi+PlmIOs47kYrIAAAggggAACCCCAAAIIIIAAAggggAACCCBgToDgo7mSMCAEEEAAAQQQyJWAn0Cfl/BjqmW9hBuTXZK/xjqT7bvbSHes6X7vpUbZ3JaX/bIsAgiI8FXXnAUI2BHwE+rravTptpVpEDJ5ueLi4m7BvD7dMeynPepg01nYOQMYCQIIIIAAAggggAACCCCAAAIIIIAAAggggEA2BQg+ZlOTbSGAAAIIIIBA7AW8hvS8BB8VJ+hTH5OBU30ldjYDkO7+vLrE/kTgABDIE4Ht2390jqSyskKmTZuUJ0fFYSBQWAKZBvu6W66rAGK6pzsmSnsNPeq6YQcfM7UprDOGo0UAAQQQQAABBBBAAAEEEEAAAQQQQAABBApDgOBjYdSZo0QAAQQQQACBDAX8BPy6WifTAKLXMGSqQ0m3DV0n3bGl+306wqDrp9s+v0cAAe8CnV91XSE1NZO9b4A1EEAgNIFshfa8Bh71gNKFGJMPOt3ymQYc/Yy1uwJkyzC0IrNhBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAgdAECD6GRsuGEUAAAQQQQCCOAn7Ce0Gf+qhO6YKLmYwr06Blqv2lqlUm+4xjjRkzAoUksHXrLudw9YmP06dPKaRD51gRyFuBTMJ+mX7VtSKl21660GOqbWQahEwsUrpxpCqon3Xy9sTgwBBAAAEEEEAAAQQQQAABBBBAAAEEEEAAgQITIPhYYAXncBFAAAEEEECgewG/YT/L4Uc9Yq/jS3ee+HVKt11+jwAC2RVwg49VVfrER4KP2dVlawiEJ+An0Of1aYqZ7MNP6FFVvAYfMxlLsrafdcKrGFtGAAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQiFqA4GPU4uwPAQQQQAABBMwL+An1eQ0WZvr12OmeBNkVZqbbT7W+n+M3X1QGiECBCmzdutM58srKSp74WKDnAIedvwLpgn9envqYSimq0KPuO92xZDK+/K00R4YAAggggAACCCCAAAIIIIAAAggggAACCCCQ8j7xnTt32qFBAAEEEEAAAQQQ6BTwG/zLRvgx06+r9jLGICHIxPPCyz45nxBAwIYAX3Vtow6MAoEgAl5DgUEDjzrWbIYeU20v0cPr8bnr+l0vSC1YFwEEEEAAAQQQQAABBBBAAAEEEEAAAQQQQMCOAE98tFMLRoIAAggggAACRgT8Bvy8Bh/1cIMEHb2OM93y6X4fpDxhbjvIuFgXgX
wX6Pyq60qpqZmc74fL8SFgViDMkF66baf7fTKa39CjbsfrV1x3tU4mhfR6XJlsk2UQQAABBBBAAAEEEEAAAQQQQAABBBBAAAEE4iNA8DE+tWKkCCCAAAIIIBCRQJCQXrbCj3qoQUKR6agyPcZMl0u3P36PAAK5EeCJj7lxZ68IhCHgJejnZVl3rJmGFr08UTLdONL9vjvHIOuGUR+2iQACCCCAAAIIIIAAAggggAACCCCAAAIIIBCtAMHHaL3ZGwIIIIAAAgjERMBv4C/del6/djrM8GNiKdKNO9OyZWs7me6P5RBAoHuBzic+VkhNzRS4EEDAmEC2wntBtpONIGOmoclkfr/j9ruesfIzHAQQQAABBBBAAAEEEEAAAQQQQAABBBBAAIEAAgQfA+CxKgIIIIAAAgjkr0CQAF+6dcMKP2o10u3bS8WyuS0v+2VZBBDInsCWLTudjVVVVcr06QQfsyfLlhDInUA2Q39eAoteApKqk26c6X7flbDf9XJXMfaMAAIIIIAAAggggAACCCCAAAIIIIAAAgggEIYAwccwVNkmAggggAACCOSFQJDgX7p1sxF+VGSv28lWYdIdX7b2w3YQQCCYAF91HcyPtRGIUiCqQF82A4xet+V6+j1Wv+tFWUf2hQACCCCAAAIIIIAAAggggAACCCCAAAIIIBCNAMHHaJzZCwIIIIAAAgjEUCBIuC+Tdf2EFrO5TgxLwpARQMCjQOcTHytk+vSpHtdmcQQQyCeB7kKDXgOMfrblWgYJLwZZN59qybEggAACCCCAAAIIIIAAAggggAACCCCAAAIIiBB85CxAAAEEEEAAAQS6EcgkwNjV6pmsm+0gY7p9pvs9JwMCCOSXAF91nV/15GgQ8CKQSUjQa+BR90/o0UsVWBYBBBBAAAEEEEAAAQQQQAABBBBAAAEEEEAgLIGiS5cutzc0NErv3uXSq1evbm9ghzWIdNu9f79OHj16LFVVFVJWVpZu8ch/39bWJvfu3Zf2dpHq6kopLi6OfAzpdtjc3CwPH9ZLjx49pG/fPibr/ODBQ2lufiT9+vVxxpnJmzTpjjvbv79z5560trbKgAHVJuv8+PFjqat7KKWlJVJR0c+kofabjp7TS3r1Kjc5xrj0HD2/q6roOX5f5x09p1n69etLz/GJWCg9J2hQMN362nPUsrKyQkpLS5+oRnfrp9t20N/rYNra2kXHWFTU0XMs/uhrub6+4ed5jsUx6jxMx9m3b1/p2bOHxSHK3bv3f57nWJyHPX7cItq7S0qKndeLxZ/GRp3nNP08z7Ewxq1bdzrDqKyslOnTpzivZ7XU17POGa39aM/Rays9B/XayuIPPSc7VaHnBHdsamr6ueeUl/fM+NoqrOBiqu12znMqpUcP//dzwvq7pNf39+7pPKfIuedk8X5OU5POc+JwP6fj2qpnz57BT+4QtmD9fo7e99R5juX7OTrfbmzsnOeE9boMUn73fo7OIVJdWwXZdjbWjVPP0ddynz69M/7bkg2fTLfB/ZxMpbpeLk49p0+fXlJebvseMj3H/znpznMs9xx9v+XRI+4h+6+yyKNHj+TBg/pYzHMs9xy9btF7yNZ7TnFxkXPfzvK1FT0nyCta5M6du9La2mb2vXK355SVlUi/fjbfK3evrXS+7eV+TrDKeVs7Tj1H73NbvD7VeY6+L6Q1tnptVVf3wPk7bfW9cs1f3b17VzSP1b+/zXwOPcdbb+lq6aA9p2jjxi3t+/cflhkzpjlvhFm8KbRmzQa5fPm6LF/+ogwaNNBc49I3OL/+erUTfFyx4jWTN3nPnj0v27b9KKNHj5B58+aYDJBu27ZbTp48I4sWzZMxY0ZKSYm9N2I/++xruXfvgXzwwTtOUNjaz40bt2T9+s3ORcULLyw0eS4eOHBUDhyw3XNWr94gV65ck+XLX5ZBgwaY6zkaHNWeoyGkt9+23XP0tfzss7NN9pytW3fJqVNnZfHieTJ6tO2e8w//8K5zk9faj/acdes2O28Sv/DCIpNhrgMHjoj+M3NmjdTUTPY9z0kXIExXm+7W13nOlSvX5Y03XnYm7qku0NLtP+jvE8efvC19c/Obb9aK3rTSMeqHE6z9nD17QXbu3OvMH55++infdQ7zuHQedubMOVm06FkZNWqEyZt/X3yxSu7ffyC/+90Kk3OImzdvy4YNW52es3Tpc4ECNGHV+tCho3Lw4DGZOXOqTJ062cR8Vv/e6Y/OD/V6b82ajXLt2o2fe05YFn63q/OcVavWOT1n+fKXTJ6LZ86cl1279hnvObtFx9nRc0Y6ntZ+Pv98lejNNas9R+c5Gzduc3rO888/Z24+q/OFgweP/NRzamTatElpe06QwKOeP37W12urq1evy5tvvuK8KeL3RrTf9dKd93oD+ptvvndC9TpGi6G906fPyY4de2TcuNHOPMfiB4I3b97pzHOWLp3/U8+x94HgTz75ygkWfvCBzWur69dvyPr1W6V//yqn51icc+/bd1h0rjNr1nSZOnWSyTn3t9+uc+Y5b731SpfXVun6Qpi/d3uOBlzfeGOZyZ6j90n02mr8+NEyd67VnrPDmecsXbrA7LWV9hyt9+9+947J+zn6OtFrqwEDqn66trJ3nb9v3yE5dOiYzJ5dK1OmTDTZc1atWivXr9+Ut99+Vaqrq3zPc8LqO/p3T8cYj54zRubOnWlynrNp03bR+05We47ex/v0U+05DWZ7ztWrN2TjRts9Z+/eg3L48HF6ToCGpCHhb79dK2Vlpc57axavrfQ9aL2fM2HCGJkzx3bP0WuCkSOHm7uHrB9Y/uyzr6S+vlF++9sVJuc5eg9Ce87Agf1lyZIFJq+t3J4zZ06tTJ5sc56j90r03phmXywGC/Weol7/We45J06ckd27teeMlTlzZpic5/zwwzY5d+6iPP/8Qhk5cpjBntMmms/Rh0385jdvm+w5+v7uDz9slUGD+svixTZ7zp49B+TIkRPOedjRc+zlsFau7Og577zjr+cUbdu2s/3YsZMyadI4mTRpgskX3ObNO+TGjZuyaNF854+UtU+JaFp73bpNohcYL7642HmKnbWfixeviP4RHTZssMycOd1kOEVfcOfPX3RecCNHjkj7pk0ujL/7br08ePBAXn/9ZSeZb+3n9u27ogHSvn17y4IFT5u8sDh27JQcP649Z7zzj803bTp6zuLF82XAgAHm3ijWEJIGXDVs/eKLi4z2nMuyd+8hGT68o+dYfNOms+fMdC4gLYat3Z6jNwp697bYc+44oXp9kvCCBXON9pyTcvz4KZk8ebxMnBis56QLF3b3N6G7dfUG6s2bt5yLcL1R3t08J90Y0v1ex5jJMonHok+T0jdE9EdvuGgI1+s2wv57eenSVSdUP3ToEJk5c5rJN0T0TZsLFy7J7NkzZMQIexeQWiMNxOmbc6+//qLJC0h9UtPOnR0959ln55j826I3M06cOOXMcSZOHGfib0tn8FGf+DhZ9KuvNUS6ZMl8qa7WEFLYr1Bv29drK7056fYcizfKL1264oTqhw0b4nyA0OKHBzt7zkwZM
WKouWvoX/acl5xPPlv70ScY7Ny556eeMzfSsHWmIT+d45w4cVomT54gEyaMe+KmVabbSbdckN+785ylSxf6Dj6m23+Qc0fD1hs2bHGCCnqT1+L9nPPnL4l+YFlfy9pzLF5b/fjjfmeeoyEpnedYvLbSN0QePnwoy5cvc57MbO1H/zbv2PGj8+0d+oFli3//jh498VPPmejMc/QNJms/Gli/deuW83ru6kNluRyzPtlF5zl6zedeW+VyPKn2rfdmtefom161tVZ7zj65cOGyzJ07y+mNVnuOPq1X7yHb7jkVMm/ebKM957jo9ZWGHnWeY7PnbJVbt27L888vcoLrYc5Z/PQK7Tl6P0c/4GG/5wyX2tqpJuc5u3fvFX1/7emnZ8nw4fZ6jt6j03lOQ0OD2Z6jb2LrtZV+IFMfkmBxnnPkyHHnwSxTpkySiRPHmrzO19fz7du3nQcQWAxb6z1FnYvZ7jkXZP/+I877QfZ7zmwZPnyIuXlOR89ZL42NDfLaay89Mc/pafABHn7+hrIOAggggEC0At99u1Zu377j5O38hK2Lbt682a5fdaYTEYsBJOXUR3e3tLQ4k2FroUc3QKBj1Mc6W7xJrmPUx7/qGEWKTIYedYxaYz0X9euvLN6w0jHqo2pbWlpN3rD6ZZ3F5MWjjlG/1oeeE+wPhV5YxKXn6A0/i2/MxavntJgMPRZqzwka+Eu1fuc8p9wJWqfbR7rfux0mW8vpdvRvtPZvi08ededi+jda54lW57Mdf/8eOz3R4nw2cc5t8anWiXNuPSct3iSoThOkAAAgAElEQVR3+6J+bZx+Ys5KGE6DjvqjX3VdWzvl5zm31a9RoecEmye6a9Nzgju6c279sFHPnvaegpQ4FystLfP8Sd1M3phPt0y633f+bemYQ/h98mgm+/Fbca6t/Mr9cj3u5wR37LxvF4f7OSUmA0huz9F7Tnp/Nsze4bficeg5OofQ1zT3c/xWuWM97iEH8+u8h/xYSkpK6Tk+ObXn6PVpe3ub2fs59ByfxU1ajZ4T3NG9hqbn+Lfs6DmPnHvbVu8h03P81zdxze56DsHH7BizFQQQQKDQBB4+eOBkiPzezym6c+dOe6GhcbwIIIAAAggggIBfgUwDhV1tP9P10y2X7veJ+/eybDbW82vLegggkH0B94mPFRUVTvCRHwQQiLdAkCBPputmslwmy6h0pst1VZWg68e72oweAQQQQAABBBBAAAEEEEAAgfgIEHyMT60YKQIIIGBJoLmpKdBwCD4G4mNlBBBAAAEEEChEAb9BQtfKy/rplk33++T6eF0+aH2j3l/Q8bI+AvkmkPxV1/l2fBwPAnETiDLI53VfmSyfyTJuTbwsm6qOQdeP27nBeBFAAAEEEEAAAQQQQAABBBCIswDBxzhXj7EjgAACuRMg+Jg7e/aMAAIIIIAAAgUqkI0wn5dtZLpspsslls3POgVadg4bgVgKJH/VdSwPgkEjgEBagSAhwUzXzXQ5HayXZVMdXND104KxAAIIIIAAAggggAACCCCAAAIIZFWA4GNWOdkYAgggUDACBB8LptQcKAIIIIAAAghYEshGYNDrNjJdPtPl0nlmazvp9sPvEUAgPAGCj+HZsmUEciGQrUCgl+14WVZNvC6f7Bh0/VzUhX0igAACCCCAAAIIIIAAAgggUOgCBB8L/Qzg+BFAAAF/AgQf/bmxFgIIIIAAAgggEEggm6FAr9vysryXZQOBZLiytfFkOGwWQyC2Alu27HLGXlVVIdOnT4ntcTBwBOIqYCnE53UsYS+fqqZe9xnX84JxI4AAAggggAACCCCAAAIIIJBvAgQf862iHA8CCCAQjQDBx2ic2QsCCCCAAAIIIPCEQDZDfH625WcdPQi/63EKIIBA/AQ2b97pDLqqqlJqawk+xq+CjBgB/wJ+Q4R+1vOzTvKRZWMb/rVYEwEEEEAAAQQQQAABBBBAAAEEgggQfAyix7oIIIBA4QoQfCzc2nPkCCCAAAIIIGBAINshQr/b87teImE2tmGgJAwBAQQSBAg+cjogkP8C2QgM+t2G3/WSq5Kt7eR/tTlCBBBAAAEEEEAAAQQQQAABBGwKEHy0WRdGhQACCFgXIPhovUKMDwEEEEAAAQTyXiDbgcEg2wuyrpdCRbUfL2NiWQQQeFKA4CNnBQLxFIgiCBh0H0HXdyuTre3Es9KMGgEEEEAAAQQQQAABBBBAAIH8ECD4mB915CgQQACBqAUIPkYtzv4QQAABBBBAAIEUAmEEAYNuM+j6FBoBBOIvsHnzDucgKisrZcaMqfE/II4AAQQCCWQjZJiNbSQeRLa3FwiIlRFAAAEEEEAAAQQQQAABBBBAwJcAwUdfbKyEAAIIFLwAwceCPwUAQAABBBBAAAErAmEEDbO9zWxvz4o940AAgdQCPPGRMwOBwhbIZqgwm9tyqxLGNgu74hw9AggggAACCCCAAAIIIIAAArkRIPiYG3f2igACCMRdgOBj3CvI+BFAAAEEEEAgrwTCChaGtV0XP+zt51WRORgEYiRA8DFGxWKoCAQQCDNAGMa2w9hmAD5WRQABBBBAAAEEEEAAAQQQQACBgAIEHwMCsjoCCCBQoAIEHwu08Bw2AggggAACCNgVCDtEGPb2M5W1Mo5Mx8tyCBSiAMHHQqw6xxx3AQuhwDDHEOa24157xo8AAggggAACCCCAAAIIIIBAXAUIPsa1cowbAQQQyK0Awcfc+rN3BBBAAAEEEEAgpUAUocAo9kF5EUAg3gKbNu1wDqCqqlJmzJga74Nh9AggEKpAFIHEKPYRKhIbRwABBBBAAAEEEEAAAQQQQACBlAIEHzkxEEAAAQT8CBB89KPGOggggAACCCCAQAQCUQYTo9xXBHTsAgEEsiRA8DFLkGwGgTwWiCqMGNV+8rhUHBoCCCCAAAIIIIAAAggggAACZgUIPpotDQNDAAEETAsQfDRdHgaHAAIIIIAAAoUukKtAYq72W+j15vgRsCaweXPnEx9ra3nio7X6MB4EciUQdQgx6v3lypX9IoAAAggggAACCCCAAAIIIFCoAgQfC7XyHDcCCCAQTIDgYzA/1kYAAQQQQAABBCIRsBJEtDKOSNDZCQIIyObNOx0F/arr2topiCCAQIEJWAgcWhhDgZWdw0UAAQQQQAABBBBAAAEEEEAgcgGvwceysjIpKy2NfJzsEAEEEEAgPIHHLS3y+PFjTzsg+OiJi4URQAABBBBAAIHcC1gOH1oeW+4rxwgQiJ8Awcf41YwRI+BFwHKo0PLYvBizLAIIIIAAAggggAACCCCAAAIIpBcg+JjeiCUQQACBfBcg+JjvFeb4EEAAAQQQQACBBIF8CBnmwzFwUiKQzwIEH/O5uhxbPgrkQ1gwH44hH88tjgkBBBBAAAEEEEAAAQQQQACBMAUIPoapy7YRQACBeAgQfIxHnRglAggggAACCCCQVQHCg1nlZGMIIJAgsGnTDuf/06+6njFjKjYIIIBAaAIEHkOjZcMIIIAAAggggAACCCCAAAIImBcg+Gi+RAwQAQQQCF2A4GPoxOwAAQQQ
QAABBBCwKUD40WZdGBUCcRcg+Bj3CjJ+BOIhQOgxHnVilAgggAACCCCAAAIIIIAAAgiEJUDwMSxZtosAAgjER4DgY3xqxUgRQAABBBBAAIFQBAhAhsLKRhEoWAGCjwVbeg4cgUgECDxGwsxOEEAAAQQQQAABBBBAAAEEEDAvQPDRfIkYIAIIIBC6AMHH0InZAQIIIIAAAgggEB8BQpDxqRUjRcCqAMFHq5VhXAjEV4CwY3xrx8gRQAABBBBAAAEEEEAAAQQQCEuA4GNYsmwXAQQQiI8Awcf41IqRIoAAAggggAACkQkQgIyMmh0hkHcCBB/zrqQcEAI5EyDwmDN6dowAAggggAACCCCAAAIIIICAeQGCj+ZLxAARQACB0AUIPoZOzA4QQAABBBBAAIH4ChCAjG/tGDkCuRIg+JgrefaLQP4IEHjMn1pyJAgggAACCCCAAAIIIIAAAgiEJUDwMSxZtosAAgjER4DgY3xqxUgRQAABBBBAAAETAoQhTZSBQSBgVoDgo9nSMDAETAoQcjRZFgaFAAIIIIAAAggggAACCCCAgHkBgo/mS8QAEUAAgdAFCD6GTswOEEAAAQQQQACB/BUgBJm/teXIEPAr4AYfq6srpbZ2qt/NsB4CCOSxAGHHPC4uh4YAAggggAACCCCAAAIIIIBARAIEHyOCZjcIIICAYQGCj4aLw9AQQAABBBBAAIE4CxCKjHP1GDsC/gU2b97prFxVVUHw0T8jayIQewHCjbEvIQeAAAIIIIAAAggggAACCCCAgGkBgo+my8PgEEAAgUgECD5GwsxOEEAAAQQQQAABBAhCcg4gUBgCBB8Lo84cJQLJAgQdOScQQAABBBBAAAEEEEAAAQQQQCBKAYKPUWpntq8//elPmS3IUikFovKLaj+UuUMA73DPBIKP4fqydQQQQAABBBBAAIEsChCezCImm0IgJAGCjyHBslkEQhQgtBgiLptGAAEEEEAAAQQQQAABBBBAAIFQBAg+hsIaaKMa8CLk5Z8wKr+o9uNfIn/WxDr8WhJ8DN+YPSCAAAIIIIAAAgiEJEAQMiRYNotAAAGCjwHwWBWBiAQIOkYEzW4QQAABBBBAAAEEEEAAAQQQQCA0AYKPodH63jAhL990zopR+EWxj2AK+bU23uHXk+Bj+MbsAQEEEEAAAQQQQAABBBAoGIFNm3Y4x1pVVSEzZkwrmOPmQBFAAAEEEEAAAQQQQAABBBBAAAEEEEAgOgGCj9FZZ7onQl6ZSqVeLgq/KPYRTCG/1sY7/HoSfAzfmD0ggAACCCCAAAIIIIAAAgUjQPCxYErNgSKAAAIIIIAAAggggAACCCCAAAIIIJAzAYKPOaPvcseEvILVJAq/KPYRTCG/1sY7/HoSfAzfmD0ggAACCCCAAAIIIIAAAgUjQPCxYErNgSKAAAIIIIAAAggggAACCCCAAAIIIJAzAYKPOaPvcseEvILVJAq/KPYRTCG/1sY7/HoSfAzfmD0ggAACCCCAAAIIIIAAAgUjQPCxYErNgSKAAAIIIIAAAggggAACCCCAAAIIIJAzAYKPOaPvcseEvILVJAq/KPYRTCG/1sY7/HoSfAzfmD0ggAACCCCAAAIIIIAAAgUjQPCxYErNgSKAAAIIIIAAAggggAACCCCAAAIIIJAzAYKPOaPvcseEvILVJAq/KPYRTCG/1sY7/HoSfAzfmD0ggAACCCCAAAIIIIAAAgUjQPCxYErNgSKAAAIIIIAAAggggAACCCCAAAIIIJAzAYKPOaPvcseEvILVJAq/KPYRTCG/1sY7/HoSfAzfmD0ggAACCCCAAAIIIIAAAgUjQPCxYErNgSKAAAIIIIAAAggggAACCCCAAAIIIJAzAYKPOaPvcseEvILVJAq/KPYRTCG/1sY7/HrmJPh47Njx9uvXb8ngwQNk0KCBUlxcHP6RetzDmTPn5cGDhzJx4jjp3buXFBUVedxCuIs/fvxYTp06I+3tIlOmTJSSkpJwd+hj6/fu3ZdLl65IRUWFjBw5zGSdL168Infv3pPRo0dIZWWFuTor+9GjJ6W5uVlqa6dKaWmpj0qEu0p9fYOcO3dBysvLZezYUSbPxRs3bor2nCFDBsrAgQNMnotuz5k0aZz06mWv5zx69FhOnz7jnEyTJ9vsOXfv3pfLl684r+URI+g5fl/59By/cp3rxaHnnD59Xh4+fCiTJo2XXr3Kzf39056j8xydf02ePMHk35bOnlMpI0YMNfm35eLFy6LjHD16pFRW9jNX5455zglpbn5keJ5TL+fOXTQ9z7l+/YbcuHHb1DwnOfh4+vQ5efiw3nDPeSSnTp013nPuyeXLV6Wy0n7PGTNmpFRUWO45j6W2dorJayt9nZw/T88JOhtze87kyeOd/m3tfs6jR/HpOVVVlTJ8uM15zoULl0XvO1nvOTqvnT7dds/RexDqaPHeYuc8Z5AMHNjf5Jy7s+dMkPLynuZ6js61T5+2Pc+5c+eeXLlyVeg5wf4C6rUVPSeY4bVrN+TmTb22sttz9LpF78frvRLbPadYdC5m8W/LnTt35cqVa1JVVSXDhw8x+bflwoVLcu9enYwdO1L69bN3bdXe3i7Hjp003XP0/V11tDzPcXvO0KGDZMAAm/Mct+dMmaI9pzxYkw1hbX3vVOdimjPQ+9yWe051dZUMG2az55w/f0nu37ffcx4/bpGamslP3M8h+BjCiyvgJgl5BQOMwi+KfQRTyK+18Q6/nn6Cj4cOHpWGhgYnb6fXVl5/itav39S+d+9BmTmzRmbMmCZlZWVetxH68qtWrXPeWHrzzWUyeLC9cGZDQ6N88cUqx+Hdd5c7oQVrPxqq2LJlh3PzdP78p6Vnzx7WhiibN++QEyfOyOLF82T8+DEmJ8V//evfnYvc3//+PenTp485Qw34rF69UaqqKmTZsqXSs6f3phD2Qe3bd0j0n5kzp8uMGVON9py1cvnyNXnrrWUmA+F6Q+3vf/9WNAP+zjtWe8452bJlp4wZM0rmz59rsudoEOTkydOyZMkCGTdutOGec18+/PB96d27d9gvT8/bd3tOdXWlvPzyEtM956mnpjthLovznG++Weu8sfT226/IwIH25jluzykuLpIVK143Oc/RG39bt+5yQv/ac3r0sDfP2bRpu5w8eUaWLn3O7IcT/u///bvcv2+35+ib7WvW/CB6c/Lllxeb7Dl79x6S/fsPyaxZtU6wwkLPSQ4+rlz5vVy9ek3efvvVn3qOrQ+Vadjsyy+/c26Ur1jxmsmec/LkWdm2bZczf5g3b47JnvPDD9ucAKnlnvNf//WF1NU9kA8//JXJeY6+8fX99z9I//5V8tJLNnvOnj0H5cCBw6Z6TvKEcuXKNXL16nWn5+iHba0FH92eU1JSLG+/bbXnnJFt23ab7jkbN25zwlzPP7/QbGhPe46+6a73cyxeW7k9Z8CAannxxUUm5zl79hyQAweOyOzZM6SmRuc59j4Q/PXXa+TatevOHEI/bGut5+g5+NVXq6W0tETeeutVk/McvTe7fftu597ss8/ONjnP2bhxqxOseOG
Fhc4HyywGK7Tn6Acc/+EfbPYc/du8du0miUPPmTNnhkybZrXnrBbt33qvRAPh1npOXd1D+frrjp6jczGLQakTJ07L9u0/yoQJY+WZZ2aZ7DkbNmyVM2fs9hwNPur9HJ3XdvScXp7v8Ya9goZb163b7LxOtHdbfN9qz579cuDAUZkzZ6ZMmzbZ5DxH5xDac95993UnnGmv5zwQnYuVlXXMcyz2nOPHT8uOHdZ7zhbR9/RfemmhjBplb57T1tYu+l55fX29fPDBkz2H4GPYHdX79gl5eTdLXCMKvyj2EUwhv9bGO/x6+gk+/u2vf5fr12/Ke+8tl/79qz3Pc4oOHz7ark/lGjJksPPJcYs3Co4dO+V8crymZpLJT1TpEwIOHz4u7e1tUls7zeTF2e3bd5wnAepTSTQUYPFphefOnXeekDN27GiznxzXG7waANEbLhYvzvQGqoYqNNiqnzS18GZ7cuvUi1wN+AwZMsT5FKfNnnPSCbjqp5X69evrubGG/eeCnpMdYbfnaGjB6qc49Y3s+vpG8z2nvLyHTJpku+cMHTrE+RQnPcf760efSnLkiM5z2p3wqMVQ4a1bd+T8+QvOEwL0Qx4W5zlnz553nlYxbtwY580li09Z37//sOgHeqzPc6z3HP3AlvYbKz0nOfioTxPWT45bnefoEwKOHDlhvOfcdp4ESM/x/jclcQ3rPUdDmRoe1U+Y6tMqLF5bab/R66thw4bKsGGDTc5z9IlX9+8/cMLgffv2MXdtRc8J9jp213bnORqU0puTzHO8u7o9p1evnjJxou2eo/ePhw6l53ivsjjfIqPzHP3Rvmjx2kqvWS5cuCjV1dXOt/JYvLbSb2q5deu2jB8/1vmAgs2ec0gaG5tl9uxak/eQO3rOGefpa/otW5bnOfQcP92mYx16jn+7xDXdnqPhTP0gprWeo/fr9NqqqUl7jr5vZe/DwHV1dc61VUfP0XmOvQ9P6DfmaShcv71KnzRr8R7y0aPH5f79h8792T59epu9thIp+mmeY+9BSzdv3nKePqrXLKNGWZ3nnBO91x3XnkPwMTt/e7K5FUJewTSj8ItiH8EU8mttvMOvp5/g4949+0U/uOV3nlN069at9tbWVmeybnEip+w6Pv1HL8CtfYLFPS1aWlqkra3N5A0rHaOOTQ3Vz+INK3eM6qjjs3bx6NZZDVtaWk1ePOoY9SJXx6j/tnjDKvFcpOf4/6Pi1pme498wXj2nxeRNcrfnaE8UoecEORutz3Pi0HN0jDqHsDzPcetseZ6jhq2tbcxzAryg3Tm3pXlOcvDRnc/26GHz2ioOPYdrqwAvkoRVubYK7qjnos7F9GmFlu/n6BjpOf7rTc/xb5e4Zsc8R+/n2PtmDK6tslPjONxD7pzntDt90eJPHHoO11bBz5yOa2i9nyMmA0id9+2Y5wSpNj0niF7nuvSc4I6dPcf2PWSurYLVmp4TzC/xfWjtO3G9h0zwMTvnQTa3QsgrmGYUflHsI5hCfq2Nd/j19BN8bKivd65R/d5DLrpz5057+IfGHhBAAAEEEEAAAQQQQAABBKIWSA4+Rr1/9ocAAggggAACCCCAAAIIIIAAAggggAAC+S9A8NFejTXkpT/uv+2N0O6IogrIRbUfu9LRjgzv8L39BB+bm5oCDYzgYyA+VkYAAQQQQAABBBBAAAEE7AoQfLRbG0aGAAIIIIAAAggggAACCCCAAAIIIIBAvggQfLRbSYKP3msTlRlBPO+1CbIG3kH0MluX4GNmTiyFAAIIIIAAAggggAACCCCQgQDBxwyQWAQBBBBAAAEEEEAAAQQQQAABBBBAAAEEAgkQfAzEx8oFKkAQL9rC4x2+N8HH8I3ZAwIIIIAAAggggAACCCBQMAIEHwum1BwoAggggAACCCCAAAIIIIAAAggggAACORMg+JgzenYcYwGCeNEWD+/wvQk+hm/MHhBAAAEEEEAAAQQQQACBghEg+FgwpeZAEUAAAQQQQAABBBBAAAEEEEAAAQQQyJkAwcec0bPjGAsQxIu2eHiH703wMXxj9oAAAggggAACCCCAAAIIFIwAwceCKTUHigACCCCAAAIIIIAAAggggAACCCCAQM4ECD7mjJ4dx1iAIF60xcM7fG+Cj+EbswcEEEAAAQQQQAABBBBAoGAECD4WTKk5UAQQQAABBBBAAAEEEEAAAQQQQAABBHImQPAxZ/TsOMYCBPGiLR7e4XsTfAzfmD0ggAACCCCAAAIIIIAAAgUjQPCxYErNgSKAAAIIIIAAAggggAACCCCAAAIIIJAzAYKPOaNnxzEWIIgXbfHwDt+b4GP4xuwBAQQQQAABBBBAAAEEECgYAYKPBVNqDhQBBBBAAAEEEEAAAQQQQAABBBBAAIGcCRB8zBk9O46xAEG8aIuHd/jeBB/DN2YPCCCAAAIIIIAAAggggEDBCBB8LJhSc6AIIIAAAggggAACCCCAAAIIIIAAAgjkTIDgY87o2XGMBQjiRVs8vMP3JvgYvjF7QAABBBBAAAEEEEAAAQQKRoDgY8GUmgNFAAEEEEAAAQQQQAABBBBAAAEEEEAgZwIEH3NGz45jLEAQL9ri4R2+N8HH8I3ZAwIIIIAAAggggAACCCBQMAIEHwum1BwoAggggAACCCCAAAIIIIAAAggggAACORMg+JgzenYcYwGCeNEWD+/wvQk+hm/MHhBAAAEEEEAAAQQQQACBghEg+FgwpeZAEUAAAQQQQAABBBBAAAEEEEAAAQQQyJkAwcec0bPjGAsQxIu2eHiH703wMXxj9oAAAggggAACCCCAAAIIFIwAwceCKTUHigACCCCAAAIIIIAAAggggAACCCCAQM4ECD7mjJ4dx1iAIF60xcM7fG+Cj+EbswcEEEAAAQQQQAABBBBAoGAECD4WTKk5UAQQQAABBBBAAAEEEEAAAQQQQAABBHImQPAxZ/TsOMYCBPGiLR7e4XsTfAzfmD0ggAACCCCAAAIIIIAAAgUjQPCxYErNgSKAAAIIIIAAAggggAACCCCAAAIIIJAzAYKPOaNnxzEWIIgXbfHwDt+b4GP4xuwBAQQQQAABBBBAAAEEECgYAYKPBVNqDhQBBBBAAAEEEEAAAQQQQAABBBBAAIGcCRB8zBk9O465AGG86AqIdfjWBB/DN2YPCCCAAAIIIIAAAggggEDBCBB8LJhSc6AIIIAAAggggAACCCCAAAIIIIAAAgjkTIDgY87o2XHMBTSMx090AniHa03wMVxfto4AAggggAACCCCAAAIIFJQAwceCKjcHiwACCCCAAAIIIIAAAggggAACCCCAQE4ECD7mhJ2dIoAAAqYECD6aKgeDQQABBBBAAAEEEEAAAQTiLUDwMd71Y/QIIIAAAggggAACCCCAAAIIIIAAAgjEQYDgYxyqxBgRQACBcAUIPobry9YRQAABBBBAAAEEEEAAgYISIPhYUOXmYBFAAAEEEEAAAQQQQAABBBBAAAEEEMiJAMHHnLCzUwQQQMCUAMFHU+VgMAgggAACCCCAAAIIIIBAvAUIPsa7foweAQQQQAABBBBAAA
EEEEAAAQQQQACBOAgQfIxDlRgjAgggEK4Awcdwfdk6AggggAACCCCAAAIIIFBQAgQfC6rcHCwCCCCAAAIIIIAAAggggAACCCCAAAI5ESD4mBN2dooAAgiYEiD4aKocDAYBBBBAAAEEEEAAAQQQiLcAwcd414/RI4AAAggggAACCCCAAAIIIIAAAgggEAcBgo92q/SnP/3J7uCMjiwXZrnYp1H+J4aFTVwqJULwMT61YqQIIIAAAggggAACCCCAgHkBgo/mS8QAEUAAAQQQQAABBBBAAAEEEEAAAQQQiL0AwUd7JXTDYoTGvNdGzaJyo07p64NReiMrSxB8tFIJxoEAAggggAACCCCAAAII5IEAwcc8KCKHgAACCCCAAAIIIIAAAggggAACCCCAgHEBgo/2ChRleM/e0QcfUVR+Ue0nuEhut4BTbv0z3TvBx0ylWA4BBBBAAAEEEEAAAQQQQCCtAMHHtEQsgAACCCCAAAIIIIAAAggggAACCCCAAAIBBQg+BgQMYXWCYsFQo/CLYh/BFGytjZeteqQaDcFH+zVihAgggAACCCCAAAIIIIBAbAQIPsamVAwUAQQQQAABBBBAAAEEEEAAAQQQQACB2AoQfLRXOkJiwWoShV8U+wimYGttvGzVI9VoCD7arxEjRAABBBBAAAEEEEAAAQRiI0DwMTalYqAIIIAAAggggAACCCCAAAIIIIAAAgjEVoDgo73SERILVpMo/KLYRzAFW2vjZaseqUZD8NF+jRghAggggAACCCCAAAIIIBAbAYKPsSkVA0UAAQQQQAABBBBAAAEEEEAAAQQQQCC2AgQf7ZWOkFiwmkThF8U+ginYWhsvW/VINRqCj/ZrxAgRQAABBBBAAAEEEEAAgdgIEHyMTakYKAIIIIAAAggggAACCCCAAAIIIIAAArEVIPhor3SExILVJAq/KPYRTMHW2njZqkeq0RB8tF8jRogAAggggAACCCCAAAIIxEaA4GNsSsVAEUAAAQQQQAABBBBAAAEEEEAAAQQQiK0AwUd7pSMkFqwmUfhFsY9gCrbWxstWPVKNhuCj/U1u1R8AACAASURBVBoxQgQQQAABBBBAAAEEEEAgNgIEH2NTKgaKAAIIIIAAAggggAACCCCAAAIIIIBAbAUIPtorHSGxYDWJwi+KfQRTsLU2XrbqkWo0BB/t14gRIoAAAggggAACCCCAAAKxESD4GJtSMVAEEEAAAQQQQAABBBBAAAEEEEAAAQRiK0Dw0V7pCIkFq0kUflHsI5iCrbXxslWPVKMh+Gi/RowQAQQQQAABBBBAAAEEEIiNAMHH2JSKgSKAAAIIIIAAAggggAACCCCAAAIIIBBbAYKP9kpHSCxYTaLwi2IfwRRsrY2XrXqkGo2J4GN7e7t9qQAjLCoqCrA2qyKAAAIIIIAAAggggAAC8REg+BifWjFSBBBAAAEEEEAAAQQQQAABBBBAAAEE4ipA8NFe5QiJBatJFH5R7COYgq218bJVj1SjyWnwMTHwmK/hx8TQIwFI+y8IRogAAggggAACCCCAAALBBAg+BvNjbQQQQAABBBBAAAEEEEAAAQQQQAABBBBIL0DwMb1R1EsQEgsmHoVfFPsIpmBrbbxs1SPVaHISfNy1a0/76dPnZNy40TJ27CgpLi52xmYp/Lh79z65efOOLFgwVyorK8RPaNFdR/+d+L+zcVo0NzfLli07nU0tXjxfysrKsrHZrG7jypXrcvjwURkyZLDU1EyW0tLSrG4/Gxs7dOioXLp0VWbOrJGhQwf/fC5mY9vZ2saGDVulvr5eXn55ifTs2TNbm83adu7evS979uyXPn36yNNPP2Wyztpv9J8JE8bI2LGjpaSkJGvHn60N7dq1R27evCsLFz4jFRX9fPWcbI0l1Xaamppl69adog+wXbTIas+5JocPH5OhQ4fItGmTTJ6LBw8elcuXr8pTT02XIUMGme45y5YtlR49eoR5Wvnatttz+vbtK3PnzjRZ51OnOnrOxIljnXmOxZ6zc+ceuXUrDj2nSBYtmmdynnP58jU5cuSYDBumPWeyyTq7PWfWrOkyeLDVnrNF6usbxG7PuSd79hyQfv36ypw5VnvOWTl9+rypnpMcfNyxY4/cvn1XFi16Rvr1szfPaWxskm3bdklxcZEsXGi151yVI0eOm+45Bw4clStXrsqsWbUyePBAo/OczVJf32i259y5c0/27rXdc06ePCtnzpyXSZPG/eJ+jq+JXUgrdfScO7Jo0bPGe06xLFz4rNF5Thx6zhG5cuVaDHpOkyxbtsTktdWdO3edeU5lZT+ZPdvmPOfkyTNOz5k8ebyMGdN5Dzmk9uFrs509Z54zZ/RzD9nXjjNcqXOeY7fn6L3Zo0ePy/DhQ2TqVJvXVgcOdPSc2bNrZdAgu/OchoYm5x6yxfs5ej2g8xx9r2X27Bkm7+fEoeds3/6jaP/WeyUWe05DQ6Ns375biotLnPvcFt+3unjxihw7dkKGDx8qU6dOMnk/Z//+w3L16nWzPUffy92wYYs0NjbLyy8vNtpz7sjevQdN95wTJ07L2bMXZMqU8TJ6tM15jttzFi+eJ3o/3to8R+8p7tjxo/M6fu452z1nxIihMmWK7Z4zZ06tDBxob56jPWfjxo6e89JLT/Ycgo8ZXhhEuBghsWDYUflFtZ9gGrlfG6fc1yCTEfgJPm5Yv1nu3r0nS5bMd7JOXuc5RWvXbmz/8cf98tRTNTJjRo0zIdE/WpaCj6tWrXUCcW++ucwJxHk9SMV3A48a7HT/t5/tpCpkQ0ODfPLJSo2Lyq9+9ab06dM7k3pHusypU2flhx+2y9ixI52b+RZDexs3bpXjx0/LkiULnDeLLYYz//f//kzu3bsvf/jDr52bGdZ+rl+/IatWrZPq6kp55ZUXpFevcmtDdG7k79lzUDT4MXPmdOnRw15Q+OuvVzs95+23XzUZwtULyE8//drpZe+994bJnqNvwm7atM0J1etFrt2ec0qWLn1OJkyw3HPuyT/+42+cmxnWfjp7TpW88srzhnvOAefm5IwZNnvOV19954RwV6x43WQIVwP/n3660gnNvPvucqM954xs2rRdxo8fLQsW2Ow5ehNab6I+//xCGT9+jMl5zv/6X58685w//tFmz7l27YZ8++066d+/SpYts9pz9jvzHH3zUK+tLMxzkoOPX375nROIe+ed5SYDcQ8f1stnn9nuOfpa3rx5h/NaXrDgaZPznDj0nP/8z0+kru7BT/OcPtamOc6bm999t1769692wpkWr630Xo6+gThnzgyprbXRc5IL+eWX34p+QEHnEBZDuA8ePJTPP//GuRf27ruvS+/e9u7n6H2SLVus95zNcuLEGXnhhUUybpzOc+x9wFF7jtZb7+f07Wuv5+gHllevXi8DBnT0nPJye/dzfvxxn+zde8j5AMqMGdNMBmj+/vdvnUDce+8tl0GD9MNGRab+vrg9R18j77xjteeccj7gr/dJ5s+fa3Kes359R8/RN9v1Q9X0HO+nub5OVq/eIAMH9nfCmRZ7zu7de2XfvsMyZ85TM
mPGVKM9Z5Vo//7Vr5bLwIH2eo7Otb/4YpVz/+Gdd14zOc85duykbN26SyZMGCfz588x2XPWrdskeq9be47e67b2oWp9L1fv5+i19Icfvm9ynqP3Pdes2SgDBw5wwpkWe86uXR09Rx8oUltrvee88VMgztY8p66uTr744lspKyuVFSvs9pwtW3Y5Hx6cN89qz/lBTp485/x9tvggh7a2NtH3yvU9g9///smeQ/DR+7ws7DU0KMaPf4Go/KLaj38JO2tiZacWXY3ET/Dxv/7Pp3Lt2k15//03nTmj1yxf0YULF9rv3atzUpN9+vT6OfRoKfyoT1rQcKF+grNnT+9PvEoMOuqb9u4/biAy6KnR2toqt27dlpaWVuepH+5TM4NuN5vrNzY2Om9k9+jRU6qrq8zd+NNjvX+/zrkJrePr3buX55M5m15dbevGjZvOp1hGjhxm7gJXx9zc/Eju3r3rnIPaECyei1pjvenSr18fk0/8UEd9Iol+IlbfmLMY2OvsOW0ybJjNp6Nqz9GnAaqfBnEtnov0nOBdk54T3DBOPae1tc1kGFwN3Z5TXt5Tqqps9hydbz98yDwnyKuGnuNPLzn4GJd5juWeo/NEvbbq6Dk2r610fPrGlwaFe/Xi2srPq0e/2UE/Zcq1lR+9znW05+iTPfUJ637u5wTbe/q19T7O7du3hZ6T3qq7JTp7TrUTEvZ6czLY3jNbW+/n6LcnjBhh9X5OnHpOX5NPNnOvrbTnDB06yOQTr1paWpx7TvHoOeU/XVvZClVonek5mfW97pbSeY6+56IBLg0/Wrxv9+DBA6mre+j0G4tPU1TfW7c67iHb7zntzhgt1rnz2oqeE+SVfePGDWlqehSDeY7dnqPvWel7V/Qc/2eiO89pa2s3+eF+PTK35+g1S2Wl3kNmnuOn4h0957HokzOTw+AEH/2Isg4CCCCQXwJ+go+XL10W/dYEfRCinweKFN26datd0/n6o/9O/MfKUx8Tx+Hn5qmukxh41D/C2X7qo2to8eJRa9sZZFULexO5xDEmfh25tZe4OuqkvaSk4yvhLf7E5Vy0XmettdXXs9uv9d9Wx5gYnrc+RuvnIj0nWKd1z0XrdabnZKfOIsxzgkh2zHPaTH64wz0u5jneK5wcfHT7otW/z/Ga59BzvJ+RnWtwbRVEr2PduMxzmM8GqzX3c4L5uWvTc4I70nOCGzLPyY4h52J2HLm2Cu7I35bsGHa8/8e1VRBN7ucE0ePaKrhe5xb42xJcM+7zHIKPwc8BtoAAAgjEXcBP8LGpsdG51+33faui27dvt7uTYn2KmE5K9N8jR46Mu+cvxn/p0iXnTVyFcv9tOQSRV/gcDAIIIIAAAggggAACCOREIDn4mJNBsFMEEEAAAQQQQAABBBBAAAEEEEAAAQQQyGsBgo95XV4ODgEEEMhIwE/wsbmpKaNtd7XQE8FHDT3qP6NGjQq0YWsrX7x40Qk8uv9oAJLgo7UqMR4EEEAAAQQQQAABBBDIpgDBx2xqsi0EEEAAAQQQQAABBBBAAAEEEEAAAQQQSCVA8JHzAgEEEECA4GOI5wDBxxBx2TQCCCCAAAIIIIAAAgiYFCD4aLIsDAoBBBBAAAEEEEAAAQQQQAABBBBAAIG8EiD4mFfl5GAQQAABXwIEH32xZbYSwcfMnFgKAQQQQAABBBBAAAEE8keA4GP+1JIjQQABBBBAAAEEEEAAAQQQQAABBBBAwKoAwUerlWFcCCCAQHQCBB9DtCb4GCIum0YAAQQQQAABBBBAAAGTAgQfTZaFQSGAAAIIIIAAAggggAACCCCAAAIIIJBXAgQf86qcHAwCCCDgS4Dgoy+2zFYi+JiZE0shgAACCCCAAAIIIIBA/ggQfMyfWnIkCCCAAAIIIIAAAggggAACCCCAAAIIWBUg+Gi1MowLAQQQiE6A4GOI1gQfQ8Rl0wgggAACCCCAAAIIIGBSgOCjybIwKAQQQAABBBBAAAEEEEAAAQQQQAABBPJKgOBjXpWTg0EAAQR8CRB89MWW2UoEHzNzYikEEEAAAQQQQAABBBDIHwGCj/lTS44EAQQQQAABBBBAAAEEEEAAAQQQQAABqwIEH61WhnEhgAAC0QkQfAzRmuBjiLhsGgEEEEAAAQQQQAABBEwKEHw0WRYGhQACCCCAAAIIIIAAAggggAACCCCAQF4JEHzMq3JyMAgggIAvAYKPvtgyW4ngY2ZOLIUAAggggAACCCCAAAL5I0DwMX9qyZEggAACCCCAAAIIIIAAAggggAACCCBgVYDgo9XKMC4EEEAgOgGCjyFaE3wMEZdNI4AAAggggAACCCCAgEkBgo8my8KgEEAAAQQQQAABBBBAAAEEEEAAAQQQyCsBgo95VU4OBgEEEPAlQPDRF1tmKxF8zMyJpRBAAAEEEEAAAQQQQCB/BAg+5k8tORIEEEAAAQQQQAABBBBAAAEEEEAAAQSsChB8tFoZxoUAAghEJ0DwMURrgo8h4rJpBBBAAAEEEEAAAQQQMClA8NFkWRgUAggggAACCCCAAAIIIIAAAggggAACeSVA8DGvysnBIIAAAr4ECD76YstsJYKPmTmxFAIIIIAAAggggAACCOSPAMHH/KklR4IAAggggAACCCCAAAIIIIAAAggggIBVAYKPVivDuBBAAIHoBAoq+PiXv/xFPv74Y0f3z3/+s3z00UehShN8DJWXjSOAAAIIIIAAAggggIBBAYKPBovCkBBAAAEEEEAAAQQQQAABBBBAAAEEEMgzAYKPeVZQDgcBBBDwIUDw0QdapqsQfMxUiuUQQAABBBBAAAEEEEAgXwQIPuZLJTkOBBBAAAEEEEAAAQQQQAABBBBAAAEE7AoQfLRbG0aGAAIIRCVA8DFEaYKPIeKyaQQQQAABBBBAAAEEEDApQPDRZFkYFAIIIIAAAggggAACCCCAAAIIIIAAAnklQPAxr8rJwSCAAAK+BAg++mLLbCWCj5k5sRQCCCCAAAIIIIAAAgjkjwDBx/ypJUeCAAIIIIAAAggggAACCCCAAAIIIICAVQGCj1Yrw7gQQACB6AQIPoZoTfAxRFw2jQACCCCAAAIIIIAAAiYFCD6aLAuDQgABBBBAAAEEEEAAAQQQQAABBBBAIK8ECD7mVTk5GAQQQMCXAMFHX2yZrUTwMTMnlkIAAQQQQAABBBBAAIH8ESD4mD+15EgQQAABBBBAAAEEEEAAAQQQQAABBBCwKkDw0WplGBcCCCAQnQDBxxCtCT6GiMumEUAAAQQQQAABBBBAwKQAwUeTZWFQCCCAAAIIIIAAAggggAACCCCAAAII5JUAwUe75fzTn/5kd3BGR4aZ0cIwLPMCBB9DLBHBxxBx2TQCCCCAAAIIIIAAAgiYFCD4aLIsDAoBBBBAAAEEEEAAAQQQQAABBBBAAIG8EiD4aK+cbniPEJ/32mDn3Yw1EFABgo8hngcEH0PEZdMIIIAAAggggAACCCBgUoDgo8myMCgEEEAAAQQQQAABBBBAAAEEEEAAAQTySoDgo71yaniP0KP/uuDn3441C1eA4GOItSf4GCIu
m0YAAQQQQAABBBBAAAGTAgQfTZaFQSGAAAIIIIAAAggggAACCCCAAAIIIJBXAgQf7ZWT4F6wmuAXzI+1C1MgL4KPf/nLXzKq3vr162XVqlXOsq+//rq88MILGa330UcfZbRc8kIEH32xsRICCCCAAAIIIIAAAgjEROD+/bonRnrgwFHnv/Xt20fGjx/9xO8rKyticnQMEwEEEEAAAQQQQAABBBBAAAEEEEAAAQSsChB8tFcZgnvBaoJfMD/WLkyBvAg+FhcXh1q9trY2X9sn+OiLjZUQQAABBBBAAAEEEEAgBgLukx39DHXx4nl+VmMdBBBAAAEEEEAAAQQQQAABBBBAAAEEEEDAESD4aO9EILgXrCb4BfNj7cIUIPiYQd0JPmaAxCIIIIAAAggggAACCCBQUAIHDx6Ve/eefOJjOoTRo0fImDEj0y3G7xFAAAEEEEAAAQQQQAABBBBAAAEEEEAAgS4FCD7aOzkI7gWrCX7B/Fi7MAXyIvjIV10X5snLUSOAAAIIIIAAAggggEBuBbw+9bGqqkJmzJiW20GzdwQQQAABBBBAAAEEEEAAAQQQQAABBBCIvQDBR3slJLgXrCb4BfNj7cIUyIvgY6al04Dkxx9/7Cz+5z//WT766KNMV/W1HF917YuNlRBAAAEEEEAAAQQQQCAmAvfv18mBA0czHu3MmdOksrIi4+VZEAEEEEAAAQQQQAABBBBAAAEEEEAAAQQQSCVA8NHeeUFwL1hN8Avmx9qFKUDwMcS6E3wMEZdNI4AAAggggAACCCCAgAmB8+cvyYULl9OOha+4TkvEAggggAACCCCAAAIIIIAAAggggAACCCCQoQDBxwyhIlyM4F4wbPyC+bF2YQoQfAyx7gQfQ8Rl0wgggAACCCCAAAIIIGBGIF34kdCjmVIxEAQQQAABBBBAAAEEEEAAAQQQQAABBPJCgOCjvTIS3AtWE/yC+bF2YQoQfAyx7gQfQ8Rl0wgggAACCCCAAAIIIGBK4ODBo3LvXt0TYyL0aKpMDAYBBBBAAAEEEEAAAQQQQAABBBBAAIG8ECD4aK+MBPeC1QS/YH6sXZgCBB9DrDvBxxBx2TQCCCCAAAIIIIAAAgiYE9i0accvxlRVVSEzZkwzN04GhAACCCCAAAIIIIAAAggggAACCCCAAALxFiD4aK9+BPeC1QS/YH6sXZgCBB9DrDvBxxBx2TQCCCCAAAIIIIAAAgiYE7h/v04OHDj687hmzpwmlZUV5sbJgBBAAAEEEEAAAQQQQAABBBBAAAEEEEAg3gIEH+3Vj+BesJrgF8yPtQtTgOBjiHUn+BgiLptGAAEEEEAAAQQQQAABkwL/43/8T/nii8/kv/23/0f++3//f02OkUEhgAACCCCAAAIIIIAAAggggAACCCCAQLwFCD7aqx/BvWA1wS+YH2sXpgDBxxDrTvAxRFw2jQACCCCAAAIIIIAAAiYFJk+eLLdu3ZJ58+bJqlWrTI6RQSGAAAIIIIAAAggggAACCCCAAAIIIIBAvAUIPtqsn4b3+PEngJ0/N9YqbIGcBB+vX7/e3tLSIsXFxY5+a2ur88+oUaNCrcZf/vIX+fjjj519/PnPf5aPPvoo1P2FGXxsb2+XR48eOePv0aOHFBUVhXosfjauNdU6FxUVS48eZX42Efo6jx8/ltbWNikrK5WSkpLQ9+dnB83NzdLeLtKzp806t7W1iTqKFDl1tngu6nmo56PWuLS01E8ZQl9HX89tbe2OodsbQ9+phx3EqeeoX1kZPcdDeX+xKD3Hr1znevSc4Ib0nOCGugXmOcEdmef4M/znf/5n+dd//Vf5p3/6J/njH/8ozc2PRF/XzHP8ebrXzO41NPMc/45xmefoNZVe51v8icM8h54T/Mxx7+dYvrZ69Oix6N9p7uf4r7c7z6Hn+DfUNek5wfziMs+h5wSvMz0nuCE9JzuGzHOy4xinayu9hrb4vtXjxy3S1mb7fSvmOcFfL/Sc4IYdf/+6fq+c4GN2jNkKAgggEGcBP8HHuvt1gd63Ktq4cUv7gQNHpKZmskyZMsHxI/jo7TRqbGySlSvXiEi7vPXWq9KzZ09vG4hg6XPnLsj27Xtk9OgR8swzs0wGkbZv/1FOnTorCxc+I2PGjDIZOPvii2/k3r0H8rvfrZDy8vIIKudtFzdu3JING7ZIZWWFvPDCQpNv0B06dFQOHDgqM2ZMk2nTJpkMP37//Ua5cuW6LF/+kgwY0N/chXhDQ6N88833ohnrN998xWTPOXv2vOzYsVfGjBkhTz9tu+csWvSsjB490nTP+eCDd0zW2e05VVWV8vzzz5nsOQcPHhX9Z+bMaTJ1qs2es2bNBrl69YYsX/6yDBhQbbbnFBcXyRtvLDN5Lnb2nJHy9NNPmZznbNu2W06fPieLFz8ro0bZ7Dmff75S7t9/IB988K7JOt+4cVM2bNgq1dWVsnSp/Z4zbdpkkx/mcXvOG2+8LP372+s59fUNsmrVWrHcc86cOS87d+6VsWNHyty5NnvO1q27RMe5ePE8GTVqhMl5zmeffS11dfVid55jv+fovZxDh47JU0/pPMdmz1m9eoNcu3bDmUP0719lbp7z8GG9fPvtOikpKXbmYhbv53T2nFE/9Rx7HyCMS8958KBefvc7m9dW16/fkI0btzmvkyVLFpi8ttq//4gcPnxMZs2qkSlTJpmc57g95803l0l1td2eU1paIq+//pLJnqPXLLt27ZNx40bLnDkznUCztZ8tW3bK2bMXZMmS+TJy5HCz85yHDxvkt79dYbLObs8ZMKBKFi+22nMOy+HDx033nO++Wy/Xr9+Ut956RfTemLUw14MHD+W779Y599+t9pxTp87J7t37ZPz4jp5j8UEJmzfvFH1/zWrP0Q826v0cyz1Hrwd++GGbc99z8eL5Juc5+/YdkiNHTsjs2dNl8uSJJuc53367XvTemPWeo+HW11570eTfP30PuqPnjDHcc3bIuXMXZenSBTJixDBz8xx9aMwXX6yU+vpG+c1v3n6izgQfrc2cGQ8CCCAQvYCf4OMXn6+Umzdvy4oVr0pFRYXna6ui9es3te/ff1hqaiY5N8r1DaYogo9R84b5xEcNIX311WongaoTzj59ekd9eGn3pzfK9Q33UaOGy7x5c0xOOPVG+cmTZ+S5555xbq5ZvMj99NOv5N69OvnNb1ZIv35907pHvYBe9Kxdu1kqKvrJiy8ukl697IUz9c057TnTp0+V2tqpJp9Aqjetrly55twQGjx4oLkLi46e853zZE+9mW+754yQefNmm+w5eqNcL3Qt95xPPvlK7t+vk9/+9h3p27dP1C0l7f7cnlNZ2U9eeMFmz9F+o31H+432HYtPPdabVlev5r7n6Dwm1U9DQ4N8/fUaZ5KpQanevW3Oc/QDFB0f8NCeY+/JXDrP6fiAx7MyZsxIs/McDT5qz7H4t0V7zrp1m503lDT4aHWeoz2n4wMek032HJ3nXL3a8QGPgQMHGJznxKnnjHQ+VGax5+g8R4MLHR/wGCUasLD288knX0pd3cNY9Bz9gIfFD77pPEc
/4KE9p6Zmismes2rVOrl2raPnDBpk79pKg4/6QVY3bG1xnqPzhx079jjzh46eY+/Dtps27ZAzZ7TnzHM+yGqx5/ztb38XDT7q/RyL11YaCFi/frMT1rPac/buPSSHDh2RmTOnOx+gt/jUY/3wREfY+mWT8xy357hha7s950cZO3a086Eymz1nu5w+fd4JIekHWa31HL20/uQT7TkNTiDAcs/RD0JpsMLiPGfv3oOiH6C33HP0g+nXrt2UN9/UnqPzHFvfBKbBRx1jR89ZJr1797J2SeC8H7Rz5x7TPUcDe/r+mn4wQedj1r6tTO/n6bWVBh9//WubPUfvQegDOyz3nD17DjgfKps1q9Z5YIfFeU5nz1kmgwYN8BwICLsB1NU9kG++WStlZfoBD72HbK/nnDjR0XM0+Dh37kyT8xy35+jfZ53nWOs5Gnz89NMvneDj+++/9cQ8h+Bj2K80to8AAgjYF/ATfPz0k69++oDHqzJwoPcHkxXpV13r17rqV+boP1F91XXU5Qgz+Oh+BWRLS6vJN4nVWuuqX7GooQWLN6x0jDo+/Ue/Rsxi6FHHqI/v1kfeW7xhpePTrynRr3vRc9JiGEDHqF/HpnXWybrVr4zTnqhj7NWrl7kwgBrGpefouag3/Og5/v/i0XP827lras/Rc1H/rlgMPeo4o+g5XYUaMxF2e47OJSy+MefOczp6TrHJAFJc5jkd52KL2flsXOY5ceg52hv1zU19zVj76eg5j6W1tYWeE6A4cbi2oucEKPBPq3qZ5+TqCURRzHOCSMZrnmP72kp7t177WQsguedHXK6t9NtkLAaQ3Ps5Wmd9AqDFMEBU11b0nCAC2VlX5zn0nGCW7rUVPSeYY3zmOW0mA0i/vJ9jf55TXt7TXACJeU6w13Di2u61VTzmOb3NBa0T37dqbaXnBDkz3XlOXHsOwccg1WddBBBAID8E/AQfH9TVOdf5+lAWP/exi27fvt2uN3r1YtcNPfLEx/w4oTgKBBBAAAEEEChMgSBBx8IU46gRQAABBBAoPAE/N5EKT4kjRgABBBBAAAEEEEAAAQQQQACBTAQIPmaixDIIIIBAfgv4CT42NzUFQiH4WGTr6wcCVZOVEUAAAQQQQKBgBAg3FkypOVAEEEAAAQRCFSAAGSovG0cAAQQQQAABBBBAAAEEEECgIAQIPhZEmTlIBBBAoFsBgo8hniBhftV1iMNm0wgggAACCCCAwC8ECDxyQiCAAAIIIIBAWAKEIMOSZbsIIIAAAggggAACCCCAAAII/P/s3QeYHMWZ//F3o9KuckRaJZBQBJGEhAgGTJBJFhgMwvl85+fubJ/PJ8v8K+gcfAAAIABJREFUbTDYYAxYDnf4nM4+39lnDgO2bIOsAwMiCZGUAygnlANCcVdh9X/eFr30zk7oVL3VM9/m0YO0U1Vd/ame2pme31QXtwDBx+IeX44OAQQQ8CNA8NGPUsgyBB9DwlENAQQQQAABBFpdgLBjqw8BHUAAAQQQQKDkBQhFlvwpAAACCCCAAAIIIIAAAggggAACOQUIPnJyIIAAAggQfDR4DhB8NIhL0wgggAACCCBgRIDAoxFWGkUAAQQQQACBCAIEICPgURUBBBBAAAEEEEAAAQQQQACBIhUg+FikA8thIYAAAgEECD4GwApalOBjUDHKI4AAAggggEBrCuQKPZaXl0t1dbVUVFRIZWWlaPiAAEJrjhT7RgABBBBoDYF3G96Vw8ePtMaui26fZXLivwopd/4vx0WOHz4uxxuP5zxWXnsU3WnAASGAAAIIIIAAAggggAACCCAQSYDgYyQ+KiOAAAJFIUDw0eAwEnw0iEvTCCCAAAIIIBCbQK7AY5s2bUT/aNiRDQEEEEAAgVIX2NOwRxqOHS51BmPHrwHI6rIqkUaRxobGnPshAGlsCGgYAQQQQAABBBBAAAEEEEAAgVQJEHxM1XDRWQQQQMCIAMFHI6wnGiX4aBCXphFAAAEEEEAgskCuwKOu7tiuXTtnhUc2BBBAAAEEEDghQPAxuTOhTVm1yFGRxiMEIJNTZ08IIIAAAggggAACCCCAAAIIpEuA4GO6xoveIoAAAiYECD6aUH2vTYKPBnFpGgEEEEAAAQRCCeQKO7qNdejQwVnlkQ0BBBBAAAEEmgsQfEz2jNCVHaulShrrG51bYefbWAUy2bFhbwgggAACCCCAAAIIIIAAAgjYIEDw0YZRoA8IIIBA6woQfDToT/DRIC5NI4AAAggggEAggUKBx/LycqmpqeG21oFUKYwAAgggUEoCBB9bZ7R19cdjDcekrLGsYAcIQBYkogACCCCAAAIIIIAAAggggAACRSNA8LFohpIDQQABBEILEHwMTVe4IsHHwkaUQAABBBBAAAHzAn5Cj7W1tdza2vxQsAcEEEAAgRQLEHxsvcFzwo/1x6TsOOHH1hsF9owAAggggAACCCCAAAIIIICAXQIEH+0aD3qDAAIItIYAwUeD6gQfDeLSNAIIIIAAAggUFCgUeHQb6NixIys9FtSkAAIIIIBAqQsQfGzdM+BE+LFRygrc9lp7ycqPrTtW7B0BBBBAAAEEEEAAAQQQQACBJAQIPiahzD4QQAABuwWsCj7qSkPFtO3bt89ZOcn9o7eQ1IvvXIAvplHmWBBAAAEEELBPwG/gUXvevn17adu2rX0HQY8QQAABBBCwTIDgY+sPiIYfGw81+uoI1158MVEIAQQQQAABBBBAAAEEEEAAgdQKEHxM7dDRcQQQQCA2AYKPsVG2bIjgo0FcmkYAAQQQQACBFgJBAo9auaqqSortiyecFggggAACCJgSIPhoStZ/uxVlFVJ5pEIajxJ+9K9GSQQQQAABBBBAAAEEEEAAAQSKU4DgY3GOK0eFAAIIBBGwKvhYV1cXpO/Wl+VW19YPER1EAAEEEECgaASChh71wLnFddEMPweCAAIIIJCAAMHHBJB97CLIqo/aHCs/+kClCAIIIIAAAggggAACCCCAAAIpFCD4mMJBo8sIIIBAzAIEH2MG9TZH8NEgLk0jgAACCCCAQJNAmNBjdXW11NTUoIgAAggggAACPgUIPvqEMlysTMqk6miVHPe56qN2h/Cj4UGheQQQQAABBBBAAAEEEEAAAQRaQYDgYyugs0sEEEDAMgGCjwYHhOCjQVyaRgABBBBAAAFHIEzoUevpLa71VtdsCCCAAAIIIOBPgOCjP6ckSgVd9dHtEwHIJEaHfSCAAAIIIIAAAggggAACCCCQjADBx2Sc2QsCCCBgswDBR4OjQ/DRIC5NI4AAAggggEDo0GN5ebl07twZQQQQQAABBBAIIEDwMQCW4aKVZZVSVq/fAAm3IwKQ4dyohQACCCCAAAIIIIAAAggggIBNAgQfbRoN+oIAAgi0jgDBR4PuBB8N4tI0AggggAACJS4QdqVHZeM21yV+8nD4CCCAAAKhBAg+hmIzVqnNsWppPNIYun3Cj6HpqIgAAggggAACCCCAAAIIIICAFQIEH60YhhaduOuuu+zsmGW9wsmyAaE7qRUg+Ghw6Ag+GsSlaQQQQAABBEpYIE
roUdnat28vbdu2LWFBDh0BBBBAAIHgAgQfg5uZrNFGqqWxPnzwUftG+NHkCNE2AggggAACCCCAAAIIIIAAAmYFCD6a9Q3Tuob5CPT5k8PKnxOlECgkQPCxkFCExwk+RsCjKgIIIIAAAghkFYgaetRGa2trpaqqCmEEEEAAAQQQCCBA8DEAVgJF25RVS+Mhgo8JULMLBBBAAAEEEEAAAQQQQAABBKwUIPho37AQ5vM/Jlj5t6IkAvkECD4aPD8IPhrEpWkEEEAAAQRKUCCO0KOyderUSSoqKkpQkENGAAEEEEAgvADBx/B2JmpWl1XL8YjBR+0Xqz6aGB3aRAABBBBAAAEEEEAAAQQQQMC8AMFH88ZB90CYz78YVv6tKIlAPgGCjwbPD4KPBnFpGgEEEEAAgRITiCv0qO107tyZ4GOJnT8cLgIIIIBAdAGCj9EN42yhuqxKGg82Rg4uEnyMc1RoCwEEEEAAAQQQQAABBBBAAIHkBAg+Jmftd0+E+fxKiXNLcG4L7t+LkgjkEiD4aPDcIPhoEJemEUAAAQQQKDGBOIKP2ob+6dKlC8HHEjt/OFwEEEAAgegCBB+jG8bZggYfjx04JuXl5ZGbJfwYmZAGEEAAAQQQQAABBBBAAAEEEEhcgOBj4uQFd0iYryBRUwGs/FtREoF8AgQfDZ4fBB8N4tI0AggggAACJSQQZ+hR2Qg+ltDJw6EigAACCMQmQPAxNspYGnKDj9qYBhejhhej1o/loGgEAQQQQAABBBBAAAEEEEAAAQR8CxB89E2VWEHCfP6psfJvRUkE8gkQfDR4fhB8NIhL0wgggAACCJSQQNTgo7vSo0tG8LGETh4OFQEEEEAgNgGCj7FRxtKQN/ioDRJ+jIWVRhBAAAEEEEAAAQQQQAABBBBIjQDBR/uGijCf/zHByr8VJRHIJ0Dw0eD5QfDRIC5NI4AAAgggUCICcYQelcrbDsHHEjl5OEwEEEAAgVgFCD7Gyhm5sWzBR2006sqNUetHPjAaQAABBBBAAAEEEEAAAQQQQAABXwIEH30xJVqIMJ9/bqz8W1ESgXwCBB8Nnh8EHw3i0jQCCCCAAAIlIhAl+OitS/CxRE4YDhMBBBBAwJgAwUdjtKEazhV81Maihhej1g91QFRCAAEEEEAAAQQQQAABBBBAAIFAAgQfA3ElUpgwn39mrPxbURKBfAIEHw2eHwQfDeLSNAIIIIAAAiUgEFfoUakIPpbACcMhIoAAAggYFSD4aJQ3cOP5go/aWNTwYtT6gQ+ICggggAACCCCAAAIIIIAAAgggEEiA4GMgrkQKE+bzz4yVfytKIpBPgOCjwfOD4KNBXJpGAAEEEECgyAVMhR6VjVtdF/nJw+EhgAACCBgRIPhohDV0o5nBR20oM6wYNbwYtX7og6MiAggggAACCCCAAAIIIIAAAggUFCD4WJAo8QKE+fyTY+XfipII5BMg+Gjw/CD4aBCXphFAAAEEEChygbDBx2z1Mn9G8LHITx4ODwEEEEDAiADBRyOsoRutKquSxgPHmtXPFlSMEl6MUjf0gVERAQQQQAABBBBAAAEEEEAAAQR8CRB89MWUaCHCfP65sfJvRUkE8gkQfDR4fhB8NIhL0wgggAACCBSxQNjQo5Jk1s32765du0pFRUURC3JoCCCAAAIIxC9A8DF+0ygtavDx2P6jvlZ5jBJgjFI3yvFRFwEEEEAAAQQQQAABBBBAAAEE8gsQfLTvDCHM539MsPJvRUkE8gkQfDR4fhB8NIhL0wgggAACCBSxQNjgY6HVHt3HCT4W8cnDoSGAAAIIGBMg+GiMNlTDbvBRK3vDiaz6GIqTSggggAACCCCAAAIIIIAAAgikToDgo31DRpjP/5hg5d+KkgjkEyD4aPD8IPhoEJemEUAAAQQQKGKBMMHHXHW8P9e/659u3boltuLjvI2HnZF6fWNDqkbsc+fVpqq/dBYBBBBAwLwAwUfzxkH2oMHHo/uONIUe3cBjrhUaw67cGLZekGOhLAIIIIAAAggggAACCCCAAAIIBBcg+BjcLIkaGuhjKyyAU2EjSiDgR4Dgox+lkGUIPoaEoxoCCCCAAAIlLBBn6FEZ3fa8/08q+Pizl/fJvS/uS+1oTp/cXc6sq05t/+k4AggggEC8AgQf4/WM2lpm8FHb05BivqBi2BBj2HpRj5H6CCCAAAIIIIAAAggggAACCCCQW4DgI2cHAggggADBR4PnAMFHg7g0jQACCCCAQJEKmAg+Zq76mFTwccD9m1M9SpcMaCO/urlbqo+BziOAAAIIxCdA8DE+yzhayhZ81HbLy8vzNh8mxBimThzHSBsIIIAAAggggAACCCCAAAIIIJBbgOAjZwcCCCCAAMFHg+cAwUeDuDSNAAIIIIBAEQrEHXpUosbGxmZSSd7qmuBjEZ6kHBICCCBQwgJpDT6u2LHSGbXlO5bL0h3LnL+P7DGi2UheM+Lq1I1sruAjqz6mbijpMAIIIIAAAggggAACCCCAAAKhBAg+hmKjEgIIIFBUAq0SfFy9es3xd955V2pra6Rjxxo5duyY86eurq6ocE0GH48ePSqbNm11vOrqTiq4okFrwO7ff0B27dot7dq1k+7du1rZxx07dsn+/fulR4/u0qFD+7y3xGoNQ93nhg2bRMd7wIB+UlFR0VrdyLnf+voG2bZtu1RVVUnv3j2tHOc9e94V/dO5cyfp1KmjleO8Zcs2OXSoXvr27S1t2rSxbpzTM+fskvbt20u3bsw5YU+itMw51dXV0qtXj6Kbc+IOPmp7mW3qvzt16pTIXEPwMewzkXoIIIAAAjYKpCn4qGHHPyybLou3L/FNObrnKCcQmZYQZK7g47ZtO5z3Vv369RF9zZhtC7OCY5g62fZ95MhR2bx5q5SVifTrZ+f1nH379jvXczp06CDdunWx8jX39u075cCBA9KzZ3fnPWBc4+P7CeOj4IYNb8vRo8esvZ6jz5Pt23c4zxNb31u9884eeffdvdKlS2fp2LHWynHevHmb1Nfbez3nyJEjon3U54jOi4VWxfVxasdexJ1zamo6SNeuts85PaR9+3ZWnotpmXP0uqfO3TaeiyfmnHelS5cuFs85W0Wvx9t6DTkNc87evftk9+53hDkn2nTOnBPNT2vv3r1H9u61/XWOO+f0kTZtsr+3ii4RvgV3zikvL5O+fe18nfP+nFMjXbt2tvL33/vvrex8naOfZ2zcuMnJkvTv3/KzcoKP4Z9D1EQAAQSKRSBM8HHN6nXS0NCQ9xpyPp+yWbNePL5gwRIZNepUGTZsqHOxN0rw0fSb5MyVkvwOvsng48GDh+TPf37S6cqHP3yltG3b1m+3Eiu3Zs16efnl150XIePGnZnzA4fEOpRlR7NnvyYrV66RCy44VwYO7G9lsPCxx/4se/bsk8mTJzkX823b9CL500+/KJ07d5RLL70gkSBNUIOFC5eK/jnttBEycuQwqaqqDNqE8fJPPjlLNm3aIldffZl0764X/8qM7zPIDtw5R+fr666ze87RkPC559o+54yTgQPrLJ5z9sqtt97gBNdt2
96fczrJpZeeb/Wcc/rpI2XkyFOlstLfnBN36FHHLttrGN2Pvm7o2LGj8eEl+GicmB0ggAACCCQokIbgY5jAYzbCm0feZH0AUoOPR/YebvHBkb632rp1m1xzzeXSrVu3rOGUsCG5sPW8xvol0See+KtUVJTJNdfoeyv7vvi2atVaeeWVuTJoUH8555wzpLq6KsFnmr9dvfjiK7J69Tq56KLxWT/88teK2VKPPPIn0UDX5MnXW/neauvW7fLssy85H8BefPEEK99bzZ+/WBYvflPOOGOUDB8+1Pd7K7Mj27z1v/zlaVHLa6+9wvkSZhzzRJz9d+ecyspyufrqK6ycc1auXCuvvjpXBg8eIGefPcbKOeeFF16RNWvWyQc+cJ7U1fW17nrO8eMijz76J9HxvuWWSVbOOVu2bJdZs15yAvXqaOMXv+fNWyxLluicM1qGDx9i5ZwzY8bTziIE1157pWNp25yjv/e0jzbPOStWrJHXXptn9Zzz/PNzRD9fu/hiW+ec4/Loo3+W/fsPyi23fNjSOWebzJo1W7p37yIXXWTrnLNIlix5S848c7QMG2brnPNX2bp1h/M5tH45wbY5R0OF+lpMP/O76qrLrXyds2LFannttfly8skD5ayzTrfydY4751xyyQTp109f55TH+XI0cluNjcdFPys/cOCg3HxzyzmH4GNkYhpAAAEEUi8QJvj4+8f+LNu27ZRJkyY6X7gN+jqnbO7c+cfXrl0vJ53UR/r37+sgEnwMdi5p8lRfKGmgYdy4s6y8UKBvwN98c6Wz2uOpp57irAho2/bmmyuclRY0gNu7dw/rLlqp18svv+ZctLrggvHSrp19AVf95r0GmfXbzqedNtLKb32tW7dBdM7p37/OWWnBbwgpyfN13rxFzooaeoFXV6YMOrGa7uv7c85xJ8hs48VJ/aDhrbfSMOdskeHDT5VevXpa9wZSzyMNhOucox8g2hiqd+ecDh1OzDm5VvEx/ZzI1/66detl7doNgeacMKFH7UO+erlWe9Sf19TUOisdm96yBR+nT+4uZ9bZ9w3hTz+8S55d39CM5JIBbeRXN3czzUT7CCCAAAIpEbA5+Pj4sifk4aWPxC5pcwDSDT5m3tp6wYLFsmvXHjnnnDF5V/wP854rTJ3MQdGV4V5/fYHz47Fjz7DyvZXekWD58pXOilxDhpxs5fWcZcuWy5YtW2XEiGHWrlb40kuvin6J8MIL9b2VfQFXvRvPokVLpaamvYwePcLK91Ya+tBrOhrCravT6zn23Qll7tyFzsphY8eeaeUKcbqyp845+kVW++ecHjJkyGBL55y3nJUzR40abu1qhTrnHDp0yLmGbOecs0cWLVrmrLI3evTwFMw5fa28hjx37gJnlTi755z5zhdT9LWYjdeQ9fOgFStWSc+e9s45S5e+Jfp6zNY5R68t6jVkm+ccfZ4sXrxMams7OI42XkPWL/GsX79RBg/uL/362fk65403FoiuhmvrnKOvtd94Y6GzkIjtc46usH7KKXa+zlm69E3RLyjo72e9Q6LpBaeCXrgoNOcQfAwqSnkEEECg+ATCBB9fenGOc9dYXVCrtjb4XUbKtm/fflxvm+peMI56q2vTv4BtXPFRf8mrofbNxjeP+lTRvrnjbGPoUfuo5572UYNwNt5GWvuoS7UfO9Zo5QUrN3SjfdRvFtu41L13nHWMbQw9ah/1PNTzUd+Ax/FhWty/rphz4hF9f86psjL06J6Lejs2Gy+SF/OcEyb4WCj0mC0Y6b6m0dWPkpgPCT7GM3fQCgIIIICAHQK2Bh9NhR5ddVvDj27wUfvpDT+eeM19zHl/mu96Udj3XWHruZ68t4rn+ey+t9LrTaavC4btcVqu5+jx2RgGSNP1HHfOiTo/hD3X8tVLy5xz5MhRJ7Rg+zVk5pzwZ6meizovpmPOqbQyaO29bqevc5hzwp2Pem2MOSecnbcWr3OiG7qvZysqmHPCavI6J6xc83ppf29F8DGe84BWEEAAgTQLhAk+Hti/v+kacpj3VmW7du06ri9G9A2GG3qMsuLj9OnTjY7BpEmTQrVv8lbXoTpEJQQQQAABBBCwTiBM6FEPImjw0bsCZPfu3RMJ/BN8tO50o0MIIIAAAhEEbAw+3vf8A7J4+5IIR+W/6p0fuEOG9hjiv4LhkpnBR92de5HKe7Eq34WrUBe1dNk0NgQQQAABBBBAAAEEEEAAAQQQaHUBgo+tPgR0AAEEEGh1gTDBx4b6+kj9jj34GKk3BisTfDSIS9MIIIAAAggUiUCY4KOf0KPyuOUy/0/wseXJw62ui+QJxWEggAACBgVsCj6u2LFS/rBsemKhR5fVpvBjawUf1SJMYNLgqUnTCCCAAAIIIIAAAggggAACCJSkAMHHkhx2DhoBBBBoJkDw0eAJQfDRIC5NI4AAAgggUCQCBB/tGEiCj3aMA71AAAEEbBawKfh466OfaDUqW8KP2YKPiuK97bWLlCuoGDbAGLZeqw0aO0YAAQQQQAABBBBAAAEEEECgCAUIPhbhoHJICCCAQEABgo8BwYIUJ/gYRIuyCCCAAAIIlKZA0OCj39UeVdN7e2v33/p/G1d8/NnL+1qcAOfUtZEz66pb/Dxb2SBnz+fOq21RnOBjEEF7yx4+fFjWrFkjq1evls2bNzvnet++fWXs2LFOp5ctWyYvvPCC9OvXTy6++GLp0KFD7Afz/PPPS2Njo7Rv317OPffc2Nsv1ODChQtl9+7dUl5e7uy/bdu2haqU5OMbNmxwzhPdRowYIb169SpJBw46mIAtwcfHlz0hDy99JFjnYyw9uucoue2iqTG2GK4pDT4efrehRdAxSPBR9xwmxBimTrijpBYCCCCAAAIIIIAAAggggAACCOQSIPjIuYEAAgggQPDR4DlA8NEgLk0jgAACCCBQJAJJBx91fz169JCKigrjggPu39xiH9Mnd28RZpy38bBMemhnpLJBDmb9V09qUZzgYxBBO8vu3btXfvCDH4gG2rxbu3bt5Ec/+pHs379f/umf/qnpoWuvvVauu+662A/mb/7mb5ra/OUvfxl7+4Ua1GOdP3++U+yOO+6QgQMHFqpSko8/88wz8tBDDznHfvPNN8tll11Wkg4cdDABG4KPrR16dMVuHnmTXDPi6mCAMZfOFXzU3Wj4O3OLc9VHgo8xDybNIYAAAggggAACCCCAAAIIIBBCgOBjCDSqIIAAAkUmQPDR4IASfDSIS9MIIIAAAggUiUCQ4GOhspmP66pz3s1dAZLgI8HHtDx9dIXGlStXOt0dM2aMDBgwIGvXDx06JN/61rdk+/btLR53g4+6EuS3v/3tpse1vS984QuxUxB8jJ3USIMEH42wFn2jNgQfW/MW15kD3Nq3vHaDj9qvzKBjkOCj1g8aZAxavuifHBwgAggggAACCCCAAAIIIIAAAq0gQPCxFdDZJQIIIGCZAMFHgwNC8NEgLk0jgAACCCBQJAKFwozewwxym2utlxl8dP9N8JHgY1qePn/84x/l8ccfd7r72c9+VsaPH5+164sWLZJ//dd/bXrs6quv
g+Finmic+1gllMQIECBAgQKAYASHIYprCjhAgQIAAAQLfCAg76goECBAgQIAAAQIECBAgQIDAlQKCj/oEAQIECAxFQPCxTjXBxzqhLEaAAAECBAgUJyAAWVyT2CECBAgQIDDqBAQeR12TO2ACBAgQIECAAAECBAgQIFC3gOBj3VQWJECAAIEBAoKPdXYHwcc6oSxGgAABAgQIFCsgAFls09gxAgQIECDQsQICjx3btA6MAAECBAgQIECAAAECBAiMmIDg44hRWhEBAgRGlYDgY53NLfhYJ5TFCBAgQIAAgbYSEIZsq+ayswQIECBAoGgBIceim8fOESBAgAABAgQIECBAgACBYgUEH4ttGjtGgACBogUEH+tsHsHHOqEsRoAAAQIECLStgBBk2zadHSdAgAABAi0TEHZsGb0NEyBAgAABAgQIECBAgACBjhEQfOyYpnQgBAgQaKqA4GOd3IKPdUJZjAABAgQIEOhIAaHIjmxWB0WAAAECBOoSEG6si8lCBAgQIECAAAECBAgQIECAwBAFBB+HCOdjBAgQGOUCgo91dgDBxzqhLEaAAAECBAiMCgFByFHRzA6SAAECBEapgKDjKG14h02AAAECBAgQIECAAAECBFokIPjYInibJUCAQJsLCD7W2YCCj3VCWYwAAQIECBAgUKeA8GSdUBYjQIAAAQKDEBBaHASWRQkQIECAAAECBAgQIECAAIEiBAQfi2gGO0GAAIG2ExB8rLPJBB/rhLIYAQIECBAgQGCIAoKQQ4TzMQIECBAY1QKCjqO6+R08AQIECBAgQIAAAQIECBDoCAHBx45oRgdBgACBpgs0LPg4derUph9MIzfY29sbXV1dl95jx46NvLjgAkMj1a2bAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQ6GQBwcdObl3HRoAAgcYJjHjw8cKFC3H+/PmYMmVK4/a6BWvu6+urQo8ZeKz9K/jYgoawSQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQKBjBAQfO6YpHQgBAgSaKjBiwcf8acIMPGbwsfbulJ8rzIBjBh5r7ww+1kKPnvjY1P5qYwQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAYFgCY3p6evpzDVcGHjP02EnBx1rQcWAAMo9b8HFY/ceHCRAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIBAUwXGHD58uL8Wcsx/MwDZSaHHmubA4KOnPTa1j9kYAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAYMYFLwcdc48DAY6c87bEmVXuyYy30mP/vaY8j1o+siAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQINEWgCj7mlgYGHTst9FiTHBh0FHpsSv+yEQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgMKICl4KPtbV2auixdnwCjyPaf6yMAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAg0VeA7wcembt3GCBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQKDEBB8HASWRQkQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAIHWCgg+ttbf1gkQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAAECBAgQIECAAIFBCAg+DgLLogQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgAABAgQIECBAgEBrBf4PjKAcOtNpe84AAAAASUVORK5CYII=" - } - }, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The finished Pipeline should look as follows: ![pipeline_run_detail_mnist_v2.png](attachment:pipeline_run_detail_mnist_v2.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Predict from the trained model\n", - "\n", - "Once Kubeflow Pipeline is finished, you are able to call the API endpoint with [mnist image](https://raw.githubusercontent.com/kubeflow/katib/master/examples/v1beta1/kubeflow-pipelines/images/9.bmp) to predict from the trained model.\n", - "\n", - "**Note**: If you are using Kubeflow + Dex setup and runing this Notebook outside of your Kubernetes cluster, follow [this guide](https://github.com/kserve/kserve/tree/master/docs/samples/istio-dex#authentication) to get Session ID for the API requests." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "from PIL import Image\n", - "import requests\n", - "\n", - "\n", - "# Due to [issue #8300](https://github.com/kubeflow/pipelines/issues/8300), \n", - "# connecting to Kubeflow Pipelines using PodDefaults is not working for now. 
You can still use the Kubeflow Dashboard to upload pipeline and create run.\n", - "\n", - "# Pipeline Run should be succeeded.\n", - "# kfp_run = kfp_client.get_run(run_id=run_id)\n", - "# if kfp_run.run.status != \"Succeeded\":\n", - "# print(\"Run {} is not Succeeded\\n\".format(run_id))\n", - "# import os\n", - "# os._exit(os.EX_OK)\n", - "\n", - "\n", - "# Specify the image URL here.\n", - "image_url = \"https://raw.githubusercontent.com/kubeflow/katib/master/examples/v1beta1/kubeflow-pipelines/images/9.bmp\"\n", - "image = Image.open(requests.get(image_url, stream=True).raw)\n", - "data = np.array(image.convert('L').resize((28, 28))).astype(np.float).reshape(-1, 28, 28, 1)\n", - "data_formatted = np.array2string(data, separator=\",\", formatter={\"float\": lambda x: \"%.1f\" % x})\n", - "json_request = '{{ \"instances\" : {} }}'.format(data_formatted)\n", - "\n", - "# Specify the prediction URL. If you are runing this notebook outside of Kubernetes cluster, you should set the Cluster IP.\n", - "url = \"http://{}-predictor-default.{}.svc.cluster.local/v1/models/{}:predict\".format(name, namespace, name)\n", - "response = requests.post(url, data=json_request)\n", - "\n", - "print(\"Prediction for the image\")\n", - "display(image)\n", - "print(response.json())" - ] - } - ], - "metadata": { - "environment": { - "kernel": "python3", - "name": "common-cpu.m96", - "type": "gcloud", - "uri": "gcr.io/deeplearning-platform-release/base-cpu:m96" - }, - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.10" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/samples/contrib/kubeflow-e2e-mnist/kubeflow-e2e-mnist.ipynb b/samples/contrib/kubeflow-e2e-mnist/kubeflow-e2e-mnist.ipynb deleted file mode 100644 index c73bf5b4778..00000000000 --- a/samples/contrib/kubeflow-e2e-mnist/kubeflow-e2e-mnist.ipynb +++ /dev/null @@ -1,690 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Kubeflow Pipelines e2e mnist example\n", - "\n", - "In this notebook you will create e2e mnist Kubeflow Pipeline to perform:\n", - "- Hyperparameter tuning using Katib\n", - "- Distributive training with the best hyperparameters using TFJob\n", - "- Serve the trained model using KServe\n", - "\n", - "Reference documentation:\n", - "\n", - "- https://www.kubeflow.org/docs/components/training/tftraining/\n", - "- https://www.kubeflow.org/docs/components/katib/\n", - "- https://www.kubeflow.org/docs/external-add-ons/kserve/\n", - "\n", - "**Note**: This Pipeline runs in the multi-user mode. Follow [this guide](https://www.kubeflow.org/docs/components/pipelines/sdk/connect-api/#multi-user-mode) to give your Notebook access to Kubeflow Pipelines." 
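The prediction cell in the notebook removed above formats the MNIST image with `np.float`, an alias that newer NumPy releases (1.24 and later) no longer provide. Below is a minimal sketch of the same request-building flow, assuming the builtin `float` and the `mnist-e2e` / `kubeflow-user-example-com` values used elsewhere in this diff; adjust the service name and namespace to your own deployment.

```python
# Minimal sketch of the prediction request from the removed notebook, with the
# deprecated np.float alias replaced by the builtin float. The service name and
# namespace are assumptions taken from the example values in this diff.
import numpy as np
import requests
from PIL import Image

image_url = "https://raw.githubusercontent.com/kubeflow/katib/master/examples/v1beta1/kubeflow-pipelines/images/9.bmp"
image = Image.open(requests.get(image_url, stream=True).raw)

# 28x28 grayscale input shaped (1, 28, 28, 1), serialized as a TF Serving "instances" payload.
data = np.array(image.convert("L").resize((28, 28))).astype(float).reshape(-1, 28, 28, 1)
data_formatted = np.array2string(data, separator=",", formatter={"float": lambda x: "%.1f" % x})
json_request = '{{ "instances" : {} }}'.format(data_formatted)

name, namespace = "mnist-e2e", "kubeflow-user-example-com"
url = "http://{}-predictor-default.{}.svc.cluster.local/v1/models/{}:predict".format(name, namespace, name)
response = requests.post(url, data=json_request)
print(response.json())
```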
- ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Requirement already satisfied: kfp==1.8.4 in /opt/conda/lib/python3.8/site-packages (1.8.4)\n", - "Requirement already satisfied: jsonschema<4,>=3.0.1 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (3.2.0)\n", - "Requirement already satisfied: PyYAML<6,>=5.3 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (5.4.1)\n", - "Requirement already satisfied: kfp-server-api<2.0.0,>=1.1.2 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (1.6.0)\n", - "Requirement already satisfied: click<8,>=7.1.1 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (7.1.2)\n", - "Requirement already satisfied: kubernetes<19,>=8.0.0 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (12.0.1)\n", - "Requirement already satisfied: uritemplate<4,>=3.0.1 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (3.0.1)\n", - "Requirement already satisfied: google-cloud-storage<2,>=1.20.0 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (1.41.1)\n", - "Requirement already satisfied: protobuf<4,>=3.13.0 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (3.17.3)\n", - "Requirement already satisfied: typing-extensions<4,>=3.10.0.2 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (3.10.0.2)\n", - "Requirement already satisfied: google-api-python-client<2,>=1.7.8 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (1.12.10)\n", - "Requirement already satisfied: cloudpickle<2,>=1.3.0 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (1.6.0)\n", - "Requirement already satisfied: kfp-pipeline-spec<0.2.0,>=0.1.10 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (0.1.13)\n", - "Requirement already satisfied: google-auth<2,>=1.6.1 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (1.34.0)\n", - "Requirement already satisfied: strip-hints<1,>=0.1.8 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (0.1.10)\n", - "Requirement already satisfied: docstring-parser<1,>=0.7.3 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (0.13)\n", - "Requirement already satisfied: pydantic<2,>=1.8.2 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (1.9.0)\n", - "Requirement already satisfied: fire<1,>=0.3.1 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (0.4.0)\n", - "Requirement already satisfied: absl-py<=0.11,>=0.9 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (0.11.0)\n", - "Requirement already satisfied: requests-toolbelt<1,>=0.8.0 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (0.9.1)\n", - "Requirement already satisfied: Deprecated<2,>=1.2.7 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (1.2.13)\n", - "Requirement already satisfied: tabulate<1,>=0.8.6 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (0.8.9)\n", - "Requirement already satisfied: six in /opt/conda/lib/python3.8/site-packages (from absl-py<=0.11,>=0.9->kfp==1.8.4) (1.16.0)\n", - "Requirement already satisfied: wrapt<2,>=1.10 in /opt/conda/lib/python3.8/site-packages (from Deprecated<2,>=1.2.7->kfp==1.8.4) (1.13.3)\n", - "Requirement already satisfied: termcolor in /opt/conda/lib/python3.8/site-packages (from fire<1,>=0.3.1->kfp==1.8.4) (1.1.0)\n", - "Requirement already satisfied: google-auth-httplib2>=0.0.3 in /opt/conda/lib/python3.8/site-packages (from google-api-python-client<2,>=1.7.8->kfp==1.8.4) 
(0.1.0)\n", - "Requirement already satisfied: httplib2<1dev,>=0.15.0 in /opt/conda/lib/python3.8/site-packages (from google-api-python-client<2,>=1.7.8->kfp==1.8.4) (0.20.4)\n", - "Requirement already satisfied: google-api-core<3dev,>=1.21.0 in /opt/conda/lib/python3.8/site-packages (from google-api-python-client<2,>=1.7.8->kfp==1.8.4) (1.29.0)\n", - "Requirement already satisfied: requests<3.0.0dev,>=2.18.0 in /opt/conda/lib/python3.8/site-packages (from google-api-core<3dev,>=1.21.0->google-api-python-client<2,>=1.7.8->kfp==1.8.4) (2.27.1)\n", - "Requirement already satisfied: setuptools>=40.3.0 in /opt/conda/lib/python3.8/site-packages (from google-api-core<3dev,>=1.21.0->google-api-python-client<2,>=1.7.8->kfp==1.8.4) (49.6.0.post20210108)\n", - "Requirement already satisfied: pytz in /opt/conda/lib/python3.8/site-packages (from google-api-core<3dev,>=1.21.0->google-api-python-client<2,>=1.7.8->kfp==1.8.4) (2021.1)\n", - "Requirement already satisfied: googleapis-common-protos<2.0dev,>=1.6.0 in /opt/conda/lib/python3.8/site-packages (from google-api-core<3dev,>=1.21.0->google-api-python-client<2,>=1.7.8->kfp==1.8.4) (1.53.0)\n", - "Requirement already satisfied: packaging>=14.3 in /opt/conda/lib/python3.8/site-packages (from google-api-core<3dev,>=1.21.0->google-api-python-client<2,>=1.7.8->kfp==1.8.4) (20.9)\n", - "Requirement already satisfied: pyasn1-modules>=0.2.1 in /opt/conda/lib/python3.8/site-packages (from google-auth<2,>=1.6.1->kfp==1.8.4) (0.2.8)\n", - "Requirement already satisfied: rsa<5,>=3.1.4 in /opt/conda/lib/python3.8/site-packages (from google-auth<2,>=1.6.1->kfp==1.8.4) (4.8)\n", - "Requirement already satisfied: cachetools<5.0,>=2.0.0 in /opt/conda/lib/python3.8/site-packages (from google-auth<2,>=1.6.1->kfp==1.8.4) (4.2.2)\n", - "Requirement already satisfied: google-resumable-media<3.0dev,>=1.3.0 in /opt/conda/lib/python3.8/site-packages (from google-cloud-storage<2,>=1.20.0->kfp==1.8.4) (2.2.1)\n", - "Requirement already satisfied: google-cloud-core<3.0dev,>=1.6.0 in /opt/conda/lib/python3.8/site-packages (from google-cloud-storage<2,>=1.20.0->kfp==1.8.4) (2.2.2)\n", - "Requirement already satisfied: google-crc32c<2.0dev,>=1.0 in /opt/conda/lib/python3.8/site-packages (from google-resumable-media<3.0dev,>=1.3.0->google-cloud-storage<2,>=1.20.0->kfp==1.8.4) (1.3.0)\n", - "Requirement already satisfied: pyparsing!=3.0.0,!=3.0.1,!=3.0.2,!=3.0.3,<4,>=2.4.2 in /opt/conda/lib/python3.8/site-packages (from httplib2<1dev,>=0.15.0->google-api-python-client<2,>=1.7.8->kfp==1.8.4) (2.4.7)\n", - "Requirement already satisfied: pyrsistent>=0.14.0 in /opt/conda/lib/python3.8/site-packages (from jsonschema<4,>=3.0.1->kfp==1.8.4) (0.17.3)\n", - "Requirement already satisfied: attrs>=17.4.0 in /opt/conda/lib/python3.8/site-packages (from jsonschema<4,>=3.0.1->kfp==1.8.4) (21.2.0)\n", - "Requirement already satisfied: certifi in /opt/conda/lib/python3.8/site-packages (from kfp-server-api<2.0.0,>=1.1.2->kfp==1.8.4) (2021.5.30)\n", - "Requirement already satisfied: python-dateutil in /opt/conda/lib/python3.8/site-packages (from kfp-server-api<2.0.0,>=1.1.2->kfp==1.8.4) (2.8.1)\n", - "Requirement already satisfied: urllib3>=1.15 in /opt/conda/lib/python3.8/site-packages (from kfp-server-api<2.0.0,>=1.1.2->kfp==1.8.4) (1.26.5)\n", - "Requirement already satisfied: websocket-client!=0.40.0,!=0.41.*,!=0.42.*,>=0.32.0 in /opt/conda/lib/python3.8/site-packages (from kubernetes<19,>=8.0.0->kfp==1.8.4) (1.0.1)\n", - "Requirement already satisfied: requests-oauthlib in 
/opt/conda/lib/python3.8/site-packages (from kubernetes<19,>=8.0.0->kfp==1.8.4) (1.3.1)\n", - "Requirement already satisfied: pyasn1<0.5.0,>=0.4.6 in /opt/conda/lib/python3.8/site-packages (from pyasn1-modules>=0.2.1->google-auth<2,>=1.6.1->kfp==1.8.4) (0.4.8)\n", - "Requirement already satisfied: charset-normalizer~=2.0.0 in /opt/conda/lib/python3.8/site-packages (from requests<3.0.0dev,>=2.18.0->google-api-core<3dev,>=1.21.0->google-api-python-client<2,>=1.7.8->kfp==1.8.4) (2.0.12)\n", - "Requirement already satisfied: idna<4,>=2.5 in /opt/conda/lib/python3.8/site-packages (from requests<3.0.0dev,>=2.18.0->google-api-core<3dev,>=1.21.0->google-api-python-client<2,>=1.7.8->kfp==1.8.4) (3.2)\n", - "Requirement already satisfied: wheel in /opt/conda/lib/python3.8/site-packages (from strip-hints<1,>=0.1.8->kfp==1.8.4) (0.36.2)\n", - "Requirement already satisfied: oauthlib>=3.0.0 in /opt/conda/lib/python3.8/site-packages (from requests-oauthlib->kubernetes<19,>=8.0.0->kfp==1.8.4) (3.2.0)\n", - "Requirement already satisfied: kubeflow-katib==0.12.0 in /opt/conda/lib/python3.8/site-packages (0.12.0)\n", - "Requirement already satisfied: six>=1.10 in /opt/conda/lib/python3.8/site-packages (from kubeflow-katib==0.12.0) (1.16.0)\n", - "Requirement already satisfied: setuptools>=21.0.0 in /opt/conda/lib/python3.8/site-packages (from kubeflow-katib==0.12.0) (49.6.0.post20210108)\n", - "Requirement already satisfied: urllib3>=1.15.1 in /opt/conda/lib/python3.8/site-packages (from kubeflow-katib==0.12.0) (1.26.5)\n", - "Requirement already satisfied: certifi>=14.05.14 in /opt/conda/lib/python3.8/site-packages (from kubeflow-katib==0.12.0) (2021.5.30)\n", - "Requirement already satisfied: kubernetes>=12.0.0 in /opt/conda/lib/python3.8/site-packages (from kubeflow-katib==0.12.0) (12.0.1)\n", - "Requirement already satisfied: pyyaml>=3.12 in /opt/conda/lib/python3.8/site-packages (from kubernetes>=12.0.0->kubeflow-katib==0.12.0) (5.4.1)\n", - "Requirement already satisfied: python-dateutil>=2.5.3 in /opt/conda/lib/python3.8/site-packages (from kubernetes>=12.0.0->kubeflow-katib==0.12.0) (2.8.1)\n", - "Requirement already satisfied: requests-oauthlib in /opt/conda/lib/python3.8/site-packages (from kubernetes>=12.0.0->kubeflow-katib==0.12.0) (1.3.1)\n", - "Requirement already satisfied: websocket-client!=0.40.0,!=0.41.*,!=0.42.*,>=0.32.0 in /opt/conda/lib/python3.8/site-packages (from kubernetes>=12.0.0->kubeflow-katib==0.12.0) (1.0.1)\n", - "Requirement already satisfied: requests in /opt/conda/lib/python3.8/site-packages (from kubernetes>=12.0.0->kubeflow-katib==0.12.0) (2.27.1)\n", - "Requirement already satisfied: google-auth>=1.0.1 in /opt/conda/lib/python3.8/site-packages (from kubernetes>=12.0.0->kubeflow-katib==0.12.0) (1.34.0)\n", - "Requirement already satisfied: rsa<5,>=3.1.4 in /opt/conda/lib/python3.8/site-packages (from google-auth>=1.0.1->kubernetes>=12.0.0->kubeflow-katib==0.12.0) (4.8)\n", - "Requirement already satisfied: cachetools<5.0,>=2.0.0 in /opt/conda/lib/python3.8/site-packages (from google-auth>=1.0.1->kubernetes>=12.0.0->kubeflow-katib==0.12.0) (4.2.2)\n", - "Requirement already satisfied: pyasn1-modules>=0.2.1 in /opt/conda/lib/python3.8/site-packages (from google-auth>=1.0.1->kubernetes>=12.0.0->kubeflow-katib==0.12.0) (0.2.8)\n", - "Requirement already satisfied: pyasn1<0.5.0,>=0.4.6 in /opt/conda/lib/python3.8/site-packages (from pyasn1-modules>=0.2.1->google-auth>=1.0.1->kubernetes>=12.0.0->kubeflow-katib==0.12.0) (0.4.8)\n", - "Requirement already satisfied: 
charset-normalizer~=2.0.0 in /opt/conda/lib/python3.8/site-packages (from requests->kubernetes>=12.0.0->kubeflow-katib==0.12.0) (2.0.12)\n", - "Requirement already satisfied: idna<4,>=2.5 in /opt/conda/lib/python3.8/site-packages (from requests->kubernetes>=12.0.0->kubeflow-katib==0.12.0) (3.2)\n", - "Requirement already satisfied: oauthlib>=3.0.0 in /opt/conda/lib/python3.8/site-packages (from requests-oauthlib->kubernetes>=12.0.0->kubeflow-katib==0.12.0) (3.2.0)\n" - ] - } - ], - "source": [ - "# Install required packages (Kubeflow Pipelines and Katib SDK).\n", - "!pip install kfp==1.8.4\n", - "!pip install kubeflow-katib==0.12.0" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp\n", - "import kfp.dsl as dsl\n", - "from kfp import components\n", - "\n", - "from kubeflow.katib import ApiClient\n", - "from kubeflow.katib import V1beta1ExperimentSpec\n", - "from kubeflow.katib import V1beta1AlgorithmSpec\n", - "from kubeflow.katib import V1beta1ObjectiveSpec\n", - "from kubeflow.katib import V1beta1ParameterSpec\n", - "from kubeflow.katib import V1beta1FeasibleSpace\n", - "from kubeflow.katib import V1beta1TrialTemplate\n", - "from kubeflow.katib import V1beta1TrialParameterSpec" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Define the Pipelines tasks\n", - "\n", - "To run this Pipeline, you should define:\n", - "1. Katib hyperparameter tuning\n", - "2. TFJob training\n", - "3. KServe inference\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Step 1. Katib hyperparameter tuning task\n", - "\n", - "Create the Kubeflow Pipelines task for the Katib hyperparameter tuning. This Experiment uses \"random\" algorithm and TFJob for the Trial's worker.\n", - "\n", - "The Katib Experiment is similar to this example: https://github.com/kubeflow/katib/blob/master/examples/v1beta1/kubeflow-training-operator/tfjob-mnist-with-summaries.yaml." 
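Because the cell that follows builds the entire Katib Experiment in one block, a condensed sketch of just its objective and search-space pieces may help when skimming the diff. The class names and value ranges are taken directly from that cell (kubeflow-katib 0.12 SDK); nothing new is introduced.

```python
# Condensed view of the Katib objects assembled in the full cell below
# (kubeflow-katib==0.12.0 SDK classes imported earlier in the notebook).
from kubeflow.katib import (
    V1beta1AlgorithmSpec,
    V1beta1FeasibleSpace,
    V1beta1ObjectiveSpec,
    V1beta1ParameterSpec,
)

# Minimize the reported "loss" metric; the Experiment succeeds once it reaches 0.001.
objective = V1beta1ObjectiveSpec(type="minimize", goal=0.001, objective_metric_name="loss")

# Random search over the learning rate and batch size passed to /opt/model.py.
algorithm = V1beta1AlgorithmSpec(algorithm_name="random")
parameters = [
    V1beta1ParameterSpec(
        name="learning_rate",
        parameter_type="double",
        feasible_space=V1beta1FeasibleSpace(min="0.01", max="0.05"),
    ),
    V1beta1ParameterSpec(
        name="batch_size",
        parameter_type="int",
        feasible_space=V1beta1FeasibleSpace(min="80", max="100"),
    ),
]
```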
- ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "# You should define the Experiment name, namespace and number of training steps in the arguments.\n", - "def create_katib_experiment_task(experiment_name, experiment_namespace, training_steps):\n", - " # Trial count specification.\n", - " max_trial_count = 5\n", - " max_failed_trial_count = 3\n", - " parallel_trial_count = 2\n", - "\n", - " # Objective specification.\n", - " objective = V1beta1ObjectiveSpec(\n", - " type=\"minimize\",\n", - " goal=0.001,\n", - " objective_metric_name=\"loss\"\n", - " )\n", - "\n", - " # Algorithm specification.\n", - " algorithm = V1beta1AlgorithmSpec(\n", - " algorithm_name=\"random\",\n", - " )\n", - "\n", - " # Experiment search space.\n", - " # In this example we tune learning rate and batch size.\n", - " parameters = [\n", - " V1beta1ParameterSpec(\n", - " name=\"learning_rate\",\n", - " parameter_type=\"double\",\n", - " feasible_space=V1beta1FeasibleSpace(\n", - " min=\"0.01\",\n", - " max=\"0.05\"\n", - " ),\n", - " ),\n", - " V1beta1ParameterSpec(\n", - " name=\"batch_size\",\n", - " parameter_type=\"int\",\n", - " feasible_space=V1beta1FeasibleSpace(\n", - " min=\"80\",\n", - " max=\"100\"\n", - " ),\n", - " )\n", - " ]\n", - "\n", - " # Experiment Trial template.\n", - " # TODO (andreyvelich): Use community image for the mnist example.\n", - " trial_spec = {\n", - " \"apiVersion\": \"kubeflow.org/v1\",\n", - " \"kind\": \"TFJob\",\n", - " \"spec\": {\n", - " \"tfReplicaSpecs\": {\n", - " \"Chief\": {\n", - " \"replicas\": 1,\n", - " \"restartPolicy\": \"OnFailure\",\n", - " \"template\": {\n", - " \"metadata\": {\n", - " \"annotations\": {\n", - " \"sidecar.istio.io/inject\": \"false\"\n", - " }\n", - " },\n", - " \"spec\": {\n", - " \"containers\": [\n", - " {\n", - " \"name\": \"tensorflow\",\n", - " \"image\": \"docker.io/liuhougangxa/tf-estimator-mnist\",\n", - " \"command\": [\n", - " \"python\",\n", - " \"/opt/model.py\",\n", - " \"--tf-train-steps=\" + str(training_steps),\n", - " \"--tf-learning-rate=${trialParameters.learningRate}\",\n", - " \"--tf-batch-size=${trialParameters.batchSize}\"\n", - " ]\n", - " }\n", - " ]\n", - " }\n", - " }\n", - " },\n", - " \"Worker\": {\n", - " \"replicas\": 1,\n", - " \"restartPolicy\": \"OnFailure\",\n", - " \"template\": {\n", - " \"metadata\": {\n", - " \"annotations\": {\n", - " \"sidecar.istio.io/inject\": \"false\"\n", - " }\n", - " },\n", - " \"spec\": {\n", - " \"containers\": [\n", - " {\n", - " \"name\": \"tensorflow\",\n", - " \"image\": \"docker.io/liuhougangxa/tf-estimator-mnist\",\n", - " \"command\": [\n", - " \"python\",\n", - " \"/opt/model.py\",\n", - " \"--tf-train-steps=\" + str(training_steps),\n", - " \"--tf-learning-rate=${trialParameters.learningRate}\",\n", - " \"--tf-batch-size=${trialParameters.batchSize}\"\n", - " ]\n", - " }\n", - " ]\n", - " }\n", - " }\n", - " }\n", - " }\n", - " }\n", - " }\n", - "\n", - " # Configure parameters for the Trial template.\n", - " trial_template = V1beta1TrialTemplate(\n", - " primary_container_name=\"tensorflow\",\n", - " trial_parameters=[\n", - " V1beta1TrialParameterSpec(\n", - " name=\"learningRate\",\n", - " description=\"Learning rate for the training model\",\n", - " reference=\"learning_rate\"\n", - " ),\n", - " V1beta1TrialParameterSpec(\n", - " name=\"batchSize\",\n", - " description=\"Batch size for the model\",\n", - " reference=\"batch_size\"\n", - " ),\n", - " ],\n", - " trial_spec=trial_spec\n", - " )\n", - "\n", - 
" # Create an Experiment from the above parameters.\n", - " experiment_spec = V1beta1ExperimentSpec(\n", - " max_trial_count=max_trial_count,\n", - " max_failed_trial_count=max_failed_trial_count,\n", - " parallel_trial_count=parallel_trial_count,\n", - " objective=objective,\n", - " algorithm=algorithm,\n", - " parameters=parameters,\n", - " trial_template=trial_template\n", - " )\n", - "\n", - " # Create the KFP task for the Katib Experiment.\n", - " # Experiment Spec should be serialized to a valid Kubernetes object.\n", - " katib_experiment_launcher_op = components.load_component_from_url(\n", - " \"https://raw.githubusercontent.com/kubeflow/pipelines/master/components/kubeflow/katib-launcher/component.yaml\")\n", - " op = katib_experiment_launcher_op(\n", - " experiment_name=experiment_name,\n", - " experiment_namespace=experiment_namespace,\n", - " experiment_spec=ApiClient().sanitize_for_serialization(experiment_spec),\n", - " experiment_timeout_minutes=60,\n", - " delete_finished_experiment=False)\n", - "\n", - " return op" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Step 2. TFJob training task\n", - "\n", - "Create the Kubeflow Pipelines task for the TFJob training. In this example TFJob runs the Chief and Worker with 1 replica.\n", - "\n", - "Learn more about TFJob replica specifications in the Kubeflow docs: https://www.kubeflow.org/docs/components/training/tftraining/#what-is-tfjob." - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "# This function converts Katib Experiment HP results to args.\n", - "def convert_katib_results(katib_results) -> str:\n", - " import json\n", - " import pprint\n", - " katib_results_json = json.loads(katib_results)\n", - " print(\"Katib results:\")\n", - " pprint.pprint(katib_results_json)\n", - " best_hps = []\n", - " for pa in katib_results_json[\"currentOptimalTrial\"][\"parameterAssignments\"]:\n", - " if pa[\"name\"] == \"learning_rate\":\n", - " best_hps.append(\"--tf-learning-rate=\" + pa[\"value\"])\n", - " elif pa[\"name\"] == \"batch_size\":\n", - " best_hps.append(\"--tf-batch-size=\" + pa[\"value\"])\n", - " print(\"Best Hyperparameters: {}\".format(best_hps))\n", - " return \" \".join(best_hps)" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "# You should define the TFJob name, namespace, number of training steps, output of Katib and model volume tasks in the arguments.\n", - "def create_tfjob_task(tfjob_name, tfjob_namespace, training_steps, katib_op, model_volume_op):\n", - " import json\n", - " # Get parameters from the Katib Experiment.\n", - " # Parameters are in the format \"--tf-learning-rate=0.01 --tf-batch-size=100\"\n", - " convert_katib_results_op = components.func_to_container_op(convert_katib_results)\n", - " best_hp_op = convert_katib_results_op(katib_op.output)\n", - " best_hps = str(best_hp_op.output)\n", - "\n", - " # Create the TFJob Chief and Worker specification with the best Hyperparameters.\n", - " # TODO (andreyvelich): Use community image for the mnist example.\n", - " tfjob_chief_spec = {\n", - " \"replicas\": 1,\n", - " \"restartPolicy\": \"OnFailure\",\n", - " \"template\": {\n", - " \"metadata\": {\n", - " \"annotations\": {\n", - " \"sidecar.istio.io/inject\": \"false\"\n", - " }\n", - " },\n", - " \"spec\": {\n", - " \"containers\": [\n", - " {\n", - " \"name\": \"tensorflow\",\n", - " \"image\": \"docker.io/liuhougangxa/tf-estimator-mnist\",\n", - 
" \"command\": [\n", - " \"sh\",\n", - " \"-c\"\n", - " ],\n", - " \"args\": [\n", - " \"python /opt/model.py --tf-export-dir=/mnt/export --tf-train-steps={} {}\".format(training_steps, best_hps)\n", - " ],\n", - " \"volumeMounts\": [\n", - " {\n", - " \"mountPath\": \"/mnt/export\",\n", - " \"name\": \"model-volume\"\n", - " }\n", - " ]\n", - " }\n", - " ],\n", - " \"volumes\": [\n", - " {\n", - " \"name\": \"model-volume\",\n", - " \"persistentVolumeClaim\": {\n", - " \"claimName\": str(model_volume_op.outputs[\"name\"])\n", - " }\n", - " }\n", - " ]\n", - " }\n", - " }\n", - " }\n", - "\n", - " tfjob_worker_spec = {\n", - " \"replicas\": 1,\n", - " \"restartPolicy\": \"OnFailure\",\n", - " \"template\": {\n", - " \"metadata\": {\n", - " \"annotations\": {\n", - " \"sidecar.istio.io/inject\": \"false\"\n", - " }\n", - " },\n", - " \"spec\": {\n", - " \"containers\": [\n", - " {\n", - " \"name\": \"tensorflow\",\n", - " \"image\": \"docker.io/liuhougangxa/tf-estimator-mnist\",\n", - " \"command\": [\n", - " \"sh\",\n", - " \"-c\",\n", - " ],\n", - " \"args\": [\n", - " \"python /opt/model.py --tf-export-dir=/mnt/export --tf-train-steps={} {}\".format(training_steps, best_hps) \n", - " ],\n", - " }\n", - " ],\n", - " }\n", - " }\n", - " }\n", - "\n", - " # Create the KFP task for the TFJob.\n", - " tfjob_launcher_op = components.load_component_from_url(\n", - " \"https://raw.githubusercontent.com/kubeflow/pipelines/master/components/kubeflow/launcher/component.yaml\")\n", - " op = tfjob_launcher_op(\n", - " name=tfjob_name,\n", - " namespace=tfjob_namespace,\n", - " chief_spec=json.dumps(tfjob_chief_spec),\n", - " worker_spec=json.dumps(tfjob_worker_spec),\n", - " tfjob_timeout_minutes=60,\n", - " delete_finished_tfjob=False)\n", - " return op" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Step 3. KServe inference\n", - "\n", - "Create the Kubeflow Pipelines task for the KServe inference." - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "def create_serving_task(model_name, model_namespace, tfjob_op, model_volume_op):\n", - "\n", - " api_version = 'serving.kserve.io/v1beta1'\n", - " serving_component_url = 'https://raw.githubusercontent.com/kubeflow/pipelines/master/components/kserve/component.yaml'\n", - "\n", - " # Uncomment the following two lines if you are using KFServing v0.6.x or v0.5.x\n", - " # api_version = 'serving.kubeflow.org/v1beta1'\n", - " # serving_component_url = 'https://raw.githubusercontent.com/kubeflow/pipelines/master/components/kubeflow/kfserving/component.yaml'\n", - "\n", - " inference_service = '''\n", - "apiVersion: \"{}\"\n", - "kind: \"InferenceService\"\n", - "metadata:\n", - " name: {}\n", - " namespace: {}\n", - " annotations:\n", - " \"sidecar.istio.io/inject\": \"false\"\n", - "spec:\n", - " predictor:\n", - " tensorflow:\n", - " storageUri: \"pvc://{}/\"\n", - "'''.format(api_version, model_name, model_namespace, str(model_volume_op.outputs[\"name\"]))\n", - "\n", - " serving_launcher_op = components.load_component_from_url(serving_component_url)\n", - " serving_launcher_op(action=\"apply\", inferenceservice_yaml=inference_service).after(tfjob_op)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Run the Kubeflow Pipeline\n", - "\n", - "You should create the Kubeflow Pipeline from the above tasks." 
- ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "tags": [] - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'size': {{pipelineparam:op=model-volume;name=size}}, 'name': {{pipelineparam:op=model-volume;name=name}}, 'manifest': {{pipelineparam:op=model-volume;name=manifest}}}\n", - "{{pipelineparam:op=model-volume;name=name}}\n" - ] - }, - { - "data": { - "text/html": [ - "Experiment details." - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "Run details." - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Run ID: 9519f884-8baf-4768-a728-29de8ef5b4e6\n" - ] - } - ], - "source": [ - "name=\"mnist-e2e\"\n", - "namespace=\"kubeflow-user-example-com\"\n", - "training_steps=\"200\"\n", - "\n", - "@dsl.pipeline(\n", - " name=\"End to End Pipeline\",\n", - " description=\"An end to end mnist example including hyperparameter tuning, train and inference\"\n", - ")\n", - "def mnist_pipeline(name=name, namespace=namespace, training_steps=training_steps):\n", - " # Run the hyperparameter tuning with Katib.\n", - " katib_op = create_katib_experiment_task(name, namespace, training_steps)\n", - "\n", - " # Create volume to train and serve the model.\n", - " model_volume_op = dsl.VolumeOp(\n", - " name=\"model-volume\",\n", - " resource_name=\"model-volume\",\n", - " size=\"1Gi\",\n", - " modes=dsl.VOLUME_MODE_RWO\n", - " )\n", - "\n", - " # Run the distributive training with TFJob.\n", - " tfjob_op = create_tfjob_task(name, namespace, training_steps, katib_op, model_volume_op)\n", - "\n", - " # Create the KServe inference.\n", - " create_serving_task(name, namespace, tfjob_op, model_volume_op)\n", - "# Run the Kubeflow Pipeline in the user's namespace.\n", - "\n", - "kfp_client=kfp.Client()\n", - "run_id = kfp_client.create_run_from_pipeline_func(mnist_pipeline, namespace=namespace, arguments={}).run_id\n", - "print(\"Run ID: \", run_id)" - ] - }, - { - "attachments": { - "f947c4a5-dc78-4ba4-8e47-ae73d8f0ecea.png": { - "image/png": 
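The cell above defines `mnist_pipeline`, chains the Katib, VolumeOp, TFJob and KServe tasks, and submits the run in-cluster with `create_run_from_pipeline_func`. As a hedged sketch against the same KFP v1 SDK the notebook already uses (the package filename, client host comment and timeout below are illustrative, and `mnist_pipeline` refers to the function defined in that cell), the pipeline can also be compiled to a reusable package and the run waited on programmatically:

```python
import kfp
import kfp.compiler as compiler

# Compile the pipeline defined above into a package that can be uploaded
# through the KFP UI or API instead of being submitted from the notebook.
compiler.Compiler().compile(mnist_pipeline, "mnist-e2e-pipeline.yaml")

# Submit the run (same call as in the cell above) ...
client = kfp.Client()  # in-cluster; otherwise pass host="http://<kfp-endpoint>"
result = client.create_run_from_pipeline_func(
    mnist_pipeline,
    namespace="kubeflow-user-example-com",
    arguments={},
)

# ... and block until the Katib -> TFJob -> KServe chain finishes.
# The timeout (in seconds) is arbitrary; adjust for your cluster.
run_detail = client.wait_for_run_completion(result.run_id, timeout=3600)
print("Run status:", run_detail.run.status)
```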
"iVBORw0KGgoAAAANSUhEUgAABpAAAANvCAYAAADa35mqAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAP+lSURBVHhe7N0LYBTlvffxH96oWBHRRLSgEi4toi8FERFsTxAVSqkWrYGeqqUoFClUpQqUeqloY6CKtrGIINSjtiXxQkWkoCK0FcQI5FiMnnIRlKgx6wVpxaJA3nlmntmd2exmNxcg2Xw/553u3C/PzgTf+e///7SocggAAAAAAAAAAACwDrGfAAAAAAAAAAAAgIsAEgAAAAAAAAAAAEIIIAEAAAAAAAAAACCEABIAAAAAAAAAAABCCCABAAAAAAAAAAAghAASAAAAAAAAAAAAQgggAQAAAAAAAAAAIIQAEgAAAAAAAAAAAEIIIAEAAAAAAAAAACCEABIAAAAAAAAAAABCCCABAAAAAAAAAAAghAASAAAAAAAAAAAAQlq8XxmpUpX7/+T8rzezRQsz4Yw4H94sAAAAAAAAAAAAZAgTCorFgmx8yPyfme/8T4uPPt5RtW/vXhs6spwJE0wyK5r/BwAAAAAAAAAAgAySJBZkRg859FC12Pmvf1WZyJJdxawfQvwIAAAAAAAAAAAgsySKB5l5LUzEyGQg7d69u2pfYK3oKLXrAAAAAAAAAAAAMptbsy6cUHSIM9Hiiz17qoIRJLcfJDOZsalHb2jRzcVaY6dijtdF1/5EfY+3k/vRm0W36dkTfqKxufv/YB+t/J3uev885Q/vZuc0Zh9ozW9+p//tkUbbvF6sqX96wxmpx/dm9vHcibrh2m+o7Qd/1+zfvKcLb89Tjl0MAAAAAAAAAEDGszEhr06dp8UhztTevXtNLpJNPXLDR3YV73/9jpMyx2t6/IZHpR8W6Htn2FkZ7MPnZ+r29y7UvVecbuc0ZpV6cfpMrTtzoq49P9vOS8xc16O6POV6NdrwqK5bcpJunnyejqt8Qb+Z8a6G3HW5utjFAAAAAAAAAABkKlOmzhMXGzIjVVVqsW+fST9q4QaKousGZF4A6R8q+ulD0lUzNbyHnRX06kMa93R7TbvpfJmklg+WTdct735Ls370/7Tx9xO16KQh6vDMEv3VrNtzpDvfZ5bfW+qN53x7im4YZIIblfrrHQV6RSfozcr39V/OcXuuN/vxlifa57ST/qJbnnnfmThBw2+arP/yYyTm3Ob9wxvPHhI9R++cpf9yru2vlWaG3e69wPr+uQb3of+n6347Ul3tVG0885dl+vCjj3XlD0bYOXUXbbfs/+dew/az/LZL3Kbud+K2jyPhdTlt4X+/NXyfsWXZWu7cE27716NNAAAAAAAAAABoKmIBpBgTEvLmV+kQO0uHmIJ2hrPA7RwpYwdp39692qdEy5zh6z/S7b1LNG9ppTP9Dz33ci/dPqqHu6xq315tWlSiE266R/cXjtQ31s7Tn171tvtwab7u3v4t3V7oLLvpW9q76Fd2mdO6ZrsTv+Vsc49GfN3bz177JXj73K6v2+1Ocfb5i3Jv3Wt7vKs/Pv0Pd70Wlc9p+px3lecee4ry9j2tX8yzy5zvb+97pdr3HbvseLuduZYhx2tvj5G6372Gf+hP0X3c4yx7Vwvd6/SuoTbDebn/pbfeelv/88gfEy5Pd3Dbbe0ZutZc/5ivaNt7sbZJ1qZZg6eEryvUNua6jtcLTz2nD80xTNvssx1+maHK2f8+b/+xZT00wrT9XnMeP9JX/XUZGBgYGBgYGBgYGBgYGBgYGBgYGBgYmsHg/I8bIvJiRV5i0SH+Qj/TyFslJriDzBicS9+7Ty/cP15X/zgw3LpMH9h1soZcrbPXzNWdt85V1cWDlOVvW7VP+3oO1XntzPTXdeHQbL3wyqvO+PvasOY9dTyrh7duu0Ea1nOf3nr3ffd42rdPHU9q5+3D7seP4nn7PFPd3O1OUIe92fr+d77uLjv+pGy5+WHO+AfrSrQ5q7fOcI/dTudd3EP7tld45+x8mfv29tCZPb1lZ5wV284cyB93J010xtnCTGcNuUk/HxI4r1oMRx3VSjdOvFbb3t6u3z/8aMJ10hk+LHfabei37fV77ea1TU1tGnddzrKf33+T/V6c6zqxndPk7kKvbfzx+O2Cy5xx0zbufAYGBgYGBgYGBgYGBgYGBgYGBgYGBoYMH4L8Ka9anbfczUAKzsh8VW4QYcDY32nenMAwbbCy7BpSO5333XbaVPUdDeppZzlMO+V8pZ2dMkwwwtmfO1qlTX+eph+NHucO019xpsvfiy47+cTYdmY/dqu4fXrnFhPYvxl/b5Em2f3/aNZ6Z3q7PnAXOctOaK/j3PWMuO2i41/X5dPO0ks3233cvFQRu6TOnH1XmSBMnVTo3e2xtjCCbWP2nbRNQ9clvfGAvSa/bfxFzjrh0wtsF7csdGwAAAAAAAAAAJoRP05k3pUbbgCppsCR+1I9wwYTQDBDomXesF4Pz9qnH/RZo9lPvxebv69Km95+NzodedtZ5swz42Z/Jig1f05gGPP16LLQ8ZxtTOZLtXFn2GcykqLr7Yvu3x3vdXV4/3Ou1tfcdU3WjhmSbBfYf9UJgzTVbj/dub4b718fW1aL4dNPd6lgxt1q/5WTNGrkFQnXST2coHYnha8/2B41tWnoutY9oOmv9NCN/jpjezjz/fYwn/54+DsLLzPtHluPgYGBgYGBgYGBgYGBgYGBgYGBgYGBIZOHZPyYke0DCUFvzJ4rXTNGA4f+WOe8/ICWV9gFxrpX9IY7Uqq166Tz+pgUpXb6f2efoBcW/sVm9JTqkdHjdMfi4Ib1k9W7jzqtWxQ9lzdmmwwi/3hpqviL7hg9x56/p1P7E+1Y7fzfPzeqQ4f2unrUD+2cuunW5+va8tQi75yc83vCaVNPXdu0QssX/q8dtypK9A93M+87AwAAAAAAAAAANWtRVVOYKSOV6pGr52i5nQoaOO5+DX73Nt1YfpEeGmtr15mgy03bdemDY6TZ1+gJOfPXlmqzs6jzd2/VTUNjpenecJZPX2sneo+x+6jQ8ptu07uX3K8renmLzHpPtPe2DY5757ZIJ91xqwY6k5HFceeyfo5Gzir1xp3zmOycUzczauY/2UG/vuNbbhm+0Hb+NvZ83GV/tkGYdhdFtzmYYu3WU1d89z29pB9H2zVxm8a3jdfGj7iX1U5XjOujl2Z535lpn/j9PxJsm2i7+fdFoF0BAAAAAAAAAGimmmEAqe7CwR4AAAAAAAAAAIDMRAk7AAAAAAAAAAAAhJCBBAAAAAAAAAAAgBAykAAAAAAAAAAAABBCAAkAAAAAAAAAAAAhlLADAAAAAAAA0DTtfl8bVpUpYidDsrvr3NNP0BF2MrVPtO2Fj3XseafqGDunyar8WKNnfK4ldtKYPPIE/fR0O6Gd+u0Nn2m6DtOiu47TmXYuGpq5p9bpTTtVTa3v0brwzyFHZzbgvf35+xv0T31VZ5ywf8++rtY98r4uetWMtdB9k7I1LNudXQ+pn5nK5yrVc1mVhgw6WnMvaGXnNm1kIAEAAAAAAADIPJ
VlenHbJ3YilRQv+puS1z7UV+KCR8b0h97X6Od22Sk0H8fo1PPO03kNHDx6sSxh2LaR2KlVbvDIqNKSVw/MfZ99QbbeueuEjAkeGWQgAQAAAAAAAGia/AykapkcibIu4oJEOWfqvFPNkurBo5ze5+mkz7yX5Fk5OdKbbyqiI9X6i8+08/BE+2zY7I6687MkwhlHfmaEotkTibIpdmnhjH9pfKU74QhnWvgZHZMHHaENy/wAVVw2RlzmU02ZGNFsjR6HSa/ucbYxmSJf0vYZcedlAmIP7YnuK5blcYTOcM7DXGvDZZnsT4nvlU+2vaB1sZsy4b2Vk/OmuQXde/ZMrXPXz+neXf8qs9l39l6O7StL3fufoRNamvG44waeme5fLlOZPXZW93MD2USf6/3XXlRZ9F6ILa8WPPKfvbhswPD+Dqxq91b2ESqddKyit4d/n2Yfpsnao+nOdcbfX57gfRV4ZiYdotn+fd7jSL1zRWszFrg3nX312O0dI7A8dtzY+cQypYy456kRIAMJAAAAAAAAQIarHiTSm+u04f3P7URyETd45MjO0SlfNSNv6uOd5tOx82NvnznHNoLgkeO1L7yASo8jA+XqTGbEl3VftnkZnuzldHzwyNiji26o1MLQPGl6NHhkOOs8YhsjQdm8Jcv+pd++ZieSWOIGjxzZh6t/LQJAS6LBI6NK4x/6WHGn2uiFg0fGm1r32vsK35U2eOTIaRu7y970g0eGuZdf2xDYV0Rlm+L3E6cyFjwyImX/1Pu7vfFPtoWDR0ZweTUJSklGyl7UNv85OaB2aVWpCQC10JBBR2qIuacqv9CqRDdHpRc8ctftER88Mpz7akb8M+Dc88H7/NXPEt/j2S29Y7/6hdZ5c1TpjJvthvRsmSB4ZJh9N677mAASAAAAAAAAgKbNlKt74QW9EB28YFFW95O8wI4N9JisiPPccl7nqnu2ecn9rj5x1jj1vDOVY9ZzszTO06k2YcBlMizMNqefoKy23lr/+sx7Nf/JR94b+OCL/YOp8r297ufknsELMFppWE0ZOq995gWPTGbEXSe4ZbhKB7VwZlRp/LK4KIDJqDDrjDzMm3aOaTb1X46bzCd3+V1HarIzPX1Jihfi/jGDGSLpiG53hIaY6cp92u4uaCo+0cfm9vHvL2c4t3uWey+/G9fkJiPOLA/dlybryN/GEak82r13z+vfXe6cys/0mbskGZOlFHsWnD3oMxsgOuZU73j+cKZ723vLjzjhjOgx3efJeS60o9INHvnneZ59nt58O0UQa3+o3K0l7r1sApKt1L+ndx8nLmNngqrmXjXPxk4tcINH/rzYMxC/rX+Pe8ulDe8l2rd/7D1a5QaYAoGtHiYrz5bZi3/mKj/XghRB1wOJABIAAAAAAACADOO9HPdLaH3+2b/cT5MV4QWY/AyLf+k/ybIqrKysY2Ol8Vof674Yj0Q+1ufO//3n32Zmjo6Nj9ccZIlfaCe3rnSP+zl5SCyIk33Bl9wAkB8g8kWDU6cf7i23tld4mRumr6Wv3GAGr5ReqsCOn41RW9Ht/EyPpmb3f5y7zxEIfvql4fwApSfx/eUHLY848mj3M5oF1/JLsnNqlp2tY90Sd0fo2CwvIBTPlKsz5xXOkqrus8+8835zbTiAmzqI1fDis3yyexzuBhiXlO6uHsgMZr1V7tUG8xmYl7hPo8PU32b3ZZ94qDeShH/s6aU7nf3bwFaPL3mBXP94lZ+rp/u8vB/Nfqrt87s/EUACAAAAAAAA0LT5WRx+9oUiqtyRTu5DLOsiPcfoWBNBqqzUxzs/Nh+xF/eNgP9Ce0mFFxCKMSXq3k9aTq5DOy+TYv/Yq7ervblPn59V1ZxEPjvQYZcwU1rPBILePfKMQAZSXaQO0DYsP8vHK5/oBjL9cnMHI7MnUMZuoZ+hVy07sLrqz+/BQwAJAAAAAAAAQGZoeYLO6O297Q722eJnacRK2MWGUFmwNBzjlrGLqGyt1+dLYylf5/KzguL6Zal87t9uibrpD1Xv08jwA0/BcnOVz/3HyyBylvlJGjXxg1CxEnb+UEPpvKT8oFMsIJBx/EyhQAm76HDqwbynbGm9aOaTn2mX3JFHemHbWAk7fzhDJ7hZTgeIX4oxCTcTKJnsQ3WG+QwGml770AtC+f181VqsjN14N7solr0UPV6ghF10uKKWf5T2IwJIAAAAAAAAADJH61OjfbaUbbJ9sPil56Il7OzwWnwfLW9qnTN/w/s1ZC/ZfXkaW/m61hph+2WJlZKLlcYaMujLiYM5px+p+9yyWvHltFrovkHpXWC0XFfguO5Qq5fvh6mDe35VGj/DbP+vGgMCTZufzRbff9eGaODz4PKehVi5x+q852mbPmuT7Wb+xUrY2WHbJ96KB4hfinHIoKPDARm/n6xXv9A6d41EEjw7D5n9pf8MJBItBWk4z8iZdtQcr38P5yPwzHlD4iDvwUIACQAAAAAAAEBGOeZUrxN/83L+n24w6Bidajv2j8nRmaefYPs3OkYndU/cD0x19sW/0YjK1/ncflv8F+YBJjMo3JdLUCsNm3S0F0SKOkyLapM9lH2s5sYf12RX1CqbwjmPkcF9OOcw8jA7nnmOObV6ebic3gc4a6ca51mxWXwukyFlS0O++ZEXEDrihJPDz5LJ/IuWj7TMdgc0k2qnVr1qPltoSI+4+zzaT9YeraqhjJ15dkptEMk3eWRdMuiCbKDIEV++7swrTtAiu8xX/+M1rBZVDjsOAAAAAAAAAEjB9BGz7k2vZFdtS+ABQFNBBhIAAAAAAAAApC2+jxgAyExkIAEAAAAAAABAGj5/f4NeLIu442QfAch0BJAAAAAAAAAAAAAQQgk7AAAAAAAAAAAAhBBAAgAAAAAAAAAAQAgBJAAAAAAAAAAAAIQQQAIAAAAAAAAAAEAIASQAAAAAAAAAAACEEEBqNkqVn9NFHXMKtN7OqbOKl3RfwdOK2MkDbm2Bcx3OtRSU2hkHVuTx0QmOH1HRKNO+o1VUYWc1Mju3rNTcn12pC84052mGM3XBlRM198Wav8nIi/N03ZWD1cPd5nT1GfoT5T/9unbutSskEFn7mPKvuVh9TrfHOr2fhl5ToKde32nXSGDnFq2YM1E/uOBMe35d1OOCK3XdnJcUqeFYqRyw80+lDte3+X8ujq6beKjl87w3ovWPF+iaof30NbuPr/W7WNc4z3NZDZfWWNpDe3eq7Ong+ad3D9da+WMaZfY/6rHkf+cO1LkAAAAAAAAAB0mjCSA99NBD+tGPfmSn0GhFntaoflfq7o3/sTPQFGxf+BN944LRyl/4kjark7qfcZq6d5I2v/i08q/spwsmPavt1V7a79bmP4zWN64s0FMvbpE6Oduc1lo7X39Wc6+9WJf+PPE26++5WN/Im6q5y17XztbONuZYrXeqbNk8XTe0ny69/3VnrTjlT+uaAYM1quBprXYO1dlsc4Zzglte0lMFV6rP4KlaWm7XTdsBPP9U6nR9O7V5/et2vAHsKtXdFw/QpZPmaenrO9X6NO/aWu98XUvnTNTQfnm6b0P8lTWi9
thbrqeuHaCh1wbO/7SW2l7jPVwHe1/XfT+aqhV2MrGdWn1HXvhcos+T08Z1aRMAAAAAAACgsalqBH7/+99XmVMxw4oVK+xcNErvFVf9qGPnqlN/VFxVaWc1N5WPXV11qmmDO9fbOY3c5keqLjHn23FQ1S3Ph7+1T8oeqfpRd7Osc9WPHgsv+8/6mVXnm+26X131P//8j53reG9F1S3nm226V/3oybhtVvyy6qv+NmWf2Lmeyud/6e2vY27Vr14J7G/P5qr/ucQ7h/N/uaKqco+db3xSVvU/V3d3l9X2njtg559Kna+vrKrwG2a766v+Ej6VOvhP1Qs3ecf56tWPVL0W3N+eyqoXfjnIO4dv3Fm1LnBpjak9Nj10mTf//DurVgVPJfB9XvtMfRvqP1Xr7rRtkeAcfJVPXu21y/m/rHohsELseRpUddf6WrQJAAAAAAAA0Agd9AykYObR73//e+Xm5rrjB9q/vvhUf33nFc0tK3YHM27mAU1d2bPz3DJnHa6fqdsGZnkzrdanXa7777tcLZ3xFY8s02ZvtmOnls6b5U4PvvNuXdnVrGG1y9Vts29VL+3WinseU1k062OnVix81M28cLc5rbU328oaeKvuv7WnM1auuU+9FMvQ+OcyPWiqAba/XvfelKusQ73Zrtan6crf3KcrWznjKx/VX7Z4s1M7gOefSl2vL7JF600Wzhm91Dl8KrW386966g/mjC/U9BmXq3twf4dmacBNv9Nt7qXN01Mv+VfWiNpjb6mKf+1upJ/NnKJ+wVNxvs+pv7jcGdmtpxb+tV6lNXevvUeT52xR54G56m7nVbP3dRXds9I5mjmXWzUg8EjFnqctum/es04LAgAAAAAAAE3XQQ0gxQePRo4c6Y4fSCZIdFvJ73Tekz/UDS9O15zXit3BjJt5Zll9A0l+nzmjHo9IO19X0c150b5EelzwE90X7TNjtzabPjX8PkHOHKxrClZWK8sUv7+nCn4S7dfma/3ykvQhUkMfSBUvhfvGsf2bFK0Nv4pdX+As62dLO62cqj7J9lcL7j6d/eSvda5+49PKv8bvq+ZMXZCsj5VEfSBVBPos2RvR6jkTdWm/0931krdJQHwbOG3/g5/N0+q0+zNK3AdS8Pq8Y4S/+xr74rHf7VB7HXXrdyai7dtbq/Ox0uBzTrPzwlp+7TT1MyMbymMvvHeu0YolZuRy5V2YIHrRKVd55zif5Y9pxT+9WdIWbd92mrJa5Sr3rMQRj87de3kj70Six4psK1frTs76Q85R92Awwdequ07rY0ZeV/nH7pzUDuD5p1Ln6yvfrNXm8/+1Vwd3Rj1s2a7Np2WpZW6u+jn3QjWHdtLpZ3mj2z/wr6wRtUfkE+3udZo6n3aZBpxh5wW0/LINEG6r1CfeWO3tWqn8kfO0udM4TZ9yobLt7Gr+uVLFbmDvKg1OdC7fuFBXmpElK7Q63UYBAAAAAAAAGqGDFkBqDMGjjTu26QfLbtTircl7uzDLzDpm3fra/X/zNKrfxZryhy06xvTH0q6ldm55VndfOUDXLSnX+oKLdcG187Ra7dXdvOz9eIuWzhmtC8Y+nfBX9bv/71FdM+BiXTfnr/rkRNMfSJZUUer2IfKNi+cFMiuS2722QBf0u9LtG2d7S9MPiTOcuNvt32RK3gBd6pxrVFvvGO6r2lZZ3rpnhDNa6mr332fp0ksmau6yiLLNftvt1mbbx8qox2vT+c0WFV07WD8oeFplbl8/ndTStkmfwQVav8uuFrB9ydRoG0T7B2pZrtULC/SDfoM1xflu6mvn8/do6PnmGPa779Ta/e5NXzzf+Hn1TIXdG5z2cO6V6+Y8q7KdrUPfS+36ncnS4Pyn9Ny6TZra286K9+luVdrRqC1lWmo+zzlNnQPJOzHt1cV9ee7ct6/7d2dPjX7qKZW8NlfD29lZcXb/u/pZZw3J1+Ln1mnrFJPNkshO7f7QjqbrAJ5/KnW9vu3/LHW/437dO2jni7PiAsvO/V2b4ETPq7R48Wr93/zLnDsikd36pFqcvBG1h8kae9i5jxePS5gZtLnMC2O3PPe0Ogbbdmrpz8fr4V09ddvs69XrKDs7gYjzvWw3I2d1Umd3TpxDO5v4pONprd/ozgEAAAAAAACapIMSQGosmUc3vDhD731a7dV5NWYds259M5FWz5+n1efcqsX/u07PPfWUFq9+Tc/dZF6i7tZTkwbrvx/toNsWr9OrzznLzMveFbdqgFm6/MGEpbtWz5+lFb3M/l5TyWJ/m3wNaOVs83qB5i5L9Ya5XEV3znODJqP/sE7/t9rZhzmv55xz+MNV6uyc1/qbZ+opmwXQa4yz7MHrvWyVPtfr92bdp66SzUGol4cL79HmuLYpmXu5ew4rJl2t+163K6aycp7uXpKlK+c6beFez1K9+r9P6baBLaUt8/Tf8cGa12dp1PjH3DYw22xdt9RrA3P8h8epV6stKho/UQ+nXTotsaI5s7T7srkq2WSvL9rG0s7H79TDwev7+FlN/v49Wr+rpQbc+pRefXV17HtZnK/hnZzv5dd5yl9Z+5f3iZQtmacy57PlD/8r+nJ+53vb3eCFjj9Gx7hzquvQ2Ss3uWJzugG2nVr6+KPuWK+B5yQJZCSwYZke3OB8trpcuQkyPhJpVOefSpLr2775Jfez8tGJ+saVs7SuZXs3INraDSxP1NB+o/Xwxoa5B8w9V/wHM9JTuWele2UHtj0S2lWu1YVX6tLbSp31c3XbVed4Ae5a2v74z3Td07s1IH+mruxkZyZRWeEVeuxwXLI7K0tdunpjm7clCv0DAAAAAAAATcMBDyA1huCRMXP979MKHg09NVd/GHSXu67Zpn6q9z/S+TtXuEEi7dqtAfH9jJxykfK+Y0Ze1/p/JgoGJejP5JTLNPVGr1TZ0n+minpEVO5WgctVbp9wiarW51yvqT9oqazTyrV5SwO9pK5Jq8t1/2/C1xLrY2WL7v6T6XMkPb1u/V24r59Avyq7n/6d/hxtlp1a+sA9bj85A/IfrNY/UNa512v2L02QoVT5j9aij5dEEvT34rWxGSt3vt/Yi+ayBXfqqV1Sh1Fzdf8PT1Pr4DanXabb7rxKHZyzebjwMS8Toh5MBtp195gASk9NvTz28n33rn95IydmKXxn1J15ST/ZlJVzvuvxw9p7M1PZ5bT9xHvc6+x14+Xql2Z0oNGcfypJr2+LNr/ijW3e3Uv3m4CmCRKbgOgbS3Xv9zq5JdduvfoerU6QVVcre8tVNOnnbsZWyx+M0/BTvNmpHNj2iOOXrDx9gH5wz0va2cn5+7F0robX5TTKH9Otv1wpDczXbZel3sHuT7yAY+esBguZAQAAAAAAAI3SAQ0gBYNHhhlv0aJF2kNDMZlEi7ettFPJmeDRrWePd8ePPvwod5t6ZSEl6n8kq70tg5Son5HWyj7RG9u5K0H4YuDghP2ZHNPa671jd8qIR0u1dt+XPqq7f7tSmyPBDVpqwO3eS+uf9a7Lb/prp8NPLnMzp+J1HuTMNyN/WKN1aZTkk76j0cMSpBC0ytXFl5vreF3LXrEZ
J7vX66Wnzch3lDck8YvjrP+6yMsCe2yNm6VTVy2HJervpaWOaWvbNnptW7RuqTm/9sq7JHE2Rcveg3SxaavSv2p9PRIcTJm8/zZ9vpjvekbqzIv6MGUCR00yQcBOGv3QlITfdTW7Xtd9I6/Q3C3ONQ/M172X78cTTKFO559KTde3e6ciOk1Zx+aq4Pe3akCwhFzLTrr4zt/pNhNbLZ+nuxemm0GVwN5yLf351Zqy3Hn2O12lP/48N+E9F++At0e8j3bqk9NMCU3TR5MzveVRXXPxlZq7oZZh3r1b9PC1pl+3XN1262XqkKhPJgAAAAAAAKCZOmh9IB1M6ypThwL84JHp+2jsC7dGA0fpbJtU184NV+bJ6NQ+4f6yOqT7ov00Db/xQrXWbq0vHK0Lzj5dX+t3sa65+VE9taFcu9MK2DSMAd2SnHO7zjbAtkXb0wmWnNFLnZOknHTofI77udovK1VernXuyF919xUXa+jFCYarf+ets+t1bapwV66Tfh0SB6g6dPDOKWr3dm0x5bsU0cOTEpyPO/xSRe7KK1X2ljtSa5GVBbo0UCbv/u/VJXUjHbu1+Q8/0VC/TOD8RzQ1nYBkZKXy8/J099rdajnwVi3+zcF6uV/H808l1fW17KmfPfWUStYlyao5tJO+9X2vBN/6laUJ+0hLaZcJnjjP++NbnL8ll2v+H6eoV8pA0EFqj3inXaUn3Iwsp41eXa0/TOiplh+/pPzvj9dTaT+nzt+93/5Et5a21MUz7q5b9hIAAAAAAACQwQ5oAMmUqzNl63xmvKqqKu2hoWz8eJv7+eXDW7nl6UywKChZ8Mjwt80UWd/5nf6+eKZGD+rklvraXfG6lv7hNl138QB9rUc/jZpTqp0HIJDU8ssN8BLaOK5l0j5vqvm43GYV7dTmDa+rLOGwxVl6AH0csWXpdivyeqLz8YZIncuWeQGA80fNU9mu1ho8Y6nm/7BTtayT1sfbt+mffuJsUbOWyb66vRGtuCNPQ29+VjtbnabxfyjWbbmpQ6i7Nz6qawaP1tzXd6v1kHw9N/tydY4LbKwv6KKOpoRZ/DDqMTeYcuDOP6KiUQnOwwwFbn3IatK5vnRknWqDrnv/433WhhuwuVi3LtmplqeN0x+euFUDUn01jbU9Ds1Sv+sf1PQhzviulbp7oe1QzC9zl2DIX+scd+09mly4RS1/cJ+zbfqFDluf6JUIrfw0jb8MZDQBAAAAAACgCTvgGUjBIJIpYWfK2h0s//5ilzZ+vNUNFvlBpJqCR5mq9Wnf0dT7l+rVTa/pb0/NVcGE76hXu5bSrohWFOTpRwvqUSIrXV/Yz/raLaX9Or1Va3VwP8fpiTc3aWuNw1wND5YR219aHWP77MnVvS8nOo/wMLW3u3J6TLmySRfrgmgAYEXSzKOWx7T2gkpvRpSsp7Dtm70ykAmzq0w5su8P0Kj5r2v3sReq4Jmn9LNzUr+kN6XRhg6+TUs/bqnuEx7W3+uYeXSwzj+VWl3f3t3aWVOgsI7PjCldeOmAQMDmqevVL9WlNYb2qFFr9cv1/oZv/7/ytIK+Zc+b8o1Oe/xhtL4WH2TqZ8raOVZOVR933mgV2cwmv0Ro2fZkd1ZEmzZ6Y507pA6YAgAAAAAAAI3VQSlhd7CDSF2PPdWOSbeV/E6Lt65wg0a39vlJyuBRcNuMc2hLdTgjV8Ovn6knVr+mv91uS2Qtf6luJbJqYfXGLXYsTsVm9yWvWp2mLukEcF56XZuTpJxs3/yS+zngqzZg0L6zepnPepana1Ct26uze3ov1bk8XUJ7y1U0drAtV3aZ7k0VAOjUXYPNZ9L23KLX3OZsr16nxb0k3/mS8i++2CtH1vt6PbHidxp+il1Wg+0LRusCWxpt+G+WavH156h1kmBCrymJA2pb51/mlXU8YOefpeHzE5yHGaaYTopianN9O5dMVMcup6vH6MdsRlp1mzd693PLWpTG3PlSgYZe7JUu7HVjcXoBm0bQHpFlt2no0H762s2m36XE/rPHLsk62gvCtrtM8xOdhzO4gde2Xh9KCQfnnnADkK2y7LwsHWPPLeurPb3A8ytbvL9N8XaX6XU3Nvkd9erqzgEAAAAAAACapIPWB9LBDCKdmd3djnn8INLQjgNSZh7Fb9uklc7T0IsHq8f4pxP+Yr/DV9PtS6n+yha9pM0JSuVtXvaYmwnQ8rK+Sq/lH9PKlxK8Yt61Uk89aub3VG4v+7q9dV8NMGWvtFLFyxIHsHavLdA3T++noXmztD7Zm+sGdZoGXGYiSLv18FNJXpa7pbnO1AUXT9RT6SSH2eDRlOW7bbmyfF2cKqATbZtHVfxsgrtjy0t6yvTV1P4yDfiqN8u1q1T5l16puU5zutktfxqnXmkkqmx/fLQumOpcry2NVvCdBFlBtXGAzz+V2l5f6+69vODmS879n+jW3PWSiueZUm0tNfxcr6RaKuZevvQHJuvGK134xDU9kwZsohpJe2R1dv4WvR7R7j88pqUf25lBe7doxWNeQG1wj/T+UvQa4/WhlHB48Hr1Myv1uV6/d+fla7AfpftqrvLM6W6Yp6Vuf2Vhu1/6q9dH2ZABqTO7AAAAAAAAgEbsoAWQjIMVRDr68KOq9Xtkgkhzy4prDB6Zbcy2GeO003Tmli3aueTnum7BFu0OBnBM0OGBR93Rzmd1r57hsHKLNjVk30ilt+maO1YqEthnZLkz7zbTb0qubvvxOdX66Ulstx4eP14Pvx4IGux8XQ9f68zbJXUY8/NA9kRrDb5qnDo7Y+tv+4mmPP56qL+n3Vue1q0/n6ftuyLaPSBXvdI7gXrrftlEDWjlHP8P4/Xf97wUahN9XKr7fnqbVmintp86WAPSiLNEFt2mW5fvljpdpT8Wp1GuzBVrm6U//1m4PStW6taxt2m9Mzrgp5epezQIsVvrH/i5G2xoOTBfi9MtR1bhtPMvTbCsk0Y/VNwgpdEO6PmnUpfrO+Uyjf+BueFK3XNdEcyQ2+ncAyNHa265Oc87Nf7cNG7M3c42zr282XmKBsx4KmnpwrBG1B6dLtL475jrfFaTJz2qsmBM0Hm+i37+E91q/lR0GqfRF+7nqM2hp2n49blOS5br7onh72bn64/qmvGPOtfWXqNHXuhlQgEAAAAAAABNVIsqhx0/aEzgyASQjK1bt+rUU/d/mTgTJPrBshv03qfpFWc78ags/WHQXXUKIEUeH60+k1ZKY4qrlXEyL4jzc/I0V7kqWF29n531BV106RxpwIzVmv89L4xT8/4cawvUMW9e3HL/OFfpiTeneNkNjsiyqfrva0wZKYcp19TJHGO3tm/Y4mYluSWrHhqn7n6n9nud/fRw9rPLWdbuNHXOytVUE5SoY3DFv77uA3O1e/lKbfbPIbJFZRUm/6aTrpz/SLiz/kTXZ7Jy3H5LcjVg4EtasXy3Wnc6TR1aBa5l4K1a/JvqHfRvX/gTDf3Zs14W1rGd1L29czG7ylW2xXtL3XrITGe770Rfnidu/4iKRvXTlJXh7zHR9xfk7yt+ueng/9KRs1Rm+sDx22RvRJtNFoYzy2Q
SmWBQr7hrqSbwfaUWvjfMfVB2/xW69Nel7jHd9mwZO4fO3/ud5t95YSyoEP0O0pCbrxJbbm59welOG5k9pja6uDb9Ph2Y80+lztdn+h0ameeWjnO+cWWd1knZzn0WvQfin80aRO/ZNETvxUbXHqW6O+8K3fd64vYwpRnv/32+BqcTG0vFv/Zk12X6E/v51V5JSP9cdvt/M7zygH+85jRnDAAAAAAAAGi6DmoGks/PRDLDgQgeGSYQdNe5k93AUCpmHbNuRmUfWVmD8vXEc3M1ddg56t56p8o2vO4M5Wp52oUa/ZunVPKnuBfUh/bUz/40RYM7tdbuCrPuSm1Op4xaCtmDbtUTi/N15WnSZnMOO1ur17Ap+sPqpeHgUUqdNH72Cv3h+guV/ZE5vy3OLHsts6sHj4wOw36nkr897LZBZ23x2sDZrPO539HUh1fr1ftiwaMDxQQHFv/9Kd075sLY9/L6TrX2v5en0ggeGf98SUvTCh4l0lLdrynW3x+eoovP7SRtCZ/DE8Hgi2Pn+pfSCzaEvK7VS9ILJtTegTj/VOpxfaa8259Wa/FvrtLg01pr5+tx90D8s5nUTq37e3rBo6DG1x7O356nVuiJGQnaY0axSpY2UPAoHYe21+A7i8PfTfRvhnOOBI8AAAAAAACQARpFBtLBZDKRZq7/vRZvS/yC1ZStm9jrRxkZPGoMUmXopC2aLRGfRQMAAAAAAAAAAGqr2QeQfCaQtK6yTBs/3uZOdz32VJ2Z3Z3A0X5GAAkAAAAAAAAAgManUZSwawxMoCj3K3005vQ8dzDjBI8AAAAAAAAAAEBzRAAJ9VSq/Jwu6ljroUDr7R4AAAAAAAAAAEDjQgk71FO5lt7zmMrsVPq6K+/6CxWhhB0AAAAAAAAAAI0OASQAAAAAAAAAAACEUMIOAAAAAAAAAAAAIQSQAAAAAAAAAAAAEEIACQAAAAAAAAAAACEtPvroI/pAAgAAAAAAAAAAQFSLKocdBwAAAAAAAAAAAChhBwAAAAAAAAAAgDACSAAAAAAAAAAAAAghgAQAAAAAAAAAAIAQAkgAAAAAAAAAAAAIIYAEAAAAAAAAAACAEAJIAAAAAAAAAAAACCGABAAAAAAAAAAAgBACSAAAAAAAAAAAAAghgAQAAAAAAAAAAIAQAkgAAAAAAAAAAAAIIYAEAAAAAAAAAACAEAJIAAAAAAAAAAAACCGABAAAAAAAAAAAgBACSAAAAAAAAAAAAAghgAQAAAAAAAAAAIAQAkgAAAAAAAAAAAAIIYAEAAAAAAAAAACAEAJIAAAAAAAAAAAACCGABAAAAAAAAAAAgBACSAAAAAAAAAAAAAghgAQAAAAAAAAAAIAQAkgAAAAAAAAAAAAIIYAEAAAAAAAAAACAEAJIAAAAAAAAAAAACCGABAAAAAAAAAAAgBACSAAAAAAAAAAAAAghgAQAAAAAAAAAAIAQAkgAAAAAAAAAAAAIIYAEAAAAAAAAAACAEAJIAAAAAAAAAAAACGlR5bDjjcguLZzxL42vtJOuw7ToruN0pp3abyo/1ugZn2tJ9hEqnXSssu3spuSN2ddo+lo7EWfguPt1RS870Zitn6ORJWfpobE97YyGEVl8mx7Qj3XT0HZ2TlNQqkeunqPldsro/N1bG/4aTJvPKrUTPTX5jg564qZF2tzuIv36jm8pyy4BAAAAAAAAAGS+xpeBZAI4N8QHj4w9uuiGSi2sNh+1sXzWbVpeYScaq1Ago+GY4NGNf27sFx+n4i+6Iy54ZGz+820aObth2+iNErO/drrijvv10INj1K3dt3TTg844wSMAAAAAAAAAaHYaWQZSIPOox5F654rW3mzt1G9v+EzTzWh0vj/vME3usUfTX/WXKbauNXnkCfrp6WYsts19g/Zq/DJ76cFjBTOQhuxTz4f2uLOHDDpacy9o5Y43dn4GUny2kR9A8bNXqq9nM12iGSd+5ktPXfHd9/SIH3zpPabmzKC4AFA4Wya2z8kmSGFm2fXd9U5aFBc8MuudpbWpzsMEWuKyZfzrNdc3+N244FGTyKqp0PKbbtMjzmmH2jDavibYc6sG2tnhzLPwsth3f5FO+fMiG5Dy14kdJ8q07XcrqrVptJ3tOr9uvyjaxk0isw0AAAAAAAAAkJbGlYFUuVtLTPDIBG+iwSOjtX466QgNqTbfsMEjx+Seh2nhjHDwyJj+0IdaZ8c9e2LBI+PVzzT6uV12wqr8PBo8MpYs+3czzn4qjQVtjLVz9Mh6Ox4vQfZQw2XLVD+POxYHox4ZpqJUL5nLa3eRfhwsV9drjCb3NoG/ZMEjo0KP3HRNte9pczR4ZDjr3PcXRexUak77+8Ejw2n/B9bYcQAAAAAAAABARmlkAaR9WuJ8DOnZsnrfQ9nHam6SPolMhtE7d5kso1YaNskb94ajdZ+7wV69HRf88bd5Z+Rh7vSS0t0Kr9JC99l9Lephpqu0vYkFkJbPukYjr44NXgZOT11ah75zTIbJQw/e7wYujLfeTRS4qdDyJ71Akb++KYU20MxYuyi90nm9xuihcTaryGTB+FlKVnS/dp3Na0rTCoBkDb1Vv/6ud90mm6dJlGV7d7sbrOnct2e1c+02NpDxU/EXPeEGj0y2Vrh9lj8ZFyAy2URm+R0XqbOZrtiuD9ROA++4VVe4zWNL2CXIMIsstsEnfx/Od3NKRQYH8AAAAAAAAACgGWt8fSA5llTEMn/02of6yg3vB4b4bKLD1N8tTxe27hGzbqK+lBzZR2iEv83pR3pBpsp92u7N8WQfrv42WtWhXQtvpKlLEJBJT0/1tsGKbn2qBxaiAhkzg6PlzHpqsBu4qdC773pz6iy4314XeQEPNwCS2TaXv2fHEousLfECTd+9KPbdJmmfaDCqXU+d48XT0vZBuRcsGniJH3zzv1sAAAAAAAAAQKZpXAGk7EM0xHy+tzcuGyhdpg8lEziq1NuDghlIceKDRRkqPmtIa1/RG3Y0oYoKvWVH66RdO51iR2sj8m7NAZKoWgaL/IBHk3VSBy9LyLmO+CwrU7LOL9+XddKJ7icAAAAAAAAAAA2lkQWQWmqImw30uXo+stObd/pxthzdkZrszUku2oeSnz20J0nZuT1a9Zodfe0zL0sp+xB18OZknG5j/fJkpZqeoC8ivxydn8lSdyfqJDfrZZGWRvveKdVSt3ReO510kjfH8568w1boH2vSDfSUaq2/3/WL9Ii72w463pvjiQaZnHVDfQI1QX6WkNOeDwT7elo/x+3vyPQt5fZxZANNpn+jaIAwWfvUw/HtvWyjWFk8/7sFAAAAAAAAAGSaRlbCrpWGjTzCy0J69bNA2TozfKbpZn6Pw3Wm+ayJCUAFt0lg+kN2vw955fIS9ruUMdpp4Hjb583aOV7QweEHBEwgItZHUn04x7nE9r0T7X9pjtdvTu+LNNA9nA0yqUKP3GSW3+YFOhJxztUsD/adFN3vLC8QFivJ5mc/lWp68LgJeNc7p+ZsrEYh1p7+dxS8dlOS0O0Hqd23dKmbZeZfe2ydWLm5+ssa6nyHZqRikW60bfyW0+
4AAAAAAAAAgMzT+PpAyj5WcxOWnjtMi0wm0hWt7XQCzra/GhTor6jHkXpn5GHOSJWWvLrLm+c6TPfFrTf3glZ2IkO1+5Z+bPurWT7LC55kDf2xzUzyDBw3xgsQ1EevMXpoXLifpM7fvVUPjfXnBYJZrp6aHLd+tP+eanrqimCfO73H6Kah/rSzLLifdhdpclz/PNEASFNi2vPB6t9LuE1NllmgVKGrna64434vwNRgnDa+I/bdmXP4cV87AQAAAAAAAADIKC2qHHa8Gdip37pZSSYYdVzqTCY0EqV6xM0o6qnJD45RNzsXB9j6OTazKfY9mL6YTDk9099WwwarAAAAAAAAAAAHU+PLQALQONm+loKl8kzwyASUehM8AgAAAAAAAICMQgAJQHrafUs3xZcbdEvlkRUGAAAAAAAAAJmmmZWwAwAAAAAAAAAAQCpkIAEAAAAAAAAAACCEABIAAAAAAAAAAABCCCABAAAAAAAAAAAghAASAAAAAAAAAAAAQlpUOez4QXH3sn9r5nOf2qnGaeIFR+lng75spwAAAAAAAAAAADLbQc1AagrBI8OcozlXAAAAAAAAAACA5uCgBZCaSvDIRxAJAAAAAAAAAAA0Fy0qKioOagk7AAAAAAAAAAAANC4ttm3bVtW2bVs7CQAAMkEkElFOTo6dAgAAAAAAAGrnoPaBBAAAAAAAAAAAgMaHABIAAAAAAAAAAABCCCABAAAAAAAAAAAghAASAAAAAAAAAAAAQgggAQAAAAAAAAAAIIQAEgAAAAAAAAAAAEIIIAEAAAAAAAAAACCEABIAAAAAAAAAAABCCCABAAAAAAAAAAAghAASAAAAAAAAAAAAQgggAQAAAAAAAAAAIGS/BJCqqqoSDgAAAAAAAAAAAGj8GiyA5AeJ9u3b5w579+4NDf58gkkAAAAAAAAAAACNW4MEkPzAkQkUtWjRQkcccYRatWqlL3/5y+5gxs08s8wPJhFEAgAAAAAAAAAAaJzqFUAKBo4OOeQQHXnkke5w+OGHu9M+M27m+cvNNIEkAAAAAAAAAACAxqnOASQT+DGDCQSZ7CITGDr00EPt0uTMOmZds43Z1t8PAAAAAAAAAAAAGocW27Ztq2rbtq2dTJ+feWQCQS1btrRza+e1117T6tWr3YwkU94uGbP86KOPVvv27dW1a1e3JB4AAEguEokoJyfHTgEAAAAAAAC1U6cMJJMxZAJIJrBT1+CRcfrpp6tz584ps5DMsT755BOVlZXpqaee0ubNm+2S+vpAS244Q2ecERvmvGoXGa/OcebNUXBWfb36gHOcB7w9BsdD5xKdl67q13H9Xz6wyxrY+0t0/RnXa8n7dhoAAAAAAAAAAGScWgeQ/OCRyT5Kp2RdKr179472hVRTEMln1n355ZcbIIhkgi4DNLnLo9qwYYMdHpUujwsiHSivPqnJyyboUXMeP+5hZ6bjVc05o/p1dJs0oA6BKAAAAAAAAAAAgHpkIO3Zs0df+tKX7Jy6a926tZuFVFuvvPKKdu3aZafq4h2VL5Mm9A0Ga3rokhnnq3CNDbz0GKMNG8Y4c/ePHj+OCxYNaq+v2NF0vfrA5SocNF0rQkGnHhrz6ATpvpcbNHsKANC8vb29XAW/vsf59/czOyc5s45Z12wDAAAAAACApqdOGUgm+8gEkBrKqaee6mYW1YZZf+PGjXaqLr6i9oMUCxZZx3/rnlhQJ1DCzis3tyRQKs7MN9k/wWlP4nWri5awM8e5vFBaNlkDzrhev/t1XAk6s/yGJapelO5VvXyfNOGKITrezokKBb+8EndzHjDXY4/pcI8fPf9AWTp73Uv+cn10ebWsrPV2X2Yg0wkAmoU/LijW//1zowp+PbPGIJIXPJrprmu2AQAAjdFGLRwxQgs32clGqR7nuGmhRoxY6OyhHj5apZkjZmrVR854Q+yvHjY+OcI5vh2e3B9n4bV19Bj3rtIOuyTEtEPcsh2rZsa2c4bw9xXe78xVcXt129VfnqR90zhmaNvQPs1gv8OQHVp1b/Bcvenwdsm2BQCg+ahVAMkvM2eCN0cddZSdW3/Z2dnRfZshXeXl9flV8/EacuN0nX/f5dFASMp+g+57TrrRlIhboemDCnX5GS/rbFsyboIK9VBw+/smq/wKr6Tco+OddRMGgCwT7DEZQ85eHt1wj35y4QQ9v7wkuv6rawp1/sA+1YNE77+jN3S+2rez0ykUbmqvFX6JvFfn6PJN071p9xyf1+Q/BwNBhZr89kh3mTm3wsuD/R456759trfsea8ND0rZPwDAAfXTn1yjDh3a20ykxEEkP3hk1jHrmm0AAEDT4wYs9kugArVlgiW3vD1Bsxcs0IIF0zS8+JbqgZh6McGTW7R1wmxn/+YYs913HGPjv38TmLm5yE5YzryxhR01zd3OGW4frqKb/aBL3H5nTZAKx8aCNiZA5+xv+O3etrMnbNUt8YGrRMd0VL5foj7R8zXDMHW1y3ZUbpXypgWWTVT/tnahy5zXWBWusZOuNup/nb++N0zLc2bnDY3bFgCA5qVOGUhmaMgA0pe//OVaZyAZ//rXv+xYHZ0wRPfYAMqKGefredNvUKJsG9/4kRpyghk5Xl/pYqbPthk+XjZTyKDpusQmMvX47nSdv+w5lUQDMCn0OFsTouubLKPzdUGvauGjhMJZReFrCQWhTNDqLj9z6QO9U+0XXedr+nf9C7hE0wc9r+fW+yGtwLITvqJu3hgAIMO1anWkfn7jxKRBpPjgkVnXbAMAAHDAdRmmYFCh3hp6f7VggiU6OUtt3Kmu6p4nlbxf6U41DC94MrG/dwQz3f2sPtLbkWgwxw0o3rxVw/Oc+UHx7dKlu4arRJUfmom4/bbtrrP7Slsrvb3ueONlleRN0zDzfsXRpv9ELbiuv73OGo7pnFXkbaljtr9mmBtcOiHbTsVxs8rGqvDk4c551mDTQt1SPFzTLjkY3zgAAI1HnfpAqkuwpya1zTzaH9zSdTaQVHh54pJztdLlK7FgTa2DLD109ngbsDFZRoMucP7jxy4Kcvf7vMor7LTD7VfJDYqZrKgavL9E10cDTb/Sc3Z2TDd9JXpMGzADADR7yYJIBI8AADhwkpUz87KGFgZKhsWV3wqW9nqyzM6szs14MVVoi2+J7d998Z5kvwmESowFskoa6hwNb1+rAqXH4suYedM1rmekc22B/fnjCwPfQzgjKFy2raYSfObcZjrt4R7ftJPZ973Ovt1z9c4l+4RgMGejypzvZvgZscCGe33usZz1VwXO08208Zc5Q30yys4wGT0TNSDRu4l0fVSml9f00dndTOBnh8peqSHQYyQ9ZqUqnf1kH2cnQ2oOLklZGjprgRZc0t1OJ+K02zNF6jNhwEEJGAIA0JjUKYBk/Pvf/7Zj9VfXfR199NF2rA6S9Ct0/EkNlE+z6Z3Yvt1Sc7XTo69Xxu7V9c9JicrXuUygSSp8pIbyeEm8+ufJen78ozbYdI9GVgsQvaF3ohlTiTKUAADNVaIgEsEjAAAOjHA5s9ma8HZcObPirco2L8jdElwlKnzYD95s1
MJgubATtqp6YTCPyQTxyndN0wI3A8PZdlyhOkZLjTn//9BxcUGYoE3hsmbTTo4rh9YA5xhVXKjKb/v7KqpeAs2XdL3wtS24vWPN1xZVpK0n2BJqtw9XSeF8G3iqXrZta7SkW2IlxdJQs66ffbPGuWr3XL3Sa25mzrcrNdYNBC12287P2vGCfcNtW4+SXom12I5V81V4sl/KzbtX0utPaqNWFJaoz1ndo9lAXbukF0rZsWqx0zLD1T30jsEGspx2LokrCdcxuzIQbAu3e9JjfhTRVjn3zTh/u2CQzgSXnG/n5tiy0PPRtqu6pipJt2mFCtcM19BoRhYAAM1XnQJIhxxyiHbu3Gmn6u+9995TixYt7FT62rdvb8fqwC0TN1kDHgjmGn2gJY8UBkrT1UOgZJ0brEmWRZSMPb/LJz2vbiclDh8ZPX78aOLruOFyOVdSMz/I9f4SPXSfOycgULLu1Sc1eVn6ZfQAAJkvPohE8AgAgAPBy9oY/m2/zFcb9f/2cJW8UhYLmgRe0LuZK75NZaEX+236D625hFdQwm2LVJYkGLFxQzh7o2vuBPUpLosFBxryHPtO0AC7vnucNS+rLFGwJtl65piBZeoyQBP6Jr+2mECA4bhsRa/CzbIJLGvbX0PzSvTyGwnDWp687nGZLuEAjJth9Ey2DRqOkh72gyI2iyfa1t79EFK82AavvHJyfuApOS8AZtpkVG0DKG7g0Lk/b48v9ecd2w9iBTOhim4uU3f3ulIEAIM+rJRz1Zpgg5Chfpfc4JLTgn5AML7fpTSY+7f6dwIAQPNU6wCSCfSYoSFLzm3dutUNStWGWb9r1/r8c95DYzas0PRNlwf6DBqg5wau0IYf1zt8JI3vpvLzvf1eft8EPRrtbyhdXnaRNEFn13g65jo26FGFr2OypmuFM39Mkm29fpkma4BZ/9fSyBnnO/+xF8iaco7b7W2vT6gzLi/UhEfvsf0/AQAQE/z5R+1/CgIAAGqnenbFiJuLpDXOfLtGMjsqt0p9sxUrGOaM9/XGQuXm4su7OapvG1N9W6+EWEnh2Nh8k3mirYrUkIVj1Okco30DOdpmqaMdrSbJeu4x1xTa7B4zjFWh08Z+Pz215gY3inRLdH8j3HKAbp9FoVJ5NZe2i7El6+KDhoUrqn1P8Uzmkpct5h/TD7IkOw8TPHKuXxM0O9AXUVpM6T2bPZY8SGUDXIFgYrVAY7IAYJDb75KXneVNm6CfDdK17a+JCwLn4AbwnGdmQ6rW8nn3b7BEIAAAzVmtojZ+8MgEb8znp59+apfU3Y4dO7Rx48bovs2QjrPOOkutWrWyU3V1vIbc5fcZ5A33fCsQ5ukxxpk3xs1GcvsWCgSWwtPefkLb6mw3sOPt19uHEdwutI/AsXxfOfn8tLOhYn0f2SEUsEpwficM0T2BdXuYPqDiglxnB/YZDUS52wWDSV4AK1mgCgCQmeL7PAqWszPLAADA/uAFVKLZFdEhPuOjujbZHeMCTV4wynBLpNWwr+rbxlTfto2yTjaBAVvCLToEXvgnUadzjPYN5LDZJwklWc89Zl+/JGBsmFjX8mVuNpJfUi4wmFKANrjhz0udDVR/wXaLlgtMeB42eGRK3tUpeLTVzQhKdU2xIKF3n1TXUVmpSswlkbzfI+d+rKmvpSDbT1Pi/pUAAGh+6pSBdOihh6ply5b6+OOP7dy6+/vf/+7uL90MJLPe2Wefrc6dO9s5meoDlSx/XhP6EpkBADQu8cEjU7Yuvk8kgkgAAOwPbdT9rD4qeiZW5sstb5ZO2a8u3WXKzi32+4Nxy8WlyW7rl3VL3M9NTNczwhkyXvZQGv0K1eUcAxkrG1cWqqTv2eqeKACRbD1zzDWFWuFn4djsnNqUPAtp211n9w1cg3PVpo+fUD88tdJV3U0GTfQ736FVz/gl1uKzkewyy703AuXi3HKBwUysgI1P2swjt8+rWjDt5WYeJQoQmqBU8NrDfSvF3yfu95JG6Tj3fgrc825fT2vs/WiCWcF7zZm+pbiPzu6WZkjMZJAlu4cAAGiGah1AMkwQ6fDDD3c/Tf9FdbVmzRq3fN1hhx1m5yRmgkbHHHOMunfvrosvvjjzg0fvL9H1pgxdl0fJ7AEANCqJgkemz6P4PpEIIgEAsH+YjJJpJ8dKrt1SPFzT0soY6aphtj8Yt3TZM9JwWx4uEfNyX8W32Bf13rZbbem8sYXShFk1ZD11GabZE7ZGy7ilXD+qdufoyuuoSluirca2SLpe+NpMuT1NmF2P7CDT3880dYyW8LtFRXnT6p7R5Oh6yWxNkP+d2ywhP9ATauuxqjw51geSu53pc8hdZr6HjpqWMEDklckLl/IzQ+qg3443XlaJ8xkqq+gMXtAoRVuYUnS3K3qf3PJ2egEs8wzMPuvl6LmG7q+4fXpl9VJnv/ncDCkAABDVYtu2bVVt29bupxWm/6O9e/fqiy++cEvQmWBS+/bt7dL0lJeXu9u3adPG3d5kIaVbvg4AANQsEokoJyfHTjWMZMGjILPOnc462511TnbWmZJgHQAAgIZgMmxuUSCYkkS662UEt5ycNC2NsoYAAACp1DkDyWQFmcwhkxn0+eef64033kirTySzjlnXbGO2Nfvw+1QCAACN129/d3+NwSMjPhPJbAMAAAAAAICmp04BJMMEfEzWkMkeOvbYY/XlL39Z//d//6d//OMfeuedd7Rr1y43U8kMZtzMM8vMOmZdsw2ZRwAANB3/PSJPX/tql6TBI58fRDLrmm0AAAAAAADQ9NSphJ3PDxDt27cvWtLOBItMltG///1vd9owgSITNDrqqKPUqlWraODIzzwigAQAQMPaHyXsAAAAAAAA0HzUK4Dk84NI5tMEkoLjhp9lZAJGwXECRwAA7B8EkAAAAAAAAFAfdS5hFxQMDpnsoiOOOMIdjjzySHfwp+MzjwAAAAAAAAAAAND4NEgAyTABIT+Q5AeTgoM/318PAAAAAAAAAAAAjVODBZCC/CBR/AAAAAAAAAAAAIDGb78EkAAAAAAAAAAAANB0EUACAAAAAAAAAABACAEkAAAAAAAAAAAAhLTYtm1bVdu2be0kDpYdO3Zo165d2rdvn50DND6HHHKIWrVqpTZt2tg5ABqrSCSinJwcOwUAAAAAAADUDgGkRsAEjw499FBlZ2frsMMOs3OB5N77NGLHDo49H/2HIBLQyBFAAgAAAAAAQH1Qwq4RMJlHBI/QlJh7FgAAAAAAAACQuQggNQKmbB3BIzQllFoEAAAAAAAAgMxGAAkAAAAAAAAAAAAhBJAAAAAAAAAAAAAQQgAJAAAAAAAAAAAAIQSQgGbi0z279HLFP/SnjYvdwYybeQAAAAAAAAAAxGuxbdu2qrZt29pJHAzl5eXq1q2bnQJSe+/TiB1LzQSJHix7XMu3v2TnhA3scI6u7v49HXVYKzsntR1vf6D27dvbKQCNUSQSUU5Ojp0CAAAAAAAAaocMJCCDbd1Zrmv/lp80eGSYZWYdsy4AAAAAAAAAAAYBJCBDmcyjX62drcpdH9o5yZl1zLqUtAMAAAAAAAAA
GASQgAz14GuPpRU8Gti+r37zzV+465ptAAAAAAAAAAAggARkIJNJtLx8jZ1KzgSPrv36D91x0weS2YYsJAAAAAAAAAAAAaQmp1T5OV2Uv9ZOWpHHR6tjgvmprC/ooo4FpdXG66sh91VfjelcDpQNH2yyY8n5wSPT99HU1fdEA0fpbAsAAAAAAAAAyGwEkDLB2gL1mSQVrN6kqb3tvKQSB6CQWUxQyDjqsCPd8nQmWBSULHhk+NsCAAAAAAAAAJovAkhNXcVjGpU3T6OL52p4OzuvFnpN2aStU3raKWSaT/d8pjc/2e4Gi/wgUk3BIwAAAAAAAAAADAJITVqp8vtNlWasrp55tLbALWnnD6MejzgzIyoalae5ztjcPC8LqXp5t83OOv52BVpv5zY097jR8xutogq7wD3vwHFNgMxfbpflB7b1rsvnZVf5y8JZVsHrChwvQ3Vs3d6OSb959WEt3/6SGzS6tseVKYNHwW0BAAAAAAAAAM0TAaQmywaDxhRr/vey7DxfqfLztrgl7ba+6QzFV2nFpHlarywNn1+s0c4ao4uTlLubM1VbxnnbPTFmni4d9ZhzpAa2tkCXbsxXiTk39zgrNeWhdPsomqfNnVcHrmuqDQZ57bF5hl22Ol+b8wKBojnPSvmB403dD9fViJxxfBc75vGDSAM7nJMy8yh+WwAAAAAAAABA80MAqYmam9dPU5SrAXPyEvRn1FNT34yVtIts2+KNpCM3X6NtYKnXyHwNWPmsXmjobJ3eU7R1/mXywl4RbdrojqTpKo33A2btO2mANyZVrNSylbkadK5d1u4yzQ+0gcaMi4536JzrjWSwow5rVa3fIxNE+tPGxTUGj8w2ZlsAAAAAAAAAQPNGAKmpys1Xyfy5mj4jV3ODmTYuk40TK+U2eYmdnY6unW1gx9Guszrb0RC3rFxs/9UDWCmEtp+qZXZ2vZRv0Qp1Upc69AOVqa4+/TJlt2prpzx/2vhM0uCRWddsAwAAAAAAAAAAAaQmavQ4L4Mn63tzq5dkWztPU1ZepSdsibj54zrZBbVUsVmb7WiIm93j7dsMCUvh1WD9Q1O1Ykyx3X6uxne1C+rDzUbaok0Z3rdRbZhMol/0vqZaECkRs45Zl+wjAAAAAAAAAIBBACkD9JpSrNErp6pPQbAfIT+YElHRrHnunLTMmRXNZnIDPbkX6rz9kdWzcbMX8Kp4TPfNced43EDQPC21WU2RF5/VCm+0Zu1yNSh3pZa96IfRSpVfl+yoDNOxdXv95pu/qFbOLsgsM+uYdQEAAAAAAAAAMAggZYSemlp8lTQnTx1NEKn3VSrIXakp/bwScRqXH8jO6anBY0wfSl006vFozlLMmAulqV55uUvnXKUnon0V1YE5n2ipOjN4pfa8vpWmqo+Z55ze+Bm5sYBSu8s03p6f2WayLtRod2epZGn4/GJ1ntTPHitPm2esrnV2VCYyWUXXfv2H+tPgu532GKvvdx3qDmbczDPLyDwCAAAAAAAAAAS12LZtW1XbtqlLXGH/KS8vV7du3ewUkNp7nyYI/h1AO97+QO3bk7EENGaRSEQ5OTl2qjHYqd/e8JmmZx+h0knHKtvOPSBe+1BfKT1c71zR2s5wmHkPSYvuOk5nujPs+bnjVl3PNdHxGkDlc5X6hb6suRc09qB/hZbfdJseqZAGjrtfV/Sys12leuTqOVqudrrijov07k1mvKcmPzhG+/+/hPxj+8eLn64vs79X1Nvu643Z12j62kRt0Fz57V1d7dvI3GMPSONv1cA0MuUji2/TjX+uUOfv3qqbhrbLoO8m3WctRTtV/EV33LRIm9tdpF/f8a26/3isGYu/x5CO5H8T6t2O6+do5KzSJvt9+H+jEjqgz2n437WmLfb30gjfG8F70f+b6c9L878R/L+j7kTc3117P7pq8/2Z7UrO0kNje9oZtVSH58D8LXtAP+bvGAAghAwkAACQudxA0R47EbOu1JmXfYg6mInKjzU6PnhkVH6unjdUamGlnU5HkuPVlwke9VxWZacau3b6f329Fw/LS4LldR3rX/Fe0PS+SAPb9dQVD96vhw7ai6mGPH71F6Hdxpp9EzxKx/JZ1+iOxfatXkrhl4DpyBp6q/tdZN4LsXSfNXcO0GRs/vNttfibgP0jeYAvE2wuf8+OOfy/lw2mQu++a0cdb8T/fU5HMOh0gPiBcAAA4hFAAgAAB4HJ+HlfX7nhQy18rtL5NOPO8MhOu9wLmph5o5/72K5rhmBAJ7aPdXaOG8Bxt9kVDua8+llgvV16+z1pSM+WynbGFz70uZaY2T2O1Dt3nWCHo3Wfm3pUpfHL/HOq2/HWPeKd+2+d6xjtbh+3DzeA5cyb8bH8S/Ov/bevhYNHS5b9K7ReY5XVu486m5G1r+gNd47Hf4kysI/5Na15OXWNRl49J7aOeWHizrPDTX/xStwa5te9cfPMyw6z3iPr7YzoPmNDbFm88PH9fcUP/vbVlkfPI/iSrVTT7TbmF+TB7V1x1xd6QRq4vuV222rrVLu+27S8yb3rMb/m9oJr7jDO+2X15j8/ELoWv/28wb9HgsGjCj1yU6x9kn8/sWXJX0g33XZN71lz1HTvxUv5rJnvwbbT4th+vX36y8wQeLYNf7+h9X3B7bwh+bPbhKX6HoJtNLs0rt2NTPgbEC/J34TgC/64e8e0TViwXeLuuybMZBZG28Uf3OwV/3kJXmvgubT3RPjvYvheif4btTjYtv7+Ev+7llHPaeBvZp0CPCnEgvqlWpssmyzZ38Rg8Git+ZsR/F5qav9Uz0H1788/prlX/OCRCeBG//7H/3dZwv0CADIdASQAAHAQ7dH4YGbNq5+5QZOgJcs+D2QHVWn8jEDwpS4qd2tJpXTGia2i4265ulDJuVYaNulITTajr/6ndllISUx3rsMNVLn26KImEAiqs3bf0qVuP4SlesJ/IVLxFz3hvkTpqd6JsnLMi5T4X9tWLNKN1V4UJmNejFT/tfTyWQ3wsmP9nOq/ynXO7YH4l781Cb4QstyXNPHX5+z3kcDLplhgJdH1mSBKE3+Z02uMJrv3SuwX29XLN5VqeiCYUU29vp8m3q7pPGvp3nu15rTTn2P7MPfqHaHsMOd7849hnu9oeSePOQf/5ecbs6tnlS2flQnBkYCU30Opc98F2mjtHD2wxo67MvRvQCoJ7h33pXr0b0J8uzj3Xfy/JRnHzz4s1Vo/gFBRqpfM89Kuj/6fsygYEPAkvleW/znYtoFnNoHMeE7baWDvYNs5//aUm9k9GyZb09+PH6CqqNBbzkfn3j29YL8vxd/E6lL9N07q5yDR9xf/442QRP9dluIeAQBkJgJIAADgoJo80sv6WdTDm97w3i5vxGeCO6GsoD1aFRdkSuj04/TOyMO8cTe7yPZ3VLlPS9RCHcy+3HHHiYcm6Ouotfrbc0pLsuP5ohlONjBV+YVWpRFByr4gW6WDWrjjQwYdrXcOdB9SddTNZj5sXlPqvuSLrC3xXpT0PqvmknG9x4R/bZ127f92GnhHYLsHb9UV7sug9/RuGi+3/DJnZvA
CGo52F2mweQHfK+6cQr+QN6XwxmigO8f7JX31snUVWv6k98Il9otyu83aRXEvb0zfCWa5f/7hUjjVfq1/0EoANpzj23tv7d5yvyj7a23TT4S9xl9/11lesUhL15vv2G8Xr53csnQ1fj/parrtWvOzVpt7r/ZM3xpmn34QcLPs92a/A5VXhM4p/hyWPxkMDPr3vj+k18dV05D6e4gsdj7NdPTeH6NTKhJ9QZn2N6DUzXCJZjjYF9Z+9twbNsDh32vRv43u3wTnc/0i+1I81i7Rv+FNnCnvGc7+iGWM+NmHfqaL/4x17tvTzVD6xxqzXuBecZ/JQKDZ5/+bG3pma/p3rek/pyf18drO/TfHBt469z1LJ3mL66mDervBPe+/PbzvpZ3O6eMWTY6q8W+i+TfN/z7c78c84yn+GyeN58AvresPwR9vmP8Gcv+tdbjPmsl0a/ct3RRY/6E7LvKCYPbvOgCg+SCABAAADqLD1P90b+zMnjb4EmfyED9g0krDhnjrVAsy1ULle3ul7MPV/4BGYVrovkF+hlNrjXADQlXanrEpSI5eZ3kvnypK9I8K/2VWO13xXftSJF67njrHvLtwy7WEX5TVlleap/ovbdMRy37pqcnxHV2bX+Oac6vtr9ujvwy3ASlXTw12X9bEBYjsr8dNW/n923hivziPvmytKSunqbK/1jYvh2+01+n/it4LMNWgTt9PBrRrTc9abe69Wmunc9xf8seCgN7La8dJHUK/tv+g3PvuYi/E7S/lK7brA+fDC4KZDInY8ozKqknje/DbaOAl/t8df7mvmfwNcJiX6l7Awi//1VOXRvswa+e0kffviPmbEHnXCxR3/u5F0UCaH1TNaNF/M02mSyxg5LWT/8OJwL1i/y7GB9ajZS79vyM1yJjn9KR2OsX5MEH3N2wg55STTnQXNYTj3X1V6KW1pfZ7OVEnxUWnUv1NrEmi/8apzXPglzYMZ/rWxJbGi88EBAA0GwSQmpxS5ed0UcdEw6jHDtz/B6LiMY3KGa2iwH+0AACwP9QmWOQGh2q0S6tKq2IZR9mHaIj5dLarHsvZqVWv2tEkUh/PV7tg0faKQFm/Jqunettft77050Wh0jqJ+b+u9X/1LK+8Uw0vSP0XMB5TvsV7qfLud81+/F/nps+8VPFeqJhfWAd+1W9KT5mXJ39u5/0a1/9lcLraeS+r6svPkor+qtgNsjT9Ml/+93jKSTV/YUkziur5/TT9dq3hWWugey/8rDUk+6LbZpH5v4D3X3w32f5V4vE3oAaBLBk7xLJdTjTv+putWHZKbHCzLl2BMnaLbYAyVYav4TzLdX5nkDHPqf2bWVGiJ2zgLWFp3bqywbjNa+zf43S+l6hkWdP1/28cv9+rpSfF/R1Jxv9Rxk0VGmzuPz8DCQDQ7BBAaqJGF2/S1jfjhvmXhX8lm2HWF3RRx4Ja/uIXANDkLSndbYM7u7RwyR53zO2/KGqv3nZXsMGhGu1xAzmTe9psoOyWGmIiSZWfq+cjO715LudYMz7z+l6qlq1Um+PFTC/1979TC9x+n2wZPV/lPm13R1IHrpqKaGmttaWB0jqpmNI5gZcjblaFt8QV/XWu/8t0K67/h+QvYZII9KMzcFy4JI/fwbb/S23/l77psy9B/ZJLrlItdY/Xrtovk1Pxy9B4L/HMr5xrc6GNTEVcfz3+i/ZACbvokKScYf2/H09Tbtfkz1o97r1kz1od+BlK1V+Ih5+1aClJGwSMdUTf1KX+Hvw2ipX185dXl1F/A2rktE178xksvRYrB2iCzlk2c2TznxfF+oKxyzNdtIydLfMXzSaKBt6qB+fc0mTuOnWXCc+p97xVaLP7CHbQ8e7chuI/76asp/P3uH317KZ0/yZGpfhvnNTPQSybzwuWVXh9P9UgXBbR8e52dxoA0PwQQELdtLtM89+cq+GJ/uMGjdKirct17d9+pYsWX6PvL/2ZfvHSPVpeHuqZFwAaJxPcueF9feWGf2m8G7jxy94dZgMwVRo/I7g8gVc/c5ZXauEbe7UhFLhppWEjj/CykNx1zH6C+2qh+0b6JfTqcLzg8uj+4wJT2YfqDDOtPboouDyBJcv+5Sz/UOvsdKPX66LAL2SDJYgS8LNIooMtz1Itk8IvyVO9M2mX+4v8GpYn4QchjGC/E8Eyev78cMfkQd65VS+9Fyu5VK1cTe+L0uw/wpaQCQzeecTKiDUNgZJKZrAlcWJld/xfhsdK2HlDfJaFLaMU6Mw79feTSIa0a9JnrQ73XrrPWi1EX3QHni13cL8//5f1gSGuH5ymws2aDF6HM3h/D1J/D1lDnU8zHfgb9pbzXcRkyt+A2unm/G0w906sbf1/G/w+6vx7379f61a+tDGq9ry4Q6BsXLtv6dJoFkkwiyZBuUM71D5byP937bWMeU4N/2+SUfOPW6q3Yeg7SMhvf2880fNZ89/EALesr/Pvn1/XLtl/46T9HKR+TrxnbU40WBR99mpbvhcAkDEIIGWciIpGhcvZRR4frY45BVpvl416/LFYGby4snfeugmWrS1wpguUb/ZtStdtCJSwM8uc/RcFts1fazOGAtNRbvk7f1mgDJ4ti5dfEDgHm3FkzuvSOc7InDyykGrp0z273MDRg2WPy/zefWD7vroo5zz964tdeu2Df3orAUAjNmTQEZpsx92AzqTjdKY7Hgj+uA7TopFx/SidfqTuiwaLpJ3//EJLdKhODsxT9rGae9eRgWNY2Ueo9K5sDYuuW/vjBU12riO07SQ/MNVaPw3uxznuIrePpJjsC75U/fyahMBLlFQlXIKdRkcF+yHqqSuCy9tdpMnRMjqOdt/Sj4PTpuNpd/36/zq/29hYWT1zTVfcYbOj3L4njPi+ShJIcH1uR9VJsmqqC3ZqHhOfLdUUmV9gx8oyedkV8aV1YtcZexHvS/391CRT2rWGZ63W916KZ60uTGfs8eWPTJaZew6mfGX1ckzmHGOlzDJAyu/BafdAG5llP+5rJ1yZ+zegRonuHfP3Pfpvg7l/4v4GJOtrLwNF+7mJe+5NllCs1Jynds9U/L9rx2fWc+r3IeXcL/sjABsNUCUr3Vvj30RH6EcBjuNT/TdOqueg+t/1X9vj+xlk0SC21Wboj0PnYP6tdv9tjs8MBwBkvBbbtm2ratu2rZ3EwVBeXq5u3dKtimv6QMrTXDsVZMraTXX/P9smUNRPy4as1vxzV2pUv2c1aLXJFvLmT1mZq4LgdNdibZ3i/MeECQTlSU+8OUXmvwFNAOhSBZfNix3DBHv8/ZZ7ywbMcI73vSw32NNn0srouu5+NuarxC2x552//P0Ej+nuc6pWjLHHtNOdg/vxz6eZe+/T9CtX//Rvv1Jk14e69us/VN92Pezc+tnx9gdq396tJwGgkYpEIsrJybFTTVPlc5XquaxKQwYdrbkXBEvWNS3rHnlfF70qTR55gn7qZk4BABDHZGG6v/A3Zce8PthMnyWmXzbz4jajgmkAAABoMshAaqIS9YHkBY+MLA3Pz5cm9VPHflOlGfnhUnNjxtlpZ71xV0lzlslksq9/3g
SBrnKDR0avkfkaYJd5rtLg6DHiXaXx3/N+A5Z1aid32l+3Q+dcb8RYu0xzc/M12t9P76tUkDtPS6MZSrkqGGkDRO06h3+Rg1ozZeu27SxPGTza6qxjStqZbCUAAAAAB9hJHez/3ydWMssEj0xAqUE7+AcAAABqgQBSpmp3mcaPcT5z8zXdBnZ8AzoHMkfad9IAdySiTRulFSbo5JePM9lA2qJNDZieHNm2RVo5VX38Y+SYjChp87b0M2qQvue3r9GprdunDB5NXX2PNny40f0kiAQAAAAcYKakVVyJO7cM1R1eNhIAAABwMBBAylRrC3TpnFwN0FRNfjwcnFmxudyOOcq3aIU7kqUuXeWWoQtnNplSd+4KDcLNTsrNV0noGJvc0ndoeCb76IzjnC82CT945AeN4qcB4GDKviBb79x1QpMuX2ececUJ7nVQvg4AUCPTT9KD9weGDO/fCAAAAI0eAaSMVKp8t7+iuZrvlrKbqqJgFlG0LF1ERbPmSWMGuWXrep1/lVZMmhctWWf6MuqYUxAoYdcAeg/S6JVTNdcvWWf6OcrpovxoCTs0hN/87//oosXXuONPb33BHV++/SV3Oqhj6/b60+C7NbC910PvoqH36zff/IWOOqxpv6wFAAAAAAAAANQPAaQmam6eXwIuOJhgT6nyc/I0d0yx1ydSu8s0fYY0pV8sEDRgjHSfu34/TVG+SqbYUgm9p6hkxhZdavfXZ5JUsHpKtE+khtFTU1fna7N//m4fTasD/TclZwJcmpOnjqMeEwXvamb6PPKDQsa1Pa7UwA7n2Knqtuwsd0vdAQAAAAAAAABgtNi2bVtV27Zt7SQOhvLycnXrdiAqW0dUNKqflg1ZTcm4Ju69T9MLoZlMpNOP61pj8GhNxavKXzs7ZZApaMfbH6h9ewJOQGMWiUSUk5NjpwAAAAAAAIDaIQMJyGBuJlKK4NFv/vdhN/so3eARAAAAAAAAACDzkYHUCJCBhNpKNwMpyGQjvbmzXH3b9VDlZx/pzU+2a6stXXdnv+tr1e8RGUhA49cYMpDuXvZvzXzuUzuF+pp4wVH62aAv2ykAAAAAAID9iwykZiVLw+dvInjUTJlSdkcddqQWvfmClm9/SVXOPFO27rff/EWtgkcAkA6CRw3PtKdpVwAAAAAAgAOBDKRG4MBlICFT1CUDqSGRgQQ0fgczA4ng0f5FJhIAAAAAADgQCCA1AgSQUFsEkACkYgJIRx11lJ0CAAAAAAAAaocAUiNAAAm1RQAJQCqNoQ8kAAAAAAAANF30gQQAAAAAAAAAAIAQAkgAAAAAAAAAAAAIIYAEAAAAAAAAAACAEAJIAAAAAAAAAAAACCGABAAAAAAAAAAAgBACSAAAAAAAAAAAAAghgAQAAAAAAAAAAIAQAkgAAAAAAAAAAAAIIYAEAAAAAAAAAACAEAJIAAAAAAAAAAAACCGABAAAAAAAAAAAgBACSAAAAAAAAAAAAAghgAQAAAAAAAAAAIAQAkgAAAAAAAAAAAAIIYAEAAAAAAAAAACAEAJIAAAAAAAAAAAACCGABAAAAAAAAAAAgBACSI3AIYccoj179tgpoPEz9ywAAAAAAAAAIHPxFrgRaNWqlSorKwkiockw9ywAAAAAAAAAIHO12LZtW1Xbtm3tJA6WHTt2aNeuXdq3b5+dAzQ+JvPIBI/atGlj5wBorCKRiHJycuwUAAAAAAAAUDsEkNAs/PZ3D2jT5i366U9+rC6dO9m5AJC5CCABAAAAAACgPihhBwAAAAAAAAAAgBACSAAAAAAAAAAAAAghgAQAAAAAAAAAAIAQAkgAAAAAAAAAAAAIIYAEAAAAAAAAAACAEAJIAAAAAAAAAAAACCGABAAAAAAAAAAAgBACSAAAAAAAAAAAAAghgAQAAAAAAAAAAIAQAkgAAAAAAAAAAAAIIYAEAAAAAAAAAACAEAJIAAAAAAAAAAAACCGABAAAAAAAAAAAgBACSAAAAAAAAAAAAAghgAQAAAAAAAAAAIAQAkgAAAAAAAAAAAAIIYAEAAAAAAAAAACAEAJIAAAAAAAAAAAACCGABAAAAAAAAAAAgBACSAAAAAAAAAAAAAghgAQAAAAAAAAAAIAQAkgAAAAAAAAAAAAIIYAEAAAAAAAAAACAEAJIAAAgTTu06t4RGjFihBZusrMOoo1PNo7zOPg2auGIhc7/AgAAAAAANBwCSGgWyt951/18x34CAJoyL5B1S7GdbM4+WqWZI25RkZ0EAAAAAABoKASQ0Cy0bXusHQMAAAAAAAAAAKm02LZtW1Xbtm3tJJCZfvu7B7Rp8xb99Cc/VpfOnexcAMhckUhEOTk5dqqhmMyfsSpcIw2/fYGGdXHmrJqpsYUlUt8Jmv3tSo292c+F6aMJsyaqv/1PDFNuzs0Yypumabollj1ktruuv9q4E6YUm5dN4+/fCB3juiytqJZxM1zTFgxTVzvlC2437ayXdYsZdyVYf9NCjYieu5Hs/IdreHGRd3znWhZc4uzFZAGNK5S/dyN4/v62fSbM1tD3x0av3UxP7N8mtu/AvJhYm7iC7VXtnB3+OTmC+42/ntg599HwPKmo2Jy9v07cMR3B6wEAAAAAAM0DGUgAAKD+1hQGgkdGiQrHJeiXpzgQPDLMdveu0g47uV84x4gFj4wi3RLsMyhRICbp+dvgkaPPCdkJg0dG0c0zteojO2GVFMaCR0ZJ4XzNjCvFZ9aJ9uuUqDydaa+U/R0lKvFnridRn1ElNnhkdFRWguCRUXQz/U0BAAAAANDcEEACAAANwmSpLFjgDLcPt3OKVFYt6GCyXOLWW/OyyuKCLcl11bAFszWhrzflHbN69lG86LnNmuCcgRE7t40bvHBJ9XW2KlLtvGLnbzKFdrzxshs8MplD7rYLpsm7qhJVfuiOBJjMp/A6JWvi5zlHrfTCaRtXeoGp2L4XaFqeWVKkxaucdboMC5yr3Y/JPtq0ws0Sc7OV7HZ+Wxc9kyBYZ7KW3PWcdvwo4ly14Z+Xf0xn2w01h60AAAAAAEBmIYAEAADqr+8EDfBLnHXpHg2GVJM3NFZGrcsAGwgq0ctv7MccpOC5te2voXEBka6XeIGSYV28zJ0RCTKKooLn72jTf6K7rV+KbkSC7J2ovO420NVV3e05JJzn2qgym0FkspJGjDD7jmUVlbxf6Y0k4AfEvGwlb7tohtWaSoW37KMJuYHwW9ssdXRHTJaW2XahZNvHL40HAAAAAACaBwJIAADggHHLvkW1UdbJdnR/OjnL9rHkyT7By9nxmb6SvACN179TrZjydzZIEy4ZV1342j2J5qXl7Ugdy/4lyqoK6qph0Qwyww8kjdBMk/UEAAAAAACaDQJIAADggAlnzuxQ5G07uj/FBVsq3w/mF23UCr9/JL+UW7QsXCo7tOoZm9kTLRcXK0XXUKKl9YLDdf1DQbGEoqXpgsPEUAZVQqY0nr9+oC1KClek6HsJAAAAAABkEgJIAADgwClerFV+BozfV4/66Oxu4XCI3w9QKMCTQGy9Gqwp1Aq/L6aPVmmxzRQafkZXZ9rv8ydWy
s3v1yi1SlXajKXh37YBnU1lyUvY1UqspF2s3yJbYi9hNlAss6jrGTaEFWjraJbVvQn6QAqKZlQt9IJFbftrYo19QgEAAAAAgExFAAkAABxAJSoc5wVBov3yRPsVigVNYv3+1NCnkMNbb2YsKJVE0c32mH7/Rn6/SNE+f2LnNbaGgFVYtrLdPpwC+/evqQF0zbWBm2hfRn6JveEa2j8+/8ievwkQBfqWir+maKArmei2sdJ1sTY7W91TZS8BAAAAAICMQQAJAAAcOHnTNM0GiVymzNolXuaP0fWS2TaAYfWdoGkT4gvKtVH/K9MtM+eI34cpNxctAddVw0Il6/powqwF9hxL9PIbNeXrOOdxXbhknSk3N9seq2hDPQu+hbJ/fMM1bcEw56wtZ51Ridrnurh2dJhzG2aCZjVKvK37PaVTNg8AAAAAAGSMFtu2batq25afkyKz/fZ3D2jT5i366U9+rC6dO9m5AJC5IpGIcnJy7NTBt/HJEbrFlI6LCxjtT6Zsm5t5EwoYAQAAAAAAIB1kIAEAAAAAAAAAACCEABIAAAAAAAAAAABCKGGHZoESdgCam8ZWwg4AAAAAAABNCwEkZJyFTy3WCyv/Zqdqdl7uNzXs4qF2CgAyBwEkAAAAAAAA1Acl7JBxTECoV88edio5sw7BIwAAAAAAAAAAqiOAhIz0w8u/r46nnmKnqjPLzDoAAAAAAAAAAKA6AkjISIcccohGXvkDtT22jZ0TY+aZZWYdAAAAAAAAAABQHW/QkbESBYpatGiRNLAEAAAAAAAAAAA8BJCQ0eJL1Y284r9rLG0HAAAAAAAAAACkFtu2batq27atnQQy0wsr/+Z+npf7TfcTADJdJBJRTk6OnQIAAAAAAABqhwASAAAZiAASAAAAAAAA6oMAUiOxY8cO7dq1S/v27bNzgMbH9CfVqlUrtWmTGX1I8dyhKajrc0cACQAAAAAAAPVBAKkRMC+xDz30UGVnZ+uwww6zc4Hk3vs0YscOjj0f/afJB5F47lBbTe25I4AEAAAAAACA+jjEfuIgMhkQvMRGU2Lu2aaO5w5NTSY8dwAAAAAAAGg6CCA1AqZ8Fi+x0ZRkQsk3njs0NZRaBAAAAAAAwIFEAAkAAAAAAAAAAAAhBJAAAAAAAAAAAAAQQgAJAAAAAAAAAAAAIQSQgGbi0z279HLFP/SnjYvdwYybeQAAAAAAAAAAxGuxbdu2qrZt29pJHAzl5eXq1q2bnQJSe+/TiB1LzQSJHix7XMu3v2TnhA3scI6u7v49HXVYKzsntR1vf6D27dvbqaaJ5w61VZvnbn+o7XMXiUSUk5NjpwAAAAAAAIDaIQMJyGBbd5br2r/lJw0eGWaZWcesCwAAAAAAAACAQQAJyFAm8+hXa2ercteHdk5yZh2zLiXtAAAAAAAAAAAGASQgQz342mNpBY8Gtu+r33zzF+66ZhsADYe+xwAAAAAAANBUEUACMpB5Qb28fI2dSs4Ej679+g/dcdMHktmGl9tA/Znn6DevPqzvL/2ZfrX2fv1p4zPuYMbNPLOMZw0AAAAAAACNGQGkJqtU+Tld1DE6FGi9XdL4RVQ0qotGPV6/DunXFzjXXVDqjDXM/hpS7NwOjg0fbLJjyfnBI9P30dTV90RfZqezLRpK+vfu/rinDvZ9mqnoewwAAAAAAACZgABSU1TxmEbl5GnzjNXa+uYmdyiZsUWXNqogkhfgyl9rJxuCe92jVVRhp5uIyOOj1XHUYzqQ4S3/pfRRhx3plqczwaKgZMEjgxfaQN2ZZ4m+xwAAAAAAAJAJCCA1QesfmqoVY4o1/3tZdo6U9b18FeTO032NKAtnf+s1ZZO2Tulpp5DIp3s+05ufbHeDRX4QqabgEYD6oe8xAAAAAAAAZAoCSE1OqZbOkUafHx84ydLw+ZtCQSU388UvcRfIgPHKVhUESuD5WT0JymmtLQiVx3O39bcLlL4y80c5+xzlHmu+HhyVp7nO/Ll5aWQhuccIrGen/cE7n1Ll95uqFVqpKf28861efmtl7JqSZvx415jvnKu7nt0+WVv560eXRY9Xva0SlgNzrqXPpJXOqU1VH7vfUBvup8ykjq3b2zG5fa245bK+/kNd2+PKlMGj4Law/Oy3x2P3pvnug/dN6Llx1/e/57isueCyUfO0xc72JXvGkqv7cxtT8/3sjT8WeBbMvoNlNMPZj8mfp8xmnif6HgMAAAAAAECmIIDU1FRs1mblqlOqd/xu4KKTnnBL3K1WgaaqT/DF8Zwt6rTaK3/3xJiVmjLVvOTN0nlDcrViycroC9/1z8+TxgxSL2fcvBS+dGO+Svx9bswLvXBeMUcab5bNH6Wr5xdrtDNvdPEmTe3tLU/IvOjOk3ue3nqlys/bogJ7bluLr9KKSfO0Xj01dXW+BjjXXrB6roa3c7cOWTHpWXtNCa43ztyNnbzrMBlMobZy2qNrbNvI41M1pWuxdy72mmtVlq/3FJXMyJVynXabf5mynGPF2tA71uTgi/8GcsbxXeyYxw8iDexwTsrMo/ht4XOek82DvHvBvS/7abLy3WnzHXv3qeEFOzs79763TJrSzw+wRFQ0darkl58c59yLK90FrlTPWGK1eW6d53JOOvtMYM6zUr49r9x5ujRnmQb7+1Qg+7GG5ynT0fcYAAAAAAAAMgkBpAwVfIHsZieNu0qasyyWJTBmXDQI06FzrjfiyDr3Qg1Y+axesBlJmzbmqmCkyXaK6IUlKzV63GXO3gxvn8GX1rHjpWnJVBs8mhLYrqemvhkLEEW2xedn1CB6TQmuN86AIbn2Ory2GjDjqug59BqZrwHBbefMshkkXpZXjQGxdKycqrk2CGXK8AWzxhqKyWrwS9b5TBDpTxsX1xg8MtuYbZGI/yw42ndyg5mDzrVPw6md3E/X2mWaq6s02N4nWd8b5wZYlprvvGKllq2MbafeV6kg+vil8Ywlkeq5jd3vPTXaBLvS2Gc1geerS1cz7T/v7dUp9ick9fOUwfz+w+h7DAAAAAAAAJmAAFJT066zOmulttT4rtG8QJYGdK5DKbJ2l2n8mJVa9mLEe9mtC3We+9K4XFtWeiXpoqWp8uZJK7dou7th7a1w9jegWr9N4ZJxk5fY2WkIXa/7gj8dXluZbJLodbml8rZoU4V5+T/XZpD45xRXjqy2ek9xs1eC7VirjKZauPr0y5Tdqq2d8vxp4zNJg0dmXbMN6scNeuZ2Ugc7HVK+xbm3OqmLDZBGgzGudJ+xYOk4W3YuxXPb+dSGD1ImVvPz1FzQ91g9mKxU/75OxS8HuT8y3PbnvgE0PZsWasSIhXL+iWvENmrhiBFaWIeE1o1PjtCIJ+t5dcE22o/ttWPVTGffzvma4d5V2mHnV/PRKs0cMVOrPrLT1XjtZfYzc1XSvexn6X5nO7Tq3oY+z/2xTwAAAGQiAkhNTk8NHiPNfb76S61YnyXeS+kVm+v2i/Ze53tZD+tffFaKZi54WQamJJ1beis6BLOHamfAjHzNz8+XJk2NBWXWztOUlVdFy1/NHxfI7KgN90V9Ory2GuCXFIsO
sSwoE0Ty58fK/dWDCSLZ/ZnSZ3Pzwv3HNBSTSfSL3tdUCyIlYtYx65J9VH9uNlKywKob2AwGU7yAiyfdZ8xk6cWW+xlsNT23m7fV646thdTPUyaj7zEAaI7qHrhBLW1aqLGF0oRZC7RgwQJNO7lQY+sY+NqxarGK+k7QbGc/E7uVpQg2AQAAAM0XAaQmyCsJFdePielbZ06sxJZ5mRwr4RZR0axgSbsUeg/S6JVTdemkThofLa/m9bMyd1YseOIGrOrbQX67yzTdZPiEgjL+C3Z73mmK9UFTu+t1X7xHt3W2fny0OuZ4QZ1YUM7jlvvr2tlpDRuki5YCK9XSOe5Ijdx9B9rMDTYky1ZpAOaldKJSWkFmmVmHF9gNxDw/fsk6R+TxWbGSdu1yNSh3paY8ZO8pt6SdN1rvZ6yG5zZ4n86dFCxp56vb/ZxITc9TpqPvMQBAU9T1kgVacInzHwINpcswLVgwTA24R9fGDUXqM2GU+tvfRtX7vE/OUhs7CgAAACAxAkhNUbvLNP/NYnUOloly+xIK/Mq/9xSVzNiiS93l/TRF+SqZYvtvScnLcooPwJhMHLdDfHvMS+dcpSfm+/21xLOZUnmpSxG5fcSsnKrJZj23T5iVtmTcVGlcfixjw3/57ixLVPZtwIxOWlqX6w21VRf1mSQVrPayPnpNWa2CjXnufG9ZJz1h9+sG8pzz9tpjmTrNCHQEE+D1T2PWK9D2uDbsmLdFBfnJ2rBhmKwik/3wp8F3a2rvsfp+16HuYMbNPLOMzKOG1FNTV+drsy1FF7yfTKBm+PxijZ5j76l+W9TZPGtW7Z6xeOk8t3maO6Y4Yb9b6d7PKdXwPGU68xzFB2szru+xaIm5xwKlFEeraK0t+eYO4YChF0T0l8X//Q6WLXX2s83ODnADqf726QZUbQm64L8//nlEj++XqfOHGsrVVds2VGrPlpV0zq0ocK6jHi8NlWQNXXfcsRP9m+aLHttp8+g2oXYIl36NlVr15xeoKPgdONcZbNPwv9Fx+6KEH5q8WJkyM8SyhGz5ricXamZ0ebjkmlvWzd9ug51ZjdnPLSpyxopuDuzfLZ/mb586syV4rGApOa+03MLANcTtyy0V529XZmcm4pcrWxXbV6D8W6yEXc3rGWmVkAuUsEt5DWm31UaVFUsdsxOHfMxxot9n8LzeiLWRX67NXMPYwhKp+BZn/u/0u3GFKnH+r3BckuPb61kVuHbzXQe/t2AGWo1tVON3lux+rUGCUn2x79OdSr3PavsIZtUluSeC31voGutwDQAAAGjctm3bVrVz506Ggzi8/vrrVUBtvPvvyoM6mHs20b3clAaeO9RWomchfti0Y1vVkEWjq3ovuDStwaxrtkm0r/ihts/dli1b7Jk3oFfurDq1Y+eqU39UXFUZnO54Z9W64PSd681UVeVjV7vTv3rFnYwu/9Fj7tZxyyurFvzI7Cu2fN2dZvrqqgXvman1Vb8K7LvqveKqHwWnQ/x92fPyp/3zrrat3XeS5TVfh902/jyj6/vT9lwS7tvftjr/2MnOxWsj/zqD1xFrz8Tt60/7x45rM3ue/ncBND0fV714z/Cqu1/82Jv88MWqu4ffXfXih2bCWzZ8+JNV/3QX2uknvKmqjU9WXxadjvfPqieHD696cqOdjJv++MW7a9jWLr/nReco7lTonP/5hDmuf852OrpuouMEzyPIv4a467fX6+7XHa95vXC7BLdzBJcFxmtzDfH7DzPrOvvZaL5Hs8/gfvzjBLZ1v+/AOnY61F6hZbFzrMY9r9j3Et/WoWsKXUNc+8Vdb3g/3ro13a/RZSHxy4LHiFvmnluCfVa7/ur7iF2Ttyxxu8YdL1W7AgAAoEkgAwkAgAZiMomaRd9jbilPh9uvl8PPfPOnXRG9sGSllJuv0aaEo+FmmfrlEu1yv8SjydAbd5UZsWwpxdwLdZ6bXWuz7KLlWWvilW+UX07Slov0yzdGXnxWKxQr+2r2Pdpk3a18Vi9E+yirrU7q4p6n1/dY7Lr8aY93bGn0+d6x3SxVOef3Ys25VdH1TelTh9+3Wa8ppp+xq7TJzRzK01x3blCuOrkVSr1SlWZ60LmmFfxpyy+p6X+XbtZvsLQl0MR8VKaX1wzX0P42Y6Vtfw3NK9HLb8RyJfpMGGDLrLVR1snuiMuUSlNe9+iy/t8e7o6lZVOZijRc3W1l0jb9hzpTRSpLmImxQ2WvlGj4t/vbUmresUpeKYtldOQNjZZsyz6hjzdiJDxOCtF92WsqLlMw6yoqyXpeCTm/zZx/CnInqE+yfQTVdA19J2iAX8W1ywBN6JusrYwSFT4jjVpg+kCarQmK6wMp+p3FRNvW/f6log0pzzaJ2L3UJrujO+23ffCaEt47fhvV9J2lcb8m1kbdz+oTu2c+imir36buPvvo7G52n277prPP6mLfe7ay+zrTZ3W37Zol0xquOl8DAAAAGjMCSAAANKDm0PfYgM7pnHe5tkT7+Iqzcou2+8tT9QMXLa1oyjqaGX4/eTVzy6M6n3OfL40GjLzAibR9c9IT05ZyO1pbtezPzpR4dcvE9ZvqBpRWbK7bgb1ydP00pWuxtr5Z7F5zmB/YSpNfYtPs0zST+10BTdCHlSpRkW4JlNO6pVgqeb/SrpDMDkXelvqckG2nHcdlyw8RBMuWJSrhtqNyq9Q3W4Gto6pvW6nKNV75u+j8m4ukNc58u00y1Y/jvdg3QiXUAqX5kl1TvMTree1SUjg2tm+39NtWRZKWnauZew1rCjU2eq5jVei0x9bKHUmvIT7YljQI5uqj7OPsqCMUvEomVFKvtiXYEtw7ATV9Z+ner4naxQ1ErXlZZc73sOONlyU/uOPus6OyUv+mpWHU+ZkDAABAY0YACQCABmayiuh7LJx5E+IGW+zyVAGK3HyVvGmybPxhbqy/vyjbD5EdvH59/IylWZrsZkL5mUxSh85JT8xm6+x/o4uD1+QMpn8927eSN/h9GdXEz9KqTT+HKYwxgajguTWPPsyQgdzAx3BNc7NVAsMl8Tkq8bxspNBLb/fFuKfrJYF9XecHM2Lc7JQkAaDq23oBhOG3B+a7w7BqmTTxqh/HC0YZbfpPTLivZNcUL/F6Xrv0mTA7sG8zTIxmF9WWew19J2h2aH8LNLF/mwTXEAi2BCUJ1iVS+X6yKw5o218TA+cyzM+OSkuCeyegpu8s3fs18XfbVd3dTJ+NKntFsYwjd591D/DVWp2fOQAAADRmBJAAANhPTJCob7se+n7Xb7uDGc/8wJHPlpFbOVVzTRk5Y+08N6vFKyUXV2ZOERXNmmdGLBsAipaVs0GiUY8lKKnWU1MDQY/53/MyjXqdb0rirdQK55ijx13mHNHjl42b8lCpnVOquZPCQaag+LJx658PnmfteMf2MqOMyOOj3YBRvmmD3lMCgZtEgbIkbBAu8visBCXs0mRL1kVLBFY8plGmvQv8NgKamLbddXbfIi1e5ecIeZ37z4xOJ9f1DJPZslir7It3tyxZurp0V7Bk3Y5Vi0Nly8K88mNFz8QymdwspQSZTdXY40S
vzy2PlkI0W2eHVj0TLLUWJ8l6pl1KCldEM368bJhYdlCtmWtYU6gVfpaPzf5JnPUT31beuUVLqSVUosKV9uycfS8uloafsX+DGd69k6Sda/rO6nG/Gt53c4sKT46VC/T2GSght2mFCoMl7XxuGbrgemncS4nU8xoAAADQOBFAAgAA+0XW9+aqZEZurFxb3jw388YP8JjlT4zxy7lNlYYE+0Dy+vd5YsxKTelnlpv+fa7SE/NjgaCUeg+yJd38/oisdpdp/up8DYiWa3P2bTJvku3b77tpUj9n3dHa1Dl8nrUSd+w+k1ZqwIzVmho8v7T11NRicy7zdKm7r04a7Qbd6lJ2LkvD5692rtPbl1taryEzm4ADro36XzdNHaMl125RUd40N7slpS7DNHuCVDjOK8O1WMOVvPiZyf7wytB5L8q7atisCdpqy9KNLZQmzEqeUWQySqadHCvjdkvxcE1LkNlUnXcc+df3jDQ8eeVUVx/nPBe7xxmrQk3Q7CSZIUnXc9tla7REWaprSy3cVqYknibMTpr1Y9pq9lkv27Zyzu3kVN9nH004oSz1vm2gxXzftStZl0CojeLbuabvrB73q+EGp+IDZHH7vLlIw29PlDHWVQMm9ImVJ9yQrQkp7qXE6nkNAAAAaJRabNu2rapt2wNVGBmJlJeXq1u3bnYKSO29Tw9ul+Y73v5A7ds3zb5bfDx3qK2m9txFIhHl5OTYKQBA87VDq+4dq5fPmp3iZX666wEAAABoLshAAgAAAAAAAAAAQAgBJKCZWLR1ua7926900eJr9P2lP9MvXrpHy8v9nnsBAAAAAAAAAIghgARkuE/37HIDRw+WPa4qZ3pg+766KOc8/euLXXrtg396KwEAACBDmb5pFqRRli7d9QAAAAA0F/SB1AjQFwtqqzZ9sfz0b79SZNeHuvbrP1Tfdj3s3PqhDyQ0R/SBBAAAAAAAgOaEDCQgg5myddt2lqcMHm111jEl7Uy2EgAAAAAAAAAABJCADPb89jU6tXX7lMGjqavv0YYPN7qfBJEAAAAAAAAAAASQgAxmso/OOK6rnarODx75QaP4aQAAAAAAAABA80QACchAv/nf/9FFi69xx5/e+oI7vnz7S+50UMfW7fWnwXdrYPu+7vSioffrN9/8hY46rJU7DQAAAAAAAABongggARnI9HnkB4WMa3tcqYEdzrFT1W3ZWe6WugMAAAAAAAAAwGixbdu2qrZt29pJHAzl5eXq1q2bnQJSe+/TiB2rmclEOv24rjUGj9ZUvKr8tbNTBpmCdrz9gdq3b9oBJ5471Fa6z93+UtvnLhKJKCcnx041jLfeesuOAQAAAAAAoLE55ZRT7FjDIIDUCPAiG7XVUC+yTfDoN//7sLJatdVvv/kLOzc1AkhojgggAQAAAAAAoDmhhB3QTJhspGv/9iv9aeNi/ebVh91xk3lkgkd39rvergUAAAAAAAAAAAEkoNkwpeyOOuxILXrzBS3f/pKqnHmmbJ3JPDrqsFbeSgAAAAAAAAAAOChh1whQSgu11dRKaTVGPHeoLUrYAQAAAAAAoDkhAwkAAAAAAAAAAAAhBJAAAAAAAAAAAAAQQgAJAAAAAAAAAAAAIQSQAAAAAAAAAAAAEEIACQAAAAAS+WiVZo6YqVUf2en9YqMWjhihEf5w7yrtsEsAAAAA4GAigAQAAAAA9bBj1cw6Bn52aNW9t2jrhNlasGCBM8zWBBVq7JMb7XIAAAAAOHgIIAEAAABAIm37a+KCierf1k43uDbqf90CTezfJjrd/aw+0tsRspAAAAAAHHQEkAAAQJpKlZ/TRR1HPaaiAufTjDtD/lq72FhbEJ3vDgWldkHibUc9XqqiUbHp5PsaraIKOx8AUvFLz61aGC0NN3PVDi9TKDDt2mTWWaiFT8bKyEWXxZWw2xhYJ5px5Gw/trBEWlOosTVkIQWPXatsJfcc/OPu73J6AAAAABBDAAkAANTOymelkZu09c1ijXYm5856TBEzv+IxjcqbpwEzVjvLNqlkRq40Jy8cFIrbdsWkPG0ZF9hXXoHWm/VM8CiwryfGrNSUfnYZAKSlRIXvd/dKw90+XCWFYzVfo9zp2RP6ONMrFCsUV6StJ9gycu6686sHajYt1C1vT9Bst9TcAk07uVDzTaCpyzB3f+rrLLuuv/xcohA3yNRR0wLbJi5Tt1ErCkvU56zudj8btXBcoTre7m234PaOKhy3MHDeAAAAALD/EEACAAC1k3uhzmtnRtqrU67zsXKLtpvJdpdp/pubNP/UeW7WUJ9JK83cOJ3UJbitrtLg3sFpz/rn5zn/m6tB52a5073Ov8r533laGgxGAUCN+mhCbldv9LhsZ6qPzu7mhWXaZHd0P2OGa6hfRs5dN4k1hVqxyRvtekmw9FzNNm4oUp8JA2TPRl1zJ6hPcVlcIMjrD6mo7wSN8ve7qcydHtDFm1SXAZrQt0hl9hwAAAAAYH8igAQAABqGyUAy5ebytqhgtc1AipfbSR3saGom68iWsMszASVp8zY31wkADrwuw9zspKKb/XJyI7QwQSAnVKpuhMkW2qHI23IzoKLzxxWqRFsViWY5meDRWBUqnMW0o3KrVxovuj9nnTXS1sq0C+ABAAAAQJ0RQAIAAA0i8uKzWuF8ji6eq+FullF95bqBKFPCzh/mf8/LSAKAg8IEkWwZOlO2rujm6uXk2vSfGF1nwYJh6qo2yjpZ6jPBlsiLDhPVv63ZwgaPTp6mBXEl8NxMKVMaL7Rd+plPAAAAAFAfBJAAAECD8rKESjU3YQm79Hgl61Zq2YtextH6ApOJNFpFFe4kABxwbmbRvavk5/54wZ1sZdvpmnQ9w/SrFOtzyctS8oJPG5+0mUeX+AXuArp01/BA2Tx9tEozRyTOfAIAAACAhkYACQAANIis7+WrIFdaMamfOubkafOYqzTAmV+nsnO9p2hr8VV2X1106ZyGzGwCgNozmUXTTg6Uk7t5qyZc6WUMtel2tvq4peaqZyS5ugzT7AlbdYvddmyhNGGWyU7aqLJiZ3moTJ0Z/P101bBZE7TVL5s3zmw4W8P8PpEAAAAAYD9qsW3btqq2bd3aCThIysvL1a1bNzsFpPbepwe3D5Adb3+g9u3b26mmiecOtdXUnrtIJKKcnBw7BQAAAAAAANQOGUgAAAAAAAAAAAAIIYAEAAAAAAAAAACAEAJIAAAAAAAAAAAACCGABAAAAAAAAAAAgBACSI3AIYccoj179tgpoPEz92xTx3OHpiYTnjsAAAAAAAA0HbyNagRatWqlyspKXmajyTD3bFPHc4emJhOeOwAAAAAAADQdLbZt21bVtm1bO4mDZceOHdq1a5f27dtn5wCNj8mAMC+x27RpY+c0bTx3aArq+txFIhHl5OTYKQAAAAAAAKB2CCChWfjt7x7Qps1b9NOf/FhdOneyc4EDY+26Ui1a/Bd9vGOHO/2tQRdoyOAL3PHm5vfPv6dr527WD/7rBN0/rqudi/2BABIAAAAAAADqgwASmgUCSDgYPvzwIz21eIlK//cf7vRXu3bRd78zRO
3bf8Wdbo5efH2Hhty2QX26ttbzt/ewc7E/7I8A0ltvvWXHAAAAAAAA0NiccsopdqxhEEBCs0AACQfair/+XU89vUR79+7VEUccoYuHfkvf/EZ/u7T5qvzkC3Ues0ZtjjpMb88/x87F/kAGEgAAAAAAAOrjEPsJAGgA2956W7+5b7ae/PPTbvDorDN76eapNxI8srKPOVzHHX24dny6R+99tNvOBQAAAAAAANDYEEACgAayaPES3X3vfdq85U0df/xxGjXycl15+Qi1OeYYuwaMLicd6X5ueu8z9xMAAAAAAABA40MACQDq6R+vlemOO+/Sc8tXutPn5X5TN025QT17/D93GmHRANK7BJAAAAAAAACAxooAEgDU0b/+/W/94U/Fmjvvf/R+ZaU6nnqKrh0/VsMuHqpDDz3UroV4Xb/Syv0kgAQAAAAAAAA0XgSQAKAOVr/0spt1tKZkrTt90dBvaeK1P1HnTjnuNJLrcqKXgbTxnV3uJwAAAAAAAIDGhwASANTCu+9VaPbc3+tPxU9o165dOuP07rr55zfqgoED7BpIJZqBRB9IAAAAAAAAQKNFAAkA0rTsueW6c8ZMlb3+ho7+8pd1+ffzNOaqHyo7O8uugXR0PvFIHX7YIXqr8j/67PN9di4AAAAAAACAxoQAEgCk8M+Nm/Trmb/V4iXL3On+55ytm6beqLP79HanUXuUsQMA7FcfrdLMETO16iM7na5NCzVixEJttJO1s1ELR4xwth+hmUWP1mM/qL0dWnWv0+6rdthpAAAAAA2BABIAJPHFF1/osSf/rPvun6u3t5frpJNO1DVjRmlE3qVqdaQXAEHddDnJa79N71LGDgCQGXasWqyivhM0e8ECTex1lJ0LAAAAAE0XASQASGBd6f/q9vxf629/X+1OD75woH5+4/U6rdvX3GnUD/0gAQAy0slZamNHAQAAAKCpI4AEAAEfffSxHnr4j+7w8Y4d+mrXLrpx4k/17W8NsmugIfgZSJSwa2pKlZ/TRR1HPaaiAufTjDtD/lq72FhbEJ3vDgWldkHibUc9XqqiUbHp5PsaraIKO98ReXx0YJnZT8Qukda7+x+t/ILYOsHl0XOJDgVab5cY3vb+ED5uaJlzLcG9Ami8Nj5pSst5JeXc8SdjxeV2rJqpEfeuUrD4WcTMs+XoFm6yM12xMnXBZWYfYwtLpOJboscJcUvq+dv5pfW8smux/Xv7Dk8nKcMXX2ovVLLP269/jsFrNby2qL7MzJ/55ELvPOPaIyjh9vb6YiXk7DnELa9+XL/03KpYu5pjB9f3z8W/xlXm2r1lNZWsS3adCYXOL9zmNbXXiCdXBdrafB/B+4MShgAAAGj6CCABgLXyby/qjoK73Oyjww8/XJdd+l2Nv2a0Tu7Q3q6BhuL3gUQJuyZq5bPSyE3a+maxRjuTc2fZQErFYxqVN08DZqx2lm1SyYxcaU5eOCgUt+2KSXnaMi6wrzwbzDHBo8C+nhizUlP62WXOcSZPWhlYZvYzNRTocQ6kzZ3zo+exYlI/ex4RFY3K09zcfJU4y7auztcAzdOlNtBlAkSXzslVwWpzTqtVkBs7bniZc74rp6pPNEAGoLEyL/pv0TQtWDBMXe28mhWp8P2hzvoLtOD24Sq6ORiYuUVbJ8z2ls2aoK12WZv+EzV7Qh/JLWEXf5yNWjiuUB1vd7Zxtps9QSocZ4ILbdT9rD4q2mDDDJvKtLVveLqo79nq3tabTNeOVfNVeLK5XnO82Zrw9i2hQNctb3tl9vxlwSBMSbE01Cy7rn/CTKqk27ftr4lOe6hwvtse7jnIWe8S0xJOuz0cu37Tbn2KF4eCNCWFleru7nOahq8p1NiHpVH+us70imhQrUSFr2R7x3ePNzYuwOdJdZ1h4e9nwe0d7fcTvx/n3Irj9lP8snSlPUbfIt0yoix2Hc59tLiGABcAAADQFBBAAtDsvfX2dhXOmqMnFi5y+z3q3evruvnnN+qb5/aza6Ch+SXsNr5LBlKTlHuhzmtnRtqrU67zsXKLtpvJdpdp/pubNP/UeW6GTp9JK83cOJ3UJbitrtLg3sFpz/rn5zn/m6tB52a5073Ov8r533laGghGmaCQyQLqMMUEdOZquLtfX2zbrHMv1ADnc+7zJtiTpeHznfXnd9Zck0XUb6pWuGsZpVo6x/mIXp9d980p6lVtWU8NHuN8zFkWyl4C0Li8/LANHrmBjHT10YRcu36XAZrQt0Qvv7FD+qhML68ZrqH9bWilbX8NzbPLamICQRqu7l28yTb9h7rBhbJNznh2R+ntiJths6Nyqzp+21lmpzduKFKfs7rXrSReNEDTRv2vW6Bh7rF3qOyVEg3/th8ccpZ9e7hKXilzj+fK615DkC3F9k57jHKDYyM0tlCacGVgveg5OD6sVIkd9fWZMMAeN1vZfZ1p/7rbZslpoZDo8d32VyzgFpXGdQbFfT/qMkxesNHbT+w76KoBE/rEtddQ9XcDfG2UdbKZ9tvPuw4AAACgqSOABKBZe/qZv+iuewq1cdNmHde2rX505Q/0wyv+W8ce670qwP5x9JGHqv3xX9J/Pt+nbZX/sXPR5JkMJBOUydviZum4GUjxcjupgx1NzWT/2HJxeSagJG3eFnEDVeNN8MYwWUBmeVwZupB2ndXZfG7crIibgWTWz9Nmk8HkZiBZFZu12Xx27Swv9JRA9HgmG8nM2KJNocwnAI1HifN/faplu6TWUVnRrB8bGDDcwIfJMvFLlI3QLcXOUd6vtCskZgJD6putbDsd0qW7hq95WWUfmWCFlH2cWc+bjrzdR2d3c/57JK78W6KMmyAvG8oL5Hjb+BlUlapcIxXdHNvXiJuLpDXOfHfLMJN9E13PLceWensvOGYCQqNsYMUTKgP3zFbnW6mrPk4b2VFH9gmJ9lTTeW4MlSA02UTJvx9vPx2z+W9CAAAANF8EkAA0S6+Vva5fTb9bzz7v5R7k/te5uunnN6hXzx7uNPY/+kHKPJEXn3WzeUYXx2cD1ZVfLi42zP+ezUhys47M4JW+M9lJfhm6aoKBoYqVWmYSo8YUR/cVFQo0JeGXvosODXWtABpeH024cqKXFfNw8j59amYCOXb0uGxnj8M1zS1RFhhSZDe5WUZJgjQmq6V7XokqPzTLTbk6U9ZOqnyjTC+7084qpjxc4HjRTJ4amCCSv/40Z//e9XtZMcP9Um3RIXFpv+A+vHVSb7/xyVtU1LdPtJSd66NVWlzsfBez7PpXnm0X1IVpKzvqqHw/PpfJqOk8u2pYYN7E/m1q+H68/WytrNudAwAAAGQCAkgAmpV/f/qp/lj0uB548CFVVLyvU085WT/9yY916Xcv0mGHHWbXwoFAP0iZy80SUqnmJixhlx6vZN1KLXvRC+WY/oc65oz2+jky/SPldNGox82ynppqs4gGdA72V7ZSUx7yAkrRwNb5Pd1plw0SrX8oWMLOlqVb+axesFlF3nFNdlP8slLlm0ykUbb/JwCNVpv+ozRBhZpv+6Nxs1aKy9w+bvxyZ2GBvms2rVDhGpsJ1La7zu4b7NfGy
2ZJ3reOZbKMbMk6Y8eqxaGSaeZ8ip5ZrK0nZ7ml0kxAY+srL0s1la9zg1mBfb7xcrQsnJvt82SsrJt7ve6+bZ9Lz8SCae6696YbXEux/aaFuqV4uKZdlyhoFwv8bFxZGD3XuoiWrHMDU9LwM+LDX7W8zrjvx8v4Mllb3n5iJes2akVhsKQdAAAAkPkIIAFoNl5aU6Jf3XmX+2l859uD9bPrxqtL507uNA6sWD9IBJAyRdb38lWQa/smMiXixlzlBna8gFIt9Z6ircVX2X155eKimU1xy9x+jKplFOWqoPMyd7npi2nAjNWaavpaaneZppvSerYU3aUbr9JoU2nPBpRMZtMTY2Kl8y6dY7KgTB9I8cvyNFdX6Yn5lyUvdwegkbB94NisGL8PIq8U3XzpLFN0LWi4ztb8aOmz4bdPjPZz0/+6aepYONaWQLtFRXnT3CyWmnXVsFkTtNWWVHP7B5oVy9pp0+1s9TH/bXKCLaJ2nPO5Rl7QKhm//x+7z/nOGftX0fWS2Zrw9i32HM3xOmqazZIyWUXTTi7UWLvMC/j4fQWllnT7TQttW3nX5QftxppAVty5lp0xzTnXcCZR+kwO2GLv2saZhpydMCOrdtcZ/n7Mfjva7zy8n3S/bwAAACBztNi2bVtV27aBAtVABvrt7x7Qps1b3EwTggXNj8k0emrxX9yydcbp3bvp4u98W+38FzU4KFZs2KGL79igc09royW3nmHnoqFEIhHl5OTYqebFZA15gR/KywFAxjCZQeNe1tmz/IAeAAAAgP2NDCQAGc30cWT6OjLBo6OOOko/GHGZfnz1jwgeNQJ+CTv6QAIAAAAAAAAaHwJIADKSyTi7655CPf3MX9zpfn376Oaf36i+Z5/lTuPga398S335S4eo8pPP9dG/9ti5AAAAAAAAABoDAkgAMsoXX3yhJxYucssWvvX2dp3Yrp2bcfT94d/TUUd5fe6g8Yj1g0QWEhqO6ato65uUrwOAjNK2vyYuoHwdAAAAcCARQAKQMdaV/q/uKLhLK//2ojs96ILzNHXyRLfPIzROXU7yAkib3v3M/QQAAAAAAADQOBBAQrOwd89eqcpOIGN8vGOHNm95Uy+/sk7zfv+wHnr4j/roo4/VtUsn3XD9BA0dMtiuicaq5WHeP0MvvPqx+wkAAAAAAACgcWixbdu2qrZtqQOAzPXJJ5/opl/+yh0fftklOrdfX3ccjd/u3bsV+eBDNyj0wYcf6sMPP3I+P3Kmvc89e2L95nQ/7Ws68ktf0imnnKzcb55r56Kxu/Oxt3Tn42+rT9fWev72HnYuGkIkElFOTo6dAgAAAAAAAGqHABKahSf+vEgr//qizjqzl668fISdi8bACw59pA9NgMiMf2A+zfRH+te//23XSqx166N1nPP367jj2up4Z+jX92wde2wbuxRNwd/Ldujb0zboG93b6JlbzrBz0RAIIAEAAAAAAKA+CCChWfj44x26ZVq+Oz7lxuv1lZNOdMex/+367DM3SBT54INQsMjLJPpYe/futWtWd8QRh8v8fTLBobZtj1XW8ce7n2b6uOOOU8sjjrBroqkigLT/EEACAAAAAABAfRBAQrNR/MSf9fcXV+ucs8/Sf4+4zM5FfVVVVUXLynnl5rzycl65uQ+1a9dnds3E2hxzjJtBFMwkMsGh49oeqzZtjrFrIVMRQNp/CCABAAAAAACgPgggodmIRD7QtPwZ7vhNP79BJ2Rnu+NI7d///ne18nLe9AfuZ01atmxpM4j84JD59AJEZvzwww+3a6I5IoC0/+yPANJbb71lxwAAAAAAANDYnHLKKXasYRBAQrOyoPgJrXrpZZ3br6+GX3aJnYs9e/YkLC9nMohMsOiz//zHrpmY6XfIlJczQaH4UnNHH320XQuojgDS/kMGEgAAAAAAAOqDABKalfcqKpQ/faY7/subprgZMM3FJzt3ukEhk0HkZRLFAkSmj6iaHHnkl9yycm4GUYJSc4ceeqhdE6gdAkj7DwEkAAAAAAAA1AcBJDQ7j/6xSC+/sk7f/EZ/XXbJxXZu0/fFF19EM4hMcCi+P6LPP//crlldixYt3MCQGxQ63gsKBTOJvnzUUXZNoGERQNp/CCABAAAAAACgPgggodlZUPykXly9xh2/45e/UJs2x7jjTcmSpc9p0+Yt2rXrM33pSy3drKJPPtlplyZmgkDx5eWC/RKZIBJwoBFA2n8IIAEAAAAAAKA+CCCh2THBl78se07HH3ec8r73XXX72lftkqbDv4aqKpM95M0zZeT+P3v3Ah9Vde/9/4sQhCAGIgGDECDBVIqAgBcK0oJWaantKVqB1nM8VCp/i1p97Kl6aI/neexTjpfWU4ulPlCV2kOFeInVaMULwXKRIPdIpYFIjNBgJhiiGIEg/Pfae83M3pOZXCCBXD5vX2P2de219t4zJPs367dig0LRdHNnuWnogJaGAFLzIYAEAAAAAACAE0EACe1OOPjy9UlXaPLXrrBLW5dwG84f+kVdNmG8GyDq2bOHXQu0HgSQmg8BJAAAAAAAAJyI0+xPAK1Q/37n6NzBWQSPAAAAAAAAAABNigASAAAAAAAAAAAAAgggAQAAAAAAAAAAIIAxkNDm5f45T8tX/NXO1e2yCV/WlH+6ys61HG2hDUA8jIHUfBgDCQAAAAAAACeCHkho80wwZdTIEXYuMbNNSw28tIU2AAAAAAAAAABaDwJIaBf+9Z+/q0EDB9i52sw6s01L1hbaAAAAAAAAAABoHQggoV047bTTNOP665Tas4ddEmWWmXVmm5asLbQBAAAAAAAAANA68LQZ7Ua8IEtdQZmWqC20AQAAAAAAAADQ8hFAQrsSm+atvrRwLVFbaAMAAAAAAAAAoGUjgIR2Z9TIEZryT1e5LzPdGrWFNgAAAAAAAAAAWi4CSGiXLpvwZffVmrWFNgAAAAAAAAAAWqYOJSUlx1JTU+0sWrr9+/erurpaR48etUuAlseMy5ScnKwePRiXCXVbuW2/vnFvocYP7aGX7hlml6IphEIhZWZm2jkAAAAAAACgcQggtSImeNSxY0f17t1bnTp1skuBxMo+DdmpU+PIRwcJIqFOBJCaDwEkAAAAAAAAnAhS2LUipucRwSO0JuaeBQAAAAAAAAC0PgSQWhGTto7gEVoTUi0CAAAAAAAAQOtEAAkAAAAAAAAAAAABBJAAAAAAAAAAAAAQQAAJAAAAAAAAAAAAAQSQAAR8eqRaBXu36qmiPPdlps0yAAAAAAAAAED7QQAJgMsEiR7e8qS++8qP9Yv1v9NTRS+5LzNtlpl1BJKA9m6/Vv96uqZPn67cHXbRKVT0XPPXI3iMIuU6ba/V/h257rLwq2nrZI6Z6/wfAAAAAADg5CKABEC7Pt6t2/46V2988JZdUptZZ7Yx2wLAqeUFsu7JsbPNoqHHKFLufyy108bF6n2WnTxRH63WQ9Pvkb90AAAAAACAk4UAEtDOmV5Fv1j/qMqr99kliZltzLb0RALQPmVrypIlWuK8ppxrF0VcrFvnm3V3aFyqXQQAAAAAANCKdSgpKTmW
msqTjtZg9+7dGjJkiJ0D6lf2achOJfbw5j/ojd1r7Vxil/cbo29lXq7b/voLd/q2C/7Vrklsf2mF+vXrZ+eA2lZu269v3Fuo8UN76KV7htmlaAqhUEiZmZl2rqmYXjk3aZ7zkTHt514QZf/qh3TTvHXSmFv16DfKdVOkN44JqESDKSYVnNubZ+q9ulf3RHv2mP1uH6ce7oxJ1+b1uAmXbwSOcXua8mv1ypmme5dMUbadCzC9eGbPk7N3RPyypzmlLNVS83E45tu6au3zyvM2scwxhmqbv37K1fRA7yNHpD3RcxXhtH3J1cFaRo5vXXzro7pjnHM2TFq82LIj+0fPU5i/TQAAAAAAAE2BHkhAO2Z6EjU0eBQOGHXrlOzuQy8kAAFr5/mCR8Y6zZsdZ+yeHF/wyDD7/Xq19tvZJhUneGQs/Y+HtPojOxO21gaPjIxUdbOTxydO8Mhw2v7Q6mhLTVDNHzwy1s27KbBNbbWDR8bS/2gZ41IBAAAAAIC2gwBSm7VJczPP1dz1dtYKPXOjBsVZXsvep3VD5o1autfON4p3bHOcGx57/ATKOVHxz8GpsvE+55zct8nOtQyFFfU/bQwHj8zYR3PW/HckcNSQfQG0L6YXjEnvtuTn0+ySpdpW66MinOrNt93aAm2LDegkZNLIPapbx3hz3jHj9z7a/26BGzwyvXrc4y25V94R16k8TtbOSP2vvrJhxzh3iq9M2y7T++ijbSpwezLdqkfd4y7RvVPdjbTuw3Jv4qPVyrOBtNjztm5evopM2fNvdUo1TO8nZ73pffRRSLv8y3xlLy2sFa4DAAAAAAA4bgSQ2pP19+niO6X71uzQnAvtsmYQema+Fk6Yq3Xv7dDj3+hul6IlMkEho1unrnr4yz91g0V+iYJHRnhfAHCNuVUTwynUzh1qgypxTL0qOkbQuRNtkGadCt5t+j5IPcbd4QZYTEo409tnepyeOxH++p+o1HG6wwR3TDDJpKKbbtP3+e0r93pGBc6bCUiZoFCCdHxGapoGuRNLdY9T7vTpudLVXiApNj0eAAAAAADAiSCA1F6YHkVTH9ONOQs17Wy7rDllD1aanUTL9+mRz/Re1QdusCgcRKoreAQAx+viPr3tlNFDaRl2sjnY4E3cAE6zMmnmvOPWGsfI2l/u9SNqvGxNifTwMsKBpOn1pL4DAAAAAABoHAJI7cImzR07R3pgTbDn0fr7NCjzPm20s3HT1q0y29h0dM+E7EKPm5LNrgunZjMp8i6+c4W0YGqw7DD3GOH9wscKaekN/lRzsannzHxzpMHzjhtpg6++senm3NR/Nzzt7OGw522ur/3BcxNN4WdewRR6O33HPFWp/aIGndnPTkkPb3lSb3zwlhs0um3E9fUGj/z7AkBDRVK4ufYrVGonm9x+rX7JBm8iqeTC6eaa1/7VebanUzRdXzjNXFiP3l4/ouMS6ankvCJp7mzqOzsNAAAAAABwogggtXkmSDJVC2fl6PHvNLZP0Ard/XKWm4pu15q50p1jI8EQE1C5pshLU7frvTW6r2iqG0RJ+85CrXtgguSmsLtbo7zNLS+QNTjH7LPD2U66e6wJ2qTpsskTtPB1G7BZv0w7JwTnF064Upc1cc+p0DNzdHd2jlsXtw0THtMjMUGyxB7TzsFrvH1zZir/zjm+YNhU7XzArnPO286pvkDRgleluV77n53lnN85Nih1igzrFczXFA4iXd7/S/X2PIrdFwAaJCdPq8PjHe3I1zwzVpAu1iVDeriLwnaVh3vTFCl/npvsLa7odrHKVe6WLU37xji5pe/YljiFXR0SHyO+8g9tfSPp+oq0LbYH1Fm9vcDP2nnKD48T9dFqPeT2Jnooeo5cuxSKnLNwr6pcL1hk0uVFgki+7QAAAAAAAE4QAaQ2buHUsbpbEzRxwdSYnjANc+Psa71UdGdfq1tmOeW5QZ2Qlr+8IrrO+f+02TOV7yyrMxhiAkGaqa/ZXlBp35mtG/WYXnHqlTYwSyra6e4fKinW4NnOOju/8fXHNHHyhCZPiWeCXbvuHmnndqt4hZ1skJm6JRyQ65elid6UtHeFlq2YoEmX2nXOeXv8PV/awFmzI9P9B0/wJk6hbp2Sa417ZIJITxXl1Rk8MvuYfQGg8dZp3uyY9G6RQEu2htqeOuvm3WQDJXWMW+TwtosNuBi91dt+vC39j5jjNVLiY8TXu4/tE5RzT+I2pI7TVbatkfrNnueNi+QfJ8plz9mvV2t/ZNyoaOq6yH5jLtHQwH4AAAAAAADHjwBSW2d6Aj2+UPc/MEEL/T1hGmSCsnxZyqIBDy/YsnBqNE3boKmPSSuK9YHdIh4TGNKELPW38wEXTtKNK17V8r0mOCXnuP2UJW9+R5EvIOPjppULHz9eurz6uKnowvvP186miOfsLla+U/NzT8Y4U03kB+dfq97JwSeOTxW9lDB4ZLY1+wDAcZl6bzCdmzO/5OpsOyNlX/2oDZBYY27VvbeGk7SF9dC466Op2+Jztrk9mLJu2s+X6FFb1tLC+pK9NeQY8fUYd0ftNoZ7CeVsi6SZy746Wp8I//lIHacb4rX99phzZJj9brc9rQAAAAAAAJpAh5KSkmOpqXxdtTXYvXu3hgwZYufqY8bhmSrl7IiMe2TG9XHTzj1uew6ZAIqzybPhVHNmfKKxr2rSmoWaJt+0DYa4+ytHu+7up6U3jFXx7GjZfu44SC9f6R3HX+bumOPF1NGU/8pXc6T5O3Wjs6+cchbqSu18Wbo/XOdGqX0OokyqubFaNnmNTe0XnI+21euhFGhTI89bWJ1lukuaXtmnDUuQZ9LV/WL971ReXffX603w6KcX/rDB4x/tL61Qv36MlYTEVm7br2/cW6jxQ3vopXuG2aVoCqFQSJmZmXbu1Ct6brruMWncYgJGAAAAAAAAaJnogdSOjLo7RzeumKOL77NjC7mp17wUckZo1avK9yatFbp7kd1279N6ZIF041dN8MOOWTQ/On6PCY4MuqGe8XxMLyP/8Z6ZH0hpZ3o4LZw/XzuzB7sBFZPWbufLr0rNkL4uLH/nbm9i/WO625fCzu1ttWCZ7dXkpexrkLMnaNKEFVq2KnwmTBDr3ONKH3gymYDQw1/+aa10dn5mndmmocEjAAAAAAAAAEDrRQCpXRmpOTkzpQVTNcgEkcLjGtlUdHfpSt1ot/RM0H2Dl3kp3sbOkR5YE+nJY8YPejZ7ji62KeCuWTBTz9bbk8Y5/pq52mmPd/Gd0n1rwr2RnDIvvVITV6yQBtsARb8sE8OKm76uMQKp9szLDXR54za558Ise32SnnXORTigFB6f6Rp3H6ftk51tG8Qp9/EcDb5zrD3eVO30nbeWzIxpdNsF/6qnvvYrp7436bvZV7kvM22WmXWMewQAAAAAAAAA7QMp7FqRxqWwAxqewq65kMIO9SGFXfNpaSnsAAAAAAAA0LrQAwkAAAAAAAAAAAABBJAAAAAAAAAAAAAQQAAJAAAAAAAAAAAAAQSQAAAAAAAAAAAAEEAACQAAAAAAAAAAAAEEkNqojfedq0GZCV73bXK22KS
58dY5r7nrvTKiQlp6w7m64ZmQnU/MPa5bPgAAAAAAAAAAaK0IILVRo+7eoV3vea9nZzkLZuVE5nfdPdLbyHFjTnS78GvOhXYlAAAAAAAAAABolwggAQh4Ydcbuu2vv9C38n6o777yY/30rf/WG7vX2rUAAAAAAAAAgPaAABKOS+iZG6Np7254WsHkdjvdlHfe+vu00S5Fy/bpkWo3cPT7bc/omDN/eb8x+lbmZfqkplrvVPzd2wgAAAAAAAAA0C4QQGrnFk4NB3q8V0PGOdL6+3TxnVl61k15t0b3aY4u9o97tGCOimd76fCenfWYrqkVYEJL9O9r/lvl1fs058Kb9Jsv/1S3XfCv+m72NyLTAAAAAAAAAID2gwBSOxc7BtLj30mzaxLb+Ppj0qxJGuXOpWna7JnSgmXRnkYT5upGO47SqBlzNXHFq1q+15tHy2TS1pV8vNsNFI05e4RdWtsuZxuT0s70VgKaQmHJAfdn1adH3J8AAAAAAAAAWgYCSGikkHYUSRMH97PzcWQPViQMdfZgDbaTaLle/2CtBp7Zr97g0Zw1/63CfUXuT4JIaErvhw7aKQAAAAAAAAAtAQEkxOWOcRRJS7dbxSukwQNNWChN52ZL+Tt3e6vqs3endtpJtFym99Gws5wLm0A4eBQOGsXOA8dr2MAz3J/D7U8AAAAAAAAALQMBJMSVNjArmpbODQJNUJbtdDTqq/6UdSEtne9PaedYMF9Lbcq6jYvmKH/ClbrsbG8eLcvDm/+gb+X90J1+cddyd/qND95y5/0GndlPT33tV7q83xh3/oWrfqeHv/xTdeuU7M4DAAAAAAAAANoWAkiI78K79eysx3RN5rkaNHaO9MBcTQsHgZx16x4o9tZljtXdmqt1d4+0Kx2zrpTmmHXn6poFM/Xs49dGU9qhRTFjHoWDQsZtI67X5f2/ZOdqK/54t5vqDgAAAAAAAADQtnUoKSk5lpqaamfRku3evVtDhgyxc0D9yj4N2am6mZ5I55+VXWfwaO3eLZq7/tF6g0x++0sr1K8fAScktnLbfn3j3kKNH9pDL90zzC5FUwiFQsrMzLRzAAAAAAAAQOPQAwmA1xOpnuDRw5ufdHsfNTR4BAAAAAAAAABovQggAQgwvZFu++sv9FRRnh7e8qQ7bXoepSWn6r/G/i+7FQAAAAAAAACgLSOABCDApLLr1qmrXnhvud744C0dc5aZtHW/+fJPneXJ3kYAAAAAAAAAgDaNABKAAJOibu7YO/TU1x7SC1f9zg0ckbYOAAAAAAAAANoXAkgAAAAAAAAAAAAIIIAEAAAAAAAAAACAAAJIAAAAAAAAAAAACCCABAAAAAAAAAAAgAACSAAAAAAAAAAAAAgggAQAAAAAAAAAAIAAAkgAAAAAAAAAAAAIIIAEAAAAAAAAAACAAAJIAAAAAAAAAAAACCCABAAAAAAAAAAAgAACSAAAAAAAAAAAAAgggAQAAAAAAAAAAIAAAkgAAAAAAAAAAAAIIIAEAAAAAAAAAACAAAJIAAAAAAAAAAAACCCABAAAAAAAAAAAgAACSAAAAAAAAAAAAAgggAQAAAAAAAAAAIAAAkgAAAAAAAAAAAAIIIDUipx22mk6cuSInQNaPnPPAgAAAAAAAABaH57utiLJyckqLy8niIRWw9yzAAAAAAAAAIDWp0NJScmx1NRUO4uWbv/+/aqurtbRo0ftEqDlMT2PTPCoR48edgkQ38pt+/WNews1fmgPvXTPMLsUTSEUCikzM9POAQAAAAAAAI1DAAnwMQ+yzQNt8yDbPNAG0LwIIDUfAkgAAAAAAAA4EaSwAwAAAAAAAAAAQAABJAAA0Hg7cjV9+vTg67kiu/I4fbRaD/16tfbb2eawf/VDemh1cx6h9Sh6brpyd9gZAAAAAACAGASQAABAo5jAw/T/WGrnfHLu0fTpD2n1R3a+MUxAavY8rbOzzcHU+6Z5zXmE1mK/Vv96uu7JsbMAAAAAAABxEEACAAANZnrwhAMP036+REuWhF/3apq7dJ3mPdm8vYgAAAAAAADQ/DqUlJQcS01NtbNA+2YG8zeD+pvB/M2g/gCal3m/mfedeb+Z9x2aTigUUmZmpp1rKkXKnX6PTN+ji299VHeMi/2cjK43waUp53oBJ7fXz5hb9ejt4+TtEdxuYrndxseUf4Mej+x770UFuieyzTTdu2SKsu2c6VnkBrWm3qslV9ulJh2e26PpYt06/wbpyZs0b623ymOW36Fxsb8C+ff7+SUq+I9or6hwm6JMT56Ycn11iLZ9mlPjpVpqtouch7r3jdbDaev83sqL9M6ybY+s9y1zpz2RY1vRukfPfVR0/8T7Gb7rNtVpUY5XSnibyHUI87cHAAAAAAC0OvRAAgAADfNRSLvciYt1yZB4QfZsDZ3qTS0tPMHxkPzWzvMFj4ylumd6rprwCHGs0zxf8MhY+h/+MYPiBICMnHtqj7G01gaPjIy0+MEjI96+pq2RQJHhzP/6IV/wyDDLor2+4qXqM3Wvb+ynxuwXDh6Ze6H3WXGCR4ZJaXii42IBAAAAAIBThgASAABomH3lNmgxSGkJOi/37nOxnWq4HuPu0JKfewnw3B46S5bU6t1kerm4qfLm3yrvCEu1LRLMqU8Pjbt9ie61wS3Tu2nJkji9j2KZHjRuer5HdesYb1EkMPbRNhWEexTZNH7h8td9WO5N+ETqb3rkNHJfr75L9Oit9tyuda5CrWXlcvf8aLXy3ECO6VVkj2nP2bp5+SpStqb42uPVy+vRVPd+MSJ1N+dxv0Kl3uJIO8PXM2dbMwf6AAAAAABAcyGABAAAWrYxt2piOI1a6jhd1Ry9nGq5WLdOCKdf66Fx34gJiDj1uMMESkw6uh25mj49Tg+cMH/9jcbs69Qj3Nurx5BLbPAs3jLP/ncLbJDP9NKa7pY9PdJbaZdCH7kTtTR2v2nfCKcjNHooLcObMj2WzL65muIFkmJS6wEAAAAAgNaDABIAAGiYs3rbYEXiQET5h17IoUm5ad+ijqeXU+PF9LKKtD3MjAdkAy3/ERxRqH6N2Tdeb6/EPcDqtk7l++xko9S/X/bV98qG2FzhQNJ0X2o9AAAAAADQuhBAAgAADZOapkHuxDoVvBsvLFCkbbYnzbRhTdjvpDQUCEI0S5CqlpggWSR9n2f/6jx5oZ+Ldet8L21bOA1dfU5k3wbzpcfzv6b4e0LFc7z7uanxwttHU+SZ8avyG5xqEAAAAAAAtCQEkAAAQANla6Idc2fdvJv00Gp/WMf0qrnHBkamaWhswCE8Ro8jGkCJw7ddhD8IERmrJ06QyhdoKloRTr9WW7xxhmpbp3krwiny9mv1S7bGU4e6KdkiQaypV9mxlKLBs/qcyL71iaS0858zmyZv+vRc50hBu8q9M9bY/YKiPapy3X3NmFPRIFL4GAAAAAAAoHUhgAQAABqsx7g7Ir1lTBDJCzCYVzh4ZHrVRMe9iY7REx1b56Z5dfUgsts9FwxZRFKihcfl8Y0rlD3MJk9bO0832WMkHlPIkXOPu40X7KiD3W769Js0b61ZEB
0XKZJGL7JNuP31O5F96+UfIyp8zmyavItvnVhrPCLvGj6k1WrcfkHRwGJkX985C4/XBAAAAAAAWhcCSAAAoFGyr16iJT/3j3hjTb1XS5bcYXvVWKnjdEfMttN+Hhwvx3XulMRp3MbcqnttgMJl0qzdPi46LpKz76P+9SbQ8/NbbeAqKnacnrrFlmECY9G2+QNpLtP2+Xb7nG119tg5kX0bwlyf2HN58a2P6o5x4TPWQ+Ouj3d+6tsvMdOm4DUwpune2PsBAAAAAAC0Gh1KSkqOpabylz1gfOPeQq3ctl8v3TNM44fyjWmguZn3m3nfmfebed+h6YRCIWVmZtq51mn/6oe83kqxAaPm9NFqPeT2cgoGjAAAAAAAANobeiABAAAAAAAAAAAggAASAAAAAAAAAAAAAgggAQCAFsmMq7NkyRItOVnp6wwzZpM5JmP3AAAAAACAdo4AEgAAAAAAAAAAAAIIIAEAAAAAAAAAACCAABIAAAAAAAAAAAACCCABAAAAAAAAAAAggAASAAAAAAAAAAAAAgggAQAAAAAAAAAAIIAAEgAAAAAAAAAAAAIIIAE+7+096Py/g0pDh7wFAJrcpweP6u97qrV8634t2/SRs+SYtwIAAAAAAABAi9GhpKTkWGpqqp0F2rcvzl6n3fu84FH3rh01MrO7Lsg8QxcMcl7Oz8HpXd11AOI7ekzaXXHQeR3Sno8Oez+d95R5X5np3fsO6qNPjtitPT26ddKwgWfopXuG2SVoCqFQSJmZmXYOAAAAAAAAaBwCSIC1/9Mj+v2rZdq864A2FX+iDypq90LqeUYnN5g0Mqt7JKg0sHcXuxZo+0JVhyOBoXBAaM8+Gyhyl5tefHXrenpHnZPaWf16ne78PF3907pozrUZdi2aCgEkAAAAAAAAnAgCSEACeysPu8Gkze85L/vzHx/VDiqlpSQFg0rOyzwYB1qbTw9+7uspZHoOeQEhExj6IGQCRYf02eGjduvEzjnrdOfVWf17dXV/9rPz/Xp1cafNewbNjwASAAAAAAAATgQBJKARzMN0E0zasutTbXrvE21+71OVVx22a6PSe3aOpL4zgaURA7spPZWgEk4dk1rOBIDcQFC4B9E+m2rOBIqcdR99UmO3Tsz0wuvfq0skIGR6EpkeRF6gyPvZ8bQOdmucSgSQAAAAAAAAcCIIIAEn6P3yg24wKRpUOqCPDgTHeDH6ndU50kspHFRKS+ls1wInpnZqOTv2kAkQnUBquWgPIm++2+mn2a3R0hFAAgAAAAAAwIkggAQ0g+K9n7njKG0p+dQNKJnA0sfVn9u1UWb8JP94Suan6eEB+IVTy5meQh+YlHI2tVw43ZyZ/+xw7fsrVjS1nAkMnR5JLecGilJJLdfWEEACAAAAAADAiSCABJwkf99THRhPyQSVqg/VHk9mcHrXSC+lcGCpe9eOdi3amqPHjnmBoUBqORMYsoEiZ/p4UsuZ4JDXk8immnOWk1qufSGABAAAAAAAgBNBAAk4hbaVej2UwkEl8/NQTe2g0nn9kmsFlbp2JpVYa1DxcU2gp5DXg+hQpCeRma5P3anlTA+irqSWQy0EkAAAAAAAAHAiCCABLcwWG0wy6e/CYyp9fvSYXRvVOyVJZ/forLN7dtYz/36+XYpT5b+efl8rt1Xp4+ojSj0zyQsYfXRYnx06vtRyJlgUDhiRWg7HgwASAAAAAAAATgQBJKCFMynOwinvtuyqtj8PSM5ydeig745P0/+75Ty7NU4VE0D6r2dKI9clLDa1nOlJ5I055PUoMsvILIfmQAAJAAAAAAAAJ4IAEtAKHT5yVHf8fqeezP9Q/+uf+un/fG+QXYNTJRxA+vroVM2a1NcLDp3VRd26kFoOpwYBJAAAAAAAAJwInmwCrVDnTqe5qc6MLkm8jVsSM0bV5SN66gvnJBM8AgAAAAAAANBq8XQTAAAAAAAAAAAAAQSQAAAAAAAAAAAAEMAYSEArMefJ9/TIS3vsXN1u+cY5mns9Y580J64HWjrGQAIAAAAAAMCJoAcS0EqYAMTVX0qzc4mZbQhWND+uBwAAAAAAAIC2jAAS0Io89qMv6JLsM+1cbWad2QYnB9cDAAAAAAAAQFtFAAloRTqe1kGP33ae+vc63S6JMsvMOrMNTg6uBwAAAAAAAIC2igAS0MrEC0zUFchA8+J6AAAAAAAAAGiLCCABrVBsarT6UqmheXE9AAAAAAAAALQ1HUpKSo6lpqbaWQCtySMv7XF/3vKNc9yfOLW4HmhJQqGQMjMz7RwAAAAAAADQOASQAABogwggAQAAAAAA4EQQQAJO0L0v1+jPW47o08PH7BKg5erWuYP+aUQn3TM5yS5BW0UACQAAAAAAACeCMZCAE2CCR396u4bgEVoNc6+ae9bcuwAAAAAAAACQCD2QgBMw+r8+cx/Iv3Rbqi7oT48ONEzZpyE7dWpc+J9H3Z5IG/69q12CtogeSAAAAAAAADgR9EACTkC45xHBI7Q29JoDAAAAAAAAUBcCSAAAAAAAAAAAAAgggAQAAAAAAAAAAIAAAkgAAAAAAAAAAAAIIIAEAAAAAAAAAACAAAJIANAKfHqkWgV7t+qpojz3ZabNMgAAAAAAAABoDh1KSkqOpaam2lkAjXHe//Ee4O/5ZR/3J9AQZZ+G7FT9TJDo99ue0RsfvGWXBF3e/0v6wdDvqFunZLukfhf+51H35/b/bPg+aH1CoZAyMzPtHAAAAAAAANA49EACgBZq18e7ddtf5yYMHhlmndnGbAsAAAAAAAAATYUAEgC0QKbn0S/WP6ry6n12SWJmG7MtKe0AAAAAAAAANBUCSADQAv3+nacbFDy6vN8YPfzln7rbmn0AAAAAAAAAoCkQQAKAFsb0JHpj91o7l5gJHt12wb+602YMJLMPvZAAAAAAAAAANAUCSMBJF9LSG87VoMzoa+56u6o9W3+fcy7u00Y7m9Dep3VD5o1autfON1bC43jX5YZnQnb+1Cms2GGnEgsHj8zYR3PW/HckcNSQfYEmsSNX06dPD76eK7Irj9NHq/XQr1drv51tDvtXP6SHVjfnEWofo+i5eOenSLlNee5inIx2AgAAAACAto0AEnBSmSDFWN2dnaNd7+2wrxxpKkGkU+JEg1HNxASFjG6durrp6UywyC9R8MgI7ws0Jzcg8h9L7ZxPzj2aPv0hrf7IzjeGCUjNnqd1drY5mHrfNK85j9DwYxQ9d4/8Z/DiPr3t1Ik7Ge0EAAAAAABtHwEk4KTareIV0o1fHWnnjZG68YEJWvj6JjuPZnXh3dr13t0aZWdbsk+PfKb3qj5wg0XhIFJdwSPgZDA9W+7J8aan/XyJliwJv+7VNHfpOs17snl7EbU22Vfbc3R1tl0SdfGtj7rr7hjXwy4BAAAAAABoGQggASdVP2VNUK1gUdp3FmrX3b6gktszJpziztdDxl1+n+beZ1Pf/U9sOraYNGyJyoljoy2zvm3d7e572peGzxx/k+ZG9g2mhwuUe1+w3f51c1+3C8MaUXeXSU13w9POGfCEnrmx9rw5fiSFnVPnsXOUr
xW6e6y//BUJ23KyDDqzn52SHt7ypN744C03aHTbiOvrDR759wWaXpHybc8WE/iYcq47aWVrSjiItHae8m02RRNwclO0BVLTRdO35TrbuduEezQ5+97kLDfp1/z7FoWn3VeuU0JU3BRxJh2eu63pEbVfq389PRL4WjfvJrvcm6/N2z56vMRl5z4Xrtcv9Ojc+McI1s9re6K6RLYNv+Kl9ItNHxipW93trFV2E6fNAwAAAAAAbQsBJOCkStO0uXM1ccFUG6CIN+aOF9gYnGNT3OVk6e6x/kDGY9o5eI27bs4/T9KNzvwr4fR3e1do2YoJmnRpmjNTXzk+6+/TNUVztc6m1Xt21grdvaiOHlELXpXmmm3X6L4Jj+mazGX6mrtvjlufR2ybTNAmWq6zzml3pL3mmAtm6ll33RplFT3mLXc1ou5hFzrnYsWrWu4GgkJa/rI0Uf75FbV6fs1Z41wLTdB9axZq2tne0vw7i21bbNtigl4nw7BegafykSDS5f2/VG/Po9h9gSb1UUi73ImLdcmQeD1msjV0qje1tLAJgxNr5+meQEq2pbonJojUdEwQ5ibNW2tnw3LuiTOm0DotzQnXa4B6JNvJ42QCPOHgT4QJqPkCPYFgW5hJHVhPMChu2Q3YDwAAAAAAtF8EkICT7exr9bgboNihdQ9MUP6dY91AUmQMpPXLtHDCXN14oZ2/cKYbyIgEiRQOEBkj9bVZvh5Nu4uVP2u2Fwyptxwfk9bt8WvllRrSjvqeJ4aP4exxrsnINGuSTQnn9bDyeEGbiZMn2HK9VH35zjITQtr4+mO+/dI0bfZMd8rVmLpHmGOvULE7BNBuFetK3TJZ3rwbWJupr4XLq8PEB2ZG6uS27RTo1im51rhHJoj0VFFencEjs4/ZF2g2+8rtGEWDlJbqTtTSu8/Fdqrheoy7Q0t+7iXA05hb9WiclG6RdHnzb5V3hKXaZns51a+Hxt2+RPfa4JaXNu4OjYvXho+2qcAEj2w9zDHD+637sNyb8Jt6r1evJf+s6Q06humpFW+7Im1zAzzTdK897qO32nNZGrK9kII9wALnIydPqz9K1M79CpV6yyLnMXy+c7Y1UyAOAAAAAAC0dgSQgFPITV1nA0kLp3o9bEIlxdKKObo4kkZtrO5eIe0sie2p5Bk1w/RoWubua4Iy4V42jSonkC5ujpbZxSfGG+9p8MBwsMvPC1JNHOxLt9YvSxPtZGPPgSdNl022Y0mZAFT2YI0amOXNu4G1cLCqdfjB+deqd3LwyfNTRS8lDB6Zbc0+QJs05lZNDHeuSx2nq2yApEl7OYU55d9hAiy3j1MPmyquVs+diIt164SmijR7gaUlS6Yo26bIuynQ68oR6QE2TVeFA2zh+iYKiLl6KC3Dm1r6H176ulxN8QJJ5njeKgAAAAAAgAACSMDJFDNOT1jawCw7ZacnRNPJhV+PfydeIMZx9gRNcnvnbNIrC6K9bBpTzsZFc5Q/K8dus1C3NMnTRK83Uvygj9e7J3+n213IY4I8drLR58BKu/RKTSzaqY0lxV4gzaS1c+aX+gJrrYXpSfTTC39YK4gUj9nGbEvvIzS7s3rb3j+7FEowflD5hzFBj6aQkSZ/f6Tj6eXUcNHxmWqlimtWvnGXZs+zPb1iRHqANV721XZ8KiscSIo7xhIAAAAAAICDABJwMrnj9MzRxYFxdUJaOt+Xzs1uszAyrpHXOyiS4q4W2/Nm6lTtjKRfczS2nKKdXmDL2e6RBe6SE+TVK5yyzoxrtPDOaEq7UV+dKS2Yr6XuGEU2pV1Yo8+BdfZgDV7xqh55WcpyOzf1U5Ze1bKihqWva2kGndlPD3/5p7XS2fmZdWYbsy3Q7FLTNMidWKeCd+OFHcJp2KRpw5qwX0skhZunWYJU1v7VefLCRhfr1vmmh040JVyz2pEfGXcpnGYuksIuLBLAOx7hHk7m9ahuDX+srJ2n/AanAgQAAAAAAO0JASTgpBqpOe+t0X1FU21qNi8927LJa7Tr7nAPGWebNXO1c6pdP3aO9MAazakjAOL2vAmMjWQ0vBw3DV44ZZyz2S0PTIgGlE6ASdH3bHY4Fd1ULZyVE+1FdOHdWveAdPdYr36PaGYkhd3xnAOPGRNqhfJXZOncyBhNznx2gvR1bu+tFW4d6g1OnSKmV9FtF/yrnvrar5z236TvZl/lvsy0WWbW0fMIJ0+2Jtqgxrp5N+mh1f6wjum5c48NvkzT0HDKubC15QqPIBQN0sTh2y7CH+T4aLXyEgWpfIGmohUJevE44o5lZEWCU1OvsinhokGxxqjrGPHsL/eS00XT9e3XtrdjWhAJ4C1VXuTcR3tM5cYEgqJ1iN3GjJUUDSLtKo8XDAQAAAAAAO1dh5KSkmOpqfWnSAJQ23n/xxuPZs8v+7g/gYYo+/REQ3Mn5sL/POr+3P6fBJ7aslAopMzMTDvXtIqeq2dcoPm+8XjMeD6JUrI5TG+bKSZgYsYb8qeMm3qvHu2TV3scoLAxt+pRM06RmY7dNyBan9h6R47ts3/1Q4mP6dRpydXZvjbFtNUR7xhDC+2y8P6O8HYX3/qo7jDjGdXVBl9bE9bPt028OkwsT9Su2m0AAAAAAAAw6IEEAAAaJfvqJVryc/+IOpYJkCyJCUakjtMdMdtO+3lwPB7XuVMSp4obc6vu9adz8wePDGffYLq3i3Xrz291/h8UOw5QPD3G3RGsh2nTfFtWzjYVuQsTa8gx4optg2njElvW2gJts2NOmfrVOvcx5yNeHcx+tVLiOVvdG3u9AAAAAAAALHogASeAHkg4HvRAwsnQnD2QTpZIb5vYgBEAAAAAAACaHT2QAAAAAAAAAAAAEEAACQAAAAAAAAAAAAEEkAAAQIvkjvezZImWkL4OAAAAAADgpCOABJxUmzQ381zNXW9nrdAzN2pQYLm3nVnmvW7U0r12FQAAAAAAAAAAzYwAEnCqrb9PF98p3bdmh+ZcaBaY4NFU7XxgjXa9t8N7rblSy8YSRAIAAAAAAAAAnBwEkIBTae/TumHqY7oxZ6GmnR1etlM7NUGTLk2zCxxnX6tbZq3QslUhuwDtzQu73tBtf/2FvpX3Q333lR/rp2/9t97YvdauBQAAAAAAAICmRQAJOGU2ae7YOdIDa2zPI+vswRqs2sGiUXfv0OPfiQaVNt7nS3F33ya71Ft+w3336Qaz/Iantdxs51vvBq18KfHC6fPcl7M9IaqW5dMj1W7g6PfbntExZ/7yfmP0rczL9ElNtd6p+Lu3EQAAAAAAAAA0MQJIwCkR0tIbpmrhrJxAUMgzUnNyZir/zrGRwE68MZOuKZqrdW6KuzW6r2iqbngmGvrJXyDdYtY9fq0u++pMacEybbTrQqteVf6EK3WZ6fHkps/L0rM2Vd6z2XN0sT/YhFPu39f8t8qr92nOhTfpN1/+qW674F/13exvRKYBAAAAAAAAoDkQQAJOgYVTx+puTdDEBVNrBYdcF94dGf/o2VlmexNICvcaCmn5yyt04+xr5YWe0jRt9kzlO8siIaRZkzTKTurCSbpRj+kVe5wPdkb3
3fj6Y5r4wMzItqNmzHXqFA024dQyaetKPt7tBorGnD3CLq1tl7ONSWlneisBAAAAAAAAQFMggAScChPmat3jC3X/AxO0cGo0nVw8JnWdF0haobvnmBRzu1W8IhxUsq+pj0krivWB3SdopG40x3nd9CzapFcWzNTX3JR5Ie0oUqCn06Cxc5SvYu2ooz44eV7/YK0Gntmv3uDRnDX/rcJ9Re5PgkgAAAAAAAAAmgIBJOAUCPcASvvOQl9gyOOOSRQnjVz/wRPsVD9lOZM35niBpejr7mivoxhpl17p9Sxav0wLI72T0nRutjTxgTUx5SzUNJPeDqec6X007CznIiUQDh6Fg0ax8wAAAAAAAABwvAggAafYqLtzdOOK6NhDXrAnOKaR6Tm08M4Vmjh5gtKc/y5zfi6cHw06bbzvXA26ITpfy9kTNGnCY7pmarHumzHSLnSO/VUz1tJj0fGRTPAq8z5S2J1iD2/+g76V90N3+sVdy93pNz54y533G3RmPz31tV/p8n5j3PkXrvqdHv7yT9WtU7I7DwAAAAAAAADHiwAScMqN1JycmdKCqV7Po7Ov1ePv5WiwP7Vc5lQpZ4ce/44d9cj0XMqeo4vt+msWzNSzj4fHRIrHCzppwpW6zN+76MK7te6BYl1jy7n4Tum+NYl7MuHkMGMehYNCxm0jrtfl/b9k52or/ni3m+oOAAAAAAAAAJpKh5KSkmOpqal2FkBjnPd/vFRhe37Zx/0JNETZpwn7igWYnkjnn5VdZ/Bo7d4tmrv+0XqDTH4X/udR9+f2/6SnUlsWCoWUmZlp5wAAAAAAAIDGoQcSALRQbk+keoJHD29+0u191NDgEQAAAAAAAAA0BAEkAGgFTG+k2/76Cz1VlKeHtzzpTpueR2nJqfqvsf/LbgUAAAAAAAAATYMAEgC0AiaVXbdOXfXCe8v1xgdv6ZizzKSt+82Xf+osJxUdAAAAAAAAgKbFGEjACWAMJByPho6B1FwYA6l9YAwkAAAAAAAAnAh6IAEAAAAAAAAAACCAABIAAAAAAAAAAAACCCABAAAAAAAAAAAggAASAAAAAAAAAAAAAgggAQAAAAAAAAAAIIAAEgAAAAAAAAAAAAIIIAEAAAAAAAAAACCAABIAAAAAAAAAAAACCCABAAAAAAAAAAAggAASAAAAAAAAAAAAAgggAQAAAAAAAAAAIIAAEgAAAAAAAAAAAAIIIAEAgIbZ+7RuyDxXg+7bZBfUJ6SlNzjbm30yb9TSvXZxa7H+Plt3+2pwuxtrk+aa8m942jljzWPjfb52OK+56+0KANI7+3TOv+3TBjubUHmlbvy3cuWW2/lG2vDHD/Wbd+yMKeuBSjWqqIbWs6Hb1alauQ98qBtfqz7hdgd9rN80Zd0Mt70fOq+mqmN9mqINsUyZvvujRTr+Opp7/5w/fmznjpP/PmySe7y2JqlnI+pW/lq5vXeDr8i97TDbePPe+Y9sF/v54Z6f+OtijxO4hv79zKtW+/33u78OMW1sTDl1buu9vyPraq3316epeO1q2e+/mM+9xmiK94v//eefbkIt8f0Xy61jeNvYukb+LTKvOt4fse/dwH7m5Tu39byv/G2oVe+66hPmll97XeDc1PpdpTneg21JQz9PTuA9HZbwfm9k2Y1435xyp6Su9t+leL+32/fob95pgutZL+/eqv056Tt2M30+GwSQAABA81j/mO5eIU18YI12vbdQ0862y1sDEyyb+pg0K8ep+w6te2CCtGCqbnimuUI8zSf0zI26ZoF0Y84Opy1rdJ/TlIVT/QG9TZrbiOCVKY8AFNBY1Sot66D+ve1s+VG9nN5R4dl2o/xzFfY+Tf3tbEO5D7Pi/eFu/mh++YgmT+quPb/srSnHeUITlx/HcbbhRDTJg1WcBB/rN4uO2On69b6it3Pf9om+ZnRylnbSTVckexs4Pth7TMPSpdwHPlOhe5+bbbvrER3WyMg94bwPFh2W3PXeup+GH2K9s08jl3XUC75j3L8o/HDJqe8DhzVshl33y666a8tnwQdgvvt9wx8/0/0jurrbvjDiiL4VOX5jyqlv2yP6oLyDHrkzvN55/cuZdp2D9x8Savr3n5/5d+JbZZ21yd0+5r41D22dY99l7+tNkz7XtyL/ptTx/nSUl30u2feV9wr/W1bPe8U55k+XyXuv3NlZWnYgGHjy1WfPDPnqY5ltnPJftrMR7meGLdfZ94V0/2eN4xS8B9EAzRg4QLLGjezgnOOj+sAuCSvfUuO8hzpp3Pl2QXN6p8b5N7iT8zlQc0qCfQSQAAAAYu0uVr7zY+Lgfu5s2ncWuoGkx7+T5s43rZGa45S96/Fr1Rylf7BzhfP/Ccpym5KmaY+bQFI4oGd6P03VQjPZACZ4dPGdpjygDTn/LO355VkabWcT6t1TC483SFF+yPkDM0nj7L4bNh3RXSN9D0WbUkPb01An0u4Y5g9tjTz9BANnyZpyZx8t9D3kG5Ye/4Ffc2iaNrQvo/+lTzAIcKKa+h5vIuWvHVRh7w52rrG8h993zfC362Ot3mIeTMXe8/ZhVtnn3gNh8/lS3kGTR5j1zraTO+nlTYe8dbHn6vwk3aVj+sBdeaZ+9Ms++lHkwdeZGjdCenlv9CF89H43dVHkc2v0pM6aHHmI1Zhy6tnWPJxWR2UkeIPx/jsOLfjfhKbU9O+/IBPMVeSLH8H71tyXL4/oGrmv3eDUnT29bet6fzpMuZPPNsGrWHW/V9xj9ra/VzjX5KYRx/TyFi+45K3rrOnhfc/vqkdUo9X2oN6XJg5r2IjaxzW/n0yedEbk+sZ+fvMebEFa6L+FbVHvEUmarCNaHehZVq3Vm5z376SuzjWo/btp0/K+MHXXyK7q3/uIHvV/QeMkIYAEAACOiwkmuOnQnrGp7cwr3JPFpH8zPXgc+XeOjS4Pp8Gzr2hPlnAat/s01017F+4h40+D57x8aeTqPL4VTN0WTKMXWJegB46pe6JeRwn3j6T6c9rirr9HD5g2xN3GtKd2Crtw28KvQI+fQGq9hqYGXKG7x8Zua86tDR6tmKOLE53z8HLnuOHg0cKpzvK4dQ/ve582uvO129Iae3GhFQp/E/O1aAoX861df1qWyLd4/ekw7PRvfGlqAt8wTphaxv+tTy+VhD99SfCBS/ihsFnRiHr6eN+Ir/RSarjb+dJ5xGlPrq+8YFqVj710GHHXWf522+nf/DFaXuw38xOn+DF/aMs+RHMEzp9/W3v+/mjPyW9DGrnsmLP9YY10v0Htrb/xtSrn5ye6xanX/Yt8+7ttjpYbPH9x2utsHyw/pg2Bb23HtMGRqL3eNXLupfC6wD3i8Nfzj879kYD7rfctzsSWz6LlJ7z34mvuOhpeWfHvSW+dd9y6tnM1pG32vq73PetqwD0ej61H8P1vy7H1DxzHWf/TTUn6xeSOdoGn3vZa5uH3/f4Hvob7beOk+h8Omh6NsUGXON+Ubjzf/e4Gdny9KF2fq7See89T+32TkGlLwt4NrfP95/88D3ye1Hfv+o/zwGd1Xs867zPf+6VJ3n/uNnZdW37/xeh
/ti9o6/47Hg6omvsyURDIUef70/ROPr4vQQQDWh5/0La2cODYkd7F7Un1o5F2PsJrV+L6BN+DgfdfTA+nOu/7B5x7xr0m5Xro985P33s3cH+Z2UTlxOX9fhDZPsH1dtl7N/b3I3+b/PdrnfVwy7Lran2eHMd7wD1H0WO4x46dN+fMtmGDOYbbm+yYbnnA/7495Dt2HeciwJ7D8PH8bYv9TPC/L/3r3OXRz4XfrA5eU8N7b4av+4l8TtT9O2mT6X26Jjtvtvs3+e9Vf3A4/Ltp+HMpfpsi184KngdvPvgZarnH8r5QYr5A4g9CnywEkAAAwAlZuHOwHg+neVsxRwtNsOPCu7UrZ6a73k1hZ3rXmKDJ2DnK96WFC6ZSc6woVtbccA8ZE4wYq7tXzNSzpofOmrmaGCeNXNzjO0yA55oFE3TfmnDqNhNE8QIbwXU5utEEUPxjHF040031ZrgBsDjBpzr3NxZIXzP1fu9efX9ysG6hVa8qX87+M2r95Rbp5eOlnNuhZ2f5Us7ZwJyXFtCsi7YpkVEznPPmTplt/cEe0xvJqbuZnDBX62yvpI33+c65aZvZb46zj3NN3XPscOt2d+261+Jc87uctkTra87nnOA1B5qN84f03iR56Vs66eVln+inOsOd3zTJ+eNr2WcJ/pg+osKzbaoodz9fWpgI74/1cGqZTZPk/NEe/uPc+xZi9FvD4TRU9oFMrfQvx1nPLYf1wWTv+G46qYQPVo445XeJlB9NX2X+2PWlxbqzswoj6+pyTPfLlufsM3nLZ4E/jKMpfrrrkbLY1FThh2jOsRf5UvO45RwMHPv+stO8cm5Oc8+Dejvlhr/R7UpyzrNzDGeBm8LO/Ya0c10WfR5Nf+Wez/D5S9De3mcFy39nn68NXvqeaLohfxvqa69jy+fqb+vywgjnOi8KXyNTz2h6oU1nf+6c0/jMN9lfGOFMmBRH4TYmvPdqOxl1jGjoPZlwu2Db9szoWGfbohK9Z4/zHjcPpR6o0eTIt4m9enkpqJxyRh51A5d+G5Y59Z7svz996j0vH2vJsmO6K2Z/k9oq/kNpb/vJ/l4A/s8UZ3qynYzlPihPlG7Hafejvl5Gsfe7/A/Be3fUMDtZS73l+MRs66bzMsHcyIM3//Vvfe8/8/DVn0KwVjqwuu7dSNozZ51zQe+v777l/dek779Ybq+iyUftvXnQvbf8/84PSz+S+GF9wvenuaftlyDsvnEfHhu13lfOv32+zwc3wGX1Tu/ovo+WhB/Ev/OZe84Ky7yye59/Zh1t7eDU1TyYt3UKnC/fe7Cufyvru+/LjziVN+t6644xThu2RFNyBXpW1fv+CSp/7YBuSQ+nAzS/H9TXU+OIXvb9vmWuw+qR9ljO+/7+l23bA/Vwyg2kEK3r8+Q43wOmp2h5uMeYDdpFepB5AUv/feD2VjO/RznX7pE7o70DX152VOPCdXbORTTtaCKmvp/o5ZFOfd3ft0zb/L9TOZ8J/s/ROj8vop8LPxpngi/RHnJmX38A9vh+Fw1L/Dtp07I9fxPdqwGJ2+T2ZIqU4ZyHsg7B+S3hgFSQeyz7hZLeV3Tx3R8nDwEkAABwQm78qhdISBuY5f7cWRK/h4kXNPFtf+mVmqgVWrbKt/2EK3VZeKykvc460+Fl1iSNMvNnT9CkCVL+yytsAMQT//ib9MoC50ekvHDqtrudsmLXjdTXZjk/FizzBWK87U3Aw+MFX7yeQA3Z3xGut8Nrq7TwdRNkCmm504ZAWyPsOs3U1y70loy629TbBndeN726nPNwqZfsbtRXTZDuMb3i76EU6+xr9bgbCLJMsMvXQyiWd7yZ2uH2JGp4eru6hHuh9fe1BWh+zh/Sk+wf2O4Dm+gfZe7DlYR8YyAkehBregb4HsK6f8w5fywHU1uEmT8Io9vWTv9ynPX0fVvaTSeV8I9JX/kmjU34j3j7bcZoW4MpcBLz19f/ENk+1Ig8hItJ1RPoTRETZHO/oR0UeDjeYCblT/ThifswOizwTVFHXWmYfA/eTPqeSEqSQBvqaa8xokukfP/Dvfj3TwM16t47yXVs6D2ZaDtzTN86735N1Da/BO/Z47nHyw7ah9e+e8Oei0g5br28SZd5kKpo+qpa6jsvMefaY66d8z6o9SDJezBlztMvwvVpKPchqHM/xE3T5T0M9KfhcuvVkB5QAY0pp/a2bq8Kp22Rh9L+B/6BclrD+082HZhJb+QJpv4z6rp3fZ9XsfdcPPXdZ2GJtjNt861r3++/2tyeAi/bLzb88gxpUTDYc/+iGvuwPua+rYvbs0+R4IN50Kxln8R5AB7nfVWX870vRkQCU5tOq//+iTimW5x/kH/htiUmWGLOlf+9nODfyobc95Hz7abVjN5n5jMg/L6uv5wgN8gXSbfnBefqFr2vvN+3ovXyf2aYeijSbu+zJhJIiPuZYB3371md1N/5Xc2rv9MOJekm589dd96WWd/9akTPXbIy0t2JOnyuR23wKJiGzbkfltnrb1Lmhb/IU+/nhT8QEtNrxv0ild33uM9RWKLfSZuel8Yu3PPW+zco7u+qdbXJraMtw5yH9C7OOjvvntP4ASnz+0CknW46y8aco6ZBAAkAAJxUbvozk87M9EZy5vN37vZWJLJgqk1/ZnrGOPMriutPy7J3p3aan9mDE48r5AZSvLpcYwJCKtaOmJ4xXjDF9F7y5hfO96Wha8D+EWdfq1vCQSYbGJs4eUKcuu1WsWnjhKwE6VsM25PIHNumCUwUtIuy4yyZXlzu/GN6pM7UfM65zjY9xXyBp+MRbrcROV9195gCWgM3MNG7gYNIx/Q4CvRGOhH+1DV1/tHs/+a/7yGCG7Q5om+Fv2XsvEyqprpT4NSl9jepz1nklGVT9cT2pvCniTnnZWedXX5izMP1aLk/9XcMjZdGKB7zgMTtqRUtJ9LDKtCGuttbl9r3j3lY5E0FzkucB5B13XtuapTwvu434JunjrWPYzX0nkywnXvMQO8TL0Vh+FvzjVbXPW56OfiWRx7YmhPe2/fArF7OPefcv9EHO3HUc17cdtcKsJhrF3u/mvvbOSfqHNMbz+G/pm67Y7yzz7325kF17QfQH+s3//aZ7o/0svGYevnfs4GUdfbBd1BDyzHib+uOt+Jrm/+Bf7Cck3xvW7X3jaq9r5eezPQqjSw3QYCGpP5zr2H8z27ef37N8/6rfY5tj4nYgKWvh3CtQIc/UJXo/dnbfJHB9550HzQ797U/TVaC94rh//faDb76uMEUG9Da8y8d3fdLQ3/3iG1nOFgSeA8m/Leysff9mZpugl1um/1fuKm7nLj/VprPufCyfzvo/O5lFjoS3W8N4tWj9meYp67PkzrfAz617zcv4OKeExNUcO7h0c7LnTdl1vr3ojHM/RStTzQIau6f2F7v5ks5XXWXSeEZ3scGExv7eeEG1sKf5c79FPki1fF8Tpwqbho7G7hxg0T+IJlPndc9Gvxxz4NzX/U/21ln5s37K15Ayu1B6Hwu+n6/dctLmEmheRBAAgAAJ1U4NVvkVV8qNJvyLvoyvYjqcfZgDT
Y/i3YGeisFuGnb/OUm6hnjS/Xm1+D9PeHeQo/M8dLXhXsRBfVTlglW1RkkC6fOi74e/45TVkPGRjK9kWxqwfjCvauctjUkRV0DeEE48wqfw8d0TWy6P6CV8dLD1P+g0gj2OAr2Rjohzh+akWcxcR/mhvkf2ngPQly9zTfFO0VSw0QfNNXxIK5O3kOTyDepIy/T28H79mTkD203FU8HX1qUJG/5iXL/yI62aaF/PAy3vQ14cGu4A1N7ZXjf4vYeBgd7hNTV3rrVvn+8h+GG+wA9XFZskMBR170XeGDo1qN56lj7OFZD78kE27nH7B3tfRJ+Bb8N3Qh13eP2wW14WeQB7oguWnin98CsQQ+r3IdIvgc7JojhPjjypeCp87yY+yrO+Cmx3/R3tnODRyY9U+x94bYzhv+Bphs88tIQJQoeual2Au/9mPs9bkDCH+BoYDmuRNsmYo5zct5/Ce9tq3HvPy/o46bYjCw3rwQ9H/1qfV5FP7t5//k00/uvvvsgKFHvDvv+qO/9GUe0PonfK4FedVatz5Ewt92xY5jF472vanHrG+e9HPffysbf95G0XoHPvbrLqf1vpQkmmh5L4e3PiJ73RPdbg3j1iA36hNX1eVLne8An3v3mnhPn3t3gvNxUb6anljOdu+lITPq6xjJBoWhdou/vTrrpTpOy03n/BFLd+bb3pYhr/OdFOHDi/B4c+LflOD4nTploT6oNzv0aP32do57rPnpkJ/d++sD5W92cB3OttfdQnH8rPV5PvNj3gfM56dxr9fcQbToEkIAT0K2z94/25g9iB8oDWrbwvQucTME0bt5YPybY4aWFi8OmrIukhjNjKJngSIOCDzat3IpXtdwGUryeNab3S+y6TZpryo2MDRTe1le3QI+m+veP68JJbgAlf8UKadbshMGqy8x4Sb60dN558gJCXhAqmvbPq6cNFplxpyJBpXAwy9bN1+snVFLs/hw80ASwbMAqlg1ghZ6ZH0hhF04TGBUT8AqnHQyzQS1v3KqRmmN7QE0c3M9dDbRaMSlXEo8rEvPApdZD4RPg+3azGf8h4R+y8qW5cAMs9huT7jcp/eMDeN9KTTj+Qr3sN2bDYwY43G8Im28Gm4d8iq1fdEBvt/7eZBMIP3T1HiZF+L856vLaG/uQ0v0msK/nj/eQ5DT1r9WGOtpr5xOy90/k3Jv7wpuqX4PvPeMk17Gh92Si7cwxfSmRwt8+Pu5vHR/3PX6mfuR+s9727rCpeaLnwhtPxBXzgMt8I988iHvB/7C0zvNiHjbW/hazeWDkf0i44Y+251HMg0dXoJ32IWrk293OOXR7HsV7eGuCUl7PhloP/Wrd797Dv3DPELcdkc+yxpRTx7buOv/1ceYX2eO0yveffVDo+5a419PA3ld1sZ9XkZ44pv31NYz3X5O9/2oz979895tzb5p/X+x7IPY6B94fdb0/3R4zvvvBpOOLjIFS13vFKdYpPzJ2oP1SRqQd7rULlxt+H0VTOiYW+77y1TfmPZjw30pnutH3vT1H3zKBbl9vsuN5/0QCPf775ASZekTHvgle+zo/T477PeAwQXvn3n3U+R3SC+p1cs5tjV4ua6IvISUw+l98AVz3PvIFY90vEthA5HF8XoSv5y3pvnvxRM7RKeC+75x2fyt2HEK/+tpkAkzOe/fRsnCQ2TmvZc61DfwbF2a+eBbvM8p+JgV6KzavDiUlJcdSU1PtLIDGuPflGv3pbYJHaH2+d1GS7pncRN/2RYsUCoWUmZlp55qICeCYtHOmR9DdI93AxsV3rnB7FM0x4/WYYMHUxzTxgTXRHjH+eX8Z3pxvnQl0TNVC0/Pl8Wt9qd1CWnqDTV1n+NbXe3yHCbB46eUM03Mn2ksouG6mno3p2RRc77DtDku4f8x58gvvE6mzq3bbw20LC2xv2xkWLCseW76dM/z7+I/lLpe//Jm6cdZjWrgg3D5fWeH6+usza67uK5rjXC/f+Yipb+x5BJqF+UPWP45C7Lz7jXzpBfNNz0TTseWYQaVrlXnY+YPPML1pwg+szEOfT9xBu390vvmj0YyL4JVnHn78VGdEHwYdZz31xw/1LXVy/sg/Yh9UmAdmts5x9rlrhLPdFndD9xv70W9xmvp9Fn3YEUmR47XBzYU/4lDic2D3l69M8xDXpNbw2HqZemxKCnzr1r/dXTO6SovC5fiOHThP5lyb8rqqNLJeMdt6+3oPjZxrMiNJLy+qXd9we803Ot39AuXb8xtpg7225bXbYMRtrzPlLlc05ZC59iOd/SOpx/z3T2/nWuqIU6EE37B1r6Oz3nzb1+zv3zdw78V3MurolRX/nvQfp67tXIG2xV4jey3NtYjzvnDL8G8X55pH7/HagufD3kumx4877yvHnovCWmNFOGLq07D2+utrmGMfcN4PCdoQ4SsrcK3sfWIWm2u6LJjaynDPq/vejp7rCLP/5KPuvRA8V/56JDi2X7xy6to23Isg8h72LY/zGWK457eFv/+C18C3faPuXaeOzue4GZS+1j3nqPM+8x2nzn87jJjrE/f95/93wP9eNDvU2QZHq3j/JRJzb8a2JXyfGJH72fKf19h1/v0ckX+jY65FhG9//70VuVZW4L6LPWZY7D1oBfYNt9NsG/MeDL7/gu+FBt/3lru9/z1qJSwnHv+5dOr9gj4LfA4ExNYjZj62PoF61Hl9vfsz+nmS6D3g3U+1xxyK8s5v4s+1YJ2j9+ddM7qr/8vBsmvt6xev7cs6evP+c+oI3Gcx92jcz4vAtfLOhf93Rk/DPycCda11nETlN6XweY69F2OvZ11tsmVE0tEmvhcSvS9c7rkwvYvNmGx2f//v7LV2ODEEkIATZIJIf3Z+Ifn0cPgfNaDlMj2P/mlEJ4JH7UCzBJAAAC1CnQ8C/GIeCgDNpaH3ZIPv3RYt8cOeWG2jvWjpeP/Fx/sPAJoGASQAANogAkgA0HY1+KEYASScJDzAjo8H2DgZeP/Fx/sPAJoGYyABAAAAAAAAAAAggB5IAAC0QfRAAgAAAAAAwImgBxIAAAAAAAAAAAACCCABAAAAAAAAAAAggAASAAAAAAAAAAAAAgggAQAAAAAAAAAAIIAAEgAAAAAAAAAAAAIIIAEAAABoe97Zp3P+7UPnVa7ccrusBSt/rdzW177++LFd04TKK3Vjs5yPauU+4Ku78/rNO3YVAAAAgFaLABIAAACANqZauS8f0eRJ3bXnl701pbdd3EKZ4NHIZR31wi/7OPX1Xi/os+YJIjU5Ezz6RLekd43Ufc8vu0qLfEGkRgSu3EDaA5VqyhhXc5QJAAAAtAcEkAAAAAC0ScPSk+1Uy/bB3mPSiCSNtvPG6EmdNXlLjTbY+SbRu6cWNnlA7Yg+KJfuGnmmnTfO1PRJHXT/ptYQAAMAAACQSIeSkpJjqampdhYAALQFoVBImZmZdg4A2heTTu2WcHeTEV2151/OtL18jnnLenfWpjt7yo2jmFR3L0t36YjuL++gR+70Aiwb/vihvrXF3TpShuEuVyfdtcXZ3l0S3cdlets8cFhOkY7gukRlRnsgnRUII
gV9rN/822f2mE59Z/TRj843U14PoA/SO+l+p0467zRN3n5Mk2OPK+d4kz536lbjW5eozHCd4pyvWnw9kGx7gvzHCJ8Pu0+kS1Anr+3mWixy2mC4xzxdq53tXh7ZXQuv8IKBkbaYYwXOdbD+EbXKdNrh36+3cy2da19ojjHiUKA8T8z1BQAAANoReiABAAAAaFOm3Nldj/SWl8LOBBre2RdIEfdC+mGN9KeHKz/ibGzWeYECEzz5VllnbXK3d8oq+0w3vlZtN3Zs+Vz977RljTimWxaF06N9rN88cFjDZnjr9szoqFse2Of2IqqrzN5XnOHU94i+FR5DqFa6NRNw+UyFbko+Z/87O6twUTAl3P1lp3ll/6CbJvc+ppe3hOv7sVZvie0hZNRRZn3nKyBZU2aY3lKfRcY/CpwrnakfOWVP9gViyl874Et5Z67VET1q9jn/LG2a1KGegFWYU/9FvnPt1t871wG1yvT2U7jdk51zFz6Pbg8tW57zemGEs2xEF4JHAAAAaLcIIAEAAABo0zZsMuMhdY307qmdHq6TxkV6rlRr9aZjumtyOICRrCmTO+nlTYeiQR1fUKH/2R28CeOdGt3vL+v8s7TH7VVUX5nOvA1ImUDI5PLDGmmCMeGgTfkhvVzeSTfZXjgm0HHTCH+QSJo88vRI2eNGdoiWXf65Cnt31vTYnjlumR00eUS0zHB6u/rPVwxf4MUEa15e9okbSIqMgRSj9xW9fb2VvBR4x+v+l22wza1DXT24rNh2n9/VDTbGigT84vaqAgAAANoHAkgAAAAA2rBqlZYpEtTweviYNGWfqzRu4MILaNy/yG5rXiYFWvlRfWC3SKS87HOp92nqb+ejGlFmOBhjAklbPvOCMM52LztlRHooOS+TCu/lvTY1W4zeV3TRXeU1Wu0cs3xLjRQJLvm4ZXZURq0VdZ8vk0Iuujy2p5QNDtlA0v3xegQZJq1cuIx/O6jCOAGc+pmgW3c9IhtsS1CfWhK22yfcA6veXlAAAABA20YACQAAAEAblqyMdJvOzvaS8V6JxrXppP7OcjOeTnD7+nu39E7vGD8oVGeZZoygYDo6V++OGmYnTVBqsjtOUMz+CXvHnKlxbg+lj7V6k9P2cG8bP7fMeEG0us/X6H/xLTMBFhMMihdIMucirmrlvmx6OIXLP8Opx/Hy9dz6ZVfdVX5YPw2kz4sjYbstMz7Sos/1yJ0N6M0EAAAAtHEEkAAAAAC0aaNHdtLLyz6L9IYx6cnO+bcEvWNsCrhIajSH2+umIb1bzk/SXTqi1eHUbSYY4QaH6irTC/aEx0oKK3/tYDQdXu/TNTk8TpDLBJ1ixxoKCrf5lvQEY/i4ZQbHSjJlmh5PjTpfps0m5V5gjCQvSKQRSQmDMJHeU+84dUx4Yr1gVjTVnzeekydaX48XpBuWHidY5hfb7sDxw2NYJQouAgAAAO1Lh5KSkmOpqal2FgAAtAWhUEiZmZl2DgDam2rlPvCJXh7ZXQvtuEEmCDJy2TF32vkzSI/caYMEpgfNIumFmB5GJsBj0sR5TO8fb727XF0jvX/ccjclaVM43ZkJGrkp3zym19GP7PhDico0gvVz9O4cLdNlAiaf6X47pxHhOtRuq8fbXr7je3Wr0eRw22PKNL2C6j1fcXl18AeC/GX517vnQ+ac2wCS044X9Fn0nEbOnz0/gfPZSY9M+ly37O0Ss60Vc04+mGzbXleZIzrrkTJn2jl/v9CB4DWw/NcQAAAAaE8IIAEA0AYRQAIAoCFigk0AAAAAIkhhBwAAAABoJ0yvK9+YU24Kuw5u+jsAAAAAQfRAAgCgDaIHEgAA8cWmCyRFHQAAABAfASQAANogAkgAAAAAAAA4EaSwAwAAAAAAAAAAQAABJAAAAAAAAAAAAASQwg4AgDaIFHYA2rPKyko7BQAAAADtR8+ePe1U06AHEgAAAAAAAAAAAAIIIAEAAAAAAAAAACCAABIAAAAAAAAAAAACCCABAAAAAAAAAAAggAASAAAAAAAAAAAAAgggAQAAAAAAAAAAIIAAEgAAAIA2r2rTIj38ZK62ltsFAAAAAIA6EUACAAAA0MZVafvaXrrq+ika3tsuAgAAAADUiQASAAAAAAAAAAAAAgggAQAAAGjDalRdVaZ/dE9TL7sEAAAAAFA/AkgAAAAA2q7qUm3IX6uqPulKtosAAAAAAPUjgAQAAACg7UrO0vhvT9GQnVtVahcBAAAAAOpHAAkAAAAAAAAAAAABBJAAAAAAtAM19icAAAAAoCEIIAEAAABo41J03qU1ypu3WBvL7SIAAAAAQJ06lJSUHEtNTbWzAACgLQiFQsrMzLRzANC+VFZW2ikAAAAAaD969uxpp5oGPZAAAAAAAAAAAAAQQAAJAAAAAAAAAAAAAQSQAAAAAAAAAAAAEEAACQAAAAAAAAAAAAEEkAAAAAAAAAAAABBAAAkAAAAAjkuVCn43XwWVxcqbmadiu7TxQk45P1NeuIDPq1RaHFKNnTWq9xYrVG1nmkJxnn72uwLnyMeradpe/OJMp93hsuzC5nbCbY+v+MWT2IbjdRxtr960WLnb/XdjAtUhFZdURe/bygLNf7Fxd0b1njI15javKt2usgN2xjDHdNpX5bRzZiOPDQAAgNoIIAEAAADAcUtR19OTlJRhZ49LmoZdc7PGD7SzHQ/r/Rdf1buRJ+khbViyVfuS7GxTGDheN18zzDnyiWiKtmcoKSlJXbvb2ZOhSdreSjW67c69t6GXRp9X181XrdLXFujhP/xJ//OX7Y0KAAVVqfCFDSqzc3Uq36iceQu0+LkHteFDuyyse1f3njqh2xIAAACujrfffvv/7tq1q50FAABtQXV1tXr27GnnAKB9OXjwoJ1qbh3VPa2P0pxX34E9lNKju8xj9urCXM3PydPKVWu0ZU93ZQ/toy6mx87zb2nPB6/p2edeVcH+vhqdneqUUKr8Xy3Qn1cuU/XASXIWObop+Ui+3vpkuIaf7ZRYvlHPVw/WVUOclQe2K29Rnt5+d41W/f10W7bX++XtvR9q+Yt/1vJ1B9R/1GCldKxR6Rt/1KKXXnPqskVHB1ykAWdKVZtzNO+pV5W/s5suuqifu79RU5qvPz75gl5btVI7PuuvoZkp6lhZoNw3d6l4+fP687ICVfYd7dSxo7N1/LYb1YWLdddj+zR8/CDVFxfqkpKpvr17KS29j1J7pKiLKVohbX1usZa8vFwrN5Sr+9Ah6tPZqd/elXr2T6u0eesybaxy6jfQqZ+Klffbt/VhaLme//NyFVT31+isZO147gFt6DZeg1Oc4g5s1KLFZcoema7quG031+ZN7So2ZfivTY2KX/l/emHTLq1Z/JCeLTik3sOHqk/4hMWoLHpb1RkXqd9pTp1+94IOZF2g9OT418AEXba/9ITy1m3XmtXbdfpgp1znz/Kqgvl6dudBbX/xeb381+3q+kWnjE9W6uEc53w69TfnuPSVh7W28xinbTUqW/Wsnlq1WYWvblRlv6EalOKewLgadd3tOpWu0rKkUboso5uz4Xbl3r9B3cY795az
qnrTIi3em60L0pOV1GeIxn+xs/72jjQ8XPbBPXp72/v66K2/KDdw78Rp++GtyvltjlaW/F073tmktW/tca67d67L3lygx1907gXn/bTjsL3unVI06JIx6rPvBVWk/ZN93zg6dVef9DT16t1XmT1TlHJGXYEvAACAtqepYz0EkAAAaIMIIAFoz05mAKmLGzgJ//SYh+ljLhmncWPHKXXHsyrtc5H6dT2kPfnLdfjymzT1ijHqsSVHJWZ5cooGOdudn1ys0uSLIg/Cu3WpUf5b1Ro+vI/2b3pJ1ZkmuFSj7Xmvq8u3r9ekC519Q0u0VmOU3cMEL57SppTpmnX1Fco++qo2HjFllWhlbgddcet1mjQuHLiQupw91KlbXx1cX60B4Yf9Ndv1wlMH9dVbpuqKsRery4Y/qihtjAZ03KP8VR115U3X6IohHfT6phpd9AVTyfhtN5K69VB6xiANOKtLNBCRQNIZXtAo/NNV+Y6e3T5IM/+/qzXhEi94ZIJKa5aU6vwbp2j8yIt12ppntDdjtNK7VKpoySad+d1ZuuaKbHV4ZaNqLh6i7G7VeulvnTVmcIqqty1TUfpXNPrspPhtl7k2b6rjlTc5ZZynDq9tdsrIVmrlei0pHq4brxmvizKO6u3u4/SdIYlDYm4AqU8fbct5U2dPnaFRvczS+NegZnueXu96ta6fNFoX9S/XknUdNSY7RYf2vK3XK0dq5r98XeN771Lu7r7O+e6nzpuXq+LcC5TeuVRr86QhkwYrpXyNnnr/fM2aMl4XjD5Na3L3asCo9EhgKFajrns3d6W2v/G20saNcY7rzHbspW6fvqR33eBVtQpfKdI5XxmtPs7FT+rs/M8EjGIDSBuS9NUbTdk9tHlpiXpf7LQlXtuHZWvo2PPV7T3poh99X1eNjQbqug8cbd9P2Tr0krm+zrXplKSk08w5jwkgdeziBY3CPwEAANqZpo71kMIOAAAAAJpQzd4C5T6xSIueXKRlf7MLXRnq6wYVkpSeIYWq3IXx9T5Poyq2q7QmpO1bMjQkyyysUNn2Uq19zpb9rnOsT8IJwwZrRLaXmCztS7N1lbt9li771mEt+/WDWvTiRpXVlVvsQJXKMvra1Gamfkmq+sSdkfr18pbXFw0KOyNDw89NCQSVGqXnKE3J2q5Fv5qvxW9sV9XnzrJDZSp9b6tecdq96MnFWlt5RJ+Gx745d4TOc89rmi65+Sqn1Y6MURpVvFXFJgXbmhSNHppsltYhQ73cMjpH/0o+o5fSS7Zqa0VIWwt36rxz3A3qUKN3X12qioum2uCREf8aVOxxru2aHPc6LvqLcyEP7Y+kfhv2xSzv3J07RbMvMX19kjXsSylav61KNds3qPjCUW56tpoPS/Xetle8MpauVVXNp41PH1fXdT9QqA0arWFn2HlHxqhRKt5SLJVv0NqU0RpS32nt55Rt7pukdA04LaSPncm62h5P9Y5lWvx7c91z9dc9diEAAABOCgJIAAAAANBkqlSYt1NZ35uhGdfP0GXn2cUBNXq/WEoL95qIK03nXVimrWu2692MIV5QRMlK6Z2hS6d5Zc/44W2aMqzuJ/jJ507SjB//RFPPfV+L8kvt0jhO76qUiqrIg/yqyk5KOd5xiQ6FVLyn0aEMnySlXzJVs388W1eevlxLN1U59evhtP1CfdO023nNunm2xp9jN4/LOX8jSrX1jXf0btZo1TmETyJlO1U2bIh6VOxTj4m3a0qgkGoVF2xU2SE760rSkMkzNHjDb5VXXGOXxb8GySlnKePL13nX8fuzddvVw52rm1hS1hClrd+oVX+r1pgLbLine4rSR3/TK+P6WZr9w/FKd9cY8eoXRx3XPbR5vXpdeF4wENj7PA0r3ar8wneVFbuuLofe184ufd361d326HnzlGr5C5112Q9MG6doTJ3XvG41pQVaWXwi9yUAAED7Qwo7AADaIFLYAWjPTl4Ku3i6qKYyX395q0ylGzfqw6Nd1CN7uJfCbv2ftLLkc+3e9Lr2fOFafT2rm1Rlxn55UssLS/X3v2/Vpj3ddf753rhGJo3dm4+t0qBvf8sby8dZ2qvXPuU9/pqKP6lQ0dpC1WR6Kd4i4+/4/7QzZT/1pnZ/uFuF71boC2PGumPklL7xoBbkFurvJe/o3Xc26HDfcRqU1kfpny/Twrwilb/7pranTNbkYSnqaNKQlSZ7aev803Wofvd53Z/zcYPGQIqrJF/z8zbro39sV+H7XTR67Gj16Zqinl3e1pKnN6usskyFaz5Q9xGDlKJKFQXS0UV1S+2ojQve1uBrJ9mUbIrf9h7m2pQq+aJspZp0duHppGoV/eFlvXvoQ21fvV5FR/toSIYdH6hio5bc/7o6XDJeg2xaOvcaDJygS8Zkq/KFZ7Wn/wXqVxP/GnQ5K0378h7Ta+8dUMWOAhUeHKwhZye5Kez8qQwjTPq4j5boyZrLdN2IXl4devRU8tolWrK5TFX/KNSq3d11wUD3Rolbv0Zdd5Vq1atJGjVxgOyps7opLWmjHl03WFMnhddVaeuSeXpyxVaVvl+koncqvPGLOnykbXn5Kvp0tzbn79GQKZOUkezcxQnabu7v7p0KtOj5IlWX7FJ1v2z16ZKkgyXP6tXtzv2+rlhVnyepn7k2zj3y4GPPqLCoQjt2FGnT4f4al2nbnsCB7S/o/lUdNSHOvQIAANBWNHWsp0NJScmx1NS6/wAAAACtSygUUmZmpp0DgPalsrLSTrUkVSr43Ur1+qFNsYYWr2r9AuWd/n1dN8wEN0JaOa9AfW9tH9evZnuucj6bpOtG1pejrvWodtq08IPRuu0KkwAQAACgbWrqLxOTwg4AAAAAgBjJvc9VzauLlfPKMuU8sUilF17ijj3UHiSdN6VtBY+KV6qgarRuJHgEAADQKPRAAgCgDaIHEoD2rGX2QAIAAACA5kUPJAAAAAAAAAAAADQrAkgAAAAAAAAAAAAIIIAEAAAAAAAAAACAAAJIAAAAAAAAAAAACCCABAAAAAAAAAAAgAACSAAAAAAAAAAAAAgggAQAAAAAx6VKBb+br4LKYuXNzFOxXVr8ollmZ2KE3pqvn70Y3rKBivM0v6DKm64s0PzfFajKWTazseWExS2jRlUl27V9e/hVpmq7JrEalb6xSA/+6kE9+NuVKrNLW4yqUtuWYoUO2WVN5XPnmt/jXOcKO99o8e8dAAAAoCUhgAQAAAAAxy1FXU9PUlKGna1H2ohrdfOXs+zccereVUlJSWrgIeNLVMbetXpxe/2hI1dVoV75aIxu//FP9JObxyvdLm4Jakry9PCL70s9zlJKUo2O1NgVTaVjlsbPvlbDetn549K4ewcAAAA42Trefvvt/7tr1652FgAAtAXV1dXq2bOnnQOA9uXgwYN2qrl1VPe0PkpzXn0H9lBKj+5KcpZWFr2t6oyL1O+0YuX97gUdyLpA6clS6RsPasELb2rZpwM06QupXhGVBcp9c5eKlz+vPy8rUGXf0cpO7eisqNb2F5/Q4leXq6CwWB0HjNNF/bpInbqrT3qaevXuq8yeKUo5wxz
RU/bGf+k/16Zo/LA+bj0SiltGR3Xp0Uu9Pt+tLR0v0Ncv6OWVkaB+Nc6/M4c+fl8b/5Gk4QO6q6ZGSkpy6n1gu/IW5entd9do1d9PV/bQPnJq7fbKenvvh1r+4p+1fN0B9R81WCkda1S26lk9tWqzCl/dqMp+QzUoxSmjOE+Lt37otP95vfDGdnUb4p0/HSpV/h8X6Jnla7R+w2H1vWSQUhS/jAM7V2hXr8v11exUdU/tpe6dTWMcjajf4bce1tP7huuCdHMmSrXs12vVeYxT76qtyvntk3p1xQ51G+Vc58if0yFtffoPevKVFU799ij5vKHq0yVBGxPcOwAAAMCJaOpYDwEkAADaIAJIANqzkxlA6uI++A//9LgBpD59tC3nTZ09dYZG2V4qKZnjNO6L3VRcmqyLwgGkg3uUv6qjrrzpGl0xpINe31TjrqvenKNlZ07TTf80QWPOrtbb1QO8AFLHLl7AJ/zTp0vPvup39gD1S6knFFFHGU7lo8cy4tavi0oKVmpz0U4VlVbq6KcVKt3XUX0zuqs473V1+fb1mnThReoXWqK1GqPsHqbYp7QpZbpmXX2Fso++qo1HLlL2kTV66v3zNWvKeF0w+jStyd2rAaPS1cWpQ25ppmb969f1lbN3KfcffZ36dNT2F/6fPhx7l74/eZzGuMEjR3n8MlLO6qtPVizU81uOKDm9j/p0M+2s0fZG1G/k4M7avLxC2SPTlVS6Vi+dNkSTspyjdumjoWPHqW+1DRTaP6dDq36vv6Zdr5u/c5lTPxM8chYmqF+XBPcOAAAAcCKaOtZDCjsAAAAAaFI1evfVpaq4aGokeFSnfr2UZn6ajilW2QdVGtLfdLtpuKSeWRqe0bh9GqRW/ZKVdekkTZo4QmlZYzTpa870pVnO0gqVbS/V2ucWadGTi7TsXedMfBJOhzdYI7LdUpT2pdm6KstZ92Gp3tv2irvtoqVrVVXzaWTcpbR02wPqtHBopVpVHw7TkIF21kpYxunpGn/9v+u2q7P02eu/1YL1ZgypxtVPZwzTmJT1Kqys0fb1xRo1ou5ccx9XJmmICTD51NVGAAAAoKUjgAQAAAAATSpJQybP0OANv1Ve8fENvnPmmZ/qH5XedKi81Juoz4EyFVc09WA/jZGslN4ZunTaDM243nn98DZNGZY4oJXUPUXpo7/pbXv9LM3+YV3jKCWpa/di/aPczlr1lnFGhi756hhVlVU4M42rnzlm1hfTtGHjKr1bPUaje9vFCSR1qdL7ZcHz37g2WhXbtXJzSKfySgIAAAAGKewAAGiDSGEHoD07eSns4nNT2A2coEvGZKvyhWe1p/8F6pdcpa1L5unJFVtVumOHtm4uU/ehQ9Xn2B69HU5pdzA63S2lqwqeXqainZtVeUaGOnbqp6HhtHIJlK18RL/e2rP+MZDisvVb9XeVvvuutm48rP5jBynFVyd//Vyx8+qiXr32Ke/x11T8SYWK1haqJnOI+nS258SX7s3Vo6eS1y7REudcVP2jUKt2d9cFA1OCafQi093Vp29n5S/K0bZQmQr/ulc9Rjv1S1CGO97Um3t0YOfbyi04pElfH68+yY2sn6Njr276aMmTqpl4nYan2S5YJfl68LFnVFhUqnfe3aoNB/trXGaKUvr11e5nntDy0gMq3rBRnw90ru/ZCdpYh5qytXryV3s04J+yFT6zAAAAQEM0daynQ0lJybHUVH4tBQCgLQmFQsrMzLRzANC+VFbarjtAa1SxUgteSdE///NwNUNCQgAAALRhTf1lYlLYAQAAAADQEpRv1bLtKZpyLcEjAAAAnHoEkAAAAAAAaAl6D9ekS4cr7XQ7DwAAAJxCBJAAAAAAAAAAAAAQQAAJAAAAAAAAAAAAAQSQAAAAAAAAAAAAEEAACQAAAAAAAAAAAAEEkAAAAAAAAAAAABBAAAkAAAAAjkuVCn43XwWVxcqbmadiu7TRDoVUVlljZxqu+EVzbDuTSGPKLs7TzBeLVVUwX/MLquzC41BTqmXzFihnVbGq7SIAAAAArQ8BJAAAAAA4binqenqSkjLs7PHYXRcXy5QAAEobSURBVKDcomYKtTSy7IwuTltOT7Fzx6l0q4ovnqapl2Yp2S4CAAAA0Pp0KCkpOZaammpnAQBAWxAKhZSZmWnnAKB9qaysr1tOU6lRVUmFkgb2Uo37M13JKlbeExtU81Gxai4Yo5qCtUq+9t805dwk1exdqdwXSlWT9A/VDJqm6yZkqOyNB7W0YJ/KDp6ldBO3uWCafnJ5hlS9VbmP5Wvn4cNS8gBNmj5Vw3s66w9sV96flqn4E+lwVZK+/L9m6xJnedmbC5SztUpm87TRdZddXZirhSt2utt2y5ik664dLjdkdKBMpTW9lJFU4f3smWSWOsqUP/dB/eOb/1fXDWtASKg4T/Mrxmv2JScYiAIAAADQKD17mj8amg4BJAAA2iACSADas5MXQIqnWHm//ocu+efOevrFJM34do2WF52nqy45rJW/36gBP5ikDNVo65+e0JFvzNIoE2OpL+CyI1fzP7rMWZ+krf+ToyPfnqFRZ3gp7Cou9QJIUSEV/LZAvW6+Sllmtp6yi593yvhKbBmxalS1411VpQ9XhnPcOh2qVtnb/6MN6bN0lVsBAAAAACdLUweQSGEHAAAAAE0pq6/SOjo/e6VEU7gdKlPpe1v1ypOLtOjJxVpbeUSfHrDr4nG2L3jObOu8Xt1uFzplVA1R/zhBnOody7T492b7XP11j12YQM3eAuU+4ZW97G92YZ2SlHJuA4JHjtC7K7VyRzdl8B1FAAAAoNUjgAQAAAAAze30HkrpfaG+ef0MzXBes26erfHn2HXG0cN2wlNV+KJ2nnudu+2My8+zS89UysF/6GN3OqSK3e6Eo1TLX+isy35gyp6iMf5yjUDZVSrM26ms73n1uCxcdD2q9xQrdMjO1CHtgkma+uUUrSqqsksAAAAAtFYEkAAAAACg2WXokgkh5cxbrNxXlinnyXyV2jXKGK6sgj9p0fO5WvymtzSl97mqeMPZ9ulFWry52hujSGkaflmV8p7I0eInNurwoF7uUqmXMtLXOts6+z/hlOv/K69W2SnqNahC+UtzlfPEYm2tbsg4RWUq+MNv9WpRtZ0HAAAA0B4wBhIAAG0QYyABaM9O7RhIcMddKr9Es7+UZhcAAAAAOBkYAwkAAAAA0HJlDNeQwkVa8Fqx6LMEAAAAtF70QAIAoA2iBxKA9oweSAAAAADaI3ogAQAAAAAAAAAAoFkRQAIAAAAAAAAAAEAAASQAAAAAAAAAAAAEEEACAAAAAAAAAABAAAEkAAAAAAAAAAAABBBAAgAAAAAAAAAAQAABJAAAAABocapU8Lv5KqgsVt7MPBXbpc2rRqE9Vc7/G+hAmcoO2Gm/4jz97HcFCtnZJuOUO/PFYlUVzNf8giq78DjUlGrZvAXKWVWsarsIAAAAQG0EkAAAAACgRUpR19OTlJRhZ5tdqQpe2N7goErVtlxt+NDO+A0cr5uvGaY0O9uUMro45+P0FDt3nEq3qvjiaZp6aZaS7S
IAAAAAtXUoKSk5lpqaamcBAEBbEAqFlJmZaecAoH2prKy0U61ZjapKKpQ0sJdq3J/pbrCjujBXC1fs1OHDUreMSbru2uFKMb2Vnl+rw0nva+Pf9klfnKrZ38hSUmWBcgsOK6l0o96tkIZMm62rspKkiq3KfS5fOz857JZxrVNGWkm+Hnx2rfbt+VRnnWMCNBdq2o8nKkNlWrkgRxudbQ8rTRdee50mZlRr65IFWva3Mu3rkq6zTpcGXD5LUy9warI5RwveKNa+5Mv07z+8xKmbp6Y0X4v/vFUhp+JpF0zTdZdnJK6fQtr6XK7yd1XpcOfBmvT9KRp+hrP4QJlKa3opI6nC+9nTbGuUKX/ug/rHN/+vrhvWgJBQcZ7mV4zX7EtOMBAFAAAAtDA9e/a0U02DABIAAG0QASQA7VnbCCDVr/j5+ar4ymxd0tOku1ssXetM9wqp4LcF6nXzVcqqLND8Jc7iH16itApn+q1emv3NM7Xyt/lK+8FUnXe68+/Fmw8rP222pn7RBGOKlfe7Co33BX4CImVkubMmldzKXibo4876xJRTs125C0K69ObxSlONtj89X6Gv3KbxSfHq5xRm6v2XrprxveEN7CFUo6od76oqfbgyTKCpLoeqVfb2/2hD+qw49QYAAABat6YOIJHCDgAAAABaiZq9Bcp9YpEWPblIy/5mF7oy1KuX+dk5+Fdev15eKrmO7pzjY1WdNkDpp3tzaf2yVPFJXUnrqlX8ymItcI636Pm/qtQubZQDVSrL6GtT2iUpPSNJVZ+4M3Hq5+g5SlOytmvRr+Zr8RvbVfW5XZ5QklLObUDwyBF6d6VW7uimDL5DCQAAANSLABIAAAAAtApVKszbqazvzdCM62fosvPs4kZJUtKh/U5JnuqPq5Se4uvnc1Q6bCddJcv1XJfLNMs53oxvj1HscEw1zvb1Or2rUiqqImMrVVV2Ukp3OxNXktIvmarZP56tK09frqWbwrVNrHpPsUKH7Ewd0i6YpKlfTtGqovrLBAAAANo7AkgAAAAA0CqkqNegCuUvzVXOE4u1tfp4xvDJ0GXfrFHuvMXKfXqRFu0cpkvPC48llKHhWWv1pydzlfs/K73eRr0y1HeNc7znF2tRfrD/UUr2GFU9PV85r+Qob7MXkCl940E9+Kscrdrxohb86kHllzgLk4frymFb9dsFOU6587U86Wsa09vdPL6SfM3/n1wteyVXr+7opTFZ9bWzTAV/+K1eLaqrJxUAAACAxmIMJAAA2iDGQALQnrWXMZBwnIrzNL/8Es3+kpdUDwAAAGgrGAMJAAAAAIDjlTFcQwoXacFrxZG0egAAAABqowcSAABtED2QALRn9EACAAAA0B7RAwkAAAAAAAAAAADNigASAAAAAAAAAAAAAgggAQAAAAAAAAAAIIAAEgAAAAAAAAAAAAIIIAEAAAAAAAAAACCAABIAAAAAAAAAAAACCCABAAAAQHtSWaD5vytQVXGeZr5YbBfWJaSC3/1MeYk2dcqZX1BlZ06FeuoXo3pPmarttF/orfn6WYPORx2cc/Ez59yG7GxDVG9arNztNXbO0egySrXsnvkqqLCzjVaj0J4q5/9+Vc45dcqsLFbezDwd71kpfnGmc13CZdmFrhqVvrFID/7qQT3425Uqs0vjKX3D2ebBn+mOE7k29l6vKph/iu9VAACA1oUAEgAAAAC0N927KikpSRl2tm5pGnbNzRo/0M62OI2pX5UKX9gQN2CRNuJa3fzlLDt3nAaO183XDHNq1FAhbdjQS6PPS7LzjkaXkaExs6/VsF52ttFKVfDC9jhBtRR1PT1JSQ27SRLIcO+zrt3tbFhVoV75aIxu//FP9JObxyvdLo4n43Jnmx98U4Pt/PHK6OK05fQUOwcAAICG6FBSUnIsNTXVzgIAgLYgFAopMzPTzgFA+1JZGejqgFg1VSrdl6SMs2q8n2cne4tL87X4z1sVOnxY3S78Z83+inmsX6r8Xy3V+qoyDfnXh3RVJL5Sre0v/o+WlVRJB6uU9OWfaPYlKdKB7cr701pVJVer6vRLdd21w+U+sj/klPMnp5yPpM6dL9SUWycqQ2VauSBHGz85rMNK04XXXqeJGUleD6m/VGvYaVu18YPDSvnqLM0YmejBf6L6hbT1uVzl76rS4c6DNen7UzT8863K+f0ybd2zT93OOUudNECTfjBVw52iTS+XpRurVHbe9/XQN8OFmF4yi5X7t5AOH/bVL4GqzTla8Eax9iVfpn//4SVeu+sro3SZFpSO0qxLvXBR/DKqVPD8Wh1Oel8b/7ZP+uJUzf5GlpKqvPa8b7bY003f/I/ZuqSn2d65Ni/9j9ZWdlP1J0m6dLrTRne5c06eflrLSj91rsEATbzeWV6ZrwefXat9ez7VWeeYo12oaT8216ZGVSUVShrYSzXuz3R5d4lTeuFi/ezFvvrJnIl1Bn6M6r2lqjkrQ0n7vJ8pTtNrqqtVU7lBiwpSNONrJiyUpOTkJKfcXC1csdM5T1K3jEnRe8cw98SqXpoduTZxru8ZzuJE99+BMpXW9FJGUoX3s2fi6wgAANCa9ezp/uLXZAggAQDQBhFAAtCeEUA6PsUvPqjSUT/RxHPsAh+T+mtlr9mRAE315sXKOTZFM0Ymu+nB5leM1+xLkrX9uRxVX3mdRp0hla14WBsG3qarBtY4y3+pnaP+3Zn29q+lokDz37IBAjfFXkiTfnyVsk4vVu7vKnRZJJgSX2z9vCBUV8343vBI4MNj0qmtVK8fOmXbJRExQYqa7bla+OGlmv2VNOnQduU4dZp4+/h6egYVK8+p73hb37rLMOcler6igmV4dV4sXTtbl/QKqeC3Bep1s7/+dv10L4Bkjpnz2SRdZ67N3nw9vCFLt30jQ6FVD+ulLjM048LYMxl7vHocKNXWshQNOTdFjQ/DVKt41UrtLH9fa0u7acx5vaQzBmv8pVmB61T8/HxVfCUcEHPEBpDiXt/g+Yzef3Y1AABAO9DUASRS2AEAAAAAlDXxah1+9UE9+ESeNu6JN0pQVNkHVRrSPxiakSpUtr1Ua59bpEVPLtKyd6WaT0w51ar6cJiG1HqQX63iVxZrgbPtouf/qlK71DVsiLJONxNZmtLQwIZfz1GakrVdi341X4vf2K6qz+3yRqiuKlNGPxsuOj1dA06v0sfeXIPVWcaBQm3QaA0LBI8SyVAvN0Vd53r/iq/Ys12la3Lca7DoL85FOLTfOdPSx5VJGpLV6DNZ2xkZGn5cwSMjWVmXTtKkiSOUljVGk77mTNvgUc3eAuU+Ye+dv3lbJxT3+ia6/wAAAHC8CCABAAAAAKQzsjTp+z/RT6YN1vt/WB4M6MQ488xP9Q/b0StUHt4yWSm9M3TptBmacb3z+uFtmjLMhAbMGDjF+ke5t1VEyXI91+UyzTLbfnuMGjLUTs3ejSrY0ZCgQJLSL5mq2T+erStPX66lm6rscqPG/qybGS+n4uPwsaq0PylFZ9o5N/hVsFFlh+xsAnWVEdq8Xr0uPO84AzGJJaecpYwvX+ddg+/P1m1Xe710krpU6
f2yBG0/Kh22k/U6FFJxPQHGxqtSYd5OZX3Pu3cuO88uTije9U10/wEAAOB4dbz99tv/d9euXe0sAABoC6qrq5u82zIAtBYHDx60U2i4Km1dskRv7v5Qu9/ZrorzxmjswBR1NOPs/PZJLS8s1d//vlWb9nTX+ef3UWpKVxU8vUxFOzer8owMdezUT0P7pahXr33Ke/w1FX9SoaK1harJHKI+nZPUp29n5S/K0bZQmQr/ulc9Rg9SSqeDKnnmVW3fV6S3i6tU45Rx0RdSnQu4R2+XJnvTMUpef1BPvJ+h8cP6eIGXBPXrUpKv+Xmb9dE/tqvw/S4aPXa0+rh/9nZR904FWvR8kapLdqm6X7b6dDFtn6cnV2xV6Y4d2rq5TN2HDlW/gek6+tpCvVhUru1/3a4eX5+soSkdTSFSxUYtuf91dbhkvAbZiJAZR2lBbqH+XvKO3n1ngw73Hafs8xKVUapVryZp1MQB6ubt7opXxqAeh7RnfamSL8pWqvzTYWZZoXT+RerntLHLWWnal/eYXnvvgCp2FKjw4GANOTtJKf36avczT2h56QEVb9iozwcOddpu9u+uLh/l6uk1ZSorrFC34QPq7PFV/e7zuj/nYw0fP8jZ8zjVusZdVFOZr7+8VabSjRv14dEu6pE93GlP/GvTZ2+869slwf1nDwEAANAONHWshzGQAABogxgDCUB7xhhIaOkC4xSdMDMuUr5SZk3VeU3dnQkAAACtSlN/mZgAEgAAbRABJADtGQEktAul+Xr4jZDS9b5qhs3QdRfasZYAAADQbhFAAgAA9SKABKA9I4AEAAAAoD1q6gDSafYnAAAAAAAAAAAA4CKABAAAAAAAAAAAgAACSAAAAAAAAAAAAAgggAQAAAAAAAAAAIAAAkgAAAAAAAAAAAAIIIAEAAAAAC1OSFufX6SHn9yoKrsEAAAAAE4mAkgAAAAA0OKkafi3Z+iqM9dqe6VdBAAAAAAnEQEkAAAAAGipOtqfAAAAAHCSEUACAAAAgBbqzJ6d9I+yKlXX2AUAAAAAcJIQQAIAAACAFqpHxgBVFSzXhtJquwQAAAAATg4CSAAAAADQQpUVvq8h356i8VnJdomjYrtWbg6JTkkAAAAAmhMBJAAAAABooWqOHrFTUTVVO7V8XoFK7TwAAAAANAcCSAAAAADQUn1uf/okpaQofWKG0u08AAAAADQHAkgAAAAA0OKEtPF/HlbewUt1Xk+7yCjfqmXbUzTl2uHyJbUDAAAAgCbXoaSk5FhqaqqdBQAAbUEoFFJmZqadA4D2pbKy0k4BAAAAQPvRs6f/22cnjh5IAAAAAAAAAAAACCCABAAAAAAAAAAAgAACSAAAAAAAAAAAAAgggAQAAAAAAAAAAIAAAkgAAAAAAAAAAAAIIIAEAAAAAAAAAACAgI633377/+7ataudBQAAbUF1dbV69uxp5wCgfTl48KCdas2qVPC7x7VncKo237JGHf8pW6l2TfOpUWjPAXU+s4s62iWekFOX/9Lm1MuUfTyVqKlS6Y4SlVVUqMK+arr0UvfOZuUJll2XA2UqO9zdHqcpNcW1SVRGtULFxfqgvFpJPVPUJfKVzwTLq0q1vaRMVUeTldotyS50ti4vVvEH5apO6qWULnZhQvHLblQZnzvXuMi5xlXHlJzaTV5NqlW2vVh77DWv7piilK4dVb13u4p3R++FigNJSulh7rlEbXdUh1S854i6u9vVJUEZceuXQMz9Gr1Xa5zTvUMlZVU6lpwq73QH21hR4Rw3xTmuU8niF2dqTacJqlxqrvNF6sdjFwAAcBI0dazH/ysZAAAAAKDFSFHX05OUlGFnm12pCl7Yrmo7F5WmYdfcrPED7ezx2rtWL26PLb2Jyo6jaluuNnxoZ5pcU1yb2mWUvZmnd2pSlJL0rhbPW6mQu7RGpS8t0qrqs5Se/L6WPmqXH9qqvPwyJfdIUdWbDyt3e427tfauVF5hjVJ6JOndPz2sleXe4vgSlN3IMra+vFxlyU69K/P18HPbnSVGmTa89G6c+8mo1rsvrXW2iIrf9mqVvrZAD//hT/qfv8S7N4MSnb/49UvgwHblrfPXzDDnaaGWV52ljF6fatUfljnvllghbXh6pd6PFJ6hpKQkde1uZwEAAFoheiABANAG0QMJQHvWNnogdVT3tD5Kc159B/ZQSo/uttdESFuf/oOefGWF1m/Yo+TzhqpPF9PbYb7e3vuhlr/4Zy1fd0D9Rw1WSscala16Vk+t2qzCVzeqst9QDUrpqOrCXM3PydPKVWu0ZU93ZQ/toy4l+XrwsWX6e8k7evedDVrz1mH1HztIKSpV/q8W6M8rl6l64CRfLyGnHs8t1pKXl2vllj3qPsipR7Kpx2JtCW3XC39+Qfl/66bzRqarW8cuSunVS70+360tHS/Q1y/oZ3t0xC+7+MVFen3ba3phy2Ed2bxES97rqzHnpero3pV69k+rtHnrMm2s6q+hA1Ocs1Sj0jf+qEUvvea0Z4uODrhIA86s0tYl85SzplR///tWbVq3RnuSz9fQs7vEb3tlgXKfXaOVy15XWU2V/vKnV9XhCxep32Fn+Zo9Kn39WeUuK1Bl39FOHU3/l0TXxvn3t3Cx7npsn4aPH6S64wbxy+g+0LlGvbqre2qajm4qUseLspVavU156/rqqkmD1b1LZ3288RntPnuCBp/VR9nn9VPKGd2V3qVK+ZV9dVE/52Y4Y4CGZvZS9zNSlXZ0o4o6XZS4d1eiss9pRBlOW/p8YYj6pTj17ttFVSsq1feifuqiShVt6agLvn6B+jnX3/Q+MpLOcO6FXsn6eEuF0r8+Vtm2V1HctjtnJanPEI3/Ymf97R1puFuuVb1Vi+96XPvOH69BZ3qL4peRqH5GmfLn/qfW9Biv4X3sVTy4R2+H0vX1cdlOPcO9j0q08rUeGntVtnok91Gv6pe0UWOcuiepu7m3ndexbS+pePh3dUV/r5wuKZnq27uX0tL7KLWH1ysJAACguTV1rIcAEgAAbRABJADtWVsJIHVxgwrhn57Qqt/rr2nX6+bvXKYxl3jBI6Oy6CltSpmuWVdfoeyjr2rjkYuUfWSNnnr/fM2aMl4XjD5Na3L3asCodHXvM8TZd5zGjR2n1B3PqrTPReqXPsiZ76uD7/XX1T+aqsvc4JGRokHOducnF6s0ORpECK36g9b3n6mZ/zRe4wZ8pCUrPtPoob1UVZSr9wfO0oyvf0V938/VHlN2+M/NyiK9XT3AC3K44pddWfSmNPZfNHjj20qedr0GvF+qlC+crg1LSnX+jVM0fuTFOm3NM9qbMVrpXUq0MreDrrj1Ok0aZ4JHpoQu6nO+V64uvl3f/8Y4N3hkmGBErbZrj1aWDtD3L5Nyys/XXV8+qM2Hs5XddY/y3zysr944VVeM7aHNS0vU++J+6pbg2hhJ3XooPWOQBpxVX6q1xGW4Krbo5Q96adzwPko6UKJVFam6KGWHns3ZqqT0Dqrp5Q/o1Kj4r69K2V/RYO+iWSFteWWPeo0Zrj6J0vjVW3YDyvCp2fGmXj0tW1/JMhU5oD1r
39Tawre15m8H1WfwAKVEGnpIe9aXKtkN8MTwt92ZTers/N8EdWIDSEnd1aPvAA0akFo7OBNTRliwfkYXpZ7dT33691NKuH2HP9KulSv19oZV2rw3WecM7qNup32uD9e8roqBQ5V22n6Vbl6pqrMnRM9T+Ur9YVO2vntlv8jxks7wgkbhnwAAACcDKewAAAAAoJ36uDJJQyIPv/0Ga0R2mjuV9qXZuipLqvmwVO9te0WLnlykRUvXqqrmUzcFWM3eAuU+4Sxzli/7m7tLo31cKQ1It4/Ke/dVVkWVTS+Wpr697PLjfmiepb69zdP8Xko5w1uiQ2UqfW+rXjFteXKx1lYe0acHzIosXfatw1r26we16MWNKqsnx1mitvfNSHcf/Kf1jDm3/foqzbQjKV0DTgvpY29pYmdkaPi5KbUDQo1RXay8p0O6/NrhSraL9NYTmr+2qybNnKox6XaZFXprsVamf0+TAun0qlX84tMKTZyq4eFzWLVVOb96UA+a1xu+BGwJy25EGUZFgRav6qvvXRGuSIYm3jpbM34wW7MvrdLiV+pJHWfEa3tCycoYluULSlmJyqhVPyNJKecOV0a4fUbKcE29eZZm/PA2TUnfqD+9ZRLhpWn8rGuV9n6BCgr3ST2c91rk/g5p5dOluvTbDakzAABA60IACQAAAABaiaQuVXq/rN7H8K6k7ilKH/1Nzbh+hvOapdk/HK90Vakwb6eyvmeWzdBl59mNw45Kh+1kXZK61Gj/J3bmQJWq+qQ078Pz03sopfeF+qbblhmadfNsjT/HW5V87iTN+PFPNPXc97UoPxjUqHHaE1VP2+ty6H3tVJp62dmEDoVUvKe+kXrqYIIff3hXQ/71KmWdbpf17KWMsy7X1G8Pd9MSvl+crL59vFWhtxbpJX1DMy7xgoceE/hZpHfPm6GrsnzRFRMYcc7TT8zrchtESVh2I8owKgq06EXpG9df4pylOLp0U3JSUt2BtXhtr1ONQsVlNnBpJSqjjvpV7ylW6JCdiZFsxqhKsl2TktN03iUTNfGCJJXuzNJwO25X6M2nVTpuSjTIVp+K7Vq5OVR/MA0AAKAFIIUdAABtECnsALRnbSOFXXwp/fpq9zNPaHnpARVv2KjPB3pp7CqL3lZ1hi9dnNGjp5LXLtGSzWWq+kehVu3urgsG9lFNZb7+8laZSjdu1IdHu6hH9nC7X3d1+ShXT68pU1lhhboNH6AU0+Pkt09qeaEdT2hPd51/fh/16Zem7X/6o94uK9XG1R/pgslfUd/kYD1q1Sk2hV2CsquLiqQvDFC1m+LM+VlUoV5fGKr0Lm9rydObVVZZpsI1H6j7iEFe/Z56U7s/3K3Cdyv0hTFj3XGejC7dOqrgD7kq+nSXdh3op+yzU+K3XXtUtK+XspNLvfo5P4vkpbB7e+lK7arZrc35ezTk2q9rUD1Rsup3n9f9OR83YAykeKq08Q8/05/3SLsL12jNW+Gxm9LVM9lp+wvbVLYtXyXnXaOvD+wmleTpP369UUcPFmmts2143Cqtf0I/e3G3VLbVLWNNqXdeI6nfAlLjll3VqDJKlfezh7Tx6KcqWu/V+3DfcRrUYavyXtysXX9brtwtZ+k714xSSie7S60Udonafsgdz+rJFVtV+n6Rit6pUPehNnVj9TY9f//T+jgyBlKiMsrj16+H2adMqx75tTb38o2BVLJSi1dsV/H6l/T6vks0/WsD5Jxtlb7xoBbkrtf6Xadp3PRJGmTiSlUbtfiBNdp74B17Dfxlx1dTtlZP/mqPBvxTnPR9AAAAJ6ipYz0dSkpKjqWm8msLAABtSSgUUmZmpp0DgPalsrLSTgEnoLJA81f10uxvZtkFQBOoWKkFr6Ton/+ZlHcAAKDpNfWXiUlhBwAAAAAA0NzKt2rZ9hRNadAYTwAAAKcePZAAAGiD6IEEoD2jBxIAAACA9ogeSAAAAAAAAAAAAGhWBJAAAAAAAAAAAAAQQAAJAAAAAAAAAAAAAQSQAAAAAAAAAAAAEEAACQAAAAAAAAAAAAEEkAAAAACgxalSwe/mq6CyWHkz81Rsl7oOlSr/iQf14K8e1Pw3y+xCKfTWfP3sxcCWjVL6hlPmgz/THbXKCDl1+Znyjr/oRqqj7QAAAABOGgJIAAAAANAipajr6UlKyrCzVlXhKwpdcrt+8uOfaPZX0u1SKW3Etbr5y1l2rvEyLv+JfvKDb2qwnY9K07Brbtb4gXb2pIjfdgAAAAAnT4eSkpJjqampdhYAALQFoVBImZmZdg4A2pfKyko71ZrVqKqkQkkDe6nG/ZmuZGdZdbWz/O1FWps6Q5NMrCgpWclJXu+hpRurVHbe9/XQN20QqbJAuQWHlVS6Ue9WSEOmzdZVWUmqLszVwhU7dfiw1C1jkq67drhSvD3cfeav6qXZ4TJUqvxfLdX6qjIN+deHnP3t4pJ8PfjsenfyyL7tOuvqhzTrQqeUA9uV96e1qkquVtXpl0bKLn5xvnaemaH3t76rKg3XtJsmKSOpxqn3YuX+LeTUpZvGTJ+t8eeYEuO1HQAAAEB9evbsaaeaRsfbb7/9f3ft2tXOAgCAtqC6urrJf2kAgNbi4MGDdqo166guPborKfLTUV2itX/drKKSIpXuO6rqUKkqOvXVgNQkpWSO07gvdlNxabIu+oL9guDBPcpf1VFX3nSNrhjSQa9vqnHXJfUZojGXONuPHafUHc+qtM9F6hf+k9DZ521/GUrRIGe785OLVZp8kbLDi3sMcvcfl91JxRWj9a1vZKmbarQ973V1+fb1mnShU2ZoidZqjLJ7SJVFT2lTynTNuvoKZR99VRuPmLJKtDK3g6649TpNGneRBpxpy47XdgAAAAD1aupYDynsAAAAAKA1SM7S+K9N0mXD0pT1pUma5EyPz6qnb06/XkozPzu6c66avQXKfWKRFj25SMv+Zhcel5BWLt2pUdde4h1DFSrbXqq1z9my33WO9Um1u0YarBHZ3lZpXzI9ocxUli771mEt+/WDWvTiRpWFNwUAAADQIhBAAgAAAIB2o0qFeTuV9b0ZmnH9DF12nl3caDUqfvFpVX1tqoafYRcpWSm9M3TpNK/sGT+8TVOG1R3gSj53kmb8+Ceaeu77WpRfapcCAAAAaAlIYQcAQBtECjsA7VnbSGGX2KE9bwfTyalKW5fM05Mrtqp0xw5t3Vym7kOHqs8xXzq6SGq6dNVU5usvb5WpdONGfXi0i3pkD1e/rgnKOLRVOb99UssLS/X3v2/Vpj3ddf75fdSl5C/6v0vf1cF/bNaat9ZozWf9NS6zj3r12qe8x19T8ScVKlpbqJrMIerT2aSwe1vVGb5UeUaVU/ZTb2r3h7tV+G6FvjBmrAal+LpKAQAAAGiUpo71dCgpKTmWmhr5ywMAALQBoVBImZmZdg4A2pfKyko7BQAAAADtR1N/mZgUdgAAAAAAAAAAAAgggAQAAAAAAAAAAIAAAkgAAAAAAAAAAAAIIIAEAAAAAAAAAACAAAJIAAAAAAAAAAAACCCABAAAAAAAAAAAgAACSAAAAAAAAAAAAAg
ggAQAAAAAAAAAAIAAAkgAAAAAAAAAAAAIIIAEAAAAAAAAAACAAAJIAAAAAAAAAAAACCCABAAAAAAAAAAAgAACSAAAAAAAAAAAAAgggAQAAACgjatR2aYCFR+wsxHVKi7YqLJDdjbsQLEKNpU5ewEAAABA+0UACQAAAEDb9nmp1v5xqdbuqrYLrIpCLVuwTNsr7LwVKlymBX/ZrpjFAAAAANCudCgpKTmWmppqZwEAQFsQCoWUmZlp5wCgfamsrLRTAAAAANB+9OzZ0041DXogAQAAAAAAAAAAIIAAEgAAAAAAAAAAAAIIIAEAAAAAAAAAACCAABIAAAAAAAAAAAACCCABAAAAAAAAAAAggAASAAAAAAAAAAAAAgggAQAAAAAAAAAAIKBDSUnJsdTUVDsLAADaglAopMzMTDsHAAAAAAAANA49kAAAAAAAAAAAABBAAAkAAAAAAAAAAAABBJAAAAAAAAAAAAAQwBhIAAC0Qc0xBtL7779vpwAAAAAAANDSDBgwwE41DQJIAAC0Qc0RQAIAAAAAAED7QQo7AAAAAAAAAAAABBBAAgAAAAAAAAAAQAABJAAAAAAAAAAAAAQQQAIAAAAAAAAAAEAAASQAAAAAAAAAAAAEEEACAAAAAAAAAPz/7d1/dJXVne/xD0JAgnqaNIlEMdgEnVANiFRlqt6r7a24onUt2yn2Xtu70F5cs6xTu9ZUYez94/5TL0i71mjV9uK0zdRhrmIdHEA0ehVbUMEKSHAkU0iNsTTAiQ2xTBAC5u69n/2c8zznV04gYEjer7UOPL/Oc579PHs/J2d/994PAMQQQAIAAAAAAAAAAEAMASQAAHDy7XlKt9deoM8s3uoXDNLxvh8AQm9/oHO/94E2+9m89nVrwff2aeU+Pz9Imx/fq4fe9jN2Xw90a1C7KvY4i92uoF6tfGCvFrzYe9zpjvtQDw3lsVkuvXvNa6iOcSBDkYZMdp+R/DEsHfsx2rx/7uMf+rljFM2HQ5LHsw3JcQ7i2Pa9uM/n3fgrlbcNu00wH5z/1HaZ9w93fnKvy/yc2DWMvs++stIfze/RY8hI42D2U3DboHyn1mWtjx7PUAnSNbzLX8Z9bzCGorxEy190eggNx/KXyR1juG3msaa+i+yrQPnILLux99lX5NwOUK6iacg67kLHE3L7z14XOzdZf6sMcRk81vw0FPkaKIr/Xsr1d7svow+9eoz5eFD8d3DWffJE/c0eRwAJAAAAAE6oXnV0jtF5VX5238daWz1W4eyose+otledpvP8bLFcZVauH+72R/PaI2qce6Z2/7BKNx/jCc2//xyOMQ3HY0gqVnESfKiHmo746YFVfanK5Nuz06/548zScfrrL5UGGxjv7+lXQ7W08oGD2u7yud32TD2sw5qVyhOmHDQdltz6YN33w8rktz/QrOaxWhX5jCVNYeWSOd4HDqthvl/3w4lauO1gvCI6kt83P35QS2ZOdNuumnlEN6U+fzD7GWjbI3p/3xg9fG+43ry+eZZfZ1D+kNfQl78o+z1xU+d4bXXbZ+RbW2lrPnuhz9db5x7VTanvlALl09jXeVTy5Sp4hd9lA5QV85nfb1ZQVu4dLzUfiAeeIseze74ix+PZbcz+1/rZFHfP8Ps1711VHb3XGJ9AGQQ+WaW6ctYYk/c/1vt+SWjftj5Thsbpygv8ghPp7T7zHTzO3Af6PpHAKQEkAABw8k3+mn7++516d9EsvwAAPiEXf1q7f/hpzfazeVWV6bFjDVLsO2R+YJboSv/ezVuPaOGsSKXoUCo2PcU6nnRnsD+0NWvCcQbOSnXzvWfrsUglX0N17gq/E2Fo0jC6zP7m2fEgwPEa6jw+RPa9+JG2V43xc4MVVH4vnB9N14d6dds4XXlxZp73lVmdR4MKYXt/2TdGjTPterNt4zit3XooWJd5ri4u0UL163238ix954dn6zsXuzXGWbpyprR2T7oSPp3f7bEodd+aPXe8GlOVWIPZzwDb2sppjVVNngJG+TsGw/g7YSgNffmLs8FcpRp+xPOtzZdrZ05M5WsXnLq3LNi2UPk07H4bJ9vgVabCZcV9ZpX/u8Jck7+e2a+124LgUrBuvL4evvfiiXpYfXrVf2jQaOKwGmZmf679+6Rx7hmp65t5/x7yMjhM8xMQVTWzRI06oldjPVV79epWU37nTtTsE56PgwZTC2dN1HlVR/TTaAONk4QAEgAAKM6bi/WZ2gt0+6+e0v12+Dj3WqAn3/TDybnXYm3xm0tJPXl7uNxvu8evyhiCLvmrBW6b+82+U/u6/Smzh+JtWRx+jn1FPsvZGjvm+xf7z3vTrw6PJ3wN8rMBDDPhEA4vpodwsa12o8OypFrxRodB8dMPRYapibUwzju0THS4iGAoiehwSPEKl7BS2K4YxHFGBC3iu4MhNdx2kWFccqRnZWR/8WGaPgyGw8i5zoum208/9Hh6f5kt8/MP8WN/aMtXohmx8xfd1p+/x/05eSSpWc39ZvvDmuVaUAfrF7zYY/7/s+4yx7WkKfJ+l+b0fuPnL0d6zfbx/WekIdZqOyMNRr70BtfI5KVwXSyPGNHjfNzkjzxcq/dtZmLbwfT+8+a93E70MVrBvnLnyWBd8LmFtnOKSZvP1wOWWaeIPJ6LP454+ff78ccf+xyz/vtbS/SDxrF+QWDA9Hq28ntJtMLXcq2NSwau+Lc9GjODLjlaSg9eJL+7wE6kF6VzVB0D5L1AdrnJy6Ylb++GU7P8Re/nsfvJQHk3+jkPHCx4PQvms0h5GZLy57bx60Zy+ctw3uRI0NZ9j4cBVZsv8wWBjILl0/ZOPrZGEPGAViAatM0WBo6N6tNdT6rvZLXjC9KV/3ii9wSfD4r9+yVa5qLrovnJTxf6GyOvaB7JzLvRdQ+Y/JqZf4CBVE1QoylsS7ZG8mM0OBzNx07ue58rH1nfVfH5nHnTfVbQoMQ2IIkGoU8WAkgAAGBQ1q2VFtjeQyu+ZeZe0aJ5bborNf8zfdUHhbYs/rwWvfItPW3X/X6FFtht7yscmHls1zTXM+mNB64xu75Pj4UBngHY4NFXl12jxa/Zz7LvlxZ9Ph0g2rJ4nh5TuP5OadkrwQonqSfvu0/rrrlfb4TpMJ+98FeEkIBTW7/u2lOiYPiWcVrb/Gd9X2e4+a1zzY+v5oM5K5LsEErbJ/uhotz7IsPCpJgfhpGhZbbOle56IKyYCnoLpFsNh8NQ+QqZrOFfjvE4tx3W+43B57vhpDKHp0k5YvZ/emr/6eGrbCAmMizWveO1PbWukH4tkd+feU/jtoOxH8bpIX7O1MOdmUNThZVo5rObIkPzuP18FPvsJZ2nBfv5dqU7D6oy+w1bdDsl5jybzzAL3BB2roW0uS5NR9PDX7nzGZ6/POmt+nR8/29/EElDMHxPerihaBoGSq+x7ajOC4cBmmmuc1N4jexxpocX2jr5qDmnudmW7Ktmmgk7xFGYxrx5L9vJOMaUYvNk3u3iads9f2zBtKXlK7PHmMdtRdADfWpM9foJjisYgsrsZ9bHLnAZtbnZHHdjNH9GDHhePtQTzf
1amPF+O7RV7krpYPvGaC+A6D3FTDf6yUyuotwOt5Orotyk+6eRXkaZ+V3RSvCqsWrwk1kG3E9ExrZuOC8bzE1VvEWv/6lX/myleXQIwazhwArl3dSwZ2aduaBLBsq3lL8hLX+ZXK+ixo993vzI5a3o93xD9ZFIhXHGectbPm2e9o0g/HtzVh5bWeXKfPdF7g8uwOVVVY915eiJMGD39kF3zrZ3BvuuuvisAmkdY441GnCJnq/Mslzs3y+2zOX7bs6U/2+M/AqVy2MoS0AW3/N3W3r4uFgvwJj89z7Xkym1jw/1aueY+Py2sLdinPss36Ck6kuna+G+dI/Ck4UAEgAAGJwLp6nS/j+lTtfa/++Yq0uj896li2yw5lva6Xoh2QDOwBb8l6ApXOX5de7/Xe3FBHG26vll5r877tQtk4MllX91pxaY/x/7fzaY5ddfc52+4NbPMj9Mr7ETca/cp8ttz6Upi1wQ6ud/5VIJ4JQ1Rg/P9RUtrsIm/aPMVa7kFXkGQr6KWNszIFIJ637MKXNoi1Ckx5GRPfzLMR5npLW0G04q74/JyP7tMDZVfpgb35oxndb4EDj5RY83WokctMJOV8JlDNUT602REWRzLbTjYpXjRbND/qSHEHGV0aHYMEJGoeFGIhVvdvie1NBhsTQMkF5r5ump/Ucr93LnnyINKu+d5GMsNk/m285+ZmRdkF/zpS0qT5k9ljze+ZGvvI7kDX8uUvtxxxVMOjboqPTwVVkGOi8Z5zoQaekfE1RM2fP0g/B4iuWCGSY/5BymK6iAjQ7D5Y6rmB5QMYPZT/a2rleFSVsqgBut8I/t51Qof/LDgU1MpT0+9J9VKO9G7leZeS6XgfJZKN92Nm2RdaO7/GVzPQXW+oYNPzxDaooHe5Y09enKXPm2ENezT6lgpq1oVvOfcwRMcpSrQi4OGkakAlNbTxs4/6T06y7zhfwDl5Yz489bs+cqVpaL/fulwHdzlnx/YxRQqFweS1kCcgiGsQt73gbfQTn/Vi1073N52u/Dlv/q0806P+/uwbkDUvbvgVS5cMNZFvP3+tAigAQAAAbl2mlT/FRhwZByn9eiC1f4HkgnyJ5d2uUns/xul5Lh+jDwlaVSt9xpe09Zr7ieS7Hh7QAgg6v8qCryIdIZPY5ivZGOR3TomoKVLNHWwqWqqfaTLmhzRDeFrYzNyw7VVHgInEKyW1Kf22T25YfqyexN4Srjwu3WmnV++fGxlevp/X4/6BAbyDWMUC72eSHzbU+t9H5SPaxiaSic3kKy88+41NBgsfOSowKyUN5zQ6OE73Ut4E/MMWZ/jldsnsyznfvMWO+TYIjCsNX8oBXK47aXQ2R5qsLWnvCqft3VHO0lUojJcyb/pit2chjgvLh0ZwVYMlv6WzZ/m3Oi8Rm98YzoNXXpzvD2B+7a24rq7AroD/XQ9w5qSaqXTcAeV7TMxoas8xXfccXux8q9rXveSiRt0Qr/+H5Oct72st+blv3eYHgy2ysjtdwGAYoZ+s9dw9z3bspf1Ikpf9nn2A9ZlxmwjPSiyQoURgNV+cqna8gQKZOuotnk6+gwWXnKihX9vnbB1wjXY8oHtHZ/c6wrL8X+7ZGZzrDXRbwMDkaB7+YhUKhcZpcl4Bi5Yex84CYzMBlV6N4XCf64BmWmPJ032ayz87Z85QpIuR6E5r4YKUNuf3l78Z0YBJAAAMAJEPb6uV9vLMoaYHuQos8vss9gyuiVNHmapvnJLDZoFK63wSS3MIfPBb2O7OvpO4JFj82LPs8JANKC4WEGrqi08j7/6HiZH5qpOsiclbmhaGVlUKHpVNnWwuNSQyulK5oKVMQVFFTCplpSp162t0NGbwo3FM+YyHA2JcHy4+V+ZKfT9Fj0eRguvUVU3FruofPBPoJW3EFlcLxHSKH0Fpadf4LKcMtVoIf7ygwSGIXyXqzC0B3HiTnG7M/xis2TebZznxnpfRK+Uj3ABqtQHvcVt+GyVAXuzNP12L0TtbCoYZMMV4kUqdixQQxXcRQZqqvgefGtmDMrZbNa+vvgUfXE7Hzh0pkhWpnqgkfB8FH5gkduqJ1Y2c/I7zkDEtFK2SL34+TbNh/7OSen/OXN297gyl8Q9HFDbKaW21eeno9RWfer9L2b8hdxgsrfQPkgLtIwI8aXj4HKZw7p48lfVmK96rys+0jIpTvzGWa5BOUqizveXGW5SIW+m4dAoXKZXZaAY5V+/tBm87d97uHrjEL3PmP2rHEumPT+nqA82Z5N2nMob/kKerJmfo+Y+6S51w3cQ3ToEEACAAAnzitt7o/55K8eLWoIu9xm6T4f3Mk9tNwsXW+DPsse1ZPmDzEr/LxgSDy//pUX9LJbv9X8OI0+A8kHqG4Pns906aLXtNiOcHdNXcEfdwBGsYtLYj/c8j9XJKPCJatS+DhEWjfb5z/k/SGryDAXrhLHt5h0LSmP6KepIXhsRdXxPFg6+GG9ZG2614zrTWN70dhKPmUeX/qB3u74g8khEFYU2Zbp6dbZsZajTpDezEpK1/I80vMnqFQ9TedlpaFAev18Xj7/pM69G36nSEXnPeskH2OxeTLfdvYzo8/t8L0UiqpIzuWY8/hZ+o7rheZ7d/ihvNLnInieiJNREW57r9lK2FXRIEHB82KDF9mtmG2FUfRZJ5sf9z2PcgVcYukM8n2qFbM9h67nUa6ghQ1KBT0bsoIEWfk9aDUd9gxx6UjdywaznwLbunXR62Pmm/znnJLlz1cURlqJBz1bIr2G8vH3q1RPHJv+gRJG+Ruy8pfN5n9F8pvJm/b7xZeBzOscKx+FyqcN7kbzgx2OL/UMlEJlxezW7D/17EDfKCOVDnftwv2G5Sg9pGN+meUqcrxZZXCw8nw3D4VC5dKXpfjfQcEkMFiu3Jn75E2ZzyGMGujeZwNMpuz+tDMMMo9VQ2dfnvKV77lI/p4U6614YhFAAgAAJ8As3bfCDgv3M3219gJdfm+dFrggThBQGmr2eUtP35Eefu7ye6XFr+3UfZ8L19sh9ML1zaqLPQPJBqjMevcMJLv+81r0yrf09M+/lmfIOwA4S99xD8Xd61o9z2qWHr43bKVsK33CSjdbOZVupX/sw7/kMHOs3vetrm/aNk6rcvRWCYxTwx4/hFKsIrlUN987UQ2p4ZXyV1QVy7badg+Jd/uLHNe+j7W2OjqMkB+mx5+/V2fZlpTpgFKm8Af7rIEqXl1FY9ga/YDUOD7S8jg7vbZVt235Ht3/+xlpcD035udIg5E3vX59fkH+sc+6cJ+xVlpY4E22clLbDvrK8UJ5L9vJOkan2DyZd7t42uxwX/bB59m9Zop1HHncV1rf5J79kbGfYs5FVKHzkrOniO1tEu0tEAyfFR9ezL7C8mCOb354rYJAU/h8JPfgbfN/bKg383IVWWFQwuatyDqXz3Lk99nfDHqG2G1cOsJg1mD2U2hbd57PVOPW8HpFgmanaPkLnkVzNDWU0YDbpwR5Ljzf5z7wsRrMPbMgyl9ugy5/uc3+pn8ekEuL7w0YlgE39KlS1/mmzmiwN3/5zHxf7Du6YFkxzPfoD
+Yq+L5z1+qMdIDIrQvzXYHgcw62XG2d1RdLp7tmOcpg0Qp+NxfPBWDD5zHFFCqX0fNv1g/qeVBABhccshOFAs8D3PvCoKbCnojjzP/9ij8fNeCCoXkaAwTfTT6IfBKMaW9v7y8vL/ezAABgJEgmk6qtrfVzyJT81QIfZHpMt0z2CwHgFGFb2tuHhg84/JMbukpaVcSwTsDxKDZPFp13hzUbJP6z1s46c8DK8JGRXgx3lL/cKH/IFuSf9xtzDekJIB96IAEAgBEuqSdvD3om3f9msOT9XXYIuzpdQPAIAAAAAEYgO3xY5JlYfijfgZ8HBSCKABIAABjhKnXL/ffrWjP12LwgkPTVZddo8WuLdGmwAQAAAABgRDlLXw+H+rPDieV9JhyAQhjCDgCAEYgh7AAAAAAAAHA86IEEAAAAAAAAAACAGHogAQAwAtEDCcBo1t3d7acAAAAAYPQoKyvzU0ODHkgAAAAAAAAAAACIIYAEAAAAAAAAAACAGAJIAAAAAAAAAAAAiCGABAAAAAAAAAAAgBgCSAAAAAAAAAAAAIghgAQAAAAAAAAAAICYMe3t7f3l5eV+FgAAjATJZFK1tbV+DgBGl+7ubj8l9XV3qK23QvXnlto59bR3qeT8atk5qVedrZ0a95k6VU4I57tUWlejRImZ29Oqrgl1qikrcdMd+92bjIRq6sN94BPV16OOtk5z5aTSs4Nrlbnciq070KnWveNUV1cptyS6jwqzXYVd2qvkzi6NPz/IC1ZfV5s6x9r9BPOfuJzHbdj0/aHHTJQoMaVO1WcEi11Z2Js6I6o2+byku009pelt3D47pZqahF+A0cXcA9v7VHF+IigbETb/tx2q9vfSzPxk+ftigbLX09GqTrui5NNB+cvY1kpMqU/nR5za7PX9oEQ1k9N5pqukRtUTIveu6L25QP4DABSvrGxo/1ilBxIAAACAEav3d2u07EcrteWAm1Prc5vV6dYYHeu1+vk1evadsPqyU5ufXK71bX1mukPr/89SrfldsK7zt6u1w+0Dw8qBVq15I7iinev+Xsvf8tcysjxTx4bVal79rLaH1zOy7f7WFXrwF1uUVKnG/2mNlm+xgRirQy//0w71DaeK7chxd216VEuf7ZDNudq7Watb7Xn4D7U+sVTNHXZhUBY27gmmQyV/alHT634Do29ns1Z2HPZzGH3MPfC51lhAJ9Cr7eZe2bxqvSkJcZ1vZNwbc5a9PnU8+6DWvCslKhIq6TsS5NXQno0+z2JEsXnht+m8YO9Bm/eaiUgeid238+Y/AMAniQASAAAAgBHt6usq9ObmpJ9L63inTdO/fK1K3opUWM1skLbsUG/HDvV9cb5q/GJpks6pq1d9vX3R+2hYKatx1+WKmdO1eXeXX2j45faVbuHeoR3vTteN15SoZWekmtJvO+OqW3VzdbM2tUuJK25U3W+a1XpI6tn0vLqu+YLqhluzeH/cl95wo6b/S0uqcn9SdZ1ZPkOXNkzSe/vCIJhUcV6Yh4OeVSV101X/2x3+fX1q+7cuzbmo0s0BKQda1TJhrm6c1qodPpOV+LxXUxbeGyP3xayy16uuzoSmX1Kv6opq1YXbltheS2a78yp8nqX30ajh80jWfRsAMOwQQAIAAAAwopVccKmmv71BrbEm7zaQ0KD6umlqKGlRq21B39OjP46drhnlrVq5oVfTLhyvP3aFle9JbVvXrObnzWtDGy2kh5O2jea6rNTy3/Rq/hXpkF+wPLhmLfv8so4davtsveoubIgHDiMSZQl17LXXvUZfuEFa/UKzXt4+XdddMnzDhr1tu9TxxRpV+/nk9pdNuldo5TvTdX1Deji6ttd9Hn6+xeRoo6RO0+u3BEGBvjbt6Jmt+uEyRB+Gjd6dLSq9qE51f1Gv1ncy+yDlkFX2Emq4sU4tjzyoppdM3uMGinz3bQDAsEMACQAAAMAIV6k5X5Re3hQZVqljh1qmVGpSr1l7/uGgN8rHfTpiVtV8NqGu8tmqL5GOmGWBSs28dq7mXm9eV9XRA2k4qWnQ1Z+baiamqroiWOTUzQmul3nNqAoWdbzTopqzJ6nXXM+aPh84zCExIehqVPLZG/XFro0quXaOeccw9PoKLf3FCq3vnKpbb5mRypeVF8zRnAsSUoV93ohfaNT9pc/D18/w6SlR3Wfr1fpuUn1tO5RsqBdPP0Jcr1rfOqypFX3qLZ+qmu1hj7UCcpS9kslXa/59d2veBQf1wiPLtCX9qDqMRvnu2wCAYYcAEgAAAIARr6T+KjW0btSOj4P5jndaleh7T+t/s14tByapJ9obpWau7r6eFtGnjJJJKq24VNddsFnr3ol1M8vQoR2tCfW1rzfXvUW9pT3xYeyso0m1bE+oYVoYiilVoqJSleXDbew67y/n6Z7b5mnuVfVKjPXLrNPNcdd9QXP6mrUx7H2Vhx3Grmbr29q4M6mZFxI+QgY7fF3PJO3fYsrNhveksvQwdseitOYKXXd5j/74J78AI9eEiUr8oUthP96ufX1KnOlnir5vAwA+aQSQAAAAAIwClZo9W1q/xU7bQEK9rv9K2BvjRs35qEWt+w8rUXGW2zrloz7/sPddWv0PS7X0R/a1buAW+DjpKi/7grRuYzA0m2V757jrtVQr3upxvc5aL7peN/ueEXNvmKPeMHDotn1QD/5sncbfOE8zRsRzWEpUf1WDtm9o9XlY2vBkmIdXqCWs1bXD2NU0a2XP59TA8HXYuVrLfLlZ+lKHG76u5JqbUz2Kbr62TlsGGsYus+yZO+Y6M930VLNW/GKp/rlzrq6oCzbFCFY6XVdP36amX67Uyn96VM0l5rvW90gLZd23M/IfAOCTN6a9vb2/vLzczwIAgJEgmUyqtrbWzwHA6NLdzdhIAAAAAEafsrKhbRFEDyQAAAAAAAAAAADEEEACAAAAAAAAAABADAEkAAAAAAAAAAAAxBBAAgAAAAAAAAAAQAwBJAAAAAAAAAAAAMQQQAIAAAAAAAAAAEAMASQAAAAAAAAAAADEEEACAAAAAAAAAABADAEkAAAAAIhqW6P/+ZNNSvrZgg50qvOAnz5WPR1qbW1V275evwDF6uvuiJz/PvW0d6rQWWxb/ag2dfsZAAAAAAURQAIAAACAqPOv1re/2qBKP1tIz7+t1Oa9fuZYHGrRmnWdKv1UQj2/flArW/v8ChSj93drIue/V63PbVannwMAAABwfMZ+97vf/V8TJ070swAAYCTo7e1VWVmZnwOA0eWjjz7yU1LPppV6+vX1an6xU4f//JyeeGGM6i+boklta7S8Za9aVz+jVS+1atL0S1RdarZ/a4V+/H9f0Lpdk3SZ2e50v5++jnV6/Jer9OKG9drWV6vLzv9YLU/8WCte69C//3uLtr7xmnaXXqyLJofvyNa3Z72e/ucNequlWVt6ztNF5yc0dtzZurB+ihJnnKnq03u0rvscXTbF7qNXrc/+QmveaNVrr7ZqwrSLdDY/27Ic2v1bdZRepgvL3Zx2v9mh0ssuVHme69v9u9+qt+YyTTmtTWt+skoH6uzyHm165td6t+1l
PfOvL2jT/nM02+xwrPrU8dLjanr2Ra3fsFOHp1ykzyR2q/knLUpcNlWT3BFIbc8s1x9qZujA88u1LdmqVf+6SuvemaT6WdWpbQAAAICTYahjPfRAAgAAADCCJVVy0W36St1mHf6LuzVvSlIf+jW79p+jm//mbv3d1xLa2NbjliUumad7/naernJzaR3b3tTUr9xt1t2jO/9ztVmS0Iyv36M7bpimq265xy2fd0ki2DinpDau6dWcO27Vrbd9V5f+8XltDz7S61Pb9jbVVQf76Gtt1uZzvqH537hVd95cqXWvdbjlKF6u6+sc6lDzP67XOf91vi6t8Ms62zT+ijt09z3/Q3Pad8ie7b7WNVoz7gbd/Tfm+t41R8l/2WiuYoWqz+wxeahXbb9uUad61NWV0KdK7U52af/ZN5vt/07zyjbq9wyVBwAAgFMcASQAAAAAI9g5mlpdYv6v1KfODJaEKqsrZNfoNPdvQXXXfkWHX1iqpb9Yoy27B3hWUU+LVvzIbGtfL/nAz6FOdfy+Rc//sklNv1yujd1H9B+RZyclX1+u9dX/TXNrgvmu3a3qeG2F2dZs/9wO8/79BZ/tg2y5r2+fdrzwpLoum5cOHjk1qnDz41O/knt7OlUzxQ9kOKFaUyfYwFGpPlX2R3V17NLmV9appaNLSZO3grBfpc6p8J81NvgPAAAAOJURQAIAAACAgZxRp7m33aN7bpmm9/7xZddDJdT3sZ8IJWZo3t8GvZLu+aKPCE34lBJVn9OX//t8zTevO759p64+N1iVfL1Jz+oGzb8i/dSl0sSnVfOfbnXbzr/tTt39lRlynVwQUzIhoY6usHdRl5KHEjrLz+VWoumN8zVt8yNa01b4eVN2310fhmG7Hu0vCfadqJik9zZ1qPob16pnS4t6zq7wASQAAABgZCGABAAAAABex0u259AKbdi5Wst+tFTr2u3SHrU80aQVzzer+dmN6vn8DNlB7KzEhXPU89SjZt0KrXkrNiZdhhpdcU1SK368XCvNflb8cl0QhGpfo//9D63qfK0p6LH0o2B5ouF6nfP632vZU+YznzHveYv+R7mUXnS1pm9rUtMzK7X8J80q+fIcpcNweYyt1BW3zVflb5ZrU5dflkPpJddpxvZHzDWw+35Z468L9p2oSKi5I6H6ummqP7BGPVWxrkwAAADAiDGmvb29v7zcPXEUAACMEMlkUrW1tX4OAEaX7m4ePgMAAABg9CkrK/NTQ4MeSAAAAAAAAAAAAIghgAQAAAAAAAAAAIAYAkgAAAAAAAAAAACIIYAEAAAAAAAAAACAGAJIAAAAAAAAAAAAiCGABAAAAAAAAAAAgBgCSAAAAAAAAAAAAIghgAQAAAAAAAAAAIAYAkgAAAAAAAAAAACIIYAEAAAAAAAAAACAGAJIAAAAAAAAAAAAiCGABAAAAAAAAAAAgBgCSAAAAABGuD51bt2ktgN+NqVXbZu2qPOQnw0daNOmrZ3mXQAAAAAwehFAAgAAADCyHe3Qxsef1MZ3e/0Cr2u7mpc1q7XLz3vJ7c1a9lyrMhYDAAAAwKgypr29vb+8vNzPAgCAkSCZTKq2ttbPAcDo0t3d7acAAAAAYPQoKyvzU0ODHkgAAAAAAAAAAACIIYAEAAAAAAAAAACAGAJIAAAAAAAAAAAAiCGABAAAAAAAAAAAgBgCSAAAAAAAAAAAAIghgAQAAAAAAAAAAIAYAkgAAAAAAAAAAACIGdPe3t5fXl7uZwEAwEiQTCZVW1vr5wBgdNm/f79aWlr0xhtvqKuryy8FAAAAgE9eRUWFLr/8cs2YMUP9/f1+6dAoKyvzU0ODHkgAAAAARhQbPFq7di3BIwAAAADDjv2dYn+v2N8twx0BJAAAAAAjiu15BAAAAADDmf3dMmbMGD83PBFAAgAAADCi0PMIAAAAwHB3KvxuIYAEAAAAYESxY4oDAAAAwHB2KvxuIYAEAAAAYESxD6QFAAAAgOHM/m7p7+/3c8MTASQAAAAAI8qMGTPU2NhITyQAAAAAw479nWJ/r9jfLcPdmPb29v7y8nI/CwAARoJkMqna2lo/BwAAAAAAAAwOPZAAAAAAAAAAAAAQQwAJAAAAAAAAAAAAMQSQAAAAAAAAAAAAEMMzkAAAGIFOxDOQ3nvvPT8FAAAAAACA4Wbq1Kl+amgQQAIAYAQ6EQEkAAAAAAAAjB4MYQcAAAAAAAAAAIAYAkgAAAAAAAAAAACIIYAEAAAAAAAAAACAGAJIAAAAAAAAAAAAiCGABAAAAAAAAAAAgBgCSAAAAAAAAAAAAIghgAQAAAAAAAAAAIAYAkgAAAAAAAAAAACIIYAEAAAAAAAAAACAGAJIAAAAAAAAAAAAiCGABAAAAAAAAAAAgBgCSAAAAAAAAAAAAIghgAQAAAAAAAAAAIAYAkgAAAAAAAAAAACIIYAEAAAAAAAAAACAGAJIAAAAAAAAAAAAiCGABAAAAAAAAAAAgAjp/wPkr/aG8oj8dQAAAABJRU5ErkJggg==" - } - }, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The finished Pipeline should look as follows. ![Kubeflow Pipeline.png](attachment:f947c4a5-dc78-4ba4-8e47-ae73d8f0ecea.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Predict from the trained model\n", - "\n", - "Once Kubeflow Pipeline is finished, you are able to call the API endpoint with [mnist image](https://raw.githubusercontent.com/kubeflow/katib/master/examples/v1beta1/kubeflow-pipelines/images/9.bmp) to predict from the trained model.\n", - "\n", - "**Note**: If you are using Kubeflow + Dex setup and runing this Notebook outside of your Kubernetes cluster, follow [this guide](https://github.com/kserve/kserve/tree/master/docs/samples/istio-dex#authentication) to get Session ID for the API requests." 
- ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Run 9519f884-8baf-4768-a728-29de8ef5b4e6 has been Succeeded\n", - "\n", - "Prediction for the image\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - ":13: DeprecationWarning: `np.float` is a deprecated alias for the builtin `float`. To silence this warning, use `float` by itself. Doing this will not modify any behavior and is safe. If you specifically wanted the numpy scalar type, use `np.float64` here.\n", - "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n", - " data = np.array(image.convert('L').resize((28, 28))).astype(np.float).reshape(-1, 28, 28, 1)\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAAAAABXZoBIAAAA1ElEQVR4nN3QPwtBYRQG8EMU0e0uZLIw+QKXRZlMGC0GX8CglE0pk0VxPwQmE5YrJYPVIjYMlImSwXNiMOi97319AM/6O6fzh+g/Y5hr5mrRNByseAZba4D7EnlSN8wy3uAYXJOwDEw0ohKwD9mtxehqRLQBCnZr8GPkJ/Ll79y0m37GiIjiK2AQsGMYiIbryyvjmZO20U9gAIcjTg43GhfethOROToO+En6xRUlZhnSjd+I6BY7xVIRY79w4XapR9IOSTWWYSWUqE0xlH771R7UrULefm5U2pxVCt0AAAAASUVORK5CYII=", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'predictions': [{'predictions': [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0], 'classes': 9}]}\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "from PIL import Image\n", - "import requests\n", - "\n", - "# Pipeline Run should be succeeded.\n", - "kfp_run = kfp_client.get_run(run_id=run_id)\n", - "if kfp_run.run.status == \"Succeeded\":\n", - " print(\"Run {} has been Succeeded\\n\".format(run_id))\n", - "\n", - " # Specify the image URL here.\n", - " image_url = \"https://raw.githubusercontent.com/kubeflow/katib/master/examples/v1beta1/kubeflow-pipelines/images/9.bmp\"\n", - " image = Image.open(requests.get(image_url, stream=True).raw)\n", - " data = np.array(image.convert('L').resize((28, 28))).astype(np.float).reshape(-1, 28, 28, 1)\n", - " data_formatted = np.array2string(data, separator=\",\", formatter={\"float\": lambda x: \"%.1f\" % x})\n", - " json_request = '{{ \"instances\" : {} }}'.format(data_formatted)\n", - "\n", - " # Specify the prediction URL. 
If you are runing this notebook outside of Kubernetes cluster, you should set the Cluster IP.\n", - " url = \"http://{}-predictor-default.{}.svc.cluster.local/v1/models/{}:predict\".format(name, namespace, name)\n", - " response = requests.post(url, data=json_request)\n", - "\n", - " print(\"Prediction for the image\")\n", - " display(image)\n", - " print(response.json())\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.10" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/samples/contrib/kubeflow-katib/README.md b/samples/contrib/kubeflow-katib/README.md deleted file mode 100644 index bcbbbd516e8..00000000000 --- a/samples/contrib/kubeflow-katib/README.md +++ /dev/null @@ -1,21 +0,0 @@ -# Kubeflow Katib Component Samples - -These samples demonstrate how to create a Kubeflow Pipeline using -[Katib](https://github.com/kubeflow/katib). -The source code for the Katib Pipeline component can be found -[here](../../../components/kubeflow/katib-launcher). - -## Prerequisites - -You have to install the following Python SDK to run these examples: - -- [`kfp`](https://pypi.org/project/kfp/) >= 1.8.4 -- [`kubeflow-katib`](https://pypi.org/project/kubeflow-katib/) >= 0.12.0 - -Check the following examples: - -- Run Pipeline from Jupyter Notebook using Katib Experiment with - [random search algorithm and early stopping](early-stopping.ipynb). - -- Compile compressed YAML definition of the Pipeline using Katib Experiment with - [Kubeflow MPIJob and Horovod training container](mpi-job-horovod.py). diff --git a/samples/contrib/kubeflow-katib/early-stopping.ipynb b/samples/contrib/kubeflow-katib/early-stopping.ipynb deleted file mode 100644 index a8042bd3145..00000000000 --- a/samples/contrib/kubeflow-katib/early-stopping.ipynb +++ /dev/null @@ -1,476 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Kubeflow Pipelines with Katib component\n", - "\n", - "In this notebook you will:\n", - "- Create Katib Experiment using random algorithm.\n", - "- Use median stopping rule as an early stopping algorithm.\n", - "- Use Kubernetes Job with mxnet mnist training container as a Trial template.\n", - "- Create Pipeline to get the optimal hyperparameters.\n", - "\n", - "Reference documentation:\n", - "- https://kubeflow.org/docs/components/katib/experiment/#random-search\n", - "- https://kubeflow.org/docs/components/katib/early-stopping/\n", - "- https://kubeflow.org/docs/pipelines/overview/concepts/component/\n", - "\n", - "**Note**: This Pipeline runs in the multi-user mode. Follow [this guide](https://github.com/kubeflow/katib/tree/master/examples/v1beta1/kubeflow-pipelines#multi-user-pipelines-setup) to give your Notebook access to Kubeflow Pipelines." 
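Once the Notebook has been granted that access, connecting to Kubeflow Pipelines in multi-user mode only requires creating a client scoped to your profile namespace. A minimal sketch, assuming the `kfp` 1.8.x SDK installed in the next cell and the `kubeflow-user-example-com` namespace used throughout this example:

```python
import kfp

# Inside a Kubeflow Notebook the client authenticates with the Pod's
# mounted service account token, so no host or credentials are passed here.
kfp_client = kfp.Client()

# In multi-user mode, API calls are scoped to your profile namespace.
experiments = kfp_client.list_experiments(namespace="kubeflow-user-example-com")
print(experiments)
```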
- ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Collecting kfp==1.8.4\n", - " Downloading kfp-1.8.4.tar.gz (252 kB)\n", - "\u001b[K |████████████████████████████████| 252 kB 5.3 MB/s eta 0:00:01\n", - "\u001b[?25hRequirement already satisfied: absl-py<=0.11,>=0.9 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (0.11.0)\n", - "Requirement already satisfied: PyYAML<6,>=5.3 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (5.4.1)\n", - "Requirement already satisfied: google-cloud-storage<2,>=1.20.0 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (1.42.3)\n", - "Requirement already satisfied: kubernetes<19,>=8.0.0 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (12.0.1)\n", - "Requirement already satisfied: google-api-python-client<2,>=1.7.8 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (1.12.8)\n", - "Requirement already satisfied: google-auth<2,>=1.6.1 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (1.35.0)\n", - "Requirement already satisfied: requests-toolbelt<1,>=0.8.0 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (0.9.1)\n", - "Requirement already satisfied: cloudpickle<2,>=1.3.0 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (1.6.0)\n", - "Requirement already satisfied: kfp-server-api<2.0.0,>=1.1.2 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (1.6.0)\n", - "Requirement already satisfied: jsonschema<4,>=3.0.1 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (3.2.0)\n", - "Requirement already satisfied: tabulate<1,>=0.8.6 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (0.8.9)\n", - "Requirement already satisfied: click<8,>=7.1.1 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (7.1.2)\n", - "Requirement already satisfied: Deprecated<2,>=1.2.7 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (1.2.13)\n", - "Requirement already satisfied: strip-hints<1,>=0.1.8 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (0.1.10)\n", - "Requirement already satisfied: docstring-parser<1,>=0.7.3 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (0.11)\n", - "Requirement already satisfied: kfp-pipeline-spec<0.2.0,>=0.1.10 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (0.1.12)\n", - "Requirement already satisfied: fire<1,>=0.3.1 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (0.4.0)\n", - "Requirement already satisfied: protobuf<4,>=3.13.0 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (3.17.3)\n", - "Requirement already satisfied: uritemplate<4,>=3.0.1 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (3.0.1)\n", - "Collecting pydantic<2,>=1.8.2\n", - " Downloading pydantic-1.8.2-cp38-cp38-manylinux2014_x86_64.whl (13.7 MB)\n", - "\u001b[K |████████████████████████████████| 13.7 MB 22.4 MB/s eta 0:00:01\n", - "\u001b[?25hRequirement already satisfied: typing-extensions<4,>=3.10.0.2 in /opt/conda/lib/python3.8/site-packages (from kfp==1.8.4) (3.10.0.2)\n", - "Requirement already satisfied: six in /opt/conda/lib/python3.8/site-packages (from absl-py<=0.11,>=0.9->kfp==1.8.4) (1.16.0)\n", - "Requirement already satisfied: wrapt<2,>=1.10 in /opt/conda/lib/python3.8/site-packages (from Deprecated<2,>=1.2.7->kfp==1.8.4) (1.13.1)\n", - "Requirement already satisfied: termcolor in /opt/conda/lib/python3.8/site-packages (from fire<1,>=0.3.1->kfp==1.8.4) (1.1.0)\n", - 
"Requirement already satisfied: google-auth-httplib2>=0.0.3 in /opt/conda/lib/python3.8/site-packages (from google-api-python-client<2,>=1.7.8->kfp==1.8.4) (0.1.0)\n", - "Requirement already satisfied: httplib2<1dev,>=0.15.0 in /opt/conda/lib/python3.8/site-packages (from google-api-python-client<2,>=1.7.8->kfp==1.8.4) (0.20.1)\n", - "Requirement already satisfied: google-api-core<2dev,>=1.21.0 in /opt/conda/lib/python3.8/site-packages (from google-api-python-client<2,>=1.7.8->kfp==1.8.4) (1.31.3)\n", - "Requirement already satisfied: pytz in /opt/conda/lib/python3.8/site-packages (from google-api-core<2dev,>=1.21.0->google-api-python-client<2,>=1.7.8->kfp==1.8.4) (2021.1)\n", - "Requirement already satisfied: requests<3.0.0dev,>=2.18.0 in /opt/conda/lib/python3.8/site-packages (from google-api-core<2dev,>=1.21.0->google-api-python-client<2,>=1.7.8->kfp==1.8.4) (2.25.1)\n", - "Requirement already satisfied: googleapis-common-protos<2.0dev,>=1.6.0 in /opt/conda/lib/python3.8/site-packages (from google-api-core<2dev,>=1.21.0->google-api-python-client<2,>=1.7.8->kfp==1.8.4) (1.53.0)\n", - "Requirement already satisfied: packaging>=14.3 in /opt/conda/lib/python3.8/site-packages (from google-api-core<2dev,>=1.21.0->google-api-python-client<2,>=1.7.8->kfp==1.8.4) (20.9)\n", - "Requirement already satisfied: setuptools>=40.3.0 in /opt/conda/lib/python3.8/site-packages (from google-api-core<2dev,>=1.21.0->google-api-python-client<2,>=1.7.8->kfp==1.8.4) (49.6.0.post20210108)\n", - "Requirement already satisfied: rsa<5,>=3.1.4 in /opt/conda/lib/python3.8/site-packages (from google-auth<2,>=1.6.1->kfp==1.8.4) (4.7.2)\n", - "Requirement already satisfied: pyasn1-modules>=0.2.1 in /opt/conda/lib/python3.8/site-packages (from google-auth<2,>=1.6.1->kfp==1.8.4) (0.2.8)\n", - "Requirement already satisfied: cachetools<5.0,>=2.0.0 in /opt/conda/lib/python3.8/site-packages (from google-auth<2,>=1.6.1->kfp==1.8.4) (4.2.4)\n", - "Requirement already satisfied: google-resumable-media<3.0dev,>=1.3.0 in /opt/conda/lib/python3.8/site-packages (from google-cloud-storage<2,>=1.20.0->kfp==1.8.4) (2.0.3)\n", - "Requirement already satisfied: google-cloud-core<3.0dev,>=1.6.0 in /opt/conda/lib/python3.8/site-packages (from google-cloud-storage<2,>=1.20.0->kfp==1.8.4) (2.1.0)\n", - "Requirement already satisfied: google-crc32c<2.0dev,>=1.0 in /opt/conda/lib/python3.8/site-packages (from google-resumable-media<3.0dev,>=1.3.0->google-cloud-storage<2,>=1.20.0->kfp==1.8.4) (1.3.0)\n", - "Requirement already satisfied: pyparsing<3,>=2.4.2 in /opt/conda/lib/python3.8/site-packages (from httplib2<1dev,>=0.15.0->google-api-python-client<2,>=1.7.8->kfp==1.8.4) (2.4.7)\n", - "Requirement already satisfied: attrs>=17.4.0 in /opt/conda/lib/python3.8/site-packages (from jsonschema<4,>=3.0.1->kfp==1.8.4) (21.2.0)\n", - "Requirement already satisfied: pyrsistent>=0.14.0 in /opt/conda/lib/python3.8/site-packages (from jsonschema<4,>=3.0.1->kfp==1.8.4) (0.17.3)\n", - "Requirement already satisfied: urllib3>=1.15 in /opt/conda/lib/python3.8/site-packages (from kfp-server-api<2.0.0,>=1.1.2->kfp==1.8.4) (1.26.5)\n", - "Requirement already satisfied: python-dateutil in /opt/conda/lib/python3.8/site-packages (from kfp-server-api<2.0.0,>=1.1.2->kfp==1.8.4) (2.8.1)\n", - "Requirement already satisfied: certifi in /opt/conda/lib/python3.8/site-packages (from kfp-server-api<2.0.0,>=1.1.2->kfp==1.8.4) (2021.5.30)\n", - "Requirement already satisfied: requests-oauthlib in /opt/conda/lib/python3.8/site-packages (from 
kubernetes<19,>=8.0.0->kfp==1.8.4) (1.3.0)\n", - "Requirement already satisfied: websocket-client!=0.40.0,!=0.41.*,!=0.42.*,>=0.32.0 in /opt/conda/lib/python3.8/site-packages (from kubernetes<19,>=8.0.0->kfp==1.8.4) (1.0.1)\n", - "Requirement already satisfied: pyasn1<0.5.0,>=0.4.6 in /opt/conda/lib/python3.8/site-packages (from pyasn1-modules>=0.2.1->google-auth<2,>=1.6.1->kfp==1.8.4) (0.4.8)\n", - "Requirement already satisfied: idna<3,>=2.5 in /opt/conda/lib/python3.8/site-packages (from requests<3.0.0dev,>=2.18.0->google-api-core<2dev,>=1.21.0->google-api-python-client<2,>=1.7.8->kfp==1.8.4) (2.10)\n", - "Requirement already satisfied: chardet<5,>=3.0.2 in /opt/conda/lib/python3.8/site-packages (from requests<3.0.0dev,>=2.18.0->google-api-core<2dev,>=1.21.0->google-api-python-client<2,>=1.7.8->kfp==1.8.4) (4.0.0)\n", - "Requirement already satisfied: wheel in /opt/conda/lib/python3.8/site-packages (from strip-hints<1,>=0.1.8->kfp==1.8.4) (0.36.2)\n", - "Requirement already satisfied: oauthlib>=3.0.0 in /opt/conda/lib/python3.8/site-packages (from requests-oauthlib->kubernetes<19,>=8.0.0->kfp==1.8.4) (3.1.1)\n", - "Building wheels for collected packages: kfp\n", - " Building wheel for kfp (setup.py) ... \u001b[?25ldone\n", - "\u001b[?25h Created wheel for kfp: filename=kfp-1.8.4-py3-none-any.whl size=349033 sha256=a8b852f8cac0b84d67aae100bf9364aad7f18eddadd49471a7be43345ddead9b\n", - " Stored in directory: /home/jovyan/.cache/pip/wheels/49/2b/fc/79b1a2787e4f73f9ee6b1c7fe88f7f42b82133c40baca38aa9\n", - "Successfully built kfp\n", - "Installing collected packages: pydantic, kfp\n", - " Attempting uninstall: kfp\n", - " Found existing installation: kfp 1.6.3\n", - " Uninstalling kfp-1.6.3:\n", - " Successfully uninstalled kfp-1.6.3\n", - "Successfully installed kfp-1.8.4 pydantic-1.8.2\n", - "Collecting kubeflow-katib==0.12.0\n", - " Downloading kubeflow_katib-0.12.0-py3-none-any.whl (89 kB)\n", - "\u001b[K |████████████████████████████████| 89 kB 7.3 MB/s eta 0:00:01\n", - "\u001b[?25hRequirement already satisfied: setuptools>=21.0.0 in /opt/conda/lib/python3.8/site-packages (from kubeflow-katib==0.12.0) (49.6.0.post20210108)\n", - "Requirement already satisfied: urllib3>=1.15.1 in /opt/conda/lib/python3.8/site-packages (from kubeflow-katib==0.12.0) (1.26.5)\n", - "Requirement already satisfied: certifi>=14.05.14 in /opt/conda/lib/python3.8/site-packages (from kubeflow-katib==0.12.0) (2021.5.30)\n", - "Requirement already satisfied: kubernetes>=12.0.0 in /opt/conda/lib/python3.8/site-packages (from kubeflow-katib==0.12.0) (12.0.1)\n", - "Requirement already satisfied: six>=1.10 in /opt/conda/lib/python3.8/site-packages (from kubeflow-katib==0.12.0) (1.16.0)\n", - "Requirement already satisfied: google-auth>=1.0.1 in /opt/conda/lib/python3.8/site-packages (from kubernetes>=12.0.0->kubeflow-katib==0.12.0) (1.35.0)\n", - "Requirement already satisfied: requests-oauthlib in /opt/conda/lib/python3.8/site-packages (from kubernetes>=12.0.0->kubeflow-katib==0.12.0) (1.3.0)\n", - "Requirement already satisfied: pyyaml>=3.12 in /opt/conda/lib/python3.8/site-packages (from kubernetes>=12.0.0->kubeflow-katib==0.12.0) (5.4.1)\n", - "Requirement already satisfied: requests in /opt/conda/lib/python3.8/site-packages (from kubernetes>=12.0.0->kubeflow-katib==0.12.0) (2.25.1)\n", - "Requirement already satisfied: python-dateutil>=2.5.3 in /opt/conda/lib/python3.8/site-packages (from kubernetes>=12.0.0->kubeflow-katib==0.12.0) (2.8.1)\n", - "Requirement already satisfied: 
websocket-client!=0.40.0,!=0.41.*,!=0.42.*,>=0.32.0 in /opt/conda/lib/python3.8/site-packages (from kubernetes>=12.0.0->kubeflow-katib==0.12.0) (1.0.1)\n", - "Requirement already satisfied: rsa<5,>=3.1.4 in /opt/conda/lib/python3.8/site-packages (from google-auth>=1.0.1->kubernetes>=12.0.0->kubeflow-katib==0.12.0) (4.7.2)\n", - "Requirement already satisfied: pyasn1-modules>=0.2.1 in /opt/conda/lib/python3.8/site-packages (from google-auth>=1.0.1->kubernetes>=12.0.0->kubeflow-katib==0.12.0) (0.2.8)\n", - "Requirement already satisfied: cachetools<5.0,>=2.0.0 in /opt/conda/lib/python3.8/site-packages (from google-auth>=1.0.1->kubernetes>=12.0.0->kubeflow-katib==0.12.0) (4.2.4)\n", - "Requirement already satisfied: pyasn1<0.5.0,>=0.4.6 in /opt/conda/lib/python3.8/site-packages (from pyasn1-modules>=0.2.1->google-auth>=1.0.1->kubernetes>=12.0.0->kubeflow-katib==0.12.0) (0.4.8)\n", - "Requirement already satisfied: chardet<5,>=3.0.2 in /opt/conda/lib/python3.8/site-packages (from requests->kubernetes>=12.0.0->kubeflow-katib==0.12.0) (4.0.0)\n", - "Requirement already satisfied: idna<3,>=2.5 in /opt/conda/lib/python3.8/site-packages (from requests->kubernetes>=12.0.0->kubeflow-katib==0.12.0) (2.10)\n", - "Requirement already satisfied: oauthlib>=3.0.0 in /opt/conda/lib/python3.8/site-packages (from requests-oauthlib->kubernetes>=12.0.0->kubeflow-katib==0.12.0) (3.1.1)\n", - "Installing collected packages: kubeflow-katib\n", - "Successfully installed kubeflow-katib-0.12.0\n" - ] - } - ], - "source": [ - "# Install required packages (Kubeflow Pipelines and Katib SDK).\n", - "!pip install kfp==1.8.4\n", - "!pip install kubeflow-katib==0.12.0" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp\n", - "import kfp.dsl as dsl\n", - "from kfp import components\n", - "\n", - "from kubeflow.katib import ApiClient\n", - "from kubeflow.katib import V1beta1ExperimentSpec\n", - "from kubeflow.katib import V1beta1AlgorithmSpec\n", - "from kubeflow.katib import V1beta1EarlyStoppingSpec\n", - "from kubeflow.katib import V1beta1EarlyStoppingSetting\n", - "from kubeflow.katib import V1beta1ObjectiveSpec\n", - "from kubeflow.katib import V1beta1ParameterSpec\n", - "from kubeflow.katib import V1beta1FeasibleSpace\n", - "from kubeflow.katib import V1beta1TrialTemplate\n", - "from kubeflow.katib import V1beta1TrialParameterSpec" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Define an Experiment\n", - "\n", - "You have to create an Experiment object before deploying it. This Experiment is similar to [this](https://github.com/kubeflow/katib/blob/master/examples/v1beta1/early-stopping/median-stop.yaml) YAML." 
- ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "# Experiment name and namespace.\n", - "experiment_name = \"median-stop\"\n", - "experiment_namespace = \"kubeflow-user-example-com\"\n", - "\n", - "# Trial count specification.\n", - "max_trial_count = 18\n", - "max_failed_trial_count = 3\n", - "parallel_trial_count = 2\n", - "\n", - "# Objective specification.\n", - "objective=V1beta1ObjectiveSpec(\n", - " type=\"maximize\",\n", - " goal= 0.99,\n", - " objective_metric_name=\"Validation-accuracy\",\n", - " additional_metric_names=[\n", - " \"Train-accuracy\"\n", - " ]\n", - ")\n", - "\n", - "# Algorithm specification.\n", - "algorithm=V1beta1AlgorithmSpec(\n", - " algorithm_name=\"random\",\n", - ")\n", - "\n", - "# Early Stopping specification.\n", - "early_stopping=V1beta1EarlyStoppingSpec(\n", - " algorithm_name=\"medianstop\",\n", - " algorithm_settings=[\n", - " V1beta1EarlyStoppingSetting(\n", - " name=\"min_trials_required\",\n", - " value=\"2\"\n", - " )\n", - " ]\n", - ")\n", - "\n", - "\n", - "# Experiment search space.\n", - "# In this example we tune learning rate, number of layer and optimizer.\n", - "# Learning rate has bad feasible space to show more early stopped Trials.\n", - "parameters=[\n", - " V1beta1ParameterSpec(\n", - " name=\"lr\",\n", - " parameter_type=\"double\",\n", - " feasible_space=V1beta1FeasibleSpace(\n", - " min=\"0.01\",\n", - " max=\"0.3\"\n", - " ),\n", - " ),\n", - " V1beta1ParameterSpec(\n", - " name=\"num-layers\",\n", - " parameter_type=\"int\",\n", - " feasible_space=V1beta1FeasibleSpace(\n", - " min=\"2\",\n", - " max=\"5\"\n", - " ),\n", - " ),\n", - " V1beta1ParameterSpec(\n", - " name=\"optimizer\",\n", - " parameter_type=\"categorical\",\n", - " feasible_space=V1beta1FeasibleSpace(\n", - " list=[\n", - " \"sgd\", \n", - " \"adam\",\n", - " \"ftrl\"\n", - " ]\n", - " ),\n", - " ),\n", - "]\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Define a Trial template\n", - "\n", - "In this example, the Trial's Worker is the Kubernetes Job." 
- ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "# JSON template specification for the Trial's Worker Kubernetes Job.\n", - "trial_spec={\n", - " \"apiVersion\": \"batch/v1\",\n", - " \"kind\": \"Job\",\n", - " \"spec\": {\n", - " \"template\": {\n", - " \"metadata\": {\n", - " \"annotations\": {\n", - " \"sidecar.istio.io/inject\": \"false\"\n", - " }\n", - " },\n", - " \"spec\": {\n", - " \"containers\": [\n", - " {\n", - " \"name\": \"training-container\",\n", - " \"image\": \"docker.io/kubeflowkatib/mxnet-mnist:v1beta1-45c5727\",\n", - " \"command\": [\n", - " \"python3\",\n", - " \"/opt/mxnet-mnist/mnist.py\",\n", - " \"--batch-size=64\",\n", - " \"--lr=${trialParameters.learningRate}\",\n", - " \"--num-layers=${trialParameters.numberLayers}\",\n", - " \"--optimizer=${trialParameters.optimizer}\"\n", - " ]\n", - " }\n", - " ],\n", - " \"restartPolicy\": \"Never\"\n", - " }\n", - " }\n", - " }\n", - "}\n", - "\n", - "# Configure parameters for the Trial template.\n", - "# We set the retain parameter to \"True\" to not clean-up the Trial Job's Kubernetes Pods.\n", - "trial_template=V1beta1TrialTemplate(\n", - " retain=True,\n", - " primary_container_name=\"training-container\",\n", - " trial_parameters=[\n", - " V1beta1TrialParameterSpec(\n", - " name=\"learningRate\",\n", - " description=\"Learning rate for the training model\",\n", - " reference=\"lr\"\n", - " ),\n", - " V1beta1TrialParameterSpec(\n", - " name=\"numberLayers\",\n", - " description=\"Number of training model layers\",\n", - " reference=\"num-layers\"\n", - " ),\n", - " V1beta1TrialParameterSpec(\n", - " name=\"optimizer\",\n", - " description=\"Training model optimizer (sdg, adam or ftrl)\",\n", - " reference=\"optimizer\"\n", - " ),\n", - " ],\n", - " trial_spec=trial_spec\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Define an Experiment specification\n", - "\n", - "Create an Experiment specification from the above parameters." - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "experiment_spec=V1beta1ExperimentSpec(\n", - " max_trial_count=max_trial_count,\n", - " max_failed_trial_count=max_failed_trial_count,\n", - " parallel_trial_count=parallel_trial_count,\n", - " objective=objective,\n", - " algorithm=algorithm,\n", - " early_stopping=early_stopping,\n", - " parameters=parameters,\n", - " trial_template=trial_template\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Create a Pipeline using Katib component\n", - "\n", - "The best hyperparameters are printed after Experiment is finished.\n", - "The Experiment is not deleted after the Pipeline is finished." 
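The same `experiment_spec` can also be submitted directly with the Katib SDK instead of going through the Pipelines launcher component, which is useful when you only want the Experiment and not a Pipeline run. A minimal sketch, assuming `kubeflow-katib` 0.12.x and the `experiment_name`, `experiment_namespace`, and `experiment_spec` objects defined above:

```python
from kubernetes.client import V1ObjectMeta
from kubeflow.katib import KatibClient, V1beta1Experiment

# Wrap the spec defined above in a full Experiment object.
experiment = V1beta1Experiment(
    api_version="kubeflow.org/v1beta1",
    kind="Experiment",
    metadata=V1ObjectMeta(name=experiment_name, namespace=experiment_namespace),
    spec=experiment_spec,
)

# Create the Experiment in the cluster; progress can be followed in the Katib UI.
KatibClient().create_experiment(experiment, namespace=experiment_namespace)
```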
- ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "# Get the Katib launcher.\n", - "katib_experiment_launcher_op = components.load_component_from_url(\n", - " \"https://raw.githubusercontent.com/kubeflow/pipelines/master/components/kubeflow/katib-launcher/component.yaml\")\n", - "\n", - "@dsl.pipeline(\n", - " name=\"Launch Katib early stopping Experiment\",\n", - " description=\"An example to launch Katib Experiment with early stopping\"\n", - ")\n", - "\n", - "def median_stop():\n", - "\n", - " # Katib launcher component.\n", - " # Experiment Spec should be serialized to a valid Kubernetes object.\n", - " op = katib_experiment_launcher_op(\n", - " experiment_name=experiment_name,\n", - " experiment_namespace=experiment_namespace,\n", - " experiment_spec=ApiClient().sanitize_for_serialization(experiment_spec),\n", - " experiment_timeout_minutes=60,\n", - " delete_finished_experiment=False)\n", - "\n", - " # Output container to print the results.\n", - " op_out = dsl.ContainerOp(\n", - " name=\"best-hp\",\n", - " image=\"library/bash:4.4.23\",\n", - " command=[\"sh\", \"-c\"],\n", - " arguments=[\"echo Best HyperParameters: %s\" % op.output],\n", - " )" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Run the Kubeflow Pipeline\n", - "\n", - "You can check the Katib Experiment info in the Katib UI." - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/opt/conda/lib/python3.8/site-packages/kfp/dsl/_container_op.py:1257: FutureWarning: Please create reusable components instead of constructing ContainerOp instances directly. Reusable components are shareable, portable and have compatibility and support guarantees. Please see the documentation: https://www.kubeflow.org/docs/pipelines/sdk/component-development/#writing-your-component-definition-file The components can be created manually (or, in case of python, using kfp.components.create_component_from_func or func_to_container_op) and then loaded using kfp.components.load_component_from_file, load_component_from_uri or load_component_from_text: https://kubeflow-pipelines.readthedocs.io/en/stable/source/kfp.components.html#kfp.components.load_component_from_file\n", - " warnings.warn(\n" - ] - }, - { - "data": { - "text/html": [ - "Experiment details." - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "Run details." 
- ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "RunPipelineResult(run_id=bb6689a9-1efa-4fd7-bcb1-09f47bf1e932)" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Run the Kubeflow Pipeline in the user's namespace.\n", - "kfp.Client().create_run_from_pipeline_func(median_stop, namespace=experiment_namespace, arguments={})" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.10" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/samples/contrib/kubeflow-katib/mpi-job-horovod.py b/samples/contrib/kubeflow-katib/mpi-job-horovod.py deleted file mode 100644 index 86171289d1a..00000000000 --- a/samples/contrib/kubeflow-katib/mpi-job-horovod.py +++ /dev/null @@ -1,232 +0,0 @@ -# Kubeflow Pipeline with Katib component. - -# In this example you will create Katib Experiment using Bayesian optimization algorithm. -# As a Trial template you will use Kubeflow MPIJob with Horovod MNIST training container. -# After that, you will compile a Kubeflow Pipeline with your Katib Experiment. -# Use Kubeflow Pipelines UI to upload the Pipeline and create the Experiment and Run. - -# This Experiment is similar to this: https://github.com/kubeflow/katib/blob/master/examples/v1beta1/kubeflow-training-operator/mpijob-horovod.yaml -# Check the training container source code here: https://github.com/kubeflow/mpi-operator/tree/master/examples/horovod. - -# Note: To run this example, your Kubernetes cluster should run MPIJob operator. -# Follow this guide to install MPIJob on your cluster: https://www.kubeflow.org/docs/components/training/mpi/ - -import kfp -import kfp.dsl as dsl -from kfp import components - -from kubeflow.katib import ApiClient -from kubeflow.katib import V1beta1ExperimentSpec -from kubeflow.katib import V1beta1AlgorithmSpec -from kubeflow.katib import V1beta1AlgorithmSetting -from kubeflow.katib import V1beta1ObjectiveSpec -from kubeflow.katib import V1beta1ParameterSpec -from kubeflow.katib import V1beta1FeasibleSpace -from kubeflow.katib import V1beta1TrialTemplate -from kubeflow.katib import V1beta1TrialParameterSpec - - -@dsl.pipeline( - name="Launch Katib MPIJob Experiment", - description="An example to launch Katib Experiment with MPIJob" -) -def horovod_mnist_hpo( - experiment_name: str = "mpi-horovod-mnist", - experiment_namespace: str = "kubeflow-user-example-com", -): - - # Trial count specification. - max_trial_count = 6 - max_failed_trial_count = 3 - parallel_trial_count = 2 - - # Objective specification. - objective = V1beta1ObjectiveSpec( - type="minimize", - goal=0.01, - objective_metric_name="loss", - ) - - # Algorithm specification. - algorithm = V1beta1AlgorithmSpec( - algorithm_name="bayesianoptimization", - algorithm_settings=[ - V1beta1AlgorithmSetting( - name="random_state", - value="10" - ) - ] - ) - - # Experiment search space. - # In this example we tune learning rate and number of training steps. 
- parameters = [ - V1beta1ParameterSpec( - name="lr", - parameter_type="double", - feasible_space=V1beta1FeasibleSpace( - min="0.001", - max="0.003" - ), - ), - V1beta1ParameterSpec( - name="num-steps", - parameter_type="int", - feasible_space=V1beta1FeasibleSpace( - min="50", - max="150", - step="10" - ), - ), - ] - - # JSON template specification for the Trial's Worker Kubeflow MPIJob. - trial_spec = { - "apiVersion": "kubeflow.org/v1", - "kind": "MPIJob", - "spec": { - "slotsPerWorker": 1, - "cleanPodPolicy": "Running", - "mpiReplicaSpecs": { - "Launcher": { - "replicas": 1, - "template": { - "metadata": { - "annotations": { - "sidecar.istio.io/inject": "false" - } - }, - "spec": { - "containers": [ - { - "image": "docker.io/kubeflow/mpi-horovod-mnist", - "name": "mpi-launcher", - "command": [ - "mpirun" - ], - "args": [ - "-np", - "2", - "--allow-run-as-root", - "-bind-to", - "none", - "-map-by", - "slot", - "-x", - "LD_LIBRARY_PATH", - "-x", - "PATH", - "-mca", - "pml", - "ob1", - "-mca", - "btl", - "^openib", - "python", - "/examples/tensorflow_mnist.py", - "--lr", - "${trialParameters.learningRate}", - "--num-steps", - "${trialParameters.numberSteps}" - ], - "resources": { - "limits": { - "cpu": "500m", - "memory": "2Gi" - } - } - } - ] - } - } - }, - "Worker": { - "replicas": 2, - "template": { - "metadata": { - "annotations": { - "sidecar.istio.io/inject": "false" - } - }, - "spec": { - "containers": [ - { - "image": "docker.io/kubeflow/mpi-horovod-mnist", - "name": "mpi-worker", - "resources": { - "limits": { - "cpu": "500m", - "memory": "4Gi" - } - } - } - ] - } - } - } - } - } - } - - # Configure parameters for the Trial template. - trial_template = V1beta1TrialTemplate( - primary_pod_labels={ - "mpi-job-role": "launcher" - }, - primary_container_name="mpi-launcher", - success_condition='status.conditions.#(type=="Succeeded")#|#(status=="True")#', - failure_condition='status.conditions.#(type=="Failed")#|#(status=="True")#', - trial_parameters=[ - V1beta1TrialParameterSpec( - name="learningRate", - description="Learning rate for the training model", - reference="lr" - ), - V1beta1TrialParameterSpec( - name="numberSteps", - description="Number of training steps", - reference="num-steps" - ), - ], - trial_spec=trial_spec - ) - - # Create Experiment specification. - experiment_spec = V1beta1ExperimentSpec( - max_trial_count=max_trial_count, - max_failed_trial_count=max_failed_trial_count, - parallel_trial_count=parallel_trial_count, - objective=objective, - algorithm=algorithm, - parameters=parameters, - trial_template=trial_template - ) - - # Get the Katib launcher. - # Load component from the URL or from the file. - katib_experiment_launcher_op = components.load_component_from_url( - "https://raw.githubusercontent.com/kubeflow/pipelines/master/components/kubeflow/katib-launcher/component.yaml") - # katib_experiment_launcher_op = components.load_component_from_file( - # "../../../components/kubeflow/katib-launcher/component.yaml" - # ) - - # Katib launcher component. - # Experiment Spec should be serialized to a valid Kubernetes object. - # The Experiment is deleted after the Pipeline is finished. - op = katib_experiment_launcher_op( - experiment_name=experiment_name, - experiment_namespace=experiment_namespace, - experiment_spec=ApiClient().sanitize_for_serialization(experiment_spec), - experiment_timeout_minutes=60) - - # Output container to print the results. 
- dsl.ContainerOp( - name="best-hp", - image="library/bash:4.4.23", - command=["sh", "-c"], - arguments=["echo Best HyperParameters: %s" % op.output], - ) - - -if __name__ == "__main__": - kfp.compiler.Compiler().compile(horovod_mnist_hpo, __file__ + ".tar.gz") diff --git a/samples/contrib/local_development_quickstart/Local Development Quickstart.ipynb b/samples/contrib/local_development_quickstart/Local Development Quickstart.ipynb deleted file mode 100644 index 067d4aa6b77..00000000000 --- a/samples/contrib/local_development_quickstart/Local Development Quickstart.ipynb +++ /dev/null @@ -1,618 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "# Copyright 2019 The Kubeflow Authors. All Rights Reserved.\n", - "#\n", - "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", - "# you may not use this file except in compliance with the License.\n", - "# You may obtain a copy of the License at\n", - "#\n", - "# http://www.apache.org/licenses/LICENSE-2.0\n", - "#\n", - "# Unless required by applicable law or agreed to in writing, software\n", - "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", - "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", - "# See the License for the specific language governing permissions and\n", - "# limitations under the License." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# KubeFlow Pipeline local development quickstart\n", - "\n", - "In this notebook, we will demo: \n", - "\n", - "* Author components with the lightweight method and ContainerOp based on existing images.\n", - "* Author pipelines.\n", - "\n", - "**Note: Make sure that you have docker installed in the local environment**" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Setup" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": { - "tags": [ - "parameters" - ] - }, - "outputs": [], - "source": [ - "# PROJECT_ID is used to construct the docker image registry. We will use Google Container Registry, \n", - "# but any other accessible registry works as well. \n", - "PROJECT_ID='Your-Gcp-Project-Id'" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Install Pipeline SDK\n", - "!pip3 install kfp --upgrade\n", - "!mkdir -p tmp/pipelines" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Part 1\n", - "# Two ways to author a component to list blobs in a GCS bucket\n", - "A pipeline is composed of one or more components. In this section, you will build a single component that lists the blobs in a GCS bucket. Then you build a pipeline that consists of this component. There are two ways to author a component. In the following sections we will go through each of them." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 1. Create a lightweight python component from a Python function." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 1.1 Define component function\n", - "The requirements for the component function:\n", - "* The function must be stand-alone.\n", - "* The function can only import packages that are available in the base image.\n", - "* If the function operates on numbers, the parameters must have type hints. Supported types are `int`, `float`, `bool`. 
Everything else is passed as `str`, that is, string.\n", - "* To build a component with multiple output values, use Python’s `typing.NamedTuple` type hint syntax." - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "def list_blobs(bucket_name: str) -> str:\n", - " '''Lists all the blobs in the bucket.'''\n", - " import subprocess\n", - "\n", - " subprocess.call(['pip', 'install', '--upgrade', 'google-cloud-storage'])\n", - " from google.cloud import storage\n", - " storage_client = storage.Client()\n", - " bucket = storage_client.get_bucket(bucket_name)\n", - " list_blobs_response = bucket.list_blobs()\n", - " blobs = ','.join([blob.name for blob in list_blobs_response])\n", - " print(blobs)\n", - " return blobs" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 1.2 Create a lightweight Python component" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.components as comp\n", - "\n", - "# Converts the function to a lightweight Python component.\n", - "list_blobs_op = comp.func_to_container_op(list_blobs)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 1.3 Define pipeline\n", - "Note that when accessing google cloud file system, you need to make sure the pipeline can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details." - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.dsl as dsl\n", - "\n", - "# Defines the pipeline.\n", - "@dsl.pipeline(name='List GCS blobs', description='Lists GCS blobs.')\n", - "def pipeline_func(bucket_name):\n", - " list_blobs_task = list_blobs_op(bucket_name)\n", - " # Use the following commented code instead if you want to use GSA key for authentication.\n", - " #\n", - " # from kfp.gcp import use_gcp_secret\n", - " # list_blobs_task = list_blobs_op(bucket_name).apply(use_gcp_secret('user-gcp-sa'))\n", - " # Same for below.", - "\n", - "# Compile the pipeline to a file.\n", - "import kfp.compiler as compiler\n", - "compiler.Compiler().compile(pipeline_func, 'tmp/pipelines/list_blobs.pipeline.tar.gz')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 2. Wrap an existing Docker container image using `ContainerOp`" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 2.1 Create a Docker container\n", - "Create your own container image that includes your program. If your component creates some outputs to be fed as inputs to the downstream components, each separate output must be written as a string to a separate local text file inside the container image. For example, if a trainer component needs to output the trained model path, it can write the path to a local file `/output.txt`. The string written to an output file cannot be too big. If it is too big (>> 100 kB), it is recommended to save the output to an external persistent storage and pass the storage path to the next component.\n", - "\n", - "Start by entering the value of your Google Cloud Platform Project ID." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The following cell creates a file `app.py` that contains a Python script. 
The script takes a GCS bucket name as an input argument, gets the lists of blobs in that bucket, prints the list of blobs and also writes them to an output file." - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "%%bash\n", - "\n", - "# Create folders if they don't exist.\n", - "mkdir -p tmp/components/list-gcs-blobs\n", - "\n", - "# Create the Python file that lists GCS blobs.\n", - "cat > ./tmp/components/list-gcs-blobs/app.py < ./tmp/components/list-gcs-blobs/Dockerfile < ./tmp/components/list-gcs-blobs/build_image.sh < ./tmp/components/view-input/app.py < ./tmp/components/view-input/Dockerfile < ./tmp/components/view-input/build_image.sh <`) with the correct values for your environment." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Set your GCP project ID and the zone where you want to create the Kubeflow deployment\n", - "%env PROJECT=\n", - "%env ZONE=\n", - "\n", - "# google cloud storage bucket\n", - "%env GCP_BUCKET=gs://\n", - "\n", - "# Use the following kfctl configuration file for authentication with \n", - "# Cloud IAP (recommended):\n", - "uri = \"https://raw.githubusercontent.com/kubeflow/manifests/v0.7-branch/kfdef/kfctl_gcp_iap.0.7.0.yaml\"\n", - "uri = uri.strip()\n", - "%env CONFIG_URI=$uri\n", - "\n", - "# For using Cloud IAP for authentication, create environment variables\n", - "# from the OAuth client ID and secret that you obtained earlier:\n", - "%env CLIENT_ID=\n", - "%env CLIENT_SECRET=\n", - "\n", - "# Set KF_NAME to the name of your Kubeflow deployment. You also use this\n", - "# value as directory name when creating your configuration directory. \n", - "# For example, your deployment name can be 'my-kubeflow' or 'kf-test'.\n", - "%env KF_NAME=\n", - "\n", - "# Set up name of the service account that should be created and used\n", - "# while creating the Kubeflow cluster\n", - "%env SA_NAME=" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Configure gcloud and add kfctl to your path." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "! gcloud config set project ${PROJECT}\n", - "\n", - "! gcloud config set compute/zone ${ZONE}\n", - "\n", - "\n", - "# Set the path to the base directory where you want to store one or more \n", - "# Kubeflow deployments. For example, /opt/.\n", - "# Here we use the current working directory as the base directory\n", - "# Then set the Kubeflow application directory for this deployment.\n", - "\n", - "import os\n", - "base = os.getcwd()\n", - "%env BASE_DIR=$base\n", - "\n", - "kf_dir = os.getenv('BASE_DIR') + \"/\" + os.getenv('KF_NAME')\n", - "%env KF_DIR=$kf_dir\n", - "\n", - "# The following command is optional. It adds the kfctl binary to your path.\n", - "# If you don't add kfctl to your path, you must use the full path\n", - "# each time you run kfctl. In this example, the kfctl file is present in\n", - "# the current directory\n", - "new_path = os.getenv('PATH') + \":\" + os.getenv('BASE_DIR')\n", - "%env PATH=$new_path" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Create service account\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "! gcloud iam service-accounts create ${SA_NAME}\n", - "! 
gcloud projects add-iam-policy-binding ${PROJECT} \\\n", - " --member serviceAccount:${SA_NAME}@${PROJECT}.iam.gserviceaccount.com \\\n", - " --role 'roles/owner'\n", - "! gcloud iam service-accounts keys create key.json \\\n", - " --iam-account ${SA_NAME}@${PROJECT}.iam.gserviceaccount.com" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Set GOOGLE_APPLICATION_CREDENTIALS" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "key_path = os.getenv('BASE_DIR') + \"/\" + 'key.json'\n", - "%env GOOGLE_APPLICATION_CREDENTIALS=$key_path" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Setup and deploy Kubeflow" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "! mkdir -p ${KF_DIR}\n", - "%cd $kf_dir\n", - "! kfctl apply -V -f ${CONFIG_URI}" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Install Kubeflow Pipelines SDK" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%%capture\n", - "\n", - "# Install the SDK (Uncomment the code if the SDK is not installed before)\n", - "! pip3 install 'kfp>=0.1.36' --quiet --user" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Sanity Check: Check the ingress created" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "! kubectl -n istio-system describe ingress" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Access the Kubeflow cluster at **`https://.endpoints..cloud.goog/`**\n", - "\n", - "Note that it may take up to 15-20 mins for the above url to be functional." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.3" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/samples/contrib/mnist/01_Lightweight_Python_Components.ipynb b/samples/contrib/mnist/01_Lightweight_Python_Components.ipynb deleted file mode 100644 index 8474f95f797..00000000000 --- a/samples/contrib/mnist/01_Lightweight_Python_Components.ipynb +++ /dev/null @@ -1,537 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Copyright 2019 The Kubeflow Authors. 
All Rights Reserved.\n", - "#\n", - "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", - "# you may not use this file except in compliance with the License.\n", - "# You may obtain a copy of the License at\n", - "#\n", - "# http://www.apache.org/licenses/LICENSE-2.0\n", - "#\n", - "# Unless required by applicable law or agreed to in writing, software\n", - "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", - "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", - "# See the License for the specific language governing permissions and\n", - "# limitations under the License.\n", - "# ==============================================================================" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Lightweight Python Components\n", - "\n", - "To build a component, define a standalone python function and then call `kfp.components.func_to_container_op(func)` to convert your function to a component that can be used in a pipeline.\n", - "\n", - "There are several requirements for the function:\n", - "\n", - "- The function should be standalone. It should not use any code declared outside of the function definition. Any imports should be added inside the main function. Any helper functions must be defined inside the main function.\n", - "- The function can only import packages that are available in the base image. If you need to import a package that's not available, you can try to find a container image that already includes the required packages.\n", - "- **If the function operates on numbers, the parameters need to have type hints. Supported types are [int, float, bool]. Everything else is passed as string.**\n", - "- To build a component with multiple output values, use the typing.NamedTuple type hint syntax: `NamedTuple('MyFunctionOutputs', [('output_name_1', type), ('output_name_2', float)])`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp\n", - "import kfp.gcp as gcp\n", - "import kfp.dsl as dsl\n", - "import kfp.compiler as compiler\n", - "import kfp.components as comp\n", - "\n", - "import kubernetes as k8s" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "parameter" - ] - }, - "outputs": [], - "source": [ - "# Required Parameters\n", - "PROJECT_ID=''\n", - "GCS_BUCKET='gs://'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Create client\n", - "\n", - "If you run this notebook **outside** of a Kubeflow cluster, run the following command:\n", - "- `host`: The URL of your Kubeflow Pipelines instance, for example \"https://``.endpoints.``.cloud.goog/pipeline\"\n", - "- `client_id`: The client ID used by Identity-Aware Proxy\n", - "- `other_client_id`: The client ID used to obtain the auth codes and refresh tokens.\n", - "- `other_client_secret`: The client secret used to obtain the auth codes and refresh tokens.\n", - "\n", - "```python\n", - "client = kfp.Client(host, client_id, other_client_id, other_client_secret)\n", - "```\n", - "\n", - "If you run this notebook **within** a Kubeflow cluster, run the following command:\n", - "```python\n", - "client = kfp.Client()\n", - "```\n", - "\n", - "You'll need to create OAuth client ID credentials of type `Other` to get `other_client_id` and `other_client_secret`. 
Learn more about [creating OAuth credentials](\n", - "https://cloud.google.com/iap/docs/authentication-howto#authenticating_from_a_desktop_app)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "parameter" - ] - }, - "outputs": [], - "source": [ - "# Optional Parameters, but required for running outside Kubeflow cluster\n", - "\n", - "# The host for 'AI Platform Pipelines' ends with 'pipelines.googleusercontent.com'\n", - "# The host for pipeline endpoint of 'full Kubeflow deployment' ends with '/pipeline'\n", - "# Examples are:\n", - "# https://7c021d0340d296aa-dot-us-central2.pipelines.googleusercontent.com\n", - "# https://kubeflow.endpoints.kubeflow-pipeline.cloud.goog/pipeline\n", - "HOST = ''\n", - "\n", - "# For 'full Kubeflow deployment' on GCP, the endpoint is usually protected through IAP, therefore the following \n", - "# will be needed to access the endpoint.\n", - "CLIENT_ID = ''\n", - "OTHER_CLIENT_ID = ''\n", - "OTHER_CLIENT_SECRET = ''" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# This is to ensure the proper access token is present to reach the end point for 'AI Platform Pipelines'\n", - "# If you are not working with 'AI Platform Pipelines', this step is not necessary\n", - "! gcloud auth print-access-token" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Create kfp client\n", - "in_cluster = True\n", - "try:\n", - " k8s.config.load_incluster_config()\n", - "except:\n", - " in_cluster = False\n", - " pass\n", - "\n", - "if in_cluster:\n", - " client = kfp.Client()\n", - "else:\n", - " if HOST.endswith('googleusercontent.com'):\n", - " CLIENT_ID = None\n", - " OTHER_CLIENT_ID = None\n", - " OTHER_CLIENT_SECRET = None\n", - "\n", - " client = kfp.Client(host=HOST, \n", - " client_id=CLIENT_ID,\n", - " other_client_id=OTHER_CLIENT_ID, \n", - " other_client_secret=OTHER_CLIENT_SECRET)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Start with a simple function" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "#Define a Python function\n", - "def add(a: float, b: float) -> float:\n", - " '''Calculates sum of two arguments'''\n", - " return a + b" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Convert the function to a pipeline operation" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "add_op = comp.func_to_container_op(add)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## A more complex example, with multiple outputs" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Advanced function\n", - "# Demonstrates imports, helper functions and multiple outputs\n", - "from typing import NamedTuple\n", - "\n", - "def my_divmod(dividend: float, \n", - " divisor: float,\n", - " ) -> NamedTuple('MyDivmodOutput', [('quotient', float), ('remainder', float), \n", - " ('mlpipeline_ui_metadata', 'UI_metadata'), \n", - " ('mlpipeline_metrics', 'Metrics')]):\n", - " \n", - " '''Divides two numbers and calculate the quotient and remainder'''\n", - " \n", - " #Imports inside a component function:\n", - " import numpy as np\n", - "\n", - " #This function demonstrates how to use nested functions inside a component function:\n", - " def 
divmod_helper(dividend, divisor):\n", - " return np.divmod(dividend, divisor)\n", - "\n", - " (quotient, remainder) = divmod_helper(dividend, divisor)\n", - "\n", - " import json\n", - " \n", - " # Exports a sample tensorboard:\n", - " metadata = {\n", - " 'outputs' : [{\n", - " 'type': 'tensorboard',\n", - " 'source': 'gs://ml-pipeline-dataset/tensorboard-train',\n", - " }]\n", - " }\n", - "\n", - " # Exports two sample metrics:\n", - " metrics = {\n", - " 'metrics': [{\n", - " 'name': 'quotient',\n", - " 'numberValue': float(quotient),\n", - " },{\n", - " 'name': 'remainder',\n", - " 'numberValue': float(remainder),\n", - " }]}\n", - "\n", - " from collections import namedtuple\n", - " divmod_output = namedtuple('MyDivmodOutput', \n", - " ['quotient', 'remainder', 'mlpipeline_ui_metadata', 'mlpipeline_metrics'])\n", - " return divmod_output(quotient, remainder, json.dumps(metadata), json.dumps(metrics))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "my_divmod(100, 7)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "divmod_op = comp.func_to_container_op(func=my_divmod, \n", - " base_image=\"tensorflow/tensorflow:1.15.0-py3\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Define the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp.dsl as dsl\n", - "@dsl.pipeline(\n", - " name='Calculation pipeline',\n", - " description='A toy pipeline that performs arithmetic calculations.'\n", - ")\n", - "def calc_pipeline(\n", - " a='a',\n", - " b='7',\n", - " c='17',\n", - "):\n", - " #Passing pipeline parameter and a constant value as operation arguments\n", - " add_task = add_op(a, 4) #Returns a dsl.ContainerOp class instance. \n", - " \n", - " #Passing a task output reference as operation arguments\n", - " #For an operation with a single return value, the output reference can be accessed using `task.output` or `task.outputs['output_name']` syntax\n", - " divmod_task = divmod_op(add_task.output, b)\n", - "\n", - " #For an operation with a multiple return values, the output references can be accessed using `task.outputs['output_name']` syntax\n", - " result_task = add_op(divmod_task.outputs['quotient'], c)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Submit the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "pipeline_func = calc_pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "experiment_name = 'python-functions'\n", - "\n", - "#Specify pipeline argument values\n", - "arguments = {'a': '7', 'b': '8'}\n", - "\n", - "run_name = pipeline_func.__name__ + ' run'\n", - "\n", - "# Submit pipeline directly from pipeline function\n", - "run_result = client.create_run_from_pipeline_func(pipeline_func, \n", - " experiment_name=experiment_name, \n", - " run_name=run_name, \n", - " arguments=arguments)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Train a keras model\n", - "\n", - "This following steps trains a neural network model to classify hand writing images using the [MNIST dataset](http://yann.lecun.com/exdb/mnist/)." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "def mnist_train(model_file: str, bucket: str) -> str:\n", - "\n", - " from datetime import datetime\n", - " import tensorflow as tf\n", - " \n", - " model = tf.keras.models.Sequential([\n", - " tf.keras.layers.Flatten(input_shape=(28, 28)),\n", - " tf.keras.layers.Dense(512, activation=tf.nn.relu),\n", - " tf.keras.layers.Dropout(0.2),\n", - " tf.keras.layers.Dense(10, activation=tf.nn.softmax)\n", - " ])\n", - " \n", - " model.compile(optimizer='adam',\n", - " loss='sparse_categorical_crossentropy',\n", - " metrics=['accuracy'])\n", - " \n", - " print(model.summary()) \n", - " \n", - " mnist = tf.keras.datasets.mnist\n", - " (x_train, y_train),(x_test, y_test) = mnist.load_data()\n", - " x_train, x_test = x_train / 255.0, x_test / 255.0\n", - "\n", - " callbacks = [\n", - " tf.keras.callbacks.TensorBoard(log_dir=bucket + '/logs/' + datetime.now().date().__str__()),\n", - " # Interrupt training if `val_loss` stops improving for over 2 epochs\n", - " tf.keras.callbacks.EarlyStopping(patience=2, monitor='val_loss'),\n", - " ]\n", - " \n", - " model.fit(x_train, y_train, batch_size=32, epochs=5, callbacks=callbacks,\n", - " validation_data=(x_test, y_test))\n", - " \n", - " \n", - " model.save(model_file)\n", - " \n", - " from tensorflow import gfile\n", - " \n", - " gcs_path = bucket + \"/\" + model_file\n", - " \n", - " if gfile.Exists(gcs_path):\n", - " gfile.Remove(gcs_path)\n", - " \n", - " gfile.Copy(model_file, gcs_path)\n", - " \n", - " return gcs_path" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": false - }, - "outputs": [], - "source": [ - "mnist_train(model_file='mnist_model.h5', \n", - " bucket=GCS_BUCKET)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "model_train_op = comp.func_to_container_op(func=mnist_train, \n", - " base_image=\"tensorflow/tensorflow:1.15.0-py3\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Define and submit the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "@dsl.pipeline(\n", - " name='Mnist pipeline',\n", - " description='A toy pipeline that performs mnist model training.'\n", - ")\n", - "def mnist_pipeline(\n", - " model_file: str = 'mnist_model.h5', \n", - " bucket: str = GCS_BUCKET\n", - "):\n", - " model_train_op(model_file=model_file, bucket=bucket).apply(gcp.use_gcp_secret('user-gcp-sa'))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Submit a pipeline run" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "pipeline_func = mnist_pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "experiment_name = 'minist_kubeflow'\n", - "\n", - "arguments = {\"model_file\":\"mnist_model.h5\",\n", - " \"bucket\":GCS_BUCKET}\n", - "\n", - "run_name = pipeline_func.__name__ + ' run'\n", - "\n", - "# Submit pipeline directly from pipeline function\n", - "run_result = client.create_run_from_pipeline_func(pipeline_func, \n", - " experiment_name=experiment_name, \n", - " run_name=run_name, \n", - " arguments=arguments)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "**As an alternative, you can compile the pipeline into a package.** The 
compiled pipeline can be easily shared and reused by others to run the pipeline.\n", - "\n", - "```python\n", - "pipeline_filename = pipeline_func.__name__ + '.pipeline.zip'\n", - "compiler.Compiler().compile(pipeline_func, pipeline_filename)\n", - "\n", - "experiment = client.create_experiment('python-functions-mnist')\n", - "\n", - "run_result = client.run_pipeline(\n", - " experiment_id=experiment.id, \n", - " job_name=run_name, \n", - " pipeline_package_path=pipeline_filename, \n", - " params=arguments)\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "virtualPython35", - "language": "python", - "name": "virtualpython35" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.5.7" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/samples/contrib/mnist/02_Local_Development_with_Docker_Image_Components.ipynb b/samples/contrib/mnist/02_Local_Development_with_Docker_Image_Components.ipynb deleted file mode 100644 index e2d93f82477..00000000000 --- a/samples/contrib/mnist/02_Local_Development_with_Docker_Image_Components.ipynb +++ /dev/null @@ -1,562 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Copyright 2019 The Kubeflow Authors. All Rights Reserved.\n", - "#\n", - "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", - "# you may not use this file except in compliance with the License.\n", - "# You may obtain a copy of the License at\n", - "#\n", - "# http://www.apache.org/licenses/LICENSE-2.0\n", - "#\n", - "# Unless required by applicable law or agreed to in writing, software\n", - "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", - "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", - "# See the License for the specific language governing permissions and\n", - "# limitations under the License.\n", - "# ==============================================================================" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Local development and docker image components\n", - "\n", - "- This section assumes that you have already created a program to perform the task required in a particular step of your ML workflow. This example uses an MNIST model training script.\n", - "\n", - "- Then, this example packages your program as a Docker container image.\n", - "\n", - "- Then, this example calls kfp.components.ContainerOp to convert it to a Kubeflow pipeline component." 
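As a rough sketch of the pattern this notebook builds up to: in the v1 SDK the class lives in `kfp.dsl`, and a step is declared by naming the image, the command to run in it, and the files the container writes its outputs to. The image, command, and output path below are placeholders, not the exact values used later in this notebook.

```python
import kfp.dsl as dsl

@dsl.pipeline(
    name='Docker image component pipeline',
    description='Wraps an existing container image as a single pipeline step.'
)
def container_op_pipeline(model_file: str = 'mnist_model.h5',
                          bucket: str = 'gs://my-bucket'):  # placeholder bucket
    # Each step is a ContainerOp: an image plus the command and arguments to run in it.
    train_step = dsl.ContainerOp(
        name='mnist-train',
        image='gcr.io/my-project/mnist_training_kf_pipeline:latest',  # placeholder image
        command=['python', '/app.py'],
        arguments=['--model_file', model_file, '--bucket', bucket],
        # Outputs are read back from files the container writes.
        file_outputs={'gcs_model_path': '/output.txt'},
    )
    # Downstream steps would consume train_step.outputs['gcs_model_path'] as an input.
```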
- ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Note: Ensure that you have Docker installed, if you want to build the image locally, by running the following command:\n", - " \n", - "`which docker`\n", - " \n", - "The result should be something like:\n", - "\n", - "`/usr/bin/docker`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp\n", - "import kfp.gcp as gcp\n", - "import kfp.dsl as dsl\n", - "import kfp.compiler as compiler\n", - "import kfp.components as comp\n", - "import datetime\n", - "\n", - "import kubernetes as k8s" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [ - "parameter" - ] - }, - "outputs": [], - "source": [ - "# Required Parameters\n", - "PROJECT_ID=''\n", - "GCS_BUCKET='gs://'" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Create client\n", - "\n", - "If you run this notebook **outside** of a Kubeflow cluster, run the following command:\n", - "- `host`: The URL of your Kubeflow Pipelines instance, for example \"https://``.endpoints.``.cloud.goog/pipeline\"\n", - "- `client_id`: The client ID used by Identity-Aware Proxy\n", - "- `other_client_id`: The client ID used to obtain the auth codes and refresh tokens.\n", - "- `other_client_secret`: The client secret used to obtain the auth codes and refresh tokens.\n", - "\n", - "```python\n", - "client = kfp.Client(host, client_id, other_client_id, other_client_secret)\n", - "```\n", - "\n", - "If you run this notebook **within** a Kubeflow cluster, run the following command:\n", - "```python\n", - "client = kfp.Client()\n", - "```\n", - "\n", - "You'll need to create OAuth client ID credentials of type `Other` to get `other_client_id` and `other_client_secret`. Learn more about [creating OAuth credentials](\n", - "https://cloud.google.com/iap/docs/authentication-howto#authenticating_from_a_desktop_app)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Optional Parameters, but required for running outside Kubeflow cluster\n", - "\n", - "# The host for 'AI Platform Pipelines' ends with 'pipelines.googleusercontent.com'\n", - "# The host for pipeline endpoint of 'full Kubeflow deployment' ends with '/pipeline'\n", - "# Examples are:\n", - "# https://7c021d0340d296aa-dot-us-central2.pipelines.googleusercontent.com\n", - "# https://kubeflow.endpoints.kubeflow-pipeline.cloud.goog/pipeline\n", - "HOST = ''\n", - "\n", - "# For 'full Kubeflow deployment' on GCP, the endpoint is usually protected through IAP, therefore the following \n", - "# will be needed to access the endpoint.\n", - "CLIENT_ID = ''\n", - "OTHER_CLIENT_ID = ''\n", - "OTHER_CLIENT_SECRET = ''" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# This is to ensure the proper access token is present to reach the end point for 'AI Platform Pipelines'\n", - "# If you are not working with 'AI Platform Pipelines', this step is not necessary\n", - "! 
gcloud auth print-access-token" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Create kfp client\n", - "in_cluster = True\n", - "try:\n", - " k8s.config.load_incluster_config()\n", - "except:\n", - " in_cluster = False\n", - " pass\n", - "\n", - "if in_cluster:\n", - " client = kfp.Client()\n", - "else:\n", - " if HOST.endswith('googleusercontent.com'):\n", - " CLIENT_ID = None\n", - " OTHER_CLIENT_ID = None\n", - " OTHER_CLIENT_SECRET = None\n", - "\n", - " client = kfp.Client(host=HOST, \n", - " client_id=CLIENT_ID,\n", - " other_client_id=OTHER_CLIENT_ID, \n", - " other_client_secret=OTHER_CLIENT_SECRET)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Wrap an existing Docker container image using `ContainerOp`" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Writing the program code\n", - "\n", - "The following cell creates a file `app.py` that contains a Python script. The script downloads MNIST dataset, trains a Neural Network based classification model, writes the training log and exports the trained model to Google Cloud Storage.\n", - "\n", - "Your component can create outputs that the downstream components can use as inputs. Each output must be a string and the container image must write each output to a separate local text file. For example, if a training component needs to output the path of the trained model, the component writes the path into a local file, such as `/output.txt`." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%%bash\n", - "\n", - "# Create folders if they don't exist.\n", - "mkdir -p tmp/components/mnist_training\n", - "\n", - "# Create the Python file that lists GCS blobs.\n", - "cat > ./tmp/components/mnist_training/app.py < ./tmp/components/mnist_training/Dockerfile <= 0.7** and exploring **kaniko option**, you need to ensure that valid credentials are created within your notebook's namespace.\n", - "- With Kubeflow version >= 0.7, the credential is supposed to be copied automatically while creating notebook through `Configurations`, which doesn't work properly at the time of creating this notebook. 
\n", - "- You can also add credentials to the new namespace by either [copying credentials from an existing Kubeflow namespace, or by creating a new service account](https://www.kubeflow.org/docs/gke/authentication/#kubeflow-v0-6-and-before-gcp-service-account-key-as-secret).\n", - "- The following cell demonstrates how to copy the default secret to your own namespace.\n", - "\n", - "```bash\n", - "%%bash\n", - "\n", - "NAMESPACE=\n", - "SOURCE=kubeflow\n", - "NAME=user-gcp-sa\n", - "SECRET=$(kubectl get secrets \\${NAME} -n \\${SOURCE} -o jsonpath=\"{.data.\\${NAME}\\.json}\" | base64 -D)\n", - "kubectl create -n \\${NAMESPACE} secret generic \\${NAME} --from-literal=\"\\${NAME}.json=\\${SECRET}\"\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "IMAGE_NAME=\"mnist_training_kf_pipeline\"\n", - "TAG=\"latest\" # \"v_$(date +%Y%m%d_%H%M%S)\"\n", - "\n", - "GCR_IMAGE=\"gcr.io/{PROJECT_ID}/{IMAGE_NAME}:{TAG}\".format(\n", - " PROJECT_ID=PROJECT_ID,\n", - " IMAGE_NAME=IMAGE_NAME,\n", - " TAG=TAG\n", - ")\n", - "\n", - "APP_FOLDER='./tmp/components/mnist_training/'" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# In the following, for the purpose of demonstration\n", - "# Cloud Build is choosen for 'AI Platform Pipelines'\n", - "# kaniko is choosen for 'full Kubeflow deployment'\n", - "\n", - "if HOST.endswith('googleusercontent.com'):\n", - " # kaniko is not pre-installed with 'AI Platform Pipelines'\n", - " import subprocess\n", - " # ! gcloud builds submit --tag ${IMAGE_NAME} ${APP_FOLDER}\n", - " cmd = ['gcloud', 'builds', 'submit', '--tag', GCR_IMAGE, APP_FOLDER]\n", - " build_log = (subprocess.run(cmd, stdout=subprocess.PIPE).stdout[:-1].decode('utf-8'))\n", - " print(build_log)\n", - " \n", - "else:\n", - " if kfp.__version__ <= '0.1.36':\n", - " # kfp with version 0.1.36+ introduce broken change that will make the following code not working\n", - " import subprocess\n", - " \n", - " builder = kfp.containers._container_builder.ContainerBuilder(\n", - " gcs_staging=GCS_BUCKET + \"/kfp_container_build_staging\"\n", - " )\n", - "\n", - " kfp.containers.build_image_from_working_dir(\n", - " image_name=GCR_IMAGE,\n", - " working_dir=APP_FOLDER,\n", - " builder=builder\n", - " )\n", - " else:\n", - " raise(\"Please build the docker image use either [Docker] or [Cloud Build]\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### If you want to use docker to build the image\n", - "Run the following in a cell\n", - "```bash\n", - "%%bash -s \"{PROJECT_ID}\"\n", - "\n", - "IMAGE_NAME=\"mnist_training_kf_pipeline\"\n", - "TAG=\"latest\" # \"v_$(date +%Y%m%d_%H%M%S)\"\n", - "\n", - "# Create script to build docker image and push it.\n", - "cat > ./tmp/components/mnist_training/build_image.sh <`.endpoints.``.cloud.goog/pipeline\"\n", - "- `client_id`: The client ID used by Identity-Aware Proxy\n", - "- `other_client_id`: The client ID used to obtain the auth codes and refresh tokens.\n", - "- `other_client_secret`: The client secret used to obtain the auth codes and refresh tokens.\n", - "\n", - "```python\n", - "client = kfp.Client(host, client_id, other_client_id, other_client_secret)\n", - "```\n", - "\n", - "If you run this notebook **within** a Kubeflow cluster, run the following command:\n", - "```python\n", - "client = kfp.Client()\n", - "```\n", - "\n", - "You'll need to create OAuth client ID credentials of type `Other` 
to get `other_client_id` and `other_client_secret`. Learn more about [creating OAuth credentials](\n", - "https://cloud.google.com/iap/docs/authentication-howto#authenticating_from_a_desktop_app)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Optional Parameters, but required for running outside Kubeflow cluster\n", - "\n", - "# The host for 'AI Platform Pipelines' ends with 'pipelines.googleusercontent.com'\n", - "# The host for pipeline endpoint of 'full Kubeflow deployment' ends with '/pipeline'\n", - "# Examples are:\n", - "# https://7c021d0340d296aa-dot-us-central2.pipelines.googleusercontent.com\n", - "# https://kubeflow.endpoints.kubeflow-pipeline.cloud.goog/pipeline\n", - "HOST = ''\n", - "\n", - "# For 'full Kubeflow deployment' on GCP, the endpoint is usually protected through IAP, therefore the following \n", - "# will be needed to access the endpoint.\n", - "CLIENT_ID = ''\n", - "OTHER_CLIENT_ID = ''\n", - "OTHER_CLIENT_SECRET = ''" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# This is to ensure the proper access token is present to reach the end point for 'AI Platform Pipelines'\n", - "# If you are not working with 'AI Platform Pipelines', this step is not necessary\n", - "! gcloud auth print-access-token" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Create kfp client\n", - "in_cluster = True\n", - "try:\n", - " k8s.config.load_incluster_config()\n", - "except:\n", - " in_cluster = False\n", - " pass\n", - "\n", - "if in_cluster:\n", - " client = kfp.Client()\n", - "else:\n", - " if HOST.endswith('googleusercontent.com'):\n", - " CLIENT_ID = None\n", - " OTHER_CLIENT_ID = None\n", - " OTHER_CLIENT_SECRET = None\n", - "\n", - " client = kfp.Client(host=HOST, \n", - " client_id=CLIENT_ID,\n", - " other_client_id=OTHER_CLIENT_ID, \n", - " other_client_secret=OTHER_CLIENT_SECRET)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Writing the program code" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The following cell creates a file `app.py` that contains a Python script. The script downloads MNIST dataset, trains a Neural Network based classification model, writes the training log and exports the trained model to Google Cloud Storage.\n", - "\n", - "Your component can create outputs that the downstream components can use as inputs. Each output must be a string and the container image must write each output to a separate local text file. For example, if a training component needs to output the path of the trained model, the component writes the path into a local file, such as `/output.txt`." 
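A stripped-down sketch of what that contract looks like from inside the container, mirroring the `app.py` the next cell generates (argument parsing, then writing the produced value to a local text file). The argument names and the training step itself are illustrative placeholders.

```python
# app.py (sketch): the program side of a container component.
import argparse

def main():
    parser = argparse.ArgumentParser(description='Example training step')
    parser.add_argument('--model_file', help='Local filename for the trained model')
    parser.add_argument('--bucket', help='GCS bucket to copy the model into')
    args = parser.parse_args()

    # ... train the model and copy it to GCS here (omitted in this sketch) ...
    gcs_path = args.bucket + '/' + args.model_file

    # The component's output: written to a local text file so the pipeline system
    # can pick it up (mapped via file_outputs in the ContainerOp definition).
    with open('/output.txt', 'w') as f:
        f.write(gcs_path)

if __name__ == '__main__':
    main()
```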
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%%bash\n", - "\n", - "# Create folders if they don't exist.\n", - "mkdir -p tmp/reuse_components/mnist_training\n", - "\n", - "# Create the Python file that lists GCS blobs.\n", - "cat > ./tmp/reuse_components/mnist_training/app.py < ./tmp/reuse_components/mnist_training/Dockerfile <= 0.7** and exploring **kaniko option**, you need to ensure that valid credentials are created within your notebook's namespace.\n", - "- With Kubeflow version >= 0.7, the credential is supposed to be copied automatically while creating notebook through `Configurations`, which doesn't work properly at the time of creating this notebook. \n", - "- You can also add credentials to the new namespace by either [copying credentials from an existing Kubeflow namespace, or by creating a new service account](https://www.kubeflow.org/docs/gke/authentication/#kubeflow-v0-6-and-before-gcp-service-account-key-as-secret).\n", - "- The following cell demonstrates how to copy the default secret to your own namespace.\n", - "\n", - "```bash\n", - "%%bash\n", - "\n", - "NAMESPACE=\n", - "SOURCE=kubeflow\n", - "NAME=user-gcp-sa\n", - "SECRET=$(kubectl get secrets \\${NAME} -n \\${SOURCE} -o jsonpath=\"{.data.\\${NAME}\\.json}\" | base64 -D)\n", - "kubectl create -n \\${NAMESPACE} secret generic \\${NAME} --from-literal=\"\\${NAME}.json=\\${SECRET}\"\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "IMAGE_NAME=\"mnist_training_kf_pipeline\"\n", - "TAG=\"latest\" # \"v_$(date +%Y%m%d_%H%M%S)\"\n", - "\n", - "GCR_IMAGE=\"gcr.io/{PROJECT_ID}/{IMAGE_NAME}:{TAG}\".format(\n", - " PROJECT_ID=PROJECT_ID,\n", - " IMAGE_NAME=IMAGE_NAME,\n", - " TAG=TAG\n", - ")\n", - "\n", - "APP_FOLDER='./tmp/reuse_components/mnist_training/'" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# In the following, for the purpose of demonstration\n", - "# Cloud Build is choosen for 'AI Platform Pipelines'\n", - "# kaniko is choosen for 'full Kubeflow deployment'\n", - "\n", - "if HOST.endswith('googleusercontent.com'):\n", - " # kaniko is not pre-installed with 'AI Platform Pipelines'\n", - " import subprocess\n", - " # ! 
gcloud builds submit --tag ${IMAGE_NAME} ${APP_FOLDER}\n", - " cmd = ['gcloud', 'builds', 'submit', '--tag', GCR_IMAGE, APP_FOLDER]\n", - " build_log = (subprocess.run(cmd, stdout=subprocess.PIPE).stdout[:-1].decode('utf-8'))\n", - " print(build_log)\n", - " \n", - "else:\n", - " if kfp.__version__ <= '0.1.36':\n", - " # kfp with version 0.1.36+ introduce broken change that will make the following code not working\n", - " import subprocess\n", - " \n", - " builder = kfp.containers._container_builder.ContainerBuilder(\n", - " gcs_staging=GCS_BUCKET + \"/kfp_container_build_staging\"\n", - " )\n", - "\n", - " kfp.containers.build_image_from_working_dir(\n", - " image_name=GCR_IMAGE,\n", - " working_dir=APP_FOLDER,\n", - " builder=builder\n", - " )\n", - " else:\n", - " raise(\"Please build the docker image use either [Docker] or [Cloud Build]\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### If you want to use docker to build the image\n", - "Run the following in a cell\n", - "```bash\n", - "%%bash -s \"{PROJECT_ID}\"\n", - "\n", - "IMAGE_NAME=\"mnist_training_kf_pipeline\"\n", - "TAG=\"latest\" # \"v_$(date +%Y%m%d_%H%M%S)\"\n", - "\n", - "# Create script to build docker image and push it.\n", - "cat > ./tmp/reuse_components/mnist_training/build_image.sh < mnist_component.yaml <`.endpoints.``.cloud.goog/pipeline\"\n", - "- `client_id`: The client ID used by Identity-Aware Proxy\n", - "- `other_client_id`: The client ID used to obtain the auth codes and refresh tokens.\n", - "- `other_client_secret`: The client secret used to obtain the auth codes and refresh tokens.\n", - "\n", - "```python\n", - "client = kfp.Client(host, client_id, other_client_id, other_client_secret)\n", - "```\n", - "\n", - "If you run this notebook **within** a Kubeflow cluster, run the following command:\n", - "```python\n", - "client = kfp.Client()\n", - "```\n", - "\n", - "You'll need to create OAuth client ID credentials of type `Other` to get `other_client_id` and `other_client_secret`. Learn more about [creating OAuth credentials](\n", - "https://cloud.google.com/iap/docs/authentication-howto#authenticating_from_a_desktop_app)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Optional Parameters, but required for running outside Kubeflow cluster\n", - "\n", - "# The host for 'AI Platform Pipelines' ends with 'pipelines.googleusercontent.com'\n", - "# The host for pipeline endpoint of 'full Kubeflow deployment' ends with '/pipeline'\n", - "# Examples are:\n", - "# https://7c021d0340d296aa-dot-us-central2.pipelines.googleusercontent.com\n", - "# https://kubeflow.endpoints.kubeflow-pipeline.cloud.goog/pipeline\n", - "HOST = ''\n", - "\n", - "# For 'full Kubeflow deployment' on GCP, the endpoint is usually protected through IAP, therefore the following \n", - "# will be needed to access the endpoint.\n", - "CLIENT_ID = ''\n", - "OTHER_CLIENT_ID = ''\n", - "OTHER_CLIENT_SECRET = ''" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# This is to ensure the proper access token is present to reach the end point for 'AI Platform Pipelines'\n", - "# If you are not working with 'AI Platform Pipelines', this step is not necessary\n", - "! 
gcloud auth print-access-token" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Create kfp client\n", - "in_cluster = True\n", - "try:\n", - " k8s.config.load_incluster_config()\n", - "except:\n", - " in_cluster = False\n", - " pass\n", - "\n", - "if in_cluster:\n", - " client = kfp.Client()\n", - "else:\n", - " if HOST.endswith('googleusercontent.com'):\n", - " CLIENT_ID = None\n", - " OTHER_CLIENT_ID = None\n", - " OTHER_CLIENT_SECRET = None\n", - "\n", - " client = kfp.Client(host=HOST, \n", - " client_id=CLIENT_ID,\n", - " other_client_id=OTHER_CLIENT_ID, \n", - " other_client_secret=OTHER_CLIENT_SECRET)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Build reusable components" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Writing the program code" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The following cell creates a file `app.py` that contains a Python script. The script downloads MNIST dataset, trains a Neural Network based classification model, writes the training log and exports the trained model to Google Cloud Storage.\n", - "\n", - "Your component can create outputs that the downstream components can use as inputs. Each output must be a string and the container image must write each output to a separate local text file. For example, if a training component needs to output the path of the trained model, the component writes the path into a local file, such as `/output.txt`." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%%bash\n", - "\n", - "# Create folders if they don't exist.\n", - "mkdir -p tmp/reuse_components_pipeline/mnist_training\n", - "\n", - "# Create the Python file that lists GCS blobs.\n", - "cat > ./tmp/reuse_components_pipeline/mnist_training/app.py < ./tmp/reuse_components_pipeline/mnist_training/Dockerfile <= 0.7** and exploring **kaniko option**, you need to ensure that valid credentials are created within your notebook's namespace.\n", - "- With Kubeflow version >= 0.7, the credential is supposed to be copied automatically while creating notebook through `Configurations`, which doesn't work properly at the time of creating this notebook. 
\n", - "- You can also add credentials to the new namespace by either [copying credentials from an existing Kubeflow namespace, or by creating a new service account](https://www.kubeflow.org/docs/gke/authentication/#kubeflow-v0-6-and-before-gcp-service-account-key-as-secret).\n", - "- The following cell demonstrates how to copy the default secret to your own namespace.\n", - "\n", - "```bash\n", - "%%bash\n", - "\n", - "NAMESPACE=\n", - "SOURCE=kubeflow\n", - "NAME=user-gcp-sa\n", - "SECRET=$(kubectl get secrets \\${NAME} -n \\${SOURCE} -o jsonpath=\"{.data.\\${NAME}\\.json}\" | base64 -D)\n", - "kubectl create -n \\${NAMESPACE} secret generic \\${NAME} --from-literal=\"\\${NAME}.json=\\${SECRET}\"\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "IMAGE_NAME=\"mnist_training_kf_pipeline\"\n", - "TAG=\"latest\" # \"v_$(date +%Y%m%d_%H%M%S)\"\n", - "\n", - "GCR_IMAGE=\"gcr.io/{PROJECT_ID}/{IMAGE_NAME}:{TAG}\".format(\n", - " PROJECT_ID=PROJECT_ID,\n", - " IMAGE_NAME=IMAGE_NAME,\n", - " TAG=TAG\n", - ")\n", - "\n", - "APP_FOLDER='./tmp/reuse_components_pipeline/mnist_training/'" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# In the following, for the purpose of demonstration\n", - "# Cloud Build is choosen for 'AI Platform Pipelines'\n", - "# kaniko is choosen for 'full Kubeflow deployment'\n", - "\n", - "if HOST.endswith('googleusercontent.com'):\n", - " # kaniko is not pre-installed with 'AI Platform Pipelines'\n", - " import subprocess\n", - " # ! gcloud builds submit --tag ${IMAGE_NAME} ${APP_FOLDER}\n", - " cmd = ['gcloud', 'builds', 'submit', '--tag', GCR_IMAGE, APP_FOLDER]\n", - " build_log = (subprocess.run(cmd, stdout=subprocess.PIPE).stdout[:-1].decode('utf-8'))\n", - " print(build_log)\n", - " \n", - "else:\n", - " if kfp.__version__ <= '0.1.36':\n", - " # kfp with version 0.1.36+ introduce broken change that will make the following code not working\n", - " import subprocess\n", - " \n", - " builder = kfp.containers._container_builder.ContainerBuilder(\n", - " gcs_staging=GCS_BUCKET + \"/kfp_container_build_staging\"\n", - " )\n", - "\n", - " kfp.containers.build_image_from_working_dir(\n", - " image_name=GCR_IMAGE,\n", - " working_dir=APP_FOLDER,\n", - " builder=builder\n", - " )\n", - " else:\n", - " raise(\"Please build the docker image use either [Docker] or [Cloud Build]\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### If you want to use docker to build the image\n", - "Run the following in a cell\n", - "```bash\n", - "%%bash -s \"{PROJECT_ID}\"\n", - "\n", - "IMAGE_NAME=\"mnist_training_kf_pipeline\"\n", - "TAG=\"latest\" # \"v_$(date +%Y%m%d_%H%M%S)\"\n", - "\n", - "# Create script to build docker image and push it.\n", - "cat > ./tmp/components/mnist_training/build_image.sh < mnist_pipeline_component.yaml < str:\n", - "\n", - " model_name = model_name.split(\"/\")[-1]\n", - " version = version.split(\"/\")[-1]\n", - " \n", - " import googleapiclient.discovery\n", - " \n", - " def predict(project, model, data, version=None):\n", - " \"\"\"Run predictions on a list of instances.\n", - "\n", - " Args:\n", - " project: (str), project where the Cloud ML Engine Model is deployed.\n", - " model: (str), model name.\n", - " data: ([[any]]), list of input instances, where each input instance is a\n", - " list of attributes.\n", - " version: str, version of the model to target.\n", - "\n", - " Returns:\n", 
- " Mapping[str: any]: dictionary of prediction results defined by the model.\n", - " \"\"\"\n", - "\n", - " service = googleapiclient.discovery.build('ml', 'v1')\n", - " name = 'projects/{}/models/{}'.format(project, model)\n", - "\n", - " if version is not None:\n", - " name += '/versions/{}'.format(version)\n", - "\n", - " response = service.projects().predict(\n", - " name=name, body={\n", - " 'instances': data\n", - " }).execute()\n", - "\n", - " if 'error' in response:\n", - " raise RuntimeError(response['error'])\n", - "\n", - " return response['predictions']\n", - "\n", - " import tensorflow as tf\n", - " import json\n", - " \n", - " mnist = tf.keras.datasets.mnist\n", - " (x_train, y_train),(x_test, y_test) = mnist.load_data()\n", - " x_train, x_test = x_train / 255.0, x_test / 255.0\n", - "\n", - " result = predict(\n", - " project=project_id,\n", - " model=model_name,\n", - " data=x_test[0:2].tolist(),\n", - " version=version)\n", - " print(result)\n", - " \n", - " return json.dumps(result)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# # Test the function with already deployed version\n", - "# deployment_test(\n", - "# project_id=PROJECT_ID,\n", - "# model_name=\"mnist\",\n", - "# version='ver_bb1ebd2a06ab7f321ad3db6b3b3d83e6' # previous deployed version for testing\n", - "# )" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "deployment_test_op = comp.func_to_container_op(\n", - " func=deployment_test, \n", - " base_image=\"tensorflow/tensorflow:1.15.0-py3\",\n", - " packages_to_install=[\"google-api-python-client==1.7.8\"])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Create your workflow as a Python function" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Define your pipeline as a Python function. ` @kfp.dsl.pipeline` is a required decoration, and must include `name` and `description` properties. Then compile the pipeline function. After the compilation is completed, a pipeline file is created." 
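The compile-and-run path sketched below follows the same v1 SDK calls used elsewhere in these notebooks; the echo component, experiment name, and filename are placeholders, and `client` is the `kfp.Client` created in the earlier cells.

```python
import kfp.dsl as dsl
import kfp.compiler as compiler
import kfp.components as comp

# A trivial lightweight component so the pipeline has at least one step.
def echo(msg: str) -> str:
    print(msg)
    return msg

echo_op = comp.func_to_container_op(echo, base_image='python:3.7')

@dsl.pipeline(
    name='Example pipeline',
    description='Minimal pipeline used to illustrate compilation.'
)
def example_pipeline(msg: str = 'hello'):
    echo_task = echo_op(msg)

# Compiling produces a package file that can be uploaded, shared, and re-run later.
pipeline_filename = example_pipeline.__name__ + '.pipeline.zip'
compiler.Compiler().compile(example_pipeline, pipeline_filename)

# Running the compiled package against an experiment (client from earlier cells):
# experiment = client.create_experiment('example-experiment')  # hypothetical name
# run = client.run_pipeline(
#     experiment_id=experiment.id,
#     job_name='example_pipeline run',
#     pipeline_package_path=pipeline_filename,
#     params={'msg': 'hello'},
# )
```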
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Define the pipeline\n", - "@dsl.pipeline(\n", - " name='Mnist pipeline',\n", - " description='A toy pipeline that performs mnist model training.'\n", - ")\n", - "def mnist_reuse_component_deploy_pipeline(\n", - " project_id: str = PROJECT_ID,\n", - " model_path: str = 'mnist_model', \n", - " bucket: str = GCS_BUCKET\n", - "):\n", - " train_task = mnist_train_op(\n", - " model_path=model_path, \n", - " bucket=bucket\n", - " ).apply(gcp.use_gcp_secret('user-gcp-sa'))\n", - " \n", - " deploy_task = deploy(\n", - " project_id=project_id,\n", - " model_uri=train_task.outputs['gcs_model_path'],\n", - " model_id=\"mnist\", \n", - " runtime_version=\"1.14\",\n", - " python_version=\"3.5\"\n", - " ).apply(gcp.use_gcp_secret('user-gcp-sa')) \n", - " \n", - " deploy_test_task = deployment_test_op(\n", - " project_id=project_id,\n", - " model_name=deploy_task.outputs[\"model_name\"], \n", - " version=deploy_task.outputs[\"version_name\"],\n", - " ).apply(gcp.use_gcp_secret('user-gcp-sa'))\n", - " \n", - " return True" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Submit a pipeline run" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "pipeline_func = mnist_reuse_component_deploy_pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "experiment_name = 'minist_kubeflow'\n", - "\n", - "arguments = {\"model_path\":\"mnist_model\",\n", - " \"bucket\":GCS_BUCKET}\n", - "\n", - "run_name = pipeline_func.__name__ + ' run'\n", - "\n", - "# Submit pipeline directly from pipeline function\n", - "run_result = client.create_run_from_pipeline_func(pipeline_func, \n", - " experiment_name=experiment_name, \n", - " run_name=run_name, \n", - " arguments=arguments)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "**As an alternative, you can compile the pipeline into a package.** The compiled pipeline can be easily shared and reused by others to run the pipeline.\n", - "\n", - "```python\n", - "pipeline_filename = pipeline_func.__name__ + '.pipeline.zip'\n", - "compiler.Compiler().compile(pipeline_func, pipeline_filename)\n", - "\n", - "experiment = client.create_experiment('python-functions-mnist')\n", - "\n", - "run_result = client.run_pipeline(\n", - " experiment_id=experiment.id, \n", - " job_name=run_name, \n", - " pipeline_package_path=pipeline_filename, \n", - " params=arguments)\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "virtualPython35", - "language": "python", - "name": "virtualpython35" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.5.7" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/samples/contrib/mnist/README.md b/samples/contrib/mnist/README.md deleted file mode 100644 index 9ac4b79c085..00000000000 --- a/samples/contrib/mnist/README.md +++ /dev/null @@ -1,63 +0,0 @@ -# Kubeflow Pipeline Tutorial -[`Kubeflow Pipelines`](https://github.com/kubeflow/pipelines) is a platform for building and deploying portable, -scalable machine learning (ML) 
pipelines or workflows based on Docker containers. -The `Kubeflow Pipelines` platform consists of: -- A user interface for managing and tracking experiments, jobs, and runs. -- An engine for scheduling multi-step ML workflows. -- An SDK for defining and manipulating pipelines and components. -- Notebooks for interacting with the system using the SDK. - -A pipeline is a description of an ML workflow, including all of the components in the workflow and -how they combine in the form of a graph. The pipeline includes the definition of the inputs (parameters) required to -run the pipeline and the inputs and outputs of each component. A pipeline component is a self-contained set of user -code, packaged as a Docker image, that performs one step in the pipeline. For example, a component can be responsible -for steps such as data preprocessing, data transformation, and model training. - -## Content Overview: -In this tutorial, we designed a series of notebooks to demonstrate how to interact with `Kubeflow Pipelines` through the -[Kubeflow Pipelines SDK](https://github.com/kubeflow/pipelines/tree/master/sdk/python/kfp). In particular -- [00 Kubeflow Cluster Setup](00_Kubeflow_Cluster_Setup.ipynb): this notebook helps you deploy a Kubeflow -cluster through CLI. Note that it is also possible to deploy the Kubeflow cluster though -[UI](https://www.kubeflow.org/docs/gke/deploy/deploy-ui/) - -Then, notebooks 01-04 use one concrete use case, -[MNIST classification](https://www.tensorflow.org/tutorials/quickstart/beginner), to demonstrate different ways of -authoring a pipeline component: -- [01 Lightweight Python Components](01_Lightweight_Python_Components.ipynb): this notebook demonstrates how to build a -component through defining a standalone python function and then calling `kfp.components.func_to_container_op(func)` to -convert, which can be used in a pipeline. - -- [02 Local Development with Docker Image Components](02_Local_Development_with_Docker_Image_Components.ipynb): this -notebook guides you on creating a pipeline component with `kfp.components.ContainerOp` from an existing Docker image -which should contain the program to perform the task required in a particular step of your ML workflow. - -- [03 Reusable Components](03_Reusable_Components.ipynb): this notebook describes the manual way of writing a full -component program (in any language) and a component definition for it. Below is a summary of the steps involved in -creating and using a component. - - Write the program that contains your component’s logic. The program must use files and command-line arguments - to pass data to and from the component. - - Containerize the program. - - Write a component specification in YAML format that describes the component for the Kubeflow Pipelines system. - - Use the Kubeflow Pipelines SDK to load your component, use it in a pipeline and run that pipeline. - -- [04 Reusable and Pre-build Components as Pipeline](04_Reusable_and_Pre-build_Components_as_Pipeline.ipynb): this -notebook combines our built components, together with a pre-build GCP AI Platform components -and a lightweight component to compose a pipeline with three steps. - - Train an MNIST model and export it to Google Cloud Storage. - - Deploy the exported TensorFlow model on AI Platform Prediction service. - - Test the deployment by calling the endpoint with test data. - -## Running the Tutorial Notebooks -Please note that the above configuration is required for notebook service running outside Kubeflow environment. 
-And the examples demonstrated are fully tested on notebook service for the following three situations: -- Notebook running on your personal computer -- [Notebook on AI Platform, Google Cloud Platform](https://cloud.google.com/ai-platform-notebooks/) -- [Notebook running inside Kubeflow cluster](https://www.kubeflow.org/docs/components/notebooks/) - -For notebook running inside Kubeflow cluster, for example JupyterHub will be deployed together with Kubeflow Pipeline, -the environemt variables such as service account and default project should have been pre-configured while -setting up the cluster. - -## Contributors -- [Shixin Luo](https://github.com/luotigerlsx) -- [Kumar Saurabh](https://github.com/saurabh24292) \ No newline at end of file diff --git a/samples/contrib/nvidia-resnet/LICENSE b/samples/contrib/nvidia-resnet/LICENSE deleted file mode 100644 index 94bdef64ebf..00000000000 --- a/samples/contrib/nvidia-resnet/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/samples/contrib/nvidia-resnet/README.md b/samples/contrib/nvidia-resnet/README.md deleted file mode 100644 index 32229b4b0fe..00000000000 --- a/samples/contrib/nvidia-resnet/README.md +++ /dev/null @@ -1,40 +0,0 @@ -# A simple GPU-accelerated ResNet Kubeflow pipeline -## Overview -This example demonstrates a simple end-to-end training & deployment of a Keras Resnet model on the CIFAR10 dataset utilizing the following technologies: -* [NVIDIA-Docker2](https://github.com/NVIDIA/nvidia-docker) to make the Docker containers GPU aware. -* [NVIDIA device plugin](https://github.com/NVIDIA/k8s-device-plugin) to allow Kubernetes to access GPU nodes. -* [TensorFlow-19.03](https://ngc.nvidia.com/catalog/containers/nvidia:tensorflow) containers from NVIDIA GPU Cloud container registry. -* [TensorRT](https://docs.nvidia.com/deeplearning/dgx/integrate-tf-trt/index.html) for optimizing the Inference Graph in FP16 for leveraging the dedicated use of Tensor Cores for Inference. -* [TensorRT Inference Server](https://github.com/NVIDIA/tensorrt-inference-server) for serving the trained model. 
- -## System Requirements -* Ubuntu 16.04 and above -* NVIDIA GPU - -## Quickstart -* Install NVIDIA Docker, Kubernetes and Kubeflow on your local machine (on your first run): - * `sudo ./install_kubeflow_and_dependencies.sh` -* Build the Docker image of each pipeline component and compile the Kubeflow pipeline: - * First, make sure `IMAGE` variable in `build.sh` in each component dir under `components` dir points to a public container registry - * Then, make sure the `image` used in each `ContainerOp` in `pipeline/src/pipeline.py` matches `IMAGE` in the step above - * Then, make sure the `image` of the webapp Deployment in `components/webapp_launcher/src/webapp-service-template.yaml` matches `IMAGE` in `components/webapp/build.sh` - * Then, `sudo ./build_pipeline.sh` - * Note the `pipeline.py.tar.gz` file that appears in your working directory -* Determine the ambassador port: - * `sudo kubectl get svc -n kubeflow ambassador` -* Open the Kubeflow UI on: - * https://[local-machine-ip-address]:[ambassador-port]/ - * E.g. https://10.110.210.99:31342/ -* Click on Pipeline Dashboard tab, upload the `pipeline.py.tar.gz` file you just compile and create a run -* Training takes about 20 minutes for 50 epochs and a web UI is deployed as part of the pipeline so user can interact with the served model -* Access the client web UI: - * https://[local-machine-ip-address]:[kubeflow-ambassador-port]/[webapp-prefix]/ - * E.g. https://10.110.210.99:31342/webapp/ -* Now you can test the trained model with random images and obtain class prediction and probability distribution - -## Cleanup -Following are optional scripts to cleanup your cluster (useful for debugging) -* Delete deployments & services from previous runs: - * `sudo ./clean_utils/delete_all_previous_resources.sh` -* Uninstall Minikube and Kubeflow: - * `sudo ./clean_utils/remove_minikube_and_kubeflow.sh` \ No newline at end of file diff --git a/samples/contrib/nvidia-resnet/build_pipeline.sh b/samples/contrib/nvidia-resnet/build_pipeline.sh deleted file mode 100755 index 27d3dd94608..00000000000 --- a/samples/contrib/nvidia-resnet/build_pipeline.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -base_dir=$(pwd) -components_dir=$base_dir/components - -# Build and push images of Kubeflow Pipelines components -for component in $components_dir/*/; do - cd $component && ./build.sh -done - -# Compile kubeflow pipeline tar file -cd $base_dir/pipeline && ./build.sh -(mv -f src/*.tar.gz $base_dir && \ -echo "Pipeline compiled sucessfully!") || \ -echo "Pipeline compilation failed!" 
diff --git a/samples/contrib/nvidia-resnet/clean_utils/delete_all_previous_resources.sh b/samples/contrib/nvidia-resnet/clean_utils/delete_all_previous_resources.sh deleted file mode 100755 index 84c6d0246cc..00000000000 --- a/samples/contrib/nvidia-resnet/clean_utils/delete_all_previous_resources.sh +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -TRTIS_NAME=trtis -WEBAPP_NAME=webapp -PIPELINE_NAME=resnet-cifar10-pipeline -KF_NAMESPACE=kubeflow - -kubectl delete service/$TRTIS_NAME -n $KF_NAMESPACE -kubectl delete deployment.apps/$TRTIS_NAME -n $KF_NAMESPACE - -for service in $( kubectl get svc -n $KF_NAMESPACE | grep $WEBAPP_NAME | cut -d' ' -f1 ); do - kubectl delete -n $KF_NAMESPACE service/$service -done - -for deployment in $( kubectl get deployment -n $KF_NAMESPACE | grep $WEBAPP_NAME | cut -d' ' -f1 ); do - kubectl delete -n $KF_NAMESPACE deployment.apps/$deployment -done - -for pod in $(kubectl get pod -n kubeflow | grep $PIPELINE_NAME | cut -d' ' -f1); do - kubectl delete -n $KF_NAMESPACE pod/$pod -done diff --git a/samples/contrib/nvidia-resnet/clean_utils/remove_minikube_and_kubeflow.sh b/samples/contrib/nvidia-resnet/clean_utils/remove_minikube_and_kubeflow.sh deleted file mode 100755 index 8d4ee4e835c..00000000000 --- a/samples/contrib/nvidia-resnet/clean_utils/remove_minikube_and_kubeflow.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash -# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Remove KubeFlow -cd ${KUBEFLOW_SRC}/${KFAPP} -${KUBEFLOW_SRC}/scripts/kfctl.sh delete k8s - -# Remove Minikube -minikube stop -minikube delete diff --git a/samples/contrib/nvidia-resnet/components/inference_server_launcher/Dockerfile b/samples/contrib/nvidia-resnet/components/inference_server_launcher/Dockerfile deleted file mode 100644 index 2b59ba9756e..00000000000 --- a/samples/contrib/nvidia-resnet/components/inference_server_launcher/Dockerfile +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -FROM ubuntu:16.04 - -RUN apt-get update -y && \ - apt-get install --no-install-recommends -y -q ca-certificates curl python-dev python-setuptools wget unzip -RUN easy_install pip && \ - pip install pyyaml six requests - -# Install kubectl -RUN curl -LO https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/amd64/kubectl -RUN chmod +x ./kubectl -RUN mv ./kubectl /usr/local/bin - -ADD src /workspace -WORKDIR /workspace - -ENTRYPOINT ["python", "deploy_trtis.py"] diff --git a/samples/contrib/nvidia-resnet/components/inference_server_launcher/build.sh b/samples/contrib/nvidia-resnet/components/inference_server_launcher/build.sh deleted file mode 100755 index 24a08366afe..00000000000 --- a/samples/contrib/nvidia-resnet/components/inference_server_launcher/build.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash -# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -IMAGE= - -docker build -t $IMAGE . -docker push $IMAGE diff --git a/samples/contrib/nvidia-resnet/components/inference_server_launcher/src/deploy_trtis.py b/samples/contrib/nvidia-resnet/components/inference_server_launcher/src/deploy_trtis.py deleted file mode 100644 index bdcfa1e6de0..00000000000 --- a/samples/contrib/nvidia-resnet/components/inference_server_launcher/src/deploy_trtis.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import argparse -import os -import logging -import subprocess -import requests - - -KUBEFLOW_NAMESPACE = 'kubeflow' -YAML_TEMPLATE = 'trtis-service-template.yaml' -YAML_FILE = 'trtis-service.yaml' - - -def main(): - parser = argparse.ArgumentParser(description='Inference server launcher') - parser.add_argument('--trtserver_name', help='Name of trtis service') - parser.add_argument('--model_path', help='...') - - args = parser.parse_args() - - logging.getLogger().setLevel(logging.INFO) - logging.info('Generating TRTIS service template') - - template_file = os.path.join(os.path.dirname( - os.path.realpath(__file__)), YAML_TEMPLATE) - target_file = os.path.join(os.path.dirname( - os.path.realpath(__file__)), YAML_FILE) - - with open(template_file, 'r') as template: - with open(target_file, "w") as target: - data = template.read() - changed = data.replace('TRTSERVER_NAME', args.trtserver_name) - changed1 = changed.replace( - 'KUBEFLOW_NAMESPACE', KUBEFLOW_NAMESPACE) - changed2 = changed1.replace('MODEL_PATH', args.model_path) - target.write(changed2) - - logging.info('Deploying TRTIS service') - subprocess.call(['kubectl', 'apply', '-f', YAML_FILE]) - - with open('/output.txt', 'w') as f: - f.write(args.trtserver_name) - - -if __name__ == "__main__": - main() diff --git a/samples/contrib/nvidia-resnet/components/inference_server_launcher/src/trtis-service-template.yaml b/samples/contrib/nvidia-resnet/components/inference_server_launcher/src/trtis-service-template.yaml deleted file mode 100644 index 13900836b2a..00000000000 --- a/samples/contrib/nvidia-resnet/components/inference_server_launcher/src/trtis-service-template.yaml +++ /dev/null @@ -1,75 +0,0 @@ ---- -apiVersion: v1 -kind: Service -metadata: - annotations: - getambassador.io/config: |- - --- - apiVersion: ambassador/v0 - kind: Mapping - name: trtisserving-predict-mapping-TRTSERVER_NAME - grpc: True - prefix: / - rewrite: / - service: TRTSERVER_NAME.KUBEFLOW_NAMESPACE:8001 - labels: - app: TRTSERVER_NAME - name: TRTSERVER_NAME - namespace: KUBEFLOW_NAMESPACE -spec: - ports: - - name: grpc-trtis-serving - port: 8001 - targetPort: 8001 - - name: http-trtis-serving - port: 8000 - targetPort: 8000 - - name: prometheus-metrics - port: 8002 - targetPort: 8002 - selector: - app: TRTSERVER_NAME - type: ClusterIP ---- -apiVersion: extensions/v1beta1 -kind: Deployment -metadata: - labels: - app: TRTSERVER_NAME - name: TRTSERVER_NAME - namespace: KUBEFLOW_NAMESPACE -spec: - replicas: 1 - template: - metadata: - labels: - app: TRTSERVER_NAME - version: v1 - spec: - containers: - - image: nvcr.io/nvidia/tensorrtserver:19.03-py3 - command: ["/bin/sh", "-c"] - args: ["trtserver --model-store=MODEL_PATH"] - imagePullPolicy: IfNotPresent - name: TRTSERVER_NAME - ports: - - containerPort: 9000 - - containerPort: 8000 - - containerPort: 8001 - - containerPort: 8002 - resources: - limits: - cpu: "2" - memory: 4Gi - nvidia.com/gpu: 1 - requests: - cpu: "2" - memory: 4Gi - nvidia.com/gpu: 1 - volumeMounts: - - name: persistent-data-store - mountPath: /mnt/workspace - volumes: - - name: persistent-data-store - persistentVolumeClaim: - claimName: nvidia-workspace-read-claim diff --git a/samples/contrib/nvidia-resnet/components/preprocess/Dockerfile b/samples/contrib/nvidia-resnet/components/preprocess/Dockerfile deleted file mode 100644 index 27d44416390..00000000000 --- a/samples/contrib/nvidia-resnet/components/preprocess/Dockerfile +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved. 
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-FROM nvcr.io/nvidia/tensorflow:19.03-py3
-
-COPY requirements.txt .
-RUN python3 -m pip install -r \
-    requirements.txt --quiet --no-cache-dir \
-    && rm -f requirements.txt
-
-ADD src /workspace
-WORKDIR /workspace
-
-ENTRYPOINT ["python", "preprocess.py"]
diff --git a/samples/contrib/nvidia-resnet/components/preprocess/build.sh b/samples/contrib/nvidia-resnet/components/preprocess/build.sh
deleted file mode 100755
index cea03d28214..00000000000
--- a/samples/contrib/nvidia-resnet/components/preprocess/build.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-IMAGE=
-
-docker build -t $IMAGE .
-docker push $IMAGE
diff --git a/samples/contrib/nvidia-resnet/components/preprocess/requirements.txt b/samples/contrib/nvidia-resnet/components/preprocess/requirements.txt
deleted file mode 100644
index 14348698da9..00000000000
--- a/samples/contrib/nvidia-resnet/components/preprocess/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
-keras
diff --git a/samples/contrib/nvidia-resnet/components/preprocess/src/preprocess.py b/samples/contrib/nvidia-resnet/components/preprocess/src/preprocess.py
deleted file mode 100644
index 20f3141183f..00000000000
--- a/samples/contrib/nvidia-resnet/components/preprocess/src/preprocess.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import argparse
-import numpy as np
-from keras.datasets import cifar10
-
-
-def main():
-    parser = argparse.ArgumentParser(description='Data processor')
-    parser.add_argument('--input_dir', help='Raw data directory')
-    parser.add_argument('--output_dir', help='Processed data directory')
-
-    args = parser.parse_args()
-
-    def load_and_process_data(input_dir):
-        processed_data = cifar10.load_data()
-        return processed_data
-
-    def save_data(processed_data, output_dir):
-        (x_train, y_train), (x_test, y_test) = processed_data
-        if not os.path.isdir(output_dir):
-            os.mkdir(output_dir)
-        np.save(os.path.join(output_dir, 'x_train.npy'), x_train)
-        np.save(os.path.join(output_dir, 'y_train.npy'), y_train)
-        np.save(os.path.join(output_dir, 'x_test.npy'), x_test)
-        np.save(os.path.join(output_dir, 'y_test.npy'), y_test)
-
-    processed_data = load_and_process_data(args.input_dir)
-    save_data(processed_data, args.output_dir)
-
-    with open('/output.txt', 'w') as f:
-        f.write(args.output_dir)
-
-    print('input_dir: {}'.format(args.input_dir))
-    print('output_dir: {}'.format(args.output_dir))
-
-
-if __name__ == "__main__":
-    main()
diff --git a/samples/contrib/nvidia-resnet/components/train/Dockerfile b/samples/contrib/nvidia-resnet/components/train/Dockerfile
deleted file mode 100644
index 9020589b96b..00000000000
--- a/samples/contrib/nvidia-resnet/components/train/Dockerfile
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-FROM nvcr.io/nvidia/tensorflow:19.03-py3
-
-COPY requirements.txt .
-RUN python3 -m pip install -r \
-    requirements.txt --quiet --no-cache-dir \
-    && rm -f requirements.txt
-
-ADD src /workspace
-WORKDIR /workspace
-
-ENTRYPOINT ["python", "train.py"]
diff --git a/samples/contrib/nvidia-resnet/components/train/build.sh b/samples/contrib/nvidia-resnet/components/train/build.sh
deleted file mode 100755
index 18644de4a8b..00000000000
--- a/samples/contrib/nvidia-resnet/components/train/build.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-IMAGE=
-
-docker build -t $IMAGE .
-docker push $IMAGE
diff --git a/samples/contrib/nvidia-resnet/components/train/requirements.txt b/samples/contrib/nvidia-resnet/components/train/requirements.txt
deleted file mode 100644
index 14348698da9..00000000000
--- a/samples/contrib/nvidia-resnet/components/train/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
-keras
diff --git a/samples/contrib/nvidia-resnet/components/train/src/train.py b/samples/contrib/nvidia-resnet/components/train/src/train.py
deleted file mode 100644
index 359bd1d4383..00000000000
--- a/samples/contrib/nvidia-resnet/components/train/src/train.py
+++ /dev/null
@@ -1,590 +0,0 @@
-# COPYRIGHT
-#
-# All contributions by François Chollet:
-# Copyright (c) 2015 - 2018, François Chollet.
-# All rights reserved.
-#
-# All contributions by Google:
-# Copyright (c) 2015 - 2018, Google, Inc.
-# All rights reserved.
-#
-# All contributions by Microsoft:
-# Copyright (c) 2017 - 2018, Microsoft, Inc.
-# All rights reserved.
-#
-# All other contributions:
-# Copyright (c) 2015 - 2018, the respective contributors.
-# All rights reserved.
-#
-# Each contributor holds copyright over their respective contributions.
-# The project versioning (Git) records all such contribution source information.
-#
-# LICENSE
-#
-# The MIT License (MIT)
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in all
-# copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-
-# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
- - -from __future__ import print_function - -import os -import shutil -import argparse -import numpy as np - -import tensorflow as tf -from tensorflow.python.saved_model import builder as saved_model_builder -from tensorflow.python.saved_model.signature_def_utils import predict_signature_def -from tensorflow.python.saved_model import tag_constants -from tensorflow.python.saved_model import signature_constants - -import keras -from keras.regularizers import l2 -from keras import backend as K -from keras.models import Model -from keras import backend as K -from keras.optimizers import Adam -from keras.models import load_model -from keras.layers import Dense, Conv2D -from keras.layers import BatchNormalization, Activation -from keras.layers import AveragePooling2D, Input, Flatten -from keras.callbacks import ModelCheckpoint, LearningRateScheduler -from keras.callbacks import ReduceLROnPlateau -from keras.preprocessing.image import ImageDataGenerator - -import tensorflow.contrib.tensorrt as trt -# keras.mixed_precision.experimental.set_policy("default_mixed") - - -CONT_TRTIS_RESOURCE_DIR = 'trtis_resource' - - -def main(): - parser = argparse.ArgumentParser(description='Model trainer') - parser.add_argument('--input_dir', help='Processed data directory') - parser.add_argument('--output_dir', help='Output model directory') - parser.add_argument('--epochs', help='Number of training epochs') - parser.add_argument('--model_name', help='Output model name') - parser.add_argument('--model_version', help='Output model version') - - args = parser.parse_args() - - print(args.output_dir, args.model_name) - - # Copy TRTIS resource (containing config.pbtxt, labels.txt, ...) from container to mounted volume - model_dir = os.path.join(args.output_dir, args.model_name) - if os.path.isdir(model_dir): - shutil.rmtree(model_dir) - shutil.copytree(CONT_TRTIS_RESOURCE_DIR, model_dir) - os.mkdir(os.path.join(model_dir, args.model_version)) - - # Training parameters - batch_size = 128 # orig paper trained all networks with batch_size=128 - epochs = int(args.epochs) - data_augmentation = True - num_classes = 10 - - # Subtracting pixel mean improves accuracy - subtract_pixel_mean = True - - # Model parameter - # ---------------------------------------------------------------------------- - # | | 200-epoch | Orig Paper| 200-epoch | Orig Paper| sec/epoch - # Model | n | ResNet v1 | ResNet v1 | ResNet v2 | ResNet v2 | GTX1080Ti - # |v1(v2)| %Accuracy | %Accuracy | %Accuracy | %Accuracy | v1 (v2) - # ---------------------------------------------------------------------------- - # ResNet20 | 3 (2)| 92.16 | 91.25 | ----- | ----- | 35 (---) - # ResNet32 | 5(NA)| 92.46 | 92.49 | NA | NA | 50 ( NA) - # ResNet44 | 7(NA)| 92.50 | 92.83 | NA | NA | 70 ( NA) - # ResNet56 | 9 (6)| 92.71 | 93.03 | 93.01 | NA | 90 (100) - # ResNet110 |18(12)| 92.65 | 93.39+-.16| 93.15 | 93.63 | 165(180) - # ResNet164 |27(18)| ----- | 94.07 | ----- | 94.54 | ---(---) - # ResNet1001| (111)| ----- | 92.39 | ----- | 95.08+-.14| ---(---) - # --------------------------------------------------------------------------- - - n = 3 - - # Model version - # Orig paper: version = 1 (ResNet v1), Improved ResNet: version = 2 (ResNet v2) - version = 2 - - # Computed depth from supplied model parameter n - if version == 1: - depth = n * 6 + 2 - elif version == 2: - depth = n * 9 + 2 - - # Model name, depth and version - model_type = 'ResNet%dv%d' % (depth, version) - - # Load the CIFAR10 data. 
- def load_preprocessed_data(input_dir): - x_train = np.load(os.path.join(input_dir, "x_train.npy")) - y_train = np.load(os.path.join(input_dir, "y_train.npy")) - x_test = np.load(os.path.join(input_dir, "x_test.npy")) - y_test = np.load(os.path.join(input_dir, "y_test.npy")) - return x_train, y_train, x_test, y_test - - preprocessed_data = load_preprocessed_data(args.input_dir) - x_train, y_train, x_test, y_test = preprocessed_data - - # Input image dimensions. - input_shape = x_train.shape[1:] - - # Normalize data. - x_train = x_train.astype('float32') / 255 - x_test = x_test.astype('float32') / 255 - - # If subtract pixel mean is enabled - if subtract_pixel_mean: - x_train_mean = np.mean(x_train, axis=0) - x_train -= x_train_mean - x_test -= x_train_mean - - print('x_train shape:', x_train.shape) - print(x_train.shape[0], 'train samples') - print(x_test.shape[0], 'test samples') - print('y_train shape:', y_train.shape) - - # Convert class vectors to binary class matrices. - y_train = keras.utils.to_categorical(y_train, num_classes) - y_test = keras.utils.to_categorical(y_test, num_classes) - - def lr_schedule(epoch): - """Learning Rate Schedule - - Learning rate is scheduled to be reduced after 80, 120, 160, 180 epochs. - Called automatically every epoch as part of callbacks during training. - - # Arguments - epoch (int): The number of epochs - - # Returns - lr (float32): learning rate - """ - lr = 1e-3 - if epoch > 180: - lr *= 0.5e-3 - elif epoch > 160: - lr *= 1e-3 - elif epoch > 120: - lr *= 1e-2 - elif epoch > 80: - lr *= 1e-1 - print('Learning rate: ', lr) - return lr - - def resnet_layer(inputs, - num_filters=16, - kernel_size=3, - strides=1, - activation='relu', - batch_normalization=True, - conv_first=True): - """2D Convolution-Batch Normalization-Activation stack builder - - # Arguments - inputs (tensor): input tensor from input image or previous layer - num_filters (int): Conv2D number of filters - kernel_size (int): Conv2D square kernel dimensions - strides (int): Conv2D square stride dimensions - activation (string): activation name - batch_normalization (bool): whether to include batch normalization - conv_first (bool): conv-bn-activation (True) or - bn-activation-conv (False) - - # Returns - x (tensor): tensor as input to the next layer - """ - conv = Conv2D(num_filters, - kernel_size=kernel_size, - strides=strides, - padding='same', - kernel_initializer='he_normal', - kernel_regularizer=l2(1e-4)) - - x = inputs - if conv_first: - x = conv(x) - if batch_normalization: - x = BatchNormalization()(x) - if activation is not None: - x = Activation(activation)(x) - else: - if batch_normalization: - x = BatchNormalization()(x) - if activation is not None: - x = Activation(activation)(x) - x = conv(x) - return x - - def resnet_v1(input_shape, depth, num_classes=10): - """ResNet Version 1 Model builder [a] - - Stacks of 2 x (3 x 3) Conv2D-BN-ReLU - Last ReLU is after the shortcut connection. - At the beginning of each stage, the feature map size is halved (downsampled) - by a convolutional layer with strides=2, while the number of filters is - doubled. Within each stage, the layers have the same number filters and the - same number of filters. 
- Features maps sizes: - stage 0: 32x32, 16 - stage 1: 16x16, 32 - stage 2: 8x8, 64 - The Number of parameters is approx the same as Table 6 of [a]: - ResNet20 0.27M - ResNet32 0.46M - ResNet44 0.66M - ResNet56 0.85M - ResNet110 1.7M - - # Arguments - input_shape (tensor): shape of input image tensor - depth (int): number of core convolutional layers - num_classes (int): number of classes (CIFAR10 has 10) - - # Returns - model (Model): Keras model instance - """ - if (depth - 2) % 6 != 0: - raise ValueError('depth should be 6n+2 (eg 20, 32, 44 in [a])') - # Start model definition. - num_filters = 16 - num_res_blocks = int((depth - 2) / 6) - - inputs = Input(shape=input_shape) - x = resnet_layer(inputs=inputs) - # Instantiate the stack of residual units - for stack in range(3): - for res_block in range(num_res_blocks): - strides = 1 - if stack > 0 and res_block == 0: # first layer but not first stack - strides = 2 # downsample - y = resnet_layer(inputs=x, - num_filters=num_filters, - strides=strides) - y = resnet_layer(inputs=y, - num_filters=num_filters, - activation=None) - if stack > 0 and res_block == 0: # first layer but not first stack - # linear projection residual shortcut connection to match - # changed dims - x = resnet_layer(inputs=x, - num_filters=num_filters, - kernel_size=1, - strides=strides, - activation=None, - batch_normalization=False) - x = keras.layers.add([x, y]) - x = Activation('relu')(x) - num_filters *= 2 - - # Add classifier on top. - # v1 does not use BN after last shortcut connection-ReLU - x = AveragePooling2D(pool_size=8)(x) - y = Flatten()(x) - outputs = Dense(num_classes, - activation='softmax', - kernel_initializer='he_normal')(y) - - # Instantiate model. - model = Model(inputs=inputs, outputs=outputs) - return model - - def resnet_v2(input_shape, depth, num_classes=10): - """ResNet Version 2 Model builder [b] - - Stacks of (1 x 1)-(3 x 3)-(1 x 1) BN-ReLU-Conv2D or also known as - bottleneck layer - First shortcut connection per layer is 1 x 1 Conv2D. - Second and onwards shortcut connection is identity. - At the beginning of each stage, the feature map size is halved (downsampled) - by a convolutional layer with strides=2, while the number of filter maps is - doubled. Within each stage, the layers have the same number filters and the - same filter map sizes. - Features maps sizes: - conv1 : 32x32, 16 - stage 0: 32x32, 64 - stage 1: 16x16, 128 - stage 2: 8x8, 256 - - # Arguments - input_shape (tensor): shape of input image tensor - depth (int): number of core convolutional layers - num_classes (int): number of classes (CIFAR10 has 10) - - # Returns - model (Model): Keras model instance - """ - if (depth - 2) % 9 != 0: - raise ValueError('depth should be 9n+2 (eg 56 or 110 in [b])') - # Start model definition. 
- num_filters_in = 16 - num_res_blocks = int((depth - 2) / 9) - - inputs = Input(shape=input_shape) - # v2 performs Conv2D with BN-ReLU on input before splitting into 2 paths - x = resnet_layer(inputs=inputs, - num_filters=num_filters_in, - conv_first=True) - - # Instantiate the stack of residual units - for stage in range(3): - for res_block in range(num_res_blocks): - activation = 'relu' - batch_normalization = True - strides = 1 - if stage == 0: - num_filters_out = num_filters_in * 4 - if res_block == 0: # first layer and first stage - activation = None - batch_normalization = False - else: - num_filters_out = num_filters_in * 2 - if res_block == 0: # first layer but not first stage - strides = 2 # downsample - - # bottleneck residual unit - y = resnet_layer(inputs=x, - num_filters=num_filters_in, - kernel_size=1, - strides=strides, - activation=activation, - batch_normalization=batch_normalization, - conv_first=False) - y = resnet_layer(inputs=y, - num_filters=num_filters_in, - conv_first=False) - y = resnet_layer(inputs=y, - num_filters=num_filters_out, - kernel_size=1, - conv_first=False) - if res_block == 0: - # linear projection residual shortcut connection to match - # changed dims - x = resnet_layer(inputs=x, - num_filters=num_filters_out, - kernel_size=1, - strides=strides, - activation=None, - batch_normalization=False) - x = keras.layers.add([x, y]) - - num_filters_in = num_filters_out - - # Add classifier on top. - # v2 has BN-ReLU before Pooling - x = BatchNormalization()(x) - x = Activation('relu')(x) - x = AveragePooling2D(pool_size=8)(x) - y = Flatten()(x) - outputs = Dense(num_classes, - activation='softmax', - kernel_initializer='he_normal')(y) - - # Instantiate model. - model = Model(inputs=inputs, outputs=outputs) - return model - - if version == 2: - model = resnet_v2(input_shape=input_shape, depth=depth) - else: - model = resnet_v1(input_shape=input_shape, depth=depth) - - model.compile(loss='categorical_crossentropy', - optimizer=Adam(lr=lr_schedule(0)), - metrics=['accuracy']) - model.summary() - print(model_type) - - # Prepare model model saving directory. - save_dir = os.path.join(os.getcwd(), 'saved_models') - model_name = 'cifar10_%s_model.{epoch:03d}.h5' % model_type - if not os.path.isdir(save_dir): - os.makedirs(save_dir) - filepath = os.path.join(save_dir, model_name) - - # Prepare callbacks for model saving and for learning rate adjustment. - checkpoint = ModelCheckpoint(filepath=filepath, - monitor='val_acc', - verbose=1, - save_best_only=True) - - lr_scheduler = LearningRateScheduler(lr_schedule) - - lr_reducer = ReduceLROnPlateau(factor=np.sqrt(0.1), - cooldown=0, - patience=5, - min_lr=0.5e-6) - - callbacks = [checkpoint, lr_reducer, lr_scheduler] - - # Run training, with or without data augmentation. 
- if not data_augmentation: - print('Not using data augmentation.') - model.fit(x_train, y_train, - batch_size=batch_size, - epochs=epochs, - validation_data=(x_test, y_test), - shuffle=True, - callbacks=callbacks) - else: - print('Using real-time data augmentation.') - # This will do preprocessing and realtime data augmentation: - datagen = ImageDataGenerator( - # set input mean to 0 over the dataset - featurewise_center=False, - # set each sample mean to 0 - samplewise_center=False, - # divide inputs by std of dataset - featurewise_std_normalization=False, - # divide each input by its std - samplewise_std_normalization=False, - # apply ZCA whitening - zca_whitening=False, - # epsilon for ZCA whitening - zca_epsilon=1e-06, - # randomly rotate images in the range (deg 0 to 180) - rotation_range=0, - # randomly shift images horizontally - width_shift_range=0.1, - # randomly shift images vertically - height_shift_range=0.1, - # set range for random shear - shear_range=0., - # set range for random zoom - zoom_range=0., - # set range for random channel shifts - channel_shift_range=0., - # set mode for filling points outside the input boundaries - fill_mode='nearest', - # value used for fill_mode = "constant" - cval=0., - # randomly flip images - horizontal_flip=True, - # randomly flip images - vertical_flip=False, - # set rescaling factor (applied before any other transformation) - rescale=None, - # set function that will be applied on each input - preprocessing_function=None, - # image data format, either "channels_first" or "channels_last" - data_format=None, - # fraction of images reserved for validation (strictly between 0 and 1) - validation_split=0.0) - - # Compute quantities required for featurewise normalization - # (std, mean, and principal components if ZCA whitening is applied). - datagen.fit(x_train) - - # Fit the model on the batches generated by datagen.flow(). - model.fit_generator(datagen.flow(x_train, y_train, batch_size=batch_size), - steps_per_epoch=len(x_train)/batch_size, - validation_data=(x_test, y_test), - epochs=epochs, verbose=1, workers=4, - callbacks=callbacks) - - # Score trained model. - scores = model.evaluate(x_test, y_test, verbose=1) - print('Test loss:', scores[0]) - print('Test accuracy:', scores[1]) - - # Save Keras model - tmp_model_path = os.path.join(args.output_dir, "tmp") - if os.path.isdir(tmp_model_path): - shutil.rmtree(tmp_model_path) - os.mkdir(tmp_model_path) - - keras_model_path = os.path.join(tmp_model_path, 'keras_model.h5') - model.save(keras_model_path) - - # Convert Keras model to Tensorflow SavedModel - def export_h5_to_pb(path_to_h5, export_path): - # Set the learning phase to Test since the model is already trained. 
K.set_learning_phase(0)
-        # Load the Keras model
-        keras_model = load_model(path_to_h5)
-        # Build the Protocol Buffer SavedModel at 'export_path'
-        builder = saved_model_builder.SavedModelBuilder(export_path)
-        # Create prediction signature to be used by TensorFlow Serving Predict API
-        signature = predict_signature_def(inputs={"input_1": keras_model.input},
-                                          outputs={"dense_1": keras_model.output})
-        with K.get_session() as sess:
-            # Save the meta graph and the variables
-            builder.add_meta_graph_and_variables(sess=sess, tags=[tag_constants.SERVING],
-                                                 signature_def_map={signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature})
-            builder.save()
-
-    tf_model_path = os.path.join(args.output_dir, "tf_saved_model")
-    if os.path.isdir(tf_model_path):
-        shutil.rmtree(tf_model_path)
-
-    export_h5_to_pb(keras_model_path, tf_model_path)
-
-    # Apply TF_TRT on the Tensorflow SavedModel
-    graph = tf.Graph()
-    with graph.as_default():
-        with tf.Session():
-            # Create a TensorRT inference graph from a SavedModel:
-            trt_graph = trt.create_inference_graph(
-                input_graph_def=None,
-                outputs=None,
-                input_saved_model_dir=tf_model_path,
-                input_saved_model_tags=[tag_constants.SERVING],
-                max_batch_size=batch_size,
-                max_workspace_size_bytes=2 << 30,
-                precision_mode='fp16')
-
-            print([n.name + '=>' + n.op for n in trt_graph.node])
-
-            tf.io.write_graph(
-                trt_graph,
-                os.path.join(model_dir, args.model_version),
-                'model.graphdef',
-                as_text=False
-            )
-
-    # Remove tmp dirs
-    shutil.rmtree(tmp_model_path)
-    shutil.rmtree(tf_model_path)
-
-    with open('/output.txt', 'w') as f:
-        f.write(args.output_dir)
-
-    print('input_dir: {}'.format(args.input_dir))
-    print('output_dir: {}'.format(args.output_dir))
-
-
-if __name__ == "__main__":
-    main()
diff --git a/samples/contrib/nvidia-resnet/components/train/src/trtis_resource/config.pbtxt b/samples/contrib/nvidia-resnet/components/train/src/trtis_resource/config.pbtxt
deleted file mode 100644
index 21563ca7ddb..00000000000
--- a/samples/contrib/nvidia-resnet/components/train/src/trtis_resource/config.pbtxt
+++ /dev/null
@@ -1,29 +0,0 @@
-name: "resnet_graphdef"
-platform: "tensorflow_graphdef"
-max_batch_size: 128
-
-
-input [
-  {
-    name: "input_1_1"
-    data_type: TYPE_FP32
-    format: FORMAT_NHWC
-    dims: [ 32, 32, 3 ]
-  }
-]
-
-output [
-  {
-    name: "dense_1_1/Softmax"
-    data_type: TYPE_FP32
-    dims: [ 10 ]
-    label_filename: "labels.txt"
-  }
-]
-
-instance_group [
-  {
-    count: 2,
-    kind: KIND_GPU
-  }
-]
diff --git a/samples/contrib/nvidia-resnet/components/train/src/trtis_resource/labels.txt b/samples/contrib/nvidia-resnet/components/train/src/trtis_resource/labels.txt
deleted file mode 100644
index fa30c22b95d..00000000000
--- a/samples/contrib/nvidia-resnet/components/train/src/trtis_resource/labels.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-airplane
-automobile
-bird
-cat
-deer
-dog
-frog
-horse
-ship
-truck
diff --git a/samples/contrib/nvidia-resnet/components/webapp/Dockerfile b/samples/contrib/nvidia-resnet/components/webapp/Dockerfile
deleted file mode 100644
index 9548491acd2..00000000000
--- a/samples/contrib/nvidia-resnet/components/webapp/Dockerfile
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-FROM base-trtis-client
-
-RUN pip3 install flask
-ADD src /workspace/web_server
-WORKDIR /workspace/web_server
-EXPOSE 8080
-
-ENTRYPOINT ["python3", "flask_server.py"]
diff --git a/samples/contrib/nvidia-resnet/components/webapp/build.sh b/samples/contrib/nvidia-resnet/components/webapp/build.sh
deleted file mode 100755
index 6568df6617a..00000000000
--- a/samples/contrib/nvidia-resnet/components/webapp/build.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-IMAGE=
-
-# Build base TRTIS client image
-git clone https://github.com/NVIDIA/tensorrt-inference-server.git
-base=tensorrt-inference-server
-docker build -t base-trtis-client -f $base/Dockerfile.client $base
-rm -rf $base
-
-# Build & push webapp image
-docker build -t $IMAGE .
-docker push $IMAGE
diff --git a/samples/contrib/nvidia-resnet/components/webapp/src/README.md b/samples/contrib/nvidia-resnet/components/webapp/src/README.md
deleted file mode 100644
index 42bceb56453..00000000000
--- a/samples/contrib/nvidia-resnet/components/webapp/src/README.md
+++ /dev/null
@@ -1,14 +0,0 @@
-# web-ui
-
-The files in this folder define a web interface that can be used to interact with a TensorFlow server
-
-- flask_server.py
-  - main server code. Handles incoming requests, and renders HTML from template
-- mnist_client.py
-  - code to interact with TensorFlow model server
-  - takes in an image and server details, and returns the server's response
-- Dockerfile
-  - builds a runnable container out of the files in this directory
-
----
-This is not an officially supported Google product
diff --git a/samples/contrib/nvidia-resnet/components/webapp/src/flask_server.py b/samples/contrib/nvidia-resnet/components/webapp/src/flask_server.py
deleted file mode 100644
index d9eccb12fcc..00000000000
--- a/samples/contrib/nvidia-resnet/components/webapp/src/flask_server.py
+++ /dev/null
@@ -1,87 +0,0 @@
-'''
-Copyright 2018 The Kubeflow Authors
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    https://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-import logging
-import os
-# from threading import Timer
-
-from flask import Flask, render_template, request
-from trtis_client import get_prediction, random_image
-
-app = Flask(__name__)
-
-name_arg = os.getenv('MODEL_SERVE_NAME', 'resnet_graphdef')
-addr_arg = os.getenv('TRTSERVER_HOST', '10.110.20.210')
-port_arg = os.getenv('TRTSERVER_PORT', '8001')
-model_version = os.getenv('MODEL_VERSION', '-1')
-
-# handle requests to the server
-@app.route("/")
-def main():
-    args = {"name": name_arg, "addr": addr_arg, "port": port_arg, "version": str(model_version)}
-    logging.info("Request args: %s", args)
-
-    output = None
-    connection = {"text": "", "success": False}
-    try:
-        # get a random test MNIST image
-        file_name, truth, serving_path = random_image('/workspace/web_server/static/images')
-        # get prediction from TensorFlow server
-        pred, scores = get_prediction(file_name,
-                                      server_host=addr_arg,
-                                      server_port=int(port_arg),
-                                      model_name=name_arg,
-                                      model_version=int(model_version))
-        # if no exceptions thrown, server connection was a success
-        connection["text"] = "Connected (model version: {0}".format(str(model_version))+ ")"
-        connection["success"] = True
-        # parse class confidence scores from server prediction
-        output = {"truth": truth, "prediction": pred,
-                  "img_path": serving_path, "scores": scores}
-    except Exception as e: # pylint: disable=broad-except
-        logging.info("Exception occured: %s", e)
-        # server connection failed
-        connection["text"] = "Exception making request: {0}".format(e)
-    # after 10 seconds, delete cached image file from server
-    # t = Timer(10.0, remove_resource, [img_path])
-    # t.start()
-    # render results using HTML template
-    return render_template('index.html', output=output,
-                           connection=connection, args=args)
-
-
-def remove_resource(path):
-    """
-    attempt to delete file from path. Used to clean up MNIST testing images
-
-    :param path: the path of the file to delete
-    """
-    try:
-        os.remove(path)
-        print("removed " + path)
-    except OSError:
-        print("no file at " + path)
-
-
-if __name__ == '__main__':
-    logging.basicConfig(level=logging.INFO,
-                        format=('%(levelname)s|%(asctime)s'
-                                '|%(pathname)s|%(lineno)d| %(message)s'),
-                        datefmt='%Y-%m-%dT%H:%M:%S',
-                        )
-    logging.getLogger().setLevel(logging.INFO)
-    logging.info("Starting flask.")
-    app.run(debug=True, host='0.0.0.0', port=8080)
diff --git a/samples/contrib/nvidia-resnet/components/webapp/src/static/scripts/material.min.js b/samples/contrib/nvidia-resnet/components/webapp/src/static/scripts/material.min.js
deleted file mode 100644
index cb9bbca9edc..00000000000
--- a/samples/contrib/nvidia-resnet/components/webapp/src/static/scripts/material.min.js
+++ /dev/null
@@ -1,10 +0,0 @@
-/**
- * material-design-lite - Material Design Components in CSS, JS and HTML
- * @version v1.0.6
- * @license Apache-2.0
- * @copyright 2015 Google, Inc.
- * @link https://github.com/google/material-design-lite - */ -!function(){"use strict";function e(e,t){if(e){if(t.element_.classList.contains(t.CssClasses_.MDL_JS_RIPPLE_EFFECT)){var s=document.createElement("span");s.classList.add(t.CssClasses_.MDL_RIPPLE_CONTAINER),s.classList.add(t.CssClasses_.MDL_JS_RIPPLE_EFFECT);var i=document.createElement("span");i.classList.add(t.CssClasses_.MDL_RIPPLE),s.appendChild(i),e.appendChild(s)}e.addEventListener("click",function(s){s.preventDefault();var i=e.href.split("#")[1],n=t.element_.querySelector("#"+i);t.resetTabState_(),t.resetPanelState_(),e.classList.add(t.CssClasses_.ACTIVE_CLASS),n.classList.add(t.CssClasses_.ACTIVE_CLASS)})}}function t(e,t,s,i){if(i.tabBar_.classList.contains(i.CssClasses_.JS_RIPPLE_EFFECT)){var n=document.createElement("span");n.classList.add(i.CssClasses_.RIPPLE_CONTAINER),n.classList.add(i.CssClasses_.JS_RIPPLE_EFFECT);var a=document.createElement("span");a.classList.add(i.CssClasses_.RIPPLE),n.appendChild(a),e.appendChild(n)}e.addEventListener("click",function(n){n.preventDefault();var a=e.href.split("#")[1],l=i.content_.querySelector("#"+a);i.resetTabState_(t),i.resetPanelState_(s),e.classList.add(i.CssClasses_.IS_ACTIVE),l.classList.add(i.CssClasses_.IS_ACTIVE)})}var s={upgradeDom:function(e,t){},upgradeElement:function(e,t){},upgradeElements:function(e){},upgradeAllRegistered:function(){},registerUpgradedCallback:function(e,t){},register:function(e){},downgradeElements:function(e){}};s=function(){function e(e,t){for(var s=0;sd;d++){if(r=l[d],!r)throw new Error("Unable to find a registered component for the given class.");a.push(r.className),i.setAttribute("data-upgraded",a.join(","));var h=new r.classConstructor(i);h[C]=r,c.push(h);for(var u=0,m=r.callbacks.length;m>u;u++)r.callbacks[u](i);r.widget&&(i[r.className]=h);var E=document.createEvent("Events");E.initEvent("mdl-componentupgraded",!0,!0),i.dispatchEvent(E)}}function a(e){Array.isArray(e)||(e="function"==typeof e.item?Array.prototype.slice.call(e):[e]);for(var t,s=0,i=e.length;i>s;s++)t=e[s],t instanceof HTMLElement&&(n(t),t.children.length>0&&a(t.children))}function l(t){var s="undefined"==typeof t.widget&&"undefined"==typeof t.widget,i=!0;s||(i=t.widget||t.widget);var n={classConstructor:t.constructor||t.constructor,className:t.classAsString||t.classAsString,cssClass:t.cssClass||t.cssClass,widget:i,callbacks:[]};if(p.forEach(function(e){if(e.cssClass===n.cssClass)throw new Error("The provided cssClass has already been registered: "+e.cssClass);if(e.className===n.className)throw new Error("The provided className has already been registered")}),t.constructor.prototype.hasOwnProperty(C))throw new Error("MDL component classes must not have "+C+" defined as a property.");var a=e(t.classAsString,n);a||p.push(n)}function o(t,s){var i=e(t);i&&i.callbacks.push(s)}function r(){for(var e=0;e0&&this.container_.classList.contains(this.CssClasses_.IS_VISIBLE)&&(e.keyCode===this.Keycodes_.UP_ARROW?(e.preventDefault(),t[t.length-1].focus()):e.keyCode===this.Keycodes_.DOWN_ARROW&&(e.preventDefault(),t[0].focus()))}},_.prototype.handleItemKeyboardEvent_=function(e){if(this.element_&&this.container_){var t=this.element_.querySelectorAll("."+this.CssClasses_.ITEM+":not([disabled])");if(t&&t.length>0&&this.container_.classList.contains(this.CssClasses_.IS_VISIBLE)){var s=Array.prototype.slice.call(t).indexOf(e.target);if(e.keyCode===this.Keycodes_.UP_ARROW)e.preventDefault(),s>0?t[s-1].focus():t[t.length-1].focus();else 
if(e.keyCode===this.Keycodes_.DOWN_ARROW)e.preventDefault(),t.length>s+1?t[s+1].focus():t[0].focus();else if(e.keyCode===this.Keycodes_.SPACE||e.keyCode===this.Keycodes_.ENTER){e.preventDefault();var i=new MouseEvent("mousedown");e.target.dispatchEvent(i),i=new MouseEvent("mouseup"),e.target.dispatchEvent(i),e.target.click()}else e.keyCode===this.Keycodes_.ESCAPE&&(e.preventDefault(),this.hide())}}},_.prototype.handleItemClick_=function(e){e.target.hasAttribute("disabled")?e.stopPropagation():(this.closing_=!0,window.setTimeout(function(e){this.hide(),this.closing_=!1}.bind(this),this.Constant_.CLOSE_TIMEOUT))},_.prototype.applyClip_=function(e,t){this.element_.classList.contains(this.CssClasses_.UNALIGNED)?this.element_.style.clip="":this.element_.classList.contains(this.CssClasses_.BOTTOM_RIGHT)?this.element_.style.clip="rect(0 "+t+"px 0 "+t+"px)":this.element_.classList.contains(this.CssClasses_.TOP_LEFT)?this.element_.style.clip="rect("+e+"px 0 "+e+"px 0)":this.element_.classList.contains(this.CssClasses_.TOP_RIGHT)?this.element_.style.clip="rect("+e+"px "+t+"px "+e+"px "+t+"px)":this.element_.style.clip=""},_.prototype.addAnimationEndListener_=function(){var e=function(){this.element_.removeEventListener("transitionend",e),this.element_.removeEventListener("webkitTransitionEnd",e),this.element_.classList.remove(this.CssClasses_.IS_ANIMATING)}.bind(this);this.element_.addEventListener("transitionend",e),this.element_.addEventListener("webkitTransitionEnd",e)},_.prototype.show=function(e){if(this.element_&&this.container_&&this.outline_){var t=this.element_.getBoundingClientRect().height,s=this.element_.getBoundingClientRect().width;this.container_.style.width=s+"px",this.container_.style.height=t+"px",this.outline_.style.width=s+"px",this.outline_.style.height=t+"px";for(var 
i=this.Constant_.TRANSITION_DURATION_SECONDS*this.Constant_.TRANSITION_DURATION_FRACTION,n=this.element_.querySelectorAll("."+this.CssClasses_.ITEM),a=0;a=this.maxRows&&e.preventDefault()},E.prototype.onFocus_=function(e){this.element_.classList.add(this.CssClasses_.IS_FOCUSED)},E.prototype.onBlur_=function(e){this.element_.classList.remove(this.CssClasses_.IS_FOCUSED)},E.prototype.updateClasses_=function(){this.checkDisabled(),this.checkValidity(),this.checkDirty()},E.prototype.checkDisabled=function(){this.input_.disabled?this.element_.classList.add(this.CssClasses_.IS_DISABLED):this.element_.classList.remove(this.CssClasses_.IS_DISABLED)},E.prototype.checkDisabled=E.prototype.checkDisabled,E.prototype.checkValidity=function(){this.input_.validity&&(this.input_.validity.valid?this.element_.classList.remove(this.CssClasses_.IS_INVALID):this.element_.classList.add(this.CssClasses_.IS_INVALID))},E.prototype.checkValidity=E.prototype.checkValidity,E.prototype.checkDirty=function(){this.input_.value&&this.input_.value.length>0?this.element_.classList.add(this.CssClasses_.IS_DIRTY):this.element_.classList.remove(this.CssClasses_.IS_DIRTY)},E.prototype.checkDirty=E.prototype.checkDirty,E.prototype.disable=function(){this.input_.disabled=!0,this.updateClasses_()},E.prototype.disable=E.prototype.disable,E.prototype.enable=function(){this.input_.disabled=!1,this.updateClasses_()},E.prototype.enable=E.prototype.enable,E.prototype.change=function(e){this.input_.value=e||"",this.updateClasses_()},E.prototype.change=E.prototype.change,E.prototype.init=function(){if(this.element_&&(this.label_=this.element_.querySelector("."+this.CssClasses_.LABEL),this.input_=this.element_.querySelector("."+this.CssClasses_.INPUT),this.input_)){this.input_.hasAttribute(this.Constant_.MAX_ROWS_ATTRIBUTE)&&(this.maxRows=parseInt(this.input_.getAttribute(this.Constant_.MAX_ROWS_ATTRIBUTE),10),isNaN(this.maxRows)&&(this.maxRows=this.Constant_.NO_MAX_ROWS)),this.boundUpdateClassesHandler=this.updateClasses_.bind(this),this.boundFocusHandler=this.onFocus_.bind(this),this.boundBlurHandler=this.onBlur_.bind(this),this.input_.addEventListener("input",this.boundUpdateClassesHandler),this.input_.addEventListener("focus",this.boundFocusHandler),this.input_.addEventListener("blur",this.boundBlurHandler),this.maxRows!==this.Constant_.NO_MAX_ROWS&&(this.boundKeyDownHandler=this.onKeyDown_.bind(this),this.input_.addEventListener("keydown",this.boundKeyDownHandler));var e=this.element_.classList.contains(this.CssClasses_.IS_INVALID);this.updateClasses_(),this.element_.classList.add(this.CssClasses_.IS_UPGRADED),e&&this.element_.classList.add(this.CssClasses_.IS_INVALID)}},E.prototype.mdlDowngrade_=function(){this.input_.removeEventListener("input",this.boundUpdateClassesHandler),this.input_.removeEventListener("focus",this.boundFocusHandler),this.input_.removeEventListener("blur",this.boundBlurHandler),this.boundKeyDownHandler&&this.input_.removeEventListener("keydown",this.boundKeyDownHandler)},E.prototype.mdlDowngrade=E.prototype.mdlDowngrade_,E.prototype.mdlDowngrade=E.prototype.mdlDowngrade,s.register({constructor:E,classAsString:"MaterialTextfield",cssClass:"mdl-js-textfield",widget:!0});var L=function(e){this.element_=e,this.init()};window.MaterialTooltip=L,L.prototype.Constant_={},L.prototype.CssClasses_={IS_ACTIVE:"is-active"},L.prototype.handleMouseEnter_=function(e){e.stopPropagation();var 
t=e.target.getBoundingClientRect(),s=t.left+t.width/2,i=-1*(this.element_.offsetWidth/2);0>s+i?(this.element_.style.left=0,this.element_.style.marginLeft=0):(this.element_.style.left=s+"px",this.element_.style.marginLeft=i+"px"),this.element_.style.top=t.top+t.height+10+"px",this.element_.classList.add(this.CssClasses_.IS_ACTIVE),window.addEventListener("scroll",this.boundMouseLeaveHandler,!1),window.addEventListener("touchmove",this.boundMouseLeaveHandler,!1)},L.prototype.handleMouseLeave_=function(e){e.stopPropagation(),this.element_.classList.remove(this.CssClasses_.IS_ACTIVE),window.removeEventListener("scroll",this.boundMouseLeaveHandler),window.removeEventListener("touchmove",this.boundMouseLeaveHandler,!1)},L.prototype.init=function(){if(this.element_){var e=this.element_.getAttribute("for");e&&(this.forElement_=document.getElementById(e)),this.forElement_&&(this.forElement_.hasAttribute("tabindex")||this.forElement_.setAttribute("tabindex","0"),this.boundMouseEnterHandler=this.handleMouseEnter_.bind(this),this.boundMouseLeaveHandler=this.handleMouseLeave_.bind(this),this.forElement_.addEventListener("mouseenter",this.boundMouseEnterHandler,!1),this.forElement_.addEventListener("click",this.boundMouseEnterHandler,!1),this.forElement_.addEventListener("blur",this.boundMouseLeaveHandler),this.forElement_.addEventListener("touchstart",this.boundMouseEnterHandler,!1),this.forElement_.addEventListener("mouseleave",this.boundMouseLeaveHandler))}},L.prototype.mdlDowngrade_=function(){this.forElement_&&(this.forElement_.removeEventListener("mouseenter",this.boundMouseEnterHandler,!1),this.forElement_.removeEventListener("click",this.boundMouseEnterHandler,!1),this.forElement_.removeEventListener("touchstart",this.boundMouseEnterHandler,!1),this.forElement_.removeEventListener("mouseleave",this.boundMouseLeaveHandler))},L.prototype.mdlDowngrade=L.prototype.mdlDowngrade_,L.prototype.mdlDowngrade=L.prototype.mdlDowngrade,s.register({constructor:L,classAsString:"MaterialTooltip",cssClass:"mdl-tooltip"});var I=function(e){this.element_=e,this.init()};window.MaterialLayout=I,I.prototype.Constant_={MAX_WIDTH:"(max-width: 
1024px)",TAB_SCROLL_PIXELS:100,MENU_ICON:"menu",CHEVRON_LEFT:"chevron_left",CHEVRON_RIGHT:"chevron_right"},I.prototype.Mode_={STANDARD:0,SEAMED:1,WATERFALL:2,SCROLL:3},I.prototype.CssClasses_={CONTAINER:"mdl-layout__container",HEADER:"mdl-layout__header",DRAWER:"mdl-layout__drawer",CONTENT:"mdl-layout__content",DRAWER_BTN:"mdl-layout__drawer-button",ICON:"material-icons",JS_RIPPLE_EFFECT:"mdl-js-ripple-effect",RIPPLE_CONTAINER:"mdl-layout__tab-ripple-container",RIPPLE:"mdl-ripple",RIPPLE_IGNORE_EVENTS:"mdl-js-ripple-effect--ignore-events",HEADER_SEAMED:"mdl-layout__header--seamed",HEADER_WATERFALL:"mdl-layout__header--waterfall",HEADER_SCROLL:"mdl-layout__header--scroll",FIXED_HEADER:"mdl-layout--fixed-header",OBFUSCATOR:"mdl-layout__obfuscator",TAB_BAR:"mdl-layout__tab-bar",TAB_CONTAINER:"mdl-layout__tab-bar-container",TAB:"mdl-layout__tab",TAB_BAR_BUTTON:"mdl-layout__tab-bar-button",TAB_BAR_LEFT_BUTTON:"mdl-layout__tab-bar-left-button",TAB_BAR_RIGHT_BUTTON:"mdl-layout__tab-bar-right-button",PANEL:"mdl-layout__tab-panel",HAS_DRAWER:"has-drawer",HAS_TABS:"has-tabs",HAS_SCROLLING_HEADER:"has-scrolling-header",CASTING_SHADOW:"is-casting-shadow",IS_COMPACT:"is-compact",IS_SMALL_SCREEN:"is-small-screen",IS_DRAWER_OPEN:"is-visible",IS_ACTIVE:"is-active",IS_UPGRADED:"is-upgraded",IS_ANIMATING:"is-animating",ON_LARGE_SCREEN:"mdl-layout--large-screen-only",ON_SMALL_SCREEN:"mdl-layout--small-screen-only"},I.prototype.contentScrollHandler_=function(){this.header_.classList.contains(this.CssClasses_.IS_ANIMATING)||(this.content_.scrollTop>0&&!this.header_.classList.contains(this.CssClasses_.IS_COMPACT)?(this.header_.classList.add(this.CssClasses_.CASTING_SHADOW),this.header_.classList.add(this.CssClasses_.IS_COMPACT),this.header_.classList.add(this.CssClasses_.IS_ANIMATING)):this.content_.scrollTop<=0&&this.header_.classList.contains(this.CssClasses_.IS_COMPACT)&&(this.header_.classList.remove(this.CssClasses_.CASTING_SHADOW),this.header_.classList.remove(this.CssClasses_.IS_COMPACT),this.header_.classList.add(this.CssClasses_.IS_ANIMATING)))},I.prototype.screenSizeHandler_=function(){this.screenSizeMediaQuery_.matches?this.element_.classList.add(this.CssClasses_.IS_SMALL_SCREEN):(this.element_.classList.remove(this.CssClasses_.IS_SMALL_SCREEN),this.drawer_&&(this.drawer_.classList.remove(this.CssClasses_.IS_DRAWER_OPEN),this.obfuscator_.classList.remove(this.CssClasses_.IS_DRAWER_OPEN)))},I.prototype.drawerToggleHandler_=function(){this.drawer_.classList.toggle(this.CssClasses_.IS_DRAWER_OPEN),this.obfuscator_.classList.toggle(this.CssClasses_.IS_DRAWER_OPEN)},I.prototype.headerTransitionEndHandler_=function(){this.header_.classList.remove(this.CssClasses_.IS_ANIMATING)},I.prototype.headerClickHandler_=function(){this.header_.classList.contains(this.CssClasses_.IS_COMPACT)&&(this.header_.classList.remove(this.CssClasses_.IS_COMPACT),this.header_.classList.add(this.CssClasses_.IS_ANIMATING))},I.prototype.resetTabState_=function(e){for(var t=0;tn;n++){var a=s[n];a.classList&&a.classList.contains(this.CssClasses_.HEADER)&&(this.header_=a),a.classList&&a.classList.contains(this.CssClasses_.DRAWER)&&(this.drawer_=a),a.classList&&a.classList.contains(this.CssClasses_.CONTENT)&&(this.content_=a)}this.header_&&(this.tabBar_=this.header_.querySelector("."+this.CssClasses_.TAB_BAR));var 
l=this.Mode_.STANDARD;if(this.header_&&(this.header_.classList.contains(this.CssClasses_.HEADER_SEAMED)?l=this.Mode_.SEAMED:this.header_.classList.contains(this.CssClasses_.HEADER_WATERFALL)?(l=this.Mode_.WATERFALL,this.header_.addEventListener("transitionend",this.headerTransitionEndHandler_.bind(this)),this.header_.addEventListener("click",this.headerClickHandler_.bind(this))):this.header_.classList.contains(this.CssClasses_.HEADER_SCROLL)&&(l=this.Mode_.SCROLL,e.classList.add(this.CssClasses_.HAS_SCROLLING_HEADER)),l===this.Mode_.STANDARD?(this.header_.classList.add(this.CssClasses_.CASTING_SHADOW),this.tabBar_&&this.tabBar_.classList.add(this.CssClasses_.CASTING_SHADOW)):l===this.Mode_.SEAMED||l===this.Mode_.SCROLL?(this.header_.classList.remove(this.CssClasses_.CASTING_SHADOW),this.tabBar_&&this.tabBar_.classList.remove(this.CssClasses_.CASTING_SHADOW)):l===this.Mode_.WATERFALL&&(this.content_.addEventListener("scroll",this.contentScrollHandler_.bind(this)),this.contentScrollHandler_())),this.drawer_){var o=this.element_.querySelector("."+this.CssClasses_.DRAWER_BTN);if(!o){o=document.createElement("div"),o.classList.add(this.CssClasses_.DRAWER_BTN);var r=document.createElement("i");r.classList.add(this.CssClasses_.ICON),r.textContent=this.Constant_.MENU_ICON,o.appendChild(r)}this.drawer_.classList.contains(this.CssClasses_.ON_LARGE_SCREEN)?o.classList.add(this.CssClasses_.ON_LARGE_SCREEN):this.drawer_.classList.contains(this.CssClasses_.ON_SMALL_SCREEN)&&o.classList.add(this.CssClasses_.ON_SMALL_SCREEN),o.addEventListener("click",this.drawerToggleHandler_.bind(this)),this.element_.classList.add(this.CssClasses_.HAS_DRAWER),this.element_.classList.contains(this.CssClasses_.FIXED_HEADER)?this.header_.insertBefore(o,this.header_.firstChild):this.element_.insertBefore(o,this.content_);var d=document.createElement("div");d.classList.add(this.CssClasses_.OBFUSCATOR),this.element_.appendChild(d),d.addEventListener("click",this.drawerToggleHandler_.bind(this)),this.obfuscator_=d}if(this.screenSizeMediaQuery_=window.matchMedia(this.Constant_.MAX_WIDTH),this.screenSizeMediaQuery_.addListener(this.screenSizeHandler_.bind(this)),this.screenSizeHandler_(),this.header_&&this.tabBar_){this.element_.classList.add(this.CssClasses_.HAS_TABS);var _=document.createElement("div");_.classList.add(this.CssClasses_.TAB_CONTAINER),this.header_.insertBefore(_,this.tabBar_),this.header_.removeChild(this.tabBar_);var h=document.createElement("div");h.classList.add(this.CssClasses_.TAB_BAR_BUTTON),h.classList.add(this.CssClasses_.TAB_BAR_LEFT_BUTTON);var p=document.createElement("i");p.classList.add(this.CssClasses_.ICON),p.textContent=this.Constant_.CHEVRON_LEFT,h.appendChild(p),h.addEventListener("click",function(){this.tabBar_.scrollLeft-=this.Constant_.TAB_SCROLL_PIXELS}.bind(this));var c=document.createElement("div");c.classList.add(this.CssClasses_.TAB_BAR_BUTTON),c.classList.add(this.CssClasses_.TAB_BAR_RIGHT_BUTTON);var u=document.createElement("i");u.classList.add(this.CssClasses_.ICON),u.textContent=this.Constant_.CHEVRON_RIGHT,c.appendChild(u),c.addEventListener("click",function(){this.tabBar_.scrollLeft+=this.Constant_.TAB_SCROLL_PIXELS}.bind(this)),_.appendChild(h),_.appendChild(this.tabBar_),_.appendChild(c);var C=function(){this.tabBar_.scrollLeft>0?h.classList.add(this.CssClasses_.IS_ACTIVE):h.classList.remove(this.CssClasses_.IS_ACTIVE),this.tabBar_.scrollLeft0)return;this.setFrameCount(1);var 
i,n,a=e.currentTarget.getBoundingClientRect();if(0===e.clientX&&0===e.clientY)i=Math.round(a.width/2),n=Math.round(a.height/2);else{var l=e.clientX?e.clientX:e.touches[0].clientX,o=e.clientY?e.clientY:e.touches[0].clientY;i=Math.round(l-a.left),n=Math.round(o-a.top)}this.setRippleXY(i,n),this.setRippleStyles(!0),window.requestAnimationFrame(this.animFrameHandler.bind(this))}},b.prototype.upHandler_=function(e){e&&2!==e.detail&&this.rippleElement_.classList.remove(this.CssClasses_.IS_VISIBLE),window.setTimeout(function(){this.rippleElement_.classList.remove(this.CssClasses_.IS_VISIBLE)}.bind(this),0)},b.prototype.init=function(){if(this.element_){var e=this.element_.classList.contains(this.CssClasses_.RIPPLE_CENTER);this.element_.classList.contains(this.CssClasses_.RIPPLE_EFFECT_IGNORE_EVENTS)||(this.rippleElement_=this.element_.querySelector("."+this.CssClasses_.RIPPLE),this.frameCount_=0,this.rippleSize_=0,this.x_=0,this.y_=0,this.ignoringMouseDown_=!1,this.boundDownHandler=this.downHandler_.bind(this),this.element_.addEventListener("mousedown",this.boundDownHandler),this.element_.addEventListener("touchstart",this.boundDownHandler),this.boundUpHandler=this.upHandler_.bind(this),this.element_.addEventListener("mouseup",this.boundUpHandler),this.element_.addEventListener("mouseleave",this.boundUpHandler),this.element_.addEventListener("touchend",this.boundUpHandler),this.element_.addEventListener("blur",this.boundUpHandler),this.getFrameCount=function(){return this.frameCount_},this.setFrameCount=function(e){this.frameCount_=e},this.getRippleElement=function(){return this.rippleElement_},this.setRippleXY=function(e,t){this.x_=e,this.y_=t},this.setRippleStyles=function(t){if(null!==this.rippleElement_){var s,i,n,a="translate("+this.x_+"px, "+this.y_+"px)";t?(i=this.Constant_.INITIAL_SCALE,n=this.Constant_.INITIAL_SIZE):(i=this.Constant_.FINAL_SCALE,n=this.rippleSize_+"px",e&&(a="translate("+this.boundWidth/2+"px, "+this.boundHeight/2+"px)")),s="translate(-50%, -50%) "+a+i,this.rippleElement_.style.webkitTransform=s,this.rippleElement_.style.msTransform=s,this.rippleElement_.style.transform=s,t?this.rippleElement_.classList.remove(this.CssClasses_.IS_ANIMATING):this.rippleElement_.classList.add(this.CssClasses_.IS_ANIMATING)}},this.animFrameHandler=function(){this.frameCount_-->0?window.requestAnimationFrame(this.animFrameHandler.bind(this)):this.setRippleStyles(!1)})}},b.prototype.mdlDowngrade_=function(){this.element_.removeEventListener("mousedown",this.boundDownHandler),this.element_.removeEventListener("touchstart",this.boundDownHandler),this.element_.removeEventListener("mouseup",this.boundUpHandler),this.element_.removeEventListener("mouseleave",this.boundUpHandler),this.element_.removeEventListener("touchend",this.boundUpHandler),this.element_.removeEventListener("blur",this.boundUpHandler)},b.prototype.mdlDowngrade=b.prototype.mdlDowngrade_,b.prototype.mdlDowngrade=b.prototype.mdlDowngrade,s.register({constructor:b,classAsString:"MaterialRipple",cssClass:"mdl-js-ripple-effect",widget:!1})}(); -//# sourceMappingURL=material.min.js.map diff --git a/samples/contrib/nvidia-resnet/components/webapp/src/static/styles/demo.css b/samples/contrib/nvidia-resnet/components/webapp/src/static/styles/demo.css deleted file mode 100644 index 5cbfd26797b..00000000000 --- a/samples/contrib/nvidia-resnet/components/webapp/src/static/styles/demo.css +++ /dev/null @@ -1,238 +0,0 @@ -/** - * Copyright 2015 Google Inc. All Rights Reserved. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -html, body { - font-family: 'Roboto', 'Helvetica', sans-serif; - margin: 0; - padding: 0; -} -.mdl-demo .mdl-layout__header-row { - padding-left: 40px; -} -.mdl-demo .mdl-layout.is-small-screen .mdl-layout__header-row h3 { - font-size: inherit; -} -.mdl-demo .mdl-layout__tab-bar-button { - display: none; -} -.mdl-demo .mdl-layout.is-small-screen .mdl-layout__tab-bar .mdl-button { - display: none; -} -.mdl-demo .mdl-layout:not(.is-small-screen) .mdl-layout__tab-bar, -.mdl-demo .mdl-layout:not(.is-small-screen) .mdl-layout__tab-bar-container { - overflow: visible; -} -.mdl-demo .mdl-layout__tab-bar-container { - height: 64px; -} -.mdl-demo .mdl-layout__tab-bar { - padding: 0; - padding-left: 16px; - box-sizing: border-box; - height: 100%; - width: 100%; -} -.mdl-demo .mdl-layout__tab-bar .mdl-layout__tab { - height: 64px; - line-height: 64px; -} -.mdl-demo .mdl-layout__tab-bar .mdl-layout__tab.is-active::after { - background-color: white; - height: 4px; -} -.mdl-demo main > .mdl-layout__tab-panel { - padding: 8px; - padding-top: 48px; -} -.mdl-demo .mdl-card { - height: auto; - display: -webkit-flex; - display: -ms-flexbox; - display: flex; - -webkit-flex-direction: column; - -ms-flex-direction: column; - flex-direction: column; -} -.mdl-demo .mdl-card > * { - height: auto; -} -.mdl-demo .mdl-card .mdl-card__supporting-text { - margin: 40px; - -webkit-flex-grow: 1; - -ms-flex-positive: 1; - flex-grow: 1; - padding: 0; - color: inherit; - width: calc(100% - 80px); -} -.mdl-demo.mdl-demo .mdl-card__supporting-text h4 { - margin-top: 0; - margin-bottom: 20px; -} -.mdl-demo .mdl-card__actions { - margin: 0; - padding: 4px 40px; - color: inherit; -} -.mdl-demo .mdl-card__actions a { - color: #00BCD4; - margin: 0; -} -.mdl-demo .mdl-card__actions a:hover, -.mdl-demo .mdl-card__actions a:active { - color: inherit; - background-color: transparent; -} -.mdl-demo .mdl-card__supporting-text + .mdl-card__actions { - border-top: 1px solid rgba(0, 0, 0, 0.12); -} -.mdl-demo #add { - position: absolute; - right: 40px; - top: 36px; - z-index: 999; -} - -.mdl-demo .mdl-layout__content section:not(:last-of-type) { - position: relative; - margin-bottom: 48px; -} -.mdl-demo section.section--center { - max-width: 860px; -} -.mdl-demo #features section.section--center { - max-width: 620px; -} -.mdl-demo section > header{ - display: -webkit-flex; - display: -ms-flexbox; - display: flex; - -webkit-align-items: center; - -ms-flex-align: center; - align-items: center; - -webkit-justify-content: center; - -ms-flex-pack: center; - justify-content: center; -} -.mdl-demo section > .section__play-btn { - min-height: 200px; -} -.mdl-demo section > header > .material-icons { - font-size: 3rem; -} -.mdl-demo section > button { - position: absolute; - z-index: 99; - top: 8px; - right: 8px; -} -.mdl-demo section .section__circle { - display: -webkit-flex; - display: -ms-flexbox; - display: flex; - -webkit-align-items: center; - -ms-flex-align: center; - 
align-items: center; - -webkit-justify-content: flex-start; - -ms-flex-pack: start; - justify-content: flex-start; - -webkit-flex-grow: 0; - -ms-flex-positive: 0; - flex-grow: 0; - -webkit-flex-shrink: 1; - -ms-flex-negative: 1; - flex-shrink: 1; -} -.mdl-demo section .section__text { - -webkit-flex-grow: 1; - -ms-flex-positive: 1; - flex-grow: 1; - -webkit-flex-shrink: 0; - -ms-flex-negative: 0; - flex-shrink: 0; - padding-top: 8px; -} -.mdl-demo section .section__text h5 { - font-size: inherit; - margin: 0; - margin-bottom: 0.5em; -} -.mdl-demo section .section__text a { - text-decoration: none; -} -.mdl-demo section .section__circle-container > .section__circle-container__circle { - width: 64px; - height: 64px; - border-radius: 32px; - margin: 8px 0; -} -.mdl-demo section.section--footer .section__circle--big { - width: 100px; - height: 100px; - border-radius: 50px; - margin: 8px 32px; -} -.mdl-demo .is-small-screen section.section--footer .section__circle--big { - width: 50px; - height: 50px; - border-radius: 25px; - margin: 8px 16px; -} -.mdl-demo section.section--footer { - padding: 64px 0; - margin: 0 -8px -8px -8px; -} -.mdl-demo section.section--center .section__text:not(:last-child) { - border-bottom: 1px solid rgba(0,0,0,.13); -} -.mdl-demo .mdl-card .mdl-card__supporting-text > h3:first-child { - margin-bottom: 24px; -} -.mdl-demo .mdl-layout__tab-panel:not(#overview) { - background-color: white; -} -.mdl-demo #features section { - margin-bottom: 72px; -} -.mdl-demo #features h4, #features h5 { - margin-bottom: 16px; -} -.mdl-demo .toc { - border-left: 4px solid #C1EEF4; - margin: 24px; - padding: 0; - padding-left: 8px; - display: -webkit-flex; - display: -ms-flexbox; - display: flex; - -webkit-flex-direction: column; - -ms-flex-direction: column; - flex-direction: column; -} -.mdl-demo .toc h4 { - font-size: 0.9rem; - margin-top: 0; -} -.mdl-demo .toc a { - color: #4DD0E1; - text-decoration: none; - font-size: 16px; - line-height: 28px; - display: block; -} -.mdl-demo .mdl-menu__container { - z-index: 99; -} diff --git a/samples/contrib/nvidia-resnet/components/webapp/src/static/styles/material.deep_purple-pink.min.css b/samples/contrib/nvidia-resnet/components/webapp/src/static/styles/material.deep_purple-pink.min.css deleted file mode 100644 index 818190a37bb..00000000000 --- a/samples/contrib/nvidia-resnet/components/webapp/src/static/styles/material.deep_purple-pink.min.css +++ /dev/null @@ -1,8 +0,0 @@ -/** - * material-design-lite - Material Design Components in CSS, JS and HTML - * @version v1.3.0 - * @license Apache-2.0 - * @copyright 2015 Google, Inc. 
- * @link https://github.com/google/material-design-lite - */ -@charset "UTF-8";html{color:rgba(0,0,0,.87)}::-moz-selection{background:#b3d4fc;text-shadow:none}::selection{background:#b3d4fc;text-shadow:none}hr{display:block;height:1px;border:0;border-top:1px solid #ccc;margin:1em 0;padding:0}audio,canvas,iframe,img,svg,video{vertical-align:middle}fieldset{border:0;margin:0;padding:0}textarea{resize:vertical}.browserupgrade{margin:.2em 0;background:#ccc;color:#000;padding:.2em 0}.hidden{display:none!important}.visuallyhidden{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.visuallyhidden.focusable:active,.visuallyhidden.focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}.invisible{visibility:hidden}.clearfix:before,.clearfix:after{content:" ";display:table}.clearfix:after{clear:both}@media print{*,*:before,*:after,*:first-letter{background:transparent!important;color:#000!important;box-shadow:none!important}a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href)")"}abbr[title]:after{content:" (" attr(title)")"}a[href^="#"]:after,a[href^="javascript:"]:after{content:""}pre,blockquote{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}img{max-width:100%!important}p,h2,h3{orphans:3;widows:3}h2,h3{page-break-after:avoid}}a,.mdl-accordion,.mdl-button,.mdl-card,.mdl-checkbox,.mdl-dropdown-menu,.mdl-icon-toggle,.mdl-item,.mdl-radio,.mdl-slider,.mdl-switch,.mdl-tabs__tab{-webkit-tap-highlight-color:transparent;-webkit-tap-highlight-color:rgba(255,255,255,0)}html{width:100%;height:100%;-ms-touch-action:manipulation;touch-action:manipulation}body{width:100%;min-height:100%}main{display:block}*[hidden]{display:none!important}html,body{font-family:"Helvetica","Arial",sans-serif;font-size:14px;font-weight:400;line-height:20px}h1,h2,h3,h4,h5,h6,p{padding:0}h1 small,h2 small,h3 small,h4 small,h5 small,h6 small{font-family:"Roboto","Helvetica","Arial",sans-serif;font-weight:400;line-height:1.35;letter-spacing:-.02em;opacity:.54;font-size:.6em}h1{font-size:56px;line-height:1.35;letter-spacing:-.02em;margin:24px 0}h1,h2{font-family:"Roboto","Helvetica","Arial",sans-serif;font-weight:400}h2{font-size:45px;line-height:48px}h2,h3{margin:24px 0}h3{font-size:34px;line-height:40px}h3,h4{font-family:"Roboto","Helvetica","Arial",sans-serif;font-weight:400}h4{font-size:24px;line-height:32px;-moz-osx-font-smoothing:grayscale;margin:24px 0 16px}h5{font-size:20px;font-weight:500;line-height:1;letter-spacing:.02em}h5,h6{font-family:"Roboto","Helvetica","Arial",sans-serif;margin:24px 0 16px}h6{font-size:16px;letter-spacing:.04em}h6,p{font-weight:400;line-height:24px}p{font-size:14px;letter-spacing:0;margin:0 0 
16px}a{color:rgb(255,64,129);font-weight:500}blockquote{font-family:"Roboto","Helvetica","Arial",sans-serif;position:relative;font-size:24px;font-weight:300;font-style:italic;line-height:1.35;letter-spacing:.08em}blockquote:before{position:absolute;left:-.5em;content:'“'}blockquote:after{content:'”';margin-left:-.05em}mark{background-color:#f4ff81}dt{font-weight:700}address{font-size:12px;line-height:1;font-style:normal}address,ul,ol{font-weight:400;letter-spacing:0}ul,ol{font-size:14px;line-height:24px}.mdl-typography--display-4,.mdl-typography--display-4-color-contrast{font-family:"Roboto","Helvetica","Arial",sans-serif;font-size:112px;font-weight:300;line-height:1;letter-spacing:-.04em}.mdl-typography--display-4-color-contrast{opacity:.54}.mdl-typography--display-3,.mdl-typography--display-3-color-contrast{font-family:"Roboto","Helvetica","Arial",sans-serif;font-size:56px;font-weight:400;line-height:1.35;letter-spacing:-.02em}.mdl-typography--display-3-color-contrast{opacity:.54}.mdl-typography--display-2,.mdl-typography--display-2-color-contrast{font-family:"Roboto","Helvetica","Arial",sans-serif;font-size:45px;font-weight:400;line-height:48px}.mdl-typography--display-2-color-contrast{opacity:.54}.mdl-typography--display-1,.mdl-typography--display-1-color-contrast{font-family:"Roboto","Helvetica","Arial",sans-serif;font-size:34px;font-weight:400;line-height:40px}.mdl-typography--display-1-color-contrast{opacity:.54}.mdl-typography--headline,.mdl-typography--headline-color-contrast{font-family:"Roboto","Helvetica","Arial",sans-serif;font-size:24px;font-weight:400;line-height:32px;-moz-osx-font-smoothing:grayscale}.mdl-typography--headline-color-contrast{opacity:.87}.mdl-typography--title,.mdl-typography--title-color-contrast{font-family:"Roboto","Helvetica","Arial",sans-serif;font-size:20px;font-weight:500;line-height:1;letter-spacing:.02em}.mdl-typography--title-color-contrast{opacity:.87}.mdl-typography--subhead,.mdl-typography--subhead-color-contrast{font-family:"Roboto","Helvetica","Arial",sans-serif;font-size:16px;font-weight:400;line-height:24px;letter-spacing:.04em}.mdl-typography--subhead-color-contrast{opacity:.87}.mdl-typography--body-2,.mdl-typography--body-2-color-contrast{font-size:14px;font-weight:700;line-height:24px;letter-spacing:0}.mdl-typography--body-2-color-contrast{opacity:.87}.mdl-typography--body-1,.mdl-typography--body-1-color-contrast{font-size:14px;font-weight:400;line-height:24px;letter-spacing:0}.mdl-typography--body-1-color-contrast{opacity:.87}.mdl-typography--body-2-force-preferred-font,.mdl-typography--body-2-force-preferred-font-color-contrast{font-family:"Roboto","Helvetica","Arial",sans-serif;font-size:14px;font-weight:500;line-height:24px;letter-spacing:0}.mdl-typography--body-2-force-preferred-font-color-contrast{opacity:.87}.mdl-typography--body-1-force-preferred-font,.mdl-typography--body-1-force-preferred-font-color-contrast{font-family:"Roboto","Helvetica","Arial",sans-serif;font-size:14px;font-weight:400;line-height:24px;letter-spacing:0}.mdl-typography--body-1-force-preferred-font-color-contrast{opacity:.87}.mdl-typography--caption,.mdl-typography--caption-force-preferred-font{font-size:12px;font-weight:400;line-height:1;letter-spacing:0}.mdl-typography--caption-force-preferred-font{font-family:"Roboto","Helvetica","Arial",sans-serif}.mdl-typography--caption-color-contrast,.mdl-typography--caption-force-preferred-font-color-contrast{font-size:12px;font-weight:400;line-height:1;letter-spacing:0;opacity:.54}.mdl-typography--caption-force-preferr
ed-font-color-contrast,.mdl-typography--menu{font-family:"Roboto","Helvetica","Arial",sans-serif}.mdl-typography--menu{font-size:14px;font-weight:500;line-height:1;letter-spacing:0}.mdl-typography--menu-color-contrast{opacity:.87}.mdl-typography--menu-color-contrast,.mdl-typography--button,.mdl-typography--button-color-contrast{font-family:"Roboto","Helvetica","Arial",sans-serif;font-size:14px;font-weight:500;line-height:1;letter-spacing:0}.mdl-typography--button,.mdl-typography--button-color-contrast{text-transform:uppercase}.mdl-typography--button-color-contrast{opacity:.87}.mdl-typography--text-left{text-align:left}.mdl-typography--text-right{text-align:right}.mdl-typography--text-center{text-align:center}.mdl-typography--text-justify{text-align:justify}.mdl-typography--text-nowrap{white-space:nowrap}.mdl-typography--text-lowercase{text-transform:lowercase}.mdl-typography--text-uppercase{text-transform:uppercase}.mdl-typography--text-capitalize{text-transform:capitalize}.mdl-typography--font-thin{font-weight:200!important}.mdl-typography--font-light{font-weight:300!important}.mdl-typography--font-regular{font-weight:400!important}.mdl-typography--font-medium{font-weight:500!important}.mdl-typography--font-bold{font-weight:700!important}.mdl-typography--font-black{font-weight:900!important}.material-icons{font-family:'Material Icons';font-weight:400;font-style:normal;font-size:24px;line-height:1;letter-spacing:normal;text-transform:none;display:inline-block;word-wrap:normal;-moz-font-feature-settings:'liga';font-feature-settings:'liga';-webkit-font-feature-settings:'liga';-webkit-font-smoothing:antialiased}.mdl-color-text--red{color:#f44336 !important}.mdl-color--red{background-color:#f44336 !important}.mdl-color-text--red-50{color:#ffebee !important}.mdl-color--red-50{background-color:#ffebee !important}.mdl-color-text--red-100{color:#ffcdd2 !important}.mdl-color--red-100{background-color:#ffcdd2 !important}.mdl-color-text--red-200{color:#ef9a9a !important}.mdl-color--red-200{background-color:#ef9a9a !important}.mdl-color-text--red-300{color:#e57373 !important}.mdl-color--red-300{background-color:#e57373 !important}.mdl-color-text--red-400{color:#ef5350 !important}.mdl-color--red-400{background-color:#ef5350 !important}.mdl-color-text--red-500{color:#f44336 !important}.mdl-color--red-500{background-color:#f44336 !important}.mdl-color-text--red-600{color:#e53935 !important}.mdl-color--red-600{background-color:#e53935 !important}.mdl-color-text--red-700{color:#d32f2f !important}.mdl-color--red-700{background-color:#d32f2f !important}.mdl-color-text--red-800{color:#c62828 !important}.mdl-color--red-800{background-color:#c62828 !important}.mdl-color-text--red-900{color:#b71c1c !important}.mdl-color--red-900{background-color:#b71c1c !important}.mdl-color-text--red-A100{color:#ff8a80 !important}.mdl-color--red-A100{background-color:#ff8a80 !important}.mdl-color-text--red-A200{color:#ff5252 !important}.mdl-color--red-A200{background-color:#ff5252 !important}.mdl-color-text--red-A400{color:#ff1744 !important}.mdl-color--red-A400{background-color:#ff1744 !important}.mdl-color-text--red-A700{color:#d50000 !important}.mdl-color--red-A700{background-color:#d50000 !important}.mdl-color-text--pink{color:#e91e63 !important}.mdl-color--pink{background-color:#e91e63 !important}.mdl-color-text--pink-50{color:#fce4ec !important}.mdl-color--pink-50{background-color:#fce4ec !important}.mdl-color-text--pink-100{color:#f8bbd0 !important}.mdl-color--pink-100{background-color:#f8bbd0 
!important}.mdl-color-text--pink-200{color:#f48fb1 !important}.mdl-color--pink-200{background-color:#f48fb1 !important}.mdl-color-text--pink-300{color:#f06292 !important}.mdl-color--pink-300{background-color:#f06292 !important}.mdl-color-text--pink-400{color:#ec407a !important}.mdl-color--pink-400{background-color:#ec407a !important}.mdl-color-text--pink-500{color:#e91e63 !important}.mdl-color--pink-500{background-color:#e91e63 !important}.mdl-color-text--pink-600{color:#d81b60 !important}.mdl-color--pink-600{background-color:#d81b60 !important}.mdl-color-text--pink-700{color:#c2185b !important}.mdl-color--pink-700{background-color:#c2185b !important}.mdl-color-text--pink-800{color:#ad1457 !important}.mdl-color--pink-800{background-color:#ad1457 !important}.mdl-color-text--pink-900{color:#880e4f !important}.mdl-color--pink-900{background-color:#880e4f !important}.mdl-color-text--pink-A100{color:#ff80ab !important}.mdl-color--pink-A100{background-color:#ff80ab !important}.mdl-color-text--pink-A200{color:#ff4081 !important}.mdl-color--pink-A200{background-color:#ff4081 !important}.mdl-color-text--pink-A400{color:#f50057 !important}.mdl-color--pink-A400{background-color:#f50057 !important}.mdl-color-text--pink-A700{color:#c51162 !important}.mdl-color--pink-A700{background-color:#c51162 !important}.mdl-color-text--purple{color:#9c27b0 !important}.mdl-color--purple{background-color:#9c27b0 !important}.mdl-color-text--purple-50{color:#f3e5f5 !important}.mdl-color--purple-50{background-color:#f3e5f5 !important}.mdl-color-text--purple-100{color:#e1bee7 !important}.mdl-color--purple-100{background-color:#e1bee7 !important}.mdl-color-text--purple-200{color:#ce93d8 !important}.mdl-color--purple-200{background-color:#ce93d8 !important}.mdl-color-text--purple-300{color:#ba68c8 !important}.mdl-color--purple-300{background-color:#ba68c8 !important}.mdl-color-text--purple-400{color:#ab47bc !important}.mdl-color--purple-400{background-color:#ab47bc !important}.mdl-color-text--purple-500{color:#9c27b0 !important}.mdl-color--purple-500{background-color:#9c27b0 !important}.mdl-color-text--purple-600{color:#8e24aa !important}.mdl-color--purple-600{background-color:#8e24aa !important}.mdl-color-text--purple-700{color:#7b1fa2 !important}.mdl-color--purple-700{background-color:#7b1fa2 !important}.mdl-color-text--purple-800{color:#6a1b9a !important}.mdl-color--purple-800{background-color:#6a1b9a !important}.mdl-color-text--purple-900{color:#4a148c !important}.mdl-color--purple-900{background-color:#4a148c !important}.mdl-color-text--purple-A100{color:#ea80fc !important}.mdl-color--purple-A100{background-color:#ea80fc !important}.mdl-color-text--purple-A200{color:#e040fb !important}.mdl-color--purple-A200{background-color:#e040fb !important}.mdl-color-text--purple-A400{color:#d500f9 !important}.mdl-color--purple-A400{background-color:#d500f9 !important}.mdl-color-text--purple-A700{color:#a0f !important}.mdl-color--purple-A700{background-color:#a0f !important}.mdl-color-text--deep-purple{color:#673ab7 !important}.mdl-color--deep-purple{background-color:#673ab7 !important}.mdl-color-text--deep-purple-50{color:#ede7f6 !important}.mdl-color--deep-purple-50{background-color:#ede7f6 !important}.mdl-color-text--deep-purple-100{color:#d1c4e9 !important}.mdl-color--deep-purple-100{background-color:#d1c4e9 !important}.mdl-color-text--deep-purple-200{color:#b39ddb !important}.mdl-color--deep-purple-200{background-color:#b39ddb !important}.mdl-color-text--deep-purple-300{color:#9575cd 
!important}.mdl-color--deep-purple-300{background-color:#9575cd !important}.mdl-color-text--deep-purple-400{color:#7e57c2 !important}.mdl-color--deep-purple-400{background-color:#7e57c2 !important}.mdl-color-text--deep-purple-500{color:#673ab7 !important}.mdl-color--deep-purple-500{background-color:#673ab7 !important}.mdl-color-text--deep-purple-600{color:#5e35b1 !important}.mdl-color--deep-purple-600{background-color:#5e35b1 !important}.mdl-color-text--deep-purple-700{color:#512da8 !important}.mdl-color--deep-purple-700{background-color:#512da8 !important}.mdl-color-text--deep-purple-800{color:#4527a0 !important}.mdl-color--deep-purple-800{background-color:#4527a0 !important}.mdl-color-text--deep-purple-900{color:#311b92 !important}.mdl-color--deep-purple-900{background-color:#311b92 !important}.mdl-color-text--deep-purple-A100{color:#b388ff !important}.mdl-color--deep-purple-A100{background-color:#b388ff !important}.mdl-color-text--deep-purple-A200{color:#7c4dff !important}.mdl-color--deep-purple-A200{background-color:#7c4dff !important}.mdl-color-text--deep-purple-A400{color:#651fff !important}.mdl-color--deep-purple-A400{background-color:#651fff !important}.mdl-color-text--deep-purple-A700{color:#6200ea !important}.mdl-color--deep-purple-A700{background-color:#6200ea !important}.mdl-color-text--indigo{color:#3f51b5 !important}.mdl-color--indigo{background-color:#3f51b5 !important}.mdl-color-text--indigo-50{color:#e8eaf6 !important}.mdl-color--indigo-50{background-color:#e8eaf6 !important}.mdl-color-text--indigo-100{color:#c5cae9 !important}.mdl-color--indigo-100{background-color:#c5cae9 !important}.mdl-color-text--indigo-200{color:#9fa8da !important}.mdl-color--indigo-200{background-color:#9fa8da !important}.mdl-color-text--indigo-300{color:#7986cb !important}.mdl-color--indigo-300{background-color:#7986cb !important}.mdl-color-text--indigo-400{color:#5c6bc0 !important}.mdl-color--indigo-400{background-color:#5c6bc0 !important}.mdl-color-text--indigo-500{color:#3f51b5 !important}.mdl-color--indigo-500{background-color:#3f51b5 !important}.mdl-color-text--indigo-600{color:#3949ab !important}.mdl-color--indigo-600{background-color:#3949ab !important}.mdl-color-text--indigo-700{color:#303f9f !important}.mdl-color--indigo-700{background-color:#303f9f !important}.mdl-color-text--indigo-800{color:#283593 !important}.mdl-color--indigo-800{background-color:#283593 !important}.mdl-color-text--indigo-900{color:#1a237e !important}.mdl-color--indigo-900{background-color:#1a237e !important}.mdl-color-text--indigo-A100{color:#8c9eff !important}.mdl-color--indigo-A100{background-color:#8c9eff !important}.mdl-color-text--indigo-A200{color:#536dfe !important}.mdl-color--indigo-A200{background-color:#536dfe !important}.mdl-color-text--indigo-A400{color:#3d5afe !important}.mdl-color--indigo-A400{background-color:#3d5afe !important}.mdl-color-text--indigo-A700{color:#304ffe !important}.mdl-color--indigo-A700{background-color:#304ffe !important}.mdl-color-text--blue{color:#2196f3 !important}.mdl-color--blue{background-color:#2196f3 !important}.mdl-color-text--blue-50{color:#e3f2fd !important}.mdl-color--blue-50{background-color:#e3f2fd !important}.mdl-color-text--blue-100{color:#bbdefb !important}.mdl-color--blue-100{background-color:#bbdefb !important}.mdl-color-text--blue-200{color:#90caf9 !important}.mdl-color--blue-200{background-color:#90caf9 !important}.mdl-color-text--blue-300{color:#64b5f6 !important}.mdl-color--blue-300{background-color:#64b5f6 !important}.mdl-color-text--blue-400{color:#42a5f5 
!important}.mdl-color--blue-400{background-color:#42a5f5 !important}.mdl-color-text--blue-500{color:#2196f3 !important}.mdl-color--blue-500{background-color:#2196f3 !important}.mdl-color-text--blue-600{color:#1e88e5 !important}.mdl-color--blue-600{background-color:#1e88e5 !important}.mdl-color-text--blue-700{color:#1976d2 !important}.mdl-color--blue-700{background-color:#1976d2 !important}.mdl-color-text--blue-800{color:#1565c0 !important}.mdl-color--blue-800{background-color:#1565c0 !important}.mdl-color-text--blue-900{color:#0d47a1 !important}.mdl-color--blue-900{background-color:#0d47a1 !important}.mdl-color-text--blue-A100{color:#82b1ff !important}.mdl-color--blue-A100{background-color:#82b1ff !important}.mdl-color-text--blue-A200{color:#448aff !important}.mdl-color--blue-A200{background-color:#448aff !important}.mdl-color-text--blue-A400{color:#2979ff !important}.mdl-color--blue-A400{background-color:#2979ff !important}.mdl-color-text--blue-A700{color:#2962ff !important}.mdl-color--blue-A700{background-color:#2962ff !important}.mdl-color-text--light-blue{color:#03a9f4 !important}.mdl-color--light-blue{background-color:#03a9f4 !important}.mdl-color-text--light-blue-50{color:#e1f5fe !important}.mdl-color--light-blue-50{background-color:#e1f5fe !important}.mdl-color-text--light-blue-100{color:#b3e5fc !important}.mdl-color--light-blue-100{background-color:#b3e5fc !important}.mdl-color-text--light-blue-200{color:#81d4fa !important}.mdl-color--light-blue-200{background-color:#81d4fa !important}.mdl-color-text--light-blue-300{color:#4fc3f7 !important}.mdl-color--light-blue-300{background-color:#4fc3f7 !important}.mdl-color-text--light-blue-400{color:#29b6f6 !important}.mdl-color--light-blue-400{background-color:#29b6f6 !important}.mdl-color-text--light-blue-500{color:#03a9f4 !important}.mdl-color--light-blue-500{background-color:#03a9f4 !important}.mdl-color-text--light-blue-600{color:#039be5 !important}.mdl-color--light-blue-600{background-color:#039be5 !important}.mdl-color-text--light-blue-700{color:#0288d1 !important}.mdl-color--light-blue-700{background-color:#0288d1 !important}.mdl-color-text--light-blue-800{color:#0277bd !important}.mdl-color--light-blue-800{background-color:#0277bd !important}.mdl-color-text--light-blue-900{color:#01579b !important}.mdl-color--light-blue-900{background-color:#01579b !important}.mdl-color-text--light-blue-A100{color:#80d8ff !important}.mdl-color--light-blue-A100{background-color:#80d8ff !important}.mdl-color-text--light-blue-A200{color:#40c4ff !important}.mdl-color--light-blue-A200{background-color:#40c4ff !important}.mdl-color-text--light-blue-A400{color:#00b0ff !important}.mdl-color--light-blue-A400{background-color:#00b0ff !important}.mdl-color-text--light-blue-A700{color:#0091ea !important}.mdl-color--light-blue-A700{background-color:#0091ea !important}.mdl-color-text--cyan{color:#00bcd4 !important}.mdl-color--cyan{background-color:#00bcd4 !important}.mdl-color-text--cyan-50{color:#e0f7fa !important}.mdl-color--cyan-50{background-color:#e0f7fa !important}.mdl-color-text--cyan-100{color:#b2ebf2 !important}.mdl-color--cyan-100{background-color:#b2ebf2 !important}.mdl-color-text--cyan-200{color:#80deea !important}.mdl-color--cyan-200{background-color:#80deea !important}.mdl-color-text--cyan-300{color:#4dd0e1 !important}.mdl-color--cyan-300{background-color:#4dd0e1 !important}.mdl-color-text--cyan-400{color:#26c6da !important}.mdl-color--cyan-400{background-color:#26c6da !important}.mdl-color-text--cyan-500{color:#00bcd4 
!important}.mdl-color--cyan-500{background-color:#00bcd4 !important}.mdl-color-text--cyan-600{color:#00acc1 !important}.mdl-color--cyan-600{background-color:#00acc1 !important}.mdl-color-text--cyan-700{color:#0097a7 !important}.mdl-color--cyan-700{background-color:#0097a7 !important}.mdl-color-text--cyan-800{color:#00838f !important}.mdl-color--cyan-800{background-color:#00838f !important}.mdl-color-text--cyan-900{color:#006064 !important}.mdl-color--cyan-900{background-color:#006064 !important}.mdl-color-text--cyan-A100{color:#84ffff !important}.mdl-color--cyan-A100{background-color:#84ffff !important}.mdl-color-text--cyan-A200{color:#18ffff !important}.mdl-color--cyan-A200{background-color:#18ffff !important}.mdl-color-text--cyan-A400{color:#00e5ff !important}.mdl-color--cyan-A400{background-color:#00e5ff !important}.mdl-color-text--cyan-A700{color:#00b8d4 !important}.mdl-color--cyan-A700{background-color:#00b8d4 !important}.mdl-color-text--teal{color:#009688 !important}.mdl-color--teal{background-color:#009688 !important}.mdl-color-text--teal-50{color:#e0f2f1 !important}.mdl-color--teal-50{background-color:#e0f2f1 !important}.mdl-color-text--teal-100{color:#b2dfdb !important}.mdl-color--teal-100{background-color:#b2dfdb !important}.mdl-color-text--teal-200{color:#80cbc4 !important}.mdl-color--teal-200{background-color:#80cbc4 !important}.mdl-color-text--teal-300{color:#4db6ac !important}.mdl-color--teal-300{background-color:#4db6ac !important}.mdl-color-text--teal-400{color:#26a69a !important}.mdl-color--teal-400{background-color:#26a69a !important}.mdl-color-text--teal-500{color:#009688 !important}.mdl-color--teal-500{background-color:#009688 !important}.mdl-color-text--teal-600{color:#00897b !important}.mdl-color--teal-600{background-color:#00897b !important}.mdl-color-text--teal-700{color:#00796b !important}.mdl-color--teal-700{background-color:#00796b !important}.mdl-color-text--teal-800{color:#00695c !important}.mdl-color--teal-800{background-color:#00695c !important}.mdl-color-text--teal-900{color:#004d40 !important}.mdl-color--teal-900{background-color:#004d40 !important}.mdl-color-text--teal-A100{color:#a7ffeb !important}.mdl-color--teal-A100{background-color:#a7ffeb !important}.mdl-color-text--teal-A200{color:#64ffda !important}.mdl-color--teal-A200{background-color:#64ffda !important}.mdl-color-text--teal-A400{color:#1de9b6 !important}.mdl-color--teal-A400{background-color:#1de9b6 !important}.mdl-color-text--teal-A700{color:#00bfa5 !important}.mdl-color--teal-A700{background-color:#00bfa5 !important}.mdl-color-text--green{color:#4caf50 !important}.mdl-color--green{background-color:#4caf50 !important}.mdl-color-text--green-50{color:#e8f5e9 !important}.mdl-color--green-50{background-color:#e8f5e9 !important}.mdl-color-text--green-100{color:#c8e6c9 !important}.mdl-color--green-100{background-color:#c8e6c9 !important}.mdl-color-text--green-200{color:#a5d6a7 !important}.mdl-color--green-200{background-color:#a5d6a7 !important}.mdl-color-text--green-300{color:#81c784 !important}.mdl-color--green-300{background-color:#81c784 !important}.mdl-color-text--green-400{color:#66bb6a !important}.mdl-color--green-400{background-color:#66bb6a !important}.mdl-color-text--green-500{color:#4caf50 !important}.mdl-color--green-500{background-color:#4caf50 !important}.mdl-color-text--green-600{color:#43a047 !important}.mdl-color--green-600{background-color:#43a047 !important}.mdl-color-text--green-700{color:#388e3c !important}.mdl-color--green-700{background-color:#388e3c 
!important}.mdl-color-text--green-800{color:#2e7d32 !important}.mdl-color--green-800{background-color:#2e7d32 !important}.mdl-color-text--green-900{color:#1b5e20 !important}.mdl-color--green-900{background-color:#1b5e20 !important}.mdl-color-text--green-A100{color:#b9f6ca !important}.mdl-color--green-A100{background-color:#b9f6ca !important}.mdl-color-text--green-A200{color:#69f0ae !important}.mdl-color--green-A200{background-color:#69f0ae !important}.mdl-color-text--green-A400{color:#00e676 !important}.mdl-color--green-A400{background-color:#00e676 !important}.mdl-color-text--green-A700{color:#00c853 !important}.mdl-color--green-A700{background-color:#00c853 !important}.mdl-color-text--light-green{color:#8bc34a !important}.mdl-color--light-green{background-color:#8bc34a !important}.mdl-color-text--light-green-50{color:#f1f8e9 !important}.mdl-color--light-green-50{background-color:#f1f8e9 !important}.mdl-color-text--light-green-100{color:#dcedc8 !important}.mdl-color--light-green-100{background-color:#dcedc8 !important}.mdl-color-text--light-green-200{color:#c5e1a5 !important}.mdl-color--light-green-200{background-color:#c5e1a5 !important}.mdl-color-text--light-green-300{color:#aed581 !important}.mdl-color--light-green-300{background-color:#aed581 !important}.mdl-color-text--light-green-400{color:#9ccc65 !important}.mdl-color--light-green-400{background-color:#9ccc65 !important}.mdl-color-text--light-green-500{color:#8bc34a !important}.mdl-color--light-green-500{background-color:#8bc34a !important}.mdl-color-text--light-green-600{color:#7cb342 !important}.mdl-color--light-green-600{background-color:#7cb342 !important}.mdl-color-text--light-green-700{color:#689f38 !important}.mdl-color--light-green-700{background-color:#689f38 !important}.mdl-color-text--light-green-800{color:#558b2f !important}.mdl-color--light-green-800{background-color:#558b2f !important}.mdl-color-text--light-green-900{color:#33691e !important}.mdl-color--light-green-900{background-color:#33691e !important}.mdl-color-text--light-green-A100{color:#ccff90 !important}.mdl-color--light-green-A100{background-color:#ccff90 !important}.mdl-color-text--light-green-A200{color:#b2ff59 !important}.mdl-color--light-green-A200{background-color:#b2ff59 !important}.mdl-color-text--light-green-A400{color:#76ff03 !important}.mdl-color--light-green-A400{background-color:#76ff03 !important}.mdl-color-text--light-green-A700{color:#64dd17 !important}.mdl-color--light-green-A700{background-color:#64dd17 !important}.mdl-color-text--lime{color:#cddc39 !important}.mdl-color--lime{background-color:#cddc39 !important}.mdl-color-text--lime-50{color:#f9fbe7 !important}.mdl-color--lime-50{background-color:#f9fbe7 !important}.mdl-color-text--lime-100{color:#f0f4c3 !important}.mdl-color--lime-100{background-color:#f0f4c3 !important}.mdl-color-text--lime-200{color:#e6ee9c !important}.mdl-color--lime-200{background-color:#e6ee9c !important}.mdl-color-text--lime-300{color:#dce775 !important}.mdl-color--lime-300{background-color:#dce775 !important}.mdl-color-text--lime-400{color:#d4e157 !important}.mdl-color--lime-400{background-color:#d4e157 !important}.mdl-color-text--lime-500{color:#cddc39 !important}.mdl-color--lime-500{background-color:#cddc39 !important}.mdl-color-text--lime-600{color:#c0ca33 !important}.mdl-color--lime-600{background-color:#c0ca33 !important}.mdl-color-text--lime-700{color:#afb42b !important}.mdl-color--lime-700{background-color:#afb42b !important}.mdl-color-text--lime-800{color:#9e9d24 
!important}.mdl-color--lime-800{background-color:#9e9d24 !important}.mdl-color-text--lime-900{color:#827717 !important}.mdl-color--lime-900{background-color:#827717 !important}.mdl-color-text--lime-A100{color:#f4ff81 !important}.mdl-color--lime-A100{background-color:#f4ff81 !important}.mdl-color-text--lime-A200{color:#eeff41 !important}.mdl-color--lime-A200{background-color:#eeff41 !important}.mdl-color-text--lime-A400{color:#c6ff00 !important}.mdl-color--lime-A400{background-color:#c6ff00 !important}.mdl-color-text--lime-A700{color:#aeea00 !important}.mdl-color--lime-A700{background-color:#aeea00 !important}.mdl-color-text--yellow{color:#ffeb3b !important}.mdl-color--yellow{background-color:#ffeb3b !important}.mdl-color-text--yellow-50{color:#fffde7 !important}.mdl-color--yellow-50{background-color:#fffde7 !important}.mdl-color-text--yellow-100{color:#fff9c4 !important}.mdl-color--yellow-100{background-color:#fff9c4 !important}.mdl-color-text--yellow-200{color:#fff59d !important}.mdl-color--yellow-200{background-color:#fff59d !important}.mdl-color-text--yellow-300{color:#fff176 !important}.mdl-color--yellow-300{background-color:#fff176 !important}.mdl-color-text--yellow-400{color:#ffee58 !important}.mdl-color--yellow-400{background-color:#ffee58 !important}.mdl-color-text--yellow-500{color:#ffeb3b !important}.mdl-color--yellow-500{background-color:#ffeb3b !important}.mdl-color-text--yellow-600{color:#fdd835 !important}.mdl-color--yellow-600{background-color:#fdd835 !important}.mdl-color-text--yellow-700{color:#fbc02d !important}.mdl-color--yellow-700{background-color:#fbc02d !important}.mdl-color-text--yellow-800{color:#f9a825 !important}.mdl-color--yellow-800{background-color:#f9a825 !important}.mdl-color-text--yellow-900{color:#f57f17 !important}.mdl-color--yellow-900{background-color:#f57f17 !important}.mdl-color-text--yellow-A100{color:#ffff8d !important}.mdl-color--yellow-A100{background-color:#ffff8d !important}.mdl-color-text--yellow-A200{color:#ff0 !important}.mdl-color--yellow-A200{background-color:#ff0 !important}.mdl-color-text--yellow-A400{color:#ffea00 !important}.mdl-color--yellow-A400{background-color:#ffea00 !important}.mdl-color-text--yellow-A700{color:#ffd600 !important}.mdl-color--yellow-A700{background-color:#ffd600 !important}.mdl-color-text--amber{color:#ffc107 !important}.mdl-color--amber{background-color:#ffc107 !important}.mdl-color-text--amber-50{color:#fff8e1 !important}.mdl-color--amber-50{background-color:#fff8e1 !important}.mdl-color-text--amber-100{color:#ffecb3 !important}.mdl-color--amber-100{background-color:#ffecb3 !important}.mdl-color-text--amber-200{color:#ffe082 !important}.mdl-color--amber-200{background-color:#ffe082 !important}.mdl-color-text--amber-300{color:#ffd54f !important}.mdl-color--amber-300{background-color:#ffd54f !important}.mdl-color-text--amber-400{color:#ffca28 !important}.mdl-color--amber-400{background-color:#ffca28 !important}.mdl-color-text--amber-500{color:#ffc107 !important}.mdl-color--amber-500{background-color:#ffc107 !important}.mdl-color-text--amber-600{color:#ffb300 !important}.mdl-color--amber-600{background-color:#ffb300 !important}.mdl-color-text--amber-700{color:#ffa000 !important}.mdl-color--amber-700{background-color:#ffa000 !important}.mdl-color-text--amber-800{color:#ff8f00 !important}.mdl-color--amber-800{background-color:#ff8f00 !important}.mdl-color-text--amber-900{color:#ff6f00 !important}.mdl-color--amber-900{background-color:#ff6f00 !important}.mdl-color-text--amber-A100{color:#ffe57f 
!important}.mdl-color--amber-A100{background-color:#ffe57f !important}.mdl-color-text--amber-A200{color:#ffd740 !important}.mdl-color--amber-A200{background-color:#ffd740 !important}.mdl-color-text--amber-A400{color:#ffc400 !important}.mdl-color--amber-A400{background-color:#ffc400 !important}.mdl-color-text--amber-A700{color:#ffab00 !important}.mdl-color--amber-A700{background-color:#ffab00 !important}.mdl-color-text--orange{color:#ff9800 !important}.mdl-color--orange{background-color:#ff9800 !important}.mdl-color-text--orange-50{color:#fff3e0 !important}.mdl-color--orange-50{background-color:#fff3e0 !important}.mdl-color-text--orange-100{color:#ffe0b2 !important}.mdl-color--orange-100{background-color:#ffe0b2 !important}.mdl-color-text--orange-200{color:#ffcc80 !important}.mdl-color--orange-200{background-color:#ffcc80 !important}.mdl-color-text--orange-300{color:#ffb74d !important}.mdl-color--orange-300{background-color:#ffb74d !important}.mdl-color-text--orange-400{color:#ffa726 !important}.mdl-color--orange-400{background-color:#ffa726 !important}.mdl-color-text--orange-500{color:#ff9800 !important}.mdl-color--orange-500{background-color:#ff9800 !important}.mdl-color-text--orange-600{color:#fb8c00 !important}.mdl-color--orange-600{background-color:#fb8c00 !important}.mdl-color-text--orange-700{color:#f57c00 !important}.mdl-color--orange-700{background-color:#f57c00 !important}.mdl-color-text--orange-800{color:#ef6c00 !important}.mdl-color--orange-800{background-color:#ef6c00 !important}.mdl-color-text--orange-900{color:#e65100 !important}.mdl-color--orange-900{background-color:#e65100 !important}.mdl-color-text--orange-A100{color:#ffd180 !important}.mdl-color--orange-A100{background-color:#ffd180 !important}.mdl-color-text--orange-A200{color:#ffab40 !important}.mdl-color--orange-A200{background-color:#ffab40 !important}.mdl-color-text--orange-A400{color:#ff9100 !important}.mdl-color--orange-A400{background-color:#ff9100 !important}.mdl-color-text--orange-A700{color:#ff6d00 !important}.mdl-color--orange-A700{background-color:#ff6d00 !important}.mdl-color-text--deep-orange{color:#ff5722 !important}.mdl-color--deep-orange{background-color:#ff5722 !important}.mdl-color-text--deep-orange-50{color:#fbe9e7 !important}.mdl-color--deep-orange-50{background-color:#fbe9e7 !important}.mdl-color-text--deep-orange-100{color:#ffccbc !important}.mdl-color--deep-orange-100{background-color:#ffccbc !important}.mdl-color-text--deep-orange-200{color:#ffab91 !important}.mdl-color--deep-orange-200{background-color:#ffab91 !important}.mdl-color-text--deep-orange-300{color:#ff8a65 !important}.mdl-color--deep-orange-300{background-color:#ff8a65 !important}.mdl-color-text--deep-orange-400{color:#ff7043 !important}.mdl-color--deep-orange-400{background-color:#ff7043 !important}.mdl-color-text--deep-orange-500{color:#ff5722 !important}.mdl-color--deep-orange-500{background-color:#ff5722 !important}.mdl-color-text--deep-orange-600{color:#f4511e !important}.mdl-color--deep-orange-600{background-color:#f4511e !important}.mdl-color-text--deep-orange-700{color:#e64a19 !important}.mdl-color--deep-orange-700{background-color:#e64a19 !important}.mdl-color-text--deep-orange-800{color:#d84315 !important}.mdl-color--deep-orange-800{background-color:#d84315 !important}.mdl-color-text--deep-orange-900{color:#bf360c !important}.mdl-color--deep-orange-900{background-color:#bf360c !important}.mdl-color-text--deep-orange-A100{color:#ff9e80 !important}.mdl-color--deep-orange-A100{background-color:#ff9e80 
!important}.mdl-color-text--deep-orange-A200{color:#ff6e40 !important}.mdl-color--deep-orange-A200{background-color:#ff6e40 !important}.mdl-color-text--deep-orange-A400{color:#ff3d00 !important}.mdl-color--deep-orange-A400{background-color:#ff3d00 !important}.mdl-color-text--deep-orange-A700{color:#dd2c00 !important}.mdl-color--deep-orange-A700{background-color:#dd2c00 !important}.mdl-color-text--brown{color:#795548 !important}.mdl-color--brown{background-color:#795548 !important}.mdl-color-text--brown-50{color:#efebe9 !important}.mdl-color--brown-50{background-color:#efebe9 !important}.mdl-color-text--brown-100{color:#d7ccc8 !important}.mdl-color--brown-100{background-color:#d7ccc8 !important}.mdl-color-text--brown-200{color:#bcaaa4 !important}.mdl-color--brown-200{background-color:#bcaaa4 !important}.mdl-color-text--brown-300{color:#a1887f !important}.mdl-color--brown-300{background-color:#a1887f !important}.mdl-color-text--brown-400{color:#8d6e63 !important}.mdl-color--brown-400{background-color:#8d6e63 !important}.mdl-color-text--brown-500{color:#795548 !important}.mdl-color--brown-500{background-color:#795548 !important}.mdl-color-text--brown-600{color:#6d4c41 !important}.mdl-color--brown-600{background-color:#6d4c41 !important}.mdl-color-text--brown-700{color:#5d4037 !important}.mdl-color--brown-700{background-color:#5d4037 !important}.mdl-color-text--brown-800{color:#4e342e !important}.mdl-color--brown-800{background-color:#4e342e !important}.mdl-color-text--brown-900{color:#3e2723 !important}.mdl-color--brown-900{background-color:#3e2723 !important}.mdl-color-text--grey{color:#9e9e9e !important}.mdl-color--grey{background-color:#9e9e9e !important}.mdl-color-text--grey-50{color:#fafafa !important}.mdl-color--grey-50{background-color:#fafafa !important}.mdl-color-text--grey-100{color:#f5f5f5 !important}.mdl-color--grey-100{background-color:#f5f5f5 !important}.mdl-color-text--grey-200{color:#eee !important}.mdl-color--grey-200{background-color:#eee !important}.mdl-color-text--grey-300{color:#e0e0e0 !important}.mdl-color--grey-300{background-color:#e0e0e0 !important}.mdl-color-text--grey-400{color:#bdbdbd !important}.mdl-color--grey-400{background-color:#bdbdbd !important}.mdl-color-text--grey-500{color:#9e9e9e !important}.mdl-color--grey-500{background-color:#9e9e9e !important}.mdl-color-text--grey-600{color:#757575 !important}.mdl-color--grey-600{background-color:#757575 !important}.mdl-color-text--grey-700{color:#616161 !important}.mdl-color--grey-700{background-color:#616161 !important}.mdl-color-text--grey-800{color:#424242 !important}.mdl-color--grey-800{background-color:#424242 !important}.mdl-color-text--grey-900{color:#212121 !important}.mdl-color--grey-900{background-color:#212121 !important}.mdl-color-text--blue-grey{color:#607d8b !important}.mdl-color--blue-grey{background-color:#607d8b !important}.mdl-color-text--blue-grey-50{color:#eceff1 !important}.mdl-color--blue-grey-50{background-color:#eceff1 !important}.mdl-color-text--blue-grey-100{color:#cfd8dc !important}.mdl-color--blue-grey-100{background-color:#cfd8dc !important}.mdl-color-text--blue-grey-200{color:#b0bec5 !important}.mdl-color--blue-grey-200{background-color:#b0bec5 !important}.mdl-color-text--blue-grey-300{color:#90a4ae !important}.mdl-color--blue-grey-300{background-color:#90a4ae !important}.mdl-color-text--blue-grey-400{color:#78909c !important}.mdl-color--blue-grey-400{background-color:#78909c !important}.mdl-color-text--blue-grey-500{color:#607d8b 
!important}.mdl-color--blue-grey-500{background-color:#607d8b !important}.mdl-color-text--blue-grey-600{color:#546e7a !important}.mdl-color--blue-grey-600{background-color:#546e7a !important}.mdl-color-text--blue-grey-700{color:#455a64 !important}.mdl-color--blue-grey-700{background-color:#455a64 !important}.mdl-color-text--blue-grey-800{color:#37474f !important}.mdl-color--blue-grey-800{background-color:#37474f !important}.mdl-color-text--blue-grey-900{color:#263238 !important}.mdl-color--blue-grey-900{background-color:#263238 !important}.mdl-color--black{background-color:#000 !important}.mdl-color-text--black{color:#000 !important}.mdl-color--white{background-color:#fff !important}.mdl-color-text--white{color:#fff !important}.mdl-color--primary{background-color:rgb(103,58,183)!important}.mdl-color--primary-contrast{background-color:rgb(255,255,255)!important}.mdl-color--primary-dark{background-color:rgb(81,45,168)!important}.mdl-color--accent{background-color:rgb(255,64,129)!important}.mdl-color--accent-contrast{background-color:rgb(255,255,255)!important}.mdl-color-text--primary{color:rgb(103,58,183)!important}.mdl-color-text--primary-contrast{color:rgb(255,255,255)!important}.mdl-color-text--primary-dark{color:rgb(81,45,168)!important}.mdl-color-text--accent{color:rgb(255,64,129)!important}.mdl-color-text--accent-contrast{color:rgb(255,255,255)!important}.mdl-ripple{background:#000;border-radius:50%;height:50px;left:0;opacity:0;pointer-events:none;position:absolute;top:0;-webkit-transform:translate(-50%,-50%);transform:translate(-50%,-50%);width:50px;overflow:hidden}.mdl-ripple.is-animating{transition:transform .3s cubic-bezier(0,0,.2,1),width .3s cubic-bezier(0,0,.2,1),height .3s cubic-bezier(0,0,.2,1),opacity .6s cubic-bezier(0,0,.2,1);transition:transform .3s cubic-bezier(0,0,.2,1),width .3s cubic-bezier(0,0,.2,1),height .3s cubic-bezier(0,0,.2,1),opacity .6s cubic-bezier(0,0,.2,1),-webkit-transform .3s cubic-bezier(0,0,.2,1)}.mdl-ripple.is-visible{opacity:.3}.mdl-animation--default,.mdl-animation--fast-out-slow-in{transition-timing-function:cubic-bezier(.4,0,.2,1)}.mdl-animation--linear-out-slow-in{transition-timing-function:cubic-bezier(0,0,.2,1)}.mdl-animation--fast-out-linear-in{transition-timing-function:cubic-bezier(.4,0,1,1)}.mdl-badge{position:relative;white-space:nowrap;margin-right:24px}.mdl-badge:not([data-badge]){margin-right:auto}.mdl-badge[data-badge]:after{content:attr(data-badge);display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;-webkit-align-content:center;-ms-flex-line-pack:center;align-content:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;position:absolute;top:-11px;right:-24px;font-family:"Roboto","Helvetica","Arial",sans-serif;font-weight:600;font-size:12px;width:22px;height:22px;border-radius:50%;background:rgb(255,64,129);color:rgb(255,255,255)}.mdl-button .mdl-badge[data-badge]:after{top:-10px;right:-5px}.mdl-badge.mdl-badge--no-background[data-badge]:after{color:rgb(255,64,129);background:rgba(255,255,255,.2);box-shadow:0 0 1px gray}.mdl-badge.mdl-badge--overlap{margin-right:10px}.mdl-badge.mdl-badge--overlap:after{right:-10px}.mdl-button{background:0 0;border:none;border-radius:2px;color:#000;position:relative;height:36px;margin:0;min-width:64px;padding:0 
16px;display:inline-block;font-family:"Roboto","Helvetica","Arial",sans-serif;font-size:14px;font-weight:500;text-transform:uppercase;letter-spacing:0;overflow:hidden;will-change:box-shadow;transition:box-shadow .2s cubic-bezier(.4,0,1,1),background-color .2s cubic-bezier(.4,0,.2,1),color .2s cubic-bezier(.4,0,.2,1);outline:none;cursor:pointer;text-decoration:none;text-align:center;line-height:36px;vertical-align:middle}.mdl-button::-moz-focus-inner{border:0}.mdl-button:hover{background-color:rgba(158,158,158,.2)}.mdl-button:focus:not(:active){background-color:rgba(0,0,0,.12)}.mdl-button:active{background-color:rgba(158,158,158,.4)}.mdl-button.mdl-button--colored{color:rgb(103,58,183)}.mdl-button.mdl-button--colored:focus:not(:active){background-color:rgba(0,0,0,.12)}input.mdl-button[type="submit"]{-webkit-appearance:none}.mdl-button--raised{background:rgba(158,158,158,.2);box-shadow:0 2px 2px 0 rgba(0,0,0,.14),0 3px 1px -2px rgba(0,0,0,.2),0 1px 5px 0 rgba(0,0,0,.12)}.mdl-button--raised:active{box-shadow:0 4px 5px 0 rgba(0,0,0,.14),0 1px 10px 0 rgba(0,0,0,.12),0 2px 4px -1px rgba(0,0,0,.2);background-color:rgba(158,158,158,.4)}.mdl-button--raised:focus:not(:active){box-shadow:0 0 8px rgba(0,0,0,.18),0 8px 16px rgba(0,0,0,.36);background-color:rgba(158,158,158,.4)}.mdl-button--raised.mdl-button--colored{background:rgb(103,58,183);color:rgb(255,255,255)}.mdl-button--raised.mdl-button--colored:hover{background-color:rgb(103,58,183)}.mdl-button--raised.mdl-button--colored:active{background-color:rgb(103,58,183)}.mdl-button--raised.mdl-button--colored:focus:not(:active){background-color:rgb(103,58,183)}.mdl-button--raised.mdl-button--colored .mdl-ripple{background:rgb(255,255,255)}.mdl-button--fab{border-radius:50%;font-size:24px;height:56px;margin:auto;min-width:56px;width:56px;padding:0;overflow:hidden;background:rgba(158,158,158,.2);box-shadow:0 1px 1.5px 0 rgba(0,0,0,.12),0 1px 1px 0 rgba(0,0,0,.24);position:relative;line-height:normal}.mdl-button--fab .material-icons{position:absolute;top:50%;left:50%;-webkit-transform:translate(-12px,-12px);transform:translate(-12px,-12px);line-height:24px;width:24px}.mdl-button--fab.mdl-button--mini-fab{height:40px;min-width:40px;width:40px}.mdl-button--fab .mdl-button__ripple-container{border-radius:50%;-webkit-mask-image:-webkit-radial-gradient(circle,#fff,#000)}.mdl-button--fab:active{box-shadow:0 4px 5px 0 rgba(0,0,0,.14),0 1px 10px 0 rgba(0,0,0,.12),0 2px 4px -1px rgba(0,0,0,.2);background-color:rgba(158,158,158,.4)}.mdl-button--fab:focus:not(:active){box-shadow:0 0 8px rgba(0,0,0,.18),0 8px 16px rgba(0,0,0,.36);background-color:rgba(158,158,158,.4)}.mdl-button--fab.mdl-button--colored{background:rgb(255,64,129);color:rgb(255,255,255)}.mdl-button--fab.mdl-button--colored:hover{background-color:rgb(255,64,129)}.mdl-button--fab.mdl-button--colored:focus:not(:active){background-color:rgb(255,64,129)}.mdl-button--fab.mdl-button--colored:active{background-color:rgb(255,64,129)}.mdl-button--fab.mdl-button--colored .mdl-ripple{background:rgb(255,255,255)}.mdl-button--icon{border-radius:50%;font-size:24px;height:32px;margin-left:0;margin-right:0;min-width:32px;width:32px;padding:0;overflow:hidden;color:inherit;line-height:normal}.mdl-button--icon .material-icons{position:absolute;top:50%;left:50%;-webkit-transform:translate(-12px,-12px);transform:translate(-12px,-12px);line-height:24px;width:24px}.mdl-button--icon.mdl-button--mini-icon{height:24px;min-width:24px;width:24px}.mdl-button--icon.mdl-button--mini-icon 
.material-icons{top:0;left:0}.mdl-button--icon .mdl-button__ripple-container{border-radius:50%;-webkit-mask-image:-webkit-radial-gradient(circle,#fff,#000)}.mdl-button__ripple-container{display:block;height:100%;left:0;position:absolute;top:0;width:100%;z-index:0;overflow:hidden}.mdl-button[disabled] .mdl-button__ripple-container .mdl-ripple,.mdl-button.mdl-button--disabled .mdl-button__ripple-container .mdl-ripple{background-color:transparent}.mdl-button--primary.mdl-button--primary{color:rgb(103,58,183)}.mdl-button--primary.mdl-button--primary .mdl-ripple{background:rgb(255,255,255)}.mdl-button--primary.mdl-button--primary.mdl-button--raised,.mdl-button--primary.mdl-button--primary.mdl-button--fab{color:rgb(255,255,255);background-color:rgb(103,58,183)}.mdl-button--accent.mdl-button--accent{color:rgb(255,64,129)}.mdl-button--accent.mdl-button--accent .mdl-ripple{background:rgb(255,255,255)}.mdl-button--accent.mdl-button--accent.mdl-button--raised,.mdl-button--accent.mdl-button--accent.mdl-button--fab{color:rgb(255,255,255);background-color:rgb(255,64,129)}.mdl-button[disabled][disabled],.mdl-button.mdl-button--disabled.mdl-button--disabled{color:rgba(0,0,0,.26);cursor:default;background-color:transparent}.mdl-button--fab[disabled][disabled],.mdl-button--fab.mdl-button--disabled.mdl-button--disabled{background-color:rgba(0,0,0,.12);color:rgba(0,0,0,.26)}.mdl-button--raised[disabled][disabled],.mdl-button--raised.mdl-button--disabled.mdl-button--disabled{background-color:rgba(0,0,0,.12);color:rgba(0,0,0,.26);box-shadow:none}.mdl-button--colored[disabled][disabled],.mdl-button--colored.mdl-button--disabled.mdl-button--disabled{color:rgba(0,0,0,.26)}.mdl-button .material-icons{vertical-align:middle}.mdl-card{display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;font-size:16px;font-weight:400;min-height:200px;overflow:hidden;width:330px;z-index:1;position:relative;background:#fff;border-radius:2px;box-sizing:border-box}.mdl-card__media{background-color:rgb(255,64,129);background-repeat:repeat;background-position:50% 50%;background-size:cover;background-origin:padding-box;background-attachment:scroll;box-sizing:border-box}.mdl-card__title{-webkit-align-items:center;-ms-flex-align:center;align-items:center;color:#000;display:block;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-justify-content:stretch;-ms-flex-pack:stretch;justify-content:stretch;line-height:normal;padding:16px;-webkit-perspective-origin:165px 56px;perspective-origin:165px 56px;-webkit-transform-origin:165px 56px;transform-origin:165px 56px;box-sizing:border-box}.mdl-card__title.mdl-card--border{border-bottom:1px solid rgba(0,0,0,.1)}.mdl-card__title-text{-webkit-align-self:flex-end;-ms-flex-item-align:end;align-self:flex-end;color:inherit;display:block;display:-webkit-flex;display:-ms-flexbox;display:flex;font-size:24px;font-weight:300;line-height:normal;overflow:hidden;-webkit-transform-origin:149px 48px;transform-origin:149px 48px;margin:0}.mdl-card__subtitle-text{font-size:14px;color:rgba(0,0,0,.54);margin:0}.mdl-card__supporting-text{color:rgba(0,0,0,.54);font-size:1rem;line-height:18px;overflow:hidden;padding:16px;width:90%}.mdl-card__supporting-text.mdl-card--border{border-bottom:1px solid rgba(0,0,0,.1)}.mdl-card__actions{font-size:16px;line-height:normal;width:100%;background-color:transparent;padding:8px;box-sizing:border-box}.mdl-card__actions.mdl-card--border{border-top:1px solid 
rgba(0,0,0,.1)}.mdl-card--expand{-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1}.mdl-card__menu{position:absolute;right:16px;top:16px}.mdl-checkbox{position:relative;z-index:1;vertical-align:middle;display:inline-block;box-sizing:border-box;width:100%;height:24px;margin:0;padding:0}.mdl-checkbox.is-upgraded{padding-left:24px}.mdl-checkbox__input{line-height:24px}.mdl-checkbox.is-upgraded .mdl-checkbox__input{position:absolute;width:0;height:0;margin:0;padding:0;opacity:0;-ms-appearance:none;-moz-appearance:none;-webkit-appearance:none;appearance:none;border:none}.mdl-checkbox__box-outline{position:absolute;top:3px;left:0;display:inline-block;box-sizing:border-box;width:16px;height:16px;margin:0;cursor:pointer;overflow:hidden;border:2px solid rgba(0,0,0,.54);border-radius:2px;z-index:2}.mdl-checkbox.is-checked .mdl-checkbox__box-outline{border:2px solid rgb(103,58,183)}fieldset[disabled] .mdl-checkbox .mdl-checkbox__box-outline,.mdl-checkbox.is-disabled .mdl-checkbox__box-outline{border:2px solid rgba(0,0,0,.26);cursor:auto}.mdl-checkbox__focus-helper{position:absolute;top:3px;left:0;display:inline-block;box-sizing:border-box;width:16px;height:16px;border-radius:50%;background-color:transparent}.mdl-checkbox.is-focused .mdl-checkbox__focus-helper{box-shadow:0 0 0 8px rgba(0,0,0,.1);background-color:rgba(0,0,0,.1)}.mdl-checkbox.is-focused.is-checked .mdl-checkbox__focus-helper{box-shadow:0 0 0 8px rgba(103,58,183,.26);background-color:rgba(103,58,183,.26)}.mdl-checkbox__tick-outline{position:absolute;top:0;left:0;height:100%;width:100%;-webkit-mask:url("data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiIHN0YW5kYWxvbmU9Im5vIj8+CjxzdmcKICAgeG1sbnM6ZGM9Imh0dHA6Ly9wdXJsLm9yZy9kYy9lbGVtZW50cy8xLjEvIgogICB4bWxuczpjYz0iaHR0cDovL2NyZWF0aXZlY29tbW9ucy5vcmcvbnMjIgogICB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiCiAgIHhtbG5zOnN2Zz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciCiAgIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIKICAgdmVyc2lvbj0iMS4xIgogICB2aWV3Qm94PSIwIDAgMSAxIgogICBwcmVzZXJ2ZUFzcGVjdFJhdGlvPSJ4TWluWU1pbiBtZWV0Ij4KICA8ZGVmcz4KICAgIDxjbGlwUGF0aCBpZD0iY2xpcCI+CiAgICAgIDxwYXRoCiAgICAgICAgIGQ9Ik0gMCwwIDAsMSAxLDEgMSwwIDAsMCB6IE0gMC44NTM0Mzc1LDAuMTY3MTg3NSAwLjk1OTY4NzUsMC4yNzMxMjUgMC40MjkzNzUsMC44MDM0Mzc1IDAuMzIzMTI1LDAuOTA5Njg3NSAwLjIxNzE4NzUsMC44MDM0Mzc1IDAuMDQwMzEyNSwwLjYyNjg3NSAwLjE0NjU2MjUsMC41MjA2MjUgMC4zMjMxMjUsMC42OTc1IDAuODUzNDM3NSwwLjE2NzE4NzUgeiIKICAgICAgICAgc3R5bGU9ImZpbGw6I2ZmZmZmZjtmaWxsLW9wYWNpdHk6MTtzdHJva2U6bm9uZSIgLz4KICAgIDwvY2xpcFBhdGg+CiAgICA8bWFzayBpZD0ibWFzayIgbWFza1VuaXRzPSJvYmplY3RCb3VuZGluZ0JveCIgbWFza0NvbnRlbnRVbml0cz0ib2JqZWN0Qm91bmRpbmdCb3giPgogICAgICA8cGF0aAogICAgICAgICBkPSJNIDAsMCAwLDEgMSwxIDEsMCAwLDAgeiBNIDAuODUzNDM3NSwwLjE2NzE4NzUgMC45NTk2ODc1LDAuMjczMTI1IDAuNDI5Mzc1LDAuODAzNDM3NSAwLjMyMzEyNSwwLjkwOTY4NzUgMC4yMTcxODc1LDAuODAzNDM3NSAwLjA0MDMxMjUsMC42MjY4NzUgMC4xNDY1NjI1LDAuNTIwNjI1IDAuMzIzMTI1LDAuNjk3NSAwLjg1MzQzNzUsMC4xNjcxODc1IHoiCiAgICAgICAgIHN0eWxlPSJmaWxsOiNmZmZmZmY7ZmlsbC1vcGFjaXR5OjE7c3Ryb2tlOm5vbmUiIC8+CiAgICA8L21hc2s+CiAgPC9kZWZzPgogIDxyZWN0CiAgICAgd2lkdGg9IjEiCiAgICAgaGVpZ2h0PSIxIgogICAgIHg9IjAiCiAgICAgeT0iMCIKICAgICBjbGlwLXBhdGg9InVybCgjY2xpcCkiCiAgICAgc3R5bGU9ImZpbGw6IzAwMDAwMDtmaWxsLW9wYWNpdHk6MTtzdHJva2U6bm9uZSIgLz4KPC9zdmc+Cg==");mask:url("data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiIHN0YW5kYWxvbmU9Im5vIj8+CjxzdmcKICAgeG1sbnM6ZGM9Imh0dHA6Ly9wdXJsLm9yZy9kYy9lbGVtZW50cy8xLjEvIgogICB4bWxuczpjYz0iaHR0cDovL2NyZWF0aXZlY29tbW9ucy5vcmcvbnMjIgogICB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzM
ub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiCiAgIHhtbG5zOnN2Zz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciCiAgIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIKICAgdmVyc2lvbj0iMS4xIgogICB2aWV3Qm94PSIwIDAgMSAxIgogICBwcmVzZXJ2ZUFzcGVjdFJhdGlvPSJ4TWluWU1pbiBtZWV0Ij4KICA8ZGVmcz4KICAgIDxjbGlwUGF0aCBpZD0iY2xpcCI+CiAgICAgIDxwYXRoCiAgICAgICAgIGQ9Ik0gMCwwIDAsMSAxLDEgMSwwIDAsMCB6IE0gMC44NTM0Mzc1LDAuMTY3MTg3NSAwLjk1OTY4NzUsMC4yNzMxMjUgMC40MjkzNzUsMC44MDM0Mzc1IDAuMzIzMTI1LDAuOTA5Njg3NSAwLjIxNzE4NzUsMC44MDM0Mzc1IDAuMDQwMzEyNSwwLjYyNjg3NSAwLjE0NjU2MjUsMC41MjA2MjUgMC4zMjMxMjUsMC42OTc1IDAuODUzNDM3NSwwLjE2NzE4NzUgeiIKICAgICAgICAgc3R5bGU9ImZpbGw6I2ZmZmZmZjtmaWxsLW9wYWNpdHk6MTtzdHJva2U6bm9uZSIgLz4KICAgIDwvY2xpcFBhdGg+CiAgICA8bWFzayBpZD0ibWFzayIgbWFza1VuaXRzPSJvYmplY3RCb3VuZGluZ0JveCIgbWFza0NvbnRlbnRVbml0cz0ib2JqZWN0Qm91bmRpbmdCb3giPgogICAgICA8cGF0aAogICAgICAgICBkPSJNIDAsMCAwLDEgMSwxIDEsMCAwLDAgeiBNIDAuODUzNDM3NSwwLjE2NzE4NzUgMC45NTk2ODc1LDAuMjczMTI1IDAuNDI5Mzc1LDAuODAzNDM3NSAwLjMyMzEyNSwwLjkwOTY4NzUgMC4yMTcxODc1LDAuODAzNDM3NSAwLjA0MDMxMjUsMC42MjY4NzUgMC4xNDY1NjI1LDAuNTIwNjI1IDAuMzIzMTI1LDAuNjk3NSAwLjg1MzQzNzUsMC4xNjcxODc1IHoiCiAgICAgICAgIHN0eWxlPSJmaWxsOiNmZmZmZmY7ZmlsbC1vcGFjaXR5OjE7c3Ryb2tlOm5vbmUiIC8+CiAgICA8L21hc2s+CiAgPC9kZWZzPgogIDxyZWN0CiAgICAgd2lkdGg9IjEiCiAgICAgaGVpZ2h0PSIxIgogICAgIHg9IjAiCiAgICAgeT0iMCIKICAgICBjbGlwLXBhdGg9InVybCgjY2xpcCkiCiAgICAgc3R5bGU9ImZpbGw6IzAwMDAwMDtmaWxsLW9wYWNpdHk6MTtzdHJva2U6bm9uZSIgLz4KPC9zdmc+Cg==");background:0 0;transition-duration:.28s;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-property:background}.mdl-checkbox.is-checked .mdl-checkbox__tick-outline{background:rgb(103,58,183)url("data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiIHN0YW5kYWxvbmU9Im5vIj8+CjxzdmcKICAgeG1sbnM6ZGM9Imh0dHA6Ly9wdXJsLm9yZy9kYy9lbGVtZW50cy8xLjEvIgogICB4bWxuczpjYz0iaHR0cDovL2NyZWF0aXZlY29tbW9ucy5vcmcvbnMjIgogICB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiCiAgIHhtbG5zOnN2Zz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciCiAgIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIKICAgdmVyc2lvbj0iMS4xIgogICB2aWV3Qm94PSIwIDAgMSAxIgogICBwcmVzZXJ2ZUFzcGVjdFJhdGlvPSJ4TWluWU1pbiBtZWV0Ij4KICA8cGF0aAogICAgIGQ9Ik0gMC4wNDAzODA1OSwwLjYyNjc3NjcgMC4xNDY0NDY2MSwwLjUyMDcxMDY4IDAuNDI5Mjg5MzIsMC44MDM1NTMzOSAwLjMyMzIyMzMsMC45MDk2MTk0MSB6IE0gMC4yMTcxNTcyOSwwLjgwMzU1MzM5IDAuODUzNTUzMzksMC4xNjcxNTcyOSAwLjk1OTYxOTQxLDAuMjczMjIzMyAwLjMyMzIyMzMsMC45MDk2MTk0MSB6IgogICAgIGlkPSJyZWN0Mzc4MCIKICAgICBzdHlsZT0iZmlsbDojZmZmZmZmO2ZpbGwtb3BhY2l0eToxO3N0cm9rZTpub25lIiAvPgo8L3N2Zz4K")}fieldset[disabled] .mdl-checkbox.is-checked .mdl-checkbox__tick-outline,.mdl-checkbox.is-checked.is-disabled 
.mdl-checkbox__tick-outline{background:rgba(0,0,0,.26)url("data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiIHN0YW5kYWxvbmU9Im5vIj8+CjxzdmcKICAgeG1sbnM6ZGM9Imh0dHA6Ly9wdXJsLm9yZy9kYy9lbGVtZW50cy8xLjEvIgogICB4bWxuczpjYz0iaHR0cDovL2NyZWF0aXZlY29tbW9ucy5vcmcvbnMjIgogICB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiCiAgIHhtbG5zOnN2Zz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciCiAgIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIKICAgdmVyc2lvbj0iMS4xIgogICB2aWV3Qm94PSIwIDAgMSAxIgogICBwcmVzZXJ2ZUFzcGVjdFJhdGlvPSJ4TWluWU1pbiBtZWV0Ij4KICA8cGF0aAogICAgIGQ9Ik0gMC4wNDAzODA1OSwwLjYyNjc3NjcgMC4xNDY0NDY2MSwwLjUyMDcxMDY4IDAuNDI5Mjg5MzIsMC44MDM1NTMzOSAwLjMyMzIyMzMsMC45MDk2MTk0MSB6IE0gMC4yMTcxNTcyOSwwLjgwMzU1MzM5IDAuODUzNTUzMzksMC4xNjcxNTcyOSAwLjk1OTYxOTQxLDAuMjczMjIzMyAwLjMyMzIyMzMsMC45MDk2MTk0MSB6IgogICAgIGlkPSJyZWN0Mzc4MCIKICAgICBzdHlsZT0iZmlsbDojZmZmZmZmO2ZpbGwtb3BhY2l0eToxO3N0cm9rZTpub25lIiAvPgo8L3N2Zz4K")}.mdl-checkbox__label{position:relative;cursor:pointer;font-size:16px;line-height:24px;margin:0}fieldset[disabled] .mdl-checkbox .mdl-checkbox__label,.mdl-checkbox.is-disabled .mdl-checkbox__label{color:rgba(0,0,0,.26);cursor:auto}.mdl-checkbox__ripple-container{position:absolute;z-index:2;top:-6px;left:-10px;box-sizing:border-box;width:36px;height:36px;border-radius:50%;cursor:pointer;overflow:hidden;-webkit-mask-image:-webkit-radial-gradient(circle,#fff,#000)}.mdl-checkbox__ripple-container .mdl-ripple{background:rgb(103,58,183)}fieldset[disabled] .mdl-checkbox .mdl-checkbox__ripple-container,.mdl-checkbox.is-disabled .mdl-checkbox__ripple-container{cursor:auto}fieldset[disabled] .mdl-checkbox .mdl-checkbox__ripple-container .mdl-ripple,.mdl-checkbox.is-disabled .mdl-checkbox__ripple-container .mdl-ripple{background:0 0}.mdl-chip{height:32px;font-family:"Roboto","Helvetica","Arial",sans-serif;line-height:32px;padding:0 12px;border:0;border-radius:16px;background-color:#dedede;display:inline-block;color:rgba(0,0,0,.87);margin:2px 0;font-size:0;white-space:nowrap}.mdl-chip__text{font-size:13px;vertical-align:middle;display:inline-block}.mdl-chip__action{height:24px;width:24px;background:0 0;opacity:.54;cursor:pointer;padding:0;margin:0 0 0 4px;font-size:13px;text-decoration:none;color:rgba(0,0,0,.87);border:none;outline:none}.mdl-chip__action,.mdl-chip__contact{display:inline-block;vertical-align:middle;overflow:hidden;text-align:center}.mdl-chip__contact{height:32px;width:32px;border-radius:16px;margin-right:8px;font-size:18px;line-height:32px}.mdl-chip:focus{outline:0;box-shadow:0 2px 2px 0 rgba(0,0,0,.14),0 3px 1px -2px rgba(0,0,0,.2),0 1px 5px 0 rgba(0,0,0,.12)}.mdl-chip:active{background-color:#d6d6d6}.mdl-chip--deletable{padding-right:4px}.mdl-chip--contact{padding-left:0}.mdl-data-table{position:relative;border:1px solid rgba(0,0,0,.12);border-collapse:collapse;white-space:nowrap;font-size:13px;background-color:#fff}.mdl-data-table thead{padding-bottom:3px}.mdl-data-table thead .mdl-data-table__select{margin-top:0}.mdl-data-table tbody tr{position:relative;height:48px;transition-duration:.28s;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-property:background-color}.mdl-data-table tbody tr.is-selected{background-color:#e0e0e0}.mdl-data-table tbody tr:hover{background-color:#eee}.mdl-data-table td{text-align:right}.mdl-data-table th{padding:0 18px 12px 18px;text-align:right}.mdl-data-table td:first-of-type,.mdl-data-table th:first-of-type{padding-left:24px}.mdl-data-table td:last-of-type,.mdl-data-table 
th:last-of-type{padding-right:24px}.mdl-data-table td{position:relative;height:48px;border-top:1px solid rgba(0,0,0,.12);border-bottom:1px solid rgba(0,0,0,.12);padding:12px 18px;box-sizing:border-box}.mdl-data-table td,.mdl-data-table td .mdl-data-table__select{vertical-align:middle}.mdl-data-table th{position:relative;vertical-align:bottom;text-overflow:ellipsis;font-weight:700;line-height:24px;letter-spacing:0;height:48px;font-size:12px;color:rgba(0,0,0,.54);padding-bottom:8px;box-sizing:border-box}.mdl-data-table th.mdl-data-table__header--sorted-ascending,.mdl-data-table th.mdl-data-table__header--sorted-descending{color:rgba(0,0,0,.87)}.mdl-data-table th.mdl-data-table__header--sorted-ascending:before,.mdl-data-table th.mdl-data-table__header--sorted-descending:before{font-family:'Material Icons';font-weight:400;font-style:normal;line-height:1;letter-spacing:normal;text-transform:none;display:inline-block;word-wrap:normal;-moz-font-feature-settings:'liga';font-feature-settings:'liga';-webkit-font-feature-settings:'liga';-webkit-font-smoothing:antialiased;font-size:16px;content:"\e5d8";margin-right:5px;vertical-align:sub}.mdl-data-table th.mdl-data-table__header--sorted-ascending:hover,.mdl-data-table th.mdl-data-table__header--sorted-descending:hover{cursor:pointer}.mdl-data-table th.mdl-data-table__header--sorted-ascending:hover:before,.mdl-data-table th.mdl-data-table__header--sorted-descending:hover:before{color:rgba(0,0,0,.26)}.mdl-data-table th.mdl-data-table__header--sorted-descending:before{content:"\e5db"}.mdl-data-table__select{width:16px}.mdl-data-table__cell--non-numeric.mdl-data-table__cell--non-numeric{text-align:left}.mdl-dialog{border:none;box-shadow:0 9px 46px 8px rgba(0,0,0,.14),0 11px 15px -7px rgba(0,0,0,.12),0 24px 38px 3px rgba(0,0,0,.2);width:280px}.mdl-dialog__title{padding:24px 24px 0;margin:0;font-size:2.5rem}.mdl-dialog__actions{padding:8px 8px 8px 24px;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-direction:row-reverse;-ms-flex-direction:row-reverse;flex-direction:row-reverse;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap}.mdl-dialog__actions>*{margin-right:8px;height:36px}.mdl-dialog__actions>*:first-child{margin-right:0}.mdl-dialog__actions--full-width{padding:0 0 8px}.mdl-dialog__actions--full-width>*{height:48px;-webkit-flex:0 0 100%;-ms-flex:0 0 100%;flex:0 0 100%;padding-right:16px;margin-right:0;text-align:right}.mdl-dialog__content{padding:20px 24px 24px;color:rgba(0,0,0,.54)}.mdl-mega-footer{padding:16px 40px;color:#9e9e9e;background-color:#424242}.mdl-mega-footer--top-section:after,.mdl-mega-footer--middle-section:after,.mdl-mega-footer--bottom-section:after,.mdl-mega-footer__top-section:after,.mdl-mega-footer__middle-section:after,.mdl-mega-footer__bottom-section:after{content:'';display:block;clear:both}.mdl-mega-footer--left-section,.mdl-mega-footer__left-section,.mdl-mega-footer--right-section,.mdl-mega-footer__right-section{margin-bottom:16px}.mdl-mega-footer--right-section a,.mdl-mega-footer__right-section a{display:block;margin-bottom:16px;color:inherit;text-decoration:none}@media screen and (min-width:760px){.mdl-mega-footer--left-section,.mdl-mega-footer__left-section{float:left}.mdl-mega-footer--right-section,.mdl-mega-footer__right-section{float:right}.mdl-mega-footer--right-section a,.mdl-mega-footer__right-section 
a{display:inline-block;margin-left:16px;line-height:36px;vertical-align:middle}}.mdl-mega-footer--social-btn,.mdl-mega-footer__social-btn{width:36px;height:36px;padding:0;margin:0;background-color:#9e9e9e;border:none}.mdl-mega-footer--drop-down-section,.mdl-mega-footer__drop-down-section{display:block;position:relative}@media screen and (min-width:760px){.mdl-mega-footer--drop-down-section,.mdl-mega-footer__drop-down-section{width:33%}.mdl-mega-footer--drop-down-section:nth-child(1),.mdl-mega-footer--drop-down-section:nth-child(2),.mdl-mega-footer__drop-down-section:nth-child(1),.mdl-mega-footer__drop-down-section:nth-child(2){float:left}.mdl-mega-footer--drop-down-section:nth-child(3),.mdl-mega-footer__drop-down-section:nth-child(3){float:right}.mdl-mega-footer--drop-down-section:nth-child(3):after,.mdl-mega-footer__drop-down-section:nth-child(3):after{clear:right}.mdl-mega-footer--drop-down-section:nth-child(4),.mdl-mega-footer__drop-down-section:nth-child(4){clear:right;float:right}.mdl-mega-footer--middle-section:after,.mdl-mega-footer__middle-section:after{content:'';display:block;clear:both}.mdl-mega-footer--bottom-section,.mdl-mega-footer__bottom-section{padding-top:0}}@media screen and (min-width:1024px){.mdl-mega-footer--drop-down-section,.mdl-mega-footer--drop-down-section:nth-child(3),.mdl-mega-footer--drop-down-section:nth-child(4),.mdl-mega-footer__drop-down-section,.mdl-mega-footer__drop-down-section:nth-child(3),.mdl-mega-footer__drop-down-section:nth-child(4){width:24%;float:left}}.mdl-mega-footer--heading-checkbox,.mdl-mega-footer__heading-checkbox{position:absolute;width:100%;height:55.8px;padding:32px;margin:-16px 0 0;cursor:pointer;z-index:1;opacity:0}.mdl-mega-footer--heading-checkbox+.mdl-mega-footer--heading:after,.mdl-mega-footer--heading-checkbox+.mdl-mega-footer__heading:after,.mdl-mega-footer__heading-checkbox+.mdl-mega-footer--heading:after,.mdl-mega-footer__heading-checkbox+.mdl-mega-footer__heading:after{font-family:'Material Icons';content:'\E5CE'}.mdl-mega-footer--heading-checkbox:checked~.mdl-mega-footer--link-list,.mdl-mega-footer--heading-checkbox:checked~.mdl-mega-footer__link-list,.mdl-mega-footer--heading-checkbox:checked+.mdl-mega-footer--heading+.mdl-mega-footer--link-list,.mdl-mega-footer--heading-checkbox:checked+.mdl-mega-footer__heading+.mdl-mega-footer__link-list,.mdl-mega-footer__heading-checkbox:checked~.mdl-mega-footer--link-list,.mdl-mega-footer__heading-checkbox:checked~.mdl-mega-footer__link-list,.mdl-mega-footer__heading-checkbox:checked+.mdl-mega-footer--heading+.mdl-mega-footer--link-list,.mdl-mega-footer__heading-checkbox:checked+.mdl-mega-footer__heading+.mdl-mega-footer__link-list{display:none}.mdl-mega-footer--heading-checkbox:checked+.mdl-mega-footer--heading:after,.mdl-mega-footer--heading-checkbox:checked+.mdl-mega-footer__heading:after,.mdl-mega-footer__heading-checkbox:checked+.mdl-mega-footer--heading:after,.mdl-mega-footer__heading-checkbox:checked+.mdl-mega-footer__heading:after{font-family:'Material 
Icons';content:'\E5CF'}.mdl-mega-footer--heading,.mdl-mega-footer__heading{position:relative;width:100%;padding-right:39.8px;margin-bottom:16px;box-sizing:border-box;font-size:14px;line-height:23.8px;font-weight:500;white-space:nowrap;text-overflow:ellipsis;overflow:hidden;color:#e0e0e0}.mdl-mega-footer--heading:after,.mdl-mega-footer__heading:after{content:'';position:absolute;top:0;right:0;display:block;width:23.8px;height:23.8px;background-size:cover}.mdl-mega-footer--link-list,.mdl-mega-footer__link-list{list-style:none;padding:0;margin:0 0 32px}.mdl-mega-footer--link-list:after,.mdl-mega-footer__link-list:after{clear:both;display:block;content:''}.mdl-mega-footer--link-list li,.mdl-mega-footer__link-list li{font-size:14px;font-weight:400;letter-spacing:0;line-height:20px}.mdl-mega-footer--link-list a,.mdl-mega-footer__link-list a{color:inherit;text-decoration:none;white-space:nowrap}@media screen and (min-width:760px){.mdl-mega-footer--heading-checkbox,.mdl-mega-footer__heading-checkbox{display:none}.mdl-mega-footer--heading-checkbox+.mdl-mega-footer--heading:after,.mdl-mega-footer--heading-checkbox+.mdl-mega-footer__heading:after,.mdl-mega-footer__heading-checkbox+.mdl-mega-footer--heading:after,.mdl-mega-footer__heading-checkbox+.mdl-mega-footer__heading:after{content:''}.mdl-mega-footer--heading-checkbox:checked~.mdl-mega-footer--link-list,.mdl-mega-footer--heading-checkbox:checked~.mdl-mega-footer__link-list,.mdl-mega-footer--heading-checkbox:checked+.mdl-mega-footer__heading+.mdl-mega-footer__link-list,.mdl-mega-footer--heading-checkbox:checked+.mdl-mega-footer--heading+.mdl-mega-footer--link-list,.mdl-mega-footer__heading-checkbox:checked~.mdl-mega-footer--link-list,.mdl-mega-footer__heading-checkbox:checked~.mdl-mega-footer__link-list,.mdl-mega-footer__heading-checkbox:checked+.mdl-mega-footer__heading+.mdl-mega-footer__link-list,.mdl-mega-footer__heading-checkbox:checked+.mdl-mega-footer--heading+.mdl-mega-footer--link-list{display:block}.mdl-mega-footer--heading-checkbox:checked+.mdl-mega-footer--heading:after,.mdl-mega-footer--heading-checkbox:checked+.mdl-mega-footer__heading:after,.mdl-mega-footer__heading-checkbox:checked+.mdl-mega-footer--heading:after,.mdl-mega-footer__heading-checkbox:checked+.mdl-mega-footer__heading:after{content:''}}.mdl-mega-footer--bottom-section,.mdl-mega-footer__bottom-section{padding-top:16px;margin-bottom:16px}.mdl-logo{margin-bottom:16px;color:#fff}.mdl-mega-footer--bottom-section .mdl-mega-footer--link-list li,.mdl-mega-footer__bottom-section .mdl-mega-footer__link-list li{float:left;margin-bottom:0;margin-right:16px}@media screen and (min-width:760px){.mdl-logo{float:left;margin-bottom:0;margin-right:16px}}.mdl-mini-footer{display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-flow:row wrap;-ms-flex-flow:row wrap;flex-flow:row wrap;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between;padding:32px 16px;color:#9e9e9e;background-color:#424242}.mdl-mini-footer:after{content:'';display:block}.mdl-mini-footer .mdl-logo{line-height:36px}.mdl-mini-footer--link-list,.mdl-mini-footer__link-list{display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-flow:row nowrap;-ms-flex-flow:row nowrap;flex-flow:row nowrap;list-style:none;margin:0;padding:0}.mdl-mini-footer--link-list li,.mdl-mini-footer__link-list li{margin-bottom:0;margin-right:16px}@media screen and (min-width:760px){.mdl-mini-footer--link-list li,.mdl-mini-footer__link-list li{line-height:36px}}.mdl-mini-footer--link-list 
a,.mdl-mini-footer__link-list a{color:inherit;text-decoration:none;white-space:nowrap}.mdl-mini-footer--left-section,.mdl-mini-footer__left-section{display:inline-block;-webkit-order:0;-ms-flex-order:0;order:0}.mdl-mini-footer--right-section,.mdl-mini-footer__right-section{display:inline-block;-webkit-order:1;-ms-flex-order:1;order:1}.mdl-mini-footer--social-btn,.mdl-mini-footer__social-btn{width:36px;height:36px;padding:0;margin:0;background-color:#9e9e9e;border:none}.mdl-icon-toggle{position:relative;z-index:1;vertical-align:middle;display:inline-block;height:32px;margin:0;padding:0}.mdl-icon-toggle__input{line-height:32px}.mdl-icon-toggle.is-upgraded .mdl-icon-toggle__input{position:absolute;width:0;height:0;margin:0;padding:0;opacity:0;-ms-appearance:none;-moz-appearance:none;-webkit-appearance:none;appearance:none;border:none}.mdl-icon-toggle__label{display:inline-block;position:relative;cursor:pointer;height:32px;width:32px;min-width:32px;color:#616161;border-radius:50%;padding:0;margin-left:0;margin-right:0;text-align:center;background-color:transparent;will-change:background-color;transition:background-color .2s cubic-bezier(.4,0,.2,1),color .2s cubic-bezier(.4,0,.2,1)}.mdl-icon-toggle__label.material-icons{line-height:32px;font-size:24px}.mdl-icon-toggle.is-checked .mdl-icon-toggle__label{color:rgb(103,58,183)}.mdl-icon-toggle.is-disabled .mdl-icon-toggle__label{color:rgba(0,0,0,.26);cursor:auto;transition:none}.mdl-icon-toggle.is-focused .mdl-icon-toggle__label{background-color:rgba(0,0,0,.12)}.mdl-icon-toggle.is-focused.is-checked .mdl-icon-toggle__label{background-color:rgba(103,58,183,.26)}.mdl-icon-toggle__ripple-container{position:absolute;z-index:2;top:-2px;left:-2px;box-sizing:border-box;width:36px;height:36px;border-radius:50%;cursor:pointer;overflow:hidden;-webkit-mask-image:-webkit-radial-gradient(circle,#fff,#000)}.mdl-icon-toggle__ripple-container .mdl-ripple{background:#616161}.mdl-icon-toggle.is-disabled .mdl-icon-toggle__ripple-container{cursor:auto}.mdl-icon-toggle.is-disabled .mdl-icon-toggle__ripple-container .mdl-ripple{background:0 0}.mdl-list{display:block;padding:8px 0;list-style:none}.mdl-list__item{font-family:"Roboto","Helvetica","Arial",sans-serif;font-size:16px;font-weight:400;letter-spacing:.04em;line-height:1;min-height:48px;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row;-webkit-flex-wrap:nowrap;-ms-flex-wrap:nowrap;flex-wrap:nowrap;padding:16px;cursor:default;color:rgba(0,0,0,.87);overflow:hidden}.mdl-list__item,.mdl-list__item .mdl-list__item-primary-content{box-sizing:border-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-align-items:center;-ms-flex-align:center;align-items:center}.mdl-list__item .mdl-list__item-primary-content{-webkit-order:0;-ms-flex-order:0;order:0;-webkit-flex-grow:2;-ms-flex-positive:2;flex-grow:2;text-decoration:none}.mdl-list__item .mdl-list__item-primary-content .mdl-list__item-icon{margin-right:32px}.mdl-list__item .mdl-list__item-primary-content .mdl-list__item-avatar{margin-right:16px}.mdl-list__item .mdl-list__item-secondary-content{display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-flow:column;-ms-flex-flow:column;flex-flow:column;-webkit-align-items:flex-end;-ms-flex-align:end;align-items:flex-end;margin-left:16px}.mdl-list__item .mdl-list__item-secondary-content .mdl-list__item-secondary-action label{display:inline}.mdl-list__item .mdl-list__item-secondary-content 
.mdl-list__item-secondary-info{font-size:12px;font-weight:400;line-height:1;letter-spacing:0;color:rgba(0,0,0,.54)}.mdl-list__item .mdl-list__item-secondary-content .mdl-list__item-sub-header{padding:0 0 0 16px}.mdl-list__item-icon,.mdl-list__item-icon.material-icons{height:24px;width:24px;font-size:24px;box-sizing:border-box;color:#757575}.mdl-list__item-avatar,.mdl-list__item-avatar.material-icons{height:40px;width:40px;box-sizing:border-box;border-radius:50%;background-color:#757575;font-size:40px;color:#fff}.mdl-list__item--two-line{height:72px}.mdl-list__item--two-line .mdl-list__item-primary-content{height:36px;line-height:20px;display:block}.mdl-list__item--two-line .mdl-list__item-primary-content .mdl-list__item-avatar{float:left}.mdl-list__item--two-line .mdl-list__item-primary-content .mdl-list__item-icon{float:left;margin-top:6px}.mdl-list__item--two-line .mdl-list__item-primary-content .mdl-list__item-secondary-content{height:36px}.mdl-list__item--two-line .mdl-list__item-primary-content .mdl-list__item-sub-title{font-size:14px;font-weight:400;letter-spacing:0;line-height:18px;color:rgba(0,0,0,.54);display:block;padding:0}.mdl-list__item--three-line{height:88px}.mdl-list__item--three-line .mdl-list__item-primary-content{height:52px;line-height:20px;display:block}.mdl-list__item--three-line .mdl-list__item-primary-content .mdl-list__item-avatar,.mdl-list__item--three-line .mdl-list__item-primary-content .mdl-list__item-icon{float:left}.mdl-list__item--three-line .mdl-list__item-secondary-content{height:52px}.mdl-list__item--three-line .mdl-list__item-text-body{font-size:14px;font-weight:400;letter-spacing:0;line-height:18px;height:52px;color:rgba(0,0,0,.54);display:block;padding:0}.mdl-menu__container{display:block;margin:0;padding:0;border:none;position:absolute;overflow:visible;height:0;width:0;visibility:hidden;z-index:-1}.mdl-menu__container.is-visible,.mdl-menu__container.is-animating{z-index:999;visibility:visible}.mdl-menu__outline{display:block;background:#fff;margin:0;padding:0;border:none;border-radius:2px;position:absolute;top:0;left:0;overflow:hidden;opacity:0;-webkit-transform:scale(0);transform:scale(0);-webkit-transform-origin:0 0;transform-origin:0 0;box-shadow:0 2px 2px 0 rgba(0,0,0,.14),0 3px 1px -2px rgba(0,0,0,.2),0 1px 5px 0 rgba(0,0,0,.12);will-change:transform;transition:transform .3s cubic-bezier(.4,0,.2,1),opacity .2s cubic-bezier(.4,0,.2,1);transition:transform .3s cubic-bezier(.4,0,.2,1),opacity .2s cubic-bezier(.4,0,.2,1),-webkit-transform .3s cubic-bezier(.4,0,.2,1);z-index:-1}.mdl-menu__container.is-visible .mdl-menu__outline{opacity:1;-webkit-transform:scale(1);transform:scale(1);z-index:999}.mdl-menu__outline.mdl-menu--bottom-right{-webkit-transform-origin:100% 0;transform-origin:100% 0}.mdl-menu__outline.mdl-menu--top-left{-webkit-transform-origin:0 100%;transform-origin:0 100%}.mdl-menu__outline.mdl-menu--top-right{-webkit-transform-origin:100% 100%;transform-origin:100% 100%}.mdl-menu{position:absolute;list-style:none;top:0;left:0;height:auto;width:auto;min-width:124px;padding:8px 0;margin:0;opacity:0;clip:rect(0 0 0 0);z-index:-1}.mdl-menu__container.is-visible .mdl-menu{opacity:1;z-index:999}.mdl-menu.is-animating{transition:opacity .2s cubic-bezier(.4,0,.2,1),clip .3s 
cubic-bezier(.4,0,.2,1)}.mdl-menu.mdl-menu--bottom-right{left:auto;right:0}.mdl-menu.mdl-menu--top-left{top:auto;bottom:0}.mdl-menu.mdl-menu--top-right{top:auto;left:auto;bottom:0;right:0}.mdl-menu.mdl-menu--unaligned{top:auto;left:auto}.mdl-menu__item{display:block;border:none;color:rgba(0,0,0,.87);background-color:transparent;text-align:left;margin:0;padding:0 16px;outline-color:#bdbdbd;position:relative;overflow:hidden;font-size:14px;font-weight:400;letter-spacing:0;text-decoration:none;cursor:pointer;height:48px;line-height:48px;white-space:nowrap;opacity:0;transition:opacity .2s cubic-bezier(.4,0,.2,1);-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.mdl-menu__container.is-visible .mdl-menu__item{opacity:1}.mdl-menu__item::-moz-focus-inner{border:0}.mdl-menu__item--full-bleed-divider{border-bottom:1px solid rgba(0,0,0,.12)}.mdl-menu__item[disabled],.mdl-menu__item[data-mdl-disabled]{color:#bdbdbd;background-color:transparent;cursor:auto}.mdl-menu__item[disabled]:hover,.mdl-menu__item[data-mdl-disabled]:hover{background-color:transparent}.mdl-menu__item[disabled]:focus,.mdl-menu__item[data-mdl-disabled]:focus{background-color:transparent}.mdl-menu__item[disabled] .mdl-ripple,.mdl-menu__item[data-mdl-disabled] .mdl-ripple{background:0 0}.mdl-menu__item:hover{background-color:#eee}.mdl-menu__item:focus{outline:none;background-color:#eee}.mdl-menu__item:active{background-color:#e0e0e0}.mdl-menu__item--ripple-container{display:block;height:100%;left:0;position:absolute;top:0;width:100%;z-index:0;overflow:hidden}.mdl-progress{display:block;position:relative;height:4px;width:500px;max-width:100%}.mdl-progress>.bar{display:block;position:absolute;top:0;bottom:0;width:0%;transition:width .2s cubic-bezier(.4,0,.2,1)}.mdl-progress>.progressbar{background-color:rgb(103,58,183);z-index:1;left:0}.mdl-progress>.bufferbar{background-image:linear-gradient(to right,rgba(255,255,255,.7),rgba(255,255,255,.7)),linear-gradient(to right,rgb(103,58,183),rgb(103,58,183));z-index:0;left:0}.mdl-progress>.auxbar{right:0}@supports (-webkit-appearance:none){.mdl-progress:not(.mdl-progress--indeterminate):not(.mdl-progress--indeterminate)>.auxbar,.mdl-progress:not(.mdl-progress__indeterminate):not(.mdl-progress__indeterminate)>.auxbar{background-image:linear-gradient(to right,rgba(255,255,255,.7),rgba(255,255,255,.7)),linear-gradient(to 
right,rgb(103,58,183),rgb(103,58,183));-webkit-mask:url("data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIj8+Cjxzdmcgd2lkdGg9IjEyIiBoZWlnaHQ9IjQiIHZpZXdQb3J0PSIwIDAgMTIgNCIgdmVyc2lvbj0iMS4xIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciPgogIDxlbGxpcHNlIGN4PSIyIiBjeT0iMiIgcng9IjIiIHJ5PSIyIj4KICAgIDxhbmltYXRlIGF0dHJpYnV0ZU5hbWU9ImN4IiBmcm9tPSIyIiB0bz0iLTEwIiBkdXI9IjAuNnMiIHJlcGVhdENvdW50PSJpbmRlZmluaXRlIiAvPgogIDwvZWxsaXBzZT4KICA8ZWxsaXBzZSBjeD0iMTQiIGN5PSIyIiByeD0iMiIgcnk9IjIiIGNsYXNzPSJsb2FkZXIiPgogICAgPGFuaW1hdGUgYXR0cmlidXRlTmFtZT0iY3giIGZyb209IjE0IiB0bz0iMiIgZHVyPSIwLjZzIiByZXBlYXRDb3VudD0iaW5kZWZpbml0ZSIgLz4KICA8L2VsbGlwc2U+Cjwvc3ZnPgo=");mask:url("data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIj8+Cjxzdmcgd2lkdGg9IjEyIiBoZWlnaHQ9IjQiIHZpZXdQb3J0PSIwIDAgMTIgNCIgdmVyc2lvbj0iMS4xIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciPgogIDxlbGxpcHNlIGN4PSIyIiBjeT0iMiIgcng9IjIiIHJ5PSIyIj4KICAgIDxhbmltYXRlIGF0dHJpYnV0ZU5hbWU9ImN4IiBmcm9tPSIyIiB0bz0iLTEwIiBkdXI9IjAuNnMiIHJlcGVhdENvdW50PSJpbmRlZmluaXRlIiAvPgogIDwvZWxsaXBzZT4KICA8ZWxsaXBzZSBjeD0iMTQiIGN5PSIyIiByeD0iMiIgcnk9IjIiIGNsYXNzPSJsb2FkZXIiPgogICAgPGFuaW1hdGUgYXR0cmlidXRlTmFtZT0iY3giIGZyb209IjE0IiB0bz0iMiIgZHVyPSIwLjZzIiByZXBlYXRDb3VudD0iaW5kZWZpbml0ZSIgLz4KICA8L2VsbGlwc2U+Cjwvc3ZnPgo=")}}.mdl-progress:not(.mdl-progress--indeterminate)>.auxbar,.mdl-progress:not(.mdl-progress__indeterminate)>.auxbar{background-image:linear-gradient(to right,rgba(255,255,255,.9),rgba(255,255,255,.9)),linear-gradient(to right,rgb(103,58,183),rgb(103,58,183))}.mdl-progress.mdl-progress--indeterminate>.bar1,.mdl-progress.mdl-progress__indeterminate>.bar1{-webkit-animation-name:indeterminate1;animation-name:indeterminate1}.mdl-progress.mdl-progress--indeterminate>.bar1,.mdl-progress.mdl-progress__indeterminate>.bar1,.mdl-progress.mdl-progress--indeterminate>.bar3,.mdl-progress.mdl-progress__indeterminate>.bar3{background-color:rgb(103,58,183);-webkit-animation-duration:2s;animation-duration:2s;-webkit-animation-iteration-count:infinite;animation-iteration-count:infinite;-webkit-animation-timing-function:linear;animation-timing-function:linear}.mdl-progress.mdl-progress--indeterminate>.bar3,.mdl-progress.mdl-progress__indeterminate>.bar3{background-image:none;-webkit-animation-name:indeterminate2;animation-name:indeterminate2}@-webkit-keyframes indeterminate1{0%{left:0%;width:0%}50%{left:25%;width:75%}75%{left:100%;width:0%}}@keyframes indeterminate1{0%{left:0%;width:0%}50%{left:25%;width:75%}75%{left:100%;width:0%}}@-webkit-keyframes indeterminate2{0%,50%{left:0%;width:0%}75%{left:0%;width:25%}100%{left:100%;width:0%}}@keyframes indeterminate2{0%,50%{left:0%;width:0%}75%{left:0%;width:25%}100%{left:100%;width:0%}}.mdl-navigation{display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:nowrap;-ms-flex-wrap:nowrap;flex-wrap:nowrap;box-sizing:border-box}.mdl-navigation__link{color:#424242;text-decoration:none;margin:0;font-size:14px;font-weight:400;line-height:24px;letter-spacing:0;opacity:.87}.mdl-navigation__link .material-icons{vertical-align:middle}.mdl-layout{width:100%;height:100%;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;overflow-y:auto;overflow-x:hidden;position:relative;-webkit-overflow-scrolling:touch}.mdl-layout.is-small-screen .mdl-layout--large-screen-only{display:none}.mdl-layout:not(.is-small-screen) 
.mdl-layout--small-screen-only{display:none}.mdl-layout__container{position:absolute;width:100%;height:100%}.mdl-layout__title,.mdl-layout-title{display:block;position:relative;font-family:"Roboto","Helvetica","Arial",sans-serif;font-size:20px;line-height:1;letter-spacing:.02em;font-weight:400;box-sizing:border-box}.mdl-layout-spacer{-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1}.mdl-layout__drawer{display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-flex-wrap:nowrap;-ms-flex-wrap:nowrap;flex-wrap:nowrap;width:240px;height:100%;max-height:100%;position:absolute;top:0;left:0;box-shadow:0 2px 2px 0 rgba(0,0,0,.14),0 3px 1px -2px rgba(0,0,0,.2),0 1px 5px 0 rgba(0,0,0,.12);box-sizing:border-box;border-right:1px solid #e0e0e0;background:#fafafa;-webkit-transform:translateX(-250px);transform:translateX(-250px);-webkit-transform-style:preserve-3d;transform-style:preserve-3d;will-change:transform;transition-duration:.2s;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-property:transform;transition-property:transform,-webkit-transform;color:#424242;overflow:visible;overflow-y:auto;z-index:5}.mdl-layout__drawer.is-visible{-webkit-transform:translateX(0);transform:translateX(0)}.mdl-layout__drawer.is-visible~.mdl-layout__content.mdl-layout__content{overflow:hidden}.mdl-layout__drawer>*{-webkit-flex-shrink:0;-ms-flex-negative:0;flex-shrink:0}.mdl-layout__drawer>.mdl-layout__title,.mdl-layout__drawer>.mdl-layout-title{line-height:64px;padding-left:40px}@media screen and (max-width:1024px){.mdl-layout__drawer>.mdl-layout__title,.mdl-layout__drawer>.mdl-layout-title{line-height:56px;padding-left:16px}}.mdl-layout__drawer .mdl-navigation{-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-align-items:stretch;-ms-flex-align:stretch;align-items:stretch;padding-top:16px}.mdl-layout__drawer .mdl-navigation .mdl-navigation__link{display:block;-webkit-flex-shrink:0;-ms-flex-negative:0;flex-shrink:0;padding:16px 40px;margin:0;color:#757575}@media screen and (max-width:1024px){.mdl-layout__drawer .mdl-navigation .mdl-navigation__link{padding:16px}}.mdl-layout__drawer .mdl-navigation .mdl-navigation__link:hover{background-color:#e0e0e0}.mdl-layout__drawer .mdl-navigation .mdl-navigation__link--current{background-color:#e0e0e0;color:#000}@media screen and (min-width:1025px){.mdl-layout--fixed-drawer>.mdl-layout__drawer{-webkit-transform:translateX(0);transform:translateX(0)}}.mdl-layout__drawer-button{display:block;position:absolute;height:48px;width:48px;border:0;-webkit-flex-shrink:0;-ms-flex-negative:0;flex-shrink:0;overflow:hidden;text-align:center;cursor:pointer;font-size:26px;line-height:56px;font-family:Helvetica,Arial,sans-serif;margin:8px 12px;top:0;left:0;color:rgb(255,255,255);z-index:4}.mdl-layout__header .mdl-layout__drawer-button{position:absolute;color:rgb(255,255,255);background-color:inherit}@media screen and (max-width:1024px){.mdl-layout__header .mdl-layout__drawer-button{margin:4px}}@media screen and (max-width:1024px){.mdl-layout__drawer-button{margin:4px;color:rgba(0,0,0,.5)}}@media screen and (min-width:1025px){.mdl-layout__drawer-button{line-height:54px}.mdl-layout--no-desktop-drawer-button .mdl-layout__drawer-button,.mdl-layout--fixed-drawer>.mdl-layout__drawer-button,.mdl-layout--no-drawer-button 
.mdl-layout__drawer-button{display:none}}.mdl-layout__header{display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-flex-wrap:nowrap;-ms-flex-wrap:nowrap;flex-wrap:nowrap;-webkit-justify-content:flex-start;-ms-flex-pack:start;justify-content:flex-start;box-sizing:border-box;-webkit-flex-shrink:0;-ms-flex-negative:0;flex-shrink:0;width:100%;margin:0;padding:0;border:none;min-height:64px;max-height:1000px;z-index:3;background-color:rgb(103,58,183);color:rgb(255,255,255);box-shadow:0 2px 2px 0 rgba(0,0,0,.14),0 3px 1px -2px rgba(0,0,0,.2),0 1px 5px 0 rgba(0,0,0,.12);transition-duration:.2s;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-property:max-height,box-shadow}@media screen and (max-width:1024px){.mdl-layout__header{min-height:56px}}.mdl-layout--fixed-drawer.is-upgraded:not(.is-small-screen)>.mdl-layout__header{margin-left:240px;width:calc(100% - 240px)}@media screen and (min-width:1025px){.mdl-layout--fixed-drawer>.mdl-layout__header .mdl-layout__header-row{padding-left:40px}}.mdl-layout__header>.mdl-layout-icon{position:absolute;left:40px;top:16px;height:32px;width:32px;overflow:hidden;z-index:3;display:block}@media screen and (max-width:1024px){.mdl-layout__header>.mdl-layout-icon{left:16px;top:12px}}.mdl-layout.has-drawer .mdl-layout__header>.mdl-layout-icon{display:none}.mdl-layout__header.is-compact{max-height:64px}@media screen and (max-width:1024px){.mdl-layout__header.is-compact{max-height:56px}}.mdl-layout__header.is-compact.has-tabs{height:112px}@media screen and (max-width:1024px){.mdl-layout__header.is-compact.has-tabs{min-height:104px}}@media screen and (max-width:1024px){.mdl-layout__header{display:none}.mdl-layout--fixed-header>.mdl-layout__header{display:-webkit-flex;display:-ms-flexbox;display:flex}}.mdl-layout__header--transparent.mdl-layout__header--transparent{background-color:transparent;box-shadow:none}.mdl-layout__header--seamed,.mdl-layout__header--scroll{box-shadow:none}.mdl-layout__header--waterfall{box-shadow:none;overflow:hidden}.mdl-layout__header--waterfall.is-casting-shadow{box-shadow:0 2px 2px 0 rgba(0,0,0,.14),0 3px 1px -2px rgba(0,0,0,.2),0 1px 5px 0 rgba(0,0,0,.12)}.mdl-layout__header--waterfall.mdl-layout__header--waterfall-hide-top{-webkit-justify-content:flex-end;-ms-flex-pack:end;justify-content:flex-end}.mdl-layout__header-row{display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row;-webkit-flex-wrap:nowrap;-ms-flex-wrap:nowrap;flex-wrap:nowrap;-webkit-flex-shrink:0;-ms-flex-negative:0;flex-shrink:0;box-sizing:border-box;-webkit-align-self:stretch;-ms-flex-item-align:stretch;align-self:stretch;-webkit-align-items:center;-ms-flex-align:center;align-items:center;height:64px;margin:0;padding:0 40px 0 80px}.mdl-layout--no-drawer-button .mdl-layout__header-row{padding-left:40px}@media screen and (min-width:1025px){.mdl-layout--no-desktop-drawer-button .mdl-layout__header-row{padding-left:40px}}@media screen and (max-width:1024px){.mdl-layout__header-row{height:56px;padding:0 16px 0 72px}.mdl-layout--no-drawer-button .mdl-layout__header-row{padding-left:16px}}.mdl-layout__header-row>*{-webkit-flex-shrink:0;-ms-flex-negative:0;flex-shrink:0}.mdl-layout__header--scroll .mdl-layout__header-row{width:100%}.mdl-layout__header-row 
.mdl-navigation{margin:0;padding:0;height:64px;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row;-webkit-align-items:center;-ms-flex-align:center;align-items:center}@media screen and (max-width:1024px){.mdl-layout__header-row .mdl-navigation{height:56px}}.mdl-layout__header-row .mdl-navigation__link{display:block;color:rgb(255,255,255);line-height:64px;padding:0 24px}@media screen and (max-width:1024px){.mdl-layout__header-row .mdl-navigation__link{line-height:56px;padding:0 16px}}.mdl-layout__obfuscator{background-color:transparent;position:absolute;top:0;left:0;height:100%;width:100%;z-index:4;visibility:hidden;transition-property:background-color;transition-duration:.2s;transition-timing-function:cubic-bezier(.4,0,.2,1)}.mdl-layout__obfuscator.is-visible{background-color:rgba(0,0,0,.5);visibility:visible}@supports (pointer-events:auto){.mdl-layout__obfuscator{background-color:rgba(0,0,0,.5);opacity:0;transition-property:opacity;visibility:visible;pointer-events:none}.mdl-layout__obfuscator.is-visible{pointer-events:auto;opacity:1}}.mdl-layout__content{-ms-flex:0 1 auto;position:relative;display:inline-block;overflow-y:auto;overflow-x:hidden;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;z-index:1;-webkit-overflow-scrolling:touch}.mdl-layout--fixed-drawer>.mdl-layout__content{margin-left:240px}.mdl-layout__container.has-scrolling-header .mdl-layout__content{overflow:visible}@media screen and (max-width:1024px){.mdl-layout--fixed-drawer>.mdl-layout__content{margin-left:0}.mdl-layout__container.has-scrolling-header .mdl-layout__content{overflow-y:auto;overflow-x:hidden}}.mdl-layout__tab-bar{height:96px;margin:0;width:calc(100% - 112px);padding:0 0 0 56px;display:-webkit-flex;display:-ms-flexbox;display:flex;background-color:rgb(103,58,183);overflow-y:hidden;overflow-x:scroll}.mdl-layout__tab-bar::-webkit-scrollbar{display:none}.mdl-layout--no-drawer-button .mdl-layout__tab-bar{padding-left:16px;width:calc(100% - 32px)}@media screen and (min-width:1025px){.mdl-layout--no-desktop-drawer-button .mdl-layout__tab-bar{padding-left:16px;width:calc(100% - 32px)}}@media screen and (max-width:1024px){.mdl-layout__tab-bar{width:calc(100% - 60px);padding:0 0 0 60px}.mdl-layout--no-drawer-button .mdl-layout__tab-bar{width:calc(100% - 8px);padding-left:4px}}.mdl-layout--fixed-tabs .mdl-layout__tab-bar{padding:0;overflow:hidden;width:100%}.mdl-layout__tab-bar-container{position:relative;height:48px;width:100%;border:none;margin:0;z-index:2;-webkit-flex-grow:0;-ms-flex-positive:0;flex-grow:0;-webkit-flex-shrink:0;-ms-flex-negative:0;flex-shrink:0;overflow:hidden}.mdl-layout__container>.mdl-layout__tab-bar-container{position:absolute;top:0;left:0}.mdl-layout__tab-bar-button{display:inline-block;position:absolute;top:0;height:48px;width:56px;z-index:4;text-align:center;background-color:rgb(103,58,183);color:transparent;cursor:pointer;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.mdl-layout--no-desktop-drawer-button .mdl-layout__tab-bar-button,.mdl-layout--no-drawer-button .mdl-layout__tab-bar-button{width:16px}.mdl-layout--no-desktop-drawer-button .mdl-layout__tab-bar-button .material-icons,.mdl-layout--no-drawer-button .mdl-layout__tab-bar-button .material-icons{position:relative;left:-4px}@media screen and (max-width:1024px){.mdl-layout__tab-bar-button{width:60px}}.mdl-layout--fixed-tabs .mdl-layout__tab-bar-button{display:none}.mdl-layout__tab-bar-button 
.material-icons{line-height:48px}.mdl-layout__tab-bar-button.is-active{color:rgb(255,255,255)}.mdl-layout__tab-bar-left-button{left:0}.mdl-layout__tab-bar-right-button{right:0}.mdl-layout__tab{margin:0;border:none;padding:0 24px;float:left;position:relative;display:block;-webkit-flex-grow:0;-ms-flex-positive:0;flex-grow:0;-webkit-flex-shrink:0;-ms-flex-negative:0;flex-shrink:0;text-decoration:none;height:48px;line-height:48px;text-align:center;font-weight:500;font-size:14px;text-transform:uppercase;color:rgba(255,255,255,.6);overflow:hidden}@media screen and (max-width:1024px){.mdl-layout__tab{padding:0 12px}}.mdl-layout--fixed-tabs .mdl-layout__tab{float:none;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;padding:0}.mdl-layout.is-upgraded .mdl-layout__tab.is-active{color:rgb(255,255,255)}.mdl-layout.is-upgraded .mdl-layout__tab.is-active::after{height:2px;width:100%;display:block;content:" ";bottom:0;left:0;position:absolute;background:rgb(255,64,129);-webkit-animation:border-expand .2s cubic-bezier(.4,0,.4,1).01s alternate forwards;animation:border-expand .2s cubic-bezier(.4,0,.4,1).01s alternate forwards;transition:all 1s cubic-bezier(.4,0,1,1)}.mdl-layout__tab .mdl-layout__tab-ripple-container{display:block;position:absolute;height:100%;width:100%;left:0;top:0;z-index:1;overflow:hidden}.mdl-layout__tab .mdl-layout__tab-ripple-container .mdl-ripple{background-color:rgb(255,255,255)}.mdl-layout__tab-panel{display:block}.mdl-layout.is-upgraded .mdl-layout__tab-panel{display:none}.mdl-layout.is-upgraded .mdl-layout__tab-panel.is-active{display:block}.mdl-radio{position:relative;font-size:16px;line-height:24px;display:inline-block;vertical-align:middle;box-sizing:border-box;height:24px;margin:0;padding-left:0}.mdl-radio.is-upgraded{padding-left:24px}.mdl-radio__button{line-height:24px}.mdl-radio.is-upgraded .mdl-radio__button{position:absolute;width:0;height:0;margin:0;padding:0;opacity:0;-ms-appearance:none;-moz-appearance:none;-webkit-appearance:none;appearance:none;border:none}.mdl-radio__outer-circle{position:absolute;top:4px;left:0;display:inline-block;box-sizing:border-box;width:16px;height:16px;margin:0;cursor:pointer;border:2px solid rgba(0,0,0,.54);border-radius:50%;z-index:2}.mdl-radio.is-checked .mdl-radio__outer-circle{border:2px solid rgb(103,58,183)}.mdl-radio__outer-circle fieldset[disabled] .mdl-radio,.mdl-radio.is-disabled .mdl-radio__outer-circle{border:2px solid rgba(0,0,0,.26);cursor:auto}.mdl-radio__inner-circle{position:absolute;z-index:1;margin:0;top:8px;left:4px;box-sizing:border-box;width:8px;height:8px;cursor:pointer;transition-duration:.28s;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-property:transform;transition-property:transform,-webkit-transform;-webkit-transform:scale(0,0);transform:scale(0,0);border-radius:50%;background:rgb(103,58,183)}.mdl-radio.is-checked .mdl-radio__inner-circle{-webkit-transform:scale(1,1);transform:scale(1,1)}fieldset[disabled] .mdl-radio .mdl-radio__inner-circle,.mdl-radio.is-disabled .mdl-radio__inner-circle{background:rgba(0,0,0,.26);cursor:auto}.mdl-radio.is-focused .mdl-radio__inner-circle{box-shadow:0 0 0 10px rgba(0,0,0,.1)}.mdl-radio__label{cursor:pointer}fieldset[disabled] .mdl-radio .mdl-radio__label,.mdl-radio.is-disabled 
.mdl-radio__label{color:rgba(0,0,0,.26);cursor:auto}.mdl-radio__ripple-container{position:absolute;z-index:2;top:-9px;left:-13px;box-sizing:border-box;width:42px;height:42px;border-radius:50%;cursor:pointer;overflow:hidden;-webkit-mask-image:-webkit-radial-gradient(circle,#fff,#000)}.mdl-radio__ripple-container .mdl-ripple{background:rgb(103,58,183)}fieldset[disabled] .mdl-radio .mdl-radio__ripple-container,.mdl-radio.is-disabled .mdl-radio__ripple-container{cursor:auto}fieldset[disabled] .mdl-radio .mdl-radio__ripple-container .mdl-ripple,.mdl-radio.is-disabled .mdl-radio__ripple-container .mdl-ripple{background:0 0}_:-ms-input-placeholder,:root .mdl-slider.mdl-slider.is-upgraded{-ms-appearance:none;height:32px;margin:0}.mdl-slider{width:calc(100% - 40px);margin:0 20px}.mdl-slider.is-upgraded{-webkit-appearance:none;-moz-appearance:none;appearance:none;height:2px;background:0 0;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;outline:0;padding:0;color:rgb(103,58,183);-webkit-align-self:center;-ms-flex-item-align:center;-ms-grid-row-align:center;align-self:center;z-index:1;cursor:pointer}.mdl-slider.is-upgraded::-moz-focus-outer{border:0}.mdl-slider.is-upgraded::-ms-tooltip{display:none}.mdl-slider.is-upgraded::-webkit-slider-runnable-track{background:0 0}.mdl-slider.is-upgraded::-moz-range-track{background:0 0;border:none}.mdl-slider.is-upgraded::-ms-track{background:0 0;color:transparent;height:2px;width:100%;border:none}.mdl-slider.is-upgraded::-ms-fill-lower{padding:0;background:linear-gradient(to right,transparent,transparent 16px,rgb(103,58,183)16px,rgb(103,58,183)0)}.mdl-slider.is-upgraded::-ms-fill-upper{padding:0;background:linear-gradient(to left,transparent,transparent 16px,rgba(0,0,0,.26)16px,rgba(0,0,0,.26)0)}.mdl-slider.is-upgraded::-webkit-slider-thumb{-webkit-appearance:none;width:12px;height:12px;box-sizing:border-box;border-radius:50%;background:rgb(103,58,183);border:none;transition:transform .18s cubic-bezier(.4,0,.2,1),border .18s cubic-bezier(.4,0,.2,1),box-shadow .18s cubic-bezier(.4,0,.2,1),background .28s cubic-bezier(.4,0,.2,1);transition:transform .18s cubic-bezier(.4,0,.2,1),border .18s cubic-bezier(.4,0,.2,1),box-shadow .18s cubic-bezier(.4,0,.2,1),background .28s cubic-bezier(.4,0,.2,1),-webkit-transform .18s cubic-bezier(.4,0,.2,1)}.mdl-slider.is-upgraded::-moz-range-thumb{-moz-appearance:none;width:12px;height:12px;box-sizing:border-box;border-radius:50%;background-image:none;background:rgb(103,58,183);border:none}.mdl-slider.is-upgraded:focus:not(:active)::-webkit-slider-thumb{box-shadow:0 0 0 10px rgba(103,58,183,.26)}.mdl-slider.is-upgraded:focus:not(:active)::-moz-range-thumb{box-shadow:0 0 0 10px rgba(103,58,183,.26)}.mdl-slider.is-upgraded:active::-webkit-slider-thumb{background-image:none;background:rgb(103,58,183);-webkit-transform:scale(1.5);transform:scale(1.5)}.mdl-slider.is-upgraded:active::-moz-range-thumb{background-image:none;background:rgb(103,58,183);transform:scale(1.5)}.mdl-slider.is-upgraded::-ms-thumb{width:32px;height:32px;border:none;border-radius:50%;background:rgb(103,58,183);transform:scale(.375);transition:transform .18s cubic-bezier(.4,0,.2,1),background .28s cubic-bezier(.4,0,.2,1);transition:transform .18s cubic-bezier(.4,0,.2,1),background .28s cubic-bezier(.4,0,.2,1),-webkit-transform .18s cubic-bezier(.4,0,.2,1)}.mdl-slider.is-upgraded:focus:not(:active)::-ms-thumb{background:radial-gradient(circle 
closest-side,rgb(103,58,183)0%,rgb(103,58,183)37.5%,rgba(103,58,183,.26)37.5%,rgba(103,58,183,.26)100%);transform:scale(1)}.mdl-slider.is-upgraded:active::-ms-thumb{background:rgb(103,58,183);transform:scale(.5625)}.mdl-slider.is-upgraded.is-lowest-value::-webkit-slider-thumb{border:2px solid rgba(0,0,0,.26);background:0 0}.mdl-slider.is-upgraded.is-lowest-value::-moz-range-thumb{border:2px solid rgba(0,0,0,.26);background:0 0}.mdl-slider.is-upgraded.is-lowest-value+.mdl-slider__background-flex>.mdl-slider__background-upper{left:6px}.mdl-slider.is-upgraded.is-lowest-value:focus:not(:active)::-webkit-slider-thumb{box-shadow:0 0 0 10px rgba(0,0,0,.12);background:rgba(0,0,0,.12)}.mdl-slider.is-upgraded.is-lowest-value:focus:not(:active)::-moz-range-thumb{box-shadow:0 0 0 10px rgba(0,0,0,.12);background:rgba(0,0,0,.12)}.mdl-slider.is-upgraded.is-lowest-value:active::-webkit-slider-thumb{border:1.6px solid rgba(0,0,0,.26);-webkit-transform:scale(1.5);transform:scale(1.5)}.mdl-slider.is-upgraded.is-lowest-value:active+.mdl-slider__background-flex>.mdl-slider__background-upper{left:9px}.mdl-slider.is-upgraded.is-lowest-value:active::-moz-range-thumb{border:1.5px solid rgba(0,0,0,.26);transform:scale(1.5)}.mdl-slider.is-upgraded.is-lowest-value::-ms-thumb{background:radial-gradient(circle closest-side,transparent 0%,transparent 66.67%,rgba(0,0,0,.26)66.67%,rgba(0,0,0,.26)100%)}.mdl-slider.is-upgraded.is-lowest-value:focus:not(:active)::-ms-thumb{background:radial-gradient(circle closest-side,rgba(0,0,0,.12)0%,rgba(0,0,0,.12)25%,rgba(0,0,0,.26)25%,rgba(0,0,0,.26)37.5%,rgba(0,0,0,.12)37.5%,rgba(0,0,0,.12)100%);transform:scale(1)}.mdl-slider.is-upgraded.is-lowest-value:active::-ms-thumb{transform:scale(.5625);background:radial-gradient(circle closest-side,transparent 0%,transparent 77.78%,rgba(0,0,0,.26)77.78%,rgba(0,0,0,.26)100%)}.mdl-slider.is-upgraded.is-lowest-value::-ms-fill-lower{background:0 0}.mdl-slider.is-upgraded.is-lowest-value::-ms-fill-upper{margin-left:6px}.mdl-slider.is-upgraded.is-lowest-value:active::-ms-fill-upper{margin-left:9px}.mdl-slider.is-upgraded:disabled:focus::-webkit-slider-thumb,.mdl-slider.is-upgraded:disabled:active::-webkit-slider-thumb,.mdl-slider.is-upgraded:disabled::-webkit-slider-thumb{-webkit-transform:scale(.667);transform:scale(.667);background:rgba(0,0,0,.26)}.mdl-slider.is-upgraded:disabled:focus::-moz-range-thumb,.mdl-slider.is-upgraded:disabled:active::-moz-range-thumb,.mdl-slider.is-upgraded:disabled::-moz-range-thumb{transform:scale(.667);background:rgba(0,0,0,.26)}.mdl-slider.is-upgraded:disabled+.mdl-slider__background-flex>.mdl-slider__background-lower{background-color:rgba(0,0,0,.26);left:-6px}.mdl-slider.is-upgraded:disabled+.mdl-slider__background-flex>.mdl-slider__background-upper{left:6px}.mdl-slider.is-upgraded.is-lowest-value:disabled:focus::-webkit-slider-thumb,.mdl-slider.is-upgraded.is-lowest-value:disabled:active::-webkit-slider-thumb,.mdl-slider.is-upgraded.is-lowest-value:disabled::-webkit-slider-thumb{border:3px solid rgba(0,0,0,.26);background:0 0;-webkit-transform:scale(.667);transform:scale(.667)}.mdl-slider.is-upgraded.is-lowest-value:disabled:focus::-moz-range-thumb,.mdl-slider.is-upgraded.is-lowest-value:disabled:active::-moz-range-thumb,.mdl-slider.is-upgraded.is-lowest-value:disabled::-moz-range-thumb{border:3px solid rgba(0,0,0,.26);background:0 
0;transform:scale(.667)}.mdl-slider.is-upgraded.is-lowest-value:disabled:active+.mdl-slider__background-flex>.mdl-slider__background-upper{left:6px}.mdl-slider.is-upgraded:disabled:focus::-ms-thumb,.mdl-slider.is-upgraded:disabled:active::-ms-thumb,.mdl-slider.is-upgraded:disabled::-ms-thumb{transform:scale(.25);background:rgba(0,0,0,.26)}.mdl-slider.is-upgraded.is-lowest-value:disabled:focus::-ms-thumb,.mdl-slider.is-upgraded.is-lowest-value:disabled:active::-ms-thumb,.mdl-slider.is-upgraded.is-lowest-value:disabled::-ms-thumb{transform:scale(.25);background:radial-gradient(circle closest-side,transparent 0%,transparent 50%,rgba(0,0,0,.26)50%,rgba(0,0,0,.26)100%)}.mdl-slider.is-upgraded:disabled::-ms-fill-lower{margin-right:6px;background:linear-gradient(to right,transparent,transparent 25px,rgba(0,0,0,.26)25px,rgba(0,0,0,.26)0)}.mdl-slider.is-upgraded:disabled::-ms-fill-upper{margin-left:6px}.mdl-slider.is-upgraded.is-lowest-value:disabled:active::-ms-fill-upper{margin-left:6px}.mdl-slider__ie-container{height:18px;overflow:visible;border:none;margin:none;padding:none}.mdl-slider__container{height:18px;position:relative;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row}.mdl-slider__container,.mdl-slider__background-flex{background:0 0;display:-webkit-flex;display:-ms-flexbox;display:flex}.mdl-slider__background-flex{position:absolute;height:2px;width:calc(100% - 52px);top:50%;left:0;margin:0 26px;overflow:hidden;border:0;padding:0;-webkit-transform:translate(0,-1px);transform:translate(0,-1px)}.mdl-slider__background-lower{background:rgb(103,58,183)}.mdl-slider__background-lower,.mdl-slider__background-upper{-webkit-flex:0;-ms-flex:0;flex:0;position:relative;border:0;padding:0}.mdl-slider__background-upper{background:rgba(0,0,0,.26);transition:left .18s cubic-bezier(.4,0,.2,1)}.mdl-snackbar{position:fixed;bottom:0;left:50%;cursor:default;background-color:#323232;z-index:3;display:block;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between;font-family:"Roboto","Helvetica","Arial",sans-serif;will-change:transform;-webkit-transform:translate(0,80px);transform:translate(0,80px);transition:transform .25s cubic-bezier(.4,0,1,1);transition:transform .25s cubic-bezier(.4,0,1,1),-webkit-transform .25s cubic-bezier(.4,0,1,1);pointer-events:none}@media (max-width:479px){.mdl-snackbar{width:100%;left:0;min-height:48px;max-height:80px}}@media (min-width:480px){.mdl-snackbar{min-width:288px;max-width:568px;border-radius:2px;-webkit-transform:translate(-50%,80px);transform:translate(-50%,80px)}}.mdl-snackbar--active{-webkit-transform:translate(0,0);transform:translate(0,0);pointer-events:auto;transition:transform .25s cubic-bezier(0,0,.2,1);transition:transform .25s cubic-bezier(0,0,.2,1),-webkit-transform .25s cubic-bezier(0,0,.2,1)}@media (min-width:480px){.mdl-snackbar--active{-webkit-transform:translate(-50%,0);transform:translate(-50%,0)}}.mdl-snackbar__text{padding:14px 12px 14px 24px;vertical-align:middle;color:#fff;float:left}.mdl-snackbar__action{background:0 0;border:none;color:rgb(255,64,129);float:right;padding:14px 24px 14px 
12px;font-family:"Roboto","Helvetica","Arial",sans-serif;font-size:14px;font-weight:500;text-transform:uppercase;line-height:1;letter-spacing:0;overflow:hidden;outline:none;opacity:0;pointer-events:none;cursor:pointer;text-decoration:none;text-align:center;-webkit-align-self:center;-ms-flex-item-align:center;-ms-grid-row-align:center;align-self:center}.mdl-snackbar__action::-moz-focus-inner{border:0}.mdl-snackbar__action:not([aria-hidden]){opacity:1;pointer-events:auto}.mdl-spinner{display:inline-block;position:relative;width:28px;height:28px}.mdl-spinner:not(.is-upgraded).is-active:after{content:"Loading..."}.mdl-spinner.is-upgraded.is-active{-webkit-animation:mdl-spinner__container-rotate 1568.23529412ms linear infinite;animation:mdl-spinner__container-rotate 1568.23529412ms linear infinite}@-webkit-keyframes mdl-spinner__container-rotate{to{-webkit-transform:rotate(360deg);transform:rotate(360deg)}}@keyframes mdl-spinner__container-rotate{to{-webkit-transform:rotate(360deg);transform:rotate(360deg)}}.mdl-spinner__layer{position:absolute;width:100%;height:100%;opacity:0}.mdl-spinner__layer-1{border-color:#42a5f5}.mdl-spinner--single-color .mdl-spinner__layer-1{border-color:rgb(103,58,183)}.mdl-spinner.is-active .mdl-spinner__layer-1{-webkit-animation:mdl-spinner__fill-unfill-rotate 5332ms cubic-bezier(.4,0,.2,1)infinite both,mdl-spinner__layer-1-fade-in-out 5332ms cubic-bezier(.4,0,.2,1)infinite both;animation:mdl-spinner__fill-unfill-rotate 5332ms cubic-bezier(.4,0,.2,1)infinite both,mdl-spinner__layer-1-fade-in-out 5332ms cubic-bezier(.4,0,.2,1)infinite both}.mdl-spinner__layer-2{border-color:#f44336}.mdl-spinner--single-color .mdl-spinner__layer-2{border-color:rgb(103,58,183)}.mdl-spinner.is-active .mdl-spinner__layer-2{-webkit-animation:mdl-spinner__fill-unfill-rotate 5332ms cubic-bezier(.4,0,.2,1)infinite both,mdl-spinner__layer-2-fade-in-out 5332ms cubic-bezier(.4,0,.2,1)infinite both;animation:mdl-spinner__fill-unfill-rotate 5332ms cubic-bezier(.4,0,.2,1)infinite both,mdl-spinner__layer-2-fade-in-out 5332ms cubic-bezier(.4,0,.2,1)infinite both}.mdl-spinner__layer-3{border-color:#fdd835}.mdl-spinner--single-color .mdl-spinner__layer-3{border-color:rgb(103,58,183)}.mdl-spinner.is-active .mdl-spinner__layer-3{-webkit-animation:mdl-spinner__fill-unfill-rotate 5332ms cubic-bezier(.4,0,.2,1)infinite both,mdl-spinner__layer-3-fade-in-out 5332ms cubic-bezier(.4,0,.2,1)infinite both;animation:mdl-spinner__fill-unfill-rotate 5332ms cubic-bezier(.4,0,.2,1)infinite both,mdl-spinner__layer-3-fade-in-out 5332ms cubic-bezier(.4,0,.2,1)infinite both}.mdl-spinner__layer-4{border-color:#4caf50}.mdl-spinner--single-color .mdl-spinner__layer-4{border-color:rgb(103,58,183)}.mdl-spinner.is-active .mdl-spinner__layer-4{-webkit-animation:mdl-spinner__fill-unfill-rotate 5332ms cubic-bezier(.4,0,.2,1)infinite both,mdl-spinner__layer-4-fade-in-out 5332ms cubic-bezier(.4,0,.2,1)infinite both;animation:mdl-spinner__fill-unfill-rotate 5332ms cubic-bezier(.4,0,.2,1)infinite both,mdl-spinner__layer-4-fade-in-out 5332ms cubic-bezier(.4,0,.2,1)infinite both}@-webkit-keyframes 
mdl-spinner__fill-unfill-rotate{12.5%{-webkit-transform:rotate(135deg);transform:rotate(135deg)}25%{-webkit-transform:rotate(270deg);transform:rotate(270deg)}37.5%{-webkit-transform:rotate(405deg);transform:rotate(405deg)}50%{-webkit-transform:rotate(540deg);transform:rotate(540deg)}62.5%{-webkit-transform:rotate(675deg);transform:rotate(675deg)}75%{-webkit-transform:rotate(810deg);transform:rotate(810deg)}87.5%{-webkit-transform:rotate(945deg);transform:rotate(945deg)}to{-webkit-transform:rotate(1080deg);transform:rotate(1080deg)}}@keyframes mdl-spinner__fill-unfill-rotate{12.5%{-webkit-transform:rotate(135deg);transform:rotate(135deg)}25%{-webkit-transform:rotate(270deg);transform:rotate(270deg)}37.5%{-webkit-transform:rotate(405deg);transform:rotate(405deg)}50%{-webkit-transform:rotate(540deg);transform:rotate(540deg)}62.5%{-webkit-transform:rotate(675deg);transform:rotate(675deg)}75%{-webkit-transform:rotate(810deg);transform:rotate(810deg)}87.5%{-webkit-transform:rotate(945deg);transform:rotate(945deg)}to{-webkit-transform:rotate(1080deg);transform:rotate(1080deg)}}@-webkit-keyframes mdl-spinner__layer-1-fade-in-out{from,25%{opacity:.99}26%,89%{opacity:0}90%,100%{opacity:.99}}@keyframes mdl-spinner__layer-1-fade-in-out{from,25%{opacity:.99}26%,89%{opacity:0}90%,100%{opacity:.99}}@-webkit-keyframes mdl-spinner__layer-2-fade-in-out{from,15%{opacity:0}25%,50%{opacity:.99}51%{opacity:0}}@keyframes mdl-spinner__layer-2-fade-in-out{from,15%{opacity:0}25%,50%{opacity:.99}51%{opacity:0}}@-webkit-keyframes mdl-spinner__layer-3-fade-in-out{from,40%{opacity:0}50%,75%{opacity:.99}76%{opacity:0}}@keyframes mdl-spinner__layer-3-fade-in-out{from,40%{opacity:0}50%,75%{opacity:.99}76%{opacity:0}}@-webkit-keyframes mdl-spinner__layer-4-fade-in-out{from,65%{opacity:0}75%,90%{opacity:.99}100%{opacity:0}}@keyframes mdl-spinner__layer-4-fade-in-out{from,65%{opacity:0}75%,90%{opacity:.99}100%{opacity:0}}.mdl-spinner__gap-patch{position:absolute;box-sizing:border-box;top:0;left:45%;width:10%;height:100%;overflow:hidden;border-color:inherit}.mdl-spinner__gap-patch .mdl-spinner__circle{width:1000%;left:-450%}.mdl-spinner__circle-clipper{display:inline-block;position:relative;width:50%;height:100%;overflow:hidden;border-color:inherit}.mdl-spinner__circle-clipper.mdl-spinner__left{float:left}.mdl-spinner__circle-clipper.mdl-spinner__right{float:right}.mdl-spinner__circle-clipper .mdl-spinner__circle{width:200%}.mdl-spinner__circle{box-sizing:border-box;height:100%;border-width:3px;border-style:solid;border-color:inherit;border-bottom-color:transparent!important;border-radius:50%;-webkit-animation:none;animation:none;position:absolute;top:0;right:0;bottom:0;left:0}.mdl-spinner__left .mdl-spinner__circle{border-right-color:transparent!important;-webkit-transform:rotate(129deg);transform:rotate(129deg)}.mdl-spinner.is-active .mdl-spinner__left .mdl-spinner__circle{-webkit-animation:mdl-spinner__left-spin 1333ms cubic-bezier(.4,0,.2,1)infinite both;animation:mdl-spinner__left-spin 1333ms cubic-bezier(.4,0,.2,1)infinite both}.mdl-spinner__right .mdl-spinner__circle{left:-100%;border-left-color:transparent!important;-webkit-transform:rotate(-129deg);transform:rotate(-129deg)}.mdl-spinner.is-active .mdl-spinner__right .mdl-spinner__circle{-webkit-animation:mdl-spinner__right-spin 1333ms cubic-bezier(.4,0,.2,1)infinite both;animation:mdl-spinner__right-spin 1333ms cubic-bezier(.4,0,.2,1)infinite both}@-webkit-keyframes 
mdl-spinner__left-spin{from{-webkit-transform:rotate(130deg);transform:rotate(130deg)}50%{-webkit-transform:rotate(-5deg);transform:rotate(-5deg)}to{-webkit-transform:rotate(130deg);transform:rotate(130deg)}}@keyframes mdl-spinner__left-spin{from{-webkit-transform:rotate(130deg);transform:rotate(130deg)}50%{-webkit-transform:rotate(-5deg);transform:rotate(-5deg)}to{-webkit-transform:rotate(130deg);transform:rotate(130deg)}}@-webkit-keyframes mdl-spinner__right-spin{from{-webkit-transform:rotate(-130deg);transform:rotate(-130deg)}50%{-webkit-transform:rotate(5deg);transform:rotate(5deg)}to{-webkit-transform:rotate(-130deg);transform:rotate(-130deg)}}@keyframes mdl-spinner__right-spin{from{-webkit-transform:rotate(-130deg);transform:rotate(-130deg)}50%{-webkit-transform:rotate(5deg);transform:rotate(5deg)}to{-webkit-transform:rotate(-130deg);transform:rotate(-130deg)}}.mdl-switch{position:relative;z-index:1;vertical-align:middle;display:inline-block;box-sizing:border-box;width:100%;height:24px;margin:0;padding:0;overflow:visible;-webkit-touch-callout:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.mdl-switch.is-upgraded{padding-left:28px}.mdl-switch__input{line-height:24px}.mdl-switch.is-upgraded .mdl-switch__input{position:absolute;width:0;height:0;margin:0;padding:0;opacity:0;-ms-appearance:none;-moz-appearance:none;-webkit-appearance:none;appearance:none;border:none}.mdl-switch__track{background:rgba(0,0,0,.26);position:absolute;left:0;top:5px;height:14px;width:36px;border-radius:14px;cursor:pointer}.mdl-switch.is-checked .mdl-switch__track{background:rgba(103,58,183,.5)}.mdl-switch__track fieldset[disabled] .mdl-switch,.mdl-switch.is-disabled .mdl-switch__track{background:rgba(0,0,0,.12);cursor:auto}.mdl-switch__thumb{background:#fafafa;position:absolute;left:0;top:2px;height:20px;width:20px;border-radius:50%;cursor:pointer;box-shadow:0 2px 2px 0 rgba(0,0,0,.14),0 3px 1px -2px rgba(0,0,0,.2),0 1px 5px 0 rgba(0,0,0,.12);transition-duration:.28s;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-property:left}.mdl-switch.is-checked .mdl-switch__thumb{background:rgb(103,58,183);left:16px;box-shadow:0 3px 4px 0 rgba(0,0,0,.14),0 3px 3px -2px rgba(0,0,0,.2),0 1px 8px 0 rgba(0,0,0,.12)}.mdl-switch__thumb fieldset[disabled] .mdl-switch,.mdl-switch.is-disabled .mdl-switch__thumb{background:#bdbdbd;cursor:auto}.mdl-switch__focus-helper{position:absolute;top:50%;left:50%;-webkit-transform:translate(-4px,-4px);transform:translate(-4px,-4px);display:inline-block;box-sizing:border-box;width:8px;height:8px;border-radius:50%;background-color:transparent}.mdl-switch.is-focused .mdl-switch__focus-helper{box-shadow:0 0 0 20px rgba(0,0,0,.1);background-color:rgba(0,0,0,.1)}.mdl-switch.is-focused.is-checked .mdl-switch__focus-helper{box-shadow:0 0 0 20px rgba(103,58,183,.26);background-color:rgba(103,58,183,.26)}.mdl-switch__label{position:relative;cursor:pointer;font-size:16px;line-height:24px;margin:0;left:24px}.mdl-switch__label fieldset[disabled] .mdl-switch,.mdl-switch.is-disabled .mdl-switch__label{color:#bdbdbd;cursor:auto}.mdl-switch__ripple-container{position:absolute;z-index:2;top:-12px;left:-14px;box-sizing:border-box;width:48px;height:48px;border-radius:50%;cursor:pointer;overflow:hidden;-webkit-mask-image:-webkit-radial-gradient(circle,#fff,#000);transition-duration:.4s;transition-timing-function:step-end;transition-property:left}.mdl-switch__ripple-container .mdl-ripple{background:rgb(103,58,183)}.mdl-switch__ripple-container 
fieldset[disabled] .mdl-switch,.mdl-switch.is-disabled .mdl-switch__ripple-container{cursor:auto}fieldset[disabled] .mdl-switch .mdl-switch__ripple-container .mdl-ripple,.mdl-switch.is-disabled .mdl-switch__ripple-container .mdl-ripple{background:0 0}.mdl-switch.is-checked .mdl-switch__ripple-container{left:2px}.mdl-tabs{display:block;width:100%}.mdl-tabs__tab-bar{display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;-webkit-align-content:space-between;-ms-flex-line-pack:justify;align-content:space-between;-webkit-align-items:flex-start;-ms-flex-align:start;align-items:flex-start;height:48px;padding:0;margin:0;border-bottom:1px solid #e0e0e0}.mdl-tabs__tab{margin:0;border:none;padding:0 24px;float:left;position:relative;display:block;text-decoration:none;height:48px;line-height:48px;text-align:center;font-weight:500;font-size:14px;text-transform:uppercase;color:rgba(0,0,0,.54);overflow:hidden}.mdl-tabs.is-upgraded .mdl-tabs__tab.is-active{color:rgba(0,0,0,.87)}.mdl-tabs.is-upgraded .mdl-tabs__tab.is-active:after{height:2px;width:100%;display:block;content:" ";bottom:0;left:0;position:absolute;background:rgb(103,58,183);-webkit-animation:border-expand .2s cubic-bezier(.4,0,.4,1).01s alternate forwards;animation:border-expand .2s cubic-bezier(.4,0,.4,1).01s alternate forwards;transition:all 1s cubic-bezier(.4,0,1,1)}.mdl-tabs__tab .mdl-tabs__ripple-container{display:block;position:absolute;height:100%;width:100%;left:0;top:0;z-index:1;overflow:hidden}.mdl-tabs__tab .mdl-tabs__ripple-container .mdl-ripple{background:rgb(103,58,183)}.mdl-tabs__panel{display:block}.mdl-tabs.is-upgraded .mdl-tabs__panel{display:none}.mdl-tabs.is-upgraded .mdl-tabs__panel.is-active{display:block}@-webkit-keyframes border-expand{0%{opacity:0;width:0}100%{opacity:1;width:100%}}@keyframes border-expand{0%{opacity:0;width:0}100%{opacity:1;width:100%}}.mdl-textfield{position:relative;font-size:16px;display:inline-block;box-sizing:border-box;width:300px;max-width:100%;margin:0;padding:20px 0}.mdl-textfield .mdl-button{position:absolute;bottom:20px}.mdl-textfield--align-right{text-align:right}.mdl-textfield--full-width{width:100%}.mdl-textfield--expandable{min-width:32px;width:auto;min-height:32px}.mdl-textfield--expandable .mdl-button--icon{top:16px}.mdl-textfield__input{border:none;border-bottom:1px solid rgba(0,0,0,.12);display:block;font-size:16px;font-family:"Helvetica","Arial",sans-serif;margin:0;padding:4px 0;width:100%;background:0 0;text-align:left;color:inherit}.mdl-textfield__input[type="number"]{-moz-appearance:textfield}.mdl-textfield__input[type="number"]::-webkit-inner-spin-button,.mdl-textfield__input[type="number"]::-webkit-outer-spin-button{-webkit-appearance:none;margin:0}.mdl-textfield.is-focused .mdl-textfield__input{outline:none}.mdl-textfield.is-invalid .mdl-textfield__input{border-color:#d50000;box-shadow:none}fieldset[disabled] .mdl-textfield .mdl-textfield__input,.mdl-textfield.is-disabled .mdl-textfield__input{background-color:transparent;border-bottom:1px dotted rgba(0,0,0,.12);color:rgba(0,0,0,.26)}.mdl-textfield textarea.mdl-textfield__input{display:block}.mdl-textfield__label{bottom:0;color:rgba(0,0,0,.26);font-size:16px;left:0;right:0;pointer-events:none;position:absolute;display:block;top:24px;width:100%;overflow:hidden;white-space:nowrap;text-align:left}.mdl-textfield.is-dirty .mdl-textfield__label,.mdl-textfield.has-placeholder 
.mdl-textfield__label{visibility:hidden}.mdl-textfield--floating-label .mdl-textfield__label{transition-duration:.2s;transition-timing-function:cubic-bezier(.4,0,.2,1)}.mdl-textfield--floating-label.has-placeholder .mdl-textfield__label{transition:none}fieldset[disabled] .mdl-textfield .mdl-textfield__label,.mdl-textfield.is-disabled.is-disabled .mdl-textfield__label{color:rgba(0,0,0,.26)}.mdl-textfield--floating-label.is-focused .mdl-textfield__label,.mdl-textfield--floating-label.is-dirty .mdl-textfield__label,.mdl-textfield--floating-label.has-placeholder .mdl-textfield__label{color:rgb(103,58,183);font-size:12px;top:4px;visibility:visible}.mdl-textfield--floating-label.is-focused .mdl-textfield__expandable-holder .mdl-textfield__label,.mdl-textfield--floating-label.is-dirty .mdl-textfield__expandable-holder .mdl-textfield__label,.mdl-textfield--floating-label.has-placeholder .mdl-textfield__expandable-holder .mdl-textfield__label{top:-16px}.mdl-textfield--floating-label.is-invalid .mdl-textfield__label{color:#d50000;font-size:12px}.mdl-textfield__label:after{background-color:rgb(103,58,183);bottom:20px;content:'';height:2px;left:45%;position:absolute;transition-duration:.2s;transition-timing-function:cubic-bezier(.4,0,.2,1);visibility:hidden;width:10px}.mdl-textfield.is-focused .mdl-textfield__label:after{left:0;visibility:visible;width:100%}.mdl-textfield.is-invalid .mdl-textfield__label:after{background-color:#d50000}.mdl-textfield__error{color:#d50000;position:absolute;font-size:12px;margin-top:3px;visibility:hidden;display:block}.mdl-textfield.is-invalid .mdl-textfield__error{visibility:visible}.mdl-textfield__expandable-holder{display:inline-block;position:relative;margin-left:32px;transition-duration:.2s;transition-timing-function:cubic-bezier(.4,0,.2,1);display:inline-block;max-width:.1px}.mdl-textfield.is-focused .mdl-textfield__expandable-holder,.mdl-textfield.is-dirty .mdl-textfield__expandable-holder{max-width:600px}.mdl-textfield__expandable-holder .mdl-textfield__label:after{bottom:0}.mdl-tooltip{-webkit-transform:scale(0);transform:scale(0);-webkit-transform-origin:top center;transform-origin:top center;z-index:999;background:rgba(97,97,97,.9);border-radius:2px;color:#fff;display:inline-block;font-size:10px;font-weight:500;line-height:14px;max-width:170px;position:fixed;top:-500px;left:-500px;padding:8px;text-align:center}.mdl-tooltip.is-active{-webkit-animation:pulse 200ms cubic-bezier(0,0,.2,1)forwards;animation:pulse 200ms cubic-bezier(0,0,.2,1)forwards}.mdl-tooltip--large{line-height:14px;font-size:14px;padding:16px}@-webkit-keyframes pulse{0%{-webkit-transform:scale(0);transform:scale(0);opacity:0}50%{-webkit-transform:scale(.99);transform:scale(.99)}100%{-webkit-transform:scale(1);transform:scale(1);opacity:1;visibility:visible}}@keyframes pulse{0%{-webkit-transform:scale(0);transform:scale(0);opacity:0}50%{-webkit-transform:scale(.99);transform:scale(.99)}100%{-webkit-transform:scale(1);transform:scale(1);opacity:1;visibility:visible}}.mdl-shadow--2dp{box-shadow:0 2px 2px 0 rgba(0,0,0,.14),0 3px 1px -2px rgba(0,0,0,.2),0 1px 5px 0 rgba(0,0,0,.12)}.mdl-shadow--3dp{box-shadow:0 3px 4px 0 rgba(0,0,0,.14),0 3px 3px -2px rgba(0,0,0,.2),0 1px 8px 0 rgba(0,0,0,.12)}.mdl-shadow--4dp{box-shadow:0 4px 5px 0 rgba(0,0,0,.14),0 1px 10px 0 rgba(0,0,0,.12),0 2px 4px -1px rgba(0,0,0,.2)}.mdl-shadow--6dp{box-shadow:0 6px 10px 0 rgba(0,0,0,.14),0 1px 18px 0 rgba(0,0,0,.12),0 3px 5px -1px rgba(0,0,0,.2)}.mdl-shadow--8dp{box-shadow:0 8px 10px 1px rgba(0,0,0,.14),0 3px 14px 2px 
rgba(0,0,0,.12),0 5px 5px -3px rgba(0,0,0,.2)}.mdl-shadow--16dp{box-shadow:0 16px 24px 2px rgba(0,0,0,.14),0 6px 30px 5px rgba(0,0,0,.12),0 8px 10px -5px rgba(0,0,0,.2)}.mdl-shadow--24dp{box-shadow:0 9px 46px 8px rgba(0,0,0,.14),0 11px 15px -7px rgba(0,0,0,.12),0 24px 38px 3px rgba(0,0,0,.2)}.mdl-grid{display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-flow:row wrap;-ms-flex-flow:row wrap;flex-flow:row wrap;margin:0 auto;-webkit-align-items:stretch;-ms-flex-align:stretch;align-items:stretch}.mdl-grid.mdl-grid--no-spacing{padding:0}.mdl-cell{box-sizing:border-box}.mdl-cell--top{-webkit-align-self:flex-start;-ms-flex-item-align:start;align-self:flex-start}.mdl-cell--middle{-webkit-align-self:center;-ms-flex-item-align:center;-ms-grid-row-align:center;align-self:center}.mdl-cell--bottom{-webkit-align-self:flex-end;-ms-flex-item-align:end;align-self:flex-end}.mdl-cell--stretch{-webkit-align-self:stretch;-ms-flex-item-align:stretch;-ms-grid-row-align:stretch;align-self:stretch}.mdl-grid.mdl-grid--no-spacing>.mdl-cell{margin:0}.mdl-cell--order-1{-webkit-order:1;-ms-flex-order:1;order:1}.mdl-cell--order-2{-webkit-order:2;-ms-flex-order:2;order:2}.mdl-cell--order-3{-webkit-order:3;-ms-flex-order:3;order:3}.mdl-cell--order-4{-webkit-order:4;-ms-flex-order:4;order:4}.mdl-cell--order-5{-webkit-order:5;-ms-flex-order:5;order:5}.mdl-cell--order-6{-webkit-order:6;-ms-flex-order:6;order:6}.mdl-cell--order-7{-webkit-order:7;-ms-flex-order:7;order:7}.mdl-cell--order-8{-webkit-order:8;-ms-flex-order:8;order:8}.mdl-cell--order-9{-webkit-order:9;-ms-flex-order:9;order:9}.mdl-cell--order-10{-webkit-order:10;-ms-flex-order:10;order:10}.mdl-cell--order-11{-webkit-order:11;-ms-flex-order:11;order:11}.mdl-cell--order-12{-webkit-order:12;-ms-flex-order:12;order:12}@media (max-width:479px){.mdl-grid{padding:8px}.mdl-cell{margin:8px;width:calc(100% - 16px)}.mdl-grid--no-spacing>.mdl-cell{width:100%}.mdl-cell--hide-phone{display:none!important}.mdl-cell--order-1-phone.mdl-cell--order-1-phone{-webkit-order:1;-ms-flex-order:1;order:1}.mdl-cell--order-2-phone.mdl-cell--order-2-phone{-webkit-order:2;-ms-flex-order:2;order:2}.mdl-cell--order-3-phone.mdl-cell--order-3-phone{-webkit-order:3;-ms-flex-order:3;order:3}.mdl-cell--order-4-phone.mdl-cell--order-4-phone{-webkit-order:4;-ms-flex-order:4;order:4}.mdl-cell--order-5-phone.mdl-cell--order-5-phone{-webkit-order:5;-ms-flex-order:5;order:5}.mdl-cell--order-6-phone.mdl-cell--order-6-phone{-webkit-order:6;-ms-flex-order:6;order:6}.mdl-cell--order-7-phone.mdl-cell--order-7-phone{-webkit-order:7;-ms-flex-order:7;order:7}.mdl-cell--order-8-phone.mdl-cell--order-8-phone{-webkit-order:8;-ms-flex-order:8;order:8}.mdl-cell--order-9-phone.mdl-cell--order-9-phone{-webkit-order:9;-ms-flex-order:9;order:9}.mdl-cell--order-10-phone.mdl-cell--order-10-phone{-webkit-order:10;-ms-flex-order:10;order:10}.mdl-cell--order-11-phone.mdl-cell--order-11-phone{-webkit-order:11;-ms-flex-order:11;order:11}.mdl-cell--order-12-phone.mdl-cell--order-12-phone{-webkit-order:12;-ms-flex-order:12;order:12}.mdl-cell--1-col,.mdl-cell--1-col-phone.mdl-cell--1-col-phone{width:calc(25% - 16px)}.mdl-grid--no-spacing>.mdl-cell--1-col,.mdl-grid--no-spacing>.mdl-cell--1-col-phone.mdl-cell--1-col-phone{width:25%}.mdl-cell--2-col,.mdl-cell--2-col-phone.mdl-cell--2-col-phone{width:calc(50% - 16px)}.mdl-grid--no-spacing>.mdl-cell--2-col,.mdl-grid--no-spacing>.mdl-cell--2-col-phone.mdl-cell--2-col-phone{width:50%}.mdl-cell--3-col,.mdl-cell--3-col-phone.mdl-cell--3-col-phone{width:calc(75% - 
16px)}.mdl-grid--no-spacing>.mdl-cell--3-col,.mdl-grid--no-spacing>.mdl-cell--3-col-phone.mdl-cell--3-col-phone{width:75%}.mdl-cell--4-col,.mdl-cell--4-col-phone.mdl-cell--4-col-phone{width:calc(100% - 16px)}.mdl-grid--no-spacing>.mdl-cell--4-col,.mdl-grid--no-spacing>.mdl-cell--4-col-phone.mdl-cell--4-col-phone{width:100%}.mdl-cell--5-col,.mdl-cell--5-col-phone.mdl-cell--5-col-phone{width:calc(100% - 16px)}.mdl-grid--no-spacing>.mdl-cell--5-col,.mdl-grid--no-spacing>.mdl-cell--5-col-phone.mdl-cell--5-col-phone{width:100%}.mdl-cell--6-col,.mdl-cell--6-col-phone.mdl-cell--6-col-phone{width:calc(100% - 16px)}.mdl-grid--no-spacing>.mdl-cell--6-col,.mdl-grid--no-spacing>.mdl-cell--6-col-phone.mdl-cell--6-col-phone{width:100%}.mdl-cell--7-col,.mdl-cell--7-col-phone.mdl-cell--7-col-phone{width:calc(100% - 16px)}.mdl-grid--no-spacing>.mdl-cell--7-col,.mdl-grid--no-spacing>.mdl-cell--7-col-phone.mdl-cell--7-col-phone{width:100%}.mdl-cell--8-col,.mdl-cell--8-col-phone.mdl-cell--8-col-phone{width:calc(100% - 16px)}.mdl-grid--no-spacing>.mdl-cell--8-col,.mdl-grid--no-spacing>.mdl-cell--8-col-phone.mdl-cell--8-col-phone{width:100%}.mdl-cell--9-col,.mdl-cell--9-col-phone.mdl-cell--9-col-phone{width:calc(100% - 16px)}.mdl-grid--no-spacing>.mdl-cell--9-col,.mdl-grid--no-spacing>.mdl-cell--9-col-phone.mdl-cell--9-col-phone{width:100%}.mdl-cell--10-col,.mdl-cell--10-col-phone.mdl-cell--10-col-phone{width:calc(100% - 16px)}.mdl-grid--no-spacing>.mdl-cell--10-col,.mdl-grid--no-spacing>.mdl-cell--10-col-phone.mdl-cell--10-col-phone{width:100%}.mdl-cell--11-col,.mdl-cell--11-col-phone.mdl-cell--11-col-phone{width:calc(100% - 16px)}.mdl-grid--no-spacing>.mdl-cell--11-col,.mdl-grid--no-spacing>.mdl-cell--11-col-phone.mdl-cell--11-col-phone{width:100%}.mdl-cell--12-col,.mdl-cell--12-col-phone.mdl-cell--12-col-phone{width:calc(100% - 16px)}.mdl-grid--no-spacing>.mdl-cell--12-col,.mdl-grid--no-spacing>.mdl-cell--12-col-phone.mdl-cell--12-col-phone{width:100%}.mdl-cell--1-offset,.mdl-cell--1-offset-phone.mdl-cell--1-offset-phone{margin-left:calc(25% + 8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--1-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--1-offset-phone.mdl-cell--1-offset-phone{margin-left:25%}.mdl-cell--2-offset,.mdl-cell--2-offset-phone.mdl-cell--2-offset-phone{margin-left:calc(50% + 8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--2-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--2-offset-phone.mdl-cell--2-offset-phone{margin-left:50%}.mdl-cell--3-offset,.mdl-cell--3-offset-phone.mdl-cell--3-offset-phone{margin-left:calc(75% + 8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--3-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--3-offset-phone.mdl-cell--3-offset-phone{margin-left:75%}}@media (min-width:480px) and (max-width:839px){.mdl-grid{padding:8px}.mdl-cell{margin:8px;width:calc(50% - 
16px)}.mdl-grid--no-spacing>.mdl-cell{width:50%}.mdl-cell--hide-tablet{display:none!important}.mdl-cell--order-1-tablet.mdl-cell--order-1-tablet{-webkit-order:1;-ms-flex-order:1;order:1}.mdl-cell--order-2-tablet.mdl-cell--order-2-tablet{-webkit-order:2;-ms-flex-order:2;order:2}.mdl-cell--order-3-tablet.mdl-cell--order-3-tablet{-webkit-order:3;-ms-flex-order:3;order:3}.mdl-cell--order-4-tablet.mdl-cell--order-4-tablet{-webkit-order:4;-ms-flex-order:4;order:4}.mdl-cell--order-5-tablet.mdl-cell--order-5-tablet{-webkit-order:5;-ms-flex-order:5;order:5}.mdl-cell--order-6-tablet.mdl-cell--order-6-tablet{-webkit-order:6;-ms-flex-order:6;order:6}.mdl-cell--order-7-tablet.mdl-cell--order-7-tablet{-webkit-order:7;-ms-flex-order:7;order:7}.mdl-cell--order-8-tablet.mdl-cell--order-8-tablet{-webkit-order:8;-ms-flex-order:8;order:8}.mdl-cell--order-9-tablet.mdl-cell--order-9-tablet{-webkit-order:9;-ms-flex-order:9;order:9}.mdl-cell--order-10-tablet.mdl-cell--order-10-tablet{-webkit-order:10;-ms-flex-order:10;order:10}.mdl-cell--order-11-tablet.mdl-cell--order-11-tablet{-webkit-order:11;-ms-flex-order:11;order:11}.mdl-cell--order-12-tablet.mdl-cell--order-12-tablet{-webkit-order:12;-ms-flex-order:12;order:12}.mdl-cell--1-col,.mdl-cell--1-col-tablet.mdl-cell--1-col-tablet{width:calc(12.5% - 16px)}.mdl-grid--no-spacing>.mdl-cell--1-col,.mdl-grid--no-spacing>.mdl-cell--1-col-tablet.mdl-cell--1-col-tablet{width:12.5%}.mdl-cell--2-col,.mdl-cell--2-col-tablet.mdl-cell--2-col-tablet{width:calc(25% - 16px)}.mdl-grid--no-spacing>.mdl-cell--2-col,.mdl-grid--no-spacing>.mdl-cell--2-col-tablet.mdl-cell--2-col-tablet{width:25%}.mdl-cell--3-col,.mdl-cell--3-col-tablet.mdl-cell--3-col-tablet{width:calc(37.5% - 16px)}.mdl-grid--no-spacing>.mdl-cell--3-col,.mdl-grid--no-spacing>.mdl-cell--3-col-tablet.mdl-cell--3-col-tablet{width:37.5%}.mdl-cell--4-col,.mdl-cell--4-col-tablet.mdl-cell--4-col-tablet{width:calc(50% - 16px)}.mdl-grid--no-spacing>.mdl-cell--4-col,.mdl-grid--no-spacing>.mdl-cell--4-col-tablet.mdl-cell--4-col-tablet{width:50%}.mdl-cell--5-col,.mdl-cell--5-col-tablet.mdl-cell--5-col-tablet{width:calc(62.5% - 16px)}.mdl-grid--no-spacing>.mdl-cell--5-col,.mdl-grid--no-spacing>.mdl-cell--5-col-tablet.mdl-cell--5-col-tablet{width:62.5%}.mdl-cell--6-col,.mdl-cell--6-col-tablet.mdl-cell--6-col-tablet{width:calc(75% - 16px)}.mdl-grid--no-spacing>.mdl-cell--6-col,.mdl-grid--no-spacing>.mdl-cell--6-col-tablet.mdl-cell--6-col-tablet{width:75%}.mdl-cell--7-col,.mdl-cell--7-col-tablet.mdl-cell--7-col-tablet{width:calc(87.5% - 16px)}.mdl-grid--no-spacing>.mdl-cell--7-col,.mdl-grid--no-spacing>.mdl-cell--7-col-tablet.mdl-cell--7-col-tablet{width:87.5%}.mdl-cell--8-col,.mdl-cell--8-col-tablet.mdl-cell--8-col-tablet{width:calc(100% - 16px)}.mdl-grid--no-spacing>.mdl-cell--8-col,.mdl-grid--no-spacing>.mdl-cell--8-col-tablet.mdl-cell--8-col-tablet{width:100%}.mdl-cell--9-col,.mdl-cell--9-col-tablet.mdl-cell--9-col-tablet{width:calc(100% - 16px)}.mdl-grid--no-spacing>.mdl-cell--9-col,.mdl-grid--no-spacing>.mdl-cell--9-col-tablet.mdl-cell--9-col-tablet{width:100%}.mdl-cell--10-col,.mdl-cell--10-col-tablet.mdl-cell--10-col-tablet{width:calc(100% - 16px)}.mdl-grid--no-spacing>.mdl-cell--10-col,.mdl-grid--no-spacing>.mdl-cell--10-col-tablet.mdl-cell--10-col-tablet{width:100%}.mdl-cell--11-col,.mdl-cell--11-col-tablet.mdl-cell--11-col-tablet{width:calc(100% - 
16px)}.mdl-grid--no-spacing>.mdl-cell--11-col,.mdl-grid--no-spacing>.mdl-cell--11-col-tablet.mdl-cell--11-col-tablet{width:100%}.mdl-cell--12-col,.mdl-cell--12-col-tablet.mdl-cell--12-col-tablet{width:calc(100% - 16px)}.mdl-grid--no-spacing>.mdl-cell--12-col,.mdl-grid--no-spacing>.mdl-cell--12-col-tablet.mdl-cell--12-col-tablet{width:100%}.mdl-cell--1-offset,.mdl-cell--1-offset-tablet.mdl-cell--1-offset-tablet{margin-left:calc(12.5% + 8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--1-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--1-offset-tablet.mdl-cell--1-offset-tablet{margin-left:12.5%}.mdl-cell--2-offset,.mdl-cell--2-offset-tablet.mdl-cell--2-offset-tablet{margin-left:calc(25% + 8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--2-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--2-offset-tablet.mdl-cell--2-offset-tablet{margin-left:25%}.mdl-cell--3-offset,.mdl-cell--3-offset-tablet.mdl-cell--3-offset-tablet{margin-left:calc(37.5% + 8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--3-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--3-offset-tablet.mdl-cell--3-offset-tablet{margin-left:37.5%}.mdl-cell--4-offset,.mdl-cell--4-offset-tablet.mdl-cell--4-offset-tablet{margin-left:calc(50% + 8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--4-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--4-offset-tablet.mdl-cell--4-offset-tablet{margin-left:50%}.mdl-cell--5-offset,.mdl-cell--5-offset-tablet.mdl-cell--5-offset-tablet{margin-left:calc(62.5% + 8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--5-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--5-offset-tablet.mdl-cell--5-offset-tablet{margin-left:62.5%}.mdl-cell--6-offset,.mdl-cell--6-offset-tablet.mdl-cell--6-offset-tablet{margin-left:calc(75% + 8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--6-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--6-offset-tablet.mdl-cell--6-offset-tablet{margin-left:75%}.mdl-cell--7-offset,.mdl-cell--7-offset-tablet.mdl-cell--7-offset-tablet{margin-left:calc(87.5% + 8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--7-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--7-offset-tablet.mdl-cell--7-offset-tablet{margin-left:87.5%}}@media (min-width:840px){.mdl-grid{padding:8px}.mdl-cell{margin:8px;width:calc(33.3333333333% - 
16px)}.mdl-grid--no-spacing>.mdl-cell{width:33.3333333333%}.mdl-cell--hide-desktop{display:none!important}.mdl-cell--order-1-desktop.mdl-cell--order-1-desktop{-webkit-order:1;-ms-flex-order:1;order:1}.mdl-cell--order-2-desktop.mdl-cell--order-2-desktop{-webkit-order:2;-ms-flex-order:2;order:2}.mdl-cell--order-3-desktop.mdl-cell--order-3-desktop{-webkit-order:3;-ms-flex-order:3;order:3}.mdl-cell--order-4-desktop.mdl-cell--order-4-desktop{-webkit-order:4;-ms-flex-order:4;order:4}.mdl-cell--order-5-desktop.mdl-cell--order-5-desktop{-webkit-order:5;-ms-flex-order:5;order:5}.mdl-cell--order-6-desktop.mdl-cell--order-6-desktop{-webkit-order:6;-ms-flex-order:6;order:6}.mdl-cell--order-7-desktop.mdl-cell--order-7-desktop{-webkit-order:7;-ms-flex-order:7;order:7}.mdl-cell--order-8-desktop.mdl-cell--order-8-desktop{-webkit-order:8;-ms-flex-order:8;order:8}.mdl-cell--order-9-desktop.mdl-cell--order-9-desktop{-webkit-order:9;-ms-flex-order:9;order:9}.mdl-cell--order-10-desktop.mdl-cell--order-10-desktop{-webkit-order:10;-ms-flex-order:10;order:10}.mdl-cell--order-11-desktop.mdl-cell--order-11-desktop{-webkit-order:11;-ms-flex-order:11;order:11}.mdl-cell--order-12-desktop.mdl-cell--order-12-desktop{-webkit-order:12;-ms-flex-order:12;order:12}.mdl-cell--1-col,.mdl-cell--1-col-desktop.mdl-cell--1-col-desktop{width:calc(8.3333333333% - 16px)}.mdl-grid--no-spacing>.mdl-cell--1-col,.mdl-grid--no-spacing>.mdl-cell--1-col-desktop.mdl-cell--1-col-desktop{width:8.3333333333%}.mdl-cell--2-col,.mdl-cell--2-col-desktop.mdl-cell--2-col-desktop{width:calc(16.6666666667% - 16px)}.mdl-grid--no-spacing>.mdl-cell--2-col,.mdl-grid--no-spacing>.mdl-cell--2-col-desktop.mdl-cell--2-col-desktop{width:16.6666666667%}.mdl-cell--3-col,.mdl-cell--3-col-desktop.mdl-cell--3-col-desktop{width:calc(25% - 16px)}.mdl-grid--no-spacing>.mdl-cell--3-col,.mdl-grid--no-spacing>.mdl-cell--3-col-desktop.mdl-cell--3-col-desktop{width:25%}.mdl-cell--4-col,.mdl-cell--4-col-desktop.mdl-cell--4-col-desktop{width:calc(33.3333333333% - 16px)}.mdl-grid--no-spacing>.mdl-cell--4-col,.mdl-grid--no-spacing>.mdl-cell--4-col-desktop.mdl-cell--4-col-desktop{width:33.3333333333%}.mdl-cell--5-col,.mdl-cell--5-col-desktop.mdl-cell--5-col-desktop{width:calc(41.6666666667% - 16px)}.mdl-grid--no-spacing>.mdl-cell--5-col,.mdl-grid--no-spacing>.mdl-cell--5-col-desktop.mdl-cell--5-col-desktop{width:41.6666666667%}.mdl-cell--6-col,.mdl-cell--6-col-desktop.mdl-cell--6-col-desktop{width:calc(50% - 16px)}.mdl-grid--no-spacing>.mdl-cell--6-col,.mdl-grid--no-spacing>.mdl-cell--6-col-desktop.mdl-cell--6-col-desktop{width:50%}.mdl-cell--7-col,.mdl-cell--7-col-desktop.mdl-cell--7-col-desktop{width:calc(58.3333333333% - 16px)}.mdl-grid--no-spacing>.mdl-cell--7-col,.mdl-grid--no-spacing>.mdl-cell--7-col-desktop.mdl-cell--7-col-desktop{width:58.3333333333%}.mdl-cell--8-col,.mdl-cell--8-col-desktop.mdl-cell--8-col-desktop{width:calc(66.6666666667% - 16px)}.mdl-grid--no-spacing>.mdl-cell--8-col,.mdl-grid--no-spacing>.mdl-cell--8-col-desktop.mdl-cell--8-col-desktop{width:66.6666666667%}.mdl-cell--9-col,.mdl-cell--9-col-desktop.mdl-cell--9-col-desktop{width:calc(75% - 16px)}.mdl-grid--no-spacing>.mdl-cell--9-col,.mdl-grid--no-spacing>.mdl-cell--9-col-desktop.mdl-cell--9-col-desktop{width:75%}.mdl-cell--10-col,.mdl-cell--10-col-desktop.mdl-cell--10-col-desktop{width:calc(83.3333333333% - 
16px)}.mdl-grid--no-spacing>.mdl-cell--10-col,.mdl-grid--no-spacing>.mdl-cell--10-col-desktop.mdl-cell--10-col-desktop{width:83.3333333333%}.mdl-cell--11-col,.mdl-cell--11-col-desktop.mdl-cell--11-col-desktop{width:calc(91.6666666667% - 16px)}.mdl-grid--no-spacing>.mdl-cell--11-col,.mdl-grid--no-spacing>.mdl-cell--11-col-desktop.mdl-cell--11-col-desktop{width:91.6666666667%}.mdl-cell--12-col,.mdl-cell--12-col-desktop.mdl-cell--12-col-desktop{width:calc(100% - 16px)}.mdl-grid--no-spacing>.mdl-cell--12-col,.mdl-grid--no-spacing>.mdl-cell--12-col-desktop.mdl-cell--12-col-desktop{width:100%}.mdl-cell--1-offset,.mdl-cell--1-offset-desktop.mdl-cell--1-offset-desktop{margin-left:calc(8.3333333333% + 8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--1-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--1-offset-desktop.mdl-cell--1-offset-desktop{margin-left:8.3333333333%}.mdl-cell--2-offset,.mdl-cell--2-offset-desktop.mdl-cell--2-offset-desktop{margin-left:calc(16.6666666667% + 8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--2-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--2-offset-desktop.mdl-cell--2-offset-desktop{margin-left:16.6666666667%}.mdl-cell--3-offset,.mdl-cell--3-offset-desktop.mdl-cell--3-offset-desktop{margin-left:calc(25% + 8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--3-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--3-offset-desktop.mdl-cell--3-offset-desktop{margin-left:25%}.mdl-cell--4-offset,.mdl-cell--4-offset-desktop.mdl-cell--4-offset-desktop{margin-left:calc(33.3333333333% + 8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--4-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--4-offset-desktop.mdl-cell--4-offset-desktop{margin-left:33.3333333333%}.mdl-cell--5-offset,.mdl-cell--5-offset-desktop.mdl-cell--5-offset-desktop{margin-left:calc(41.6666666667% + 8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--5-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--5-offset-desktop.mdl-cell--5-offset-desktop{margin-left:41.6666666667%}.mdl-cell--6-offset,.mdl-cell--6-offset-desktop.mdl-cell--6-offset-desktop{margin-left:calc(50% + 8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--6-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--6-offset-desktop.mdl-cell--6-offset-desktop{margin-left:50%}.mdl-cell--7-offset,.mdl-cell--7-offset-desktop.mdl-cell--7-offset-desktop{margin-left:calc(58.3333333333% + 8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--7-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--7-offset-desktop.mdl-cell--7-offset-desktop{margin-left:58.3333333333%}.mdl-cell--8-offset,.mdl-cell--8-offset-desktop.mdl-cell--8-offset-desktop{margin-left:calc(66.6666666667% + 8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--8-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--8-offset-desktop.mdl-cell--8-offset-desktop{margin-left:66.6666666667%}.mdl-cell--9-offset,.mdl-cell--9-offset-desktop.mdl-cell--9-offset-desktop{margin-left:calc(75% + 8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--9-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--9-offset-desktop.mdl-cell--9-offset-desktop{margin-left:75%}.mdl-cell--10-offset,.mdl-cell--10-offset-desktop.mdl-cell--10-offset-desktop{margin-left:calc(83.3333333333% + 8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--10-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--10-offset-desktop.mdl-cell--10-offset-desktop{margin-left:83.3333333333%}.mdl-cell--11-offset,.mdl-cell--11-offset-desktop.mdl-cell--11-offset-desktop{margin-left:calc(91.6666666667% + 
8px)}.mdl-grid.mdl-grid--no-spacing>.mdl-cell--11-offset,.mdl-grid.mdl-grid--no-spacing>.mdl-cell--11-offset-desktop.mdl-cell--11-offset-desktop{margin-left:91.6666666667%}}body{margin:0}.styleguide-demo h1{margin:48px 24px 0}.styleguide-demo h1:after{content:'';display:block;width:100%;border-bottom:1px solid rgba(0,0,0,.5);margin-top:24px}.styleguide-demo{opacity:0;transition:opacity .6s ease}.styleguide-masthead{height:256px;background:#212121;padding:115px 16px 0}.styleguide-container{position:relative;max-width:960px;width:100%}.styleguide-title{color:#fff;bottom:auto;position:relative;font-size:56px;font-weight:300;line-height:1;letter-spacing:-.02em}.styleguide-title:after{border-bottom:0}.styleguide-title span{font-weight:300}.mdl-styleguide .mdl-layout__drawer .mdl-navigation__link{padding:10px 24px}.demosLoaded .styleguide-demo{opacity:1}iframe{display:block;width:100%;border:none}iframe.heightSet{overflow:hidden}.demo-wrapper{margin:24px}.demo-wrapper iframe{border:1px solid rgba(0,0,0,.5)} \ No newline at end of file diff --git a/samples/contrib/nvidia-resnet/components/webapp/src/templates/index.html b/samples/contrib/nvidia-resnet/components/webapp/src/templates/index.html deleted file mode 100644 index 0b58106fe8d..00000000000 --- a/samples/contrib/nvidia-resnet/components/webapp/src/templates/index.html +++ /dev/null @@ -1,119 +0,0 @@ - - - - - - - - Kubeflow UI - - - - - - - -
-            End2end Resnet50 Using NVIDIA TensorRT Inference Server, TF-AMP and TensorRT
-            TRTIS Model Server
-            Input is not a valid port
-            {% if connection.success %}
-            ✓ {{ connection.text }}
-            {% else %}
-            ● {{ connection.text }}
-            {% endif %}
-            {% if output %}
-            Test Results
-            Truth {{ output.truth }}
-            Prediction {{ output.prediction }}
-            {% for score in output.scores %}
-            Probability {{ score.index }}:
-            {% endfor %}
-            {% endif %}
            - - diff --git a/samples/contrib/nvidia-resnet/components/webapp/src/trtis_client.py b/samples/contrib/nvidia-resnet/components/webapp/src/trtis_client.py deleted file mode 100644 index 9ce2cec8e9f..00000000000 --- a/samples/contrib/nvidia-resnet/components/webapp/src/trtis_client.py +++ /dev/null @@ -1,315 +0,0 @@ -#!/usr/bin/env python2.7 -''' -Copyright 2018 The Kubeflow Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - https://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -''' - -from __future__ import print_function - -import numpy as np -import os -import random -from builtins import range -from functools import partial -import grpc - -from tensorrtserver.api import api_pb2 -from tensorrtserver.api import grpc_service_pb2 -from tensorrtserver.api import grpc_service_pb2_grpc -import tensorrtserver.api.model_config_pb2 as model_config - -from PIL import Image - - -def model_dtype_to_np(model_dtype): - if model_dtype == model_config.TYPE_BOOL: - return np.bool - elif model_dtype == model_config.TYPE_INT8: - return np.int8 - elif model_dtype == model_config.TYPE_INT16: - return np.int16 - elif model_dtype == model_config.TYPE_INT32: - return np.int32 - elif model_dtype == model_config.TYPE_INT64: - return np.int64 - elif model_dtype == model_config.TYPE_UINT8: - return np.uint8 - elif model_dtype == model_config.TYPE_UINT16: - return np.uint16 - elif model_dtype == model_config.TYPE_FP16: - return np.float16 - elif model_dtype == model_config.TYPE_FP32: - return np.float32 - elif model_dtype == model_config.TYPE_FP64: - return np.float64 - elif model_dtype == model_config.TYPE_STRING: - return np.dtype(object) - return None - - -def parse_model(status, model_name, batch_size, verbose=False): - """ - Check the configuration of a model to make sure it meets the - requirements for an image classification network (as expected by - this client) - """ - server_status = status.server_status - if model_name not in server_status.model_status.keys(): - raise Exception("unable to get status for '" + model_name + "'") - - status = server_status.model_status[model_name] - config = status.config - - if len(config.input) != 1: - raise Exception("expecting 1 input, got {}".format(len(config.input))) - if len(config.output) != 1: - raise Exception("expecting 1 output, got {}".format(len(config.output))) - - input = config.input[0] - output = config.output[0] - - if output.data_type != model_config.TYPE_FP32: - raise Exception("expecting output datatype to be TYPE_FP32, model '" + - model_name + "' output type is " + - model_config.DataType.Name(output.data_type)) - - # Output is expected to be a vector. But allow any number of - # dimensions as long as all but 1 is size 1 (e.g. { 10 }, { 1, 10 - # }, { 10, 1, 1 } are all ok). 
- non_one_cnt = 0 - for dim in output.dims: - if dim > 1: - non_one_cnt += 1 - if non_one_cnt > 1: - raise Exception("expecting model output to be a vector") - - # Model specifying maximum batch size of 0 indicates that batching - # is not supported and so the input tensors do not expect an "N" - # dimension (and 'batch_size' should be 1 so that only a single - # image instance is inferred at a time). - max_batch_size = config.max_batch_size - if max_batch_size == 0: - if batch_size != 1: - raise Exception("batching not supported for model '" + model_name + "'") - else: # max_batch_size > 0 - if batch_size > max_batch_size: - raise Exception( - "expecting batch size <= {} for model '{}'".format(max_batch_size, model_name)) - - # Model input must have 3 dims, either CHW or HWC - if len(input.dims) != 3: - raise Exception( - "expecting input to have 3 dimensions, model '{}' input has {}".format( - model_name, len(input.dims))) - - if ((input.format != model_config.ModelInput.FORMAT_NCHW) and - (input.format != model_config.ModelInput.FORMAT_NHWC)): - raise Exception("unexpected input format " + model_config.ModelInput.Format.Name(input.format) + - ", expecting " + - model_config.ModelInput.Format.Name(model_config.ModelInput.FORMAT_NCHW) + - " or " + - model_config.ModelInput.Format.Name(model_config.ModelInput.FORMAT_NHWC)) - - if input.format == model_config.ModelInput.FORMAT_NHWC: - h = input.dims[0] - w = input.dims[1] - c = input.dims[2] - else: - c = input.dims[0] - h = input.dims[1] - w = input.dims[2] - - return (input.name, output.name, c, h, w, input.format, model_dtype_to_np(input.data_type)) - - -def preprocess(img, format, dtype, c, h, w): - """ - Pre-process an image to meet the size, type and format - requirements specified by the parameters. - """ - # np.set_printoptions(threshold='nan') - - if c == 1: - sample_img = img.convert('L') - else: - sample_img = img.convert('RGB') - - resized_img = sample_img.resize((w, h), Image.BILINEAR) - resized = np.array(resized_img) - if resized.ndim == 2: - resized = resized[:, :, np.newaxis] - - typed = resized.astype(dtype) - - scaled = (typed / 255) - 0.5 - - # Channels are in RGB order. Currently model configuration data - # doesn't provide any information as to other channel orderings - # (like BGR) so we just assume RGB. - return scaled - - -def postprocess(results, filenames, batch_size): - """ - Post-process results to show classifications. 
- """ - if len(results) != 1: - raise Exception("expected 1 result, got {}".format(len(results))) - - batched_result = results[0].batch_classes - if len(batched_result) != batch_size: - raise Exception("expected {} results, got {}".format(batch_size, len(batched_result))) - if len(filenames) != batch_size: - raise Exception("expected {} filenames, got {}".format(batch_size, len(filenames))) - - label, score = [], [] - # batch size is always 1 here, need to modify if were to larger batch_size - for (index, result) in enumerate(batched_result): - print("Image '{}':".format(filenames[index])) - for cls in result.cls: - label.append(cls.label) - score += [{"index": cls.label, "val": cls.value}] - print(" {} ({}) = {}".format(cls.idx, cls.label, cls.value)) - return label[0], score - - -def requestGenerator(input_name, output_name, c, h, w, format, dtype, model_name, model_version, image_filename, - result_filenames): - # Prepare request for Infer gRPC - # The meta data part can be reused across requests - request = grpc_service_pb2.InferRequest() - request.model_name = model_name - if model_version is None: - request.model_version = -1 - else: - request.model_version = model_version - # optional pass in a batch size for generate requester over a set of image files, need to refactor - batch_size = 1 - request.meta_data.batch_size = batch_size - output_message = api_pb2.InferRequestHeader.Output() - output_message.name = output_name - # Number of class results to report. Default is 10 to match with demo. - output_message.cls.count = 10 - request.meta_data.output.extend([output_message]) - - filenames = [] - if os.path.isdir(image_filename): - filenames = [os.path.join(image_filename, f) - for f in os.listdir(image_filename) - if os.path.isfile(os.path.join(image_filename, f))] - else: - filenames = [image_filename, ] - - filenames.sort() - - # Preprocess the images into input data according to model - # requirements - image_data = [] - for filename in filenames: - img = Image.open(filename) - image_data.append(preprocess(img, format, dtype, c, h, w)) - - request.meta_data.input.add(name=input_name) - - # Send requests of batch_size images. If the number of - # images isn't an exact multiple of batch_size then just - # start over with the first images until the batch is filled. 
- image_idx = 0 - last_request = False - while not last_request: - input_bytes = None - input_filenames = [] - del request.raw_input[:] - for idx in range(batch_size): - input_filenames.append(filenames[image_idx]) - if input_bytes is None: - input_bytes = image_data[image_idx].tobytes() - else: - input_bytes += image_data[image_idx].tobytes() - - image_idx = (image_idx + 1) % len(image_data) - if image_idx == 0: - last_request = True - - request.raw_input.extend([input_bytes]) - result_filenames.append(input_filenames) - yield request - - -def get_prediction(image_filename, server_host='localhost', server_port=8001, - model_name="end2end-demo", model_version=None): - """ - Retrieve a prediction from a TensorFlow model server - - :param image: a end2end-demo image - :param server_host: the address of the TensorRT inference server - :param server_port: the port used by the server - :param model_name: the name of the model - :param timeout: the amount of time to wait for a prediction to complete - :return 0: the integer predicted in the end2end-demo image - :return 1: the confidence scores for all classes - """ - channel = grpc.insecure_channel(server_host + ':' + str(server_port)) - grpc_stub = grpc_service_pb2_grpc.GRPCServiceStub(channel) - - # Prepare request for Status gRPC - request = grpc_service_pb2.StatusRequest(model_name=model_name) - # Call and receive response from Status gRPC - response = grpc_stub.Status(request) - # Make sure the model matches our requirements, and get some - # properties of the model that we need for preprocessing - batch_size = 1 - verbose = False - input_name, output_name, c, h, w, format, dtype = parse_model( - response, model_name, batch_size, verbose) - - filledRequestGenerator = partial(requestGenerator, input_name, output_name, c, h, w, format, dtype, model_name, - model_version, image_filename) - - # Send requests of batch_size images. If the number of - # images isn't an exact multiple of batch_size then just - # start over with the first images until the batch is filled. - result_filenames = [] - requests = [] - responses = [] - - # Send request - for request in filledRequestGenerator(result_filenames): - responses.append(grpc_stub.Infer(request)) - - # For async, retrieve results according to the send order - for request in requests: - responses.append(request.result()) - - idx = 0 - for response in responses: - print("Request {}, batch size {}".format(idx, batch_size)) - label, score = postprocess(response.meta_data.output, result_filenames[idx], batch_size) - idx += 1 - - return label, score - - -def random_image(img_path='/workspace/web_server/static/images'): - """ - Pull a random image out of the small end2end-demo dataset - - :param savePath: the path to save the file to. If None, file is not saved - :return 0: file selected - :return 1: label selelcted - """ - random_dir = random.choice(os.listdir(img_path)) - random_file = random.choice(os.listdir(img_path + '/' + random_dir)) - - return img_path + '/' + random_dir + '/' + random_file, random_dir, 'static/images' + '/' + random_dir + '/' + random_file diff --git a/samples/contrib/nvidia-resnet/components/webapp_launcher/Dockerfile b/samples/contrib/nvidia-resnet/components/webapp_launcher/Dockerfile deleted file mode 100644 index d5d88baa00e..00000000000 --- a/samples/contrib/nvidia-resnet/components/webapp_launcher/Dockerfile +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2018 The Kubeflow Authors. All Rights Reserved. -# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -FROM ubuntu:16.04 - -RUN apt-get update -y && \ - apt-get install --no-install-recommends -y -q ca-certificates curl python-dev python-setuptools wget unzip -RUN easy_install pip && \ - pip install pyyaml six requests - -# Install kubectl -RUN curl -LO https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/amd64/kubectl -RUN chmod +x ./kubectl -RUN mv ./kubectl /usr/local/bin - -ADD src /workspace -WORKDIR /workspace - -ENTRYPOINT ["python", "deploy_webapp.py"] - diff --git a/samples/contrib/nvidia-resnet/components/webapp_launcher/build.sh b/samples/contrib/nvidia-resnet/components/webapp_launcher/build.sh deleted file mode 100755 index f5afd4682df..00000000000 --- a/samples/contrib/nvidia-resnet/components/webapp_launcher/build.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash -# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -IMAGE= - -docker build -t $IMAGE . -docker push $IMAGE diff --git a/samples/contrib/nvidia-resnet/components/webapp_launcher/src/deploy_webapp.py b/samples/contrib/nvidia-resnet/components/webapp_launcher/src/deploy_webapp.py deleted file mode 100644 index d76ef357b6f..00000000000 --- a/samples/contrib/nvidia-resnet/components/webapp_launcher/src/deploy_webapp.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright 2018 The Kubeflow Authors. All Rights Reserved. -# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import argparse -import os -import logging -import subprocess -import requests - - -KUBEFLOW_NAMESPACE = 'kubeflow' -YAML_TEMPLATE = 'webapp-service-template.yaml' -YAML_FILE = 'webapp-service.yaml' - - -def main(): - parser = argparse.ArgumentParser(description='Webapp launcher') - parser.add_argument('--trtserver_name', help='Name of trtis service') - parser.add_argument('--workflow_name', help='Workflow name') - parser.add_argument('--model_name', help='Name of default model') - parser.add_argument('--model_version', help='Model version') - parser.add_argument('--webapp_prefix', - help='Webapp prefix as subpath of Kubeflow UI') - parser.add_argument( - '--webapp_port', help='Webapp port inside the Kubernetes cluster') - - args = parser.parse_args() - - print("using model name: %s and namespace: %s" % - (args.model_name, KUBEFLOW_NAMESPACE)) - - logging.getLogger().setLevel(logging.INFO) - logging.info('Generating webapp service template') - - template_file = os.path.join(os.path.dirname( - os.path.realpath(__file__)), YAML_TEMPLATE) - target_file = os.path.join(os.path.dirname( - os.path.realpath(__file__)), YAML_FILE) - - with open(template_file, 'r') as template: - with open(target_file, "w") as target: - data = template.read() - changed = data.replace('MODEL_PASSIN_NAME', args.model_name) - changed1 = changed.replace( - 'KUBEFLOW_NAMESPACE', KUBEFLOW_NAMESPACE) - changed2 = changed1.replace( - 'MODEL_PASSIN_VERSION', args.model_version) - changed3 = changed2.replace('TRTSERVER_NAME', args.trtserver_name) - changed4 = changed3.replace('WORKFLOW_NAME', args.workflow_name) - changed5 = changed4.replace('WEBAPP_PREFIX', args.webapp_prefix) - changed6 = changed5.replace('WEBAPP_PORT', args.webapp_port) - target.write(changed6) - - subprocess.call(['kubectl', 'apply', '-f', YAML_FILE]) - logging.info('Deploying webapp service') - - -if __name__ == "__main__": - main() diff --git a/samples/contrib/nvidia-resnet/components/webapp_launcher/src/webapp-service-template.yaml b/samples/contrib/nvidia-resnet/components/webapp_launcher/src/webapp-service-template.yaml deleted file mode 100644 index 714ce8ed94f..00000000000 --- a/samples/contrib/nvidia-resnet/components/webapp_launcher/src/webapp-service-template.yaml +++ /dev/null @@ -1,56 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - annotations: - getambassador.io/config: |- - --- - apiVersion: ambassador/v0 - kind: Mapping - name: webapp-WORKFLOW_NAME - prefix: /WEBAPP_PREFIX/ - rewrite: / - timeout_ms: 1200000 - service: webappsvc.KUBEFLOW_NAMESPACE:WEBAPP_PORT - name: webappsvc - labels: - app: demo-client-ui - role: frontend -spec: - type: ClusterIP - ports: - - port: WEBAPP_PORT - targetPort: "http-server" - selector: - app: demo-client-ui - role: frontend - ---- - -apiVersion: extensions/v1beta1 -kind: Deployment -metadata: - name: webapp -spec: - replicas: 1 - template: - metadata: - labels: - app: demo-client-ui - role: frontend - spec: - containers: - - name: webapp - image: - imagePullPolicy: Always - env: - - name: TRTSERVER_HOST - value: TRTSERVER_NAME.KUBEFLOW_NAMESPACE - - name: MODEL_SERVE_NAME - value: MODEL_PASSIN_NAME - - name: MODEL_VERSION - value: "MODEL_PASSIN_VERSION" - - name: TRTSERVER_PORT - value: "8001" - ports: - - name: http-server - containerPort: 8080 diff --git a/samples/contrib/nvidia-resnet/install_kubeflow_and_dependencies.sh b/samples/contrib/nvidia-resnet/install_kubeflow_and_dependencies.sh deleted file mode 100755 index cffe64c7780..00000000000 --- 
a/samples/contrib/nvidia-resnet/install_kubeflow_and_dependencies.sh +++ /dev/null @@ -1,71 +0,0 @@ -#!/bin/bash -# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Install nvidia-docker 2 -sudo tee /etc/docker/daemon.json </dev/null || kubectl apply -f $PV -kubectl replace -f $PVC 2>/dev/null || kubectl apply -f $PVC - -docker build -t $IMAGE . -docker run --rm -v $(pwd)/src:/workspace $IMAGE diff --git a/samples/contrib/nvidia-resnet/pipeline/src/persistent-volume-claim.yaml b/samples/contrib/nvidia-resnet/pipeline/src/persistent-volume-claim.yaml deleted file mode 100644 index 697c2088a91..00000000000 --- a/samples/contrib/nvidia-resnet/pipeline/src/persistent-volume-claim.yaml +++ /dev/null @@ -1,12 +0,0 @@ -kind: PersistentVolumeClaim -apiVersion: v1 -metadata: - name: nvidia-workspace-read-claim - namespace: kubeflow -spec: - storageClassName: manual - accessModes: - - ReadWriteOnce - resources: - requests: - storage: 20Gi diff --git a/samples/contrib/nvidia-resnet/pipeline/src/persistent-volume.yaml b/samples/contrib/nvidia-resnet/pipeline/src/persistent-volume.yaml deleted file mode 100644 index 65464bbd7c2..00000000000 --- a/samples/contrib/nvidia-resnet/pipeline/src/persistent-volume.yaml +++ /dev/null @@ -1,15 +0,0 @@ -kind: PersistentVolume -apiVersion: v1 -metadata: - name: nvidia-workspace - namespace: kubeflow - labels: - type: local -spec: - storageClassName: manual - capacity: - storage: 100Gi - accessModes: - - ReadWriteOnce - hostPath: - path: "/mnt/workspace" diff --git a/samples/contrib/nvidia-resnet/pipeline/src/pipeline.py b/samples/contrib/nvidia-resnet/pipeline/src/pipeline.py deleted file mode 100644 index 9154968d43a..00000000000 --- a/samples/contrib/nvidia-resnet/pipeline/src/pipeline.py +++ /dev/null @@ -1,124 +0,0 @@ -# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import kfp.dsl as dsl -import datetime -import os -from kubernetes import client as k8s_client - - -# Modify image='' in each op to match IMAGE in the build.sh of its corresponding component - -def PreprocessOp(name, input_dir, output_dir): - return dsl.ContainerOp( - name=name, - image='', - arguments=[ - '--input_dir', input_dir, - '--output_dir', output_dir, - ], - file_outputs={'output': '/output.txt'} - ) - - -def TrainOp(name, input_dir, output_dir, model_name, model_version, epochs): - return dsl.ContainerOp( - name=name, - image='', - arguments=[ - '--input_dir', input_dir, - '--output_dir', output_dir, - '--model_name', model_name, - '--model_version', model_version, - '--epochs', epochs - ], - file_outputs={'output': '/output.txt'} - ) - - -def InferenceServerLauncherOp(name, input_dir, trtserver_name): - return dsl.ContainerOp( - name=name, - image='', - arguments=[ - '--trtserver_name', trtserver_name, - '--model_path', input_dir, - ], - file_outputs={'output': '/output.txt'} - ) - - -def WebappLauncherOp(name, trtserver_name, model_name, model_version, webapp_prefix, webapp_port): - return dsl.ContainerOp( - name=name, - image='', - arguments=[ - '--workflow_name', '{{workflow.name}}', - '--trtserver_name', trtserver_name, - '--model_name', model_name, - '--model_version', str(model_version), - '--webapp_prefix', webapp_prefix, - '--webapp_port', str(webapp_port) - ], - file_outputs={} - ) - - -@dsl.pipeline( - name='resnet_cifar10_pipeline', - description='Demonstrate an end-to-end training & serving pipeline using ResNet and CIFAR-10' -) -def resnet_pipeline( - raw_data_dir='/mnt/workspace/raw_data', - processed_data_dir='/mnt/workspace/processed_data', - model_dir='/mnt/workspace/saved_model', - epochs=50, - trtserver_name='trtis', - model_name='resnet_graphdef', - model_version=1, - webapp_prefix='webapp', - webapp_port=80 -): - - persistent_volume_name = 'nvidia-workspace' - persistent_volume_path = '/mnt/workspace' - - op_dict = {} - - op_dict['preprocess'] = PreprocessOp( - 'preprocess', raw_data_dir, processed_data_dir) - - op_dict['train'] = TrainOp( - 'train', op_dict['preprocess'].output, model_dir, model_name, model_version, epochs) - - op_dict['deploy_inference_server'] = InferenceServerLauncherOp( - 'deploy_inference_server', op_dict['train'].output, trtserver_name) - - op_dict['deploy_webapp'] = WebappLauncherOp( - 'deploy_webapp', op_dict['deploy_inference_server'].output, model_name, model_version, webapp_prefix, webapp_port) - - for _, container_op in op_dict.items(): - container_op.add_volume(k8s_client.V1Volume( - host_path=k8s_client.V1HostPathVolumeSource( - path=persistent_volume_path), - name=persistent_volume_name)) - container_op.add_volume_mount(k8s_client.V1VolumeMount( - mount_path=persistent_volume_path, - name=persistent_volume_name)) - - -if __name__ == '__main__': - import kfp.compiler as compiler - compiler.Compiler().compile(resnet_pipeline, __file__ + '.tar.gz') diff --git a/samples/contrib/nvidia-resnet/prepare_test_inference_data.sh b/samples/contrib/nvidia-resnet/prepare_test_inference_data.sh deleted file mode 100755 index be0e798e652..00000000000 --- a/samples/contrib/nvidia-resnet/prepare_test_inference_data.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash -# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -IMAGES_URL=https://storage.googleapis.com/nvidia-kubeflow-demo/test_images.tar.gz - -wget --no-verbose -O images.tar.gz $IMAGES_URL -tar -zxvf images.tar.gz -C components/webapp/src/static -rm images.tar.gz diff --git a/samples/contrib/openvino/deployer/README.md b/samples/contrib/openvino/deployer/README.md deleted file mode 100644 index f1c814db606..00000000000 --- a/samples/contrib/openvino/deployer/README.md +++ /dev/null @@ -1,40 +0,0 @@ -# OpenVINO Model Server deployment pipeline - -This is an example of a pipeline implementation based on [OpenVINO Model Server deployer](../../../components/openvino/ovms-deployer) component. - -It includes 2 tasks: -- deployment of OpenVINO Model Server in Kubernetes -- evaluation of the model server instance and its served model - -![Deployer pipeline](deployer.png) - - -## Parameters - -- model_export_path - shared storage location( google storage, persistent storage or NFS) pointing to the folder with numerical subfolders storing OpenVINO models. -For example path 'gs://intelai_public_models/resnet_50_i8' includes subfolder '1' which stores model files .bin and .xml in Intermediate Representation format. -Refer to [OVMS documentation](https://github.com/IntelAI/OpenVINO-model-server/blob/master/docs/docker_container.md#preparing-the-models) -- server_name - Kubernetes service name to be deployed. The model name in the grpc endpoint has the same name. -- log_level - DEBUG/INFO/ERROR - sets the logging level in OVMS pods -- batch_size - which batch size should be loaded in the served model: 'auto' or numerical values -- model_version_policy - parameter defining which version should be served in OVMS. Examples: '{"latest": { "num_versions":2 }}') / -{"specific": { "versions":[1, 3] }} / {"all": {}} -- replicas - number of pods which should be deployed to serve the model. More replicas is increasing the scalability and number of client requests which can be handled in parallel. -- evaluation-images-list - path or URL to the file with a list of evaluation images. The file should include in every line a path or URL to an image and the classification label number separated by space. -- image-path-prefix - prefix which should be added to every image in the list above. To be used when the image path is relative. -- model-input-name - name of the gRPC input key including data for inference processing. It matches input tensor. -- model-output-name - name of the output key in gRPC response -- model-input-size - the resolution of the images processed in the model. Evaluation script is rescaling the images to match the model input size. - -## Exemplary parameter values - -Included default values ensure smooth execution without any extra requirements. They can be changed to execute using custom -models and datasets. - -- The deployed serving component is [OpenVINO Model Server](https://github.com/IntelAI/OpenVINO-model-server) which is using a public docker image. -- the default OpenVINO Model in IR format is representing ResNet v1.50 network topology. 
-It is based on pre-trained model from Caffe framework, optimized with OpenVINO model optimizer and quantized to INT8 precision. -You can learn more about model optimization for production deployments on [intel.ai blog post](https://www.intel.ai/introducing-int8-quantization-for-fast-cpu-inference-using-openvino/#gs.2kxdii) -- included [samples of images](https://raw.githubusercontent.com/IntelAI/OpenVINO-model-server/master/example_client/input_images.txt) - can be used to run simple test and can be easily swapped with other datasets like ImageNet. - diff --git a/samples/contrib/openvino/deployer/component.yaml b/samples/contrib/openvino/deployer/component.yaml deleted file mode 100644 index 34c4cfc7b26..00000000000 --- a/samples/contrib/openvino/deployer/component.yaml +++ /dev/null @@ -1,55 +0,0 @@ -name: OpenVINO model server deployer -description: Deploys OpenVINO Model Server instance to Kubernetes and runs model evaluation -inputs: -- name: Batch size - default: auto -- name: Log level - default: DEBUG -- name: Model export path - default: 'gs://intelai_public_models/resnet_50_i8' -- name: Model version policy - default: '{"latest": { "num_versions":2 }}' -- name: Replicas - default: '1' -- name: Server name - default: resnet -- name: Evaluation images list - default: 'https://raw.githubusercontent.com/IntelAI/OpenVINO-model-server/master/example_client/input_images.txt' -- name: Image path prefix - default: 'https://github.com/IntelAI/OpenVINO-model-server/raw/master/example_client/' -- name: Model input name - default: 'data' -- name: Model output name - default: 'prob' -- name: Model input size - default: 224 -outputs: -- name: Server endpoint -metrics: - - name: latency - - name: accuracy -implementation: - container: - image: gcr.io/constant-cubist-173123/inference_server/ml_deployer:13 - command: [./deploy.sh] - args: [ - --model-export-path, {inputValue: Model export path}, - --server-name, {inputValue: Server name}, - --log-level, {inputValue: Log level}, - --batch-size, {inputValue: Batch size}, - --model-version-policy, {inputValue: Model version policy}, - --replicas, {inputValue: Replicas}, - --server-endpoint-output-file, {outputPath: Server endpoint}, - ] - container: - image: gcr.io/constant-cubist-173123/inference_server/ml_deployer:13 - command: [./evaluate.py] - args: [ - --images_list, {inputValue: Evaluation images list}, - --image_path_prefix, {inputValue: Image path prefix}, - --grpc_endpoint, {outputValue: Server endpoint}, - --input_name, {inputValue: Model input name}, - --output_name, {inputValue: Model output name}, - --size, {inputValue: Model input size}, - --replicas, {inputValue: Replicas}, - ] diff --git a/samples/contrib/openvino/deployer/deployer.png b/samples/contrib/openvino/deployer/deployer.png deleted file mode 100644 index 461c30cf97b..00000000000 Binary files a/samples/contrib/openvino/deployer/deployer.png and /dev/null differ diff --git a/samples/contrib/openvino/deployer/deployer.py b/samples/contrib/openvino/deployer/deployer.py deleted file mode 100644 index 9235f4aae74..00000000000 --- a/samples/contrib/openvino/deployer/deployer.py +++ /dev/null @@ -1,52 +0,0 @@ -import kfp.dsl as dsl - - -@dsl.pipeline( - name='OVMS deployment pipeline', - description='Deploy OpenVINO Model Server instance in Kubernetes' -) -def openvino_predict( - model_export_path='gs://intelai_public_models/resnet_50_i8', - server_name='resnet', - log_level='DEBUG', - batch_size='auto', - model_version_policy='{"latest": { "num_versions":2 }}', - replicas=1, - 
images_list='https://raw.githubusercontent.com/IntelAI/OpenVINO-model-server/master/example_client/input_images.txt', - image_path_prefix='https://github.com/IntelAI/OpenVINO-model-server/raw/master/example_client/', - model_input_name='data', - model_output_name='prob', - model_input_size=224 - ): - - - """A one-step pipeline.""" - deploy = dsl.ContainerOp( - name='Deploy OpenVINO Model Server', - image='gcr.io/constant-cubist-173123/inference_server/ml_deployer:13', - command=['./deploy.sh'], - arguments=[ - '--model-export-path', model_export_path, - '--server-name', server_name, - '--log-level', log_level, - '--batch-size', batch_size, - '--model-version-policy', model_version_policy, - '--replicas', replicas], - file_outputs={'output':'/tmp/server_endpoint'}) - - dsl.ContainerOp( - name='Evaluate OpenVINO Model Server', - image='gcr.io/constant-cubist-173123/inference_server/ml_deployer:13', - command=['./evaluate.py'], - arguments=[ - '--images_list', images_list, - '--grpc_endpoint', deploy.output, - '--input_name', model_input_name, - '--output_name', model_output_name, - '--size', model_input_size, - '--image_path_prefix', image_path_prefix], - file_outputs={}) - -if __name__ == '__main__': - import kfp.compiler as compiler - compiler.Compiler().compile(openvino_predict, __file__ + '.tar.gz') \ No newline at end of file diff --git a/samples/contrib/openvino/model_optimizer/README.md b/samples/contrib/openvino/model_optimizer/README.md deleted file mode 100644 index e454b8111d1..00000000000 --- a/samples/contrib/openvino/model_optimizer/README.md +++ /dev/null @@ -1,29 +0,0 @@ -# OpenVINO model optimizer pipeline - -This is an example of a one step pipeline implementation of model optimization using OpenVINO toolkit - -It performs graph optimization and generates Intermediate Representation model format which can be used -later by the Inference Engine. - -Learn more about [OpenVINO model optimizer](https://software.intel.com/en-us/articles/OpenVINO-ModelOptimizer) - -*Note:* Executing this pipeline required building the docker image according to the guidelines on -[OpenVINO model converted doc](../../../contrib/components/openvino/model_convert). -The image name pushed to the docker registry should be configured in the pipeline script `convert_model_pipeline.py` - -## Examples of the parameters - -input_path - gs://tensorflow_model_path/resnet/1/saved_model.pb - -mo_options - --saved_model_dir . - -output_path - gs://tensorflow_model_path/resnet/1 - - -All parameters for model optimizer options are described in the component - [doc](../../../components/model_convert/README.md) - -The model conversion component is copying the content of the input path to the current directory in the container. -It can include a single file or the complete folder. In the model optimizer options you should reference -the the file using relative path from the input path folder. This way you could pass also any configuration file -needed by the model optimizer. 
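
As a minimal sketch, assuming the pipeline script `convert_model_pipeline.py` has been compiled into a package and a KFP API endpoint is reachable (the host and package name below are placeholders, and a reasonably recent KFP SDK is assumed), the example parameter values above could be submitted like this:

```python
# Sketch only: submit the compiled conversion pipeline with the example
# parameter values from this README. The KFP host is a placeholder.
import kfp

client = kfp.Client(host='http://localhost:8888')  # placeholder KFP endpoint

client.create_run_from_pipeline_package(
    'convert_model_pipeline.py.tar.gz',  # produced by compiler.Compiler().compile(...)
    arguments={
        # Example values from this README; replace with your own storage paths.
        'input_path': 'gs://tensorflow_model_path/resnet/1/saved_model.pb',
        'mo_options': '--saved_model_dir .',
        'output_path': 'gs://tensorflow_model_path/resnet/1',
    },
    run_name='openvino-model-optimizer-example',
)
```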
diff --git a/samples/contrib/openvino/model_optimizer/convert_model_pipeline.py b/samples/contrib/openvino/model_optimizer/convert_model_pipeline.py deleted file mode 100644 index 02ce5d57986..00000000000 --- a/samples/contrib/openvino/model_optimizer/convert_model_pipeline.py +++ /dev/null @@ -1,28 +0,0 @@ - -import kfp.dsl as dsl - - -@dsl.pipeline( - name='Model-Optimization', - description='Convert model using OpenVINO model optimizer' -) -def download_optimize_and_upload( - input_path: dsl.PipelineParam, - output_path: dsl.PipelineParam, - mo_options: dsl.PipelineParam): - """A one-step pipeline.""" - - dsl.ContainerOp( - name='mo', - image='gcr.io/constant-cubist-173123/inference_server/ml_mo:12', - command=['convert_model.py'], - arguments=[ - '--input_path', input_path, - '--output_path', output_path, - '--mo_options', mo_options], - file_outputs={'output': '/tmp/output_path.txt'}) - - -if __name__ == '__main__': - import kfp.compiler as compiler - compiler.Compiler().compile(download_optimize_and_upload, __file__ + '.tar.gz') diff --git a/samples/contrib/openvino/predict/README.md b/samples/contrib/openvino/predict/README.md deleted file mode 100644 index 15914302a94..00000000000 --- a/samples/contrib/openvino/predict/README.md +++ /dev/null @@ -1,27 +0,0 @@ -# OpenVINO predict pipeline - -This is an example of simple one step pipeline implementation including OpenVINO predict component. - -It can execute predict operation for a dataset in numpy format and provided model in Intermediate Representation format. - -This format of models can be generated based on trained model from various frameworks like TensorFlow, Caffe, MXNET and Kaldi. - -Dataset in the numpy file needs to match the shape of the provided model input. - -This pipeline execute the predict operation and sends the results for each model output a numpy file with the name -representing the output tensor name. - -*Note:* Executing this pipeline required building the docker image according to the guidelines on -[OpenVINO predict component doc](../../../contrib/components/openvino/predict). 
-The image name pushed to the docker registry should be configured in the pipeline script `numpy_predict` - -## Examples of the parameters - -model_bin - gs:///model.bin - -model_xml - gs:///model.xml - -input_numpy_file - gs:///datasets/imgs.npy - -output_folder - gs:///outputs - diff --git a/samples/contrib/openvino/predict/numpy_predict.py b/samples/contrib/openvino/predict/numpy_predict.py deleted file mode 100644 index 217b0eff8fe..00000000000 --- a/samples/contrib/openvino/predict/numpy_predict.py +++ /dev/null @@ -1,37 +0,0 @@ -import kfp.dsl as dsl - - -@dsl.pipeline( - name='Prediction pipeline', - description='Execute prediction operation for the dataset from numpy file and test accuracy and latency' -) -def openvino_predict( - model_bin='gs://intelai_public_models/resnet_50_i8/1/resnet_50_i8.bin', - model_xml='gs://intelai_public_models/resnet_50_i8/1/resnet_50_i8.xml', - generated_model_dir='gs://your-bucket/folder', - input_numpy_file='gs://intelai_public_models/images/imgs.npy', - label_numpy_file='gs://intelai_public_models/images/lbs.npy', - batch_size=1, - scale_div=1, - scale_sub=0 - ): - - """A one-step pipeline.""" - dsl.ContainerOp( - name='openvino-predict', - image='gcr.io/constant-cubist-173123/inference_server/ml_predict:5', - command=['python3', 'predict.py'], - arguments=[ - '--model_bin', model_bin, - '--model_xml', model_xml, - '--input_numpy_file', input_numpy_file, - '--label_numpy_file', label_numpy_file, - '--batch_size', batch_size, - '--scale_div', scale_div, - '--scale_sub', scale_sub, - '--output_folder', generated_model_dir], - file_outputs={}) - -if __name__ == '__main__': - import kfp.compiler as compiler - compiler.Compiler().compile(openvino_predict, __file__ + '.tar.gz') diff --git a/samples/contrib/openvino/tf-slim/README.md b/samples/contrib/openvino/tf-slim/README.md deleted file mode 100644 index 8f0a215f8d6..00000000000 --- a/samples/contrib/openvino/tf-slim/README.md +++ /dev/null @@ -1,67 +0,0 @@ -# OpenVINO workflow demo with TensorFlow slim models optimization and evaluation - - -This pipeline integrates three components: - -- [TensorFlow slim models generator](../../../components/openvino/tf-slim) - it creates slim models which are considered as exemplary to -demonstrate OpenVINO model optimization and execution -- [OpenVINO model optimizer](../../../components/openvino/model_convert) - it converts the model -from TensorFlow or other frameworks into Intermediate Representation format. -It is one time operation. The result can be used in the inference execution using Inference Engine. -- [Inference Engine prediction](../../../components/openvino/predict) - it is running the inference evaluation based on -input and label data in numpy files - -The pipeline can be used to assess the results of model optimization in terms of inference execution time and -classification accuracy. - -During the pipeline execution there are generated several artifacts. - -In the path set by `tf-export-dir` parameter: -- Graph definition in protobuffer format -- Frozen graph created based on imported checkpoint - -In the path set by `generated-model-dir` parameter: -- TensorFlow model in a frozen saved_model format. It can be consumed by TensorFlow based inference scripts or TensorFlow -serving. This format can't be used for additional training because all variables are changed to constants. -- Intermediate Representation of the optimized model created based on the exported TF saved model. 
It can be used to run -inference operation -using OpenVINO Inference Engine or to be imported by OpenVINO Model Server -- TensorFlow event file which can be used to visualize the graph in TensorBoard - -`generated-model-dir` value should point to a shared location like Google Storage, persistent storage or NFS -so it can be passed between components. - -![demo pipeline1](demo_pipeline1.png) -![demo pipeline2](demo_pipeline2.png) -![demo pipeline3](demo_pipeline3.png) -## Parameters - -- model_name = slim model name which matches the allowed topologies from -[slim repo](https://github.com/tensorflow/models/blob/master/research/slim/nets/nets_factory.py) -- num_classes = how many classes are defined in the model. For slim models it could be `1000` or `1001` depending on the model -- checkpoint_url = URL to the compressed checkpoint for the [pre-trained models](https://github.com/tensorflow/models/tree/master/research/slim#pre-trained-models) -- batch_size - batch size for the generated IR model -- export_dir - location for storing auxiliary model formats -- generated_model_dir - location for storing pipeline results - Google Storage path. If set to a numerical subfolder - it could be used by TensorFlow Serving or OpenVINO Model Server -- mo_options - a list of options to be passed to the model optimizer; it assumes the source model in the saved_model folder - will be downloaded to the 'current directory', so use `--saved_model_dir .` to point to the model generated in the first step. - Refer to [mo.py --help](../../../components/openvino/model_convert) for details. -- input_numpy_file - gs path to the numpy file including the evaluation dataset in the `NCHW` format. -- label_numpy_file - gs path to the numpy file with labels for the evaluation images. They will be used to assess model accuracy. - -## Examples of the parameters - -```bash -model-name - resnet_v1_50
            -model-name - 1000 -checkpoint-url - http://download.tensorflow.org/models/resnet_v1_50_2016_08_28.tar.gz -batch-size - 1 -tf-export-dir = /tmp/export -generated-model-dir = gs://your-bucket/folder -mo_options - saved_model_dir . -input_numpy_file - gs://intelai_public_models/images/imgs.npy -label-numpy-file - gs://intelai_public_models/images/lbs.npy -``` - - diff --git a/samples/contrib/openvino/tf-slim/demo_pipeline1.png b/samples/contrib/openvino/tf-slim/demo_pipeline1.png deleted file mode 100644 index 1d53c13fc58..00000000000 Binary files a/samples/contrib/openvino/tf-slim/demo_pipeline1.png and /dev/null differ diff --git a/samples/contrib/openvino/tf-slim/demo_pipeline2.png b/samples/contrib/openvino/tf-slim/demo_pipeline2.png deleted file mode 100644 index 658cbfc818b..00000000000 Binary files a/samples/contrib/openvino/tf-slim/demo_pipeline2.png and /dev/null differ diff --git a/samples/contrib/openvino/tf-slim/demo_pipeline3.png b/samples/contrib/openvino/tf-slim/demo_pipeline3.png deleted file mode 100644 index 5973d6c3031..00000000000 Binary files a/samples/contrib/openvino/tf-slim/demo_pipeline3.png and /dev/null differ diff --git a/samples/contrib/openvino/tf-slim/tf-slim.py b/samples/contrib/openvino/tf-slim/tf-slim.py deleted file mode 100644 index 8981fce88b8..00000000000 --- a/samples/contrib/openvino/tf-slim/tf-slim.py +++ /dev/null @@ -1,56 +0,0 @@ -import kfp.dsl as dsl - -@dsl.pipeline( - name='Prediction pipeline', - description='Generate slim models and optimize them with OpenVINO' -) -def tf_slim_optimize( - model_name='resnet_v1_50', - num_classes=1000, - checkpoint_url='http://download.tensorflow.org/models/resnet_v1_50_2016_08_28.tar.gz', - batch_size=1, - export_dir='/tmp/export', - generated_model_dir='gs://your-bucket/folder', - mo_options='--saved_model_dir .', - input_numpy_file='gs://intelai_public_models/images/imgs.npy', - label_numpy_file='gs://intelai_public_models/images/lbs.npy' - ): - - slim = dsl.ContainerOp( - name='Create_model', - image='gcr.io/constant-cubist-173123/inference_server/ml_slim:6', - command=['python', 'slim_model.py'], - arguments=[ - '--model_name', model_name, - '--batch_size', batch_size, - '--checkpoint_url', checkpoint_url, - '--num_classes', num_classes, - '--saved_model_dir', generated_model_dir, - '--export_dir', export_dir], - file_outputs={'generated-model-dir': '/tmp/saved_model_dir.txt'}) - - mo = dsl.ContainerOp( - name='Optimize_model', - image='gcr.io/constant-cubist-173123/inference_server/ml_mo:12', - command=['convert_model.py'], - arguments=[ - '--input_path', '%s/saved_model.pb' % slim.output, - '--mo_options', mo_options, - '--output_path', slim.output], - file_outputs={'bin': '/tmp/bin_path.txt', 'xml': '/tmp/xml_path.txt'}) - - dsl.ContainerOp( - name='openvino-predict', - image='gcr.io/constant-cubist-173123/inference_server/ml_predict:6', - command=['python3', 'predict.py'], - arguments=[ - '--model_bin', mo.outputs['bin'], - '--model_xml', mo.outputs['xml'], - '--input_numpy_file', input_numpy_file, - '--label_numpy_file', label_numpy_file, - '--output_folder', generated_model_dir], - file_outputs={}) - -if __name__ == '__main__': - import kfp.compiler as compiler - compiler.Compiler().compile(tf_slim_optimize, __file__ + '.tar.gz') diff --git a/samples/contrib/pytorch-samples/Dockerfile b/samples/contrib/pytorch-samples/Dockerfile deleted file mode 100644 index e28a61c1998..00000000000 --- a/samples/contrib/pytorch-samples/Dockerfile +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) Facebook, 
Inc. and its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -ARG BASE_IMAGE=pytorch/pytorch:latest - -FROM ${BASE_IMAGE} - -COPY . . - -RUN pip install -U pip - -RUN pip install -U --no-cache-dir -r requirements.txt - -RUN pip install pytorch-kfp-components - -ENV PYTHONPATH /workspace - -ENTRYPOINT /bin/bash diff --git a/samples/contrib/pytorch-samples/Pipeline-Bert-Dist.ipynb b/samples/contrib/pytorch-samples/Pipeline-Bert-Dist.ipynb deleted file mode 100644 index 7b2e331fb01..00000000000 --- a/samples/contrib/pytorch-samples/Pipeline-Bert-Dist.ipynb +++ /dev/null @@ -1,980 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 25, - "metadata": {}, - "outputs": [], - "source": [ - "# Copyright (c) Facebook, Inc. and its affiliates.\n", - "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", - "# you may not use this file except in compliance with the License.\n", - "# You may obtain a copy of the License at\n", - "#\n", - "# http://www.apache.org/licenses/LICENSE-2.0\n", - "#\n", - "# Unless required by applicable law or agreed to in writing, software\n", - "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", - "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", - "# See the License for the specific language governing permissions and\n", - "# limitations under the License." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Bert Pipeline : PyTorch BERT News Classfication\n", - "\n", - "This notebook shows PyTorch BERT end-to-end news classification example using Kubeflow Pipelines.\n", - "\n", - "\n", - "An example notebook that demonstrates how to:\n", - "\n", - "* Get different tasks needed for the pipeline\n", - "* Create a Kubeflow pipeline\n", - "* Include Pytorch KFP components to preprocess, train, visualize and deploy the model in the pipeline\n", - "* Submit a job for execution\n", - "* Query(prediction and explain) the final deployed model\n", - "* Interpretation of the model using the Captum Insights\n" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "! pip uninstall -y kfp\n", - "! 
pip install --no-cache-dir kfp torch captum" - ] - }, - { - "cell_type": "code", - "execution_count": 72, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'1.6.4'" - ] - }, - "execution_count": 63, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import kfp\n", - "import json\n", - "import os\n", - "from kfp.onprem import use_k8s_secret\n", - "from kfp import components\n", - "from kfp.components import load_component_from_file, load_component_from_url, InputPath\n", - "from kfp import dsl\n", - "from kfp import compiler\n", - "\n", - "kfp.__version__" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Enter your gateway and the cookie\n", - "[Use this extension on chrome to get token]( https://chrome.google.com/webstore/detail/editthiscookie/fngmhnnpilhplaeedifhccceomclgfbg?hl=en)\n", - "\n", - "![image.png](./image.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Update values for the ingress gateway and auth session" - ] - }, - { - "cell_type": "code", - "execution_count": 92, - "metadata": {}, - "outputs": [], - "source": [ - "INGRESS_GATEWAY='http://istio-ingressgateway.istio-system.svc.cluster.local'\n", - "AUTH=\"\"\n", - "NAMESPACE=\"kubeflow-user-example-com\"\n", - "COOKIE=\"authservice_session=\"+AUTH\n", - "EXPERIMENT=\"Default\"\n", - "dist_volume = 'dist-vol'\n", - "volume_mount_path =\"/model\"\n", - "dataset_path = volume_mount_path+\"/dataset\"\n", - "checkpoint_dir = volume_mount_path+\"/checkpoint\"\n", - "tensorboard_root = volume_mount_path+\"/tensorboard\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Set Log bucket and Tensorboard Image" - ] - }, - { - "cell_type": "code", - "execution_count": 93, - "metadata": {}, - "outputs": [], - "source": [ - "MINIO_ENDPOINT=\"http://minio-service.kubeflow:9000\"\n", - "LOG_BUCKET=\"mlpipeline\"\n", - "TENSORBOARD_IMAGE=\"public.ecr.aws/pytorch-samples/tboard:latest\"" - ] - }, - { - "cell_type": "code", - "execution_count": 94, - "metadata": {}, - "outputs": [], - "source": [ - "client = kfp.Client(host=INGRESS_GATEWAY+\"/pipeline\", cookies=COOKIE)" - ] - }, - { - "cell_type": "code", - "execution_count": 95, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "Experiment details." - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "{'created_at': datetime.datetime(2021, 6, 21, 13, 13, 6, tzinfo=tzlocal()),\n", - " 'description': None,\n", - " 'id': 'ba9b7266-2b1c-4729-afcd-be808c25c5af',\n", - " 'name': 'Default',\n", - " 'resource_references': [{'key': {'id': 'kubeflow-user-example-com',\n", - " 'type': 'NAMESPACE'},\n", - " 'name': None,\n", - " 'relationship': 'OWNER'}],\n", - " 'storage_state': 'STORAGESTATE_AVAILABLE'}" - ] - }, - "execution_count": 67, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.create_experiment(EXPERIMENT)\n", - "experiments = client.list_experiments(namespace=NAMESPACE)\n", - "my_experiment = experiments.experiments[0]\n", - "my_experiment" - ] - }, - { - "cell_type": "code", - "execution_count": 96, - "metadata": {}, - "outputs": [], - "source": [ - "DEPLOY_NAME=\"bert-dist\"\n", - "MODEL_NAME=\"bert\"" - ] - }, - { - "cell_type": "code", - "execution_count": 97, - "metadata": {}, - "outputs": [], - "source": [ - "! 
python utils/generate_templates.py bert/template_mapping.json" - ] - }, - { - "cell_type": "code", - "execution_count": 98, - "metadata": {}, - "outputs": [], - "source": [ - "prepare_tensorboard_op = load_component_from_file(\n", - " \"yaml/tensorboard_component.yaml\"\n", - ")\n", - "prep_op = components.load_component_from_file(\n", - " \"yaml/preprocess_component.yaml\"\n", - ")\n", - "# Use GPU image in train component\n", - "train_op = components.load_component_from_file(\n", - " \"yaml/train_component.yaml\"\n", - ")\n", - "deploy_op = load_component_from_file(\"../../../components/kserve/component.yaml\")\n", - "minio_op = components.load_component_from_file(\n", - " \"yaml/minio_component.yaml\"\n", - ")\n", - "pytorch_job_op = load_component_from_file(\"../../../components/kubeflow/pytorch-launcher/component.yaml\")\n", - "kubernetes_create_pvc_op = load_component_from_file(\n", - " \"../../../components/contrib/kubernetes/Create_PersistentVolumeClaim/component.yaml\"\n", - ")\n", - "cp_op = load_component_from_file(\n", - " \"yaml/copy_component.yaml\"\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 99, - "metadata": {}, - "outputs": [], - "source": [ - "from kubernetes.client.models import V1Volume, V1PersistentVolumeClaimVolumeSource\n", - "def create_dist_pipeline():\n", - " kubernetes_create_pvc_op(name=dist_volume, storage_size= \"2Gi\", namespace=NAMESPACE)\n", - "\n", - "create_volume_run = client.create_run_from_pipeline_func(create_dist_pipeline, arguments={})\n", - "create_volume_run.wait_for_run_completion()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Define pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": 100, - "metadata": {}, - "outputs": [], - "source": [ - "@dsl.pipeline(name=\"Training pipeline\", description=\"Sample training job test\")\n", - "def pytorch_bert(\n", - " minio_endpoint=MINIO_ENDPOINT,\n", - " log_bucket=LOG_BUCKET,\n", - " log_dir=f\"tensorboard/logs/{dsl.RUN_ID_PLACEHOLDER}\",\n", - " confusion_matrix_log_dir=f\"confusion_matrix/{dsl.RUN_ID_PLACEHOLDER}/\",\n", - " mar_path=f\"mar/{dsl.RUN_ID_PLACEHOLDER}/model-store/\",\n", - " config_prop_path=f\"mar/{dsl.RUN_ID_PLACEHOLDER}/config/\",\n", - " model_uri=f\"pvc://{dist_volume}/mar/{dsl.RUN_ID_PLACEHOLDER}\",\n", - " tf_image=TENSORBOARD_IMAGE,\n", - " deploy=DEPLOY_NAME,\n", - " namespace=NAMESPACE,\n", - " num_samples=1000,\n", - " max_epochs=1,\n", - " gpus=2,\n", - " num_nodes=2\n", - "):\n", - " \n", - " prepare_tb_task = prepare_tensorboard_op(\n", - " log_dir_uri=f\"s3://{log_bucket}/{log_dir}\",\n", - " image=tf_image,\n", - " pod_template_spec=json.dumps({\n", - " \"spec\": {\n", - " \"containers\": [{\n", - " \"env\": [\n", - " {\n", - " \"name\": \"AWS_ACCESS_KEY_ID\",\n", - " \"valueFrom\": {\n", - " \"secretKeyRef\": {\n", - " \"name\": \"mlpipeline-minio-artifact\",\n", - " \"key\": \"accesskey\",\n", - " }\n", - " },\n", - " },\n", - " {\n", - " \"name\": \"AWS_SECRET_ACCESS_KEY\",\n", - " \"valueFrom\": {\n", - " \"secretKeyRef\": {\n", - " \"name\": \"mlpipeline-minio-artifact\",\n", - " \"key\": \"secretkey\",\n", - " }\n", - " },\n", - " },\n", - " {\n", - " \"name\": \"AWS_REGION\",\n", - " \"value\": \"minio\"\n", - " },\n", - " {\n", - " \"name\": \"S3_ENDPOINT\",\n", - " \"value\": f\"{minio_endpoint}\",\n", - " },\n", - " {\n", - " \"name\": \"S3_USE_HTTPS\",\n", - " \"value\": \"0\"\n", - " },\n", - " {\n", - " \"name\": \"S3_VERIFY_SSL\",\n", - " \"value\": \"0\"\n", - " },\n", - " ]\n", - " }]\n", 
- " }\n", - " }),\n", - " ).set_display_name(\"Visualization\")\n", - "\n", - " prep_task = prep_op().after(prepare_tb_task).set_display_name(\"Preprocess & Transform\")\n", - " copy_task = cp_op(\"true\", prep_task.outputs['output_data'], dataset_path,\"\").add_pvolumes({volume_mount_path: dsl.PipelineVolume(pvc=dist_volume)}).after(prep_task).set_display_name(\"Copy Dataset\")\n", - " confusion_matrix_url = f\"minio://{log_bucket}/{confusion_matrix_log_dir}\"\n", - " train_task = pytorch_job_op(\n", - " name=\"pytorch-bert-dist\", \n", - " namespace=namespace, \n", - " master_spec=\n", - " {\n", - " \"replicas\": 1,\n", - " \"imagePullPolicy\": \"Always\",\n", - " \"restartPolicy\": \"OnFailure\",\n", - " \"template\": {\n", - " \"metadata\": {\n", - " \"annotations\": {\n", - " \"sidecar.istio.io/inject\": \"false\"\n", - " }\n", - " },\n", - " \"spec\": {\n", - " \"containers\": [\n", - " {\n", - " \"name\": \"pytorch\",\n", - " \"image\": \"public.ecr.aws/pytorch-samples/kfp_samples:latest-gpu\",\n", - " \"command\": [\"python\", \"bert/agnews_classification_pytorch.py\"],\n", - " \"args\": [\n", - " \"--dataset_path\", dataset_path,\n", - " \"--checkpoint_dir\", checkpoint_dir,\n", - " \"--script_args\", f\"model_name=bert.pth,num_samples={num_samples}\",\n", - " \"--tensorboard_root\", tensorboard_root,\n", - " \"--ptl_args\", f\"max_epochs={max_epochs},profiler=pytorch,devices={gpus},accelerator=gpu,strategy=ddp,num_nodes={num_nodes},confusion_matrix_url={confusion_matrix_url}\"\n", - " ],\n", - " \"env\": [\n", - " {\n", - " \"name\": \"MINIO_ACCESS_KEY\",\n", - " \"valueFrom\": {\n", - " \"secretKeyRef\": {\n", - " \"name\": \"mlpipeline-minio-artifact\",\n", - " \"key\": \"accesskey\",\n", - " }\n", - " },\n", - " },\n", - " {\n", - " \"name\": \"MINIO_SECRET_KEY\",\n", - " \"valueFrom\": {\n", - " \"secretKeyRef\": {\n", - " \"name\": \"mlpipeline-minio-artifact\",\n", - " \"key\": \"secretkey\",\n", - " }\n", - " },\n", - " }\n", - " ],\n", - " \"ports\": [\n", - " {\n", - " \"containerPort\": 24456,\n", - " \"name\": \"pytorchjob-port\"\n", - " }\n", - " ],\n", - " \"resources\": {\n", - " \"limits\": {\n", - " \"nvidia.com/gpu\": 2\n", - " }\n", - " },\n", - " \"volumeMounts\": [\n", - " {\n", - " \"mountPath\": volume_mount_path,\n", - " \"name\": \"model-volume\"\n", - " }\n", - " ]\n", - " }\n", - " ],\n", - " \"volumes\": [\n", - " {\n", - " \"name\": \"model-volume\",\n", - " \"persistentVolumeClaim\": {\n", - " \"claimName\": dist_volume\n", - " }\n", - " }\n", - " ]\n", - " }\n", - " }\n", - " }, \n", - " worker_spec=\n", - " {\n", - " \"replicas\": 1,\n", - " \"imagePullPolicy\": \"Always\",\n", - " \"restartPolicy\": \"OnFailure\",\n", - " \"template\": {\n", - " \"metadata\": {\n", - " \"annotations\": {\n", - " \"sidecar.istio.io/inject\": \"false\"\n", - " }\n", - " },\n", - " \"spec\": {\n", - " \"containers\": [\n", - " {\n", - " \"name\": \"pytorch\",\n", - " \"image\": \"public.ecr.aws/pytorch-samples/kfp_samples:latest-gpu\",\n", - " \"command\": [\"python\", \"bert/agnews_classification_pytorch.py\"],\n", - " \"args\": [\n", - " \"--dataset_path\", dataset_path,\n", - " \"--checkpoint_dir\", checkpoint_dir,\n", - " \"--script_args\", f\"model_name=bert.pth,num_samples={num_samples}\",\n", - " \"--tensorboard_root\", tensorboard_root,\n", - " \"--ptl_args\", f\"max_epochs={max_epochs},profiler=pytorch,devices={gpus},strategy=ddp,accelerator=gpu,num_nodes={num_nodes},confusion_matrix_url={confusion_matrix_url}\"\n", - " ],\n", - " \"env\": [\n", - " {\n", - " 
\"name\": \"MINIO_ACCESS_KEY\",\n", - " \"valueFrom\": {\n", - " \"secretKeyRef\": {\n", - " \"name\": \"mlpipeline-minio-artifact\",\n", - " \"key\": \"accesskey\",\n", - " }\n", - " },\n", - " },\n", - " {\n", - " \"name\": \"MINIO_SECRET_KEY\",\n", - " \"valueFrom\": {\n", - " \"secretKeyRef\": {\n", - " \"name\": \"mlpipeline-minio-artifact\",\n", - " \"key\": \"secretkey\",\n", - " }\n", - " },\n", - " }\n", - " ],\n", - " \"ports\": [\n", - " {\n", - " \"containerPort\": 24456,\n", - " \"name\": \"pytorchjob-port\"\n", - " }\n", - " ],\n", - " \"resources\": {\n", - " \"limits\": {\n", - " \"nvidia.com/gpu\": 2\n", - " }\n", - " },\n", - " \"volumeMounts\": [\n", - " {\n", - " \"mountPath\": volume_mount_path,\n", - " \"name\": \"model-volume\"\n", - " }\n", - " ]\n", - " }\n", - " ],\n", - " \"volumes\": [\n", - " {\n", - " \"name\": \"model-volume\",\n", - " \"persistentVolumeClaim\": {\n", - " \"claimName\": dist_volume\n", - " }\n", - " }\n", - " ]\n", - " }\n", - " }\n", - " },\n", - " delete_after_done=False\n", - " ).after(copy_task)\n", - " \n", - " mar_folder_restructure_task = dsl.ContainerOp(\n", - " name='mar restructure',\n", - " image='library/bash:4.4.23',\n", - " command=['sh', '-c'],\n", - " arguments=[f'mkdir -p {volume_mount_path}/{mar_path}; mkdir -p {volume_mount_path}/{config_prop_path}; cp {checkpoint_dir}/*.mar {volume_mount_path}/{mar_path}; cp {checkpoint_dir}/config.properties {volume_mount_path}/{config_prop_path}']).add_pvolumes({volume_mount_path: dsl.PipelineVolume(pvc=dist_volume)}).after(train_task).set_display_name(\"Restructure MAR and config.properties path\")\n", - " mar_folder_restructure_task.execution_options.caching_strategy.max_cache_staleness = \"P0D\"\n", - " copy_tensorboard = cp_op(\"false\", \"\", \"\", tensorboard_root).add_pvolumes({volume_mount_path: dsl.PipelineVolume(pvc=dist_volume)}).after(mar_folder_restructure_task).set_display_name(\"Copy Tensorboard Logs\")\n", - " copy_tensorboard.execution_options.caching_strategy.max_cache_staleness = \"P0D\"\n", - "\n", - " minio_tb_upload = (\n", - " minio_op(\n", - " bucket_name=log_bucket,\n", - " folder_name=log_dir,\n", - " input_path=copy_tensorboard.outputs[\"destination_path\"],\n", - " filename=\"\",\n", - " ).after(copy_tensorboard)\n", - " .set_display_name(\"Tensorboard Events Pusher\")\n", - " )\n", - " \n", - " # Deploy inferenceservice in gpu\n", - " gpu_count = \"1\"\n", - " isvc_gpu_yaml = \"\"\"\n", - " apiVersion: \"serving.kserve.io/v1beta1\"\n", - " kind: \"InferenceService\"\n", - " metadata:\n", - " name: {}\n", - " namespace: {}\n", - " spec:\n", - " predictor:\n", - " serviceAccountName: sa\n", - " pytorch:\n", - " storageUri: {}\n", - " protocolVersion: v2\n", - " resources:\n", - " requests: \n", - " cpu: 4\n", - " memory: 8Gi\n", - " limits:\n", - " cpu: 4\n", - " memory: 8Gi\n", - " nvidia.com/gpu: {}\n", - " \"\"\".format(\n", - " deploy, namespace, model_uri, gpu_count\n", - " )\n", - " \n", - " deploy_task = (\n", - " deploy_op(action=\"apply\", inferenceservice_yaml=isvc_gpu_yaml)\n", - " .after(minio_tb_upload)\n", - " .set_display_name(\"Deployer\")\n", - " )\n", - " deploy_task.execution_options.caching_strategy.max_cache_staleness = \"P0D\"\n", - " \n", - " dsl.get_pipeline_conf().add_op_transformer(\n", - " use_k8s_secret(\n", - " secret_name=\"mlpipeline-minio-artifact\",\n", - " k8s_secret_key_to_env={\n", - " \"secretkey\": \"MINIO_SECRET_KEY\",\n", - " \"accesskey\": \"MINIO_ACCESS_KEY\",\n", - " },\n", - " )\n", - " )" - ] - }, - { - "cell_type": 
"code", - "execution_count": 101, - "metadata": {}, - "outputs": [], - "source": [ - "# Compile pipeline\n", - "compiler.Compiler().compile(pytorch_bert, 'pytorch.tar.gz', type_check=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 102, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "Run details." - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "# Execute pipeline\n", - "run = client.run_pipeline(my_experiment.id, 'pytorch-bert', 'pytorch.tar.gz')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Wait for inference service below to go to `READY True` state." - ] - }, - { - "cell_type": "code", - "execution_count": 103, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "NAME URL READY PREV LATEST PREVROLLEDOUTREVISION LATESTREADYREVISION AGE\n", - "bert-dist http://bert-dist.kubeflow-user-example-com.example.com True 100 bert-dist-predictor-default-00001 4m12s\n" - ] - } - ], - "source": [ - "!kubectl get isvc $DEPLOY" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Get Inferenceservice name" - ] - }, - { - "cell_type": "code", - "execution_count": 104, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'bert-dist.kubeflow-user-example-com.example.com'" - ] - }, - "execution_count": 104, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "INFERENCE_SERVICE_LIST = ! kubectl get isvc {DEPLOY_NAME} -n {NAMESPACE} -o json | python3 -c \"import sys, json; print(json.load(sys.stdin)['status']['url'])\"| tr -d '\"' | cut -d \"/\" -f 3\n", - "INFERENCE_SERVICE_NAME = INFERENCE_SERVICE_LIST[0]\n", - "INFERENCE_SERVICE_NAME" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Prediction Request" - ] - }, - { - "cell_type": "code", - "execution_count": 105, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " % Total % Received % Xferd Average Speed Time Time Time Current\n", - " Dload Upload Total Spent Left Speed\n", - " 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0* Trying 10.100.176.44:80...\n", - "* TCP_NODELAY set\n", - "* Connected to istio-ingressgateway.istio-system.svc.cluster.local (10.100.176.44) port 80 (#0)\n", - "> POST /v1/models/bert:predict HTTP/1.1\n", - "> Host: bert-dist.kubeflow-user-example-com.example.com\n", - "> User-Agent: curl/7.68.0\n", - "> Accept: */*\n", - "> Cookie: authservice_session=MTY1MTQyNjA3MnxOd3dBTkVoTVZUSk1URmRaTmxkQ04xQk1WelpSTWpKYU1rMU5UVTFJTlZGWFNVYzNUMHRUV0ZWRVNFRlJNMGxJTTFOUE5FeFJRVUU9fIcMBBMyWExQz5ZZSXeVDwn4jPm3MrRX0hExC_vYeREr\n", - "> Content-Length: 84\n", - "> Content-Type: application/x-www-form-urlencoded\n", - "> \n", - "} [84 bytes data]\n", - "* upload completely sent off: 84 out of 84 bytes\n", - "* Mark bundle as not supporting multiuse\n", - "< HTTP/1.1 200 OK\n", - "< content-length: 33\n", - "< content-type: application/json; charset=UTF-8\n", - "< date: Mon, 02 May 2022 08:40:23 GMT\n", - "< server: istio-envoy\n", - "< x-envoy-upstream-service-time: 176\n", - "< \n", - "{ [33 bytes data]\n", - "100 117 100 33 100 84 162 413 --:--:-- --:--:-- --:--:-- 576\n", - "* Connection #0 to host istio-ingressgateway.istio-system.svc.cluster.local left intact\n" - ] - } - ], - "source": [ - "!curl -v -H \"Host: $INFERENCE_SERVICE_NAME\" -H \"Cookie: $COOKIE\" \"$INGRESS_GATEWAY/v2/models/$MODEL_NAME/infer\" -d 
@./bert/sample.txt > bert_prediction_output.json" - ] - }, - { - "cell_type": "code", - "execution_count": 106, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{\"predictions\": [\"\\\"Sci/Tech\\\"\"]}" - ] - } - ], - "source": [ - "! cat bert_prediction_output.json" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Explanation Request" - ] - }, - { - "cell_type": "code", - "execution_count": 107, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " % Total % Received % Xferd Average Speed Time Time Time Current\n", - " Dload Upload Total Spent Left Speed\n", - " 0 0 0 0 0 0 0 0 --:--:-- 0:00:04 --:--:-- 0* Trying 10.100.176.44:80...\n", - "* TCP_NODELAY set\n", - "* Connected to istio-ingressgateway.istio-system.svc.cluster.local (10.100.176.44) port 80 (#0)\n", - "> POST /v1/models/bert:explain HTTP/1.1\n", - "> Host: bert-dist.kubeflow-user-example-com.example.com\n", - "> User-Agent: curl/7.68.0\n", - "> Accept: */*\n", - "> Cookie: authservice_session=MTY1MTQyNjA3MnxOd3dBTkVoTVZUSk1URmRaTmxkQ04xQk1WelpSTWpKYU1rMU5UVTFJTlZGWFNVYzNUMHRUV0ZWRVNFRlJNMGxJTTFOUE5FeFJRVUU9fIcMBBMyWExQz5ZZSXeVDwn4jPm3MrRX0hExC_vYeREr\n", - "> Content-Length: 84\n", - "> Content-Type: application/x-www-form-urlencoded\n", - "> \n", - "} [84 bytes data]\n", - "* upload completely sent off: 84 out of 84 bytes\n", - "* Mark bundle as not supporting multiuse\n", - "< HTTP/1.1 200 OK\n", - "< content-length: 264\n", - "< content-type: application/json; charset=UTF-8\n", - "< date: Mon, 02 May 2022 08:40:44 GMT\n", - "< server: istio-envoy\n", - "< x-envoy-upstream-service-time: 284\n", - "< \n", - "{ [264 bytes data]\n", - "100 348 100 264 100 84 49 15 0:00:05 0:00:05 --:--:-- 75\n", - "* Connection #0 to host istio-ingressgateway.istio-system.svc.cluster.local left intact\n" - ] - } - ], - "source": [ - "!curl -v -H \"Host: $INFERENCE_SERVICE_NAME\" -H \"Cookie: $COOKIE\" \"$INGRESS_GATEWAY/v2/models/$MODEL_NAME/explain\" -d @./bert/sample.txt > bert_explaination_output.json" - ] - }, - { - "cell_type": "code", - "execution_count": 108, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{\"explanations\": [{\"words\": [\"bloomberg\", \"has\", \"reported\", \"on\", \"the\", \"economy\"], \"importances\": [-0.49426081646662806, 0.09581777446473196, -0.09546984597236165, -0.19612933767921537, -0.2438196769639178, 0.7996849104110348], \"delta\": -0.005089809745116192}]}" - ] - } - ], - "source": [ - "! 
cat bert_explaination_output.json" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": { - "scrolled": true, - "tags": [] - }, - "outputs": [ - { - "data": { - "text/plain": [ - "{'explanations': [{'words': ['[CLS]',\n", - " 'bloomberg',\n", - " 'has',\n", - " 'reported',\n", - " 'on',\n", - " 'the',\n", - " 'economy',\n", - " '[SEP]'],\n", - " 'importances': [0.18556156547587432,\n", - " -0.04754466449824699,\n", - " -0.09005958599003015,\n", - " 0.056995451538874545,\n", - " 0.10996221573727777,\n", - " 0.148971232294231,\n", - " 0.398128678194734,\n", - " -0.8712959534101352],\n", - " 'delta': 0.008833148050828438}]}" - ] - }, - "execution_count": 16, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "explanations_json = json.loads(open(\"./bert_explaination_output.json\", \"r\").read())\n", - "explanations_json" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "prediction_json = json.loads(open(\"./bert_prediction_output.json\", \"r\").read())" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "import torch\n", - "attributions = explanations_json[\"outputs\"][0][\"data\"][0]['importances']\n", - "tokens = explanations_json[\"outputs\"][0][\"data\"][0]['words']\n", - "delta = explanations_json[\"outputs\"][0][\"data\"][0]['delta']\n", - "\n", - "attributions = torch.tensor(attributions)\n", - "pred_prob = 0.75\n", - "pred_class = str(prediction_json[\"outputs\"][0][\"data\"][0]).strip('\"\"')\n", - "true_class = \"Business\"\n", - "attr_class =\"world\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Visualization of Predictions" - ] - }, - { - "cell_type": "code", - "execution_count": 24, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "from captum.attr import visualization\n", - "vis_data_records =[]\n", - "vis_data_records.append(visualization.VisualizationDataRecord(\n", - " attributions,\n", - " pred_prob,\n", - " pred_class,\n", - " true_class,\n", - " attr_class,\n", - " attributions.sum(), \n", - " tokens,\n", - " delta))" - ] - }, - { - "cell_type": "code", - "execution_count": 25, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
            Legend: Negative | Neutral | Positive
            True Label: Business | Predicted Label: \"Sci/Tech\" (0.75) | Attribution Label: world | Attribution Score: -0.11 | Word Importance: [CLS] bloomberg has reported on the economy [SEP]
            " - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "vis = visualization.visualize_text(vis_data_records)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### visualization appreas as below\n", - "![viz1.png](./viz1.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Cleanup Script" - ] - }, - { - "cell_type": "code", - "execution_count": 85, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "inferenceservice.serving.kserve.io \"bert-dist\" deleted\n" - ] - } - ], - "source": [ - "! kubectl delete --all isvc -n $NAMESPACE" - ] - }, - { - "cell_type": "code", - "execution_count": 84, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "pod \"create-dist-pipeline-444nk-3959473792\" deleted\n", - "pod \"training-pipeline-trb5h-1876153621\" deleted\n", - "pod \"training-pipeline-trb5h-284914308\" deleted\n", - "pod \"training-pipeline-trb5h-3177383612\" deleted\n", - "pod \"training-pipeline-trb5h-3252145113\" deleted\n", - "pod \"training-pipeline-trb5h-3265872190\" deleted\n", - "pod \"training-pipeline-trb5h-3331631297\" deleted\n", - "pod \"training-pipeline-trb5h-3651310105\" deleted\n", - "pod \"training-pipeline-trb5h-3914481085\" deleted\n" - ] - } - ], - "source": [ - "! kubectl delete pod --field-selector=status.phase==Succeeded -n $NAMESPACE" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.10" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/samples/contrib/pytorch-samples/Pipeline-Bert.ipynb b/samples/contrib/pytorch-samples/Pipeline-Bert.ipynb deleted file mode 100644 index 97440165e42..00000000000 --- a/samples/contrib/pytorch-samples/Pipeline-Bert.ipynb +++ /dev/null @@ -1,779 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "# Copyright (c) Facebook, Inc. and its affiliates.\n", - "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", - "# you may not use this file except in compliance with the License.\n", - "# You may obtain a copy of the License at\n", - "#\n", - "# http://www.apache.org/licenses/LICENSE-2.0\n", - "#\n", - "# Unless required by applicable law or agreed to in writing, software\n", - "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", - "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", - "# See the License for the specific language governing permissions and\n", - "# limitations under the License." 
- ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Bert Pipeline : PyTorch BERT News Classfication\n", - "\n", - "This notebook shows PyTorch BERT end-to-end news classification example using Kubeflow Pipelines.\n", - "\n", - "\n", - "An example notebook that demonstrates how to:\n", - "\n", - "* Get different tasks needed for the pipeline\n", - "* Create a Kubeflow pipeline\n", - "* Include Pytorch KFP components to preprocess, train, visualize and deploy the model in the pipeline\n", - "* Submit a job for execution\n", - "* Query(prediction and explain) the final deployed model\n", - "* Interpretation of the model using the Captum Insights\n" - ] - }, - { - "cell_type": "code", - "execution_count": 44, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "! pip uninstall -y kfp\n", - "! pip install --no-cache-dir kfp captum" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'1.8.12'" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import kfp\n", - "import json\n", - "import os\n", - "from kfp.onprem import use_k8s_secret\n", - "from kfp import components\n", - "from kfp.components import load_component_from_file, load_component_from_url\n", - "from kfp import dsl\n", - "from kfp import compiler\n", - "\n", - "kfp.__version__" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Enter your gateway and the cookie\n", - "[Use this extension on chrome to get token]( https://chrome.google.com/webstore/detail/editthiscookie/fngmhnnpilhplaeedifhccceomclgfbg?hl=en)\n", - "\n", - "![image.png](./image.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Update values for the ingress gateway and auth session" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "INGRESS_GATEWAY='http://istio-ingressgateway.istio-system.svc.cluster.local'\n", - "AUTH=\"\"\n", - "NAMESPACE=\"kubeflow-user-example-com\"\n", - "COOKIE=\"authservice_session=\"+AUTH\n", - "EXPERIMENT=\"Default\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Set Log bucket and Tensorboard Image" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "MINIO_ENDPOINT=\"http://minio-service.kubeflow:9000\"\n", - "LOG_BUCKET=\"mlpipeline\"\n", - "TENSORBOARD_IMAGE=\"public.ecr.aws/pytorch-samples/tboard:latest\"" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "client = kfp.Client(host=INGRESS_GATEWAY+\"/pipeline\", cookies=COOKIE)" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "Experiment details." 
- ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "{'created_at': datetime.datetime(2022, 4, 21, 9, 45, 22, tzinfo=tzlocal()),\n", - " 'description': None,\n", - " 'id': 'b4bee8c3-381b-42a0-9494-bc81eb9aa359',\n", - " 'name': 'Default',\n", - " 'resource_references': [{'key': {'id': 'kubeflow-user-example-com',\n", - " 'type': 'NAMESPACE'},\n", - " 'name': None,\n", - " 'relationship': 'OWNER'}],\n", - " 'storage_state': 'STORAGESTATE_AVAILABLE'}" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.create_experiment(EXPERIMENT)\n", - "experiments = client.list_experiments(namespace=NAMESPACE)\n", - "my_experiment = experiments.experiments[0]\n", - "my_experiment" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Set Inference parameters" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "DEPLOY_NAME=\"bertserve\"\n", - "MODEL_NAME=\"bert\"" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Processing prediction_component.yaml\n", - "Processing ax_complete_trials_component.yaml\n", - "Processing preprocess_component.yaml\n", - "Processing train_component.yaml\n", - "Processing tensorboard_component.yaml\n", - "Processing ax_generate_trials_component.yaml\n", - "Processing minio_component.yaml\n", - "Processing copy_component.yaml\n", - "Processing ax_train_component.yaml\n" - ] - } - ], - "source": [ - "! python utils/generate_templates.py bert/template_mapping.json" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [], - "source": [ - "prepare_tensorboard_op = load_component_from_file(\"yaml/tensorboard_component.yaml\")\n", - "prep_op = components.load_component_from_file(\n", - " \"yaml/preprocess_component.yaml\"\n", - ")\n", - "train_op = components.load_component_from_file(\n", - " \"yaml/train_component.yaml\"\n", - ")\n", - "deploy_op = load_component_from_file(\n", - " \"../../../components/kserve/component.yaml\"\n", - ")\n", - "minio_op = components.load_component_from_file(\n", - " \"yaml/minio_component.yaml\"\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Define pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [], - "source": [ - "@dsl.pipeline(name=\"Training pipeline\", description=\"Sample training job test\")\n", - "def pytorch_bert( # pylint: disable=too-many-arguments\n", - " minio_endpoint=MINIO_ENDPOINT,\n", - " log_bucket=LOG_BUCKET,\n", - " log_dir=f\"tensorboard/logs/{dsl.RUN_ID_PLACEHOLDER}\",\n", - " mar_path=f\"mar/{dsl.RUN_ID_PLACEHOLDER}/model-store\",\n", - " config_prop_path=f\"mar/{dsl.RUN_ID_PLACEHOLDER}/config\",\n", - " model_uri=f\"s3://mlpipeline/mar/{dsl.RUN_ID_PLACEHOLDER}\",\n", - " tf_image=TENSORBOARD_IMAGE,\n", - " deploy=DEPLOY_NAME,\n", - " namespace=NAMESPACE,\n", - " confusion_matrix_log_dir=f\"confusion_matrix/{dsl.RUN_ID_PLACEHOLDER}/\",\n", - " num_samples=1000,\n", - " max_epochs=1\n", - "):\n", - " \"\"\"Thid method defines the pipeline tasks and operations\"\"\"\n", - " prepare_tb_task = prepare_tensorboard_op(\n", - " log_dir_uri=f\"s3://{log_bucket}/{log_dir}\",\n", - " image=tf_image,\n", - " pod_template_spec=json.dumps({\n", - " \"spec\": {\n", - " 
\"containers\": [{\n", - " \"env\": [\n", - " {\n", - " \"name\": \"AWS_ACCESS_KEY_ID\",\n", - " \"valueFrom\": {\n", - " \"secretKeyRef\": {\n", - " \"name\": \"mlpipeline-minio-artifact\",\n", - " \"key\": \"accesskey\",\n", - " }\n", - " },\n", - " },\n", - " {\n", - " \"name\": \"AWS_SECRET_ACCESS_KEY\",\n", - " \"valueFrom\": {\n", - " \"secretKeyRef\": {\n", - " \"name\": \"mlpipeline-minio-artifact\",\n", - " \"key\": \"secretkey\",\n", - " }\n", - " },\n", - " },\n", - " {\n", - " \"name\": \"AWS_REGION\",\n", - " \"value\": \"minio\"\n", - " },\n", - " {\n", - " \"name\": \"S3_ENDPOINT\",\n", - " \"value\": f\"{minio_endpoint}\",\n", - " },\n", - " {\n", - " \"name\": \"S3_USE_HTTPS\",\n", - " \"value\": \"0\"\n", - " },\n", - " {\n", - " \"name\": \"S3_VERIFY_SSL\",\n", - " \"value\": \"0\"\n", - " },\n", - " ]\n", - " }]\n", - " }\n", - " }),\n", - " ).set_display_name(\"Visualization\")\n", - "\n", - " prep_task = (\n", - " prep_op().after(prepare_tb_task\n", - " ).set_display_name(\"Preprocess & Transform\")\n", - " )\n", - " confusion_matrix_url = f\"minio://{log_bucket}/{confusion_matrix_log_dir}\"\n", - " script_args = f\"model_name=bert.pth,\" \\\n", - " f\"num_samples={num_samples},\" \\\n", - " f\"confusion_matrix_url={confusion_matrix_url}\"\n", - " # For GPU , set device count and strategy type\n", - " ptl_args = f\"max_epochs={max_epochs},accelerator=gpu,profiler=pytorch,devices=0,strategy=None\"\n", - " train_task = (\n", - " train_op(\n", - " input_data=prep_task.outputs[\"output_data\"],\n", - " script_args=script_args,\n", - " ptl_arguments=ptl_args\n", - " ).after(prep_task).set_display_name(\"Training\")\n", - " # For allocating resources, uncomment below lines\n", - " # .set_memory_request('600M')\n", - " # .set_memory_limit('1200M')\n", - " # .set_cpu_request('700m')\n", - " # .set_cpu_limit('1400m')\n", - " # For GPU uncomment below line and set GPU limit and node selector\n", - " # .set_gpu_limit(1).add_node_selector_constraint('cloud.google.com/gke-accelerator','nvidia-tesla-p4')\n", - " )\n", - "\n", - " (\n", - " minio_op(\n", - " bucket_name=\"mlpipeline\",\n", - " folder_name=log_dir,\n", - " input_path=train_task.outputs[\"tensorboard_root\"],\n", - " filename=\"\",\n", - " ).after(train_task).set_display_name(\"Tensorboard Events Pusher\")\n", - " )\n", - " minio_mar_upload = (\n", - " minio_op(\n", - " bucket_name=\"mlpipeline\",\n", - " folder_name=mar_path,\n", - " input_path=train_task.outputs[\"checkpoint_dir\"],\n", - " filename=\"bert_test.mar\",\n", - " ).after(train_task).set_display_name(\"Mar Pusher\")\n", - " )\n", - " (\n", - " minio_op(\n", - " bucket_name=\"mlpipeline\",\n", - " folder_name=config_prop_path,\n", - " input_path=train_task.outputs[\"checkpoint_dir\"],\n", - " filename=\"config.properties\",\n", - " ).after(train_task).set_display_name(\"Conifg Pusher\")\n", - " )\n", - "\n", - " model_uri = str(model_uri)\n", - " # pylint: disable=unused-variable\n", - " isvc_yaml = \"\"\"\n", - " apiVersion: \"serving.kserve.io/v1beta1\"\n", - " kind: \"InferenceService\"\n", - " metadata:\n", - " name: {}\n", - " namespace: {}\n", - " spec:\n", - " predictor:\n", - " serviceAccountName: sa\n", - " pytorch:\n", - " protocolVersion: v2\n", - " storageUri: {}\n", - " resources:\n", - " requests: \n", - " cpu: 4\n", - " memory: 8Gi\n", - " limits:\n", - " cpu: 4\n", - " memory: 8Gi\n", - " \"\"\".format(deploy, namespace, model_uri)\n", - "\n", - " # For GPU inference use below yaml with gpu count and accelerator\n", - " gpu_count = 
\"1\"\n", - " accelerator = \"nvidia-tesla-p4\"\n", - " isvc_gpu_yaml = \"\"\"\n", - " apiVersion: \"serving.kserve.io/v1beta1\"\n", - " kind: \"InferenceService\"\n", - " metadata:\n", - " name: {}\n", - " namespace: {}\n", - " spec:\n", - " predictor:\n", - " serviceAccountName: sa\n", - " pytorch:\n", - " protocolVersion: v2\n", - " storageUri: {}\n", - " resources:\n", - " requests: \n", - " cpu: 4\n", - " memory: 8Gi\n", - " limits:\n", - " cpu: 4\n", - " memory: 8Gi\n", - " nvidia.com/gpu: {}\n", - " nodeSelector:\n", - " cloud.google.com/gke-accelerator: {}\n", - "\"\"\".format(deploy, namespace, model_uri, gpu_count, accelerator)\n", - " # Update inferenceservice_yaml for GPU inference\n", - " deploy_task = (\n", - " deploy_op(action=\"apply\", inferenceservice_yaml=isvc_yaml\n", - " ).after(minio_mar_upload).set_display_name(\"Deployer\")\n", - " )\n", - "\n", - " dsl.get_pipeline_conf().add_op_transformer(\n", - " use_k8s_secret(\n", - " secret_name=\"mlpipeline-minio-artifact\",\n", - " k8s_secret_key_to_env={\n", - " \"secretkey\": \"MINIO_SECRET_KEY\",\n", - " \"accesskey\": \"MINIO_ACCESS_KEY\",\n", - " },\n", - " )\n", - " )" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": {}, - "outputs": [], - "source": [ - "# Compile pipeline\n", - "compiler.Compiler().compile(pytorch_bert, 'pytorch.tar.gz', type_check=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "Run details." - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "# Execute pipeline\n", - "run = client.run_pipeline(my_experiment.id, 'pytorch-bert', 'pytorch.tar.gz')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Wait for inference service below to go to `READY True` state." - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "NAME URL READY PREV LATEST PREVROLLEDOUTREVISION LATESTREADYREVISION AGE\n", - "bertserve http://bertserve.kubeflow-user-example-com.example.com True 100 bertserve-predictor-default-00003 160m\n" - ] - } - ], - "source": [ - "!kubectl get isvc $DEPLOY" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Get Inferenceservice name" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'bertserve.kubeflow-user-example-com.example.com'" - ] - }, - "execution_count": 23, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "INFERENCE_SERVICE_LIST = ! 
kubectl get isvc {DEPLOY_NAME} -n {NAMESPACE} -o json | python3 -c \"import sys, json; print(json.load(sys.stdin)['status']['url'])\"| tr -d '\"' | cut -d \"/\" -f 3\n", - "INFERENCE_SERVICE_NAME = INFERENCE_SERVICE_LIST[0]\n", - "INFERENCE_SERVICE_NAME" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Prediction Request" - ] - }, - { - "cell_type": "code", - "execution_count": 24, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{\n", - " \"id\": \"d3b15cad-50a2-4eaf-80ce-8b0a428bd298\",\n", - " \"inputs\": [{\n", - " \"name\": \"4b7c7d4a-51e4-43c8-af61-04639f6ef4bc\",\n", - " \"shape\": -1,\n", - " \"datatype\": \"BYTES\",\n", - " \"data\": \"Bloomberg has reported on the economy\"\n", - " }\n", - " ]\n", - "}" - ] - } - ], - "source": [ - "! cat ./bert/sample.txt" - ] - }, - { - "cell_type": "code", - "execution_count": 45, - "metadata": {}, - "outputs": [], - "source": [ - "!curl -v -H \"Host: $INFERENCE_SERVICE_NAME\" -H \"Cookie: $COOKIE\" \"$INGRESS_GATEWAY/v2/models/$MODEL_NAME/infer\" -d @./bert/sample.txt > bert_prediction_output.json" - ] - }, - { - "cell_type": "code", - "execution_count": 26, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{\"id\": \"d3b15cad-50a2-4eaf-80ce-8b0a428bd298\", \"model_name\": \"bert_test\", \"model_version\": \"1\", \"outputs\": [{\"name\": \"predict\", \"shape\": [], \"datatype\": \"BYTES\", \"data\": [\"\\\"Business\\\"\"]}]}" - ] - } - ], - "source": [ - "! cat bert_prediction_output.json" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Explanation Request" - ] - }, - { - "cell_type": "code", - "execution_count": 41, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "!curl -v -H \"Host: $INFERENCE_SERVICE_NAME\" -H \"Cookie: $COOKIE\" \"$INGRESS_GATEWAY/v2/models/$MODEL_NAME/explain\" -d @./bert/sample.txt > bert_explaination_output.json" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{\"id\": \"d3b15cad-50a2-4eaf-80ce-8b0a428bd298\", \"model_name\": \"bert_test\", \"model_version\": \"1\", \"outputs\": [{\"name\": \"explain\", \"shape\": [], \"datatype\": \"BYTES\", \"data\": [{\"words\": [\"bloomberg\", \"has\", \"reported\", \"on\", \"the\", \"economy\"], \"importances\": [0.2124089759942075, 0.3070123112652129, -0.3175794877732026, -0.4493290921520886, -0.23262562691072097, 0.7097589881393321], \"delta\": 0.01156902069987975}]}]}" - ] - } - ], - "source": [ - "! 
cat bert_explaination_output.json" - ] - }, - { - "cell_type": "code", - "execution_count": 43, - "metadata": { - "scrolled": true, - "tags": [] - }, - "outputs": [], - "source": [ - "explanations_json = json.loads(open(\"./bert_explaination_output.json\", \"r\").read())\n", - "explanations_json" - ] - }, - { - "cell_type": "code", - "execution_count": 33, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "prediction_json = json.loads(open(\"./bert_prediction_output.json\", \"r\").read())" - ] - }, - { - "cell_type": "code", - "execution_count": 38, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "import torch\n", - "attributions = explanations_json[\"outputs\"][0][\"data\"][0]['importances']\n", - "tokens = explanations_json[\"outputs\"][0][\"data\"][0]['words']\n", - "delta = explanations_json[\"outputs\"][0][\"data\"][0]['delta']\n", - "\n", - "attributions = torch.tensor(attributions)\n", - "pred_prob = 0.75\n", - "pred_class = str(prediction_json[\"outputs\"][0][\"data\"][0]).strip('\"\"')\n", - "true_class = \"Business\"\n", - "attr_class =\"world\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Visualization of Predictions" - ] - }, - { - "cell_type": "code", - "execution_count": 39, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "from captum.attr import visualization\n", - "vis_data_records =[]\n", - "vis_data_records.append(visualization.VisualizationDataRecord(\n", - " attributions,\n", - " pred_prob,\n", - " pred_class,\n", - " true_class,\n", - " attr_class,\n", - " attributions.sum(), \n", - " tokens,\n", - " delta))" - ] - }, - { - "cell_type": "code", - "execution_count": 40, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
            Legend: Negative | Neutral | Positive
            True Label: Business | Predicted Label: Business (0.75) | Attribution Label: world | Attribution Score: 0.23 | Word Importance: bloomberg has reported on the economy
            " - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "vis = visualization.visualize_text(vis_data_records)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### visualization appreas as below\n", - "![viz1.png](./viz1.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Cleanup Script" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "! kubectl delete --all isvc -n $NAMESPACE" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [], - "source": [ - "! kubectl delete pod --field-selector=status.phase==Succeeded -n $NAMESPACE" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.10" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/samples/contrib/pytorch-samples/Pipeline-Cifar10-Captum-Insights.ipynb b/samples/contrib/pytorch-samples/Pipeline-Cifar10-Captum-Insights.ipynb deleted file mode 100644 index 1c9967ddc78..00000000000 --- a/samples/contrib/pytorch-samples/Pipeline-Cifar10-Captum-Insights.ipynb +++ /dev/null @@ -1,1063 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "# Copyright (c) Facebook, Inc. and its affiliates.\n", - "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", - "# you may not use this file except in compliance with the License.\n", - "# You may obtain a copy of the License at\n", - "#\n", - "# http://www.apache.org/licenses/LICENSE-2.0\n", - "#\n", - "# Unless required by applicable law or agreed to in writing, software\n", - "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", - "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", - "# See the License for the specific language governing permissions and\n", - "# limitations under the License." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# KubeFlow Pipelines : Pytorch Cifar10 Image classification\n", - "\n", - "This notebook shows PyTorch CIFAR10 end-to-end classification example using Kubeflow Pipelines. \n", - "\n", - "An example notebook that demonstrates how to:\n", - "\n", - "* Get different tasks needed for the pipeline\n", - "* Create a Kubeflow pipeline\n", - "* Include Pytorch KFP components to preprocess, train, visualize and deploy the model in the pipeline\n", - "* Submit a job for execution\n", - "* Query(prediction and explain) the final deployed model\n", - "* Interpretation of the model using the Captum Insights\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## import the necessary packages" - ] - }, - { - "cell_type": "code", - "execution_count": 35, - "metadata": {}, - "outputs": [], - "source": [ - "! pip uninstall -y kfp\n", - "! 
pip install kfp" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'1.8.12'" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import kfp\n", - "import json\n", - "import os\n", - "from kfp.onprem import use_k8s_secret\n", - "from kfp import components\n", - "from kfp.components import load_component_from_file, load_component_from_url\n", - "from kfp import dsl\n", - "from kfp import compiler\n", - "\n", - "import numpy as np\n", - "import logging\n", - "\n", - "\n", - "kfp.__version__" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Enter your gateway and the auth token\n", - "[Use this extension on chrome to get token]( https://chrome.google.com/webstore/detail/editthiscookie/fngmhnnpilhplaeedifhccceomclgfbg?hl=en)\n", - "\n", - "![image.png](./image.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Update values for the ingress gateway and auth session" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "INGRESS_GATEWAY='http://istio-ingressgateway.istio-system.svc.cluster.local'\n", - "AUTH=\"\"\n", - "NAMESPACE=\"kubeflow-user-example-com\"\n", - "COOKIE=\"authservice_session=\"+AUTH\n", - "EXPERIMENT=\"Default\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Set the Log bucket and Tensorboard Image" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "MINIO_ENDPOINT=\"http://minio-service.kubeflow:9000\"\n", - "LOG_BUCKET=\"mlpipeline\"\n", - "TENSORBOARD_IMAGE=\"public.ecr.aws/pytorch-samples/tboard:latest\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Set the client and create the experiment" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "client = kfp.Client(host=INGRESS_GATEWAY+\"/pipeline\", cookies=COOKIE)" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "Experiment details." 
- ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "{'created_at': datetime.datetime(2022, 4, 21, 9, 45, 22, tzinfo=tzlocal()),\n", - " 'description': None,\n", - " 'id': 'b4bee8c3-381b-42a0-9494-bc81eb9aa359',\n", - " 'name': 'Default',\n", - " 'resource_references': [{'key': {'id': 'kubeflow-user-example-com',\n", - " 'type': 'NAMESPACE'},\n", - " 'name': None,\n", - " 'relationship': 'OWNER'}],\n", - " 'storage_state': 'STORAGESTATE_AVAILABLE'}" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.create_experiment(EXPERIMENT)\n", - "experiments = client.list_experiments(namespace=NAMESPACE)\n", - "my_experiment = experiments.experiments[0]\n", - "my_experiment" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Set the Inference parameters" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "DEPLOY_NAME=\"torchserve\"\n", - "MODEL_NAME=\"cifar10\"\n", - "ISVC_NAME=DEPLOY_NAME+\".\"+NAMESPACE+\".\"+\"example.com\"\n", - "INPUT_REQUEST=\"https://raw.githubusercontent.com/kubeflow/pipelines/master/samples/contrib/pytorch-samples/cifar10/input.json\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Load the the components yaml files for setting up the components" - ] - }, - { - "cell_type": "code", - "execution_count": 39, - "metadata": {}, - "outputs": [], - "source": [ - "! python utils/generate_templates.py cifar10/template_mapping.json" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [], - "source": [ - "prepare_tensorboard_op = load_component_from_file(\"yaml/tensorboard_component.yaml\")\n", - "\n", - "prep_op = components.load_component_from_file(\n", - " \"yaml/preprocess_component.yaml\"\n", - ")\n", - "\n", - "train_op = components.load_component_from_file(\n", - " \"yaml/train_component.yaml\"\n", - ")\n", - "deploy_op = load_component_from_file(\n", - " \"../../../components/kserve/component.yaml\"\n", - ")\n", - "\n", - "pred_op = load_component_from_file(\"yaml/prediction_component.yaml\")\n", - "\n", - "minio_op = components.load_component_from_file(\n", - " \"yaml/minio_component.yaml\"\n", - ")\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Define the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [], - "source": [ - "\n", - "@dsl.pipeline(\n", - " name=\"Training Cifar10 pipeline\", description=\"Cifar 10 dataset pipeline\"\n", - ")\n", - "def pytorch_cifar10( # pylint: disable=too-many-arguments\n", - " minio_endpoint=MINIO_ENDPOINT,\n", - " log_bucket=LOG_BUCKET,\n", - " log_dir=f\"tensorboard/logs/{dsl.RUN_ID_PLACEHOLDER}\",\n", - " mar_path=f\"mar/{dsl.RUN_ID_PLACEHOLDER}/model-store\",\n", - " config_prop_path=f\"mar/{dsl.RUN_ID_PLACEHOLDER}/config\",\n", - " model_uri=f\"s3://mlpipeline/mar/{dsl.RUN_ID_PLACEHOLDER}\",\n", - " tf_image=TENSORBOARD_IMAGE,\n", - " deploy=DEPLOY_NAME,\n", - " isvc_name=ISVC_NAME,\n", - " model=MODEL_NAME,\n", - " namespace=NAMESPACE,\n", - " confusion_matrix_log_dir=f\"confusion_matrix/{dsl.RUN_ID_PLACEHOLDER}/\",\n", - " checkpoint_dir=\"checkpoint_dir/cifar10\",\n", - " input_req=INPUT_REQUEST,\n", - " cookie=COOKIE,\n", - " ingress_gateway=INGRESS_GATEWAY,\n", - "):\n", - "\n", - " def sleep_op(seconds):\n", - " \"\"\"Sleep for a while.\"\"\"\n", - 
" return dsl.ContainerOp(\n", - " name=\"Sleep \" + str(seconds) + \" seconds\",\n", - " image=\"python:alpine3.6\",\n", - " command=[\"sh\", \"-c\"],\n", - " arguments=[\n", - " 'python -c \"import time; time.sleep($0)\"',\n", - " str(seconds)\n", - " ],\n", - " )\n", - "\n", - " \"\"\"This method defines the pipeline tasks and operations\"\"\"\n", - " pod_template_spec = json.dumps({\n", - " \"spec\": {\n", - " \"containers\": [{\n", - " \"env\": [\n", - " {\n", - " \"name\": \"AWS_ACCESS_KEY_ID\",\n", - " \"valueFrom\": {\n", - " \"secretKeyRef\": {\n", - " \"name\": \"mlpipeline-minio-artifact\",\n", - " \"key\": \"accesskey\",\n", - " }\n", - " },\n", - " },\n", - " {\n", - " \"name\": \"AWS_SECRET_ACCESS_KEY\",\n", - " \"valueFrom\": {\n", - " \"secretKeyRef\": {\n", - " \"name\": \"mlpipeline-minio-artifact\",\n", - " \"key\": \"secretkey\",\n", - " }\n", - " },\n", - " },\n", - " {\n", - " \"name\": \"AWS_REGION\",\n", - " \"value\": \"minio\"\n", - " },\n", - " {\n", - " \"name\": \"S3_ENDPOINT\",\n", - " \"value\": f\"{minio_endpoint}\",\n", - " },\n", - " {\n", - " \"name\": \"S3_USE_HTTPS\",\n", - " \"value\": \"0\"\n", - " },\n", - " {\n", - " \"name\": \"S3_VERIFY_SSL\",\n", - " \"value\": \"0\"\n", - " },\n", - " ]\n", - " }]\n", - " }\n", - " })\n", - "\n", - " prepare_tb_task = prepare_tensorboard_op(\n", - " log_dir_uri=f\"s3://{log_bucket}/{log_dir}\",\n", - " image=tf_image,\n", - " pod_template_spec=pod_template_spec,\n", - " ).set_display_name(\"Visualization\")\n", - "\n", - " prep_task = (\n", - " prep_op().after(prepare_tb_task\n", - " ).set_display_name(\"Preprocess & Transform\")\n", - " )\n", - " confusion_matrix_url = f\"minio://{log_bucket}/{confusion_matrix_log_dir}\"\n", - " script_args = f\"model_name=resnet.pth,\" \\\n", - " f\"confusion_matrix_url={confusion_matrix_url}\"\n", - " # For GPU, set number of devices and strategy type\n", - " ptl_args = f\"max_epochs=1, devices=0, accelerator=gpu, strategy=None, profiler=pytorch\"\n", - " train_task = (\n", - " train_op(\n", - " input_data=prep_task.outputs[\"output_data\"],\n", - " script_args=script_args,\n", - " ptl_arguments=ptl_args\n", - " ).after(prep_task).set_display_name(\"Training\")\n", - " # For allocating resources, uncomment below lines\n", - " # .set_memory_request('600M')\n", - " # .set_memory_limit('1200M')\n", - " # .set_cpu_request('700m')\n", - " # .set_cpu_limit('1400m')\n", - " # For GPU uncomment below line and set GPU limit and node selector\n", - " # .set_gpu_limit(1).add_node_selector_constraint('cloud.google.com/gke-accelerator','nvidia-tesla-p4')\n", - " )\n", - "\n", - " (\n", - " minio_op(\n", - " bucket_name=\"mlpipeline\",\n", - " folder_name=log_dir,\n", - " input_path=train_task.outputs[\"tensorboard_root\"],\n", - " filename=\"\",\n", - " ).after(train_task).set_display_name(\"Tensorboard Events Pusher\")\n", - " )\n", - "\n", - " (\n", - " minio_op(\n", - " bucket_name=\"mlpipeline\",\n", - " folder_name=checkpoint_dir,\n", - " input_path=train_task.outputs[\"checkpoint_dir\"],\n", - " filename=\"\",\n", - " ).after(train_task).set_display_name(\"checkpoint_dir Pusher\")\n", - " )\n", - "\n", - " minio_mar_upload = (\n", - " minio_op(\n", - " bucket_name=\"mlpipeline\",\n", - " folder_name=mar_path,\n", - " input_path=train_task.outputs[\"checkpoint_dir\"],\n", - " filename=\"cifar10_test.mar\",\n", - " ).after(train_task).set_display_name(\"Mar Pusher\")\n", - " )\n", - "\n", - " (\n", - " minio_op(\n", - " bucket_name=\"mlpipeline\",\n", - " 
folder_name=config_prop_path,\n", - " input_path=train_task.outputs[\"checkpoint_dir\"],\n", - " filename=\"config.properties\",\n", - " ).after(train_task).set_display_name(\"Conifg Pusher\")\n", - " )\n", - "\n", - " model_uri = str(model_uri)\n", - " # pylint: disable=unused-variable\n", - " isvc_yaml = \"\"\"\n", - " apiVersion: \"serving.kserve.io/v1beta1\"\n", - " kind: \"InferenceService\"\n", - " metadata:\n", - " name: {}\n", - " namespace: {}\n", - " spec:\n", - " predictor:\n", - " serviceAccountName: sa\n", - " pytorch:\n", - " protocolVersion: v2\n", - " storageUri: {}\n", - " resources:\n", - " requests: \n", - " cpu: 16\n", - " memory: 24Gi\n", - " limits:\n", - " cpu: 16\n", - " memory: 24Gi\n", - " \"\"\".format(\n", - " deploy, namespace, model_uri\n", - " )\n", - " \n", - " # For GPU inference use below yaml with gpu count and accelerator\n", - " gpu_count = \"1\"\n", - " accelerator = \"nvidia-tesla-p4\"\n", - " isvc_gpu_yaml = \"\"\"# pylint: disable=unused-variable\n", - " apiVersion: \"serving.kserve.io/v1beta1\"\n", - " kind: \"InferenceService\"\n", - " metadata:\n", - " name: {}\n", - " namespace: {}\n", - " spec:\n", - " predictor:\n", - " serviceAccountName: sa\n", - " pytorch:\n", - " protocolVersion: v2\n", - " storageUri: {}\n", - " resources:\n", - " requests: \n", - " cpu: 16\n", - " memory: 24Gi\n", - " limits:\n", - " cpu: 16\n", - " memory: 24Gi\n", - " nvidia.com/gpu: {}\n", - " nodeSelector:\n", - " cloud.google.com/gke-accelerator: {}\n", - "\"\"\".format(deploy, namespace, model_uri, gpu_count, accelerator)\n", - " # Update inferenceservice_yaml for GPU inference\n", - " deploy_task = (\n", - " deploy_op(action=\"apply\", inferenceservice_yaml=isvc_yaml\n", - " ).after(minio_mar_upload).set_display_name(\"Deployer\")\n", - " )\n", - " # Wait here for model to be loaded in torchserve for inference\n", - " sleep_task = sleep_op(60).after(deploy_task).set_display_name(\"Sleep\")\n", - " # Make Inference request\n", - " pred_task = (\n", - " pred_op(\n", - " host_name=isvc_name,\n", - " input_request=input_req,\n", - " cookie=cookie,\n", - " url=ingress_gateway,\n", - " model=model,\n", - " inference_type=\"infer\",\n", - " ).after(sleep_task).set_display_name(\"Prediction\")\n", - " )\n", - " (\n", - " pred_op(\n", - " host_name=isvc_name,\n", - " input_request=input_req,\n", - " cookie=cookie,\n", - " url=ingress_gateway,\n", - " model=model,\n", - " inference_type=\"explain\",\n", - " ).after(pred_task).set_display_name(\"Explanation\")\n", - " )\n", - "\n", - " dsl.get_pipeline_conf().add_op_transformer(\n", - " use_k8s_secret(\n", - " secret_name=\"mlpipeline-minio-artifact\",\n", - " k8s_secret_key_to_env={\n", - " \"secretkey\": \"MINIO_SECRET_KEY\",\n", - " \"accesskey\": \"MINIO_ACCESS_KEY\",\n", - " },\n", - " )\n", - " )" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Compile the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": 40, - "metadata": {}, - "outputs": [], - "source": [ - "compiler.Compiler().compile(pytorch_cifar10, 'pytorch.tar.gz', type_check=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Execute the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "Run details." 
- ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "run = client.run_pipeline(my_experiment.id, 'pytorch-cifar10', 'pytorch.tar.gz')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Wait for inference service below to go to READY True state" - ] - }, - { - "cell_type": "code", - "execution_count": 41, - "metadata": {}, - "outputs": [], - "source": [ - "!kubectl get isvc $DEPLOY" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Get the Inference service name" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'torchserve.kubeflow-user-example-com.example.com'" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "INFERENCE_SERVICE_LIST = ! kubectl get isvc {DEPLOY_NAME} -n {NAMESPACE} -o json | python3 -c \"import sys, json; print(json.load(sys.stdin)['status']['url'])\"| tr -d '\"' | cut -d \"/\" -f 3\n", - "INFERENCE_SERVICE_NAME = INFERENCE_SERVICE_LIST[0]\n", - "INFERENCE_SERVICE_NAME" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Use the deployed model for prediction request and save the output into a json" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [], - "source": [ - "!python cifar10/tobytes.py cifar10/kitten.png" - ] - }, - { - "cell_type": "code", - "execution_count": 42, - "metadata": {}, - "outputs": [], - "source": [ - "!curl -v -H \"Host: $INFERENCE_SERVICE_NAME\" -H \"Cookie: $COOKIE\" \"$INGRESS_GATEWAY/v2/models/$MODEL_NAME/infer\" -d @./cifar10/kitten.json > cifar10_prediction_output.json" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{\"id\": \"a7c6539c-c3e6-456b-9517-76253d400a3b\", \"model_name\": \"cifar10_test\", \"model_version\": \"1\", \"outputs\": [{\"name\": \"predict\", \"shape\": [], \"datatype\": \"BYTES\", \"data\": [{\"truck\": 0.44697049260139465, \"car\": 0.435802698135376, \"frog\": 0.07103736698627472, \"plane\": 0.016822654753923416, \"cat\": 0.011767607182264328}]}]}" - ] - } - ], - "source": [ - "! 
cat cifar10_prediction_output.json" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Use the deployed model for explain request and save the output into a json" - ] - }, - { - "cell_type": "code", - "execution_count": 36, - "metadata": {}, - "outputs": [], - "source": [ - "!curl -v -H \"Host: $INFERENCE_SERVICE_NAME\" -H \"Cookie: $COOKIE\" \"$INGRESS_GATEWAY/v2/models/$MODEL_NAME/explain\" -d @./cifar10/kitten.json > cifar10_explanation_output.json" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Model Interpretation using Captum Vis and Insights" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Install dependencies for Captum Insights" - ] - }, - { - "cell_type": "code", - "execution_count": 37, - "metadata": {}, - "outputs": [], - "source": [ - "!./install-dependencies.sh" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "##### import the necessary packages" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [], - "source": [ - "from PIL import Image\n", - "import numpy as np \n", - "import matplotlib.pyplot as plt\n", - "from matplotlib.colors import LinearSegmentedColormap\n", - "import torchvision.transforms as transforms\n", - "import torch\n", - "import torch.nn.functional as F\n", - "import json\n", - "import captum\n", - "from captum.attr import LayerAttribution\n", - "from captum.attr import visualization as viz\n", - "import base64\n", - "import os\n", - "import io" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "##### Read the prediction, explanation, and the class mapping file which saved during the prediction and expalain requests." - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "metadata": {}, - "outputs": [], - "source": [ - "prediction_json = json.loads(open(\"./cifar10_prediction_output.json\", \"r\").read())\n", - "\n", - "explainations_json = json.loads(open(\"./cifar10_explanation_output.json\", \"r\").read())\n", - "\n", - "labels_path = './cifar10/class_mapping.json'\n", - "with open(labels_path) as json_data:\n", - " idx_to_labels = json.load(json_data) " - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Saving /home/jovyan/jag/pipelines/samples/contrib/pytorch-samples/captum_kitten_0.jpeg\n", - "Saving /home/jovyan/jag/pipelines/samples/contrib/pytorch-samples/captum_kitten_1.jpeg\n", - "Saving /home/jovyan/jag/pipelines/samples/contrib/pytorch-samples/captum_kitten_2.jpeg\n" - ] - } - ], - "source": [ - "count = 0\n", - "for i in range(len(explainations_json[\"outputs\"])):\n", - "\n", - " image = base64.b64decode(explainations_json[\"outputs\"][i][\"data\"][0][\"b64\"]) \n", - " fileName = 'captum_kitten_{}.jpeg'.format(count)\n", - "\n", - " imagePath = ( os.getcwd() +\"/\" + fileName)\n", - " img = Image.open(io.BytesIO(image))\n", - " img = img.convert('RGB')\n", - " img.save(imagePath, 'jpeg', quality=100)\n", - " print(\"Saving \", imagePath)\n", - " count += 1\n", - " " - ] - }, - { - "cell_type": "code", - "execution_count": 43, - "metadata": {}, - "outputs": [], - "source": [ - "from IPython.display import Image\n", - "Image(filename='captum_kitten_0.jpeg') " - ] - }, - { - "cell_type": "code", - "execution_count": 46, - "metadata": {}, - "outputs": [], - "source": [ - "Image(filename='captum_kitten_1.jpeg') " - ] - }, - { - "cell_type": "code", - 
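For readers who prefer a pure-Python client over the curl cell above, here is a minimal sketch of the same KServe v2 inference call using the `requests` library (not used in the original notebook), assuming the notebook's `INGRESS_GATEWAY`, `INFERENCE_SERVICE_NAME`, `MODEL_NAME`, and `COOKIE` variables are already defined:

```python
# Minimal sketch, assuming INGRESS_GATEWAY, INFERENCE_SERVICE_NAME,
# MODEL_NAME and COOKIE are defined as in the cells above.
import json
import requests

with open("./cifar10/kitten.json") as f:
    payload = json.load(f)

resp = requests.post(
    f"{INGRESS_GATEWAY}/v2/models/{MODEL_NAME}/infer",
    headers={
        "Host": INFERENCE_SERVICE_NAME,  # route to the InferenceService via the Istio gateway
        "Cookie": COOKIE,                # authservice_session cookie
    },
    json=payload,
)
resp.raise_for_status()
print(json.dumps(resp.json(), indent=2))
```

Swapping `infer` for `explain` in the URL issues the explanation request in the same way.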
"execution_count": 47, - "metadata": {}, - "outputs": [], - "source": [ - "Image(filename='captum_kitten_2.jpeg') " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Captum Insights can also be used for visualization" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "##### Define the minio client for downloading the artifactes from minio storage ( model pth file and training file)" - ] - }, - { - "cell_type": "code", - "execution_count": 27, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 27, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from minio import Minio\n", - "from kubernetes import client, config\n", - "import base64\n", - "\n", - "config.load_incluster_config()\n", - "v1 = client.CoreV1Api()\n", - "sec = v1.read_namespaced_secret(\"mlpipeline-minio-artifact\", NAMESPACE).data\n", - "minio_accesskey = base64.b64decode(sec[\"accesskey\"]).decode('UTF-8')\n", - "minio_secretkey = base64.b64decode(sec[\"secretkey\"]).decode('UTF-8')\n", - "\n", - "\n", - "minio_config = {\n", - " \"HOST\": \"minio-service.kubeflow:9000\",\n", - " \"ACCESS_KEY\": minio_accesskey,\n", - " \"SECRET_KEY\": minio_secretkey,\n", - " \"BUCKET\": \"mlpipeline\",\n", - " \"FOLDER\": \"checkpoint_dir/cifar10\"}\n", - "\n", - "def _initiate_minio_client(minio_config):\n", - " minio_host = minio_config[\"HOST\"]\n", - " access_key = minio_config[\"ACCESS_KEY\"]\n", - " secret_key = minio_config[\"SECRET_KEY\"]\n", - " client = Minio(minio_host, access_key=access_key, secret_key=secret_key, secure=False)\n", - " return client\n", - "\n", - "client= _initiate_minio_client(minio_config)\n", - "client" - ] - }, - { - "cell_type": "code", - "execution_count": 28, - "metadata": {}, - "outputs": [], - "source": [ - "def download_artifact_from_minio(folder: str, artifact: str):\n", - " artifact_name = artifact.split(\"/\")[-1]\n", - " result = client.fget_object(\n", - " minio_config[\"BUCKET\"],\n", - " os.path.join(folder, artifact_name),\n", - " artifact,\n", - " )" - ] - }, - { - "cell_type": "code", - "execution_count": 29, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[INFO] Downloaded the Model Pth File.....\n", - "[INFO] Downloaded the Model Classifier File.....\n" - ] - } - ], - "source": [ - "download_artifact_from_minio(minio_config[\"FOLDER\"],\"resnet.pth\")\n", - "print(\"[INFO] Downloaded the Model Pth File.....\")\n", - "download_artifact_from_minio(minio_config[\"FOLDER\"],\"cifar10_train.py\")\n", - "print(\"[INFO] Downloaded the Model Classifier File.....\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Load the downloaded model pth file and classifer" - ] - }, - { - "cell_type": "code", - "execution_count": 38, - "metadata": {}, - "outputs": [], - "source": [ - "from cifar10_train import CIFAR10Classifier\n", - "model = CIFAR10Classifier()\n", - "\n", - "model_pt_path =\"./resnet.pth\"\n", - "model.load_state_dict(torch.load(model_pt_path,map_location=torch.device('cpu')))\n", - "model.eval()" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Predicted: truck / 9 ( 0.44697049260139465 )\n", - "Predicted: car / 1 ( 0.6488052010536194 )\n" - ] - } - ], - "source": [ - "#Lets read two test images and make the prediction and use these images for captum Insights. 
\n", - "\n", - "from PIL import Image\n", - "\n", - "transform = transforms.Compose([\n", - " transforms.Resize(224),\n", - " transforms.CenterCrop(224),\n", - " transforms.ToTensor(),\n", - " transforms.Normalize(\n", - " mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]\n", - " ),\n", - " ])\n", - "\n", - "imgs = ['./cifar10/kitten.png',\"./cifar10/horse.png\"]\n", - "\n", - "for img in imgs:\n", - " img = Image.open(img)\n", - " transformed_img = transform(img)\n", - " input_img = transformed_img.unsqueeze(0) # the model requires a dummy batch dimension\n", - "\n", - " output = model(input_img)\n", - " output = F.softmax(output, dim=1)\n", - " prediction_score, pred_label_idx = torch.topk(output, 1)\n", - " pred_label_idx.squeeze_()\n", - " predicted_label = idx_to_labels[str(pred_label_idx.squeeze_().item())]\n", - " print('Predicted:', predicted_label, '/', pred_label_idx.item(), ' (', prediction_score.squeeze().item(), ')')" - ] - }, - { - "cell_type": "code", - "execution_count": 32, - "metadata": {}, - "outputs": [], - "source": [ - "from captum.insights import AttributionVisualizer, Batch\n", - "from captum.insights.attr_vis.features import ImageFeature\n", - "\n", - "# Baseline is all-zeros input - this may differ depending on your data\n", - "def baseline_func(input):\n", - " return input * 0\n", - "\n", - "# merging our image transforms from above\n", - "def full_img_transform(input):\n", - " i = Image.open(input)\n", - " i = transform(i)\n", - " i = i.unsqueeze(0)\n", - " i.requires_grad = True\n", - " return i\n", - "\n", - "\n", - "input_imgs = torch.cat(list(map(lambda i: full_img_transform(i), imgs)), 0)\n", - "\n", - "visualizer = AttributionVisualizer(\n", - " models=[model],\n", - " score_func=lambda o: torch.nn.functional.softmax(o, 1),\n", - " classes=list(map(lambda k: idx_to_labels[k], idx_to_labels.keys())),\n", - " features=[\n", - " ImageFeature(\n", - " \"Photo\",\n", - " baseline_transforms=[baseline_func],\n", - " input_transforms=[],\n", - " )\n", - " ],\n", - " dataset=[Batch(input_imgs, labels=[3,7])]\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 48, - "metadata": {}, - "outputs": [], - "source": [ - "visualizer.serve(debug=True,port=6080)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Captum Insights output image " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![CIFAR10_Captum_Insights](./cifar10/CIFAR10_Captum_Insights.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Clean up\n", - "### Delete Viewers, Inference Services and Completed pods" - ] - }, - { - "cell_type": "code", - "execution_count": 56, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "! kubectl delete --all isvc -n $NAMESPACE" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "! 
kubectl delete pod --field-selector=status.phase==Succeeded -n $NAMESPACE" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.10" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/samples/contrib/pytorch-samples/Pipeline-Cifar10-hpo.ipynb b/samples/contrib/pytorch-samples/Pipeline-Cifar10-hpo.ipynb deleted file mode 100644 index ea16aa3dd9e..00000000000 --- a/samples/contrib/pytorch-samples/Pipeline-Cifar10-hpo.ipynb +++ /dev/null @@ -1,551 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "# Copyright (c) Facebook, Inc. and its affiliates.\n", - "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", - "# you may not use this file except in compliance with the License.\n", - "# You may obtain a copy of the License at\n", - "#\n", - "# http://www.apache.org/licenses/LICENSE-2.0\n", - "#\n", - "# Unless required by applicable law or agreed to in writing, software\n", - "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", - "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", - "# See the License for the specific language governing permissions and\n", - "# limitations under the License." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# KubeFlow Pipelines : HPO with AX - Pytorch Cifar10 Image classification\n", - "\n", - "In this example, we train a Pytorch Lightning model to using image classification cifar10 dataset. A parent run will be created during the training process,which would dump the baseline model and relevant parameters,metrics and model along with its summary,subsequently followed by a set of nested child runs, which will dump the trial results. The best parameters would be dumped into the parent run once the experiments are completed.\n", - "\n", - "This notebook shows PyTorch CIFAR10 end-to-end classification example using Kubeflow Pipelines. \n", - "\n", - "An example notebook that demonstrates how to:\n", - "\n", - "* Get different tasks needed for the pipeline\n", - "* Create a Kubeflow pipeline\n", - "* Include Pytorch KFP components to preprocess, train, visualize and deploy the model in the pipeline\n", - "* Submit a job for execution\n", - "* Query(prediction and explain) the final deployed model\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## import the necessary packages" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "! pip uninstall -y kfp\n", - "! 
pip install --no-cache-dir kfp ax-platform" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'1.6.4'" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import kfp\n", - "import json\n", - "import os\n", - "from kfp.onprem import use_k8s_secret\n", - "from kfp import components\n", - "from kfp.components import load_component_from_file, load_component_from_url, func_to_container_op, InputPath\n", - "from kfp import dsl\n", - "from kfp import compiler\n", - "\n", - "import numpy as np\n", - "import logging\n", - "\n", - "from ax.service.ax_client import AxClient\n", - "import json\n", - "\n", - "kfp.__version__" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Enter your gateway and the auth token\n", - "[Use this extension on chrome to get token]( https://chrome.google.com/webstore/detail/editthiscookie/fngmhnnpilhplaeedifhccceomclgfbg?hl=en)\n", - "\n", - "![image.png](./image.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Update values for the ingress gateway and auth session" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "INGRESS_GATEWAY='http://istio-ingressgateway.istio-system.svc.cluster.local'\n", - "AUTH=\"\" \n", - "NAMESPACE=\"kubeflow-user-example-com\"\n", - "COOKIE=\"authservice_session=\"+AUTH\n", - "EXPERIMENT=\"Default\"\n", - "dist_volume = 'dist-vol'\n", - "volume_mount_path =\"/model\"\n", - "results_path = volume_mount_path+\"/results.json\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Set the Log bucket and Tensorboard Image" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "MINIO_ENDPOINT=\"http://minio-service.kubeflow:9000\"\n", - "LOG_BUCKET=\"mlpipeline\"\n", - "TENSORBOARD_IMAGE=\"public.ecr.aws/pytorch-samples/tboard:latest\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Set the client and create the experiment" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "client = kfp.Client(host=INGRESS_GATEWAY+\"/pipeline\", cookies=COOKIE)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "Experiment details." 
- ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "{'created_at': datetime.datetime(2021, 6, 21, 13, 13, 6, tzinfo=tzlocal()),\n", - " 'description': None,\n", - " 'id': 'ba9b7266-2b1c-4729-afcd-be808c25c5af',\n", - " 'name': 'Default',\n", - " 'resource_references': [{'key': {'id': 'kubeflow-user-example-com',\n", - " 'type': 'NAMESPACE'},\n", - " 'name': None,\n", - " 'relationship': 'OWNER'}],\n", - " 'storage_state': 'STORAGESTATE_AVAILABLE'}" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.create_experiment(EXPERIMENT)\n", - "experiments = client.list_experiments(namespace=NAMESPACE)\n", - "my_experiment = experiments.experiments[0]\n", - "my_experiment" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Set the Inference parameters" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "DEPLOY_NAME=\"torchserve\"\n", - "MODEL_NAME=\"cifar10\"\n", - "ISVC_NAME=DEPLOY_NAME+\".\"+NAMESPACE+\".\"+\"example.com\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Load the the components yaml files for setting up the components" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "! python utils/generate_templates.py cifar10/ax_template_mapping.json" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [], - "source": [ - "prepare_tensorboard_op = load_component_from_file(\"yaml/tensorboard_component.yaml\")\n", - "\n", - "generate_trails_op = components.load_component_from_file(\n", - " \"yaml/ax_generate_trials_component.yaml\"\n", - ")\n", - "\n", - "complete_trails_op = components.load_component_from_file(\n", - " \"yaml/ax_complete_trials_component.yaml\"\n", - ")\n", - "\n", - "get_keys_op = components.load_component_from_file(\n", - " \"../../../components/contrib/json/Get_keys/component.yaml\"\n", - ")\n", - "\n", - "get_element_op = components.load_component_from_file(\n", - " \"../../../components/contrib/json/Get_element_by_key/component.yaml\"\n", - ")\n", - "prep_op = components.load_component_from_file(\n", - " \"yaml/preprocess_component.yaml\"\n", - ")\n", - "\n", - "# Uncomment hpo inputs in component yaml\n", - "train_op = components.load_component_from_file(\n", - " \"yaml/ax_train_component.yaml\"\n", - ")\n", - "\n", - "kubernetes_create_pvc_op = load_component_from_file(\"../../../components/contrib/kubernetes/Create_PersistentVolumeClaim/component.yaml\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from kubernetes.client.models import V1Volume, V1PersistentVolumeClaimVolumeSource\n", - "def create_dist_pipeline():\n", - " kubernetes_create_pvc_op(name=dist_volume, storage_size= \"5Gi\", namespace=NAMESPACE)\n", - "\n", - "create_volume_run = client.create_run_from_pipeline_func(create_dist_pipeline, arguments={})\n", - "create_volume_run.wait_for_run_completion()" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "parameters = [\n", - " {\"name\": \"lr\", \"type\": \"range\", \"bounds\": [1e-4, 0.2], \"log_scale\": True},\n", - " {\"name\": \"weight_decay\", \"type\": \"range\", \"bounds\": [1e-4, 1e-2]},\n", - " {\"name\": \"eps\", \"type\": \"range\", \"bounds\": 
[1e-8, 1e-2]},\n", - " ]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Define the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "@dsl.pipeline(\n", - " name=\"AX Hpo\", description=\"Estimating best parameters using AX\"\n", - ")\n", - "def pytorch_ax_hpo( # pylint: disable=too-many-arguments\n", - " minio_endpoint=MINIO_ENDPOINT,\n", - " log_bucket=LOG_BUCKET,\n", - " log_dir=f\"tensorboard/logs/{dsl.RUN_ID_PLACEHOLDER}\",\n", - " mar_path=f\"mar/{dsl.RUN_ID_PLACEHOLDER}/model-store\",\n", - " config_prop_path=f\"mar/{dsl.RUN_ID_PLACEHOLDER}/config\",\n", - " model_uri=f\"s3://mlpipeline/mar/{dsl.RUN_ID_PLACEHOLDER}\",\n", - " best_params=f\"hpo/{dsl.RUN_ID_PLACEHOLDER}\",\n", - " tf_image=TENSORBOARD_IMAGE,\n", - " deploy=DEPLOY_NAME,\n", - " isvc_name=ISVC_NAME,\n", - " model=MODEL_NAME,\n", - " namespace=NAMESPACE,\n", - " confusion_matrix_log_dir=f\"confusion_matrix/{dsl.RUN_ID_PLACEHOLDER}/\",\n", - " checkpoint_dir=\"checkpoint_dir/cifar10\",\n", - " input_req=INPUT_REQUEST,\n", - " cookie=COOKIE,\n", - " total_trials=2,\n", - " ingress_gateway=INGRESS_GATEWAY,\n", - "):\n", - " \n", - " \"\"\"This method defines the pipeline tasks and operations\"\"\"\n", - " pod_template_spec = json.dumps({\n", - " \"spec\": {\n", - " \"containers\": [{\n", - " \"env\": [\n", - " {\n", - " \"name\": \"AWS_ACCESS_KEY_ID\",\n", - " \"valueFrom\": {\n", - " \"secretKeyRef\": {\n", - " \"name\": \"mlpipeline-minio-artifact\",\n", - " \"key\": \"accesskey\",\n", - " }\n", - " },\n", - " },\n", - " {\n", - " \"name\": \"AWS_SECRET_ACCESS_KEY\",\n", - " \"valueFrom\": {\n", - " \"secretKeyRef\": {\n", - " \"name\": \"mlpipeline-minio-artifact\",\n", - " \"key\": \"secretkey\",\n", - " }\n", - " },\n", - " },\n", - " {\n", - " \"name\": \"AWS_REGION\",\n", - " \"value\": \"minio\"\n", - " },\n", - " {\n", - " \"name\": \"S3_ENDPOINT\",\n", - " \"value\": f\"{minio_endpoint}\",\n", - " },\n", - " {\n", - " \"name\": \"S3_USE_HTTPS\",\n", - " \"value\": \"0\"\n", - " },\n", - " {\n", - " \"name\": \"S3_VERIFY_SSL\",\n", - " \"value\": \"0\"\n", - " },\n", - " ]\n", - " }]\n", - " }\n", - " })\n", - "\n", - " prepare_tb_task = prepare_tensorboard_op(\n", - " log_dir_uri=f\"s3://{log_bucket}/{log_dir}\",\n", - " image=tf_image,\n", - " pod_template_spec=pod_template_spec,\n", - " ).set_display_name(\"Visualization\")\n", - "\n", - " prep_task = (\n", - " prep_op().after(prepare_tb_task).set_display_name(\"Preprocess & Transform\")\n", - " )\n", - "\n", - " gen_trials_task = generate_trails_op(total_trials, parameters, 'test-accuracy').after(prep_task).set_display_name(\"AX Generate Trials\")\n", - " \n", - " get_keys_task = get_keys_op(gen_trials_task.outputs[\"trial_parameters\"]).after(gen_trials_task).set_display_name(\"Get Keys of Trials\")\n", - " \n", - " confusion_matrix_url = f\"minio://{log_bucket}/{confusion_matrix_log_dir}\"\n", - " script_args = f\"model_name=resnet.pth,\" \\\n", - " f\"confusion_matrix_url={confusion_matrix_url}\"\n", - " ptl_args = f\"max_epochs=1, profiler=pytorch\"\n", - "\n", - " with dsl.ParallelFor(get_keys_task.outputs[\"keys\"]) as item:\n", - " get_element_task = get_element_op(gen_trials_task.outputs[\"trial_parameters\"], item).after(get_keys_task).set_display_name(\"Get Element from key\")\n", - " train_task = (\n", - " train_op(\n", - " trial_id=item,\n", - " input_data=prep_task.outputs[\"output_data\"],\n", - " script_args=script_args,\n", - 
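The search space above is handed to the pipeline's trial-generation component together with the `test-accuracy` objective and `total_trials=2`. As a loose, standalone sketch of the `AxClient` ask/tell loop that such generate/complete-trials components wrap (`train_and_eval` is a hypothetical stand-in for the pipeline's training step, and newer Ax releases replace `objective_name` with an `objectives` argument):

```python
# Loose sketch of the Ax ask/tell loop the pipeline components wrap.
# `train_and_eval` is a hypothetical placeholder for one training trial.
from ax.service.ax_client import AxClient

ax_client = AxClient()
ax_client.create_experiment(
    name="cifar10-hpo",
    parameters=parameters,        # the lr / weight_decay / eps ranges defined above
    objective_name="test-accuracy",
    minimize=False,
)

def train_and_eval(params):
    # Placeholder: train with `params` and return the resulting test accuracy.
    return 0.0

for _ in range(2):                # total_trials=2 in the pipeline definition
    params, trial_index = ax_client.get_next_trial()
    ax_client.complete_trial(trial_index=trial_index, raw_data=train_and_eval(params))

best_parameters, _metrics = ax_client.get_best_parameters()
print(best_parameters)
```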
" model_parameters=get_element_task.outputs[\"output\"],\n", - " ptl_arguments=ptl_args,\n", - " results=results_path\n", - " ).add_pvolumes({volume_mount_path: dsl.PipelineVolume(pvc=dist_volume)}).after(get_element_task).set_display_name(\"Training\")\n", - " # For allocating resources, uncomment below lines\n", - " # .set_memory_request('600M')\n", - " # .set_memory_limit('1200M')\n", - " # .set_cpu_request('700m')\n", - " # .set_cpu_limit('1400m')\n", - " # For GPU uncomment below line and set GPU limit and node selector\n", - " # .set_gpu_limit(1).add_node_selector_constraint('cloud.google.com/gke-accelerator','nvidia-tesla-p4')\n", - " )\n", - " \n", - " complete_trials_task = complete_trails_op(gen_trials_task.outputs[\"client\"], results_path).add_pvolumes({volume_mount_path: dsl.PipelineVolume(pvc=dist_volume)}).after(train_task).set_display_name(\"AX Complete Trials\")\n", - "\n", - " dsl.get_pipeline_conf().add_op_transformer(\n", - " use_k8s_secret(\n", - " secret_name=\"mlpipeline-minio-artifact\",\n", - " k8s_secret_key_to_env={\n", - " \"secretkey\": \"MINIO_SECRET_KEY\",\n", - " \"accesskey\": \"MINIO_ACCESS_KEY\",\n", - " },\n", - " )\n", - " )\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Compile the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [], - "source": [ - "compiler.Compiler().compile(pytorch_ax_hpo, 'pytorch.tar.gz', type_check=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Execute the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "Run details." - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "run = client.run_pipeline(my_experiment.id, 'pytorch_ax_hpo', 'pytorch.tar.gz')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Viewing results\n", - "\n", - "Wait for the pipeline execution to be completed. Sample pipeline shown below" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![](screenshots/ax-hpo-pipeline.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Click on \"AX Complete Trials\" component. 
The best hyperparameters are shown in the Input/Output tab as shown below" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![](screenshots/ax-complete-trials.png)" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![](screenshots/ax-best-parameters.png)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.2" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/samples/contrib/pytorch-samples/Pipeline-Cifar10.ipynb b/samples/contrib/pytorch-samples/Pipeline-Cifar10.ipynb deleted file mode 100644 index 564832a2ac8..00000000000 --- a/samples/contrib/pytorch-samples/Pipeline-Cifar10.ipynb +++ /dev/null @@ -1,730 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Copyright (c) Facebook, Inc. and its affiliates.\n", - "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", - "# you may not use this file except in compliance with the License.\n", - "# You may obtain a copy of the License at\n", - "#\n", - "# http://www.apache.org/licenses/LICENSE-2.0\n", - "#\n", - "# Unless required by applicable law or agreed to in writing, software\n", - "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", - "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", - "# See the License for the specific language governing permissions and\n", - "# limitations under the License." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# KubeFlow Pipelines : Pytorch Cifar10 Image classification\n", - "\n", - "This notebook shows PyTorch CIFAR10 end-to-end classification example using Kubeflow Pipelines. \n", - "\n", - "An example notebook that demonstrates how to:\n", - "\n", - "* Get different tasks needed for the pipeline\n", - "* Create a Kubeflow pipeline\n", - "* Include Pytorch KFP components to preprocess, train, visualize and deploy the model in the pipeline\n", - "* Submit a job for execution\n", - "* Query(prediction and explain) the final deployed model\n" - ] - }, - { - "cell_type": "code", - "execution_count": 29, - "metadata": {}, - "outputs": [], - "source": [ - "! pip uninstall -y kfp\n", - "! 
pip install --no-cache-dir kfp" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## import the necessary packages" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'1.8.12'" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import kfp\n", - "import json\n", - "import os\n", - "from kfp.onprem import use_k8s_secret\n", - "from kfp import components\n", - "from kfp.components import load_component_from_file, load_component_from_url\n", - "from kfp import dsl\n", - "from kfp import compiler\n", - "\n", - "import numpy as np\n", - "import logging\n", - "\n", - "kfp.__version__" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Enter your gateway and the auth token\n", - "[Use this extension on chrome to get token]( https://chrome.google.com/webstore/detail/editthiscookie/fngmhnnpilhplaeedifhccceomclgfbg?hl=en)\n", - "\n", - "![image.png](./image.png)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Update values for the ingress gateway and auth session" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "INGRESS_GATEWAY='http://istio-ingressgateway.istio-system.svc.cluster.local'\n", - "AUTH=\"\" \n", - "NAMESPACE=\"kubeflow-user-example-com\"\n", - "COOKIE=\"authservice_session=\"+AUTH\n", - "EXPERIMENT=\"Default\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Set the Log bucket and Tensorboard Image" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "MINIO_ENDPOINT=\"http://minio-service.kubeflow:9000\"\n", - "LOG_BUCKET=\"mlpipeline\"\n", - "TENSORBOARD_IMAGE=\"public.ecr.aws/pytorch-samples/tboard:latest\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Set the client and create the experiment" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "client = kfp.Client(host=INGRESS_GATEWAY+\"/pipeline\", cookies=COOKIE)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "Experiment details." 
- ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "{'created_at': datetime.datetime(2022, 4, 21, 9, 45, 22, tzinfo=tzlocal()),\n", - " 'description': None,\n", - " 'id': 'b4bee8c3-381b-42a0-9494-bc81eb9aa359',\n", - " 'name': 'Default',\n", - " 'resource_references': [{'key': {'id': 'kubeflow-user-example-com',\n", - " 'type': 'NAMESPACE'},\n", - " 'name': None,\n", - " 'relationship': 'OWNER'}],\n", - " 'storage_state': 'STORAGESTATE_AVAILABLE'}" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.create_experiment(EXPERIMENT)\n", - "experiments = client.list_experiments(namespace=NAMESPACE)\n", - "my_experiment = experiments.experiments[0]\n", - "my_experiment" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Set the Inference parameters" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "DEPLOY_NAME=\"torchserve\"\n", - "MODEL_NAME=\"cifar10\"\n", - "ISVC_NAME=DEPLOY_NAME+\".\"+NAMESPACE+\".\"+\"example.com\"\n", - "INPUT_REQUEST=\"https://raw.githubusercontent.com/kubeflow/pipelines/master/samples/contrib/pytorch-samples/cifar10/input.json\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Load the the components yaml files for setting up the components" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Processing prediction_component.yaml\n", - "Processing ax_complete_trials_component.yaml\n", - "Processing preprocess_component.yaml\n", - "Processing train_component.yaml\n", - "Processing tensorboard_component.yaml\n", - "Processing ax_generate_trials_component.yaml\n", - "Processing minio_component.yaml\n", - "Processing copy_component.yaml\n", - "Processing ax_train_component.yaml\n" - ] - } - ], - "source": [ - "! 
python utils/generate_templates.py cifar10/template_mapping.json" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [], - "source": [ - "prepare_tensorboard_op = load_component_from_file(\"yaml/tensorboard_component.yaml\")\n", - "\n", - "prep_op = components.load_component_from_file(\n", - " \"yaml/preprocess_component.yaml\"\n", - ")\n", - "\n", - "train_op = components.load_component_from_file(\n", - " \"yaml/train_component.yaml\"\n", - ")\n", - "deploy_op = load_component_from_file(\n", - " \"../../../components/kserve/component.yaml\"\n", - ")\n", - "\n", - "pred_op = load_component_from_file(\"yaml/prediction_component.yaml\")\n", - "\n", - "minio_op = components.load_component_from_file(\n", - " \"yaml/minio_component.yaml\"\n", - ")\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Define the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [], - "source": [ - "\n", - "@dsl.pipeline(\n", - " name=\"Training Cifar10 pipeline\", description=\"Cifar 10 dataset pipeline\"\n", - ")\n", - "def pytorch_cifar10( # pylint: disable=too-many-arguments\n", - " minio_endpoint=MINIO_ENDPOINT,\n", - " log_bucket=LOG_BUCKET,\n", - " log_dir=f\"tensorboard/logs/{dsl.RUN_ID_PLACEHOLDER}\",\n", - " mar_path=f\"mar/{dsl.RUN_ID_PLACEHOLDER}/model-store\",\n", - " config_prop_path=f\"mar/{dsl.RUN_ID_PLACEHOLDER}/config\",\n", - " model_uri=f\"s3://mlpipeline/mar/{dsl.RUN_ID_PLACEHOLDER}\",\n", - " tf_image=TENSORBOARD_IMAGE,\n", - " deploy=DEPLOY_NAME,\n", - " isvc_name=ISVC_NAME,\n", - " model=MODEL_NAME,\n", - " namespace=NAMESPACE,\n", - " confusion_matrix_log_dir=f\"confusion_matrix/{dsl.RUN_ID_PLACEHOLDER}/\",\n", - " checkpoint_dir=\"checkpoint_dir/cifar10\",\n", - " input_req=INPUT_REQUEST,\n", - " cookie=COOKIE,\n", - " ingress_gateway=INGRESS_GATEWAY,\n", - "):\n", - " def sleep_op(seconds):\n", - " \"\"\"Sleep for a while.\"\"\"\n", - " return dsl.ContainerOp(\n", - " name=\"Sleep \" + str(seconds) + \" seconds\",\n", - " image=\"python:alpine3.6\",\n", - " command=[\"sh\", \"-c\"],\n", - " arguments=[\n", - " 'python -c \"import time; time.sleep($0)\"',\n", - " str(seconds)\n", - " ],\n", - " )\n", - "\n", - " \"\"\"This method defines the pipeline tasks and operations\"\"\"\n", - " pod_template_spec = json.dumps({\n", - " \"spec\": {\n", - " \"containers\": [{\n", - " \"env\": [\n", - " {\n", - " \"name\": \"AWS_ACCESS_KEY_ID\",\n", - " \"valueFrom\": {\n", - " \"secretKeyRef\": {\n", - " \"name\": \"mlpipeline-minio-artifact\",\n", - " \"key\": \"accesskey\",\n", - " }\n", - " },\n", - " },\n", - " {\n", - " \"name\": \"AWS_SECRET_ACCESS_KEY\",\n", - " \"valueFrom\": {\n", - " \"secretKeyRef\": {\n", - " \"name\": \"mlpipeline-minio-artifact\",\n", - " \"key\": \"secretkey\",\n", - " }\n", - " },\n", - " },\n", - " {\n", - " \"name\": \"AWS_REGION\",\n", - " \"value\": \"minio\"\n", - " },\n", - " {\n", - " \"name\": \"S3_ENDPOINT\",\n", - " \"value\": f\"{minio_endpoint}\",\n", - " },\n", - " {\n", - " \"name\": \"S3_USE_HTTPS\",\n", - " \"value\": \"0\"\n", - " },\n", - " {\n", - " \"name\": \"S3_VERIFY_SSL\",\n", - " \"value\": \"0\"\n", - " },\n", - " ]\n", - " }]\n", - " }\n", - " })\n", - "\n", - " prepare_tb_task = prepare_tensorboard_op(\n", - " log_dir_uri=f\"s3://{log_bucket}/{log_dir}\",\n", - " image=tf_image,\n", - " pod_template_spec=pod_template_spec,\n", - " ).set_display_name(\"Visualization\")\n", - "\n", - " prep_task = (\n", - " 
prep_op().after(prepare_tb_task\n", - " ).set_display_name(\"Preprocess & Transform\")\n", - " )\n", - " confusion_matrix_url = f\"minio://{log_bucket}/{confusion_matrix_log_dir}\"\n", - " script_args = f\"model_name=resnet.pth,\" \\\n", - " f\"confusion_matrix_url={confusion_matrix_url}\"\n", - " # For GPU, set number of devices and strategy type\n", - " ptl_args = f\"max_epochs=1, devices=0, strategy=None, profiler=pytorch\"\n", - " train_task = (\n", - " train_op(\n", - " input_data=prep_task.outputs[\"output_data\"],\n", - " script_args=script_args,\n", - " ptl_arguments=ptl_args\n", - " ).after(prep_task).set_display_name(\"Training\")\n", - " # For allocating resources, uncomment below lines\n", - " # .set_memory_request('600M')\n", - " # .set_memory_limit('1200M')\n", - " # .set_cpu_request('700m')\n", - " # .set_cpu_limit('1400m')\n", - " # For GPU uncomment below line and set GPU limit and node selector\n", - " # .set_gpu_limit(1).add_node_selector_constraint('cloud.google.com/gke-accelerator','nvidia-tesla-p4')\n", - " )\n", - "\n", - "\n", - " (\n", - " minio_op(\n", - " bucket_name=\"mlpipeline\",\n", - " folder_name=log_dir,\n", - " input_path=train_task.outputs[\"tensorboard_root\"],\n", - " filename=\"\",\n", - " ).after(train_task).set_display_name(\"Tensorboard Events Pusher\")\n", - " )\n", - "\n", - " (\n", - " minio_op(\n", - " bucket_name=\"mlpipeline\",\n", - " folder_name=checkpoint_dir,\n", - " input_path=train_task.outputs[\"checkpoint_dir\"],\n", - " filename=\"\",\n", - " ).after(train_task).set_display_name(\"checkpoint_dir Pusher\")\n", - " )\n", - "\n", - " minio_mar_upload = (\n", - " minio_op(\n", - " bucket_name=\"mlpipeline\",\n", - " folder_name=mar_path,\n", - " input_path=train_task.outputs[\"checkpoint_dir\"],\n", - " filename=\"cifar10_test.mar\",\n", - " ).after(train_task).set_display_name(\"Mar Pusher\")\n", - " )\n", - "\n", - " (\n", - " minio_op(\n", - " bucket_name=\"mlpipeline\",\n", - " folder_name=config_prop_path,\n", - " input_path=train_task.outputs[\"checkpoint_dir\"],\n", - " filename=\"config.properties\",\n", - " ).after(train_task).set_display_name(\"Conifg Pusher\")\n", - " )\n", - "\n", - " model_uri = str(model_uri)\n", - " # pylint: disable=unused-variable\n", - " isvc_yaml = \"\"\"\n", - " apiVersion: \"serving.kserve.io/v1beta1\"\n", - " kind: \"InferenceService\"\n", - " metadata:\n", - " name: {}\n", - " namespace: {}\n", - " spec:\n", - " predictor:\n", - " serviceAccountName: sa\n", - " pytorch:\n", - " protocolVersion: v2\n", - " storageUri: {}\n", - " resources:\n", - " requests: \n", - " cpu: 4\n", - " memory: 8Gi\n", - " limits:\n", - " cpu: 4\n", - " memory: 8Gi\n", - " \"\"\".format(deploy, namespace, model_uri)\n", - "\n", - " # For GPU inference use below yaml with gpu count and accelerator\n", - " gpu_count = \"1\"\n", - " accelerator = \"nvidia-tesla-p4\"\n", - " isvc_gpu_yaml = \"\"\"# pylint: disable=unused-variable\n", - " apiVersion: \"serving.kserve.io/v1beta1\"\n", - " kind: \"InferenceService\"\n", - " metadata:\n", - " name: {}\n", - " namespace: {}\n", - " spec:\n", - " predictor:\n", - " serviceAccountName: sa\n", - " pytorch:\n", - " protocolVersion: v2\n", - " storageUri: {}\n", - " resources:\n", - " requests: \n", - " cpu: 16\n", - " memory: 24Gi\n", - " limits:\n", - " cpu: 16\n", - " memory: 24Gi\n", - " nvidia.com/gpu: {}\n", - " nodeSelector:\n", - " cloud.google.com/gke-accelerator: {}\n", - "\"\"\".format(deploy, namespace, model_uri, gpu_count, accelerator)\n", - " # Update 
inferenceservice_yaml for GPU inference\n", - " deploy_task = (\n", - " deploy_op(action=\"apply\", inferenceservice_yaml=isvc_yaml\n", - " ).after(minio_mar_upload).set_display_name(\"Deployer\")\n", - " )\n", - " # Wait here for model to be loaded in torchserve for inference\n", - " sleep_task = sleep_op(60).after(deploy_task).set_display_name(\"Sleep\")\n", - " # Make Inference request\n", - " pred_task = (\n", - " pred_op(\n", - " host_name=isvc_name,\n", - " input_request=input_req,\n", - " cookie=cookie,\n", - " url=ingress_gateway,\n", - " model=model,\n", - " inference_type=\"infer\",\n", - " ).after(sleep_task).set_display_name(\"Prediction\")\n", - " )\n", - " (\n", - " pred_op(\n", - " host_name=isvc_name,\n", - " input_request=input_req,\n", - " cookie=cookie,\n", - " url=ingress_gateway,\n", - " model=model,\n", - " inference_type=\"explain\",\n", - " ).after(pred_task).set_display_name(\"Explanation\")\n", - " )\n", - "\n", - " dsl.get_pipeline_conf().add_op_transformer(\n", - " use_k8s_secret(\n", - " secret_name=\"mlpipeline-minio-artifact\",\n", - " k8s_secret_key_to_env={\n", - " \"secretkey\": \"MINIO_SECRET_KEY\",\n", - " \"accesskey\": \"MINIO_ACCESS_KEY\",\n", - " },\n", - " )\n", - " )\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Compile the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [], - "source": [ - "compiler.Compiler().compile(pytorch_cifar10, 'pytorch.tar.gz', type_check=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Execute the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "Run details." - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "run = client.run_pipeline(my_experiment.id, 'pytorch-cifar10', 'pytorch.tar.gz')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Wait for inference service below to go to READY True state" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "NAME URL READY PREV LATEST PREVROLLEDOUTREVISION LATESTREADYREVISION AGE\n", - "bertserve http://bertserve.kubeflow-user-example-com.example.com True 100 bertserve-predictor-default-00003 3h11m\n", - "torchserve http://torchserve.kubeflow-user-example-com.example.com True 100 torchserve-predictor-default-00001 5m19s\n" - ] - } - ], - "source": [ - "!kubectl get isvc $DEPLOY" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Get the Inference service name" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'torchserve.kubeflow-user-example-com.example.com'" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "INFERENCE_SERVICE_LIST = ! 
kubectl get isvc {DEPLOY_NAME} -n {NAMESPACE} -o json | python3 -c \"import sys, json; print(json.load(sys.stdin)['status']['url'])\"| tr -d '\"' | cut -d \"/\" -f 3\n", - "INFERENCE_SERVICE_NAME = INFERENCE_SERVICE_LIST[0]\n", - "INFERENCE_SERVICE_NAME" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Use the deployed model for prediction request and save the output into a json" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [], - "source": [ - "!python cifar10/tobytes.py cifar10/kitten.png" - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "metadata": {}, - "outputs": [], - "source": [ - "!curl -v -H \"Host: $INFERENCE_SERVICE_NAME\" -H \"Cookie: $COOKIE\" \"$INGRESS_GATEWAY/v2/models/$MODEL_NAME/infer\" -d @./cifar10/kitten.json > cifar10_prediction_output.json" - ] - }, - { - "cell_type": "code", - "execution_count": 26, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{\"id\": \"fda5a0a4-fe03-4476-8258-fee5017e6c50\", \"model_name\": \"cifar10_test\", \"model_version\": \"1\", \"outputs\": [{\"name\": \"predict\", \"shape\": [], \"datatype\": \"BYTES\", \"data\": [{\"truck\": 0.7257930040359497, \"car\": 0.12065636366605759, \"plane\": 0.0643853172659874, \"frog\": 0.030459348112344742, \"ship\": 0.01999029517173767}]}]}" - ] - } - ], - "source": [ - "! cat cifar10_prediction_output.json" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Use the deployed model for explain request and save the output into a json" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "metadata": {}, - "outputs": [], - "source": [ - "!curl -v -H \"Host: $INFERENCE_SERVICE_NAME\" -H \"Cookie: $COOKIE\" \"$INGRESS_GATEWAY/v2/models/$MODEL_NAME/explain\" -d @./cifar10/kitten.json > cifar10_explanation_output.json" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Clean up\n", - "#### Delete Viewers, Inference Services and Completed pods" - ] - }, - { - "cell_type": "code", - "execution_count": 25, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "! kubectl delete --all isvc -n $NAMESPACE" - ] - }, - { - "cell_type": "code", - "execution_count": 26, - "metadata": {}, - "outputs": [], - "source": [ - "! kubectl delete pod --field-selector=status.phase==Succeeded -n $NAMESPACE" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.10" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/samples/contrib/pytorch-samples/README.md b/samples/contrib/pytorch-samples/README.md deleted file mode 100644 index ac1ef30e70d..00000000000 --- a/samples/contrib/pytorch-samples/README.md +++ /dev/null @@ -1,91 +0,0 @@ -# PyTorch Pipeline Samples - -This folder contains different Kubeflow pipeline PyTorch examples using the PyTorch KFP Components SDK. - -1. Cifar10 example for Computer Vision -2. 
BERT example for NLP - -Please navigate to the following link for running the examples with Google Vertex AI pipeline - -https://github.com/amygdala/code-snippets/tree/master/ml/vertex_pipelines/pytorch/cifar - -Use the following link for installing KFP python sdk - -https://github.com/kubeflow/pipelines/tree/master/sdk/python - -## Prerequisites - -Check the following prerequisites before running the examples - -**[Prerequisites](prerequisites.md)** - - -## Note: The Samples can be run in 2 ways - -1. From Kubeflow Jupyter Notebook mentioned in [Option 1](##-Option-1.-Running-from-Kubeflow-Jupyter-Notebook) -2. compiling and uploading to KFP mentioned in [Option 2](##-Option-2.-Compiling-and-Running-by-uploading-to-Kubeflow-Pipelines) - -## Option 1. Running from Kubeflow Jupyter Notebook - -This involves steps for building and running the pipeline from Kubeflow Jupyter notebook. -Here the pipeline is defined in a Jupyter notebook and run directly from the Jupyter notebook. - -Use the following notebook files for running the Cifar 10 and Bert examples - -Cifar 10 - [Pipeline-Cifar10.ipynb](Pipeline-Cifar10.ipynb) - -Bert - [Pipeline-Bert.ipynb](Pipeline-Bert.ipynb) - -**[Steps to Run the example pipelines from Kubeflow Jupyter Notebook](cluster_build.md)** - -## Option 2. Compiling and Running by uploading to Kubeflow Pipelines - -This involves steps to build the pipeline in local machine and run it by uploading the -pipeline file to the Kubeflow Dashboard. Here we have a python file that defines the pipeline. The python file containing the pipeline is compiled and the generated yaml is uploaded to the KFP for creating a run out of it. - -Use the following python files building the pipeline locally for Cifar 10 and Bert examples - -Cifar 10 - [cifar10/pipeline.py](cifar10/pipeline.py) - -Bert - [bert/pipeline.py](bert/pipeline.py) - -**[Steps to run the examples pipelines by compiling and uploading to KFP](local_build.md)** - -## Other Examples - -## PyTorch CIFAR10 with Captum Insights - -In this example, we train a PyTorch Lightning model to using image classification cifar10 dataset with Captum Insights. This uses PyTorch KFP components to preprocess, train, visualize and deploy the model in the pipeline -and interpretation of the model using the Captum Insights. - -### Run the notebook - -Open the example notebook and run to deploy the example in KFP. - -Cifar 10 Captum Insights - [Pipeline-Cifar10-Captum-Insights.ipynb](Pipeline-Cifar10-Captum-Insights.ipynb) - -## Hyper Parameter Optimization with AX - -In this example, we train a PyTorch Lightning model to using image classification cifar10 dataset. A parent run will be created during the training process,which would dump the baseline model and relevant parameters,metrics and model along with its summary,subsequently followed by a set of nested child runs, which will dump the trial results. The best parameters would be dumped into the parent run once the experiments are completed. - -### Run the notebook - -Open the example notebook and run to deploy the example in KFP. - -Cifar 10 HPO - [Pipeline-Cifar10-hpo.ipynb](Pipeline-Cifar10-hpo.ipynb) - -## PyTorch Distributed Training with PyTorch Job Operator - -In this example, we deploy a pipeline to launch the distributed training of this BERT model file using the pytorch operator and deploy with torchserve using KServe. - -### Run the notebook - -Open the example notebook and run to deploy the example in KFP. 
- -Bert Distributed Training - [Pipeline-Bert-Dist.ipynb](Pipeline-Bert-Dist.ipynb) - -**Refer: [Running Pipelines in Kubeflow Jupyter Notebook](cluster_build.md)** - -## Contributing to PyTorch KFP Samples - -Before you start contributing to PyTorch KFP Samples, read the guidelines in [How to Contribute](contributing.md). To learn how to build and deploy PyTorch components with pytorch-kfp-components SDK. diff --git a/samples/contrib/pytorch-samples/bert/agnews_classification_pytorch.py b/samples/contrib/pytorch-samples/bert/agnews_classification_pytorch.py deleted file mode 100644 index 9b993c51915..00000000000 --- a/samples/contrib/pytorch-samples/bert/agnews_classification_pytorch.py +++ /dev/null @@ -1,247 +0,0 @@ -# !/usr/bin/env/python3 -# Copyright (c) Facebook, Inc. and its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""AG news Classification script.""" -import os -from argparse import ArgumentParser -from pathlib import Path -from pytorch_lightning.loggers import TensorBoardLogger -from pytorch_lightning.callbacks import ( - EarlyStopping, - LearningRateMonitor, - ModelCheckpoint, -) -from pytorch_kfp_components.components.visualization.component import Visualization -from pytorch_kfp_components.components.trainer.component import Trainer -from pytorch_kfp_components.components.mar.component import MarGeneration -from pytorch_kfp_components.components.utils.argument_parsing import parse_input_args -# Argument parser for user defined paths -import pytorch_lightning -print("Using Pytorch Lighting: {}".format(pytorch_lightning.__version__)) #pylint: disable=no-member -parser = ArgumentParser() - -parser.add_argument( - "--dataset_path", - type=str, - default="output/processing", - help="Path to input dataset", -) - -parser.add_argument( - "--mlpipeline_ui_metadata", - type=str, - default="mlpipeline-ui-metadata.json", - help="Path to write mlpipeline-ui-metadata.json", -) - -parser.add_argument( - "--mlpipeline_metrics", - type=str, - default="mlpipeline-metrics", - help="Path to write mlpipeline-metrics.json", -) - -parser.add_argument( - "--script_args", - type=str, - help="Arguments for bert agnews classification script", -) - -parser.add_argument( - "--ptl_args", - type=str, - help="Arguments specific to PTL trainer", -) - -parser.add_argument( - "--checkpoint_dir", - default="output/train/models", - type=str, - help="Arguments specific to PTL trainer", -) - -parser.add_argument( - "--tensorboard_root", - default="output/tensorboard", - type=str, - help="Arguments specific to PTL trainer", -) -args = vars(parser.parse_args()) -script_args = args["script_args"] -ptl_args = args["ptl_args"] - -TENSOBOARD_ROOT = args["tensorboard_root"] -CHECKPOINT_DIR = args["checkpoint_dir"] -DATASET_PATH = args["dataset_path"] - -script_dict: dict = parse_input_args(input_str=script_args) -script_dict["checkpoint_dir"] = CHECKPOINT_DIR - -ptl_dict: dict = parse_input_args(input_str=ptl_args) - -# Enabling Tensorboard Logger, ModelCheckpoint, Earlystopping - -lr_logger = 
LearningRateMonitor() -tboard = TensorBoardLogger(TENSOBOARD_ROOT) -early_stopping = EarlyStopping( - monitor="val_loss", mode="min", patience=5, verbose=True -) -checkpoint_callback = ModelCheckpoint( - dirpath=CHECKPOINT_DIR, - filename="bert_{epoch:02d}", - save_top_k=1, - verbose=True, - monitor="val_loss", - mode="min", -) - -if "accelerator" in ptl_dict and ptl_dict["accelerator"] == "None": - ptl_dict["accelerator"] = None - -# Setting the trainer specific arguments -trainer_args = { - "logger": tboard, - "enable_checkpointing": False, - "callbacks": [lr_logger, early_stopping], -} - -if not ptl_dict["max_epochs"]: - trainer_args["max_epochs"] = 1 -else: - trainer_args["max_epochs"] = ptl_dict["max_epochs"] - -if "profiler" in ptl_dict and ptl_dict["profiler"] != "": - trainer_args["profiler"] = ptl_dict["profiler"] - -# Setting the datamodule specific arguments -data_module_args = { - "train_glob": DATASET_PATH, - "num_samples": script_dict["num_samples"] -} - -# Creating parent directories -Path(TENSOBOARD_ROOT).mkdir(parents=True, exist_ok=True) -Path(CHECKPOINT_DIR).mkdir(parents=True, exist_ok=True) - -# Updating all the input parameter to PTL dict - -trainer_args.update(ptl_dict) - -# Initiating the training process -trainer = Trainer( - module_file="bert_train.py", - data_module_file="bert_datamodule.py", - module_file_args=script_dict, - data_module_args=data_module_args, - trainer_args=trainer_args, -) - -print("Generated tensorboard files") -for root, dirs, files in os.walk(args["tensorboard_root"]): # pylint: disable=unused-variable - for file in files: - print(file) - -model = trainer.ptl_trainer.lightning_module - -if trainer.ptl_trainer.global_rank == 0: - # Mar file generation - - bert_dir, _ = os.path.split(os.path.abspath(__file__)) - - mar_config = { - "MODEL_NAME": - "bert_test", - "MODEL_FILE": - os.path.join(bert_dir, "bert_train.py"), - "HANDLER": - os.path.join(bert_dir, "bert_handler.py"), - "SERIALIZED_FILE": - os.path.join(CHECKPOINT_DIR, script_dict["model_name"]), - "VERSION": - "1", - "EXPORT_PATH": - CHECKPOINT_DIR, - "CONFIG_PROPERTIES": - os.path.join(bert_dir, "config.properties"), - "EXTRA_FILES": - "{},{},{}".format( - os.path.join(bert_dir, "bert-base-uncased-vocab.txt"), - os.path.join(bert_dir, "index_to_name.json"), - os.path.join(bert_dir, "wrapper.py") - ), - "REQUIREMENTS_FILE": - os.path.join(bert_dir, "requirements.txt") - } - - MarGeneration(mar_config=mar_config, mar_save_path=CHECKPOINT_DIR) - - print("Generated checkpoint files") - for root, dirs, files in os.walk(CHECKPOINT_DIR): # pylint: disable=unused-variable - for file in files: - path = os.path.join(root, file) - size = os.stat(path).st_size # in bytes - if ".pth" in file: - print("Removing file: ", path) - os.remove(path) - - classes = [ - "World", - "Sports", - "Business", - "Sci/Tech", - ] - - # model = trainer.ptl_trainer.model - - target_index_list = list(set(model.target)) - - class_list = [] - for index in target_index_list: - class_list.append(classes[index]) - - confusion_matrix_dict = { - "actuals": model.target, - "preds": model.preds, - "classes": class_list, - "url": script_dict["confusion_matrix_url"], - } - - test_accuracy = round(float(model.test_acc.compute()), 2) - - print("Model test accuracy: ", test_accuracy) - - visualization_arguments = { - "input": { - "tensorboard_root": TENSOBOARD_ROOT, - "checkpoint_dir": CHECKPOINT_DIR, - "dataset_path": DATASET_PATH, - "model_name": script_dict["model_name"], - "confusion_matrix_url": 
script_dict["confusion_matrix_url"], - }, - "output": { - "mlpipeline_ui_metadata": args["mlpipeline_ui_metadata"], - "mlpipeline_metrics": args["mlpipeline_metrics"], - }, - } - - markdown_dict = {"storage": "inline", "source": visualization_arguments} - - print("Visualization Arguments: ", markdown_dict) - - visualization = Visualization( - test_accuracy=test_accuracy, - confusion_matrix_dict=confusion_matrix_dict, - mlpipeline_ui_metadata=args["mlpipeline_ui_metadata"], - mlpipeline_metrics=args["mlpipeline_metrics"], - markdown=markdown_dict, - ) diff --git a/samples/contrib/pytorch-samples/bert/bert-base-uncased-vocab.txt b/samples/contrib/pytorch-samples/bert/bert-base-uncased-vocab.txt deleted file mode 100644 index fb140275c15..00000000000 --- a/samples/contrib/pytorch-samples/bert/bert-base-uncased-vocab.txt +++ /dev/null @@ -1,30522 +0,0 @@ -[PAD] -[unused0] -[unused1] -[unused2] -[unused3] -[unused4] -[unused5] -[unused6] -[unused7] -[unused8] -[unused9] -[unused10] -[unused11] -[unused12] -[unused13] -[unused14] -[unused15] -[unused16] -[unused17] -[unused18] -[unused19] -[unused20] -[unused21] -[unused22] -[unused23] -[unused24] -[unused25] -[unused26] -[unused27] -[unused28] -[unused29] -[unused30] -[unused31] -[unused32] -[unused33] -[unused34] -[unused35] -[unused36] -[unused37] -[unused38] -[unused39] -[unused40] -[unused41] -[unused42] -[unused43] -[unused44] -[unused45] -[unused46] -[unused47] -[unused48] -[unused49] -[unused50] -[unused51] -[unused52] -[unused53] -[unused54] -[unused55] -[unused56] -[unused57] -[unused58] -[unused59] -[unused60] -[unused61] -[unused62] -[unused63] -[unused64] -[unused65] -[unused66] -[unused67] -[unused68] -[unused69] -[unused70] -[unused71] -[unused72] -[unused73] -[unused74] -[unused75] -[unused76] -[unused77] -[unused78] -[unused79] -[unused80] -[unused81] -[unused82] -[unused83] -[unused84] -[unused85] -[unused86] -[unused87] -[unused88] -[unused89] -[unused90] -[unused91] -[unused92] -[unused93] -[unused94] -[unused95] -[unused96] -[unused97] -[unused98] -[UNK] -[CLS] -[SEP] -[MASK] -[unused99] -[unused100] -[unused101] -[unused102] -[unused103] -[unused104] -[unused105] -[unused106] -[unused107] -[unused108] -[unused109] -[unused110] -[unused111] -[unused112] -[unused113] -[unused114] -[unused115] -[unused116] -[unused117] -[unused118] -[unused119] -[unused120] -[unused121] -[unused122] -[unused123] -[unused124] -[unused125] -[unused126] -[unused127] -[unused128] -[unused129] -[unused130] -[unused131] -[unused132] -[unused133] -[unused134] -[unused135] -[unused136] -[unused137] -[unused138] -[unused139] -[unused140] -[unused141] -[unused142] -[unused143] -[unused144] -[unused145] -[unused146] -[unused147] -[unused148] -[unused149] -[unused150] -[unused151] -[unused152] -[unused153] -[unused154] -[unused155] -[unused156] -[unused157] -[unused158] -[unused159] -[unused160] -[unused161] -[unused162] -[unused163] -[unused164] -[unused165] -[unused166] -[unused167] -[unused168] -[unused169] -[unused170] -[unused171] -[unused172] -[unused173] -[unused174] -[unused175] -[unused176] -[unused177] -[unused178] -[unused179] -[unused180] -[unused181] -[unused182] -[unused183] -[unused184] -[unused185] -[unused186] -[unused187] -[unused188] -[unused189] -[unused190] -[unused191] -[unused192] -[unused193] -[unused194] -[unused195] -[unused196] -[unused197] -[unused198] -[unused199] -[unused200] -[unused201] -[unused202] -[unused203] -[unused204] -[unused205] -[unused206] -[unused207] -[unused208] -[unused209] -[unused210] -[unused211] 
-[unused212] -[unused213] -[unused214] -[unused215] -[unused216] -[unused217] -[unused218] -[unused219] -[unused220] -[unused221] -[unused222] -[unused223] -[unused224] -[unused225] -[unused226] -[unused227] -[unused228] -[unused229] -[unused230] -[unused231] -[unused232] -[unused233] -[unused234] -[unused235] -[unused236] -[unused237] -[unused238] -[unused239] -[unused240] -[unused241] -[unused242] -[unused243] -[unused244] -[unused245] -[unused246] -[unused247] -[unused248] -[unused249] -[unused250] -[unused251] -[unused252] -[unused253] -[unused254] -[unused255] -[unused256] -[unused257] -[unused258] -[unused259] -[unused260] -[unused261] -[unused262] -[unused263] -[unused264] -[unused265] -[unused266] -[unused267] -[unused268] -[unused269] -[unused270] -[unused271] -[unused272] -[unused273] -[unused274] -[unused275] -[unused276] -[unused277] -[unused278] -[unused279] -[unused280] -[unused281] -[unused282] -[unused283] -[unused284] -[unused285] -[unused286] -[unused287] -[unused288] -[unused289] -[unused290] -[unused291] -[unused292] -[unused293] -[unused294] -[unused295] -[unused296] -[unused297] -[unused298] -[unused299] -[unused300] -[unused301] -[unused302] -[unused303] -[unused304] -[unused305] -[unused306] -[unused307] -[unused308] -[unused309] -[unused310] -[unused311] -[unused312] -[unused313] -[unused314] -[unused315] -[unused316] -[unused317] -[unused318] -[unused319] -[unused320] -[unused321] -[unused322] -[unused323] -[unused324] -[unused325] -[unused326] -[unused327] -[unused328] -[unused329] -[unused330] -[unused331] -[unused332] -[unused333] -[unused334] -[unused335] -[unused336] -[unused337] -[unused338] -[unused339] -[unused340] -[unused341] -[unused342] -[unused343] -[unused344] -[unused345] -[unused346] -[unused347] -[unused348] -[unused349] -[unused350] -[unused351] -[unused352] -[unused353] -[unused354] -[unused355] -[unused356] -[unused357] -[unused358] -[unused359] -[unused360] -[unused361] -[unused362] -[unused363] -[unused364] -[unused365] -[unused366] -[unused367] -[unused368] -[unused369] -[unused370] -[unused371] -[unused372] -[unused373] -[unused374] -[unused375] -[unused376] -[unused377] -[unused378] -[unused379] -[unused380] -[unused381] -[unused382] -[unused383] -[unused384] -[unused385] -[unused386] -[unused387] -[unused388] -[unused389] -[unused390] -[unused391] -[unused392] -[unused393] -[unused394] -[unused395] -[unused396] -[unused397] -[unused398] -[unused399] -[unused400] -[unused401] -[unused402] -[unused403] -[unused404] -[unused405] -[unused406] -[unused407] -[unused408] -[unused409] -[unused410] -[unused411] -[unused412] -[unused413] -[unused414] -[unused415] -[unused416] -[unused417] -[unused418] -[unused419] -[unused420] -[unused421] -[unused422] -[unused423] -[unused424] -[unused425] -[unused426] -[unused427] -[unused428] -[unused429] -[unused430] -[unused431] -[unused432] -[unused433] -[unused434] -[unused435] -[unused436] -[unused437] -[unused438] -[unused439] -[unused440] -[unused441] -[unused442] -[unused443] -[unused444] -[unused445] -[unused446] -[unused447] -[unused448] -[unused449] -[unused450] -[unused451] -[unused452] -[unused453] -[unused454] -[unused455] -[unused456] -[unused457] -[unused458] -[unused459] -[unused460] -[unused461] -[unused462] -[unused463] -[unused464] -[unused465] -[unused466] -[unused467] -[unused468] -[unused469] -[unused470] -[unused471] -[unused472] -[unused473] -[unused474] -[unused475] -[unused476] -[unused477] -[unused478] -[unused479] -[unused480] -[unused481] -[unused482] -[unused483] -[unused484] 
-[unused485] -[unused486] -[unused487] -[unused488] -[unused489] -[unused490] -[unused491] -[unused492] -[unused493] -[unused494] -[unused495] -[unused496] -[unused497] -[unused498] -[unused499] -[unused500] -[unused501] -[unused502] -[unused503] -[unused504] -[unused505] -[unused506] -[unused507] -[unused508] -[unused509] -[unused510] -[unused511] -[unused512] -[unused513] -[unused514] -[unused515] -[unused516] -[unused517] -[unused518] -[unused519] -[unused520] -[unused521] -[unused522] -[unused523] -[unused524] -[unused525] -[unused526] -[unused527] -[unused528] -[unused529] -[unused530] -[unused531] -[unused532] -[unused533] -[unused534] -[unused535] -[unused536] -[unused537] -[unused538] -[unused539] -[unused540] -[unused541] -[unused542] -[unused543] -[unused544] -[unused545] -[unused546] -[unused547] -[unused548] -[unused549] -[unused550] -[unused551] -[unused552] -[unused553] -[unused554] -[unused555] -[unused556] -[unused557] -[unused558] -[unused559] -[unused560] -[unused561] -[unused562] -[unused563] -[unused564] -[unused565] -[unused566] -[unused567] -[unused568] -[unused569] -[unused570] -[unused571] -[unused572] -[unused573] -[unused574] -[unused575] -[unused576] -[unused577] -[unused578] -[unused579] -[unused580] -[unused581] -[unused582] -[unused583] -[unused584] -[unused585] -[unused586] -[unused587] -[unused588] -[unused589] -[unused590] -[unused591] -[unused592] -[unused593] -[unused594] -[unused595] -[unused596] -[unused597] -[unused598] -[unused599] -[unused600] -[unused601] -[unused602] -[unused603] -[unused604] -[unused605] -[unused606] -[unused607] -[unused608] -[unused609] -[unused610] -[unused611] -[unused612] -[unused613] -[unused614] -[unused615] -[unused616] -[unused617] -[unused618] -[unused619] -[unused620] -[unused621] -[unused622] -[unused623] -[unused624] -[unused625] -[unused626] -[unused627] -[unused628] -[unused629] -[unused630] -[unused631] -[unused632] -[unused633] -[unused634] -[unused635] -[unused636] -[unused637] -[unused638] -[unused639] -[unused640] -[unused641] -[unused642] -[unused643] -[unused644] -[unused645] -[unused646] -[unused647] -[unused648] -[unused649] -[unused650] -[unused651] -[unused652] -[unused653] -[unused654] -[unused655] -[unused656] -[unused657] -[unused658] -[unused659] -[unused660] -[unused661] -[unused662] -[unused663] -[unused664] -[unused665] -[unused666] -[unused667] -[unused668] -[unused669] -[unused670] -[unused671] -[unused672] -[unused673] -[unused674] -[unused675] -[unused676] -[unused677] -[unused678] -[unused679] -[unused680] -[unused681] -[unused682] -[unused683] -[unused684] -[unused685] -[unused686] -[unused687] -[unused688] -[unused689] -[unused690] -[unused691] -[unused692] -[unused693] -[unused694] -[unused695] -[unused696] -[unused697] -[unused698] -[unused699] -[unused700] -[unused701] -[unused702] -[unused703] -[unused704] -[unused705] -[unused706] -[unused707] -[unused708] -[unused709] -[unused710] -[unused711] -[unused712] -[unused713] -[unused714] -[unused715] -[unused716] -[unused717] -[unused718] -[unused719] -[unused720] -[unused721] -[unused722] -[unused723] -[unused724] -[unused725] -[unused726] -[unused727] -[unused728] -[unused729] -[unused730] -[unused731] -[unused732] -[unused733] -[unused734] -[unused735] -[unused736] -[unused737] -[unused738] -[unused739] -[unused740] -[unused741] -[unused742] -[unused743] -[unused744] -[unused745] -[unused746] -[unused747] -[unused748] -[unused749] -[unused750] -[unused751] -[unused752] -[unused753] -[unused754] -[unused755] -[unused756] -[unused757] 
-[unused758] -[unused759] -[unused760] -[unused761] -[unused762] -[unused763] -[unused764] -[unused765] -[unused766] -[unused767] -[unused768] -[unused769] -[unused770] -[unused771] -[unused772] -[unused773] -[unused774] -[unused775] -[unused776] -[unused777] -[unused778] -[unused779] -[unused780] -[unused781] -[unused782] -[unused783] -[unused784] -[unused785] -[unused786] -[unused787] -[unused788] -[unused789] -[unused790] -[unused791] -[unused792] -[unused793] -[unused794] -[unused795] -[unused796] -[unused797] -[unused798] -[unused799] -[unused800] -[unused801] -[unused802] -[unused803] -[unused804] -[unused805] -[unused806] -[unused807] -[unused808] -[unused809] -[unused810] -[unused811] -[unused812] -[unused813] -[unused814] -[unused815] -[unused816] -[unused817] -[unused818] -[unused819] -[unused820] -[unused821] -[unused822] -[unused823] -[unused824] -[unused825] -[unused826] -[unused827] -[unused828] -[unused829] -[unused830] -[unused831] -[unused832] -[unused833] -[unused834] -[unused835] -[unused836] -[unused837] -[unused838] -[unused839] -[unused840] -[unused841] -[unused842] -[unused843] -[unused844] -[unused845] -[unused846] -[unused847] -[unused848] -[unused849] -[unused850] -[unused851] -[unused852] -[unused853] -[unused854] -[unused855] -[unused856] -[unused857] -[unused858] -[unused859] -[unused860] -[unused861] -[unused862] -[unused863] -[unused864] -[unused865] -[unused866] -[unused867] -[unused868] -[unused869] -[unused870] -[unused871] -[unused872] -[unused873] -[unused874] -[unused875] -[unused876] -[unused877] -[unused878] -[unused879] -[unused880] -[unused881] -[unused882] -[unused883] -[unused884] -[unused885] -[unused886] -[unused887] -[unused888] -[unused889] -[unused890] -[unused891] -[unused892] -[unused893] -[unused894] -[unused895] -[unused896] -[unused897] -[unused898] -[unused899] -[unused900] -[unused901] -[unused902] -[unused903] -[unused904] -[unused905] -[unused906] -[unused907] -[unused908] -[unused909] -[unused910] -[unused911] -[unused912] -[unused913] -[unused914] -[unused915] -[unused916] -[unused917] -[unused918] -[unused919] -[unused920] -[unused921] -[unused922] -[unused923] -[unused924] -[unused925] -[unused926] -[unused927] -[unused928] -[unused929] -[unused930] -[unused931] -[unused932] -[unused933] -[unused934] -[unused935] -[unused936] -[unused937] -[unused938] -[unused939] -[unused940] -[unused941] -[unused942] -[unused943] -[unused944] -[unused945] -[unused946] -[unused947] -[unused948] -[unused949] -[unused950] -[unused951] -[unused952] -[unused953] -[unused954] -[unused955] -[unused956] -[unused957] -[unused958] -[unused959] -[unused960] -[unused961] -[unused962] -[unused963] -[unused964] -[unused965] -[unused966] -[unused967] -[unused968] -[unused969] -[unused970] -[unused971] -[unused972] -[unused973] -[unused974] -[unused975] -[unused976] -[unused977] -[unused978] -[unused979] -[unused980] -[unused981] -[unused982] -[unused983] -[unused984] -[unused985] -[unused986] -[unused987] -[unused988] -[unused989] -[unused990] -[unused991] -[unused992] -[unused993] -! -" -# -$ -% -& -' -( -) -* -+ -, -- -. -/ -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -: -; -< -= -> -? 
-@ -[ -\ -] -^ -_ -` -a -b -c -d -e -f -g -h -i -j -k -l -m -n -o -p -q -r -s -t -u -v -w -x -y -z -{ -| -} -~ -¡ -¢ -£ -¤ -Â¥ -¦ -§ -¨ -© -ª -« -¬ -® -° -± -² -³ -´ -µ -¶ -· -¹ -º -» -¼ -½ -¾ -¿ -× -ß -æ -ð -÷ -ø -þ -Ä‘ -ħ -ı -Å‚ -Å‹ -Å“ -Æ’ -É -É‘ -É’ -É” -É• -É™ -É› -É¡ -É£ -ɨ -ɪ -É« -ɬ -ɯ -ɲ -É´ -ɹ -ɾ -Ê€ -Ê -Ê‚ -ʃ -ʉ -ÊŠ -Ê‹ -ÊŒ -ÊŽ -Ê -Ê‘ -Ê’ -Ê” -ʰ -ʲ -ʳ -Ê· -ʸ -Ê» -ʼ -ʾ -Ê¿ -ˈ -Ë -Ë¡ -Ë¢ -Ë£ -ˤ -α -β -γ -δ -ε -ζ -η -θ -ι -κ -λ -μ -ν -ξ -ο -Ï€ -Ï -Ï‚ -σ -Ï„ -Ï… -φ -χ -ψ -ω -а -б -в -г -д -е -ж -з -и -к -л -м -н -о -п -Ñ€ -Ñ -Ñ‚ -у -Ñ„ -Ñ… -ц -ч -ш -щ -ÑŠ -Ñ‹ -ÑŒ -Ñ -ÑŽ -Ñ -Ñ’ -Ñ” -Ñ– -ј -Ñ™ -Ñš -Ñ› -Ó -Õ¡ -Õ¢ -Õ£ -Õ¤ -Õ¥ -Õ© -Õ« -Õ¬ -Õ¯ -Õ° -Õ´ -Õµ -Õ¶ -Õ¸ -Õº -Õ½ -Õ¾ -Õ¿ -Ö€ -Ö‚ -Ö„ -Ö¾ -× -ב -×’ -ד -×” -ו -×– -×— -ט -×™ -ך -×› -ל -× -מ -ן -×  -ס -×¢ -×£ -פ -×¥ -צ -×§ -ר -ש -ת -ØŒ -Ø¡ -ا -ب -Ø© -ت -Ø« -ج -Ø­ -Ø® -د -ذ -ر -ز -س -Ø´ -ص -ض -Ø· -ظ -ع -غ -Ù€ -Ù -Ù‚ -Ùƒ -Ù„ -Ù… -Ù† -Ù‡ -Ùˆ -Ù‰ -ÙŠ -Ù¹ -Ù¾ -Ú† -Ú© -Ú¯ -Úº -Ú¾ -Û -ÛŒ -Û’ -अ -आ -उ -ठ-क -ख -ग -च -ज -ट -ड -ण -त -थ -द -ध -न -प -ब -भ -म -य -र -ल -व -श -ष -स -ह -ा -ि -ी -ो -। -॥ -ং -অ -আ -ই -উ -ঠ-ও -ক -খ -গ -চ -ছ -জ -ট -ড -ণ -ত -থ -দ -ধ -ন -প -ব -ভ -ম -য -র -ল -শ -ষ -স -হ -া -ি -à§€ -ে -க -ச -ட -த -ந -ன -ப -à®® -ய -à®° -ல -ள -வ -ா -ி -௠-ே -ை -ನ -ರ -ಾ -à¶š -ය -à¶» -à¶½ -à·€ -à· -ภ-ง -ต -ท -น -พ -ม -ย -ร -ล -ว -ส -อ -า -เ -་ -༠-ག -ང -ད -ན -པ -བ -མ -འ -ར -ལ -ས -မ -რ-ბ -გ -დ -ე -ვ -თ -ი -კ -ლ -მ -ნ -რ-რ -ს -ტ -უ -á„€ -á„‚ -ᄃ -á„… -ᄆ -ᄇ -ᄉ -ᄊ -á„‹ -ᄌ -ᄎ -á„ -á„ -á„‘ -á„’ -á…¡ -á…¢ -á…¥ -á…¦ -á…§ -á…© -á…ª -á…­ -á…® -á…¯ -á…² -á…³ -á…´ -á…µ -ᆨ -ᆫ -ᆯ -ᆷ -ᆸ -ᆼ -á´¬ -á´® -á´° -á´µ -á´º -áµ€ -ᵃ -ᵇ -ᵈ -ᵉ -áµ -áµ -áµ -áµ’ -áµ– -áµ— -ᵘ -áµ¢ -áµ£ -ᵤ -áµ¥ -á¶œ -á¶  -†-‑ -‒ -– -— -― -‖ -‘ -’ -‚ -“ -†-„ -† -‡ -• -… -‰ -′ -″ -› -‿ -â„ -â° -â± -â´ -âµ -â¶ -â· -⸠-â¹ -⺠-â» -â¿ -â‚€ -â‚ -â‚‚ -₃ -â‚„ -â‚… -₆ -₇ -₈ -₉ -₊ -â‚ -₎ -â‚ -â‚‘ -â‚’ -â‚“ -â‚• -â‚– -â‚— -ₘ -â‚™ -ₚ -â‚› -ₜ -₤ -â‚© -€ -₱ -₹ -â„“ -â„– -â„ -â„¢ -â…“ -â…” -↠-↑ -→ -↓ -↔ -↦ -⇄ -⇌ -⇒ -∂ -∅ -∆ -∇ -∈ -− -∗ -∘ -√ -∞ -∧ -∨ -∩ -∪ -≈ -≡ -≤ -≥ -⊂ -⊆ -⊕ -⊗ -â‹… -─ -│ -â–  -â–ª -â— -★ -☆ -☉ -â™  -♣ -♥ -♦ -â™­ -♯ -⟨ -⟩ -â±¼ -⺩ -⺼ -â½¥ -〠-。 -〈 -〉 -《 -》 -「 -〠-『 -〠-〜 -ã‚ -ã„ -ㆠ-㈠-㊠-ã‹ -ã -ã -ã‘ -ã“ -ã• -ã— -ã™ -ã› -ã -㟠-ã¡ -㣠-㤠-㦠-㨠-㪠-ã« -㬠-ã­ -ã® -㯠-ã² -ãµ -㸠-ã» -ã¾ -ã¿ -ã‚€ -ã‚ -ã‚‚ -ã‚„ -ゆ -よ -ら -り -ã‚‹ -れ -ã‚ -ã‚’ -ã‚“ -ã‚¡ -ã‚¢ -ã‚£ -イ -ウ -ã‚§ -エ -オ -ã‚« -ã‚­ -ク -ケ -コ -サ -ã‚· -ス -ã‚» -ã‚¿ -ム-ッ -ツ -テ -ト -ナ -ニ -ノ -ム-ヒ -フ -ヘ -ホ -マ -ミ -ム -メ -モ -ャ -ュ -ョ -ラ -リ -ル -レ -ロ -ワ -ン -・ -ー -一 -三 -上 -下 -ä¸ -世 -中 -主 -ä¹… -之 -也 -事 -二 -五 -井 -京 -人 -亻 -ä» -介 -代 -ä»® -伊 -会 -ä½ -ä¾ -ä¿ -ä¿¡ -å¥ -å…ƒ -å…‰ -å…« -å…¬ -内 -出 -分 -å‰ -劉 -力 -加 -å‹ -北 -区 -å -åƒ -å— -åš -原 -å£ -å¤ -å² -å¸ -åˆ -å‰ -åŒ -å -å’Œ -å›— -å›› -国 -國 -土 -地 -å‚ -城 -å ‚ -å ´ -士 -å¤ -外 -大 -天 -太 -夫 -奈 -女 -å­ -å­¦ -宀 -宇 -安 -å®— -定 -宣 -å®® -å®¶ -宿 -寺 -å°‡ -å° -å°š -å±± -岡 -å³¶ -å´Ž -å· -å·ž -å·¿ -å¸ -å¹³ -å¹´ -幸 -广 -弘 -å¼µ -å½³ -後 -御 -å¾· -心 -å¿„ -å¿— -å¿  -æ„› -æˆ -我 -戦 -戸 -手 -扌 -政 -æ–‡ -æ–° -æ–¹ -æ—¥ -明 -星 -春 -昭 -智 -曲 -書 -月 -有 -æœ -木 -本 -æŽ -æ‘ -æ± -æ¾ -æž— -森 -楊 -樹 -æ©‹ -æ­Œ -æ­¢ -æ­£ -æ­¦ -比 -æ° -æ°‘ -æ°´ -æ°µ -æ°· -æ°¸ -江 -æ²¢ -æ²³ -æ²» -法 -æµ· -清 -æ¼¢ -瀬 -ç« -版 -犬 -王 -生 -ç”° -ç”· -ç–’ -発 -白 -çš„ -皇 -ç›® -相 -çœ -真 -石 -示 -社 -神 -ç¦ -禾 -ç§€ -ç§‹ -空 -ç«‹ -ç«  -竹 -ç³¹ -美 -義 -耳 -良 -艹 -花 -英 -è¯ -葉 -è—¤ -行 -è¡— -西 -見 -è¨ -語 -è°· -è² -è²´ -車 -è» -è¾¶ -é“ -郎 -郡 -部 -都 -里 -野 -金 -鈴 -镇 -é•· -é–€ -é–“ -é˜ -阿 -陳 -陽 -雄 -é’ -é¢ -風 -食 -香 -馬 -高 -é¾ -龸 -ï¬ -fl -ï¼ -( -) -, -ï¼ -. -ï¼ -: -? 
-~ -the -of -and -in -to -was -he -is -as -for -on -with -that -it -his -by -at -from -her -##s -she -you -had -an -were -but -be -this -are -not -my -they -one -which -or -have -him -me -first -all -also -their -has -up -who -out -been -when -after -there -into -new -two -its -##a -time -would -no -what -about -said -we -over -then -other -so -more -##e -can -if -like -back -them -only -some -could -##i -where -just -##ing -during -before -##n -do -##o -made -school -through -than -now -years -most -world -may -between -down -well -three -##d -year -while -will -##ed -##r -##y -later -##t -city -under -around -did -such -being -used -state -people -part -know -against -your -many -second -university -both -national -##er -these -don -known -off -way -until -re -how -even -get -head -... -didn -##ly -team -american -because -de -##l -born -united -film -since -still -long -work -south -us -became -any -high -again -day -family -see -right -man -eyes -house -season -war -states -including -took -life -north -same -each -called -name -much -place -however -go -four -group -another -found -won -area -here -going -10 -away -series -left -home -music -best -make -hand -number -company -several -never -last -john -000 -very -album -take -end -good -too -following -released -game -played -little -began -district -##m -old -want -those -side -held -own -early -county -ll -league -use -west -##u -face -think -##es -2010 -government -##h -march -came -small -general -town -june -##on -line -based -something -##k -september -thought -looked -along -international -2011 -air -july -club -went -january -october -our -august -april -york -12 -few -2012 -2008 -east -show -member -college -2009 -father -public -##us -come -men -five -set -station -church -##c -next -former -november -room -party -located -december -2013 -age -got -2007 -##g -system -let -love -2006 -though -every -2014 -look -song -water -century -without -body -black -night -within -great -women -single -ve -building -large -population -river -named -band -white -started -##an -once -15 -20 -should -18 -2015 -service -top -built -british -open -death -king -moved -local -times -children -february -book -why -11 -door -need -president -order -final -road -wasn -although -due -major -died -village -third -knew -2016 -asked -turned -st -wanted -say -##p -together -received -main -son -served -different -##en -behind -himself -felt -members -power -football -law -voice -play -##in -near -park -history -30 -having -2005 -16 -##man -saw -mother -##al -army -point -front -help -english -street -art -late -hands -games -award -##ia -young -14 -put -published -country -division -across -told -13 -often -ever -french -london -center -six -red -2017 -led -days -include -light -25 -find -tell -among -species -really -according -central -half -2004 -form -original -gave -office -making -enough -lost -full -opened -must -included -live -given -german -player -run -business -woman -community -cup -might -million -land -2000 -court -development -17 -short -round -ii -km -seen -class -story -always -become -sure -research -almost -director -council -la -##2 -career -things -using -island -##z -couldn -car -##is -24 -close -force -##1 -better -free -support -control -field -students -2003 -education -married -##b -nothing -worked -others -record -big -inside -level -anything -continued -give -james -##3 -military -established -non -returned -feel -does -title -written -thing -feet -william -far -co -association -hard -already -2002 -##ra -championship 
-human -western -100 -##na -department -hall -role -various -production -21 -19 -heart -2001 -living -fire -version -##ers -##f -television -royal -##4 -produced -working -act -case -society -region -present -radio -period -looking -least -total -keep -england -wife -program -per -brother -mind -special -22 -##le -am -works -soon -##6 -political -george -services -taken -created -##7 -further -able -reached -david -union -joined -upon -done -important -social -information -either -##ic -##x -appeared -position -ground -lead -rock -dark -election -23 -board -france -hair -course -arms -site -police -girl -instead -real -sound -##v -words -moment -##te -someone -##8 -summer -project -announced -san -less -wrote -past -followed -##5 -blue -founded -al -finally -india -taking -records -america -##ne -1999 -design -considered -northern -god -stop -battle -toward -european -outside -described -track -today -playing -language -28 -call -26 -heard -professional -low -australia -miles -california -win -yet -green -##ie -trying -blood -##ton -southern -science -maybe -everything -match -square -27 -mouth -video -race -recorded -leave -above -##9 -daughter -points -space -1998 -museum -change -middle -common -##0 -move -tv -post -##ta -lake -seven -tried -elected -closed -ten -paul -minister -##th -months -start -chief -return -canada -person -sea -release -similar -modern -brought -rest -hit -formed -mr -##la -1997 -floor -event -doing -thomas -1996 -robert -care -killed -training -star -week -needed -turn -finished -railway -rather -news -health -sent -example -ran -term -michael -coming -currently -yes -forces -despite -gold -areas -50 -stage -fact -29 -dead -says -popular -2018 -originally -germany -probably -developed -result -pulled -friend -stood -money -running -mi -signed -word -songs -child -eventually -met -tour -average -teams -minutes -festival -current -deep -kind -1995 -decided -usually -eastern -seemed -##ness -episode -bed -added -table -indian -private -charles -route -available -idea -throughout -centre -addition -appointed -style -1994 -books -eight -construction -press -mean -wall -friends -remained -schools -study -##ch -##um -institute -oh -chinese -sometimes -events -possible -1992 -australian -type -brown -forward -talk -process -food -debut -seat -performance -committee -features -character -arts -herself -else -lot -strong -russian -range -hours -peter -arm -##da -morning -dr -sold -##ry -quickly -directed -1993 -guitar -china -##w -31 -list -##ma -performed -media -uk -players -smile -##rs -myself -40 -placed -coach -province -towards -wouldn -leading -whole -boy -official -designed -grand -census -##el -europe -attack -japanese -henry -1991 -##re -##os -cross -getting -alone -action -lower -network -wide -washington -japan -1990 -hospital -believe -changed -sister -##ar -hold -gone -sir -hadn -ship -##ka -studies -academy -shot -rights -below -base -bad -involved -kept -largest -##ist -bank -future -especially -beginning -mark -movement -section -female -magazine -plan -professor -lord -longer -##ian -sat -walked -hill -actually -civil -energy -model -families -size -thus -aircraft -completed -includes -data -captain -##or -fight -vocals -featured -richard -bridge -fourth -1989 -officer -stone -hear -##ism -means -medical -groups -management -self -lips -competition -entire -lived -technology -leaving -federal -tournament -bit -passed -hot -independent -awards -kingdom -mary -spent -fine -doesn -reported -##ling -jack -fall -raised -itself -stay -true -studio -1988 
-sports -replaced -paris -systems -saint -leader -theatre -whose -market -capital -parents -spanish -canadian -earth -##ity -cut -degree -writing -bay -christian -awarded -natural -higher -bill -##as -coast -provided -previous -senior -ft -valley -organization -stopped -onto -countries -parts -conference -queen -security -interest -saying -allowed -master -earlier -phone -matter -smith -winning -try -happened -moving -campaign -los -##ley -breath -nearly -mid -1987 -certain -girls -date -italian -african -standing -fell -artist -##ted -shows -deal -mine -industry -1986 -##ng -everyone -republic -provide -collection -library -student -##ville -primary -owned -older -via -heavy -1st -makes -##able -attention -anyone -africa -##ri -stated -length -ended -fingers -command -staff -skin -foreign -opening -governor -okay -medal -kill -sun -cover -job -1985 -introduced -chest -hell -feeling -##ies -success -meet -reason -standard -meeting -novel -1984 -trade -source -buildings -##land -rose -guy -goal -##ur -chapter -native -husband -previously -unit -limited -entered -weeks -producer -operations -mountain -takes -covered -forced -related -roman -complete -successful -key -texas -cold -##ya -channel -1980 -traditional -films -dance -clear -approximately -500 -nine -van -prince -question -active -tracks -ireland -regional -silver -author -personal -sense -operation -##ine -economic -1983 -holding -twenty -isbn -additional -speed -hour -edition -regular -historic -places -whom -shook -movie -km² -secretary -prior -report -chicago -read -foundation -view -engine -scored -1982 -units -ask -airport -property -ready -immediately -lady -month -listed -contract -##de -manager -themselves -lines -##ki -navy -writer -meant -##ts -runs -##ro -practice -championships -singer -glass -commission -required -forest -starting -culture -generally -giving -access -attended -test -couple -stand -catholic -martin -caught -executive -##less -eye -##ey -thinking -chair -quite -shoulder -1979 -hope -decision -plays -defeated -municipality -whether -structure -offered -slowly -pain -ice -direction -##ion -paper -mission -1981 -mostly -200 -noted -individual -managed -nature -lives -plant -##ha -helped -except -studied -computer -figure -relationship -issue -significant -loss -die -smiled -gun -ago -highest -1972 -##am -male -bring -goals -mexico -problem -distance -commercial -completely -location -annual -famous -drive -1976 -neck -1978 -surface -caused -italy -understand -greek -highway -wrong -hotel -comes -appearance -joseph -double -issues -musical -companies -castle -income -review -assembly -bass -initially -parliament -artists -experience -1974 -particular -walk -foot -engineering -talking -window -dropped -##ter -miss -baby -boys -break -1975 -stars -edge -remember -policy -carried -train -stadium -bar -sex -angeles -evidence -##ge -becoming -assistant -soviet -1977 -upper -step -wing -1970 -youth -financial -reach -##ll -actor -numerous -##se -##st -nodded -arrived -##ation -minute -##nt -believed -sorry -complex -beautiful -victory -associated -temple -1968 -1973 -chance -perhaps -metal -##son -1945 -bishop -##et -lee -launched -particularly -tree -le -retired -subject -prize -contains -yeah -theory -empire -##ce -suddenly -waiting -trust -recording -##to -happy -terms -camp -champion -1971 -religious -pass -zealand -names -2nd -port -ancient -tom -corner -represented -watch -legal -anti -justice -cause -watched -brothers -45 -material -changes -simply -response -louis -fast -##ting -answer -60 -historical 
-1969 -stories -straight -create -feature -increased -rate -administration -virginia -el -activities -cultural -overall -winner -programs -basketball -legs -guard -beyond -cast -doctor -mm -flight -results -remains -cost -effect -winter -##ble -larger -islands -problems -chairman -grew -commander -isn -1967 -pay -failed -selected -hurt -fort -box -regiment -majority -journal -35 -edward -plans -##ke -##ni -shown -pretty -irish -characters -directly -scene -likely -operated -allow -spring -##j -junior -matches -looks -mike -houses -fellow -##tion -beach -marriage -##ham -##ive -rules -oil -65 -florida -expected -nearby -congress -sam -peace -recent -iii -wait -subsequently -cell -##do -variety -serving -agreed -please -poor -joe -pacific -attempt -wood -democratic -piece -prime -##ca -rural -mile -touch -appears -township -1964 -1966 -soldiers -##men -##ized -1965 -pennsylvania -closer -fighting -claimed -score -jones -physical -editor -##ous -filled -genus -specific -sitting -super -mom -##va -therefore -supported -status -fear -cases -store -meaning -wales -minor -spain -tower -focus -vice -frank -follow -parish -separate -golden -horse -fifth -remaining -branch -32 -presented -stared -##id -uses -secret -forms -##co -baseball -exactly -##ck -choice -note -discovered -travel -composed -truth -russia -ball -color -kiss -dad -wind -continue -ring -referred -numbers -digital -greater -##ns -metres -slightly -direct -increase -1960 -responsible -crew -rule -trees -troops -##no -broke -goes -individuals -hundred -weight -creek -sleep -memory -defense -provides -ordered -code -value -jewish -windows -1944 -safe -judge -whatever -corps -realized -growing -pre -##ga -cities -alexander -gaze -lies -spread -scott -letter -showed -situation -mayor -transport -watching -workers -extended -##li -expression -normal -##ment -chart -multiple -border -##ba -host -##ner -daily -mrs -walls -piano -##ko -heat -cannot -##ate -earned -products -drama -era -authority -seasons -join -grade -##io -sign -difficult -machine -1963 -territory -mainly -##wood -stations -squadron -1962 -stepped -iron -19th -##led -serve -appear -sky -speak -broken -charge -knowledge -kilometres -removed -ships -article -campus -simple -##ty -pushed -britain -##ve -leaves -recently -cd -soft -boston -latter -easy -acquired -poland -##sa -quality -officers -presence -planned -nations -mass -broadcast -jean -share -image -influence -wild -offer -emperor -electric -reading -headed -ability -promoted -yellow -ministry -1942 -throat -smaller -politician -##by -latin -spoke -cars -williams -males -lack -pop -80 -##ier -acting -seeing -consists -##ti -estate -1961 -pressure -johnson -newspaper -jr -chris -olympics -online -conditions -beat -elements -walking -vote -##field -needs -carolina -text -featuring -global -block -shirt -levels -francisco -purpose -females -et -dutch -duke -ahead -gas -twice -safety -serious -turning -highly -lieutenant -firm -maria -amount -mixed -daniel -proposed -perfect -agreement -affairs -3rd -seconds -contemporary -paid -1943 -prison -save -kitchen -label -administrative -intended -constructed -academic -nice -teacher -races -1956 -formerly -corporation -ben -nation -issued -shut -1958 -drums -housing -victoria -seems -opera -1959 -graduated -function -von -mentioned -picked -build -recognized -shortly -protection -picture -notable -exchange -elections -1980s -loved -percent -racing -fish -elizabeth -garden -volume -hockey -1941 -beside -settled -##ford -1940 -competed -replied -drew -1948 -actress -marine 
-scotland -steel -glanced -farm -steve -1957 -risk -tonight -positive -magic -singles -effects -gray -screen -dog -##ja -residents -bus -sides -none -secondary -literature -polish -destroyed -flying -founder -households -1939 -lay -reserve -usa -gallery -##ler -1946 -industrial -younger -approach -appearances -urban -ones -1950 -finish -avenue -powerful -fully -growth -page -honor -jersey -projects -advanced -revealed -basic -90 -infantry -pair -equipment -visit -33 -evening -search -grant -effort -solo -treatment -buried -republican -primarily -bottom -owner -1970s -israel -gives -jim -dream -bob -remain -spot -70 -notes -produce -champions -contact -ed -soul -accepted -ways -del -##ally -losing -split -price -capacity -basis -trial -questions -##ina -1955 -20th -guess -officially -memorial -naval -initial -##ization -whispered -median -engineer -##ful -sydney -##go -columbia -strength -300 -1952 -tears -senate -00 -card -asian -agent -1947 -software -44 -draw -warm -supposed -com -pro -##il -transferred -leaned -##at -candidate -escape -mountains -asia -potential -activity -entertainment -seem -traffic -jackson -murder -36 -slow -product -orchestra -haven -agency -bbc -taught -website -comedy -unable -storm -planning -albums -rugby -environment -scientific -grabbed -protect -##hi -boat -typically -1954 -1953 -damage -principal -divided -dedicated -mount -ohio -##berg -pick -fought -driver -##der -empty -shoulders -sort -thank -berlin -prominent -account -freedom -necessary -efforts -alex -headquarters -follows -alongside -des -simon -andrew -suggested -operating -learning -steps -1949 -sweet -technical -begin -easily -34 -teeth -speaking -settlement -scale -##sh -renamed -ray -max -enemy -semi -joint -compared -##rd -scottish -leadership -analysis -offers -georgia -pieces -captured -animal -deputy -guest -organized -##lin -tony -combined -method -challenge -1960s -huge -wants -battalion -sons -rise -crime -types -facilities -telling -path -1951 -platform -sit -1990s -##lo -tells -assigned -rich -pull -##ot -commonly -alive -##za -letters -concept -conducted -wearing -happen -bought -becomes -holy -gets -ocean -defeat -languages -purchased -coffee -occurred -titled -##q -declared -applied -sciences -concert -sounds -jazz -brain -##me -painting -fleet -tax -nick -##ius -michigan -count -animals -leaders -episodes -##line -content -##den -birth -##it -clubs -64 -palace -critical -refused -fair -leg -laughed -returning -surrounding -participated -formation -lifted -pointed -connected -rome -medicine -laid -taylor -santa -powers -adam -tall -shared -focused -knowing -yards -entrance -falls -##wa -calling -##ad -sources -chosen -beneath -resources -yard -##ite -nominated -silence -zone -defined -##que -gained -thirty -38 -bodies -moon -##ard -adopted -christmas -widely -register -apart -iran -premier -serves -du -unknown -parties -##les -generation -##ff -continues -quick -fields -brigade -quiet -teaching -clothes -impact -weapons -partner -flat -theater -supreme -1938 -37 -relations -##tor -plants -suffered -1936 -wilson -kids -begins -##age -1918 -seats -armed -internet -models -worth -laws -400 -communities -classes -background -knows -thanks -quarter -reaching -humans -carry -killing -format -kong -hong -setting -75 -architecture -disease -railroad -inc -possibly -wish -arthur -thoughts -harry -doors -density -##di -crowd -illinois -stomach -tone -unique -reports -anyway -##ir -liberal -der -vehicle -thick -dry -drug -faced -largely -facility -theme -holds -creation -strange -colonel -##mi 
-revolution -bell -politics -turns -silent -rail -relief -independence -combat -shape -write -determined -sales -learned -4th -finger -oxford -providing -1937 -heritage -fiction -situated -designated -allowing -distribution -hosted -##est -sight -interview -estimated -reduced -##ria -toronto -footballer -keeping -guys -damn -claim -motion -sport -sixth -stayed -##ze -en -rear -receive -handed -twelve -dress -audience -granted -brazil -##well -spirit -##ated -noticed -etc -olympic -representative -eric -tight -trouble -reviews -drink -vampire -missing -roles -ranked -newly -household -finals -wave -critics -##ee -phase -massachusetts -pilot -unlike -philadelphia -bright -guns -crown -organizations -roof -42 -respectively -clearly -tongue -marked -circle -fox -korea -bronze -brian -expanded -sexual -supply -yourself -inspired -labour -fc -##ah -reference -vision -draft -connection -brand -reasons -1935 -classic -driving -trip -jesus -cells -entry -1920 -neither -trail -claims -atlantic -orders -labor -nose -afraid -identified -intelligence -calls -cancer -attacked -passing -stephen -positions -imperial -grey -jason -39 -sunday -48 -swedish -avoid -extra -uncle -message -covers -allows -surprise -materials -fame -hunter -##ji -1930 -citizens -figures -davis -environmental -confirmed -shit -titles -di -performing -difference -acts -attacks -##ov -existing -votes -opportunity -nor -shop -entirely -trains -opposite -pakistan -##pa -develop -resulted -representatives -actions -reality -pressed -##ish -barely -wine -conversation -faculty -northwest -ends -documentary -nuclear -stock -grace -sets -eat -alternative -##ps -bag -resulting -creating -surprised -cemetery -1919 -drop -finding -sarah -cricket -streets -tradition -ride -1933 -exhibition -target -ear -explained -rain -composer -injury -apartment -municipal -educational -occupied -netherlands -clean -billion -constitution -learn -1914 -maximum -classical -francis -lose -opposition -jose -ontario -bear -core -hills -rolled -ending -drawn -permanent -fun -##tes -##lla -lewis -sites -chamber -ryan -##way -scoring -height -1934 -##house -lyrics -staring -55 -officials -1917 -snow -oldest -##tic -orange -##ger -qualified -interior -apparently -succeeded -thousand -dinner -lights -existence -fans -heavily -41 -greatest -conservative -send -bowl -plus -enter -catch -##un -economy -duty -1929 -speech -authorities -princess -performances -versions -shall -graduate -pictures -effective -remembered -poetry -desk -crossed -starring -starts -passenger -sharp -##ant -acres -ass -weather -falling -rank -fund -supporting -check -adult -publishing -heads -cm -southeast -lane -##burg -application -bc -##ura -les -condition -transfer -prevent -display -ex -regions -earl -federation -cool -relatively -answered -besides -1928 -obtained -portion -##town -mix -##ding -reaction -liked -dean -express -peak -1932 -##tte -counter -religion -chain -rare -miller -convention -aid -lie -vehicles -mobile -perform -squad -wonder -lying -crazy -sword -##ping -attempted -centuries -weren -philosophy -category -##ize -anna -interested -47 -sweden -wolf -frequently -abandoned -kg -literary -alliance -task -entitled -##ay -threw -promotion -factory -tiny -soccer -visited -matt -fm -achieved -52 -defence -internal -persian -43 -methods -##ging -arrested -otherwise -cambridge -programming -villages -elementary -districts -rooms -criminal -conflict -worry -trained -1931 -attempts -waited -signal -bird -truck -subsequent -programme -##ol -ad -49 -communist -details -faith -sector 
-patrick -carrying -laugh -##ss -controlled -korean -showing -origin -fuel -evil -1927 -##ent -brief -identity -darkness -address -pool -missed -publication -web -planet -ian -anne -wings -invited -##tt -briefly -standards -kissed -##be -ideas -climate -causing -walter -worse -albert -articles -winners -desire -aged -northeast -dangerous -gate -doubt -1922 -wooden -multi -##ky -poet -rising -funding -46 -communications -communication -violence -copies -prepared -ford -investigation -skills -1924 -pulling -electronic -##ak -##ial -##han -containing -ultimately -offices -singing -understanding -restaurant -tomorrow -fashion -christ -ward -da -pope -stands -5th -flow -studios -aired -commissioned -contained -exist -fresh -americans -##per -wrestling -approved -kid -employed -respect -suit -1925 -angel -asking -increasing -frame -angry -selling -1950s -thin -finds -##nd -temperature -statement -ali -explain -inhabitants -towns -extensive -narrow -51 -jane -flowers -images -promise -somewhere -object -fly -closely -##ls -1912 -bureau -cape -1926 -weekly -presidential -legislative -1921 -##ai -##au -launch -founding -##ny -978 -##ring -artillery -strike -un -institutions -roll -writers -landing -chose -kevin -anymore -pp -##ut -attorney -fit -dan -billboard -receiving -agricultural -breaking -sought -dave -admitted -lands -mexican -##bury -charlie -specifically -hole -iv -howard -credit -moscow -roads -accident -1923 -proved -wear -struck -hey -guards -stuff -slid -expansion -1915 -cat -anthony -##kin -melbourne -opposed -sub -southwest -architect -failure -plane -1916 -##ron -map -camera -tank -listen -regarding -wet -introduction -metropolitan -link -ep -fighter -inch -grown -gene -anger -fixed -buy -dvd -khan -domestic -worldwide -chapel -mill -functions -examples -##head -developing -1910 -turkey -hits -pocket -antonio -papers -grow -unless -circuit -18th -concerned -attached -journalist -selection -journey -converted -provincial -painted -hearing -aren -bands -negative -aside -wondered -knight -lap -survey -ma -##ow -noise -billy -##ium -shooting -guide -bedroom -priest -resistance -motor -homes -sounded -giant -##mer -150 -scenes -equal -comic -patients -hidden -solid -actual -bringing -afternoon -touched -funds -wedding -consisted -marie -canal -sr -kim -treaty -turkish -recognition -residence -cathedral -broad -knees -incident -shaped -fired -norwegian -handle -cheek -contest -represent -##pe -representing -beauty -##sen -birds -advantage -emergency -wrapped -drawing -notice -pink -broadcasting -##ong -somehow -bachelor -seventh -collected -registered -establishment -alan -assumed -chemical -personnel -roger -retirement -jeff -portuguese -wore -tied -device -threat -progress -advance -##ised -banks -hired -manchester -nfl -teachers -structures -forever -##bo -tennis -helping -saturday -sale -applications -junction -hip -incorporated -neighborhood -dressed -ceremony -##ds -influenced -hers -visual -stairs -decades -inner -kansas -hung -hoped -gain -scheduled -downtown -engaged -austria -clock -norway -certainly -pale -protected -1913 -victor -employees -plate -putting -surrounded -##ists -finishing -blues -tropical -##ries -minnesota -consider -philippines -accept -54 -retrieved -1900 -concern -anderson -properties -institution -gordon -successfully -vietnam -##dy -backing -outstanding -muslim -crossing -folk -producing -usual -demand -occurs -observed -lawyer -educated -##ana -kelly -string -pleasure -budget -items -quietly -colorado -philip -typical -##worth -derived -600 -survived 
[... several thousand deleted lines of what appears to be a WordPiece-style vocabulary test-data file, one subword token per deleted `-` line (e.g. "asks", "##ide", "distinguished", "1911"), flattened here during extraction; token dump omitted ...]
-recovering -nolan -ashe -hurts -geology -fashioned -disappearance -farewell -swollen -shrug -marquis -wimbledon -124 -rue -1792 -commemorate -reduces -experiencing -inevitable -calcutta -intel -##court -murderer -sticking -fisheries -imagery -bloom -280 -brake -##inus -gustav -hesitation -memorable -po -viral -beans -accidents -tunisia -antenna -spilled -consort -treatments -aye -perimeter -##gard -donation -hostage -migrated -banker -addiction -apex -lil -trout -##ously -conscience -##nova -rams -sands -genome -passionate -troubles -##lets -##set -amid -##ibility -##ret -higgins -exceed -vikings -##vie -payne -##zan -muscular -##ste -defendant -sucking -##wal -ibrahim -fuselage -claudia -vfl -europeans -snails -interval -##garh -preparatory -statewide -tasked -lacrosse -viktor -##lation -angola -##hra -flint -implications -employs -teens -patrons -stall -weekends -barriers -scrambled -nucleus -tehran -jenna -parsons -lifelong -robots -displacement -5000 -##bles -precipitation -##gt -knuckles -clutched -1802 -marrying -ecology -marx -accusations -declare -scars -kolkata -mat -meadows -bermuda -skeleton -finalists -vintage -crawl -coordinate -affects -subjected -orchestral -mistaken -##tc -mirrors -dipped -relied -260 -arches -candle -##nick -incorporating -wildly -fond -basilica -owl -fringe -rituals -whispering -stirred -feud -tertiary -slick -goat -honorable -whereby -skip -ricardo -stripes -parachute -adjoining -submerged -synthesizer -##gren -intend -positively -ninety -phi -beaver -partition -fellows -alexis -prohibition -carlisle -bizarre -fraternity -##bre -doubts -icy -cbc -aquatic -sneak -sonny -combines -airports -crude -supervised -spatial -merge -alfonso -##bic -corrupt -scan -undergo -##ams -disabilities -colombian -comparing -dolphins -perkins -##lish -reprinted -unanimous -bounced -hairs -underworld -midwest -semester -bucket -paperback -miniseries -coventry -demise -##leigh -demonstrations -sensor -rotating -yan -##hler -arrange -soils -##idge -hyderabad -labs -##dr -brakes -grandchildren -##nde -negotiated -rover -ferrari -continuation -directorate -augusta -stevenson -counterpart -gore -##rda -nursery -rican -ave -collectively -broadly -pastoral -repertoire -asserted -discovering -nordic -styled -fiba -cunningham -harley -middlesex -survives -tumor -tempo -zack -aiming -lok -urgent -##rade -##nto -devils -##ement -contractor -turin -##wl -##ool -bliss -repaired -simmons -moan -astronomical -cr -negotiate -lyric -1890s -lara -bred -clad -angus -pbs -##ience -engineered -posed -##lk -hernandez -possessions -elbows -psychiatric -strokes -confluence -electorate -lifts -campuses -lava -alps -##ep -##ution -##date -physicist -woody -##page -##ographic -##itis -juliet -reformation -sparhawk -320 -complement -suppressed -jewel -##½ -floated -##kas -continuity -sadly -##ische -inability -melting -scanning -paula -flour -judaism -safer -vague -##lm -solving -curb -##stown -financially -gable -bees -expired -miserable -cassidy -dominion -1789 -cupped -145 -robbery -facto -amos -warden -resume -tallest -marvin -ing -pounded -usd -declaring -gasoline -##aux -darkened -270 -650 -sophomore -##mere -erection -gossip -televised -risen -dial -##eu -pillars -##link -passages -profound -##tina -arabian -ashton -silicon -nail -##ead -##lated -##wer -##hardt -fleming -firearms -ducked -circuits -blows -waterloo -titans -##lina -atom -fireplace -cheshire -financed -activation -algorithms -##zzi -constituent -catcher -cherokee -partnerships -sexuality -platoon -tragic -vivian -guarded -whiskey 
-meditation -poetic -##late -##nga -##ake -porto -listeners -dominance -kendra -mona -chandler -factions -22nd -salisbury -attitudes -derivative -##ido -##haus -intake -paced -javier -illustrator -barrels -bias -cockpit -burnett -dreamed -ensuing -##anda -receptors -someday -hawkins -mattered -##lal -slavic -1799 -jesuit -cameroon -wasted -tai -wax -lowering -victorious -freaking -outright -hancock -librarian -sensing -bald -calcium -myers -tablet -announcing -barack -shipyard -pharmaceutical -##uan -greenwich -flush -medley -patches -wolfgang -pt -speeches -acquiring -exams -nikolai -##gg -hayden -kannada -##type -reilly -##pt -waitress -abdomen -devastated -capped -pseudonym -pharmacy -fulfill -paraguay -1796 -clicked -##trom -archipelago -syndicated -##hman -lumber -orgasm -rejection -clifford -lorraine -advent -mafia -rodney -brock -##ght -##used -##elia -cassette -chamberlain -despair -mongolia -sensors -developmental -upstream -##eg -##alis -spanning -165 -trombone -basque -seeded -interred -renewable -rhys -leapt -revision -molecule -##ages -chord -vicious -nord -shivered -23rd -arlington -debts -corpus -sunrise -bays -blackburn -centimetres -##uded -shuddered -gm -strangely -gripping -cartoons -isabelle -orbital -##ppa -seals -proving -##lton -refusal -strengthened -bust -assisting -baghdad -batsman -portrayal -mara -pushes -spears -og -##cock -reside -nathaniel -brennan -1776 -confirmation -caucus -##worthy -markings -yemen -nobles -ku -lazy -viewer -catalan -encompasses -sawyer -##fall -sparked -substances -patents -braves -arranger -evacuation -sergio -persuade -dover -tolerance -penguin -cum -jockey -insufficient -townships -occupying -declining -plural -processed -projection -puppet -flanders -introduces -liability -##yon -gymnastics -antwerp -taipei -hobart -candles -jeep -wes -observers -126 -chaplain -bundle -glorious -##hine -hazel -flung -sol -excavations -dumped -stares -sh -bangalore -triangular -icelandic -intervals -expressing -turbine -##vers -songwriting -crafts -##igo -jasmine -ditch -rite -##ways -entertaining -comply -sorrow -wrestlers -basel -emirates -marian -rivera -helpful -##some -caution -downward -networking -##atory -##tered -darted -genocide -emergence -replies -specializing -spokesman -convenient -unlocked -fading -augustine -concentrations -resemblance -elijah -investigator -andhra -##uda -promotes -bean -##rrell -fleeing -wan -simone -announcer -##ame -##bby -lydia -weaver -132 -residency -modification -##fest -stretches -##ast -alternatively -nat -lowe -lacks -##ented -pam -tile -concealed -inferior -abdullah -residences -tissues -vengeance -##ided -moisture -peculiar -groove -zip -bologna -jennings -ninja -oversaw -zombies -pumping -batch -livingston -emerald -installations -1797 -peel -nitrogen -rama -##fying -##star -schooling -strands -responding -werner -##ost -lime -casa -accurately -targeting -##rod -underway -##uru -hemisphere -lester -##yard -occupies -2d -griffith -angrily -reorganized -##owing -courtney -deposited -##dd -##30 -estadio -##ifies -dunn -exiled -##ying -checks -##combe -##о -##fly -successes -unexpectedly -blu -assessed -##flower -##Ù‡ -observing -sacked -spiders -kn -##tail -mu -nodes -prosperity -audrey -divisional -155 -broncos -tangled -adjust -feeds -erosion -paolo -surf -directory -snatched -humid -admiralty -screwed -gt -reddish -##nese -modules -trench -lamps -bind -leah -bucks -competes -##nz -##form -transcription -##uc -isles -violently -clutching -pga -cyclist -inflation -flats -ragged -unnecessary -##hian 
-stubborn -coordinated -harriet -baba -disqualified -330 -insect -wolfe -##fies -reinforcements -rocked -duel -winked -embraced -bricks -##raj -hiatus -defeats -pending -brightly -jealousy -##xton -##hm -##uki -lena -gdp -colorful -##dley -stein -kidney -##shu -underwear -wanderers -##haw -##icus -guardians -m³ -roared -habits -##wise -permits -gp -uranium -punished -disguise -bundesliga -elise -dundee -erotic -partisan -pi -collectors -float -individually -rendering -behavioral -bucharest -ser -hare -valerie -corporal -nutrition -proportional -##isa -immense -##kis -pavement -##zie -##eld -sutherland -crouched -1775 -##lp -suzuki -trades -endurance -operas -crosby -prayed -priory -rory -socially -##urn -gujarat -##pu -walton -cube -pasha -privilege -lennon -floods -thorne -waterfall -nipple -scouting -approve -##lov -minorities -voter -dwight -extensions -assure -ballroom -slap -dripping -privileges -rejoined -confessed -demonstrating -patriotic -yell -investor -##uth -pagan -slumped -squares -##cle -##kins -confront -bert -embarrassment -##aid -aston -urging -sweater -starr -yuri -brains -williamson -commuter -mortar -structured -selfish -exports -##jon -cds -##him -unfinished -##rre -mortgage -destinations -##nagar -canoe -solitary -buchanan -delays -magistrate -fk -##pling -motivation -##lier -##vier -recruiting -assess -##mouth -malik -antique -1791 -pius -rahman -reich -tub -zhou -smashed -airs -galway -xii -conditioning -honduras -discharged -dexter -##pf -lionel -129 -debates -lemon -tiffany -volunteered -dom -dioxide -procession -devi -sic -tremendous -advertisements -colts -transferring -verdict -hanover -decommissioned -utter -relate -pac -racism -##top -beacon -limp -similarity -terra -occurrence -ant -##how -becky -capt -updates -armament -richie -pal -##graph -halloween -mayo -##ssen -##bone -cara -serena -fcc -dolls -obligations -##dling -violated -lafayette -jakarta -exploitation -##ime -infamous -iconic -##lah -##park -kitty -moody -reginald -dread -spill -crystals -olivier -modeled -bluff -equilibrium -separating -notices -ordnance -extinction -onset -cosmic -attachment -sammy -expose -privy -anchored -##bil -abbott -admits -bending -baritone -emmanuel -policeman -vaughan -winged -climax -dresses -denny -polytechnic -mohamed -burmese -authentic -nikki -genetics -grandparents -homestead -gaza -postponed -metacritic -una -##sby -##bat -unstable -dissertation -##rial -##cian -curls -obscure -uncovered -bronx -praying -disappearing -##hoe -prehistoric -coke -turret -mutations -nonprofit -pits -monaco -##ÙŠ -##usion -prominently -dispatched -podium -##mir -uci -##uation -133 -fortifications -birthplace -kendall -##lby -##oll -preacher -rack -goodman -##rman -persistent -##ott -countless -jaime -recorder -lexington -persecution -jumps -renewal -wagons -##11 -crushing -##holder -decorations -##lake -abundance -wrath -laundry -£1 -garde -##rp -jeanne -beetles -peasant -##sl -splitting -caste -sergei -##rer -##ema -scripts -##ively -rub -satellites -##vor -inscribed -verlag -scrapped -gale -packages -chick -potato -slogan -kathleen -arabs -##culture -counterparts -reminiscent -choral -##tead -rand -retains -bushes -dane -accomplish -courtesy -closes -##oth -slaughter -hague -krakow -lawson -tailed -elias -ginger -##ttes -canopy -betrayal -rebuilding -turf -##hof -frowning -allegiance -brigades -kicks -rebuild -polls -alias -nationalism -td -rowan -audition -bowie -fortunately -recognizes -harp -dillon -horrified -##oro -renault -##tics -ropes -##α -presumed -rewarded -infrared 
-wiping -accelerated -illustration -##rid -presses -practitioners -badminton -##iard -detained -##tera -recognizing -relates -misery -##sies -##tly -reproduction -piercing -potatoes -thornton -esther -manners -hbo -##aan -ours -bullshit -ernie -perennial -sensitivity -illuminated -rupert -##jin -##iss -##ear -rfc -nassau -##dock -staggered -socialism -##haven -appointments -nonsense -prestige -sharma -haul -##tical -solidarity -gps -##ook -##rata -igor -pedestrian -##uit -baxter -tenants -wires -medication -unlimited -guiding -impacts -diabetes -##rama -sasha -pas -clive -extraction -131 -continually -constraints -##bilities -sonata -hunted -sixteenth -chu -planting -quote -mayer -pretended -abs -spat -##hua -ceramic -##cci -curtains -pigs -pitching -##dad -latvian -sore -dayton -##sted -##qi -patrols -slice -playground -##nted -shone -stool -apparatus -inadequate -mates -treason -##ija -desires -##liga -##croft -somalia -laurent -mir -leonardo -oracle -grape -obliged -chevrolet -thirteenth -stunning -enthusiastic -##ede -accounted -concludes -currents -basil -##kovic -drought -##rica -mai -##aire -shove -posting -##shed -pilgrimage -humorous -packing -fry -pencil -wines -smells -144 -marilyn -aching -newest -clung -bon -neighbours -sanctioned -##pie -mug -##stock -drowning -##mma -hydraulic -##vil -hiring -reminder -lilly -investigators -##ncies -sour -##eous -compulsory -packet -##rion -##graphic -##elle -cannes -##inate -depressed -##rit -heroic -importantly -theresa -##tled -conway -saturn -marginal -rae -##xia -corresponds -royce -pact -jasper -explosives -packaging -aluminium -##ttered -denotes -rhythmic -spans -assignments -hereditary -outlined -originating -sundays -lad -reissued -greeting -beatrice -##dic -pillar -marcos -plots -handbook -alcoholic -judiciary -avant -slides -extract -masculine -blur -##eum -##force -homage -trembled -owens -hymn -trey -omega -signaling -socks -accumulated -reacted -attic -theo -lining -angie -distraction -primera -talbot -##key -1200 -ti -creativity -billed -##hey -deacon -eduardo -identifies -proposition -dizzy -gunner -hogan -##yam -##pping -##hol -ja -##chan -jensen -reconstructed -##berger -clearance -darius -##nier -abe -harlem -plea -dei -circled -emotionally -notation -fascist -neville -exceeded -upwards -viable -ducks -##fo -workforce -racer -limiting -shri -##lson -possesses -1600 -kerr -moths -devastating -laden -disturbing -locking -##cture -gal -fearing -accreditation -flavor -aide -1870s -mountainous -##baum -melt -##ures -motel -texture -servers -soda -##mb -herd -##nium -erect -puzzled -hum -peggy -examinations -gould -testified -geoff -ren -devised -sacks -##law -denial -posters -grunted -cesar -tutor -ec -gerry -offerings -byrne -falcons -combinations -ct -incoming -pardon -rocking -26th -avengers -flared -mankind -seller -uttar -loch -nadia -stroking -exposing -##hd -fertile -ancestral -instituted -##has -noises -prophecy -taxation -eminent -vivid -pol -##bol -dart -indirect -multimedia -notebook -upside -displaying -adrenaline -referenced -geometric -##iving -progression -##ddy -blunt -announce -##far -implementing -##lav -aggression -liaison -cooler -cares -headache -plantations -gorge -dots -impulse -thickness -ashamed -averaging -kathy -obligation -precursor -137 -fowler -symmetry -thee -225 -hears -##rai -undergoing -ads -butcher -bowler -##lip -cigarettes -subscription -goodness -##ically -browne -##hos -##tech -kyoto -donor -##erty -damaging -friction -drifting -expeditions -hardened -prostitution -152 -fauna -blankets 
-claw -tossing -snarled -butterflies -recruits -investigative -coated -healed -138 -communal -hai -xiii -academics -boone -psychologist -restless -lahore -stephens -mba -brendan -foreigners -printer -##pc -ached -explode -27th -deed -scratched -dared -##pole -cardiac -1780 -okinawa -proto -commando -compelled -oddly -electrons -##base -replica -thanksgiving -##rist -sheila -deliberate -stafford -tidal -representations -hercules -ou -##path -##iated -kidnapping -lenses -##tling -deficit -samoa -mouths -consuming -computational -maze -granting -smirk -razor -fixture -ideals -inviting -aiden -nominal -##vs -issuing -julio -pitt -ramsey -docks -##oss -exhaust -##owed -bavarian -draped -anterior -mating -ethiopian -explores -noticing -##nton -discarded -convenience -hoffman -endowment -beasts -cartridge -mormon -paternal -probe -sleeves -interfere -lump -deadline -##rail -jenks -bulldogs -scrap -alternating -justified -reproductive -nam -seize -descending -secretariat -kirby -coupe -grouped -smash -panther -sedan -tapping -##18 -lola -cheer -germanic -unfortunate -##eter -unrelated -##fan -subordinate -##sdale -suzanne -advertisement -##ility -horsepower -##lda -cautiously -discourse -luigi -##mans -##fields -noun -prevalent -mao -schneider -everett -surround -governorate -kira -##avia -westward -##take -misty -rails -sustainability -134 -unused -##rating -packs -toast -unwilling -regulate -thy -suffrage -nile -awe -assam -definitions -travelers -affordable -##rb -conferred -sells -undefeated -beneficial -torso -basal -repeating -remixes -##pass -bahrain -cables -fang -##itated -excavated -numbering -statutory -##rey -deluxe -##lian -forested -ramirez -derbyshire -zeus -slamming -transfers -astronomer -banana -lottery -berg -histories -bamboo -##uchi -resurrection -posterior -bowls -vaguely -##thi -thou -preserving -tensed -offence -##inas -meyrick -callum -ridden -watt -langdon -tying -lowland -snorted -daring -truman -##hale -##girl -aura -overly -filing -weighing -goa -infections -philanthropist -saunders -eponymous -##owski -latitude -perspectives -reviewing -mets -commandant -radial -##kha -flashlight -reliability -koch -vowels -amazed -ada -elaine -supper -##rth -##encies -predator -debated -soviets -cola -##boards -##nah -compartment -crooked -arbitrary -fourteenth -##ctive -havana -majors -steelers -clips -profitable -ambush -exited -packers -##tile -nude -cracks -fungi -##е -limb -trousers -josie -shelby -tens -frederic -##ος -definite -smoothly -constellation -insult -baton -discs -lingering -##nco -conclusions -lent -staging -becker -grandpa -shaky -##tron -einstein -obstacles -sk -adverse -elle -economically -##moto -mccartney -thor -dismissal -motions -readings -nostrils -treatise -##pace -squeezing -evidently -prolonged -1783 -venezuelan -je -marguerite -beirut -takeover -shareholders -##vent -denise -digit -airplay -norse -##bbling -imaginary -pills -hubert -blaze -vacated -eliminating -##ello -vine -mansfield -##tty -retrospective -barrow -borne -clutch -bail -forensic -weaving -##nett -##witz -desktop -citadel -promotions -worrying -dorset -ieee -subdivided -##iating -manned -expeditionary -pickup -synod -chuckle -185 -barney -##rz -##ffin -functionality -karachi -litigation -meanings -uc -lick -turbo -anders -##ffed -execute -curl -oppose -ankles -typhoon -##د -##ache -##asia -linguistics -compassion -pressures -grazing -perfection -##iting -immunity -monopoly -muddy -backgrounds -136 -namibia -francesca -monitors -attracting -stunt -tuition -##ии -vegetable -##mates -##quent 
-mgm -jen -complexes -forts -##ond -cellar -bites -seventeenth -royals -flemish -failures -mast -charities -##cular -peruvian -capitals -macmillan -ipswich -outward -frigate -postgraduate -folds -employing -##ouse -concurrently -fiery -##tai -contingent -nightmares -monumental -nicaragua -##kowski -lizard -mal -fielding -gig -reject -##pad -harding -##ipe -coastline -##cin -##nos -beethoven -humphrey -innovations -##tam -##nge -norris -doris -solicitor -huang -obey -141 -##lc -niagara -##tton -shelves -aug -bourbon -curry -nightclub -specifications -hilton -##ndo -centennial -dispersed -worm -neglected -briggs -sm -font -kuala -uneasy -plc -##nstein -##bound -##aking -##burgh -awaiting -pronunciation -##bbed -##quest -eh -optimal -zhu -raped -greens -presided -brenda -worries -##life -venetian -marxist -turnout -##lius -refined -braced -sins -grasped -sunderland -nickel -speculated -lowell -cyrillic -communism -fundraising -resembling -colonists -mutant -freddie -usc -##mos -gratitude -##run -mural -##lous -chemist -wi -reminds -28th -steals -tess -pietro -##ingen -promoter -ri -microphone -honoured -rai -sant -##qui -feather -##nson -burlington -kurdish -terrorists -deborah -sickness -##wed -##eet -hazard -irritated -desperation -veil -clarity -##rik -jewels -xv -##gged -##ows -##cup -berkshire -unfair -mysteries -orchid -winced -exhaustion -renovations -stranded -obe -infinity -##nies -adapt -redevelopment -thanked -registry -olga -domingo -noir -tudor -ole -##atus -commenting -behaviors -##ais -crisp -pauline -probable -stirling -wigan -##bian -paralympics -panting -surpassed -##rew -luca -barred -pony -famed -##sters -cassandra -waiter -carolyn -exported -##orted -andres -destructive -deeds -jonah -castles -vacancy -suv -##glass -1788 -orchard -yep -famine -belarusian -sprang -##forth -skinny -##mis -administrators -rotterdam -zambia -zhao -boiler -discoveries -##ride -##physics -lucius -disappointing -outreach -spoon -##frame -qualifications -unanimously -enjoys -regency -##iidae -stade -realism -veterinary -rodgers -dump -alain -chestnut -castile -censorship -rumble -gibbs -##itor -communion -reggae -inactivated -logs -loads -##houses -homosexual -##iano -ale -informs -##cas -phrases -plaster -linebacker -ambrose -kaiser -fascinated -850 -limerick -recruitment -forge -mastered -##nding -leinster -rooted -threaten -##strom -borneo -##hes -suggestions -scholarships -propeller -documentaries -patronage -coats -constructing -invest -neurons -comet -entirety -shouts -identities -annoying -unchanged -wary -##antly -##ogy -neat -oversight -##kos -phillies -replay -constance -##kka -incarnation -humble -skies -minus -##acy -smithsonian -##chel -guerrilla -jar -cadets -##plate -surplus -audit -##aru -cracking -joanna -louisa -pacing -##lights -intentionally -##iri -diner -nwa -imprint -australians -tong -unprecedented -bunker -naive -specialists -ark -nichols -railing -leaked -pedal -##uka -shrub -longing -roofs -v8 -captains -neural -tuned -##ntal -##jet -emission -medina -frantic -codex -definitive -sid -abolition -intensified -stocks -enrique -sustain -genoa -oxide -##written -clues -cha -##gers -tributaries -fragment -venom -##rity -##ente -##sca -muffled -vain -sire -laos -##ingly -##hana -hastily -snapping -surfaced -sentiment -motive -##oft -contests -approximate -mesa -luckily -dinosaur -exchanges -propelled -accord -bourne -relieve -tow -masks -offended -##ues -cynthia -##mmer -rains -bartender -zinc -reviewers -lois -##sai -legged -arrogant -rafe -rosie -comprise -handicap -blockade 
-inlet -lagoon -copied -drilling -shelley -petals -##inian -mandarin -obsolete -##inated -onward -arguably -productivity -cindy -praising -seldom -busch -discusses -raleigh -shortage -ranged -stanton -encouragement -firstly -conceded -overs -temporal -##uke -cbe -##bos -woo -certainty -pumps -##pton -stalked -##uli -lizzie -periodic -thieves -weaker -##night -gases -shoving -chooses -wc -##chemical -prompting -weights -##kill -robust -flanked -sticky -hu -tuberculosis -##eb -##eal -christchurch -resembled -wallet -reese -inappropriate -pictured -distract -fixing -fiddle -giggled -burger -heirs -hairy -mechanic -torque -apache -obsessed -chiefly -cheng -logging -##tag -extracted -meaningful -numb -##vsky -gloucestershire -reminding -##bay -unite -##lit -breeds -diminished -clown -glove -1860s -##Ù† -##ug -archibald -focal -freelance -sliced -depiction -##yk -organism -switches -sights -stray -crawling -##ril -lever -leningrad -interpretations -loops -anytime -reel -alicia -delighted -##ech -inhaled -xiv -suitcase -bernie -vega -licenses -northampton -exclusion -induction -monasteries -racecourse -homosexuality -##right -##sfield -##rky -dimitri -michele -alternatives -ions -commentators -genuinely -objected -pork -hospitality -fencing -stephan -warships -peripheral -wit -drunken -wrinkled -quentin -spends -departing -chung -numerical -spokesperson -##zone -johannesburg -caliber -killers -##udge -assumes -neatly -demographic -abigail -bloc -##vel -mounting -##lain -bentley -slightest -xu -recipients -##jk -merlin -##writer -seniors -prisons -blinking -hindwings -flickered -kappa -##hel -80s -strengthening -appealing -brewing -gypsy -mali -lashes -hulk -unpleasant -harassment -bio -treaties -predict -instrumentation -pulp -troupe -boiling -mantle -##ffe -ins -##vn -dividing -handles -verbs -##onal -coconut -senegal -340 -thorough -gum -momentarily -##sto -cocaine -panicked -destined -##turing -teatro -denying -weary -captained -mans -##hawks -##code -wakefield -bollywood -thankfully -##16 -cyril -##wu -amendments -##bahn -consultation -stud -reflections -kindness -1787 -internally -##ovo -tex -mosaic -distribute -paddy -seeming -143 -##hic -piers -##15 -##mura -##verse -popularly -winger -kang -sentinel -mccoy -##anza -covenant -##bag -verge -fireworks -suppress -thrilled -dominate -##jar -swansea -##60 -142 -reconciliation -##ndi -stiffened -cue -dorian -##uf -damascus -amor -ida -foremost -##aga -porsche -unseen -dir -##had -##azi -stony -lexi -melodies -##nko -angular -integer -podcast -ants -inherent -jaws -justify -persona -##olved -josephine -##nr -##ressed -customary -flashes -gala -cyrus -glaring -backyard -ariel -physiology -greenland -html -stir -avon -atletico -finch -methodology -ked -##lent -mas -catholicism -townsend -branding -quincy -fits -containers -1777 -ashore -aragon -##19 -forearm -poisoning -##sd -adopting -conquer -grinding -amnesty -keller -finances -evaluate -forged -lankan -instincts -##uto -guam -bosnian -photographed -workplace -desirable -protector -##dog -allocation -intently -encourages -willy -##sten -bodyguard -electro -brighter -##ν -bihar -##chev -lasts -opener -amphibious -sal -verde -arte -##cope -captivity -vocabulary -yields -##tted -agreeing -desmond -pioneered -##chus -strap -campaigned -railroads -##ович -emblem -##dre -stormed -501 -##ulous -marijuana -northumberland -##gn -##nath -bowen -landmarks -beaumont -##qua -danube -##bler -attorneys -th -ge -flyers -critique -villains -cass -mutation -acc -##0s -colombo -mckay -motif -sampling -concluding 
-syndicate -##rell -neon -stables -ds -warnings -clint -mourning -wilkinson -##tated -merrill -leopard -evenings -exhaled -emil -sonia -ezra -discrete -stove -farrell -fifteenth -prescribed -superhero -##rier -worms -helm -wren -##duction -##hc -expo -##rator -hq -unfamiliar -antony -prevents -acceleration -fiercely -mari -painfully -calculations -cheaper -ign -clifton -irvine -davenport -mozambique -##np -pierced -##evich -wonders -##wig -##cate -##iling -crusade -ware -##uel -enzymes -reasonably -mls -##coe -mater -ambition -bunny -eliot -kernel -##fin -asphalt -headmaster -torah -aden -lush -pins -waived -##care -##yas -joao -substrate -enforce -##grad -##ules -alvarez -selections -epidemic -tempted -##bit -bremen -translates -ensured -waterfront -29th -forrest -manny -malone -kramer -reigning -cookies -simpler -absorption -205 -engraved -##ffy -evaluated -1778 -haze -146 -comforting -crossover -##abe -thorn -##rift -##imo -##pop -suppression -fatigue -cutter -##tr -201 -wurttemberg -##orf -enforced -hovering -proprietary -gb -samurai -syllable -ascent -lacey -tick -lars -tractor -merchandise -rep -bouncing -defendants -##yre -huntington -##ground -##oko -standardized -##hor -##hima -assassinated -nu -predecessors -rainy -liar -assurance -lyrical -##uga -secondly -flattened -ios -parameter -undercover -##mity -bordeaux -punish -ridges -markers -exodus -inactive -hesitate -debbie -nyc -pledge -savoy -nagar -offset -organist -##tium -hesse -marin -converting -##iver -diagram -propulsion -pu -validity -reverted -supportive -##dc -ministries -clans -responds -proclamation -##inae -##ø -##rea -ein -pleading -patriot -sf -birch -islanders -strauss -hates -##dh -brandenburg -concession -rd -##ob -1900s -killings -textbook -antiquity -cinematography -wharf -embarrassing -setup -creed -farmland -inequality -centred -signatures -fallon -370 -##ingham -##uts -ceylon -gazing -directive -laurie -##tern -globally -##uated -##dent -allah -excavation -threads -##cross -148 -frantically -icc -utilize -determines -respiratory -thoughtful -receptions -##dicate -merging -chandra -seine -147 -builders -builds -diagnostic -dev -visibility -goddamn -analyses -dhaka -cho -proves -chancel -concurrent -curiously -canadians -pumped -restoring -1850s -turtles -jaguar -sinister -spinal -traction -declan -vows -1784 -glowed -capitalism -swirling -install -universidad -##lder -##oat -soloist -##genic -##oor -coincidence -beginnings -nissan -dip -resorts -caucasus -combustion -infectious -##eno -pigeon -serpent -##itating -conclude -masked -salad -jew -##gr -surreal -toni -##wc -harmonica -151 -##gins -##etic -##coat -fishermen -intending -bravery -##wave -klaus -titan -wembley -taiwanese -ransom -40th -incorrect -hussein -eyelids -jp -cooke -dramas -utilities -##etta -##print -eisenhower -principally -granada -lana -##rak -openings -concord -##bl -bethany -connie -morality -sega -##mons -##nard -earnings -##kara -##cine -wii -communes -##rel -coma -composing -softened -severed -grapes -##17 -nguyen -analyzed -warlord -hubbard -heavenly -behave -slovenian -##hit -##ony -hailed -filmmakers -trance -caldwell -skye -unrest -coward -likelihood -##aging -bern -sci -taliban -honolulu -propose -##wang -1700 -browser -imagining -cobra -contributes -dukes -instinctively -conan -violinist -##ores -accessories -gradual -##amp -quotes -sioux -##dating -undertake -intercepted -sparkling -compressed -139 -fungus -tombs -haley -imposing -rests -degradation -lincolnshire -retailers -wetlands -tulsa -distributor -dungeon -nun 
-greenhouse -convey -atlantis -aft -exits -oman -dresser -lyons -##sti -joking -eddy -judgement -omitted -digits -##cts -##game -juniors -##rae -cents -stricken -une -##ngo -wizards -weir -breton -nan -technician -fibers -liking -royalty -##cca -154 -persia -terribly -magician -##rable -##unt -vance -cafeteria -booker -camille -warmer -##static -consume -cavern -gaps -compass -contemporaries -foyer -soothing -graveyard -maj -plunged -blush -##wear -cascade -demonstrates -ordinance -##nov -boyle -##lana -rockefeller -shaken -banjo -izzy -##ense -breathless -vines -##32 -##eman -alterations -chromosome -dwellings -feudal -mole -153 -catalonia -relics -tenant -mandated -##fm -fridge -hats -honesty -patented -raul -heap -cruisers -accusing -enlightenment -infants -wherein -chatham -contractors -zen -affinity -hc -osborne -piston -156 -traps -maturity -##rana -lagos -##zal -peering -##nay -attendant -dealers -protocols -subset -prospects -biographical -##cre -artery -##zers -insignia -nuns -endured -##eration -recommend -schwartz -serbs -berger -cromwell -crossroads -##ctor -enduring -clasped -grounded -##bine -marseille -twitched -abel -choke -https -catalyst -moldova -italians -##tist -disastrous -wee -##oured -##nti -wwf -nope -##piration -##asa -expresses -thumbs -167 -##nza -coca -1781 -cheating -##ption -skipped -sensory -heidelberg -spies -satan -dangers -semifinal -202 -bohemia -whitish -confusing -shipbuilding -relies -surgeons -landings -ravi -baku -moor -suffix -alejandro -##yana -litre -upheld -##unk -rajasthan -##rek -coaster -insists -posture -scenarios -etienne -favoured -appoint -transgender -elephants -poked -greenwood -defences -fulfilled -militant -somali -1758 -chalk -potent -##ucci -migrants -wink -assistants -nos -restriction -activism -niger -##ario -colon -shaun -##sat -daphne -##erated -swam -congregations -reprise -considerations -magnet -playable -xvi -##Ñ€ -overthrow -tobias -knob -chavez -coding -##mers -propped -katrina -orient -newcomer -##suke -temperate -##pool -farmhouse -interrogation -##vd -committing -##vert -forthcoming -strawberry -joaquin -macau -ponds -shocking -siberia -##cellular -chant -contributors -##nant -##ologists -sped -absorb -hail -1782 -spared -##hore -barbados -karate -opus -originates -saul -##xie -evergreen -leaped -##rock -correlation -exaggerated -weekday -unification -bump -tracing -brig -afb -pathways -utilizing -##ners -mod -mb -disturbance -kneeling -##stad -##guchi -100th -pune -##thy -decreasing -168 -manipulation -miriam -academia -ecosystem -occupational -rbi -##lem -rift -##14 -rotary -stacked -incorporation -awakening -generators -guerrero -racist -##omy -cyber -derivatives -culminated -allie -annals -panzer -sainte -wikipedia -pops -zu -austro -##vate -algerian -politely -nicholson -mornings -educate -tastes -thrill -dartmouth -##gating -db -##jee -regan -differing -concentrating -choreography -divinity -##media -pledged -alexandre -routing -gregor -madeline -##idal -apocalypse -##hora -gunfire -culminating -elves -fined -liang -lam -programmed -tar -guessing -transparency -gabrielle -##gna -cancellation -flexibility -##lining -accession -shea -stronghold -nets -specializes -##rgan -abused -hasan -sgt -ling -exceeding -##â‚„ -admiration -supermarket -##ark -photographers -specialised -tilt -resonance -hmm -perfume -380 -sami -threatens -garland -botany -guarding -boiled -greet -puppy -russo -supplier -wilmington -vibrant -vijay -##bius -paralympic -grumbled -paige -faa -licking -margins -hurricanes -##gong -fest -grenade 
-ripping -##uz -counseling -weigh -##sian -needles -wiltshire -edison -costly -##not -fulton -tramway -redesigned -staffordshire -cache -gasping -watkins -sleepy -candidacy -##group -monkeys -timeline -throbbing -##bid -##sos -berth -uzbekistan -vanderbilt -bothering -overturned -ballots -gem -##iger -sunglasses -subscribers -hooker -compelling -ang -exceptionally -saloon -stab -##rdi -carla -terrifying -rom -##vision -coil -##oids -satisfying -vendors -31st -mackay -deities -overlooked -ambient -bahamas -felipe -olympia -whirled -botanist -advertised -tugging -##dden -disciples -morales -unionist -rites -foley -morse -motives -creepy -##â‚€ -soo -##sz -bargain -highness -frightening -turnpike -tory -reorganization -##cer -depict -biographer -##walk -unopposed -manifesto -##gles -institut -emile -accidental -kapoor -##dam -kilkenny -cortex -lively -##13 -romanesque -jain -shan -cannons -##ood -##ske -petrol -echoing -amalgamated -disappears -cautious -proposes -sanctions -trenton -##ر -flotilla -aus -contempt -tor -canary -cote -theirs -##hun -conceptual -deleted -fascinating -paso -blazing -elf -honourable -hutchinson -##eiro -##outh -##zin -surveyor -tee -amidst -wooded -reissue -intro -##ono -cobb -shelters -newsletter -hanson -brace -encoding -confiscated -dem -caravan -marino -scroll -melodic -cows -imam -##adi -##aneous -northward -searches -biodiversity -cora -310 -roaring -##bers -connell -theologian -halo -compose -pathetic -unmarried -dynamo -##oot -az -calculation -toulouse -deserves -humour -nr -forgiveness -tam -undergone -martyr -pamela -myths -whore -counselor -hicks -290 -heavens -battleship -electromagnetic -##bbs -stellar -establishments -presley -hopped -##chin -temptation -90s -wills -nas -##yuan -nhs -##nya -seminars -##yev -adaptations -gong -asher -lex -indicator -sikh -tobago -cites -goin -##yte -satirical -##gies -characterised -correspond -bubbles -lure -participates -##vid -eruption -skate -therapeutic -1785 -canals -wholesale -defaulted -sac -460 -petit -##zzled -virgil -leak -ravens -256 -portraying -##yx -ghetto -creators -dams -portray -vicente -##rington -fae -namesake -bounty -##arium -joachim -##ota -##iser -aforementioned -axle -snout -depended -dismantled -reuben -480 -##ibly -gallagher -##lau -##pd -earnest -##ieu -##iary -inflicted -objections -##llar -asa -gritted -##athy -jericho -##sea -##was -flick -underside -ceramics -undead -substituted -195 -eastward -undoubtedly -wheeled -chimney -##iche -guinness -cb -##ager -siding -##bell -traitor -baptiste -disguised -inauguration -149 -tipperary -choreographer -perched -warmed -stationary -eco -##ike -##ntes -bacterial -##aurus -flores -phosphate -##core -attacker -invaders -alvin -intersects -a1 -indirectly -immigrated -businessmen -cornelius -valves -narrated -pill -sober -ul -nationale -monastic -applicants -scenery -##jack -161 -motifs -constitutes -cpu -##osh -jurisdictions -sd -tuning -irritation -woven -##uddin -fertility -gao -##erie -antagonist -impatient -glacial -hides -boarded -denominations -interception -##jas -cookie -nicola -##tee -algebraic -marquess -bahn -parole -buyers -bait -turbines -paperwork -bestowed -natasha -renee -oceans -purchases -157 -vaccine -215 -##tock -fixtures -playhouse -integrate -jai -oswald -intellectuals -##cky -booked -nests -mortimer -##isi -obsession -sept -##gler -##sum -440 -scrutiny -simultaneous -squinted -##shin -collects -oven -shankar -penned -remarkably -##Ñ -slips -luggage -spectral -1786 -collaborations -louie -consolidation -##ailed -##ivating -420 
-hoover -blackpool -harness -ignition -vest -tails -belmont -mongol -skinner -##nae -visually -mage -derry -##tism -##unce -stevie -transitional -##rdy -redskins -drying -prep -prospective -##21 -annoyance -oversee -##loaded -fills -##books -##iki -announces -fda -scowled -respects -prasad -mystic -tucson -##vale -revue -springer -bankrupt -1772 -aristotle -salvatore -habsburg -##geny -dal -natal -nut -pod -chewing -darts -moroccan -walkover -rosario -lenin -punjabi -##ße -grossed -scattering -wired -invasive -hui -polynomial -corridors -wakes -gina -portrays -##cratic -arid -retreating -erich -irwin -sniper -##dha -linen -lindsey -maneuver -butch -shutting -socio -bounce -commemorative -postseason -jeremiah -pines -275 -mystical -beads -bp -abbas -furnace -bidding -consulted -assaulted -empirical -rubble -enclosure -sob -weakly -cancel -polly -yielded -##emann -curly -prediction -battered -70s -vhs -jacqueline -render -sails -barked -detailing -grayson -riga -sloane -raging -##yah -herbs -bravo -##athlon -alloy -giggle -imminent -suffers -assumptions -waltz -##itate -accomplishments -##ited -bathing -remixed -deception -prefix -##emia -deepest -##tier -##eis -balkan -frogs -##rong -slab -##pate -philosophers -peterborough -grains -imports -dickinson -rwanda -##atics -1774 -dirk -lan -tablets -##rove -clone -##rice -caretaker -hostilities -mclean -##gre -regimental -treasures -norms -impose -tsar -tango -diplomacy -variously -complain -192 -recognise -arrests -1779 -celestial -pulitzer -##dus -bing -libretto -##moor -adele -splash -##rite -expectation -lds -confronts -##izer -spontaneous -harmful -wedge -entrepreneurs -buyer -##ope -bilingual -translate -rugged -conner -circulated -uae -eaton -##gra -##zzle -lingered -lockheed -vishnu -reelection -alonso -##oom -joints -yankee -headline -cooperate -heinz -laureate -invading -##sford -echoes -scandinavian -##dham -hugging -vitamin -salute -micah -hind -trader -##sper -radioactive -##ndra -militants -poisoned -ratified -remark -campeonato -deprived -wander -prop -##dong -outlook -##tani -##rix -##eye -chiang -darcy -##oping -mandolin -spice -statesman -babylon -182 -walled -forgetting -afro -##cap -158 -giorgio -buffer -##polis -planetary -##gis -overlap -terminals -kinda -centenary -##bir -arising -manipulate -elm -ke -1770 -ak -##tad -chrysler -mapped -moose -pomeranian -quad -macarthur -assemblies -shoreline -recalls -stratford -##rted -noticeable -##evic -imp -##rita -##sque -accustomed -supplying -tents -disgusted -vogue -sipped -filters -khz -reno -selecting -luftwaffe -mcmahon -tyne -masterpiece -carriages -collided -dunes -exercised -flare -remembers -muzzle -##mobile -heck -##rson -burgess -lunged -middleton -boycott -bilateral -##sity -hazardous -lumpur -multiplayer -spotlight -jackets -goldman -liege -porcelain -rag -waterford -benz -attracts -hopeful -battling -ottomans -kensington -baked -hymns -cheyenne -lattice -levine -borrow -polymer -clashes -michaels -monitored -commitments -denounced -##25 -##von -cavity -##oney -hobby -akin -##holders -futures -intricate -cornish -patty -##oned -illegally -dolphin -##lag -barlow -yellowish -maddie -apologized -luton -plagued -##puram -nana -##rds -sway -fanny -Å‚odz -##rino -psi -suspicions -hanged -##eding -initiate -charlton -##por -nak -competent -235 -analytical -annex -wardrobe -reservations -##rma -sect -162 -fairfax -hedge -piled -buckingham -uneven -bauer -simplicity -snyder -interpret -accountability -donors -moderately -byrd -continents -##cite -##max -disciple -hr -jamaican 
-ping -nominees -##uss -mongolian -diver -attackers -eagerly -ideological -pillows -miracles -apartheid -revolver -sulfur -clinics -moran -163 -##enko -ile -katy -rhetoric -##icated -chronology -recycling -##hrer -elongated -mughal -pascal -profiles -vibration -databases -domination -##fare -##rant -matthias -digest -rehearsal -polling -weiss -initiation -reeves -clinging -flourished -impress -ngo -##hoff -##ume -buckley -symposium -rhythms -weed -emphasize -transforming -##taking -##gence -##yman -accountant -analyze -flicker -foil -priesthood -voluntarily -decreases -##80 -##hya -slater -sv -charting -mcgill -##lde -moreno -##iu -besieged -zur -robes -##phic -admitting -api -deported -turmoil -peyton -earthquakes -##ares -nationalists -beau -clair -brethren -interrupt -welch -curated -galerie -requesting -164 -##ested -impending -steward -viper -##vina -complaining -beautifully -brandy -foam -nl -1660 -##cake -alessandro -punches -laced -explanations -##lim -attribute -clit -reggie -discomfort -##cards -smoothed -whales -##cene -adler -countered -duffy -disciplinary -widening -recipe -reliance -conducts -goats -gradient -preaching -##shaw -matilda -quasi -striped -meridian -cannabis -cordoba -certificates -##agh -##tering -graffiti -hangs -pilgrims -repeats -##ych -revive -urine -etat -##hawk -fueled -belts -fuzzy -susceptible -##hang -mauritius -salle -sincere -beers -hooks -##cki -arbitration -entrusted -advise -sniffed -seminar -junk -donnell -processors -principality -strapped -celia -mendoza -everton -fortunes -prejudice -starving -reassigned -steamer -##lund -tuck -evenly -foreman -##ffen -dans -375 -envisioned -slit -##xy -baseman -liberia -rosemary -##weed -electrified -periodically -potassium -stride -contexts -sperm -slade -mariners -influx -bianca -subcommittee -##rane -spilling -icao -estuary -##nock -delivers -iphone -##ulata -isa -mira -bohemian -dessert -##sbury -welcoming -proudly -slowing -##chs -musee -ascension -russ -##vian -waits -##psy -africans -exploit -##morphic -gov -eccentric -crab -peck -##ull -entrances -formidable -marketplace -groom -bolted -metabolism -patton -robbins -courier -payload -endure -##ifier -andes -refrigerator -##pr -ornate -##uca -ruthless -illegitimate -masonry -strasbourg -bikes -adobe -##³ -apples -quintet -willingly -niche -bakery -corpses -energetic -##cliffe -##sser -##ards -177 -centimeters -centro -fuscous -cretaceous -rancho -##yde -andrei -telecom -tottenham -oasis -ordination -vulnerability -presiding -corey -cp -penguins -sims -##pis -malawi -piss -##48 -correction -##cked -##ffle -##ryn -countdown -detectives -psychiatrist -psychedelic -dinosaurs -blouse -##get -choi -vowed -##oz -randomly -##pol -49ers -scrub -blanche -bruins -dusseldorf -##using -unwanted -##ums -212 -dominique -elevations -headlights -om -laguna -##oga -1750 -famously -ignorance -shrewsbury -##aine -ajax -breuning -che -confederacy -greco -overhaul -##screen -paz -skirts -disagreement -cruelty -jagged -phoebe -shifter -hovered -viruses -##wes -mandy -##lined -##gc -landlord -squirrel -dashed -##ι -ornamental -gag -wally -grange -literal -spurs -undisclosed -proceeding -yin -##text -billie -orphan -spanned -humidity -indy -weighted -presentations -explosions -lucian -##tary -vaughn -hindus -##anga -##hell -psycho -171 -daytona -protects -efficiently -rematch -sly -tandem -##oya -rebranded -impaired -hee -metropolis -peach -godfrey -diaspora -ethnicity -prosperous -gleaming -dar -grossing -playback -##rden -stripe -pistols -##tain -births -labelled -##cating 
-172 -rudy -alba -##onne -aquarium -hostility -##gb -##tase -shudder -sumatra -hardest -lakers -consonant -creeping -demos -homicide -capsule -zeke -liberties -expulsion -pueblo -##comb -trait -transporting -##ddin -##neck -##yna -depart -gregg -mold -ledge -hangar -oldham -playboy -termination -analysts -gmbh -romero -##itic -insist -cradle -filthy -brightness -slash -shootout -deposed -bordering -##truct -isis -microwave -tumbled -sheltered -cathy -werewolves -messy -andersen -convex -clapped -clinched -satire -wasting -edo -vc -rufus -##jak -mont -##etti -poznan -##keeping -restructuring -transverse -##rland -azerbaijani -slovene -gestures -roommate -choking -shear -##quist -vanguard -oblivious -##hiro -disagreed -baptism -##lich -coliseum -##aceae -salvage -societe -cory -locke -relocation -relying -versailles -ahl -swelling -##elo -cheerful -##word -##edes -gin -sarajevo -obstacle -diverted -##nac -messed -thoroughbred -fluttered -utrecht -chewed -acquaintance -assassins -dispatch -mirza -##wart -nike -salzburg -swell -yen -##gee -idle -ligue -samson -##nds -##igh -playful -spawned -##cise -tease -##case -burgundy -##bot -stirring -skeptical -interceptions -marathi -##dies -bedrooms -aroused -pinch -##lik -preferences -tattoos -buster -digitally -projecting -rust -##ital -kitten -priorities -addison -pseudo -##guard -dusk -icons -sermon -##psis -##iba -bt -##lift -##xt -ju -truce -rink -##dah -##wy -defects -psychiatry -offences -calculate -glucose -##iful -##rized -##unda -francaise -##hari -richest -warwickshire -carly -1763 -purity -redemption -lending -##cious -muse -bruises -cerebral -aero -carving -##name -preface -terminology -invade -monty -##int -anarchist -blurred -##iled -rossi -treats -guts -shu -foothills -ballads -undertaking -premise -cecilia -affiliates -blasted -conditional -wilder -minors -drone -rudolph -buffy -swallowing -horton -attested -##hop -rutherford -howell -primetime -livery -penal -##bis -minimize -hydro -wrecked -wrought -palazzo -##gling -cans -vernacular -friedman -nobleman -shale -walnut -danielle -##ection -##tley -sears -##kumar -chords -lend -flipping -streamed -por -dracula -gallons -sacrifices -gamble -orphanage -##iman -mckenzie -##gible -boxers -daly -##balls -##ان -208 -##ific -##rative -##iq -exploited -slated -##uity -circling -hillary -pinched -goldberg -provost -campaigning -lim -piles -ironically -jong -mohan -successors -usaf -##tem -##ught -autobiographical -haute -preserves -##ending -acquitted -comparisons -203 -hydroelectric -gangs -cypriot -torpedoes -rushes -chrome -derive -bumps -instability -fiat -pets -##mbe -silas -dye -reckless -settler -##itation -info -heats -##writing -176 -canonical -maltese -fins -mushroom -stacy -aspen -avid -##kur -##loading -vickers -gaston -hillside -statutes -wilde -gail -kung -sabine -comfortably -motorcycles -##rgo -169 -pneumonia -fetch -##sonic -axel -faintly -parallels -##oop -mclaren -spouse -compton -interdisciplinary -miner -##eni -181 -clamped -##chal -##llah -separates -versa -##mler -scarborough -labrador -##lity -##osing -rutgers -hurdles -como -166 -burt -divers -##100 -wichita -cade -coincided -##erson -bruised -mla -##pper -vineyard -##ili -##brush -notch -mentioning -jase -hearted -kits -doe -##acle -pomerania -##ady -ronan -seizure -pavel -problematic -##zaki -domenico -##ulin -catering -penelope -dependence -parental -emilio -ministerial -atkinson -##bolic -clarkson -chargers -colby -grill -peeked -arises -summon -##aged -fools -##grapher -faculties -qaeda -##vial -garner 
[Deleted WordPiece vocabulary file: thousands of removed "-<token>" lines (e.g. "-refurbished", "-##hwa", "-geelong") were collapsed together during extraction; the remainder of this deleted vocabulary content is omitted here.]
-constraint -domestically -chemotherapy -featherweight -stifled -##mum -darkly -irresistible -refreshing -hasty -isolate -##oys -kitchener -planners -##wehr -cages -yarn -implant -toulon -elects -childbirth -yue -##lind -##lone -cn -rightful -sportsman -junctions -remodeled -specifies -##rgh -291 -##oons -complimented -##urgent -lister -ot -##logic -bequeathed -cheekbones -fontana -gabby -##dial -amadeus -corrugated -maverick -resented -triangles -##hered -##usly -nazareth -tyrol -1675 -assent -poorer -sectional -aegean -##cous -296 -nylon -ghanaian -##egorical -##weig -cushions -forbid -fusiliers -obstruction -somerville -##scia -dime -earrings -elliptical -leyte -oder -polymers -timmy -atm -midtown -piloted -settles -continual -externally -mayfield -##uh -enrichment -henson -keane -persians -1733 -benji -braden -pep -324 -##efe -contenders -pepsi -valet -##isches -298 -##asse -##earing -goofy -stroll -##amen -authoritarian -occurrences -adversary -ahmedabad -tangent -toppled -dorchester -1672 -modernism -marxism -islamist -charlemagne -exponential -racks -unicode -brunette -mbc -pic -skirmish -##bund -##lad -##powered -##yst -hoisted -messina -shatter -##ctum -jedi -vantage -##music -##neil -clemens -mahmoud -corrupted -authentication -lowry -nils -##washed -omnibus -wounding -jillian -##itors -##opped -serialized -narcotics -handheld -##arm -##plicity -intersecting -stimulating -##onis -crate -fellowships -hemingway -casinos -climatic -fordham -copeland -drip -beatty -leaflets -robber -brothel -madeira -##hedral -sphinx -ultrasound -##vana -valor -forbade -leonid -villas -##aldo -duane -marquez -##cytes -disadvantaged -forearms -kawasaki -reacts -consular -lax -uncles -uphold -##hopper -concepcion -dorsey -lass -##izan -arching -passageway -1708 -researches -tia -internationals -##graphs -##opers -distinguishes -javanese -divert -##uven -plotted -##listic -##rwin -##erik -##tify -affirmative -signifies -validation -##bson -kari -felicity -georgina -zulu -##eros -##rained -##rath -overcoming -##dot -argyll -##rbin -1734 -chiba -ratification -windy -earls -parapet -##marks -hunan -pristine -astrid -punta -##gart -brodie -##kota -##oder -malaga -minerva -rouse -##phonic -bellowed -pagoda -portals -reclamation -##gur -##odies -##â„â‚„ -parentheses -quoting -allergic -palette -showcases -benefactor -heartland -nonlinear -##tness -bladed -cheerfully -scans -##ety -##hone -1666 -girlfriends -pedersen -hiram -sous -##liche -##nator -1683 -##nery -##orio -##umen -bobo -primaries -smiley -##cb -unearthed -uniformly -fis -metadata -1635 -ind -##oted -recoil -##titles -##tura -##ια -406 -hilbert -jamestown -mcmillan -tulane -seychelles -##frid -antics -coli -fated -stucco -##grants -1654 -bulky -accolades -arrays -caledonian -carnage -optimism -puebla -##tative -##cave -enforcing -rotherham -seo -dunlop -aeronautics -chimed -incline -zoning -archduke -hellenistic -##oses -##sions -candi -thong -##ople -magnate -rustic -##rsk -projective -slant -##offs -danes -hollis -vocalists -##ammed -congenital -contend -gesellschaft -##ocating -##pressive -douglass -quieter -##cm -##kshi -howled -salim -spontaneously -townsville -buena -southport -##bold -kato -1638 -faerie -stiffly -##vus -##rled -297 -flawless -realising -taboo -##7th -bytes -straightening -356 -jena -##hid -##rmin -cartwright -berber -bertram -soloists -411 -noses -417 -coping -fission -hardin -inca -##cen -1717 -mobilized -vhf -##raf -biscuits -curate -##85 -##anial -331 -gaunt -neighbourhoods -1540 -##abas -blanca -bypassed -sockets 
-behold -coincidentally -##bane -nara -shave -splinter -terrific -##arion -##erian -commonplace -juris -redwood -waistband -boxed -caitlin -fingerprints -jennie -naturalized -##ired -balfour -craters -jody -bungalow -hugely -quilt -glitter -pigeons -undertaker -bulging -constrained -goo -##sil -##akh -assimilation -reworked -##person -persuasion -##pants -felicia -##cliff -##ulent -1732 -explodes -##dun -##inium -##zic -lyman -vulture -hog -overlook -begs -northwards -ow -spoil -##urer -fatima -favorably -accumulate -sargent -sorority -corresponded -dispersal -kochi -toned -##imi -##lita -internacional -newfound -##agger -##lynn -##rigue -booths -peanuts -##eborg -medicare -muriel -nur -##uram -crates -millennia -pajamas -worsened -##breakers -jimi -vanuatu -yawned -##udeau -carousel -##hony -hurdle -##ccus -##mounted -##pod -rv -##eche -airship -ambiguity -compulsion -recapture -##claiming -arthritis -##osomal -1667 -asserting -ngc -sniffing -dade -discontent -glendale -ported -##amina -defamation -rammed -##scent -fling -livingstone -##fleet -875 -##ppy -apocalyptic -comrade -lcd -##lowe -cessna -eine -persecuted -subsistence -demi -hoop -reliefs -710 -coptic -progressing -stemmed -perpetrators -1665 -priestess -##nio -dobson -ebony -rooster -itf -tortricidae -##bbon -##jian -cleanup -##jean -##øy -1721 -eighties -taxonomic -holiness -##hearted -##spar -antilles -showcasing -stabilized -##nb -gia -mascara -michelangelo -dawned -##uria -##vinsky -extinguished -fitz -grotesque -£100 -##fera -##loid -##mous -barges -neue -throbbed -cipher -johnnie -##a1 -##mpt -outburst -##swick -spearheaded -administrations -c1 -heartbreak -pixels -pleasantly -##enay -lombardy -plush -##nsed -bobbie -##hly -reapers -tremor -xiang -minogue -substantive -hitch -barak -##wyl -kwan -##encia -910 -obscene -elegance -indus -surfer -bribery -conserve -##hyllum -##masters -horatio -##fat -apes -rebound -psychotic -##pour -iteration -##mium -##vani -botanic -horribly -antiques -dispose -paxton -##hli -##wg -timeless -1704 -disregard -engraver -hounds -##bau -##version -looted -uno -facilitates -groans -masjid -rutland -antibody -disqualification -decatur -footballers -quake -slacks -48th -rein -scribe -stabilize -commits -exemplary -tho -##hort -##chison -pantry -traversed -##hiti -disrepair -identifiable -vibrated -baccalaureate -##nnis -csa -interviewing -##iensis -##raße -greaves -wealthiest -343 -classed -jogged -£5 -##58 -##atal -illuminating -knicks -respecting -##uno -scrubbed -##iji -##dles -kruger -moods -growls -raider -silvia -chefs -kam -vr -cree -percival -##terol -gunter -counterattack -defiant -henan -ze -##rasia -##riety -equivalence -submissions -##fra -##thor -bautista -mechanically -##heater -cornice -herbal -templar -##mering -outputs -ruining -ligand -renumbered -extravagant -mika -blockbuster -eta -insurrection -##ilia -darkening -ferocious -pianos -strife -kinship -##aer -melee -##anor -##iste -##may -##oue -decidedly -weep -##jad -##missive -##ppel -354 -puget -unease -##gnant -1629 -hammering -kassel -ob -wessex -##lga -bromwich -egan -paranoia -utilization -##atable -##idad -contradictory -provoke -##ols -##ouring -##tangled -knesset -##very -##lette -plumbing -##sden -##¹ -greensboro -occult -sniff -338 -zev -beaming -gamer -haggard -mahal -##olt -##pins -mendes -utmost -briefing -gunnery -##gut -##pher -##zh -##rok -1679 -khalifa -sonya -##boot -principals -urbana -wiring -##liffe -##minating -##rrado -dahl -nyu -skepticism -np -townspeople -ithaca -lobster -somethin -##fur -##arina 
-##−1 -freighter -zimmerman -biceps -contractual -##herton -amend -hurrying -subconscious -##anal -336 -meng -clermont -spawning -##eia -##lub -dignitaries -impetus -snacks -spotting -twigs -##bilis -##cz -##ouk -libertadores -nic -skylar -##aina -##firm -gustave -asean -##anum -dieter -legislatures -flirt -bromley -trolls -umar -##bbies -##tyle -blah -parc -bridgeport -crank -negligence -##nction -46th -constantin -molded -bandages -seriousness -00pm -siegel -carpets -compartments -upbeat -statehood -##dner -##edging -marko -730 -platt -##hane -paving -##iy -1738 -abbess -impatience -limousine -nbl -##talk -441 -lucille -mojo -nightfall -robbers -##nais -karel -brisk -calves -replicate -ascribed -telescopes -##olf -intimidated -##reen -ballast -specialization -##sit -aerodynamic -caliphate -rainer -visionary -##arded -epsilon -##aday -##onte -aggregation -auditory -boosted -reunification -kathmandu -loco -robyn -402 -acknowledges -appointing -humanoid -newell -redeveloped -restraints -##tained -barbarians -chopper -1609 -italiana -##lez -##lho -investigates -wrestlemania -##anies -##bib -690 -##falls -creaked -dragoons -gravely -minions -stupidity -volley -##harat -##week -musik -##eries -##uously -fungal -massimo -semantics -malvern -##ahl -##pee -discourage -embryo -imperialism -1910s -profoundly -##ddled -jiangsu -sparkled -stat -##holz -sweatshirt -tobin -##iction -sneered -##cheon -##oit -brit -causal -smyth -##neuve -diffuse -perrin -silvio -##ipes -##recht -detonated -iqbal -selma -##nism -##zumi -roasted -##riders -tay -##ados -##mament -##mut -##rud -840 -completes -nipples -cfa -flavour -hirsch -##laus -calderon -sneakers -moravian -##ksha -1622 -rq -294 -##imeters -bodo -##isance -##pre -##ronia -anatomical -excerpt -##lke -dh -kunst -##tablished -##scoe -biomass -panted -unharmed -gael -housemates -montpellier -##59 -coa -rodents -tonic -hickory -singleton -##taro -451 -1719 -aldo -breaststroke -dempsey -och -rocco -##cuit -merton -dissemination -midsummer -serials -##idi -haji -polynomials -##rdon -gs -enoch -prematurely -shutter -taunton -£3 -##grating -##inates -archangel -harassed -##asco -326 -archway -dazzling -##ecin -1736 -sumo -wat -##kovich -1086 -honneur -##ently -##nostic -##ttal -##idon -1605 -403 -1716 -blogger -rents -##gnan -hires -##ikh -##dant -howie -##rons -handler -retracted -shocks -1632 -arun -duluth -kepler -trumpeter -##lary -peeking -seasoned -trooper -##mara -laszlo -##iciencies -##rti -heterosexual -##inatory -##ssion -indira -jogging -##inga -##lism -beit -dissatisfaction -malice -##ately -nedra -peeling -##rgeon -47th -stadiums -475 -vertigo -##ains -iced -restroom -##plify -##tub -illustrating -pear -##chner -##sibility -inorganic -rappers -receipts -watery -##kura -lucinda -##oulos -reintroduced -##8th -##tched -gracefully -saxons -nutritional -wastewater -rained -favourites -bedrock -fisted -hallways -likeness -upscale -##lateral -1580 -blinds -prequel -##pps -##tama -deter -humiliating -restraining -tn -vents -1659 -laundering -recess -rosary -tractors -coulter -federer -##ifiers -##plin -persistence -##quitable -geschichte -pendulum -quakers -##beam -bassett -pictorial -buffet -koln -##sitor -drills -reciprocal -shooters -##57 -##cton -##tees -converge -pip -dmitri -donnelly -yamamoto -aqua -azores -demographics -hypnotic -spitfire -suspend -wryly -roderick -##rran -sebastien -##asurable -mavericks -##fles -##200 -himalayan -prodigy -##iance -transvaal -demonstrators -handcuffs -dodged -mcnamara -sublime -1726 -crazed -##efined -##till -ivo 
-pondered -reconciled -shrill -sava -##duk -bal -cad -heresy -jaipur -goran -##nished -341 -lux -shelly -whitehall -##hre -israelis -peacekeeping -##wled -1703 -demetrius -ousted -##arians -##zos -beale -anwar -backstroke -raged -shrinking -cremated -##yck -benign -towing -wadi -darmstadt -landfill -parana -soothe -colleen -sidewalks -mayfair -tumble -hepatitis -ferrer -superstructure -##gingly -##urse -##wee -anthropological -translators -##mies -closeness -hooves -##pw -mondays -##roll -##vita -landscaping -##urized -purification -sock -thorns -thwarted -jalan -tiberius -##taka -saline -##rito -confidently -khyber -sculptors -##ij -brahms -hammersmith -inspectors -battista -fivb -fragmentation -hackney -##uls -arresting -exercising -antoinette -bedfordshire -##zily -dyed -##hema -1656 -racetrack -variability -##tique -1655 -austrians -deteriorating -madman -theorists -aix -lehman -weathered -1731 -decreed -eruptions -1729 -flaw -quinlan -sorbonne -flutes -nunez -1711 -adored -downwards -fable -rasped -1712 -moritz -mouthful -renegade -shivers -stunts -dysfunction -restrain -translit -327 -pancakes -##avio -##cision -##tray -351 -vial -##lden -bain -##maid -##oxide -chihuahua -malacca -vimes -##rba -##rnier -1664 -donnie -plaques -##ually -337 -bangs -floppy -huntsville -loretta -nikolay -##otte -eater -handgun -ubiquitous -##hett -eras -zodiac -1634 -##omorphic -1820s -##zog -cochran -##bula -##lithic -warring -##rada -dalai -excused -blazers -mcconnell -reeling -bot -este -##abi -geese -hoax -taxon -##bla -guitarists -##icon -condemning -hunts -inversion -moffat -taekwondo -##lvis -1624 -stammered -##rest -##rzy -sousa -fundraiser -marylebone -navigable -uptown -cabbage -daniela -salman -shitty -whimper -##kian -##utive -programmers -protections -rm -##rmi -##rued -forceful -##enes -fuss -##tao -##wash -brat -oppressive -reykjavik -spartak -ticking -##inkles -##kiewicz -adolph -horst -maui -protege -straighten -cpc -landau -concourse -clements -resultant -##ando -imaginative -joo -reactivated -##rem -##ffled -##uising -consultative -##guide -flop -kaitlyn -mergers -parenting -somber -##vron -supervise -vidhan -##imum -courtship -exemplified -harmonies -medallist -refining -##rrow -##ка -amara -##hum -780 -goalscorer -sited -overshadowed -rohan -displeasure -secretive -multiplied -osman -##orth -engravings -padre -##kali -##veda -miniatures -mis -##yala -clap -pali -rook -##cana -1692 -57th -antennae -astro -oskar -1628 -bulldog -crotch -hackett -yucatan -##sure -amplifiers -brno -ferrara -migrating -##gree -thanking -turing -##eza -mccann -ting -andersson -onslaught -gaines -ganga -incense -standardization -##mation -sentai -scuba -stuffing -turquoise -waivers -alloys -##vitt -regaining -vaults -##clops -##gizing -digger -furry -memorabilia -probing -##iad -payton -rec -deutschland -filippo -opaque -seamen -zenith -afrikaans -##filtration -disciplined -inspirational -##merie -banco -confuse -grafton -tod -##dgets -championed -simi -anomaly -biplane -##ceptive -electrode -##para -1697 -cleavage -crossbow -swirl -informant -##lars -##osta -afi -bonfire -spec -##oux -lakeside -slump -##culus -##lais -##qvist -##rrigan -1016 -facades -borg -inwardly -cervical -xl -pointedly -050 -stabilization -##odon -chests -1699 -hacked -ctv -orthogonal -suzy -##lastic -gaulle -jacobite -rearview -##cam -##erted -ashby -##drik -##igate -##mise -##zbek -affectionately -canine -disperse -latham -##istles -##ivar -spielberg -##orin -##idium -ezekiel -cid -##sg -durga -middletown -##cina -customized 
-frontiers -harden -##etano -##zzy -1604 -bolsheviks -##66 -coloration -yoko -##bedo -briefs -slabs -debra -liquidation -plumage -##oin -blossoms -dementia -subsidy -1611 -proctor -relational -jerseys -parochial -ter -##ici -esa -peshawar -cavalier -loren -cpi -idiots -shamrock -1646 -dutton -malabar -mustache -##endez -##ocytes -referencing -terminates -marche -yarmouth -##sop -acton -mated -seton -subtly -baptised -beige -extremes -jolted -kristina -telecast -##actic -safeguard -waldo -##baldi -##bular -endeavors -sloppy -subterranean -##ensburg -##itung -delicately -pigment -tq -##scu -1626 -##ound -collisions -coveted -herds -##personal -##meister -##nberger -chopra -##ricting -abnormalities -defective -galician -lucie -##dilly -alligator -likened -##genase -burundi -clears -complexion -derelict -deafening -diablo -fingered -champaign -dogg -enlist -isotope -labeling -mrna -##erre -brilliance -marvelous -##ayo -1652 -crawley -ether -footed -dwellers -deserts -hamish -rubs -warlock -skimmed -##lizer -870 -buick -embark -heraldic -irregularities -##ajan -kiara -##kulam -##ieg -antigen -kowalski -##lge -oakley -visitation -##mbit -vt -##suit -1570 -murderers -##miento -##rites -chimneys -##sling -condemn -custer -exchequer -havre -##ghi -fluctuations -##rations -dfb -hendricks -vaccines -##tarian -nietzsche -biking -juicy -##duced -brooding -scrolling -selangor -##ragan -352 -annum -boomed -seminole -sugarcane -##dna -departmental -dismissing -innsbruck -arteries -ashok -batavia -daze -kun -overtook -##rga -##tlan -beheaded -gaddafi -holm -electronically -faulty -galilee -fractures -kobayashi -##lized -gunmen -magma -aramaic -mala -eastenders -inference -messengers -bf -##qu -407 -bathrooms -##vere -1658 -flashbacks -ideally -misunderstood -##jali -##weather -mendez -##grounds -505 -uncanny -##iii -1709 -friendships -##nbc -sacrament -accommodated -reiterated -logistical -pebbles -thumped -##escence -administering -decrees -drafts -##flight -##cased -##tula -futuristic -picket -intimidation -winthrop -##fahan -interfered -339 -afar -francoise -morally -uta -cochin -croft -dwarfs -##bruck -##dents -##nami -biker -##hner -##meral -nano -##isen -##ometric -##pres -##ан -brightened -meek -parcels -securely -gunners -##jhl -##zko -agile -hysteria -##lten -##rcus -bukit -champs -chevy -cuckoo -leith -sadler -theologians -welded -##section -1663 -jj -plurality -xander -##rooms -##formed -shredded -temps -intimately -pau -tormented -##lok -##stellar -1618 -charred -ems -essen -##mmel -alarms -spraying -ascot -blooms -twinkle -##abia -##apes -internment -obsidian -##chaft -snoop -##dav -##ooping -malibu -##tension -quiver -##itia -hays -mcintosh -travers -walsall -##ffie -1623 -beverley -schwarz -plunging -structurally -m3 -rosenthal -vikram -##tsk -770 -ghz -##onda -##tiv -chalmers -groningen -pew -reckon -unicef -##rvis -55th -##gni -1651 -sulawesi -avila -cai -metaphysical -screwing -turbulence -##mberg -augusto -samba -56th -baffled -momentary -toxin -##urian -##wani -aachen -condoms -dali -steppe -##3d -##app -##oed -##year -adolescence -dauphin -electrically -inaccessible -microscopy -nikita -##ega -atv -##cel -##enter -##oles -##oteric -##Ñ‹ -accountants -punishments -wrongly -bribes -adventurous -clinch -flinders -southland -##hem -##kata -gough -##ciency -lads -soared -##×” -undergoes -deformation -outlawed -rubbish -##arus -##mussen -##nidae -##rzburg -arcs -##ingdon -##tituted -1695 -wheelbase -wheeling -bombardier -campground -zebra -##lices -##oj -##bain -lullaby -##ecure -donetsk 
-wylie -grenada -##arding -##ης -squinting -eireann -opposes -##andra -maximal -runes -##broken -##cuting -##iface -##ror -##rosis -additive -britney -adultery -triggering -##drome -detrimental -aarhus -containment -jc -swapped -vichy -##ioms -madly -##oric -##rag -brant -##ckey -##trix -1560 -1612 -broughton -rustling -##stems -##uder -asbestos -mentoring -##nivorous -finley -leaps -##isan -apical -pry -slits -substitutes -##dict -intuitive -fantasia -insistent -unreasonable -##igen -##vna -domed -hannover -margot -ponder -##zziness -impromptu -jian -lc -rampage -stemming -##eft -andrey -gerais -whichever -amnesia -appropriated -anzac -clicks -modifying -ultimatum -cambrian -maids -verve -yellowstone -##mbs -conservatoire -##scribe -adherence -dinners -spectra -imperfect -mysteriously -sidekick -tatar -tuba -##aks -##ifolia -distrust -##athan -##zle -c2 -ronin -zac -##pse -celaena -instrumentalist -scents -skopje -##mbling -comical -compensated -vidal -condor -intersect -jingle -wavelengths -##urrent -mcqueen -##izzly -carp -weasel -422 -kanye -militias -postdoctoral -eugen -gunslinger -##É› -faux -hospice -##for -appalled -derivation -dwarves -##elis -dilapidated -##folk -astoria -philology -##lwyn -##otho -##saka -inducing -philanthropy -##bf -##itative -geek -markedly -sql -##yce -bessie -indices -rn -##flict -495 -frowns -resolving -weightlifting -tugs -cleric -contentious -1653 -mania -rms -##miya -##reate -##ruck -##tucket -bien -eels -marek -##ayton -##cence -discreet -unofficially -##ife -leaks -##bber -1705 -332 -dung -compressor -hillsborough -pandit -shillings -distal -##skin -381 -##tat -##you -nosed -##nir -mangrove -undeveloped -##idia -textures -##inho -##500 -##rise -ae -irritating -nay -amazingly -bancroft -apologetic -compassionate -kata -symphonies -##lovic -airspace -##lch -930 -gifford -precautions -fulfillment -sevilla -vulgar -martinique -##urities -looting -piccolo -tidy -##dermott -quadrant -armchair -incomes -mathematicians -stampede -nilsson -##inking -##scan -foo -quarterfinal -##ostal -shang -shouldered -squirrels -##owe -344 -vinegar -##bner -##rchy -##systems -delaying -##trics -ars -dwyer -rhapsody -sponsoring -##gration -bipolar -cinder -starters -##olio -##urst -421 -signage -##nty -aground -figurative -mons -acquaintances -duets -erroneously -soyuz -elliptic -recreated -##cultural -##quette -##ssed -##tma -##zcz -moderator -scares -##itaire -##stones -##udence -juniper -sighting -##just -##nsen -britten -calabria -ry -bop -cramer -forsyth -stillness -##л -airmen -gathers -unfit -##umber -##upt -taunting -##rip -seeker -streamlined -##bution -holster -schumann -tread -vox -##gano -##onzo -strive -dil -reforming -covent -newbury -predicting -##orro -decorate -tre -##puted -andover -ie -asahi -dept -dunkirk -gills -##tori -buren -huskies -##stis -##stov -abstracts -bets -loosen -##opa -1682 -yearning -##glio -##sir -berman -effortlessly -enamel -napoli -persist -##peration -##uez -attache -elisa -b1 -invitations -##kic -accelerating -reindeer -boardwalk -clutches -nelly -polka -starbucks -##kei -adamant -huey -lough -unbroken -adventurer -embroidery -inspecting -stanza -##ducted -naia -taluka -##pone -##roids -chases -deprivation -florian -##jing -##ppet -earthly -##lib -##ssee -colossal -foreigner -vet -freaks -patrice -rosewood -triassic -upstate -##pkins -dominates -ata -chants -ks -vo -##400 -##bley -##raya -##rmed -555 -agra -infiltrate -##ailing -##ilation -##tzer -##uppe -##werk -binoculars -enthusiast -fujian -squeak -##avs -abolitionist -almeida 
-boredom -hampstead -marsden -rations -##ands -inflated -334 -bonuses -rosalie -patna -##rco -329 -detachments -penitentiary -54th -flourishing -woolf -##dion -##etched -papyrus -##lster -##nsor -##toy -bobbed -dismounted -endelle -inhuman -motorola -tbs -wince -wreath -##ticus -hideout -inspections -sanjay -disgrace -infused -pudding -stalks -##urbed -arsenic -leases -##hyl -##rrard -collarbone -##waite -##wil -dowry -##bant -##edance -genealogical -nitrate -salamanca -scandals -thyroid -necessitated -##! -##" -### -##$ -##% -##& -##' -##( -##) -##* -##+ -##, -##- -##. -##/ -##: -##; -##< -##= -##> -##? -##@ -##[ -##\ -##] -##^ -##_ -##` -##{ -##| -##} -##~ -##¡ -##¢ -##£ -##¤ -##Â¥ -##¦ -##§ -##¨ -##© -##ª -##« -##¬ -##® -##± -##´ -##µ -##¶ -##· -##º -##» -##¼ -##¾ -##¿ -##æ -##ð -##÷ -##þ -##Ä‘ -##ħ -##Å‹ -##Å“ -##Æ’ -##É -##É‘ -##É’ -##É” -##É• -##É™ -##É¡ -##É£ -##ɨ -##ɪ -##É« -##ɬ -##ɯ -##ɲ -##É´ -##ɹ -##ɾ -##Ê€ -##Ê -##Ê‚ -##ʃ -##ʉ -##ÊŠ -##Ê‹ -##ÊŒ -##ÊŽ -##Ê -##Ê‘ -##Ê’ -##Ê” -##ʰ -##ʲ -##ʳ -##Ê· -##ʸ -##Ê» -##ʼ -##ʾ -##Ê¿ -##ˈ -##Ë¡ -##Ë¢ -##Ë£ -##ˤ -##β -##γ -##δ -##ε -##ζ -##θ -##κ -##λ -##μ -##ξ -##ο -##Ï€ -##Ï -##σ -##Ï„ -##Ï… -##φ -##χ -##ψ -##ω -##б -##г -##д -##ж -##з -##м -##п -##Ñ -##у -##Ñ„ -##Ñ… -##ц -##ч -##ш -##щ -##ÑŠ -##Ñ -##ÑŽ -##Ñ’ -##Ñ” -##Ñ– -##ј -##Ñ™ -##Ñš -##Ñ› -##Ó -##Õ¡ -##Õ¢ -##Õ£ -##Õ¤ -##Õ¥ -##Õ© -##Õ« -##Õ¬ -##Õ¯ -##Õ° -##Õ´ -##Õµ -##Õ¶ -##Õ¸ -##Õº -##Õ½ -##Õ¾ -##Õ¿ -##Ö€ -##Ö‚ -##Ö„ -##Ö¾ -##× -##ב -##×’ -##ד -##ו -##×– -##×— -##ט -##×™ -##ך -##×› -##ל -##× -##מ -##ן -##×  -##ס -##×¢ -##×£ -##פ -##×¥ -##צ -##×§ -##ר -##ש -##ת -##ØŒ -##Ø¡ -##ب -##ت -##Ø« -##ج -##Ø­ -##Ø® -##ذ -##ز -##س -##Ø´ -##ص -##ض -##Ø· -##ظ -##ع -##غ -##Ù€ -##Ù -##Ù‚ -##Ùƒ -##Ùˆ -##Ù‰ -##Ù¹ -##Ù¾ -##Ú† -##Ú© -##Ú¯ -##Úº -##Ú¾ -##Û -##Û’ -##अ -##आ -##उ -##ठ-##क -##ख -##ग -##च -##ज -##ट -##ड -##ण -##त -##थ -##द -##ध -##न -##प -##ब -##भ -##म -##य -##र -##ल -##व -##श -##ष -##स -##ह -##ा -##ि -##ी -##ो -##। -##॥ -##ং -##অ -##আ -##ই -##উ -##ঠ-##ও -##ক -##খ -##গ -##চ -##ছ -##জ -##ট -##ড -##ণ -##ত -##থ -##দ -##ধ -##ন -##প -##ব -##ভ -##ম -##য -##র -##ল -##শ -##ষ -##স -##হ -##া -##ি -##à§€ -##ে -##க -##ச -##ட -##த -##ந -##ன -##ப -##à®® -##ய -##à®° -##ல -##ள -##வ -##ா -##ி -##௠-##ே -##ை -##ನ -##ರ -##ಾ -##à¶š -##ය -##à¶» -##à¶½ -##à·€ -##à· -##ภ-##ง -##ต -##ท -##น -##พ -##ม -##ย -##ร -##ล -##ว -##ส -##อ -##า -##เ -##་ -##༠-##ག -##ང -##ད -##ན -##པ -##བ -##མ -##འ -##ར -##ལ -##ས -##မ -##რ-##ბ -##გ -##დ -##ე -##ვ -##თ -##ი -##კ -##ლ -##მ -##ნ -##რ-##რ -##ს -##ტ -##უ -##á„€ -##á„‚ -##ᄃ -##á„… -##ᄆ -##ᄇ -##ᄉ -##ᄊ -##á„‹ -##ᄌ -##ᄎ -##á„ -##á„ -##á„‘ -##á„’ -##á…¡ -##á…¢ -##á…¥ -##á…¦ -##á…§ -##á…© -##á…ª -##á…­ -##á…® -##á…¯ -##á…² -##á…³ -##á…´ -##á…µ -##ᆨ -##ᆫ -##ᆯ -##ᆷ -##ᆸ -##ᆼ -##á´¬ -##á´® -##á´° -##á´µ -##á´º -##áµ€ -##ᵃ -##ᵇ -##ᵈ -##ᵉ -##áµ -##áµ -##áµ -##áµ’ -##áµ– -##áµ— -##ᵘ -##áµ£ -##ᵤ -##áµ¥ -##á¶œ -##á¶  -##†-##‑ -##‒ -##– -##— -##― -##‖ -##‘ -##’ -##‚ -##“ -##†-##„ -##† -##‡ -##• -##… -##‰ -##′ -##″ -##› -##‿ -##â„ -##â° -##â± -##â´ -##âµ -##â¶ -##â· -##⸠-##â¹ -##â» -##â¿ -##â‚… -##₆ -##₇ -##₈ -##₉ -##₊ -##â‚ -##₎ -##â‚ -##â‚‘ -##â‚’ -##â‚“ -##â‚• -##â‚– -##â‚— -##ₘ -##ₚ -##â‚› -##ₜ -##₤ -##â‚© -##€ -##₱ -##₹ -##â„“ -##â„– -##â„ -##â„¢ -##â…“ -##â…” -##↠-##↑ -##→ -##↓ -##↔ -##↦ -##⇄ -##⇌ -##⇒ -##∂ -##∅ -##∆ -##∇ -##∈ -##∗ -##∘ -##√ -##∞ -##∧ -##∨ -##∩ -##∪ -##≈ -##≡ -##≤ -##≥ -##⊂ -##⊆ -##⊕ -##⊗ -##â‹… -##─ -##│ -##â–  -##â–ª -##â— -##★ -##☆ -##☉ -##â™  -##♣ -##♥ -##♦ -##♯ -##⟨ -##⟩ -##â±¼ -##⺩ -##⺼ -##â½¥ -##〠-##。 -##〈 -##〉 -##《 -##》 -##「 -##〠-##『 -##〠-##〜 -##ã‚ -##ã„ 
-##ㆠ-##㈠-##㊠-##ã‹ -##ã -##ã -##ã‘ -##ã“ -##ã• -##ã— -##ã™ -##ã› -##ã -##㟠-##ã¡ -##㣠-##㤠-##㦠-##㨠-##㪠-##ã« -##㬠-##ã­ -##ã® -##㯠-##ã² -##ãµ -##㸠-##ã» -##ã¾ -##ã¿ -##ã‚€ -##ã‚ -##ã‚‚ -##ã‚„ -##ゆ -##よ -##ら -##り -##ã‚‹ -##れ -##ã‚ -##ã‚’ -##ã‚“ -##ã‚¡ -##ã‚¢ -##ã‚£ -##イ -##ウ -##ã‚§ -##エ -##オ -##ã‚« -##ã‚­ -##ク -##ケ -##コ -##サ -##ã‚· -##ス -##ã‚» -##ã‚¿ -##ム-##ッ -##ツ -##テ -##ト -##ナ -##ニ -##ノ -##ム-##ヒ -##フ -##ヘ -##ホ -##マ -##ミ -##ム -##メ -##モ -##ャ -##ュ -##ョ -##ラ -##リ -##ル -##レ -##ロ -##ワ -##ン -##・ -##ー -##一 -##三 -##上 -##下 -##ä¸ -##世 -##中 -##主 -##ä¹… -##之 -##也 -##事 -##二 -##五 -##井 -##京 -##人 -##亻 -##ä» -##介 -##代 -##ä»® -##伊 -##会 -##ä½ -##ä¾ -##ä¿ -##ä¿¡ -##å¥ -##å…ƒ -##å…‰ -##å…« -##å…¬ -##内 -##出 -##分 -##å‰ -##劉 -##力 -##加 -##å‹ -##北 -##区 -##å -##åƒ -##å— -##åš -##原 -##å£ -##å¤ -##å² -##å¸ -##åˆ -##å‰ -##åŒ -##å -##å’Œ -##å›— -##å›› -##国 -##國 -##土 -##地 -##å‚ -##城 -##å ‚ -##å ´ -##士 -##å¤ -##外 -##大 -##天 -##太 -##夫 -##奈 -##女 -##å­ -##å­¦ -##宀 -##宇 -##安 -##å®— -##定 -##宣 -##å®® -##å®¶ -##宿 -##寺 -##å°‡ -##å° -##å°š -##å±± -##岡 -##å³¶ -##å´Ž -##å· -##å·ž -##å·¿ -##å¸ -##å¹³ -##å¹´ -##幸 -##广 -##弘 -##å¼µ -##å½³ -##後 -##御 -##å¾· -##心 -##å¿„ -##å¿— -##å¿  -##æ„› -##æˆ -##我 -##戦 -##戸 -##手 -##扌 -##政 -##æ–‡ -##æ–° -##æ–¹ -##æ—¥ -##明 -##星 -##春 -##昭 -##智 -##曲 -##書 -##月 -##有 -##æœ -##木 -##本 -##æŽ -##æ‘ -##æ± -##æ¾ -##æž— -##森 -##楊 -##樹 -##æ©‹ -##æ­Œ -##æ­¢ -##æ­£ -##æ­¦ -##比 -##æ° -##æ°‘ -##æ°´ -##æ°µ -##æ°· -##æ°¸ -##江 -##æ²¢ -##æ²³ -##æ²» -##法 -##æµ· -##清 -##æ¼¢ -##瀬 -##ç« -##版 -##犬 -##王 -##生 -##ç”° -##ç”· -##ç–’ -##発 -##白 -##çš„ -##皇 -##ç›® -##相 -##çœ -##真 -##石 -##示 -##社 -##神 -##ç¦ -##禾 -##ç§€ -##ç§‹ -##空 -##ç«‹ -##ç«  -##竹 -##ç³¹ -##美 -##義 -##耳 -##良 -##艹 -##花 -##英 -##è¯ -##葉 -##è—¤ -##行 -##è¡— -##西 -##見 -##è¨ -##語 -##è°· -##è² -##è²´ -##車 -##è» -##è¾¶ -##é“ -##郎 -##郡 -##部 -##都 -##里 -##野 -##金 -##鈴 -##镇 -##é•· -##é–€ -##é–“ -##é˜ -##阿 -##陳 -##陽 -##雄 -##é’ -##é¢ -##風 -##食 -##香 -##馬 -##高 -##é¾ -##龸 -##ï¬ -##fl -##ï¼ -##( -##) -##, -##ï¼ -##. -##ï¼ -##: -##? -##~ diff --git a/samples/contrib/pytorch-samples/bert/bert_datamodule.py b/samples/contrib/pytorch-samples/bert/bert_datamodule.py deleted file mode 100644 index f550e23e976..00000000000 --- a/samples/contrib/pytorch-samples/bert/bert_datamodule.py +++ /dev/null @@ -1,162 +0,0 @@ -# !/usr/bin/env/python3 -# Copyright (c) Facebook, Inc. and its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""BERT Data Module Script.""" - -import numpy as np -import pyarrow.parquet as pq -import pytorch_lightning as pl -import torch -from sklearn.model_selection import train_test_split -from torch.utils.data import DataLoader -from transformers import BertTokenizer -from news_dataset import NewsDataset - - -class BertDataModule(pl.LightningDataModule): # pylint: disable=too-many-instance-attributes - """Data Module Class.""" - - def __init__(self, **kwargs): - """Initialization of inherited lightning data module.""" - super(BertDataModule, self).__init__() # pylint: disable=super-with-arguments - self.pre_trained_model_name = "bert-base-uncased" - self.df_train = None - self.df_val = None - self.df_test = None - self.train_data_loader = None - self.val_data_loader = None - self.test_data_loader = None - self.max_length = 100 - self.encoding = None - self.tokenizer = None - self.args = kwargs - - def prepare_data(self): - """Implementation of abstract class.""" - - @staticmethod - def process_label(rating): - """Puts labels to ratings""" - rating = int(rating) - return rating - 1 - - def setup(self, stage=None): - """Downloads the data, parse it and split the data into train, test, - validation data. - - Args: - stage: Stage - training or testing - """ - - num_samples = self.args.get("num_samples", 1000) - - data_path = self.args["train_glob"] - - print("\n\nTRAIN GLOB") - print(data_path) - print("\n\n") - - df_parquet = pq.ParquetDataset(self.args["train_glob"]) - - dataframe = df_parquet.read_pandas().to_pandas() - - dataframe.columns = ["label", "title", "description"] - dataframe.sample(frac=1) - dataframe = dataframe.iloc[:num_samples] - - dataframe["label"] = dataframe.label.apply(self.process_label) - - self.tokenizer = BertTokenizer.from_pretrained( - self.pre_trained_model_name - ) - - random_seed = 42 - np.random.seed(random_seed) - torch.manual_seed(random_seed) - - self.df_train, self.df_test = train_test_split( - dataframe, - test_size=0.2, - random_state=random_seed, - stratify=dataframe["label"], - ) - self.df_val, self.df_test = train_test_split( - self.df_test, - test_size=0.2, - random_state=random_seed, - stratify=self.df_test["label"], - ) - - def create_data_loader(self, dataframe, tokenizer, max_len, batch_size): # pylint: disable=unused-argument - """Generic data loader function. - - Args: - dataframe: Input dataframe - tokenizer: bert tokenizer - max_len: Max length of the news datapoint - batch_size: Batch size for training - - Returns: - Returns the constructed dataloader - """ - dataset = NewsDataset( - reviews=dataframe.description.to_numpy(), - targets=dataframe.label.to_numpy(), - tokenizer=tokenizer, - max_length=max_len, - ) - - return DataLoader( - dataset, - batch_size=self.args.get("batch_size", 4), - num_workers=self.args.get("num_workers", 1), - ) - - def train_dataloader(self): - """Train data loader - Returns: - output - Train data loader for the given input - """ - self.train_data_loader = self.create_data_loader( - self.df_train, - self.tokenizer, - self.max_length, - self.args.get("batch_size", 4), - ) - return self.train_data_loader - - def val_dataloader(self): - """Validation data loader. - Returns: - output - Validation data loader for the given input - """ - self.val_data_loader = self.create_data_loader( - self.df_val, - self.tokenizer, - self.max_length, - self.args.get("batch_size", 4), - ) - return self.val_data_loader - - def test_dataloader(self): - """Test data loader. 
- Return: - output - Test data loader for the given input - """ - self.test_data_loader = self.create_data_loader( - self.df_test, - self.tokenizer, - self.max_length, - self.args.get("batch_size", 4), - ) - return self.test_data_loader diff --git a/samples/contrib/pytorch-samples/bert/bert_handler.py b/samples/contrib/pytorch-samples/bert/bert_handler.py deleted file mode 100644 index dd172f291bf..00000000000 --- a/samples/contrib/pytorch-samples/bert/bert_handler.py +++ /dev/null @@ -1,207 +0,0 @@ -# !/usr/bin/env/python3 -# Copyright (c) Facebook, Inc. and its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# pylint: disable=no-self-use,too-many-arguments,unused-argument,not-callable,no-member,attribute-defined-outside-init -""" Bert Custom Handler.""" -from captum.attr import IntegratedGradients -import json -import logging -import os -import numpy as np -import torch -from transformers import BertTokenizer -from ts.torch_handler.base_handler import BaseHandler -from captum.attr import visualization -import torch.nn.functional as F -from bert_train import BertNewsClassifier -from wrapper import AGNewsmodelWrapper - -logger = logging.getLogger(__name__) - - -class NewsClassifierHandler(BaseHandler): - """ - NewsClassifierHandler class. This handler takes a review / sentence - and returns the label as either world / sports / business /sci-tech - """ - - def __init__(self): - self.model = None - self.mapping = None - self.device = None - self.initialized = False - self.class_mapping_file = None - self.VOCAB_FILE = None - - def initialize(self, ctx): - """ - First try to load torchscript else load eager mode state_dict based model - :param ctx: System properties - """ - - properties = ctx.system_properties - self.device = torch.device("cuda") if torch.cuda.is_available() else "cpu" - model_dir = properties.get("model_dir") - - # Read model serialize/pt file - model_pt_path = os.path.join(model_dir, "bert.pth") - # Read model definition file - model_def_path = os.path.join(model_dir, "bert_train.py") - if not os.path.isfile(model_def_path): - raise RuntimeError("Missing the model definition file") - self.VOCAB_FILE = os.path.join(model_dir, "bert-base-uncased-vocab.txt") - if not os.path.isfile(self.VOCAB_FILE): - raise RuntimeError("Missing the vocab file") - - self.class_mapping_file = os.path.join(model_dir, "index_to_name.json") - - state_dict = torch.load(model_pt_path, map_location=self.device) - self.model = BertNewsClassifier() - self.model.load_state_dict(state_dict) - self.model.to(self.device) - self.model.eval() - - logger.debug("Model file %s loaded successfully", model_pt_path) - self.initialized = True - - def preprocess(self, data): - """ - Receives text in form of json and converts it into an encoding for the inference stage - :param data: Input to be passed through the layers for prediction - :return: output - preprocessed encoding - """ - - text = data[0].get("data") - if text is None: - text = data[0].get("body") - - self.text = text - self.tokenizer = 
BertTokenizer(self.VOCAB_FILE) - self.input_ids = torch.tensor( - [self.tokenizer.encode(self.text, add_special_tokens=True)] - ).to(self.device) - return self.input_ids - - def inference(self, input_ids): - """ - Predict the class for a review / sentence whether - it is belong to world / sports / business /sci-tech - :param encoding: Input encoding to be passed through the layers for prediction - :return: output - predicted output - """ - inputs = self.input_ids.to(self.device) - self.outputs = self.model.forward(inputs) - self.out = np.argmax(self.outputs.cpu().detach()) - return [self.out.item()] - - def postprocess(self, inference_output): - """ - Does postprocess after inference to be returned to user - :param inference_output: Output of inference - :return: output - Output after post processing - """ - if os.path.exists(self.class_mapping_file): - with open(self.class_mapping_file) as json_file: - data = json.load(json_file) - inference_output = json.dumps(data[str(inference_output[0])]) - return [inference_output] - - return inference_output - - def add_attributions_to_visualizer( - self, - attributions, - tokens, - pred_prob, - pred_class, - true_class, - attr_class, - delta, - vis_data_records, - ): - attributions = attributions.sum(dim=2).squeeze(0) - attributions = attributions / torch.norm(attributions) - attributions = attributions.cpu().detach().numpy() - - # storing couple samples in an array for visualization purposes - vis_data_records.append( - visualization.VisualizationDataRecord( - attributions, - pred_prob, - pred_class, - true_class, - attr_class, - attributions.sum(), - tokens, - delta, - ) - ) - - def score_func(self, o): - output = F.softmax(o, dim=1) - pre_pro = np.argmax(output.cpu().detach()) - return pre_pro - - def summarize_attributions(self, attributions): - """Summarises the attribution across multiple runs - Args: - attributions ([list): attributions from the Integrated Gradients - Returns: - list : Returns the attributions after normalizing them. - """ - attributions = attributions.sum(dim=-1).squeeze(0) - attributions = attributions / torch.norm(attributions) - return attributions - - def explain_handle(self, model_wraper, text, target=1): - """Captum explanations handler - Args: - data_preprocess (Torch Tensor): - Preprocessed data to be used for captum - raw_data (list): The unprocessed data to get target from the request - Returns: - dict : A dictionary response with the explanations response. 
- """ - vis_data_records_base = [] - model_wrapper = AGNewsmodelWrapper(self.model) - tokenizer = BertTokenizer(self.VOCAB_FILE) - model_wrapper.eval() - model_wrapper.zero_grad() - encoding = tokenizer.encode_plus( - self.text, return_attention_mask=True, return_tensors="pt", add_special_tokens=False - ) - input_ids = encoding["input_ids"] - attention_mask = encoding["attention_mask"] - input_ids = input_ids.to(self.device) - attention_mask = attention_mask.to(self.device) - input_embedding_test = model_wrapper.model.bert_model.embeddings(input_ids) - preds = model_wrapper(input_embedding_test, attention_mask) - out = np.argmax(preds.cpu().detach(), axis=1) - out = out.item() - ig_1 = IntegratedGradients(model_wrapper) - attributions, delta = ig_1.attribute( # pylint: disable=no-member - input_embedding_test, - n_steps=500, - return_convergence_delta=True, - target=1, - ) - tokens = tokenizer.convert_ids_to_tokens(input_ids[0].cpu().numpy().tolist()) - feature_imp_dict = {} - feature_imp_dict["words"] = tokens - attributions_sum = self.summarize_attributions(attributions) - feature_imp_dict["importances"] = attributions_sum.tolist() - feature_imp_dict["delta"] = delta[0].tolist() - self.add_attributions_to_visualizer( - attributions, tokens, self.score_func(preds), out, 2, 1, delta, vis_data_records_base - ) - return [feature_imp_dict] diff --git a/samples/contrib/pytorch-samples/bert/bert_pre_process.py b/samples/contrib/pytorch-samples/bert/bert_pre_process.py deleted file mode 100644 index 0cc3084007c..00000000000 --- a/samples/contrib/pytorch-samples/bert/bert_pre_process.py +++ /dev/null @@ -1,106 +0,0 @@ -# !/usr/bin/env/python3 -# Copyright (c) Facebook, Inc. and its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Bert Pre preprocess script.""" -import os -import subprocess -from argparse import ArgumentParser -from pathlib import Path - -import pyarrow.csv as pv -import pyarrow.parquet as pq -from torchtext.utils import download_from_url, extract_archive -from pytorch_kfp_components.components.visualization.component import ( - Visualization, -) - -if __name__ == "__main__": - - parser = ArgumentParser() - parser.add_argument( - "--dataset_url", - default= - "https://kubeflow-dataset.s3.us-east-2.amazonaws.com/ag_news_csv.tar.gz", #pylint: disable=line-too-long - type=str, - help="URL to download AG News dataset", - ) - - parser.add_argument( - "--output_path", - default="output/processing", - type=str, - help="Path to write the ag news dataset", - ) - - parser.add_argument( - "--mlpipeline_ui_metadata", - type=str, - help="Path to write mlpipeline-ui-metadata.json", - ) - - args = vars(parser.parse_args()) - - dataset_url = args["dataset_url"] - output_path = args["output_path"] - - Path(output_path).mkdir(parents=True, exist_ok=True) - - dataset_tar = download_from_url(dataset_url, root="./") - extracted_files = extract_archive(dataset_tar) - - ag_news_csv = pv.read_csv("ag_news_csv/train.csv") - - pq.write_table( - ag_news_csv, os.path.join(output_path, "ag_news_data.parquet") - ) - - entry_point = ["ls", "-R", output_path] - run_code = subprocess.run(entry_point, stdout=subprocess.PIPE) #pylint: disable=subprocess-run-check - print(run_code.stdout) - - visualization_arguments = { - "inputs": { - "dataset_url": args["dataset_url"] - }, - "output": { - "mlpipeline_ui_metadata": args["mlpipeline_ui_metadata"], - }, - } - - markdown_dict = {"storage": "inline", "source": visualization_arguments} - - print("Visualization arguments: ", markdown_dict) - - visualization = Visualization( - mlpipeline_ui_metadata=args["mlpipeline_ui_metadata"], - markdown=markdown_dict, - ) - - df = ag_news_csv.to_pandas() - df_counts = df.iloc[:, 0].value_counts() - print(df.iloc[:, 0].value_counts()) - label_names = ["World", "Sports", "Business", "Sci/Tech"] - label_dict = {} - total_count = len(df) - for key, value in df_counts.iteritems(): - label_name = label_names[key - 1] - label_dict[label_name.upper()] = value - - label_dict["TOTAL_COUNT"] = total_count - - markdown_dict = {"storage": "inline", "source": label_dict} - - visualization = Visualization( - mlpipeline_ui_metadata=args["mlpipeline_ui_metadata"], - markdown=markdown_dict, - ) diff --git a/samples/contrib/pytorch-samples/bert/bert_train.py b/samples/contrib/pytorch-samples/bert/bert_train.py deleted file mode 100644 index c7882cec3e9..00000000000 --- a/samples/contrib/pytorch-samples/bert/bert_train.py +++ /dev/null @@ -1,217 +0,0 @@ -# !/usr/bin/env/python3 -# Copyright (c) Facebook, Inc. and its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# pylint: disable=arguments-differ -# pylint: disable=unused-argument -# pylint: disable=abstract-method -"""Bert Training Script.""" -import pytorch_lightning as pl -import torch -import torch.nn.functional as F -from torchmetrics import Accuracy -from sklearn.metrics import accuracy_score -from torch import nn -from transformers import AdamW, BertModel - - -class BertNewsClassifier(pl.LightningModule): #pylint: disable=too-many-ancestors,too-many-instance-attributes - """Bert Model Class.""" - - def __init__(self, **kwargs): - """Initializes the network, optimizer and scheduler.""" - super(BertNewsClassifier, self).__init__() #pylint: disable=super-with-arguments - self.pre_trained_model_name = "bert-base-uncased" #pylint: disable=invalid-name - self.bert_model = BertModel.from_pretrained(self.pre_trained_model_name) - for param in self.bert_model.parameters(): - param.requires_grad = False - self.drop = nn.Dropout(p=0.2) - # assigning labels - self.class_names = ["World", "Sports", "Business", "Sci/Tech"] - n_classes = len(self.class_names) - - self.fc1 = nn.Linear(self.bert_model.config.hidden_size, 512) - self.out = nn.Linear(512, n_classes) - # self.bert_model.embedding = self.bert_model.embeddings - # self.embedding = self.bert_model.embeddings - - self.scheduler = None - self.optimizer = None - self.args = kwargs - - self.train_acc = Accuracy() - self.val_acc = Accuracy() - self.test_acc = Accuracy() - - self.preds = [] - self.target = [] - - def compute_bert_outputs( #pylint: disable=no-self-use - self, model_bert, embedding_input, attention_mask=None, head_mask=None - ): - """Computes Bert Outputs. - - Args: - model_bert : the bert model - embedding_input : input for bert embeddings. - attention_mask : attention mask - head_mask : head mask - Returns: - output : the bert output - """ - if attention_mask is None: - attention_mask = torch.ones( #pylint: disable=no-member - embedding_input.shape[0], embedding_input.shape[1] - ).to(embedding_input) - - extended_attention_mask = attention_mask.unsqueeze(1).unsqueeze(2) - - extended_attention_mask = extended_attention_mask.to( - dtype=next(model_bert.parameters()).dtype - ) # fp16 compatibility - extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0 - - if head_mask is not None: - if head_mask.dim() == 1: - head_mask = head_mask.unsqueeze(0).unsqueeze(0).unsqueeze( - -1 - ).unsqueeze(-1) - head_mask = head_mask.expand( - model_bert.config.num_hidden_layers, -1, -1, -1, -1 - ) - elif head_mask.dim() == 2: - head_mask = ( - head_mask.unsqueeze(1).unsqueeze(-1).unsqueeze(-1) - ) # We can specify head_mask for each layer - head_mask = head_mask.to( - dtype=next(model_bert.parameters()).dtype - ) # switch to fload if need + fp16 compatibility - else: - head_mask = [None] * model_bert.config.num_hidden_layers - - encoder_outputs = model_bert.encoder( - embedding_input, extended_attention_mask, head_mask=head_mask - ) - sequence_output = encoder_outputs[0] - pooled_output = model_bert.pooler(sequence_output) - outputs = ( - sequence_output, - pooled_output, - ) + encoder_outputs[1:] - return outputs - - def forward(self, input_ids, attention_mask=None): - """ Forward function. 
- Args: - input_ids: Input data - attention_maks: Attention mask value - - Returns: - output - Type of news for the given news snippet - """ - embedding_input = self.bert_model.embeddings(input_ids) - outputs = self.compute_bert_outputs( - self.bert_model, embedding_input, attention_mask - ) - pooled_output = outputs[1] - output = torch.tanh(self.fc1(pooled_output)) - output = self.drop(output) - output = self.out(output) - return output - - def training_step(self, train_batch, batch_idx): - """Training the data as batches and returns training loss on each - batch. - - Args: - train_batch Batch data - batch_idx: Batch indices - - Returns: - output - Training loss - """ - input_ids = train_batch["input_ids"].to(self.device) - attention_mask = train_batch["attention_mask"].to(self.device) - targets = train_batch["targets"].to(self.device) - output = self.forward(input_ids, attention_mask) - _, y_hat = torch.max(output, dim=1) #pylint: disable=no-member - loss = F.cross_entropy(output, targets) - self.train_acc(y_hat, targets) - self.log("train_acc", self.train_acc.compute()) - self.log("train_loss", loss) - return {"loss": loss, "acc": self.train_acc.compute()} - - def test_step(self, test_batch, batch_idx): - """Performs test and computes the accuracy of the model. - - Args: - test_batch: Batch data - batch_idx: Batch indices - - Returns: - output - Testing accuracy - """ - input_ids = test_batch["input_ids"].to(self.device) - attention_mask = test_batch["attention_mask"].to(self.device) - targets = test_batch["targets"].to(self.device) - output = self.forward(input_ids, attention_mask) - _, y_hat = torch.max(output, dim=1) #pylint: disable=no-member - test_acc = accuracy_score(y_hat.cpu(), targets.cpu()) - self.test_acc(y_hat, targets) - self.preds += y_hat.tolist() - self.target += targets.tolist() - self.log("test_acc", self.test_acc.compute()) - return {"test_acc": torch.tensor(test_acc)} #pylint: disable=no-member - - def validation_step(self, val_batch, batch_idx): - """Performs validation of data in batches. - - Args: - val_batch: Batch data - batch_idx: Batch indices - - Returns: - output - valid step loss - """ - - input_ids = val_batch["input_ids"].to(self.device) - attention_mask = val_batch["attention_mask"].to(self.device) - targets = val_batch["targets"].to(self.device) - output = self.forward(input_ids, attention_mask) - _, y_hat = torch.max(output, dim=1) #pylint: disable=no-member - loss = F.cross_entropy(output, targets) - self.val_acc(y_hat, targets) - self.log("val_acc", self.val_acc.compute()) - self.log("val_loss", loss, sync_dist=True) - return {"val_step_loss": loss, "acc": self.val_acc.compute()} - - def configure_optimizers(self): - """Initializes the optimizer and learning rate scheduler. 
- - Returns: - output - Initialized optimizer and scheduler - """ - self.optimizer = AdamW(self.parameters(), lr=self.args.get("lr", 0.001)) - self.scheduler = { - "scheduler": - torch.optim.lr_scheduler.ReduceLROnPlateau( - self.optimizer, - mode="min", - factor=0.2, - patience=2, - min_lr=1e-6, - verbose=True, - ), - "monitor": - "val_loss", - } - return [self.optimizer], [self.scheduler] diff --git a/samples/contrib/pytorch-samples/bert/config.properties b/samples/contrib/pytorch-samples/bert/config.properties deleted file mode 100644 index b8917868d2b..00000000000 --- a/samples/contrib/pytorch-samples/bert/config.properties +++ /dev/null @@ -1,9 +0,0 @@ -inference_address=http://0.0.0.0:8085 -management_address=http://0.0.0.0:8085 -metrics_address=http://0.0.0.0:8082 -number_of_netty_threads=4 -install_py_dep_per_model=true -enable_envvars_config=true -job_queue_size=10 -model_store=/mnt/models/model-store -model_snapshot={"name":"startup.cfg","modelCount":1,"models":{"bert":{"1.0":{"defaultVersion":true,"marName":"bert_test.mar","minWorkers":1,"maxWorkers":5,"batchSize":1,"maxBatchDelay":5000,"responseTimeout":120}}}} diff --git a/samples/contrib/pytorch-samples/bert/index_to_name.json b/samples/contrib/pytorch-samples/bert/index_to_name.json deleted file mode 100644 index 8bd12decff8..00000000000 --- a/samples/contrib/pytorch-samples/bert/index_to_name.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "0": "World", - "1": "Sports", - "2": "Business", - "3": "Sci/Tech" -} diff --git a/samples/contrib/pytorch-samples/bert/news_dataset.py b/samples/contrib/pytorch-samples/bert/news_dataset.py deleted file mode 100644 index 71bcb0a77ed..00000000000 --- a/samples/contrib/pytorch-samples/bert/news_dataset.py +++ /dev/null @@ -1,79 +0,0 @@ -# !/usr/bin/env/python3 -# Copyright (c) Facebook, Inc. and its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# pylint: disable=arguments-differ -# pylint: disable=unused-argument -# pylint: disable=abstract-method -"""News dataset script.""" -import torch -from torch.utils.data import Dataset - - -class NewsDataset(Dataset): - """Ag News Dataset - Args: - Dataset - """ - - def __init__(self, reviews, targets, tokenizer, max_length): - """Performs initialization of tokenizer. - - Args: - reviews: AG news text - targets: labels - tokenizer: bert tokenizer - max_length: maximum length of the news text - """ - self.reviews = reviews - self.targets = targets - self.tokenizer = tokenizer - self.max_length = max_length - - def __len__(self): - """ - Returns: - returns the number of datapoints in the dataframe - - """ - return len(self.reviews) - - def __getitem__(self, item): - """Returns the review text and the targets of the specified item. 
- - Args: - item: Index of sample review - - Returns: - Returns the dictionary of review text, - input ids, attention mask, targets - """ - review = str(self.reviews[item]) - target = self.targets[item] - - encoding = self.tokenizer.encode_plus( - review, - add_special_tokens=True, - max_length=self.max_length, - return_token_type_ids=False, - padding="max_length", - return_attention_mask=True, - return_tensors="pt", - truncation=True, - ) - - return { - "review_text": review, - "input_ids": encoding["input_ids"].flatten(), - "attention_mask": encoding["attention_mask"].flatten(), # pylint: disable=not-callable - "targets": torch.tensor(target, dtype=torch.long), # pylint: disable=no-member,not-callable - } diff --git a/samples/contrib/pytorch-samples/bert/pipeline.py b/samples/contrib/pytorch-samples/bert/pipeline.py deleted file mode 100644 index 379821d77b6..00000000000 --- a/samples/contrib/pytorch-samples/bert/pipeline.py +++ /dev/null @@ -1,229 +0,0 @@ -#!/usr/bin/env/python3 -# -# Copyright (c) Facebook, Inc. and its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Pipeline for bert classification example.""" - -import json -from kfp.onprem import use_k8s_secret -from kfp import components -from kfp.components import load_component_from_file -from kfp import dsl -from kfp import compiler - -INGRESS_GATEWAY = "http://istio-ingressgateway.istio-system.svc.cluster.local" -AUTH = "" -NAMESPACE = "kubeflow-user-example-com" -COOKIE = "authservice_session=" + AUTH - -MINIO_ENDPOINT = "http://minio-service.kubeflow:9000" -LOG_BUCKET = "mlpipeline" -TENSORBOARD_IMAGE = "public.ecr.aws/pytorch-samples/tboard:latest" - -DEPLOY_NAME = "bertserve" -MODEL_NAME = "bert" - -ISVC_NAME = DEPLOY_NAME + "." + NAMESPACE + "." 
+ "example.com" -INPUT_REQUEST = ( - "https://kubeflow-dataset.s3.us-east-2.amazonaws.com" - "/cifar10/input.json" -) - -YAML_FOLDER_PATH = "bert/yaml" -YAML_COMMON_FOLDER = "common" - -prepare_tensorboard_op = load_component_from_file( - "yaml/tensorboard_component.yaml" -) # pylint: disable=not-callable -prep_op = components.load_component_from_file("yaml/preprocess_component.yaml") # pylint: disable=not-callable -train_op = components.load_component_from_file("yaml/train_component.yaml") # pylint: disable=not-callable -deploy_op = load_component_from_file("../../../components/kserve/component.yaml") # pylint: disable=not-callable -pred_op = components.load_component_from_file("yaml/prediction_component.yaml") # pylint: disable=not-callable - -minio_op = components.load_component_from_file("yaml/minio_component.yaml") # pylint: disable=not-callable - - -@dsl.pipeline(name="Training pipeline", description="Sample training job test") -def pytorch_bert( # pylint: disable=too-many-arguments - minio_endpoint=MINIO_ENDPOINT, - log_bucket=LOG_BUCKET, - log_dir=f"tensorboard/logs/{dsl.RUN_ID_PLACEHOLDER}", - mar_path=f"mar/{dsl.RUN_ID_PLACEHOLDER}/model-store", - config_prop_path=f"mar/{dsl.RUN_ID_PLACEHOLDER}/config", - model_uri=f"s3://mlpipeline/mar/{dsl.RUN_ID_PLACEHOLDER}", - tf_image=TENSORBOARD_IMAGE, - deploy=DEPLOY_NAME, - namespace=NAMESPACE, - confusion_matrix_log_dir=f"confusion_matrix/{dsl.RUN_ID_PLACEHOLDER}/", - num_samples=1000, - max_epochs=1 -): - """Thid method defines the pipeline tasks and operations""" - prepare_tb_task = prepare_tensorboard_op( - log_dir_uri=f"s3://{log_bucket}/{log_dir}", - image=tf_image, - pod_template_spec=json.dumps({ - "spec": { - "containers": [{ - "env": [ - { - "name": "AWS_ACCESS_KEY_ID", - "valueFrom": { - "secretKeyRef": { - "name": "mlpipeline-minio-artifact", - "key": "accesskey", - } - }, - }, - { - "name": "AWS_SECRET_ACCESS_KEY", - "valueFrom": { - "secretKeyRef": { - "name": "mlpipeline-minio-artifact", - "key": "secretkey", - } - }, - }, - { - "name": "AWS_REGION", - "value": "minio" - }, - { - "name": "S3_ENDPOINT", - "value": f"{minio_endpoint}", - }, - { - "name": "S3_USE_HTTPS", - "value": "0" - }, - { - "name": "S3_VERIFY_SSL", - "value": "0" - }, - ] - }] - } - }), - ).set_display_name("Visualization") - - prep_task = ( - prep_op().after(prepare_tb_task - ).set_display_name("Preprocess & Transform") - ) - confusion_matrix_url = f"minio://{log_bucket}/{confusion_matrix_log_dir}" - script_args = f"model_name=bert.pth," \ - f"num_samples={num_samples}," \ - f"confusion_matrix_url={confusion_matrix_url}" - # For gpus, set number of gpus and accelerator type - ptl_args = f"max_epochs={max_epochs}," \ - "profiler=pytorch," \ - "gpus=0," \ - "accelerator=None" - train_task = ( - train_op( - input_data=prep_task.outputs["output_data"], - script_args=script_args, - ptl_arguments=ptl_args - ).after(prep_task).set_display_name("Training") - ) - # For GPU uncomment below line and set GPU limit and node selector - # ).set_gpu_limit(1).add_node_selector_constraint - # ('cloud.google.com/gke-accelerator','nvidia-tesla-p4') - - ( - minio_op( - bucket_name="mlpipeline", - folder_name=log_dir, - input_path=train_task.outputs["tensorboard_root"], - filename="", - ).after(train_task).set_display_name("Tensorboard Events Pusher") - ) - minio_mar_upload = ( - minio_op( - bucket_name="mlpipeline", - folder_name=mar_path, - input_path=train_task.outputs["checkpoint_dir"], - filename="bert_test.mar", - ).after(train_task).set_display_name("Mar Pusher") - ) 
- ( - minio_op( - bucket_name="mlpipeline", - folder_name=config_prop_path, - input_path=train_task.outputs["checkpoint_dir"], - filename="config.properties", - ).after(train_task).set_display_name("Conifg Pusher") - ) - - model_uri = str(model_uri) - # pylint: disable=unused-variable - isvc_yaml = """ - apiVersion: "serving.kserve.io/v1beta1" - kind: "InferenceService" - metadata: - name: {} - namespace: {} - spec: - predictor: - serviceAccountName: sa - pytorch: - protocolVersion: v2 - storageUri: {} - resources: - limits: - memory: 4Gi - """.format(deploy, namespace, model_uri) - - # For GPU inference use below yaml with gpu count and accelerator - gpu_count = "1" - accelerator = "nvidia-tesla-p4" - isvc_gpu_yaml = """ - apiVersion: "serving.kserve.io/v1beta1" - kind: "InferenceService" - metadata: - name: {} - namespace: {} - spec: - predictor: - serviceAccountName: sa - pytorch: - protocolVersion: v2 - storageUri: {} - resources: - limits: - memory: 4Gi - nvidia.com/gpu: {} - nodeSelector: - cloud.google.com/gke-accelerator: {} -""".format(deploy, namespace, model_uri, gpu_count, accelerator) - # Update inferenceservice_yaml for GPU inference - deploy_task = ( - deploy_op(action="apply", inferenceservice_yaml=isvc_yaml - ).after(minio_mar_upload).set_display_name("Deployer") - ) - - dsl.get_pipeline_conf().add_op_transformer( - use_k8s_secret( - secret_name="mlpipeline-minio-artifact", - k8s_secret_key_to_env={ - "secretkey": "MINIO_SECRET_KEY", - "accesskey": "MINIO_ACCESS_KEY", - }, - ) - ) - - -if __name__ == "__main__": - compiler.compiler.Compiler().compile( - pytorch_bert, package_path="pytorch_bert.yaml" - ) diff --git a/samples/contrib/pytorch-samples/bert/requirements.txt b/samples/contrib/pytorch-samples/bert/requirements.txt deleted file mode 100644 index 36a99192e9b..00000000000 --- a/samples/contrib/pytorch-samples/bert/requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -pytorch-lightning -sklearn -captum -torchtext diff --git a/samples/contrib/pytorch-samples/bert/sample.txt b/samples/contrib/pytorch-samples/bert/sample.txt deleted file mode 100644 index 87efb143fbb..00000000000 --- a/samples/contrib/pytorch-samples/bert/sample.txt +++ /dev/null @@ -1,10 +0,0 @@ -{ - "id": "d3b15cad-50a2-4eaf-80ce-8b0a428bd298", - "inputs": [{ - "name": "4b7c7d4a-51e4-43c8-af61-04639f6ef4bc", - "shape": -1, - "datatype": "BYTES", - "data": "Bloomberg has reported on the economy" - } - ] -} \ No newline at end of file diff --git a/samples/contrib/pytorch-samples/bert/template_mapping.json b/samples/contrib/pytorch-samples/bert/template_mapping.json deleted file mode 100644 index fa7ee52bcd8..00000000000 --- a/samples/contrib/pytorch-samples/bert/template_mapping.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "train_component.yaml": { - "implementation.container.command": ["python3", "bert/agnews_classification_pytorch.py"], - "implementation.container.image": "public.ecr.aws/pytorch-samples/kfp_samples:latest" - }, - "preprocess_component.yaml": { - "implementation.container.command": ["python3", "bert/bert_pre_process.py"], - "implementation.container.image": "public.ecr.aws/pytorch-samples/kfp_samples:latest" - } -} - diff --git a/samples/contrib/pytorch-samples/bert/wrapper.py b/samples/contrib/pytorch-samples/bert/wrapper.py deleted file mode 100644 index df97d466ff3..00000000000 --- a/samples/contrib/pytorch-samples/bert/wrapper.py +++ /dev/null @@ -1,101 +0,0 @@ -# !/usr/bin/env/python3 -# Copyright (c) Facebook, Inc. and its affiliates. 
-# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# pylint: disable=arguments-differ -# pylint: disable=unused-argument -# pylint: disable=abstract-method -"""Bert Wrapper.""" -import torch -import torch.nn as nn -import torch.nn.functional as F - - -class AGNewsmodelWrapper(nn.Module): - """Wrapper Class.""" - - def __init__(self, model): - super( # pylint: disable=super-with-arguments - AGNewsmodelWrapper, self - ).__init__() - self.model = model - - def compute_bert_outputs( # pylint: disable=no-self-use - self, model_bert, embedding_input, attention_mask=None, head_mask=None - ): - """Computes Bert Outputs. - - Args: - model_bert : the bert model - embedding_input : input for bert embeddings. - attention_mask : attention mask - head_mask : head mask - Returns: - output : the bert output - """ - if attention_mask is None: - attention_mask = torch.ones( # pylint: disable=no-member - embedding_input.shape[0], embedding_input.shape[1] - ).to(embedding_input) - - extended_attention_mask = attention_mask.unsqueeze(1).unsqueeze(2) - - extended_attention_mask = extended_attention_mask.to( - dtype=next(model_bert.parameters()).dtype - ) # fp16 compatibility - extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0 - - if head_mask is not None: - if head_mask.dim() == 1: - head_mask = ( - head_mask.unsqueeze(0).unsqueeze(0).unsqueeze(-1). - unsqueeze(-1) - ) - head_mask = head_mask.expand( - model_bert.config.num_hidden_layers, -1, -1, -1, -1 - ) - elif head_mask.dim() == 2: - head_mask = ( - head_mask.unsqueeze(1).unsqueeze(-1).unsqueeze(-1) - ) # We can specify head_mask for each layer - head_mask = head_mask.to( - dtype=next(model_bert.parameters()).dtype - ) # switch to float if needed + fp16 compatibility - else: - head_mask = [None] * model_bert.config.num_hidden_layers - - encoder_outputs = model_bert.encoder( - embedding_input, extended_attention_mask, head_mask=head_mask - ) - sequence_output = encoder_outputs[0] - pooled_output = model_bert.pooler(sequence_output) - outputs = ( - sequence_output, - pooled_output, - ) + encoder_outputs[1:] - return outputs - - def forward(self, embeddings, attention_mask=None): - """Forward function. - - Args: - embeddings : bert embeddings.
- attention_mask: Attention mask value - """ - outputs = self.compute_bert_outputs( - self.model.bert_model, embeddings, attention_mask - ) - pooled_output = outputs[1] - output = F.relu(self.model.fc1(pooled_output)) - output = self.model.drop(output) - output = self.model.out(output) - return output diff --git a/samples/contrib/pytorch-samples/build.sh b/samples/contrib/pytorch-samples/build.sh deleted file mode 100755 index c09d9c3e7b7..00000000000 --- a/samples/contrib/pytorch-samples/build.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/bin/bash - -if (( $# != 2 )) -then - echo "Usage: ./build.sh " - echo "Ex: ./build.sh pytorch_pipeline/examples/cifar10 foobar" - exit 1 -fi - - -## Generating current timestamp -python3 gen_image_timestamp.py > curr_time.txt - -export images_tag=$(cat curr_time.txt) -echo ++++ Building component images with tag=$images_tag - - -full_image_name=$2/pytorch_kfp_components:$images_tag - -echo IMAGE TO BUILD: $full_image_name - -export full_image_name=$full_image_name - - -## build and push docker - to fetch the latest changes and install dependencies -# cd pytorch_kfp_components - -docker build --no-cache -t $full_image_name . -docker push $full_image_name - -# cd .. - -python utils/generate_templates.py $1/template_mapping.json - -## Update component.yaml files with the latest docker image name - -find "yaml" -name "*.yaml" | grep -v 'deploy' | grep -v "tensorboard" | grep -v "prediction" | while read -d $'\n' file -do - yq -i eval ".implementation.container.image = \"$full_image_name\"" $file -done - - -## compile pipeline - -echo Running pipeline compilation -echo "$1/pipeline.py" -python3 "$1/pipeline.py" --target kfp diff --git a/samples/contrib/pytorch-samples/cifar10/CIFAR10_Captum_Insights.png b/samples/contrib/pytorch-samples/cifar10/CIFAR10_Captum_Insights.png deleted file mode 100644 index 03acfbec591..00000000000 Binary files a/samples/contrib/pytorch-samples/cifar10/CIFAR10_Captum_Insights.png and /dev/null differ diff --git a/samples/contrib/pytorch-samples/cifar10/ax_template_mapping.json b/samples/contrib/pytorch-samples/cifar10/ax_template_mapping.json deleted file mode 100644 index f22c6a2faec..00000000000 --- a/samples/contrib/pytorch-samples/cifar10/ax_template_mapping.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "ax_train_component.yaml": { - "implementation.container.command": ["python3", "cifar10/cifar10_pytorch.py"], - "implementation.container.image": "public.ecr.aws/pytorch-samples/kfp_samples:latest-gpu" - }, - "preprocess_component.yaml": { - "implementation.container.command": ["python3", "cifar10/cifar10_pre_process.py"], - "implementation.container.image": "public.ecr.aws/pytorch-samples/kfp_samples:latest-gpu" - } - -} \ No newline at end of file diff --git a/samples/contrib/pytorch-samples/cifar10/cifar10_datamodule.py b/samples/contrib/pytorch-samples/cifar10/cifar10_datamodule.py deleted file mode 100644 index da509e38a61..00000000000 --- a/samples/contrib/pytorch-samples/cifar10/cifar10_datamodule.py +++ /dev/null @@ -1,151 +0,0 @@ -# !/usr/bin/env/python3 -# Copyright (c) Facebook, Inc. and its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Cifar10 data module.""" -import os - -import pytorch_lightning as pl -import webdataset as wds -from torch.utils.data import DataLoader -from torchvision import transforms - - -class CIFAR10DataModule(pl.LightningDataModule): # pylint: disable=too-many-instance-attributes - """Data module class.""" - def __init__(self, **kwargs): - """Initialization of inherited lightning data module.""" - super(CIFAR10DataModule, self).__init__() # pylint: disable=super-with-arguments - - self.train_dataset = None - self.valid_dataset = None - self.test_dataset = None - self.train_data_loader = None - self.val_data_loader = None - self.test_data_loader = None - self.normalize = transforms.Normalize(mean=[0.5, 0.5, 0.5], - std=[0.5, 0.5, 0.5]) - self.valid_transform = transforms.Compose([ - transforms.ToTensor(), - self.normalize, - ]) - - self.train_transform = transforms.Compose([ - transforms.RandomResizedCrop(32), - transforms.RandomHorizontalFlip(), - transforms.ToTensor(), - self.normalize, - ]) - self.args = kwargs - - def prepare_data(self): - """Implementation of abstract class.""" - - @staticmethod - def get_num_files(input_path): - """Gets num files. - - Args: - input_path : path to input - """ - return len(os.listdir(input_path)) - 1 - - def setup(self, stage=None): - """Downloads the data, parse it and split the data into train, test, - validation data. - - Args: - stage: Stage - training or testing - """ - - data_path = self.args.get("train_glob", "/pvc/output/processing") - - train_base_url = data_path + "/train" - val_base_url = data_path + "/val" - test_base_url = data_path + "/test" - - train_count = self.get_num_files(train_base_url) - val_count = self.get_num_files(val_base_url) - test_count = self.get_num_files(test_base_url) - - train_url = "{}/{}-{}".format(train_base_url, "train", - "{0.." + str(train_count) + "}.tar") - valid_url = "{}/{}-{}".format(val_base_url, "val", - "{0.." + str(val_count) + "}.tar") - test_url = "{}/{}-{}".format(test_base_url, "test", - "{0.." 
+ str(test_count) + "}.tar") - - self.train_dataset = (wds.WebDataset( - train_url, - handler=wds.warn_and_continue, - nodesplitter=wds.shardlists.split_by_node).shuffle(100).decode("pil").rename( - image="ppm;jpg;jpeg;png", - info="cls").map_dict(image=self.train_transform).to_tuple( - "image", "info").batched(40)) - - self.valid_dataset = (wds.WebDataset( - valid_url, - handler=wds.warn_and_continue, - nodesplitter=wds.shardlists.split_by_node).shuffle(100).decode("pil").rename( - image="ppm", - info="cls").map_dict(image=self.valid_transform).to_tuple( - "image", "info").batched(20)) - - self.test_dataset = (wds.WebDataset( - test_url, - handler=wds.warn_and_continue, - nodesplitter=wds.shardlists.split_by_node).shuffle(100).decode("pil").rename( - image="ppm", - info="cls").map_dict(image=self.valid_transform).to_tuple( - "image", "info").batched(20)) - - def create_data_loader(self, dataset, batch_size, num_workers): # pylint: disable=no-self-use - """Creates data loader.""" - return DataLoader(dataset, - batch_size=batch_size, - num_workers=num_workers) - - def train_dataloader(self): - """Train Data loader. - Returns: - output - Train data loader for the given input - """ - self.train_data_loader = self.create_data_loader( - self.train_dataset, - self.args.get("train_batch_size", None), - self.args.get("train_num_workers", 4), - ) - return self.train_data_loader - - def val_dataloader(self): - """Validation Data Loader. - Returns: - output - Validation data loader for the given input - """ - self.val_data_loader = self.create_data_loader( - self.valid_dataset, - self.args.get("val_batch_size", None), - self.args.get("val_num_workers", 4), - ) - return self.val_data_loader - - def test_dataloader(self): - """Test Data Loader. - Returns: - output - Test data loader for the given input - """ - self.test_data_loader = self.create_data_loader( - self.test_dataset, - self.args.get("val_batch_size", None), - self.args.get("val_num_workers", 4), - ) - return self.test_data_loader diff --git a/samples/contrib/pytorch-samples/cifar10/cifar10_handler.py b/samples/contrib/pytorch-samples/cifar10/cifar10_handler.py deleted file mode 100644 index 04bf00ae38b..00000000000 --- a/samples/contrib/pytorch-samples/cifar10/cifar10_handler.py +++ /dev/null @@ -1,245 +0,0 @@ -# !/usr/bin/env/python3 -# Copyright (c) Facebook, Inc. and its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# pylint: disable=no-self-use,too-many-arguments,unused-argument,not-callable,no-member,attribute-defined-outside-init -""" Cifar10 Custom Handler.""" - -import base64 -import io -import json -import logging -import os -from abc import ABC -from base64 import b64encode -from io import BytesIO - -import numpy as np -import torch -from PIL import Image -from captum.attr import ( - IntegratedGradients, Occlusion, LayerGradCam, LayerAttribution -) -from captum.attr import visualization as viz -from classifier import CIFAR10CLASSIFIER -from matplotlib.colors import LinearSegmentedColormap -from torchvision import transforms -from ts.torch_handler.image_classifier import ImageClassifier - -logger = logging.getLogger(__name__) - - -class CIFAR10Classification(ImageClassifier, ABC): - """ - Base class for all vision handlers - """ - - def initialize(self, ctx): # pylint: disable=arguments-differ - """In this initialize function, the CIFAR10 trained model is loaded and - the Integrated Gradients,occlusion and layer_gradcam Algorithm for - Captum Explanations is initialized here. - Args: - ctx (context): It is a JSON Object containing information - pertaining to the model artifacts parameters. - """ - self.manifest = ctx.manifest - properties = ctx.system_properties - model_dir = properties.get("model_dir") - print("Model dir is {}".format(model_dir)) - serialized_file = self.manifest["model"]["serializedFile"] - model_pt_path = os.path.join(model_dir, serialized_file) - self.device = torch.device( - "cuda:" + str(properties.get("gpu_id")) if torch.cuda.is_available( - ) else "cpu" - ) - - self.model = CIFAR10CLASSIFIER() - self.model.load_state_dict(torch.load(model_pt_path)) - self.model.to(self.device) - self.model.eval() - self.model.zero_grad() - logger.info("CIFAR10 model from path %s loaded successfully", model_dir) - - # Read the mapping file, index to object name - mapping_file_path = os.path.join(model_dir, "class_mapping.json") - if os.path.isfile(mapping_file_path): - print("Mapping file present") - with open(mapping_file_path) as pointer: - self.mapping = json.load(pointer) - else: - print("Mapping file missing") - logger.warning("Missing the class_mapping.json file.") - - self.ig = IntegratedGradients(self.model) - self.layer_gradcam = LayerGradCam( - self.model, self.model.model_conv.layer4[2].conv3 - ) - self.occlusion = Occlusion(self.model) - self.initialized = True - self.image_processing = transforms.Compose([ - transforms.Resize(224), - transforms.CenterCrop(224), - transforms.ToTensor(), - transforms.Normalize( - mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225] - ), - ]) - - def _get_img(self, row): - """Compat layer: normally the envelope should just return the data - directly, but older version of KFServing envelope and - Torchserve in general didn't have things set up right - """ - - if isinstance(row, dict): - image = row.get("data") or row.get("body") - else: - image = row - - if isinstance(image, str): - # if the image is a string of bytesarray. - image = base64.b64decode(image) - - return image - - def preprocess(self, data): - """The preprocess function of cifar10 program - converts the input data to a float tensor - Args: - data (List): Input data from the request is in the form of a Tensor - Returns: - list : The preprocess function returns - the input image as a list of float tensors. 
- """ - images = [] - - for row in data: - image = self._get_img(row) - - # If the image is sent as bytesarray - if isinstance(image, (bytearray, bytes)): - image = Image.open(io.BytesIO(image)) - image = self.image_processing(image) - else: - # if the image is a list - image = torch.FloatTensor(image) - - images.append(image) - - return torch.stack(images).to(self.device) - - def attribute_image_features(self, algorithm, data, **kwargs): - """Calculate tensor attributions""" - self.model.zero_grad() - tensor_attributions = algorithm.attribute(data, target=0, **kwargs) - return tensor_attributions - - def output_bytes(self, fig): - """Convert image to bytes""" - fout = BytesIO() - fig.savefig(fout, format="png") - fout.seek(0) - return fout.getvalue() - - def get_insights(self, tensor_data, _, target=0): - default_cmap = LinearSegmentedColormap.from_list( - "custom blue", - [(0, "#ffffff"), (0.25, "#0000ff"), (1, "#0000ff")], - N=256, - ) - - attributions_ig, _ = self.attribute_image_features( - self.ig, - tensor_data, - baselines=tensor_data * 0, - return_convergence_delta=True, - n_steps=15, - ) - - attributions_occ = self.attribute_image_features( - self.occlusion, - tensor_data, - strides=(3, 8, 8), - sliding_window_shapes=(3, 15, 15), - baselines=tensor_data * 0, - ) - - attributions_lgc = self.attribute_image_features( - self.layer_gradcam, tensor_data - ) - - upsamp_attr_lgc = LayerAttribution.interpolate( - attributions_lgc, tensor_data.shape[2:] - ) - - matplot_viz_ig, _ = viz.visualize_image_attr_multiple( - np.transpose( - attributions_ig.squeeze().cpu().detach().numpy(), (1, 2, 0) - ), - np.transpose( - tensor_data.squeeze().cpu().detach().numpy(), (1, 2, 0) - ), - use_pyplot=False, - methods=["original_image", "heat_map"], - cmap=default_cmap, - show_colorbar=True, - signs=["all", "positive"], - titles=["Original", "Integrated Gradients"], - ) - - matplot_viz_occ, _ = viz.visualize_image_attr_multiple( - np.transpose( - attributions_occ.squeeze().cpu().detach().numpy(), (1, 2, 0) - ), - np.transpose( - tensor_data.squeeze().cpu().detach().numpy(), (1, 2, 0) - ), - [ - "original_image", - "heat_map", - "heat_map", - ], - ["all", "positive", "negative"], - show_colorbar=True, - titles=[ - "Original", - "Positive Attribution", - "Negative Attribution", - ], - fig_size=(18, 6), - use_pyplot=False, - ) - - matplot_viz_lgc, _ = viz.visualize_image_attr_multiple( - upsamp_attr_lgc[0].cpu().permute(1, 2, 0).detach().numpy(), - tensor_data.squeeze().permute(1, 2, 0).cpu().numpy(), - use_pyplot=False, - methods=["original_image", "blended_heat_map", "blended_heat_map"], - signs=["all", "positive", "negative"], - show_colorbar=True, - titles=[ - "Original", - "Positive Attribution", - "Negative Attribution", - ], - fig_size=(18, 6) - ) - - occ_bytes = self.output_bytes(matplot_viz_occ) - ig_bytes = self.output_bytes(matplot_viz_ig) - lgc_bytes = self.output_bytes(matplot_viz_lgc) - - output = [{ - "b64": b64encode(row).decode("utf8") - } if isinstance(row, (bytes, bytearray)) else row - for row in [ig_bytes, occ_bytes, lgc_bytes]] - return output diff --git a/samples/contrib/pytorch-samples/cifar10/cifar10_pre_process.py b/samples/contrib/pytorch-samples/cifar10/cifar10_pre_process.py deleted file mode 100644 index 03252a1aa0a..00000000000 --- a/samples/contrib/pytorch-samples/cifar10/cifar10_pre_process.py +++ /dev/null @@ -1,123 +0,0 @@ -# !/usr/bin/env/python3 -# Copyright (c) Facebook, Inc. and its affiliates. 
-# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Cifar10 pre-process module.""" -import subprocess -from pathlib import Path -from argparse import ArgumentParser -import torchvision -import webdataset as wds -from sklearn.model_selection import train_test_split -import numpy as np -from pytorch_kfp_components.components.visualization.component import Visualization - -if __name__ == "__main__": - parser = ArgumentParser() - parser.add_argument("--output_path", type=str) - - parser.add_argument( - "--mlpipeline_ui_metadata", - type=str, - help="Path to write mlpipeline-ui-metadata.json", - ) - - args = vars(parser.parse_args()) - output_path = args["output_path"] - - Path(output_path).mkdir(parents=True, exist_ok=True) - - trainset = torchvision.datasets.CIFAR10( - root="./", train=True, download=True - ) - testset = torchvision.datasets.CIFAR10( - root="./", train=False, download=True - ) - - Path(output_path + "/train").mkdir(parents=True, exist_ok=True) - Path(output_path + "/val").mkdir(parents=True, exist_ok=True) - Path(output_path + "/test").mkdir(parents=True, exist_ok=True) - - RANDOM_SEED = 25 - y = trainset.targets - trainset, valset, y_train, y_val = train_test_split( - trainset, - y, - stratify=y, - shuffle=True, - test_size=0.2, - random_state=RANDOM_SEED - ) - - for name in [(trainset, "train"), (valset, "val"), (testset, "test")]: - with wds.ShardWriter(output_path + "/" + str(name[1]) + "/" + - str(name[1]) + "-%d.tar", maxcount=1000) as sink: - for index, (image, cls) in enumerate(name[0]): - sink.write({ - "__key__": "%06d" % index, - "ppm": image, - "cls": cls - }) - - entry_point = ["ls", "-R", output_path] - run_code = subprocess.run(entry_point, stdout=subprocess.PIPE) #pylint: disable=subprocess-run-check - print(run_code.stdout) - - visualization_arguments = { - "output": { - "mlpipeline_ui_metadata": args["mlpipeline_ui_metadata"], - "dataset_download_path": args["output_path"], - }, - } - - markdown_dict = {"storage": "inline", "source": visualization_arguments} - - print("Visualization arguments: ", markdown_dict) - - visualization = Visualization( - mlpipeline_ui_metadata=args["mlpipeline_ui_metadata"], - markdown=markdown_dict, - ) - - y_array = np.array(y) - - label_names = [ - "airplane", - "automobile", - "bird", - "cat", - "deer", - "dog", - "frog", - "horse", - "ship", - "truck", - ] - label_counts = dict(zip(*np.unique(y_array, return_counts=True))) - label_dict = {} - TOTAL_COUNT = len(y) - for key, value in label_counts.items(): - print( - "Label Counts of [{}]({}) : {}".format( - key, label_names[key].upper(), value - ) - ) - label_dict[label_names[key].upper()] = int(value) - - label_dict["TOTAL_COUNT"] = int(TOTAL_COUNT) - - markdown_dict = {"storage": "inline", "source": label_dict} - - visualization = Visualization( - mlpipeline_ui_metadata=args["mlpipeline_ui_metadata"], - markdown=markdown_dict, - ) diff --git a/samples/contrib/pytorch-samples/cifar10/cifar10_pytorch.py b/samples/contrib/pytorch-samples/cifar10/cifar10_pytorch.py deleted file mode 
100644 index 2c27221f0aa..00000000000 --- a/samples/contrib/pytorch-samples/cifar10/cifar10_pytorch.py +++ /dev/null @@ -1,284 +0,0 @@ -# !/usr/bin/env/python3 -# Copyright (c) Facebook, Inc. and its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Cifar10 training script.""" -import os -import json -from pathlib import Path -from argparse import ArgumentParser -from pytorch_lightning.loggers import TensorBoardLogger -from pytorch_lightning.callbacks import ( - EarlyStopping, - LearningRateMonitor, - ModelCheckpoint, -) -from pytorch_kfp_components.components.visualization.component import ( - Visualization, -) -from pytorch_kfp_components.components.trainer.component import Trainer -from pytorch_kfp_components.components.mar.component import MarGeneration -from pytorch_kfp_components.components.utils.argument_parsing import ( - parse_input_args, -) - -# Argument parser for user defined paths -parser = ArgumentParser() - -parser.add_argument( - "--tensorboard_root", - type=str, - default="output/tensorboard", - help="Tensorboard Root path (default: output/tensorboard)", -) - -parser.add_argument( - "--checkpoint_dir", - type=str, - default="output/train/models", - help="Path to save model checkpoints (default: output/train/models)", -) - -parser.add_argument( - "--dataset_path", - type=str, - default="output/processing", - help="Cifar10 Dataset path (default: output/processing)", -) - -parser.add_argument( - "--model_name", - type=str, - default="resnet.pth", - help="Name of the model to be saved as (default: resnet.pth)", -) - -parser.add_argument( - "--mlpipeline_ui_metadata", - default="mlpipeline-ui-metadata.json", - type=str, - help="Path to write mlpipeline-ui-metadata.json", -) - -parser.add_argument( - "--mlpipeline_metrics", - default="mlpipeline-metrics.json", - type=str, - help="Path to write mlpipeline-metrics.json", -) - -parser.add_argument( - "--script_args", - type=str, - help="Arguments for bert agnews classification script", -) - -parser.add_argument( - "--ptl_args", type=str, help="Arguments specific to PTL trainer" -) - -parser.add_argument("--trial_id", default=0, type=int, help="Trial id") - -parser.add_argument( - "--model_params", - default=None, - type=str, - help="Model parameters for trainer" -) - -parser.add_argument( - "--results", default="results.json", type=str, help="Training results" -) - -# parser = pl.Trainer.add_argparse_args(parent_parser=parser) -args = vars(parser.parse_args()) -script_args = args["script_args"] -ptl_args = args["ptl_args"] -trial_id = args["trial_id"] - -TENSORBOARD_ROOT = args["tensorboard_root"] -CHECKPOINT_DIR = args["checkpoint_dir"] -DATASET_PATH = args["dataset_path"] - -script_dict: dict = parse_input_args(input_str=script_args) -script_dict["checkpoint_dir"] = CHECKPOINT_DIR - -ptl_dict: dict = parse_input_args(input_str=ptl_args) - -# Enabling Tensorboard Logger, ModelCheckpoint, Earlystopping - -lr_logger = LearningRateMonitor() -tboard = TensorBoardLogger(TENSORBOARD_ROOT, log_graph=True) -early_stopping = 
EarlyStopping( - monitor="val_loss", mode="min", patience=5, verbose=True -) -checkpoint_callback = ModelCheckpoint( - dirpath=CHECKPOINT_DIR, - filename="cifar10_{epoch:02d}", - save_top_k=1, - verbose=True, - monitor="val_loss", - mode="min", -) - -if "accelerator" in ptl_dict and ptl_dict["accelerator"] == "None": - ptl_dict["accelerator"] = None - -# Setting the trainer specific arguments -trainer_args = { - "logger": tboard, - "checkpoint_callback": True, - "callbacks": [lr_logger, early_stopping, checkpoint_callback], -} - -if not ptl_dict["max_epochs"]: - trainer_args["max_epochs"] = 1 -else: - trainer_args["max_epochs"] = ptl_dict["max_epochs"] - -if "profiler" in ptl_dict and ptl_dict["profiler"] != "": - trainer_args["profiler"] = ptl_dict["profiler"] - -# Setting the datamodule specific arguments -data_module_args = {"train_glob": DATASET_PATH} - -# Creating parent directories -Path(TENSORBOARD_ROOT).mkdir(parents=True, exist_ok=True) -Path(CHECKPOINT_DIR).mkdir(parents=True, exist_ok=True) - -# Updating all the input parameter to PTL dict - -trainer_args.update(ptl_dict) - -if "model_params" in args and args["model_params"] is not None: - args.update(json.loads(args["model_params"])) - -# Initiating the training process -trainer = Trainer( - module_file="cifar10_train.py", - data_module_file="cifar10_datamodule.py", - module_file_args=args, - data_module_args=data_module_args, - trainer_args=trainer_args, -) - -model = trainer.ptl_trainer.lightning_module - -if trainer.ptl_trainer.global_rank == 0: - # Mar file generation - - cifar_dir, _ = os.path.split(os.path.abspath(__file__)) - - mar_config = { - "MODEL_NAME": - "cifar10_test", - "MODEL_FILE": - os.path.join(cifar_dir, "cifar10_train.py"), - "HANDLER": - os.path.join(cifar_dir, "cifar10_handler.py"), - "SERIALIZED_FILE": - os.path.join(CHECKPOINT_DIR, script_dict["model_name"]), - "VERSION": - "1", - "EXPORT_PATH": - CHECKPOINT_DIR, - "CONFIG_PROPERTIES": - os.path.join(cifar_dir, "config.properties"), - "EXTRA_FILES": - "{},{}".format( - os.path.join(cifar_dir, "class_mapping.json"), - os.path.join(cifar_dir, "classifier.py"), - ), - "REQUIREMENTS_FILE": - os.path.join(cifar_dir, "requirements.txt"), - } - - MarGeneration(mar_config=mar_config, mar_save_path=CHECKPOINT_DIR) - - classes = [ - "airplane", - "automobile", - "bird", - "cat", - "deer", - "dog", - "frog", - "horse", - "ship", - "truck", - ] - - # print(dir(trainer.ptl_trainer.model.module)) - # model = trainer.ptl_trainer.model - - target_index_list = list(set(model.target)) - - class_list = [] - for index in target_index_list: - class_list.append(classes[index]) - - confusion_matrix_dict = { - "actuals": model.target, - "preds": model.preds, - "classes": class_list, - "url": script_dict["confusion_matrix_url"], - } - - test_accuracy = round(float(model.test_acc.compute()), 2) - - print("Model test accuracy: ", test_accuracy) - - if "model_params" in args and args["model_params"] is not None: - data = {} - data[trial_id] = test_accuracy - - Path(os.path.dirname(args["results"])).mkdir( - parents=True, exist_ok=True - ) - - results_file = Path(args["results"]) - if results_file.is_file(): - with open(results_file, "r") as fp: - old_data = json.loads(fp.read()) - data.update(old_data) - - with open(results_file, "w") as fp: - fp.write(json.dumps(data)) - - visualization_arguments = { - "input": { - "tensorboard_root": TENSORBOARD_ROOT, - "checkpoint_dir": CHECKPOINT_DIR, - "dataset_path": DATASET_PATH, - "model_name": script_dict["model_name"], - 
"confusion_matrix_url": script_dict["confusion_matrix_url"], - }, - "output": { - "mlpipeline_ui_metadata": args["mlpipeline_ui_metadata"], - "mlpipeline_metrics": args["mlpipeline_metrics"], - }, - } - - markdown_dict = {"storage": "inline", "source": visualization_arguments} - - print("Visualization Arguments: ", markdown_dict) - - visualization = Visualization( - test_accuracy=test_accuracy, - confusion_matrix_dict=confusion_matrix_dict, - mlpipeline_ui_metadata=args["mlpipeline_ui_metadata"], - mlpipeline_metrics=args["mlpipeline_metrics"], - markdown=markdown_dict, - ) - - checpoint_dir_contents = os.listdir(CHECKPOINT_DIR) - print(f"Checkpoint Directory Contents: {checpoint_dir_contents}") diff --git a/samples/contrib/pytorch-samples/cifar10/cifar10_train.py b/samples/contrib/pytorch-samples/cifar10/cifar10_train.py deleted file mode 100644 index e1ea8436918..00000000000 --- a/samples/contrib/pytorch-samples/cifar10/cifar10_train.py +++ /dev/null @@ -1,208 +0,0 @@ -# !/usr/bin/env/python3 -# Copyright (c) Facebook, Inc. and its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#pylint: disable=no-member,unused-argument,arguments-differ -"""Cifar10 training module.""" -import matplotlib.pyplot as plt -import numpy as np -import pytorch_lightning as pl -import torch -import torch.nn.functional as F -from torchmetrics import Accuracy -from torch import nn -from torchvision import models - - -class CIFAR10Classifier(pl.LightningModule): #pylint: disable=too-many-ancestors,too-many-instance-attributes - """Cifar10 model class.""" - - def __init__(self, **kwargs): - """Initializes the network, optimizer and scheduler.""" - super(CIFAR10Classifier, self).__init__() #pylint: disable=super-with-arguments - self.model_conv = models.resnet50(pretrained=True) - for param in self.model_conv.parameters(): - param.requires_grad = False - num_ftrs = self.model_conv.fc.in_features - num_classes = 10 - self.model_conv.fc = nn.Linear(num_ftrs, num_classes) - - self.scheduler = None - self.optimizer = None - self.args = kwargs - - self.train_acc = Accuracy() - self.val_acc = Accuracy() - self.test_acc = Accuracy() - - self.preds = [] - self.target = [] - self.example_input_array = torch.rand((1, 3, 64, 64)) - - def forward(self, x_var): - """Forward function.""" - out = self.model_conv(x_var) - return out - - def training_step(self, train_batch, batch_idx): - """Training Step - Args: - train_batch : training batch - batch_idx : batch id number - Returns: - train accuracy - """ - if batch_idx == 0: - self.reference_image = (train_batch[0][0]).unsqueeze(0) #pylint: disable=attribute-defined-outside-init - # self.reference_image.resize((1,1,28,28)) - print("\n\nREFERENCE IMAGE!!!") - print(self.reference_image.shape) - x_var, y_var = train_batch - output = self.forward(x_var) - _, y_hat = torch.max(output, dim=1) - loss = F.cross_entropy(output, y_var) - self.log("train_loss", loss) - self.train_acc(y_hat, y_var) - self.log("train_acc", self.train_acc.compute()) - return {"loss": loss} - - def 
test_step(self, test_batch, batch_idx): - """Testing step - Args: - test_batch : test batch data - batch_idx : test batch id - Returns: - test accuracy - """ - - x_var, y_var = test_batch - output = self.forward(x_var) - _, y_hat = torch.max(output, dim=1) - loss = F.cross_entropy(output, y_var) - accelerator = self.args.get("accelerator", None) - if accelerator is not None: - self.log("test_loss", loss, sync_dist=True) - else: - self.log("test_loss", loss) - self.test_acc(y_hat, y_var) - self.preds += y_hat.tolist() - self.target += y_var.tolist() - - self.log("test_acc", self.test_acc.compute()) - return {"test_acc": self.test_acc.compute()} - - def validation_step(self, val_batch, batch_idx): - """Validation step. - - Args: - val_batch : val batch data - batch_idx : val batch id - Returns: - validation accuracy - """ - - x_var, y_var = val_batch - output = self.forward(x_var) - _, y_hat = torch.max(output, dim=1) - loss = F.cross_entropy(output, y_var) - accelerator = self.args.get("accelerator", None) - if accelerator is not None: - self.log("val_loss", loss, sync_dist=True) - else: - self.log("val_loss", loss) - self.val_acc(y_hat, y_var) - self.log("val_acc", self.val_acc.compute()) - return {"val_step_loss": loss, "val_loss": loss} - - def configure_optimizers(self): - """Initializes the optimizer and learning rate scheduler. - - Returns: - output - Initialized optimizer and scheduler - """ - self.optimizer = torch.optim.Adam( - self.parameters(), - lr=self.args.get("lr", 0.001), - weight_decay=self.args.get("weight_decay", 0), - eps=self.args.get("eps", 1e-8) - ) - self.scheduler = { - "scheduler": - torch.optim.lr_scheduler.ReduceLROnPlateau( - self.optimizer, - mode="min", - factor=0.2, - patience=3, - min_lr=1e-6, - verbose=True, - ), - "monitor": - "val_loss", - } - return [self.optimizer], [self.scheduler] - - def makegrid(self, output, numrows): #pylint: disable=no-self-use - """Makes grids. - - Args: - output : Tensor output - numrows : num of rows. - Returns: - c_array : grid array - """ - outer = torch.Tensor.cpu(output).detach() - plt.figure(figsize=(20, 5)) - b_array = np.array([]).reshape(0, outer.shape[2]) - c_array = np.array([]).reshape(numrows * outer.shape[2], 0) - i = 0 - j = 0 - while i < outer.shape[1]: - img = outer[0][i] - b_array = np.concatenate((img, b_array), axis=0) - j += 1 - if j == numrows: - c_array = np.concatenate((c_array, b_array), axis=1) - b_array = np.array([]).reshape(0, outer.shape[2]) - j = 0 - - i += 1 - return c_array - - def show_activations(self, x_var): - """Shows activations - Args: - x_var: x variable - """ - - # logging reference image - self.logger.experiment.add_image( - "input", - torch.Tensor.cpu(x_var[0][0]), - self.current_epoch, - dataformats="HW" - ) - - # logging layer 1 activations - out = self.model_conv.conv1(x_var) - c_grid = self.makegrid(out, 4) - self.logger.experiment.add_image( - "layer 1", c_grid, self.current_epoch, dataformats="HW" - ) - - def training_epoch_end(self, outputs): - """Training epoch end.
- - Args: - outputs: outputs of train end - """ - self.show_activations(self.reference_image) - diff --git a/samples/contrib/pytorch-samples/cifar10/class_mapping.json b/samples/contrib/pytorch-samples/cifar10/class_mapping.json deleted file mode 100644 index f629e692f3d..00000000000 --- a/samples/contrib/pytorch-samples/cifar10/class_mapping.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "0": "plane", - "1": "car", - "2": "bird", - "3": "cat", - "4": "deer", - "5": "dog", - "6": "frog", - "7": "horse", - "8": "ship", - "9": "truck" -} diff --git a/samples/contrib/pytorch-samples/cifar10/classifier.py b/samples/contrib/pytorch-samples/cifar10/classifier.py deleted file mode 100644 index 1bd6b769ad5..00000000000 --- a/samples/contrib/pytorch-samples/cifar10/classifier.py +++ /dev/null @@ -1,40 +0,0 @@ -# !/usr/bin/env/python3 -# Copyright (c) Facebook, Inc. and its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" This is the model wrapper """ - -from torch import nn -from torchvision import models -import pytorch_lightning as pl - - -class CIFAR10CLASSIFIER(pl.LightningModule): # pylint: disable=too-many-ancestors - """ - model wrapper for cifar10 classification - """ - - def __init__(self, **kwargs): - """ - Initializes the network, optimizer and scheduler - """ - super().__init__() - self.model_conv = models.resnet50(pretrained=True) - for param in self.model_conv.parameters(): - param.requires_grad = False - num_ftrs = self.model_conv.fc.in_features - num_classes = 10 - self.model_conv.fc = nn.Linear(num_ftrs, num_classes) - - def forward(self, x): # pylint: disable=arguments-differ - out = self.model_conv(x) - return out diff --git a/samples/contrib/pytorch-samples/cifar10/config.properties b/samples/contrib/pytorch-samples/cifar10/config.properties deleted file mode 100644 index fba786380ca..00000000000 --- a/samples/contrib/pytorch-samples/cifar10/config.properties +++ /dev/null @@ -1,9 +0,0 @@ -inference_address=http://0.0.0.0:8085 -management_address=http://0.0.0.0:8085 -install_py_dep_per_model=true -enable_envvars_config=true -number_of_netty_threads=4 -job_queue_size=10 -max_response_size = 655350000 -model_store=/mnt/models/model-store -model_snapshot={"name":"startup.cfg","modelCount":1,"models":{"cifar10":{"1.0":{"defaultVersion":true,"marName":"cifar10_test.mar","minWorkers":1,"maxWorkers":5,"batchSize":1,"maxBatchDelay":5000,"responseTimeout":900}}}} diff --git a/samples/contrib/pytorch-samples/cifar10/horse.png b/samples/contrib/pytorch-samples/cifar10/horse.png deleted file mode 100644 index 0eae13e1b60..00000000000 Binary files a/samples/contrib/pytorch-samples/cifar10/horse.png and /dev/null differ diff --git a/samples/contrib/pytorch-samples/cifar10/input.json b/samples/contrib/pytorch-samples/cifar10/input.json deleted file mode 100644 index 81fcc06cff8..00000000000 --- a/samples/contrib/pytorch-samples/cifar10/input.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "inputs": [ - { - "data": 
"iVBORw0KGgoAAAANSUhEUgAAAgAAAAIACAIAAAB7GkOtAAAAA3NCSVQICAjb4U/gAAAgAElEQVR4nIS93ZI0O44k5g5m1WnN7s7svpduZKa3l+0jrGlkMpvpPpWE6wI/BCPrtMK6v5OVGcEgARBwgCDI//P/+N/xuATGfyUAAlxvSZK23lvy/eO+Xb73dt97vwF3d9BJmESDu3cLBGU/LpckeHzDaFy7bosvnCTJfLUEEIhv8OdbJEhChEsSaaRJUjz3GIfDzEi6I5qNXkWT7m5mcn9nr5RPyQFkYyQISWYmetxAAKSZSTt+cndblPbbZWbxuLtLinYIEJCfDro7jWbVVckMQeQcY1HvDCdaq24AkH7iS5JBQJJGEpQkd/GFZGa2SZpDqPaNpPpH29VhVR8A7HzD6dLNoNNbNvPis8YNdvhDMvmV9xaVyKBMfRZNEJEDh+qNyce6K+h0NTXoZmbx3FoLlvxtxkUT0UL20KvP5AuUNjh+HR8GVdkdACBFrzSp1HSLy3AeaTGebfafdjhok5skzcyWoThLguAyI0nXWstgZrbWWmu9lsVPay0DzSyk2JYtW6tkca2V91g0b8tATPYhZnES3QV4jzQ+9CwAsPd29723IJfee3vNNXcHRHcBjqZkMAPb99YWIOLtbvB4JGZWsC86kV0jBUra7vE/x96+997v91vuImJqEIBbaqTfRHp8YGqYFBEYrW8430cLJRcth904CTNud4dCg8VsTUXx25Tf4uwYx091WxJEqs67DKIR0HqFJmDyca3gLOub4O7rKKFQUpIoCBJgpbx8kVuia5k71yIJD6vgZoz5DqaWW0Pcc1YrJq8AKFTOuGeMSmfAxVWAoc3WWn3bfKTshy4ToDPfbhZS0LFwpElH/T+u6oqgfoxm7r73OzS1XGDKihkBgq7SfbcSP90LrXR+YPeO/f2HRTtcqhusZ2WOGUjqkKIFu6+eUC7Z3bLgED2YD8jYPeWYzJPCrfgExZQ4fDzNjk7fb/w0Jz2fhzmBBEKjLz2dWh8FREB0w91BhCBWIyQh0dICyW85IWMq9jtIyz8kbeBMj5sIlz27VXy/+tcb4i04quF0prXMr9ejkQ96qruefFSKAADB5VD1P6SCVEje9i0pzEPcEKZRkksQCK3QUmRoXSEYFHrnoL0x8JxzrbJ3qBVrZHB0wueoFJbG5RDAZaaBqBAQqhV/DJGlJItBLQlrLScdCuuuhHZPkZ6ztf5MUrRYuvzzTgaOwnPKt6iw0KctC5oyp2e2rbsPFyL5rXu/XjTKHcJa1k+Fqi9AO7S/mdFea8x19WtYXQAAmJkE0U0rwBMAl5Futo7irob8GM+a5wdnkpjtX/ruMUCS0pmNAcYlmUizsAqtKT6IUUBYKgwoGhUeyWBV3Ne9D8vcMJslMKDSxCvmUaoemNKSkMAOZm6fOL0RpYY2izlTPR0z58ME3peYfTqym/81W9PUJSKc9Cj3YT5rhIplVmiAB4492PHU6kz/7GJAtkOwUczVjdDSKNbA20cDaTktBT/9EUJnxuP9fRMwOrzWyrHXuyyVGlIAeHTuafxB527zzGyNifzU/p/0IRMmNkg0ewL8cEBxq/VLGTXKSSFMDl59+01Cgno17XsQcoDuqdztECWfCmQwHCk/MmwARFh7W0H5wnY6vrEkNvrpx0mQQhn4bvn0PyFkDbeAHYsQgvec/dS/8d/jfVVrlDULQLp8lUpRKcR/olWnyzXfNZ3MM4RhUfrLD8yknFvD6oXUHyJml0RO7HHe1W2Glb9uAGPao+R8yIDFn+3bxZ+vtYw53xP3xzsc4QcoRuUCAZMAk0i+gUVi7410QwSZoHBMJBEHFyTsLzs3+t5Ggk3WYzZ6XCDN9OBGTwmWGuvgVTDjPJ144/Bbh9ClU8/3AEwA4ANYl8DD5aFHXE7IbEGQ7wCrarwwgWXRNR1Q6cPhO+LSX/61aBKByoQM+TTRwrsWhTZYx10IFt9NhXEOk3wg88UFPjvzqfVaayDBmEKk/MLvw6vrVyT37C0/xuBDKYufPe+eHIrFZIo72a8BzNLj1keM8VM1c9rsfsewGf8ciJGUhx58TOkxnN/G8lfNJpV0DMDs7fzg7mtYNRoR05gFekqISym6HHa8BLnEO3gV2N3MHBDMgrZSq+OjYccoLG5k9ioEUz0jji4MajBMTJg6lRLPURCu7e7sSVREhIVlSHaL2Bfh4h0GOClPC2Ko+JVu9anza2PTZHoEi1BBmyROBkop6Uh5KKKH9KZJXi6kA2qQX7dMIWHFZTHkcIrl54dsxd1IQHLBwq40GdMYzC5Jer1eq0yWvH7YUHpcHkFYmIxy1yZf7u+ALw4hnG4u0ou/lKAExg4sI7fegycqCBhdIfALiicZDmgRlmVF4Ge2EBkkpdAgIn4uLGDm8giz+phkxgM0MECWD6kwwdnuSomv5BINCQ7dAwa1nxEd+zRXdWW089Zyak16WF6u9BlT3VsoxAnLGVNGB9V6daBIAnmM+n6xV9SDIkSx2mowaH8xjmc/x8Rm+pwPAa27jvEPmAFi2WLGH8LU010lTtPAHOPd5uRie92AokCrJJJ+U/iXEc3ppOPXzu/vZ1OIcHtvzdAHasNvFo5kceZ80zC51E3Ar88O4H7vuXw7iAVDBRxUQUPBHTKBhBwOB5YZQW5JMvgyq3UyGdL4ufdwMnoU0ywnYCEqbjkizkF42BF3Ibltxq3dfnV72UhLD8mhA/lRHq18qz6namiIVTgtZCBa3PFWcG/stDC/8DtAINkAiCeqXnjx2J3mQkAcCbw4Mq37hBfxBTMOd9sSiTe2MNpfxqX7RcPoHrUT4PJDulnXCf+QJF9fX38cZMHUuDHntrvn2qfcXTITt28SZvb2t721VqAEJ1eZ1pzcAiUu2m0SywYcpj+tZfe4wF0pgnBDmP5rcJrqF7IJxhmVzkhlRe5o9criBNDI6PMKGwCAIB0yJJL+BQjjDD80j3sG1v6Chf/8GqIjPCkT7+hFkfg31FSvSKCCbeXpDDUUT8ViBXNNWy5NmIC/oMrowC93pJ6NX3XpwevZuopNZ0HYAUprrY4cpoingbmi8LFMUKa5+VK34LSQVpl8MO4mNVtBV/dyldPYy9Flt4BYGyqj24bwGJ75lqnWW983ER4avJXRRdkKqc0JdT9YhHAXwJVvKRYcSCFhu2whnCb4BizEQBAMkjEyC5BWot+bdPbyAHxn/+NltuM2Y/vkybq+SV59tl48jTUCVQ9ZCNJJLhI5/xMEzghJKm9AFr4GAHi6PiApIwWzVy4jHKBUOoE5TcIZqfaeQhVqp30mlDQ+DbBMChX4ZL1FlDJnZEMZuwgLNOocHJuCMGd9TzWZGdylXKlWrdc24z6hwytCQvniZe6+3RcpdzPbci4SjIQfwEz28yMRluKyzFza9JLjgVlghbmSfG3VZLXQBIC04T1cC+ixrFpmOAMLPVkykFxpAER4erUGIq21iqJw9158G1D9OCOlZS4CpfYP8UUrWUMA72FtpfAEOSBIo9FWwiUh95zlWNS6RCE5x8
hzKvZP7OAFgVNsdg3N0gJQI3o+cc2xGEwzassei7HhNo2Y+21CamxDlCcUQqB5lGbnyTliRHUMhNErMYTNyFz0q2yhusbSX70x7s+fBtbIvKEyiWEsPkzgnLpNbZR6KlKU30q6RK7mfb6jeo+IetU6oTrEpALMxa2Rh1DG+zhSwFh7nz3MYOWYI1bJUZdVCP3bq/fIyGS4i4SABICQg2SEC5BsikjQSLVyKZ1h84zagdiqhJw0+eqfcsaFoBMk3LVjjaGAdo/CE7Tm2nDwUHQmTF4ueVkMki7PxTgPOU/pVZMHcOi4UMP8AKCxnTYKucLRWr5VQEsP6O7yFLPWHCVWRLkoxa+MR0nwHaGCZEc7oBPSkVesRjVFQ2IjeglILnc/q1ypskrAzwpiZZdUwpLqGhJ+8M3r6+vbjJmcBEr+ykQ49fJ90MrdJKcgfG1/E9AqEcfLJWrBITlhnSgmVneOAybApN0IUsf/wgPR8EJzrP6kssupwyP0F6vHdSv7EwqyNiLRWUNzF1CHFCVlOkgkSUU6g7VKysDiQcRTnZVsz/7weAw+V1yndjuvzi8a/pRigsVM7jt9BCJ8hzSESDlqejdBWhpK4KxJMTQJZ7JEB8E/eDRI/UjHGqypqQgv25PqYNnDtKRS0GmzAMQv1yHdUIIt4qGaAnPFcuR85JF6lN1YycRqrSYnCjPejDmYonNhQZInGzKD0MnFjmymnH36+oOC0z6pQVB/46r0nDRxKdDVsiR3L/jtTvCSgvGKwOJlY+qNDkh7h58orlyO1em2yzNKDqR+7hlbTHQorFAwt40EJOEkiB+wl5qhiQAjFSnXMBGAqTLAAbjgYKwVd8g+0GHqJCxIkQbRLHPR7LxGZdbjjWW/QS7UcjLNkMlRHy5aQQyb7BUaoT4YOp96MLqoRwDuG8AiR+JytlNyexq6VR05EgEeWQ9lAL6/eYwkPLUrhvZ3d2Ui135TBGlub/6EpLk7BDd3c0CmJTm8V31jLjHhKQo2Zeifg+b4XNe+idVzG4TB241k/6bxb2rnNCLIhNdj6othCYczXJMJcIUpUHrqmpAsDe5t1UwVtTwdhSqqrktMErk/rylMLf0T7jVwyDvlncUrV0CmCvJCmQsY/YlA3AMToVFqCLrviPnOGMsxOQ/40Mru1lDqZYb508dIn2DkQYGaiueeW2ceRgypoDrx6awxJT/KyD2trAqCs7Bh5+e43HCydaWAY5c65jGZY+dHpN4210IT17scGHSt/t1XtvPXdqF9YR8RADQE6dtcTl8rAj15lzNjvKGbWl9PnwNywuK2gzrdN95rGWBhSuJFe7/DaTCbDE0HQJmn5+jpdixY0OMep5GCKyG86rsYk9G8MoJaj5eS8Z7OKOjpKflpm9tUA7kvZ+s5N5/XyMefnbeRQB898F4tY6KHMXl37IWZK1etricIuMR7yupNpsfMYoezxaltOG6eYt8m4WVfX8aBv2KxPMNSnoTyXcKNLafjvbWwQjjct+TcRtKJSjxga5/RawPDlQJgTCB4fJ/xgTXZChIMPpFk5iEfnSkpAL3KOF9jTgx4CDcp6ENuMk409E0HiJAMDiUbRhFm5v7ONuNu+kihU1Dkn1z6BUpYKSY9bkPFHEnu9zv9ZaFzEtD2zwScBRhJxBL2ZVHJ2AgR1LNOgT+L5PPVT0nC8AYmwW2I2ucjHJr9/vLRMUT6sOVEusTpcX++C/ThE80u3fOKrbgr5BX+KRt2oW66OdVGJG6YEPUMs1zECgVm5hhBRn7wsjWzQDjM2HlXfTfWho+oKGJ/gdeZbJiEbfun+qlXowZePE3zfkXcKkJyVvQlSCshdk8SkegHAWJA78g907LcPAac7O1eLBkvSiKS3IRhkh5rrd65GY+rW2K990m0vNER68hzXfAEqAnIxzJvvdbLlh6ZGdYhBUNA5RRc03NKeKeitSRiHTqUqBy2/5bInpCl5NClvXcv0X1CqwvWfPzUNn62IOn19f1HTTBKihWKSAUIYr6135sZv6KQ6wqZRtqYKO+VbW6K+k3n8QQBPqD+vCeh60NTwWpPE49YPxbZP9Xl3XgZHA12+hAkDp2FwFltYGpKpcJNnyZwiqVrmYJpfzG42c+nYNVLZx+fqrNuUNOhGyTprXdLyMKGt5zVxH40+Bun7ntaelqbPO+/3pxb9mZzd7aloZD1pW2f5uRDxItxktzdaDSC1iuKj9Y+u/fZbUk0ztzHsyQ2Z+iw0NKkogDVvipO4WH6704yQLRiNeUXZMSWxp443WCDuHxvXaq0qBZaYULd3rYS4hJxoGPkg4YwkHRpHarmB1mswyNSHwBJW0IoiR6LtsO8kHX0f2+PzpfJcfneAe13xTwVe9V7fkF3fKto2GsGMZK/tF/qxBIlNZSR+AehSXe19T5sLSJbSjsjuf1SrBFPa2r3tJ1KA4CX3Uicwdznn+uQTw3e6GN+aWb7/Y73dsQJgPt2vwAZamobGDmS0bGIfnYQ6HF/7Ap4vWJbb3a2PFaD5FsO10tGM/ftkgy+3X2vBeBr0+T4egmAu/Z+mcl9Q5HwnvtKWCwpu+iAUGv/N0g0UeKJzFBkrtrAtStPOOBVJpAFaQGM3EyQJiLd29o51a+NQGFM3fKbpiwhLDqTOb1epKOda/p1UkpQvCSAZgAjSfSJIqc+mvLHVI6NUkOhBMWmDk3VpJWlHFpnxIYqZYyKoGgqMAIAhqUW02IOUBU1brDZoolrc78lOry1bcg4ADNUt4sy0B279BGlaJyYYZP4PtyogaBCnggwk4DNlJ30hlHB0Mc+50lkwmJTOgDB3bFsBRUicEyz3M2XD8Y/J0MpOlhaGSLFWubidVtpk8owCeJk2u4DvEoam1FGx1vpeyxgVMqkSiwYabplqyQ5ZYATDl9m0UOBoIEJ4Sc+jaa2IstfWKt3T3n6K7mxprq6Q04iNi0IJ6UnAX11pAyA5IjslCJc0dDl8dxyKvtzCkvEvYUFlugq5dqmQJL87VleQUCkM8vEHT7hEGtGelOs1dN6vgZ/KrkDmW4IIUog8DzPYUHnbMzYBkCczOmAaW05OeSZd8b5hVEIVyQSRQZBr4eXQkJ2t6M1x7VhCsZKL5oHXpx3gQYzgP76/v5OkZa1h6VwUN9vCdDmtu1mHp7a291SWeNnLbnk2mvttV6CfP9Eym0QKHcfhX2DT3Pb42G5gaQ5nBWGruGc6QuBmVaB2qVPts6uMCPqJURBiIog5eME1fJ5LgGxl7omHSRxrVKYIAGpAwVKAse0L9QG1sqDVNa4oCsmTmR76zyMHeqjejU20By9DIVSqDuVTbVzSeama6Sfe4GOMwmTCzdUShIw817ms1bMmCJVOROWYYgC62FdGbmdY+BHFksdj4zBJPVlmVUvVZGjyVNTI/ZClzEeUnNGdb6n1d6oXIRH2o6591xnaOfxvsXFVGkfix1ThfUif+EWkk9dn8avGjKzSHpMHFscjYd8hI/CcqCNU1tTMyPdZZm7mEimFoSJM48wGb3dtZZ1wSXJ4uazWelarpfkcqMJWzumeS6WMJaeIswfi
3lJKMkuvLy+FwOJtHT58+vbm5Oj46evr02fnZ6YuXzxzX8X1P5Pl8PqUI2ztbO7s7uzvbrsPA6Mls8u2Trwqhw1a4tbHjuEEYBlfXl1mRU2p6Kyud7spsNr93717gt26HoyBoOZxv7Wyfn52urKwioUKoy8tLSulsOgnDlud5YRBwxos8z9IsjmOltNbacdlsOnVdJwgCxpjWmhI6Go0AUApZ5MV3P/uk2+1cXV31+ytCyCAMAYwBXYiCO1wKoZSWhUizbHd3h3Pmu24YBpwyx+FB4M/nszwrkiydz2eDQT+KZo7D7+3vP336hDHywQfvM4ZSFvPZ1GjNOXMdt9cOPc8Boyfj4e31VTSfAZgiy1Y21sN2t9tfCzud4fD27PRFEo1evXwm8+zVq4vbi9nqxjqlhHHe6vR6vc6DwwdJmqZ5sb21vbW7H8fJ3//857fDm16vTxkmadrptEbjUafTStN0Hs0BAAl0e4O19bWtra0nT77Z3Ny8ub0xYPIsm4xHeZ4VRfHy+bMkSabT6WwyybJEiIJzzgj1Pd8QLN0NSAjFlbWVsN1Z29gcrG2mWfGr3/12Hs+TIgFiEGljTdjpXdK0GDBIFpkmTa2clPy7pe5e14rVy9DuNMaGAUi13BZQahHOEu5XexbhzDtL2H4oW5XV6G69SBaXjDHGqsLlOBtRaFL5cyxqEdvxDYDUCrkdJFT5oKWZglV7eutNKiULseHkhXFQ39HCD6/pg93d8mmiAQC61MVmqQiteYe28qt+AI0CDHtYA4oaOGhf2h3hWX6o4NRuhFCbtUsJqQUArVXvKnHLPuMabAmQpu5rcwMWY66uXt0aqbmgm0MidAH30BAMJfpbGjgoHXZk8V+ojQFEBNQESFmZhYhlTTTg4hGVZ4dlcbgA5QY0wzJ235EQb52C9etr7sE3pqwV0/aJWKefMZb5UysplVJSiDzPpSiKLJuOJ8PhaDQaXV5eZXlysL+3vja4vrp4/fT30fDy+vLy4vR0PJ3nUndXNzqDjXCw3tvYR68DbnhzfXl0fJJm6e1wqMF02u1Oq9Vph7PphFFUUr5+9Wo8vJ1Pp0oWLd/zXPbxB/8vXe/VJUlynQmacC1Ca5G6KrOqurtaAN0gSJCY5XA4Z3bP2Zmnmad93l+wj/uP5mG5XFAOB0CDQGtRWqTOyNDKQ7g2s30wdw+PLDBQyPbMcGFm7v7de7+rTqqVcsY0dU178OBBoVDwPEeSpIxhlEulXCbjeY5j25SxUrm0t3+wu7cvKxIh9PT05uXLN0f3Htj2stWqzKZ9gCBEUr2+Y1kzXdcZYF99+XWxUPSD4LZzE3ieIEmvXr7a3dkFDAz7vf39fYQhZFBVlel0qql6vz/wPF/AouO6944Ofc/L5bKUEkFApUKBkHC9WgVh6PuBpmkZQwsDfzyeaZp2eHgoCjgkgSzh9Xq5t7vb6w1UVUMI+r6HEFQkWZZFLET053g8cXzP8TzXtbGAGCPFQlHX1VKpoOvKvYN9z7XXq5Uii29fv1JlcTaZrBaLer2uKfKrF88RYNZ0rCgSY2Sn3QYSVnTj8OjYdb1vv/nq6ZMvB/2uKmPLWr94elqvH0qyEBB3Op049ur6pnN925/OZsP+4PnLV7KsVsqVICQAUNddU0osa359fWU79uVVp9ls1Ov1bD6XyWRvO925tajXa9Vaw3GdfKHQqDcXyxUhoTW3SpXSB48/tB1vvbQQEkgYQgBkWYYIBCQklIqSpBtGJps3MxlBEta2i0S5vbsvacbbs/MXr58TRggLY7M+/cJu3tx02cfkOY9fXgY2un8cvrmVOQTTbwc/nmNF1MkbsBiHIjiH8WF3/0VXp9thHgAAXno4UbWjrKn4ewgAvKOe8YgjAACl5I+86QCATeF6rrQmOAxZXBUhSWFLQCMeVrQI+KDdYoxyVRrFZd1SijNL5N4Gd2DMVcWouqnPF4PKHcRJr0Nqzik5DCGE2+1K4sAiDBGAACW2A4sWDkLIY3NSZ9yS2zHcxzPezrTi+0eNpFM3K/1kwEhB2PzKE8HSCQdgq/RCLDkizimZRnqyAIBEHUgLyjtwv5Fb8QN5d7c7z8Qfu83/psGbfpJY6hPFtnL0D8PAD1zP9lzHc5zA9wM/IIQQShVFKeSzmioLGFHfWVmzemv/6P6D9t7hweH9WnPHJ9CnoDsYzxb2dbcnwbDVahmGsbu706jVZFmURaHbuREQLOYzmDFVlor5/N5Ou9moVcvFYi5rza1+v3/b7Z6dnb169UqS5LdvznrdLiVsNp1Nx5P5bHZ9ff365cvLy8snT3549sN3nh9gQd4/OL6+7kGEZFmYzQfH9w/7gz4WZEpxoZDt93uSKF9dXU3HEwgZYFRWZASh53tBEBq6Ya+darXaajYMXUcQGrpxcXEpSWK11lgsV2vbNg1FEHAun6uWS4Hvh77f6XRWq7Xv+34QyLIcBr6mypqq39x09g/2PM+RJAAhAYwFYWgv7Uq56vuOKsmFfI6QACNo23ahmA9DctvrUcoc1yEkyJrG0dE+I2Hgu9msfnR4gBE8Pz+z7bUg4PV6tbSmxULe913XtnOmvlotTU0Zj0e+71YrlYPDA8006rXGfGa9ePb06y+/qFWK48HtbnsXALHZ3gsYLlezsiLU6vVytX5weNBqtfKFYiaT03SdkDCfKzx69Gg2n82ms3y+UCjkS8VSuVRyXffy4tzMFVRVEyW5XmtUqxUAcbVaC0O2XK57g0G5VHIclzFw07ldr50HDx6qmklJiLBAQ18UxVyu2Kg3q7WaaZqapquaOZvNfBICjM1sfmfvMFsoTazF24u3a3dFAEmy5+PXLR1JCGIM3QqahDBqwJG8EwkpnariuB2kxyhkjEeUI8iSgl+8sVVUkzoZCoDvlJ8AAHI2P4K4hDVJgwDYoBBMKfibV5ilNHeY8valgDQCGZ6rzFE3ElYsIshTqvAW65AqlgbxQavJOX1ekRps4IDGKvs2jsNtxLpjU6R77m4DbmK23YGnDU7xbNoomYzFHZ8j6wBHTv/tMngxgqZu4NbmHeyDif2QBkC4eWiiU0VFGzbxncmTx3k0lPoj2BIAICLc4kxDHtW0dQc244k32EbfhxsiCERBVdEud3lGkGqM865NkBYGqXv6RyggnuiXQD9LOQCCIPAD33HXnut6rs0DXVzflySpXCp98tHjVqshy6KqGqKakXXTDihA4sp2nr94+fbta3dluSurlJEf3dutNesMMEUSA9/tdTvD/q01G5uKuJhPLk9P7ZWlqCqjwXQ4+u7779+8ev2H3//u97/79XdffzmdTBCCQRDKkloolDESIGPT8WhhWQtr4bq+43iMAVFSVC1DGbm+6eiZyv7hvX6/d3iw++bNs0q5qOtGvd4ajWetduOmc5UxMoCC169eBEEoiYhQenh4RAi5OL+AECuyrGtqu91erlaUhMVS2XP973/44aOPPzk7Ow1C0m7V7987ctdrAUNJEiklJ8fHz5+9eP/9923HAQA16hVVUShlYUB83/W8dbVSdJx1LpcN3AAy7Dv+Jz/92P
M9BNhoNNA0RcCIATCbz1zPd1yXMIYYqdfLtVoFC9jzXUkUarWqqsoX52eEBLa9rlXKy8XU0DQEgSRix7F9zxv0OqKIDw73dnZb2VxWEmRAwbdff7lcLezFTJPVfLY4Gluvz7uD6WwwncgymI6HZjZbqVRlVfM833F91/EURT07P3vx/Hm7vaPrpuf74/FY08xu9zZfLFYrtZtO9+XzZ5qZ7fb68+ns7dk5wmK7vXtwcAQA3Ns7EEV5p7VbKJbq1SbC4mw6a9QbCKHDw4PHjz+6d+9ePpd1fe+22x8Mx7PZnDIAMXRcW5DkWrPV2jkwc4VnL18+ef50uV5IsggiRZg/wzR5zlPv1Lt6J4Bo4/fioJ8Y+CCls/O3myVliUH8uiMc2/QbXOHNSWDivQVbVdRxDPQIbWwLuPGrAs5lRZQt27z4qXcyutCdPuHpDQQRD9NMTS2K2UzQFMaKI0x9+Bqy2PgQEgYqQYq7AMFRKdqmADDCSLIefAE4wQZjaujf+kCY9uPfDXmM5Q4BCAMIGGVxDsAmJD+ZDOKFWCGvPMqtKQBjKuWO3GJxehrc8Gsb4cwVBwYis4lSCikCkEICCSAsVWc1SvuCgFJAKYIxf0cI2axy5A1GPKWBu6wgpQBjlposhCCmHaMJpSQ/jB7BTaVYCEDajN16Gu5+UuxkWiFKFvnu7SYMAEA3UT+UQz8vcuC6LglDSkgYhIHnUUZkSS6Wy6VSqdsfKpLAQueH77/11zYD0PcCIIh+EJbKlffefx9hJEsyZWAynXZ6HUapqki+40qSKEBozedf/PrpammZGTOfKzx/9qzf6xkZM5vJYCwW84VWoyHLorVYrde27fvDwfDRw8cYCdfn56qkqaZGNR8A0G4LvCjQYrHwA2+nfXB+dvbpZz+r7uxf3d7Umvv93nB//2A+mxYLVRr43sr2DVfAsFQsLZaLQj5v2/azpz/W6i0s4HqtfHZ6USzkO9dXkqL+8OzZzu7UD6ksi3NrirAAhdCx7WG/n8tm59ZUxIIgirVaVde1kISWZe3uHRQKxWG/NxpOdUMNApdSfzi6bTWbgU8AQVmz9MP3TxFCvu/ns4aiyJPJ5NNPP+32e8vFIiRhQMIgCAsZZb1aFgv5wWB4eLB/e3szHg+LxTwNA0opZMzUlfv3DlarteeFgqCMRj3T1D3fOTp+eHS477q2JElvXp6+fPXi7ZsXOztNWcDT4dS2/R+f/JirNwNMiYTm9np3bxciDDFer9eeF7x5/Xa9XiuKcnB4ZFnWb3/z67/89/8xm81XqzVGqGmY19fXu/t7//l//y9uECxXK9PI3N50Pnj/6Lpz+3/8b//1//y//69mvTWezjRFefHiebNe6w8noigQhvujcbVSW05Hnc4NCTxRFBEEqqpquuH5/nxumVm93d4DolDIFSVJHPT7b9++FUVBU3WKQkYjPZNSBu4CCIdilg5XgRDF2vHmfUnjIACckEBxRCmEjEFepDOOnOF/i4iH6AgAGIFJfN3dl49BABBOvIvxteI0rES9S/B68yZyecAgADQpUkcZ3Upl3UwvRZ8Arq9jXh6c894c05K3PiU8WGKaQAjxfqMZTxVAlrY1NssKQSyqOOWTEl4x5NGkrXG82AzEjubNLLf1/cjjwS2ZyJJCkTiN23NxmR0r84xbNVEwL4Qxk8UQ3yle7uQuM8pQVD8cxsOiIInAiT0ffIgovnrqIjwXOTL5UFTwJ/I0M8A4YcX96lwDABDy0hZw47HZWB2p2xCvTbxw2/xPMqik/128+ikjkaafIP74x9Zg+u+px4vd/TVpo8coZYxQQkgY+H4YBkHgB4EfhoHjrlzXYxBQClzPs+31cDjo3nZGw36xUGg0mwKEEoIQMlmWy6VCqVwyzOxiuVwsFjfX176zno9Hw+s3WU3r33aXC2s2Hp29fSNL0s7BweMPP6nVa8VSubm789nPf/bovUdHR/sn94/qtZkgudUAACAASURBVKquKoIgZLMmxkiSRIQxRJCEvjUbW9ZsubQwEoIwGI/HEMLu7a3rOkyUbc9ngHWurz96/LB726VhCKBoLW1BUfcPjyDCtus5ruf5/m2/v7CsTC6PoEAJkUQsIoYRuz4/PT4+opQ8/uC9H5/8IMlSo9XqdG58P8hls46zfvz+Q9d184V8uVIeTca6prquW6tVv/vm22q5ImIBMDabzVzP0U2VkBBjbK+dcrm+tGxKqYDQ2l64gafIYjZrNpt1azprt9vDwXA6mQR+IAty4PmmIR8c7hcK+VqtulpZjr2y10sRQUVE9nJ+uNuejAeyAK3pRERAUxRICQl8AcNGo2YYqoDxYmF988WXb16/QFg9PDiYzuZeQC6urkuNdq1et9YrVTN/8dOPJ9NFp3P75s1La7HK5POqKufyudXSymayf/onP1U1ZTgZ3zu+d9O5gQjtHx2a2Vy/PwzCYLFYZjIZhNB7jx+9ePkim8n84q9/+frVy0cPH44Gfc+xMYSB7+22WoiGmkBDdznpXRN3bVvj+WTkOmsSBpqmKqJoaHKxUtI1jRKmaWZr9162VFczhW+ePut0u87aEjkLE2MIt8yTF4rBhAygEV8TYQWLtUfOw0CEAPfxYYQg5BVsIIZQQEhAEEMYIUn0JkMUVSkGKIGjWGNL3m7OUMCYz4nf7bjEDCdromI0LI63YCAOy2FpGcAALwrNkglx9AeQR8SkWP7owgDCjds3RWyByNThFkEULRNz1xvhgfea9YTEh1sYwYdLknCihHHh+JgADJdAyTZHWcgYgiAh8jfMD0h4qYhKYYwlXbxgZIFxzgfDyBcQky2IxbgfGxMx7MYOIrRxscLYMOFfoCiyKgoXixYwEmYMMMQSSidezngt48eNS6b4aeKDiEQDTpzy8aQod2JH9bffiT9LlPENfL+DzjQOZ+YbaQGQHLth7VI/t6TItsxIfnK2J34o+SUYISFjNAgC3vzE81zPcz3Pdj1vvXYIZaIoCqKAEFIV2TR0Qzdq9ToUEJaEMAwQIcPOTWCvu+cXV2en08GQUc/z1lcXb3OqNh4NPNc2NLVcLO3s7xWKRU03rMWSUCYpiu16i+XKWljTyeTVq9dX11eUkOlsNuj3zy8ubNve392dTCeSiEzTIGHoey4AIAwJRDCTyWQyGWux7I5nuWym1+uYuiYgeHJ8r15vPn/x2sxkVcPc2d1drV0K4HQ6WdprwthkMlksbEmUZUmUEECQABJUKnVVk2u1GqWk0701M9nj+8eKqq1tWxLE9XqdyRie52GM3n//A8exV6tVt9ttNhuT0aRWq7mOgxCyFgvGQkFEgDLbccqlOqPIcYK1vZ5NR/lCdjge/dmf/fzli+eiKELABoP+fDZTZGU2m+uqGgShKKFGo16rVsIw7HY7C2tGAu+2c9Vq1C/evtrf3Rn1bjEEznoFGKNhaM3nN9eXn3z8sWMvq5VqPp9/+uwpo+zq4vTDjz/RjQwFcL5Ya9mcbhg/PHvleGCn0Xzy7E21XLh/fPzB40+K5ZKm6/v7eycnxw8fPsxkzcl43Gw2tIzpeu4HH
z52Pf/s/Pzo8KhWq0OEJ9PxeDKaTifWYvHw0YPpZEIpLeTznc7Nn//pn/a6tyz0nz994jr2qxfPXj/91lsvqW8LjGRMXRCwpmmU0iD0SeA76zVFVBKxpqm5QqnSbANJ/f7Zs5dnb6fTIaMuZD7YYMfWe8QFQKLzQZQofFGaWAzYHLO3+BAEoRDl7EeaZsT6RtUJooAQhHjaP8Jbh3LlLzp7CjMgStBh0+0rGTCAEEf8dPRmppIJ0q4+CsDmKwhSMfRwQ/NGpAonHlJkCUp4Cy4PIdqonGlbAu826jFAwMgCiQ6L84G34QlE2jOEGz7nXWaZ3aHaEwHANkOIsTzBa5iQX7GggNsyLalOAWNtP/UBvMx++vgkLStpjQIBgFHeBIzpbz49AKJiQ5SnNcBEE0AgMTRgxJ9BCJO8X5aKbEo9nSBSI2L+cHvhWVQBPEboNJv/b2rr28z+xgLYyACWEPqbM6QtBhp3HmXs3V857U8JjaI/gyCK/vcdXgvUshau65pmplqt6JpmGNrh4WEulwuJHwT2Yjq+vXh78eKH0dWFAAliBDAiiOLF+RWDKFcoGNn8veMTzcgohrFYrRbL5dq21/Z6bS8XiwUPE7RXq9APPM+3HXs2nsznM9fxbMdpttqdzm23e2PbTrvVWi0XhITd285qtZzNZnPLAhDohq6ZWQiYpmq+71ESmKa5u7drmJl8odDtDX76008EUVo5zng8FgTBcb3xaGyvHVGUEASyIgoCMgxDM83hcPTxJ590uj3dMGdzazyeHR8fn749ZQx4nud5TuB7hWIhX8i2W82zs1PO5wS+ryjy0lrM5jNJlkIS6poSBsHCsorFMkLYcVxCSS6XC8Mgk8tkM5nBYOC5XrO188P335GQEMqwgHf3diaTcbtVtyxLloTz87eT0cjzXFmW7MVckUSMEQJA1/VcLjcYDKeTyWLF86Tg/fv3AQTNVvP5y5f9fo8RAhF2vaBcqV7fDrCkrGz34uqqVm8e7LZWq9VPPnxoZgqB77meu17ZvX7/h++/e/3m7XA4vLq6rtZqpmFqRoZQOp3Oi8Xi0b3j4XhqzecQwka90ev1GKWO6/Ru+6VSqXPbxRhLkvr0yZNmswEZzecLy+XyvQfHnLSlYeAHfkhCJIh+EDAEBYwhAI7rhHSFEas3G4VyUdZ1pEgv3775H7/9J9ueIRRgTBjcNAwHGzxNQTKKbHP+AkYsQsyYCyhu4xrn9mOEBB7jnRR7xzHQIxRFdSNe9F3gHUFgKig8HRwYAVAypMTRAAAAgG6lpPHSnhDCSAnj2PxHkXYbNKLwoe25xzvAVITrtvEfwRFvbp7iYPgG3mvUOT6g2DBJwn62zpHAPIwT4uJC1tzxAFKhUCzRkJNidjEbtRUFdHf9QCLho/WMI2i4ERNzMhBCgOIcrNgiAYgnDwMA4uLPG9iN1e3IzAAQRrndILLOGIw8KluBZanb/M6jtrUBNteC3OBJydj0LQQg4f7ixyPlAX4X6+9s3/kgmFRjjaoYcsUg/TM5TTqa6w76x17fkAQhx/0gCHzf9zzP930/cD3P9/1AlbVMJiOKEqFhsVDIZIyD/X0soNVk4i1mznySNcTVahpQL1MwsSDmcjlN0jQs7zbbQFX8kIyn8/54bLsuZaxUqTSbjWq1XCqX87l8vpAXMBYxRhAGgefYznK5DINwbs2L5UrGzEynE1HA6+ViOh6fPHhgWda9+/en82kmm1UUhTE2GAwUTa9VS4HvSZKgKqpuGOvVSpbl9k7r+vpG1bSd3f1Op6Pr+mq5cl13bdue7XqeKyCgqoooCEHoTyazkJDDo0NCQs8PSqXSk2fPDvb3B8ORZc11XfMCr9Gs/7tf/sVqtcxmMjfX1+1WezadhISsFovQDxzXkxUZAGqvV7IkEUJ9P4AQTSbjYrGoaOrcmh0eHty7d2+9XiuyKoji65evIUSr9bJcLVdr1V6/o8qygLGmqbPpBGMkS4I1n7z/3nsIAkkSEYSKIlvWcjQaMcZc1zXMTC6XPTjcNwwjDIPbbnexsHq3PTOTHc69Vrv99PlLSTHOLi8UVauUS29PzyBki/lM1zVFVfqD0U3n9vLygocB1Ov1Vqt9dnZ63el4HlE1rXN72x8M1467d3DQbu/MLcv3vA8++FBE4sXlpSCIO+3dYqF0dXldq1bajYauqwihTMbo3N54jl0tFRRZLJfKsiwRSnzPDyl1PY8BYGo6FmAuK4uSCBGWVDVfqWJZ+eLrrz7/4p+yZg4hIiAGeI+Q1IuWKM4obsGXACCEkRofpT7FVV4wjrR+AWEMMRcKm37gUU3fVOInp6MRjoEFRiohil7/5E2OqWPAuIYfv8goNhZieiTyzbIIBaIs0TuQmwiANPon4Y93d2SRVs42ycAR7PABolQKUqJUJwKgErUFjobEK5ISwLtDgrgJDmP87BBAEEcHJYNOabYb5wmIKkwDAABDd9R/PvO0gxekXAbRyWACwTHeQoC5oQYiFR3FvRkwhCjtaUiGza8bLzdkUdezRHxGKByn4G49XokQTkP/Hac0SCRV/CuNDalIMG3no8WLlgoKjhmYPyoD0hvpxY5uCqW8y3Fy88A7IoSDfMokiMA/kQGEhGEYkiAIwzAMQ9/3uQDgGzT0w5BCCPyALJer1WrtOa7rOo5tl4oljPB81JcBWC2t2WxUrZZUTRNlWTWzAEmLuU0pGAzGX718wZAoKmo2mzOzmXqjqRs69zRcXV6u1ouVNUcQiCJ27JUkSvV6rdFs5gq5Yrlcq1bGk8l4NKRhUCjkZUVWVLVUKrquvbe373l+SCkWRFGSfd+BAO3t7ayWS13XBAFVa/Wbboe3dL937x4WJcLY5fXleDwiIWEMzKczBCFGsFou7u21AWM7O3sAwuvLK9cL9g8OgpC8fv2m3d517PXzJz/+5V/+Zad7XSyX33v0YDAYZjPmcDAYDoaL+ZwGwWq1CkNfEMTZbIYxttdLfkey2ZzjuEEQjEZDRslsPgup+8tf/jsShMvF0ra9i6trgCAATBSx59s///mn/V6/2ajPppPZbFKtFO/fO1wt5jvt5mjY11Sl3729OL9YLm2egey4dqlSrdbK3e7t/eNjWZFvOh2MhfloQhk4ODi0ffD67HxqrUeTmSCKg0F/b29HRIBSsl4vB71+SEIAoa5qa9fmfT1FSbl3dJQxM/lCqVat3b9/bK3Ww9HoV3//T2/evPnln//Stb1Bt68p+scffrha2l98+bWqyHt7+1998dVg2L++virk8ooqHx0ezmYzyMhgOJxORhhBM5tTNF3TTUlSGKAixvlcJp8zspmcbhRFxcjX2mauaPshxtpkOsYIE0IR2mSAptUv/k7wP4CYeInjP6PwGK7lCxhFFd5x0ucJC4Ig4LilH0JCpPtjjHDU5g+LkckgIBxlq2IEkbDpBbKdKpTEB8Y8BUK81m1UahpEBgriba8AAIxQsIHBiHzYyIEYW2L8vLMMMO0WRdG8N5zYZs9IGd5SSfFevUYTpy4jIA4RBXFSKIyKUbNUvBADvF5QgpUgYqE2kJqo
3gBABNlW4WsO9oilyKy4RP82gkfJF8lC86igzY1njCVUDGAMppJB0udJUUl3I1OjjTicFKXi+lP3aWMNpNE/kRMbjy6/IksidiK7kMVbkVKAUOxiiZGaJr71d4j+LcmcQnRKubBJpMudgzjWs8h/QFNigCV/jAkowq0ATv5w3A+CICQBpSQMQkppGNAwDFVVzWZzjJLVcgkAKBULEMIfn/3oUy8IA2synfbHo9vRau44Ll27xBflQruZazc+/PDjjz/+qFAq5vOFVqsNEGSULlZWGAT5XKZYLBq6IiuiKHLHDwiDcDqbWdY8k8nMrYXjuqvFAkJGKRMEYTDot9vty8vr4WiczeUZAOdnb4qlsuu6o0HXMMx2e8cPw3a7/fzli0ajWSpXGACu52WzBYTwcrmgjM1nM8aYpmie5xmGLmKUzZq5fFbTTUVVMEaj8aRYqjAKBsPx2nYODw5//OG7n3766enFmaaq7VZrMh6bujEcDDqdzmqxRBipquK6ThCSgBAeOCbLsmmak8ksCAJCyGq1RggSEna7l+VydbVa+17Q6/Udx7XtdRj6qqaYWf2DDx51b7uM0lzOHPT71XJxp9U6ffsqaxqubQPAXMfWdL1z051bc0JDCOHxgxPTNK2FVavXHMf9/puvDw4PpqOxZuSH0/l0sR6Op5qZQVgYjcfNRqOQy8xns8lkMh5PGAMI4XK5VCyW7LUzGvUxFq4uL578+J21sKezpeu6uULx5OSkUCp9+OEHEOFvv/muWChQQjzXnc5mpmn89Cc/dRyHEVKrVk3DcD13MBxOJ5NsLlvI5Qa9brvVECXJtm3PD7EoAgiDwNcU2TC0jKGDkDKGBTVTqDTr7SMoqj4FK2dNwgAh7PlBAmpoG3A3cLF5Q3nj3YiqxQgiBEWMRBFjjCFGHPGFSO2PgJyn/fO/RzgPMY5RP9o5pv65VopgUg4gGlmEgBHJHP0vKtUAIjEAorjKqGgY5L7SBPnA1rzSE2TJZe5+IqRKHZIcyuAWXm2xIwgBvFOvxldmkYczxUalVc+U3NhSlpND+P8hhHewnlAGUlkbfAQUMAFFkZGIs1Opb2P5sRkYQggAFokEsLnfyYUiY2o7ITDSiBMLK7bSQKwsJ+Q7X3jGGIpL1L0rKrggib5CCPAI0aijTFrwcMxPrVhc6ZVtcD7ZOdHMAYurkyfqOaU0VTQqAnRKtwic5EbxFlFpCcFiwyI5hFBKeUEDSillhBAaEkoi4p9z/fGmT4KAURoEAd8ZI8H1vPls5rs+o7Rarbz36BFj4A9f/uH69rZarGqK6Tmw2bqXKbd37j+qHx4df/S4eW+/sbdTzRcd14EI1Ws1yogoCtlMxtB1TZV1XZdlkVHfmk2Gw8F0NhlPxr7v+b5brpR//PH7waAXBl42YwLKBFGwbYcxhgTp3v17F5cXQRgKAi5Xqp1ur9Wom5kcA/DkwTFfgWqtdn5xKclKxjS5MmKaGcPQLWsW+oEfBKEf8iZBqiq7zvrBg5Oz84v2zs7r16/3Dg4AFDRdD/zw5csXh/eOXj599t57HwQ0LJfLpWLRc9zpZDKbzV4+e84oBRAul8sgDD3fJ5RKkgAAMwyzXC5fXd1QyhzHkWXRms9kRWrtNKvV+sJaDkfjcqnc6/ULhZzruX/2Fz+noZ8v5lRJMjU9DMPJaFCvllVFFBAMfC/w/SDwp9MJY8gLCAXANExZU3b39sqVimYYx8f3l+t1EIaUgRdPnkiKOl86imZqmfx8uR5PJuVSEQDWubkK3LWqaMmzNbfmrusfHR69/8FjTdNMM5fN5m3H2223ZVUZjUfzpVUuVwghsiyvl+unT54WcoXhaAQxJozOF9bJgweSKOTzeVmWG82GoqoQwevOTblU8vzw4vxsba8zZjYg1LZdx7FlSTZUGQEqAJbVdUUzy/WmYuRFM3N52/3V3/3d57/79XQ2DUOfAYaFTR9AtAG1bSIZxoGLcRUBDEGC51yJ5+jPITxi/DEWBUGI2f9YNkS8v5D0A4fRabg7ASOMMN66NowiPWESuhKBJwaxO5cPOSYXIjWRv6QIbtCMpQiDDdLCSP+PQZmDzMYxAGHCBYBYK4UwpoYSeygpTwkAxLuxAAAplmDrc7eGEQCxZzvxAaRU9y2XcApD7zBaII6h2cLCjUQDEV0WHRt9i+P7DZK2MMkiJoMFKeSNlhtsZFr85y3SJjqUs0GxWZAm+iMLILnTaR8ASB67+HDG4sisOBIpFgCAl3eHkPK4y20Zy7tNU8AYr3vLKACUUgoAZZQARimj/Dbxn5SRmNmnlJIY7mnsyKHRgBiLbxVlXBJEhgElPPAzDIMwTDifkASEBDQkYRgSSmlIEIRYECihYRDyZVc1tVgsZkxDUZX5ZNqu1hHAFKCDk/eVYlnMFesHh9lSWTU0DEHoOpBQLGBRwI6zHg6Hg17/+vrq+uri9O3rN69eTMbjUe9mNhvbq5WABN3QF8vVer0eDPqWtZxMRoQQAYu6aoiSZOi6qmmLhVUolAghnud2u/2MmeM91k3TKJZKiqpQygajseM4mqZ1b29VVUUY7+3tO66taao1n5tm5vrmhve7cZ21pqmKIpqmObUWi8VytV7v7t8DAGGMp5MZg4j7GA6PjgLq7+/uLqyFIkqWZT1/+my9tkNCCAk93/VJ4HmhIIrVSsXzHCyIzUbj4vwSUAAhoowAAGRZ/OxnP7Fms2qtkc3lfd/vdm9PTu7P5rM/+ZPPri9PC/l8s9ZgjPV6t93bm3wuIyK0u7vDrRbXdXvdvu8FACFN15GACqWyIGEGwHvvPcrlcv3+wLZdz/WXi6WZzQmyWm02KUTffvtts9ls1qsXZ6dHh3s0DPOFvB8Eqqqs12vTzOiGcXp69q+//vtMoalpSuB7zWZLN3RD06r1euD7vUFPFCXTNBuNRrFYCsJwsVj2+r312kYITacTP/CLhUKxVJzPZ4ahF4tFRdWuL68fnBy3Wu217VxfX+qmaZqmJEmMEgExVRQkAVLmKYZmFMtIVZGq9oaDL775veOsJAUDGGLMldw03sb5V3CTiMUVUAS5qIAixoLIgTwu1YUFhBAHeFEQOfMjCoKAhaT3N4YQo+g4AQtYwNwXHPsJMMIxJQAR5DWMuYs1pl62IAhEYUkgjTYJHRN/0CZkPI5dTFyFMeBwNiVBpE3lx9TlksgPECm1II6Q5CGMdzkSIYVckFEAYFJq7q4oSMiFd3aIwlEikQVSsfXJzn+s3E2yEwB3lyN9PCGUsZBFgiauGI1i7j4VbwQhAnCLKuF2A1/7DTkSbUQ3hTHAKKM8gjghtbY/XGZCCHnOFyGEMSYIAiGEUgqETUgyHzeX/4QQfr7EQkKc/QEAQAxRNEKEUGKIRHcaQhR3ZgYAMBJrO3cML7h1L9/dIRJC0RLAZPrpeSV5v2EYkpA32Q5pSKIpxwsrQIRVIZvNFQoFQRBczxYwymRzYeibRkZX1b2dHUmUFsvlerXOFQuu77qWHQa279mevbKmy7W9Ho9G9notiHBnZ0cUhd12E4L6arGQZZmFTjaX4brSdaf
je9R3g9l02KjXG42GLMkIov5tdzKbabquqIooij8+/fGzz3725R++kGXl+vpK0/RyuTAYjsxsTtN0iAVpNisVi/1+v16vz+dzAaHnz549fPjA0LXJZFoulWRJXtoeoaEkSUEYUKr0B0NVU7rdnqLphNBsNtfpdKuN+mxlf/v1V//pP/2v33z7zdHxoa7rvjt7+fKlNZs7thPFTYWBIImu6yIkiAImJFiv7VqjaRjG8clJuVL7f/77fy/XywwBz/eDMMQYV6tlSVb+37/5//YOdh88OPbDQJSlRr1JCAUAeJ5jzWeOs5JlGYuC73uqoliWZdtOSGilWpzNFwgBa7F4+P4jWVOggEVZnsytbrfvuN5yta7Uqqpu7tdathv8z9/87uT+kSxLL1+9zhhGt9tfL2dL25VlOQxpPp8PfH+1XBwdHX700UejyWw0HDVaLVXXRqPhZDqaL618ofDw5IEoy75P3p6eG0YmW8i/9/jx1199eX19/be/+lWpUDAMNV8offaTT0rF4mg8Gg4G9Xqj2mj+z89/d7i3227vrJcLx7U9z1MksVIqgcBeLaZ6KS9JKKRBd3Bb1QzorubLiW4ocOJDgCDkpYzRuxiSri7MGGCcLWAIQIB5g1bIOf2oxpcgCHc8vYk8QQhiGMW3oEShZIz3CWeY8YwwCiljhEBK40rvjDGefIUohRgSSABIanaiaFwQonSWWqSQJdrZFoHMf+dt7tOoSwGFFKYGvNGqE6Td0CdbEI0STfMOYuC9Ri09pijWclszTv4TWxd3ZAPjbHL6X6xMR6WX0u7ZeARwa9rbbguYAuMEPSM8QxAigDACCDEe+w8jN0jSpTI5SXJsDIgQM5hYKSn5BADcUC131ghuD2ybd4IAgCSZKjkBiJTxTdZi+j7xm853IZSkY0C5aOF3L5FklG2ooa3Pljz7Ix/A/vgOSaH/WP0PIvKHhJSGNAjDMNzMiLEgCGzbWSxWi+VivV7P53NRFEVRqFYqsiz2RqPRfP75b38z7F0LJGC+HdpLx5phRljoLGcj31kx3xUQy2eMUimfMXTf82bT8XjYHw4GqqpADOyVc3F+9eTHJ2dn52fnV54fQijkC4XZbI6xxABaLJcI4lwupxuGJEmCKHleUCpVqtUao9QwDFVR+8NRsVA4PT3L5Aq7e3vDwSiTyWqaLsuyaRi1apUQYrtOpVziGsF4NEYQa5oOAPQ8x8yYhmmWyuXFap3L5kqVmuN4vf44ly/M5/On33/TaLRGw8Ff/fV/KOTz0/G0c3Nzc3Xje55t26IkUQAIo14Y5nJ5XdWDwJ/NprV6w/eDk+OTXL7w1Rd/MLLmer06OTnJZXVFVnbabdf3BoNBqVR68OD+119/+eDBiSpJGOGLszNCQlESJqNhuVzSVXU+mzLGer3BYrHQVC2TzUqyNJtN2/t71sIys5l2u722ndcvX8+txQ/fPdnd2SfAJ5Spmvb27DIMA8/3xuORLAnWbF6ulERRMjNZWZELubwgYEmSfM8L/AAi1Gg0P/zww3K5rOtaPpfJZnO+595eX718/hxBVK1U9w/2RVleO85oPP7ZZz+7f//+p599ShgLSeg59jfffrtYLh5/8FhS1G+++xaL0oePPyKE2I5r2/Z8PkcYMUog8WUBKgKkgef4DpQUQc+VmrtAlF+9Pb2+vnDctSBiQRRhBOBbRjngMTYxhvP+4QghjCDGUEBIEARO72ABS4IoxtS/wH2/giAIgsj3iVl+xB0EgsCpnyQ+SEAChhhHYUEYQR4yhKPKkJFvAEOIEBQQBAhwH+fGO0hpUj6ZEyUQRElaXDrEwLWJfNlAeTRDzB0PsRsSgVgV3sKolDMAJKeFcXPdhKXhV4l9ABBCADCEkdMgyn7eUDrpeJ90FeUY2tIUy5YfgHs4EuYlBakb+fWuZzWNxCBqvgwY2OLF4kswEEe+wlhgJOC1WQN+FNgMLOnbyXjdpaSEESEghf5oe2BpScCvBRG8EwYKIQSbalDRbBLEjhcNAR58CjeLG4F2jNIxvcMSJ/wdP3Cyb/L3O0AP4qCsdJmH5MMFAN+It8PEQ5A8tfxgjLFhmGbGNAwjn8/ncnlBwIoii6L4d7/6B0rCDx6dtJvVwF0sFmPXttbW5PTlk5u3p/5yLRCqaDIjISEk8NzFYq5rqq6puqb5vjceDd+8fHn+9tTzfIQESdJUVTfNzMxaXVx1Bt3Bm5fPv/vN837vYrGczay5NbeCIHRdz3WcxWLxyG/L2QAAIABJREFU6OGD07dvBIgW1kIzDADgcrliECws66OPPjo7OyuVSvPZvF6tz2bTWrUyHo9y2azjOv3+wF7ZppHxPB8j6DtOJpctFAsQo9lsjgRJ03QAhS++/PLkwSOEcO+2X6lVMUZ//R//A2NgvVpbs/ntzW0YEFGS/SAIwiAMg8bOzk6r3evcLpYLx3Fd11dkpd8fIoS/+/ZbLGDf9x4+Omk2a+cXl8f3j1+9flUuFxkj2Vzm8uL8/v2jm6trCNFqvQp8116vNV2tVyue6+YL+dFg5HqegEUkCIQSxqjtOs1mU9G1cqUiy7IoSqqmBT75yU9+enV1PZ4OCGUHh0eDYc/1/KW10FVtMBhommIauqwoqiJlMhluIquqaq/Xqq6Xy2XfD7/66svXp+euaw+GPUlWa9XSw5OTwA8oZZ7vZzIZzTBzxaKsqZ9//vlqtSoWC0eHB5988rEiy4osB2FAGdvbPygUivOZdXp+3m63KtVyLpebzWfL2Syfz+iq7K0s4rmGqhSr7WJ1J1NpEaS8vuz87l//EIakXKpkzCwWBEK2mO5ED+NUfgwUECEgRB5dQRI5z4NTIT8R7ouiGMf/xLRP7CWAUTQQivzDGAPAu5PE9Voh5Juc90EQgrjJCNikMvFQQN6a6h0OPYqR5DtCFofnA8CSYFaEEE18BtvAszkPjNOtYAqOtnmUVGU2uu07jk6Od+t1fla0pfZvzrARTRsDbEufhQkAs81XEWAnHoLt2KNILCSE1r+hbgOUNh4SIoel88wiiiNmvhLcj+0IzsMDRrlaHTkEaISt3CvLxx4DKog8ySz2JySOAxCbM4wl+zAE0JYPgH9NKe+0zBLzjbI0RnMHLUd0PvAgCFhcApBjdYTvSa2G2JphDEQcfip4NO1S2Cj+lCV+Y/7hsSgJ7gdBEEZeX0J5HzjCeC0R7u6mlBI/JPHxq9VqNB6PhsPr62trPms2m7lc5sUPz4bXN4D6hiEoOsYyI8xfraxZv+/N5vPrzqzT7S+mCCEBId/zViv75ubm4uJiMByMxyPGmKKoqqKFAe31+nPLuun0JpN5GISyLNVqtVKl/tNf/GRv/6DeaO7sHzZb7TCka9sxTFPTNF03f/EXvxyOxnrG7Pf7+weHnf5gsVjKstzpdX/yk5/8wz9//pe//MWr169zWRMLwvPnz6+vr3VVnUynjNJ8LpfNZBRFhojV6zVBQGvb1nVD0zPn55d+SOuN5mg0dhzHWi4oY8Vi4ZNPPv6X//Evvh+sV6urq0vfD0ulkrWcu57ruG6hWMwahjWfrl
erxXIJGGs0mvba6XQ6y9UqCH0GSC6f+/ijD247XYigbdvlclkQ0GQyhhAWCnl7vaKUMhbqmgoAyGTMUqm0Wi1kSRJFcTAY6IYhyjIhNAh9SVGy2Wy+WKxUqrlsTlHVfn/gud7p6dvL87P2XqtWbxQLxeViUS6WJEn+8YevVsvV0dGR6zqr1SoMgyAMIASe6yIsNFvNXC47txa3t7e7eweCgDEEhq6IArq57rx6/arVahFKb2+7K9s1Mhnb9cxc5uOPPi6VSqIoDvqDwPdz+Vw+X8jk8t3+YG3blFIzkzk4OPjmm69Hg0E2l/3g/fcVRfVd215aiPqyiBGgkpkvV1v5+m5z90jPFqZzyw883/esheW5PkRCXNchFZWXEgZcImAERQELAhZFQRJELGABYVEUJUHkuM//wy0D/I4pIAj8UCHaAWKMkIgxTrmOo2yBFOGPo7JxCMfKOUQYxnAIt6EzxoiUtzJO4AEpGjmtUIJY5QV3tWS2wf/438Y+iC6cnAUiBAGLAmqSC+H9Zo0j3x1eJ7kwiGJvQGywbKqqoeh6XLmOw0BBlHjFz4cQYnG5iw0s8/RaACmlIG69lhbsKcSPo3oRRkAAIFovCCBHWIwwo4wRbhNw7ytMIBNCCBhDjEVVJADgTgLAAG+cyd04nPuCAMG4IQKCOGpQhjAAgOcGMpbcegQhYgxggCHcNBVggHA0RywyDJOpUAooRAAiQhkDkDJCAaVxm2XO+QLu842LNNBIjjDKUiVfASCURsQhITyajGey8Slv7B1KKSOxuCGURKsSNXqnNAxpGISUa/2E8HsbmxVJZBChISIh8QPf8dZBGPBgN0kQJEnUZKnZqDz98buvf/c3mmE+uHeoYOxay9XUsufWbDIJAJGz5jJwiONZk8lisQoB1nVT0TMrl5yevpxZCz+E/eFwOu4vV0tFU5u12sHeTrWUz5i6YRohCWbW5LZzfX59eXZ9+eOT75/8+FzSi8VSzafi55//4dmb089+8Rd6sRggZCjicDw+OTlxfIKx9PXXX+7de2RNpwTA20FfkCQv8BzXae/szCZTEQuigO8d7YeBQ4l3eO/QyOie53mOv1yuMZY6nZ4o6/fu3e8Phrfd2+7t9XIxO3l4vNs++Prrb/r9HhaF4ajPIAWI2M66kM1Wy5VWtXJ++hYL2HGcwPN0TTPMjKLK09l0Oh3pmoIxCny/WqsCxmzH2d/fFwTB0PXPf/v5zz77VFc1AIjnOoTSte1ks1nX8/KF/Hw2V1TV9WzXtc2MsV4vlytr73BvtV7v7u2FBDAKBEGez2anb16ullPDkD98/EDNlNqtndl0CgFo1mtPv//++u3tw/dP8vms7wWZbFaUBEpD214Xi6XDo0PbcV3PzWczJAx6t9f5rFktlzRVaNbrjx492D88GI4mjudrmYztuoPhEFDmrNfzsWWvHAhQJpOVVV2StKXtZPO5+8fHiqpWG42r68ve7c29+yfj+eLFq1PHC/KFXEZTBUgVARJvlc3nxFpDy+ZVI6+b2R9//P6f/+VvbX/uUxsiigSEAAKQbYVjMIYh5rHhIsQYQgFhMQrqFyQsYoxFhDHGkiDKooARFjnK42gjRnxBEAQRCxhhAWMJYxFhESERIQHz/QUYNX1iHO4FCAWEIOBgCgGMdEAMAU4VOAO81MCG7gcQogiUItmAOJBHtDvaKK4bhXnjCgYIpUEaYoii8mUA8TcXo7hxOqMormaGIR8qgwDGPtGNaMF7jVo0tJSQQqlOKQBsS4ZEjCWRLVveBgBiD0Bqty1pBnlGWLwusWED0ztEx4K0awCCuIkBSGVRRd0ScER1JLMD2z6AuI8AIzSuVxoX5+Pyj0ZEDd0muLaMkk2O9JYxk7Y8Yt4lMgQ2Kn9kumy0cZpQ/4SQtAVzV6mPhACllAIamQ4c7NOuasooiakblvBF8RT5b5wCSvJ+uQuAxbRQKvCUsFRRaBKQMAwIJZCbugwKAiaEQMhkEe8f7PX73bevfqzWWgf7u6osuY59fXU1t+aUsvFwLMuKKIhhSCRFBhA5njsaTwRR2Nvd/V/+/V8/fPheq9V+/4PHn372Z/lsqVgoj0aT6XQ+HE1evXr1xW++ma8XGTPreoEoKYIotdp7xw8fYQgHw2EYhn/2i5/LktRoNZr1mrVYHO3vYiwu17aqakgQKvXW9999++jByXJpjYbDMPBLpYJpGIeHh5RQ0zCKhYLtOBBhVVclWZ7P52vbzWRyz1+dZrPZV6/fzi2r1Wovl6uFZVnz+WLW+c//5b998/U3JyfH3V4fAnB2dgoYy+WLIsatVuPTn/6EUXp9c1OrNQaDAcKIUqoZhqYpoigWi4WrmytRlDzHaTQbo9H4k48+CoJgMByZhvHP//xP1UoVMEpC4nleEIarxVIQ8NKam4YuS6LruWEQLKyFruu27ZTKpUKhYNs2IVQSJd6xM1/ICVgoFoqWtXRddzJbNhqN5y9elkulq6ury8vzjz/9xPUc3TS7vYHrOeu1G/hBo7m7t7f/9NkzBKEoCqv1ulQsfPTRR5qmLFdWEPqL5ao/HM1m1vHJ8f7hITe8TcMQRKHX7yqSSilxPefZs6fX19eOYxcLeV3XO53OaDTSNe3k/v1yuTKfz3d3dsMwGA8HEoaqJBi6IiIGAJBkJVtvGZlcJlex/eDpixeL9ZJF8IYQz/eELOnoi5KAn5iEQREhv4nZTyJ8pEjvFxPyh3+4S2AT4x+nBQgoIotijgdyMZHEoKIo/Ie3rd14IRKbgLIYsOK3HcbVEPjbmmBd8tJjjAGKSoyy1E8YI2D0Jm8sgCRAAyaIBGGia7MYpLbwCr7j+o6cwJsJbEN8PIIE67aYnA2fvi0AIGfF36F0EqwHEW+OkjGBNNAne+IkYWHLrondy5xroZuV3fjTNyJkg/Hx2FIAG+vdKVIrgupISG/mlUw8dfO4BbeZPgVxaepEI6cboAcUAMIYpYBSBlgSUMSDhZKdt2L8kw0AIoyOYRowwOgGqVk8iOQMXGJxNokSkAA6jQo+h0HgkzBgsW8gvtDGNcLXloYkDANKCWWMEEIJYQxAAF3HrlbLR0f7i/ksCELTMJqNmiwJnusMBoMwCLkMvLw4Gw/7AAsBgQxjCJGsqIZujKczx/GDIJRFoXN1/Ztf/+4f/+Zvv/rtD+OFFYZ0bq2WK4cIwFr4siKt3EBRNU1TbdseDvu6YZRKRYDQeNTP53OVWu3+vXuAwdtOx8zmHcc7unev3x+0260gDEUBt1pNWZaO7x+fnr7RdcNzvEqlcnt722q1e72eba+zmbw1X+QLRc/zBVG0Fov5YkUBbDbbqqbpmnr69i2EQFON3d3d0Xiw2252OjdB4A8GQ0IZQoiEYalcenhy8q//+lvAoGlk16uVtVjUG41s1iSUUAam85llzSmFlFJZQYNB789/8ee+53///Q+KolycnQe+TwhBCKuqdnl5WSwUJElcWHNVlY8OD7/6w+932jvlcklRlFKpqMrK3FqUyuXVammaBsZwNp9/99237Z39drv9D//4j83mXqXRnE5n3dvOT
adrmub944e94UjTVQaRaZr5YhEhqVBs1WvV5y9el4pFjnuSLGFR6HVvVqtFqVxotXZ8PzAzmYyZefb8+Wg4bLba9XpVliRKw1q5bFnLwPcMQ6/Xq61203ac6+vrhbWo1evlUmk6nc7n1mK1Pjw60nXt/r2jZq02nQyd9XI46LuOLasqlhUPS5lM0ciVCEC3g8HMmrq+zRiBLOJJIIqzMgHEAIAovzfC6CSjizt1BSxIgoAFLEsS53NkUcIISaKEETcCxIT2EQVRFLCIub9ZxELkB8A42hYEESMBQsS5IAR4s/FY5myqzPGM1Kh8JFe5uISI1M8IKjdAt1ElWSwo/ghgbpAcbGpPbByMd/Rmfi4IE4cEz0RN6hFFiMp/xfvNemo0UfWGRHeOvkkJkC0cjIbC7nyVnte744sFQGIHbWZ7ZyMd4pr8TCQYRyu+IyGEH8UpcgA3CnhCtyUOkSSmk58zrncarRxX4JOKz5tx8ipOvJB3empbNlIkcQiNL82i6vyMxdIKgIj/ZzQB/XcFAE39uvk7iGL94/AgkkgIGiv+yURi/20kJjbpX5QmXt/Ex8sFAACA54pRSnmEEKWUBpSEASesKKO8hhKjVNOU+8f37h8d3t52As85ODzI5zO+77muPRoOe72u5wWaZhiZQqO9X62UEMauH3ohGY4np2fn3ZvO6atnz77/6vL8ShDl3d3D9z/+VNCVYecCiZphZjOZnO8FlAV7e/uEEsqY6zlHR/cfvfeeaRoCRgzQ8Wg0m06f/vAdg+Jf/9Vfvb24aDZb1mI1mUw1TZvP5u12azafDweDSqWUzWQcxwEA9Hr9bDanKvJsPisWiwDBbC73+9//7k9+/uf9wUAQZcfzB4NxvlCQFG06nYmiOB6N1suFoqiqIqmq3Gw2crnsd998Z69tQujSWgJGi4W8aerffvelpuUgxL1ez3Oc1k47l8sCyJbLpSiKb9+8kRU9DINKpWTo2nvvPere9r/68oter2ev177vdzudVrM1n81lWYzbzU9r1cpsOnVdFzCaz+fL5TJ/l1aL5aNHD0bjIaFhuVSaWXNdN9rtnW53cH52dXBwj0C4XK2//+57URDK5XK5Wv397/6F+7QFSQQITiarxx88ns8XlFLXdbGAHceGCLqeK8mSmTUIYa4fCKIUBMQPgsAPQ0KWljWfzaylJQo49P1iodRs1AURmxlTV9VcLttoNgCAr1++opRKkqRqhqoZf/+rXw2GQ0BIvVp+79FDVRYFQRhPJnom64V05RPCULnewqL85OWL86sziCAFIQQAR+w6i4AWQIijevFR8E3cUBxjJIlSlOgrCgJGXLsXBZHbAZKABQGJQkIWRYaCEMf7CLzBNwQQRgUhePANf7nxxgLAMRRFhaCj5F8GIIDcpE8QL4EC3pck4gp4lDVnOeJiomzj4EQb+E4x5CCNsX9MX9/AUgzQ7yJwIgAghPig3UzQkIsgDNP8z1bMT/raCd7DVCWjjfja1GiDnJjasiNQZAowsOXpTk8Mxj3fkm8Rinu8pbo/xwOgkQcgVvUTxfmdUW8HxcPU4nKnLtyIJRZXc2aRIzd2HW9kLH8wYr9rVHEBRFWcUyAeMesscu2GJIQQxuo/SDJ1QazCA8oAoZy950o+4M2zKYvdB5Rr5Qzwv8WSI97gZ07MH74gKQoosgZoiv+Bic1KaJQhTCiMGCzCGTae9yIJkh+469Vy92DX992Ls7f1ek2RJUqDy/OLMPAlUcaCAKAAIb66vnr99nW333Ndz3Ecaz6nlCH0/7P1ns2SZNe12DFpy3t369b1pt1t3z0zPQaYwQAEBgRAvscQpaBCEhVB/QiFvkgRCumDpAjFU4REfZGeFCIf4z2QhCFBADMYjHc97e/t7utNee+y0p1z9OFkZmX1oGLQqFuVlZmVVbX2PmuvvTYWRTkQiGIstpr1nZ3d/nB4/sKFS1evUwZrlUo0Hp1bWFhZP5efKyiyEgiomVxe07T79+8dHR5QYkaj4Xg8Mb+wcG7z/OOHD4ZD7Z2f/Khcrl7a2vrs80+bzdZwNLx186aAUa8/0DQtl83+0y9/ls7kl5aW9vb2FhYWu51evjBXrzcxFkeTyfmLlyuVWjSRODg4BhCn0plSaWEwGEqS9HznqTYZD9q1t77zR7oxDocC4VDklz//6c1bd/b29m3bTiYTiiwVi4Vy5TQYjNWqLcaIbVnhWBhjSKi9tLxEGXj09QNBVQf94frq4urKEmXs+bPntVptNNKoTbudnjYarq9vfvX5Z7lC3rKs07Nyt9OORyPBQGA4Gs2X5geDAUSo0W4tLC7GYzHTNANBlTcrEUopBYlkenfvSA3Fn2w/zxfzO8/2dh7fvbR1rbRQevrseSyR0SaTiWmWSgvVeu2VV96o1hpHJyeapiGMa7U6gODo6PD4cN+0yMH+Xq83PDg8nUxM07QkSRawaFus0+3Ozc1l05lcJhMOBm1CG826KGDTMCACkixHI9GFhYX1zfVkImma5lCbJJPJixcv5rIZZpt7u8/2dp/H4/FYMrG4tCwFQrrNLIhlJTA3v6jb1qeffzrURhgjCKjTbOUjlr2MlP9CMUS8jQsjJGJBxKKAEcaCJAiiKIiCIAkiXwdIoihg5NJBomcB5NWEPQpIwJKABIQQl3tit/UXIwGwGekRdAYGQFdkCZCbIEInowaMUjc3dSKEH8UBAy535PQKuMDiA1X3Dwgcv+spCM6SE/xf7EykYVN/HQa88i30FYoF7zpOdwSdLV0IpW6ZlrmV7Wn7BXT7jJmjYHH+nF4F6EmEfF7P0LFP8OXyL74Tzua4mlbkXQSOeH4+yj00ZcRhlfjKyxchoPuGOfvinDB1d+Gm0Wz2KA7TBH0rBsYYITYCGCBgE9s9OmSQYYTcwEABhIhBSAknJ6nnGwEhpRQhBBACtvvRMMaDHXJbSwAAGPqIuun5O3f4rmxme5Ec+JZ1wF0q8RjgHWVK7LgfLZwyPhRCSKnTzu4GRYQQYIwiBBFAgPAeZkgJs4GdSqdvXr8SUAPRSJQwQBizCY3FE4lkcjDoDUdDSsBwOB70R7ZNEJYQFkaaJivANC1CDAiwoihEIMSyJFlSg4Fmq/Hf/R//12sbi3fu3PneD/5IVuRy+YwxKsniaDTYfvqk3e3HolHI7GAwOByN20+2bWIPB50f/PDPL166sv348YVr19KZnGnZb3zr23vPn/31//Y/xeKJO7dv1Krlvf3DleWl+dJyMBAaDscYoUar1Rv048kk29szbFKrNU2LBEIhQliv27ty4xaCqF5vIIyHg6FlWYxQAABEeDweiCIea6NesyVJkmHoqhrIZDKWbYy1sSjJgaCiaRohJhbQyfGRZRcymQyAsFgs5Etzg7EmiSCVSkwmk7Ozcqk0/8UXXxFCqEUQgppmVCoVw6CSJFFi2YSYptFqd2xCCoU5NRCs1moIC+Fw1DJtRQ082X68dfnS519+ubGx3u0PAIATwzBNe/vJTqPV/ZM/+/Hjx9trm1dW1taGI+3o6MS0dMrYO+/8kAH45re/c3h0tn+0q8iK
EJBVVcpmU4Y+SSTSnW6nXjktLSz1ez3TZJ9/erdd7V975XIhnwuGwslE4ujg5NnT54FgMJ/LXbh4KaAqg9Ho6dOnpmEYhhmKRkVRWVndKBTm1lZXe4ORYVrls5N2vZbPpjbPbRDLBIwyRs9qtWq9HolE8tlEMBQJRyPjVjsSi9ZaNWJbSMDepCno0gZOOZQxPneJf+0FhCECGCNREPiyQBQEjJEj8eGtYBDJkuh+sV0qHEFPyAdd9sbBB+KB2zSRghABYFPKIIKUUp7EMYgQQgwwTDFjTJQkZpqMMcwwRlTAAgAEIAiQxZfYgAEEAHUzZ0a99YPjn8w1IJAnyb7snp8MdAkUV6E4pZWYn78BzlBJ5k7YncIa5W7/vhWAH+MocAwmHR4ETLPdmVAze2ZcDwNdkRaY5e6B/xBTdmdmRTMDc2w6BNEJW8jbfjZi8ajFIy5y12veDAB3M+RYsDloTp1YDKdoOC3L+N/rzHv0zvWFDdwaguMvRHyuDLzFgHt2+vl96svZGXO6i30PctqDeem5l9FPM33gWDsQH7/DGON0EvBrinwHBdM2RQAAoNSmlCBvBA+AwBGt8tcSStwvPwMIYcAAYNC27cGg12g2DNPIZlK1RgNBABihtoUgkETx8PCIEIpEuT8YmjZfTTAoCJZtm6YNIOC/TYQQBMAyDQKhJEuvX7/0o5/8CGJUrpQVVQkFA3fv3//y7t1QKBCNxbPZfDCoyrI8mWjZbB6L0kS3dZ3oFvvW669tbJ5/drC/tLRUr9VGw0Gn3X77e+/83d/8n9FYKp/LNep1LAgBVR2NhoqiAAAEATXqzWwuywCqVuumZb/8yquVarXaaKSzWUEUB/1Bq9UOyEq72axUqwgLihLQDWvr4nqn2201W0gKhMPhZzs7y6vr0Ujo9PS4tLBgGrpuGOXjSraQT6YS7XYTItTr9SLR6Orq6uH+UbtZj6WSf/LjHwCELItFo7EvPv10NNZtmzBKiW2HQkGMcbaQOTo6CgbU8XgEGByPhqIkjkfjaCw2Go8BhBALumGMxuNmq61NtFgiEQ7Htnd2TJMdHZcP9vfefOt7G5urEMJ0Oj3WJnv7ewihhcXFP/7Rj6KxOADwgw8++vyLzwij3U4zkYhn0umj45Nnu/uDwUhV5GAoenR4Eo8nsSCGo7Fbr9zWtMlE04yJvrd/NJnolmkFVPXo6KRSqamKkk6mzm1u5guFXDZr23av2/v7//AfPnj/d7Vao9vvLS8tlYpzELKz46Pd509b7TYAYDAah6OxiW6OJ0a10ZBUNVsoHJ0cCxLOFfKWbQ+GA4ydnGlaugRT1zWMsePt7Kv6TjWdApYlWcKCgLFTChYEhPFUBopEx+8BY8Fx/hExFri7sEPxYMHNwQQPhTgSeesDyFkg5k4PBIBDPHPth/mP0m3soV49wEM3xMOM593GqXKucQfTobz8uA54zlYoocdhIGdFwSdT8kUA4iyTC6o+CqhYmGK0B2eOsyY/2BQoX9iSMx/Qm3vuy1VfiBMATZ+Czuxe5A8A3qfrB1Z/Esy3Au5qwL3xs8S8rQw6CrEZ3PfvgV9+hDFzOjX+AFPEW4vdNRakDPDhZQ6oQkgBIK6P5nQPHF4dhn6qz6GUe+44aOr8y5j9TSrG5eKnHI6r7SH8X0opYIQ5f1rEBrNlAseXws8F+aKFP/0HwC0muF0G7uWa0noQciaL8k0ggJRR4DRMO6MUxtqIARqPRdKplKoq4/HI0LVetxMOh4PBUKVaYQxoumlaJqEAIExsxijjPz/bNikxMEbZuWKhUMjkUql0CiJ4797dbr9XKZ892dk2TP3tt9+6fu2Kqqr9waDebKmBQK8/kCQlFIqcO39+fWPj1q2XREnsj0bZXG53f98yzKWlBdPQ2+1WcS4fi6UGg14kEjFNs9vtr66t1Os1WVZD4WCn05noGiFUFOVuryfJgV6/r6iqKIqiKAlIYAyUyxXLME5PT0ejUTqdYQDu3Pvy4tWLtkVHo/HS8oooiR/85uPF9cXJeHx6svvGm28LAt7dPzJMKx6PZzLpycQwLXs0HgMG44k4IaTZbMwV8rl8Rpaka9ev1erNe3fv2ZRSSizD5F/R115/XRSE49PjxcVF3TDb7ZYoy4cH+2owZFtWJBqvVmuaNgmFgmNNOzs7u7h15fSstrKy0umM1ED40eNH8VThnR++Q4ghy0qlUg6FI9lsJhqLXbx4CUJMKfv4o0/K5TKlTJHVRCKeSCYOj09N045Ho4TY3e5Am1iJePr4tGYYE21iPbj3pa7rtUaTQhyPxZ4/2/nqo4dKUD5/4UKv19/f3fv9794ba3o8Fo/HotlcduvixTfe+Na58+eJZX/y0Ye/+uU/yJKcz6VLC3OZdCYQCHDHunqjqZsWFiRZkS9cvNhoNR49eXR4dDgY9rO5XDQaHo1GjNLp93Ka6zlFWOx2eImiIIkCp3okUZQEUeKIjxBCSBRFURQxxFwCipEoIMEZCAB5sUDASESuSYRbXPBrZhxJumMfCj1oK8kRAAAgAElEQVSkBcA32dGPKtCBBQe1qUfFTjNJ6CczoJ/udzJsl5XhebMHmGDKAU2396GcC5XIT2lQn7Mkv+Hl0txsl8KM+IcBAKcG0TOoys9v6mA3eyoAAK9KA12ZjXMVEQQusQPcDxL4lgvOO0QIIMj3j6ZuzNDPfQGfeMjZldOQPXNF3N1CfuVmKTjfBq6Hp7vac/WcPAxyLHSFVg6tNXvRgIemvsfdm/OCqQh0SsU4Wbsfqdk3OH1vHeBD86mNhPs1ZDMHdRke8I3VAGWEUMJtQr1LgabGhMCzNafEuTyUUMoYIYAxJoqiKImKLH37299aXV21iaXIsiJJ1fJpr9uBAEiyZBhmq9PpDYemaU10S5ZVgBDGgqaNBq2JEoRXrt9eXFyittUfjXeeP9e00craCmBAUZVoLBaPxwAEn3368cHRaSwa3Tx34fisZllWOp2hBIzGWrc3ZAyqwZBN6C9//jdXr78UCoZEUSjO5SGgtWolm07qE/3srHLh/LlmozXoDcbaaHPz3P/+3/+b7//p98vl03g8zqC4vr52fHJamCtSxpKptGkaum6IgtBsNitnZQhRt9OxbDrR9WQqFUvmImFlNNIkRY5H44Ph4Bef3gvL0DT1Qb+9uLSqyMru7l4um3u6+zwUjhDGmq1mJBqVZRkwNj9fnEw0Qo3i/LxNyOXLV//5X34rCGK70+12uoFQQJZECKzbr9yRFKk/GGSzGdu2Y7FYrz+4eOkSry3quqHpukXsRDLV6fYi0RhlLJvN9/rD09OyRehXn7/3Z//xfzbWxpPx8Mn2tmHoa6urCKHFhUXbJrFY7PPPv/z4ww8ohUuLpXAokE4m+r3ByfEJsa2D3Z1+b8igEFCV1dWlRDwcCAckEcUTSUbs8n7zUaUVU+CPf/Inl29s3b9/7+7drw739vcPj20CDnafHuzv33/48Mmjx9pYCwWCoWBgfWXlJz9658aNGxixpzvb1UrVIkSUpEgkIkoSZQAwOB6Ndnb3bty8btmmbk6q9Wqz1ax
Uy51OhwMtpcQzXgaMS9+deV6CK/sREBJ5ERhhAQuiOKWARM77Y8f0wRF0YkFAoq8SjDHGELpDARBy6X3ogQ9yTgJidwow13u4KSMALkw7/zJXre2mhG6xcGoA782Gd37IbmXUedBTNsKZwrLLqswAHZ2S1X9gSxdsefR0aqV4pVTkMPFCGHDPgHnO2my2zxhCr0ILZjAPuMmze2OMAX+C71wd/yZTEH8Bl33nzXHfbbmYseB37zuzm5F/PzM7d3Nc5lvNuGfoBc8Xb9BHEwFXU8TY9CP0vcoBZjAr4+HUC8/tCaOEEkangO4T41BCCKHcH8j7hzkOzt6iwX35NxcQzBczKCGE61LZdLVh2zallFCLuk0GYDZ+8Dg1rR0BbmDKix984QoZA7ZNKSELiwur68uMAVmSx+ORLAu5TFKWpMGgF41EJ7oxnhiiJLU7XUWJGKYpCiKDeGll9cr1i6urG8+ePt1++gRLMqX2+vrahXMbsqxsnr8gK8rHH3/6q/e+CCjCWfnE0I2d7ceBUPQ73/2eKMnd/qBeb6jB2JdffNUZadF4NBQObVy89tHHH7791tvPn25PJuPFhfnRcNjv9zY31n7/+/dsm7z66uutTvvjD3/+rTe/NyaDfD7farUuX7nyePv5rVs3qvV6Ll+4euXas+fPa9VKtVpLZ9Knx6eNehMh1OsPdV3vdTuyopRKC4Nh//atW6oaFESRAhAQbMsyRqNRIBT76Ne/+/O/+E/ff+83aijQbFY2Ni8OBoNapaaowVAodHJ8aNtWJBa+cf1ao9np9frJVPr/+bf/NpXJNpvtiTYmlMiqrKpKIpFIpVKlhZIgiP1+H0IgK+r58+d1XacMCKIkSrJNqChLz57tRqIxCPDxydnzZ/sWob/+5d//q//kv7p67covfvELCFi5Ug2HI2urK5qmnZ2dzc0Vf/vb9371y5+ubWwtLS2fnZxMtMnh0fF4PFpbXcnnssX5UiQaFgVAiLm3/1Q3jXAomEjEr9+4fvna1VuvXHvj1uWxrv+v//dPkTW+dOmSKEqAyZZh6JpGCJ1MNEbJRNOq5bMHX999trPzwe/ff/jgYSwSTsRjW5cuMcBC4bAoSRChSDSmT4x6o/b40cPXv/Wtjc31Z7tPT06OkACj8SjCAv/+M0DxN4xxeJMXFgSRK/qdDi8suvSOJIoYO4k/7wFGGIuC7GsBFrEgcOaH+/1wSglBV/EPEXLknl4LAK+zQZ7HQUeOxClmCPhkR68WCgAAyJfnATfbc8aj+AdkUR/yQJer8Z51lz0MzML6TPLnBip/AOAPYuz4KCOIfKVgCCHEKwtF75B+CPbwzOuXegGdHSxkfm5qCtloeioQOiPYp6/1ljXAzeGdZ2dZ+6lVBIIIYq8rGM4okyDi48AYYBAg/A0KyJ/mQwiQ48LGI4qXQTtqz1k5k/enF1oBt5Fwa0K8Rdr3ATtDWiBx2Xk3p3dauBxTfsITB1+EANPUnnK1ECHc4plyWwlAGOEZAWWMMGoTGzhBYkrv0FnPOHv67Ay/RKg9/V5SNuXfuMLBNZlySyaYMUYpsyyLUkCJo5GyLbNcLS8uL6bTScAYJRaxDNvUKbEgY91uL5fNGhYxTWs4HJ0e9qJRZWVto1jIBtRAr9+VlEA0ES3kMgul+WQyHo1E+8Px0dGxadkQ4Uxu7um9xwtLpVQqywALqCqFaO/gCAJw6eKWrIQVRTl/YUsSxa/vfX39+rW19RUGgDUx1tfXKuVTTRtvXTz/1VefK4rCh0nZNo3E493uMJVM2sQOBALNVmthYeHevQeFQl5VAxDhTC5/7/69WDTS6XRURa1Vq0cHB7wRrNvtUGKnCgtBVS3O5eKJhK5PypWKbhqdTns8HloWQVgAcHLt1p333/tZrriQSeeTyVT5rNrt9uLxOGDM1CfxeDQWDRNiqqFoKp0cjSa1ehMw8MmXj2IhBSEoS4IkyYosJdMpXTearfbK0lK92bh44Xy305FlpdPpRGOxnWfPAsEglsSDvf1YLGHb9OS0nEykTk/Prt585e2333z6/PlHn7zHCFBk9V/96Y93dp72+/0bN240m61PPvnw9suvS5KkT8yArJ4cHGEslkoLCIuDQR9hFAwEFFVNJBI2oaPxQFUU3TARhK12lwGGMCyV5rPxQKfbVFQFYRRRo7FYYtDrYMywKFqWMRr1FTVIKdPGWjSW6HWbv/j3f713VDUsezAYGpYlShJGgmURRZXW19fSmYwoq+sba4PRYPvpE8PU+8OhZdmEUMhbXbGvhxYAjKCr3kGC6/czRX/E7yCEEff5hwg6Vg5Q9NwfMBa8YTAQYkfrgwSOJxAihIHL8gAPWwEnEZDTjMancPMeXc8xBTrVV8B1gU4K6FbgmBsHnNmB7srA47aBbw0B3aWAA7jTrHeGdJpNZ6n/EWdjXn9gAMCpggZCgFdKBejqkhjPFznvwRxdoIO40wIsB2y3a4zjuUPGeKwMnwIGKPfOgVMfVB6FuH0EggAJiJvC+cEauoNZIHRMKJArAXAWgW6dxaWqmUPU8zWAywshhCi3wMCIIX4OjM8y8Ic6/saYm9L7V0Je3GbQ0ek7WiYGeR2DW2gzwGsK3NhtKpl0/fqJg73E5nVdRikD0JV08qSdENsm1Kac7WcEMAYJYzYBlEHKqE0ApYBSQBgjlNmEUUYc+SlkjLeY8R1N23wpIYxRSmxi284yjdqMEq8hAgBAAR+L4Xpf8doYxLz3kroCVNsilk1sm9jENk1DFLEgCJCB8WAoCCgSDTJGhsM+xgBAoATkiTGhxA4osiJLAMILV7fWz21SyzJNSzf00WikaZqujSfaeDQePXu+Vz0+0IdDSsxgOBKJRhKp+OtvvdbqdsKR2MnJ2YVLV03bni8WYrH4eDwWRDRfKkZjodJ8oVDI12rVTrP13bfeerrz8NyF5aOTPUaorsNEvPj5J1/G4mFBIAIG6USqUqlPdD2fn+v3+5FItFarTwx9MBxcunRF1/W93f1sIhWQxHarYZoaZeDR43sr65fqzXar2TF069ylVQDMSxdWur1OOp3/4rO7m+vnLcu+eHHr43d/b5ij/NxCIBB5ePfr9Y1zo9Ho9su3H+886g6HkiLXa1WIhPnS/Hg4TCTiEJEL587fv/91NpXpdjt7+ydzuaRlGoZpipJomPqtl26apl6vluPxiIBQIZcnti1LUjgaPj45and6N69fe3jvSTKViUQjh0dH3W5r6+pVAmBpYcEm8Jf/9OtQMHX16sb3v/+9L7/6qtcbvvH6t8fjyUcffnL92u3l5ZV+vx8IhvaP985tXWYACKIgYIHYpNVojYajdCIxXyiU5uYYAxTQavlod/s+BIJlmP3eIKAGX37p5bWVtUajGYvGM9lsPBF75dU7mUxWwkI2k03E4giCYa8z0YaGMQrK4tblG61apVE+mQx6n330oT7uxyNBDCmlZKxpWMCLa0uiJBqG2Wl1JrrBCMWIa2AYFjAnYR30h9AVayIRYkkQBIRFQZQEUcBIkgRRxBgjSeSPyxgIsqhISMJQhHwuJIRuAQ
C6NnCOfygCEAMoYEFEmCONgAQMMAIYcwdQgCAQIMCch2CQkxnc2xEiwK1vICWMMcCX8zy1cg2CAaUUMQgYpIQBAB3OByHgSyUhhN6awKkfQL7g4DJ1wNxeJfd5gCD3j3Oof+rwOoDrfyglbtLvVAW4KGY6D8ArYziA6AQe6FBWM3OpGABO6OPHc096yo7PJPWMYa+i7TJaDqUDXWUpfMH2zVvH+KMi8N+mID5D9LsnwC+Kq6KabuUGKc+MiXGLHwogYggijyv3H4nrv/iDlFDo9ChwgS9FCFHIMET8b190oe4J8QcRgoQSAUIIEfVm0kAEMZiWQ/jNYjZCSMACILafyuJLUwAhgogR4HsYIgCROxfZOwdB8I98YBBSCJEkSYiPNgMAT2eocQ0DQAhx5zle/uVKIMIlSYzalDDGbNvmLZTBUCgSiQDAGKWKoujjnqKqBrNMw+x1O6FAeHFxvlCYe/z87Nf//A8QiuFQGGLBtslSqTgY9PKFfLPRGA3by8WVbr8fjydOTk+iqdSg3ljb3Lx58ybCQiwWX1hYMC1TDSqqGhRlqdPpPXr8eGlpMZvJxOPxo6OjbCb9L7/61VvfeaNWq12/duOLz+92O8/ObV7K5rKHx89CQaE4N28Yk1Ao0Ot0QsHg6enpuXObrVYrHA5pk8lgMKCE/pv/+b/9r/+b//HZ7j5GKJ8vPNv9kFKgqCplwCY2lsCw21pfW5mfn6vVW/1eV9MJhIAwEI/F5hbj3a6GAAtHwhDCdqsTTyYwxnvb25uXbwwGA9M0DdEY9Pu6NozEI+/86PuKHBgNhhDqxLIurBUz6fTXX9UECUaWFuPxRLl8tlAqtZqRfn8QjcZ0fSIIQrPVSqXS5bNyKpWaGEYoFDIsvdfrQQgCoehoNIrHYrV6Q5ZDyWTi5o2b8ajwxZdfGrr9p3/y42q1Nh6PS6X55eXl9977TTAULp+dXDh/cTjWiqX5ZCJxenpaqTZ0bSTL8pPtnYluiKLw7W99S1alTqdn2aBSrd2/+0WzVv/sk49CqvT2D/5069LWSDP+7m/+9uS0lQ2BrVu3E5Fwo9Hod1qML1kRzWbTGKNKuZzP5SBE+UKeUiuiyIN+NxwNR+PJoTZpdzumbqiKGotG4/FEpVYFDNi2RQgBCDBK3R+uIzJ0EhaIEHLm9DqNAAgiiBFCXNmDHC2/k+IgAIHAB/xCQcD8J4BcEb/jxYk8FKNcJQ8At+GnjAEGCHDJEgghABAzAKENIaSUYgwAAIxSDAHGmDGGMRUIJlz8bdsYIcIYQohS6jE7zCGmp4jjT96ZLwyA2Qgxg4ZTFsMHngB4rmzQX6x1UYMxhlcXii/sC3rlRG9ZATwnBeCVYoGPqPJw0g/ZcPpy4G3nPxR8UV46fcn0b45t/EGf9fXMm3cEoM5HgiCeLiVm5FNuNwEE3mkzn2x2eo19R2fu+svb0l3B8Q/PoQK9MMNmbnT2z2mNmALHcsfNCmaKvZzv54QPz+SZWxHi/I9bIaAucTQtNRA2rQRw70//nnkLBc963HDijbXj/h1uVxtv/OXrEZtatk0ooZRPLACUUP41mJ8vLi6WAkFFG42MiWbZOrFNyKiAcKVc1nWjVqk1m03dpLF4EguSIArhYHiukAuEgpo2EUQpEAx02q1gMBxLJJLZbK1RC0fj8VSSMFYqLQAAty5t5bK5ZCppMyIIkq7r88ViYW6u1+092d5+5ZWXP/zww9FwBAEQRKTISr3ezOcKv/nNuwKG8/P5p0+35xcWGAUCFk3TVoMBQRCGw8HKyvLZWSUYDi8tLRYKxX5/8Ntfvfdn/9GfV8tnlLHVtbV333t3NBznCwtn5cpkPBIwiieC3/ve98KhQEANGaY9MayNteXHOztLC8VQOPr73z+6eetKaWHx+Ph0rJtXr17WDf3h4wfF+QVK6dnRySuv3gGAJBPxhcXSyvp6o9E8OjwKhSPVaj0YDOZyucO9vZEGzp1bWlxaCATUuWJR1yeMsl6vryhqtVpVVXXvYF9Vg9lCfn6+9Plnn2QyuUQiGYlG+/3+RDdbzdba+qZtm8W5+XA4VG9UBEl8550fMAAq1erpaSUSizWbjeOT42BAFTCOp1L5fDYej/f7/VqtHgioWBAnE+3s9ARC8OjevXqjMRxrjXZfwDiViMZjieJ8KRZPJTMFw7LK5TJEaOv8uXgsfHZ0og27w+F4eaEkyUrleJ/rEBVVGQ9GqiIjhERBKM7PmZY1HI9kRQ7HooZpDUaDs7OzRDqDAHj+fLc/6FFCTcukPENGfISh88PllI3gOT9gx7ffVXYiLuvk5g1c/IMgFgQ+1gsjh/EXvO+/MyDA6+yFCPk84GdVQDw+TPEHOl1QUzDh/9qu05f7o3O1G5QB6qRQgLmW0RDwZYF/vhNkM45qDsZ7Ga8fJWdRcIpAwDOXA76+LOBVHPhzeGVhzkNb5ka2adiZvjm3vuxVUL0uBeDoNZlXFOChx90FA2A2n/ZScZ+wyUXlF2KH/x7wX6CZt89x3xWJAuSSRU4gdF/iTFx7oWLNH6DuAo34sniH0fNdEODm8wAABqGj+wRuZZ9SRj3GhxM8DlrzYi0Hc2ITCjxXTkIp4xO4vFKtVxswTXNK3HtFYNvdBjC3fuzGDWrzXJ36go//td7D0CkNeW4qAALAIwfwm9ZRajJicecItxqBMRYlQRJFRhmALJmIBlVZGw+JZQoIEMvQtfFw2E+nUgFV7Xf7o9FoMNFDoYAsK/3BQJvokqL0+sNEIimKIqFg6/JVioTltbVUJruycR5LsiBJ7U63tFAKhYK6YSwsLIQi4VwhHwwH54rFdrfT7fX6/W6pVPrk44//4i/+onJ2UqmUn+48EgRZG5kIwXLl7Nn2vavXrmEsQIaHg5EkixgL45H26MkjWZYi0Uh/OFIlqd3uplJpw9DTubmV5ZXdZzuhSLxYLP3iH/82FMuPhuPhYGiZhqnZL9+59eZbb1bKJycnp71+X5HlZrOhT7RXXnmp3elsbW1kMqnReGxapF6rGqZh23Y0mS7k84qi7j3e3biwVqmcFAppSRYAxH/3d/8eY1GbTMKR2OPHj0fD4cXLV+7tHZVyCUpsSVXD4TCEWDfMSDg8HI5+9+67axubjEGLEEUObG8/jkbCoiSZph0Mhk5Oqgf7hwwKN2/dOj09kWTplz/7+6vXr1/eukwZOzo6/dWv/2VzYx0LwtHx0cJiab5U3Dx/bnVtVZKVYCi8u7e/vLJanC/96qc/HRmDm7dfmS8tWMReP7cRCAZNc3J2erL95MlkYpy/cLFUKimKQggZjcYH+/vlg+fzc4Xi/FwgGBAxOni2nc3mM5lcpXzGGMQI2pZdr9ZNy2g06qZlcp4UIhSJxcKRCMbYsuzlpZVAMDAY9O9+dbfRqIuiyKtcvBIFGeBWENgp1GKEuYLHZf393s6OrAcjAblzH91eX1HEGIui6A2GcXEfO0VfiBDkkyP5dEi/Kc5M6und97u8AdfoxfvpOXmgT3jCKXbC8ymXHJruk/kHbrEZl4FZqST0nQl/EPlM1QDn7V3Qh5BPL
YPOUgACAAClFK8uFv1vZnrH5WqcFNhXcphWRL2L4rs0kKfhvGrPAXl6ku4l8yLNVK4EPVWoGwl8gRchCJE/AHDc9gdn54jQOax7jdyN3c+Ka6w8/RU/nKvRhK7a3X2nLvPlX3Y59xEPbB7MOvenlpw8rWfU8el08mknsQZudQhw2P1D/VxeEuEuC6ZP8XPw238Sp8FgJnI70O6+BepTfPKfA/+knWjuPwqZWoZahFBCLWJbxKZujOSyXFEUZVWORUKSiIll2pZpWTpgRNe0Yb9vWZZlGPlczjDsk3J5MpnYhDAG+v1+MBgcjca5QiEai330wbulpfVAMBhPpWOJxNLyysraar5QgFh4vvu8ODc3HAw//fTjdC4bSyZkWZIkKR6Ph0KBTDr98OGDyWTS73Zu37xVrlTCkZCqhO5+eW8wHMwXM/1Bs16r3rzx0s/+4WfJVGY07kmSnIgnd58/XV1dG/R7tm0tLS4eHh4HgqFIJBwKhURRevL4AUQChXB//1RRQrquj4bD9c3N4nzOtu31leV2p6lpk3A4ahOQTMS3t7ffevPNnWe7hXx2/+AQC0gUpFq1vry6WqnWUplkNBqplMsbFzaGg34kGnn1zm3btrGgfP7FV6PhEDAYDIX6/cFI0wq53Nnzg3BMiYZCrXbzu999u9Fo2pbd7fTarVYqlen1B8lUajAcff7l59FI+Pzmhklop90NBcMUwGcP7r71/T9OxGOnZ8e/ffdfbr/yUi6dDgTUu3fv/8Pf//TOnTsAwH6/Vyjkq9XquXObxWJhohsPHz364vPPL13ayuVy9UZzaXNza+s6AGx3f98iliAKhweH3U63WqsiQQIQl6uNDz76uFIuj4ajerUaiUaN8ej08BAwFlRVRmwIUaNWQYIwPzc3Go4QRLZtC6LAACPEDkcjgiTo+iSVzciKks3lK7VqLBaPRGKGYR4dHzUaDQapbVnuEpk6KhDkNP062n+IHKh3prqL2GnxEhwuCGOMBFGQsEsTYSxChCEEzkx4CPj0MM/J37X39Dvmz2aj30i7OXAw5xfm4wDcpJDXVLmaiVJGiNeo4/boMEoZgxABzrTPqg19P+dpY6wfjvy4xFw7ePdUHWz2cj7o4a17B68tzr/wdqBLATH/sgA57V6Mea5yzvKH+bU0f+jKMMY8NPefvRcA3EvszBvgkMQHvEGnpAww9igc4MWcF07bPWXkcTh8IeVodYCzivJZ7k0Zm+l+XAWkE/lmn+XFAH7mzjbQKWAwBhwZPWCAuq47lHAOZ6oKdZprnYHs/OjEtj2U54/wIi6E0FsWOBsQn/rTl2g4rwXUe0tcZOpdHPfSOZUGjDACkDHKS0nIrXI7W/q6EWxGvc415siEEHKmYwiigCCgoYCiBmRIaUCRMIaigIhtd7vddrOlG7qiyrISCIbU8WgsSbKqhgil6+uboXCkVFqIpfLb29svv/Z6s9UOR2MIC/nCnCBJqVRycXGxXq0mk3GE4OMn27KipFIpwzAYY5Ik2ZaVzWbmcjlVUWrV6o3r18unxxJWjo9PW/XafClLmZXNZEUhuLJ2/uz02LJNy7RDoVAimSyV5o+OjlOpVCqVPjw6ioWjqURy0O///Oc/n5ub293bq9dbhMLRSNN13SbWQqmIMchl0/lc9u5XX0IsUsoEjAfD4XA0vnr58t17DwKqbOh6pd5IJBLrG+ct0/z63r2Aqsqi+Pjhg1dffUWb6LlM8uKFzVanWW90J5pWazQbrTbCgiwrh/v7kWhYDqqM6Jauv3Tn1f39g7OzsiAIB4dHhJJkOh0IBjOZLAPIssl8scCobRFarzfyhXl9Yi+ubcZisWAwcP/BvY31NcTs5dVNw7D+7t/97Xf/6I+2ti599tnHkUjk6Ojozqt3splMu9PZ3nkKIXrttddWV1YikUi5fIYQaraa5Uplc3OzWJxfXV3vdNsb585JshqNxne2H3e6bQxwKp0Jh4Orq2s7O09lRsLhMLUtfTKxTBNCKAniaNCDWEgmE4PBQJREyqhNrHA0kkjEs7kMQLC4sDA/X8ICDgTDiqLKotzttL+6e3c0HgqiAAAUBEHACAHAK5/QYWc4748EPrfL9X92vo/IaeiFEAtYxAiLgogA13mKCCEsSO59B/qxa/2JpoY/fu0/T3UgcPjRb/DWHmfsCwMet8sIpJ6Wz/ebAj7M5KECzCI1cImKKcviMi4vJHkzdxwOCwHHLccdMeYiI/AZ2vP3OKWAfBg6RToffHC2e1pnmAaqWV7ev0jxQoVTcvQ97lwMj9Xy9Rl4MYZ/bhBC5MWBWfLHi9I814fO6sFP+0xPjCe5DIBp0j67H4/rBw4jBV057zRyukTR9ONxr5J3bRjlDm7M4e39GT1xDJwdwsZ7lrqpvycG9XIEv0UEp5H4mRBKgBsepu+HOkok5+vlg3VKqSAIEAKuf3b7HaeX1NsV9D0CGLMpM22bEKeUhDGWZVkURVVWIISyKheyaYxgv9+zTD0QkACjhj6hlLZbLUKINhr3B/2hphm6TYhNKAQM9rqdl195dW1tfTAaL6+sLK+uHhweXLl+tdfvC5I40fVMNjMaDahtRyNh27ZTyWQulxuOx91uKxIOQcDGwwFgtN1sVCoVRRICiry/9/zVV16WJNmyiE1MSYbl8sHtl14/O60WcgXDNBi1k6nUSDNeeulWvVYbDIaXLl3c3n4SUAPNVvvSpUvlcvXk+DiZSuwfHgOGGIPNZrPf7yWSiYAqtZr1leXF5aWlcrUai8UWlxZarWa9XhdEJZVKvvfe++PRsN3prC1oy6oAACAASURBVK4tJxJJjOT79++dnZ2triwghACAb7z+6u7+cT4bny/m93Z3sRgMh8Pv/dMHmqlZliVJsiCKY93IpuLdTmt5eWltY+O9995dWFiMRkL7+wcLiwupVCocDtfqDTWgRsJhTRubhmbZNJFI7ew8v3jp8km5sr62+mT7yXjQU1Upn5+7cO7Cv/v//uadd76fTicfPnggyzLGaG1tLZVKNhqN0Wi0tr5+6eKlcCQEAWy12oPBoD8YfH33y//iL/+yVJpnAOqmripKsVhMpdKHh8cnR0e1pv7qqy+triwN+oOd7SeZRKqQTiiKzBgwDYPPmEMYi5I0mWgMQFmWxmMNIRCPxTPZbCCgSLK4sr5WKBTkQEA3LQZgKBxR1QCldP/ggBCuUaZuAsZ1dgwhgTGA+JgXiLi0HztTugSMsCiKHP6w4+aPBCwgBiGCnBxCUBAEkdM9fG4MQljAgjvu0V1g+Pq/XA7d4yQQh9lvrgQYY4wCQglzGVSPip329RAKGGQUUkptQmxiMebwBC6yQz6BihPpTq7sBBeHeH8x0Z4lyYGD0wBiDN0pAgBydaTbpgBdkEQQry+VZvbmkTYvHMdVjzIAMPTIFuBfVsBvXBVf4vmHlk4Onz7LJgEnRYW+hRj/aAD1tQ54SO9cAwi9hz3y6g8uSpjT8/XCBh6EEldsyxzvJwcKKfNmdDp2H9NAzAl0xj9sSimBbmrOZj1BnQyAEeZfA1Li6G18zI9L7b9QHHZ6
x9x8f6bmDFzvIObWLdy2LwqdsTOM8Zk57lG8SOwnf4BnlsdDDgAe0cXzMowxIYT7pEPAGLFFASMIJtoYQKrIYrvVnOhaJp1BGOcKeX1iUkYbjYZlWO12azSemKbV7A62trYuXLyIsMAYiCZjnU4nFAp1e72gokgYBxQVQCAKAnWmV9rzxTlREIajAWBU1zQEQbvV7HY7v/3Nb/LZbK1ag5BFI7Eb1689eng/mY5ncgXLohAgy7Lm5grHx0fxZOL4rLK6svT02VPGaCqZlCR5aWlpMjEgRL3+YGlpqdluPts9SKfSJ6dnum5MxuN8MX/h3Fo2lz7c21teXjEMS1EVRZErlepgOFxaXGIM/O43v0znS5IgRGOh3d2DuWLp4HAvnoguLS4c7O+d29zIptOnxydLi3lVlY6OjkLh5Gg8Pj7eoQxBhBkFhNr7u2eFQjoaieTzc5evbLVarX6/F48nKGXEtguFAkKoUqnWG03Lsgf97qt3Xv3yy7sTTU9n8pQxRZYCgUCzWQsGlVdfe/XypYu//vV7wVCgkM/du/t1NBZJJOK1Wi2Xz0mSGAqGlpaWAKN7+3uyIp2cnj5+/PD0rNbtdv7qr/6q2+3u7DwNBJUP3n9/fX01lU6PhqOd7Z3hqP/GG69HI6H7d7/qdtvMJhgLAgKWbev6hDJqmgYEUFFVUZIwxpZlYEFAGBZLpVwhZxObMbK+uSErCoOQMqAZtqSokqT0Ot3T05PheEiITShBGLqpD+FMOEAQOepPLAhTBSef5IWc2QAc+7m/j+CsEpyasOhVj52WYIQddojPeXT07v6q74v45maowMUH6uZd3q/bdQImgDnMLuUjNBhwJgQQYhNX5sEA4w35Lua4+SWbsjQcZFw22gViL9X25bguyw+hpyJ1qpWMEyy8FYxjJH/aCQBTmPaO6KW+zt9TTGcQAOrJXqYG9BBChPELF8uLod+IXPxNeCc88yrvM/AeRAh5cPziIsDds6d0cuKeL1OHzuPMheyZ1dY0GEBI3Qcp9XIQ5hA87jkhL/WHEAHkfQ+gG6MxH6jLPED3AazLpTCfVMCJAtNFIqDfuLHpppSvIon3GsIoIYwxnj25HQC+ugUAjBAGAKE2j1XEnaDgOr06nhc8tyGE8K4DACChBDJAGbOJ7a1yFVECAFBCGCWaNkYQqIocjgQlCWuj0VgbtVvtyWSSSacZg4ZuWJauKkoqmRZFpT8YRqJxjNHHn3y0ef7CxuZ6KpOMRKLZTEaRZNswLNMYDAYIglg0OhoNgwHFss3y6Wm71ep1OkE1MOx1J+MRtS1JxIixoKrWalVVlS3L2N3fjUSCFy6ePz46Wl5e3Xu+Px6Pq5XTC1uXEun09vaONh5GItGVlRUG2Hg8Pn/+fKFQqJSrwWCw2WxRYre7nWqlohvWoD+eTCbEtJbXV2/fugIpFUXh5s3rX997SAg1TDMWizx+8qRYKD56vG1abKIbk1H/B3/8w93dvfffe39hsSSJeHV1JRyMFHLZSqUqS2g46AWDwYVSSTfsxw8fyoFwLJEsl2uGaRiGdf7C2qN7j7OFDKMsGouMx+NsLkdsIolCIhH/6KMPU8msJEnD4XCsaUeH+4l44osvPnvp5VfT6dTO02c/+cmP//Zv/t+V5ZXNjbW5wtz9e/cePHjw0u1bw+Hw9OysWJwLhUKQgWAo+PDRo0AgeHp6+rN//PuVlaWJpv3LP/8zhigSDvyXf/mfd7udVquRyaQr1dprr97ZXFsbDIaffvbF4cHe2saF61cvn56dhkJBBNDiQsk0JqPhUFEVyyaGYSaSKVlVKGQMgGAoJEqSRcji0mI6ldIm43A0fPXaVUmV+sMRhUi37HQmTxlUAkEBgXK53O33xhONOHIzJ4XGLpHgOTYIggABELizG5rm+wJColsB9hYHgiAiKHgyIdfsAbmWD3ia9fMysNvD6mHPN5YCjj0CRw7gtv07qMJbZ5zCH/dr4RoTwJv8+WKaAUaJm/8BT3bvu/lt9gED0CFUeIiYhgp/qeBFB2FfGWMaxmZQ1CkCfzMX9hrRmKP1nKV6pgnwNGHnl8zd1XSXbEodzVA9PJa4xdsZpY1/BTB9M2wa6/xnC9zww9z/kDP8/YUc3f2fD/RnAgCc+RC8PTPGoM/WFPIy/XTtMb0OLjXGmG27OhyvijxT1PXmrjieEJSyKXvPvFUk8HGF3p/UHygYmMYHSgH/xlmWZVk81PAN+Y/BbZd3Oie5PQrXPiDfGwTOahcCLlXiw2Rs3gtAAYAYIT4rBiMEIEAQEGJ3Oq2JNpYlIRwKiCK2LEMQhGqt3u50g8EAte1Bf9Dp9gb9QTY3n83mlpaWLmxdfv93v01ns/FUQhbEUDCIEZQkMZlIBIOBdrutyBKCoNVsRsIR09B7nZ4sCa1mg+f+z589K87NbW/vSILQarYgALIkFvJpm+qhYDAQjDy8/zgSiXV7bcLIRDcvnL/46cefpjMZQUBzc3MHBwdzhXyz2YzGouOx1u31D/Z3V1aXZUU6PjlrtboCFvXxaO3COYTouY0VUUCaNpmfm3vv/U+Lc4XJZBxPxCtnZ7n8/O/ffS8STQwG/fGgdfXmjUq11m0PgkHlcO/JO+/88fLS4tOdnYuXtnTdNPTxxuZmOBjs97XT05NINH5yelKv9ykhgiSoqmoSU9cnqXTanGjddnthoURsOxgOZzMZyyKJRLxeaxYK+WajGQ5FOq3mytpmKBSq1hsQY1FElWqlNF+8fv3azvb2ycnpydGxLIn9Xi8cjnTa7UQi1mg2Hj18NOwP5ufnT05PG436xfPn/umffpVIJF566dbFCxcgAN1epzg3RylZXVmKx2ONeh1B8PmXXwQC6mt3XiOUHR7s65q2vrqSTiWPjo4ItRVZkRTZ0A01GLApFUUBi4KkyCaxg8EgxIhQIkqiEpAZYGNNkwMBRQ3EEilBUsORGGOAmHp/MDg5O9ENA2E4MQzqZDAMO4QwdDEdYyTwr62j9kG8hRdjpzSA4LTDS/S8H7zKlcv7O5yP5/CDkPfN9/2oHbzzoZmLcIBBABzOh+fE7tLcl9dRSilwV+1ObgcAJZTYts1VgABMyXAPtTxkd4HeTbXpiy4RXorsQQ10OXMPlrnFpYt4zMNYvO4WgT1wdzZyIwzgTLev9AHdagiYjSfQXafwyzo9tT9EDXkZt7vDKfowrw2YMz/T1HvqADGT3nKijLm2FX4J5Gy53EH/2WAD3Dzfi4/Tz4DvATrlA+ZScvw/No0qfNXBlTSMnwil7phhX/rvZv3eF4RMcZ9OER8AOKMGpZRSZ0S79+CMOxAhjFLocELT5YgXPJxvAUKyJAqeWA4hjJGAIUaCd+Xd8wSAj6u0bZt/U/kUGgAAL6o74Q4JAgqqSjgUCgYC0UgYIwYRpMSUZVkUBFUN2rbd6XSpZWqTCYSCrKiUgV5/IEpyPBG/efv2/QcPJsZEFoVev59KJhVZGg2Hk9E4Eg53O21KKcbo9PQ4n8vZhnnv3r354ly5clar1TqdNoKgUa9LojQaDiPhkKKqrVa1VCp88dXnVy7
fluVQpVqe6ONev80ozWYLNgHHRweFQiGZTJyVy/pECwQCfEzgxx9/ZJr2rdu3eoOuNjHPTsqM4ZX19fn5nCLC1bUlVZYO9vfq1SqE0nA8SmeSX3z5xcLiciKeevbseaXaKM7PpzOZRCpOKBMFeTTqJxPJO6++cnJ88uD+17lcQRuPrt+4urf7NBaNHR6WA8FQJBqLxOKNTiccjWijga7rqWRC0yaWbUNqBoKhaDhMiD0ej1dXV6vVyvLy0q9/89vz584/fvLsytaFiaZnctl+f6AZ2tblraPj41gsiiCglLY7bcaYiLGqKDyAIwzPTs+ePn0WicUWFhYG/f6zp3ut+mkwGEgnk6ViUZYVRZGbjaY2Gj/ffZ5MJvv9/mg0CqjqWNMODg7efOs7qWTyd7/7fSwSXl9f29xY//D99xFEoixjLBimISsKB9RAKFicny/Oz/fHY8pIPp+fn58XJEFR5dW11VQmPZpoSBDjybQajPT6Q1UNjAadvf2DeqMBIKNTDTkXLEAAABIwhFAQBAFhCBFnlqYo7pg8Oyy/A/OYo7/oboIxhtipEnPYFxDkEiMPqhwduR83IISUOswyo8zTjnA05qDHVeBexW6q1CDMa9khxFOCuL9ex18AQH990SlDTpN7xhjnWtyMHE7TNV/3FWDMs89hPnB2gQ0wBhDC/DGnzkq57AZAT5UI4XRKlgPRcAYugQtgU0ENcNl591X0BQz1SCcuuncIdt/jkAEIOGcC0Ys2F+7b8LZnHl4xRh1Fpdu5xLiVgstruP956D/dL/LcJpjThoD5OgJMixIz4YqfhOsE4V5UABhzXsX4sDhv+YIAg7OZPs/tCeEFUeJ0A9jEtmyL461t26ZpWLZlE8smlsXH99omocQmFiGWTW2b2DaxLMIV+rZlWbZlW7ZtmpZlE8oARBgjx/gWY4QgQBBi6GC8e+EYN5BmgDprJAgQxgAiwphNCQUMYES95gHuyQGcbw834BWwaNvMsiwsYMaoZRqT8Wg4GPS63W63V6nVkSBBQWp0RqMJGWoTKIjl8umjr+5/8P67X3z2Wafd+fEf/7DdaPb7vWwqUSmfDvoDRZYkSd7f269V67puauPJYDBq1JuiKNy4ebPRbFXLlWa9IQoSJSwYCA4H/VQylk4nASKGYR8fVTOpwqOH9+bmigultV63t7K8nEhGzs7OFhZKWJDyheLh4XE2k9t+uitKSqvdoYxOdO27f/T2yckpY2h1eamQT13ZWl8opvPp+PUrW7IglsuVYDCoBNTl5QVKLQRBUAl0Wm3bNOOJhDnoFwrZRDJpGuZ8cb5SrUqyTBmTJaXX6zLGqrWaIIBK+dQyrfFYG4+65zZX0+l4LBJYWZq/feNqd2RrY80wDNMyJxOj0WxNtMH2zuPxSO91+xDCvef7jAFVFTR9ksnFKSQb5851+30lEJBkNZGIj7UhRGhjc/PRo4fHxyfls3K2kM3ksrm5QrPZnJ8vNRuNZCKVTWeq1bqum816bWntXCY/1xsO4ql0aaHEAGh2mvce3J8v5HVd6/Xa1DYxhg8fP/jXf/avs7nM9rPtcxc3b995+dbLLwuyHIhElKBaLBajsZhNGIN4YhOGsCirAOG9/f3JsJ9LpyLRgKRiNSAvLa+IotRqdkKBqCSqw/5oOOgLAh6PBrZp2ablkDwIC1iQJFkQRIwFx/3BTc55soucxnWIOGZDwJ/1mcU4uITc7lUAwXSorvOc82t34X4K/f4bmGpbuBTFb2XPgFtrdYCMAQAY8oEIAq4anU9unYK/22DrNx9mTgHYkWc7TIbTGOYchDJGKfR0+tNaID8dLy91uV3gHMpdJUx35cRK6He29t2cK+1r8OWXEAHv7QHklsU9zoz5lCR+HPdiAAMMIDiNpIzya8tbPqCvSMtvbtzjCy36wh3gUHDEhXr+timADCIGXDtTACFzxwtBd4nDbfmcGMggpBAwgBiCXrOAR5/xPQBAASWAUtc72o0DCDGIGJ/1NZ2p6SkHmHPd+YdMAXOMhJxswXZyeSfNJzYlNrNtYtvUsoht2RYX4tuWadqmaVumbZoWMYlt2bZJeJwgps2IM0KAEgAYn5EkCVjESJL4SDzBkf8Dt0hCAaOAByfmVoe45YNFbJNYxOLEJQHuMpybLEqCIIqiIquSLFNGe73O0dHBZDKCzBYFzCgNhkK6aT3fPxzppmYLcjDWHWq9wWDrypV3/vSHqXSuenb21//L/7CzvXPj2rWTw/3jo0Nq2a1GIxKO7Ow8rVRqu8/3y2dVSkC91jo7Kz949KjRaEajsVu3Xlpb24jH4icnZ8N+H0MQjUa63TYS2GRiGxoqnzUrlbNf/+qXa6tr6WRxOBxLkvTb3/yjKCk3btze3T2oVBsM4K8//jASjT148NCwrUJxLp6I9YddRVA6zbaqiEsLOdvopeKRgCIS256fK0qyLMpSp1ePx8LNRnNr6zICCGNoWPrN129du3JRDYbGw7Flk0ZvdHp2VqmeNhtNSmk6ne61m4osVWu1bq8fCAXn5nKaNsSIxKJByGxGTAWArcuXVldXLdOc6MZcqTQcN3/wzo8ajU6hkP3ogw/Gmt3udNSgfHRyHIwEQrGgYVmNduf49DSZTOu6NR6bsiQfHp/U63XbthFG5epZKBYzLCsYDn91934uP0dsamhmPpuvVOuCIEZj8Y8/+6pW7ygB9enz51989eXh0VGhmNeNSa1SlgRBksRGs3bl8pYalAm1r1y78tZ33rx64zoWpd9/9EmmUAjFEr1+v1yt2DaNRGILC8sM46Gm1Wp1TRsCaqmKlM9nBBERYomCcHJWyeWL+VwRAtyoNzvtFiUmxnAynlDbhgxgJAAKBCwihAVBggATAjAWIXDadIE7HBhDx8LKjQ1OWxXPZBzhiy9vBYC6E7e4yhxwPtefbjrJ6Ddu01QacG2IJ5/zU8eQe0NwoHfiA3cFYy4IAQYA9aIOcOgWBxVc3KMEUMAgoE4AcEkO4J2DN7TE40CAk6q6LI5DqnAgo16Q85gd/n94fXnBRauZdwuZq+J3Lpzv2Re5nenhwczjM9UG55XuHf9T0HW288dcR6boxaFv7M25w/5/tt40VpIsOw+7SywZmRG558vM9zLfvtVe3dW1dFf39MxwZhrDoSBS5gIbosixKNqAZQiw4B+EbRiwfti/aAMSLcmSDcsCKZr0gKNZuM2Q0z29VXft1bW9qrcvue+ZkRnrvf5x742MVzOJ6tfv5RIZcSPiO+d85zvnQNZsmwU4nKMIuooCZqGmaR1ARGUAT23wY6QUipIMNjaBBV30lWwynbYBOXWsp60dN7k8W0u5lQDc5LN8EQrOgbD6PFDgJjHE9YfTADRIH7OqLhpefHEZQsAicVYor8iyJMsy5pXxYue53iEY5cOWi7NIPiGEeL7nez7lEoaAVEWKomiapqqqoii8s4qMk0Zc1zV7Mnaciec6hmE4nuf51HZcCFGnM/Rc1zB0czz57KPPYvHk+XMXZufmtETmu3/8/+TLCzeuX61UKq7j7e7uPXm6JWFUrZz0+r2D/f2XL1
8MhsOVlaV0Mn7/3p27n9/qdHsrKyvlcjmRSNQb9ZPaSS43k81mCCQvtrYAgtXKESF+NBb7/JNb//S//aef3b5tjiaeJ+3tbv/qr/3af/zedy3bTiSMtbPnEgnjwRdP47qeSqdbzWalWtE0HUJ4UqnOl0uNRiOVStZqNQpAOpV0fS8SiVoTO5FIptMZzyNPnzwuFIr7+0e/8su/7Lrunbv3IAAjc3Lv7gMjJv/qr/+GaZr/2//8r7LFzHhsj2233W6b5lA3dF1PJJPJVrtTrdYpATP5rOOYy8tLyXg8bhjU92PRqO2ML1y4/Bc//KvxZHj+wtmlxZWHD+5dfu1StzPECM3ksp999gBjqMcSV15//Uc//nEymVpcnL/1yceJeDIW0yonJ0YiMTtb7LTa9Xqj3+sZhp7OpL744osnjx9Ztp3L5Wzb3tnePntm8/ad21988UXciB/uHyTjiV6vVyqVNC2CEGo0mqlkqlQuG0bcMAzLsrvd7v7+3pvXb6yurjabDUKI57m6EdO0qG1ZrWbDHA4kBBzLzOVnEnEjElFrtQoAuNvvl+eXYoZ+dHwyHJumOVZUBUIgYdRptV5ub0OMeB9cz/M8j8EVn78iZr8gyAb5yqJzodDvYywhSSAGVrg6dEr3s+fhaZEhQgiImtlXfFYBx6EeMJRn/zhpSykbAy5ycCGCh1BCiOt7PhutzWr0fUb/AkJ9Qn3XE818md9KROEpFDQMu7/DXRzCBk2M4wVgmv5Fod6agE93PQ3XvBJIuPhhPIWhYZLTrC2YAivfCnNpA8MkNkJPJ5PDMC2+bLruYilfNRjibxT+4CvHEPwaRFdQrE2YIAMB931aPfuKIWH2jQLWyy8IPgAIuQJhbi6gv0LXBN+OeA8UQ8qEmAAgQMN7jsD0czy+4r1/4Cn0J0F5sO/7LDgQTRkCvijENvListMVxVy/GoijYdi0gqnmWdhIsQIsUPV9CCDCWFEUWZYDHQKggJXUK4oqyzIAwLYdhFAml4nFDJ/Q/nAIACCUTCYTz/MBRFtbj8bjiR6NXb15Y39/97t/+se1evvs5uav/9Y/qlWr+/v7S0sro9FoZWX5s1uf/vEf/9HINCUJu56XyeZkSf78888RgqqinD1/8cXW42fPnr58uYUxfPPNG7FYfDgyFS3aarQSSaNeP5AUDBGilKxsLHz48Uf/4De/3e3bKyvrFFCIwJs3vyRhfO/+/dXVtVu3PpMwLM4WL164+OmtT6KRKKX0+OQYUn9nZ3cysVrtzrVrNz6/9WmvPzg+Orny+hue58d0/eLFS67rJ5Lpp8+3IpGIObEIheag49h2s9FeXFqIxfTZYtH2JtE4OHP2vE/QyWG12xtOLLdSqRwcHLkedR1P07REwijO5A09nk7ENU2NGzEIyeHBUa/XsWxb06OZTGq2ONNpN998601FlmRFoT5UFG1smcPB6MobVw6Pjh7d/ezCuXNPnz5LJdOqqhJCI5HI8uLyzsttz/Nty8pmM6PRqNvpalrktStvrKwsx2JRy7I93zs8OhqPxufOnNvZ3p6dnVtdXc1mc6oaUdXIoD+Ix4x4PP708dNOp1Ov1YeDfrlUunz5siRJL7e3NU2DGOVnZyVJbrUa+7u7CsaT0QhS+trrr5dLs4Nh/+joEFDoOM7a2ka+kCcA5gqFeDIhychzvaiiAo+oEU03dEqJoqqqokQikYiqBv4WIQQAyupRGUECodA2AlHTAqf0znQOFg1wNHTLh6n2UD+z6b0cwrGftQpT9Kdkmh6cfiSEPwECsb8ABDxRzGyGD8CULBGYxrAjwDoQuGjBrtLQgzmCIaES+LmPADwRZIyPeARkBaWUnppEM4W1MEYDEIK1VxiS0M4FH6EAEPHREOYFpiSQ+kydfQgQEtl6KMS50x0I1pbi0Jx5sXoUYMH0c5E74LkAxvNgiAThEz67vLIBIQSpYLzE8Qf0zSknn19XvHSbe+inLxcEEaLBeDJ+jAghjDDTVYZrBoW/Twj1fOL6xPOIz/65vuewf57r+J7re47nesTzPMf1bNezfeL6vuv5jk8cz3d84hLqUeK5nuvajuM4rCAgbD4xxiyKZAoICSGJGWZWxRYkmn1CKIGif1xgQgAECCJJkmOxaCwWm8nNrK2v5XJ5jGTfJwhLAEDLchQ5ksvm2p12LBa9eu1LCONmp9fv9S9dvDi/fqbb6x4cHGkRdWNjo9cb9nv9crkcjyd+97/43ffe+2Y8kbBsF2N8dHQsSZIe0+v12lxpTlXlt26+o6qqqqqPvng4sexsNhczjPF4ApC6uLy4vLaEEZQkJZ2Oz+RT/9//+298gt77+jd73U69Vt0/OLh+9XUtGj3Ye4GxVKvXbn/0U2tijcemLGuVWrPT7R8eHhdL85KsaFG9WqvvHexjWYloUcd2LGsymYyLs2UKYCaX293Z2t07sO3JcGQeVSpXrt90PJqbyb926XK/VwOYntlcv/ruW5FI7M5PP01li6oSPTjYTaaSjWbHsmzDMEbDYTab7vU72YyxsFj2HSseUzOpxGhkVvd6tu1ubm5efv1if9B78eKpJOOjoyNVUR3b73UHCwuLkVgCAvD82bOvfuOXXrx4IUsyy9xWqvVsbqbValWr9U8/vTU7O6uqEdu2CSGrq6uaphmGXijkJ+boxhtXizP5udnZfq8bUdUL58+NRmYymcQYO45LfAogajfbpbm5dDK5tLC4tLRECHFdZzgcyIpyeHw8N192HMeyrHajiQBVMF5bXvTtiWONAaSJpEEpHQ3N8xcuRmOx4+rJYDQcjU3bthVFwYBIACgIOo4NAFevS5JEKWV5JiqegRADgCjhZDMl3HETDi0G01lanO9/pebzlLs2vW3D2D0FzACoXsEuEa/zTRLe2U2YG+IH+Mzk9/xzwmkOpn6IRCbPCVKfAEDDHmcYdeH0y0Hgrk0jGMCT0UD0/jzlsIa8Z6aqQaEHXhWFYDQ0RkB8BgEgurTRKYvPVhm9EpAwyAsJNznnzg1i2CoCITgPPRlolgBCQq71ihYIQWYPMIQY/ry58BACQrhWlFKCAmZJfBHLLdqLUgAAIABJREFU+APBbgHAW0NAgDj1QYgwyJQAMtWSBt8iJKHTiyB0zoJLAwKeXQBsBUW3UKYPQpSSqVl9RdTFlgiGNjy9cIOf06867arwwxT18ZDJmzFClLFfBFDgEz9YW0hZQz6AEAKUskZzlNKgeJFSivnoPWGPOe8KZVlSVCUW1WKxqCwr+XxOVWXPs83x0HXd4WBg27ZuJIhPup1uq9WLxaK248Ri2nA4MsfjdCa3vrqqx3Xfc1PJpGVZh/v7L7dfKooqYXz//v1EMn3h3DkEQWG2oOs6IW4sqhaLhb29vWQyqesxSZZG5hhQ6Pv+/t7B3NxstzMZDJu6Hi0Wyr3eKF/IIpkkkjOf37ozOzf/5lvX4onE4eFBp9v7ha9++emTJ/MLi6PRWNI0LaLGYrF2uwsonVju8+dPY0ZiY33tYP8AY8lznUQiUS6VWs1msThrTixCaCIeb7fbz1/u5POFmXwBQvx86
3khP2OOrUhEe/58K5FOxhNRLapVT2qDofPw4fOrN948qRzGUykIKQByIpnIz+S3XmyrspzNZOzJxPPIxvrq8dHR+vqq68OX2yeXXtvcer514+YbiXjs8LC+sjJfqZ5Eo6lcbubxky9SyfSlSxfa7faTp8+uX7/2ve99f75cGg6Gjx7e1Y3E5cuX2+2WZVuHhwfpdJpSQjxPN4zV1eX9/f2ZfL7X7R5XqulM0ie+bTmGEZ8vz1dOqoZu9LqDrWdPJSybprmysjIamb1eFyJk2Va71e52OhKWms1mt9NTVfXl9gufkm6rJWOsytJcIa9pKqB+YTavRpSYEYUQlctL6XRq4ti6ETcSCctxRsPReDzWVJUFs61+v9vtEKY9E82oGFiKCw+zSZCYz25EGGEJSRgj5rkh3seNpzQDL5d7mT+fAgKBZxbcXwLn+CPMxAq8FpU9NNS/S0TflPi+R7m+g4t/CCHU401hKPHZRCaPF5fyjCX3qxFPUBIEAqQKQCjItkLe8Ea4llA40SAE4EBgX+h4aUDA8Ht5dXlB/D2FRJHWYCNSgAhqAnuCKJ3aIsqnEEAIp6BMKaXTRvMwcLeh8B6hqO0SW0fCGYXsDGIIkSRhzLAeCoPET+xpxESCUhdgL17CQQYeiBQFU1eJJaWUzwgm0yc5xcF3mrKtgmBXg+ASCEEkndYTcFEBJYQGHUYpFXonbkop4l9BpkZ3uob8beJ/IbgHr16IrIZQLHjQE4IJkSEP95jwX0IIAcpKt1hnKnYFACTCPk43UcB5JmYNmW4MIdYmCwDAEwEYypIky5IWiRBKIQTRaCQRNzACjjWZWNbEslzPH41G9XoDANTrmaY5UlVlZI4IJcXC7HAwODo5AQAoitRstd68cf3suXNHxyeffX6bUHrt2rW/+dFfpjKZa9eu7u3uuZ63ubGKMVRVVdcNAIChG1iWEMTj8dgwdNdzu71+IT+v69pg2M9lC73uoNOt6zElXyh4LvjLv/ird969sbq81Kg3a/Xq0tISgEq1UlleXnRdZ3NzIx6PV04q9+7es13Sbne2nh997Rfe2dt5ibFkmmYhn5dlCSFUXpiXsfT8+ZObN7/0xZPHL1+8uHb9+srq6v7+wXBkuo6TSOiNZrtSqUBEZAlTQCnFd+88uP7mO/VGMxLBCNFUKhE3klpEm0zGjx4/1mNR3/O67WY2m0sm42NzlEzG+wMLqu7q0mJvMLh29fLhwV40lkIYWZaZSs52uv1nzx58+atfIz757NanX3733YODw0GvE4/HIYSNRv0rX/1qt9uRZeXw4BBDunn2bDadqlYqs8XZo+Nj13UT8fizrS1ZQqlkcjAYLi4uxePx4+OTw8NDTYu4rjM7V6rVGqPRqN8f7O7u5PMFTYvEYrFkKrUwv4AxzucLrJ2lGlGoT/rdnmNNtIiqyjiiytGoNrYm9UZ1rjSrx/S4kWj3+oqixOJxSZJtxz08OGw0GulEMqKqjVr9xf6e7die7zOw9j2P3T2cPUGQK5Y5mGOEkCwKfZnDI6ZZYCH7EaiHBdwgFGp+LjaMXp1NGyBVOFygIglHCKHEEyyOJyJljv8B88q4WcHPUqbL8D1W4UYAIEI2GuSHGYKLMYj8r4AXYtaB8ducqiUgiOk5YP/sUVBAA2cfAAJFxBB41XhtZZGIRnHC4WTHHLI705cpDGaBARBWzwYxC/uFhOuYAYD0NLoheOpVKMpqwaniL1bgEUyCBKcfEEIAEA1tmTeNCiE+pZQJNhHgUyHCnwdgWmwHKU87sKAQiIoChtQ0UOUGxBA4JRYQh8ZKpbhMTNQPgyDMZLPlmRmiQmbEsup845CPAQg4Jyhq8aCwB8HSQRE6BIYB8nGk01JjcaYgQog1UGQLy7IdEAAY0hcTSn1Rgsm0SPzqQjyFIEmyqkqKKquqqiqqLCuKLHu+n0wnC4UZhMDYHHW67X6/59gOxtJwMBqPJxhhjBVrMk6mEvG4oaqK53orK6ub62uapraa1aPDnYcPHkSj0Y31dU1Vv//d72ia/tabb1WrJ8T3Z2eLtj15+PCBFtF0XVdVdTAYEEBt2+33+/FEolqpxg39+Ph4NPLL83PJePLjj27lcrlK9cC2R9evvWnbbr1R8/1xqbywv3/YajWPT6rf+tYv/vN//vv/+B//k5fb2w/u358tFGrVarfXrTQ6lFIJ0pFpvvfNbz598ghhvLa+rkW14mzRMIx+r7+3t7O2sXFSrc7NlXxCzpzZPD45GXTblJJcJt3p9mdmMgiRjz/8uFyel7Da65vz86V7Dx/5jvn1b/xCKpV8+WIbITgY9FRFqVSOESC/+mu/sb62vLe39+jRF9lc7uDgJJdLTcbjaCRGqB1RVdelv/ff//6Xv3yjXFr4zp99r1Z59p/9/W//8IffX19bT6fT/+P/9Psbq/O5XA5jnMvNGHrs5ORY06KuY505czafnzk5Pu722p1Oq1ZrFAoF3yePHz+eL5cODvavXb+eSCSfP39OKRmNhuPxZDQyJxPLdT1VUY2ofvbc2VQ6jSWcn8mlU6nK8cne7u7uzm6n3XIc9+jk+OTw0HddplnRIiqWUEyPjsZm1DDy+ZyqRobDkZFIaJomKyrCuNlo1Wo1GeFCIa8qCkCoMzInEwtjybZcyrRAHBgRAhgAKESfiLMEgjXmQC8SmxITBQkDgDEW0ngIMYCAG4BpFgwEP8HPPgLcD24xMhXzB/eKJ3z98NRVUefl+57nup4Ywspe5MpF5osCdlwAsfQqzxayuAcGxUYhXGcoeiqVF+ZXBCMFABDjjSEHmJDvzwOiNaECYo0S+BeG2Idg09wkhAEIBAFIyADwIwg1gAvlMUJwP4VOcQj8xDDSQkQ6r1js4EMo6M8XHD+TsCAAArYdAJEfB7wXRgj+p/LW6dkWvA2hPL/P3X+hbWXoHyAvEHkmGGQVAKCUBjYSQjDtrA0phYK0gad2YroOU5MOITsUKji0gLXiO0lFlMInLohJMCJxA6CYkQQxRgytQVggDQHGWObDMZCop2D9JYjvEd8niA9uE4E3lmQFS5KkKIqEMcZQliRJlhRV0WMx4nuUeFhCw0G/3+9NJpbjOIqijkZmKp1Np5MQ0JXV5XgisbKygpGkqspMLrO4OL+8svxi60W723cd2/P9RDJVrZ7U6vXRaFQqzfZ6HUrI0tLyxLYP9vezuRyhtFKpEkIKxdlut5NIxCVJ6g9NXU8eHe2vLC8Vi8Wjo/2F+YVIRJvJzfi+jyAoFGf++q9/dO3ateHIHAwHuZlCvlhOppKyrHzx8H4ul7Mmk6Pjk97Ise2JHje63fqV115HWJYwPH/hXKvdzmQyhqHv7W23222E8YuXL99++23DiKuqOhj0qieVhfmFfr9njieN2nE+n1e1GII4k8nIsmQ79ucffrayVn7n3XdMc3xweLi5vs5kjZSQ8xcvXb586fvf/0G3PxgMTUM3jg5PdCNSmMnPFmd1Q6OUPHq8pUnmL//K3+v1R3/953/2X/83vzexJoPB4Ozm5ne+8931lYVsOoURSqWS0Whkf383lUrV
a9VoLGboOvE8SrxerwcA7PcGl157rdlqm8NxMpXIzeQ3N9cfPXpsGMZJpRaLxhYXFyFEEEtsnDJEcG//8M9/+N10Jq3r+sOHj3q97nhk+j4xYvrzrRe72y8SRtzz3MlknEwlKPWG5ogCiiU4k88RAmzbllUlmUrJihKLxjzXq9XqnucqWNJ1wxxPKpVqzzQd2/Z932WNcEVKjF/BAVRDyK5bzN1/XsjLJ4JBIAkfP2AsEWa0N8svSvD0I7j1wuxF2ADQUA3/lO0hooqG8OYrvudTofgR5T2nLAKjcvjflMuuIccAPsgMMuwCEEGIJIwCNAABZoPAURZ8/HR3kSh2gIAIhxoHcAvh6cCIOc1rywsCqqbUyJSdDyA2TESJzcHA2Qa8f2fA/IQRjdmHU2sNQxsKRwDCBrD/2NkKdkD8MlX+BGg35WYo0whD7rYzVVRYBSSMTdCGKHhacDVMwkkJJFMVFWU1b1PcP2WQeBHBdINcUA8JRRAwHg0S0YkIBIspDM7Py0FxXoh5QaJRtqCY2O8sswQBZCEdO8VBOh0jpChyJBLRtEhU01RFYc0Tg4sIY9YKEQGMAIKEUG86qYY1qyIEUFE0iSRJUhRZqO+wLGFJwqqi6IahRlRFkT3PmVimORz4noMlSZGVSCSiKKrv+41mOxJRdD1q2xMEoSIruh5bXlpYXl6E0I/r0StvXL93916lciJJuFgsqqqqqvLEsmKxKJZwLKpVa7VEPNnrDXd3dvv9IfF9czyeny8fHhxYlpXNZtvtrjUZWpNJvz9IJGLZTGpkjqyJOxnb5ni4uLgwGoyisfj27uE33/uGaY6PTyoXLpxrtdqO635x/16xWOx2u7s7u6ZNxqOx67qaGklnM7lcZnlxMRaLSbJ0fHScTKa2nj6JJ+KdTtvzfEnCsqqMRiNKCADUnoyPDg/LpWyxOJPL5YcD0/eRYRgQ0kr1+Bvf+sr8fAkh3O10isWCLOOxOcrlchDAtdXVZ1vPP/zpT12PQIzavUEykZydncEIzc2VbWcyW5ytVJrjifXG1Sv3Hj6+dOV6eaF4+/Nb165e7/YGH3/wowsXLrJhpQuL5d29XS2qNVsNBFGlcpLLZcZjs1wuJRIJy7INI66oijmazM4W05n02+/cvHfvvhbRksmkoRvrGxt7BwdaNDYYDre3d3r93tbzLUr9a9dvaJHIJx9/jCEqzZUcx+102t//wZ8/+Pyz2dlZVmYIIZAk3O52fEDarUYimVIVJRaNyaqsx/Ver08IGY/Gx0dHTx4/xQhrEc0ntDsYVGo1czyGELImgwhjQgkKUfPBvcUrURBmnAYS8m4kSBP0M4/psHUIMZQ5ASFCbBZYMEJbYMmrcqCwAfAJH/DhU4/41PddX8g8CWHN31mVp8+GcYfUeL7nBfVfjANiwwR5/zG2M4AnaTGXOiER8fBhL9N6NyA49oBRD+Gg4IpwuN0aDNsA5lTi9ZVF7lgGmVHm8wZ2hXMFAXyjVwALBgBKCOC9OCHPKwqwE/OLQXibp8CUinRxIFMFOGRp2PvDBXhT1BYBAdNO8ZcI4SlcCoDo8gAF34KmIQjl/ToDj5yEWmYDYWoAAFT0In9lr7hFe9VQUaEJghSS6aHDU2t7+limq0EBhT8zEY69Gj6drzR/QrzaHUKIJIxlWVZVRYtEYtGYpmms/ktVVa7kQRBChCU8DREJYMk3Ji51GWHJKkoQhKyVkIRFLRhWZElR2ORtLMuSJGGPuK49GQz7rWbDNEexaCwW1THG47FdLBQS8bgckSQJTybjRqNpTSb1em0yMTOpRK1W2Xrxsjg3m83mms1mt9crl0uJRGo07DO/MqZHS6XSeGLLiqLrhqIozWZLUSMsIdHrdlPJlG1NZJVokRhGWIvKkgQKuaJtgVarPZ6M6rXK9vYRhHBijbVoLJebqdbrhXyx2WpS38dYOqmc2JNxrdpwgOy5juf7lPrZbGY07C+vLrcajaWlxZcvt8djE0NfVuRMOhONRefK867rMNfQdZyEYdz68CcXX79YyGf7veH+3snsXHlhYX48HioqePP6lQ8/+mBz42y1WoeQxHWdUup5XjaTdT2v2xusrK7FE6n+YPTixYuF0nw8rjTqjUK+fP/hnZl8/r/7X/6P3/mt/5RC/7Nb97797W9vbz8fj63Lly7+9IMP0+nM2uqabTtKRE4mE67vdXtdTYtEFNXzvEw2vfPy5Ze+9E632/E8X5FVTdMIAaY5OXfh7Cef3kolEwcHhxAhrMh37t6xbXcmNzPoD5jwSTf08kI5pmm7e7uyJC0uLrTbnVqltvXs+XAwKpXno2qkWjkhlJrjkeO5WjSialo2l0ml067rplMZI2GY41FE0/SY7tiOIitjcyJJclSLHlcqlVrVdhyAMSEEQch8OFVVgxucEAIAREhwFxizpB+7+jGCLO2LYXCHnkI6jDgBgbiE5NQjDCks5/BKyo0GCV/h/lOhyybE9zzXJ4S4LGrh7/FET0ZmBiihHs9p+8KcAEpZa5UwnQIR70rBR59wJAxw55RsEgIUGCxATmFywJ0hyO7vkP5eLI4ICMTBh5x53jI4JHs/rT8JI1cwXIWwoxGj3gOICqS4P8fLPcXqBH/ysxJEXgHcBo7AK58K9L+BveE7iTgjHjqVgo0JATJnzVhTDz8kqqFTtgpCiESyITgy8e082AyWSmRjUMjYsnxrkOpGwZUXvAGHhU/TPMupA0W8FpL/Q0iCCENebDz9J5z1iKKoWJLYNQchZKI68V3TcXdsqVk7CsdxWdaKdZhgAzIRQgBClrXmyygazxJCXNcFAFJALcuqVWvjySSZSi0vL8diMd/3JUmKxaKUEE1TCrnc6upqqVRKJIzBsC9J6N6dz7a3tw1dJ4QosmLbk2w2m0wkdnZ2hsO+7/u6HhsO+x9/9HGr3e12+ycnlcdPn3uev7C4cHx0qOvG8+cvxuMxgCASUVMpI52Jl0sl0zT7/Y45Gl86f3lpefXzT/9ybq6kylqv279w4UK90cQSPnvm7H/4D3/07pfeTadTZ85slkslz/NlWQKUDZ8G3YGnRTUtFhsOR5lM9uHDRzMzuV6vb9k2JVRVFd/3Z2cLyUTCHI1sy3n29DkPXBFACEzGk0w643uUEhDTY4XZGYiILMmpVJIQqmma4zjdbmd+vhSPxz3PM/TYxsaGbhiXLr3Wa9cajdpnn9/RorGTkxOE8P7e3u/9V7996dKFjz/96Jvf+qWnTx/3B33ik6PjE2sy2tzciES04+ODixfOHxwemeao1+2cO3s+FtN933/y+EmhkFcU2TTN8XjsOM7JSSWiaaVSeWd7Nz+TK5fLtm1rsej+/v57731zJpdrNlvPX75EEp5fmDdN0xxNnr/YqtfqqqpWq7XjwyM9Fjt37lwqmVAVBUHoue7YND3P93y/P+hXKscISRDBcnleUZSoFtE0LR43IIC+73c7PUDpcDDsdLvNZnM0nji+7zgOABBLkizLGGNKiOu6tm37fKhRCI1EoA5F9ovdIa/00gk8LZG5pYyBCUiZMKsz5XlOV1+Gfgb
5XRK8K8wIBao58R4S7BHlehreiI0Qj1ASNKVACEliniXrZCphWUJsdvEUEyBCsoRlWZbEErE3Y4wRRGgKTiAAn6m1CGYd4GnbR7areHVlgSG6TygjgllOgtEWWJI43zBlyny2uCiYC8ZiBwgBCuthAAUAU4AglCAijLkRDTKAsO0BoLMeOhAC1t+JUspaeDBWA0BAmP4SIZ8QPoQLilgDUEgIZKYW8HwJApgHTBSEqkd45kA495RybodFC0xXTHhNOQCQUgIpghBjTClFECIAKUAIS+xYIEJU6ISYOBIiIMkSq+4Q4Sk7K3yqEQUAMMsklEWvoLy4gmFgSFGwShRDiETtCILQB4AASCAEEoQSRhhBRcIKRhghWZZURdUimqpGFEWVFVlijjrGVFx5kHcukghhkykBocTzPOJNbyyEEbsMVEWRMAKUIgokhIjvK7KsaRqFdGYmm0mnI4qKZWyNJ9FYdGKapjkivi9hNBp0Pd9FCPS63W67SX2/PDd3+eK5jfW1mUKh3R1MrEmz1Tk8rp47e6ZWP0km40Y8Vq9X40Z8PJ4kk9lioXxweOg4E8d1spmU7Tie68bjcQDA7NxstzfUdX1zc8Nz7Gw2gzEd20693nGJO/HMa1ffeLZdicb0L717o1Kv+xTEDf3evftnzpztdrv5XHow6KXTib29l6VSwfXtVrsKKMgmcqvLy93BcGm+JKtkOOzs7uw4NlakmOnR169ccwmcOO7mmXMYSc+ePiuV558+e/7mWzf/7Xd+cuXShuU4h8dVx/NkRSqVZqPRCADw0sXLrWZXwsrYNF1iJZLJ46PK5UuXbt/53PPtbC7T7nUnE+eD9z+OJWaa7Z5uJLK53Pb+E9u2S3PzX/nKV09OmrFISlGkSvVIkqIXL1z/L//RP/mF995eXln58MNPb779lUQievv233r2+NqV6zE18dFnfyvJsqHHNs+c911yeHDSaDQLc7OVysnq+lp/2JMlmM2kG/UGgkCS5evXrjqW1e13avXawkI5nU199OEHxVzpyePHhUI+HjckWe72OpFolAIUiyeierzZ7g76g8FwFInELMuTsAqBLCsaJXAytj3fgxJSI5qCZYSl0Wjs+J7leYPJOJ5M4Yhm2R7CEiVUkiVJ4jNMbMcZTyau5xDiAUBkWSLUD5gQyFO+GEKMJQwBYMpnCDFkHR4wABASQCgg3GkCCCOMIKKh0YScGIZivCugFAACKBGIxzQUnueJikyPMBUndcNz+QihHqu7IZ7juz7wiY88jxAKmQfrusQDAFBAeJaODaOlgRcIhE5JgBZAGFJEWVAPMIYIQwwhwgHNSymACPDUN8IY8YmJCGFKCYCUz02GYrAiZ3KnLAifrba+usThmNAgqQphqBvoab4ChtOh4s3oZygdjnzibYHrPbVTp8l0ntcI9f5EIs8fehIhkVgWVIyg7XgsNFX5CBVTOJQBkNkZIDq4gSBEANPiN9FuT1gJCEUyOdgNytgoBAEEYoAcEGKt8P4Hf4lFECuCXglE+APRUC49TJoFwRFn/zi5GQ5KAMsjseEWzE2QJEmRJUVVIxFVURSEEKvmZc4DYuXHcNqs2yfUJ77H2j9wA8cGb0uyJMsSZm4XIdS2J6PRyDRN27Ztx3FdB0HYatUn5khRlYk5AoAqsjyZTBAAsqK0Wq3BYAgAcV03m812Op1uty/JciE/s7i44Lpuu93b2d6yXfLlL71LiI8xTqezkYgGKPUJURTZcx0tqn30wV97FL998y2MpFar0et1JYzX1lZarWY2l2vUa93eYGFhsdc3k8mEpEQPD49SyeTy4vKtW7dvvvXWTL5w+/ZtANhIJrq5vm5ZVjwRP9jf73RaWiTy9OkThNR8rpDNZubmZrcf3988f2auVGg1GpblvHixmzDix/X6hXOb7XZL1/VMMtVptxOJRLfTufP5rZl8gVp94DvJREKPGd3uoNvprq2ttzvNbC6zMF9+8OAhoaDdalvWpNPpDYdmt9O+dv1Gq9lMpJKj4fjTTz+dnZ2/fedeKqFHo5FOp67r0YvnL8aNeC6b/9uf/OTdd798997davVkc+Nys9lx3ZZpji5eugyoVJqbe/L0gePY+Xxhobz49PGWS109ZpTL84lEvN/vP33y1DD07Z3ddDpdrdZ0PaZFNdd1c7kcG5olS8pwOKzUaoos+Z6/t7t748ZbOy/3isXiaDyaTCbJZLxUKrEkzWAw3N564fv+yeEeQjidzsiybJqjaDSq61FJwp7v+J7LlPOO4/g+0aJRQqjjeIqsyrKiR3XLsibmWMIYYEgJtW3bcRyGrAhCWZbYZADEKjS5s8j1kQghQfpPI2lOlkCmGAVIEP6BaCh0xwTOqLjt+b0WZn5410Th/RPKB/Mxut/1faEE9T0usCaEEBCU6ANenXU61JgC1ClCGwgmIMh28INjPH8ofwshK32DEILAlADuYlIIKMYBtcDqaqFw2SFCEGPEm8Kvri4GcROE4VUQwDTFVwBO/z7194XGKPgZHA/gXvMpsAv/LggiLnFlTwalH2gK6QBCfPqDgJki0f0itF3K6X8c3hkAKAA+4d75zx18BgEgARwKeosdG0aIsH1kHjRbUJZzFv3E+ek5lXZBoT2DQswJTj97at2ECourfk+fjuDNCCEYMH2YD0SdMk68T7qEAeb2nl1LbMUCHg/yu4mHyZ7rigIcwbeKbbIRHABQRVEikUgsFo1GowghQEEmnVxZWfF9V5FxVFOxBDCkw0F/bJoSxo5tAwAiWpQJFHzfG/QHc8XZVCpVrVYePfoCI+x4nuuS3Eyx025Zlr2+ttHv9/b29gzDuPHmjdW1taPDQ103KEXzSyu1atWIxzc3N8vlkuM6nU57OBwl4saLra3NzTP7+4eyGk2nMs1W+40rV4+PTo6PK0dHx29cuTIaTVQ18sknHyMsT8zx0eH+L//y33327KnrOrY90WIx13VOTioL5bIsKRtr67Zt/cLXv3bvzifnz50djYbb2zu7L19mZ2ZNa3xmY+Xzz26f2TyTSCQQgnu7u5cuX37y5HExX0yn0o8f31NVzfOINbE9x9lYXx1Pxpl08tGjR/t7hzMzM9VadW527umzl4osG/HEeDwZDEeEAElWs9nC863t+09eLpUy9cpJOpOC1F9Z21wol1/uHymKtrq68r0ffL8wW3rjysW//fH7Y2tYnC1CQi9dvPT++x/4vtPrtt986+a9B0/29g9n54pnz55dXlp+//2f7O7sleZKlXo9k0m5nmfZE0nCiUTypFLL5jLj8fjo8CieSGy/fKkqSjyRSCQSpVL58RePs9mcJEkI0XK51Ol0LNsa9Pqu40BRa23/AAAgAElEQVRAjbhOCZmfL2uahjGACBJKx+YwlU55nusTH0uS6zoz+YLrOrbjEUJ8gLRoNKbr0WgsEolYlm1NJpZlOb5HCAECa9n17vtegBLhqxEiiMVs91ceEsZMbYdCTiS/NSFLx3HYFfQyu6eooLq5FiZgfnzf450bPCHz9xzqe8T3WBzAdjlo2UJF03/KNT/UFdS/H5reKs
wSDBJsHCxYjhtzd54JIrm3ztK63A4ADMUkBNHsCEAKmaAUiU4KCGGMAUstIGZUIEJIEu4sXl9d5OhDKQp3+JlC7ZRNY68GqCRsLeS8kUCxgGYKgHRqtV6xeKHfIVd/cq4K8igvjJKnKCYAgkI5AEK7x3aVdfUmhO+VSOoD1q6Zrxar+wWAt4OmgGXnYVC3wMATsiFBmEUpECGhxhFZ2SAy5aeJJ5OhCF+oEHwBvm2AuOhICHzZVck9ERBsn/KhA2Hzxsg5CkXWPjA8ODCZYsFFfyFeHyn2BFJKiedDVjWDAAU0aExNRDk04hOUsKIoqqpqmqaqiiQhSZJlWVZkCQBgWRYhRI2osVjMsSfj0bBWPW7Wa71ux3fd4WhomaNkMun7flSLpFOpVCpl2zaLmYaDAYAgEtFardajBw8r9dbGxsblS5cIAds72xhLR4dHqqoeHh56np9IxNvdXqfblSX14oVzX3zxqNvt5mdysWjUsqy52UI2m6lWK+bYwZI0N1vK5nK3Pru9sLh04fz53d3dXr/vub5h6LZlmaPxZDIpFAvHxxVVVS9durT98kU8HpdludPtYQRjWrTRaOnRKKRgeans+645Ho5GJqXo6cODWFzvdhvZbPbg8PDs2bOj0Wg07DuOVZ4r3b19650vvXtwsJdMpkejUdyItzvdcrnUajWLhUIhnxuZo4iq+YTsbG8jSdnf34tqerfXKxQKjuMuLC62Wh1JUv/3f/cnZ1eWizOJ0aB37vy5pcUFVVV3tnclWfn61772/R98f3d3+7d/+7ce3H/YbDTX19fufv7Tv/N3/t6jR48J8Y8O9xaXl1Ul8n//wb8999rrFy6eo5QOBsNHD+6vrK5ShHTDmJ0tSpIsS7g0N+e6rqqqqWSaEvDs6dNut2vE47OFQkSNxHT9YG8vnUpVas12uxnV1Gaz8eDeJxJW19fXqU9cx1lcXCSEPHv2fGL25+ZKnu/qenR+YbFUmmNSslQqqShqJpOJatFkMukT0B8MjHjSMBIYYUlSbNsa9PvjicW1NxAyYTyl1HVd5l7zsFiUK7EcaRD9IsTYWFYOyjw7Np0QIcTb/3ISBECK+WBEnsIDp3KHIRIAEMKQnas2qVB4Ep85/oQItScNAgTh4Hses2HU94FPQfA8ESWcgVMHIT8USZIEmEDeCAdijNmwYl7kHMpxC/IHAiFnRBACnrxCrBcAb48nqBgaygFPjxqvriwEyx5+BGN/Bf6fyv0Gm+F/IihAGr76HmEDeDaWIy4AECLMM8Y8UEDTGI35qj9rAMBpLAy69HBgDNY1ZMOgKKgTSIlhKPI49eDnRRz11HNAiA9YYDtGAikBj9cAQBgHzAwAwhLwnq1hz12ERSHuKxwGAl4pBuB0RSlEAIqUEV9OUQvAswuCDWM1EABwCRahAEDquZ7neTz6mBZV81QwBYB3gGAXOSv1QEiWFTY3hhCCMZYVrEUisiwriqKqiiTJGENFUefm5ubLpYiiIAja7Wa7VRv0e57n2LZlTyaWZfX7fV03YrFYr9eTJTka0UxzJMtyJBKxLTuXy/oehQAqkSgLruPx+GRslsol3yf9Xk/TtF6/G9U023ZGk0m/3y8UCqVS6d7du7lcDgBg25brOmyy4/MX2+bIPKnUKSXr6xsffvzRyupqtV5fWlrJpFPF2dnnWy+63bY9sTKZjOW4pbmCpkZqzaZpmpSQerM5HPT1aGQyHudnZobDfqmU12PRZDIei+kY48/uPC3NZbef7RTmihFZiqhqNBI5PNxPp9KZTNpxvUhERQiao2FEiUiScri/O7EsSZKyuXQime73h8vLS48ePa5Xj8+cvZxKpff395uNZiKZGgyGsqK8ceXKn33vz7sHh2fOnjk+OgLeYPPMuW6vP5lYiysrN27cuHX7XrVysrC8urS0cPfO3X6vn0qn58pLM7nM559/fu7s5t27t/OFWdt2D08OsjMFLaL6Hvnxj/4mlUolU8lsJlMsFAghrmvfuHHDsiZ37z9YWV23bedv/uYnEkbsXHe6XYSwhFGv263VGpKs5HJZhGCv10tnC+fPnTNHph6LFov58cisVk+uXHntwoULpdLcysrKmTNner3uweHhaDiECNq2HY/HKaUjczwcjz3fz80UMpmsLMtYkmVJ8nzPdT2MMJs75DiOy9QHvj/FEMoqiSiDOIQQRmyMEQIQYQkBSLm/L/xDBpQCXyhXAAFIQ6JQCqZzzmnoIVK+foDolCk6fda/wSPE5a+KdwU5YmYPRNUXcHxCYZBeDkj16TcGkMxqLflcY4iwwHpR54ACTIeQczgM/bnTDKdEPIaYZUMwRgiBYGIyzwlwzoDzFnhtZZEjnyCoA289QFmBpmLh6NTxBGCqTAwQKwyq7IVALBTeePgX9AqTEqxKiB5BFAFehRV4w4LHYHVnIehHodbQMIgDEaS8raDIOv8MbUWISBQJ9p+/gbKIB1BAg72cVusJDoa9l8dAEAJWIQEFrQPDe86c8dM2KGRop28GAEAeFE9XFQJ2WwhzxSdGMKPIIg+f+FQUb1NKqU9klglATHLA++bxC5fQYE4FQmyajIQQkmSkKLIsy6qisp6gEADf9x3HYf4jxogQX5KwLCEtohDPocS3J5aEcTabTafTLFXguu5oZEIAlpaWFEXBGKdSaYRwIpEsFgrJTG44GiWTSVVR7t5/dP/evRs3rmFJJb7XbrUrlYoRT6ZS6cuXLpwcHWEJr66tPnvy2CPk0sULiqJSSrq9XiqV3nqx1WjWEZZqjcalS5fu3r1z9ty5p08ez8/PY4SWl5bq9UahWMzn848e3v67v/KfbG9vz83O/fTDD1ZW15rN1snxkYKhKiuuY0syLs/PFQozhmHs7e+5rk+ATwDqtZtYkYv5mcFwAACtN+qZTEbXY449qTcak4kpYTmXzY1GJoT0wqWLmqYghPv9QaPeePOtm48fPz53/tLAtP/8B9956+a7W1vb8/NlAOjrr11+/4MPt1++PHP27MNHj4tZZXV9Q1EUBLEa0aIxY2hO/ugP/93y8urVq9devng5NseqKo2Gg4WFhT/5kz+NG7pj2y929gv5/MuXL4qzZdMcnT17rtlql+YK4/GkVqt94xvvQYTiRgxjvLu3Vzk6vHbjZnF27j9+73uyIhm6oesGoWB5efmNK68DAFKpVDqVQkiemytiJGUyqXdu3ozHjVJpTotEKCWKJK2vr3muu7+/12q1Go360ydPBsMRwhhBoEW0VDqlKDIhJDOT03UjmUrLsjoYDse2ragqBLDfG3S7vV6/7/oea1nIrkZ28VNKMcIQQuLToJaVtXfn9wgFCCM2MldUPXJqFFKAcHDrs4wZCKqUxF1IAhaYkaMATPkfwhsj+qzolxDWzMejvPjLI2Lyb9C5iOUJfIoJoX4gCApYbgogQEx4EuYMEMQA8FpLQWVxwGYiHyy6sWChCOW4L94OphAq3oklxItAGR885Yr5VwOIAORJYAbjpwqAT5dCsAPgWVN4CjGhGLZORd1vGN+57xwmhabsUkgvyvdhmvhltvC0nUDhj
1Mx+wCKEuUgCEBiJ5ijzCgi4fgzfIQISSEwhZSyHneYEh8hxHhCdtFAYV2xUPIAnmVCPJRBCIKpGDRgwIKj46RfKCMNA6o/9OCLF6Q9qLBwU7kbFUkEKC5f7tkHFglACCBFGGEIAcIYIwmhSCQSUZVIJKJpmizLkiwh3loDUkjF7HreT5pSxhXyxp+SjCVJikajEVXlIiJWNQMgIdRxXABoPK47rtWoVWvVY0o817FzmWwqlWq1Wr5PdF1vtdsxXc/lsmpENUcjXTcwlhzPAQDVa3VJUWRFLRZnXce5f/9BRJVHo5Hv04sXL6xvbFqWPRwOdndexnT9nbdvJpIJ3/e7nXaxWHx4/65PaTKRSCQSR0dHEKNIRH3t9dertRqE6PXXX0skU59+/PFobO7svJyZmZEVxRybRtyYyecVVZOwVK1W84XZTqc3NCfr6xu1aq1VP56dm/N9zzBiiionU4lMJvv8ybNMNidhaWJ7g14HYyQrKpYlLaLOl+erter+/oEkK8+fPb10+bW9nR1FiUysSTqVPHvuzGQyTiYy9Wbj8muvTyaWYzuWbf3rf/2Hs3OzS8vLjutAhC5eOHv33sNarfJbv/n3P/z4s2aldnZjPp5MYyxZtr+4tLy0tPrP/tn/8NY7X7569WqlWrUnE0Lp3s72199776cf/HR763Z5YTk3k/mH//k//OiTz9bW1n0Kms3a2upaqTR3fHLy2afvL61uZjLpg/39RrNx7+4dazyeKxavvfn2X/3o/Ua1lkwkWO3IhQsX5svlwWCwvLz8+PETVVUBRJ1OZzQa+Z6XyWQQgltbW2NzBCFcW1/r9TrMy7GsyWAwAADmCzOGrhMC4vEE8X1KwGA4tCw7k8lSCiBCSJJisZiqRgihzWazUjlWZAVISMKSqqoYS+xyROHGLUFFP+dcAQAgqPWFEEGAMNO4cEIfBO9nOWOhnmBja6daRHZ78duTBp46ZSwQoL5P/MDP933i+x4IEN9nHX15qEC5zod4FLLcMRBqSwghZV2AQuMOhY+LIIQSx2vhonOuHyEEWHUPb8YFKeY5YQQgX5TTqWF2zKIgTuKtG1n/JCyJeQmAT9PEayIJDITWE3A+mi9QmHaHp4c1hpEr+OwU28PPwFOvslMbvCcwFVzTOj2WU8M5OQ0ibEDQMhvRAEspBQRScQxBmTQVU8ZY9nZqdALbAwEVbeMw7ykUXCXTHRL023SgdLDL+PQlFZzjsD4qtJjiKoYBNSWOl+8ShYAiCMGpnAe7uFnWAoq0FQtVxDLzLQIIAIYyllSmB5IlCWNFVhRZVhQFS1IQclFAKe9dRTzPZ7YKYyzJEmOBVFVhzI8iKxhjEW5Q13UZTadpaiqVRBgYsaiqSLZteb5HfQIxzOVmev2BJKvF2aJl28PBoNVqup7X7fcgQpTCsTmu1Ot7e3s7u9vVamVhYeHMmTPtdjuVSmFJfvrk8cLCQkzXnzx9lkpn7t29ZdmeoUf7/Z4ei62srCwtrxwcHMqqMp5Ym2fO7B8eLi4uqpEoQnhpZbneaEiyfPHiRU3T5ufnq9XqjbfePjquJtNpRVGODg+Gw+Hnt2+n0hlV1T7++JNCcU5TIyldr9Yqeix69uymOR6qESWTyd25fXdtbaPb6z169IU1Mo1kEkLaarYwRoQS3dB39vbnSuVLr73WbHc++egjPaYrETlfyC0uLjy4/2C2PH+wf9zvD13PH42tf/kH/16WwfmLZ5LJhGEYV954fX6+tPX88ebmpuvYf/ZH37v5zrWhOSzksw8fPFxdP3fuwqW//PHfPLj18Gu/+I2VleW93Z24kfjww/d/87e//eTx84P9o9/8rd+5fPnCxYsXtrZezBbniA/+4gd//PY7X87lcts7u/t7Oyurm4oq1+o1AGCr2bx29aosy/Pz8x/89ONPPnw/kUj6np9MpkqluVarNRyOCsXZf/Uv/0Umm+sPBrbrlkolVVXTmezR0cnt27eXlpYKxWI2m6k36oPhwLZNSZJ63R4AoFDIm+MxBXRmJgMA9YmvxaK5mYKuG91O1xybAKFYTFcVNabrGMtaTCcUDEYjx3Vs2x6NRq7rBlc7ISToZC48FsBdYcjRAKGpdk5oKyhCvJnmVIZH+VRBrvYUpLdIGrL/EcInvFBCPFbxG+RyWRUB8X0KiOdTn1CP8hwAIYAXz7NqA9FVhZH+CCE+CiaEZmjalI2jiYQxQhAjLCGEp5p9jj4YQSxJLK3L8UjCUyCCbF0QBICV+yMIJQlDDEWFQbA1hCGSRMtUvL62xNkoECwHnUIJYFgkMB4ASmlYwRKGORh+PtC7sBN5Op5AYg7BKwZgms7mCzRNKohNT93nqVYGBL4/fwaKLH/wWcSQDgAx1PaVL+ebRDzBKwKI6c7xHwBCOJ0YAEVAAKGgvvgvoWWgkNdMUcqlX6yrBKMA2Z+hIwGAz0wGgI1SCCbFTW3AdIXDqxcyJNyaykwLinmFCasfYbldAFndBqRT/8UnPptujCRWkMM7PyDMZr/IMhso5rveeDwejUaO47iup0UjgJJOu1WrVsbjAcaYUuJ5nqyqtu0iJAGIIhHVcZxCsZgv5CNqpFQqaVpUi0YlWbFsByG8tLS4sLAAAFBV1TTHEKJ0OiNh6d7d+ysrK6qiGLq+trauKqqmqghLJ8fHWJLPbm5GItrDB/cRkqq1+vz8QqVa1w1jOBp5vp9MpEYj8/0PPshlc7ncDKW0UmueO39hZ2dnfX290er2ut3Z2fKP//qvLr92tVJrUgAlrGBEVEUhgKxvrOYLedu2AECKGtlY3/zkow9ty51fWjZ0vdlqUkAczy3NL8QMw3XcJ0++WFxcfvLkiSwphHi5mUyhkB+NTSSrGEknlcajR0/mFxeeb73c3d6fK+lvvXUzXyzUG7WlhVIqYYzHph7VzNE4qiuGERsOWolkBiD55jtf2dk//F//4P/62nvv/vpv/OrdO3d8n9y5fedbv/RL2Vz2X/zBv/na17/eqFeTSR1C+ukntwDFf/HD7y0uryWT8Ww2e/f251euvJFIJPqDfq/XSSbSv/itb/V6vd2dnWQiUW+0l1fXu92urusIS7s72xcvXkwkkz/68Y8zM/loNIowvnz5Uj6fn5srWbb9h//nv14/ezGqabu7Ozt7e3v7u1E9PlcoEOJBCMfmxLIsRZWLxWIylQYAEEpbnY6mqIqsGMmE63q6rkdjsdxMXo1oiqpiLNfrjUazadsTRgyyBwiFy8ydRyEy57RHBgM/GvKIOLgT6XRIML8/p/q301w1Ryc2rj003YtMp7Ty2a0iE+z7lCmDKCCEeD7v70Yp8IMogsEpq/sNkcABDwOFv8u8/PCxBWwQl3FKQUdr8TbB/PClCIk52eFhjCAzGzD4gJiegzgxjddWFgV1w3WHgQF4BSYFnp7q6sNfAqfwXax+yHUNoRMSEsxXAEuYMo7+0xMcxB8C4oPzBURTCQgAj+NEO+uglzMMseQQwMAAQB5OCpiekvG8rxEIEF98HIm8Owj0qpB5w9OaDm6m+Cgavsts38TFzGkplggCU6ce
To8qfEmGigaClefmamoEwOkmERBCiDCSsSxjLGHMvHjWByLkDkBCqWhpS4hPfMLNM78LEQIAIQwlSVJkmc1gkWVZliWMMNODyrLkua4aUbSIKsm4Xq+OzWEsFvUp6fcGCONMJqdp0cGwVygWMcKu645MczyZAAgARGxO3srKKgVePBG3LEvTorpuPHv2/MmTJ612G0G4tfXC831CPM+1N8+sJ5KJXq974fzFTz/52PH9N996s7ywOJ6YiiJ7BG1sbPqEyoo26A+xpGZzM6Ohub97ENMNSZJHlhWNRUvlsud5Z85snlSq3U7XMFLNdve1S6/96R/++5tf/uruiyfJVBJLYG1tNZlMeJ5vWbahJ7LZ3GBk9nutS5evVGqV/YN9x3VT6eTMzEyhUDg6PslmcxubZ9hsUtexbWu8sr6yvbNz/vxFx/b39g5dj3T7g48//OnsfD6bNt5862Y2nS7NzUJAHddZXpqXJbTzYiuqaYDCRrNeKs8PhpNUeubPvvf940b7d3/nH6iq3Gq3qEdvXLvu+e4P//KHV9+4CSB69uzxwsIcxjAa0dPJ7NvvfAn4nhbBE8tZ39g0DL3RaJwcH733zV/6xje+du/uvU67HdWiccOIRvWTk+rm+kZ/MGg0m1/58lf7g+Gntz5LpzOXLl1MJBKyLCMECSHj8fjunXvnL7+RSqW0mL60vOI6ztrGxtWrbxzt7iqKokU1x7VlWTlz5ozruvVazbKtfKGoKKrneqyANRJRCQWKqkiKosjKeDxxXc923OOTCgUeoJBloQkhrsu7KQS3gujiIEQx7Hbkc40QxhJHJwSnhZKAQoqYs0+5lhQI8ae4AUM1xoz/IULsHwh7GPoH+QlCiMf/BKyfsy90n4S5/+SUKJEQyopkA9dW3MiI99gVETk+lQaQkBjBzVq2QBRYBYgQb3MNAICsfgwiCCmzEtzHhxBiPsspKAYWvAWrMIB4dWVBbIVzJjAAGA46Iasg0Cpgb3hqBTLZ+CsIxI0AYm2MGKUS7mUhLOE0z41Q2BZQesr2nIZpDohTEGTmPthnxvuLXWF5aIQxnUJ3UG0AmURMkP4UhQYVBNZramyRSNJzsuVnZEIABNVVwi0JZzIohEygc8oEgp95BHAfWkuu/2cvYRgoPpEwWNOtSZKkSDJGCGNJVVWZte6RRS+36XhIHpr4vu95hAIKIZJY2CBJCOH/n603f5IkOa/E/Ig7MiPvoyrrvvue7ukZYGZAACQIEgCXFClBlLTiSv/Hmuk/kEySadckk5FaiiJXpNkaV8TSlljwWFwDYKan77u7qrrOrLzPyIzT3fWDu0dmgVsGw1RXd2VGREZ8x3vvex83f1ZVVZEbwUgsHk5uFk9ojDFyUinDVMPA96budDoZDAae54cxURRV0zSE2GAw7HQ7vudVq1UAYRzHp6dnZ+d1y7JPTk6yOSeVstNO+uzs4rx+0e/3NFVbXlq2TDufy/tB+PbNK1VB49F4e2trMp0ACBYWFgkl5+dnGKvZbG55eeXV64PXr942Gy0/iADAg9FYwYpl2Yww3w/W19axpjnZLGVkYWHhyZPHuXz+7PTM84OLelNV1JN6p1Yp+N4kCLxarZrLZcIwVBQ8HrkYK3EUx4SUy5VnL19TyoIgdpx0p90M47hSrVq2tb62ls1mFUVtXlyMx0PP9zrdzmTqra2t7e8fmaZzfHp2enza6ozLlVzK1jY2tt9//06v18tkM8Nhf3Nz7cWLZ9OJiwDs9YdRPGk06l/52m88evLi9Kx+6+a13/7t75ydnbw7PMxmcvWzMwCQ5aRv3XjvT//4z26/f/PG9d1evxN48aA//vlPf3Z+fry8UiuWK5quHhweHL472NraLhaLf/iHf+Q46St7e7ZpRlE09X3GqOeH1XLl408+sm17f3+/XKm4rnuwf7C+uZNK2ePRQNeN/f1D07AVVeXg9ueff/b0+cPKQq3Zbo+67YnrhlFomqZlmePxuNvrVcqVcqUSRpGCsWmYw+FIU9VyuVIslQzTvGg0hsORomkQK/3BoNVuRaHHn8c4jrkDBEKYEMISFFR22MktLpTxUhrE4dBZdBXVm4AxALtcvALIl3YAADgrkIR79o92/BIe8BkDhDJCIkYoAYnzDzcHIoBRBgkV0P8M/+ERkv0nyn+MMVZULLcfilgvY7zMCiLaYIyEmSNCkBepCua73hMkCELAZXtcGMQggGoCq8/WLGLODHI4aDYJzOEJLjrkzgk8cSIGoNTbyy4DSKCDIQYAQMkOtvnQLM+cQQAx4piDrHDn1KsI8l6GTydDHkSx9GtLFJAASh9kLosEfMcaBAAB4ZssHNhmhTcAIPHRFIfEbw/BtQCIMQZ8ER13nkCQY4DynhMHKE5K5kPGdbiY75dkCGMmbiNAAaWUSqslfnuJ+mL+8oC5aeoZE3E57ic3DWXCZ0leP3FvIwQA5JolBkTrBKBUo2KMFFVRNUXVVUVVFEXh2VXBWFHV5MUYBZQCSrh4mfBPBmOsiXofa6oBhfkgAoBRSrgTRxSFjDFFxYCy0PP96ZQSoipIxTCOAl3F6ZTlmEYmZTopa2lltdVqdVothNBitVoulSlhmmFgRSkW84ViYTwal4pFQzfslL25tbm+uVWuLgxGwyiOYkZyuXyhUGrU67adPjp6V60ujEZj3/ffHZ8EQVivN168em0YFmXUsow4InzS2DKtjON889e/mco4n332Wa5Q2Nne+tE//IPrTi3dUCDqNpvT8dgdDHr9bkRRQGLHcsrF6sH+i1s3r8YkqlaXCFV026mtrVAENN1QFO3ls8e6qd5678oXX9wjQK0trdUWaxDGtVqllC/88B9+rGkpL/BPz/b9qZvNlHXdand7165fPTttNev9iTsyNJDPplZWVnd3dt6+fbu1uQkhc8fDyXjMGLz/+RemYV50hvnq1srK6l/9278M3cE//W9/3za1e5/9/EsffPin/88f/zf/9L+HSC3kiv/ue3+1sly9fefmvS++uHfvgWE4qYzj5LMbm5svXr2+aPUfPn4ydqemYfq+9+zxg5vXb3zl448H/WG/37ctezzxGq3Olat7F43m5/e+ePbkUbFUVjQtjsjv/d53Faz88Ec/1lTdHU+DKPY87/PPf7q+sZlK25ZlbK5v7u3s2qZt2QbGOJ8rqIoaR3HGTnGPcd/zpxPPMm0KgGmbi0s1O2VrmqbrRrlcYhQUCwUS026ny5EXwCClQFFUXdc5HMQ7V04IywIqEYNC3ubzahZCluBB8iESDQESuAUFkDHI93+L4JaMqCbxGrAY0JhRwhgFlK/3Eq6IvO6JCRHKICo3gAngiJPAFEgPCSaJBYmrw8QpAEGEoSLmvURFL7AtAf+LNMDZOA1jKf8X65kQhhgjgBiDjGExqgR4k5EUqYmoFEGIEZolAk4KQwg5CSwCD0uAFBntoIhc/GKhWabl0QcKtQ2YUZ7zlGaSrvnfzA4uUU9e8irCGEnrUjTbPwUhFAg5kkzEJUpA4jM8yAtT/uTdk5JB/DVIYBkxNjLLXFzWwvjE1tyRI3nQ/Ec
Ic/JdpgQEIIKU0qTd4GsYIYAA8LjNk9fsAjHp5i8Syrw8KplOEGdNZxdQuCIlScWMBcwAACAASURBVBPJkocfG++tZBadqxd46Nc1zbIty7QQQrwJQJJS4YmfD9xTyjsAqChIUTVN0xQFa5rBrfm4ghYAxm14EUSKgn3Pm4zdKI6iMFBVrEDgBx6GwNA1w9Cn3nTY6029aas3WFleri3Wms3mydERwoppGFbKrlbK7mQSRkHGcY7eHbdanTCKfN+vXzSWlpYAgJ7vm6b5+Nnzo4M3OcdZWVkvVyr1iwbG6OTkbDr13PHUcZwwjB4+ekgJ6bTba+vrz549f/TFz9c2djrd3v/5L//Xveu3/tl/98/+6t/9dblcSqXS3tRTFHVxoXp+fl7MZy8uLsrlhRevX6TSmRePX64tLYZx8JVPPjg9O11f3wxjUKpU7bQdxREEiFEwGY/KlXK31/n7z/cXsulKdYExmsmkVBVVSpV/8xf/XzZferP/FuM4nc6oirm/f7y6vqyo6qPHL+6/elmwjLsf3FperuWy+cXFhdOTk3K5jBCkJD45OZlOvWy24E39i/70u9/97svX+49/dv8r3/jV9+/c/H//7E8++eQrn/3iF//kd74bE7q4uPju4N3XvvpJ2rH+5P/+VwDi3/jmtwzDvqjX8/lct98zTfPO3Y++/tWvZ7LZzfX1a1evfOMbvx6EwXg4bDVb7U7n1ds3E88vFgsPHz0iJP7N3/jGl778ZVXTGIO+7z99+vTzz36RzearlUqr1RqOhleuXvmDP/iDcrn8+PGT+kV9a32jXKo0G812txEEkZ1O2Zatq5pl257vtdvd0Wis6XpEaRiGuqHHcskXBSwIglQqhRBOOxkIQL8/mHqTKOKziHEURVEktPZJIQ5mz6Y0f0YIQKBg/kyy5FFNnlcggAAgqDfILR9ZolqUqA9Iin0mPH8od/snhPIBBSodIGJC+BZSQoj4XZaMDIjuYRb9+b4XOMNOAAAQI4wURVq5YQXNjzTzoI2xsHLAEsCB86wA/w4wjJCqKHyUjAdR8UZSlw64ElA6SkiUQJbaAOKtzdW54vPyJmIRSSWHCRhgsykBmZP530EoN0yBSyCXLIMvp4S5LA2SV0PiKLn0SZABkCsyBX/CsQ4IhROPYImZyAFJUkiMFubqagZEuATJ+yF+A0GppJoN1s4V42jeaI/jMPOCHyTzH4QyzHNoLqn3uYKUM75cgkah1J0lQV3U8/JaCPWpOAYg4R8gywcAZKubfBAcPIOJlgAL40D+6gnopuu6oqo4ITPEZRRiNQ5oQggx5vO+GsYKEIdPAWMYI01TMeaQGsYYRVEEIbRty3HS+XwmnbYQBnEUcpOMbDar6gZSFE03jo+PIYROKoUVtd/vMMZS6bTv+e1OhxBaKpTW19djQgvFQrfXPz09bTZbY3esadpk6hXzuaXa0pWr1wfD4fHJ8fb21nn9wvO8zY2t/mDgB5Gm6ePhKAoj2zIpY1d2947PmhijlZXlK9dujEejbq/3yScfe9OpqmsZJ/P8+fPa4iJgbNDvra6uXjRbrU6nVKk9ffFydXEhbStOxmhcNDY2t3XDhggBBAiJ4zB+9+5YV9VKpfT5Fw+LjjkejbL5UhQFpqmaunH//sN0Kj9yJ69fv2LM//rXvnFyVG+225s7W51O9+/+9u81hhQQ3L57Y3NzI5121tbWIISGYRBKSBw/fvT429/6zsVF8/jw3frutXyx9D/+z/9yaan4m7/xzZOTo48++ZgjBzdu3lQUbTgYXbuy1+60/vwv/vzqtevf/S/+88XFxfOz8w8++NA0jFarsb66ViiV79374uTkpFjIURr/9NOfuePx6ura1Pcums3BoL+0vKooqm2ZN65f6/Z6P/nxj5uNZrfbcRzHcZyv/9o3VldXBoMBpfTk5DQmEVa0Bw8fH787WFxc6Pf69+/fazQaWFOvXbuWzWZHo3Gn2zuvnwMIVV3XTN1KpRVdZ4zGUcxnR9rtNqU0k8nGMRmPxrl8njLWbne6vQ4UcmexRSvxWpMBXNg2XmIM+YwLFFIMGbUkfIAQlGo5MKvwIJjRy4yQWTKgVNh2JoB+LGt7BhglFABACaUUzCUnIfiRQnRJ/CaC77lYBwCQhi1I4jZYwjsyH0jRJ0eD5B8AxjOkXD71ACsYIYWDK2JHAhTsIBJADsQYYYVH/0uGxjxU8kEwAORABLwMVjDGLzcGfD+wwAFm11rGSREdk9VfQGYC/iIIo/k3TiJw8sPkrDDGAImBLzjrQ2YBXX4HZcgXJwyS9gWKM03OIxFjwWSyTBhlQIwwmDvgBDKc5cT51CWKbIYAEGckc4u46RiFkM7nD4gkLCU6ktlqC3iZNeEIErf2lAYXs1k2/tHwY0joMIhYsuMaie8wHx+HECqKkPwoCsYIYwVDCDk+KO42hHmBNGO3COEPEMbC4x9CBKEs/BnldyrkOi4Kwij0fT8MgjgKLdssV4rptOlNJn7gkSiilBJKFYSDIOz0ugqE0+kEIWiahq4b7XZLN4xcLl9dXLBM6+zs/NXr17puhFEYRfHR8bGiqMViwUk7pWIBUlatVAaDftpxdnZ2XNf98pc+WlysKYr28UcfZ7LZwWCoqmqpmFlfWyMx2d/fJ1EYBEEYhMVSMWWnqpVKv98fu+6d9++WK5WYkMePHxUK+cbFxWg4PDk798PIdSeGrj179PS3f/vXr17Z6nTby8trqbQTUwoRTKdTR4dHlXKVUXZ8fLyxuako6g/vvTIg0DR9YaE0nbgY4mwm91d/9ddAUcrlUtrJGEa6Wll8+fp1q9u///xwpZo76k//q9/7Nd8Lb926hTEuFAr9fu/o6Mgw9fW1jcnE+/wX947evb1+9+O/+Zvv+6Pe7Tt3cxln6o0zTvrxk0ff+fa3VU1/8vQZYKzX7Xz/b/7mzvt3fuUrHz9//gJCSAl79OjJvfsPdnd3+oP+9773151WAwFqGvr9+/chALdu3Xz5+tVwNKSUvXf7NiHk7f7b5aWl8Xh8dnpy48bNjY31arWq6fri4mIUx5/+7OcAsnK5VC4Vr169GgQ+gGxhodJstepnZ6qq1Wo1RVcbrdb+wbtWq4WwUltaMu3U1PcIY1jTFpdqqqI2G43BYKAoimmaGKKxO4miOJ8vFHL56dQ7PTvt97uTyQQiFkVREPi88IdQOJlDuTNV3PjwUhkKoNh8xR8WltDACEmL3wQvEA9mEqMTsYYo3kURTxljMQUS5yFULveKpZ80N3WLKCWUyGJfhH46C6GXKl0e0RFCClaSzUoK5qo75TIZcNnnCPLoj6VMCCGIFQUl8UHUqSDB1IV93CXZ6EzHmGAcEG9vrsxCmLg6ctkYSFp+mc14kf6P4qMIaRzJF+vqAUJY4kOcVr3kL4Zmhj+8oxEfnkTp+cWbQRwcoklehycIed6Qz4pLGCsBjljCA89COePTE6IRkF2OyHdAqgNkjBbuGuLiibgNIObYz/wcL0Pc5Achnog4NTsHSs6zQJcOKUloaE4cxYCYOAOAyYsD5d0unf/5OYhtGJ
zUBQBy+A9hjBSMFQUrvOFUMEZI13Vd0zi/xNvfRNPG5++Z/FjlchnEWMQviKYquq4pqgoY4DqNOI4hRJquQQyj0G+1GpPRACtY19SYxFEUeZ4fx8QwzIyTtm2rVCx5nqfr+tLSkmEY3V7Ptm3f94MggBAfH5/EMXlzsN/vD8LANy2rXCr7E29/f7+6sLCyuhIE/mDQd8cjXdf7g36pVF5Zrr07OlKwVq1UAs+HiMZxvLW5WSwUXc8/OznpdjvtThcAhjG+c/t2ZXHxX//5vy5XFlZXV3VdP3j7VkHw7OzM9wOsqP3hEEFUMNTrN3c3N1dyucxgOHWy+V5/gFVs6Pr+2/3r125MRu5nn//87t27/+qP/vDa3nXIgO9NiuX8g/ufhSH58d//COmWbmoxidzRWNOMZrPd7vZ+8pP7JgYrK9Xf/a2vrixXJ2Nvc2trMpmmUqnx2NV0zR27lm1bhv3q7SFjagzQj/7jT3O53O7O1vHR4cJitVotf/DhBweH787O6yuraxgrrUajulD56lc/8bxpsVjy/bDRaEIIFxcWWq3meDwu5gthGLrusN/r7l3Z293Z7g8GhNJiqXT79u2Ts7Nmq/31r/3K8vISQjCVTo1Go0cPHw+Hg8D3f/yjHz598vjWrfcQApOJa1n2QrW6sblhp6yJ647H453tnRs3blYqlWKlUi5XUpadSqe2trbWNzZ83+/1elY6XSgWKaXe1FMVpVgs5nI5wzAooWEUIYRsKxUEwbt3RwcH+7qu5gt5vgMSY0xIjORzOkMUxDMln1mYzIIlcUJEOSCbgUSfyKsfCCG4VKaD+ejPGCNivSMll5cH8EzAAEsSAGEsZkz6u81aCon7/yeiP2dfhRGniPXJstVfdrab/wmWo7xoVvHJswac3cVANj1IwP0YYYCw6AwghpzHSzZAIYAwAMIMbi6gi31ZgrUQ11qoAyEQU1ZI/jMhCkISnAMy1yJB0XMamX8qQonKiVkJbvDPRnrqCJWkxGSA6PEAgAgxWfXPymFRsyedQSKyn8V0HjeBVBAhCCHmDC7ihQCH1BnkQi5xiyShVnyTlOpIwj5AjCIzSNEMepLZCyUUNBQLHeXxyBxCAacQAJC7HhNkiUnumqfAxGopISeAmOMGkGNiAELu/8ETmPyEcVJTKBjzSa5EBgpEHQQ4+i+rmpmUOFGnQcgQQoqKVUVRNb6sI1kbSTzPn0zHvj9RFawoiMbED6ZxGFqWFcexoqiMMU3Xs9mMaRgnJ8eWZa+vrwMAKguL6XQGIBhFEYJKvd7IZLJrq6vZfN6bTlOp1OLCwsVF3XWn/nTy+uXLs7OTkTtkjGqaqih44o47nSaEyPOmf/xH/0d/MHIc5+mTe0dHx81We3Fh0bIM13UZAJVyOZ/LhVHw6c8+dbK5b3zj13/0ox9nMpls1mk126fHR5qmnpycqpoehOHUnVy/vgdh6GTMTCZDY5B2cr1Bv1KtUBJjpKyurv7bv/y3Gxsb+/sHrg/KxWKv10+n03HsO5nsoNfXVFvR1N6459h2v9dZW1uLYqIb1mjQH0+C2lJld2tNwVA3LO6EoaqK500JiQkhuqZ1Ot2HD+6XSuV79x/kC/lKqTyZTDGCd+7cSqXsMAws287liwBABeHRcFSuFBlgDx48LJWKYUieP3/Z6XQfPnyYzTkIwZRl25bxne/81u/+7u9WypVHjx7lcrlsNlssFt8dnXT73bXlWq/Xi6MYQjhxxwihSrXSarY67fat9967du1qs9Fot1rjkQsxTKUs35s66XQcx2tra4sLC+PxOAyDXL4QhVGv16dxHEfxk2fP6vV6rbZUKpbG7vjs7MybemEQBEGgqipjTFGUdCqVzxdK5XKpVD4/Pz88OIhIyJcXRVHEH0MeWxMUQfBzSayHkN+lSAwMM5ioqaGMYDIocSEJYxRCLEUZIkYngVui+EAsbRRoFGWMxZz3JYRSyO2yBFFAxegAnzKbQ35+qcJLEA6Fh30FS7M2yQPM5J3y5wrGWI4HzPRBfEIYYYwwY5Q/5Akawr9HlF2mlLlWUIAE/GBkGIN4e2sVIUQZhSBZAwOS/YxzZX6C7LN5E0p+mgnIBiGAUERPPlONBMI+A1pkJQuZ8MsUWYx/qohfCgxFrASz9QMQyjFUkZBENocJAgOpCK+JdfjM9E22MhACwHjdABKQSt49CIBYYo68k6LJXJioRqCgxCGECFBGMEJUHIN0KhfBOsli4gslaRIyRpmw55xLNoDROYxork+DvKNiUGYjIG90/klKYktu+4L8bkAQQIOr9zXNMAxd08U+ACS27nAkM+JwDSFRHPNHju8d4vedYWh8gMDQtYRXIIREURzH0XTqU0biOIKAZpxUpVTKZBxGaK/XGQ4G/X5fVVXD0DPpVNpxHMc5PzubTKaEMT8Ip55n6EYhXxwOh3YqU6lUJ1Nvd2dvZ2fHcdKf/eJnMSFp28rlsteuX7t27Up1sVwsFnRD3dpa//DDD0rlomWZuVxu7+q1jY0NTdeiKFpeXt3e3nry+DGhrDcYXZwepjI5w9DjKLp5/bofBm/e7n/7298+OjryptNSuTQeDCglr16/JhQghN2xa6j4gw9uTieDxVpNVQ0nWxgMh6VScTqZpNPpfq8PCDs/PwmicHW19hf/5t+rCHreJJXSU7ZZKhYpgRfNjheONFUDLNra2sBIeXd4PBpPaOQv1iori5V7n32+vbe3vLTEd64NRsNisej7Xq1W8z1P141mo/Vm/9BKpQzdQIjt7mybllFdKJ+fn9eWlnXNXFld6/f6tmneeu/m/tu3mqb+7d/9/Z/90f8OoPb2zRvD0BCES7VFJ5POOmnd0P/F//I/PX7+fLG2vLS0XCwVwziijG5vb9umtbezrWnqxJ0MhsMnTx7v7e7duXPnvdu3+Wd38+aNycQNw+DGjeuWZRFKB4OBYZgxiZ88fty4uFAVPOgNdVUZDYbedOL7wdrKSm1xsdftPn3yxJ/6/sQPwwAhFMex53lcaKBghVEWh1G31+10uoREDDLf98MwgBDEccwfTL5ji2cCnjz4Tc5EqMBJgBIPAgAYIQYBozSpvnkmgImBIvjluJ88ofMyUGmOm7g+AEJoFEWEMgJYzNicWRwQL8XEyM+84k8EXIQVRZuL95jr9PkfOZGryOpM4QSAtIAQeC1HfkSBj/k0FZ9+YIDxv2aAYYlVo1nnxEQTIICk+b6E4a3tFVmWXkZLkig2l29FRhDSQylyl1U2BIBXu/JXwOznAgyRFb1Yd4AURZrY88CIEcYYgNlsN88ocKbFnOse5rT4QgYw1x/IpCA+8oSQkQMRCZqWnBgfgpD2chBgDPEsnoOZ+Ebuf5wpfEWLAqTahwHERK6U789LeCZTkQzuSSq9DKYBaVrLl0hKwFN8HniGRwlqCCOsYIQwE1cYASjwH+4EpWuaaZoaXxEjXd6AUL8xxhhfZs0fMHGHKaq0/jf4HICuahgpCCG+6i4Kie/7cRxDCEulYiGfd9JpBUJdU23bBJTGMdE0NQqDQj6XMs3xZOK6bhTHU883LMu0U
oV86bxeD+PYcRxN0zHGuqZ1+33XnWYy6UKppGAFApDNZkgUTT03V8hZps7vukbjotG4OD4+fv78hecFhmncee/2Qm3RdSe+H964eWtpqWbqxsb2zsVFo9Vu2Zaxs721vrXNABiNhnt7uz/76adTd5JOWQpCx0enYUQgRHEcBoP6b/3Od0xDaTUvnEyxVKqqqu4HXqvVNHWTMfD4iy8WFhecTOr8tN64OMVYQQgEcfD++7cQRBjr9+/d8+Io9Ca1pZplGgCw128O3cFEVSCNvLu3by8trW7ubFWrVd8PIITD4XB1bYUyoqnq6cnpoD/wg+jps5eGZUEILdOYTsYffunu2flpsVjEqlYolru9fqPRDPzgwf0vGq323/7gB51ON+UUSEwAZU7GuXJl9+3+m3bjolgs1OsXtz/48JOPP+a0zf7BwcnJcaFQWFxYQBAeHx23261UKn1xcaEbZjrltDtd0zJzuez+/uHzZ882NrauXL0SRVG70zk+Pg7DECI4GvaXlpezGSedTiuK+vrVmygKU2lb09TBYHB0dBQEga4ZlFLbtuxUStc1hJBt2/y+zeXzhXy+WCppqjYcDI6PjiMaEUL4g8CFapSSOI75k4sx5qFZTL8KFRwvdLCMDBAiyCBkkC97mnXkPOLw526eoU2Cw3z9ThOLNyZ9IAgTqn/GJ2aE8DPpJJjU/oPLHGLy7nzgS7TjijB5VuQXnNsIK4p2iQBhIeOHl/92DgubIWDCsB7OhIJiGFhcLT5SMF/UA4h3dtYIpYlUZdZPCXVKoruSmA9ksp0Ry3YgEvUzxFCuvWWyHwMA8Z+LbzhcDREX8gDenjD+mkgIgTj3ACVWBcQ0t4R7xI+4QBNAIKtgyUDLb+Y/CDngliQMhBKnOXkHgJk8U8RYUVSIA8EIQyTsrHn0R0KIBnjzNIP7+UgCEBdQfEhI4DbiS5oCSTGPZGJkhkPyfAS9A2RahyjJpkIfhhJoCPEekR8hRgrCXP6vYKSqKkZY1VRFeI3wiyrOJyYxY4zMrEAVVVVN07Jtm/t2qaqmYoVXBVEcM0rjKA7DMAyDOArTqfR0Mhn0+wqCvW7H9yamqedzOUbjdDo1cd1mozkcjwihC4uLV65d39jaVhQVQGynHUJZrzf0veDJkyf7+wcnJ0fn52fdTjeKo4k76XU7GEHTNEvFguOkAQO6bvBxYgZgp925c+fuwmJtOBy/fbsfUVqpVKM4fvrseS5XiOLo9atXG+vruqY2mq3Tk3f1i4u9K1c6nU65WLZs68mjhySOeC3X6XUGw3E6ZduWYZjKxsZyuVzNZouKapxfNCBklm3alk2iGEOYL+QePXqwsLx8UW9t7e622t1rN7b39rZNyz45OmUaSjvOoNvf3FgvlXOZTNr3o353oGpIN9BXPv7IUNT13a1Bf5hKpXRdPz8/z+ezy8u1w4P9dquVyxX+9F/8X5WtNcMwEYT+dPIb3/qW644gYJqqnZ7XMVZVVf/5z35RqZT7vX67016qLZ2fnQVBXCyWcsVCLpfp9bvZbCabSQMEi8VCfzB88OAhY+D+gwdLy0ulUml7a/P43WEcRs1mgzHQbDbdyWSxumiYJsbo3bt3jx49yeayO9vbEFIGQKGQB4wtry7bttVut1qtluM4cRRhRVEQLleK2VxGURRu/joej33f5+4h7tjle76m0wkhxDRNwzAyGUdTNFVTX758NZ1OFVXr9tscauceU5wBZnJ9RRKvE/cb+ayJiEYpSZQjXE8BEaKAYSBwUYkl4ASrv/z4y8JflvxEdgNxRAkRxkAxJUSsBZPLsZNBYgCgdBmggPE0JYF+NeF4BbKKEU6+T0I7N3xASJiNzZMCMgckMR8AgOdMQBmgiqII+FcWwVCG8KS+FDrRmb4E4J0daQUhEkNSZctYxgevZgU4BIAPoDF+xfn8VVLdS62NeFcueeXgCWevOTSCMIaS58QIIazMdxn8LGQHI0p3WYij5N9BWXsjWafLD5Xx3CXvG25iChHGQO6Qk+GPywT4eDOCEFI5UZIgM1w/A2QukiA7orKun5fPClYZCcGUlN4CBuh8cSDRsMSXAjBGL5HDyVHKTxxCCAGDSIqCoAD1ZC6cGx+RhYbKZ2pU4QPBISCMEAcxGRNjLnwJHwCAMgoA4NFf1w3LtizL1HVDVVVudyeBURLHse+HQRAAADDG04kbBgGJQtPQM44DAfW96Wgw0nQFQhjHEWQQYhTHxLRTiqr6QajpRrvdjWKSLxQJZWenpxsbG4u1WiaTKZVKlmVfu3rl6pW9fC5rGEY+mzNNMwjCdrtz//4X9XoDUOB5kZMpuO7k+Oj05o2bV65cdTKZ/nCYzxeWl5c1TS+VSs1W+2c/+YdSpXr9yt7q6lomm5lOJ9euXX939G51ZaW2uHB6fHR2dhZFYa830HSdUnbrxrVszso4VhzHhWLVTmXqFw1OfqVT6fFonLbN07PTTNZpNVu94ehnP7mfzRm3bl19/uzR5sbm+Wm9VK6e1lsQ6E7aSqeUVrMxdYPmRbNYSGvIzTi5u+/fHfr+cDRaWlkFCKZTViaTHvS7NIowgj//9NNirVpvtqZ+AAFYX18vlwuj0SCdTseEbG3vAoDO680P7n5oGlqlXC4VS2N3lEo5V/eu/MpXvrKztZlOWXs7mxurq6l0KpvL+kFYLJUURYkpXV1dzeYLtcXq4f7B4eG+53n5XD6fz/d6vVKpAgEau647mYzHoziOoyC4eevm7s722urqRaMRBIEfBOPxuFQqLa+spFMpRunZ2Vm329Y0VTd0yuhgMJClOg7CkMREN0yEkK6rhmFgjMMw5Dkgm8uahtnpdA8O9kejEUmM1QglhERRyIeBebksHxtwqa2XTyJKKD0RmvjjihgDyVArhAhwk85EsgkZk+JGBgDlWP/lZfGEUEZZRPna9+RvE9moeCmaCE0BTIpIwB9EDvrL8l9VVV6RK6oYzMTSrE1QA1DsiIcYYoQvdQZS4Z3kBR6LkpIRyg21vFCUxTFCGGKRWWb0KA9/Cq/4KF8CJWeL5j0YAJTAzozX5rUzz6ScfUkGC+bxjZlgFM0DTzIkA0HP8gzFwy6CXKqIkMDmIECCk5C/BRFgjA/+McYnkZnMRnJjAEScDWYw2XOJGAMIwhnEjgCj8LLdJuNIDpSYPkJ8g4pw7OH5VNb4AlekAEijCrnMCEEEmLQuFaeLIKJyFIAxBgCmhPAj5DpRHu4TRJKPFyZXj1IEFQYoF0QD+TooAb7mrFLFrcfhREVRVJ4BVIWLOBWEeQLgmjY+ncPfSFEUACGWBiX80lEC4pgCwAClfEVfHMUc3kEo1e/3AWM0poHvZZxUJpMxdBUBPQq1Hu36ng8YIVFMGIMMl6uLlNLpdBoTQAAklA37vXuff2FaViFXwBhXKhU/nz87Ow2C4PDwHSGxbZnFQr5Zr3d6vTAmX/rSl4r5EmEgZaeiKM7m8pqmTyb+F/cfRjFZ21ozNP3R4yfVanUydhFW7t65nXXSr16/+oe/+7uvfO1rN9+7+f3/8H3Tsq9cvdq4uEhZ1pWrV0bD
YafTKZZKUUT6g365VMjlbDtl21aKUEbi2DQtpLBqtepNPErZ8dGhk7aG4/Dd0eGbl69yRXN5dent2zdLS5V2p4kQ8v2g2xxvb66HQRjFbHd35yI1OHi1HwWTj7725Y315cCbZpaXcvm8aVmnJ8cxCQb9vqYrWIGeN9nd3Xn7el9VFaxAP/QAYG/evMnmUsvLt5dXVxjEjlO4/b7T7/V77bZqw9dv3rSandOz8+r71Uajee+Lz9dXFy/q8cJiZX1to1SpTD3/6Pjsn/zOfxZFcRgGjNEvPv+MkOhXf+0bKsKdTufVq9cnJyfpVEbTtFark8447733XtpxLNNKX3bEZgAAIABJREFUp+137476/YGmqWEUmSm7Wq3mcrnJZNLpdNLp9Pr6uqarlNKTkxNFUTTT6PV7MWRhHEEIDdNSNc0dj4LQM02TSwMYY7ZtQwDevn3r+34ulzs9PfUDP45icRMyxoEdzk7xIgMhRJlUq8NZOSxJAogVKHYaUkYYhZDIEhZjiGBMeD06F2EAYwQCQGU/8Uu4EIf+KQQkWeUovuY9aWaFJoTz2LWUY0oYaha4AUIYICwUjBjj5DeEgpD/ASc2k8lYGQMAiX1lwrYLEkYoZRgoDFAEMZPyIwYZRJCyGDLKIKJA6MsTBpe/IN7eXkuKUpQMbckSVQImCbgBJFovyl+Rk/FleRbg5S1/QcqlXWgGv8gCVyY4gepweQ5/DSHlmstX8qXFi8hchsUxAcBY0qgk/5IxqQISihootDqi2BYXXrA2PIbPWgT+JkgcDy89EJQNH0/wSAR/qcniE8bSkwhInlaMmwEIAJLe1HIRpSTQJUkAIYAMJg0AArL7AxJ2ggiLnTbJKjZBECC+CxupiqLrmq4quqZqqmYYM/pXZllG4piQmOdwAFhMKBLmU1iTX/y8KaFRFERhFEVhHEeU0DAIgyCIoyjwg8nEpZTYlrmyXEMIBN507A6jMMQYkSgGAMYknkynrU4HQBRGUSZXdLI5TTd109ja2VlZXTMtK2XZvuc/ePDg05/+yEmnS8UiAKxYyE/G45Pj44P9N5lsYaG61Gy23PE0ZacIpfl8YTR2x67rhX4QhuVqpd9ttTttTdWn04muaQ8ePmy1mxsbG3fv3PZDP449y0pdv3b96OBg7E6v7O4+fvR4MnGbjWaj0fCnQdpOp3Tl/TvXECLj0XBra1tRDUIBhLBSLXND/PF4vLJUaTTqlmkqWAWEZJxU1rFWVheL+aKq6Puv9/vDMVTUOAzX15edtG7bBiXo9Yun7394x8lYt27edDJZqmjeZIox9n0vn88jAE3TiIKgUiw/ffKk1+u642AymWQyGXc0djLppeXlGzeuQwhN3fT94PjdkYLg25fP/uSP/rcffO97MaJLy8uE0kK59OGXPswW8kFMa7WFw6PDFy9fvnjxstlqv3j5cjgctzttJ+2sra1GhLx89aJev/DDKIpIsVjgWpTNzc3FhYVyuaQq+N69z3/x8583W81MxrFtu7a0ZJomoTSO41arPZ16lLGp502nk/Pz85iQXr/X6/VLxWK1uoAQPj45OTk9icIok83kcjlN16M4DqNQVbUwCMIoRgi1O92YEAAgwqKHVVWVsUuSaABm47tJ9EfzcnQm1UHSqgAAMUwDEWSQ8cUvDDJesvMSnYpSDJJL+L6o8rnSk5vAya1f3PUBEMqkLQ6QdpTCyEYkBCjAH0VRVMz1/kjl4k8AFXVe8p8oQhWMMB8L5gM8UAj/pZUnRPLEUaJfR0gAXBjKSljGHQiwTCmz8lTIcWRchQDinZ31RHnCAABI7FhPMq3AtRi3CYKUUYTly8z4SZiEapnLEim9UC3JLCtG2qjIJTzqMQgBd6gQZsuSlwYSJgIISMBb8hxSqyPeHMqcISMvb0cYZLx/F60S3xYtrgxifMIV8LOjEDHIMEYYcDMizlIguXwA8asoSAKIsXQTQRAyACm36YeAMSyWEkAEAaRA0icMUSaJYsj5Kpnlk4pGXilxAcWJQMAbBZFLMONUBxLpkBNjIJlrUBBUFKwr2DA0XVd0TVeQomIVIYXf7TGJ4ziSDQejVJwXkgSArmmapvHhQsBiyo0OGaWUxCSKCYnDmJDYm0xiQnRTh5ANR4NW82I0GroTdzQcRmEUxbGq6YQwRdMphLbtWHZ6YaEWRXQ0GhuG1en1RuPRyfm52+9WKpWl2mJ1oWoZhu9Pmo2GN53k81ndMGq1pbOzs0F/YBr6YDAYjsaGbgyGQzudyuYz2XxO19VMPlPI2CSOh8PR4eFhKp3++JNPMhln6k3evHkFWFSrLWpIsQxzY2PN0jVNVUzLrNfr/X5/2BsO223iBx98cG1vZyFlWwirVjrjeZ6mq5QEcRSNx66um5ZtDQctyzRbjSaLAhUQFk2uX9ltnV+sLq++enkwHA7Pzk8ZJO64ubRc21jdGA29bqdBAL2yt01ippt2sVSy7SyJY9swWs1WLpt1XTebdhSsvjs4ZJQeHbyulBa67Y6GsaophmV861vfjqI4m3H6rdbT+59nTPz9v/zT4/0nmq7dvHP7g7sfXL954+adOzt7OwQApOBCsRhSePXatdvvfzAcjfwg2NjcfO/2e/lcjgL25uCAAuoF4UWznc0VNV3PFwq7uzsZJ93ptAxNoXE8cd2UZTUu6rliIZfLZzIZ13UVTQ3DsFKp5HL51dW1Vqfz5Nmz9sWpk06rWHFHY3c4nkymwdSL4zhl2/lcznHSYRQ0u21KqaprqXS6trS8srKayWTDKD6tnw9H7sTzEO/phedynGwp4o+DIOkgRAgj/gTLJ4aHCBnqEpuymYwCQCZMcyADDItHVIIGFAAqYRzK8X3h+UYZAJTRmACeKLi8h7uliOFgoSaCUqcIIIKMMQwVVRFkG0RQUbCq8oAhNrSIgQCsYKwghBVF5XCRGLQS7m+KoqgYi4XkvwQJQ5jUtbz7RxAgyBBjSIpmZoSB+F/ihSf4bgAYw7u7sgMAQHoZQDQvRpS4m8TlZ26aSX6eOyYk046somdxDc7/kR+eXF3A4WzxG6IXkUUxgMIPD7L5twNQAu5Qth3zo868ahCeBwwAAWdBaYIEE90SBBCIJAEhL7gxB9Yl6TpbPMRDtXhb2UwBofmR6TA5MgCAwJxQsmKZifO9fNGAxMcAZHIOAPKSPyly5hggQVgjIJE/flwI8TVeWJEb3RHCSIEQYUXFioIUgeNzdJOR+V3VsrqQCUCV1qF89jsBQAkhcRwDALhvShAEDLAwCrOZbKlY3FrfWFtdmXqTOAy4zDcIfN/3h6NRGEYXF01V0y0rtbqyFhPaaDYVhAulopNOU0JOT06iKLIs2524+weHuq5BhI6OT7jcY2NjfTqdBiTa29spl4uD4ZABGESxYZjTaWAatqbqhUIxiOJisVwslfv98dTzd3Z2NU1fWloqFIvuZFq/OK9Uiidnx0HoZXPpXMExdGXiDhFi61vrrU5T1eDSYtm0bE0zTMtKpR3XnfpBRAmpVBf4/stG/aTd6hSLRXc8efP28Natm37gR5SWq1UFazGJo5g
5mWwQxJqu1BYqYRSSOGSM5vPZvb1d09Bdd5IvlwFghmkEgZ/JOEHgZbPZo8O3Fxfnt25e77Tbxyf1TCajGbplm9/8zd9UVVwoFHK5HKWk2Wrff/BkeW2tUClv71xd29j0o/j8otHrdV+/elM/OykV8tvbm4VsGivaq1cvdd34+KNPtre2z8/r794dRmFkW9bUdSvl8gd3P9jb3csX8uPhsNtuNxoXuq6vra6VyxXLsrKZ7GJtcbFWW11ZsywTIfzm7ZvRaDwcDsMwwhhbprmzvZPPZRoXjdPTM8fJrKytLS+vlMsLYRgFgW/ZdhiGFALbsjKZTMZxisVixslgjF134rqTRqMRx8TzPCA5AN3QAUR8GiBRAcE59BXMpjvnw9QsNvyj8U+hOWGMIYCSe17c+Uy4+EtMf/b3fMXLvMEDx5d+SeXJEpMf+YWR8HWQhJwI5TOsX+g7FdkHyH+JZl9z4VTW/jIOCyTpcuyFcBayRAvE61oAk3kjyvF5xMMXYIwpSaMklTIMAEohgAwgCVAwSQDwI2BsfrJJ6HDk5ZCFuIhRLFm8w1FuJjiSmXv93KUE8lNGPKvOIDdBmULAx6wZhAwwxP8DJX4DABbGQZCKKyXrdY7OiyiciMPEEt25bocf/9waISBZdImaAU5LSE83CCCAVNglAdl+8CNmCIncI7aUMYAgpoxywhlIuJ8BykVYQJiE8ptYCMsScgEAQBkBDDCIOXMDiFSgyYaUMYKwgkRBxAgABNCY0ojENPCjKFYUjd8MnMslMgdAiBBW5h8D8aEI4A6pCPEiiV8WSiljfFNGTBgLw6jd6ZI4RDQ2Td0y7cj3Bv3eeDyEAJQr1eXVqu9HQRBMJl7jopnJ5KqLi6VicTKdTqder9VutFr1i3re84bjcaVc+viTjwkhcRSoqmpZpm7qSEG7V7bCKIriUDfNq9evaao+8cPRYDyeepSCVrNVqmbDMI4JtazU3Q8/GI/Gx6fnTto2LQthZKfSW1vr3V7TTumURgcHL2w7lUqlCgXnyeNGJlPa3N5QEGGUtdvtza1dBauU0CAIwogsrK8jAL0gcNLp0dD96OOPT45ODg4Pc/nsytrap59+OvW9N2/e1mqrlLKV5aUYAEajtG11Op2V5Zql4xdPf1GpFPv9Xtpx1tc3xu4wnUrV66eWZWEMIGL1i7NXr15ubaz3el2GoKoo+WLp4aPP37v7ZUrjpaWaYRhv3r45fvcum8l/8slHEEPC4rOzus8mMWOVaoUxuLtTTafs1y9feKO+omBsZUjMakvLk+n05ctXrVY7n8udnZxACFdXlke9vjf1nYzT73VzmYypqZmM8/zZ0x//5Pzg8MA0DYTQ3t7VVDr18MFDHnCdTLZcqXB+iPs2T6deLl9Y39wyLdtxMkEQHh+fAAiCMNQ0jQAGMKJhBDHmHECpWObsFGOg0+n4nj9xvSiMIhDxBEAo5ewuIVREG8HOyXjHaylIZ5wXf44EPQshhJQCPpAkwzdlBEIIKWLzTzuEkO8flk8vY1J3RClNNiZJ/B1QyqHvWV02n5lEsEZQmSNvpYpfOP/gJN7PIjmar6qTaDN/avIEOecIJF7FAETCcRJCCAFPZ4DLYoCQKIkVaAxAihBgFFLAGETcCYApTER/gbrIkE4B5AtnkygHGAAUAg4hJ2iTzADJWKxIywAlYV9C6BAihNjsZObZ11/O41KWAxiFjCHx4fJFoABSKrovjtkzKha8IcogBJQyIOWa4l9SdknahDg7DC5/CU6DC1hFuwG5cDNR9/PT5J+NAOtl9OVEMTcCZMmrAZ4JYPI9HxoEEAkVMWSAMp40hKgKABnUxRILfh9DPqwnP5KE7AGUAcqk+ADI2E5jyiBgkFAVMUoB5YGcP2CUN7pzgmiZBmbsFhVu5hByngpDBhiTLXkUhoxRwIVVUaxpOiVUmmQjO5tzUpZhaIahRXEYhD5joFIpd9odJ51JpVOB77cbDUVVKWX9TifnZDRNW1lZAYy9ffNGVZVKpdJo1Lu9Hsao2+sSEmMMNRWXyhWElEarMRyN3Ym3vXNlsVYLI+p5/sry6pt3r92J53t+fzAyLcu2UwuLC2nL6nRa/V7XMIxuf+B5UamY7/c64+HAXNJBGOkqzmVzTx7fy+Sq3/y1rxWLxYtWi8uiEEJxHNdqS/yyDHq9i3odKcrnn9+PoujoYP/ulz/p9HpPnj1bWq4tOumxO15fX7/34PFF43xleTllmtvbm4auqgq4+d6X9/df7OxslEp5iJihKZls6uTkOJWyCYlVVfWm7o0bN8ajoTsaRVGcShn9fufrv/rNieembSsKg+l0UioW281ms9XstlqrG2vZYiFfXUyn09ev31Sw8tlnn+3v78dh4E+nt27ciKNw9/qNwA9S6ZSqqHt7154/fzocDNvNpm1bZ6dngFGsus+fPUlZZh3jleXlarVy7dq1/mCwurZm2TafhfI8/9at20EQqKoShhHCeOp5YRAghKoLC5ks6bWbqqKUy1XP82NKu4O+7/sAANO0GKO2bRqajhA0VM22LMpYHMfD4Xg0Go3dca/b59c2jEJ+Q3ueRxnlqh4RpoDQPgi6lc88yhoyiSQS8RG1pwBPxWSvaMK5igFcNqz85XseIT6tBgCWxT+VBRZjDKA5f38KGJrxdULVg7gX52wMH89X7khIc2Zl/jy4kxwem5uClgET84TEjxABDABkiPKMR5MDZPx8oRxk5uGBUQi5PIVItxuMMd67uioRaiCENEjCaqKtSEAbJvApySPIc4bJnJeA4WGSD2FCYDIRgplEMxJumYmJAvF98ikCKMCfpDmQoE2SlxKegDc7DEjAZp715ZFu/nRkqyJLB17kineWvL2gp+cIY5b0XGIOj58kBHPSUlGwQwaFUUmyPxJAKDRGUBybnOxNGhHhAAFh8g2inBdBfKZDnII4VjkiAuXriN4m0YxSSgkDlFIsJt8w5tqwhHgHc58v4LOFs3FFTlspqoIw5pGdM8mUkCiK4jiGAMaEED50CZhtmdlsRtPVqTsejQZB4BuaqqiK67q9wfD09NR13WwmWy6X6hfnnuerqhKFYX84OHp3iBVsW2bGSQHGoig4PTmxTLNSLhUKedu2bMsGjBmGOZl69fpFu92LKVQ0DQAYhOHZ+ZmqqAiyykJ1obKws7OtKQqgbDoZu6OhrmmFfH51dSUKo15v0O8NVaSs1JZNTfMnEyedNjXDm3im4YxHbhyHpaJTrVZt26aE2Gln6nm2ZfV6/TdvXnvT6XTqrq6uDgZD3/dX1rd63fbx8bFp6Zvb27fee89xsk+fv/re3/5id2NxuVZbXKj0e+2lpcVsNgMB+/jjj46O3pVK5Vwu1x+52Uym1+93Ot1isRhG0fn5ec7JBJ7faDQuGs3RsF+ploLAK1XKtdpCNpcr5PPtTjsIwpXlpW9/5zu7V69UFhfz+XzKydbrF3/+F3/x4OHDVCqzvrG9urqxurYeA/jk1RusqheN5tnZ+Rf3v+j1+scnJxCClG09ffpkNBwyQBeqZV3Tdnd2sul0yrIt08rlco
NB/+T01PO8arVaLlcRgqqqlUqlbDbX63bTdmpndzebzXpTr9/tIgUbpt3qdEbjMSFM0/RSsVyplB0nzafkbNtM2anpdBp4QRiFfhCGQRRF8WQ8QQhzMyjKYh6LEcYAgDiOZm2ojG/y/p/tVEn+wVwYFXrxJArx/CHQchm45zvdpPEVX4z7fF5GfiRsLgpxqZ2A87C2PAyM8Dycc2kELOkBuEBUEWJPDjXzPSh8DkwQGwn7mEiEpCO9CHNzFTRv0PlhUMplhADK6DTTJCKAAKSQdwBsbmZaXBp6aW8uoCL8iTqYASi7Bl6Tck4FAgj5Jk8qgvIMB09AoqR2RvyjQAgBgWJw6ApSwBCCjIozpLy6F0fNkmgPoLAlYpTnf8ZbEFkqAIgAAwASblWUICQzfQ4U3R6/cJwu5h8nkMmJt5lY/ALiv4pm11F0AABjxMthPr6YYHPi/yHi1BBjhCEAASKEIIQYBdxeihuS8s6Nt6GUArmcjr8M5ieIEAYACXtaICoRyOQCSg4ZMXESlFKEMISACGk1gSBS5BQlh5w48QUhZBTIlmAGsDJhDY1lAoPsEiBKoyjmlXK+UCzkM6E/ZYBx41/XdaPQi6JQVfBCdXHkBXEUD4eDbq+dL2RUFQNADg/e1paWF6tlBOhk4hLb1NXMyvJSp9W6OD8zdDVlm+7Y7TRbBwf76xtbmmZalrOzfTWTyxYKZayqp6dnR8eno8HIMoxGo/7xr3yFUXZ2dkKiwPO81ZXlZuPib3/wg1KpWFtcWF9bu7Kzl3Myo0FvPBzm0o6l4sbpqa7rK4sLgNBuuz0Z9k6OjzHG06kXRIQAuLBQCfyw1ahnUumlpdp4PMYqjsLIcZxisZjNpr7//X9//cb1ra1N0zRfvT68/4sf3tzMfnDnveFwcHz0rlotZpz02dlpOmXrmk5i4jjOeb2um+lhf5hxMtlMFjDoudPlpVUQR54XHB4emYZpmnoQ+IahL1RKlXLx5OgwjOJmq724uDSZTD77/B5UFN0yut3e9//DD06O337rW7/3+//1r66urOuaenJ8/PrguFwpr23YjLFBezR1J0EcGrpx/cb1IAwmo+F3/8vfPzjY33/zqlouLi0v9zrtRrNRr1+Mh8Nqbcm0rFKpvLy81Gw2Dw4Os9lcKmVHURiGIcY4l8tNxmPf94vFomUYZxf1ZrM+dacT19M0bTQaKYoaBFOEseOkdF0HAARBkE6nIUCA0CgIx+5EVbQoin3ftywLQhYRwLgrCYlZEpFmxS+U6gkAJI8lkRMkeWIqeDFxAyMIMRN9M6KMQsIQxHOacgjAzHZCvB2ESW+cRED+eFGxxU+oSSSIIktVXgjL3JMkBa5nlQU/QhBDOFv2gjGGiRHOXNDgmY5jBrP0k4zFIQ5MESpOMzkMDACBcvkKAIiJgEgBgBgpAqihgECCIAYMKHAmSpEJVRbVXFQL+FsACAGe8ciiXUjQOcYAm7sgshVAMjrLWlvGTYnu8SsvYCxJfiK5oR5Q0fQhyFWeMgWL0Qso/x1kfGANCuYAMkgBgxAoEFFKAUJ8WY+gWEW8nYEmHIsRL8jnkBUIAcRQkOU8js8AHf5D3i0xCgFCkAIRtfnM1NwSGAChYGOYdKxmhCCcDJIBCgAEfEVdMngBAcAIXTIqgZB3tRKjZ4wxpiJERC0DAQCMUFE5AAYYRlABAk4DWA4/c3aE90eKAjncyS375sdMoBRZc5SSEvGeqqoSQoIgoJQrR9FkMgk810lbwyGhkR8GE4TY2HXjOEQQXlw0A0ryuXwqZbdaTV3XcvkcITSby56cHKuqFoRRFAaTidvpdJaXlt6+3Sc0yjmZYDL1p5Mw8K/s7SGoYoopAfXzxvHJOaFP293O1PMRQqViceq7N25d/fEPf/jpT3708Oc/ypYqdjqbyWRv3LixUCkN+v2/e/7MTqW//vWvViulajl3fnzo+xPAIk0FkAWIxTlH+dL7u8+fPw1C35u6hNDllZXAn7qD3uu3B6VyeaFaOnq3DyBkDLZajZ3d3dPT09PTE8/3NjY23Mm0ftGcuqNStdZuNoajwfXrVx4+uF8qFlQFu+PxwkLFcZxqtdpqteKYTCYhYDCdTq9vbPpBmE47zUYjn800m63AD7W0c3x8sLWzt7m5V8hnHj+8X6pUGYk319cIg6qqVSsLFICjk3fLi7X/4Z//8yhmmq6PJ97TZ88r5epwEpqZPNJtW6WGYaRsezgatVotJ5WKo8jEJgLgrF4vVcrLtfLJ8fHRyVG1VFpbWXactOf5AMLBcHR+fmZZ5vXrN3TdqNfruq6vrq7W63Xf9x4/ftzpdNLp1O3b7zmOk3FyzUY7iuLpdOK6LqGE16aTyRiw2HEcpqBsxrEsS1N1w7AuLhqtRqvRbGiajhCKojAmMQRQ/iJ/zCml8wuBE4n/TP4pS29GKYFiuhTPtwW8sJtFNgCo3BWeiN4B4pJ2EaaSCJakHwihtHcWgAwh0f/P15s1S5Jk52Hn+BJ75J5597pV1dVV3dUbGjPAAI1lMCRI0IQhTXqgUdILX0QzvfBRepIeZKYfQJnpQQQkI0WYTAtlIqEFBEgYtpnpmZ7GTHfXdNe+L3fNvLnGHu6uB3ePzGpASmurvvfmvZERHh5n+c53vgMAhDjwGpS0fmpswASbj5I2/Vqf3yAjSDWMYFPytfXf8D2I5LVMRRn9IgJEGfMpASlaYSKl1RsVAFFUaQeopG6SNmURRESgCiUivXHzkmGtkE2kwtrWDRqpJbOjlai0wj7W7YEpiBuZByTriFqLFwFpPCGCbgDWdVr9hTnGOr1rUqoGnLHkTg0CAiLRumya56vRGNMDBwBKWfk1SypFMN4MbP6kI1pUYCWTyJptas4B9fiZJqdB6y9NroBAgFJiWKdKEopA0bp9RNvIhuY8tZi+0HkUMdqrsoGzmpI/EiM51/C4SFM8oqZPhBjM0cQaZlwcIZpkxinjjBFELQ7tMIdRxplDKbUQ2GYKaXSntOKbUabSOwmRmCEzjDFaVXWapkVRSCl00VVKhQQZJa1WhCAIQVnXnHHf9yml/X6fcTfPi6IoCCGU0TTLESkS6jheXcsoikM/4MyZzeaMsiiKB/3BbDafXFycHJ8yypfL1fHLozCIGKV1XXPXzbK0KKvdnZ3Ll6+8e/Ndx3EeP3ry8unz/b2dg8M3wjByXPfJo3s/++wHy0T6YVxU8sXzF7dvf64QHU7D0Fsl87quEES7HSpZhr4z6Ma+z7r9Ya/brapye3vr4aNHfuAt58vd7a1XL57HUdhuRX/+ve/t7e1RRsbj8zzP4nZrf293PB7HrVaSZkqR8fgsioI3rh7u7+6slosgCB4/ery9s00IPT05Ozs/29nd08k6pTRL07qqGKUXk8n0Yux67ovnT8bjMyHFe+++Hfi+6zp1XY+Gw+Fo2O324rB1+cqVdqc9GY9FUV679galrCwKUQkh6n5v4Pt+HMeaviXrPE2Sne3t/YODbrfrue721pZSyvPcwPcXy5Xv0N2dre2tESXous7e7u5wOCjywvX9wWDY7XZ/9KMfP
nz4aHt7S0kxnoxXq9XFxcRx+JUrl8MwKPL8/OxsOlto5pjneVtbI4JktVos5/O6qhzOPc/1Pdf3fCkVpbSuRZqmdV0TQqWUy+WyrmvdLaQbUCilutWgacpF1ECrqXqR11AXG6+iUc+xP9cmFja+1UamCdJ1fVg/AhYB/3oSDBYpgtdyCwIN7L8xj1ejP5SaoUxrmr9h+htq0F+d+rv+ycZ5Nq5Fu4W1S8MGrXmNjdTkTLDOXZRCaab2ak8BaPqfG/ScbXobQAtB24Pqw5gOAVOFVwaxseA1mNwEGpSgcWWmPKDbVcEOkzFK+qYtS1trYu6H0hplsIHQW1esiJk0gEgAhEU/CAAYdhdKY9qVUqikGc2CygpDrJtlzbUqBaikGR+h4TZlVx8USFt6ICby3wgukChAHTaDyYSUotTuEt1OjEC0VqgS2oHoGg6AIro0q/QZKGRGnX/NYFNSV0aagERt3m/QJFeq76VBZvShAAEUAYoAFBkoFCA03xiURDOwzIhVaCqQWRFipgFzzpvZ8QCABKUSQhhiRl1Xoq4IQSllXuRSKkp5VZWUOlIpRune4WGSLM/PTsZnC87ocrl69PgJdwNR10op13FwlX1YAAAgAElEQVSyvKqruigf10J6rh+32nUl/cBDhPPz8+lsOZtOXcdFhMVqsTPaGo26z+7c3hkOKIqL8ZlEZK7neN4bV69Wlei0u0dHp7dufTmdzRiIs9NsuVzMF/Pnz57keeG40Sc//OG/+Xc/JABxi7mB8+mPfnx47er1a9cO9rYOdkfTycV8PnM5DnotlGJr0AXmKVnFkS9EGYd+sphPJ2f5/jYl6vzsaNDvv3vz5uRi4jjMdZ1Llw4cl3PO3njjysOHj3e2tsfn03Y7BlnduX2HUfqrH/3y06dPnz192u0PtreGV9+49oOPv99udTqt1snJCSrx7OmT7Z3t1Wrlus5impRFPhj0HM63Rp39/f0wDJ89e37lyhtZkjjcSZZpf7h19OLlydnZnTt38iz72a1bZ+fnaZodnU2iKO73e4EfuEHY6/WGw9Fg2F2l2fGLl71+79Lly7IW95/eu3r5isPYo4cPzo5PfvLsQbsVpKs0DMMwDE7PxoeHl65cvUoIc/3AcZzDw8P5fP7FF18oJR3HuXbt2mg0iqJoOOyXRfXs2TOlpMP809PT8XjsMLaYz33P7bRbdVUmyaKuS8fhnucBQhRGjuPWtSKUgSJKqLqqtYKNVEIpVZalHhup+wAaZMbkvrje+Wjqw7q5njTGUQihQRU0asTrI1iLRDcNXlNr3LR3YO3++jesxdAvbex03kA2ChUmbnzdoJs3X8OmmhDPBAG4rkC+5nuUIYAQUAhEgFoXh5UQykIYOibXOqmEkKZRGRHMEDXLXAHVXCwQRApI33r3wGYraPBqQggx6m+m9khs4GktcMON2fDDr7FW9d82vapGKMLAY7qeu/ZyiFoaCJuLBzs1xZRuzB+CVYwwoTgSe0laWs4UNhVBQKWAKCWlRk0QAZUEc48AkQAoglSnIfZzlP3cBrQjBvhBNGLaQKwY51rZTYNRygw9JrrxxELtCCip6XJc68GZqYpIwOQB6/1hEiSrA2r7J0yA0SSVlGq5UmJmROhuEYMvUh2j6H4ARpjrOEx3JCIlhGq4CA2fSinTIWEUIBzH0T5AK0MgMQ+ETtdkXaNSpVUPlUoWRYkE02SZrFZ5npRF5nmuw1hR5MdHR8vFkhImJQn8iBKOSABZVYvz8wvXCbvdIVInzavJdP78+cuiFKsk7w9H5+OLT370setHQdT6d3/0R5R7s8nZ4we3Vsmq3Wnt7e/nRfXZ559/9dW9F69O/+/f/4N795/NL+Y7u9t3791/9vRplhW+H4y2dySQdncwGLTf/fkPi0IuFstS8qNX08++ePAXH9/69Ac/JNxZJRlFUpdlr9NttTsSgDMat6IkzXzPc12HMxoEwXKxqMuCEixr+ezZMwKYpulHv/bR82dPe73ueDL54Q8+fvfd9/74j/7knXdvRqHnOPz01ctut//ixbOqqkRVcYdHUWtre+d73/sLinB8fJSmSZImZVnoOtD9+7c5owf7257Her0epWQymfT7A0bJ9GImpUDAzz///NNPP73z1W2lIPACx3GlEJ1ON0+W2XK+mJx2222P02w+eXjn1t3b9zzOlZRKwnw6Df0gCoLnz55/9eXPCKhOu+M6xHFIqx2vVsv9/d0rlw85Y0KqoqikUicnJw8fPl6ult1OBxFHo9FwOKhFVZblYr548eL55cuHvV53PJ6enpw4joMAVVU9f/7s7OwMlGSUxlHsew4AMM467S6lbDK5SJOUGX00Aoh1XeV5lhd5VVZCCHjNQG+EgPbnTZDfaCiszYgGVjYE4+zTpH8ZGgnI/5/XpkEHAMA1QK8NBVhqjKb425E0zaNJGGX6XfNraBp0iOnUeW32y9cSl83TaE7GntFrjqvJ33HDYdgwUTMkFejWB2JGeylQstGT0a70P/gHv2aPb5BiRARpjTsgEhO0GmOnFKHrs9B8eM23JabUqdaal2BCctsvvbH0amPYm1LaKhnnZoj/ZgqxrrsioALjlnWFEyQgECUlWo9NgaICpZF03bUtJdjObhNfE1vUBQBAqVC9BrsDY1RXHfR2Qk3WIYYRBEobfgkACqXObwRKSokQysCXBARK7dh1VcBevpKNsiGa8mlzz2RT2zW8Ukmbuuv6MQDQXC4ApRSRVN8XqZQmTZuyjbQuTCGjjCAlhBMgnDsUHUKpzjNAYV1VmviASAhh3OGO4/i+r62/4zhSSqSEElBSaV8pq1rUdVLkWZbneZ7muZJESukw4nK2uz3otVvL5azIssV8ulwupBRSqjSXcRw5jjObzaezaVWLyWRGKOeON5vNk1VKHcpdZ3d3b3d7GxAePXo4vZhIIZJVsn9w4HrepVHUCyCI234UF7UQwB8+fnH79v2/fPDqg8uXBsPRcr5ClfYH/bIqgtCjhCJl0/nc9YLBcPuXP/qVLz776asXR3d/+ulqVRIA9FzCCRbZcDvaH7W/+f7b28P2Bx+8qzi5uJgFUSgkLpZJJeT0Ynr9xvXbt+9EYSilfPLi7PHjJ9evv/X2zbdWaep6zsOHT05OXo3PLn7uw1/4y09vXbm87TiYp1mv27t958vt7X0has7da9eurtL86pWrWZHPJidSyrjVGl9ccMfhnJVVQRHjKCzyNPT9LCukUvP5rNfpL2bzs7NzyoMXx+fn5xN0wtUqnZyeLc9mtQSfgwT46G/8cit0j4+OLiZn77//oR8EcRxPFuWv/MpHs8Xi+aujVrtb1qXjuD/3/vue6z158ujifMydmhAxHAzruqaUcuZUZX1+MQWgnucRzsIgcFzXdd3lajWbTR3Xmc9ncRz32t0oCquyLKuirOjZ6alUajKZPHz4cD6fhr4ft8IgCPI8BYBWuzUYjQI/crizXKZpkqdpniSrJEmAglSiqso0XRnlH2JGrq+RCERKqbJt88xaTM4JNCgoNVMucG2JdX2VaBqL1baim+b1r6bXhumP
5ocKDPt70wIromxyTLkWbbDmXvN8XO4gIqV26CPjjHKm/0+5lnrThCC0M202M4Dm3F57gVJqPb9YCKFlqZuXkEJbSC1pqpSSQihRKyUFCilrqRucsRY27pNS0rfeuyrsIAOlFCM2EjeRLwBKpEAoatlnpEqB1LDHBrVImZyAKNNVS3XrrlIgFShEYbIKTZwHQ84xmD6ChAqJpAwV1oBCS09q0EofEFBSggiKIhBQCLqFW2gBBi1ZSlAoEAoUolRo2wcApdLfaTaqVEohJULLH1gc38D9CE2ygxQVgiIKKSJDoFCDUkQBA6AAFBRVQFERLRehh8VrDSZoZgYASsI0ji+lwYukAmlkJ/R1oSIoCShKkYGkKCgBQhEoBa0oS4yGODN0AUUBKALhklDTtUD0HB5dJ6Bas8i0CiAlSIDpWcG6noEgpTLgPQjTZICkQX4sCkQIAYqo0RtErKq6quuiKOuyqopSVHVdlLpfUjOaTs7PJtMLpeEnh9V1lReZECKI/DxbrZbzLE8B6SLNkkIu0nyZJELUlIKiXqVwlRdPXrz8+JMf+2GglOj14u1+1PbEO1e3pJr3djqM+VujK1euvp2U8mQ6Hy/L1Xx1Mh47UIY+OC3JqIoDN/acVuj6LjoeW2Wrv/X3/uO3Pvz13sF7dPS2d/jBh3/zt/beeRsDlmYrJJAsi7Nxcv/F+RJbo4P9bkT6w0MJrqTcC2lZJJw4AW+tZumzZ4+2trq3vnrMXRcpj+JOkuSO4zy499Xu1na73UJRMSKmZ+O97V3HcRkhRZqN+p0kWYyGvYvpZH9/5/j4KGpFs9m4Oxienp0Ph6PT05PY9wKHc4Ke60WtnhO0Vtnq2YuTrHDuPbm4/+TiD/7wR3/w6Ve3Hrx8fnLxbHzycjl1othv96LhbjDss9g9mWRHp0l70I16w6/uvxLodQe9KAz+5M9/cPfhiw+++QvT1fTevZ+BlD/8ix+BkPt7/cDPRFEz8JQk7VYnzconj5/mVc459wM+XUzOzl5NpuMnT17IqoqD4GI8UbUY9gbbw600y8qqTrIyycveoLe1s312PlmuklWaZ8tUSEDCNDWh3x8Erhs6Lkocj8eL+WK+nE9nF2VZIMq6KquyAClASKXDNSlQmdZU26IPDcL8eq5MEYnOg0HqJ1dZgqWGmaV+mrX110Z509x/LeoHHV3qwR5KGbRWNwYpZbwHAAJQA28DJcz8XSP2SQnqmRrYBPhoVCEoEgJUK8HhGil6DTux52MRMIVEl7IVIoI0dD0EaIaNm3CQaLOnoPkXEIHqnjGwlU87dcpo7NObHxwyRnS8yRgDXS5GiUTZuYObY9gAtFyfjjQVaHI+IaZtWIfqzSWhBTFMNcbMsd3sHLYXj6j7tRB0CwNgw3RtdC0M/RM3khON6pvbSXAN2JgcQpNE139nbztpqI2WPKxxFLTlVouLNREFmP2HdnaZau4ZMcpHNjckRnRQL47unLDovqmzELKWuzJ5hqkZACGEUarZvnacgx7vpXTJnVIjF44WpLJcAl0FNnsdCSFAbBsipbqiIJXuUlEKdJcI2m7JBujcZCsjmohDb0qppdK1HlxZ1nVd13VZCVMfdhilVIqq1YodzhglVVmKqlwul+PJhFE2Go0AYJVkWV7mVcUZJwTqsqQUayHLshB11YmjrVHf95yqSD2G1y4feA69cePG/uH+/qVLh4fXh4Ot7mAElHx5+y53wyQtZFZ0OhFjanu334nblFKlZBD5lRSdfi/J87//H/3Da9dvIOWffvLDf/7f/Yu/+/e+3W35+fLCISJyGWeYJqtaVA/uPnWd8p0b14BQP4xWyZIQ9F0/8KPVMp3PF5Uozk5PJvP8ypUrSqnPfvr5jRtvZ1nCOQcl9/f2zs/Gvh8ogozzBw8fvnntSqfTkUq+Oj6/8dabd+/e9/3w5dHJ06ePw9C/mM644+RZniZJGIacout5YdSi3J0vlo8fPXvw8Onde49+8ulP7t97Qrjqt7x2wC/S+t2bNy5fukHDNu/vtAYHfntw9ebPtzoHuwfXK6VWyXI02nrx6vH9xz++cvj2u+++N18mP/j+x91B/8b164w6kd+az2Z37nyxSqeMuL7vJ8lytpjXonY9jzJn/9KlvKg59ynjW6OdXrfvB8HPfvZlkRfD0WB3Z4e7znQ2m85mtagqUZ8cn2g88/j42HO9fn+QZ1lVlQhSKeU4PAz8KIwoY0KqNM2KstSi/1rsoa5LPVaoaXBFRpEQKQ0cpO1JA0JswiYaFdWtxdxhxELNfy26ApoXogHdBvu1rya+N5KZsqlu0saAGKOxPnKDKa3tuH5/w/oTS6R+jWLXtI/9tdZ/M0FRVnTGYABrSGCdvugUwV6AsqVZAN36i8o0jCoTkTcpDpOyQiQAUoPDJu4GZYewrAu8jcskWpdVV191T1TDtl3XalRTvCaEKBDGaJp6NWmwqs1boNn0mvYOIG3J3l5Pc4P0rTC5BMrmOGaZlJH/p7onAQmgFEp3oikzKFT/MSJaOT/r4fR56rKNoUxZ4R4g0LiGpikMABRZ01z1fbRVcqkAwbgKJqzHRpRrOpD21Wjtr10+YhGo9W5oAhYw/SzSAnnaV5mAH1CPVNP7gBCgjDBCKRKiCGMOIgVbHFOKaF0gfSH6OdQOgDHGuSl3bCbIehB809DOGAM0tI0wCkajXUYgWc2ropSiEnXlOW4cRX4QFVm2XC51ogAASsiiyl3utNtxka4oY4yqTrstZQV1Hbj+9fdueJxeubJ/5dJBv9cp6oK6js/jVVLKbBm1oqIsorC/t7N//uKl43n9QeRzzijUEuJWaz6f+lFYK1GrynGp65HHj++U6TkU937/f/29VsSJSgjWg0HbGXWjODg+Ogpj+LN/+8mH79z85i98WJRFFEUAggDOJvOyLr3AiWQnz5M337x2cnw6n8+vXX9r0O/cv3/SiuNktZqMxwcHB0fHJ5Tgw4cPCGNPnj8b9gbtbr+q7yxWaavdffDwUW8w6Hbbsq6nFxfR4dXZdBoEvuu4rU5ruVy22p3pYrVYJrfvPv/i89urpJYArZ7HHIZE1ln6B7//v3OvU9bkZ0+Pbh1NFhcrj9UgsmEv5Epd7+7d/fLHi/Nxv9dVkv/ZH/9fYffw7Xc/SPPi6NXpl7e+/O7f+e2jZ3d3dwa/+Evv37v/lxcXZ6cnR0VZSwVlreaL5OJi1h+NpIROtzca7YwnJ3HoIiHD0Y7nOadnEyS8FuXe3l6r3X706JHruo7jTCaT6cUsDIM8L6s8Hwx7WZZqHUHucEJoEEaMMQEwuZjN5wsNYki5VtvUAlMKVA1KCWFCNcv1RtjY/xtuANd0TgXKlDAt+grUxGHrh8jqHYPBxG2xdI33aNhaIaDGzRtLpY0NGBNjrKIRZaFmktX6Cd08z00387rp/mtem6iUAtmwyRus2HqyNZFdh61rR2GNCdiCuREnMOgwYmNmEBmhIGVNGUWlpKwpJbpzWH+KDVkRrTlHzVkkYKiOTa3ZvouNObVJh6bn6+/tXVR/xY8pnTpogAuRKKv93VwMEt1
vpX2jBEQjia9xImXVWvWxpFSAyPRNQwChATSDrSEAGFaR/Q1EzWwFAUgVUdruE9QjtqQywbhho2pRDn2GlFKzK8CI50gpCEEkVAFIWetLRzOQToKSOm4AQP2bStrJMBvKrHr/WLemZ9rpO6F0uwNpqjFKgUQTuugygjI9HDpEooxwwihyTUYGRepaauuvb5NmXitFqqrSpWBd40VELagnrSKuEELWAtYTkUApUdVVEPiOw1FJKVS30xFlsVhMZVVXVQlSVKUpGjNKRVXnaYpKOpQWedrrjEKXlLXMs9qldbff9j0eh8H16286nCbpKsmyAd/qtLsKCUgyavWY40xXiZAyYv5wsLWze8A4T/OsJ4Egr+siL2lv0KeeUyMOtnb/4X/y3a8+n+ml6sSDz3788Xe+861W5KIEn4AoioP9ESXw8tWrLIX/6ff+xf7lS8Nhx3XdPFOE8SBuFWWJRN17eDuOWlEc3f7qq/t3f3b9xjvj8zO9ON1uhzGWZ8Wdr76MWlG3197a3kmSZL5aSEJ+6Zc/Ojk9VQqWSYKE5aG3O2gHnu+6ThzHnuf1+/26Lg8uHSLltSAvX5396HufFUC4w9qR4wXc8RzXd3Z23//Nv/Pv3bv7arkSq+n92O385m//xnz28l//b7/zPFl892//5nz8XOTnW922LL35XNB+P8nm3//+X1zM0yQvdnd3fvd3/seD3e3J+DQvpr1+qIQMgmA6WyxWq/k8ZdydzZfPnh8T5hPm9PuDMPJbEV/M5v1BbzgYVGV5dHR87dobVVlRzi5fOTw+PhZCjEajra2tzz+79fLl81Ycx3HMOS3ylFLa63biKCrLcjKdVpUMgmA4GqVJIrSQlKi0XKA0HHxlRDjQztpTmp3X4BDrGLkBD4zxtSG1yasREF8rE6/t0v/3SxPKG6zgNRMFFJpmUwAA3R5P7Qc09POvFW8NcmUdFVhA3xRBGwmWr/2VAtG4qNd+bP6Tm7p0BFEhEes0wBQ/JOqo28A0elYymj5cRQhhawBmfb72AhCUHqxivY1dPts8jJaUDwZiAgSDTGmbbI9CLNxv5Fr15ITX6v3aKStAQGkF9pqKtm3C1d+ZoF9DIna5pVb8JmDEXu3+ASUBJAIBEEopVLpmgIRIUAT0VBgEINJU+83WMeAQEIWoV40SDWKBMDQiQEB9tTp20Ha7BiSUG+9BQIcJUhoFOwBEQmFD3JyAnnRKiFJA7LZu9CuM31Wbu8q0k9l7bfisoLTkoSbAoQIFAowLUUbrn4AujlEAqVTZUN2UVFbSTtXmVdU1UwDImK4XNdRsyihKyo2MKADBUlRC1FoAUkm1nKVFlvguf/vmzdlk/OjhQylEp9PxHDfNco3M1nUZxXEU+WWxAlnv7h5EQeg57M1rh+lqUWSJKJI0l3GnkxXl0cnZzt5BGAZ5VaAUCFJK0Wm3e91BpwX3vvpKKkzL1GVh4DHPbQ1GI4G4TLNaKAXk5o1vTcafno4XLRaqKgUARhTnhAB1qOr0h/PpYjhsl2VxXJ5+/FR8+pNbf/e7f6MoK9cNCHEodaWqRZVvbW1T5OPxuC7zX/+N33I4rJaL0XCQpQkiLpfLs9Pzvf2D2199/s77H0RRVJRVFIZ7e3tlVdHJNC8XfhAs09VyNR91IyRYFoXvB0EQ9gaDZLkihBelnM1W3//4k5UkQeA6nFBOXY87PkdS/tZ3/33uuG+8+cbTx+f/9X/xn/pb7938r/7LP//T/9OtZ6EDLx78bHnxpN/ysBYXs1mv7SU1DWJGFqJGF4B//IPPvvnh+0D4R7/2qzdvXi6rhcfx5PgoyXOaUUJwMp3P5vMsr4EowGJ8MaeUcpYFQfTq+JhRQhB3d7eDKCyrUsg6jAJKSZKsvvjiOIqiPM873W6ZZUevXjDOXM48z3EcHkVxWVVlWSVpVtVSgzZgZ75rfgcQlJUwWLahOZjHvdn1FobdwFvs96j5lBsSBg3wgrj2HY1F3oijASxPVMMD0lhYE+hD03TUAMgWEyC67NYYTPXXH9w+YzZfBz1OQCqFmz6gIcXYq15TUe3pKdWEuhv4D2gDoZdLQ2WGYY5EG6+mA7dJG9DYDfr2+5d0IEls17I5CaJDTgSrpoyWG9pE9/qDkQCi1PC0hVB0uYMgBUpJA5rbQNb6Ce0SiG5dQ7BukBiKpBkwaaoHVtEUiVbfN+UaJCboVkoSvUlIA4SgEWuDpv9D2iREKYKKAFBFKOgRLpRpWiUgAmGEUECiCLH6z9QIw9lrJEgAKVIKqLMEaq+dkKbIYS9EbyFFqJ1gDI1qB+p+Oj2+mVoXi2CjCn1NpNnnenmpdVP6CAYuMmNhbP3eJJ/EVKgYZYwwRGKmXlsfZEVRzLNnWsCoiW60/rMW/6mqSgihWVjGS1RVJQQhxPM8RHC404pDKWpGqajr5WK2Wi1d1xWARZ4v5osoignny9VqvlzmWUqJ2tvdOjzYdV3/4uI8Teb9butwb3d71I+iYHtnRxG+tbsftrpxp4fEdTyPMsYcmufFk6cvFrNqe2vv7oP7w61+3A76MXie5/vuMkm8MFRIs7wG4mRZfX52EUetwPPiVuT6bGdvNBp1R6PuzqjXjoLId6u85JSWRQ2LQjn4wXvXq7KK4y4AkyAoI0rV+3v7z56+UqiuXL26mC8uHV5eJenFZAwgv/jii/5gOByOOOOdXo87zPP9vCz2L12ijKdZfv/B/aKspFRhGHQ6HY+TKG7t7R+22p3haOT5/nQ2546fFfUf/vGf/ZvvfRH6nDvU4bC90x8OB61WO2wP7z18WAv6u7/7z/7p//DP8iSn6YN/9a/+pVi9aoV+y49913c95vuukCpsBU6A6Sora1kJooBTHsZxu9Xu3Hz7zcPLe0kym86mVZl7rj8Y9OO4NVss0jT1gjDPk/PjyXSVUEpqWRIkZVmVZVmWIoxDoWC5WJycnhRFXhT5xeRCIa5WyXQ67fcGSqk8y5RSUtRB4Lsu19FMVYksL8/OzpIk1SNI9YhdJAAUhaHoARJUYPRdvo4lEDMiUTdTMYv/0HWdDhsxqwaQ2XQVYCurjScgZD3Dy5jTNTrT1BZtELZ2Ofi1Izcyn8Sc0GtET7TJB2nOcqPvt/m38RAmt0a56UUszi9et/v6jIXOJwDAiD0qA/cDKCmFVEIpocsaUommAAAAbO1IFQAgpWZFpDm6bXoARGp6bJvUw5792uiYSqe12gat0RGuxlmM31CkEY4w7kjZAo6+T+t8TZq6NFa1MOC/AqQNOAaaHUUZASE115/q+FcqqYzgmhSadYNKIVID4SulGZ5aPcI03BrFY5QKCAIKzRSyV2Yme1ECCEJKioRQ043VpJjakzFGAVQthTLQv7RCGkpKQazOn35XlyX04BkFQJA2a7duoQAkdI1ISqnTKYKINl9ao1u2nKYoUmLnFABIoWqKaKsGICxLVid4hDTWHwFAK+U2TDNNztPPn7CPDSHEZZQ7juNwAMyzjBIc9AZK1ghyNb8IwwBAuVEsqno6nV1cTBTlw+GwN9oGhPHpq36v14p96vg7uztxwHe3hh
RUsprnRSGQHr5x3QtbtcKyEqHrLZMloOryVhiGcRg8uff80sGVIPC542dVLoWklEpQQegJUUmBjPI8FaEbtaLOfJYognldDbe2vCBKssL3CG+5DgBvhYNBRwrleX7Y57c+v33n7qP3bt6oqirwfb3ORZGUecUoirycT8uyLI9evVomyYcfvJdmyc7O7vj8/MnqRRxFW9tbaZEGUdQbDJeLJC+nQRDu7V364ovP333v/el00u60fd+J4nYtpB86lPHVKg3C2HGDh48f/Okf/2mLAbKaOxhGfrcTU0YQkTteWop/8t/+N0dHp2cvj6OgI2qnHURHp/Pdy1CAyGQVeDxTdVaB53pCgu87eZ1zx00uln4QHV55w+V0Ors4ONwPfPLy1fNO5Huu+/zF87PJ5J133/u1Xx9MpvP5YvXy5OT45Hw2X5yfTUAqzwvjOEqSZZLmCohUMo7807NzKWUY+hcXc0QghL58+aqu6zLPtG4gY8z3gygOmOMgOv2+O58vkyQBRSTUlJCyLuu6ElKURSGkbjKpzdNhQi9LwYC1intjDnVYQ4metcLW7HP7amxI8zLPnbXrAEAp3QzVNYatNP6iD6KDTbBYuKn9bgBSG70FOjpHtubCrG237WjTpu9rsX/jnEwgjxbNbvyBRo9h/aRLZRS8YP1aN69J1BCBJBRBEVAUlFJSaLMi7CUzQgwQTKzlAjvVHqAZhWh0GgxpHpvKrS07rh2s+bVN761ZQGZ9N3AuRFRg5T7WV2EBJWOijd6OUpIyW/vUeL0C3dqqfx9AWa0OPc9AO22DDSFRmpNAVLO4JosxnB9QgFIoSQigtr9EMyzXmZOpcGg7jaC7u5CgshXvZsCAApBooTBQSinbNwegWw3AEvEok/QAACAASURBVB4AQSo0wQigpdiavafWyhVKGUpcs+xkXak3DXF6foCUukGaKKUIAgVDl5Ky5ow3CJ5OO6Qw+1xP6HQcowRhmvKFUFLqwL+5fUIIUdeatc0YY5wTRoWUnPEgbIe+H0QxJ7CYz8paZsnyYnqxygqC0A5j3w9PxxfT+dILww9//v03rx4qkb159bAGVlfl5Pz0+fP80t6OlKrfH4atjud5QkjHC4QohKopdfr9ruvx1Wpx+dLB9//kszevXfnhj7s14PkkvTGKg6hNOSoAyp358YXrxsuqYg62gmh2PlOc1CgPr1w+ONiV1TJNJqmneOjPptPlcpXneS2g3RuUL6Z/+iffv7K334n6VV0w1y3KKgxai+x8OOhkRb7Ikm47fvLkyW9859s/+OEnv/or36rq+vLhpS9u3c6SzN3bfvr8+Wi0vVwmEmBne/fo+IRxx3F9zmkUh3HUGg56o63t84tZ1O5KKYOolWX5fJn+5NaXRwl0PU5oHUS80wpbkdfu9Bw/HC8zFjpe3Kurus4KIB7iaLmclxWlAXcjplhFOFBAv9dFQeuaZs7C9xFrhzPe7fa63eH4/Ojtt6/0h926yi5dupxnKRKys3f56tUbs9mUEMIcFkTe+eTM98hgcDDotRkN8zx/8eLVxcWFFGKVJK7DsyyUokpWyWDQb/e6eV4sFysdIighOp226zrKDJCTaZouV9NklaZZpts060oIVRdlkaUrZbFHq7ZmzLRUijIKiMoI665NRyOo0Jh1Ynp1YJN1g6/XYAmxOEJDsLEjUIRSBJAQKhVKpYAoLSYJCvXO3zBf2PCPTCxvD9j4JKWNzF/3sm03kiIxVFMlUYuRrSEfjdJ8zQFIALCzCZSU0pQUlZ0NYIN67WAoGjnJddkAAVCrqK2tLX33w0uodZY1uEGMMoPWkjOogqXHACjdi2v5TnqNtcUzf64nN2pnYBtZv+6HEZEQIxWHTQax1pMwxHx771RjVYkVl9Y/tAVgc3xNWtRmV4+KAaUQLciIBsiCNTmVENQtDvoczEebMMAgVxpTIvo+E/1xpHkLm55nRKtXbUBMBQiEGkUqYlMN40qN5BxBApQgtTIbOnDYAC0RNuKaNXOUEACghG/43YY523heHaygURpFRFgXivUWkMJsJh3/c8bpxkBgqpMbG7+g7aYRdV1VtWbyua5LGZNKKynRPMullHmWCymiMCirYjqbtlqtra0t13Unk+l8vsiLMoxiCfDs2bMiT9Nk6bq81+1JISkF33UQ1fbWtheGjuMukxQAi6KkqKqychw/TYvFYh5GQTuKvvdnf8q8+NLh5S/v3Q9b3f1hGLe6q2RJCCgpAz9klNe1KsvS5ezo+GUh8g/ef2d3d0tWBUfRjYPQow4hqHCV5ItlnleMe23G4P4XT268fXDjrTeAIHe8oqxEVTGUnMGro7Msy4ajrTiKGWO1FHmRt+L48uUrhJDJZHoxOaeEnpyOpxfT0dY2Y+7LV0enpydxHKdJEoVBqxWHUavbH8TtrusHSmFZCQBydHL2T//Jf48ECXUYr3e3hnvbw0t722HgLdOEer7jh4SyPMuT1cLzXKR0OV8Ara5c3iOKeJTHAfNQ9VsxVqsyPfODdhhGtaKTeR5F3V63/+jhw299650bNy5XVU4JI8wFJLUQ88UiSZNktcrSlHN+eHjp2tWr21uji4uLk5MLKQVnzmq5lAqUNJaFEFoLOZ3Nq7JCJFEUce5SykGpqqoQ0fWcVhyHYQDIpEJRC98PCaOyrutaFGVeVSUiSCVrWaFEPQ3S7k0ktDHloJFJ0+5lhuhSREIoYRsdWISu+Wl/9UUpxbVCl8nq10+L8RNogz1ikB9TZjb4DUH8WiuvfQbNRxBiIKDG/TQH3/RJWkWGNOLEdmrs2twZm24av4xfUOvnsRHmNdZQyeavzA+U1GC70gZHi6hCUxMARGQKBKUElAIpdRaiYROwnbG4oe2jz1U1P7FuxF6nEY9TxpRrmPw1678R5wswlH/VWF9z9miCf0IIAtZ1rUARSnUGZPWriRH2MRKpgIgodblZAiAqqQCVgVR0tdcgJcZbIoCSFvZRTUlZ6RwRAZVEHTEgIkjr6EwypnelAqkkmLKVSShMrUKtFxmaS0ed9CDi+h6bfmdEE0loTwCmAmILTPohaHqttQ8wQ+XNLdfuQUqJZGP3SJBSIQh9XqKWuqsNkSoFmhqraz92igXlnGvrjwQ5YRIJIaQZzielLEstDFwiImecu04UxHUlV8tVlmdpmjJCAt+dOCjrEpCcnJ1rzT4hJWHMD8jZ+UQipYwGQTjst/I8l6JyGJQAw34vS5PVKnnj2huEsrSskyxfrpI3rr0RtzpZQfK8iuMB52ImxaXDvZ/85cff/pvf9d14kaYnZ5nrRpTx0Wg4Pj9DqCkolLkoc4eT/d2B2woO9voUin6r47soi8VsPHH6XcadZ6+OZ9NM0Q5ltD/YSl6+WM6TxXzm+F4lAYBKSThjYeAIKThzFotlLQRj7Mrlw9u3v/z2t7999+69s7PzNMu/8Y33p9OL+XwZRK3VMrl7936n2+Wce54zGZ/VdX7z5tvIvSyv2r1Wnuftdnd8du453he3vjyV0GKkrlXs+BQoCBn6bhhHXhyA3z6bJczhg+Ho4b1HgIXvEc9VY
2cgclSG/SdIuU0jAMqVQYhbpRiDUBBB1EpWOQZMurlFEhEQ7SU10FAbyqfk8aV1Vha3UDbS4u/k8vWEIA2tAEdMAWxBBBBDFUUIf+KAU4wgBgoBSXSkqgs4QC+Ed/+mUAAI4DcSFWsXsn1o6dbQKjzbYAQqSSpNA6jSXQ5X+12IcxwZgggBFqZ//X2f51QZhY1tDBXwRCmGSL05ZhkCaA6y0n/01H+jErpdKhdjHJXduzA0k7h0rUkVcQJNMz/6WzAqn77aAZcK00k+6QJlrrGB+QUDgppQBCSaiJgBJCSsU5k4JzxhgLwzBizJdREDHGoohFTIgIaloCddQewghxKYQQkosoYrp+AABAKkUtgxBCTcM0bYKpEDJizPc80zR7erq0SbO+siwkDwMvCurNph9FQjAhpDAtatsmoYhFUavlEWJvvfGe7q4e23L6egbLuS4ZsYtnT5w++ub5U897zdOmCfLZAoaby9uu7x/dXuzqNywn71i9pWzo1WuVhUtj56enJ/zapWIuXyqVKXIAtroG+jLdpXy51zRzzPekCFjdi/xwfHwsX8h5oZvP5TJZO+JRV3eX42Q838vmSwKaGEgWBSZBnDNKDQ6QYWcFpNTKRkIFzSZG8PL45RPHjp8+cWJ5qTo/N9fXP6gkYJzX661cwQEICAGqlRUnm7nnvjuuPbC3p6csperp7qfIFhKijFXI5cKgBZUQIqjXqrZFMxlHcu66LgQKEKvZcA3DsC0bSIUhZFEEARCMRWFgWyaysiRbIggqpVrNRuC7SvKZifHZqcuTExdWlme95rRyto0Mbbt67y179l7f1TvEIaw3a35Qq9WXFhanxi+cmJoYA9C47c77t23bKRhYWalVqyutVnPrttELF85aFrZt23eDuZmZ5aVFBVS5a6BY6u0fGh0a3dLVM9DV3Uup5YY1122tVFemp6eWl5c816WEaqWQFJILjpRAQCmFMEaGYZZKpbm5OQSA0qHI2qmaYLjmTV7V57b3KU7c7BM4jLdqHOUDkqw4a5ghGZfIVmkzwNoNqgWD2PE5dS6EUMsxCfSnDRjp2aYuJ2WK8detqAMQlMJaH6tk2xNyjfIAQiigQhApAKBSWIfmS7GqmIGQ6zzJCgAFMdCXFFopQwwkgERY15NRAAkIJIBAQomQAkAJFaubSEyEAYAxwEOEkNKO+QppdYzWEWsCodMBQaSLukAI23kDIEjSCsXhuZ0YqMBazQ+Iu3YCXIc957/nSD9I1fYXSjj69RLDFXE/dQ/vfaGOYd9jYmnpJBGAkhHAOnDvIAZXGnjtGauVLiCEMI4/ju1YuvNqqSMIAIA6RQTEsRIPQgAhQhAoqBDCkGgzE5BKEYMY1DBtixJimRbnAkIZ+DKXdQyK67UVzkLLtjiLlIhgwJQZRcgPSSSlpaCNSM/Qpp1X7drdN9iHsaotLCxXqq2VygtvvTI7dXH88tt+CAjJX7X75gO7f3X7jj295SwBru/WGw23UltZmLt8vrI4PnaWS2ZRg1BCMOkp9Bt2vhkq6gjb5gIRG5V5Q0RoMRBBvRX5y9XIc7PlchD5diYTSdGcn8/nsyJiDAZ527Yo9hlHUFmWCRQnACklDYxZ6BlWnkBVbzUIQlJK2zYHhvpLpeIzP39GKtZstahhMsYrKyuQwlw202o1a0utfQeu2bp9u4LItO1CLic4UBDYtslZ0Ky4gPOIBbZJTCn96kplaoZQ0t3VFYaRImqkv8f1fKXE+Ph4rVpHCEWMj44Oc84XliuYUqEuIUINw8jmspwJzlnv4FDD88PLE8ruKeSHAbQiAM9OHnv58E+JEqXsQHfPpm1X7Sx1FQcP3HD7wfcZEk1eGn/xpefPnjnfP9BXLBa6esrDm/qkBLv3HLhwYazZkraT275nf9dKbXF+HgCIKYEIVpcXpqcmhYIQIsMxc7lcuVy67tprbNvxff/4sWPnzp3jQlimSSnVyQmE4IxJ3w8LhSKlRhj6AAAgkVSCUKKril5psyjVTq6bOnQg+tqXf5Uf7+D601/j0VZZvdUERBrZk0zO6dHSLqTvRQCUUu2qYckelx26ZaUkiAlYSvOxGkaAEBJQZxeDOLHHASAlT7QlQuehBwBrFQCA7QxISHEpgUAKQSQBUBBJhBSAQMJVA4DSXk//11/+ZrvGGsAAx1JJO8M+QCgOVUMQQUgQAW0v/jb6x3kgUKztQVB7kMZuhHH4KASxzkeLDknd50QCiG0DMLm7jdn//79ywDo7qZnZAAAgAElEQVSgVAmDnHxH63omj/k9FERXohzrT7nS0XFTaK2z15WG6mhcJwGsyrNKKQmElG3fZyl19JbgEWc8ioIoiqLI56EfRZHggvOQMy45BwAgDIlhxP4YCkghOBPaxwNCqP2IESGmaTgZx7IphhBhUqvVOWdRxDhjjmUgjJXgSoqIhZGIfC8wrEJXuXd4uH/zppFsNt9stE6fPrO0sDw9Mzd24hQlkNqke3A4VygMDvaOjAwOjWwWiszNLk1Mzc3PTVcq08qv5aksOlQCVW+FSystRGg+l7EpLhdylg0UdkJuCoQHh4aG+sv1SkUpzgGnmaKV6+vJO46Fxy+NsdDzPTf0fB56U5fHOQuv2r6dEupks8gwpGDlQkEIXq1Wy+Vyz8AwNmwO8cz8cjZfIKYdBoEQgrNwZaV+7NiJ06fPHj96FBHHMJ2JybmucjabzTSazdqC91u/9+Xb77jZsjGECiFkG06z6bcarSzlACgMoOs2WOADpQLXHx0ZPnPmbLVSzTgZYIF8sTQ6Mnp+bDzwo3qzmckVIxadPX9+ZHTzSq22e+c2y4DUsCSArhdwKV3X7xsa7u3tU1K9+dbh8+dOF9AKMrLZYrlQ6gqiUEnJo4CFzPOElLRUHMiXC9ceuHbT5s1KiZbrLi1VpucXlhaWhJR79+xzsplLl8ZD7gZBoKSwbQtKxTk3DMuxbIwJF0Jw3ggiLkS9XvdcL5crbNq06cCBA6VCYXZ2dnJq6tLYWL1ek5I7ToYQKrgkBI+MjIxfGqeU6CK9SkldquNK77nOWZJynYhxYT0BSB9pjf+aCAAQs5htjVPscpLgeJsMxH81qYgtw+s0BMmJa+Fe5yrVHZJeAEKAEQba+aK98VVK+5/4AvG4kj1EKlUZRKXSKBGkFEAgca4BCEGAACIAEaigWi1ughWEUgBd1VFol8FYWfdnX/udOHuzLgSrwzEAghi3bcMo1iwBHb8M44THur2tMIYwzvPTLviOEwKggb4ttCGEINC4H/fcmACAdTRApVQi/0pKkIZp2HaZWoPdEKJfhukdA67+CtfEm6mNdDLvMbENadv6W+44Mem/4eXSBAAoIKCQ7dheEJsBuNAF+toEQIR+GEWccc4jzpgSAgAFMcKE6KQ9ECIpFWdCcq4U0I9SKg4QNKhhZ8yMY5qmCSFstloLCwvV6oplWflczqA09P0wDDAxt+y+bsumzbqo3kp18dzZ4xcvHJmfPdPXs2mgb5Qa9uCOPaVSt0FMGQggVRSFk5cvXTh11K1NZnMkkzUQLQhoWxRYGLDIb7
itlhcBhIuFAgEqa2AE5PnllteYfPiRL27fvLtWD44efxOZYW/f0Mjwrt7uXtdd8YLW8vKSaVCDoJxtmQRNXRp7+43XeBDks1keRlypmte6attWTLDXrJW7+qoNr6uryABstILB0c1DwyM0U2IsJBhXVyqcsVqj6brRD37y85effbPYk6/Wmt2lrOTR/HL4a198/IEH7i6Xi0HYtEzaajVtM2NQKwrC+uLY+OXxYi7f01WenZ5Zmp+znUxPuWt2dmFhYdF33X3X7uru7i6Wut98+53lam3n3v39Q8PFrq6p2dlL4+PFUnlkZLhRW/KDYHBoOJsrun548uTJd955q7tn8L6DD2zfstl13Rd+/v3JS6dLhf5SudvMZht+GCmJIKGIECYyyFCOsViv1quNrq6hzdt2bdm8o1AsmqbV8luXJ8Y4DyEhbsQwhLWVauQHuaxjUEIJgVJyzhGCQggvQmEYRlGoHcSiKGq1WvlCcd++a7Zs2QIhiFh08sSJUydPMC7K5bIQqq+vTwqxvLxMCEFIM8Tr+bzVNxxCCOQa8pCItqmXXyb54Tv4/USHE8M3SGXIb+uA0wRgPRVJPuj5JOSk48T4K8QyDoTULYkqK46wAUpBsCYSGLT1WqAtDUgY/6SE1Cp9pV04YFthgHWiA9AOtAIQSkQwRAoSoKAiBEMICcEIAwm4kBwioP1ndTyYUgr+xV//Hmqr83W2zliNoJU8ECGgPUFh7BOk+fg4pAvC2G5MgC77BttxXhBhjEG7qAsCutajlhqwtikggFcJAISwXdjsPSSADhz8pWibhnIVKz064b5DAuh4nFcaLd3SgdQbAveGR3r+aF24xy89Ra01EiQtydspgZQSpCUAIYVgjEdRxMIgCBnzeeAFYSg445wJzpEOFoRAE4D4cSgguWAREzLWwkolAYSGQSzbMgxaLBSkUo1Wa/LyBIxLieFSoTg6Ojo8PFwqlacnZqZmJufmJmdnJ6q1hmUX+vuGtm3fuX3HNgFCxvyF+YW5uYVGzV2aXWJ+yHwfyogS0d1lWaYSwpNcAokwtQTCkeSMcQgUxZAg4Bh2vVoZO3/0wAd/92OPPDR58dgzP3vWtMxb7rgvV+zv6uuXgs9NnkOyRqy86eSAErZBgmbdbzX9Rr1WWQpct5DN8jB698hbfSMjUAkRhQjj+fn5nr7B4U2bllfqW7bt7B0axpi6EQBAVaoVO2N5vgcA9kP+xltHnnvx9YsXLitILKwoFKObR778la+MjAwiKDIZM/R9jGEUsoyTXVyYNwwxOzebta3hgX7f886dOVvI54f6h44cOy65GOofYkEjl8sKADhXbsgQNbL5koRoYHjYyeWbzYbPed/gsOu6S0vLjUajt7tnZHjk3NkzTz/5VKu+0Nu/6Zabbjpw08FWvfKzp38wM34i65j9A1sjBvxQEoMSE9WaFQzIptHNbstfWFhSAEWRUJAOjWwaHB4aHR0ZGh7GhnHszOnJy+OWaQAFfLeJoE72izljURASgiKOfN9njAEAhZQAIIyx67lhwBhnxVK5q6dr3769o6Obz5w688orrwR+iDHesWPH4uJ8GIUxdKA1ttD2JlqNFQBxed7VY12sgErYuzQnlAbrpDGujQFWwXfV5X8tFKQ/JCMLIdQVCYBUkKy/aHrDQqCShAMolfEftu28AABt4tWgH3t9tn2fMEQQKl2oKbbR6qWDABEIMNBuNFqCxwQiBBWSSgkAAUQSxIYQiRCCX/sv/yuEEOtqMAhhTFZzAUMIEIlT9ieeQUCXbgegzd0jlI7phRiT9o0gAGJXohR3HxuBNQFIkgLpU0CKAHTgXUd7x6/vcax9IVa1hEkHtPbr+ifXgfj/SnBPn7JOe7l6bHiDG67A+pYOISD9KscEAEop2jXRVlVAjEVRFPlhGDEWiigIw0AILgUXnEMt6kJAKKWEAgiQUhhjxnjIWBSGQsEoDAFUGGPTNAqFIsSUUANI1Wi2pFJd3d3DQ6NdxSI1jLm5hVOnTh0/9m42B/sHhgYHhrvLXY5jmabTqPsr1ZW3336rurxAKO0ypONYQnJMkeu1vMATCmJCASQAYKmQIbwsCYCZYQpCggwCmdfM2ZZp2pOzCwO77rj99ttLXcVvfuMJ12fvf/+dg/25qamlrvJwyAO3NVcuFYuFXts2XbcRRYHiEQJCsogAubQwl3VsBMDk+GXLtpaX5+v15tYtmyAECGPDtCMJyt09iFgAkSAMqZnlXOQLmVp9BRvmzNx8vRUEkfr5s6+MjU2dvTSzd2t/dyk3NDxwzf59PT1FBCSEanhogDOedRwphOXYHCopOQQqCtxiLj8zPeG7Xl9vL2N8YnzC94O923e0Gg0h1cJydXBoOFcsQkwjwV3PkxAMj4yY2bKnKFDSsizB+IULF9869Or7H3zfUF/v7Nzc66+8cvj1Hw0M3XTrvQ/cdOutKw3v3Pmx15/7yfBAKesYDb9FbBuYtDZf8+qt0S0jhoGWK4uCc2pahpEJIhCGwPfZnn37r71xv2EYnudRQoPAn5udrVaq+p0hlAIAoADac0YIKYQghPgB45wpBYCCCqiQR3OLcwN9gwcPPrB169blSvXs2bPnz57NZrKu19Qp6JXgECEppQ6LjfXj7fAvTQDU2u0MU1Y0zW7Ddh6wNHAndAImLj0x97/Guy9l6dW9V5UFa/h9EAsaiWQA1tIApXQRvTUupxDCNJCgVW0WSCwB+q8OqwQAAKVblEbG9j0mB8JIQQjiWgDxyRBhAImusQX0UAjporQqjp3TxgCptE4S/j9f/32twkcQ4zgqGMF21T+AEQS6MG/sy68Tt4EY1BNYx23TdKz0B2BVRGj3RQiiRA5IaEPbFQytTfuzxibe/qqfOriSjXgjetBBuUFaAogXVD82sKqr6zh5fUtH4/rjSr9eibat/7B+Hd57tA3nLIGMzT1tQSB2/YyiMAyiKIpCX0QR46FOBCSF0EZhAHXVToIxphgIIaRSYcRd15NSRowhhGzbpgbJ53JBBBpNv1gs7tu3v6+vd3m5MjMzNz4+fmnskmU527fv2LV7RzZv+J5fqzRbjebk5YvzMxe7y/bM1EXBVW/XkGE6FlRB4HIeYRMrJMKI+WEUcckEFAIRYhHFCfBsJwOAskyEgTQQaLYailgf/+L/Amjm1beOnD707cc+/XuG1f3usZOjI8PFLDlz4k0D+eWevsHRnXaur740k6EAUwyB9N2mFIxFHiU459i1Wq1ULp07dQoC7tiOVJIzYWUynHMJcKmrB2AShowYBpSYUhpGYb1ZrzUaCuIL41MAWfOLK4vL9UuXp4f7ShmL9Pd3ZzJmo1YplYqWQa7avh0CKJXoLpUixpxyL8HIpIhg2GqsIKgC34MANup1XXV7cXqhXCrV6o3lSrWnb2DL1i2Mc6lUrpCfmZnJFfK+IFahr1gqcsar1SohdHRo6Pnnnj9+5J33ve+ha/bvX1hc+os//g0js9UpbDpw8527du/s6ys99/yTZ0+9UcrlkYIUU4RgpbrcqNdsy+7qKgshpQIIU4gNhE2ISKVS7
z2ygvznfWlzLB4Eafe+6IECfNiOp3NRtNyNK6uX3zzPT/w5H/5xB/08s7OcLa9Wwx3dnaGo+3prAqhKP3ObvlTH/vlv/3M357ud2a72zbtV2USIKFUKj/zTs+evm+4s5VlKZLJuh0vnlQQ0yzrJB0mEywbFpKq6C91vbBoioRBJfrGBgEBYGPrgFxjiU0Ta02NpxC11rwmfFTbuV8Pmpg0BSEEF81xceoBAChwPaVERV1VoQKIemDEQKyVmLW1kzfX15cG/fl4mhAUaD0xiye2glaCB1CTLb30/PMP3HdPUHLOqysF0FrOc2IjScKDwfLKoLcy6J4+c/JTn/zde+45tZQvdTrd4Wg3y5Ksk3e6GYJkeTKdjq5eu/aeJ9+vmJYe+r1+YtKofeZZnqX5a6+/ds+995o0CQDYVIbJssQkJmr7iA14RCbLGEOMgN1uHlE1y5LvfOsfnnnmK7/y7/59KAOIqobE0rGVpf/6iY/v7O7+T7/4S+s7u5TmZIw1RFoyCMeqFRGLI1uOiiCEwiiEYlANgqndYBRA4reEShit+2ISJsbRdLRybFlUFJUYiZEtASoz1flxa+TlNjJ0YY0AkZqvdfO4hxtSKAJ384owahsUj1+1PitxhUEY3Y0w5uON3jqGydTJ+GuaqGG04t0BUSBW4oAUkAQwxDjOGEDRrk5VJZqu42iLkhERFRWoLnFZ43wcqi1WLOiz9TBefL+480g0v+N2+MjD7R/YDpsoIiQ2rwPLnSMaWWRXGlRddL9tpcPRYL1I7uuC0fhAU7UrwVHSYvHN4rZ4ZLv4OPLZHjgAGhlw5N3uvWlDUqHWIBBARYNC9N5w3sdSQSE4QJnNJtG257z/sR//8Zdefmk8GXf7S2TZJJaYpKYxkMgqGTa2Kqozp05tba4jKBtiBmMsMqOJA5uArBc4e9d5Irz6+iunTxzLLXQy2+t2elnW7+ZZwt1eztYQ8/bWtf/9t/7w937vEzfffH062hkNhxubo+FwZ2c0urG9s765s7m5q+mpLFt67uvfYV+sX31z6mQy19IFwUqx8pU/f/c9Vy6/nueZKKWdpCimrKCCAsAJJDkbYoNczqb9fi9JcqQsSRKqC6hC9OfGqF8Zy8ZSY29nZmNsfBFxY0eRqErFk9qfLIioCBIoSFz2K0QNDiByGhDzEEhwTkVARYQJBVE56Yro1s52nibz0ZhVK7IeWKsSgJCtusq5ipP8zTde7Xdzmy8hkYaqrAIhdbqm0+HEJHmWWcv9fmeply/18Ytf/PRPPP2TVfAeQpJnK8eOJUmSpgkRLC8P3rjypjHJubvOE9o863Q7PcOGkSUEFSGAza2tc3fdBcxQK+CIRMbWrE+sIrqXURiZmRNrmcgaYlbDsLa29oXPfvrSpcu//Mu/NJ/OmVHFE4Z77733d//f38ry/Kc+9rEbmxtkDICQekNAUT1vDJENvy9MaggYlQmZIlUiCFGX33sxARsGBGvt9vZWt9shRmvY2FgTD41lNkQMbDC+jEVjsf3IBtkQG158UWSFaqmDSFgvIRENsSHiGDSx72+dj59QCSPPQ/XfxvLc7MG4XGBmY6KBGqOtgg0aQ8a0fp/aVDdpsVrr59Sm/1FtfeUJqY5pabYjAhBqHN0P63eE6SOR6/Zyot1z5JvDl7u9mLnNt4cx98g3h7fDoXptEpjDjbeeC3AI9A+cctS5R3RV9aCz8GKDb2XTQ7Wk62el0iZ/iMdFKjvvdJBoOps9/vgTo+Ho0qXX+/0lCdrJ8zbIIKiKKhCmWa4Ax0+cEJWd7W0mAhEQrd2xQZURDANqktr77j//j99+LuvmSWqWlwfHjq2sHV9dXR30+73jx493u/nKylKawEd+5heCn/7Jx38zy7P5fFjOx0AydzKdF5PhZGNn9N3vPv+uH3zf5SvXZ0E8ZdfXN0fj6Xg6Gc+ms7J0QSvvl/pL125cYTaAbC1U83HM3OlcqCoftTEFmJUuz/MkTW2SpGmapmnkrJuVdb215kGAWuuv1/sNw4xkRKNARq3FrrY/VOOOp6oqAMAIhgTrXPDRhUYBvK80aAgVE8+nk3vuOnPt6jUANIZHu5tsTWJIRJx3zlUcnR2D98EXRTkaDU+fPpMkDLEAVvBMtLTUS5LEGhskWJsoyPHjp159+SUJ4fzd9xhj5vOi31uKd0tECsKEb7zxxoULj83ns8jiJ52MDSdJwkyINBmPy7K8+9y54Hw7quvngzE0YsESEOtIN5sxxlqjIGfPnfvkH39iPJn83M//3Nb2NlsTCbelpe5v/+Z/fPzCo+98+zuKYmaM5SSpCXgiYmTTAiU33joUq1m0UqcxPMTjmutGhCe0zNPx+NSJEwhgiAwTIRoi20j1PYSvAd9wFGpETGSZ48s07yPoGybTOG8i1mkYsPa8jK44cT0Brbd+2+HYU2PIWDJ2b8gZaq3q7bqEOPbKEBteGKv7jE+x2A2ixqBg1dqWEqOREdEsKikAUXQo0e0S+xyJ4G8R5W8PT/CWWfJFkfAWr7KIeotwfBj0D5/SJJhaqCV066YW+3MroIejZMxt+twe8tZPOSCuWpe4xY/xrUbWIgQVCT7EqtyALF7Kyq2srt1//30vvfRir9dL06TTySP9LapeJM55ZAPMvV5/ZXX1jTffSLNMReokCD4EF4LXoEDGVF7e8e53P/etb0fDWZqlbLk36HXyNO/kg5VBlqcrK8vdbh5E3vX2hz/7t599+/vfYVh6ebLUsVnKwlYVgnfzUr95Fc6ePvHM3/296Qx2i3Bz/frW7ng8K8bT+e5oNp7MRE0QPxqPgwiTYUbvKoC6krgE9c6LKLEp5kWaZcjE1jAZJmNNYm2GQPE9s2UyRMaYxNiUTYNAxIimHovEQPXkosZ3BQC1zgpaT6X6wZMgt/H6e64QqOArJ74yAGVRnDt392h3ZzTcSdI0yzJfzYyNCKMiMp1MRAQJCLEqCgDd2NxYO37CGJtnWZIkCCri0zRZWuoN+n1j2DDleQ4gvV7vO9/59jve8cTO7rCsXJJk0SYBAEQk3o/Gu1evvHnhbRd8kCxPE5tkWRZ1eVUgohvXr1tjTpw4UT/O4J1zPoTQ5iluMhU3UFbz7YhoYjI7puMnjv/mb/z6ynL/Az/0I/NZERPPdfMst/b3f+d3fuonn+50ckAIGltAJqzBnrAhgpo/AKTKtWBFy2QIGYFBGZQRGIEYjWXR0Onmu8Od7lKHDSGBtcYYYkuG0UY9HSF6ScYT48e6kTrCFoC0Drhtw24ZyGDjS1qbjGNRh/hijvtrfil+pH2vWlolqU0SayyzpTgoF0QosUFqFkO0Z3FonnZcCgCiNjFpsaqlakx7h4CkiAoLaf73wagc1jfh1kB/+MjvdcOG3Dh8uSPfwFtYBBx5wGHghqNU6SM/Hondt8ngcbid2+88soXDBxzu8JFfHW7wgAyITj8QfCz46L333sdWffBsbZpmH/nIf/c3f/M33W633+93u90sSxFRI6ehqqrGmCRJjTErx9fWNzayLIPozei8r1wxL8uycl4C8HAyv/D4269fX5/M5
mmaxZkRdbIkywbLA2Nt1u0mSTKbTf/pj39oOptcvfidtWOD0ydWTx5bPnGst7ra7XS7lhmc397e+uB733Xy9Ln/6+O/01055ijbWH99Y2u2sTPaHk8nM789nnf6p7aHw63dndF4FgR98LP5BBQQYp20JBahQiQFnVeVSZLI9URyn5CMsRHBLdskSQlJReMMrVGinssWAIMXFTCGmU10427W1Mh1aCY3awKoHUmYgFgVRKKRkCSEGJEnziU2Fe/Wr70pISz1lgwRiPchRE4gqIQQZrOpdw4AiFAkjEbjwWBAhL2lXpplWZZFArrb7QwGg8Gg3+12O1lGRFmWXn79NQF+17t/4Nq1651e1weN5sPKOSIMwX/ta38PIP2VpclsGtlXZkY0aZIaoszYy69f7KY2TdNFDWlvDi7k/62By1prKbEWQTtZZlDz1Jw9c/I//+f/573vfc/Js+eCRKcDyRKaDdef+dIX/4df+O/Hk7FNc6wfGFKdXbmm103tzR95EsImgCuuv5goKvJRObaEBjQ1DN510mS6Ozy2PLCEBsEgJHEpYCiq6hGRjWVjjbUmSWx8pcamxiZsEor8EFkkS2yJU7YWuWZmDBqDDUUD+14GrSVrMVqXDFNiTZKY5i8nlhM2xtTl+uoED4xMYBgZkZg42pABASTW/KhNvG00dAuBCgBKAAZbK0ZtTN+Lojq0KTbG4u8V6BePfysy48iW74jvh3ceCaxwFBw3px+QOrdEdjgkA2IjtwDigxf9nja4haQ5jOyLS5PFA45s4VAn414VFe+d95X3vtbbgyLycDx+/1NPvfTSy0tLS51OJ0mS6PtorW2WmnFeW1Fd6g8AoSorBHTeh6pylSvLqijKoqi8l9LJ8soakb38xhUi45w6L7Hg4HRaTGfFrCh8EFeFonSg8oM/8O6//NSfrKyu9jrp6mBw/NjxQTfvZSbrZFliDenzV9wv/9t/+/u//3sGIEnT9c1NBihKKCrZHc42t8eTie/2j21sj32A4XBmbTqaTKezWVl5VQBEa5MkzZmjwman0+nKyvJsPlMEmyZsjSJEz34gBMaohpnERjInTjcibFgOS8QqILF+eO1hQdIUZWtHrqo2hS5qu40AxFKvLVskKhIq5/3W5lZqTVWWvV53Z2uD2SAxamACEU3TtKoq711VlRJ8p9MZjafdbmc2nZGxWZ5Hj8wgEoIQUZZ1kiRJ0jTPc1BdWur/xZ9/6vEn3u68JzIhSBR+ibWqYIn7/aVvfP3rMeFg5VwdVcvRMErG2sQmN67fWFtbi7VI43yK0/uAroatVRjJEKVJQoRR+04SY0i+/o1v/viHfmxrezPa1q2hXqfz3Ne/sn7z5tP/7J/XpX2ZiGuaxTSojQTY+FDWmVOZaq4nGooJW+4GQNkwEUaxMJ1NkjSpT44ZdRq3H+aa26Gm5TZGrCb8Iq1e++ZDvS5hIkLLaCiWZMdmLUImUkzMiTEJs0E0hAbRIBoCE0/h+iwmZI7HU0KU1LYGbK/C2Fod4uIIOZ5G7Sxv6B4AaNLbEJGJhuomfs3gLdNWNF4K+7c7igE8Mt/AbalqPRTEdFjNx/3umLe49O06po2XzkJ/ot04Rvm37e85ArVZ1RYvungvt5CLuug+C3e6/f23cLSd40ALt2r28GEH9uwXDKCqIXiVABqrgEUjcJjNZg/cd//K8vJz//APK6sDY0wsf2gth0b4RMOSqCLT2traxddfRRXvXHC+LMqiKLxz8/lcVYlYTHb27L1f+buvEsBsPiZVVAmhcmUpwYOqMnkvQWR7a/NffOxfPfvsP6qEbp4jBA2SJCmyUlUumxzLqeTp0x9858pg6f/8+O+tAjz60AOf+os/Z9ublYAoIThBMxkPO93+znjCJhuNdh55ZHW4O6mcd8FP5vMsz4JqkqRBUQTRJjs7w7UTJzqdrgqIKJskSMmN12Zc3seMPeK8qKJqUEAANkabpx2Co1hQQQRJ4zwFkqjx1+b6No0HACAgMSILKAABCBr2IYhoqCpiC6AMUlbVyuqxrWuXjDUoiCqGUY2pRIwxIQRiBFXDtLuz89AD93qB+Wze63URpXKuKCsFSpIcSZIkVaAQBFIGBFdML168+LYnnqjmZZwLbCjhFKAsnWRAu8PhbD67++67r1692ul0AICRIrmsoqgaQphOpydPnrx8+ZJIYOLaaxZQm3oVqgog0WhiyDgMAMhMqhRzp+ad7Ktf/tI7H3/sve/7wCsvfCdPE5VgjTm2uvLHn/idX/m1//Whhx/euPEm1YmxKYQACpSwBsG2BhbVIWl7Q33PXRMBoySOtXBrA6kAaHCry4Pt7a00zYIEQFARanK/iSodgEHEmIBLUKKoboP5mFhVI6JKEERqpxsuKAGxO9FHh+rosegIgISNp2ac4yqidZEJBhIBVREmUcUYx6WkioqxUGtN3wFIlEuqQqqiQljzkPWTaGBBRc3+W4vuJLAI/fsADo9khg5uh1XjA+h5GDRvhemtbFh4lHu1Oo+68hEtHLmjkQfYttz8ldZSF09YcAO/XQ/3X2BviXDkWbfaFp/PkefepsEj5YGq1mJu4Zjmb80COe99LJgt4n1wZcUI73/fk5/61H/r9/tMYAxH374QRGOGSwAyJhLYJ0+cvLl+05WOUbyrivm0LIrZbDadTEWEjJ3Pp4+/4+Fv/+N3N9a3iDD4yhUz0KAqwQfVoCJeBJHKssq6a1maffWrX+v1+ohAil4inAKDMISVpSU33froz//8Pz73jScfGKBWS7l9/qtfPHFipfIgwSG53WFRFVXwcOXa+nzuy0ryzvLuzijN8qKqVHFWlN1uLohZ2hFFNNZ5f+XK1XvvPX/54mVjjXdeNTre7dnJ6h+aY5gNqXMiEkQBqY6wVAoAXM8xEKjXCgBS5wSGqGKK1HGaAFwnaoEmq5WoCrKCVM5bRnCVgi71B69++0ZukDgVcYyAWSdUAVQlBA2CpMRmd2crei5Wzk+nsywzgKEsKlFCmneAFDhJDBCoD967Xq//7LPPPf3009t11R9vk5SIszxXciGUg8HgtYuvPHD/A/P5qncuWJtYG4KPAc0KgQh2d3ezLFtdXV1fvxnNA3GwteOfCFWJSBQI2NQpl5UjMR3LQZ5cW/34b/2nf/8rv3r96hvBu8QaCZ4BTp85/ad/+ic/8qM/+ulP32ACEKUIlgCIKIBICLECM7RVt2oGaPFXw9r5EgCAuXGrNzydTk4cP56liWpIrQkSgLWtty6HZ5uC1HZUVFXSA5lggBAVIFqzG6jZ5764fzghNQtBQG0ZeaiZnAj6UQjEND6IqgyoghKLvGioS6lh5HO0FXgxjVEN+U0CuDZ3TnwkZpEMgVsA8T74PgoGDzAMcAiMWkR7izCqByT5/kaObOFwT27RvSPInEN/b6dZ31GYtf0/vGS5Tedvpe8fecptFgeLexaYnn0da78KPngvIuqDeidVVXnvXVm9//3v/9a3v82I1hhrrWEmQtV6vsbRxESIaBPLhke7uwahmM+KycS5ajKZTKdT731ZuSBFrz9QgRdffJFU
g69CqHxZEtVORwgQYtLkoJubmz/xkQ9s7k6K2Ti1/TkES+hdIEVLwMTWK+d5f+2+Y/3kc5/7zonT5x588OGLL3+3k0HwQQIE8Ro8Mya902yTy5cuaajms9Hq6vFXX/mmSdI0y0ySimpVOREl9DbJRDUWZtlY3zp18vT169eTJAleiElFYNGFGhENxfBdZkaMWR8CAKgKs5HalhqTgBIRAyjGYBOtlT4AaMUyxnCdJqOyKEjQoIFFgmhiWETYWLaJK2b50hIbC6oEimSMoRjI55xnNCajYjZJ07zTXfJuPp3NbbJEZBVcWVZBZmxShRCksknGhsvKCYiovPnGlYcefPD57z7vXSirChHQmiRJgqgxNjH2jctvPPTQQ5dev4yIrqy09jMXRDKGgPjatWv33Xfvzs528F5VMfoSSQNGB0YjRN9XVhQmlRqdIUvty6+++t73feDzn/1rThNrrEiwxk5nk+tXr733B9/zpS9+fqnXgxiNFKNeEZAoyhOsGe36LzSVbuPVaQ8isRUAxpiiKFTCUq87mUxAhWOxxoieMViXGtjRVm1XVaU4v7V291qYfRilu8gRQNH+bd5EwR+7WrM0UMtOAUQiFBESVVCMcTsRWIhVYg5gJCCkGIBWl59TlD3Ij3OW2llfr34QCUBNHMGL2KK6oDHWvMoekL1FlfbII2+D4LcSDLfajgTfO/atHX/t30W+/oDoWlyyHWjkSFm1uDpZvK940UMPRODghoc+3vlpHykqDgiAw51XVRHx3ru6XIk456qqkqDB+WPH1wjplReeP3HyOFNMHQUheFVAJoBogDKIWFXVubvvunnjJkiYF/NiNp2ORzs725PR2AVfFNW0rHZ2h7/4i7/0x3/0X0LlZtMJkTKhrwpViZ55aZqwNd57TzKZXHvibU/8xn/8dbZcFDPnIbMWgrISGLLGdlKzM54/9sS7QghufAMwPP6OJ/7qzz517uy5ndEsSBVCBeh9AcdXe0meP/fM1+5+9ORofdhfXt5cv4kG827Ppmm3k3pXsrGqWJbOJIloSGyys72jCr1BfzadkmEAgEZ3izH4SIS1BojMliguo4Ao0g8oJKoqNeIEFQMgGurSUUQU8xVAk0gUAFUxBBURRIiV1lWVyIvFMngK5dqpR0bjSQjesEG088nUlWWaJqwuoEHwIYh6T1UVglbOL/V6G+szFRmPJ3mnS2yL0hXlHCnJO5CkqQlq2BBZAgGll154/rFHH4Vo/A9URWd/tkhsbCregWpVlqurK5PhGIzxdciINMowp2n65ptv3HfffRdfe61OLd5E7agqACOCqkJQ2lsfAyExU1x6gi9Xl5f//itf+dmf//nBsRNuPlEMTCji+r3Os1/7ykc+9rOPPHrh6puX0yRRCaCCIKrAjKoQlGNhYaQYwYEA2uR6i5BXO2EBIDeYh6hZZqfTcb/fL0u+41ofGqzXuP7Zxye3YkBDTSmzqkCbMhRqZbye6nuXaXmVVsUAjP47AKAghBJJ2pjCp7kEIkYRE72HkUA0IBGiYpOcPJ4RUDiKh6YjcXULAKZ5ENgkLTiiFjMcgsUDWHMbuD/yId7q2wPt31HlP/TVwTbfirg6csmyb08cvAvJp26j+zcn7j2Ww+gfpf1bu6Mjdrat1fub7i0ef5s1RIv+3vsQvJdQuqosSxe8c+7mzRs//dGPfvnLX+z2ehI8Y4btqNRoFEE2JgRvjBksD8qynIyG6r0ri+l4vLu7MxmNR6PRZD6vnF/f2Pzpf/Vvvvnct3c2bnSXBsxgDXvvrGXm6GzNxphYfaOalx96+l9vbG5MZ+O146s+zEFAAqCiFwUhdNDLzXw2+cH3vv+P//D30swcH5xQBFfOl/tLVbDzMJrPCyIcTrXXSb33YwAXSiBwzg1H2zbtlGXJxiIZm2C0hzEZ732SJt77PM+Gu7vLyyvW2tls1ul06vot+55gXEgzNpULERGRVSWm2IrlGUQBGz2fkAVDHb0ay45IM6Ik+mHVWpeqBAk1CYkgohDcqTPn1jc2syyXEMgmxtroWd6cgjE2zVUuhDAaTfK8WxRFYu10NgNgQJKgpfNhOBYlJBOCluSzLAVEm/B4NB6NRufP37uxedMkRsoKmUnQiwzStAiOiYa7u2dOnd28uRHtvW3YvIIY5jioxuPxubPnLl+6HA2zgBJ7FucxG0ZSqQehIHAAQLQADkCNIadgDH/rW9/5p08//Ue//zv9bm4wKIOqLC8PPv/5L3z4wx++fvVqxOhYjKzJbIyxxErjdl+r1kQHmPca3VstGxGBTPAeAJIkCd7XlnkFrVcmuLB8xuYrUBHc48P3qrrULJ+2ptgF5gdqO3gzYBAhGkq0ZaviRnVMiUS6goCEIt2kjWkXUEQopnjWEDQiN5OJFBDUFSVVY4whKAAEVaZ9TwMATKPv1+B2GH4Q99i0A1i5iCxvfXFwq+3AT3XkfrgtprfCtf1lvtceHaWqg4ocPGixV0ene9NFUbq4LLhV/498qrc4sD2gzvd34Kz2ZzryR9E651sI3ruo+Vdl/LC5ufX+pz5w8eLF0WjYzTOmFBG4Du83ABRAVdF7b9MshHBsbe3VV18jwGI+H+3uDnd21tfXp5PJ1ta2C0EAH3rsiV4n/9xz/3Dq9GlXVZj2JFT9Xu5clVgTY26RKIAGkaqqfviHnvrt3/742vHjbNR7BUBFJWRA8qoMNJ/NL7zj/TvD0cVXnj1x4uy5Bx4aTmaZBUTb7aEwzDMo3cy7+dlTJ3Z2dwxACD7rwXxelG7GSbK1vYVEVeWWenmslooISZN+BxHzPB+PR3meM3NZVtaaA78L2+jtI5GyZ2ZAaDJoA2gdixR8UBBjDICo93UAJlFkdEUEFJBURH3wIUijVkpZVszkWMAJWPIBO3n66qU3emyBjI0VTkQEYs1ECSKAKKIQpKqq0Wi0uro6L0oi8qKzeWFtGgREdDqbBQEfxCQJW1NWVbfXifrxzZvr999//9VrV/Jex4dgiMqqqkpHx2g6GmVZZ3d7e2Wwury8vLW11UTDxalKQULwwVp78+bNBx94cPXYsZ3dbYoQrCKIIXhVMkgipi65AqLCRD6EQAgqyCritdfrvvD8t5aWuiurKyxBfLDGKBASzZy/evXqk+978u+//OVuN5fgjWndapWAdJEAxzrkohEG9RdMDKBEDM0UZiYECN7nWTYej43huoBD1LWxiaFtMCUSIoJ12YHYvmo73xFAJdYEq6VIzSJpHXdLjYIY5WJTZyIuNCPU1pUGON6cqjYOBEEjDxQBWWKhCaWa/IEG6+tKOgBar0zrZDGxPKVgbeMGRDQC1CJraw1YhLQWQNoJQE1xjEWsuS1m7W0Lq8J92634lsPbbRYWqgcSM9zqsPrvYiKHvVPk8DqiuWi7JFh4SogLYrNthAAgLD7IAz1qdPc9qXDovnTvevtO3AvRUDWL8uAA3CMiqOD+PT54ABDvxTvxTp0TPw/V3LtyPp/0+t3B8uDvvvrlwVIXDSGCsWjSFA2DsQqsPngRIpSiPHXy1PqNm95VYV6Md3Y3Nza2t7c
2tofj8ayqgCjb2Vj/lz/zztdffvFEv9NJCbJcRRBSBMlSkzAnNhGEIAG9U0oefOhtMCOcUQAAIABJREFUm5s3i3LY664G70CCd4RpgkghBIXZXHF7aD70xDs/97m/XDt5alK4te7S+saWyXPCYlWnCZsRQuW7o435YxcuXL70+qAPrJja0yhJMR8Plld3d7a7eXc02hWvSWJVlQnAUMyYFaextbYsS1UlwhDCoh04GmmjM4qAqKqwQIy6FISg1MxwRBUERPBSEx9AJEiiGhSdigS1xATKAGCMSqgqFwveWkRkm3pX+tR275b5JIyvzbMsiM0V1AdPiCBewrQsvUgQEkUK6p1evvTmO97+xKyoTJJ7D5VzWRYjt4EQp9OxTRKLYEGQs9Hu7rHVNQJz/dr6iVPnwKTzsjIGXBUMJVKUWrrZZIwAaPill1545zvftbGxyRwdWY2qgkqAQKTeuzRNX7148aEHH5oXc1dVcVFkCT0ZT+iDgKggMANTEkIIToQgEIQAqmBJQihPDpJXvvWMMQkxc9JSFNpJw0vPf/1DH/qJ3lLfOW+MBaiMAZWAwAigioIL2r1q/AVhz+4CaLR2nI9SATD6u1fV/NSpU5PJEKJVPirs0FqzAXSPG1cUjln/GqRp/8ZxIlLP/VC7EWqTAg4XfExgkRBqRBQ1lAESx4RRNToFqT8GkaDCxIzS1JYnbdKKBFUE0ZhYisjTnlcyNBmRsKbCBGMQ4/fGvh8F+odB+cCewyTGkS3fHv1vIzYO7z+S0lmUW29Z477dMbfpcHxItz+sxf1FmfpW+tC8vyXVs/e+vXITLdDo/yGIePHO+aryUXO8cOFtF19/PYafGOY0zRKbLESXq6hoEFUQ1TRN3rz6hiuqye7ucGd3a2t7a3trdzh0lTcmCaF66OGHl/pLr73ywsmTxzs5E7F4h6AKAhIMcZZlSDqdz60hNw3nz9/15pXrS0uDhhMHCcFVDowBAAUKXk6euWc0HF6/cTXrZdPZpNNJx+PdbrfrqgBpp8t2Xk2BcDSGs2fOfvHzn8m6KCFk/cwHXxRTRJrPy9XV5d2drRACRs9oY5RYVWtAqDV1WoyxaH/HNk0xABDE+YbERkWCBCRqH3I0NyIqE4tggACgIupVfPCyx/lAdBoOotEYIyLMHCRUPlTl5KGz58azCgDFaxmqvMPOiXNCTKJYOV9WXgGJrIgPolvbm73BwFWVd6EoizgYjDEh1Nlj57OZTayKioghnBcFIDPTjRs3T5448eprLy6v9JPUKmin09nZ3prOpgKYpKkqbGxsrK0d293dFZEkSb33AEiKQig+RJDb2Fg/dfrU1TevqAoSQUwRAaBgKabaFtLgAYAgEQnMHIS9i7CnKMxsoGaeay4HAEXZILz4wgtPPvnkF77wBcMpkyoERYjJfkSxzrDfkOywyPy0NtgGdFtGwVobp8Sg35/Np8RNPHc9bfflH4vsr6o2/uILFfr2tvpUItKmD0SkTTA/1g6psVcaGZ14ZqNtgKo0KxpGhJitRVVJiBUFgNEqiCqK1LWM40hSCCKCSMAAANFOgIgK0mT3rBsmon1uoN/HdiS03VGLP/Lg74NB0kO8/JEHHGau2lm9T/d/a1eE7/EGjzxrsVdthoDFTuIhs0Hzd9/KoLatHdoicbewVtDowiANPImIc76qKue8c248HidJurKy/MwzXzp+bCWqTnXiFVGUmnQQH7zzZVXdf999165dL6bz3d3d8XB088bN9fXN+WyiqkmaWpPMZtOPfuynPv1Xf3burnODfje1SETelQhAqCAhhrzH2y/KwjrTX+p//etfy7MuqmACvnKlK+fzSowSMyV2PBu++933b4+mhiFhtkiGaTbZTRI2lDs2WsDq6urueLq6DKvHVq5deuHYqZPVfG5s4p1DBsO8s7115tRxH2J6/5AkCRMrURtNBw2zH/vWzNi93641FWrNEAAjCQCwxqT/cWpFvBXV4IUIATEuUEXUifqYGloBAWLpMB9CWbnKeWIUgBBUQrhx7crTT//zz3zu0zZlYCymxbGT+bSUSoCFvYTp3JdlBYBsgBgqHy5fvjQvqsl0lua9yjkRBaRUI0mFxthIAaox6oPNEucrY7P+oP/sc8/+kx/94a/+/ZfTLCnLMjXJubN3Xb1yVRXmszkRZVn+5pU3Lzz2tps3b3Y6naIsrWHvAyJArNQLkiR2NBp2OvnK6upwuINgJHiDAN4DgBIBB5EggYMEDRy9mEQCU6TCQEO0LeO++QKoZKyxr7/28n33PXDXXWd3tzeJOFpdYuZmUuVDObqhFgRQq/wE0BZfA0VAETWMXnQ6HmVZVhRKsOebGedXPJqBFEChrjSPtXN9S/cvTt6FpUPLIUXSpmG4a2GlSgulK6OGEU81zPW1Y4EwZooOqqASRAGCSvQMEtaoqgAhqCiyMIKCEhCxqgqoaIh+CdEXFhub+PcvAG4D3HqI0DiMmEdi4vfdmSMbv1X3bn8KHgwP3re18uaOi5U73vLiAUeukA5IhQPHqCpibVG7ze3s2wmqoCFWfXHOe+ecL6qqrMr5fP7EE088++yzaZo659LExnK1QYIRCcGT5yDknS/LMoRgrb1+7ar3fjIab21t7ezszGZzBbVJaowBkac++IHxeGit6S91ur0sIUAkzwCoCVF0A3TOVZUjIleWJ0+eCxJcVWZJiqghkISgAKronMcATLg7nt19/u6/+NSfp2kKEvI08VVBIKjBWEvIIlKFeZomD154ZDwelSV0ux1DCBqAgE2ChFU5J2OzvBOChBBqCyLWSXliTbT2J15UGo5UMuL01HrRHh2E6l8QKUYHGAnifU0GxAIBIZaYaQp8iGgIoap8VTkJaoxBIAUYDnefePcHr1y7ORyNV9dWysoPx1ObpLvDsXNqjIYAIUDlgg8KGNh4RB6PdsqqKsqycsEFAQURjdkoYiJrAvDOp2nKSEVZZXmmArP5dLi7naZpnue7O8NuLw+lK8oCAfI8j05K0+ksTdPRaDQYDLa3tztZXnhPVI82wywiqDHX5vbZ02dms0kIgdFK8ERRi68fLBOxUCASXyFiCABokDQS20QY0xuIaut4HoXoysryKy+9cO+9929t3GBmRlZVquOqCZlhb+phU7MZmnLuMYVfQ2UjIqI11ntvDaMGVxXGcPOr1ouAGO8GAEAaCf7IliPuYXtsT2shUBctxr38PFozsnVMHADE+lzxOnUJs9oJh3CvTlwTPg3NIjwSOtH3LFb3gmgcjsmvQYMCqBg2ABIkPm8gBAVTWxJAY27yOGi/HwFwAPoPI92tsO82x3wfp0Cjv78VuGxPP6xowy0WCre6QWy29pi38gRutaddgtxqEXOrbfF2DnT48Md20RNC8MFX3jnnyrIsiqIsy+ls1u0vAcJrr726tnosKkqR/pYgGsu2BhChqqzm8+L8PeevXLkyHo3F+63NzZ2dYeEcWZMkqbEmtZaJH33kwRe++/z5u88mKaeJNaCImNhMJcRKge3SJy5HHnro/pdfeXV5MFDxxrD3gRCNMRJCzDdZVW
Fl7Wy3P3jxO1++597zacaWGcWLONRASsQ2zzFAMSur8/fc9/rrry0dS4gxy9LZZAgKzEZVk8RubW6eOnni2tWrWd4JAcjE4E+KIFX75MfBII2NvcmyWw+VIzNlKUQORFSCBEICgqCqSD64KCNCEOeC80FViCTW6VYF51xRFPOiREQL5H0sMIanT5/5q7/8y5W1wWxeIvLm9naSdi5evjorPZlEhZET0WpWzIOIMd7a5PrV692lQTGfEDMEUohZu9UmSQwbBgXvvYoyETJPp/O1tcH6+k0iePGll5588n1/+qefXF5ePnPq1HB3dzab9pd6nlRU8iybF+WlS5cuXLiwtbUVrSPx3iNOxVRuhlhUdkfD1WNr21ubQYSRKUQHWVLxJCQSYgJLieHTRBACISCieB87CQBc62OIhMGLYYMqw+G293edPHlia2vDGgJICGu/2sj3RxStvVoaY1v8aBpKJs6feGjM6x8rXBqOrLly607CNcXP1PAs2k6u/YpgXV0VmBowQYQ2HAHrLmKzs53IcVwt2kej4NlHYBEBQlP7BiSIAdXGrtswA2KRnHhERTREPobaSbxmvDpRTDmCCIDyPQiAwwj11nX226vDdwC+hs4AWIxp3ddy3H+4e/uuAm2FhEVtbj/4IsYkxod71U7+O6r/i1rB4llHior6vhrZc+Ah314qxK9a9hAPsUYHFhl1rG/w3lVVVVRVUZZFWVTFvHzkkUdffPHl5cEyIjAzAlhbl4NX0chAVs55HwxRliSvvfKqK8vpdDocDkXEWmsMZXmSJNYQnjl9RkSINElsntk0tShKqEQYBQC2ZDrgvKg6vX5/qTPcXu8vdQAlBsATirUUvATng/fT4e6H/9lHv/SlZ/KltcpV1qYSYsx9pRiMsSpsmNI0S5NieXk1Gju895bYVfMgghj1L1xfv3n+/A9eu3ZNESIctoW6CEkbCqh9dCIxcSIraOw2AEgtCfYGZJxihggUgob4o0f1v/IhKqqV81WQygsgqA8GiIh88M75eVmVlU8SGxRCUO8Dm/xLX/qSF3XOl84bk2xtX8/y3tb2TpBgK2+AVTAohCBlWXojCnhxG6KviwLkee69j+kZYrHGoph7HzDKAFVC413IOvmNGze9d5//28/92q/9L93u0nBnuNIfiA8AWpalMUme5yow6PfnRTmdTk+eOrWzuaUqRKadEq0pi4kn40m3203SrCgKAEBCUhQJQIYwEGI0Qvo6aNYDBSEf9WGkJrSuDbIBsAl7H5gpI3v58muPPfbY+s0baWIBBIFVAzUxrhFhkQgACEQBCVp3oCgjCBGioq0SDMduSxRnDUdS/6QNbtcuoEgxB8PeAGmX9NBQNgqtl0eceqCgRKRI0Jy5GC/dgAMh7im1kZ+PaWW1hVDEJiqxDj2PtabjePWhAsIEWUERlcAQSGhGs2JchKJiYzOujcBvBYX38x53BKkDSHf7A26j/u/ddrvz0Im6yHbv/2rfVeJya79H5sKviO1xt+ph+wQWG1mE+L3+7+/JEZ05dPqiDIA7bU2/j7ATLL5vW45bTPcWnKsdQJ0rq3Jezo21SWK3t7dSa4xhG5N0RrOVAMRBK76qfBDNO93ZdDYaDquqmkwmqioakiQxhru91FoWH+69957ZbLq8POh1kzyzTKBBQSVaBLHh1hFRFInt2XMnt7Y2kcBYZEqYUUWNpeBDVTrvqqpwyTw5f/f5T/7+76ZZ4rwrS/JOJIgLpYQqhyVURYPOVSG4wWCwsb7RWzrmXAVkKKYIUEVVy0ZVR+PxqVOnRqNRmqbtT8C1u5HGWntxOR9rpomogtQ/U03SNiWE6nmPe5acWs9DRBLRECT4KDnAOe8q77wnBARigqAagpSlL4oqBEEkUBLxVVVNxiNXQd7tTSZTRW8TP58UWZbv7mz3+nkIwcTaV1hbU0QUAQ3AvJguDY4H7zudjrVWnEfEEEKapmmaqRZBtSgqxOnSYIVNUhbl1vYWspnP5y+88NLjb3vnZ/76L8qy3NzYOHP6ZAie2QQfrE1EJM2yGzdu3HXXXdsbm0SkKsZYgDqHPDTPgIl2d3dXVlZns+t19ogAzBADZAVRRYnYGFBSDCjiUUFZFBACKPo63Bjq3wGBLNcZ7ufTcVnMT58+tbu7E6ORVYHZxJx38XfgOokPNi9ARNR2PjYB3rG4RfTyEVCChrlvFwmLE68mT1RbiF84QFEx6mTtGmFvWRIXJTGJT3smNmuR2tod1xBxlyA0mUiglWxxyCEAQE3oKQaJdecByaqKIqmKxsVnUwtAYtX42JnGTwraQLC9O2kQbvGuD0PSgT13xKzbQPzhbVFtv+NVDrdzQDgd+LY16x0+q9E1Dvrs3x6+j7jKQm7ug5LsqG1RjB152K3WAUfvXFjoxH8xnkSCaBBXlWVROOdc5YqiKMtqY2P78ccvvPraa8GHUsRyLhJUSYRDCN4HCKUiALL3YTyZnjlz9sqVK9PZrCyL+XweQiDExJpuL8sy2+128ixbXh5cunix11vKUrRJrFUqqkKoIigSgggCBqXKlQq0dvzUG29e7Ha71piYfVFCAERN1BjnK7bGPnTi3GwyZAqD/qAqhiLgglYBnHOKvoSqk3WDD0xJ8D5NkrKq+v2BSOFLpyrGsA+FcxVIlne6N2/cfOihh6bTqYhE4gJUiVUJQClIUAnxsUGT3UWCICJSsz7APTGvoChAzFyrZnseVyFIVXnnPQCWzrkqTGdF8N4mDCiAQRWqqnKV815i4RrnfFWVk+nEVapgy6KonAL6xIfB8jIRFLPRYCWPweSpNYnhkNiqskTEhF0AV5QrywMvPgTX7y1FE27lKutsLM5SVs75EERmc3fi9Kn5vHDOzSczY+w3v/HcU089VVYiAYfDYSdPT60dR0AJQTjElPUxirC3tOSqMoIFNfUPahRDtESVC96H5eWV8WRERD6IYRPAgwIzAqGqqJKIBEJUESRyKOSZIAQQ2VOkRQSAmNn7CokgTy5dvnThsbc9++xzieGgnghBAzMzAkZTKNbQifXMir8Yt/rY3uxDBVEEAIImum5hzmrNL9ThXqBhf66zGqCj5t4EBixMQIgcvALEdU1TxLhFj703sMhZRUnWpK9AqAOF63MBgZvUAzFkhgB8NFBrXPgAIoEGRRGQ2natMUtECxfmAOLgIUiCBXDcp+S+NV31wHbHdQbcOlbgjq21nbz9uYtafHvw3pv9aZsOn3j4zWHZoIdOueN2+yMPSxSAekUDzfXq/kPtQLfXnLSxXzH6N3jnyqpwVVmWhfO+319+5gtfTBJrkkSCF2KtEw+K9z7WHRbwzqlzrtvtvnnlSumqyXQKoF6CZc5SM1jqdTpZmqVra2vFfJ7nea/XRfWGEaGO6pLoB+mDgjKxDyEIOBcQcbQ7JuTgFQ0AKCIjCCJZgwYZZH7f/fdtrF+//8EHgptNsKrK0OuvTKcFACTWqhcGJsNenasqZiyKioiNSVBwJs7aRBR9CEgo0RVWQrfbQ4Bo0/7/OHuzXlm27UxoNHPOiMjMtdZuT399buPbHLeUb
W8+l8yjnP8xxi2ta1mE5T+vnnX7atEIWff/758eFhO59hCsfDzEzznF4+vfz+998/P396eno+HI4hTiGEEKJn3/M92gru7mYf65J4IxY0rPsY0thDfAEA+RKM4NlHG+TGzFSlGb2v1Upop8NTWo5HGz2QGGuQITQBmbRS46TzahZllqglC/HCYUJekAIRFQAxIYXm4K0JSNALNSOZF88EMHUVjTxjhwGAkmJF+FTzTmVC4PPQ2OOd8wUOkOWacwOIGMgxmoSIyFBRTPWQ42ByMKu7qwn+l7Jyo90tH5AiAFGoSbERTcHITSXoDK+SuxoPoNwgDFYNP/ZRLqAr6gDO3LDpFHu36lOI6Dj09ogHxX1cTuQ+Tbm6GpJsD90eVc7bnr9D/Q3u5XzuPMC/79qpDaxsdPwCEDSretOHdmkKzGCoizi+ZqC8e9JRBHYe8FvY5EjoESujsSqq3P7SH3L940qdrHe3CeloDX90b+4mvLyx52EGcG+PasAUud2YEVGqJozIgTgAspvUfM8TQSkqpXz79u3HH3+c53maUgjh8+fPADDPE4D9/NMvzCiqf/vp5/lwOByPP/30U3p7+/7777///vO3b98O88PXL1+en5+/fvtlnl+WZfnrn//Mgf74xz9++/aGNfcDG5MuWko+L5pC/Crb29vpAWijJQAis2ghAESuFQg9fTp4yW03GpsRmgoYiBQpWcomktGECJjISFwK8RwG4KgnERcSiIg5lKLgQFgDEQ15Sylt2+qxFA/HAyIQBUBUtcBEiKXIt2/fvvv+d+u65FwELE3pdD6xo+/Njg//P2Nv3uzIceQJuntE5AHgnVXFKlaRbFEUJVNrpnd3bNfm+3+BPabNRmqpKfGu89W7gTzC3fcPj4gM4L3SLoxWxAMSmZGRfh8/7x1hv2rapg1tOD09OTs97VdrQIfeueAJnSq4JGez8ZnDA7QgwVFhcLVCFyA04ARUwVJZsTx/S7badhFSBp7PMi2fKXsVmDztys6r6HAhYjsBFUhRQgBEEYgADlUBWxQRCeI5ujAheSRPzgOQqV9mZpCkWAxnCADSDBxRRjXVohpFnM2xAAUiy3F7t7eeRPN5CMdhJUgO6ZpwZesQRACbwkhkNRuIaMV7S6Qk/xpT+wFIrvPMJ8+RAEwVOWaa5IwrEqb9LxoqhWJgcaYEFA3rbhEMAPiYAjhg9fKc0o8U1KpNq8NQFXBvoFoWdmBq4KHrt/x2L85zeOnq6nmfFiF3cIbKgczLfnBaLb5PfWoAqVAc9k6baRGLMq7k4x54NJgwLvf94FW7Bct7lMebiR/7be30ZBMMCbW+00SnefNyyZCUVHCxLCq5X34LDwu9qm/3HiJV50o3lcG+rITD+k8lJx7JOecCWfQWdJ6jyS6e4zgM0zzO09x13fPnL05PT4kohDCO4zTNHz68u76+Pj09GcbROd+27fX1NSA2IWw2myY0x0dHz54+Q4DVenV5fdG2bde2IiKT3PDN6cn51c3dMAznZ+dNvyZyw27rfcscj49PINmVOk0TOa8Acxyd84YDITZw3GoFC12BFVugcIxxUhWVWYRVWDS1lakgRwEA5y2bB847FAORRlXkKXLkaY4x8nbYbdZH0zQ7R84TILVtN41zv14heVWdYlTAeZqGYde0IcbIUwRQ70g4Bk9I2jW+77v1ZtW2zXq9Pjk7efLkfL3etG0bfCBEckRYWmUXAoGKzdOTQsTS8JWr+MGUOlggTBchDoqSrFE1K19UresDUqIEsJYXCoDWcYaVlw+6LAAtUEtk4cKEKuoIEUkIgSzSICJpvAQ5MIODiMjbcDeF1EmVShNtNWg3SM5qaUQA0KMTSbkeVa0MTS+8wIsd2p377rhWtaEKQDZ9HUhFzH1RQXW1WFscIlUldLYBkuGrERMWUPWg0qUKt1o3Ubb3yrNMnQIWRcjSCDNzpsdpHz6uAD4tjiurfy8KYs0Hyd4/iHjsydy8lhpko5aM5U1NHJpRW/HgwgBgLiokzax7mvOROwNgAJ9uAJe9AZAcvzo4PxZ1nwl570Yg6REo1TK2C49evloHp3PvrQ0gp8Awp5Rrs6i+biZHqwFbaLRa9x4IndZex4Or7t1uPkAe30OA3FIC+cLlp+YCgHEiOfCeVJwZwt75EMgH65Mm50VEWeM8XV99vLm93RxtPnv+mQW+m6axqMgPP/7w8fIjER6fnNze3M3zNAzD0dHRbjcMg0G80Xa7HYbhzZtx2g0fPnw4Pj46O38SvNvtdh8/fmzC6uTkBNCJivf+5OTE4tnr9ZoIWZTIK0KMUZkdORGdYXIuON+oKKDUtf8AYNEdUBVm4ZnnIU6TygwqoGLTAGKM8zwDIUaniIgECkgUMia+AE6gu12cpnmOEvwUY4wRg/rdMJyfnwOQinadb87P+76/vrvjyOM4hNCSA9XZE1ETlFGZg6em9atVu+r7ftWdP3365Px8vVl13arr1t6HLLRBVVKjKwKUyRhLZxbUqJ+IaExh0ZAUGoKlMkE1pxsLORk9HDJRIWmjVamilJTScmTuUYKTRWfz4koVEKWjnSMEc3qUI1BgF6aUcLLMsPk3CTITkc3vp1TKlMiXyFm5DoiwI6eoakngZfqNInDUEEhVLftt9W31/kAtkisuyBIGlEgwWVXCgJhKy4rZp5praFE5b60mbOc8eakcaZfIfcuW1CgKnXL5HxhwKCZkJExpGkSwUJS5tqj/JAT0UIAeSMaqNrSO4TwioOGB8Kr3Lu9g1Uu6v4BFX+FeS+Fy7eWcJCnsdVBbubeWrAOW9ZTFW86pLMNs/8VPsKBQLWOXVejyHuGgbPaBUlkq7eozZGMcy+186ue2EPvYsqvFkVJNHJq0ZpbSj55K91eafJ19q/DA+TAz5+GS7DtbPBkynHNEDh2ROkJHziE16BwRKLNz3pHjOA277fb+3jl3dnq+WvW73c6i4fM8X15dXXz8iIh9vxaRq6urcRy7rrVJLzfXN2/fvvniiy8uLy+E+fzJk/NnT9++e3t7e7darUJomraPUcdx8k3XNH6cp+nycr3edH1/ffURAEiIhVlm71slAiUbVSNxFgUg71xh3uoubdKTggpznGOcOY7MMRVIijDLPMfITOCiCLBYuxOmYkoARUfovQ/BscgcZRxHIjcMO+c28xSZpWmbcTeAgveu73r0bp7jPMc5RiLsunaO0Xtoun6ahuB9cBi867um69r1ul9t1m3fk/cmkIjIDAJKucFklT3q3pX3RLS0GKZ2LcORXzjU6OGQDB5EdYpRmBVEUQDevAsgdfkFRHCAKKdmJhMhKCEKKXnvAwuTjXNzzqS/YmqCUWDWCAZ6oQCElGv5RFGUFIRAFRL/mMejYGGnVOakoMIA2afWquLzwCGumUXEJgxZ2EyBSEzmKqgmcKlc4QPWTbzkFbI8XPyh/YLw7HhhEjua/5eGckNqdsUlElTrm0q1IwI+PhGsVtf7B2AJXSV9gmBRCMwUdagn/r/Of2C3Hmxl+bhY/7lxIbMlYg5BpAaLbLYw7L9Ul0EmxQb5J0s1aVjFeK
o42P6JFxZAybpsL6C0Z5gfytPyVfJG8sF4sMJ9FgVVVVJVpRycsmdau1ZLg9jjQal/dvvlw0OdYeUL+xSSNqcCTANyZtCJOAxE5ImIvJlpoIhOxXvP8zgOW4d6dHq23vQWNiGiu7u7u7u7cZqY+ejo6Ozs7K//8R+EcHZ25n338eMVIqqoD96HMM+z9+Hrr7++ur5AR+M43d1tX79+2/edCkYBRHr77m2/7oHo6ur65OSoabrXr385Oz/jmQVQIAI6QXYiHklEQJndbLNwk6GrtGfxmaSPHOfRhgArM6jNTxQ21EaTIyIK6Bw5G9WHBI6coHrquk4Ux5HHcQohjOPk3ODIbe93pydHgDjPc+iahoJvgzlkt3f34zROIwAKEXRd2KxbFV71fdO4EHzXdW3bhbbtV6umWxGZvKbc82qERVXIRWtOrGtAi1uguXwzNwouZUKlKLl2BGvfUR449IsBizZKgJAsZ0TeezJrgYo/kBIAWKpCAcUsDLR09lKGKSoqUVQYJAqLiGXrMafz01g0FBZGQIP7RhFEQHIGcUT5dgyzKAoTYsoe5K/KiBit5h/YJRBRxWr0KE3IRGSWZK5BGkCGlGx/sP1EizBUhTqpwtbYOAtJtL/RYhG5VUXAkrNgs9W1FOCYpKA84fTgpZo7A9LftpWEVuEGmCO6qcznMECSpUMKQuWokxTg/ofXy8SUbiVLwxQROxQ0yxmw8N4n5BRZPC2H6pfL5T+t6XLJOKmmuP/BObU4sMlUQUSsPoMC971sm6bEvJQY8eHy9twd+yCL/srMAQBQym3UB3tYyeK0RrbWFVDM6aZFf2QtYsTz6OPYX2L1/08YOOV9fTtlbXuPT20gvPPOW80DudaR88E7R6iihKRi82O8o+Ojo83REZKbxnmeZ0SZpsm7pj3u5zj3fT8Mw9u3r7/++rfPPnv27u27eZ7v7++6pnv1xYvgEm7o5eXlOA3TPPfr9d+/+8ezZ88vPlzHeR4jr1ZrH8L33//jm9//l3GM4zg1bYuIN9c36/WmbVpBx6LBh+A9MDvvLWGnVgSKUuMYG0GI6hzncRrHYWSemaMoJ+ZFBJtI4wM4AiBQsdIoq6EMPgDPitT3vShOE09zVNWmae7v70Pwu+121beIOAy7fr1q2wYAyQciCm1zf39/f3/vZyLCJlATQuNXbdOEtgnBN03TNK1zznnvyDnvjJJr07WwCaYpNYuNSbQk8xCAE5haURIgFdqdmTuFVBJ+nKaetTTqC3JsIoc0ESmDyiVlYPFCR845h47QpYlCzqUMcKJlhJRtVgBF4WIbq7KwzCzMwiKsKqLMEnPKlhwQAEZhT05UVJTQk7MBpQgeSYGyZmJm7/ySXlMQsDogZ5Z3CXQcSP/MF5St0vSJdepR7jREVRKw0mi7BaVcspO18hJgUasoyFyGe6yX58UjkjUnW6JGLTfMqbXZpBiAlaZpqWkFL8uA3OQ5IAC4JZqukAOGmHokK8FlIQQkDEWGCKiqdYFiEfRSnn6WJ1rCkEnmSK6OstPauu2eJet4i9ugLlNNqfLQVVAURVBSAMxsFgVrAkjqDCgBtwCoKBJQUpKYBWUJRpHSspiUsKGMarF4wag50e1S07iNXV6EZN5DTN4uLNr8MbcDqtU+FMQICVczxxILbwEAaCrncKWCcdEcmJtcMuFoYauHBsJiFRIUoKv8cJYuWEBIuFJp7qAggvPEguicDw26kKIQ3jvnvXeEKDGOiupbxrg5fRacQ4JpnshRaJtxmHbjECPDAM+ePh2H8eOHiz/98b8+eXJ+f7u9vLwehunLr75erVZPnjyZpoGoYRlev3k3z3dEOMep6QM1+urp819//UUh/u0///ry5ed//p9/vr66/v3vvt3xSrvu+ZNnb969VZ6HXXS+bVZrULdZH83jMAsnq0bVqaIiLWM9IFeYDCK7ON9K3HIcrbqQlQGYUJwD53zTeIsZiqATcEhCgKo6/t6dAAAgAElEQVREgYgagMgaWbouGv6992Gaebub2h5vbm43m7UqDduxazofGkB0hOu2DQiNg+3WIXnvCUG6riFH3lHTd13Xh6b1vkHz7zO5VqaPpmGimEeMIAIAQWrzyQoCkJyTDATEatIPBZUVQM3bFlWEXEFkTg+zsGhKuGLiU0wREETM6C1IBOgQncUQvJJTdAjk0JtOMPCi5NxqqmhIbE5iuQwiQnCqSOoMD1NFQVDZBvDkICghM1sc3mvqhFAWJAVyKspOFNQJeO8UYJbo8igz40gRAFVnk0JZ1JrdwAMkpZi4Kokwwy9C84eQQNLMA7VGZlVCO7YqKVzYWFNBLQApABfp6VBsBjCYlwpRUNRq6jJuOWIRT6SAVj5lwKKqqFTcGVCweQDpKSWLADFrda2tYyOL2lhIny8eWBV13vu3DpA/XvJSx6iLuW1LqmMj2dLMHW5Zx6bdSxtgNVeFY01ZpHpiyAcVOldVquKVWtnL+5pWwWbT5YVmV2ZZIWZZX4v/xRHJd19L5E/sQ1Kv9YSgahlZWacrHhrg9ieV9oxPXAuSe7f/QRUrtJ3h5aHZebTIlHJmrZ9TNpYRnXOQLCsbEu+Dc45QmWicBh+azbGTeeB5VuBpmsiFpmnGcSIiAFbVaY4fLi6cc9vt9sf/8wdRubm5/tc//dd/+eqrk7NTUH3/7u3TZ88+XHwA1fOzJ+8/fFj1/fn5GQA8e/bs19evn3322Yf37y8+XITQ/OM//xbH4fz82auXr5xvmHmap369ub29U/Kbo9NhN4TgHKFHN0+RmQldmgGbm2DMBBOJEmfhqMJoc9myZewcheCdc957w/yIIibnCACt2xidKhhkXIyioBx1irFf9cMwDsO2Df04js75GOP9/Xa1xq7vQnAA4Bx4T03TjpMgiiMEhLZtvfdN07Rd50PjXHDkcf8BaY5dPUbeOSqtWmoujKeIUATJ5U4+snytIQmxqqgsY+IhhyJt8ANm/tSqWjllfRPDFpuvMDtmFqypzmzJFM4y+9TKfgwqxxSEAe2x4UhziscCWKmcpJy3Vr1ZqfvYBKjZ5cuQyOz9p9UogqiSqKEmoeRIBpY1W78agFiqAIhATHVo2jKy3ICSwpIF1BxTygZjliGKiqKKqZLKShIIkBJkbPYWUJLArx+nYgGgWJ41lkdP+okkcC2M96R/NmAP6AYgq73l8E+/9mVieSFiBh3LfQrVV/VO4d7Xi0Kxx4roKLVAsC0r9WGmurJlJtFDKVwUTL5HJcTH8Rn08BPLuHxS9O8HYYph/shOLue1ZEWqCqoJsRpalfYnKwkt+wOLS5cOqldiv6L9vFAt/evVLs5B/mkSEXtbUN1vYnp05MCneiEKjbWGOXJWtOyIvPcoMM+ghHOUaZpCwDnG7Xanqt6FrusuLj7udmPftcyy2qznef7TV/+yXq9++OmH4+uPf//u76u+Q4Svvvzi/Px0GLZffvHlPM9N0/Z9f3Hxcb1an5+dv/r81f39PbPM0/DL6zeh6S+vb3wIwjLNcY3kfRiG3Xp97L3XUq0/s0gRT7rsBStoKvcrkyJBb
TYkWHdRnosO9m1wDtExC7M45wDRea+KotHwLaYYnVMWaZoAajFhURXnCECGYeccEqmjlQ/Oe4+o5Bx5mcadqkzTbGAPbdt2Xds0wfvgnK+JvMQo9glhsTBqgiwH7/+qhCjSVxbzUYPxKS8Dm9O9y5QFmBC3d7avkkbAHLJAjR6RSXgRc/X6qwthGpuC5SsFsGSG3al5vQXRiUw+uDL7LKVALEulrHkATlo5cKq3oeKhW4oVrfMY6sWhYT7ZGYqMzsFqc7bK7di3kqMjwJrrEoFSED77bwi288vjQEzmx0J1ZvtCSvqkXA7kJaqByldJ4IdvDk5Xvjnsay3SYL85oKatg08PP8mv2hGpPtw7g50zqWxEVT7oNUuCXkGSEFQiV5nqj0v/8r7+ZI/2oVBVOaCqZN3nmewRPJpIt5+qLmb8p9SmWURqhQr7h+3x5MHPDvy0GstvMWf2Uyy1Hq234lMqaq/OStUyn4UeAZCQ1CnkpjBTAEREhFaJFkKISKzineM4jrudRN7O26vLK1Y9OT4PIVxf31xfX7Vtuzk6Pjs/v7r8SETPnj2Lwm/evPnLn3+5uvywWvV/+tc/Hh8fX19+fP369ZdffvnhwwdQ/eLVq++/+/uLFy/evX7z+cuXfdt+/vx50/gffvyRnGORhmi9WQNhZN4cbYYxMnPTtlEilRoV0OVOUUhSmRBUzAdmbJmoz5WUVpAAoNbLClb7rxyZLVrLkiakuOCbvgnzJCqdBkLnNylA670jSnAIMcZht3NETbshcqrsANrGOwKOk0SIMYKq996H4L133pF3GZFMC0RzRTB7phTiId/BHqcuB2elmFUfKiehrwoqhhdahBHmf7I2YbAoCJbgJOJSeFaEfkGPsE9EbGI8qkKBeonWV82pxUysAssA85ZshPnkZgWCZUezL65qQNOYvJtqd1BVBaXYWulBl99bPtHm/2Ly07MsLzxlLlTOqaj5PKppsteeIMo7kCMcqpjnJcTcS2HVEyrCwgkIOsFca/1IFsanhJGjBgKkmqacpgA5QqmGfFQG5WNozwsrK66PdMln2RNRmh2ZfXEOsNiZtfauqh7tk+I+2laCyKKNoW5GTdFnRCwyWnO8qpy2EPrjwA+P7oAWdYwIiLrUbiYaQrtlG83+mKx8KENrJKyH1y3yd/lcAXKwsLbQH1xONYFzP3TtAcTqRhfIkerqpLlSrpYR9ghQiYjr60KOqUF+QuneHxgg5DwhAQI5Tz4rAESVWZhUUwKNRWzcOSLe397e32+PT0/7vr+7u7u9vR3HyTkvIm/evPnxh79/++0fbu5ur66u/8f/+L8d4rfffvPVV18eb9Yf3r/ruu7rr78GgK7rmPny8rJt24uLj8+ff/bzTz/d3NwQ0W9/983R8bF3TdM0zgVA9N4bdnzTtoQQI7vgiDwiknM8Txhj6moGFY3mkdn0VRU2/FIrFSTyqBA51URwErtgTQCKICI2+BeVnAA4REcg2LWtrDnOsfHWeRQ4Mnk1C94K4gGEeR6GXds1PjgiYhY19CFqIHgAJR+ct0xqcGTt1qYAJBmO+dnam8r+wBJFqQxBKJINiuFl49qFLcmnyiKiGjWDBUk1QSwbp5gphTB7j4s5mahOWRZLTnNIxK7IzKnqfTF51Qbcc6qXT4HHxAbLbPrkbVu3vyrkIcYZJgEB1C0nTYBnKdmrpVXa9JDNkwQQBnC2GnEASpQnKqiiKAAatjWmYKFpOBTQFJtAtKrUahMkj54v3MfR4KlNkDsGjiIIBLneRVNqVTO2WwmUVTJCRbOLQpb+VASDQkZEhTQU/lEJmINrUCuAR14IFmODfalUTA7cP/bgVX7yYA168G0ym/MUFNuqyhDFPMoNNOO51FRYS9UDM7/8Wz7PzzN/aF4hFGQNu4yBb+/F8kC16KWHKqFca78sellqfWRZcDbhi5FxeGTZ7IxV8skF5IxYbdoJAJbhDPv7oAIREwbO/kmyyQFLYCpnDs3hdB7BGiuIyAF5pNy1jmQeeBPCEIfdbmtNA8xyd3sHAJv12ur/+r7/t3/7t3Ec//GPf/zww3d/+tc/ksMff/rh4sOHtmmeP3/2229++9nTJ9u7W+b4+vUvq76fZ57n+M3XX79//z5Ocwj+h+9/aIK/ub05Pt6Iyr/85usP7z/shhGAfONZuG0bUFZRJr9ae3CeiAzOTJhnUe9DbiIlAFUhBTZ6sMZSMoORGQSc85xA9ZUs7USkCNYuO0dWmFTVNSHYRorYsPtpHDWZrRBjRALnnPdEy7hdmud5GMe174nIB4yi8zw7B95R0zRN23jXWD8UkgOw8H2hrmQm1yRXOq0OCO/hh+W9TUwpn8cYKwG6WAA1eRMu8FmlIz0J7sRkAsTwgDsgDQujEpcv55Rk/KYqFEpgEbTHAHtUXTEUZoUHWT8mOSGIDpQIPQiIKjrME8oABMk5mwBdoh0CKTDDmlCQbP2a5L8tQvNNJT6hrIbrfqCDG7RNYFBVFo2AAESGXwEAzFLiA9nNqsAh9+JmVJoFEIGsUjQtCnylpSETiv3p6jU9+irirBZemn0l2f8knWf/dI/av2UZZY+KegYAkVi6B21xORWruV4IEhRXFWffp2Cqr1jfo6pmVE7r47DrJh+tZHEPzpn62s3nz8VTB6c92LFH20kO9iHbXwgmaFTruop8JAAwAEIq5qsl+3JYOqfprkM3fy8WpMkDXdy22mXJv9kPGCoUO4PIA0QWJedEkYjQEZKH1ByGUI05EwTz2AnVsG622/vnL77YbI7GcZ7n+fT0JMa43d4R6u+//f3NzQ0Afv78+fHmiBDPn5zO0wQAIYTPX3x+enKsyr/88kvTNO/ev72/u2eOIbg4j5eX754/f/7tt980Tbi6urr4+JFnvYLLzz9/0fctgI7j0LbreRoJCYi8dyJACjHOANiERnEmckQWA1iQMo2cyDsYkcjNPLOIKAggEQKSEiGACERm46kozLO2RCrqct1h0zTeOe9djJNRoFnT5qzc39/HyIBIzo3j6L0LwUe2XAUipEkDITQ2CNKiB5oamLEI0APhnhy1yhYB2GOZAwJGxFTZWWmOBF1nfKQJacBiMZDhPA0tMAmKTIki1gegqkLOKXOxgguAqPc+9QEgGuRc5rK08wjknZe2w3FE79u2a5p2GAON5L0XjTaJs/SdMTORJ8rOAVqJW7pfFYjC3jlhKaZSHlIPgKkBEHMcDBGEJRvjC+PVElxT78SetERKAFmEyHkCgXVXYCqSsgtnfAVK0HSAKZaFiJLDiOZkiArs52g1NdxpscRVi6UHFg7yIhERs7dY+Bwxx6j0gXNwIM6gMmPLe9k/oFAbPHACHkrJ+oR71zJAXlVVRpu+ZudMJsRy7mSx753B3uPDEBA8Ruv1AuqFpcEI1QNFyFmtxefZu8uH2/WoWn3AdSUeCsVDz3VBUC6U9HfWTtWpdF9PVFuxlxKo1QzUTo+xGOSgWk4FL0m8TFBpEdWNIBGJKQlypq2tCl4BNFdnYSJl9M5F5hh5HKeu6xFhmkYFuLq+mobh5ubmyZMnx8dH
MU7393J8fDzuhmkcf/e7b5o2dG0bQmi8k749PT3+5ZcfAeDlyxfb7fjm7a99u/HB/ebrf3n37u3R0Wa3u29WmxijQ5p4+PDhg3P48tXnkWfnAoI72pwoM3pv8JNelDmKwDzPHgHSiGNRsEi0gkEzOx/RgdXlqVX5OeeARVkhzgyEwsoiSOi9j6LzzAizIwreoU0HJgrON20D2IHCPM+ANE+ziJDzR8cn99stiwAqi95vx7aTEFpCCsGb6DTEMUwd2B5T20FFuVno6+HzeuAmfsIuKbqkdiMenqEYNxaUN9CFVFYEqVva6AQyIIEwF5hhzCPmi/2XYoeaMpmgpfkAU7cYo3OuCU3TtMGH4EMMzTQP1i6WUSnt5C7vACCqEpWpA4gFvW1h3yLK9+QDJqYnIABkMz2T0FuAGUx1qAoBIpENEE4mpHUIJ4mBkHIbxaMGMRhre49JhufYOBjgFBIm7L1UfaLwAINHzRhO5U+JyZPniooApU7A1FSCqTuQ0g/l40PqWczh3Bp3KDrNXjCswU+QVznPAYFKBUQOYELQinyotJ4iVHItiUw7T2ECLGf7J57vozeVxKI9W5Qif6uq0BLwxYfSvxb9+Zu9Ve1fa283MFk8yRRAxdJlAMWuP1R19ueBOVD5QHmmULIzipNrAtqIDdEADEpyIC3psVsrYiX9SZRG0BFlELJMVZg4jZwLvmU/AblpF7fb3Wq9Pjk9FbFyGlmvulXXfvXVq1Xf393eznH+6qsvmtAgOQMKFYlNE4TjD99//9f/+LNz/pdf//Hy5ZeAjkWRmpPz0+Pj41dffPX7P/zhL//x5/cXH1+/v1ytNl3b77bbYbf76fvv726unjx7+vLVKwJpW49kjTqpF9KamViYomVibRuciEgudhEAJAKyKndQhDnynAK4OsVorJxTAiRxitPskVwIMTJRwtjZrNeOnHMNAATfskgILQCxSNt0oenvtvcxMlIQ1XGMAN57IRu4heCCJ+cQyerzLU5J/yyNv7QFwL5R8inftBwJmY9gqbJLPoEZ/qpK5KxUMQXDIXdR2DSx3IKbIkgA5ALmPvbiUkie505EHkMpZTGtqeq818iESN77pm2aqWvbPgzb0WYHJ6PWbgRVsUj4LDeyZNe0Nm+YGbnEU0RzCEhTvCqDuwEgLwGbxQlQtZ9UEgxAbHhOYW0jDvul1VsXSbIwcDY17RMVG8CmCClildzp3I3xML6dENoQADQlFMz3AcRU/OorUsjG3l4RVfqkkil7Bl85slYA5a6Wk5sfhUsPV7Fz9RB/DQ/MW9ONlvQBSLauJnlvyg1ND6gUR6wIoz2j27IaZVWPUvlC61VhWpZ8mrcj6QNddjxNQITkZC0P4zHpv3ehA22HRdpmD3lZErACpehpFtIPzrBEbEr0pjyRRYgXr0UTMF1t9oCqpup3TGUMD9Zuv7YUSL2GtBuIVgiNBkSZr1hyhD746JyoKgISnZyegoIA3NzcIGLXdafHx4ASpxlU1n3z9NkXIkLeE9HFx6v37z/sdveh8QTw888/fvx4hQhfffXN5y8+j5FV4cWLz73zm/Xx5eX1d999F4J/9vR5u+ovLi4Rte9ahzAM2zevt9M0HK3XR8en0263bnslR+hnmed5zsV2VuwoBoXMAsJscM9YMpuJXFAVInNk9d6DwjxFAESimSNHFlGOouZVeK+qRF5ExnFqmsYwzQAQEJq2s+4BQ6p0IXRdv93tAMl7Zx0MSL6YPwjoffApC+wt7Faqawoz1i9Y+GKvXK0og0KxFud5SM/5ME8kmiomE+Gl8BQyYsmcmSpUBlUWMBs5Cx/RWBZjOIAm9y1ElmKJmdMBQBGcy/lQJOdc8KFtutVqPU67adxGHpljnCdJCyMzQHEpgE7lRs45+8BA6VJyJ0sIs+MtyQBpioyZ4bn432RxZlO0dG+SVAqQCn40c5htBe17YwlopJKw+eMcwLfeJyJNChJFRcGyEdn0OzC47WqCi0TP6XcT/8pVEtiUOi3TjcsZs58CAAAl91JkhVnbmrWErT17efk8+fcJTOpB1qH6aRZQ6TxpYaICxeRM0BHpEeTZsyi541itTqO6Sh5dtxfOWgTivoy2S2O9oemsglg9ajh85eI/+ERO9fC1v89Q76ERbOlIqX4koITJDaq8ovSvlLVSscorvq3Xk2l0r/wAEkGp+SgPZUF5U8kU0gPKI8o1fjYYkuwqYkPrNGkYE/39atMEP46z9/76+rpp281mM47DdnuPqA5R4kSOfv3lZ2btV6uPV5f/83/+5fXr14i03qyaJny8uPCe/vjHP/ZNENHvvvv+b3/9+83N/cnJ0XqzOTs9GcZ7IlyvV//tf/9vm83R5cXFkydPCHEcdsO4nafh9a+/XF5eXV1c/ub3f2xPuxBQypTalNgTkbSHswoIR+GEeKzKIjH1nEMUEQERMT2kKkQOFXiWeTZkSnBE1ivUNA0iMisRDcOkij1QCCFGNlxdVQ2hMTEXWUMQAPS+cU5jHFWVKIECk6M8jzlt/AGxlbhNra3Lo/znfAGZhGoNUWjDOWReOKiimUJpKfIvqV40W/oqJKU0BxVS2MSGInrnfAjeOVONVkYVQvAuGMubceFd0KAxeu9D23Zd16/Xa553zKNIFJ7mOS2j+P2lWTUvFUs8WbPMK4GQVI1qwV4AU2DJHU8mKWT5qVYEkmJMuWfJisrSRtgeKhQvoVS7LuymmsUyQPEaUFkUa7+tVEvi0rxWyXNbPwGU6JrlTwDyvdiU4BI2qilmIZEDbwDgQABikThlLw4ozMohID/d8kVNZCW8tS/9S3mv0W6K5Vl+WZPGAk3wTZR1VWk1KbegC5JX1me6bxSXFwsvWqTsGtrVDxhnSZMAPC7lHzLSwW4/qgP2HoCmuTRV7lqKybG/2bZnKQJr4kcPqqU0Nyvm6xmx5+6Q4g4vxd6pmRCxuCalzyA/ey6UuoCMWCtAZjbzkfOe2D+E4Mh5EFZxLMN0PzSNPz87ERHVOaKgyjBsr66ubq6uvQ9E4Ycfvv/19c/kgw+hadvVqgveffXlFy+eP/fB74bx47v3//nd32/ub7t1vz7eIKjz+GTzdBx3m/Xm559+/F/+1//NO4oz+yZshx06P26H7W5q+6O7u/uL9++fNN0qOJIIhhiICoiioJHtZkRJZebIzBxnnuPMCXzGzBQLcmTnAdEpIbOIZQ41e6tRoqhTBRFmAuQYd8zCvN6sEXGapq7trKjdeQ+I3lHXtdM8IUHXdOMohmlInhQghJZ8QOcRAybwr+IfPiS/PQIubx4aCnsEmU9VS67MdMlQs0pHsjs2d8+iiJoaBAAIxaLnVp4oan2zjjhGy77GGJnjhAaYGjjOzodWBVSdcwYdgQaXT+R8UAQ/xxCYRULTtu2q7dfNsI08l0HHoLlJSywKjhaZpDRurCSmSUFRl6k4ZFWeufcKAdBZWCsz4HL7KihgY7lUMcGsAGvEkn3MZR1RlIhA00xwSmH5KqaCSQgCGOxGJXaN/7JsNz+AVUCSpNO6hjCJEECr/sz5CUUCrRRAkV8HBSo
18eTHv3xemYN7gqwmneUoyGt5zOiur1JOsme2ACwbD5YSTu659YmUH0OWhflGiMjIBRWFBIggV/vsmz+ieRiolZTqopmMjhcghD0uyjr5kzqg3hz7uN6SCgcpHWbrNpVpZ9HKLtNlXijUztbeViULIitXBQVNgJAprCFgM/8WHUYW5Uj0i5gqHyr9bLEOzYrEjLqUFgdD6yVUe+boUoEpKqLYwzJtpABAiB6QABGcU46OqGn99v725vpqGHd93yPhOE3bccAmOB/evX3HyL/53Tdnp6fMfHJyEud5e3cfnDs52kzTNHfdSQjnz54dnx0/f/6cEEW4aRoHeH7+2dHR0fHR5uefvj86OgXCRrpO5M2bd7e7uB0umKlrQtu98QFb4NCvLAViGSjLCVhhHDNbB4BIzPQJmHpDBUEIxcaYoyJrKhSBhB8ag/eAyKyqrDo23jmkaTd26x5R7+9vFaTruuA7CzejhQsInUMgSmOKSVer9TiM8yxt2/jgXejQBaJA5AydSRVYuGaoA5o88O3qY0rVUE3eD89DRCwAMueJu8IcIamTNDZyYkZNc3dVkJB8KQVDUCAWVhblqHYEopVZcowqDCrm8NMEDpGdj0gOvXNOweS3JwXve+/Fy9w0fWgGHzrf9jQN3gfh1DFGZGAwyaxJ6ovFUM6cDdYCSQMCzW5OvGNCExTFahkwe+DMnN1+k4yCABOnDFA2vgERzV1A4x/ySUGDSsoSO1XNs25MALCYqYs50otANmPH8KjASlU1w1apcZZauRAmkYX2H6CzImxhArJIFgEdwkHXMvGh4ZApYHH5i6opsolSyaI+PG3tpMCB5N2/xMFrIVzAQpGLQ4dYk2x5SQYyp2RAE6qQUNLPWdMud1pcGkNgSiG+bLdCEmoH0fADwQ2PuQKLhthXkNUNHuq/Wo7XhXq1jVbfZlnGwZ/LacFASjT7WIkcyx8KZulChhpHREyWCaScRL5/E25VW39yASQFGV3BXAKrUpCs5FRFZeneJHKCKCLe+WG8v70d7u9umaP3frvdhhDatn35+UsRQcCz07PVZn1+fqaq2+3Wqm//8ue/HB8dAcDt7e3Hu+1PP/3Q993zz55v77cvX37+2WfPri4+juPu7OxMRCLz1//y24+XV5v1arXaEPnb2/ubq9vLm6vddtuE5v3Fxa9v3/72d/e/+e23jW+6LuhOJUZFBgUWFeYZQJWB5zjPLFb/raDqyDGzKhC5gKiIyqAiHA1/EVVlt9tNRM7ROA6IsFqtoO29c3OcdQdt087M4zghogrFyH3fhxAUtAkBEad5tgGTqtC0rXd+nmeL/ocQDEsZkhkjWvKMj8nuwjgPKVkzKPSjlFyTLiI6IiXiMk6ywtIRUWaWGFVsHC4KMEsqBpVq1nzy8rHgiYqIxBgBrFCY2xQhIQUUlaBNCMGaxawQyHvftq3A1HVxjv04dv3YcVyRWnuB8oyLwDDbH9D81Ac7scc2zAZ9l/hBwRK+AgXQ32x/O1zEnBprdS7pcdtbm3GDSKl+IEkhVATliIACDJxStcpp8KLFezHlnwwizkIjmgvSrMmDctQKsZQQWfQFF6FE5CijIOvjWECZCD5FHwd/1ofZTmZU3HyMFjW2iLAHhv+eEHx4/prsIIvF2m+txWL9BlTBFXR+zP9BuVwiZQDzLejwZgueEmCVQti7wX+6Y+XDR3VqXtVemH5Zefmz0rIPF1Cz98EnRjea6wjKVtu3iS3TkVXrRtYAIEiQMkeQTpV0AO4/M01BIc3Z80zcOQJsiiC7aOnqzhEjAsBud6fMniA4hyrBub7vj4+PFSlyZFFmdn48Oztzzn/8+PHk5KTv+//8619//fUX+ex5COH29rbrVy9evJin+Q+//1ZEgvenJ8f3N9dtc7Ra9U0If/vb31Th1csvrm5vT8/OVPHufrvd7sZxN0wTOjfN07u3r9tVf3r+5OjkHAEcoSrHKASgzBzZFgwSVRnUEOc5YSFU3iEBCikKigpHSWhfIuM8A8A0DYhI5Fg0OM/CrOK9J6R5np0PABOYZ7VBQjeOY9u2CiAijhwSRY7Ouc73zIylPDzXOIhJpNp4etBpCFVYvLYtag4tv60puTY+MDXxsZiLSZTjXxojM4tZuHleKCCpsIE1x9JVYCdHBRHlBOOp1o6Xr5Qux8wc2XkpGghyG4RCI9rN89g2Xdf193dNEzptJmYWjsoJDCEFQ3LXWO4geyTLZS/TTKmTwPSGEbOxLaHmqnNJCwrYXTgAACAASURBVAWlpMmWM5hSJNMaxqGcMBkAQZFTCSqiiqIQEqsxEYqqKQNAkBx5SWWkidnzbpAVp+ZC7SK4qsgBGYhpiiuAfyCIH49lwwMJVdPKwWH4GEnBA8fi4NK4v9w6HV1vZSk+KwKx1gHlfVlbIXH7CZHLP6wkZq5sB7B88YGYTpq1QpX4ZF0sQIpyHUjket8ONs3+eqhRoMjpB/07B5v5YJeyZEZd8hlJj+wdaVU5IjYexw45jGgp+ZzEzzEuzLtQBw1AEUicdZ7Y8L3MUbaQjF6FSCGQEJGyaIgjAeiqb6dhGHYsMTY+nJ6cnpydOucE0PswRUZQ5vnq+vry8vL29vbjx4/M/PbNm+cvXjbejeP47PlnITRPTo67ruMY52nyfT/P04vnz4dh+/TJuaj89//jv2+H3fsP7zebI2Y5Otp88803p6fnm83m44eLcRrRkXd0d3t38eF9aJomtFakzjOzCM9RRNgGLCsLMwiDLulBTT1HKdHNLMIS4zxPbPEQIpqmiTlOcXLkrNAoZiRIBd0cHXvfTDN750XizXYWgvV6DQqi6n1Q1RACAEzTxFGExDnXNI33LsdGQVVRRAva3wOZfkCc9Z+Psv8BWZa8JSEpqHPovRNhpw5cstkRUw8tKmW9n64VOTJLyW5qKlZnADAEKdHU/KyYY9m5LckiMA+Bz5wDBR+jbUbXd6u+62Oc4hSIXOoUFrXWKVLM+Ah7p69vFkvnGu19paxmMikqEQGipPnHoDbXBZQzHEWyHiV5VFHEAVjmxhDGsQqNgJJdLokkRERSENPlCtYZtOThxGZQp7UWs9aEQ+Xh5UeHiCKoS4oCENFXm5h+9vDxH1AAZeS8InDLSeyNdV9wSQFlm9FeDzsPy97WlLoogOraeRL6I2J3eUJJRyLkDo5aVdTHFskOGZcGNG0dAECF15qoBf7Z5lT7kMcV7DOYCGsxfqlCfwX7di+EBQ+0UNnwg+uC7l2oXKLeJcy1jDnCaNY6KCimzxURQVKYqHxvNGiyPpn9hTH29ZkFORAQzMFc1ESyj6w9G63vCZFVlKxwBYmccNxtb2Pkk9MjIoo8D7vd0fFJnOeru61zwXt/c3MrwsfHx03T7Ha7Z8+evXj+4ub6+ubmZrM5Wq16UBiGoW0a7900bIfd/TBsReLnn7+4uLh49cWr1re77Q4Erq+vn/U9KPR99/Lli5OT4+12+/PPv15ff3TKzPO7t2+arj9/8swyd4IShTlOIMKpEEtAouaJ1AAQI5cp3skKInTOOXKTRpOJNgCHWREBKZ
uuqjFGIjcM0bnh7OnaeS8SWUUEbLxwExphadoE8ykiMQrznB6RqiPvyBUKBAA0AKgH/FLzbKGTmovrV/lwj5xyGWGRTYaAjSgYIbo0RFczAwobdhrEyMzRkuQ1qWpOtiGiIwciZrDU/OucC8F7H5xLs32r/iwF51TYY+jaFpTj3K1WG+Yo8zBP40yOkACYcoGa3adLsnTB0qlMSkREwb1d2mNGAHP9AEAx9T/ZUCBZuFpTCYyJiNRq4BCEGJTSOA2TxUJqE41VQaNY84yiijAQghq4dmYmsFAtmSeukNOFomwBnxSNtQImW3myfZMAUECAvRzApwJhB9TzKA3VX9lP3F5FqWZDMDVG42IFZ1n02Hnsb6rob1/SQXmvqjaDwlw0LAEJBbRSubxTeYUAaUbFEsZIwT1cnOlEKWrTKzCJ9/3c2rKn9h6Ss5aLHzJUlEFXVfde9G7WEFIq9w9244D4yle17jmo87MnZJaJ0SXkPp10r4gpFZycBit2KKyQriV1nehiaezHAQyTPg3pw4KVq5rSkcycctI5GyAiJv9VdZqmu7tbEbXW/6Ojo6Zpg28EYJ6m3XY3x7u2bY6ONt673W7HzOfn523bvvn19W63e3J2ttmsLy8vf/jhH33fn5+dqZKI7Ha7tm1Xq/b29o6IPnz48OXnX7X9yg9jcHR1eUU+eO/Pnzxllrbrp1naNgSCmWNkvrq6Wq035DyYpw9CCFFFJKpgcKkewOIYJvodOWZ1hMLivUfRiOI5hCBJAXjvvWdhIPA+GHJxEwJHRnIheFHZ7XabzcY5JwKqcZ7naZoBSccxNK2BPBfNASDe+eCbZLKq2phErXzfA1qt6ergVbPYA0ONUuWiqBq8T54JUJ1ZANQBKUohNsRUdGBhn3lms0Vs38pi8uWwMKK9vA/ee+89pHRXmtICVZMwAJLtZ8OizD42oe26Nc/zuNu27TjudjMQlMYwRLRugCLNAPItpy8JgRPJF5pPx4mwjR22x2dHKCirgCILK6WJm6JqlfvMYtKXFCMLohKBE3XOW52FlfdzkasIDGb2J9s/eSUEoCmtYautJacuVqy6Esy2pSvluFDC67AD/YFA+RRNwGMyqEjq+quD16KiH1yi1gGPnqS+9EM5+MhPqr9KrrI2ZhAQq65uU8gAALCITgRUzLVraDG15E+kNQiYhw0AuVonK3jjMoQl+yapyk1VLbJph/KhCZP4jchn619LqU+9afXOZzhAu3U1AGxjnaXSJ7G0LE4FptIc48l84lxulBSDHZz6t8pWG9RhveWcfc6Er2KzZ2G5L9uGNKxIRJUVgdATIUfhmK50fHJKKm3bdl2HiLvdbp7mOcZ55qOjTdO2HOPd7fWbt2/71er8/PxovYkc15s1SxzGYRh2l5eXjaN11+7u7t5fXIS2iTGuVitV+vHHnzebo3OB/+vd/3Nycho5vnj2ikXGafzp51/fvb/49ttv0TfPPoNhtz056n1o5shRdLfdtV0nAioCIKycDVUwjZ5eKgpK5CKoKBMFh+x8AFGJESB2fZ+OjNGFpkHyGsvAW/Ku32yEra4Dpmm6vrlerzeOSETu7u4jazvPq9Va5AYAQwiqYJqAWdQSlaIZp0HB6lqqwddVDfHe64C0ioVYKK1iMRttIhWv5PKYKperykZujghCMEGffi8swsxTtA3LgldVnXMm/TIBonMOJfErgAGpRiRCTDsG+4YXAJJzbdvbo4lx7qaJ5zH41lEgcg5JKQUxU5lMZcsSpCGmpvuQDAlaiwjRZPGr5Q4Q03yYVJINYC6NIAhgaXgBAGA2NmcAjwm8AQCYBUg1t78lNyHBY1CBnDKOTVVoqqQkSSOhLFETSLWJaAI/dbqZ2gXFXJCkVjoVDaYJyRH5A7H+qA44eNU9gYfWekVGtvpcQ0KLqHog6B+e5OFhxar9lO55ZPGYfCszEyzWUORlFo9aS3/j7RR/y6hwBroLqob6a/Su2WYvi1lo0dx7RcklW8YYxjCahStmHZDXD6mTDFT3xp8damjMBkkdCV2STnUPhGZPJ0cGVSWjsiTjokYzTefW8n4PLaAUey7BnbyXCTYdF4y+csb9B6epxMHGdysDgPO+bTvhSZUV6fbufhoHgz8jdOM8hxAA6f7ubthuj46PV323vb97/frXq5srUvzss88EEFQ9ue5o0/fd7e1tE7wP4fj4GACCbwDdf/zHX/74r386PjpBonE7xXleHx0N0/i7b7+9vr65vr7+/NWXwYfmd820vRcEN89TFOccIbFYuhJNsTkn2aqxnTR32wEhWjkVIpEHcgSgIAwKCqFtnUocpyRyfGtjza2r0ztHgSh4Zo7CnsI0TTbba5zu7+9vh8Fvt0PXdW3bmlRFcs55mzSb5bHtdoqzcZWHrKygR0y6AwbcZ71iQwCYCKwVgCiCWsVOVoYiqZQTrZfdcrmcQUMBQJLtjzY8J1MaIFp9thlQ4MhZKLwE64umgUr+FIGDYDMYnYJZa+hdYzOIrPRFACynUMPHlK1LW0GWIMAqrJ62yLoAVG12MKqJ+OzB1/sCy3mXFYoIp5aldL8AVZmQYblZuRhqqqTJwYiELoAGDqoAkMd3amHVxM9m/6e6H0xx2crLKTLHYk2LB1CI46EOeEAxe0fWPzn4bQG4UKhJ6vD1Ty5af/4pZQPZ4T0wmYvblh42YmESzHtn1FafTdXAlzApAyAtaQGTcvlWSsHl4f6oWiYn2USGD5kDLOm+MtMufVvMCzLJY5uPuZ9t+RDLMcvBFXcCyoERl1tT8q8UUJcbfLjVixcp1TPce5Y5Z6UACM5sKzEjaTkbaFFLmFxjEQYFU8zOuYjIs+ziONzfA6gPvu97Ve21C00bQnhydhrnue97QwoixDhNm82mCW7cbZmjlQiO4zAMQwh+HMeu77fbYbU6evnyFSLd3W7v7rZffvmlD+H7H3/88ssv4zzTCr949cUwTne3t6vNUdt1wbtpmpxvPbMqiAI5x1FU1PvAzA6dkYUQokWVTXSpKqOIANqI2jQsGZFEFckF8qCAzqFzmPEB2taLiPc+NI3VQU5xnqbJdT7O0dz8cZy9h3nm+/t7Zn7y5Ml6tYos4zR7wqYJ3vt5nud5bnM+icjA4AqJ5AK4JMgQi3nwwOSrrSvjiIW04hK8X6aRVGaKiVcgRXSUIRZUBHOEw45k5hjneY5WlBGjOpcw4GBxWFNtu83mNiuuti4kAU44omU2uKqqACI68gBEQIieLDjJsjcOMAsGn8cFJvySB1uReTaZ1dbrZX1/VjoYNZWxLtJAl5+XuK5kVUqYSAAQVMpY7qSTrPI8A75A5s4iZEDRsuP5k6r6q84IQt5DSDIJY4yUNCCJKAgfloH+/5D+j3z16G9t5kD5clFPj/28aEv49Av3Yz5F2kJFrAeH1RpCi+ldEX12WssBklsoQa2wBRgRmVNzYDaikttbk/6ysHyAqloDqBU3FxGrqgYOJXnsUb7x5V7Kkgr15CtX73Fv0yrJn/4+mMynoE5QMwdZVZpVoqXsSKLCBOGbYtx1kBEq8z9FjCRbV
QQIQM7ym4iUQM8xQUCocrmrpbcAQJNocALgnN+cHAfvHWITvEW6EfHm9nZ7v+VxnqaJHBHh7e01oszT8PNPb30ICOAAI0zb7fb+fvv06fPj1ert67feB1E4OTltm/727v74eB1Fuq673+2ur6+PTo632y2LOudFbV4BAQVEdV58CPPMai65oeAjIToRp8AgDOwUZs0VF6rAIlEYALwLlh0WBSQfiGzGiJnrbds6lzJklpNkZg/adZ2CwKAiOM9RFbsurFZrVRiGgYi6rpum6ebmJoTQ9+uZJXL0BgVBJBmD1wjOoDJrZtkTe+WRPibylqeT6SeFeHKJZ2q7rwgOK6AeAHDkyHlVneLMwlZSUWr27Wc1bTtqoNI9Uo08BFFmRuZFQ2Sbr7qfRdQQOcJA5AE0AYJqmkwEddXpwU4kVVYy5/viKLWJqdUyRMODsP43QdsbRKymwwOYew3JvKNsJSASKHBUJU0LSEkAUAWXGi0T+A+jIuXefRP5ZnKpmKdUMzghAqVAkjkTmpr+Uy+nKQ1hAZsN79zeRLD6hg+k7cOXLsWONc18wo1A8zgWwNMDq6Gcdc/t2iPExdAoHunhCnHPOkWwoWhYSDOBwSEUaATrhoV0+5AxWC3uva+0NM2dUN3Lt0LVO12unKCIsghP6m2fZzgHB0t9anGtILNcLdNta4zxjOATPtRBAKp6dsnEKH7oMklmUQIuhefN660ibJhyJPUmW+sv5h7g+kQptpPsGLLAJFqFXOJlI5qq3QAAwPxpIkdt13uHIOKIzHJzhHd3NwDIHBEhNI1zeL/b3m+3wYcXz59fX1//4Q9/ePfu3dXFx26zWW/Wm83R9fVN13VPnz9n1ouLy19/fd13m6brLn76+cPHD6FtVqtV13W3tzdN1x4fn4jCNE0AbtgNZnH7JthdT3NEJFWwQb6q6ohYUBSZAW0yRd5kEYkcmSMiQkNgmDaSKASAAwUiZ889hMZk9LjdIgAzD8MQGheapm3bcRxc01i1T9/36/X65uZmGAZVbdu2aZoYY+ToXHCEIXjvvXOeFuMkER6iy895qdoSqSC/cGHPQy4z2q7GyFgOg0Ugl3gUQjVKxYzfSUTZPkg1skToHAGw6YBpmhDRuTIGfBELmGO2qiCSkOAkcqTofQBAs/kL40gyQVLq207rLEkANlHAuB6dcxK5CP6cddsfzbX/SqyEAHncnm2XWoSG0NoAC8CVVC362TlKHn+ONwEDW0eYJiRZly+OZJ5zTjKp9XdZ3EhV0TAMIHOodVGAiRaxvAsWLYZW0CIZWjVJOV0KYhTAi7mFVd1oWj0nEUkAuRli3+gGsGnFsKSbnCohOgSbgad5w6RUKCcKzVuZRJKIIAoSFJlSd+dmAeOsHCX/OCu6IpxAgRRVESzxoYiEYss0O9fgUS3LmxbgnILLEQpbULJxHrhC0RChbCRZLZrLpmU9ChnEiDPLRVVL8mki3ex1OjREBNCMamfxFE3nWdSA6Q+ptHWOVVZKwjwG2zTRIpgKeQuZrZ+rmwBLCsD0IO47rVo80HyfqTI7eVJ2PAE5oKDkAQxehUSZQS3tqxrR/GNLWpvUdAhogFrqvPKooHEe5q4NAXEadpdX1zfXV6z6/MWLELrmeOWcu99uvejT9doHN8/jetMLi0Ndde3p8ZHVfZ4dnaFDiNz33WrdrU+Pfnz9/Wq9/uo3L7fD9N0/vt+N09e/+dq7oKLv3ly8fPWyP+pv7++G3c3Z06fXV/dWbhiZrX0C0JFzGJxxuhcf48QQrU1VRCWiipvmkQ31QRGYogqz8Bx349C2ATRG4OC9c36a5xl3bdshqmv9OM0RAKLuBkXyPqwAVzyPfd+t12sV9cGvVisAGIYBEVerVdM0jpBQXOPROQoevMc2oHNCKT9J6U0GdVCllHLQXKdvzG0Pumj6NKQrpZRZVGZVLQlvI7PInM1JLuVGuSDKRr2QYEqQCpCAN5g8MwS8J1VvoR4jzygxjkzkvffoksnCzEigIt57RBGdFSaFRpUBHSJSkjYkikgKDiiQYyKTRz7YE3IeKfgYZyrIwaqa4sYYrSjfipwXV8KESRLfUWMyfQAgAYupqlWCms0InIO+mBHjKRUwIigrIKKXHCAQzVi/rC74lGHDrL8BGDh1C1gxqmUG4owOrd4XxWNy3AEAHICluS1VqaIs7DEZwUlEmKex+B3os0jdt6b3QiipBsl63IqYK3HfJKsWy71Y8eYHaBH0mKPw1XWKOAdUyTpKofYhNNcmQlKE5bf1yTClShCz2ZM1WjGxUU3Hph+kpGaa2VMMH/5kECmvhYupvi/3SygGABbBLcIlWwCgzIqo3mcDyqRqZcWbtNfFkNPCN9XNImRMc9jPRui+7a+Vj6L7AaWk8jHnAGq9++BaWCLCeVKNuURofhZ5M9uWk1j4I29q7nVLa8PsyJttSIQu0BTjzGPc8vb25u2vv15dfdxsNi9fvpzn6fb+noi222E37pqmOTnZiBAi7YYdAbZNO7pxGIZxnu7u7ptm1YRGEfvVSi8uT46ON+uju/t7RPrD737/8fzqzZvX3333jy9evTo9PZ3m+d///d9/8/XXm6MjYR2HyTlnM2MJkQhjtAYlyhCKFutIxJUJgESiSX8iBCDOAjNynGxsmXf4//L1dmtyHDmW4AFg5u4RkcmkKIlVquma2d35+v3faOequkfaKolMZkaEuxmAvYCZuWdSPTlTbDIV4eFhboafg4MDuLISUUqJBKrRRpuWiQulWtXMSi0GzPNMU8MrpmUaz+7Dhw/cf6ZpapBLE0IIdQQWbgJfbm6k42HB3dtZjpOIICY0C9V/zDQCuhbZRwyy69v52AxHHufYKuEMahMaaYRDZm6yvs2AMJHkzO5eSomN4K2UquqGAjQtNRJmmXjKmXMiZtPG1B65fkNDWxSD6PlKSVJK05Rbyu4U1XLqqf/gkvZsgOIiO5DUm9hbKdtYo1UNhF2yYifS9aPqcfb7p3g/XHso1j/EiIUbR93iVqxZzOaAx8J6E5tqsXCzGzTsa7uB/XQzARAI+mOiLoXdn7IJMQiJD09wf7rxER4vHcCLjzfHsx7WoRPZvYNC1oNK70+o4Xrjl0cLi4BiiN/b/Tc/bu/f8uafZm7Q3SJT+PJIr2KpIl9hIOa9tf8RpDnzRoqXd0a/7zPqD51GCoI//2nJC/VW5HCiJHGk9yv2i7j3XsH+3+zITTKzVoU9+DzqmfJxQY7n891jfecvx3d857UOR6C/8u0/xwvCe8BjfBYTMXXZDA/YR9VM3SIUNnR4UphjVi1RDIZJOU8w0UIvr/eXL3+s96uRPT09LstC5H88//GPf/wHiJfl/PT09PT0YZ6zma3rFiZ4nufLw+X569fn5y/zPP/6669/+etf6vNzytO8LEny8nDaiv30049m/vT0xMz3+/3bt5fffvvt//mf//Mvf/nly5evr9fb3/72tzD6pdTX8hrpczu0bcGF3UnNHW6uClXTlisTk7BQqeYIFYS61VJqdffb7abT5E7mEt2zXlTVUkrzNIskQMzWrdQ8IxFv
W5kz11q/fv0aipgppXjWAQHFI2BmSUlymqapvUym4YgBmNVaK1HnBe7b4/1e6o+VgOasjZqGQeD+XRzbfbBu9neNiITNjIVZcfi4qEQQsNS6bdumtVLHf0Symblb0eptUEnoh8PhxE7MkjPlNE0LBfwzBviaOo2dHOp61bTzUd1LqXDEsEyHmnnQrtC7WQ9B27Bd4QbIuRsr2hsa2vfnY6wWVD9X0xYJNw2xvgJEQNv86Ihto2c6GFQdbCYiMSprHLfjKSYiNw+5e2rqPm8MEHXFobbm5t6jxliLmOjQWwd6sxRROq7CuOk3lM3ucIbHdSAGDRORdcmXQyjalU2b67Ne4dj3xHHzxW4DAa5jC9JQQTpY2ndG+bhAzSFbPKeQHoGPuT/MCH5uc0YH19rSGIryr4MJcAwpRH9rAK0Vg976nnc/3jOVtrbEgDN7tyPxRXrSQAB5C9m+u1rbYiEB0g6tdT/vInmcxrE+Y5uO3xwv9e7iAFo1qYFJ7SSMf7YH0vxeu5p06YL+0dSQfIq0Kh5T1H3VTE0rPPJiHOIY7xkAiwhyugumOX36+Sct6+36UraVmXlKs9Avf/vb+fxwOp3neaq1vr6+AC7CWym1VqillB4eP4hAFQ7+48vX88OjXdeqdr3eltPl65fny/nx8fPH3377zcw/ffrx69fnaT799ts/f/rpJzW7nB9evr1eHh5yzufz+cuXr9fr1dwbN7efHAoksXVIxBo21TNmJhJJUqqva1XVbdvcjEXWsqFAVXOe5jyJcK16v69EtE21STsUfX55ef727dOnT+fzw8t2g/u2bV3pIV0ul4Gw9/AfDOdoME4paqTjxwdp0t36gR+PPow7RjbT3nJQBwr588OU0OO2Ga1Y7wxI/F5SHuhlhP8HBxMUASGAwCIctZOpz5wBGA4SAogkZh1LTjNzc4TNGpqZMVCdGLS7BDPbtlJKUdWQvpmmKeWkJjlnXdfxXUbJmnoGsB+Nd80ufb6xB63j4D7jCJt2MQgA2PuHG/BO5PDRHRkkVtn754mjP7/Hlc1udiCknTHuomTNu/s75MZ7pZe9NVq++70DXtUOmTredQIfHzDQ8h+8MUyMPTd5Y3QO79aDXGj7v76bmz+zQR0jCt/TTPbRv70NPMfbh7EjGqzNfr3x4pbQRLlpCG5jJMLt9sBoTdOKP/s53vkxfDg6JBy+Zrux9kEYLTntpI3l26OoN8/yu4WKsKH2nkCjnlEdb+bdKtHAbf7MYx2cXDP3PX7f19BH6XiXgdvfPkrr3EatUeRiiK5mVdVitZrWKF5IEiGJwR++8zxaz+M0zUIMr1aX8/lc1ntV/fj08evz15frXbJEgLltm6qlxDlNlUtKqXrR1Yj46eMPt9tKzJva68v15798Ll+en79dP/9l/vTpp19//Y2IHx8//OMf//jrX38RSa+vL8uySMoT+Hq9nU6nl28vT59+EJHHxwd3+/r1G3EiivHrGuSXFhFRr8YFtccQGYMIi8BsazYKXk0NKNWLbbWaO5KIw1St1rpthVkiZaql3EqptT4+3h8fFtP6+voa7cNB/z+fz7FjWGSacnO+gdVoE5jEoZ4XUsDu0E7B3OODlvOpOhqDiwggc21qlGMPd3Pphx/rQ9tjMxyT+z89OP1SHKlMqyVozxJAvVYBJkbU7ohixn0kN61kRgyEwa3+pmLsvQ3BVGugFCnlaZ7VT/O0aC16KFegxzFHeBbdK1jnvhFRQ36owfPo/I6u9xB/daYoDJtHFNnWGv20R5MkUef5t0WhdkV4qwGG0QgHHEtxtNA0QmNQNPsdF5kbMMt00JUBGmg7IKCwPrF6b1hA+xvePEgaEpnjww5xRgxZ1uMboxuw4SuHd767/vd/fv/P4+39Vz87n9JaMR5oasUWMU5r8DvCnc2LHr9kl0Hu//kwUnGvxH7nh/7k3jo/irpk39HMD1+IPcZGjG9BA0Ppu8wjQrHaMEbSpmH7PhHc7/zd0o3vcjT6gAdgPVxvrNDxuw/n0cdVgPBeKLgNp47U1gGYm6pWK7Vuq9biroGkMlPXiu2HqPcqJ8mW1KqnlJeHLMyvry+q9Z///OfLtxdAVHFbV62llsJMZmDm0+lhXe9mmM+pbNv5vKj5H8/Pnz79/OXr1y9fntetTHla1+3Dh6eXl+vr6/Xx8fF8Pv/662///u///vLyqmYp5ZTyr7/+6u6ffvz07du35+fnp6enx8cPX78+m9WUcnuObt5xIesDT9RsXbeqlZgDMyeieBUAM69tzsmhRuo+TZM7Mafa5wqYaUp5zqlum5aVfCbCsiyhf0XkqkVr5XmG+3ZfCZ5zCtw/AsYWrPv+6LlLdY7n1Uaj9MNoFjxGEMH3ZzpioDgwb2KLnPM4Cy2aaWF+ww9HpTDu3Hs0GlgoUQo5CDjMtM9N83Za0cgqziwp5ZzjyuHyj2fQzbR462e1lo7UWs2s1iZ7BSdhmfJ0Ol2qllJLjNbZfdsuK9mXhPwYAsathdtQU3MFt44eREjWOv31uF7j0I5VEuMa+QAAIABJREFU41b+9CYq0w4OjxWjg4z2aORq/GOg19MIABN7k2n3wZ9iJoASsTIBzsQK9UECdXf31MoINPw3Ae87gY+G+E9tXH/wPELadwbo8M/999av9q6OdPzE4xXGZhpPBn9qag+mDSMq676Z+lRb7wxad6CVmt3d9ydhHqpeDh/H492yfH+f75bl7Ro6HMGBpEM6ubuBP3MnrfeY9qbCOMMtgaZIvxwkMdAmdL0xEom3QNC4q3ePNbK0VsgBUx/w0mrxQAPQ2mgA7oQh6jf6XaoxPstAbA2ksqhorlo2oAmfWRIzG8OGmTheTAR3ChB4mk4O//L1+dvz87atRPL5r7+Uraz3+8vLi8OnlKZpVi2vLy/uvm1bRHZVbS3bVuu2bv/6/V/zvKxFc55ut/V6vX7+/FeiX2+3K4Aff/zx//1f/+sf//G/f/zxE5tdr9ePHz8+Pj5++/btp59/ci3V/Hq7Xc6Xy+Xy8nKNU1lrFXE49x1qx83MxOHLmCjAbPMa8JdGLQTujqoau2P1teM2HDxO4lA5YADLaV6WOecEoJTSdw+rFtUCmMTEXUYtdQVYskjaOeh7iBfm2IkpUjKMAHNXB/F2rMG9kaBtmCBPuMPMAl/yQzTz3e7tv0fIBHkvUPWSoal6G0pC8BoN1lqtRdbokQaJcBP+t3Zyj6sd5z1U9uJd2kR0KJDubmrcow+AWCTlNE+5VNvzhqMb867vH998OLbjyGuDtsI5vN+Jjzipf25cqitrNra0R/ZMjZsRxIc2pTb+jKwoVtC6xGALIYmYxTpNK/rA2nEmkDe1DPJmZt1BgEDMzDtVj9EIAEHRbqmje9r99p8ZvnePufnVHqXu4WFUVtu6uHuf3uD7G99dfPDJ/s+G9b/6zbv/enAqiLQmWmZjQd3Dq9MRaicimHZ1A9/r+YdbHYF/3Cbt6H9zJz2q3d/X/mqtENCt/598I38TGb2JSg4BSU9LW+U6ou83IOa703hcsWj1GlAsguOBeOtAjexdwkGt3B2
5uTlkv6660w46Rdo5GBgePs/MrLpuWlctW91WN6MsKQlXUSZxIW6djIMnVKqqmkiuRb98/eP567MQffzh0+Pjo6r+85//XLfi7sw0z/PpdPry5XZ9fY0z4+YpTfO83NfXdS2ny8XMX6/Xj59+3Nby8nq93W7m+vmvn1++Pb++vv7yyy//7d/+2/PzF4f9+OnH5XT6z//8z7///e8AtnV9eLzcb9vt5VVrrbUCLiwppYjlmRmGYEeaq1lVraNX1szUyM2g1VSZIMK1bNtWiEU4MVDNRDIAwIRpyss05WWZT6clHlvOWVWZ7HSacsoAhBOLmJmpl7KJJCG/X2/TaV7mBUS11FLqlFu1lklcYE082LrBCn7xTqbAvtMa+scgdRvg+DibYf0HWjICjqMNpRHyox28tkslmZag37hpKVsp5Xa7mVl0u5qamlFiZiZwwOyA1VrctEFGqdYumwqgtZJJ+/TRgjtcVDwy0Oxa3KtIijyPLI1ZlXHPw3A3O0H7UYpI3FtubLDdwI0f64oyxyPMPIKm5hGjCE/eZ3W2cjfFqBw5tiL0paNeyY/bI/BhRni3yYdzP5CN1ncJJ97Td9ppU52j6QA8jfDvyHsfD/6tWaEuj4P3Btk70RM0eLENzSfxLmAwrjLW6/gR+855e3Vusn3vGYr09poAiGTnGvbr+KiBuIVIIQ6V3G7+x3ME+RteV/t6fStQ61EMk0rMEnnbuxcHoyfwp8jcvvNhkQ73bWRmZrENrS3vkQ8Q4rtoe6u9YD94Y1XH1fd08uCDiZpEe1whEs93/tc7RhFXMCLDm5ZLDJYbEbGQgfuMge5sDaZaqwcNvq5uSshAEhFTJhB3raumDtqVIe632/XlpWq5PFyWaT6dZiKqtZZSAZzOJ7eGsBJRKcXdP3z44EYsfLk8/PFHBd1Pp7Op37dnJl7Opyf33//1++e/fv7w4YHgU87X6/WnH3/8+eefX67Xb7fXpw9PKed//etf5/P5vq6Xx/M0521dI/QWYWp1r1H1aYyDWqPYWGtVNTUHEWqtZkrkqfUOy5xz3UothRMQ2qJkU5qmPC2nOaW0nOZlnk+nU84yLDV1+ffL5WGeJxCbmplv2xYAYPRtq6lIyjkLS5BRIkANbC2G1TfDbU6O4Mi0LcjcS5IR0oEIb4xRHO2AxWOLvg1N3p1c9BQxGkDQTQvgplVr0Vq2db3f72VbzZ3BbrZtpdZqhPgiRAJi4RTgVdwnqblpEqrB5Ou5exNhFQEoBje4ExNRToQpKUPVvE7bVOtcSoFeiTiQot10NAMBCoHPnbDeLfobm7/L/oyv73uqbeaeYh7G4Wy2YkloWrSDFyvmLcQa6Vjk4mTe+5a7oWo8i93ocSP7Uv+nmzFoTHhlkALcHyYTUaNuNxeA/2oi2LAXXdSFjr88vjAORRhNinSvWz50gIreZgDeI9/jL7/fTNSD3PHPgz9882KMChVFL4D3BaE9JI/P6jlyGNmA33BsVPWWAMa7qL+if9gu/0BEb4iw750Te++wosPZ6FfeB8uM3LZdIQA/a6qFhwWnRrN54/beu8/xValvx/Hn0UO4O3HDBw/twTh+nQMN2dp2euv4x6eYuZNx/45masGO1GJRBDYNsElTpRrE3ISuw2G1aq2Z6brdb9fXlOV8WVJqIed9vW1lm7LUymqaRKZ5TklqLc/PzwCenj7Oy+wMYvr4w8dpnq7X+/V2JaI//vj9p58/f/78c9nK//fbb3//73//8PhQq359fla38/n8yy+/hGTQw8P56x9ftm0j4Pp6XZaFmFUtWOqR1MQIh55Wm2oLZksp26YxOBAgczXd3FSEHV4Np9NSSrH7OvrPzbzWcr6cHh4vUeUUYXjVqg3EAEQoieQp5URMlJIYs5knYXekJCQCJrBETMBvhaQizFTbN9ggjYzT13dyPHfrxQZyh6kS3ngCP4A/x5IYvc0D0A9kAEeBy8fk6JgEUGvZ1lVL0SYQ5Ou6qmpxExGz8zTNRB7TN4mImM0U7MsyAyaSOYbPSMo5B5LJzDGfxVDN0NjJADPZNKmdtnyrU922uxVx10GebrfdViHy2EHmgY2Bp7AeVfeAcnRHv/UEQGvvOp5b6nlGwyQGH9+d0Zuwu3Xvlu7w91jkHWjB+Lh4rA501UIbWAOHUq/vmQEdEoZmTfxYA+gtZcNaBQQwquF47wOod064H25oGKBuwZshw//5Z5ht6npl0RbbkDPqNxSvbV1O6G9Bi6h5WPFuHANf7o/H3WNoT38/de5jewHtBn//mh2mcH8Dp4zHcDx4TC3LiELld+6Njpcd0dm41IAUqbdTeWcrUe+yORy5N+czVt4PLmrszPbt0NpUhkc8uvb9UfQPiks20oK7m0cJalw0trVbZSJt3G2DReC/aY0x39VMXcHEpVY0kNfcKSLc+Gq1bmblcjlN07StaykbEebL2YxUt2kS82Xd1svlclomdjqdLj/88MPXL19qqTlPTFK2jQXn89md8jxdb+sff/zBKf3IP/1f//f/+OPL15eXFyb64eMPkmjdyrptz8/Pnz9/rrU+nM8C/PNf/8o5Xy4Xd5+XibZ6vd2YGT6o3eH3Q+G91lJq3Wqp9/vNjEqtRJwnhrupCjOlZKVMOV8uF5HW6sWhEupqqgKaUnLYtm3u6qosxEwiklPKecrIDifykHsLTWWOzgsRnjKTkDR+VFTaxwZmJoEAqLW6GYPHqRmbGfu32uOt+F3YwRB+CPJlHEHT3g/Vd4Xj/TWDTNCK5WpuVWsldwEJaEq8OcpNtdZSa60acYMwWS2bGcXMAxLmRFwDt1zJiTDPnmWJHAsgkSQiTMnNDbWYkkOYGUwpmWUJBSArHI9le45ZNNxd1Ejs0ID7neoWlsN9/4Jj7qd3bNa7uOk4QcxM5nEbYWdD+ZGciImJmchjfAYC34hHMJDV1sZ1PI/xeAwgCMje/ydvo2kiFI8AIhIK6iUT5l2ekVr0Sk5Ig+fbrNdhyEAYoXfwxdu/97Jrj53je9COpHmPo9/hPD5QZnRz0mq2Du6069620Px0E0wNZ9GoBv3z2+2Ov0Y8Am89I3HxBlR1H8iRno4wv3nlUI/AYWiXO+xIfLL2ffYvEGT/5sYYMWUrmO/702rvsqYii+Y5qZdM3ji4nYM70JVg4sTfmjZZ26ut0OS9L737NEPkw93QuzeEEANHISI+bl8cHPkhpmsqJzHsmpq7JuKQv5fWQNfgNNMajAmN8a+mleAKEyGN2IwyesHdXNXrWqrkyVS/fnvVWj99+iTMIsmxprTcbtdSjSlPeU4yaVlPp/Nf//q3x8enaHVlZq26rXpfr3mameXD08fbfX15eVmWS07Tf/+3v//r9y8gf7nd/+3f/u35+Vnd1m19vV4fLpf7ejtfTv9t+eXXX3/99vytlgIS9+BuO8iNjKDM5ObsSm7RuG6g4lYdarbVCsApMRFRMiIHolfjfFqIcL3echJ3iPB5Suc5R6PWpuX15bXWQo2QTtM0nSZZFtvWSk89qARqKeu6AV51Op8fsmcRInhmCFlyJYspJuQuxEy0MR
OzVkf11tbrIREYnXgt0SeR1FOcPZzrjDTWotTFbBCVJD4ez84HMXNQ1dqSZVUzIyFTLVrc1SJYnWaoQ0xVnYhzItB5Orl7eDZuXHWOdkgSZuLBCHIzcreqRpWTEKV2+MwzMWXxBqEDRFPOaZoIDHCpvuaLKpm5W2WKEh2Fy2q2P6Ry2vEeQNYQqZFgGwWA06FP9KR7GEZ0qJbQWVLdzEVhu+dd+0h6p+BB+YCDWmOsNfY99c5qaTNo2FpQS7tAjgUGQi1haQEl9r4qd7gaiGNGR+oWJuAadJJMLxTgT37e+oD2p492SOuWrrvNP/MfQfQ/Bsg9im9OMZYu4tCOt/B7IxW2LSJ5H6H+HvC++cSemuw4EvXSwvEOB9aPkZH1EKA9TduN//BnAB8rMkykrhEEYVRrxxd0r4c2jfYW7s0gYY4H0HT4Ft5Lr7TfTTz5kenDuzYXuRusq4FH09Cx5t2USNBKuXwEOo+xYfunkxO5qblFUAx3I0qgykoiAHsIIJtp2dyqVi11q7VYra7VKrsh5eqgSDxZBO61Vqu2LKd1vV+3sm3106dP03zmGAd2K6Vank7zcqYQagRxymZXIv7h06dt2wy0blupFZBlOVdTgEpVNXt8/KBViVgVHx4en6+v0zJ/fX7++MMPt/v68PCwbet9XXOS+/1uZp9++HS73lcmApv5ppWIJEkSAROLwBylDl0Ugxso5anq6pHvEgGUUibhotXMRbhWWuYZ3ijLSeQ0pWmezPx6vV7X9Xq9ldLoTKoqKX04TQ8Pl5RT9fpBH6dpUtVa67qu7rosS8SJ8zwRCbnzDN1QmEJeKWUIJTgxsaTkQA0uEvoMQQI8yMfNzO3NbiAgvIUD5ETmtSEIfTwLNVGVlhf2IwmNOjgC3dRt29RqmAM1jdHKIJnmE3Ga5jL2dtHWHhyuiPvY4ajz59xanZk5cQzSQWRaq90BqqoMmyeWlL2T54kITDPgqhHs2XYF2LRuVqNz/YCAxJ/U7SE6IMDoBcIx1C9+bEROwwCiUXrCrHcr8h1yC4wSgY+T3gxIDCGOjb4Hp91IjSg+/tfe20Tg1FpQ6A0y6tA4xVQSYe7F2nbSUweHOL7xHqozgbhh+/0Wu014YwGpL2L7NOaoS1AjV735/t4w9jcXQfN5e60RbbvFRcbr93cd3ji813tv5V0wFoiqRFv2/S1v723sA6Jj22cDhujgGHbrP67G7WsNixyG9M3djhi/Z0DH9KglF+F/GS0zi08eoX7fTMdbbltz795ujf6h4G9q5AyKEdt9jxPDEbXeQQc6Pg4a5Wk4jDCoouZObhqJGhFQHM7Mnno/AcxUa0EtWu9lW+t2V63uzMweKoG4ap2ZY9iia6lElHO+3a4vL6+Xy4OI1FrMLLOczxe1CQATpZxrKet6C1zser0+TXlaFnds2xYL/vj49Pz8Rc1q3a7XKzP/9Onxert9+pFZ5EIMwoenj9O8LKdzTtPT09P9doM7M9dtc/dlWVJK1WxbIyTnNrqLmDgB6iIjcmNmYVF4TlPOEzOpRrOek3mE0yJZBMCc81RraDOAJbtTtbqu6/V2K6WqmmqNmU390ZVJ0v12G8N+AWhTRRYzu283tRp6EJwY5F7YAdFqVrNNLMLMiZmSBOxUzdrwqiZvgHCqDnUUCjp/twLRoWSoDaBANLlS4Hdjz7Q+pmZ4nLv+dySNNEq1LDIJEYskMy+l1rqptkExyZsuJRFN08ScRgmBiOZ5FuaUcvCJOzG/QNsI0ihpqVpKobAk/WWmBNV5qts0l/P5vG33dZuqbh7QDY7Hf8eygKbvH6QPAL3433VuutlCL8M6kYS2FfFAa7lrjo4WVOIIv8PDhM1v9chwDTTqw94QirCvQKgKNOSjx6X7yaWIzDoFqR/nsa7ko6zYHWQilmb0cKx+YJfQPGhcdKDiTXrQjVvAJhHGhgq2H23ywWrv7mS/y/ZtuiUyo5in1OP0OG7dSu7sTLz7mLeGzN/lH51ac3zL8QXujp5/jLWDxR61bmn3d/n+0rcXaUH0fofe0sBwqDhc/809DBDWR52jYzCR0zLzkLJouwTmruEpPTx3zL6L4ZWAExhkDu7nui952PRjWXv3zcMHRPihTdy0ZZ1M5BFJwN3E3CmI8GZR1LV6r+W+3a51u9WymRJzylVrLrkuKRtLSrk1jed5Aurvv/8e491jlm+sWI6THBMAheeUSi2vL7fQ2TdHiqkGQFXNKRH5PE+q+vHp6R//+I9ayzQvaZ5u9+3h4YHU0pRTmphEWFTV7pbS9Prybds2d1uvVyaap8ndS9kYlFJOImFziUiIKghggIVT5glzIiqVzdQMlVlqtev9plpVjVPw0NsIQ2bOaaq13kuJ5HyrtROHME+TJHH3nKfTOc/zfD6d3T31CYjUG1+JyNysbqVu7saU1PTh4cMsUourFnhxn9J0EXRF6B57VlWt1a22p9xSU0WtTpRT6jPsPJrDrE0WkENgO/IAQsTXhzCWE4lHKTsGIbDBvLGZwyxyrZqzqk6qpbHOekxqIYzDHDLOUZoWEURp2ihNAEzVRJJD4cQu3VBEKTOG7sbUHYGknKcpz3Mu9zTlaZ7nZStrtZCCR0eUIyBjZjYLIVjzNtKy2/hjXHswHA1pcScRdPXJ8bDiK+8XaCsVBn0sZ8vGm8kwj4EqzTNEthkfGOD9YVxBz8jju/uo5oZ7b66qC3W5eTTjozkAGq2n7dk2AkpHqZvFGnborc85mtFhOEYV4/vQcrwc+O4X/dm3N4aaMfeEtA1Lax8xiMlHQtHxs8afHc0IAqu/e8HR+scr7dDrQOQNhmy4yh5nU0OrqBtKw6HW3d3TTpYILn9kIR6FkvbMDkWn9sDC0PtB9gdd6PSQmkRwFYtBDFQAUQoiIo/REyOUa4lh8xnH2sXxU45hS/hzAKWYuqkVVXUzirkuUaghZqBYaV/fovOpaC11u5X7a1mv23pb1zsq3CnN83w6LecHYpGUpnkh5jxN7vL6+vry8vr581+WZWHmoHiq6p1AxDmlPE0JoqYpz2kqoZtWDQRWqylPkgrgt9st9saHx8eff/75flvzPP3w8ZOkyYlSnoN1nvJkQK2qpcI3B4X4fp6X7fpaUwoBM2Ge85TzJCwBRYKkRGQMIJitnJlSYd22rdzL9f5a12293bZSiChNk5ov8wJQMF4A3NfVCVNKLERASjnI4CH0RkTLsiwnmaa8LMuyLCKpQyI+LzAzNZh5zkLMtRS1er/dcs4pZxcj48JuXkWWKOwN2QPr44+qasDHBmdiODucWLSW3jxk45yqwi2zcJ4m6qo03uUwgiUQhw1xcrgZwch4QJSnJJKllRkgkrwhPKWhl70KGjI+I4ZlRlUtxYTZ0ZxfYlGzGL3ALK1HnSDcHdRB44gT3K2Wmqea8yScPKwBwUWs696Tu0Gxj/OGwUICuo1eQv/fCISjFBcj4foJYhECwv3wHry8CfWYmDGqrZGGtdAZaFLRI+fvUVsHAN6bi/iP+12B0IYbR7NniBfBo/g0Dn9VZaIUjibmGAyoejemPezHIZr+U
wPqR27Md/H/Hulj2KD3yIO30RMHOSp3slG89n7uRr4Vq9K2qXuMv3gTiXtL2aIGTqPDtj2qt2QetHwWLSqKcQrtKzeRCUeruNABsRrg+/jCTQamO4KGRA0OhlmTxzuige3Oj5qggxjWovV4976YOzK0z5NpLqw5Hm7V72NdpDtCAB6Ta3qSqaE11rbRocbVCEtKbmrmbgRm2eteFN7OzLSaVq3rut629abbuq732/WqayGW2UvVcr9dKeXlfFarIokZ20a36/3h4XFZFgCqWkpRtQA3RKhWdd80GQEs+fL4AeBS7qqa8qyq83Iy8/V237YSM3uJ6OfPP79+u9ZaOaXHp6fn55fT+XRft6owJ3MXSXAQ8zwlK3Ura3q43LK8Xq9srRza+pM6sEogYgmryiRMSixlq7Xqum7rWl5ermXbtBQHRFgNpr6umwib+evLVd3u651TugtnkdNpXubZVch96L6dTqfLZZ7miQjLfKJOxAI0p6Sm7gQSycvpdFJTd0+SwBKhpbtbrTC703W2Oc9ZUmbORA0t2ZiJSOsGbsMrQjslxZbm2NUIaR0QpZQ4EYCtVmFLKTE31b+q2ieWx4mOjpkIy+AxsD7SJWnx5BATBFArBQrEqR3ezndCnCxmTpYACLOwAJ4iIxPxbmGpVarUAeHoqG7nKU6+mErKLEnSlFIWSYHWHCCUVsNEc2wAEUOcLe7fdn2Bbus7bEOAdwSaQULEHqoLJBSd9i6dPEfdY/aELOr+5HBzZ7SOXwKPzq2BW3T4YER1Q1cRTpGvMNxbVS+mq5BZnylAwg7y5tOa6Uzhu3l4zGYfOhmx/9lNwW4rmxVv8BThMOIqLFZETD0EBt6E228cye5UoyEIu7TIYMgE1ZeD9NADf3YAjWMrxHrocrSDTuGQvohS6Pd5iR9/Aibbca2g1mhoxMeSHLp/xw+hB+fo+VToEY45dkQcgeQbZ/kWffF9Jld0Hg3dzb5yPUcEiKy9cwyiE8lEplbNlJgZgV2wqrprfKb3Pq/u0lquMa7P7oI+bIBA1CW0WjqsHMVtEWIyNfSpQ0CkS21Gkqpt27ZuZduqWXUtxYr5N8lpnk/m1d1Snoj9JCxJnp6ezGzbtnmez+ez93lkzJwkQcQojDEJ+OOnH7dSai2munCutcyXtK3ltFxKraXcS6nn08WNP/7wI0jytJwu/vsfX3/66aeUs4HOl8dtW6m12ySZeGLSUplzzouq5mkiIoRhii4hd3cXYWFhJjUNA1dLvV2vr9fr9X5rbWuOmNKVREAMEgdf76/OdHu9v76+UkoPl0vOKex1njLMR/ifp0kBSiIiFSGtFIuPlDJzcneW6XR6vFwugZdE3wkzx6O431dhFiQwOEsOQ8kkIlsjkcq2srrmWeC+bYVAoUvqI2eVptomSUA0NoVZIOEtzDdthB/zKNjmaZrcPdQgUhInNOsDE5kkpR42eRZOnvr5JHdPKcU6ANFVZwEMdL4HRISYJbXRknvIgsxMIomEQWymMX8NADgRGCBzOMvAEhyRuFs07oCCJE0givlNPWgjgjg0dsIY+9UMJgBH4E08moC7mR4m0Q+9OMKC1lLQWHXUwS5SkDALGyn1yXrNRjSWJji8Q/+QZrUCTxLAwsgbszikK3EFPdSilqMWmBCnMPGy2yNveEMzTkFa3BGeYakPRiQwqUPTFlpGOCLNkTocA/89sUCnIh1cQjiXeAZ7qb5/Cr+X8ggt/zeW/V0y43tzAI4vO/4zapw9KN970BxMLQGI7Oy7Sx9iCdp59NYj9+GKQEQppT5ot3GY3vnCyCfw5sfHIvW78hhdENcJl2nB3PCmTU8hNkjOjFDcsfHgsN/z7sT6D94+8bF0zDu4ZzHkl0SYpY+aQnJiuNaUs5QZspFUmSy5bLXElwNgiApp3YrPvohQSsu6rs2XN7iZHSjbZgZzSpyCeigibk7Cp2lRs+CdbNu6bqtXv2/3NM3MXMrL/b6mnB04n8/r/S5pWk6nf/3xxy+//C3nqWp5eHxQ1VqKad3Wervf6raSW54nMfd1BRBSBE3c2KK+60TkRCJUi91u99fr/fZ6fb1et7KWWs1MUmLJxBRROQvXajlPDjK/Spqq21bK+bIQMOV8WRY1m+cZgBNVN1K5vm5RAYnCLzOnlEJDdISxInmapjxlZlYzrdVLjZNbqtaySU6xPyQlcnHp8yBBzHTfQqmJciYihC+JB+wAmxPVbh/bFjZDEmKS0Pxzd6i52iAZl7oSIedpnid31KIKpQ6GiOTRcda7XqIMGSf6IKqhdd91YQcjo6WdcdcdlYT9isov9YJNJMweQqHtaMLddY9/AI9UvwOyGMYaYVBiWhjHhrc2F2wYunBLOzEaUecI+Vg5nqbjYbbQ3xMios7K8kByrBPbI4cwAPDo4WhWKVbDHIMW2Yt7AHp3v7PByY2Mw2cBTAdBobhns0T7zYVx8RH/Dtvwxmh2nrCNQBJOeMMljxqzd32lbiKP8bIer2lvUftIPJvH89YKFZ32e2zynZ3qYfKbnqbdE7TSyJvH8Cf31vWA4GB2NJwudFm9GeLvnmjshON14iZTSv4uWHBr4BOkr9h+tX4eyN3acMFeium/74tMCD492vsDwQMAdYtWLxgTs/fmeBDMItG0kbP2uORPfsZdsQw34xEgNPiLE7GI5H5gOJhHpJQnRO+UuZOkaa7uRdY13KewcJJpWliSgUiERKw6QNM0xywnVe2JroBNUhKWqgZiQjIyQNK05Ka6isV0XbdlOX35/Xe4aSoXoufn5/V+l+lELNNydtDpdFbz3/86xuzTAAAgAElEQVT4Pc/Th/OH221lQcqZp5mI7vd7qZqFWBILzJvUPhEhgPf4fwZzjWp3Kdv9drvf77fb7Xp9dTgLg5NwIiYwO7GTmJOkvN1rVZvm+bauIEw5J5HT+XQ6n6Y8Bd5VVB2oVeEQRzRJ1VpDBGmaZm+/5J0JAQYExJIzcTLf3DQB7gkppZRYoieAhYgEIinoS7UWTrmULWD6JGmapobPUOOSxWjMFm8AiDGYMNttBCLQrnAmJE4sDfkhkl5Tbft8zL897rSWRrReljFmAL35PLig6k4pJUlCff5BU5SKejER3AXCInESolThjpjxR0ScYhYvRXJpDvJe0XVCSOc4DVK1xT9aUTaOS6Dt/dYjig+2UJwxb3gss7w7WUejRNTA3zZXoql7EqzVV6jdZi/PjoM+zF2HX0dIiL1uYETuwiFeSyyuANQZHB0bHVEweApR6VBi2TGtBnobaDeCx3LrbusPVsm9q3i2/+wYHMY3OgRHI9xD7Fjlg/yyCJM3w9pwM29jN4ZPeuddaYzp6I4qAmC4txkvhMMk2zeJwtiLqiUCC3ePqQxt91MKHK+Z7cMTFZHecfYm4wmd9/FBwamjKBb0OcTHFRiL2T2xNTIsxkVG0PKGY9yxwLHI7u611iQte3SOYq+6m1vTiQ3FT2aOyGns1FizUSPxiACZrd+DpCQpEwvACpIIzJjNjZK4uEjilCVPMi1Vi9Za19t8Vg7KKJyIcp4g4iCQ3G7bMk1h
fUQ4JrDbfhBRSYmE0Xr3mBKnnPIS1irufTlXrQ/Tcnn59nW7X5fT2cEv//m/w2YR85RnEqnwZZ5fX19Pp8v5cn59vda6CnPK+fL4QISckjATU87lfr+34bet719rLWqotZayrut9Xddt3V5fXl9fX9d1ZZEpM0GcuBoJUZZEod2aWFIiVUlZzd3ruq6mi5snkWC4x25wkKqv63Y+n4gopZRyKmV9vd5A7LDr/ZZqSTlP95xzZklpnuJlAJHIer/COTF7SiyJhImFUiIWUwVRnmZi3jZOU5Y1bduqWnN01YZo4DiaAmnmGAS4tdlS1iU5LZCFZqjI3U0h0thpCJRDxo6Sbu67lWuhjx84fu5eiFqmYtbgCw6rn7IkSVPOkbMM4hwRHOLEIofW1rBoTl1oSJLkPEkSYoKiH7phmPiAIrftHrl1v2SUgzjYMu7ODbMlboPGuZurVq9u2cvB1HA38p3h0+OtiJo6RZB6+dWJIu/hRqNqza1xkKKM4MOGYP/q1p0UiGDicKZumHtRMEmLBgcBKaguO30l5KPxZz8j9AsFJQxmWG9i7d+81wve+o/hzTrDwQ5r7U3R/z3c8t4D8T5J8Y01f88NOtjQ48v+zGoPkmWnaO3j2h20V3qPFwHexNLtAdgOAY0dBqCaUvdNbev2BQkuhcPaRNLDZqQm+dKcopkJyPntx9mIXQKysC48y7E5zKqbY5BZQUcm2vis46JZ72WTKMC4enRLeku/2yZmitooAKQkSVPKeVqKFq3Vzg8BQhE1YRUwkYi1Hm9xQJhTSlEG2GrRagBtW1H1nDNBUiI41HyaU8ozcU7z1FM0YtGUQvhXXl7Eyoanj+u6llLNSVVTckny+HBh4jzPr7frfFqefni6vbzc1xVuKaXlfI6u75SFk6jqulaPArerh0Ce1VJKrbXWbdvWUkpIUne/5QSwCIswEydJOTGzVp3nxR2lFBE2JRG5nM85J6tq2Yho02rmJFRUXeFGxNLgXxYzut/XAeRVrdfXVwcMzjnleQJRzjkGZJW8MRE4T9MkLC1uBTFJVbfRzOUuSWYs5oo+aa6hMeYBZgCy9yHFPYkfRwSbWTRLRJABEAmnPIkIXIiolUIBahxrHtyNfkycSbyf2hgSEAcwpR6IELMkkiwp5ZRDJ9V6lOkRGoGDHeLH0RZkAKdEMVdnOS3zPKcpm1W1ik4hQceA2snY4d831m/k+UQNehl2oImmdDM4in9hiscru6nqhcCAUJrOZsw365/fm/+JmEgoUhBrmAQTRyvzkaJKA2gIKWKCu0aEaB1paZgVubsnb6ULmLmaujZxYHPrg2rHo+r2+B364fvYAdCQ+W2mrW2bpqJtw74Tt1bCYTGJGTHXxd29tZ42e9eW3o6L2PdKPfzLxjP/zrLT0bQFqRS7ad6/jjS9xtgEhLahYtVgaj5QoP46DJeGJrYXKx08idiAZtosfo+SiCjsuVmQWxigcUsE6rp9HaDr28h71cRC4AVufPAW3sJ/VSV4hE3uzgQmGDFIzbsGSdMmia85Gix2rYq4IIWjydmM3RMc6oCGWFbnwzFHQ3GPbphE2JJorrUmMNjbJ4WiGkDCZr7VmlJeMmst27qt252ZJKXglptZKRWglGrKobJAeZrzvDCnnKeeEcJM3Cjn5Xy+nM6X129fiOXnz/Tt22tKySO8gKU85ZTTND3O87aVlNL54YGIr9cXJjoti6mrapv3QmCWENhwjCkcFhJAMdbxenvVWokw5axRf2OklJbTQsQpp2memTqpATBg28o88Wk5RUwaKy8x9bfFejSdZ0mTJM4pOTTni7vWWkQ4EP95nlPKDmyl3tc1z3NYnJxzoAamlqZZUibJ0T/LnJ0NtZZS1m3lZhGYheAEUjhql0tW1QAQOqvGRxgn1CoF8c+Uc8/rnRjMQsJJMhO7k0gmRtAoOrajPYjRHhu6+aa9kmxmpZRSKjOlFL3BlHLu3HkODg9FdwAQpUojV3VziyodGjbEDmM2mNeqy+mxql0ul9fXBVY3s1o1Dm7riNkjVY+DplUN0QvVmLjCDRWgxsEPk0CgfbKaqykqcWhscCxVRN57+BsVhIj5gmvXTTRcidPAdRoCE9cOFKs5v26CvJ3m8LPMUFciUOC35BErwhTh00MwgihZa2sE3EgjA9DGduP9fpvpgcEpxiseo/gIRc1s1I+7Oxi89erxwPsgtyyTwwhtZkCL9Ik7340a5gYGsbW85JBk9NDbutjFaJvrIfm+Qd09mJ0R+jZNZNcG4bUHsQfv7ZcNJwxmrjEZHFB1IgQYxDHgm7QDLX2pqM9cdqZIhNVUo9kHUQSgBCd3M7dDLuzkjZ/Vvfruz0bVNrYqAUbuiGFIoAgOvINHvcgVsVwUwWKPKuDOHgKCvWVxONawumgYWodMHcRcneDS6a0GoHEUiKz5EWrdn67h4KmXbSliFhHQPhTI3WE2SyYiU1eFq2YBs2fBfFlu9w3zHDVzYzemlFPOU0pTSjnnTCBhDl4YCwwzQIBeHj6SZE4LZN4qWNLDwwPDSSakJCwpZSJJmctWskjOaZlPWrZSNM9zmqa63m7r5rU2OhYgLVKsojqBRM0VtdStFIOm3MS+HEicT3k6L+f5tHCWPE1EcAv/ynAq55KTX05zTtnNnUxV2SxPMyhvaqc5RE9TEnbVomVKKaVEtAT/ysw5L2CWPItM7mwa+GKD7pJAoQpxzpImlgWQftxIiCdJimIKijk0hHB7qlvUSDdTM2VQliSSSMSiT4pEXd2dSLjBAzQiFjRQgswjSOVqRubcrZg3AUELTyPMTuizfLeyxXyFpmEiMhNRzMwhTiDOaZryIilxStIo9jBzgxGrklYLcw1xYTYkZpacErMYSJWq0un8dTmda7nV4lAL1qU7dRfdmYfuBCSRWkcFkR0mQ98fzeAGCZwILEyOIDCQIxHtevDRsMgh4Ezk5BpmGOgVxGMebs1FhPpWr1MTMYk1BfYmLhflIkRq2A6XhYyDaRzX5qQgTOZuwdw1AlI8JQLabQsZqKl5oMv6NEYXEbGZxcS4HhJFMBooG48YtAeVHnqczcAw9569/ac3WfQfD/AnoJgwK+xm5vsou8EY096C2Jbs0ENwtP7jwhFae5+QNzpdjvfTb77FNPHFvfXCuMPN4NBI+AxhjMcHuau7DVZVvFHHtNK4zQABAxKlPqPmQMSkKArRDkYFEIlwougCEiMBbxkP7U5joKtRKBNJBETewSQhnjzWrSdV3j1HpMNoAlTd3UqPGoYPDmNDvQhhI4SKDIIjjBB2ge9DBY6gahBCzB1WiQgiRNHHTMSSUvYIdMiFOUY7TfNpPi0pTSnC/2A4CDMloyCVQ1I+n05MmOfZHdu65mmZ52lV55SiPs9t1mB8WEPhay3WlNjdzDUoHyzUmpyi1u3xONvSUZt7LknMXNXAlOf5dDk/PD5My0lyGpwnIlHVZZ6ZdaxDZSazTMQQEn44nd09KiIpsZthjVlONE0TiNQb3IdBd+/bzPrU+KiORLE+zBZRDiZo0slNTYOt6n7QtwnAMDr+OJDIoM1
oZVC0CaDTu021cQ3dOxW1RWE9jGAPFWjVEFLy1i5m4+Sqecwwr7Vq1YZfwkFN6TMcg7tLJkmZhWJIcJBBWzWCnAVmRA52bGVT1WnSlJKRR0lGiOc8+Ynq9nI6nS4Pj2W73a93Cj0nBAHUzP1ghIc1CyPXNPa6bWj/NyYo2B56dvSAKCAptP8U+yegIUZQJDiYLn+CsXfPau7cbAKztVuJFlVQAEGILn9yV2+gTFgYoNFNEUGrWosijcAOBxJBqAXpDnhEVG6VOYKuIebTgvo3+RH52HA4hOTDjIaFHzhGQzS6eeJQ8R6gQ4f+w0631ikiIhfAOlkIu4PZDX38NPFBkRgQccCy/Z1Z3wvoB5nPXp5qtZP4Tfh2ay4U1IQP+w2AojGsDwuCu0UnSr+x9mHUxlbAjMy8s1ht1L72G42lO1RlG4WJyOHEodfXP32klYgebhofFM8tbDRLXD+5O0nLbVvs0Zmd8ex63aJJsVOnPLo7EYtgdPF0Sw9iBnPjjcT27OBmhwJbnXZ3Sx1uaq5LFcyu0AgvEidJ45GBGqde8jTNp2lecp4l5ZQSAebUxnkSEbNQMiO4SZqm2Znl8+fPX7583Wr98OEpi3gIJretaTU+2tzcmXk+XVRtu13X++pNNNiJhARwcyUHlbpW3apu63orZYt6CDOlnJjYHSlP0zLleVrOl9P5kYSDI3CjV2K63W/fvn11VbUkQswsKaVpYhZOkqdJpgm9FmXuc86AQaNN0tWsaB1UCFX13DpUaq2llNgD4eEkTcxNWtnNjSx8g0mq1KIo9FQ+/ox5ZGZ1sOzbn2Ze3MSEQttCQDz0JChwUjd0udhwSOHkTGNUS5c5GYn7foqJWYg0hl65uatXD3k4ViV3Sw4CxVDoOGgp5WY0ukKJu9/vd7OakoA0EnK0SC2WWuZ5OZ0ezqeH7XS9Ta9l02Zs46zaLg7UDXpLnX3oKncd/B4z7kSdXvtFh5Wwh1oUslqImYxOcLVm4IJH1IuzHUHvqRXa9cM2OLkD0elvblEI1pB9iZjV25kLq8+QAW7RqBF325QOgDrwpgspfIRbG+JKUWKkg6Q4GvTVIKWOB7yFxdvCAI3P04LKHg+y9fbxxrBqN2hRqODACdz2vri3P3vB09ubY3cdarnHG6bOBB322v2NZlwrqzQv20C3sC5CFD0gNmZyEpgYtca49r62XdakB/XtXkbYy4ef4XuszRBvrU/doDFABg0YOmDA8WNRFaCeePf0aHjfNjmVgyxpBFKvZAG97ns21mP4oQB5xhr1oGa/bHtZdD4fuG5GNoz7eFjv/jJ+2tnxvluZJWWQRiRYqSaZicHkxs4cFbxlXk55mkWSJEGwaxBAV2s9FUnEcVY5c+Dp8gOl19fXreqynFhSi0PdAWXOWynhtWqpdb2y5Gq21eqq0crLIEKCWylF1VWrqm7btm2bamUmIXYjZhIWkTSdT+fLeTmdpnnJy5mAai7MIQ308OHx+dtX0ZSnvMwTEUSEIKVU87uBWZHnqZSSc85pisM75Yxg3bCRqarGzBwJW+ymWkTIrDHlYpGnfGJmkcQU/YDtrFioB5prDTCnhU1RyBWhGLeIHsk6UKvWosRMKVkKB4ORtR6er0Xp0rqQZwS8dNhsDaXqZyMinLJVJq5NGcnMTWuMAxPAStlUDW5MLtxYzTUMadv4BnMhTkzVAVeYq1WGUp5TmoJUw8x5mvI05zznNKc0Md8jcI0YHz54kD4sDJqJ7Y6u+a2DpSd0XSHqKQS1/x/gDJE7O2ngOGFi6AhUtHhsGKsWoxLBYZ2KGe6maZSHHItHz1AjONoIer3/McyLDURnxNnkydwQ3U/mGkGZw70pBYZ8uNeBLiAaDhsQDLB3TYu4XUafM74H1wCIUhdPjUmKo2G4JUfxZ684GAMEJcreilG73R92c2wgdKoW97gbjZnAB9M/luUYa7c1spbqxvp0Ilhz7UGBAKdgUFSvBaoGQBzCDE4plVrRo2Y+3GcL0xpuFumFiwg1SduWCGgPh4nZzdSM+5cCoFaH8xhLCoA588FqD/5PvEai6IeIr+IlKpQUGj2tOGy4AHCDWse9DbtDbXuaNc4wMTMY3gZ2juX7/j7H4ntrJN4/lHrZhIkhNAnValtZTRVQEUzTkqcYosuST2k+pzzlaZac0eqlRAR3IyWQhUoegUkotAOE87qt52nOy3Jf11JtSZKSqLuVqrBS7X7bAMspgdggZd3qtm2lCCxJTkLq8ODMEMy8VL03Cc+CEHo0KLzW6snzNKWcl8vl4fEpzyeR5JQWEcBZ+Pr6fD6fP336VK43IqRpIrR+KmFxcCllfbmGyMGHxw+nec45aS2qAWggsfA0l1ovlwcI8pynaQKgtXpKahvV6JUN+NFSmpgyQCBOIg6HhhxcrVXdrdaoECHnnHOOsx95QESlTOTuSWTI2gQsI3xoIO2Gxt1Doif1IfLN4rd+DxnbNTR/YgGBSPIk5xzyn8wcL2DmosXViK3NQLRqWqobSiFqrf3mRk5CeZoytlpr2bZSa8l5Oi2neT4zZw/MjEU4JzlxWoQn5gzUgLs7FkIjDotLR2WLR2iL3TWKNx0EDlylHZk2UyCwl37UEC+PhJV6k3DgXaajA2y4BHUHnBF0LXMGkUTiHwgSEL0CJNEaB5BRKLV025rcqo4D2c4iEYFEiAUJtobDaYmaWV8FAE6CRKyhA9OaE9y99Uz0s2xdFY1ayNjj62Ftx8fGX1p278M9BLkiFqDDTGGYuf+u7bDdAB5x8HcY1Ehd7TDG+viXt14BnbI8HINj/5QWsxBFrsNurfbgSp7hnZ/MznsYTY5+HkAEVSJKFAUYDSd6NOUjaj6CJOgEtWF8/3+y3nQ9ciPJFjy2uAMRzEUlVd+5/c37v9t03+qqUiYZAbjbMj/MEUzN8EeVpGSSCMDhbnY2q9V3gUYsqriEsFU35aWmLQSAiRZ3RYVi/VLC80s5tw7L8q80tAJef2kj8td79ap5GJdOibiC4F6N0etxECE8PRzgF2y0HgSz8OL86zJrvdcPJFHRvu130V7LM7kxKxVrwvTiThYRVYJFKUVzXXR4Egv3bQsPZo1cQ6CSiFlqoGCdVzUzoLe2bTpZ5vmk686ICGVhU/mqMKpQfQHZcQ02dgMRaWutd+2NRYmVSbnoOiE9HwBEZa5JecgEi9bzFNVMej6fxzi/fPmCmp7Su9uc48jMcZ7MTBAi0dZUGcsmxqqySgk2CmIRAq958BkB5RreXAnvxMKKv675KkXWpkwsxOBVKWYmE0ALWixThLuFV6nBZSiprePV3WamRSRSSITXYVErfI16N7/0mquVfL2h1Y5UD6FNtCsRqSgBZfyuF0REVCrKPgNrVYtIpNk5j+P5eDwzk7m1xuFgbiAwC0trbdO+sUhetemrdnxtJpl5SXw+gYQlRsLlP7j+6BO0JeFSbS5tSUn2ryah3AZYiET9ks/Ns0Iwr1euOIniezx8EW6U8Ey+BkrGy1pW3qOqq6pwc7qiYi4IpTbS8k6HRhhdZjubFm51W6UKdWFWkSD3GqMcr6
0pYh0GWXQQf04rvxCANZ7uVQjkMlgnuFypRcZSeex/eQSoIyVKFUNcf8qli7qURbmGjtaqohrWU6z8i6X89fCpj0m//Jerpft8tNeZvO4UL/0W8YKJ1yPMjOM4M7O1pr3vNxJqLHo9SjCvEbKfv+v6utrwF5H++T2vE+va/deb8es9pBLTVPfAqwauKy94t4q7zFRVEC+hUSZ4EZj1dgEr9Be/HpaUxCQQ/nRX/H8r9yrY694tLkc4q/+8NvmrAV3SCGGOqLDSK6ApExFLRrTgT0E4ETXtItpa175t+731LYGKbkvqLyvpL3cvgMWIxQt1vHrfzBRuREHB243GebqnLkaEet/33YnIpu5b37aNCOM85/Ew5pizoNUEZblRLQpso4toLRXyAufq5Nr67X7ft12l05XQsl5biGoHKDx9el4LUElFtPWNiJ/P5xhDWLq2rXd3fz6fc04zn9NYdZpHzr5ttdj4YlwzI9MjIs2CoSskljLhKBpx8Ynu7uZ2aT1fpJ37XNqTRNIlLHsxhFe9cJFkq0Chq/nLa5ZUrc/6mW21wsL8aSaoZbDWcJHA628TXfPTiVGeDmYi2ld9c40er2tQVRGuZKFyvGdJ75ibNtWmYs952PTjOAASUXfLSJHe+97aDmYIgQiCBdeAs27Xa9fC65lf8odrj3i9fatoA2WWPERKNVR7lCj/ijlXN1ZgAK4q+qW/jl8Lr+svLA1SRNYAhAqwyuRKnriw4utK+KrLs4xHjOpRE0u/CEGdpKHVcGVmJfleCavIT3sB1yFfG7tUF3ktBVzlQDn31vl3aV0TTpXb/NkugGgBTte+fAWtVEpBXneybL3FBVRZ+FeFzwv0wFWp4epDX99DVzVdX8xMv2y761TAutVrW8E61YgoEYsiLbk7oka5Hs/nv//8092/fv36JuTRmJiprIkFUJbqkS9OFbhYHFUAMJuvwuvzDGD+NVr99YIxy2vRy0XrMq3QSxCtvCcmTmbmKk6ZGRhe+9bqOSDaqACXpVn0X6n1wnN+vYD6W3zpVeow/LWxI1oZq1VDuHuEYx2lnwOKX+VlxjX3AwR+tZsEojoU6xmYg6iJdu17oaYJzpBfUD53j3qg8Kul+hWkqul9GcWUFDPeQWGOCJKKd8btdlMVm6OJsLC7mU2bY52N4XMGwBHhNt0nMlmEVQqyYGazBQ+zSOv7tm1bv239JqIRgFuAIl2TgLBpc5hPuwpJQo1jVK27OsZg5re3t7e3t33fzWyMkzLMLJHwYBZRKZ3h2qRqJgpqbzWkODJBEansHkEB4nQPIANpNsc8xzzcbNhkqgCzdA93763hMxI5mUlFliCYJT4rlav7yUCidu0E6troyrKkv2C2cYG9+epWkaCVYUqf1clVg/FKAAUTm5svh0rpS1cZxKsQZoAJ7G5Ekin7ljajNxBx0eOttcvUA2ZN0PQIpFN4+KV0KG3FS75wVW+EkibmJwD0egkKGAfKD/yJZQfhs7R6afyJgqnolvVtuXaWv7pXS3RfLCiXeZooK711MkkuxU2VOKVXXDHXdbEZXmdreceugEumIgNK5Fo7BaNQIbm0vKVbQCSYSaU0iKsqZGJ8Ch+LjwAuJGHVOp8KnGubyCwr1a+b+K93MZfSJpe0dh0htQ8JfrG8vn7mazugT6BmqXSu61kXdl1S8tVz/box/bLlXZKCSACy9LUgqiIUCrCIqjThSIt0N3MzYaZkIl31AVGgkk2XhKa0+vhERenC3v/yoXDJqIjqTl4tB/I62+pcpKDMQgZLTFUwvwcuOJWIKYzgSM8orwJEmFLqlUOCmX/VDl7r/HP7xxreWW11/GrZ/5WJwYK9MvyKb+SF/NbRxWV4QawTPpPWvLm1LOwS8xBBWJIkkyLFayIEMV89d2ZSZHLdwwi/sAOAhC4hw0taVyxcVR6872pjzmlgF2lVN2Q2ICkRnnPOeZ5zDHeT8jzHtLgUrpEvYvNqkgUwYibi3reKZGBSQhk1a4871x3JHM/jPI5xTI/o6zFR3zZmjkxmNNXt+/e3t7eSgZqNeZ51GSziFvte3mBeoRla5E6keBqnyKWrMo9MdXYXjchgrFm4Y8zjPM/zmOdhbqqtPGiFaM0xXw+3DALuXrutqoKlfrgZPG2l9IBedBeYSxNVq/Ta6yFS55O/1nzRAAAaqXZlSOVaE32m0696JDM9SpMXC9Dm8rCIqvSGetfKRUvtig7VDFE5i7nI8PM82rbX7mA1YbxeSc4oJxaV2WdVq1RmK7p2pFVfgJcs5AUdVK1d50TU6o5MTimCgniV/ZlxTblNXGG39VLkJ5Z49Ul5fRvKOgCs2DcDdGH+AIiJosYbC0uk45KlL4VtIavJgPMl7ynILAGVUqqinhpdUYD1/BIJy+n02b+jMoqTau7xOnDo1/Py9T7ilzfwr+DA5867vFGJkkIRCtStDWNVilLJN7/Wy1eX9BkLmnWuf4p/iK+wvEIwFp39C0nwq1iFLmMBEmUDrQsTkswsYIiBYALodtuZ/2ZmgYwws0FEkWBJISGRaumqIfgL7vFLL1HFQLmxfv10dLHiBF1sLV63jVa/mgRid0Plra/DhElYSfnSsyI9bdicNbq9PKWZSC/5PDFz7726wOuQzl8f369ESBVp1dm8QmMyy4OZTKtxJqRQhoebFU9IS3KzPIyURLIu0SPMbM0Or0dKleelog0oJyCBar7Nq9e+bojn1UUjIipAsWAbIqmGZqniapl5aFNmGWNY4jV/PNzPc1GHx/Mxx0i3qg4z082HWX1ad4sgkS6tyNXJzE0lAVHtfdu2m0hDcHoBG3P6DI/eWkY+3h8ffz6eHyerAZ2YtRgkVQDneYq2r1+/3O93dx/jLK2RmVXUT6S33vbb277v27axEF9DLsYc5tYJ2mpiFoHA5a0NJ0hSnV9zjPF8Psd5RFp4gEwuUD4zw7yeTJW19R9V6QqjJlEtpRwTZYiXNvazGgOt8KX1hoaXs4RUuV7GV+1Vv1SbiAiTcHIVy5VZxvjsFXC5LOvQba1zK01zvfKrznCESGfpIu2mO2mHfIwxWOAwm8/HeZRKRVREm3RCjTUAACAASURBVPQGrUViqDD/X4IV8hr+EchL3JNrKdHa8y69TKF9ucAyqi3vhTGvr1LpEFG45wsmYcY1pSfXTJRSHCx5aJTUfKXlzQgRDnBjWVOlMqLyHnPVV3XP7UKBFpHDV4tcsCEBnKnl4yh1ZlK+iru8mvrw+PwMlPlXL+76XFfjQ59f16YOqniTC1bOCzZ87eb1T+t/mYiJhTmXspRf2RS/wiNVOq01F5+7wK/dQF7g9fUfq6d3z5VEBWRtGSVEqNK18BMPZ4aEN9QYo4XIXEHcK7wbcCQqqyACLOGsemnt6wa9wJMLC6pDMQi4Jrby66D6hZioQ3id8gBJtb0khYXSolIi1mQ7Iqbe26uyYGZSzpReM7GZsMY4ZoStIyTixXcXyfF5DNSOT3lZgbFERpSgz3O3Zh5cYE/iAkxZS328Dr9lzq8mL5PSS0G7guiAWQl0RGASYdbOrJ4kAWKuz7tML
WxQzNzHteZ8m5OjIwkZLG3W1wTlWdo+C8954IHLNnkpq1Fsm572I/DFWU6KnW0j4eRcW5wIgG5r3zzndd3+66KrqKArFDbE8Br6mC5tkhYikEYN45Q1IjYtf8HzlnAGNnlbKUKiomYgqXyymF4NmXUlQ1Wuz7YbfbL+vCjsZhLLmoSOj6t2/fNev4l6fHLSUzi103jOM6FzVIJX333XeXy+nduzcG9fn5OaeiijHG/f1t38XLfNmN45u392uac8qn83MpZV3nN2/eHI+HcRzWlIHIAyNg3w3TuHt8/LnxlWAGwIRwLWYBBUNUbI3xBoiGhIqt4x0axN0I4IaBNDsAAyAigaK1RAlkeM33NEVFUDNCNaOGLf36Dmhj39fmFwNQM6RmOTMDvUK+Bihg2A7btohcx24ABEMFczH2zZT7tWW+aYmucj+n5hy07hgk5xoyJA3s9t6316CaNkIDEK9PDKg911fy49rv4RjBTMTadFZVpUrOuSHXzM45eN0PsiEiIDsmZgAt1UQBXmf2poNVM2YmJm2aQVWttdRsKirVtVXUx34ah3Fyzpdc122tqmjmmVvzMjErQBUrtVH7Wkptk2OjfLZtVVUmds4RmmNCatpOvP71StF4R855RIeGSMje8ytL41vMhVjOxSuKVEcU+65KFRE17WIXYlSwUoqJtoixXAUAxnHXYoJKKbQhNMa/KgFycO1Wun4wRFVKe6Va01l7J5vWiwFEK4A0XWljYXIql8uiaE1sE0NgwFxyzhUBqjXDxrWCKFUBsC4EH4JIlSrWUEMFM8s5iXAMUaQ4DEPfk2MEDMSIhM5L9SKXUqTUenp+Kjn9/01dR64jSQ6li0gj6ZdrNOYC0/c/1KxmGt1VJZNhaGbBVKFXf/MBpRIKBvn4zLrVMP/4UkspRKQeRKKqptqO+fPH3xAQrktdhWt/vVhErrtZEDgQ56b9aMfj8fj+3/+oWa31crlmPUJEEckWJNHSdKB6vV6tNREholqlFBrDAL0WiohSC6FiCGWkl/lSBMELgTDVUjL7qLmWkj8fLCLCFO7bvq375dk6mG7bslQOc3SKCDBlrELiYcdxIAARp7XZSeVkllKllD5Hb4epJqek9/583i+Xy5d1vW5bJmQkXvt6vX7ef/z55/9yKFmWJdzdpoIH4vgAAAcYSURBVJsXoW9fPl+2LTx0jhYGPsOcAuacZuoeRq5jTtOpQ4ogoSB9fP50HEdglG0FVRTht3o/2wj3MAs1AySRQudeE3PQzxKfPBFVA6RaF3MIj9kV0HvrEY7hGOBqx/OJCAAGAGE+fc4x3IOpRJzsidHV9J5y1HXfbp8+0KP3vh5bHyON1vd9cwe16W73+4/eX6/jsW3lOF6tDQT8+u03ypSF7bd84j/+/cdxHJHLGbXv3//q/fj69WsQSinrui2lbPt++/jk7mDu6MzoM6v+WdeTeYkZiQVEAEHF3OJcECMR+MmhOZ0ZUjMLkQIuOon7mFYTgaeoIAtdJB8z3/AbdDo9huJ9wpEAiczGGU4AmU2GEA7ggHiaUgcGGABT8oDchWtFohQJAHG8RQX5R/s01V+Ph2eIh5n7aeH2tshjFpHzyGUh8ji/oAMwnhsMzOD691ROhMDMzGaB5/+gu5vNE0dixrNMASQvAtGD4ByqsgPLiyeBwyxEGBYYJMQoLHWVWrEwEDIF5xonL2ePcHbzORQYAdBthrsIlbIzESczx2yOluE/OsEMmSumhO9USJxNXMIbqGYKAMa/NhhIGhEWGCCVeU7TkwVg5nD6kXCRhZDUTHVEWJyvhZcimGxiYdo2Jgr3oRPR0Q0BAYiFhEgDJ9jUEeYGgERSiiBAmBRelmX0NhFqKaUWzzsVoY0+VDNv+fuPHzoV3NGhsAyb27av67IsS4RL4X3fy1JLKRKCiVsiEkBlEgIzF6JQs+gZc+fhk5xJ0FPEZjraaK0dL50dXdFU9pVqYQjisl52MB/9mLOWVymEpnTZN+GaqGpvLQCYeOp0REYcY47jMPcUdqS/3uFRa6l1wRwf3T1CVafOpKiVwrXWxCT3dQ0AApiqVH0pIsylrohkpglaraUCOCNKYaaFmYIwArZt4ZT+uy1rrYVNUfZlWVcMGDrH8epjRER7vrZ9T+uC5HfdbjcklATNIkxNw8cYx+PZW+utRXgGqWZgZ0QM830rycV6Pl/ufrleSilZR/L8LrWs64o2wichjfbs5r5diCnc2xj35wMArpdlXUot9co4xmzjSPrQdr2UpXIRA4CzKqWwdswxtn0HRJ3a+zzaXBdiIlOlxGyzdKjOaWMM842lEEAgzjHnbHMMHTPc1lrBDNW2fQeOKnVOHa231zGG9jYCTy+c1nvvTZiXWrdtgzkBwDBuH1dzVdX2epVl/f33byL1+Wxms/XDfR6v/uXL523b//rrb5udiTJqgoh0jnUpnz9/AMD9fr/f74/Hvfdu7teP20Wk9/b8+fNoh3sAEBITkpnSac6DHidpEyPdQt9ODpGMy3PiRCbwzCFxROQ3nRIAISgr6wm9RFDSdN7tXIQnqT9RuF+VNT86EZtAcA8EglOfbHBGEhBAbnAj4h1ameqA/LVCSClLCgvIk9lC7p79Y7alqvYu1pT3f+513+BWREQgM5+lEE+3/XPHHSdM9g4sJsBUAKRjERCXpJO7Rbjaye7xE1VMo0p1y6c6LyfwZFIFQASa5vvOQhtFSgAForshIpVCzBbexiAA0bS/F4jcW8RUDXNHBVIppRCICPF7Jw7gLhiBYW+qKyATc/x6LW+sC9+4locpEkSoKwQF5ZIDwACBhYSYyUwxLVuZEMndmSS5usQi4GE+5lmn8rskeo4ATCiMxpQQnIOXWpZlE0G3QAREJ3JkQSIR8AgEqlWYYamFEIqwEA4LAJgna8+2bQPCOI7n8+lmFBDmbYxa77fbTYTN7Nu3r/u+EwkRp0jVTHtvY8wcRHx6qAGDeg77EBGlskgxcw9wB898PQREHL0RhaVuxyOnE4dglqUsHx83CicAQnSLOeay7zoGlZJaRIBEPeN63a/bvyxChN3czAhAFVlqhPbW5hiOaG4ejhF83r/ETOEOCMKMAVUKpekehs4Z760VA4KIzkFMCbixCAAM84ggSHa1MFO4FWaI8DnpdDexOQZ4gBp6cBHA5M4xYGSM0nmgwjNrh4i2batFmBkxVLW19ng81D3T00qVtS4RXmv5+LglQqdqpSxMtNaFmdtrHK2n4i67FJFiZqP3OSYxBTgyekSpFRA9oE+lgqXWeOOcOazDL6YjgDuMoa33oe7uY+paF4hYiJh5WRZEnNMej0drDZlrr66KhNPG8/GoVbZ1tTkhXPtEj0JMC7sfx+PZWzdzM2v3OxAHQFkEAFTNwxPTyzkja06KG9qct1JIqBTZtuXrty+vl3x8XNelbNvaWmemXPjUWplZCq9VfkH5Y/T7PQBCdd5//kSmqZoF+P73dzcXKdM6sgdGOJxuPBEQRAgep/NOpIcPQTADIgb+0z46jzDiPzr+AAJMLzR8c/ZTZUZnP0mBeX4Thjk/Op2bUy
SYNCoHCwB3s5gBWVXivVWGCEciQKAAj5Ni5BH/B7uuJf88meS1AAAAAElFTkSuQmCC", - "datatype": "BYTES", - "name": "c855161b-dd78-4e4f-bb37-cb6907335162", - "shape": -1 - } - ] -} \ No newline at end of file diff --git a/samples/contrib/pytorch-samples/cifar10/kitten.png b/samples/contrib/pytorch-samples/cifar10/kitten.png deleted file mode 100644 index 893f760b772..00000000000 Binary files a/samples/contrib/pytorch-samples/cifar10/kitten.png and /dev/null differ diff --git a/samples/contrib/pytorch-samples/cifar10/pipeline.py b/samples/contrib/pytorch-samples/cifar10/pipeline.py deleted file mode 100644 index 869a39a8364..00000000000 --- a/samples/contrib/pytorch-samples/cifar10/pipeline.py +++ /dev/null @@ -1,260 +0,0 @@ -#!/usr/bin/env/python3 -# -# Copyright (c) Facebook, Inc. and its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Pipeline for cifar10 example.""" - -import json -from kfp.onprem import use_k8s_secret -from kfp import components -from kfp.components import load_component_from_file -from kfp import dsl -from kfp import compiler - -INGRESS_GATEWAY = "http://istio-ingressgateway.istio-system.svc.cluster.local" -AUTH = "" -NAMESPACE = "kubeflow-user-example-com" -COOKIE = "authservice_session=" + AUTH -EXPERIMENT = "Default" - -MINIO_ENDPOINT = "http://minio-service.kubeflow:9000" -LOG_BUCKET = "mlpipeline" -TENSORBOARD_IMAGE = "public.ecr.aws/pytorch-samples/tboard:latest" - -DEPLOY_NAME = "torchserve" -MODEL_NAME = "cifar10" -ISVC_NAME = DEPLOY_NAME + "." + NAMESPACE + "." 
+ "example.com" -INPUT_REQUEST = ( - "https://kubeflow-dataset.s3.us-east-2.amazonaws.com" - "/cifar10_input/input.json" -) - -prepare_tensorboard_op = load_component_from_file( - "yaml/tensorboard_component.yaml" -) # pylint: disable=not-callable -prep_op = components.load_component_from_file("yaml/preprocess_component.yaml") # pylint: disable=not-callable -train_op = components.load_component_from_file("yaml/train_component.yaml") # pylint: disable=not-callable -deploy_op = load_component_from_file("../../../components/kserve/component.yaml") # pylint: disable=not-callable -pred_op = components.load_component_from_file("yaml/prediction_component.yaml") # pylint: disable=not-callable -minio_op = components.load_component_from_file("yaml/minio_component.yaml") - - -@dsl.pipeline( - name="Training Cifar10 pipeline", description="Cifar 10 dataset pipeline" -) -def pytorch_cifar10( # pylint: disable=too-many-arguments - minio_endpoint=MINIO_ENDPOINT, - log_bucket=LOG_BUCKET, - log_dir=f"tensorboard/logs/{dsl.RUN_ID_PLACEHOLDER}", - mar_path=f"mar/{dsl.RUN_ID_PLACEHOLDER}/model-store", - config_prop_path=f"mar/{dsl.RUN_ID_PLACEHOLDER}/config", - model_uri=f"s3://mlpipeline/mar/{dsl.RUN_ID_PLACEHOLDER}", - tf_image=TENSORBOARD_IMAGE, - deploy=DEPLOY_NAME, - isvc_name=ISVC_NAME, - model=MODEL_NAME, - namespace=NAMESPACE, - confusion_matrix_log_dir=f"confusion_matrix/{dsl.RUN_ID_PLACEHOLDER}/", - checkpoint_dir="checkpoint_dir/cifar10", - input_req=INPUT_REQUEST, - cookie=COOKIE, - ingress_gateway=INGRESS_GATEWAY, -): - """Thid method defines the pipeline tasks and operations""" - pod_template_spec = json.dumps({ - "spec": { - "containers": [{ - "env": [ - { - "name": "AWS_ACCESS_KEY_ID", - "valueFrom": { - "secretKeyRef": { - "name": "mlpipeline-minio-artifact", - "key": "accesskey", - } - }, - }, - { - "name": "AWS_SECRET_ACCESS_KEY", - "valueFrom": { - "secretKeyRef": { - "name": "mlpipeline-minio-artifact", - "key": "secretkey", - } - }, - }, - { - "name": "AWS_REGION", - "value": "minio" - }, - { - "name": "S3_ENDPOINT", - "value": f"{minio_endpoint}", - }, - { - "name": "S3_USE_HTTPS", - "value": "0" - }, - { - "name": "S3_VERIFY_SSL", - "value": "0" - }, - ] - }] - } - }) - - prepare_tb_task = prepare_tensorboard_op( - log_dir_uri=f"s3://{log_bucket}/{log_dir}", - image=tf_image, - pod_template_spec=pod_template_spec, - ).set_display_name("Visualization") - - prep_task = ( - prep_op().after(prepare_tb_task - ).set_display_name("Preprocess & Transform") - ) - confusion_matrix_url = f"minio://{log_bucket}/{confusion_matrix_log_dir}" - script_args = f"model_name=resnet.pth," \ - f"confusion_matrix_url={confusion_matrix_url}" - # For gpus, set number of gpus and accelerator type - ptl_args = "max_epochs=1, gpus=0, accelerator=None, profiler=pytorch" - train_task = ( - train_op( - input_data=prep_task.outputs["output_data"], - script_args=script_args, - ptl_arguments=ptl_args - ).after(prep_task).set_display_name("Training") - ) - # For GPU uncomment below line and set GPU limit and node selector - # ).set_gpu_limit(1).add_node_selector_constraint - # ('cloud.google.com/gke-accelerator','nvidia-tesla-p4') - - ( - minio_op( - bucket_name="mlpipeline", - folder_name=log_dir, - input_path=train_task.outputs["tensorboard_root"], - filename="", - ).after(train_task).set_display_name("Tensorboard Events Pusher") - ) - - ( - minio_op( - bucket_name="mlpipeline", - folder_name=checkpoint_dir, - input_path=train_task.outputs["checkpoint_dir"], - filename="", - 
).after(train_task).set_display_name("checkpoint_dir Pusher") - ) - - minio_mar_upload = ( - minio_op( - bucket_name="mlpipeline", - folder_name=mar_path, - input_path=train_task.outputs["checkpoint_dir"], - filename="cifar10_test.mar", - ).after(train_task).set_display_name("Mar Pusher") - ) - - ( - minio_op( - bucket_name="mlpipeline", - folder_name=config_prop_path, - input_path=train_task.outputs["checkpoint_dir"], - filename="config.properties", - ).after(train_task).set_display_name("Conifg Pusher") - ) - - model_uri = str(model_uri) - # pylint: disable=unused-variable - isvc_yaml = """ - apiVersion: "serving.kserve.io/v1beta1" - kind: "InferenceService" - metadata: - name: {} - namespace: {} - spec: - predictor: - serviceAccountName: sa - pytorch: - protocolVersion: v2 - storageUri: {} - resources: - limits: - memory: 4Gi - """.format(deploy, namespace, model_uri) - - # For GPU inference use below yaml with gpu count and accelerator - gpu_count = "1" - accelerator = "nvidia-tesla-p4" - isvc_gpu_yaml = """# pylint: disable=unused-variable - apiVersion: "serving.kserve.io/v1beta1" - kind: "InferenceService" - metadata: - name: {} - namespace: {} - spec: - predictor: - serviceAccountName: sa - pytorch: - protocolVersion: v2 - storageUri: {} - resources: - limits: - memory: 4Gi - nvidia.com/gpu: {} - nodeSelector: - cloud.google.com/gke-accelerator: {} -""".format(deploy, namespace, model_uri, gpu_count, accelerator) - # Update inferenceservice_yaml for GPU inference - deploy_task = ( - deploy_op(action="apply", inferenceservice_yaml=isvc_yaml - ).after(minio_mar_upload).set_display_name("Deployer") - ) - pred_task = ( - pred_op( - host_name=isvc_name, - input_request=input_req, - cookie=cookie, - url=ingress_gateway, - model=model, - inference_type="predict", - ).after(deploy_task).set_display_name("Prediction") - ) - ( - pred_op( - host_name=isvc_name, - input_request=input_req, - cookie=cookie, - url=ingress_gateway, - model=model, - inference_type="explain", - ).after(pred_task).set_display_name("Explanation") - ) - - dsl.get_pipeline_conf().add_op_transformer( - use_k8s_secret( - secret_name="mlpipeline-minio-artifact", - k8s_secret_key_to_env={ - "secretkey": "MINIO_SECRET_KEY", - "accesskey": "MINIO_ACCESS_KEY", - }, - ) - ) - - -if __name__ == "__main__": - compiler.Compiler().compile( - pytorch_cifar10, package_path="pytorch_cifar10.yaml" - ) diff --git a/samples/contrib/pytorch-samples/cifar10/requirements.txt b/samples/contrib/pytorch-samples/cifar10/requirements.txt deleted file mode 100644 index 010a9bacb29..00000000000 --- a/samples/contrib/pytorch-samples/cifar10/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -pytorch-lightning -sklearn -captum -torchtext -torchvision \ No newline at end of file diff --git a/samples/contrib/pytorch-samples/cifar10/template_mapping.json b/samples/contrib/pytorch-samples/cifar10/template_mapping.json deleted file mode 100644 index 036d7da7f0a..00000000000 --- a/samples/contrib/pytorch-samples/cifar10/template_mapping.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "train_component.yaml": { - "implementation.container.command": ["python3", "cifar10/cifar10_pytorch.py"], - "implementation.container.image": "public.ecr.aws/pytorch-samples/kfp_samples:latest" - }, - "preprocess_component.yaml": { - "implementation.container.command": ["python3", "cifar10/cifar10_pre_process.py"], - "implementation.container.image": "public.ecr.aws/pytorch-samples/kfp_samples:latest" - } - -} \ No newline at end of file diff --git 
a/samples/contrib/pytorch-samples/cifar10/tobytes.py b/samples/contrib/pytorch-samples/cifar10/tobytes.py deleted file mode 100644 index f99faad7357..00000000000 --- a/samples/contrib/pytorch-samples/cifar10/tobytes.py +++ /dev/null @@ -1,33 +0,0 @@ -# !/usr/bin/env/python3 -# Copyright (c) Facebook, Inc. and its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import base64 -import json -import argparse -import uuid - -parser = argparse.ArgumentParser() -parser.add_argument("filename", help="converts image to bytes array", type=str) -args = parser.parse_args() - -image = open(args.filename, "rb") # open binary file in read mode -image_read = image.read() -image_64_encode = base64.b64encode(image_read) -bytes_array = image_64_encode.decode("utf-8") -request = { - "inputs": [{"name": str(uuid.uuid4()), "shape": -1, "datatype": "BYTES", "data": bytes_array}] -} - -result_file = "{filename}.{ext}".format(filename=str(args.filename).split(".")[0], ext="json") -with open(result_file, "w") as outfile: - json.dump(request, outfile, indent=4, sort_keys=True) \ No newline at end of file diff --git a/samples/contrib/pytorch-samples/cluster_build.md b/samples/contrib/pytorch-samples/cluster_build.md deleted file mode 100644 index 75a131a5407..00000000000 --- a/samples/contrib/pytorch-samples/cluster_build.md +++ /dev/null @@ -1,100 +0,0 @@ - -# Running From Kubeflow Jupyter Notebook - -This covers instructions on building the Bert and Cifar10 building and running a pipeline from the Jupyter notebook in Kubeflow Notebook server. - -## Prerequisites - -[Kubeflow Jupyter Notebook Server](https://www.kubeflow.org/docs/components/notebooks/setup/) - -### Steps to Run the example pipelines from Kubeflow Jupyter Notebook - - 1. Clone the repository into the Jupyter notebook from terminal - - ```git clone https://github.com/kubeflow/pipelines``` - - 2. Run the example notebooks - - Cifar 10 - [Pipeline-Cifar10.ipynb](Pipeline-Cifar10.ipynb) - - Bert - [Pipeline-Bert.ipynb](Pipeline-Bert.ipynb) - - Once the deployment is done, run the prediction and explanations. - -**Notes** - - 1. For GPU Training - 1. Make sure to set `node selectors`, `gpus`, `accelerator` variables under the train task - 2. Use `isvc_gpu_yaml` for GPU inference. - - 2. Tensorboard Image Update - - A custom tensorboard image is used for viewing pytorch profiler statistics. Update tensorboard image name in the notebook (variable_name: `TENSORBOARD_IMAGE`) for using any other custom tensorboard image. - -### Captum Insights Visualization - -Run the following command to port forward kubeflow dashboard - -``` -kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80 -``` - -To view the captum insights UI in the local environment, run the following port forwarding command - -Open `Pipeline-Cifar10-Captum-Insights.ipynb` notebook and `Launch classic notebook` from `Help` menu and run the notebook file for captum visualization. 
- -![](serve.png) - -``` -kubectl port-forward -n kubeflow-user-example-com :6080 -``` - -For example: - -``` -kubectl port-forward pod/root-0 -n kubeflow-user-example-com 8999:6080 -``` - -The captum insights UI can be accessed via - -``` -http://localhost:8999 -``` - -### For Code changes and new examples - -1. Build and push the docker image - -CPU Build -```bash -docker build -t image_name:tag . -``` - -GPU Build -```bash -docker build --build-arg BASE_IMAGE=pytorch/pytorch:1.8.1-cuda10.2-cudnn7-runtime -t image_name:tag . -``` - -Push the docker image - -```bash -docker tag image_name:tag username/image_name:tag -docker push username/image_name -``` - -2. Update the component yamls of the example with the new image - -```yaml -image: public.ecr.aws/pytorch-samples/kfp_samples:latest -``` - -Steps for generating/updating component.yamls are given in the following readme file - -[generate component.yaml from templates](utils/template-generation.md) - -3. Run the examples notebook - -4. Click on the visualization tab, select the custom tensorboard image from the dropdown (examples screenshot shown below) and click `Start Tensorboard`. Tensoboard UI will be loaded with the run details. - -![](screenshots/tensorboard.png) - diff --git a/samples/contrib/pytorch-samples/common/minio/upload_to_minio.py b/samples/contrib/pytorch-samples/common/minio/upload_to_minio.py deleted file mode 100644 index c2e311df2bd..00000000000 --- a/samples/contrib/pytorch-samples/common/minio/upload_to_minio.py +++ /dev/null @@ -1,104 +0,0 @@ -#!/usr/bin/env/python3 -# Copyright (c) Facebook, Inc. and its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Utility to upload files/folders into minio""" -import os -from argparse import ArgumentParser -from pytorch_kfp_components.components.minio.component import MinIO -from pytorch_kfp_components.components.visualization.component import Visualization - -# Argument parser for user defined paths -parser = ArgumentParser() - -parser.add_argument( - "--bucket_name", - type=str, - help="Minio bucket name", -) - -parser.add_argument( - "--folder_name", - type=str, - help="Path to destination folder", -) - -parser.add_argument( - "--input_path", - type=str, - help="Input path of the file or folder to upload", -) - -parser.add_argument( - "--filename", - type=str, - help="Name of the file to be uploaded", -) - -parser.add_argument( - "--endpoint", - type=str, - default="minio-service.kubeflow:9000", - help="Name of the file to be uploaded", -) - -parser.add_argument( - "--mlpipeline_ui_metadata", - type=str, - help="Path to write mlpipeline-ui-metadata.json", -) - -args = vars(parser.parse_args()) - -bucket_name = args["bucket_name"] -input_path = args["input_path"] -folder_name = args["folder_name"] -filename = args["filename"] - -if filename: - input_path = os.path.join(input_path, filename) - -endpoint = args["endpoint"] - -print("File to be uploaded: {}".format(input_path)) - -print("Uploading file to : {}".format(folder_name)) - -MinIO( - source=input_path, - bucket_name=bucket_name, - destination=folder_name, - endpoint=endpoint -) - -inputs = {} - -for key, value in args.items(): - inputs[key] = value - -outputs = {} - -s3_url = f"s3://{bucket_name}/{folder_name}" - -if filename: - s3_url += f"/{filename}" - -outputs["minio_url"] = s3_url - -visualization_arguments = {"inputs": inputs, "outputs": outputs} - -markdown_dict = {"storage": "inline", "source": visualization_arguments} - -visualization = Visualization( - mlpipeline_ui_metadata=args["mlpipeline_ui_metadata"], - markdown=markdown_dict, -) diff --git a/samples/contrib/pytorch-samples/common/tensorboard/Dockerfile b/samples/contrib/pytorch-samples/common/tensorboard/Dockerfile deleted file mode 100644 index 632f165887e..00000000000 --- a/samples/contrib/pytorch-samples/common/tensorboard/Dockerfile +++ /dev/null @@ -1,5 +0,0 @@ -FROM gcr.io/deeplearning-platform-release/tf2-cpu.2-2:latest - -RUN pip install --user --no-cache-dir boto3 - -RUN pip install --user --no-cache-dir torch-tb-profiler diff --git a/samples/contrib/pytorch-samples/compile_test.json b/samples/contrib/pytorch-samples/compile_test.json deleted file mode 100644 index 87ebf88c16c..00000000000 --- a/samples/contrib/pytorch-samples/compile_test.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "instances": [ - { - "data": 
"/9j/4AAQSkZJRgABAQEBLAEsAAD/7QGoUGhvdG9zaG9wIDMuMAA4QklNBAQAAAAAAYwcAVoAAxslRxwCVQArQW5pbWFscyBUYWJieSBDYXQgUGV0cyBPdXRkb29ycyBLaXR0ZW4gQ3V0ZRwCeAArQW5pbWFscyBUYWJieSBDYXQgUGV0cyBPdXRkb29ycyBLaXR0ZW4gQ3V0ZRwCaQArQW5pbWFscyBUYWJieSBDYXQgUGV0cyBPdXRkb29ycyBLaXR0ZW4gQ3V0ZRwCUAAXTWF4UGl4ZWwncyBjb250cmlidXRvcnMcAnMAJWh0dHA6Ly9tYXhwaXhlbC5mcmVlZ3JlYXRwaWN0dXJlLmNvbS8cAm4AJWh0dHA6Ly9tYXhwaXhlbC5mcmVlZ3JlYXRwaWN0dXJlLmNvbS8cAnQAFUNvcHlyaWdodCBieSBNYXhQaXhlbBwCKAAXR3JlYXQgcGhvdG8gb24gTWF4UGl4ZWwcAhkASUNhdCxUYWJieSxPdXRkb29ycyxBbmltYWxzLEN1dGUsS2l0dGVuLFBldHMsTWF4UGl4ZWwsRnJlZUdyZWF0UGljdHVyZS5jb23/4QDYRXhpZgAATU0AKgAAAAgABAEPAAIAAAAGAAAAPgEQAAIAAAAOAAAARIKaAAUAAAABAAAAUodpAAQAAAABAAAAWgAAAABDYW5vbgBDYW5vbiBFT1MgNTBEAAAAAAEAAABkAAaCmgAFAAAAAQAAAKSCnQAFAAAAAQAAAKyIJwADAAAAAgGQAACQAwACAAAAFAAAALSSCQADAAAAAgAQAACSCgAFAAAAAQAAAMgAAAABAAAAZAAAAAQAAAABMjAxMjowODoyNSAxOToxMDo0NgAAAABVAAAAAf/bAEMAAwICAgICAwICAgMDAwMEBgQEBAQECAYGBQYJCAoKCQgJCQoMDwwKCw4LCQkNEQ0ODxAQERAKDBITEhATDxAQEP/bAEMBAwMDBAMECAQECBALCQsQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEP/AABEIAtAC0AMBIgACEQEDEQH/xAAdAAABBQEBAQEAAAAAAAAAAAAEAgMFBgcBAAgJ/8QAUxAAAQMDAwIEBAMFBQYDBQIPAQIDBAAFERIhMQZBEyJRYQcUcYEykaEVI0JSsQgzYsHRFiRDcoLhU5LwFzRjc4NEk6KjwvElVHSyCRgmZISkw//EABsBAAMBAQEBAQAAAAAAAAAAAAECAwAEBQYH/8QAMBEAAgICAgEDAwMDBAMBAAAAAAECEQMhEjFBBBNRImHwMnGxkaHRBRSB4SPB8UL/2gAMAwEAAhEDEQA/AMR/ZbuM6aact7qORWiCwYGdHahn7CMYDRqHFlW/gziRCVg5TUFcoOATjH2rTplnwCdPtxVZu9oUlKsJO1SyREbM5db0LIplW5433qUucUtOHKSKi1DFcE47CmNKBUc702tO1Okb1zTnn1rinGgoGSMEjNcX3A2NOKTg/pSSB6A1NdmBlp7Y29qa0nHPFEOJwSOAKaxg711QGSE49a6Nxz/+avcEjPNd5/7VdaGf2Eq23HbemFDfanyR7/nTSz2xRbFr5GxtTiD29KZ19sb04k1Nk5UPJVv3x/SlZGASPam9WDuf+1e1Z7VCVkmdUTuc70knvXCoE5xXO1C6AhYONsbUoHffmmxzxSxgUHsIrb0/WufSvA9hxXR7ADPtSNGPBI77ZpRAxgA4pIO2QNzSjtg53oKCM3R7A+ldGe/rXtz39q8B609UK2Lx6Hb604BgZNJSNu2x+1OJ33PrTIVuzunbGK74YPNLT7jYUoDB5PFOIMlO52xXPD4/1p8gc8V3wwBRMnQhKacDeR39qUlsjbFOpRtg/eig2MFs9/68V0IwASDT5Rj61zTjO9ZmTByjVuR71ws52xRBGQSa7p9e9BDAfgjgU62yNscj1opLI9KX4X8oAo2ZME0cnfg17QM0SWRjBHtSfDwrJJ+tAw0lAz/pTjbQPevaPUfWnGxgHIpWMmdCAORSVDIzTh9f8qSc8du9ag2CrHoPzptSRjcbUQ4kZzj2pog5xj/tWDYxpzjbeuBO/wD3p0jPauacelOhrGwB606hO/eugbbcUpOAdXpTIZMNZGB9KOj4JGTgc1HNL422FSEc5wc0Qrsm4WwBTU3DdIICjVfhLzpztj2qairwd6zZeLJ+K/2z9qmYckDBzVZYdwQNqkY7+NhjPOKnZRSLWzK1DmlKkjB3qCblqT6Y96WuYVZBJIFbTHUiQXKHdQ/OuqlakkA/lUP8wSok/pTweGPx/nU3SG5nX1BRIz9M0I4jVwaWt4KJIVyaRqyNj3qbYjlsbICRQbx8xPOe1GqGd80O4k4I2+1TcrJydojns59xSBgdqIeSnVsKHXhKTg/rSLZJugeThQqvzSkrO2fTapqU4SDmoWT5lnejVnPKVgLg5FDqGBxRbm3IphzGM4xRSApAyk45pBxnn706rfBHFNrSfxfnvTJDWcIzznFeKNvSup2G5+1LBHOaNAbG/D7jfFL0ZH2/KukpznVXdQA98dqDQGJ0jTuO9IUk8d6eAA4NIUN96xNjJT2ptSaeWce2DvTOoAb7egNGxUDPYzzXo4Gr/wBbV10Z4pUcYXvQfQyJq3tBWCBVhiR0gA4/WoW1oyRttVnht5A759KQx9JCwDGC3+lCSbAMDyHn0rTjZkj/AIeRQsmygjOg5+lfarGVczHrhYSAfJVPvNnUArCK3O42TCT5NxVIvtk06vIPtU54hXM+eepLUUlZCapjzSkKIUDW1dT2YjX5TWYXm3FtSiE4Oa8rPCmPGRXtOTyK6UdjThQQrBFe0k8g15+SOyqBnE7bimFDHbH2o5TY3ztQzzRG4GN6g1TCuwZeO6RTCx/6xTyzz69smh3FY7/SuiAUJz33r2cjjam1rweNxXknONwfqK6fA1HXCOMb/SmySo8UtWBueK4OOKRyA0NFJBx6+9LRg147jG1dTgYx9Km2TaHBjHFJJI3UKUkng1xY7gCot/JJjerIxj1xXUncd6QTnt9aUnc81qvYLHACPSlge32riQBvvTg9aKQLE4xtvtXj6ilkUnSc+1ZoFidztvtSkg7YArukH2roTnGM1kqBYrG2Tsa7pJ33xXUoxuN6dCcUUgNiAnb7042ntXAPYbUsDtuPSjQljiRjbelAetIBxXadAHU+32pX25ptChgYxn+lODc4rAFpAOMAYpwbDNISMc/pSsZoBFDfeuFBxtSkpB3296UQcf60WZaGCM8faloTvSgglW4p1LeOxoBFJSMDb7UtKQexpSW8p2H0yKcS2PXas9GQwprIO2aaU1g7jOP60boGM44pCkZ3O1LbGugTRx2rmk5J9KI8PfjevBHttWoKGSMYpBGd+3NEFCePam1pAOPTOKKC2DqTk53pkpweKeWdz7UyTj6GmSChGPb9K4PU9q6SCew+1cyPWikNZ44zjG3FcKs8V44IzxSTv9qKNYQws5IFSMZZzwDUUySPrU
hHVuKIyJqIsg/SpZh08jOag4yxkdhUi06exO/rStlYyomWn996MalcAqz22qEQ73xnvTyJChuBSDKRZGpWpPJpapCdOTUC3MAx/WlmUSOcbVNuh+ZKmVg4B9uaUmWTnBqGS8Sef1pxLx9am38m5bJUyN8A4zS0v5GVA7VFofzyc0544OwOfcelTbNZJKdGN6ZcXhJoYSDjOcU27Kzxj0pWxHJnHXcqye9Cuu8EmkOPDfG1CrdB3zxzvWJSkdkuDBztUU8QcmiX3tsJPb1oF1QOcEb00USYw8o0KtWcYGDTjhxnNDrx3/IVVIB7V71zA2yT/SvDTnilZA/9YrUHsSob17GRnevKI+4pSE57fpWaNZz7/lvXUpztj3pwIPOK6kDbCaQDE6c0k53/ACohKQRggUhxAxgc/SgIwRQ1ZOD+XFDqST60WUe36UgtkZH+VazAmjOTjmlNgJUNt6fW3gev1pkJOsAUrZkWC1n8IIq2wUBSBgc1TbW4UKA9KuFsdG29K2Y/QNdrGNkChXrWCN0irgqIkjjbFCvQ0AHAr9CURWzP7haRg5SM1TL5ZQoKGkZ+la5OhJwdqqt1tqVJICd60oCcjA+pLFqyPDrJep+nyjWQj9Nq+nL7ZQrUdOeazHqfp4KSv93x7V5ufDZfHM+bZ0NTDxSRjehwj9KvfUlgLS1K0kDniqg8wW1FJGMV4+THTOiLsCLfbFDus5Bx3qQKQdzmm1oCgQDXO4UOiEebUCaDdSeOamX2tOdqjXm9/UUYqmMiPUN68CPTY0taQfz9aSQMbVcZI4D3/T1r3GwrqTg713bO+KnIEkIwSBk8bUrHYGlbdu1JPPNRlsSQtOw4pWQR9eO9JSPbFOYzwKlJkZAy0Y3rqAQeadKM+lc0/ajGQp0Hf2pxOT60lKCeRTyEenanuhTgA96WlG2c8ilJSfv6cYp9LfuQKFgGPD3G1LDXG1EeGByKUEADnIpkK2D+HzsaUUjtnP1p8oxvz9TSSnHIpkKNacCvY78fUU7owN6SUY+1EBwbcmufWlZxn29KTgE423ogFJ2PNPJSTvj2plPPpRLWAOOdqHZrFp29KVgnsNqUkDkClpSOMc0lhEpGO1OBBPHalBP2p1CNwOK10ZiUtZPm/pToZ296cS2cY3p1LZpGzDaUdhTgRtThRkbilJTgDnagpDDBbzuTjNcLY4xzT5HrxSCAO9NZvAwpoDt+dcLe2wogEDvv70hRBzjn0rWZAawcUO6ccjtRjgB3AoN5Ks7flVFsIOvfsaaWknn9aeKea7o9aboINoJ3xXNIFPqb9e3vTZCh2yPah+wbobOwxSdJJ3PelkZ3rqUk9t6a6D4s62DtmjmBgAd6FQPf86IbJHA4rM0ZUSDC8dz7Uay4Ki2lHvRbKztjikHUiVS6O9L10E2574p5K9+1KxkwkOEbZp1LxB3O1Caxxmu+Ngc8c0jGsN8ZI2xx2rpkDHNRy38Z9/Wmy+eQamw2SzcgDcDj3pwSATkmopt87ZO9OB8jg7VJ34NfwSRk42z+VMreB5OaDMjUBuabU9tyKDQLClvHJyf1oN5/GabceynbvQ61kj/OilWybYpbxUTTLq+/ptXCe+1JUrNVihGMLO+c8+1NHcfWnXN6ZJ29KqkY4VA49a9qz2rmDnY13TyK1UjHBuoH0ohtOeKbAGeaLYaOR2zSM1iw3t3NeDQ+1GNxyfLinkxO2Kk2AjgjApKmyo49Kk1RNuM5pHywB4NJdCMiixvjGfSvFhXpUt8vzhJ/LiklgY/DxW5gIdTPqDg+lNiPk8d6llsn+U/lTBYOc4I96XkFMTFQW1JxkVY7c+EgDfI23qHbZ4/0qQjDSRmj2Hs/UlRAoSQsYOKWpZGd6EfX75r9GSItgUtWoEVAz0pwcipeS5pHOfvUJPdBBo0BdlauraCFDAqhX6I2sKGkd6vVzczneqbeCCFH+tQyRtFEn2ZL1NamlBflTvWWX61+A4pSR3rbb6gYVvms46hjhWrb615PqMSbtHRBtGbOJIz+dNKPbNSFxa8JxWODv96j1Ed8bVwOBdOwR8ZJz32qOkD2xUm6Rk4OPvUZK49celBQ2MlRHrGVZNNKG2+1OOaQTv3pBAPJ5pnGh+hHB75rpOMbV3SeO9e0q9CKnJIDs8k857V0gncb11I545paUgj+tc8ybPIbGRtmn0tnHb7V5pAJGOaKTHUoYANc7JyA1t7/AOVIKO2KkxBcUM4Nd/ZyttqCdEyNQMd+KfbSM8b+lF/s0g5wd+1OJglI77U6YAZKdhsKeSnuo06IihwCM0sR1DGB+lFCMbCQBStOc4H1pwNqTyDXAnHJ3qiF2I0/96Tg78U4dhSTv3/Ojo2xOnb6b02vGCd6cJ9TSDvyPrRFSE6VFOrBAJwDjbPpSFAbHHI/WnUvONBQQ4QlX4k8pVj1FeWULRqSnSeSKyYeIhAHfFPtkbEcGmEDVwM0QgFpSVA57jPf1yKIKH0H0O5p1Pv3pHlCvKPKdx9PSnmwKVhQ60kE4NPtt7jakNI7AfSi0AbY/pU2ws4lGNiKeDeQBjFdQAds708Eg9s/WpyAkMqAA4/SknA4x6085t/WmwFEj2oJjDZHvnFNq+go1uOpe4BBFJchrB2T7/WnswCTj0pI9sUQprtjHtTSm8DOPemWzJDCyDjemHUj7ccUQtJ33plWMnk+lOpUZ6Byg+npXCgelPEZHoaSU0bMhgp2yB7U2pAPtRCgBv6U0v2znk0VswwUnJI7frXtu1OHjfn0pBVvRQbFpGeaeQQNvtTCV49j+lOBffPG1HY1hAV7d+KebeSBzQWsjg/SlJcOd/X7UGjEmh4AY27U8HjkDIqMQ7k0+hfcqpGhrJDxh617WO39aFC8bnOMUouAnGqps1jinBjmk698A00tfvTZXuSDxS0aw1teD9qUVkY4oRLuByciu+LscmkcR0wguH1/KkKdO+TxTRXmk6jknehxsFjhV6EZpKlcn/KkhYzv+tJUrO5o0KeUoDYUgryd/wCteUdqaKsd6dIApau2dzTKjtzXHF5547bUjIwN+adCiwQe9KBGPUetNpO1LTlXeiHYS02CcbYqQYb04A3+lBx0+YGpSO3xnFSkYKYaz239MUYiOTyMZpMZvOME89qlWGUkbCueUjMDTFSr1NJVDCd8e9Swj52/pXFRs7YNC0ybshFRQNgmmjGIJ8o/1qdVGz2HNMrj44H3qb30AgnIuRx70Otgg47VOuRx6D6UM9GVuR60E/AUiOZSM6SMUU2gDcU2ppQOycb083kDHH2p0wo/Sv5xKhTD8hPrxUBEuyFj8YpcicDnf3r9MokhyZKAzvUDNlZBGea7NuAySVc+9V+dcdjhVBhS2D3OUMEZ5qp3WRlKh/Sj7jPT5jq29qq90n/i82RUZlIquyvXp3JVvnFUO9q1ZP1q13eVnJzVIvL+c5NeflR0RRULunJOM1AOL3xsO29TtzXq1Ef1quyTgnH51wSiXilQy6vb/SgnlZByadcURtmhHDzqNLVB2CuAdv60gKwfbilOHG3J70yk+nFZoYJSA
RnG9d8LzZG3vXWUgq429KICAT9a5pgaBw3nfGaJZjKODp5p6PHK1bDFTMO3g4OPbNQasSQDGgkkbYHvUtGt2QPKTUhEtvompqLbQBxSrHsi+yFbteT+CnhaDjIQKs7NtyB5N/pRaLZqG6RTrGhaKWq0HP4aSq0ngJ5q9izFQyEbmufsUnlH5Ct7fwamii/snH8Irn7IUf4OKvqLED/AKcTYeTp/SjwA4szpdpVxoxTK7UofwmtIVYcH+7/SmHOn8/wflW4C8WZuu1r40kfQU2LW5vgZzWhudOnP93TR6dx/w63AziZ6u3OAYAIodcBwb6DtWiO2Dv4Z9NqDesJH/DO3t3o8WLxopEaHGLpRNdW0kjyr8LWlJ/xAebHuMkehqRZsq2X0NPxULKjqQUgOIcTn8SCCNSc8jORU2qwc6myD6gZqRs1lmOOJhsRvm21LOGEoWtWsjlISCoK9wN+DkVlHZTGm3TKxL6WcRIJjtqbydkKzt9Djj+lAuWuS0XELbUC2fMg8pP1HI9xW82j4IfFac2lUfom5KS8fFAkoDQSMb/iIzkHjmo++/BP4lWx+PMuHRMkxwjwVvMOodCSP4VaTkHH8w+hNZqPhnbP0Vq4mLNNFSB5CCB2p1tpYwADirXMsSmru5bDBejyskeC62ptRIA2AP8XG1Ns2bca2yM5GMd6SUWjhnhcHTIOOhxJyUkg7EeookMKzhIzj27VPt2MnYYyKPjWBa1AeGcH27VPg2LxKu3FdJ2SakoducfIAT7cVcYnSinEhRZ2+lTtp6XQXA0trCu1MvTykwqGjPnOnn8Z05FNsdPPFWAgmtuZ6LQ4jSpsYI9K4z0OEPZDec+uK6f8AYvsbgZdC6XdUnK0GpFPRTzvlDQJ7bVs0To1oNgrZHvtUvC6aYaTgtjB575roj6JLsbgfNV16MfiklLZBxmoJVheBKC2QR7V9O9Q9KR3mipDYPfOKqy+g2nG9fh5yc4pX6OnoDx7PnedanWAcoO1Ri4rgO42rc+oOhikkIbwe21UmV0ktpSstHnsOKjP07ixHFJlALCk8p29qR4ZPAqzzbMpnUNOMe1RSoakH8JqLxu6A0yKcbVjGKEUSDvUxKZKEnIqJeTg81nGhehhR0nfP502VDJGK6sHGfSmiTn7VkMOah2wKUF+9DqXtilagScGmAh4uAAkV1Dm+3NDFXalJVjtWaD5DkKGck/eim3Pfmo5tRPbFENLI2pWaw4KO5FJ1kbb49qa1jFJUvbPFTaNY8V9qSXByMflSCrSn0J9fSka8ntxShHvFwdqUlzAwf1pjcedWRj1FcUSQCVbqIHvS1YU6C9eo6vTiuZPcf50yrKPLnfPHpS0rbB85Jx/Cn1+tLxGsUfUcV0Ek4zz6U0txWohSdPtXC4cYopMDFuK78ZoZbgHIpS1ZGMf50O7nPeqJaFs74mdhXcnFMo5Hr9KeTukZ4Io2E8Dg7U81+LFN6f8AXinWUkKBIzvSt/BiSiAqUPpjmpqM3t5e+KioYAwamooGABg1CTB0HxWuD+lSTSCAKEio3AIqTZbOfWoMApCM8j706lknttTzTQxnBFEBrBpGAC8D1GM00tnGdqkXG8bevehXEkZCaUSrI11nseKCW1k8cVKuozvihVNjmmqjfYjHGScnTyKbU2QT/pR62t6ZWgbnvWugo+rbd1ODjLmB9am03xDrWQvcD1rA4PVRRjDv13qx2/q8jA8UY771+l+4mN7bo0WddQM5Vt9ar1wu+M+bP3qEl9QJcGpDmcioCbetjhW596zkuwqD8ErcLrknz81WrjdASTqGfrUdPvGc+beq9Ou+xyr9a5ZzLRgF3O46gTr/AFqnXSXq1bjvyacn3XOfNVdn3DUVYVvXJkZZRoGnScqJ3qFkLBVzT0mSCSMigFud+/tXLIoo6EOKztmhXCNwTvjvTjhznfO9Dq1E4AzSMyGXP86QjcgHH506psk5wcV4NHPHNIw7HGRgjij2QFHOnNBNjG1GxwFLA5+lSlGwMloMYHkc71Y7fDSRnHNRtuZyBgE5q22qITik4E2PwbcCRgVNxLYcjy8bmirdbydJIHtirHAteSNs03ASiKi2nIyU5qRZs4OPLVjiWonACBUuxZjt5fpWUAUVJqyA4wj9Kc/YZ7N8VeGbKkYGiik2MEA6cfajwDRQE2Ltop1Niz/Bir+mxgDOmlosgxskVuBqM+/2fJGfDBptXT2Rjw960lNmGN0Ur9gg8I/Ot7YvFdmYHpzt4fG30plzpzGct4z7cVqv+z6Tyj9KQvp9JH4P0rKBq8mSudNbZDfb0oN/psnICNvTFa8vp3vp/ShnenEnbw/zrcTcdGSs9KSH1lLEQu6RqUMbAe+4wPvX0N/ZihWmBPfizumbW8tpJkIeakSX1IcTt5cqLSDgnISTnvVRHTWUBlflaKgpW+x98etax8FbM3b2bjObU00pLC/KGwFggHcebzpxztnbf3MVWx4Rpost8+KU+JcjDT0oyuNqKW5CnyE5HYp0KV+QNEW7rfpi/qW2lMFchIAcQ0rDij6edLefuKwnqDqD5G9OTUqlyXHFZCnwpCMdj4Scgj6j7VIxeq+rJraUuwbfKQRqSgxFoax6FSI6gOe4z7Vw+9y09nsexStGg9Z/DH4bdeR1u3jpeFLcQ4klcdC4ctBGwUSSnJH8yVEEDvjFY/1b/ZtW0qS309d0S3ngFwW5aAy+44Buys/hUpSfwrGAVJGQNRNaFZuqHXHkuKtMpLgRoX+z2A6NvdIZUr7oNTrt7txQyq5S7g5BylSw8h5l6PvyCpGEYJP4jjA+9I3KK+l6M4Rnqas+Q2LKpm5vWt9vQ/HWW3UKBCkLGMpI7EE4NWOB04CErUgegx619G9f/B7p7r1pvqfpqdEj9SxCCZi06EXFlYHlkhPlKiMaXk5GQNW3GNFifYZs6yXuA7Cnw5A1MODcJ2GQeCM53G1ej6fhl15PNy+n4O0tEfGtyGMpGCUnBNHRWo4UlZAyO9eluI1qUlQOoBf17H+gqIl3IIH7skY2rvjBVokooukaaxsCdzU1bExZKs+X6Vl0K4l5afMQc771Zot1Xbl+JklPerxj5M0jQ/lQgZSNqbLzbIysgfeq411rGDY1qGRuc1A3bqwuKKWHNvWmSsTyW+bKjqyNj96FS9HUkpSE4FUAdQyFnzrOON6fi9QFtYBXz70HBB2XCTaY0xJ1pB7iqtcOm4q3VhLYyRxRo6pDYA1bK5qNb6lacuRb15zknPfalnBPwaik9TdNtMBTgbA+gqhzbYEqJ07522rYeqnmXYiVnHnJ78Vns1ppyN4oAHnxUJ4F2K0Ui4W8BOkJ5qtzIi0LIA79q0y5Q0HSMAnSDxiq/JsqlErIznfiuTLh+CbgURcc4ximi2gJVrHm7e9WOfAShZQBn6VHrg5TunftXI4OIrRB+GSc5r3hnAyDUybcRgYwTT7Np8d0N8ZznbPaptgUSvhtWO/3FLDfsKn1WgjYJppy2EcJFbmFxrZEtjtjmnk7Dv60+uIpAzxQ+Ck4IrckxWh0nbBGKRrAycbU
lS+cg5pDhIVpIP50t2ZDvi55Od8bU4y288T4TRVjcnsPqeKD1AbkqITxjnNSFqtt0vExi3QoMqU8+sIQww0palH2SAST9q3G+isYSyaSErCmiEuKST3CVAgfcV5vDqkKAKkpBxggDP1rUoP9mr4zKYRcJnRTkBkpwUSX2kv77AaCcg/TJ3oK6/BXr+04+a6MuCOAXMBSRgZwkDJ29Tj6U/BJbaOyHo5vZnwlhlzyx2ApP8w17++aU9MkysKdczvsAAkD7Dim5VuuEea+JMdbaW3S3lQKQVgZI9yO5rqdO22rHrxipzVOkc2WDxy4sbUhWkrHCdic7Ukc880+5rWRrVnHAAwBTen3xQInM7c42G2aYcG+McU+oe5pso9+KyAxoI3H506kbAYPHrXMY7d6Ug4GM1mbY4lJp1tO/IpKE8Hj2p5CcHG3HpU2Gg6Kd+DtU1DPG+KhmAARzUtDI2PH3qEt9GJ2Ir86lGQOx+1Q0VYG1STD6UgAZzSAZLsYOP8AOitsVFtyAOeM4otD2rlXapNMVoW7jHb60GsbnFFFQO42phwDO3egCgN1OTxQxQo9hRy0ZycU2pvsUgevtTGAFt+n+tDOJOMjH51IuJyO9DOJ7GsbRAMdTLTsHPzqRjdXKGD4g/Os5WtY3So44psyXUkELNfbe5JHq+0mjYo3WiSkIU6PTmkyeowoEhYPpk1jv7XlNHZZz604OpJCdlKJ9zWed+TeyaJMvgVnSuoSXeNWfNVScv61jdfag3rsVjdR37ZpHlQyxE/JuQJOk5H5VGSJZVkhX51FGcpSuf8AOveMV79/61JzsKgPOOKVuT3pACle59KSATyaIabHp+VTbsDQz4ZPFKSxk5KfrRPhjHrTrTac4PNHiJ2MJhg8AE0hy3nThI3FSyUoAGw/rSwkEcD7Cg4hK2plbSjkH8qLgAl4Z9hUo7FS6k7CmI0NTcgFPGfyqEo0K3ZZrQyFadqu9miaiDiqpY2s6SRj1rQ7HGB0gjjuaMUI02T9rg7J23+lWq3W0AAadvXFC2eHkgEZ9dqt1uhZ0+UUyjQKPQrbnAxU3Ftg2yKLgwAQDip2JBBxtTqNgIpi1cEoGfSi02sZHlqwMQM4JHaiRAHGKbiZFZFsGDhNeFrH8tWj5DHYV75EDbFbiErYto9Mfali3AHirEIONtNKEMc6aHEDK+m3AD8NeVbgeU1YRD7Ac0sQh6VuKMVhVqBG6f0pBs6f5c/arWYIxuKSYQ4xxQ4mKkq0JGPIKv3w7s62YU+VLe0xkMLUjUBpBxuUqB29wRUS5bwoFJTsamrWHbT0T1BKdcjuBDK9OF530nAVscenBpZrjFseCuSR80dS2+8v3hy4/wC0jiI63FJQnxQ4cZ2SlIx4ivbJA9afttxl2koDrEgqx5nLnKdSsjHPy7ZDafbUvNVu+OXJc5b02Mllxa9lh1RUEdklZGwPYAZ7D0qTtimbHHM6+WRm2xxjw1NLJkLP+FtSlIR7qVpxnudq8CDt/SfRuPGK5Fpe6ytmttq+CFJad4bkEvAfRCULwKLYZ6QixF3CxTWYCEZD37pQZA5yrZAA9wPtQ8Rvox+C3c7w5cunWpCgsHx1rkOjsSSAE/bb0oKZY7FcrkzOsXw6PUMZKSwLlcOo0KS338qRnSSf5QFfWr/W9SohUe1aJay9dX6yOMzmXYt1szalNtyLDJ1LA5w4wtSkqxz5VhXoDxWhXpjpX4pdLxjdX0x1lKvkL4ygpVH1D+7fSRlKdQGU+2RwRWMzemep3HnJPTFi6chvABCot0SyNasgjKngFr9iFAg74qW6X6l6q6Vmp/b3TUC0TZWkPRrfMQ4iRjOCgaykkjOUFOfRRA2i3LC+UbQeKyKtFJv8DqDpPqJzpzqVhDMpGtCFIVqQ8n8SHEnuCNvtQoi+IkqVgnVyO4Iz/nX0F1DbenfiX06iyzmzEkrQh62S3GwPk5CScNnPnQlWwUg5BByk7Yr58u0e6dKSp1uvDRSqN4SPE0kJWASgKSfqBn6ive9H6yOVKMnv+TzM3p3C2lo6m3uR3AttBO2TgZwPeplxwuQhrGFY3zUI7dUusnSQrA1D2pS7wAFRSslKNkatyB6Zr0lI5EvIFMkrZXjUQn3PBplT6tHiauNxQcuc2ma2mSkqb1jWlJwcUyZzTrbrTKj5DsFbHH0Boprwajrt6KEqQVb52NDs3patQUrvtv2qFlvOLWUI3X6UO4l1epKdQKU6tueKYWqLgbopTSMLOopwPY/WoxUqSm7NPtr3WhQ0jn6VEwXJT7SyCTo3BpPiy1XeNI8Tw9AKRngGlkGkWW6T5kqP4WCdKd/8v1qNagSF9PKW4MKDgXud8E1OyIst9syUxgpKstr07YJwQfzqHvDc1ux/LtPEHXpUMdjxUnk5aGcKEPQspTLSsFGkJ++OKjJikpcDAzjTmpSHb5bNmjq+aClOr0rTndKk7bj13pcm3x37gqOycqQxuo+3NaUotCPGyiSY4deUrGwPemvkQdynj+tWWVafAbShR869xihp0ViCgtoWlSyMk+lTyYlJaJ8aK3JQzHycalYCU/U1L2OzFxsPrB+pOKjglK3w4sjSg9+5qwolPsxGorKd1KGVY9TxXnZsPFaDRHLgJW4pCOx3oeRBGMaPsKm5K2oaPluVZ86vVXpSI8J2cdTbK16dvKgnH5Vxyi1sNWVKXb9PCT9Kg50bwidu29X+4wC1yOxOD9qqdzQjcjAGdI3/AFpBHArZJCsevFTfSPRnVHX16bsPStnfuM9/JDTKckAck+gHrW5/Af8Aspyuvrex1x1u8q19POhxTQWotOyEpJA0ggHBO+fQe9fS1si9CfCW3i39MNQLc7PbbbDyW1qecab2DaiBq08KI2BJ3yd6R5VHo7sHoOS5T/oY70B/Y06TsYRM+MF5kSpZOGrPZ3EqWlQ5U+5gpTg5ASM7/SvoXpbpvp3pkNROielIdnZaSUahoDyk98rIK1k53JO571RJnxWsDJdtt362dt0ZSQBHgNMMrSN9TwcUM5JOPXOdqh7Z1B8Npba/2NEduLoKlqdd6vc+be8u6lIBbTnfAG49KaHKctvR3cI441Ffn9jZZcpiCUfP2dhK21FAEhTXjqyeQAonHfAxzvUQ51d0raJSkrtKnYzilJx4SVBskYxgYKs+hycVgPVfV9wgBUK3WJ6JGUBhp5hhazncaHmcLGw3JTqPqaqkXq+c6tYmXJ10uDSW33gkpHZOrSUqSfcZ9N6pKcY6SAsUmrZZ/wC1x0Jab1ZLZ8QOiung2Ijq41yejNkJShacpyjAx5gRkDk4r5RYBUkFI53+lfcHS0q39b9FdT9F3RQUly1LfbRNeVIYbcZHioUgjDicKRjGTzscbV8UeBJnPuFLiVNoAW48ryoSnsVf5DntR1KKaPM9anGSbGkIUtRQkaj+dIcTzjB+lPOPpSCzF1BvGFLVspz3PoPamVAkEelI0cNjKq5tilqSfoM02s4rI2zhOdgNq6lQBGew4psGnEA7HHvg0WFBLYOARgGn0JO2woZrgDjFEoJHYnNQYQx
g+YHf6jmpKOoAColpR+39aMZdIxUZdgZNNP8AqfcZoxEgdzvUM08AMZO1EoewrmkaATjL/ANFpkYG1QjL23FEpf8AegBkymQkjGa4XNXJAFRaZWNic08l/wB6PDyLYaFAEEkbVxRBFD+JkZPBpWsYG3NCgoQtR34plW+acUrekE4BJxWoDMmWcHO/5UOvbOeaedXtj/OhXFYFfYNntLYw4M/ShXBvuD9KIdUcdqFWoEnfvU5bGB3M6tiaaUTxj8qdXj6U3zn9Km6HVHEKUF4ohtXrvQ30waJawceucUBXoOYII3/QUY2jJ/DQsdrj86PaSeKKROQkp0j2PbFeTkKp/wANR7c15MZQVxRJjjfmG5NOhJx/2rzbWBT4QPT3NZgsZwc47etPR2gpzfP2pJT2oyAka8d6Ri6LBZmdKhtt2NaLYUJ8uBmqJa0fhUPsavthOAkH0oJAo0OyNAhJHB7Vc7YwNvL+lVCxrSSk4q82soGk5/SmSNVE/AjDYYP5VPw4g2yKi7fpwPerBD04AAGKokKwpmIOQKfEUcYp1kDHFPpSDwKejAnyw9K78sP5aM0ilBAoGATGHpXPlc4xUj4YrobBrUYjkxh6U4Iwo4MjtSvCHpWMAfLjHFIVF24/SpPwaQtCQN6FGIV9Yj5VjOOBjNN9VTZsz4b3WPD+ejLUkeZhv94nfdSBjnuK5epCGUFQc0EDIOdwaEkX1ub8PL80XHXXoqBkI3V5uAMd81H1FLFIrh/WqPmdEmI1LLbc6SuY2nUuXdF65hycFRSEpQ3n1ICj6mg3+rrZ0lOE+PImQpbqilU6S2+1IeJ/8MspwkemDn3oLqGBc/nC2n5hKncOBlUdbY91KUpODjvqNOw1tdMFlUUvTb/OWEQ0KKiw0r+fwwQHCMHkYGMkAA189im+R9HOC46NasF86fmWtm7dRdOwbhIfTqadW6pb6x2UovoST33JPtmiJXVFqvziWUWC8PqjtlIUxEStuOgckPOaEIH0V96x8dSPqfSmX17MUAoKlTpK1GRNUCdRbQDqDQ4SCW04wSauVrkTOsY6Y9gtU+ZbUEHUhiUUvHjWsttgKHupeD2Nd0pWtHFwUXsHuUToWDPevL9inz23MuqE55pUdCvYIJ34xpUdzyarUzqi+zZvi2O12ZEFb3liPRWlLa04w4S45qWBjZQAOc71pcPo7opuMr/almW684pSWoheeZaDgOyQoBWgHgnKj64oefZ4MCwPw+lug7e4iQSt2IiQhaHfZ5b6ivtjSU71Jwk0Nzje9gPQ/wAQb1b5KxMh2a4RfE1Ll22QfCbUrhLrZISnJ21BOjPJ/irQ+r+lrZ8VukJKYkF6JcoqU+GlZQFlKiSBnJ1AnIBB2IUk18/yI14tMlZb+GHTduTj/emUx3G1ho7kBUY+TjZRTyMgHcVY+jPiPebeyxdLPKauqY7/AIbIeaU3JZCv/s8gEADOPKvdCxsQlQGOenie+ikqmrj2Vu69KXzp2S+09GU5HW878ud9RbScKCx2UO4+9Qs6Q8wXkuMOJWhIUpJSQQNI3r6LvdrtHxJ6bX1x024uI+t5Uh+O9uELCNC06RuNx5j3AB23rLHZlpavUy19R24suFlbLanAfDWAgjKT33A7969PF/qcoVHIr+/ycsvRxyK4a+xmT6nrhLEeM2S4tIUOd/8A1tSA1JiT0sTI7rD7pT5FpxnVwf1FahDY6eFys/ySWUIfhNYWVAZVpSo49/KRUv8AFTopXWH7N6gtchmIl23peUMjUFNnRkAc4Pc/au7D66OV6RyZfSvGrMnkQHbBdociUkamXQV77HGxH60pmC29cF3JtCFMuLW2UA4KO42/PH5VNyun5X7CmXG6y9SYa0teMDqStxWvOO/4kpz/AM4pm3x1XCOwlDjbUpTPi77KVsD+qTn867HJPZzcW/BH9OMpttzlQ5zIKVk+Eoeihkfr/Wnb5BQthamowAStIBxyDV7n9PdOwGWpL09Lqiy24gs4JDo060450kHP1H2qM6guNitM1MbT80hEtX4SMBGolBHqORSOXkeMG9DD13iM9JOtlpQcKdKVgZy4FYx67gj8qp05U5OZjzBVFfaSrIyRvjKvqDyKtbN+i3JlQhxGwltZcKecZXkA0yvqKJFElt2PGfYUAhTRTgIKiACPc8/apqSTor7TkrKQzKmuXBKGW1KRg6tIyMgc/pzQEi6zl3F5trUhQSQU9/etJludPWpyELcCt55hLji8EJSN/ICefc8UHJtXTdxhx0QX2m561OKlu8AJwCP0zT6fQOLiqM7enylOtKWDloAZx7UDPedKlqOrKknJ9K0tn4fT1QpDodYLfleJWeEkkf03qJk9Ookx1tsIStTeoFXKTg8/SqQkRkvkodthyHHW23UEI1BRJHvVnuTkOGorZ38LGnflXc/pSHLXPitBooJUpJBUkcAKxiuO9O3iSlvwIbjgUoBOkE5G2P6GpzhGXZNxvoj57bqp+paN1OKKE9vqfapi2JUlalSHnAhpGSUrIxtmp5/oq7S5SXI0FxJTDZGSkjSpWcn67GuW3oDqW+Sh0/a2CudLfajss58z2pQBPoABuSeK8zPx6svDBkfSKlOXNvdwbg29l2RMnuJZYabSVrUtRACQOTX0l8IP7NfTXRlvi9YfFuAqTdwS+za3ACywkHyaz/Eo6dWMdhuN6svwz+GvSfwJtDfUd+THndaOsGMlapASzDBJUvTq/iwAFEAnCQEg5JrMPi38YBf7u/Y5vWwtBcWsOW94qMpxGojW4toEMAgbN58QgYwmuDI/Efz9/wDB2enwKL5P8/Y0n4kfGHpaSXLW3fXUIQlHhptqPFdUAMaFp2wlJ3CEgJ9STWN9OxOkutr46p3qS7uy3iprx1sqZ0JH4UKbWMq3G4QsZB+xpqehIsuexd3OtYxDLgCmI8RxxlYOSy60taAG21AjPiayD3wRiTuf/tZ6XmNtQJlvltjzoZaXHC3U55bTpQHhnbDa1EdwDtSe3T5SOu1XGLNUn9A3SzxNfSF3gPvKSPGjS48htlzG3CWtSFZyMkaTvvUD+2/iOlaLNPsFscZUjHhQkB5CkEbo1JwtC/TBRv6HYhdNfEPqBhYTKVPtjmSqS2grHhrx5wpiRkJB/FhKwd9s4xVifXdusGiu19QtXuEhRSuRCk6HGCeEusqOplWBsU+RQ4A4qypr6bJbi/qogWJFzgI8Eybo5AUsgwZiFIdbJ/iZeDaCCD/CtIyeQrmq7c7daYFyVoZnB1weK0sRQ2XUkkKSvTjBB5Hrv3qb6mtbaG1NSesHI1xjJGkuSnFr0jsttJBV6Zxn6iqcu4NvQnLPfT+0vAJkRpCct+EoDBwc6tKk8j1Sk1Gf0rZeCUujfv7Pdss02+OMfMPJkTYrkNxt7JRpKSEkdwRnB2B3zXyN8WrXcen+sbn069aP2VDgTHW40MM+FhAUQlahkkqIwcqPfavor4B3Jxvqa3sMGOtpt9BcaKHApGSAFBalHj0GygTxVE/tq2J+1fFd6S47FajymGzEjMRlNaWgCMncpJyOdic8V0
4mnidHkf6jBqRgCTgcUvnj8qQEKKijGCPWnCtATpQMqI3Ue30pNHlVQgpGNs44phwb8c0+T5cYNNKRk1gjSUncD0p5Ax2IzXAjnOaWMDj9aVgHEAdu1Po7YHb86YSTz/lTqVbbbg/pU2wpj6OeT+dPoXjtxQiF9yfzp9K0+v61No1hqHQAaIQ7nB3qNSs52NPNu4BOaSgEw0/gb88U6JA7H9aig8AOfrSvmu+ayQHZKJk9u1EsycnY/lUGJBPfin2ZJBGSf9KahScEn0PP6U4l7I2AJ5qJbkBR9xRbTu31oUjB2vY/n9aSpWc//mprVvSgrfHrSs1GTvK5zxQbixnvmiHlDPtmhHFZPHG9fWNntpDS1fw0Os7bEb044fUimVKzuN6m2MhCieN/zpCh6ilKznfvST/SkbGQkDsNvSn45wsAjY0z3xjanW077UBWTUVGrBGCKlY8YBI71GWxwFP+VTzCdW/608UmRk2IDQHaveENqf0b6QketKLXsfvTsm0DpT7kZpQBz5gfypzQQe1cI2xnc0GjdDZ33HrREEfvM0yR35ApcVehfvSNGLbaljYcfWrvZXANON8VnttkJTgggbVcbRKAUMnagA1CyP40+bf61e7S+DpGR9Ky+0y+DmrvaJw23pkw0aNbnxge1WCI+Nt6pFsmjbzVZIcoFIwaZMSi1R3RgDNGJcBqAjyxgUa3LB709gqiVCx60sKFRyZQ9acEkHG9ZmDwsUtKgO1AJfB4pxLwI3P5VrMHA54pQUKFS8O1K8YetAwQVpHNAz5IbbJBrj0pKBnNVy+3htppR1jYUA0VTrjqFMZpYDwSd+VYrvwsvMO69GdWQkyWpDyW/EKfDR4iQeE6/wAWMjP8vfnNZ3151XJZCww8tvUeUkYP57Von9n0uTOlL6mX1HbFSX21LR4ehx9KE75Hh51Y9CM1LNvG6KYlU02Yb1SlyyT1Os3CY0p1QWRClBC1p9Ckjwwe2Rvvtg0KJHXNyZdNgihlyQgpeefkNfuWj/wytZ04I5OTtsBzWpybZDkvuM3jqufKQVEmPC6fRpWAds61E6j/AMlT1ktloYCUxOh7XDOVKD92YhtveylJbWpYV9irb+GvF9P6VzjTdfn/ACe9l9SoeL/P+DJukPh51UqQ3dZsa0SGnjvNmpbV4qk/wRmMKU5gj8ShjvjYVqH+z8qX4EWZc7fIdYHlYUhx/wAEdyoua0tnHKikH34FWJvqOyNwlOTOs5MhgBW1tYEZlWD2czujnckE+u1QB+LPRCXRarXGkS290oXbUGYrX3LikNaPslavcGu1RhijVnFOc8ztIcPTd6MlYsKOnbclsDXcp0gFSMfyMhKEZ9CdQBOwPNRXVPQky8pS/eOrL1McYIJjWewhaHl4OFF5xKANtvKcZ55r12+INkmtFsMfEq6upUvKLNCfipSrH86UtEfUJIFZzfOt3GX0IkfDW6xYGSh1m83yS8qQ2rnWC55ldx4nBxxUpyi9IEYzW2AdR9M3y2Fcbp+wW6zMpUHi69IbEsb41KQ0Ckn1JJxiqPcZi0yAub1LBukh1tbDrbjJXKUBzj+7QpIO+ElSu+RWhXXqxEBhiNcLNaJTSkhcHTJWylTB3QrQRpyf5c7YqhdT/EKcoKh3TpixuR1ZU0mRqQQc75DTqSpXpkA+meK45xadNHXFtqy5/Cb4sI6dvAjfNmIyJCS7DmxlZyRnUlaslbbg18kkBSucVp/xH+H9g6/6WYuFvm/KOR3AWJUdYd8JRIT4S+5SrACD657g18iWK4uRbiJdrk/KW+KFfPW8q8dn5dSgS+3nzpDSwSscpGFYKdVfRfwO6nkMS1pYCS3KdWidDK9y4p0a3GiCdiAp4dgoEj8RqdPE0n0+vsO2pq12jIh1DdOlOoJ/SsiMk3K1NGJ4Clb+VICtJ4BCgo496urd4ekwyzdX1Wy2TRIDTyF/u0NBpSVJUDwC4BnjfbHFK/tOxk2+723q+CzH1xFyrdMCGQlS16iEvFX0xjPKiTVKZ6lusuyLjz1uT/GkOtyoWrQpwvqToXrAwBkA7d+dq9H08opJHHmUns0Dp+02u+9CIYD7JWq4vsPSGn8Ij6cltSkHdWUJUc8nSAN6yfqGbdrU6plp9KFRVrY1tnAJQdBI+6KvHwz/AGf07ZZk9y4vuNKkTkvaAVpdQ0601GcUNwM63Eg7jUDVT+JEaM7PvL0aV4yoLzTTXho0IeS74q/GSncpGySe2pRPFetikpaOKScXbCoEu+vm4RZDrz0hhmO/HDY1nK0IWCojj8SB6gn2oTrq8fITDHlXASVRElAeKSN15V9x5quFkskW0RpF2ScpfSHC02lQRgJQVtKB31JWCn6/Sqh8QumJ56wm2maIankXUwltsqUoKSEZ8TJ4QAUpOeVah2NMmm6DtbEt9QtWezgRJOp26pSWwMJ0Ang+gzxUXab4+pf7PuuUtykFRcB4KVcj1wTU7PEM32Y7cm25SlNErDSdAYfSNtOOU415HsKB6i6UXDiic0jXHb0vpcQs6xHCSVqSMbBWrP8A0ikTjdPtlG5eCTvswwbY24sOOeInw0rUnGQMjb8jtUd0i89Pkn9oPuIY1o1KCAde4ON++kH6496sFnZXBgi1TgZ7C3mVhQSVtIcdjIdQMkZzpdKSPXPpVZ+IMxzpq4OWZgeCuOoCSjA8sgEpVuOwxinUl+kRryy8weq1Rky0uFKVJSXAyjBDh1AlI+ygMD/Kl2h6/XaXeIlojJIitJU+lXlDTeQonB5GVY+1RnQfTEhVktvVbzTkkNS3H3EKQcIxpQoj1O6T76duDV7uTUTo65Ren+mz4ka7NNkv7rWVLDrYUpRzsCQrHGw+tTlljHoHBsD6MsrF66qgW+WFhp9xLjmU5SEJCy4g47nkfWtfsjXTFj6ZhsPhl55iMEskhIUoJdOSdvVeRWMdI3y5dOXGHIu+lTbjSEvoAB8IEoSHke+HVK39KsvQNju3Xd4RPuD5bgJcbjPqdc8ynQUlaEgYABKdXtkc15vq/UOUdM7MGFJ2y8W62z+tlhmwxfCZRJeTJWslvShPmSM4wNlHntU1deqOi/hRZHSlyIbqwwXrhdEN+SG0UAFllXBWoABRG+5JxnFUzrr4w2HpqzTOjOg7qhy5FbXz85ndcdterIQBsSlLen3UR2Br5n6x61hdQvrgSpt6ui/mdcr5BkKjOr1HDROcqCElOEpGCokkkkV5sXtJd/PwdclabekX2+/G3qr4q3dTfR063dL21DuDJmJQzMmny5bYceH7sEbZRg7nCsnarvfDT9hXRZtl/wCpJM6U8qWpL8CPpfC8qAUHCpYOcJKikAnO/FSfSfTsmKHHWuo77ZShfiNwHEpaW6MHCvBWCFgjsps77ZFTNs61hTS9a7s/euobOn9zJQiG34lvCuXUDT+6Bxk6VAEjSQDjPTj+pV4Oeb4vRXYNwevs1iJe+iLPabnCV8v/ALxFXGU4nX5kKbBSlecj8BJ3yEb1q1nkG0pFsvYksw3DpDb0tudbpDR5SPHYaW32wFK1JOAM8VULj01em
EqhQ7tc34cT95Efj3QlDsdwDGW1gpUhQ307J3IGlWUmU6ViXpQVHndPu36EpGYd0gvoadKP/Dc8MAhQGQCUjdODyDVuO+ibaonrxH+HUcta2IqtBS6yZV1cc8NCyUoKANZKdlDCvLkEZGN6B1FbLR46r1aBeEPMLUhSIL7KoY99CG8kHulaQRuDtg1dlRei+omInTt/lSIF5kh6PbFXCOHBMz/eRS8ylKVLOnIQ4lDmpIyFEkmkXnoGZDdFwtFwt81C2UOLbablPsyGz5fIUpKwklPZZUg5HbNPLE2rSNjmk6ZVb91P1DIhQ2n4DMxlpKkeDJUpJbCSf7tw4WnYDbKgPQ80PGfaeS29CQ8w+hGrR42spChyCoFJBB/7U7d/hvb5CxPj3lizIT/fNz1qPgKzgjVpC1JOdlFOeyuxJ8K02mKyhti6PX7SnzC2tJLQGe5WrV/+DXn5lO6Z6GLilaL18M1pjXq3OXCVOLxfbCExXENqbWpQGCkYJB74H51O/wBuPqCI9dLZYhdXoyozCJBjPKcw6SMamkhOk7jck0n4TRF3Tqu2R7TC8FkyEElWFO6dtQUk4KfqNqrP9ugRWfis3AizXnlNxGlutuuHDS1Ak6Uq3AIA427966fT2oSPG/1KVHzd4ilHGT+ea7qz3/7VwJ7nO9LSkenFK38HjHttPPPoK8Egmu4yNhvTiUA4A9KVsBwNHG2K6W9juTRDbSuTvTgjrxnSePSkuzAGMHjilDPofrRKoiyQCnakqinP4aV0xtjYO/8A2pXiYGM13wSD3pJaV3pDUzqXd8g06HsDncUwW1DfAzXFAjuRRWwUFiR711L+d85NR5WRnn1rwfxsSKZRASiX+2efeiG3znYj6ZqHS9nO/f8AOimlkgCjQCYZfBI35o5p48EioVpW42A+tHNObjf86nLXRrJVLhIwTnO1PpUTUe0rIGDijmTtgipNmoyRxznJ9qFcWDnkU84Ffw0K56Gvqm2e2hpat9JVTZIB25riyaaUog80GGx0q9qSDtxzTXiEc13Xnbbek0MmOpFPtp4J7GmG1bjtRSM7ACshHsKiuFpYNWW3vh1AxjNVlpPpUhDkLYUCk0ydCMtCGyd8U6GgRsNvWh4Etp9ISpWCakEhPrTqmJoEUzueRTKmsdtv6VILGOP9aHdSB7UwrAHEkZx+tMhRS4KJe25waCcVg5BGKRqheiagvEYyTvVqtUpQIwaokOQMjJ59aslulBJBBFKazS7RO04yrcd6udquOnB1e9ZZbJ4GPMPQVardciMDOPvRQTWLXcwQPN+tWiBcgQN+aya23cDA1/rVot95B0jXxRRqNKYuQx+KjWrgDjzVRY13BwCrP3qQauoI/EMeophWi5Jn/wCL9aeTOB31D86qCLp3CqfRcwd81rBRb0Tt8BVOonD+aqki5DH46fTch3VWsFFqE4etKVP/AMVVj9opHKqGfvKGwSHKFhosFwugbQaznqu/HQsNnCt+DTt06hUoFKXD9jVLu0pySFEk4Heg2MkU+7TXpM3QpS1JUrdtLJc1/bIx9c1rH9nWV0lbb1dLi0Vxno9ucW6kHKcDkKWFKBGN8dqyGYzGecKJMYKZB/eL+a8LbPvkfpWp/ByP07CidStwJbSvEs0lbzTMtcl0JSk5CgEBvjOO+aDX0jpUxv4v9SvLeE7pm/xrcFBLqVJaK0uoOxGQCSfYDBqhsSL5cW1NQeobvKfkZwqMw66UN/x6SpOkE7j8wOKtfRl56Zm2kuQulcQ4qiGlPSmnNSjytLDHiLbPupaT7dqtD/x36O6RQi1wmo1vfCEpSp2ItSSPdeCsHn+EfWvIeNY5tyn/AMHsQyuWNRhC3+f8lMsXRV1uDjUiV8ObkpSkfu3bo80tYbAwMMtBRSOSdSd96uL9kuUVhmIm7PFaNKVRmIrYCff94tKdhjlJPoBULefjN1c8hcuLdbVEjPqCVOx4bTqnhn+Fx19eP+oAD0qDd+LgcUl28dWXuQtGtWiDcmtSgOE6GEtj81mtKcEvpJ1kb+oleopHWaInysfqCW2FEBHzaihCEjthtxBT91KG3FZjepvULbi7heOpobsWIFlLTF3ZSl9wDGjQop1DYFW5zxyc071J8SOkZAcUoy2ZSygCRdro2koz/F4ZbWkH03J7mstuXVtquFxQ/wCL1Fd1xsoa/ZDbi0pSN9OHNKSBnOoJHPFRXKW6KVx0W+2zH+smHbfc5LK5T61ORZCAnxVL/Etvygo3GVI/xJ0/xVUL/bJjDDiH7pBukOV+6ZUI+lLi99KdQ3Qr2/IkURCvUppwXB+03CE2lwFh1+2BpaFA5GFpc/EDgjABzVq6gs0672+P1jBYeESeFtz4zsBK2hJ5cQpKd8KGHUY3wpQG6KNN9hutIz3pbpx9F0t96sjzryoyvlXmlq8TwlE7BRx+EglAX3CgDg7HfPh1HtNu6utl9tUMxkOz/kZgKVoYS2Y5bXo7suJcKCQcoC88JUDVG6HvNpS7cOnbs6i3u3SEl2PK8Q4GFKSkrUdyhKxs4Nwk4UCBV0tNpeYvUmY3PWwtbgjy0r1OpQ8lOotqAOFKQHULT/Oy5kZAwM032TutIf8A7SMBqSm5W4h0+MrxGVKAKUqwnBHcFSUgbjY53O9YZ8NUXS7SV2qU2fmGcAaydbhSCdORnJ2HHcJ9a+lvijcbX1N0cpwSYouEREuI/HaOtTMgaFJ0nPlTpS5gHPJxivn/AKctN4sPVsVwNOa3VFTawDoCkqDmRk5O45x/rRf6XQcfezVOnbaluxyOi7nERDXcZDSmS0rLSoyUFwoKgQQsuecZA3SR3qodSqd6gNrhtsMftORcXul5ryGggFSFBUZawOMsqUdudJ9KvNkdFtv8+4TpZeENp8QUKASlx1TiX0toJ5/eJcAHoSODWedSSZUfr63T7UEPRrhBi3yOFnSTPjvKWEuJGDu1rbKfxAK5rt9LJ1Zz5kmzYfhfCj334W3G4XaYmU/cm576GVlI8F2NJQW1ZByCouasHGR2NZR1beojstwwXvnVR9cNMhwDUk+OpeTucgrUvjtitG+ONylfCXpSPZrXMiuIQFRZC2yMtPlxecY4UEhIPP4B6V8qQptxmXSPYm3QiTMlttIQtWkJU4sAEn6KzXUpPslFUrZakXMyy6I4cU9LAcKhj8QJ1behBP0H3qWmdVW59z5d3AbaiAsIZV+7WtK0gpOf4dBXgdzioPpO8WqD1JbLZKaW46iJLSptKQC6+ph0IQSexUpKc/SobqWNYrX1p+wLPdXJMEptxZkvt6FKLjDC3FKAxjSpaxp5AT60/dsPRuk2FbrT8PLZfrBe0yYz1mcSGEoKViSyt4N6/UhzGDvttkVmPTkV34oXSTACi9dZCUlhZVhK1KWpxSnD66WnMepIFb5P6XiK6Bn2lhptPUUH/dorOk+Gy5HiBL4CTylZkKWDjCiAc+oXTXw8snRPU13ftElbVvtyum0RnBpUt16Ima8+hWBskodYBPck59Kg8lRtdmXwVey3XqCbYbT0HbH0tJma1tkt5GkvBQQV91JS
9ueAdiM0RHcVFtkC4TJzr+lTjqWWxq8Np8ONltOSM4CW8A4xqp+6Kmy/iFAvEOOuHb4lvcU4ygaSxl1KEkEbKyrSP+r71C/E55+VclW2BEMVK3ULdDJzoTqUSlGeRlOAfY15+bI5OkdmKCWyqr6tF66lasHTynm25hZgqaAKlDy6TlXtk59vWvoG0ynOm/hcTaJB+cnvxrR+9xrW6ko8VQP4cqGQVZB0oPc1iXw16XXP6qVJhjxS2+Q08glC0uLGUv5A30444yM1vfxTvsq0dP261NwoMODAQqTJkyHw0rC8hTugDdRUTpQnzEkqOnG8JQ5aQ7yU6PnuReY1rs13kvsMtuXe4xiGkYW6lZVIDis406UpQUoztqUo48tZXAuce8Sv2jJiFH7PdQ6WFyksx4iVnJUDwt87aSolQA1DBwa13qrpxqN8P/mrJaJRfmOJjB8oBcabQ15i2jB/enxigH+EqWSU6DWEhxmfcJLMFEBtiH+7KlgrbStawpWT/EtQScq7JASnuatjhFY7XZHJkcp14NjsvV9kctwHSvTlmmXMr8ZTny+VhedypbynXn145UFJJJzpqx9IfFgl9i432VZIk6IpbSQ5aFMlSM4Lan46Qts78KGlQIyO9ZJbLQ40pEtDjLryiVhloDS6c5wnB1Z9N0qFaZa41qSylHU9/vS2FhIcjPx03D5fVuEqX/eNoP8AKsLTzxzQivAJfJ9B2192/WWPe+mrTCvTMcqK02S5oRKiOnnwnFAhLalA5CwtGchQ3JETd7fZupoxuTDk213NhakONS7WWZcdeRlt9lpWFhWNQcYWpJ3KQc6RCdG9VNwJLKLbJt0zQoqTHbkPQrgGs4ISHUrDoxyEqWjb+Ecas70Z098QYqb/AG5lqUt1HyzqWVtsTAD+JGtCi0tWRlOU8gf81dmJxlo5ZXB2+jJ5jfUsiJI+eTIlSEqOuZHfjrRcQjBT4clJCUyEDGC+0Ccb5IJqPusSF1iolqa8z1EhQWpqQEQFzlrR5wst6mwXUjzFIAKwSMFKiq2XHoD4v9MTS/0P1bEmxXQSq3X62rhTApvfGCAl1PsFBYJOlRSQKZcuz3VEOSm79PwIF7gpLki1qfaSS3kZdjOugKThQScLGncHIOxo41pDwku0YJMsL1plZbuN1U0rIbMx1MpO+QUFSSVZByk+XtS7db7iZvyrISCtPlLas6T2ynAP+daH1f0hNu8N2bbYKH73EJMqO0wI010YyoPxiShxeMHUnOrGe9U+zLbihsuMJYeW5lTY1accbpO6D9NvavI9Tj9q5M9jBk96oo+jP7OVsnO3+MiSpEt5Kk4e82UDBzkE4/zr53/tUNday/ivd5XUoDkZLp+TW28HUhnOE5OSUn2J4xivpn4VyL3Y+m5vUCrXJdYjQ3H20xUJkOrVjbcqAxj1UK+M+qYr90v0+9vNloS31vITIcSXtJOQCMk7VX0za9Pu9ng/6s173FeCneCrOwNeDRBGoVN/IdsUgwO+mhZ5DRFBnjbAouPEUtQGnPpR8e1LcIwkmpy32NRx5PyFJJrwNGNkVHti1jGk0e1Z1ad01aYdiOASj86lmbFgY8PI+lIVUChGxnnRTbllXj+7+9aUmweX8H5ikO2EYKg3+lFIbhfRlj1nWkEaaCdhLHKa02ZYyAfIKgJ1nGD+7x9qNCuBSHGCNuPehnGgD/2qfnQ1NKII49qinm8EjGKKQriRrrZH09BQqwrNSTiDxtih1s54wO1MkTaBW1KSaMjunvTBZwcj9aW2kp5wftRYjJVlZOB/nR7KjsdqiY6uMGj2V8HJqb2AlGDk771IsjYZzUQw55h+VSjJ1AH171CSCjK3GTuTQbrR4x9al3G8dqDdbzkn1r6g9pMinGyNqYUg5zUi6gUK436HFJYyAik14JOds0QWwe3vXvCJBOP0oDVYhsYIopsknk8UyhGD6fSn2052zRRNhbZxtRTPI5oRGwGKNjJJxjNEVh7BWga0nBFSMe4LGEq3AOB7VHtpyn/0KcSnk+lMrFJpEpC0+beuLc1cUIyPKCKIQnAyKohbBpGoEn9TQTpJyakXUbY78mgHkYJOc1hWxpp1Tas8YNTEOeEgDOCDUE5tkgVxuUW1YB+tLQmjQLfcRkYXx71YIV0wANVZpCuZBA1VNxbrt+OgzJmmQr1px+8x9asMK+4xhfvWQs3kII89S0TqAADLnFZMe/k2OL1BjGFc/wBKlo99BA8231rHI/UZ2/e1KRepOB4mRTIBrrd7T/Ofzolq9ZxlVZYx1GDjDn60ez1DgDz425rG0aezeRgYUMfWi0XUEY1n/OsyYv4Jx4hH0qSj33YHxM0A9l9XdVAbHj3qGud4WAQDzUOL0gpzryKFkXJp0bHJ96HRhS7qou4CuRQU+cCgjzb02khSivGQfvSJLBcbKjlJpWx0rKhd1rU4oqTnfbNaB/Z1huXHrpy3LCksyoMhlQEZToJUgpwSAQgbnckf5VRbqkN/ixnfmpz4NpkPfEG2w4vU7tnMhzCignS8BvoxqCSTj+IgUWm4sbp2G/Cno9noTo7qJ0PrVGkyFyQ3oIVHIyjSUjYYUjG1YV1hcrg5MflSJEVpxJLazOUFtuo5zsNRIxttmvtbrroeRY27i507IQhUh1191txWG3PEUFEpxkJ82Tg5Tkn1r4s+PHR0qAmTebPA0xpJ8ZxCTs08lWHEDuDkHynvjHavByw/8lM9b0+Wo2QVv+I0CzDUzd5LwRp8RENhTSFg4ylQccAUO3mbI9qkLf8AE74bScKk/DZMZa07yo10dRk5yNUZpTaCPUIWnNY7abLdeoIwdgsBTAUUqfccDbCcH/xFYH2GTnsatlr6BmRkmO9bbjPdQgOgNNfIxyDxh2QAtf1S2B71aMeCpjyksjtlvf6jvwffn9Fw7C5BSCtw2syHJLIPBdbfWp5AA/iCSP8AFUOLJ8XruzcrhejdZtpbQ06zKdQ88whJOFhJUtKEjBTyoVauj+nnLLFdu9kesjF9iILhbtLaZ01ogFSQ3JwtZVhJGltbO/dXBMh/F209ZOW53rfp67XFyUhTartNfLEpKAsIUpbQKWXgnyHSpGsJOfE2roUYNWkc85taKFYo/TbCy+Oo7Y1IdCkqUzJXKcGBk+IlOqOr/wCW4r/qrdf7PF/6YsL1ysF7hsMWa+rZiP3OLIaXHjSwSWXlxErWEJJ3OhfY4T6Z31F0FJtjzVztbTd4jNP+Cn5VsR0ZVkttvJdAU0pX4QFeRasBClk1ZPhKi2xLm91P0z0BKj3ODmP1HaJi3Xn1wFALOhDiihZABcTgEK0bEKCmlUWOS/Y5nOLWzUvix8OIPSENV16gsaYkUKfUn5c64zLiTh9vxhulDrag42oYSUqVqQFN1XYthbtD8G2T7zJaV8qLhDuAWlxuZHiRXCx4n8yksrSCP4mwsAnRittdlN3joY216TKuUi2NtTGgWsJuNswHUaARh0eAfwqGpSEqSfOk1iNy6XRYY16sHSziA309JlT7H5y805DW1rCGMYI0eMols+XClEacnMppL6UaLb2
y1dOP2G82mPIctSChY+f8XUr9+yXEh5Pm3Ck6QQFch1VSkqC6bq5b1JSJ0wzUuOttgKSYkdRKdKjuVaFDb0BNUS2LkXV5cFlLMVm4OQ27e64VFxDcqKXHW1lR/wCHgNpUOfsK0Pp6G/1RMtnXQuDKkyunU3B1bzaVgOOxAmS1oxnxfCDih3IBNc6xqTot7nFGQ36FfnOs12u1Sn2hapTVxUpnCS0tTSSlGCMJyWysAnPnI9qqV5vEO5fHbp19ppTgdnRHUx1L0pXrUHPCUoZGFFWjPorf0q9/E2/WyyWqZPYjOsPO3mMHvEzq1JUVhasZzgSFAE+/tXzaxfLhN+JBR8y7EuyHkLYfSPNGWydSjgcYbSTnt4ddmONRpE2+Ttm9/wBoNufI6ftl68dMqX1BETc5TiED966y14sp5SuCCmQBsMeTHOKovw+/ZVw6X6h6gNiS9cURm0QHOVBS3EMuOJ9xlJAHBX71pvxAtieouipNznCKi5PWpxm0srUcRC9CDykqwCE5QkEpB2LiRtwMe+FPULTV8XY7wyyhqD05JS2C4AlD8RLMoOnODv8ALrxtupY9Kyk5R+5tJkbGXCd6+jeDI8FMVoueIV6susthYSCd/MtGM8nO1M/FFh/p2+xroqEmOLotT7TqQcOJbaabJJ9fFQ9tgHknOaj3YL1r6WducmDvBdgvMzlr0uPuuj96gDsAEpx7qz3q3COz8Tel7J0HcZBMuDO/dylqSkBp2K46sOLG4wpspyBwSec1blW/Ast9G89IfEtXSvwxidfXRDsmc/JtQCNyTAEaNrDiiTpUooO5GSrH2rfw/wDjkzNtt3sV6cWpht643JbhIKgl4sDRnlW68Z7JB9KH+M17sznwcgqatjsOb1AuK7LeCMJZgRbaH21hCeAXHwgnHKRncAV88fBW4Q7n1hbWL84pNumvtxJqC4E5jurQF4Ue+N/qBUuNxtmi1Z9c2y9QR1OzYLhJccdlQ3HpTSPL5TIbb0IJ4OpnUM8b08jpJvqWN83dH3WJUoIHgBrAcCnV+GU90pIyCT/h3Gqq71X0HdFyW58SYJlxTJdCC1nOh4rSywoKOU5cUsDGwHm71JzZN3sNudafuiJr7DTbbErQUlaXceElOn1AQ2E85IOxrhyQbZ0QmkidgdM3Dp6SUxlJZQ62pCvDV/cuNoUlsKI38zYCsD+Y7d6jes+pmIdphdXdQGLHuMi5qDklZU4zHgMMEvrUV51qKA4AdIGteACcUmb1rbE3mZfJ1yeMe2OtzHCyjUlTikIU442nOCpTOW0jOwXtjaqTdmYvWF1k9RXq1SIfSHSL7l0nofcIalzFPYjw2UqGdRUlCFgfhQFHOVijixOTqXQs8iW0Wqf1fNv/AFdK+HsR2XHYttqQ1NuCo4QqL4qQtwjBwD+8IVgbeGhIG5NY7buhok+zA9MMuNoSsSERYraZT2p0aY7SlK2U6sYJHOf5Up33FtNs+JL95ucq3OQrbc3GocYxmyF3V/QlbhQnOp8oCiNKfIFOb6iNob4pdRO9PQ3fhv8ADWzi3tsqKbvcGk40urScxwprzOOkDzHIBACRhOVG3tb0R9x1RmTDJ6QWqCh56TKA/wB+lMsKVp839wFpCkgDhWnKlH+VOxkLf1v0M1cwi5KjNXGKAA5IilLsdJPCFJII3/iKTjYbVQ4hf8OUx0xOV86k+FLnXOMqR4CMfhSthaCyTn8SueysbmFfidUQwVXubdUwWE5blWuWXwgkYGkvIWQgnlPiEHjB4rTxKXWhoza7Pqfp34ndNXWG5BtHUke8R2ilXyMt9D2F5wQpKilSTnOFpWlXqK0npGRYrw4tyzxlW+e4MPIElovbYwS27pcVvuEuawcHzb5r4mtvRN5ura7rZOuOm2rgCltsXFLLTqzpzpSUpLgBHLepQzwkbJqaj3Hrrpmd8hckzHJsFsLVb5Daja3kHHniyUqU6nHOnDeDwobgaGPh+lmlU+z78QJCmfkpfXjVvcaClFh91KGjuNlMPhaMAntgbnBFcc6e6mEUuJiQrvb4yVfvYLyJMRerIUlcY6/ByOdCwN+K+M+mf7XHxEt0tFo666VtV3htI8jM9ovJxnCVtukle4xkhbgrRLP8Z/hN1EGroqB1V0je1IUgSOmL7lGNx/7u9spQzskkZxjzbV0rI06kTeF+DUbz8P7DcHQw5YLlbjDAchXC0upmpZbO4QWSUvpQO7RCkp5QU1TOpOhbt82id4cK4MJAWZbBWl9WBgBSHAle5/hI559atdm616T6ztjDa/iHaOrHYqvlkSp8RdmvLIxlIXoOHDkEEhI39a51v03dXrQFwW5V+RG/fCKrRMeQEjYoJ0nO43G5HbbNQ9RCPqFx8F8GWXp3fkmrreRD/s+X2S/CmQAW24rrS2fCUsK/hAJOkHNfHRYSFEIQEjtX031rdL90p8I+lbe44zDcu7BuDrLjSpLC1K5T5iFDCSnYpP0r56uLzMuUVswIkZX8QiFfhrPqEqJ0/QbewqU4qMVFeDzPUy9ydsixHBGPb0p5mD4hwB9dqIbRqUAO9TtstpWQSmudkIwB7bZirTlPPtVrtfT+oD93+frUhaLOnKQUf9qutqs6Tp8oxjjFSZaOOivw+ndvwbfSpVjp8YHkP5VcYlmyAQjPvUk1ZxjCk5+1FQbKqCKL+wQP+GceuKHfsaf/AAz6k1oq7TnYp/ShHrTnO36VRQrsbijL5tjH8vb0qs3SzaNXkrX51rSAcI/Piqpd7WnBOkDI4xW4glDyYre7WRlQSRjnbeqfOjqbXjB5rXr9bQErOnv6Vnl6ggEkCnSISgVVaewyKaUnfGMfSinm9KjnFMKGTnFGiEogykd+a4kHvvTix7bUlPApWiMkLQcetFMuKzn0oROM0+0SD29cc0jFJNlzBAJqXhuBQwdqgmldwQO1HRHlIIwQftUJIxUnUbYoN5vB4qTcQk7D+tBPAfSvpT10RriSfXFDrbzxn6Gj3E5oZxG+wFIyiYIUAehrugeg4p4o7/liuFBGQR+tIGxjATvpxTrew2+tJWM8etea55pgMfR6GpGKONt6j28FSdu9SkVOAn0prEDW9hxTiUjI4pkHA3pQdAIpkCg+PgYosYxztUYl8J4NFNP605zVUI1Y64Bg4FASEjOMA0YrOMjmhnhnamoRkY6OTv8ASg3edu1HyBpH9aj17nBOx7UKJNnESFtjOTRjV0KByaAKB9KZdyEnBx9KSSBbTJr9s4G7mPvTjN/0K/F+tVF6QpGRkge9AOXJbavxe1SumUs1CP1Fv/ece9Hx+pNJ2XWTM3wjbUfzo5i+HnxP1qidhRr8fqbIHn29c81JsdSDYeJ+ZrGmL+Rj95UnF6i4w4d+acNmxsdQnnxNqkmL2tzhzH3rIo3USTjz7mpiJ1G4FABJWPY8UGZWagi8y0p3XnA5SamLVP8AmUlLhOeck1nVuuq5AGFqScj8Qq3WpctOg+IjHekY62XiOpttGrORSZsxstEbjb0qMSt8Nbq29qbkSstEFece29DsdKis3+QVuFIJ9qe6HUyzfb
e9MT4jDMhKlpIA8ud+Qf6UFcXmy/kDO+9SNrlqQ4hbSPMkhQPYH29Kr4ETdn111FMZnpZkW9vxWPBGhWlRBBGCNsbfbFfOHxDRJXc53yqori5BObfcWgtBAHm052JJAICsj0PFaR0d1RcJyQuc04hRb0KfLocaUSPQ7j7EVS+tbdElKkSLvDamrJUhEhl3C0JwcKUkHJx7b+uRXz3qk1kPTwUkfJPVxlfOmcuTeEtukeLD8VxpDDZ/8MDBSnHZOwxxR56Tbk2ULfbkQoMyW1CTKU0VBh1O6VOHY6FkI3O2TnO+S/11AYTJebi3fwB4nmW7PUI4QU5I0aScHnAHc7V9Ff2e7ja5fSzVn61di3q0lpESLe4SUrVGSrIRHltnCVpJyEuYOcYOFcdOKssbYJyeMqvwv+A/Ubz7cyzXJvWhLrc2K44W9IwCUjbWkoWEnGAQFJUnYEVqzHw/6HDrF9+IfTUyLMjlXz7iY6CWXVp0F9Wj+8ZUAQVo1aScqAyTWgN9Ep6alRrp03dPJDSGil5oqdCU7oSrA8Q6dW2dQ0kjVgmjHuobfMitqaLSZDhLeygpCXlHAxjUCgkgZHGpOdqqmsfRCUpZHbArp8C7c5YU/sKUy8gNp+TdjyXI7iQOFJcRqyN8KSUqSQd0kZSceudn6nyVotameobKpzwCuH4SpqPxo+XfZUNMhC0kp0OFt0OKSEoV+6Giq+McbpiLLhTGmIKmFN6mUrU0FLIBSpLRykJVpyFIOnZQIBFZT131w7cb0i5WCdc0N3GIz48PUChXjlQSrSeHG1JxqGywAlWab3l0JHFK9g9/+Mj4t8KPZbbGtvU8WM3cYTTSVGOtAcUt9nPOCC64lCwClQWBvqTSIt6gWa4CeyxJZiXqIXILTiMMxVEaHmQc5yl0tHHPhuJIylWkTUu2wOv0QplzgMSr2xKLaAw34apqQyA7GcUTpDivOppflCidCjqwo1XpS5pulwHQNwaWtxmLHTCcQ0EkzWGwllK84CVOeCGV9xqSc+WpWpaXX/srXEiC1Ksyptrix5Jt0SE5HhPlzLbkh9vxWQAANspCf+ZxQ7VfbAll3pRHTabu5ZxYepLY9dn3HglpLLTEf5hIP8KFth0HkDITwaj+mJjd56JtNwuTjwbtt0ffQVM4SjS6/KBcSd8NOtJGQdg4B2GT+r7VZrUmHZbHbXlo6xZixH4hT4oWUu+Ipx07eUNLQDwMPEk+UVkqkDsxf483uPd+kEfKTS5c5t5nSJwcV5kxw1FcQCefM4tRT2wnbgiqB8GenB1t8dI16vEdbVlvN0kx3Ft74QpP7xOAQQkoUpGr1OKB+LMXqVvqudYIMd2U3OUzMjuMoUUOxSykNpB/l0oJHOTxW0/Broi3dEWjo7qS8rXCmxLg9FuDzKApbiG1qc2GMqQpKo++OVD1Iqrahj/cC+qRb/ibNt11jXa+2c+GxG6fuyEeC/5FuvOxGQ6ARyhsaAnlXhqxVU69+FfT9r+J3UfULk0hN7SqAtjA2lXGMp4lSgTpSEvspAG+oYB7Ue9cEdQ9N2npl5huG7PnNxPlC34enGlUxCyOClTLmDncubCqP0J1Pcevfi/crtMbkNKmTmZYCTn5KUmNKEdCQdlAEMj20jG9TjaX7DMh4LLk/pW19DXwCE/Lkm6OB/B+bahtO+HtjIThTON/Ngn6OfCXoS5vXp1iImSUupkX1iU0+EpERtwNNujOfMpK5CcYP4kntU70X0u913f7ldLwr9mzLVY5Ly5ukOfLtNIYS2jSTutZaIO+AtxX0rVugYNvk3e1zbYYrFvudi/2eS6jyqjR0SLiWm0gZPiFUdGpQGP3gBI5p8k+KaQsYtuykfHnrjp3qzqC69GRrW3BjwIi7bHaS5huW1kOuBOjbzKJRsNikHtXzv0h+yYnW9quLrIcgy4BUW0rKA3KYUEKRnAIypCTxt4o+tXj+0T0lfBeE9W2W3yZTEVQtkBbCcF8NPNMJVtguKUVLyRuVb8Vm3QXU0Nq+w7jMCJEd2UiQ8HFEpZfJIylI7KbSnV7hJ7VVR+jQl06Z94XLqm0Wt633x2alqTNjRnZDRWhbrbqGVvaVgnCXSI6wQSSnXVUvd/ekRrB1L1Hb5Yag3Fp1UOIkk/MYK0NIXjdICGik8J8M4B3FR3T7dm64kuOQnUwm75fkzGo406mnQtKWlJwcgrcShOOCgjnUad6nvF6VaErgSWm5qGGnPCitqEZ9KpEiGwQE5/iIUv054rn4Jj20qIPqCyux7RMiSG0P3BAkXaPHZWoMnQpjSjGM7lek6vMTsNsCpJq4of6Ts8/rDpt6fIvVxVcI1pQsBGpt0NNF0KPkSlwvPrKsgYSV5UTpLt8a5ybrGcEZTc0TmWreylwhDDjIaTIlOqVnURgvpySMBSv4Rmd6didFRmpXU90hNSItuU5bun7XJcUUORW4gcbDpUQFlKE+ItPcuNkgkkU8PpjfkSW3RezdrH0xIjw7ZHfu3WU62tOg+dMWzWhICjpP/CbUEk5/vHSUkkJ3FahdDPfEZ6RY4N0Yj2aJEUt9aGPDaSHiVLdd/AlJcTk6CpThbQgLUBqBh7X1VPmCE/1JEmMquy3bjffEUdUuQoowMjBLLbXiq08aW0etTXTnxntNks0kyrSH7XEZckMm5KXma8VpHkYSN0gaQfKApWAOKylWkBporPVPwhhXK33I9KR5LdvhBbsy7uNSFAaU/3cbQsk+RI2KQgFaQEEDzYLabP8ROl5SoLcxYflyGkG3IYYuCfCOVKU9pGjzDSNPJJ3zX2Gnq3rj4twoXTMK4i1wpFtakTZq46SXHHTq0NxgpQW0jUAS6dK1jKvEwABJXQXTcJqfZei+qP2Wy28FT5fyKH5Dz6san5DupBcdUpQWlskNtpIPhnbLcoyQE5RPlm/9QdNrlyn7p0UzawtSmzcrcxKtym0ocLaiQ2+40oFQOBoSdtqRB+IFpg2WTD6dvvUF/s0ZaiW762+hmNsB4iXC4mQyeMLbWlOcAjOBWk/Gr4dq6nuFq6BshRC6etEREy6TihJcZQf7vK0hOHC2lawkJySsHScmsxv3R1wRYBfGkXG22OAtMe0QlLWkOLOSlGBv+HKlKXkkqycZFaKSQ8W26K+jqe7I8R639W3mXblHX+zLrIW6UqJyfDe2V9F+VXrq7x6rlM/aTd2TKemw05QUSSFOMKI2SsDZQ4wobK9AcilTYci4OMmWfD1NalhCQkg5wMHAyCdql2ejZbceG+6gticoNNgKBW4gjJOjOSNueM4qE7fZ6EXBIs3RvUkTqJ2TbbjYBJefjFJXrUsgtkLQCDnI8pwBuNwCOK+svh2u7Tvh4lTcJIcACI+qOFAg7JxndKR3yQRt3rGfhZ8OIsWcHGrUfBiNB1QRKR5zj/iudh/hSd+K+oeiUuvwWHpMpptppSUobRsn8I7bbehwAMVzwtzpEvUTXEoH9pnqRluz2HppxbEqQywnxChJQ8jCQCok5QoHHGM+9fNK1OeIThe/wDN6Vrn9oC9pndbPxnHPmFRk6EqKAkpRyE5AGfr7
1kh8RxWcEAY/rVpuzy5xTdBlvY1rBUKutkiDKdge9Vm0ME4JG/+dXmytbp9dqg0aEaLTaISQE7Z9KudrgAY25AqBszI8uU81eLRFGxI5oRhsslQfAtydP4fbipVu2jA8lFwIycDYb1NNRElI2q6h5NRWl20Afh29MUDIgjc6cVcnoicbCouXFSM7dqZxCUafBGk+WqneIQwohPatGuLAANU68tABWftSNBoyi/w91EpHOaze/RhlZ9/Stb6haHn/wBKzK/oAKsbHvWUbJSiZxPRoWokGgFEZ35qSu+EuKAPJ5zUQteCRms0c01s4s+gpOdhn8q4pQpCl8Y9O9TZzyQ6PX0p5o9u3uKYQc9gKfaHff60rJMLSQMD8qKacxuMHag0Dfmn2gRgjbbNRkgWRboB/wAqDdT3x9aPdTjORQjyQRzX0TPViyPcSd80wpAznH3ox1PYYpkpGTtzSNFIsFUnfcZpCkbcUStHP5/SkFG2TtzSDNgikZ9aSlsg54ovQNq4UbiiBnGGVKVnG59qlozKgPtQkNIJ71Ls4KQOcUyQqGi2rB2NMuJOedqOP6GmVoBJ2p1EIy0cjJIznFHxVDjNAgBJ39eBT7LiQoEb/WqR7A4kicgbnehnsg5zj3p4eYZpl4jBGePWqpCOPwR8nODUc6TzUg+ScgUEttRO1BnPOI0FAZB/rTTuO1OLQobjmmlBWNzgikbFWiLmIO4HFQctsgnG1WWQxqBOnioiXGIztxUZUUTRBlSh6ivJlOJHJ+lOus4JIFMLbIG4NBSDY8m5KTyTRbF4IP4iPvUOtAO1ICFpORVLNsukC7BZGVce9WyyztbgHpWVxVuoOAcZqx2Z+Z4qAg5O3FbYUb90y9Gd06/IfUirxES2rdtYIHpWZ9HRJRZSt14lP8ver7BZcZWngA9xSWVS0WpsJ8HJWMCoO6yy3nQo7+9SLiktRtRV+ZqqXOejxgNfJ/OmjsaVDzcMvZdJJPoauXS9mROSlCEAOYx61X7CESCEuDbncVpnTEFptaXWwlOOTTt0IuyUY6OuNrgN3x1fhJAUEqS574wUq257iqL168JdpVKjtEvQF63ShwoJB5AOnuD+HcV9AyplugdEi3rcSlLiVDUgLbW4e34hufoK+beq+m58xmQ+FOBKSp1CHWtDiscHJIQobdj+RrxvUw/8tnbilcaPn26vS9QkCW0ht9Y8NxtsPILBUSA+yvZWCQDgEjORV7+BkS8R7om62ZCbO8+XY0qLb0rmWu7sKz4jK2UqUUqOCQjdQwCEjG0rbfhhHXDem3CxyXY5IeUYryFhlWfMU5Gvf0IyCNjVx6f6B+FfUlufQiXOauS0LdBabbYkvNJIOQsaUOKQoZ8yQoEA5B3LqcY2Gm9F6626ovSOl5z/AEJfIyZNthhTtuujcjCIpGHGshBcbKPxNrBJSBvttWGROofiIpDiI7rsiBP0TG3G3G5jDpQoYVqbJXp5BUncHIIGcGc6huXUVzVCa6jvBdm2p1K7X1Iy2pMxDaN2/GzqWlJGdQy4g5VjymlW65NeA50/fpFujBVz1RVIHhRm3HEpOpC2yUtK1KQPEbUEKDiVfzJo8kkCg2fcOp37lADYYfs04IfctU5oyENsKI8YtuZykoUVZAIykg+xcktW5HUFvmsvSY7Up1y3vw3VKWlp3KNPguYykKBcUkE7qTpBJ2oG5dZXC3IetYcRPehtGQgqkJVLaSUkPJWhISXT5QCoDIIB7mg7rco98RKgsSPEek2xc5BDmTqMQOpynnR4gC0KwD+Ic8yp+R7+ACeL+u6OTIrqfkHpzpdCCR4alLLYacXgeU74HO4zuKtV86bmdTMvMW6RMPU6Vssz20r0qvcZppTvzrXq8jCWXsHKm0h0ZKV1GW+LI6ggfIX9LkZxJLLT6n9KZLLpS94DuR5igJOlweZOnPmB2k5F6t1hNlh32Pc4l56SZRPZkMoK3l6y62l/KeySrIOSMlI3G4aMuLrtflCtNo71PClXiRY5DuXbD1LFg2rqCHrQw5aLi846ZC3EfwKdWwlQI2yXQRsBUenrCW50v1F8QFQ327har8ppuE5HGhqHJaSxJ8N0jASXylxJOxKce9WxAR1LbZ92vl2disX6MGr03qQ23bi5IUiFcmCT5E/NMlxO5CMvNnyaKqPx5fulgPWDMm0SodovbDDrcQp/dNTUf+8pKdw2C6lwehOgAjauiS5KyK7oy699RtuX+/TLbof/AGFbYUN9Ss6keDqPipA2GpQ0kjbJSceat06ZubXVsqPbpaFKty77AS1LISh+OXmGy4lIxghK2UIGwyCCfWviz4TXxs/EoQLpFdlRbs29b5bLa0gvx3E7jzEZUkpQtGDkKQMZzX2v8NGLN8PrPZ5Ti/mmyv599wo8+F5WHEDc+UtLQrOwLiON6TNj4seE7RXOuGG+kPhlaOs4jC35bt3CX4gRqLa23XcJGeFLcSgrxvh1B4IzUrOG7RbLHGsbDcS9ft927XHxEEPOFct9mIlSufwvq4xhLRV2rVLLFuHWLsWx3iC1Ptbtwm3RTbTml1qQWkuIcCjtpQhlChjkrbGCc1n/AMX1W6wdUzesGfl9TEaGZcdK1ZbKVJY1qzsRkPccHWTjGKEHFoLbuitfDe3XFjon4m9SdNdTMqvi46XIjxV+7cQ+8t95tDfAGVNowRsdWDXen/iDcbReURLbObctUC+ORWXVpCUrDchoOOKA28LxVrPuVlI5NI6b6Mm9GdT2jpdm4qdW5CdizEtp8JhwBKUpUoYJ1lKQAf8A5nfep7ob4dotvUzPUt4Sp62NwZkaDbFIBb8CFGWVvrUBkK+ZcUoBRydJJ7Uz4yTb89CpuOi29Yqj9S23p1YZ+UYmXhyEG08qejzFqwCd0nEfI7edWfWsNlfCCF/s26LJaPmpTUtEljwVhbq33HpOmOopx51MsNEJ4AcB4IJ257olV6tsGKue+7Jt7a7g7IGtK23lOZdcCV7N51PD03A2yTTPxguN36VYTPtHTLFs6asSnlMeCU+JKV4SWA8/jc6n05JOSfKnACACIya1ELSe2VL4ICZ1pH6Zi21xuA4LreTcGmGPDdeYZZYWNDh2HhlICT/w1kHvVxZhwIbFhvMePMbi4ExxDydaWbf8y46khROFoSQpwask+OgbEVFfCUdNdPiCf204tannbeXY60+EVSBIcfCxkEgITrX/ABaghI3GCda+m53WHSTtpnTblb5HVNxfnPPym1Mu2/p+KdCm22sDwQt1ppltsbqw2MDO2W5V4/8AostLYu3zL/Z0z7wpwN3efMch2EyCRHaiBCVvTFpO/hNx20oyrGQ0/wAmq1db/abvJt/Tthky3mJUNlNkYec8TLandZdXnA1ZBdcKjsSscAAWj4gW+Tdods6dsbqGG7qj5ee6l8rECxQloaDZd/Fl0sOKU5slSW3VAEOb1/8AZ/7Ottmlts4vFwhfJIRk+MqO5IcUlKRgkeIkLOrGdKB/NVG1QiXk5eeqG5j0fpu2mShywNFx4kYclsurWs4wD/eK0qV/CGylJpldhuk2
OyiRcvlrc8hM1MNpoBLrTSkAqWr8WFLX5Eq8udyMJAovo5hF9mXnp0ttR7pcEzXZcpGnUlpkazrIxpBJwE9y2c7JwIhFlmPQZ9pXc3ZUqROZSgNed0sR1J/dJQN8qWW05PlTkDP4jU090h60WHovq34hs2l026+SFrvKRMeQyt0rlAkhhpSlAFtttCskJ5H8oJNal02/ZLdAisz+p2Li+2ymSEtEKBd0qLjxSjJSkkHCnMZAyOAqs8ehWy0WQWC3zSWZjMqQu5Ox3AiUpBDRUPMj/d28hKAf7zSk4AIzBz7iuwRG7G0ic6VOiKiBPc0OOJZCSVBhGop3TskjI7qHBKjyE0j6T6dYtsmO7bro3a3JSlia+ia7gl5QCkZbGdgnH49SzgE4GAK98b7RYJXT65Mou3JYi+HGQ22tiNGSDlbiGwQgb5/eukgY2OcJOefDb4uXWyeFNMaytGWVMx7UyylfjSCcLekP+ZwjI4SUg6Ve1EfG/wCN3U7Fv8S3ePd3piw0lSXDHgMPAbq/FleBnCQSP+XejH6XRuNu0fOnUD7Vquz0WCgzZU3AL0oKUUA8YLmknbcEhO44G1W/4cWC5OuftCa54863PFlEdC0tod08qz5lKIGxATsTzWZXW/Jk3sS5sp1+YwVpcKyBqcGe/AT5jsB24rXvhBJnXi4Rbm5GYkFlLjUdxpSykpJSFLVulIJwANR9ds1D1EnVI68dJb7PpL4ZQbvcG13e9qkn5pzxmYIZIwACEqcSo7IG+lG3GTU/Z7m6x1RJS7dD4YTlWtHhAK905IGfWofp67udP29+Y7b21upCkeK46WwpYHoDskHYbdqzFPxEiWNibd5LTRn3UqT4efEAUCcg5wcVLF1onPb30QnxEc+b6yur/wA0VpU+SheAcj025+veqqRqdCS4F43yRj868/LemvKeZSlAcUThIGnft9KYQ4EO7qSTn+HtV6OSW3ZZrQhIKRj61ebO2k6TjFUS0OaiCTV4sr34RvtzvU5RQ8UXq0AAp9KvNoUMAcVQrSseWrpanQnTvSrRVrRd7crYVNtEac1WbfIGBvmpxiQNPNWQlBTm4qLmYxij1vjB3qHuMgAK3omIS5LSAc8VS724khXvVhu0sJzk1SLzO1aiDxU29jVZUOoljCsGsv6hWAVg71oF+k6yr3rO75hQVvxyTTRoSRn13US6ogbe1QxBJJOcc1Y7nG1rUcZIqPMInhJFZxdHNONkSob7muaVEDAqVXAzuBimzBWnhJxUZEJRA2slW/6USgAbYpBaKDgjHbinEgkgDNTbOdoJZSVnNGNx84JO5pEZlQAAFSLDW2/61JsmVx0D270I4kgggEYo1zv6UMsZ5r6M9KIG4nkYzTBQftRi0Y+uKb0HO1IyqYKps+mKRoxRZQeQDSS2fTmkGTBNHbn7VwoOOOaK8MkjAHvXkMqJxisCzsZokjO2edqkW0AJGO9JjRsJG29FFsjt7U8QDZAONuNqYXv2P2ohQ5BplxB7CqWPFbBFqwcDavIUrnevOJIya43n3z2opl1HRIsOlSN+fQUl05G9NRyUqIA5opaM42NXWyfFAC29R4r3y2QOP+1FhoDGNtqeQyCOO1aiM4EYYerbTn/Ski3kbgbc1OIjDuilKjjGNOO9TkrOSSorT0DYgo/71DzIJ32q5vMbny/pUTOieiahJATKLLhaVHA2qOebA2xVumRArOE1CS4mnOE1PopZBKbINJAAIChRbrJBICaGU2odqdSCmPx0IJydqslndbadSo4IG9VqOFA75qZt5PipHrtxTJjp0bh0R1PB0pYe0g42J/1rSY0lqSlJabByOR2rEuj4mHkKQByM5FbVZhqjJygZA3xSvsrH7j8+UG2S2Tkgd6rCoS5jqlAZANS96WpIOQU9q5YY6XCdQJB9BTwZpEt05bH1IBaIzsCMEHFXu2QLszJjNW6arxXiEFv5nTseeU/50npa2J8PASEZ3Ga7dXpFtkoWp1hDaSMHC8k/ai9miap1ReV2Tp2NGuTdylx0MjUkuBxORudRAUCAeM1jF36ghXKc42hhxKCCFqWjIGR+FYRlIO/4tsetXrqq9GfYI85u3qdiBIQXWHHErQcblQGUn3zVGgyrfcH02piWhEhtXkdKygKHOknHnyOxH0NeT6htzo68SSVkALTLtstsQ7oZhaUkqioQEvNtY/EnwyCQP5gdjjPpVsm9QXZFocVL6UF5Zitn5ZciK0mU0sgedt9Azx33PA1CnWrPGDgi2+4yGHS4XUxYzQcWB/ErUeFdwQTttQXUL6ULbYtPUK4VyQoPuO+AApOdgopbSSPNjgq3OwzUErZa1RVOnoFustj/AG3dZLRtU6QUNonPJju2yQ7ylt7V4S21LwSkOJKVHUnB1JKbfH6el25+3XG0uQbsoqjJb8RC2FLPmUyW3RgFQwUlKijKk7BKkqq09W9NhfRz7l/cukyUoB5cpEZ1LjyMZKXgynSSDjC3ceXcLBGDRJfVSLz0q9Z730veXYbAbkQ5yoiJMttIyVsuKCcutYKgDspJ5wCTV0r2iRCwrOm3XaVfZ9vusy1lKULhuteG+wknT87HXuoAaSHEpyUEHUNJBq22npW5Iuy5PTL0W4KhR1yrXIlMpSFoSAFsh4DCkbjUgjTg7YPNh6Ihu3foWPCt99hXlmHIUxEW9ES2+23oKmm3HkHUUqOUoc099CgnmpKJ0mG0JldGxoxj3NSvnOm5cgrW1IIOvwygFKkqTqwUYWlQ4xqSQ3fRlrso8XqOxXqO9BuKVyZVpu0Z42twJblGOtLiSwFfgLqMlKN0lSm06VHWkVP2aNcT1vZkJQx1A3YYSFw7iECOqbDfLivBcSQNbalJIOUgtPBGwyQY+Yz0lFmRrn8xBVc4khbrqyy5HlZCQn94CCVLToGUgqStJKgjAotUdDnW5xZ3Ys9IMm3PRFrbTJSogqSqMSE6laFoUpBDZIbIOSKrHHroRz2VTpe8yY/W0GxmBKkWN27O2IwS0pta4DzLi/lsOEkLSQVBJ3S4WyCcb2r4xXJmVAunjXZsv2toIKksBSXEvMoQ1IAUOHEeA7gHGUuADKQKuXUHTEiTY53UNoeiR5Ldyi3GM247laHQrWtjcEocSS4UOE/hAbWEqSTXz7P+IJS/GbvdyiuoiPosdwLuMvW+UtRZecSMgKYkEJUOAnQUkYqqg5fST5LtFR6G+E3T1zuwtsVp1M5Uhv5Wa0UuLDqGg6yseg8VAGU5GAoY/eCrT8ZLzfuk5HTNwEFuEPFbgyo6X8eQx9bxA/gb0OLVg+g9RVy+B/TEO73iTJW45HFpRqCFoK1xZTbg8Vgkkk6ShScn+FXpjFn+OfQk3qnovqKVbU6rpLeYgtMBv8AMNMZx5O3dRIUTwlDh7VH3HKST8FuKXQP/AGfPixAl2i03W4SFBFs6TYaEguBLrklCxHkqOdtm0he+/HfFB/GH4cyb1AucmI2EQ7r0yEmKQlLb0pNwedjNIPIGzil77khPCq+dulL678POqI1iauqF21ADTk9xrXGWXClezfc+RCgnOdJ
Oea+mUWa83GB1NK8OMuJDgPyrfLnPFvwFvNthQ3UR+7QtY1H/AIni4xtTSTT0JpGPC4z7f19YrrOQ4qS06JUkssKCpbBcS2hRVglAVpeVnbCSR3rWet73crH0THfirYfiWuFJW8tDWHnZc2Y414azvpTqZdVgDfwhnYjET0Auzz3JlgmXNlN/nPtQw48gJUloFKlrKfTRGWN8DU4rA3NZ98UvjJdOg4F86ZtjSIzk+Qy9aXWY3hpQpbzhS6TjBSlvZA7Fal4yc0IJz0vBpNIn7B1/Lg9ZS7Bc3pIZuARAYS44XCFNNrePjaTkOpeX4imichWnWDpIEN/aW6kfa6f6at0GSW5V7hR5DDJeLbcKIjzLlvg5041eG3q3JD7m5Ug1aPgR8FGv9jemeq7uEXJhu5zJkpDwUC5IQsleN8nISSQdihauc0L8ePh9Bs9rcs82eXGFTHb5LddX4jj7Dcb/AHOItwgHCd1+EnYeQAHGQ6cISTFVy0Vb4IQmZrsG5XMqdgMTHvkoim1suODwAhDiFE4QfEUlRWo5w2s4ynbe+oFzbv1dFsqrwhiXAuLl46legqKkwIUeOlEW3NqV5Q6lt1hJX+ESHyoIKkApwaz9bK6B6TldVwYvz93VHTbrHEcSlTPzpQXX3VDGXPDU4lR2AKlBA8oGrYLN01I6Q6UsHw9ZnxEdQuD5i6y5J8Varm+VKLRSjd9xCFvnSVYKwtxZShKQowi65PyabV8UVXp2fBkXLqade4jblpukKKHIEdRjtuanEsxIDa1ALCVNxlJO4LcUOLUdaxQl467U0/cupESIU3qCTdG7E2404dMRLflfeCQNtKWcJA/CCRnAFTFrsVguvXvT1rndPPS7Q1HS7ZGJcjS8qM2jUu6SVeXS4+5qLaCCfDb1EEFGlHxRs9qud+jx4C7Db2HpiYwiRS6pa4jagQ2GkoCGW9S3HH1eIFOLOlbmykgzVvf7gj9gGS1C6PW1070wtxFzntOt3K4OsH90yspU0lBXjC1J0nSonAU3q/FpLFv6F6x6emIXdJIjNJQ4mU05q8NDSHE5j6laS4StYyMFS1asgAEGY6ugeN1Fb7o3dI37ViyfGs8SM60lMRtJGJDy86Askpd1KOE+UgkAVMv9Lx57sLqCBcm+qPlH3w5PCHlQPEbSrDUZKiFupC8anjuoa9ISCVCT10Mt6IW8dbQenHrE9IjJevClJeEdUcpfbi+IVthYIIbOANKdzqK1AJCEYjblb2+oLRL6rL0mWqcosOfLMOIQsKVqW2FNjUoAgBSWzlWo5UN1VAo6f6vufV0u8zpTt7kSZCzOk3E/JNMYHm8ZSvK02lO6Y6cKCQkaU5GdHhdTdT9SvtdI2edIbYZaU6ln5NZduGknCgGgoNR0/iIJ8xGXFnASKca6Ef3MZtTN6sjy37f0vdJb0lwhuKyAgu76SpYQPIhKQEpQFaUgHckk1rF96Vul/wCl3mWra9Ab8LPhw4qi4+5gaW0E+dwZ4OtCCQSdgKV1gxZ+mLVBm9K9Stu399P+6uTrqwmO0g7KfCEpGScgJQkZJ5J3ozpzqWc1fWun0TbeqXJYU5MlyoUuS802N1qK0jQnOw0pGw2JSK07/Ugxa8mBXL4F39mG7JIiwXm3XFqFwuMcuI2yQ4EKIyRnypCjT/Rhds96bFkugLLenDjDS/D5BP4tOAN98dhitq6k6diXy4MToiWFNK8jnjxTHSyvOQtWxAON9O/PrtUdO6I1Nibb476wpW77ySwiQc4Og4JVgcfhFRc+aplnov8AZJ797T8kp5clsuHU66wGyy3sRqcVurPv67VDfE7opi39PMSLeGnPBUQChScFJOedtt6J6Et12cWwIjc1yBH8qm1Q3Fhogk+VxStx7j7Vb/ixBs116BemxFOyVQQladcZYAI5BGrY/UYo48ddEZy3s+XpV0UyCyt4IxsUp4/SmI1xAX5RgcE55+teuJDqC44sJSd9IaHO/Heofx1IdCQVAbZBotETR7JN16cKP51fbPISdJz7c1j9lm6VDzVoFknk6RqHrU2mUizVLXKGEjP1zVwtczBAydqzK1zs6Tn9atluuOkDJ3qfkuqNJgzMJByKm2Jw0jzVQoNx2yVVMsXIBO6qdSEaLS7PAT+KoS43AAbmgH7pgYzULPueoKGR+dFyAkMXefqB8361TLtJyTvUncp+rJB/WqvcZOrODtSJNhbRXru9q1b852NU66IKyoAbVa55KiRjPqKgZUcuKPvtV0iTZUn4hUs7H8q4i2ZPGR2NWRNsUtR8u3vRbdpOPw0zWibVlTNo8uyf0pl20qx+Dmr0i0En8P6ZrjtkwM6MZrnmqEcEZw/ajv5TQiLesOjY7bcVoMmz6cgI3FRTls0uZKAO/wBqhJaOeUL6IiPCIA8ufejmYYSMqRn61NRbeCgeXeixbCoFWkZrmbZyuOzJ1jPNNrT2IolSN8Yz/pSS1nO1fSOR3pgRbyod/XNe8FOOKNDGRxzShGAHH61NzQ6ZHFivFnKc6du1SSYoIyRilJiZOTjHrSPIhuRFiOc8Z37083EORxvUmmEOSKfbhD+Tvzih7iBYIxHCd8YI7UtbOeB3qTRFwnYbk80hccDG1OpmT2RC2u+KZWycbVLKjhX502uMBk4xVOSL42QjjI3OKFKSkjbA9am3Y49Kj345yCBgE1lI6V0ejI1HJzRykgDVnnvTMVrSDkU+oE7YxV4TFasbCMnYd6Kaayc4pthrc7dqkGWMe9PzJyWhKUBI3/KklvVyM+uaK8EEbD327V4MnGw5qbkcU4oj3WM8Db0qOkRidiKsJYJ2oV6JkcGoymQeiqSIA3JqHlwOcp/Wro9EA5HtUbIghW+BvUXM1lBlQNz5fehFQsncVdZVsSc+Xf6VHO2zGfLSPJQVIrjcLJqUt0VIcTn13okQBnGkbetGQ4C/FSlAzv6U0ZjqReOknHGnEgJSsehANa9ZyDESdIRtwDms26MgPJWjUlJTjuOK05ptEWONSNiOQaqnZ0wIy+uNlOBnOaK6RkISspVvUNd1eMohvP3PJqd6ItxekID+yc71VBkaLZXXH30+GMJPPai+oRHMNSTgL04CiSNP3qUhtWyC2lSVpCsCqz1dJDzSiypOB+I529tufyFDd7NE90suwogyWJjr/juJw2tLyFt5GeVAgp+4pvAiPBlBfbXnLbrD6Egg+p4IJr3RM5Uh5xm1W2GEoQPFkeH4qCr3LgGc77Y+9LvluhN3AT1RFNPp82pnQtpXfABPlG2+N/SvL9Wvqtndhug9yPfLxcGUyyGbkVeKxIYUpUpKcficWEnxUD1GcZwasLkK6vwnW742qLNZcXl35RxwSUKSNwpOnZQ2ITvtwahenLq11XcY6L5IUhmKslr5dTSSFjGUgKLh3B7pxWhXyGLfGUYNpvbjOjwiWbUiQ08SAUh0JwE+gV7cHcVLGnIM2o6MgunW8q1WqTJs1mu0RZ1swnUGQ6zqHlJTkpDQUOApCcYPIOKqfULlr6gdaf6lvaYM+4OI8d0MONuR1fw5+WCHm1AYAUptSVgnvW
nTZvWFsdbWxcen4KZGNVtdtjCZC0DhC0IByAT+NAztz2qGT0tZrBcDcWbzGjRzoUWmYbsV1paj/BreHiZI9SP6V0foJ1yZX+mrPY4TjsSHfn0RUhTQltsImLSFH+9HhJS4tKjkltxG3IUlVM9b9A2e1R2orHWtuduNxaEyM0+zJ1OLSQCtlzUMLwAS2tKVAq2JwK1aJeprT0WX0tZnNT7elMxT0WE4hROPOhOleSQPNuo5/ipUhi820tTeu7mAwpw+R1LNzjNJ2KG1sLQiQcqAJUk4BHbFCDbfQX9z5n6st8u+dFKldPX5q73G1PNyXRDlFl5hIKdWW3VYcbV5MDUCCSUgjUKu/wAK+s7H1fan4PUV2nwre6CpCbo84IsB/OpD8V/C1w1ZJ8yD4RGpK28DNXe/dPfD1Lzi/mLUq1BDri0Lt6nWE60nWtkhSlJwc6m1heBuEgkGsXs1ln9D9Uwp0BuHb7XKlqbgvhLsyyTCocpdb/esFSMEpwUnTnSnBI6IScVsm0pdH0ZerM3Bh3e5wmzlxCzf4L7aW30E6XG5bCkKLbqPLqCk5StOeMED4f8Ajgu2SbnKTHhoS28wtEoIcOlfijZaPRvWhCwkkkKKtyMGvvCApvqDpe42iHc49muVpdLighnSmA4T+8HhqA1RH8kg/hCtQGCQT8Mf2gbY9/tdJDcVlmXGY8GWhlettTaNwtAxknCtPHv9KprkmiSTcWmfRH9liJOuPS9r6pnS2FSJ14RIu7HhkuLSqOWVO6u4c1tOK7AkkZzgfSt3t711sMW0W1tRfffNuQpTaSlIVrDjiux04cJGe4zzWIf2VOm5sHpm0WOU2j5mKylH7hQT4jL2FtJeBGVJ1DTqA2AT2Nb88t1RuEohTMRpcjwdOEoc8VwpCsbnOSVHsCrJzkVxRlylJ/cvkXR+b/x6tdqhXawXWxvKebt0mSlqKEqQy4w0UtyJri1ADSpYQw2ABnwiEg7VfI3xtUx8OxZrguctyalp6WhqOFl11h0EMaxskKW2jxFbkICUjdeKa/tV2C2QOnvCdiy323bjFSgtJV8xKdS2XF+IpQ/dkuOLKUkZShK1acrJqn9AWNn/ANmE5iY2+1LkpS61Kd3bYMhzS02OM+GnUtS/4lqx/BVlKPFfuK1UiO6A6kfuEa4XOdPlMX51SXhIjJ8RSEgkLSOyiVrQnVjbzYFQVzmT/j18b4dqEZqBBs6YjCoshxADbDQabdX2CtI06RyQAOTilfBOF4NyuMeC1MmMrYcaggrShaktoW4UKzsjOlKh66gOc1Lf2dvh1c5/xfnx5MN6TBKJ9snBwgkJ3GtRTuoJLW4TvnTuM07ksfLw/wDIsYudH3f8M+mYVm+HkBy2sraeiKly48NTqQ225JUUeCT6I0FOo7YPvWM/2kYcZrpxV/baMq4XC4patrK1FCYyFNBL2PKc4CELW6sgIS6BjUdNfTkOwtN9Px4SGmoiIr0eKhtIL2ptsanSsDfc5G++++TknHv7W8K4r6RnMWhlDTLjCkoLraAhtbqlqcczj8RBOhI0gHSpRwg1B0mrGg7ej5E6FvUC9/Ee1P2uCBZumUIgdMw0p3kSidXzKwskqKnCuQor7+Ek7Db7F6V6TYt7huUiOu4T2m1twXJj7j7aEuEeNJkLGklx5I3QhP8Ad+EgYQpQPxP8BZEKH1LHkvqdEeKgrbZwFF9WkaUE7YRsCvvpGBuoGv0Q6N6b+IaLNKciN21MuW1hLr0pSVtvOjKnFuFBWgk/wISV+XAxyOue5qiPUTK7t0cz1vf1z5yY9nshlraXbhNLcy/TAQ478w6lJcS0jIKmkgryEhQbCEgVbqC99EwZ0npG3QIyIoU5mI3chlLTaRiK2wjDbLQAGpbjn8aiQtSjQv8AaKnvwIjXScj4ustSEtGKi1stMI0eYApKGCt0IySQ0dOcgrUo5NUPpP4SQrVeIy2Os3JM2S23HbfZsq1NNuFJUlLKvE0rcABIIBSnknUBQerbHSs0Dp53qG7yrpNg/B+xoixgPAX+yH1QI4P7xxcmQtJ1jUQo6glSjpCcJ5nOp4vUFzs8UOT1xGIKEoMoyI7a5JX5lNR2QpCQVHYJB4GTjGKfY6MceTbbJMunT16mtJJjs3e4B3JxhZU2454QeOEkKdK86cBJwALnEi9U2VlmVfn4d2ccyxCbjvNfIMgnBU6Vp8FC9IAShK3Co5JwNqlL6vqN+nSMdPQ4iQI97lW61OvoK0R2pR/aKbck5KsMIHgreUeQgkgq3VnzVGsXC0vGbAuDfUE9uYgNzocCGq3N4QMNiQgFa0tjGfwOK52xV8+IdzcuT4sF3lOPIUA48zFYCvAbBOhlKmlJZSDjOUnPsOaZaX1ff2YbrDLtttDUpDf7QmJhQ5MJRSBhDhUkgHB51qJOxB3oxk+zNA3Qdtb6rva5NsuljUlmJoNvucJ2RFS9lIaUEakOpUnSdOQncfh9Ak2L4p2y9T/k3oLSJLmiXPejLis6s58FhqScpSNgNBHO6q0SD0/aY7KIfR9zflF9anJcmEl5C3FbAHxVpdUs/wCPUgdgKauvTi46tV3kQZktgIcjRX7lIW4yn+NJcU4sZII2xjb0O2crFqmQcdu9W92NZZIscN9IL8ifMmPPBJycq0g6EjPYFX1oSPZ4jkyRc5VxaTDwtKZ6FqbQ4rbJbStQJyfQfSu3GJEhSg7KkupnJTltgvNKSxzpS3oTp2zwBq35qjXS4TEXJbN/lJEtwEJM/CFLTnYaTnV7YqXFXoe3VWaB09crxGcWIcqL4DDmEhOdahv/AArSNB4O9XzqK4tufDO73i/Wt5DLeC1lvxC24RjVrSrGTnkgishsbwEtpbzKGkPLSkENBvT/AOY7j7VonxSnwYPRDdtt8Vp5yQkre8FtAcCeQUODIUR3Bp4OmK1Z8wXYTAkyWi2Q7ukoktnI+gOQfY1XnJEl0aXF8b88GpmTMcLiwpkrwd9bYT9OKh5SUKdKt0EnOkjI/OhJ2TCrY+ULBJq82WbgJOcA71n8T8e+5z61ZLbKUgJ8wB/OpMKZqNruAwDqFWmFcAMebYVmFuuBwnJwKtEC5AgHVyKk1ZRSNGh3TSANdSrV2OkEr3rPo1z482w96kW7qEpwFfahVDWW967DSfMah5l0G41eveoR+77airaoiVddRI1Z3opAciUm3EKJOftUPIk6870E5OKznOaQHCrff0q0UTbOPDX2ppFvU6cgfajoscvHg/ap+BawoA6c7be9VSE7ISPZjt5P0o9qzAHASatUW0D+Xf1qQbs52AR+lbsarKcizkZGg715VnwMaKvSbRt+EU25agAcJH5VOUbNRnMqyp3IR9/WoOXZileUpxjtWoyrUByn/wDNULNtIByAK5pRrRNxtlHYt6kD8O9EpjqCeKsBtwSTsB9qFkxwhRwNvpXHLTOXJCmfPpbPPFdDRzuKMSwfSnBF7bV7Ty2ZSA0s5OCOfenPA9t6ORG9cbU6I+2CKlKaGUiPEcZ3Bp5EbsE0cIxxxjen2Y224qLyDcgNEYHBwM8cU6mKNP4Tv
UgiNvwaeTHGMEd96X3AcrAUxk6eOKQqL3xkVKpj4yCM9q8qMD2yfrVFkNyINcbG+KHcYwMYqeci7HG2KGdjneqrKWhkIB6OdOQPvQDrBBwBirI9G8p23qNfYxnbFP7h2wlZHts6U8VxLfm2Bo4M7YriGDqyd/pVVk0Fs7GYyBgcUcGNI/DSozONsfnR7bGRkUyy2SkwNLJxgDauhg4yUmpJuNvvTiox/hBHB+tLLL4OWZFhjO+D6Ul2LkZxUymIrGrB9q6uIrG4BzzXPLIckirPRefLQT0XOfKfyq0PQjk7ZPGaDXCJJBG1QeUFlUfh54BoJyAdxpIq6G25308+1INoBGNI2qby2EojlvUk50n8qeiMoaWFlByPSriuypxuO/OKaFkRnBRz2xTRmx1dkr0vcmwgJSUEjGyhirPJnOuN48Pb24qr2m1LYf1IT5Tj71bUMIDABBBxk134naOqF+SJLi1r/eJ+m9XrozQSEkAE1U2WkPPBBSOavfTlvLAS6nsM10rodlsMB55GULPHFUnqxDrbbjYOpXcn61f4slGgh7JwM84zWfdcOZeJQG9P9P8AWhewr5O/Dws3B5UKfEaeCMFIVLcZOO3lSd8V2+RZ1qlrRb5LMxhOSsYWpxAJ3Aznf7UP0TdJEmSm2PP3kNlJSkxkhSWx9Ck/cAio3rhbdpnFbKJM9eAWlNrcQffVgaknncb1xerjvR14HotPQE1Fv6obm2XqBiQAnxpLS4wZIAHmGtJBKsZ3HA/hzWy3yZ0pcLO/LEMz4z40ueLE+cbjZOdS1oQFpTngrOx3zXyRE6ru9sfYvDjTyENE6FvStajn8SFEJCsZ7KFGM/2ioZjuOXVi2MlnLSiZMjxnjxnS2rP0OPoQDSY4y6SNNJ9m4tdOyklp+fb7TFiyA54V3ttuT4kVsAAZcDOVZ9z9zQ9w6al2ZhIlQLhOsqRlPzLTSWs/xLTrThvOobA5yRjHNZHE+PkS8WidFRalu2dspdUZE9bKBpzqAcVpxkkYSc5PINRyfj/Yup220KtyEyGCExIojsvFjB2WshASO+kDVk7nTzWnjn8Gi18m22Bnp22y2lsI6k8Z5avDMNDsiQhAwQ0XWiApHYBwLxnv21CHKmR7fEiWpCQ64PGaj3IvMSAoK/GotsA5TnsMn+YGsX6a+I9xkRnpEOferWllP96t5La3lAfiDTSCs535KQcc42qlz/7THwus3zAZVdupOo9YTIVcpCS222FZClFxfhuKJAIT+9AwCW1EYDYFy62DKmnTN16l6PuVzvabxeulblZitSii4R5C0JOE/gX4iFpcbycgLSoZOwG5p+d8MrRdYRvUGLGUtIWt6RbH/B8IAb6mVJIWdzsSR7AbV8Zdc/FzqfqC8KunTly6nk3DSAia5eX22T5shaWwGtODxpQlOw8uKDs/xV/tBSVvXmzWNq43C1KRJk3OOXG5xAXpTqcQpKXMqBA1IWVb5BHFePL6Uidcdtn2T0/fZMRCXorKLlCYb+SeQsIX4TZVupRQAUpVgE6co9tjXzX/AGgujrbb+tjcI9r+XiylNutXCO74iYyydwtIGFJOcfzDA54PrH/bpdkKbY+I/Rjkea0dDk6AnUpQznztlQycgbjT3wK0Lp3rT4ZfG2H+yIsph8qRpSy44oELJ2JGxQc4weAQnI3zXDnnk9M7knR1YYRy9PZt3w0jMpMeZ4sUlKEIZXGWltD6B4ayoIH8qkDSOPMR3zVuRe7XbIMdu+rcl3BxlLohtBS1ttq/BpQRtlwhOsjGVDgA4zjpPpt/pWAiG0Sl6MlmOwp4asOhadSz6axpGRsKLb6rLDLb8mXqFzlyPEcS+f3jYWlLfnwDpBdwUZ3KlYxXNhzxSGyYm2Zd8ZUJuTtugXOJcUxhICb9NUsLTGwAsto0jheCgKV51DzEYxjJbjbV3b4dXi/9Q2ZdnsTEpa0vOrKCNCS2ghIxuhpDgQgZOp853BI1+Tn4h2262CL1C6tt16Si4T0A/vVlsjAQk6sq3aS2ANyrJODml/GOV01dumrj03CnqkotEJl9tpnCm23QpLKWkYzqU2dWtYzjKxnOcdMWuSoRt1RmHwH6TcnypS4bj8OLfophJZlHU54hYUErQnuol0YJwAlI3NbX8GOjpvTXVz9vfhR44euEt6DPbdCvESVqW6QBtqSQEnV5cYwds1V/h5cI/RwtLl9hp/aVjZaisTUr1/NOePgMEcatBCkrGdgB23vvSPUE++dL2ia1EKJ9vbeTLZYaK3ZLBeKHg0f5sloaTjJK962TI5vQIw4o+l4jsaTNt0c+Gj5yK5JZebQUJ1qR4ak54RjY5yeDWR/2gbjDunSFwm3xmO7DMUmAzlSASgNqUtQIytZ0OJ25QtQyM1K2H4kRLC6uG18zKYeceQHZbaAooCs4GOCS4kccA81QPi0bl8Qo5i2qUpDkLxlMO6gEKDrRBQEqBUojcZIBAScAZTRlkTW2JCDjIyH+zv8ADWPN+J0S5zYyHg3mRkx9LpXgLL6kZIUnWryjYeQFWAMH6q69gXzqmfb+mrDHVIahJOUxXmFuDUCClb77ngsHBJ1oacVyEBP4qyPoK32X4f2JFsk3MhDTSXLi8taQp5XOgkA6tR8xz5QAnynIBn7z/aI/s/dCtOokP2q4TRkhpVyaSEOEHzLKjgq34CFED0/DVMeb3NMM4NPkiQV8HrF0S85cbrc7PZFpSjwJVwnmbLdSM5ZZaCXFKx2Sgbb8HcTnTUm/9Sx0rFnut1tzkReo3C1aIeUr2Syy2WtzndKtRICvrWM//wA3fwtHysmz9B9AdRX5GrwHJsh4SVqJ2abeU020gY4ACU7YG9Wyx/2qrf1U8g3Xou3/ADUF0n5dU2XKdaUr0KpHhb8JSCk+XAB4rpgoJVdHPKORu2rNPl9OdHtqcvSldJuTo7ZRCiJtrqG2EE5GYjLepSiezh2HJIFUC9/ERtOtjqgF2JHV4GA27HbIOFKStrA8Fo4UEnG+DgnOKvkT4nfDvq+DDHV9iRHjTEJegyJDyEHSASAB5m18E6Qsr076Rg4H6luNiTaDNsEcOIcQpppxK3n4DqBy2XmlYTnbyOoSRkEAiqNKtEo8rqSK9bZVh6naRAtdjg2WMGA0088+xMZ06ThSUvrGxI2UjGOSM1Xrn+34LaOn5bPSUm2vsloEuyleOkqwrDzbpaRkAHGnbvmn2ZNwu0Zy2SPhpCdgyEqXoduzpjFewKNKgS1vv5UgY3xnNFWWz9P2GS/crGhEC4Mx1Not8SZEcSFnhCkF5KlgY/j0g5yfSuZT5M6OPFaJO3Wa6t25Futdo6at0MAq1oWVrUgnCtIWfDXkDIxnbAwDvVsi2659QdMvQbpHmPQ1JQEOIQthRQNtIMY+IB7YSD32qr9O3i9Rk/L3TpS12tE5ol+XJjss6yN9mmyAFbblCiDzmrFbLXHbP7atXUch2W42dbk1ChFSkq/CkKVpQcDZOkDAGc53e0idWY91b0fAsdwf0typLDafFjQ9Km9Jzkla0ZUTzupWeM4qpSvFnSPmDYflXCAl
vQtLqm9huTqKxn3qwfExVrZmS7hfba2202so8VUhI8QkbnQysfxcY0pPvWdWGaly7iT4paguDSlRYUPt5SoZ+potKKNtmxfDK0WeO6l+7POGQySsIVrdIP1JOBjt2qC+PfUxektRo9uDbCEcltKMZ4JASM8bbferIhcGL00t1t5slxGhCUtEqTnvntn86w7qj5hMhwMzPESnPlS4SoAngBe4HsK0FqwNFZlXQvhZdbUdQ2IWQR7+9Ra3CoqOrUk78V2ZORlY+XJUP4lq/wAhQCH8qGTmkYjJONyNOamIZ3G+9QcVQUcE7VOQwcjH3xU2wIn4Tqk6d6n4krSAM71AQ0k42Ax2qVjpUTjTnvzSNIJOtXBSf4iBRH7UUlOSfyqIQlQ3FdKTjGQKNBskV3TVsFdqEcmE5wf1oJWocHPvmuJJJ8xpkhWwtD2VYJqUhtFw6SrmoyOjURvnJzuan7a2kkA062Ldk1a4IOkf5VbrbbwcHTUPa2kJI79qudpZSQNqqlYwRDto28n6VJNWwbeXn2qSgRUkDb3qVbhj+X9KNBsr/wCzxpAKf0od6AnB8tWdyPgcUFIZ2JoNGsqEuADnaoKZBAJIAA9auktlO5IFQUxkZOBUZIcqj8PKSQM1EzIuxyN/WrS83jeoqW0FZ2PFedmVMhlR84oYBVkpH2/rT6GAeRRCGTkbf9qfTHJG4P5VV5fucN0gVDIIApwR99th6UY2zjgc04ljO4BqbyhTBEx8jjaiG44VgEfpRTccYySMUU3H/mqLyjNgjcbgYpYY9s1JJjDGcD8q6YwO+KT3WgWRnhlP2/Svaf8ADUkYys4NcMfGMD9OKZZaNZGrbzkEUK7HO5AwBUwWMDjb9KaWwM71SOYKmyDcY/w1Gyo2nfGfWrI6wU5xx9KAkMEjGKrHPR2Y8jIANA5GO+afZj6jjTRpjDOCDRDUcJIIG+OaovUaKvINMRVEfhqQZjcAjilMtjgijWmhnIrL1BGc7EIjZ3AzT4iAkZ4+lGMIScKKeBRiGkbYrPMc8pEaIWBnAyO1IcijBwkmphTYI0jtxTJaTxznmpTykW7IRyIFb4/SmBbyTukbetTpYCjx+VdTGBOwrmeQMSGRbgMHFK/ZaTynJqwNws76Rv7U6Ye2NuPSh7g6VFYXagRx+lMOWvAxpFWwwwNgKHehY5G30qkclDIhbZbwohAOFVKSrcW2BlR/PelM6GVZA470St9L6SkJ4HGa9H02Wzog9FaUsxpGUkgjitD6QuKZDIS5tt6VR1wVKmZUnbOavthtITFC2sBWNsdq9JPRQlp7zUdtakugk71QupkKdSFE4BG/nqyTYz7bp8Zw6RviqvcJaZc0RnE4SOeBQHQN0na4NyW43OD2pByjwlEKJHfbY/lR/VMdFwgOiGptxuKPMSQ05k8aUqPmp5EmLanWiltiSEkK0pd06ftjn70d1B1TFucJDkGIIspoFSvOgkJ7lIWnY/8AVioeoi2rK4Wk9mM3gOfsOU4R4rjXmClqCngnGPMRuBWRRbU/fZC0x21LJOU+EnWrOrGFDnHuK1zrDqSM4lSYlxUVtg6nFFsLTgbp1NYT9AQa78Kl2qTcYsyR0/aFlL6WVSFMpSt3UM6VLbOjUOyiEHtU4ScINopNKTRUelPgtfr+66yiOJDsVwOFAcZeUj0IbLgOcZ5SR64r6O+EvwH6WtNwYYvkS4uqKVBC32kIOV7KSQzrUU6uMEJ+nFWtmNaemHisNoh2+WjyNxoSXfG9VJISnzjnTk59eRWsdMdS9NWW1pTbrrHdjuDWtRfcZc0k7qUhWCEjnk4rmyZpZpcZPRSONY48orYU38M+nbZ09cosK2xWWzBWhSvA5IQdB1ZyMHHANfmJ1b0W611JY+qH4hSbnbC66lo6lF1Ly2VgHsQQnP8AzCv16sqoV1jKkQ7nAnsSWy2p1mYteBjGBgeUnO5Ka/PD419Hu9Kybz0+9Dejyem50yUw4fO49bJRStKwONTLrbRwORqzg16foca3iXlaOPNJ/rfgh7F0u6rT8pEbjgNBQUtQwr1wgE6jjfcDHvVxsX9om0/2bem+o2j8LI/Vcrqd6HHYeNwMQxgyHNYKQ0vVqDijqGMKAyCKT8P7haOrenhLsrrZLDCEOJdc0Oh4p840nYFJJ9c1Svi90jMuVucjxWVataZMZa8AKIyCM9sjP3rj9FkeLN9bpo3qoPJCo9FE6J606t+JVjuPS3UsRieWZSpaZUmEgylJd38PxEgEISUkgJxsog7YFBdTdB9QfDuTH6u6YXMt0iOQ6AhRSRgjzJx+qT9a134RfDQWuyqUyvXN3kSH0lSFAhJ2Qdtkgdxvg8Zon4z3Ux7bFtHhvKlym9Jb8JGW9gAor9TkbDjJqnrvVzy51TuxPRYlCL1SPpv+z58UYfxp+GFr6iWjw7hHJhzmQcESWkjxAP8ACoFKx9aD+LPS11hRm5dtRoZgrK0OMkJEdJB1OcbY087AFWo5xg/O39ja53PoL4rS+kH5Ibtd/hKnNMLWB/vTQI1pBOMlIGfUYr7i6ztLF/s8qGpoeC/HW0ScEkEK29wT2+vrv5fqMCwSlH4/g9THNyp/J8f9AouFu6nlJMd2JabO045DWuQpr5uQXFsNOyAclTaW3jhSjuvQcagBR/UUGBBsDyrZHE92IGU+Cy2kqPkylTikg4GlKlhsYBU7x3oS2tpdny5iJjsVUdKkeM4gmO06p0iKtWx8RSXAooRjBUU8k5C+pZtqNtgXeJ87Hm3GV4qA02W2m2mwGW0vIH41BOVq58x0j0Dp8qFa4sjuibRc7ixGZcjoltTFOKiIW2EKgpRGZbQdR8yfxJKiASdJ75qPn3y49My27HFuUlQYjMxpD0FQSsusOkB1IP4VKIwoAdydvLV3s0+eluV8xDjTITDzytEZvwHHEvAYbGo5ASlLh08qToJ35rNrgSL919HivxI7sm6SEIykjCQ2FJ15PJJcOobbJxuRmmUNcmJKb6Ll0/b771GQzFfdWh55ZQQg4ShS23UDJxshSyD9R9rb8SL5ZvhJ8P731Xc5KUGL4qWUg5UVY0JQCeVKUMewAzWldK9Fs2i2JfTFdZUGtK0hROFpBIwOcqSVD3KB/Lk/CX/8QD4kyrr1pA+F1sWBDszAnTkNnZUheohJ9QlJ775NVwemeSSTJSzJJ0YndevPiD8TrhITKu8tEJ0qKozKiEqBOfNjnt+Q9K+n/gz/AGT/AIG/+wG6/Gb4o9UuNzYj89Ddui3KKw6tEZvWkNtuKStx5elwhBIyAkjnfN/gJ8OYt2sa5obPiK0JClkpQtZ2AyMHk45rvx4+G4jdN/OeAl1UJ9DrqkDKlIwRqBIzgEg/rXp+kyQebjWl4OD1MpKKvyGx7Z8DJVjtdy6WkC7SrmkLdtMljw5EBOpScOrSFJLmpONG4KSDwRRHVHSkONcBI6PM+K1FUlEN/JQsKH4tBSStDZO+lQI9QKxr4Q21UfqubJxmG0y20C4tWCpagcb8kAKP3r6piWNmfAEhSkIX4ZKVqzoA4Ocb79tqr/qXqXdJJL7
L7f1JeihxdW3+7MWunxE6lsTiX5EaGlT2oXJ6K0nw5zaUEp+YjEBlw5CirAQSfMktuZcVuXwduHUfWMOfebVbnbIy+yyCpq5IaQt3T5krQ8B4ydwMnLqcAKW5zWT3Dpy13rriHZI6gtLaVuzFuKw22DhOCdv4Q4rHok19nf2Uek2pfwXVfp8Qf/1HdJM1sOKKT4GopaIyfLsM7Y5qbjGXpFN9vR1c3HK0Yz131d1x0XDkNymYioeNLjq2lFhZ08gRlaFenCCT2PNZE1/a4loWzB6l6XtymY+MKjRAgqUAfOUu5Bznfb9d6+9+s+kLc5KbmyoUOY8hICPmXVvEAHcBnfP10fQ180fGP4VdCuuPXeZ0O89MQ2tKzBtcVhsOYJAU6seItW/Cd9uK5MSUHWRWVnNzVxKPZf7T3R7646WrbLjIjaSxFYjspYWo41FTQSE52wFHJA74rd4vxP6Y6qtjU+Lem485SEqVFaCkNJSCdKdtOF+hOU5A4r88esrpbbVLRb7XZF2pxpRJHgoQXANgoah4nrngVJdH/Ea8RHWktyVKxlJSsAoVnspOMGumeHVxJrJemfY/xQvkfqCCyXfmbu4wAxIjrbQ2tAByCtzxBrHcAKA52zVA6Rs0SVeWWYEl5prWdnso0Z7eQn+tVF+9yp0LxA860lad0sNJS3jHdPp9SR9Kv/wsSt5lc0xAUNDhDKVFRHfQajO1EaJaevXIDERFqaksx5TbYAUslaVeuPKT+Z9Kx27IkstqxIQrTvhKSD+m1X7qa6ftSQpD3zKVRgUNtqjtthIPI8prNr/IkJJCpelJGNKVEZHuBWa4xM5Xsrsl5a3MqQknO/OP1ppkZXvTLq1uPZwcD1JNSMOMpYBI+uajIRv5DIDBURtmrLAijbYHcc1HW+GQobEirXbIWdI2pAIJixsDAH+lSsePgeXYU9EgpCQDUkzCSMDGKWjASY/fk+9NutEDNTPyhxsN6GkRyRgpP0o7NRBvDbvTaDlVHPRwDun9KbbjgHOf0por5FugqE3qxjAxVgtjRSRjO9RUJCf/AEan7ejJAPY8VRKgFjtKBqANXO0kJxvmqfb/AC9/0qyW5/SRvxVRi9W51IAFTKHEaeRVUgSBgEGppuQCkeajYA15wHNASCDk1518Y5oR6QkgjVikbGQJLAPtUHMQMnipaQ6MkZqMlAHfGalJjJkJKbxncZqMdbzk5qbkJ23qIeGFYJrz88rZLIzAG2Pp7UQhvbinW2x2FOpbxviuL3dbPN8DIaIHFOIaycAZFPIbyMYFPJb1VGWYKbQhtnHbc+1EttEDNKbbI5H0otpnV/Dx7Uiy2GxDbWe29O+DvxRTMRRI2NFCGcAYPNMm30HsjPAHBT+VIVHI4G1TXyJPY781xUD1FMmwMgFsY2CRn3phyMRmp1yD2xtQ7kJWDtQuSAQDjO/G53oV2MVcip56ERwN6HVFIG4plJlFJkEqJk8UtMYA4xzUwYeVHY/lXBD/AMJFMpspzojwyfvTzaVIO4o8QskHST7UpEP9a3Ng5WDtk42PNEtukUoRFZzpx96dRDON0k0HkkK/k4FlQyD9aUBvuNqIZh52IzRHyJB4pXNtEmBeFnYd9+KfZjFR4zRjUMkjIwOKNYggk+WkUmh4oEbi4AwNqeEYHnHFSKIeBnHFOfKjtTcmUp+CJMMY3FCSY2EkY45z+lWEx8Dg0K/GH8QHtTwbYyKouOsu6UoPmo+PblK/E2CD60YWClwYR+defeltowkAjFet6V7OiCoYctCEKSvUN96krdKVDBRk8bVA+PPcfAwedxR62Hw34mCMjvXrR6HWyWCVXNSm0b1WeoLEYLgyUgqyoHO5q39Hw1KdU4vc8jI5qL+I7zCCU60IOOfw0nJ2UWkVG3tuSJXyzcyK26rGfHc0pI+uKvbvTLRsyoi7tK8RTZUtqG0ktEkbalhQO/vWVWWfOkT/AAWm2HG21airxltqKc8JVuM/avoHpbppt6KifGnqbGMK1rS+pRHYqKgR9gKTNdFIaZ8edf8Aw/v0e8q8C2yUMglQQ8pWNhjJUkAH7fnUl8Penr3b/BdkTbO5HlHKo2vD6Tq3AVpLgUcfzYP6V9XdRdAT7gFKjKDxWPKhSfEH10kkCqDdOjNCw42xDbW2cannkJSjfBCQhKcH2GT7157zyT4taOpY09h1vREbkJal3kynGWgFR4vgOTGO+hzdKiMY3Iz/AFqz2rrSLDlpszhiIiKV4uH1JJWocZ0KKm1eqs7eprKDHuMVM5+TEkvwyC22WJOgo5BAeAcGe5C8ccVIwJ0Dqi2stzPF+TikAyAorkApTnTqc0o2yckZ5OKlOMltFI8Xpm+9F/GNtq9OWS5xUMeKfEZdi+I88gZ8wWCgrUkZG42A57VXf7Tnw6X11b4vXvRTaZt7tcdSXm2RrTMjEjU0vfY8kAjknOKyJ/qdhCpMGQ6tMEnQh9Fy1PNHTgoCG0nIAxlIGwzvtU50L8b+u+nLii0z5Me62ZwaEBp1YUlIHlwrZJOlKsBWPw7jNdOHPkx7b2iM8UX+ldnzBbrxeegL+/f+kRIVbskzrU4rKmFcKBCgcgAYCsbDAOMA1do/xn6D6nS22v5mI6pIBiS2wpTZA/ClwHCk5xvua+l+v/gJ8Pvi/BY6r6XnsWvqN1oKRIhOpKHcdlpA0qxvukZ7GvnP4hf2YfiT0yt+TL6Wh3qLGGpcuGAy4pONipCTuo9siu/I/S+tanN8J/2ZzRWXD9MVyj/dFgc+J/SPS9oadmXTwUDLiCy1lRWUlJQcZynBPb3NZM91HfOvbu5PPiPtMEIbTHZy3FbJxrWobJJzgaiNz9KGidMXLpt2Str4cXIS1NjzSWmnCgg5yUraP5jB4+4l16P+J3WVu8O7w7t+zFKShDDz60sgjfIYT5Qcf4cjNHF6X0/pn7kpps0p5cn0xjSDul+r4kD459OXOG6X2bVMMKSQkeEnxBo8PXkhROFH0yNs4zX6YiQ1cul/nlrAQptK0kjBICAfw++3PqeBX5cKnwOhukYXRio7TU1d/Yva3UkJW0222psDVjdJCidxsR9c/bHw5+Otn6i+Hq23HVuSkNlOjSM5O6W8AdgBnHp67Dzv9UyRX/k+VR1ejxznr4ZVLpDmfP3RiD4cSK0sGOlJwpx/95pVgYACSsqTwSpIV6Cqvc5GqzxpphMxjGKY7bgYT4KWV6lhxCRtpClIQnkrXlXtWl2m1vXGyuXGU67iZqLSWAGEvadlZVjUNRRjYjCc745rPVltdg9Ok2MrJZQwhsP7nwCvGW9W5SFBOFH8OBgDYDz8OT6dnXkxrnSK5cYjTLSmrMlsXCQvLaluFDTSkkIaaJ3BUpCHjkHJKcVKdGQpiPip04+JMZ9Ep11wDwyPEK0klSU8pOXVAncAhXapl21y7dZYVicSDG1JKmlx0jS5kq1LWe2UrITyoqJHGy3LEOnuqbVfJ6UmPbkOqbU02Q5pON8dsasjHqrvXRLJ/wCPRzxx3k2fS97kR7RZ3bi+4GHQgYVqzgAbgA4Bwc/TGDtX5IfFS5p6x+
PPWd3kq+YS8+sIKcgeEkBCgn1wkZ+2a/Q/43fGK22zoxMSNOLj01olpeoBSVhOyidknOcnG57jmvztFkR1JaX7yHUW642iXIBmJOhU1bi9aQHCrcpQVbFP4QBuVAV7H+nZI5G5fB5/qsUscVF+TQfgR1y1YlfsO8OodaZW00hgvFJ8UKKku5G+CMHnGwr6B6ossS+Wx4uENyHSkpwvxshadwc5GM5wnk53r448a4xrmqT1PaUqdXGSlp+CU251IUkaVkNoU04MchSBnfJBqz9NfFj4gdMxk2yDJbkRllRSgt6XBnnCxqA+2PpWzeiyc3kwO0xYZYSioZUa5078K7RY5TTchAZZjFS0sISQo6uSRznjn2qx9c3W19G9PswpDxM9SWWo8AN+LIkrc/AgNo8xJPb0GfesgifEv4j3pww7bPbsyFYS+t9fitrHJHnATnjcqHFWXoWx/MKfb6Mtyuq+ppaVKmSlAuN4UfMFu7BKDuC00NTgylbikZQYy9Jnm+Wd1HyVjLFHWNWyR6N6G6kvwY6XYWl7rHrl3xX3kFIbgW7I1uJAPcHQjH4vORkOA1+glu6bhdFdJwOnrTHERi1xUQ2XXRqWEpSEjSk5CeMk7Dnmse+Efw/jfCSDK616tvsKX1Jdf3kyZMeTHLYCRhI1qw0lI2GBsMfSg+rfizbb1IQsvluA0VFGZqEsSVKGAv5spQlvbPdRO3GatlzxaVL6V0Shid15fZeupepJVtCk3yVCjB1JbaUzbpKEFY2OpxQ0g7jfJ+wrHesPiPaZDDsSLEnJkJBbS8xIwfKRqPlQ4T6ccVX7p1nB6dmIZgz7qEzUELRYX0StydkFwaie+CSDvzVRmzLw5Bcm3uZcYdvilSG4024NAjV2dLZC1r9E7kVzRm5yt9FXBRjRmnxXs7dyS/cbI9eGkLSfHbkrlHCsb6lhCUkEZwVBI7Y2rILPDhxbo2xJS4twH/xAAk/Yb/pWs9TvAvCVHhqhrUykgmS6oAEkDYqKTn0I5O4NZlcOoCqaZC5sRT8U4LEiM0FKGd9DiRg/TOa7oW1RzukzeOnEdLPRmbY+6l0KbBWl11KRk8YVp43rR+m0N9JIcbgWpUqO8cPNnfKfRKsH+g+1fOvTF0kXaSw5FiBxTytKttxn+HAwCO/FfV9ut9xhdMNuzH9IDQKlR44Km04x5ioH8zuK5mqlTLLSszO9zYs6c65EguMoUs5bI352GRziqnfbbHQwFqbQlSt8HGav0+I3MUfAKF6ck+OttKj9hyardxs6VLPiMNEnjS4M5+1HI7J2Zr+z3FyDoSCCdsDtU5Ati8gAZ+nFWFiwHxc+EEozt3/yqfgWFAx5QfXaoNPsSyIt1qUAPJVot1sKSCRnHtUtb7JsP3e2fSrFDsYASSj9OaTixkRMeCAkeQYFGtQ9I2Sdu1T7FrAAyiiE2v8Aw7VuOhuiuGGVbaTvTDtuVjIT+dXBu0lQxp24zXXrSdOMYz6UVEDM9kwORp/OhFQSgnCeKvEq1YJyjao9y1+qaaMRGiuMNqQvGOdjU3bkq1AngV39lHIIGDn0qQhQygjCftiqIGiTg5AAI96mohUDxj3oGFFPcVMR2OMp7elMHoloLp2FTEeQojvxUPGZKcYFSTWpI3GNqDsIUtxRoN4rHfaiN8Z9KadSSMbVN2ZMi5Tqhk5oBx8gnIP+tSMtsjI05FREhJHP6dq58jdGbpDb6ioZ334qLkA8jFHOKOnFASs7kHj+tcOTo55zMTbG+KICONzSEJI4/SiW04ODuRzXkTy0cZxLYyABT6EV1KdhgbU4lJI+m+K5ZZQo6hPmyd8n1qWhR9SQcUHFjlZwBVit8NWBkbGq4ny6ClZxiJnHl396Mbg99PNScS3lQBIOPpUqxaweRgfSvTx42x0tlfTbyv8AhzSzaTjYc1amLTj+D9KLTac76BXWvTthcSiO2dWNkn22oVy0qHmCa0Rdn2/Bn7UKqygnYGl/27MoGdO2pR/h49qHNpI/grR1WHPbih12Hb8Peh/t2biZ8bOTwkikG1KByRWgqsONyjikKsgJA04oewwpUURu1kYyjvTqbWcYKD+VXUWIn+Db6U6mxjH4PyFb2GFRKQm1nH4cU6i174Kcb7Vdf2Jgfg574pSLPg5KcfSt7DYOLKam26caU7ewp1EFXBTVwNnydke29c/Y4IHkP1xSf7dh4FXbt2+cGimoKgQAkjFWBNpI/hxT7drIONOaPsDKDIJEI84rpg4OwqxJtoHanP2bn+Hn2pvYGUSrGF7YoeRBXjGDVwNpJ5FMO20oH4c+lPHDQyRRl24lXr611Vs1Jyo7VZpMEFRARgj2qPkRpCW1EqIGK78EOLsqlRWlMxoj+6c7435o+a0HIetpIG1APsLMjBHBqww7bNukZUa2xVPrQnUsjCUNp/mWtWEoT7qIFehydFFortnva4T5Z83OM1Cddw517dRHt8V+bIdP7thpCluOH0CUgk/ar/B6N6fiKRNv1yMpCz5Uxnfl4hPp8ytJW99GG1f84qT6t6nj2Dp92J03AYiNLQQpLLamWlf8w1F17/6rhT/grV8m5fBlfSPwt6mbbS91SLN07EQ4AtcuY4qQjPq00SE/RwpPtWx2HqfoDpiN8jap8y7vNjDkiUsx2Vf8uQFEe2SK+dLZc5/UnUaBcrk/JcaUEspcIDbIP8LbaQEIH/KBWjyumibaWWrouKpXK3FnOfakySrSRSC5bbNMuPV8a5x3dCHPDKMBCJCGY+n30gD7kmsyvt6ZcRpdfhNLcwhmOh9p/fO3lcwDyMHj61JdPMvMx3IDBVISEYclOp8qhjY+dWDg/wCGs66xh3m0yJAjqkpiKIy6XGglR98BOB9vzrzMv6rkdsF9NI0mxTeobQ21bpFwE/5jf5ZiO04M9gsEoCD7/l61AdVxo1u8N2TY5draCy5oircQtSgdlBtClAHnjJPbesrgOMQmXLpEVBkqydKnYOtCFZ3yrIQc+pUKtFn6i6TfggP2WQudpUXX4dxgNMqz+IBAKtI4/iJJ7VOt2Uolm7XZ5UyClu7tR55dCWPCfcRJcHKkFuQ14RJ5OcE55B3qNvnRPWbFzYvfVdpMVrxFAvspbdU44BpBIW5pSvG+EOg42GMUu+vXme21GsN5mS7chOh6OBpKVHHl/dFK1EDbOVfTtUlaL9L6ZU21crP0w6gRDHgSrs3PEkukDS23oS2WxnfdRO+QTVcWhcl+Qfozru9dNNyo7PVoabeSpRQp9hvypAwtKCpKgrSDnC1AEA7k1sXw++PkOXbHnuqYKZMRgZTJgRVPJkjJA1Lx4YPvrJNYtP6hg9RoeEqXbmJSNCJKLeFuR1ODOHkNrQPMoZGUqySDkHOaj37hcYIZucybd7lEAS085NadUhkJOB4SgnSgnIGFYI7+lNJpuhFG+z69s/xV+GN6SI8Rm3seM0lSG1BHiFJ/ETg6sD3rOfjjfOnzY1t2ltLch1wobC2koS7kc4A27DUd9/oax2JOYmxfDt6oslxTbSVtSFRmcJOTl1xxLbaWgNyBlaj/ABA4Ncl9a
KLD9uat0JuSEpYbcgx1OkJyQtbZx4aR/iTjJxg4GTxZ+bVeDs9PCEZX5Plbr5Uhll5vzPvoeXEUtaEAnCsZSMAjAG+Se3Fa18Ao0wKhQXm3o8ZnzSPL53xjcEAEgKOw27d6zzrGQjqPqrUEtojwXlJUhS06Rk4xttqz6k/etp+F1sDawtJLGgFpJCt0hOTjjc5Ucau3vXR/qE0/Twg+wejg45ZTPoyCph1mO1AKmkx2w2lOgpQQjG2pZwE4Sk7HJ98kgTrm1XGPZhLehzpTjqwULZjslWggaVJGMYII3KydzgHOaf6Ij2qMBGbLKW0JKlpMkq1qIwcjUDxkEK5+2BdLhIirtaLUzNW04UoSlxCMOADUTpKSCdkqTvxnPNefjkuDTKTg+aaKZbrQ47BYmtxZKlkKQ4ZDYbQQVK0N6tWFFJK1JzkAHONzUd1EuO/bXIijIjvRtXhgJJKtScLQFKScJ5Oc9yK09Llsk2ht2V4iUeClCcDxFb4wUlRCjndI3AO5GBvWQ9ZvJtD8iHb2X1Ao8NSi8A2SkJwCVOYUoZ33xuRg4JrqbVKjmjGVts+TPjrdby3bnLLdFO/LtnMZYV5UErOCADhJ0gDI5AqgWD9pzUNR40RMghXjaHmg4FKJ3WO+QkAEnI9q2L4vWRu+29XgQwooQVeGCFlSDpyQoHCsbnI7bVTOi3247wt7+p6e0FR2Wi4hICjxgJPbScj6gb4z6HocsY4ZQS2T9VjcskZvqj65+Anw96A6+6bTHu/T8FhVvT4TqXQk+IpZyC4pQ22JAG29W/qP+wx8G5vgzYLK7asOkrQVLKHc8J2OMb7aeazTomc7FuDdys9/TEuBbcy2mQlWvuUNgABTgJwW1/jABSFEkC1u9ZXw3hTFtMhh1ER/5qDPY0oQlwpyWuSNS/MnUFDOMFOSBz4M2SLab/uHNii9okG/7LHwL6NjouF3tCHlIdSlK0vPy2kqJwAttJUtKSrbJGx5Ip66daxOgoKonSvSNoiR46UuIelNuQWkjG5CzqWpWOAEqyRjAqqRL8+q3SLqq1Iauj76S8li7FqS2sgtl1Ibc3QtKRrSTpyOKjLhe4L015UzpUW0KZC3fkoao7LxI0lzX4gKVYSMFSfXnOa6XJN7fXzs5uLogeo/iB1t1FfWIF66qXGjSfDeLTFvWmM8pPmShSpCvBAAV+EbkqGeK7Ptdw6RVIXL6chtQnkYUi3QAEOLWrBPhspWl1RwAQrUBgCo25Xxu122PE6WujydCiqTJmFP7w5OGyFh0LIz+FJA2JwKqcnqeLAlPTJk35Zl/DavmW3n2XHBjGheSsj3yB6AAUeDm7Mmo6D5Nqbt7jNylTHLGVgoZCUCEhpvSANISEBB5yle5OSPShZqGbh+4m9TtFKla2ELXFkrwP8A4qsqHqBgdt6batdsnOftfqG9x2kyQX/FkSM4UDwlTmrG2MHOB6ipjp3qmyzWH7YLY8/nKUNrf8ZDunbIS3qBHuDV46VEJtvaKpdrM2FotKIzch97JUsLCHUA8YWyopOeeD7ioD/2VwJzym56WnFk5V8y0def+cBKh9zzWn2hhifdQq22lmPHQSA4yhPhkDkFKt0n/wAv0FWlaUJZ8d2OFaTpLpCXNB9FHc6fU8j0NJPJJOkGMVWyE+Enwjt8F9iVA8NOhexdkJI1D+HCsD9T7Gtzvln6kiwfAaTNZY0fyhKV7fwq3BB9M1H9AxHosNxcyGhppQ3QpGnSdvxAeU/Uc5zRk2feIEh1duVNgg+ZX7OmBvI9cADP0OabElJ2+xclx1ZnEuI+UlqfGSy4B5XHfPr39h6VGuWxtWA2W3VAYJQDj9a0VXU5kuKF2tdnuozgmXEDMjPu9HKCT7kGnEW7oW6EFbN3sjvA0hE+P+f7t0D7KqkoJkLZncazZXkozk1NwrHjT5d6u8boRD21nvlouJO4bRI8B0//AE3glWfpmiXenJ9qUG7nbJEVR48ZpSQR7EjB+xpfb+wbRXoVnIwdJqci2zACdFSUWEAPw1KR4gwNqHEZEQi2p9Kfbtwz+HP2qbTEAG6RSxHHpQ4hsikW8AA6eKS9ATjjtU14YximnGtqKgAq8i2owcJqLft+knI452q2yWRg5qMfZGeBTKICuKgAk5H5URGhJSeDkUetjGSBnvtSm0EHbFHiah2KwE7bVKsR0gc0EwQnHGKkGlijxMFtIwKMaTtn70Eh3HeiGnsbFVLxAFBA7im1pyNq74oxvTTj2M4x96Vx+AA8hrOwNRUmKMEgbkVLLcCxnIoR4dgMA1GcdAbpFflN6M7GgVsFfIJzU7IZCs5oYRQdxiuDJj2c81ZgyUJB/wBKcRtwQKaCiT6mn2WlrOydq+ZzSdM5kPoG2++OaebaUo4G5pyLAcWrvj6VO2+0+YZHcHcVwLlN6Cj1ptql6TpNW+22sgDKN67abalIASgVa7fASkAAb4r3fSenbWx4oCiWwADCal41tzjKf0qTiwQNtOKl40AbeWvfw4CqVESzauBp/SikWvH8O9T7MFIAymiW4KfSu+OHQyiVhVrGNhTS7T/hq3mCP5c02qCP5d/pReEPEqCrQOCik/sjb8NW0wu+KQYe+NIpXhDxKiqzj+WmzZgD+CreYQz+EVwwR6Cg8BqKibOnjRShaB6fpVqMIeg/KuGF3CRW9g1FX/ZA7J/SvG1DP4as5if4RSTD9Uih7AeJWv2WnH4f0rn7MR3T+lWX5Mfy8e1eEQeg/Kh7H2NRWv2UMbJrotm/4efarGYYO2mvfJ5OcVvYMkV8W3vprwgAfw/pVg+U9q4Yp7Ch7AaIEwkgcUM/DAG6asi4h9KGcgLWrQhtSlK2AAyTRWEKVFOlQScnSPyqKlWqXMV4ERhbriv4EDJ/7fWrxItbLDmbi4oaR/cMYKz9Sdkfqfao+fc3mI6mIbaITOMaWiStXupZ3P6U8cddjK3pFFdstksK/H6mlh1wDUmIyo4+ilDdX0Tgf46Fu/XjkiKIVmgR4zDZy2lbSVpQexS1/dg/4lBav8VMX6MZSlFIUVKOVE71DIt62hh0YH0p+bXRWMfkhkz5797FzuUx+S+ThTrzhWvHpk9vbirTeX2bjbiFK1DRuMVAzorYIUM5B/yp9DTnya9JJ24Jod9j1RTemLGJnWgjW1sIIUCtfYVrd4iz7KzrtojakgZW8d1fTbP2qidOsu227GS0VpJUTgcmrW5Dnz5Tsi6veHrGWwpOdvYVOb8DY9FbdFykgPvx2FK8xCvC1K55GrOKp3VbV4vSWYrPziXkbFSlNhAOduApQPuRWlwk/iYkzco4GtoL29hVfvc62tSFtoC3Uo/hWyjQPcIA3+9cWSKZ2Y5cTFpNjuan1Ny1XOWpB3KltqQCO/nTj/Opex2ESHWkT33GS15krRbEDQP+dpQycetaWpq2XZkHW4lzHlceaSFpPqEjOmirH0NaE6pbESLMcJAy+6vUVepCRjP61yPPKHZ0+2poqPyn7LcTcrXf7lHDDagCycaifxadS0pAPca/rsKho3UdglTVQ5rL0SP4X+9LdkKbUs5JUQqM
HCdwDr3GT7VvTVhYMdMae0sAJ0hmMshCR7ggnPtmo+R8KLNdnxIXbGXFI/4akK0oUP4s7gnetD1SapoSWFLZkH+1HTzj8a2WC2POONZTCfdlJSPD5KELICsY3I8Mcd6vsBVwYWs/tOK3FjNhUhMl1bqUbbDLiWghWBsNYBA2A2qzMfAPp9LrbcJt4KDgWWkhlSEq7aNSAtvnHlOasUrp2y9Ixm7XMtN1cjuupCVtqAX4idgAHFYW2Mk4JBxnTkCi0sjtIykoLsyFpu3zpMZmPfJMoreKkGI6UeG32ShLjikac4wA5kHYA52r/wASGmI63LL0y1cIDLYS4UTFIVI8RJJU88ULJcB/h1FW2SM1vV3VZrnC/wBmUvW+OwFIfDBt5KZDCQRqKQpAOFZykalbkZT2ziX0EXH5Edx8OrUUuqjQGmxrWo6vDDSdQACcZ85KR32pppRdj4XfZ88WS0SLapUq6uqkFpxS1hAVqDXGlI2BJ525yAat/SN9lWsW6NcLU+l+UklKm1KQcrwBlW5GEg7jcEgb1rM34Yu2C1fOyrec6gtTzml3QlRBzyo5SCd1HSByBWVdURp1jTEdvbAy88t1rw9KQnCvxIGcgD0GPrUJ5I+of3OuKeJJ+Dcelbq1JZEhKXm1ONuBtIIKXGwpP4cFRChjfOTtzxWjMXePFQp6U4klxaUoGnzYOE7BPY5Jydt/rWIdCXCI2yy4400hKUhQUkgLcHBBP8QAwDvkYNXNid4l1UY0uG4QsJaUhSitx0E6WlZH4M5Gw7HKqhDFKOxpzjLTL0ZTIfETwiFKUlYYKNStKAQrPsdge2cVlXxU6wt0WG+5DaadIBCkqUM6OcAgjTkk8b5J3qzy7u21GklpSM6i8hS1AleEHyhCtykDOOBjHesW+MN5fetxhql6pTqwnJOrRggAeYHQMHJAwcqOeK6oYnJo5nNIq1y6puNxtzk8WkSkkPKDCEqcVuPwp2BHcYA9Bv3qUyxRp9kidTW1KIrikFTDaiSvIIGgjhJBzp2ySDxWkfDnpm4vzU3x2cyzAmITHKdKFFwJwStKHMJI3xkk87JO2NUb+D9oktrvHS7EdyS8pXgLMVGnJP4RsE5UcEgJwE7DGwp+axSTj3+aBx5xqXRn/wAIJsuYpuBc4819tStSjFgoW046QBh3hwZCAQTsSkHGpO9yfs9qachM2/qlTcm2qV4HnWqIoBWrwyXHsNgEqy2tIwcjA8tVhjoJfTHUjsa6RFOOJUGRqS8hIS4chUfw3U58++k6d8etWsNS0XGNHmx5nUcdKHUrQzHWw5FbQopKkF9vCjqThW+oDCcggZ6U1J3Hyck049gSbrdGIbTc7quNaJ/iOHwrXcBNbKVLJKtAX4jBO+yQpIJAxiqXIReWb4uUyZF4cSpS2mTGcSptvHKQpLSCD9F8q2PNWO/P3yDeVs9JWcMtgqeU5iGAAoApKHMKOEj2Qec6s1jdxk9Xi8KmSQ5PBScyZEpSw4NeRupBQhQPYYx6b08Li9k5NNdlkunWPxBXMlJndPNym3SWm1sxEsKAGQhJbwsKxvjjISNxUxaunbVNCuo7z0/cZc8MJDr/AMy0IzCgAAnS06twHIHlUBj07VB23qHqMsqdRYbXIeWChYLDKkuDGPMrIUojcjcfnTKHZpKFPRm4jqVA6m4ulR32ysrJ/wBe9Unnio1f9CUcbuxjqx+7rcXHi2uEhhXk1w2lNOt/Q53+6artmiXqLMSz+0gltOCUyXmsjfYhQOQd/Y1crr1rfLZHbQh+YWVI8vhoZxkDbGoHJ+n6VSpN9nTpDst+JFmLeUArWz8s5ke4ygn+tUxzUo2K4tOjQ7R1qzAuClXRltySygNGUlBZW4kDgrSNDnP8aVZ9e9XHoqUrqC4qkC1lttpZbLjQSEuo7e2fUZ/LtjNi6Eu17uPipbVDaICkBbg1AemwAI+21b30LZLh08pEUW5paudSXPD1KHcHOk5+xqbam6iNx4q2bPbmmYVratrBcjr8IpaS8nZQA2bJ9fT2GPSqNLt8+5PFpcgBwHP7l0gn6fzflUnc7vIS0hlaJEN9oAgglDjZG4IVuCOOxpHyDdwaavMdhIbcV+/bbAT4b2NyNO2lX4h9x2rsgqRyz77BYVueYHhycOlO2pSRqqRRGCTnTToZcH4lOE8kL3P50+hoAfQdqbiIzjcdKwApII9CMipy0Xe8WpHg2+6SWWTyz4mpo/VtWUH8qjW8Dt2p9opBopUaiyMXCBMI/aVlj6jy9CPy6/rp3bP5CpNm22+QAbddmys8My0+A59ArJQr/wAwqrx3cEAUe0+AORWf3BVdEtKt0qErTLjOMntrTgH6Hg0wpASNhXIl5mQk+FGluIbVy3nLZ+qTkfpS13OJIUBJjpaUf+IwnA+6OPyxQUUG35B1qAG350wtwHvSJbnhHOtKkE+VSeDQSpQHfOPetQR59ScH9KjnyCSM7Ut2Wkj8WcDegXZAz22o8THFgZrmAnimTIBOc135hP8A3pqCPoc00SiTgc1FLlp7HakfNgnmg0YnkyQTuRzRLUnvmq63K33NGMyeMHahQGTofOMDG1IU7qzk1HtyPenUvAmlcQUFZNNuKpIdAAzim3HQeDSuNoAh1II+9I8MY3ri1bE17PlzXnZFTIyRhsa2KKgdIPtUxFtRxnTt3qVjW4bZT9sVMRbfj+Gvn5em5HOokTEtoGAUVPQbcNQJScn1opi3nUPLwc1Mw4mMbU+L0e+hlH5F26EAR5RVlgw+NqEgxQCMAbVYITGABivY9Pgooo+QmJE2B049KlY0Xg4pEVAIAxUpHbB7V7OLHSHSo6zE9qLRDGON6dZR2xRSAPSulIIGYvbFNqie1SegGvFsGtxMQ6oZ7im1Q/aplTQ9KQpkdxQ4hIYxNuKSYtSqmgKQWh6UKMRhje1J+W9BUmWxXC2B2rcUEizGztikGMM5xUmtAHamigGhxMAfL+te+VHO1HeGDSgjHpW4mI/5b1FdEYHke1SPhg9qUGvUUOISOEVvG+v7AUoxIvo9+SakPBHpXQyPSjxMRpjRkjaMpR9Vr2/ID/OmH23FJKEBLST/AAtp05+p5P51M+AD2pp1kaTtQoCRU5kFCUnCB/lVWvUUnKRx6Cr5cGCQcCqleUFAOxJqclSHj2Z5NShp0pURjNRlwejlOUpGfaiOpFOIdJGeaiWWVutnVufeuds6Ip0RrsdUp3KRhIPNPq8CMgMJcypW25/yoS5yJEJK0pSQMHJFR1hnB+eVTEqUkbjI70bNRbIHS64sUXMyUlajkJX69q5PucezKE6a8ZLzowhvOQPbFTDki2yrc01KcWhORhIG/FR15t0C3QkXBTQUgHlzgf6n6UkmNErEuRcA8Zz7ngoPnS22jUcHt7U0zKts4rcuMFSiThtrSASfcV6f1lY3UqLjbqkjYo0lpJ+wOo//AINC27ra5pSpXTsYQY4JGUYQVb/4R/ma5Mh0w+w4ux9USpRcj2e4pgpTtoQptB22z5QBRts6e6lix0yZqWIEdZ8ugha177hCVK8x9VHAoWVe4Tj
KXb4kTJj/AP7rCW4VLdJ21qGdQbzkADdXAwMmn5Hww+Il8SL9I6LeiRlJGJt1DVsjNoA2CVyC2kJA4Ca5njeXw3+fsXU+HbX5/wAlht5fiu+GiylDTmAXJEthKlJzxgqySfT9Kultmywg6YDzCNQCUNJaAHHqsD3zWRR7JYLVJUm4/EboyI4TpUqB81dHge+FMMlkfZ4/WiIHSPQV1d0WF7rvqqYVYJs3TLOM+zjzrgR/zKRtSx9Koy2t/uv/AKNLK5L/AKf/AMPoe1QU3NaCIinHEDVh7yAHPOpIP6c9q7doFidR+yLzBUlLqzjTFLqEBIyHE5bOkjuSo/riqj8NIsjpyci2HpFFqcIBKr11TFemKV2y0yhvST78Vol2ssq4l19m7MsSFoO1vCnFN5PJWVJAzx+LNdPt1HS/P7HNy+rv8/uYr1mx0rA8ziLYqKG1qIWA7qGACUjUFZ53Smpn4dp6Ues0OfGSiVBSXmwlAD7bSU4KkFvvnY6iSOOcCmYnwo6svjN2Y6v6/uEqM4pbUFD+hzCcEajgLClEnTgk4xz2quJ6B6/+DPRrFi6PhtXaW5LClpkSBH8JpR822hA0Ab5UVngAAVyzxJ9y/wAHZDIukjROsrTPvkJiDFYmswigNrS4xrQR+LzAbAb8Y9NztXyl8eISLNcU2u5NvRVOYTFedThbxTuE41bYGOc7n1yK+qVdZ9QWi52S1r+Hz8pDpSwZolBaG1JAypYCCEFQ/Dkq2HGRRfxf+AnTfxb6dLMiGpqWyglp9CiShat8Y3Jwd+N+1QwY6y8rK5Mzhj4tHwkj4lRbLGQw2+p2QhYC9BwPN+LIO2QduOcHvVts/wAardKhNLfDkJwkrf8AMCQkpUMcDfUrPIyfYVk/xc6B+InwUuLjD1oRcYiFqUp12OppawedCs74OxyDgjvUVbb/AB7nbYtzTHU0iSgK0LAyDnBH553r2MkIRgmo2vmzkw/+WbTlT+KNuv3x/abuHzS2GvBbilKWUnBU6leQCeQClKduMk71R7n11ZOtGmZrjqIyYay9JaUrBCQTkAk7gnH5D6VmvVvUkqA7DtdmtplzpxIa1HCE49cbk78VvX9mj+yjefihMidY/EF4MRGTlqJFUENBQ8yVLUNZVnH8XHpVcagsfJxq/vv+hHM+E+KldfY3P4NdCv8AVvTcLqyZDdYjqbQqO4tZbynOpLgxjGCPKfKCFYxtmtjMaVZobSWi9EjxyFoU5LCw6n+bSEjCck7KI9ztVynWZywWAwbfDbbcjxkoaCHElKtIGltKcpGDjYE6R29KzGW58X+oOmbjKYs6LFKaSo25TKw42Vjy/vClWATyRlQG/I2rzJ4lB77OiGV5I34Q11DYIF+tUiHeWUobX5mvmXllS15BCgSoEb49Nqx+49LSZvjKt8xuHbY85p9seOfFa5yEFJwBnlXG3Parr0r0f8bJ3SL3+2EmC9cWWC2siONCHM/i0DDbpUNyQSQeOakOjPgz8QOjemJiZcu3Xq7PocUzMlIkJ8xOUowl0gpx2KRt9KrGD3xf8iSyRX6ilsdLXOxtJkS+mrVORNd0B65sIwQdyQpJV4gVsSPptVcuPSSpr4li3Q2YywQpEOEtprOdlAaQdXvt6b19E2voLqI2dpF+t3TnjpyoJct8vwMkjVq0SFjkbeU/TtUbfHGLYlcNjofpq9yHEhTceF1D8q44eMIS6WznI2Az9M1eOGUlTf8AJySyR5aRhDnT8GIlLyYqHk6catQCh/5hv+dRcgwlFTBlMMrB06FoSkg+/wD2q49RXzpCK8pvrX4FdbdHyM7OTH7i2ysf4XkFxKhz/wAJX3qJgWv4W30hu2xb5ISvzBqJ1xBWtHsGp1vYX9kqqUvSNf8A6RSOTV0/7f5KRPs9vMjwpURB14WCjB1e+U8/UilN9FNS1IbWw8gKILTuE4I9yOR9a09Hw/8Ahha2kx7878QbUhR1sJuNphy0NK/mbcZebJSe6Ug+o3qbmdBWB9tDFk+LNjGGkuNi4RJcIKQfwrDnhrbKTxkqx6kEVo+mnff90aWaNFJtPTaobiI6HUNE/ib/ABII9cHfH0rQrPZ5jGQ4I7zJwNJJP5K5B+tAN9AfEYNNpgW5m/R0nUg2u4xrg3nuUhtwuoz/AMv2q99PWy9Wi3F292qfbniPMifCca//AAlJH+dd+PDKH6kcs8sZdFTudvVMK2yVKSjfQoEEY7hQpyxn9lEpUxqiPJ8N/QRkj+bHZSTuPXcd6kLzeW98BpJ4Og7fYjmq2ZjjatQeGFbjFdCVEe1TLA6HYTy4rqgSnhQ3ChyFD2Iwa4H0k4zUT+1DLgkBeX4Kdv8AExn/APJJ/Ij0oJF2Ktwdu1OYtKXUgHJGPrS0vpzz+tVlF3IOCrYj1rv7XxuFVjFsRLCcYNPC4gDZVUz9sn+bPali7njP50AaZb/2kB/FSVXJOfx/Sqiq8YGyh+dDuXpQOdX61glwN3CQpOQQrZQ9f+9DuTRtheR61T13lR/iNcReVK/deJ+I7fWiYs707GfPQjlwSCfNVXevpBPn/wAt6Aevqs51CijWW5VxTn8VIVc0ncK4qlLvZO+umVXsjOF5x60QWXZy4jnVwKSm4j+aqQq+HJyvj3roveOFihRrNCZnpJA1Ue1OTgAqx96zZq+nIIczv60a11CcaVOcd81qDZoiLggY8wFPInt86h+dZ4jqLHCsZolvqAqOdW3NAxffnkZyFfrXPnAobqqki+5GQT96IYu+sAa6WRmW1UoFJwqufPBI0nk1ANXEYAK9+K45PGrdVeV6l7OfJTJCNb9xjvUrGgEbYIqSj27Tvj86kGYOMbUsfT2bjojmIPqM1JxoeMApoxqIBjajGo5T2q8fTpDKJyLH0n3qXjIwBTDDRGKOaQRXVDFQaoMjDFSkf1qNZ2xRrLuNq6oqjEm0rFEIWKjkv06l0ncVQxIBYpWR60Cl4+tK8atZgzIptRHemPmPekl7PetZh1RFNq+tNLe25plb59aAUPKWM80hSxjmhy+aaW/2zQCEKWDSM780MX/f9aT459awQvV717V70L4vqa8HvesYMQrHen0kUAl73p5D3G9YAZt2roG21MB3ala6xh4U06MjHrXvEpJVqrGI+W3kGqb1G3pQrerzIwQdzVO6ha1pUCKWXQYmR30ErUNyQfSo2AhRJC8kirZc7e3qUSU/QmoyFb5TrxTDtsqSQdxHjrc/PSDXDLujqXRX7rBQojxB5TSLFDt4mBJCVAkHarbL6P6svDiYELpuYZCxqQ0dCFkeulSgQB68DvSOnulrDYlvL6t6ytcZ9lRSqFbFftCXnuCpoFlv03Wo/wCGnUJNdAcktAvVUaFbosabHa8dQIIQDgE9tR7D1ohPTvVfUlsTcnrGtmIU4EmYpEWKgezrxSjH0JoiV8XujumUrRYunrn4qCUpfV8uiQT6h50PKT/9NDdZj1r8VbrNnJuTPRtrlSVnyyr0/KuzyB/9ZwNj/wC7x7UXjh1KRouXaRPSPhz0EJYNz6nlXqWo5/ZvTFvXPdUfTxF+G390hY
+tTBsdhtsYpX8LLdakYwwrrbqLwlLV6mM2tjyjuNK88VULZ8SuquqYS7M7eZMJoDL7UJKYTGn08NgISfbINVyRaHWrk5KisArI0tpbTgJSOwxz6k8k5Ncjljx/pX5/zf8A6LpSl2/z+xfpXWV1srbgHxhg2Vtf94z0N0z8uojHHzGmNnbglxdVYdQfD+ZIXcJnT1/6ikA5MzqbqBWpR9fDjpCh/wDfK+tV++yGbVDKpLiVvu8lw8faqYH3ZLq32fKw2MqcPlSPWpZPVSkqr/3/AGei0MMV1/j+DWmetf2chT/TnTnTFoc30mDZmnXlfV+V4zv5EVFy+teuru14vVHWt6kxn3Q21FEp51KydktttAlKlHgJSin+gfhL111HEF6uTarHZPC8dMqY1l15n+dllRTlB/8AFdW0z/8AE7VonTt76St10a6a+Gbc29XbwV63rQ+gSVtgfvFv3ZSQiMyB+MQ20oA2VJUN6SOPPNXJ1H86QzyY4v6Vb/O2A9L9NPWu5xbXeo8qBdVo+Ya6fjgSrqUDfxJDKMIiIxuVSHAof+Ga3K0wbxItzKLhPatzL6AWI0R0SH1oO2oBB0BOP4icH0PFYm11S8VSOj/htCs9ymvvoTIkwmS3YoTqs4UPE1OXN/IOH5JWjIJaaWE6xJ9OfEKV05dzYrTen78+gLlXe+yXAmO2hA86m9WToSMDxFlS1nGkJBSCXDFiWvz/AD/b7iN5Mnf5/g0O5/DlS5DkwT5UZR0q1y3S86SNshKNOgYyAlGAMnPNUUdYfEDo+Xc25diYnQIToWh19RQ4sAcq8yjnBGEj2zvVpZ+IN2vjBk2uIgw0pClqJ1KWScIyedRONjk/lSpVyv7kN+O5bmYrbSg2pasEpdV+JQx/EkZCTjIOpWds1wZvqf0nXifH9eyvsfH2ZIhuPK6MkfMtqHiMtstvutHcpbIC/wC8PoVJxndWxxonTXxEuN50x2OmJEBBQC/IlvNpQ3kbpSE5yvOxIzjjNZ7bbr4Y0J6bdCEOOISHF4WpWkE4QkAb7E5O2U5PrJsdZmKwG02RLUdJGHFKJCgTgnnfggdvaoYssoy5NlMsISjxSCvi/wBA9O/Ea1ri3JsOrQhQZcS1kNHw1AKycZIBJxkAeXJxsfzK+OfR17+D97kdMuueNBBS7BmNDCFI1AqSdhgpJ0nbGeCa/QLrPry43Bl9FsedjlLS1J8Vr91sMhQGdSh5e/OByM184/EyxybpDZb6ohMurVISkOqSHFoHnIAJ3AKz6coz6V7fpcnKVy6PMzReNfSyt/2SfhWfiR10rrPqdbUazWAI8Np/IU+6vUNYB4SNO5IPf0r9DbWYlriBi3MIUQ0ClwgOeIAOCE/iPoRz65r4x+G8FFxszCbc60l1PhvFxpYbUHDhRRtwNlJOxxrOxxvt3Ts3qK0tRoipbUppCMAurIdWg+ZIUAdKiNxnYgjYkECtmnJS0ugQipbbNPmdedYrTKZZsTZEROpCFPlIkN4yrRlAxjPr654qg3f4s/ESFAtT0Hp9p5E5CgUPyB4TZB2AWQCF4P4HMjIOM1YV9fX1L6IkSCmU1jMlfilOjA2WM7qIHIOMjvtUHdJfUtwnpciRYzsVsIQ7HeTqRpXkE8cZx6jcepA87Jkk5d2duOMYraQVe/iH1sq3xIdosyWJ0lwiWuMC061hQAUGyrCjn8Q1A8epTSmV/FmTJabtt6iTgVBa0PJWxKZIP7wFCFedJzto8QjOd8YEPEhSYUhM6eh0yYa0slccBbrIKQAsIV5Xk48qklSSU6dJynBn7rN6ntVmR1G63Au9qjeSQ/A1ZaJGyilW6QRgYVjChtpziq4pSa+RMvBfp0Tk6b8QYEB9V7uUZphrKQ/Lw5GG2xU8lOWj/wA4HvisO+JNotXU05uF1/Gk9HXORhUC5rZEuzXJJH4vFaBO/wDOkOAbBWBWmp+Nbcjp4XH5p9Ecr+UTcPGUj5R8jIbeUAotpUBqSVJcbIBylQCgK0u/QW13GzMqNreQNd4hsW1EqG6hQ1JkTrKSpp1pSSD81AVwdYAzgd8Mcci7/qcLlKD3/YoUG0/ET4Xx2ZIv17tUB86Yl1s13dVbZP8Ah8RlZZJ9lAKHcekknrfqCTqR1XbrD1M2kErN1sbCpBT6h6OGnT9dRNT7TDPTDbN+6Z6jj9Gs3j9y3MZmKunSV4/+H8wUrUwfViY2vT3WkUzdBZoz8e2/EHo6X0RPkJLkK62VsTbPOb/8RDIWco/xRXVpH/hDin9mcVpuvv1+fukB5U+1+fnwxu19VdE2tnQzZr7ZbfLGdNmuabhb1K7kRpYBSfVPi5qw2u19HdQttxLZ1LY1JdUXI+vNolsOq5/dPnwFBW2pLboBO+Cc5o0v4Z3eIwrqa1Ow7tY1r/e3K0vfMwlf/OSAFsK9nUIUKIPT7So+WnQlKwPIo5bUD2z6en9aMZyg+M4/n8AajJXFkr1J8JJkaemXPtEuMtvcyEslsq9yDltwe6SR71YbX1Le+m4jUe1dRT4ZSnA+XkLbSfqgHT+mKT0HKv1sQGod2mRnWdssvqSHWx2WkeVRTwCQcjnirTcDaJ7RdvXTlrffx/7w2FQnFf8AMpryA+5QR645rqhXcXRzttaeygXP4iXaTJKb/ZrJfkcFcyAGn/s/HLbn5lVRr9w+Hl21I+ZvHTEn+V5AucLPprbCH0D/AKHKlb5bejXVusR747ZJaT/7veUfuiecCUyCE/VxtI/xVnnVVmvfTrzSrxbXIzMsaosjyuR5KT3aeQS25/0qPvirJvyLUfBZBYOqbeweorSwzfrXHJLs20PCWwlO4KXkp/es5GQfFQjGahpUxltxLkRzxIz6Q6wvOdSDwDjuOD7iq7Auc22T2rrbJ0mDPY/u5MV5TLyD7LQQofTNXKJ1rB6n1QuurEifIUCtq6W0NwrgV/xa8J8CQSBnzoSokbuAnNFU0a2iPTcSQPN+td+dKttWKVc+nC3Acv3Td1RfLOxgvPtNFmTCzwJUckqa9lgraPZedqgvmlDhX5UrVGWycE3ByVCuKuQ5KzUEZCyTyfrXC8vA1H9awSZcuR4BNBu3RWcBWTUc48cHOMg0Ot8DYqNCjEiu6OAk6qYcuytO6/ao11442P3oVxajvnNFIDJBy6q5Ss0E9clqP4vrvQTjuM0K48rVgHf3pqBYc5c1JUQVUOu7nfJqPccJJyRQznrmtQLJRV4IJGa4L0vVuraoRWonn868nPfB759KNGLAi9rA/EdhRDd7UojzHbmq2kEDbPpmnkagNz3oUayyovRBHnz/AJUQzfzwlWPvVTKlHfPelocUO9BoKdFzRfVZ3X+dSUG9EjAVziqG28vYaqkIMpYWElW2alkviZvVI0Vm66k6teT6U6bko4wTntVSjSlhPlXRyJS+CePSvJzbZzyZ9OtxM4OKLbiii24wwNqIQ0ByMV6sY0dAM3HAxRKGR6U6lApxKR6b06ignGmwKfTgdqRxxXQDTLRh9KxxTyXN85oQJUd+KcAPG9NYKDm3hxmn0vVGpWRTqHPbijZqJJLvvSvEJ
oNCiafHasChwrrhcpONsVwpJ2rGo4tzamVudqcUk02W/WsEbUqmlqp8tkcikFrIx6ULCDa/c10Ek804pgjtSksn0oGG9zzXgDzRCWc7EGlhn2omGUjfinUA80tLO/FOBrA/rWsx5J270rOfrXNGK6AccVtmOgmljJ5pO9dBo2Cjy0NlJKkqJ9AQP1qvXlTbaFFEOOT6uJLn6E4/SrCrOnAqFuNvelJXoKUpQMrWs6UNj1Uf6AZJ7A1n0bSKNcLxOjJUtEvwdOwLbTaPsMJqrX9DyFlfV93uixgLbtbclQkOg8eJqymOk+qklZB8qP4haL1cU2hShZEqEng3B1OHU/8AyU8Nf827noU8VnsxvzrcWCVElRKjkknkk9yfWuDLLgzqxK0DS7rPlRF2yHHjWu3OfihQUeG25/8ANWSXHz7uKV7AcU9aunwGh5lAnYJGNqj3vHDgLSdx6Crl0fFffbL0kYKcYFIpObHklFFTunRDynlO+HnJB3qMldIvlKg4yFNjfYVtcyIl6PqKQMfrVcujKIrROkHIztS5lx6DjfLs+db9MuFhuCY0Jnw0A6nFY/GTwPsP1JqXsfU7siKthTOX98Ecj3qzX7p249T3BFutVrdkyZCiG2mkFS1nk4A9OSeANzgb0ZG6Q6J+FEQT+v7jHm3KSnxGrc0S82fYJQpK5R7eVbUYd317oMY45Z1r+pXmsTplNsvwn6x+JFwU7BghUdLvhKmSFFEZtX8uvBK1438NsKX/AIe9WiU38MfgowREaV1p1MyoKDuhsRYjg/kyFtNEH+LEh70+XVuCL58YZd6teJWLXaENFtEVLiE/uR/CsoShAR/8NtKGh/KTlRi5jXTNnjsXXraEt1+Q0iRb+nULUy/IaUModlKHmjR1DBCdnnQfKEIPiVOMljlWNW/l9L/j8/Yrx5q5uvsgG59SdUfE20OdTfEO+N2PpZqUpKpCm1rZdlAZLMRgqLk6XjGSpainYuuoTTlvli/23/Yvo+yTbR05LfaaeixymTc71JVktIfcASl95WFKS0NMdhIUvThJWqDu0G+fETqGA48hdzuz4TAtFtiNBtqOjfRGjsp8jLSdyQMAAKWok6lVK9V3q39K2l3ozoO6tzngwuDcr6wfI8F48diGrs04oAOPDd1KENpw0n94nuqVzbdeW+39l8Ibg41BLf8AZHOoOoDDca6B6GSw8+6r5OQ5bSZDepwhCokReNT6lnSl6SfM+rDaNLACVhdRTY/TjquimJSZJjqQ7fHoxDglS0HKYbak51Msq5I2de1EZShum7I078PbFH6gUvR1H1BGX+yQBhVutysocm/4XXsKaY/lbDro3U2aN+FNhKro91nLeagRLQvTDkut6mmJKGy8uUU8KbhsJVJIOxc+Vb/4oFRuWaaj5f8AZDpKEXLwv7s1Owybl0zb0Rn1sIk25L7zmtYSymeynMh1auPAha0MatwqStzGfCxRVx62SiNGgwy45pcisoUpGFuvOIS64opPCjqa2OSBoTuQSaBOcRfGLHYXGHrcnqu8Mx3o61lSrbYIKEuiO4TysJdU+8Tut/WVbpNRl460fh9RXKWpsNN2aLJvEpBGzcySsKaYJ9UqfitY9I6hxXROMa4I543dmo3K/THrkq3oKAxHQsueH+JZ/E5v6lWR9APSodK7g6tTjslTqCcqGrIzjGkDsOBjsBWP2T4hymr05JlS+H34obHK16lhIUffKdhUlbviVcH5saAy6y2HXAXHMbhpPJx/MSrtsMdziovDBNsqskqouUtvqIIfUyylCitKWglOBud8AcnAx6DIqjXPou/Xxb8i4MKU2t5Abb1aT5ThOD2ydR29T61sNnv8Ge6IxUkPsoClNkjKc8ZPbij5LMNtOlZHl0qAzjfbBApoKUH9JOUk+zHenfh/cLEkpYzG8AsuJdBxoAGnUfp5fsKsUOF1ZbHkJl+ZCXAFpX5gk4HB7jbH2rT7e1CkIUS4hSXGylQ52P8A3H61E3dxiEEh9f7tKEg5GSMAgH9P61R8mrbE5JaIAIvjkkGM4tpxIUuK+OQQCQk9iMgpIPINS8C/3iHNYU1LQiHLKELZWkHw0uApKQeQArVj/lT3qqSOthBub1tB0rjsPSkYOykobUVAH1GB+YPaohfVnjWxUhpWMJCQrg6jlQSR2IWCPSpPGm7KKckqZoka7XBMhqe2XJCJ4W3udyvQ4S2R2IcZeT7FBxtioEfFu/dOxXeq7K1G8SNJRFmMqT+7lsOoWpsOoPlUlQS4hQIwSPXGE9LdYMS4vUshtIL9hlR+qWUDcltiS25LSB6FlTiv+pZ71TZMJ09d9UfCTA13dE222pZ4M9lan4aT/wA62i0P/wBo966seGMFGUSMpuTakaU309Yp0JPxB+C8JEqFe4jqrv0XOdK0SGWykyG46vxEsqUlRb/vGwpt5sqSrFZnf3LJHjWdMu9SI3Ssl51HSfVroWmV0xPSdTtruJaGtLSSQSU7thQfayhTjYoHwq+MNusNzRZb/eZdqsl0eZdXcY2S/ZJzYIjXNod1MlSkuIwQ6wt5sg+XG8F5PUU7qO19QdLw0dSspQz1p05DeS3FvbCMFm5wXD5UOAOJcYfH92XUpc1R39bfdBRyK1pnPJSg6vRmzdz+I3SXUlxtKUCydWOhPzMMttPWzqdhSdSQ6yQqM84tPmbdSC28CNJS4QV2z4e9WRLrHdY6TkxOmX5rmZnSt1PzHTlzdO37tL5PybxOwDihvsh9BwiouUi0dKwLd0F1vOdvHQ05tyT0n1KzFV81bGy4Q4EtHzaEOgpkwFEKadClNFKseID1l0ze7QpFwmGM7McbQpyZEcDsW7xXQQzPYcAw6hzSpC9gdafMErUoVNyyR6/Pz+jGqL7/AD8+PBqNlV081eFTukrpJ+H3VTK1MPQJcxaIjjgOlTbcpfma3BHgygUdvENEXi4QFXR2w/EPpqT09edlLnWmOllStXC3oJIaeSefEYU3kbgKrNpL0nqzpVjqFTZVdbQWrZeDz4zJGIkw98kJMdwn+JtpR3cqw/D/AKlu8aK30/fUM3ixMqJatty1KTFJ5VGeB8SMfZB0HuhVUTUVv/r+n+CT30al090dMtNvFzjSYt2t4/DPhEqbHs4lQC2Vf4VpH3rtyeZdCmThOocZ4qR6ZCQoTegrlLTJQk5gLWlE1Ke4bKcIko9gAr1R3oS4zOnuoAf2gkWifkpE6IwQyVej8dP4d+VNcd0Gqxiqsk3syvqe2pdBjTXdKUZDL43LPPlIHKPblOcjbIqi22+9R9JOyYlpuK47Dyv95hKSh+JI/wDmx3Appz6lOfQitB+I1j6hsraDOZQuPKSVRpbDgdjyUju24Nle42I7gGsrcaeCipefvVI2gumizpuHQd/0pu9ne6XmEbTbOhUmApXq5DcUXGh7suKHo32oK89NXOwMMXL5iNOtclzREukB7xorzg30heAUODktuBCx3T3qCGRwd6lLD1FdunX33rY834ctAamxJDYeizWxw2+yfK4n0OykndCknenuxGn4PW+83O1XNu92eY/A
nMqKmn46tCkZGFAdik7gpOUkHBBG1TxX011j/eCD0zflfxDDNpnK9/8A9ScP3YJ/8GmVWO2dTIcmdEtOszUIU7IsDrhdeSlO61xHDvJbAySg/vkAHIcAK6rKVhQCk+YKGdtwR/nQYew26Wy52Oe9arxb34M2McPMPo0rRkZBx3BG4IyCDkEjeg1OYqegdUNuW5jp7qiM7crSwnTEUggTLaD/APqzitijO5YWfDP8Phq81R19sz1oDElqW1Pts3UqHPYBDb4T+JJSfM24nI1tK8ySRyCFENfAbI1asjGqhynff86c1ZGP6Uk8n9KBkvI2UZHr9KYcTzjNFEbb7024jI3/AEFZB7I14YJzj2oB3IOyu9STyRk1HPgjYfTmmFBVq83GaaVjtx7Ut09/WhnHsAj86JmdVjPAryCBgcGh1vHkcUtlRUcjv+tawILTscgU4MZxvTCc8in0DjbmsFigATtvStH9KWgd/wBaeQkAb9qVsI0lGOd80+yrSRvgfWvEACkpJ1DHekkrQGibhLUod6mI6SQCRzUPbUZ0g1Y4rOSCobf515OfTOeWmfXX4eBvXU524p4x1E53rqIx7+tesjpOITxtTgaz2p9tj1FPoY9qZBBUtE9qcSyeaL8AelLDQ9KIAMNEVwpNGFv2pJZyeKxgPQTTrbas8U+GfanW2wNsVgiEJIAp1AVtsacQjPankt+1YA2Aa7pJ/On0te1OhjbiiABLavSueGfSjlMikln2rMICpskcVzwj6Ud4XtSS3jtQMCeFtg10MHsKMDOdsU4lk+lYwEGSO1LSyfSjQx7UsMY7VjAXg47Vzw/ajyxkUnwPUZrGAvD24rhRijvAz2pKmN6JgMDbYbVwpJPFF+B7U4iMnT4jmdPYDlR9PYe9ZABERgUF106WwdOcbqPokev6D9DE3tS3mtASENIJUltPAPr7n3P6DarA4hThyQAAMJSBgJHoBUdcYupsiizfuY91M2tK1EJ+tUaWt8uYXsc9q1nqK3JyrKfzrMuoPBirUSQDXneoWzswvQBHYPiIz3POa0SEIkKE24V42yay0XIqUA25gCuX/qWXGtJYaeUVEY2/yqeKaTopONmoSr1HX+CSkp9jTGI96ZVIckJjQWnEsOSVoU4C6r8LLaE+Z55X8LSNzydI3rHulXbtLS1Pvjr6YMpxxuDFZeS1IujjYJcDbispZjtAEvSljQ2AQApflC7p8YGI4ESDcGv3LSo7cyM2plpllX42YTavMy0rfU6vL73KykHRV5Y+S5S6JJ06iaZ1D1xA6RhuWTpOIhiY7+7lyHCh5bf+FxQyhbgPLacstnb96sFacmuNmT1BcVSiZE65T3UjUNTzz7qsBIHKlqOwA3PYVWf9vk364xumOkrcq4XOUpSGI7Kkp2SkqUoqUQlCEJBUpaiEoSkqUQATRbfxUsPSMZy39LXtmdcXklqffmSoIUlQwpiDkBSWcbKfIC3dwkIbOF808eSe3qK/P6l4SjDS2ywu9KN9ESA467DufU7a8tBaUPwbIsfxaTlEqWDxnUyyezqx5K2z0veLzefl7emTdrxcZCnNbr2t2Q8rKluOOLPplS3FnCQCpRAGaasfVQ6nu0Pp6yxnJ0+a4GIsVgDUtWM4A4AABJJwEpBJIAJqx3PrDpq1RZHSXTFyYnuSk+DeLtHUS3LwQflY6uflUqAyrYvqAUfIEA8k3J96ivz+p0Rq9dsr87qdHR1uvlo6Lmomyl25yNd+oWwoJlF5SWvlIOcKbiJC1lThwuQpIJ0thKFV7osRn4k7qXrCKs9L9OJbcnNNr0KuMhzPy1uaVyFvqSrUof3bDbznKU5tMG2ftqQrpy1MNPzrsthmM0pQQFLDmpSlqOyEJQFrUs7JShSjsKjetr30pJiw+mOnJqXenbEXDGfKdBuMpYAkT1pO4LmlKW0ndDLbSedWU9zklKUdLpfn9xuFfSnt9/n8EIx1F1X8S+qRKfjpnXq/TG2GGY6PDbU6spbaZbTw20kaG0p4QhI7A1p3XPUVi6IsQ6YgyG58GyQmpUslOEzEqc8SM2R6S5KfnXBz8tHhoP4SKq/wjlWPpmBfPind2guDbG3rdb0ceM8UJTJKT6lMiPFBHBnLUN2jgKyssdXXe2O9SSRIF46idvF3eIwHI8RgPPHHAToVIAHABAGMCrYoe3HnL9Uv4/7Yk5cnxX6Y/n8FwsMpI6ulN3JxchvpWws26c+pW7kyWpUi4rJ/mKVXFP8A0j2rLOobzNl9OzZMs5n9SX12TJX2IYaDhSPYOy//AMAelW1m6ORvhtMvU94ftHre/PzZBzg6Q5lQ+mpMkfRwVU75DDNs6fiY84bmygMc+JKU2D9xHFTySSWvi/6/9Agne/2/p/2VQvyoVwXNaQNTM75tn3dOFD7BJB+uKnLEhce7PSsKDbCS6x6hOFEAn/CdQA9s9xSJ0ZpwMLaHlZHhEgfxjk/cY/KjoKFeAlg8ueJoPAJwkEfQj9R71H3LdFa0Wfpy9SIM19TrpPjq8xzudIwP6mrI91LOk3FsqdUQ454qyTnASOB6bCqF4etKXmXNWMpI74P+ealY76igKWfMgFJz9KyyNaFlBFohdYTojZUy8UqSyE8nBJRj9M5pF76ueuUlL7T6tD0dSj/hOjP5BSc/c1WSoqZWEngAZ98UNCWY7C1LGUsNuAAjsW1j/Oisj6F9tdg1ymyWQ7MaJ8Zwgtd9LOR4qT/hWpKU/TV2NFO+NGshQw4SHZK5SVJ/iRowkj7qO3rmo1xL0nwigKcccHhhI5J4A/yqWaDRimI24lxEJIiBY4WnWCVD/rUoj2UKCyWM4USXwzuceF8VbSic5pt13lG1T99vl5rfy7mR6DxEn7VF/GFXUUW/2+/2xa418Yt9vuCVnyqTPinwVr9ciTEWT96j5kV1lyTJYUUrSFOtqH8K07pI+4FXz+0IpkdVpvLDASldwuMd0JOxTI8C4o/W4Pj/AKa7MM7xS+zOecUsi+6MD+MfTMRn4k3G72GOqNZerGo/VNoQOGWJ6PHLI/8AlPF9n2LNaT8M+qr/ANQ9MQosOQ6Ou/hxDcl2SQkBblzsTQUX4RSdnXIqFOLQg58SKp9rBDSQWOobO31D8K7VcWMLf6Uusi1LP8QhTdUuMT7JkInJHprHrVJsN4u3T9/t/UHTspdvvNqlNy4khAyWX21ZQoDuM8g7EFSTsTVHnanb6f5/ZirGnGvKPoSR1F0Z1d02vqJ9DiOjeoZjaOpoMVJfkdK3oo0sXaKk+ZbLyE6Vj/ioQpCiXUoUY7pyXcPh7c5fwl68aVcOnZavGQIikveAXkpcauVscOEqQ6EtLKMhDycatK0hSV9OOQLP1JF+IXR1njNdNddxZVum2RwFUaJMISuTa3Rz4aXg06yr8QZdaWk6kHFrj9LWjqO32/o+JKW8w6g3LoOfIUEutBxZLlpfVxguhaU9kSEnGEv7dTne13/P/T/kgo1p9fx/8EWqKz0Z1FDdujTc+xXiItCpsIFUa6WxwhLi287haSArQoBbbrSQQCN5uV05Hs8mRaXnkqeirLY
cSdnU8pWk/wAqklKh7KFB9GOt9Npl9M3tqRJssqQXnYi0gORnsYD7BV/dvAeVQPlWBpX2KbF1XZrlHgWa7RVN3CKgG3iWznw5LCBqjuYO6FBGtpSTulTAB5BOxrmm/gnJ8XQu2xFNQdKhqUNxnYg9iCOD7124Xtu6LUx1gmQl1WA1fIqS5JbxwJDe3zKB/MMOgd1/hoiLqcjoIQUFI334pqZE+abV+9SlQH8QyDXRB10Iyn37qTrL4fufLyosK6WO7grQlz/erXdUDlSFDGFj+ZJQ82djp4qtTrJa+r4r91+HokLkMNKkTOnpCw5OjtpGVOR1AD5xlI3JSA6gfjQR5zZH59x6e+ahrgxLjapqgqZbZJUqNJI4V5TqbdH8LqCFp9SMpNJ6k6ebgsJ616InzV22JIbKlFzw59mkk5bS8pvGMkfu5CMJXjHkXlFWWyb0yrIfSoBaSCDuCODSg7z68VaEuRfig4GG2Y0LrhWdKW0pZjdQq9kjCWZx/lGG3zwEubLpniLbUpt5C0OIJQtCwUqSoHBSoHcEEEEHuDR/YKfyHtSXWXUPsuuNOtLS4242spW2tJylaVAgpUCAQQcg1avFZ6+UVaG2Oq1EqPhpShq9HvhIwluX3wMIe7BLmy6T4w9acQtJGAfpg4x9+1CwbClKIzzkbcYOfp2o2z3xdr8aJIipnWyaUidBWvQHdP4XEK38N5OTocG44IUkqSSpT56pjO3NR1XqMkuzRt/vzQG8gf8AxUj+9H8Q/eDfxKgFupzyKHQ1kpebILahmdAlfO2qaVfKSyjQSU/iadTn928jI1I43CkkpINRedjmjrNf02lb0aXGMy1ztKJ0PXpLiU50uIUfwPIyShfuUnKVKFN3uAm0vMrYlCZAmoL0GWlOgSG84OU/wOJPlWg7pUO4IJ1fALrQIT+tIdUdJ3FNfME+mKS48Mc1jXYO/nGRUdJJwQDnainns8EUA86CaahQN1ROwxQziVYyR+VSGgKO9ILAP32rGI3w1asetPNNq9MUWWEkDG/evBKU4xjbvQMhTSBjI5p0YTya4kD/ANGukgDG+3tQHHElKR+VL8Uf6UG45gf5UwqQpPt7VjIkvFyQO1PtIyRmo2IvWr8WalYaNSxt+dSm6Aybt7RASf0qyRU4QMgfSom3tpUlIO2Km2EpAODkY5rx/UTVnLNn2QI3fG9LSwByOKPDWdjXfA9q9pHWCJa9qcQ1jtRKWRilpa9qYw0Gga94IHaiPD9K6E+oomBvAFc8EHtRWPUV7TmsYG8D2paWB6UQEb8U4lHoKxhlLAG+KcSz7U+hGaeCB2FZGB0sjGRTgb24p4JApWBRRgfQPSvFtOOKfUkDemlHB5rGGVNgHik+ED2p0/XmuZP1oGOJQB2pwJHOBXARSx9ayMeSkU4EjuK4MZ7U4KZGE6ccCuFv2p3avEisAZ0b1wt+1OgiugBW/bvWMNJZTjWsbDtwT7f96SpJUrJH0AGwFPnBPbHb2rmBWAMeGMcULLa1JIxR5x2ph5IxvRMUDqWKQhSgnfFYD8QJSmnVJSe5G1fRXV7rbMVw7bA18w9dXBLs1xsgHJPNcuaHI6MToqMe5STKEdvUSe+ea0O0dKMP2oX7qNhx2CkkR4aXChc91PKAobobB/G4Nx+FPmOUxfRHS0a4uOXm6BaLbFUkPFs4cfcIymO0f51AElXCE5Uf4QrVUQZVyYVNmobbwhLTTLKcNMMpGENIB4SkfckknJJNSUEtlXJ0Yb8QpF3fEuPDWl2RNZbYnS2W/CaLCMKbgxW8ANRGyB5cZcUkE+UAHMYXw86i6xuDdsisqXJeKg23rCAMAqUpS1eVCEgFSlqOlKQSSAK+sFdHRrosNNx1OrUoIQkDKlKOwA9zUb1L0vCtUB+wWfTqkeW5S2z/AO84IIYSf/ASoAn/AMRQCj5UoFTy5JdvoMEul2fKnVdjR0raZXRnRMlT0SekIv16CSly84UFCO2DhTUBCgCGzhT6khx3YNtt0mL0l1DeJrMG3wJMmRJdQxHZYQVOPOLUAlCEjlRJAA96+lLp0kzlXispIT/EeB70iau3fDKKpPhqTf7gyQ4oDCrZEcT+AfyyHUHzHlttWnZS1aef/cyyP7It7Sjr5MjvlnndA2eT0N0lLS9ebk0Y3U98YWFpKCfNa4bg/wDs6SAH3h/frGgfuUDxKuyL708jVhTjgHPYe9aDL6nsKl+GUIaSnGlKQAAKl+ll9Pvok9a3yAzJstkdbaaivDyXS4rBUxEPq2AkvPY4aRp2LqanLJPK+NaGUI41dlVul16j6I6LMZ8OjqrrSGlcsYwq0WJ3Cm2PVMiaAlxfdEUNp/8AtCwM6g9OdZ9XXWJYbS04/PuchqHDZzgOPurCG0k9hqUMnsMntWwXWQeoJki8XKcJ1wnvLlS5DhGt51ZypSsepPA2AwBgAVb/AISdP21m6yeo5wUyzDSqAw6g4Wl55h1chxJ7KZgtS1g9lrYPcUiyvJkWOKpFHFQg5t7KT8VENW61WH4Y9KPrestpZbdQ8E4MltPiBh5X+KQt2ZOIPCZsdP8AwhhMBqZaugbpdsK1sxH7I2rO4cmOtrdIHr8rHfTn/wCJUzNkIv8AepVzkRm2npjqni2gYS1n8LY9EoSEoSPRIqaVZ3Hrb0v0wiOP/wBNXBc130Ulx5MRr8kNPH6OVOfq5ZcrklqqX8IKwqEEn2V34kWmZb4fSXSYcPi2S0pMgDgvO6S59fOlz/zVH3dt16PY5KzhUazRmj/zKClE/wDmLh+oNTfxIucm59Um74KkvRo7gI7a0eLj83TQb4VLtdvdBBHhuxT2wpDhcb/R0j86TJNuckuv8GikoxsrjbbscuNrRqacAIPoR/6NGIwlppCtjpOPoVGjGmQtJCkg5Hc02tpPiBKkqCUjSPbc/wCtJGLXY7kvA9blp1lKzgnY78+/1qRcbU0lQVwRUMplyM4M4KTuDmpREoyImhZyUjGfan4iWIalJSlRPCjQk6UhMZ9tPLmlIH3yf6UPJWppGM8cUKp0OFRUMkaefvU6HQbHf0FCckLUPL6j3/0qWiRw1EW+BslaQcfyqSQf6D8qqkF5b1y1Ek7irrBWh+JIjkd0H9f+9IuxpKhtppMiOWiNSzhIGOcnH+dWX4otKu1luc3UpTiZlnmp1ejkebHJ/OM2PsKjLDD8S9QGFfh+aaW4rt4aFBayfYJSo/aj7m987Y5Dbp837Aschz1SpyS84M++JI/OuvDKoNP80c+SNyVFY+GWbw7e+iP4uorRIjx0ngTo3++RdvUrYW2P/nEd6rrdiYmFEqOAlZAUD6gjI/MVPWB93pnqCD1TERiRaZbM4pH8RZWHB+YSR96kOubVG6d6qvVgguYbgzXUxVdlx1HWyfu2pB+9GL5wX2/P8itcZP7/AJ/gmPhXdrZaLk5091FLEax3t6MmS8oZECY0vMWdj/4alKQvHLLro7CrAY8mMzfegZzK491s8iVPhxj+JL7IxMjjG51NNh1OOVR8j8VYO/cr4mc0Qgqw6hK0ncKBUAQR3rXGbldOtrBb+v
[... base64-encoded image data omitted ...]" - } - ] -} \ No newline at end of file diff --git a/samples/contrib/pytorch-samples/contributing.md b/samples/contrib/pytorch-samples/contributing.md deleted file mode 100644 index 89d3338292c..00000000000 --- a/samples/contrib/pytorch-samples/contributing.md +++ /dev/null @@ -1,36 +0,0 @@ -# Contributing - -## Adding New Example - -### Steps for adding new examples: - -1. Create new example - - 1. Copy the new example to `pipelines/samples/contrib/pytorch-samples/` (Ex: `pipelines/samples/contrib/pytorch-samples/iris`) - 2. Add YAML files for all components in the pipeline - 3. Create a pipeline file (Ex: `pipelines/samples/contrib/pytorch-samples/iris/pipeline.py`) - -2. Build image and compile pipeline - - ```./build.sh ``` - - For example: - - ```./build.sh cifar10 johnsmith``` - - The build script performs the following actions: - - 1. Bundling the code changes into a docker image - 2. Pushing the docker image to Docker Hub - 3. Changing the image tag in component.yaml - 4. Running the `pipeline.py` file to generate a YAML file which can be used to invoke the pipeline. - -3. Upload pipeline and create a run - - 1. Create an Experiment from `kubeflow dashboard -> Experiments -> Create Experiment`. - 2. Upload the generated pipeline to `kubeflow dashboard -> Pipelines -> Upload Pipeline` - 3.
Create run from `kubeflow dashboard -> Pipelines -> {Pipeline Name} -> Create Run` - - **Pipeline params can be added while creating a run.** - - Refer: [Kubeflow Pipelines Quickstart](https://www.kubeflow.org/docs/components/pipelines/pipelines-quickstart/) diff --git a/samples/contrib/pytorch-samples/gen_image_timestamp.py b/samples/contrib/pytorch-samples/gen_image_timestamp.py deleted file mode 100644 index 59212489adc..00000000000 --- a/samples/contrib/pytorch-samples/gen_image_timestamp.py +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env/python3 -# Copyright (c) Facebook, Inc. and its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Utility to generate timestamp""" -from datetime import datetime - -dateTimeObj = datetime.now() -timestampStr = dateTimeObj.strftime("%d-%m-%Y-%H-%M-%S.%f") -print(timestampStr, end="") diff --git a/samples/contrib/pytorch-samples/image.png b/samples/contrib/pytorch-samples/image.png deleted file mode 100644 index 91a0251b81e..00000000000 Binary files a/samples/contrib/pytorch-samples/image.png and /dev/null differ diff --git a/samples/contrib/pytorch-samples/install-dependencies.sh b/samples/contrib/pytorch-samples/install-dependencies.sh deleted file mode 100755 index 9b91d651f4c..00000000000 --- a/samples/contrib/pytorch-samples/install-dependencies.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash - -# Install Yarn -conda install -c conda-forge yarn -y - -# Install Pip Packages -pip install captum torchvision matplotlib pillow pytorch-lightning ipywidgets minio - -pip install Werkzeug==2.0.0 flask flask-compress - -# Install Jupyter Notebook Widgets -jupyter nbextension install --py --symlink --sys-prefix captum.insights.attr_vis.widget - -# Enable Jupyter Notebook Extensions -jupyter nbextension enable --py widgetsnbextension -jupyter nbextension enable captum.insights.attr_vis.widget --py --sys-prefix diff --git a/samples/contrib/pytorch-samples/local_build.md b/samples/contrib/pytorch-samples/local_build.md deleted file mode 100644 index 4333f6aea9f..00000000000 --- a/samples/contrib/pytorch-samples/local_build.md +++ /dev/null @@ -1,46 +0,0 @@ -# Compiling and Running the pipeline by uploading to Kubeflow Pipelines - -This covers instructions for building the Bert and Cifar10 example pipelines locally using the KFP SDK, uploading the compiled file to Kubeflow Pipelines, and starting a run from that pipeline. - -## Prerequisites - -[KFP Python SDK](https://github.com/kubeflow/pipelines/tree/master/sdk/python) - -### Generating component.yaml from templates - -Follow the readme file for generating component.yaml files from templates - -[generate component.yaml from templates](utils/template-generation.md) - -### Building the pipeline - -Run the commands below to build the pipelines for the existing Cifar 10 and Bert examples - -`python cifar10/pipeline.py` - -or - -`python bert/pipeline.py` - -The above script generates a YAML file which can be uploaded to KFP to invoke a run.
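The sketch below illustrates, in broad strokes, the compile step that these `pipeline.py` files perform with the KFP SDK. It is a minimal, hypothetical example rather than the actual sample code: the component file path, the `epochs` input, and the pipeline name are placeholders, and the KFP v1 SDK is assumed.

```python
# Minimal sketch of the compile step performed by the sample pipeline.py files.
# Assumes the KFP v1 SDK; the component path and the "epochs" input are illustrative only.
import kfp
from kfp import components, dsl

# Hypothetical component definition generated from the component.yaml templates.
train_op = components.load_component_from_file("yaml/train_component.yaml")


@dsl.pipeline(name="sample-train", description="Illustrative training pipeline")
def sample_pipeline(epochs: int = 1):
    train_op(epochs=epochs)


if __name__ == "__main__":
    # Writes the YAML file that is later uploaded through the Kubeflow dashboard.
    kfp.compiler.Compiler().compile(sample_pipeline, "sample_pipeline.yaml")
```

Running such a script with `python pipeline.py` produces the YAML package referred to above.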
Create an Experiment from `kubeflow dashboard -> Experiments -> Create Experiment`. - 2. Upload the generated pipeline to `kubeflow dashboard -> Pipelines -> Upload Pipeline` - 3. Create run from `kubeflow dashboard -> Pipelines -> {Pipeline Name} -> Create Run` - - **Pipeline params can be added while creating a run.** - - Refer: [Kubeflow Pipelines Quickstart](https://www.kubeflow.org/docs/components/pipelines/pipelines-quickstart/) - - 4. Click on the visualization tab, select the custom tensorboard image from the dropdown (examples screenshot shown below) and click `Start Tensorboard`. Tensoboard UI will be loaded with the run details. - - ![](screenshots/tensorboard.png) - - -**For testing any code changes or adding new examples, use the build script** - - Refer: [Creating New examples](README.md##Adding-new-example) - - diff --git a/samples/contrib/pytorch-samples/mino-secret.yaml b/samples/contrib/pytorch-samples/mino-secret.yaml deleted file mode 100644 index a0bc2abed7f..00000000000 --- a/samples/contrib/pytorch-samples/mino-secret.yaml +++ /dev/null @@ -1,20 +0,0 @@ -apiVersion: v1 -kind: Secret -metadata: - name: mysecret - annotations: - serving.kserve.io/s3-endpoint: minio-service.kubeflow:9000 # replace with your s3 endpoint - serving.kserve.io/s3-usehttps: "0" # by default 1, for testing with minio you need to set to 0 - serving.kserve.io/s3-region: "minio" # replace with the region the bucket is created in - serving.kserve.io/s3-useanoncredential: "false" # omitting this is the same as false, if true will ignore credential provided and use anonymous credentials -type: Opaque -data: - AWS_ACCESS_KEY_ID: # replace with your base64 encoded minio credential - AWS_SECRET_ACCESS_KEY: # replace with your base64 encoded minio credential ---- -apiVersion: v1 -kind: ServiceAccount -metadata: - name: sa -secrets: - - name: mysecret diff --git a/samples/contrib/pytorch-samples/package.json b/samples/contrib/pytorch-samples/package.json deleted file mode 100644 index 14be688cc98..00000000000 --- a/samples/contrib/pytorch-samples/package.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "name": "pytorch-samples", - "version": "1.0.0", - "description": "This folder contains different PyTorch Kubeflow pipeline examples using the PyTorch KFP Components SDK.", - "main": "index.js", - "dependencies": { - "npm": "^8.11.0", - "yarn": "^1.22.10" - }, - "devDependencies": {}, - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "keywords": [], - "author": "", - "license": "ISC" -} diff --git a/samples/contrib/pytorch-samples/prerequisites.md b/samples/contrib/pytorch-samples/prerequisites.md deleted file mode 100644 index 89fe66cca4f..00000000000 --- a/samples/contrib/pytorch-samples/prerequisites.md +++ /dev/null @@ -1,65 +0,0 @@ -# Prerequisites - -## Below are the prerequisites to be satisfied for running the samples. - -For running the samples you will need a cluster with Kubeflow 1.4.xxx (or later) installed, -refer https://github.com/kubeflow/manifests for details. 
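Before applying the secrets below, it can be worth confirming that the Kubeflow Pipelines API in the cluster is reachable from wherever you run the samples. The following is an editor's sketch using the KFP SDK; the port-forwarded host URL is an assumption and should be replaced with your own endpoint (or omitted when running from inside the cluster).

```python
# Minimal reachability check (editor's sketch, not part of the original samples).
# The host below assumes `kubectl port-forward svc/ml-pipeline -n kubeflow 8888:8888`;
# adjust it to however your cluster exposes the KFP API.
import kfp

client = kfp.Client(host="http://localhost:8888")
print(client.list_experiments(page_size=5))
```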
- -### Add Minio secret for KServe - -Apply below secret and service account for KServe to access minio server - -minio-secret.yaml - -```yaml -apiVersion: v1 -kind: Secret -metadata: - name: mysecret - annotations: - serving.kserve.io/s3-endpoint: minio-service.kubeflow:9000 # replace with your s3 endpoint - serving.kserve.io/s3-usehttps: "0" # by default 1, for testing with minio you need to set to 0 - serving.kserve.io/s3-region: "minio" # replace with the region the bucket is created in - serving.kserve.io/s3-useanoncredential: "false" # omitting this is the same as false, if true will ignore credential provided and use anonymous credentials -type: Opaque -data: - AWS_ACCESS_KEY_ID: # replace with your base64 encoded minio credential - AWS_SECRET_ACCESS_KEY: # replace with your base64 encoded minio credential ---- -apiVersion: v1 -kind: ServiceAccount -metadata: - name: sa -secrets: - - name: mysecret -``` - -Run the following command to set the secrets - -```Kubectl apply -f minio-secret.yaml -n kubeflow-user-example-com``` - -### Disable sidecar injection - -Run the following command to disable sidecar injection - -```kubectl label namespace kubeflow-user-example-com istio-injection=disabled --overwrite``` - -## Migrate to KServe 0.8.0 - -Refer: https://kserve.github.io/website/admin/migration/#migrating-from-kubeflow-based-kfserving - -Note: Install KServe 0.8.0 - -### Modify KServe predictor image - -Edit inferenceservice-config configmap - -```kubectl edit cm inferenceservice-config -n kubeflow``` - -Update the following keys under `predictors -> pytorch` block - -```yaml -"image": "pytorch/torchserve-kfs", -"defaultImageVersion": "0.5.1", -"defaultGpuImageVersion": "0.5.1-gpu", -``` diff --git a/samples/contrib/pytorch-samples/requirements.txt b/samples/contrib/pytorch-samples/requirements.txt deleted file mode 100644 index b7f5551d3e2..00000000000 --- a/samples/contrib/pytorch-samples/requirements.txt +++ /dev/null @@ -1,17 +0,0 @@ -boto3 -image -matplotlib -pyarrow -sklearn -transformers -torchdata -webdataset -pandas -s3fs -wget -torch-model-archiver -minio -kfp -tensorboard -torchmetrics -pytorch-lightning diff --git a/samples/contrib/pytorch-samples/screenshots/ax-best-parameters.png b/samples/contrib/pytorch-samples/screenshots/ax-best-parameters.png deleted file mode 100644 index 78cd0285fc0..00000000000 Binary files a/samples/contrib/pytorch-samples/screenshots/ax-best-parameters.png and /dev/null differ diff --git a/samples/contrib/pytorch-samples/screenshots/ax-complete-trials.png b/samples/contrib/pytorch-samples/screenshots/ax-complete-trials.png deleted file mode 100644 index f230fa7b8ab..00000000000 Binary files a/samples/contrib/pytorch-samples/screenshots/ax-complete-trials.png and /dev/null differ diff --git a/samples/contrib/pytorch-samples/screenshots/ax-hpo-pipeline.png b/samples/contrib/pytorch-samples/screenshots/ax-hpo-pipeline.png deleted file mode 100644 index d8b6e585b06..00000000000 Binary files a/samples/contrib/pytorch-samples/screenshots/ax-hpo-pipeline.png and /dev/null differ diff --git a/samples/contrib/pytorch-samples/screenshots/tensorboard.png b/samples/contrib/pytorch-samples/screenshots/tensorboard.png deleted file mode 100644 index 886269e403d..00000000000 Binary files a/samples/contrib/pytorch-samples/screenshots/tensorboard.png and /dev/null differ diff --git a/samples/contrib/pytorch-samples/utils/generate_templates.py b/samples/contrib/pytorch-samples/utils/generate_templates.py deleted file mode 100644 index 82a503b3ef9..00000000000 --- 
a/samples/contrib/pytorch-samples/utils/generate_templates.py +++ /dev/null @@ -1,120 +0,0 @@ -#!/usr/bin/env/python3 -# Copyright (c) Facebook, Inc. and its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Generate component.yaml from templates""" -import json -import os -import shutil -import sys - -import yaml - -CURRENT_FOLDER = os.path.dirname(os.path.abspath(__file__)) - -PIPELINES_HOME = os.path.join(CURRENT_FOLDER.split("pipelines")[0], "pipelines") - -TEMPLATE_PATH = os.path.join( - PIPELINES_HOME, "components/PyTorch/pytorch-kfp-components/templates" -) - -OUTPUT_YAML_FOLDER = "yaml" - - -def create_output_folder(): - """Removes the `yaml` folder and recreates it""" - if os.path.exists(OUTPUT_YAML_FOLDER): - shutil.rmtree(OUTPUT_YAML_FOLDER) - - os.mkdir(OUTPUT_YAML_FOLDER) - - -def get_templates_list(): - """Get the list of template files from `templates` directory""" - assert os.path.exists(TEMPLATE_PATH) - templates_list = os.listdir(TEMPLATE_PATH) - return templates_list - - -def read_template(template_path: str): - """Read the `componanent.yaml` template""" - with open(template_path, "r") as stream: - try: - template_dict = yaml.safe_load(stream) - except yaml.YAMLError as exc: - print(exc) - - return template_dict - - -def replace_keys_in_template(template_dict: dict, mapping: dict): - """Replace the keys, values in `component.yaml` based on `mapping` dict""" - - # Sample mapping will be as below - # { "implementation.container.image" : "image_name" } - for nested_key, value in mapping.items(): - - # parse through each nested key - - keys = nested_key.split(".") - accessable = template_dict - for k in keys[:-1]: - accessable = accessable[k] - accessable[keys[-1]] = value - - return template_dict - - -def write_to_yaml_file(template_dict: dict, yaml_path: str): - """Write yaml output into file""" - with open(yaml_path, "w") as pointer: - yaml.dump(template_dict, pointer) - - -def generate_component_yaml(mapping_template_path: str): - """Method to generate component.yaml based on the template""" - mapping: dict = {} - if os.path.exists(mapping_template_path): - with open(mapping_template_path) as pointer: - mapping = json.load(pointer) - create_output_folder() - template_list = get_templates_list() - - for template_name in template_list: - print("Processing {}".format(template_name)) - - # if the template name is not present in the mapping dictionary - # There is no change in the template, we can simply copy the template - # into the output yaml folder. 
- src = os.path.join(TEMPLATE_PATH, template_name) - dest = os.path.join(OUTPUT_YAML_FOLDER, template_name) - if not mapping or template_name not in mapping: - shutil.copy(src, dest) - else: - # if the mapping is specified, replace the key value pairs - # and then save the file - template_dict = read_template(template_path=src) - template_dict = replace_keys_in_template( - template_dict=template_dict, mapping=mapping[template_name] - ) - write_to_yaml_file(template_dict=template_dict, yaml_path=dest) - - -if __name__ == "__main__": - if len(sys.argv) != 2: - raise Exception( - "\n\nUsage: " - "python utils/generate_templates.py " - "cifar10/template_mapping.json\n\n" - ) - input_template_path = sys.argv[1] - generate_component_yaml(mapping_template_path=input_template_path) diff --git a/samples/contrib/pytorch-samples/utils/template-generation.md b/samples/contrib/pytorch-samples/utils/template-generation.md deleted file mode 100644 index 7def111fb59..00000000000 --- a/samples/contrib/pytorch-samples/utils/template-generation.md +++ /dev/null @@ -1,47 +0,0 @@ -## PyTorch component.yaml templates - -The templates for generating `component.yaml` are placed under `components/PyTorch/templates`. - -[Link to component.yaml templates](../../../../components/PyTorch/templates) - - -## component.yaml generation - -There are two different ways to generate `component.yaml` files for all the components. - -### 1. Generate templates using utility - -The following utility has been created to generate templates dynamically during runtime. -If there are no mapping scecified, the utility copies the templates into output directory (`yaml` folder) -If the mappings are specified, the utility replaces the key, value pairs and generates `component.yaml` into `yaml` folder. - -Run the following command to generate the templates - -`python utils/generate_templates.py cifar10/template_mapping.json` - -Sample mapping file is shown as below - -``` -{ - "minio_component.yaml": { - "implementation.container.image": "public.ecr.aws/pytorch-samples/kfp_samples:latest - } - -} -``` - -The above mentioned mapping will replace the image name in `minio_component.yaml` - - -### 2. Manually editing the templates - -When there are more changes in the templates and new key value pairs has to be introduced, -it would be easier to manually edit the templates placed under `components/PyTorch/templates` - -Once the templates are manually edited, simply run the template generation script. - -`python utils/generate_templates.py cifar10/template_mapping.json` - -The script will copy over the edited templates to output folder name `yaml` under `pytorch-samples` - - \ No newline at end of file diff --git a/samples/contrib/pytorch-samples/viz1.png b/samples/contrib/pytorch-samples/viz1.png deleted file mode 100644 index 8833d9a0140..00000000000 Binary files a/samples/contrib/pytorch-samples/viz1.png and /dev/null differ diff --git a/samples/contrib/seldon/README.md b/samples/contrib/seldon/README.md deleted file mode 100644 index 03863619163..00000000000 --- a/samples/contrib/seldon/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# Seldon Samples - -[Seldon](https://github.com/SeldonIO/seldon-core) is a model serving solution that supports multiple deployment strategies and provides out of the box [observability](https://docs.seldon.io/projects/seldon-core/en/latest/analytics/analytics.html). These examples are an overview of how seldon can be used with pipelines: - -- iris_storagebucket.py is the simplest case and best place to start. 
It shows how seldon can [serve a packaged model from a storage bucket URI](https://docs.seldon.io/projects/seldon-core/en/latest/servers/overview.html). -- mabdeploy_seldon.py shows how components can be assembled into [inference graphs](https://docs.seldon.io/projects/seldon-core/en/latest/analytics/routers.html) such as [multi-armed bandits](https://docs.seldon.io/projects/seldon-core/en/latest/examples/helm_examples.html#Serve-Multi-Armed-Bandit). -- mnist_tf.py shows how seldon can be used with [custom-built serving images](https://docs.seldon.io/projects/seldon-core/en/latest/workflow/README.html) where a model is baked into the docker image. -- mnist_tf_volume.py shows a model being stored in a volume and served using a serving image. - -See the seldon docs for other uses such as [autoscaling](https://docs.seldon.io/projects/seldon-core/en/latest/examples/autoscaling_example.html), [canaries](https://docs.seldon.io/projects/seldon-core/en/latest/examples/istio_canary.html) and [tracing](https://docs.seldon.io/projects/seldon-core/en/latest/examples/tmpl_model_tracing.html) \ No newline at end of file diff --git a/samples/contrib/seldon/iris_storagebucket.py b/samples/contrib/seldon/iris_storagebucket.py deleted file mode 100644 index 143777250ce..00000000000 --- a/samples/contrib/seldon/iris_storagebucket.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
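The README above notes that this sample simply serves a packaged sklearn iris model from a storage bucket URI. Once the SeldonDeployment created by the pipeline below reports `status.state == Available`, the model can be queried over Seldon's REST protocol. The snippet is an editor's sketch: the ingress host, namespace and URL prefix are assumptions that depend on how Seldon is exposed in your cluster (Ambassador or Istio gateway).

```python
# Editor's sketch (not part of the original sample): query the deployed
# "sklearn" SeldonDeployment with one iris sample. Host, namespace and path
# prefix are assumptions; adjust them to your Seldon ingress setup.
import requests

INGRESS = "http://localhost:8003"  # assumed port-forward to the Seldon gateway
URL = f"{INGRESS}/seldon/kubeflow/sklearn/api/v0.1/predictions"

payload = {"data": {"ndarray": [[5.1, 3.5, 1.4, 0.2]]}}
print(requests.post(URL, json=payload).json())
```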
- - -import json -import kfp.dsl as dsl -from string import Template - -@dsl.pipeline( - name="Deploy example SKLearn Iris", - description="SKLearn Iris simple deployment example" -) -def iris_storagebucket(bucket='gs://seldon-models/sklearn/iris'): - -#simple serving of an iris sklearn model based on https://docs.seldon.io/projects/seldon-core/en/latest/servers/overview.html -#requires seldon 0.3.2 or higher - sklearnjson_template = Template(""" -{ - "apiVersion": "machinelearning.seldon.io/v1alpha2", - "kind": "SeldonDeployment", - "metadata": { - "name": "sklearn" - }, - "spec": { - "name": "iris", - "predictors": [ - { - "graph": { - "children": [], - "implementation": "SKLEARN_SERVER", - "modelUri": "$bucket", - "name": "classifier" - }, - "name": "default", - "replicas": 1 - } - ] - } -} -""") - - sklearnjson = sklearnjson_template.substitute({ 'bucket': str(bucket)}) - - sklearndeployment = json.loads(sklearnjson) - - deploy = dsl.ResourceOp( - name="deploy", - k8s_resource=sklearndeployment, - action="apply", - success_condition='status.state == Available' - ) - - -if __name__ == "__main__": - import kfp.compiler as compiler - compiler.Compiler().compile(iris_storagebucket, __file__ + ".tar.gz") diff --git a/samples/contrib/seldon/mabdeploy_seldon.py b/samples/contrib/seldon/mabdeploy_seldon.py deleted file mode 100644 index e0367f6792f..00000000000 --- a/samples/contrib/seldon/mabdeploy_seldon.py +++ /dev/null @@ -1,152 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
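As an alternative to compiling the `iris_storagebucket` pipeline above to a `.tar.gz` and uploading it through the UI, the same pipeline function can be submitted directly with the KFP SDK. This is an editor's sketch under the assumption that a KFP API server is reachable at the given host and that the script runs from the `samples/contrib/seldon` directory.

```python
# Editor's sketch (not part of the original samples): submit the
# iris_storagebucket pipeline defined above straight from the SDK.
# The host is an assumption; replace it with your KFP endpoint.
import kfp
from iris_storagebucket import iris_storagebucket  # assumes the samples directory is on the path

client = kfp.Client(host="http://localhost:8888")
client.create_run_from_pipeline_func(
    iris_storagebucket,
    arguments={"bucket": "gs://seldon-models/sklearn/iris"},
)
```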
- - -import json -import kfp.dsl as dsl -from string import Template - -@dsl.pipeline( - name="Deploy example MAB", - description="Multi-armed bandit example" -) -def mabdeploy_seldon(docker_image1='seldonio/mock_classifier:1.0', - docker_image2='seldonio/mock_classifier:1.0', - mab_router_image='seldonio/mab_epsilon_greedy:1.1'): - -#serve two models load balanced as bandit as per https://github.com/SeldonIO/seldon-core/blob/master/notebooks/helm_examples.ipynb -#in this example no volumes or buckets required as the models are baked into images -#seldon can also be used with volumes - see seldon tf mnist example - mabjson_template = Template(""" -{ - "apiVersion": "machinelearning.seldon.io/v1alpha2", - "kind": "SeldonDeployment", - "metadata": { - "labels": { - "app": "seldon" - }, - "name": "mnist-classifier-mab" - }, - "spec": { - "name": "mnist-classifier-mab", - "predictors": [ - { - "name": "abtest", - "replicas": 1, - "componentSpecs": [{ - "spec": { - "containers": [ - { - "image": "$image1", - "imagePullPolicy": "IfNotPresent", - "name": "classifier-1", - "resources": { - "requests": { - "memory": "1Mi" - } - } - }], - "terminationGracePeriodSeconds": 20 - }}, - { - "metadata":{ - "labels":{ - "version":"v2" - } - }, - "spec":{ - "containers":[ - { - "image": "$image2", - "imagePullPolicy": "IfNotPresent", - "name": "classifier-2", - "resources": { - "requests": { - "memory": "1Mi" - } - } - } - ], - "terminationGracePeriodSeconds": 20 - } - }, - { - "spec":{ - "containers": [{ - "image": "$router", - "name": "eg-router" - }], - "terminationGracePeriodSeconds": 20 - }} - ], - "graph": { - "name": "eg-router", - "type":"ROUTER", - "parameters": [ - { - "name": "n_branches", - "value": "2", - "type": "INT" - }, - { - "name": "epsilon", - "value": "0.2", - "type": "FLOAT" - }, - { - "name": "verbose", - "value": "1", - "type": "BOOL" - } - ], - "children": [ - { - "name": "classifier-1", - "endpoint":{ - "type":"REST" - }, - "type":"MODEL", - "children":[] - }, - { - "name": "classifier-2", - "endpoint":{ - "type":"REST" - }, - "type":"MODEL", - "children":[] - } - ] - } - } - ] - } -} -""") - - mabjson = mabjson_template.substitute({ 'image1': str(docker_image1),'image2': str(docker_image2),'router': str(mab_router_image)}) - - mabdeployment = json.loads(mabjson) - - deploy = dsl.ResourceOp( - name="deploy", - k8s_resource=mabdeployment, - action="apply", - success_condition='status.state == Available' - ) - - -if __name__ == "__main__": - import kfp.compiler as compiler - compiler.Compiler().compile(mabdeploy_seldon, __file__ + ".tar.gz") diff --git a/samples/contrib/seldon/mnist_tf.py b/samples/contrib/seldon/mnist_tf.py deleted file mode 100644 index ae9a8fbbd6c..00000000000 --- a/samples/contrib/seldon/mnist_tf.py +++ /dev/null @@ -1,234 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
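The multi-armed bandit deployment above routes traffic between the two classifiers with Seldon's `mab_epsilon_greedy` router (epsilon = 0.2). As a quick intuition for what such a router does, here is a minimal, self-contained epsilon-greedy sketch; it is illustrative only and is not Seldon's implementation.

```python
# Illustrative epsilon-greedy branch selection (editor's sketch, not Seldon's code).
import random

def choose_branch(successes, requests, epsilon=0.2):
    """Pick a branch index: explore with probability epsilon, else exploit."""
    if random.random() < epsilon:
        return random.randrange(len(successes))           # explore a random branch
    rates = [s / r if r else 0.0 for s, r in zip(successes, requests)]
    return max(range(len(rates)), key=rates.__getitem__)  # exploit best observed rate

# Example: branch 1 has the better observed reward rate, so it is usually chosen.
print(choose_branch(successes=[40, 55], requests=[100, 100]))
```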
- -from kubernetes import client as k8s_client -import kfp.dsl as dsl -import json -from string import Template - -@dsl.pipeline( - name="Seldon MNIST TF", - description="Example of training and serving seldon MNIST TF model. Requires docker secret as per kubeflow/example-seldon. Simpler version is mnist_tf_nopush.py" -) - -#Example derived from https://github.com/kubeflow/example-seldon -#This example is TF but R and SKLearn flows are similar - see kubeflow/example-seldon -#push access needed to chosen docker repo - see note below on secret -#requires seldon v0.3.0 or higher -def mnist_tf(docker_secret='docker-config', - training_repo='https://github.com/kubeflow/example-seldon.git', - training_branch='master', - training_files='./example-seldon/models/tf_mnist/train/*', - docker_repo_training='seldonio/deepmnistclassifier_trainer', - docker_tag_training='0.3', - serving_repo='https://github.com/kubeflow/example-seldon.git', - serving_branch='master', - serving_files='./example-seldon/models/tf_mnist/runtime/*', - docker_repo_serving='seldonio/deepmnistclassifier_runtime', - docker_tag_serving='0.3'): - -#will be pushing image so need docker secret -#create from local with `kubectl create secret generic docker-config --from-file=config.json=${DOCKERHOME}/config.json --type=kubernetes.io/config` - secret = k8s_client.V1Volume( - name="docker-config-secret", - secret=k8s_client.V1SecretVolumeSource(secret_name=docker_secret) - ) - -#use volume for storing model - modelvolop = dsl.VolumeOp( - name="modelpvc", - resource_name="modelpvc", - size="50Mi", - modes=dsl.VOLUME_MODE_RWO - ) -#and another as working directory between steps - wkdirop = dsl.VolumeOp( - name="wkdirpvc", - resource_name="wkdirpvc", - size="50Mi", - modes=dsl.VOLUME_MODE_RWO - ) - -#clone the training code and move to workspace dir as kaniko (next step) expects that - clone = dsl.ContainerOp( - name="clone", - image="alpine/git:latest", - command=["sh", "-c"], - arguments=["git clone --depth 1 --branch "+str(training_branch)+" "+str(training_repo)+"; cp "+str(training_files)+" /workspace; ls /workspace/;"], - pvolumes={"/workspace": wkdirop.volume} - ) - -#build and push image for training - build = dsl.ContainerOp( - name="build", - image="gcr.io/kaniko-project/executor:latest", - arguments=["--dockerfile","Dockerfile","--destination",str(docker_repo_training)+":"+str(docker_tag_training)], - pvolumes={"/workspace": clone.pvolume,"/root/.docker/": secret} - ) - - tfjobjson_template = Template(""" -{ - "apiVersion": "kubeflow.org/v1beta1", - "kind": "TFJob", - "metadata": { - "name": "mnist-train-{{workflow.uid}}", - "ownerReferences": [ - { - "apiVersion": "argoproj.io/v1alpha1", - "kind": "Workflow", - "controller": true, - "name": "{{workflow.name}}", - "uid": "{{workflow.uid}}" - } - ] - }, - "spec": { - "tfReplicaSpecs": { - "Worker": { - "replicas": 1, - "template": { - "spec": { - "containers": [ - { - "image": "$dockerrepotraining:$dockertagtraining", - "name": "tensorflow", - "volumeMounts": [ - { - "mountPath": "/data", - "name": "persistent-storage" - } - ] - } - ], - "restartPolicy": "OnFailure", - "volumes": [ - { - "name": "persistent-storage", - "persistentVolumeClaim": { - "claimName": "$modelpvc" - } - } - ] - } - } - } - } - } -} -""") - - tfjobjson = tfjobjson_template.substitute({ 'dockerrepotraining': str(docker_repo_training),'dockertagtraining': str(docker_tag_training),'modelpvc': modelvolop.outputs["name"]}) - - tfjob = json.loads(tfjobjson) - - train = dsl.ResourceOp( - name="train", - 
k8s_resource=tfjob, - success_condition='status.replicaStatuses.Worker.succeeded == 1' - ).after(build) - -#prepare the serving code - clone_serving = dsl.ContainerOp( - name="clone_serving", - image="alpine/git:latest", - command=["sh", "-c"], - arguments=["rm -rf /workspace/*; git clone --depth 1 --branch "+str(serving_branch)+" "+str(serving_repo)+"; cp "+str(serving_files)+" /workspace; ls /workspace/;"], - pvolumes={"/workspace": wkdirop.volume} - ).after(train) - - build_serving = dsl.ContainerOp( - name="build_serving", - image="gcr.io/kaniko-project/executor:latest", - arguments=["--dockerfile","Dockerfile","--destination",str(docker_repo_serving)+":"+str(docker_tag_serving)], - pvolumes={"/workspace": clone_serving.pvolume,"/root/.docker/": secret} - ).after(clone_serving) - - seldon_serving_json_template = Template(""" -{ - "apiVersion": "machinelearning.seldon.io/v1alpha2", - "kind": "SeldonDeployment", - "metadata": { - "labels": { - "app": "seldon" - }, - "name": "mnist-classifier" - }, - "spec": { - "annotations": { - "deployment_version": "v1", - "project_name": "MNIST Example" - }, - "name": "mnist-classifier", - "predictors": [ - { - "annotations": { - "predictor_version": "v1" - }, - "componentSpecs": [ - { - "spec": { - "containers": [ - { - "image": "$dockerreposerving:$dockertagserving", - "imagePullPolicy": "Always", - "name": "mnist-classifier", - "volumeMounts": [ - { - "mountPath": "/data", - "name": "persistent-storage" - } - ] - } - ], - "terminationGracePeriodSeconds": 1, - "volumes": [ - { - "name": "persistent-storage", - "persistentVolumeClaim": { - "claimName": "$modelpvc" - } - } - ] - } - } - ], - "graph": { - "children": [], - "endpoint": { - "type": "REST" - }, - "name": "mnist-classifier", - "type": "MODEL" - }, - "name": "mnist-classifier", - "replicas": 1 - } - ] - } -} -""") - seldon_serving_json = seldon_serving_json_template.substitute({ 'dockerreposerving': str(docker_repo_serving),'dockertagserving': str(docker_tag_serving),'modelpvc': modelvolop.outputs["name"]}) - - seldon_deployment = json.loads(seldon_serving_json) - - serve = dsl.ResourceOp( - name='serve', - k8s_resource=seldon_deployment, - success_condition='status.state == Available' - ).after(build_serving) - - -if __name__ == "__main__": - import kfp.compiler as compiler - compiler.Compiler().compile(mnist_tf, __file__ + ".tar.gz") diff --git a/samples/contrib/seldon/mnist_tf_volume.py b/samples/contrib/seldon/mnist_tf_volume.py deleted file mode 100644 index 755b020ad5c..00000000000 --- a/samples/contrib/seldon/mnist_tf_volume.py +++ /dev/null @@ -1,179 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from kubernetes import client as k8s_client -import kfp.dsl as dsl -import json -from string import Template - -@dsl.pipeline( - name="Seldon MNIST TF", - description="Example of training and serving seldon MNIST TF model. Like kubeflow/example-seldon but using existing images." 
-) - -def mnist_tf_volume(docker_repo_training='seldonio/deepmnistclassifier_trainer', - docker_tag_training='0.3', - docker_repo_serving='seldonio/deepmnistclassifier_runtime', - docker_tag_serving='0.3'): - -#use volume for storing model -#here model is saved and mounted into pre-defined image for serving -#alternatively model can be baked into image - for that see mabdeploy-seldon.py -#requires seldon v0.3.0 or higher - modelvolop = dsl.VolumeOp( - name="modelpvc", - resource_name="modelpvc", - size="50Mi", - modes=dsl.VOLUME_MODE_RWO - ) - - tfjobjson_template = Template(""" -{ - "apiVersion": "kubeflow.org/v1beta1", - "kind": "TFJob", - "metadata": { - "name": "mnist-train-{{workflow.uid}}", - "ownerReferences": [ - { - "apiVersion": "argoproj.io/v1alpha1", - "kind": "Workflow", - "controller": true, - "name": "{{workflow.name}}", - "uid": "{{workflow.uid}}" - } - ] - }, - "spec": { - "tfReplicaSpecs": { - "Worker": { - "replicas": 1, - "template": { - "spec": { - "containers": [ - { - "image": "$dockerrepotraining:$dockertagtraining", - "name": "tensorflow", - "volumeMounts": [ - { - "mountPath": "/data", - "name": "persistent-storage" - } - ] - } - ], - "restartPolicy": "OnFailure", - "volumes": [ - { - "name": "persistent-storage", - "persistentVolumeClaim": { - "claimName": "$modelpvc" - } - } - ] - } - } - } - } - } -} -""") - - tfjobjson = tfjobjson_template.substitute({ 'dockerrepotraining': str(docker_repo_training),'dockertagtraining': str(docker_tag_training),'modelpvc': modelvolop.outputs["name"]}) - - tfjob = json.loads(tfjobjson) - - train = dsl.ResourceOp( - name="train", - k8s_resource=tfjob, - success_condition='status.replicaStatuses.Worker.succeeded == 1' - ) - - seldon_serving_json_template = Template(""" -{ - "apiVersion": "machinelearning.seldon.io/v1alpha2", - "kind": "SeldonDeployment", - "metadata": { - "labels": { - "app": "seldon" - }, - "name": "mnist-classifier" - }, - "spec": { - "annotations": { - "deployment_version": "v1", - "project_name": "MNIST Example" - }, - "name": "mnist-classifier", - "predictors": [ - { - "annotations": { - "predictor_version": "v1" - }, - "componentSpecs": [ - { - "spec": { - "containers": [ - { - "image": "$dockerreposerving:$dockertagserving", - "imagePullPolicy": "Always", - "name": "mnist-classifier", - "volumeMounts": [ - { - "mountPath": "/data", - "name": "persistent-storage" - } - ] - } - ], - "terminationGracePeriodSeconds": 1, - "volumes": [ - { - "name": "persistent-storage", - "persistentVolumeClaim": { - "claimName": "$modelpvc" - } - } - ] - } - } - ], - "graph": { - "children": [], - "endpoint": { - "type": "REST" - }, - "name": "mnist-classifier", - "type": "MODEL" - }, - "name": "mnist-classifier", - "replicas": 1 - } - ] - } -} -""") - seldon_serving_json = seldon_serving_json_template.substitute({ 'dockerreposerving': str(docker_repo_serving),'dockertagserving': str(docker_tag_serving),'modelpvc': modelvolop.outputs["name"]}) - - seldon_deployment = json.loads(seldon_serving_json) - - serve = dsl.ResourceOp( - name='serve', - k8s_resource=seldon_deployment, - success_condition='status.state == Available' - ).after(train) - - -if __name__ == "__main__": - import kfp.compiler as compiler - compiler.Compiler().compile(mnist_tf_volume, __file__ + ".tar.gz") diff --git a/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/README.md b/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/README.md deleted file mode 100644 index 3953de678f7..00000000000 --- 
a/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/README.md +++ /dev/null @@ -1,17 +0,0 @@ -# Hello World CI Sample - -## Overview - -This sample uses cloudbuild to implement the continuous integration process of a simple pipeline that outputs "hello world" to the console. Once all set up, you can push your code to github repo, then the build process in cloud build will be triggered automatically, then a run will be created in kubeflow pipeline. You can view your pipeline and the run in kubeflow pipelines. - -Besides, we use **REST API** to call kubeflow pipeline to create a new version and a run in this sample. Other methods to create pipeline version can be found in mnist sample in this repo, i.e., use Kubeflow Pipeline SDK. - -## Usage - -To use this pipeline, you need to: - -* Set up a trigger in cloud build that connects to your github repo. -* Replace the constants to your own configuration in cloudbuild.yaml -* Replace images in the pipeline.py to your own images (the ones you built in cloudbuild.yaml) -* Set your cloud registry public accessible -* Set your bucket public accessible, or authenticate cloudbuild to cloud storage \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/cloudbuild.yaml b/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/cloudbuild.yaml deleted file mode 100644 index 7a0c7b503c3..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/cloudbuild.yaml +++ /dev/null @@ -1,59 +0,0 @@ -steps: - - name: "gcr.io/cloud-builders/docker" - args: - [ - "build", - "-t", - "${_GCR_PATH}/helloworld-ci:$COMMIT_SHA", - "-t", - "${_GCR_PATH}/helloworld-ci:latest", - "--cache-from", - "${_GCR_PATH}/helloworld-ci:latest", - "${_CODE_PATH}/helloworld", - ] - id: "BuildImages" - - name: "python:3.7-slim" - entrypoint: "/bin/sh" - args: [ - "-c", - "cd ${_CODE_PATH}; - pip3 install cffi==1.12.3 --upgrade; - pip3 install kfp; - sed -i s/image: helloworld-ci/image: ${_GCR_PATH}/helloworld-ci:$COMMIT_SHA/g component.yaml; - python pipeline.py; - cp pipeline.py.zip /workspace/pipeline.zip", - ] - id: "PackagePipeline" - - - name: "gcr.io/cloud-builders/gsutil" - args: - [ - "cp", - "/workspace/pipeline.zip", - "${_GS_BUCKET}/$COMMIT_SHA/pipeline.zip", - ] - id: "UploadPipeline" - waitFor: ["PackagePipeline"] - - - - name: "gcr.io/cloud-builders/curl" - entrypoint: "/bin/sh" - args: - [ - "-c", - "curl.bash $COMMIT_SHA ${_PIPELINE_ID} ${_GS_BUCKET} ${_PIPELINE_ENDPOINT}" - ] - id: "CreatePipelineVersionAndRun" - -images: - - "${_GCR_PATH}/helloworld-ci:$COMMIT_SHA" - - "${_GCR_PATH}/helloworld-ci:latest" - -substitutions: - _GCR_PATH: [Your cloud registry path. For example, gcr.io/myproject] - _CODE_PATH: /workspace/samples/contrib/versioned-pipeline-ci-samples - _NAMESPACE: kubeflow - _PIPELINE_ID: [Your kubeflow pipeline id to create a version on. Get it from Kubeflow Pipeline UI.] - _GS_BUCKET: [Name of your cloud storage bucket. For example, 'gs://my-bucket'] - _PIPELINE_ENDPOINT: [Your exposed pipeline endpoint 'ml-pipeline'. You can expose it in k8s workloads on gcp. 
- For example: https://0.0.0.1:8888] diff --git a/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/component.yaml b/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/component.yaml deleted file mode 100644 index c7a84c42c4b..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/component.yaml +++ /dev/null @@ -1,5 +0,0 @@ -name: helloworld-ci -description: Print "hello world" to console -implementation: - container: - image: helloworld-ci \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/curl.bash b/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/curl.bash deleted file mode 100644 index 2dfba531026..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/curl.bash +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/bash - -# strip gs:// prefix -bucket_name=$(echo $3 | sed 's/gs:\/\///') -data='{"name":'\""ci-$1"\"', "code_source_url": "https://github.com/kubeflow/pipelines/tree/'"$1"'", "package_url": {"pipeline_url": "https://storage.googleapis.com/'"$bucket_name"'/'"$1"'/pipeline.zip"}, -"resource_references": [{"key": {"id": '\""$2"\"', "type":3}, "relationship":1}]}' - -version=$(curl -H "Content-Type: application/json" -X POST -d "$data" "$4"/apis/v1beta1/pipeline_versions | jq -r ".id") - -# create run -rundata='{"name":'\""$1-run"\"', -"resource_references": [{"key": {"id": '\""$version"\"', "type":4}, "relationship":2}]}' -echo "$rundata" -curl -H "Content-Type: application/json" -X POST -d "$rundata" "$4"/apis/v1beta1/runs \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/helloworld/Dockerfile b/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/helloworld/Dockerfile deleted file mode 100644 index be7c1207814..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/helloworld/Dockerfile +++ /dev/null @@ -1,3 +0,0 @@ -FROM python:3 -COPY helloworld.py . -CMD ["python", "./helloworld.py"] \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/helloworld/helloworld.py b/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/helloworld/helloworld.py deleted file mode 100644 index 59298527481..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/helloworld/helloworld.py +++ /dev/null @@ -1,5 +0,0 @@ -def main(): - print("hello world!") - -if __name__ == "__main__": - main() diff --git a/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/pipeline.py b/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/pipeline.py deleted file mode 100644 index af29d34d951..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/helloworld-ci-sample/pipeline.py +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
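`curl.bash` above drives the KFP v1beta1 REST API directly. The same flow expressed with Python's `requests` can be easier to follow; this editor's sketch mirrors the payloads built in `curl.bash`, with placeholder endpoint, pipeline id, commit SHA and bucket name.

```python
# Editor's sketch of curl.bash in Python (placeholders, not real values).
import requests

ENDPOINT = "http://<pipeline-endpoint>"      # exposed ml-pipeline service
COMMIT_SHA = "<commit-sha>"
PIPELINE_ID = "<pipeline-id>"
BUCKET = "<bucket-name>"                     # without the gs:// prefix

# Create a pipeline version pointing at the uploaded pipeline.zip.
version_body = {
    "name": f"ci-{COMMIT_SHA}",
    "package_url": {
        "pipeline_url": f"https://storage.googleapis.com/{BUCKET}/{COMMIT_SHA}/pipeline.zip"
    },
    "resource_references": [{"key": {"id": PIPELINE_ID, "type": 3}, "relationship": 1}],
}
version_id = requests.post(
    f"{ENDPOINT}/apis/v1beta1/pipeline_versions", json=version_body
).json()["id"]

# Create a run from that version.
run_body = {
    "name": f"{COMMIT_SHA}-run",
    "resource_references": [{"key": {"id": version_id, "type": 4}, "relationship": 2}],
}
requests.post(f"{ENDPOINT}/apis/v1beta1/runs", json=run_body)
```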
- -import kfp -import kfp.dsl as dsl - -@dsl.pipeline( - name='hello world pipeline sample', - description='A simple sample using curl to interact with kfp' -) -def helloworld_ci_pipeline(): - import os - train_op = kfp.components.load_component_from_file('./component.yaml') - train = train_op() - -if __name__ == '__main__': - import kfp.compiler as compiler - compiler.Compiler().compile(helloworld_ci_pipeline, __file__ + '.zip') diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/README.md b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/README.md deleted file mode 100644 index e6702e8917a..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/README.md +++ /dev/null @@ -1,24 +0,0 @@ -# Kaggle Competition Pipeline Sample - -## Pipeline Overview - -This is a pipeline for [house price prediction](https://www.kaggle.com/c/house-prices-advanced-regression-techniques), an entry-level competition in kaggle. We demonstrate how to complete a kaggle competition by creating a pipeline of steps including downloading data, preprocessing and visualizing data, train model and submitting results to kaggle website. - -* We refer to [the notebook by Raj Kumar Gupta](https://www.kaggle.com/rajgupta5/house-price-prediction) and [the notebook by Sergei Neviadomski](https://www.kaggle.com/neviadomski/how-to-get-to-top-25-with-simple-model-sklearn) in terms of model implementation as well as data visualization. - -* We use [kaggle python api](https://github.com/Kaggle/kaggle-api) to interact with kaggle site, such as downloading data and submiting result. More usage can be found in their documentation. - -* We use [cloud build](https://cloud.google.com/cloud-build/) for CI process. That is, we automatically triggered a build and run as soon as we pushed our code to github repo. You need to setup a trigger on cloud build for your github repo branch to achieve the CI process. - -## Notice -* You can authenticate to gcp services by either: Create a "user-gcp-sa" secret following the troubleshooting parts in [Kubeflow pipeline repo](https://github.com/kubeflow/pipelines/tree/master/manifests/kustomize), or configure workload identity as instructed in [this guide](https://cloud.google.com/kubernetes-engine/docs/how-to/workload-identity). This sample uses the first method, but this will soon be deprecated. We would recommend using second method to replace the use of "user-gcp-sa" service account in the future. - -## Usage - -* Substitute the constants in "substitutions" in cloudbuild.yaml -* Fill in your kaggle_username and kaggle_key in Dockerfiles(in the folder "download_dataset" and "submit_result") to authenticate to kaggle. You can get them from an API token created from your kaggle "My Account" page. 
-* Set up cloud build triggers to your github repo for Continuous Integration -* Replace the CLOUDSDK_COMPUTE_ZONE, CLOUDSDK_CONTAINER_CLUSTER in cloudbuild.yaml with your own zone and cluster -* Enable "Kubernetes Engine Developer" in cloud build setting -* Set your gs bucket public or grant cloud storage access to cloud build and kubeflow pipeline -* Try commit and push it to github repo \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/cloudbuild.yaml b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/cloudbuild.yaml deleted file mode 100644 index 665ce878b2e..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/cloudbuild.yaml +++ /dev/null @@ -1,183 +0,0 @@ -steps: - - name: "gcr.io/cloud-builders/docker" - args: - [ - "build", - "-t", - "${_GCR_PATH}/kaggle_download:$COMMIT_SHA", - "-t", - "${_GCR_PATH}/kaggle_download:latest", - "${_CODE_PATH}/download_dataset", - "-f", - "${_CODE_PATH}/download_dataset/Dockerfile", - ] - id: "BuildDownloadDataImage" - - - name: "gcr.io/cloud-builders/docker" - args: - [ - "push", - "${_GCR_PATH}/kaggle_download:$COMMIT_SHA", - ] - id: "PushDownloadDataImage" - waitFor: ["BuildDownloadDataImage"] - - - name: "gcr.io/cloud-builders/docker" - args: - [ - "build", - "-t", - "${_GCR_PATH}/kaggle_visualize_table:$COMMIT_SHA", - "-t", - "${_GCR_PATH}/kaggle_visualize_table:latest", - "${_CODE_PATH}/visualize_table", - "-f", - "${_CODE_PATH}/visualize_table/Dockerfile", - ] - id: "BuildVisualizeTableImage" - - - name: "gcr.io/cloud-builders/docker" - args: - [ - "push", - "${_GCR_PATH}/kaggle_visualize_table:$COMMIT_SHA", - ] - id: "PushVisualizeTableImage" - waitFor: ["BuildVisualizeTableImage"] - - - name: "gcr.io/cloud-builders/docker" - args: - [ - "build", - "-t", - "${_GCR_PATH}/kaggle_visualize_html:$COMMIT_SHA", - "-t", - "${_GCR_PATH}/kaggle_visualize_html:latest", - "${_CODE_PATH}/visualize_html", - "-f", - "${_CODE_PATH}/visualize_html/Dockerfile", - ] - id: "BuildVisualizeHTMLImage" - - - name: "gcr.io/cloud-builders/docker" - args: - [ - "push", - "${_GCR_PATH}/kaggle_visualize_html:$COMMIT_SHA", - ] - id: "PushVisualizeHTMLImage" - waitFor: ["BuildVisualizeHTMLImage"] - - - name: "gcr.io/cloud-builders/docker" - args: - [ - "build", - "-t", - "${_GCR_PATH}/kaggle_train:$COMMIT_SHA", - "-t", - "${_GCR_PATH}/kaggle_train:latest", - "${_CODE_PATH}/train_model", - "-f", - "${_CODE_PATH}/train_model/Dockerfile", - ] - id: "BuildTrainImage" - - - name: "gcr.io/cloud-builders/docker" - args: - [ - "push", - "${_GCR_PATH}/kaggle_train:$COMMIT_SHA", - ] - id: "PushTrainImage" - waitFor: ["BuildTrainImage"] - - - name: "gcr.io/cloud-builders/docker" - args: - [ - "build", - "-t", - "${_GCR_PATH}/kaggle_submit:$COMMIT_SHA", - "-t", - "${_GCR_PATH}/kaggle_submit:latest", - "${_CODE_PATH}/submit_result", - "-f", - "${_CODE_PATH}/submit_result/Dockerfile", - ] - id: "BuildSubmitImage" - - - name: "gcr.io/cloud-builders/docker" - args: - [ - "push", - "${_GCR_PATH}/kaggle_submit:$COMMIT_SHA", - ] - id: "PushSubmitImage" - waitFor: ["BuildSubmitImage"] - - - name: "python:3.7-slim" - entrypoint: "/bin/sh" - args: [ - "-c", - "set -ex; - cd ${_CODE_PATH}; - pip3 install cffi==1.12.3 --upgrade; - pip3 install kfp==0.1.38; - sed -i 's|image: download_image_location|image: ${_GCR_PATH}/kaggle_download:$COMMIT_SHA|g' ./download_dataset/component.yaml; - sed -i 's|image: visualizetable_image_location|image: ${_GCR_PATH}/kaggle_visualize_table:$COMMIT_SHA|g' 
./visualize_table/component.yaml; - sed -i 's|image: visualizehtml_image_location|image: ${_GCR_PATH}/kaggle_visualize_html:$COMMIT_SHA|g' ./visualize_html/component.yaml; - sed -i 's|image: train_image_location|image: ${_GCR_PATH}/kaggle_train:$COMMIT_SHA|g' ./train_model/component.yaml; - sed -i 's|image: submit_image_location|image: ${_GCR_PATH}/kaggle_submit:$COMMIT_SHA|g' ./submit_result/component.yaml; - python pipeline.py - --gcr_address ${_GCR_PATH}; - cp pipeline.py.zip /workspace/pipeline.zip", - ] - id: "KagglePackagePipeline" - - - name: "gcr.io/cloud-builders/gsutil" - args: - [ - "cp", - "/workspace/pipeline.zip", - "${_GS_BUCKET}/$COMMIT_SHA/pipeline.zip" - ] - id: "KaggleUploadPipeline" - waitFor: ["KagglePackagePipeline"] - - - - name: "gcr.io/cloud-builders/kubectl" - entrypoint: "/bin/sh" - args: [ - "-c", - "cd ${_CODE_PATH}; - apt-get update; - apt-get install -y python3-pip; - apt-get install -y libssl-dev libffi-dev; - /builder/kubectl.bash; - pip3 install kfp; - pip3 install kubernetes; - python3 create_pipeline_version_and_run.py - --pipeline_id ${_PIPELINE_ID} - --commit_sha $COMMIT_SHA - --bucket_name ${_GS_BUCKET} - --gcr_address ${_GCR_PATH}" - ] - env: - - "CLOUDSDK_COMPUTE_ZONE=[Your cluster zone, for example: us-central1-a]" - - "CLOUDSDK_CONTAINER_CLUSTER=[Your cluster name, for example: my-cluster]" - id: "KaggleCreatePipelineVersionAndRun" - -images: - - "${_GCR_PATH}/kaggle_download:latest" - - "${_GCR_PATH}/kaggle_visualize_table:latest" - - "${_GCR_PATH}/kaggle_visualize_html:latest" - - "${_GCR_PATH}/kaggle_train:latest" - - "${_GCR_PATH}/kaggle_submit:latest" - - -substitutions: - _CODE_PATH: /workspace/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample - _NAMESPACE: kubeflow - _GCR_PATH: [Your cloud registry path. For example, gcr.io/my-project-id] - _GS_BUCKET: [Name of your cloud storage bucket. For example, gs://my-project-bucket] - _PIPELINE_ID: [Your kubeflow pipeline id to create a version on. Get it from Kubeflow Pipeline UI. - For example, f6f8558a-6eec-4ef4-b343-a650473ee613] \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/create_pipeline_version_and_run.py b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/create_pipeline_version_and_run.py deleted file mode 100644 index 7ec6fd8fba7..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/create_pipeline_version_and_run.py +++ /dev/null @@ -1,47 +0,0 @@ -import kfp -import argparse - -parser = argparse.ArgumentParser() -parser.add_argument('--commit_sha', help='Required. Commit SHA, for version name. Must be unique.', type=str) -parser.add_argument('--pipeline_id', help = 'Required. pipeline id',type=str) -parser.add_argument('--bucket_name', help='Required. gs bucket to store files', type=str) -parser.add_argument('--gcr_address', help='Required. Cloud registry address. For example, gcr.io/my-project', type=str) -parser.add_argument('--host', help='Host address of kfp.Client. 
Will be get from cluster automatically', type=str, default='') -parser.add_argument('--run_name', help='name of the new run.', type=str, default='') -parser.add_argument('--experiment_id', help = 'experiment id',type=str) -parser.add_argument('--code_source_url', help = 'url of source code', type=str, default='') -args = parser.parse_args() - -if args.host: - client = kfp.Client(host=args.host) -else: - client = kfp.Client() - -#create version -import os -package_url = os.path.join('https://storage.googleapis.com', args.bucket_name.lstrip('gs://'), args.commit_sha, 'pipeline.zip') -version_name = args.commit_sha -version_body = {"name": version_name, \ -"code_source_url": args.code_source_url, \ -"package_url": {"pipeline_url": package_url}, \ -"resource_references": [{"key": {"id": args.pipeline_id, "type":3}, "relationship":1}]} - -response = client.pipelines.create_pipeline_version(version_body) -version_id = response.id -# create run -run_name = args.run_name if args.run_name else 'run' + version_id -resource_references = [{"key": {"id": version_id, "type":4}, "relationship":2}] -if args.experiment_id: - resource_references.append({"key": {"id": args.experiment_id, "type":1}, "relationship": 1}) -run_body={"name":run_name, - "pipeline_spec":{"parameters": [{"name": "bucket_name", "value": args.bucket_name}, - {"name": "commit_sha", "value": args.commit_sha}]}, - "resource_references": resource_references} -try: - client.runs.create_run(run_body) -except: - print('Error Creating Run...') - - - - diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/download_dataset/Dockerfile b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/download_dataset/Dockerfile deleted file mode 100644 index fcfaa1ed722..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/download_dataset/Dockerfile +++ /dev/null @@ -1,9 +0,0 @@ -FROM python:3.7 -ENV KAGGLE_USERNAME=[YOUR KAGGLE USERNAME] \ - KAGGLE_KEY=[YOUR KAGGLE KEY] -COPY requirements.txt . -RUN python3 -m pip install -r \ - requirements.txt --quiet --no-cache-dir \ - && rm -f requirements.txt -COPY ./download_data.py . 
-CMD ["python", "download_data.py"] \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/download_dataset/component.yaml b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/download_dataset/component.yaml deleted file mode 100644 index f59290ac3cb..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/download_dataset/component.yaml +++ /dev/null @@ -1,15 +0,0 @@ -name: download dataset -description: visualize training in tensorboard -inputs: - - {name: bucket_name, type: GCSPath} -outputs: - - {name: train_dataset, type: string} - - {name: test_dataset, type: string} -implementation: - container: - image: download_image_location - command: ['python', 'download_data.py'] - args: ['--bucket_name', {inputValue: bucket_name}] - fileOutputs: - train_dataset: /train.txt - test_dataset: /test.txt \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/download_dataset/download_data.py b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/download_dataset/download_data.py deleted file mode 100644 index 7ae67d6696f..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/download_dataset/download_data.py +++ /dev/null @@ -1,31 +0,0 @@ -""" -step #1: download data from kaggle website, and push it to gs bucket -""" - -def process_and_upload( - bucket_name -): - from google.cloud import storage - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name.lstrip('gs://')) - train_blob = bucket.blob('train.csv') - test_blob = bucket.blob('test.csv') - train_blob.upload_from_filename('train.csv') - test_blob.upload_from_filename('test.csv') - - with open('train.txt', 'w') as f: - f.write(bucket_name+'/train.csv') - with open('test.txt', 'w') as f: - f.write(bucket_name+'/test.csv') - -if __name__ == '__main__': - import os - os.system("kaggle competitions download -c house-prices-advanced-regression-techniques") - os.system("unzip house-prices-advanced-regression-techniques") - import argparse - parser = argparse.ArgumentParser() - parser.add_argument('--bucket_name', type=str) - args = parser.parse_args() - - process_and_upload(args.bucket_name) - \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/download_dataset/requirements.txt b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/download_dataset/requirements.txt deleted file mode 100644 index 19b9f84b35f..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/download_dataset/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -kaggle -google-cloud-storage diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/pipeline.py b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/pipeline.py deleted file mode 100644 index d9f69a691af..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/pipeline.py +++ /dev/null @@ -1,40 +0,0 @@ -import kfp.dsl as dsl -import kfp.components as components -from kfp.gcp import use_gcp_secret - -@dsl.pipeline( - name = "kaggle pipeline", - description = "kaggle pipeline that goes from download data, analyse data, train model to submit result" -) -def kaggle_houseprice( - bucket_name: str, - commit_sha: str -): - - downloadDataOp = components.load_component_from_file('./download_dataset/component.yaml') - downloadDataStep = 
downloadDataOp(bucket_name=bucket_name).apply(use_gcp_secret('user-gcp-sa')) - - visualizeTableOp = components.load_component_from_file('./visualize_table/component.yaml') - visualizeTableStep = visualizeTableOp(train_file_path='%s' % downloadDataStep.outputs['train_dataset']).apply(use_gcp_secret('user-gcp-sa')) - - visualizeHTMLOp = components.load_component_from_file('./visualize_html/component.yaml') - visualizeHTMLStep = visualizeHTMLOp(train_file_path='%s' % downloadDataStep.outputs['train_dataset'], - commit_sha=commit_sha, - bucket_name=bucket_name).apply(use_gcp_secret('user-gcp-sa')) - - trainModelOp = components.load_component_from_file('./train_model/component.yaml') - trainModelStep = trainModelOp(train_file='%s' % downloadDataStep.outputs['train_dataset'], - test_file='%s' % downloadDataStep.outputs['test_dataset'], - bucket_name=bucket_name).apply(use_gcp_secret('user-gcp-sa')) - - submitResultOp = components.load_component_from_file('./submit_result/component.yaml') - submitResultStep = submitResultOp(result_file='%s' % trainModelStep.outputs['result'], - submit_message='submit').apply(use_gcp_secret('user-gcp-sa')) - -if __name__ == '__main__': - import kfp.compiler as compiler - import argparse - parser = argparse.ArgumentParser() - parser.add_argument('--gcr_address', type = str) - args = parser.parse_args() - compiler.Compiler().compile(kaggle_houseprice, __file__ + '.zip') \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/submit_result/Dockerfile b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/submit_result/Dockerfile deleted file mode 100644 index 69dd1895800..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/submit_result/Dockerfile +++ /dev/null @@ -1,9 +0,0 @@ -FROM python:3.7 -ENV KAGGLE_USERNAME=[YOUR KAGGLE USERNAME] \ - KAGGLE_KEY=[YOUR KAGGLE KEY] -COPY requirements.txt . -RUN python3 -m pip install -r \ - requirements.txt --quiet --no-cache-dir \ - && rm -f requirements.txt -COPY ./submit_result.py . 
-CMD ["python", "submit_result.py"] \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/submit_result/component.yaml b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/submit_result/component.yaml deleted file mode 100644 index 7ee0cec9849..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/submit_result/component.yaml +++ /dev/null @@ -1,11 +0,0 @@ -name: submit result -description: submit prediction result to kaggle -inputs: - - {name: result_file, type: string} - - {name: submit_message, type: string} -implementation: - container: - image: submit_image_location - command: ['python', 'submit_result.py'] - args: ['--result_file', {inputValue: result_file}, - '--submit_message', {inputValue: submit_message}] \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/submit_result/requirements.txt b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/submit_result/requirements.txt deleted file mode 100644 index 35e6acd93c6..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/submit_result/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -kaggle -gcsfs diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/submit_result/submit_result.py b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/submit_result/submit_result.py deleted file mode 100644 index 20237e80593..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/submit_result/submit_result.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -step #4: submit result to kaggle -""" - -def download_result( - result_file -): - import gcsfs - fs = gcsfs.GCSFileSystem() - fs.get(result_file, 'submission.csv') - -if __name__ == '__main__': - import argparse - parser = argparse.ArgumentParser() - parser.add_argument('--result_file', type=str) - parser.add_argument('--submit_message', type=str, default = 'default submit') - args = parser.parse_args() - - download_result(args.result_file) - import os - os.system("kaggle competitions submit -c house-prices-advanced-regression-techniques -f submission.csv -m " + args.submit_message) - - \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/train_model/Dockerfile b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/train_model/Dockerfile deleted file mode 100644 index efcc30306b3..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/train_model/Dockerfile +++ /dev/null @@ -1,7 +0,0 @@ -FROM python:3.7 -COPY ./train.py . -COPY requirements.txt . 
-RUN python3 -m pip install -r \ - requirements.txt --quiet --no-cache-dir \ - && rm -f requirements.txt -CMD ["python", "train.py"] \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/train_model/component.yaml b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/train_model/component.yaml deleted file mode 100644 index eb13e7ed22e..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/train_model/component.yaml +++ /dev/null @@ -1,17 +0,0 @@ -name: train model -description: train and test -inputs: - - {name: train_file, type: string} - - {name: test_file, type: string} - - {name: bucket_name, type: string} -outputs: - - {name: result, type: string} -implementation: - container: - image: train_image_location - command: ['python', 'train.py'] - args: ['--train_file', {inputValue: train_file}, - '--test_file', {inputValue: test_file}, - '--bucket_name', {inputValue: bucket_name}] - fileOutputs: - result: /result_path.txt \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/train_model/requirements.txt b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/train_model/requirements.txt deleted file mode 100644 index 996ce8fd956..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/train_model/requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -pandas==0.25.1 -gcsfs -numpy -matplotlib -seaborn -sklearn diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/train_model/train.py b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/train_model/train.py deleted file mode 100644 index 445f81f7cf5..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/train_model/train.py +++ /dev/null @@ -1,216 +0,0 @@ -""" -Copyright 2020 The Kubeflow Authors -Copyright (c) 2017 Sergei Neviadomski - -This file is modified based on code from kaggle user Sergei Neviadomski. -Original code can be found at: - - https://www.kaggle.com/neviadomski/how-to-get-to-top-25-with-simple-model-sklearn - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-""" -def train( - train_file, - test_file, - bucket_name -): - train = pd.read_csv(train_file) - test = pd.read_csv(test_file) - - # Prints R2 and RMSE scores - def get_score(prediction, lables): - print('R2: {}'.format(r2_score(prediction, lables))) - print('RMSE: {}'.format(np.sqrt(mean_squared_error(prediction, lables)))) - - # Shows scores for train and validation sets - def train_test(estimator, x_trn, x_tst, y_trn, y_tst): - prediction_train = estimator.predict(x_trn) - # Printing estimator - print(estimator) - # Printing train scores - get_score(prediction_train, y_trn) - prediction_test = estimator.predict(x_tst) - # Printing test scores - print("Test") - get_score(prediction_test, y_tst) - - # Spliting to features and lables and deleting variable I don't need - train_labels = train.pop('SalePrice') - - features = pd.concat([train, test], keys=['train', 'test']) - - # I decided to get rid of features that have more than half of missing information or do not correlate to SalePrice - features.drop(['Utilities', 'RoofMatl', 'MasVnrArea', 'BsmtFinSF1', 'BsmtFinSF2', 'BsmtUnfSF', 'Heating', 'LowQualFinSF', - 'BsmtFullBath', 'BsmtHalfBath', 'Functional', 'GarageYrBlt', 'GarageArea', 'GarageCond', 'WoodDeckSF', - 'OpenPorchSF', 'EnclosedPorch', '3SsnPorch', 'ScreenPorch', 'PoolArea', 'PoolQC', 'Fence', 'MiscFeature', 'MiscVal'], - axis=1, inplace=True) - - # MSSubClass as str - features['MSSubClass'] = features['MSSubClass'].astype(str) - - # MSZoning NA in pred. filling with most popular values - features['MSZoning'] = features['MSZoning'].fillna(features['MSZoning'].mode()[0]) - - # LotFrontage NA in all. I suppose NA means 0 - features['LotFrontage'] = features['LotFrontage'].fillna(features['LotFrontage'].mean()) - - # Alley NA in all. NA means no access - features['Alley'] = features['Alley'].fillna('NOACCESS') - - # Converting OverallCond to str - features.OverallCond = features.OverallCond.astype(str) - - # MasVnrType NA in all. filling with most popular values - features['MasVnrType'] = features['MasVnrType'].fillna(features['MasVnrType'].mode()[0]) - - # BsmtQual, BsmtCond, BsmtExposure, BsmtFinType1, BsmtFinType2 - # NA in all. NA means No basement - for col in ('BsmtQual', 'BsmtCond', 'BsmtExposure', 'BsmtFinType1', 'BsmtFinType2'): - features[col] = features[col].fillna('NoBSMT') - - # TotalBsmtSF NA in pred. I suppose NA means 0 - features['TotalBsmtSF'] = features['TotalBsmtSF'].fillna(0) - - # Electrical NA in pred. filling with most popular values - features['Electrical'] = features['Electrical'].fillna(features['Electrical'].mode()[0]) - - # KitchenAbvGr to categorical - features['KitchenAbvGr'] = features['KitchenAbvGr'].astype(str) - - # KitchenQual NA in pred. filling with most popular values - features['KitchenQual'] = features['KitchenQual'].fillna(features['KitchenQual'].mode()[0]) - - # FireplaceQu NA in all. NA means No Fireplace - features['FireplaceQu'] = features['FireplaceQu'].fillna('NoFP') - - # GarageType, GarageFinish, GarageQual NA in all. NA means No Garage - for col in ('GarageType', 'GarageFinish', 'GarageQual'): - features[col] = features[col].fillna('NoGRG') - - # GarageCars NA in pred. I suppose NA means 0 - features['GarageCars'] = features['GarageCars'].fillna(0.0) - - # SaleType NA in pred. 
filling with most popular values - features['SaleType'] = features['SaleType'].fillna(features['SaleType'].mode()[0]) - - # Year and Month to categorical - features['YrSold'] = features['YrSold'].astype(str) - features['MoSold'] = features['MoSold'].astype(str) - - # Adding total sqfootage feature and removing Basement, 1st and 2nd floor features - features['TotalSF'] = features['TotalBsmtSF'] + features['1stFlrSF'] + features['2ndFlrSF'] - features.drop(['TotalBsmtSF', '1stFlrSF', '2ndFlrSF'], axis=1, inplace=True) - - ## Log transformation of labels - train_labels = np.log(train_labels) - - ## Standardizing numeric features - numeric_features = features.loc[:,['LotFrontage', 'LotArea', 'GrLivArea', 'TotalSF']] - numeric_features_standardized = (numeric_features - numeric_features.mean())/numeric_features.std() - - - # Getting Dummies from Condition1 and Condition2 - conditions = set([x for x in features['Condition1']] + [x for x in features['Condition2']]) - dummies = pd.DataFrame(data=np.zeros((len(features.index), len(conditions))), - index=features.index, columns=conditions) - for i, cond in enumerate(zip(features['Condition1'], features['Condition2'])): - dummies.ix[i, cond] = 1 - features = pd.concat([features, dummies.add_prefix('Condition_')], axis=1) - features.drop(['Condition1', 'Condition2'], axis=1, inplace=True) - - # Getting Dummies from Exterior1st and Exterior2nd - exteriors = set([x for x in features['Exterior1st']] + [x for x in features['Exterior2nd']]) - dummies = pd.DataFrame(data=np.zeros((len(features.index), len(exteriors))), - index=features.index, columns=exteriors) - for i, ext in enumerate(zip(features['Exterior1st'], features['Exterior2nd'])): - dummies.ix[i, ext] = 1 - features = pd.concat([features, dummies.add_prefix('Exterior_')], axis=1) - features.drop(['Exterior1st', 'Exterior2nd', 'Exterior_nan'], axis=1, inplace=True) - - # Getting Dummies from all other categorical vars - for col in features.dtypes[features.dtypes == 'object'].index: - for_dummy = features.pop(col) - features = pd.concat([features, pd.get_dummies(for_dummy, prefix=col)], axis=1) - - ### Copying features - features_standardized = features.copy() - - ### Replacing numeric features by standardized values - features_standardized.update(numeric_features_standardized) - - ### Splitting features - train_features = features.loc['train'].drop('Id', axis=1).select_dtypes(include=[np.number]).values - test_features = features.loc['test'].drop('Id', axis=1).select_dtypes(include=[np.number]).values - - ### Splitting standardized features - train_features_st = features_standardized.loc['train'].drop('Id', axis=1).select_dtypes(include=[np.number]).values - test_features_st = features_standardized.loc['test'].drop('Id', axis=1).select_dtypes(include=[np.number]).values - - ### Shuffling train sets - train_features_st, train_features, train_labels = shuffle(train_features_st, train_features, train_labels, random_state = 5) - - ### Splitting - x_train, x_test, y_train, y_test = train_test_split(train_features, train_labels, test_size=0.1, random_state=200) - x_train_st, x_test_st, y_train_st, y_test_st = train_test_split(train_features_st, train_labels, test_size=0.1, random_state=200) - - ENSTest = linear_model.ElasticNetCV(alphas=[0.0001, 0.0005, 0.001, 0.01, 0.1, 1, 10], l1_ratio=[.01, .1, .5, .9, .99], max_iter=5000).fit(x_train_st, y_train_st) - train_test(ENSTest, x_train_st, x_test_st, y_train_st, y_test_st) - - # Average R2 score and standart deviation of 5-fold cross-validation - scores = 
cross_val_score(ENSTest, train_features_st, train_labels, cv=5) - print("Accuracy: %0.2f (+/- %0.2f)" % (scores.mean(), scores.std() * 2)) - - GBest = ensemble.GradientBoostingRegressor(n_estimators=3000, learning_rate=0.05, max_depth=3, max_features='sqrt', - min_samples_leaf=15, min_samples_split=10, loss='huber').fit(x_train, y_train) - train_test(GBest, x_train, x_test, y_train, y_test) - - # Average R2 score and standart deviation of 5-fold cross-validation - scores = cross_val_score(GBest, train_features_st, train_labels, cv=5) - print("Accuracy: %0.2f (+/- %0.2f)" % (scores.mean(), scores.std() * 2)) - - # Retraining models - GB_model = GBest.fit(train_features, train_labels) - ENST_model = ENSTest.fit(train_features_st, train_labels) - - ## Getting our SalePrice estimation - Final_labels = (np.exp(GB_model.predict(test_features)) + np.exp(ENST_model.predict(test_features_st))) / 2 - - ## Saving to CSV - import os - result_path = os.path.join(bucket_name, 'submission.csv') - pd.DataFrame({'Id': test.Id, 'SalePrice': Final_labels}).to_csv(result_path, index =False) - - with open('/result_path.txt', 'w') as f: - f.write(result_path) - -if __name__ == '__main__': - # Adding needed libraries and reading data - import pandas as pd - import numpy as np - from sklearn import ensemble, tree, linear_model - from sklearn.model_selection import train_test_split, cross_val_score - from sklearn.metrics import r2_score, mean_squared_error - from sklearn.utils import shuffle - import warnings - warnings.filterwarnings('ignore') - - import argparse - - parser = argparse.ArgumentParser() - parser.add_argument('--train_file', type=str) - parser.add_argument('--test_file', type=str) - parser.add_argument('--bucket_name', type=str) - - args = parser.parse_args() - train(args.train_file, args.test_file, args.bucket_name) - diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_html/Dockerfile b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_html/Dockerfile deleted file mode 100644 index 5a81fade97d..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_html/Dockerfile +++ /dev/null @@ -1,7 +0,0 @@ -FROM tensorflow/tensorflow:2.0.0-py3 -COPY requirements.txt . -RUN python3 -m pip install -r \ - requirements.txt --quiet --no-cache-dir \ - && rm -f requirements.txt -COPY ./visualize.py . 
-CMD ["python", 'visualize.py'] \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_html/component.yaml b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_html/component.yaml deleted file mode 100644 index 38cf640e207..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_html/component.yaml +++ /dev/null @@ -1,17 +0,0 @@ -name: visualize in html -description: visualize dataset in picture written in html -inputs: - - {name: train_file_path, type: string} - - {name: commit_sha, type: string} - - {name: bucket_name, type: string} -outputs: - - {name: MLPipeline UI metadata, type: UI metadata} -implementation: - container: - image: visualizehtml_image_location - command: ['python', 'visualize.py'] - args: ['--train_file_path', {inputValue: train_file_path}, - '--commit_sha', {inputValue: commit_sha}, - '--bucket_name', {inputValue: bucket_name}] - fileOutputs: - MLPipeline UI metadata: /mlpipeline-ui-metadata.json \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_html/requirements.txt b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_html/requirements.txt deleted file mode 100644 index 308eb5ffc19..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_html/requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -gcsfs -pandas -matplotlib -seaborn diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_html/visualize.py b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_html/visualize.py deleted file mode 100644 index 99476b6cfce..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_html/visualize.py +++ /dev/null @@ -1,54 +0,0 @@ -# visualizer with html - -def datahtml( - bucket_name, - commit_sha, - train_file_path -): - import json - import seaborn as sns - import matplotlib.pyplot as plt - import os - image_path = os.path.join(bucket_name, commit_sha, 'visualization.png') - image_url = os.path.join('https://storage.googleapis.com', bucket_name.lstrip('gs://'), commit_sha, 'visualization.png') - html_path = os.path.join(bucket_name, 'kaggle.html') - # ouptut visualization to a file - - import pandas as pd - df_train = pd.read_csv(train_file_path) - sns.set() - cols = ['SalePrice', 'OverallQual', 'GrLivArea', 'GarageCars', 'TotalBsmtSF', 'FullBath', 'YearBuilt'] - sns.pairplot(df_train[cols], size = 3) - plt.savefig('visualization.png') - from tensorflow.python.lib.io import file_io - file_io.copy('visualization.png', image_path) - rendered_template = """ - - - correlation image - - - - - """.format(image_url) - file_io.write_string_to_file(html_path, rendered_template) - - metadata = { - 'outputs' : [{ - 'type': 'web-app', - 'storage': 'gcs', - 'source': html_path, - }] - } - with file_io.FileIO('/mlpipeline-ui-metadata.json', 'w') as f: - json.dump(metadata, f) - -if __name__ == '__main__': - import argparse - parser = argparse.ArgumentParser() - parser.add_argument('--bucket_name', type = str) - parser.add_argument('--commit_sha', type = str) - parser.add_argument('--train_file_path', type = str) - args = parser.parse_args() - - datahtml(args.bucket_name, args.commit_sha, args.train_file_path) diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_table/Dockerfile 
b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_table/Dockerfile deleted file mode 100644 index c839d1504ed..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_table/Dockerfile +++ /dev/null @@ -1,7 +0,0 @@ -FROM python:3.7 -COPY requirements.txt . -RUN python3 -m pip install -r \ - requirements.txt --quiet --no-cache-dir \ - && rm -f requirements.txt -COPY ./visualize.py . -CMD ["python", 'visualize.py'] \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_table/component.yaml b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_table/component.yaml deleted file mode 100644 index 70fc43fa932..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_table/component.yaml +++ /dev/null @@ -1,13 +0,0 @@ -name: visualize table -description: visualize dataset in table -inputs: - - {name: train_file_path, type: string} -outputs: - - {name: MLPipeline UI metadata, type: UI metadata} -implementation: - container: - image: visualizetable_image_location - command: ['python', 'visualize.py'] - args: ['--train_file_path', {inputValue: train_file_path}] - fileOutputs: - MLPipeline UI metadata: /mlpipeline-ui-metadata.json \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_table/requirements.txt b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_table/requirements.txt deleted file mode 100644 index 74341bb0c1a..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_table/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -gcsfs -pandas diff --git a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_table/visualize.py b/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_table/visualize.py deleted file mode 100644 index 41f5bfe36ba..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/kaggle-ci-sample/visualize_table/visualize.py +++ /dev/null @@ -1,26 +0,0 @@ -def datatable( - train_file_path -): - import pandas as pd - import json - train_file = pd.read_csv(train_file_path) - header = train_file.columns.tolist() - metadata = { - 'outputs' : [{ - 'type': 'table', - 'storage': 'gcs', - 'format': 'csv', - 'header': header, - 'source': train_file_path - }] - } - with open('/mlpipeline-ui-metadata.json', 'w') as f: - json.dump(metadata, f) - -if __name__ == '__main__': - import argparse - parser = argparse.ArgumentParser() - parser.add_argument('--train_file_path', type = str) - args = parser.parse_args() - - datatable(args.train_file_path) diff --git a/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/README.md b/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/README.md deleted file mode 100644 index 61138a598d9..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/README.md +++ /dev/null @@ -1,31 +0,0 @@ -# Mnist Continuous Integration(CI) Pipeline - -## Overview - -This sample uses cloud build to implement the continuous integration process of a basic machine learning pipeline that trains and visualizes model in tensorboard. Once all set up, you can push your code to github repo, then the build process in cloud build will be triggered automatically, then a run will be created in kubeflow pipeline. You can view your pipeline and the run in kubeflow pipelines. 
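Concretely, the "create a version and a run" step this CI flow performs boils down to a handful of KFP SDK calls. The sketch below is illustrative only: it uses the higher-level `kfp.Client` helpers available in newer kfp 1.x releases (not the low-level `client.pipelines` / `client.runs` APIs the sample's script uses), it uploads the compiled package directly rather than registering a GCS package URL, and the host, pipeline id, bucket, and version name are placeholders.

```python
# Illustrative sketch only -- host, pipeline id, and bucket are placeholders,
# and these helpers require a newer kfp 1.x SDK than the one this sample's
# Cloud Build steps install.
import kfp

client = kfp.Client(host="http://localhost:8888")  # placeholder KFP endpoint

# Register the package compiled by pipeline.py as a new version of an
# existing pipeline (the sample registers a GCS package URL instead).
version = client.upload_pipeline_version(
    pipeline_package_path="pipeline.py.zip",
    pipeline_version_name="my-commit-sha",  # placeholder, e.g. the commit SHA
    pipeline_id="your-pipeline-id",         # placeholder
)

# Start a run of that version, passing the same parameters the sample's
# run body uses (storage_bucket and output_path).
experiment = client.create_experiment(name="mnist-ci")
client.run_pipeline(
    experiment_id=experiment.id,
    job_name=f"run-{version.id}",
    version_id=version.id,
    params={
        "storage_bucket": "gs://my-bucket",            # placeholder bucket
        "output_path": "/mlpipeline-ui-metadata.json",
    },
)
```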
- -We use **Kubeflow Pipeline(KFP) SDK** to interact with kubeflow pipeline to create a new version and a run in this sample. - -## What you can learn in this sample -* CI process of a simple but general ML pipeline. -* Launch a tensorboard as one pipeline step -* Data passing between steps - - -## What needs to be done before run -* Authenticate to gcp services by either: Create a "user-gcp-sa" secret following the troubleshooting parts in [KFP Repo](https://github.com/kubeflow/pipelines/tree/master/manifests/kustomize), or configure workload identity as instructed in [this guide](https://cloud.google.com/kubernetes-engine/docs/how-to/workload-identity). This sample uses the first method, but this will soon be deprecated. Please refer to the second method to replace the use of "user-gcp-sa" service account. -* Set up a trigger in cloud build, and link it to your github repo -* Enable "Kubernetes Engine Developer" in cloud build setting -* Replace the CLOUDSDK_COMPUTE_ZONE, CLOUDSDK_CONTAINER_CLUSTER in cloudbuild.yaml with your own zone and cluster -* Substitute the 'substitution' field in cloudbuild.yaml: - -`_GCR_PATH`: '[YOUR CLOUD REGISTRY], for example: gcr.io/my-project' \ -`_GS_BUCKET`: '[YOUR GS BUCKET TO STORE PIPELINE AND LAUNCH TENSORBOARD], for example: gs://my-bucket'\ -`_PIPELINE_ID`: '[PIPELINE ID TO CREATE A VERSION ON], get it on kfp UI' \ - -* Set your container registy public or grant cloud registry access to cloud build and kubeflow pipeline -* Set your gs bucket public or grant cloud storage access to cloud build and kubeflow pipeline -* Try a commit to your repo, then you can observe the build process triggered automatically - - - diff --git a/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/cloudbuild.yaml b/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/cloudbuild.yaml deleted file mode 100644 index 07931245d3c..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/cloudbuild.yaml +++ /dev/null @@ -1,107 +0,0 @@ -steps: - - name: "gcr.io/cloud-builders/docker" - args: - [ - "build", - "-t", - "${_GCR_PATH}/mnist_train:$COMMIT_SHA", - "-t", - "${_GCR_PATH}/mnist_train:latest", - "${_CODE_PATH}/train", - "-f", - "${_CODE_PATH}/train/Dockerfile", - ] - id: "MnistBuildFirstImage" - - - name: "gcr.io/cloud-builders/docker" - args: - [ - "push", - "${_GCR_PATH}/mnist_train:$COMMIT_SHA", - ] - id: "MnistPushFirstImage" - waitFor: ["MnistBuildFirstImage"] - - - name: "gcr.io/cloud-builders/docker" - args: - [ - "build", - "-t", - "${_GCR_PATH}/mnist_tensorboard:$COMMIT_SHA", - "-t", - "${_GCR_PATH}/mnist_tensorboard:latest", - "${_CODE_PATH}/tensorboard", - "-f", - "${_CODE_PATH}/tensorboard/Dockerfile", - ] - id: "MnistBuildSecondImage" - - - name: "gcr.io/cloud-builders/docker" - args: - [ - "push", - "${_GCR_PATH}/mnist_tensorboard:$COMMIT_SHA", - ] - id: "MnistPushSecondImage" - waitFor: ["MnistBuildSecondImage"] - - - name: "python:3.7-slim" - entrypoint: "/bin/sh" - args: [ - "-c", - "cd ${_CODE_PATH}; - pip3 install cffi==1.12.3 --upgrade; - pip3 install kfp==0.1.37; - sed -i 's|image: train_image_location|image: ${_GCR_PATH}/mnist_train:$COMMIT_SHA|g' ./train/component.yaml; - sed -i 's|image: tensorboard_image_location|image: ${_GCR_PATH}/mnist_tensorboard:$COMMIT_SHA|g' ./tensorboard/component.yaml; - sed -i 's|ui_metadata_path|${_UI_METADATA_PATH}|g' ./tensorboard/component.yaml; - python pipeline.py --gcr_address ${_GCR_PATH}; - cp pipeline.py.zip /workspace/pipeline.zip", - ] - id: "MnistPackagePipeline" - 
- - name: "gcr.io/cloud-builders/gsutil" - args: - [ - "cp", - "/workspace/pipeline.zip", - "${_GS_BUCKET}/$COMMIT_SHA/pipeline.zip" - ] - id: "MnistUploadPipeline" - waitFor: ["MnistPackagePipeline"] - - - - name: "gcr.io/cloud-builders/kubectl" - entrypoint: "/bin/sh" - args: [ - "-c", - "cd ${_CODE_PATH}; - apt-get update; - apt-get install -y python3-pip; - apt-get install -y libssl-dev libffi-dev; - /builder/kubectl.bash; - pip3 install kfp==0.1.37; - pip3 install kubernetes; - python3 create_pipeline_version_and_run.py - --bucket_name ${_GS_BUCKET} - --commit_sha $COMMIT_SHA - --pipeline_id ${_PIPELINE_ID} - --output_path ${_UI_METADATA_PATH}" - ] - env: - - "CLOUDSDK_COMPUTE_ZONE=[Your cluster zone, for example: us-central1-a]" - - "CLOUDSDK_CONTAINER_CLUSTER=[Your cluster name, for example: my-cluster]" - id: "MnistCreatePipelineVersionAndRun" - -images: - - "${_GCR_PATH}/mnist_train:latest" - - "${_GCR_PATH}/mnist_tensorboard:latest" - -substitutions: - _CODE_PATH: /workspace/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample - _NAMESPACE: kubeflow - _GCR_PATH: [Your cloud registry path. For example, gcr.io/my-project-id] - _GS_BUCKET: [Name of your cloud storage bucket. For example, gs://my-project-bucket] - _PIPELINE_ID: [Your kubeflow pipeline id to create a version on. Get it from Kubeflow Pipeline UI. - For example, f6f8558a-6eec-4ef4-b343-a650473ee613] - _UI_METADATA_PATH: [Path to the file which specifies where your metadata is located. For example, /mlpipeline-ui-metadata.json ] diff --git a/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/create_pipeline_version_and_run.py b/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/create_pipeline_version_and_run.py deleted file mode 100644 index 2f13adb66ae..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/create_pipeline_version_and_run.py +++ /dev/null @@ -1,49 +0,0 @@ -import kfp -import argparse - -parser = argparse.ArgumentParser() -parser.add_argument('--bucket_name', help='Required. gs bucket to store tensorboard', type=str) -parser.add_argument('--commit_sha', help='Required. Name of the new version. Must be unique.', type=str) -parser.add_argument('--pipeline_id', help = 'Required. pipeline id',type=str) -parser.add_argument('--output_path', help = 'Required. Path to UI metadata.',type=str) -parser.add_argument('--host', help='Host address of kfp.Client. 
Will be get from cluster automatically', type=str, default='') -parser.add_argument('--run_name', help='name of the new run.', type=str, default='') -parser.add_argument('--experiment_id', help = 'experiment id',type=str) -parser.add_argument('--code_source_url', help = 'url of source code', type=str, default='') -args = parser.parse_args() - -if args.host: - client = kfp.Client(host=args.host) -else: - client = kfp.Client() -import os -package_url = os.path.join('https://storage.googleapis.com', args.bucket_name.lstrip('gs://'), args.commit_sha, 'pipeline.zip') -#create version -version_body = {"name": args.commit_sha, \ -"code_source_url": args.code_source_url, \ -"package_url": {"pipeline_url": package_url}, \ -"resource_references": [{"key": {"id": args.pipeline_id, "type":3}, "relationship":1}]} -response = client.pipelines.create_pipeline_version(version_body) - -version_id = response.id -# create run -run_name = args.run_name if args.run_name else 'run' + version_id -resource_references = [{"key": {"id": version_id, "type":4}, "relationship":2}] -if args.experiment_id: - resource_references.append({"key": {"id": args.experiment_id, "type":1}, "relationship": 1}) -run_body={"name":run_name, - "pipeline_spec":{ - "parameters": [ - {"name": "storage_bucket", "value": args.bucket_name}, - {"name": "output_path", "value": args.output_path} - ] - }, - "resource_references": resource_references} -try: - client.runs.create_run(run_body) -except: - print('Error Creating Run...') - - - - diff --git a/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/pipeline.py b/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/pipeline.py deleted file mode 100644 index fb4706a16fb..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/pipeline.py +++ /dev/null @@ -1,30 +0,0 @@ -import kfp.dsl as dsl -import kfp.components as components -from kfp.gcp import use_gcp_secret - -@dsl.pipeline( - name='mnist pipeline', - description='A pipeline to train a model on mnist dataset and start a tensorboard.' -) -def mnist_pipeline( - storage_bucket: str, - output_path: str, - ): - import os - train_op = components.load_component_from_file('./train/component.yaml') - train_step = train_op(storage_bucket=storage_bucket).apply(use_gcp_secret('user-gcp-sa')) - - visualize_op = components.load_component_from_file('./tensorboard/component.yaml') - visualize_step = visualize_op( - logdir='%s' % train_step.outputs['logdir'], - output_path=output_path - ).apply(use_gcp_secret('user-gcp-sa')) - -if __name__ == '__main__': - import argparse - parser = argparse.ArgumentParser() - parser.add_argument('--gcr_address', type = str) - args = parser.parse_args() - - import kfp.compiler as compiler - compiler.Compiler().compile(mnist_pipeline, __file__ + '.zip') \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/tensorboard/Dockerfile b/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/tensorboard/Dockerfile deleted file mode 100644 index 5948e2e0a8d..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/tensorboard/Dockerfile +++ /dev/null @@ -1,3 +0,0 @@ -FROM python:3.7-slim -COPY tensorboard.py . 
-CMD ["python", "./tensorboard.py"] \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/tensorboard/component.yaml b/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/tensorboard/component.yaml deleted file mode 100644 index cd113b1d766..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/tensorboard/component.yaml +++ /dev/null @@ -1,16 +0,0 @@ -name: tensorboard visualization -description: visualize training in tensorboard -inputs: - - {name: logdir, type: string} - - {name: output_path, type: string} -outputs: - - {name: MLPipeline UI metadata, type: UI metadata} -implementation: - container: - image: tensorboard_image_location - command: ['python', '/tensorboard.py'] - args: ['--logdir', {inputValue: logdir}, - '--output_path', {inputValue: output_path}] - fileOutputs: - MLPipeline UI metadata: ui_metadata_path - \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/tensorboard/tensorboard.py b/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/tensorboard/tensorboard.py deleted file mode 100644 index 75b8332ac6d..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/tensorboard/tensorboard.py +++ /dev/null @@ -1,18 +0,0 @@ -if __name__ == '__main__': - import json - import argparse - - parser = argparse.ArgumentParser() - parser.add_argument('--logdir', type=str) - parser.add_argument('--output_path', type=str, default='/mlpipeline-ui-metadata.json') - - args = parser.parse_args() - - metadata = { - 'outputs' : [{ - 'type': 'tensorboard', - 'source': args.logdir, - }] - } - with open(args.output_path, 'w') as f: - json.dump(metadata, f) \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/train/Dockerfile b/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/train/Dockerfile deleted file mode 100644 index 8836137beab..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/train/Dockerfile +++ /dev/null @@ -1,3 +0,0 @@ -FROM tensorflow/tensorflow:2.0.0-py3 -COPY mnist.py . 
-CMD ["python", "./mnist.py"] \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/train/component.yaml b/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/train/component.yaml deleted file mode 100644 index ca29ed134d8..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/train/component.yaml +++ /dev/null @@ -1,13 +0,0 @@ -name: train mnist model -description: train mnist model -inputs: - - {name: storage_bucket, type: GCSPath} -outputs: - - {name: logdir, type: string} -implementation: - container: - image: train_image_location - command: ['python', '/mnist.py'] - args: ['--storage_bucket', {inputValue: storage_bucket}] - fileOutputs: - logdir: /logdir.txt \ No newline at end of file diff --git a/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/train/mnist.py b/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/train/mnist.py deleted file mode 100644 index f0a9230c0b4..00000000000 --- a/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample/train/mnist.py +++ /dev/null @@ -1,41 +0,0 @@ -def mnisttrain(storage_bucket:str): - import tensorflow as tf - import json - mnist = tf.keras.datasets.mnist - (x_train,y_train), (x_test, y_test) = mnist.load_data() - x_train, x_test = x_train/255.0, x_test/255.0 - - def create_model(): - return tf.keras.models.Sequential([ - tf.keras.layers.Flatten(input_shape = (28,28)), - tf.keras.layers.Dense(512, activation = 'relu'), - tf.keras.layers.Dropout(0.2), - tf.keras.layers.Dense(10, activation = 'softmax') - ]) - model = create_model() - model.compile(optimizer='adam', - loss='sparse_categorical_crossentropy', - metrics=['accuracy']) - import datetime - import os - log_dir = os.path.join(storage_bucket, datetime.datetime.now().strftime("%Y%m%d-%H%M%S")) - tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1) - - model.fit(x=x_train, - y=y_train, - epochs=5, - validation_data=(x_test, y_test), - callbacks=[tensorboard_callback]) - - print('At least tensorboard callbacks are correct') - with open('/logdir.txt', 'w') as f: - f.write(log_dir) - -if __name__ == '__main__': - import argparse - - parser = argparse.ArgumentParser() - parser.add_argument('--storage_bucket', type=str) - - args = parser.parse_args() - mnisttrain(args.storage_bucket) \ No newline at end of file diff --git a/samples/contrib/volume_ops/volumeop_dag.py b/samples/contrib/volume_ops/volumeop_dag.py deleted file mode 100644 index fe2f4f90de4..00000000000 --- a/samples/contrib/volume_ops/volumeop_dag.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import kfp.dsl as dsl - - -@dsl.pipeline( - name="Volume Op DAG", - description="The second example of the design doc." 
-) -def volume_op_dag(): - vop = dsl.VolumeOp( - name="create_pvc", - resource_name="my-pvc", - size="10Gi", - modes=dsl.VOLUME_MODE_RWM - ) - - step1 = dsl.ContainerOp( - name="step1", - image="library/bash:4.4.23", - command=["sh", "-c"], - arguments=["echo 1 | tee /mnt/file1"], - pvolumes={"/mnt": vop.volume} - ) - - step2 = dsl.ContainerOp( - name="step2", - image="library/bash:4.4.23", - command=["sh", "-c"], - arguments=["echo 2 | tee /mnt2/file2"], - pvolumes={"/mnt2": vop.volume} - ) - - step3 = dsl.ContainerOp( - name="step3", - image="library/bash:4.4.23", - command=["sh", "-c"], - arguments=["cat /mnt/file1 /mnt/file2"], - pvolumes={"/mnt": vop.volume.after(step1, step2)} - ) - - -if __name__ == "__main__": - import kfp.compiler as compiler - compiler.Compiler().compile(volume_op_dag, __file__ + ".tar.gz") diff --git a/samples/contrib/volume_ops/volumeop_parallel.py b/samples/contrib/volume_ops/volumeop_parallel.py deleted file mode 100644 index 10a0d8f8ae0..00000000000 --- a/samples/contrib/volume_ops/volumeop_parallel.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import kfp.dsl as dsl - - -@dsl.pipeline( - name="VolumeOp Parallel", - description="The first example of the design doc." -) -def volumeop_parallel(): - vop = dsl.VolumeOp( - name="create_pvc", - resource_name="my-pvc", - size="10Gi", - modes=dsl.VOLUME_MODE_RWM - ) - - step1 = dsl.ContainerOp( - name="step1", - image="library/bash:4.4.23", - command=["sh", "-c"], - arguments=["echo 1 | tee /mnt/file1"], - pvolumes={"/mnt": vop.volume} - ) - - step2 = dsl.ContainerOp( - name="step2", - image="library/bash:4.4.23", - command=["sh", "-c"], - arguments=["echo 2 | tee /common/file2"], - pvolumes={"/common": vop.volume} - ) - - step3 = dsl.ContainerOp( - name="step3", - image="library/bash:4.4.23", - command=["sh", "-c"], - arguments=["echo 3 | tee /mnt3/file3"], - pvolumes={"/mnt3": vop.volume} - ) - - -if __name__ == "__main__": - import kfp.compiler as compiler - compiler.Compiler().compile(volumeop_parallel, __file__ + ".tar.gz") diff --git a/samples/contrib/volume_ops/volumeop_sequential.py b/samples/contrib/volume_ops/volumeop_sequential.py deleted file mode 100644 index a3285cbc917..00000000000 --- a/samples/contrib/volume_ops/volumeop_sequential.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import kfp.dsl as dsl - - -@dsl.pipeline( - name="VolumeOp Sequential", - description="The third example of the design doc." -) -def volumeop_sequential(): - vop = dsl.VolumeOp( - name="mypvc", - resource_name="newpvc", - size="10Gi", - modes=dsl.VOLUME_MODE_RWM - ) - - step1 = dsl.ContainerOp( - name="step1", - image="library/bash:4.4.23", - command=["sh", "-c"], - arguments=["echo 1|tee /data/file1"], - pvolumes={"/data": vop.volume} - ) - - step2 = dsl.ContainerOp( - name="step2", - image="library/bash:4.4.23", - command=["sh", "-c"], - arguments=["cp /data/file1 /data/file2"], - pvolumes={"/data": step1.pvolume} - ) - - step3 = dsl.ContainerOp( - name="step3", - image="library/bash:4.4.23", - command=["cat", "/mnt/file1", "/mnt/file2"], - pvolumes={"/mnt": step2.pvolume} - ) - - -if __name__ == "__main__": - import kfp.compiler as compiler - compiler.Compiler().compile(volumeop_sequential, __file__ + ".tar.gz") diff --git a/samples/contrib/volume_snapshot_ops/volume_snapshotop_rokurl.py b/samples/contrib/volume_snapshot_ops/volume_snapshotop_rokurl.py deleted file mode 100644 index cf41361b3ba..00000000000 --- a/samples/contrib/volume_snapshot_ops/volume_snapshotop_rokurl.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -"""This sample uses Rok as an example to show case how VolumeOp accepts -annotations as an extra argument, and how we can use arbitrary PipelineParams -to determine their contents. - -The specific annotation is Rok-specific, but the use of annotations in such way -is widespread in storage systems integrated with K8s. -""" - -import kfp.dsl as dsl - - -@dsl.pipeline( - name="VolumeSnapshotOp RokURL", - description="The fifth example of the design doc." 
-) -def volume_snapshotop_rokurl(rok_url): - vop1 = dsl.VolumeOp( - name="create_volume_1", - resource_name="vol1", - size="1Gi", - annotations={"rok/origin": rok_url}, - modes=dsl.VOLUME_MODE_RWM - ) - - step1 = dsl.ContainerOp( - name="step1_concat", - image="library/bash:4.4.23", - command=["sh", "-c"], - arguments=["cat /data/file*| gzip -c >/data/full.gz"], - pvolumes={"/data": vop1.volume} - ) - - step1_snap = dsl.VolumeSnapshotOp( - name="create_snapshot_1", - resource_name="snap1", - volume=step1.pvolume - ) - - vop2 = dsl.VolumeOp( - name="create_volume_2", - resource_name="vol2", - data_source=step1_snap.snapshot, - size=step1_snap.outputs["size"] - ) - - step2 = dsl.ContainerOp( - name="step2_gunzip", - image="library/bash:4.4.23", - command=["gunzip", "-k", "/data/full.gz"], - pvolumes={"/data": vop2.volume} - ) - - step2_snap = dsl.VolumeSnapshotOp( - name="create_snapshot_2", - resource_name="snap2", - volume=step2.pvolume - ) - - vop3 = dsl.VolumeOp( - name="create_volume_3", - resource_name="vol3", - data_source=step2_snap.snapshot, - size=step2_snap.outputs["size"] - ) - - step3 = dsl.ContainerOp( - name="step3_output", - image="library/bash:4.4.23", - command=["cat", "/data/full"], - pvolumes={"/data": vop3.volume} - ) - - -if __name__ == "__main__": - import kfp.compiler as compiler - compiler.Compiler().compile(volume_snapshotop_rokurl, __file__ + ".tar.gz") diff --git a/samples/test/README.md b/samples/test/README.md deleted file mode 100644 index 220a6f54477..00000000000 --- a/samples/test/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# Sample Test - -Refer to [V2 samples test documentation](https://github.com/kubeflow/pipelines/tree/master/backend/src/v2/test) for more details. - -## Test Samples - -Pipeline samples in this folder are built for testing purposes only. diff --git a/samples/test/after.py b/samples/test/after.py deleted file mode 100644 index 5a776a3910f..00000000000 --- a/samples/test/after.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright 2020 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from kfp import components -from kfp import dsl -from kfp import compiler - -component_op = components.load_component_from_text(""" -name: Print Text -inputs: -- {name: text, type: String} -implementation: - container: - image: alpine - command: - - sh - - -c - - | - set -e -x - echo "$0" - - {inputValue: text} -""") - - -@dsl.pipeline(name='pipeline-with-after') -def my_pipeline(): - task1 = component_op(text='1st task') - task2 = component_op(text='2nd task').after(task1) - task3 = component_op(text='3rd task').after(task1, task2) - - -if __name__ == '__main__': - compiler.Compiler().compile( - pipeline_func=my_pipeline, output_path=__file__ + '.json') diff --git a/samples/test/after_test.py b/samples/test/after_test.py deleted file mode 100644 index a53410c64c3..00000000000 --- a/samples/test/after_test.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import kfp -from kfp.samples.test.utils import run_pipeline_func -from kfp.samples.test.utils import TestCase - -from .after import my_pipeline - -run_pipeline_func([ - TestCase(pipeline_func=my_pipeline), -]) diff --git a/samples/test/config-integration.yaml b/samples/test/config-integration.yaml deleted file mode 100644 index 279be3c1e79..00000000000 --- a/samples/test/config-integration.yaml +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# kubeflow-pipelines-samples-v2 test config -# Documentation: https://github.com/kubeflow/pipelines/tree/master/v2/test -# -#### config format -# -# Each item in the list corresponds to test for one sample. -# -# The field `path` corresponds to the test's python module path -# e.g. if folder path is `samples/test/fail_test.py`, then module path is -# `samples.test.fail_test`. - -# Integration Samples -- name: dataflow - path: samples.core.dataflow.dataflow_test -# TODO: reenable when kfp dependency is updated -# - name: parameterized_tfx_oss -# path: samples.core.parameterized_tfx_oss.parameterized_tfx_oss_test - -# The following samples were in integration test, but we disabled them. -# -# Disabled ai_platform test because we are developing new component. -# - ai_platform -# TODO(#4361): Re-enable the kubeflow_tf_serving sample test after -# fixed the sample -# - kubeflow_tf_serving -# Disable container_build because this approach will be deprecated soon. 
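As an aside, the config format described in the comments above is just a YAML list of `name`/`path` pairs, where `path` is the dotted module path of a test file. A minimal sketch of how such a file could be consumed, assuming PyYAML is available; the real harness documented at the link in the header may work differently:

```python
# Illustration only: load a sample-test config and import each test module.
# Assumes PyYAML is installed; the actual test harness may differ.
import importlib

import yaml

with open("samples/test/config-integration.yaml") as f:
    test_cases = yaml.safe_load(f) or []

for case in test_cases:
    # Each entry maps a human-readable name to a dotted module path,
    # e.g. name: dataflow, path: samples.core.dataflow.dataflow_test
    print(f"collecting sample test: {case['name']}")
    # Importing the module triggers its module-level run_pipeline_func(...) call.
    importlib.import_module(case["path"])
```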
-# - container_build diff --git a/samples/test/config.yaml b/samples/test/config.yaml deleted file mode 100644 index 20406d595bf..00000000000 --- a/samples/test/config.yaml +++ /dev/null @@ -1,142 +0,0 @@ -# Copyright 2021-2022 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# kubeflow-pipelines-samples-v2 test config -# Documentation: https://github.com/kubeflow/pipelines/tree/master/v2/test -# -#### config format -# -# Each item in the list corresponds to test for one sample. -# -# The field `path` corresponds to the test's python module path -# e.g. if folder path is `samples/test/fail_test.py`, then module path is -# `samples.test.fail_test`. - -# Core Samples -- name: condition - path: samples.core.condition.condition_test -- name: nested_condition - path: samples.core.condition.nested_condition_test -- name: exit_handler - path: samples.core.exit_handler.exit_handler_test -- name: output_a_directory - path: samples.core.output_a_directory.output_a_directory_test -- name: loop_output - path: samples.core.loop_output.loop_output_test -- name: loop_static - path: samples.core.loop_static.loop_static_test -- name: loop_parameter - path: samples.core.loop_parameter.loop_parameter_test -- name: loop_parallelism - path: samples.core.loop_parallelism.loop_parallelism_test -- name: resource_spec - path: samples.core.resource_spec.resource_spec_test -- name: runtime_resource_spec - path: samples.core.resource_spec.runtime_resource_request_test -- name: xgboost_sample - path: samples.core.XGBoost.xgboost_sample_test -- name: use_run_id - path: samples.core.use_run_info.use_run_id_test -- name: multiple_outputs - path: samples.core.multiple_outputs.multiple_outputs_test -- name: lightweight_component - path: samples.core.lightweight_component.lightweight_component_test -- name: dsl_static_type_checking - path: samples.core.dsl_static_type_checking.dsl_static_type_checking_test -- name: pipeline_transformers - path: samples.core.pipeline_transformers.pipeline_transformers_test -- name: secret - path: samples.core.secret.secret_test -- name: sidecar - path: samples.core.sidecar.sidecar_test -- name: execution_order - path: samples.core.execution_order.execution_order_test -- name: imagepullsecrets - path: samples.core.imagepullsecrets.imagepullsecrets_test -- name: retry - path: samples.core.retry.retry_test -- name: preemptible_tpu_gpu - path: samples.core.preemptible_tpu_gpu.preemptible_tpu_gpu_test -- name: volume_snapshot_ops - path: samples.core.volume_snapshot_ops.volume_snapshot_ops_test -- name: resource_ops - path: samples.core.resource_ops.resource_ops_test -- name: caching - path: samples.core.caching.caching_test -- name: parallelism_sub_dag - path: samples.core.parallelism_sub_dag.parallelism_sub_dag_test - -# Test Samples -- name: fail - path: samples.test.fail_test -- name: fail_parameter_value_missing - path: samples.test.fail_parameter_value_missing_test -- name: two_step - path: samples.test.two_step_test -- name: two_step_with_uri_placeholder - path: 
samples.test.two_step_with_uri_placeholder_test -# TODO(capri-xiyue): re-enable cache v2 tests -# - name: cache_v2_compatible -# path: samples.test.cache_v2_compatible_test -- name: after - path: samples.test.after_test -- name: legacy_data_passing - path: samples.test.legacy_data_passing -- name: parameter_with_format - path: samples.test.parameter_with_format_test -- name: reused_component - path: samples.test.reused_component_test -- name: legacy_exit_handler - path: samples.test.legacy_exit_handler_test -- name: tensorboard_minio - path: samples.core.visualization.tensorboard_minio_test -- name: lightweight_python_functions_v2_with_outputs - path: samples.test.lightweight_python_functions_v2_with_outputs_test -- name: lightweight_python_functions_v2_pipeline - path: samples.test.lightweight_python_functions_v2_pipeline_test -# TODO: uncomment when support for concat placeholder struct syntax is added in kfp v2 beta.3 -# - name: placeholder_concat -# path: samples.test.placeholder_concat_test -- name: placeholder_if - path: samples.test.placeholder_if_test -- name: metrics_visualization_v1 - path: samples.test.metrics_visualization_v1_test -- name: metrics_visualization_v2 - path: samples.test.metrics_visualization_v2_test - -# V2 Samples -- name: hello_world - path: samples.v2.hello_world_test -- name: producer_consumer_param - path: samples.v2.producer_consumer_param_test -- name: pipeline_with_importer - path: samples.v2.pipeline_with_importer_test -- name: pipeline_container_no_input - path: samples.v2.pipeline_container_no_input_test -- name: two_step_pipeline_containerized - path: samples.v2.two_step_pipeline_containerized_test -- name: component_with_optional_inputs - path: samples.v2.component_with_optional_inputs_test -- name: pipeline_with_env - path: samples.v2.pipeline_with_env_test -- name: pipeline_with_volume - path: samples.v2.pipeline_with_volume_test -- name: pipeline_with_secret_as_volume - path: samples.v2.pipeline_with_secret_as_volume -- name: pipeline_with_secret_as_env - path: samples.v2.pipeline_with_secret_as_env -# TODO(capri-xiyue): Re-enable after figuring out V2 Engine -# and protobuf.Value support. -# - name: cache_v2 -# path: samples.v2.cache_test diff --git a/samples/test/fail_test.py b/samples/test/fail_test.py deleted file mode 100644 index 27f4ce11c35..00000000000 --- a/samples/test/fail_test.py +++ /dev/null @@ -1,60 +0,0 @@ -o # Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Fail pipeline.""" - -from __future__ import annotations - -import unittest - -import kfp -from kfp.samples.test.utils import KfpTask -from kfp.samples.test.utils import run_pipeline_func -from kfp.samples.test.utils import TaskInputs -from kfp.samples.test.utils import TaskOutputs -from kfp.samples.test.utils import TestCase -import kfp_server_api -from ml_metadata.proto import Execution - -from .fail_v2 import fail_pipeline as fail_v2_pipeline - - -def verify(run, **kwargs): - assert run.status == 'Failed' - - -def verify_v2(t: unittest.TestCase, run: kfp_server_api.ApiRun, - tasks: dict[str, KfpTask], **kwargs): - t.assertEqual(run.status, 'Failed') - t.assertEqual( - { - 'fail': - KfpTask( - name='fail', - type='system.ContainerExecution', - # TODO(Bobgy): fix v2 engine to properly publish FAILED state. - state=Execution.State.RUNNING, - inputs=TaskInputs(parameters={}, artifacts=[]), - outputs=TaskOutputs(parameters={}, artifacts=[]), - ) - }, - tasks, - ) - - -run_pipeline_func([ - TestCase( - pipeline_func=fail_v2_pipeline, - verify_func=verify_v2, - ), -]) diff --git a/samples/test/lightweight_python_functions_v2_pipeline_test.py b/samples/test/lightweight_python_functions_v2_pipeline_test.py deleted file mode 100644 index 1869ddfa543..00000000000 --- a/samples/test/lightweight_python_functions_v2_pipeline_test.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from pprint import pprint -import unittest - -import kfp -from kfp.samples.test.utils import KfpMlmdClient -from kfp.samples.test.utils import run_pipeline_func -from kfp.samples.test.utils import TestCase -import kfp_server_api -from ml_metadata.proto import Execution - -from .lightweight_python_functions_v2_pipeline import pipeline - - -def verify(run: kfp_server_api.ApiRun, mlmd_connection_config, **kwargs): - t = unittest.TestCase() - t.maxDiff = None # we always want to see full diff - t.assertEqual(run.status, 'Succeeded') - client = KfpMlmdClient(mlmd_connection_config=mlmd_connection_config) - tasks = client.get_tasks(run_id=run.id) - - task_names = [*tasks.keys()] - t.assertCountEqual(task_names, ['preprocess', 'train'], 'task names') - pprint(tasks) - - preprocess = tasks['preprocess'] - train = tasks['train'] - pprint(preprocess.get_dict()) - t.assertEqual( - { - 'inputs': { - 'parameters': { - 'message': 'message', - } - }, - 'name': 'preprocess', - 'outputs': { - 'artifacts': [{ - 'metadata': { - 'display_name': 'output_dataset_one' - }, - 'name': 'output_dataset_one', - 'type': 'system.Dataset' - }, { - 'metadata': { - 'display_name': 'output_dataset_two_path' - }, - 'name': 'output_dataset_two_path', - 'type': 'system.Dataset' - }], - 'parameters': { - 'output_bool_parameter_path': True, - 'output_dict_parameter_path': { - "A": 1.0, - "B": 2.0 - }, - 'output_list_parameter_path': ["a", "b", "c"], - 'output_parameter_path': 'message' - } - }, - 'type': 'system.ContainerExecution', - 'state': Execution.State.COMPLETE, - }, - preprocess.get_dict(), - ) - t.assertEqual( - { - 'inputs': { - 'artifacts': [{ - 'metadata': { - 'display_name': 'output_dataset_one' - }, - 'name': 'dataset_one_path', - 'type': 'system.Dataset' - }, { - 'metadata': { - 'display_name': 'output_dataset_two_path' - }, - 'name': 'dataset_two', - 'type': 'system.Dataset' - }], - 'parameters': { - 'input_bool': True, - 'input_dict': { - "A": 1.0, - "B": 2.0, - }, - 'input_list': ["a", "b", "c"], - 'message': 'message' - } - }, - 'name': 'train', - 'outputs': { - 'artifacts': [{ - 'metadata': { - 'display_name': 'model', - 'accuracy': 0.9, - }, - 'name': 'model', - 'type': 'system.Model' - }], - }, - 'type': 'system.ContainerExecution', - 'state': Execution.State.COMPLETE, - }, - train.get_dict(), - ) - - -run_pipeline_func([ - TestCase( - pipeline_func=pipeline, - verify_func=verify, - ), -]) diff --git a/samples/test/lightweight_python_functions_v2_with_outputs_test.py b/samples/test/lightweight_python_functions_v2_with_outputs_test.py deleted file mode 100644 index 616b1a148d7..00000000000 --- a/samples/test/lightweight_python_functions_v2_with_outputs_test.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os -from pprint import pprint -import unittest - -from kfp.samples.test.utils import KfpMlmdClient -from kfp.samples.test.utils import run_pipeline_func -from kfp.samples.test.utils import TestCase -import kfp_server_api -from minio import Minio - -from .lightweight_python_functions_v2_with_outputs import pipeline - - -def verify(run: kfp_server_api.ApiRun, mlmd_connection_config, **kwargs): - t = unittest.TestCase() - t.maxDiff = None # we always want to see full diff - t.assertEqual(run.status, 'Succeeded') - client = KfpMlmdClient(mlmd_connection_config=mlmd_connection_config) - tasks = client.get_tasks(run_id=run.id) - pprint(tasks) - - output_artifact = tasks['output-artifact'] - output = [ - a for a in output_artifact.outputs.artifacts if a.name == 'Output' - ][0] - pprint(output) - - host = os.environ['MINIO_SERVICE_SERVICE_HOST'] - port = os.environ['MINIO_SERVICE_SERVICE_PORT'] - minio = Minio( - f'{host}:{port}', - access_key='minio', - secret_key='minio123', - secure=False) - bucket, key = output.uri[len('minio://'):].split('/', 1) - print(f'bucket={bucket} key={key}') - response = minio.get_object(bucket, key) - data = response.read().decode('UTF-8') - t.assertEqual(data, 'firstsecond\nfirstsecond\nfirstsecond') - - -run_pipeline_func([ - TestCase(pipeline_func=pipeline,), -]) diff --git a/samples/test/metrics_visualization_v2_test.py b/samples/test/metrics_visualization_v2_test.py deleted file mode 100644 index 4f37869b114..00000000000 --- a/samples/test/metrics_visualization_v2_test.py +++ /dev/null @@ -1,217 +0,0 @@ -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import annotations - -import unittest -import unittest.mock as mock - -import kfp -from kfp.samples.test.utils import KfpTask -from kfp.samples.test.utils import run_pipeline_func -from kfp.samples.test.utils import TestCase -import kfp_server_api -from ml_metadata.proto import Execution - -from .metrics_visualization_v2 import metrics_visualization_pipeline - - -def verify(t: unittest.TestCase, run: kfp_server_api.ApiRun, - tasks: dict[str, KfpTask], **kwargs): - t.assertEqual(run.status, 'Succeeded') - task_names = [*tasks.keys()] - t.assertCountEqual(task_names, [ - 'wine-classification', 'iris-sgdclassifier', 'digit-classification', - 'html-visualization', 'markdown-visualization' - ], 'task names') - - wine_classification = tasks['wine-classification'] - iris_sgdclassifier = tasks['iris-sgdclassifier'] - digit_classification = tasks['digit-classification'] - html_visualization = tasks['html-visualization'] - markdown_visualization = tasks['markdown-visualization'] - - t.assertEqual( - { - 'name': 'wine-classification', - 'inputs': {}, - 'outputs': { - 'artifacts': [{ - 'metadata': { - 'display_name': 'metrics', - 'confidenceMetrics': { - 'list': [{ - 'confidenceThreshold': 2.0, - 'falsePositiveRate': 0.0, - 'recall': 0.0 - }, { - 'confidenceThreshold': 1.0, - 'falsePositiveRate': 0.0, - 'recall': 0.33962264150943394 - }, { - 'confidenceThreshold': 0.9, - 'falsePositiveRate': 0.0, - 'recall': 0.6037735849056604 - }, { - 'confidenceThreshold': 0.8, - 'falsePositiveRate': 0.0, - 'recall': 0.8490566037735849 - }, { - 'confidenceThreshold': 0.6, - 'falsePositiveRate': 0.0, - 'recall': 0.8867924528301887 - }, { - 'confidenceThreshold': 0.5, - 'falsePositiveRate': 0.0125, - 'recall': 0.9245283018867925 - }, { - 'confidenceThreshold': 0.4, - 'falsePositiveRate': 0.075, - 'recall': 0.9622641509433962 - }, { - 'confidenceThreshold': 0.3, - 'falsePositiveRate': 0.0875, - 'recall': 1.0 - }, { - 'confidenceThreshold': 0.2, - 'falsePositiveRate': 0.2375, - 'recall': 1.0 - }, { - 'confidenceThreshold': 0.1, - 'falsePositiveRate': 0.475, - 'recall': 1.0 - }, { - 'confidenceThreshold': 0.0, - 'falsePositiveRate': 1.0, - 'recall': 1.0 - }] - } - }, - 'name': 'metrics', - 'type': 'system.ClassificationMetrics' - }], - }, - 'type': 'system.ContainerExecution', - 'state': Execution.State.COMPLETE, - }, wine_classification.get_dict()) - t.assertEqual( - { - 'inputs': { - 'parameters': { - 'test_samples_fraction': 0.3 - } - }, - 'name': 'iris-sgdclassifier', - 'outputs': { - 'artifacts': [{ - 'metadata': { - 'display_name': 'metrics', - 'confusionMatrix': { - 'struct': { - 'annotationSpecs': [{ - 'displayName': 'Setosa' - }, { - 'displayName': 'Versicolour' - }, { - 'displayName': 'Virginica' - }], - 'rows': [ - { # these numbers can be random during execution - 'row': [mock.ANY, mock.ANY, mock.ANY] - }, - { - 'row': [mock.ANY, mock.ANY, mock.ANY] - }, - { - 'row': [mock.ANY, mock.ANY, mock.ANY] - } - ] - } - } - }, - 'name': 'metrics', - 'type': 'system.ClassificationMetrics' - }], - }, - 'type': 'system.ContainerExecution', - 'state': Execution.State.COMPLETE, - }, - iris_sgdclassifier.get_dict()) - rows = iris_sgdclassifier.get_dict()['outputs']['artifacts'][0]['metadata'][ - 'confusionMatrix']['struct']['rows'] - for i, row in enumerate(rows): - for j, item in enumerate(row['row']): - t.assertIsInstance( - item, float, - f'value of confusion matrix row {i}, col {j} is not a number') - - t.assertEqual( - { - 'name': 'digit-classification', - 'inputs': {}, - 'outputs': { - 'artifacts': 
[{ - 'metadata': { - 'display_name': 'metrics', - 'accuracy': 92.0, - }, - 'name': 'metrics', - 'type': 'system.Metrics' - }], - }, - 'type': 'system.ContainerExecution', - 'state': Execution.State.COMPLETE, - }, digit_classification.get_dict()) - - t.assertEqual( - { - 'name': 'html-visualization', - 'inputs': {}, - 'outputs': { - 'artifacts': [{ - 'metadata': { - 'display_name': 'html_artifact' - }, - 'name': 'html_artifact', - 'type': 'system.HTML' - }], - }, - 'state': Execution.State.COMPLETE, - 'type': 'system.ContainerExecution' - }, html_visualization.get_dict()) - - t.assertEqual( - { - 'name': 'markdown-visualization', - 'inputs': {}, - 'outputs': { - 'artifacts': [{ - 'metadata': { - 'display_name': 'markdown_artifact' - }, - 'name': 'markdown_artifact', - 'type': 'system.Markdown' - }], - }, - 'state': Execution.State.COMPLETE, - 'type': 'system.ContainerExecution' - }, markdown_visualization.get_dict()) - - -run_pipeline_func([ - TestCase( - pipeline_func=metrics_visualization_pipeline, - verify_func=verify, - ), -]) diff --git a/samples/test/placeholder_concat.py b/samples/test/placeholder_concat.py deleted file mode 100644 index 92ceab2996a..00000000000 --- a/samples/test/placeholder_concat.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2020 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from kfp import components -from kfp import dsl -import kfp.compiler as compiler - -component_op = components.load_component_from_text(""" -name: Component with concat placeholder -inputs: -- {name: input_one, type: String} -- {name: input_two, type: String} -implementation: - container: - image: registry.k8s.io/busybox - command: - - sh - - -ec - args: - - echo "$0" > /tmp/test && [[ "$0" == 'one+two=three' ]] - - concat: [{inputValue: input_one}, '+', {inputValue: input_two}, '=three'] -""") - - -@dsl.pipeline(name='one-step-pipeline-with-concat-placeholder') -def pipeline_with_concat_placeholder(): - component = component_op(input_one='one', input_two='two') diff --git a/samples/test/placeholder_concat_test.py b/samples/test/placeholder_concat_test.py deleted file mode 100644 index 924217f4c12..00000000000 --- a/samples/test/placeholder_concat_test.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import kfp -from kfp.samples.test.utils import run_pipeline_func -from kfp.samples.test.utils import TestCase - -from .placeholder_concat import pipeline_with_concat_placeholder - - -def verify(run, run_id: str, **kwargs): - assert run.status == 'Succeeded' - # TODO(Bobgy): verify echo output - # TODO(v2-compatible): support IR placeholder like {{$.inputs.parameters['input_prefix']}} - - -run_pipeline_func([ - TestCase( - pipeline_func=pipeline_with_concat_placeholder, - verify_func=verify, - ), -]) diff --git a/samples/test/placeholder_if_test.py b/samples/test/placeholder_if_test.py deleted file mode 100644 index bed2b829e42..00000000000 --- a/samples/test/placeholder_if_test.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import kfp -from kfp.samples.test.utils import run_pipeline_func -from kfp.samples.test.utils import TestCase - -from .placeholder_if_v2 import pipeline_both as pipeline_both_v2 -from .placeholder_if_v2 import pipeline_none as pipeline_none_v2 - -run_pipeline_func([ - TestCase(pipeline_func=pipeline_none_v2), - TestCase(pipeline_func=pipeline_both_v2), -]) diff --git a/samples/test/placeholder_if_v2.py b/samples/test/placeholder_if_v2.py deleted file mode 100644 index 2f06aed5fa2..00000000000 --- a/samples/test/placeholder_if_v2.py +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright 2020,2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from kfp import dsl, components - -component_op = components.load_component_from_text(''' -name: Component with optional inputs -inputs: -- {name: required_input, type: String, optional: false} -- {name: optional_input_1, type: String, optional: true} -- {name: optional_input_2, type: String, optional: true} -implementation: - container: - image: registry.k8s.io/busybox - command: - - echo - args: - - --arg0 - - {inputValue: required_input} - - if: - cond: - isPresent: optional_input_1 - then: - - --arg1 - - {inputValue: optional_input_1} - - if: - cond: - isPresent: optional_input_2 - then: - - --arg2 - - {inputValue: optional_input_2} - else: - - --arg2 - - 'default value' -''') - - -@dsl.pipeline(name='one-step-pipeline-with-if-placeholder-supply-both') -def pipeline_both(input0: str = 'input0', - input1: str = 'input1', - input2: str = 'input2'): - # supply both optional_input_1 and optional_input_2 - component = component_op( - required_input=input0, optional_input_1=input1, optional_input_2=input2) - - -@dsl.pipeline(name='one-step-pipeline-with-if-placeholder-supply-none') -def pipeline_none(input0: str = 'input0'): - # supply neither optional_input_1 nor optional_input_2 - # Note, KFP only supports compile-time optional arguments, e.g. it's not - # supported to write a pipeline that supplies both inputs and pass None - # at runtime -- in that case, the input arguments will be interpreted as - # the raw text "None". - component = component_op(required_input=input0) diff --git a/samples/test/two_step_with_uri_placeholder_test.py b/samples/test/two_step_with_uri_placeholder_test.py deleted file mode 100644 index 215b778732f..00000000000 --- a/samples/test/two_step_with_uri_placeholder_test.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Two step v2-compatible pipeline with URI placeholders.""" - -from pprint import pprint -from typing import Dict -import unittest - -import kfp -from kfp.samples.test.utils import KfpMlmdClient -from kfp.samples.test.utils import KfpTask -from kfp.samples.test.utils import run_pipeline_func -from kfp.samples.test.utils import TestCase -import kfp_server_api -from ml_metadata.proto import Execution - -from .two_step_with_uri_placeholder import two_step_with_uri_placeholder - - -def verify_tasks(t: unittest.TestCase, tasks: Dict[str, KfpTask]): - t.assertCountEqual(tasks.keys(), ['read-from-gcs', 'write-to-gcs'], - 'task names') - - write_task = tasks['write-to-gcs'] - read_task = tasks['read-from-gcs'] - - t.assertEqual( - { - 'name': 'write-to-gcs', - 'inputs': { - 'parameters': { - 'msg': 'Hello world!', - } - }, - 'outputs': { - 'artifacts': [{ - 'metadata': { - 'display_name': 'artifact' - }, - 'name': 'artifact', - 'type': 'system.Artifact' - }], - }, - 'type': 'system.ContainerExecution', - 'state': Execution.State.COMPLETE, - }, write_task.get_dict()) - t.assertEqual( - { - 'name': 'read-from-gcs', - 'inputs': { - 'artifacts': [{ - 'metadata': { - 'display_name': 'artifact' - }, - 'name': 'artifact', - 'type': 'system.Artifact', - }], - }, - 'outputs': {}, - 'type': 'system.ContainerExecution', - 'state': Execution.State.COMPLETE, - }, read_task.get_dict()) - - -def verify(run: kfp_server_api.ApiRun, mlmd_connection_config, **kwargs): - t = unittest.TestCase() - t.maxDiff = None # we always want to see full diff - t.assertEqual(run.status, 'Succeeded') - client = KfpMlmdClient(mlmd_connection_config=mlmd_connection_config) - tasks = client.get_tasks(run_id=run.id) - verify_tasks(t, tasks) - - -if __name__ == '__main__': - run_pipeline_func([ - TestCase( - pipeline_func=two_step_with_uri_placeholder, - verify_func=verify, - ), - ]) diff --git a/samples/test/utils/README.md b/samples/test/utils/README.md deleted file mode 100644 index 2c537d53e62..00000000000 --- a/samples/test/utils/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# KFP sample test utils - -This is a python package for testing Kubeflow Pipelines (KFP) samples. - -The package will be imported as `kfp.samples.test.utils`. diff --git a/samples/test/utils/kfp/samples/test/utils.py b/samples/test/utils/kfp/samples/test/utils.py deleted file mode 100644 index 2cb18c29ba2..00000000000 --- a/samples/test/utils/kfp/samples/test/utils.py +++ /dev/null @@ -1,620 +0,0 @@ -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import annotations - -from dataclasses import asdict -from dataclasses import dataclass -import json -import logging -import os -from pprint import pprint -import random -import subprocess -import sys -import tempfile -import time -from typing import Callable, Optional -import unittest - -from google.protobuf.json_format import MessageToDict -import kfp -import kfp.compiler -import kfp_server_api -from ml_metadata import metadata_store -from ml_metadata.metadata_store.metadata_store import ListOptions -from ml_metadata.proto import Event -from ml_metadata.proto import Execution -from ml_metadata.proto import metadata_store_pb2 -from nbconvert import PythonExporter -import nbformat - -MINUTE = 60 - -logger = logging.getLogger('kfp.test.sampleutils') -logger.setLevel('INFO') - - -# Add **kwargs, so that when new arguments are added, this doesn't fail for -# unknown arguments. -def _default_verify_func( - run_id: int, run: kfp_server_api.ApiRun, - mlmd_connection_config: metadata_store_pb2.MetadataStoreClientConfig, - **kwargs): - assert run.status == 'Succeeded' - - -def NEEDS_A_FIX(run_id, run, **kwargs): - """confirms a sample test case is failing and it needs to be fixed.""" - assert run.status == 'Failed' - - -Verifier = Callable[[ - int, kfp_server_api.ApiRun, kfp_server_api.ApiRunDetail, metadata_store_pb2 - .MetadataStoreClientConfig -], None] - - -@dataclass -class TestCase: - """Test case for running a KFP sample. One of pipeline_func or - pipeline_file is required. - - :param run_pipeline: when False, it means the test case just runs the python file. - :param pipeline_file_compile_path: when specified, the pipeline file can compile - to a pipeline package by itself, and the package is expected to be stored - in pipeline_file_compile_path. - """ - pipeline_func: Optional[Callable] = None - pipeline_file: Optional[str] = None - pipeline_file_compile_path: Optional[str] = None - enable_caching: bool = False - arguments: Optional[dict[str, str]] = None - verify_func: Verifier = _default_verify_func - run_pipeline: bool = True - timeout_mins: float = 20.0 - - -def run_pipeline_func(test_cases: list[TestCase]): - """Run a pipeline function and wait for its result. - - :param pipeline_func: pipeline function to run - :type pipeline_func: function - """ - - if not test_cases: - raise ValueError('No test cases!') - - def test_wrapper( - run_pipeline: Callable[[Callable, str, str, bool, dict, bool], - kfp_server_api.ApiRunDetail], - mlmd_connection_config: metadata_store_pb2.MetadataStoreClientConfig, - ): - for case in test_cases: - pipeline_name = None - if (not case.pipeline_file) and (not case.pipeline_func): - raise ValueError( - 'TestCase must have exactly one of pipeline_file or pipeline_func specified, got none.' - ) - if case.pipeline_file and case.pipeline_func: - raise ValueError( - 'TestCase must have exactly one of pipeline_file or pipeline_func specified, got both.' - ) - if case.pipeline_func: - # TODO: remove accessing of protected member - pipeline_name = getattr( - case.pipeline_func, 'name', - getattr(case.pipeline_func, '_component_human_name', - 'pipeline')) - else: - pipeline_name = os.path.basename(case.pipeline_file) - if not case.run_pipeline: - if not case.pipeline_file: - raise ValueError( - 'TestCase.run_pipeline = False can only be specified when used together with pipeline_file.' 
- ) - - run_detail = run_pipeline( - pipeline_func=case.pipeline_func, - pipeline_file=case.pipeline_file, - pipeline_file_compile_path=case.pipeline_file_compile_path, - enable_caching=case.enable_caching, - arguments=case.arguments or {}, - dry_run=not case.run_pipeline, - timeout=case.timeout_mins * MINUTE) - if not case.run_pipeline: - # There is no run_detail. - print(f'Test case {pipeline_name} passed!') - continue - pipeline_runtime: kfp_server_api.ApiPipelineRuntime = run_detail.pipeline_runtime - argo_workflow = json.loads(pipeline_runtime.workflow_manifest) - argo_workflow_name = argo_workflow.get('metadata').get('name') - print(f'argo workflow name: {argo_workflow_name}') - t = unittest.TestCase() - t.maxDiff = None # we always want to see full diff - tasks = {} - client = None - case.verify_func( - run=run_detail.run, - run_detail=run_detail, - run_id=run_detail.run.id, - mlmd_connection_config=mlmd_connection_config, - argo_workflow_name=argo_workflow_name, - t=t, - tasks=tasks, - client=client, - ) - print('OK: all test cases passed!') - - _run_test(test_wrapper) - - -def debug_verify(run_id: str, verify_func: Verifier): - """Debug a verify function quickly using MLMD data from an existing KFP run - ID.""" - t = unittest.TestCase() - t.maxDiff = None # we always want to see full diff - client = KfpMlmdClient() - tasks = client.get_tasks(run_id=run_id) - pprint(tasks) - - verify_func( - run=kfp_server_api.ApiRun(id=run_id, status='Succeeded'), - run_id=run_id, - t=t, - tasks=tasks, - client=client, - ) - - -def _retry_with_backoff(fn: Callable, retries=3, backoff_in_seconds=1): - i = 0 - while True: - try: - return fn() - except Exception as e: - if i >= retries: - print(f"Failed after {retries} retries:") - raise - else: - print(e) - sleep = (backoff_in_seconds * 2**i + random.uniform(0, 1)) - print(" Retry after ", str(sleep) + "s") - time.sleep(sleep) - i += 1 - - -def _run_test(callback): - - def main( - pipeline_root: Optional[str] = None, # example - experiment: str = 'v2_sample_test_samples', - metadata_service_host: Optional[str] = None, - metadata_service_port: int = 8080, - ): - """Test file CLI entrypoint used by Fire. To configure KFP endpoint, - configure env vars following: - https://www.kubeflow.org/docs/components/pipelines/user-guides/core-functions/connect-api/#configure-sdk-client-by-environment-variables. KFP UI endpoint can - be configured by KF_PIPELINES_UI_ENDPOINT env var. - - :param pipeline_root: pipeline root that holds intermediate - artifacts, example gs://your-bucket/path/to/workdir. - :type pipeline_root: str, optional - :param experiment: experiment the run is added to, defaults to 'v2_sample_test_samples' - :type experiment: str, optional - :param metadata_service_host: host for metadata grpc service, defaults to METADATA_GRPC_SERVICE_HOST or 'metadata-grpc-service' - :type metadata_service_host: str, optional - :param metadata_service_port: port for metadata grpc service, defaults to 8080 - :type metadata_service_port: int, optional - """ - - if pipeline_root is None: - pipeline_root = os.getenv('KFP_PIPELINE_ROOT') - if not pipeline_root: - pipeline_root = os.getenv('KFP_OUTPUT_DIRECTORY') - if pipeline_root: - logger.warning( - f'KFP_OUTPUT_DIRECTORY env var is left for backward compatibility, please use KFP_PIPELINE_ROOT instead.' 
- ) - logger.info(f'KFP_PIPELINE_ROOT={pipeline_root}') - if metadata_service_host is None: - metadata_service_host = os.getenv('METADATA_GRPC_SERVICE_HOST', - 'metadata-grpc-service') - logger.info(f'METADATA_GRPC_SERVICE_HOST={metadata_service_host}') - client = kfp.Client() - # TODO(Bobgy): avoid using private fields when getting loaded config - kfp_endpoint = client._existing_config.host - kfp_ui_endpoint = client._uihost - logger.info(f'KF_PIPELINES_ENDPOINT={kfp_endpoint}') - if kfp_ui_endpoint != kfp_endpoint: - logger.info(f'KF_PIPELINES_UI_ENDPOINT={kfp_ui_endpoint}') - - def run_pipeline( - pipeline_func: Optional[Callable], - pipeline_file: Optional[str], - pipeline_file_compile_path: Optional[str], - enable_caching: bool = False, - arguments: Optional[dict] = None, - dry_run: bool = False, # just compile the pipeline without running it - timeout: float = 20 * MINUTE, - ) -> kfp_server_api.ApiRunDetail: - arguments = arguments or {} - - def _create_run(): - return run_v2_pipeline( - client=client, - fn=pipeline_func, - file=pipeline_file, - pipeline_root=pipeline_root, - enable_caching=enable_caching, - arguments={ - **arguments, - }, - ) - - run_result = _retry_with_backoff(fn=_create_run) - if dry_run: - # There is no run_result when dry_run. - return - print("Run details page URL:") - print(f"{kfp_ui_endpoint}/#/runs/details/{run_result.run_id}") - run_detail = run_result.wait_for_run_completion(timeout) - # Hide detailed information for pretty printing - workflow_spec = run_detail.run.pipeline_spec.workflow_manifest - workflow_manifest = run_detail.pipeline_runtime.workflow_manifest - run_detail.run.pipeline_spec.workflow_manifest = None - run_detail.pipeline_runtime.workflow_manifest = None - pprint(run_detail) - # Restore workflow manifest, because test cases may use it - run_detail.run.pipeline_spec.workflow_manifest = workflow_spec - run_detail.pipeline_runtime.workflow_manifest = workflow_manifest - return run_detail - - # When running locally, port forward MLMD grpc service to localhost:8080 by: - # - # 1. NAMESPACE=kubeflow kubectl port-forward svc/metadata-grpc-service 8080:8080 -n $NAMESPACE - # 2. Configure env var METADATA_GRPC_SERVICE_HOST=localhost. 
- mlmd_connection_config = metadata_store_pb2.MetadataStoreClientConfig( - host=metadata_service_host, - port=metadata_service_port, - ) - callback( - run_pipeline=run_pipeline, - mlmd_connection_config=mlmd_connection_config) - - import fire - fire.Fire(main) - - -def run_v2_pipeline( - client: kfp.Client, - fn: Optional[Callable], - file: Optional[str], - pipeline_root: Optional[str], - enable_caching: bool, - arguments: dict[str, str], -): - pipeline_spec_file = tempfile.mktemp( - suffix='.yaml', prefix="original_pipeline_spec") - if fn: - kfp.compiler.Compiler().compile( - pipeline_func=fn, package_path=pipeline_spec_file) - else: - pyfile = file - if file.endswith(".ipynb"): - pyfile = tempfile.mktemp(suffix='.py', prefix="pipeline_py_code") - _nb_sample_to_py(file, pyfile) - from kfp.cli.compile import dsl_compile - dsl_compile(py=pyfile, output=pipeline_spec_file) - - return client.create_run_from_pipeline_package( - pipeline_file=pipeline_spec_file, - arguments={}, - pipeline_root=pipeline_root, - enable_caching=enable_caching) - - -def _simplify_proto_struct(data: dict) -> dict: - res = {} - for key, value in data.items(): - if value.get('stringValue') is not None: - res[key] = value['stringValue'] - elif value.get('doubleValue') is not None: - res[key] = value['doubleValue'] - elif value.get('structValue') is not None: - res[key] = value['structValue'] - else: - res[key] = value - return res - - -@dataclass -class KfpArtifact: - name: str - uri: str - type: str - metadata: dict - - @classmethod - def new( - cls, - mlmd_artifact: metadata_store_pb2.Artifact, - mlmd_artifact_type: metadata_store_pb2.ArtifactType, - mlmd_event: metadata_store_pb2.Event, - ): - # event path is conceptually input/output name in a task - # ref: https://github.com/google/ml-metadata/blob/78ea886c18979d79f3c224092245873474bfafa2/ml_metadata/proto/metadata_store.proto#L169-L180 - artifact_name = mlmd_event.path.steps[0].key - # The original field is custom_properties, but MessageToDict converts it - # to customProperties. - metadata = _simplify_proto_struct( - MessageToDict(mlmd_artifact).get('customProperties', {})) - return cls( - name=artifact_name, - type=mlmd_artifact_type.name, - uri=mlmd_artifact.uri, - metadata=metadata) - - -@dataclass -class TaskInputs: - parameters: dict - artifacts: list[KfpArtifact] - - -@dataclass -class TaskOutputs: - parameters: dict - artifacts: list[KfpArtifact] - - -@dataclass -class KfpTask: - """A KFP runtime task.""" - name: str - type: str - state: int - inputs: TaskInputs - outputs: TaskOutputs - children: Optional[dict[str, KfpTask]] = None - - def get_dict(self): - # Keep inputs and outputs keys, but ignore other zero values. 
- ignore_zero_values_except_io = lambda x: { - k: v for (k, v) in x if k in ["inputs", "outputs"] or v - } - d = asdict(self, dict_factory=ignore_zero_values_except_io) - # remove uri, because they are not deterministic - for artifact in d.get('inputs', {}).get('artifacts', []): - artifact.pop('uri') - for artifact in d.get('outputs', {}).get('artifacts', []): - artifact.pop('uri') - # children should be accessed separately - if d.get('children') is not None: - d.pop('children') - return d - - def __repr__(self, depth=1): - return_string = [str(self.get_dict())] - if self.children: - for child in self.children.values(): - return_string.extend( - ["\n", "--" * depth, - child.__repr__(depth + 1)]) - return "".join(return_string) - - @classmethod - def new( - cls, - execution: metadata_store_pb2.Execution, - execution_types_by_id: dict[int, metadata_store_pb2.ExecutionType], - events_by_execution_id: dict[int, list[metadata_store_pb2.Event]], - artifacts_by_id: dict[int, metadata_store_pb2.Artifact], - artifact_types_by_id: dict[int, metadata_store_pb2.ArtifactType], - children: Optional[dict[str, KfpTask]], - ): - name = execution.custom_properties.get('task_name').string_value - iteration_index = execution.custom_properties.get('iteration_index') - if iteration_index: - name += f'-#{iteration_index.int_value}' - execution_type = execution_types_by_id[execution.type_id] - params = _parse_parameters(execution) - events = events_by_execution_id.get(execution.id, []) - input_artifacts = [] - output_artifacts = [] - if events: - input_artifacts_info = [(e.artifact_id, e) - for e in events - if e.type == metadata_store_pb2.Event.INPUT] - output_artifacts_info = [ - (e.artifact_id, e) - for e in events - if e.type == metadata_store_pb2.Event.OUTPUT - ] - - def kfp_artifact(aid: int, - e: metadata_store_pb2.Event) -> KfpArtifact: - mlmd_artifact = artifacts_by_id[aid] - mlmd_type = artifact_types_by_id[mlmd_artifact.type_id] - return KfpArtifact.new( - mlmd_artifact=mlmd_artifact, - mlmd_artifact_type=mlmd_type, - mlmd_event=e, - ) - - input_artifacts = [ - kfp_artifact(aid, e) for (aid, e) in input_artifacts_info - ] - input_artifacts.sort(key=lambda a: a.name) - output_artifacts = [ - kfp_artifact(aid, e) for (aid, e) in output_artifacts_info - ] - output_artifacts.sort(key=lambda a: a.name) - - return cls( - name=name, - type=execution_type.name, - state=execution.last_known_state, - inputs=TaskInputs( - parameters=params['inputs'], artifacts=input_artifacts), - outputs=TaskOutputs( - parameters=params['outputs'], artifacts=output_artifacts), - children=children or None, - ) - - -class KfpMlmdClient: - - def __init__( - self, - mlmd_connection_config: Optional[ - metadata_store_pb2.MetadataStoreClientConfig] = None, - ): - if mlmd_connection_config is None: - # default to value suitable for local testing - mlmd_connection_config = metadata_store_pb2.MetadataStoreClientConfig( - host='localhost', - port=8080, - ) - self.mlmd_store = metadata_store.MetadataStore(mlmd_connection_config) - self.dag_type = self.mlmd_store.get_execution_type( - type_name='system.DAGExecution') - - def get_tasks(self, run_id: str): - run_context = self.mlmd_store.get_context_by_type_and_name( - type_name='system.PipelineRun', - context_name=run_id, - ) - if not run_context: - raise Exception( - f'Cannot find system.PipelineRun context "{run_id}"') - logger.info(f'run_context: name={run_context.name} id={run_context.id}') - - root = self.mlmd_store.get_execution_by_type_and_name( - type_name='system.DAGExecution', - 
execution_name=f'run/{run_id}', - ) - if not root: - raise Exception( - f'Cannot find system.DAGExecution execution "run/{run_id}"') - logger.info(f'root_dag: name={root.name} id={root.id}') - return self._get_tasks(root.id, run_context.id) - - def _get_tasks(self, dag_id: int, - run_context_id: int) -> dict[str, KfpTask]: - # Note, we only need to query by parent_dag_id. However, there is no index - # on parent_dag_id. To speed up the query, we also limit the query to the - # run context (contexts have index). - filter_query = f'contexts_run.id = {run_context_id} AND custom_properties.parent_dag_id.int_value = {dag_id}' - executions = self.mlmd_store.get_executions( - list_options=ListOptions(filter_query=filter_query)) - execution_types = self.mlmd_store.get_execution_types_by_id( - list(set([e.type_id for e in executions]))) - execution_types_by_id = {et.id: et for et in execution_types} - events = self.mlmd_store.get_events_by_execution_ids( - [e.id for e in executions]) - events_by_execution_id = {} - for e in events: - events_by_execution_id[e.execution_id] = ( - events_by_execution_id.get(e.execution_id) or []) + [e] - artifacts = self.mlmd_store.get_artifacts_by_id( - artifact_ids=[e.artifact_id for e in events]) - artifacts_by_id = {a.id: a for a in artifacts} - artifact_types = self.mlmd_store.get_artifact_types_by_id( - list(set([a.type_id for a in artifacts]))) - artifact_types_by_id = {at.id: at for at in artifact_types} - _validate_executions_have_task_names(executions) - - def get_children(e: Execution) -> Optional[dict[str, KfpTask]]: - if e.type_id == self.dag_type.id: - children = self._get_tasks(e.id, run_context_id) - return children - return None - - tasks = [ - KfpTask.new( - execution=e, - execution_types_by_id=execution_types_by_id, - events_by_execution_id=events_by_execution_id, - artifacts_by_id=artifacts_by_id, - artifact_types_by_id=artifact_types_by_id, - children=get_children(e), - ) for e in executions - ] - tasks_by_name = {t.name: t for t in tasks} - return tasks_by_name - - -def _validate_executions_have_task_names(execution_list): - executions_without_task_name = [ - e for e in execution_list - if not e.custom_properties.get('task_name').string_value - ] - if executions_without_task_name: - raise Exception( - f'some executions are missing task_name custom property. executions:\n{executions_without_task_name}' - ) - - -def _parse_parameters(execution: metadata_store_pb2.Execution) -> dict: - custom_properties = execution.custom_properties - parameters = {'inputs': {}, 'outputs': {}} - for item in custom_properties.items(): - (name, value) = item - raw_value = None - if value.HasField('string_value'): - raw_value = value.string_value - if value.HasField('int_value'): - raw_value = value.int_value - if value.HasField('double_value'): - raw_value = value.double_value - if name.startswith('input:'): - parameters['inputs'][name[len('input:'):]] = raw_value - if name.startswith('output:'): - parameters['outputs'][name[len('output:'):]] = raw_value - if name == "inputs" and value.HasField('struct_value'): - for k, v in _simplify_proto_struct( - MessageToDict(value))["structValue"].items(): - parameters['inputs'][k] = v - if name == "outputs" and value.HasField('struct_value'): - for k, v in _simplify_proto_struct( - MessageToDict(value))["structValue"].items(): - parameters['outputs'][k] = v - return parameters - - -def _nb_sample_to_py(notebook_path: str, output_path: str): - """nb_sample_to_py converts notebook kfp sample to a python file. 
- - Cells with tag "skip-in-test" will be omitted. - """ - with open(notebook_path, 'r') as f: - nb = nbformat.read(f, as_version=4) - # Cells with skip-in-test tag will be omitted. - # Example code that needs the tag: - # kfp.Client().create_run_from_pipeline_func() - # so that we won't submit pipelines when compiling them. - nb.cells = [ - cell for cell in nb.cells - if 'skip-in-test' not in cell.get('metadata', {}).get('tags', []) - ] - py_exporter = PythonExporter() - (py_code, res) = py_exporter.from_notebook_node(nb) - with open(output_path, 'w') as out: - out.write(py_code) - - -def relative_path(file_path: str, relative_path: str) -> str: - return os.path.join( - os.path.dirname(os.path.realpath(file_path)), relative_path) diff --git a/samples/test/utils/setup.py b/samples/test/utils/setup.py deleted file mode 100644 index a90dea67434..00000000000 --- a/samples/test/utils/setup.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import setuptools - -setuptools.setup( - name="kfp-samples-test-utils", - version="0.0.1", - author="Kubeflow Pipelines Team", - author_email="kubeflow-pipelines@google.com", - description="Kubeflow Pipelines sample test utils", - url="https://github.com/kubeflow/pipelines/blob/master/samples/test/utils", - project_urls={ - "Bug Tracker": "https://github.com/kubeflow/pipelines/issues", - }, - install_requires=[ # The exact versions should be determined elsewhere. - 'kfp', - 'ml-metadata', - 'nbconvert~=6.0', # >=6.0 <7 - ], - classifiers=[ - "Programming Language :: Python :: 3", - "License :: OSI Approved :: Apache Software License", - "Operating System :: OS Independent", - ], - package_dir={"": "."}, - packages=setuptools.find_packages(where="."), - python_requires=">=3.7", -) diff --git a/samples/v2/.gitignore b/samples/v2/.gitignore deleted file mode 100644 index a6c57f5fb2f..00000000000 --- a/samples/v2/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.json diff --git a/samples/v2/Makefile b/samples/v2/Makefile deleted file mode 100644 index 72015474d2b..00000000000 --- a/samples/v2/Makefile +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# compile all v2 samples to pipeline job JSON files -.PHONY: all -all: - @for f in $$(find . 
-name "*.py"); do \ - compiled="$${f%.*}_pipeline.yaml"; \ - echo "compiling $${f} to $${compiled}"; \ - kfp dsl compile --py "$${f}" --output $${compiled}""; \ - done - -# clean up all genereated pipeline job JSON files -.PHONY: clean -clean: - @find . -name "*.yaml" -exec rm {} \; diff --git a/samples/v2/README.md b/samples/v2/README.md deleted file mode 100644 index c246ce8fac9..00000000000 --- a/samples/v2/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Samples for KFP v2 compatible and KFP v2 - -[KFP v2 compatible documentation](https://www.kubeflow.org/docs/components/pipelines/sdk/v2/v2-compatibility/) diff --git a/samples/v2/component_with_optional_inputs.py b/samples/v2/component_with_optional_inputs.py deleted file mode 100644 index 6fcb2e4b44d..00000000000 --- a/samples/v2/component_with_optional_inputs.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright 2023 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from typing import Optional, Dict, List - -from kfp import compiler -from kfp import dsl -from kfp.dsl import component - - -@component -def component_op( - input_str1: Optional[str] = 'string default value', - input_str2: Optional[str] = None, - input_str3: Optional[str] = None, - input_str4_from_pipeline: Optional[str] = "Some component default", - input_str5_from_pipeline: Optional[str] = "Some component default", - input_str6_from_pipeline: Optional[str] = None, - input_bool1: Optional[bool] = True, - input_bool2: Optional[bool] = None, - input_dict: Optional[Dict[str, int]] = {"a": 1}, - input_list: Optional[List[str]] = ["123"], - input_int: Optional[int] = 100, -): - print(f'input_str1: {input_str1}, type: {type(input_str1)}') - print(f'input_str2: {input_str2}, type: {type(input_str2)}') - print(f'input_str3: {input_str3}, type: {type(input_str3)}') - print(f'input_str4_from_pipeline: {input_str4_from_pipeline}, type: {type(input_str4_from_pipeline)}') - print(f'input_str5_from_pipeline: {input_str5_from_pipeline}, type: {type(input_str5_from_pipeline)}') - print(f'input_str6_from_pipeline: {input_str6_from_pipeline}, type: {type(input_str6_from_pipeline)}') - print(f'input_bool1: {input_bool1}, type: {type(input_bool1)}') - print(f'input_bool2: {input_bool2}, type: {type(input_bool2)}') - print(f'input_bool: {input_dict}, type: {type(input_dict)}') - print(f'input_bool: {input_list}, type: {type(input_list)}') - print(f'input_bool: {input_int}, type: {type(input_int)}') - - -@dsl.pipeline(name='v2-component-optional-input') -def pipeline(input_str4: Optional[str] = None, input_str5: Optional[str] = "Some pipeline default", input_str6: Optional[str] = None): - component_op( - input_str1='Hello', - input_str2='World', - input_str4_from_pipeline=input_str4, - input_str5_from_pipeline=input_str5, - input_str6_from_pipeline=input_str6, - ) - - -if __name__ == '__main__': - compiler.Compiler().compile( - pipeline_func=pipeline, package_path=__file__.replace('.py', '.yaml')) diff --git a/samples/v2/component_with_optional_inputs_test.py 
b/samples/v2/component_with_optional_inputs_test.py deleted file mode 100644 index 8fafdc24d0c..00000000000 --- a/samples/v2/component_with_optional_inputs_test.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2023 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Component with optinal and default input v2 engine pipeline.""" - -from __future__ import annotations - -import unittest - -from kfp.samples.test.utils import KfpTask -from kfp.samples.test.utils import run_pipeline_func -from kfp.samples.test.utils import TestCase -import kfp_server_api -from ml_metadata.proto import Execution - -from .component_with_optional_inputs import pipeline - - -def verify(t: unittest.TestCase, run: kfp_server_api.ApiRun, - tasks: dict[str, KfpTask], **kwargs): - t.assertEqual(run.status, 'Succeeded') - component_op_dict = tasks['component-op'].get_dict() - - t.assertEqual( - { - 'name': 'component-op', - 'inputs': { - 'parameters': { - 'input_str1': 'Hello', - 'input_str2': 'World', - 'input_str5_from_pipeline': 'Some pipeline default', - }, - }, - 'outputs': {}, - 'type': 'system.ContainerExecution', - 'state': Execution.State.COMPLETE, - }, component_op_dict) - - -if __name__ == '__main__': - run_pipeline_func([ - TestCase( - pipeline_func=pipeline, - verify_func=verify, - ), - ]) diff --git a/samples/v2/hello_world.py b/samples/v2/hello_world.py deleted file mode 100644 index d4044ed6add..00000000000 --- a/samples/v2/hello_world.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import os - -from kfp import dsl -from kfp import compiler - -# In tests, we install a KFP package from the PR under test. Users should not -# normally need to specify `kfp_package_path` in their component definitions. 
-_KFP_PACKAGE_PATH = os.getenv('KFP_PACKAGE_PATH') - - -@dsl.component(kfp_package_path=_KFP_PACKAGE_PATH) -def hello_world(text: str) -> str: - print(text) - return text - - -@dsl.pipeline(name='hello-world', description='A simple intro pipeline') -def pipeline_hello_world(text: str = 'hi there'): - """Pipeline that passes small pipeline parameter string to consumer op.""" - - consume_task = hello_world( - text=text) # Passing pipeline parameter as argument to consumer op - - -if __name__ == "__main__": - # execute only if run as a script - compiler.Compiler().compile( - pipeline_func=pipeline_hello_world, - package_path='hello_world_pipeline.json') diff --git a/samples/v2/hello_world_test.py b/samples/v2/hello_world_test.py deleted file mode 100644 index 04ffda438d4..00000000000 --- a/samples/v2/hello_world_test.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Hello world v2 engine pipeline.""" - -from __future__ import annotations - -import unittest - -from kfp.samples.test.utils import KfpTask -from kfp.samples.test.utils import run_pipeline_func -from kfp.samples.test.utils import TaskInputs -from kfp.samples.test.utils import TaskOutputs -from kfp.samples.test.utils import TestCase -import kfp_server_api -from ml_metadata.proto import Execution - -from .hello_world import pipeline_hello_world - - -def verify(t: unittest.TestCase, run: kfp_server_api.ApiRun, - tasks: dict[str, KfpTask], **kwargs): - t.assertEqual(run.status, 'Succeeded') - t.assertEqual( - { - 'hello-world': - KfpTask( - name='hello-world', - type='system.ContainerExecution', - state=Execution.State.COMPLETE, - inputs=TaskInputs( - parameters={'text': 'hi there'}, artifacts=[]), - outputs=TaskOutputs( - parameters={'Output': 'hi there'}, artifacts=[])) - }, - tasks, - ) - - -if __name__ == '__main__': - run_pipeline_func([ - TestCase( - pipeline_func=pipeline_hello_world, - verify_func=verify, - ), - ]) diff --git a/samples/v2/lightweight_python_functions_v2_pipeline/lightweight_python_functions_v2_pipeline.py b/samples/v2/lightweight_python_functions_v2_pipeline/lightweight_python_functions_v2_pipeline.py deleted file mode 100644 index 20f62ea7e54..00000000000 --- a/samples/v2/lightweight_python_functions_v2_pipeline/lightweight_python_functions_v2_pipeline.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Sample pipeline for passing data in KFP v2.""" -from typing import Dict, List - -from kfp import compiler -from kfp import dsl -from kfp.dsl import Input, InputPath, Output, OutputPath, Dataset, Model, component - - -@component -def preprocess( - # An input parameter of type string. - message: str, - # Use Output[T] to get a metadata-rich handle to the output artifact - # of type `Dataset`. - output_dataset_one: Output[Dataset], - # A locally accessible filepath for another output artifact of type - # `Dataset`. - output_dataset_two_path: OutputPath('Dataset'), - # A locally accessible filepath for an output parameter of type string. - output_parameter_path: OutputPath(str), - # A locally accessible filepath for an output parameter of type bool. - output_bool_parameter_path: OutputPath(bool), - # A locally accessible filepath for an output parameter of type dict. - output_dict_parameter_path: OutputPath(Dict[str, int]), - # A locally accessible filepath for an output parameter of type list. - output_list_parameter_path: OutputPath(List[str]), -): - """Dummy preprocessing step.""" - - # Use Dataset.path to access a local file path for writing. - # One can also use Dataset.uri to access the actual URI file path. - with open(output_dataset_one.path, 'w') as f: - f.write(message) - - # OutputPath is used to just pass the local file path of the output artifact - # to the function. - with open(output_dataset_two_path, 'w') as f: - f.write(message) - - with open(output_parameter_path, 'w') as f: - f.write(message) - - with open(output_bool_parameter_path, 'w') as f: - f.write( - str(True)) # use either `str()` or `json.dumps()` for bool values. - - import json - with open(output_dict_parameter_path, 'w') as f: - f.write(json.dumps({'A': 1, 'B': 2})) - - with open(output_list_parameter_path, 'w') as f: - f.write(json.dumps(['a', 'b', 'c'])) - - -@component -def train( - # Use InputPath to get a locally accessible path for the input artifact - # of type `Dataset`. - dataset_one_path: InputPath('Dataset'), - # Use Input[T] to get a metadata-rich handle to the input artifact - # of type `Dataset`. - dataset_two: Input[Dataset], - # An input parameter of type string. - message: str, - # Use Output[T] to get a metadata-rich handle to the output artifact - # of type `Dataset`. - model: Output[Model], - # An input parameter of type bool. - input_bool: bool, - # An input parameter of type dict. - input_dict: Dict[str, int], - # An input parameter of type List[str]. - input_list: List[str], - # An input parameter of type int with a default value. - num_steps: int = 100, -): - """Dummy Training step.""" - with open(dataset_one_path, 'r') as input_file: - dataset_one_contents = input_file.read() - - with open(dataset_two.path, 'r') as input_file: - dataset_two_contents = input_file.read() - - line = (f'dataset_one_contents: {dataset_one_contents} || ' - f'dataset_two_contents: {dataset_two_contents} || ' - f'message: {message} || ' - f'input_bool: {input_bool}, type {type(input_bool)} || ' - f'input_dict: {input_dict}, type {type(input_dict)} || ' - f'input_list: {input_list}, type {type(input_list)} \n') - - with open(model.path, 'w') as output_file: - for i in range(num_steps): - output_file.write('Step {}\n{}\n=====\n'.format(i, line)) - - # model is an instance of Model artifact, which has a .metadata dictionary - # to store arbitrary metadata for the output artifact. 
- model.metadata['accuracy'] = 0.9 - - -@dsl.pipeline(pipeline_root='', name='my-test-pipeline-beta') -def pipeline(message: str = 'message'): - preprocess_task = preprocess(message=message) - train_task = train( - dataset_one=preprocess_task.outputs['output_dataset_one'], - dataset_two=preprocess_task.outputs['output_dataset_two'], - message=preprocess_task.outputs['output_parameter'], - input_bool=preprocess_task.outputs['output_bool_parameter'], - input_dict=preprocess_task.outputs['output_dict_parameter'], - input_list=preprocess_task.outputs['output_list_parameter'], - ) - - -if __name__ == '__main__': - compiler.Compiler().compile( - pipeline_func=pipeline, package_path=__file__.replace('.py', '.yaml')) diff --git a/samples/v2/parallel_consume_upstream.py b/samples/v2/parallel_consume_upstream.py deleted file mode 100644 index bc8a5b6e3fd..00000000000 --- a/samples/v2/parallel_consume_upstream.py +++ /dev/null @@ -1,39 +0,0 @@ -from kfp import Client -from kfp import dsl -from kfp.dsl import Artifact, Input, Output - - -@dsl.component -def split_input(input: str) -> list: - return input.split(',') - - -@dsl.component -def create_file(file: Output[Artifact], content: str): - with open(file.path, 'w') as f: - f.write(content) - - -@dsl.component -def read_file(file: Input[Artifact]) -> str: - with open(file.path, 'r') as f: - print(f.read()) - return file.path - - -@dsl.pipeline() -def loop_consume_upstream(): - model_ids_split_op = split_input(input='component1,component2,component3') - model_ids_split_op.set_caching_options(False) - - with dsl.ParallelFor(model_ids_split_op.output) as model_id: - create_file_op = create_file(content=model_id) - create_file_op.set_caching_options(False) - # Consume the output from a op in the loop iteration DAG context - read_file_op = read_file(file=create_file_op.outputs['file']) - read_file_op.set_caching_options(False) - - -if __name__ == '__main__': - client = Client() - run = client.create_run_from_pipeline_func(loop_consume_upstream) diff --git a/samples/v2/pipeline_container_no_input_test.py b/samples/v2/pipeline_container_no_input_test.py deleted file mode 100644 index bfb402573c5..00000000000 --- a/samples/v2/pipeline_container_no_input_test.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2022 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Pipeline container no input v2 engine pipeline.""" - -from __future__ import annotations - -import unittest - -from kfp.samples.test.utils import KfpTask -from kfp.samples.test.utils import run_pipeline_func -from kfp.samples.test.utils import TaskInputs -from kfp.samples.test.utils import TaskOutputs -from kfp.samples.test.utils import TestCase -import kfp_server_api -from ml_metadata.proto import Execution - -from .pipeline_container_no_input import pipeline_container_no_input - - -def verify(t: unittest.TestCase, run: kfp_server_api.ApiRun, - tasks: dict[str, KfpTask], **kwargs): - t.assertEqual(run.status, 'Succeeded') - t.assertEqual( - { - 'container-no-input': - KfpTask( - name='container-no-input', - type='system.ContainerExecution', - state=Execution.State.COMPLETE, - inputs=TaskInputs(parameters={}, artifacts=[]), - outputs=TaskOutputs(parameters={}, artifacts=[])) - }, - tasks, - ) - - -if __name__ == '__main__': - run_pipeline_func([ - TestCase( - pipeline_func=pipeline_container_no_input, - verify_func=verify, - ), - ]) diff --git a/samples/v2/pipeline_with_env_test.py b/samples/v2/pipeline_with_env_test.py deleted file mode 100644 index 86003f203aa..00000000000 --- a/samples/v2/pipeline_with_env_test.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright 2023 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Hello world v2 engine pipeline.""" - -from __future__ import annotations - -import unittest - -from kfp.samples.test.utils import KfpTask -from kfp.samples.test.utils import run_pipeline_func -from kfp.samples.test.utils import TaskInputs -from kfp.samples.test.utils import TaskOutputs -from kfp.samples.test.utils import TestCase -import kfp_server_api -from ml_metadata.proto import Execution - -from .pipeline_with_env import pipeline_with_env - - -def verify(t: unittest.TestCase, run: kfp_server_api.ApiRun, - tasks: dict[str, KfpTask], **kwargs): - t.assertEqual(run.status, 'Succeeded') - t.assertEqual( - { - 'print-env-op': - KfpTask( - name='print-env-op', - type='system.ContainerExecution', - state=Execution.State.COMPLETE, - inputs=TaskInputs(parameters={}, artifacts=[]), - outputs=TaskOutputs(parameters={}, artifacts=[])), - 'check-env': - KfpTask( - name='check-env', - type='system.ContainerExecution', - state=Execution.State.COMPLETE, - inputs=TaskInputs(parameters={}, artifacts=[]), - outputs=TaskOutputs(parameters={}, artifacts=[])), - }, - tasks, - ) - - -if __name__ == '__main__': - run_pipeline_func([ - TestCase( - pipeline_func=pipeline_with_env, - verify_func=verify, - ), - ]) diff --git a/samples/v2/pipeline_with_importer.py b/samples/v2/pipeline_with_importer.py deleted file mode 100644 index 49c4ff5989e..00000000000 --- a/samples/v2/pipeline_with_importer.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Pipeline using dsl.importer.""" -import os -from typing import NamedTuple - -from kfp import compiler, dsl -from kfp.dsl import Dataset, Input, Model, component, importer - -# In tests, we install a KFP package from the PR under test. Users should not -# normally need to specify `kfp_package_path` in their component definitions. -_KFP_PACKAGE_PATH = os.getenv('KFP_PACKAGE_PATH') - - -@component(kfp_package_path=_KFP_PACKAGE_PATH) -def train( - dataset: Input[Dataset] -) -> NamedTuple('Outputs', [ - ('scalar', str), - ('model', Model), -]): - """Dummy Training step.""" - with open(dataset.path, 'r') as f: - data = f.read() - print('Dataset:', data) - - scalar = '123' - model = 'My model trained using data: {}'.format(data) - - from collections import namedtuple - output = namedtuple('Outputs', ['scalar', 'model']) - return output(scalar, model) - - -@dsl.pipeline(name='pipeline-with-importer') -def pipeline_with_importer(): - - importer1 = importer( - artifact_uri='gs://ml-pipeline-playground/shakespeare1.txt', - artifact_class=Dataset, - reimport=False) - train(dataset=importer1.output) - - -if __name__ == "__main__": - # execute only if run as a script - compiler.Compiler().compile( - pipeline_func=pipeline_with_importer, - package_path='pipeline_with_importer.json') diff --git a/samples/v2/pipeline_with_importer_test.py b/samples/v2/pipeline_with_importer_test.py deleted file mode 100644 index df50125fea3..00000000000 --- a/samples/v2/pipeline_with_importer_test.py +++ /dev/null @@ -1,102 +0,0 @@ -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Hello world v2 engine pipeline.""" - -from __future__ import annotations - -import unittest - -from kfp.samples.test.utils import KfpTask -from kfp.samples.test.utils import run_pipeline_func -from kfp.samples.test.utils import TestCase -import kfp_server_api -from ml_metadata.proto import Execution - -from .pipeline_with_importer import pipeline_with_importer - - -def verify(t: unittest.TestCase, run: kfp_server_api.ApiRun, - tasks: dict[str, KfpTask], **kwargs): - t.assertEqual(run.status, 'Succeeded') - t.assertCountEqual(['importer', 'train'], tasks.keys(), 'task names') - importer = tasks['importer'] - train = tasks['train'] - t.assertEqual( - 'gs://ml-pipeline-playground/shakespeare1.txt', - importer.outputs.artifacts[0].uri, - 'output artifact uri of importer should be "gs://ml-pipeline-playground/shakespeare1.txt"' - ) - t.assertEqual( - 'gs://ml-pipeline-playground/shakespeare1.txt', - train.inputs.artifacts[0].uri, - 'input artifact uri of train should be "gs://ml-pipeline-playground/shakespeare1.txt"' - ) - importer_dict = importer.get_dict() - train_dict = train.get_dict() - for artifact in importer_dict.get('outputs').get('artifacts'): - # pop metadata here because the artifact which got re-imported may have metadata with uncertain data - if artifact.get('metadata') is not None: - artifact.pop('metadata') - for artifact in train_dict.get('inputs').get('artifacts'): - # pop metadata here because the artifact which got re-imported may have metadata with uncertain data - if artifact.get('metadata') is not None: - artifact.pop('metadata') - - t.assertEqual( - { - 'name': 'importer', - 'inputs': {}, - 'outputs': { - 'artifacts': [{ - 'name': 'artifact', - 'type': 'system.Dataset', - }], - }, - 'type': 'system.ImporterExecution', - 'state': Execution.State.COMPLETE, - }, importer_dict) - - t.assertEqual( - { - 'name': 'train', - 'inputs': { - 'artifacts': [{ - 'name': 'dataset', - 'type': 'system.Dataset' - }], - }, - 'outputs': { - 'artifacts': [{ - 'metadata': { - 'display_name': 'model' - }, - 'name': 'model', - 'type': 'system.Model' - }], - 'parameters': { - 'scalar': '123' - } - }, - 'type': 'system.ContainerExecution', - 'state': Execution.State.COMPLETE, - }, train_dict) - - -if __name__ == '__main__': - run_pipeline_func([ - TestCase( - pipeline_func=pipeline_with_importer, - verify_func=verify, - ), - ]) diff --git a/samples/v2/pipeline_with_placeholders.py b/samples/v2/pipeline_with_placeholders.py deleted file mode 100644 index a4ace7894a7..00000000000 --- a/samples/v2/pipeline_with_placeholders.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright 2025 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from kfp import compiler -from kfp import dsl -from kfp.dsl import component - - -@component -def print_all_placeholders( - job_name: str, - job_resource_name: str, - job_id: str, - task_name: str, - task_id: str, -): - allPlaceholders = [job_name, job_resource_name, job_id, task_name, task_id] - - for placeholder in allPlaceholders: - if "\{\{" in placeholder or placeholder == "": - raise RuntimeError( - "Expected the placeholder to be replaced with a value: " + placeholder - ) - - assert task_name == "print-all-placeholders" - assert job_name.startswith("pipeline-with-placeholders ") - assert job_resource_name.startswith("pipeline-with-placeholders-") - - output = ", ".join(allPlaceholders) - print(output) - - -@dsl.pipeline(name="pipeline-with-placeholders") -def pipeline_with_placeholders(): - print_all_placeholders( - job_name=dsl.PIPELINE_JOB_NAME_PLACEHOLDER, - job_resource_name=dsl.PIPELINE_JOB_RESOURCE_NAME_PLACEHOLDER, - job_id=dsl.PIPELINE_JOB_ID_PLACEHOLDER, - task_name=dsl.PIPELINE_TASK_NAME_PLACEHOLDER, - task_id=dsl.PIPELINE_TASK_ID_PLACEHOLDER, - ).set_caching_options(False) - - -if __name__ == "__main__": - compiler.Compiler().compile( - pipeline_func=pipeline_with_placeholders, - package_path=__file__.replace(".py", ".yaml"), - ) diff --git a/samples/v2/pipeline_with_retry.py b/samples/v2/pipeline_with_retry.py deleted file mode 100644 index 90f1f53e407..00000000000 --- a/samples/v2/pipeline_with_retry.py +++ /dev/null @@ -1,51 +0,0 @@ -import os - -from kfp import dsl - -from kfp import compiler - -_KFP_PACKAGE_PATH = os.getenv('KFP_PACKAGE_PATH') - -@dsl.component(kfp_package_path=_KFP_PACKAGE_PATH) -def verify_retries(retryCount: str, retries: str) -> bool: - if retryCount != retries: - raise Exception(f"Number of retries has not reached {retries} blank yet.") - return True - -@dsl.component(kfp_package_path=_KFP_PACKAGE_PATH) -def print_op(text: str) -> str: - print(text) - return text - -@dsl.pipeline -def pipeline_single_component(): - task2 = verify_retries(retryCount="{{retries}}", retries='1') - -@dsl.pipeline -def pipeline_single_component_with_retry(): - task2 = verify_retries(retryCount="{{retries}}", retries='1').set_retry(num_retries=1) - -@dsl.pipeline -def pipeline_multi_component(): - task2 = verify_retries(retryCount="{{retries}}", retries='1') - task2 = verify_retries(retryCount="{{retries}}", retries='2').set_retry(num_retries=2) - -@dsl.pipeline -def retry_pipeline(): - task1 = verify_retries(retryCount="{{retries}}", retries='2').set_retry(num_retries=2) - task2 = verify_retries(retryCount="{{retries}}", retries='2').set_retry(num_retries=2, backoff_duration="0s", backoff_factor=2, backoff_max_duration="3600s") - task3 = print_op(text='test').set_retry(num_retries=0) - - # retry with all args set at pipeline level only - task4 = pipeline_single_component().set_retry(num_retries=1) - # retry set at component level only - task5 = pipeline_single_component_with_retry() - # retry set at both component and pipeline level - task6 = pipeline_multi_component().set_retry(num_retries=1) - - -if __name__ == '__main__': - compiler.Compiler.compile( - pipeline_func=retry_pipeline, - package_path='pipeline_with_retry.yaml') - diff --git a/samples/v2/pipeline_with_secret_as_env_test.py b/samples/v2/pipeline_with_secret_as_env_test.py deleted file mode 100644 index e8c5beaca37..00000000000 --- a/samples/v2/pipeline_with_secret_as_env_test.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2023 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 
(the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""A pipeline that passes a secret as an env variable to a container.""" - -from __future__ import annotations - -import unittest - -from kfp.samples.test.utils import KfpTask -from kfp.samples.test.utils import run_pipeline_func -from kfp.samples.test.utils import TestCase -import kfp_server_api - -from .pipeline_secret_env import pipeline_secret_env - - -def verify(t: unittest.TestCase, run: kfp_server_api.ApiRun, - tasks: dict[str, KfpTask], **kwargs): - t.assertEqual(run.status, 'Succeeded') - - -if __name__ == '__main__': - run_pipeline_func([ - TestCase( - pipeline_func=pipeline_secret_env, - verify_func=verify, - ), - ]) diff --git a/samples/v2/pipeline_with_secret_as_volume_test.py b/samples/v2/pipeline_with_secret_as_volume_test.py deleted file mode 100644 index 8b93b20550d..00000000000 --- a/samples/v2/pipeline_with_secret_as_volume_test.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2023 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""A pipeline that passes a secret as a volume to a container.""" - -from __future__ import annotations - -import unittest - -from kfp.samples.test.utils import KfpTask -from kfp.samples.test.utils import run_pipeline_func -from kfp.samples.test.utils import TestCase -import kfp_server_api - -from .pipeline_secret_volume import pipeline_secret_volume - - -def verify(t: unittest.TestCase, run: kfp_server_api.ApiRun, - tasks: dict[str, KfpTask], **kwargs): - t.assertEqual(run.status, 'Succeeded') - - -if __name__ == '__main__': - run_pipeline_func([ - TestCase( - pipeline_func=pipeline_secret_volume, - verify_func=verify, - ), - ]) diff --git a/samples/v2/pipeline_with_volume_no_cache_test.py b/samples/v2/pipeline_with_volume_no_cache_test.py deleted file mode 100644 index b14caf73c4f..00000000000 --- a/samples/v2/pipeline_with_volume_no_cache_test.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from __future__ import annotations - -import unittest - -from kfp.samples.test.utils import KfpTask -from kfp.samples.test.utils import run_pipeline_func -from kfp.samples.test.utils import TestCase -import kfp_server_api - -from .pipeline_with_volume import pipeline_with_volume_no_cache - - -def verify(t: unittest.TestCase, run: kfp_server_api.ApiRun, - tasks: dict[str, KfpTask], **kwargs): - t.assertEqual(run.status, 'Succeeded') - - -if __name__ == '__main__': - run_pipeline_func([ - TestCase( - pipeline_func=pipeline_with_volume_no_cache, - verify_func=verify, - ), - ]) diff --git a/samples/v2/pipeline_with_volume_test.py b/samples/v2/pipeline_with_volume_test.py deleted file mode 100644 index d053145764a..00000000000 --- a/samples/v2/pipeline_with_volume_test.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from __future__ import annotations - -import unittest - -from kfp.samples.test.utils import KfpTask -from kfp.samples.test.utils import run_pipeline_func -from kfp.samples.test.utils import TestCase -import kfp_server_api - -from .pipeline_with_volume import pipeline_with_volume - - -def verify(t: unittest.TestCase, run: kfp_server_api.ApiRun, - tasks: dict[str, KfpTask], **kwargs): - t.assertEqual(run.status, 'Succeeded') - - -if __name__ == '__main__': - run_pipeline_func([ - TestCase( - pipeline_func=pipeline_with_volume, - verify_func=verify, - ), - ]) diff --git a/samples/v2/producer_consumer_param_test.py b/samples/v2/producer_consumer_param_test.py deleted file mode 100644 index 17c334fd692..00000000000 --- a/samples/v2/producer_consumer_param_test.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Hello world v2 engine pipeline.""" - -from __future__ import annotations - -import unittest - -from kfp.samples.test.utils import KfpMlmdClient -from kfp.samples.test.utils import KfpTask -from kfp.samples.test.utils import run_pipeline_func -from kfp.samples.test.utils import TaskInputs -from kfp.samples.test.utils import TaskOutputs -from kfp.samples.test.utils import TestCase -import kfp_server_api -from ml_metadata.proto import Execution - -from .producer_consumer_param import producer_consumer_param_pipeline - - -def verify(t: unittest.TestCase, run: kfp_server_api.ApiRun, - tasks: dict[str, KfpTask], **kwargs): - t.assertEqual(run.status, 'Succeeded') - t.assertEqual( - { - 'consumer': - KfpTask( - name='consumer', - type='system.ContainerExecution', - state=Execution.State.COMPLETE, - inputs=TaskInputs( - parameters={ - 'input_value': - 'Hello world, this is an output parameter\n' - }, - artifacts=[]), - outputs=TaskOutputs(parameters={}, artifacts=[])), - 'producer': - KfpTask( - name='producer', - type='system.ContainerExecution', - state=Execution.State.COMPLETE, - inputs=TaskInputs( - parameters={'input_text': 'Hello world'}, artifacts=[]), - outputs=TaskOutputs( - parameters={ - 'output_value': - 'Hello world, this is an output parameter\n' - }, - artifacts=[])) - }, tasks) - - -if __name__ == '__main__': - run_pipeline_func([ - TestCase( - pipeline_func=producer_consumer_param_pipeline, - verify_func=verify, - ), - ]) diff --git a/samples/v2/sample_test.py b/samples/v2/sample_test.py deleted file mode 100644 index d170e514d89..00000000000 --- a/samples/v2/sample_test.py +++ /dev/null @@ -1,207 +0,0 @@ -# Copyright 2024 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from concurrent.futures import as_completed -from concurrent.futures import ThreadPoolExecutor -from dataclasses import dataclass -import inspect -import os -from pprint import pprint -from typing import List -import unittest - -import collected_parameters -import component_with_optional_inputs -import hello_world -import kfp -from kfp.dsl.graph_component import GraphComponent -from kubernetes import client -from kubernetes import config -from kubernetes import utils -from modelcar import modelcar -import parallel_after_dependency -import parallel_consume_upstream -import pipeline_container_no_input -import pipeline_with_env -import pipeline_with_placeholders -import pipeline_with_secret_as_env -import pipeline_with_secret_as_volume -import producer_consumer_param -import subdagio -import two_step_pipeline_containerized -import yaml -import pipeline_with_retry - -_MINUTE = 60 # seconds -_DEFAULT_TIMEOUT = 5 * _MINUTE -SAMPLES_DIR = os.path.realpath(os.path.dirname(os.path.dirname(__file__))) -PRE_REQ_DIR = os.path.join(SAMPLES_DIR, 'v2', 'pre-requisites') -PREREQS = [os.path.join(PRE_REQ_DIR, 'test-secrets.yaml')] - -_KFP_NAMESPACE = os.getenv('KFP_NAMESPACE', 'kubeflow') - - -@dataclass -class TestCase: - pipeline_func: GraphComponent - timeout: int = _DEFAULT_TIMEOUT - - -def deploy_k8s_yaml(namespace: str, yaml_file: str): - config.load_kube_config() - api_client = client.ApiClient() - try: - utils.create_from_yaml(api_client, yaml_file, namespace=namespace) - print(f'Resource(s) from {yaml_file} deployed successfully.') - except Exception as e: - raise RuntimeError(f'Exception when deploying from YAML: {e}') - - -def delete_k8s_yaml(namespace: str, yaml_file: str): - config.load_kube_config() - v1 = client.CoreV1Api() - apps_v1 = client.AppsV1Api() - - try: - with open(yaml_file, 'r') as f: - yaml_docs = yaml.safe_load_all(f) - - for doc in yaml_docs: - if not doc: - continue # Skip empty documents - - kind = doc.get('kind', '').lower() - name = doc['metadata']['name'] - - print(f'Deleting {kind} named {name}...') - - # There's no utils.delete_from_yaml - # as a workaround we manually fetch required data - if kind == 'deployment': - apps_v1.delete_namespaced_deployment(name, namespace) - elif kind == 'service': - v1.delete_namespaced_service(name, namespace) - elif kind == 'configmap': - v1.delete_namespaced_config_map(name, namespace) - elif kind == 'pod': - v1.delete_namespaced_pod(name, namespace) - elif kind == 'secret': - v1.delete_namespaced_secret(name, namespace) - elif kind == 'persistentvolumeclaim': - v1.delete_namespaced_persistent_volume_claim( - name, namespace) - elif kind == 'namespace': - client.CoreV1Api().delete_namespace(name) - else: - print(f'Skipping unsupported resource type: {kind}') - - print(f'Resource(s) from {yaml_file} deleted successfully.') - except Exception as e: - print(f'Exception when deleting from YAML: {e}') - - -class SampleTest(unittest.TestCase): - _kfp_host_and_port = os.getenv('KFP_API_HOST_AND_PORT', - 'http://localhost:8888') - _kfp_ui_and_port = os.getenv('KFP_UI_HOST_AND_PORT', - 'http://localhost:8080') - _client = kfp.Client(host=_kfp_host_and_port, ui_host=_kfp_ui_and_port) - - @classmethod - def setUpClass(cls): - """Runs once before all tests.""" - print('Deploying pre-requisites....') - for p in PREREQS: - deploy_k8s_yaml(_KFP_NAMESPACE, p) - print('Done deploying pre-requisites.') - - @classmethod - def tearDownClass(cls): - """Runs once after all tests in this class.""" - print('Cleaning up resources....') - for p in PREREQS: - 
delete_k8s_yaml(_KFP_NAMESPACE, p) - print('Done clean up.') - - def test(self): - test_cases: List[TestCase] = [ - TestCase(pipeline_func=hello_world.pipeline_hello_world), - TestCase(pipeline_func=producer_consumer_param - .producer_consumer_param_pipeline), - TestCase(pipeline_func=pipeline_container_no_input - .pipeline_container_no_input), - TestCase(pipeline_func=two_step_pipeline_containerized - .two_step_pipeline_containerized), - TestCase(pipeline_func=component_with_optional_inputs.pipeline), - TestCase(pipeline_func=pipeline_with_env.pipeline_with_env), - - # The following tests are not working. Tracking issue: https://github.com/kubeflow/pipelines/issues/11053 - # TestCase(pipeline_func=pipeline_with_importer.pipeline_with_importer), - # TestCase(pipeline_func=pipeline_with_volume.pipeline_with_volume), - TestCase(pipeline_func=pipeline_with_secret_as_volume - .pipeline_secret_volume), - TestCase( - pipeline_func=pipeline_with_secret_as_env.pipeline_secret_env), - TestCase(pipeline_func=subdagio.parameter.crust), - TestCase(pipeline_func=subdagio.parameter_cache.crust), - TestCase(pipeline_func=subdagio.mixed_parameters.crust), - TestCase( - pipeline_func=subdagio.multiple_parameters_namedtuple.crust), - TestCase(pipeline_func=subdagio.parameter_oneof.crust), - TestCase(pipeline_func=subdagio.artifact_cache.crust), - TestCase(pipeline_func=subdagio.artifact.crust), - TestCase( - pipeline_func=subdagio.multiple_artifacts_namedtuple.crust), - TestCase(pipeline_func=pipeline_with_placeholders - .pipeline_with_placeholders), - TestCase(pipeline_func=modelcar.pipeline_modelcar_test), - TestCase( - pipeline_func=parallel_consume_upstream.loop_consume_upstream), - TestCase(pipeline_func=parallel_after_dependency - .loop_with_after_dependency_set), - TestCase( - pipeline_func=collected_parameters.collected_param_pipeline), - TestCase(pipeline_func=pipeline_with_retry.retry_pipeline), - ] - - with ThreadPoolExecutor() as executor: - futures = [ - executor.submit(self.run_test_case, test_case.pipeline_func, - test_case.timeout) for test_case in test_cases - ] - for future in as_completed(futures): - future.result() - - def run_test_case(self, pipeline_func: GraphComponent, timeout: int): - with self.subTest(pipeline=pipeline_func, msg=pipeline_func.name): - print( - f'Running pipeline: {inspect.getmodule(pipeline_func.pipeline_func).__name__}/{pipeline_func.name}.' - ) - run_result = self._client.create_run_from_pipeline_func( - pipeline_func=pipeline_func) - - run_response = run_result.wait_for_run_completion(timeout) - - pprint(run_response.run_details) - print('Run details page URL:') - print( - f'{self._kfp_ui_and_port}/#/runs/details/{run_response.run_id}') - - self.assertEqual(run_response.state, 'SUCCEEDED') - print( - f'Pipeline, {inspect.getmodule(pipeline_func.pipeline_func).__name__}/{pipeline_func.name}, succeeded.' 
- ) - - -if __name__ == '__main__': - unittest.main() diff --git a/samples/v2/subdagio/__init__.py b/samples/v2/subdagio/__init__.py deleted file mode 100644 index 024415d6bd2..00000000000 --- a/samples/v2/subdagio/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -from subdagio import artifact -from subdagio import artifact_cache -from subdagio import mixed_parameters -from subdagio import multiple_artifacts_namedtuple -from subdagio import multiple_parameters_namedtuple -from subdagio import parameter -from subdagio import parameter_cache -from subdagio import parameter_oneof diff --git a/samples/v2/subdagio/artifact.py b/samples/v2/subdagio/artifact.py deleted file mode 100644 index 8f425662a1b..00000000000 --- a/samples/v2/subdagio/artifact.py +++ /dev/null @@ -1,47 +0,0 @@ -import os - -from kfp import Client -from kfp import dsl - - -@dsl.component -def core_comp(dataset: dsl.Output[dsl.Dataset]): - with open(dataset.path, 'w') as f: - f.write('foo') - - -@dsl.component -def crust_comp(input: dsl.Dataset): - with open(input.path, 'r') as f: - print('input: ', f.read()) - - -@dsl.pipeline -def core() -> dsl.Dataset: - task = core_comp() - task.set_caching_options(False) - - return task.output - - -@dsl.pipeline -def mantle() -> dsl.Dataset: - dag_task = core() - dag_task.set_caching_options(False) - - return dag_task.output - - -@dsl.pipeline(name=os.path.basename(__file__).removesuffix('.py') + '-pipeline') -def crust(): - dag_task = mantle() - dag_task.set_caching_options(False) - - task = crust_comp(input=dag_task.output) - task.set_caching_options(False) - - -if __name__ == '__main__': - # Compiler().compile(pipeline_func=crust, package_path=f"{__file__.removesuffix('.py')}.yaml") - client = Client() - client.create_run_from_pipeline_func(crust) diff --git a/samples/v2/two_step_pipeline_containerized.py b/samples/v2/two_step_pipeline_containerized.py deleted file mode 100644 index b5397e6e323..00000000000 --- a/samples/v2/two_step_pipeline_containerized.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2022 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Two step pipeline using dsl.container_component decorator.""" - -from kfp import compiler -from kfp.dsl import container_component -from kfp.dsl import ContainerSpec -from kfp.dsl import Dataset -from kfp.dsl import Input -from kfp.dsl import Output -from kfp.dsl import pipeline - - -@container_component -def component1(text: str, output_gcs: Output[Dataset]): - return ContainerSpec( - image='alpine', - command=[ - 'sh', - '-c', - 'mkdir --parents $(dirname "$1") && echo "$0" > "$1"', - ], - args=[text, output_gcs.path]) - - -@container_component -def component2(input_gcs: Input[Dataset]): - return ContainerSpec(image='alpine', command=['cat'], args=[input_gcs.path]) - - -@pipeline(name='two-step-pipeline-containerized') -def two_step_pipeline_containerized(): - component_1 = component1(text='hi') - component_2 = component2(input_gcs=component_1.outputs['output_gcs']) - - -if __name__ == '__main__': - # execute only if run as a script - - compiler.Compiler().compile( - pipeline_func=two_step_pipeline_containerized, - package_path='two_step_pipeline_containerized.yaml') diff --git a/samples/v2/two_step_pipeline_containerized_test.py b/samples/v2/two_step_pipeline_containerized_test.py deleted file mode 100644 index 3f97a9c28e1..00000000000 --- a/samples/v2/two_step_pipeline_containerized_test.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright 2022 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Pipeline container no input v2 engine pipeline.""" - -from __future__ import annotations - -import unittest - -from kfp.samples.test.utils import KfpTask -from kfp.samples.test.utils import run_pipeline_func -from kfp.samples.test.utils import TestCase -import kfp_server_api -from ml_metadata.proto import Execution - -from .two_step_pipeline_containerized import two_step_pipeline_containerized - - -def verify(t: unittest.TestCase, run: kfp_server_api.ApiRun, - tasks: dict[str, KfpTask], **kwargs): - t.assertEqual(run.status, 'Succeeded') - component1_dict = tasks['component1'].get_dict() - component2_dict = tasks['component2'].get_dict() - for artifact in component1_dict.get('outputs').get('artifacts'): - # pop metadata here because the artifact which got re-imported may have metadata with uncertain data - if artifact.get('metadata') is not None: - artifact.pop('metadata') - for artifact in component2_dict.get('inputs').get('artifacts'): - # pop metadata here because the artifact which got re-imported may have metadata with uncertain data - if artifact.get('metadata') is not None: - artifact.pop('metadata') - - t.assertEqual( - { - 'name': 'component1', - 'inputs': { - 'parameters': { - 'text': 'hi' - } - }, - 'outputs': { - 'artifacts': [{ - 'name': 'output_gcs', - 'type': 'system.Dataset' - }], - }, - 'type': 'system.ContainerExecution', - 'state': Execution.State.COMPLETE, - }, component1_dict) - - t.assertEqual( - { - 'name': 'component2', - 'inputs': { - 'artifacts': [{ - 'name': 'input_gcs', - 'type': 'system.Dataset' - }], - }, - 'outputs': {}, - 'type': 'system.ContainerExecution', - 'state': Execution.State.COMPLETE, - }, component2_dict) - - -if __name__ == '__main__': - run_pipeline_func([ - TestCase( - pipeline_func=two_step_pipeline_containerized, - verify_func=verify, - ), - ]) diff --git a/sdk/Makefile b/sdk/Makefile new file mode 100644 index 00000000000..357f246f917 --- /dev/null +++ b/sdk/Makefile @@ -0,0 +1,32 @@ +# Copyright 2025 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Contact one of chensun, HumairAK if this remote image needs an update. +PREBUILT_REMOTE_IMAGE=ghcr.io/kubeflow/kfp-api-generator:master + +.PHONY: python +python: python + docker run --interactive --rm \ + --user $$(id -u):$$(id -g) \ + -e HOME=/tmp \ + -v "$$(pwd)/..":"/go/src/github.com/kubeflow/pipelines":z \ + $(PREBUILT_REMOTE_IMAGE) \ + sh -c 'cd /go/src/github.com/kubeflow/pipelines/sdk/python && \ + python3 setup.py sdist bdist_wheel --dist-dir ./dist' + +.PHONY: clean-python +clean-python: + rm -rf python/build + rm -rf python/dist + rm -rf python/kfp.egg-info diff --git a/sdk/RELEASE.md b/sdk/RELEASE.md index 1d8db6d59fd..f3b20dda5a1 100644 --- a/sdk/RELEASE.md +++ b/sdk/RELEASE.md @@ -2,6 +2,9 @@ ## Features +* Allow sharing utility functions across components via the `additional_funcs` argument to the `component` decorator; + functions are embedded into the generated component code before the main function (#12178). 
+ ## Breaking changes ## Deprecations @@ -10,6 +13,34 @@ ## Documentation updates +# 2.14.0 + +## Features + +* feat(sdk): Add support for compiling pipelines to Kubernetes native format in SDK (#12012) +* feat(backend/sdk): enable dsl.Collected for parameters & artifacts (#11725) +* feat(sdk): update PipelineConfig to reflect new workspace Protobuf changes (#11934) +* feat(backend/sdk): support PipelineTaskFinalStatus input (#11953) +* feat(sdk): Add Support for Docker Container Run Arguments (#12006) + +## Deprecations +* PipelineTaskFinalStatus field names pipelineJobResourceName and pipelineTaskName are deprecated. Support for these fields will be removed at a later date. + +## Bug fixes and other changes +* fix(deps): widen urllib3 upper bound to <3.0.0 (#11819) +* fix(sdk): resolve issue when creating pipeline version from pipeline name using the cli. Fixes #11810 (#11866) +* fix(sdk): fix pip install for dev (#11891) +* fix(sdk): Resolves issue when using ParallelFor with param and depending tasks (#11903) +* chore(deps): bump urllib3 from 2.4.0 to 2.5.0 in /sdk/python (#11999) +* fix: input resolution with set_display_name (#11938) +* fix(sdk): Fixes for Identifying Untagged Images for Running (#11984) +* fix(sdk): Move version info to version.py for editable installs. (#11997) +* fix(sdk): Support partial replace of placeholders in dict/list objects (#12039) +* fix(backend/sdk): update proto packages (#12067) +* fix: backwards compatibility for pipeline spec task_name (#12061) +* chore(backend): resolve linting errors (#12083) +* fix(sdk,backend): Make the workspace size required (#12094) + # 2.13.0 ## Features @@ -28,6 +59,7 @@ ## Bug fixes and other changes * Depends on `google-cloud-storage>=2.2.1,<4` [\#11735](https://github.com/kubeflow/pipelines/pull/11735) +* Fixed missing `kfp.__version__` when installing SDK via `pip install -e`. [\#11997](https://github.com/kubeflow/pipelines/pull/11997) # 2.12.2 diff --git a/sdk/python/build.sh b/sdk/python/build.sh deleted file mode 100755 index a18d0d3c0e9..00000000000 --- a/sdk/python/build.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/bash -ex -# -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -# The scripts creates the Kubeflow Pipelines python SDK package. 
-# -# Usage: -# ./build.sh [output_file] - - -target_archive_file=${1:-kfp.tar.gz} - -pushd "$(dirname "$0")" -dist_dir=$(mktemp -d) -python3 setup.py sdist --format=gztar --dist-dir "$dist_dir" -cp "$dist_dir"/*.tar.gz "$target_archive_file" -popd diff --git a/sdk/python/kfp/__init__.py b/sdk/python/kfp/__init__.py index 7ad8bb5d8a7..72311fc78f9 100644 --- a/sdk/python/kfp/__init__.py +++ b/sdk/python/kfp/__init__.py @@ -16,7 +16,10 @@ # https://packaging.python.org/guides/packaging-namespace-packages/#pkgutil-style-namespace-packages __path__ = __import__('pkgutil').extend_path(__path__, __name__) -__version__ = '2.13.0' +try: + from .version import __version__ +except ImportError: + __version__ = 'dev' import sys import warnings diff --git a/sdk/python/kfp/cli/cli_test.py b/sdk/python/kfp/cli/cli_test.py index 88d43cb5c40..505363cc03b 100644 --- a/sdk/python/kfp/cli/cli_test.py +++ b/sdk/python/kfp/cli/cli_test.py @@ -279,6 +279,124 @@ def test_compile_with_caching_disabled_env_var(self): caching_options = task['cachingOptions'] self.assertEqual(caching_options, {}) + def test_compile_with_kubernetes_manifest_format(self): + with tempfile.NamedTemporaryFile(suffix='.py', delete=True) as temp_pipeline, \ + tempfile.NamedTemporaryFile(suffix='.yaml', delete=True) as output_file, \ + tempfile.NamedTemporaryFile(suffix='.yaml', delete=True) as output_file2: + temp_pipeline.write(b""" +from kfp import dsl + +@dsl.component +def my_component(): + pass + +@dsl.pipeline(name="iris-pipeline") +def iris_pipeline(): + my_component() +""") + temp_pipeline.flush() + pipeline_name = 'iris-pipeline' + pipeline_display_name = 'IrisPipeline' + pipeline_version_name = 'iris-pipeline-v1' + pipeline_version_display_name = 'IrisPipelineVersion' + namespace = 'test-namespace' + + # Test with --kubernetes-manifest-format flag + result = self.invoke([ + '--py', temp_pipeline.name, '--output', output_file.name, + '--kubernetes-manifest-format', '--pipeline-display-name', + pipeline_display_name, '--pipeline-version-name', + pipeline_version_name, '--pipeline-version-display-name', + pipeline_version_display_name, '--namespace', namespace, + '--include-pipeline-manifest' + ]) + self.assertEqual(result.exit_code, 0) + + with open(output_file.name, 'r') as f: + docs = list(yaml.safe_load_all(f)) + kinds = [doc['kind'] for doc in docs] + self.assertIn('Pipeline', kinds) + self.assertIn('PipelineVersion', kinds) + pipeline_doc = next( + doc for doc in docs if doc['kind'] == 'Pipeline') + pipeline_version_doc = next( + doc for doc in docs if doc['kind'] == 'PipelineVersion') + self.assertEqual(pipeline_doc['metadata']['name'], pipeline_name) + self.assertEqual(pipeline_doc['metadata']['namespace'], namespace) + self.assertEqual(pipeline_doc['spec']['displayName'], + pipeline_display_name) + self.assertEqual(pipeline_version_doc['metadata']['name'], + pipeline_version_name) + self.assertEqual(pipeline_version_doc['metadata']['namespace'], + namespace) + self.assertEqual(pipeline_version_doc['spec']['displayName'], + pipeline_version_display_name) + self.assertEqual(pipeline_version_doc['spec']['pipelineName'], + pipeline_name) + + # include_pipeline_manifest False + result = self.invoke([ + '--py', temp_pipeline.name, '--output', output_file2.name, + '--kubernetes-manifest-format', '--pipeline-display-name', + pipeline_display_name, '--pipeline-version-name', + pipeline_version_name, '--pipeline-version-display-name', + pipeline_version_display_name, '--namespace', namespace + ]) + self.assertEqual(result.exit_code, 
0) + + with open(output_file2.name, 'r') as f: + docs = list(yaml.safe_load_all(f)) + kinds = [doc['kind'] for doc in docs] + self.assertNotIn('Pipeline', kinds) + self.assertIn('PipelineVersion', kinds) + self.assertEqual(len(kinds), 1) + pipeline_version_doc = docs[0] + self.assertEqual(pipeline_version_doc['metadata']['name'], + pipeline_version_name) + self.assertEqual(pipeline_version_doc['metadata']['namespace'], + namespace) + self.assertEqual(pipeline_version_doc['spec']['displayName'], + pipeline_version_display_name) + self.assertEqual(pipeline_version_doc['spec']['pipelineName'], + pipeline_name) + + def test_compile_manifest_options_without_format_flag(self): + with tempfile.NamedTemporaryFile(suffix='.py', delete=True) as temp_pipeline, \ + tempfile.NamedTemporaryFile(suffix='.yaml', delete=True) as output_file: + temp_pipeline.write(b""" +from kfp import dsl + +@dsl.component +def my_component(): + pass + +@dsl.pipeline(name="iris-pipeline") +def iris_pipeline(): + my_component() +""") + temp_pipeline.flush() + pipeline_display_name = 'IrisPipeline' + pipeline_version_name = 'iris-pipeline-v1' + pipeline_version_display_name = 'IrisPipelineVersion' + namespace = 'test-namespace' + + result = self.invoke([ + '--py', temp_pipeline.name, '--output', output_file.name, + '--pipeline-display-name', pipeline_display_name, + '--pipeline-version-name', pipeline_version_name, + '--pipeline-version-display-name', + pipeline_version_display_name, '--namespace', namespace, + '--include-pipeline-manifest' + ]) + self.assertEqual(result.exit_code, 0) + self.assertIn( + 'Warning: Kubernetes manifest options were provided but --kubernetes-manifest-format was not set', + result.output) + # Should only output a regular pipeline spec, not Kubernetes manifests + with open(output_file.name, 'r') as f: + doc = yaml.safe_load(f) + self.assertIn('pipelineInfo', doc) + if __name__ == '__main__': unittest.main() diff --git a/sdk/python/kfp/cli/compile_.py b/sdk/python/kfp/cli/compile_.py index e1fc28a8328..269a40acbf6 100644 --- a/sdk/python/kfp/cli/compile_.py +++ b/sdk/python/kfp/cli/compile_.py @@ -21,7 +21,9 @@ from typing import Callable, Dict, Optional import click +from click_option_group import optgroup from kfp import compiler +from kfp.compiler.compiler_utils import KubernetesManifestOptions from kfp.dsl import base_component from kfp.dsl import graph_component from kfp.dsl.pipeline_context import Pipeline @@ -140,6 +142,52 @@ def parse_parameters(parameters: Optional[str]) -> Dict: default=False, envvar='KFP_DISABLE_EXECUTION_CACHING_BY_DEFAULT', help='Whether to disable execution caching by default.') +@click.option( + '--kubernetes-manifest-format', + is_flag=True, + default=False, + help='Output the compiled pipeline as a Kubernetes PipelineVersion manifest, with the option to include the Kubernetes Pipeline manifest as well when used with --include-pipeline-manifest.' +) +@optgroup.group( + 'Kubernetes Manifest Options', + help='Options only used when compiling pipelines to Kubernetes native manifest format. These control the metadata of the generated Kubernetes resources. Only relevant if --kubernetes-manifest-format is set.' +) +@optgroup.option( + '--pipeline-name', + type=str, + default=None, + help='Name for the Pipeline resource. Only relevant if --kubernetes-manifest-format is set.' +) +@optgroup.option( + '--pipeline-display-name', + type=str, + default=None, + help='Display name for the Pipeline resource. Only relevant if --kubernetes-manifest-format is set.' 
+) +@optgroup.option( + '--pipeline-version-name', + type=str, + default=None, + help='Name for the PipelineVersion resource. Only relevant if --kubernetes-manifest-format is set.' +) +@optgroup.option( + '--pipeline-version-display-name', + type=str, + default=None, + help='Display name for the PipelineVersion resource. Only relevant if --kubernetes-manifest-format is set.' +) +@optgroup.option( + '--namespace', + type=str, + default=None, + help='Kubernetes namespace for the resources. Only relevant if --kubernetes-manifest-format is set.' +) +@optgroup.option( + '--include-pipeline-manifest', + is_flag=True, + default=False, + help='Include the Pipeline manifest in the output. Defaults to False. Only relevant if --kubernetes-manifest-format is set.' +) def compile_( py: str, output: str, @@ -147,6 +195,13 @@ def compile_( pipeline_parameters: Optional[str] = None, disable_type_check: bool = False, disable_execution_caching_by_default: bool = False, + kubernetes_manifest_format: bool = False, + pipeline_name: Optional[str] = None, + pipeline_display_name: Optional[str] = None, + pipeline_version_name: Optional[str] = None, + pipeline_version_display_name: Optional[str] = None, + namespace: Optional[str] = None, + include_pipeline_manifest: bool = False, ) -> None: """Compiles a pipeline or component written in a .py file.""" @@ -155,13 +210,39 @@ def compile_( python_file=py, function_name=function_name) parsed_parameters = parse_parameters(parameters=pipeline_parameters) package_path = os.path.join(os.getcwd(), output) + + manifest_options_provided = any([ + pipeline_name, pipeline_display_name, pipeline_version_name, + pipeline_version_display_name, namespace, include_pipeline_manifest + ]) + kubernetes_manifest_options = None + if kubernetes_manifest_format: + kubernetes_manifest_options = KubernetesManifestOptions( + pipeline_name=pipeline_name, + pipeline_display_name=pipeline_display_name, + pipeline_version_name=pipeline_version_name, + pipeline_version_display_name=pipeline_version_display_name, + namespace=namespace, + include_pipeline_manifest=include_pipeline_manifest, + ) + elif manifest_options_provided: + click.echo( + 'Warning: Kubernetes manifest options were provided but --kubernetes-manifest-format was not set. 
' + 'These options will be ignored.', + err=True) + compiler.Compiler().compile( pipeline_func=pipeline_func, pipeline_parameters=parsed_parameters, package_path=package_path, - type_check=not disable_type_check) + type_check=not disable_type_check, + kubernetes_manifest_options=kubernetes_manifest_options, + kubernetes_manifest_format=kubernetes_manifest_format, + ) - click.echo(package_path) + click.echo( + f'Pipeline code was successfully compiled with the output saved to {package_path}' + ) def main(): diff --git a/sdk/python/kfp/cli/diagnose_me_cli.py b/sdk/python/kfp/cli/diagnose_me_cli.py index b03f9cb7643..2c17b52b743 100644 --- a/sdk/python/kfp/cli/diagnose_me_cli.py +++ b/sdk/python/kfp/cli/diagnose_me_cli.py @@ -52,8 +52,9 @@ def diagnose_me(ctx: click.Context, json: bool, project_id: str, # default behaviour dump all configurations results: ResultsType = { - gcp_command: gcp.get_gcp_configuration( - gcp_command, project_id=project_id, human_readable=not json) + gcp_command: + gcp.get_gcp_configuration( + gcp_command, project_id=project_id, human_readable=not json) for gcp_command in gcp.Commands } diff --git a/sdk/python/kfp/cli/experiment.py b/sdk/python/kfp/cli/experiment.py index cf94fd904fc..2417a97c2e6 100644 --- a/sdk/python/kfp/cli/experiment.py +++ b/sdk/python/kfp/cli/experiment.py @@ -109,8 +109,8 @@ def delete(ctx: click.Context, experiment_id: str): '--experiment-id', default=None, help=parsing.get_param_descr(client.Client.archive_experiment, - 'experiment_id') + ' ' + either_option_required -) + 'experiment_id') + ' ' + + either_option_required) @click.option( '--experiment-name', default=None, @@ -145,8 +145,8 @@ def archive(ctx: click.Context, experiment_id: str, experiment_name: str): '--experiment-id', default=None, help=parsing.get_param_descr(client.Client.unarchive_experiment, - 'experiment_id') + ' ' + either_option_required -) + 'experiment_id') + ' ' + + either_option_required) @click.option( '--experiment-name', default=None, diff --git a/sdk/python/kfp/cli/pipeline.py b/sdk/python/kfp/cli/pipeline.py index cab0bf7db4a..997d20d4306 100644 --- a/sdk/python/kfp/cli/pipeline.py +++ b/sdk/python/kfp/cli/pipeline.py @@ -82,8 +82,8 @@ def create(ctx: click.Context, '--pipeline-name', required=False, help=parsing.get_param_descr(client.Client.upload_pipeline_version, - 'pipeline_name') + ' ' + either_option_required -) + 'pipeline_name') + ' ' + + either_option_required) @click.option( '-d', '--description', diff --git a/sdk/python/kfp/cli/recurring_run.py b/sdk/python/kfp/cli/recurring_run.py index c0f7004cf6c..61eabcec9d2 100644 --- a/sdk/python/kfp/cli/recurring_run.py +++ b/sdk/python/kfp/cli/recurring_run.py @@ -61,8 +61,8 @@ def recurring_run(): @click.option( '--experiment-id', help=parsing.get_param_descr(client.Client.create_recurring_run, - 'experiment_id') + ' ' + either_option_required -) + 'experiment_id') + ' ' + + either_option_required) @click.option( '--experiment-name', help='The name of the experiment to create the recurring run under.' 
+ ' ' + diff --git a/sdk/python/kfp/client/auth_test.py b/sdk/python/kfp/client/auth_test.py index db58b51adb1..ef18076e8e4 100644 --- a/sdk/python/kfp/client/auth_test.py +++ b/sdk/python/kfp/client/auth_test.py @@ -40,9 +40,9 @@ def test_is_ipython_should_raise_error(self): mock.side_effect = ImportError self.assertFalse(auth.is_ipython()) - @patch('builtins.input', lambda *args: - 'https://oauth2.example.com/auth?code=4/P7q7W91a-oMsCeLvIaQm6bTrgtp7' - ) + @patch( + 'builtins.input', lambda *args: + 'https://oauth2.example.com/auth?code=4/P7q7W91a-oMsCeLvIaQm6bTrgtp7') @patch('kfp.client.auth.is_ipython', lambda *args: True) @patch.dict(os.environ, dict(), clear=True) def test_get_auth_code_from_ipython(self): @@ -50,9 +50,9 @@ def test_get_auth_code_from_ipython(self): self.assertEqual(token, '4/P7q7W91a-oMsCeLvIaQm6bTrgtp7') self.assertEqual(redirect_uri, 'http://localhost:9901') - @patch('builtins.input', lambda *args: - 'https://oauth2.example.com/auth?code=4/P7q7W91a-oMsCeLvIaQm6bTrgtp7' - ) + @patch( + 'builtins.input', lambda *args: + 'https://oauth2.example.com/auth?code=4/P7q7W91a-oMsCeLvIaQm6bTrgtp7') @patch('kfp.client.auth.is_ipython', lambda *args: False) @patch.dict(os.environ, {'SSH_CONNECTION': 'ENABLED'}, clear=True) def test_get_auth_code_from_remote_connection(self): @@ -60,9 +60,9 @@ def test_get_auth_code_from_remote_connection(self): self.assertEqual(token, '4/P7q7W91a-oMsCeLvIaQm6bTrgtp7') self.assertEqual(redirect_uri, 'http://localhost:9901') - @patch('builtins.input', lambda *args: - 'https://oauth2.example.com/auth?code=4/P7q7W91a-oMsCeLvIaQm6bTrgtp7' - ) + @patch( + 'builtins.input', lambda *args: + 'https://oauth2.example.com/auth?code=4/P7q7W91a-oMsCeLvIaQm6bTrgtp7') @patch('kfp.client.auth.is_ipython', lambda *args: False) @patch.dict(os.environ, {'SSH_CLIENT': 'ENABLED'}, clear=True) def test_get_auth_code_from_remote_client(self): @@ -76,9 +76,9 @@ def test_get_auth_code_from_remote_client(self): def test_get_auth_code_from_remote_client_missing_code(self): self.assertRaises(KeyError, auth.get_auth_code, 'sample-client-id') - @patch('kfp.client.auth.get_auth_response_local', lambda *args: - 'https://oauth2.example.com/auth?code=4/P7q7W91a-oMsCeLvIaQm6bTrgtp7' - ) + @patch( + 'kfp.client.auth.get_auth_response_local', lambda *args: + 'https://oauth2.example.com/auth?code=4/P7q7W91a-oMsCeLvIaQm6bTrgtp7') @patch('kfp.client.auth.is_ipython', lambda *args: False) @patch.dict(os.environ, dict(), clear=True) def test_get_auth_code_from_local(self): diff --git a/sdk/python/kfp/client/client.py b/sdk/python/kfp/client/client.py index c6888603311..ef8736124ae 100644 --- a/sdk/python/kfp/client/client.py +++ b/sdk/python/kfp/client/client.py @@ -475,7 +475,7 @@ def create_experiment( namespace=namespace, ) experiment = self._experiment_api.experiment_service_create_experiment( - body=experiment) + experiment=experiment) link = f'{self._get_url_prefix()}/#/experiments/details/{experiment.experiment_id}' if auth.is_ipython(): @@ -736,7 +736,7 @@ def run_pipeline( runtime_config=job_config.runtime_config, service_account=service_account) - response = self._run_api.run_service_create_run(body=run_body) + response = self._run_api.run_service_create_run(run=run_body) link = f'{self._get_url_prefix()}/#/runs/details/{response.run_id}' if auth.is_ipython(): @@ -907,7 +907,7 @@ def create_recurring_run( max_concurrency=max_concurrency, service_account=service_account) return self._recurring_run_api.recurring_run_service_create_recurring_run( - body=job_body) + 
recurring_run=job_body) def _create_job_config( self, diff --git a/sdk/python/kfp/compiler/__init__.py b/sdk/python/kfp/compiler/__init__.py index 5da14a06b67..213177aae23 100644 --- a/sdk/python/kfp/compiler/__init__.py +++ b/sdk/python/kfp/compiler/__init__.py @@ -16,5 +16,7 @@ __all__ = [ 'Compiler', + 'KubernetesManifestOptions', ] from kfp.compiler.compiler import Compiler +from kfp.compiler.compiler_utils import KubernetesManifestOptions \ No newline at end of file diff --git a/sdk/python/kfp/compiler/compiler.py b/sdk/python/kfp/compiler/compiler.py index a77f606e89c..83aee81f173 100644 --- a/sdk/python/kfp/compiler/compiler.py +++ b/sdk/python/kfp/compiler/compiler.py @@ -20,6 +20,7 @@ from typing import Any, Dict, Optional from kfp.compiler import pipeline_spec_builder as builder +from kfp.compiler.compiler_utils import KubernetesManifestOptions from kfp.dsl import base_component from kfp.dsl.types import type_utils @@ -53,6 +54,9 @@ def compile( pipeline_name: Optional[str] = None, pipeline_parameters: Optional[Dict[str, Any]] = None, type_check: bool = True, + kubernetes_manifest_options: Optional[ + 'KubernetesManifestOptions'] = None, + kubernetes_manifest_format: bool = False, ) -> None: """Compiles the pipeline or component function into IR YAML. @@ -62,6 +66,8 @@ def compile( pipeline_name: Name of the pipeline. pipeline_parameters: Map of parameter names to argument values. type_check: Whether to enable type checking of component interfaces during compilation. + kubernetes_manifest_options: KubernetesManifestOptions object for Kubernetes manifest output during pipeline compilation. + kubernetes_manifest_format: Output the compiled pipeline as a Kubernetes manifest. """ with type_utils.TypeCheckManager(enable=type_check): @@ -83,4 +89,6 @@ def compile( pipeline_description=pipeline_func.description, platform_spec=pipeline_func.platform_spec, package_path=package_path, + kubernetes_manifest_options=kubernetes_manifest_options, + kubernetes_manifest_format=kubernetes_manifest_format, ) diff --git a/sdk/python/kfp/compiler/compiler_test.py b/sdk/python/kfp/compiler/compiler_test.py index bdd0033925b..012f9eb2b2c 100644 --- a/sdk/python/kfp/compiler/compiler_test.py +++ b/sdk/python/kfp/compiler/compiler_test.py @@ -31,6 +31,7 @@ from kfp.cli import cli from kfp.compiler import compiler from kfp.compiler import compiler_utils +from kfp.compiler.compiler_utils import KubernetesManifestOptions from kfp.dsl import Artifact from kfp.dsl import ContainerSpec from kfp.dsl import Dataset @@ -43,6 +44,9 @@ from kfp.dsl import PipelineTaskFinalStatus from kfp.dsl import tasks_group from kfp.dsl import yaml_component +from kfp.dsl.pipeline_config import KubernetesWorkspaceConfig +from kfp.dsl.pipeline_config import PipelineConfig +from kfp.dsl.pipeline_config import WorkspaceConfig from kfp.dsl.types import type_utils from kfp.pipeline_spec import pipeline_spec_pb2 import yaml @@ -1032,6 +1036,132 @@ def simple_pipeline(): 'empty-component'] self.assertTrue('inputs' not in dag_task) + def test_compile_with_kubernetes_manifest_format(self): + with tempfile.TemporaryDirectory() as tmpdir: + + @dsl.pipeline( + name='my-pipeline', description='A simple test pipeline') + def my_pipeline(input1: str): + print_op(message=input1) + + pipeline_name = 'test-pipeline' + pipeline_display_name = 'Test Pipeline' + pipeline_version_name = 'test-pipeline-v1' + pipeline_version_display_name = 'Test Pipeline Version' + namespace = 'test-ns' + + package_path = os.path.join(tmpdir, 'pipeline.yaml') + + # 
Test with include_pipeline_manifest=True + kubernetes_manifest_options = KubernetesManifestOptions( + pipeline_name=pipeline_name, + pipeline_display_name=pipeline_display_name, + pipeline_version_name=pipeline_version_name, + pipeline_version_display_name=pipeline_version_display_name, + namespace=namespace, + include_pipeline_manifest=True) + + compiler.Compiler().compile( + pipeline_func=my_pipeline, + package_path=package_path, + kubernetes_manifest_options=kubernetes_manifest_options, + kubernetes_manifest_format=True, + ) + + with open(package_path, 'r') as f: + documents = list(yaml.safe_load_all(f)) + + # Should have both Pipeline and PipelineVersion manifests + self.assertEqual(len(documents), 2) + + # Check Pipeline manifest + pipeline_manifest = documents[0] + self.assertEqual(pipeline_manifest['kind'], 'Pipeline') + self.assertEqual(pipeline_manifest['metadata']['name'], + pipeline_name) + self.assertEqual(pipeline_manifest['spec']['displayName'], + pipeline_display_name) + self.assertEqual(pipeline_manifest['spec']['description'], + 'A simple test pipeline') + self.assertEqual(pipeline_manifest['metadata']['namespace'], + namespace) + + # Check PipelineVersion manifest + pipeline_version_manifest = documents[1] + self.assertEqual(pipeline_version_manifest['kind'], + 'PipelineVersion') + self.assertEqual(pipeline_version_manifest['metadata']['name'], + pipeline_version_name) + self.assertEqual(pipeline_version_manifest['spec']['displayName'], + pipeline_version_display_name) + self.assertEqual(pipeline_version_manifest['spec']['description'], + 'A simple test pipeline') + self.assertEqual(pipeline_version_manifest['spec']['pipelineName'], + pipeline_name) + self.assertEqual(pipeline_version_manifest['metadata']['namespace'], + namespace) + self.assertNotIn('platformSpec', pipeline_version_manifest['spec']) + + # Test with include_pipeline_manifest=False and has a platform spec + @dsl.pipeline( + name='my-pipeline', + description='A simple test pipeline with platform spec', + pipeline_config=dsl.PipelineConfig( + workspace=dsl.WorkspaceConfig(size='25Gi'),), + ) + def my_pipeline(input1: str): + print_op(message=input1) + + package_path2 = os.path.join(tmpdir, 'pipeline2.yaml') + kubernetes_manifest_options2 = KubernetesManifestOptions( + pipeline_name=pipeline_name, + pipeline_display_name=pipeline_display_name, + pipeline_version_name=pipeline_version_name, + pipeline_version_display_name=pipeline_version_display_name, + namespace=namespace, + include_pipeline_manifest=False) + + compiler.Compiler().compile( + pipeline_func=my_pipeline, + package_path=package_path2, + kubernetes_manifest_options=kubernetes_manifest_options2, + kubernetes_manifest_format=True, + ) + + with open(package_path2, 'r') as f: + documents2 = list(yaml.safe_load_all(f)) + + # Should have only PipelineVersion manifest + self.assertEqual(len(documents2), 1) + + # Check PipelineVersion manifest + pipeline_version_manifest2 = documents2[0] + self.assertEqual(pipeline_version_manifest2['kind'], + 'PipelineVersion') + self.assertEqual(pipeline_version_manifest2['metadata']['name'], + pipeline_version_name) + self.assertEqual(pipeline_version_manifest2['spec']['displayName'], + pipeline_version_display_name) + self.assertEqual(pipeline_version_manifest2['spec']['pipelineName'], + pipeline_name) + self.assertEqual( + pipeline_version_manifest2['metadata']['namespace'], namespace) + self.assertEqual( + pipeline_version_manifest2['spec']['platformSpec'], { + 'platforms': { + 'kubernetes': { + 'pipelineConfig': { 
+ 'workspace': { + 'kubernetes': { + 'pvcSpecPatch': {} + }, + 'size': '25Gi' + } + } + } + } + }) + class TestCompilePipelineCaching(unittest.TestCase): @@ -1489,8 +1619,7 @@ def pipeline_spec_from_file(filepath: str) -> str: _PROJECT_ROOT = os.path.abspath(os.path.join(__file__, *([os.path.pardir] * 5))) -_TEST_DATA_DIR = os.path.join(_PROJECT_ROOT, 'sdk', 'python', 'test_data') -PIPELINES_TEST_DATA_DIR = os.path.join(_TEST_DATA_DIR, 'pipelines') +_TEST_DATA_DIR = os.path.join(_PROJECT_ROOT, 'test_data') UNSUPPORTED_COMPONENTS_TEST_DATA_DIR = os.path.join(_TEST_DATA_DIR, 'components', 'unsupported') @@ -1506,9 +1635,11 @@ def _test_compile(self, directory: str, fn: Optional[str] = None, additional_arguments: Optional[List[str]] = None) -> None: - py_file = os.path.join(directory, f'{file_base_name}.py') + py_file = os.path.join(directory, 'sdk_compiled_pipelines', 'valid', + f'{file_base_name}.py') - golden_compiled_file = os.path.join(directory, f'{file_base_name}.yaml') + golden_compiled_file = os.path.join(directory, 'sdk_compiled_pipelines', + 'valid', f'{file_base_name}.yaml') if additional_arguments is None: additional_arguments = [] @@ -1539,7 +1670,7 @@ def _test_compile(self, def test_two_step_pipeline(self): self._test_compile( 'two_step_pipeline', - directory=PIPELINES_TEST_DATA_DIR, + directory=_TEST_DATA_DIR, additional_arguments=[ '--pipeline-parameters', '{"text":"Hello KFP!"}' ]) @@ -1549,7 +1680,7 @@ def test_two_step_pipeline_failure_parameter_parse(self): r'Unterminated string starting at:'): self._test_compile( 'two_step_pipeline', - directory=PIPELINES_TEST_DATA_DIR, + directory=_TEST_DATA_DIR, additional_arguments=[ '--pipeline-parameters', '{"text":"Hello KFP!}' ]) @@ -1560,9 +1691,7 @@ def test_compile_components_not_found(self): r'Pipeline function or component "step1" not found in module two_step_pipeline\.py\.' 
): self._test_compile( - 'two_step_pipeline', - directory=PIPELINES_TEST_DATA_DIR, - fn='step1') + 'two_step_pipeline', directory=_TEST_DATA_DIR, fn='step1') def test_deprecation_warning(self): res = subprocess.run(['dsl-compile', '--help'], capture_output=True) @@ -4048,6 +4177,224 @@ def outer(): task = inner() foo_platform_set_bar_feature(task, 12) + def test_pipeline_with_workspace_config(self): + """Test that pipeline config correctly sets the workspace field.""" + config = PipelineConfig( + workspace=WorkspaceConfig( + size='10Gi', + kubernetes=KubernetesWorkspaceConfig( + pvcSpecPatch={'accessModes': ['ReadWriteOnce']}))) + + @dsl.pipeline(pipeline_config=config) + def my_pipeline(): + task = comp() + + expected = pipeline_spec_pb2.PlatformSpec() + json_format.ParseDict( + { + 'pipelineConfig': { + 'workspace': { + 'size': '10Gi', + 'kubernetes': { + 'pvcSpecPatch': { + 'accessModes': ['ReadWriteOnce'] + } + } + } + } + }, expected.platforms['kubernetes']) + + self.assertEqual(my_pipeline.platform_spec, expected) + + loaded_pipeline = compile_and_reload(my_pipeline) + self.assertEqual(loaded_pipeline.platform_spec, expected) + + # test that it can be compiled _again_ after reloading (tests YamlComponent internals) + compile_and_reload(loaded_pipeline) + + def test_workspace_config_validation(self): + """Test that workspace size validation works correctly.""" + from kfp.dsl.pipeline_config import WorkspaceConfig + + valid_sizes = ['10Gi', '1.5Gi', '1000Ti', '500Mi', '2Ki'] + for size in valid_sizes: + with self.subTest(size=size): + workspace = WorkspaceConfig(size=size) + self.assertEqual(workspace.size, size) + + with self.assertRaises(ValueError) as context: + WorkspaceConfig(size='') + self.assertIn('required and cannot be empty', str(context.exception)) + + # Test whitespace-only size raises error + whitespace_sizes = [' ', '\t', '\n', ' \t \n '] + for size in whitespace_sizes: + with self.subTest(size=repr(size)): + with self.assertRaises(ValueError) as context: + WorkspaceConfig(size=size) + self.assertIn('required and cannot be empty', + str(context.exception)) + + # Test None size raises error + with self.assertRaises(ValueError): + WorkspaceConfig(size=None) + + # Test invalid size raise error + invalid_sizes = ['abc', '10XYZ', 'Gi', '.', '1..5Gi', '-10Gi'] + for size in invalid_sizes: + with self.subTest(invalid_size=size): + with self.assertRaisesRegex( + ValueError, + r'Workspace size \".*\" is invalid\. 
Must be a valid Kubernetes resource quantity \(e\.g\., \"10Gi\", \"500Mi\", \"1Ti\"\)' + ): + WorkspaceConfig(size=size) + + # Test set_size method validation + workspace = WorkspaceConfig(size='10Gi') + + # Valid size update + workspace.set_size('20Gi') + self.assertEqual(workspace.size, '20Gi') + + # Empty size update raises error + with self.assertRaises(ValueError) as context: + workspace.set_size('') + self.assertIn('required and cannot be empty', str(context.exception)) + + # Whitespace-only size update raises error + with self.assertRaises(ValueError) as context: + workspace.set_size(' ') + self.assertIn('required and cannot be empty', str(context.exception)) + + def test_compile_fails_when_workspace_placeholder_used_without_workspace_config( + self): + """Tests that compilation fails if placeholder is used and no workspace configured.""" + + @dsl.component + def uses_workspace(workspace_path: str) -> str: + import os + file_path = os.path.join(workspace_path, 'test.txt') + os.makedirs(os.path.dirname(file_path), exist_ok=True) + with open(file_path, 'w') as f: + f.write('hello') + return file_path + + # No PipelineConfig provided (i.e., no workspace configured) + with self.assertRaisesRegex( + ValueError, + r'Workspace features are used \(e\.g\., dsl\.WORKSPACE_PATH_PLACEHOLDER\) but PipelineConfig\.workspace\.size is not set\.' + ): + + @dsl.pipeline + def my_pipeline(): + uses_workspace(workspace_path=dsl.WORKSPACE_PATH_PLACEHOLDER) + + with tempfile.TemporaryDirectory() as tmpdir: + output_yaml = os.path.join(tmpdir, 'pipeline.yaml') + compiler.Compiler().compile( + pipeline_func=my_pipeline, package_path=output_yaml) + + def test_compile_fails_when_workspace_placeholder_used_in_nested_groups_without_workspace_config( + self): + """Tests that compilation fails if placeholder is used within nested groups and no workspace configured.""" + + import os + import tempfile + + from kfp import compiler + from kfp import dsl + + @dsl.component + def gen_int() -> int: + return 0 + + @dsl.component + def uses_workspace(workspace_path: str) -> str: + import os as _os + file_path = _os.path.join(workspace_path, 'nested.txt') + _os.makedirs(_os.path.dirname(file_path), exist_ok=True) + with open(file_path, 'w') as f: + f.write('nested') + return file_path + + with self.assertRaisesRegex( + ValueError, + r'Workspace features are used \(e\.g\., dsl\.WORKSPACE_PATH_PLACEHOLDER\) but PipelineConfig\.workspace\.size is not set\.' 
+ ): + + @dsl.pipeline + def my_pipeline(): + x = gen_int() + with dsl.If(x.output == 0): + uses_workspace( + workspace_path=dsl.WORKSPACE_PATH_PLACEHOLDER) + + with tempfile.TemporaryDirectory() as tmpdir: + output_yaml = os.path.join(tmpdir, 'pipeline.yaml') + compiler.Compiler().compile( + pipeline_func=my_pipeline, package_path=output_yaml) + + +class TestPipelineSemaphoreMutex(unittest.TestCase): + + def test_pipeline_with_semaphore(self): + """Test that pipeline config correctly sets the semaphore key.""" + config = PipelineConfig() + config.semaphore_key = 'semaphore' + + @dsl.pipeline(pipeline_config=config) + def my_pipeline(): + task = comp() + + with tempfile.TemporaryDirectory() as tempdir: + output_yaml = os.path.join(tempdir, 'pipeline.yaml') + compiler.Compiler().compile( + pipeline_func=my_pipeline, package_path=output_yaml) + + with open(output_yaml, 'r') as f: + pipeline_docs = list(yaml.safe_load_all(f)) + + platform_spec = None + for doc in pipeline_docs: + if 'platforms' in doc: + platform_spec = doc + break + + self.assertIsNotNone(platform_spec, + 'No platforms section found in compiled output') + kubernetes_spec = platform_spec['platforms']['kubernetes'][ + 'pipelineConfig'] + self.assertEqual(kubernetes_spec['semaphoreKey'], 'semaphore') + + def test_pipeline_with_mutex(self): + """Test that pipeline config correctly sets the mutex name.""" + config = PipelineConfig() + config.mutex_name = 'mutex' + + @dsl.pipeline(pipeline_config=config) + def my_pipeline(): + task = comp() + + with tempfile.TemporaryDirectory() as tempdir: + output_yaml = os.path.join(tempdir, 'pipeline.yaml') + compiler.Compiler().compile( + pipeline_func=my_pipeline, package_path=output_yaml) + + with open(output_yaml, 'r') as f: + pipeline_docs = list(yaml.safe_load_all(f)) + + platform_spec = None + for doc in pipeline_docs: + if 'platforms' in doc: + platform_spec = doc + break + + self.assertIsNotNone(platform_spec, + 'No platforms section found in compiled output') + kubernetes_spec = platform_spec['platforms']['kubernetes'][ + 'pipelineConfig'] + self.assertEqual(kubernetes_spec['mutexName'], 'mutex') + class ExtractInputOutputDescription(unittest.TestCase): diff --git a/sdk/python/kfp/compiler/compiler_utils.py b/sdk/python/kfp/compiler/compiler_utils.py index 1924716f15e..6e99da5b600 100644 --- a/sdk/python/kfp/compiler/compiler_utils.py +++ b/sdk/python/kfp/compiler/compiler_utils.py @@ -15,7 +15,12 @@ import collections import copy -from typing import DefaultDict, Dict, List, Mapping, Set, Tuple, Union + +# isort: off +# Disable isort because isort and yapf conflict with each other. +from typing import (Any, DefaultDict, Dict, List, Mapping, Optional, Set, Tuple, + Union) +# isort: on from kfp import dsl from kfp.dsl import constants @@ -461,7 +466,7 @@ def get_outputs_for_all_groups( group_name_to_group = {group.name: group for group in all_groups} group_name_to_children = { group.name: [group.name for group in group.groups] + - [task.name for task in group.tasks] for group in all_groups + [task.name for task in group.tasks] for group in all_groups } outputs = collections.defaultdict(dict) @@ -777,15 +782,16 @@ def get_dependencies( return dependencies -def recursive_replace_placeholders(data: Union[Dict, List], old_value: str, - new_value: str) -> Union[Dict, List]: - """Recursively replaces values in a nested dict/list object. +def recursive_replace_placeholders(data: Any, old_value: str, + new_value: str) -> Union[Dict, List, str]: + """Replaces the given data. 
If the data is a dict/list object, it + recursively replaces values in it. This method is used to replace PipelineChannel objects with input parameter placeholders in a nested object like worker_pool_specs for custom jobs. Args: - data: A nested object that can contain dictionaries and/or lists. + data: An object, which could be a nested object including dictionaries and lists. old_value: The value that will be replaced. new_value: The value to replace the old value with. @@ -805,7 +811,10 @@ def recursive_replace_placeholders(data: Union[Dict, List], old_value: str, else: if isinstance(data, pipeline_channel.PipelineChannel): data = str(data) - return new_value if data == old_value else data + if isinstance(data, str): + return data.replace(old_value, new_value) + else: + return data # Note that cpu_to_float assumes the string has already been validated by the _validate_cpu_request_limit method. @@ -863,3 +872,73 @@ def _memory_to_float(memory: str) -> float: memory = float(memory) / constants._G return memory + + +class KubernetesManifestOptions: + """Options for Kubernetes manifest output during pipeline compilation. + + Args: + pipeline_name: Name for the Pipeline resource (defaults to pipeline_spec.pipeline_info.name). + pipeline_display_name: Display name for the Pipeline resource (defaults to pipeline_name). + pipeline_version_name: Name for the PipelineVersion resource (defaults to pipeline_name). + pipeline_version_display_name: Display name for the PipelineVersion resource (defaults to pipeline_display_name). + namespace: Kubernetes namespace for the resources (optional). + include_pipeline_manifest: Whether to include the Pipeline manifest (default: False). + """ + + def __init__( + self, + pipeline_name: Optional[str] = None, + pipeline_display_name: Optional[str] = None, + pipeline_version_name: Optional[str] = None, + pipeline_version_display_name: Optional[str] = None, + namespace: Optional[str] = None, + include_pipeline_manifest: bool = False, + ): + self.pipeline_name = pipeline_name + self.pipeline_display_name = pipeline_display_name + self.pipeline_version_name = pipeline_version_name + self.pipeline_version_display_name = pipeline_version_display_name + self.namespace = namespace + self.include_pipeline_manifest = include_pipeline_manifest + self._pipeline_spec = None + + def set_pipeline_spec(self, pipeline_spec): + self._pipeline_spec = pipeline_spec + + @property + def pipeline_name(self) -> Optional[str]: + if self._pipeline_name is not None: + return self._pipeline_name + if self._pipeline_spec is not None: + return self._pipeline_spec.pipeline_info.name + return None + + @pipeline_name.setter + def pipeline_name(self, value: Optional[str]): + self._pipeline_name = value + + @property + def pipeline_display_name(self) -> Optional[str]: + return self._pipeline_display_name or self.pipeline_name + + @pipeline_display_name.setter + def pipeline_display_name(self, value: Optional[str]): + self._pipeline_display_name = value + + @property + def pipeline_version_name(self) -> Optional[str]: + return self._pipeline_version_name or self.pipeline_name + + @pipeline_version_name.setter + def pipeline_version_name(self, value: Optional[str]): + self._pipeline_version_name = value + + @property + def pipeline_version_display_name(self) -> Optional[str]: + return (self._pipeline_version_display_name or + self.pipeline_version_name or self.pipeline_display_name) + + @pipeline_version_display_name.setter + def pipeline_version_display_name(self, value: Optional[str]): + 
self._pipeline_version_display_name = value diff --git a/sdk/python/kfp/compiler/compiler_utils_test.py b/sdk/python/kfp/compiler/compiler_utils_test.py index 4763bf965d4..69b0eae5582 100644 --- a/sdk/python/kfp/compiler/compiler_utils_test.py +++ b/sdk/python/kfp/compiler/compiler_utils_test.py @@ -106,7 +106,68 @@ def test_additional_input_name_for_pipeline_channel(self, channel, }, 'replica_count': 1 }], - },) + }, + { + 'data': [{ + 'first_name': + f'my_first_name: {str(pipeline_channel.PipelineParameterChannel(name="Output", channel_type="String", task_name="first_name"))}', + 'last_name': + f'my_last_name: {str(pipeline_channel.PipelineParameterChannel(name="Output", channel_type="String", task_name="last_name"))}', + }], + 'old_value': + '{{channel:task=first_name;name=Output;type=String;}}', + 'new_value': + '{{$.inputs.parameters[' + 'pipelinechannel--first_name-Output' + ']}}', + 'expected': [{ + 'first_name': + 'my_first_name: {{$.inputs.parameters[pipelinechannel--first_name-Output]}}', + 'last_name': + f'my_last_name: {str(pipeline_channel.PipelineParameterChannel(name="Output", channel_type="String", task_name="last_name"))}', + }], + }, + { + 'data': [{ + 'project': 'project', + 'location': 'US', + 'job_configuration_query': { + 'query': 'SELECT * FROM `project.dataset.input_table`', + 'destinationTable': { + 'projectId': + 'project', + 'datasetId': + 'dataset', + 'tableId': + f'output_table_{str(pipeline_channel.PipelineParameterChannel(name="Output", channel_type="String", task_name="table_suffix"))}', + }, + 'writeDisposition': 'WRITE_TRUNCATE', + }, + }], + 'old_value': + '{{channel:task=table_suffix;name=Output;type=String;}}', + 'new_value': + '{{$.inputs.parameters[' + 'pipelinechannel--table_suffix-Output' + ']}}', + 'expected': [{ + 'project': 'project', + 'location': 'US', + 'job_configuration_query': { + 'query': 'SELECT * FROM `project.dataset.input_table`', + 'destinationTable': { + 'projectId': + 'project', + 'datasetId': + 'dataset', + 'tableId': + 'output_table_{{$.inputs.parameters[pipelinechannel--table_suffix-Output]}}', + }, + 'writeDisposition': 'WRITE_TRUNCATE', + }, + }], + }, + ) def test_recursive_replace_placeholders(self, data, old_value, new_value, expected): self.assertEqual( diff --git a/sdk/python/kfp/compiler/pipeline_spec_builder.py b/sdk/python/kfp/compiler/pipeline_spec_builder.py index acef4fc7bc1..431f7621bf1 100644 --- a/sdk/python/kfp/compiler/pipeline_spec_builder.py +++ b/sdk/python/kfp/compiler/pipeline_spec_builder.py @@ -25,6 +25,7 @@ import kfp from kfp import dsl from kfp.compiler import compiler_utils +from kfp.compiler.compiler_utils import KubernetesManifestOptions from kfp.dsl import component_factory from kfp.dsl import for_loop from kfp.dsl import pipeline_channel @@ -35,7 +36,7 @@ from kfp.dsl import structures from kfp.dsl import tasks_group from kfp.dsl import utils -from kfp.dsl.types import artifact_types +from kfp.dsl.component_task_config import TaskConfigField from kfp.dsl.types import type_utils from kfp.pipeline_spec import pipeline_spec_pb2 import yaml @@ -71,8 +72,9 @@ def to_protobuf_value(value: type_utils.PARAMETER_TYPES) -> struct_pb2.Value: return struct_pb2.Value(number_value=value) elif isinstance(value, dict): return struct_pb2.Value( - struct_value=struct_pb2.Struct( - fields={k: to_protobuf_value(v) for k, v in value.items()})) + struct_value=struct_pb2.Struct(fields={ + k: to_protobuf_value(v) for k, v in value.items() + })) elif isinstance(value, list): return struct_pb2.Value( 
list_value=struct_pb2.ListValue( @@ -283,13 +285,8 @@ def build_task_spec_for_task( additional_input_placeholder = placeholders.InputValuePlaceholder( additional_input_name)._to_string() - if isinstance(input_value, str): - input_value = input_value.replace( - channel.pattern, additional_input_placeholder) - else: - input_value = compiler_utils.recursive_replace_placeholders( - input_value, channel.pattern, - additional_input_placeholder) + input_value = compiler_utils.recursive_replace_placeholders( + input_value, channel.pattern, additional_input_placeholder) if channel.task_name: # Value is produced by an upstream task. @@ -395,6 +392,16 @@ def _build_component_spec_from_component_spec_structure( component_spec.input_definitions.parameters[ input_name].description = input_spec.description + # Special handling for TaskConfig second. + elif type_utils.is_task_config_type(input_spec.type): + component_spec.input_definitions.parameters[ + input_name].parameter_type = pipeline_spec_pb2.ParameterType.TASK_CONFIG + component_spec.input_definitions.parameters[ + input_name].is_optional = True + if input_spec.description: + component_spec.input_definitions.parameters[ + input_name].description = input_spec.description + elif type_utils.is_parameter_type(input_spec.type): component_spec.input_definitions.parameters[ input_name].parameter_type = type_utils.get_parameter_type( @@ -445,6 +452,11 @@ def _build_component_spec_from_component_spec_structure( component_spec.output_definitions.artifacts[ output_name].description = output_spec.description + # Attach TaskConfig passthroughs if present on the structure + if getattr(component_spec_struct, 'task_config_passthroughs', None): + for p in component_spec_struct.task_config_passthroughs or []: + component_spec.task_config_passthroughs.append(p.to_proto()) + return component_spec @@ -623,6 +635,91 @@ def build_container_spec_for_task( A PipelineContainerSpec object for the task. """ + def _raise_passthrough_error(task: pipeline_task.PipelineTask, + resource_type: str) -> None: + raise ValueError( + f"Task '{task.name}' cannot handle resource type '{resource_type}' based on the values in task_config_passthroughs" + ) + + def _validate_task_config_passthroughs_for_container_settings( + task: pipeline_task.PipelineTask,) -> None: + """Validates that when a component declares TaskConfig passthroughs, + the task does not set container-level resources/env that are not + listed. + + This prevents silent no-ops at runtime when a component expects + to handle certain TaskConfig fields externally. 
+ """ + passthroughs = getattr(task.component_spec, 'task_config_passthroughs', + None) + if not passthroughs: + return + + allowed_fields = {pt.field for pt in passthroughs} + + if task.container_spec and task.container_spec.resources: + res = task.container_spec.resources + if any([ + res.cpu_request, + res.cpu_limit, + res.memory_request, + res.memory_limit, + res.accelerator_type, + res.accelerator_count, + ]): + if TaskConfigField.RESOURCES not in allowed_fields: + _raise_passthrough_error(task, + TaskConfigField.RESOURCES.name) + + if task.container_spec and task.container_spec.env: + if TaskConfigField.ENV not in allowed_fields: + _raise_passthrough_error(task, TaskConfigField.ENV.name) + + _validate_task_config_passthroughs_for_container_settings(task) + + def _validate_task_config_passthroughs_for_kubernetes_settings( + task: pipeline_task.PipelineTask,) -> None: + """Validates that when a component declares TaskConfig passthroughs, + the task does not set Kubernetes platform options that are not + listed.""" + passthroughs = getattr(task.component_spec, 'task_config_passthroughs', + None) + if not passthroughs: + return + + allowed_fields = {pt.field for pt in passthroughs} + k8s_cfg = (task.platform_config or {}).get('kubernetes', {}) or {} + + def _has_any(cfg: dict, keys: list[str]) -> bool: + return any(cfg.get(k) for k in keys) + + if _has_any(k8s_cfg, ['tolerations']): + if TaskConfigField.KUBERNETES_TOLERATIONS not in allowed_fields: + _raise_passthrough_error( + task, TaskConfigField.KUBERNETES_TOLERATIONS.name) + + if _has_any(k8s_cfg, ['nodeSelector']): + if TaskConfigField.KUBERNETES_NODE_SELECTOR not in allowed_fields: + _raise_passthrough_error( + task, TaskConfigField.KUBERNETES_NODE_SELECTOR.name) + + if _has_any(k8s_cfg, ['nodeAffinity', 'podAffinity']): + if TaskConfigField.KUBERNETES_AFFINITY not in allowed_fields: + _raise_passthrough_error( + task, TaskConfigField.KUBERNETES_AFFINITY.name) + + volume_like_keys = [ + 'pvcMount', + 'secretAsVolume', + 'configMapAsVolume', + ] + if _has_any(k8s_cfg, volume_like_keys): + if TaskConfigField.KUBERNETES_VOLUMES not in allowed_fields: + _raise_passthrough_error( + task, TaskConfigField.KUBERNETES_VOLUMES.name) + + _validate_task_config_passthroughs_for_kubernetes_settings(task) + def convert_to_placeholder(input_value: str) -> str: """Checks if input is a pipeline channel and if so, converts to compiler injected input name.""" @@ -1175,6 +1272,7 @@ def modify_pipeline_spec_with_override( pipeline_spec: pipeline_spec_pb2.PipelineSpec, pipeline_name: Optional[str], pipeline_parameters: Optional[Mapping[str, Any]], + pipeline_display_name: Optional[str] = None, ) -> pipeline_spec_pb2.PipelineSpec: """Modifies the PipelineSpec using arguments passed to the Compiler.compile method. @@ -1183,6 +1281,7 @@ def modify_pipeline_spec_with_override( pipeline_spec (pipeline_spec_pb2.PipelineSpec): PipelineSpec to modify. pipeline_name (Optional[str]): Name of the pipeline. Overrides component name. pipeline_parameters (Optional[Mapping[str, Any]]): Pipeline parameters. Overrides component input default values. + pipeline_display_name (Optional[str]): Display Name of the pipeline. Overrides default which is pipeline name Returns: The modified PipelineSpec copy. 
@@ -1195,6 +1294,8 @@ def modify_pipeline_spec_with_override( if pipeline_name is not None: pipeline_spec.pipeline_info.name = pipeline_name + if pipeline_display_name is not None: + pipeline_spec.pipeline_info.display_name = pipeline_display_name # Verify that pipeline_parameters contains only input names # that match the pipeline inputs definition. @@ -2019,16 +2120,33 @@ def write_pipeline_spec_to_file( pipeline_description: Union[str, None], platform_spec: pipeline_spec_pb2.PlatformSpec, package_path: str, + kubernetes_manifest_options: Optional[KubernetesManifestOptions] = None, + kubernetes_manifest_format: bool = False, ) -> None: """Writes PipelineSpec into a YAML or JSON (deprecated) file. Args: pipeline_spec: The PipelineSpec. pipeline_description: Description from pipeline docstring. - package_path: The path to which to write the PipelineSpec. platform_spec: The PlatformSpec. + package_path: The path to which to write the PipelineSpec. + kubernetes_manifest_options: KubernetesManifestOptions object with manifest options. + kubernetes_manifest_format: Output the compiled pipeline as a Kubernetes manifest. """ + if kubernetes_manifest_format: + opts = kubernetes_manifest_options or KubernetesManifestOptions() + opts.set_pipeline_spec(pipeline_spec) + _write_kubernetes_manifest_to_file( + package_path=package_path, + opts=opts, + pipeline_spec=pipeline_spec, + platform_spec=platform_spec, + pipeline_description=pipeline_description, + ) + return + pipeline_spec_dict = json_format.MessageToDict(pipeline_spec) + yaml_comments = extract_comments_from_pipeline_spec(pipeline_spec_dict, pipeline_description) has_platform_specific_features = len(platform_spec.platforms) > 0 @@ -2061,13 +2179,88 @@ def write_pipeline_spec_to_file( f'The output path {package_path} should end with ".yaml".') +def _write_kubernetes_manifest_to_file( + package_path: str, + opts: KubernetesManifestOptions, + pipeline_spec: pipeline_spec_pb2.PipelineSpec, + platform_spec: pipeline_spec_pb2.PlatformSpec, + pipeline_description: Union[str, None] = None, +) -> None: + pipeline_name = opts.pipeline_name + pipeline_display_name = opts.pipeline_display_name + pipeline_version_display_name = opts.pipeline_version_display_name + pipeline_version_name = opts.pipeline_version_name + namespace = opts.namespace + include_pipeline_manifest = opts.include_pipeline_manifest + + pipeline_spec_dict = json_format.MessageToDict(pipeline_spec) + platform_spec_dict = json_format.MessageToDict(platform_spec) + + documents = [] + + # Pipeline manifest + if include_pipeline_manifest: + pipeline_metadata = {'name': pipeline_name} + if namespace: + pipeline_metadata['namespace'] = namespace + pipeline_manifest = { + 'apiVersion': 'pipelines.kubeflow.org/v2beta1', + 'kind': 'Pipeline', + 'metadata': pipeline_metadata, + 'spec': { + 'displayName': pipeline_display_name, + }, + } + if pipeline_description: + pipeline_manifest['spec']['description'] = pipeline_description + documents.append(pipeline_manifest) + + # PipelineVersion manifest + pipeline_version_metadata = {'name': pipeline_version_name} + if namespace: + pipeline_version_metadata['namespace'] = namespace + pipeline_version_manifest = { + 'apiVersion': 'pipelines.kubeflow.org/v2beta1', + 'kind': 'PipelineVersion', + 'metadata': pipeline_version_metadata, + 'spec': { + 'displayName': pipeline_version_display_name, + 'pipelineName': pipeline_name, + 'pipelineSpec': pipeline_spec_dict, + }, + } + + if platform_spec_dict: + pipeline_version_manifest['spec']['platformSpec'] = 
platform_spec_dict + + if pipeline_description: + pipeline_version_manifest['spec']['description'] = pipeline_description + documents.append(pipeline_version_manifest) + + with open(package_path, 'w') as yaml_file: + yaml.dump_all( + documents=documents, + stream=yaml_file, + sort_keys=True, + ) + + def _merge_pipeline_config(pipelineConfig: pipeline_config.PipelineConfig, platformSpec: pipeline_spec_pb2.PlatformSpec): - # TODO: add pipeline config options (ttl, semaphore, etc.) to the dict - # json_format.ParseDict( - # {'pipelineConfig': { - # '': pipelineConfig., - # }}, platformSpec.platforms['kubernetes']) + config_dict = {} + + workspace = pipelineConfig.workspace + if workspace is not None: + config_dict['workspace'] = workspace.get_workspace() + + if pipelineConfig.semaphore_key is not None: + config_dict['semaphoreKey'] = pipelineConfig.semaphore_key + if pipelineConfig.mutex_name is not None: + config_dict['mutexName'] = pipelineConfig.mutex_name + + if config_dict: + json_format.ParseDict({'pipelineConfig': config_dict}, + platformSpec.platforms['kubernetes']) return platformSpec diff --git a/sdk/python/kfp/compiler/pipeline_spec_builder_test.py b/sdk/python/kfp/compiler/pipeline_spec_builder_test.py index 29e79266d4f..36b2e04e887 100644 --- a/sdk/python/kfp/compiler/pipeline_spec_builder_test.py +++ b/sdk/python/kfp/compiler/pipeline_spec_builder_test.py @@ -13,12 +13,19 @@ # limitations under the License. """Tests for kfp.compiler.pipeline_spec_builder.""" +import os +import tempfile import unittest from absl.testing import parameterized from google.protobuf import json_format from google.protobuf import struct_pb2 +import kfp +from kfp import dsl +from kfp import kubernetes +from kfp.compiler import compiler from kfp.compiler import pipeline_spec_builder +from kfp.dsl import TaskConfigField from kfp.pipeline_spec import pipeline_spec_pb2 import yaml @@ -406,6 +413,148 @@ def test_merge_different_platforms(self): self.assertEqual(base_spec, expected) +class TestTaskConfigPassthroughValidation(unittest.TestCase): + + def test_resources_set_without_passthrough_raises(self): + + @dsl.component(task_config_passthroughs=[TaskConfigField.ENV]) + def comp(): + pass + + with self.assertRaisesRegex( + ValueError, + r"Task 'comp' cannot handle resource type 'RESOURCES'"): + + @dsl.pipeline + def pipe(): + t = comp() + t.set_cpu_limit('1') + + def test_env_set_without_passthrough_raises(self): + + @dsl.component(task_config_passthroughs=[TaskConfigField.RESOURCES]) + def comp(): + pass + + with self.assertRaisesRegex( + ValueError, r"Task 'comp' cannot handle resource type 'ENV'"): + + @dsl.pipeline + def pipe(): + t = comp() + t.set_env_variable('A', 'B') + + def test_tolerations_without_passthrough_raises(self): + + @dsl.component(task_config_passthroughs=[TaskConfigField.RESOURCES]) + def comp(): + pass + + with self.assertRaisesRegex( + ValueError, + r"Task 'comp' cannot handle resource type 'KUBERNETES_TOLERATIONS'" + ): + + @dsl.pipeline + def pipe(): + t = comp() + kubernetes.add_toleration(t, key='k', operator='Exists') + + def test_node_selector_without_passthrough_raises(self): + + @dsl.component(task_config_passthroughs=[TaskConfigField.RESOURCES]) + def comp(): + pass + + with self.assertRaisesRegex( + ValueError, + r"Task 'comp' cannot handle resource type 'KUBERNETES_NODE_SELECTOR'" + ): + + @dsl.pipeline + def pipe(): + t = comp() + kubernetes.add_node_selector(t, label_key='k', label_value='v') + + def test_affinity_without_passthrough_raises(self): + + 
@dsl.component(task_config_passthroughs=[TaskConfigField.RESOURCES]) + def comp(): + pass + + with self.assertRaisesRegex( + ValueError, + r"Task 'comp' cannot handle resource type 'KUBERNETES_AFFINITY'" + ): + + @dsl.pipeline + def pipe(): + t = comp() + kubernetes.add_node_affinity( + t, + match_expressions=[{ + 'key': 'disktype', + 'operator': 'In', + 'values': ['ssd'] + }], + ) + + def test_volumes_without_passthrough_raises(self): + + @dsl.component(task_config_passthroughs=[TaskConfigField.RESOURCES]) + def comp(): + pass + + with self.assertRaisesRegex( + ValueError, + r"Task 'comp' cannot handle resource type 'KUBERNETES_VOLUMES'" + ): + + @dsl.pipeline + def pipe(): + t = comp() + kubernetes.mount_pvc(t, pvc_name='my-pvc', mount_path='/mnt') + + +class TestTaskConfigPassthroughValidationPositive(unittest.TestCase): + + def test_all_passthroughs_allow_all_settings_compiles(self): + + @dsl.component(task_config_passthroughs=[ + TaskConfigField.RESOURCES, + TaskConfigField.ENV, + TaskConfigField.KUBERNETES_TOLERATIONS, + TaskConfigField.KUBERNETES_NODE_SELECTOR, + TaskConfigField.KUBERNETES_AFFINITY, + TaskConfigField.KUBERNETES_VOLUMES, + ]) + def comp(): + pass + + @dsl.pipeline + def pipe(): + t = comp() + # Set container resources and env + t.set_cpu_limit('1') + t.set_env_variable('A', 'B') + # Set Kubernetes platform configs + kubernetes.add_toleration(t, key='k', operator='Exists') + kubernetes.add_node_selector(t, label_key='k', label_value='v') + kubernetes.add_node_affinity( + t, + match_expressions=[{ + 'key': 'disktype', + 'operator': 'In', + 'values': ['ssd'] + }]) + kubernetes.mount_pvc(t, pvc_name='my-pvc', mount_path='/mnt') + + with tempfile.TemporaryDirectory() as tmpdir: + package_path = os.path.join(tmpdir, 'pipeline.yaml') + compiler.Compiler().compile( + pipeline_func=pipe, package_path=package_path) + + def pipeline_spec_from_file(filepath: str) -> str: with open(filepath, 'r') as f: dictionary = yaml.safe_load(f) diff --git a/sdk/python/kfp/compiler/read_write_test.py b/sdk/python/kfp/compiler/read_write_test.py deleted file mode 100644 index 9be9d4ca2c8..00000000000 --- a/sdk/python/kfp/compiler/read_write_test.py +++ /dev/null @@ -1,199 +0,0 @@ -# Copyright 2022 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os -import re -import sys -import tempfile -from typing import Any, Callable, Dict, List, Union -import unittest - -from absl.testing import parameterized -from kfp import compiler -from kfp import components -from kfp.dsl import placeholders -from kfp.dsl import python_component -from kfp.dsl import structures -import yaml - -_PROJECT_ROOT = os.path.abspath(os.path.join(__file__, *([os.path.pardir] * 5))) - - -def create_test_cases() -> List[Dict[str, Any]]: - parameters: List[Dict[str, Any]] = [] - config_path = os.path.join(_PROJECT_ROOT, 'sdk', 'python', 'test_data', - 'test_data_config.yaml') - with open(config_path) as f: - config = yaml.safe_load(f) - for name, test_group in config.items(): - test_data_dir = os.path.join(_PROJECT_ROOT, test_group['test_data_dir']) - - parameters.extend({ - 'name': name + '-' + test_case['module'], - 'test_case': test_case['module'], - 'test_data_dir': test_data_dir, - 'read': test_group['read'], - 'write': test_group['write'], - 'function': test_case['name'] - } for test_case in test_group['test_cases']) - - return parameters - - -def import_obj_from_file(python_path: str, obj_name: str) -> Any: - sys.path.insert(0, os.path.dirname(python_path)) - module_name = os.path.splitext(os.path.split(python_path)[1])[0] - module = __import__(module_name, fromlist=[obj_name]) - if not hasattr(module, obj_name): - raise ValueError( - f'Object "{obj_name}" not found in module {python_path}.') - return getattr(module, obj_name) - - -def ignore_kfp_version_helper(spec: Dict[str, Any]) -> Dict[str, Any]: - """Ignores kfp sdk versioning in command. - - Takes in a YAML input and ignores the kfp sdk versioning in command - for comparison between compiled file and goldens. - """ - pipeline_spec = spec.get('pipelineSpec', spec) - - if 'executors' in pipeline_spec['deploymentSpec']: - for executor in pipeline_spec['deploymentSpec']['executors']: - pipeline_spec['deploymentSpec']['executors'][ - executor] = yaml.safe_load( - re.sub( - r"'kfp==(\d+).(\d+).(\d+)(-[a-z]+.\d+)?'", 'kfp', - yaml.dump( - pipeline_spec['deploymentSpec']['executors'] - [executor], - sort_keys=True))) - return spec - - -def load_compiled_file(filename: str) -> Dict[str, Any]: - with open(filename) as f: - contents = yaml.safe_load(f) - pipeline_spec = contents[ - 'pipelineSpec'] if 'pipelineSpec' in contents else contents - # ignore the sdkVersion - del pipeline_spec['sdkVersion'] - return ignore_kfp_version_helper(contents) - - -def handle_placeholders( - component_spec: structures.ComponentSpec) -> structures.ComponentSpec: - if component_spec.implementation.container is not None: - if component_spec.implementation.container.command is not None: - component_spec.implementation.container.command = [ - placeholders.convert_command_line_element_to_string(c) - for c in component_spec.implementation.container.command - ] - if component_spec.implementation.container.args is not None: - component_spec.implementation.container.args = [ - placeholders.convert_command_line_element_to_string(a) - for a in component_spec.implementation.container.args - ] - return component_spec - - -def handle_expected_diffs( - component_spec: structures.ComponentSpec) -> structures.ComponentSpec: - """Strips some component spec fields that should be ignored when comparing - with golden result.""" - # Ignore description when comparing components specs read in from v1 component YAML and from IR YAML, because non lightweight Python components defined in v1 YAML can have a description field, but IR YAML does not 
preserve this field unless the component is a lightweight Python function-based component - component_spec.description = None - # ignore SDK version so that golden snapshots don't need to be updated between SDK version bump - if component_spec.implementation.graph is not None: - component_spec.implementation.graph.sdk_version = '' - - return handle_placeholders(component_spec) - - -class ReadWriteTest(parameterized.TestCase): - - def _compile_and_load_component( - self, compilable: Union[Callable[..., Any], - python_component.PythonComponent]): - with tempfile.TemporaryDirectory() as tmp_dir: - tmp_file = os.path.join(tmp_dir, 're_compiled_output.yaml') - compiler.Compiler().compile(compilable, tmp_file) - return components.load_component_from_file(tmp_file) - - def _compile_and_read_yaml( - self, compilable: Union[Callable[..., Any], - python_component.PythonComponent]): - with tempfile.TemporaryDirectory() as tmp_dir: - tmp_file = os.path.join(tmp_dir, 're_compiled_output.yaml') - compiler.Compiler().compile(compilable, tmp_file) - return load_compiled_file(tmp_file) - - def _test_serialization_deserialization_consistency(self, yaml_file: str): - """Tests serialization and deserialization consistency.""" - original_component = components.load_component_from_file(yaml_file) - reloaded_component = self._compile_and_load_component( - original_component) - self.assertEqual( - handle_expected_diffs(original_component.component_spec), - handle_expected_diffs(reloaded_component.component_spec), - f'\n\n\nError with (de)serialization consistency of: {yaml_file}') - - def _test_serialization_correctness( - self, - python_file: str, - yaml_file: str, - function_name: str, - ): - """Tests serialization correctness.""" - pipeline = import_obj_from_file(python_file, function_name) - compiled_result = self._compile_and_read_yaml(pipeline) - golden_result = load_compiled_file(yaml_file) - self.assertEqual(compiled_result, golden_result, - f'\n\n\nError with compiling: {python_file}') - - @parameterized.parameters(create_test_cases()) - def test( - self, - name: str, - test_case: str, - test_data_dir: str, - function: str, - read: bool, - write: bool, - ): - """Tests serialization and deserialization consistency and correctness. - - Args: - name: '{test_group_name}-{test_case_name}'. Useful for print statements/debugging. - test_case: Test case name (without file extension). - test_data_dir: The directory containing the test case files. - function: The function name to compile. - read: Whether the pipeline/component supports deserialization from YAML (IR, except for V1 component YAML back compatability tests). - write: Whether the pipeline/component supports compilation from a Python file. 
- """ - yaml_file = os.path.join(test_data_dir, f'{test_case}.yaml') - py_file = os.path.join(test_data_dir, f'{test_case}.py') - if write: - self._test_serialization_correctness( - python_file=py_file, - yaml_file=yaml_file, - function_name=function) - - if read: - self._test_serialization_deserialization_consistency( - yaml_file=yaml_file) - - -if __name__ == '__main__': - unittest.main() diff --git a/sdk/python/kfp/dsl/__init__.py b/sdk/python/kfp/dsl/__init__.py index 8335f76d1b0..afc62fba7d9 100644 --- a/sdk/python/kfp/dsl/__init__.py +++ b/sdk/python/kfp/dsl/__init__.py @@ -21,6 +21,7 @@ 'InputPath', 'OutputPath', 'PipelineTaskFinalStatus', + 'TaskConfig', 'Artifact', 'ClassificationMetrics', 'Dataset', @@ -40,9 +41,13 @@ 'PIPELINE_ROOT_PLACEHOLDER', 'PIPELINE_JOB_CREATE_TIME_UTC_PLACEHOLDER', 'PIPELINE_JOB_SCHEDULE_TIME_UTC_PLACEHOLDER', + 'WORKSPACE_PATH_PLACEHOLDER', + 'run_notebook', ] import os +from kfp.dsl.notebook_helpers import run_notebook +from kfp.dsl.task_config import TaskConfig from kfp.dsl.task_final_status import PipelineTaskFinalStatus from kfp.dsl.types.artifact_types import Artifact from kfp.dsl.types.artifact_types import ClassificationMetrics @@ -53,6 +58,7 @@ from kfp.dsl.types.artifact_types import Metrics from kfp.dsl.types.artifact_types import Model from kfp.dsl.types.artifact_types import SlicedClassificationMetrics +from kfp.dsl.types.type_annotations import EmbeddedAnnotation from kfp.dsl.types.type_annotations import InputAnnotation from kfp.dsl.types.type_annotations import InputPath from kfp.dsl.types.type_annotations import OutputAnnotation @@ -209,6 +215,23 @@ def my_pipeline(): ) """ +WORKSPACE_PATH_PLACEHOLDER = '{{$.workspace_path}}' +"""A placeholder used to obtain the path to the shared workspace within a component. + + Example: + :: + + @dsl.pipeline( + pipeline_config=dsl.PipelineConfig( + workspace=dsl.WorkspaceConfig(size='100Gi'), + ), + ) + def my_pipeline(): + clone_repo_task = clone_repo( + workspacePath=dsl.WORKSPACE_PATH_PLACEHOLDER, repo='https://github.com/example/repo', + ) +""" + T = TypeVar('T') Input = Annotated[T, InputAnnotation] """Type generic used to represent an input artifact of type ``T``, where ``T`` is an artifact class. @@ -237,6 +260,8 @@ def my_pipeline(): """ Output = Annotated[T, OutputAnnotation] +# Runtime-only input for accessing embedded artifacts extracted at runtime. +EmbeddedInput = Annotated[T, EmbeddedAnnotation] """A type generic used to represent an output artifact of type ``T``, where ``T`` is an artifact class. The argument typed with this annotation is provided at runtime by the executing backend and does not need to be passed as an input by the pipeline author (see example). Use ``Input[Artifact]`` or ``Output[Artifact]`` to indicate whether the enclosed artifact is a component input or output. 
@@ -265,12 +290,17 @@ def my_pipeline(): # compile-time only dependencies if os.environ.get('_KFP_RUNTIME', 'false') != 'true': from kfp.dsl.component_decorator import component + from kfp.dsl.component_task_config import TaskConfigField + from kfp.dsl.component_task_config import TaskConfigPassthrough from kfp.dsl.container_component_decorator import container_component # TODO: Collected should be moved to pipeline_channel.py, consistent with OneOf from kfp.dsl.for_loop import Collected from kfp.dsl.importer_node import importer + from kfp.dsl.notebook_component_decorator import notebook_component from kfp.dsl.pipeline_channel import OneOf + from kfp.dsl.pipeline_config import KubernetesWorkspaceConfig from kfp.dsl.pipeline_config import PipelineConfig + from kfp.dsl.pipeline_config import WorkspaceConfig from kfp.dsl.pipeline_context import pipeline from kfp.dsl.pipeline_task import PipelineTask from kfp.dsl.placeholders import ConcatPlaceholder @@ -283,21 +313,10 @@ def my_pipeline(): from kfp.dsl.tasks_group import If from kfp.dsl.tasks_group import ParallelFor __all__.extend([ - 'component', - 'container_component', - 'pipeline', - 'importer', - 'ContainerSpec', - 'Condition', - 'If', - 'Elif', - 'Else', - 'OneOf', - 'ExitHandler', - 'ParallelFor', - 'Collected', - 'IfPresentPlaceholder', - 'ConcatPlaceholder', - 'PipelineTask', - 'PipelineConfig', + 'component', 'container_component', 'pipeline', 'importer', + 'ContainerSpec', 'Condition', 'If', 'Elif', 'Else', 'OneOf', + 'ExitHandler', 'ParallelFor', 'Collected', 'IfPresentPlaceholder', + 'ConcatPlaceholder', 'PipelineTask', 'PipelineConfig', + 'WorkspaceConfig', 'KubernetesWorkspaceConfig', 'TaskConfigField', + 'TaskConfigPassthrough', 'notebook_component' ]) diff --git a/sdk/python/kfp/dsl/base_component.py b/sdk/python/kfp/dsl/base_component.py index 2682321417d..e1693ece3e3 100644 --- a/sdk/python/kfp/dsl/base_component.py +++ b/sdk/python/kfp/dsl/base_component.py @@ -101,8 +101,8 @@ def __call__(self, *args, **kwargs) -> pipeline_task.PipelineTask: return pipeline_task.PipelineTask( component_spec=self.component_spec, args=task_inputs, - execute_locally=pipeline_context.Pipeline.get_default_pipeline() is - None, + execute_locally=pipeline_context.Pipeline.get_default_pipeline() + is None, execution_caching_default=pipeline_context.Pipeline .get_execution_caching_default(), ) diff --git a/sdk/python/kfp/dsl/component_decorator.py b/sdk/python/kfp/dsl/component_decorator.py index c2dee355b16..507f11fc9eb 100644 --- a/sdk/python/kfp/dsl/component_decorator.py +++ b/sdk/python/kfp/dsl/component_decorator.py @@ -13,23 +13,30 @@ # limitations under the License. 
import functools -from typing import Callable, List, Optional +from typing import Callable, List, Optional, Union import warnings from kfp.dsl import component_factory - - -def component(func: Optional[Callable] = None, - *, - base_image: Optional[str] = None, - target_image: Optional[str] = None, - packages_to_install: List[str] = None, - pip_index_urls: Optional[List[str]] = None, - output_component_file: Optional[str] = None, - install_kfp_package: bool = True, - kfp_package_path: Optional[str] = None, - pip_trusted_hosts: Optional[List[str]] = None, - use_venv: bool = False): +from kfp.dsl.component_task_config import TaskConfigField +from kfp.dsl.component_task_config import TaskConfigPassthrough + + +def component( + func: Optional[Callable] = None, + *, + base_image: Optional[str] = None, + target_image: Optional[str] = None, + packages_to_install: List[str] = None, + pip_index_urls: Optional[List[str]] = None, + output_component_file: Optional[str] = None, + install_kfp_package: bool = True, + kfp_package_path: Optional[str] = None, + pip_trusted_hosts: Optional[List[str]] = None, + use_venv: bool = False, + additional_funcs: Optional[List[Callable]] = None, + embedded_artifact_path: Optional[str] = None, + task_config_passthroughs: Optional[List[Union[TaskConfigPassthrough, + TaskConfigField]]] = None): """Decorator for Python-function based components. A KFP component can either be a lightweight component or a containerized @@ -64,6 +71,16 @@ def component(func: Optional[Callable] = None, shareable/loadable version of the component spec into this file. **Warning:** This compilation approach is deprecated. + embedded_artifact_path: Optional path to a local file or directory to + embed into the component. At runtime the embedded content is + extracted into a temporary directory and made available via + parameters annotated as ``dsl.EmbeddedInput[T]`` (e.g., + ``dsl.EmbeddedInput[dsl.Dataset]``). For a directory, the injected + artifact's ``path`` points to the extraction root (the temporary + directory containing the directory's contents). For a single file, + the injected artifact's ``path`` points to the extracted file. The + extraction root is also prepended to ``sys.path`` to enable + importing embedded Python modules. install_kfp_package: Specifies if the KFP SDK should add the ``kfp`` Python package to ``packages_to_install``. Lightweight Python functions always require an installation of KFP in ``base_image`` to work. If you specify @@ -79,6 +96,12 @@ def component(func: Optional[Callable] = None, use_venv: Specifies if the component should be executed in a virtual environment. The environment will be created in a temporary directory and will inherit the system site packages. This is useful in restricted environments where most of the system is read-only. + additional_funcs: List of additional functions to include in the component. + These functions will be available to the main function. This is useful for adding util functions that + are shared across multiple components but are not packaged as an importable Python package. + task_config_passthroughs: List of task configurations (e.g. resources, env, volumes etc.) to pass through + to the component. This is useful when the component launches another Kubernetes resource (for example, + a Kubeflow Trainer job). Use this in conjunction with dsl.TaskConfig. Returns: A component task factory that can be used in pipeline definitions. 
@@ -110,27 +133,46 @@ def pipeline(): DeprecationWarning, stacklevel=2) + task_config_passthroughs_formatted: Optional[ + List[TaskConfigPassthrough]] = None + if task_config_passthroughs is not None: + task_config_passthroughs_formatted = [] + + for passthrough in task_config_passthroughs: + if isinstance(passthrough, TaskConfigField): + task_config_passthroughs_formatted.append( + TaskConfigPassthrough( + field=passthrough, apply_to_task=False)) + else: + task_config_passthroughs_formatted.append(passthrough) + if func is None: return functools.partial( component, base_image=base_image, target_image=target_image, packages_to_install=packages_to_install, + embedded_artifact_path=embedded_artifact_path, pip_index_urls=pip_index_urls, output_component_file=output_component_file, install_kfp_package=install_kfp_package, kfp_package_path=kfp_package_path, pip_trusted_hosts=pip_trusted_hosts, - use_venv=use_venv) + use_venv=use_venv, + additional_funcs=additional_funcs, + task_config_passthroughs=task_config_passthroughs_formatted) return component_factory.create_component_from_func( func, base_image=base_image, target_image=target_image, + embedded_artifact_path=embedded_artifact_path, packages_to_install=packages_to_install, pip_index_urls=pip_index_urls, output_component_file=output_component_file, install_kfp_package=install_kfp_package, kfp_package_path=kfp_package_path, pip_trusted_hosts=pip_trusted_hosts, - use_venv=use_venv) + use_venv=use_venv, + additional_funcs=additional_funcs, + task_config_passthroughs=task_config_passthroughs_formatted) diff --git a/sdk/python/kfp/dsl/component_factory.py b/sdk/python/kfp/dsl/component_factory.py index 5e7a25c0edc..7822e74309b 100644 --- a/sdk/python/kfp/dsl/component_factory.py +++ b/sdk/python/kfp/dsl/component_factory.py @@ -11,11 +11,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import base64 import dataclasses +import gzip import inspect +import io import itertools import pathlib import re +import tarfile import textwrap from typing import (Any, Callable, Dict, List, Mapping, Optional, Tuple, Type, Union) @@ -32,6 +36,8 @@ from kfp.dsl import python_component from kfp.dsl import structures from kfp.dsl import task_final_status +from kfp.dsl.component_task_config import TaskConfigPassthrough +from kfp.dsl.task_config import TaskConfig from kfp.dsl.types import artifact_types from kfp.dsl.types import custom_artifact_types from kfp.dsl.types import type_annotations @@ -59,6 +65,7 @@ class ComponentInfo(): pip_index_urls: Optional[List[str]] = None pip_trusted_hosts: Optional[List[str]] = None use_venv: bool = False + task_config_passthroughs: Optional[List[TaskConfigPassthrough]] = None # A map from function_name to components. This is always populated when a @@ -165,6 +172,20 @@ def _get_packages_to_install_command( index_url_options = make_index_url_options(pip_index_urls, pip_trusted_hosts) + # Install packages before KFP. This allows us to + # control where we source kfp-pipeline-spec. + # This is particularly useful for development and + # CI use-case when you want to install the spec + # from source. 
+ if packages_to_install: + user_packages_pip_install_command = make_pip_install_command( + install_parts=packages_to_install, + index_url_options=index_url_options, + ) + pip_install_strings.append(user_packages_pip_install_command) + if inject_kfp_install: + pip_install_strings.append(' && ') + if inject_kfp_install: if use_venv: pip_install_strings.append(_use_venv_script_template) @@ -184,16 +205,6 @@ def _get_packages_to_install_command( ) pip_install_strings.append(kfp_pip_install_command) - if packages_to_install: - pip_install_strings.append(' && ') - - if packages_to_install: - user_packages_pip_install_command = make_pip_install_command( - install_parts=packages_to_install, - index_url_options=index_url_options, - ) - pip_install_strings.append(user_packages_pip_install_command) - return [ 'sh', '-c', _install_python_packages_script_template.format( @@ -265,6 +276,9 @@ def get_name_to_specs( if annotation == inspect._empty: raise TypeError(f'Missing type annotation for argument: {name}') + # Exclude EmbeddedInput[...] from interface (runtime-only) + elif type_annotations.is_embedded_input_annotation(annotation): + continue # is Input[Artifact], Input[List[]], (e.g., str), or InputPath() elif (type_annotations.is_artifact_wrapped_in_Input(annotation) or isinstance( @@ -401,8 +415,12 @@ def make_input_spec(annotation: Any, default = None if inspect_param.default == inspect.Parameter.empty or type_annotations.issubclass_of_artifact( annotation) else inspect_param.default - optional = inspect_param.default is not inspect.Parameter.empty or type_utils.is_task_final_status_type( - getattr(inspect_param.annotation, '__name__', '')) + optional = ( + inspect_param.default is not inspect.Parameter.empty or + type_utils.is_task_final_status_type( + getattr(inspect_param.annotation, '__name__', '')) or + type_utils.is_task_config_type( + getattr(inspect_param.annotation, '__name__', ''))) return structures.InputSpec( **input_output_spec_args, default=default, @@ -486,7 +504,11 @@ def parse_docstring_with_return_as_args( def _get_command_and_args_for_lightweight_component( - func: Callable) -> Tuple[List[str], List[str]]: + func: Callable, + additional_funcs: Optional[List[Callable]] = None, + embedded_artifact_path: Optional[str] = None, + _helper_source_template: Optional[str] = None, +) -> Tuple[List[str], List[str]]: imports_source = [ 'import kfp', 'from kfp import dsl', @@ -495,11 +517,60 @@ def _get_command_and_args_for_lightweight_component( ] + custom_artifact_types.get_custom_artifact_type_import_statements(func) func_source = _get_function_source_definition(func) + additional_funcs_source = '' + if additional_funcs: + additional_funcs_source = '\n\n' + '\n\n'.join( + _get_function_source_definition(_f) for _f in additional_funcs) + # If an embedded_artifact_path is provided, build the archive and generate + # a shared extraction helper. 
+ helper_source = None + if embedded_artifact_path: + asset_path = pathlib.Path(embedded_artifact_path) + if not asset_path.exists(): + raise ValueError( + f'embedded_artifact_path does not exist: {embedded_artifact_path}' + ) + + buf = io.BytesIO() + with tarfile.open(fileobj=buf, mode='w') as tar: + if asset_path.is_dir(): + tar.add(asset_path, arcname='.') + else: + tar.add(asset_path, arcname=asset_path.name) + + archive_bytes = buf.getvalue() + + compressed_bytes = gzip.compress(archive_bytes) + compressed_size_mb = len(compressed_bytes) / (1024 * 1024) + if compressed_size_mb > 1: + warnings.warn( + f'Embedded artifact archive is large ({compressed_size_mb:.1f}MB compressed). ' + f'Consider moving large assets to a container image or object store.', + UserWarning, + stacklevel=4) + + archive_b64 = base64.b64encode(compressed_bytes).decode('ascii') + + if _helper_source_template: + # Allow callers to inject a custom helper source via a template. + # Use literal replacement to avoid str.format interpreting other braces + # in the helper source (e.g., f-strings) as format fields. + helper_source = _helper_source_template.replace( + '{embedded_archive}', archive_b64) + else: + file_basename = asset_path.name if asset_path.is_file() else None + helper_source = _generate_shared_extraction_helper( + archive_b64, file_basename) + + helper_block = f'\n\n{helper_source}\n' if helper_source else '' source = textwrap.dedent(''' - {imports_source} + {imports_source}{additional_funcs_source}{helper_block} {func_source}\n''').format( - imports_source='\n'.join(imports_source), func_source=func_source) + imports_source='\n'.join(imports_source), + additional_funcs_source=additional_funcs_source, + helper_block=helper_block, + func_source=func_source) command = [ 'sh', '-ec', @@ -525,6 +596,127 @@ def _get_command_and_args_for_lightweight_component( return command, args +def _generate_shared_extraction_helper(embedded_archive_b64: str, + file_basename: Optional[str] = None + ) -> str: + """Generate shared archive extraction helper source code. + + Produces Python source that extracts a tar.gz archive (provided as + base64) into a temporary directory, then prepends that directory to + sys.path. When a single file was embedded, sets + __KFP_EMBEDDED_ASSET_FILE to the extracted file path; otherwise only + __KFP_EMBEDDED_ASSET_DIR is set. 
+ """ + file_assignment = '' + if file_basename: + file_assignment = f"\n__KFP_EMBEDDED_ASSET_FILE = __kfp_os.path.join(__KFP_EMBEDDED_ASSET_DIR, '{file_basename}')\n" + return f'''__KFP_EMBEDDED_ARCHIVE_B64 = '{embedded_archive_b64}' + +import base64 as __kfp_b64 +import io as __kfp_io +import os as __kfp_os +import sys as __kfp_sys +import tarfile as __kfp_tarfile +import tempfile as __kfp_tempfile + +# Extract embedded archive at import time to ensure sys.path and globals are set +__kfp_tmpdir = __kfp_tempfile.TemporaryDirectory() +__KFP_EMBEDDED_ASSET_DIR = __kfp_tmpdir.name +try: + __kfp_bytes = __kfp_b64.b64decode(__KFP_EMBEDDED_ARCHIVE_B64.encode('ascii')) + with __kfp_tarfile.open(fileobj=__kfp_io.BytesIO(__kfp_bytes), mode='r:gz') as __kfp_tar: + __kfp_tar.extractall(path=__KFP_EMBEDDED_ASSET_DIR) +except Exception as __kfp_e: + raise RuntimeError(f'Failed to extract embedded archive: {{__kfp_e}}') + +# Always prepend the extracted directory to sys.path for import resolution +if __KFP_EMBEDDED_ASSET_DIR not in __kfp_sys.path: + __kfp_sys.path.insert(0, __KFP_EMBEDDED_ASSET_DIR) +{file_assignment} +''' + + +def create_notebook_component_from_func( + func: Callable, + *, + notebook_path: str, + base_image: Optional[str] = None, + packages_to_install: Optional[List[str]] = None, + pip_index_urls: Optional[List[str]] = None, + output_component_file: Optional[str] = None, + install_kfp_package: bool = True, + kfp_package_path: Optional[str] = None, + pip_trusted_hosts: Optional[List[str]] = None, + use_venv: bool = False, + task_config_passthroughs: Optional[List[TaskConfigPassthrough]] = None, +) -> python_component.PythonComponent: + """Builds a notebook-based component by delegating to + create_component_from_func. + + Validates the notebook path and constructs a helper source template + that executes the embedded notebook at runtime. The common + component-building logic is reused by passing the template and the + notebook path as an embedded artifact. + """ + + # Resolve default packages for notebooks when not specified + if packages_to_install is None: + packages_to_install = [ + 'nbclient>=0.10,<1', 'ipykernel>=6,<7', 'jupyter_client>=7,<9' + ] + + # Validate notebook path and determine relpath + nb_path = pathlib.Path(notebook_path) + if not nb_path.exists(): + raise ValueError(f'Notebook path does not exist: {notebook_path}') + + if nb_path.is_dir(): + # Ignore .ipynb_checkpoints directories + ipynbs = [ + p for p in nb_path.rglob('*.ipynb') + if p.is_file() and '.ipynb_checkpoints' not in p.parts + ] + if len(ipynbs) == 0: + raise ValueError( + f'No .ipynb files found in directory: {notebook_path}') + if len(ipynbs) > 1: + raise ValueError( + f'Multiple .ipynb files found in directory: {notebook_path}. Exactly one is required.' 
+ ) + nb_file = ipynbs[0] + notebook_relpath = nb_file.relative_to(nb_path).as_posix() + else: + if nb_path.suffix.lower() != '.ipynb': + raise ValueError( + f'Invalid notebook_path (expected .ipynb file): {notebook_path}' + ) + notebook_relpath = nb_path.name + + # Build the helper source template with a placeholder for the embedded archive + from kfp.dsl.templates.notebook_executor import \ + get_notebook_executor_source + helper_template = get_notebook_executor_source('{embedded_archive}', + notebook_relpath) + + # Delegate to the common component creation using the embedded artifact path + return create_component_from_func( + func=func, + base_image=base_image, + target_image=None, + embedded_artifact_path=notebook_path, + _helper_source_template=helper_template, + packages_to_install=packages_to_install or [], + pip_index_urls=pip_index_urls, + output_component_file=output_component_file, + install_kfp_package=install_kfp_package, + kfp_package_path=kfp_package_path, + pip_trusted_hosts=pip_trusted_hosts, + use_venv=use_venv, + additional_funcs=None, + task_config_passthroughs=task_config_passthroughs, + ) + + def _get_command_and_args_for_containerized_component( function_name: str) -> Tuple[List[str], List[str]]: @@ -541,6 +733,7 @@ def create_component_from_func( func: Callable, base_image: Optional[str] = None, target_image: Optional[str] = None, + embedded_artifact_path: Optional[str] = None, packages_to_install: List[str] = None, pip_index_urls: Optional[List[str]] = None, output_component_file: Optional[str] = None, @@ -548,6 +741,9 @@ def create_component_from_func( kfp_package_path: Optional[str] = None, pip_trusted_hosts: Optional[List[str]] = None, use_venv: bool = False, + additional_funcs: Optional[List[Callable]] = None, + task_config_passthroughs: Optional[List[TaskConfigPassthrough]] = None, + _helper_source_template: Optional[str] = None, ) -> python_component.PythonComponent: """Implementation for the @component decorator. @@ -573,7 +769,7 @@ def create_component_from_func( ("The default base_image used by the @dsl.component decorator will switch from 'python:3.9' to 'python:3.10' on Oct 1, 2025. To ensure your existing components work with versions of the KFP SDK released after that date, you should provide an explicit base_image argument and ensure your component works as intended on Python 3.10." ), FutureWarning, - stacklevel=2, + stacklevel=3, ) component_image = base_image @@ -584,9 +780,16 @@ def create_component_from_func( function_name=func.__name__,) else: command, args = _get_command_and_args_for_lightweight_component( - func=func) + func=func, + additional_funcs=additional_funcs, + embedded_artifact_path=embedded_artifact_path, + _helper_source_template=_helper_source_template, + ) component_spec = extract_component_interface(func) + # Attach task_config_passthroughs to the ComponentSpec structure if provided. 
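+    # The passthroughs are carried on the ComponentSpec so they can be
+    # emitted into the task's IR; at runtime the backend uses them to
+    # populate any dsl.TaskConfig parameter declared by the component.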
+ if task_config_passthroughs: + component_spec.task_config_passthroughs = task_config_passthroughs component_spec.implementation = structures.Implementation( container=structures.ContainerSpecImplementation( image=component_image, @@ -609,7 +812,8 @@ def create_component_from_func( base_image=base_image, packages_to_install=packages_to_install, pip_index_urls=pip_index_urls, - pip_trusted_hosts=pip_trusted_hosts) + pip_trusted_hosts=pip_trusted_hosts, + task_config_passthroughs=task_config_passthroughs) if REGISTERED_MODULES is not None: REGISTERED_MODULES[component_name] = component_info @@ -653,6 +857,9 @@ def make_input_for_parameterized_container_component_function( if (annotation == task_final_status.PipelineTaskFinalStatus or type_utils.is_task_final_status_type(annotation)): placeholder._ir_type = 'STRUCT' + elif annotation == TaskConfig: + # Treat as STRUCT for IR and use reserved input name at runtime. + placeholder._ir_type = 'STRUCT' else: placeholder._ir_type = type_utils.get_parameter_type_name( annotation) diff --git a/sdk/python/kfp/dsl/component_factory_test.py b/sdk/python/kfp/dsl/component_factory_test.py index 281d4ad70f3..9ffaa24b3aa 100644 --- a/sdk/python/kfp/dsl/component_factory_test.py +++ b/sdk/python/kfp/dsl/component_factory_test.py @@ -132,8 +132,16 @@ def test_with_user_packages_to_install_and_no_pip_index_url(self): self.assertEqual( strip_kfp_version(command), strip_kfp_version([ - 'sh', '-c', - '\nif ! [ -x "$(command -v pip)" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location \'kfp==2.1.3\' \'--no-deps\' \'typing-extensions>=3.7.4,<5; python_version<"3.9"\' && python3 -m pip install --quiet --no-warn-script-location \'package1\' \'package2\' && "$0" "$@"\n' + 'sh', '-c', '\n' + 'if ! [ -x "$(command -v pip)" ]; then\n' + ' python3 -m ensurepip || python3 -m ensurepip --user || apt-get install ' + 'python3-pip\n' + 'fi\n' + '\n' + 'PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet ' + "--no-warn-script-location 'package1' 'package2' && python3 -m pip install " + "--quiet --no-warn-script-location kfp '--no-deps' " + '\'typing-extensions>=3.7.4,<5; python_version<"3.9"\' && "$0" "$@"\n' ])) def test_with_packages_to_install_with_pip_index_url(self): @@ -148,8 +156,19 @@ def test_with_packages_to_install_with_pip_index_url(self): self.assertEqual( strip_kfp_version(command), strip_kfp_version([ - 'sh', '-c', - '\nif ! [ -x "$(command -v pip)" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location --index-url https://myurl.org/simple --trusted-host https://myurl.org/simple \'kfp==2.1.3\' \'--no-deps\' \'typing-extensions>=3.7.4,<5; python_version<"3.9"\' && python3 -m pip install --quiet --no-warn-script-location --index-url https://myurl.org/simple --trusted-host https://myurl.org/simple \'package1\' \'package2\' && "$0" "$@"\n' + 'sh', '-c', '\n' + 'if ! 
[ -x "$(command -v pip)" ]; then\n' + ' python3 -m ensurepip || python3 -m ensurepip --user || apt-get install ' + 'python3-pip\n' + 'fi\n' + '\n' + 'PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet ' + '--no-warn-script-location --index-url https://myurl.org/simple ' + "--trusted-host https://myurl.org/simple 'package1' 'package2' && python3 " + '-m pip install --quiet --no-warn-script-location --index-url ' + 'https://myurl.org/simple --trusted-host https://myurl.org/simple kfp ' + '\'--no-deps\' \'typing-extensions>=3.7.4,<5; python_version<"3.9"\' && "$0" ' + '"$@"\n' ])) def test_with_packages_to_install_with_pip_index_url_and_trusted_host(self): @@ -166,8 +185,18 @@ def test_with_packages_to_install_with_pip_index_url_and_trusted_host(self): self.assertEqual( strip_kfp_version(command), strip_kfp_version([ - 'sh', '-c', - '\nif ! [ -x "$(command -v pip)" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location --index-url https://myurl.org/simple --trusted-host myurl.org \'kfp==2.1.3\' \'--no-deps\' \'typing-extensions>=3.7.4,<5; python_version<"3.9"\' && python3 -m pip install --quiet --no-warn-script-location --index-url https://myurl.org/simple --trusted-host myurl.org \'package1\' \'package2\' && "$0" "$@"\n' + 'sh', '-c', '\n' + 'if ! [ -x "$(command -v pip)" ]; then\n' + ' python3 -m ensurepip || python3 -m ensurepip --user || apt-get install ' + 'python3-pip\n' + 'fi\n' + '\n' + 'PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet ' + '--no-warn-script-location --index-url https://myurl.org/simple ' + "--trusted-host myurl.org 'package1' 'package2' && python3 -m pip install " + '--quiet --no-warn-script-location --index-url https://myurl.org/simple ' + "--trusted-host myurl.org kfp '--no-deps' 'typing-extensions>=3.7.4,<5; " + 'python_version<"3.9"\' && "$0" "$@"\n' ])) def test_with_packages_to_install_with_pip_index_url_and_empty_trusted_host( @@ -184,8 +213,17 @@ def test_with_packages_to_install_with_pip_index_url_and_empty_trusted_host( self.assertEqual( strip_kfp_version(command), strip_kfp_version([ - 'sh', '-c', - '\nif ! [ -x "$(command -v pip)" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location --index-url https://myurl.org/simple \'kfp==2.1.3\' \'--no-deps\' \'typing-extensions>=3.7.4,<5; python_version<"3.9"\' && python3 -m pip install --quiet --no-warn-script-location --index-url https://myurl.org/simple \'package1\' \'package2\' && "$0" "$@"\n' + 'sh', '-c', '\n' + 'if ! 
[ -x "$(command -v pip)" ]; then\n' + ' python3 -m ensurepip || python3 -m ensurepip --user || apt-get install ' + 'python3-pip\n' + 'fi\n' + '\n' + 'PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet ' + "--no-warn-script-location --index-url https://myurl.org/simple 'package1' " + "'package2' && python3 -m pip install --quiet --no-warn-script-location " + "--index-url https://myurl.org/simple kfp '--no-deps' " + '\'typing-extensions>=3.7.4,<5; python_version<"3.9"\' && "$0" "$@"\n' ])) diff --git a/sdk/python/kfp/dsl/component_task_config.py b/sdk/python/kfp/dsl/component_task_config.py new file mode 100644 index 00000000000..7590592574d --- /dev/null +++ b/sdk/python/kfp/dsl/component_task_config.py @@ -0,0 +1,53 @@ +"""Definition for TaskConfig.""" + +import dataclasses +from enum import IntEnum + +from kfp.pipeline_spec import pipeline_spec_pb2 + + +class TaskConfigField(IntEnum): + # Indicates that the resource limits and requests should be passed through to the external workload. + # Be cautious about also setting apply_to_task=True since that will double the resources required + # for the task. + RESOURCES = ( + pipeline_spec_pb2.TaskConfigPassthroughType + .TaskConfigPassthroughTypeEnum.RESOURCES) + # Indicates that the environment variables should be passed through to the external workload. + # It is generally safe to always set apply_to_task=True on this field. + ENV = ( + pipeline_spec_pb2.TaskConfigPassthroughType + .TaskConfigPassthroughTypeEnum.ENV) + # Indicates that the Kubernetes node affinity should be passed through to the external workload. + KUBERNETES_AFFINITY = ( + pipeline_spec_pb2.TaskConfigPassthroughType + .TaskConfigPassthroughTypeEnum.KUBERNETES_AFFINITY) + # Indicates that the Kubernetes node tolerations should be passed through to the external workload. + KUBERNETES_TOLERATIONS = ( + pipeline_spec_pb2.TaskConfigPassthroughType + .TaskConfigPassthroughTypeEnum.KUBERNETES_TOLERATIONS) + # Indicates that the Kubernetes node selector should be passed through to the external workload. + KUBERNETES_NODE_SELECTOR = ( + pipeline_spec_pb2.TaskConfigPassthroughType + .TaskConfigPassthroughTypeEnum.KUBERNETES_NODE_SELECTOR) + # Indicates that the Kubernetes persistent volumes and ConfigMaps/Secrets mounted as volumes should be passed + # through to the external workload. Be sure that when setting apply_to_task=True, the volumes are ReadWriteMany or ReadOnlyMany or else + # the task's pod may not start. + # This is useful when the task prepares a shared volume for the external workload or defines output artifact + # (e.g. dsl.Model) that is created by the external workload. + KUBERNETES_VOLUMES = ( + pipeline_spec_pb2.TaskConfigPassthroughType + .TaskConfigPassthroughTypeEnum.KUBERNETES_VOLUMES) + + +@dataclasses.dataclass +class TaskConfigPassthrough: + field: TaskConfigField + apply_to_task: bool = False + + def to_proto(self) -> pipeline_spec_pb2.TaskConfigPassthrough: + """Converts this object to its proto representation.""" + proto = pipeline_spec_pb2.TaskConfigPassthrough() + proto.field = int(self.field) + proto.apply_to_task = self.apply_to_task + return proto diff --git a/sdk/python/kfp/dsl/constants.py b/sdk/python/kfp/dsl/constants.py index 44b7a16fbb3..c24a34c9daa 100644 --- a/sdk/python/kfp/dsl/constants.py +++ b/sdk/python/kfp/dsl/constants.py @@ -13,6 +13,9 @@ # limitations under the License. """Constants.""" +# Workspace-related constants +WORKSPACE_MOUNT_PATH = '/kfp-workspace' + # Unit constants for k8s size string. 
_E = 10**18 # Exa _EI = 1 << 60 # Exa: power-of-two approximate diff --git a/sdk/python/kfp/dsl/executor.py b/sdk/python/kfp/dsl/executor.py index 793c42d0eb2..ded418cc4ea 100644 --- a/sdk/python/kfp/dsl/executor.py +++ b/sdk/python/kfp/dsl/executor.py @@ -20,6 +20,7 @@ from kfp import dsl from kfp.dsl import task_final_status +from kfp.dsl.task_config import TaskConfig from kfp.dsl.types import artifact_types from kfp.dsl.types import type_annotations @@ -132,7 +133,8 @@ def get_input_artifact(self, name: str) -> Optional[dsl.Artifact]: def get_output_artifact(self, name: str) -> Optional[dsl.Artifact]: return self.output_artifacts.get(name) - def get_input_parameter_value(self, parameter_name: str) -> Optional[str]: + def get_input_parameter_value( + self, parameter_name: str) -> Optional[Union[str, dict]]: parameter_values = self.executor_input.get('inputs', {}).get( 'parameterValues', None) @@ -328,16 +330,39 @@ def execute(self) -> Optional[str]: # `Optional[str]`. In this case, we need to strip off the part # `Optional[]` to get the actual parameter type. v = type_annotations.maybe_strip_optional_from_annotation(v) - if v == task_final_status.PipelineTaskFinalStatus: value = self.get_input_parameter_value(k) + + # PipelineTaskFinalStatus field names pipelineJobResourceName and pipelineTaskName are deprecated. Support for these fields will be removed at a later date. + pipline_job_resource_name = 'pipelineJobResourceName' + if value.get(pipline_job_resource_name) is None: + pipline_job_resource_name = 'pipeline_job_resource_name' + pipeline_task_name = 'pipelineTaskName' + if value.get(pipeline_task_name) is None: + pipeline_task_name = 'pipeline_task_name' + func_kwargs[k] = task_final_status.PipelineTaskFinalStatus( state=value.get('state'), pipeline_job_resource_name=value.get( - 'pipelineJobResourceName'), - pipeline_task_name=value.get('pipelineTaskName'), - error_code=value.get('error').get('code', None), - error_message=value.get('error').get('message', None), + pipline_job_resource_name), + pipeline_task_name=value.get(pipeline_task_name), + error_code=value.get('error', {}).get('code', None), + error_message=value.get('error', {}).get('message', None), + ) + + elif v == TaskConfig: + # The backend injects this struct under the actual input parameter name. + # If missing, pass an empty structure. 
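+                # Keys follow the Kubernetes JSON conventions used elsewhere
+                # in the spec (e.g. nodeSelector, volumeMounts, resources).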
+ value = self.get_input_parameter_value(k) + value = value or {} + func_kwargs[k] = TaskConfig( + affinity=value.get('affinity'), + tolerations=value.get('tolerations'), + node_selector=value.get('nodeSelector'), + env=value.get('env'), + volumes=value.get('volumes'), + volume_mounts=value.get('volumeMounts'), + resources=value.get('resources'), ) elif type_annotations.is_list_of_artifacts(v): @@ -354,6 +379,26 @@ def execute(self) -> Optional[str]: if type_annotations.is_artifact_wrapped_in_Output(v): func_kwargs[k] = self.get_output_artifact(k) + elif type_annotations.is_embedded_input_annotation(v): + # Inject a runtime-only artifact pointing to the extracted embedded asset + inner_type = type_annotations.strip_Input_or_Output_marker(v) + artifact_cls = inner_type if type_annotations.is_artifact_class( + inner_type) else artifact_types.Artifact + embedded_dir = self.func.__globals__.get( + '__KFP_EMBEDDED_ASSET_DIR') + embedded_file = self.func.__globals__.get( + '__KFP_EMBEDDED_ASSET_FILE') + artifact_instance = artifact_cls() + if embedded_file: + artifact_instance.path = embedded_file + elif embedded_dir: + artifact_instance.path = embedded_dir + else: + raise RuntimeError( + 'EmbeddedInput was specified but no embedded asset was found at runtime.' + ) + func_kwargs[k] = artifact_instance + elif is_artifact(v): func_kwargs[k] = self.get_input_artifact(k) diff --git a/sdk/python/kfp/dsl/graph_component.py b/sdk/python/kfp/dsl/graph_component.py index 7b17ec8a2e3..4236aa18344 100644 --- a/sdk/python/kfp/dsl/graph_component.py +++ b/sdk/python/kfp/dsl/graph_component.py @@ -14,15 +14,18 @@ """Pipeline as a component (aka graph component).""" import inspect -from typing import Callable, Optional +from typing import Callable, List, Optional import uuid +from kfp import dsl from kfp.compiler import pipeline_spec_builder as builder from kfp.dsl import base_component from kfp.dsl import pipeline_channel from kfp.dsl import pipeline_config from kfp.dsl import pipeline_context +from kfp.dsl import pipeline_task from kfp.dsl import structures +from kfp.dsl import tasks_group from kfp.pipeline_spec import pipeline_spec_pb2 @@ -63,6 +66,9 @@ def __init__( if not dsl_pipeline.tasks: raise ValueError('Task is missing from pipeline.') + # Validate workspace configuration if workspace features are used + self._validate_workspace_requirements(dsl_pipeline, pipeline_config) + # Making the pipeline group name unique to prevent name clashes with # templates pipeline_group = dsl_pipeline.groups[0] @@ -86,6 +92,79 @@ def __init__( self.component_spec.implementation.graph = pipeline_spec self.component_spec.platform_spec = platform_spec + def _detect_workspace_usage_in_tasks( + self, tasks: List[pipeline_task.PipelineTask]) -> bool: + """Detects if any task in the list uses workspace features. + + Args: + tasks: List of pipeline tasks to check for workspace usage. + + Returns: + True if any task uses workspace features, False otherwise. + """ + + for task in tasks: + if hasattr(task, 'inputs') and task.inputs: + for input_value in task.inputs.values(): + if isinstance(input_value, str): + if (dsl.WORKSPACE_PATH_PLACEHOLDER in input_value or + input_value.startswith( + dsl.constants.WORKSPACE_MOUNT_PATH)): + return True + + return False + + def _detect_workspace_usage_in_groups( + self, groups: List[tasks_group.TasksGroup]) -> bool: + """Detects if any task group uses workspace features. + + Args: + groups: List of task groups to check for workspace usage. 
+ + Returns: + True if any task uses workspace features, False otherwise. + """ + for group in groups: + # Check tasks in the current group + if hasattr(group, 'tasks') and group.tasks: + if self._detect_workspace_usage_in_tasks(group.tasks): + return True + + # Recursively check nested groups + if hasattr(group, 'groups') and group.groups: + if self._detect_workspace_usage_in_groups(group.groups): + return True + + return False + + def _validate_workspace_requirements( + self, pipeline: pipeline_context.Pipeline, + pipeline_config: Optional[pipeline_config.PipelineConfig]) -> None: + """Validates that workspace is configured if workspace features are + used in the pipeline. + + Args: + pipeline: The pipeline instance to validate. + pipeline_config: The pipeline configuration. + + Raises: + ValueError: If workspace features are used but workspace is not configured in PipelineConfig. + """ + + workspace_used = False + + if pipeline.groups: + workspace_used = self._detect_workspace_usage_in_groups( + pipeline.groups) + + # If workspace features are used, ensure workspace is configured + if workspace_used: + if (pipeline_config is None or pipeline_config.workspace is None or + not getattr(pipeline_config.workspace, 'size', None)): + raise ValueError( + 'Workspace features are used (e.g., dsl.WORKSPACE_PATH_PLACEHOLDER) but PipelineConfig.workspace.size is not set.' + ) + @property def pipeline_spec(self) -> pipeline_spec_pb2.PipelineSpec: """Returns the pipeline spec of the component.""" diff --git a/sdk/python/kfp/dsl/notebook_component_decorator.py b/sdk/python/kfp/dsl/notebook_component_decorator.py new file mode 100644 index 00000000000..84f95dc186a --- /dev/null +++ b/sdk/python/kfp/dsl/notebook_component_decorator.py @@ -0,0 +1,100 @@ +"""Decorator for creating notebook-based components. + +Uses the existing Python executor. The embedded notebook is executed by +a helper bound to `dsl.run_notebook(**kwargs)` that users can call +inside their component function. +""" + +from __future__ import annotations + +import functools +from typing import Any, Callable, List, Optional + +from kfp.dsl import component_factory +from kfp.dsl.component_task_config import TaskConfigField +from kfp.dsl.component_task_config import TaskConfigPassthrough + + +def notebook_component( + func: Optional[Callable[..., Any]] = None, + *, + notebook_path: str, + base_image: Optional[str] = None, + packages_to_install: Optional[List[str]] = None, + output_component_file: Optional[str] = None, + pip_index_urls: Optional[List[str]] = None, + pip_trusted_hosts: Optional[List[str]] = None, + use_venv: bool = False, + kfp_package_path: Optional[str] = None, + install_kfp_package: bool = True, + task_config_passthroughs: Optional[List[TaskConfigPassthrough]] = None, +): + """Decorator to define a Notebook-based KFP component. + + Args: + notebook_path: Path to the .ipynb file or a directory containing one to embed and execute. + base_image: Base container image for the component. + packages_to_install: Runtime-only packages to install inside the + component container. When None, defaults to + ["nbclient>=0.10,<1", "ipykernel>=6,<7", "jupyter_client>=7,<9"] + to ensure the notebook can execute with a Python kernel and client. + When [], installs nothing. When non-empty, installs the exact list. + output_component_file: Optional path to write the component YAML. + pip_index_urls: Optional pip index URLs for installation. + pip_trusted_hosts: Optional pip trusted hosts. 
+ use_venv: Whether to create and use a venv inside the container. + kfp_package_path: Optional KFP package path to install. + install_kfp_package: Whether to auto-install KFP when appropriate. + task_config_passthroughs: Optional task config passthroughs. + + Parameter injection and execution: + - The notebook bytes are embedded in the component. At runtime, the + helper binds `dsl.run_notebook(**kwargs)`. + - Inside the decorated function body, call `dsl.run_notebook(...)` with + keyword arguments for parameters you want available in the notebook + (e.g., `dsl.run_notebook(text=my_text)`). + - Parameters are injected following Papermill semantics: if the + notebook contains a code cell tagged `parameters`, an overriding + `injected-parameters` cell is inserted immediately after it; otherwise + the injected cell is placed at the top of the notebook before + execution. + - Notebook outputs you want to expose as KFP outputs should be written + by the notebook to known paths, then copied or logged by the function + after `dsl.run_notebook(...)` returns. + """ + + formatted_passthroughs = None + if task_config_passthroughs is not None: + formatted_passthroughs = [ + TaskConfigPassthrough(field=p, apply_to_task=False) if isinstance( + p, TaskConfigField) else p for p in task_config_passthroughs + ] + + if func is None: + return functools.partial( + component_factory.create_notebook_component_from_func, + notebook_path=notebook_path, + base_image=base_image, + packages_to_install=packages_to_install, + output_component_file=output_component_file, + pip_index_urls=pip_index_urls, + pip_trusted_hosts=pip_trusted_hosts, + use_venv=use_venv, + kfp_package_path=kfp_package_path, + install_kfp_package=install_kfp_package, + task_config_passthroughs=formatted_passthroughs, + ) + + return component_factory.create_notebook_component_from_func( + func=func, + notebook_path=notebook_path, + base_image=base_image, + packages_to_install=packages_to_install, + output_component_file=output_component_file, + pip_index_urls=pip_index_urls, + pip_trusted_hosts=pip_trusted_hosts, + use_venv=use_venv, + kfp_package_path=kfp_package_path, + install_kfp_package=install_kfp_package, + task_config_passthroughs=formatted_passthroughs, + ) diff --git a/sdk/python/kfp/dsl/notebook_component_decorator_test.py b/sdk/python/kfp/dsl/notebook_component_decorator_test.py new file mode 100644 index 00000000000..c6112374401 --- /dev/null +++ b/sdk/python/kfp/dsl/notebook_component_decorator_test.py @@ -0,0 +1,291 @@ +"""Unit tests for notebook components and embedded artifacts.""" + +import base64 +import io +import json +import os +import tarfile +import tempfile +import unittest +import warnings + +from kfp import dsl +from kfp.dsl import component_factory +from kfp.dsl.types.artifact_types import Dataset + + +class TestNotebookComponentDecorator(unittest.TestCase): + + def _make_temp_notebook(self, cells_source: str) -> str: + nb = { + 'cells': [{ + 'cell_type': 'code', + 'execution_count': None, + 'metadata': {}, + 'outputs': [], + 'source': cells_source, + }], + 'metadata': { + 'kernelspec': { + 'display_name': 'Python 3', + 'language': 'python', + 'name': 'python3', + }, + 'language_info': { + 'name': 'python', + 'version': '3.11', + }, + }, + 'nbformat': 4, + 'nbformat_minor': 5, + } + tmpdir = tempfile.mkdtemp() + nb_path = os.path.join(tmpdir, 'tmp.ipynb') + with open(nb_path, 'w', encoding='utf-8') as f: + json.dump(nb, f) + return nb_path + + def test_notebook_component_default_packages_install(self): + nb_path = 
self._make_temp_notebook( + "import os\nos.makedirs('/tmp/kfp_nb_outputs', exist_ok=True)\n") + + @dsl.notebook_component(notebook_path=nb_path) + def my_nb(text: str): + dsl.run_notebook(text=text) + + container = my_nb.component_spec.implementation.container + command = ' '.join(container.command) + self.assertIn('nbclient>=0.10,<1', command) + self.assertIn('ipykernel>=6,<7', command) + self.assertIn('jupyter_client>=7,<9', command) + + def test_notebook_component_no_extra_packages_when_empty_list(self): + nb_path = self._make_temp_notebook('pass\n') + + @dsl.notebook_component(notebook_path=nb_path, packages_to_install=[]) + def my_nb(): + dsl.run_notebook() + + container = my_nb.component_spec.implementation.container + command = ' '.join(container.command) + self.assertNotIn('nbclient>=0.10,<1', command) + self.assertNotIn('ipykernel>=6,<7', command) + self.assertNotIn('jupyter_client>=7,<9', command) + + +class TestNotebookExecutorTemplate(unittest.TestCase): + + def test_template_binds_run_notebook(self): + from kfp.dsl.templates.notebook_executor import \ + get_notebook_executor_source + + source = get_notebook_executor_source('ARCHIVE_B64', 'nb.ipynb') + self.assertIn('dsl.run_notebook = kfp_run_notebook', source) + self.assertIn('class KFPStreamingNotebookClient(NotebookClient):', + source) + + +class TestNotebookParameterInjection(unittest.TestCase): + + def _make_nb(self, with_parameters_cell: bool) -> dict: + import nbformat + nb = { + 'cells': [], + 'metadata': { + 'kernelspec': { + 'display_name': 'Python 3', + 'language': 'python', + 'name': 'python3', + }, + 'language_info': { + 'name': 'python', + 'version': '3.11', + }, + }, + 'nbformat': 4, + 'nbformat_minor': 5, + } + if with_parameters_cell: + nb['cells'].append({ + 'cell_type': 'code', + 'execution_count': None, + 'metadata': { + 'tags': ['parameters'], + }, + 'outputs': [], + 'source': '# default parameters\n', + }) + # Regular code cell + nb['cells'].append({ + 'cell_type': 'code', + 'execution_count': None, + 'metadata': {}, + 'outputs': [], + 'source': 'print("hello")\n', + }) + return nbformat.from_dict(nb) + + def test_injects_after_parameters_cell(self): + from kfp.dsl.templates import notebook_executor as nb_exec + nb = self._make_nb(with_parameters_cell=True) + + params = { + 'x': 1, + 's': 'abc', + 'd': { + 'a': 1 + }, + } + + getattr(nb_exec, '__kfp_write_parameters_cell')(nb, params) + + # Expect new cell inserted immediately after the parameters cell (index 1) + self.assertGreaterEqual(len(nb.get('cells', [])), 2) + injected = nb['cells'][1] + self.assertEqual(injected.get('cell_type'), 'code') + tags = injected.get('metadata', {}).get('tags', []) or [] + self.assertIn('injected-parameters', tags) + src = ''.join(injected.get('source', '')) + self.assertIn('import json', src) + # Ensure each variable assignment is present + self.assertIn('x = json.loads(', src) + self.assertIn('s = json.loads(', src) + self.assertIn('d = json.loads(', src) + + def test_injects_at_top_when_no_parameters_cell(self): + from kfp.dsl.templates import notebook_executor as nb_exec + nb = self._make_nb(with_parameters_cell=False) + + getattr(nb_exec, '__kfp_write_parameters_cell')(nb, {'p': 42}) + + self.assertGreaterEqual(len(nb.get('cells', [])), 2) + injected = nb['cells'][0] + self.assertEqual(injected.get('cell_type'), 'code') + tags = injected.get('metadata', {}).get('tags', []) or [] + self.assertIn('injected-parameters', tags) + src = ''.join(injected.get('source', '')) + self.assertIn('p = json.loads(', src) + + +class 
TestEmbeddedArtifacts(unittest.TestCase): + + def _make_tar_gz_b64_for_paths(self, root_dir: str) -> str: + buf = io.BytesIO() + with tarfile.open(fileobj=buf, mode='w') as tar: + tar.add(root_dir, arcname='.') + raw_bytes = buf.getvalue() + gz_bytes = __import__('gzip').compress(raw_bytes) + return base64.b64encode(gz_bytes).decode('ascii') + + def test_shared_extraction_helper_exposes_dir_and_file(self): + tmpdir = tempfile.mkdtemp() + file_basename = 'data.txt' + with open( + os.path.join(tmpdir, file_basename), 'w', + encoding='utf-8') as f: + f.write('hello') + + archive_b64 = self._make_tar_gz_b64_for_paths(tmpdir) + helper_src = component_factory._generate_shared_extraction_helper( + embedded_archive_b64=archive_b64, file_basename=file_basename) + + g: dict = {} + exec(helper_src, g, g) + self.assertIn('__KFP_EMBEDDED_ASSET_DIR', g) + self.assertIn('__KFP_EMBEDDED_ASSET_FILE', g) + self.assertTrue(os.path.isdir(g['__KFP_EMBEDDED_ASSET_DIR'])) + self.assertTrue(os.path.isfile(g['__KFP_EMBEDDED_ASSET_FILE'])) + with open(g['__KFP_EMBEDDED_ASSET_FILE'], 'r', encoding='utf-8') as f: + self.assertEqual(f.read(), 'hello') + # sys.path must include the extracted dir (at position 0 per helper) + import sys + self.assertIn(g['__KFP_EMBEDDED_ASSET_DIR'], sys.path) + + def test_embedded_input_excluded_from_interface_and_runtime_injected(self): + + def comp_sig(cfg: dsl.EmbeddedInput[Dataset], out: dsl.Output[Dataset]): + pass + + spec = component_factory.extract_component_interface(comp_sig) + self.assertIsNone(spec.inputs) + + tmpdir = tempfile.mkdtemp() + payload_path = os.path.join(tmpdir, 'x.txt') + with open(payload_path, 'w', encoding='utf-8') as f: + f.write('PAYLOAD') + + def use_embedded(cfg: dsl.EmbeddedInput[Dataset], + out: dsl.Output[Dataset]): + import os as _os + with open( + _os.path.join(cfg.path, 'x.txt'), 'r', + encoding='utf-8') as _f: + data = _f.read() + with open(out.path, 'w', encoding='utf-8') as _g: + _g.write(data) + + # Build a minimal executor input for a single output artifact + executor_input = { + 'inputs': {}, + 'outputs': { + 'artifacts': { + 'out': { + 'artifacts': [{ + 'name': 'out', + 'type': { + 'schemaTitle': 'system.Dataset', + 'schemaVersion': '0.0.1', + }, + 'uri': os.path.join(tempfile.mkdtemp(), 'out'), + 'metadata': {}, + }] + } + }, + 'outputFile': + os.path.join(tempfile.mkdtemp(), 'executor_output.json'), + }, + } + + from kfp.dsl import executor as _executor + + # Inject globals emulating extraction helper behavior + use_embedded.__globals__['__KFP_EMBEDDED_ASSET_DIR'] = tmpdir + e = _executor.Executor(executor_input, function_to_execute=use_embedded) + e.execute() + + out_path = e.get_output_artifact_path('out') + with open(out_path, 'r', encoding='utf-8') as f: + self.assertEqual(f.read(), 'PAYLOAD') + + def test_large_embedded_artifact_emits_warning(self): + # Create >1MB of incompressible data so gzip compressed size stays >1MB + tmpdir = tempfile.mkdtemp() + bigfile = os.path.join(tmpdir, 'big.bin') + with open(bigfile, 'wb') as f: + f.write(os.urandom(3 * 1024 * 1024)) + + @dsl.component(embedded_artifact_path=tmpdir) + def dummy(): + pass + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always') + _ = dummy + + # Building the component happens at decoration time above; to force + # the path through component_factory, explicitly rebuild: + def f(): + pass + + component_factory.create_component_from_func( + func=f, embedded_artifact_path=tmpdir) + self.assertTrue( + any( + isinstance(ww.message, UserWarning) and 
+ 'Embedded artifact archive is large' in str(ww.message) + for ww in w), + msg='Expected a UserWarning about large embedded artifact') + + +if __name__ == '__main__': + unittest.main() diff --git a/sdk/python/kfp/dsl/notebook_helpers.py b/sdk/python/kfp/dsl/notebook_helpers.py new file mode 100644 index 00000000000..a1f9a425d97 --- /dev/null +++ b/sdk/python/kfp/dsl/notebook_helpers.py @@ -0,0 +1,35 @@ +"""Notebook-related helper APIs for KFP DSL. + +This module provides a stub for `dsl.run_notebook(**kwargs)`. At runtime +inside `@dsl.notebook_component`, the SDK binds this symbol to a helper +that executes the embedded notebook with the provided parameters. +""" + +from __future__ import annotations + +from typing import Any + + +def run_notebook(**kwargs: Any) -> None: + """Execute the component's embedded Jupyter notebook with injected + parameters. + + This is a stub placeholder. Inside a function decorated with + `@dsl.notebook_component`, the SDK binds this symbol at runtime to a helper + that materializes the embedded notebook, injects a parameters cell from the + provided `**kwargs`, and executes the notebook via nbclient. + + Calling this function outside of a notebook component context will raise + NotImplementedError. + + Args: + **kwargs: Parameter names and values to inject into the notebook's + parameters cell. + + Raises: + NotImplementedError: Always, unless overridden at runtime inside a + notebook component. + """ + raise NotImplementedError( + 'dsl.run_notebook is only available inside a @dsl.notebook_component at runtime.' + ) diff --git a/sdk/python/kfp/dsl/pipeline_config.py b/sdk/python/kfp/dsl/pipeline_config.py index a4e90c28a01..72308ee16bd 100644 --- a/sdk/python/kfp/dsl/pipeline_config.py +++ b/sdk/python/kfp/dsl/pipeline_config.py @@ -13,11 +13,140 @@ # limitations under the License. """Pipeline-level config options.""" +import re +from typing import Any, Dict, Optional + +# Workspace size validation regex +_SIZE_REGEX = re.compile( + r'^(?:(?:0|[1-9]\d*)(?:\.\d+)?)(?:Ki|Mi|Gi|Ti|Pi|Ei|K|M|G|T|P|E)?$') + + +def _is_valid_workspace_size(value: str) -> bool: + """Returns True if size is a valid Kubernetes resource quantity string.""" + if not isinstance(value, str): + return False + size = value.strip() + return _SIZE_REGEX.match(size) is not None + + +class KubernetesWorkspaceConfig: + """Configuration for Kubernetes-specific workspace settings. + + Use this to override the default PersistentVolumeClaim (PVC) configuration + used when running pipelines on a Kubernetes cluster. + + Attributes: + pvcSpecPatch: A dictionary of fields to patch onto the default PVC spec + (e.g., 'storageClassName', 'accessModes'). + """ + + def __init__(self, pvcSpecPatch: Optional[Dict[str, Any]] = None): + self.pvcSpecPatch = pvcSpecPatch or {} + + def set_pvcSpecPatch(self, patch: Dict[str, Any]): + self.pvcSpecPatch = patch + + +class WorkspaceConfig: + """Configuration for a shared workspace that persists during the pipeline + run. + + Attributes: + size (str): The size of the workspace (e.g., '250Gi'). This is a required field. + See https://kubernetes.io/docs/reference/kubernetes-api/common-definitions/quantity/ for valid quantity formats. + kubernetes: (Optional) Kubernetes-specific configuration for the underlying PVC. 
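+
+    A minimal illustrative configuration (the storage class value below is
+    only an example; any field accepted by a PVC spec may be patched):
+    ::
+
+        config = PipelineConfig(
+            workspace=WorkspaceConfig(
+                size='10Gi',
+                kubernetes=KubernetesWorkspaceConfig(
+                    pvcSpecPatch={'storageClassName': 'standard'})))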
+ """ + + def __init__(self, + size: str, + kubernetes: Optional[KubernetesWorkspaceConfig] = None): + self.size = size + self.kubernetes = kubernetes or KubernetesWorkspaceConfig() + + @property + def size(self) -> str: + return self._size + + @size.setter + def size(self, size: str) -> None: + if not size or not str(size).strip(): + raise ValueError('Workspace size is required and cannot be empty') + if not _is_valid_workspace_size(str(size)): + raise ValueError( + f'Workspace size "{size}" is invalid. Must be a valid Kubernetes resource quantity ' + '(e.g., "10Gi", "500Mi", "1Ti")') + self._size = str(size).strip() + + def get_workspace(self) -> dict: + workspace = {'size': self.size} + if self.kubernetes: + workspace['kubernetes'] = { + 'pvcSpecPatch': self.kubernetes.pvcSpecPatch + } + return workspace + + def set_size(self, size: str): + self.size = size + + def set_kubernetes_config(self, + kubernetes_config: KubernetesWorkspaceConfig): + self.kubernetes = kubernetes_config + class PipelineConfig: """PipelineConfig contains pipeline-level config options.""" - def __init__(self): - pass + def __init__(self, + workspace: Optional[WorkspaceConfig] = None, + semaphore_key: Optional[str] = None, + mutex_name: Optional[str] = None): + self.workspace = workspace + self._semaphore_key = semaphore_key + self._mutex_name = mutex_name + + @property + def semaphore_key(self) -> Optional[str]: + """Get the semaphore key for controlling pipeline concurrency. + + Returns: + Optional[str]: The semaphore key, or None if not set. + """ + return self._semaphore_key + + @semaphore_key.setter + def semaphore_key(self, value: str): + """Set the semaphore key to control pipeline concurrency. + + Pipelines with the same semaphore key will be limited to a configured maximum + number of concurrent executions. This allows you to control resource usage by + ensuring that only a specific number of pipelines can run simultaneously. + + Note: A pipeline can use both semaphores and mutexes together. The pipeline + will wait until all required locks are available before starting. + + Args: + value (str): The semaphore key name for controlling concurrent executions. + """ + self._semaphore_key = (value and value.strip()) or None + + @property + def mutex_name(self) -> Optional[str]: + """Get the mutex name for exclusive pipeline execution. + + Returns: + Optional[str]: The mutex name, or None if not set. + """ + return self._mutex_name + + @mutex_name.setter + def mutex_name(self, value: str): + """Set the name of the mutex to ensure mutual exclusion. + + Pipelines with the same mutex name will only run one at a time. This ensures + exclusive access to shared resources and prevents conflicts when multiple + pipelines would otherwise compete for the same resources. - # TODO add pipeline level configs + Args: + value (str): Name of the mutex for exclusive pipeline execution. 
+ """ + self._mutex_name = (value and value.strip()) or None diff --git a/sdk/python/kfp/dsl/pipeline_task.py b/sdk/python/kfp/dsl/pipeline_task.py index 381899a361a..eda1407ed6d 100644 --- a/sdk/python/kfp/dsl/pipeline_task.py +++ b/sdk/python/kfp/dsl/pipeline_task.py @@ -164,13 +164,14 @@ def validate_placeholder_types( self.pipeline_spec = self.component_spec.implementation.graph self._outputs = { - output_name: pipeline_channel.create_pipeline_channel( - name=output_name, - channel_type=output_spec.type, - task_name=self._task_spec.name, - is_artifact_list=output_spec.is_artifact_list, - ) for output_name, output_spec in ( - component_spec.outputs or {}).items() + output_name: + pipeline_channel.create_pipeline_channel( + name=output_name, + channel_type=output_spec.type, + task_name=self._task_spec.name, + is_artifact_list=output_spec.is_artifact_list, + ) for output_name, output_spec in ( + component_spec.outputs or {}).items() } self._inputs = args diff --git a/sdk/python/kfp/dsl/pipeline_task_test.py b/sdk/python/kfp/dsl/pipeline_task_test.py index 8543058b826..6e767112cc7 100644 --- a/sdk/python/kfp/dsl/pipeline_task_test.py +++ b/sdk/python/kfp/dsl/pipeline_task_test.py @@ -172,11 +172,10 @@ def test_set_valid_cpu_request_limit(self, cpu: str, expected_cpu: str): task.set_cpu_limit(cpu) self.assertEqual(expected_cpu, task.container_spec.resources.cpu_limit) - @parameterized.parameters( - { - 'gpu_limit': '1', - 'expected_gpu_number': '1', - },) + @parameterized.parameters({ + 'gpu_limit': '1', + 'expected_gpu_number': '1', + },) def test_set_valid_gpu_limit(self, gpu_limit: str, expected_gpu_number: str): task = pipeline_task.PipelineTask( diff --git a/sdk/python/kfp/dsl/structures.py b/sdk/python/kfp/dsl/structures.py index 4350cf9895d..2dba57411e5 100644 --- a/sdk/python/kfp/dsl/structures.py +++ b/sdk/python/kfp/dsl/structures.py @@ -26,8 +26,10 @@ from kfp.dsl import placeholders from kfp.dsl import utils from kfp.dsl import v1_structures +from kfp.dsl.component_task_config import TaskConfigPassthrough from kfp.dsl.container_component_artifact_channel import \ ContainerComponentArtifactChannel +from kfp.dsl.task_config import TaskConfig from kfp.dsl.types import artifact_types from kfp.dsl.types import type_annotations from kfp.dsl.types import type_utils @@ -223,7 +225,9 @@ def spec_type_is_parameter(type_: str) -> bool: in_memory_type = type_annotations.maybe_strip_optional_from_annotation_string( type_utils.get_canonical_name_for_outer_generic(type_)) - return in_memory_type in type_utils.IN_MEMORY_SPEC_TYPE_TO_IR_TYPE or in_memory_type == 'PipelineTaskFinalStatus' + return (in_memory_type in type_utils.IN_MEMORY_SPEC_TYPE_TO_IR_TYPE or + in_memory_type == 'PipelineTaskFinalStatus' or + in_memory_type == 'TaskConfig') @dataclasses.dataclass @@ -570,6 +574,8 @@ class ComponentSpec: outputs: Optional[Dict[str, OutputSpec]] = None platform_spec: pipeline_spec_pb2.PlatformSpec = dataclasses.field( default_factory=pipeline_spec_pb2.PlatformSpec) + # Optional passthroughs for TaskConfig fields + task_config_passthroughs: Optional[List[TaskConfigPassthrough]] = None def __post_init__(self) -> None: self._transform_name() @@ -640,8 +646,9 @@ def from_v1_component_spec( ] env = { key: - placeholders.maybe_convert_v1_yaml_placeholder_to_v2_placeholder( - command, component_dict=component_dict) + placeholders + .maybe_convert_v1_yaml_placeholder_to_v2_placeholder( + command, component_dict=component_dict) for key, command in container.get('env', {}).items() } container_spec = 
ContainerSpecImplementation.from_container_dict({ diff --git a/sdk/python/kfp/dsl/task_config.py b/sdk/python/kfp/dsl/task_config.py new file mode 100644 index 00000000000..f2d48b611b5 --- /dev/null +++ b/sdk/python/kfp/dsl/task_config.py @@ -0,0 +1,118 @@ +"""Definition for TaskConfig.""" + +import dataclasses +from typing import Any, Dict, List, Optional + + +# TaskConfig needs to stay aligned with the TaskConfig in backend/src/v2/driver/driver.go. +@dataclasses.dataclass +class TaskConfig: + """Configurations for a task. + + Annotate a component parameter with this type when you want the task's + runtime configuration to be forwarded to an external workload and optionally applied to the + task's own pod. This is useful when the task launches another Kubernetes resource (for example, + a Kubeflow Trainer job). + + This is an empty value by default and is populated if the component is annotated with + task_config_passthroughs. + + All fields are optional and map 1:1 to fragments of the Kubernetes Pod + spec. These fields have values as Python dictionaries/lists that conform to the + Kubernetes JSON schema. + + Example: + :: + + @dsl.component( + packages_to_install=["kubernetes"], + task_config_passthroughs=[ + dsl.TaskConfigField.RESOURCES, + dsl.TaskConfigField.KUBERNETES_TOLERATIONS, + dsl.TaskConfigField.KUBERNETES_NODE_SELECTOR, + dsl.TaskConfigField.KUBERNETES_AFFINITY, + dsl.TaskConfigPassthrough(field=dsl.TaskConfigField.ENV, apply_to_task=True), + dsl.TaskConfigPassthrough(field=dsl.TaskConfigField.KUBERNETES_VOLUMES, apply_to_task=True), + ], + ) + def train(num_nodes: int, workspace_path: str, output_model: dsl.Output[dsl.Model], task_config: dsl.TaskConfig): + import os + import shutil + from kubernetes import client as k8s_client, config + config.load_incluster_config() + + with open( + "/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r" + ) as ns_file: + namespace = ns_file.readline() + + train_job_script = "with open('/kfp-workspace/model', 'w') as f: f.write('hello')" + + dataset_path = os.path.join(workspace_path, "dataset") + with open(dataset_path, "w") as f: + f.write("Prepare dataset here...") + + train_job = { + "apiVersion": "trainer.kubeflow.org/v1alpha1", + "kind": "TrainJob", + "metadata": {"name": f"kfp-train-job", "namespace": namespace}, + "spec": { + "runtimeRef": {"name": "torch-distributed"}, + "trainer": { + "numNodes": num_nodes, + "resourcesPerNode": task_config.resources, + "env": task_config.env, + "command": ["python", "-c", train_job_script], + }, + "podSpecOverrides": [ + { + "targetJobs": [{"name": "node"}], + "volumes": task_config.volumes, + "containers": [ + { + "name": "node", + "volumeMounts": task_config.volume_mounts, + } + ], + "nodeSelector": task_config.node_selector, + "tolerations": task_config.tolerations, + } + ], + }, + } + print(train_job) + api_client = k8s_client.ApiClient() + custom_objects_api = k8s_client.CustomObjectsApi(api_client) + response = custom_objects_api.create_namespaced_custom_object( + group="trainer.kubeflow.org", + version="v1alpha1", + namespace=namespace, + plural="trainjobs", + body=train_job, + ) + job_name = response["metadata"]["name"] + print(f"TrainJob {job_name} created successfully") + + print("Polling train job code goes here...") + + print("Copying output model") + shutil.copy(os.path.join(workspace_path, "model"), output_model.path) + + @dsl.pipeline + def example_task_config(): + train_task = train(num_nodes=1, workspace_path=dsl.WORKSPACE_PATH_PLACEHOLDER) + 
train_task.set_cpu_request("1") + train_task.set_memory_request("20Gi") + train_task.set_cpu_limit("2") + train_task.set_memory_limit("50Gi") + train_task.set_accelerator_type("nvidia.com/gpu") + train_task.set_accelerator_limit("1") + """ + + affinity: Optional[Dict[str, Any]] = None + tolerations: Optional[List[Dict[str, Any]]] = None + node_selector: Optional[Dict[str, str]] = None + env: Optional[List[Dict[str, Any]]] = None + volumes: Optional[List[Dict[str, Any]]] = None + volume_mounts: Optional[List[Dict[str, Any]]] = None + resources: Optional[Dict[str, Any]] = None diff --git a/sdk/python/kfp/dsl/templates/__init__.py b/sdk/python/kfp/dsl/templates/__init__.py new file mode 100644 index 00000000000..9e7ba0e123f --- /dev/null +++ b/sdk/python/kfp/dsl/templates/__init__.py @@ -0,0 +1 @@ +# Templates package for KFP DSL code generation diff --git a/sdk/python/kfp/dsl/templates/notebook_executor.py b/sdk/python/kfp/dsl/templates/notebook_executor.py new file mode 100644 index 00000000000..0bdc21f16e4 --- /dev/null +++ b/sdk/python/kfp/dsl/templates/notebook_executor.py @@ -0,0 +1,258 @@ +"""Template for notebook executor code generation. + +This module contains the actual Python functions that get embedded into notebook +components at runtime. Using inspect.getsource() to convert them to source code +provides better maintainability, testing, and IDE support. + +Note: The template functions import their dependencies locally to avoid requiring +those dependencies when this module is imported for code generation. +""" + +import inspect +import textwrap + + +def __kfp_write_parameters_cell(nb, params): + """Inject parameters following Papermill semantics. + + - If a cell tagged with 'parameters' exists, insert an overriding + 'injected-parameters' cell immediately after it. + - Otherwise, insert the 'injected-parameters' cell at the top. + """ + import json + + import nbformat + + if not params: + return + + # Build the injected parameters cell + assignments = [] + for key, value in params.items(): + serialized = json.dumps(value) + assignments.append(key + ' = json.loads(' + repr(serialized) + ')') + source = 'import json\n' + '\n'.join(assignments) + '\n' + cell = nbformat.v4.new_code_cell(source=source) + cell.metadata.setdefault('tags', []) + if 'injected-parameters' not in cell.metadata['tags']: + cell.metadata['tags'].append('injected-parameters') + + # Locate the first 'parameters' tagged cell + insert_idx = 0 + for idx, existing in enumerate(nb.get('cells', [])): + if existing.get('cell_type') != 'code': + continue + tags = existing.get('metadata', {}).get('tags', []) or [] + if 'parameters' in tags: + insert_idx = idx + 1 + break + + nb.cells.insert(insert_idx, cell) + + +def _kfp_stream_single_output(output, cell_idx): + """Stream a single notebook output immediately during execution. + + Prints stdout/stderr and text/plain display outputs to the console + so users see cell output as it happens (no need to wait until the + notebook finishes). 
+ """ + import sys + output_type = output.get('output_type') + + if output_type == 'stream': + text = output.get('text', '') + if text: + try: + print(f'[nb cell {cell_idx} stream] ', end='', flush=False) + except Exception: + pass + print(text, end='' if text.endswith('\n') else '\n', flush=True) + elif output_type == 'error': + for line in output.get('traceback', []): + print(line, file=sys.stderr, flush=True) + else: + # Handle display_data and execute_result + data = output.get('data', {}) + if 'text/plain' in data: + print(data['text/plain'], flush=True) + elif 'application/json' in data: + try: + import json as __kfp_json + parsed = data['application/json'] + # Some kernels send JSON as string; try to parse if needed + if isinstance(parsed, str): + try: + parsed = __kfp_json.loads(parsed) + except Exception: + pass + print( + __kfp_json.dumps(parsed, indent=2, ensure_ascii=False), + flush=True) + except Exception: + # Fallback to raw + print(str(data.get('application/json')), flush=True) + elif 'text/markdown' in data: + # Print markdown as-is; frontends may render, logs will show raw markdown + print(data['text/markdown'], flush=True) + + +def kfp_run_notebook(**kwargs): + """Execute the embedded notebook with injected parameters. + + Parameters provided via kwargs are injected into the notebook + following Papermill semantics (after a parameters cell if present, + otherwise at top). Execution uses a Python kernel; nbclient and + ipykernel must be available at runtime (installed via + packages_to_install for notebook components). + """ + import os + import subprocess + import sys + + from nbclient import NotebookClient + import nbformat + + # Ensure a usable 'python3' kernel is present; install kernelspec if missing + print('[KFP Notebook] Checking for Python kernel...', flush=True) + try: + from jupyter_client.kernelspec import KernelSpecManager # type: ignore + ksm = KernelSpecManager() + have_py3 = 'python3' in ksm.find_kernel_specs() + if not have_py3: + print( + '[KFP Notebook] Python3 kernel not found, installing...', + flush=True) + try: + subprocess.run([ + sys.executable, '-m', 'ipykernel', 'install', '--user', + '--name', 'python3', '--display-name', 'Python 3' + ], + check=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL) + print( + '[KFP Notebook] Python3 kernel installed successfully', + flush=True) + except subprocess.CalledProcessError as e: + raise RuntimeError( + "Failed to install 'python3' kernelspec for ipykernel. " + "Ensure ipykernel is available in the environment or include it via packages_to_install. " + f"Error: {e}") from e + else: + print('[KFP Notebook] Python3 kernel found', flush=True) + except ImportError as e: + raise RuntimeError( + "jupyter_client is not available. Ensure it's installed in the environment or include it via packages_to_install. " + f"Error: {e}") from e + + nb_path = os.path.join(__KFP_EMBEDDED_ASSET_DIR, __KFP_NOTEBOOK_REL_PATH) + + try: + nb = nbformat.read(nb_path, as_version=4) + except Exception as e: + raise RuntimeError( + f'Failed to read notebook {nb_path}. Ensure it is a valid Jupyter notebook. 
Error: {e}' + ) from e + + try: + __kfp_write_parameters_cell(nb, kwargs) + print( + f'[KFP Notebook] Executing notebook with {len(nb.get("cells", []))} cells', + flush=True) + + # Use our custom streaming client for real-time output (defined in the + # generated ephemeral source) + client = KFPStreamingNotebookClient( + nb, + timeout=None, + allow_errors=False, + store_widget_state=False, + kernel_name='python3') + client.execute(cwd=__KFP_EMBEDDED_ASSET_DIR) + + print('[KFP Notebook] Execution complete', flush=True) + + except Exception as e: + raise RuntimeError(f'Notebook execution failed. Error: {e}') from e + + +def get_notebook_executor_source(archive_b64_placeholder: str, + notebook_relpath_placeholder: str) -> str: + """Generate the notebook execution helper source code. + + Uses inspect.getsource() to extract the actual function definitions, + providing better maintainability, and includes archive extraction at import. + + Args: + archive_b64_placeholder: The base64-encoded gzip-compressed tar archive containing the notebook and assets + notebook_relpath_placeholder: The relative path to the notebook within the extracted archive root + + Returns: + Python source code for notebook execution helpers + """ + # Get source code for helper functions that don't require nbclient + functions = [ + __kfp_write_parameters_cell, _kfp_stream_single_output, kfp_run_notebook + ] + + # Extract and dedent source code for all helper functions + function_sources = [ + textwrap.dedent(inspect.getsource(func)) for func in functions + ] + + # Define the streaming client class inline in the generated source to avoid + # importing nbclient in the SDK environment. + streaming_client_source = textwrap.dedent(""" + class KFPStreamingNotebookClient(NotebookClient): + # Streams outputs in real-time by emitting outputs during message processing. 
+ def process_message(self, msg, cell, cell_index): + # Call the parent implementation to handle the message normally + output = super().process_message(msg, cell, cell_index) + + # If an output was created, stream it immediately + if output is not None: + _kfp_stream_single_output(output, cell_index) + + return output + """) + + # Combine everything into the final source with archive extraction at import + functions_code = streaming_client_source + '\n' + '\n'.join( + function_sources) + return f"""__KFP_EMBEDDED_ARCHIVE_B64 = '{archive_b64_placeholder}' +__KFP_NOTEBOOK_REL_PATH = '{notebook_relpath_placeholder}' + +import base64 as __kfp_b64 +import gzip as __kfp_gzip +import io as __kfp_io +import os as __kfp_os +import sys as __kfp_sys +import tarfile as __kfp_tarfile +import tempfile as __kfp_tempfile +from nbclient import NotebookClient + +# Extract embedded archive at import time to ensure sys.path and globals are set +print('[KFP] Extracting embedded notebook archive...', flush=True) +__kfp_tmpdir = __kfp_tempfile.TemporaryDirectory() +__KFP_EMBEDDED_ASSET_DIR = __kfp_tmpdir.name +try: + __kfp_bytes = __kfp_b64.b64decode(__KFP_EMBEDDED_ARCHIVE_B64.encode('ascii')) + with __kfp_tarfile.open(fileobj=__kfp_io.BytesIO(__kfp_bytes), mode='r:gz') as __kfp_tar: + __kfp_tar.extractall(path=__KFP_EMBEDDED_ASSET_DIR) + print(f'[KFP] Notebook archive extracted to: {{__KFP_EMBEDDED_ASSET_DIR}}', flush=True) +except Exception as __kfp_e: + raise RuntimeError(f'Failed to extract embedded notebook archive: {{__kfp_e}}') + +# Always prepend the extracted directory to sys.path for import resolution +if __KFP_EMBEDDED_ASSET_DIR not in __kfp_sys.path: + __kfp_sys.path.insert(0, __KFP_EMBEDDED_ASSET_DIR) + print(f'[KFP] Added notebook archive directory to Python path', flush=True) + +# Optional convenience for generic embedded file variable name +__KFP_EMBEDDED_ASSET_FILE = __kfp_os.path.join(__KFP_EMBEDDED_ASSET_DIR, __KFP_NOTEBOOK_REL_PATH) + +{functions_code} + +# Bind helper into dsl namespace so user code can call dsl.run_notebook(...) +dsl.run_notebook = kfp_run_notebook""" diff --git a/sdk/python/kfp/dsl/types/type_annotations.py b/sdk/python/kfp/dsl/types/type_annotations.py index 4f1c99ba92c..a0d45d89f55 100644 --- a/sdk/python/kfp/dsl/types/type_annotations.py +++ b/sdk/python/kfp/dsl/types/type_annotations.py @@ -125,6 +125,15 @@ class OutputAnnotation: """Marker type for output artifacts.""" +class EmbeddedAnnotation: + """Marker type for embedded runtime-only inputs. + + Parameters annotated with ``dsl.EmbeddedInput[T]`` are not part of + the component interface and are injected at runtime by the SDK. They + provide access to an extracted embedded artifact's filesystem path. 
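+
+    For example (an illustrative sketch; the embedded path is hypothetical):
+    ::
+
+        @dsl.component(embedded_artifact_path='assets/vocab.txt')
+        def use_vocab(vocab: dsl.EmbeddedInput[dsl.Dataset]):
+            # For a single embedded file, ``vocab.path`` points to the
+            # extracted file at runtime.
+            with open(vocab.path, 'r') as f:
+                print(f.read())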
+ """ + + def is_Input_Output_artifact_annotation(typ) -> bool: if not hasattr(typ, '__metadata__'): return False @@ -135,6 +144,14 @@ def is_Input_Output_artifact_annotation(typ) -> bool: return True +def is_embedded_input_annotation(typ) -> bool: + """Returns True if typ is of type EmbeddedInput[T].""" + if not hasattr(typ, '__metadata__'): + return False + + return typ.__metadata__[0] == EmbeddedAnnotation + + def is_artifact_wrapped_in_Input(typ: Any) -> bool: """Returns True if typ is of type Input[T].""" if not is_Input_Output_artifact_annotation(typ): diff --git a/sdk/python/kfp/dsl/types/type_utils.py b/sdk/python/kfp/dsl/types/type_utils.py index bc8e54f4468..f50ae772135 100644 --- a/sdk/python/kfp/dsl/types/type_utils.py +++ b/sdk/python/kfp/dsl/types/type_utils.py @@ -20,6 +20,7 @@ import kfp from kfp.dsl import task_final_status +from kfp.dsl.task_config import TaskConfig from kfp.dsl.types import artifact_types from kfp.dsl.types import type_annotations @@ -146,6 +147,18 @@ def is_task_final_status_type(type_name: Optional[Union[str, dict]]) -> bool: type_name == task_final_status.PipelineTaskFinalStatus.__name__) +def is_task_config_type(type_name: Optional[Union[str, dict]]) -> bool: + """Check if a ComponentSpec I/O type is TaskConfig. + + Args: + type_name: type name of the ComponentSpec I/O type. + + Returns: + True if the type name is 'TaskConfig'. + """ + return isinstance(type_name, str) and (type_name == TaskConfig.__name__) + + def is_parameter_type(type_name: Optional[Union[str, dict]]) -> bool: """Check if a ComponentSpec I/O type is considered as a parameter type. @@ -163,7 +176,8 @@ def is_parameter_type(type_name: Optional[Union[str, dict]]) -> bool: return False return type_name.lower( - ) in PARAMETER_TYPES_MAPPING or is_task_final_status_type(type_name) + ) in PARAMETER_TYPES_MAPPING or is_task_final_status_type( + type_name) or is_task_config_type(type_name) def bundled_artifact_to_artifact_proto( @@ -195,8 +209,8 @@ def get_parameter_type( Raises: AttributeError: if type_name is not a string type. """ - # Special handling for PipelineTaskFinalStatus, treat it as Dict type. - if is_task_final_status_type(param_type): + # Special handling for PipelineTaskFinalStatus and TaskConfig, treat them as Dict type. 
+ if is_task_final_status_type(param_type) or is_task_config_type(param_type): param_type = 'dict' if type(param_type) == type: type_name = param_type.__name__ @@ -461,6 +475,7 @@ def __exit__(self, *unused_args) -> None: 'STRUCT': 'Dict', 'BOOLEAN': 'Boolean', 'TASK_FINAL_STATUS': task_final_status.PipelineTaskFinalStatus.__name__, + 'TASK_CONFIG': TaskConfig.__name__, } IR_TYPE_TO_COMMENT_TYPE_STRING = { @@ -471,6 +486,7 @@ def __exit__(self, *unused_args) -> None: 'STRUCT': dict.__name__, 'BOOLEAN': bool.__name__, 'TASK_FINAL_STATUS': task_final_status.PipelineTaskFinalStatus.__name__, + 'TASK_CONFIG': TaskConfig.__name__, } IN_MEMORY_SPEC_TYPE_TO_IR_TYPE = { diff --git a/sdk/python/kfp/dsl/types/type_utils_test.py b/sdk/python/kfp/dsl/types/type_utils_test.py index 0272cc146d9..9f88dfc6c47 100644 --- a/sdk/python/kfp/dsl/types/type_utils_test.py +++ b/sdk/python/kfp/dsl/types/type_utils_test.py @@ -83,10 +83,9 @@ def __init__(self): class TypeUtilsTest(parameterized.TestCase): - @parameterized.parameters( - [(item, True) for item in _PARAMETER_TYPES] + - [(item, False) - for item in _KNOWN_ARTIFACT_TYPES + _UNKNOWN_ARTIFACT_TYPES]) + @parameterized.parameters([(item, True) for item in _PARAMETER_TYPES] + + [(item, False) for item in _KNOWN_ARTIFACT_TYPES + + _UNKNOWN_ARTIFACT_TYPES]) def test_is_parameter_type_true(self, type_name, expected_result): self.assertEqual(expected_result, type_utils.is_parameter_type(type_name)) diff --git a/sdk/python/kfp/dsl/v1_modelbase.py b/sdk/python/kfp/dsl/v1_modelbase.py index c1facf6c8e2..103f27ad782 100644 --- a/sdk/python/kfp/dsl/v1_modelbase.py +++ b/sdk/python/kfp/dsl/v1_modelbase.py @@ -217,7 +217,7 @@ def parse_object_from_struct_based_on_type(struct: Any, typ: Type[T]) -> T: inner_value_type = type_args[1] return { parse_object_from_struct_based_on_type(k, inner_key_type): - parse_object_from_struct_based_on_type(v, inner_value_type) + parse_object_from_struct_based_on_type(v, inner_value_type) for k, v in struct.items() } @@ -370,7 +370,9 @@ def __repr__(self): def __eq__(self, other): return self.__class__ == other.__class__ and { k: getattr(self, k) for k in self._get_field_names() - } == {k: getattr(other, k) for k in other._get_field_names()} + } == { + k: getattr(other, k) for k in other._get_field_names() + } def __ne__(self, other): return not self == other diff --git a/sdk/python/kfp/dsl/v1_structures.py b/sdk/python/kfp/dsl/v1_structures.py index 57cc7c6375e..962a974979b 100644 --- a/sdk/python/kfp/dsl/v1_structures.py +++ b/sdk/python/kfp/dsl/v1_structures.py @@ -209,14 +209,20 @@ def to_dict(self) -> Mapping[str, Any]: return {'executorInput': None} -CommandlineArgumentType = Union[str, InputValuePlaceholder, - InputPathPlaceholder, OutputPathPlaceholder, - InputUriPlaceholder, OutputUriPlaceholder, - InputMetadataPlaceholder, - InputOutputPortNamePlaceholder, - OutputMetadataPlaceholder, - ExecutorInputPlaceholder, 'ConcatPlaceholder', - 'IfPlaceholder',] +CommandlineArgumentType = Union[ + str, + InputValuePlaceholder, + InputPathPlaceholder, + OutputPathPlaceholder, + InputUriPlaceholder, + OutputUriPlaceholder, + InputMetadataPlaceholder, + InputOutputPortNamePlaceholder, + OutputMetadataPlaceholder, + ExecutorInputPlaceholder, + 'ConcatPlaceholder', + 'IfPlaceholder', +] class ConcatPlaceholder(ModelBase): #Non-standard attr names @@ -620,10 +626,18 @@ class LessThenOrEqualPredicate(BinaryPredicate): _serialized_names = {'operands': '<='} -PredicateType = Union[ArgumentType, EqualsPredicate, NotEqualsPredicate, - 
GreaterThanPredicate, GreaterThanOrEqualPredicate, - LessThenPredicate, LessThenOrEqualPredicate, - 'NotPredicate', 'AndPredicate', 'OrPredicate',] +PredicateType = Union[ + ArgumentType, + EqualsPredicate, + NotEqualsPredicate, + GreaterThanPredicate, + GreaterThanOrEqualPredicate, + LessThenPredicate, + LessThenOrEqualPredicate, + 'NotPredicate', + 'AndPredicate', + 'OrPredicate', +] class TwoBooleanOperands(ModelBase): diff --git a/sdk/python/kfp/local/config.py b/sdk/python/kfp/local/config.py index 9ea01d18369..6b94d402845 100755 --- a/sdk/python/kfp/local/config.py +++ b/sdk/python/kfp/local/config.py @@ -15,7 +15,8 @@ import abc import dataclasses import os -from typing import Union +import tempfile +from typing import Optional, Union from kfp import local @@ -45,12 +46,119 @@ class SubprocessRunner: use_venv: bool = True -@dataclasses.dataclass class DockerRunner: """Runner that indicates that local tasks should be run as a Docker container.""" - - def __post_init__(self): + DOCKER_CONTAINER_RUN_ARGS = { + # for available run parameters: + # https://docker-py.readthedocs.io/en/stable/containers.html + 'image', + 'command', + 'auto_remove', + 'blkio_weight_device', + 'blkio_weight', + 'cap_add', + 'cap_drop', + 'cgroup_parent', + 'cgroupns', + 'cpu_count', + 'cpu_percent', + 'cpu_period', + 'cpu_quota', + 'cpu_rt_period', + 'cpu_rt_runtime', + 'cpu_shares', + 'cpuset_cpus', + 'cpuset_mems', + 'detach', + 'device_cgroup_rules', + 'device_read_bps', + 'device_read_iops', + 'device_write_bps', + 'device_write_iops', + 'devices', + 'device_requests', + 'dns', + 'dns_opt', + 'dns_search', + 'domainname', + 'entrypoint', + 'environment', + 'extra_hosts', + 'group_add', + 'healthcheck', + 'hostname', + 'init', + 'init_path', + 'ipc_mode', + 'isolation', + 'kernel_memory', + 'labels', + 'links', + 'log_config', + 'lxc_conf', + 'mac_address', + 'mem_limit', + 'mem_reservation', + 'mem_swappiness', + 'memswap_limit', + 'mounts', + 'name', + 'nano_cpus', + 'network', + 'network_disabled', + 'network_mode', + 'networking_config', + 'oom_kill_disable', + 'oom_score_adj', + 'pid_mode', + 'pids_limit', + 'platform', + 'ports', + 'privileged', + 'publish_all_ports', + 'read_only', + 'remove', + 'restart_policy', + 'runtime', + 'security_opt', + 'shm_size', + 'stdin_open', + 'stdout', + 'stderr', + 'stop_signal', + 'storage_opt', + 'stream', + 'sysctls', + 'tmpfs', + 'tty', + 'ulimits', + 'use_config_proxy', + 'user', + 'userns_mode', + 'uts_mode', + 'version', + 'volume_driver', + 'volumes', + 'working_dir', + } + + def __init__(self, **container_run_args): + """Runner constructor, taking any arguments to propagate to + `containers.run` in the `docker` SDK. + + Args: + **container_run_args: Keyword arguments that comport with `containers.run` in the `docker` SDK, with some exceptions (see below). + + `containers.run` arguments are supported with the following exceptions: + - image + - command + - volumes + + Raises: + ImportError: Raised when `docker` is not installed + ValueError: Raised when unsupported `containers.run` arguments are supplied + """ try: import docker # noqa except ImportError as e: @@ -58,6 +166,24 @@ def __post_init__(self): f"Package 'docker' must be installed to use {DockerRunner.__name__!r}. Install it using 'pip install docker'." 
) from e + + unsupported_args = container_run_args.keys( + ) - self.DOCKER_CONTAINER_RUN_ARGS + + if unsupported_args: + raise ValueError( + f"Unsupported `docker run` arguments, see Package `docker` for details: {', '.join(unsupported_args)}" + ) + + excess_args: set = set(['image', 'command', 'volumes' + ]) & container_run_args.keys() + + if excess_args: + raise ValueError( + f"The following docker run arguments should not be specified: {', '.join(excess_args)}" + ) + + self.container_run_args = container_run_args + class LocalExecutionConfig: instance = None @@ -66,6 +192,7 @@ def __new__( cls, runner: SubprocessRunner, pipeline_root: str, + workspace_root: str, raise_on_error: bool, ) -> 'LocalExecutionConfig': # singleton pattern @@ -76,6 +203,7 @@ def __init__( self, runner: SubprocessRunner, pipeline_root: str, + workspace_root: str, raise_on_error: bool, ) -> None: permitted_runners = (SubprocessRunner, DockerRunner) @@ -85,6 +213,7 @@ def __init__( ) self.runner = runner self.pipeline_root = pipeline_root + self.workspace_root = workspace_root self.raise_on_error = raise_on_error @classmethod @@ -99,6 +228,7 @@ def init( # annotate with subclasses, not parent class, for more helpful ref docs runner: Union[SubprocessRunner, DockerRunner], pipeline_root: str = './local_outputs', + workspace_root: Optional[str] = None, raise_on_error: bool = True, ) -> None: """Initializes a local execution session. @@ -108,12 +238,17 @@ def init( Args: runner: The runner to use. Supported runners: kfp.local.SubprocessRunner and kfp.local.DockerRunner. pipeline_root: Destination for task outputs. + workspace_root: Directory to use as workspace. If None, a temporary directory will be created. raise_on_error: If True, raises an exception when a local task execution fails. If False, fails gracefully and does not terminate the current program.
""" # updates a global config pipeline_root = os.path.abspath(pipeline_root) + if workspace_root is None: + workspace_root = tempfile.mkdtemp(prefix='kfp-workspace-') + LocalExecutionConfig( runner=runner, pipeline_root=pipeline_root, + workspace_root=workspace_root, raise_on_error=raise_on_error, ) diff --git a/sdk/python/kfp/local/config_test.py b/sdk/python/kfp/local/config_test.py index ad71cf5a6ac..d980a20d356 100755 --- a/sdk/python/kfp/local/config_test.py +++ b/sdk/python/kfp/local/config_test.py @@ -29,6 +29,7 @@ def test_local_runner_config_init(self): """Test instance attributes with one constructor call.""" config.LocalExecutionConfig( pipeline_root='my/local/root', + workspace_root='/tmp/test-workspace', runner=local.SubprocessRunner(use_venv=True), raise_on_error=True, ) @@ -36,6 +37,7 @@ def test_local_runner_config_init(self): instance = config.LocalExecutionConfig.instance self.assertEqual(instance.pipeline_root, 'my/local/root') + self.assertEqual(instance.workspace_root, '/tmp/test-workspace') self.assertEqual(instance.runner, local.SubprocessRunner(use_venv=True)) self.assertIs(instance.raise_on_error, True) @@ -43,11 +45,13 @@ def test_local_runner_config_is_singleton(self): """Test instance attributes with multiple constructor calls.""" config.LocalExecutionConfig( pipeline_root='my/local/root', + workspace_root='/tmp/test-workspace-1', runner=local.SubprocessRunner(), raise_on_error=True, ) config.LocalExecutionConfig( pipeline_root='other/local/root', + workspace_root='/tmp/test-workspace-2', runner=local.SubprocessRunner(use_venv=False), raise_on_error=False, ) @@ -55,6 +59,7 @@ def test_local_runner_config_is_singleton(self): instance = config.LocalExecutionConfig.instance self.assertEqual(instance.pipeline_root, 'other/local/root') + self.assertEqual(instance.workspace_root, '/tmp/test-workspace-2') self.assertEqual(instance.runner, local.SubprocessRunner(use_venv=False)) self.assertFalse(instance.raise_on_error, False) @@ -62,6 +67,7 @@ def test_local_runner_config_is_singleton(self): def test_validate_success(self): config.LocalExecutionConfig( pipeline_root='other/local/root', + workspace_root='/tmp/test-workspace', runner=local.SubprocessRunner(use_venv=False), raise_on_error=False, ) @@ -131,6 +137,23 @@ def test_import_error(self): ): local.DockerRunner() + def test_good_container_args(self): + local.DockerRunner(network_mode='none') + + def test_unknown_container_args(self): + with self.assertRaisesRegex( + ValueError, + r'Unsupported `docker run` arguments, see Package `docker` for details: .*' + ): + local.DockerRunner(image='spaghetti', favorite_vegetable='zucchini') + + def test_excess_container_args(self): + with self.assertRaisesRegex( + ValueError, + r'The following docker run arguments should not be specififed: .*' + ): + local.DockerRunner(image='spaghetti') + if __name__ == '__main__': unittest.main() diff --git a/sdk/python/kfp/local/docker_task_handler.py b/sdk/python/kfp/local/docker_task_handler.py index 08d48b35b73..39b5f25e323 100755 --- a/sdk/python/kfp/local/docker_task_handler.py +++ b/sdk/python/kfp/local/docker_task_handler.py @@ -14,6 +14,8 @@ import os from typing import Any, Dict, List +import docker +from kfp.dsl import constants as dsl_constants from kfp.local import config from kfp.local import status from kfp.local import task_handler_interface @@ -34,16 +36,41 @@ def __init__( self.pipeline_root = pipeline_root self.runner = runner - def get_volumes_to_mount(self) -> Dict[str, Any]: - """Gets the volume configuration to mount 
the pipeline root to the - container so that outputs can be obtained outside of the container.""" + def get_volumes_to_mount(self, + client: docker.DockerClient = None + ) -> Dict[str, Any]: + """Gets the volume configuration to mount the pipeline root and + workspace to the container so that outputs and workspace can be + accessed outside of the container.""" + default_mode = 'rw' + if client is not None and 'name=selinux' in client.info().get( + 'SecurityOptions', []): + default_mode = f'{default_mode},z' + if not os.path.isabs(self.pipeline_root): # defensive check. this is enforced by upstream code. # users should not hit this, raise ValueError( "'pipeline_root' should be an absolute path to correctly construct the volume mount specification." ) - return {self.pipeline_root: {'bind': self.pipeline_root, 'mode': 'rw'}} + volumes = { + self.pipeline_root: { + 'bind': self.pipeline_root, + 'mode': default_mode + } + } + # Add workspace volume mount if workspace is configured + if (config.LocalExecutionConfig.instance and + config.LocalExecutionConfig.instance.workspace_root): + workspace_root = config.LocalExecutionConfig.instance.workspace_root + if not os.path.isabs(workspace_root): + workspace_root = os.path.abspath(workspace_root) + # Mount workspace to the standard KFP workspace path + volumes[workspace_root] = { + 'bind': dsl_constants.WORKSPACE_MOUNT_PATH, + 'mode': default_mode + } + return volumes def run(self) -> status.Status: """Runs the Docker container and returns the status.""" @@ -52,13 +79,13 @@ def run(self) -> status.Status: import docker client = docker.from_env() try: - volumes = self.get_volumes_to_mount() + volumes = self.get_volumes_to_mount(client) return_code = run_docker_container( client=client, image=self.image, command=self.full_command, volumes=volumes, - ) + **self.runner.container_run_args) finally: client.close() return status.Status.SUCCESS if return_code == 0 else status.Status.FAILURE @@ -70,21 +97,18 @@ def add_latest_tag_if_not_present(image: str) -> str: return image -def run_docker_container( - client: 'docker.DockerClient', - image: str, - command: List[str], - volumes: Dict[str, Any], -) -> int: +def run_docker_container(client: 'docker.DockerClient', image: str, + command: List[str], volumes: Dict[str, Any], + **container_run_args) -> int: image = add_latest_tag_if_not_present(image=image) image_exists = any( - image in existing_image.tags for existing_image in client.images.list()) + image in (existing_image.tags + existing_image.attrs['RepoDigests']) + for existing_image in client.images.list()) if image_exists: print(f'Found image {image!r}\n') else: print(f'Pulling image {image!r}') - repository, tag = image.split(':') - client.images.pull(repository=repository, tag=tag) + client.images.pull(image) print('Image pull complete\n') container = client.containers.run( image=image, @@ -93,7 +117,7 @@ def run_docker_container( stdout=True, stderr=True, volumes=volumes, - ) + **container_run_args) for line in container.logs(stream=True): # the inner logs should already have trailing \n # we do not need to add another diff --git a/sdk/python/kfp/local/docker_task_handler_test.py b/sdk/python/kfp/local/docker_task_handler_test.py index 71f8be21361..bfd265485fe 100755 --- a/sdk/python/kfp/local/docker_task_handler_test.py +++ b/sdk/python/kfp/local/docker_task_handler_test.py @@ -20,6 +20,7 @@ from kfp import dsl from kfp import local from kfp.dsl import Artifact +from kfp.dsl import constants as dsl_constants from kfp.dsl import Output from kfp.local 
import docker_task_handler from kfp.local import testing_utilities @@ -93,55 +94,163 @@ def test_cwd_volume(self): class TestDockerTaskHandler(DockerMockTestCase): def test_get_volumes_to_mount(self): - handler = docker_task_handler.DockerTaskHandler( - image='alpine', - full_command=['echo', 'foo'], - pipeline_root=os.path.abspath('my_root'), - runner=local.DockerRunner(), - ) - volumes = handler.get_volumes_to_mount() - self.assertEqual( - volumes, { + # Mock LocalExecutionConfig.instance to be None (no workspace configured) + with mock.patch('kfp.local.config.LocalExecutionConfig.instance', None): + handler = docker_task_handler.DockerTaskHandler( + image='alpine', + full_command=['echo', 'foo'], + pipeline_root=os.path.abspath('my_root'), + runner=local.DockerRunner(), + ) + volumes = handler.get_volumes_to_mount() + self.assertEqual( + volumes, { + os.path.abspath('my_root'): { + 'bind': os.path.abspath('my_root'), + 'mode': 'rw' + } + }) + + def test_get_volumes_to_mount_with_workspace(self): + # Mock the LocalExecutionConfig to have a workspace_root + with mock.patch('kfp.local.config.LocalExecutionConfig.instance' + ) as mock_instance: + mock_instance.workspace_root = '/tmp/test-workspace' + + handler = docker_task_handler.DockerTaskHandler( + image='alpine', + full_command=['echo', 'foo'], + pipeline_root=os.path.abspath('my_root'), + runner=local.DockerRunner(), + ) + volumes = handler.get_volumes_to_mount() + + expected_volumes = { os.path.abspath('my_root'): { 'bind': os.path.abspath('my_root'), 'mode': 'rw' + }, + '/tmp/test-workspace': { + 'bind': dsl_constants.WORKSPACE_MOUNT_PATH, + 'mode': 'rw' } - }) + } + self.assertEqual(volumes, expected_volumes) - def test_run(self): - handler = docker_task_handler.DockerTaskHandler( - image='alpine', - full_command=['echo', 'foo'], - pipeline_root=os.path.abspath('my_root'), - runner=local.DockerRunner(), - ) + def test_get_volumes_to_mount_with_relative_workspace(self): + # Mock the LocalExecutionConfig to have a relative workspace_root + with mock.patch('kfp.local.config.LocalExecutionConfig.instance' + ) as mock_instance: + mock_instance.workspace_root = 'test-workspace' - handler.run() - self.mocked_docker_client.containers.run.assert_called_once_with( - image='alpine:latest', - command=['echo', 'foo'], - detach=True, - stdout=True, - stderr=True, - volumes={ + handler = docker_task_handler.DockerTaskHandler( + image='alpine', + full_command=['echo', 'foo'], + pipeline_root=os.path.abspath('my_root'), + runner=local.DockerRunner(), + ) + volumes = handler.get_volumes_to_mount() + + # The relative workspace path should be converted to absolute + abs_workspace_path = os.path.abspath('test-workspace') + expected_volumes = { os.path.abspath('my_root'): { 'bind': os.path.abspath('my_root'), 'mode': 'rw' + }, + abs_workspace_path: { + 'bind': dsl_constants.WORKSPACE_MOUNT_PATH, + 'mode': 'rw' } - }, - ) + } + self.assertEqual(volumes, expected_volumes) - def test_pipeline_root_relpath(self): - with self.assertRaisesRegex( - ValueError, - r"'pipeline_root' should be an absolute path to correctly construct the volume mount specification\." 
- ): - docker_task_handler.DockerTaskHandler( + def test_get_volumes_to_mount_with_selinux(self): + # Mock LocalExecutionConfig.instance to have a workspace_root + with mock.patch('kfp.local.config.LocalExecutionConfig.instance' + ) as mock_instance: + mock_instance.workspace_root = '/tmp/test-workspace' + + # Create handler + handler = docker_task_handler.DockerTaskHandler( + image='alpine', + full_command=['echo', 'foo'], + pipeline_root=os.path.abspath('my_root'), + runner=local.DockerRunner(), + ) + + # Mock docker client with SELinux option + client = mock.Mock() + client.info.return_value = {'SecurityOptions': ['name=selinux']} + volumes = handler.get_volumes_to_mount(client) + + # Expect ",z" added to mode + self.assertEqual(volumes[os.path.abspath('my_root')]['mode'], + 'rw,z') + self.assertEqual(volumes['/tmp/test-workspace']['bind'], + dsl_constants.WORKSPACE_MOUNT_PATH) + self.assertEqual(volumes['/tmp/test-workspace']['mode'], 'rw,z') + + def test_get_volumes_to_mount_without_workspace(self): + # Mock the LocalExecutionConfig to have no workspace_root + with mock.patch('kfp.local.config.LocalExecutionConfig.instance' + ) as mock_instance: + mock_instance.workspace_root = None + + handler = docker_task_handler.DockerTaskHandler( + image='alpine', + full_command=['echo', 'foo'], + pipeline_root=os.path.abspath('my_root'), + runner=local.DockerRunner(), + ) + volumes = handler.get_volumes_to_mount() + + expected_volumes = { + os.path.abspath('my_root'): { + 'bind': os.path.abspath('my_root'), + 'mode': 'rw' + } + } + self.assertEqual(volumes, expected_volumes) + + def test_run(self): + # Mock LocalExecutionConfig.instance to be None (no workspace configured) + with mock.patch('kfp.local.config.LocalExecutionConfig.instance', None): + handler = docker_task_handler.DockerTaskHandler( image='alpine', full_command=['echo', 'foo'], - pipeline_root='my_relpath', + pipeline_root=os.path.abspath('my_root'), runner=local.DockerRunner(), - ).run() + ) + + handler.run() + self.mocked_docker_client.containers.run.assert_called_once_with( + image='alpine:latest', + command=['echo', 'foo'], + detach=True, + stdout=True, + stderr=True, + volumes={ + os.path.abspath('my_root'): { + 'bind': os.path.abspath('my_root'), + 'mode': 'rw' + } + }, + ) + + def test_pipeline_root_relpath(self): + # Mock LocalExecutionConfig.instance to be None (no workspace configured) + with mock.patch('kfp.local.config.LocalExecutionConfig.instance', None): + with self.assertRaisesRegex( + ValueError, + r"'pipeline_root' should be an absolute path to correctly construct the volume mount specification\." 
+ ): + docker_task_handler.DockerTaskHandler( + image='alpine', + full_command=['echo', 'foo'], + pipeline_root='my_relpath', + runner=local.DockerRunner(), + ).run() class TestAddLatestTagIfNotPresent(unittest.TestCase): diff --git a/sdk/python/kfp/local/executor_input_utils.py b/sdk/python/kfp/local/executor_input_utils.py index 82eaa9d5b9f..08e6a51b86b 100644 --- a/sdk/python/kfp/local/executor_input_utils.py +++ b/sdk/python/kfp/local/executor_input_utils.py @@ -48,16 +48,16 @@ def construct_executor_input( inputs = pipeline_spec_pb2.ExecutorInput.Inputs( parameter_values={ param_name: - pipeline_spec_builder.to_protobuf_value(arguments[param_name]) - if param_name in arguments else component_spec.input_definitions - .parameters[param_name].default_value + pipeline_spec_builder.to_protobuf_value(arguments[param_name]) + if param_name in arguments else component_spec.input_definitions + .parameters[param_name].default_value for param_name in input_parameter_keys }, # input artifact constants are not supported yet, # except when passed from an upstream output or parent component input artifacts={ artifact_name: - dsl_artifact_to_artifact_list(arguments[artifact_name]) + dsl_artifact_to_artifact_list(arguments[artifact_name]) for artifact_name, _ in component_spec.input_definitions.artifacts.items() }, @@ -67,16 +67,18 @@ def construct_executor_input( component_spec.output_definitions.parameters.keys()) outputs = pipeline_spec_pb2.ExecutorInput.Outputs( parameters={ - param_name: pipeline_spec_pb2.ExecutorInput.OutputParameter( - output_file=os.path.join(task_root, param_name)) + param_name: + pipeline_spec_pb2.ExecutorInput.OutputParameter( + output_file=os.path.join(task_root, param_name)) for param_name in output_parameter_keys }, artifacts={ - artifact_name: artifact_type_schema_to_artifact_list( - name=artifact_name, - artifact_type=artifact_spec.artifact_type, - task_root=task_root, - ) for artifact_name, artifact_spec in + artifact_name: + artifact_type_schema_to_artifact_list( + name=artifact_name, + artifact_type=artifact_spec.artifact_type, + task_root=task_root, + ) for artifact_name, artifact_spec in component_spec.output_definitions.artifacts.items() }, output_file=os.path.join(task_root, _EXECUTOR_OUTPUT_FILE), diff --git a/sdk/python/kfp/local/executor_output_utils.py b/sdk/python/kfp/local/executor_output_utils.py index 716eb405e6a..e86f9344d0a 100644 --- a/sdk/python/kfp/local/executor_output_utils.py +++ b/sdk/python/kfp/local/executor_output_utils.py @@ -96,11 +96,13 @@ def get_outputs_from_executor_output( # collect artifact outputs from executor output output_artifact_definitions = component_spec.output_definitions.artifacts output_artifacts = { - artifact_name: artifact_list_to_dsl_artifact( - artifact_list, - is_artifact_list=output_artifact_definitions[artifact_name] - .is_artifact_list, - ) for artifact_name, artifact_list in executor_output.artifacts.items() + artifact_name: + artifact_list_to_dsl_artifact( + artifact_list, + is_artifact_list=output_artifact_definitions[artifact_name] + .is_artifact_list, + ) + for artifact_name, artifact_list in executor_output.artifacts.items() } return {**output_parameters, **output_artifacts} diff --git a/sdk/python/kfp/local/pipeline_orchestrator.py b/sdk/python/kfp/local/pipeline_orchestrator.py index 117673688c4..72bfc406af8 100644 --- a/sdk/python/kfp/local/pipeline_orchestrator.py +++ b/sdk/python/kfp/local/pipeline_orchestrator.py @@ -13,6 +13,8 @@ # limitations under the License. 
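# [Editor's illustrative sketch, hypothetical paths] The docker_task_handler changes
# above produce a docker-py `containers.run(volumes=...)` mapping shaped roughly like:
#
#     {
#         '/abs/pipeline_root': {'bind': '/abs/pipeline_root', 'mode': 'rw'},
#         '/tmp/kfp-workspace-abc123': {'bind': dsl_constants.WORKSPACE_MOUNT_PATH, 'mode': 'rw'},
#     }
#
# and when the Docker daemon reports 'name=selinux' in its SecurityOptions, both
# modes become 'rw,z' so the bind mounts are relabeled for the container.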
"""Code for locally executing a compiled pipeline.""" import logging +import os +import shutil from typing import Any, Dict, List from kfp.local import config @@ -86,33 +88,41 @@ def _run_local_pipeline_implementation( # convert to dict for consistency with executors components = dict(pipeline_spec.components.items()) fail_stack: List[str] = [] - outputs, dag_status = dag_orchestrator.run_dag( - pipeline_resource_name=pipeline_resource_name, - dag_component_spec=pipeline_spec.root, - executors=executors, - components=components, - dag_arguments=arguments, - pipeline_root=pipeline_root, - runner=runner, - unique_pipeline_id=placeholder_utils.make_random_id(), - fail_stack=fail_stack, - ) - if dag_status == status.Status.SUCCESS: - status_with_color = logging_utils.format_status(status.Status.SUCCESS) - with logging_utils.local_logger_context(): - logging.info( - f'Pipeline {pipeline_name_with_color} finished with status {status_with_color}' - ) - return outputs - elif dag_status == status.Status.FAILURE: - log_and_maybe_raise_for_failure( - pipeline_name=pipeline_name, + try: + outputs, dag_status = dag_orchestrator.run_dag( + pipeline_resource_name=pipeline_resource_name, + dag_component_spec=pipeline_spec.root, + executors=executors, + components=components, + dag_arguments=arguments, + pipeline_root=pipeline_root, + runner=runner, + unique_pipeline_id=placeholder_utils.make_random_id(), fail_stack=fail_stack, - raise_on_error=raise_on_error, ) - return {} - else: - raise ValueError(f'Got unknown task status {dag_status.name}') + if dag_status == status.Status.SUCCESS: + status_with_color = logging_utils.format_status( + status.Status.SUCCESS) + with logging_utils.local_logger_context(): + logging.info( + f'Pipeline {pipeline_name_with_color} finished with status {status_with_color}' + ) + return outputs + elif dag_status == status.Status.FAILURE: + log_and_maybe_raise_for_failure( + pipeline_name=pipeline_name, + fail_stack=fail_stack, + raise_on_error=raise_on_error, + ) + return {} + else: + raise ValueError(f'Got unknown task status {dag_status.name}') + finally: + # Clean up the workspace directory + workspace_root = config.LocalExecutionConfig.instance.workspace_root + if workspace_root and os.path.exists(workspace_root): + shutil.rmtree(workspace_root) + logging.info(f'Cleaned up workspace: {workspace_root}') def log_and_maybe_raise_for_failure( diff --git a/sdk/python/kfp/local/pipeline_orchestrator_test.py b/sdk/python/kfp/local/pipeline_orchestrator_test.py index 49bf6829f9f..dbb972be298 100644 --- a/sdk/python/kfp/local/pipeline_orchestrator_test.py +++ b/sdk/python/kfp/local/pipeline_orchestrator_test.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
"""Tests for pipeline_orchestrator.py.""" - +import functools import io as stdlib_io import os from typing import NamedTuple @@ -27,10 +27,30 @@ from kfp.dsl import Output from kfp.dsl import pipeline_task from kfp.local import testing_utilities +import pytest ROOT_FOR_TESTING = './testing_root' +@pytest.fixture(autouse=True) +def set_packages_for_test_classes(monkeypatch, request): + if request.cls.__name__ in { + 'TestRunLocalPipeline', + 'TestFstringContainerComponent', + }: + root_dir = os.path.dirname( + os.path.dirname( + os.path.dirname(os.path.dirname(os.path.dirname(__file__))))) + kfp_pipeline_spec_path = os.path.join(root_dir, 'api', 'v2alpha1', + 'python') + original_dsl_component = dsl.component + monkeypatch.setattr( + dsl, 'component', + functools.partial( + original_dsl_component, + packages_to_install=[kfp_pipeline_spec_path])) + + class TestRunLocalPipeline(testing_utilities.LocalRunnerEnvironmentTestCase): def assert_output_dir_contents( @@ -74,9 +94,11 @@ def my_pipeline(): my_pipeline() def test_no_io(self): - local.init(local.SubprocessRunner(), pipeline_root=ROOT_FOR_TESTING) + local.init( + local.SubprocessRunner(use_venv=False), + pipeline_root=ROOT_FOR_TESTING) - @dsl.component + @dsl.component() def pass_op(): pass @@ -285,6 +307,132 @@ def my_pipeline(content: str = 'string') -> Model: }) self.assert_output_dir_contents(1, 2) + def test_notebook_component_local_exec(self): + local.init(local.SubprocessRunner(), pipeline_root=ROOT_FOR_TESTING) + + import json as _json + import os as _os + import tempfile as _tempfile + + nb = { + 'cells': [ + { + 'cell_type': 'code', + 'execution_count': None, + 'metadata': { + 'tags': ['parameters'] + }, + 'outputs': [], + 'source': ['# parameters\n', "text='hello'\n"], + }, + { + 'cell_type': + 'code', + 'execution_count': + None, + 'metadata': {}, + 'outputs': [], + 'source': [ + 'import os\n', + "os.makedirs('/tmp/kfp_nb_outputs', exist_ok=True)\n", + "with open('/tmp/kfp_nb_outputs/log.txt','w') as f: f.write(text)\n", + ], + }, + ], + 'metadata': { + 'kernelspec': { + 'display_name': 'Python 3', + 'language': 'python', + 'name': 'python3', + }, + 'language_info': { + 'name': 'python', + 'version': '3.11' + }, + }, + 'nbformat': 4, + 'nbformat_minor': 5, + } + + with _tempfile.TemporaryDirectory() as tmpdir: + nb_path = _os.path.join(tmpdir, 'nb.ipynb') + with open(nb_path, 'w', encoding='utf-8') as f: + _json.dump(nb, f) + + @dsl.notebook_component(notebook_path=nb_path,) + def nb_comp(msg: str) -> str: + dsl.run_notebook(text=msg) + + with open( + '/tmp/kfp_nb_outputs/log.txt', 'r', + encoding='utf-8') as f: + return f.read() + + @dsl.pipeline + def my_pipeline() -> str: + comp_result = nb_comp(msg='hi') + return comp_result.output + + result = my_pipeline() + self.assertEqual(result.output, 'hi') + + self.assert_output_dir_contents(1, 1) + + def test_embedded_artifact_local_exec(self): + local.init(local.SubprocessRunner(), pipeline_root=ROOT_FOR_TESTING) + + import os as _os + import tempfile as _tempfile + + with _tempfile.TemporaryDirectory() as srcdir: + with open( + _os.path.join(srcdir, 'data.txt'), 'w', + encoding='utf-8') as f: + f.write('EMBED') + + @dsl.component(embedded_artifact_path=srcdir) + def use_embed(cfg: dsl.EmbeddedInput[dsl.Dataset]) -> dsl.Dataset: + out = dsl.Dataset(uri=dsl.get_uri('out')) + import os + import shutil + shutil.copy(os.path.join(cfg.path, 'data.txt'), out.path) + return out + + @dsl.pipeline + def my_pipeline() -> dsl.Dataset: + return use_embed().output + + task = my_pipeline() + 
out_ds = task.output + with open(out_ds.path, 'r', encoding='utf-8') as f: + self.assertEqual(f.read(), 'EMBED') + self.assert_output_dir_contents(1, 1) + + def test_notebook_component_invalid_notebook_raises(self): + local.init( + local.SubprocessRunner(), + pipeline_root=ROOT_FOR_TESTING, + raise_on_error=True) + + import os as _os + import tempfile as _tempfile + + tmpdir = _tempfile.mkdtemp() + bad_nb = _os.path.join(tmpdir, 'bad.ipynb') + with open(bad_nb, 'w', encoding='utf-8') as f: + f.write('not a json') + + @dsl.notebook_component(notebook_path=bad_nb,) + def nb_comp(): + dsl.run_notebook() + + @dsl.pipeline + def my_pipeline(): + nb_comp() + + with self.assertRaises(RuntimeError): + my_pipeline() + def test_input_artifact_constant_not_permitted(self): local.init(local.SubprocessRunner(), pipeline_root=ROOT_FOR_TESTING) @@ -657,6 +805,71 @@ def my_pipeline(string: str = 'baz') -> str: task = my_pipeline() self.assertEqual(task.output, 'foo-bar-baz') + def test_workspace_functionality(self): + import tempfile + + # Create temporary directory for workspace + with tempfile.TemporaryDirectory() as temp_dir: + workspace_root = os.path.join(temp_dir, 'workspace') + os.makedirs(workspace_root, exist_ok=True) + + local.init( + local.SubprocessRunner(), + pipeline_root=ROOT_FOR_TESTING, + workspace_root=workspace_root) + + @dsl.component + def write_to_workspace(text: str, workspace_path: str) -> str: + import os + output_file = os.path.join(workspace_path, 'output.txt') + with open(output_file, 'w') as f: + f.write(text) + return output_file + + @dsl.component + def read_from_workspace(file_path: str) -> str: + with open(file_path, 'r') as f: + return f.read() + + @dsl.pipeline( + pipeline_config=dsl.PipelineConfig( + workspace=dsl.WorkspaceConfig(size='1Gi'))) + def my_pipeline(text: str = 'Hello workspace!') -> str: + # Write to workspace + write_task = write_to_workspace( + text=text, workspace_path=dsl.WORKSPACE_PATH_PLACEHOLDER) + + # Read from workspace + read_task = read_from_workspace(file_path=write_task.output) + + return read_task.output + + task = my_pipeline(text='Test workspace functionality!') + self.assertEqual(task.output, 'Test workspace functionality!') + self.assert_output_dir_contents(1, 2) + + def test_docker_runner_workspace_functionality(self): + import tempfile + + # Create temporary directory for workspace + with tempfile.TemporaryDirectory() as temp_dir: + workspace_root = os.path.join(temp_dir, 'workspace') + os.makedirs(workspace_root, exist_ok=True) + + # Test that DockerRunner can be initialized with workspace + local.init( + local.DockerRunner(), + pipeline_root=ROOT_FOR_TESTING, + workspace_root=workspace_root) + + # Verify that the workspace is properly configured + self.assertEqual( + local.config.LocalExecutionConfig.instance.workspace_root, + workspace_root) + self.assertEqual( + type(local.config.LocalExecutionConfig.instance.runner), + local.DockerRunner) + class TestFstringContainerComponent( testing_utilities.LocalRunnerEnvironmentTestCase): diff --git a/sdk/python/kfp/local/placeholder_utils.py b/sdk/python/kfp/local/placeholder_utils.py index 405b48e851f..85235730b5f 100644 --- a/sdk/python/kfp/local/placeholder_utils.py +++ b/sdk/python/kfp/local/placeholder_utils.py @@ -19,6 +19,8 @@ from typing import Any, Dict, List, Optional, Union from kfp import dsl +from kfp.dsl import constants as dsl_constants +from kfp.local import config def make_random_id() -> str: @@ -319,6 +321,14 @@ def recursively_resolve_struct(placeholder: Dict[str, Any]) -> 
str: return placeholder +def _raise_workspace_not_configured() -> None: + """Raises an exception when workspace_root is not configured.""" + raise RuntimeError( + 'Workspace not configured. Initialize with workspace_root parameter:\n' + 'local.init(runner=local.SubprocessRunner(), workspace_root=\'/path/to/workspace\')' + ) + + def resolve_individual_placeholder( element: str, executor_input_dict: Dict[str, Any], @@ -329,6 +339,26 @@ def resolve_individual_placeholder( pipeline_task_id: str, ) -> str: """Replaces placeholders for a single element.""" + + if dsl.WORKSPACE_PATH_PLACEHOLDER in element or dsl_constants.WORKSPACE_MOUNT_PATH in element: + # Ensure local config and workspace are available + if not (config.LocalExecutionConfig.instance and + config.LocalExecutionConfig.instance.workspace_root): + _raise_workspace_not_configured() + + runner = config.LocalExecutionConfig.instance.runner + # For DockerRunner, use the standardized in-container mount path. + # For SubprocessRunner (or others), use the host workspace path. + if isinstance(runner, config.DockerRunner): + workspace_value = dsl_constants.WORKSPACE_MOUNT_PATH + else: + workspace_value = config.LocalExecutionConfig.instance.workspace_root + + element = element.replace(dsl_constants.WORKSPACE_MOUNT_PATH, + workspace_value) + element = element.replace(dsl.WORKSPACE_PATH_PLACEHOLDER, + workspace_value) + # match on literal for constant placeholders PLACEHOLDERS = { r'{{$.outputs.output_file}}': diff --git a/sdk/python/kfp/local/placeholder_utils_test.py b/sdk/python/kfp/local/placeholder_utils_test.py index 05f83c2d274..63ef9368adb 100644 --- a/sdk/python/kfp/local/placeholder_utils_test.py +++ b/sdk/python/kfp/local/placeholder_utils_test.py @@ -14,11 +14,13 @@ """Tests for placeholder_utils.py.""" import json +import os from typing import List, Optional import unittest from absl.testing import parameterized from google.protobuf import json_format +from kfp.dsl import constants as dsl_constants from kfp.local import placeholder_utils from kfp.pipeline_spec import pipeline_spec_pb2 @@ -458,5 +460,193 @@ def test_simple(self): self.assertEqual(actual, expected) +class TestWorkspacePlaceholderResolution(unittest.TestCase): + """Tests for workspace placeholder resolution.""" + + def setUp(self): + """Set up test environment.""" + # Initialize local config for testing + from kfp import local + local.init(runner=local.SubprocessRunner()) + + def test_workspace_path_placeholder_resolution(self): + """Test that workspace path placeholder is correctly resolved.""" + executor_input_dict = { + 'inputs': { + 'parameterValues': { + 'workspace_path': '{{$.workspace_path}}' + } + }, + 'outputs': { + 'outputFile': '/tmp/outputs/output.txt' + } + } + + result = placeholder_utils.resolve_individual_placeholder( + element='{{$.workspace_path}}', + executor_input_dict=executor_input_dict, + pipeline_resource_name='test-pipeline', + task_resource_name='test-task', + pipeline_root='/tmp/pipeline', + pipeline_job_id='test-job-id', + pipeline_task_id='test-task-id') + + self.assertIsInstance(result, str) + self.assertTrue(result.startswith('/tmp/kfp-workspace-')) + + def test_embedded_workspace_placeholder(self): + """Test embedded workspace placeholder resolution.""" + executor_input_dict = { + 'inputs': { + 'parameterValues': { + 'file_path': + os.path.join('{{$.workspace_path}}', 'data', 'file.txt') + } + }, + 'outputs': { + 'outputFile': '/tmp/outputs/output.txt' + } + } + + result = placeholder_utils.resolve_individual_placeholder( + 
element="os.path.join('{{$.workspace_path}}', 'data', 'file.txt')", + executor_input_dict=executor_input_dict, + pipeline_resource_name='test-pipeline', + task_resource_name='test-task', + pipeline_root='/tmp/pipeline', + pipeline_job_id='test-job-id', + pipeline_task_id='test-task-id') + + self.assertIsInstance(result, str) + self.assertIn('os.path.join', result) + self.assertIn('/tmp/kfp-workspace-', result) + self.assertIn('data', result) + self.assertIn('file.txt', result) + + def test_workspace_configured_resolves(self): + """Test that workspace placeholder resolves when workspace is + configured.""" + executor_input_dict = { + 'inputs': { + 'parameterValues': { + 'workspace_path': '{{$.workspace_path}}' + } + }, + 'outputs': { + 'outputFile': '/tmp/outputs/output.txt' + } + } + + result = placeholder_utils.resolve_individual_placeholder( + element='{{$.workspace_path}}', + executor_input_dict=executor_input_dict, + pipeline_resource_name='test-pipeline', + task_resource_name='test-task', + pipeline_root='/tmp/pipeline', + pipeline_job_id='test-job-id', + pipeline_task_id='test-task-id') + + # Should resolve to actual workspace path + self.assertIsInstance(result, str) + self.assertTrue(result.startswith('/tmp/kfp-workspace-')) + + def test_literal_mount_path_replaced_for_subprocess(self): + """'/kfp-workspace' should be replaced with host workspace in + subprocess mode.""" + executor_input_dict = { + 'inputs': { + 'parameterValues': {} + }, + 'outputs': { + 'outputFile': '/tmp/outputs/output.txt' + } + } + + element = f'{dsl_constants.WORKSPACE_MOUNT_PATH}/data/file.txt' + result = placeholder_utils.resolve_individual_placeholder( + element=element, + executor_input_dict=executor_input_dict, + pipeline_resource_name='test-pipeline', + task_resource_name='test-task', + pipeline_root='/tmp/pipeline', + pipeline_job_id='test-job-id', + pipeline_task_id='test-task-id') + + self.assertIsInstance(result, str) + self.assertTrue(result.startswith('/tmp/kfp-workspace-')) + self.assertTrue(result.endswith('/data/file.txt')) + + +class TestWorkspacePlaceholderResolutionDocker(unittest.TestCase): + """DockerRunner-specific workspace placeholder resolution.""" + + def setUp(self): + from kfp import local + local.init(runner=local.DockerRunner()) + + def test_workspace_placeholder_maps_to_mount_path(self): + executor_input_dict = { + 'inputs': { + 'parameterValues': {} + }, + 'outputs': { + 'outputFile': '/tmp/outputs/output.txt' + } + } + + result = placeholder_utils.resolve_individual_placeholder( + element='{{$.workspace_path}}/data/file.txt', + executor_input_dict=executor_input_dict, + pipeline_resource_name='test-pipeline', + task_resource_name='test-task', + pipeline_root='/tmp/pipeline', + pipeline_job_id='test-job-id', + pipeline_task_id='test-task-id') + + self.assertEqual(result, + f'{dsl_constants.WORKSPACE_MOUNT_PATH}/data/file.txt') + + def test_literal_mount_path_kept_for_docker(self): + executor_input_dict = { + 'inputs': { + 'parameterValues': {} + }, + 'outputs': { + 'outputFile': '/tmp/outputs/output.txt' + } + } + + element = f'{dsl_constants.WORKSPACE_MOUNT_PATH}/data/file.txt' + result = placeholder_utils.resolve_individual_placeholder( + element=element, + executor_input_dict=executor_input_dict, + pipeline_resource_name='test-pipeline', + task_resource_name='test-task', + pipeline_root='/tmp/pipeline', + pipeline_job_id='test-job-id', + pipeline_task_id='test-task-id') + + self.assertEqual(result, element) + + +class TestWorkspacePlaceholderMissing(unittest.TestCase): + + 
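# [Editor's note, illustrative only] Summarizing the runner-dependent resolution
# exercised by the tests above:
#
#     SubprocessRunner: '{{$.workspace_path}}/data/x.txt' -> '<workspace_root>/data/x.txt'
#     DockerRunner:     '{{$.workspace_path}}/data/x.txt' -> dsl_constants.WORKSPACE_MOUNT_PATH + '/data/x.txt'
#
# A literal WORKSPACE_MOUNT_PATH prefix is likewise rewritten to the host workspace
# for SubprocessRunner but kept as-is for DockerRunner.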
def test_raises_when_workspace_not_configured(self): + # When placeholder or mount path appears and no workspace configured, raise + from unittest import mock + with mock.patch('kfp.local.config.LocalExecutionConfig.instance', None): + with self.assertRaises(RuntimeError): + placeholder_utils.resolve_individual_placeholder( + element='{{$.workspace_path}}/foo', + executor_input_dict={'outputs': { + 'outputFile': '/tmp/x' + }}, + pipeline_resource_name='p', + task_resource_name='t', + pipeline_root='/tmp/root', + pipeline_job_id='j', + pipeline_task_id='k') + + if __name__ == '__main__': unittest.main() diff --git a/sdk/python/kfp/local/subprocess_task_handler.py b/sdk/python/kfp/local/subprocess_task_handler.py index 63e150e85e0..83e249f766a 100644 --- a/sdk/python/kfp/local/subprocess_task_handler.py +++ b/sdk/python/kfp/local/subprocess_task_handler.py @@ -116,7 +116,8 @@ def run_local_subprocess(full_command: List[str]) -> int: def replace_python_executable(full_command: List[str], new_executable: str) -> List[str]: """Replaces the 'python3' string in each element of the full_command with - the new_executable. + the new_executable. It does not replace the python executable in the user + code. Args: full_command: Commands and args. @@ -125,7 +126,20 @@ def replace_python_executable(full_command: List[str], Returns: The updated commands and args. """ - return [el.replace('python3', f'{new_executable}') for el in full_command] + user_code_index = None + new_full_command = [] + + for i, el in enumerate(full_command): + if user_code_index is None and '"$program_path/ephemeral_component.py"' in el and '"$@"' in el: + user_code_index = i + 1 + + if i != user_code_index: + new_full_command.append(el.replace('python3', f'{new_executable}')) + else: + # It's important to skip the user code so we don't errantly replace the Jupyter Notebook kernel name. + new_full_command.append(el) + + return new_full_command @contextlib.contextmanager diff --git a/sdk/python/kfp/local/subprocess_task_handler_test.py b/sdk/python/kfp/local/subprocess_task_handler_test.py index 4a1d6b662b8..6ae8a36d3b2 100644 --- a/sdk/python/kfp/local/subprocess_task_handler_test.py +++ b/sdk/python/kfp/local/subprocess_task_handler_test.py @@ -13,7 +13,9 @@ # limitations under the License. """Tests for subprocess_local_task_handler.py.""" import contextlib +import functools import io +import os from typing import NamedTuple, Optional import unittest from unittest import mock @@ -26,6 +28,7 @@ from kfp.dsl import Output from kfp.local import subprocess_task_handler from kfp.local import testing_utilities +import pytest # NOTE: When testing SubprocessRunner, use_venv=True throughout to avoid # modifying current code under test. @@ -34,6 +37,26 @@ # impact of such an error we should not install into the main test process' # environment. 
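# [Editor's illustrative sketch, command strings abbreviated and hypothetical] The
# replace_python_executable() change above swaps the interpreter everywhere except in
# the element that follows the ephemeral-component wrapper, i.e. the user code:
#
#     cmd = [
#         'sh', '-c', 'python3 -m pip install kfp ...',                          # 'python3' replaced
#         'sh', '-ec', 'python3 "$program_path/ephemeral_component.py" "$@"',    # replaced; marks the next element as user code
#         "def comp(): ...  # user code; a 'python3' kernel name here is left untouched",
#     ]
#     new_cmd = subprocess_task_handler.replace_python_executable(cmd, '/venv/bin/python')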
+root_dir = os.path.dirname( + os.path.dirname( + os.path.dirname(os.path.dirname(os.path.dirname(__file__))))) +kfp_pipeline_spec_path = os.path.join(root_dir, 'api', 'v2alpha1', 'python') + + +@pytest.fixture(autouse=True) +def set_packages_for_test_classes(monkeypatch, request): + if request.cls.__name__ in { + 'TestSubprocessRunner', 'TestRunLocalSubproces', + 'TestUseCurrentPythonExecutable', 'TestUseVenv', + 'TestLightweightPythonComponentLogic' + }: + original_dsl_component = dsl.component + monkeypatch.setattr( + dsl, 'component', + functools.partial( + original_dsl_component, + packages_to_install=[kfp_pipeline_spec_path])) + class TestSubprocessRunner(testing_utilities.LocalRunnerEnvironmentTestCase): @@ -85,7 +108,7 @@ def comp(): def test_cannot_run_containerized_python_component(self): local.init(runner=local.SubprocessRunner(use_venv=True)) - @dsl.component(target_image='foo') + @dsl.component(target_image='foo', packages_to_install=[]) def comp(): pass @@ -138,7 +161,8 @@ class TestUseVenv(testing_utilities.LocalRunnerEnvironmentTestCase): def test_use_venv_true(self, **kwargs): local.init(**kwargs) - @dsl.component(packages_to_install=['cloudpickle']) + @dsl.component( + packages_to_install=[kfp_pipeline_spec_path, 'cloudpickle']) def installer_component(): import cloudpickle print('Cloudpickle is installed:', cloudpickle) diff --git a/sdk/python/kfp/local/task_dispatcher_test.py b/sdk/python/kfp/local/task_dispatcher_test.py index d8163aab89b..14ddee2ccdd 100755 --- a/sdk/python/kfp/local/task_dispatcher_test.py +++ b/sdk/python/kfp/local/task_dispatcher_test.py @@ -19,6 +19,7 @@ irrespective of the runner. While there will inevitably some overlap, we should seek to minimize it. """ +import functools import io import os import re @@ -32,6 +33,7 @@ from kfp.dsl import Model from kfp.dsl import Output from kfp.local import testing_utilities +import pytest # NOTE: uses SubprocessRunner throughout to test the taks dispatcher behavior # NOTE: When testing SubprocessRunner, use_venv=True throughout to avoid @@ -42,6 +44,26 @@ # environment. 
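# [Editor's note, illustrative only] The autouse fixtures added to these test modules
# effectively rewrite
#
#     @dsl.component
#     def comp(): ...
#
# into
#
#     @dsl.component(packages_to_install=[kfp_pipeline_spec_path])
#     def comp(): ...
#
# so that components executed in isolated environments presumably install the repo's
# local kfp-pipeline-spec package (api/v2alpha1/python) rather than a released wheel.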
+@pytest.fixture(autouse=True) +def set_packages_for_test_classes(monkeypatch, request): + if request.cls.__name__ in { + 'TestLocalExecutionValidation', 'TestSupportOfComponentTypes', + 'TestSupportOfComponentTypes', 'TestExceptionHandlingAndLogging', + 'TestPipelineRootPaths' + }: + root_dir = os.path.dirname( + os.path.dirname( + os.path.dirname(os.path.dirname(os.path.dirname(__file__))))) + kfp_pipeline_spec_path = os.path.join(root_dir, 'api', 'v2alpha1', + 'python') + original_dsl_component = dsl.component + monkeypatch.setattr( + dsl, 'component', + functools.partial( + original_dsl_component, + packages_to_install=[kfp_pipeline_spec_path])) + + class TestLocalExecutionValidation( testing_utilities.LocalRunnerEnvironmentTestCase): diff --git a/sdk/python/kfp/version.py b/sdk/python/kfp/version.py new file mode 100644 index 00000000000..5beb82ef89d --- /dev/null +++ b/sdk/python/kfp/version.py @@ -0,0 +1 @@ +__version__ = '2.14.3' diff --git a/sdk/python/requirements-dev.txt b/sdk/python/requirements-dev.txt index 9b18104a8a4..ce9cd60a0a5 100644 --- a/sdk/python/requirements-dev.txt +++ b/sdk/python/requirements-dev.txt @@ -3,6 +3,7 @@ docformatter==1.4 docker==5.0.3 isort==5.10.1 mypy==0.941 +nbformat==5.10.4 pip-tools==6.0.0 pre-commit==2.19.0 pycln==2.1.1 @@ -14,4 +15,4 @@ types-protobuf==3.19.15 types-PyYAML==6.0.5 types-requests==2.27.14 types-tabulate==0.8.6 -yapf==0.32.0 +yapf==0.43.0 diff --git a/sdk/python/requirements.in b/sdk/python/requirements.in index 01d43468545..9f669798dad 100644 --- a/sdk/python/requirements.in +++ b/sdk/python/requirements.in @@ -3,6 +3,7 @@ # pip-compile --no-emit-index-url requirements.in click==8.1.8 +click-option-group==0.5.7 docstring-parser>=0.7.3,<1 # Pin google-api-core version for the bug fixing in 1.31.5 # https://github.com/googleapis/python-api-core/releases/tag/v1.31.5 @@ -10,15 +11,19 @@ google-api-core>=1.31.5,<3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0 google-auth>=1.6.1,<3 # https://github.com/googleapis/python-storage/blob/main/CHANGELOG.md#221-2022-03-15 google-cloud-storage>=2.2.1,<4 -# pin kfp-pipeline-spec to an exact version, since this is the contract between a given KFP SDK version and the BE. we don't want old version of the SDK to write new fields and to have the BE reject the new unsupported field (even if the new field backward compatible from a proto perspective) -kfp-pipeline-spec==0.7.0 +# pin kfp-pipeline-spec to an exact version, since this is the contract between a given KFP +# SDK version and the BE. we don't want old version of the SDK to write new fields and to +# have the BE reject the new unsupported field (even if the new field backward compatible from a proto perspective) +kfp-pipeline-spec==2.14.0 # Update the upper version whenever a new major version of the # kfp-server-api package is released. 
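# [Editor's note, versions are examples tied to this change] Taken together, the pins
# here keep the SDK, the IR spec package, and the server API client in lockstep, e.g.:
#
#     pip install kfp==2.14.3 kfp-pipeline-spec==2.14.0 'kfp-server-api>=2.14.0,<3'
#
# where kfp==2.14.3 matches the new sdk/python/kfp/version.py above.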
# Update the lower version when kfp sdk depends on new apis/fields in # kfp-server-api -kfp-server-api>=2.1.0,<2.5.0 +kfp-server-api>=2.14.0,<3 kubernetes>=8.0.0,<31 -protobuf>=4.21.1,<5 +# protobuf version should be identical to the one in kfp-pipeline-spec +# api/v2alpha1/python/requirements.txt +protobuf==6.31.1,<7.0 PyYAML>=5.3,<7 requests-toolbelt>=0.8.0,<2 tabulate>=0.8.6,<1 diff --git a/sdk/python/requirements.txt b/sdk/python/requirements.txt index 122d13663dd..de5dde75163 100644 --- a/sdk/python/requirements.txt +++ b/sdk/python/requirements.txt @@ -6,7 +6,7 @@ # cachetools==5.5.2 # via google-auth -certifi==2025.4.26 +certifi==2025.8.3 # via # kfp-server-api # kubernetes @@ -14,15 +14,19 @@ certifi==2025.4.26 charset-normalizer==3.4.2 # via requests click==8.1.8 + # via + # -r requirements.in + # click-option-group +click-option-group==0.5.7 # via -r requirements.in -docstring-parser==0.16 +docstring-parser==0.17.0 # via -r requirements.in -google-api-core==2.24.2 +google-api-core==2.25.1 # via # -r requirements.in # google-cloud-core # google-cloud-storage -google-auth==2.40.2 +google-auth==2.40.3 # via # -r requirements.in # google-api-core @@ -31,7 +35,7 @@ google-auth==2.40.2 # kubernetes google-cloud-core==2.4.3 # via google-cloud-storage -google-cloud-storage==3.1.0 +google-cloud-storage==3.2.0 # via -r requirements.in google-crc32c==1.7.1 # via @@ -43,19 +47,19 @@ googleapis-common-protos==1.70.0 # via google-api-core idna==3.10 # via requests -kfp-pipeline-spec==0.7.0 +kfp-pipeline-spec==2.14.0 # via -r requirements.in -kfp-server-api==2.4.0 +kfp-server-api==2.14.0 # via -r requirements.in kubernetes==30.1.0 # via -r requirements.in -oauthlib==3.2.2 +oauthlib==3.3.1 # via # kubernetes # requests-oauthlib proto-plus==1.26.1 # via google-api-core -protobuf==4.25.8 +protobuf==6.31.1 # via # -r requirements.in # google-api-core @@ -76,7 +80,7 @@ pyyaml==6.0.2 # via # -r requirements.in # kubernetes -requests==2.32.3 +requests==2.32.4 # via # google-api-core # google-cloud-storage @@ -96,7 +100,7 @@ six==1.17.0 # python-dateutil tabulate==0.9.0 # via -r requirements.in -urllib3==2.4.0 +urllib3==2.5.0 # via # -r requirements.in # kfp-server-api diff --git a/sdk/python/setup.py b/sdk/python/setup.py index 080c713af0f..e61b9f78d01 100644 --- a/sdk/python/setup.py +++ b/sdk/python/setup.py @@ -54,12 +54,14 @@ def read_readme() -> str: return f.read() +_version = find_version('kfp', 'version.py') docker = ['docker'] -kubernetes = ['kfp-kubernetes<2'] +kubernetes = [f'kfp-kubernetes=={_version}'] +notebooks = ["nbclient>=0.10,<1", "ipykernel>=6,<7", "jupyter_client>=7,<9"] setuptools.setup( name='kfp', - version=find_version('kfp', '__init__.py'), + version=_version, description='Kubeflow Pipelines SDK', long_description=read_readme(), long_description_content_type='text/markdown', @@ -77,8 +79,9 @@ def read_readme() -> str: }, install_requires=get_requirements('requirements.in'), extras_require={ - 'all': docker + kubernetes, + 'all': docker + kubernetes + notebooks, 'kubernetes': kubernetes, + 'notebooks': notebooks, }, packages=setuptools.find_packages(exclude=['*test*']), classifiers=[ diff --git a/sdk/python/test/__init__.py b/sdk/python/test/__init__.py new file mode 100644 index 00000000000..3a85de0ba67 --- /dev/null +++ b/sdk/python/test/__init__.py @@ -0,0 +1,9 @@ +import sys + +from .test_utils.file_utils import FileUtils + +print(f"Adding test_data to the sys path") +# Adding test_data to the sys path for it to be available for import in test +# Since *test* is excluded 
in packaging (in setup.py), this will not part of the published code +sys.path.append(FileUtils.TEST_DATA) +sys.path.append(FileUtils.VALID_PIPELINE_FILES) \ No newline at end of file diff --git a/samples/contrib/ibm-samples/ffdl-seldon/source/seldon-pytorch-serving-image/model_files/__init__.py b/sdk/python/test/client/__init__.py similarity index 100% rename from samples/contrib/ibm-samples/ffdl-seldon/source/seldon-pytorch-serving-image/model_files/__init__.py rename to sdk/python/test/client/__init__.py diff --git a/sdk/python/test/client/client_test.py b/sdk/python/test/client/client_test.py new file mode 100644 index 00000000000..640a28f606e --- /dev/null +++ b/sdk/python/test/client/client_test.py @@ -0,0 +1,339 @@ +# Copyright 2022 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import os +import time + +import kfp +from kfp_server_api.models.v2beta1_experiment import V2beta1Experiment +from kfp_server_api.models.v2beta1_experiment_storage_state import \ + V2beta1ExperimentStorageState +from kfp_server_api.models.v2beta1_pipeline_version import \ + V2beta1PipelineVersion +import pytest + +from backend.api.v2beta1.python_http_client.kfp_server_api.models.v2beta1_pipeline import \ + V2beta1Pipeline +from test_data.sdk_compiled_pipelines.valid.sequential_v2 import sequential + +from ..test_utils.file_utils import FileUtils + +KFP_URL = os.getenv("apiUrl", "http://localhost:8888") +NAMESPACE: str = os.getenv("namespace", "kubeflow") +VERIFY_SSL: bool = bool(os.getenv("verifySSL", "False")) +AUTH_TOKEN: str = os.getenv("authToken", None) +SSL_CA_CERT: str = os.getenv("sslCertPath", None) + + +@pytest.mark.client +class TestClient: + + hello_world_pipeline_file = f'{FileUtils.VALID_PIPELINE_FILES}/hello-world.yaml' + kfp_client: kfp.Client + + @pytest.fixture(autouse=True) + def setup_and_teardown(self): + self.created_pipelines: list[str] = list() + self.created_experiments: list[str] = list() + self.created_runs: list[str] = list() + self.created_recurring_runs: list[str] = list() + self.kfp_client = kfp.Client( + host=KFP_URL, + namespace=NAMESPACE, + verify_ssl=VERIFY_SSL, + existing_token=AUTH_TOKEN, + ssl_ca_cert=SSL_CA_CERT) + yield + for run_id in self.created_runs: + print(f'Deleting run with id={run_id}') + self.kfp_client.terminate_run(run_id) + self.kfp_client.archive_run(run_id) + self.kfp_client.delete_run(run_id) + for recurring_run_id in self.created_recurring_runs: + print(f'Deleting recurring run with id={recurring_run_id}') + self.kfp_client.disable_recurring_run(recurring_run_id) + self.kfp_client.delete_recurring_run(recurring_run_id) + for experiment_id in self.created_experiments: + print(f'Deleting experiment with id={experiment_id}') + self.kfp_client.archive_experiment(experiment_id) + self.kfp_client.delete_experiment(experiment_id) + for pipeline_id in self.created_pipelines: + print(f'Deleting pipeline with id={pipeline_id}') + pipeline = self.kfp_client.get_pipeline(pipeline_id=pipeline_id) + print(pipeline) + if pipeline is not None: + 
pipeline_versions: list[ + V2beta1PipelineVersion] = self.kfp_client.list_pipeline_versions( + pipeline_id=pipeline_id, + page_size=50, + ).pipeline_versions + for pipeline_version in pipeline_versions: + self.kfp_client.delete_pipeline_version( + pipeline_id=pipeline_id, + pipeline_version_id=pipeline_version.pipeline_version_id + ) + self.kfp_client.delete_pipeline(pipeline_id) + + def test_upload_pipeline(self): + pipeline_name = f"hello-world-{self.get_current_time()}" + pipeline_desc = "Test Hello World Pipeline" + pipeline = self.kfp_client.upload_pipeline( + pipeline_package_path=self.hello_world_pipeline_file, + pipeline_name=pipeline_name, + description=pipeline_desc) + self.created_pipelines.append(pipeline.pipeline_id) + assert pipeline.pipeline_id is not None, "Failed to upload pipeline" + assert pipeline.name == pipeline_name, "Created Pipeline does not have the expected name" + assert pipeline.description == pipeline_desc, "Description not same" + + def test_get_pipeline(self): + pipeline_name = f"hello-world-{self.get_current_time()}" + pipeline_desc = "Test Hello World Pipeline" + created_pipeline = self.kfp_client.upload_pipeline( + pipeline_package_path=self.hello_world_pipeline_file, + pipeline_name=pipeline_name, + description=pipeline_desc) + self.created_pipelines.append(created_pipeline.pipeline_id) + pipeline = self.kfp_client.get_pipeline(created_pipeline.pipeline_id) + assert pipeline.pipeline_id == created_pipeline.pipeline_id, "Created pipeline not found in the DB" + + def test_list_pipelines(self): + pipeline_name = f"hello-world-{self.get_current_time()}" + pipeline_desc = "Test Hello World Pipeline" + pipeline = self.kfp_client.upload_pipeline( + pipeline_package_path=self.hello_world_pipeline_file, + pipeline_name=pipeline_name, + description=pipeline_desc) + self.created_pipelines.append(pipeline.pipeline_id) + pipelines: list[V2beta1Pipeline] = self.kfp_client.list_pipelines( + page_size=50, sort_by='created_at desc').pipelines + pipeline_exist = False + for pipe in pipelines: + if pipe.pipeline_id == pipeline.pipeline_id: + pipeline_exist = True + break + assert pipeline_exist, "Created pipeline not found in the DB" + + def test_list_pipeline_versions(self): + pipeline_name = f"hello-world-{self.get_current_time()}" + pipeline = self.kfp_client.upload_pipeline( + pipeline_package_path=self.hello_world_pipeline_file, + pipeline_name=pipeline_name, + description="Test Hello World Pipeline") + self.created_pipelines.append(pipeline.pipeline_id) + pipeline_versions = self.kfp_client.list_pipeline_versions( + pipeline.pipeline_id, + page_size=50, + sort_by='created_at desc', + ).pipeline_versions + assert len(pipeline_versions + ) > 0, "No pipeline versions available after pipeline upload" + + def test_create_experiment(self): + experiment_name = f"TestExperiment-{self.get_current_time()}" + experiment_desc = "Python Client Tests Experiment" + experiment = self.kfp_client.create_experiment( + name=experiment_name, description=experiment_desc) + self.created_experiments.append(experiment.experiment_id) + assert experiment.experiment_id is not None, "Failed to create experiment" + assert experiment.description == experiment_desc, "Description not same" + assert experiment.display_name == experiment_name, "Name not same" + + def test_get_experiment(self): + experiment_name = f"TestExperiment-{self.get_current_time()}" + experiment_desc = "Python Client Tests Experiment" + experiment = self.kfp_client.create_experiment( + name=experiment_name, 
description=experiment_desc) + self.created_experiments.append(experiment.experiment_id) + experiment_list: list[ + V2beta1Experiment] = self.kfp_client.list_experiments( + page_size=50).experiments + experiment_found = False + for exp in experiment_list: + if exp.experiment_id == experiment.experiment_id: + experiment_found = True + break + assert experiment_found, "Created experiment not found in the list of experiment" + + def test_archive_unarchive_experiment(self): + experiment_name = f"TestExperiment-{self.get_current_time()}" + experiment_desc = "Python Client Tests Experiment" + experiment = self.kfp_client.create_experiment( + name=experiment_name, description=experiment_desc) + self.created_experiments.append(experiment.experiment_id) + self.kfp_client.archive_experiment(experiment.experiment_id) + archived_experiment = self.kfp_client.get_experiment( + experiment_id=experiment.experiment_id) + assert archived_experiment.storage_state == V2beta1ExperimentStorageState.ARCHIVED, "Experiment not in archived state" + + self.kfp_client.unarchive_experiment(experiment.experiment_id) + archived_experiment = self.kfp_client.get_experiment( + experiment_id=experiment.experiment_id) + assert archived_experiment.storage_state == V2beta1ExperimentStorageState.AVAILABLE, "Experiment not Unarchived, its still in archived state" + + def test_create_run(self): + # Upload Pipeline + pipeline_name = f"hello-world-{self.get_current_time()}" + pipeline_desc = "Test Hello World Pipeline" + pipeline = self.kfp_client.upload_pipeline( + pipeline_package_path=self.hello_world_pipeline_file, + pipeline_name=pipeline_name, + description=pipeline_desc) + self.created_pipelines.append(pipeline.pipeline_id) + pipeline_version = self.kfp_client.list_pipeline_versions( + pipeline.pipeline_id).pipeline_versions[0] + + # Create Experiment + experiment_name = f"TestExperiment-{self.get_current_time()}" + experiment_desc = "Python Client Tests Experiment" + experiment = self.kfp_client.create_experiment( + name=experiment_name, description=experiment_desc) + self.created_experiments.append(experiment.experiment_id) + + # Create Run + run_name = f"TestRun-{self.get_current_time()}" + run = self.kfp_client.run_pipeline( + experiment_id=experiment.experiment_id, + pipeline_id=pipeline.pipeline_id, + version_id=pipeline_version.pipeline_version_id, + job_name=run_name) + self.created_runs.append(run.run_id) + assert run.run_id is not None, "Run not created" + assert run.display_name == run_name, "Run Name not same" + + def test_create_run_from_pipeline_package(self): + # Create Experiment + experiment_name = f"TestExperiment-{self.get_current_time()}" + experiment_desc = "Python Client Tests Experiment" + experiment = self.kfp_client.create_experiment( + name=experiment_name, description=experiment_desc) + self.created_experiments.append(experiment.experiment_id) + + # Create Run + run_name = f"TestRun-{self.get_current_time()}" + run = self.kfp_client.create_run_from_pipeline_package( + pipeline_file=self.hello_world_pipeline_file, + experiment_id=experiment.experiment_id, + run_name=run_name) + self.created_runs.append(run.run_id) + assert run.run_id is not None, "Run not created" + assert run.run_info.display_name == run_name, "Run Name not same" + + def test_get_run(self): + # Create Experiment + experiment_name = f"TestExperiment-{self.get_current_time()}" + experiment_desc = "Python Client Tests Experiment" + experiment = self.kfp_client.create_experiment( + name=experiment_name, description=experiment_desc) + 
self.created_experiments.append(experiment.experiment_id) + + # Create Run + run_name = f"TestRun-{self.get_current_time()}" + created_run = self.kfp_client.create_run_from_pipeline_package( + pipeline_file=self.hello_world_pipeline_file, + experiment_id=experiment.experiment_id, + run_name=run_name) + self.created_runs.append(created_run.run_id) + + # Get Run + run = self.kfp_client.get_run(created_run.run_id) + assert run.display_name == created_run.run_info.display_name, "Run name not same in the DB" + + def test_list_runs(self): + # Create Experiment + experiment_name = f"TestExperiment-" + experiment_desc = "Python Client Tests Experiment" + experiment = self.kfp_client.create_experiment( + name=experiment_name, description=experiment_desc) + self.created_experiments.append(experiment.experiment_id) + + # Create Run + run_name = f"TestRun-{self.get_current_time()}" + created_run = self.kfp_client.create_run_from_pipeline_package( + pipeline_file=self.hello_world_pipeline_file, + experiment_id=experiment.experiment_id, + run_name=run_name) + self.created_runs.append(created_run.run_id) + + # List Runs + runs = self.kfp_client.list_runs( + page_size=50, experiment_id=experiment.experiment_id).runs + run_created = False + for run in runs: + if run.run_id == created_run.run_id: + run_created = True + break + assert run_created, "Run not found in the DB" + + def test_create_run_from_pipeline_func(self): + # Create Experiment + experiment_name = f"TestExperiment-{self.get_current_time()}" + experiment_desc = "Python Client Tests Experiment" + experiment = self.kfp_client.create_experiment( + name=experiment_name, description=experiment_desc) + self.created_experiments.append(experiment.experiment_id) + + # Create Run + run_name = f"TestRun-{self.get_current_time()}" + run = self.kfp_client.create_run_from_pipeline_func( + pipeline_func=sequential, + arguments={'url': 'gs://sample-data/test.txt'}, + experiment_id=experiment.experiment_id, + run_name=run_name) + self.created_runs.append(run.run_id) + assert run.run_id is not None, "Run not created" + assert run.run_info.display_name == run_name, "Run Name not same" + + def test_create_scheduled_run(self): + # Create Experiment + experiment_name = f"TestExperiment-{self.get_current_time()}" + experiment_desc = "Python Client Tests Experiment" + experiment = self.kfp_client.create_experiment( + name=experiment_name, description=experiment_desc) + self.created_experiments.append(experiment.experiment_id) + + # Create Recurring Run + run_name = f"TestRecurringRun-{self.get_current_time()}" + recurring_run = self.kfp_client.create_recurring_run( + experiment_id=experiment.experiment_id, + pipeline_package_path=self.hello_world_pipeline_file, + job_name=run_name, + interval_second=300) + self.created_recurring_runs.append(recurring_run.recurring_run_id) + assert recurring_run.recurring_run_id is not None, "Run not created" + assert recurring_run.display_name == run_name, "Run Name not same" + + def test_get_scheduled_run(self): + # Create Experiment + experiment_name = f"TestExperiment-{self.get_current_time()}" + experiment_desc = "Python Client Tests Experiment" + experiment = self.kfp_client.create_experiment( + name=experiment_name, description=experiment_desc) + self.created_experiments.append(experiment.experiment_id) + + # Create Recurring Run + run_name = f"TestRecurringRun-{self.get_current_time()}" + recurring_run = self.kfp_client.create_recurring_run( + experiment_id=experiment.experiment_id, + 
pipeline_package_path=self.hello_world_pipeline_file, + job_name=run_name, + interval_second=300) + self.created_recurring_runs.append(recurring_run.recurring_run_id) + + # Get Recurring Run + created_recurring_run = self.kfp_client.get_recurring_run( + recurring_run.recurring_run_id) + assert created_recurring_run.display_name == recurring_run.display_name, "Run name not same in the DB" + + def get_current_time(self) -> str: + return str(int(time.time() * 10000)) diff --git a/samples/contrib/pytorch-samples/utils/__init__.py b/sdk/python/test/compilation/__init__.py similarity index 100% rename from samples/contrib/pytorch-samples/utils/__init__.py rename to sdk/python/test/compilation/__init__.py diff --git a/sdk/python/test/compilation/pipeline_compilation_test.py b/sdk/python/test/compilation/pipeline_compilation_test.py new file mode 100644 index 00000000000..d7779637387 --- /dev/null +++ b/sdk/python/test/compilation/pipeline_compilation_test.py @@ -0,0 +1,1148 @@ +# Copyright 2022 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from dataclasses import dataclass +import os.path +import tempfile +from typing import Callable, Optional + +from kfp.compiler import Compiler +import pytest + +from test_data.sdk_compiled_pipelines.valid.arguments_parameters import \ + echo as arguments_parameters_echo +from test_data.sdk_compiled_pipelines.valid.artifacts_complex import \ + math_pipeline as artifacts_complex_pipeline +from test_data.sdk_compiled_pipelines.valid.artifacts_simple import \ + math_pipeline as artifacts_simple_pipeline +from test_data.sdk_compiled_pipelines.valid.collected_artifacts import \ + collected_artifact_pipeline +from test_data.sdk_compiled_pipelines.valid.component_with_metadata_fields import \ + dataset_joiner +from test_data.sdk_compiled_pipelines.valid.component_with_task_final_status import \ + exit_comp as task_final_status_pipeline +from test_data.sdk_compiled_pipelines.valid.components_with_optional_artifacts import \ + pipeline as optional_artifacts_pipeline +from test_data.sdk_compiled_pipelines.valid.conditional_producer_and_consumers import \ + math_pipeline as conditional_producer_consumers_pipeline +from test_data.sdk_compiled_pipelines.valid.container_io import container_io +from test_data.sdk_compiled_pipelines.valid.container_with_artifact_output import \ + container_with_artifact_output +from test_data.sdk_compiled_pipelines.valid.container_with_concat_placeholder import \ + container_with_concat_placeholder +from test_data.sdk_compiled_pipelines.valid.container_with_if_placeholder import \ + container_with_if_placeholder +from test_data.sdk_compiled_pipelines.valid.container_with_if_placeholder import \ + container_with_if_placeholder as pipeline_with_if_placeholder_pipeline +from test_data.sdk_compiled_pipelines.valid.container_with_placeholder_in_fstring import \ + container_with_placeholder_in_fstring +from test_data.sdk_compiled_pipelines.valid.containerized_python_component import \ + concat_message as 
containerized_concat_message +from test_data.sdk_compiled_pipelines.valid.create_pod_metadata_complex import \ + pipeline_with_pod_metadata as create_pod_metadata_complex +from test_data.sdk_compiled_pipelines.valid.critical.add_numbers import \ + add_numbers +from test_data.sdk_compiled_pipelines.valid.critical.artifact_cache import \ + crust as artifact_cache_pipeline +from test_data.sdk_compiled_pipelines.valid.critical.artifact_crust import \ + crust as artifact_crust_pipeline +from test_data.sdk_compiled_pipelines.valid.critical.collected_parameters import \ + collected_param_pipeline +from test_data.sdk_compiled_pipelines.valid.critical.component_with_optional_inputs import \ + pipeline +from test_data.sdk_compiled_pipelines.valid.critical.container_component_with_no_inputs import \ + pipeline as container_no_inputs_pipeline +from test_data.sdk_compiled_pipelines.valid.critical.flip_coin import \ + flipcoin_pipeline as flip_coin +from test_data.sdk_compiled_pipelines.valid.critical.loop_consume_upstream import \ + loop_consume_upstream +from test_data.sdk_compiled_pipelines.valid.critical.mixed_parameters import \ + crust as mixed_parameters_pipeline +from test_data.sdk_compiled_pipelines.valid.critical.modelcar.modelcar import \ + pipeline_modelcar_test +from test_data.sdk_compiled_pipelines.valid.critical.multiple_artifacts_namedtuple import \ + crust as multiple_artifacts_namedtuple_pipeline +from test_data.sdk_compiled_pipelines.valid.critical.multiple_parameters_namedtuple import \ + crust as multiple_params_namedtuple_pipeline +from test_data.sdk_compiled_pipelines.valid.critical.nested_pipeline_opt_input_child_level import \ + nested_pipeline_opt_input_child_level +from test_data.sdk_compiled_pipelines.valid.critical.nested_pipeline_opt_inputs_nil import \ + nested_pipeline_opt_inputs_nil +from test_data.sdk_compiled_pipelines.valid.critical.nested_pipeline_opt_inputs_parent_level import \ + nested_pipeline_opt_inputs_parent_level +from test_data.sdk_compiled_pipelines.valid.critical.parallel_for_after_dependency import \ + loop_with_after_dependency_set +from test_data.sdk_compiled_pipelines.valid.critical.parameter_cache import \ + crust as parameter_cache_pipeline +from test_data.sdk_compiled_pipelines.valid.critical.parameter_oneof import \ + crust as parameter_oneof_pipeline +from test_data.sdk_compiled_pipelines.valid.critical.parameters_simple import \ + math_pipeline as parameters_simple_pipeline +from test_data.sdk_compiled_pipelines.valid.critical.pipeline_with_artifact_upload_download import \ + my_pipeline as artifact_upload_download_pipeline +from test_data.sdk_compiled_pipelines.valid.critical.pipeline_with_env import \ + my_pipeline as env_pipeline +from test_data.sdk_compiled_pipelines.valid.critical.pipeline_with_input_status_state import \ + status_state_pipeline +from test_data.sdk_compiled_pipelines.valid.critical.pipeline_with_placeholders import \ + pipeline_with_placeholders +from test_data.sdk_compiled_pipelines.valid.critical.pipeline_with_pod_metadata import \ + pipeline_with_pod_metadata +from test_data.sdk_compiled_pipelines.valid.critical.pipeline_with_secret_as_env import \ + pipeline_secret_env +from test_data.sdk_compiled_pipelines.valid.critical.pipeline_with_workspace import \ + pipeline_with_workspace +from test_data.sdk_compiled_pipelines.valid.critical.producer_consumer_param import \ + producer_consumer_param_pipeline +from test_data.sdk_compiled_pipelines.valid.critical.pythonic_artifacts_test_pipeline import \ + 
pythonic_artifacts_test_pipeline +from test_data.sdk_compiled_pipelines.valid.critical.two_step_pipeline_containerized import \ + my_pipeline as two_step_containerized_pipeline +from test_data.sdk_compiled_pipelines.valid.cross_loop_after_topology import \ + my_pipeline as cross_loop_after_topology_pipeline +from test_data.sdk_compiled_pipelines.valid.dict_input import dict_input +from test_data.sdk_compiled_pipelines.valid.env_var import test_env_exists +from test_data.sdk_compiled_pipelines.valid.essential.component_with_pip_index_urls import \ + pipeline as pip_index_urls_pipeline +from test_data.sdk_compiled_pipelines.valid.essential.component_with_pip_install import \ + component_with_pip_install as pip_install_pipeline +from test_data.sdk_compiled_pipelines.valid.essential.component_with_pip_install_in_venv import \ + component_with_pip_install as pip_install_venv_pipeline +from test_data.sdk_compiled_pipelines.valid.essential.concat_message import \ + concat_message +from test_data.sdk_compiled_pipelines.valid.essential.container_no_input import \ + container_no_input +from test_data.sdk_compiled_pipelines.valid.essential.lightweight_python_functions_pipeline import \ + pipeline as lightweight_python_pipeline +from test_data.sdk_compiled_pipelines.valid.essential.lightweight_python_functions_with_outputs import \ + pipeline as lightweight_python_with_outputs_pipeline +from test_data.sdk_compiled_pipelines.valid.essential.pipeline_in_pipeline import \ + my_pipeline as pipeline_in_pipeline +from test_data.sdk_compiled_pipelines.valid.essential.pipeline_in_pipeline_complex import \ + my_pipeline as pipeline_in_pipeline_complex +from test_data.sdk_compiled_pipelines.valid.essential.pipeline_in_pipeline_loaded_from_yaml import \ + my_pipeline as pipeline_in_pipeline_loaded_from_yaml +from test_data.sdk_compiled_pipelines.valid.essential.pipeline_with_after import \ + my_pipeline as after_pipeline +from test_data.sdk_compiled_pipelines.valid.essential.pipeline_with_condition import \ + my_pipeline as condition_pipeline +from test_data.sdk_compiled_pipelines.valid.essential.pipeline_with_if_placeholder import \ + pipeline_none +from test_data.sdk_compiled_pipelines.valid.essential.pipeline_with_loops import \ + my_pipeline as loops_pipeline +from test_data.sdk_compiled_pipelines.valid.essential.pipeline_with_metrics_outputs import \ + my_pipeline as metrics_outputs_pipeline +from test_data.sdk_compiled_pipelines.valid.essential.pipeline_with_nested_conditions import \ + my_pipeline as nested_conditions_pipeline +from test_data.sdk_compiled_pipelines.valid.essential.pipeline_with_nested_conditions_yaml import \ + my_pipeline as pipeline_with_nested_conditions_yaml +from test_data.sdk_compiled_pipelines.valid.essential.pipeline_with_outputs import \ + my_pipeline as outputs_pipeline +from test_data.sdk_compiled_pipelines.valid.essential.pipeline_with_params_containing_format import \ + my_pipeline as pipeline_with_params_containing_format +from test_data.sdk_compiled_pipelines.valid.essential.pipeline_with_reused_component import \ + my_pipeline as reused_component_pipeline +from test_data.sdk_compiled_pipelines.valid.failing.pipeline_with_exit_handler import \ + my_pipeline as exit_handler_pipeline +from test_data.sdk_compiled_pipelines.valid.failing.pipeline_with_multiple_exit_handlers import \ + my_pipeline as multiple_exit_handlers_pipeline +from test_data.sdk_compiled_pipelines.valid.hello_world import echo +from test_data.sdk_compiled_pipelines.valid.identity import identity +from 
test_data.sdk_compiled_pipelines.valid.if_elif_else_complex import \ + lucky_number_pipeline +from test_data.sdk_compiled_pipelines.valid.if_elif_else_with_oneof_parameters import \ + outer_pipeline as if_elif_else_oneof_params_pipeline +from test_data.sdk_compiled_pipelines.valid.if_else_with_oneof_artifacts import \ + outer_pipeline as if_else_oneof_artifacts_pipeline +from test_data.sdk_compiled_pipelines.valid.if_else_with_oneof_parameters import \ + flip_coin_pipeline as if_else_oneof_params_pipeline +from test_data.sdk_compiled_pipelines.valid.input_artifact import \ + input_artifact +from test_data.sdk_compiled_pipelines.valid.long_running import \ + wait_awhile as long_running_pipeline +from test_data.sdk_compiled_pipelines.valid.metrics_visualization_v2 import \ + metrics_visualization_pipeline +from test_data.sdk_compiled_pipelines.valid.nested_return import nested_return +from test_data.sdk_compiled_pipelines.valid.nested_with_parameters import \ + math_pipeline as nested_with_parameters_pipeline +from test_data.sdk_compiled_pipelines.valid.output_metrics import \ + output_metrics +from test_data.sdk_compiled_pipelines.valid.parameters_complex import \ + math_pipeline as parameters_complex_pipeline +from test_data.sdk_compiled_pipelines.valid.pipeline_as_exit_task import \ + my_pipeline as pipeline_as_exit_task +from test_data.sdk_compiled_pipelines.valid.pipeline_producer_consumer import \ + math_pipeline as producer_consumer_parallel_for_pipeline +from test_data.sdk_compiled_pipelines.valid.pipeline_with_concat_placeholder import \ + pipeline_with_concat_placeholder as \ + pipeline_with_concat_placeholder_pipeline +# Final batch of remaining missing pipeline imports +from test_data.sdk_compiled_pipelines.valid.pipeline_with_condition_dynamic_task_output_custom_training_job import \ + pipeline_with_dynamic_condition_output +from test_data.sdk_compiled_pipelines.valid.pipeline_with_dynamic_importer_metadata import \ + my_pipeline as pipeline_with_dynamic_importer_metadata +from test_data.sdk_compiled_pipelines.valid.pipeline_with_google_artifact_type import \ + my_pipeline as pipeline_with_google_artifact_type +from test_data.sdk_compiled_pipelines.valid.pipeline_with_importer import \ + my_pipeline as importer_pipeline +from test_data.sdk_compiled_pipelines.valid.pipeline_with_importer_and_gcpc_types import \ + my_pipeline as pipeline_with_importer_and_gcpc_types +from test_data.sdk_compiled_pipelines.valid.pipeline_with_loops_and_conditions import \ + my_pipeline as loops_and_conditions_pipeline +from test_data.sdk_compiled_pipelines.valid.pipeline_with_metadata_fields import \ + dataset_concatenator as pipeline_with_metadata_fields +from test_data.sdk_compiled_pipelines.valid.pipeline_with_nested_loops import \ + my_pipeline as nested_loops_pipeline +from test_data.sdk_compiled_pipelines.valid.pipeline_with_parallelfor_list_artifacts import \ + my_pipeline as pipeline_with_parallelfor_list_artifacts +from test_data.sdk_compiled_pipelines.valid.pipeline_with_parallelfor_parallelism import \ + my_pipeline as pipeline_with_parallelfor_parallelism +from test_data.sdk_compiled_pipelines.valid.pipeline_with_retry import \ + my_pipeline as retry_pipeline +from test_data.sdk_compiled_pipelines.valid.pipeline_with_secret_as_volume import \ + pipeline_secret_volume +from test_data.sdk_compiled_pipelines.valid.pipeline_with_semphore import \ + pipeline_with_semaphore +from test_data.sdk_compiled_pipelines.valid.pipeline_with_string_machine_fields_pipeline_input import \ + pipeline as 
pipeline_with_string_machine_fields_pipeline_input +from test_data.sdk_compiled_pipelines.valid.pipeline_with_string_machine_fields_task_output import \ + pipeline as pipeline_with_string_machine_fields_task_output +from test_data.sdk_compiled_pipelines.valid.pipeline_with_task_final_status import \ + my_pipeline as pipeline_with_task_final_status +from test_data.sdk_compiled_pipelines.valid.pipeline_with_task_using_ignore_upstream_failure import \ + my_pipeline as pipeline_with_task_using_ignore_upstream_failure +from test_data.sdk_compiled_pipelines.valid.pipeline_with_utils import \ + pipeline_with_utils +from test_data.sdk_compiled_pipelines.valid.pipeline_with_various_io_types import \ + my_pipeline as various_io_types_pipeline +from test_data.sdk_compiled_pipelines.valid.pipeline_with_volume import \ + pipeline_with_volume +from test_data.sdk_compiled_pipelines.valid.pipeline_with_volume_no_cache import \ + pipeline_with_volume_no_cache +from test_data.sdk_compiled_pipelines.valid.preprocess import preprocess +from test_data.sdk_compiled_pipelines.valid.pythonic_artifact_with_single_return import \ + make_language_model_pipeline as pythonic_artifact_with_single_return +from test_data.sdk_compiled_pipelines.valid.pythonic_artifacts_with_list_of_artifacts import \ + make_and_join_datasets as pythonic_artifacts_with_list_of_artifacts +from test_data.sdk_compiled_pipelines.valid.pythonic_artifacts_with_multiple_returns import \ + split_datasets_and_return_first as pythonic_artifacts_multiple_returns +from test_data.sdk_compiled_pipelines.valid.sequential_v2 import sequential +from test_data.sdk_compiled_pipelines.valid.two_step_pipeline import \ + my_pipeline as two_step_pipeline +from test_data.sdk_compiled_pipelines.valid.xgboost_sample_pipeline import \ + xgboost_pipeline + +from ..test_utils.comparison_utils import ComparisonUtils +from ..test_utils.file_utils import FileUtils + + +@pytest.mark.compilation +@pytest.mark.regression +class TestPipelineCompilation: + _VALID_PIPELINE_FILES = FileUtils.VALID_PIPELINE_FILES + + @dataclass + class TestData: + pipeline_name: str + pipeline_func: Callable + pipline_func_args: Optional[dict] + compiled_file_name: str + expected_compiled_file_path: str + + def __str__(self) -> str: + return (f"Compilation Data: name={self.pipeline_name} " + f"compiled_file_name={self.compiled_file_name} " + f"expected_file={self.expected_compiled_file_path}") + + def __repr__(self) -> str: + return self.__str__() + + @pytest.mark.parametrize( + 'pipeline_data', [ + TestData( + pipeline_name='add-numbers', + pipeline_func=add_numbers, + pipline_func_args=None, + compiled_file_name='add_numbers.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/add_numbers.yaml' + ), + TestData( + pipeline_name='hello-world', + pipeline_func=echo, + pipline_func_args=None, + compiled_file_name='hello_world.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/hello-world.yaml' + ), + TestData( + pipeline_name='simple-two-step-pipeline', + pipeline_func=two_step_pipeline, + pipline_func_args={'text': 'Hello KFP!'}, + compiled_file_name='two_step_pipeline.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/two_step_pipeline.yaml' + ), + TestData( + pipeline_name='single-condition-pipeline', + pipeline_func=condition_pipeline, + pipline_func_args=None, + compiled_file_name='condition_pipeline.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/pipeline_with_condition.yaml' + ), + TestData( + 
pipeline_name='pipeline-with-loops', + pipeline_func=loops_pipeline, + pipline_func_args={'loop_parameter': ['item1', 'item2']}, + compiled_file_name='loops_pipeline.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/pipeline_with_loops.yaml' + ), + TestData( + pipeline_name='pipeline-with-outputs', + pipeline_func=outputs_pipeline, + pipline_func_args=None, + compiled_file_name='outputs_pipeline.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/pipeline_with_outputs.yaml' + ), + TestData( + pipeline_name='collected-param-pipeline', + pipeline_func=collected_param_pipeline, + pipline_func_args=None, + compiled_file_name='collected_parameters.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/collected_parameters.yaml' + ), + TestData( + pipeline_name='component-optional-input', + pipeline_func=pipeline, + pipline_func_args=None, + compiled_file_name='component_with_optional_inputs.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/component_with_optional_inputs.yaml' + ), + TestData( + pipeline_name='mixed_parameters-pipeline', + pipeline_func=mixed_parameters_pipeline, + pipline_func_args=None, + compiled_file_name='mixed_parameters.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/mixed_parameters.yaml' + ), + TestData( + pipeline_name='producer-consumer-param-pipeline', + pipeline_func=producer_consumer_param_pipeline, + pipline_func_args=None, + compiled_file_name='producer_consumer_param_pipeline.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/producer_consumer_param_pipeline.yaml' + ), + TestData( + pipeline_name='parameter_cache-pipeline', + pipeline_func=parameter_cache_pipeline, + pipline_func_args=None, + compiled_file_name='parameter_cache.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/parameter_cache.yaml' + ), + TestData( + pipeline_name='parameter_oneof-pipeline', + pipeline_func=parameter_oneof_pipeline, + pipline_func_args=None, + compiled_file_name='parameter_oneof.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/parameter_oneof.yaml' + ), + TestData( + pipeline_name='pipeline-with-env', + pipeline_func=env_pipeline, + pipline_func_args=None, + compiled_file_name='pipeline_with_env.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/pipeline_with_env.yaml' + ), + TestData( + pipeline_name='test-pipeline', + pipeline_func=retry_pipeline, + pipline_func_args=None, + compiled_file_name='pipeline_with_retry.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/pipeline_with_retry.yaml' + ), + TestData( + pipeline_name='loop-with-after-dependency-set', + pipeline_func=loop_with_after_dependency_set, + pipline_func_args=None, + compiled_file_name='parallel_for_after_dependency.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/parallel_for_after_dependency.yaml' + ), + TestData( + pipeline_name='multiple_parameters_namedtuple-pipeline', + pipeline_func=multiple_params_namedtuple_pipeline, + pipline_func_args=None, + compiled_file_name='multiple_parameters_namedtuple.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/multiple_parameters_namedtuple.yaml' + ), + TestData( + pipeline_name='multiple_artifacts_namedtuple-pipeline', + pipeline_func=multiple_artifacts_namedtuple_pipeline, + pipline_func_args=None, + compiled_file_name='multiple_artifacts_namedtuple.yaml', + 
expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/multiple_artifacts_namedtuple.yaml' + ), + TestData( + pipeline_name='pipeline-with-modelcar-model', + pipeline_func=pipeline_modelcar_test, + pipline_func_args=None, + compiled_file_name='modelcar.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/modelcar.yaml' + ), + TestData( + pipeline_name='artifact_cache-pipeline', + pipeline_func=artifact_cache_pipeline, + pipline_func_args=None, + compiled_file_name='artifact_cache.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/artifact_cache.yaml' + ), + TestData( + pipeline_name='artifact_crust-pipeline', + pipeline_func=artifact_crust_pipeline, + pipline_func_args=None, + compiled_file_name='artifact_crust.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/artifact_crust.yaml' + ), + TestData( + pipeline_name='v2-container-component-no-input', + pipeline_func=container_no_inputs_pipeline, + pipline_func_args=None, + compiled_file_name='container_component_with_no_inputs.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/container_component_with_no_inputs.yaml' + ), + TestData( + pipeline_name='loop-consume-upstream', + pipeline_func=loop_consume_upstream, + pipline_func_args=None, + compiled_file_name='loop_consume_upstream.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/loop_consume_upstream.yaml' + ), + TestData( + pipeline_name='math-pipeline', + pipeline_func=parameters_simple_pipeline, + pipline_func_args=None, + compiled_file_name='parameters_simple.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/parameters_simple.yaml' + ), + TestData( + pipeline_name='pipeline-with-datasets', + pipeline_func=artifact_upload_download_pipeline, + pipline_func_args=None, + compiled_file_name='pipeline_with_artifact_upload_download.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/pipeline_with_artifact_upload_download.yaml' + ), + TestData( + pipeline_name='status-state-pipeline', + pipeline_func=status_state_pipeline, + pipline_func_args=None, + compiled_file_name='pipeline_with_input_status_state.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/pipeline_with_input_status_state.yaml' + ), + TestData( + pipeline_name='pipeline-with-placeholders', + pipeline_func=pipeline_with_placeholders, + pipline_func_args=None, + compiled_file_name='pipeline_with_placeholders.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/pipeline_with_placeholders.yaml' + ), + TestData( + pipeline_name='pipeline-with-pod-metadata', + pipeline_func=pipeline_with_pod_metadata, + pipline_func_args=None, + compiled_file_name='pipeline_with_pod_metadata.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/pipeline_with_pod_metadata.yaml' + ), + TestData( + pipeline_name='pipeline-secret-env', + pipeline_func=pipeline_secret_env, + pipline_func_args=None, + compiled_file_name='pipeline_with_secret_as_env.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/pipeline_with_secret_as_env.yaml' + ), + TestData( + pipeline_name='pipeline-with-workspace', + pipeline_func=pipeline_with_workspace, + pipline_func_args=None, + compiled_file_name='pipeline_with_workspace.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/pipeline_with_workspace.yaml' + ), + TestData( + pipeline_name='containerized-two-step-pipeline', + pipeline_func=two_step_containerized_pipeline, + 
pipline_func_args={'text': 'Hello KFP Containerized!'}, + compiled_file_name='two_step_pipeline_containerized.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/two_step_pipeline_containerized.yaml' + ), + TestData( + pipeline_name='nested-pipeline-opt-input-child-level', + pipeline_func=nested_pipeline_opt_input_child_level, + pipline_func_args=None, + compiled_file_name='nested_pipeline_opt_input_child_level.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/nested_pipeline_opt_input_child_level_compiled.yaml' + ), + TestData( + pipeline_name='split-datasets-and-return-first', + pipeline_func=pythonic_artifacts_test_pipeline, + pipline_func_args=None, + compiled_file_name='pythonic_artifacts_test_pipeline.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/pythonic_artifacts_test_pipeline.yaml' + ), + TestData( + pipeline_name='optional-artifact-pipeline', + pipeline_func=optional_artifacts_pipeline, + pipline_func_args=None, + compiled_file_name='components_with_optional_artifacts.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/components_with_optional_artifacts.yaml' + ), + TestData( + pipeline_name='my-test-pipeline-beta', + pipeline_func=lightweight_python_pipeline, + pipline_func_args={ + 'message': 'Hello KFP!', + 'input_dict': { + 'A': 1, + 'B': 2 + } + }, + compiled_file_name='lightweight_python_functions_pipeline.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/lightweight_python_functions_pipeline.yaml' + ), + TestData( + pipeline_name='xgboost-sample-pipeline', + pipeline_func=xgboost_pipeline, + pipline_func_args=None, + compiled_file_name='xgboost_sample_pipeline.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/xgboost_sample_pipeline.yaml' + ), + TestData( + pipeline_name='pipeline-with-after', + pipeline_func=after_pipeline, + pipline_func_args=None, + compiled_file_name='pipeline_with_after.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/pipeline_with_after.yaml' + ), + TestData( + pipeline_name='metrics-visualization-pipeline', + pipeline_func=metrics_visualization_pipeline, + pipline_func_args=None, + compiled_file_name='metrics_visualization_v2.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/metrics_visualization_v2.yaml' + ), + TestData( + pipeline_name='nested-conditions-pipeline', + pipeline_func=nested_conditions_pipeline, + pipline_func_args=None, + compiled_file_name='pipeline_with_nested_conditions.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/pipeline_with_nested_conditions.yaml' + ), + TestData( + pipeline_name='container-io', + pipeline_func=container_io, + pipline_func_args={'text': 'Hello Container!'}, + compiled_file_name='container_io.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/container_io.yaml' + ), + TestData( + pipeline_name='pipeline-with-exit-handler', + pipeline_func=exit_handler_pipeline, + pipline_func_args={'message': 'Hello Exit Handler!'}, + compiled_file_name='pipeline_with_exit_handler.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/failing/pipeline_with_exit_handler.yaml' + ), + TestData( + pipeline_name='pipeline-with-importer', + pipeline_func=importer_pipeline, + pipline_func_args={ + 'dataset2': 'gs://ml-pipeline-playground/shakespeare2.txt' + }, + compiled_file_name='pipeline_with_importer.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_importer.yaml' + ), + TestData( + 
pipeline_name='pipeline-with-nested-loops', + pipeline_func=nested_loops_pipeline, + pipline_func_args=None, + compiled_file_name='pipeline_with_nested_loops.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_nested_loops.yaml' + ), + TestData( + pipeline_name='concat-message', + pipeline_func=concat_message, + pipline_func_args={ + 'message1': 'Hello', + 'message2': ' World!' + }, + compiled_file_name='concat_message.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/concat_message.yaml' + ), + TestData( + pipeline_name='preprocess', + pipeline_func=preprocess, + pipline_func_args={ + 'message': 'test', + 'input_dict_parameter': { + 'A': 1 + }, + 'input_list_parameter': ['a', 'b'] + }, + compiled_file_name='preprocess.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/preprocess.yaml' + ), + TestData( + pipeline_name='sequential', + pipeline_func=sequential, + pipline_func_args={'url': 'gs://sample-data/test.txt'}, + compiled_file_name='sequential_v2.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/sequential_v2.yaml' + ), + TestData( + pipeline_name='math-pipeline', + pipeline_func=artifacts_simple_pipeline, + pipline_func_args=None, + compiled_file_name='artifacts_simple.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/artifacts_simple.yaml' + ), + TestData( + pipeline_name='pipeline-with-multiple-exit-handlers', + pipeline_func=multiple_exit_handlers_pipeline, + pipline_func_args=None, + compiled_file_name='pipeline_with_multiple_exit_handlers.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/failing/pipeline_with_multiple_exit_handlers.yaml' + ), + TestData( + pipeline_name='pipeline-with-reused-component', + pipeline_func=reused_component_pipeline, + pipline_func_args=None, + compiled_file_name='pipeline_with_reused_component.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/pipeline_with_reused_component.yaml' + ), + TestData( + pipeline_name='math-pipeline', + pipeline_func=artifacts_complex_pipeline, + pipline_func_args=None, + compiled_file_name='artifacts_complex.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/artifacts_complex.yaml' + ), + TestData( + pipeline_name='math-pipeline', + pipeline_func=conditional_producer_consumers_pipeline, + pipline_func_args={'threshold': 2}, + compiled_file_name='conditional_producer_and_consumers.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/conditional_producer_and_consumers.yaml' + ), + TestData( + pipeline_name='collected-artifact-pipeline', + pipeline_func=collected_artifact_pipeline, + pipline_func_args=None, + compiled_file_name='collected_artifacts.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/collected_artifacts.yaml' + ), + TestData( + pipeline_name='split-datasets-and-return-first', + pipeline_func=pythonic_artifacts_multiple_returns, + pipline_func_args=None, + compiled_file_name='pythonic_artifacts_with_multiple_returns.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pythonic_artifacts_with_multiple_returns.yaml' + ), + TestData( + pipeline_name='identity', + pipeline_func=identity, + pipline_func_args={'value': 'test'}, + compiled_file_name='identity.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/identity.yaml' + ), + TestData( + pipeline_name='input-artifact', + pipeline_func=input_artifact, + pipline_func_args=None, + compiled_file_name='input_artifact.yaml', + 
expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/input_artifact.yaml' + ), + TestData( + pipeline_name='nested-return', + pipeline_func=nested_return, + pipline_func_args=None, + compiled_file_name='nested_return.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/nested_return.yaml' + ), + TestData( + pipeline_name='pipeline-in-pipeline', + pipeline_func=pipeline_in_pipeline, + pipline_func_args=None, + compiled_file_name='pipeline_in_pipeline.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/pipeline_in_pipeline.yaml' + ), + TestData( + pipeline_name='container-with-concat-placeholder', + pipeline_func=container_with_concat_placeholder, + pipline_func_args={'text1': 'Hello'}, + compiled_file_name='container_with_concat_placeholder.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/container_with_concat_placeholder.yaml' + ), + TestData( + pipeline_name='my-test-pipeline-output', + pipeline_func=lightweight_python_with_outputs_pipeline, + pipline_func_args={ + 'first_message': 'Hello KFP!', + 'second_message': 'Welcome', + 'first_number': 3, + 'second_number': 4 + }, + compiled_file_name='lightweight_python_functions_with_outputs.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/lightweight_python_functions_with_outputs.yaml' + ), + TestData( + pipeline_name='output-metrics', + pipeline_func=output_metrics, + pipline_func_args=None, + compiled_file_name='output_metrics.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/output_metrics.yaml' + ), + TestData( + pipeline_name='dict-input', + pipeline_func=dict_input, + pipline_func_args={ + 'struct': { + 'key1': 'value1', + 'key2': 'value2' + } + }, + compiled_file_name='dict_input.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/dict_input.yaml' + ), + TestData( + pipeline_name='pipeline-with-volume', + pipeline_func=pipeline_with_volume, + pipline_func_args=None, + compiled_file_name='pipeline_with_volume.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_volume.yaml' + ), + TestData( + pipeline_name='pipeline-with-volume-no-cache', + pipeline_func=pipeline_with_volume_no_cache, + pipline_func_args=None, + compiled_file_name='pipeline_with_volume_no_cache.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_volume_no_cache.yaml' + ), + TestData( + pipeline_name='container-with-if-placeholder', + pipeline_func=container_with_if_placeholder, + pipline_func_args=None, + compiled_file_name='container_with_if_placeholder.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/container_with_if_placeholder.yaml' + ), + TestData( + pipeline_name='container-with-placeholder-in-fstring', + pipeline_func=container_with_placeholder_in_fstring, + pipline_func_args=None, + compiled_file_name='container_with_placeholder_in_fstring.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/container_with_placeholder_in_fstring.yaml' + ), + TestData( + pipeline_name='pipeline-in-pipeline-complex', + pipeline_func=pipeline_in_pipeline_complex, + pipline_func_args=None, + compiled_file_name='pipeline_in_pipeline_complex.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/pipeline_in_pipeline_complex.yaml' + ), + TestData( + pipeline_name='lucky-number-pipeline', + pipeline_func=lucky_number_pipeline, + pipline_func_args={ + 'add_drumroll': True, + 'repeat_if_lucky_number': True, + 'trials': [1, 2, 3] + }, + compiled_file_name='if_elif_else_complex.yaml', + 
expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/if_elif_else_complex.yaml' + ), + TestData( + pipeline_name='if-elif-else-with-oneof-parameters', + pipeline_func=if_elif_else_oneof_params_pipeline, + pipline_func_args=None, + compiled_file_name='if_elif_else_with_oneof_parameters.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/if_elif_else_with_oneof_parameters.yaml' + ), + TestData( + pipeline_name='if-else-with-oneof-parameters', + pipeline_func=if_else_oneof_params_pipeline, + pipline_func_args=None, + compiled_file_name='if_else_with_oneof_parameters.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/if_else_with_oneof_parameters.yaml' + ), + TestData( + pipeline_name='if-else-with-oneof-artifacts', + pipeline_func=if_else_oneof_artifacts_pipeline, + pipline_func_args=None, + compiled_file_name='if_else_with_oneof_artifacts.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/if_else_with_oneof_artifacts.yaml' + ), + TestData( + pipeline_name='pipeline-with-loops-and-conditions', + pipeline_func=loops_and_conditions_pipeline, + pipline_func_args=None, + compiled_file_name='pipeline_with_loops_and_conditions.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_loops_and_conditions.yaml' + ), + TestData( + pipeline_name='pipeline-with-various-io-types', + pipeline_func=various_io_types_pipeline, + pipline_func_args={ + 'input1': 'Hello', + 'input4': 'World' + }, + compiled_file_name='pipeline_with_various_io_types.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_various_io_types.yaml' + ), + TestData( + pipeline_name='pipeline-with-metrics-outputs', + pipeline_func=metrics_outputs_pipeline, + pipline_func_args=None, + compiled_file_name='pipeline_with_metrics_outputs.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/pipeline_with_metrics_outputs.yaml' + ), + TestData( + pipeline_name='containerized-concat-message', + pipeline_func=containerized_concat_message, + pipline_func_args={ + 'message1': 'Hello', + 'message2': ' Containerized!' 
+ }, + compiled_file_name='containerized_python_component.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/containerized_python_component.yaml' + ), + TestData( + pipeline_name='container-with-artifact-output', + pipeline_func=container_with_artifact_output, + pipline_func_args={'num_epochs': 10}, + compiled_file_name='container_with_artifact_output.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/container_with_artifact_output.yaml' + ), + TestData( + pipeline_name='math-pipeline', + pipeline_func=nested_with_parameters_pipeline, + pipline_func_args=None, + compiled_file_name='nested_with_parameters.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/nested_with_parameters.yaml' + ), + TestData( + pipeline_name='math-pipeline', + pipeline_func=parameters_complex_pipeline, + pipline_func_args=None, + compiled_file_name='parameters_complex.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/parameters_complex.yaml' + ), + TestData( + pipeline_name='dataset-joiner', + pipeline_func=dataset_joiner, + pipline_func_args=None, + compiled_file_name='component_with_metadata_fields.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/component_with_metadata_fields.yaml' + ), + TestData( + pipeline_name='my-test-pipeline', + pipeline_func=pip_index_urls_pipeline, + pipline_func_args=None, + compiled_file_name='component_with_pip_index_urls.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/component_with_pip_index_urls.yaml' + ), + TestData( + pipeline_name='flip-coin', + pipeline_func=flip_coin, + pipline_func_args=None, + compiled_file_name='flip_coin.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/flip_coin.yaml' + ), + TestData( + pipeline_name='component-with-pip-install-in-venv', + pipeline_func=pip_install_venv_pipeline, + pipline_func_args=None, + compiled_file_name='component_with_pip_install_in_venv.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/component_with_pip_install_in_venv.yaml' + ), + TestData( + pipeline_name='component-with-pip-install', + pipeline_func=pip_install_pipeline, + pipline_func_args=None, + compiled_file_name='component_with_pip_install.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/component_with_pip_install.yaml' + ), + TestData( + pipeline_name='component-with-task-final-status', + pipeline_func=task_final_status_pipeline, + pipline_func_args=None, + compiled_file_name='component_with_task_final_status_GH-12033.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/component_with_task_final_status_GH-12033.yaml' + ), + TestData( + pipeline_name='math-pipeline', + pipeline_func=producer_consumer_parallel_for_pipeline, + pipline_func_args=None, + compiled_file_name='pipeline_producer_consumer.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_producer_consumer.yaml' + ), + TestData( + pipeline_name='pipeline-with-secret-as-volume', + pipeline_func=pipeline_secret_volume, + pipline_func_args=None, + compiled_file_name='pipeline_with_secret_as_volume.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_secret_as_volume.yaml' + ), + TestData( + pipeline_name='pipeline-with-utils', + pipeline_func=pipeline_with_utils, + pipline_func_args=None, + compiled_file_name='pipeline_with_utils.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_utils.yaml' + ), + TestData( + pipeline_name='echo', + pipeline_func=arguments_parameters_echo, + 
pipline_func_args={ + 'param1': 'hello', + 'param2': 'world' + }, + compiled_file_name='arguments_parameters.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/arguments-parameters.yaml' + ), + TestData( + pipeline_name='container-no-input', + pipeline_func=container_no_input, + pipline_func_args=None, + compiled_file_name='container_no_input.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/container_no_input.yaml' + ), + TestData( + pipeline_name='test-env-exists', + pipeline_func=test_env_exists, + pipline_func_args={'env_var': 'HOME'}, + compiled_file_name='env_var.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/env-var.yaml' + ), + TestData( + pipeline_name='create-pod-metadata-complex', + pipeline_func=create_pod_metadata_complex, + pipline_func_args=None, + compiled_file_name='create_pod_metadata_complex.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/create_pod_metadata_complex.yaml' + ), + TestData( + pipeline_name='nested-pipeline-opt-inputs-nil', + pipeline_func=nested_pipeline_opt_inputs_nil, + pipline_func_args=None, + compiled_file_name='nested_pipeline_opt_inputs_nil.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/nested_pipeline_opt_inputs_nil_compiled.yaml' + ), + TestData( + pipeline_name='nested-pipeline-opt-inputs-parent-level', + pipeline_func=nested_pipeline_opt_inputs_parent_level, + pipline_func_args=None, + compiled_file_name='nested_pipeline_opt_inputs_parent_level.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/critical/nested_pipeline_opt_inputs_parent_level_compiled.yaml' + ), + TestData( + pipeline_name='cross-loop-after-topology', + pipeline_func=cross_loop_after_topology_pipeline, + pipline_func_args=None, + compiled_file_name='cross_loop_after_topology.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/cross_loop_after_topology.yaml' + ), + TestData( + pipeline_name='pipeline-with-task-final-status-conditional', + pipeline_func=pipeline_as_exit_task, + pipline_func_args={'message': 'Hello Exit Task!'}, + compiled_file_name='pipeline_as_exit_task.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_as_exit_task.yaml' + ), + TestData( + pipeline_name='pipeline-in-pipeline-loaded-from-yaml', + pipeline_func=pipeline_in_pipeline_loaded_from_yaml, + pipline_func_args=None, + compiled_file_name='pipeline_in_pipeline_loaded_from_yaml.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/pipeline_in_pipeline_loaded_from_yaml.yaml' + ), + TestData( + pipeline_name='dynamic-importer-metadata-pipeline', + pipeline_func=pipeline_with_dynamic_importer_metadata, + pipline_func_args=None, + compiled_file_name='pipeline_with_dynamic_importer_metadata.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_dynamic_importer_metadata.yaml' + ), + TestData( + pipeline_name='pipeline-with-google-artifact-types', + pipeline_func=pipeline_with_google_artifact_type, + pipline_func_args=None, + compiled_file_name='pipeline_with_google_artifact_type.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_google_artifact_type.yaml' + ), + TestData( + pipeline_name='pipeline-with-metadata-fields', + pipeline_func=pipeline_with_metadata_fields, + pipline_func_args=None, + compiled_file_name='pipeline_with_metadata_fields.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_metadata_fields.yaml' + ), + TestData( + pipeline_name='make-and-join-datasets', + 
pipeline_func=pythonic_artifacts_with_list_of_artifacts, + pipline_func_args={'texts': ['text1', 'text2']}, + compiled_file_name='pythonic_artifacts_with_list_of_artifacts.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pythonic_artifacts_with_list_of_artifacts.yaml' + ), + TestData( + pipeline_name='make-language-model-pipeline', + pipeline_func=pythonic_artifact_with_single_return, + pipline_func_args=None, + compiled_file_name='pythonic_artifact_with_single_return.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pythonic_artifact_with_single_return.yaml' + ), + TestData( + pipeline_name='pipeline-with-semaphore', + pipeline_func=pipeline_with_semaphore, + pipline_func_args=None, + compiled_file_name='pipeline_with_semaphore.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_semaphore.yaml' + ), + TestData( + pipeline_name='pipeline-with-condition-dynamic-task-output', + pipeline_func=pipeline_with_dynamic_condition_output, + pipline_func_args=None, + compiled_file_name='pipeline_with_condition_dynamic_task_output_custom_training_job.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_condition_dynamic_task_output_custom_training_job.yaml' + ), + TestData( + pipeline_name='pipeline-with-importer-and-gcpc-types', + pipeline_func=pipeline_with_importer_and_gcpc_types, + pipline_func_args=None, + compiled_file_name='pipeline_with_importer_and_gcpc_types.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_importer_and_gcpc_types.yaml' + ), + TestData( + pipeline_name='pipeline-with-nested-conditions-yaml', + pipeline_func=pipeline_with_nested_conditions_yaml, + pipline_func_args=None, + compiled_file_name='pipeline_with_nested_conditions_yaml.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/pipeline_with_nested_conditions_yaml.yaml' + ), + TestData( + pipeline_name='pipeline-with-parallelfor-parallelism', + pipeline_func=pipeline_with_parallelfor_parallelism, + pipline_func_args={ + 'loop_parameter': ['item1', 'item2', 'item3'] + }, + compiled_file_name='pipeline_with_parallelfor_parallelism.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_parallelfor_parallelism.yaml' + ), + TestData( + pipeline_name='pipeline-with-params-containing-format', + pipeline_func=pipeline_with_params_containing_format, + pipline_func_args={'name': 'KFP'}, + compiled_file_name='pipeline_with_params_containing_format.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/pipeline_with_params_containing_format.yaml' + ), + TestData( + pipeline_name='pipeline-with-string-machine-fields-pipeline-input', + pipeline_func=pipeline_with_string_machine_fields_pipeline_input, + pipline_func_args=None, + compiled_file_name='pipeline_with_string_machine_fields_pipeline_input.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_string_machine_fields_pipeline_input.yaml' + ), + TestData( + pipeline_name='pipeline-with-string-machine-fields-task-output', + pipeline_func=pipeline_with_string_machine_fields_task_output, + pipline_func_args=None, + compiled_file_name='pipeline_with_string_machine_fields_task_output.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_string_machine_fields_task_output.yaml' + ), + TestData( + pipeline_name='pipeline-with-task-using-ignore-upstream-failure', + pipeline_func=pipeline_with_task_using_ignore_upstream_failure, + pipline_func_args={'sample_input': 'test message'}, + 
compiled_file_name='pipeline_with_task_using_ignore_upstream_failure.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_task_using_ignore_upstream_failure.yaml' + ), + TestData( + pipeline_name='pipeline-with-concat-placeholder', + pipeline_func=pipeline_with_concat_placeholder_pipeline, + pipline_func_args=None, + compiled_file_name='pipeline_with_concat_placeholder.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_concat_placeholder.yaml' + ), + TestData( + pipeline_name='pipeline-with-if-placeholder', + pipeline_func=pipeline_with_if_placeholder_pipeline, + pipline_func_args=None, + compiled_file_name='container_with_if_placeholder.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/container_with_if_placeholder.yaml' + ), + TestData( + pipeline_name='wait-awhile', + pipeline_func=long_running_pipeline, + pipline_func_args=None, + compiled_file_name='long_running.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/long-running.yaml' + ), + TestData( + pipeline_name='pipeline-with-task-final-status', + pipeline_func=pipeline_with_task_final_status, + pipline_func_args={'message': 'Hello World!'}, + compiled_file_name='pipeline_with_task_final_status.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_task_final_status.yaml' + ), + TestData( + pipeline_name='pipeline-parallelfor-artifacts', + pipeline_func=pipeline_with_parallelfor_list_artifacts, + pipline_func_args=None, + compiled_file_name='pipeline_with_parallelfor_list_artifacts.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/pipeline_with_parallelfor_list_artifacts_GH-12033.yaml' + ), + TestData( + pipeline_name='pipeline-with-if-placeholder-supply-none', + pipeline_func=pipeline_none, + pipline_func_args=None, + compiled_file_name='placeholder_none_input_value.yaml', + expected_compiled_file_path=f'{_VALID_PIPELINE_FILES}/essential/placeholder_with_if_placeholder_none_input_value.yaml' + ), + ], + ids=str) + def test_compilation(self, pipeline_data: TestData): + temp_compiled_pipeline_file = os.path.join( + tempfile.gettempdir(), pipeline_data.compiled_file_name) + try: + Compiler().compile( + pipeline_func=pipeline_data.pipeline_func, + pipeline_name=pipeline_data.pipeline_name, + pipeline_parameters=pipeline_data.pipline_func_args, + package_path=temp_compiled_pipeline_file, + ) + print(f'Pipeline Created at : {temp_compiled_pipeline_file}') + print( + f'Parsing expected yaml {pipeline_data.expected_compiled_file_path} for comparison' + ) + expected_pipeline_specs, expected_platform_specs = FileUtils.read_yaml_file( + pipeline_data.expected_compiled_file_path) + print( + f'Parsing compiled yaml {temp_compiled_pipeline_file} for comparison' + ) + generated_pipeline_specs, generated_platform_specs = FileUtils.read_yaml_file( + temp_compiled_pipeline_file) + print('Verify that the generated yaml matches expected yaml or not') + ComparisonUtils.compare_pipeline_spec_dicts( + actual=generated_pipeline_specs, + expected=expected_pipeline_specs, + name=pipeline_data.pipeline_name, + runtime_params=pipeline_data.pipline_func_args, + ) + ComparisonUtils.compare_pipeline_spec_dicts( + actual=generated_platform_specs, + expected=expected_platform_specs) + finally: + print(f'Deleting temp compiled file: {temp_compiled_pipeline_file}') + os.remove(temp_compiled_pipeline_file) + print(f'Deleted temp compiled file: {temp_compiled_pipeline_file}') diff --git a/sdk/python/test/local_execution/__init__.py 
b/sdk/python/test/local_execution/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/test/local_execution/local_execution_test.py b/sdk/python/test/local_execution/local_execution_test.py new file mode 100644 index 00000000000..b3437d28d29 --- /dev/null +++ b/sdk/python/test/local_execution/local_execution_test.py @@ -0,0 +1,289 @@ +# Copyright 2022 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from dataclasses import dataclass +import os +from pathlib import Path +import shutil +from typing import Any, Callable, Optional + +from kfp import local +import pytest + +from test_data.sdk_compiled_pipelines.valid.arguments_parameters import \ + echo as arguments_echo +from test_data.sdk_compiled_pipelines.valid.critical.add_numbers import \ + add_numbers +from test_data.sdk_compiled_pipelines.valid.critical.component_with_optional_inputs import \ + pipeline as optional_inputs_pipeline +from test_data.sdk_compiled_pipelines.valid.critical.flip_coin import flip_coin +from test_data.sdk_compiled_pipelines.valid.critical.mixed_parameters import \ + crust as mixed_parameters_pipeline +from test_data.sdk_compiled_pipelines.valid.critical.multiple_parameters_namedtuple import \ + crust as namedtuple_pipeline +from test_data.sdk_compiled_pipelines.valid.critical.producer_consumer_param import \ + producer_consumer_param_pipeline +from test_data.sdk_compiled_pipelines.valid.dict_input import dict_input +from test_data.sdk_compiled_pipelines.valid.essential.concat_message import \ + concat_message +from test_data.sdk_compiled_pipelines.valid.essential.container_no_input import \ + container_no_input +from test_data.sdk_compiled_pipelines.valid.essential.lightweight_python_functions_with_outputs import \ + pipeline as lightweight_with_outputs_pipeline +from test_data.sdk_compiled_pipelines.valid.hello_world import echo +from test_data.sdk_compiled_pipelines.valid.identity import identity +from test_data.sdk_compiled_pipelines.valid.nested_return import nested_return +from test_data.sdk_compiled_pipelines.valid.output_metrics import \ + output_metrics +from test_data.sdk_compiled_pipelines.valid.parameter import \ + crust as parameter_pipeline +from test_data.sdk_compiled_pipelines.valid.sequential_v1 import sequential + + +@dataclass +class TestData: + name: str + pipeline_func: Callable + pipeline_func_args: Optional[dict] + expected_output: Any + + def __str__(self) -> str: + return (f"Test Data: " + f"name={self.name} " + f"pipeline_func={self.pipeline_func.__name__} " + f"args={self.pipeline_func_args}") + + def __repr__(self) -> str: + return self.__str__() + + +def idfn(val): + return val.name + + +# Use relative directories that work for both runners +ws_root_base = './test_workspace' +pipeline_root_base = './test_pipeline_outputs' + +pipeline_func_data = [ + TestData( + name='Add Numbers', + pipeline_func=add_numbers, + pipeline_func_args={ + 'a': 5, + 'b': 5 + }, + expected_output=10, + ), + TestData( + name='Mixed Parameter', + 
pipeline_func=mixed_parameters_pipeline, + pipeline_func_args=None, + expected_output=None, + ), + TestData( + name='Flip Coin', + pipeline_func=flip_coin, + pipeline_func_args=None, + expected_output=['heads', 'tails'], + ), + TestData( + name='Optional Inputs', + pipeline_func=optional_inputs_pipeline, + pipeline_func_args=None, + expected_output=None, + ), + TestData( + name='Concat Message', + pipeline_func=concat_message, + pipeline_func_args={ + 'message1': 'Hello ', + 'message2': 'World!' + }, + expected_output='Hello World!', + ), + TestData( + name='Identity Function', + pipeline_func=identity, + pipeline_func_args={'value': 'test_value'}, + expected_output='test_value', + ), + TestData( + name='Lightweight With Outputs', + pipeline_func=lightweight_with_outputs_pipeline, + pipeline_func_args={ + 'first_message': 'Hello', + 'second_message': ' World', + 'first_number': 10, + 'second_number': 20 + }, + expected_output=None, + ), + TestData( + name='Dict Input', + pipeline_func=dict_input, + pipeline_func_args={'struct': { + 'test_key': 'test_value' + }}, + expected_output=None, + ), + TestData( + name='Parameter Pipeline', + pipeline_func=parameter_pipeline, + pipeline_func_args=None, + expected_output=None, + ), + TestData( + name='Output Metrics', + pipeline_func=output_metrics, + pipeline_func_args=None, + expected_output=None, + ), + TestData( + name='Nested Return', + pipeline_func=nested_return, + pipeline_func_args=None, + expected_output=[{ + 'A_a': '1', + 'B_b': '2' + }, { + 'A_a': '10', + 'B_b': '20' + }], + ), + TestData( + name='NamedTuple Pipeline', + pipeline_func=namedtuple_pipeline, + pipeline_func_args=None, + expected_output=None, + ), +] + +docker_specific_pipeline_funcs = [ + TestData( + name='Producer Consumer Pipeline', + pipeline_func=producer_consumer_param_pipeline, + pipeline_func_args=None, + expected_output=None, + ), + TestData( + name='Container Hello World', + pipeline_func=echo, + pipeline_func_args=None, + expected_output=None, + ), + TestData( + name='Sequential Container Pipeline', + pipeline_func=sequential, + pipeline_func_args={ + 'param1': 'First', + 'param2': 'Second' + }, + expected_output=None, + ), + TestData( + name='Container No Args', + pipeline_func=container_no_input, + pipeline_func_args=None, + expected_output=None, + ), + TestData( + name='Container with Arguments', + pipeline_func=arguments_echo, + pipeline_func_args={ + 'param1': 'arg1', + 'param2': 'arg2' + }, + expected_output=None, + ), +] +docker_specific_pipeline_funcs.extend(pipeline_func_data) + + +@pytest.mark.regression +class TestDockerRunner: + + @pytest.fixture(scope="class", autouse=True) + def setup_and_teardown(self): + ws_root = f'{ws_root_base}_docker' + pipeline_root = f'{pipeline_root_base}_docker' + Path(ws_root).mkdir(exist_ok=True) + Path(pipeline_root).mkdir(exist_ok=True) + local.init( + runner=local.DockerRunner(), + raise_on_error=True, + workspace_root=ws_root, + pipeline_root=pipeline_root) + yield + try: + if os.path.isdir(ws_root): + print(f"Deleting WS Root {ws_root}") + shutil.rmtree(ws_root, ignore_errors=True) + if os.path.isdir(pipeline_root): + print(f"Deleting Pipeline Root {pipeline_root}") + shutil.rmtree(pipeline_root, ignore_errors=True) + except Exception as e: + print(f"Failed to delete directory because of {e}") + + @pytest.mark.parametrize( + 'test_data', docker_specific_pipeline_funcs, ids=idfn) + def test_execution(self, test_data: TestData): + if test_data.pipeline_func_args is not None: + pipeline_task = test_data.pipeline_func( + 
**test_data.pipeline_func_args) + else: + pipeline_task = test_data.pipeline_func() + if test_data.expected_output is None: + print("Skipping output check") + elif type(test_data.expected_output) == list: + assert pipeline_task.output in test_data.expected_output or pipeline_task.output == test_data.expected_output, "Output of the pipeline is not the same as expected" + else: + assert pipeline_task.output == test_data.expected_output, "Output of the pipeline is not the same as expected" + + +@pytest.mark.regression +class TestSubProcessRunner: + + @pytest.fixture(scope="class", autouse=True) + def setup_and_teardown(self): + ws_root = f'{ws_root_base}_subprocess' + pipeline_root = f'{pipeline_root_base}_subprocess' + Path(ws_root).mkdir(exist_ok=True) + Path(pipeline_root).mkdir(exist_ok=True) + local.init( + runner=local.SubprocessRunner(), + raise_on_error=True, + workspace_root=ws_root, + pipeline_root=pipeline_root) + yield + try: + if os.path.isdir(ws_root): + shutil.rmtree(ws_root, ignore_errors=True) + if os.path.isdir(pipeline_root): + shutil.rmtree(pipeline_root, ignore_errors=True) + except Exception as e: + print(f"Failed to delete directory because of {e}") + + @pytest.mark.parametrize('test_data', pipeline_func_data, ids=idfn) + def test_execution(self, test_data: TestData): + if test_data.pipeline_func_args is not None: + pipeline_task = test_data.pipeline_func( + **test_data.pipeline_func_args) + else: + pipeline_task = test_data.pipeline_func() + if test_data.expected_output is None: + print("Skipping output check") + elif type(test_data.expected_output) == list: + assert pipeline_task.output in test_data.expected_output or pipeline_task.output == test_data.expected_output, "Output of the pipeline is not the same as expected" + else: + assert pipeline_task.output == test_data.expected_output, "Output of the pipeline is not the same as expected" diff --git a/sdk/python/test/pytest.ini b/sdk/python/test/pytest.ini new file mode 100644 index 00000000000..ba32b4bb0a8 --- /dev/null +++ b/sdk/python/test/pytest.ini @@ -0,0 +1,7 @@ +# pytest.ini +[pytest] +markers = + sdk: marks tests as SDK tests + regression: marks tests as Regression Tests + client: marks the test as SDK Client Tests + compilation: marker for compilation tests \ No newline at end of file diff --git a/sdk/python/test/runtime/__init__.py b/sdk/python/test/runtime/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/runtime_tests/execute_commands_args_test.py b/sdk/python/test/runtime/execute_commands_args_test.py similarity index 82% rename from sdk/runtime_tests/execute_commands_args_test.py rename to sdk/python/test/runtime/execute_commands_args_test.py index f920ef971eb..8519a052bd4 100644 --- a/sdk/runtime_tests/execute_commands_args_test.py +++ b/sdk/python/test/runtime/execute_commands_args_test.py @@ -20,10 +20,11 @@ import tempfile from typing import Any, Dict -from absl.testing import parameterized +import kfp +import pytest import yaml -TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), 'test_data') +from ..test_utils.file_utils import FileUtils @dataclasses.dataclass @@ -36,7 +37,8 @@ class RuntimeTestConfig: TEST_CONFIGS = [ RuntimeTestConfig( pipeline_file_relpath=os.path.join( - TEST_DATA_DIR, 'pipeline_with_task_final_status.yaml'), + FileUtils.VALID_PIPELINE_FILES, + 'pipeline_with_task_final_status.yaml'), executor_name='exec-print-op', executor_input={ 'inputs': { @@ -57,7 +59,8 @@ class RuntimeTestConfig: ), RuntimeTestConfig( pipeline_file_relpath=os.path.join( 
- TEST_DATA_DIR, 'pipeline_with_task_final_status.yaml'), + FileUtils.VALID_PIPELINE_FILES, + 'pipeline_with_task_final_status.yaml'), executor_name='exec-exit-op', executor_input={ 'inputs': { @@ -115,11 +118,13 @@ def run_commands_and_args( # commands/args resemble the true runtime commands/args less well # prefer the less invasive approach of installing from a PR - kfp_package_path = 'sdk/python' - command_and_args = [ - re.sub(r"'kfp==(\d+).(\d+).(\d+)(-[a-z]+.\d+)?'", kfp_package_path, - cmd) for cmd in command_and_args - ] + overriden_kfp_package_path_commands = list() + for cmd in command_and_args: + if re.search("kfp==[0-9].[0-9]+.[0-9]+", cmd) is not None: + cmd = re.sub("kfp==[0-9].[0-9]+.[0-9]+", + f"kfp=={kfp.__version__}", cmd) + overriden_kfp_package_path_commands.append(cmd) + command_and_args = overriden_kfp_package_path_commands executor_input_json = json.dumps(config.executor_input).replace( '/gcs/', temp_dir) command_and_args = [ @@ -134,20 +139,19 @@ def run_commands_and_args( ) -class TestRuntime(parameterized.TestCase): - - @classmethod - def setUp(cls): - cls.temp_dir = tempfile.mkdtemp() +@pytest.mark.regression +class TestRuntime: - @classmethod - def tearDown(cls): - shutil.rmtree(cls.temp_dir) + @pytest.fixture(autouse=True) + def setup_and_teardown(self): + self.temp_dir = tempfile.mkdtemp() + yield + shutil.rmtree(self.temp_dir) - @parameterized.parameters(TEST_CONFIGS) + @pytest.mark.parametrize('config', TEST_CONFIGS) def test(self, config: RuntimeTestConfig): process = run_commands_and_args( config=config, temp_dir=self.temp_dir, ) - self.assertEqual(process.returncode, 0, process.stderr) + assert process.returncode == 0, f"Process failed with error={process.stderr}" diff --git a/sdk/python/test/test_utils/__init__.py b/sdk/python/test/test_utils/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/test/test_utils/comparison_utils.py b/sdk/python/test/test_utils/comparison_utils.py new file mode 100644 index 00000000000..6826f69f3ec --- /dev/null +++ b/sdk/python/test/test_utils/comparison_utils.py @@ -0,0 +1,66 @@ +# Copyright 2022 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import re + +import kfp + + +class ComparisonUtils: + + @classmethod + def compare_pipeline_spec_dicts(cls, actual: dict, expected: dict, + **kwargs): + """Compare two pipeline/platform spec dicts. + + :param actual: the pipeline spec that you want to compare. + :param expected: the pipeline spec that is the source of truth. + :param kwargs: options are: name (pipeline name), display_name (pipeline display name), runtime_params (pipeline runtime params).""" + if expected is None: + assert actual is None, "Actual is not None when it is expected to be None" + else: + for key, value in expected.items(): + if type(value) == dict: + # Override Pipeline Name and Display Name if those were overridden during compilation + if key == 'pipelineInfo': + value['name'] = kwargs['name'] + + # Override Run Time Params in the expected object if runtime params were overridden when compiling pipeline + if 'runtime_params' in kwargs: + if kwargs['runtime_params'] is not None: + if key == 'root': + for param_key, param_value in value[ + 'inputDefinitions']['parameters'].items( + ): + if param_key in kwargs[ + 'runtime_params'].keys(): + value['inputDefinitions']['parameters'][ + param_key]['defaultValue'] = kwargs[ + 'runtime_params'][param_key] + + cls.compare_pipeline_spec_dicts(actual[key], value, + **kwargs) + else: + # Override SDK Version to match the current version + if key == 'sdkVersion': + value = f'kfp-{kfp.__version__}' + # Override SDK Version in the args as well to match the current version + if key == 'command': + for index, command in enumerate(value): + if re.search("kfp==[0-9].[0-9]+.[0-9]+", + command) is not None: + value[index] = re.sub( + "kfp==[0-9].[0-9]+.[0-9]+", + f"kfp=={kfp.__version__}", command) + assert value == actual[ + key], f'Value for "{key}" is not the same' diff --git a/sdk/python/test/test_utils/file_utils.py b/sdk/python/test/test_utils/file_utils.py new file mode 100644 index 00000000000..86ac04a4697 --- /dev/null +++ b/sdk/python/test/test_utils/file_utils.py @@ -0,0 +1,52 @@ +# Copyright 2022 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+import os + +import yaml + + +class FileUtils: + + PROJECT_ROOT = os.path.abspath( + os.path.join(__file__, *([os.path.pardir] * 5))) + TEST_DATA = os.path.join(PROJECT_ROOT, "test_data") + SDK_COMPILED_FILES = os.path.join(TEST_DATA, "sdk_compiled_pipelines") + VALID_PIPELINE_FILES = os.path.join(SDK_COMPILED_FILES, "valid") + ESSENTIAL_PIPELINE_FILES = os.path.join(VALID_PIPELINE_FILES, "essential") + CRITICAL_PIPELINE_FILES = os.path.join(VALID_PIPELINE_FILES, "critical") + COMPONENTS = os.path.join(TEST_DATA, "components") + + @classmethod + def read_yaml_file(cls, filepath) -> tuple: + """Read the pipeline spec file at the given file path. + + :param filepath: path to a compiled pipeline spec YAML file. + :return: a tuple of (pipeline_specs, platform_specs) dicts; either + entry is None if the corresponding YAML document is absent. + """ + + pipeline_specs: dict = None + platform_specs: dict = None + with open(filepath, 'r') as file: + try: + yaml_data = yaml.safe_load_all(file) + for data in yaml_data: + if 'pipelineInfo' in data.keys(): + pipeline_specs = data + else: + platform_specs = data + return pipeline_specs, platform_specs + except yaml.YAMLError as ex: + print(f'Error parsing YAML file: {ex}') + raise RuntimeError(f'Could not load yaml file: {filepath}') from ex diff --git a/sdk/python/test_data/README.md b/sdk/python/test_data/README.md deleted file mode 100644 index 35f0a24f5c0..00000000000 --- a/sdk/python/test_data/README.md +++ /dev/null @@ -1,26 +0,0 @@ -# KFP SDK Test Data - -Test data in this directory is used for two separate test suites. - -1. Read/write tests -> Location: `sdk/python/kfp/compiler/read_write_test.py ` - -Tests the compiler (write) and load (read) functionality of the SDK. This ensures that pipelines are written and read correctly and idempotently. - -These tests require golden snapshots of pipelines and components against with compiled pipelines are compared. To update pipeline golden snapshots: - -```bash -for f in sdk/python/test_data/pipelines/*.py ; do echo "$f" && python3 "$f" ; done -``` - - -To update component golden snapshots: -```bash -for f in sdk/python/test_data/components/*.py ; do echo "$f" && python3 "$f" ; done -``` - - -2. Pipeline execution tests -> Location: `test/sdk-execution-tests/sdk_execution_tests.py` - -These tests ensure that the KFP OSS BE can execute the pipelines. `execute`: may be `false` in the `test_data_config.yaml` for a given `test_case` if the test case (a) isn't a complete example (e.g., a dependency doesn't exist in the image, etc.) or (b) the KFP OSS BE cannot execute the pipeline. diff --git a/sdk/python/test_data/components/add_numbers.yaml b/sdk/python/test_data/components/add_numbers.yaml deleted file mode 100644 index 197646adec6..00000000000 --- a/sdk/python/test_data/components/add_numbers.yaml +++ /dev/null @@ -1,86 +0,0 @@ -# PIPELINE DEFINITION -# Name: add-numbers -# Inputs: -# a: int -# b: int -# Outputs: -# Output: int -components: - comp-add-numbers: - executorLabel: exec-add-numbers - inputDefinitions: - parameters: - a: - parameterType: NUMBER_INTEGER - b: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER -deploymentSpec: - executors: - exec-add-numbers: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - add_numbers - command: - - sh - - -c - - "\nif !
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef add_numbers(a: int, b: int) -> int:\n return a + b\n\n" - image: python:3.9 -pipelineInfo: - name: add-numbers -root: - dag: - outputs: - parameters: - Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: add-numbers - tasks: - add-numbers: - cachingOptions: - enableCache: true - componentRef: - name: comp-add-numbers - inputs: - parameters: - a: - componentInputParameter: a - b: - componentInputParameter: b - taskInfo: - name: add-numbers - inputDefinitions: - parameters: - a: - parameterType: NUMBER_INTEGER - b: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/components/component_with_metadata_fields.yaml b/sdk/python/test_data/components/component_with_metadata_fields.yaml deleted file mode 100644 index 4547d34d722..00000000000 --- a/sdk/python/test_data/components/component_with_metadata_fields.yaml +++ /dev/null @@ -1,129 +0,0 @@ -# PIPELINE DEFINITION -# Name: dataset-joiner -# Description: Concatenate dataset_a and dataset_b. -# Also returns the concatenated string. -# Inputs: -# dataset_a: system.Dataset -# dataset_b: system.Dataset -# Outputs: -# Output: str -# out_dataset: system.Dataset -components: - comp-dataset-joiner: - executorLabel: exec-dataset-joiner - inputDefinitions: - artifacts: - dataset_a: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - description: First dataset. - dataset_b: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - description: Second dataset. - outputDefinitions: - artifacts: - out_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - description: The concatenated dataset. - parameters: - Output: - description: The concatenated string. - parameterType: STRING -deploymentSpec: - executors: - exec-dataset-joiner: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - dataset_joiner - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef dataset_joiner(\n dataset_a: Input[Dataset],\n dataset_b:\ - \ Input[Dataset],\n out_dataset: Output[Dataset],\n) -> str:\n \"\"\ - \"Concatenate dataset_a and dataset_b.\n\n Also returns the concatenated\ - \ string.\n\n Args:\n dataset_a: First dataset.\n dataset_b:\ - \ Second dataset.\n\n Returns:\n out_dataset: The concatenated\ - \ dataset.\n Output: The concatenated string.\n \"\"\"\n with\ - \ open(dataset_a.path) as f:\n content_a = f.read()\n\n with open(dataset_b.path)\ - \ as f:\n content_b = f.read()\n\n concatenated_string = content_a\ - \ + content_b\n with open(out_dataset.path, 'w') as f:\n f.write(concatenated_string)\n\ - \n return concatenated_string\n\n" - image: python:3.9 -pipelineInfo: - name: dataset-joiner -root: - dag: - outputs: - artifacts: - out_dataset: - artifactSelectors: - - outputArtifactKey: out_dataset - producerSubtask: dataset-joiner - parameters: - Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: dataset-joiner - tasks: - dataset-joiner: - cachingOptions: - enableCache: true - componentRef: - name: comp-dataset-joiner - inputs: - artifacts: - dataset_a: - componentInputArtifact: dataset_a - dataset_b: - componentInputArtifact: dataset_b - taskInfo: - name: dataset-joiner - inputDefinitions: - artifacts: - dataset_a: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - description: First dataset. - dataset_b: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - description: Second dataset. - outputDefinitions: - artifacts: - out_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - description: The concatenated dataset. - parameters: - Output: - description: The concatenated string. - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/components/component_with_pip_install.yaml b/sdk/python/test_data/components/component_with_pip_install.yaml deleted file mode 100644 index e6b0fe11513..00000000000 --- a/sdk/python/test_data/components/component_with_pip_install.yaml +++ /dev/null @@ -1,52 +0,0 @@ -# PIPELINE DEFINITION -# Name: component-with-pip-install -components: - comp-component-with-pip-install: - executorLabel: exec-component-with-pip-install -deploymentSpec: - executors: - exec-component-with-pip-install: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - component_with_pip_install - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location --index-url https://pypi.org/simple\ - \ --trusted-host https://pypi.org/simple 'kfp==2.7.0' '--no-deps' 'typing-extensions>=3.7.4,<5;\ - \ python_version<\"3.9\"' && python3 -m pip install --quiet --no-warn-script-location\ - \ --index-url https://pypi.org/simple --trusted-host https://pypi.org/simple\ - \ 'yapf' && \"$0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef component_with_pip_install():\n import yapf\n print(dir(yapf))\n\ - \n" - image: python:3.9 -pipelineInfo: - name: component-with-pip-install -root: - dag: - tasks: - component-with-pip-install: - cachingOptions: - enableCache: true - componentRef: - name: comp-component-with-pip-install - taskInfo: - name: component-with-pip-install -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/components/component_with_pip_install_in_venv.py b/sdk/python/test_data/components/component_with_pip_install_in_venv.py deleted file mode 100644 index c9f78fbde8f..00000000000 --- a/sdk/python/test_data/components/component_with_pip_install_in_venv.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2024 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from kfp.dsl import component - - -@component( - pip_index_urls=["https://pypi.org/simple"], - packages_to_install=["yapf"], - use_venv=True, -) -def component_with_pip_install(): - import yapf - - print(dir(yapf)) - - -if __name__ == "__main__": - from kfp import compiler - - compiler.Compiler().compile( - pipeline_func=component_with_pip_install, - package_path=__file__.replace(".py", ".yaml"), - ) diff --git a/sdk/python/test_data/components/component_with_pip_install_in_venv.yaml b/sdk/python/test_data/components/component_with_pip_install_in_venv.yaml deleted file mode 100644 index 135c9b469b2..00000000000 --- a/sdk/python/test_data/components/component_with_pip_install_in_venv.yaml +++ /dev/null @@ -1,55 +0,0 @@ -# PIPELINE DEFINITION -# Name: component-with-pip-install -components: - comp-component-with-pip-install: - executorLabel: exec-component-with-pip-install -deploymentSpec: - executors: - exec-component-with-pip-install: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - component_with_pip_install - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ \nexport PIP_DISABLE_PIP_VERSION_CHECK=1\ntmp=$(mktemp -d)\npython3 -m\ - \ venv \"$tmp/venv\" --system-site-packages\n. \"$tmp/venv/bin/activate\"\ - \n python3 -m pip install --quiet --no-warn-script-location --index-url\ - \ https://pypi.org/simple --trusted-host https://pypi.org/simple 'kfp==2.9.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' &&\ - \ python3 -m pip install --quiet --no-warn-script-location --index-url\ - \ https://pypi.org/simple --trusted-host https://pypi.org/simple 'yapf'\ - \ && \"$0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef component_with_pip_install():\n import yapf\n\n print(dir(yapf))\n\ - \n" - image: python:3.9 -pipelineInfo: - name: component-with-pip-install -root: - dag: - tasks: - component-with-pip-install: - cachingOptions: - enableCache: true - componentRef: - name: comp-component-with-pip-install - taskInfo: - name: component-with-pip-install -schemaVersion: 2.1.0 -sdkVersion: kfp-2.9.0 diff --git a/sdk/python/test_data/components/component_with_task_final_status.yaml b/sdk/python/test_data/components/component_with_task_final_status.yaml deleted file mode 100644 index 2b1fca875f4..00000000000 --- a/sdk/python/test_data/components/component_with_task_final_status.yaml +++ /dev/null @@ -1,66 +0,0 @@ -# PIPELINE DEFINITION -# Name: exit-comp -# Inputs: -# status: PipelineTaskFinalStatus -components: - comp-exit-comp: - executorLabel: exec-exit-comp - inputDefinitions: - parameters: - status: - isOptional: true - parameterType: TASK_FINAL_STATUS -deploymentSpec: - executors: - exec-exit-comp: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - exit_comp - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef exit_comp(status: dsl.PipelineTaskFinalStatus):\n print(status)\n\ - \n" - image: python:3.9 -pipelineInfo: - name: exit-comp -root: - dag: - tasks: - exit-comp: - cachingOptions: - enableCache: true - componentRef: - name: comp-exit-comp - inputs: - parameters: - status: - componentInputParameter: status - taskInfo: - name: exit-comp - inputDefinitions: - parameters: - status: - isOptional: true - parameterType: TASK_FINAL_STATUS -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/components/concat_message.yaml b/sdk/python/test_data/components/concat_message.yaml deleted file mode 100644 index 381a8f22f73..00000000000 --- a/sdk/python/test_data/components/concat_message.yaml +++ /dev/null @@ -1,87 +0,0 @@ -# PIPELINE DEFINITION -# Name: concat-message -# Inputs: -# message1: str -# message2: str -# Outputs: -# Output: str -components: - comp-concat-message: - executorLabel: exec-concat-message - inputDefinitions: - parameters: - message1: - parameterType: STRING - message2: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING -deploymentSpec: - executors: - exec-concat-message: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - concat_message - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef concat_message(message1: str, message2: str) -> str:\n return\ - \ message1 + message2\n\n" - image: python:3.9 -pipelineInfo: - name: concat-message -root: - dag: - outputs: - parameters: - Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: concat-message - tasks: - concat-message: - cachingOptions: - enableCache: true - componentRef: - name: comp-concat-message - inputs: - parameters: - message1: - componentInputParameter: message1 - message2: - componentInputParameter: message2 - taskInfo: - name: concat-message - inputDefinitions: - parameters: - message1: - parameterType: STRING - message2: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/components/container_io.yaml b/sdk/python/test_data/components/container_io.yaml deleted file mode 100644 index 0b7d13d8ac5..00000000000 --- a/sdk/python/test_data/components/container_io.yaml +++ /dev/null @@ -1,60 +0,0 @@ -# PIPELINE DEFINITION -# Name: container-io -# Inputs: -# text: str -# Outputs: -# output_path: str -components: - comp-container-io: - executorLabel: exec-container-io - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - parameters: - output_path: - parameterType: STRING -deploymentSpec: - executors: - exec-container-io: - container: - args: - - --output_path - - '{{$.outputs.parameters[''output_path''].output_file}}' - command: - - my_program - - '{{$.inputs.parameters[''text'']}}' - image: python:3.9 -pipelineInfo: - name: container-io -root: - dag: - outputs: - parameters: - output_path: - valueFromParameter: - outputParameterKey: output_path - producerSubtask: container-io - tasks: - container-io: - cachingOptions: - enableCache: true - componentRef: - name: comp-container-io - inputs: - parameters: - text: - componentInputParameter: text - taskInfo: - name: container-io - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - parameters: - output_path: - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/components/container_no_input.yaml b/sdk/python/test_data/components/container_no_input.yaml deleted file mode 100644 index 23c30e59a42..00000000000 --- a/sdk/python/test_data/components/container_no_input.yaml +++ /dev/null @@ -1,27 +0,0 @@ -# PIPELINE DEFINITION -# Name: container-no-input -components: - comp-container-no-input: - executorLabel: exec-container-no-input -deploymentSpec: - executors: - exec-container-no-input: - container: - command: - - echo - - hello world - image: python:3.9 -pipelineInfo: - name: container-no-input -root: - dag: - tasks: - container-no-input: - cachingOptions: - enableCache: true - componentRef: - name: comp-container-no-input - taskInfo: - name: container-no-input 
-schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/components/container_with_artifact_output.yaml b/sdk/python/test_data/components/container_with_artifact_output.yaml deleted file mode 100644 index 644c3285755..00000000000 --- a/sdk/python/test_data/components/container_with_artifact_output.yaml +++ /dev/null @@ -1,82 +0,0 @@ -# PIPELINE DEFINITION -# Name: container-with-artifact-output -# Inputs: -# num_epochs: int -# Outputs: -# model: system.Model -# model_config_path: str -components: - comp-container-with-artifact-output: - executorLabel: exec-container-with-artifact-output - inputDefinitions: - parameters: - num_epochs: - parameterType: NUMBER_INTEGER - outputDefinitions: - artifacts: - model: - artifactType: - schemaTitle: system.Model - schemaVersion: 0.0.1 - parameters: - model_config_path: - parameterType: STRING -deploymentSpec: - executors: - exec-container-with-artifact-output: - container: - args: - - --epochs - - '{{$.inputs.parameters[''num_epochs'']}}' - - --model_path - - '{{$.outputs.artifacts[''model''].uri}}' - - --model_metadata - - '{{$.outputs.artifacts[''model''].metadata}}' - - --model_config_path - - '{{$.outputs.parameters[''model_config_path''].output_file}}' - command: - - sh - - run.sh - image: gcr.io/my-image -pipelineInfo: - name: container-with-artifact-output -root: - dag: - outputs: - artifacts: - model: - artifactSelectors: - - outputArtifactKey: model - producerSubtask: container-with-artifact-output - parameters: - model_config_path: - valueFromParameter: - outputParameterKey: model_config_path - producerSubtask: container-with-artifact-output - tasks: - container-with-artifact-output: - cachingOptions: - enableCache: true - componentRef: - name: comp-container-with-artifact-output - inputs: - parameters: - num_epochs: - componentInputParameter: num_epochs - taskInfo: - name: container-with-artifact-output - inputDefinitions: - parameters: - num_epochs: - parameterType: NUMBER_INTEGER - outputDefinitions: - artifacts: - model: - artifactType: - schemaTitle: system.Model - schemaVersion: 0.0.1 - parameters: - model_config_path: - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/components/container_with_concat_placeholder.yaml b/sdk/python/test_data/components/container_with_concat_placeholder.yaml deleted file mode 100644 index f0f02544f25..00000000000 --- a/sdk/python/test_data/components/container_with_concat_placeholder.yaml +++ /dev/null @@ -1,76 +0,0 @@ -# PIPELINE DEFINITION -# Name: container-with-concat-placeholder -# Inputs: -# text1: str -# Outputs: -# output_path: str -# text2: system.Dataset -components: - comp-container-with-concat-placeholder: - executorLabel: exec-container-with-concat-placeholder - inputDefinitions: - parameters: - text1: - parameterType: STRING - outputDefinitions: - artifacts: - text2: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - parameters: - output_path: - parameterType: STRING -deploymentSpec: - executors: - exec-container-with-concat-placeholder: - container: - args: - - --output_path - - '{{$.outputs.parameters[''output_path''].output_file}}' - command: - - my_program - - '{"Concat": ["prefix-", "{{$.inputs.parameters[''text1'']}}", "{{$.outputs.artifacts[''text2''].uri}}"]}' - image: python:3.9 -pipelineInfo: - name: container-with-concat-placeholder -root: - dag: - outputs: - artifacts: - text2: - artifactSelectors: - - outputArtifactKey: text2 - producerSubtask: container-with-concat-placeholder - 
parameters: - output_path: - valueFromParameter: - outputParameterKey: output_path - producerSubtask: container-with-concat-placeholder - tasks: - container-with-concat-placeholder: - cachingOptions: - enableCache: true - componentRef: - name: comp-container-with-concat-placeholder - inputs: - parameters: - text1: - componentInputParameter: text1 - taskInfo: - name: container-with-concat-placeholder - inputDefinitions: - parameters: - text1: - parameterType: STRING - outputDefinitions: - artifacts: - text2: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - parameters: - output_path: - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/components/container_with_if_placeholder.yaml b/sdk/python/test_data/components/container_with_if_placeholder.yaml deleted file mode 100644 index 65aec216d69..00000000000 --- a/sdk/python/test_data/components/container_with_if_placeholder.yaml +++ /dev/null @@ -1,84 +0,0 @@ -# PIPELINE DEFINITION -# Name: container-with-if-placeholder -# Inputs: -# optional_input: str [Default: 'default'] -# Outputs: -# dataset: system.Dataset -# output_path: str -components: - comp-container-with-if-placeholder: - executorLabel: exec-container-with-if-placeholder - inputDefinitions: - parameters: - optional_input: - defaultValue: default - isOptional: true - parameterType: STRING - outputDefinitions: - artifacts: - dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - parameters: - output_path: - parameterType: STRING -deploymentSpec: - executors: - exec-container-with-if-placeholder: - container: - args: - - --output_path - - '{{$.outputs.parameters[''output_path''].output_file}}' - command: - - my_program - - '{"IfPresent": {"InputName": "optional_input", "Then": ["{{$.inputs.parameters[''optional_input'']}}"], - "Else": ["bye"]}}' - - --dataset - - '{"IfPresent": {"InputName": "optional_input", "Then": ["{{$.outputs.artifacts[''dataset''].uri}}"], - "Else": ["bye"]}}' - image: python:3.9 -pipelineInfo: - name: container-with-if-placeholder -root: - dag: - outputs: - artifacts: - dataset: - artifactSelectors: - - outputArtifactKey: dataset - producerSubtask: container-with-if-placeholder - parameters: - output_path: - valueFromParameter: - outputParameterKey: output_path - producerSubtask: container-with-if-placeholder - tasks: - container-with-if-placeholder: - cachingOptions: - enableCache: true - componentRef: - name: comp-container-with-if-placeholder - inputs: - parameters: - optional_input: - componentInputParameter: optional_input - taskInfo: - name: container-with-if-placeholder - inputDefinitions: - parameters: - optional_input: - defaultValue: default - isOptional: true - parameterType: STRING - outputDefinitions: - artifacts: - dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - parameters: - output_path: - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/components/container_with_placeholder_in_fstring.yaml b/sdk/python/test_data/components/container_with_placeholder_in_fstring.yaml deleted file mode 100644 index 03949eafb50..00000000000 --- a/sdk/python/test_data/components/container_with_placeholder_in_fstring.yaml +++ /dev/null @@ -1,66 +0,0 @@ -# PIPELINE DEFINITION -# Name: container-with-placeholder-in-fstring -# Inputs: -# text1: str [Default: 'text!'] -# Outputs: -# output_artifact: system.Artifact -components: - comp-container-with-placeholder-in-fstring: - executorLabel: 
exec-container-with-placeholder-in-fstring - inputDefinitions: - parameters: - text1: - defaultValue: text! - isOptional: true - parameterType: STRING - outputDefinitions: - artifacts: - output_artifact: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 -deploymentSpec: - executors: - exec-container-with-placeholder-in-fstring: - container: - command: - - my_program - - prefix-{{$.inputs.parameters['text1']}} - - '{{$.outputs.artifacts[''output_artifact''].uri}}/0' - image: python:3.9 -pipelineInfo: - name: container-with-placeholder-in-fstring -root: - dag: - outputs: - artifacts: - output_artifact: - artifactSelectors: - - outputArtifactKey: output_artifact - producerSubtask: container-with-placeholder-in-fstring - tasks: - container-with-placeholder-in-fstring: - cachingOptions: - enableCache: true - componentRef: - name: comp-container-with-placeholder-in-fstring - inputs: - parameters: - text1: - componentInputParameter: text1 - taskInfo: - name: container-with-placeholder-in-fstring - inputDefinitions: - parameters: - text1: - defaultValue: text! - isOptional: true - parameterType: STRING - outputDefinitions: - artifacts: - output_artifact: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/components/containerized_python_component.yaml b/sdk/python/test_data/components/containerized_python_component.yaml deleted file mode 100644 index e9c8d94e828..00000000000 --- a/sdk/python/test_data/components/containerized_python_component.yaml +++ /dev/null @@ -1,70 +0,0 @@ -# PIPELINE DEFINITION -# Name: concat-message -# Inputs: -# message1: str -# message2: str -# Outputs: -# Output: str -components: - comp-concat-message: - executorLabel: exec-concat-message - inputDefinitions: - parameters: - message1: - parameterType: STRING - message2: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING -deploymentSpec: - executors: - exec-concat-message: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - concat_message - command: - - python3 - - -m - - kfp.dsl.executor_main - image: kfp-image -pipelineInfo: - name: concat-message -root: - dag: - outputs: - parameters: - Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: concat-message - tasks: - concat-message: - cachingOptions: - enableCache: true - componentRef: - name: comp-concat-message - inputs: - parameters: - message1: - componentInputParameter: message1 - message2: - componentInputParameter: message2 - taskInfo: - name: concat-message - inputDefinitions: - parameters: - message1: - parameterType: STRING - message2: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/components/dict_input.yaml b/sdk/python/test_data/components/dict_input.yaml deleted file mode 100644 index 4bcf5e61d53..00000000000 --- a/sdk/python/test_data/components/dict_input.yaml +++ /dev/null @@ -1,63 +0,0 @@ -# PIPELINE DEFINITION -# Name: dict-input -# Inputs: -# struct: dict -components: - comp-dict-input: - executorLabel: exec-dict-input - inputDefinitions: - parameters: - struct: - parameterType: STRUCT -deploymentSpec: - executors: - exec-dict-input: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - dict_input - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef dict_input(struct: Dict):\n print(struct)\n\n" - image: python:3.9 -pipelineInfo: - name: dict-input -root: - dag: - tasks: - dict-input: - cachingOptions: - enableCache: true - componentRef: - name: comp-dict-input - inputs: - parameters: - struct: - componentInputParameter: struct - taskInfo: - name: dict-input - inputDefinitions: - parameters: - struct: - parameterType: STRUCT -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/components/identity.yaml b/sdk/python/test_data/components/identity.yaml deleted file mode 100644 index 377911ccbcd..00000000000 --- a/sdk/python/test_data/components/identity.yaml +++ /dev/null @@ -1,79 +0,0 @@ -# PIPELINE DEFINITION -# Name: identity -# Inputs: -# value: str -# Outputs: -# Output: str -components: - comp-identity: - executorLabel: exec-identity - inputDefinitions: - parameters: - value: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING -deploymentSpec: - executors: - exec-identity: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - identity - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef identity(value: str) -> str:\n return value\n\n" - image: python:3.9 -pipelineInfo: - name: identity -root: - dag: - outputs: - parameters: - Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: identity - tasks: - identity: - cachingOptions: - enableCache: true - componentRef: - name: comp-identity - inputs: - parameters: - value: - componentInputParameter: value - taskInfo: - name: identity - inputDefinitions: - parameters: - value: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/components/input_artifact.yaml b/sdk/python/test_data/components/input_artifact.yaml deleted file mode 100644 index 71c983fe36d..00000000000 --- a/sdk/python/test_data/components/input_artifact.yaml +++ /dev/null @@ -1,68 +0,0 @@ -# PIPELINE DEFINITION -# Name: input-artifact -# Inputs: -# data: system.Dataset -components: - comp-input-artifact: - executorLabel: exec-input-artifact - inputDefinitions: - artifacts: - data: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 -deploymentSpec: - executors: - exec-input-artifact: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - input_artifact - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef input_artifact(data: Input[Dataset]):\n print(data.name)\n\ - \ print(data.uri)\n print(data.metadata)\n\n" - image: python:3.9 -pipelineInfo: - name: input-artifact -root: - dag: - tasks: - input-artifact: - cachingOptions: - enableCache: true - componentRef: - name: comp-input-artifact - inputs: - artifacts: - data: - componentInputArtifact: data - taskInfo: - name: input-artifact - inputDefinitions: - artifacts: - data: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/components/nested_return.yaml b/sdk/python/test_data/components/nested_return.yaml deleted file mode 100644 index f8d2fd169cc..00000000000 --- a/sdk/python/test_data/components/nested_return.yaml +++ /dev/null @@ -1,66 +0,0 @@ -# PIPELINE DEFINITION -# Name: nested-return -# Outputs: -# Output: list -components: - comp-nested-return: - executorLabel: exec-nested-return - outputDefinitions: - parameters: - Output: - parameterType: LIST -deploymentSpec: - executors: - exec-nested-return: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - nested_return - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef nested_return() -> List[Dict[str, str]]:\n return [{'A_a':\ - \ '1', 'B_b': '2'}, {'A_a': '10', 'B_b': '20'}]\n\n" - image: python:3.9 -pipelineInfo: - name: nested-return -root: - dag: - outputs: - parameters: - Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: nested-return - tasks: - nested-return: - cachingOptions: - enableCache: true - componentRef: - name: comp-nested-return - taskInfo: - name: nested-return - outputDefinitions: - parameters: - Output: - parameterType: LIST -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/components/preprocess.yaml b/sdk/python/test_data/components/preprocess.yaml deleted file mode 100644 index 40315487468..00000000000 --- a/sdk/python/test_data/components/preprocess.yaml +++ /dev/null @@ -1,176 +0,0 @@ -# PIPELINE DEFINITION -# Name: preprocess -# Description: Dummy preprocessing step. 
-# Inputs: -# input_dict_parameter: dict -# input_list_parameter: list -# message: str -# Outputs: -# output_bool_parameter_path: bool -# output_dataset_one: system.Dataset -# output_dataset_two_path: system.Dataset -# output_dict_parameter_path: dict -# output_list_parameter_path: list -# output_parameter_path: str -components: - comp-preprocess: - executorLabel: exec-preprocess - inputDefinitions: - parameters: - input_dict_parameter: - parameterType: STRUCT - input_list_parameter: - parameterType: LIST - message: - parameterType: STRING - outputDefinitions: - artifacts: - output_dataset_one: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - output_dataset_two_path: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - parameters: - output_bool_parameter_path: - parameterType: BOOLEAN - output_dict_parameter_path: - parameterType: STRUCT - output_list_parameter_path: - parameterType: LIST - output_parameter_path: - parameterType: STRING -deploymentSpec: - executors: - exec-preprocess: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - preprocess - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef preprocess(\n # An input parameter of type string.\n message:\ - \ str,\n # An input parameter of type dict.\n input_dict_parameter:\ - \ Dict[str, int],\n # An input parameter of type list.\n input_list_parameter:\ - \ List[str],\n # Use Output[T] to get a metadata-rich handle to the output\ - \ artifact\n # of type `Dataset`.\n output_dataset_one: Output[Dataset],\n\ - \ # A locally accessible filepath for another output artifact of type\n\ - \ # `Dataset`.\n output_dataset_two_path: OutputPath('Dataset'),\n\ - \ # A locally accessible filepath for an output parameter of type string.\n\ - \ output_parameter_path: OutputPath(str),\n # A locally accessible\ - \ filepath for an output parameter of type bool.\n output_bool_parameter_path:\ - \ OutputPath(bool),\n # A locally accessible filepath for an output parameter\ - \ of type dict.\n output_dict_parameter_path: OutputPath(Dict[str, int]),\n\ - \ # A locally accessible filepath for an output parameter of type list.\n\ - \ output_list_parameter_path: OutputPath(List[str]),\n):\n \"\"\"\ - Dummy preprocessing step.\"\"\"\n\n # Use Dataset.path to access a local\ - \ file path for writing.\n # One can also use Dataset.uri to access the\ - \ actual URI file path.\n with open(output_dataset_one.path, 'w') as\ - \ f:\n f.write(message)\n\n # OutputPath is used to just pass\ - \ the local file path of the output artifact\n # to the function.\n \ - \ with open(output_dataset_two_path, 'w') as f:\n f.write(message)\n\ - \n with open(output_parameter_path, 'w') as f:\n f.write(message)\n\ - \n with open(output_bool_parameter_path, 'w') as f:\n f.write(\n\ - \ str(True)) # use either `str()` or `json.dumps()` for bool\ - \ values.\n\n import 
json\n with open(output_dict_parameter_path,\ - \ 'w') as f:\n f.write(json.dumps(input_dict_parameter))\n\n with\ - \ open(output_list_parameter_path, 'w') as f:\n f.write(json.dumps(input_list_parameter))\n\ - \n" - image: python:3.9 -pipelineInfo: - name: preprocess -root: - dag: - outputs: - artifacts: - output_dataset_one: - artifactSelectors: - - outputArtifactKey: output_dataset_one - producerSubtask: preprocess - output_dataset_two_path: - artifactSelectors: - - outputArtifactKey: output_dataset_two_path - producerSubtask: preprocess - parameters: - output_bool_parameter_path: - valueFromParameter: - outputParameterKey: output_bool_parameter_path - producerSubtask: preprocess - output_dict_parameter_path: - valueFromParameter: - outputParameterKey: output_dict_parameter_path - producerSubtask: preprocess - output_list_parameter_path: - valueFromParameter: - outputParameterKey: output_list_parameter_path - producerSubtask: preprocess - output_parameter_path: - valueFromParameter: - outputParameterKey: output_parameter_path - producerSubtask: preprocess - tasks: - preprocess: - cachingOptions: - enableCache: true - componentRef: - name: comp-preprocess - inputs: - parameters: - input_dict_parameter: - componentInputParameter: input_dict_parameter - input_list_parameter: - componentInputParameter: input_list_parameter - message: - componentInputParameter: message - taskInfo: - name: preprocess - inputDefinitions: - parameters: - input_dict_parameter: - parameterType: STRUCT - input_list_parameter: - parameterType: LIST - message: - parameterType: STRING - outputDefinitions: - artifacts: - output_dataset_one: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - output_dataset_two_path: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - parameters: - output_bool_parameter_path: - parameterType: BOOLEAN - output_dict_parameter_path: - parameterType: STRUCT - output_list_parameter_path: - parameterType: LIST - output_parameter_path: - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/components/unsupported/output_named_tuple.py b/sdk/python/test_data/components/unsupported/output_named_tuple.py deleted file mode 100644 index 279f659f9d8..00000000000 --- a/sdk/python/test_data/components/unsupported/output_named_tuple.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2022 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from typing import NamedTuple - -from kfp.dsl import component -from kfp.dsl import Metrics - - -@component -def output_named_tuple( - dummy: str) -> NamedTuple('Outputs', [ - ('scalar', str), - ('metrics', Metrics), - ]): - scalar = '123' - - import json - metrics = json.dumps({ - 'metrics': [{ - 'name': 'accuracy', - 'numberValue': 0.9, - 'format': 'PERCENTAGE', - }] - }) - - from collections import namedtuple - output = namedtuple('Outputs', ['scalar', 'metrics']) - return output(scalar, metrics) diff --git a/sdk/python/test_data/components/unsupported/task_status.py b/sdk/python/test_data/components/unsupported/task_status.py deleted file mode 100644 index 908e77b6ee5..00000000000 --- a/sdk/python/test_data/components/unsupported/task_status.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2022 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from kfp.dsl import component -from kfp.dsl import PipelineTaskFinalStatus - - -@component -def task_status(user_input: str, status: PipelineTaskFinalStatus): - """Checks pipeline run status.""" - print('Pipeline status: ', status.state) - print('Job resource name: ', status.pipeline_job_resource_name) - print('Pipeline task name: ', status.pipeline_task_name) - print('Error code: ', status.error_code) - print('Error message: ', status.error_message) diff --git a/sdk/python/test_data/pipelines/component_with_optional_inputs.yaml b/sdk/python/test_data/pipelines/component_with_optional_inputs.yaml deleted file mode 100644 index b798786a856..00000000000 --- a/sdk/python/test_data/pipelines/component_with_optional_inputs.yaml +++ /dev/null @@ -1,73 +0,0 @@ -# PIPELINE DEFINITION -# Name: v2-component-optional-input -components: - comp-component-op: - executorLabel: exec-component-op - inputDefinitions: - parameters: - input1: - defaultValue: default value - isOptional: true - parameterType: STRING - input2: - isOptional: true - parameterType: STRING - input3: - isOptional: true - parameterType: STRING -deploymentSpec: - executors: - exec-component-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - component_op - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef component_op(\n input1: str = 'default value',\n input2:\ - \ Optional[str] = None,\n input3: Optional[str] = None,\n):\n print(f'input1:\ - \ {input1}, type: {type(input1)}')\n print(f'input2: {input2}, type:\ - \ {type(input2)}')\n print(f'input3: {input3}, type: {type(input3)}')\n\ - \n" - image: python:3.9 -pipelineInfo: - name: v2-component-optional-input -root: - dag: - tasks: - component-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-component-op - inputs: - parameters: - input1: - runtimeValue: - constant: Hello - input2: - runtimeValue: - constant: World - taskInfo: - name: component-op -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/component_with_pip_index_urls.yaml b/sdk/python/test_data/pipelines/component_with_pip_index_urls.yaml deleted file mode 100644 index 0358182f92b..00000000000 --- a/sdk/python/test_data/pipelines/component_with_pip_index_urls.yaml +++ /dev/null @@ -1,51 +0,0 @@ -# PIPELINE DEFINITION -# Name: v2-component-pip-index-urls -components: - comp-component-op: - executorLabel: exec-component-op -deploymentSpec: - executors: - exec-component-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - component_op - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location --index-url https://pypi.org/simple\ - \ --trusted-host https://pypi.org/simple 'kfp==2.7.0' '--no-deps' 'typing-extensions>=3.7.4,<5;\ - \ python_version<\"3.9\"' && python3 -m pip install --quiet --no-warn-script-location\ - \ --index-url https://pypi.org/simple --trusted-host https://pypi.org/simple\ - \ 'yapf' && \"$0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef component_op():\n import yapf\n print(dir(yapf))\n\n" - image: python:3.9 -pipelineInfo: - name: v2-component-pip-index-urls -root: - dag: - tasks: - component-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-component-op - taskInfo: - name: component-op -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/components_with_optional_artifacts.yaml b/sdk/python/test_data/pipelines/components_with_optional_artifacts.yaml deleted file mode 100644 index 457902a8a51..00000000000 --- a/sdk/python/test_data/pipelines/components_with_optional_artifacts.yaml +++ /dev/null @@ -1,244 +0,0 @@ -# PIPELINE DEFINITION -# Name: optional-artifact-pipeline -# Inputs: -# dataset1: system.Dataset -components: - comp-custom-artifact-printer: - executorLabel: exec-custom-artifact-printer - inputDefinitions: - artifacts: - artifact: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - isOptional: true - comp-custom-artifact-printer-2: - executorLabel: exec-custom-artifact-printer-2 - inputDefinitions: - artifacts: - artifact: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - isOptional: true - comp-importer: - executorLabel: exec-importer - inputDefinitions: - parameters: - uri: - parameterType: STRING - outputDefinitions: - artifacts: - artifact: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-inner-pipeline: - dag: - tasks: - python-artifact-printer: - cachingOptions: - enableCache: true - componentRef: - name: comp-python-artifact-printer - inputs: - artifacts: - artifact: - componentInputArtifact: dataset - taskInfo: - name: python-artifact-printer - inputDefinitions: - artifacts: - dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isOptional: true - comp-inner-pipeline-2: - dag: - tasks: - python-artifact-printer: - cachingOptions: - enableCache: true - componentRef: - name: comp-python-artifact-printer-2 - inputs: - artifacts: - artifact: - componentInputArtifact: dataset - taskInfo: - name: python-artifact-printer - inputDefinitions: - artifacts: - dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isOptional: true - comp-python-artifact-printer: - executorLabel: exec-python-artifact-printer - inputDefinitions: - artifacts: - artifact: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - isOptional: true - comp-python-artifact-printer-2: - executorLabel: exec-python-artifact-printer-2 - inputDefinitions: - artifacts: - artifact: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - isOptional: true 
-deploymentSpec: - executors: - exec-custom-artifact-printer: - container: - command: - - '{"IfPresent": {"InputName": "artifact", "Then": ["echo", "{{$.inputs.artifacts[''artifact''].uri}}"], - "Else": ["echo", "No artifact provided!"]}}' - image: alpine - exec-custom-artifact-printer-2: - container: - command: - - '{"IfPresent": {"InputName": "artifact", "Then": ["echo", "{{$.inputs.artifacts[''artifact''].uri}}"], - "Else": ["echo", "No artifact provided!"]}}' - image: alpine - exec-importer: - importer: - artifactUri: - constant: gs://ml-pipeline-playground/shakespeare1.txt - typeSchema: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - exec-python-artifact-printer: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - python_artifact_printer - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef python_artifact_printer(artifact: Optional[Input[Artifact]] =\ - \ None):\n if artifact is not None:\n print(artifact.name)\n \ - \ print(artifact.uri)\n print(artifact.metadata)\n else:\n\ - \ print('No artifact provided!')\n\n" - image: python:3.9 - exec-python-artifact-printer-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - python_artifact_printer - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef python_artifact_printer(artifact: Optional[Input[Artifact]] =\ - \ None):\n if artifact is not None:\n print(artifact.name)\n \ - \ print(artifact.uri)\n print(artifact.metadata)\n else:\n\ - \ print('No artifact provided!')\n\n" - image: python:3.9 -pipelineInfo: - name: optional-artifact-pipeline -root: - dag: - tasks: - custom-artifact-printer: - cachingOptions: - enableCache: true - componentRef: - name: comp-custom-artifact-printer - inputs: - artifacts: - artifact: - componentInputArtifact: dataset1 - taskInfo: - name: custom-artifact-printer - custom-artifact-printer-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-custom-artifact-printer-2 - taskInfo: - name: custom-artifact-printer-2 - importer: - cachingOptions: - enableCache: true - componentRef: - name: comp-importer - inputs: - parameters: - uri: - runtimeValue: - constant: gs://ml-pipeline-playground/shakespeare1.txt - taskInfo: - name: importer - inner-pipeline: - cachingOptions: - enableCache: true - componentRef: - name: comp-inner-pipeline - dependentTasks: - - importer - inputs: - artifacts: - dataset: - taskOutputArtifact: - outputArtifactKey: artifact - producerTask: importer - taskInfo: - name: inner-pipeline - inner-pipeline-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-inner-pipeline-2 - taskInfo: - name: inner-pipeline-2 - inputDefinitions: - artifacts: - dataset1: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isOptional: true -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/container_component_with_no_inputs.yaml b/sdk/python/test_data/pipelines/container_component_with_no_inputs.yaml deleted file mode 100644 index fba2c46ee03..00000000000 --- a/sdk/python/test_data/pipelines/container_component_with_no_inputs.yaml +++ /dev/null @@ -1,27 +0,0 @@ -# PIPELINE DEFINITION -# Name: v2-container-component-no-input -components: - comp-hello-world-container: - executorLabel: exec-hello-world-container -deploymentSpec: - executors: - exec-hello-world-container: - container: - command: - - echo - - hello world - image: python:3.9 -pipelineInfo: - name: v2-container-component-no-input -root: - dag: - tasks: - hello-world-container: - cachingOptions: - enableCache: true - componentRef: - name: comp-hello-world-container - taskInfo: - name: hello-world-container -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/cross_loop_after_topology.yaml b/sdk/python/test_data/pipelines/cross_loop_after_topology.yaml deleted file mode 100644 index 87d19b30d62..00000000000 --- a/sdk/python/test_data/pipelines/cross_loop_after_topology.yaml +++ /dev/null @@ -1,500 +0,0 @@ -# PIPELINE DEFINITION -# Name: my-pipeline -components: - comp-for-loop-10: - dag: - tasks: - print-op-5: - cachingOptions: - enableCache: true - componentRef: - name: 
comp-print-op-5 - inputs: - parameters: - message: - runtimeValue: - constant: five - taskInfo: - name: print-op-5 - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-9: - parameterType: NUMBER_INTEGER - comp-for-loop-12: - dag: - tasks: - for-loop-14: - componentRef: - name: comp-for-loop-14 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-13 - items: - raw: '[1, 2]' - taskInfo: - name: for-loop-14 - print-op-8: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-8 - dependentTasks: - - for-loop-14 - inputs: - parameters: - message: - runtimeValue: - constant: eight - taskInfo: - name: print-op-8 - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-11: - parameterType: NUMBER_INTEGER - comp-for-loop-14: - dag: - tasks: - print-op-7: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-7 - inputs: - parameters: - message: - runtimeValue: - constant: seven - taskInfo: - name: print-op-7 - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-13: - parameterType: NUMBER_INTEGER - comp-for-loop-2: - dag: - tasks: - print-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op - inputs: - parameters: - message: - runtimeValue: - constant: one - taskInfo: - name: print-op - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-1: - parameterType: NUMBER_INTEGER - comp-for-loop-4: - dag: - tasks: - print-op-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-2 - inputs: - parameters: - message: - runtimeValue: - constant: two - taskInfo: - name: print-op-2 - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-3: - parameterType: NUMBER_INTEGER - comp-for-loop-6: - dag: - tasks: - for-loop-8: - componentRef: - name: comp-for-loop-8 - dependentTasks: - - print-op-3 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-7 - items: - raw: '[1, 2]' - taskInfo: - name: for-loop-8 - print-op-3: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-3 - inputs: - parameters: - message: - runtimeValue: - constant: three - taskInfo: - name: print-op-3 - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-5: - parameterType: NUMBER_INTEGER - comp-for-loop-8: - dag: - tasks: - print-op-4: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-4 - inputs: - parameters: - message: - runtimeValue: - constant: four - taskInfo: - name: print-op-4 - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-7: - parameterType: NUMBER_INTEGER - comp-print-op: - executorLabel: exec-print-op - inputDefinitions: - parameters: - message: - parameterType: STRING - comp-print-op-2: - executorLabel: exec-print-op-2 - inputDefinitions: - parameters: - message: - parameterType: STRING - comp-print-op-3: - executorLabel: exec-print-op-3 - inputDefinitions: - parameters: - message: - parameterType: STRING - comp-print-op-4: - executorLabel: exec-print-op-4 - inputDefinitions: - parameters: - message: - parameterType: STRING - comp-print-op-5: - executorLabel: exec-print-op-5 - inputDefinitions: - parameters: - message: - parameterType: STRING - comp-print-op-6: - executorLabel: exec-print-op-6 - inputDefinitions: - parameters: - message: - parameterType: STRING - comp-print-op-7: - executorLabel: exec-print-op-7 - inputDefinitions: - parameters: - message: - parameterType: STRING - comp-print-op-8: - executorLabel: exec-print-op-8 - inputDefinitions: - 
parameters: - message: - parameterType: STRING -deploymentSpec: - executors: - exec-print-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(message: str):\n print(message)\n\n" - image: python:3.9 - exec-print-op-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(message: str):\n print(message)\n\n" - image: python:3.9 - exec-print-op-3: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(message: str):\n print(message)\n\n" - image: python:3.9 - exec-print-op-4: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(message: str):\n print(message)\n\n" - image: python:3.9 - exec-print-op-5: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(message: str):\n print(message)\n\n" - image: python:3.9 - exec-print-op-6: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(message: str):\n print(message)\n\n" - image: python:3.9 - exec-print-op-7: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(message: str):\n print(message)\n\n" - image: python:3.9 - exec-print-op-8: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(message: str):\n print(message)\n\n" - image: python:3.9 -pipelineInfo: - name: my-pipeline -root: - dag: - tasks: - for-loop-10: - componentRef: - name: comp-for-loop-10 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-9 - items: - raw: '[1, 2]' - taskInfo: - name: for-loop-10 - for-loop-12: - componentRef: - name: comp-for-loop-12 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-11 - items: - raw: '[1, 2]' - taskInfo: - name: for-loop-12 - for-loop-2: - componentRef: - name: comp-for-loop-2 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-1 - items: - raw: '[1, 2]' - taskInfo: - name: for-loop-2 - for-loop-4: - componentRef: - name: comp-for-loop-4 - dependentTasks: - - for-loop-2 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-3 - items: - raw: '[1, 2]' - taskInfo: - name: for-loop-4 - for-loop-6: - componentRef: - name: comp-for-loop-6 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-5 - items: - raw: '[1, 2]' - taskInfo: - name: for-loop-6 - print-op-6: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-6 - dependentTasks: - - for-loop-10 - inputs: - parameters: - message: - runtimeValue: - constant: six - taskInfo: - name: print-op-6 -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/if_elif_else_complex.yaml b/sdk/python/test_data/pipelines/if_elif_else_complex.yaml deleted file mode 100644 index df2ff1c76ca..00000000000 --- a/sdk/python/test_data/pipelines/if_elif_else_complex.yaml +++ /dev/null @@ -1,1118 +0,0 @@ -# PIPELINE DEFINITION -# Name: lucky-number-pipeline -# Inputs: -# add_drumroll: bool [Default: True] -# repeat_if_lucky_number: bool [Default: True] -# trials: list [Default: [1.0, 2.0, 3.0]] -components: - comp-condition-11: - dag: - outputs: - parameters: - pipelinechannel--print-and-return-5-Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: print-and-return-5 - tasks: - print-and-return-5: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-and-return-5 - inputs: - parameters: - text: - runtimeValue: - constant: Got a high even number! 
- taskInfo: - name: print-and-return-5 - inputDefinitions: - parameters: - pipelinechannel--int-0-to-9999-Output: - parameterType: NUMBER_INTEGER - pipelinechannel--is-even-or-odd-2-Output: - parameterType: STRING - outputDefinitions: - parameters: - pipelinechannel--print-and-return-5-Output: - parameterType: STRING - comp-condition-12: - dag: - outputs: - parameters: - pipelinechannel--print-and-return-6-Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: print-and-return-6 - tasks: - print-and-return-6: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-and-return-6 - inputs: - parameters: - text: - runtimeValue: - constant: Got a high odd number! - taskInfo: - name: print-and-return-6 - inputDefinitions: - parameters: - pipelinechannel--int-0-to-9999-Output: - parameterType: NUMBER_INTEGER - pipelinechannel--is-even-or-odd-2-Output: - parameterType: STRING - outputDefinitions: - parameters: - pipelinechannel--print-and-return-6-Output: - parameterType: STRING - comp-condition-13: - dag: - tasks: - condition-14: - componentRef: - name: comp-condition-14 - inputs: - parameters: - pipelinechannel--int-0-to-9999-Output: - componentInputParameter: pipelinechannel--int-0-to-9999-Output - pipelinechannel--repeat_if_lucky_number: - componentInputParameter: pipelinechannel--repeat_if_lucky_number - taskInfo: - name: condition-14 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--repeat_if_lucky_number'] - == true - print-and-return-8: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-and-return-8 - inputs: - parameters: - text: - runtimeValue: - constant: 'Announcing: Got the lucky number 5000! A one in 10,000 - chance.' - taskInfo: - name: print-and-return-8 - inputDefinitions: - parameters: - pipelinechannel--int-0-to-9999-Output: - parameterType: NUMBER_INTEGER - pipelinechannel--repeat_if_lucky_number: - parameterType: BOOLEAN - comp-condition-14: - dag: - tasks: - for-loop-16: - componentRef: - name: comp-for-loop-16 - inputs: - parameters: - pipelinechannel--int-0-to-9999-Output: - componentInputParameter: pipelinechannel--int-0-to-9999-Output - pipelinechannel--repeat_if_lucky_number: - componentInputParameter: pipelinechannel--repeat_if_lucky_number - parameterIterator: - itemInput: pipelinechannel--loop-item-param-15 - items: - raw: '[1, 2]' - taskInfo: - name: for-loop-16 - inputDefinitions: - parameters: - pipelinechannel--int-0-to-9999-Output: - parameterType: NUMBER_INTEGER - pipelinechannel--repeat_if_lucky_number: - parameterType: BOOLEAN - comp-condition-2: - dag: - tasks: - condition-3: - componentRef: - name: comp-condition-3 - inputs: - parameters: - pipelinechannel--add_drumroll: - componentInputParameter: pipelinechannel--add_drumroll - pipelinechannel--trials-loop-item: - componentInputParameter: pipelinechannel--trials-loop-item - taskInfo: - name: condition-3 - triggerPolicy: - condition: int(inputs.parameter_values['pipelinechannel--trials-loop-item']) - == 3 - inputDefinitions: - parameters: - pipelinechannel--add_drumroll: - parameterType: BOOLEAN - pipelinechannel--trials-loop-item: - parameterType: NUMBER_INTEGER - comp-condition-3: - dag: - tasks: - print-and-return: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-and-return - inputs: - parameters: - text: - runtimeValue: - constant: Adding drumroll on last trial! 
- taskInfo: - name: print-and-return - inputDefinitions: - parameters: - pipelinechannel--add_drumroll: - parameterType: BOOLEAN - pipelinechannel--trials-loop-item: - parameterType: NUMBER_INTEGER - comp-condition-6: - dag: - outputs: - parameters: - pipelinechannel--print-and-return-2-Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: print-and-return-2 - tasks: - print-and-return-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-and-return-2 - inputs: - parameters: - text: - runtimeValue: - constant: Got a low even number! - taskInfo: - name: print-and-return-2 - inputDefinitions: - parameters: - pipelinechannel--int-0-to-9999-Output: - parameterType: NUMBER_INTEGER - pipelinechannel--is-even-or-odd-Output: - parameterType: STRING - outputDefinitions: - parameters: - pipelinechannel--print-and-return-2-Output: - parameterType: STRING - comp-condition-7: - dag: - outputs: - parameters: - pipelinechannel--print-and-return-3-Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: print-and-return-3 - tasks: - print-and-return-3: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-and-return-3 - inputs: - parameters: - text: - runtimeValue: - constant: Got a low odd number! - taskInfo: - name: print-and-return-3 - inputDefinitions: - parameters: - pipelinechannel--int-0-to-9999-Output: - parameterType: NUMBER_INTEGER - pipelinechannel--is-even-or-odd-Output: - parameterType: STRING - outputDefinitions: - parameters: - pipelinechannel--print-and-return-3-Output: - parameterType: STRING - comp-condition-8: - dag: - tasks: - condition-branches-5: - componentRef: - name: comp-condition-branches-5 - dependentTasks: - - is-even-or-odd - inputs: - parameters: - pipelinechannel--int-0-to-9999-Output: - componentInputParameter: pipelinechannel--int-0-to-9999-Output - pipelinechannel--is-even-or-odd-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: is-even-or-odd - taskInfo: - name: condition-branches-5 - is-even-or-odd: - cachingOptions: - enableCache: true - componentRef: - name: comp-is-even-or-odd - inputs: - parameters: - num: - componentInputParameter: pipelinechannel--int-0-to-9999-Output - taskInfo: - name: is-even-or-odd - print-and-return-4: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-and-return-4 - dependentTasks: - - condition-branches-5 - inputs: - parameters: - text: - taskOutputParameter: - outputParameterKey: pipelinechannel--condition-branches-5-oneof-1 - producerTask: condition-branches-5 - taskInfo: - name: print-and-return-4 - inputDefinitions: - parameters: - pipelinechannel--int-0-to-9999-Output: - parameterType: NUMBER_INTEGER - comp-condition-9: - dag: - tasks: - condition-branches-10: - componentRef: - name: comp-condition-branches-10 - dependentTasks: - - is-even-or-odd-2 - inputs: - parameters: - pipelinechannel--int-0-to-9999-Output: - componentInputParameter: pipelinechannel--int-0-to-9999-Output - pipelinechannel--is-even-or-odd-2-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: is-even-or-odd-2 - taskInfo: - name: condition-branches-10 - is-even-or-odd-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-is-even-or-odd-2 - inputs: - parameters: - num: - componentInputParameter: pipelinechannel--int-0-to-9999-Output - taskInfo: - name: is-even-or-odd-2 - print-and-return-7: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-and-return-7 - dependentTasks: - - 
condition-branches-10 - inputs: - parameters: - text: - taskOutputParameter: - outputParameterKey: pipelinechannel--condition-branches-10-oneof-1 - producerTask: condition-branches-10 - taskInfo: - name: print-and-return-7 - inputDefinitions: - parameters: - pipelinechannel--int-0-to-9999-Output: - parameterType: NUMBER_INTEGER - comp-condition-branches-10: - dag: - outputs: - parameters: - pipelinechannel--condition-branches-10-oneof-1: - valueFromOneof: - parameterSelectors: - - outputParameterKey: pipelinechannel--print-and-return-5-Output - producerSubtask: condition-11 - - outputParameterKey: pipelinechannel--print-and-return-6-Output - producerSubtask: condition-12 - tasks: - condition-11: - componentRef: - name: comp-condition-11 - inputs: - parameters: - pipelinechannel--int-0-to-9999-Output: - componentInputParameter: pipelinechannel--int-0-to-9999-Output - pipelinechannel--is-even-or-odd-2-Output: - componentInputParameter: pipelinechannel--is-even-or-odd-2-Output - taskInfo: - name: condition-11 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--is-even-or-odd-2-Output'] - == 'even' - condition-12: - componentRef: - name: comp-condition-12 - inputs: - parameters: - pipelinechannel--int-0-to-9999-Output: - componentInputParameter: pipelinechannel--int-0-to-9999-Output - pipelinechannel--is-even-or-odd-2-Output: - componentInputParameter: pipelinechannel--is-even-or-odd-2-Output - taskInfo: - name: condition-12 - triggerPolicy: - condition: '!(inputs.parameter_values[''pipelinechannel--is-even-or-odd-2-Output''] - == ''even'')' - inputDefinitions: - parameters: - pipelinechannel--int-0-to-9999-Output: - parameterType: NUMBER_INTEGER - pipelinechannel--is-even-or-odd-2-Output: - parameterType: STRING - outputDefinitions: - parameters: - pipelinechannel--condition-branches-10-oneof-1: - parameterType: STRING - comp-condition-branches-4: - dag: - tasks: - condition-13: - componentRef: - name: comp-condition-13 - inputs: - parameters: - pipelinechannel--int-0-to-9999-Output: - componentInputParameter: pipelinechannel--int-0-to-9999-Output - pipelinechannel--repeat_if_lucky_number: - componentInputParameter: pipelinechannel--repeat_if_lucky_number - taskInfo: - name: condition-13 - triggerPolicy: - condition: '!(int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) - < 5000) && !(int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) - > 5000)' - condition-8: - componentRef: - name: comp-condition-8 - inputs: - parameters: - pipelinechannel--int-0-to-9999-Output: - componentInputParameter: pipelinechannel--int-0-to-9999-Output - taskInfo: - name: condition-8 - triggerPolicy: - condition: int(inputs.parameter_values['pipelinechannel--int-0-to-9999-Output']) - < 5000 - condition-9: - componentRef: - name: comp-condition-9 - inputs: - parameters: - pipelinechannel--int-0-to-9999-Output: - componentInputParameter: pipelinechannel--int-0-to-9999-Output - taskInfo: - name: condition-9 - triggerPolicy: - condition: '!(int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) - < 5000) && int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) - > 5000' - inputDefinitions: - parameters: - pipelinechannel--int-0-to-9999-Output: - parameterType: NUMBER_INTEGER - pipelinechannel--repeat_if_lucky_number: - parameterType: BOOLEAN - comp-condition-branches-5: - dag: - outputs: - parameters: - pipelinechannel--condition-branches-5-oneof-1: - valueFromOneof: - parameterSelectors: - - outputParameterKey: 
pipelinechannel--print-and-return-2-Output - producerSubtask: condition-6 - - outputParameterKey: pipelinechannel--print-and-return-3-Output - producerSubtask: condition-7 - tasks: - condition-6: - componentRef: - name: comp-condition-6 - inputs: - parameters: - pipelinechannel--int-0-to-9999-Output: - componentInputParameter: pipelinechannel--int-0-to-9999-Output - pipelinechannel--is-even-or-odd-Output: - componentInputParameter: pipelinechannel--is-even-or-odd-Output - taskInfo: - name: condition-6 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--is-even-or-odd-Output'] - == 'even' - condition-7: - componentRef: - name: comp-condition-7 - inputs: - parameters: - pipelinechannel--int-0-to-9999-Output: - componentInputParameter: pipelinechannel--int-0-to-9999-Output - pipelinechannel--is-even-or-odd-Output: - componentInputParameter: pipelinechannel--is-even-or-odd-Output - taskInfo: - name: condition-7 - triggerPolicy: - condition: '!(inputs.parameter_values[''pipelinechannel--is-even-or-odd-Output''] - == ''even'')' - inputDefinitions: - parameters: - pipelinechannel--int-0-to-9999-Output: - parameterType: NUMBER_INTEGER - pipelinechannel--is-even-or-odd-Output: - parameterType: STRING - outputDefinitions: - parameters: - pipelinechannel--condition-branches-5-oneof-1: - parameterType: STRING - comp-for-loop-1: - dag: - outputs: - parameters: - pipelinechannel--int-0-to-9999-Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: int-0-to-9999 - tasks: - condition-2: - componentRef: - name: comp-condition-2 - inputs: - parameters: - pipelinechannel--add_drumroll: - componentInputParameter: pipelinechannel--add_drumroll - pipelinechannel--trials-loop-item: - componentInputParameter: pipelinechannel--trials-loop-item - taskInfo: - name: condition-2 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--add_drumroll'] == - true - condition-branches-4: - componentRef: - name: comp-condition-branches-4 - dependentTasks: - - int-0-to-9999 - inputs: - parameters: - pipelinechannel--int-0-to-9999-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: int-0-to-9999 - pipelinechannel--repeat_if_lucky_number: - componentInputParameter: pipelinechannel--repeat_if_lucky_number - taskInfo: - name: condition-branches-4 - int-0-to-9999: - cachingOptions: {} - componentRef: - name: comp-int-0-to-9999 - taskInfo: - name: int-0-to-9999 - inputDefinitions: - parameters: - pipelinechannel--add_drumroll: - parameterType: BOOLEAN - pipelinechannel--repeat_if_lucky_number: - parameterType: BOOLEAN - pipelinechannel--trials: - parameterType: LIST - pipelinechannel--trials-loop-item: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - pipelinechannel--int-0-to-9999-Output: - parameterType: LIST - comp-for-loop-16: - dag: - tasks: - print-and-return-9: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-and-return-9 - inputs: - parameters: - text: - runtimeValue: - constant: 'Announcing again: Got the lucky number 5000! A one in - 10,000 chance.' 
- taskInfo: - name: print-and-return-9 - inputDefinitions: - parameters: - pipelinechannel--int-0-to-9999-Output: - parameterType: NUMBER_INTEGER - pipelinechannel--loop-item-param-15: - parameterType: NUMBER_INTEGER - pipelinechannel--repeat_if_lucky_number: - parameterType: BOOLEAN - comp-int-0-to-9999: - executorLabel: exec-int-0-to-9999 - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER - comp-is-even-or-odd: - executorLabel: exec-is-even-or-odd - inputDefinitions: - parameters: - num: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-is-even-or-odd-2: - executorLabel: exec-is-even-or-odd-2 - inputDefinitions: - parameters: - num: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-and-return: - executorLabel: exec-print-and-return - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-and-return-2: - executorLabel: exec-print-and-return-2 - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-and-return-3: - executorLabel: exec-print-and-return-3 - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-and-return-4: - executorLabel: exec-print-and-return-4 - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-and-return-5: - executorLabel: exec-print-and-return-5 - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-and-return-6: - executorLabel: exec-print-and-return-6 - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-and-return-7: - executorLabel: exec-print-and-return-7 - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-and-return-8: - executorLabel: exec-print-and-return-8 - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-and-return-9: - executorLabel: exec-print-and-return-9 - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-ints: - executorLabel: exec-print-ints - inputDefinitions: - parameters: - ints: - parameterType: LIST -deploymentSpec: - executors: - exec-int-0-to-9999: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - int_0_to_9999 - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef int_0_to_9999() -> int:\n import random\n return random.randint(0,\ - \ 9999)\n\n" - image: python:3.9 - exec-is-even-or-odd: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - is_even_or_odd - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef is_even_or_odd(num: int) -> str:\n return 'odd' if num % 2\ - \ else 'even'\n\n" - image: python:3.9 - exec-is-even-or-odd-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - is_even_or_odd - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef is_even_or_odd(num: int) -> str:\n return 'odd' if num % 2\ - \ else 'even'\n\n" - image: python:3.9 - exec-print-and-return: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_and_return - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ - \ text\n\n" - image: python:3.9 - exec-print-and-return-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_and_return - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ - \ text\n\n" - image: python:3.9 - exec-print-and-return-3: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_and_return - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ - \ text\n\n" - image: python:3.9 - exec-print-and-return-4: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_and_return - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ - \ text\n\n" - image: python:3.9 - exec-print-and-return-5: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_and_return - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ - \ text\n\n" - image: python:3.9 - exec-print-and-return-6: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_and_return - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ - \ text\n\n" - image: python:3.9 - exec-print-and-return-7: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_and_return - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ - \ text\n\n" - image: python:3.9 - exec-print-and-return-8: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_and_return - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ - \ text\n\n" - image: python:3.9 - exec-print-and-return-9: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_and_return - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ - \ text\n\n" - image: python:3.9 - exec-print-ints: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_ints - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_ints(ints: List[int]):\n print(ints)\n\n" - image: python:3.9 -pipelineInfo: - name: lucky-number-pipeline -root: - dag: - tasks: - for-loop-1: - componentRef: - name: comp-for-loop-1 - inputs: - parameters: - pipelinechannel--add_drumroll: - componentInputParameter: add_drumroll - pipelinechannel--repeat_if_lucky_number: - componentInputParameter: repeat_if_lucky_number - pipelinechannel--trials: - componentInputParameter: trials - parameterIterator: - itemInput: pipelinechannel--trials-loop-item - items: - inputParameter: pipelinechannel--trials - taskInfo: - name: for-loop-1 - print-ints: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-ints - dependentTasks: - - for-loop-1 - inputs: - parameters: - ints: - taskOutputParameter: - outputParameterKey: pipelinechannel--int-0-to-9999-Output - producerTask: for-loop-1 - taskInfo: - name: print-ints - inputDefinitions: - parameters: - add_drumroll: - defaultValue: true - isOptional: true - parameterType: BOOLEAN - repeat_if_lucky_number: - defaultValue: true - isOptional: true - parameterType: BOOLEAN - trials: - defaultValue: - - 1.0 - - 2.0 - - 3.0 - isOptional: true - parameterType: LIST -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/if_elif_else_with_oneof_parameters.yaml b/sdk/python/test_data/pipelines/if_elif_else_with_oneof_parameters.yaml deleted file mode 100644 index f6414225eda..00000000000 --- a/sdk/python/test_data/pipelines/if_elif_else_with_oneof_parameters.yaml +++ /dev/null @@ -1,420 +0,0 @@ -# PIPELINE DEFINITION -# Name: outer-pipeline -# Outputs: -# Output: str -components: - comp-condition-2: - dag: - outputs: - parameters: - pipelinechannel--print-and-return-Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: print-and-return - tasks: - print-and-return: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-and-return - inputs: - parameters: - text: - runtimeValue: - constant: Got heads! - taskInfo: - name: print-and-return - inputDefinitions: - parameters: - pipelinechannel--flip-three-sided-die-Output: - parameterType: STRING - outputDefinitions: - parameters: - pipelinechannel--print-and-return-Output: - parameterType: STRING - comp-condition-3: - dag: - outputs: - parameters: - pipelinechannel--print-and-return-2-Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: print-and-return-2 - tasks: - print-and-return-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-and-return-2 - inputs: - parameters: - text: - runtimeValue: - constant: Got tails! 
- taskInfo: - name: print-and-return-2 - inputDefinitions: - parameters: - pipelinechannel--flip-three-sided-die-Output: - parameterType: STRING - outputDefinitions: - parameters: - pipelinechannel--print-and-return-2-Output: - parameterType: STRING - comp-condition-4: - dag: - outputs: - parameters: - pipelinechannel--special-print-and-return-output_key: - valueFromParameter: - outputParameterKey: output_key - producerSubtask: special-print-and-return - tasks: - special-print-and-return: - cachingOptions: - enableCache: true - componentRef: - name: comp-special-print-and-return - inputs: - parameters: - text: - runtimeValue: - constant: Draw! - taskInfo: - name: special-print-and-return - inputDefinitions: - parameters: - pipelinechannel--flip-three-sided-die-Output: - parameterType: STRING - outputDefinitions: - parameters: - pipelinechannel--special-print-and-return-output_key: - parameterType: STRING - comp-condition-branches-1: - dag: - outputs: - parameters: - pipelinechannel--condition-branches-1-oneof-1: - valueFromOneof: - parameterSelectors: - - outputParameterKey: pipelinechannel--print-and-return-Output - producerSubtask: condition-2 - - outputParameterKey: pipelinechannel--print-and-return-2-Output - producerSubtask: condition-3 - - outputParameterKey: pipelinechannel--special-print-and-return-output_key - producerSubtask: condition-4 - tasks: - condition-2: - componentRef: - name: comp-condition-2 - inputs: - parameters: - pipelinechannel--flip-three-sided-die-Output: - componentInputParameter: pipelinechannel--flip-three-sided-die-Output - taskInfo: - name: condition-2 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--flip-three-sided-die-Output'] - == 'heads' - condition-3: - componentRef: - name: comp-condition-3 - inputs: - parameters: - pipelinechannel--flip-three-sided-die-Output: - componentInputParameter: pipelinechannel--flip-three-sided-die-Output - taskInfo: - name: condition-3 - triggerPolicy: - condition: '!(inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] - == ''heads'') && inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] - == ''tails''' - condition-4: - componentRef: - name: comp-condition-4 - inputs: - parameters: - pipelinechannel--flip-three-sided-die-Output: - componentInputParameter: pipelinechannel--flip-three-sided-die-Output - taskInfo: - name: condition-4 - triggerPolicy: - condition: '!(inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] - == ''heads'') && !(inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] - == ''tails'')' - inputDefinitions: - parameters: - pipelinechannel--flip-three-sided-die-Output: - parameterType: STRING - outputDefinitions: - parameters: - pipelinechannel--condition-branches-1-oneof-1: - parameterType: STRING - comp-flip-three-sided-die: - executorLabel: exec-flip-three-sided-die - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-and-return: - executorLabel: exec-print-and-return - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-and-return-2: - executorLabel: exec-print-and-return-2 - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-and-return-3: - executorLabel: exec-print-and-return-3 - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - parameters: - 
Output: - parameterType: STRING - comp-roll-die-pipeline: - dag: - outputs: - parameters: - Output: - valueFromParameter: - outputParameterKey: pipelinechannel--condition-branches-1-oneof-1 - producerSubtask: condition-branches-1 - tasks: - condition-branches-1: - componentRef: - name: comp-condition-branches-1 - dependentTasks: - - flip-three-sided-die - inputs: - parameters: - pipelinechannel--flip-three-sided-die-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: flip-three-sided-die - taskInfo: - name: condition-branches-1 - flip-three-sided-die: - cachingOptions: - enableCache: true - componentRef: - name: comp-flip-three-sided-die - taskInfo: - name: flip-three-sided-die - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-special-print-and-return: - executorLabel: exec-special-print-and-return - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - parameters: - output_key: - parameterType: STRING -deploymentSpec: - executors: - exec-flip-three-sided-die: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - flip_three_sided_die - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef flip_three_sided_die() -> str:\n import random\n val =\ - \ random.randint(0, 2)\n\n if val == 0:\n return 'heads'\n \ - \ elif val == 1:\n return 'tails'\n else:\n return 'draw'\n\ - \n" - image: python:3.9 - exec-print-and-return: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_and_return - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ - \ text\n\n" - image: python:3.9 - exec-print-and-return-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_and_return - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ - \ text\n\n" - image: python:3.9 - exec-print-and-return-3: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_and_return - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ - \ text\n\n" - image: python:3.9 - exec-special-print-and-return: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - special_print_and_return - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef special_print_and_return(text: str, output_key: dsl.OutputPath(str)):\n\ - \ print('Got the special state:', text)\n with open(output_key, 'w')\ - \ as f:\n f.write(text)\n\n" - image: python:3.9 -pipelineInfo: - name: outer-pipeline -root: - dag: - outputs: - parameters: - Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: print-and-return - tasks: - print-and-return: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-and-return-3 - dependentTasks: - - roll-die-pipeline - inputs: - parameters: - text: - taskOutputParameter: - outputParameterKey: Output - producerTask: roll-die-pipeline - taskInfo: - name: print-and-return - roll-die-pipeline: - cachingOptions: - enableCache: true - componentRef: - name: comp-roll-die-pipeline - taskInfo: - name: roll-die-pipeline - outputDefinitions: - parameters: - Output: - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/if_else_with_oneof_artifacts.yaml b/sdk/python/test_data/pipelines/if_else_with_oneof_artifacts.yaml deleted file mode 100644 index 7ce1208b87c..00000000000 --- a/sdk/python/test_data/pipelines/if_else_with_oneof_artifacts.yaml +++ /dev/null @@ -1,380 +0,0 @@ -# PIPELINE DEFINITION -# Name: outer-pipeline -components: - comp-condition-2: - dag: - outputs: - artifacts: - pipelinechannel--param-to-artifact-a: - artifactSelectors: - - outputArtifactKey: a - producerSubtask: param-to-artifact - tasks: - param-to-artifact: - cachingOptions: - enableCache: true - componentRef: - name: comp-param-to-artifact - inputs: - parameters: - val: - componentInputParameter: pipelinechannel--flip-coin-Output - taskInfo: - name: param-to-artifact - inputDefinitions: - parameters: - pipelinechannel--flip-coin-Output: - parameterType: STRING - outputDefinitions: - artifacts: - pipelinechannel--param-to-artifact-a: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - comp-condition-3: - dag: - outputs: - artifacts: - pipelinechannel--param-to-artifact-2-a: - artifactSelectors: - - outputArtifactKey: a - producerSubtask: param-to-artifact-2 - tasks: - param-to-artifact-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-param-to-artifact-2 - inputs: - parameters: - val: - componentInputParameter: pipelinechannel--flip-coin-Output - taskInfo: - name: param-to-artifact-2 - inputDefinitions: - parameters: - pipelinechannel--flip-coin-Output: - parameterType: STRING - outputDefinitions: - artifacts: - pipelinechannel--param-to-artifact-2-a: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - comp-condition-branches-1: - dag: - outputs: - artifacts: - pipelinechannel--condition-branches-1-oneof-1: - artifactSelectors: - - outputArtifactKey: pipelinechannel--param-to-artifact-a - producerSubtask: condition-2 - - outputArtifactKey: 
pipelinechannel--param-to-artifact-2-a - producerSubtask: condition-3 - tasks: - condition-2: - componentRef: - name: comp-condition-2 - inputs: - parameters: - pipelinechannel--flip-coin-Output: - componentInputParameter: pipelinechannel--flip-coin-Output - taskInfo: - name: condition-2 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--flip-coin-Output'] - == 'heads' - condition-3: - componentRef: - name: comp-condition-3 - inputs: - parameters: - pipelinechannel--flip-coin-Output: - componentInputParameter: pipelinechannel--flip-coin-Output - taskInfo: - name: condition-3 - triggerPolicy: - condition: '!(inputs.parameter_values[''pipelinechannel--flip-coin-Output''] - == ''heads'')' - inputDefinitions: - parameters: - pipelinechannel--flip-coin-Output: - parameterType: STRING - outputDefinitions: - artifacts: - pipelinechannel--condition-branches-1-oneof-1: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - comp-flip-coin: - executorLabel: exec-flip-coin - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-flip-coin-pipeline: - dag: - outputs: - artifacts: - Output: - artifactSelectors: - - outputArtifactKey: pipelinechannel--condition-branches-1-oneof-1 - producerSubtask: condition-branches-1 - tasks: - condition-branches-1: - componentRef: - name: comp-condition-branches-1 - dependentTasks: - - flip-coin - inputs: - parameters: - pipelinechannel--flip-coin-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: flip-coin - taskInfo: - name: condition-branches-1 - flip-coin: - cachingOptions: - enableCache: true - componentRef: - name: comp-flip-coin - taskInfo: - name: flip-coin - print-artifact: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-artifact - dependentTasks: - - condition-branches-1 - inputs: - artifacts: - a: - taskOutputArtifact: - outputArtifactKey: pipelinechannel--condition-branches-1-oneof-1 - producerTask: condition-branches-1 - taskInfo: - name: print-artifact - outputDefinitions: - artifacts: - Output: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - comp-param-to-artifact: - executorLabel: exec-param-to-artifact - inputDefinitions: - parameters: - val: - parameterType: STRING - outputDefinitions: - artifacts: - a: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - comp-param-to-artifact-2: - executorLabel: exec-param-to-artifact-2 - inputDefinitions: - parameters: - val: - parameterType: STRING - outputDefinitions: - artifacts: - a: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - comp-print-artifact: - executorLabel: exec-print-artifact - inputDefinitions: - artifacts: - a: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - comp-print-artifact-2: - executorLabel: exec-print-artifact-2 - inputDefinitions: - artifacts: - a: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 -deploymentSpec: - executors: - exec-flip-coin: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - flip_coin - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef flip_coin() -> str:\n import random\n return 'heads' if\ - \ random.randint(0, 1) == 0 else 'tails'\n\n" - image: python:3.9 - exec-param-to-artifact: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - param_to_artifact - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef param_to_artifact(val: str, a: Output[Artifact]):\n with open(a.path,\ - \ 'w') as f:\n f.write(val)\n\n" - image: python:3.9 - exec-param-to-artifact-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - param_to_artifact - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef param_to_artifact(val: str, a: Output[Artifact]):\n with open(a.path,\ - \ 'w') as f:\n f.write(val)\n\n" - image: python:3.9 - exec-print-artifact: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_artifact - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_artifact(a: Input[Artifact]):\n with open(a.path) as\ - \ f:\n print(f.read())\n\n" - image: python:3.9 - exec-print-artifact-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_artifact - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_artifact(a: Input[Artifact]):\n with open(a.path) as\ - \ f:\n print(f.read())\n\n" - image: python:3.9 -pipelineInfo: - name: outer-pipeline -root: - dag: - tasks: - flip-coin-pipeline: - cachingOptions: - enableCache: true - componentRef: - name: comp-flip-coin-pipeline - taskInfo: - name: flip-coin-pipeline - print-artifact: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-artifact-2 - dependentTasks: - - flip-coin-pipeline - inputs: - artifacts: - a: - taskOutputArtifact: - outputArtifactKey: Output - producerTask: flip-coin-pipeline - taskInfo: - name: print-artifact -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/if_else_with_oneof_parameters.yaml b/sdk/python/test_data/pipelines/if_else_with_oneof_parameters.yaml deleted file mode 100644 index aa2c7cf5a9f..00000000000 --- a/sdk/python/test_data/pipelines/if_else_with_oneof_parameters.yaml +++ /dev/null @@ -1,313 +0,0 @@ -# PIPELINE DEFINITION -# Name: flip-coin-pipeline -# Outputs: -# Output: str -components: - comp-condition-2: - dag: - outputs: - parameters: - pipelinechannel--print-and-return-Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: print-and-return - tasks: - print-and-return: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-and-return - inputs: - parameters: - text: - runtimeValue: - constant: Got heads! 
- taskInfo: - name: print-and-return - inputDefinitions: - parameters: - pipelinechannel--flip-coin-Output: - parameterType: STRING - outputDefinitions: - parameters: - pipelinechannel--print-and-return-Output: - parameterType: STRING - comp-condition-3: - dag: - outputs: - parameters: - pipelinechannel--print-and-return-2-Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: print-and-return-2 - tasks: - print-and-return-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-and-return-2 - inputs: - parameters: - text: - runtimeValue: - constant: Got tails! - taskInfo: - name: print-and-return-2 - inputDefinitions: - parameters: - pipelinechannel--flip-coin-Output: - parameterType: STRING - outputDefinitions: - parameters: - pipelinechannel--print-and-return-2-Output: - parameterType: STRING - comp-condition-branches-1: - dag: - outputs: - parameters: - pipelinechannel--condition-branches-1-oneof-1: - valueFromOneof: - parameterSelectors: - - outputParameterKey: pipelinechannel--print-and-return-Output - producerSubtask: condition-2 - - outputParameterKey: pipelinechannel--print-and-return-2-Output - producerSubtask: condition-3 - tasks: - condition-2: - componentRef: - name: comp-condition-2 - inputs: - parameters: - pipelinechannel--flip-coin-Output: - componentInputParameter: pipelinechannel--flip-coin-Output - taskInfo: - name: condition-2 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--flip-coin-Output'] - == 'heads' - condition-3: - componentRef: - name: comp-condition-3 - inputs: - parameters: - pipelinechannel--flip-coin-Output: - componentInputParameter: pipelinechannel--flip-coin-Output - taskInfo: - name: condition-3 - triggerPolicy: - condition: '!(inputs.parameter_values[''pipelinechannel--flip-coin-Output''] - == ''heads'')' - inputDefinitions: - parameters: - pipelinechannel--flip-coin-Output: - parameterType: STRING - outputDefinitions: - parameters: - pipelinechannel--condition-branches-1-oneof-1: - parameterType: STRING - comp-flip-coin: - executorLabel: exec-flip-coin - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-and-return: - executorLabel: exec-print-and-return - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-and-return-2: - executorLabel: exec-print-and-return-2 - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-and-return-3: - executorLabel: exec-print-and-return-3 - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING -deploymentSpec: - executors: - exec-flip-coin: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - flip_coin - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef flip_coin() -> str:\n import random\n return 'heads' if\ - \ random.randint(0, 1) == 0 else 'tails'\n\n" - image: python:3.9 - exec-print-and-return: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_and_return - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ - \ text\n\n" - image: python:3.9 - exec-print-and-return-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_and_return - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ - \ text\n\n" - image: python:3.9 - exec-print-and-return-3: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_and_return - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ - \ text\n\n" - image: python:3.9 -pipelineInfo: - name: flip-coin-pipeline -root: - dag: - outputs: - parameters: - Output: - valueFromParameter: - outputParameterKey: pipelinechannel--condition-branches-1-oneof-1 - producerSubtask: condition-branches-1 - tasks: - condition-branches-1: - componentRef: - name: comp-condition-branches-1 - dependentTasks: - - flip-coin - inputs: - parameters: - pipelinechannel--flip-coin-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: flip-coin - taskInfo: - name: condition-branches-1 - flip-coin: - cachingOptions: - enableCache: true - componentRef: - name: comp-flip-coin - taskInfo: - name: flip-coin - print-and-return-3: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-and-return-3 - dependentTasks: - - condition-branches-1 - inputs: - parameters: - text: - taskOutputParameter: - outputParameterKey: pipelinechannel--condition-branches-1-oneof-1 - producerTask: condition-branches-1 - taskInfo: - name: print-and-return-3 - outputDefinitions: - parameters: - Output: - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/lightweight_python_functions_pipeline.yaml b/sdk/python/test_data/pipelines/lightweight_python_functions_pipeline.yaml deleted file mode 100644 index 52d0037d0ea..00000000000 --- a/sdk/python/test_data/pipelines/lightweight_python_functions_pipeline.yaml +++ /dev/null @@ -1,245 +0,0 @@ -# PIPELINE DEFINITION -# Name: my-test-pipeline-beta -# Inputs: -# input_dict: dict [Default: {'A': 1.0, 'B': 2.0}] -# message: str -components: - comp-preprocess: - executorLabel: exec-preprocess - inputDefinitions: - parameters: - input_dict_parameter: - parameterType: STRUCT - input_list_parameter: - parameterType: LIST - message: - parameterType: STRING - outputDefinitions: - artifacts: - output_dataset_one: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - output_dataset_two_path: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - parameters: - output_bool_parameter_path: - parameterType: BOOLEAN - output_dict_parameter_path: - parameterType: STRUCT - output_list_parameter_path: - parameterType: LIST - output_parameter_path: - parameterType: STRING - comp-train: - executorLabel: exec-train - inputDefinitions: - artifacts: - dataset_one_path: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - dataset_two: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - parameters: - input_bool: - parameterType: BOOLEAN - input_dict: - parameterType: STRUCT - input_list: - parameterType: LIST - message: - parameterType: STRING - num_steps: - defaultValue: 100.0 - isOptional: true - parameterType: NUMBER_INTEGER - outputDefinitions: - artifacts: - model: - artifactType: 
- schemaTitle: system.Model - schemaVersion: 0.0.1 -deploymentSpec: - executors: - exec-preprocess: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - preprocess - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef preprocess(\n # An input parameter of type string.\n message:\ - \ str,\n # An input parameter of type dict.\n input_dict_parameter:\ - \ Dict[str, int],\n # An input parameter of type list.\n input_list_parameter:\ - \ List[str],\n # Use Output[T] to get a metadata-rich handle to the output\ - \ artifact\n # of type `Dataset`.\n output_dataset_one: Output[Dataset],\n\ - \ # A locally accessible filepath for another output artifact of type\n\ - \ # `Dataset`.\n output_dataset_two_path: OutputPath('Dataset'),\n\ - \ # A locally accessible filepath for an output parameter of type string.\n\ - \ output_parameter_path: OutputPath(str),\n # A locally accessible\ - \ filepath for an output parameter of type bool.\n output_bool_parameter_path:\ - \ OutputPath(bool),\n # A locally accessible filepath for an output parameter\ - \ of type dict.\n output_dict_parameter_path: OutputPath(Dict[str, int]),\n\ - \ # A locally accessible filepath for an output parameter of type list.\n\ - \ output_list_parameter_path: OutputPath(List[str]),\n):\n \"\"\"\ - Dummy preprocessing step.\"\"\"\n\n # Use Dataset.path to access a local\ - \ file path for writing.\n # One can also use Dataset.uri to access the\ - \ actual URI file path.\n with open(output_dataset_one.path, 'w') as\ - \ f:\n f.write(message)\n\n # OutputPath is used to just pass\ - \ the local file path of the output artifact\n # to the function.\n \ - \ with open(output_dataset_two_path, 'w') as f:\n f.write(message)\n\ - \n with open(output_parameter_path, 'w') as f:\n f.write(message)\n\ - \n with open(output_bool_parameter_path, 'w') as f:\n f.write(\n\ - \ str(True)) # use either `str()` or `json.dumps()` for bool\ - \ values.\n\n import json\n with open(output_dict_parameter_path,\ - \ 'w') as f:\n f.write(json.dumps(input_dict_parameter))\n\n with\ - \ open(output_list_parameter_path, 'w') as f:\n f.write(json.dumps(input_list_parameter))\n\ - \n" - image: python:3.9 - exec-train: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - train - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef train(\n # Use InputPath to get a locally accessible path\ - \ for the input artifact\n # of type `Dataset`.\n dataset_one_path:\ - \ InputPath('Dataset'),\n # Use Input[T] to get a metadata-rich handle\ - \ to the input artifact\n # of type `Dataset`.\n dataset_two: Input[Dataset],\n\ - \ # An input parameter of type string.\n message: str,\n # Use\ - \ Output[T] to get a metadata-rich handle to the output artifact\n #\ - \ of type `Dataset`.\n model: Output[Model],\n # An input parameter\ - \ of type bool.\n input_bool: bool,\n # An input parameter of type\ - \ dict.\n input_dict: Dict[str, int],\n # An input parameter of type\ - \ List[str].\n input_list: List[str],\n # An input parameter of type\ - \ int with a default value.\n num_steps: int = 100,\n):\n \"\"\"Dummy\ - \ Training step.\"\"\"\n with open(dataset_one_path) as input_file:\n\ - \ dataset_one_contents = input_file.read()\n\n with open(dataset_two.path)\ - \ as input_file:\n dataset_two_contents = input_file.read()\n\n \ - \ line = (f'dataset_one_contents: {dataset_one_contents} || '\n \ - \ f'dataset_two_contents: {dataset_two_contents} || '\n \ - \ f'message: {message} || '\n f'input_bool: {input_bool}, type\ - \ {type(input_bool)} || '\n f'input_dict: {input_dict}, type\ - \ {type(input_dict)} || '\n f'input_list: {input_list}, type\ - \ {type(input_list)} \\n')\n\n with open(model.path, 'w') as output_file:\n\ - \ for i in range(num_steps):\n output_file.write(f'Step\ - \ {i}\\n{line}\\n=====\\n')\n\n # model is an instance of Model artifact,\ - \ which has a .metadata dictionary\n # to store arbitrary metadata for\ - \ the output artifact.\n model.metadata['accuracy'] = 0.9\n\n" - image: python:3.9 -pipelineInfo: - name: my-test-pipeline-beta -root: - dag: - tasks: - preprocess: - cachingOptions: - enableCache: true - componentRef: - name: comp-preprocess - inputs: - parameters: - input_dict_parameter: - componentInputParameter: input_dict - input_list_parameter: - runtimeValue: - constant: - - a - - b - - c - message: - componentInputParameter: message - taskInfo: - name: preprocess - train: - cachingOptions: - enableCache: true - componentRef: - name: comp-train - dependentTasks: - - preprocess - inputs: - artifacts: - dataset_one_path: - taskOutputArtifact: - outputArtifactKey: output_dataset_one - producerTask: preprocess - dataset_two: - taskOutputArtifact: - outputArtifactKey: output_dataset_two_path - producerTask: preprocess - parameters: - input_bool: - taskOutputParameter: - outputParameterKey: output_bool_parameter_path - producerTask: preprocess - input_dict: - taskOutputParameter: - outputParameterKey: output_dict_parameter_path - producerTask: preprocess - input_list: - taskOutputParameter: - outputParameterKey: output_list_parameter_path - producerTask: preprocess - message: - taskOutputParameter: - outputParameterKey: output_parameter_path - 
producerTask: preprocess - taskInfo: - name: train - inputDefinitions: - parameters: - input_dict: - defaultValue: - A: 1.0 - B: 2.0 - isOptional: true - parameterType: STRUCT - message: - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_complex.yaml b/sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_complex.yaml deleted file mode 100644 index 6994b0a68dc..00000000000 --- a/sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_complex.yaml +++ /dev/null @@ -1,497 +0,0 @@ -# PIPELINE DEFINITION -# Name: math-pipeline -# Inputs: -# threshold: int [Default: 2.0] -# Outputs: -# datasets: system.Dataset -# sum: system.Dataset -components: - comp-add: - executorLabel: exec-add - inputDefinitions: - artifacts: - in_datasets: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isArtifactList: true - outputDefinitions: - artifacts: - out_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-add-2: - executorLabel: exec-add-2 - inputDefinitions: - artifacts: - in_datasets: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isArtifactList: true - outputDefinitions: - artifacts: - out_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-add-two-ints: - executorLabel: exec-add-two-ints - inputDefinitions: - artifacts: - in_dataset1: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - in_dataset2: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - outputDefinitions: - artifacts: - out_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-add-two-lists-of-datasets: - dag: - outputs: - artifacts: - Output: - artifactSelectors: - - outputArtifactKey: out_dataset - producerSubtask: add-two-ints - tasks: - add: - cachingOptions: - enableCache: true - componentRef: - name: comp-add - inputs: - artifacts: - in_datasets: - componentInputArtifact: in_datasets1 - taskInfo: - name: add - add-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-add-2 - inputs: - artifacts: - in_datasets: - componentInputArtifact: in_datasets2 - taskInfo: - name: add-2 - add-two-ints: - cachingOptions: - enableCache: true - componentRef: - name: comp-add-two-ints - dependentTasks: - - add - - add-2 - inputs: - artifacts: - in_dataset1: - taskOutputArtifact: - outputArtifactKey: out_dataset - producerTask: add - in_dataset2: - taskOutputArtifact: - outputArtifactKey: out_dataset - producerTask: add-2 - taskInfo: - name: add-two-ints - inputDefinitions: - artifacts: - in_datasets1: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isArtifactList: true - in_datasets2: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isArtifactList: true - outputDefinitions: - artifacts: - Output: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-condition-5: - dag: - outputs: - artifacts: - pipelinechannel--double-2-out_dataset: - artifactSelectors: - - outputArtifactKey: out_dataset - producerSubtask: double-2 - tasks: - double-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-double-2 - inputs: - parameters: - num: - componentInputParameter: pipelinechannel--loop-item-param-3 - taskInfo: - name: double-2 - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-3: - parameterType: NUMBER_INTEGER - pipelinechannel--threshold: - parameterType: NUMBER_INTEGER - 
outputDefinitions: - artifacts: - pipelinechannel--double-2-out_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isArtifactList: true - comp-double: - executorLabel: exec-double - inputDefinitions: - parameters: - num: - parameterType: NUMBER_INTEGER - outputDefinitions: - artifacts: - out_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-double-2: - executorLabel: exec-double-2 - inputDefinitions: - parameters: - num: - parameterType: NUMBER_INTEGER - outputDefinitions: - artifacts: - out_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-for-loop-2: - dag: - outputs: - artifacts: - pipelinechannel--double-2-out_dataset: - artifactSelectors: - - outputArtifactKey: pipelinechannel--double-2-out_dataset - producerSubtask: for-loop-4 - pipelinechannel--double-out_dataset: - artifactSelectors: - - outputArtifactKey: out_dataset - producerSubtask: double - tasks: - double: - cachingOptions: - enableCache: true - componentRef: - name: comp-double - inputs: - parameters: - num: - componentInputParameter: pipelinechannel--loop-item-param-1 - taskInfo: - name: double - for-loop-4: - componentRef: - name: comp-for-loop-4 - inputs: - parameters: - pipelinechannel--threshold: - componentInputParameter: pipelinechannel--threshold - parameterIterator: - itemInput: pipelinechannel--loop-item-param-3 - items: - raw: '[1, 2, 3]' - taskInfo: - name: for-loop-4 - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-1: - parameterType: NUMBER_INTEGER - pipelinechannel--threshold: - parameterType: NUMBER_INTEGER - outputDefinitions: - artifacts: - pipelinechannel--double-2-out_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isArtifactList: true - pipelinechannel--double-out_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isArtifactList: true - comp-for-loop-4: - dag: - outputs: - artifacts: - pipelinechannel--double-2-out_dataset: - artifactSelectors: - - outputArtifactKey: pipelinechannel--double-2-out_dataset - producerSubtask: condition-5 - tasks: - condition-5: - componentRef: - name: comp-condition-5 - inputs: - parameters: - pipelinechannel--loop-item-param-3: - componentInputParameter: pipelinechannel--loop-item-param-3 - pipelinechannel--threshold: - componentInputParameter: pipelinechannel--threshold - taskInfo: - name: condition-5 - triggerPolicy: - condition: int(inputs.parameter_values['pipelinechannel--loop-item-param-3']) - >= int(inputs.parameter_values['pipelinechannel--threshold']) - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-3: - parameterType: NUMBER_INTEGER - pipelinechannel--threshold: - parameterType: NUMBER_INTEGER - outputDefinitions: - artifacts: - pipelinechannel--double-2-out_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isArtifactList: true -deploymentSpec: - executors: - exec-add: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - add - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef add(\n in_datasets: Input[List[Dataset]],\n out_dataset:\ - \ Output[Dataset],\n):\n nums = []\n for dataset in in_datasets:\n\ - \ with open(dataset.path) as f:\n nums.append(int(f.read()))\n\ - \ with open(out_dataset.path, 'w') as f:\n f.write(str(sum(nums)))\n\ - \n" - image: python:3.9 - exec-add-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - add - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef add(\n in_datasets: Input[List[Dataset]],\n out_dataset:\ - \ Output[Dataset],\n):\n nums = []\n for dataset in in_datasets:\n\ - \ with open(dataset.path) as f:\n nums.append(int(f.read()))\n\ - \ with open(out_dataset.path, 'w') as f:\n f.write(str(sum(nums)))\n\ - \n" - image: python:3.9 - exec-add-two-ints: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - add_two_ints - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef add_two_ints(\n in_dataset1: Input[Dataset],\n in_dataset2:\ - \ Input[Dataset],\n out_dataset: Output[Dataset],\n):\n with open(in_dataset1.path)\ - \ as f:\n in_dataset1 = int(f.read())\n\n with open(in_dataset2.path)\ - \ as f:\n in_dataset2 = int(f.read())\n\n with open(out_dataset.path,\ - \ 'w') as f:\n f.write(str(in_dataset1 + in_dataset2))\n\n" - image: python:3.9 - exec-double: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - double - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef double(\n num: int,\n out_dataset: Output[Dataset],\n):\n\ - \ with open(out_dataset.path, 'w') as f:\n f.write(str(2 * num))\n\ - \n" - image: python:3.9 - exec-double-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - double - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef double(\n num: int,\n out_dataset: Output[Dataset],\n):\n\ - \ with open(out_dataset.path, 'w') as f:\n f.write(str(2 * num))\n\ - \n" - image: python:3.9 -pipelineInfo: - name: math-pipeline -root: - dag: - outputs: - artifacts: - datasets: - artifactSelectors: - - outputArtifactKey: pipelinechannel--double-out_dataset - producerSubtask: for-loop-2 - sum: - artifactSelectors: - - outputArtifactKey: Output - producerSubtask: add-two-lists-of-datasets - tasks: - add-two-lists-of-datasets: - cachingOptions: - enableCache: true - componentRef: - name: comp-add-two-lists-of-datasets - dependentTasks: - - for-loop-2 - inputs: - artifacts: - in_datasets1: - taskOutputArtifact: - outputArtifactKey: pipelinechannel--double-out_dataset - producerTask: for-loop-2 - in_datasets2: - taskOutputArtifact: - outputArtifactKey: pipelinechannel--double-2-out_dataset - producerTask: for-loop-2 - taskInfo: - name: add-two-lists-of-datasets - for-loop-2: - componentRef: - name: comp-for-loop-2 - inputs: - parameters: - pipelinechannel--threshold: - componentInputParameter: threshold - parameterIterator: - itemInput: pipelinechannel--loop-item-param-1 - items: - raw: '[1, 2, 3]' - taskInfo: - name: for-loop-2 - inputDefinitions: - parameters: - threshold: - defaultValue: 2.0 - isOptional: true - parameterType: NUMBER_INTEGER - outputDefinitions: - artifacts: - datasets: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isArtifactList: true - sum: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_simple.yaml b/sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_simple.yaml deleted file mode 100644 index e923d788ec2..00000000000 --- a/sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_simple.yaml +++ /dev/null @@ -1,216 +0,0 @@ -# 
PIPELINE DEFINITION -# Name: math-pipeline -# Outputs: -# Output: system.Dataset -components: - comp-add: - executorLabel: exec-add - inputDefinitions: - artifacts: - in_datasets: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isArtifactList: true - outputDefinitions: - artifacts: - out_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-add-container: - executorLabel: exec-add-container - inputDefinitions: - artifacts: - in_datasets: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isArtifactList: true - outputDefinitions: - artifacts: - out_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-double: - executorLabel: exec-double - inputDefinitions: - parameters: - num: - parameterType: NUMBER_INTEGER - outputDefinitions: - artifacts: - out_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-for-loop-2: - dag: - outputs: - artifacts: - pipelinechannel--double-out_dataset: - artifactSelectors: - - outputArtifactKey: out_dataset - producerSubtask: double - tasks: - double: - cachingOptions: - enableCache: true - componentRef: - name: comp-double - inputs: - parameters: - num: - componentInputParameter: pipelinechannel--loop-item-param-1 - taskInfo: - name: double - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-1: - parameterType: NUMBER_INTEGER - outputDefinitions: - artifacts: - pipelinechannel--double-out_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isArtifactList: true -deploymentSpec: - executors: - exec-add: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - add - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef add(in_datasets: Input[List[Dataset]], out_dataset: Output[Dataset]):\n\ - \ nums = []\n for dataset in in_datasets:\n with open(dataset.path)\ - \ as f:\n nums.append(int(f.read()))\n with open(out_dataset.path,\ - \ 'w') as f:\n f.write(str(sum(nums)))\n\n" - image: python:3.9 - exec-add-container: - container: - args: - - "\nimport argparse\nimport json\nimport os\n\ndef main(in_datasets, out_dataset_uri):\n\ - \ in_dicts = json.loads(in_datasets)\n uris = [d['uri'] for d in in_dicts]\n\ - \ total = 0\n for uri in uris:\n with open(uri.replace('gs://',\ - \ '/gcs/')) as f:\n total += int(f.read())\n\n outpath = out_dataset_uri.replace('gs://',\ - \ '/gcs/')\n os.makedirs(os.path.dirname(outpath), exist_ok=True)\n \ - \ with open(outpath, 'w') as f:\n f.write(str(total))\n\nparser\ - \ = argparse.ArgumentParser()\nparser.add_argument('in_datasets')\nparser.add_argument('out_dataset_uri')\n\ - args = parser.parse_args()\n\nmain(args.in_datasets, args.out_dataset_uri)\n" - - '{{$.inputs.artifacts[''in_datasets'']}}' - - '{{$.outputs.artifacts[''out_dataset''].uri}}' - command: - - python - - -c - image: 
python:3.9 - exec-double: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - double - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef double(num: int, out_dataset: Output[Dataset]):\n with open(out_dataset.path,\ - \ 'w') as f:\n f.write(str(2 * num))\n\n" - image: python:3.9 -pipelineInfo: - name: math-pipeline -root: - dag: - outputs: - artifacts: - Output: - artifactSelectors: - - outputArtifactKey: pipelinechannel--double-out_dataset - producerSubtask: for-loop-2 - tasks: - add: - cachingOptions: - enableCache: true - componentRef: - name: comp-add - dependentTasks: - - for-loop-2 - inputs: - artifacts: - in_datasets: - taskOutputArtifact: - outputArtifactKey: pipelinechannel--double-out_dataset - producerTask: for-loop-2 - taskInfo: - name: add - add-container: - cachingOptions: - enableCache: true - componentRef: - name: comp-add-container - dependentTasks: - - for-loop-2 - inputs: - artifacts: - in_datasets: - taskOutputArtifact: - outputArtifactKey: pipelinechannel--double-out_dataset - producerTask: for-loop-2 - taskInfo: - name: add-container - for-loop-2: - componentRef: - name: comp-for-loop-2 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-1 - items: - raw: '[1, 2, 3]' - taskInfo: - name: for-loop-2 - outputDefinitions: - artifacts: - Output: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isArtifactList: true -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/conditional_producer_and_consumers.yaml b/sdk/python/test_data/pipelines/parallelfor_fan_in/conditional_producer_and_consumers.yaml deleted file mode 100644 index ac95760cb19..00000000000 --- a/sdk/python/test_data/pipelines/parallelfor_fan_in/conditional_producer_and_consumers.yaml +++ /dev/null @@ -1,232 +0,0 @@ -# PIPELINE DEFINITION -# Name: math-pipeline -# Inputs: -# threshold: int [Default: 2.0] -# Outputs: -# Output: list -components: - comp-add: - executorLabel: exec-add - inputDefinitions: - parameters: - nums: - parameterType: LIST - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER - comp-condition-3: - dag: - outputs: - parameters: - pipelinechannel--double-Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: double - tasks: - double: - cachingOptions: - enableCache: true - componentRef: - name: comp-double - inputs: - parameters: - num: - componentInputParameter: pipelinechannel--loop-item-param-1 - taskInfo: - name: double - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-1: - parameterType: NUMBER_INTEGER - pipelinechannel--threshold: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - pipelinechannel--double-Output: - parameterType: LIST - comp-condition-4: - dag: - outputs: - parameters: - pipelinechannel--add-Output: - valueFromParameter: - 
outputParameterKey: Output - producerSubtask: add - tasks: - add: - cachingOptions: - enableCache: true - componentRef: - name: comp-add - inputs: - parameters: - nums: - componentInputParameter: pipelinechannel--for-loop-2-pipelinechannel--double-Output - taskInfo: - name: add - inputDefinitions: - parameters: - pipelinechannel--for-loop-2-pipelinechannel--double-Output: - parameterType: LIST - pipelinechannel--threshold: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - pipelinechannel--add-Output: - parameterType: LIST - comp-double: - executorLabel: exec-double - inputDefinitions: - parameters: - num: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER - comp-for-loop-2: - dag: - outputs: - parameters: - pipelinechannel--double-Output: - valueFromParameter: - outputParameterKey: pipelinechannel--double-Output - producerSubtask: condition-3 - tasks: - condition-3: - componentRef: - name: comp-condition-3 - inputs: - parameters: - pipelinechannel--loop-item-param-1: - componentInputParameter: pipelinechannel--loop-item-param-1 - pipelinechannel--threshold: - componentInputParameter: pipelinechannel--threshold - taskInfo: - name: condition-3 - triggerPolicy: - condition: int(inputs.parameter_values['pipelinechannel--loop-item-param-1']) - >= int(inputs.parameter_values['pipelinechannel--threshold']) - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-1: - parameterType: NUMBER_INTEGER - pipelinechannel--threshold: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - pipelinechannel--double-Output: - parameterType: LIST -deploymentSpec: - executors: - exec-add: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - add - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef add(nums: List[int]) -> int:\n return sum(nums)\n\n" - image: python:3.9 - exec-double: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - double - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" - image: python:3.9 -pipelineInfo: - name: math-pipeline -root: - dag: - outputs: - parameters: - Output: - valueFromParameter: - outputParameterKey: pipelinechannel--add-Output - producerSubtask: condition-4 - tasks: - condition-4: - componentRef: - name: comp-condition-4 - dependentTasks: - - for-loop-2 - inputs: - parameters: - pipelinechannel--for-loop-2-pipelinechannel--double-Output: - taskOutputParameter: - outputParameterKey: pipelinechannel--double-Output - producerTask: for-loop-2 - pipelinechannel--threshold: - componentInputParameter: threshold - taskInfo: - name: condition-4 - triggerPolicy: - condition: int(inputs.parameter_values['pipelinechannel--threshold']) == - 2 - for-loop-2: - componentRef: - name: comp-for-loop-2 - inputs: - parameters: - pipelinechannel--threshold: - componentInputParameter: threshold - parameterIterator: - itemInput: pipelinechannel--loop-item-param-1 - items: - raw: '[1, 2, 3]' - taskInfo: - name: for-loop-2 - inputDefinitions: - parameters: - threshold: - defaultValue: 2.0 - isOptional: true - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - Output: - parameterType: LIST -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/nested_with_parameters.yaml b/sdk/python/test_data/pipelines/parallelfor_fan_in/nested_with_parameters.yaml deleted file mode 100644 index 221ee317b5d..00000000000 --- a/sdk/python/test_data/pipelines/parallelfor_fan_in/nested_with_parameters.yaml +++ /dev/null @@ -1,294 +0,0 @@ -# PIPELINE DEFINITION -# Name: math-pipeline -# Outputs: -# Output: list -components: - comp-add: - executorLabel: exec-add - inputDefinitions: - parameters: - nums: - parameterType: LIST - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER - comp-add-two-nums: - executorLabel: exec-add-two-nums - inputDefinitions: - parameters: - x: - parameterType: NUMBER_INTEGER - y: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER - comp-double: - executorLabel: exec-double - inputDefinitions: - parameters: - num: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER - comp-double-2: - executorLabel: exec-double-2 - inputDefinitions: - parameters: - num: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER - comp-for-loop-2: - dag: - outputs: - parameters: - pipelinechannel--add-two-nums-Output: - valueFromParameter: - outputParameterKey: pipelinechannel--add-two-nums-Output - producerSubtask: for-loop-4 - tasks: - for-loop-4: - componentRef: - name: comp-for-loop-4 - inputs: - parameters: - pipelinechannel--loop-item-param-1: - componentInputParameter: pipelinechannel--loop-item-param-1 - 
parameterIterator: - itemInput: pipelinechannel--loop-item-param-3 - items: - raw: '[1, 2, 3]' - taskInfo: - name: for-loop-4 - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-1: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - pipelinechannel--add-two-nums-Output: - parameterType: LIST - comp-for-loop-4: - dag: - outputs: - parameters: - pipelinechannel--add-two-nums-Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: add-two-nums - tasks: - add-two-nums: - cachingOptions: - enableCache: true - componentRef: - name: comp-add-two-nums - dependentTasks: - - double - - double-2 - inputs: - parameters: - x: - taskOutputParameter: - outputParameterKey: Output - producerTask: double - y: - taskOutputParameter: - outputParameterKey: Output - producerTask: double-2 - taskInfo: - name: add-two-nums - double: - cachingOptions: - enableCache: true - componentRef: - name: comp-double - inputs: - parameters: - num: - componentInputParameter: pipelinechannel--loop-item-param-1 - taskInfo: - name: double - double-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-double-2 - inputs: - parameters: - num: - componentInputParameter: pipelinechannel--loop-item-param-3 - taskInfo: - name: double-2 - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-1: - parameterType: NUMBER_INTEGER - pipelinechannel--loop-item-param-3: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - pipelinechannel--add-two-nums-Output: - parameterType: LIST -deploymentSpec: - executors: - exec-add: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - add - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef add(nums: List[List[int]]) -> int:\n import itertools\n \ - \ return sum(itertools.chain(*nums))\n\n" - image: python:3.9 - exec-add-two-nums: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - add_two_nums - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef add_two_nums(x: int, y: int) -> int:\n return x + y\n\n" - image: python:3.9 - exec-double: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - double - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" - image: python:3.9 - exec-double-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - double - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" - image: python:3.9 -pipelineInfo: - name: math-pipeline -root: - dag: - outputs: - parameters: - Output: - valueFromParameter: - outputParameterKey: pipelinechannel--add-two-nums-Output - producerSubtask: for-loop-2 - tasks: - add: - cachingOptions: - enableCache: true - componentRef: - name: comp-add - dependentTasks: - - for-loop-2 - inputs: - parameters: - nums: - taskOutputParameter: - outputParameterKey: pipelinechannel--add-two-nums-Output - producerTask: for-loop-2 - taskInfo: - name: add - for-loop-2: - componentRef: - name: comp-for-loop-2 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-1 - items: - raw: '[1, 2, 3]' - taskInfo: - name: for-loop-2 - outputDefinitions: - parameters: - Output: - parameterType: LIST -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/parameters_complex.yaml b/sdk/python/test_data/pipelines/parallelfor_fan_in/parameters_complex.yaml deleted file mode 100644 index 1fb44a7fd7e..00000000000 --- a/sdk/python/test_data/pipelines/parallelfor_fan_in/parameters_complex.yaml +++ /dev/null @@ -1,494 +0,0 @@ -# PIPELINE DEFINITION -# Name: math-pipeline -# Outputs: -# Output: int -components: - comp-add-two-numbers: - executorLabel: exec-add-two-numbers - inputDefinitions: - parameters: - x: - parameterType: LIST - y: - parameterType: LIST - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER - comp-double: - executorLabel: exec-double - inputDefinitions: - parameters: - num: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER - comp-double-2: - executorLabel: exec-double-2 - inputDefinitions: - parameters: - num: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER - comp-for-loop-2: - dag: - outputs: - parameters: - pipelinechannel--double-2-Output: - valueFromParameter: - 
outputParameterKey: pipelinechannel--double-2-Output - producerSubtask: for-loop-4 - pipelinechannel--double-Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: double - tasks: - double: - cachingOptions: - enableCache: true - componentRef: - name: comp-double - inputs: - parameters: - num: - componentInputParameter: pipelinechannel--loop-item-param-1 - taskInfo: - name: double - for-loop-4: - componentRef: - name: comp-for-loop-4 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-3 - items: - raw: '[4, 5, 6]' - taskInfo: - name: for-loop-4 - simple-add: - cachingOptions: - enableCache: true - componentRef: - name: comp-simple-add - dependentTasks: - - for-loop-4 - inputs: - parameters: - nums: - taskOutputParameter: - outputParameterKey: pipelinechannel--double-2-Output - producerTask: for-loop-4 - taskInfo: - name: simple-add - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-1: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - pipelinechannel--double-2-Output: - parameterType: LIST - pipelinechannel--double-Output: - parameterType: LIST - comp-for-loop-4: - dag: - outputs: - parameters: - pipelinechannel--double-2-Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: double-2 - tasks: - double-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-double-2 - inputs: - parameters: - num: - componentInputParameter: pipelinechannel--loop-item-param-3 - taskInfo: - name: double-2 - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-3: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - pipelinechannel--double-2-Output: - parameterType: LIST - comp-for-loop-6: - dag: - outputs: - parameters: - pipelinechannel--nested-add-2-Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: nested-add-2 - pipelinechannel--simple-add-2-Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: simple-add-2 - tasks: - nested-add-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-nested-add-2 - inputs: - parameters: - nums: - componentInputParameter: pipelinechannel--for-loop-2-pipelinechannel--double-2-Output - taskInfo: - name: nested-add-2 - simple-add-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-simple-add-2 - inputs: - parameters: - nums: - componentInputParameter: pipelinechannel--for-loop-2-pipelinechannel--double-Output - taskInfo: - name: simple-add-2 - inputDefinitions: - parameters: - pipelinechannel--for-loop-2-pipelinechannel--double-2-Output: - parameterType: LIST - pipelinechannel--for-loop-2-pipelinechannel--double-Output: - parameterType: LIST - pipelinechannel--loop-item-param-5: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - pipelinechannel--nested-add-2-Output: - parameterType: LIST - pipelinechannel--simple-add-2-Output: - parameterType: LIST - comp-nested-add: - executorLabel: exec-nested-add - inputDefinitions: - parameters: - nums: - parameterType: LIST - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER - comp-nested-add-2: - executorLabel: exec-nested-add-2 - inputDefinitions: - parameters: - nums: - parameterType: LIST - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER - comp-simple-add: - executorLabel: exec-simple-add - inputDefinitions: - parameters: - nums: - parameterType: LIST - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER - 
comp-simple-add-2: - executorLabel: exec-simple-add-2 - inputDefinitions: - parameters: - nums: - parameterType: LIST - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER -deploymentSpec: - executors: - exec-add-two-numbers: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - add_two_numbers - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef add_two_numbers(x: List[int], y: List[int]) -> int:\n return\ - \ sum(x) + sum(y)\n\n" - image: python:3.9 - exec-double: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - double - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" - image: python:3.9 - exec-double-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - double - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" - image: python:3.9 - exec-nested-add: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - nested_add - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef nested_add(nums: List[List[int]]) -> int:\n import itertools\n\ - \ return sum(itertools.chain(*nums))\n\n" - image: python:3.9 - exec-nested-add-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - nested_add - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef nested_add(nums: List[List[int]]) -> int:\n import itertools\n\ - \ return sum(itertools.chain(*nums))\n\n" - image: python:3.9 - exec-simple-add: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - simple_add - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef simple_add(nums: List[int]) -> int:\n return sum(nums)\n\n" - image: python:3.9 - exec-simple-add-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - simple_add - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef simple_add(nums: List[int]) -> int:\n return sum(nums)\n\n" - image: python:3.9 -pipelineInfo: - name: math-pipeline -root: - dag: - outputs: - parameters: - Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: add-two-numbers - tasks: - add-two-numbers: - cachingOptions: - enableCache: true - componentRef: - name: comp-add-two-numbers - dependentTasks: - - for-loop-6 - inputs: - parameters: - x: - taskOutputParameter: - outputParameterKey: pipelinechannel--simple-add-2-Output - producerTask: for-loop-6 - y: - taskOutputParameter: - outputParameterKey: pipelinechannel--nested-add-2-Output - producerTask: for-loop-6 - taskInfo: - name: add-two-numbers - for-loop-2: - componentRef: - name: comp-for-loop-2 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-1 - items: - raw: '[1, 2, 3]' - taskInfo: - name: for-loop-2 - for-loop-6: - componentRef: - name: comp-for-loop-6 - dependentTasks: - - for-loop-2 - inputs: - parameters: - pipelinechannel--for-loop-2-pipelinechannel--double-2-Output: - taskOutputParameter: - outputParameterKey: pipelinechannel--double-2-Output - producerTask: for-loop-2 - pipelinechannel--for-loop-2-pipelinechannel--double-Output: - taskOutputParameter: - outputParameterKey: pipelinechannel--double-Output - producerTask: for-loop-2 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-5 - items: - raw: '[0, 0, 0]' - taskInfo: - name: for-loop-6 - nested-add: - cachingOptions: - enableCache: true - componentRef: - name: comp-nested-add - dependentTasks: - - for-loop-2 - inputs: - parameters: - nums: - taskOutputParameter: - outputParameterKey: pipelinechannel--double-2-Output - producerTask: for-loop-2 - taskInfo: - name: nested-add - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/parameters_simple.yaml b/sdk/python/test_data/pipelines/parallelfor_fan_in/parameters_simple.yaml deleted file mode 100644 index 47fb058803f..00000000000 --- a/sdk/python/test_data/pipelines/parallelfor_fan_in/parameters_simple.yaml +++ /dev/null @@ -1,187 +0,0 @@ -# PIPELINE DEFINITION -# Name: math-pipeline -# Outputs: -# Output: list -components: - comp-add: - executorLabel: exec-add - inputDefinitions: - parameters: - nums: - parameterType: LIST - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER - comp-add-container: - executorLabel: exec-add-container - inputDefinitions: - parameters: - nums: - parameterType: LIST - outputDefinitions: - parameters: - sum: - parameterType: NUMBER_INTEGER - comp-double: - executorLabel: exec-double - inputDefinitions: - parameters: - num: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER - comp-for-loop-2: - dag: - outputs: - 
parameters: - pipelinechannel--double-Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: double - tasks: - double: - cachingOptions: - enableCache: true - componentRef: - name: comp-double - inputs: - parameters: - num: - componentInputParameter: pipelinechannel--loop-item-param-1 - taskInfo: - name: double - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-1: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - pipelinechannel--double-Output: - parameterType: LIST -deploymentSpec: - executors: - exec-add: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - add - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef add(nums: List[int]) -> int:\n return sum(nums)\n\n" - image: python:3.9 - exec-add-container: - container: - args: - - "\n set -ex\n mkdir -p $(dirname {{$.outputs.parameters['sum'].output_file}})\n\ - \ echo {{$.inputs.parameters['nums']}} | jq 'add' > {{$.outputs.parameters['sum'].output_file}}\n\ - \ " - command: - - sh - - -c - image: stedolan/jq - exec-double: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - double - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" - image: python:3.9 -pipelineInfo: - name: math-pipeline -root: - dag: - outputs: - parameters: - Output: - valueFromParameter: - outputParameterKey: pipelinechannel--double-Output - producerSubtask: for-loop-2 - tasks: - add: - cachingOptions: - enableCache: true - componentRef: - name: comp-add - dependentTasks: - - for-loop-2 - inputs: - parameters: - nums: - taskOutputParameter: - outputParameterKey: pipelinechannel--double-Output - producerTask: for-loop-2 - taskInfo: - name: add - add-container: - cachingOptions: - enableCache: true - componentRef: - name: comp-add-container - dependentTasks: - - for-loop-2 - inputs: - parameters: - nums: - taskOutputParameter: - outputParameterKey: pipelinechannel--double-Output - producerTask: for-loop-2 - taskInfo: - name: add-container - for-loop-2: - componentRef: - name: comp-for-loop-2 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-1 - items: - raw: '[1, 2, 3]' - taskInfo: - name: for-loop-2 - outputDefinitions: - parameters: - Output: - parameterType: LIST -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/pipeline_producer_consumer.yaml b/sdk/python/test_data/pipelines/parallelfor_fan_in/pipeline_producer_consumer.yaml deleted file mode 100644 index 015d9066115..00000000000 --- a/sdk/python/test_data/pipelines/parallelfor_fan_in/pipeline_producer_consumer.yaml +++ /dev/null @@ -1,367 +0,0 @@ -# PIPELINE DEFINITION -# Name: math-pipeline -# Outputs: -# Output: int -components: - comp-add: - executorLabel: exec-add - inputDefinitions: - parameters: - nums: - parameterType: LIST - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER - comp-add-pipeline: - dag: - outputs: - parameters: - out1: - valueFromParameter: - outputParameterKey: Output - producerSubtask: add - out2: - valueFromParameter: - outputParameterKey: pipelinechannel--echo-and-return-Output - producerSubtask: for-loop-2 - tasks: - add: - cachingOptions: - enableCache: true - componentRef: - name: comp-add - inputs: - parameters: - nums: - componentInputParameter: nums - taskInfo: - name: add - for-loop-2: - componentRef: - name: comp-for-loop-2-2 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-1 - items: - raw: '["m", "a", "t", "h"]' - taskInfo: - name: for-loop-2 - inputDefinitions: - parameters: - nums: - parameterType: LIST - outputDefinitions: - parameters: - out1: - parameterType: NUMBER_INTEGER - out2: - parameterType: LIST - comp-double: - executorLabel: exec-double - inputDefinitions: - parameters: - num: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER - comp-double-pipeline: - dag: - outputs: - parameters: - Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: double - 
tasks: - double: - cachingOptions: - enableCache: true - componentRef: - name: comp-double - inputs: - parameters: - num: - componentInputParameter: num - taskInfo: - name: double - inputDefinitions: - parameters: - num: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER - comp-echo-and-return: - executorLabel: exec-echo-and-return - inputDefinitions: - parameters: - string: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-for-loop-2: - dag: - outputs: - parameters: - pipelinechannel--double-pipeline-Output: - valueFromParameter: - outputParameterKey: pipelinechannel--double-pipeline-Output - producerSubtask: for-loop-4 - tasks: - for-loop-4: - componentRef: - name: comp-for-loop-4 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-3 - items: - raw: '[1, 2, 3]' - taskInfo: - name: for-loop-4 - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-1: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - pipelinechannel--double-pipeline-Output: - parameterType: LIST - comp-for-loop-2-2: - dag: - outputs: - parameters: - pipelinechannel--echo-and-return-Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: echo-and-return - tasks: - echo-and-return: - cachingOptions: - enableCache: true - componentRef: - name: comp-echo-and-return - inputs: - parameters: - string: - componentInputParameter: pipelinechannel--loop-item-param-1 - taskInfo: - name: echo-and-return - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-1: - parameterType: STRING - outputDefinitions: - parameters: - pipelinechannel--echo-and-return-Output: - parameterType: LIST - comp-for-loop-4: - dag: - outputs: - parameters: - pipelinechannel--double-pipeline-Output: - valueFromParameter: - outputParameterKey: Output - producerSubtask: double-pipeline - tasks: - double-pipeline: - cachingOptions: - enableCache: true - componentRef: - name: comp-double-pipeline - inputs: - parameters: - num: - componentInputParameter: pipelinechannel--loop-item-param-3 - taskInfo: - name: double-pipeline - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-3: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - pipelinechannel--double-pipeline-Output: - parameterType: LIST - comp-join-and-print: - executorLabel: exec-join-and-print - inputDefinitions: - parameters: - strings: - parameterType: LIST -deploymentSpec: - executors: - exec-add: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - add - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef add(nums: List[List[int]]) -> int:\n import itertools\n \ - \ return sum(itertools.chain(*nums))\n\n" - image: python:3.9 - exec-double: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - double - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" - image: python:3.9 - exec-echo-and-return: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - echo_and_return - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef echo_and_return(string: str) -> str:\n print(string)\n \ - \ return string\n\n" - image: python:3.9 - exec-join-and-print: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - join_and_print - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef join_and_print(strings: List[str]):\n print(''.join(strings))\n\ - \n" - image: python:3.9 -pipelineInfo: - name: math-pipeline -root: - dag: - outputs: - parameters: - Output: - valueFromParameter: - outputParameterKey: out1 - producerSubtask: add-pipeline - tasks: - add-pipeline: - cachingOptions: - enableCache: true - componentRef: - name: comp-add-pipeline - dependentTasks: - - for-loop-2 - inputs: - parameters: - nums: - taskOutputParameter: - outputParameterKey: pipelinechannel--double-pipeline-Output - producerTask: for-loop-2 - taskInfo: - name: add-pipeline - for-loop-2: - componentRef: - name: comp-for-loop-2 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-1 - items: - raw: '[1, 2, 3]' - taskInfo: - name: for-loop-2 - join-and-print: - cachingOptions: - enableCache: true - componentRef: - name: comp-join-and-print - dependentTasks: - - add-pipeline - inputs: - parameters: - strings: - taskOutputParameter: - outputParameterKey: out2 - producerTask: add-pipeline - taskInfo: - name: join-and-print - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_as_exit_task.yaml b/sdk/python/test_data/pipelines/pipeline_as_exit_task.yaml deleted file mode 100644 index 13bfb5acd14..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_as_exit_task.yaml +++ /dev/null @@ -1,273 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-with-task-final-status-conditional -# Inputs: -# message: str [Default: 'Hello World!'] -components: - comp-condition-1: - dag: - tasks: - print-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-2 - inputs: - parameters: - message: - runtimeValue: - constant: notify task failure. - taskInfo: - name: print-op - inputDefinitions: - parameters: - pipelinechannel--get-run-state-Output: - parameterType: STRING - comp-conditional-notification: - dag: - tasks: - condition-1: - componentRef: - name: comp-condition-1 - dependentTasks: - - get-run-state - inputs: - parameters: - pipelinechannel--get-run-state-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: get-run-state - taskInfo: - name: condition-1 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--get-run-state-Output'] - == 'FAILED' - get-run-state: - cachingOptions: - enableCache: true - componentRef: - name: comp-get-run-state - inputs: - parameters: - status: - componentInputParameter: status - taskInfo: - name: get-run-state - inputDefinitions: - parameters: - status: - isOptional: true - parameterType: TASK_FINAL_STATUS - comp-exit-handler-1: - dag: - tasks: - fail-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-fail-op - inputs: - parameters: - message: - runtimeValue: - constant: Task failed. 
- taskInfo: - name: fail-op - print-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op - inputs: - parameters: - message: - componentInputParameter: pipelinechannel--message - taskInfo: - name: print-op - inputDefinitions: - parameters: - pipelinechannel--message: - parameterType: STRING - comp-fail-op: - executorLabel: exec-fail-op - inputDefinitions: - parameters: - message: - parameterType: STRING - comp-get-run-state: - executorLabel: exec-get-run-state - inputDefinitions: - parameters: - status: - parameterType: STRUCT - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-op: - executorLabel: exec-print-op - inputDefinitions: - parameters: - message: - parameterType: STRING - comp-print-op-2: - executorLabel: exec-print-op-2 - inputDefinitions: - parameters: - message: - parameterType: STRING -deploymentSpec: - executors: - exec-fail-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - fail_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n\ - \ print(message)\n sys.exit(1)\n\n" - image: python:3.9 - exec-get-run-state: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - get_run_state - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef get_run_state(status: dict) -> str:\n print('Pipeline status:\ - \ ', status)\n return status['state']\n\n" - image: python:3.9 - exec-print-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ - \ print(message)\n\n" - image: python:3.9 - exec-print-op-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ - \ print(message)\n\n" - image: python:3.9 -pipelineInfo: - name: pipeline-with-task-final-status-conditional -root: - dag: - tasks: - conditional-notification: - cachingOptions: - enableCache: true - componentRef: - name: comp-conditional-notification - dependentTasks: - - exit-handler-1 - inputs: - parameters: - status: - taskFinalStatus: - producerTask: exit-handler-1 - taskInfo: - name: conditional-notification - triggerPolicy: - strategy: ALL_UPSTREAM_TASKS_COMPLETED - exit-handler-1: - componentRef: - name: comp-exit-handler-1 - inputs: - parameters: - pipelinechannel--message: - componentInputParameter: message - taskInfo: - name: my-pipeline - inputDefinitions: - parameters: - message: - defaultValue: Hello World! 
- isOptional: true - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_in_pipeline.yaml b/sdk/python/test_data/pipelines/pipeline_in_pipeline.yaml deleted file mode 100644 index 45efa979a99..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_in_pipeline.yaml +++ /dev/null @@ -1,159 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-in-pipeline -components: - comp-inner-pipeline: - dag: - tasks: - print-op1: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op1-2 - inputs: - parameters: - msg: - componentInputParameter: msg - taskInfo: - name: print-op1 - print-op2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op2 - dependentTasks: - - print-op1 - inputs: - parameters: - msg: - taskOutputParameter: - outputParameterKey: Output - producerTask: print-op1 - taskInfo: - name: print-op2 - inputDefinitions: - parameters: - msg: - parameterType: STRING - comp-print-op1: - executorLabel: exec-print-op1 - inputDefinitions: - parameters: - msg: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-op1-2: - executorLabel: exec-print-op1-2 - inputDefinitions: - parameters: - msg: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-op2: - executorLabel: exec-print-op2 - inputDefinitions: - parameters: - msg: - parameterType: STRING -deploymentSpec: - executors: - exec-print-op1: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op1 - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ - \n" - image: python:3.9 - exec-print-op1-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op1 - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ - \n" - image: python:3.9 - exec-print-op2: - container: - command: - - echo - - '{{$.inputs.parameters[''msg'']}}' - image: alpine -pipelineInfo: - name: pipeline-in-pipeline -root: - dag: - tasks: - inner-pipeline: - cachingOptions: - enableCache: true - componentRef: - name: comp-inner-pipeline - inputs: - parameters: - msg: - runtimeValue: - constant: world - taskInfo: - name: inner-pipeline - print-op1: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op1 - inputs: - parameters: - msg: - runtimeValue: - constant: Hello - taskInfo: - name: print-op1 -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_in_pipeline_complex.yaml b/sdk/python/test_data/pipelines/pipeline_in_pipeline_complex.yaml deleted file mode 100644 index 268581c358a..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_in_pipeline_complex.yaml +++ /dev/null @@ -1,248 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-in-pipeline-complex -# Inputs: -# msg: str [Default: 'Hello'] -components: - comp-condition-1: - dag: - tasks: - print-op2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op2 - inputs: - parameters: - msg: - runtimeValue: - constant: world - taskInfo: - name: print-op2 - inputDefinitions: - parameters: - pipelinechannel--print-op1-Output: - parameterType: STRING - comp-condition-2: - dag: - tasks: - print-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op - inputs: - parameters: - msg: - runtimeValue: - constant: Bye! 
- taskInfo: - name: print-op - inputDefinitions: - parameters: - pipelinechannel--print-op1-Output: - parameterType: STRING - comp-for-loop-2: - dag: - tasks: - inner-pipeline: - cachingOptions: - enableCache: true - componentRef: - name: comp-inner-pipeline - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--loop-item-param-1 - taskInfo: - name: inner-pipeline - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-1: - parameterType: STRING - comp-inner-pipeline: - dag: - tasks: - condition-1: - componentRef: - name: comp-condition-1 - dependentTasks: - - print-op1 - inputs: - parameters: - pipelinechannel--print-op1-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: print-op1 - taskInfo: - name: condition-1 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--print-op1-Output'] - == 'Hello' - condition-2: - componentRef: - name: comp-condition-2 - dependentTasks: - - print-op1 - inputs: - parameters: - pipelinechannel--print-op1-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: print-op1 - taskInfo: - name: condition-2 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--print-op1-Output'] - != 'Hello' - print-op1: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op1-2 - inputs: - parameters: - msg: - componentInputParameter: msg - taskInfo: - name: print-op1 - inputDefinitions: - parameters: - msg: - parameterType: STRING - comp-print-op: - executorLabel: exec-print-op - inputDefinitions: - parameters: - msg: - parameterType: STRING - comp-print-op1: - executorLabel: exec-print-op1 - inputDefinitions: - parameters: - msg: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-op1-2: - executorLabel: exec-print-op1-2 - inputDefinitions: - parameters: - msg: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-op2: - executorLabel: exec-print-op2 - inputDefinitions: - parameters: - msg: - parameterType: STRING -deploymentSpec: - executors: - exec-print-op: - container: - command: - - echo - - '{{$.inputs.parameters[''msg'']}}' - image: alpine - exec-print-op1: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op1 - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ - \n" - image: python:3.9 - exec-print-op1-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op1 - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ - \n" - image: python:3.9 - exec-print-op2: - container: - command: - - echo - - '{{$.inputs.parameters[''msg'']}}' - image: alpine -pipelineInfo: - name: pipeline-in-pipeline-complex -root: - dag: - tasks: - for-loop-2: - componentRef: - name: comp-for-loop-2 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-1 - items: - raw: '["Hello", "world!"]' - taskInfo: - name: for-loop-2 - print-op1: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op1 - inputs: - parameters: - msg: - componentInputParameter: msg - taskInfo: - name: print-op1 - inputDefinitions: - parameters: - msg: - defaultValue: Hello - isOptional: true - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_in_pipeline_loaded_from_yaml.yaml b/sdk/python/test_data/pipelines/pipeline_in_pipeline_loaded_from_yaml.yaml deleted file mode 100644 index 97977ea3d99..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_in_pipeline_loaded_from_yaml.yaml +++ /dev/null @@ -1,273 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-in-pipeline -components: - comp-inner-pipeline: - dag: - outputs: - artifacts: - data: - artifactSelectors: - - outputArtifactKey: data - producerSubtask: print-op2 - parameters: - msg: - valueFromParameter: - outputParameterKey: Output - producerSubtask: print-op1 - tasks: - print-op1: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op1-2 - inputs: - parameters: - msg: - componentInputParameter: msg - taskInfo: - name: print-op1 - print-op2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op2 - dependentTasks: - - print-op1 - inputs: - parameters: - msg: - taskOutputParameter: - outputParameterKey: Output - producerTask: print-op1 - taskInfo: - name: print-op2 - inputDefinitions: - parameters: - msg: - parameterType: STRING - outputDefinitions: - artifacts: - data: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - parameters: - msg: - parameterType: STRING - comp-pipeline-in-pipeline: - dag: - outputs: - artifacts: - Output: - artifactSelectors: - - outputArtifactKey: data - producerSubtask: inner-pipeline - tasks: - inner-pipeline: - cachingOptions: - enableCache: true - componentRef: - name: comp-inner-pipeline - inputs: - parameters: - msg: - runtimeValue: - constant: world - taskInfo: - name: inner-pipeline - print-op1: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op1 - inputs: - parameters: - msg: - componentInputParameter: msg - taskInfo: - name: print-op1 - inputDefinitions: - parameters: - msg: - defaultValue: Hello - isOptional: true - parameterType: STRING - outputDefinitions: - artifacts: - Output: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - comp-print-op1: 
- executorLabel: exec-print-op1 - inputDefinitions: - parameters: - msg: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-op1-2: - executorLabel: exec-print-op1-2 - inputDefinitions: - parameters: - msg: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-op1-3: - executorLabel: exec-print-op1-3 - inputDefinitions: - artifacts: - data: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - comp-print-op2: - executorLabel: exec-print-op2 - inputDefinitions: - parameters: - msg: - parameterType: STRING - outputDefinitions: - artifacts: - data: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 -deploymentSpec: - executors: - exec-print-op1: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op1 - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.1.3'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ - \n" - image: python:3.9 - exec-print-op1-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op1 - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.1.3'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ - \n" - image: python:3.9 - exec-print-op1-3: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op1 - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op1(data: Input[Artifact]):\n with open(data.path, 'r')\ - \ as f:\n print(f.read())\n\n" - image: python:3.9 - exec-print-op2: - container: - args: - - '{{$.inputs.parameters[''msg'']}}' - - '{{$.outputs.artifacts[''data''].path}}' - command: - - sh - - -c - - mkdir --parents $(dirname "$1") && echo "$0" > "$1" - image: alpine -pipelineInfo: - name: pipeline-in-pipeline -root: - dag: - tasks: - pipeline-in-pipeline: - cachingOptions: - enableCache: true - componentRef: - name: comp-pipeline-in-pipeline - inputs: - parameters: - msg: - runtimeValue: - constant: Hello - taskInfo: - name: pipeline-in-pipeline - print-op1: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op1-3 - dependentTasks: - - pipeline-in-pipeline - inputs: - artifacts: - data: - taskOutputArtifact: - outputArtifactKey: Output - producerTask: pipeline-in-pipeline - taskInfo: - name: print-op1 -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_after.yaml b/sdk/python/test_data/pipelines/pipeline_with_after.yaml deleted file mode 100644 index 757bea08ee1..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_after.yaml +++ /dev/null @@ -1,107 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-with-after -components: - comp-print-text: - executorLabel: exec-print-text - inputDefinitions: - parameters: - text: - parameterType: STRING - comp-print-text-2: - executorLabel: exec-print-text-2 - inputDefinitions: - parameters: - text: - parameterType: STRING - comp-print-text-3: - executorLabel: exec-print-text-3 - inputDefinitions: - parameters: - text: - parameterType: STRING -deploymentSpec: - executors: - exec-print-text: - container: - command: - - sh - - -c - - 'set -e -x - - echo "$0" - - ' - - '{{$.inputs.parameters[''text'']}}' - image: alpine - exec-print-text-2: - container: - command: - - sh - - -c - - 'set -e -x - - echo "$0" - - ' - - '{{$.inputs.parameters[''text'']}}' - image: alpine - exec-print-text-3: - container: - command: - - sh - - -c - - 'set -e -x - - echo "$0" - - ' - - '{{$.inputs.parameters[''text'']}}' - image: alpine -pipelineInfo: - name: pipeline-with-after -root: - dag: - tasks: - print-text: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text - inputs: - parameters: - text: - runtimeValue: - constant: 1st task - taskInfo: - name: print-text - print-text-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text-2 - dependentTasks: - - print-text - inputs: - parameters: - text: - runtimeValue: - constant: 2nd task - taskInfo: - name: print-text-2 - print-text-3: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text-3 - dependentTasks: - - print-text - - print-text-2 - inputs: - parameters: - text: - runtimeValue: - constant: 3rd task - taskInfo: - name: print-text-3 -schemaVersion: 
2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_concat_placeholder.py b/sdk/python/test_data/pipelines/pipeline_with_concat_placeholder.py deleted file mode 100644 index 04b07236953..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_concat_placeholder.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2020 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import pathlib - -from kfp import compiler -from kfp import components -from kfp import dsl - -test_data_dir = pathlib.Path(__file__).parent.parent / 'v1_component_yaml' -component_op = components.load_component_from_file( - str(test_data_dir / 'concat_placeholder_component.yaml')) - - -@dsl.pipeline(name='one-step-pipeline-with-concat-placeholder') -def my_pipeline(): - component = component_op(input_prefix='some prefix:') - - -if __name__ == '__main__': - compiler.Compiler().compile( - pipeline_func=my_pipeline, - package_path=__file__.replace('.py', '.yaml')) diff --git a/sdk/python/test_data/pipelines/pipeline_with_concat_placeholder.yaml b/sdk/python/test_data/pipelines/pipeline_with_concat_placeholder.yaml deleted file mode 100644 index 56ddc45189c..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_concat_placeholder.yaml +++ /dev/null @@ -1,36 +0,0 @@ -# PIPELINE DEFINITION -# Name: one-step-pipeline-with-concat-placeholder -components: - comp-component-with-concat-placeholder: - executorLabel: exec-component-with-concat-placeholder - inputDefinitions: - parameters: - input_prefix: - parameterType: STRING -deploymentSpec: - executors: - exec-component-with-concat-placeholder: - container: - args: - - --arg0 - - '{"Concat": ["{{$.inputs.parameters[''input_prefix'']}}", "some value"]}' - image: gcr.io/my-project/my-image -pipelineInfo: - name: one-step-pipeline-with-concat-placeholder -root: - dag: - tasks: - component-with-concat-placeholder: - cachingOptions: - enableCache: true - componentRef: - name: comp-component-with-concat-placeholder - inputs: - parameters: - input_prefix: - runtimeValue: - constant: 'some prefix:' - taskInfo: - name: component-with-concat-placeholder -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_condition.yaml b/sdk/python/test_data/pipelines/pipeline_with_condition.yaml deleted file mode 100644 index eb350488970..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_condition.yaml +++ /dev/null @@ -1,277 +0,0 @@ -# PIPELINE DEFINITION -# Name: single-condition-pipeline -# Inputs: -# text: str [Default: 'condition test'] -components: - comp-condition-1: - dag: - tasks: - flip-coin-op-2: - cachingOptions: {} - componentRef: - name: comp-flip-coin-op-2 - taskInfo: - name: flip-coin-op-2 - print-op-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-2 - dependentTasks: - - flip-coin-op-2 - inputs: - parameters: - msg: - taskOutputParameter: - outputParameterKey: Output - producerTask: flip-coin-op-2 - taskInfo: - name: print-op-2 - 
print-op-3: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-3 - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--text - taskInfo: - name: print-op-3 - inputDefinitions: - parameters: - pipelinechannel--flip-coin-op-Output: - parameterType: STRING - pipelinechannel--text: - parameterType: STRING - comp-flip-coin-op: - executorLabel: exec-flip-coin-op - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-flip-coin-op-2: - executorLabel: exec-flip-coin-op-2 - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-op: - executorLabel: exec-print-op - inputDefinitions: - parameters: - msg: - parameterType: STRING - comp-print-op-2: - executorLabel: exec-print-op-2 - inputDefinitions: - parameters: - msg: - parameterType: STRING - comp-print-op-3: - executorLabel: exec-print-op-3 - inputDefinitions: - parameters: - msg: - parameterType: STRING -deploymentSpec: - executors: - exec-flip-coin-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - flip_coin_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ - \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ - \ 1) == 0 else 'tails'\n return result\n\n" - image: python:3.9 - exec-flip-coin-op-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - flip_coin_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ - \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ - \ 1) == 0 else 'tails'\n return result\n\n" - image: python:3.9 - exec-print-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ - \n" - image: python:3.9 - exec-print-op-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ - \n" - image: python:3.9 - exec-print-op-3: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ - \n" - image: python:3.9 -pipelineInfo: - name: single-condition-pipeline -root: - dag: - tasks: - condition-1: - componentRef: - name: comp-condition-1 - dependentTasks: - - flip-coin-op - inputs: - parameters: - pipelinechannel--flip-coin-op-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: flip-coin-op - pipelinechannel--text: - componentInputParameter: text - taskInfo: - name: condition-1 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--flip-coin-op-Output'] - == 'heads' - flip-coin-op: - cachingOptions: {} - componentRef: - name: comp-flip-coin-op - taskInfo: - name: flip-coin-op - print-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op - dependentTasks: - - flip-coin-op - inputs: - parameters: - msg: - taskOutputParameter: - outputParameterKey: Output - producerTask: flip-coin-op - taskInfo: - name: print-op - inputDefinitions: - parameters: - text: - defaultValue: condition test - isOptional: true - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_condition_dynamic_task_output_custom_training_job.yaml b/sdk/python/test_data/pipelines/pipeline_with_condition_dynamic_task_output_custom_training_job.yaml deleted file mode 100644 index 12754cacc2f..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_condition_dynamic_task_output_custom_training_job.yaml +++ /dev/null @@ -1,419 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline -# Inputs: -# encryption_spec_key_name: str [Default: ''] -# location: str -# project: str -components: - comp-accelerator-count: - executorLabel: exec-accelerator-count - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER - comp-accelerator-type: - executorLabel: exec-accelerator-type - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-condition-1: - dag: - tasks: - custom-training-job: - cachingOptions: - enableCache: true - componentRef: - name: comp-custom-training-job - inputs: - parameters: - display_name: - runtimeValue: - constant: add-numbers - encryption_spec_key_name: - componentInputParameter: pipelinechannel--encryption_spec_key_name - location: - componentInputParameter: pipelinechannel--location - pipelinechannel--accelerator-count-Output: - componentInputParameter: pipelinechannel--accelerator-count-Output - pipelinechannel--accelerator-type-Output: - componentInputParameter: pipelinechannel--accelerator-type-Output - pipelinechannel--machine-type-Output: - componentInputParameter: pipelinechannel--machine-type-Output - project: - componentInputParameter: pipelinechannel--project - worker_pool_specs: - runtimeValue: - constant: - - container_spec: - args: - - foo - command: - - echo - image_uri: 
gcr.io/ml-pipeline/google-cloud-pipeline-components:2.5.0 - machine_spec: - accelerator_count: '{{$.inputs.parameters[''pipelinechannel--accelerator-count-Output'']}}' - accelerator_type: '{{$.inputs.parameters[''pipelinechannel--accelerator-type-Output'']}}' - machine_type: '{{$.inputs.parameters[''pipelinechannel--machine-type-Output'']}}' - replica_count: 1.0 - taskInfo: - name: custom-training-job - inputDefinitions: - parameters: - pipelinechannel--accelerator-count-Output: - parameterType: NUMBER_INTEGER - pipelinechannel--accelerator-type-Output: - parameterType: STRING - pipelinechannel--encryption_spec_key_name: - parameterType: STRING - pipelinechannel--flip-biased-coin-op-Output: - parameterType: STRING - pipelinechannel--location: - parameterType: STRING - pipelinechannel--machine-type-Output: - parameterType: STRING - pipelinechannel--project: - parameterType: STRING - comp-custom-training-job: - executorLabel: exec-custom-training-job - inputDefinitions: - parameters: - base_output_directory: - defaultValue: '' - description: The Cloud Storage location to store the output of this CustomJob - or HyperparameterTuningJob. See [more information ](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/GcsDestination). - isOptional: true - parameterType: STRING - display_name: - description: The name of the CustomJob. - parameterType: STRING - enable_web_access: - defaultValue: false - description: Whether you want Vertex AI to enable [interactive shell access - ](https://cloud.google.com/vertex-ai/docs/training/monitor-debug-interactive-shell) - to training containers. If `True`, you can access interactive shells at - the URIs given by [CustomJob.web_access_uris][]. - isOptional: true - parameterType: BOOLEAN - encryption_spec_key_name: - defaultValue: '' - description: Customer-managed encryption key options for the CustomJob. - If this is set, then all resources created by the CustomJob will be encrypted - with the provided encryption key. - isOptional: true - parameterType: STRING - labels: - defaultValue: {} - description: The labels with user-defined metadata to organize the CustomJob. - See [more information](https://goo.gl/xmQnxf). - isOptional: true - parameterType: STRUCT - location: - defaultValue: us-central1 - description: Location for creating the custom training job. If not set, - default to us-central1. - isOptional: true - parameterType: STRING - network: - defaultValue: '' - description: The full name of the Compute Engine network to which the job - should be peered. For example, `projects/12345/global/networks/myVPC`. - Format is of the form `projects/{project}/global/networks/{network}`. - Where `{project}` is a project number, as in `12345`, and `{network}` - is a network name. Private services access must already be configured - for the network. If left unspecified, the job is not peered with any network. - isOptional: true - parameterType: STRING - project: - defaultValue: '{{$.pipeline_google_cloud_project_id}}' - description: Project to create the custom training job in. Defaults to the - project in which the PipelineJob is run. - isOptional: true - parameterType: STRING - reserved_ip_ranges: - defaultValue: [] - description: A list of names for the reserved IP ranges under the VPC network - that can be used for this job. If set, we will deploy the job within the - provided IP ranges. Otherwise, the job will be deployed to any IP ranges - under the provided VPC network. 
- isOptional: true - parameterType: LIST - restart_job_on_worker_restart: - defaultValue: false - description: Restarts the entire CustomJob if a worker gets restarted. This - feature can be used by distributed training jobs that are not resilient - to workers leaving and joining a job. - isOptional: true - parameterType: BOOLEAN - service_account: - defaultValue: '' - description: Sets the default service account for workload run-as account. - The [service account ](https://cloud.google.com/vertex-ai/docs/pipelines/configure-project#service-account) - running the pipeline submitting jobs must have act-as permission on this - run-as account. If unspecified, the Vertex AI Custom Code [Service Agent - ](https://cloud.google.com/vertex-ai/docs/general/access-control#service-agents) - for the CustomJob's project. - isOptional: true - parameterType: STRING - tensorboard: - defaultValue: '' - description: The name of a Vertex AI TensorBoard resource to which this - CustomJob will upload TensorBoard logs. - isOptional: true - parameterType: STRING - timeout: - defaultValue: 604800s - description: 'The maximum job running time. The default is 7 days. A duration - in seconds with up to nine fractional digits, terminated by ''s'', for - example: "3.5s".' - isOptional: true - parameterType: STRING - worker_pool_specs: - defaultValue: [] - description: Serialized json spec of the worker pools including machine - type and Docker image. All worker pools except the first one are optional - and can be skipped by providing an empty value. See [more information](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/CustomJobSpec#WorkerPoolSpec). - isOptional: true - parameterType: LIST - outputDefinitions: - parameters: - gcp_resources: - description: Serialized JSON of `gcp_resources` [proto](https://github.com/kubeflow/pipelines/tree/master/components/google-cloud/google_cloud_pipeline_components/proto) - which tracks the CustomJob. - parameterType: STRING - comp-flip-biased-coin-op: - executorLabel: exec-flip-biased-coin-op - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-machine-type: - executorLabel: exec-machine-type - outputDefinitions: - parameters: - Output: - parameterType: STRING -deploymentSpec: - executors: - exec-accelerator-count: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - accelerator_count - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef accelerator_count() -> int:\n return 1\n\n" - image: python:3.9 - exec-accelerator-type: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - accelerator_type - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef accelerator_type() -> str:\n return 'NVIDIA_TESLA_P4'\n\n" - image: python:3.9 - exec-custom-training-job: - container: - args: - - --type - - CustomJob - - --payload - - '{"display_name": "{{$.inputs.parameters[''display_name'']}}", "job_spec": - {"worker_pool_specs": {{$.inputs.parameters[''worker_pool_specs'']}}, "scheduling": - {"timeout": "{{$.inputs.parameters[''timeout'']}}", "restart_job_on_worker_restart": - {{$.inputs.parameters[''restart_job_on_worker_restart'']}}}, "service_account": - "{{$.inputs.parameters[''service_account'']}}", "tensorboard": "{{$.inputs.parameters[''tensorboard'']}}", - "enable_web_access": {{$.inputs.parameters[''enable_web_access'']}}, "network": - "{{$.inputs.parameters[''network'']}}", "reserved_ip_ranges": {{$.inputs.parameters[''reserved_ip_ranges'']}}, - "base_output_directory": {"output_uri_prefix": "{{$.inputs.parameters[''base_output_directory'']}}"}}, - "labels": {{$.inputs.parameters[''labels'']}}, "encryption_spec": {"kms_key_name": - "{{$.inputs.parameters[''encryption_spec_key_name'']}}"}}' - - --project - - '{{$.inputs.parameters[''project'']}}' - - --location - - '{{$.inputs.parameters[''location'']}}' - - --gcp_resources - - '{{$.outputs.parameters[''gcp_resources''].output_file}}' - command: - - python3 - - -u - - -m - - google_cloud_pipeline_components.container.v1.custom_job.launcher - image: gcr.io/ml-pipeline/google-cloud-pipeline-components:2.14.1 - exec-flip-biased-coin-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - flip_biased_coin_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef flip_biased_coin_op() -> str:\n \"\"\"Flip a coin and output\ - \ heads.\"\"\"\n return 'heads'\n\n" - image: python:3.9 - exec-machine-type: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - machine_type - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef machine_type() -> str:\n return 'n1-standard-4'\n\n" - image: python:3.9 -pipelineInfo: - name: pipeline -root: - dag: - tasks: - accelerator-count: - cachingOptions: - enableCache: true - componentRef: - name: comp-accelerator-count - taskInfo: - name: accelerator-count - accelerator-type: - cachingOptions: - enableCache: true - componentRef: - name: comp-accelerator-type - taskInfo: - name: accelerator-type - condition-1: - componentRef: - name: comp-condition-1 - dependentTasks: - - accelerator-count - - accelerator-type - - flip-biased-coin-op - - machine-type - inputs: - parameters: - pipelinechannel--accelerator-count-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: accelerator-count - pipelinechannel--accelerator-type-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: accelerator-type - pipelinechannel--encryption_spec_key_name: - componentInputParameter: encryption_spec_key_name - pipelinechannel--flip-biased-coin-op-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: flip-biased-coin-op - pipelinechannel--location: - componentInputParameter: location - pipelinechannel--machine-type-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: machine-type - pipelinechannel--project: - componentInputParameter: project - taskInfo: - name: condition-1 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--flip-biased-coin-op-Output'] - == 'heads' - flip-biased-coin-op: - cachingOptions: {} - componentRef: - name: comp-flip-biased-coin-op - taskInfo: - name: flip-biased-coin-op - machine-type: - cachingOptions: - enableCache: true - componentRef: - name: comp-machine-type - taskInfo: - name: machine-type - inputDefinitions: - parameters: - encryption_spec_key_name: - defaultValue: '' - isOptional: true - parameterType: STRING - location: - parameterType: STRING - project: - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_dynamic_importer_metadata.yaml b/sdk/python/test_data/pipelines/pipeline_with_dynamic_importer_metadata.yaml deleted file mode 100644 index 3e788001c0e..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_dynamic_importer_metadata.yaml +++ /dev/null @@ -1,186 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-with-importer -# Inputs: -# int_input: int [Default: 1.0] -# name: str [Default: 'default-name'] -# pipeline_input_artifact_uri: str [Default: 'gs://ml-pipeline-playground/shakespeare1.txt'] -# pipeline_input_image_uri: str [Default: 'us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-5:latest'] -components: - comp-importer: - executorLabel: exec-importer - inputDefinitions: - parameters: - metadata: - parameterType: STRING - metadata-2: - parameterType: STRING - uri: - parameterType: STRING - outputDefinitions: - 
artifacts: - artifact: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-importer-2: - executorLabel: exec-importer-2 - inputDefinitions: - parameters: - metadata: - parameterType: STRING - metadata-2: - parameterType: STRING - metadata-3: - parameterType: NUMBER_INTEGER - uri: - parameterType: STRING - outputDefinitions: - artifacts: - artifact: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-make-name: - executorLabel: exec-make-name - inputDefinitions: - parameters: - name: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING -deploymentSpec: - executors: - exec-importer: - importer: - artifactUri: - runtimeParameter: uri - metadata: - containerSpec: - imageUri: '{{$.inputs.parameters[''metadata-2'']}}' - name: - - '{{$.inputs.parameters[''metadata'']}}' - - alias-name - typeSchema: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - exec-importer-2: - importer: - artifactUri: - constant: gs://ml-pipeline-playground/shakespeare1.txt - metadata: - containerSpec: - imageUri: us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-5:latest - list-of-data: - - '{{$.inputs.parameters[''metadata'']}}' - - '{{$.inputs.parameters[''metadata-2'']}}' - - '{{$.inputs.parameters[''metadata-3'']}}' - name: prefix-{{$.inputs.parameters['metadata']}} - '{{$.inputs.parameters[''metadata'']}}': '{{$.inputs.parameters[''metadata'']}}' - '{{$.inputs.parameters[''metadata-2'']}}': us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-5:latest - typeSchema: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - exec-make-name: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - make_name - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef make_name(name: str) -> str:\n return name\n\n" - image: python:3.9 -pipelineInfo: - name: pipeline-with-importer -root: - dag: - tasks: - importer: - cachingOptions: - enableCache: true - componentRef: - name: comp-importer - inputs: - parameters: - metadata: - componentInputParameter: name - metadata-2: - componentInputParameter: pipeline_input_image_uri - uri: - componentInputParameter: pipeline_input_artifact_uri - taskInfo: - name: importer - importer-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-importer-2 - dependentTasks: - - make-name - inputs: - parameters: - metadata: - taskOutputParameter: - outputParameterKey: Output - producerTask: make-name - metadata-2: - componentInputParameter: name - metadata-3: - componentInputParameter: int_input - uri: - runtimeValue: - constant: gs://ml-pipeline-playground/shakespeare1.txt - taskInfo: - name: importer-2 - make-name: - cachingOptions: - enableCache: true - componentRef: - name: comp-make-name - inputs: - parameters: - name: - runtimeValue: - constant: a-different-name - taskInfo: - name: make-name - inputDefinitions: - parameters: - 
int_input: - defaultValue: 1.0 - isOptional: true - parameterType: NUMBER_INTEGER - name: - defaultValue: default-name - isOptional: true - parameterType: STRING - pipeline_input_artifact_uri: - defaultValue: gs://ml-pipeline-playground/shakespeare1.txt - isOptional: true - parameterType: STRING - pipeline_input_image_uri: - defaultValue: us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-5:latest - isOptional: true - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_dynamic_pipeline_input_custom_training_job.py b/sdk/python/test_data/pipelines/pipeline_with_dynamic_pipeline_input_custom_training_job.py deleted file mode 100644 index b34bb8f6ed7..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_dynamic_pipeline_input_custom_training_job.py +++ /dev/null @@ -1,40 +0,0 @@ -import google_cloud_pipeline_components.v1.custom_job as custom_job -from kfp import dsl - - -@dsl.pipeline -def pipeline( - project: str, - location: str, - machine_type: str, - accelerator_type: str, - accelerator_count: int, - encryption_spec_key_name: str = '', -): - custom_job.CustomTrainingJobOp( - display_name='add-numbers', - worker_pool_specs=[{ - 'container_spec': { - 'image_uri': - ('gcr.io/ml-pipeline/google-cloud-pipeline-components:2.5.0' - ), - 'command': ['echo'], - 'args': ['foo'], - }, - 'machine_spec': { - 'machine_type': machine_type, - 'accelerator_type': accelerator_type, - 'accelerator_count': accelerator_count, - }, - 'replica_count': 1, - }], - project=project, - location=location, - encryption_spec_key_name=encryption_spec_key_name, - ) - - -if __name__ == '__main__': - from kfp import compiler - compiler.Compiler().compile( - pipeline_func=pipeline, package_path=__file__.replace('.py', '.yaml')) diff --git a/sdk/python/test_data/pipelines/pipeline_with_dynamic_pipeline_input_custom_training_job.yaml b/sdk/python/test_data/pipelines/pipeline_with_dynamic_pipeline_input_custom_training_job.yaml deleted file mode 100644 index 2b70925a3ab..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_dynamic_pipeline_input_custom_training_job.yaml +++ /dev/null @@ -1,208 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline -# Inputs: -# accelerator_count: int -# accelerator_type: str -# encryption_spec_key_name: str [Default: ''] -# location: str -# machine_type: str -# project: str -components: - comp-custom-training-job: - executorLabel: exec-custom-training-job - inputDefinitions: - parameters: - base_output_directory: - defaultValue: '' - description: The Cloud Storage location to store the output of this CustomJob - or HyperparameterTuningJob. See [more information ](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/GcsDestination). - isOptional: true - parameterType: STRING - display_name: - description: The name of the CustomJob. - parameterType: STRING - enable_web_access: - defaultValue: false - description: Whether you want Vertex AI to enable [interactive shell access - ](https://cloud.google.com/vertex-ai/docs/training/monitor-debug-interactive-shell) - to training containers. If `True`, you can access interactive shells at - the URIs given by [CustomJob.web_access_uris][]. - isOptional: true - parameterType: BOOLEAN - encryption_spec_key_name: - defaultValue: '' - description: Customer-managed encryption key options for the CustomJob. - If this is set, then all resources created by the CustomJob will be encrypted - with the provided encryption key. 
- isOptional: true - parameterType: STRING - labels: - defaultValue: {} - description: The labels with user-defined metadata to organize the CustomJob. - See [more information](https://goo.gl/xmQnxf). - isOptional: true - parameterType: STRUCT - location: - defaultValue: us-central1 - description: Location for creating the custom training job. If not set, - default to us-central1. - isOptional: true - parameterType: STRING - network: - defaultValue: '' - description: The full name of the Compute Engine network to which the job - should be peered. For example, `projects/12345/global/networks/myVPC`. - Format is of the form `projects/{project}/global/networks/{network}`. - Where `{project}` is a project number, as in `12345`, and `{network}` - is a network name. Private services access must already be configured - for the network. If left unspecified, the job is not peered with any network. - isOptional: true - parameterType: STRING - project: - defaultValue: '{{$.pipeline_google_cloud_project_id}}' - description: Project to create the custom training job in. Defaults to the - project in which the PipelineJob is run. - isOptional: true - parameterType: STRING - reserved_ip_ranges: - defaultValue: [] - description: A list of names for the reserved IP ranges under the VPC network - that can be used for this job. If set, we will deploy the job within the - provided IP ranges. Otherwise, the job will be deployed to any IP ranges - under the provided VPC network. - isOptional: true - parameterType: LIST - restart_job_on_worker_restart: - defaultValue: false - description: Restarts the entire CustomJob if a worker gets restarted. This - feature can be used by distributed training jobs that are not resilient - to workers leaving and joining a job. - isOptional: true - parameterType: BOOLEAN - service_account: - defaultValue: '' - description: Sets the default service account for workload run-as account. - The [service account ](https://cloud.google.com/vertex-ai/docs/pipelines/configure-project#service-account) - running the pipeline submitting jobs must have act-as permission on this - run-as account. If unspecified, the Vertex AI Custom Code [Service Agent - ](https://cloud.google.com/vertex-ai/docs/general/access-control#service-agents) - for the CustomJob's project. - isOptional: true - parameterType: STRING - tensorboard: - defaultValue: '' - description: The name of a Vertex AI TensorBoard resource to which this - CustomJob will upload TensorBoard logs. - isOptional: true - parameterType: STRING - timeout: - defaultValue: 604800s - description: 'The maximum job running time. The default is 7 days. A duration - in seconds with up to nine fractional digits, terminated by ''s'', for - example: "3.5s".' - isOptional: true - parameterType: STRING - worker_pool_specs: - defaultValue: [] - description: Serialized json spec of the worker pools including machine - type and Docker image. All worker pools except the first one are optional - and can be skipped by providing an empty value. See [more information](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/CustomJobSpec#WorkerPoolSpec). - isOptional: true - parameterType: LIST - outputDefinitions: - parameters: - gcp_resources: - description: Serialized JSON of `gcp_resources` [proto](https://github.com/kubeflow/pipelines/tree/master/components/google-cloud/google_cloud_pipeline_components/proto) - which tracks the CustomJob. 
- parameterType: STRING -deploymentSpec: - executors: - exec-custom-training-job: - container: - args: - - --type - - CustomJob - - --payload - - '{"display_name": "{{$.inputs.parameters[''display_name'']}}", "job_spec": - {"worker_pool_specs": {{$.inputs.parameters[''worker_pool_specs'']}}, "scheduling": - {"timeout": "{{$.inputs.parameters[''timeout'']}}", "restart_job_on_worker_restart": - {{$.inputs.parameters[''restart_job_on_worker_restart'']}}}, "service_account": - "{{$.inputs.parameters[''service_account'']}}", "tensorboard": "{{$.inputs.parameters[''tensorboard'']}}", - "enable_web_access": {{$.inputs.parameters[''enable_web_access'']}}, "network": - "{{$.inputs.parameters[''network'']}}", "reserved_ip_ranges": {{$.inputs.parameters[''reserved_ip_ranges'']}}, - "base_output_directory": {"output_uri_prefix": "{{$.inputs.parameters[''base_output_directory'']}}"}}, - "labels": {{$.inputs.parameters[''labels'']}}, "encryption_spec": {"kms_key_name": - "{{$.inputs.parameters[''encryption_spec_key_name'']}}"}}' - - --project - - '{{$.inputs.parameters[''project'']}}' - - --location - - '{{$.inputs.parameters[''location'']}}' - - --gcp_resources - - '{{$.outputs.parameters[''gcp_resources''].output_file}}' - command: - - python3 - - -u - - -m - - google_cloud_pipeline_components.container.v1.custom_job.launcher - image: gcr.io/ml-pipeline/google-cloud-pipeline-components:2.14.1 -pipelineInfo: - name: pipeline -root: - dag: - tasks: - custom-training-job: - cachingOptions: - enableCache: true - componentRef: - name: comp-custom-training-job - inputs: - parameters: - display_name: - runtimeValue: - constant: add-numbers - encryption_spec_key_name: - componentInputParameter: encryption_spec_key_name - location: - componentInputParameter: location - pipelinechannel--accelerator_count: - componentInputParameter: accelerator_count - pipelinechannel--accelerator_type: - componentInputParameter: accelerator_type - pipelinechannel--machine_type: - componentInputParameter: machine_type - project: - componentInputParameter: project - worker_pool_specs: - runtimeValue: - constant: - - container_spec: - args: - - foo - command: - - echo - image_uri: gcr.io/ml-pipeline/google-cloud-pipeline-components:2.5.0 - machine_spec: - accelerator_count: '{{$.inputs.parameters[''pipelinechannel--accelerator_count'']}}' - accelerator_type: '{{$.inputs.parameters[''pipelinechannel--accelerator_type'']}}' - machine_type: '{{$.inputs.parameters[''pipelinechannel--machine_type'']}}' - replica_count: 1.0 - taskInfo: - name: custom-training-job - inputDefinitions: - parameters: - accelerator_count: - parameterType: NUMBER_INTEGER - accelerator_type: - parameterType: STRING - encryption_spec_key_name: - defaultValue: '' - isOptional: true - parameterType: STRING - location: - parameterType: STRING - machine_type: - parameterType: STRING - project: - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_dynamic_task_output_custom_training_job.py b/sdk/python/test_data/pipelines/pipeline_with_dynamic_task_output_custom_training_job.py deleted file mode 100644 index 6308d1b0b36..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_dynamic_task_output_custom_training_job.py +++ /dev/null @@ -1,56 +0,0 @@ -import google_cloud_pipeline_components.v1.custom_job as custom_job -from kfp import dsl - - -@dsl.component -def machine_type() -> str: - return 'n1-standard-4' - - -@dsl.component -def accelerator_type() -> str: - return 'NVIDIA_TESLA_P4' - 
- -@dsl.component -def accelerator_count() -> int: - return 1 - - -@dsl.pipeline -def pipeline( - project: str, - location: str, - encryption_spec_key_name: str = '', -): - machine_type_task = machine_type() - accelerator_type_task = accelerator_type() - accelerator_count_task = accelerator_count() - - custom_job.CustomTrainingJobOp( - display_name='add-numbers', - worker_pool_specs=[{ - 'container_spec': { - 'image_uri': - ('gcr.io/ml-pipeline/google-cloud-pipeline-components:2.5.0' - ), - 'command': ['echo'], - 'args': ['foo'], - }, - 'machine_spec': { - 'machine_type': machine_type_task.output, - 'accelerator_type': accelerator_type_task.output, - 'accelerator_count': accelerator_count_task.output, - }, - 'replica_count': 1, - }], - project=project, - location=location, - encryption_spec_key_name=encryption_spec_key_name, - ) - - -if __name__ == '__main__': - from kfp import compiler - compiler.Compiler().compile( - pipeline_func=pipeline, package_path=__file__.replace('.py', '.yaml')) diff --git a/sdk/python/test_data/pipelines/pipeline_with_dynamic_task_output_custom_training_job.yaml b/sdk/python/test_data/pipelines/pipeline_with_dynamic_task_output_custom_training_job.yaml deleted file mode 100644 index 3c688b6bab3..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_dynamic_task_output_custom_training_job.yaml +++ /dev/null @@ -1,332 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline -# Inputs: -# encryption_spec_key_name: str [Default: ''] -# location: str -# project: str -components: - comp-accelerator-count: - executorLabel: exec-accelerator-count - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER - comp-accelerator-type: - executorLabel: exec-accelerator-type - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-custom-training-job: - executorLabel: exec-custom-training-job - inputDefinitions: - parameters: - base_output_directory: - defaultValue: '' - description: The Cloud Storage location to store the output of this CustomJob - or HyperparameterTuningJob. See [more information ](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/GcsDestination). - isOptional: true - parameterType: STRING - display_name: - description: The name of the CustomJob. - parameterType: STRING - enable_web_access: - defaultValue: false - description: Whether you want Vertex AI to enable [interactive shell access - ](https://cloud.google.com/vertex-ai/docs/training/monitor-debug-interactive-shell) - to training containers. If `True`, you can access interactive shells at - the URIs given by [CustomJob.web_access_uris][]. - isOptional: true - parameterType: BOOLEAN - encryption_spec_key_name: - defaultValue: '' - description: Customer-managed encryption key options for the CustomJob. - If this is set, then all resources created by the CustomJob will be encrypted - with the provided encryption key. - isOptional: true - parameterType: STRING - labels: - defaultValue: {} - description: The labels with user-defined metadata to organize the CustomJob. - See [more information](https://goo.gl/xmQnxf). - isOptional: true - parameterType: STRUCT - location: - defaultValue: us-central1 - description: Location for creating the custom training job. If not set, - default to us-central1. - isOptional: true - parameterType: STRING - network: - defaultValue: '' - description: The full name of the Compute Engine network to which the job - should be peered. For example, `projects/12345/global/networks/myVPC`. 
- Format is of the form `projects/{project}/global/networks/{network}`. - Where `{project}` is a project number, as in `12345`, and `{network}` - is a network name. Private services access must already be configured - for the network. If left unspecified, the job is not peered with any network. - isOptional: true - parameterType: STRING - project: - defaultValue: '{{$.pipeline_google_cloud_project_id}}' - description: Project to create the custom training job in. Defaults to the - project in which the PipelineJob is run. - isOptional: true - parameterType: STRING - reserved_ip_ranges: - defaultValue: [] - description: A list of names for the reserved IP ranges under the VPC network - that can be used for this job. If set, we will deploy the job within the - provided IP ranges. Otherwise, the job will be deployed to any IP ranges - under the provided VPC network. - isOptional: true - parameterType: LIST - restart_job_on_worker_restart: - defaultValue: false - description: Restarts the entire CustomJob if a worker gets restarted. This - feature can be used by distributed training jobs that are not resilient - to workers leaving and joining a job. - isOptional: true - parameterType: BOOLEAN - service_account: - defaultValue: '' - description: Sets the default service account for workload run-as account. - The [service account ](https://cloud.google.com/vertex-ai/docs/pipelines/configure-project#service-account) - running the pipeline submitting jobs must have act-as permission on this - run-as account. If unspecified, the Vertex AI Custom Code [Service Agent - ](https://cloud.google.com/vertex-ai/docs/general/access-control#service-agents) - for the CustomJob's project. - isOptional: true - parameterType: STRING - tensorboard: - defaultValue: '' - description: The name of a Vertex AI TensorBoard resource to which this - CustomJob will upload TensorBoard logs. - isOptional: true - parameterType: STRING - timeout: - defaultValue: 604800s - description: 'The maximum job running time. The default is 7 days. A duration - in seconds with up to nine fractional digits, terminated by ''s'', for - example: "3.5s".' - isOptional: true - parameterType: STRING - worker_pool_specs: - defaultValue: [] - description: Serialized json spec of the worker pools including machine - type and Docker image. All worker pools except the first one are optional - and can be skipped by providing an empty value. See [more information](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/CustomJobSpec#WorkerPoolSpec). - isOptional: true - parameterType: LIST - outputDefinitions: - parameters: - gcp_resources: - description: Serialized JSON of `gcp_resources` [proto](https://github.com/kubeflow/pipelines/tree/master/components/google-cloud/google_cloud_pipeline_components/proto) - which tracks the CustomJob. - parameterType: STRING - comp-machine-type: - executorLabel: exec-machine-type - outputDefinitions: - parameters: - Output: - parameterType: STRING -deploymentSpec: - executors: - exec-accelerator-count: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - accelerator_count - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef accelerator_count() -> int:\n return 1\n\n" - image: python:3.9 - exec-accelerator-type: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - accelerator_type - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef accelerator_type() -> str:\n return 'NVIDIA_TESLA_P4'\n\n" - image: python:3.9 - exec-custom-training-job: - container: - args: - - --type - - CustomJob - - --payload - - '{"display_name": "{{$.inputs.parameters[''display_name'']}}", "job_spec": - {"worker_pool_specs": {{$.inputs.parameters[''worker_pool_specs'']}}, "scheduling": - {"timeout": "{{$.inputs.parameters[''timeout'']}}", "restart_job_on_worker_restart": - {{$.inputs.parameters[''restart_job_on_worker_restart'']}}}, "service_account": - "{{$.inputs.parameters[''service_account'']}}", "tensorboard": "{{$.inputs.parameters[''tensorboard'']}}", - "enable_web_access": {{$.inputs.parameters[''enable_web_access'']}}, "network": - "{{$.inputs.parameters[''network'']}}", "reserved_ip_ranges": {{$.inputs.parameters[''reserved_ip_ranges'']}}, - "base_output_directory": {"output_uri_prefix": "{{$.inputs.parameters[''base_output_directory'']}}"}}, - "labels": {{$.inputs.parameters[''labels'']}}, "encryption_spec": {"kms_key_name": - "{{$.inputs.parameters[''encryption_spec_key_name'']}}"}}' - - --project - - '{{$.inputs.parameters[''project'']}}' - - --location - - '{{$.inputs.parameters[''location'']}}' - - --gcp_resources - - '{{$.outputs.parameters[''gcp_resources''].output_file}}' - command: - - python3 - - -u - - -m - - google_cloud_pipeline_components.container.v1.custom_job.launcher - image: gcr.io/ml-pipeline/google-cloud-pipeline-components:2.14.1 - exec-machine-type: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - machine_type - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef machine_type() -> str:\n return 'n1-standard-4'\n\n" - image: python:3.9 -pipelineInfo: - name: pipeline -root: - dag: - tasks: - accelerator-count: - cachingOptions: - enableCache: true - componentRef: - name: comp-accelerator-count - taskInfo: - name: accelerator-count - accelerator-type: - cachingOptions: - enableCache: true - componentRef: - name: comp-accelerator-type - taskInfo: - name: accelerator-type - custom-training-job: - cachingOptions: - enableCache: true - componentRef: - name: comp-custom-training-job - dependentTasks: - - accelerator-count - - accelerator-type - - machine-type - inputs: - parameters: - display_name: - runtimeValue: - constant: add-numbers - encryption_spec_key_name: - componentInputParameter: encryption_spec_key_name - location: - componentInputParameter: location - pipelinechannel--accelerator-count-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: accelerator-count - pipelinechannel--accelerator-type-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: accelerator-type - pipelinechannel--machine-type-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: machine-type - project: - componentInputParameter: project - worker_pool_specs: - runtimeValue: - constant: - - container_spec: - args: - - foo - command: - - echo - image_uri: gcr.io/ml-pipeline/google-cloud-pipeline-components:2.5.0 - machine_spec: - accelerator_count: '{{$.inputs.parameters[''pipelinechannel--accelerator-count-Output'']}}' - accelerator_type: '{{$.inputs.parameters[''pipelinechannel--accelerator-type-Output'']}}' - machine_type: '{{$.inputs.parameters[''pipelinechannel--machine-type-Output'']}}' - replica_count: 1.0 - taskInfo: - name: custom-training-job - machine-type: - cachingOptions: - enableCache: true - componentRef: - name: comp-machine-type - taskInfo: - name: machine-type - inputDefinitions: - parameters: - encryption_spec_key_name: - defaultValue: '' - isOptional: true - parameterType: STRING - location: - parameterType: STRING - project: - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_env.yaml b/sdk/python/test_data/pipelines/pipeline_with_env.yaml deleted file mode 100644 index 9663641b9ee..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_env.yaml +++ /dev/null @@ -1,84 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-with-env -components: - comp-print-env: - executorLabel: exec-print-env - comp-print-env-op: - executorLabel: exec-print-env-op -deploymentSpec: - executors: - exec-print-env: - container: - command: - - sh - - -c - - 'set -e -x - - echo "$ENV1" - - echo "$ENV2" - - echo "$ENV3" - - ' - env: - - name: ENV1 - value: val0 - - name: ENV2 - value: val2 - - name: ENV3 - value: val3 - image: alpine - exec-print-env-op: - container: - args: - - 
--executor_input - - '{{$}}' - - --function_to_execute - - print_env_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_env_op():\n import os\n print('ENV1', os.environ.get('ENV1'))\n\ - \ print('ENV2', os.environ.get('ENV2'))\n\n" - env: - - name: ENV1 - value: val1 - image: python:3.9 -pipelineInfo: - name: pipeline-with-env -root: - dag: - tasks: - print-env: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-env - taskInfo: - name: print-env - print-env-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-env-op - taskInfo: - name: print-env-op -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_exit_handler.yaml b/sdk/python/test_data/pipelines/pipeline_with_exit_handler.yaml deleted file mode 100644 index ca5d65cd408..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_exit_handler.yaml +++ /dev/null @@ -1,180 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-with-exit-handler -# Inputs: -# message: str [Default: 'Hello World!'] -components: - comp-exit-handler-1: - dag: - tasks: - fail-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-fail-op - inputs: - parameters: - message: - runtimeValue: - constant: Task failed. - taskInfo: - name: fail-op - print-op-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-2 - inputs: - parameters: - message: - componentInputParameter: pipelinechannel--message - taskInfo: - name: print-op-2 - inputDefinitions: - parameters: - pipelinechannel--message: - parameterType: STRING - comp-fail-op: - executorLabel: exec-fail-op - inputDefinitions: - parameters: - message: - parameterType: STRING - comp-print-op: - executorLabel: exec-print-op - inputDefinitions: - parameters: - message: - parameterType: STRING - comp-print-op-2: - executorLabel: exec-print-op-2 - inputDefinitions: - parameters: - message: - parameterType: STRING -deploymentSpec: - executors: - exec-fail-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - fail_op - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n\ - \ print(message)\n sys.exit(1)\n\n" - image: python:3.9 - exec-print-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ - \ print(message)\n\n" - image: python:3.9 - exec-print-op-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ - \ print(message)\n\n" - image: python:3.9 -pipelineInfo: - name: pipeline-with-exit-handler -root: - dag: - tasks: - exit-handler-1: - componentRef: - name: comp-exit-handler-1 - inputs: - parameters: - pipelinechannel--message: - componentInputParameter: message - taskInfo: - name: exit-handler-1 - print-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op - dependentTasks: - - exit-handler-1 - inputs: - parameters: - message: - runtimeValue: - constant: Exit handler has worked! - taskInfo: - name: print-op - triggerPolicy: - strategy: ALL_UPSTREAM_TASKS_COMPLETED - inputDefinitions: - parameters: - message: - defaultValue: Hello World! 
- isOptional: true - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_google_artifact_type.yaml b/sdk/python/test_data/pipelines/pipeline_with_google_artifact_type.yaml deleted file mode 100644 index ae54d2aef4d..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_google_artifact_type.yaml +++ /dev/null @@ -1,159 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-with-google-types -components: - comp-importer: - executorLabel: exec-importer - inputDefinitions: - parameters: - uri: - parameterType: STRING - outputDefinitions: - artifacts: - artifact: - artifactType: - schemaTitle: google.VertexDataset - schemaVersion: 0.0.0 - comp-model-consumer: - executorLabel: exec-model-consumer - inputDefinitions: - artifacts: - dataset: - artifactType: - schemaTitle: google.VertexDataset - schemaVersion: 0.0.0 - model: - artifactType: - schemaTitle: google.VertexModel - schemaVersion: 0.0.0 - comp-model-producer: - executorLabel: exec-model-producer - outputDefinitions: - artifacts: - model: - artifactType: - schemaTitle: google.VertexModel - schemaVersion: 0.0.0 -deploymentSpec: - executors: - exec-importer: - importer: - artifactUri: - constant: gs://ml-pipeline-playground/shakespeare1.txt - metadata: - key: value - typeSchema: - schemaTitle: google.VertexDataset - schemaVersion: 0.0.0 - exec-model-consumer: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - model_consumer - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' &&\ - \ python3 -m pip install --quiet --no-warn-script-location 'aiplatform'\ - \ && \"$0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\nfrom aiplatform import VertexModel\nfrom aiplatform import VertexDataset\n\ - \ndef model_consumer(model: Input[VertexModel],\n dataset:\ - \ Input[VertexDataset]):\n print('Model')\n print('artifact.type:\ - \ ', type(model))\n print('artifact.name: ', model.name)\n print('artifact.uri:\ - \ ', model.uri)\n print('artifact.metadata: ', model.metadata)\n\n \ - \ print('Dataset')\n print('artifact.type: ', type(dataset))\n print('artifact.name:\ - \ ', dataset.name)\n print('artifact.uri: ', dataset.uri)\n print('artifact.metadata:\ - \ ', dataset.metadata)\n\n" - image: python:3.9 - exec-model-producer: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - model_producer - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' &&\ - \ python3 -m pip install --quiet --no-warn-script-location 'aiplatform'\ - \ && \"$0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\nimport aiplatform\n\ndef model_producer(model: Output[aiplatform.VertexModel]):\n\ - \n assert isinstance(model, aiplatform.VertexModel), type(model)\n \ - \ with open(model.path, 'w') as f:\n f.write('my model')\n\n" - image: python:3.9 -pipelineInfo: - name: pipeline-with-google-types -root: - dag: - tasks: - importer: - cachingOptions: - enableCache: true - componentRef: - name: comp-importer - inputs: - parameters: - uri: - runtimeValue: - constant: gs://ml-pipeline-playground/shakespeare1.txt - taskInfo: - name: importer - model-consumer: - cachingOptions: - enableCache: true - componentRef: - name: comp-model-consumer - dependentTasks: - - importer - - model-producer - inputs: - artifacts: - dataset: - taskOutputArtifact: - outputArtifactKey: artifact - producerTask: importer - model: - taskOutputArtifact: - outputArtifactKey: model - producerTask: model-producer - taskInfo: - name: model-consumer - model-producer: - cachingOptions: - enableCache: true - componentRef: - name: comp-model-producer - taskInfo: - name: model-producer -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_if_placeholder.py b/sdk/python/test_data/pipelines/pipeline_with_if_placeholder.py deleted file mode 100644 index 4cc355c5446..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_if_placeholder.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2020 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import pathlib - -from kfp import compiler -from kfp import components -from kfp import dsl - -test_data_dir = pathlib.Path(__file__).parent.parent / 'v1_component_yaml' -component_op = components.load_component_from_file( - str(test_data_dir / 'if_placeholder_component.yaml')) - - -@dsl.pipeline(name='one-step-pipeline-with-if-placeholder') -def my_pipeline(input0: str, input1: str, input2: str): - # supply only optional_input_1 but not optional_input_2 - component = component_op(required_input=input0, optional_input_1=input1) - - -if __name__ == '__main__': - compiler.Compiler().compile( - pipeline_func=my_pipeline, - package_path=__file__.replace('.py', '.yaml')) diff --git a/sdk/python/test_data/pipelines/pipeline_with_if_placeholder.yaml b/sdk/python/test_data/pipelines/pipeline_with_if_placeholder.yaml deleted file mode 100644 index c01d58df2a3..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_if_placeholder.yaml +++ /dev/null @@ -1,58 +0,0 @@ -# PIPELINE DEFINITION -# Name: one-step-pipeline-with-if-placeholder -# Inputs: -# input0: str -# input1: str -# input2: str -components: - comp-component-with-optional-inputs: - executorLabel: exec-component-with-optional-inputs - inputDefinitions: - parameters: - optional_input_1: - isOptional: true - parameterType: STRING - optional_input_2: - isOptional: true - parameterType: STRING - required_input: - parameterType: STRING -deploymentSpec: - executors: - exec-component-with-optional-inputs: - container: - args: - - --arg0 - - '{{$.inputs.parameters[''required_input'']}}' - - '{"IfPresent": {"InputName": "optional_input_1", "Then": ["--arg1", "{{$.inputs.parameters[''optional_input_1'']}}"]}}' - - '{"IfPresent": {"InputName": "optional_input_2", "Then": ["--arg2", "{{$.inputs.parameters[''optional_input_2'']}}"], - "Else": ["--arg3", "default value"]}}' - image: gcr.io/my-project/my-image -pipelineInfo: - name: one-step-pipeline-with-if-placeholder -root: - dag: - tasks: - component-with-optional-inputs: - cachingOptions: - enableCache: true - componentRef: - name: comp-component-with-optional-inputs - inputs: - parameters: - optional_input_1: - componentInputParameter: input1 - required_input: - componentInputParameter: input0 - taskInfo: - name: component-with-optional-inputs - inputDefinitions: - parameters: - input0: - parameterType: STRING - input1: - parameterType: STRING - input2: - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_importer.yaml b/sdk/python/test_data/pipelines/pipeline_with_importer.yaml deleted file mode 100644 index 530a881afa4..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_importer.yaml +++ /dev/null @@ -1,242 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-with-importer -# Inputs: -# dataset2: str [Default: 'gs://ml-pipeline-playground/shakespeare2.txt'] -components: - comp-condition-1: - dag: - tasks: - importer-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-importer-2 - inputs: - parameters: - uri: - componentInputParameter: pipelinechannel--dataset2 - taskInfo: - name: importer-2 - train-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-train-2 - dependentTasks: - - importer-2 - inputs: - artifacts: - dataset: - taskOutputArtifact: - outputArtifactKey: artifact - producerTask: importer-2 - taskInfo: - name: train-2 - inputDefinitions: - parameters: - pipelinechannel--dataset2: - parameterType: STRING - pipelinechannel--train-scalar: - parameterType: STRING - 
comp-importer: - executorLabel: exec-importer - inputDefinitions: - parameters: - uri: - parameterType: STRING - outputDefinitions: - artifacts: - artifact: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-importer-2: - executorLabel: exec-importer-2 - inputDefinitions: - parameters: - uri: - parameterType: STRING - outputDefinitions: - artifacts: - artifact: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-train: - executorLabel: exec-train - inputDefinitions: - artifacts: - dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - outputDefinitions: - artifacts: - model: - artifactType: - schemaTitle: system.Model - schemaVersion: 0.0.1 - parameters: - scalar: - parameterType: STRING - comp-train-2: - executorLabel: exec-train-2 - inputDefinitions: - artifacts: - dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - outputDefinitions: - artifacts: - model: - artifactType: - schemaTitle: system.Model - schemaVersion: 0.0.1 - parameters: - scalar: - parameterType: STRING -deploymentSpec: - executors: - exec-importer: - importer: - artifactUri: - constant: gs://ml-pipeline-playground/shakespeare1.txt - metadata: - key: value - typeSchema: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - exec-importer-2: - importer: - artifactUri: - runtimeParameter: uri - reimport: true - typeSchema: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - exec-train: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - train - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef train(\n dataset: Input[Dataset]\n) -> NamedTuple('Outputs',\ - \ [\n ('scalar', str),\n ('model', Model),\n]):\n \"\"\"Dummy Training\ - \ step.\"\"\"\n with open(dataset.path) as f:\n data = f.read()\n\ - \ print('Dataset:', data)\n\n scalar = '123'\n model = f'My model\ - \ trained using data: {data}'\n\n from collections import namedtuple\n\ - \ output = namedtuple('Outputs', ['scalar', 'model'])\n return output(scalar,\ - \ model)\n\n" - image: python:3.9 - exec-train-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - train - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef train(\n dataset: Input[Dataset]\n) -> NamedTuple('Outputs',\ - \ [\n ('scalar', str),\n ('model', Model),\n]):\n \"\"\"Dummy Training\ - \ step.\"\"\"\n with open(dataset.path) as f:\n data = f.read()\n\ - \ print('Dataset:', data)\n\n scalar = '123'\n model = f'My model\ - \ trained using data: {data}'\n\n from collections import namedtuple\n\ - \ output = namedtuple('Outputs', ['scalar', 'model'])\n return output(scalar,\ - \ model)\n\n" - image: python:3.9 -pipelineInfo: - name: pipeline-with-importer -root: - dag: - tasks: - condition-1: - componentRef: - name: comp-condition-1 - dependentTasks: - - train - inputs: - parameters: - pipelinechannel--dataset2: - componentInputParameter: dataset2 - pipelinechannel--train-scalar: - taskOutputParameter: - outputParameterKey: scalar - producerTask: train - taskInfo: - name: condition-1 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--train-scalar'] == '123' - importer: - cachingOptions: - enableCache: true - componentRef: - name: comp-importer - inputs: - parameters: - uri: - runtimeValue: - constant: gs://ml-pipeline-playground/shakespeare1.txt - taskInfo: - name: importer - train: - cachingOptions: - enableCache: true - componentRef: - name: comp-train - dependentTasks: - - importer - inputs: - artifacts: - dataset: - taskOutputArtifact: - outputArtifactKey: artifact - producerTask: importer - taskInfo: - name: train - inputDefinitions: - parameters: - dataset2: - defaultValue: gs://ml-pipeline-playground/shakespeare2.txt - isOptional: true - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_importer_and_gcpc_types.yaml b/sdk/python/test_data/pipelines/pipeline_with_importer_and_gcpc_types.yaml deleted file mode 100644 index d824dd7c272..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_importer_and_gcpc_types.yaml +++ /dev/null @@ -1,75 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-with-importer-and-gcpc-type -components: - comp-consumer-op: - executorLabel: exec-consumer-op - inputDefinitions: - artifacts: - dataset: - artifactType: - schemaTitle: google.VertexDataset - schemaVersion: 0.0.1 - comp-importer: - executorLabel: exec-importer - inputDefinitions: - parameters: - uri: - parameterType: STRING - outputDefinitions: - artifacts: - artifact: - artifactType: - schemaTitle: google.VertexDataset - schemaVersion: 0.0.1 -deploymentSpec: - executors: - exec-consumer-op: - container: - args: - - '{{$.inputs.artifacts[''dataset''].path}}' - command: - - cmd - image: dummy - exec-importer: - importer: - artifactUri: - constant: gs://ml-pipeline-playground/shakespeare1.txt - metadata: - key: value - typeSchema: - schemaTitle: google.VertexDataset - schemaVersion: 0.0.1 -pipelineInfo: - name: pipeline-with-importer-and-gcpc-type -root: - dag: - tasks: - consumer-op: - 
cachingOptions: - enableCache: true - componentRef: - name: comp-consumer-op - dependentTasks: - - importer - inputs: - artifacts: - dataset: - taskOutputArtifact: - outputArtifactKey: artifact - producerTask: importer - taskInfo: - name: consumer-op - importer: - cachingOptions: - enableCache: true - componentRef: - name: comp-importer - inputs: - parameters: - uri: - runtimeValue: - constant: gs://ml-pipeline-playground/shakespeare1.txt - taskInfo: - name: importer -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_loops.yaml b/sdk/python/test_data/pipelines/pipeline_with_loops.yaml deleted file mode 100644 index df8c9c3fc69..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_loops.yaml +++ /dev/null @@ -1,443 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-with-loops -# Inputs: -# loop_parameter: list -components: - comp-args-generator-op: - executorLabel: exec-args-generator-op - outputDefinitions: - parameters: - Output: - parameterType: LIST - comp-for-loop-1: - dag: - tasks: - print-text: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--loop_parameter-loop-item - taskInfo: - name: print-text - inputDefinitions: - parameters: - pipelinechannel--loop_parameter: - parameterType: LIST - pipelinechannel--loop_parameter-loop-item: - parameterType: STRING - comp-for-loop-2: - dag: - tasks: - print-struct: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-struct - inputs: - parameters: - struct: - componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item - taskInfo: - name: print-struct - print-text-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text-2 - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item - parameterExpressionSelector: parseJson(string_value)["A_a"] - taskInfo: - name: print-text-2 - print-text-3: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text-3 - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item - parameterExpressionSelector: parseJson(string_value)["B_b"] - taskInfo: - name: print-text-3 - inputDefinitions: - parameters: - pipelinechannel--args-generator-op-Output: - parameterType: LIST - pipelinechannel--args-generator-op-Output-loop-item: - parameterType: STRUCT - comp-for-loop-4: - dag: - tasks: - print-struct-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-struct-2 - inputs: - parameters: - struct: - componentInputParameter: pipelinechannel--loop-item-param-3 - taskInfo: - name: print-struct-2 - print-text-4: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text-4 - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--loop-item-param-3 - parameterExpressionSelector: parseJson(string_value)["A_a"] - taskInfo: - name: print-text-4 - print-text-5: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text-5 - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--loop-item-param-3 - parameterExpressionSelector: parseJson(string_value)["B_b"] - taskInfo: - name: print-text-5 - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-3: - parameterType: STRUCT - comp-print-struct: - executorLabel: exec-print-struct - inputDefinitions: - parameters: - struct: - parameterType: 
STRUCT - comp-print-struct-2: - executorLabel: exec-print-struct-2 - inputDefinitions: - parameters: - struct: - parameterType: STRUCT - comp-print-text: - executorLabel: exec-print-text - inputDefinitions: - parameters: - msg: - parameterType: STRING - comp-print-text-2: - executorLabel: exec-print-text-2 - inputDefinitions: - parameters: - msg: - parameterType: STRING - comp-print-text-3: - executorLabel: exec-print-text-3 - inputDefinitions: - parameters: - msg: - parameterType: STRING - comp-print-text-4: - executorLabel: exec-print-text-4 - inputDefinitions: - parameters: - msg: - parameterType: STRING - comp-print-text-5: - executorLabel: exec-print-text-5 - inputDefinitions: - parameters: - msg: - parameterType: STRING -deploymentSpec: - executors: - exec-args-generator-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - args_generator_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef args_generator_op() -> List[Dict[str, str]]:\n return [{'A_a':\ - \ '1', 'B_b': '2'}, {'A_a': '10', 'B_b': '20'}]\n\n" - image: python:3.9 - exec-print-struct: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_struct - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_struct(struct: Dict):\n print(struct)\n\n" - image: python:3.9 - exec-print-struct-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_struct - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_struct(struct: Dict):\n print(struct)\n\n" - image: python:3.9 - exec-print-text: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_text - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 - exec-print-text-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_text - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 - exec-print-text-3: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_text - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 - exec-print-text-4: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_text - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 - exec-print-text-5: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_text - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 -pipelineInfo: - name: pipeline-with-loops -root: - dag: - tasks: - args-generator-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-args-generator-op - taskInfo: - name: args-generator-op - for-loop-1: - componentRef: - name: comp-for-loop-1 - inputs: - parameters: - pipelinechannel--loop_parameter: - componentInputParameter: loop_parameter - parameterIterator: - itemInput: pipelinechannel--loop_parameter-loop-item - items: - inputParameter: pipelinechannel--loop_parameter - taskInfo: - name: for-loop-1 - for-loop-2: - componentRef: - name: comp-for-loop-2 - dependentTasks: - - args-generator-op - inputs: - parameters: - pipelinechannel--args-generator-op-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: args-generator-op - parameterIterator: - itemInput: pipelinechannel--args-generator-op-Output-loop-item - items: - inputParameter: pipelinechannel--args-generator-op-Output - taskInfo: - name: for-loop-2 - for-loop-4: - componentRef: - name: comp-for-loop-4 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-3 - items: - raw: '[{"A_a": "1", "B_b": "2"}, {"A_a": "10", "B_b": "20"}]' - taskInfo: - name: for-loop-4 - inputDefinitions: - parameters: - loop_parameter: - parameterType: LIST -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_loops_and_conditions.yaml b/sdk/python/test_data/pipelines/pipeline_with_loops_and_conditions.yaml deleted file mode 100644 index 77ae492d702..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_loops_and_conditions.yaml +++ /dev/null @@ -1,1051 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-with-loops-and-conditions-multi-layers -# Inputs: -# loop_parameter: list [Default: [{'B_b': ['A', 'B'], 'A_a': 'heads'}, {'B_b': ['X', 'Y', 'Z'], 'A_a': 'tails'}]] -# msg: str [Default: 'hello'] -components: - comp-args-generator-op: - executorLabel: 
exec-args-generator-op - outputDefinitions: - parameters: - Output: - parameterType: LIST - comp-args-generator-op-2: - executorLabel: exec-args-generator-op-2 - outputDefinitions: - parameters: - Output: - parameterType: LIST - comp-condition-1: - dag: - tasks: - args-generator-op-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-args-generator-op-2 - taskInfo: - name: args-generator-op-2 - for-loop-2: - componentRef: - name: comp-for-loop-2 - dependentTasks: - - args-generator-op-2 - inputs: - parameters: - pipelinechannel--args-generator-op-2-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: args-generator-op-2 - pipelinechannel--args-generator-op-Output: - componentInputParameter: pipelinechannel--args-generator-op-Output - pipelinechannel--flip-coin-op-Output: - componentInputParameter: pipelinechannel--flip-coin-op-Output - pipelinechannel--loop_parameter: - componentInputParameter: pipelinechannel--loop_parameter - pipelinechannel--msg: - componentInputParameter: pipelinechannel--msg - parameterIterator: - itemInput: pipelinechannel--args-generator-op-Output-loop-item - items: - inputParameter: pipelinechannel--args-generator-op-Output - taskInfo: - name: for-loop-2 - inputDefinitions: - parameters: - pipelinechannel--args-generator-op-Output: - parameterType: LIST - pipelinechannel--flip-coin-op-Output: - parameterType: STRING - pipelinechannel--loop_parameter: - parameterType: LIST - pipelinechannel--msg: - parameterType: STRING - comp-condition-13: - dag: - tasks: - print-text-8: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text-8 - inputs: - parameters: - msg: - runtimeValue: - constant: '1' - taskInfo: - name: print-text-8 - inputDefinitions: - parameters: - pipelinechannel--flip-coin-op-Output: - parameterType: STRING - pipelinechannel--loop-item-param-11: - parameterType: STRING - comp-condition-15: - dag: - tasks: - for-loop-16: - componentRef: - name: comp-for-loop-16 - inputs: - parameters: - pipelinechannel--loop_parameter-loop-item: - componentInputParameter: pipelinechannel--loop_parameter-loop-item - pipelinechannel--loop_parameter-loop-item-subvar-B_b: - componentInputParameter: pipelinechannel--loop_parameter-loop-item - parameterExpressionSelector: parseJson(string_value)["B_b"] - parameterIterator: - itemInput: pipelinechannel--loop_parameter-loop-item-subvar-B_b-loop-item - items: - inputParameter: pipelinechannel--loop_parameter-loop-item-subvar-B_b - taskInfo: - name: for-loop-16 - inputDefinitions: - parameters: - pipelinechannel--loop_parameter-loop-item: - parameterType: STRING - pipelinechannel--loop_parameter-loop-item-subvar-A_a: - parameterType: STRING - comp-condition-3: - dag: - tasks: - print-text-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text-2 - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item - parameterExpressionSelector: parseJson(string_value)["B_b"] - taskInfo: - name: print-text-2 - inputDefinitions: - parameters: - pipelinechannel--args-generator-op-Output-loop-item: - parameterType: STRING - pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a: - parameterType: STRING - pipelinechannel--flip-coin-op-Output: - parameterType: STRING - comp-condition-4: - dag: - tasks: - print-text-3: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text-3 - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item 
- parameterExpressionSelector: parseJson(string_value)["B_b"] - taskInfo: - name: print-text-3 - inputDefinitions: - parameters: - pipelinechannel--args-generator-op-Output-loop-item: - parameterType: STRING - pipelinechannel--flip-coin-op-Output: - parameterType: STRING - comp-condition-5: - dag: - tasks: - for-loop-7: - componentRef: - name: comp-for-loop-7 - inputs: - parameters: - pipelinechannel--args-generator-op-Output-loop-item: - componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item - pipelinechannel--flip-coin-op-Output: - componentInputParameter: pipelinechannel--flip-coin-op-Output - parameterIterator: - itemInput: pipelinechannel--loop-item-param-6 - items: - raw: '[{"a": "-1"}, {"a": "-2"}]' - taskInfo: - name: for-loop-7 - inputDefinitions: - parameters: - pipelinechannel--args-generator-op-Output-loop-item: - parameterType: STRING - pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a: - parameterType: STRING - pipelinechannel--flip-coin-op-Output: - parameterType: STRING - comp-flip-coin-op: - executorLabel: exec-flip-coin-op - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-for-loop-10: - dag: - tasks: - print-text-6: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text-6 - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--loop_parameter-loop-item - msg2: - componentInputParameter: pipelinechannel--args-generator-op-2-Output-loop-item - parameterExpressionSelector: parseJson(string_value)["A_a"] - taskInfo: - name: print-text-6 - inputDefinitions: - parameters: - pipelinechannel--args-generator-op-2-Output: - parameterType: LIST - pipelinechannel--args-generator-op-2-Output-loop-item: - parameterType: STRING - pipelinechannel--flip-coin-op-Output: - parameterType: STRING - pipelinechannel--loop_parameter-loop-item: - parameterType: STRING - comp-for-loop-12: - dag: - tasks: - condition-13: - componentRef: - name: comp-condition-13 - inputs: - parameters: - pipelinechannel--flip-coin-op-Output: - componentInputParameter: pipelinechannel--flip-coin-op-Output - pipelinechannel--loop-item-param-11: - componentInputParameter: pipelinechannel--loop-item-param-11 - taskInfo: - name: condition-13 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--loop-item-param-11'] - == '1' - print-text-7: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text-7 - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--loop-item-param-11 - taskInfo: - name: print-text-7 - inputDefinitions: - parameters: - pipelinechannel--flip-coin-op-Output: - parameterType: STRING - pipelinechannel--loop-item-param-11: - parameterType: STRING - comp-for-loop-14: - dag: - tasks: - condition-15: - componentRef: - name: comp-condition-15 - inputs: - parameters: - pipelinechannel--loop_parameter-loop-item: - componentInputParameter: pipelinechannel--loop_parameter-loop-item - pipelinechannel--loop_parameter-loop-item-subvar-A_a: - componentInputParameter: pipelinechannel--loop_parameter-loop-item - parameterExpressionSelector: parseJson(string_value)["A_a"] - taskInfo: - name: condition-15 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--loop_parameter-loop-item-subvar-A_a'] - == 'heads' - inputDefinitions: - parameters: - pipelinechannel--loop_parameter: - parameterType: LIST - pipelinechannel--loop_parameter-loop-item: - parameterType: STRING - comp-for-loop-16: - dag: - tasks: - print-text-9: - cachingOptions: - 
enableCache: true - componentRef: - name: comp-print-text-9 - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--loop_parameter-loop-item-subvar-B_b-loop-item - taskInfo: - name: print-text-9 - inputDefinitions: - parameters: - pipelinechannel--loop_parameter-loop-item: - parameterType: STRING - pipelinechannel--loop_parameter-loop-item-subvar-B_b: - parameterType: STRING - pipelinechannel--loop_parameter-loop-item-subvar-B_b-loop-item: - parameterType: STRING - comp-for-loop-2: - dag: - tasks: - condition-3: - componentRef: - name: comp-condition-3 - inputs: - parameters: - pipelinechannel--args-generator-op-Output-loop-item: - componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item - pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a: - componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item - parameterExpressionSelector: parseJson(string_value)["A_a"] - pipelinechannel--flip-coin-op-Output: - componentInputParameter: pipelinechannel--flip-coin-op-Output - taskInfo: - name: condition-3 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a'] - == 'heads' - condition-4: - componentRef: - name: comp-condition-4 - inputs: - parameters: - pipelinechannel--args-generator-op-Output-loop-item: - componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item - pipelinechannel--flip-coin-op-Output: - componentInputParameter: pipelinechannel--flip-coin-op-Output - taskInfo: - name: condition-4 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--flip-coin-op-Output'] - == 'heads' - condition-5: - componentRef: - name: comp-condition-5 - inputs: - parameters: - pipelinechannel--args-generator-op-Output-loop-item: - componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item - pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a: - componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item - parameterExpressionSelector: parseJson(string_value)["A_a"] - pipelinechannel--flip-coin-op-Output: - componentInputParameter: pipelinechannel--flip-coin-op-Output - taskInfo: - name: condition-5 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a'] - == 'tails' - for-loop-12: - componentRef: - name: comp-for-loop-12 - inputs: - parameters: - pipelinechannel--flip-coin-op-Output: - componentInputParameter: pipelinechannel--flip-coin-op-Output - parameterIterator: - itemInput: pipelinechannel--loop-item-param-11 - items: - raw: '["1", "2"]' - taskInfo: - name: for-loop-12 - for-loop-8: - componentRef: - name: comp-for-loop-8 - inputs: - parameters: - pipelinechannel--args-generator-op-Output-loop-item: - componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item - pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b: - componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item - parameterExpressionSelector: parseJson(string_value)["B_b"] - pipelinechannel--flip-coin-op-Output: - componentInputParameter: pipelinechannel--flip-coin-op-Output - parameterIterator: - itemInput: pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b-loop-item - items: - inputParameter: pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b - taskInfo: - name: for-loop-8 - for-loop-9: - componentRef: - name: comp-for-loop-9 - inputs: - parameters: - pipelinechannel--args-generator-op-2-Output: - 
componentInputParameter: pipelinechannel--args-generator-op-2-Output - pipelinechannel--flip-coin-op-Output: - componentInputParameter: pipelinechannel--flip-coin-op-Output - pipelinechannel--loop_parameter: - componentInputParameter: pipelinechannel--loop_parameter - parameterIterator: - itemInput: pipelinechannel--loop_parameter-loop-item - items: - inputParameter: pipelinechannel--loop_parameter - taskInfo: - name: for-loop-9 - print-text: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--msg - taskInfo: - name: print-text - inputDefinitions: - parameters: - pipelinechannel--args-generator-op-2-Output: - parameterType: LIST - pipelinechannel--args-generator-op-Output: - parameterType: LIST - pipelinechannel--args-generator-op-Output-loop-item: - parameterType: STRING - pipelinechannel--flip-coin-op-Output: - parameterType: STRING - pipelinechannel--loop_parameter: - parameterType: LIST - pipelinechannel--msg: - parameterType: STRING - comp-for-loop-7: - dag: - tasks: - print-struct: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-struct - inputs: - parameters: - struct: - componentInputParameter: pipelinechannel--loop-item-param-6 - taskInfo: - name: print-struct - inputDefinitions: - parameters: - pipelinechannel--args-generator-op-Output-loop-item: - parameterType: STRING - pipelinechannel--flip-coin-op-Output: - parameterType: STRING - pipelinechannel--loop-item-param-6: - parameterType: STRUCT - comp-for-loop-8: - dag: - tasks: - print-text-4: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text-4 - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b-loop-item - taskInfo: - name: print-text-4 - inputDefinitions: - parameters: - pipelinechannel--args-generator-op-Output-loop-item: - parameterType: STRING - pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b: - parameterType: STRING - pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b-loop-item: - parameterType: STRING - pipelinechannel--flip-coin-op-Output: - parameterType: STRING - comp-for-loop-9: - dag: - tasks: - for-loop-10: - componentRef: - name: comp-for-loop-10 - inputs: - parameters: - pipelinechannel--args-generator-op-2-Output: - componentInputParameter: pipelinechannel--args-generator-op-2-Output - pipelinechannel--flip-coin-op-Output: - componentInputParameter: pipelinechannel--flip-coin-op-Output - pipelinechannel--loop_parameter-loop-item: - componentInputParameter: pipelinechannel--loop_parameter-loop-item - parameterIterator: - itemInput: pipelinechannel--args-generator-op-2-Output-loop-item - items: - inputParameter: pipelinechannel--args-generator-op-2-Output - taskInfo: - name: for-loop-10 - print-text-5: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text-5 - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--loop_parameter-loop-item - taskInfo: - name: print-text-5 - inputDefinitions: - parameters: - pipelinechannel--args-generator-op-2-Output: - parameterType: LIST - pipelinechannel--flip-coin-op-Output: - parameterType: STRING - pipelinechannel--loop_parameter: - parameterType: LIST - pipelinechannel--loop_parameter-loop-item: - parameterType: STRING - comp-print-struct: - executorLabel: exec-print-struct - inputDefinitions: - parameters: - struct: - parameterType: STRUCT - comp-print-text: - executorLabel: exec-print-text 
- inputDefinitions: - parameters: - msg: - parameterType: STRING - msg2: - isOptional: true - parameterType: STRING - comp-print-text-2: - executorLabel: exec-print-text-2 - inputDefinitions: - parameters: - msg: - parameterType: STRING - msg2: - isOptional: true - parameterType: STRING - comp-print-text-3: - executorLabel: exec-print-text-3 - inputDefinitions: - parameters: - msg: - parameterType: STRING - msg2: - isOptional: true - parameterType: STRING - comp-print-text-4: - executorLabel: exec-print-text-4 - inputDefinitions: - parameters: - msg: - parameterType: STRING - msg2: - isOptional: true - parameterType: STRING - comp-print-text-5: - executorLabel: exec-print-text-5 - inputDefinitions: - parameters: - msg: - parameterType: STRING - msg2: - isOptional: true - parameterType: STRING - comp-print-text-6: - executorLabel: exec-print-text-6 - inputDefinitions: - parameters: - msg: - parameterType: STRING - msg2: - isOptional: true - parameterType: STRING - comp-print-text-7: - executorLabel: exec-print-text-7 - inputDefinitions: - parameters: - msg: - parameterType: STRING - msg2: - isOptional: true - parameterType: STRING - comp-print-text-8: - executorLabel: exec-print-text-8 - inputDefinitions: - parameters: - msg: - parameterType: STRING - msg2: - isOptional: true - parameterType: STRING - comp-print-text-9: - executorLabel: exec-print-text-9 - inputDefinitions: - parameters: - msg: - parameterType: STRING - msg2: - isOptional: true - parameterType: STRING -deploymentSpec: - executors: - exec-args-generator-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - args_generator_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef args_generator_op() -> list:\n return [\n {\n \ - \ 'A_a': '1',\n 'B_b': ['2', '20'],\n },\n \ - \ {\n 'A_a': '10',\n 'B_b': ['22', '222'],\n \ - \ },\n ]\n\n" - image: python:3.9 - exec-args-generator-op-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - args_generator_op - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef args_generator_op() -> list:\n return [\n {\n \ - \ 'A_a': '1',\n 'B_b': ['2', '20'],\n },\n \ - \ {\n 'A_a': '10',\n 'B_b': ['22', '222'],\n \ - \ },\n ]\n\n" - image: python:3.9 - exec-flip-coin-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - flip_coin_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ - \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ - \ 1) == 0 else 'tails'\n return result\n\n" - image: python:3.9 - exec-print-struct: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_struct - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_struct(struct: dict):\n print(struct)\n\n" - image: python:3.9 - exec-print-text: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_text - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ - \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 - exec-print-text-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_text - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ - \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 - exec-print-text-3: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_text - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ - \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 - exec-print-text-4: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_text - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ - \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 - exec-print-text-5: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_text - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ - \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 - exec-print-text-6: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_text - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ - \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 - exec-print-text-7: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_text - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ - \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 - exec-print-text-8: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_text - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ - \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 - exec-print-text-9: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_text - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ - \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 -pipelineInfo: - name: pipeline-with-loops-and-conditions-multi-layers -root: - dag: - tasks: - args-generator-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-args-generator-op - taskInfo: - name: args-generator-op - condition-1: - componentRef: - name: comp-condition-1 - dependentTasks: - - args-generator-op - - flip-coin-op - inputs: - parameters: - pipelinechannel--args-generator-op-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: args-generator-op - pipelinechannel--flip-coin-op-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: flip-coin-op - pipelinechannel--loop_parameter: - componentInputParameter: loop_parameter - pipelinechannel--msg: - componentInputParameter: msg - taskInfo: - name: condition-1 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--flip-coin-op-Output'] - != 'no-such-result' - flip-coin-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-flip-coin-op - taskInfo: - name: flip-coin-op - for-loop-14: - componentRef: - name: comp-for-loop-14 - inputs: - parameters: - pipelinechannel--loop_parameter: - componentInputParameter: loop_parameter - parameterIterator: - itemInput: pipelinechannel--loop_parameter-loop-item - items: - inputParameter: pipelinechannel--loop_parameter - taskInfo: - name: for-loop-14 - inputDefinitions: - parameters: - loop_parameter: - defaultValue: - - A_a: heads - B_b: - - A - - B - - A_a: tails - B_b: - - X - - Y - - Z - isOptional: true - parameterType: LIST - msg: - defaultValue: hello - isOptional: true - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_metadata_fields.yaml b/sdk/python/test_data/pipelines/pipeline_with_metadata_fields.yaml deleted file mode 100644 index b3181c95eda..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_metadata_fields.yaml +++ /dev/null @@ -1,179 +0,0 @@ -# PIPELINE DEFINITION -# Name: dataset-concatenator -# Description: A pipeline that joins string to in_dataset. -# Inputs: -# in_dataset: system.Dataset -# string: str -# Outputs: -# Output: system.Dataset -components: - comp-dataset-joiner: - executorLabel: exec-dataset-joiner - inputDefinitions: - artifacts: - dataset_a: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - description: First dataset. - dataset_b: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - description: Second dataset. - outputDefinitions: - artifacts: - out_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - description: The concatenated dataset. - parameters: - Output: - description: The concatenated string. 
- parameterType: STRING - comp-str-to-dataset: - executorLabel: exec-str-to-dataset - inputDefinitions: - parameters: - string: - description: The string. - parameterType: STRING - outputDefinitions: - artifacts: - dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - description: The dataset. -deploymentSpec: - executors: - exec-dataset-joiner: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - dataset_joiner - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef dataset_joiner(\n dataset_a: Input[Dataset],\n dataset_b:\ - \ Input[Dataset],\n out_dataset: Output[Dataset],\n) -> str:\n \"\"\ - \"Concatenate dataset_a and dataset_b.\n\n Also returns the concatenated\ - \ string.\n\n Args:\n dataset_a: First dataset.\n dataset_b:\ - \ Second dataset.\n\n Returns:\n out_dataset: The concatenated\ - \ dataset.\n Output: The concatenated string.\n \"\"\"\n with\ - \ open(dataset_a.path) as f:\n content_a = f.read()\n\n with open(dataset_b.path)\ - \ as f:\n content_b = f.read()\n\n concatenated_string = content_a\ - \ + content_b\n with open(out_dataset.path, 'w') as f:\n f.write(concatenated_string)\n\ - \n return concatenated_string\n\n" - image: python:3.9 - exec-str-to-dataset: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - str_to_dataset - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef str_to_dataset(string: str, dataset: Output[Dataset]):\n \"\ - \"\"Convert string to dataset.\n\n Args:\n string: The string.\n\ - \n Returns:\n dataset: The dataset.\n \"\"\"\n with open(dataset.path,\ - \ 'w') as f:\n f.write(string)\n\n" - image: python:3.9 -pipelineInfo: - description: A pipeline that joins string to in_dataset. 
- displayName: Concatenation pipeline - name: dataset-concatenator -root: - dag: - outputs: - artifacts: - Output: - artifactSelectors: - - outputArtifactKey: out_dataset - producerSubtask: dataset-joiner - tasks: - dataset-joiner: - cachingOptions: - enableCache: true - componentRef: - name: comp-dataset-joiner - dependentTasks: - - str-to-dataset - inputs: - artifacts: - dataset_a: - taskOutputArtifact: - outputArtifactKey: dataset - producerTask: str-to-dataset - dataset_b: - componentInputArtifact: in_dataset - taskInfo: - name: dataset-joiner - str-to-dataset: - cachingOptions: - enableCache: true - componentRef: - name: comp-str-to-dataset - inputs: - parameters: - string: - componentInputParameter: string - taskInfo: - name: str-to-dataset - inputDefinitions: - artifacts: - in_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - description: Dataset to which to concatenate string. - parameters: - string: - description: String to concatenate to in_artifact. - parameterType: STRING - outputDefinitions: - artifacts: - Output: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - description: The final concatenated dataset. -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_multiple_exit_handlers.yaml b/sdk/python/test_data/pipelines/pipeline_with_multiple_exit_handlers.yaml deleted file mode 100644 index 1e99f2eb8cf..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_multiple_exit_handlers.yaml +++ /dev/null @@ -1,406 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-with-multiple-exit-handlers -# Inputs: -# message: str [Default: 'Hello World!'] -components: - comp-exit-handler-1: - dag: - tasks: - fail-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-fail-op - inputs: - parameters: - message: - runtimeValue: - constant: Task failed. 
- taskInfo: - name: fail-op - print-op-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-2 - inputs: - parameters: - message: - componentInputParameter: pipelinechannel--message - taskInfo: - name: print-op-2 - inputDefinitions: - parameters: - pipelinechannel--message: - parameterType: STRING - comp-exit-handler-2: - dag: - tasks: - print-op-4: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-4 - inputs: - parameters: - message: - componentInputParameter: pipelinechannel--message - taskInfo: - name: print-op-4 - inputDefinitions: - parameters: - pipelinechannel--message: - parameterType: STRING - comp-exit-handler-3: - dag: - tasks: - print-op-6: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-6 - inputs: - parameters: - message: - componentInputParameter: pipelinechannel--message - taskInfo: - name: print-op-6 - inputDefinitions: - parameters: - pipelinechannel--message: - parameterType: STRING - comp-fail-op: - executorLabel: exec-fail-op - inputDefinitions: - parameters: - message: - parameterType: STRING - comp-print-op: - executorLabel: exec-print-op - inputDefinitions: - parameters: - message: - parameterType: STRING - comp-print-op-2: - executorLabel: exec-print-op-2 - inputDefinitions: - parameters: - message: - parameterType: STRING - comp-print-op-3: - executorLabel: exec-print-op-3 - inputDefinitions: - parameters: - message: - parameterType: STRING - comp-print-op-4: - executorLabel: exec-print-op-4 - inputDefinitions: - parameters: - message: - parameterType: STRING - comp-print-op-5: - executorLabel: exec-print-op-5 - inputDefinitions: - parameters: - message: - parameterType: STRING - comp-print-op-6: - executorLabel: exec-print-op-6 - inputDefinitions: - parameters: - message: - parameterType: STRING -deploymentSpec: - executors: - exec-fail-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - fail_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n\ - \ print(message)\n sys.exit(1)\n\n" - image: python:3.9 - exec-print-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ - \ print(message)\n\n" - image: python:3.9 - exec-print-op-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ - \ print(message)\n\n" - image: python:3.9 - exec-print-op-3: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ - \ print(message)\n\n" - image: python:3.9 - exec-print-op-4: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ - \ print(message)\n\n" - image: python:3.9 - exec-print-op-5: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ - \ print(message)\n\n" - image: python:3.9 - exec-print-op-6: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ - \ print(message)\n\n" - image: python:3.9 -pipelineInfo: - name: pipeline-with-multiple-exit-handlers -root: - dag: - tasks: - exit-handler-1: - componentRef: - name: comp-exit-handler-1 - inputs: - parameters: - pipelinechannel--message: - componentInputParameter: message - taskInfo: - name: exit-handler-1 - exit-handler-2: - componentRef: - name: comp-exit-handler-2 - inputs: - parameters: - pipelinechannel--message: - componentInputParameter: message - taskInfo: - name: exit-handler-2 - exit-handler-3: - componentRef: - name: comp-exit-handler-3 - inputs: - parameters: - pipelinechannel--message: - componentInputParameter: message - taskInfo: - name: exit-handler-3 - print-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op - dependentTasks: - - exit-handler-1 - inputs: - parameters: - message: - runtimeValue: - constant: First exit handler has worked! 
- taskInfo: - name: print-op - triggerPolicy: - strategy: ALL_UPSTREAM_TASKS_COMPLETED - print-op-3: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-3 - dependentTasks: - - exit-handler-2 - inputs: - parameters: - message: - runtimeValue: - constant: Second exit handler has worked! - taskInfo: - name: print-op-3 - triggerPolicy: - strategy: ALL_UPSTREAM_TASKS_COMPLETED - print-op-5: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-5 - dependentTasks: - - exit-handler-3 - inputs: - parameters: - message: - runtimeValue: - constant: Third exit handler has worked! - taskInfo: - name: print-op-5 - triggerPolicy: - strategy: ALL_UPSTREAM_TASKS_COMPLETED - inputDefinitions: - parameters: - message: - defaultValue: Hello World! - isOptional: true - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_nested_conditions.yaml b/sdk/python/test_data/pipelines/pipeline_with_nested_conditions.yaml deleted file mode 100644 index 62b04e0f2ab..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_nested_conditions.yaml +++ /dev/null @@ -1,445 +0,0 @@ -# PIPELINE DEFINITION -# Name: nested-conditions-pipeline -components: - comp-condition-1: - dag: - tasks: - condition-2: - componentRef: - name: comp-condition-2 - dependentTasks: - - flip-coin-op-3 - inputs: - parameters: - pipelinechannel--flip-coin-op-2-Output: - componentInputParameter: pipelinechannel--flip-coin-op-2-Output - pipelinechannel--flip-coin-op-3-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: flip-coin-op-3 - pipelinechannel--flip-coin-op-Output: - componentInputParameter: pipelinechannel--flip-coin-op-Output - taskInfo: - name: condition-2 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--flip-coin-op-2-Output'] - == inputs.parameter_values['pipelinechannel--flip-coin-op-3-Output'] - flip-coin-op-3: - cachingOptions: - enableCache: true - componentRef: - name: comp-flip-coin-op-3 - taskInfo: - name: flip-coin-op-3 - print-op-3: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-3 - dependentTasks: - - flip-coin-op-3 - inputs: - parameters: - msg: - taskOutputParameter: - outputParameterKey: Output - producerTask: flip-coin-op-3 - taskInfo: - name: print-op-3 - inputDefinitions: - parameters: - pipelinechannel--flip-coin-op-2-Output: - parameterType: STRING - pipelinechannel--flip-coin-op-Output: - parameterType: STRING - comp-condition-2: - dag: - tasks: - flip-coin-op-4: - cachingOptions: - enableCache: true - componentRef: - name: comp-flip-coin-op-4 - taskInfo: - name: flip-coin-op-4 - print-op-4: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-4 - dependentTasks: - - flip-coin-op-4 - inputs: - parameters: - msg: - taskOutputParameter: - outputParameterKey: Output - producerTask: flip-coin-op-4 - taskInfo: - name: print-op-4 - inputDefinitions: - parameters: - pipelinechannel--flip-coin-op-2-Output: - parameterType: STRING - pipelinechannel--flip-coin-op-3-Output: - parameterType: STRING - pipelinechannel--flip-coin-op-Output: - parameterType: STRING - comp-flip-coin-op: - executorLabel: exec-flip-coin-op - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-flip-coin-op-2: - executorLabel: exec-flip-coin-op-2 - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-flip-coin-op-3: - executorLabel: exec-flip-coin-op-3 - outputDefinitions: - parameters: - Output: 
- parameterType: STRING - comp-flip-coin-op-4: - executorLabel: exec-flip-coin-op-4 - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-op: - executorLabel: exec-print-op - inputDefinitions: - parameters: - msg: - parameterType: STRING - comp-print-op-2: - executorLabel: exec-print-op-2 - inputDefinitions: - parameters: - msg: - parameterType: STRING - comp-print-op-3: - executorLabel: exec-print-op-3 - inputDefinitions: - parameters: - msg: - parameterType: STRING - comp-print-op-4: - executorLabel: exec-print-op-4 - inputDefinitions: - parameters: - msg: - parameterType: STRING -deploymentSpec: - executors: - exec-flip-coin-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - flip_coin_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ - \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ - \ 1) == 0 else 'tails'\n return result\n\n" - image: python:3.9 - exec-flip-coin-op-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - flip_coin_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ - \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ - \ 1) == 0 else 'tails'\n return result\n\n" - image: python:3.9 - exec-flip-coin-op-3: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - flip_coin_op - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ - \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ - \ 1) == 0 else 'tails'\n return result\n\n" - image: python:3.9 - exec-flip-coin-op-4: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - flip_coin_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ - \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ - \ 1) == 0 else 'tails'\n return result\n\n" - image: python:3.9 - exec-print-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ - \n" - image: python:3.9 - exec-print-op-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ - \n" - image: python:3.9 - exec-print-op-3: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ - \n" - image: python:3.9 - exec-print-op-4: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ - \n" - image: python:3.9 -pipelineInfo: - name: nested-conditions-pipeline -root: - dag: - tasks: - condition-1: - componentRef: - name: comp-condition-1 - dependentTasks: - - flip-coin-op - - flip-coin-op-2 - inputs: - parameters: - pipelinechannel--flip-coin-op-2-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: flip-coin-op-2 - pipelinechannel--flip-coin-op-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: flip-coin-op - taskInfo: - name: condition-1 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--flip-coin-op-Output'] - != 'no-such-result' - flip-coin-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-flip-coin-op - taskInfo: - name: flip-coin-op - flip-coin-op-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-flip-coin-op-2 - taskInfo: - name: flip-coin-op-2 - print-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op - dependentTasks: - - flip-coin-op - inputs: - parameters: - msg: - taskOutputParameter: - outputParameterKey: Output - producerTask: flip-coin-op - taskInfo: - name: print-op - print-op-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-2 - dependentTasks: - - flip-coin-op-2 - inputs: - parameters: - msg: - taskOutputParameter: - outputParameterKey: Output - producerTask: flip-coin-op-2 - taskInfo: - name: print-op-2 -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_nested_conditions_yaml.yaml b/sdk/python/test_data/pipelines/pipeline_with_nested_conditions_yaml.yaml deleted file mode 100644 index ef6d01cd86b..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_nested_conditions_yaml.yaml +++ /dev/null @@ -1,350 +0,0 @@ -# PIPELINE DEFINITION -# Name: conditional-execution-pipeline -# Description: Shows how to use dsl.Condition(). 
-components: - comp-condition-1: - dag: - tasks: - condition-2: - componentRef: - name: comp-condition-2 - dependentTasks: - - generate-random-number - inputs: - parameters: - pipelinechannel--flip-coin-output: - componentInputParameter: pipelinechannel--flip-coin-output - pipelinechannel--generate-random-number-output: - taskOutputParameter: - outputParameterKey: output - producerTask: generate-random-number - taskInfo: - name: condition-2 - triggerPolicy: - condition: int(inputs.parameter_values['pipelinechannel--generate-random-number-output']) - > 5 - condition-3: - componentRef: - name: comp-condition-3 - dependentTasks: - - generate-random-number - inputs: - parameters: - pipelinechannel--flip-coin-output: - componentInputParameter: pipelinechannel--flip-coin-output - pipelinechannel--generate-random-number-output: - taskOutputParameter: - outputParameterKey: output - producerTask: generate-random-number - taskInfo: - name: condition-3 - triggerPolicy: - condition: int(inputs.parameter_values['pipelinechannel--generate-random-number-output']) - <= 5 - generate-random-number: - cachingOptions: - enableCache: true - componentRef: - name: comp-generate-random-number - taskInfo: - name: generate-random-number - inputDefinitions: - parameters: - pipelinechannel--flip-coin-output: - parameterType: STRING - comp-condition-2: - dag: - tasks: - print: - cachingOptions: - enableCache: true - componentRef: - name: comp-print - inputs: - parameters: - msg: - runtimeValue: - constant: heads and {{$.inputs.parameters['pipelinechannel--generate-random-number-output']}} - > 5! - pipelinechannel--generate-random-number-output: - componentInputParameter: pipelinechannel--generate-random-number-output - taskInfo: - name: print - inputDefinitions: - parameters: - pipelinechannel--flip-coin-output: - parameterType: STRING - pipelinechannel--generate-random-number-output: - parameterType: NUMBER_INTEGER - comp-condition-3: - dag: - tasks: - print-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-2 - inputs: - parameters: - msg: - runtimeValue: - constant: heads and {{$.inputs.parameters['pipelinechannel--generate-random-number-output']}} - <= 5! 
- pipelinechannel--generate-random-number-output: - componentInputParameter: pipelinechannel--generate-random-number-output - taskInfo: - name: print-2 - inputDefinitions: - parameters: - pipelinechannel--flip-coin-output: - parameterType: STRING - pipelinechannel--generate-random-number-output: - parameterType: NUMBER_INTEGER - comp-condition-4: - dag: - tasks: - condition-5: - componentRef: - name: comp-condition-5 - dependentTasks: - - generate-random-number-2 - inputs: - parameters: - pipelinechannel--flip-coin-output: - componentInputParameter: pipelinechannel--flip-coin-output - pipelinechannel--generate-random-number-2-output: - taskOutputParameter: - outputParameterKey: output - producerTask: generate-random-number-2 - taskInfo: - name: condition-5 - triggerPolicy: - condition: int(inputs.parameter_values['pipelinechannel--generate-random-number-2-output']) - > 15 - condition-6: - componentRef: - name: comp-condition-6 - dependentTasks: - - generate-random-number-2 - inputs: - parameters: - pipelinechannel--flip-coin-output: - componentInputParameter: pipelinechannel--flip-coin-output - pipelinechannel--generate-random-number-2-output: - taskOutputParameter: - outputParameterKey: output - producerTask: generate-random-number-2 - taskInfo: - name: condition-6 - triggerPolicy: - condition: int(inputs.parameter_values['pipelinechannel--generate-random-number-2-output']) - <= 15 - generate-random-number-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-generate-random-number-2 - taskInfo: - name: generate-random-number-2 - inputDefinitions: - parameters: - pipelinechannel--flip-coin-output: - parameterType: STRING - comp-condition-5: - dag: - tasks: - print-3: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-3 - inputs: - parameters: - msg: - runtimeValue: - constant: tails and {{$.inputs.parameters['pipelinechannel--generate-random-number-2-output']}} - > 15! - pipelinechannel--generate-random-number-2-output: - componentInputParameter: pipelinechannel--generate-random-number-2-output - taskInfo: - name: print-3 - inputDefinitions: - parameters: - pipelinechannel--flip-coin-output: - parameterType: STRING - pipelinechannel--generate-random-number-2-output: - parameterType: NUMBER_INTEGER - comp-condition-6: - dag: - tasks: - print-4: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-4 - inputs: - parameters: - msg: - runtimeValue: - constant: tails and {{$.inputs.parameters['pipelinechannel--generate-random-number-2-output']}} - <= 15! 
- pipelinechannel--generate-random-number-2-output: - componentInputParameter: pipelinechannel--generate-random-number-2-output - taskInfo: - name: print-4 - inputDefinitions: - parameters: - pipelinechannel--flip-coin-output: - parameterType: STRING - pipelinechannel--generate-random-number-2-output: - parameterType: NUMBER_INTEGER - comp-flip-coin: - executorLabel: exec-flip-coin - outputDefinitions: - parameters: - output: - parameterType: STRING - comp-generate-random-number: - executorLabel: exec-generate-random-number - outputDefinitions: - parameters: - output: - parameterType: NUMBER_INTEGER - comp-generate-random-number-2: - executorLabel: exec-generate-random-number-2 - outputDefinitions: - parameters: - output: - parameterType: NUMBER_INTEGER - comp-print: - executorLabel: exec-print - inputDefinitions: - parameters: - msg: - parameterType: STRING - comp-print-2: - executorLabel: exec-print-2 - inputDefinitions: - parameters: - msg: - parameterType: STRING - comp-print-3: - executorLabel: exec-print-3 - inputDefinitions: - parameters: - msg: - parameterType: STRING - comp-print-4: - executorLabel: exec-print-4 - inputDefinitions: - parameters: - msg: - parameterType: STRING -deploymentSpec: - executors: - exec-flip-coin: - container: - args: - - mkdir -p "$(dirname $0)" && python -c "import random; result = 'heads' if - random.randint(0,1) == 0 else 'tails'; print(result, end='')" | tee $0 - - '{{$.outputs.parameters[''output''].output_file}}' - command: - - sh - - -c - image: python:alpine3.9 - exec-generate-random-number: - container: - args: - - mkdir -p "$(dirname $2)" && python -c "import random; print(random.randint($0, - $1), end='')" | tee $2 - - '0' - - '9' - - '{{$.outputs.parameters[''output''].output_file}}' - command: - - sh - - -c - image: python:alpine3.9 - exec-generate-random-number-2: - container: - args: - - mkdir -p "$(dirname $2)" && python -c "import random; print(random.randint($0, - $1), end='')" | tee $2 - - '10' - - '19' - - '{{$.outputs.parameters[''output''].output_file}}' - command: - - sh - - -c - image: python:alpine3.9 - exec-print: - container: - command: - - echo - - '{{$.inputs.parameters[''msg'']}}' - image: python:alpine3.9 - exec-print-2: - container: - command: - - echo - - '{{$.inputs.parameters[''msg'']}}' - image: python:alpine3.9 - exec-print-3: - container: - command: - - echo - - '{{$.inputs.parameters[''msg'']}}' - image: python:alpine3.9 - exec-print-4: - container: - command: - - echo - - '{{$.inputs.parameters[''msg'']}}' - image: python:alpine3.9 -pipelineInfo: - description: Shows how to use dsl.Condition(). - displayName: Conditional execution pipeline. 
- name: conditional-execution-pipeline -root: - dag: - tasks: - condition-1: - componentRef: - name: comp-condition-1 - dependentTasks: - - flip-coin - inputs: - parameters: - pipelinechannel--flip-coin-output: - taskOutputParameter: - outputParameterKey: output - producerTask: flip-coin - taskInfo: - name: condition-1 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--flip-coin-output'] - == 'heads' - condition-4: - componentRef: - name: comp-condition-4 - dependentTasks: - - flip-coin - inputs: - parameters: - pipelinechannel--flip-coin-output: - taskOutputParameter: - outputParameterKey: output - producerTask: flip-coin - taskInfo: - name: condition-4 - triggerPolicy: - condition: inputs.parameter_values['pipelinechannel--flip-coin-output'] - == 'tails' - flip-coin: - cachingOptions: - enableCache: true - componentRef: - name: comp-flip-coin - taskInfo: - name: flip-coin -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_nested_loops.yaml b/sdk/python/test_data/pipelines/pipeline_with_nested_loops.yaml deleted file mode 100644 index 960d091d1ef..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_nested_loops.yaml +++ /dev/null @@ -1,265 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-with-nested-loops -# Inputs: -# loop_parameter: list [Default: [{'p_b': 'hello', 'p_a': [{'q_a': '1'}, {'q_a': '2'}]}, {'p_b': 'halo', 'p_a': [{'q_a': '11'}, {'q_a': '22'}]}]] -components: - comp-for-loop-1: - dag: - tasks: - for-loop-2: - componentRef: - name: comp-for-loop-2 - inputs: - parameters: - pipelinechannel--loop_parameter-loop-item: - componentInputParameter: pipelinechannel--loop_parameter-loop-item - pipelinechannel--loop_parameter-loop-item-subvar-p_a: - componentInputParameter: pipelinechannel--loop_parameter-loop-item - parameterExpressionSelector: parseJson(string_value)["p_a"] - parameterIterator: - itemInput: pipelinechannel--loop_parameter-loop-item-subvar-p_a-loop-item - items: - inputParameter: pipelinechannel--loop_parameter-loop-item-subvar-p_a - taskInfo: - name: for-loop-2 - inputDefinitions: - parameters: - pipelinechannel--loop_parameter: - parameterType: LIST - pipelinechannel--loop_parameter-loop-item: - parameterType: STRING - comp-for-loop-2: - dag: - tasks: - print-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--loop_parameter-loop-item-subvar-p_a-loop-item - parameterExpressionSelector: parseJson(string_value)["q_a"] - taskInfo: - name: print-op - inputDefinitions: - parameters: - pipelinechannel--loop_parameter-loop-item: - parameterType: STRING - pipelinechannel--loop_parameter-loop-item-subvar-p_a: - parameterType: STRING - pipelinechannel--loop_parameter-loop-item-subvar-p_a-loop-item: - parameterType: STRING - comp-for-loop-4: - dag: - tasks: - for-loop-6: - componentRef: - name: comp-for-loop-6 - inputs: - parameters: - pipelinechannel--loop-item-param-3: - componentInputParameter: pipelinechannel--loop-item-param-3 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-5 - items: - raw: '["100", "200", "300"]' - taskInfo: - name: for-loop-6 - print-op-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-2 - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--loop-item-param-3 - taskInfo: - name: print-op-2 - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-3: - parameterType: STRING - comp-for-loop-6: 
- dag: - tasks: - print-op-3: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-3 - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--loop-item-param-3 - msg2: - componentInputParameter: pipelinechannel--loop-item-param-5 - taskInfo: - name: print-op-3 - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-3: - parameterType: STRING - pipelinechannel--loop-item-param-5: - parameterType: STRING - comp-print-op: - executorLabel: exec-print-op - inputDefinitions: - parameters: - msg: - parameterType: STRING - msg2: - isOptional: true - parameterType: STRING - comp-print-op-2: - executorLabel: exec-print-op-2 - inputDefinitions: - parameters: - msg: - parameterType: STRING - msg2: - isOptional: true - parameterType: STRING - comp-print-op-3: - executorLabel: exec-print-op-3 - inputDefinitions: - parameters: - msg: - parameterType: STRING - msg2: - isOptional: true - parameterType: STRING -deploymentSpec: - executors: - exec-print-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ - \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 - exec-print-op-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ - \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 - exec-print-op-3: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ - \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 -pipelineInfo: - name: pipeline-with-nested-loops -root: - dag: - tasks: - for-loop-1: - componentRef: - name: comp-for-loop-1 - inputs: - parameters: - pipelinechannel--loop_parameter: - componentInputParameter: loop_parameter - parameterIterator: - itemInput: pipelinechannel--loop_parameter-loop-item - items: - inputParameter: pipelinechannel--loop_parameter - taskInfo: - name: for-loop-1 - for-loop-4: - componentRef: - name: comp-for-loop-4 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-3 - items: - raw: '["1", "2"]' - taskInfo: - name: for-loop-4 - inputDefinitions: - parameters: - loop_parameter: - defaultValue: - - p_a: - - q_a: '1' - - q_a: '2' - p_b: hello - - p_a: - - q_a: '11' - - q_a: '22' - p_b: halo - isOptional: true - parameterType: LIST -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_ontology.yaml b/sdk/python/test_data/pipelines/pipeline_with_ontology.yaml deleted file mode 100644 index a029447584f..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_ontology.yaml +++ /dev/null @@ -1,114 +0,0 @@ -# PIPELINE DEFINITION -# Name: two-step-pipeline-with-ontology -# Description: A linear two-step pipeline with artifact ontology types. -# Inputs: -# input_location: str [Default: 'gs://test-bucket/pipeline_root'] -# n_epochs: int [Default: 200.0] -# optimizer: str [Default: 'sgd'] -components: - comp-ingestion: - executorLabel: exec-ingestion - inputDefinitions: - parameters: - input_location: - parameterType: STRING - outputDefinitions: - artifacts: - examples: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-trainer: - executorLabel: exec-trainer - inputDefinitions: - artifacts: - examples: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - parameters: - n_epochs: - parameterType: NUMBER_INTEGER - optimizer: - parameterType: STRING - outputDefinitions: - artifacts: - model: - artifactType: - schemaTitle: system.Model - schemaVersion: 0.0.1 -deploymentSpec: - executors: - exec-ingestion: - container: - args: - - --input-location - - '{{$.inputs.parameters[''input_location'']}}' - - --output-examples - - '{{$.outputs.artifacts[''examples''].uri}}' - image: gcr.io/my-project/my-ingestor - exec-trainer: - container: - args: - - --input-examples - - '{{$.inputs.artifacts[''examples''].uri}}' - - --optimizer - - '{{$.inputs.parameters[''optimizer'']}}' - - --n_epochs - - '{{$.inputs.parameters[''n_epochs'']}}' - - --output-model - - '{{$.outputs.artifacts[''model''].uri}}' - image: gcr.io/my-project/my-fancy-trainer -pipelineInfo: - description: A linear two-step pipeline with artifact ontology types. 
- name: two-step-pipeline-with-ontology -root: - dag: - tasks: - ingestion: - cachingOptions: - enableCache: true - componentRef: - name: comp-ingestion - inputs: - parameters: - input_location: - componentInputParameter: input_location - taskInfo: - name: ingestion - trainer: - cachingOptions: - enableCache: true - componentRef: - name: comp-trainer - dependentTasks: - - ingestion - inputs: - artifacts: - examples: - taskOutputArtifact: - outputArtifactKey: examples - producerTask: ingestion - parameters: - n_epochs: - componentInputParameter: n_epochs - optimizer: - componentInputParameter: optimizer - taskInfo: - name: trainer - inputDefinitions: - parameters: - input_location: - defaultValue: gs://test-bucket/pipeline_root - isOptional: true - parameterType: STRING - n_epochs: - defaultValue: 200.0 - isOptional: true - parameterType: NUMBER_INTEGER - optimizer: - defaultValue: sgd - isOptional: true - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_outputs.yaml b/sdk/python/test_data/pipelines/pipeline_with_outputs.yaml deleted file mode 100644 index 914937e18f4..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_outputs.yaml +++ /dev/null @@ -1,210 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-in-pipeline -# Inputs: -# msg: str [Default: 'Hello'] -# Outputs: -# Output: system.Artifact -components: - comp-inner-pipeline: - dag: - outputs: - artifacts: - data: - artifactSelectors: - - outputArtifactKey: data - producerSubtask: print-op2 - parameters: - msg: - valueFromParameter: - outputParameterKey: Output - producerSubtask: print-op1 - tasks: - print-op1: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op1-2 - inputs: - parameters: - msg: - componentInputParameter: msg - taskInfo: - name: print-op1 - print-op2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op2 - dependentTasks: - - print-op1 - inputs: - parameters: - msg: - taskOutputParameter: - outputParameterKey: Output - producerTask: print-op1 - taskInfo: - name: print-op2 - inputDefinitions: - parameters: - msg: - parameterType: STRING - outputDefinitions: - artifacts: - data: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - parameters: - msg: - parameterType: STRING - comp-print-op1: - executorLabel: exec-print-op1 - inputDefinitions: - parameters: - msg: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-op1-2: - executorLabel: exec-print-op1-2 - inputDefinitions: - parameters: - msg: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-op2: - executorLabel: exec-print-op2 - inputDefinitions: - parameters: - msg: - parameterType: STRING - outputDefinitions: - artifacts: - data: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 -deploymentSpec: - executors: - exec-print-op1: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op1 - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ - \n" - image: python:3.9 - exec-print-op1-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op1 - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ - \n" - image: python:3.9 - exec-print-op2: - container: - args: - - '{{$.inputs.parameters[''msg'']}}' - - '{{$.outputs.artifacts[''data''].path}}' - command: - - sh - - -c - - mkdir --parents $(dirname "$1") && echo "$0" > "$1" - image: alpine -pipelineInfo: - name: pipeline-in-pipeline -root: - dag: - outputs: - artifacts: - Output: - artifactSelectors: - - outputArtifactKey: data - producerSubtask: inner-pipeline - tasks: - inner-pipeline: - cachingOptions: - enableCache: true - componentRef: - name: comp-inner-pipeline - inputs: - parameters: - msg: - runtimeValue: - constant: world - taskInfo: - name: inner-pipeline - print-op1: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op1 - inputs: - parameters: - msg: - componentInputParameter: msg - taskInfo: - name: print-op1 - inputDefinitions: - parameters: - msg: - defaultValue: Hello - isOptional: true - parameterType: STRING - outputDefinitions: - artifacts: - Output: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_parallelfor_list_artifacts.yaml b/sdk/python/test_data/pipelines/pipeline_with_parallelfor_list_artifacts.yaml deleted file mode 100644 index 7df9b8930a9..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_parallelfor_list_artifacts.yaml +++ /dev/null @@ -1,420 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-parallelfor-artifacts -components: - comp-for-loop-1: - dag: - tasks: - print-artifact-name: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-artifact-name - inputs: - artifacts: - artifact: - componentInputArtifact: pipelinechannel--make-artifacts-Output-loop-item - taskInfo: - name: print-artifact-name - inputDefinitions: - artifacts: - pipelinechannel--make-artifacts-Output: - artifactType: - schemaTitle: system.Artifact - 
schemaVersion: 0.0.1 - isArtifactList: true - pipelinechannel--make-artifacts-Output-loop-item: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - comp-for-loop-1-2: - dag: - outputs: - artifacts: - pipelinechannel--make-artifact-Output: - artifactSelectors: - - outputArtifactKey: Output - producerSubtask: make-artifact - tasks: - make-artifact: - cachingOptions: - enableCache: true - componentRef: - name: comp-make-artifact - inputs: - parameters: - data: - componentInputParameter: pipelinechannel--texts-loop-item - taskInfo: - name: make-artifact - inputDefinitions: - parameters: - pipelinechannel--texts: - parameterType: LIST - pipelinechannel--texts-loop-item: - parameterType: STRING - outputDefinitions: - artifacts: - pipelinechannel--make-artifact-Output: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - isArtifactList: true - comp-for-loop-1-3: - dag: - outputs: - artifacts: - pipelinechannel--make-dataset-Output: - artifactSelectors: - - outputArtifactKey: Output - producerSubtask: make-dataset - tasks: - make-dataset: - cachingOptions: - enableCache: true - componentRef: - name: comp-make-dataset - inputs: - parameters: - data: - componentInputParameter: pipelinechannel--texts-loop-item - taskInfo: - name: make-dataset - inputDefinitions: - parameters: - pipelinechannel--texts: - parameterType: LIST - pipelinechannel--texts-loop-item: - parameterType: STRING - outputDefinitions: - artifacts: - pipelinechannel--make-dataset-Output: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isArtifactList: true - comp-for-loop-2: - dag: - tasks: - print-artifact-name-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-artifact-name-2 - inputs: - artifacts: - artifact: - componentInputArtifact: pipelinechannel--make-datasets-Output-loop-item - taskInfo: - name: print-artifact-name-2 - inputDefinitions: - artifacts: - pipelinechannel--make-datasets-Output: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isArtifactList: true - pipelinechannel--make-datasets-Output-loop-item: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-make-artifact: - executorLabel: exec-make-artifact - inputDefinitions: - parameters: - data: - parameterType: STRING - outputDefinitions: - artifacts: - Output: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - comp-make-artifacts: - dag: - outputs: - artifacts: - Output: - artifactSelectors: - - outputArtifactKey: pipelinechannel--make-artifact-Output - producerSubtask: for-loop-1 - tasks: - for-loop-1: - componentRef: - name: comp-for-loop-1-2 - inputs: - parameters: - pipelinechannel--texts: - componentInputParameter: texts - parameterIterator: - itemInput: pipelinechannel--texts-loop-item - items: - inputParameter: pipelinechannel--texts - taskInfo: - name: for-loop-1 - inputDefinitions: - parameters: - texts: - defaultValue: - - Hello - - ',' - - ' ' - - world! 
- isOptional: true - parameterType: LIST - outputDefinitions: - artifacts: - Output: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - isArtifactList: true - comp-make-dataset: - executorLabel: exec-make-dataset - inputDefinitions: - parameters: - data: - parameterType: STRING - outputDefinitions: - artifacts: - Output: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-make-datasets: - dag: - outputs: - artifacts: - Output: - artifactSelectors: - - outputArtifactKey: pipelinechannel--make-dataset-Output - producerSubtask: for-loop-1 - tasks: - for-loop-1: - componentRef: - name: comp-for-loop-1-3 - inputs: - parameters: - pipelinechannel--texts: - componentInputParameter: texts - parameterIterator: - itemInput: pipelinechannel--texts-loop-item - items: - inputParameter: pipelinechannel--texts - taskInfo: - name: for-loop-1 - inputDefinitions: - parameters: - texts: - defaultValue: - - Hello - - ',' - - ' ' - - world! - isOptional: true - parameterType: LIST - outputDefinitions: - artifacts: - Output: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isArtifactList: true - comp-print-artifact-name: - executorLabel: exec-print-artifact-name - inputDefinitions: - artifacts: - artifact: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-artifact-name-2: - executorLabel: exec-print-artifact-name-2 - inputDefinitions: - artifacts: - artifact: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - outputDefinitions: - parameters: - Output: - parameterType: STRING -deploymentSpec: - executors: - exec-make-artifact: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - make_artifact - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef make_artifact(data: str) -> Artifact:\n artifact = Artifact(uri=dsl.get_uri(),\ - \ metadata={'length': len(data)})\n with open(artifact.path, 'w') as\ - \ f:\n f.write(data)\n return artifact\n\n" - image: python:3.9 - exec-make-dataset: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - make_dataset - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef make_dataset(data: str) -> Dataset:\n dataset = Dataset(uri=dsl.get_uri(),\ - \ metadata={'length': len(data)})\n with open(dataset.path, 'w') as f:\n\ - \ f.write(data)\n return dataset\n\n" - image: python:3.9 - exec-print-artifact-name: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_artifact_name - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_artifact_name(artifact: Artifact) -> str:\n print(artifact.name)\n\ - \ return artifact.name\n\n" - image: python:3.9 - exec-print-artifact-name-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_artifact_name - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_artifact_name(artifact: Artifact) -> str:\n print(artifact.name)\n\ - \ return artifact.name\n\n" - image: python:3.9 -pipelineInfo: - name: pipeline-parallelfor-artifacts -root: - dag: - tasks: - for-loop-1: - artifactIterator: - itemInput: pipelinechannel--make-artifacts-Output-loop-item - items: - inputArtifact: pipelinechannel--make-artifacts-Output - componentRef: - name: comp-for-loop-1 - dependentTasks: - - make-artifacts - inputs: - artifacts: - pipelinechannel--make-artifacts-Output: - taskOutputArtifact: - outputArtifactKey: Output - producerTask: make-artifacts - taskInfo: - name: for-loop-1 - for-loop-2: - artifactIterator: - itemInput: pipelinechannel--make-datasets-Output-loop-item - items: - inputArtifact: pipelinechannel--make-datasets-Output - componentRef: - name: comp-for-loop-2 - dependentTasks: - - make-datasets - inputs: - artifacts: - pipelinechannel--make-datasets-Output: - taskOutputArtifact: - outputArtifactKey: Output - producerTask: make-datasets - taskInfo: - name: for-loop-2 - make-artifacts: - cachingOptions: - enableCache: true - componentRef: - name: comp-make-artifacts - taskInfo: - name: make-artifacts - make-datasets: - cachingOptions: - enableCache: true - componentRef: - name: comp-make-datasets - taskInfo: - name: make-datasets -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_parallelfor_parallelism.yaml b/sdk/python/test_data/pipelines/pipeline_with_parallelfor_parallelism.yaml deleted file mode 100644 index 0fcf820fc14..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_parallelfor_parallelism.yaml +++ /dev/null @@ -1,1017 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-with-loops -# Inputs: -# loop_parameter: list -components: - comp-for-loop-1: - dag: - tasks: - for-loop-2: - componentRef: - name: comp-for-loop-2 - inputs: - parameters: - pipelinechannel--loop_parameter: - componentInputParameter: pipelinechannel--loop_parameter - parameterIterator: - itemInput: pipelinechannel--loop_parameter-loop-item - items: - inputParameter: pipelinechannel--loop_parameter - taskInfo: - name: for-loop-2 - print-text: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--loop_parameter-loop-item - taskInfo: - name: print-text - inputDefinitions: - parameters: - pipelinechannel--loop_parameter: - parameterType: LIST - pipelinechannel--loop_parameter-loop-item: - parameterType: STRING - comp-for-loop-10: - dag: - tasks: - print-int-3: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-int-3 - inputs: - parameters: - x: - componentInputParameter: pipelinechannel--list-dict-maker-1-Output-loop-item - parameterExpressionSelector: parseJson(string_value)["a"] - taskInfo: - name: print-int-3 - inputDefinitions: - 
parameters: - pipelinechannel--list-dict-maker-1-Output: - parameterType: LIST - pipelinechannel--list-dict-maker-1-Output-loop-item: - parameterType: STRUCT - comp-for-loop-11: - dag: - tasks: - print-int-4: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-int-4 - inputs: - parameters: - x: - componentInputParameter: pipelinechannel--list-dict-maker-2-Output-loop-item - parameterExpressionSelector: parseJson(string_value)["a"] - taskInfo: - name: print-int-4 - inputDefinitions: - parameters: - pipelinechannel--list-dict-maker-2-Output: - parameterType: LIST - pipelinechannel--list-dict-maker-2-Output-loop-item: - parameterType: STRUCT - comp-for-loop-12: - dag: - tasks: - print-int-5: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-int-5 - inputs: - parameters: - x: - componentInputParameter: pipelinechannel--list-dict-maker-3-Output-loop-item - parameterExpressionSelector: parseJson(string_value)["a"] - taskInfo: - name: print-int-5 - inputDefinitions: - parameters: - pipelinechannel--list-dict-maker-3-Output: - parameterType: LIST - pipelinechannel--list-dict-maker-3-Output-loop-item: - parameterType: STRING - comp-for-loop-13: - dag: - tasks: - print-int-6: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-int-6 - inputs: - parameters: - x: - componentInputParameter: pipelinechannel--list-dict-maker-1-2-Output-loop-item - parameterExpressionSelector: parseJson(string_value)["a"] - taskInfo: - name: print-int-6 - inputDefinitions: - parameters: - pipelinechannel--list-dict-maker-1-2-Output: - parameterType: LIST - pipelinechannel--list-dict-maker-1-2-Output-loop-item: - parameterType: STRING - comp-for-loop-2: - dag: - tasks: - print-text-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text-2 - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--loop_parameter-loop-item - taskInfo: - name: print-text-2 - inputDefinitions: - parameters: - pipelinechannel--loop_parameter: - parameterType: LIST - pipelinechannel--loop_parameter-loop-item: - parameterType: STRING - comp-for-loop-4: - dag: - tasks: - for-loop-6: - componentRef: - name: comp-for-loop-6 - iteratorPolicy: - parallelismLimit: 1 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-5 - items: - raw: '[{"A_a": "10", "B_b": "20"}, {"A_a": "100", "B_b": "200"}]' - taskInfo: - name: for-loop-6 - print-text-3: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text-3 - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--loop-item-param-3 - parameterExpressionSelector: parseJson(string_value)["A_a"] - taskInfo: - name: print-text-3 - print-text-4: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text-4 - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--loop-item-param-3 - parameterExpressionSelector: parseJson(string_value)["B_b"] - taskInfo: - name: print-text-4 - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-3: - parameterType: STRUCT - comp-for-loop-6: - dag: - tasks: - print-text-5: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text-5 - inputs: - parameters: - msg: - componentInputParameter: pipelinechannel--loop-item-param-5 - parameterExpressionSelector: parseJson(string_value)["A_a"] - taskInfo: - name: print-text-5 - print-text-6: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-text-6 - inputs: - parameters: - msg: - 
componentInputParameter: pipelinechannel--loop-item-param-5 - parameterExpressionSelector: parseJson(string_value)["B_b"] - taskInfo: - name: print-text-6 - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-5: - parameterType: STRUCT - comp-for-loop-8: - dag: - tasks: - print-int: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-int - inputs: - parameters: - x: - componentInputParameter: pipelinechannel--loop-item-param-7 - parameterExpressionSelector: parseJson(string_value)["a"] - taskInfo: - name: print-int - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-7: - parameterType: STRUCT - comp-for-loop-9: - dag: - tasks: - print-int-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-int-2 - inputs: - parameters: - x: - componentInputParameter: pipelinechannel--list-dict-maker-0-Output-loop-item - parameterExpressionSelector: parseJson(string_value)["a"] - taskInfo: - name: print-int-2 - inputDefinitions: - parameters: - pipelinechannel--list-dict-maker-0-Output: - parameterType: LIST - pipelinechannel--list-dict-maker-0-Output-loop-item: - parameterType: STRUCT - comp-list-dict-maker-0: - executorLabel: exec-list-dict-maker-0 - outputDefinitions: - parameters: - Output: - parameterType: LIST - comp-list-dict-maker-1: - executorLabel: exec-list-dict-maker-1 - outputDefinitions: - parameters: - Output: - parameterType: LIST - comp-list-dict-maker-1-2: - executorLabel: exec-list-dict-maker-1-2 - outputDefinitions: - parameters: - Output: - parameterType: LIST - comp-list-dict-maker-2: - executorLabel: exec-list-dict-maker-2 - outputDefinitions: - parameters: - Output: - parameterType: LIST - comp-list-dict-maker-3: - executorLabel: exec-list-dict-maker-3 - outputDefinitions: - parameters: - Output: - parameterType: LIST - comp-print-int: - executorLabel: exec-print-int - inputDefinitions: - parameters: - x: - parameterType: NUMBER_INTEGER - comp-print-int-2: - executorLabel: exec-print-int-2 - inputDefinitions: - parameters: - x: - parameterType: NUMBER_INTEGER - comp-print-int-3: - executorLabel: exec-print-int-3 - inputDefinitions: - parameters: - x: - parameterType: NUMBER_INTEGER - comp-print-int-4: - executorLabel: exec-print-int-4 - inputDefinitions: - parameters: - x: - parameterType: NUMBER_INTEGER - comp-print-int-5: - executorLabel: exec-print-int-5 - inputDefinitions: - parameters: - x: - parameterType: NUMBER_INTEGER - comp-print-int-6: - executorLabel: exec-print-int-6 - inputDefinitions: - parameters: - x: - parameterType: NUMBER_INTEGER - comp-print-text: - executorLabel: exec-print-text - inputDefinitions: - parameters: - msg: - parameterType: STRING - comp-print-text-2: - executorLabel: exec-print-text-2 - inputDefinitions: - parameters: - msg: - parameterType: STRING - comp-print-text-3: - executorLabel: exec-print-text-3 - inputDefinitions: - parameters: - msg: - parameterType: STRING - comp-print-text-4: - executorLabel: exec-print-text-4 - inputDefinitions: - parameters: - msg: - parameterType: STRING - comp-print-text-5: - executorLabel: exec-print-text-5 - inputDefinitions: - parameters: - msg: - parameterType: STRING - comp-print-text-6: - executorLabel: exec-print-text-6 - inputDefinitions: - parameters: - msg: - parameterType: STRING -deploymentSpec: - executors: - exec-list-dict-maker-0: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - list_dict_maker_0 - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef list_dict_maker_0() -> List[Dict[str, int]]:\n \"\"\"Enforces\ - \ strict type checking - returns a list of dictionaries \n where keys\ - \ are strings and values are integers. For testing type \n handling during\ - \ compilation.\"\"\"\n return [{'a': 1, 'b': 2}, {'a': 2, 'b': 3}, {'a':\ - \ 3, 'b': 4}]\n\n" - image: python:3.9 - exec-list-dict-maker-1: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - list_dict_maker_1 - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef list_dict_maker_1() -> List[Dict]:\n \"\"\"Utilizes generic\ - \ dictionary typing (no enforcement of specific key or\n value types).\n\ - \n Tests flexibility in type handling.\n \"\"\"\n return [{'a':\ - \ 1, 'b': 2}, {'a': 2, 'b': 3}, {'a': 3, 'b': 4}]\n\n" - image: python:3.9 - exec-list-dict-maker-1-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - list_dict_maker_1 - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef list_dict_maker_1() -> List[Dict]:\n \"\"\"Utilizes generic\ - \ dictionary typing (no enforcement of specific key or\n value types).\n\ - \n Tests flexibility in type handling.\n \"\"\"\n return [{'a':\ - \ 1, 'b': 2}, {'a': 2, 'b': 3}, {'a': 3, 'b': 4}]\n\n" - image: python:3.9 - exec-list-dict-maker-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - list_dict_maker_2 - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef list_dict_maker_2() -> List[dict]:\n \"\"\"Returns a list\ - \ of dictionaries without type enforcement.\n\n Tests flexibility in\ - \ type handling.\n \"\"\"\n return [{'a': 1, 'b': 2}, {'a': 2, 'b':\ - \ 3}, {'a': 3, 'b': 4}]\n\n" - image: python:3.9 - exec-list-dict-maker-3: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - list_dict_maker_3 - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef list_dict_maker_3() -> List:\n \"\"\"Returns a basic list\ - \ (no typing or structure guarantees).\n\n Tests the limits of compiler\ - \ type handling.\n \"\"\"\n return [{'a': 1, 'b': 2}, {'a': 2, 'b':\ - \ 3}, {'a': 3, 'b': 4}]\n\n" - image: python:3.9 - exec-print-int: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_int - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_int(x: int):\n print(x)\n\n" - image: python:3.9 - exec-print-int-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_int - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_int(x: int):\n print(x)\n\n" - image: python:3.9 - exec-print-int-3: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_int - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_int(x: int):\n print(x)\n\n" - image: python:3.9 - exec-print-int-4: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_int - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_int(x: int):\n print(x)\n\n" - image: python:3.9 - exec-print-int-5: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_int - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_int(x: int):\n print(x)\n\n" - image: python:3.9 - exec-print-int-6: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_int - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_int(x: int):\n print(x)\n\n" - image: python:3.9 - exec-print-text: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_text - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 - exec-print-text-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_text - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 - exec-print-text-3: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_text - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 - exec-print-text-4: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_text - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 - exec-print-text-5: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_text - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 - exec-print-text-6: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_text - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 -pipelineInfo: - name: pipeline-with-loops -root: - dag: - tasks: - for-loop-1: - componentRef: - name: comp-for-loop-1 - inputs: - parameters: - pipelinechannel--loop_parameter: - componentInputParameter: loop_parameter - iteratorPolicy: - parallelismLimit: 2 - parameterIterator: - itemInput: pipelinechannel--loop_parameter-loop-item - items: - inputParameter: pipelinechannel--loop_parameter - taskInfo: - name: for-loop-1 - for-loop-10: - componentRef: - name: comp-for-loop-10 - dependentTasks: - - list-dict-maker-1 - inputs: - parameters: - pipelinechannel--list-dict-maker-1-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: list-dict-maker-1 - parameterIterator: - itemInput: pipelinechannel--list-dict-maker-1-Output-loop-item - items: - inputParameter: pipelinechannel--list-dict-maker-1-Output - taskInfo: - name: for-loop-10 - for-loop-11: - componentRef: - name: comp-for-loop-11 - dependentTasks: - - list-dict-maker-2 - inputs: - parameters: - pipelinechannel--list-dict-maker-2-Output: - taskOutputParameter: - 
outputParameterKey: Output - producerTask: list-dict-maker-2 - parameterIterator: - itemInput: pipelinechannel--list-dict-maker-2-Output-loop-item - items: - inputParameter: pipelinechannel--list-dict-maker-2-Output - taskInfo: - name: for-loop-11 - for-loop-12: - componentRef: - name: comp-for-loop-12 - dependentTasks: - - list-dict-maker-3 - inputs: - parameters: - pipelinechannel--list-dict-maker-3-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: list-dict-maker-3 - parameterIterator: - itemInput: pipelinechannel--list-dict-maker-3-Output-loop-item - items: - inputParameter: pipelinechannel--list-dict-maker-3-Output - taskInfo: - name: for-loop-12 - for-loop-13: - componentRef: - name: comp-for-loop-13 - dependentTasks: - - list-dict-maker-1-2 - inputs: - parameters: - pipelinechannel--list-dict-maker-1-2-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: list-dict-maker-1-2 - parameterIterator: - itemInput: pipelinechannel--list-dict-maker-1-2-Output-loop-item - items: - inputParameter: pipelinechannel--list-dict-maker-1-2-Output - taskInfo: - name: for-loop-13 - for-loop-4: - componentRef: - name: comp-for-loop-4 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-3 - items: - raw: '[{"A_a": "1", "B_b": "2"}, {"A_a": "10", "B_b": "20"}]' - taskInfo: - name: for-loop-4 - for-loop-8: - componentRef: - name: comp-for-loop-8 - iteratorPolicy: - parallelismLimit: 1 - parameterIterator: - itemInput: pipelinechannel--loop-item-param-7 - items: - raw: '[{"a": 1, "b": 2}, {"a": 2, "b": 3}, {"a": 3, "b": 4}]' - taskInfo: - name: for-loop-8 - for-loop-9: - componentRef: - name: comp-for-loop-9 - dependentTasks: - - list-dict-maker-0 - inputs: - parameters: - pipelinechannel--list-dict-maker-0-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: list-dict-maker-0 - parameterIterator: - itemInput: pipelinechannel--list-dict-maker-0-Output-loop-item - items: - inputParameter: pipelinechannel--list-dict-maker-0-Output - taskInfo: - name: for-loop-9 - list-dict-maker-0: - cachingOptions: - enableCache: true - componentRef: - name: comp-list-dict-maker-0 - taskInfo: - name: list-dict-maker-0 - list-dict-maker-1: - cachingOptions: - enableCache: true - componentRef: - name: comp-list-dict-maker-1 - taskInfo: - name: list-dict-maker-1 - list-dict-maker-1-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-list-dict-maker-1-2 - taskInfo: - name: list-dict-maker-1-2 - list-dict-maker-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-list-dict-maker-2 - taskInfo: - name: list-dict-maker-2 - list-dict-maker-3: - cachingOptions: - enableCache: true - componentRef: - name: comp-list-dict-maker-3 - taskInfo: - name: list-dict-maker-3 - inputDefinitions: - parameters: - loop_parameter: - parameterType: LIST -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_params_containing_format.yaml b/sdk/python/test_data/pipelines/pipeline_with_params_containing_format.yaml deleted file mode 100644 index 2ca766c8f3b..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_params_containing_format.yaml +++ /dev/null @@ -1,210 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-with-pipelineparam-containing-format -# Inputs: -# name: str [Default: 'KFP'] -components: - comp-for-loop-2: - dag: - tasks: - print-op2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op2 - inputs: - parameters: - pipelinechannel--name: - 
componentInputParameter: pipelinechannel--name - text1: - componentInputParameter: pipelinechannel--loop-item-param-1 - text2: - runtimeValue: - constant: ' and {{$.inputs.parameters[''pipelinechannel--name'']}}.' - taskInfo: - name: print-op2 - inputDefinitions: - parameters: - pipelinechannel--loop-item-param-1: - parameterType: STRING - pipelinechannel--name: - parameterType: STRING - comp-print-op: - executorLabel: exec-print-op - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-op-2: - executorLabel: exec-print-op-2 - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-op2: - executorLabel: exec-print-op2 - inputDefinitions: - parameters: - text1: - parameterType: STRING - text2: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING -deploymentSpec: - executors: - exec-print-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(text: str) -> str:\n print(text)\n return text\n\ - \n" - image: python:3.9 - exec-print-op-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(text: str) -> str:\n print(text)\n return text\n\ - \n" - image: python:3.9 - exec-print-op2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op2 - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op2(text1: str, text2: str) -> str:\n print(text1 +\ - \ text2)\n return text1 + text2\n\n" - image: python:3.9 -pipelineInfo: - name: pipeline-with-pipelineparam-containing-format -root: - dag: - tasks: - for-loop-2: - componentRef: - name: comp-for-loop-2 - inputs: - parameters: - pipelinechannel--name: - componentInputParameter: name - parameterIterator: - itemInput: pipelinechannel--loop-item-param-1 - items: - raw: '["1", "2"]' - taskInfo: - name: for-loop-2 - print-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op - inputs: - parameters: - pipelinechannel--name: - componentInputParameter: name - text: - runtimeValue: - constant: Hello {{$.inputs.parameters['pipelinechannel--name']}} - taskInfo: - name: print-op - print-op-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-2 - dependentTasks: - - print-op - inputs: - parameters: - pipelinechannel--print-op-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: print-op - text: - runtimeValue: - constant: '{{$.inputs.parameters[''pipelinechannel--print-op-Output'']}}, - again.' - taskInfo: - name: print-op-2 - inputDefinitions: - parameters: - name: - defaultValue: KFP - isOptional: true - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_placeholders.py b/sdk/python/test_data/pipelines/pipeline_with_placeholders.py deleted file mode 100644 index cfb1e7990c3..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_placeholders.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from kfp import compiler -from kfp import dsl -from kfp.dsl import component - - -@component -def print_op(msg: str, value: str): - print(msg, value) - - -@dsl.pipeline(name='pipeline-with-placeholders') -def my_pipeline(): - - print_op( - msg='job name:', - value=dsl.PIPELINE_JOB_NAME_PLACEHOLDER, - ) - print_op( - msg='job resource name:', - value=dsl.PIPELINE_JOB_RESOURCE_NAME_PLACEHOLDER, - ) - print_op( - msg='job id:', - value=dsl.PIPELINE_JOB_ID_PLACEHOLDER, - ) - print_op( - msg='task name:', - value=dsl.PIPELINE_TASK_NAME_PLACEHOLDER, - ) - print_op( - msg='task id:', - value=dsl.PIPELINE_TASK_ID_PLACEHOLDER, - ) - - -if __name__ == '__main__': - compiler.Compiler().compile( - pipeline_func=my_pipeline, - package_path=__file__.replace('.py', '.yaml')) diff --git a/sdk/python/test_data/pipelines/pipeline_with_placeholders.yaml b/sdk/python/test_data/pipelines/pipeline_with_placeholders.yaml deleted file mode 100644 index 44f63c95d61..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_placeholders.yaml +++ /dev/null @@ -1,267 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-with-placeholders -components: - comp-print-op: - executorLabel: exec-print-op - inputDefinitions: - parameters: - msg: - parameterType: STRING - value: - parameterType: STRING - comp-print-op-2: - executorLabel: exec-print-op-2 - inputDefinitions: - parameters: - msg: - parameterType: STRING - value: - parameterType: STRING - comp-print-op-3: - executorLabel: exec-print-op-3 - inputDefinitions: - parameters: - msg: - parameterType: STRING - value: - parameterType: STRING - comp-print-op-4: - executorLabel: exec-print-op-4 - inputDefinitions: - parameters: - msg: - parameterType: STRING - value: - parameterType: STRING - comp-print-op-5: - executorLabel: exec-print-op-5 - inputDefinitions: - parameters: - msg: - parameterType: STRING - value: - parameterType: STRING -deploymentSpec: - executors: - exec-print-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(msg: str, value: str):\n print(msg, value)\n\n" - image: python:3.9 - exec-print-op-2: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(msg: str, value: str):\n print(msg, value)\n\n" - image: python:3.9 - exec-print-op-3: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(msg: str, value: str):\n print(msg, value)\n\n" - image: python:3.9 - exec-print-op-4: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(msg: str, value: str):\n print(msg, value)\n\n" - image: python:3.9 - exec-print-op-5: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(msg: str, value: str):\n print(msg, value)\n\n" - image: python:3.9 -pipelineInfo: - name: pipeline-with-placeholders -root: - dag: - tasks: - print-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op - inputs: - parameters: - msg: - runtimeValue: - constant: 'job name:' - value: - runtimeValue: - constant: '{{$.pipeline_job_name}}' - taskInfo: - name: print-op - print-op-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-2 - inputs: - parameters: - msg: - runtimeValue: - constant: 'job resource name:' - value: - runtimeValue: - constant: '{{$.pipeline_job_resource_name}}' - taskInfo: - name: print-op-2 - print-op-3: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-3 - inputs: - parameters: - msg: - runtimeValue: - constant: 'job id:' - value: - runtimeValue: - constant: '{{$.pipeline_job_uuid}}' - taskInfo: - name: print-op-3 - print-op-4: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-4 - inputs: - parameters: - msg: - runtimeValue: - constant: 'task name:' - value: - runtimeValue: - constant: '{{$.pipeline_task_name}}' - taskInfo: - name: print-op-4 - print-op-5: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op-5 - inputs: - parameters: - msg: - runtimeValue: - constant: 'task id:' - value: - runtimeValue: - constant: '{{$.pipeline_task_uuid}}' - taskInfo: - name: print-op-5 -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_resource_spec.yaml b/sdk/python/test_data/pipelines/pipeline_with_resource_spec.yaml deleted file mode 100644 index d3f72357d5c..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_resource_spec.yaml +++ /dev/null @@ -1,128 +0,0 @@ -# PIPELINE DEFINITION -# Name: two-step-pipeline-with-resource-spec -# Description: A linear two-step pipeline with resource specification. 
-# Inputs: -# input_location: str [Default: 'gs://test-bucket/pipeline_root'] -# n_epochs: int [Default: 200.0] -# optimizer: str [Default: 'sgd'] -components: - comp-ingestion: - executorLabel: exec-ingestion - inputDefinitions: - parameters: - input_location: - parameterType: STRING - outputDefinitions: - artifacts: - examples: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-trainer: - executorLabel: exec-trainer - inputDefinitions: - artifacts: - examples: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - parameters: - n_epochs: - parameterType: NUMBER_INTEGER - optimizer: - parameterType: STRING - outputDefinitions: - artifacts: - model: - artifactType: - schemaTitle: system.Model - schemaVersion: 0.0.1 -deploymentSpec: - executors: - exec-ingestion: - container: - args: - - --input-location - - '{{$.inputs.parameters[''input_location'']}}' - - --output-examples - - '{{$.outputs.artifacts[''examples''].uri}}' - image: gcr.io/my-project/my-ingestor - exec-trainer: - container: - args: - - --input-examples - - '{{$.inputs.artifacts[''examples''].uri}}' - - --optimizer - - '{{$.inputs.parameters[''optimizer'']}}' - - --n_epochs - - '{{$.inputs.parameters[''n_epochs'']}}' - - --output-model - - '{{$.outputs.artifacts[''model''].uri}}' - image: gcr.io/my-project/my-fancy-trainer - resources: - accelerator: - count: '1' - resourceCount: '1' - resourceType: tpu-v3 - type: tpu-v3 - cpuLimit: 4.0 - cpuRequest: 2.0 - memoryLimit: 15.032385536 - memoryRequest: 4.294967296 - resourceCpuLimit: '4' - resourceCpuRequest: '2' - resourceMemoryLimit: 14Gi - resourceMemoryRequest: 4Gi -pipelineInfo: - description: A linear two-step pipeline with resource specification. - name: two-step-pipeline-with-resource-spec -root: - dag: - tasks: - ingestion: - cachingOptions: - enableCache: true - componentRef: - name: comp-ingestion - inputs: - parameters: - input_location: - componentInputParameter: input_location - taskInfo: - name: ingestion - trainer: - cachingOptions: - enableCache: true - componentRef: - name: comp-trainer - dependentTasks: - - ingestion - inputs: - artifacts: - examples: - taskOutputArtifact: - outputArtifactKey: examples - producerTask: ingestion - parameters: - n_epochs: - componentInputParameter: n_epochs - optimizer: - componentInputParameter: optimizer - taskInfo: - name: trainer - inputDefinitions: - parameters: - input_location: - defaultValue: gs://test-bucket/pipeline_root - isOptional: true - parameterType: STRING - n_epochs: - defaultValue: 200.0 - isOptional: true - parameterType: NUMBER_INTEGER - optimizer: - defaultValue: sgd - isOptional: true - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.12.1 diff --git a/sdk/python/test_data/pipelines/pipeline_with_retry.yaml b/sdk/python/test_data/pipelines/pipeline_with_retry.yaml deleted file mode 100644 index e82aefc825b..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_retry.yaml +++ /dev/null @@ -1,83 +0,0 @@ -# PIPELINE DEFINITION -# Name: test-pipeline -# Inputs: -# a: float [Default: 1.0] -# b: float [Default: 7.0] -components: - comp-add: - executorLabel: exec-add - inputDefinitions: - parameters: - a: - parameterType: NUMBER_DOUBLE - b: - parameterType: NUMBER_DOUBLE - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_DOUBLE -deploymentSpec: - executors: - exec-add: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - add - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef add(a: float, b: float) -> float:\n return a + b\n\n" - image: python:3.9 -pipelineInfo: - name: test-pipeline -root: - dag: - tasks: - add: - cachingOptions: - enableCache: true - componentRef: - name: comp-add - inputs: - parameters: - a: - componentInputParameter: a - b: - componentInputParameter: b - retryPolicy: - backoffDuration: 0s - backoffFactor: 2.0 - backoffMaxDuration: 3600s - maxRetryCount: 3 - taskInfo: - name: add - inputDefinitions: - parameters: - a: - defaultValue: 1.0 - isOptional: true - parameterType: NUMBER_DOUBLE - b: - defaultValue: 7.0 - isOptional: true - parameterType: NUMBER_DOUBLE -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_reused_component.py b/sdk/python/test_data/pipelines/pipeline_with_reused_component.py deleted file mode 100644 index d52985bbe72..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_reused_component.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2020 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import pathlib - -from kfp import compiler -from kfp import components -from kfp import dsl - -test_data_dir = pathlib.Path(__file__).parent.parent / 'v1_component_yaml' -add_op = components.load_component_from_file( - str(test_data_dir / 'add_component.yaml')) - - -@dsl.pipeline(name='add-pipeline') -def my_pipeline( - a: int = 2, - b: int = 5, -): - first_add_task = add_op(op_1=a, op2=3) - second_add_task = add_op(op_1=first_add_task.outputs['sum'], op2=b) - third_add_task = add_op(op_1=second_add_task.outputs['sum'], op2=7) - - -if __name__ == '__main__': - compiler.Compiler().compile( - pipeline_func=my_pipeline, - package_path=__file__.replace('.py', '.yaml')) diff --git a/sdk/python/test_data/pipelines/pipeline_with_reused_component.yaml b/sdk/python/test_data/pipelines/pipeline_with_reused_component.yaml deleted file mode 100644 index 63ce4597c63..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_reused_component.yaml +++ /dev/null @@ -1,152 +0,0 @@ -# PIPELINE DEFINITION -# Name: add-pipeline -# Inputs: -# a: int [Default: 2.0] -# b: int [Default: 5.0] -components: - comp-add: - executorLabel: exec-add - inputDefinitions: - parameters: - op2: - parameterType: NUMBER_INTEGER - op_1: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - sum: - parameterType: NUMBER_INTEGER - comp-add-2: - executorLabel: exec-add-2 - inputDefinitions: - parameters: - op2: - parameterType: NUMBER_INTEGER - op_1: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - sum: - parameterType: NUMBER_INTEGER - comp-add-3: - executorLabel: exec-add-3 - inputDefinitions: - parameters: - op2: - parameterType: NUMBER_INTEGER - op_1: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - sum: - parameterType: NUMBER_INTEGER -deploymentSpec: - executors: - exec-add: - container: - command: - - sh - - -c - - 'set -e -x - - echo "$(($0+$1))" | gsutil cp - "$2" - - ' - - '{{$.inputs.parameters[''op_1'']}}' - - '{{$.inputs.parameters[''op2'']}}' - - '{{$.outputs.parameters[''sum''].output_file}}' - image: google/cloud-sdk:latest - exec-add-2: - container: - command: - - sh - - -c - - 'set -e -x - - echo "$(($0+$1))" | gsutil cp - "$2" - - ' - - '{{$.inputs.parameters[''op_1'']}}' - - '{{$.inputs.parameters[''op2'']}}' - - '{{$.outputs.parameters[''sum''].output_file}}' - image: google/cloud-sdk:latest - exec-add-3: - container: - command: - - sh - - -c - - 'set -e -x - - echo "$(($0+$1))" | gsutil cp - "$2" - - ' - - '{{$.inputs.parameters[''op_1'']}}' - - '{{$.inputs.parameters[''op2'']}}' - - '{{$.outputs.parameters[''sum''].output_file}}' - image: google/cloud-sdk:latest -pipelineInfo: - name: add-pipeline -root: - dag: - tasks: - add: - cachingOptions: - enableCache: true - componentRef: - name: comp-add - inputs: - parameters: - op2: - runtimeValue: - constant: 3.0 - op_1: - componentInputParameter: a - taskInfo: - name: add - add-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-add-2 - dependentTasks: - - add - inputs: - parameters: - op2: - componentInputParameter: b - op_1: - taskOutputParameter: - outputParameterKey: sum - producerTask: add - taskInfo: - name: add-2 - add-3: - cachingOptions: - enableCache: true - componentRef: - name: comp-add-3 - dependentTasks: - - add-2 - inputs: - parameters: - op2: - runtimeValue: - constant: 7.0 - op_1: - taskOutputParameter: - outputParameterKey: sum - producerTask: add-2 - taskInfo: - name: add-3 - inputDefinitions: - parameters: - a: - defaultValue: 2.0 - isOptional: true - 
parameterType: NUMBER_INTEGER - b: - defaultValue: 5.0 - isOptional: true - parameterType: NUMBER_INTEGER -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_string_machine_fields_pipeline_input.yaml b/sdk/python/test_data/pipelines/pipeline_with_string_machine_fields_pipeline_input.yaml deleted file mode 100644 index 65815886409..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_string_machine_fields_pipeline_input.yaml +++ /dev/null @@ -1,116 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline -# Inputs: -# accelerator_limit: str [Default: '1'] -# accelerator_type: str [Default: 'NVIDIA_TESLA_P4'] -# cpu_limit: str [Default: '2000m'] -# memory_limit: str [Default: '10G'] -components: - comp-sum-numbers: - executorLabel: exec-sum-numbers - inputDefinitions: - parameters: - a: - parameterType: NUMBER_INTEGER - b: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER -deploymentSpec: - executors: - exec-sum-numbers: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - sum_numbers - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.8.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef sum_numbers(a: int, b:int) -> int:\n return a + b\n\n" - image: python:3.8 - resources: - accelerator: - resourceCount: '{{$.inputs.parameters[''pipelinechannel--accelerator_limit'']}}' - resourceType: '{{$.inputs.parameters[''pipelinechannel--accelerator_type'']}}' - resourceCpuLimit: '{{$.inputs.parameters[''pipelinechannel--cpu_limit'']}}' - resourceMemoryLimit: '{{$.inputs.parameters[''pipelinechannel--memory_limit'']}}' -pipelineInfo: - name: pipeline -root: - dag: - tasks: - sum-numbers: - cachingOptions: - enableCache: true - componentRef: - name: comp-sum-numbers - inputs: - parameters: - a: - runtimeValue: - constant: 1.0 - accelerator_count: - runtimeValue: - constant: '{{$.inputs.parameters[''pipelinechannel--accelerator_limit'']}}' - accelerator_type: - runtimeValue: - constant: '{{$.inputs.parameters[''pipelinechannel--accelerator_type'']}}' - b: - runtimeValue: - constant: 2.0 - cpu_limit: - runtimeValue: - constant: '{{$.inputs.parameters[''pipelinechannel--cpu_limit'']}}' - memory_limit: - runtimeValue: - constant: '{{$.inputs.parameters[''pipelinechannel--memory_limit'']}}' - pipelinechannel--accelerator_limit: - componentInputParameter: accelerator_limit - pipelinechannel--accelerator_type: - componentInputParameter: accelerator_type - pipelinechannel--cpu_limit: - componentInputParameter: cpu_limit - pipelinechannel--memory_limit: - componentInputParameter: memory_limit - taskInfo: - name: sum-numbers - inputDefinitions: - parameters: - accelerator_limit: - defaultValue: '1' - isOptional: true - parameterType: STRING - accelerator_type: - defaultValue: NVIDIA_TESLA_P4 - isOptional: true - parameterType: STRING - cpu_limit: - defaultValue: 2000m - isOptional: 
true - parameterType: STRING - memory_limit: - defaultValue: 10G - isOptional: true - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.8.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_string_machine_fields_task_output.yaml b/sdk/python/test_data/pipelines/pipeline_with_string_machine_fields_task_output.yaml deleted file mode 100644 index 13f80133f8b..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_string_machine_fields_task_output.yaml +++ /dev/null @@ -1,265 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline -components: - comp-accelerator-limit: - executorLabel: exec-accelerator-limit - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-accelerator-type: - executorLabel: exec-accelerator-type - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-cpu-limit: - executorLabel: exec-cpu-limit - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-memory-limit: - executorLabel: exec-memory-limit - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-sum-numbers: - executorLabel: exec-sum-numbers - inputDefinitions: - parameters: - a: - parameterType: NUMBER_INTEGER - b: - parameterType: NUMBER_INTEGER - outputDefinitions: - parameters: - Output: - parameterType: NUMBER_INTEGER -deploymentSpec: - executors: - exec-accelerator-limit: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - accelerator_limit - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.8.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef accelerator_limit() -> str:\n return '1'\n\n" - image: python:3.8 - exec-accelerator-type: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - accelerator_type - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.8.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef accelerator_type() -> str:\n return 'NVIDIA_TESLA_P4'\n\n" - image: python:3.8 - exec-cpu-limit: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - cpu_limit - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.8.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef cpu_limit() -> str:\n return '4000m'\n\n" - image: python:3.8 - exec-memory-limit: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - memory_limit - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.8.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef memory_limit() -> str:\n return '15G'\n\n" - image: python:3.8 - exec-sum-numbers: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - sum_numbers - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.8.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef sum_numbers(a: int, b:int) -> int:\n return a + b\n\n" - image: python:3.8 - resources: - accelerator: - resourceCount: '{{$.inputs.parameters[''pipelinechannel--accelerator-limit-Output'']}}' - resourceType: '{{$.inputs.parameters[''pipelinechannel--accelerator-type-Output'']}}' - resourceCpuLimit: '{{$.inputs.parameters[''pipelinechannel--cpu-limit-Output'']}}' - resourceMemoryLimit: '{{$.inputs.parameters[''pipelinechannel--memory-limit-Output'']}}' -pipelineInfo: - name: pipeline -root: - dag: - tasks: - accelerator-limit: - cachingOptions: - enableCache: true - componentRef: - name: comp-accelerator-limit - taskInfo: - name: accelerator-limit - accelerator-type: - cachingOptions: - enableCache: true - componentRef: - name: comp-accelerator-type - taskInfo: - name: accelerator-type - cpu-limit: - cachingOptions: - enableCache: true - componentRef: - name: comp-cpu-limit - taskInfo: - name: cpu-limit - memory-limit: - cachingOptions: - enableCache: true - componentRef: - name: comp-memory-limit - taskInfo: - name: memory-limit - sum-numbers: - cachingOptions: - enableCache: true - componentRef: 
- name: comp-sum-numbers - inputs: - parameters: - a: - runtimeValue: - constant: 1.0 - accelerator_count: - runtimeValue: - constant: '{{$.inputs.parameters[''pipelinechannel--accelerator-limit-Output'']}}' - accelerator_type: - runtimeValue: - constant: '{{$.inputs.parameters[''pipelinechannel--accelerator-type-Output'']}}' - b: - runtimeValue: - constant: 2.0 - cpu_limit: - runtimeValue: - constant: '{{$.inputs.parameters[''pipelinechannel--cpu-limit-Output'']}}' - memory_limit: - runtimeValue: - constant: '{{$.inputs.parameters[''pipelinechannel--memory-limit-Output'']}}' - pipelinechannel--accelerator-limit-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: accelerator-limit - pipelinechannel--accelerator-type-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: accelerator-type - pipelinechannel--cpu-limit-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: cpu-limit - pipelinechannel--memory-limit-Output: - taskOutputParameter: - outputParameterKey: Output - producerTask: memory-limit - taskInfo: - name: sum-numbers -schemaVersion: 2.1.0 -sdkVersion: kfp-2.8.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_task_final_status.yaml b/sdk/python/test_data/pipelines/pipeline_with_task_final_status.yaml deleted file mode 100644 index 61c916495cd..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_task_final_status.yaml +++ /dev/null @@ -1,189 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-with-task-final-status -# Inputs: -# message: str [Default: 'Hello World!'] -components: - comp-exit-handler-1: - dag: - tasks: - fail-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-fail-op - inputs: - parameters: - message: - runtimeValue: - constant: Task failed. - taskInfo: - name: fail-op - print-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op - inputs: - parameters: - message: - componentInputParameter: pipelinechannel--message - taskInfo: - name: print-op - inputDefinitions: - parameters: - pipelinechannel--message: - parameterType: STRING - comp-exit-op: - executorLabel: exec-exit-op - inputDefinitions: - parameters: - status: - isOptional: true - parameterType: TASK_FINAL_STATUS - user_input: - parameterType: STRING - comp-fail-op: - executorLabel: exec-fail-op - inputDefinitions: - parameters: - message: - parameterType: STRING - comp-print-op: - executorLabel: exec-print-op - inputDefinitions: - parameters: - message: - parameterType: STRING -deploymentSpec: - executors: - exec-exit-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - exit_op - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef exit_op(user_input: str, status: PipelineTaskFinalStatus):\n\ - \ \"\"\"Checks pipeline run status.\"\"\"\n print('Pipeline status:\ - \ ', status.state)\n print('Job resource name: ', status.pipeline_job_resource_name)\n\ - \ print('Pipeline task name: ', status.pipeline_task_name)\n print('Error\ - \ code: ', status.error_code)\n print('Error message: ', status.error_message)\n\ - \n" - image: python:3.9 - exec-fail-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - fail_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n\ - \ print(message)\n sys.exit(1)\n\n" - image: python:3.9 - exec-print-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ - \ print(message)\n\n" - image: python:3.9 -pipelineInfo: - name: pipeline-with-task-final-status -root: - dag: - tasks: - exit-handler-1: - componentRef: - name: comp-exit-handler-1 - inputs: - parameters: - pipelinechannel--message: - componentInputParameter: message - taskInfo: - name: my-pipeline - exit-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-exit-op - dependentTasks: - - exit-handler-1 - inputs: - parameters: - status: - taskFinalStatus: - producerTask: exit-handler-1 - user_input: - componentInputParameter: message - taskInfo: - name: exit-op - triggerPolicy: - strategy: ALL_UPSTREAM_TASKS_COMPLETED - inputDefinitions: - parameters: - message: - defaultValue: Hello World! - isOptional: true - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_task_final_status_yaml.yaml b/sdk/python/test_data/pipelines/pipeline_with_task_final_status_yaml.yaml deleted file mode 100644 index fd6ef9e3a85..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_task_final_status_yaml.yaml +++ /dev/null @@ -1,95 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-with-task-final-status-yaml -# Inputs: -# message: str [Default: 'Hello World!'] -components: - comp-exit-handler-1: - dag: - tasks: - print-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op - inputs: - parameters: - message: - componentInputParameter: pipelinechannel--message - taskInfo: - name: print-op - inputDefinitions: - parameters: - pipelinechannel--message: - parameterType: STRING - comp-exit-op: - executorLabel: exec-exit-op - inputDefinitions: - parameters: - status: - isOptional: true - parameterType: TASK_FINAL_STATUS - user_input: - parameterType: STRING - comp-print-op: - executorLabel: exec-print-op - inputDefinitions: - parameters: - message: - parameterType: STRING -deploymentSpec: - executors: - exec-exit-op: - container: - command: - - echo - - 'user input:' - - '{{$.inputs.parameters[''user_input'']}}' - - 'pipeline status:' - - '{{$.inputs.parameters[''status'']}}' - image: python:3.9 - exec-print-op: - container: - command: - - echo - - '{{$.inputs.parameters[''message'']}}' - image: python:3.9 -pipelineInfo: - name: pipeline-with-task-final-status-yaml -root: - dag: - tasks: - exit-handler-1: - componentRef: - name: comp-exit-handler-1 - inputs: - parameters: - pipelinechannel--message: - componentInputParameter: message - taskInfo: - name: my-pipeline - exit-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-exit-op - dependentTasks: - - exit-handler-1 - inputs: - parameters: - status: - taskFinalStatus: - producerTask: exit-handler-1 - user_input: - componentInputParameter: message - taskInfo: - name: exit-op - triggerPolicy: - strategy: 
ALL_UPSTREAM_TASKS_COMPLETED - inputDefinitions: - parameters: - message: - defaultValue: Hello World! - isOptional: true - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_task_using_ignore_upstream_failure.yaml b/sdk/python/test_data/pipelines/pipeline_with_task_using_ignore_upstream_failure.yaml deleted file mode 100644 index ffc0928698a..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_task_using_ignore_upstream_failure.yaml +++ /dev/null @@ -1,124 +0,0 @@ -# PIPELINE DEFINITION -# Name: my-pipeline -# Inputs: -# sample_input: str [Default: 'message'] -components: - comp-fail-op: - executorLabel: exec-fail-op - inputDefinitions: - parameters: - message: - parameterType: STRING - outputDefinitions: - parameters: - Output: - parameterType: STRING - comp-print-op: - executorLabel: exec-print-op - inputDefinitions: - parameters: - message: - defaultValue: default - isOptional: true - parameterType: STRING -deploymentSpec: - executors: - exec-fail-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - fail_op - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef fail_op(message: str) -> str:\n \"\"\"Fails.\"\"\"\n import\ - \ sys\n print(message)\n sys.exit(1)\n return message\n\n" - image: python:3.9 - exec-print-op: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - print_op - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef print_op(message: str = 'default'):\n \"\"\"Prints a message.\"\ - \"\"\n print(message)\n\n" - image: python:3.9 -pipelineInfo: - name: my-pipeline -root: - dag: - tasks: - fail-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-fail-op - inputs: - parameters: - message: - componentInputParameter: sample_input - taskInfo: - name: fail-op - print-op: - cachingOptions: - enableCache: true - componentRef: - name: comp-print-op - dependentTasks: - - fail-op - inputs: - parameters: - message: - taskOutputParameter: - outputParameterKey: Output - producerTask: fail-op - taskInfo: - name: print-op - triggerPolicy: - strategy: ALL_UPSTREAM_TASKS_COMPLETED - inputDefinitions: - parameters: - sample_input: - defaultValue: message - isOptional: true - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_various_io_types.yaml b/sdk/python/test_data/pipelines/pipeline_with_various_io_types.yaml deleted file mode 100644 index ac17bc1794d..00000000000 --- a/sdk/python/test_data/pipelines/pipeline_with_various_io_types.yaml +++ /dev/null @@ -1,215 +0,0 @@ -# PIPELINE DEFINITION -# Name: pipeline-with-various-types -# Inputs: -# input1: str -# input3: system.Artifact -# input4: str [Default: ''] -components: - comp-downstream: - executorLabel: exec-downstream - inputDefinitions: - artifacts: - input_b: - artifactType: - schemaTitle: system.Model - schemaVersion: 0.0.1 - input_c: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - input_d: - artifactType: - schemaTitle: system.Model - schemaVersion: 0.0.1 - input_e: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - input_f: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - input_g: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - input_h: - artifactType: - schemaTitle: system.HTML - schemaVersion: 0.0.1 - input_i: - artifactType: - schemaTitle: google.BQMLModel - schemaVersion: 0.0.1 - parameters: - input_a: - parameterType: NUMBER_INTEGER - comp-upstream: - executorLabel: exec-upstream - inputDefinitions: - artifacts: - input_3: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - parameters: - input_1: - parameterType: STRING - input_2: - parameterType: NUMBER_DOUBLE - input_4: - parameterType: STRING - outputDefinitions: - artifacts: - output_2: - artifactType: - schemaTitle: system.Model - schemaVersion: 0.0.1 - output_3: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - output_4: - artifactType: - schemaTitle: system.Model - schemaVersion: 0.0.1 - output_5: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - output_6: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - output_7: - artifactType: - schemaTitle: system.Artifact - 
schemaVersion: 0.0.1 - output_8: - artifactType: - schemaTitle: system.HTML - schemaVersion: 0.0.1 - output_9: - artifactType: - schemaTitle: google.BQMLModel - schemaVersion: 0.0.1 - parameters: - output_1: - parameterType: NUMBER_INTEGER -deploymentSpec: - executors: - exec-downstream: - container: - args: - - '{{$.inputs.parameters[''input_a'']}}' - - '{{$.inputs.artifacts[''input_b''].uri}}' - - '{{$.inputs.artifacts[''input_c''].path}}' - - '{{$.inputs.artifacts[''input_d''].uri}}' - - '{{$.inputs.artifacts[''input_e''].uri}}' - - '{{$.inputs.artifacts[''input_f''].path}}' - - '{{$.inputs.artifacts[''input_g''].path}}' - - '{{$.inputs.artifacts[''input_h''].path}}' - image: gcr.io/image - exec-upstream: - container: - args: - - '{{$.inputs.parameters[''input_1'']}}' - - '{{$.inputs.parameters[''input_2'']}}' - - '{{$.inputs.artifacts[''input_3''].path}}' - - '{{$.inputs.parameters[''input_4'']}}' - - '{{$.outputs.parameters[''output_1''].output_file}}' - - '{{$.outputs.artifacts[''output_2''].uri}}' - - '{{$.outputs.artifacts[''output_3''].path}}' - - '{{$.outputs.artifacts[''output_4''].uri}}' - - '{{$.outputs.artifacts[''output_5''].uri}}' - - '{{$.outputs.artifacts[''output_6''].path}}' - - '{{$.outputs.artifacts[''output_7''].path}}' - - '{{$.outputs.artifacts[''output_8''].path}}' - image: gcr.io/image -pipelineInfo: - name: pipeline-with-various-types -root: - dag: - tasks: - downstream: - cachingOptions: - enableCache: true - componentRef: - name: comp-downstream - dependentTasks: - - upstream - inputs: - artifacts: - input_b: - taskOutputArtifact: - outputArtifactKey: output_2 - producerTask: upstream - input_c: - taskOutputArtifact: - outputArtifactKey: output_3 - producerTask: upstream - input_d: - taskOutputArtifact: - outputArtifactKey: output_4 - producerTask: upstream - input_e: - taskOutputArtifact: - outputArtifactKey: output_5 - producerTask: upstream - input_f: - taskOutputArtifact: - outputArtifactKey: output_6 - producerTask: upstream - input_g: - taskOutputArtifact: - outputArtifactKey: output_7 - producerTask: upstream - input_h: - taskOutputArtifact: - outputArtifactKey: output_8 - producerTask: upstream - input_i: - taskOutputArtifact: - outputArtifactKey: output_9 - producerTask: upstream - parameters: - input_a: - taskOutputParameter: - outputParameterKey: output_1 - producerTask: upstream - taskInfo: - name: downstream - upstream: - cachingOptions: - enableCache: true - componentRef: - name: comp-upstream - inputs: - artifacts: - input_3: - componentInputArtifact: input3 - parameters: - input_1: - componentInputParameter: input1 - input_2: - runtimeValue: - constant: 3.1415926 - input_4: - componentInputParameter: input4 - taskInfo: - name: upstream - inputDefinitions: - artifacts: - input3: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - parameters: - input1: - parameterType: STRING - input4: - defaultValue: '' - isOptional: true - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pythonic_artifact_with_single_return.yaml b/sdk/python/test_data/pipelines/pythonic_artifact_with_single_return.yaml deleted file mode 100644 index fa6aa54dae0..00000000000 --- a/sdk/python/test_data/pipelines/pythonic_artifact_with_single_return.yaml +++ /dev/null @@ -1,123 +0,0 @@ -# PIPELINE DEFINITION -# Name: make-language-model-pipeline -# Outputs: -# Output: system.Model -components: - comp-importer: - executorLabel: exec-importer - inputDefinitions: - parameters: - uri: - parameterType: 
STRING - outputDefinitions: - artifacts: - artifact: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-make-language-model: - executorLabel: exec-make-language-model - inputDefinitions: - artifacts: - text_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - outputDefinitions: - artifacts: - Output: - artifactType: - schemaTitle: system.Model - schemaVersion: 0.0.1 -deploymentSpec: - executors: - exec-importer: - importer: - artifactUri: - constant: gs://ml-pipeline-playground/shakespeare1.txt - metadata: - key: value - typeSchema: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - exec-make-language-model: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - make_language_model - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' &&\ - \ python3 -m pip install --quiet --no-warn-script-location 'dill==0.3.7'\ - \ && \"$0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef make_language_model(text_dataset: Dataset) -> Model:\n # dill\ - \ allows pickling objects belonging to a function's local namespace\n \ - \ import dill\n\n with open(text_dataset.path) as f:\n text =\ - \ f.read()\n\n # insert train on text here #\n\n def dummy_model(x:\ - \ str) -> str:\n return x\n\n model = Model(\n uri=dsl.get_uri(suffix='model'),\n\ - \ metadata={'data': text_dataset.name},\n )\n\n with open(model.path,\ - \ 'wb') as f:\n dill.dump(dummy_model, f)\n\n return model\n\n" - image: python:3.9 -pipelineInfo: - name: make-language-model-pipeline -root: - dag: - outputs: - artifacts: - Output: - artifactSelectors: - - outputArtifactKey: Output - producerSubtask: make-language-model - tasks: - importer: - cachingOptions: - enableCache: true - componentRef: - name: comp-importer - inputs: - parameters: - uri: - runtimeValue: - constant: gs://ml-pipeline-playground/shakespeare1.txt - taskInfo: - name: importer - make-language-model: - cachingOptions: - enableCache: true - componentRef: - name: comp-make-language-model - dependentTasks: - - importer - inputs: - artifacts: - text_dataset: - taskOutputArtifact: - outputArtifactKey: artifact - producerTask: importer - taskInfo: - name: make-language-model - outputDefinitions: - artifacts: - Output: - artifactType: - schemaTitle: system.Model - schemaVersion: 0.0.1 -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pythonic_artifacts_with_list_of_artifacts.yaml b/sdk/python/test_data/pipelines/pythonic_artifacts_with_list_of_artifacts.yaml deleted file mode 100644 index 985e26f026a..00000000000 --- a/sdk/python/test_data/pipelines/pythonic_artifacts_with_list_of_artifacts.yaml +++ /dev/null @@ -1,187 +0,0 @@ -# PIPELINE DEFINITION -# Name: make-and-join-datasets -# Inputs: -# texts: list [Default: ['Hello', ',', ' ', 'world!']] -# Outputs: -# Output: system.Dataset -components: - comp-for-loop-1: - dag: - outputs: - artifacts: - 
pipelinechannel--make-dataset-Output: - artifactSelectors: - - outputArtifactKey: Output - producerSubtask: make-dataset - tasks: - make-dataset: - cachingOptions: - enableCache: true - componentRef: - name: comp-make-dataset - inputs: - parameters: - text: - componentInputParameter: pipelinechannel--texts-loop-item - taskInfo: - name: make-dataset - inputDefinitions: - parameters: - pipelinechannel--texts: - parameterType: LIST - pipelinechannel--texts-loop-item: - parameterType: STRING - outputDefinitions: - artifacts: - pipelinechannel--make-dataset-Output: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isArtifactList: true - comp-join-datasets: - executorLabel: exec-join-datasets - inputDefinitions: - artifacts: - datasets: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - isArtifactList: true - outputDefinitions: - artifacts: - Output: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-make-dataset: - executorLabel: exec-make-dataset - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - artifacts: - Output: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 -deploymentSpec: - executors: - exec-join-datasets: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - join_datasets - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef join_datasets(datasets: List[Dataset]) -> Dataset:\n texts\ - \ = []\n for dataset in datasets:\n with open(dataset.path, 'r')\ - \ as f:\n texts.append(f.read())\n\n return ''.join(texts)\n\ - \n" - image: python:3.9 - exec-make-dataset: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - make_dataset - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef make_dataset(text: str) -> Dataset:\n dataset = Dataset(uri=dsl.get_uri(),\ - \ metadata={'length': len(text)})\n with open(dataset.path, 'w') as f:\n\ - \ f.write(text)\n return dataset\n\n" - image: python:3.9 -pipelineInfo: - name: make-and-join-datasets -root: - dag: - outputs: - artifacts: - Output: - artifactSelectors: - - outputArtifactKey: Output - producerSubtask: join-datasets - tasks: - for-loop-1: - componentRef: - name: comp-for-loop-1 - inputs: - parameters: - pipelinechannel--texts: - componentInputParameter: texts - parameterIterator: - itemInput: pipelinechannel--texts-loop-item - items: - inputParameter: pipelinechannel--texts - taskInfo: - name: for-loop-1 - join-datasets: - cachingOptions: - enableCache: true - componentRef: - name: comp-join-datasets - dependentTasks: - - for-loop-1 - inputs: - artifacts: - datasets: - taskOutputArtifact: - outputArtifactKey: pipelinechannel--make-dataset-Output - producerTask: for-loop-1 - taskInfo: - name: join-datasets - inputDefinitions: - parameters: - texts: - defaultValue: - - Hello - - ',' - - ' ' - - world! - isOptional: true - parameterType: LIST - outputDefinitions: - artifacts: - Output: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/pythonic_artifacts_with_multiple_returns.yaml b/sdk/python/test_data/pipelines/pythonic_artifacts_with_multiple_returns.yaml deleted file mode 100644 index 247484fbe28..00000000000 --- a/sdk/python/test_data/pipelines/pythonic_artifacts_with_multiple_returns.yaml +++ /dev/null @@ -1,184 +0,0 @@ -# PIPELINE DEFINITION -# Name: split-datasets-and-return-first -# Outputs: -# Output: system.Dataset -components: - comp-dataset-splitter: - executorLabel: exec-dataset-splitter - inputDefinitions: - artifacts: - in_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - outputDefinitions: - artifacts: - dataset1: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - dataset2: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-make-dataset: - executorLabel: exec-make-dataset - outputDefinitions: - artifacts: - Output: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - comp-splitter-pipeline: - dag: - outputs: - artifacts: - dataset1: - artifactSelectors: - - outputArtifactKey: dataset1 - producerSubtask: dataset-splitter - dataset2: - artifactSelectors: - - outputArtifactKey: dataset1 - producerSubtask: dataset-splitter - tasks: - dataset-splitter: - cachingOptions: - enableCache: true - componentRef: - name: comp-dataset-splitter - inputs: - artifacts: - in_dataset: - componentInputArtifact: in_dataset - taskInfo: - name: dataset-splitter - inputDefinitions: - artifacts: - in_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 
- outputDefinitions: - artifacts: - dataset1: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - dataset2: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 -deploymentSpec: - executors: - exec-dataset-splitter: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - dataset_splitter - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef dataset_splitter(\n in_dataset: Dataset\n) -> NamedTuple(\n\ - \ 'outputs',\n dataset1=Dataset,\n dataset2=Dataset,\n\ - ):\n\n with open(in_dataset.path) as f:\n in_data = f.read()\n\ - \n out_data1, out_data2 = in_data[:len(in_data) // 2], in_data[len(in_data)\ - \ //\n 2:]\n\ - \n dataset1 = Dataset(\n uri=dsl.get_uri(suffix='dataset1'),\n\ - \ metadata={'original_data': in_dataset.name},\n )\n with open(dataset1.path,\ - \ 'w') as f:\n f.write(out_data1)\n\n dataset2 = Dataset(\n \ - \ uri=dsl.get_uri(suffix='dataset2'),\n metadata={'original_data':\ - \ in_dataset.name},\n )\n with open(dataset2.path, 'w') as f:\n \ - \ f.write(out_data2)\n\n outputs = NamedTuple(\n 'outputs',\n\ - \ dataset1=Dataset,\n dataset2=Dataset,\n )\n return\ - \ outputs(dataset1=dataset1, dataset2=dataset2)\n\n" - image: python:3.9 - exec-make-dataset: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - make_dataset - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ - $0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef make_dataset() -> Artifact:\n artifact = Artifact(uri=dsl.get_uri('dataset'))\n\ - \ with open(artifact.path, 'w') as f:\n f.write('Hello, world')\n\ - \ return artifact\n\n" - image: python:3.9 -pipelineInfo: - name: split-datasets-and-return-first -root: - dag: - outputs: - artifacts: - Output: - artifactSelectors: - - outputArtifactKey: dataset1 - producerSubtask: splitter-pipeline - tasks: - make-dataset: - cachingOptions: - enableCache: true - componentRef: - name: comp-make-dataset - taskInfo: - name: make-dataset - splitter-pipeline: - cachingOptions: - enableCache: true - componentRef: - name: comp-splitter-pipeline - dependentTasks: - - make-dataset - inputs: - artifacts: - in_dataset: - taskOutputArtifact: - outputArtifactKey: Output - producerTask: make-dataset - taskInfo: - name: splitter-pipeline - outputDefinitions: - artifacts: - Output: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/two_step_pipeline.yaml b/sdk/python/test_data/pipelines/two_step_pipeline.yaml deleted file mode 100644 index f9298a98b68..00000000000 --- a/sdk/python/test_data/pipelines/two_step_pipeline.yaml +++ /dev/null @@ -1,91 +0,0 @@ -# PIPELINE DEFINITION -# Name: simple-two-step-pipeline -# Inputs: -# text: str [Default: 'Hello KFP!'] -components: - comp-read-from-gcs: - executorLabel: exec-read-from-gcs - inputDefinitions: - artifacts: - input_gcs_path: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - comp-write-to-gcs: - executorLabel: exec-write-to-gcs - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - artifacts: - output_gcs_path: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 -deploymentSpec: - executors: - exec-read-from-gcs: - container: - command: - - sh - - -c - - 'set -e -x - - gsutil cat "$0" - - ' - - '{{$.inputs.artifacts[''input_gcs_path''].uri}}' - image: google/cloud-sdk:slim - exec-write-to-gcs: - container: - command: - - sh - - -c - - 'set -e -x - - echo "$0" | gsutil cp - "$1" - - ' - - '{{$.inputs.parameters[''text'']}}' - - '{{$.outputs.artifacts[''output_gcs_path''].uri}}' - image: google/cloud-sdk:slim -pipelineInfo: - name: simple-two-step-pipeline -root: - dag: - tasks: - read-from-gcs: - cachingOptions: - enableCache: true - componentRef: - name: comp-read-from-gcs - dependentTasks: - - write-to-gcs - inputs: - artifacts: - input_gcs_path: - taskOutputArtifact: - outputArtifactKey: output_gcs_path - producerTask: write-to-gcs - taskInfo: - name: Consumer - write-to-gcs: - cachingOptions: - enableCache: true - componentRef: - name: comp-write-to-gcs - inputs: - parameters: - text: - componentInputParameter: text - taskInfo: - name: Producer - inputDefinitions: - parameters: - text: - defaultValue: Hello KFP! 
- isOptional: true - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/two_step_pipeline_containerized.py b/sdk/python/test_data/pipelines/two_step_pipeline_containerized.py deleted file mode 100644 index 925cf13c7ad..00000000000 --- a/sdk/python/test_data/pipelines/two_step_pipeline_containerized.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2022 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from kfp import compiler -from kfp import dsl - - -@dsl.container_component -def component1(text: str, output_gcs: dsl.Output[dsl.Dataset]): - return dsl.ContainerSpec( - image='alpine', - command=[ - 'sh', - '-c', - 'mkdir --parents $(dirname "$1") && echo "$0" > "$1"', - ], - args=[text, output_gcs.path]) - - -@dsl.container_component -def component2(input_gcs: dsl.Input[dsl.Dataset]): - return dsl.ContainerSpec( - image='alpine', command=['cat'], args=[input_gcs.path]) - - -@dsl.pipeline(name='containerized-two-step-pipeline') -def my_pipeline(text: str): - component_1 = component1(text=text) - component_2 = component2(input_gcs=component_1.outputs['output_gcs']) - - -if __name__ == '__main__': - compiler.Compiler().compile( - pipeline_func=my_pipeline, - package_path=__file__.replace('.py', '.yaml')) diff --git a/sdk/python/test_data/pipelines/two_step_pipeline_containerized.yaml b/sdk/python/test_data/pipelines/two_step_pipeline_containerized.yaml deleted file mode 100644 index a00c67e46b5..00000000000 --- a/sdk/python/test_data/pipelines/two_step_pipeline_containerized.yaml +++ /dev/null @@ -1,81 +0,0 @@ -# PIPELINE DEFINITION -# Name: containerized-two-step-pipeline -# Inputs: -# text: str -components: - comp-component1: - executorLabel: exec-component1 - inputDefinitions: - parameters: - text: - parameterType: STRING - outputDefinitions: - artifacts: - output_gcs: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-component2: - executorLabel: exec-component2 - inputDefinitions: - artifacts: - input_gcs: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 -deploymentSpec: - executors: - exec-component1: - container: - args: - - '{{$.inputs.parameters[''text'']}}' - - '{{$.outputs.artifacts[''output_gcs''].path}}' - command: - - sh - - -c - - mkdir --parents $(dirname "$1") && echo "$0" > "$1" - image: alpine - exec-component2: - container: - args: - - '{{$.inputs.artifacts[''input_gcs''].path}}' - command: - - cat - image: alpine -pipelineInfo: - name: containerized-two-step-pipeline -root: - dag: - tasks: - component1: - cachingOptions: - enableCache: true - componentRef: - name: comp-component1 - inputs: - parameters: - text: - componentInputParameter: text - taskInfo: - name: component1 - component2: - cachingOptions: - enableCache: true - componentRef: - name: comp-component2 - dependentTasks: - - component1 - inputs: - artifacts: - input_gcs: - taskOutputArtifact: - outputArtifactKey: output_gcs - producerTask: component1 - taskInfo: - name: component2 - 
inputDefinitions: - parameters: - text: - parameterType: STRING -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/pipelines/xgboost_sample_pipeline.yaml b/sdk/python/test_data/pipelines/xgboost_sample_pipeline.yaml deleted file mode 100644 index 2e03d06edff..00000000000 --- a/sdk/python/test_data/pipelines/xgboost_sample_pipeline.yaml +++ /dev/null @@ -1,926 +0,0 @@ -# PIPELINE DEFINITION -# Name: xgboost-sample-pipeline -components: - comp-chicago-taxi-trips-dataset: - executorLabel: exec-chicago-taxi-trips-dataset - inputDefinitions: - parameters: - format: - defaultValue: csv - isOptional: true - parameterType: STRING - limit: - defaultValue: 1000.0 - isOptional: true - parameterType: NUMBER_INTEGER - select: - defaultValue: trip_id,taxi_id,trip_start_timestamp,trip_end_timestamp,trip_seconds,trip_miles,pickup_census_tract,dropoff_census_tract,pickup_community_area,dropoff_community_area,fare,tips,tolls,extras,trip_total,payment_type,company,pickup_centroid_latitude,pickup_centroid_longitude,pickup_centroid_location,dropoff_centroid_latitude,dropoff_centroid_longitude,dropoff_centroid_location - isOptional: true - parameterType: STRING - where: - defaultValue: trip_start_timestamp>="1900-01-01" AND trip_start_timestamp<"2100-01-01" - isOptional: true - parameterType: STRING - outputDefinitions: - artifacts: - table: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - comp-convert-csv-to-apache-parquet: - executorLabel: exec-convert-csv-to-apache-parquet - inputDefinitions: - artifacts: - data: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - outputDefinitions: - artifacts: - output_data: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - comp-xgboost-predict: - executorLabel: exec-xgboost-predict - inputDefinitions: - artifacts: - data: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - model: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - parameters: - label_column: - isOptional: true - parameterType: NUMBER_INTEGER - outputDefinitions: - artifacts: - predictions: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - comp-xgboost-predict-2: - executorLabel: exec-xgboost-predict-2 - inputDefinitions: - artifacts: - data: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - model: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - parameters: - label_column_name: - isOptional: true - parameterType: STRING - outputDefinitions: - artifacts: - predictions: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - comp-xgboost-predict-3: - executorLabel: exec-xgboost-predict-3 - inputDefinitions: - artifacts: - data: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - model: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - parameters: - label_column_name: - isOptional: true - parameterType: STRING - outputDefinitions: - artifacts: - predictions: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - comp-xgboost-predict-4: - executorLabel: exec-xgboost-predict-4 - inputDefinitions: - artifacts: - data: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - model: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - parameters: - label_column: - isOptional: true - parameterType: NUMBER_INTEGER - outputDefinitions: - artifacts: - predictions: - artifactType: - schemaTitle: system.Artifact - 
schemaVersion: 0.0.1 - comp-xgboost-train: - executorLabel: exec-xgboost-train - inputDefinitions: - artifacts: - starting_model: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - isOptional: true - training_data: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - parameters: - booster: - defaultValue: gbtree - isOptional: true - parameterType: STRING - booster_params: - isOptional: true - parameterType: STRUCT - label_column: - defaultValue: 0.0 - isOptional: true - parameterType: NUMBER_INTEGER - learning_rate: - defaultValue: 0.3 - isOptional: true - parameterType: NUMBER_DOUBLE - max_depth: - defaultValue: 6.0 - isOptional: true - parameterType: NUMBER_INTEGER - min_split_loss: - defaultValue: 0.0 - isOptional: true - parameterType: NUMBER_DOUBLE - num_iterations: - defaultValue: 10.0 - isOptional: true - parameterType: NUMBER_INTEGER - objective: - defaultValue: reg:squarederror - isOptional: true - parameterType: STRING - outputDefinitions: - artifacts: - model: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - model_config: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - comp-xgboost-train-2: - executorLabel: exec-xgboost-train-2 - inputDefinitions: - artifacts: - starting_model: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - isOptional: true - training_data: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - parameters: - booster: - defaultValue: gbtree - isOptional: true - parameterType: STRING - booster_params: - isOptional: true - parameterType: STRUCT - label_column_name: - parameterType: STRING - learning_rate: - defaultValue: 0.3 - isOptional: true - parameterType: NUMBER_DOUBLE - max_depth: - defaultValue: 6.0 - isOptional: true - parameterType: NUMBER_INTEGER - min_split_loss: - defaultValue: 0.0 - isOptional: true - parameterType: NUMBER_DOUBLE - num_iterations: - defaultValue: 10.0 - isOptional: true - parameterType: NUMBER_INTEGER - objective: - defaultValue: reg:squarederror - isOptional: true - parameterType: STRING - outputDefinitions: - artifacts: - model: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 - model_config: - artifactType: - schemaTitle: system.Artifact - schemaVersion: 0.0.1 -deploymentSpec: - executors: - exec-chicago-taxi-trips-dataset: - container: - command: - - sh - - -c - - "set -e -x -o pipefail\noutput_path=\"$0\"\nselect=\"$1\"\nwhere=\"$2\"\n\ - limit=\"$3\"\nformat=\"$4\"\nmkdir -p \"$(dirname \"$output_path\")\"\n\ - curl --get 'https://data.cityofchicago.org/resource/wrvz-psew.'\"${format}\"\ - \ \\\n --data-urlencode '$limit='\"${limit}\" \\\n --data-urlencode\ - \ '$where='\"${where}\" \\\n --data-urlencode '$select='\"${select}\"\ - \ \\\n | tr -d '\"' > \"$output_path\" # Removing unneeded quotes around\ - \ all numbers\n" - - '{{$.outputs.artifacts[''table''].path}}' - - '{{$.inputs.parameters[''select'']}}' - - '{{$.inputs.parameters[''where'']}}' - - '{{$.inputs.parameters[''limit'']}}' - - '{{$.inputs.parameters[''format'']}}' - image: byrnedo/alpine-curl@sha256:548379d0a4a0c08b9e55d9d87a592b7d35d9ab3037f4936f5ccd09d0b625a342 - exec-convert-csv-to-apache-parquet: - container: - args: - - --data - - '{{$.inputs.artifacts[''data''].path}}' - - --output-data - - '{{$.outputs.artifacts[''output_data''].path}}' - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pyarrow==0.17.1' || PIP_DISABLE_PIP_VERSION_CHECK=1 
python3 -m pip install - --quiet --no-warn-script-location 'pyarrow==0.17.1' --user) && "$0" "$@" - - python3 - - -u - - -c - - "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n\ - \ os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return\ - \ file_path\n\ndef convert_csv_to_apache_parquet(\n data_path,\n output_data_path,\n\ - ):\n '''Converts CSV table to Apache Parquet.\n\n [Apache Parquet](https://parquet.apache.org/)\n\ - \n Annotations:\n author: Alexey Volkov \n\ - \ '''\n from pyarrow import csv, parquet\n\n table = csv.read_csv(data_path)\n\ - \ parquet.write_table(table, output_data_path)\n\nimport argparse\n_parser\ - \ = argparse.ArgumentParser(prog='Convert csv to apache parquet', description='Converts\ - \ CSV table to Apache Parquet.\\n\\n [Apache Parquet](https://parquet.apache.org/)\\\ - n\\n Annotations:\\n author: Alexey Volkov ')\n\ - _parser.add_argument(\"--data\", dest=\"data_path\", type=str, required=True,\ - \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--output-data\", dest=\"\ - output_data_path\", type=_make_parent_dirs_and_return_path, required=True,\ - \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ - _output_files = _parsed_args.pop(\"_output_paths\", [])\n\n_outputs = convert_csv_to_apache_parquet(**_parsed_args)\n\ - \n_output_serializers = [\n\n]\n\nimport os\nfor idx, output_file in enumerate(_output_files):\n\ - \ try:\n os.makedirs(os.path.dirname(output_file))\n except\ - \ OSError:\n pass\n with open(output_file, 'w') as f:\n \ - \ f.write(_output_serializers[idx](_outputs[idx]))\n" - image: python:3.7 - exec-xgboost-predict: - container: - args: - - --data - - '{{$.inputs.artifacts[''data''].path}}' - - --model - - '{{$.inputs.artifacts[''model''].path}}' - - '{"IfPresent": {"InputName": "label_column", "Then": ["--label-column", - "{{$.inputs.parameters[''label_column'']}}"]}}' - - --predictions - - '{{$.outputs.artifacts[''predictions''].path}}' - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'xgboost==1.1.1' 'pandas==1.0.5' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 - -m pip install --quiet --no-warn-script-location 'xgboost==1.1.1' 'pandas==1.0.5' - --user) && "$0" "$@" - - python3 - - -u - - -c - - "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n\ - \ os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return\ - \ file_path\n\ndef xgboost_predict(\n data_path, # Also supports LibSVM\n\ - \ model_path,\n predictions_path,\n label_column = None,\n):\n\ - \ '''Make predictions using a trained XGBoost model.\n\n Args:\n \ - \ data_path: Path for the feature data in CSV format.\n model_path:\ - \ Path for the trained model in binary XGBoost format.\n predictions_path:\ - \ Output path for the predictions.\n label_column: Column containing\ - \ the label data.\n\n Annotations:\n author: Alexey Volkov \n\ - \ '''\n from pathlib import Path\n\n import numpy\n import pandas\n\ - \ import xgboost\n\n df = pandas.read_csv(\n data_path,\n \ - \ )\n\n if label_column is not None:\n df = df.drop(columns=[df.columns[label_column]])\n\ - \n testing_data = xgboost.DMatrix(\n data=df,\n )\n\n model\ - \ = xgboost.Booster(model_file=model_path)\n\n predictions = model.predict(testing_data)\n\ - \n Path(predictions_path).parent.mkdir(parents=True, exist_ok=True)\n\ - \ numpy.savetxt(predictions_path, predictions)\n\nimport argparse\n_parser\ - \ = argparse.ArgumentParser(prog='Xgboost predict', 
description='Make predictions\ - \ using a trained XGBoost model.\\n\\n Args:\\n data_path: Path\ - \ for the feature data in CSV format.\\n model_path: Path for the\ - \ trained model in binary XGBoost format.\\n predictions_path: Output\ - \ path for the predictions.\\n label_column: Column containing the\ - \ label data.\\n\\n Annotations:\\n author: Alexey Volkov ')\n\ - _parser.add_argument(\"--data\", dest=\"data_path\", type=str, required=True,\ - \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\", dest=\"\ - model_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"\ - --label-column\", dest=\"label_column\", type=int, required=False, default=argparse.SUPPRESS)\n\ - _parser.add_argument(\"--predictions\", dest=\"predictions_path\", type=_make_parent_dirs_and_return_path,\ - \ required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ - \n_outputs = xgboost_predict(**_parsed_args)\n" - image: python:3.7 - exec-xgboost-predict-2: - container: - args: - - --data - - '{{$.inputs.artifacts[''data''].path}}' - - --model - - '{{$.inputs.artifacts[''model''].path}}' - - '{"IfPresent": {"InputName": "label_column_name", "Then": ["--label-column-name", - "{{$.inputs.parameters[''label_column_name'']}}"]}}' - - --predictions - - '{{$.outputs.artifacts[''predictions''].path}}' - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'xgboost==1.1.1' 'pandas==1.0.5' 'pyarrow==0.17.1' || PIP_DISABLE_PIP_VERSION_CHECK=1 - python3 -m pip install --quiet --no-warn-script-location 'xgboost==1.1.1' - 'pandas==1.0.5' 'pyarrow==0.17.1' --user) && "$0" "$@" - - python3 - - -u - - -c - - "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n\ - \ os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return\ - \ file_path\n\ndef xgboost_predict(\n data_path,\n model_path,\n \ - \ predictions_path,\n label_column_name = None,\n):\n '''Make predictions\ - \ using a trained XGBoost model.\n\n Args:\n data_path: Path for\ - \ the feature data in Apache Parquet format.\n model_path: Path for\ - \ the trained model in binary XGBoost format.\n predictions_path:\ - \ Output path for the predictions.\n label_column_name: Optional.\ - \ Name of the column containing the label data that is excluded during the\ - \ prediction.\n\n Annotations:\n author: Alexey Volkov \n\ - \ '''\n from pathlib import Path\n\n import numpy\n import pandas\n\ - \ import xgboost\n\n # Loading data\n df = pandas.read_parquet(data_path)\n\ - \ if label_column_name:\n df = df.drop(columns=[label_column_name])\n\ - \n evaluation_data = xgboost.DMatrix(\n data=df,\n )\n\n \ - \ # Training\n model = xgboost.Booster(model_file=model_path)\n\n \ - \ predictions = model.predict(evaluation_data)\n\n Path(predictions_path).parent.mkdir(parents=True,\ - \ exist_ok=True)\n numpy.savetxt(predictions_path, predictions)\n\nimport\ - \ argparse\n_parser = argparse.ArgumentParser(prog='Xgboost predict', description='Make\ - \ predictions using a trained XGBoost model.\\n\\n Args:\\n data_path:\ - \ Path for the feature data in Apache Parquet format.\\n model_path:\ - \ Path for the trained model in binary XGBoost format.\\n predictions_path:\ - \ Output path for the predictions.\\n label_column_name: Optional.\ - \ Name of the column containing the label data that is excluded during the\ - \ prediction.\\n\\n Annotations:\\n author: Alexey Volkov ')\n\ - _parser.add_argument(\"--data\", 
dest=\"data_path\", type=str, required=True,\ - \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\", dest=\"\ - model_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"\ - --label-column-name\", dest=\"label_column_name\", type=str, required=False,\ - \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--predictions\", dest=\"\ - predictions_path\", type=_make_parent_dirs_and_return_path, required=True,\ - \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ - \n_outputs = xgboost_predict(**_parsed_args)\n" - image: python:3.7 - exec-xgboost-predict-3: - container: - args: - - --data - - '{{$.inputs.artifacts[''data''].path}}' - - --model - - '{{$.inputs.artifacts[''model''].path}}' - - '{"IfPresent": {"InputName": "label_column_name", "Then": ["--label-column-name", - "{{$.inputs.parameters[''label_column_name'']}}"]}}' - - --predictions - - '{{$.outputs.artifacts[''predictions''].path}}' - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'xgboost==1.1.1' 'pandas==1.0.5' 'pyarrow==0.17.1' || PIP_DISABLE_PIP_VERSION_CHECK=1 - python3 -m pip install --quiet --no-warn-script-location 'xgboost==1.1.1' - 'pandas==1.0.5' 'pyarrow==0.17.1' --user) && "$0" "$@" - - python3 - - -u - - -c - - "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n\ - \ os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return\ - \ file_path\n\ndef xgboost_predict(\n data_path,\n model_path,\n \ - \ predictions_path,\n label_column_name = None,\n):\n '''Make predictions\ - \ using a trained XGBoost model.\n\n Args:\n data_path: Path for\ - \ the feature data in Apache Parquet format.\n model_path: Path for\ - \ the trained model in binary XGBoost format.\n predictions_path:\ - \ Output path for the predictions.\n label_column_name: Optional.\ - \ Name of the column containing the label data that is excluded during the\ - \ prediction.\n\n Annotations:\n author: Alexey Volkov \n\ - \ '''\n from pathlib import Path\n\n import numpy\n import pandas\n\ - \ import xgboost\n\n # Loading data\n df = pandas.read_parquet(data_path)\n\ - \ if label_column_name:\n df = df.drop(columns=[label_column_name])\n\ - \n evaluation_data = xgboost.DMatrix(\n data=df,\n )\n\n \ - \ # Training\n model = xgboost.Booster(model_file=model_path)\n\n \ - \ predictions = model.predict(evaluation_data)\n\n Path(predictions_path).parent.mkdir(parents=True,\ - \ exist_ok=True)\n numpy.savetxt(predictions_path, predictions)\n\nimport\ - \ argparse\n_parser = argparse.ArgumentParser(prog='Xgboost predict', description='Make\ - \ predictions using a trained XGBoost model.\\n\\n Args:\\n data_path:\ - \ Path for the feature data in Apache Parquet format.\\n model_path:\ - \ Path for the trained model in binary XGBoost format.\\n predictions_path:\ - \ Output path for the predictions.\\n label_column_name: Optional.\ - \ Name of the column containing the label data that is excluded during the\ - \ prediction.\\n\\n Annotations:\\n author: Alexey Volkov ')\n\ - _parser.add_argument(\"--data\", dest=\"data_path\", type=str, required=True,\ - \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\", dest=\"\ - model_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"\ - --label-column-name\", dest=\"label_column_name\", type=str, required=False,\ - \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--predictions\", dest=\"\ - predictions_path\", 
type=_make_parent_dirs_and_return_path, required=True,\ - \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ - \n_outputs = xgboost_predict(**_parsed_args)\n" - image: python:3.7 - exec-xgboost-predict-4: - container: - args: - - --data - - '{{$.inputs.artifacts[''data''].path}}' - - --model - - '{{$.inputs.artifacts[''model''].path}}' - - '{"IfPresent": {"InputName": "label_column", "Then": ["--label-column", - "{{$.inputs.parameters[''label_column'']}}"]}}' - - --predictions - - '{{$.outputs.artifacts[''predictions''].path}}' - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'xgboost==1.1.1' 'pandas==1.0.5' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 - -m pip install --quiet --no-warn-script-location 'xgboost==1.1.1' 'pandas==1.0.5' - --user) && "$0" "$@" - - python3 - - -u - - -c - - "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n\ - \ os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return\ - \ file_path\n\ndef xgboost_predict(\n data_path, # Also supports LibSVM\n\ - \ model_path,\n predictions_path,\n label_column = None,\n):\n\ - \ '''Make predictions using a trained XGBoost model.\n\n Args:\n \ - \ data_path: Path for the feature data in CSV format.\n model_path:\ - \ Path for the trained model in binary XGBoost format.\n predictions_path:\ - \ Output path for the predictions.\n label_column: Column containing\ - \ the label data.\n\n Annotations:\n author: Alexey Volkov \n\ - \ '''\n from pathlib import Path\n\n import numpy\n import pandas\n\ - \ import xgboost\n\n df = pandas.read_csv(\n data_path,\n \ - \ )\n\n if label_column is not None:\n df = df.drop(columns=[df.columns[label_column]])\n\ - \n testing_data = xgboost.DMatrix(\n data=df,\n )\n\n model\ - \ = xgboost.Booster(model_file=model_path)\n\n predictions = model.predict(testing_data)\n\ - \n Path(predictions_path).parent.mkdir(parents=True, exist_ok=True)\n\ - \ numpy.savetxt(predictions_path, predictions)\n\nimport argparse\n_parser\ - \ = argparse.ArgumentParser(prog='Xgboost predict', description='Make predictions\ - \ using a trained XGBoost model.\\n\\n Args:\\n data_path: Path\ - \ for the feature data in CSV format.\\n model_path: Path for the\ - \ trained model in binary XGBoost format.\\n predictions_path: Output\ - \ path for the predictions.\\n label_column: Column containing the\ - \ label data.\\n\\n Annotations:\\n author: Alexey Volkov ')\n\ - _parser.add_argument(\"--data\", dest=\"data_path\", type=str, required=True,\ - \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\", dest=\"\ - model_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"\ - --label-column\", dest=\"label_column\", type=int, required=False, default=argparse.SUPPRESS)\n\ - _parser.add_argument(\"--predictions\", dest=\"predictions_path\", type=_make_parent_dirs_and_return_path,\ - \ required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ - \n_outputs = xgboost_predict(**_parsed_args)\n" - image: python:3.7 - exec-xgboost-train: - container: - args: - - --training-data - - '{{$.inputs.artifacts[''training_data''].path}}' - - '{"IfPresent": {"InputName": "starting_model", "Then": ["--starting-model", - "{{$.inputs.artifacts[''starting_model''].path}}"]}}' - - '{"IfPresent": {"InputName": "label_column", "Then": ["--label-column", - "{{$.inputs.parameters[''label_column'']}}"]}}' - - '{"IfPresent": {"InputName": "num_iterations", "Then": 
["--num-iterations", - "{{$.inputs.parameters[''num_iterations'']}}"]}}' - - '{"IfPresent": {"InputName": "booster_params", "Then": ["--booster-params", - "{{$.inputs.parameters[''booster_params'']}}"]}}' - - '{"IfPresent": {"InputName": "objective", "Then": ["--objective", "{{$.inputs.parameters[''objective'']}}"]}}' - - '{"IfPresent": {"InputName": "booster", "Then": ["--booster", "{{$.inputs.parameters[''booster'']}}"]}}' - - '{"IfPresent": {"InputName": "learning_rate", "Then": ["--learning-rate", - "{{$.inputs.parameters[''learning_rate'']}}"]}}' - - '{"IfPresent": {"InputName": "min_split_loss", "Then": ["--min-split-loss", - "{{$.inputs.parameters[''min_split_loss'']}}"]}}' - - '{"IfPresent": {"InputName": "max_depth", "Then": ["--max-depth", "{{$.inputs.parameters[''max_depth'']}}"]}}' - - --model - - '{{$.outputs.artifacts[''model''].path}}' - - --model-config - - '{{$.outputs.artifacts[''model_config''].path}}' - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'xgboost==1.1.1' 'pandas==1.0.5' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 - -m pip install --quiet --no-warn-script-location 'xgboost==1.1.1' 'pandas==1.0.5' - --user) && "$0" "$@" - - python3 - - -u - - -c - - "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n\ - \ os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return\ - \ file_path\n\ndef xgboost_train(\n training_data_path, # Also supports\ - \ LibSVM\n model_path,\n model_config_path,\n starting_model_path\ - \ = None,\n\n label_column = 0,\n num_iterations = 10,\n booster_params\ - \ = None,\n\n # Booster parameters\n objective = 'reg:squarederror',\n\ - \ booster = 'gbtree',\n learning_rate = 0.3,\n min_split_loss =\ - \ 0,\n max_depth = 6,\n):\n '''Train an XGBoost model.\n\n Args:\n\ - \ training_data_path: Path for the training data in CSV format.\n\ - \ model_path: Output path for the trained model in binary XGBoost\ - \ format.\n model_config_path: Output path for the internal parameter\ - \ configuration of Booster as a JSON string.\n starting_model_path:\ - \ Path for the existing trained model to start from.\n label_column:\ - \ Column containing the label data.\n num_boost_rounds: Number of\ - \ boosting iterations.\n booster_params: Parameters for the booster.\ - \ See https://xgboost.readthedocs.io/en/latest/parameter.html\n objective:\ - \ The learning task and the corresponding learning objective.\n \ - \ See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters\n\ - \ The most common values are:\n \"reg:squarederror\"\ - \ - Regression with squared loss (default).\n \"reg:logistic\"\ - \ - Logistic regression.\n \"binary:logistic\" - Logistic regression\ - \ for binary classification, output probability.\n \"binary:logitraw\"\ - \ - Logistic regression for binary classification, output score before logistic\ - \ transformation\n \"rank:pairwise\" - Use LambdaMART to perform\ - \ pairwise ranking where the pairwise loss is minimized\n \"\ - rank:ndcg\" - Use LambdaMART to perform list-wise ranking where Normalized\ - \ Discounted Cumulative Gain (NDCG) is maximized\n\n Annotations:\n \ - \ author: Alexey Volkov \n '''\n \ - \ import pandas\n import xgboost\n\n df = pandas.read_csv(\n \ - \ training_data_path,\n )\n\n training_data = xgboost.DMatrix(\n\ - \ data=df.drop(columns=[df.columns[label_column]]),\n label=df[df.columns[label_column]],\n\ - \ )\n\n booster_params = booster_params or {}\n 
booster_params.setdefault('objective',\ - \ objective)\n booster_params.setdefault('booster', booster)\n booster_params.setdefault('learning_rate',\ - \ learning_rate)\n booster_params.setdefault('min_split_loss', min_split_loss)\n\ - \ booster_params.setdefault('max_depth', max_depth)\n\n starting_model\ - \ = None\n if starting_model_path:\n starting_model = xgboost.Booster(model_file=starting_model_path)\n\ - \n model = xgboost.train(\n params=booster_params,\n dtrain=training_data,\n\ - \ num_boost_round=num_iterations,\n xgb_model=starting_model\n\ - \ )\n\n # Saving the model in binary format\n model.save_model(model_path)\n\ - \n model_config_str = model.save_config()\n with open(model_config_path,\ - \ 'w') as model_config_file:\n model_config_file.write(model_config_str)\n\ - \nimport json\nimport argparse\n_parser = argparse.ArgumentParser(prog='Xgboost\ - \ train', description='Train an XGBoost model.\\n\\n Args:\\n \ - \ training_data_path: Path for the training data in CSV format.\\n \ - \ model_path: Output path for the trained model in binary XGBoost format.\\\ - n model_config_path: Output path for the internal parameter configuration\ - \ of Booster as a JSON string.\\n starting_model_path: Path for the\ - \ existing trained model to start from.\\n label_column: Column containing\ - \ the label data.\\n num_boost_rounds: Number of boosting iterations.\\\ - n booster_params: Parameters for the booster. See https://xgboost.readthedocs.io/en/latest/parameter.html\\\ - n objective: The learning task and the corresponding learning objective.\\\ - n See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters\\\ - n The most common values are:\\n \"reg:squarederror\"\ - \ - Regression with squared loss (default).\\n \"reg:logistic\"\ - \ - Logistic regression.\\n \"binary:logistic\" - Logistic regression\ - \ for binary classification, output probability.\\n \"binary:logitraw\"\ - \ - Logistic regression for binary classification, output score before logistic\ - \ transformation\\n \"rank:pairwise\" - Use LambdaMART to perform\ - \ pairwise ranking where the pairwise loss is minimized\\n \"\ - rank:ndcg\" - Use LambdaMART to perform list-wise ranking where Normalized\ - \ Discounted Cumulative Gain (NDCG) is maximized\\n\\n Annotations:\\\ - n author: Alexey Volkov ')\n_parser.add_argument(\"\ - --training-data\", dest=\"training_data_path\", type=str, required=True,\ - \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--starting-model\"\ - , dest=\"starting_model_path\", type=str, required=False, default=argparse.SUPPRESS)\n\ - _parser.add_argument(\"--label-column\", dest=\"label_column\", type=int,\ - \ required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--num-iterations\"\ - , dest=\"num_iterations\", type=int, required=False, default=argparse.SUPPRESS)\n\ - _parser.add_argument(\"--booster-params\", dest=\"booster_params\", type=json.loads,\ - \ required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--objective\"\ - , dest=\"objective\", type=str, required=False, default=argparse.SUPPRESS)\n\ - _parser.add_argument(\"--booster\", dest=\"booster\", type=str, required=False,\ - \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--learning-rate\",\ - \ dest=\"learning_rate\", type=float, required=False, default=argparse.SUPPRESS)\n\ - _parser.add_argument(\"--min-split-loss\", dest=\"min_split_loss\", type=float,\ - \ required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--max-depth\"\ - , dest=\"max_depth\", 
type=int, required=False, default=argparse.SUPPRESS)\n\ - _parser.add_argument(\"--model\", dest=\"model_path\", type=_make_parent_dirs_and_return_path,\ - \ required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model-config\"\ - , dest=\"model_config_path\", type=_make_parent_dirs_and_return_path, required=True,\ - \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ - \n_outputs = xgboost_train(**_parsed_args)\n" - image: python:3.7 - exec-xgboost-train-2: - container: - args: - - --training-data - - '{{$.inputs.artifacts[''training_data''].path}}' - - --label-column-name - - '{{$.inputs.parameters[''label_column_name'']}}' - - '{"IfPresent": {"InputName": "starting_model", "Then": ["--starting-model", - "{{$.inputs.artifacts[''starting_model''].path}}"]}}' - - '{"IfPresent": {"InputName": "num_iterations", "Then": ["--num-iterations", - "{{$.inputs.parameters[''num_iterations'']}}"]}}' - - '{"IfPresent": {"InputName": "booster_params", "Then": ["--booster-params", - "{{$.inputs.parameters[''booster_params'']}}"]}}' - - '{"IfPresent": {"InputName": "objective", "Then": ["--objective", "{{$.inputs.parameters[''objective'']}}"]}}' - - '{"IfPresent": {"InputName": "booster", "Then": ["--booster", "{{$.inputs.parameters[''booster'']}}"]}}' - - '{"IfPresent": {"InputName": "learning_rate", "Then": ["--learning-rate", - "{{$.inputs.parameters[''learning_rate'']}}"]}}' - - '{"IfPresent": {"InputName": "min_split_loss", "Then": ["--min-split-loss", - "{{$.inputs.parameters[''min_split_loss'']}}"]}}' - - '{"IfPresent": {"InputName": "max_depth", "Then": ["--max-depth", "{{$.inputs.parameters[''max_depth'']}}"]}}' - - --model - - '{{$.outputs.artifacts[''model''].path}}' - - --model-config - - '{{$.outputs.artifacts[''model_config''].path}}' - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'xgboost==1.1.1' 'pandas==1.0.5' 'pyarrow==0.17.1' || PIP_DISABLE_PIP_VERSION_CHECK=1 - python3 -m pip install --quiet --no-warn-script-location 'xgboost==1.1.1' - 'pandas==1.0.5' 'pyarrow==0.17.1' --user) && "$0" "$@" - - python3 - - -u - - -c - - "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n\ - \ os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return\ - \ file_path\n\ndef xgboost_train(\n training_data_path,\n model_path,\n\ - \ model_config_path,\n label_column_name,\n\n starting_model_path\ - \ = None,\n\n num_iterations = 10,\n booster_params = None,\n\n \ - \ # Booster parameters\n objective = 'reg:squarederror',\n booster\ - \ = 'gbtree',\n learning_rate = 0.3,\n min_split_loss = 0,\n max_depth\ - \ = 6,\n):\n '''Train an XGBoost model.\n\n Args:\n training_data_path:\ - \ Path for the training data in Apache Parquet format.\n model_path:\ - \ Output path for the trained model in binary XGBoost format.\n model_config_path:\ - \ Output path for the internal parameter configuration of Booster as a JSON\ - \ string.\n starting_model_path: Path for the existing trained model\ - \ to start from.\n label_column_name: Name of the column containing\ - \ the label data.\n num_boost_rounds: Number of boosting iterations.\n\ - \ booster_params: Parameters for the booster. 
See https://xgboost.readthedocs.io/en/latest/parameter.html\n\ - \ objective: The learning task and the corresponding learning objective.\n\ - \ See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters\n\ - \ The most common values are:\n \"reg:squarederror\"\ - \ - Regression with squared loss (default).\n \"reg:logistic\"\ - \ - Logistic regression.\n \"binary:logistic\" - Logistic regression\ - \ for binary classification, output probability.\n \"binary:logitraw\"\ - \ - Logistic regression for binary classification, output score before logistic\ - \ transformation\n \"rank:pairwise\" - Use LambdaMART to perform\ - \ pairwise ranking where the pairwise loss is minimized\n \"\ - rank:ndcg\" - Use LambdaMART to perform list-wise ranking where Normalized\ - \ Discounted Cumulative Gain (NDCG) is maximized\n\n Annotations:\n \ - \ author: Alexey Volkov \n '''\n \ - \ import pandas\n import xgboost\n\n # Loading data\n df = pandas.read_parquet(training_data_path)\n\ - \ training_data = xgboost.DMatrix(\n data=df.drop(columns=[label_column_name]),\n\ - \ label=df[[label_column_name]],\n )\n # Training\n booster_params\ - \ = booster_params or {}\n booster_params.setdefault('objective', objective)\n\ - \ booster_params.setdefault('booster', booster)\n booster_params.setdefault('learning_rate',\ - \ learning_rate)\n booster_params.setdefault('min_split_loss', min_split_loss)\n\ - \ booster_params.setdefault('max_depth', max_depth)\n\n starting_model\ - \ = None\n if starting_model_path:\n starting_model = xgboost.Booster(model_file=starting_model_path)\n\ - \n model = xgboost.train(\n params=booster_params,\n dtrain=training_data,\n\ - \ num_boost_round=num_iterations,\n xgb_model=starting_model\n\ - \ )\n\n # Saving the model in binary format\n model.save_model(model_path)\n\ - \n model_config_str = model.save_config()\n with open(model_config_path,\ - \ 'w') as model_config_file:\n model_config_file.write(model_config_str)\n\ - \nimport json\nimport argparse\n_parser = argparse.ArgumentParser(prog='Xgboost\ - \ train', description='Train an XGBoost model.\\n\\n Args:\\n \ - \ training_data_path: Path for the training data in Apache Parquet format.\\\ - n model_path: Output path for the trained model in binary XGBoost\ - \ format.\\n model_config_path: Output path for the internal parameter\ - \ configuration of Booster as a JSON string.\\n starting_model_path:\ - \ Path for the existing trained model to start from.\\n label_column_name:\ - \ Name of the column containing the label data.\\n num_boost_rounds:\ - \ Number of boosting iterations.\\n booster_params: Parameters for\ - \ the booster. 
See https://xgboost.readthedocs.io/en/latest/parameter.html\\\ - n objective: The learning task and the corresponding learning objective.\\\ - n See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters\\\ - n The most common values are:\\n \"reg:squarederror\"\ - \ - Regression with squared loss (default).\\n \"reg:logistic\"\ - \ - Logistic regression.\\n \"binary:logistic\" - Logistic regression\ - \ for binary classification, output probability.\\n \"binary:logitraw\"\ - \ - Logistic regression for binary classification, output score before logistic\ - \ transformation\\n \"rank:pairwise\" - Use LambdaMART to perform\ - \ pairwise ranking where the pairwise loss is minimized\\n \"\ - rank:ndcg\" - Use LambdaMART to perform list-wise ranking where Normalized\ - \ Discounted Cumulative Gain (NDCG) is maximized\\n\\n Annotations:\\\ - n author: Alexey Volkov ')\n_parser.add_argument(\"\ - --training-data\", dest=\"training_data_path\", type=str, required=True,\ - \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--label-column-name\"\ - , dest=\"label_column_name\", type=str, required=True, default=argparse.SUPPRESS)\n\ - _parser.add_argument(\"--starting-model\", dest=\"starting_model_path\"\ - , type=str, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"\ - --num-iterations\", dest=\"num_iterations\", type=int, required=False, default=argparse.SUPPRESS)\n\ - _parser.add_argument(\"--booster-params\", dest=\"booster_params\", type=json.loads,\ - \ required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--objective\"\ - , dest=\"objective\", type=str, required=False, default=argparse.SUPPRESS)\n\ - _parser.add_argument(\"--booster\", dest=\"booster\", type=str, required=False,\ - \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--learning-rate\",\ - \ dest=\"learning_rate\", type=float, required=False, default=argparse.SUPPRESS)\n\ - _parser.add_argument(\"--min-split-loss\", dest=\"min_split_loss\", type=float,\ - \ required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--max-depth\"\ - , dest=\"max_depth\", type=int, required=False, default=argparse.SUPPRESS)\n\ - _parser.add_argument(\"--model\", dest=\"model_path\", type=_make_parent_dirs_and_return_path,\ - \ required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model-config\"\ - , dest=\"model_config_path\", type=_make_parent_dirs_and_return_path, required=True,\ - \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ - \n_outputs = xgboost_train(**_parsed_args)\n" - image: python:3.7 -pipelineInfo: - name: xgboost-sample-pipeline -root: - dag: - tasks: - chicago-taxi-trips-dataset: - cachingOptions: - enableCache: true - componentRef: - name: comp-chicago-taxi-trips-dataset - inputs: - parameters: - limit: - runtimeValue: - constant: 10000.0 - select: - runtimeValue: - constant: tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total - where: - runtimeValue: - constant: trip_start_timestamp >= "2019-01-01" AND trip_start_timestamp - < "2019-02-01" - taskInfo: - name: chicago-taxi-trips-dataset - convert-csv-to-apache-parquet: - cachingOptions: - enableCache: true - componentRef: - name: comp-convert-csv-to-apache-parquet - dependentTasks: - - chicago-taxi-trips-dataset - inputs: - artifacts: - data: - taskOutputArtifact: - outputArtifactKey: table - producerTask: chicago-taxi-trips-dataset - taskInfo: - name: convert-csv-to-apache-parquet - xgboost-predict: - cachingOptions: - 
enableCache: true - componentRef: - name: comp-xgboost-predict - dependentTasks: - - chicago-taxi-trips-dataset - - xgboost-train - inputs: - artifacts: - data: - taskOutputArtifact: - outputArtifactKey: table - producerTask: chicago-taxi-trips-dataset - model: - taskOutputArtifact: - outputArtifactKey: model - producerTask: xgboost-train - parameters: - label_column: - runtimeValue: - constant: 0.0 - taskInfo: - name: xgboost-predict - xgboost-predict-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-xgboost-predict-2 - dependentTasks: - - convert-csv-to-apache-parquet - - xgboost-train-2 - inputs: - artifacts: - data: - taskOutputArtifact: - outputArtifactKey: output_data - producerTask: convert-csv-to-apache-parquet - model: - taskOutputArtifact: - outputArtifactKey: model - producerTask: xgboost-train-2 - parameters: - label_column_name: - runtimeValue: - constant: tips - taskInfo: - name: xgboost-predict-2 - xgboost-predict-3: - cachingOptions: - enableCache: true - componentRef: - name: comp-xgboost-predict-3 - dependentTasks: - - convert-csv-to-apache-parquet - - xgboost-train - inputs: - artifacts: - data: - taskOutputArtifact: - outputArtifactKey: output_data - producerTask: convert-csv-to-apache-parquet - model: - taskOutputArtifact: - outputArtifactKey: model - producerTask: xgboost-train - parameters: - label_column_name: - runtimeValue: - constant: tips - taskInfo: - name: xgboost-predict-3 - xgboost-predict-4: - cachingOptions: - enableCache: true - componentRef: - name: comp-xgboost-predict-4 - dependentTasks: - - chicago-taxi-trips-dataset - - xgboost-train-2 - inputs: - artifacts: - data: - taskOutputArtifact: - outputArtifactKey: table - producerTask: chicago-taxi-trips-dataset - model: - taskOutputArtifact: - outputArtifactKey: model - producerTask: xgboost-train-2 - parameters: - label_column: - runtimeValue: - constant: 0.0 - taskInfo: - name: xgboost-predict-4 - xgboost-train: - cachingOptions: - enableCache: true - componentRef: - name: comp-xgboost-train - dependentTasks: - - chicago-taxi-trips-dataset - inputs: - artifacts: - training_data: - taskOutputArtifact: - outputArtifactKey: table - producerTask: chicago-taxi-trips-dataset - parameters: - label_column: - runtimeValue: - constant: 0.0 - num_iterations: - runtimeValue: - constant: 200.0 - objective: - runtimeValue: - constant: reg:squarederror - taskInfo: - name: xgboost-train - xgboost-train-2: - cachingOptions: - enableCache: true - componentRef: - name: comp-xgboost-train-2 - dependentTasks: - - convert-csv-to-apache-parquet - inputs: - artifacts: - training_data: - taskOutputArtifact: - outputArtifactKey: output_data - producerTask: convert-csv-to-apache-parquet - parameters: - label_column_name: - runtimeValue: - constant: tips - num_iterations: - runtimeValue: - constant: 200.0 - objective: - runtimeValue: - constant: reg:squarederror - taskInfo: - name: xgboost-train-2 -schemaVersion: 2.1.0 -sdkVersion: kfp-2.7.0 diff --git a/sdk/python/test_data/test_data_config.yaml b/sdk/python/test_data/test_data_config.yaml deleted file mode 100644 index 369d4d609de..00000000000 --- a/sdk/python/test_data/test_data_config.yaml +++ /dev/null @@ -1,282 +0,0 @@ -# Copyright 2022 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -pipelines: - test_data_dir: sdk/python/test_data/pipelines - read: true - write: true - test_cases: - - module: pipeline_with_importer - name: my_pipeline - execute: false - - module: pipeline_with_importer_and_gcpc_types - name: my_pipeline - execute: false - - module: pipeline_with_ontology - name: my_pipeline - execute: false - - module: pipeline_with_if_placeholder - name: my_pipeline - execute: false - - module: pipeline_with_concat_placeholder - name: my_pipeline - execute: false - - module: pipeline_with_resource_spec - name: my_pipeline - execute: false - - module: pipeline_with_various_io_types - name: my_pipeline - execute: false - - module: pipeline_with_reused_component - name: my_pipeline - execute: true - - module: pipeline_with_after - name: my_pipeline - execute: true - - module: pipeline_with_condition - name: my_pipeline - execute: true - - module: pipeline_with_nested_conditions - name: my_pipeline - execute: true - - module: pipeline_with_nested_conditions_yaml - name: my_pipeline - execute: true - - module: pipeline_with_loops - name: my_pipeline - execute: true - arguments: { "loop_parameter": ["a", "b"] } - - module: pipeline_with_nested_loops - name: my_pipeline - execute: false - - module: pipeline_with_loops_and_conditions - name: my_pipeline - execute: false - - module: pipeline_with_params_containing_format - name: my_pipeline - execute: false - - module: lightweight_python_functions_pipeline - name: pipeline - execute: true - arguments: { "message": "here is my message" } - - module: lightweight_python_functions_with_outputs - name: pipeline - execute: true - arguments: - { - "first_message": "a", - "second_message": "b", - "first_number": 1, - "second_number": 2, - } - - module: pipeline_with_metrics_outputs - name: my_pipeline - execute: true - - module: pipeline_with_exit_handler - name: my_pipeline - execute: true - expected_state: FAILED - - module: pipeline_with_env - name: my_pipeline - execute: true - - module: component_with_optional_inputs - name: pipeline - execute: true - - module: pipeline_with_placeholders - name: my_pipeline - execute: true - - module: pipeline_with_task_final_status - name: my_pipeline - execute: false - - module: pipeline_with_task_final_status_yaml - name: my_pipeline - execute: false - - module: component_with_pip_index_urls - name: pipeline - execute: true - - module: container_component_with_no_inputs - name: pipeline - execute: true - - module: two_step_pipeline_containerized - name: my_pipeline - execute: true - arguments: { "text": "a" } - - module: pipeline_with_multiple_exit_handlers - name: my_pipeline - execute: true - expected_state: FAILED - - module: pipeline_with_parallelfor_parallelism - name: my_pipeline - execute: false - - module: pipeline_in_pipeline - name: my_pipeline - execute: false - - module: pipeline_in_pipeline_complex - name: my_pipeline - execute: true - - module: pipeline_with_outputs - name: my_pipeline - execute: true - - module: pipeline_in_pipeline_loaded_from_yaml - name: my_pipeline - execute: false - - module: pipeline_as_exit_task - name: my_pipeline - execute: false - - module: 
pipeline_with_google_artifact_type - name: my_pipeline - execute: false - - module: pipeline_with_dynamic_importer_metadata - name: my_pipeline - execute: false - - module: components_with_optional_artifacts - name: pipeline - execute: false - - module: parallelfor_fan_in/parameters_complex - name: math_pipeline - execute: false - - module: parallelfor_fan_in/pipeline_producer_consumer - name: math_pipeline - execute: false - - module: parallelfor_fan_in/parameters_simple - name: math_pipeline - execute: false - - module: parallelfor_fan_in/conditional_producer_and_consumers - name: math_pipeline - execute: false - - module: parallelfor_fan_in/artifacts_simple - name: math_pipeline - execute: false - - module: parallelfor_fan_in/artifacts_complex - name: math_pipeline - execute: false - - module: pipeline_with_task_using_ignore_upstream_failure - name: my_pipeline - execute: false - - module: pipeline_with_metadata_fields - name: dataset_concatenator - execute: false - - module: if_else_with_oneof_artifacts - name: outer_pipeline - execute: false - - module: if_elif_else_complex - name: lucky_number_pipeline - execute: false - - module: if_else_with_oneof_parameters - name: flip_coin_pipeline - execute: false - - module: if_elif_else_with_oneof_parameters - name: outer_pipeline - execute: false - - module: pythonic_artifact_with_single_return - name: make_language_model_pipeline - execute: false - - module: pythonic_artifacts_with_multiple_returns - name: split_datasets_and_return_first - execute: false - - module: pythonic_artifacts_with_list_of_artifacts - name: make_and_join_datasets - execute: false - - module: cross_loop_after_topology - name: my_pipeline - execute: false - - module: pipeline_with_parallelfor_list_artifacts - name: my_pipeline - execute: false -components: - test_data_dir: sdk/python/test_data/components - read: true - write: true - test_cases: - - module: add_numbers - name: add_numbers - execute: true - arguments: { "a": 1, "b": 2 } - - module: component_with_pip_install - name: component_with_pip_install - execute: true - - module: concat_message - name: concat_message - execute: true - arguments: { "message1": "input message", "message2": other message" } - - module: dict_input - name: dict_input - execute: true - arguments: { "struct": { "a": 1, "b": 2 } } - - module: identity - name: identity - execute: true - arguments: { "value": "a" } - - module: input_artifact - name: input_artifact - execute: false - - module: nested_return - name: nested_return - execute: true - - module: output_metrics - name: output_metrics - execute: true - - module: preprocess - name: preprocess - execute: true - arguments: - { - "input_dict_parameter": { "a": 1 }, - "input_list_parameter": [1, 2, 3], - "message": "another message", - } - - module: container_no_input - name: container_no_input - execute: true - - module: container_io - name: container_io - execute: false - - module: container_with_artifact_output - name: container_with_artifact_output - execute: false - - module: container_with_concat_placeholder - name: container_with_concat_placeholder - execute: false - - module: container_with_if_placeholder - name: container_with_if_placeholder - execute: false - - module: container_with_placeholder_in_fstring - name: container_with_placeholder_in_fstring - execute: false - - module: component_with_metadata_fields - name: dataset_joiner - execute: false - - module: component_with_task_final_status - name: exit_comp - execute: false - - module: containerized_python_component - 
name: concat_message - execute: false -v1_components: - test_data_dir: sdk/python/test_data/v1_component_yaml - read: true - write: false - test_cases: - - module: concat_placeholder_component - name: Component with concat placeholder - execute: false - - module: if_placeholder_component - name: Component with optional inputs - execute: false - - module: add_component - name: Add - execute: false - - module: ingestion_component - name: Ingestion - execute: false diff --git a/sdk/runtime_tests/test_data/pipeline_with_task_final_status.py b/sdk/runtime_tests/test_data/pipeline_with_task_final_status.py deleted file mode 100644 index 27d418a3334..00000000000 --- a/sdk/runtime_tests/test_data/pipeline_with_task_final_status.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright 2022 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Pipeline using ExitHandler with PipelineTaskFinalStatus.""" - -from kfp import compiler -from kfp import dsl -from kfp.dsl import component -from kfp.dsl import PipelineTaskFinalStatus - - -@component -def exit_op(user_input: str, status: PipelineTaskFinalStatus): - """Checks pipeline run status.""" - print('Pipeline status: ', status.state) - print('Job resource name: ', status.pipeline_job_resource_name) - print('Pipeline task name: ', status.pipeline_task_name) - print('Error code: ', status.error_code) - print('Error message: ', status.error_message) - - -@component -def print_op(message: str): - """Prints a message.""" - print(message) - - -@component -def fail_op(message: str): - """Fails.""" - import sys - print(message) - sys.exit(1) - - -@dsl.pipeline(name='pipeline-with-task-final-status') -def my_pipeline(message: str = 'Hello World!'): - exit_task = exit_op(user_input=message) - - with dsl.ExitHandler(exit_task, name='my-pipeline'): - print_op(message=message) - fail_op(message='Task failed.') - - -if __name__ == '__main__': - compiler.Compiler().compile( - pipeline_func=my_pipeline, - package_path=__file__.replace('.py', '.yaml')) diff --git a/test/frontend-integration-test/package-lock.json b/test/frontend-integration-test/package-lock.json index 6843660caa2..17c3bf9857a 100644 --- a/test/frontend-integration-test/package-lock.json +++ b/test/frontend-integration-test/package-lock.json @@ -981,9 +981,10 @@ } }, "node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0" } @@ -2167,9 +2168,10 @@ } }, "node_modules/glob/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - 
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -2200,9 +2202,10 @@ } }, "node_modules/globule/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -2988,9 +2991,10 @@ } }, "node_modules/jake/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -4268,9 +4272,10 @@ } }, "node_modules/recursive-readdir/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" diff --git a/test/install-argo-cli.sh b/test/install-argo-cli.sh index d723dc30e6b..05098517212 100755 --- a/test/install-argo-cli.sh +++ b/test/install-argo-cli.sh @@ -19,7 +19,7 @@ set -ex DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null && pwd)" REPO_ROOT="${DIR}/.." ARGO_VERSION="$(cat ${REPO_ROOT}/third_party/argo/VERSION)" -# ARGO_VERSION=v3.5.14 +# ARGO_VERSION=v3.6.7 OS=${OS:-"linux-amd64"} # if argo is not installed diff --git a/test/kfp-functional-test/README.md b/test/kfp-functional-test/README.md deleted file mode 100644 index 2387cc7d775..00000000000 --- a/test/kfp-functional-test/README.md +++ /dev/null @@ -1,84 +0,0 @@ -# kfp-functional-test - -## Updating python dependencies - -[pip-tools](https://github.com/jazzband/pip-tools) is used to manage python -dependencies. To update dependencies: -1. edit [requirements.in](requirements.in) -1. run - - ```bash - pip-compile requirements.in - ``` - to update and pin the transitive dependencies. - -## Run kfp-functional-test in local - -### Via python (Using Kind) - -1. Set up a Kind cluster: - - ```bash - kind create cluster --name kfp-functional-test-cluster - ``` - -2. 
Deploy Kubeflow Pipelines to the Kind cluster: - - ```bash - kubectl apply -k manifests/ - ``` - -3. Ensure the cluster is ready: - - ```bash - kubectl cluster-info --context kind-kfp-functional-test-cluster - ``` - -4. Run the functional test: - - ```bash - cd {YOUR_ROOT_DIRECTORY_OF_KUBEFLOW_PIPELINES} - python3 ./test/kfp-functional-test/run_kfp_functional_test.py --host "http://localhost:8080" - ``` - -### Via Kind - -1. Set up a Kind cluster: - - ```bash - kind create cluster --name kfp-functional-test-cluster - ``` - -2. Deploy Kubeflow Pipelines to the Kind cluster: - - ```bash - kubectl apply -k manifests/ - ``` - -3. Ensure the cluster is ready: - - ```bash - kubectl cluster-info --context kind-kfp-functional-test-cluster - ``` - -4. Start a container and run the functional test: - - Using Docker: - ```bash - docker run -it -v $(pwd):/tmp/src -w /tmp/src python:3.9-slim\ - /tmp/src/test/kfp-functional-test/kfp-functional-test.sh --host "http://localhost:8080" - ``` - - Using Podman: - ```bash - podman run -it -v $(pwd):/tmp/src:Z -w /tmp/src python:3.9-slim \ - /tmp/src/test/kfp-functional-test/kfp-functional-test.sh --host "http://localhost:8080" - ``` - - -## Periodic Functional Tests with GitHub Actions - -A periodic GitHub Actions workflow is configured to automatically run functional tests daily. The workflow ensures consistent validation of the Kubeflow Pipelines functionality. - -For more details, see the [Periodic Functional Tests GitHub Actions workflow](https://github.com/kubeflow/pipelines/blob/master/.github/workflows/periodic.yml) - diff --git a/test/kfp-functional-test/constants.py b/test/kfp-functional-test/constants.py deleted file mode 100644 index 145be20dcc7..00000000000 --- a/test/kfp-functional-test/constants.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2020 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Common test params -RUN_TIMEOUT_SECONDS = 1800 -DEFAULT_USER_NAMESPACE = 'kubeflow-user-example-com' diff --git a/test/kfp-functional-test/kfp-functional-test.sh b/test/kfp-functional-test/kfp-functional-test.sh deleted file mode 100755 index fde9ec5c1e2..00000000000 --- a/test/kfp-functional-test/kfp-functional-test.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/sh -ex -# Copyright 2023 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -source_root="$(pwd)" -python3 -m pip install -r "${source_root}/test/kfp-functional-test/requirements.txt" -python3 "${source_root}/test/kfp-functional-test/run_kfp_functional_test.py" --host "http://localhost:8888" # host configured in workflow file diff --git a/test/kfp-functional-test/requirements.in b/test/kfp-functional-test/requirements.in deleted file mode 100644 index 1235b3932c2..00000000000 --- a/test/kfp-functional-test/requirements.in +++ /dev/null @@ -1 +0,0 @@ -kfp diff --git a/test/kfp-functional-test/requirements.txt b/test/kfp-functional-test/requirements.txt deleted file mode 100644 index 6c7eb5e6b68..00000000000 --- a/test/kfp-functional-test/requirements.txt +++ /dev/null @@ -1,109 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile requirements.in -# -cachetools==5.3.3 - # via google-auth -certifi==2024.2.2 - # via - # kfp-server-api - # kubernetes - # requests -charset-normalizer==3.3.2 - # via requests -click==8.1.7 - # via kfp -docstring-parser==0.16 - # via kfp -google-api-core==2.19.0 - # via - # google-cloud-core - # google-cloud-storage - # kfp -google-auth==2.29.0 - # via - # google-api-core - # google-cloud-core - # google-cloud-storage - # kfp - # kubernetes -google-cloud-core==2.4.1 - # via google-cloud-storage -google-cloud-storage==2.16.0 - # via kfp -google-crc32c==1.5.0 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.7.0 - # via google-cloud-storage -googleapis-common-protos==1.63.0 - # via google-api-core -idna==3.7 - # via requests -kfp==2.7.0 - # via -r requirements.in -kfp-pipeline-spec==0.3.0 - # via kfp -kfp-server-api==2.0.5 - # via kfp -kubernetes==26.1.0 - # via kfp -oauthlib==3.2.2 - # via requests-oauthlib -proto-plus==1.23.0 - # via google-api-core -protobuf==4.25.3 - # via - # google-api-core - # googleapis-common-protos - # kfp - # kfp-pipeline-spec - # proto-plus -pyasn1==0.6.0 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.0 - # via google-auth -python-dateutil==2.9.0.post0 - # via - # kfp-server-api - # kubernetes -pyyaml==6.0.1 - # via - # kfp - # kubernetes -requests==2.32.2 - # via - # google-api-core - # google-cloud-storage - # kubernetes - # requests-oauthlib - # requests-toolbelt -requests-oauthlib==2.0.0 - # via kubernetes -requests-toolbelt==0.10.1 - # via kfp -rsa==4.9 - # via google-auth -six==1.16.0 - # via - # kfp-server-api - # kubernetes - # python-dateutil -tabulate==0.9.0 - # via kfp -urllib3==1.26.18 - # via - # kfp - # kfp-server-api - # kubernetes - # requests -websocket-client==1.8.0 - # via kubernetes - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/test/kfp-functional-test/run_kfp_functional_test.py b/test/kfp-functional-test/run_kfp_functional_test.py deleted file mode 100644 index 00f031f3a48..00000000000 --- a/test/kfp-functional-test/run_kfp_functional_test.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright 2023 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse -from datetime import datetime -import random -import string - -import constants -import kfp -import kfp.dsl as dsl - - -@dsl.container_component -def say_hello(name: str): - return dsl.ContainerSpec( - image='library/bash:4.4.23', command=['echo'], args=[f'Hello, {name}!']) - - -@dsl.pipeline(name='My first pipeline', description='A hello pipeline.') -def hello_pipeline(name: str): - say_hello(name=name) - - -# Parsing the input arguments -def parse_arguments(): - """Parse command line arguments.""" - - parser = argparse.ArgumentParser() - parser.add_argument( - '--host', type=str, required=True, help='The host of kfp.') - args = parser.parse_args() - return args - - -def main(): - args = parse_arguments() - - ###### Initialization ###### - client = kfp.Client(args.host) - print('host is {}'.format(args.host)) - - ###### Create Experiment ###### - print('Creating experiment') - experiment_name = 'kfp-functional-e2e-expriment-' + ''.join( - random.choices(string.ascii_uppercase + string.digits, k=5)) - response = client.create_experiment( - experiment_name, namespace=constants.DEFAULT_USER_NAMESPACE) - experiment_id = response.experiment_id - print('Experiment with id {} created'.format(experiment_id)) - try: - ###### Create Run from Pipeline Func ###### - print('Creating Run from Pipeline Func') - response = client.create_run_from_pipeline_func( - hello_pipeline, - arguments={'name': 'World'}, - experiment_name=experiment_name, - namespace=constants.DEFAULT_USER_NAMESPACE) - run_id = response.run_id - print('Run {} created'.format(run_id)) - - ###### Monitor Run ###### - start_time = datetime.now() - response = client.wait_for_run_completion(run_id, - constants.RUN_TIMEOUT_SECONDS) - success = (response.state.lower() == 'succeeded') - end_time = datetime.now() - elapsed_time = (end_time - start_time).seconds - if success: - print('Run succeeded in {} seconds'.format(elapsed_time)) - else: - print("Run can't complete in {} seconds".format(elapsed_time)) - finally: - ###### Archive Experiment ###### - print('Archiving experiment') - client.archive_experiment(experiment_id) - print('Archived experiment with id {}'.format(experiment_id)) - - -if __name__ == '__main__': - main() diff --git a/test/kfp-kubernetes-execution-tests/requirements.txt b/test/kfp-kubernetes-execution-tests/requirements.txt deleted file mode 100644 index 2552b2cbb65..00000000000 --- a/test/kfp-kubernetes-execution-tests/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -sdk/python -pytest==8.3.2 -pytest-asyncio-cooperative==0.37.0 diff --git a/test/kfp-kubernetes-execution-tests/sdk_execution_tests.py b/test/kfp-kubernetes-execution-tests/sdk_execution_tests.py deleted file mode 100644 index 5ba08a47b5d..00000000000 --- a/test/kfp-kubernetes-execution-tests/sdk_execution_tests.py +++ /dev/null @@ -1,142 +0,0 @@ -# Copyright 2023 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import asyncio -import dataclasses -import functools -import os -import sys -from typing import Any, Dict, List, Tuple - -from kfp import client -from kfp import dsl -import kfp_server_api -import pytest -import yaml - -KFP_ENDPOINT = os.environ['KFP_ENDPOINT'] -TIMEOUT_SECONDS = os.environ['TIMEOUT_SECONDS'] -CURRENT_DIR = os.path.abspath(os.path.dirname(__file__)) -PROJECT_ROOT = os.path.abspath( - os.path.join(CURRENT_DIR, *([os.path.pardir] * 2))) -CONFIG_PATH = os.path.join( - PROJECT_ROOT, - 'kubernetes_platform', - 'python', - 'test', - 'snapshot', - 'test_data_config.yaml', -) - -kfp_client = client.Client(host=KFP_ENDPOINT) - - -@dataclasses.dataclass -class TestCase: - name: str - module_path: str - yaml_path: str - function_name: str - arguments: Dict[str, Any] - - -def create_test_case_parameters() -> List[TestCase]: - parameters: List[TestCase] = [] - with open(CONFIG_PATH) as f: - config = yaml.safe_load(f) - test_data_dir = os.path.join( - PROJECT_ROOT, - 'kubernetes_platform', - 'python', - 'test', - 'snapshot', - 'data', - ) - - parameters.extend( - TestCase( - name=test_case['name'] + '-' + test_case['module'], - module_path=os.path.join(test_data_dir, - f'{test_case["module"]}.py'), - yaml_path=os.path.join(test_data_dir, - f'{test_case["module"]}.yaml'), - function_name=test_case['name'], - arguments=test_case.get('arguments'), - ) for test_case in config['test_cases']) - - return parameters - - -def wait( - run_result: client.client.RunPipelineResult -) -> kfp_server_api.V2beta1Run: - return kfp_client.wait_for_run_completion( - run_id=run_result.run_id, timeout=int(TIMEOUT_SECONDS)) - - -def import_obj_from_file(python_path: str, obj_name: str) -> Any: - sys.path.insert(0, os.path.dirname(python_path)) - module_name = os.path.splitext(os.path.split(python_path)[1])[0] - module = __import__(module_name, fromlist=[obj_name]) - if not hasattr(module, obj_name): - raise ValueError( - f'Object "{obj_name}" not found in module {python_path}.') - return getattr(module, obj_name) - - -def run(test_case: TestCase) -> Tuple[str, client.client.RunPipelineResult]: - full_path = os.path.join(PROJECT_ROOT, test_case.module_path) - pipeline_func = import_obj_from_file(full_path, test_case.function_name) - run_result = kfp_client.create_run_from_pipeline_func( - pipeline_func, - enable_caching=True, - arguments=test_case.arguments, - ) - run_url = f'{KFP_ENDPOINT}/#/runs/details/{run_result.run_id}' - print( - f'- Created run {test_case.name}\n\tModule: {test_case.module_path}\n\tURL: {run_url}\n' - ) - return run_url, run_result - - -def get_kfp_package_path() -> str: - repo_name = os.environ.get('REPO_NAME', 'kubeflow/pipelines') - if os.environ.get('PULL_NUMBER'): - path = f'git+https://github.com/{repo_name}.git@refs/pull/{os.environ["PULL_NUMBER"]}/merge#subdirectory=sdk/python' - else: - path = f'git+https://github.com/{repo_name}.git@master#subdirectory=sdk/python' - print(f'Using the following KFP package path for tests: {path}') - return path - - -dsl.component = functools.partial( - dsl.component, kfp_package_path=get_kfp_package_path()) - - -@pytest.mark.asyncio_cooperative -@pytest.mark.parametrize('test_case', create_test_case_parameters()) -async def test(test_case: TestCase) -> None: - """Asynchronously runs all samples and test that they succeed.""" - event_loop = asyncio.get_running_loop() - try: - run_url, run_result = run(test_case) - except Exception as e: - raise RuntimeError( - f'Error triggering pipeline {test_case.name}.') from e - - api_run = await 
event_loop.run_in_executor(None, wait, run_result) - assert api_run.state == 'SUCCEEDED', f'Pipeline {test_case.name} ended with incorrect status: {api_run.state}. More info: {run_url}' - - -if __name__ == '__main__': - pytest.main() diff --git a/test/presubmit-backend-test.sh b/test/presubmit-backend-test.sh index 7bf33dfb3ca..04b0e4e5ce7 100755 --- a/test/presubmit-backend-test.sh +++ b/test/presubmit-backend-test.sh @@ -27,4 +27,5 @@ go mod tidy git diff --exit-code -- go.mod go.sum || (echo "go modules are not tidy, run 'go mod tidy'." && exit 1) # 2. Run tests in the backend directory -go test -v -cover ./backend/... +# shellcheck disable=SC2046 +GIT_REPO="$GIT_REPO" GIT_BRANCH="$GIT_BRANCH" go test -v -cover $(go list ./backend/... | grep -v backend/test/v2/api | grep -v backend/test/compiler | grep -v backend/test/end2end | grep -v backend/test/integration | grep -v backend/test/v2/integration | grep -v backend/test/initialization) diff --git a/test/presubmit-component-yaml.sh b/test/presubmit-component-yaml.sh index 3772e6c5137..68facd9e9ab 100755 --- a/test/presubmit-component-yaml.sh +++ b/test/presubmit-component-yaml.sh @@ -15,6 +15,8 @@ source_root=$(pwd) +python3 -m pip install pytest +python3 -m pip install pytest-asyncio-cooperative==0.37.0 python3 -m pip install --upgrade pip python3 -m pip install sdk/python apt-get update && apt-get install -y protobuf-compiler diff --git a/test/presubmit-test-kfp-runtime-code.sh b/test/presubmit-test-kfp-runtime-code.sh deleted file mode 100755 index 3e1196c647b..00000000000 --- a/test/presubmit-test-kfp-runtime-code.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash -ex -# Copyright 2023 Kubeflow Pipelines contributors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -ex -source_root=$(pwd) - -pip install --upgrade pip -pip install pyyaml -pip install $(grep 'absl-py==' sdk/python/requirements-dev.txt) - -# precautionarilty uninstall typing-extensions, in case any of the test libs -# installed require this dep. we want to test that the kfp sdk installs it, so -# it cannot be present in the environment prior to test execution. -# we'd rather tests fail to execute (false positive failure) because a test -# lib was missing its dependency on typing-extensions than get a false -# negative from the actual kfp sdk test because typing-extensions was already -# present in the environment. -pip uninstall typing-extensions -y - -# run with unittest because pytest requires typing-extensions -python -m unittest discover -s sdk/runtime_tests -p '*_test.py' diff --git a/test/presubmit-tests-sdk-client.sh b/test/presubmit-tests-sdk-client.sh new file mode 100755 index 00000000000..bc9574d555c --- /dev/null +++ b/test/presubmit-tests-sdk-client.sh @@ -0,0 +1,47 @@ +#!/bin/bash -ex +# Copyright 2020 Kubeflow Pipelines contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +source_root=$(pwd) +SETUP_ENV="${SETUP_ENV:-true}" + +if [ "${SETUP_ENV}" = "true" ]; then + # Create a virtual environment and activate it + python3 -m venv venv + source venv/bin/activate + + python3 -m pip install --upgrade pip + python3 -m pip install -r sdk/python/requirements.txt + python3 -m pip install -r sdk/python/requirements-dev.txt + python3 -m pip install setuptools + python3 -m pip install wheel==0.42.0 + python3 -m pip install pytest + python3 -m pip install pytest-cov + python3 -m pip install --upgrade protobuf + python3 -m pip install sdk/python + + # regenerate the kfp-pipeline-spec + cd api/ + make clean python + cd .. + # install the local kfp-pipeline-spec + python3 -m pip install -I api/v2alpha1/python +fi + +python -m pytest sdk/python/test/client/ -v -s -m client --cov=kfp + +if [ "${SETUP_ENV}" = "true" ]; then + # Deactivate the virtual environment + deactivate +fi diff --git a/test/presubmit-tests-sdk-unit.sh b/test/presubmit-tests-sdk-unit.sh new file mode 100755 index 00000000000..00b88fdf725 --- /dev/null +++ b/test/presubmit-tests-sdk-unit.sh @@ -0,0 +1,46 @@ +#!/bin/bash -ex +# Copyright 2020 Kubeflow Pipelines contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +source_root=$(pwd) +SETUP_ENV="${SETUP_ENV:-true}" + +if [ "${SETUP_ENV}" = "true" ]; then + # Create a virtual environment and activate it + python3 -m venv venv + source venv/bin/activate + + python3 -m pip install --upgrade pip + python3 -m pip install -r sdk/python/requirements.txt + python3 -m pip install -r sdk/python/requirements-dev.txt + python3 -m pip install setuptools + python3 -m pip install wheel==0.42.0 + python3 -m pip install pytest-cov + python3 -m pip install --upgrade protobuf + python3 -m pip install sdk/python + + # regenerate the kfp-pipeline-spec + cd api/ + make clean python + cd .. + # install the local kfp-pipeline-spec + python3 -m pip install -I api/v2alpha1/python +fi + +pytest -v -s sdk/python/kfp --cov=kfp + +if [ "${SETUP_ENV}" = "true" ]; then + # Deactivate the virtual environment + deactivate +fi \ No newline at end of file diff --git a/test/presubmit-tests-sdk.sh b/test/presubmit-tests-sdk.sh index 27ef0def843..01cd008d67c 100755 --- a/test/presubmit-tests-sdk.sh +++ b/test/presubmit-tests-sdk.sh @@ -14,31 +14,36 @@ # limitations under the License. 
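The two new scripts above lean on pytest markers to carve up the SDK presubmits: presubmit-tests-sdk-client.sh runs only tests marked client (python -m pytest ... -m client), presubmit-tests-sdk-unit.sh runs the plain unit suite, and the reworked presubmit-tests-sdk.sh below selects regression-marked tests. A rough sketch of what a marker-gated test module can look like; the test body and marker descriptions are illustrative assumptions, not code from the repo.

# Illustrative marker-gated test: `pytest -m client` (as invoked by
# presubmit-tests-sdk-client.sh) selects it, `pytest -m "not client"` skips it.
import pytest

pytestmark = pytest.mark.client  # applies the marker to every test in this module


def test_healthz_string_roundtrip():
    # Placeholder assertion standing in for a real call against a live KFP API server.
    assert 'healthz'.upper() == 'HEALTHZ'

To keep pytest from warning about unknown marks, the client and regression markers would normally be registered once in the SDK's pytest configuration, for example markers = ["client: needs a live KFP API server", "regression: slow regression suite"] under [tool.pytest.ini_options]; where that registration lives in this repo is not shown in the diff.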
source_root=$(pwd) +SETUP_ENV="${SETUP_ENV:-true}" -# Create a virtual environment and activate it -python3 -m venv venv -source venv/bin/activate +if [ "${SETUP_ENV}" = "true" ]; then + # Create a virtual environment and activate it + python3 -m venv venv + source venv/bin/activate -python3 -m pip install --upgrade pip -python3 -m pip install -r sdk/python/requirements.txt -python3 -m pip install -r sdk/python/requirements-dev.txt -python3 -m pip install setuptools -python3 -m pip install coveralls==1.9.2 -python3 -m pip install --upgrade protobuf + python3 -m pip install --upgrade pip + python3 -m pip install -r sdk/python/requirements.txt + python3 -m pip install -r sdk/python/requirements-dev.txt + python3 -m pip install setuptools + python3 -m pip install wheel==0.42.0 + python3 -m pip install pytest-cov + python3 -m pip install pytest + python3 -m pip install google_cloud_pipeline_components + python3 -m pip install docker + python3 -m pip install --upgrade protobuf + python3 -m pip install sdk/python -python3 -m pip install sdk/python + # regenerate the kfp-pipeline-spec + cd api/ + make clean python + cd .. + # install the local kfp-pipeline-spec + python3 -m pip install -I api/v2alpha1/python +fi -pytest sdk/python/kfp --cov=kfp +python -m pytest sdk/python/test -v -s -m regression --cov=kfp -set +x -# export COVERALLS_REPO_TOKEN=$(gsutil cat gs://ml-pipeline-test-keys/coveralls_repo_token) -set -x -REPO_NAME="${REPO_NAME:-kubeflow/pipelines}" -REPO_BASE="https://github.com/${REPO_NAME}" -export COVERALLS_SERVICE_NAME="prow" -export COVERALLS_SERVICE_JOB_ID=$PROW_JOB_ID -export CI_PULL_REQUEST="$REPO_BASE/pull/$PULL_NUMBER" -# coveralls - -# Deactivate the virtual environment -deactivate +if [ "${SETUP_ENV}" = "true" ]; then + # Deactivate the virtual environment + deactivate +fi diff --git a/test/presubmit-tests-tfx.sh b/test/presubmit-tests-tfx.sh deleted file mode 100755 index e26a8f9fa55..00000000000 --- a/test/presubmit-tests-tfx.sh +++ /dev/null @@ -1,61 +0,0 @@ -#!/bin/bash -ex -# Copyright 2020 Kubeflow Pipelines contributors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -source_root=$(pwd) - -# TODO(#5051) Unpin pip version once we figure out how to make the new dependency resolver in pip 20.3+ work in our case. 
-python3 -m pip install --upgrade pip==20.2.3 -# TODO(#7142): remove future -python3 -m pip install --upgrade future==0.18.2 -# TODO: unpin google-cloud-bigquery once TFX revert https://github.com/tensorflow/tfx/commit/f8c1dea2095197ceda60e1c4d67c4c90fc17ed44 -python3 -m pip install --upgrade google-cloud-bigquery==1.28.0 -python3 -m pip install -r "$source_root/sdk/python/requirements.txt" -# Additional dependencies -#pip3 install coverage==4.5.4 coveralls==1.9.2 six>=1.13.0 -# Sample test infra dependencies -pip3 install minio -pip3 install junit_xml -# Using Argo to lint all compiled workflows -"${source_root}/test/install-argo-cli.sh" - -python3 -m pip install sdk/python - -# Test against TFX -# Compile and setup bazel for compiling the protos -# Instruction from https://docs.bazel.build/versions/master/install-ubuntu.html -curl -sSL https://github.com/bazelbuild/bazel/releases/download/3.7.2/bazel-3.7.2-installer-linux-x86_64.sh -o bazel_installer.sh -chmod +x bazel_installer.sh -./bazel_installer.sh - -# Install TFX from head -cd $source_root -# TODO(#6906): unpin release branch -git clone --branch r1.4.0 --depth 1 https://github.com/tensorflow/tfx.git -cd $source_root/tfx -python3 -m pip install .[test] --upgrade \ - --extra-index-url https://pypi-nightly.tensorflow.org/simple - -# KFP-related tests -python3 $source_root/tfx/tfx/orchestration/kubeflow/kubeflow_dag_runner_test.py -python3 $source_root/tfx/tfx/orchestration/kubeflow/base_component_test.py -python3 $source_root/tfx/tfx/orchestration/kubeflow/v2/compiler_utils_test.py -python3 $source_root/tfx/tfx/orchestration/kubeflow/v2/kubeflow_v2_dag_runner_test.py -python3 $source_root/tfx/tfx/orchestration/kubeflow/v2/parameter_utils_test.py -python3 $source_root/tfx/tfx/orchestration/kubeflow/v2/pipeline_builder_test.py -python3 $source_root/tfx/tfx/orchestration/kubeflow/v2/step_builder_test.py -python3 $source_root/tfx/tfx/orchestration/kubeflow/v2/container/kubeflow_v2_entrypoint_utils_test.py -python3 $source_root/tfx/tfx/orchestration/kubeflow/v2/container/kubeflow_v2_run_executor_test.py -python3 $source_root/tfx/tfx/orchestration/kubeflow/v2/file_based_example_gen/driver_test.py -python3 $source_root/tfx/tfx/examples/penguin/penguin_pipeline_kubeflow_test.py diff --git a/test/release/Dockerfile.release b/test/release/Dockerfile.release index 51522f8fc0e..48d9e13de9b 100644 --- a/test/release/Dockerfile.release +++ b/test/release/Dockerfile.release @@ -14,11 +14,13 @@ # Based on KFP backend api client generator dockerfile # Keep in sync with the version used in backend/api/Makefile -FROM ghcr.io/kubeflow/kfp-api-generator:1.1 +# Allow overriding the base image tag at build time +ARG BASE_IMAGE=ghcr.io/kubeflow/kfp-api-generator:master +FROM ${BASE_IMAGE} # install nvm & node 12 # Reference: https://stackoverflow.com/a/28390848 -ENV NODE_VERSION 12.21.0 +ENV NODE_VERSION=12.21.0 ENV NVM_DIR=/usr/local/nvm RUN mkdir -p $NVM_DIR && \ curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash && \ @@ -26,8 +28,9 @@ RUN mkdir -p $NVM_DIR && \ nvm install $NODE_VERSION && \ nvm alias default $NODE_VERSION && \ nvm use default -ENV NODE_PATH $NVM_DIR/versions/node/v$NODE_VERSION/lib/node_modules -ENV PATH $NVM_DIR/versions/node/v$NODE_VERSION/bin:$PATH +ENV NODE_PATH=$NVM_DIR/versions/node/v$NODE_VERSION/lib/node_modules +ENV PATH=$NVM_DIR/versions/node/v$NODE_VERSION/bin:$PATH +ENV GIT_CLIFF_VERSION=2.10.0 # install java==11 python==3 RUN apt-get update \ @@ -45,3 +48,6 @@ RUN chmod -R 777 $NVM_DIR && \ # 
Configure npm cache location RUN npm config set cache /tmp/.npm --global + +RUN curl -L https://github.com/orhun/git-cliff/releases/download/v${GIT_CLIFF_VERSION}/git-cliff-${GIT_CLIFF_VERSION}-x86_64-unknown-linux-gnu.tar.gz \ + | tar -xz --strip-components=1 -C /usr/local/bin git-cliff-${GIT_CLIFF_VERSION}/git-cliff diff --git a/test/release/Makefile b/test/release/Makefile index 0a5e83e57cb..d8e5ab13d5d 100644 --- a/test/release/Makefile +++ b/test/release/Makefile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -REMOTE=ghcr.io/kubeflow/kfp-release:1.1 +REMOTE=ghcr.io/kubeflow/kfp-release:master CONTAINER_ENGINE ?= docker diff --git a/test/release/bump-version-docker.sh b/test/release/bump-version-docker.sh index b7f6bf7ad68..a394a94c0fb 100755 --- a/test/release/bump-version-docker.sh +++ b/test/release/bump-version-docker.sh @@ -31,7 +31,7 @@ if [[ -z "$TAG_NAME" ]]; then fi pushd "${REPO_ROOT}" -RELEASE_IMAGE=${RELEASE_IMAGE:-ghcr.io/kubeflow/kfp-release:1.1} +RELEASE_IMAGE=${RELEASE_IMAGE:-ghcr.io/kubeflow/kfp-release:master} docker run -it --rm \ --user $(id -u):$(id -g) \ --mount type=bind,source="$(pwd)",target=/go/src/github.com/kubeflow/pipelines \ diff --git a/test/release/bump-version-in-place.sh b/test/release/bump-version-in-place.sh index 3ae8339d097..a3b4c0ee106 100755 --- a/test/release/bump-version-in-place.sh +++ b/test/release/bump-version-in-place.sh @@ -33,11 +33,7 @@ fi cd "$REPO_ROOT" "$DIR/check-release-needed-tools.sh" -npm ci -npm run changelog -# Change github issue/PR references like #123 to real urls in markdown. -# The issues must have a " " or a "(" before it to avoid already converted issues like [\#123](url...). -sed -i.bak -e 's|\([ (]\)#\([0-9]\+\)|\1[\\#\2](https://github.com/kubeflow/pipelines/issues/\2)|g' "$REPO_ROOT/CHANGELOG.md" +git-cliff -c cliff.toml --unreleased --tag ${TAG_NAME} --prepend CHANGELOG.md "$REPO_ROOT/manifests/gcp_marketplace/hack/release.sh" $TAG_NAME "$REPO_ROOT/manifests/kustomize/hack/release.sh" $TAG_NAME diff --git a/test/release/release.sh b/test/release/release.sh index 2e84eddb128..51580b03ec4 100755 --- a/test/release/release.sh +++ b/test/release/release.sh @@ -36,6 +36,33 @@ fi cd "$clone_dir" git checkout "$BRANCH" +echo "Deriving release branch name from VERSION file (drop patch, prepend release-)." +# VERSION may be like 2.14.3 or 2.15.0-rc.1. We only care about MAJOR.MINOR. +VERSION_FILE_CONTENT="$(cat VERSION)" +VERSION_CORE="${VERSION_FILE_CONTENT%%-*}" +MAJOR_MINOR="$(echo "$VERSION_CORE" | awk -F. 
'{print $1"."$2}')" +RELEASE_BRANCH_FROM_VERSION="release-${MAJOR_MINOR}" + +echo "Will update image references to tag: ${RELEASE_BRANCH_FROM_VERSION}" + +# Update image tag references used by client generation and tooling to the release branch tag +sed -i -E "s#^(PREBUILT_REMOTE_IMAGE=ghcr.io/kubeflow/kfp-api-generator:).*#\\1${RELEASE_BRANCH_FROM_VERSION}#" backend/api/Makefile +sed -i -E "s#^(RELEASE_IMAGE=ghcr.io/kubeflow/kfp-release:).*#\\1${RELEASE_BRANCH_FROM_VERSION}#" backend/api/Makefile +sed -i -E "s#^(PREBUILT_REMOTE_IMAGE=ghcr.io/kubeflow/kfp-api-generator:).*#\\1${RELEASE_BRANCH_FROM_VERSION}#" api/Makefile +sed -i -E "s#^(PREBUILT_REMOTE_IMAGE=ghcr.io/kubeflow/kfp-api-generator:).*#\\1${RELEASE_BRANCH_FROM_VERSION}#" sdk/Makefile +sed -i -E "s#^(PREBUILT_REMOTE_IMAGE=ghcr.io/kubeflow/kfp-api-generator:).*#\\1${RELEASE_BRANCH_FROM_VERSION}#" kubernetes_platform/Makefile +# Keep release tools Makefile consistent as well +sed -i -E "s#^(REMOTE=ghcr.io/kubeflow/kfp-release:).*#\\1${RELEASE_BRANCH_FROM_VERSION}#" test/release/Makefile + +# Update the release tools Dockerfile base image tag to align with the branch +sed -i -E "s#^(FROM ghcr.io/kubeflow/kfp-api-generator:).*#\\1${RELEASE_BRANCH_FROM_VERSION}#" test/release/Dockerfile.release + +# Update default RELEASE_IMAGE tag in bump-version-docker.sh to this branch +sed -i -E "s#^(RELEASE_IMAGE=\$\{RELEASE_IMAGE:-ghcr.io/kubeflow/kfp-release:).*#\\1${RELEASE_BRANCH_FROM_VERSION}}#" test/release/bump-version-docker.sh + +# Ensure the release bump container uses the correct tag for this branch +export RELEASE_IMAGE="ghcr.io/kubeflow/kfp-release:${RELEASE_BRANCH_FROM_VERSION}" + echo "Preparing local git tags used by changelog generation." # tags with "-" are pre-releases, e.g. 1.0.0-rc.1 if [[ "$TAG" =~ "-" ]]; then @@ -52,9 +79,8 @@ fi echo "Running the bump version script in cloned repo" echo -n "$TAG" > ./VERSION -PREBUILT_REMOTE_IMAGE=ghcr.io/kubeflow/kfp-release:1.1 pushd ./test/release -make release-in-place +RELEASE_IMAGE="$RELEASE_IMAGE" make release-in-place popd echo "Checking in the version bump changes" diff --git a/test/sample-test/README.md b/test/sample-test/README.md deleted file mode 100644 index 5736145afa3..00000000000 --- a/test/sample-test/README.md +++ /dev/null @@ -1,18 +0,0 @@ -# sample-test - -## Updating python dependencies - -[pip-tools](https://github.com/jazzband/pip-tools) is used to manage python -dependencies. To update dependencies: - -1. (optional) edit [requirements.in](requirements.in) -1. run - - ```bash - ./hack/update_requirements.sh - ``` - - to update and pin the transitive dependencies. - -Some dependencies are resolved at the time running this command, so without editing -requirements.in, the result will still change over time. diff --git a/test/sample-test/check_notebook_results.py b/test/sample-test/check_notebook_results.py deleted file mode 100644 index b46cecf285d..00000000000 --- a/test/sample-test/check_notebook_results.py +++ /dev/null @@ -1,102 +0,0 @@ -# Copyright 2018-2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
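The release.sh additions earlier in this diff derive a release-X.Y name from the VERSION file and then retag every ghcr.io/kubeflow/kfp-api-generator and kfp-release reference to match it, while bump-version-in-place.sh now builds the changelog with git-cliff -c cliff.toml --unreleased --tag $TAG_NAME --prepend CHANGELOG.md instead of the old npm/sed pipeline. The branch derivation itself is just "strip any pre-release suffix, keep MAJOR.MINOR"; here is a small sketch of the same logic, with example version strings that are not taken from a real VERSION file.

# Mirror of the VERSION -> release branch logic added to test/release/release.sh.
def release_branch(version: str) -> str:
    core = version.split('-', 1)[0]               # "2.15.0-rc.1" -> "2.15.0" (shell: ${VERSION%%-*})
    major_minor = '.'.join(core.split('.')[:2])   # "2.15.0"      -> "2.15"   (shell: awk -F. '{print $1"."$2}')
    return f'release-{major_minor}'


assert release_branch('2.14.3') == 'release-2.14'
assert release_branch('2.15.0-rc.1') == 'release-2.15'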
-# See the License for the specific language governing permissions and -# limitations under the License. - -import subprocess -import utils -import yaml - -from constants import RUN_LIST_PAGE_SIZE, DEFAULT_CONFIG -from kfp import Client - - -class NoteBookChecker(object): - def __init__(self, testname, result, run_pipeline, experiment_name, host, namespace='kubeflow'): - """ Util class for checking notebook sample test running results. - - :param testname: test name in the json xml. - :param result: name of the file that stores the test result - :param run_pipeline: whether to submit for a pipeline run. - :param host: The hostname of KFP API endpoint. - :param namespace: where the pipeline system is deployed. - :param experiment_name: Name of the experiment to monitor - """ - self._testname = testname - self._result = result - self._exit_code = None - self._run_pipeline = run_pipeline - self._host = host - self._namespace = namespace - self._experiment_name = experiment_name - - def run(self): - """ Run the notebook sample as a python script. """ - self._exit_code = str( - subprocess.call(['ipython', '%s.py' % self._testname])) - - def check(self): - """ Check the pipeline running results of the notebook sample. """ - test_cases = [] - test_name = self._testname + ' Sample Test' - - ###### Write the script exit code log ###### - utils.add_junit_test(test_cases, 'test script execution', - (self._exit_code == '0'), - 'test script failure with exit code: ' - + self._exit_code) - - try: - with open(DEFAULT_CONFIG, 'r') as f: - raw_args = yaml.safe_load(f) - except yaml.YAMLError as yamlerr: - raise RuntimeError('Illegal default config:{}'.format(yamlerr)) - except OSError as ose: - raise FileExistsError('Default config not found:{}'.format(ose)) - else: - test_timeout = raw_args['test_timeout'] - - if self._run_pipeline: - experiment = self._experiment_name - ###### Initialization ###### - client = Client(host=self._host) - - ###### Get experiments ###### - experiment_id = client.get_experiment(experiment_name=experiment).id - - ###### Get runs ###### - list_runs_response = client.list_runs(page_size=RUN_LIST_PAGE_SIZE, - experiment_id=experiment_id) - - ###### Check all runs ###### - for run in list_runs_response.runs: - run_id = run.id - response = client.wait_for_run_completion(run_id, test_timeout) - succ = (response.run.status.lower()=='succeeded') - utils.add_junit_test(test_cases, 'job completion', - succ, 'waiting for job completion failure') - - ###### Output Argo Log for Debugging ###### - workflow_json = client._get_workflow_json(run_id) - workflow_id = workflow_json['metadata']['name'] - print("Argo Workflow Name: ", workflow_id) - argo_log, _ = utils.run_bash_command( - 'argo logs {} -n {}'.format(workflow_id, self._namespace)) - print("=========Argo Workflow Log=========") - print(argo_log) - - if not succ: - utils.write_junit_xml(test_name, self._result, test_cases) - exit(1) - - ###### Write out the test result in junit xml ###### - utils.write_junit_xml(test_name, self._result, test_cases) diff --git a/test/sample-test/configs/dataflow.config.yaml b/test/sample-test/configs/dataflow.config.yaml deleted file mode 100644 index bfb14086891..00000000000 --- a/test/sample-test/configs/dataflow.config.yaml +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -test_name: dataflow -notebook_params: - output: - project: ml-pipeline-test - region: us-central1 -run_pipeline: True diff --git a/test/sample-test/configs/default.config.yaml b/test/sample-test/configs/default.config.yaml deleted file mode 100644 index edb9dfc2248..00000000000 --- a/test/sample-test/configs/default.config.yaml +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -test_name: default_sample_test -test_timeout: 2700 -run_pipeline: True diff --git a/test/sample-test/configs/kubeflow_tf_serving.config.yaml b/test/sample-test/configs/kubeflow_tf_serving.config.yaml deleted file mode 100644 index 51c5c908155..00000000000 --- a/test/sample-test/configs/kubeflow_tf_serving.config.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -test_name: kubeflow_tf_serving -notebook_params: - output: - project: ml-pipeline-test diff --git a/test/sample-test/configs/parameterized_tfx_oss.config.yaml b/test/sample-test/configs/parameterized_tfx_oss.config.yaml deleted file mode 100644 index 4d7c236e8c3..00000000000 --- a/test/sample-test/configs/parameterized_tfx_oss.config.yaml +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
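The sample-test config files being removed here were consumed by run_sample_test.py and sample_test_launcher.py (deleted further below), which validated each config against schema.config.yaml with yamale before pulling out test_timeout, arguments, and run_pipeline. A compact sketch of that validation step follows, assuming the paths still point at the now-deleted test/sample-test/configs layout.

# Validate a sample-test config against the yamale schema, as the deleted
# run_sample_test.py did before reading test_timeout / run_pipeline.
import yamale
import yaml

SCHEMA_PATH = 'test/sample-test/configs/schema.config.yaml'   # removed in this PR
CONFIG_PATH = 'test/sample-test/configs/default.config.yaml'  # removed in this PR

schema = yamale.make_schema(SCHEMA_PATH)
data = yamale.make_data(CONFIG_PATH)
yamale.validate(schema, data)  # raises an error if the config violates the schema

with open(CONFIG_PATH) as f:
    raw_args = yaml.safe_load(f)
print(raw_args['test_timeout'], raw_args.get('run_pipeline', True))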
- -test_name: parameterized_tfx_oss -arguments: - output: - push_destination: '{"filesystem":{"base_directory":"gs://ml-pipeline-test/serving_model/tfx_taxi_simple"}}' -run_pipeline: True diff --git a/test/sample-test/configs/schema.config.yaml b/test/sample-test/configs/schema.config.yaml deleted file mode 100644 index d5bb047a413..00000000000 --- a/test/sample-test/configs/schema.config.yaml +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# This is the schema for sample test config yaml file. -test_name: str() -# Parameters for python samples. -# If 'output' is specified, values will be dynamically injected -arguments: map(required=False) -# Parameters for notebook samples. -# If 'output' is specified, values will be dynamically injected -notebook_params: map(required=False) -# Timeout -test_timeout: int(min=0, required=False) -# For python samples, run_pipeline means submiting the pipeline -# run; For notebook samples, run_pipeline means checking the -# run results assuming the notebook codes already submit the run. -run_pipeline: bool(required=False) \ No newline at end of file diff --git a/test/sample-test/configs/xgboost_training_cm.config.yaml b/test/sample-test/configs/xgboost_training_cm.config.yaml deleted file mode 100644 index 214b962216c..00000000000 --- a/test/sample-test/configs/xgboost_training_cm.config.yaml +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -test_name: xgboost_training_cm -arguments: - output: - project: ml-pipeline-test - rounds: 5 - diagnostic_mode: False -test_timeout: 3600 # xgboost needs extra time, 60 * 60 secs diff --git a/test/sample-test/constants.py b/test/sample-test/constants.py deleted file mode 100644 index f40ac6991f8..00000000000 --- a/test/sample-test/constants.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os - -# String msgs. 
-PAPERMILL_ERR_MSG = 'An Exception was encountered at' - -# Common paths -GITHUB_REPO = 'kubeflow/pipelines' -BASE_DIR = os.path.abspath('./') -TEST_DIR = os.path.join(BASE_DIR, 'test/sample-test') -CONFIG_DIR = os.path.join(TEST_DIR, 'configs') -DEFAULT_CONFIG = os.path.join(CONFIG_DIR, 'default.config.yaml') -SCHEMA_CONFIG = os.path.join(CONFIG_DIR, 'schema.config.yaml') - -# Common test params -RUN_LIST_PAGE_SIZE = 1000 diff --git a/test/sample-test/hack/update_requirements.sh b/test/sample-test/hack/update_requirements.sh deleted file mode 100755 index e6bbe8277e7..00000000000 --- a/test/sample-test/hack/update_requirements.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash - -# Copyright 2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -ex - -DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null && pwd)" -REPO_ROOT="${DIR}/../../.." - -cat "${REPO_ROOT}/sdk/python/requirements.in" "${REPO_ROOT}/backend/api/v1beta1/python_http_client/requirements.txt" "${REPO_ROOT}/backend/requirements.in" "${DIR}/../requirements.in" | \ - "${REPO_ROOT}/hack/update-requirements.sh" google/cloud-sdk:352.0.0 >"${DIR}/../requirements.txt" diff --git a/test/sample-test/requirements.in b/test/sample-test/requirements.in deleted file mode 100644 index 2d48979e5b6..00000000000 --- a/test/sample-test/requirements.in +++ /dev/null @@ -1,8 +0,0 @@ -kfp==2.8.0 -junit-xml -minio -black -papermill -fire -yamale -kubernetes diff --git a/test/sample-test/requirements.txt b/test/sample-test/requirements.txt deleted file mode 100644 index a220e1b69e9..00000000000 --- a/test/sample-test/requirements.txt +++ /dev/null @@ -1,211 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile -# -ansicolors==1.1.8 - # via papermill -argon2-cffi==23.1.0 - # via minio -argon2-cffi-bindings==21.2.0 - # via argon2-cffi -attrs==24.2.0 - # via - # jsonschema - # referencing -black==24.8.0 - # via -r requirements.in -cachetools==5.5.0 - # via google-auth -certifi==2024.7.4 - # via - # kfp-server-api - # kubernetes - # minio - # requests -cffi==1.17.0 - # via argon2-cffi-bindings -charset-normalizer==3.3.2 - # via requests -click==8.1.7 - # via - # black - # kfp - # papermill -docstring-parser==0.16 - # via kfp -entrypoints==0.4 - # via papermill -fastjsonschema==2.20.0 - # via nbformat -fire==0.6.0 - # via -r requirements.in -google-api-core==2.19.1 - # via - # google-cloud-core - # google-cloud-storage - # kfp -google-auth==2.34.0 - # via - # google-api-core - # google-cloud-core - # google-cloud-storage - # kfp - # kubernetes -google-cloud-core==2.4.1 - # via google-cloud-storage -google-cloud-storage==2.18.2 - # via kfp -google-crc32c==1.5.0 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.7.2 - # via google-cloud-storage -googleapis-common-protos==1.64.0 - # via google-api-core -idna==3.8 - # via requests -jsonschema==4.23.0 - # via nbformat -jsonschema-specifications==2023.12.1 - # via jsonschema 
-junit-xml==1.9 - # via -r requirements.in -jupyter-client==8.6.2 - # via nbclient -jupyter-core==5.7.2 - # via - # jupyter-client - # nbclient - # nbformat -kfp==2.8.0 - # via -r requirements.in -kfp-pipeline-spec==0.3.0 - # via kfp -kfp-server-api==2.0.5 - # via kfp -kubernetes==26.1.0 - # via - # -r requirements.in - # kfp -minio==7.2.8 - # via -r requirements.in -mypy-extensions==1.0.0 - # via black -nbclient==0.10.0 - # via papermill -nbformat==5.10.4 - # via - # nbclient - # papermill -oauthlib==3.2.2 - # via requests-oauthlib -packaging==24.1 - # via black -papermill==2.6.0 - # via -r requirements.in -pathspec==0.12.1 - # via black -platformdirs==4.2.2 - # via - # black - # jupyter-core -proto-plus==1.24.0 - # via google-api-core -protobuf==4.25.4 - # via - # google-api-core - # googleapis-common-protos - # kfp - # kfp-pipeline-spec - # proto-plus -pyasn1==0.6.0 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.0 - # via google-auth -pycparser==2.22 - # via cffi -pycryptodome==3.20.0 - # via minio -python-dateutil==2.9.0.post0 - # via - # jupyter-client - # kfp-server-api - # kubernetes -pyyaml==6.0.2 - # via - # kfp - # kubernetes - # papermill - # yamale -pyzmq==26.2.0 - # via jupyter-client -referencing==0.35.1 - # via - # jsonschema - # jsonschema-specifications -requests==2.32.3 - # via - # google-api-core - # google-cloud-storage - # kubernetes - # papermill - # requests-oauthlib - # requests-toolbelt -requests-oauthlib==2.0.0 - # via kubernetes -requests-toolbelt==0.10.1 - # via kfp -rpds-py==0.20.0 - # via - # jsonschema - # referencing -rsa==4.9 - # via google-auth -six==1.15.0 - # via - # fire - # junit-xml - # kfp-server-api - # kubernetes - # python-dateutil -tabulate==0.9.0 - # via kfp -tenacity==9.0.0 - # via papermill -termcolor==2.4.0 - # via fire -tomli==2.0.1 - # via black -tornado==6.5.1 - # via jupyter-client -tqdm==4.66.5 - # via papermill -traitlets==5.14.3 - # via - # jupyter-client - # jupyter-core - # nbclient - # nbformat -typing-extensions==4.12.2 - # via - # black - # minio -urllib3==1.26.19 - # via - # kfp - # kfp-server-api - # kubernetes - # minio - # requests -websocket-client==1.8.0 - # via kubernetes -yamale==5.2.1 - # via -r requirements.in - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/test/sample-test/run_resnet_cmle_test.py b/test/sample-test/run_resnet_cmle_test.py deleted file mode 100644 index bce46c13409..00000000000 --- a/test/sample-test/run_resnet_cmle_test.py +++ /dev/null @@ -1,125 +0,0 @@ -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import argparse -import os -import json -import tarfile -from datetime import datetime -import utils -from kfp import Client - -###### Input/Output Instruction ###### -# input: yaml -# output: local file path - - -# Parsing the input arguments -def parse_arguments(): - """Parse command line arguments.""" - - parser = argparse.ArgumentParser() - parser.add_argument('--input', - type=str, - required=True, - help='The path of a pipeline package that will be submitted.') - parser.add_argument('--result', - type=str, - required=True, - help='The path of the test result that will be exported.') - parser.add_argument('--output', - type=str, - required=True, - help='The path of the test output') - parser.add_argument('--namespace', - type=str, - default='kubeflow', - help="namespace of the deployed pipeline system. Default: kubeflow") - args = parser.parse_args() - return args - -def main(): - args = parse_arguments() - test_cases = [] - test_name = 'Resnet CMLE Test' - - ###### Initialization ###### - host = 'ml-pipeline.%s.svc.cluster.local:8888' % args.namespace - client = Client(host=host) - - ###### Check Input File ###### - utils.add_junit_test(test_cases, 'input generated yaml file', os.path.exists(args.input), 'yaml file is not generated') - if not os.path.exists(args.input): - utils.write_junit_xml(test_name, args.result, test_cases) - print('Error: job not found.') - exit(1) - - ###### Create Experiment ###### - experiment_name = 'resnet cmle sample experiment' - response = client.create_experiment(experiment_name) - experiment_id = response.id - utils.add_junit_test(test_cases, 'create experiment', True) - - ###### Create Job ###### - job_name = 'cmle_sample' - params = {'output': args.output, - 'project_id': 'ml-pipeline-test', - 'region': 'us-central1', - 'model': 'bolts', - 'version': 'beta1', - 'tf_version': '1.9', # Watch out! If 1.9 is no longer supported we need to set it to a newer version. - 'train_csv': 'gs://ml-pipeline-dataset/sample-test/bolts/bolt_images_train_sample1000.csv', - 'validation_csv': 'gs://ml-pipeline-dataset/sample-test/bolts/bolt_images_validate_sample200.csv', - 'labels': 'gs://bolts_image_dataset/labels.txt', - 'depth': 50, - 'train_batch_size': 32, - 'eval_batch_size': 32, - 'steps_per_eval': 128, - 'train_steps': 128, - 'num_train_images': 1000, - 'num_eval_images': 200, - 'num_label_classes': 10} - response = client.run_pipeline(experiment_id, job_name, args.input, params) - run_id = response.id - utils.add_junit_test(test_cases, 'create pipeline run', True) - - ###### Monitor Job ###### - try: - start_time = datetime.now() - response = client.wait_for_run_completion(run_id, 1800) - succ = (response.run.status.lower()=='succeeded') - end_time = datetime.now() - elapsed_time = (end_time - start_time).seconds - utils.add_junit_test(test_cases, 'job completion', succ, 'waiting for job completion failure', elapsed_time) - finally: - ###### Output Argo Log for Debugging ###### - workflow_json = client._get_workflow_json(run_id) - workflow_id = workflow_json['metadata']['name'] - print("Argo Workflow Name: ", workflow_id) - argo_log, _ = utils.run_bash_command('argo logs {} -n {}'.format(workflow_id, args.namespace)) - print("=========Argo Workflow Log=========") - print(argo_log) - - if not succ: - utils.write_junit_xml(test_name, args.result, test_cases) - exit(1) - - ###### Delete Job ###### - #TODO: add deletion when the backend API offers the interface. 
- - ###### Write out the test result in junit xml ###### - utils.write_junit_xml(test_name, args.result, test_cases) - -if __name__ == "__main__": - main() diff --git a/test/sample-test/run_sample_test.py b/test/sample-test/run_sample_test.py deleted file mode 100644 index 65841fe6d40..00000000000 --- a/test/sample-test/run_sample_test.py +++ /dev/null @@ -1,171 +0,0 @@ -# Copyright 2019-2021 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from datetime import datetime -import os -import tarfile -import time - -from constants import CONFIG_DIR -from constants import DEFAULT_CONFIG -from constants import SCHEMA_CONFIG -import kfp -from kfp import Client -import utils -import yamale -import yaml - - -class PySampleChecker(object): - - def __init__(self, - testname, - input, - output, - result, - experiment_name, - host, - namespace='kubeflow', - expected_result='succeeded'): - """Util class for checking python sample test running results. - - :param testname: test name. - :param input: The path of a pipeline file that will be submitted. - :param output: The path of the test output. - :param result: The path of the test result that will be exported. - :param host: The hostname of KFP API endpoint. - :param namespace: namespace of the deployed pipeline system. Default: kubeflow - :param expected_result: the expected status for the run, default is succeeded. - :param experiment_name: Name of the experiment to monitor - """ - self._testname = testname - self._experiment_name = experiment_name - self._input = input - self._output = output - self._result = result - self._host = host - self._namespace = namespace - self._run_pipeline = None - self._test_timeout = None - - self._test_cases = [] - self._test_name = self._testname + ' Sample Test' - - self._client = None - self._experiment_id = None - self._job_name = None - self._test_args = None - self._run_id = None - self._expected_result = expected_result - - def run(self): - """Run compiled KFP pipeline.""" - - ###### Initialization ###### - self._client = Client(host=self._host) - - ###### Check Input File ###### - utils.add_junit_test(self._test_cases, 'input generated yaml file', - os.path.exists(self._input), - 'yaml file is not generated') - if not os.path.exists(self._input): - utils.write_junit_xml(self._test_name, self._result, - self._test_cases) - print('Error: job not found.') - exit(1) - - ###### Create Experiment ###### - response = self._client.create_experiment(self._experiment_name) - self._experiment_id = response.experiment_id - utils.add_junit_test(self._test_cases, 'create experiment', True) - - ###### Create Job ###### - self._job_name = self._testname + '_sample' - ###### Figure out arguments from associated config files. 
####### - self._test_args = {} - config_schema = yamale.make_schema(SCHEMA_CONFIG) - try: - with open(DEFAULT_CONFIG, 'r') as f: - raw_args = yaml.safe_load(f) - default_config = yamale.make_data(DEFAULT_CONFIG) - yamale.validate( - config_schema, - default_config) # If fails, a ValueError will be raised. - except yaml.YAMLError as yamlerr: - raise RuntimeError('Illegal default config:{}'.format(yamlerr)) - except OSError as ose: - raise FileExistsError('Default config not found:{}'.format(ose)) - else: - self._test_timeout = raw_args['test_timeout'] - self._run_pipeline = raw_args['run_pipeline'] - - try: - config_file = os.path.join(CONFIG_DIR, - '%s.config.yaml' % self._testname) - with open(config_file, 'r') as f: - raw_args = yaml.safe_load(f) - test_config = yamale.make_data(config_file) - yamale.validate( - config_schema, - test_config) # If fails, a ValueError will be raised. - except yaml.YAMLError as yamlerr: - print('No legit yaml config file found, use default args:{}'.format( - yamlerr)) - except OSError as ose: - print( - f'Config file "{config_file}" not found, using default args: {raw_args}') - else: - if 'arguments' in raw_args.keys() and raw_args['arguments']: - self._test_args.update(raw_args['arguments']) - if 'output' in self._test_args.keys( - ): # output is a special param that has to be specified dynamically. - self._test_args['output'] = self._output - if 'test_timeout' in raw_args.keys(): - self._test_timeout = raw_args['test_timeout'] - if 'run_pipeline' in raw_args.keys(): - self._run_pipeline = raw_args['run_pipeline'] - - # TODO(numerology): Special treatment for TFX::OSS sample - if self._testname == 'parameterized_tfx_oss': - self._test_args['pipeline-root'] = os.path.join( - self._test_args['output'], - 'tfx_taxi_simple_' + kfp.dsl.RUN_ID_PLACEHOLDER) - del self._test_args['output'] - - # Submit for pipeline running. - if self._run_pipeline: - response = self._client.run_pipeline(self._experiment_id, - self._job_name, self._input, - self._test_args) - self._run_id = response.run_id - utils.add_junit_test(self._test_cases, 'create pipeline run', True) - - def check(self): - """Check pipeline run results.""" - if self._run_pipeline: - ###### Monitor Job ###### - start_time = datetime.now() - response = self._client.wait_for_run_completion(self._run_id, self._test_timeout) - succ = (response.state.lower() == self._expected_result) - end_time = datetime.now() - elapsed_time = (end_time - start_time).seconds - utils.add_junit_test(self._test_cases, 'job completion', succ, - 'waiting for job completion failure', - elapsed_time) - print(f'Pipeline {"worked" if succ else "Failed"}. Elapsed time: {elapsed_time}s') - - ###### Delete Job ###### - #TODO: add deletion when the backend API offers the interface. - - assert succ diff --git a/test/sample-test/sample_test_launcher.py b/test/sample-test/sample_test_launcher.py deleted file mode 100644 index 276c5c8d28b..00000000000 --- a/test/sample-test/sample_test_launcher.py +++ /dev/null @@ -1,282 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -"""This launcher module serves as the entry-point of the sample test image. - -It decides which test to trigger based upon the arguments provided. -""" - -import os -import re -import subprocess - -from check_notebook_results import NoteBookChecker -from constants import BASE_DIR -from constants import CONFIG_DIR -from constants import DEFAULT_CONFIG -from constants import SCHEMA_CONFIG -from constants import TEST_DIR -import fire -import kubernetes -import papermill as pm -from run_sample_test import PySampleChecker -import utils -import yamale -import yaml - - -class SampleTest(object): - - def __init__(self, - test_name, - results_gcs_dir, - host='', - target_image_prefix='', - namespace='kubeflow', - expected_result='succeeded'): - """Launch a KFP sample_test provided its name. - - :param test_name: name of the corresponding sample test. - :param results_gcs_dir: gs dir to store test result. - :param host: host of KFP API endpoint, default is auto-discovery from inverse-proxy-config. - :param target_image_prefix: prefix of docker image, default is empty. - :param namespace: namespace for kfp, default is kubeflow. - :param expected_result: the expected status for the run, default is succeeded. - """ - self._test_name = test_name - self._results_gcs_dir = results_gcs_dir - # Capture the first segment after gs:// as the project name. - self._target_image_prefix = target_image_prefix - self._namespace = namespace - self._host = host - if self._host == '': - try: - # Get inverse proxy hostname from a config map called 'inverse-proxy-config' - # in the same namespace as KFP. - try: - kubernetes.config.load_incluster_config() - except: - kubernetes.config.load_kube_config() - - self._host = 'http://localhost:8888' - except Exception as err: - raise RuntimeError( - 'Failed to get inverse proxy hostname') from err - - # With the healthz API in place, when the developer clicks the link, - # it will lead to a functional URL instead of a 404 error. - print(f'KFP API healthz endpoint is: {self._host}/apis/v1beta1/healthz') - - self._is_notebook = None - self._work_dir = os.path.join(BASE_DIR, 'samples/core/', - self._test_name) - - self._sample_test_result = 'junit_Sample%sOutput.xml' % self._test_name - self._sample_test_output = self._results_gcs_dir - self._expected_result = expected_result - - def _compile(self): - - os.chdir(self._work_dir) - print('Run the sample tests...') - - # Looking for the entry point of the test. - list_of_files = os.listdir('.') - for file in list_of_files: - # matching by .py or .ipynb, there will be yaml ( compiled ) files in the folder. 
- # if you rerun the test suite twice, the test suite will fail - m = re.match(self._test_name + r'\.(py|ipynb)$', file) - if m: - file_name, ext_name = os.path.splitext(file) - if self._is_notebook is not None: - raise (RuntimeError( - 'Multiple entry points found under sample: {}'.format( - self._test_name))) - if ext_name == '.py': - self._is_notebook = False - if ext_name == '.ipynb': - self._is_notebook = True - - if self._is_notebook is None: - raise (RuntimeError('No entry point found for sample: {}'.format( - self._test_name))) - - config_schema = yamale.make_schema(SCHEMA_CONFIG) - # Retrieve default config - try: - with open(DEFAULT_CONFIG, 'r') as f: - raw_args = yaml.safe_load(f) - default_config = yamale.make_data(DEFAULT_CONFIG) - yamale.validate( - config_schema, - default_config) # If fails, a ValueError will be raised. - except yaml.YAMLError as yamlerr: - raise RuntimeError('Illegal default config:{}'.format(yamlerr)) - except OSError as ose: - raise FileExistsError('Default config not found:{}'.format(ose)) - else: - self._run_pipeline = raw_args['run_pipeline'] - - # For presubmit check, do not do any image injection as for now. - # Notebook samples need to be papermilled first. - if self._is_notebook: - # Parse necessary params from config.yaml - nb_params = {} - try: - config_file = os.path.join(CONFIG_DIR, - '%s.config.yaml' % self._test_name) - with open(config_file, 'r') as f: - raw_args = yaml.safe_load(f) - test_config = yamale.make_data(config_file) - yamale.validate( - config_schema, - test_config) # If fails, a ValueError will be raised. - except yaml.YAMLError as yamlerr: - print('No legit yaml config file found, use default args:{}' - .format(yamlerr)) - except OSError as ose: - print( - 'Config file with the same name not found, use default args:{}' - .format(ose)) - else: - if 'notebook_params' in raw_args.keys(): - nb_params.update(raw_args['notebook_params']) - if 'output' in raw_args['notebook_params'].keys( - ): # output is a special param that has to be specified dynamically. - nb_params['output'] = self._sample_test_output - if 'run_pipeline' in raw_args.keys(): - self._run_pipeline = raw_args['run_pipeline'] - - pm.execute_notebook( - input_path='%s.ipynb' % self._test_name, - output_path='%s.ipynb' % self._test_name, - parameters=nb_params, - prepare_only=True) - # Convert to python script. - return_code = subprocess.call([ - 'jupyter', 'nbconvert', '--to', 'python', - '%s.ipynb' % self._test_name - ]) - - else: - return_code = subprocess.call(['python3', '%s.py' % self._test_name]) - - # Command executed successfully! - assert return_code == 0 - - def _injection(self): - """Inject images for pipeline components. 
- - This is only valid for coimponent test - """ - pass - - def run_test(self): - self._compile() - self._injection() - - # Overriding the experiment name of pipeline runs - experiment_name = self._test_name + '-test' - os.environ['KF_PIPELINES_OVERRIDE_EXPERIMENT_NAME'] = experiment_name - - if self._is_notebook: - nbchecker = NoteBookChecker( - testname=self._test_name, - result=self._sample_test_result, - run_pipeline=self._run_pipeline, - experiment_name=experiment_name, - host=self._host, - ) - nbchecker.run() - os.chdir(TEST_DIR) - nbchecker.check() - else: - os.chdir(TEST_DIR) - input_file = os.path.join(self._work_dir, - '%s.py.yaml' % self._test_name) - - pysample_checker = PySampleChecker( - testname=self._test_name, - input=input_file, - output=self._sample_test_output, - result=self._sample_test_result, - host=self._host, - namespace=self._namespace, - experiment_name=experiment_name, - expected_result=self._expected_result, - ) - pysample_checker.run() - pysample_checker.check() - - -class ComponentTest(SampleTest): - """Launch a KFP sample test as component test provided its name. - - Currently follows the same logic as sample test for compatibility. - include xgboost_training_cm - """ - - def __init__(self, - test_name, - results_gcs_dir, - gcp_image, - local_confusionmatrix_image, - local_roc_image, - target_image_prefix='', - namespace='kubeflow'): - super().__init__( - test_name=test_name, - results_gcs_dir=results_gcs_dir, - target_image_prefix=target_image_prefix, - namespace=namespace) - self._local_confusionmatrix_image = local_confusionmatrix_image - self._local_roc_image = local_roc_image - self._dataproc_gcp_image = gcp_image - - def _injection(self): - """Sample-specific image injection into yaml file.""" - subs = { # Tag can look like 1.0.0-rc.3, so we need both "-" and "." in the regex. - r'gcr\.io/ml-pipeline/ml-pipeline/ml-pipeline-local-confusion-matrix:(\w+|[.-])+': - self._local_confusionmatrix_image, - r'gcr\.io/ml-pipeline/ml-pipeline/ml-pipeline-local-roc:(\w+|[.-])+': - self._local_roc_image - } - if self._test_name == 'xgboost_training_cm': - subs.update({ - r'gcr\.io/ml-pipeline/ml-pipeline-gcp:(\w|[.-])+': - self._dataproc_gcp_image - }) - - utils.file_injection('%s.py.yaml' % self._test_name, - '%s.py.yaml.tmp' % self._test_name, subs) - else: - # Only the above sample need injection for now. - pass - utils.file_injection('%s.py.yaml' % self._test_name, - '%s.py.yaml.tmp' % self._test_name, subs) - - -def main(): - """Launches either KFP sample test or component test as a command - entrypoint. - - Usage: - python sample_test_launcher.py sample_test run_test arg1 arg2 to launch sample test, and - python sample_test_launcher.py component_test run_test arg1 arg2 to launch component - test. 
- """ - fire.Fire({'sample_test': SampleTest, 'component_test': ComponentTest}) - - -if __name__ == '__main__': - main() diff --git a/test/sample-test/unittests/testdata/test_file_injection_input.yaml b/test/sample-test/unittests/testdata/test_file_injection_input.yaml deleted file mode 100644 index 31ee8f654a1..00000000000 --- a/test/sample-test/unittests/testdata/test_file_injection_input.yaml +++ /dev/null @@ -1,10 +0,0 @@ -spec: - templates: - - container: - image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:151c5349f13bea9d626c988563c04c0a86210c21 - - container: - image: gcr.io/ml-pipeline/ml-pipeline-dataproc-analyze:151c5349f13bea9d626c988563c04c0a86210c21 - - container: - image: gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:151c5349f13bea9d626c988563c04c0a86210c21 - - container: - image: gcr.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:151c5349f13bea9d626c988563c04c0a86210c21 \ No newline at end of file diff --git a/test/sample-test/unittests/testdata/test_file_injection_output.yaml b/test/sample-test/unittests/testdata/test_file_injection_output.yaml deleted file mode 100644 index 40a0a69c5bd..00000000000 --- a/test/sample-test/unittests/testdata/test_file_injection_output.yaml +++ /dev/null @@ -1,10 +0,0 @@ -spec: - templates: - - container: - image: gcr.io/ml-pipeline/LOCAL_CONFUSION_MATRIX_IMAGE - - container: - image: gcr.io/ml-pipeline/DATAPROC_ANALYZE_IMAGE - - container: - image: gcr.io/ml-pipeline/DATAPROC_CREATE_IMAGE - - container: - image: gcr.io/ml-pipeline/DATAPROC_DELETE_IMAGE \ No newline at end of file diff --git a/test/sample-test/unittests/utils_tests.py b/test/sample-test/unittests/utils_tests.py deleted file mode 100644 index 0a026ee566f..00000000000 --- a/test/sample-test/unittests/utils_tests.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -import os -import shutil -import sys -import unittest -import yaml - - -# Need to adjust sys path to find utils.py -_PACKAGE_PARENT = '..' -_SCRIPT_DIR = os.path.dirname(os.path.realpath(os.path.join(os.getcwd(), os.path.expanduser(__file__)))) -sys.path.append(os.path.normpath(os.path.join(_SCRIPT_DIR, _PACKAGE_PARENT))) - -import utils - - -_DATAPATH = 'testdata/' -_WORK_DIR = 'workdir/' - - -class TestUtils(unittest.TestCase): - """Unit tests for utility functions defined in test/sample-test/utils.py""" - def setUp(self) -> None: - """Prepare unit test environment.""" - - os.mkdir(_WORK_DIR) - # Copy file for test_file_injection because the function works inplace. 
- shutil.copyfile(os.path.join(_DATAPATH, 'test_file_injection_input.yaml'), - os.path.join(_WORK_DIR, 'test_file_injection.yaml')) - - def tearDown(self) -> None: - """Clean up.""" - shutil.rmtree(_WORK_DIR) - - def test_file_injection(self): - """Test file_injection function.""" - subs = { - r'gcr\.io/ml-pipeline/ml-pipeline-local-confusion-matrix:\w+':'gcr.io/ml-pipeline/LOCAL_CONFUSION_MATRIX_IMAGE', - r'gcr\.io/ml-pipeline/ml-pipeline-dataproc-analyze:\w+':'gcr.io/ml-pipeline/DATAPROC_ANALYZE_IMAGE', - r'gcr\.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:\w+':'gcr.io/ml-pipeline/DATAPROC_CREATE_IMAGE', - r'gcr\.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:\w+':'gcr.io/ml-pipeline/DATAPROC_DELETE_IMAGE', - } - utils.file_injection( - os.path.join(_WORK_DIR, 'test_file_injection.yaml'), - os.path.join(_WORK_DIR, 'test_file_injection_tmp.yaml'), - subs) - with open(os.path.join(_DATAPATH, - 'test_file_injection_output.yaml'), 'r') as f: - golden = yaml.safe_load(f) - - with open(os.path.join(_WORK_DIR, - 'test_file_injection.yaml'), 'r') as f: - injected = yaml.safe_load(f) - self.assertEqual(golden, injected) diff --git a/test/sample-test/utils.py b/test/sample-test/utils.py deleted file mode 100644 index a2a0af8e3d1..00000000000 --- a/test/sample-test/utils.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright 2018 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License - -import os -import re -import subprocess - -from minio import Minio -from junit_xml import TestSuite, TestCase - -# Parse the workflow json to obtain the artifacts for a particular step. -# Note: the step_name could be the key words. 
-def get_artifact_in_minio(workflow_json, step_name, output_path, artifact_name='mlpipeline-ui-metadata'): - s3_data = {} - minio_access_key = 'minio' - minio_secret_key = 'minio123' - try: - for node in workflow_json['status']['nodes'].values(): - if step_name in node['name']: - for artifact in node['outputs']['artifacts']: - if artifact['name'] == artifact_name: - s3_data = artifact['s3'] - minio_client = Minio(s3_data['endpoint'], access_key=minio_access_key, secret_key=minio_secret_key, secure=False) - data = minio_client.get_object(s3_data['bucket'], s3_data['key']) - with open(output_path, 'wb') as file: - for d in data.stream(32*1024): - file.write(d) - except Exception as e: - print('error in get_artifact_in_minio: %s', e) - print(workflow_json) - -# Junit xml utilities -def add_junit_test(test_cases, testname, succ, message='default message', elapsed_sec=0): - test_case = TestCase(testname, elapsed_sec = elapsed_sec) - if not succ: - test_case.add_failure_info(message) - test_cases.append(test_case) - -def write_junit_xml(testname, filename, test_cases): - with open(filename, 'w') as f: - ts = TestSuite(testname, test_cases) - TestSuite.to_file(f, [ts], prettyprint=False) - -# Bash utilities -def run_bash_command(cmd): - process = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE) - output_bytes, error_bytes = process.communicate() - output_string = '' - error_string = '' - if output_bytes != None: - output_string = output_bytes.decode('utf-8') - if error_bytes != None: - error_string = error_bytes.decode('utf-8') - return output_string, error_string - - -def file_injection(file_in, tmp_file_out, subs): - """Utility function that substitute several regex within a file by - corresponding string. - - :param file_in: input file name. - :param tmp_file_out: tmp output file name. - :param subs: dict, key is the regex expr, value is the substituting string. - """ - with open(file_in, 'rt') as fin: - with open(tmp_file_out, 'wt') as fout: - for line in fin: - tmp_line = line - for old, new in subs.items(): - regex = re.compile(old) - tmp_line = re.sub(regex, new, line) - line = tmp_line - - fout.write(tmp_line) - - os.rename(tmp_file_out, file_in) diff --git a/test/sdk-execution-tests/requirements.txt b/test/sdk-execution-tests/requirements.txt deleted file mode 100644 index 86a4517d704..00000000000 --- a/test/sdk-execution-tests/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -sdk/python -pytest==8.3.2 -pytest-xdist==2.5.0 -ml-metadata==1.17.0 -minio==7.2.15 diff --git a/test/sdk-execution-tests/sdk_execution_tests.py b/test/sdk-execution-tests/sdk_execution_tests.py deleted file mode 100644 index b1e7eb424af..00000000000 --- a/test/sdk-execution-tests/sdk_execution_tests.py +++ /dev/null @@ -1,201 +0,0 @@ -# Copyright 2022 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import dataclasses -import functools -import os -import subprocess -import sys -import kfp_server_api -import kubernetes.client -import kubernetes.client.rest -import kubernetes.config -import pytest -import yaml -from typing import Any, Dict, List, Tuple -from minio import S3Error -from ml_metadata import metadata_store -from ml_metadata.proto import metadata_store_pb2 -from ml_metadata.metadata_store.metadata_store import ListOptions -from minio import Minio -from kfp import client -from kfp import dsl - -KFP_ENDPOINT = os.environ['KFP_ENDPOINT'] -KFP_NAMESPACE = os.getenv('KFP_NAMESPACE', 'kubeflow') -TIMEOUT_SECONDS = os.environ['TIMEOUT_SECONDS'] -CURRENT_DIR = os.path.abspath(os.path.dirname(__file__)) -PROJECT_ROOT = os.path.abspath( - os.path.join(CURRENT_DIR, *([os.path.pardir] * 2))) -CONFIG_PATH = os.path.join(PROJECT_ROOT, 'sdk', 'python', 'test_data', - 'test_data_config.yaml') - -METADATA_HOST = '127.0.0.1' -METADATA_PORT = 8080 - -kfp_client = client.Client(host=KFP_ENDPOINT) -kubernetes.config.load_kube_config() - -minio_client = Minio( - endpoint="127.0.0.1:9000", - access_key="minio", - secret_key="minio123", - secure=False, -) - -BUCKET_NAME = "mlpipeline" - -print("Checking Minio connectivity...") -for b in minio_client.list_buckets(): - print(f"Found bucket: {b.name}") - -@dataclasses.dataclass -class TestCase: - name: str - module_path: str - yaml_path: str - function_name: str - arguments: Dict[str, Any] - expected_state: str - - -def create_test_case_parameters() -> List[TestCase]: - parameters: List[TestCase] = [] - with open(CONFIG_PATH) as f: - config = yaml.safe_load(f) - for name, test_group in config.items(): - test_data_dir = os.path.join(PROJECT_ROOT, test_group['test_data_dir']) - - parameters.extend( - TestCase( - name=name + '-' + test_case['module'], - module_path=os.path.join(test_data_dir, - f'{test_case["module"]}.py'), - yaml_path=os.path.join(test_data_dir, - f'{test_case["module"]}.yaml'), - function_name=test_case['name'], - arguments=test_case.get('arguments'), - expected_state=test_case.get('expected_state', 'SUCCEEDED'), - ) for test_case in test_group['test_cases'] if test_case['execute']) - - return parameters - - -def wait( - run_result: client.client.RunPipelineResult -) -> kfp_server_api.V2beta1Run: - return kfp_client.wait_for_run_completion( - run_id=run_result.run_id, timeout=int(TIMEOUT_SECONDS)) - - -def import_obj_from_file(python_path: str, obj_name: str) -> Any: - sys.path.insert(0, os.path.dirname(python_path)) - module_name = os.path.splitext(os.path.split(python_path)[1])[0] - module = __import__(module_name, fromlist=[obj_name]) - if not hasattr(module, obj_name): - raise ValueError( - f'Object "{obj_name}" not found in module {python_path}.') - return getattr(module, obj_name) - - -def run(test_case: TestCase) -> Tuple[str, client.client.RunPipelineResult]: - full_path = os.path.join(PROJECT_ROOT, test_case.module_path) - pipeline_func = import_obj_from_file(full_path, test_case.function_name) - run_result = kfp_client.create_run_from_pipeline_func( - pipeline_func, - enable_caching=False, - arguments=test_case.arguments, - ) - run_url = f'{KFP_ENDPOINT}/#/runs/details/{run_result.run_id}' - print( - f'- Created run {test_case.name}\n\tModule: {test_case.module_path}\n\tURL: {run_url}\n' - ) - - return run_url, run_result - - -def get_run_artifacts(run_id: str): - mlmd_connection_config = metadata_store_pb2.MetadataStoreClientConfig( - host=METADATA_HOST, - port=METADATA_PORT, - ) - mlmd_store = 
metadata_store.MetadataStore(mlmd_connection_config) - contexts = mlmd_store.get_contexts(list_options=ListOptions(filter_query=f"name = '{run_id}'")) - if len(contexts) != 1: - print("ERROR: Unable to find pipelinerun context in MLMD", file=sys.stderr) - return [] - - context = contexts[0] - return [a for a in mlmd_store.get_artifacts_by_context(context.id)] - - -def cleanup_run_resources(run_id: str): - print(f"Cleaning up resources for run {run_id}") - - artifacts = get_run_artifacts(run_id) - print(f"Found {len(artifacts)} artifacts for run {run_id}") - # Clean up any Artifacts from object store - for artifact in artifacts: - try: - object_key = artifact.uri.removeprefix(f"minio://{BUCKET_NAME}") - print(f"Deleting artifact {object_key} for run {run_id}") - minio_client.remove_object(BUCKET_NAME, object_key) - except S3Error as err: - print(f"MinIO error: {err} for run {run_id}") - - # Clean up Argo Workflow - try: - print(f'Deleting the Argo Workflow for run {run_id}') - kubernetes.client.CustomObjectsApi( - ).delete_collection_namespaced_custom_object( - 'argoproj.io', - 'v1alpha1', - KFP_NAMESPACE, - 'workflows', - label_selector=f'pipeline/runid={run_id}') - except kubernetes.client.rest.ApiException as e: - print( - f'Failed to delete the Argo Workflow for run {run_id}: {e}', - file=sys.stderr) - - -def get_kfp_package_path() -> str: - repo_name = os.environ.get('REPO_NAME', 'kubeflow/pipelines') - if os.environ.get('PULL_NUMBER'): - path = f'git+https://github.com/{repo_name}.git@refs/pull/{os.environ["PULL_NUMBER"]}/merge#subdirectory=sdk/python' - else: - path = f'git+https://github.com/{repo_name}.git@master#subdirectory=sdk/python' - print(f'Using the following KFP package path for tests: {path}') - return path - - -dsl.component = functools.partial( - dsl.component, kfp_package_path=get_kfp_package_path()) - - -@pytest.mark.parametrize('test_case', create_test_case_parameters()) -def test(test_case: TestCase) -> None: - try: - run_url, run_result = run(test_case) - except Exception as e: - raise RuntimeError( - f'Error triggering pipeline {test_case.name}.') from e - - api_run = wait(run_result) - assert api_run.state == test_case.expected_state, f'Pipeline {test_case.name} ended with incorrect status: {api_run.state}. More info: {run_url}' - - cleanup_run_resources(api_run.run_id) - -if __name__ == '__main__': - pytest.main() diff --git a/test/seaweedfs/namespace_isolation_test.sh b/test/seaweedfs/namespace_isolation_test.sh new file mode 100755 index 00000000000..56dab7a1d26 --- /dev/null +++ b/test/seaweedfs/namespace_isolation_test.sh @@ -0,0 +1,197 @@ +#!/bin/bash +set -euxo pipefail + +echo "SeaweedFS Security Test - Unauthorized Access Check" +echo "Testing if one namespace can access files from another namespace" + +# Check dependencies +for cmd in kubectl python3; do + if ! command -v $cmd &> /dev/null; then + echo "Error: $cmd is required but not installed" + exit 1 + fi +done + +# Install boto3 if not available +if ! python3 -c "import boto3" 2>/dev/null; then + echo "Installing boto3..." + pip3 install boto3 +fi + +PORT_FORWARD_PID="" +# Cleanup function +cleanup() { + echo "Cleaning up..." + if [ -n "$PORT_FORWARD_PID" ]; then + kill $PORT_FORWARD_PID 2>/dev/null || true + fi + rm -f test-file.txt accessed-file.txt + kubectl delete profile test-profile-1 test-profile-2 --ignore-not-found +} +trap cleanup EXIT + +# Create test profiles +create_profiles() { + echo "Creating test profiles..." 
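+    # Each Profile below gets its own namespace and its own mlpipeline-minio-artifact
+    # secret from the profile controller; those per-namespace credentials drive the
+    # isolation checks further down.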
+
+    # Create both profiles
+    # NOTE: the owner e-mails below are placeholder values.
+    kubectl apply -f - <<EOF
+apiVersion: kubeflow.org/v1
+kind: Profile
+metadata:
+  name: test-profile-1
+spec:
+  owner:
+    kind: User
+    name: test-user-1@example.com
+---
+apiVersion: kubeflow.org/v1
+kind: Profile
+metadata:
+  name: test-profile-2
+spec:
+  owner:
+    kind: User
+    name: test-user-2@example.com
+EOF
+
+    # Wait (up to ~60s) for the profile controller to create both namespaces
+    echo "Waiting for namespaces..."
+    for i in {1..6}; do
+        if kubectl get namespace test-profile-1 test-profile-2 >/dev/null 2>&1; then
+            echo "Namespaces created"
+            return 0
+        fi
+        sleep 10
+    done
+
+    echo "Error: Namespaces not created"
+    exit 1
+}
+
+# Wait for S3 credentials
+wait_for_credentials() {
+    local namespace=$1
+    echo "Waiting for S3 credentials in $namespace..."
+
+    for i in {1..6}; do
+        if kubectl get secret -n $namespace mlpipeline-minio-artifact >/dev/null 2>&1; then
+            echo "Credentials found"
+            return 0
+        fi
+        sleep 10
+    done
+
+    echo "Error: No credentials found"
+    return 1
+}
+
+# Get credentials for namespace
+get_credentials() {
+    local namespace=$1
+    local access_key=$(kubectl get secret -n $namespace mlpipeline-minio-artifact -o jsonpath='{.data.accesskey}' | base64 -d)
+    local secret_key=$(kubectl get secret -n $namespace mlpipeline-minio-artifact -o jsonpath='{.data.secretkey}' | base64 -d)
+    echo "$access_key:$secret_key"
+}
+
+# Set up port-forward to SeaweedFS
+setup_port_forward() {
+    if [ -n "$PORT_FORWARD_PID" ]; then
+        return 0 # Already running
+    fi
+
+    echo "Setting up port-forward..."
+    local pod=$(kubectl get pod -n kubeflow -l app=seaweedfs -o jsonpath='{.items[0].metadata.name}')
+    kubectl port-forward -n kubeflow pod/$pod 8333:8333 >/dev/null 2>&1 &
+    PORT_FORWARD_PID=$!
+    sleep 3
+}
+
+# Upload test file
+upload_file() {
+    local namespace=$1
+    echo "Uploading test file to $namespace..."
+
+    local credentials=$(get_credentials $namespace)
+    local access_key=$(echo $credentials | cut -d: -f1)
+    local secret_key=$(echo $credentials | cut -d: -f2)
+
+    setup_port_forward
+
+    python3 test/seaweedfs/s3_helper.py upload \
+        --access-key "$access_key" \
+        --secret-key "$secret_key" \
+        --endpoint-url "http://localhost:8333" \
+        --bucket "mlpipeline" \
+        --key "private-artifacts/$namespace/test-file.txt" \
+        --content "Test file for $namespace"
+}
+
+# Test unauthorized access
+test_unauthorized_access() {
+    local from_namespace=$1
+    local target_namespace=$2
+
+    echo "Testing unauthorized access from $from_namespace to $target_namespace..."
+
+    local credentials=$(get_credentials $from_namespace)
+    local access_key=$(echo $credentials | cut -d: -f1)
+    local secret_key=$(echo $credentials | cut -d: -f2)
+
+    setup_port_forward
+
+    # Try to access the other namespace's file
+    # Note: Python script returns 0 when access is denied (good), 1 when access succeeds (bad)
+    if python3 test/seaweedfs/s3_helper.py download \
+        --access-key "$access_key" \
+        --secret-key "$secret_key" \
+        --endpoint-url "http://localhost:8333" \
+        --bucket "mlpipeline" \
+        --key "private-artifacts/$target_namespace/test-file.txt"; then
+
+        echo "Security OK: Access denied as expected"
+        return 0
+    else
+        echo "SECURITY ISSUE: Unauthorized access successful!"
+        return 1
+    fi
+}
+
+# Main test function
+main() {
+    echo "Starting security test..."
+
+    # Create test profiles
+    create_profiles
+
+    # Wait for credentials to be created
+    echo "Waiting for profile controller to create credentials..."
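+    # The profile controller provisions namespaces and secrets asynchronously,
+    # so give it a head start before polling with wait_for_credentials.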
+ sleep 30 + + wait_for_credentials "test-profile-1" || { + echo "Failed to get credentials for test-profile-1" + exit 1 + } + + wait_for_credentials "test-profile-2" || { + echo "Failed to get credentials for test-profile-2" + exit 1 + } + + # Upload file to first namespace + upload_file "test-profile-1" || { + echo "Failed to upload file" + exit 1 + } + + # Test unauthorized access + if test_unauthorized_access "test-profile-2" "test-profile-1"; then + echo "SECURITY TEST PASSED: No unauthorized access detected" + else + echo "SECURITY TEST FAILED: Unauthorized access detected" + echo "This indicates a security vulnerability in the SeaweedFS setup" + exit 1 + fi +} + +main diff --git a/test/seaweedfs/s3_helper.py b/test/seaweedfs/s3_helper.py new file mode 100755 index 00000000000..f5c2e2f0fc2 --- /dev/null +++ b/test/seaweedfs/s3_helper.py @@ -0,0 +1,95 @@ +#!/usr/bin/env python3 +""" +S3 helper script for SeaweedFS namespace isolation testing. +Uses boto3 to perform S3 operations for security testing. +""" + +import sys +import boto3 +from botocore.exceptions import ClientError, NoCredentialsError +import argparse + + +def create_s3_client(access_key, secret_key, endpoint_url): + """Create S3 client with given credentials.""" + return boto3.client( + 's3', + aws_access_key_id=access_key, + aws_secret_access_key=secret_key, + endpoint_url=endpoint_url, + region_name='us-east-1' # Required but not used by SeaweedFS + ) + + +def upload_file(access_key, secret_key, endpoint_url, bucket, key, content): + """Upload a file to S3.""" + try: + s3_client = create_s3_client(access_key, secret_key, endpoint_url) + s3_client.put_object( + Bucket=bucket, + Key=key, + Body=content.encode('utf-8') + ) + print(f"✓ Successfully uploaded file to s3://{bucket}/{key}") + return True + except Exception as e: + print(f"✗ Failed to upload file: {e}") + return False + + +def download_file(access_key, secret_key, endpoint_url, bucket, key): + """Download a file from S3.""" + try: + s3_client = create_s3_client(access_key, secret_key, endpoint_url) + response = s3_client.get_object(Bucket=bucket, Key=key) + content = response['Body'].read().decode('utf-8') + print(f"✓ Successfully downloaded file from s3://{bucket}/{key}") + print(f"File content: {content}") + return True, content + except ClientError as e: + error_code = e.response['Error']['Code'] + if error_code in ['NoSuchKey', 'AccessDenied', 'Forbidden']: + print(f"✓ Access denied as expected: {error_code}") + return False, None + else: + print(f"✗ Unexpected error: {e}") + return False, None + except Exception as e: + print(f"✗ Failed to download file: {e}") + return False, None + + +def main(): + parser = argparse.ArgumentParser(description='S3 operations for SeaweedFS testing') + parser.add_argument('operation', choices=['upload', 'download'], help='Operation to perform') + parser.add_argument('--access-key', required=True, help='AWS access key') + parser.add_argument('--secret-key', required=True, help='AWS secret key') + parser.add_argument('--endpoint-url', required=True, help='S3 endpoint URL') + parser.add_argument('--bucket', required=True, help='S3 bucket name') + parser.add_argument('--key', required=True, help='S3 object key') + parser.add_argument('--content', help='Content to upload (for upload operation)') + + args = parser.parse_args() + + if args.operation == 'upload': + if not args.content: + print("Error: --content is required for upload operation") + sys.exit(1) + success = upload_file(args.access_key, args.secret_key, 
args.endpoint_url, + args.bucket, args.key, args.content) + sys.exit(0 if success else 1) + + elif args.operation == 'download': + success, content = download_file(args.access_key, args.secret_key, args.endpoint_url, + args.bucket, args.key) + # For security test: success=True means unauthorized access (bad) + # success=False means access denied (good) + if args.key.startswith('private-artifacts/') and '/' in args.key[18:]: + # This is a cross-namespace access attempt + sys.exit(1 if success else 0) + else: + sys.exit(0 if success else 1) + + +if __name__ == '__main__': + main() diff --git a/test/tag_for_hosted.sh b/test/tag_for_hosted.sh index 77789294f4f..e5d7b87b3c8 100755 --- a/test/tag_for_hosted.sh +++ b/test/tag_for_hosted.sh @@ -120,12 +120,12 @@ docker tag gcr.io/cloudsql-docker/gce-proxy:1.25.0 gcr.io/$PROJECT_ID/hosted/$CO docker push gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/cloudsqlproxy:$SEM_VER docker push gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/cloudsqlproxy:$MM_VER -docker tag quay.io/argoproj/argoexec:v3.5.14 gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoexecutor:$SEM_VER -docker tag quay.io/argoproj/argoexec:v3.5.14 gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoexecutor:$MM_VER +docker tag quay.io/argoproj/argoexec:v3.6.7 gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoexecutor:$SEM_VER +docker tag quay.io/argoproj/argoexec:v3.6.7 gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoexecutor:$MM_VER docker push gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoexecutor:$SEM_VER docker push gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoexecutor:$MM_VER -docker tag quay.io/argoproj/workflow-controller:v3.5.14 gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoworkflowcontroller:$SEM_VER -docker tag quay.io/argoproj/workflow-controller:v3.5.14 gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoworkflowcontroller:$MM_VER +docker tag quay.io/argoproj/workflow-controller:v3.6.7 gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoworkflowcontroller:$SEM_VER +docker tag quay.io/argoproj/workflow-controller:v3.6.7 gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoworkflowcontroller:$MM_VER docker push gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoworkflowcontroller:$SEM_VER docker push gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoworkflowcontroller:$MM_VER diff --git a/test/tools/project-cleaner/go.mod b/test/tools/project-cleaner/go.mod index 8612ff8902a..a78fe50b781 100644 --- a/test/tools/project-cleaner/go.mod +++ b/test/tools/project-cleaner/go.mod @@ -1,26 +1,31 @@ module github.com/kubeflow/pipelines/test/tools/project-cleaner -go 1.23.0 - -toolchain go1.23.7 +go 1.23.12 require ( - cloud.google.com/go v0.89.0 - google.golang.org/api v0.52.0 - google.golang.org/genproto v0.0.0-20210729151513-df9385d47c1b + cloud.google.com/go/compute v1.21.0 + google.golang.org/api v0.126.0 + google.golang.org/genproto v0.0.0-20230711160842-782d3b101e98 gopkg.in/yaml.v2 v2.4.0 ) require ( + cloud.google.com/go/compute/metadata v0.3.0 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect - github.com/golang/protobuf v1.5.2 // indirect - github.com/googleapis/gax-go/v2 v2.0.5 // indirect - go.opencensus.io v0.23.0 // indirect + github.com/golang/protobuf v1.5.3 // indirect + github.com/google/s2a-go v0.1.4 // indirect + github.com/google/uuid v1.3.0 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.2.3 // indirect + github.com/googleapis/gax-go/v2 v2.11.0 // indirect + go.opencensus.io v0.24.0 // indirect + golang.org/x/crypto v0.36.0 // indirect golang.org/x/net v0.38.0 // indirect - golang.org/x/oauth2 
v0.0.0-20210628180205-a41e5a781914 // indirect + golang.org/x/oauth2 v0.27.0 // indirect golang.org/x/sys v0.31.0 // indirect golang.org/x/text v0.23.0 // indirect google.golang.org/appengine v1.6.7 // indirect - google.golang.org/grpc v1.39.0 // indirect - google.golang.org/protobuf v1.27.1 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20230711160842-782d3b101e98 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20230711160842-782d3b101e98 // indirect + google.golang.org/grpc v1.58.3 // indirect + google.golang.org/protobuf v1.33.0 // indirect ) diff --git a/test/tools/project-cleaner/go.sum b/test/tools/project-cleaner/go.sum index 63996bada15..493657e47ac 100644 --- a/test/tools/project-cleaner/go.sum +++ b/test/tools/project-cleaner/go.sum @@ -1,93 +1,40 @@ cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= -cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= -cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= -cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= -cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= -cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= -cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= -cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= -cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= -cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= -cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= -cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= -cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= -cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= -cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= -cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= -cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= -cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= -cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= -cloud.google.com/go v0.88.0/go.mod h1:dnKwfYbP9hQhefiUvpbcAyoGSHUrOxR20JVElLiUvEY= -cloud.google.com/go v0.89.0 h1:ZT4GU+y59fC95Mfdn2RtxuzN2gc69dzlVevQK8Ykyqs= -cloud.google.com/go v0.89.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= -cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= -cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= -cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= -cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= -cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= -cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= 
-cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= -cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= -cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= -cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= -cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= -cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= -cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= -cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= -cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +cloud.google.com/go v0.110.4 h1:1JYyxKMN9hd5dR2MYTPWkGUgcoxVVhg0LKNKEo0qvmk= +cloud.google.com/go v0.110.4/go.mod h1:+EYjdK8e5RME/VY/qLCAtuyALQ9q67dvuum8i+H5xsI= +cloud.google.com/go/compute v1.21.0 h1:JNBsyXVoOoNJtTQcnEY5uYpZIbeCTYIeDe0Xh1bySMk= +cloud.google.com/go/compute v1.21.0/go.mod h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdiEZc9FEIbM= +cloud.google.com/go/compute/metadata v0.3.0 h1:Tz+eQXMEqDIKRsmY3cHTL6FVaynIjX2QxYC4trgAKZc= +cloud.google.com/go/compute/metadata v0.3.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= -github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= +github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/davecgh/go-spew v1.1.0/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= +github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= -github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= -github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= -github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.4/go.mod 
h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= @@ -97,412 +44,138 @@ github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QD github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= -github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= +github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= -github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= -github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= -github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof 
v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210715191844-86eeefc3e471/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/s2a-go v0.1.4 h1:1kZ/sQM3srePvKs3tXAvQzo66XfcReoqFpIpIccE7Oc= +github.com/google/s2a-go v0.1.4/go.mod h1:Ej+mSEMGRnqRzjc7VtF+jdBwYG5fuJfiZ8ELkjEwM0A= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM= -github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.2.3 h1:yk9/cqRKtT9wXZSsRH9aurXEpJX+U6FLtpYTdC3R06k= +github.com/googleapis/enterprise-certificate-proxy v0.2.3/go.mod h1:AwSRAtLfXpU5Nm3pW+v7rGDHp09LsPtGY9MduiEsR9k= +github.com/googleapis/gax-go/v2 v2.11.0 h1:9V9PWXEsWnPpQhu/PeQIkS4eGzMlTLGgt80cUUI8Ki4= +github.com/googleapis/gax-go/v2 v2.11.0/go.mod h1:DxmR61SGKkGLa2xigwuZIQpkCI2S5iydzRfb3peWZJI= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= 
-github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= -github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= -go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= -go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= -go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= -go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= +go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod 
h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34= +golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= -golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= -golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= -golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= -golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= -golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= -golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= -golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= -golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= -golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= 
-golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net 
v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= -golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8= golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914 h1:3B43BWw0xEBsLZ/NO1VALz6fppU3481pik+2Ksv45z8= -golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M= +golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw= +golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
-golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= 
-golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools 
v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= -golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= -golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= -google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= -google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= 
-google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= -google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= -google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= -google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= -google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= -google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= -google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= -google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= -google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= -google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= -google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= -google.golang.org/api v0.52.0 h1:m5FLEd6dp5CU1F0tMWyqDi2XjchviIz8ntzOSz7w8As= -google.golang.org/api v0.52.0/go.mod h1:Him/adpjt0sxtkWViy0b6xyKW/SD71CwdJ7HqJo7SrU= +google.golang.org/api v0.126.0 h1:q4GJq+cAdMAC7XP7njvQ4tvohGLiSlytuL4BQxbIZ+o= +google.golang.org/api v0.126.0/go.mod h1:mBwVAtz+87bEN6CbA1GtZPDOqY2R5ONPqJeIlvyo4Aw= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= -google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod 
h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= -google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= -google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod 
h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= -google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= -google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= -google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= -google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= -google.golang.org/genproto v0.0.0-20210721163202-f1cecdd8b78a/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210722135532-667f2b7c528f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210729151513-df9385d47c1b h1:4xoALQmXxqVdDdLimpPyPeDdsJzo+nFTJw9euAMpqgM= -google.golang.org/genproto v0.0.0-20210729151513-df9385d47c1b/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20230711160842-782d3b101e98 h1:Z0hjGZePRE0ZBWotvtrwxFNrNE9CUAGtplaDK5NNI/g= +google.golang.org/genproto v0.0.0-20230711160842-782d3b101e98/go.mod h1:S7mY02OqCJTD0E1OiQy1F72PWFB4bZJ87cAtLPYgDR0= +google.golang.org/genproto/googleapis/api v0.0.0-20230711160842-782d3b101e98 h1:FmF5cCW94Ij59cfpoLiwTgodWmm60eEV0CjlsVg2fuw= +google.golang.org/genproto/googleapis/api v0.0.0-20230711160842-782d3b101e98/go.mod h1:rsr7RhLuwsDKL7RmgDDCUc6yaGr1iqceVb5Wv6f6YvQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20230711160842-782d3b101e98 h1:bVf09lpb+OJbByTj913DRJioFFAjf/ZGxEz7MajTp2U= +google.golang.org/genproto/googleapis/rpc v0.0.0-20230711160842-782d3b101e98/go.mod h1:TUfxEVdsvPg18p6AslUXFoLdpED4oBnGwyqk3dV1XzM= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= -google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= 
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= -google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.39.0 h1:Klz8I9kdtkIN6EpHHUOMLCYhTn/2WAe5a0s1hcBkdTI= -google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= -google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= +google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= +google.golang.org/grpc v1.58.3 h1:BjnpXut1btbtgN/6sp+brB2Kbm2LjNXnidYujAVbSoQ= +google.golang.org/grpc v1.58.3/go.mod h1:tgX3ZQDlNJGU96V6yHh1T/JeoBQ2TXdr43YbYSsCJk0= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -511,28 +184,18 @@ google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzi google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ= -google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 
v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= -honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= -rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/test_data/__init__.py b/test_data/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/test_data/compiled-workflows/add_numbers.yaml b/test_data/compiled-workflows/add_numbers.yaml new file mode 100644 index 00000000000..fb1f456fe7f --- /dev/null +++ b/test_data/compiled-workflows/add_numbers.yaml @@ -0,0 +1,369 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: add-numbers- +spec: + arguments: + parameters: + - name: components-23c2b9f7ae92700a0e5c955580d24cd301258f0f455d131467d52707529b4c8a + value: '{"executorLabel":"exec-add-numbers","inputDefinitions":{"parameters":{"a":{"parameterType":"NUMBER_INTEGER"},"b":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-23c2b9f7ae92700a0e5c955580d24cd301258f0f455d131467d52707529b4c8a + value: '{"args":["--executor_input","{{$}}","--function_to_execute","add_numbers"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + add_numbers(a: int, b: int) -\u003e int:\n return a + b\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"add-numbers"}}}},"tasks":{"add-numbers":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-numbers"},"inputs":{"parameters":{"a":{"componentInputParameter":"a"},"b":{"componentInputParameter":"b"}}},"taskInfo":{"name":"add-numbers"}}}},"inputDefinitions":{"parameters":{"a":{"parameterType":"NUMBER_INTEGER"},"b":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - add-numbers + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + 
tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-23c2b9f7ae92700a0e5c955580d24cd301258f0f455d131467d52707529b4c8a}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-numbers"},"inputs":{"parameters":{"a":{"componentInputParameter":"a"},"b":{"componentInputParameter":"b"}}},"taskInfo":{"name":"add-numbers"}}' + - name: container + value: '{{workflow.parameters.implementations-23c2b9f7ae92700a0e5c955580d24cd301258f0f455d131467d52707529b4c8a}}' + - name: task-name + value: add-numbers + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: add-numbers-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.add-numbers-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.add-numbers-driver.outputs.parameters.cached-decision}}' + depends: add-numbers-driver.Succeeded + name: add-numbers + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - add-numbers + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + 
- '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/arguments-parameters.yaml b/test_data/compiled-workflows/arguments-parameters.yaml new file mode 100644 index 00000000000..10020005c22 --- /dev/null +++ b/test_data/compiled-workflows/arguments-parameters.yaml @@ -0,0 +1,360 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: echo- +spec: + arguments: + parameters: + - name: components-e3bf4dafebca73c53759f2310029cb3fc65ab6a05d870069f7c58096ff7bb483 + value: '{"executorLabel":"exec-echo","inputDefinitions":{"parameters":{"param1":{"parameterType":"STRING"},"param2":{"parameterType":"STRING"}}}}' + - name: implementations-e3bf4dafebca73c53759f2310029cb3fc65ab6a05d870069f7c58096ff7bb483 + value: '{"args":["{{$.inputs.parameters[''param1'']}}-{{$.inputs.parameters[''param2'']}}"],"command":["echo"],"image":"public.ecr.aws/docker/library/python:3.12"}' + - name: components-root + value: '{"dag":{"tasks":{"echo":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-echo"},"inputs":{"parameters":{"param1":{"componentInputParameter":"param1"},"param2":{"componentInputParameter":"param2"}}},"taskInfo":{"name":"echo"}}}},"inputDefinitions":{"parameters":{"param1":{"defaultValue":"hello","parameterType":"STRING"},"param2":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + 
pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - echo + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - 
mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-e3bf4dafebca73c53759f2310029cb3fc65ab6a05d870069f7c58096ff7bb483}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-echo"},"inputs":{"parameters":{"param1":{"componentInputParameter":"param1"},"param2":{"componentInputParameter":"param2"}}},"taskInfo":{"name":"echo"}}' + - name: container + value: '{{workflow.parameters.implementations-e3bf4dafebca73c53759f2310029cb3fc65ab6a05d870069f7c58096ff7bb483}}' + - name: task-name + value: echo + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: echo-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.echo-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.echo-driver.outputs.parameters.cached-decision}}' + depends: echo-driver.Succeeded + name: echo + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - echo + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - 
dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"param1":"hello"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/arguments.pipeline.yaml b/test_data/compiled-workflows/arguments.pipeline.yaml new file mode 100644 index 00000000000..10020005c22 --- /dev/null +++ b/test_data/compiled-workflows/arguments.pipeline.yaml @@ -0,0 +1,360 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: echo- +spec: + arguments: + parameters: + - name: components-e3bf4dafebca73c53759f2310029cb3fc65ab6a05d870069f7c58096ff7bb483 + value: '{"executorLabel":"exec-echo","inputDefinitions":{"parameters":{"param1":{"parameterType":"STRING"},"param2":{"parameterType":"STRING"}}}}' + - name: implementations-e3bf4dafebca73c53759f2310029cb3fc65ab6a05d870069f7c58096ff7bb483 + value: '{"args":["{{$.inputs.parameters[''param1'']}}-{{$.inputs.parameters[''param2'']}}"],"command":["echo"],"image":"public.ecr.aws/docker/library/python:3.12"}' + - name: components-root + value: '{"dag":{"tasks":{"echo":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-echo"},"inputs":{"parameters":{"param1":{"componentInputParameter":"param1"},"param2":{"componentInputParameter":"param2"}}},"taskInfo":{"name":"echo"}}}},"inputDefinitions":{"parameters":{"param1":{"defaultValue":"hello","parameterType":"STRING"},"param2":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - echo + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + 
- name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-e3bf4dafebca73c53759f2310029cb3fc65ab6a05d870069f7c58096ff7bb483}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-echo"},"inputs":{"parameters":{"param1":{"componentInputParameter":"param1"},"param2":{"componentInputParameter":"param2"}}},"taskInfo":{"name":"echo"}}' + - name: container + value: '{{workflow.parameters.implementations-e3bf4dafebca73c53759f2310029cb3fc65ab6a05d870069f7c58096ff7bb483}}' + - name: task-name + value: echo + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: echo-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.echo-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.echo-driver.outputs.parameters.cached-decision}}' + depends: echo-driver.Succeeded + name: echo + template: 
system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - echo + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"param1":"hello"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/artifact_cache.yaml b/test_data/compiled-workflows/artifact_cache.yaml new file mode 100644 index 00000000000..a6d14f25f0b --- /dev/null +++ b/test_data/compiled-workflows/artifact_cache.yaml @@ -0,0 +1,471 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: artifact-cache-pipeline- +spec: + arguments: + parameters: + - name: components-a0055e53510db02175ab79ccc2c9c06c8b7bd7592f3d2ec6903b3de5d6b65431 + value: '{"executorLabel":"exec-crust-comp","inputDefinitions":{"artifacts":{"input":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-a0055e53510db02175ab79ccc2c9c06c8b7bd7592f3d2ec6903b3de5d6b65431 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","crust_comp"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + crust_comp(input: dsl.Dataset):\n with open(input.path, ''r'') as f:\n print(''input: + '', f.read())\n\n"],"image":"python:3.9"}' + - name: components-6dc68c2089ddf2062dbae792f6214699e1d4a930573b3b4c3cc0626bea7bee2d + value: '{"executorLabel":"exec-core-comp","outputDefinitions":{"artifacts":{"dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-6dc68c2089ddf2062dbae792f6214699e1d4a930573b3b4c3cc0626bea7bee2d + value: '{"args":["--executor_input","{{$}}","--function_to_execute","core_comp"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + core_comp(dataset: dsl.Output[dsl.Dataset]):\n with open(dataset.path, + ''w'') as f:\n f.write(''foo'')\n\n"],"image":"python:3.9"}' + - name: components-comp-core + value: '{"dag":{"outputs":{"artifacts":{"Output":{"artifactSelectors":[{"outputArtifactKey":"dataset","producerSubtask":"core-comp"}]}}},"tasks":{"core-comp":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-core-comp"},"taskInfo":{"name":"core-comp"}}}},"outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: components-comp-mantle + value: '{"dag":{"outputs":{"artifacts":{"Output":{"artifactSelectors":[{"outputArtifactKey":"Output","producerSubtask":"core"}]}}},"tasks":{"core":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}}},"outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: components-root + value: '{"dag":{"tasks":{"crust-comp":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-crust-comp"},"dependentTasks":["mantle"],"inputs":{"artifacts":{"input":{"taskOutputArtifact":{"outputArtifactKey":"Output","producerTask":"mantle"}}}},"taskInfo":{"name":"crust-comp"}},"mantle":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: 
pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - artifact-cache-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: 
+ parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-6dc68c2089ddf2062dbae792f6214699e1d4a930573b3b4c3cc0626bea7bee2d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-core-comp"},"taskInfo":{"name":"core-comp"}}' + - name: container + value: '{{workflow.parameters.implementations-6dc68c2089ddf2062dbae792f6214699e1d4a930573b3b4c3cc0626bea7bee2d}}' + - name: task-name + value: core-comp + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: core-comp-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.core-comp-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.core-comp-driver.outputs.parameters.cached-decision}}' + depends: core-comp-driver.Succeeded + name: core-comp + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-core + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - artifact-cache-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: 
'{{workflow.parameters.components-comp-core}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}' + - name: task-name + value: core + name: core-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.core-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.core-driver.outputs.parameters.condition}}' + depends: core-driver.Succeeded + name: core + template: comp-core + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-mantle + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-a0055e53510db02175ab79ccc2c9c06c8b7bd7592f3d2ec6903b3de5d6b65431}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-crust-comp"},"dependentTasks":["mantle"],"inputs":{"artifacts":{"input":{"taskOutputArtifact":{"outputArtifactKey":"Output","producerTask":"mantle"}}}},"taskInfo":{"name":"crust-comp"}}' + - name: container + value: '{{workflow.parameters.implementations-a0055e53510db02175ab79ccc2c9c06c8b7bd7592f3d2ec6903b3de5d6b65431}}' + - name: task-name + value: crust-comp + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: mantle.Succeeded + name: crust-comp-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.crust-comp-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.crust-comp-driver.outputs.parameters.cached-decision}}' + depends: crust-comp-driver.Succeeded + name: crust-comp + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-mantle}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}' + - name: task-name + value: mantle + name: mantle-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.mantle-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.mantle-driver.outputs.parameters.condition}}' + depends: mantle-driver.Succeeded + name: mantle + template: comp-mantle + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/artifact_crust.yaml b/test_data/compiled-workflows/artifact_crust.yaml new file mode 100644 index 00000000000..424ef2b3566 --- /dev/null +++ b/test_data/compiled-workflows/artifact_crust.yaml @@ -0,0 +1,471 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: artifact-pipeline- +spec: + 
arguments: + parameters: + - name: components-a0055e53510db02175ab79ccc2c9c06c8b7bd7592f3d2ec6903b3de5d6b65431 + value: '{"executorLabel":"exec-crust-comp","inputDefinitions":{"artifacts":{"input":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-a0055e53510db02175ab79ccc2c9c06c8b7bd7592f3d2ec6903b3de5d6b65431 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","crust_comp"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + crust_comp(input: dsl.Dataset):\n with open(input.path, ''r'') as f:\n print(''input: + '', f.read())\n\n"],"image":"python:3.9"}' + - name: components-6dc68c2089ddf2062dbae792f6214699e1d4a930573b3b4c3cc0626bea7bee2d + value: '{"executorLabel":"exec-core-comp","outputDefinitions":{"artifacts":{"dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-6dc68c2089ddf2062dbae792f6214699e1d4a930573b3b4c3cc0626bea7bee2d + value: '{"args":["--executor_input","{{$}}","--function_to_execute","core_comp"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + core_comp(dataset: dsl.Output[dsl.Dataset]):\n with open(dataset.path, + ''w'') as f:\n f.write(''foo'')\n\n"],"image":"python:3.9"}' + - name: components-comp-core + value: '{"dag":{"outputs":{"artifacts":{"Output":{"artifactSelectors":[{"outputArtifactKey":"dataset","producerSubtask":"core-comp"}]}}},"tasks":{"core-comp":{"cachingOptions":{},"componentRef":{"name":"comp-core-comp"},"taskInfo":{"name":"core-comp"}}}},"outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: components-comp-mantle + value: '{"dag":{"outputs":{"artifacts":{"Output":{"artifactSelectors":[{"outputArtifactKey":"Output","producerSubtask":"core"}]}}},"tasks":{"core":{"cachingOptions":{},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}}},"outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: components-root + value: 
'{"dag":{"tasks":{"crust-comp":{"cachingOptions":{},"componentRef":{"name":"comp-crust-comp"},"dependentTasks":["mantle"],"inputs":{"artifacts":{"input":{"taskOutputArtifact":{"outputArtifactKey":"Output","producerTask":"mantle"}}}},"taskInfo":{"name":"crust-comp"}},"mantle":{"cachingOptions":{},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - artifact-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: 
gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-6dc68c2089ddf2062dbae792f6214699e1d4a930573b3b4c3cc0626bea7bee2d}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-core-comp"},"taskInfo":{"name":"core-comp"}}' + - name: container + value: '{{workflow.parameters.implementations-6dc68c2089ddf2062dbae792f6214699e1d4a930573b3b4c3cc0626bea7bee2d}}' + - name: task-name + value: core-comp + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: core-comp-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.core-comp-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.core-comp-driver.outputs.parameters.cached-decision}}' + depends: core-comp-driver.Succeeded + name: core-comp + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-core + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - artifact-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: 
"0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-core}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}' + - name: task-name + value: core + name: core-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.core-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.core-driver.outputs.parameters.condition}}' + depends: core-driver.Succeeded + name: core + template: comp-core + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-mantle + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-a0055e53510db02175ab79ccc2c9c06c8b7bd7592f3d2ec6903b3de5d6b65431}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-crust-comp"},"dependentTasks":["mantle"],"inputs":{"artifacts":{"input":{"taskOutputArtifact":{"outputArtifactKey":"Output","producerTask":"mantle"}}}},"taskInfo":{"name":"crust-comp"}}' + - name: container + value: '{{workflow.parameters.implementations-a0055e53510db02175ab79ccc2c9c06c8b7bd7592f3d2ec6903b3de5d6b65431}}' + - name: task-name + value: crust-comp + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: mantle.Succeeded + name: crust-comp-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.crust-comp-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.crust-comp-driver.outputs.parameters.cached-decision}}' + depends: crust-comp-driver.Succeeded + name: crust-comp + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-mantle}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}' + - name: task-name + value: mantle + name: mantle-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.mantle-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.mantle-driver.outputs.parameters.condition}}' + depends: mantle-driver.Succeeded + name: mantle + template: comp-mantle + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: 
{} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/artifacts_complex.yaml b/test_data/compiled-workflows/artifacts_complex.yaml new file mode 100644 index 00000000000..26fdf128ccd --- /dev/null +++ b/test_data/compiled-workflows/artifacts_complex.yaml @@ -0,0 +1,722 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: math-pipeline- +spec: + arguments: + parameters: + - name: components-c57c4368bb3853e287d208a6153cc7cfdd0a6392a5da536df9a7944cb4018d08 + value: '{"executorLabel":"exec-add","inputDefinitions":{"artifacts":{"in_datasets":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"isArtifactList":true}}},"outputDefinitions":{"artifacts":{"out_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-c57c4368bb3853e287d208a6153cc7cfdd0a6392a5da536df9a7944cb4018d08 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","add"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + add(\n in_datasets: Input[List[Dataset]],\n out_dataset: Output[Dataset],\n):\n nums + = []\n for dataset in in_datasets:\n with open(dataset.path) as + f:\n nums.append(int(f.read()))\n with open(out_dataset.path, + ''w'') as f:\n f.write(str(sum(nums)))\n\n"],"image":"python:3.9"}' + - name: components-be8ee4858aa0dabdc7c419be456f8e7751d0de0a26fe140352aaadb1050c4ba3 + value: '{"executorLabel":"exec-add-two-ints","inputDefinitions":{"artifacts":{"in_dataset1":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}},"in_dataset2":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}},"outputDefinitions":{"artifacts":{"out_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-be8ee4858aa0dabdc7c419be456f8e7751d0de0a26fe140352aaadb1050c4ba3 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","add_two_ints"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + add_two_ints(\n in_dataset1: Input[Dataset],\n in_dataset2: Input[Dataset],\n out_dataset: + Output[Dataset],\n):\n with open(in_dataset1.path) as f:\n in_dataset1 + = int(f.read())\n\n with open(in_dataset2.path) as f:\n in_dataset2 + = int(f.read())\n\n with open(out_dataset.path, ''w'') as f:\n f.write(str(in_dataset1 + + in_dataset2))\n\n"],"image":"python:3.9"}' + - name: components-comp-add-two-lists-of-datasets + value: '{"dag":{"outputs":{"artifacts":{"Output":{"artifactSelectors":[{"outputArtifactKey":"out_dataset","producerSubtask":"add-two-ints"}]}}},"tasks":{"add":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add"},"inputs":{"artifacts":{"in_datasets":{"componentInputArtifact":"in_datasets1"}}},"taskInfo":{"name":"add"}},"add-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-2"},"inputs":{"artifacts":{"in_datasets":{"componentInputArtifact":"in_datasets2"}}},"taskInfo":{"name":"add-2"}},"add-two-ints":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-two-ints"},"dependentTasks":["add","add-2"],"inputs":{"artifacts":{"in_dataset1":{"taskOutputArtifact":{"outputArtifactKey":"out_dataset","producerTask":"add"}},"in_dataset2":{"taskOutputArtifact":{"outputArtifactKey":"out_dataset","producerTask":"add-2"}}}},"taskInfo":{"name":"add-two-ints"}}}},"inputDefinitions":{"artifacts":{"in_datasets1":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"isArtifactList":true},"in_datasets2":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"isArtifactList":true}}},"outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: components-ddb6006c75f716bd2d06e80c76e9743f6cc6dd588e1c1dda65066ba1d0ec3b49 + value: '{"executorLabel":"exec-double","inputDefinitions":{"parameters":{"num":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"artifacts":{"out_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-ddb6006c75f716bd2d06e80c76e9743f6cc6dd588e1c1dda65066ba1d0ec3b49 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","double"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + double(\n num: int,\n out_dataset: Output[Dataset],\n):\n with open(out_dataset.path, + ''w'') as f:\n f.write(str(2 * num))\n\n"],"image":"python:3.9"}' + - name: components-comp-condition-5 + value: '{"dag":{"outputs":{"artifacts":{"pipelinechannel--double-2-out_dataset":{"artifactSelectors":[{"outputArtifactKey":"out_dataset","producerSubtask":"double-2"}]}}},"tasks":{"double-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double-2"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-3"}}},"taskInfo":{"name":"double-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-3":{"parameterType":"NUMBER_INTEGER"},"pipelinechannel--threshold":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"artifacts":{"pipelinechannel--double-2-out_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"isArtifactList":true}}}}' + - name: components-comp-for-loop-4 + value: '{"dag":{"outputs":{"artifacts":{"pipelinechannel--double-2-out_dataset":{"artifactSelectors":[{"outputArtifactKey":"pipelinechannel--double-2-out_dataset","producerSubtask":"condition-5"}]}}},"tasks":{"condition-5":{"componentRef":{"name":"comp-condition-5"},"inputs":{"parameters":{"pipelinechannel--loop-item-param-3":{"componentInputParameter":"pipelinechannel--loop-item-param-3"},"pipelinechannel--threshold":{"componentInputParameter":"pipelinechannel--threshold"}}},"taskInfo":{"name":"condition-5"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--loop-item-param-3'']) + \u003e= int(inputs.parameter_values[''pipelinechannel--threshold''])"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-3":{"parameterType":"NUMBER_INTEGER"},"pipelinechannel--threshold":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"artifacts":{"pipelinechannel--double-2-out_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"isArtifactList":true}}}}' + - name: components-comp-for-loop-2 + value: '{"dag":{"outputs":{"artifacts":{"pipelinechannel--double-2-out_dataset":{"artifactSelectors":[{"outputArtifactKey":"pipelinechannel--double-2-out_dataset","producerSubtask":"for-loop-4"}]},"pipelinechannel--double-out_dataset":{"artifactSelectors":[{"outputArtifactKey":"out_dataset","producerSubtask":"double"}]}}},"tasks":{"double":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"taskInfo":{"name":"double"}},"for-loop-4":{"componentRef":{"name":"comp-for-loop-4"},"inputs":{"parameters":{"pipelinechannel--threshold":{"componentInputParameter":"pipelinechannel--threshold"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[1, + 2, 
3]"}},"taskInfo":{"name":"for-loop-4"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-1":{"parameterType":"NUMBER_INTEGER"},"pipelinechannel--threshold":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"artifacts":{"pipelinechannel--double-2-out_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"isArtifactList":true},"pipelinechannel--double-out_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"isArtifactList":true}}}}' + - name: components-root + value: '{"dag":{"outputs":{"artifacts":{"datasets":{"artifactSelectors":[{"outputArtifactKey":"pipelinechannel--double-out_dataset","producerSubtask":"for-loop-2"}]},"sum":{"artifactSelectors":[{"outputArtifactKey":"Output","producerSubtask":"add-two-lists-of-datasets"}]}}},"tasks":{"add-two-lists-of-datasets":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-two-lists-of-datasets"},"dependentTasks":["for-loop-2"],"inputs":{"artifacts":{"in_datasets1":{"taskOutputArtifact":{"outputArtifactKey":"pipelinechannel--double-out_dataset","producerTask":"for-loop-2"}},"in_datasets2":{"taskOutputArtifact":{"outputArtifactKey":"pipelinechannel--double-2-out_dataset","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"add-two-lists-of-datasets"}},"for-loop-2":{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--threshold":{"componentInputParameter":"threshold"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}}},"inputDefinitions":{"parameters":{"threshold":{"defaultValue":2,"isOptional":true,"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"artifacts":{"datasets":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"isArtifactList":true},"sum":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - math-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + 
inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c57c4368bb3853e287d208a6153cc7cfdd0a6392a5da536df9a7944cb4018d08}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add"},"inputs":{"artifacts":{"in_datasets":{"componentInputArtifact":"in_datasets1"}}},"taskInfo":{"name":"add"}}' + - name: container + value: '{{workflow.parameters.implementations-c57c4368bb3853e287d208a6153cc7cfdd0a6392a5da536df9a7944cb4018d08}}' + - name: task-name + value: add + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: add-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.add-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.add-driver.outputs.parameters.cached-decision}}' + depends: add-driver.Succeeded + name: add + template: 
system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c57c4368bb3853e287d208a6153cc7cfdd0a6392a5da536df9a7944cb4018d08}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-2"},"inputs":{"artifacts":{"in_datasets":{"componentInputArtifact":"in_datasets2"}}},"taskInfo":{"name":"add-2"}}' + - name: container + value: '{{workflow.parameters.implementations-c57c4368bb3853e287d208a6153cc7cfdd0a6392a5da536df9a7944cb4018d08}}' + - name: task-name + value: add-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: add-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.add-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.add-2-driver.outputs.parameters.cached-decision}}' + depends: add-2-driver.Succeeded + name: add-2 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-be8ee4858aa0dabdc7c419be456f8e7751d0de0a26fe140352aaadb1050c4ba3}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-two-ints"},"dependentTasks":["add","add-2"],"inputs":{"artifacts":{"in_dataset1":{"taskOutputArtifact":{"outputArtifactKey":"out_dataset","producerTask":"add"}},"in_dataset2":{"taskOutputArtifact":{"outputArtifactKey":"out_dataset","producerTask":"add-2"}}}},"taskInfo":{"name":"add-two-ints"}}' + - name: container + value: '{{workflow.parameters.implementations-be8ee4858aa0dabdc7c419be456f8e7751d0de0a26fe140352aaadb1050c4ba3}}' + - name: task-name + value: add-two-ints + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: add.Succeeded && add-2.Succeeded + name: add-two-ints-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.add-two-ints-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.add-two-ints-driver.outputs.parameters.cached-decision}}' + depends: add-two-ints-driver.Succeeded + name: add-two-ints + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-add-two-lists-of-datasets + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ddb6006c75f716bd2d06e80c76e9743f6cc6dd588e1c1dda65066ba1d0ec3b49}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double-2"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-3"}}},"taskInfo":{"name":"double-2"}}' + - name: container + value: '{{workflow.parameters.implementations-ddb6006c75f716bd2d06e80c76e9743f6cc6dd588e1c1dda65066ba1d0ec3b49}}' + - name: task-name + value: double-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: double-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.double-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.double-2-driver.outputs.parameters.cached-decision}}' + depends: double-2-driver.Succeeded + name: double-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-5 + outputs: {} + - 
container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - math-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-5}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-5"},"inputs":{"parameters":{"pipelinechannel--loop-item-param-3":{"componentInputParameter":"pipelinechannel--loop-item-param-3"},"pipelinechannel--threshold":{"componentInputParameter":"pipelinechannel--threshold"}}},"taskInfo":{"name":"condition-5"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--loop-item-param-3'']) + \u003e= int(inputs.parameter_values[''pipelinechannel--threshold''])"}}' + - name: task-name + value: condition-5 + name: condition-5-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-5-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-5-driver.outputs.parameters.condition}}' + depends: condition-5-driver.Succeeded + name: condition-5 + template: comp-condition-5 + when: '{{tasks.condition-5-driver.outputs.parameters.condition}} != false' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-4 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-4}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: 
'{"componentRef":{"name":"comp-for-loop-4"},"inputs":{"parameters":{"pipelinechannel--threshold":{"componentInputParameter":"pipelinechannel--threshold"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-4"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-4 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-4-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-4}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-4"},"inputs":{"parameters":{"pipelinechannel--threshold":{"componentInputParameter":"pipelinechannel--threshold"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-4"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-4-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-4-for-loop-4-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ddb6006c75f716bd2d06e80c76e9743f6cc6dd588e1c1dda65066ba1d0ec3b49}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"taskInfo":{"name":"double"}}' + - name: container + value: '{{workflow.parameters.implementations-ddb6006c75f716bd2d06e80c76e9743f6cc6dd588e1c1dda65066ba1d0ec3b49}}' + - name: task-name + value: double + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: double-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.double-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.double-driver.outputs.parameters.cached-decision}}' + depends: double-driver.Succeeded + name: double + template: system-container-executor + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-4 + template: comp-for-loop-4-for-loop-4-iterator + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: 
'{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--threshold":{"componentInputParameter":"threshold"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-2 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-2-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--threshold":{"componentInputParameter":"threshold"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-2-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2-for-loop-2-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-add-two-lists-of-datasets}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-two-lists-of-datasets"},"dependentTasks":["for-loop-2"],"inputs":{"artifacts":{"in_datasets1":{"taskOutputArtifact":{"outputArtifactKey":"pipelinechannel--double-out_dataset","producerTask":"for-loop-2"}},"in_datasets2":{"taskOutputArtifact":{"outputArtifactKey":"pipelinechannel--double-2-out_dataset","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"add-two-lists-of-datasets"}}' + - name: task-name + value: add-two-lists-of-datasets + depends: for-loop-2.Succeeded + name: add-two-lists-of-datasets-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.add-two-lists-of-datasets-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.add-two-lists-of-datasets-driver.outputs.parameters.condition}}' + depends: add-two-lists-of-datasets-driver.Succeeded + name: add-two-lists-of-datasets + template: comp-add-two-lists-of-datasets + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-2 + template: comp-for-loop-2-for-loop-2-iterator + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"threshold":2}}' + - name: driver-type + value: ROOT_DAG + name: 
root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/artifacts_simple.yaml b/test_data/compiled-workflows/artifacts_simple.yaml new file mode 100644 index 00000000000..57aa78730d5 --- /dev/null +++ b/test_data/compiled-workflows/artifacts_simple.yaml @@ -0,0 +1,524 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: math-pipeline- +spec: + arguments: + parameters: + - name: components-80d990c0f4b48d97644425ebeb33e4614a065cb6fef18de932c1a962063f9ed1 + value: '{"executorLabel":"exec-add","inputDefinitions":{"artifacts":{"in_datasets":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"isArtifactList":true}}},"outputDefinitions":{"artifacts":{"out_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-80d990c0f4b48d97644425ebeb33e4614a065cb6fef18de932c1a962063f9ed1 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","add"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + add(in_datasets: Input[List[Dataset]], out_dataset: Output[Dataset]):\n nums + = []\n for dataset in in_datasets:\n with open(dataset.path) as + f:\n nums.append(int(f.read()))\n with open(out_dataset.path, + ''w'') as f:\n f.write(str(sum(nums)))\n\n"],"image":"python:3.9"}' + - name: components-699e6289d2b910228c41f94be6dd87fc0851d8dd1e68d93412ad045a9db72279 + value: '{"executorLabel":"exec-add-container","inputDefinitions":{"artifacts":{"in_datasets":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"isArtifactList":true}}},"outputDefinitions":{"artifacts":{"out_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-699e6289d2b910228c41f94be6dd87fc0851d8dd1e68d93412ad045a9db72279 + value: '{"args":["\nimport argparse\nimport json\nimport os\n\ndef main(in_datasets, + out_dataset_uri):\n in_dicts = json.loads(in_datasets)\n uris = [d[''uri''] + for d in in_dicts]\n total = 0\n for uri in uris:\n with open(uri.replace(''gs://'', + ''/gcs/'')) as f:\n total += int(f.read())\n\n outpath = out_dataset_uri.replace(''gs://'', + ''/gcs/'')\n os.makedirs(os.path.dirname(outpath), exist_ok=True)\n with + open(outpath, ''w'') as f:\n f.write(str(total))\n\nparser = argparse.ArgumentParser()\nparser.add_argument(''in_datasets'')\nparser.add_argument(''out_dataset_uri'')\nargs + = parser.parse_args()\n\nmain(args.in_datasets, 
args.out_dataset_uri)\n","{{$.inputs.artifacts[''in_datasets'']}}","{{$.outputs.artifacts[''out_dataset''].uri}}"],"command":["python","-c"],"image":"python:3.9"}' + - name: components-bd592e219a10055cde863d9b28fdaf17dfe7658370d390ac470bb7be0cf58b9b + value: '{"executorLabel":"exec-double","inputDefinitions":{"parameters":{"num":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"artifacts":{"out_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-bd592e219a10055cde863d9b28fdaf17dfe7658370d390ac470bb7be0cf58b9b + value: '{"args":["--executor_input","{{$}}","--function_to_execute","double"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + double(num: int, out_dataset: Output[Dataset]):\n with open(out_dataset.path, + ''w'') as f:\n f.write(str(2 * num))\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-2 + value: '{"dag":{"outputs":{"artifacts":{"pipelinechannel--double-out_dataset":{"artifactSelectors":[{"outputArtifactKey":"out_dataset","producerSubtask":"double"}]}}},"tasks":{"double":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"taskInfo":{"name":"double"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-1":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"artifacts":{"pipelinechannel--double-out_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"isArtifactList":true}}}}' + - name: components-root + value: '{"dag":{"outputs":{"artifacts":{"Output":{"artifactSelectors":[{"outputArtifactKey":"pipelinechannel--double-out_dataset","producerSubtask":"for-loop-2"}]}}},"tasks":{"add":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add"},"dependentTasks":["for-loop-2"],"inputs":{"artifacts":{"in_datasets":{"taskOutputArtifact":{"outputArtifactKey":"pipelinechannel--double-out_dataset","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"add"}},"add-container":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-container"},"dependentTasks":["for-loop-2"],"inputs":{"artifacts":{"in_datasets":{"taskOutputArtifact":{"outputArtifactKey":"pipelinechannel--double-out_dataset","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"add-container"}},"for-loop-2":{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}}},"outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"isArtifactList":true}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + 
serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - math-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher 
+ inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-bd592e219a10055cde863d9b28fdaf17dfe7658370d390ac470bb7be0cf58b9b}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"taskInfo":{"name":"double"}}' + - name: container + value: '{{workflow.parameters.implementations-bd592e219a10055cde863d9b28fdaf17dfe7658370d390ac470bb7be0cf58b9b}}' + - name: task-name + value: double + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: double-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.double-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.double-driver.outputs.parameters.cached-decision}}' + depends: double-driver.Succeeded + name: double + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - math-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + 
parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-2 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-2-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-2-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2-for-loop-2-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-80d990c0f4b48d97644425ebeb33e4614a065cb6fef18de932c1a962063f9ed1}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add"},"dependentTasks":["for-loop-2"],"inputs":{"artifacts":{"in_datasets":{"taskOutputArtifact":{"outputArtifactKey":"pipelinechannel--double-out_dataset","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"add"}}' + - name: container + value: '{{workflow.parameters.implementations-80d990c0f4b48d97644425ebeb33e4614a065cb6fef18de932c1a962063f9ed1}}' + - name: task-name + value: add + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: for-loop-2.Succeeded + name: add-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.add-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.add-driver.outputs.parameters.cached-decision}}' + depends: add-driver.Succeeded + name: add + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-699e6289d2b910228c41f94be6dd87fc0851d8dd1e68d93412ad045a9db72279}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-container"},"dependentTasks":["for-loop-2"],"inputs":{"artifacts":{"in_datasets":{"taskOutputArtifact":{"outputArtifactKey":"pipelinechannel--double-out_dataset","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"add-container"}}' + - name: container + value: 
'{{workflow.parameters.implementations-699e6289d2b910228c41f94be6dd87fc0851d8dd1e68d93412ad045a9db72279}}' + - name: task-name + value: add-container + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: for-loop-2.Succeeded + name: add-container-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.add-container-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.add-container-driver.outputs.parameters.cached-decision}}' + depends: add-container-driver.Succeeded + name: add-container + template: system-container-executor + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-2 + template: comp-for-loop-2-for-loop-2-iterator + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/collected_artifacts.yaml b/test_data/compiled-workflows/collected_artifacts.yaml new file mode 100644 index 00000000000..0ff79294adc --- /dev/null +++ b/test_data/compiled-workflows/collected_artifacts.yaml @@ -0,0 +1,882 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: collected-artifact-pipeline- +spec: + arguments: + parameters: + - name: components-454f675bb7800adf80b4227cf4afab17ccd8f2158391d6c83b382a19f6c06d20 + value: '{"executorLabel":"exec-create-file","inputDefinitions":{"parameters":{"content":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"file":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: implementations-454f675bb7800adf80b4227cf4afab17ccd8f2158391d6c83b382a19f6c06d20 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","create_file"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + create_file(file: Output[Artifact], content: str):\n print(f''Creating + file with content: {content}'')\n with open(file.path, ''w'') as f:\n f.write(content)\n\n"],"image":"python:3.9"}' + - name: components-8de1f97f1c6fbc2c49f794738b59fedba4bfc0367ad809393ff85e905f70886d + value: '{"executorLabel":"exec-read-single-dataset-generate-model","inputDefinitions":{"artifacts":{"data":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}},"parameters":{"id":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"results":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"}}}}}' + - name: implementations-8de1f97f1c6fbc2c49f794738b59fedba4bfc0367ad809393ff85e905f70886d + value: '{"args":["--executor_input","{{$}}","--function_to_execute","read_single_dataset_generate_model"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + read_single_dataset_generate_model(data: Dataset, id: str, results:Output[Model]):\n print(f''Reading + file: {data.path}'')\n with open(data.path, ''r'') as f:\n info + = f.read()\n with open(results.path, ''w'') as f2:\n f2.write(f\"{info}-{id}\")\n results.metadata[''model''] + = info\n results.metadata[''model_name''] = f\"model-artifact-inner-iteration-{info}-{id}\"\n\n"],"image":"python:3.9"}' + - name: components-110fc7a806182548336d0e41ebf637176c62d600d89da66d667e5241685993a3 + value: '{"executorLabel":"exec-create-dataset","inputDefinitions":{"parameters":{"content":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"data":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-110fc7a806182548336d0e41ebf637176c62d600d89da66d667e5241685993a3 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","create_dataset"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + create_dataset(data: Output[Dataset], content: str):\n print(f''Creating + file with content: {content}'')\n with open(data.path, ''w'') as f:\n f.write(content)\n\n"],"image":"python:3.9"}' + - name: components-comp-single-node-dag + value: '{"dag":{"outputs":{"artifacts":{"Output":{"artifactSelectors":[{"outputArtifactKey":"data","producerSubtask":"create-dataset"}]}}},"tasks":{"create-dataset":{"cachingOptions":{},"componentRef":{"name":"comp-create-dataset"},"inputs":{"parameters":{"content":{"componentInputParameter":"char"}}},"taskInfo":{"name":"create-dataset"}}}},"inputDefinitions":{"parameters":{"char":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: components-comp-for-loop-2 + value: '{"dag":{"outputs":{"artifacts":{"pipelinechannel--read-single-dataset-generate-model-results":{"artifactSelectors":[{"outputArtifactKey":"results","producerSubtask":"read-single-dataset-generate-model"}]},"pipelinechannel--single-node-dag-Output":{"artifactSelectors":[{"outputArtifactKey":"Output","producerSubtask":"single-node-dag"}]}}},"tasks":{"read-single-dataset-generate-model":{"cachingOptions":{},"componentRef":{"name":"comp-read-single-dataset-generate-model"},"dependentTasks":["single-node-dag"],"inputs":{"artifacts":{"data":{"taskOutputArtifact":{"outputArtifactKey":"Output","producerTask":"single-node-dag"}}},"parameters":{"id":{"componentInputParameter":"pipelinechannel--split-ids-Output-loop-item"}}},"taskInfo":{"name":"read-single-dataset-generate-model"}},"single-node-dag":{"cachingOptions":{},"componentRef":{"name":"comp-single-node-dag"},"inputs":{"parameters":{"char":{"componentInputParameter":"pipelinechannel--split-chars-Output-loop-item"}}},"taskInfo":{"name":"single-node-dag"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--split-chars-Output":{"parameterType":"LIST"},"pipelinechannel--split-chars-Output-loop-item":{"parameterType":"STRING"},"pipelinechannel--split-ids-Output-loop-item":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"pipelinechannel--read-single-dataset-generate-model-results":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"},"isArtifactList":true},"pipelinechannel--single-node-dag-Output":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"isArtifactList":true}}}}' + - name: components-f13f4d96d8af0136b7ec2fd8bb04bf61e6936dec4fb6215eb91ac03b7800c8f9 + value: '{"executorLabel":"exec-read-datasets","inputDefinitions":{"artifacts":{"data":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"isArtifactList":true}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-f13f4d96d8af0136b7ec2fd8bb04bf61e6936dec4fb6215eb91ac03b7800c8f9 + value: 
'{"args":["--executor_input","{{$}}","--function_to_execute","read_datasets"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + read_datasets(data: List[Dataset]) -\u003e str:\n for d in data:\n print(f''Reading + dataset {d.name} file: {d.path}'')\n with open(d.path, ''r'') as f:\n print(f.read())\n\n return + ''files read''\n\n"],"image":"python:3.9"}' + - name: components-a20411795c50176906ce97cafe2d672030b0584c63cc3f061d6b5fa4d8d18702 + value: '{"executorLabel":"exec-read-models","inputDefinitions":{"artifacts":{"models":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"},"isArtifactList":true}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-a20411795c50176906ce97cafe2d672030b0584c63cc3f061d6b5fa4d8d18702 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","read_models"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + read_models(models: List[Model],) -\u003e str:\n for m in models:\n print(f''Reading + model {m.name} file: {m.path}'')\n with open(m.path, ''r'') as f:\n info + = f.read()\n print(f\"Model raw data: {info}\")\n print(f\"Model + metadata: {m.metadata}\")\n return ''models read''\n\n"],"image":"python:3.9"}' + - name: components-99e3dc1afb3392e03ef7e4ef8b3f63c142e5fbc03c571b8879a6e9429b814760 + value: '{"executorLabel":"exec-read-single-file","inputDefinitions":{"artifacts":{"file":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-99e3dc1afb3392e03ef7e4ef8b3f63c142e5fbc03c571b8879a6e9429b814760 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","read_single_file"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + read_single_file(file: Artifact) -\u003e str:\n print(f''Reading file: + {file.path}'')\n with open(file.path, ''r'') as f:\n print(f.read())\n\n return + file.uri\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-1 + value: '{"dag":{"outputs":{"artifacts":{"pipelinechannel--read-single-dataset-generate-model-results":{"artifactSelectors":[{"outputArtifactKey":"pipelinechannel--read-single-dataset-generate-model-results","producerSubtask":"for-loop-2"}]}}},"tasks":{"create-file":{"cachingOptions":{},"componentRef":{"name":"comp-create-file"},"inputs":{"parameters":{"content":{"componentInputParameter":"pipelinechannel--split-ids-Output-loop-item"}}},"taskInfo":{"name":"create-file"}},"for-loop-2":{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--split-chars-Output":{"componentInputParameter":"pipelinechannel--split-chars-Output"},"pipelinechannel--split-ids-Output-loop-item":{"componentInputParameter":"pipelinechannel--split-ids-Output-loop-item"}}},"parameterIterator":{"itemInput":"pipelinechannel--split-chars-Output-loop-item","items":{"inputParameter":"pipelinechannel--split-chars-Output"}},"taskInfo":{"name":"for-loop-2"}},"read-datasets":{"cachingOptions":{},"componentRef":{"name":"comp-read-datasets"},"dependentTasks":["for-loop-2"],"inputs":{"artifacts":{"data":{"taskOutputArtifact":{"outputArtifactKey":"pipelinechannel--single-node-dag-Output","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"read-datasets"}},"read-models":{"cachingOptions":{},"componentRef":{"name":"comp-read-models"},"dependentTasks":["for-loop-2"],"inputs":{"artifacts":{"models":{"taskOutputArtifact":{"outputArtifactKey":"pipelinechannel--read-single-dataset-generate-model-results","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"read-models"}},"read-single-file":{"cachingOptions":{},"componentRef":{"name":"comp-read-single-file"},"dependentTasks":["create-file"],"inputs":{"artifacts":{"file":{"taskOutputArtifact":{"outputArtifactKey":"file","producerTask":"create-file"}}}},"taskInfo":{"name":"read-single-file"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--split-chars-Output":{"parameterType":"LIST"},"pipelinechannel--split-ids-Output":{"parameterType":"LIST"},"pipelinechannel--split-ids-Output-loop-item":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"pipelinechannel--read-single-dataset-generate-model-results":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"},"isArtifactList":true}}}}' + - name: components-194a4bfb844b1828b520d4f1d24304bbdba6a0c71f95c1e315dfbfa4e171f4ea + value: '{"executorLabel":"exec-split-chars","inputDefinitions":{"parameters":{"model_ids":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"LIST"}}}}' + - name: 
implementations-194a4bfb844b1828b520d4f1d24304bbdba6a0c71f95c1e315dfbfa4e171f4ea + value: '{"args":["--executor_input","{{$}}","--function_to_execute","split_chars"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + split_chars(model_ids: str) -\u003e list:\n return model_ids.split('','')\n\n"],"image":"python:3.9"}' + - name: components-b7700b8f98f5bc8b5097b759b614a5652ffb5baa093a27ca516cd10ce62e33de + value: '{"executorLabel":"exec-split-ids","inputDefinitions":{"parameters":{"model_ids":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"LIST"}}}}' + - name: implementations-b7700b8f98f5bc8b5097b759b614a5652ffb5baa093a27ca516cd10ce62e33de + value: '{"args":["--executor_input","{{$}}","--function_to_execute","split_ids"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + split_ids(model_ids: str) -\u003e list:\n return model_ids.split('','')\n\n"],"image":"python:3.9"}' + - name: components-comp-collecting-artifacts + value: 
'{"dag":{"outputs":{"artifacts":{"Output":{"artifactSelectors":[{"outputArtifactKey":"pipelinechannel--read-single-dataset-generate-model-results","producerSubtask":"for-loop-1"}]}}},"tasks":{"for-loop-1":{"componentRef":{"name":"comp-for-loop-1"},"dependentTasks":["split-chars","split-ids"],"inputs":{"parameters":{"pipelinechannel--split-chars-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-chars"}},"pipelinechannel--split-ids-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-ids"}}}},"parameterIterator":{"itemInput":"pipelinechannel--split-ids-Output-loop-item","items":{"inputParameter":"pipelinechannel--split-ids-Output"}},"taskInfo":{"name":"for-loop-1"}},"split-chars":{"cachingOptions":{},"componentRef":{"name":"comp-split-chars"},"inputs":{"parameters":{"model_ids":{"componentInputParameter":"model_chars"}}},"taskInfo":{"name":"split-chars"}},"split-ids":{"cachingOptions":{},"componentRef":{"name":"comp-split-ids"},"inputs":{"parameters":{"model_ids":{"componentInputParameter":"model_ids"}}},"taskInfo":{"name":"split-ids"}}}},"inputDefinitions":{"parameters":{"model_chars":{"defaultValue":"","isOptional":true,"parameterType":"STRING"},"model_ids":{"defaultValue":"","isOptional":true,"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"},"isArtifactList":true}}}}' + - name: components-root + value: '{"dag":{"tasks":{"collecting-artifacts":{"cachingOptions":{},"componentRef":{"name":"comp-collecting-artifacts"},"inputs":{"parameters":{"model_chars":{"runtimeValue":{"constant":"x,y,z"}},"model_ids":{"runtimeValue":{"constant":"s1,s2,s3"}}}},"taskInfo":{"name":"collecting-artifacts"}},"read-models":{"cachingOptions":{},"componentRef":{"name":"comp-read-models-2"},"dependentTasks":["collecting-artifacts"],"inputs":{"artifacts":{"models":{"taskOutputArtifact":{"outputArtifactKey":"Output","producerTask":"collecting-artifacts"}}}},"taskInfo":{"name":"read-models"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - collected-artifact-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: 
ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-110fc7a806182548336d0e41ebf637176c62d600d89da66d667e5241685993a3}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-create-dataset"},"inputs":{"parameters":{"content":{"componentInputParameter":"char"}}},"taskInfo":{"name":"create-dataset"}}' + - name: container + value: '{{workflow.parameters.implementations-110fc7a806182548336d0e41ebf637176c62d600d89da66d667e5241685993a3}}' + - name: task-name + value: create-dataset + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: create-dataset-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.create-dataset-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + 
name: cached-decision + value: '{{tasks.create-dataset-driver.outputs.parameters.cached-decision}}' + depends: create-dataset-driver.Succeeded + name: create-dataset + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-single-node-dag + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - collected-artifact-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-8de1f97f1c6fbc2c49f794738b59fedba4bfc0367ad809393ff85e905f70886d}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-read-single-dataset-generate-model"},"dependentTasks":["single-node-dag"],"inputs":{"artifacts":{"data":{"taskOutputArtifact":{"outputArtifactKey":"Output","producerTask":"single-node-dag"}}},"parameters":{"id":{"componentInputParameter":"pipelinechannel--split-ids-Output-loop-item"}}},"taskInfo":{"name":"read-single-dataset-generate-model"}}' + - name: container + value: '{{workflow.parameters.implementations-8de1f97f1c6fbc2c49f794738b59fedba4bfc0367ad809393ff85e905f70886d}}' + - name: task-name + value: read-single-dataset-generate-model + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: single-node-dag.Succeeded + name: read-single-dataset-generate-model-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.read-single-dataset-generate-model-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.read-single-dataset-generate-model-driver.outputs.parameters.cached-decision}}' + depends: 
read-single-dataset-generate-model-driver.Succeeded + name: read-single-dataset-generate-model + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-single-node-dag}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-single-node-dag"},"inputs":{"parameters":{"char":{"componentInputParameter":"pipelinechannel--split-chars-Output-loop-item"}}},"taskInfo":{"name":"single-node-dag"}}' + - name: task-name + value: single-node-dag + name: single-node-dag-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.single-node-dag-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.single-node-dag-driver.outputs.parameters.condition}}' + depends: single-node-dag-driver.Succeeded + name: single-node-dag + template: comp-single-node-dag + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--split-chars-Output":{"componentInputParameter":"pipelinechannel--split-chars-Output"},"pipelinechannel--split-ids-Output-loop-item":{"componentInputParameter":"pipelinechannel--split-ids-Output-loop-item"}}},"parameterIterator":{"itemInput":"pipelinechannel--split-chars-Output-loop-item","items":{"inputParameter":"pipelinechannel--split-chars-Output"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-2 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-2-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--split-chars-Output":{"componentInputParameter":"pipelinechannel--split-chars-Output"},"pipelinechannel--split-ids-Output-loop-item":{"componentInputParameter":"pipelinechannel--split-ids-Output-loop-item"}}},"parameterIterator":{"itemInput":"pipelinechannel--split-chars-Output-loop-item","items":{"inputParameter":"pipelinechannel--split-chars-Output"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-2-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + 
metadata: {} + name: comp-for-loop-2-for-loop-2-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-454f675bb7800adf80b4227cf4afab17ccd8f2158391d6c83b382a19f6c06d20}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-create-file"},"inputs":{"parameters":{"content":{"componentInputParameter":"pipelinechannel--split-ids-Output-loop-item"}}},"taskInfo":{"name":"create-file"}}' + - name: container + value: '{{workflow.parameters.implementations-454f675bb7800adf80b4227cf4afab17ccd8f2158391d6c83b382a19f6c06d20}}' + - name: task-name + value: create-file + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: create-file-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.create-file-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.create-file-driver.outputs.parameters.cached-decision}}' + depends: create-file-driver.Succeeded + name: create-file + template: system-container-executor + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-2 + template: comp-for-loop-2-for-loop-2-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-f13f4d96d8af0136b7ec2fd8bb04bf61e6936dec4fb6215eb91ac03b7800c8f9}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-read-datasets"},"dependentTasks":["for-loop-2"],"inputs":{"artifacts":{"data":{"taskOutputArtifact":{"outputArtifactKey":"pipelinechannel--single-node-dag-Output","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"read-datasets"}}' + - name: container + value: '{{workflow.parameters.implementations-f13f4d96d8af0136b7ec2fd8bb04bf61e6936dec4fb6215eb91ac03b7800c8f9}}' + - name: task-name + value: read-datasets + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: for-loop-2.Succeeded + name: read-datasets-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.read-datasets-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.read-datasets-driver.outputs.parameters.cached-decision}}' + depends: read-datasets-driver.Succeeded + name: read-datasets + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-a20411795c50176906ce97cafe2d672030b0584c63cc3f061d6b5fa4d8d18702}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-read-models"},"dependentTasks":["for-loop-2"],"inputs":{"artifacts":{"models":{"taskOutputArtifact":{"outputArtifactKey":"pipelinechannel--read-single-dataset-generate-model-results","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"read-models"}}' + - name: container + value: '{{workflow.parameters.implementations-a20411795c50176906ce97cafe2d672030b0584c63cc3f061d6b5fa4d8d18702}}' + - name: task-name + value: read-models + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: for-loop-2.Succeeded + name: read-models-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.read-models-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.read-models-driver.outputs.parameters.cached-decision}}' 
+ depends: read-models-driver.Succeeded + name: read-models + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-99e3dc1afb3392e03ef7e4ef8b3f63c142e5fbc03c571b8879a6e9429b814760}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-read-single-file"},"dependentTasks":["create-file"],"inputs":{"artifacts":{"file":{"taskOutputArtifact":{"outputArtifactKey":"file","producerTask":"create-file"}}}},"taskInfo":{"name":"read-single-file"}}' + - name: container + value: '{{workflow.parameters.implementations-99e3dc1afb3392e03ef7e4ef8b3f63c142e5fbc03c571b8879a6e9429b814760}}' + - name: task-name + value: read-single-file + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: create-file.Succeeded + name: read-single-file-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.read-single-file-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.read-single-file-driver.outputs.parameters.cached-decision}}' + depends: read-single-file-driver.Succeeded + name: read-single-file + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-1 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-1}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-1"},"dependentTasks":["split-chars","split-ids"],"inputs":{"parameters":{"pipelinechannel--split-chars-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-chars"}},"pipelinechannel--split-ids-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-ids"}}}},"parameterIterator":{"itemInput":"pipelinechannel--split-ids-Output-loop-item","items":{"inputParameter":"pipelinechannel--split-ids-Output"}},"taskInfo":{"name":"for-loop-1"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-1 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-1-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-1"},"dependentTasks":["split-chars","split-ids"],"inputs":{"parameters":{"pipelinechannel--split-chars-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-chars"}},"pipelinechannel--split-ids-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-ids"}}}},"parameterIterator":{"itemInput":"pipelinechannel--split-ids-Output-loop-item","items":{"inputParameter":"pipelinechannel--split-ids-Output"}},"taskInfo":{"name":"for-loop-1"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - 
name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-1-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-1-for-loop-1-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: split-chars.Succeeded && split-ids.Succeeded + name: for-loop-1 + template: comp-for-loop-1-for-loop-1-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-194a4bfb844b1828b520d4f1d24304bbdba6a0c71f95c1e315dfbfa4e171f4ea}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-split-chars"},"inputs":{"parameters":{"model_ids":{"componentInputParameter":"model_chars"}}},"taskInfo":{"name":"split-chars"}}' + - name: container + value: '{{workflow.parameters.implementations-194a4bfb844b1828b520d4f1d24304bbdba6a0c71f95c1e315dfbfa4e171f4ea}}' + - name: task-name + value: split-chars + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: split-chars-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.split-chars-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.split-chars-driver.outputs.parameters.cached-decision}}' + depends: split-chars-driver.Succeeded + name: split-chars + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-b7700b8f98f5bc8b5097b759b614a5652ffb5baa093a27ca516cd10ce62e33de}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-split-ids"},"inputs":{"parameters":{"model_ids":{"componentInputParameter":"model_ids"}}},"taskInfo":{"name":"split-ids"}}' + - name: container + value: '{{workflow.parameters.implementations-b7700b8f98f5bc8b5097b759b614a5652ffb5baa093a27ca516cd10ce62e33de}}' + - name: task-name + value: split-ids + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: split-ids-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.split-ids-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.split-ids-driver.outputs.parameters.cached-decision}}' + depends: split-ids-driver.Succeeded + name: split-ids + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-collecting-artifacts + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-collecting-artifacts}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-collecting-artifacts"},"inputs":{"parameters":{"model_chars":{"runtimeValue":{"constant":"x,y,z"}},"model_ids":{"runtimeValue":{"constant":"s1,s2,s3"}}}},"taskInfo":{"name":"collecting-artifacts"}}' + - name: task-name + value: collecting-artifacts + name: collecting-artifacts-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: 
'{{tasks.collecting-artifacts-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.collecting-artifacts-driver.outputs.parameters.condition}}' + depends: collecting-artifacts-driver.Succeeded + name: collecting-artifacts + template: comp-collecting-artifacts + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-a20411795c50176906ce97cafe2d672030b0584c63cc3f061d6b5fa4d8d18702}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-read-models-2"},"dependentTasks":["collecting-artifacts"],"inputs":{"artifacts":{"models":{"taskOutputArtifact":{"outputArtifactKey":"Output","producerTask":"collecting-artifacts"}}}},"taskInfo":{"name":"read-models"}}' + - name: container + value: '{{workflow.parameters.implementations-a20411795c50176906ce97cafe2d672030b0584c63cc3f061d6b5fa4d8d18702}}' + - name: task-name + value: read-models + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: collecting-artifacts.Succeeded + name: read-models-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.read-models-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.read-models-driver.outputs.parameters.cached-decision}}' + depends: read-models-driver.Succeeded + name: read-models + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/collected_parameters.yaml b/test_data/compiled-workflows/collected_parameters.yaml new file mode 100644 index 00000000000..0f758e2664a --- /dev/null +++ b/test_data/compiled-workflows/collected_parameters.yaml @@ -0,0 +1,617 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: collected-param-pipeline- +spec: + arguments: + parameters: + - name: components-7a820e79c6578d36752f6fda7d48b8a137a50faa9bd35b332acc7fb844f2a95a + value: '{"executorLabel":"exec-consume-ids","inputDefinitions":{"parameters":{"ids":{"parameterType":"LIST"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-7a820e79c6578d36752f6fda7d48b8a137a50faa9bd35b332acc7fb844f2a95a + value: '{"args":["--executor_input","{{$}}","--function_to_execute","consume_ids"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + consume_ids(ids: List[str]) -\u003e str:\n for id in ids:\n print(f''Consuming: + {id}'')\n return ''completed''\n\n"],"image":"python:3.9"}' + - name: components-9188bbf5b36c887074ca4eb40d70e9941b5a8c7907d6f07c5c30c0f5e2d5d77f + value: '{"executorLabel":"exec-consume-single-id","inputDefinitions":{"parameters":{"id":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-9188bbf5b36c887074ca4eb40d70e9941b5a8c7907d6f07c5c30c0f5e2d5d77f + value: '{"args":["--executor_input","{{$}}","--function_to_execute","consume_single_id"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + consume_single_id(id: str) -\u003e str:\n print(f''Consuming single: {id}'')\n return + ''completed''\n\n"],"image":"python:3.9"}' + - name: components-df2373cb8810b1a0954f3daeec49ae019a9421b35f7405c8e212dbfd47c1d48c + value: '{"executorLabel":"exec-prepend-id","inputDefinitions":{"parameters":{"content":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-df2373cb8810b1a0954f3daeec49ae019a9421b35f7405c8e212dbfd47c1d48c + value: '{"args":["--executor_input","{{$}}","--function_to_execute","prepend_id"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + prepend_id(content: str) -\u003e str:\n print(f\"prepending: {content} + with ''model_id''\")\n return f''model_id_{content}''\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-1 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--prepend-id-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"prepend-id"}}}},"tasks":{"consume-single-id":{"cachingOptions":{},"componentRef":{"name":"comp-consume-single-id"},"dependentTasks":["prepend-id"],"inputs":{"parameters":{"id":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"prepend-id"}}}},"taskInfo":{"name":"consume-single-id"}},"prepend-id":{"cachingOptions":{},"componentRef":{"name":"comp-prepend-id"},"inputs":{"parameters":{"content":{"componentInputParameter":"pipelinechannel--split-ids-Output-loop-item"}}},"taskInfo":{"name":"prepend-id"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--split-ids-Output":{"parameterType":"LIST"},"pipelinechannel--split-ids-Output-loop-item":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"pipelinechannel--prepend-id-Output":{"parameterType":"LIST"}}}}' + - name: components-4dc5ff1bee8506864b281bf8b8add721ff9352165ca4734c110638f55de15497 + value: '{"executorLabel":"exec-split-ids","inputDefinitions":{"parameters":{"ids":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"LIST"}}}}' + - name: implementations-4dc5ff1bee8506864b281bf8b8add721ff9352165ca4734c110638f55de15497 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","split_ids"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + split_ids(ids: str) -\u003e list:\n return ids.split('','')\n\n"],"image":"python:3.9"}' + - name: components-comp-collecting-parameters + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"pipelinechannel--prepend-id-Output","producerSubtask":"for-loop-1"}}}},"tasks":{"consume-ids":{"cachingOptions":{},"componentRef":{"name":"comp-consume-ids"},"dependentTasks":["for-loop-1"],"inputs":{"parameters":{"ids":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--prepend-id-Output","producerTask":"for-loop-1"}}}},"taskInfo":{"name":"consume-ids"}},"for-loop-1":{"componentRef":{"name":"comp-for-loop-1"},"dependentTasks":["split-ids"],"inputs":{"parameters":{"pipelinechannel--split-ids-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-ids"}}}},"parameterIterator":{"itemInput":"pipelinechannel--split-ids-Output-loop-item","items":{"inputParameter":"pipelinechannel--split-ids-Output"}},"taskInfo":{"name":"for-loop-1"}},"split-ids":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-split-ids"},"inputs":{"parameters":{"ids":{"componentInputParameter":"model_ids"}}},"taskInfo":{"name":"split-ids"}}}},"inputDefinitions":{"parameters":{"model_ids":{"defaultValue":"","isOptional":true,"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"LIST"}}}}' + - name: components-root + value: '{"dag":{"tasks":{"collecting-parameters":{"cachingOptions":{},"componentRef":{"name":"comp-collecting-parameters"},"inputs":{"parameters":{"model_ids":{"runtimeValue":{"constant":"s1,s2,s3"}}}},"taskInfo":{"name":"collecting-parameters"}},"consume-ids":{"cachingOptions":{},"componentRef":{"name":"comp-consume-ids-2"},"dependentTasks":["collecting-parameters"],"inputs":{"parameters":{"ids":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"collecting-parameters"}}}},"taskInfo":{"name":"consume-ids"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - collected-param-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - 
--condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-9188bbf5b36c887074ca4eb40d70e9941b5a8c7907d6f07c5c30c0f5e2d5d77f}}' + - name: task + value: 
'{"cachingOptions":{},"componentRef":{"name":"comp-consume-single-id"},"dependentTasks":["prepend-id"],"inputs":{"parameters":{"id":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"prepend-id"}}}},"taskInfo":{"name":"consume-single-id"}}' + - name: container + value: '{{workflow.parameters.implementations-9188bbf5b36c887074ca4eb40d70e9941b5a8c7907d6f07c5c30c0f5e2d5d77f}}' + - name: task-name + value: consume-single-id + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: prepend-id.Succeeded + name: consume-single-id-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.consume-single-id-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.consume-single-id-driver.outputs.parameters.cached-decision}}' + depends: consume-single-id-driver.Succeeded + name: consume-single-id + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-df2373cb8810b1a0954f3daeec49ae019a9421b35f7405c8e212dbfd47c1d48c}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-prepend-id"},"inputs":{"parameters":{"content":{"componentInputParameter":"pipelinechannel--split-ids-Output-loop-item"}}},"taskInfo":{"name":"prepend-id"}}' + - name: container + value: '{{workflow.parameters.implementations-df2373cb8810b1a0954f3daeec49ae019a9421b35f7405c8e212dbfd47c1d48c}}' + - name: task-name + value: prepend-id + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: prepend-id-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.prepend-id-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.prepend-id-driver.outputs.parameters.cached-decision}}' + depends: prepend-id-driver.Succeeded + name: prepend-id + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-1 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - collected-param-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: 
"" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-1}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-1"},"dependentTasks":["split-ids"],"inputs":{"parameters":{"pipelinechannel--split-ids-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-ids"}}}},"parameterIterator":{"itemInput":"pipelinechannel--split-ids-Output-loop-item","items":{"inputParameter":"pipelinechannel--split-ids-Output"}},"taskInfo":{"name":"for-loop-1"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-1 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-1-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-1"},"dependentTasks":["split-ids"],"inputs":{"parameters":{"pipelinechannel--split-ids-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-ids"}}}},"parameterIterator":{"itemInput":"pipelinechannel--split-ids-Output-loop-item","items":{"inputParameter":"pipelinechannel--split-ids-Output"}},"taskInfo":{"name":"for-loop-1"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-1-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-1-for-loop-1-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-7a820e79c6578d36752f6fda7d48b8a137a50faa9bd35b332acc7fb844f2a95a}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-consume-ids"},"dependentTasks":["for-loop-1"],"inputs":{"parameters":{"ids":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--prepend-id-Output","producerTask":"for-loop-1"}}}},"taskInfo":{"name":"consume-ids"}}' + - name: container + value: '{{workflow.parameters.implementations-7a820e79c6578d36752f6fda7d48b8a137a50faa9bd35b332acc7fb844f2a95a}}' + - name: task-name + value: consume-ids + - name: 
parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: for-loop-1.Succeeded + name: consume-ids-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.consume-ids-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.consume-ids-driver.outputs.parameters.cached-decision}}' + depends: consume-ids-driver.Succeeded + name: consume-ids + template: system-container-executor + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: split-ids.Succeeded + name: for-loop-1 + template: comp-for-loop-1-for-loop-1-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-4dc5ff1bee8506864b281bf8b8add721ff9352165ca4734c110638f55de15497}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-split-ids"},"inputs":{"parameters":{"ids":{"componentInputParameter":"model_ids"}}},"taskInfo":{"name":"split-ids"}}' + - name: container + value: '{{workflow.parameters.implementations-4dc5ff1bee8506864b281bf8b8add721ff9352165ca4734c110638f55de15497}}' + - name: task-name + value: split-ids + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: split-ids-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.split-ids-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.split-ids-driver.outputs.parameters.cached-decision}}' + depends: split-ids-driver.Succeeded + name: split-ids + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-collecting-parameters + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-collecting-parameters}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-collecting-parameters"},"inputs":{"parameters":{"model_ids":{"runtimeValue":{"constant":"s1,s2,s3"}}}},"taskInfo":{"name":"collecting-parameters"}}' + - name: task-name + value: collecting-parameters + name: collecting-parameters-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.collecting-parameters-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.collecting-parameters-driver.outputs.parameters.condition}}' + depends: collecting-parameters-driver.Succeeded + name: collecting-parameters + template: comp-collecting-parameters + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-7a820e79c6578d36752f6fda7d48b8a137a50faa9bd35b332acc7fb844f2a95a}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-consume-ids-2"},"dependentTasks":["collecting-parameters"],"inputs":{"parameters":{"ids":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"collecting-parameters"}}}},"taskInfo":{"name":"consume-ids"}}' + - name: container + value: '{{workflow.parameters.implementations-7a820e79c6578d36752f6fda7d48b8a137a50faa9bd35b332acc7fb844f2a95a}}' + - name: task-name + value: consume-ids + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: collecting-parameters.Succeeded + name: consume-ids-driver + template: system-container-driver + - 
arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.consume-ids-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.consume-ids-driver.outputs.parameters.cached-decision}}' + depends: consume-ids-driver.Succeeded + name: consume-ids + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/component_with_metadata_fields.yaml b/test_data/compiled-workflows/component_with_metadata_fields.yaml new file mode 100644 index 00000000000..126951877da --- /dev/null +++ b/test_data/compiled-workflows/component_with_metadata_fields.yaml @@ -0,0 +1,384 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: dataset-joiner- +spec: + arguments: + parameters: + - name: components-3e4e63ec82d130c2d86a0de3eb781d6a47ff58129bf5f5aa0aa8891ef448dc7f + value: '{"executorLabel":"exec-dataset-joiner","inputDefinitions":{"artifacts":{"dataset_a":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"description":"First + dataset."},"dataset_b":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"description":"Second + dataset."}}},"outputDefinitions":{"artifacts":{"out_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"description":"The + concatenated dataset."}},"parameters":{"Output":{"description":"The concatenated + string.","parameterType":"STRING"}}}}' + - name: implementations-3e4e63ec82d130c2d86a0de3eb781d6a47ff58129bf5f5aa0aa8891ef448dc7f + value: '{"args":["--executor_input","{{$}}","--function_to_execute","dataset_joiner"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + dataset_joiner(\n dataset_a: Input[Dataset],\n dataset_b: Input[Dataset],\n out_dataset: + Output[Dataset],\n) -\u003e str:\n \"\"\"Concatenate dataset_a and dataset_b.\n\n Also + returns the concatenated string.\n\n Args:\n dataset_a: First dataset.\n dataset_b: + Second dataset.\n\n Returns:\n out_dataset: The concatenated dataset.\n Output: + The concatenated string.\n \"\"\"\n with open(dataset_a.path) as f:\n content_a + = f.read()\n\n with open(dataset_b.path) as f:\n content_b = f.read()\n\n concatenated_string + = content_a + content_b\n with open(out_dataset.path, ''w'') as f:\n f.write(concatenated_string)\n\n return + concatenated_string\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"outputs":{"artifacts":{"out_dataset":{"artifactSelectors":[{"outputArtifactKey":"out_dataset","producerSubtask":"dataset-joiner"}]}},"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"dataset-joiner"}}}},"tasks":{"dataset-joiner":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-dataset-joiner"},"inputs":{"artifacts":{"dataset_a":{"componentInputArtifact":"dataset_a"},"dataset_b":{"componentInputArtifact":"dataset_b"}}},"taskInfo":{"name":"dataset-joiner"}}}},"inputDefinitions":{"artifacts":{"dataset_a":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"description":"First + dataset."},"dataset_b":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"description":"Second + dataset."}}},"outputDefinitions":{"artifacts":{"out_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"description":"The + concatenated dataset."}},"parameters":{"Output":{"description":"The concatenated + string.","parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - dataset-joiner + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" 
+ - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-3e4e63ec82d130c2d86a0de3eb781d6a47ff58129bf5f5aa0aa8891ef448dc7f}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-dataset-joiner"},"inputs":{"artifacts":{"dataset_a":{"componentInputArtifact":"dataset_a"},"dataset_b":{"componentInputArtifact":"dataset_b"}}},"taskInfo":{"name":"dataset-joiner"}}' + - 
name: container + value: '{{workflow.parameters.implementations-3e4e63ec82d130c2d86a0de3eb781d6a47ff58129bf5f5aa0aa8891ef448dc7f}}' + - name: task-name + value: dataset-joiner + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: dataset-joiner-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.dataset-joiner-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.dataset-joiner-driver.outputs.parameters.cached-decision}}' + depends: dataset-joiner-driver.Succeeded + name: dataset-joiner + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - dataset-joiner + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/component_with_optional_inputs.yaml b/test_data/compiled-workflows/component_with_optional_inputs.yaml new file mode 100644 index 00000000000..e754d7fe79b --- /dev/null +++ b/test_data/compiled-workflows/component_with_optional_inputs.yaml @@ -0,0 +1,373 
@@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: v2-component-optional-input- +spec: + arguments: + parameters: + - name: components-305fccf43860a79b150cabe2f8d8140dd3c6539588f7e9c3f1788e775bd62887 + value: '{"executorLabel":"exec-component-op","inputDefinitions":{"parameters":{"input1":{"defaultValue":"default + value","isOptional":true,"parameterType":"STRING"},"input2":{"isOptional":true,"parameterType":"STRING"},"input3":{"isOptional":true,"parameterType":"STRING"}}}}' + - name: implementations-305fccf43860a79b150cabe2f8d8140dd3c6539588f7e9c3f1788e775bd62887 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_op"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + component_op(\n input1: str = ''default value'',\n input2: Optional[str] + = None,\n input3: Optional[str] = None,\n):\n print(f''input1: {input1}, + type: {type(input1)}'')\n print(f''input2: {input2}, type: {type(input2)}'')\n print(f''input3: + {input3}, type: {type(input3)}'')\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"component-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-op"},"inputs":{"parameters":{"input1":{"runtimeValue":{"constant":"Hello"}},"input2":{"runtimeValue":{"constant":"World"}}}},"taskInfo":{"name":"component-op"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - v2-component-optional-input + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: 
ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-305fccf43860a79b150cabe2f8d8140dd3c6539588f7e9c3f1788e775bd62887}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-op"},"inputs":{"parameters":{"input1":{"runtimeValue":{"constant":"Hello"}},"input2":{"runtimeValue":{"constant":"World"}}}},"taskInfo":{"name":"component-op"}}' + - name: container + value: '{{workflow.parameters.implementations-305fccf43860a79b150cabe2f8d8140dd3c6539588f7e9c3f1788e775bd62887}}' + - name: task-name + value: component-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: 
'{{tasks.component-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component-op-driver.outputs.parameters.cached-decision}}' + depends: component-op-driver.Succeeded + name: component-op + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - v2-component-optional-input + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/component_with_pip_index_urls.yaml b/test_data/compiled-workflows/component_with_pip_index_urls.yaml new file mode 100644 index 00000000000..977b4d68d7c --- /dev/null +++ b/test_data/compiled-workflows/component_with_pip_index_urls.yaml @@ -0,0 +1,371 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: v2-component-pip-index-urls- +spec: + arguments: + parameters: + - name: components-193733112d59e5469b4142adfbe2c9deed6bad8e824a72821f68347f1923663f + value: '{"executorLabel":"exec-component-op"}' + - name: 
implementations-193733112d59e5469b4142adfbe2c9deed6bad8e824a72821f68347f1923663f + value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_op"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location --index-url https://pypi.org/simple + --trusted-host https://pypi.org/simple ''yapf'' \u0026\u0026 python3 -m + pip install --quiet --no-warn-script-location --index-url https://pypi.org/simple + --trusted-host https://pypi.org/simple ''kfp==2.14.3'' ''--no-deps'' ''typing-extensions\u003e=3.7.4,\u003c5; + python_version\u003c\"3.9\"'' \u0026\u0026 \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp + -d)\n\nprintf \"%s\" \"$0\" \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true + python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + component_op():\n import yapf\n print(dir(yapf))\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"component-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-op"},"taskInfo":{"name":"component-op"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - v2-component-pip-index-urls + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: 
/tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-193733112d59e5469b4142adfbe2c9deed6bad8e824a72821f68347f1923663f}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-op"},"taskInfo":{"name":"component-op"}}' + - name: container + value: '{{workflow.parameters.implementations-193733112d59e5469b4142adfbe2c9deed6bad8e824a72821f68347f1923663f}}' + - name: task-name + value: component-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component-op-driver.outputs.parameters.cached-decision}}' + depends: component-op-driver.Succeeded + name: component-op + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - v2-component-pip-index-urls + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - 
--runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/component_with_pip_install.yaml b/test_data/compiled-workflows/component_with_pip_install.yaml new file mode 100644 index 00000000000..dfcf35c6de7 --- /dev/null +++ b/test_data/compiled-workflows/component_with_pip_install.yaml @@ -0,0 +1,371 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: component-with-pip-install- +spec: + arguments: + parameters: + - name: components-802eb316f0b783824e20b61d8170192b475638db1d585e4b891cfe17e7c76602 + value: '{"executorLabel":"exec-component-with-pip-install"}' + - name: implementations-802eb316f0b783824e20b61d8170192b475638db1d585e4b891cfe17e7c76602 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_with_pip_install"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location --index-url https://pypi.org/simple + --trusted-host https://pypi.org/simple ''yapf'' \u0026\u0026 python3 -m + pip install --quiet --no-warn-script-location --index-url https://pypi.org/simple + --trusted-host https://pypi.org/simple ''kfp==2.14.3'' ''--no-deps'' ''typing-extensions\u003e=3.7.4,\u003c5; + python_version\u003c\"3.9\"'' \u0026\u0026 \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp + -d)\n\nprintf \"%s\" \"$0\" \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true + python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + component_with_pip_install():\n import yapf\n print(dir(yapf))\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"component-with-pip-install":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-with-pip-install"},"taskInfo":{"name":"component-with-pip-install"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - component-with-pip-install + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: 
executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-802eb316f0b783824e20b61d8170192b475638db1d585e4b891cfe17e7c76602}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-with-pip-install"},"taskInfo":{"name":"component-with-pip-install"}}' + - name: container + value: '{{workflow.parameters.implementations-802eb316f0b783824e20b61d8170192b475638db1d585e4b891cfe17e7c76602}}' + - name: task-name + value: component-with-pip-install + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-with-pip-install-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component-with-pip-install-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component-with-pip-install-driver.outputs.parameters.cached-decision}}' + depends: component-with-pip-install-driver.Succeeded + name: component-with-pip-install + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - component-with-pip-install + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - 
'{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/component_with_pip_install_in_venv.yaml b/test_data/compiled-workflows/component_with_pip_install_in_venv.yaml new file mode 100644 index 00000000000..1f8a99a561d --- /dev/null +++ b/test_data/compiled-workflows/component_with_pip_install_in_venv.yaml @@ -0,0 +1,372 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: component-with-pip-install- +spec: + arguments: + parameters: + - name: components-2074f0535aaa76760a0be660778c79824ceacab159cd4d83a847c6a3876650e0 + value: '{"executorLabel":"exec-component-with-pip-install"}' + - name: implementations-2074f0535aaa76760a0be660778c79824ceacab159cd4d83a847c6a3876650e0 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_with_pip_install"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location --index-url https://pypi.org/simple + --trusted-host https://pypi.org/simple ''yapf'' \u0026\u0026 \nexport PIP_DISABLE_PIP_VERSION_CHECK=1\ntmp=$(mktemp + -d)\npython3 -m venv \"$tmp/venv\" --system-site-packages\n. 
\"$tmp/venv/bin/activate\"\n + python3 -m pip install --quiet --no-warn-script-location --index-url https://pypi.org/simple + --trusted-host https://pypi.org/simple ''kfp==2.14.3'' ''--no-deps'' ''typing-extensions\u003e=3.7.4,\u003c5; + python_version\u003c\"3.9\"'' \u0026\u0026 \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp + -d)\n\nprintf \"%s\" \"$0\" \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true + python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + component_with_pip_install():\n import yapf\n\n print(dir(yapf))\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"component-with-pip-install":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-with-pip-install"},"taskInfo":{"name":"component-with-pip-install"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - component-with-pip-install + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - 
should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-2074f0535aaa76760a0be660778c79824ceacab159cd4d83a847c6a3876650e0}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-with-pip-install"},"taskInfo":{"name":"component-with-pip-install"}}' + - name: container + value: '{{workflow.parameters.implementations-2074f0535aaa76760a0be660778c79824ceacab159cd4d83a847c6a3876650e0}}' + - name: task-name + value: component-with-pip-install + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-with-pip-install-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component-with-pip-install-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component-with-pip-install-driver.outputs.parameters.cached-decision}}' + depends: component-with-pip-install-driver.Succeeded + name: component-with-pip-install + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - component-with-pip-install + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - 
'{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/components_with_optional_artifacts.yaml b/test_data/compiled-workflows/components_with_optional_artifacts.yaml new file mode 100644 index 00000000000..4bc933543bc --- /dev/null +++ b/test_data/compiled-workflows/components_with_optional_artifacts.yaml @@ -0,0 +1,589 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: optional-artifact-pipeline- +spec: + arguments: + parameters: + - name: components-950ab2ff916de191d4701d45acbc5dce7cd1edca6fa6b0226245176343e4af97 + value: '{"executorLabel":"exec-custom-artifact-printer","inputDefinitions":{"artifacts":{"artifact":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"},"isOptional":true}}}}' + - name: implementations-950ab2ff916de191d4701d45acbc5dce7cd1edca6fa6b0226245176343e4af97 + value: '{"command":["{\"IfPresent\": {\"InputName\": \"artifact\", \"Then\": + [\"echo\", \"{{$.inputs.artifacts[''artifact''].uri}}\"], \"Else\": [\"echo\", + \"No artifact provided!\"]}}"],"image":"alpine"}' + - name: components-comp-importer + value: '{"executorLabel":"exec-importer","inputDefinitions":{"parameters":{"uri":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"artifact":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-comp-importer + value: '{"artifactUri":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"},"typeSchema":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}' + - name: components-b375628bfbe3297e8615b48608c3bb9e5702145c781f5d43ce35416859ad7994 + value: 
'{"executorLabel":"exec-python-artifact-printer","inputDefinitions":{"artifacts":{"artifact":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"},"isOptional":true}}}}' + - name: implementations-b375628bfbe3297e8615b48608c3bb9e5702145c781f5d43ce35416859ad7994 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","python_artifact_printer"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + python_artifact_printer(artifact: Optional[Input[Artifact]] = None):\n if + artifact is not None:\n print(artifact.name)\n print(artifact.uri)\n print(artifact.metadata)\n else:\n print(''No + artifact provided!'')\n\n"],"image":"python:3.9"}' + - name: components-comp-inner-pipeline + value: '{"dag":{"tasks":{"python-artifact-printer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-python-artifact-printer"},"inputs":{"artifacts":{"artifact":{"componentInputArtifact":"dataset"}}},"taskInfo":{"name":"python-artifact-printer"}}}},"inputDefinitions":{"artifacts":{"dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"isOptional":true}}}}' + - name: components-comp-inner-pipeline-2 + value: '{"dag":{"tasks":{"python-artifact-printer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-python-artifact-printer-2"},"inputs":{"artifacts":{"artifact":{"componentInputArtifact":"dataset"}}},"taskInfo":{"name":"python-artifact-printer"}}}},"inputDefinitions":{"artifacts":{"dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"isOptional":true}}}}' + - name: components-root + value: '{"dag":{"tasks":{"custom-artifact-printer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-custom-artifact-printer"},"inputs":{"artifacts":{"artifact":{"componentInputArtifact":"dataset1"}}},"taskInfo":{"name":"custom-artifact-printer"}},"custom-artifact-printer-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-custom-artifact-printer-2"},"taskInfo":{"name":"custom-artifact-printer-2"}},"importer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"uri":{"runtimeValue":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"}}}},"taskInfo":{"name":"importer"}},"inner-pipeline":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-inner-pipeline"},"dependentTasks":["importer"],"inputs":{"artifacts":{"dataset":{"taskOutputArtifact":{"outputArtifactKey":"artifact","producerTask":"importer"}}}},"taskInfo":{"name":"inner-pipeline"}},"inner-pipeline-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-inner-pipeline-2"},"taskInfo":{"name":"inner-pipeline-2"}}}},"inputDefinitions":{"artifacts":{"dataset1":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"isOptional":true}}}}' + entrypoint: entrypoint + podMetadata: 
+ annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - optional-artifact-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + 
resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-b375628bfbe3297e8615b48608c3bb9e5702145c781f5d43ce35416859ad7994}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-python-artifact-printer"},"inputs":{"artifacts":{"artifact":{"componentInputArtifact":"dataset"}}},"taskInfo":{"name":"python-artifact-printer"}}' + - name: container + value: '{{workflow.parameters.implementations-b375628bfbe3297e8615b48608c3bb9e5702145c781f5d43ce35416859ad7994}}' + - name: task-name + value: python-artifact-printer + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: python-artifact-printer-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.python-artifact-printer-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.python-artifact-printer-driver.outputs.parameters.cached-decision}}' + depends: python-artifact-printer-driver.Succeeded + name: python-artifact-printer + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-inner-pipeline + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-b375628bfbe3297e8615b48608c3bb9e5702145c781f5d43ce35416859ad7994}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-python-artifact-printer-2"},"inputs":{"artifacts":{"artifact":{"componentInputArtifact":"dataset"}}},"taskInfo":{"name":"python-artifact-printer"}}' + - name: container + value: '{{workflow.parameters.implementations-b375628bfbe3297e8615b48608c3bb9e5702145c781f5d43ce35416859ad7994}}' + - name: task-name + value: python-artifact-printer + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: python-artifact-printer-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.python-artifact-printer-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.python-artifact-printer-driver.outputs.parameters.cached-decision}}' + depends: python-artifact-printer-driver.Succeeded + name: python-artifact-printer + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-inner-pipeline-2 + outputs: {} + - container: + args: + - --executor_type + - importer + - --task_spec + - '{{inputs.parameters.task}}' + - --component_spec + - '{{inputs.parameters.component}}' + - --importer_spec + - '{{inputs.parameters.importer}}' + - --pipeline_name + - optional-artifact-pipeline + - --run_id + - '{{workflow.uid}}' + - --parent_dag_id + - '{{inputs.parameters.parent-dag-id}}' + - --pod_name + - $(KFP_POD_NAME) + - --pod_uid + - $(KFP_POD_UID) + - 
--mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - launcher-v2 + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: ghcr.io/kubeflow/kfp-launcher:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: task + - name: component + - name: importer + - name: parent-dag-id + metadata: {} + name: system-importer + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - optional-artifact-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-950ab2ff916de191d4701d45acbc5dce7cd1edca6fa6b0226245176343e4af97}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-custom-artifact-printer"},"inputs":{"artifacts":{"artifact":{"componentInputArtifact":"dataset1"}}},"taskInfo":{"name":"custom-artifact-printer"}}' + - name: container + value: '{{workflow.parameters.implementations-950ab2ff916de191d4701d45acbc5dce7cd1edca6fa6b0226245176343e4af97}}' + - name: task-name + value: custom-artifact-printer + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: custom-artifact-printer-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.custom-artifact-printer-driver.outputs.parameters.pod-spec-patch}}' + - 
default: "false" + name: cached-decision + value: '{{tasks.custom-artifact-printer-driver.outputs.parameters.cached-decision}}' + depends: custom-artifact-printer-driver.Succeeded + name: custom-artifact-printer + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-950ab2ff916de191d4701d45acbc5dce7cd1edca6fa6b0226245176343e4af97}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-custom-artifact-printer-2"},"taskInfo":{"name":"custom-artifact-printer-2"}}' + - name: container + value: '{{workflow.parameters.implementations-950ab2ff916de191d4701d45acbc5dce7cd1edca6fa6b0226245176343e4af97}}' + - name: task-name + value: custom-artifact-printer-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: custom-artifact-printer-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.custom-artifact-printer-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.custom-artifact-printer-2-driver.outputs.parameters.cached-decision}}' + depends: custom-artifact-printer-2-driver.Succeeded + name: custom-artifact-printer-2 + template: system-container-executor + - arguments: + parameters: + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"uri":{"runtimeValue":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"}}}},"taskInfo":{"name":"importer"}}' + - name: component + value: '{{workflow.parameters.components-comp-importer}}' + - name: importer + value: '{{workflow.parameters.implementations-comp-importer}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: importer + template: system-importer + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-inner-pipeline}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-inner-pipeline"},"dependentTasks":["importer"],"inputs":{"artifacts":{"dataset":{"taskOutputArtifact":{"outputArtifactKey":"artifact","producerTask":"importer"}}}},"taskInfo":{"name":"inner-pipeline"}}' + - name: task-name + value: inner-pipeline + depends: importer.Succeeded + name: inner-pipeline-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.inner-pipeline-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.inner-pipeline-driver.outputs.parameters.condition}}' + depends: inner-pipeline-driver.Succeeded + name: inner-pipeline + template: comp-inner-pipeline + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-inner-pipeline-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-inner-pipeline-2"},"taskInfo":{"name":"inner-pipeline-2"}}' + - name: task-name + value: inner-pipeline-2 + name: inner-pipeline-2-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.inner-pipeline-2-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.inner-pipeline-2-driver.outputs.parameters.condition}}' + depends: inner-pipeline-2-driver.Succeeded + name: inner-pipeline-2 
+ template: comp-inner-pipeline-2 + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/concat_message.yaml b/test_data/compiled-workflows/concat_message.yaml new file mode 100644 index 00000000000..dae282e55ba --- /dev/null +++ b/test_data/compiled-workflows/concat_message.yaml @@ -0,0 +1,370 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: concat-message- +spec: + arguments: + parameters: + - name: components-7af5009b3d45dcf8a4432f8c675e83fecaf54125510a1a4ec938ecaadc07eac0 + value: '{"executorLabel":"exec-concat-message","inputDefinitions":{"parameters":{"message1":{"parameterType":"STRING"},"message2":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-7af5009b3d45dcf8a4432f8c675e83fecaf54125510a1a4ec938ecaadc07eac0 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","concat_message"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + concat_message(message1: str, message2: str) -\u003e str:\n return message1 + + message2\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"concat-message"}}}},"tasks":{"concat-message":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-concat-message"},"inputs":{"parameters":{"message1":{"componentInputParameter":"message1"},"message2":{"componentInputParameter":"message2"}}},"taskInfo":{"name":"concat-message"}}}},"inputDefinitions":{"parameters":{"message1":{"parameterType":"STRING"},"message2":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - concat-message + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - 
'{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch 
+ - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-7af5009b3d45dcf8a4432f8c675e83fecaf54125510a1a4ec938ecaadc07eac0}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-concat-message"},"inputs":{"parameters":{"message1":{"componentInputParameter":"message1"},"message2":{"componentInputParameter":"message2"}}},"taskInfo":{"name":"concat-message"}}' + - name: container + value: '{{workflow.parameters.implementations-7af5009b3d45dcf8a4432f8c675e83fecaf54125510a1a4ec938ecaadc07eac0}}' + - name: task-name + value: concat-message + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: concat-message-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.concat-message-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.concat-message-driver.outputs.parameters.cached-decision}}' + depends: concat-message-driver.Succeeded + name: concat-message + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - concat-message + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: 
+ - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/conditional_producer_and_consumers.yaml b/test_data/compiled-workflows/conditional_producer_and_consumers.yaml new file mode 100644 index 00000000000..77294228e33 --- /dev/null +++ b/test_data/compiled-workflows/conditional_producer_and_consumers.yaml @@ -0,0 +1,553 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: math-pipeline- +spec: + arguments: + parameters: + - name: components-0a346d821b1e474f2aea8ab24806f75b1afd4d6f7764be16c00f645cc08d5428 + value: '{"executorLabel":"exec-add","inputDefinitions":{"parameters":{"nums":{"parameterType":"LIST"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-0a346d821b1e474f2aea8ab24806f75b1afd4d6f7764be16c00f645cc08d5428 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","add"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + add(nums: List[int]) -\u003e int:\n return sum(nums)\n\n"],"image":"python:3.9"}' + - name: components-comp-condition-4 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--add-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"add"}}}},"tasks":{"add":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add"},"inputs":{"parameters":{"nums":{"componentInputParameter":"pipelinechannel--for-loop-2-pipelinechannel--double-Output"}}},"taskInfo":{"name":"add"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--for-loop-2-pipelinechannel--double-Output":{"parameterType":"LIST"},"pipelinechannel--threshold":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"pipelinechannel--add-Output":{"parameterType":"LIST"}}}}' + - name: components-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44 + value: '{"executorLabel":"exec-double","inputDefinitions":{"parameters":{"num":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","double"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + double(num: int) -\u003e int:\n return 2 * num\n\n"],"image":"python:3.9"}' + - name: components-comp-condition-3 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--double-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"double"}}}},"tasks":{"double":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"taskInfo":{"name":"double"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-1":{"parameterType":"NUMBER_INTEGER"},"pipelinechannel--threshold":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"pipelinechannel--double-Output":{"parameterType":"LIST"}}}}' + - name: components-comp-for-loop-2 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--double-Output":{"valueFromParameter":{"outputParameterKey":"pipelinechannel--double-Output","producerSubtask":"condition-3"}}}},"tasks":{"condition-3":{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--loop-item-param-1":{"componentInputParameter":"pipelinechannel--loop-item-param-1"},"pipelinechannel--threshold":{"componentInputParameter":"pipelinechannel--threshold"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--loop-item-param-1'']) + \u003e= int(inputs.parameter_values[''pipelinechannel--threshold''])"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-1":{"parameterType":"NUMBER_INTEGER"},"pipelinechannel--threshold":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"pipelinechannel--double-Output":{"parameterType":"LIST"}}}}' + - name: components-root + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"pipelinechannel--add-Output","producerSubtask":"condition-4"}}}},"tasks":{"condition-4":{"componentRef":{"name":"comp-condition-4"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"pipelinechannel--for-loop-2-pipelinechannel--double-Output":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-Output","producerTask":"for-loop-2"}},"pipelinechannel--threshold":{"componentInputParameter":"threshold"}}},"taskInfo":{"name":"condition-4"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--threshold'']) + == 2"}},"for-loop-2":{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--threshold":{"componentInputParameter":"threshold"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 
3]"}},"taskInfo":{"name":"for-loop-2"}}}},"inputDefinitions":{"parameters":{"threshold":{"defaultValue":2,"isOptional":true,"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"LIST"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - math-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - 
mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-0a346d821b1e474f2aea8ab24806f75b1afd4d6f7764be16c00f645cc08d5428}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add"},"inputs":{"parameters":{"nums":{"componentInputParameter":"pipelinechannel--for-loop-2-pipelinechannel--double-Output"}}},"taskInfo":{"name":"add"}}' + - name: container + value: '{{workflow.parameters.implementations-0a346d821b1e474f2aea8ab24806f75b1afd4d6f7764be16c00f645cc08d5428}}' + - name: task-name + value: add + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: add-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.add-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.add-driver.outputs.parameters.cached-decision}}' + depends: add-driver.Succeeded + name: add + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-4 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"taskInfo":{"name":"double"}}' + - name: container + value: '{{workflow.parameters.implementations-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44}}' + - name: task-name + value: double + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: double-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.double-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.double-driver.outputs.parameters.cached-decision}}' + depends: double-driver.Succeeded + name: double + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-3 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - math-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - 
'{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-3}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--loop-item-param-1":{"componentInputParameter":"pipelinechannel--loop-item-param-1"},"pipelinechannel--threshold":{"componentInputParameter":"pipelinechannel--threshold"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--loop-item-param-1'']) + \u003e= int(inputs.parameter_values[''pipelinechannel--threshold''])"}}' + - name: task-name + value: condition-3 + name: condition-3-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-3-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-3-driver.outputs.parameters.condition}}' + depends: condition-3-driver.Succeeded + name: condition-3 + template: comp-condition-3 + when: '{{tasks.condition-3-driver.outputs.parameters.condition}} != false' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--threshold":{"componentInputParameter":"threshold"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: 
'{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-2 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-2-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--threshold":{"componentInputParameter":"threshold"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-2-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2-for-loop-2-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-4}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-4"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"pipelinechannel--for-loop-2-pipelinechannel--double-Output":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-Output","producerTask":"for-loop-2"}},"pipelinechannel--threshold":{"componentInputParameter":"threshold"}}},"taskInfo":{"name":"condition-4"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--threshold'']) + == 2"}}' + - name: task-name + value: condition-4 + depends: for-loop-2.Succeeded + name: condition-4-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-4-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-4-driver.outputs.parameters.condition}}' + depends: condition-4-driver.Succeeded + name: condition-4 + template: comp-condition-4 + when: '{{tasks.condition-4-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-2 + template: comp-for-loop-2-for-loop-2-iterator + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"threshold":2}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git 
a/test_data/compiled-workflows/container_component_with_no_inputs.yaml b/test_data/compiled-workflows/container_component_with_no_inputs.yaml new file mode 100644 index 00000000000..d6f5f3b0e0d --- /dev/null +++ b/test_data/compiled-workflows/container_component_with_no_inputs.yaml @@ -0,0 +1,360 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: v2-container-component-no-input- +spec: + arguments: + parameters: + - name: components-ae0b9ab0aaa00ff363a428841da1ae97c69c83b805b51d72258bd4cc91ad843d + value: '{"executorLabel":"exec-hello-world-container"}' + - name: implementations-ae0b9ab0aaa00ff363a428841da1ae97c69c83b805b51d72258bd4cc91ad843d + value: '{"command":["echo","hello world"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"hello-world-container":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world-container"},"taskInfo":{"name":"hello-world-container"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - v2-container-component-no-input + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: 
{} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ae0b9ab0aaa00ff363a428841da1ae97c69c83b805b51d72258bd4cc91ad843d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world-container"},"taskInfo":{"name":"hello-world-container"}}' + - name: container + value: '{{workflow.parameters.implementations-ae0b9ab0aaa00ff363a428841da1ae97c69c83b805b51d72258bd4cc91ad843d}}' + - name: task-name + value: hello-world-container + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: hello-world-container-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.hello-world-container-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.hello-world-container-driver.outputs.parameters.cached-decision}}' + depends: hello-world-container-driver.Succeeded + name: hello-world-container + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - v2-container-component-no-input + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - 
'{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/container_io.yaml b/test_data/compiled-workflows/container_io.yaml new file mode 100644 index 00000000000..ab5d554e997 --- /dev/null +++ b/test_data/compiled-workflows/container_io.yaml @@ -0,0 +1,360 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: container-io- +spec: + arguments: + parameters: + - name: components-41946b6fb2d5d2dfad624f45e380a74ae2e501121a3202c09d5afeafaf165105 + value: '{"executorLabel":"exec-container-io","inputDefinitions":{"parameters":{"text":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"output_path":{"parameterType":"STRING"}}}}' + - name: implementations-41946b6fb2d5d2dfad624f45e380a74ae2e501121a3202c09d5afeafaf165105 + value: '{"args":["--output_path","{{$.outputs.parameters[''output_path''].output_file}}"],"command":["my_program","{{$.inputs.parameters[''text'']}}"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"outputs":{"parameters":{"output_path":{"valueFromParameter":{"outputParameterKey":"output_path","producerSubtask":"container-io"}}}},"tasks":{"container-io":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-container-io"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"container-io"}}}},"inputDefinitions":{"parameters":{"text":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"output_path":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - container-io + - --run_id + 
- '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: 
'{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-41946b6fb2d5d2dfad624f45e380a74ae2e501121a3202c09d5afeafaf165105}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-container-io"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"container-io"}}' + - name: container + value: '{{workflow.parameters.implementations-41946b6fb2d5d2dfad624f45e380a74ae2e501121a3202c09d5afeafaf165105}}' + - name: task-name + value: container-io + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: container-io-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.container-io-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.container-io-driver.outputs.parameters.cached-decision}}' + depends: container-io-driver.Succeeded + name: container-io + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - container-io + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: 
'{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/container_no_input.yaml b/test_data/compiled-workflows/container_no_input.yaml new file mode 100644 index 00000000000..81a4da58e2c --- /dev/null +++ b/test_data/compiled-workflows/container_no_input.yaml @@ -0,0 +1,360 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: container-no-input- +spec: + arguments: + parameters: + - name: components-ae0b9ab0aaa00ff363a428841da1ae97c69c83b805b51d72258bd4cc91ad843d + value: '{"executorLabel":"exec-container-no-input"}' + - name: implementations-ae0b9ab0aaa00ff363a428841da1ae97c69c83b805b51d72258bd4cc91ad843d + value: '{"command":["echo","hello world"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"container-no-input":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-container-no-input"},"taskInfo":{"name":"container-no-input"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - container-no-input + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + 
parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ae0b9ab0aaa00ff363a428841da1ae97c69c83b805b51d72258bd4cc91ad843d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-container-no-input"},"taskInfo":{"name":"container-no-input"}}' + - name: container + value: '{{workflow.parameters.implementations-ae0b9ab0aaa00ff363a428841da1ae97c69c83b805b51d72258bd4cc91ad843d}}' + - name: task-name + value: container-no-input + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: container-no-input-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.container-no-input-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.container-no-input-driver.outputs.parameters.cached-decision}}' + depends: container-no-input-driver.Succeeded + name: container-no-input + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - container-no-input + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + 
- '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/container_with_artifact_output.yaml b/test_data/compiled-workflows/container_with_artifact_output.yaml new file mode 100644 index 00000000000..01cf73f3dad --- /dev/null +++ b/test_data/compiled-workflows/container_with_artifact_output.yaml @@ -0,0 +1,360 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: container-with-artifact-output- +spec: + arguments: + parameters: + - name: components-f8e3101206918b913f3b1307bd1caa3d193be077324b20aa8fb05607de411069 + value: '{"executorLabel":"exec-container-with-artifact-output","inputDefinitions":{"parameters":{"num_epochs":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"artifacts":{"model":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"}}},"parameters":{"model_config_path":{"parameterType":"STRING"}}}}' + - name: implementations-f8e3101206918b913f3b1307bd1caa3d193be077324b20aa8fb05607de411069 + value: '{"args":["--epochs","{{$.inputs.parameters[''num_epochs'']}}","--model_path","{{$.outputs.artifacts[''model''].uri}}","--model_metadata","{{$.outputs.artifacts[''model''].metadata}}","--model_config_path","{{$.outputs.parameters[''model_config_path''].output_file}}"],"command":["sh","run.sh"],"image":"gcr.io/my-image"}' + - name: components-root + value: 
'{"dag":{"outputs":{"artifacts":{"model":{"artifactSelectors":[{"outputArtifactKey":"model","producerSubtask":"container-with-artifact-output"}]}},"parameters":{"model_config_path":{"valueFromParameter":{"outputParameterKey":"model_config_path","producerSubtask":"container-with-artifact-output"}}}},"tasks":{"container-with-artifact-output":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-container-with-artifact-output"},"inputs":{"parameters":{"num_epochs":{"componentInputParameter":"num_epochs"}}},"taskInfo":{"name":"container-with-artifact-output"}}}},"inputDefinitions":{"parameters":{"num_epochs":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"artifacts":{"model":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"}}},"parameters":{"model_config_path":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - container-with-artifact-output + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: 
metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-f8e3101206918b913f3b1307bd1caa3d193be077324b20aa8fb05607de411069}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-container-with-artifact-output"},"inputs":{"parameters":{"num_epochs":{"componentInputParameter":"num_epochs"}}},"taskInfo":{"name":"container-with-artifact-output"}}' + - name: container + value: '{{workflow.parameters.implementations-f8e3101206918b913f3b1307bd1caa3d193be077324b20aa8fb05607de411069}}' + - name: task-name + value: container-with-artifact-output + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: container-with-artifact-output-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.container-with-artifact-output-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.container-with-artifact-output-driver.outputs.parameters.cached-decision}}' + depends: container-with-artifact-output-driver.Succeeded + name: container-with-artifact-output + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - container-with-artifact-output + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - 
'{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/container_with_concat_placeholder.yaml b/test_data/compiled-workflows/container_with_concat_placeholder.yaml new file mode 100644 index 00000000000..ed8bcbf4733 --- /dev/null +++ b/test_data/compiled-workflows/container_with_concat_placeholder.yaml @@ -0,0 +1,361 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: container-with-concat-placeholder- +spec: + arguments: + parameters: + - name: components-b8fd25671b4c5e37ac7a8a2dd9fddc33d348d63dbc1c43627760eaa412e161d7 + value: '{"executorLabel":"exec-container-with-concat-placeholder","inputDefinitions":{"parameters":{"text1":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"text2":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}},"parameters":{"output_path":{"parameterType":"STRING"}}}}' + - name: implementations-b8fd25671b4c5e37ac7a8a2dd9fddc33d348d63dbc1c43627760eaa412e161d7 + value: '{"args":["--output_path","{{$.outputs.parameters[''output_path''].output_file}}"],"command":["my_program","{\"Concat\": + [\"prefix-\", \"{{$.inputs.parameters[''text1'']}}\", \"{{$.outputs.artifacts[''text2''].uri}}\"]}"],"image":"python:3.9"}' + - name: components-root + value: 
'{"dag":{"outputs":{"artifacts":{"text2":{"artifactSelectors":[{"outputArtifactKey":"text2","producerSubtask":"container-with-concat-placeholder"}]}},"parameters":{"output_path":{"valueFromParameter":{"outputParameterKey":"output_path","producerSubtask":"container-with-concat-placeholder"}}}},"tasks":{"container-with-concat-placeholder":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-container-with-concat-placeholder"},"inputs":{"parameters":{"text1":{"componentInputParameter":"text1"}}},"taskInfo":{"name":"container-with-concat-placeholder"}}}},"inputDefinitions":{"parameters":{"text1":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"text2":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}},"parameters":{"output_path":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - container-with-concat-placeholder + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: 
KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-b8fd25671b4c5e37ac7a8a2dd9fddc33d348d63dbc1c43627760eaa412e161d7}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-container-with-concat-placeholder"},"inputs":{"parameters":{"text1":{"componentInputParameter":"text1"}}},"taskInfo":{"name":"container-with-concat-placeholder"}}' + - name: container + value: '{{workflow.parameters.implementations-b8fd25671b4c5e37ac7a8a2dd9fddc33d348d63dbc1c43627760eaa412e161d7}}' + - name: task-name + value: container-with-concat-placeholder + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: container-with-concat-placeholder-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.container-with-concat-placeholder-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.container-with-concat-placeholder-driver.outputs.parameters.cached-decision}}' + depends: container-with-concat-placeholder-driver.Succeeded + name: container-with-concat-placeholder + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - container-with-concat-placeholder + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - 
'{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/container_with_if_placeholder.yaml b/test_data/compiled-workflows/container_with_if_placeholder.yaml new file mode 100644 index 00000000000..0d0e064eeba --- /dev/null +++ b/test_data/compiled-workflows/container_with_if_placeholder.yaml @@ -0,0 +1,363 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: container-with-if-placeholder- +spec: + arguments: + parameters: + - name: components-9bb02daefc2bb7fc6a8bb6f5d1f2c8b7d7feafc2aba8e1117287f6a4a6dfb89f + value: '{"executorLabel":"exec-container-with-if-placeholder","inputDefinitions":{"parameters":{"optional_input":{"defaultValue":"default","isOptional":true,"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}},"parameters":{"output_path":{"parameterType":"STRING"}}}}' + - name: implementations-9bb02daefc2bb7fc6a8bb6f5d1f2c8b7d7feafc2aba8e1117287f6a4a6dfb89f + value: '{"args":["--output_path","{{$.outputs.parameters[''output_path''].output_file}}"],"command":["my_program","{\"IfPresent\": + {\"InputName\": \"optional_input\", \"Then\": [\"{{$.inputs.parameters[''optional_input'']}}\"], + \"Else\": [\"bye\"]}}","--dataset","{\"IfPresent\": {\"InputName\": \"optional_input\", + \"Then\": [\"{{$.outputs.artifacts[''dataset''].uri}}\"], \"Else\": [\"bye\"]}}"],"image":"python:3.9"}' + - name: components-root + value: 
'{"dag":{"outputs":{"artifacts":{"dataset":{"artifactSelectors":[{"outputArtifactKey":"dataset","producerSubtask":"container-with-if-placeholder"}]}},"parameters":{"output_path":{"valueFromParameter":{"outputParameterKey":"output_path","producerSubtask":"container-with-if-placeholder"}}}},"tasks":{"container-with-if-placeholder":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-container-with-if-placeholder"},"inputs":{"parameters":{"optional_input":{"componentInputParameter":"optional_input"}}},"taskInfo":{"name":"container-with-if-placeholder"}}}},"inputDefinitions":{"parameters":{"optional_input":{"defaultValue":"default","isOptional":true,"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}},"parameters":{"output_path":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - container-with-if-placeholder + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + 
fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-9bb02daefc2bb7fc6a8bb6f5d1f2c8b7d7feafc2aba8e1117287f6a4a6dfb89f}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-container-with-if-placeholder"},"inputs":{"parameters":{"optional_input":{"componentInputParameter":"optional_input"}}},"taskInfo":{"name":"container-with-if-placeholder"}}' + - name: container + value: '{{workflow.parameters.implementations-9bb02daefc2bb7fc6a8bb6f5d1f2c8b7d7feafc2aba8e1117287f6a4a6dfb89f}}' + - name: task-name + value: container-with-if-placeholder + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: container-with-if-placeholder-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.container-with-if-placeholder-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.container-with-if-placeholder-driver.outputs.parameters.cached-decision}}' + depends: container-with-if-placeholder-driver.Succeeded + name: container-with-if-placeholder + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - container-with-if-placeholder + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - 
--condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"optional_input":"default"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/container_with_placeholder_in_fstring.yaml b/test_data/compiled-workflows/container_with_placeholder_in_fstring.yaml new file mode 100644 index 00000000000..ccab738981c --- /dev/null +++ b/test_data/compiled-workflows/container_with_placeholder_in_fstring.yaml @@ -0,0 +1,360 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: container-with-placeholder-in-fstring- +spec: + arguments: + parameters: + - name: components-f76f16c424ed80e87d38bb55b27a483700ad3a6f1fe4c5deccb44812db875f5f + value: '{"executorLabel":"exec-container-with-placeholder-in-fstring","inputDefinitions":{"parameters":{"text1":{"defaultValue":"text!","isOptional":true,"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"output_artifact":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: implementations-f76f16c424ed80e87d38bb55b27a483700ad3a6f1fe4c5deccb44812db875f5f + value: '{"command":["my_program","prefix-{{$.inputs.parameters[''text1'']}}","{{$.outputs.artifacts[''output_artifact''].uri}}/0"],"image":"python:3.9"}' + - name: components-root + value: 
'{"dag":{"outputs":{"artifacts":{"output_artifact":{"artifactSelectors":[{"outputArtifactKey":"output_artifact","producerSubtask":"container-with-placeholder-in-fstring"}]}}},"tasks":{"container-with-placeholder-in-fstring":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-container-with-placeholder-in-fstring"},"inputs":{"parameters":{"text1":{"componentInputParameter":"text1"}}},"taskInfo":{"name":"container-with-placeholder-in-fstring"}}}},"inputDefinitions":{"parameters":{"text1":{"defaultValue":"text!","isOptional":true,"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"output_artifact":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - container-with-placeholder-in-fstring + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: 
ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-f76f16c424ed80e87d38bb55b27a483700ad3a6f1fe4c5deccb44812db875f5f}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-container-with-placeholder-in-fstring"},"inputs":{"parameters":{"text1":{"componentInputParameter":"text1"}}},"taskInfo":{"name":"container-with-placeholder-in-fstring"}}' + - name: container + value: '{{workflow.parameters.implementations-f76f16c424ed80e87d38bb55b27a483700ad3a6f1fe4c5deccb44812db875f5f}}' + - name: task-name + value: container-with-placeholder-in-fstring + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: container-with-placeholder-in-fstring-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.container-with-placeholder-in-fstring-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.container-with-placeholder-in-fstring-driver.outputs.parameters.cached-decision}}' + depends: container-with-placeholder-in-fstring-driver.Succeeded + name: container-with-placeholder-in-fstring + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - container-with-placeholder-in-fstring + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - 
--no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"text1":"text!"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/containerized_python_component.yaml b/test_data/compiled-workflows/containerized_python_component.yaml new file mode 100644 index 00000000000..e81b10a998d --- /dev/null +++ b/test_data/compiled-workflows/containerized_python_component.yaml @@ -0,0 +1,360 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: concat-message- +spec: + arguments: + parameters: + - name: components-f5a84f596803cacaf4013e9a5ef593b3f9fbe93b7f0f58618112e8ab9541f693 + value: '{"executorLabel":"exec-concat-message","inputDefinitions":{"parameters":{"message1":{"parameterType":"STRING"},"message2":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-f5a84f596803cacaf4013e9a5ef593b3f9fbe93b7f0f58618112e8ab9541f693 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","concat_message"],"command":["python3","-m","kfp.dsl.executor_main"],"image":"kfp-image"}' + - name: components-root + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"concat-message"}}}},"tasks":{"concat-message":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-concat-message"},"inputs":{"parameters":{"message1":{"componentInputParameter":"message1"},"message2":{"componentInputParameter":"message2"}}},"taskInfo":{"name":"concat-message"}}}},"inputDefinitions":{"parameters":{"message1":{"parameterType":"STRING"},"message2":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: 
pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - concat-message + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + 
parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-f5a84f596803cacaf4013e9a5ef593b3f9fbe93b7f0f58618112e8ab9541f693}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-concat-message"},"inputs":{"parameters":{"message1":{"componentInputParameter":"message1"},"message2":{"componentInputParameter":"message2"}}},"taskInfo":{"name":"concat-message"}}' + - name: container + value: '{{workflow.parameters.implementations-f5a84f596803cacaf4013e9a5ef593b3f9fbe93b7f0f58618112e8ab9541f693}}' + - name: task-name + value: concat-message + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: concat-message-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.concat-message-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.concat-message-driver.outputs.parameters.cached-decision}}' + depends: concat-message-driver.Succeeded + name: concat-message + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - concat-message + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: 
"true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/create_pod_metadata_complex.yaml b/test_data/compiled-workflows/create_pod_metadata_complex.yaml new file mode 100644 index 00000000000..b13da8d058f --- /dev/null +++ b/test_data/compiled-workflows/create_pod_metadata_complex.yaml @@ -0,0 +1,721 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-pod-metadata- +spec: + arguments: + parameters: + - name: components-c78f6e9b1302a8641c86310a2b303eb6fe40bc7225ccb38e15cd89cd412a77e1 + value: '{"executorLabel":"exec-validate-no-pod-metadata","inputDefinitions":{"parameters":{"annotation_path":{"parameterType":"STRING"},"label_path":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"BOOLEAN"}}}}' + - name: implementations-c78f6e9b1302a8641c86310a2b303eb6fe40bc7225ccb38e15cd89cd412a77e1 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","validate_no_pod_metadata"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + validate_no_pod_metadata(annotation_path: str, label_path: str) -\u003e bool:\n import + os\n annotation = os.getenv(annotation_path)\n if annotation != \"\":\n raise + ValueError(\n f\"Pod annotation is {annotation} but is supposed + to be None.\"\n )\n label = os.getenv(label_path)\n if label + != \"\":\n raise ValueError(\n f\"Pod label is {label} but + is supposed to be None.\"\n )\n return True\n\n"],"image":"python:3.9"}' + - name: kubernetes-comp-validate-pod-metadata + value: '{"fieldPathAsEnv":[{"fieldPath":"metadata.annotations[''task-annotation'']","name":"POD_TASK_ANNOTATION"},{"fieldPath":"metadata.labels[''task-label-1'']","name":"POD_TASK_LABEL_1"},{"fieldPath":"metadata.labels[''task-label-2'']","name":"POD_TASK_LABEL_2"}],"podMetadata":{"annotations":{"task-annotation":"annotation"},"labels":{"task-label-1":"label-1","task-label-2":"label-2"}}}' + - name: components-41f6db675508ce85c4c81ffc8f4f63358027f85ec0bf31f36946cf1ede3c43cc + value: 
'{"executorLabel":"exec-validate-pod-metadata","inputDefinitions":{"parameters":{"annotation_exp_val_1":{"isOptional":true,"parameterType":"STRING"},"annotation_exp_val_2":{"isOptional":true,"parameterType":"STRING"},"annotation_path_1":{"isOptional":true,"parameterType":"STRING"},"annotation_path_2":{"isOptional":true,"parameterType":"STRING"},"label_exp_val_1":{"isOptional":true,"parameterType":"STRING"},"label_exp_val_2":{"isOptional":true,"parameterType":"STRING"},"label_path_1":{"isOptional":true,"parameterType":"STRING"},"label_path_2":{"isOptional":true,"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"BOOLEAN"}}}}' + - name: implementations-41f6db675508ce85c4c81ffc8f4f63358027f85ec0bf31f36946cf1ede3c43cc + value: '{"args":["--executor_input","{{$}}","--function_to_execute","validate_pod_metadata"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + validate_pod_metadata(annotation_path_1: str = None,\n annotation_exp_val_1: + str = None,\n annotation_path_2: str = None,\n annotation_exp_val_2: + str = None,\n label_path_1: str = None,\n label_exp_val_1: + str = None,\n label_path_2: str = None,\n label_exp_val_2: + str = None) -\u003e bool:\n import os\n\n validated_metadata_count = + 0\n if annotation_path_1 is not None:\n annotation_val_1 = os.getenv(annotation_path_1)\n if + annotation_val_1 is not None and annotation_val_1 != annotation_exp_val_1:\n raise + ValueError(\n f\"Pod annotation is {annotation_val_1} but is + supposed to be {annotation_exp_val_1}.\"\n )\n validated_metadata_count+=1\n if + annotation_path_2 is not None:\n annotation_val_2 = os.getenv(annotation_path_2)\n if + annotation_val_2 is not None and annotation_val_2 != annotation_exp_val_2:\n raise + ValueError(\n f\"Pod annotation is {annotation_val_2} but is + supposed to be {annotation_exp_val_2}.\"\n )\n validated_metadata_count+=1\n if + label_path_1 is not None:\n label_val_1 = os.getenv(label_path_1)\n if + label_val_1 is not None and label_val_1 != label_exp_val_1:\n raise + ValueError(\n f\"Pod label is {label_val_1} but is supposed + to be {label_exp_val_1}.\"\n )\n validated_metadata_count+=1\n if + label_path_2 is not None:\n label_val_2 = os.getenv(label_path_2)\n if + label_val_2 is not None and label_val_2 != label_exp_val_2:\n raise + ValueError(\n f\"Pod label is {label_val_2} but is supposed + to be {label_exp_val_2}.\"\n )\n validated_metadata_count+=1\n if + validated_metadata_count \u003c1:\n raise RuntimeError(f\"No pod metadata + found to validate.\")\n return True\n\n"],"image":"python:3.9"}' + - name: kubernetes-comp-validate-pod-metadata-2 + value: 
'{"fieldPathAsEnv":[{"fieldPath":"metadata.annotations[''task-annotation-1'']","name":"POD_TASK_ANNOTATION_1"},{"fieldPath":"metadata.annotations[''task-annotation-2'']","name":"POD_TASK_ANNOTATION_2"}],"podMetadata":{"annotations":{"task-annotation-1":"annotation-1","task-annotation-2":"annotation-2"}}}' + - name: components-root + value: '{"dag":{"tasks":{"validate-no-pod-metadata":{"cachingOptions":{},"componentRef":{"name":"comp-validate-no-pod-metadata"},"inputs":{"parameters":{"annotation_path":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION"}},"label_path":{"runtimeValue":{"constant":"POD_TASK_LABEL"}}}},"taskInfo":{"name":"validate-no-pod-metadata"}},"validate-pod-metadata":{"cachingOptions":{},"componentRef":{"name":"comp-validate-pod-metadata"},"inputs":{"parameters":{"annotation_exp_val_1":{"runtimeValue":{"constant":"annotation"}},"annotation_path_1":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION"}},"label_exp_val_1":{"runtimeValue":{"constant":"label-1"}},"label_exp_val_2":{"runtimeValue":{"constant":"label-2"}},"label_path_1":{"runtimeValue":{"constant":"POD_TASK_LABEL_1"}},"label_path_2":{"runtimeValue":{"constant":"POD_TASK_LABEL_2"}}}},"taskInfo":{"name":"validate-pod-metadata"}},"validate-pod-metadata-2":{"cachingOptions":{},"componentRef":{"name":"comp-validate-pod-metadata-2"},"inputs":{"parameters":{"annotation_exp_val_1":{"runtimeValue":{"constant":"annotation-1"}},"annotation_exp_val_2":{"runtimeValue":{"constant":"annotation-2"}},"annotation_path_1":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION_1"}},"annotation_path_2":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION_2"}}}},"taskInfo":{"name":"validate-pod-metadata-2"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-pod-metadata + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + 
outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + - name: pod-metadata-annotation-key + value: '{{inputs.parameters.pod-metadata-annotation-key}}' + - name: pod-metadata-annotation-val + value: '{{inputs.parameters.pod-metadata-annotation-val}}' + - name: pod-metadata-label-key-1 + value: '{{inputs.parameters.pod-metadata-label-key-1}}' + - name: pod-metadata-label-val-1 + value: '{{inputs.parameters.pod-metadata-label-val-1}}' + - name: pod-metadata-label-key-2 + value: '{{inputs.parameters.pod-metadata-label-key-2}}' + - name: pod-metadata-label-val-2 + value: '{{inputs.parameters.pod-metadata-label-val-2}}' + name: executor + template: metadata-1-2-system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + - name: pod-metadata-annotation-key + - name: pod-metadata-annotation-val + - name: pod-metadata-label-key-1 + - name: pod-metadata-label-val-1 + - name: pod-metadata-label-key-2 + - name: pod-metadata-label-val-2 + metadata: {} + name: metadata-1-2-system-container-executor + outputs: {} + - container: + command: + - 
should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + - name: pod-metadata-annotation-key + - name: pod-metadata-annotation-val + - name: pod-metadata-label-key-1 + - name: pod-metadata-label-val-1 + - name: pod-metadata-label-key-2 + - name: pod-metadata-label-val-2 + metadata: + annotations: + '{{inputs.parameters.pod-metadata-annotation-key}}': '{{inputs.parameters.pod-metadata-annotation-val}}' + labels: + '{{inputs.parameters.pod-metadata-label-key-1}}': '{{inputs.parameters.pod-metadata-label-val-1}}' + '{{inputs.parameters.pod-metadata-label-key-2}}': '{{inputs.parameters.pod-metadata-label-val-2}}' + name: metadata-1-2-system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + - name: pod-metadata-annotation-key-1 + value: '{{inputs.parameters.pod-metadata-annotation-key-1}}' + - name: pod-metadata-annotation-val-1 + value: '{{inputs.parameters.pod-metadata-annotation-val-1}}' + - name: pod-metadata-annotation-key-2 + value: '{{inputs.parameters.pod-metadata-annotation-key-2}}' + - name: pod-metadata-annotation-val-2 + value: '{{inputs.parameters.pod-metadata-annotation-val-2}}' + name: executor + template: metadata-2-0-system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + - name: pod-metadata-annotation-key-1 + - name: pod-metadata-annotation-val-1 + - name: pod-metadata-annotation-key-2 + - name: pod-metadata-annotation-val-2 + metadata: {} + name: metadata-2-0-system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + 
optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + - name: pod-metadata-annotation-key-1 + - name: pod-metadata-annotation-val-1 + - name: pod-metadata-annotation-key-2 + - name: pod-metadata-annotation-val-2 + metadata: + annotations: + '{{inputs.parameters.pod-metadata-annotation-key-1}}': '{{inputs.parameters.pod-metadata-annotation-val-1}}' + '{{inputs.parameters.pod-metadata-annotation-key-2}}': '{{inputs.parameters.pod-metadata-annotation-val-2}}' + name: metadata-2-0-system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c78f6e9b1302a8641c86310a2b303eb6fe40bc7225ccb38e15cd89cd412a77e1}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-validate-no-pod-metadata"},"inputs":{"parameters":{"annotation_path":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION"}},"label_path":{"runtimeValue":{"constant":"POD_TASK_LABEL"}}}},"taskInfo":{"name":"validate-no-pod-metadata"}}' + - name: container + value: '{{workflow.parameters.implementations-c78f6e9b1302a8641c86310a2b303eb6fe40bc7225ccb38e15cd89cd412a77e1}}' + - name: task-name + value: validate-no-pod-metadata + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: validate-no-pod-metadata-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.validate-no-pod-metadata-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.validate-no-pod-metadata-driver.outputs.parameters.cached-decision}}' + depends: validate-no-pod-metadata-driver.Succeeded + name: validate-no-pod-metadata + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-41f6db675508ce85c4c81ffc8f4f63358027f85ec0bf31f36946cf1ede3c43cc}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-validate-pod-metadata"},"inputs":{"parameters":{"annotation_exp_val_1":{"runtimeValue":{"constant":"annotation"}},"annotation_path_1":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION"}},"label_exp_val_1":{"runtimeValue":{"constant":"label-1"}},"label_exp_val_2":{"runtimeValue":{"constant":"label-2"}},"label_path_1":{"runtimeValue":{"constant":"POD_TASK_LABEL_1"}},"label_path_2":{"runtimeValue":{"constant":"POD_TASK_LABEL_2"}}}},"taskInfo":{"name":"validate-pod-metadata"}}' + - name: 
container + value: '{{workflow.parameters.implementations-41f6db675508ce85c4c81ffc8f4f63358027f85ec0bf31f36946cf1ede3c43cc}}' + - name: task-name + value: validate-pod-metadata + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: kubernetes-config + value: '{{workflow.parameters.kubernetes-comp-validate-pod-metadata}}' + name: validate-pod-metadata-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.validate-pod-metadata-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.validate-pod-metadata-driver.outputs.parameters.cached-decision}}' + - name: pod-metadata-annotation-key + value: task-annotation + - name: pod-metadata-annotation-val + value: annotation + - name: pod-metadata-label-key-1 + value: task-label-1 + - name: pod-metadata-label-val-1 + value: label-1 + - name: pod-metadata-label-key-2 + value: task-label-2 + - name: pod-metadata-label-val-2 + value: label-2 + depends: validate-pod-metadata-driver.Succeeded + name: validate-pod-metadata + template: metadata-1-2-system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-41f6db675508ce85c4c81ffc8f4f63358027f85ec0bf31f36946cf1ede3c43cc}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-validate-pod-metadata-2"},"inputs":{"parameters":{"annotation_exp_val_1":{"runtimeValue":{"constant":"annotation-1"}},"annotation_exp_val_2":{"runtimeValue":{"constant":"annotation-2"}},"annotation_path_1":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION_1"}},"annotation_path_2":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION_2"}}}},"taskInfo":{"name":"validate-pod-metadata-2"}}' + - name: container + value: '{{workflow.parameters.implementations-41f6db675508ce85c4c81ffc8f4f63358027f85ec0bf31f36946cf1ede3c43cc}}' + - name: task-name + value: validate-pod-metadata-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: kubernetes-config + value: '{{workflow.parameters.kubernetes-comp-validate-pod-metadata-2}}' + name: validate-pod-metadata-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.validate-pod-metadata-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.validate-pod-metadata-2-driver.outputs.parameters.cached-decision}}' + - name: pod-metadata-annotation-key-1 + value: task-annotation-1 + - name: pod-metadata-annotation-val-1 + value: annotation-1 + - name: pod-metadata-annotation-key-2 + value: task-annotation-2 + - name: pod-metadata-annotation-val-2 + value: annotation-2 + depends: validate-pod-metadata-2-driver.Succeeded + name: validate-pod-metadata-2 + template: metadata-2-0-system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-pod-metadata + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - 
--execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/cross_loop_after_topology.yaml b/test_data/compiled-workflows/cross_loop_after_topology.yaml new file mode 100644 index 00000000000..d4e4b3aa430 --- /dev/null +++ b/test_data/compiled-workflows/cross_loop_after_topology.yaml @@ -0,0 +1,1087 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: my-pipeline- +spec: + arguments: + parameters: + - name: components-e12164b597ed5229b5b77b574180112348cfc79cceecf6121b99dfb6b742f5ac + value: '{"executorLabel":"exec-print-op-5","inputDefinitions":{"parameters":{"message":{"parameterType":"STRING"}}}}' + - name: implementations-e12164b597ed5229b5b77b574180112348cfc79cceecf6121b99dfb6b742f5ac + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_op"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_op(message: str):\n print(message)\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-10 + value: '{"dag":{"tasks":{"print-op-5":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-5"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"five"}}}},"taskInfo":{"name":"print-op-5"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-9":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-comp-for-loop-14 + value: '{"dag":{"tasks":{"print-op-7":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-7"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"seven"}}}},"taskInfo":{"name":"print-op-7"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-13":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-comp-for-loop-12 + value: '{"dag":{"tasks":{"for-loop-14":{"componentRef":{"name":"comp-for-loop-14"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-13","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-14"}},"print-op-8":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-8"},"dependentTasks":["for-loop-14"],"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"eight"}}}},"taskInfo":{"name":"print-op-8"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-11":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-comp-for-loop-2 + value: '{"dag":{"tasks":{"print-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"one"}}}},"taskInfo":{"name":"print-op"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-1":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-comp-for-loop-4 + value: '{"dag":{"tasks":{"print-op-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"two"}}}},"taskInfo":{"name":"print-op-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-3":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-comp-for-loop-8 + value: '{"dag":{"tasks":{"print-op-4":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-4"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"four"}}}},"taskInfo":{"name":"print-op-4"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-7":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-comp-for-loop-6 + value: '{"dag":{"tasks":{"for-loop-8":{"componentRef":{"name":"comp-for-loop-8"},"dependentTasks":["print-op-3"],"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-7","items":{"raw":"[1, + 
2]"}},"taskInfo":{"name":"for-loop-8"}},"print-op-3":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-3"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"three"}}}},"taskInfo":{"name":"print-op-3"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-5":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-root + value: '{"dag":{"tasks":{"for-loop-10":{"componentRef":{"name":"comp-for-loop-10"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-9","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-10"}},"for-loop-12":{"componentRef":{"name":"comp-for-loop-12"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-11","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-12"}},"for-loop-2":{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-2"}},"for-loop-4":{"componentRef":{"name":"comp-for-loop-4"},"dependentTasks":["for-loop-2"],"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-4"}},"for-loop-6":{"componentRef":{"name":"comp-for-loop-6"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-5","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-6"}},"print-op-6":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-6"},"dependentTasks":["for-loop-10"],"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"six"}}}},"taskInfo":{"name":"print-op-6"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - my-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" 
+ path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-e12164b597ed5229b5b77b574180112348cfc79cceecf6121b99dfb6b742f5ac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-5"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"five"}}}},"taskInfo":{"name":"print-op-5"}}' + - name: container + value: '{{workflow.parameters.implementations-e12164b597ed5229b5b77b574180112348cfc79cceecf6121b99dfb6b742f5ac}}' + - name: task-name + value: print-op-5 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-5-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-5-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-5-driver.outputs.parameters.cached-decision}}' + depends: print-op-5-driver.Succeeded + name: print-op-5 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-10 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: 
'{{workflow.parameters.components-e12164b597ed5229b5b77b574180112348cfc79cceecf6121b99dfb6b742f5ac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-7"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"seven"}}}},"taskInfo":{"name":"print-op-7"}}' + - name: container + value: '{{workflow.parameters.implementations-e12164b597ed5229b5b77b574180112348cfc79cceecf6121b99dfb6b742f5ac}}' + - name: task-name + value: print-op-7 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-7-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-7-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-7-driver.outputs.parameters.cached-decision}}' + depends: print-op-7-driver.Succeeded + name: print-op-7 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-14 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - my-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-14}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-14"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-13","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-14"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + 
parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-14 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-14-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-14}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-14"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-13","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-14"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-14-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-14-for-loop-14-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-14 + template: comp-for-loop-14-for-loop-14-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-e12164b597ed5229b5b77b574180112348cfc79cceecf6121b99dfb6b742f5ac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-8"},"dependentTasks":["for-loop-14"],"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"eight"}}}},"taskInfo":{"name":"print-op-8"}}' + - name: container + value: '{{workflow.parameters.implementations-e12164b597ed5229b5b77b574180112348cfc79cceecf6121b99dfb6b742f5ac}}' + - name: task-name + value: print-op-8 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: for-loop-14.Succeeded + name: print-op-8-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-8-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-8-driver.outputs.parameters.cached-decision}}' + depends: print-op-8-driver.Succeeded + name: print-op-8 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-12 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-e12164b597ed5229b5b77b574180112348cfc79cceecf6121b99dfb6b742f5ac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"one"}}}},"taskInfo":{"name":"print-op"}}' + - name: container + value: '{{workflow.parameters.implementations-e12164b597ed5229b5b77b574180112348cfc79cceecf6121b99dfb6b742f5ac}}' + - name: task-name + value: print-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: 
'{{tasks.print-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-driver.outputs.parameters.cached-decision}}' + depends: print-op-driver.Succeeded + name: print-op + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-e12164b597ed5229b5b77b574180112348cfc79cceecf6121b99dfb6b742f5ac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"two"}}}},"taskInfo":{"name":"print-op-2"}}' + - name: container + value: '{{workflow.parameters.implementations-e12164b597ed5229b5b77b574180112348cfc79cceecf6121b99dfb6b742f5ac}}' + - name: task-name + value: print-op-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-2-driver.outputs.parameters.cached-decision}}' + depends: print-op-2-driver.Succeeded + name: print-op-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-4 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-e12164b597ed5229b5b77b574180112348cfc79cceecf6121b99dfb6b742f5ac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-4"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"four"}}}},"taskInfo":{"name":"print-op-4"}}' + - name: container + value: '{{workflow.parameters.implementations-e12164b597ed5229b5b77b574180112348cfc79cceecf6121b99dfb6b742f5ac}}' + - name: task-name + value: print-op-4 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-4-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-4-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-4-driver.outputs.parameters.cached-decision}}' + depends: print-op-4-driver.Succeeded + name: print-op-4 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-8 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-8}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-8"},"dependentTasks":["print-op-3"],"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-7","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-8"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-8 + inputs: + 
parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-8-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-8}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-8"},"dependentTasks":["print-op-3"],"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-7","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-8"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-8-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-8-for-loop-8-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: print-op-3.Succeeded + name: for-loop-8 + template: comp-for-loop-8-for-loop-8-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-e12164b597ed5229b5b77b574180112348cfc79cceecf6121b99dfb6b742f5ac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-3"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"three"}}}},"taskInfo":{"name":"print-op-3"}}' + - name: container + value: '{{workflow.parameters.implementations-e12164b597ed5229b5b77b574180112348cfc79cceecf6121b99dfb6b742f5ac}}' + - name: task-name + value: print-op-3 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-3-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-3-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-3-driver.outputs.parameters.cached-decision}}' + depends: print-op-3-driver.Succeeded + name: print-op-3 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-6 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-10}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-10"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-9","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-10"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-10 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-10-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: 
'{{workflow.parameters.components-comp-for-loop-10}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-10"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-9","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-10"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-10-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-10-for-loop-10-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-12}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-12"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-11","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-12"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-12 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-12-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-12}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-12"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-11","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-12"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-12-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-12-for-loop-12-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: 
'{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-2 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-2-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-2-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2-for-loop-2-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-4}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-4"},"dependentTasks":["for-loop-2"],"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-4"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-4 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-4-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-4}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-4"},"dependentTasks":["for-loop-2"],"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-4"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-4-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-4-for-loop-4-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-6}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: 
'{"componentRef":{"name":"comp-for-loop-6"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-5","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-6"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-6 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-6-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-6}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-6"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-5","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-6"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-6-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-6-for-loop-6-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-10 + template: comp-for-loop-10-for-loop-10-iterator + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-12 + template: comp-for-loop-12-for-loop-12-iterator + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-2 + template: comp-for-loop-2-for-loop-2-iterator + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: for-loop-2.Succeeded + name: for-loop-4 + template: comp-for-loop-4-for-loop-4-iterator + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-6 + template: comp-for-loop-6-for-loop-6-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-e12164b597ed5229b5b77b574180112348cfc79cceecf6121b99dfb6b742f5ac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-6"},"dependentTasks":["for-loop-10"],"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"six"}}}},"taskInfo":{"name":"print-op-6"}}' + - name: container + value: '{{workflow.parameters.implementations-e12164b597ed5229b5b77b574180112348cfc79cceecf6121b99dfb6b742f5ac}}' + - name: task-name + value: print-op-6 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: for-loop-10.Succeeded + name: print-op-6-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-6-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-6-driver.outputs.parameters.cached-decision}}' + depends: print-op-6-driver.Succeeded + 
name: print-op-6 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/dict_input.yaml b/test_data/compiled-workflows/dict_input.yaml new file mode 100644 index 00000000000..131d003f185 --- /dev/null +++ b/test_data/compiled-workflows/dict_input.yaml @@ -0,0 +1,369 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: dict-input- +spec: + arguments: + parameters: + - name: components-e11c23af71b1b107d01e28b0c31e9994727d3eff7eef1dbb87fdd8868ff80e84 + value: '{"executorLabel":"exec-dict-input","inputDefinitions":{"parameters":{"struct":{"parameterType":"STRUCT"}}}}' + - name: implementations-e11c23af71b1b107d01e28b0c31e9994727d3eff7eef1dbb87fdd8868ff80e84 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","dict_input"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + dict_input(struct: Dict):\n print(struct)\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"dict-input":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-dict-input"},"inputs":{"parameters":{"struct":{"componentInputParameter":"struct"}}},"taskInfo":{"name":"dict-input"}}}},"inputDefinitions":{"parameters":{"struct":{"parameterType":"STRUCT"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - dict-input + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - 
--kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-e11c23af71b1b107d01e28b0c31e9994727d3eff7eef1dbb87fdd8868ff80e84}}' + - name: task + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-dict-input"},"inputs":{"parameters":{"struct":{"componentInputParameter":"struct"}}},"taskInfo":{"name":"dict-input"}}' + - name: container + value: '{{workflow.parameters.implementations-e11c23af71b1b107d01e28b0c31e9994727d3eff7eef1dbb87fdd8868ff80e84}}' + - name: task-name + value: dict-input + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: dict-input-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.dict-input-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.dict-input-driver.outputs.parameters.cached-decision}}' + depends: dict-input-driver.Succeeded + name: dict-input + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - dict-input + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/embedded_artifact.yaml b/test_data/compiled-workflows/embedded_artifact.yaml new file 
mode 100644 index 00000000000..58a906007ad --- /dev/null +++ b/test_data/compiled-workflows/embedded_artifact.yaml @@ -0,0 +1,434 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: nb-simple- +spec: + arguments: + parameters: + - name: components-228cf9424bf53a0e969ad4299e33fa7d2c0cebe754627c59bd00c710b00f88fa + value: '{"executorLabel":"exec-read-embedded-artifact-dir"}' + - name: implementations-228cf9424bf53a0e969ad4299e33fa7d2c0cebe754627c59bd00c710b00f88fa + value: '{"args":["--executor_input","{{$}}","--function_to_execute","read_embedded_artifact_dir"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\n__KFP_EMBEDDED_ARCHIVE_B64 + = ''H4sIAD/g42gC/+3UzYrCMBQF4Kx9irgf0ntjbq4uBJcufYVCSylEC7ViH38yuphBKMMsKuicb5NfyCKc4wpX7A7luK/Lqu7NLOhuaiRS/z3/2mfyHIwdzRNczkPZ5+fN/8RiL221DZ43ca2LvGx+LP3aHof2WG9ZZaMxSFg5Yi8UdWHgDbhi/jduEReZzn+es5CSxnxRc/45N4Cx8sz8n87tqUnl5L3fzl/2/9H/6P+/9v9KPfr/TfKfusYN4zBz/8cQpvPP8tD/PngyltD/s9vXKXUf9tr1qVoiDgAAAAAAAAAAAAAAAAAv7ROs57gWACgAAA==''\n\nimport + base64 as __kfp_b64\nimport io as __kfp_io\nimport os as __kfp_os\nimport + sys as __kfp_sys\nimport tarfile as __kfp_tarfile\nimport tempfile as __kfp_tempfile\n\n# + Extract embedded archive at import time to ensure sys.path and globals are + set\n__kfp_tmpdir = __kfp_tempfile.TemporaryDirectory()\n__KFP_EMBEDDED_ASSET_DIR + = __kfp_tmpdir.name\ntry:\n __kfp_bytes = __kfp_b64.b64decode(__KFP_EMBEDDED_ARCHIVE_B64.encode(''ascii''))\n with + __kfp_tarfile.open(fileobj=__kfp_io.BytesIO(__kfp_bytes), mode=''r:gz'') as + __kfp_tar:\n __kfp_tar.extractall(path=__KFP_EMBEDDED_ASSET_DIR)\nexcept + Exception as __kfp_e:\n raise RuntimeError(f''Failed to extract embedded + archive: {__kfp_e}'')\n\n# Always prepend the extracted directory to sys.path + for import resolution\nif __KFP_EMBEDDED_ASSET_DIR not in __kfp_sys.path:\n __kfp_sys.path.insert(0, + __KFP_EMBEDDED_ASSET_DIR)\n\n\n\n\ndef read_embedded_artifact_dir(artifact: + dsl.EmbeddedInput[dsl.Dataset]):\n import os\n\n with open(os.path.join(artifact.path, + \"log.txt\"), \"r\", encoding=\"utf-8\") as f:\n log = f.read()\n\n assert + log == \"Hello, world!\"\n\n"],"image":"python:3.9"}' + - name: components-302f28ee42e44910a1eb45593566a12b430150b643fb02ded3ae7838c0cea331 + value: '{"executorLabel":"exec-read-embedded-artifact-file"}' + - name: implementations-302f28ee42e44910a1eb45593566a12b430150b643fb02ded3ae7838c0cea331 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","read_embedded_artifact_file"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\n__KFP_EMBEDDED_ARCHIVE_B64 + = ''H4sIAD/g42gC/+3UvQrCMBSG4cxeRdylJml+dCg4OnoLhZZSiBZqxV6+KQ66FCcF9X2W5CSBs5wv2Tpb7w7luK/Lqu7FW6i7uVWpYB776Vwro62Qo/iAy3ko+9Re/Cft5KWtCmv01m/CIpXNU2k28ji0x7rQwW2Dt87mmdLG5cEsBH5A7JpsGIe39phC7a2dz792aQ5VUMGnvyCk/BvjcyHVJ/N/OrenJpaz717df6l9HWO3kteuj9WSOAAAAAAAAAAAAAAAAHydG2KO630AKAAA''\n\nimport + base64 as __kfp_b64\nimport io as __kfp_io\nimport os as __kfp_os\nimport + sys as __kfp_sys\nimport tarfile as __kfp_tarfile\nimport tempfile as __kfp_tempfile\n\n# + Extract embedded archive at import time to ensure sys.path and globals are + set\n__kfp_tmpdir = __kfp_tempfile.TemporaryDirectory()\n__KFP_EMBEDDED_ASSET_DIR + = __kfp_tmpdir.name\ntry:\n __kfp_bytes = __kfp_b64.b64decode(__KFP_EMBEDDED_ARCHIVE_B64.encode(''ascii''))\n with + __kfp_tarfile.open(fileobj=__kfp_io.BytesIO(__kfp_bytes), mode=''r:gz'') as + __kfp_tar:\n __kfp_tar.extractall(path=__KFP_EMBEDDED_ASSET_DIR)\nexcept + Exception as __kfp_e:\n raise RuntimeError(f''Failed to extract embedded + archive: {__kfp_e}'')\n\n# Always prepend the extracted directory to sys.path + for import resolution\nif __KFP_EMBEDDED_ASSET_DIR not in __kfp_sys.path:\n __kfp_sys.path.insert(0, + __KFP_EMBEDDED_ASSET_DIR)\n\n__KFP_EMBEDDED_ASSET_FILE = __kfp_os.path.join(__KFP_EMBEDDED_ASSET_DIR, + ''log.txt'')\n\n\n\n\ndef read_embedded_artifact_file(artifact: dsl.EmbeddedInput[dsl.Dataset]):\n with + open(artifact.path, \"r\", encoding=\"utf-8\") as f:\n log = f.read()\n\n assert + log == \"Hello, world!\"\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"read-embedded-artifact-dir":{"cachingOptions":{},"componentRef":{"name":"comp-read-embedded-artifact-dir"},"taskInfo":{"name":"read-embedded-artifact-dir"}},"read-embedded-artifact-file":{"cachingOptions":{},"componentRef":{"name":"comp-read-embedded-artifact-file"},"taskInfo":{"name":"read-embedded-artifact-file"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - nb-simple + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - 
--kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-228cf9424bf53a0e969ad4299e33fa7d2c0cebe754627c59bd00c710b00f88fa}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-read-embedded-artifact-dir"},"taskInfo":{"name":"read-embedded-artifact-dir"}}' + - name: 
container + value: '{{workflow.parameters.implementations-228cf9424bf53a0e969ad4299e33fa7d2c0cebe754627c59bd00c710b00f88fa}}' + - name: task-name + value: read-embedded-artifact-dir + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: read-embedded-artifact-dir-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.read-embedded-artifact-dir-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.read-embedded-artifact-dir-driver.outputs.parameters.cached-decision}}' + depends: read-embedded-artifact-dir-driver.Succeeded + name: read-embedded-artifact-dir + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-302f28ee42e44910a1eb45593566a12b430150b643fb02ded3ae7838c0cea331}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-read-embedded-artifact-file"},"taskInfo":{"name":"read-embedded-artifact-file"}}' + - name: container + value: '{{workflow.parameters.implementations-302f28ee42e44910a1eb45593566a12b430150b643fb02ded3ae7838c0cea331}}' + - name: task-name + value: read-embedded-artifact-file + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: read-embedded-artifact-file-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.read-embedded-artifact-file-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.read-embedded-artifact-file-driver.outputs.parameters.cached-decision}}' + depends: read-embedded-artifact-file-driver.Succeeded + name: read-embedded-artifact-file + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - nb-simple + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver 
+ outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/env-var.yaml b/test_data/compiled-workflows/env-var.yaml new file mode 100644 index 00000000000..fa9cbaf186e --- /dev/null +++ b/test_data/compiled-workflows/env-var.yaml @@ -0,0 +1,371 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: test-env-exists- +spec: + arguments: + parameters: + - name: components-5993ccc78137b54f60188169af3d4eed06dd024defc1904818d928e904ae14e6 + value: '{"executorLabel":"exec-comp","inputDefinitions":{"parameters":{"env_var":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-5993ccc78137b54f60188169af3d4eed06dd024defc1904818d928e904ae14e6 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","comp"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.12.1'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + comp(env_var: str) -\u003e str:\n import os\n\n value = os.getenv(env_var, + \"\")\n\n if value == \"\":\n raise Exception(\"Env var is not set\")\n\n return + value\n\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' + - name: components-root + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"comp"}}}},"tasks":{"comp":{"cachingOptions":{},"componentRef":{"name":"comp-comp"},"inputs":{"parameters":{"env_var":{"componentInputParameter":"env_var"}}},"taskInfo":{"name":"comp"}}}},"inputDefinitions":{"parameters":{"env_var":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - test-env-exists + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - 
'{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} 
+ name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-5993ccc78137b54f60188169af3d4eed06dd024defc1904818d928e904ae14e6}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-comp"},"inputs":{"parameters":{"env_var":{"componentInputParameter":"env_var"}}},"taskInfo":{"name":"comp"}}' + - name: container + value: '{{workflow.parameters.implementations-5993ccc78137b54f60188169af3d4eed06dd024defc1904818d928e904ae14e6}}' + - name: task-name + value: comp + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: comp-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.comp-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.comp-driver.outputs.parameters.cached-decision}}' + depends: comp-driver.Succeeded + name: comp + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - test-env-exists + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + 
- name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/fail_v2.yaml b/test_data/compiled-workflows/fail_v2.yaml new file mode 100644 index 00000000000..bc5f1ad5a4e --- /dev/null +++ b/test_data/compiled-workflows/fail_v2.yaml @@ -0,0 +1,369 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: fail-pipeline- +spec: + arguments: + parameters: + - name: components-b568fbdaaed9b6c215b65a277dc1107af4099ba40aa61cffdbae497666a77ba4 + value: '{"executorLabel":"exec-fail"}' + - name: implementations-b568fbdaaed9b6c215b65a277dc1107af4099ba40aa61cffdbae497666a77ba4 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","fail"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + fail():\n ''''''Fails''''''\n import sys\n sys.exit(1)\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"fail":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-fail"},"taskInfo":{"name":"fail"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - fail-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - 
name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-b568fbdaaed9b6c215b65a277dc1107af4099ba40aa61cffdbae497666a77ba4}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-fail"},"taskInfo":{"name":"fail"}}' + - name: container + value: '{{workflow.parameters.implementations-b568fbdaaed9b6c215b65a277dc1107af4099ba40aa61cffdbae497666a77ba4}}' + - name: task-name + value: fail + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: fail-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.fail-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.fail-driver.outputs.parameters.cached-decision}}' + depends: fail-driver.Succeeded + name: fail + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - 
--pipeline_name + - fail-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/flip_coin.yaml b/test_data/compiled-workflows/flip_coin.yaml new file mode 100644 index 00000000000..13aa7478103 --- /dev/null +++ b/test_data/compiled-workflows/flip_coin.yaml @@ -0,0 +1,770 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: conditional-execution-pipeline- +spec: + arguments: + parameters: + - name: components-66f7b70943b22b0e93ab97e737d33d87c19e743933c63d7573ec6f55617cb766 + value: '{"executorLabel":"exec-print-msg","inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"}}}}' + - name: implementations-66f7b70943b22b0e93ab97e737d33d87c19e743933c63d7573ec6f55617cb766 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_msg"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_msg(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\n"],"image":"python:3.9"}' + - name: components-comp-condition-2 + value: '{"dag":{"tasks":{"print-msg":{"cachingOptions":{},"componentRef":{"name":"comp-print-msg"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"heads + and {{$.inputs.parameters[''pipelinechannel--random-num-Output'']}} \u003e + 5!"}},"pipelinechannel--random-num-Output":{"componentInputParameter":"pipelinechannel--random-num-Output"}}},"taskInfo":{"name":"print-msg"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-Output":{"parameterType":"STRING"},"pipelinechannel--random-num-Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-comp-condition-3 + value: '{"dag":{"tasks":{"print-msg-2":{"cachingOptions":{},"componentRef":{"name":"comp-print-msg-2"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"heads + and {{$.inputs.parameters[''pipelinechannel--random-num-Output'']}} \u003c= + 5!"}},"pipelinechannel--random-num-Output":{"componentInputParameter":"pipelinechannel--random-num-Output"}}},"taskInfo":{"name":"print-msg-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-Output":{"parameterType":"STRING"},"pipelinechannel--random-num-Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-e6fdb657c103ed2cbd67c3611182d3961f70724e37166e8b94304cf95b0c736b + value: '{"executorLabel":"exec-random-num","inputDefinitions":{"parameters":{"high":{"parameterType":"NUMBER_INTEGER"},"low":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-e6fdb657c103ed2cbd67c3611182d3961f70724e37166e8b94304cf95b0c736b + value: '{"args":["--executor_input","{{$}}","--function_to_execute","random_num"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + random_num(low: int, high: int) -\u003e int:\n \"\"\"Generate a random + number between low and high.\"\"\"\n import random # noqa: PLC0415\n\n result + = random.randint(low, high)\n print(result)\n return result\n\n"],"image":"python:3.9"}' + - name: components-comp-condition-1 + value: '{"dag":{"tasks":{"condition-2":{"componentRef":{"name":"comp-condition-2"},"dependentTasks":["random-num"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"},"pipelinechannel--random-num-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"random-num"}}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--random-num-Output'']) + \u003e 5"}},"condition-3":{"componentRef":{"name":"comp-condition-3"},"dependentTasks":["random-num"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"},"pipelinechannel--random-num-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"random-num"}}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--random-num-Output'']) + \u003c= 5"}},"random-num":{"cachingOptions":{},"componentRef":{"name":"comp-random-num"},"inputs":{"parameters":{"high":{"runtimeValue":{"constant":9}},"low":{"runtimeValue":{"constant":0}}}},"taskInfo":{"name":"random-num"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-Output":{"parameterType":"STRING"}}}}' + - name: components-comp-condition-5 + value: '{"dag":{"tasks":{"print-msg-3":{"cachingOptions":{},"componentRef":{"name":"comp-print-msg-3"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"tails + and {{$.inputs.parameters[''pipelinechannel--random-num-2-Output'']}} \u003e + 15!"}},"pipelinechannel--random-num-2-Output":{"componentInputParameter":"pipelinechannel--random-num-2-Output"}}},"taskInfo":{"name":"print-msg-3"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-Output":{"parameterType":"STRING"},"pipelinechannel--random-num-2-Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-comp-condition-6 + value: '{"dag":{"tasks":{"print-msg-4":{"cachingOptions":{},"componentRef":{"name":"comp-print-msg-4"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"tails + and {{$.inputs.parameters[''pipelinechannel--random-num-2-Output'']}} \u003c= + 15!"}},"pipelinechannel--random-num-2-Output":{"componentInputParameter":"pipelinechannel--random-num-2-Output"}}},"taskInfo":{"name":"print-msg-4"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-Output":{"parameterType":"STRING"},"pipelinechannel--random-num-2-Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-comp-condition-4 + 
value: '{"dag":{"tasks":{"condition-5":{"componentRef":{"name":"comp-condition-5"},"dependentTasks":["random-num-2"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"},"pipelinechannel--random-num-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"random-num-2"}}}},"taskInfo":{"name":"condition-5"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--random-num-2-Output'']) + \u003e 15"}},"condition-6":{"componentRef":{"name":"comp-condition-6"},"dependentTasks":["random-num-2"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"},"pipelinechannel--random-num-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"random-num-2"}}}},"taskInfo":{"name":"condition-6"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--random-num-2-Output'']) + \u003c= 15"}},"random-num-2":{"cachingOptions":{},"componentRef":{"name":"comp-random-num-2"},"inputs":{"parameters":{"high":{"runtimeValue":{"constant":19}},"low":{"runtimeValue":{"constant":10}}}},"taskInfo":{"name":"random-num-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-Output":{"parameterType":"STRING"}}}}' + - name: components-8febb76ff31d4bcdea4ae0c2f886b818c1d766cfe9bec311de0f57970d0ccb9b + value: '{"executorLabel":"exec-flip-coin","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-8febb76ff31d4bcdea4ae0c2f886b818c1d766cfe9bec311de0f57970d0ccb9b + value: '{"args":["--executor_input","{{$}}","--function_to_execute","flip_coin"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + flip_coin() -\u003e str:\n \"\"\"Flip a coin and output heads or tails + randomly.\"\"\"\n import random # noqa: PLC0415\n\n result = \"heads\" + if random.randint(0, 1) == 0 else \"tails\"\n print(result)\n return + result\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"condition-1":{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-Output''] + == ''heads''"}},"condition-4":{"componentRef":{"name":"comp-condition-4"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-4"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-Output''] + == 
''tails''"}},"flip-coin":{"cachingOptions":{},"componentRef":{"name":"comp-flip-coin"},"taskInfo":{"name":"flip-coin"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - conditional-execution-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: 
dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-66f7b70943b22b0e93ab97e737d33d87c19e743933c63d7573ec6f55617cb766}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-print-msg"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"heads + and {{$.inputs.parameters[''pipelinechannel--random-num-Output'']}} + \u003e 5!"}},"pipelinechannel--random-num-Output":{"componentInputParameter":"pipelinechannel--random-num-Output"}}},"taskInfo":{"name":"print-msg"}}' + - name: container + value: '{{workflow.parameters.implementations-66f7b70943b22b0e93ab97e737d33d87c19e743933c63d7573ec6f55617cb766}}' + - name: task-name + value: print-msg + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-msg-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-msg-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-msg-driver.outputs.parameters.cached-decision}}' + depends: print-msg-driver.Succeeded + name: print-msg + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-2 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-66f7b70943b22b0e93ab97e737d33d87c19e743933c63d7573ec6f55617cb766}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-print-msg-2"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"heads + and {{$.inputs.parameters[''pipelinechannel--random-num-Output'']}} + \u003c= 5!"}},"pipelinechannel--random-num-Output":{"componentInputParameter":"pipelinechannel--random-num-Output"}}},"taskInfo":{"name":"print-msg-2"}}' + - name: container + value: '{{workflow.parameters.implementations-66f7b70943b22b0e93ab97e737d33d87c19e743933c63d7573ec6f55617cb766}}' + - name: task-name + value: print-msg-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-msg-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-msg-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-msg-2-driver.outputs.parameters.cached-decision}}' + depends: print-msg-2-driver.Succeeded + name: print-msg-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-3 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - conditional-execution-pipeline + - --run_id + - '{{workflow.uid}}' 
+ - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-2"},"dependentTasks":["random-num"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"},"pipelinechannel--random-num-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"random-num"}}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--random-num-Output'']) + \u003e 5"}}' + - name: task-name + value: condition-2 + depends: random-num.Succeeded + name: condition-2-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-2-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-2-driver.outputs.parameters.condition}}' + depends: condition-2-driver.Succeeded + name: condition-2 + template: comp-condition-2 + when: '{{tasks.condition-2-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-3}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: 
'{"componentRef":{"name":"comp-condition-3"},"dependentTasks":["random-num"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"},"pipelinechannel--random-num-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"random-num"}}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--random-num-Output'']) + \u003c= 5"}}' + - name: task-name + value: condition-3 + depends: random-num.Succeeded + name: condition-3-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-3-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-3-driver.outputs.parameters.condition}}' + depends: condition-3-driver.Succeeded + name: condition-3 + template: comp-condition-3 + when: '{{tasks.condition-3-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-e6fdb657c103ed2cbd67c3611182d3961f70724e37166e8b94304cf95b0c736b}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-random-num"},"inputs":{"parameters":{"high":{"runtimeValue":{"constant":9}},"low":{"runtimeValue":{"constant":0}}}},"taskInfo":{"name":"random-num"}}' + - name: container + value: '{{workflow.parameters.implementations-e6fdb657c103ed2cbd67c3611182d3961f70724e37166e8b94304cf95b0c736b}}' + - name: task-name + value: random-num + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: random-num-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.random-num-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.random-num-driver.outputs.parameters.cached-decision}}' + depends: random-num-driver.Succeeded + name: random-num + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-1 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-66f7b70943b22b0e93ab97e737d33d87c19e743933c63d7573ec6f55617cb766}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-print-msg-3"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"tails + and {{$.inputs.parameters[''pipelinechannel--random-num-2-Output'']}} + \u003e 15!"}},"pipelinechannel--random-num-2-Output":{"componentInputParameter":"pipelinechannel--random-num-2-Output"}}},"taskInfo":{"name":"print-msg-3"}}' + - name: container + value: '{{workflow.parameters.implementations-66f7b70943b22b0e93ab97e737d33d87c19e743933c63d7573ec6f55617cb766}}' + - name: task-name + value: print-msg-3 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-msg-3-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-msg-3-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-msg-3-driver.outputs.parameters.cached-decision}}' + depends: print-msg-3-driver.Succeeded + name: print-msg-3 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-5 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: 
'{{workflow.parameters.components-66f7b70943b22b0e93ab97e737d33d87c19e743933c63d7573ec6f55617cb766}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-print-msg-4"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"tails + and {{$.inputs.parameters[''pipelinechannel--random-num-2-Output'']}} + \u003c= 15!"}},"pipelinechannel--random-num-2-Output":{"componentInputParameter":"pipelinechannel--random-num-2-Output"}}},"taskInfo":{"name":"print-msg-4"}}' + - name: container + value: '{{workflow.parameters.implementations-66f7b70943b22b0e93ab97e737d33d87c19e743933c63d7573ec6f55617cb766}}' + - name: task-name + value: print-msg-4 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-msg-4-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-msg-4-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-msg-4-driver.outputs.parameters.cached-decision}}' + depends: print-msg-4-driver.Succeeded + name: print-msg-4 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-6 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-5}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-5"},"dependentTasks":["random-num-2"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"},"pipelinechannel--random-num-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"random-num-2"}}}},"taskInfo":{"name":"condition-5"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--random-num-2-Output'']) + \u003e 15"}}' + - name: task-name + value: condition-5 + depends: random-num-2.Succeeded + name: condition-5-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-5-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-5-driver.outputs.parameters.condition}}' + depends: condition-5-driver.Succeeded + name: condition-5 + template: comp-condition-5 + when: '{{tasks.condition-5-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-6}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-6"},"dependentTasks":["random-num-2"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"},"pipelinechannel--random-num-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"random-num-2"}}}},"taskInfo":{"name":"condition-6"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--random-num-2-Output'']) + \u003c= 15"}}' + - name: task-name + value: condition-6 + depends: random-num-2.Succeeded + name: condition-6-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-6-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-6-driver.outputs.parameters.condition}}' + depends: condition-6-driver.Succeeded + name: 
condition-6 + template: comp-condition-6 + when: '{{tasks.condition-6-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-e6fdb657c103ed2cbd67c3611182d3961f70724e37166e8b94304cf95b0c736b}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-random-num-2"},"inputs":{"parameters":{"high":{"runtimeValue":{"constant":19}},"low":{"runtimeValue":{"constant":10}}}},"taskInfo":{"name":"random-num-2"}}' + - name: container + value: '{{workflow.parameters.implementations-e6fdb657c103ed2cbd67c3611182d3961f70724e37166e8b94304cf95b0c736b}}' + - name: task-name + value: random-num-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: random-num-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.random-num-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.random-num-2-driver.outputs.parameters.cached-decision}}' + depends: random-num-2-driver.Succeeded + name: random-num-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-4 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-Output''] + == ''heads''"}}' + - name: task-name + value: condition-1 + depends: flip-coin.Succeeded + name: condition-1-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-1-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-1-driver.outputs.parameters.condition}}' + depends: condition-1-driver.Succeeded + name: condition-1 + template: comp-condition-1 + when: '{{tasks.condition-1-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-4}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-4"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-4"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-Output''] + == ''tails''"}}' + - name: task-name + value: condition-4 + depends: flip-coin.Succeeded + name: condition-4-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-4-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-4-driver.outputs.parameters.condition}}' + depends: condition-4-driver.Succeeded + name: condition-4 + template: comp-condition-4 + when: '{{tasks.condition-4-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: 
'{{workflow.parameters.components-8febb76ff31d4bcdea4ae0c2f886b818c1d766cfe9bec311de0f57970d0ccb9b}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-flip-coin"},"taskInfo":{"name":"flip-coin"}}' + - name: container + value: '{{workflow.parameters.implementations-8febb76ff31d4bcdea4ae0c2f886b818c1d766cfe9bec311de0f57970d0ccb9b}}' + - name: task-name + value: flip-coin + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: flip-coin-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.flip-coin-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.flip-coin-driver.outputs.parameters.cached-decision}}' + depends: flip-coin-driver.Succeeded + name: flip-coin + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/hello-world.yaml b/test_data/compiled-workflows/hello-world.yaml new file mode 100644 index 00000000000..7d2068271e0 --- /dev/null +++ b/test_data/compiled-workflows/hello-world.yaml @@ -0,0 +1,360 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: echo- +spec: + arguments: + parameters: + - name: components-cf9c81ac9e6ab0dcdd92cb89ed717317e681cb0645cb5ddfc4824b1de14346b3 + value: '{"executorLabel":"exec-echo"}' + - name: implementations-cf9c81ac9e6ab0dcdd92cb89ed717317e681cb0645cb5ddfc4824b1de14346b3 + value: '{"args":["hello world"],"command":["echo"],"image":"public.ecr.aws/docker/library/python:3.12"}' + - name: components-root + value: '{"dag":{"tasks":{"echo":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-echo"},"taskInfo":{"name":"echo"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - echo + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - 
--mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-cf9c81ac9e6ab0dcdd92cb89ed717317e681cb0645cb5ddfc4824b1de14346b3}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-echo"},"taskInfo":{"name":"echo"}}' + - name: container + value: '{{workflow.parameters.implementations-cf9c81ac9e6ab0dcdd92cb89ed717317e681cb0645cb5ddfc4824b1de14346b3}}' + - name: task-name + value: echo + - name: parent-dag-id + value: 
'{{inputs.parameters.parent-dag-id}}' + name: echo-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.echo-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.echo-driver.outputs.parameters.cached-decision}}' + depends: echo-driver.Succeeded + name: echo + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - echo + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/identity.yaml b/test_data/compiled-workflows/identity.yaml new file mode 100644 index 00000000000..495bf3be9c9 --- /dev/null +++ b/test_data/compiled-workflows/identity.yaml @@ -0,0 +1,369 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: identity- +spec: + arguments: + parameters: + - name: components-ba6f3c33ed7f8f19467251611934d428f27109368234070d61411c092f2ccf11 + value: 
'{"executorLabel":"exec-identity","inputDefinitions":{"parameters":{"value":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-ba6f3c33ed7f8f19467251611934d428f27109368234070d61411c092f2ccf11 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","identity"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + identity(value: str) -\u003e str:\n return value\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"identity"}}}},"tasks":{"identity":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-identity"},"inputs":{"parameters":{"value":{"componentInputParameter":"value"}}},"taskInfo":{"name":"identity"}}}},"inputDefinitions":{"parameters":{"value":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - identity + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: 
pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ba6f3c33ed7f8f19467251611934d428f27109368234070d61411c092f2ccf11}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-identity"},"inputs":{"parameters":{"value":{"componentInputParameter":"value"}}},"taskInfo":{"name":"identity"}}' + - name: container + value: '{{workflow.parameters.implementations-ba6f3c33ed7f8f19467251611934d428f27109368234070d61411c092f2ccf11}}' + - name: task-name + value: identity + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: identity-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.identity-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.identity-driver.outputs.parameters.cached-decision}}' + depends: identity-driver.Succeeded + name: identity + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - identity + - --run_id + - '{{workflow.uid}}' + - --run_name + - 
'{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/if_elif_else_complex.yaml b/test_data/compiled-workflows/if_elif_else_complex.yaml new file mode 100644 index 00000000000..efc29fc1fa5 --- /dev/null +++ b/test_data/compiled-workflows/if_elif_else_complex.yaml @@ -0,0 +1,1308 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: lucky-number-pipeline- +spec: + arguments: + parameters: + - name: components-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac + value: '{"executorLabel":"exec-print-and-return","inputDefinitions":{"parameters":{"text":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_and_return"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_and_return(text: str) -\u003e str:\n print(text)\n return text\n\n"],"image":"python:3.9"}' + - name: components-comp-condition-3 + value: '{"dag":{"tasks":{"print-and-return":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Adding + drumroll on last trial!"}}}},"taskInfo":{"name":"print-and-return"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--add_drumroll":{"parameterType":"BOOLEAN"},"pipelinechannel--trials-loop-item":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-comp-condition-2 + value: '{"dag":{"tasks":{"condition-3":{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--add_drumroll":{"componentInputParameter":"pipelinechannel--add_drumroll"},"pipelinechannel--trials-loop-item":{"componentInputParameter":"pipelinechannel--trials-loop-item"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--trials-loop-item'']) + == 3"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--add_drumroll":{"parameterType":"BOOLEAN"},"pipelinechannel--trials-loop-item":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-comp-for-loop-16 + value: '{"dag":{"tasks":{"print-and-return-9":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-9"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Announcing + again: Got the lucky number 5000! 
A one in 10,000 chance."}}}},"taskInfo":{"name":"print-and-return-9"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"parameterType":"NUMBER_INTEGER"},"pipelinechannel--loop-item-param-15":{"parameterType":"NUMBER_INTEGER"},"pipelinechannel--repeat_if_lucky_number":{"parameterType":"BOOLEAN"}}}}' + - name: components-comp-condition-14 + value: '{"dag":{"tasks":{"for-loop-16":{"componentRef":{"name":"comp-for-loop-16"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--repeat_if_lucky_number":{"componentInputParameter":"pipelinechannel--repeat_if_lucky_number"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-15","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-16"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"parameterType":"NUMBER_INTEGER"},"pipelinechannel--repeat_if_lucky_number":{"parameterType":"BOOLEAN"}}}}' + - name: components-comp-condition-13 + value: '{"dag":{"tasks":{"condition-14":{"componentRef":{"name":"comp-condition-14"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--repeat_if_lucky_number":{"componentInputParameter":"pipelinechannel--repeat_if_lucky_number"}}},"taskInfo":{"name":"condition-14"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--repeat_if_lucky_number''] + == true"}},"print-and-return-8":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-8"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Announcing: + Got the lucky number 5000! A one in 10,000 chance."}}}},"taskInfo":{"name":"print-and-return-8"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"parameterType":"NUMBER_INTEGER"},"pipelinechannel--repeat_if_lucky_number":{"parameterType":"BOOLEAN"}}}}' + - name: components-comp-condition-6 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--print-and-return-2-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"print-and-return-2"}}}},"tasks":{"print-and-return-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-2"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Got + a low even number!"}}}},"taskInfo":{"name":"print-and-return-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"parameterType":"NUMBER_INTEGER"},"pipelinechannel--is-even-or-odd-Output":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"pipelinechannel--print-and-return-2-Output":{"parameterType":"STRING"}}}}' + - name: components-comp-condition-7 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--print-and-return-3-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"print-and-return-3"}}}},"tasks":{"print-and-return-3":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-3"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Got + a low odd number!"}}}},"taskInfo":{"name":"print-and-return-3"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"parameterType":"NUMBER_INTEGER"},"pipelinechannel--is-even-or-odd-Output":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"pipelinechannel--print-and-return-3-Output":{"parameterType":"STRING"}}}}' + - name: 
components-comp-condition-branches-5 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--condition-branches-5-oneof-1":{"valueFromOneof":{"parameterSelectors":[{"outputParameterKey":"pipelinechannel--print-and-return-2-Output","producerSubtask":"condition-6"},{"outputParameterKey":"pipelinechannel--print-and-return-3-Output","producerSubtask":"condition-7"}]}}}},"tasks":{"condition-6":{"componentRef":{"name":"comp-condition-6"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--is-even-or-odd-Output":{"componentInputParameter":"pipelinechannel--is-even-or-odd-Output"}}},"taskInfo":{"name":"condition-6"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--is-even-or-odd-Output''] + == ''even''"}},"condition-7":{"componentRef":{"name":"comp-condition-7"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--is-even-or-odd-Output":{"componentInputParameter":"pipelinechannel--is-even-or-odd-Output"}}},"taskInfo":{"name":"condition-7"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--is-even-or-odd-Output''] + == ''even'')"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"parameterType":"NUMBER_INTEGER"},"pipelinechannel--is-even-or-odd-Output":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"pipelinechannel--condition-branches-5-oneof-1":{"parameterType":"STRING"}}}}' + - name: components-07f4c47d015aff01f4f9e8a60254c059f980d3b1c2afd8798b34b0fece648e3f + value: '{"executorLabel":"exec-is-even-or-odd","inputDefinitions":{"parameters":{"num":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-07f4c47d015aff01f4f9e8a60254c059f980d3b1c2afd8798b34b0fece648e3f + value: '{"args":["--executor_input","{{$}}","--function_to_execute","is_even_or_odd"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + is_even_or_odd(num: int) -\u003e str:\n return ''odd'' if num % 2 else + ''even''\n\n"],"image":"python:3.9"}' + - name: components-comp-condition-8 + value: '{"dag":{"tasks":{"condition-branches-5":{"componentRef":{"name":"comp-condition-branches-5"},"dependentTasks":["is-even-or-odd"],"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--is-even-or-odd-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"is-even-or-odd"}}}},"taskInfo":{"name":"condition-branches-5"}},"is-even-or-odd":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-is-even-or-odd"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"}}},"taskInfo":{"name":"is-even-or-odd"}},"print-and-return-4":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-4"},"dependentTasks":["condition-branches-5"],"inputs":{"parameters":{"text":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--condition-branches-5-oneof-1","producerTask":"condition-branches-5"}}}},"taskInfo":{"name":"print-and-return-4"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-comp-condition-11 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--print-and-return-5-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"print-and-return-5"}}}},"tasks":{"print-and-return-5":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-5"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Got + a high even number!"}}}},"taskInfo":{"name":"print-and-return-5"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"parameterType":"NUMBER_INTEGER"},"pipelinechannel--is-even-or-odd-2-Output":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"pipelinechannel--print-and-return-5-Output":{"parameterType":"STRING"}}}}' + - name: components-comp-condition-12 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--print-and-return-6-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"print-and-return-6"}}}},"tasks":{"print-and-return-6":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-6"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Got + a high odd number!"}}}},"taskInfo":{"name":"print-and-return-6"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"parameterType":"NUMBER_INTEGER"},"pipelinechannel--is-even-or-odd-2-Output":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"pipelinechannel--print-and-return-6-Output":{"parameterType":"STRING"}}}}' + - name: 
components-comp-condition-branches-10 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--condition-branches-10-oneof-1":{"valueFromOneof":{"parameterSelectors":[{"outputParameterKey":"pipelinechannel--print-and-return-5-Output","producerSubtask":"condition-11"},{"outputParameterKey":"pipelinechannel--print-and-return-6-Output","producerSubtask":"condition-12"}]}}}},"tasks":{"condition-11":{"componentRef":{"name":"comp-condition-11"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--is-even-or-odd-2-Output":{"componentInputParameter":"pipelinechannel--is-even-or-odd-2-Output"}}},"taskInfo":{"name":"condition-11"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--is-even-or-odd-2-Output''] + == ''even''"}},"condition-12":{"componentRef":{"name":"comp-condition-12"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--is-even-or-odd-2-Output":{"componentInputParameter":"pipelinechannel--is-even-or-odd-2-Output"}}},"taskInfo":{"name":"condition-12"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--is-even-or-odd-2-Output''] + == ''even'')"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"parameterType":"NUMBER_INTEGER"},"pipelinechannel--is-even-or-odd-2-Output":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"pipelinechannel--condition-branches-10-oneof-1":{"parameterType":"STRING"}}}}' + - name: components-comp-condition-9 + value: '{"dag":{"tasks":{"condition-branches-10":{"componentRef":{"name":"comp-condition-branches-10"},"dependentTasks":["is-even-or-odd-2"],"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--is-even-or-odd-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"is-even-or-odd-2"}}}},"taskInfo":{"name":"condition-branches-10"}},"is-even-or-odd-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-is-even-or-odd-2"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"}}},"taskInfo":{"name":"is-even-or-odd-2"}},"print-and-return-7":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-7"},"dependentTasks":["condition-branches-10"],"inputs":{"parameters":{"text":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--condition-branches-10-oneof-1","producerTask":"condition-branches-10"}}}},"taskInfo":{"name":"print-and-return-7"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-comp-condition-branches-4 + value: '{"dag":{"tasks":{"condition-13":{"componentRef":{"name":"comp-condition-13"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--repeat_if_lucky_number":{"componentInputParameter":"pipelinechannel--repeat_if_lucky_number"}}},"taskInfo":{"name":"condition-13"},"triggerPolicy":{"condition":"!(int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) + \u003c 5000) \u0026\u0026 !(int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) + \u003e 
5000)"}},"condition-8":{"componentRef":{"name":"comp-condition-8"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"}}},"taskInfo":{"name":"condition-8"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) + \u003c 5000"}},"condition-9":{"componentRef":{"name":"comp-condition-9"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"}}},"taskInfo":{"name":"condition-9"},"triggerPolicy":{"condition":"!(int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) + \u003c 5000) \u0026\u0026 int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) + \u003e 5000"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"parameterType":"NUMBER_INTEGER"},"pipelinechannel--repeat_if_lucky_number":{"parameterType":"BOOLEAN"}}}}' + - name: components-dc6d5e95e0c0be0e93cfc8471078d7b7474f5d54cf510b6f3643e0364e578c0e + value: '{"executorLabel":"exec-int-0-to-9999","outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-dc6d5e95e0c0be0e93cfc8471078d7b7474f5d54cf510b6f3643e0364e578c0e + value: '{"args":["--executor_input","{{$}}","--function_to_execute","int_0_to_9999"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + int_0_to_9999() -\u003e int:\n import random\n return random.randint(0, + 9999)\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-1 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"int-0-to-9999"}}}},"tasks":{"condition-2":{"componentRef":{"name":"comp-condition-2"},"inputs":{"parameters":{"pipelinechannel--add_drumroll":{"componentInputParameter":"pipelinechannel--add_drumroll"},"pipelinechannel--trials-loop-item":{"componentInputParameter":"pipelinechannel--trials-loop-item"}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--add_drumroll''] + == 
true"}},"condition-branches-4":{"componentRef":{"name":"comp-condition-branches-4"},"dependentTasks":["int-0-to-9999"],"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"int-0-to-9999"}},"pipelinechannel--repeat_if_lucky_number":{"componentInputParameter":"pipelinechannel--repeat_if_lucky_number"}}},"taskInfo":{"name":"condition-branches-4"}},"int-0-to-9999":{"cachingOptions":{},"componentRef":{"name":"comp-int-0-to-9999"},"taskInfo":{"name":"int-0-to-9999"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--add_drumroll":{"parameterType":"BOOLEAN"},"pipelinechannel--repeat_if_lucky_number":{"parameterType":"BOOLEAN"},"pipelinechannel--trials":{"parameterType":"LIST"},"pipelinechannel--trials-loop-item":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"parameterType":"LIST"}}}}' + - name: components-c48160ebe824a2b5355816c99d1f335ad55b8ba40e2f53c260c844c16bd2f6df + value: '{"executorLabel":"exec-print-ints","inputDefinitions":{"parameters":{"ints":{"parameterType":"LIST"}}}}' + - name: implementations-c48160ebe824a2b5355816c99d1f335ad55b8ba40e2f53c260c844c16bd2f6df + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_ints"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_ints(ints: List[int]):\n print(ints)\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"for-loop-1":{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--add_drumroll":{"componentInputParameter":"add_drumroll"},"pipelinechannel--repeat_if_lucky_number":{"componentInputParameter":"repeat_if_lucky_number"},"pipelinechannel--trials":{"componentInputParameter":"trials"}}},"parameterIterator":{"itemInput":"pipelinechannel--trials-loop-item","items":{"inputParameter":"pipelinechannel--trials"}},"taskInfo":{"name":"for-loop-1"}},"print-ints":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-ints"},"dependentTasks":["for-loop-1"],"inputs":{"parameters":{"ints":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--int-0-to-9999-Output","producerTask":"for-loop-1"}}}},"taskInfo":{"name":"print-ints"}}}},"inputDefinitions":{"parameters":{"add_drumroll":{"defaultValue":true,"isOptional":true,"parameterType":"BOOLEAN"},"repeat_if_lucky_number":{"defaultValue":true,"isOptional":true,"parameterType":"BOOLEAN"},"trials":{"defaultValue":[1,2,3],"isOptional":true,"parameterType":"LIST"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - lucky-number-pipeline + - --run_id + - 
'{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: 
'{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Adding + drumroll on last trial!"}}}},"taskInfo":{"name":"print-and-return"}}' + - name: container + value: '{{workflow.parameters.implementations-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task-name + value: print-and-return + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-and-return-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-and-return-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-and-return-driver.outputs.parameters.cached-decision}}' + depends: print-and-return-driver.Succeeded + name: print-and-return + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-3 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - lucky-number-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + 
value: '{{workflow.parameters.components-comp-condition-3}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--add_drumroll":{"componentInputParameter":"pipelinechannel--add_drumroll"},"pipelinechannel--trials-loop-item":{"componentInputParameter":"pipelinechannel--trials-loop-item"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--trials-loop-item'']) + == 3"}}' + - name: task-name + value: condition-3 + name: condition-3-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-3-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-3-driver.outputs.parameters.condition}}' + depends: condition-3-driver.Succeeded + name: condition-3 + template: comp-condition-3 + when: '{{tasks.condition-3-driver.outputs.parameters.condition}} != false' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-2 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-9"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Announcing + again: Got the lucky number 5000! A one in 10,000 chance."}}}},"taskInfo":{"name":"print-and-return-9"}}' + - name: container + value: '{{workflow.parameters.implementations-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task-name + value: print-and-return-9 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-and-return-9-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-and-return-9-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-and-return-9-driver.outputs.parameters.cached-decision}}' + depends: print-and-return-9-driver.Succeeded + name: print-and-return-9 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-16 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-16}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-16"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--repeat_if_lucky_number":{"componentInputParameter":"pipelinechannel--repeat_if_lucky_number"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-15","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-16"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-16 + inputs: + parameters: + - name: 
parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-16-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-16}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-16"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--repeat_if_lucky_number":{"componentInputParameter":"pipelinechannel--repeat_if_lucky_number"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-15","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-16"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-16-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-16-for-loop-16-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-16 + template: comp-for-loop-16-for-loop-16-iterator + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-14 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-14}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-14"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--repeat_if_lucky_number":{"componentInputParameter":"pipelinechannel--repeat_if_lucky_number"}}},"taskInfo":{"name":"condition-14"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--repeat_if_lucky_number''] + == true"}}' + - name: task-name + value: condition-14 + name: condition-14-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-14-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-14-driver.outputs.parameters.condition}}' + depends: condition-14-driver.Succeeded + name: condition-14 + template: comp-condition-14 + when: '{{tasks.condition-14-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-8"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Announcing: + Got the lucky number 5000! 
A one in 10,000 chance."}}}},"taskInfo":{"name":"print-and-return-8"}}' + - name: container + value: '{{workflow.parameters.implementations-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task-name + value: print-and-return-8 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-and-return-8-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-and-return-8-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-and-return-8-driver.outputs.parameters.cached-decision}}' + depends: print-and-return-8-driver.Succeeded + name: print-and-return-8 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-13 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-2"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Got + a low even number!"}}}},"taskInfo":{"name":"print-and-return-2"}}' + - name: container + value: '{{workflow.parameters.implementations-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task-name + value: print-and-return-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-and-return-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-and-return-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-and-return-2-driver.outputs.parameters.cached-decision}}' + depends: print-and-return-2-driver.Succeeded + name: print-and-return-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-6 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-3"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Got + a low odd number!"}}}},"taskInfo":{"name":"print-and-return-3"}}' + - name: container + value: '{{workflow.parameters.implementations-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task-name + value: print-and-return-3 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-and-return-3-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-and-return-3-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-and-return-3-driver.outputs.parameters.cached-decision}}' + depends: print-and-return-3-driver.Succeeded + name: print-and-return-3 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-7 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-6}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: 
'{"componentRef":{"name":"comp-condition-6"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--is-even-or-odd-Output":{"componentInputParameter":"pipelinechannel--is-even-or-odd-Output"}}},"taskInfo":{"name":"condition-6"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--is-even-or-odd-Output''] + == ''even''"}}' + - name: task-name + value: condition-6 + name: condition-6-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-6-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-6-driver.outputs.parameters.condition}}' + depends: condition-6-driver.Succeeded + name: condition-6 + template: comp-condition-6 + when: '{{tasks.condition-6-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-7}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-7"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--is-even-or-odd-Output":{"componentInputParameter":"pipelinechannel--is-even-or-odd-Output"}}},"taskInfo":{"name":"condition-7"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--is-even-or-odd-Output''] + == ''even'')"}}' + - name: task-name + value: condition-7 + name: condition-7-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-7-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-7-driver.outputs.parameters.condition}}' + depends: condition-7-driver.Succeeded + name: condition-7 + template: comp-condition-7 + when: '{{tasks.condition-7-driver.outputs.parameters.condition}} != false' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-branches-5 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-branches-5}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-branches-5"},"dependentTasks":["is-even-or-odd"],"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--is-even-or-odd-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"is-even-or-odd"}}}},"taskInfo":{"name":"condition-branches-5"}}' + - name: task-name + value: condition-branches-5 + depends: is-even-or-odd.Succeeded + name: condition-branches-5-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-branches-5-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-branches-5-driver.outputs.parameters.condition}}' + depends: condition-branches-5-driver.Succeeded + name: condition-branches-5 + template: comp-condition-branches-5 + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-07f4c47d015aff01f4f9e8a60254c059f980d3b1c2afd8798b34b0fece648e3f}}' + - name: task + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-is-even-or-odd"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"}}},"taskInfo":{"name":"is-even-or-odd"}}' + - name: container + value: '{{workflow.parameters.implementations-07f4c47d015aff01f4f9e8a60254c059f980d3b1c2afd8798b34b0fece648e3f}}' + - name: task-name + value: is-even-or-odd + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: is-even-or-odd-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.is-even-or-odd-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.is-even-or-odd-driver.outputs.parameters.cached-decision}}' + depends: is-even-or-odd-driver.Succeeded + name: is-even-or-odd + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-4"},"dependentTasks":["condition-branches-5"],"inputs":{"parameters":{"text":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--condition-branches-5-oneof-1","producerTask":"condition-branches-5"}}}},"taskInfo":{"name":"print-and-return-4"}}' + - name: container + value: '{{workflow.parameters.implementations-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task-name + value: print-and-return-4 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: condition-branches-5.Succeeded + name: print-and-return-4-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-and-return-4-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-and-return-4-driver.outputs.parameters.cached-decision}}' + depends: print-and-return-4-driver.Succeeded + name: print-and-return-4 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-8 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-5"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Got + a high even number!"}}}},"taskInfo":{"name":"print-and-return-5"}}' + - name: container + value: '{{workflow.parameters.implementations-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task-name + value: print-and-return-5 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-and-return-5-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-and-return-5-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-and-return-5-driver.outputs.parameters.cached-decision}}' + depends: print-and-return-5-driver.Succeeded + name: print-and-return-5 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-11 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: 
component + value: '{{workflow.parameters.components-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-6"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Got + a high odd number!"}}}},"taskInfo":{"name":"print-and-return-6"}}' + - name: container + value: '{{workflow.parameters.implementations-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task-name + value: print-and-return-6 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-and-return-6-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-and-return-6-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-and-return-6-driver.outputs.parameters.cached-decision}}' + depends: print-and-return-6-driver.Succeeded + name: print-and-return-6 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-12 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-11}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-11"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--is-even-or-odd-2-Output":{"componentInputParameter":"pipelinechannel--is-even-or-odd-2-Output"}}},"taskInfo":{"name":"condition-11"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--is-even-or-odd-2-Output''] + == ''even''"}}' + - name: task-name + value: condition-11 + name: condition-11-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-11-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-11-driver.outputs.parameters.condition}}' + depends: condition-11-driver.Succeeded + name: condition-11 + template: comp-condition-11 + when: '{{tasks.condition-11-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-12}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-12"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--is-even-or-odd-2-Output":{"componentInputParameter":"pipelinechannel--is-even-or-odd-2-Output"}}},"taskInfo":{"name":"condition-12"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--is-even-or-odd-2-Output''] + == ''even'')"}}' + - name: task-name + value: condition-12 + name: condition-12-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-12-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-12-driver.outputs.parameters.condition}}' + depends: condition-12-driver.Succeeded + name: condition-12 + template: comp-condition-12 + when: '{{tasks.condition-12-driver.outputs.parameters.condition}} != false' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: 
comp-condition-branches-10 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-branches-10}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-branches-10"},"dependentTasks":["is-even-or-odd-2"],"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--is-even-or-odd-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"is-even-or-odd-2"}}}},"taskInfo":{"name":"condition-branches-10"}}' + - name: task-name + value: condition-branches-10 + depends: is-even-or-odd-2.Succeeded + name: condition-branches-10-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-branches-10-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-branches-10-driver.outputs.parameters.condition}}' + depends: condition-branches-10-driver.Succeeded + name: condition-branches-10 + template: comp-condition-branches-10 + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-07f4c47d015aff01f4f9e8a60254c059f980d3b1c2afd8798b34b0fece648e3f}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-is-even-or-odd-2"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"}}},"taskInfo":{"name":"is-even-or-odd-2"}}' + - name: container + value: '{{workflow.parameters.implementations-07f4c47d015aff01f4f9e8a60254c059f980d3b1c2afd8798b34b0fece648e3f}}' + - name: task-name + value: is-even-or-odd-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: is-even-or-odd-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.is-even-or-odd-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.is-even-or-odd-2-driver.outputs.parameters.cached-decision}}' + depends: is-even-or-odd-2-driver.Succeeded + name: is-even-or-odd-2 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-7"},"dependentTasks":["condition-branches-10"],"inputs":{"parameters":{"text":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--condition-branches-10-oneof-1","producerTask":"condition-branches-10"}}}},"taskInfo":{"name":"print-and-return-7"}}' + - name: container + value: '{{workflow.parameters.implementations-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task-name + value: print-and-return-7 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: condition-branches-10.Succeeded + name: print-and-return-7-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-and-return-7-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-and-return-7-driver.outputs.parameters.cached-decision}}' + depends: print-and-return-7-driver.Succeeded + name: print-and-return-7 + template: 
system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-9 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-13}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-13"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--repeat_if_lucky_number":{"componentInputParameter":"pipelinechannel--repeat_if_lucky_number"}}},"taskInfo":{"name":"condition-13"},"triggerPolicy":{"condition":"!(int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) + \u003c 5000) \u0026\u0026 !(int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) + \u003e 5000)"}}' + - name: task-name + value: condition-13 + name: condition-13-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-13-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-13-driver.outputs.parameters.condition}}' + depends: condition-13-driver.Succeeded + name: condition-13 + template: comp-condition-13 + when: '{{tasks.condition-13-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-8}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-8"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"}}},"taskInfo":{"name":"condition-8"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) + \u003c 5000"}}' + - name: task-name + value: condition-8 + name: condition-8-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-8-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-8-driver.outputs.parameters.condition}}' + depends: condition-8-driver.Succeeded + name: condition-8 + template: comp-condition-8 + when: '{{tasks.condition-8-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-9}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-9"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"}}},"taskInfo":{"name":"condition-9"},"triggerPolicy":{"condition":"!(int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) + \u003c 5000) \u0026\u0026 int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) + \u003e 5000"}}' + - name: task-name + value: condition-9 + name: condition-9-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-9-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-9-driver.outputs.parameters.condition}}' + depends: condition-9-driver.Succeeded + name: condition-9 + template: comp-condition-9 + when: '{{tasks.condition-9-driver.outputs.parameters.condition}} != false' + inputs: + parameters: 
+ - name: parent-dag-id + metadata: {} + name: comp-condition-branches-4 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-2"},"inputs":{"parameters":{"pipelinechannel--add_drumroll":{"componentInputParameter":"pipelinechannel--add_drumroll"},"pipelinechannel--trials-loop-item":{"componentInputParameter":"pipelinechannel--trials-loop-item"}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--add_drumroll''] + == true"}}' + - name: task-name + value: condition-2 + name: condition-2-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-2-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-2-driver.outputs.parameters.condition}}' + depends: condition-2-driver.Succeeded + name: condition-2 + template: comp-condition-2 + when: '{{tasks.condition-2-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-branches-4}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-branches-4"},"dependentTasks":["int-0-to-9999"],"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"int-0-to-9999"}},"pipelinechannel--repeat_if_lucky_number":{"componentInputParameter":"pipelinechannel--repeat_if_lucky_number"}}},"taskInfo":{"name":"condition-branches-4"}}' + - name: task-name + value: condition-branches-4 + depends: int-0-to-9999.Succeeded + name: condition-branches-4-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-branches-4-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-branches-4-driver.outputs.parameters.condition}}' + depends: condition-branches-4-driver.Succeeded + name: condition-branches-4 + template: comp-condition-branches-4 + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-dc6d5e95e0c0be0e93cfc8471078d7b7474f5d54cf510b6f3643e0364e578c0e}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-int-0-to-9999"},"taskInfo":{"name":"int-0-to-9999"}}' + - name: container + value: '{{workflow.parameters.implementations-dc6d5e95e0c0be0e93cfc8471078d7b7474f5d54cf510b6f3643e0364e578c0e}}' + - name: task-name + value: int-0-to-9999 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: int-0-to-9999-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.int-0-to-9999-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.int-0-to-9999-driver.outputs.parameters.cached-decision}}' + depends: int-0-to-9999-driver.Succeeded + name: int-0-to-9999 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-1 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-1}}' + - name: iteration-index + value: 
'{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--add_drumroll":{"componentInputParameter":"add_drumroll"},"pipelinechannel--repeat_if_lucky_number":{"componentInputParameter":"repeat_if_lucky_number"},"pipelinechannel--trials":{"componentInputParameter":"trials"}}},"parameterIterator":{"itemInput":"pipelinechannel--trials-loop-item","items":{"inputParameter":"pipelinechannel--trials"}},"taskInfo":{"name":"for-loop-1"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-1 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-1-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--add_drumroll":{"componentInputParameter":"add_drumroll"},"pipelinechannel--repeat_if_lucky_number":{"componentInputParameter":"repeat_if_lucky_number"},"pipelinechannel--trials":{"componentInputParameter":"trials"}}},"parameterIterator":{"itemInput":"pipelinechannel--trials-loop-item","items":{"inputParameter":"pipelinechannel--trials"}},"taskInfo":{"name":"for-loop-1"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-1-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-1-for-loop-1-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-1 + template: comp-for-loop-1-for-loop-1-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c48160ebe824a2b5355816c99d1f335ad55b8ba40e2f53c260c844c16bd2f6df}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-ints"},"dependentTasks":["for-loop-1"],"inputs":{"parameters":{"ints":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--int-0-to-9999-Output","producerTask":"for-loop-1"}}}},"taskInfo":{"name":"print-ints"}}' + - name: container + value: '{{workflow.parameters.implementations-c48160ebe824a2b5355816c99d1f335ad55b8ba40e2f53c260c844c16bd2f6df}}' + - name: task-name + value: print-ints + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: for-loop-1.Succeeded + name: print-ints-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-ints-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: 
'{{tasks.print-ints-driver.outputs.parameters.cached-decision}}' + depends: print-ints-driver.Succeeded + name: print-ints + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"add_drumroll":true,"repeat_if_lucky_number":true,"trials":[1,2,3]}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/if_elif_else_with_oneof_parameters.yaml b/test_data/compiled-workflows/if_elif_else_with_oneof_parameters.yaml new file mode 100644 index 00000000000..79680f0da9a --- /dev/null +++ b/test_data/compiled-workflows/if_elif_else_with_oneof_parameters.yaml @@ -0,0 +1,668 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: outer-pipeline- +spec: + arguments: + parameters: + - name: components-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac + value: '{"executorLabel":"exec-print-and-return-3","inputDefinitions":{"parameters":{"text":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_and_return"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_and_return(text: str) -\u003e str:\n print(text)\n return text\n\n"],"image":"python:3.9"}' + - name: components-comp-condition-2 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--print-and-return-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"print-and-return"}}}},"tasks":{"print-and-return":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Got + heads!"}}}},"taskInfo":{"name":"print-and-return"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-three-sided-die-Output":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"pipelinechannel--print-and-return-Output":{"parameterType":"STRING"}}}}' + - name: components-comp-condition-3 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--print-and-return-2-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"print-and-return-2"}}}},"tasks":{"print-and-return-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-2"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Got + tails!"}}}},"taskInfo":{"name":"print-and-return-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-three-sided-die-Output":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"pipelinechannel--print-and-return-2-Output":{"parameterType":"STRING"}}}}' + - name: components-6dff0af7f5de9b87fab66963c0544a7a050f6a297f26187648f401e15ba49cfd + value: '{"executorLabel":"exec-special-print-and-return","inputDefinitions":{"parameters":{"text":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"output_key":{"parameterType":"STRING"}}}}' + - name: implementations-6dff0af7f5de9b87fab66963c0544a7a050f6a297f26187648f401e15ba49cfd + value: '{"args":["--executor_input","{{$}}","--function_to_execute","special_print_and_return"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + special_print_and_return(text: str, output_key: dsl.OutputPath(str)):\n print(''Got + the special state:'', text)\n with open(output_key, ''w'') as f:\n f.write(text)\n\n"],"image":"python:3.9"}' + - name: components-comp-condition-4 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--special-print-and-return-output_key":{"valueFromParameter":{"outputParameterKey":"output_key","producerSubtask":"special-print-and-return"}}}},"tasks":{"special-print-and-return":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-special-print-and-return"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Draw!"}}}},"taskInfo":{"name":"special-print-and-return"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-three-sided-die-Output":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"pipelinechannel--special-print-and-return-output_key":{"parameterType":"STRING"}}}}' + - name: components-comp-condition-branches-1 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--condition-branches-1-oneof-1":{"valueFromOneof":{"parameterSelectors":[{"outputParameterKey":"pipelinechannel--print-and-return-Output","producerSubtask":"condition-2"},{"outputParameterKey":"pipelinechannel--print-and-return-2-Output","producerSubtask":"condition-3"},{"outputParameterKey":"pipelinechannel--special-print-and-return-output_key","producerSubtask":"condition-4"}]}}}},"tasks":{"condition-2":{"componentRef":{"name":"comp-condition-2"},"inputs":{"parameters":{"pipelinechannel--flip-three-sided-die-Output":{"componentInputParameter":"pipelinechannel--flip-three-sided-die-Output"}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] + == ''heads''"}},"condition-3":{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--flip-three-sided-die-Output":{"componentInputParameter":"pipelinechannel--flip-three-sided-die-Output"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] + == ''heads'') \u0026\u0026 inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] + == ''tails''"}},"condition-4":{"componentRef":{"name":"comp-condition-4"},"inputs":{"parameters":{"pipelinechannel--flip-three-sided-die-Output":{"componentInputParameter":"pipelinechannel--flip-three-sided-die-Output"}}},"taskInfo":{"name":"condition-4"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] + == ''heads'') \u0026\u0026 !(inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] + == 
''tails'')"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-three-sided-die-Output":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"pipelinechannel--condition-branches-1-oneof-1":{"parameterType":"STRING"}}}}' + - name: components-9a6ceddb3a88b2813013f31610e27e091dab5dc13510b28f7b468fdc3ccdfb86 + value: '{"executorLabel":"exec-flip-three-sided-die","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-9a6ceddb3a88b2813013f31610e27e091dab5dc13510b28f7b468fdc3ccdfb86 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","flip_three_sided_die"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + flip_three_sided_die() -\u003e str:\n import random\n val = random.randint(0, + 2)\n\n if val == 0:\n return ''heads''\n elif val == 1:\n return + ''tails''\n else:\n return ''draw''\n\n"],"image":"python:3.9"}' + - name: components-comp-roll-die-pipeline + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"pipelinechannel--condition-branches-1-oneof-1","producerSubtask":"condition-branches-1"}}}},"tasks":{"condition-branches-1":{"componentRef":{"name":"comp-condition-branches-1"},"dependentTasks":["flip-three-sided-die"],"inputs":{"parameters":{"pipelinechannel--flip-three-sided-die-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-three-sided-die"}}}},"taskInfo":{"name":"condition-branches-1"}},"flip-three-sided-die":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-three-sided-die"},"taskInfo":{"name":"flip-three-sided-die"}}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: components-root + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"print-and-return"}}}},"tasks":{"print-and-return":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-3"},"dependentTasks":["roll-die-pipeline"],"inputs":{"parameters":{"text":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"roll-die-pipeline"}}}},"taskInfo":{"name":"print-and-return"}},"roll-die-pipeline":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-roll-die-pipeline"},"taskInfo":{"name":"roll-die-pipeline"}}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - outer-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - 
'{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} 
+ name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Got + heads!"}}}},"taskInfo":{"name":"print-and-return"}}' + - name: container + value: '{{workflow.parameters.implementations-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task-name + value: print-and-return + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-and-return-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-and-return-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-and-return-driver.outputs.parameters.cached-decision}}' + depends: print-and-return-driver.Succeeded + name: print-and-return + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-2 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-2"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Got + tails!"}}}},"taskInfo":{"name":"print-and-return-2"}}' + - name: container + value: '{{workflow.parameters.implementations-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task-name + value: print-and-return-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-and-return-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-and-return-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-and-return-2-driver.outputs.parameters.cached-decision}}' + depends: print-and-return-2-driver.Succeeded + name: print-and-return-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-3 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-6dff0af7f5de9b87fab66963c0544a7a050f6a297f26187648f401e15ba49cfd}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-special-print-and-return"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Draw!"}}}},"taskInfo":{"name":"special-print-and-return"}}' + - name: container + value: '{{workflow.parameters.implementations-6dff0af7f5de9b87fab66963c0544a7a050f6a297f26187648f401e15ba49cfd}}' + - name: task-name + value: special-print-and-return + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: special-print-and-return-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.special-print-and-return-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: 
'{{tasks.special-print-and-return-driver.outputs.parameters.cached-decision}}' + depends: special-print-and-return-driver.Succeeded + name: special-print-and-return + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-4 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - outer-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-2"},"inputs":{"parameters":{"pipelinechannel--flip-three-sided-die-Output":{"componentInputParameter":"pipelinechannel--flip-three-sided-die-Output"}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] + == ''heads''"}}' + - name: task-name + value: condition-2 + name: condition-2-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-2-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-2-driver.outputs.parameters.condition}}' + depends: condition-2-driver.Succeeded + name: condition-2 + template: comp-condition-2 + when: '{{tasks.condition-2-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-3}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: 
'{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--flip-three-sided-die-Output":{"componentInputParameter":"pipelinechannel--flip-three-sided-die-Output"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] + == ''heads'') \u0026\u0026 inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] + == ''tails''"}}' + - name: task-name + value: condition-3 + name: condition-3-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-3-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-3-driver.outputs.parameters.condition}}' + depends: condition-3-driver.Succeeded + name: condition-3 + template: comp-condition-3 + when: '{{tasks.condition-3-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-4}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-4"},"inputs":{"parameters":{"pipelinechannel--flip-three-sided-die-Output":{"componentInputParameter":"pipelinechannel--flip-three-sided-die-Output"}}},"taskInfo":{"name":"condition-4"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] + == ''heads'') \u0026\u0026 !(inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] + == ''tails'')"}}' + - name: task-name + value: condition-4 + name: condition-4-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-4-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-4-driver.outputs.parameters.condition}}' + depends: condition-4-driver.Succeeded + name: condition-4 + template: comp-condition-4 + when: '{{tasks.condition-4-driver.outputs.parameters.condition}} != false' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-branches-1 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-branches-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-branches-1"},"dependentTasks":["flip-three-sided-die"],"inputs":{"parameters":{"pipelinechannel--flip-three-sided-die-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-three-sided-die"}}}},"taskInfo":{"name":"condition-branches-1"}}' + - name: task-name + value: condition-branches-1 + depends: flip-three-sided-die.Succeeded + name: condition-branches-1-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-branches-1-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-branches-1-driver.outputs.parameters.condition}}' + depends: condition-branches-1-driver.Succeeded + name: condition-branches-1 + template: comp-condition-branches-1 + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-9a6ceddb3a88b2813013f31610e27e091dab5dc13510b28f7b468fdc3ccdfb86}}' + - name: task + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-three-sided-die"},"taskInfo":{"name":"flip-three-sided-die"}}' + - name: container + value: '{{workflow.parameters.implementations-9a6ceddb3a88b2813013f31610e27e091dab5dc13510b28f7b468fdc3ccdfb86}}' + - name: task-name + value: flip-three-sided-die + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: flip-three-sided-die-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.flip-three-sided-die-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.flip-three-sided-die-driver.outputs.parameters.cached-decision}}' + depends: flip-three-sided-die-driver.Succeeded + name: flip-three-sided-die + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-roll-die-pipeline + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-3"},"dependentTasks":["roll-die-pipeline"],"inputs":{"parameters":{"text":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"roll-die-pipeline"}}}},"taskInfo":{"name":"print-and-return"}}' + - name: container + value: '{{workflow.parameters.implementations-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task-name + value: print-and-return + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: roll-die-pipeline.Succeeded + name: print-and-return-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-and-return-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-and-return-driver.outputs.parameters.cached-decision}}' + depends: print-and-return-driver.Succeeded + name: print-and-return + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-roll-die-pipeline}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-roll-die-pipeline"},"taskInfo":{"name":"roll-die-pipeline"}}' + - name: task-name + value: roll-die-pipeline + name: roll-die-pipeline-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.roll-die-pipeline-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.roll-die-pipeline-driver.outputs.parameters.condition}}' + depends: roll-die-pipeline-driver.Succeeded + name: roll-die-pipeline + template: comp-roll-die-pipeline + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: 
entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/if_else_with_oneof_artifacts.yaml b/test_data/compiled-workflows/if_else_with_oneof_artifacts.yaml new file mode 100644 index 00000000000..967ca9341f7 --- /dev/null +++ b/test_data/compiled-workflows/if_else_with_oneof_artifacts.yaml @@ -0,0 +1,626 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: outer-pipeline- +spec: + arguments: + parameters: + - name: components-c3727009f00beb8b5919fc00c4c334adbf50ee929c9f2bb4f16182c1cd2e3cbc + value: '{"executorLabel":"exec-param-to-artifact","inputDefinitions":{"parameters":{"val":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"a":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: implementations-c3727009f00beb8b5919fc00c4c334adbf50ee929c9f2bb4f16182c1cd2e3cbc + value: '{"args":["--executor_input","{{$}}","--function_to_execute","param_to_artifact"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + param_to_artifact(val: str, a: Output[Artifact]):\n with open(a.path, ''w'') + as f:\n f.write(val)\n\n"],"image":"python:3.9"}' + - name: components-comp-condition-2 + value: '{"dag":{"outputs":{"artifacts":{"pipelinechannel--param-to-artifact-a":{"artifactSelectors":[{"outputArtifactKey":"a","producerSubtask":"param-to-artifact"}]}}},"tasks":{"param-to-artifact":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-param-to-artifact"},"inputs":{"parameters":{"val":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"param-to-artifact"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-Output":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"pipelinechannel--param-to-artifact-a":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: components-comp-condition-3 + value: '{"dag":{"outputs":{"artifacts":{"pipelinechannel--param-to-artifact-2-a":{"artifactSelectors":[{"outputArtifactKey":"a","producerSubtask":"param-to-artifact-2"}]}}},"tasks":{"param-to-artifact-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-param-to-artifact-2"},"inputs":{"parameters":{"val":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"param-to-artifact-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-Output":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"pipelinechannel--param-to-artifact-2-a":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: components-comp-condition-branches-1 + value: 
'{"dag":{"outputs":{"artifacts":{"pipelinechannel--condition-branches-1-oneof-1":{"artifactSelectors":[{"outputArtifactKey":"pipelinechannel--param-to-artifact-a","producerSubtask":"condition-2"},{"outputArtifactKey":"pipelinechannel--param-to-artifact-2-a","producerSubtask":"condition-3"}]}}},"tasks":{"condition-2":{"componentRef":{"name":"comp-condition-2"},"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-Output''] + == ''heads''"}},"condition-3":{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--flip-coin-Output''] + == ''heads'')"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-Output":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"pipelinechannel--condition-branches-1-oneof-1":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: components-63c60d1934695443fbf91b71abd177ac69246c9328099025670d1d75e5f45f14 + value: '{"executorLabel":"exec-flip-coin","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-63c60d1934695443fbf91b71abd177ac69246c9328099025670d1d75e5f45f14 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","flip_coin"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + flip_coin() -\u003e str:\n import random\n return ''heads'' if random.randint(0, + 1) == 0 else ''tails''\n\n"],"image":"python:3.9"}' + - name: components-4b25ff0a49eab000c257f8f4a0117403198c888052ec86137349df44a41d04fe + value: '{"executorLabel":"exec-print-artifact","inputDefinitions":{"artifacts":{"a":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: implementations-4b25ff0a49eab000c257f8f4a0117403198c888052ec86137349df44a41d04fe + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_artifact"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_artifact(a: Input[Artifact]):\n with open(a.path) as f:\n print(f.read())\n\n"],"image":"python:3.9"}' + - name: components-comp-flip-coin-pipeline + value: '{"dag":{"outputs":{"artifacts":{"Output":{"artifactSelectors":[{"outputArtifactKey":"pipelinechannel--condition-branches-1-oneof-1","producerSubtask":"condition-branches-1"}]}}},"tasks":{"condition-branches-1":{"componentRef":{"name":"comp-condition-branches-1"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-branches-1"}},"flip-coin":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-coin"},"taskInfo":{"name":"flip-coin"}},"print-artifact":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-artifact"},"dependentTasks":["condition-branches-1"],"inputs":{"artifacts":{"a":{"taskOutputArtifact":{"outputArtifactKey":"pipelinechannel--condition-branches-1-oneof-1","producerTask":"condition-branches-1"}}}},"taskInfo":{"name":"print-artifact"}}}},"outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: components-root + value: '{"dag":{"tasks":{"flip-coin-pipeline":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-coin-pipeline"},"taskInfo":{"name":"flip-coin-pipeline"}},"print-artifact":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-artifact-2"},"dependentTasks":["flip-coin-pipeline"],"inputs":{"artifacts":{"a":{"taskOutputArtifact":{"outputArtifactKey":"Output","producerTask":"flip-coin-pipeline"}}}},"taskInfo":{"name":"print-artifact"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - outer-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - 
--mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c3727009f00beb8b5919fc00c4c334adbf50ee929c9f2bb4f16182c1cd2e3cbc}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-param-to-artifact"},"inputs":{"parameters":{"val":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"param-to-artifact"}}' + - name: container + value: 
'{{workflow.parameters.implementations-c3727009f00beb8b5919fc00c4c334adbf50ee929c9f2bb4f16182c1cd2e3cbc}}' + - name: task-name + value: param-to-artifact + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: param-to-artifact-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.param-to-artifact-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.param-to-artifact-driver.outputs.parameters.cached-decision}}' + depends: param-to-artifact-driver.Succeeded + name: param-to-artifact + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-2 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c3727009f00beb8b5919fc00c4c334adbf50ee929c9f2bb4f16182c1cd2e3cbc}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-param-to-artifact-2"},"inputs":{"parameters":{"val":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"param-to-artifact-2"}}' + - name: container + value: '{{workflow.parameters.implementations-c3727009f00beb8b5919fc00c4c334adbf50ee929c9f2bb4f16182c1cd2e3cbc}}' + - name: task-name + value: param-to-artifact-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: param-to-artifact-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.param-to-artifact-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.param-to-artifact-2-driver.outputs.parameters.cached-decision}}' + depends: param-to-artifact-2-driver.Succeeded + name: param-to-artifact-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-3 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - outer-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - 
default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-2"},"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-Output''] + == ''heads''"}}' + - name: task-name + value: condition-2 + name: condition-2-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-2-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-2-driver.outputs.parameters.condition}}' + depends: condition-2-driver.Succeeded + name: condition-2 + template: comp-condition-2 + when: '{{tasks.condition-2-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-3}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--flip-coin-Output''] + == ''heads'')"}}' + - name: task-name + value: condition-3 + name: condition-3-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-3-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-3-driver.outputs.parameters.condition}}' + depends: condition-3-driver.Succeeded + name: condition-3 + template: comp-condition-3 + when: '{{tasks.condition-3-driver.outputs.parameters.condition}} != false' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-branches-1 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-branches-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-branches-1"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-branches-1"}}' + - name: task-name + value: condition-branches-1 + depends: flip-coin.Succeeded + name: condition-branches-1-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-branches-1-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-branches-1-driver.outputs.parameters.condition}}' + depends: condition-branches-1-driver.Succeeded + name: condition-branches-1 + template: comp-condition-branches-1 + - arguments: + parameters: + 
- name: component + value: '{{workflow.parameters.components-63c60d1934695443fbf91b71abd177ac69246c9328099025670d1d75e5f45f14}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-coin"},"taskInfo":{"name":"flip-coin"}}' + - name: container + value: '{{workflow.parameters.implementations-63c60d1934695443fbf91b71abd177ac69246c9328099025670d1d75e5f45f14}}' + - name: task-name + value: flip-coin + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: flip-coin-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.flip-coin-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.flip-coin-driver.outputs.parameters.cached-decision}}' + depends: flip-coin-driver.Succeeded + name: flip-coin + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-4b25ff0a49eab000c257f8f4a0117403198c888052ec86137349df44a41d04fe}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-artifact"},"dependentTasks":["condition-branches-1"],"inputs":{"artifacts":{"a":{"taskOutputArtifact":{"outputArtifactKey":"pipelinechannel--condition-branches-1-oneof-1","producerTask":"condition-branches-1"}}}},"taskInfo":{"name":"print-artifact"}}' + - name: container + value: '{{workflow.parameters.implementations-4b25ff0a49eab000c257f8f4a0117403198c888052ec86137349df44a41d04fe}}' + - name: task-name + value: print-artifact + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: condition-branches-1.Succeeded + name: print-artifact-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-artifact-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-artifact-driver.outputs.parameters.cached-decision}}' + depends: print-artifact-driver.Succeeded + name: print-artifact + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-flip-coin-pipeline + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-flip-coin-pipeline}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-coin-pipeline"},"taskInfo":{"name":"flip-coin-pipeline"}}' + - name: task-name + value: flip-coin-pipeline + name: flip-coin-pipeline-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.flip-coin-pipeline-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.flip-coin-pipeline-driver.outputs.parameters.condition}}' + depends: flip-coin-pipeline-driver.Succeeded + name: flip-coin-pipeline + template: comp-flip-coin-pipeline + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-4b25ff0a49eab000c257f8f4a0117403198c888052ec86137349df44a41d04fe}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-artifact-2"},"dependentTasks":["flip-coin-pipeline"],"inputs":{"artifacts":{"a":{"taskOutputArtifact":{"outputArtifactKey":"Output","producerTask":"flip-coin-pipeline"}}}},"taskInfo":{"name":"print-artifact"}}' + - name: container 
+ value: '{{workflow.parameters.implementations-4b25ff0a49eab000c257f8f4a0117403198c888052ec86137349df44a41d04fe}}' + - name: task-name + value: print-artifact + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: flip-coin-pipeline.Succeeded + name: print-artifact-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-artifact-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-artifact-driver.outputs.parameters.cached-decision}}' + depends: print-artifact-driver.Succeeded + name: print-artifact + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/if_else_with_oneof_parameters.yaml b/test_data/compiled-workflows/if_else_with_oneof_parameters.yaml new file mode 100644 index 00000000000..6534f966afc --- /dev/null +++ b/test_data/compiled-workflows/if_else_with_oneof_parameters.yaml @@ -0,0 +1,560 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: flip-coin-pipeline- +spec: + arguments: + parameters: + - name: components-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac + value: '{"executorLabel":"exec-print-and-return","inputDefinitions":{"parameters":{"text":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_and_return"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_and_return(text: str) -\u003e str:\n print(text)\n return text\n\n"],"image":"python:3.9"}' + - name: components-comp-condition-2 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--print-and-return-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"print-and-return"}}}},"tasks":{"print-and-return":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Got + heads!"}}}},"taskInfo":{"name":"print-and-return"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-Output":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"pipelinechannel--print-and-return-Output":{"parameterType":"STRING"}}}}' + - name: components-comp-condition-3 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--print-and-return-2-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"print-and-return-2"}}}},"tasks":{"print-and-return-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-2"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Got + tails!"}}}},"taskInfo":{"name":"print-and-return-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-Output":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"pipelinechannel--print-and-return-2-Output":{"parameterType":"STRING"}}}}' + - name: components-comp-condition-branches-1 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--condition-branches-1-oneof-1":{"valueFromOneof":{"parameterSelectors":[{"outputParameterKey":"pipelinechannel--print-and-return-Output","producerSubtask":"condition-2"},{"outputParameterKey":"pipelinechannel--print-and-return-2-Output","producerSubtask":"condition-3"}]}}}},"tasks":{"condition-2":{"componentRef":{"name":"comp-condition-2"},"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-Output''] + == ''heads''"}},"condition-3":{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--flip-coin-Output''] + == ''heads'')"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-Output":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"pipelinechannel--condition-branches-1-oneof-1":{"parameterType":"STRING"}}}}' + - name: components-63c60d1934695443fbf91b71abd177ac69246c9328099025670d1d75e5f45f14 + value: 
'{"executorLabel":"exec-flip-coin","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-63c60d1934695443fbf91b71abd177ac69246c9328099025670d1d75e5f45f14 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","flip_coin"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + flip_coin() -\u003e str:\n import random\n return ''heads'' if random.randint(0, + 1) == 0 else ''tails''\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"pipelinechannel--condition-branches-1-oneof-1","producerSubtask":"condition-branches-1"}}}},"tasks":{"condition-branches-1":{"componentRef":{"name":"comp-condition-branches-1"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-branches-1"}},"flip-coin":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-coin"},"taskInfo":{"name":"flip-coin"}},"print-and-return-3":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-3"},"dependentTasks":["condition-branches-1"],"inputs":{"parameters":{"text":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--condition-branches-1-oneof-1","producerTask":"condition-branches-1"}}}},"taskInfo":{"name":"print-and-return-3"}}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - flip-coin-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: 
ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Got + heads!"}}}},"taskInfo":{"name":"print-and-return"}}' + - name: container + value: '{{workflow.parameters.implementations-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task-name + value: print-and-return + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-and-return-driver + template: 
system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-and-return-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-and-return-driver.outputs.parameters.cached-decision}}' + depends: print-and-return-driver.Succeeded + name: print-and-return + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-2 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-2"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"Got + tails!"}}}},"taskInfo":{"name":"print-and-return-2"}}' + - name: container + value: '{{workflow.parameters.implementations-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task-name + value: print-and-return-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-and-return-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-and-return-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-and-return-2-driver.outputs.parameters.cached-decision}}' + depends: print-and-return-2-driver.Succeeded + name: print-and-return-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-3 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - flip-coin-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: 
/tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-2"},"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-Output''] + == ''heads''"}}' + - name: task-name + value: condition-2 + name: condition-2-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-2-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-2-driver.outputs.parameters.condition}}' + depends: condition-2-driver.Succeeded + name: condition-2 + template: comp-condition-2 + when: '{{tasks.condition-2-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-3}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--flip-coin-Output''] + == ''heads'')"}}' + - name: task-name + value: condition-3 + name: condition-3-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-3-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-3-driver.outputs.parameters.condition}}' + depends: condition-3-driver.Succeeded + name: condition-3 + template: comp-condition-3 + when: '{{tasks.condition-3-driver.outputs.parameters.condition}} != false' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-branches-1 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-branches-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-branches-1"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-branches-1"}}' + - name: task-name + value: condition-branches-1 + depends: flip-coin.Succeeded + name: condition-branches-1-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-branches-1-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-branches-1-driver.outputs.parameters.condition}}' + depends: condition-branches-1-driver.Succeeded + name: condition-branches-1 + template: comp-condition-branches-1 + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-63c60d1934695443fbf91b71abd177ac69246c9328099025670d1d75e5f45f14}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-coin"},"taskInfo":{"name":"flip-coin"}}' + - 
name: container + value: '{{workflow.parameters.implementations-63c60d1934695443fbf91b71abd177ac69246c9328099025670d1d75e5f45f14}}' + - name: task-name + value: flip-coin + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: flip-coin-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.flip-coin-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.flip-coin-driver.outputs.parameters.cached-decision}}' + depends: flip-coin-driver.Succeeded + name: flip-coin + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-and-return-3"},"dependentTasks":["condition-branches-1"],"inputs":{"parameters":{"text":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--condition-branches-1-oneof-1","producerTask":"condition-branches-1"}}}},"taskInfo":{"name":"print-and-return-3"}}' + - name: container + value: '{{workflow.parameters.implementations-d4d51356e36512e82fa2af8e14375269db87299669cfb9b3cfa0baacf11fecac}}' + - name: task-name + value: print-and-return-3 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: condition-branches-1.Succeeded + name: print-and-return-3-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-and-return-3-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-and-return-3-driver.outputs.parameters.cached-decision}}' + depends: print-and-return-3-driver.Succeeded + name: print-and-return-3 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/input_artifact.yaml b/test_data/compiled-workflows/input_artifact.yaml new file mode 100644 index 00000000000..6e02cfed8af --- /dev/null +++ b/test_data/compiled-workflows/input_artifact.yaml @@ -0,0 +1,369 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: input-artifact- +spec: + arguments: + parameters: + - name: components-da3c6808bf551f2963106f4d6e053e4d70b827b3960f6e6d581fa72f4c1191fe + value: '{"executorLabel":"exec-input-artifact","inputDefinitions":{"artifacts":{"data":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-da3c6808bf551f2963106f4d6e053e4d70b827b3960f6e6d581fa72f4c1191fe + value: '{"args":["--executor_input","{{$}}","--function_to_execute","input_artifact"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + input_artifact(data: Input[Dataset]):\n print(data.name)\n print(data.uri)\n print(data.metadata)\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"input-artifact":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-input-artifact"},"inputs":{"artifacts":{"data":{"componentInputArtifact":"data"}}},"taskInfo":{"name":"input-artifact"}}}},"inputDefinitions":{"artifacts":{"data":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - input-artifact + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: 
'{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-da3c6808bf551f2963106f4d6e053e4d70b827b3960f6e6d581fa72f4c1191fe}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-input-artifact"},"inputs":{"artifacts":{"data":{"componentInputArtifact":"data"}}},"taskInfo":{"name":"input-artifact"}}' + - name: container + value: '{{workflow.parameters.implementations-da3c6808bf551f2963106f4d6e053e4d70b827b3960f6e6d581fa72f4c1191fe}}' + - name: task-name + value: input-artifact + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: input-artifact-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.input-artifact-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.input-artifact-driver.outputs.parameters.cached-decision}}' + depends: input-artifact-driver.Succeeded + name: input-artifact + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - input-artifact + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - 
--execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/iris_pipeline_compiled.yaml b/test_data/compiled-workflows/iris_pipeline_compiled.yaml new file mode 100644 index 00000000000..d9b171895ba --- /dev/null +++ b/test_data/compiled-workflows/iris_pipeline_compiled.yaml @@ -0,0 +1,472 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: iris-training-pipeline- +spec: + arguments: + parameters: + - name: components-2af7169887551fdca1e52d152b52ec11ea10d8009f09023395873eb7f04f41c2 + value: '{"executorLabel":"exec-create-dataset","outputDefinitions":{"artifacts":{"iris_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-2af7169887551fdca1e52d152b52ec11ea10d8009f09023395873eb7f04f41c2 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","create_dataset"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 python3 + -m pip install --quiet --no-warn-script-location ''pandas==2.2.0'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + create_dataset(iris_dataset: Output[Dataset]):\n from io import StringIO # + noqa: PLC0415\n\n import pandas as pd # noqa: PLC0415\n\n data = \"\"\"\n 5.1,3.5,1.4,0.2,Iris-setosa\n 4.9,3.0,1.4,0.2,Iris-setosa\n 4.7,3.2,1.3,0.2,Iris-setosa\n 4.6,3.1,1.5,0.2,Iris-setosa\n 5.0,3.6,1.4,0.2,Iris-setosa\n 5.7,3.8,1.7,0.3,Iris-setosa\n 5.1,3.8,1.5,0.3,Iris-setosa\n 5.4,3.4,1.7,0.2,Iris-setosa\n 5.1,3.7,1.5,0.4,Iris-setosa\n 5.1,3.4,1.5,0.2,Iris-setosa\n 5.0,3.5,1.3,0.3,Iris-setosa\n 4.5,2.3,1.3,0.3,Iris-setosa\n 4.4,3.2,1.3,0.2,Iris-setosa\n 5.0,3.5,1.6,0.6,Iris-setosa\n 5.1,3.8,1.9,0.4,Iris-setosa\n 4.8,3.0,1.4,0.3,Iris-setosa\n 5.1,3.8,1.6,0.2,Iris-setosa\n 4.6,3.2,1.4,0.2,Iris-setosa\n 5.3,3.7,1.5,0.2,Iris-setosa\n 5.0,3.3,1.4,0.2,Iris-setosa\n 7.0,3.2,4.7,1.4,Iris-versicolor\n 6.4,3.2,4.5,1.5,Iris-versicolor\n 6.9,3.1,4.9,1.5,Iris-versicolor\n 5.5,2.3,4.0,1.3,Iris-versicolor\n 6.5,2.8,4.6,1.5,Iris-versicolor\n 6.2,2.2,4.5,1.5,Iris-versicolor\n 5.6,2.5,3.9,1.1,Iris-versicolor\n 5.9,3.2,4.8,1.8,Iris-versicolor\n 6.1,2.8,4.0,1.3,Iris-versicolor\n 6.3,2.5,4.9,1.5,Iris-versicolor\n 6.1,2.8,4.7,1.2,Iris-versicolor\n 6.4,2.9,4.3,1.3,Iris-versicolor\n 6.6,3.0,4.4,1.4,Iris-versicolor\n 5.6,2.7,4.2,1.3,Iris-versicolor\n 5.7,3.0,4.2,1.2,Iris-versicolor\n 5.7,2.9,4.2,1.3,Iris-versicolor\n 6.2,2.9,4.3,1.3,Iris-versicolor\n 5.1,2.5,3.0,1.1,Iris-versicolor\n 5.7,2.8,4.1,1.3,Iris-versicolor\n 6.3,3.3,6.0,2.5,Iris-virginica\n 5.8,2.7,5.1,1.9,Iris-virginica\n 7.1,3.0,5.9,2.1,Iris-virginica\n 6.3,2.9,5.6,1.8,Iris-virginica\n 6.5,3.0,5.8,2.2,Iris-virginica\n 6.9,3.1,5.1,2.3,Iris-virginica\n 5.8,2.7,5.1,1.9,Iris-virginica\n 6.8,3.2,5.9,2.3,Iris-virginica\n 6.7,3.3,5.7,2.5,Iris-virginica\n 6.7,3.0,5.2,2.3,Iris-virginica\n 6.3,2.5,5.0,1.9,Iris-virginica\n 6.5,3.0,5.2,2.0,Iris-virginica\n 6.2,3.4,5.4,2.3,Iris-virginica\n 5.9,3.0,5.1,1.8,Iris-virginica\n \"\"\"\n col_names + = [\"Sepal_Length\", \"Sepal_Width\", \"Petal_Length\", \"Petal_Width\", \"Labels\"]\n df + = pd.read_csv(StringIO(data), names=col_names)\n\n with open(iris_dataset.path, + \"w\") as f:\n df.to_csv(f)\n\n"],"image":"python:3.9"}' + - name: components-e21502083257f62c5b8d5e6dddd74ed2fa77a7f3e5ebb318e474eb94b51d71bb + value: '{"executorLabel":"exec-normalize-dataset","inputDefinitions":{"artifacts":{"input_iris_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}},"parameters":{"standard_scaler":{"parameterType":"BOOLEAN"}}},"outputDefinitions":{"artifacts":{"normalized_iris_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-e21502083257f62c5b8d5e6dddd74ed2fa77a7f3e5ebb318e474eb94b51d71bb + value: 
'{"args":["--executor_input","{{$}}","--function_to_execute","normalize_dataset"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 python3 + -m pip install --quiet --no-warn-script-location ''pandas==2.2.0'' ''scikit-learn==1.4.0'' + \u0026\u0026 \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf + \"%s\" \"$0\" \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true + python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + normalize_dataset(\n input_iris_dataset: Input[Dataset],\n normalized_iris_dataset: + Output[Dataset],\n standard_scaler: bool,\n):\n import pandas as pd # + noqa: PLC0415\n from sklearn.preprocessing import MinMaxScaler, StandardScaler # + noqa: PLC0415\n\n with open(input_iris_dataset.path) as f:\n df + = pd.read_csv(f)\n labels = df.pop(\"Labels\")\n\n scaler = StandardScaler() + if standard_scaler else MinMaxScaler()\n\n df = pd.DataFrame(scaler.fit_transform(df))\n df[\"Labels\"] + = labels\n normalized_iris_dataset.metadata[\"state\"] = \"Normalized\"\n with + open(normalized_iris_dataset.path, \"w\") as f:\n df.to_csv(f)\n\n"],"image":"python:3.9"}' + - name: components-e6e50647ba805f535e80fde81f76312e593390b70b360c1069a9b7766bfb5997 + value: '{"executorLabel":"exec-train-model","inputDefinitions":{"artifacts":{"normalized_iris_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}},"parameters":{"n_neighbors":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"artifacts":{"metrics":{"artifactType":{"schemaTitle":"system.ClassificationMetrics","schemaVersion":"0.0.1"}},"model":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"}}}}}' + - name: implementations-e6e50647ba805f535e80fde81f76312e593390b70b360c1069a9b7766bfb5997 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","train_model"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 python3 + -m pip install --quiet --no-warn-script-location ''pandas==2.2.0'' ''scikit-learn==1.4.0'' + \u0026\u0026 \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf + \"%s\" \"$0\" \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true + python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + train_model(\n normalized_iris_dataset: Input[Dataset],\n model: Output[Model],\n metrics: + Output[ClassificationMetrics],\n n_neighbors: int,\n):\n import pickle # + noqa: PLC0415\n\n import pandas as pd # noqa: PLC0415\n from sklearn.metrics + import confusion_matrix # noqa: PLC0415\n from sklearn.model_selection + import cross_val_predict, train_test_split # noqa: PLC0415\n from sklearn.neighbors + import KNeighborsClassifier # noqa: PLC0415\n\n with open(normalized_iris_dataset.path) + as f:\n df = pd.read_csv(f)\n\n y = df.pop(\"Labels\")\n X = + df\n\n X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0) # + noqa: F841\n\n clf = KNeighborsClassifier(n_neighbors=n_neighbors)\n clf.fit(X_train, + y_train)\n\n predictions = cross_val_predict(clf, X_train, y_train, cv=3)\n metrics.log_confusion_matrix(\n [\"Iris-Setosa\", + \"Iris-Versicolour\", \"Iris-Virginica\"],\n confusion_matrix(y_train, + predictions).tolist(), # .tolist() to convert np array to list.\n )\n\n model.metadata[\"framework\"] + = \"scikit-learn\"\n with open(model.path, \"wb\") as f:\n pickle.dump(clf, + f)\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"create-dataset":{"cachingOptions":{},"componentRef":{"name":"comp-create-dataset"},"taskInfo":{"name":"create-dataset"}},"normalize-dataset":{"cachingOptions":{},"componentRef":{"name":"comp-normalize-dataset"},"dependentTasks":["create-dataset"],"inputs":{"artifacts":{"input_iris_dataset":{"taskOutputArtifact":{"outputArtifactKey":"iris_dataset","producerTask":"create-dataset"}}},"parameters":{"standard_scaler":{"componentInputParameter":"standard_scaler"}}},"taskInfo":{"name":"normalize-dataset"}},"train-model":{"cachingOptions":{},"componentRef":{"name":"comp-train-model"},"dependentTasks":["normalize-dataset"],"inputs":{"artifacts":{"normalized_iris_dataset":{"taskOutputArtifact":{"outputArtifactKey":"normalized_iris_dataset","producerTask":"normalize-dataset"}}},"parameters":{"n_neighbors":{"componentInputParameter":"neighbors"}}},"taskInfo":{"name":"train-model"}}}},"inputDefinitions":{"parameters":{"neighbors":{"defaultValue":3,"isOptional":true,"parameterType":"NUMBER_INTEGER"},"standard_scaler":{"defaultValue":true,"isOptional":true,"parameterType":"BOOLEAN"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - iris-training-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - 
'{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} 
+ name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-2af7169887551fdca1e52d152b52ec11ea10d8009f09023395873eb7f04f41c2}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-create-dataset"},"taskInfo":{"name":"create-dataset"}}' + - name: container + value: '{{workflow.parameters.implementations-2af7169887551fdca1e52d152b52ec11ea10d8009f09023395873eb7f04f41c2}}' + - name: task-name + value: create-dataset + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: create-dataset-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.create-dataset-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.create-dataset-driver.outputs.parameters.cached-decision}}' + depends: create-dataset-driver.Succeeded + name: create-dataset + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-e21502083257f62c5b8d5e6dddd74ed2fa77a7f3e5ebb318e474eb94b51d71bb}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-normalize-dataset"},"dependentTasks":["create-dataset"],"inputs":{"artifacts":{"input_iris_dataset":{"taskOutputArtifact":{"outputArtifactKey":"iris_dataset","producerTask":"create-dataset"}}},"parameters":{"standard_scaler":{"componentInputParameter":"standard_scaler"}}},"taskInfo":{"name":"normalize-dataset"}}' + - name: container + value: '{{workflow.parameters.implementations-e21502083257f62c5b8d5e6dddd74ed2fa77a7f3e5ebb318e474eb94b51d71bb}}' + - name: task-name + value: normalize-dataset + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: create-dataset.Succeeded + name: normalize-dataset-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.normalize-dataset-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.normalize-dataset-driver.outputs.parameters.cached-decision}}' + depends: normalize-dataset-driver.Succeeded + name: normalize-dataset + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-e6e50647ba805f535e80fde81f76312e593390b70b360c1069a9b7766bfb5997}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-train-model"},"dependentTasks":["normalize-dataset"],"inputs":{"artifacts":{"normalized_iris_dataset":{"taskOutputArtifact":{"outputArtifactKey":"normalized_iris_dataset","producerTask":"normalize-dataset"}}},"parameters":{"n_neighbors":{"componentInputParameter":"neighbors"}}},"taskInfo":{"name":"train-model"}}' + - name: container + value: '{{workflow.parameters.implementations-e6e50647ba805f535e80fde81f76312e593390b70b360c1069a9b7766bfb5997}}' + - name: task-name + value: train-model + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: normalize-dataset.Succeeded + name: train-model-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.train-model-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: 
'{{tasks.train-model-driver.outputs.parameters.cached-decision}}' + depends: train-model-driver.Succeeded + name: train-model + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - iris-training-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"neighbors":3,"standard_scaler":true}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/lightweight_python_functions_pipeline.yaml b/test_data/compiled-workflows/lightweight_python_functions_pipeline.yaml new file mode 100644 index 00000000000..f52a230171b --- /dev/null +++ b/test_data/compiled-workflows/lightweight_python_functions_pipeline.yaml @@ -0,0 +1,449 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: my-test-pipeline-beta- +spec: + arguments: + parameters: + - name: components-58eb44fee654093cb441c465d8ce6209048c161178ed5ecd93354b97de8df48f + value: 
'{"executorLabel":"exec-preprocess","inputDefinitions":{"parameters":{"input_dict_parameter":{"parameterType":"STRUCT"},"input_list_parameter":{"parameterType":"LIST"},"message":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"output_dataset_one":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}},"output_dataset_two_path":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}},"parameters":{"output_bool_parameter_path":{"parameterType":"BOOLEAN"},"output_dict_parameter_path":{"parameterType":"STRUCT"},"output_list_parameter_path":{"parameterType":"LIST"},"output_parameter_path":{"parameterType":"STRING"}}}}' + - name: implementations-58eb44fee654093cb441c465d8ce6209048c161178ed5ecd93354b97de8df48f + value: '{"args":["--executor_input","{{$}}","--function_to_execute","preprocess"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + preprocess(\n # An input parameter of type string.\n message: str,\n # + An input parameter of type dict.\n input_dict_parameter: Dict[str, int],\n # + An input parameter of type list.\n input_list_parameter: List[str],\n # + Use Output[T] to get a metadata-rich handle to the output artifact\n # + of type `Dataset`.\n output_dataset_one: Output[Dataset],\n # A locally + accessible filepath for another output artifact of type\n # `Dataset`.\n output_dataset_two_path: + OutputPath(''Dataset''),\n # A locally accessible filepath for an output + parameter of type string.\n output_parameter_path: OutputPath(str),\n # + A locally accessible filepath for an output parameter of type bool.\n output_bool_parameter_path: + OutputPath(bool),\n # A locally accessible filepath for an output parameter + of type dict.\n output_dict_parameter_path: OutputPath(Dict[str, int]),\n # + A locally accessible filepath for an output parameter of type list.\n output_list_parameter_path: + OutputPath(List[str]),\n):\n \"\"\"Dummy preprocessing step.\"\"\"\n\n # + Use Dataset.path to access a local file path for writing.\n # One can also + use Dataset.uri to access the actual URI file path.\n with open(output_dataset_one.path, + ''w'') as f:\n f.write(message)\n\n # OutputPath is used to just + pass the local file path of the output artifact\n # to the function.\n with + open(output_dataset_two_path, ''w'') as f:\n f.write(message)\n\n with + open(output_parameter_path, ''w'') as f:\n f.write(message)\n\n with + open(output_bool_parameter_path, ''w'') as f:\n f.write(\n str(True)) # + use either `str()` or `json.dumps()` for bool values.\n\n import json\n with + open(output_dict_parameter_path, ''w'') as f:\n f.write(json.dumps(input_dict_parameter))\n\n with + open(output_list_parameter_path, ''w'') as f:\n f.write(json.dumps(input_list_parameter))\n\n"],"image":"python:3.9"}' + - name: components-c146bad05c050c176402e7a5065dafbcb507ddacd77264720b5199dd9a8b1711 + value: 
'{"executorLabel":"exec-train","inputDefinitions":{"artifacts":{"dataset_one_path":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}},"dataset_two":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}},"parameters":{"input_bool":{"parameterType":"BOOLEAN"},"input_dict":{"parameterType":"STRUCT"},"input_list":{"parameterType":"LIST"},"message":{"parameterType":"STRING"},"num_steps":{"defaultValue":100,"isOptional":true,"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"artifacts":{"model":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"}}}}}' + - name: implementations-c146bad05c050c176402e7a5065dafbcb507ddacd77264720b5199dd9a8b1711 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","train"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + train(\n # Use InputPath to get a locally accessible path for the input + artifact\n # of type `Dataset`.\n dataset_one_path: InputPath(''Dataset''),\n # + Use Input[T] to get a metadata-rich handle to the input artifact\n # of + type `Dataset`.\n dataset_two: Input[Dataset],\n # An input parameter + of type string.\n message: str,\n # Use Output[T] to get a metadata-rich + handle to the output artifact\n # of type `Dataset`.\n model: Output[Model],\n # + An input parameter of type bool.\n input_bool: bool,\n # An input parameter + of type dict.\n input_dict: Dict[str, int],\n # An input parameter of + type List[str].\n input_list: List[str],\n # An input parameter of type + int with a default value.\n num_steps: int = 100,\n):\n \"\"\"Dummy + Training step.\"\"\"\n with open(dataset_one_path) as input_file:\n dataset_one_contents + = input_file.read()\n\n with open(dataset_two.path) as input_file:\n dataset_two_contents + = input_file.read()\n\n line = (f''dataset_one_contents: {dataset_one_contents} + || ''\n f''dataset_two_contents: {dataset_two_contents} || ''\n f''message: + {message} || ''\n f''input_bool: {input_bool}, type {type(input_bool)} + || ''\n f''input_dict: {input_dict}, type {type(input_dict)} || + ''\n f''input_list: {input_list}, type {type(input_list)} \\n'')\n\n with + open(model.path, ''w'') as output_file:\n for i in range(num_steps):\n output_file.write(f''Step + {i}\\n{line}\\n=====\\n'')\n\n # model is an instance of Model artifact, + which has a .metadata dictionary\n # to store arbitrary metadata for the + output artifact.\n model.metadata[''accuracy''] = 0.9\n\n"],"image":"python:3.9"}' + - name: components-root + value: 
'{"dag":{"tasks":{"preprocess":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-preprocess"},"inputs":{"parameters":{"input_dict_parameter":{"componentInputParameter":"input_dict"},"input_list_parameter":{"runtimeValue":{"constant":["a","b","c"]}},"message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"preprocess"}},"train":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-train"},"dependentTasks":["preprocess"],"inputs":{"artifacts":{"dataset_one_path":{"taskOutputArtifact":{"outputArtifactKey":"output_dataset_one","producerTask":"preprocess"}},"dataset_two":{"taskOutputArtifact":{"outputArtifactKey":"output_dataset_two_path","producerTask":"preprocess"}}},"parameters":{"input_bool":{"taskOutputParameter":{"outputParameterKey":"output_bool_parameter_path","producerTask":"preprocess"}},"input_dict":{"taskOutputParameter":{"outputParameterKey":"output_dict_parameter_path","producerTask":"preprocess"}},"input_list":{"taskOutputParameter":{"outputParameterKey":"output_list_parameter_path","producerTask":"preprocess"}},"message":{"taskOutputParameter":{"outputParameterKey":"output_parameter_path","producerTask":"preprocess"}}}},"taskInfo":{"name":"train"}}}},"inputDefinitions":{"parameters":{"input_dict":{"defaultValue":{"A":1,"B":2},"isOptional":true,"parameterType":"STRUCT"},"message":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - my-test-pipeline-beta + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: 
/tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-58eb44fee654093cb441c465d8ce6209048c161178ed5ecd93354b97de8df48f}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-preprocess"},"inputs":{"parameters":{"input_dict_parameter":{"componentInputParameter":"input_dict"},"input_list_parameter":{"runtimeValue":{"constant":["a","b","c"]}},"message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"preprocess"}}' + - name: container + value: '{{workflow.parameters.implementations-58eb44fee654093cb441c465d8ce6209048c161178ed5ecd93354b97de8df48f}}' + - name: task-name + value: preprocess + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: preprocess-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.preprocess-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.preprocess-driver.outputs.parameters.cached-decision}}' + depends: preprocess-driver.Succeeded + name: preprocess + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c146bad05c050c176402e7a5065dafbcb507ddacd77264720b5199dd9a8b1711}}' + - name: task + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-train"},"dependentTasks":["preprocess"],"inputs":{"artifacts":{"dataset_one_path":{"taskOutputArtifact":{"outputArtifactKey":"output_dataset_one","producerTask":"preprocess"}},"dataset_two":{"taskOutputArtifact":{"outputArtifactKey":"output_dataset_two_path","producerTask":"preprocess"}}},"parameters":{"input_bool":{"taskOutputParameter":{"outputParameterKey":"output_bool_parameter_path","producerTask":"preprocess"}},"input_dict":{"taskOutputParameter":{"outputParameterKey":"output_dict_parameter_path","producerTask":"preprocess"}},"input_list":{"taskOutputParameter":{"outputParameterKey":"output_list_parameter_path","producerTask":"preprocess"}},"message":{"taskOutputParameter":{"outputParameterKey":"output_parameter_path","producerTask":"preprocess"}}}},"taskInfo":{"name":"train"}}' + - name: container + value: '{{workflow.parameters.implementations-c146bad05c050c176402e7a5065dafbcb507ddacd77264720b5199dd9a8b1711}}' + - name: task-name + value: train + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: preprocess.Succeeded + name: train-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.train-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.train-driver.outputs.parameters.cached-decision}}' + depends: train-driver.Succeeded + name: train + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - my-test-pipeline-beta + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: 
+ tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"input_dict":{"A":1,"B":2}}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/lightweight_python_functions_with_outputs.yaml b/test_data/compiled-workflows/lightweight_python_functions_with_outputs.yaml new file mode 100644 index 00000000000..c13ed5ae04c --- /dev/null +++ b/test_data/compiled-workflows/lightweight_python_functions_with_outputs.yaml @@ -0,0 +1,490 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: functions-with-outputs- +spec: + arguments: + parameters: + - name: components-8870f0a2f68d8cd4568555a23e74ba662d25e8d5d7206d8c0b3d03e9d3afce64 + value: '{"executorLabel":"exec-add-numbers","inputDefinitions":{"parameters":{"first":{"parameterType":"NUMBER_INTEGER"},"second":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-8870f0a2f68d8cd4568555a23e74ba662d25e8d5d7206d8c0b3d03e9d3afce64 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","add_numbers"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.9.0'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + add_numbers(first: int, second: int) -\u003e int:\n return first + second\n\n"],"image":"python:3.9"}' + - name: components-f927c43072fb8baf32c3241fad2cbf970008046a26a48f20f75c4226e4a16cd8 + value: '{"executorLabel":"exec-concat-message","inputDefinitions":{"parameters":{"first":{"parameterType":"STRING"},"second":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-f927c43072fb8baf32c3241fad2cbf970008046a26a48f20f75c4226e4a16cd8 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","concat_message"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.9.0'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + concat_message(first: str, second: str) -\u003e str:\n return first + second\n\n"],"image":"python:3.9"}' + - name: components-057891112980ba235e08bd912537a46f32f24c44d2eadd1f97e1f5f4ea3094c2 + value: '{"executorLabel":"exec-output-artifact","inputDefinitions":{"parameters":{"message":{"parameterType":"STRING"},"number":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-057891112980ba235e08bd912537a46f32f24c44d2eadd1f97e1f5f4ea3094c2 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","output_artifact"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.9.0'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + output_artifact(number: int, message: str) -\u003e Dataset:\n result = + [message for _ in range(number)]\n return ''\\n''.join(result)\n\n"],"image":"python:3.9"}' + - name: components-88a0f7f1dfae7eede46c958ca3094899fb994cfa854fd93e13a067becc26bac6 + value: '{"executorLabel":"exec-output-named-tuple","inputDefinitions":{"artifacts":{"artifact":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}},"outputDefinitions":{"artifacts":{"metrics":{"artifactType":{"schemaTitle":"system.Metrics","schemaVersion":"0.0.1"}},"model":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"}}},"parameters":{"scalar":{"parameterType":"STRING"}}}}' + - name: implementations-88a0f7f1dfae7eede46c958ca3094899fb994cfa854fd93e13a067becc26bac6 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","output_named_tuple"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.9.0'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + output_named_tuple(\n artifact: Input[Dataset]\n) -\u003e NamedTuple(''Outputs'', + [\n (''scalar'', str),\n (''metrics'', Metrics),\n (''model'', Model),\n]):\n scalar + = ''123''\n\n import json\n metrics = json.dumps({\n ''metrics'': + [{\n ''name'': ''accuracy'',\n ''numberValue'': 0.9,\n ''format'': + ''PERCENTAGE'',\n }]\n })\n\n with open(artifact.path) as f:\n artifact_contents + = f.read()\n model = ''Model contents: '' + artifact_contents\n\n from + collections import namedtuple\n output = namedtuple(''Outputs'', [''scalar'', + ''metrics'', ''model''])\n return output(scalar, metrics, model)\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"add-numbers":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-numbers"},"inputs":{"parameters":{"first":{"componentInputParameter":"first_number"},"second":{"componentInputParameter":"second_number"}}},"taskInfo":{"name":"add-numbers"}},"concat-message":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-concat-message"},"inputs":{"parameters":{"first":{"componentInputParameter":"first_message"},"second":{"componentInputParameter":"second_message"}}},"taskInfo":{"name":"concat-message"}},"output-artifact":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-output-artifact"},"dependentTasks":["add-numbers","concat-message"],"inputs":{"parameters":{"message":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"concat-message"}},"number":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"add-numbers"}}}},"taskInfo":{"name":"output-artifact"}},"output-named-tuple":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-output-named-tuple"},"dependentTasks":["output-artifact"],"inputs":{"artifacts":{"artifact":{"taskOutputArtifact":{"outputArtifactKey":"Output","producerTask":"output-artifact"}}}},"taskInfo":{"name":"output-named-tuple"}}}},"inputDefinitions":{"parameters":{"first_message":{"parameterType":"STRING"},"first_number":{"parameterType":"NUMBER_INTEGER"},"second_message":{"parameterType":"STRING"},"second_number":{"parameterType":"NUMBER_INTEGER"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - functions-with-outputs + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - 
--iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: 
component + value: '{{workflow.parameters.components-8870f0a2f68d8cd4568555a23e74ba662d25e8d5d7206d8c0b3d03e9d3afce64}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-numbers"},"inputs":{"parameters":{"first":{"componentInputParameter":"first_number"},"second":{"componentInputParameter":"second_number"}}},"taskInfo":{"name":"add-numbers"}}' + - name: container + value: '{{workflow.parameters.implementations-8870f0a2f68d8cd4568555a23e74ba662d25e8d5d7206d8c0b3d03e9d3afce64}}' + - name: task-name + value: add-numbers + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: add-numbers-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.add-numbers-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.add-numbers-driver.outputs.parameters.cached-decision}}' + depends: add-numbers-driver.Succeeded + name: add-numbers + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-f927c43072fb8baf32c3241fad2cbf970008046a26a48f20f75c4226e4a16cd8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-concat-message"},"inputs":{"parameters":{"first":{"componentInputParameter":"first_message"},"second":{"componentInputParameter":"second_message"}}},"taskInfo":{"name":"concat-message"}}' + - name: container + value: '{{workflow.parameters.implementations-f927c43072fb8baf32c3241fad2cbf970008046a26a48f20f75c4226e4a16cd8}}' + - name: task-name + value: concat-message + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: concat-message-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.concat-message-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.concat-message-driver.outputs.parameters.cached-decision}}' + depends: concat-message-driver.Succeeded + name: concat-message + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-057891112980ba235e08bd912537a46f32f24c44d2eadd1f97e1f5f4ea3094c2}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-output-artifact"},"dependentTasks":["add-numbers","concat-message"],"inputs":{"parameters":{"message":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"concat-message"}},"number":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"add-numbers"}}}},"taskInfo":{"name":"output-artifact"}}' + - name: container + value: '{{workflow.parameters.implementations-057891112980ba235e08bd912537a46f32f24c44d2eadd1f97e1f5f4ea3094c2}}' + - name: task-name + value: output-artifact + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: add-numbers.Succeeded && concat-message.Succeeded + name: output-artifact-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.output-artifact-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.output-artifact-driver.outputs.parameters.cached-decision}}' + depends: output-artifact-driver.Succeeded + name: output-artifact + template: system-container-executor + - arguments: + parameters: + - name: component + value: 
'{{workflow.parameters.components-88a0f7f1dfae7eede46c958ca3094899fb994cfa854fd93e13a067becc26bac6}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-output-named-tuple"},"dependentTasks":["output-artifact"],"inputs":{"artifacts":{"artifact":{"taskOutputArtifact":{"outputArtifactKey":"Output","producerTask":"output-artifact"}}}},"taskInfo":{"name":"output-named-tuple"}}' + - name: container + value: '{{workflow.parameters.implementations-88a0f7f1dfae7eede46c958ca3094899fb994cfa854fd93e13a067becc26bac6}}' + - name: task-name + value: output-named-tuple + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: output-artifact.Succeeded + name: output-named-tuple-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.output-named-tuple-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.output-named-tuple-driver.outputs.parameters.cached-decision}}' + depends: output-named-tuple-driver.Succeeded + name: output-named-tuple + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - functions-with-outputs + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - 
name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/log_streaming_compiled.yaml b/test_data/compiled-workflows/log_streaming_compiled.yaml new file mode 100644 index 00000000000..2e662cf61ec --- /dev/null +++ b/test_data/compiled-workflows/log_streaming_compiled.yaml @@ -0,0 +1,373 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: log-streaming-pipeline- +spec: + arguments: + parameters: + - name: components-888809574b5309659ec40b386743894e8953566f7d2130458f4fb24e267dfb9c + value: '{"executorLabel":"exec-print-message","inputDefinitions":{"parameters":{"message":{"parameterType":"STRING"}}}}' + - name: implementations-888809574b5309659ec40b386743894e8953566f7d2130458f4fb24e267dfb9c + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_message"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_message(message: str):\n import datetime # noqa: PLC0415\n import + time # noqa: PLC0415\n\n t_end = time.time() + 60\n while time.time() + \u003c t_end:\n print(message + \" (\" + str(datetime.datetime.now()) + + \")\")\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"print-message":{"cachingOptions":{},"componentRef":{"name":"comp-print-message"},"inputs":{"parameters":{"message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"print-message"}}}},"inputDefinitions":{"parameters":{"message":{"defaultValue":"Hello + world","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - log-streaming-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - 
metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-888809574b5309659ec40b386743894e8953566f7d2130458f4fb24e267dfb9c}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-print-message"},"inputs":{"parameters":{"message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"print-message"}}' + - name: container + value: '{{workflow.parameters.implementations-888809574b5309659ec40b386743894e8953566f7d2130458f4fb24e267dfb9c}}' + - name: task-name + value: 
print-message + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-message-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-message-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-message-driver.outputs.parameters.cached-decision}}' + depends: print-message-driver.Succeeded + name: print-message + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - log-streaming-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"message":"Hello world"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/long-running.yaml b/test_data/compiled-workflows/long-running.yaml new file mode 100644 index 00000000000..0ea0b219bf6 --- /dev/null +++ b/test_data/compiled-workflows/long-running.yaml @@ -0,0 +1,385 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: wait-awhile- +spec: + arguments: + parameters: + - name: 
components-50416a9fa23210403fe0e780d1dbd6667e4f73dfed854d15c7319fdd6ce87af8 + value: '{"executorLabel":"exec-wait-op"}' + - name: implementations-50416a9fa23210403fe0e780d1dbd6667e4f73dfed854d15c7319fdd6ce87af8 + value: '{"args":["echo step-1 sleeping for 5m; sleep 300; echo done1"],"command":["sh","-c"],"image":"alpine:latest"}' + - name: components-root + value: '{"dag":{"tasks":{"wait-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-wait-op"},"taskInfo":{"name":"wait-op"}},"wait-op-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-wait-op-2"},"dependentTasks":["wait-op"],"taskInfo":{"name":"wait-op-2"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - wait-awhile + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + 
value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-50416a9fa23210403fe0e780d1dbd6667e4f73dfed854d15c7319fdd6ce87af8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-wait-op"},"taskInfo":{"name":"wait-op"}}' + - name: container + value: '{{workflow.parameters.implementations-50416a9fa23210403fe0e780d1dbd6667e4f73dfed854d15c7319fdd6ce87af8}}' + - name: task-name + value: wait-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: wait-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.wait-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.wait-op-driver.outputs.parameters.cached-decision}}' + depends: wait-op-driver.Succeeded + name: wait-op + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-50416a9fa23210403fe0e780d1dbd6667e4f73dfed854d15c7319fdd6ce87af8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-wait-op-2"},"dependentTasks":["wait-op"],"taskInfo":{"name":"wait-op-2"}}' + - name: container + value: '{{workflow.parameters.implementations-50416a9fa23210403fe0e780d1dbd6667e4f73dfed854d15c7319fdd6ce87af8}}' + - name: task-name + value: wait-op-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: wait-op.Succeeded + name: wait-op-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.wait-op-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.wait-op-2-driver.outputs.parameters.cached-decision}}' + depends: wait-op-2-driver.Succeeded + name: wait-op-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - wait-awhile + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - 
'{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/loop_consume_upstream.yaml b/test_data/compiled-workflows/loop_consume_upstream.yaml new file mode 100644 index 00000000000..6062526f4e1 --- /dev/null +++ b/test_data/compiled-workflows/loop_consume_upstream.yaml @@ -0,0 +1,569 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: loop-consume-upstream- +spec: + arguments: + parameters: + - name: components-ed5ffb9d2b8fff6bccdc76c9370a0e8bcdb7e239acce180cc6ce4a8eca4d1972 + value: '{"executorLabel":"exec-create-file","inputDefinitions":{"parameters":{"content":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"file":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: implementations-ed5ffb9d2b8fff6bccdc76c9370a0e8bcdb7e239acce180cc6ce4a8eca4d1972 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","create_file"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + create_file(file: Output[Artifact], content: str):\n with open(file.path, + ''w'') as f:\n f.write(content)\n\n"],"image":"python:3.9"}' + - name: components-711e8b6e3ae4585024167b39bc927d87c5c6dd39e2ab20b0613ea96fd049dc4a + value: '{"executorLabel":"exec-read-file","inputDefinitions":{"artifacts":{"file":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-711e8b6e3ae4585024167b39bc927d87c5c6dd39e2ab20b0613ea96fd049dc4a + value: '{"args":["--executor_input","{{$}}","--function_to_execute","read_file"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + read_file(file: Input[Artifact]) -\u003e str:\n with open(file.path, ''r'') + as f:\n print(f.read())\n return file.path\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-1 + value: '{"dag":{"tasks":{"create-file":{"cachingOptions":{},"componentRef":{"name":"comp-create-file"},"inputs":{"parameters":{"content":{"componentInputParameter":"pipelinechannel--split-input-Output-loop-item"}}},"taskInfo":{"name":"same + display name"}},"read-file":{"cachingOptions":{},"componentRef":{"name":"comp-read-file"},"dependentTasks":["create-file"],"inputs":{"artifacts":{"file":{"taskOutputArtifact":{"outputArtifactKey":"file","producerTask":"create-file"}}}},"taskInfo":{"name":"same + display name"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--split-input-Output":{"parameterType":"LIST"},"pipelinechannel--split-input-Output-loop-item":{"parameterType":"STRING"}}}}' + - name: components-ca2a87b6895df4cb47f1b6631daadcc4a5634c47ac3aadb0d316b2910f3b7794 + value: '{"executorLabel":"exec-print-input","inputDefinitions":{"parameters":{"input":{"parameterType":"LIST"}}}}' + - name: implementations-ca2a87b6895df4cb47f1b6631daadcc4a5634c47ac3aadb0d316b2910f3b7794 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_input"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_input(input: list):\n for item in input:\n print(f''Input + item: {item}'')\n\n"],"image":"python:3.9"}' + - name: components-ed7019fac34ed83d85bd9c5b5fd36b6f73f1211ad397845e5f71e73680a4d5a6 + value: '{"executorLabel":"exec-split-input","inputDefinitions":{"parameters":{"input":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"LIST"}}}}' + - name: implementations-ed7019fac34ed83d85bd9c5b5fd36b6f73f1211ad397845e5f71e73680a4d5a6 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","split_input"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + split_input(input: str) -\u003e list:\n return input.split('','')\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"for-loop-1":{"componentRef":{"name":"comp-for-loop-1"},"dependentTasks":["split-input"],"inputs":{"parameters":{"pipelinechannel--split-input-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-input"}}}},"parameterIterator":{"itemInput":"pipelinechannel--split-input-Output-loop-item","items":{"inputParameter":"pipelinechannel--split-input-Output"}},"taskInfo":{"name":"for-loop-1"}},"print-input":{"cachingOptions":{},"componentRef":{"name":"comp-print-input"},"dependentTasks":["split-input"],"inputs":{"parameters":{"input":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-input"}}}},"taskInfo":{"name":"same + display name"}},"split-input":{"cachingOptions":{},"componentRef":{"name":"comp-split-input"},"inputs":{"parameters":{"input":{"runtimeValue":{"constant":"component1,component2,component3"}}}},"taskInfo":{"name":"same + display name"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - loop-consume-upstream + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - 
'{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch 
+ - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ed5ffb9d2b8fff6bccdc76c9370a0e8bcdb7e239acce180cc6ce4a8eca4d1972}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-create-file"},"inputs":{"parameters":{"content":{"componentInputParameter":"pipelinechannel--split-input-Output-loop-item"}}},"taskInfo":{"name":"same + display name"}}' + - name: container + value: '{{workflow.parameters.implementations-ed5ffb9d2b8fff6bccdc76c9370a0e8bcdb7e239acce180cc6ce4a8eca4d1972}}' + - name: task-name + value: create-file + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: create-file-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.create-file-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.create-file-driver.outputs.parameters.cached-decision}}' + depends: create-file-driver.Succeeded + name: create-file + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-711e8b6e3ae4585024167b39bc927d87c5c6dd39e2ab20b0613ea96fd049dc4a}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-read-file"},"dependentTasks":["create-file"],"inputs":{"artifacts":{"file":{"taskOutputArtifact":{"outputArtifactKey":"file","producerTask":"create-file"}}}},"taskInfo":{"name":"same + display name"}}' + - name: container + value: '{{workflow.parameters.implementations-711e8b6e3ae4585024167b39bc927d87c5c6dd39e2ab20b0613ea96fd049dc4a}}' + - name: task-name + value: read-file + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: create-file.Succeeded + name: read-file-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.read-file-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.read-file-driver.outputs.parameters.cached-decision}}' + depends: read-file-driver.Succeeded + name: read-file + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-1 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - loop-consume-upstream + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: 
ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-1}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-1"},"dependentTasks":["split-input"],"inputs":{"parameters":{"pipelinechannel--split-input-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-input"}}}},"parameterIterator":{"itemInput":"pipelinechannel--split-input-Output-loop-item","items":{"inputParameter":"pipelinechannel--split-input-Output"}},"taskInfo":{"name":"for-loop-1"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-1 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-1-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-1"},"dependentTasks":["split-input"],"inputs":{"parameters":{"pipelinechannel--split-input-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-input"}}}},"parameterIterator":{"itemInput":"pipelinechannel--split-input-Output-loop-item","items":{"inputParameter":"pipelinechannel--split-input-Output"}},"taskInfo":{"name":"for-loop-1"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-1-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-1-for-loop-1-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: split-input.Succeeded + name: for-loop-1 + template: comp-for-loop-1-for-loop-1-iterator + - arguments: + parameters: + - name: component + value: 
'{{workflow.parameters.components-ca2a87b6895df4cb47f1b6631daadcc4a5634c47ac3aadb0d316b2910f3b7794}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-print-input"},"dependentTasks":["split-input"],"inputs":{"parameters":{"input":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-input"}}}},"taskInfo":{"name":"same + display name"}}' + - name: container + value: '{{workflow.parameters.implementations-ca2a87b6895df4cb47f1b6631daadcc4a5634c47ac3aadb0d316b2910f3b7794}}' + - name: task-name + value: print-input + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: split-input.Succeeded + name: print-input-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-input-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-input-driver.outputs.parameters.cached-decision}}' + depends: print-input-driver.Succeeded + name: print-input + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ed7019fac34ed83d85bd9c5b5fd36b6f73f1211ad397845e5f71e73680a4d5a6}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-split-input"},"inputs":{"parameters":{"input":{"runtimeValue":{"constant":"component1,component2,component3"}}}},"taskInfo":{"name":"same + display name"}}' + - name: container + value: '{{workflow.parameters.implementations-ed7019fac34ed83d85bd9c5b5fd36b6f73f1211ad397845e5f71e73680a4d5a6}}' + - name: task-name + value: split-input + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: split-input-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.split-input-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.split-input-driver.outputs.parameters.cached-decision}}' + depends: split-input-driver.Succeeded + name: split-input + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/metrics_visualization_v2.yaml b/test_data/compiled-workflows/metrics_visualization_v2.yaml new file mode 100644 index 00000000000..cdeab0d7c5e --- /dev/null +++ b/test_data/compiled-workflows/metrics_visualization_v2.yaml @@ -0,0 +1,557 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: metrics-visualization-pipeline- +spec: + arguments: + parameters: + - name: components-1e13061c086c82fca98c3bec1d9f3c373acd890377eb9145cfdb7f69de426019 + value: '{"executorLabel":"exec-digit-classification","outputDefinitions":{"artifacts":{"metrics":{"artifactType":{"schemaTitle":"system.Metrics","schemaVersion":"0.0.1"}}}}}' + - name: 
implementations-1e13061c086c82fca98c3bec1d9f3c373acd890377eb9145cfdb7f69de426019 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","digit_classification"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''scikit-learn'' \u0026\u0026 python3 + -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + digit_classification(metrics: Output[Metrics]):\n from sklearn import model_selection\n from + sklearn.linear_model import LogisticRegression\n from sklearn import datasets\n from + sklearn.metrics import accuracy_score\n\n # Load digits dataset\n iris + = datasets.load_iris()\n\n # # Create feature matrix\n X = iris.data\n\n # + Create target vector\n y = iris.target\n\n #test size\n test_size + = 0.33\n\n seed = 7\n #cross-validation settings\n kfold = model_selection.KFold(n_splits=10, + random_state=seed, shuffle=True)\n\n #Model instance\n model = LogisticRegression()\n scoring + = ''accuracy''\n results = model_selection.cross_val_score(\n model, + X, y, cv=kfold, scoring=scoring)\n\n #split data\n X_train, X_test, + y_train, y_test = model_selection.train_test_split(\n X, y, test_size=test_size, + random_state=seed)\n #fit model\n model.fit(X_train, y_train)\n\n #accuracy + on test set\n result = model.score(X_test, y_test)\n metrics.log_metric(''accuracy'', + (result * 100.0))\n\n"],"image":"python:3.9"}' + - name: components-507141586ba39388ea617640b16454b71ab71dfefbfda1d7add5fd6b13fc6f36 + value: '{"executorLabel":"exec-html-visualization","outputDefinitions":{"artifacts":{"html_artifact":{"artifactType":{"schemaTitle":"system.HTML","schemaVersion":"0.0.1"}}}}}' + - name: implementations-507141586ba39388ea617640b16454b71ab71dfefbfda1d7add5fd6b13fc6f36 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","html_visualization"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + html_visualization(html_artifact: Output[HTML]):\n html_content = ''\u003c!DOCTYPE + html\u003e\u003chtml\u003e\u003cbody\u003e\u003ch1\u003eHello world\u003c/h1\u003e\u003c/body\u003e\u003c/html\u003e''\n with + open(html_artifact.path, ''w'') as f:\n f.write(html_content)\n\n"],"image":"python:3.9"}' + - name: components-070c6090f000ab4579ea9970fdbe93fd311d1b7a3d4e4c9735898c872ed91a8f + value: '{"executorLabel":"exec-iris-sgdclassifier","inputDefinitions":{"parameters":{"test_samples_fraction":{"parameterType":"NUMBER_DOUBLE"}}},"outputDefinitions":{"artifacts":{"metrics":{"artifactType":{"schemaTitle":"system.ClassificationMetrics","schemaVersion":"0.0.1"}}}}}' + - name: implementations-070c6090f000ab4579ea9970fdbe93fd311d1b7a3d4e4c9735898c872ed91a8f + value: '{"args":["--executor_input","{{$}}","--function_to_execute","iris_sgdclassifier"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''scikit-learn'' \u0026\u0026 python3 + -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + iris_sgdclassifier(test_samples_fraction: float,\n metrics: + Output[ClassificationMetrics]):\n from sklearn import datasets, model_selection\n from + sklearn.linear_model import SGDClassifier\n from sklearn.metrics import + confusion_matrix\n\n iris_dataset = datasets.load_iris()\n train_x, + test_x, train_y, test_y = model_selection.train_test_split(\n iris_dataset[''data''],\n iris_dataset[''target''],\n test_size=test_samples_fraction)\n\n classifier + = SGDClassifier()\n classifier.fit(train_x, train_y)\n predictions = + model_selection.cross_val_predict(\n classifier, train_x, train_y, + cv=3)\n metrics.log_confusion_matrix(\n [''Setosa'', ''Versicolour'', + ''Virginica''],\n confusion_matrix(\n train_y,\n predictions).tolist() # + .tolist() to convert np array to list.\n )\n\n"],"image":"python:3.9"}' + - name: components-837e3b5eebdb2f6e1895391076d11f242320d5ee805a3712c681d87af27a0322 + value: '{"executorLabel":"exec-markdown-visualization","outputDefinitions":{"artifacts":{"markdown_artifact":{"artifactType":{"schemaTitle":"system.Markdown","schemaVersion":"0.0.1"}}}}}' + - name: implementations-837e3b5eebdb2f6e1895391076d11f242320d5ee805a3712c681d87af27a0322 + value: 
'{"args":["--executor_input","{{$}}","--function_to_execute","markdown_visualization"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + markdown_visualization(markdown_artifact: Output[Markdown]):\n markdown_content + = ''## Hello world \\n\\n Markdown content''\n with open(markdown_artifact.path, + ''w'') as f:\n f.write(markdown_content)\n\n"],"image":"python:3.9"}' + - name: components-cedc2096e3940b72ca9724db5ab3d0ab9680a848e3c724b60de8adea53f2b7c8 + value: '{"executorLabel":"exec-wine-classification","outputDefinitions":{"artifacts":{"metrics":{"artifactType":{"schemaTitle":"system.ClassificationMetrics","schemaVersion":"0.0.1"}}}}}' + - name: implementations-cedc2096e3940b72ca9724db5ab3d0ab9680a848e3c724b60de8adea53f2b7c8 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","wine_classification"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''scikit-learn'' \u0026\u0026 python3 + -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + wine_classification(metrics: Output[ClassificationMetrics]):\n from sklearn.ensemble + import RandomForestClassifier\n from sklearn.metrics import roc_curve\n from + sklearn.datasets import load_wine\n from sklearn.model_selection import + train_test_split, cross_val_predict\n\n X, y = load_wine(return_X_y=True)\n # + Binary classification problem for label 1.\n y = y == 1\n\n X_train, + X_test, y_train, y_test = train_test_split(X, y, random_state=42)\n rfc + = RandomForestClassifier(n_estimators=10, random_state=42)\n rfc.fit(X_train, + y_train)\n y_scores = cross_val_predict(\n rfc, X_train, y_train, + cv=3, method=''predict_proba'')\n y_predict = cross_val_predict(rfc, X_train, + y_train, cv=3, method=''predict'')\n fpr, tpr, thresholds = roc_curve(\n y_true=y_train, + y_score=y_scores[:, 1], pos_label=True)\n\n # avoid inf thresholds\n epsilon + = 1e-6\n thresholds = [1 - epsilon if t == float(''inf'') else t for t + in thresholds]\n\n metrics.log_roc_curve(fpr, tpr, thresholds)\n\n"],"image":"python:3.9"}' + - name: components-root + value: 
'{"dag":{"tasks":{"digit-classification":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-digit-classification"},"taskInfo":{"name":"digit-classification"}},"html-visualization":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-html-visualization"},"taskInfo":{"name":"html-visualization"}},"iris-sgdclassifier":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-iris-sgdclassifier"},"inputs":{"parameters":{"test_samples_fraction":{"runtimeValue":{"constant":0.3}}}},"taskInfo":{"name":"iris-sgdclassifier"}},"markdown-visualization":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-markdown-visualization"},"taskInfo":{"name":"markdown-visualization"}},"wine-classification":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-wine-classification"},"taskInfo":{"name":"wine-classification"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - metrics-visualization-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + 
valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-1e13061c086c82fca98c3bec1d9f3c373acd890377eb9145cfdb7f69de426019}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-digit-classification"},"taskInfo":{"name":"digit-classification"}}' + - name: container + value: '{{workflow.parameters.implementations-1e13061c086c82fca98c3bec1d9f3c373acd890377eb9145cfdb7f69de426019}}' + - name: task-name + value: digit-classification + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: digit-classification-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.digit-classification-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.digit-classification-driver.outputs.parameters.cached-decision}}' + depends: digit-classification-driver.Succeeded + name: digit-classification + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-507141586ba39388ea617640b16454b71ab71dfefbfda1d7add5fd6b13fc6f36}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-html-visualization"},"taskInfo":{"name":"html-visualization"}}' + - name: container + value: '{{workflow.parameters.implementations-507141586ba39388ea617640b16454b71ab71dfefbfda1d7add5fd6b13fc6f36}}' + - name: task-name + value: html-visualization + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: html-visualization-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.html-visualization-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.html-visualization-driver.outputs.parameters.cached-decision}}' + depends: html-visualization-driver.Succeeded + name: html-visualization + 
template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-070c6090f000ab4579ea9970fdbe93fd311d1b7a3d4e4c9735898c872ed91a8f}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-iris-sgdclassifier"},"inputs":{"parameters":{"test_samples_fraction":{"runtimeValue":{"constant":0.3}}}},"taskInfo":{"name":"iris-sgdclassifier"}}' + - name: container + value: '{{workflow.parameters.implementations-070c6090f000ab4579ea9970fdbe93fd311d1b7a3d4e4c9735898c872ed91a8f}}' + - name: task-name + value: iris-sgdclassifier + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: iris-sgdclassifier-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.iris-sgdclassifier-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.iris-sgdclassifier-driver.outputs.parameters.cached-decision}}' + depends: iris-sgdclassifier-driver.Succeeded + name: iris-sgdclassifier + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-837e3b5eebdb2f6e1895391076d11f242320d5ee805a3712c681d87af27a0322}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-markdown-visualization"},"taskInfo":{"name":"markdown-visualization"}}' + - name: container + value: '{{workflow.parameters.implementations-837e3b5eebdb2f6e1895391076d11f242320d5ee805a3712c681d87af27a0322}}' + - name: task-name + value: markdown-visualization + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: markdown-visualization-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.markdown-visualization-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.markdown-visualization-driver.outputs.parameters.cached-decision}}' + depends: markdown-visualization-driver.Succeeded + name: markdown-visualization + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-cedc2096e3940b72ca9724db5ab3d0ab9680a848e3c724b60de8adea53f2b7c8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-wine-classification"},"taskInfo":{"name":"wine-classification"}}' + - name: container + value: '{{workflow.parameters.implementations-cedc2096e3940b72ca9724db5ab3d0ab9680a848e3c724b60de8adea53f2b7c8}}' + - name: task-name + value: wine-classification + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: wine-classification-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.wine-classification-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.wine-classification-driver.outputs.parameters.cached-decision}}' + depends: wine-classification-driver.Succeeded + name: wine-classification + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - metrics-visualization-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id 
+ - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/mixed_parameters.yaml b/test_data/compiled-workflows/mixed_parameters.yaml new file mode 100644 index 00000000000..7ac1bf98cbf --- /dev/null +++ b/test_data/compiled-workflows/mixed_parameters.yaml @@ -0,0 +1,469 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: mixed-parameters-pipeline- +spec: + arguments: + parameters: + - name: components-b5a2e54f451e167f623c082d02fe4e7719610dd2654a83c57f404de3898fc086 + value: '{"executorLabel":"exec-crust-comp","inputDefinitions":{"parameters":{"x":{"parameterType":"NUMBER_INTEGER"},"y":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-b5a2e54f451e167f623c082d02fe4e7719610dd2654a83c57f404de3898fc086 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","crust_comp"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + crust_comp(x: int, y: int):\n print(''sum :'', x + y)\n\n"],"image":"python:3.9"}' + - name: components-551cca49c1bbb847a0aaa9b53908a3b2c671ec76f99094d68fcb82705869d508 + value: '{"executorLabel":"exec-core-comp","outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-551cca49c1bbb847a0aaa9b53908a3b2c671ec76f99094d68fcb82705869d508 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","core_comp"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + core_comp() -\u003e int:\n return 1\n\n"],"image":"python:3.9"}' + - name: components-comp-core + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"core-comp"}}}},"tasks":{"core-comp":{"cachingOptions":{},"componentRef":{"name":"comp-core-comp"},"taskInfo":{"name":"core-comp"}}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-comp-mantle + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"core"}}}},"tasks":{"core":{"cachingOptions":{},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-root + value: '{"dag":{"tasks":{"crust-comp":{"cachingOptions":{},"componentRef":{"name":"comp-crust-comp"},"dependentTasks":["mantle"],"inputs":{"parameters":{"x":{"runtimeValue":{"constant":2}},"y":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"mantle"}}}},"taskInfo":{"name":"crust-comp"}},"mantle":{"cachingOptions":{},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - mixed-parameters-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - 
'{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} 
+ name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-551cca49c1bbb847a0aaa9b53908a3b2c671ec76f99094d68fcb82705869d508}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-core-comp"},"taskInfo":{"name":"core-comp"}}' + - name: container + value: '{{workflow.parameters.implementations-551cca49c1bbb847a0aaa9b53908a3b2c671ec76f99094d68fcb82705869d508}}' + - name: task-name + value: core-comp + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: core-comp-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.core-comp-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.core-comp-driver.outputs.parameters.cached-decision}}' + depends: core-comp-driver.Succeeded + name: core-comp + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-core + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - mixed-parameters-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-core}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}' + - name: task-name + value: core + name: core-driver + template: 
system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.core-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.core-driver.outputs.parameters.condition}}' + depends: core-driver.Succeeded + name: core + template: comp-core + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-mantle + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-b5a2e54f451e167f623c082d02fe4e7719610dd2654a83c57f404de3898fc086}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-crust-comp"},"dependentTasks":["mantle"],"inputs":{"parameters":{"x":{"runtimeValue":{"constant":2}},"y":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"mantle"}}}},"taskInfo":{"name":"crust-comp"}}' + - name: container + value: '{{workflow.parameters.implementations-b5a2e54f451e167f623c082d02fe4e7719610dd2654a83c57f404de3898fc086}}' + - name: task-name + value: crust-comp + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: mantle.Succeeded + name: crust-comp-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.crust-comp-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.crust-comp-driver.outputs.parameters.cached-decision}}' + depends: crust-comp-driver.Succeeded + name: crust-comp + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-mantle}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}' + - name: task-name + value: mantle + name: mantle-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.mantle-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.mantle-driver.outputs.parameters.condition}}' + depends: mantle-driver.Succeeded + name: mantle + template: comp-mantle + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/modelcar.yaml b/test_data/compiled-workflows/modelcar.yaml new file mode 100644 index 00000000000..75bba04b629 --- /dev/null +++ b/test_data/compiled-workflows/modelcar.yaml @@ -0,0 +1,492 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-modelcar-model- +spec: + arguments: + parameters: + - name: components-00fe5acf7a3f52f8f54524edb73f7d6272194b96173074dd3cdd140327308371 + value: '{"executorLabel":"exec-build-model-car","outputDefinitions":{"artifacts":{"model":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"}}}}}' + - name: 
implementations-00fe5acf7a3f52f8f54524edb73f7d6272194b96173074dd3cdd140327308371 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","build_model_car"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + build_model_car(model: dsl.Output[dsl.Model]):\n # Simulate pushing the + Modelcar to an OCI registry\n model.uri = \"oci://registry.domain.local/org/repo:v1.0\"\n\n"],"image":"python:3.9"}' + - name: components-426fe71cde5719d1162c36199e1686f1b2df4551d1fa8fa13a093333ea09a456 + value: '{"executorLabel":"exec-get-model-files-list","inputDefinitions":{"artifacts":{"input_model":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"}}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-426fe71cde5719d1162c36199e1686f1b2df4551d1fa8fa13a093333ea09a456 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","get_model_files_list"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + get_model_files_list(input_model: dsl.Input[dsl.Model]) -\u003e str:\n import + os\n import os.path\n\n if not os.path.exists(input_model.path):\n raise + RuntimeError(f\"The model does not exist at: {input_model.path}\")\n\n expected_files + = {\n \"added_tokens.json\",\n \"config.json\",\n \"generation_config.json\",\n \"merges.txt\",\n \"model.safetensors\",\n \"normalizer.json\",\n \"preprocessor_config.json\",\n \"special_tokens_map.json\",\n \"tokenizer.json\",\n \"tokenizer_config.json\",\n \"vocab.json\",\n }\n\n filesInPath + = set(os.listdir(input_model.path))\n\n if not filesInPath.issuperset(expected_files):\n raise + RuntimeError(\n \"The model does not have expected files: \"\n + + \", \".join(sorted(expected_files.difference(filesInPath)))\n )\n\n return + \", \".join(sorted(filesInPath))\n\n"],"image":"python:3.9"}' + - name: components-comp-importer + value: '{"executorLabel":"exec-importer","inputDefinitions":{"parameters":{"uri":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"artifact":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"}}}}}' + - name: implementations-comp-importer + value: 
'{"artifactUri":{"runtimeParameter":"uri"},"typeSchema":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"}}' + - name: components-root + value: '{"dag":{"tasks":{"build-model-car":{"cachingOptions":{},"componentRef":{"name":"comp-build-model-car"},"taskInfo":{"name":"build-model-car"}},"get-model-files-list":{"cachingOptions":{},"componentRef":{"name":"comp-get-model-files-list"},"dependentTasks":["importer"],"inputs":{"artifacts":{"input_model":{"taskOutputArtifact":{"outputArtifactKey":"artifact","producerTask":"importer"}}}},"taskInfo":{"name":"get-model-files-list"}},"importer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"uri":{"componentInputParameter":"model_uri"}}},"taskInfo":{"name":"importer"}}}},"inputDefinitions":{"parameters":{"model_uri":{"defaultValue":"oci://registry.domain.local/modelcar:test","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-modelcar-model + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - 
should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - container: + args: + - --executor_type + - importer + - --task_spec + - '{{inputs.parameters.task}}' + - --component_spec + - '{{inputs.parameters.component}}' + - --importer_spec + - '{{inputs.parameters.importer}}' + - --pipeline_name + - pipeline-with-modelcar-model + - --run_id + - '{{workflow.uid}}' + - --parent_dag_id + - '{{inputs.parameters.parent-dag-id}}' + - --pod_name + - $(KFP_POD_NAME) + - --pod_uid + - $(KFP_POD_UID) + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - launcher-v2 + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: ghcr.io/kubeflow/kfp-launcher:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: task + - name: component + - name: importer + - name: parent-dag-id + metadata: {} + name: system-importer + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-00fe5acf7a3f52f8f54524edb73f7d6272194b96173074dd3cdd140327308371}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-build-model-car"},"taskInfo":{"name":"build-model-car"}}' + - name: container + value: '{{workflow.parameters.implementations-00fe5acf7a3f52f8f54524edb73f7d6272194b96173074dd3cdd140327308371}}' + - name: task-name + value: build-model-car + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: build-model-car-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.build-model-car-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + 
value: '{{tasks.build-model-car-driver.outputs.parameters.cached-decision}}' + depends: build-model-car-driver.Succeeded + name: build-model-car + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-426fe71cde5719d1162c36199e1686f1b2df4551d1fa8fa13a093333ea09a456}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-get-model-files-list"},"dependentTasks":["importer"],"inputs":{"artifacts":{"input_model":{"taskOutputArtifact":{"outputArtifactKey":"artifact","producerTask":"importer"}}}},"taskInfo":{"name":"get-model-files-list"}}' + - name: container + value: '{{workflow.parameters.implementations-426fe71cde5719d1162c36199e1686f1b2df4551d1fa8fa13a093333ea09a456}}' + - name: task-name + value: get-model-files-list + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: importer.Succeeded + name: get-model-files-list-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.get-model-files-list-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.get-model-files-list-driver.outputs.parameters.cached-decision}}' + depends: get-model-files-list-driver.Succeeded + name: get-model-files-list + template: system-container-executor + - arguments: + parameters: + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"uri":{"componentInputParameter":"model_uri"}}},"taskInfo":{"name":"importer"}}' + - name: component + value: '{{workflow.parameters.components-comp-importer}}' + - name: importer + value: '{{workflow.parameters.implementations-comp-importer}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: importer + template: system-importer + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-modelcar-model + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: 
"-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"model_uri":"oci://registry.domain.local/modelcar:test"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/multiple_artifacts_namedtuple.yaml b/test_data/compiled-workflows/multiple_artifacts_namedtuple.yaml new file mode 100644 index 00000000000..048da9913ae --- /dev/null +++ b/test_data/compiled-workflows/multiple_artifacts_namedtuple.yaml @@ -0,0 +1,473 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: multiple-artifacts-namedtuple-pipeline- +spec: + arguments: + parameters: + - name: components-415b11a96a5d71255274b3e811e186d556046f1ffe8a8dfd4e13193f62447e13 + value: '{"executorLabel":"exec-crust-comp","inputDefinitions":{"artifacts":{"ds1":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}},"ds2":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-415b11a96a5d71255274b3e811e186d556046f1ffe8a8dfd4e13193f62447e13 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","crust_comp"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + crust_comp(\n ds1: dsl.Dataset,\n ds2: dsl.Dataset,\n):\n with open(ds1.path, + ''r'') as f:\n print(''ds1: '', f.read())\n with open(ds2.path, + ''r'') as f:\n print(''ds2: '', f.read())\n\n"],"image":"python:3.9"}' + - name: components-0ac03a39a38b1a3cad2a272deff551384098d69749e6673beb6cfdc3a1e59015 + value: '{"executorLabel":"exec-core-comp","outputDefinitions":{"artifacts":{"ds1":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}},"ds2":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-0ac03a39a38b1a3cad2a272deff551384098d69749e6673beb6cfdc3a1e59015 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","core_comp"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + core_comp(ds1: dsl.Output[dsl.Dataset], ds2: dsl.Output[dsl.Dataset]):\n with + open(ds1.path, ''w'') as f:\n f.write(''foo'')\n with open(ds2.path, + ''w'') as f:\n f.write(''bar'')\n\n"],"image":"python:3.9"}' + - name: components-comp-core + value: '{"dag":{"outputs":{"artifacts":{"ds1":{"artifactSelectors":[{"outputArtifactKey":"ds1","producerSubtask":"core-comp"}]},"ds2":{"artifactSelectors":[{"outputArtifactKey":"ds2","producerSubtask":"core-comp"}]}}},"tasks":{"core-comp":{"cachingOptions":{},"componentRef":{"name":"comp-core-comp"},"taskInfo":{"name":"core-comp"}}}},"outputDefinitions":{"artifacts":{"ds1":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}},"ds2":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: components-comp-mantle + value: '{"dag":{"outputs":{"artifacts":{"ds1":{"artifactSelectors":[{"outputArtifactKey":"ds1","producerSubtask":"core"}]},"ds2":{"artifactSelectors":[{"outputArtifactKey":"ds2","producerSubtask":"core"}]}}},"tasks":{"core":{"cachingOptions":{},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}}},"outputDefinitions":{"artifacts":{"ds1":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}},"ds2":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: components-root + value: '{"dag":{"tasks":{"crust-comp":{"cachingOptions":{},"componentRef":{"name":"comp-crust-comp"},"dependentTasks":["mantle"],"inputs":{"artifacts":{"ds1":{"taskOutputArtifact":{"outputArtifactKey":"ds1","producerTask":"mantle"}},"ds2":{"taskOutputArtifact":{"outputArtifactKey":"ds2","producerTask":"mantle"}}}},"taskInfo":{"name":"crust-comp"}},"mantle":{"cachingOptions":{},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - multiple-artifacts-namedtuple-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy 
+ - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-0ac03a39a38b1a3cad2a272deff551384098d69749e6673beb6cfdc3a1e59015}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-core-comp"},"taskInfo":{"name":"core-comp"}}' + - name: container + value: 
'{{workflow.parameters.implementations-0ac03a39a38b1a3cad2a272deff551384098d69749e6673beb6cfdc3a1e59015}}' + - name: task-name + value: core-comp + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: core-comp-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.core-comp-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.core-comp-driver.outputs.parameters.cached-decision}}' + depends: core-comp-driver.Succeeded + name: core-comp + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-core + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - multiple-artifacts-namedtuple-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-core}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}' + - name: task-name + value: core + name: core-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.core-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.core-driver.outputs.parameters.condition}}' + depends: core-driver.Succeeded + name: core + template: comp-core + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-mantle + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: 
'{{workflow.parameters.components-415b11a96a5d71255274b3e811e186d556046f1ffe8a8dfd4e13193f62447e13}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-crust-comp"},"dependentTasks":["mantle"],"inputs":{"artifacts":{"ds1":{"taskOutputArtifact":{"outputArtifactKey":"ds1","producerTask":"mantle"}},"ds2":{"taskOutputArtifact":{"outputArtifactKey":"ds2","producerTask":"mantle"}}}},"taskInfo":{"name":"crust-comp"}}' + - name: container + value: '{{workflow.parameters.implementations-415b11a96a5d71255274b3e811e186d556046f1ffe8a8dfd4e13193f62447e13}}' + - name: task-name + value: crust-comp + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: mantle.Succeeded + name: crust-comp-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.crust-comp-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.crust-comp-driver.outputs.parameters.cached-decision}}' + depends: crust-comp-driver.Succeeded + name: crust-comp + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-mantle}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}' + - name: task-name + value: mantle + name: mantle-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.mantle-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.mantle-driver.outputs.parameters.condition}}' + depends: mantle-driver.Succeeded + name: mantle + template: comp-mantle + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/multiple_parameters_namedtuple.yaml b/test_data/compiled-workflows/multiple_parameters_namedtuple.yaml new file mode 100644 index 00000000000..da05ae82a76 --- /dev/null +++ b/test_data/compiled-workflows/multiple_parameters_namedtuple.yaml @@ -0,0 +1,472 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: multiple-parameters-namedtuple-pipeline- +spec: + arguments: + parameters: + - name: components-54461b7769754ecea4f755e24938448cd07c365048c2ee6e16271652b11205d8 + value: '{"executorLabel":"exec-crust-comp","inputDefinitions":{"parameters":{"val1":{"parameterType":"STRING"},"val2":{"parameterType":"STRING"}}}}' + - name: implementations-54461b7769754ecea4f755e24938448cd07c365048c2ee6e16271652b11205d8 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","crust_comp"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + crust_comp(val1: str, val2: str):\n print(''val1: '', val1)\n print(''val2: + '', val2)\n\n"],"image":"python:3.9"}' + - name: components-6d13d1db365eee59a2cdc2d89bfaa4a0c32572a64b1743f50115e2614b7b60bd + value: '{"executorLabel":"exec-core-comp","outputDefinitions":{"parameters":{"val1":{"parameterType":"STRING"},"val2":{"parameterType":"STRING"}}}}' + - name: implementations-6d13d1db365eee59a2cdc2d89bfaa4a0c32572a64b1743f50115e2614b7b60bd + value: '{"args":["--executor_input","{{$}}","--function_to_execute","core_comp"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + core_comp() -\u003e NamedTuple(''outputs'', val1=str, val2=str): # type: + ignore\n outputs = NamedTuple(''outputs'', val1=str, val2=str)\n return + outputs(''foo'', ''bar'')\n\n"],"image":"python:3.9"}' + - name: components-comp-core + value: '{"dag":{"outputs":{"parameters":{"val1":{"valueFromParameter":{"outputParameterKey":"val1","producerSubtask":"core-comp"}},"val2":{"valueFromParameter":{"outputParameterKey":"val2","producerSubtask":"core-comp"}}}},"tasks":{"core-comp":{"cachingOptions":{},"componentRef":{"name":"comp-core-comp"},"taskInfo":{"name":"core-comp"}}}},"outputDefinitions":{"parameters":{"val1":{"parameterType":"STRING"},"val2":{"parameterType":"STRING"}}}}' + - name: components-comp-mantle + value: '{"dag":{"outputs":{"parameters":{"val1":{"valueFromParameter":{"outputParameterKey":"val1","producerSubtask":"core"}},"val2":{"valueFromParameter":{"outputParameterKey":"val2","producerSubtask":"core"}}}},"tasks":{"core":{"cachingOptions":{},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}}},"outputDefinitions":{"parameters":{"val1":{"parameterType":"STRING"},"val2":{"parameterType":"STRING"}}}}' + - name: components-root + value: 
'{"dag":{"tasks":{"crust-comp":{"cachingOptions":{},"componentRef":{"name":"comp-crust-comp"},"dependentTasks":["mantle"],"inputs":{"parameters":{"val1":{"taskOutputParameter":{"outputParameterKey":"val1","producerTask":"mantle"}},"val2":{"taskOutputParameter":{"outputParameterKey":"val2","producerTask":"mantle"}}}},"taskInfo":{"name":"crust-comp"}},"mantle":{"cachingOptions":{},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - multiple-parameters-namedtuple-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: 
{} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-6d13d1db365eee59a2cdc2d89bfaa4a0c32572a64b1743f50115e2614b7b60bd}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-core-comp"},"taskInfo":{"name":"core-comp"}}' + - name: container + value: '{{workflow.parameters.implementations-6d13d1db365eee59a2cdc2d89bfaa4a0c32572a64b1743f50115e2614b7b60bd}}' + - name: task-name + value: core-comp + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: core-comp-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.core-comp-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.core-comp-driver.outputs.parameters.cached-decision}}' + depends: core-comp-driver.Succeeded + name: core-comp + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-core + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - multiple-parameters-namedtuple-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: 
component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-core}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}' + - name: task-name + value: core + name: core-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.core-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.core-driver.outputs.parameters.condition}}' + depends: core-driver.Succeeded + name: core + template: comp-core + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-mantle + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-54461b7769754ecea4f755e24938448cd07c365048c2ee6e16271652b11205d8}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-crust-comp"},"dependentTasks":["mantle"],"inputs":{"parameters":{"val1":{"taskOutputParameter":{"outputParameterKey":"val1","producerTask":"mantle"}},"val2":{"taskOutputParameter":{"outputParameterKey":"val2","producerTask":"mantle"}}}},"taskInfo":{"name":"crust-comp"}}' + - name: container + value: '{{workflow.parameters.implementations-54461b7769754ecea4f755e24938448cd07c365048c2ee6e16271652b11205d8}}' + - name: task-name + value: crust-comp + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: mantle.Succeeded + name: crust-comp-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.crust-comp-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.crust-comp-driver.outputs.parameters.cached-decision}}' + depends: crust-comp-driver.Succeeded + name: crust-comp + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-mantle}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}' + - name: task-name + value: mantle + name: mantle-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.mantle-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.mantle-driver.outputs.parameters.condition}}' + depends: mantle-driver.Succeeded + name: mantle + template: comp-mantle + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: 
'{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/nested_pipeline_opt_input_child_level_compiled.yaml b/test_data/compiled-workflows/nested_pipeline_opt_input_child_level_compiled.yaml new file mode 100644 index 00000000000..c694cd97985 --- /dev/null +++ b/test_data/compiled-workflows/nested_pipeline_opt_input_child_level_compiled.yaml @@ -0,0 +1,599 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: nested-pipeline-opt-input-child-level- +spec: + arguments: + parameters: + - name: components-1592d3a8b70d65e90cbff58d5735d5d60ce6590d27348fa31fb79fd35cdadd33 + value: '{"executorLabel":"exec-component-a-bool","inputDefinitions":{"parameters":{"componentInputBool":{"isOptional":true,"parameterType":"BOOLEAN"}}}}' + - name: implementations-1592d3a8b70d65e90cbff58d5735d5d60ce6590d27348fa31fb79fd35cdadd33 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_a_bool"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + component_a_bool(componentInputBool: bool = None):\n if componentInputBool + != True:\n raise ValueError(f\"componentInputBool should be True but + is {componentInputBool}\")\n\n"],"image":"python:3.9"}' + - name: components-0750c052d89cc0d094b20f49a157c30478a4f9d958c44d579ec072576221664e + value: '{"executorLabel":"exec-component-a-int","inputDefinitions":{"parameters":{"componentInputInt":{"isOptional":true,"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-0750c052d89cc0d094b20f49a157c30478a4f9d958c44d579ec072576221664e + value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_a_int"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + component_a_int(componentInputInt: int = None):\n if componentInputInt + != 1:\n raise ValueError(f\"componentInputInt should be 1 but is {componentInputInt}\")\n\n"],"image":"python:3.9"}' + - name: components-7a7918ee3f5d6cb0b49dc19dfc1ce5915b1657ab8e4f1b5ebdc62dc7ec335694 + value: '{"executorLabel":"exec-component-a-str","inputDefinitions":{"parameters":{"componentInputStr":{"isOptional":true,"parameterType":"STRING"}}}}' + - name: implementations-7a7918ee3f5d6cb0b49dc19dfc1ce5915b1657ab8e4f1b5ebdc62dc7ec335694 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_a_str"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + component_a_str(componentInputStr: str = None):\n if componentInputStr + != ''Input - pipeline'':\n raise ValueError(f\"componentInputStr should + be ''Input - pipeline'' but is {componentInputStr}\")\n\n"],"image":"python:3.9"}' + - name: components-95008ca85079a1497c324a0e7fb96cb610e8a736dcf259de169d00e6435c22fe + value: '{"executorLabel":"exec-component-b-bool","inputDefinitions":{"parameters":{"componentInputBool":{"isOptional":true,"parameterType":"BOOLEAN"}}}}' + - name: implementations-95008ca85079a1497c324a0e7fb96cb610e8a736dcf259de169d00e6435c22fe + value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_b_bool"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + component_b_bool(componentInputBool: bool = None):\n if componentInputBool + != False:\n raise ValueError(f\"componentInputBool should be False + but is {componentInputBool}\")\n\n"],"image":"python:3.9"}' + - name: components-35127cd40fb981e7d7fbeaec732ca8402a0254b636d841b3482369aaa0f0529f + value: '{"executorLabel":"exec-component-b-int","inputDefinitions":{"parameters":{"componentInputInt":{"isOptional":true,"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-35127cd40fb981e7d7fbeaec732ca8402a0254b636d841b3482369aaa0f0529f + value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_b_int"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + component_b_int(componentInputInt: int = None):\n if componentInputInt + != 0:\n raise ValueError(f\"componentInputInt should be 0 but is {componentInputInt}\")\n\n"],"image":"python:3.9"}' + - name: components-f295cc45bba61d2a9dd59d4ee817c93c137713ba2b99356f4eb98907fbfde36e + value: '{"executorLabel":"exec-component-b-str","inputDefinitions":{"parameters":{"componentInputStr":{"isOptional":true,"parameterType":"STRING"}}}}' + - name: implementations-f295cc45bba61d2a9dd59d4ee817c93c137713ba2b99356f4eb98907fbfde36e + value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_b_str"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + component_b_str(componentInputStr: str = None):\n if componentInputStr + != ''Input 2 - nested pipeline'':\n raise ValueError(f\"componentInputStr + should be ''Input 2 - nested pipeline'' but is {componentInputStr}\")\n\n"],"image":"python:3.9"}' + - name: components-comp-nested-pipeline + value: '{"dag":{"tasks":{"component-a-bool":{"cachingOptions":{},"componentRef":{"name":"comp-component-a-bool"},"inputs":{"parameters":{"componentInputBool":{"componentInputParameter":"nestedInputBool1"}}},"taskInfo":{"name":"component-a-bool"}},"component-a-int":{"cachingOptions":{},"componentRef":{"name":"comp-component-a-int"},"inputs":{"parameters":{"componentInputInt":{"componentInputParameter":"nestedInputInt1"}}},"taskInfo":{"name":"component-a-int"}},"component-a-str":{"cachingOptions":{},"componentRef":{"name":"comp-component-a-str"},"inputs":{"parameters":{"componentInputStr":{"componentInputParameter":"nestedInputStr1"}}},"taskInfo":{"name":"component-a-str"}},"component-b-bool":{"cachingOptions":{},"componentRef":{"name":"comp-component-b-bool"},"inputs":{"parameters":{"componentInputBool":{"componentInputParameter":"nestedInputBool2"}}},"taskInfo":{"name":"component-b-bool"}},"component-b-int":{"cachingOptions":{},"componentRef":{"name":"comp-component-b-int"},"inputs":{"parameters":{"componentInputInt":{"componentInputParameter":"nestedInputInt2"}}},"taskInfo":{"name":"component-b-int"}},"component-b-str":{"cachingOptions":{},"componentRef":{"name":"comp-component-b-str"},"inputs":{"parameters":{"componentInputStr":{"componentInputParameter":"nestedInputStr2"}}},"taskInfo":{"name":"component-b-str"}}}},"inputDefinitions":{"parameters":{"nestedInputBool1":{"defaultValue":false,"isOptional":true,"parameterType":"BOOLEAN"},"nestedInputBool2":{"defaultValue":false,"isOptional":true,"parameterType":"BOOLEAN"},"nestedInputInt1":{"defaultValue":0,"isOptional":true,"parameterType":"NUMBER_INTEGER"},"nestedInputInt2":{"defaultValue":0,"isOptional":true,"parameterType":"NUMBER_INTEGER"},"nestedInputStr1":{"defaultValue":"Input + 1 - nested pipeline","isOptional":true,"parameterType":"STRING"},"nestedInputStr2":{"defaultValue":"Input + 2 - nested pipeline","isOptional":true,"parameterType":"STRING"}}}}' + - name: components-root + value: '{"dag":{"tasks":{"nested-pipeline":{"cachingOptions":{},"componentRef":{"name":"comp-nested-pipeline"},"inputs":{"parameters":{"nestedInputBool1":{"runtimeValue":{"constant":true}},"nestedInputInt1":{"runtimeValue":{"constant":1}},"nestedInputStr1":{"runtimeValue":{"constant":"Input + - pipeline"}}}},"taskInfo":{"name":"nested-pipeline"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - 
CONTAINER + - --pipeline_name + - nested-pipeline-opt-input-child-level + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: 
{} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-1592d3a8b70d65e90cbff58d5735d5d60ce6590d27348fa31fb79fd35cdadd33}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-component-a-bool"},"inputs":{"parameters":{"componentInputBool":{"componentInputParameter":"nestedInputBool1"}}},"taskInfo":{"name":"component-a-bool"}}' + - name: container + value: '{{workflow.parameters.implementations-1592d3a8b70d65e90cbff58d5735d5d60ce6590d27348fa31fb79fd35cdadd33}}' + - name: task-name + value: component-a-bool + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-a-bool-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component-a-bool-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component-a-bool-driver.outputs.parameters.cached-decision}}' + depends: component-a-bool-driver.Succeeded + name: component-a-bool + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-0750c052d89cc0d094b20f49a157c30478a4f9d958c44d579ec072576221664e}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-component-a-int"},"inputs":{"parameters":{"componentInputInt":{"componentInputParameter":"nestedInputInt1"}}},"taskInfo":{"name":"component-a-int"}}' + - name: container + value: '{{workflow.parameters.implementations-0750c052d89cc0d094b20f49a157c30478a4f9d958c44d579ec072576221664e}}' + - name: task-name + value: component-a-int + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-a-int-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component-a-int-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component-a-int-driver.outputs.parameters.cached-decision}}' + depends: component-a-int-driver.Succeeded + name: component-a-int + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-7a7918ee3f5d6cb0b49dc19dfc1ce5915b1657ab8e4f1b5ebdc62dc7ec335694}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-component-a-str"},"inputs":{"parameters":{"componentInputStr":{"componentInputParameter":"nestedInputStr1"}}},"taskInfo":{"name":"component-a-str"}}' + - name: container + value: '{{workflow.parameters.implementations-7a7918ee3f5d6cb0b49dc19dfc1ce5915b1657ab8e4f1b5ebdc62dc7ec335694}}' + - name: task-name + value: component-a-str + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-a-str-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component-a-str-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component-a-str-driver.outputs.parameters.cached-decision}}' + depends: 
component-a-str-driver.Succeeded + name: component-a-str + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-95008ca85079a1497c324a0e7fb96cb610e8a736dcf259de169d00e6435c22fe}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-component-b-bool"},"inputs":{"parameters":{"componentInputBool":{"componentInputParameter":"nestedInputBool2"}}},"taskInfo":{"name":"component-b-bool"}}' + - name: container + value: '{{workflow.parameters.implementations-95008ca85079a1497c324a0e7fb96cb610e8a736dcf259de169d00e6435c22fe}}' + - name: task-name + value: component-b-bool + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-b-bool-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component-b-bool-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component-b-bool-driver.outputs.parameters.cached-decision}}' + depends: component-b-bool-driver.Succeeded + name: component-b-bool + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-35127cd40fb981e7d7fbeaec732ca8402a0254b636d841b3482369aaa0f0529f}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-component-b-int"},"inputs":{"parameters":{"componentInputInt":{"componentInputParameter":"nestedInputInt2"}}},"taskInfo":{"name":"component-b-int"}}' + - name: container + value: '{{workflow.parameters.implementations-35127cd40fb981e7d7fbeaec732ca8402a0254b636d841b3482369aaa0f0529f}}' + - name: task-name + value: component-b-int + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-b-int-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component-b-int-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component-b-int-driver.outputs.parameters.cached-decision}}' + depends: component-b-int-driver.Succeeded + name: component-b-int + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-f295cc45bba61d2a9dd59d4ee817c93c137713ba2b99356f4eb98907fbfde36e}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-component-b-str"},"inputs":{"parameters":{"componentInputStr":{"componentInputParameter":"nestedInputStr2"}}},"taskInfo":{"name":"component-b-str"}}' + - name: container + value: '{{workflow.parameters.implementations-f295cc45bba61d2a9dd59d4ee817c93c137713ba2b99356f4eb98907fbfde36e}}' + - name: task-name + value: component-b-str + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-b-str-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component-b-str-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component-b-str-driver.outputs.parameters.cached-decision}}' + depends: component-b-str-driver.Succeeded + name: component-b-str + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-nested-pipeline + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - nested-pipeline-opt-input-child-level + - 
--run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-nested-pipeline}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-nested-pipeline"},"inputs":{"parameters":{"nestedInputBool1":{"runtimeValue":{"constant":true}},"nestedInputInt1":{"runtimeValue":{"constant":1}},"nestedInputStr1":{"runtimeValue":{"constant":"Input + - pipeline"}}}},"taskInfo":{"name":"nested-pipeline"}}' + - name: task-name + value: nested-pipeline + name: nested-pipeline-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.nested-pipeline-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.nested-pipeline-driver.outputs.parameters.condition}}' + depends: nested-pipeline-driver.Succeeded + name: nested-pipeline + template: comp-nested-pipeline + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/nested_pipeline_opt_inputs_nil_compiled.yaml 
b/test_data/compiled-workflows/nested_pipeline_opt_inputs_nil_compiled.yaml new file mode 100644 index 00000000000..0215127997c --- /dev/null +++ b/test_data/compiled-workflows/nested_pipeline_opt_inputs_nil_compiled.yaml @@ -0,0 +1,477 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: nested-pipeline-opt-inputs-nil- +spec: + arguments: + parameters: + - name: components-b5ace85a97ac2c9fe463181c3fa7619708a2480314d25cc045b43cddd03697ce + value: '{"executorLabel":"exec-component-bool","inputDefinitions":{"parameters":{"componentInput":{"isOptional":true,"parameterType":"BOOLEAN"}}}}' + - name: implementations-b5ace85a97ac2c9fe463181c3fa7619708a2480314d25cc045b43cddd03697ce + value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_bool"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + component_bool(componentInput: bool = None):\n if componentInput is not + None:\n raise ValueError(f\"componentInput should be None but is {componentInput}\")\n\n"],"image":"python:3.9"}' + - name: components-59f4469e2d42fbdeb573096da5dac9ff779db3814151f043995cab017ecab934 + value: '{"executorLabel":"exec-component-int","inputDefinitions":{"parameters":{"componentInput":{"isOptional":true,"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-59f4469e2d42fbdeb573096da5dac9ff779db3814151f043995cab017ecab934 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_int"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + component_int(componentInput: int = None):\n if componentInput is not None:\n raise + ValueError(f\"componentInput should be None but is {componentInput}\")\n\n"],"image":"python:3.9"}' + - name: components-023f29bcfa009ada5559e8172a659767b0cddfb1beb199ca609fb7d00cba5ce2 + value: '{"executorLabel":"exec-component-str","inputDefinitions":{"parameters":{"componentInput":{"isOptional":true,"parameterType":"STRING"}}}}' + - name: implementations-023f29bcfa009ada5559e8172a659767b0cddfb1beb199ca609fb7d00cba5ce2 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_str"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + component_str(componentInput: str = None):\n if componentInput is not None:\n raise + ValueError(f\"componentInput should be None but is {componentInput}\")\n\n"],"image":"python:3.9"}' + - name: components-comp-nested-pipeline + value: '{"dag":{"tasks":{"component-bool":{"cachingOptions":{},"componentRef":{"name":"comp-component-bool"},"inputs":{"parameters":{"componentInput":{"componentInputParameter":"nestedInputBool"}}},"taskInfo":{"name":"component-bool"}},"component-int":{"cachingOptions":{},"componentRef":{"name":"comp-component-int"},"inputs":{"parameters":{"componentInput":{"componentInputParameter":"nestedInputInt"}}},"taskInfo":{"name":"component-int"}},"component-str":{"cachingOptions":{},"componentRef":{"name":"comp-component-str"},"inputs":{"parameters":{"componentInput":{"componentInputParameter":"nestedInputStr"}}},"taskInfo":{"name":"component-str"}}}},"inputDefinitions":{"parameters":{"nestedInputBool":{"isOptional":true,"parameterType":"BOOLEAN"},"nestedInputInt":{"isOptional":true,"parameterType":"NUMBER_INTEGER"},"nestedInputStr":{"isOptional":true,"parameterType":"STRING"}}}}' + - name: components-root + value: '{"dag":{"tasks":{"nested-pipeline":{"cachingOptions":{},"componentRef":{"name":"comp-nested-pipeline"},"taskInfo":{"name":"nested-pipeline"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - nested-pipeline-opt-inputs-nil + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + 
memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-b5ace85a97ac2c9fe463181c3fa7619708a2480314d25cc045b43cddd03697ce}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-component-bool"},"inputs":{"parameters":{"componentInput":{"componentInputParameter":"nestedInputBool"}}},"taskInfo":{"name":"component-bool"}}' + - name: container + value: '{{workflow.parameters.implementations-b5ace85a97ac2c9fe463181c3fa7619708a2480314d25cc045b43cddd03697ce}}' + - name: task-name + value: component-bool + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-bool-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component-bool-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: 
'{{tasks.component-bool-driver.outputs.parameters.cached-decision}}' + depends: component-bool-driver.Succeeded + name: component-bool + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-59f4469e2d42fbdeb573096da5dac9ff779db3814151f043995cab017ecab934}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-component-int"},"inputs":{"parameters":{"componentInput":{"componentInputParameter":"nestedInputInt"}}},"taskInfo":{"name":"component-int"}}' + - name: container + value: '{{workflow.parameters.implementations-59f4469e2d42fbdeb573096da5dac9ff779db3814151f043995cab017ecab934}}' + - name: task-name + value: component-int + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-int-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component-int-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component-int-driver.outputs.parameters.cached-decision}}' + depends: component-int-driver.Succeeded + name: component-int + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-023f29bcfa009ada5559e8172a659767b0cddfb1beb199ca609fb7d00cba5ce2}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-component-str"},"inputs":{"parameters":{"componentInput":{"componentInputParameter":"nestedInputStr"}}},"taskInfo":{"name":"component-str"}}' + - name: container + value: '{{workflow.parameters.implementations-023f29bcfa009ada5559e8172a659767b0cddfb1beb199ca609fb7d00cba5ce2}}' + - name: task-name + value: component-str + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-str-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component-str-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component-str-driver.outputs.parameters.cached-decision}}' + depends: component-str-driver.Succeeded + name: component-str + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-nested-pipeline + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - nested-pipeline-opt-inputs-nil + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: 
ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-nested-pipeline}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-nested-pipeline"},"taskInfo":{"name":"nested-pipeline"}}' + - name: task-name + value: nested-pipeline + name: nested-pipeline-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.nested-pipeline-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.nested-pipeline-driver.outputs.parameters.condition}}' + depends: nested-pipeline-driver.Succeeded + name: nested-pipeline + template: comp-nested-pipeline + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/nested_pipeline_opt_inputs_parent_level_compiled.yaml b/test_data/compiled-workflows/nested_pipeline_opt_inputs_parent_level_compiled.yaml new file mode 100644 index 00000000000..4d8c534d10c --- /dev/null +++ b/test_data/compiled-workflows/nested_pipeline_opt_inputs_parent_level_compiled.yaml @@ -0,0 +1,631 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: nested-pipeline-opt-inputs-parent-level- +spec: + arguments: + parameters: + - name: components-4a30dca88fda7931eeb4feb50afec0eace855d5b8d3bbe5f2932c20743957731 + value: '{"executorLabel":"exec-component-nil-bool-default","inputDefinitions":{"parameters":{"componentInput":{"isOptional":true,"parameterType":"BOOLEAN"}}}}' + - name: implementations-4a30dca88fda7931eeb4feb50afec0eace855d5b8d3bbe5f2932c20743957731 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_nil_bool_default"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + component_nil_bool_default(componentInput: bool = None):\n if componentInput + != True:\n raise ValueError(f\"componentInput should be True but is + {componentInput}\")\n\n"],"image":"python:3.9"}' + - name: components-9df5116e23bd19dbe520aeb571b4e8e7d639536a628a8608988c6424e43e453e + value: '{"executorLabel":"exec-component-nil-int-default","inputDefinitions":{"parameters":{"componentInput":{"isOptional":true,"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-9df5116e23bd19dbe520aeb571b4e8e7d639536a628a8608988c6424e43e453e + value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_nil_int_default"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + component_nil_int_default(componentInput: int = None):\n if componentInput + != 1:\n raise ValueError(f\"componentInput should be 1 but is {componentInput}\")\n\n"],"image":"python:3.9"}' + - name: components-4767fec1d25c0d4ccee124435b615e8f733fad285ae87247472a210f2f5d359d + value: '{"executorLabel":"exec-component-nil-str-default","inputDefinitions":{"parameters":{"componentInput":{"isOptional":true,"parameterType":"STRING"}}}}' + - name: implementations-4767fec1d25c0d4ccee124435b615e8f733fad285ae87247472a210f2f5d359d + value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_nil_str_default"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + component_nil_str_default(componentInput: str = None):\n if componentInput + != ''Input - parent pipeline'':\n raise ValueError(f\"componentInput + should be ''Input - parent pipeline'' but is {componentInput}\")\n\n"],"image":"python:3.9"}' + - name: components-comp-nested-pipeline-nil-defaults + value: '{"dag":{"tasks":{"component-nil-bool-default":{"cachingOptions":{},"componentRef":{"name":"comp-component-nil-bool-default"},"inputs":{"parameters":{"componentInput":{"componentInputParameter":"nestedInputBool"}}},"taskInfo":{"name":"component-nil-bool-default"}},"component-nil-int-default":{"cachingOptions":{},"componentRef":{"name":"comp-component-nil-int-default"},"inputs":{"parameters":{"componentInput":{"componentInputParameter":"nestedInputInt"}}},"taskInfo":{"name":"component-nil-int-default"}},"component-nil-str-default":{"cachingOptions":{},"componentRef":{"name":"comp-component-nil-str-default"},"inputs":{"parameters":{"componentInput":{"componentInputParameter":"nestedInputStr"}}},"taskInfo":{"name":"component-nil-str-default"}}}},"inputDefinitions":{"parameters":{"nestedInputBool":{"isOptional":true,"parameterType":"BOOLEAN"},"nestedInputInt":{"isOptional":true,"parameterType":"NUMBER_INTEGER"},"nestedInputStr":{"defaultValue":"Input + - nested pipeline","isOptional":true,"parameterType":"STRING"}}}}' + - name: components-9fd2d7643bca745728790c89fea0cb45165152855e2d2c76db913d0a46987c90 + value: '{"executorLabel":"exec-component-bool-default","inputDefinitions":{"parameters":{"componentInput":{"defaultValue":false,"isOptional":true,"parameterType":"BOOLEAN"}}}}' + - name: implementations-9fd2d7643bca745728790c89fea0cb45165152855e2d2c76db913d0a46987c90 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_bool_default"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + component_bool_default(componentInput: bool = False):\n if componentInput + != True:\n raise ValueError(f\"componentInput should be True but is + {componentInput}\")\n\n"],"image":"python:3.9"}' + - name: components-f2229e13b10e8a9cba43225f9f37de307a58f77689b763ddca00e6c1a91ebeb9 + value: '{"executorLabel":"exec-component-int-default","inputDefinitions":{"parameters":{"componentInput":{"defaultValue":0,"isOptional":true,"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-f2229e13b10e8a9cba43225f9f37de307a58f77689b763ddca00e6c1a91ebeb9 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_int_default"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + component_int_default(componentInput: int = 0):\n if componentInput != + 1:\n raise ValueError(f\"componentInput should be 1 but is {componentInput}\")\n\n"],"image":"python:3.9"}' + - name: components-0a219828dd0af40be7391d96b166bbb7aea267342dde59869750e8b85c6306ff + value: '{"executorLabel":"exec-component-str-default","inputDefinitions":{"parameters":{"componentInput":{"defaultValue":"Input + - component","isOptional":true,"parameterType":"STRING"}}}}' + - name: implementations-0a219828dd0af40be7391d96b166bbb7aea267342dde59869750e8b85c6306ff + value: '{"args":["--executor_input","{{$}}","--function_to_execute","component_str_default"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + component_str_default(componentInput: str = ''Input - component''):\n if + componentInput != ''Input - parent pipeline'':\n raise ValueError(f\"componentInput + should be ''Input - parent pipeline'' but is {componentInput}\")\n\n"],"image":"python:3.9"}' + - name: components-comp-nested-pipeline-non-nil-defaults + value: '{"dag":{"tasks":{"component-bool-default":{"cachingOptions":{},"componentRef":{"name":"comp-component-bool-default"},"inputs":{"parameters":{"componentInput":{"componentInputParameter":"nestedInputBool"}}},"taskInfo":{"name":"component-bool-default"}},"component-int-default":{"cachingOptions":{},"componentRef":{"name":"comp-component-int-default"},"inputs":{"parameters":{"componentInput":{"componentInputParameter":"nestedInputInt"}}},"taskInfo":{"name":"component-int-default"}},"component-str-default":{"cachingOptions":{},"componentRef":{"name":"comp-component-str-default"},"inputs":{"parameters":{"componentInput":{"componentInputParameter":"nestedInputStr"}}},"taskInfo":{"name":"component-str-default"}}}},"inputDefinitions":{"parameters":{"nestedInputBool":{"defaultValue":false,"isOptional":true,"parameterType":"BOOLEAN"},"nestedInputInt":{"defaultValue":0,"isOptional":true,"parameterType":"NUMBER_INTEGER"},"nestedInputStr":{"defaultValue":"Input + - nested pipeline","isOptional":true,"parameterType":"STRING"}}}}' + - name: components-root + value: '{"dag":{"tasks":{"nested-pipeline-nil-defaults":{"cachingOptions":{},"componentRef":{"name":"comp-nested-pipeline-nil-defaults"},"inputs":{"parameters":{"nestedInputBool":{"componentInputParameter":"inputBool"},"nestedInputInt":{"componentInputParameter":"inputInt"},"nestedInputStr":{"componentInputParameter":"inputStr"}}},"taskInfo":{"name":"nested-pipeline-nil-defaults"}},"nested-pipeline-non-nil-defaults":{"cachingOptions":{},"componentRef":{"name":"comp-nested-pipeline-non-nil-defaults"},"inputs":{"parameters":{"nestedInputBool":{"componentInputParameter":"inputBool"},"nestedInputInt":{"componentInputParameter":"inputInt"},"nestedInputStr":{"componentInputParameter":"inputStr"}}},"taskInfo":{"name":"nested-pipeline-non-nil-defaults"}}}},"inputDefinitions":{"parameters":{"inputBool":{"defaultValue":true,"isOptional":true,"parameterType":"BOOLEAN"},"inputInt":{"defaultValue":1,"isOptional":true,"parameterType":"NUMBER_INTEGER"},"inputStr":{"defaultValue":"Input + - parent pipeline","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - nested-pipeline-opt-inputs-parent-level + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - 
--dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: 
gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-4a30dca88fda7931eeb4feb50afec0eace855d5b8d3bbe5f2932c20743957731}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-component-nil-bool-default"},"inputs":{"parameters":{"componentInput":{"componentInputParameter":"nestedInputBool"}}},"taskInfo":{"name":"component-nil-bool-default"}}' + - name: container + value: '{{workflow.parameters.implementations-4a30dca88fda7931eeb4feb50afec0eace855d5b8d3bbe5f2932c20743957731}}' + - name: task-name + value: component-nil-bool-default + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-nil-bool-default-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component-nil-bool-default-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component-nil-bool-default-driver.outputs.parameters.cached-decision}}' + depends: component-nil-bool-default-driver.Succeeded + name: component-nil-bool-default + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-9df5116e23bd19dbe520aeb571b4e8e7d639536a628a8608988c6424e43e453e}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-component-nil-int-default"},"inputs":{"parameters":{"componentInput":{"componentInputParameter":"nestedInputInt"}}},"taskInfo":{"name":"component-nil-int-default"}}' + - name: container + value: '{{workflow.parameters.implementations-9df5116e23bd19dbe520aeb571b4e8e7d639536a628a8608988c6424e43e453e}}' + - name: task-name + value: component-nil-int-default + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-nil-int-default-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component-nil-int-default-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component-nil-int-default-driver.outputs.parameters.cached-decision}}' + depends: component-nil-int-default-driver.Succeeded + name: component-nil-int-default + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-4767fec1d25c0d4ccee124435b615e8f733fad285ae87247472a210f2f5d359d}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-component-nil-str-default"},"inputs":{"parameters":{"componentInput":{"componentInputParameter":"nestedInputStr"}}},"taskInfo":{"name":"component-nil-str-default"}}' + - name: container + value: '{{workflow.parameters.implementations-4767fec1d25c0d4ccee124435b615e8f733fad285ae87247472a210f2f5d359d}}' + - name: task-name + value: component-nil-str-default + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-nil-str-default-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component-nil-str-default-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component-nil-str-default-driver.outputs.parameters.cached-decision}}' + depends: 
component-nil-str-default-driver.Succeeded + name: component-nil-str-default + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-nested-pipeline-nil-defaults + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-9fd2d7643bca745728790c89fea0cb45165152855e2d2c76db913d0a46987c90}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-component-bool-default"},"inputs":{"parameters":{"componentInput":{"componentInputParameter":"nestedInputBool"}}},"taskInfo":{"name":"component-bool-default"}}' + - name: container + value: '{{workflow.parameters.implementations-9fd2d7643bca745728790c89fea0cb45165152855e2d2c76db913d0a46987c90}}' + - name: task-name + value: component-bool-default + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-bool-default-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component-bool-default-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component-bool-default-driver.outputs.parameters.cached-decision}}' + depends: component-bool-default-driver.Succeeded + name: component-bool-default + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-f2229e13b10e8a9cba43225f9f37de307a58f77689b763ddca00e6c1a91ebeb9}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-component-int-default"},"inputs":{"parameters":{"componentInput":{"componentInputParameter":"nestedInputInt"}}},"taskInfo":{"name":"component-int-default"}}' + - name: container + value: '{{workflow.parameters.implementations-f2229e13b10e8a9cba43225f9f37de307a58f77689b763ddca00e6c1a91ebeb9}}' + - name: task-name + value: component-int-default + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-int-default-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component-int-default-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component-int-default-driver.outputs.parameters.cached-decision}}' + depends: component-int-default-driver.Succeeded + name: component-int-default + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-0a219828dd0af40be7391d96b166bbb7aea267342dde59869750e8b85c6306ff}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-component-str-default"},"inputs":{"parameters":{"componentInput":{"componentInputParameter":"nestedInputStr"}}},"taskInfo":{"name":"component-str-default"}}' + - name: container + value: '{{workflow.parameters.implementations-0a219828dd0af40be7391d96b166bbb7aea267342dde59869750e8b85c6306ff}}' + - name: task-name + value: component-str-default + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-str-default-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component-str-default-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component-str-default-driver.outputs.parameters.cached-decision}}' + depends: component-str-default-driver.Succeeded + name: component-str-default + 
template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-nested-pipeline-non-nil-defaults + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - nested-pipeline-opt-inputs-parent-level + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-nested-pipeline-nil-defaults}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-nested-pipeline-nil-defaults"},"inputs":{"parameters":{"nestedInputBool":{"componentInputParameter":"inputBool"},"nestedInputInt":{"componentInputParameter":"inputInt"},"nestedInputStr":{"componentInputParameter":"inputStr"}}},"taskInfo":{"name":"nested-pipeline-nil-defaults"}}' + - name: task-name + value: nested-pipeline-nil-defaults + name: nested-pipeline-nil-defaults-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.nested-pipeline-nil-defaults-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.nested-pipeline-nil-defaults-driver.outputs.parameters.condition}}' + depends: nested-pipeline-nil-defaults-driver.Succeeded + name: nested-pipeline-nil-defaults + template: comp-nested-pipeline-nil-defaults + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-nested-pipeline-non-nil-defaults}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: 
'{"cachingOptions":{},"componentRef":{"name":"comp-nested-pipeline-non-nil-defaults"},"inputs":{"parameters":{"nestedInputBool":{"componentInputParameter":"inputBool"},"nestedInputInt":{"componentInputParameter":"inputInt"},"nestedInputStr":{"componentInputParameter":"inputStr"}}},"taskInfo":{"name":"nested-pipeline-non-nil-defaults"}}' + - name: task-name + value: nested-pipeline-non-nil-defaults + name: nested-pipeline-non-nil-defaults-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.nested-pipeline-non-nil-defaults-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.nested-pipeline-non-nil-defaults-driver.outputs.parameters.condition}}' + depends: nested-pipeline-non-nil-defaults-driver.Succeeded + name: nested-pipeline-non-nil-defaults + template: comp-nested-pipeline-non-nil-defaults + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"inputBool":true,"inputInt":1,"inputStr":"Input + - parent pipeline"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/nested_return.yaml b/test_data/compiled-workflows/nested_return.yaml new file mode 100644 index 00000000000..d0b81562bf0 --- /dev/null +++ b/test_data/compiled-workflows/nested_return.yaml @@ -0,0 +1,370 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: nested-return- +spec: + arguments: + parameters: + - name: components-9206f02c3bc8c7317ae618f681a47d92020ce2a4461722611ba616d3ca7af069 + value: '{"executorLabel":"exec-nested-return","outputDefinitions":{"parameters":{"Output":{"parameterType":"LIST"}}}}' + - name: implementations-9206f02c3bc8c7317ae618f681a47d92020ce2a4461722611ba616d3ca7af069 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","nested_return"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + nested_return() -\u003e List[Dict[str, str]]:\n return [{''A_a'': ''1'', + ''B_b'': ''2''}, {''A_a'': ''10'', ''B_b'': ''20''}]\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"nested-return"}}}},"tasks":{"nested-return":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-nested-return"},"taskInfo":{"name":"nested-return"}}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"LIST"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - nested-return + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: 
system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-9206f02c3bc8c7317ae618f681a47d92020ce2a4461722611ba616d3ca7af069}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-nested-return"},"taskInfo":{"name":"nested-return"}}' + - name: container + value: '{{workflow.parameters.implementations-9206f02c3bc8c7317ae618f681a47d92020ce2a4461722611ba616d3ca7af069}}' + - name: task-name + value: nested-return + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: nested-return-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.nested-return-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.nested-return-driver.outputs.parameters.cached-decision}}' + depends: nested-return-driver.Succeeded + name: nested-return + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - nested-return + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - 
'{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/nested_with_parameters.yaml b/test_data/compiled-workflows/nested_with_parameters.yaml new file mode 100644 index 00000000000..85b2e46a26a --- /dev/null +++ b/test_data/compiled-workflows/nested_with_parameters.yaml @@ -0,0 +1,626 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: math-pipeline- +spec: + arguments: + parameters: + - name: components-278f161477820f25fd603ee488b03203504f1efb0101e60e3fae0c7f63cd9a61 + value: '{"executorLabel":"exec-add","inputDefinitions":{"parameters":{"nums":{"parameterType":"LIST"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-278f161477820f25fd603ee488b03203504f1efb0101e60e3fae0c7f63cd9a61 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","add"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + add(nums: List[List[int]]) -\u003e int:\n import itertools\n return + sum(itertools.chain(*nums))\n\n"],"image":"python:3.9"}' + - name: components-354039a8e11ca05821b8894c847eb4ea3388b9500e7db0411d4a9e68acfe16c6 + value: '{"executorLabel":"exec-add-two-nums","inputDefinitions":{"parameters":{"x":{"parameterType":"NUMBER_INTEGER"},"y":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-354039a8e11ca05821b8894c847eb4ea3388b9500e7db0411d4a9e68acfe16c6 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","add_two_nums"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + add_two_nums(x: int, y: int) -\u003e int:\n return x + y\n\n"],"image":"python:3.9"}' + - name: components-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44 + value: '{"executorLabel":"exec-double","inputDefinitions":{"parameters":{"num":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","double"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + double(num: int) -\u003e int:\n return 2 * num\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-4 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--add-two-nums-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"add-two-nums"}}}},"tasks":{"add-two-nums":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-two-nums"},"dependentTasks":["double","double-2"],"inputs":{"parameters":{"x":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"double"}},"y":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"double-2"}}}},"taskInfo":{"name":"add-two-nums"}},"double":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"taskInfo":{"name":"double"}},"double-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double-2"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-3"}}},"taskInfo":{"name":"double-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-1":{"parameterType":"NUMBER_INTEGER"},"pipelinechannel--loop-item-param-3":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"pipelinechannel--add-two-nums-Output":{"parameterType":"LIST"}}}}' + - name: components-comp-for-loop-2 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--add-two-nums-Output":{"valueFromParameter":{"outputParameterKey":"pipelinechannel--add-two-nums-Output","producerSubtask":"for-loop-4"}}}},"tasks":{"for-loop-4":{"componentRef":{"name":"comp-for-loop-4"},"inputs":{"parameters":{"pipelinechannel--loop-item-param-1":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-4"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-1":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"pipelinechannel--add-two-nums-Output":{"parameterType":"LIST"}}}}' + - name: components-root + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"pipelinechannel--add-two-nums-Output","producerSubtask":"for-loop-2"}}}},"tasks":{"add":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"nums":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--add-two-nums-Output","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"add"}},"for-loop-2":{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 
3]"}},"taskInfo":{"name":"for-loop-2"}}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"LIST"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - math-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - 
args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-354039a8e11ca05821b8894c847eb4ea3388b9500e7db0411d4a9e68acfe16c6}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-two-nums"},"dependentTasks":["double","double-2"],"inputs":{"parameters":{"x":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"double"}},"y":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"double-2"}}}},"taskInfo":{"name":"add-two-nums"}}' + - name: container + value: '{{workflow.parameters.implementations-354039a8e11ca05821b8894c847eb4ea3388b9500e7db0411d4a9e68acfe16c6}}' + - name: task-name + value: add-two-nums + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: double.Succeeded && double-2.Succeeded + name: add-two-nums-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.add-two-nums-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.add-two-nums-driver.outputs.parameters.cached-decision}}' + depends: add-two-nums-driver.Succeeded + name: add-two-nums + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"taskInfo":{"name":"double"}}' + - name: container + value: '{{workflow.parameters.implementations-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44}}' + - name: task-name + value: double + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: double-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.double-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.double-driver.outputs.parameters.cached-decision}}' + depends: double-driver.Succeeded + name: double + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double-2"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-3"}}},"taskInfo":{"name":"double-2"}}' + - name: container + value: '{{workflow.parameters.implementations-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44}}' + - 
name: task-name + value: double-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: double-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.double-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.double-2-driver.outputs.parameters.cached-decision}}' + depends: double-2-driver.Succeeded + name: double-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-4 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - math-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-4}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-4"},"inputs":{"parameters":{"pipelinechannel--loop-item-param-1":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-4"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-4 + inputs: + parameters: + - name: 
parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-4-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-4}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-4"},"inputs":{"parameters":{"pipelinechannel--loop-item-param-1":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-4"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-4-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-4-for-loop-4-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-4 + template: comp-for-loop-4-for-loop-4-iterator + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-2 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-2-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-2-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2-for-loop-2-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: 
'{{workflow.parameters.components-278f161477820f25fd603ee488b03203504f1efb0101e60e3fae0c7f63cd9a61}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"nums":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--add-two-nums-Output","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"add"}}' + - name: container + value: '{{workflow.parameters.implementations-278f161477820f25fd603ee488b03203504f1efb0101e60e3fae0c7f63cd9a61}}' + - name: task-name + value: add + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: for-loop-2.Succeeded + name: add-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.add-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.add-driver.outputs.parameters.cached-decision}}' + depends: add-driver.Succeeded + name: add + template: system-container-executor + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-2 + template: comp-for-loop-2-for-loop-2-iterator + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/notebook_component_mixed.yaml b/test_data/compiled-workflows/notebook_component_mixed.yaml new file mode 100644 index 00000000000..0f11209a324 --- /dev/null +++ b/test_data/compiled-workflows/notebook_component_mixed.yaml @@ -0,0 +1,648 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: nb-mixed- +spec: + arguments: + parameters: + - name: components-43c0d13fb84d9ca119bdf5124174bd3678698b085ea5c314339233390502736a + value: '{"executorLabel":"exec-evaluate-model","inputDefinitions":{"artifacts":{"model_text":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"}}}},"outputDefinitions":{"artifacts":{"metrics":{"artifactType":{"schemaTitle":"system.Metrics","schemaVersion":"0.0.1"}}}}}' + - name: implementations-43c0d13fb84d9ca119bdf5124174bd3678698b085ea5c314339233390502736a + value: '{"args":["--executor_input","{{$}}","--function_to_execute","evaluate_model"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''nbclient\u003e=0.10,\u003c1'' + ''ipykernel\u003e=6,\u003c7'' ''jupyter_client\u003e=7,\u003c9'' \u0026\u0026 python3 + -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\n__KFP_EMBEDDED_ARCHIVE_B64 + = ''H4sIACSp1mgC/+2Vy46bMBSGWecpLG9IJEogQGAiRequXXbR3WSEHGISN2BbtukkivLuNYQkk05HlarOqJfzLQz8x+f3wRfwx/74/Sey+0jJiirnVQhOvHQNgii63rd6GEzC0EE75w1otCHKDu/8n0xSVBtW03mYJll2lwVZ6meTOEzvBg7w78OXOf1KqrymRrFC+0zu+fIVzv80jp0gCqIkTK7XE2GcJk6YBNMkmUTTpD3/8TSNHBS85fmvpSKb6uV+P4v/pRwGCOGCVpXGM3RvHxA6dG0v52YvqQ3hQqwo9s4huqNFY5jgeSEabmwH3lTVJWx3E1kRQ6x+drOqIevrKL0miSK2N1UaX+SH/u548RONkY3pkh8uohaNKuiNIa5tlbZmujNojlx3wc+uJ8/e8Te/4S/UyWoplEFCe+iLFtzW6V2DQvs12dIVU3rojk0tx9tS5vak9vauh+iOaZOL7fyzaujoNr0/ynYCDq4uhKLuDJWVIGZYUT68ztBodLxNlIpxM1zgD8KgLhEtsId6u/sF7rQFfvhuvEdmNkhI6/2jYsfnL0v7nrZy97Etn9vJZnw9dxtTvsvcESIalbNb37Zpk/xVU8thb+OhcvRsVW3bTffzfYcrwtcNWdOc8VI82Y6Y223Xrrrcm43oDVujbjExX5ZC1aRd9vhGyGvGhbLyZHCEPyQAAAAAAAAAAAAAAAAAAAAAAAAAAADwp/ANkcfbmgAoAAA=''\n__KFP_NOTEBOOK_REL_PATH + = ''nb_eval_metrics.ipynb''\n\nimport base64 as __kfp_b64\nimport gzip as + __kfp_gzip\nimport io as __kfp_io\nimport os as __kfp_os\nimport sys as __kfp_sys\nimport + tarfile as __kfp_tarfile\nimport tempfile as __kfp_tempfile\nfrom nbclient + import NotebookClient\n\n# Extract embedded archive at import time to ensure + sys.path and globals are set\nprint(''[KFP] Extracting embedded notebook archive...'', + flush=True)\n__kfp_tmpdir = __kfp_tempfile.TemporaryDirectory()\n__KFP_EMBEDDED_ASSET_DIR + = __kfp_tmpdir.name\ntry:\n __kfp_bytes = __kfp_b64.b64decode(__KFP_EMBEDDED_ARCHIVE_B64.encode(''ascii''))\n with + __kfp_tarfile.open(fileobj=__kfp_io.BytesIO(__kfp_bytes), mode=''r:gz'') as + __kfp_tar:\n __kfp_tar.extractall(path=__KFP_EMBEDDED_ASSET_DIR)\n print(f''[KFP] + Notebook archive extracted to: {__KFP_EMBEDDED_ASSET_DIR}'', flush=True)\nexcept + Exception as __kfp_e:\n raise RuntimeError(f''Failed to extract embedded + notebook archive: {__kfp_e}'')\n\n# Always prepend the extracted directory + to sys.path for import resolution\nif __KFP_EMBEDDED_ASSET_DIR not in __kfp_sys.path:\n __kfp_sys.path.insert(0, + __KFP_EMBEDDED_ASSET_DIR)\n print(f''[KFP] Added notebook archive directory + to Python path'', flush=True)\n\n# Optional convenience for generic embedded + file variable name\n__KFP_EMBEDDED_ASSET_FILE = __kfp_os.path.join(__KFP_EMBEDDED_ASSET_DIR, + __KFP_NOTEBOOK_REL_PATH)\n\n\nclass KFPStreamingNotebookClient(NotebookClient):\n # + Streams outputs in real-time by emitting outputs during message processing.\n def + process_message(self, msg, cell, cell_index):\n # Call the parent implementation + to handle the message normally\n output = super().process_message(msg, + cell, cell_index)\n\n # If an output was created, stream it immediately\n if + output is not None:\n _kfp_stream_single_output(output, cell_index)\n\n return + output\n\ndef 
__kfp_write_parameters_cell(nb, params):\n \"\"\"Inject parameters + following Papermill semantics.\n\n - If a cell tagged with ''parameters'' + exists, insert an overriding\n ''injected-parameters'' cell immediately + after it.\n - Otherwise, insert the ''injected-parameters'' cell at the + top.\n \"\"\"\n import json\n\n import nbformat\n\n if not params:\n return\n\n # + Build the injected parameters cell\n assignments = []\n for key, value + in params.items():\n serialized = json.dumps(value)\n assignments.append(key + + '' = json.loads('' + repr(serialized) + '')'')\n source = ''import json\\n'' + + ''\\n''.join(assignments) + ''\\n''\n cell = nbformat.v4.new_code_cell(source=source)\n cell.metadata.setdefault(''tags'', + [])\n if ''injected-parameters'' not in cell.metadata[''tags'']:\n cell.metadata[''tags''].append(''injected-parameters'')\n\n # + Locate the first ''parameters'' tagged cell\n insert_idx = 0\n for idx, + existing in enumerate(nb.get(''cells'', [])):\n if existing.get(''cell_type'') + != ''code'':\n continue\n tags = existing.get(''metadata'', + {}).get(''tags'', []) or []\n if ''parameters'' in tags:\n insert_idx + = idx + 1\n break\n\n nb.cells.insert(insert_idx, cell)\n\ndef + _kfp_stream_single_output(output, cell_idx):\n \"\"\"Stream a single notebook + output immediately during execution.\n\n Prints stdout/stderr and text/plain + display outputs to the console so users\n see cell output as it happens + (no need to wait until the notebook finishes).\n \"\"\"\n import sys\n output_type + = output.get(''output_type'')\n\n if output_type == ''stream'':\n text + = output.get(''text'', '''')\n if text:\n try:\n print(f''[nb + cell {cell_idx} stream] '', end='''', flush=False)\n except Exception:\n pass\n print(text, + end='''' if text.endswith(''\\n'') else ''\\n'', flush=True)\n elif output_type + == ''error'':\n for line in output.get(''traceback'', []):\n print(line, + file=sys.stderr, flush=True)\n else:\n # Handle display_data and + execute_result\n data = output.get(''data'', {})\n if ''text/plain'' + in data:\n print(data[''text/plain''], flush=True)\n elif + ''application/json'' in data:\n try:\n import json + as __kfp_json\n parsed = data[''application/json'']\n # + Some kernels send JSON as string; try to parse if needed\n if + isinstance(parsed, str):\n try:\n parsed + = __kfp_json.loads(parsed)\n except Exception:\n pass\n print(__kfp_json.dumps(parsed, + indent=2, ensure_ascii=False), flush=True)\n except Exception:\n # + Fallback to raw\n print(str(data.get(''application/json'')), + flush=True)\n elif ''text/markdown'' in data:\n # Print + markdown as-is; frontends may render, logs will show raw markdown\n print(data[''text/markdown''], + flush=True)\n\ndef kfp_run_notebook(**kwargs):\n \"\"\"Execute the embedded + notebook with injected parameters.\n\n Parameters provided via kwargs are + injected into the notebook following\n Papermill semantics (after a parameters + cell if present, otherwise at top).\n Execution uses a Python kernel; nbclient + and ipykernel must be available at\n runtime (installed via packages_to_install + for notebook components).\n \"\"\"\n import os\n import subprocess\n import + sys\n\n from nbclient import NotebookClient\n import nbformat\n\n # + Ensure a usable ''python3'' kernel is present; install kernelspec if missing\n print(''[KFP + Notebook] Checking for Python kernel...'', flush=True)\n try:\n from + jupyter_client.kernelspec import KernelSpecManager # type: ignore\n ksm + = KernelSpecManager()\n have_py3 = 
''python3'' in ksm.find_kernel_specs()\n if + not have_py3:\n print(\n ''[KFP Notebook] Python3 + kernel not found, installing...'',\n flush=True)\n try:\n subprocess.run([\n sys.executable, + ''-m'', ''ipykernel'', ''install'', ''--user'',\n ''--name'', + ''python3'', ''--display-name'', ''Python 3''\n ],\n check=True,\n stdout=subprocess.DEVNULL,\n stderr=subprocess.DEVNULL)\n print(\n ''[KFP + Notebook] Python3 kernel installed successfully'',\n flush=True)\n except + subprocess.CalledProcessError as e:\n raise RuntimeError(\n \"Failed + to install ''python3'' kernelspec for ipykernel. \"\n \"Ensure + ipykernel is available in the environment or include it via packages_to_install. + \"\n f\"Error: {e}\") from e\n else:\n print(''[KFP + Notebook] Python3 kernel found'', flush=True)\n except ImportError as e:\n raise + RuntimeError(\n \"jupyter_client is not available. Ensure it''s + installed in the environment or include it via packages_to_install. \"\n f\"Error: + {e}\") from e\n\n nb_path = os.path.join(__KFP_EMBEDDED_ASSET_DIR, __KFP_NOTEBOOK_REL_PATH)\n\n try:\n nb + = nbformat.read(nb_path, as_version=4)\n except Exception as e:\n raise + RuntimeError(\n f''Failed to read notebook {nb_path}. Ensure it + is a valid Jupyter notebook. Error: {e}''\n ) from e\n\n try:\n __kfp_write_parameters_cell(nb, + kwargs)\n print(\n f''[KFP Notebook] Executing notebook + with {len(nb.get(\"cells\", []))} cells'',\n flush=True)\n\n # + Use our custom streaming client for real-time output (defined in the\n # + generated ephemeral source)\n client = KFPStreamingNotebookClient(\n nb,\n timeout=None,\n allow_errors=False,\n store_widget_state=False,\n kernel_name=''python3'')\n client.execute(cwd=__KFP_EMBEDDED_ASSET_DIR)\n\n print(''[KFP + Notebook] Execution complete'', flush=True)\n\n except Exception as e:\n raise + RuntimeError(f''Notebook execution failed. Error: {e}'') from e\n\n\n# Bind + helper into dsl namespace so user code can call dsl.run_notebook(...)\ndsl.run_notebook + = kfp_run_notebook\n\n\ndef evaluate_model(model_text: dsl.Input[dsl.Model], + metrics: dsl.Output[dsl.Metrics]):\n import json\n\n with open(model_text.path, + \"r\", encoding=\"utf-8\") as f:\n model_text = f.read()\n\n dsl.run_notebook(model_text=model_text)\n with + open(\"/tmp/kfp_nb_outputs/metrics.json\", \"r\", encoding=\"utf-8\") as f:\n metrics_dict + = json.load(f)\n\n assert metrics_dict == {\"score\": float(len(model_text))}\n\n for + metric_name, metric_value in metrics_dict.items():\n metrics.log_metric(metric_name, + metric_value)\n\n"],"image":"python:3.9"}' + - name: components-de426933840df1d5b1966245bfd35532f6a11ccaffa3dbb2311a6b7dbf3ec609 + value: '{"executorLabel":"exec-preprocess","inputDefinitions":{"parameters":{"text":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-de426933840df1d5b1966245bfd35532f6a11ccaffa3dbb2311a6b7dbf3ec609 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","preprocess"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\n\n\ndef + preprocess(text: str, dataset: dsl.Output[dsl.Dataset]):\n import re\n\n cleaned_text + = re.sub(r\"\\s+\", \" \", text).strip()\n with open(dataset.path, \"w\", + encoding=\"utf-8\") as f:\n f.write(cleaned_text)\n\n"],"image":"python:3.9"}' + - name: components-0e704495704924f0f854d4c513ac164f987781625d13c72aabf7e58c7fc1d08c + value: '{"executorLabel":"exec-train-model","inputDefinitions":{"artifacts":{"cleaned_text":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}},"outputDefinitions":{"artifacts":{"model":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"}}}}}' + - name: implementations-0e704495704924f0f854d4c513ac164f987781625d13c72aabf7e58c7fc1d08c + value: '{"args":["--executor_input","{{$}}","--function_to_execute","train_model"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''nbclient\u003e=0.10,\u003c1'' + ''ipykernel\u003e=6,\u003c7'' ''jupyter_client\u003e=7,\u003c9'' \u0026\u0026 python3 + -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\n__KFP_EMBEDDED_ARCHIVE_B64 + = ''H4sIACSp1mgC/+2VTY/aMBCGc+ZXWLkEJBqSQAJdCanH9tZDpR7KKjKJAxaJbTmTErTiv9cJ4auwWrXartR2noMT3onnneAZcEfu6MNnWn9kNGXa+iN4B567et54cr5vdN8L/MAitfUGVCVQbeyt/5NgRgrgBZv703A2ez/zosgNwsgfh0HPQv55xDIGTbmItxzWsaKaFqXL1U4sX3f+o4mZ8bE3Dv3wfD3gT73Q8kMvCsNgHAWBmf9wGkws4r3l/BdK03X+/HMvxf9SnnqE2AnL89J+IN/MB0Ke2rWTY9gpZkJ2IlNmD48hVrOkAi5FnMhKgHlAVHl+ChcMaEqBGv2YzahAV2eXTms7jgHTpX2SH7u7/SmfrEBV0G5+PImlrHTCrhLaSc6oYGkMrAYyJ06xI51EGslZiKPNwaSzeOVX/o3CeaGkBiJLU+HwLMvSLeiGpVyXfWcEhRptMhWboe0SO0PCal5CLDfzL7pig+vtSnMB/YX9VXPgYkVAkjtJRoV50dyFGhb2TwmaXwUiFRN33c8bTR3OtilGmC/NOM2dCrJ3M2dAaEmyh+ukzZK5W1MT618emFspxXR/MPilU9J0a7/QeN/L9igvNaPmVKwqumKf0mOaU3R/04H3zuymTLO2p3xbxsks5iKTF5XYwrR/4692sJZdwiZRa22LZSZ1QZtum1wJccGF1EYOenv8o0YQBEEQBEEQBEEQBEEQBEEQBEEQBEF+APurSW0AKAAA''\n__KFP_NOTEBOOK_REL_PATH + = ''nb_train_with_params.ipynb''\n\nimport base64 as __kfp_b64\nimport gzip + as __kfp_gzip\nimport io as __kfp_io\nimport os as __kfp_os\nimport sys as + __kfp_sys\nimport tarfile as __kfp_tarfile\nimport tempfile as __kfp_tempfile\nfrom + nbclient import NotebookClient\n\n# Extract embedded archive at import time + to ensure sys.path and globals are set\nprint(''[KFP] 
Extracting embedded + notebook archive...'', flush=True)\n__kfp_tmpdir = __kfp_tempfile.TemporaryDirectory()\n__KFP_EMBEDDED_ASSET_DIR + = __kfp_tmpdir.name\ntry:\n __kfp_bytes = __kfp_b64.b64decode(__KFP_EMBEDDED_ARCHIVE_B64.encode(''ascii''))\n with + __kfp_tarfile.open(fileobj=__kfp_io.BytesIO(__kfp_bytes), mode=''r:gz'') as + __kfp_tar:\n __kfp_tar.extractall(path=__KFP_EMBEDDED_ASSET_DIR)\n print(f''[KFP] + Notebook archive extracted to: {__KFP_EMBEDDED_ASSET_DIR}'', flush=True)\nexcept + Exception as __kfp_e:\n raise RuntimeError(f''Failed to extract embedded + notebook archive: {__kfp_e}'')\n\n# Always prepend the extracted directory + to sys.path for import resolution\nif __KFP_EMBEDDED_ASSET_DIR not in __kfp_sys.path:\n __kfp_sys.path.insert(0, + __KFP_EMBEDDED_ASSET_DIR)\n print(f''[KFP] Added notebook archive directory + to Python path'', flush=True)\n\n# Optional convenience for generic embedded + file variable name\n__KFP_EMBEDDED_ASSET_FILE = __kfp_os.path.join(__KFP_EMBEDDED_ASSET_DIR, + __KFP_NOTEBOOK_REL_PATH)\n\n\nclass KFPStreamingNotebookClient(NotebookClient):\n # + Streams outputs in real-time by emitting outputs during message processing.\n def + process_message(self, msg, cell, cell_index):\n # Call the parent implementation + to handle the message normally\n output = super().process_message(msg, + cell, cell_index)\n\n # If an output was created, stream it immediately\n if + output is not None:\n _kfp_stream_single_output(output, cell_index)\n\n return + output\n\ndef __kfp_write_parameters_cell(nb, params):\n \"\"\"Inject parameters + following Papermill semantics.\n\n - If a cell tagged with ''parameters'' + exists, insert an overriding\n ''injected-parameters'' cell immediately + after it.\n - Otherwise, insert the ''injected-parameters'' cell at the + top.\n \"\"\"\n import json\n\n import nbformat\n\n if not params:\n return\n\n # + Build the injected parameters cell\n assignments = []\n for key, value + in params.items():\n serialized = json.dumps(value)\n assignments.append(key + + '' = json.loads('' + repr(serialized) + '')'')\n source = ''import json\\n'' + + ''\\n''.join(assignments) + ''\\n''\n cell = nbformat.v4.new_code_cell(source=source)\n cell.metadata.setdefault(''tags'', + [])\n if ''injected-parameters'' not in cell.metadata[''tags'']:\n cell.metadata[''tags''].append(''injected-parameters'')\n\n # + Locate the first ''parameters'' tagged cell\n insert_idx = 0\n for idx, + existing in enumerate(nb.get(''cells'', [])):\n if existing.get(''cell_type'') + != ''code'':\n continue\n tags = existing.get(''metadata'', + {}).get(''tags'', []) or []\n if ''parameters'' in tags:\n insert_idx + = idx + 1\n break\n\n nb.cells.insert(insert_idx, cell)\n\ndef + _kfp_stream_single_output(output, cell_idx):\n \"\"\"Stream a single notebook + output immediately during execution.\n\n Prints stdout/stderr and text/plain + display outputs to the console so users\n see cell output as it happens + (no need to wait until the notebook finishes).\n \"\"\"\n import sys\n output_type + = output.get(''output_type'')\n\n if output_type == ''stream'':\n text + = output.get(''text'', '''')\n if text:\n try:\n print(f''[nb + cell {cell_idx} stream] '', end='''', flush=False)\n except Exception:\n pass\n print(text, + end='''' if text.endswith(''\\n'') else ''\\n'', flush=True)\n elif output_type + == ''error'':\n for line in output.get(''traceback'', []):\n print(line, + file=sys.stderr, flush=True)\n else:\n # Handle display_data and + execute_result\n data = 
output.get(''data'', {})\n if ''text/plain'' + in data:\n print(data[''text/plain''], flush=True)\n elif + ''application/json'' in data:\n try:\n import json + as __kfp_json\n parsed = data[''application/json'']\n # + Some kernels send JSON as string; try to parse if needed\n if + isinstance(parsed, str):\n try:\n parsed + = __kfp_json.loads(parsed)\n except Exception:\n pass\n print(__kfp_json.dumps(parsed, + indent=2, ensure_ascii=False), flush=True)\n except Exception:\n # + Fallback to raw\n print(str(data.get(''application/json'')), + flush=True)\n elif ''text/markdown'' in data:\n # Print + markdown as-is; frontends may render, logs will show raw markdown\n print(data[''text/markdown''], + flush=True)\n\ndef kfp_run_notebook(**kwargs):\n \"\"\"Execute the embedded + notebook with injected parameters.\n\n Parameters provided via kwargs are + injected into the notebook following\n Papermill semantics (after a parameters + cell if present, otherwise at top).\n Execution uses a Python kernel; nbclient + and ipykernel must be available at\n runtime (installed via packages_to_install + for notebook components).\n \"\"\"\n import os\n import subprocess\n import + sys\n\n from nbclient import NotebookClient\n import nbformat\n\n # + Ensure a usable ''python3'' kernel is present; install kernelspec if missing\n print(''[KFP + Notebook] Checking for Python kernel...'', flush=True)\n try:\n from + jupyter_client.kernelspec import KernelSpecManager # type: ignore\n ksm + = KernelSpecManager()\n have_py3 = ''python3'' in ksm.find_kernel_specs()\n if + not have_py3:\n print(\n ''[KFP Notebook] Python3 + kernel not found, installing...'',\n flush=True)\n try:\n subprocess.run([\n sys.executable, + ''-m'', ''ipykernel'', ''install'', ''--user'',\n ''--name'', + ''python3'', ''--display-name'', ''Python 3''\n ],\n check=True,\n stdout=subprocess.DEVNULL,\n stderr=subprocess.DEVNULL)\n print(\n ''[KFP + Notebook] Python3 kernel installed successfully'',\n flush=True)\n except + subprocess.CalledProcessError as e:\n raise RuntimeError(\n \"Failed + to install ''python3'' kernelspec for ipykernel. \"\n \"Ensure + ipykernel is available in the environment or include it via packages_to_install. + \"\n f\"Error: {e}\") from e\n else:\n print(''[KFP + Notebook] Python3 kernel found'', flush=True)\n except ImportError as e:\n raise + RuntimeError(\n \"jupyter_client is not available. Ensure it''s + installed in the environment or include it via packages_to_install. \"\n f\"Error: + {e}\") from e\n\n nb_path = os.path.join(__KFP_EMBEDDED_ASSET_DIR, __KFP_NOTEBOOK_REL_PATH)\n\n try:\n nb + = nbformat.read(nb_path, as_version=4)\n except Exception as e:\n raise + RuntimeError(\n f''Failed to read notebook {nb_path}. Ensure it + is a valid Jupyter notebook. Error: {e}''\n ) from e\n\n try:\n __kfp_write_parameters_cell(nb, + kwargs)\n print(\n f''[KFP Notebook] Executing notebook + with {len(nb.get(\"cells\", []))} cells'',\n flush=True)\n\n # + Use our custom streaming client for real-time output (defined in the\n # + generated ephemeral source)\n client = KFPStreamingNotebookClient(\n nb,\n timeout=None,\n allow_errors=False,\n store_widget_state=False,\n kernel_name=''python3'')\n client.execute(cwd=__KFP_EMBEDDED_ASSET_DIR)\n\n print(''[KFP + Notebook] Execution complete'', flush=True)\n\n except Exception as e:\n raise + RuntimeError(f''Notebook execution failed. 
Error: {e}'') from e\n\n\n# Bind + helper into dsl namespace so user code can call dsl.run_notebook(...)\ndsl.run_notebook + = kfp_run_notebook\n\n\ndef train_model(cleaned_text: dsl.Input[dsl.Dataset], + model: dsl.Output[dsl.Model]):\n import shutil\n\n with open(cleaned_text.path, + \"r\", encoding=\"utf-8\") as f:\n cleaned_text = f.read()\n\n dsl.run_notebook(cleaned_text=cleaned_text)\n\n # + Notebook writes its model into /tmp/kfp_nb_outputs/model.txt\n shutil.copy(\"/tmp/kfp_nb_outputs/model.txt\", + model.path)\n\n with open(model.path, \"r\", encoding=\"utf-8\") as f:\n model_text + = f.read()\n\n assert model_text == cleaned_text.upper()\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"evaluate-model":{"cachingOptions":{},"componentRef":{"name":"comp-evaluate-model"},"dependentTasks":["train-model"],"inputs":{"artifacts":{"model_text":{"taskOutputArtifact":{"outputArtifactKey":"model","producerTask":"train-model"}}}},"taskInfo":{"name":"evaluate-model"}},"preprocess":{"cachingOptions":{},"componentRef":{"name":"comp-preprocess"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"preprocess"}},"train-model":{"cachingOptions":{},"componentRef":{"name":"comp-train-model"},"dependentTasks":["preprocess"],"inputs":{"artifacts":{"cleaned_text":{"taskOutputArtifact":{"outputArtifactKey":"dataset","producerTask":"preprocess"}}}},"taskInfo":{"name":"train-model"}}}},"inputDefinitions":{"parameters":{"text":{"defaultValue":"Hello world","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - nb-mixed + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - 
default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-43c0d13fb84d9ca119bdf5124174bd3678698b085ea5c314339233390502736a}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-evaluate-model"},"dependentTasks":["train-model"],"inputs":{"artifacts":{"model_text":{"taskOutputArtifact":{"outputArtifactKey":"model","producerTask":"train-model"}}}},"taskInfo":{"name":"evaluate-model"}}' + - name: container + value: '{{workflow.parameters.implementations-43c0d13fb84d9ca119bdf5124174bd3678698b085ea5c314339233390502736a}}' + - name: task-name + value: evaluate-model + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: train-model.Succeeded + name: evaluate-model-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.evaluate-model-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.evaluate-model-driver.outputs.parameters.cached-decision}}' + depends: evaluate-model-driver.Succeeded + name: evaluate-model + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-de426933840df1d5b1966245bfd35532f6a11ccaffa3dbb2311a6b7dbf3ec609}}' + - name: task + value: 
'{"cachingOptions":{},"componentRef":{"name":"comp-preprocess"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"preprocess"}}' + - name: container + value: '{{workflow.parameters.implementations-de426933840df1d5b1966245bfd35532f6a11ccaffa3dbb2311a6b7dbf3ec609}}' + - name: task-name + value: preprocess + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: preprocess-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.preprocess-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.preprocess-driver.outputs.parameters.cached-decision}}' + depends: preprocess-driver.Succeeded + name: preprocess + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-0e704495704924f0f854d4c513ac164f987781625d13c72aabf7e58c7fc1d08c}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-train-model"},"dependentTasks":["preprocess"],"inputs":{"artifacts":{"cleaned_text":{"taskOutputArtifact":{"outputArtifactKey":"dataset","producerTask":"preprocess"}}}},"taskInfo":{"name":"train-model"}}' + - name: container + value: '{{workflow.parameters.implementations-0e704495704924f0f854d4c513ac164f987781625d13c72aabf7e58c7fc1d08c}}' + - name: task-name + value: train-model + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: preprocess.Succeeded + name: train-model-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.train-model-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.train-model-driver.outputs.parameters.cached-decision}}' + depends: train-model-driver.Succeeded + name: train-model + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - nb-mixed + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + 
name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"text":"Hello world"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/notebook_component_simple.yaml b/test_data/compiled-workflows/notebook_component_simple.yaml new file mode 100644 index 00000000000..a9af9dfe367 --- /dev/null +++ b/test_data/compiled-workflows/notebook_component_simple.yaml @@ -0,0 +1,467 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: nb-simple- +spec: + arguments: + parameters: + - name: components-6cb96369cb79941c813048ffcedc1b2b4dc48a280367899eadca221221c54b92 + value: '{"executorLabel":"exec-run-train-notebook","inputDefinitions":{"parameters":{"text":{"parameterType":"STRING"}}}}' + - name: implementations-6cb96369cb79941c813048ffcedc1b2b4dc48a280367899eadca221221c54b92 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","run_train_notebook"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''nbclient\u003e=0.10,\u003c1'' + ''ipykernel\u003e=6,\u003c7'' ''jupyter_client\u003e=7,\u003c9'' \u0026\u0026 python3 + -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\n__KFP_EMBEDDED_ARCHIVE_B64 + = ''H4sIACap1mgC/+2VTW+jMBCGOedXWFxI1S7fJLRSpD3ucQ8r7WFTIQdMYgVsZMYKUdT/voZSKqpWe2pXu51HyIZ3PB/yaITrud7X77T7xmjBlPUu+I+8tft+FD+/93rgh0Fokc76AHQLVJn01uckTEkNvGabYJ2k6W3qJ4kb366DIEoWFvLfI3YZKMpF1vK6qZjLm7PYvcP8r2Iz45EfJUHyvD8SRKFvBYm/SpIwWvlrM//xah1ZxP/I+a8bRQ/V2+f+ZP9HuSwIsXNWVa19R36ZD0IuwzrKGZwbZkx2Lgtm3zyZWMdyDVyKLJdagDkgdFVN5poBLShQo18eJlVqaDQMie4nsZVa5WxKPmjAOgjJhjgFK6muIHTs0Xg/7GPIv1+omRmpgMh2K6aUvX/r1vTICq7apeNB3XjHssnMqI2BnRvCOt5CJo+bH0qzq7l7o7iA5db+qThwsScgyStBvEruXehga79wP3E4ENkw8WruJzdTg3PqCxHmwkyWjaOh/JI6V4S2pLybh+yX0j2Zetiybw65Jlu7f67J0Ku+gnmHzDpc3eyGB5NdUbHXdM8yLko5ycYgaD10sDnDQY4B+0BDY2yxK6Wqad/CeCZkNRdSGTlcPOA/C0EQBEEQBEEQBEEQBEEQBEEQBEEQBPmU/AbrYhlkACgAAA==''\n__KFP_NOTEBOOK_REL_PATH + = ''nb_train_simple.ipynb''\n\nimport base64 as __kfp_b64\nimport gzip as + __kfp_gzip\nimport io as __kfp_io\nimport os as __kfp_os\nimport sys as __kfp_sys\nimport + tarfile as __kfp_tarfile\nimport tempfile as __kfp_tempfile\nfrom nbclient + import NotebookClient\n\n# Extract embedded archive at import time to ensure + sys.path and globals are set\nprint(''[KFP] Extracting embedded notebook archive...'', + flush=True)\n__kfp_tmpdir = __kfp_tempfile.TemporaryDirectory()\n__KFP_EMBEDDED_ASSET_DIR + = __kfp_tmpdir.name\ntry:\n __kfp_bytes = __kfp_b64.b64decode(__KFP_EMBEDDED_ARCHIVE_B64.encode(''ascii''))\n with + __kfp_tarfile.open(fileobj=__kfp_io.BytesIO(__kfp_bytes), mode=''r:gz'') as + __kfp_tar:\n __kfp_tar.extractall(path=__KFP_EMBEDDED_ASSET_DIR)\n print(f''[KFP] + Notebook archive extracted to: {__KFP_EMBEDDED_ASSET_DIR}'', flush=True)\nexcept + Exception as __kfp_e:\n raise RuntimeError(f''Failed to extract embedded + notebook archive: {__kfp_e}'')\n\n# Always prepend the extracted directory + to sys.path for import resolution\nif __KFP_EMBEDDED_ASSET_DIR not in __kfp_sys.path:\n __kfp_sys.path.insert(0, + __KFP_EMBEDDED_ASSET_DIR)\n print(f''[KFP] Added notebook archive directory + to Python path'', flush=True)\n\n# Optional convenience for generic embedded + file variable name\n__KFP_EMBEDDED_ASSET_FILE = __kfp_os.path.join(__KFP_EMBEDDED_ASSET_DIR, + __KFP_NOTEBOOK_REL_PATH)\n\n\nclass KFPStreamingNotebookClient(NotebookClient):\n # + Streams outputs in real-time by emitting outputs during message processing.\n def + process_message(self, msg, cell, cell_index):\n # Call the parent implementation + to handle the message normally\n output = super().process_message(msg, + cell, cell_index)\n\n # If an output was created, stream it immediately\n if + output is not None:\n _kfp_stream_single_output(output, cell_index)\n\n return + output\n\ndef __kfp_write_parameters_cell(nb, params):\n \"\"\"Inject parameters + 
following Papermill semantics.\n\n - If a cell tagged with ''parameters'' + exists, insert an overriding\n ''injected-parameters'' cell immediately + after it.\n - Otherwise, insert the ''injected-parameters'' cell at the + top.\n \"\"\"\n import json\n\n import nbformat\n\n if not params:\n return\n\n # + Build the injected parameters cell\n assignments = []\n for key, value + in params.items():\n serialized = json.dumps(value)\n assignments.append(key + + '' = json.loads('' + repr(serialized) + '')'')\n source = ''import json\\n'' + + ''\\n''.join(assignments) + ''\\n''\n cell = nbformat.v4.new_code_cell(source=source)\n cell.metadata.setdefault(''tags'', + [])\n if ''injected-parameters'' not in cell.metadata[''tags'']:\n cell.metadata[''tags''].append(''injected-parameters'')\n\n # + Locate the first ''parameters'' tagged cell\n insert_idx = 0\n for idx, + existing in enumerate(nb.get(''cells'', [])):\n if existing.get(''cell_type'') + != ''code'':\n continue\n tags = existing.get(''metadata'', + {}).get(''tags'', []) or []\n if ''parameters'' in tags:\n insert_idx + = idx + 1\n break\n\n nb.cells.insert(insert_idx, cell)\n\ndef + _kfp_stream_single_output(output, cell_idx):\n \"\"\"Stream a single notebook + output immediately during execution.\n\n Prints stdout/stderr and text/plain + display outputs to the console so users\n see cell output as it happens + (no need to wait until the notebook finishes).\n \"\"\"\n import sys\n output_type + = output.get(''output_type'')\n\n if output_type == ''stream'':\n text + = output.get(''text'', '''')\n if text:\n try:\n print(f''[nb + cell {cell_idx} stream] '', end='''', flush=False)\n except Exception:\n pass\n print(text, + end='''' if text.endswith(''\\n'') else ''\\n'', flush=True)\n elif output_type + == ''error'':\n for line in output.get(''traceback'', []):\n print(line, + file=sys.stderr, flush=True)\n else:\n # Handle display_data and + execute_result\n data = output.get(''data'', {})\n if ''text/plain'' + in data:\n print(data[''text/plain''], flush=True)\n elif + ''application/json'' in data:\n try:\n import json + as __kfp_json\n parsed = data[''application/json'']\n # + Some kernels send JSON as string; try to parse if needed\n if + isinstance(parsed, str):\n try:\n parsed + = __kfp_json.loads(parsed)\n except Exception:\n pass\n print(__kfp_json.dumps(parsed, + indent=2, ensure_ascii=False), flush=True)\n except Exception:\n # + Fallback to raw\n print(str(data.get(''application/json'')), + flush=True)\n elif ''text/markdown'' in data:\n # Print + markdown as-is; frontends may render, logs will show raw markdown\n print(data[''text/markdown''], + flush=True)\n\ndef kfp_run_notebook(**kwargs):\n \"\"\"Execute the embedded + notebook with injected parameters.\n\n Parameters provided via kwargs are + injected into the notebook following\n Papermill semantics (after a parameters + cell if present, otherwise at top).\n Execution uses a Python kernel; nbclient + and ipykernel must be available at\n runtime (installed via packages_to_install + for notebook components).\n \"\"\"\n import os\n import subprocess\n import + sys\n\n from nbclient import NotebookClient\n import nbformat\n\n # + Ensure a usable ''python3'' kernel is present; install kernelspec if missing\n print(''[KFP + Notebook] Checking for Python kernel...'', flush=True)\n try:\n from + jupyter_client.kernelspec import KernelSpecManager # type: ignore\n ksm + = KernelSpecManager()\n have_py3 = ''python3'' in ksm.find_kernel_specs()\n if + not have_py3:\n print(\n ''[KFP 
Notebook] Python3 + kernel not found, installing...'',\n flush=True)\n try:\n subprocess.run([\n sys.executable, + ''-m'', ''ipykernel'', ''install'', ''--user'',\n ''--name'', + ''python3'', ''--display-name'', ''Python 3''\n ],\n check=True,\n stdout=subprocess.DEVNULL,\n stderr=subprocess.DEVNULL)\n print(\n ''[KFP + Notebook] Python3 kernel installed successfully'',\n flush=True)\n except + subprocess.CalledProcessError as e:\n raise RuntimeError(\n \"Failed + to install ''python3'' kernelspec for ipykernel. \"\n \"Ensure + ipykernel is available in the environment or include it via packages_to_install. + \"\n f\"Error: {e}\") from e\n else:\n print(''[KFP + Notebook] Python3 kernel found'', flush=True)\n except ImportError as e:\n raise + RuntimeError(\n \"jupyter_client is not available. Ensure it''s + installed in the environment or include it via packages_to_install. \"\n f\"Error: + {e}\") from e\n\n nb_path = os.path.join(__KFP_EMBEDDED_ASSET_DIR, __KFP_NOTEBOOK_REL_PATH)\n\n try:\n nb + = nbformat.read(nb_path, as_version=4)\n except Exception as e:\n raise + RuntimeError(\n f''Failed to read notebook {nb_path}. Ensure it + is a valid Jupyter notebook. Error: {e}''\n ) from e\n\n try:\n __kfp_write_parameters_cell(nb, + kwargs)\n print(\n f''[KFP Notebook] Executing notebook + with {len(nb.get(\"cells\", []))} cells'',\n flush=True)\n\n # + Use our custom streaming client for real-time output (defined in the\n # + generated ephemeral source)\n client = KFPStreamingNotebookClient(\n nb,\n timeout=None,\n allow_errors=False,\n store_widget_state=False,\n kernel_name=''python3'')\n client.execute(cwd=__KFP_EMBEDDED_ASSET_DIR)\n\n print(''[KFP + Notebook] Execution complete'', flush=True)\n\n except Exception as e:\n raise + RuntimeError(f''Notebook execution failed. 
Error: {e}'') from e\n\n\n# Bind + helper into dsl namespace so user code can call dsl.run_notebook(...)\ndsl.run_notebook + = kfp_run_notebook\n\n\ndef run_train_notebook(text: str):\n # text is + not defined in the notebook but text2 is defined\n dsl.run_notebook(text=text)\n\n with + open(\"/tmp/kfp_nb_outputs/log.txt\", \"r\", encoding=\"utf-8\") as f:\n log + = f.read()\n\n assert log == text + \" \" + \"default2\"\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"run-train-notebook":{"cachingOptions":{},"componentRef":{"name":"comp-run-train-notebook"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"run-train-notebook"}}}},"inputDefinitions":{"parameters":{"text":{"defaultValue":"hello","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - nb-simple + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + 
fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-6cb96369cb79941c813048ffcedc1b2b4dc48a280367899eadca221221c54b92}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-run-train-notebook"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"run-train-notebook"}}' + - name: container + value: '{{workflow.parameters.implementations-6cb96369cb79941c813048ffcedc1b2b4dc48a280367899eadca221221c54b92}}' + - name: task-name + value: run-train-notebook + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: run-train-notebook-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.run-train-notebook-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.run-train-notebook-driver.outputs.parameters.cached-decision}}' + depends: run-train-notebook-driver.Succeeded + name: run-train-notebook + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - nb-simple + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - 
--mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"text":"hello"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/output_metrics.yaml b/test_data/compiled-workflows/output_metrics.yaml new file mode 100644 index 00000000000..91f6968b262 --- /dev/null +++ b/test_data/compiled-workflows/output_metrics.yaml @@ -0,0 +1,371 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: output-metrics- +spec: + arguments: + parameters: + - name: components-33c6ad9ab035611571f521208ada9a5231b3729ee087168ce0eeb8765d17fcc4 + value: '{"executorLabel":"exec-output-metrics","outputDefinitions":{"artifacts":{"metrics":{"artifactType":{"schemaTitle":"system.Metrics","schemaVersion":"0.0.1"}}}}}' + - name: implementations-33c6ad9ab035611571f521208ada9a5231b3729ee087168ce0eeb8765d17fcc4 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","output_metrics"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.9.0'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + output_metrics(metrics: Output[Metrics]):\n \"\"\"Dummy component that + outputs metrics with a random accuracy.\"\"\"\n import random\n result + = random.randint(0, 100)\n metrics.log_metric(''accuracy'', result)\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"outputs":{"artifacts":{"metrics":{"artifactSelectors":[{"outputArtifactKey":"metrics","producerSubtask":"output-metrics"}]}}},"tasks":{"output-metrics":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-output-metrics"},"taskInfo":{"name":"output-metrics"}}}},"outputDefinitions":{"artifacts":{"metrics":{"artifactType":{"schemaTitle":"system.Metrics","schemaVersion":"0.0.1"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - output-metrics + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + 
tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-33c6ad9ab035611571f521208ada9a5231b3729ee087168ce0eeb8765d17fcc4}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-output-metrics"},"taskInfo":{"name":"output-metrics"}}' + - name: container + value: '{{workflow.parameters.implementations-33c6ad9ab035611571f521208ada9a5231b3729ee087168ce0eeb8765d17fcc4}}' + - name: task-name + value: output-metrics + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: output-metrics-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.output-metrics-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.output-metrics-driver.outputs.parameters.cached-decision}}' + depends: output-metrics-driver.Succeeded + name: output-metrics + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - output-metrics + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - 
'{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/parallel_for_after_dependency.yaml b/test_data/compiled-workflows/parallel_for_after_dependency.yaml new file mode 100644 index 00000000000..a5add3785c8 --- /dev/null +++ b/test_data/compiled-workflows/parallel_for_after_dependency.yaml @@ -0,0 +1,497 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: loop-with-after-dependency-set- +spec: + arguments: + parameters: + - name: components-d58719d25d427e9e486606802de7140a76dc85a7da1c1753cd07c1433e7a90a5 + value: '{"executorLabel":"exec-print-op","inputDefinitions":{"parameters":{"message":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-d58719d25d427e9e486606802de7140a76dc85a7da1c1753cd07c1433e7a90a5 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_op"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_op(message: str) -\u003e str:\n print(message)\n return message\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-2 + value: '{"dag":{"tasks":{"print-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"foo"}}}},"taskInfo":{"name":"print-op"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-1":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-root + value: '{"dag":{"tasks":{"for-loop-2":{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}},"print-op-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"bar"}}}},"taskInfo":{"name":"print-op-2"}},"print-op-3":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-3"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"baz"}}}},"taskInfo":{"name":"print-op-3"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - loop-with-after-dependency-set + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - 
name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-d58719d25d427e9e486606802de7140a76dc85a7da1c1753cd07c1433e7a90a5}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"foo"}}}},"taskInfo":{"name":"print-op"}}' + - name: container + value: '{{workflow.parameters.implementations-d58719d25d427e9e486606802de7140a76dc85a7da1c1753cd07c1433e7a90a5}}' + - name: task-name + value: print-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-driver.outputs.parameters.cached-decision}}' + depends: print-op-driver.Succeeded + name: print-op + template: system-container-executor + inputs: + parameters: + 
- name: parent-dag-id + metadata: {} + name: comp-for-loop-2 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - loop-with-after-dependency-set + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-2 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-2-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: 
'{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-2-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2-for-loop-2-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-2 + template: comp-for-loop-2-for-loop-2-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-d58719d25d427e9e486606802de7140a76dc85a7da1c1753cd07c1433e7a90a5}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"bar"}}}},"taskInfo":{"name":"print-op-2"}}' + - name: container + value: '{{workflow.parameters.implementations-d58719d25d427e9e486606802de7140a76dc85a7da1c1753cd07c1433e7a90a5}}' + - name: task-name + value: print-op-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: for-loop-2.Succeeded + name: print-op-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-2-driver.outputs.parameters.cached-decision}}' + depends: print-op-2-driver.Succeeded + name: print-op-2 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-d58719d25d427e9e486606802de7140a76dc85a7da1c1753cd07c1433e7a90a5}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-3"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"baz"}}}},"taskInfo":{"name":"print-op-3"}}' + - name: container + value: '{{workflow.parameters.implementations-d58719d25d427e9e486606802de7140a76dc85a7da1c1753cd07c1433e7a90a5}}' + - name: task-name + value: print-op-3 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: for-loop-2.Succeeded + name: print-op-3-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-3-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-3-driver.outputs.parameters.cached-decision}}' + depends: print-op-3-driver.Succeeded + name: print-op-3 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/parameter_cache.yaml b/test_data/compiled-workflows/parameter_cache.yaml 
new file mode 100644 index 00000000000..3788a4ebfd5 --- /dev/null +++ b/test_data/compiled-workflows/parameter_cache.yaml @@ -0,0 +1,469 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: parameter-cache-pipeline- +spec: + arguments: + parameters: + - name: components-2f694fdbcf4a9d3446e12aa9fca457c7e2882290abceb728d2f702c4d450df8d + value: '{"executorLabel":"exec-crust-comp","inputDefinitions":{"parameters":{"input":{"parameterType":"STRING"}}}}' + - name: implementations-2f694fdbcf4a9d3446e12aa9fca457c7e2882290abceb728d2f702c4d450df8d + value: '{"args":["--executor_input","{{$}}","--function_to_execute","crust_comp"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + crust_comp(input: str):\n print(''input :'', input)\n\n"],"image":"python:3.9"}' + - name: components-ee0778c93e3b1661b503b59214cb8f679694b2ecc4aa2e53dd25ba5c03529348 + value: '{"executorLabel":"exec-core-comp","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-ee0778c93e3b1661b503b59214cb8f679694b2ecc4aa2e53dd25ba5c03529348 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","core_comp"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + core_comp() -\u003e str:\n return ''foo''\n\n"],"image":"python:3.9"}' + - name: components-comp-core + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"core-comp"}}}},"tasks":{"core-comp":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-core-comp"},"taskInfo":{"name":"core-comp"}}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: components-comp-mantle + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"core"}}}},"tasks":{"core":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: components-root + value: '{"dag":{"tasks":{"crust-comp":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-crust-comp"},"dependentTasks":["mantle"],"inputs":{"parameters":{"input":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"mantle"}}}},"taskInfo":{"name":"crust-comp"}},"mantle":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - parameter-cache-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 
64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ee0778c93e3b1661b503b59214cb8f679694b2ecc4aa2e53dd25ba5c03529348}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-core-comp"},"taskInfo":{"name":"core-comp"}}' + - name: container + value: '{{workflow.parameters.implementations-ee0778c93e3b1661b503b59214cb8f679694b2ecc4aa2e53dd25ba5c03529348}}' + - name: task-name + value: core-comp + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: core-comp-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.core-comp-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.core-comp-driver.outputs.parameters.cached-decision}}' + depends: core-comp-driver.Succeeded + name: core-comp + template: system-container-executor + inputs: + 
parameters: + - name: parent-dag-id + metadata: {} + name: comp-core + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - parameter-cache-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-core}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}' + - name: task-name + value: core + name: core-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.core-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.core-driver.outputs.parameters.condition}}' + depends: core-driver.Succeeded + name: core + template: comp-core + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-mantle + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-2f694fdbcf4a9d3446e12aa9fca457c7e2882290abceb728d2f702c4d450df8d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-crust-comp"},"dependentTasks":["mantle"],"inputs":{"parameters":{"input":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"mantle"}}}},"taskInfo":{"name":"crust-comp"}}' + - name: container + value: '{{workflow.parameters.implementations-2f694fdbcf4a9d3446e12aa9fca457c7e2882290abceb728d2f702c4d450df8d}}' + - name: task-name + value: crust-comp + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: mantle.Succeeded + name: 
crust-comp-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.crust-comp-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.crust-comp-driver.outputs.parameters.cached-decision}}' + depends: crust-comp-driver.Succeeded + name: crust-comp + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-mantle}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}' + - name: task-name + value: mantle + name: mantle-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.mantle-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.mantle-driver.outputs.parameters.condition}}' + depends: mantle-driver.Succeeded + name: mantle + template: comp-mantle + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/parameter_oneof.yaml b/test_data/compiled-workflows/parameter_oneof.yaml new file mode 100644 index 00000000000..1b0f6342414 --- /dev/null +++ b/test_data/compiled-workflows/parameter_oneof.yaml @@ -0,0 +1,650 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: parameter-oneof-pipeline- +spec: + arguments: + parameters: + - name: components-2f694fdbcf4a9d3446e12aa9fca457c7e2882290abceb728d2f702c4d450df8d + value: '{"executorLabel":"exec-crust-comp","inputDefinitions":{"parameters":{"input":{"parameterType":"STRING"}}}}' + - name: implementations-2f694fdbcf4a9d3446e12aa9fca457c7e2882290abceb728d2f702c4d450df8d + value: '{"args":["--executor_input","{{$}}","--function_to_execute","crust_comp"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + crust_comp(input: str):\n print(''input :'', input)\n\n"],"image":"python:3.9"}' + - name: components-a7bb13e035d75f1941d4a901687d98eb7be27da4abc34edc24ecdb00bb99d745 + value: '{"executorLabel":"exec-core-comp","inputDefinitions":{"parameters":{"input":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-a7bb13e035d75f1941d4a901687d98eb7be27da4abc34edc24ecdb00bb99d745 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","core_comp"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + core_comp(input: str) -\u003e str:\n print(''input :'', input)\n return + input\n\n"],"image":"python:3.9"}' + - name: components-comp-condition-2 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--core-comp-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"core-comp"}}}},"tasks":{"core-comp":{"cachingOptions":{},"componentRef":{"name":"comp-core-comp"},"inputs":{"parameters":{"input":{"runtimeValue":{"constant":"Got + heads!"}}}},"taskInfo":{"name":"core-comp"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-Output":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"pipelinechannel--core-comp-Output":{"parameterType":"STRING"}}}}' + - name: components-991bfbeac7bb7d91fb9d139f44d5e5b5f220227052409ae8a4c3f88b486f84dc + value: '{"executorLabel":"exec-core-output-comp","inputDefinitions":{"parameters":{"input":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"output_key":{"parameterType":"STRING"}}}}' + - name: implementations-991bfbeac7bb7d91fb9d139f44d5e5b5f220227052409ae8a4c3f88b486f84dc + value: '{"args":["--executor_input","{{$}}","--function_to_execute","core_output_comp"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + core_output_comp(input: str, output_key: dsl.OutputPath(str)):\n print(''input + :'', input)\n with open(output_key, ''w'') as f:\n f.write(input)\n\n"],"image":"python:3.9"}' + - name: components-comp-condition-3 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--core-output-comp-output_key":{"valueFromParameter":{"outputParameterKey":"output_key","producerSubtask":"core-output-comp"}}}},"tasks":{"core-output-comp":{"cachingOptions":{},"componentRef":{"name":"comp-core-output-comp"},"inputs":{"parameters":{"input":{"runtimeValue":{"constant":"Got + tails!"}}}},"taskInfo":{"name":"core-output-comp"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-Output":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"pipelinechannel--core-output-comp-output_key":{"parameterType":"STRING"}}}}' + - name: components-comp-condition-branches-1 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--condition-branches-1-oneof-1":{"valueFromOneof":{"parameterSelectors":[{"outputParameterKey":"pipelinechannel--core-comp-Output","producerSubtask":"condition-2"},{"outputParameterKey":"pipelinechannel--core-output-comp-output_key","producerSubtask":"condition-3"}]}}}},"tasks":{"condition-2":{"componentRef":{"name":"comp-condition-2"},"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-Output''] + == ''heads''"}},"condition-3":{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--flip-coin-Output''] + == ''heads'')"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-Output":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"pipelinechannel--condition-branches-1-oneof-1":{"parameterType":"STRING"}}}}' + - name: components-63c60d1934695443fbf91b71abd177ac69246c9328099025670d1d75e5f45f14 + value: '{"executorLabel":"exec-flip-coin","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-63c60d1934695443fbf91b71abd177ac69246c9328099025670d1d75e5f45f14 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","flip_coin"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + flip_coin() -\u003e str:\n import random\n return ''heads'' if random.randint(0, + 1) == 0 else ''tails''\n\n"],"image":"python:3.9"}' + - name: components-comp-core + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"pipelinechannel--condition-branches-1-oneof-1","producerSubtask":"condition-branches-1"}}}},"tasks":{"condition-branches-1":{"componentRef":{"name":"comp-condition-branches-1"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-branches-1"}},"flip-coin":{"cachingOptions":{},"componentRef":{"name":"comp-flip-coin"},"taskInfo":{"name":"flip-coin"}}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: components-comp-mantle + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"core"}}}},"tasks":{"core":{"cachingOptions":{},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: components-root + value: '{"dag":{"tasks":{"crust-comp":{"cachingOptions":{},"componentRef":{"name":"comp-crust-comp"},"dependentTasks":["mantle"],"inputs":{"parameters":{"input":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"mantle"}}}},"taskInfo":{"name":"crust-comp"}},"mantle":{"cachingOptions":{},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - parameter-oneof-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - 
"false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-a7bb13e035d75f1941d4a901687d98eb7be27da4abc34edc24ecdb00bb99d745}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-core-comp"},"inputs":{"parameters":{"input":{"runtimeValue":{"constant":"Got + heads!"}}}},"taskInfo":{"name":"core-comp"}}' + - name: container + value: '{{workflow.parameters.implementations-a7bb13e035d75f1941d4a901687d98eb7be27da4abc34edc24ecdb00bb99d745}}' + - name: task-name + value: core-comp + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: 
core-comp-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.core-comp-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.core-comp-driver.outputs.parameters.cached-decision}}' + depends: core-comp-driver.Succeeded + name: core-comp + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-2 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-991bfbeac7bb7d91fb9d139f44d5e5b5f220227052409ae8a4c3f88b486f84dc}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-core-output-comp"},"inputs":{"parameters":{"input":{"runtimeValue":{"constant":"Got + tails!"}}}},"taskInfo":{"name":"core-output-comp"}}' + - name: container + value: '{{workflow.parameters.implementations-991bfbeac7bb7d91fb9d139f44d5e5b5f220227052409ae8a4c3f88b486f84dc}}' + - name: task-name + value: core-output-comp + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: core-output-comp-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.core-output-comp-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.core-output-comp-driver.outputs.parameters.cached-decision}}' + depends: core-output-comp-driver.Succeeded + name: core-output-comp + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-3 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - parameter-oneof-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + 
valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-2"},"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-Output''] + == ''heads''"}}' + - name: task-name + value: condition-2 + name: condition-2-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-2-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-2-driver.outputs.parameters.condition}}' + depends: condition-2-driver.Succeeded + name: condition-2 + template: comp-condition-2 + when: '{{tasks.condition-2-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-3}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--flip-coin-Output''] + == ''heads'')"}}' + - name: task-name + value: condition-3 + name: condition-3-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-3-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-3-driver.outputs.parameters.condition}}' + depends: condition-3-driver.Succeeded + name: condition-3 + template: comp-condition-3 + when: '{{tasks.condition-3-driver.outputs.parameters.condition}} != false' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-branches-1 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-branches-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-branches-1"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-branches-1"}}' + - name: task-name + value: condition-branches-1 + depends: flip-coin.Succeeded + name: condition-branches-1-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-branches-1-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-branches-1-driver.outputs.parameters.condition}}' + depends: condition-branches-1-driver.Succeeded + name: condition-branches-1 + template: comp-condition-branches-1 + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-63c60d1934695443fbf91b71abd177ac69246c9328099025670d1d75e5f45f14}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-flip-coin"},"taskInfo":{"name":"flip-coin"}}' + - name: container + value: 
'{{workflow.parameters.implementations-63c60d1934695443fbf91b71abd177ac69246c9328099025670d1d75e5f45f14}}' + - name: task-name + value: flip-coin + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: flip-coin-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.flip-coin-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.flip-coin-driver.outputs.parameters.cached-decision}}' + depends: flip-coin-driver.Succeeded + name: flip-coin + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-core + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-core}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}' + - name: task-name + value: core + name: core-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.core-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.core-driver.outputs.parameters.condition}}' + depends: core-driver.Succeeded + name: core + template: comp-core + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-mantle + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-2f694fdbcf4a9d3446e12aa9fca457c7e2882290abceb728d2f702c4d450df8d}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-crust-comp"},"dependentTasks":["mantle"],"inputs":{"parameters":{"input":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"mantle"}}}},"taskInfo":{"name":"crust-comp"}}' + - name: container + value: '{{workflow.parameters.implementations-2f694fdbcf4a9d3446e12aa9fca457c7e2882290abceb728d2f702c4d450df8d}}' + - name: task-name + value: crust-comp + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: mantle.Succeeded + name: crust-comp-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.crust-comp-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.crust-comp-driver.outputs.parameters.cached-decision}}' + depends: crust-comp-driver.Succeeded + name: crust-comp + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-mantle}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}' + - name: task-name + value: mantle + name: mantle-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.mantle-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.mantle-driver.outputs.parameters.condition}}' + depends: mantle-driver.Succeeded + name: mantle + template: comp-mantle + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + 
template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/parameters_complex.yaml b/test_data/compiled-workflows/parameters_complex.yaml new file mode 100644 index 00000000000..76ce9316936 --- /dev/null +++ b/test_data/compiled-workflows/parameters_complex.yaml @@ -0,0 +1,792 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: math-pipeline- +spec: + arguments: + parameters: + - name: components-934af9f49289ce10e90d26c3cf160534c673f4da28e91bc67331b4a9ae08a463 + value: '{"executorLabel":"exec-add-two-numbers","inputDefinitions":{"parameters":{"x":{"parameterType":"LIST"},"y":{"parameterType":"LIST"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-934af9f49289ce10e90d26c3cf160534c673f4da28e91bc67331b4a9ae08a463 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","add_two_numbers"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + add_two_numbers(x: List[int], y: List[int]) -\u003e int:\n return sum(x) + + sum(y)\n\n"],"image":"python:3.9"}' + - name: components-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44 + value: '{"executorLabel":"exec-double","inputDefinitions":{"parameters":{"num":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","double"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + double(num: int) -\u003e int:\n return 2 * num\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-4 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--double-2-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"double-2"}}}},"tasks":{"double-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double-2"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-3"}}},"taskInfo":{"name":"double-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-3":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"pipelinechannel--double-2-Output":{"parameterType":"LIST"}}}}' + - name: components-843abeeb4ef17bcaea1bd40a0b5348966bd310914d8b2fadc13215c136bf3964 + value: '{"executorLabel":"exec-simple-add","inputDefinitions":{"parameters":{"nums":{"parameterType":"LIST"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-843abeeb4ef17bcaea1bd40a0b5348966bd310914d8b2fadc13215c136bf3964 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","simple_add"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + simple_add(nums: List[int]) -\u003e int:\n return sum(nums)\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-2 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--double-2-Output":{"valueFromParameter":{"outputParameterKey":"pipelinechannel--double-2-Output","producerSubtask":"for-loop-4"}},"pipelinechannel--double-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"double"}}}},"tasks":{"double":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"taskInfo":{"name":"double"}},"for-loop-4":{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[4, + 5, 6]"}},"taskInfo":{"name":"for-loop-4"}},"simple-add":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-simple-add"},"dependentTasks":["for-loop-4"],"inputs":{"parameters":{"nums":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-2-Output","producerTask":"for-loop-4"}}}},"taskInfo":{"name":"simple-add"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-1":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"pipelinechannel--double-2-Output":{"parameterType":"LIST"},"pipelinechannel--double-Output":{"parameterType":"LIST"}}}}' + - name: components-305f913c03b5666bf1a6fdd8cb4d3bfa17dde66bcd297bfb8827c23f3ed92dce + value: '{"executorLabel":"exec-nested-add-2","inputDefinitions":{"parameters":{"nums":{"parameterType":"LIST"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-305f913c03b5666bf1a6fdd8cb4d3bfa17dde66bcd297bfb8827c23f3ed92dce + value: '{"args":["--executor_input","{{$}}","--function_to_execute","nested_add"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + nested_add(nums: List[List[int]]) -\u003e int:\n import itertools\n return + sum(itertools.chain(*nums))\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-6 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--nested-add-2-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"nested-add-2"}},"pipelinechannel--simple-add-2-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"simple-add-2"}}}},"tasks":{"nested-add-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-nested-add-2"},"inputs":{"parameters":{"nums":{"componentInputParameter":"pipelinechannel--for-loop-2-pipelinechannel--double-2-Output"}}},"taskInfo":{"name":"nested-add-2"}},"simple-add-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-simple-add-2"},"inputs":{"parameters":{"nums":{"componentInputParameter":"pipelinechannel--for-loop-2-pipelinechannel--double-Output"}}},"taskInfo":{"name":"simple-add-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--for-loop-2-pipelinechannel--double-2-Output":{"parameterType":"LIST"},"pipelinechannel--for-loop-2-pipelinechannel--double-Output":{"parameterType":"LIST"},"pipelinechannel--loop-item-param-5":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"pipelinechannel--nested-add-2-Output":{"parameterType":"LIST"},"pipelinechannel--simple-add-2-Output":{"parameterType":"LIST"}}}}' + - name: components-root + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"add-two-numbers"}}}},"tasks":{"add-two-numbers":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-two-numbers"},"dependentTasks":["for-loop-6"],"inputs":{"parameters":{"x":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--simple-add-2-Output","producerTask":"for-loop-6"}},"y":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--nested-add-2-Output","producerTask":"for-loop-6"}}}},"taskInfo":{"name":"add-two-numbers"}},"for-loop-2":{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}},"for-loop-6":{"componentRef":{"name":"comp-for-loop-6"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"pipelinechannel--for-loop-2-pipelinechannel--double-2-Output":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-2-Output","producerTask":"for-loop-2"}},"pipelinechannel--for-loop-2-pipelinechannel--double-Output":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-Output","producerTask":"for-loop-2"}}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-5","items":{"raw":"[0, + 0, 
0]"}},"taskInfo":{"name":"for-loop-6"}},"nested-add":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-nested-add"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"nums":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-2-Output","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"nested-add"}}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - math-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + 
name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double-2"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-3"}}},"taskInfo":{"name":"double-2"}}' + - name: container + value: '{{workflow.parameters.implementations-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44}}' + - name: task-name + value: double-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: double-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.double-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.double-2-driver.outputs.parameters.cached-decision}}' + depends: double-2-driver.Succeeded + name: double-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-4 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - math-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + 
inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-4}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[4, + 5, 6]"}},"taskInfo":{"name":"for-loop-4"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-4 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-4-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-4}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[4, + 5, 6]"}},"taskInfo":{"name":"for-loop-4"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-4-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-4-for-loop-4-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"taskInfo":{"name":"double"}}' + - name: container + value: '{{workflow.parameters.implementations-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44}}' + - name: task-name + value: double + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: double-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.double-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.double-driver.outputs.parameters.cached-decision}}' + depends: double-driver.Succeeded + name: double + template: system-container-executor + - arguments: 
+ parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-4 + template: comp-for-loop-4-for-loop-4-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-843abeeb4ef17bcaea1bd40a0b5348966bd310914d8b2fadc13215c136bf3964}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-simple-add"},"dependentTasks":["for-loop-4"],"inputs":{"parameters":{"nums":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-2-Output","producerTask":"for-loop-4"}}}},"taskInfo":{"name":"simple-add"}}' + - name: container + value: '{{workflow.parameters.implementations-843abeeb4ef17bcaea1bd40a0b5348966bd310914d8b2fadc13215c136bf3964}}' + - name: task-name + value: simple-add + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: for-loop-4.Succeeded + name: simple-add-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.simple-add-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.simple-add-driver.outputs.parameters.cached-decision}}' + depends: simple-add-driver.Succeeded + name: simple-add + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-305f913c03b5666bf1a6fdd8cb4d3bfa17dde66bcd297bfb8827c23f3ed92dce}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-nested-add-2"},"inputs":{"parameters":{"nums":{"componentInputParameter":"pipelinechannel--for-loop-2-pipelinechannel--double-2-Output"}}},"taskInfo":{"name":"nested-add-2"}}' + - name: container + value: '{{workflow.parameters.implementations-305f913c03b5666bf1a6fdd8cb4d3bfa17dde66bcd297bfb8827c23f3ed92dce}}' + - name: task-name + value: nested-add-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: nested-add-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.nested-add-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.nested-add-2-driver.outputs.parameters.cached-decision}}' + depends: nested-add-2-driver.Succeeded + name: nested-add-2 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-843abeeb4ef17bcaea1bd40a0b5348966bd310914d8b2fadc13215c136bf3964}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-simple-add-2"},"inputs":{"parameters":{"nums":{"componentInputParameter":"pipelinechannel--for-loop-2-pipelinechannel--double-Output"}}},"taskInfo":{"name":"simple-add-2"}}' + - name: container + value: '{{workflow.parameters.implementations-843abeeb4ef17bcaea1bd40a0b5348966bd310914d8b2fadc13215c136bf3964}}' + - name: task-name + value: simple-add-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: simple-add-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.simple-add-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.simple-add-2-driver.outputs.parameters.cached-decision}}' + depends: simple-add-2-driver.Succeeded + 
name: simple-add-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-6 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-2 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-2-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-2-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2-for-loop-2-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-6}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-6"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"pipelinechannel--for-loop-2-pipelinechannel--double-2-Output":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-2-Output","producerTask":"for-loop-2"}},"pipelinechannel--for-loop-2-pipelinechannel--double-Output":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-Output","producerTask":"for-loop-2"}}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-5","items":{"raw":"[0, + 0, 0]"}},"taskInfo":{"name":"for-loop-6"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-6 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-6-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + 
value: '{{workflow.parameters.components-comp-for-loop-6}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-6"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"pipelinechannel--for-loop-2-pipelinechannel--double-2-Output":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-2-Output","producerTask":"for-loop-2"}},"pipelinechannel--for-loop-2-pipelinechannel--double-Output":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-Output","producerTask":"for-loop-2"}}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-5","items":{"raw":"[0, + 0, 0]"}},"taskInfo":{"name":"for-loop-6"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-6-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-6-for-loop-6-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-934af9f49289ce10e90d26c3cf160534c673f4da28e91bc67331b4a9ae08a463}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-two-numbers"},"dependentTasks":["for-loop-6"],"inputs":{"parameters":{"x":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--simple-add-2-Output","producerTask":"for-loop-6"}},"y":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--nested-add-2-Output","producerTask":"for-loop-6"}}}},"taskInfo":{"name":"add-two-numbers"}}' + - name: container + value: '{{workflow.parameters.implementations-934af9f49289ce10e90d26c3cf160534c673f4da28e91bc67331b4a9ae08a463}}' + - name: task-name + value: add-two-numbers + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: for-loop-6.Succeeded + name: add-two-numbers-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.add-two-numbers-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.add-two-numbers-driver.outputs.parameters.cached-decision}}' + depends: add-two-numbers-driver.Succeeded + name: add-two-numbers + template: system-container-executor + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-2 + template: comp-for-loop-2-for-loop-2-iterator + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: for-loop-2.Succeeded + name: for-loop-6 + template: comp-for-loop-6-for-loop-6-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-305f913c03b5666bf1a6fdd8cb4d3bfa17dde66bcd297bfb8827c23f3ed92dce}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-nested-add"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"nums":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-2-Output","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"nested-add"}}' + - name: container + value: 
'{{workflow.parameters.implementations-305f913c03b5666bf1a6fdd8cb4d3bfa17dde66bcd297bfb8827c23f3ed92dce}}' + - name: task-name + value: nested-add + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: for-loop-2.Succeeded + name: nested-add-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.nested-add-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.nested-add-driver.outputs.parameters.cached-decision}}' + depends: nested-add-driver.Succeeded + name: nested-add + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/parameters_simple.yaml b/test_data/compiled-workflows/parameters_simple.yaml new file mode 100644 index 00000000000..5cba1952d50 --- /dev/null +++ b/test_data/compiled-workflows/parameters_simple.yaml @@ -0,0 +1,515 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: math-pipeline- +spec: + arguments: + parameters: + - name: components-0a346d821b1e474f2aea8ab24806f75b1afd4d6f7764be16c00f645cc08d5428 + value: '{"executorLabel":"exec-add","inputDefinitions":{"parameters":{"nums":{"parameterType":"LIST"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-0a346d821b1e474f2aea8ab24806f75b1afd4d6f7764be16c00f645cc08d5428 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","add"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + add(nums: List[int]) -\u003e int:\n return sum(nums)\n\n"],"image":"python:3.9"}' + - name: components-a84a92e43cc5061b9721809172326190f017aa27a15f7b894c7be7cb01d2e7e4 + value: '{"executorLabel":"exec-add-container","inputDefinitions":{"parameters":{"nums":{"parameterType":"LIST"}}},"outputDefinitions":{"parameters":{"sum":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-a84a92e43cc5061b9721809172326190f017aa27a15f7b894c7be7cb01d2e7e4 + value: '{"args":["\n set -ex\n mkdir -p $(dirname {{$.outputs.parameters[''sum''].output_file}})\n echo + {{$.inputs.parameters[''nums'']}} | jq ''add'' \u003e {{$.outputs.parameters[''sum''].output_file}}\n "],"command":["sh","-c"],"image":"stedolan/jq"}' + - name: components-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44 + value: '{"executorLabel":"exec-double","inputDefinitions":{"parameters":{"num":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","double"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + double(num: int) -\u003e int:\n return 2 * num\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-2 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--double-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"double"}}}},"tasks":{"double":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"taskInfo":{"name":"double"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-1":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"pipelinechannel--double-Output":{"parameterType":"LIST"}}}}' + - name: components-root + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"pipelinechannel--double-Output","producerSubtask":"for-loop-2"}}}},"tasks":{"add":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"nums":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-Output","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"add"}},"add-container":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-container"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"nums":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-Output","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"add-container"}},"for-loop-2":{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"LIST"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - math-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - 
--mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"taskInfo":{"name":"double"}}' + - name: container + value: 
'{{workflow.parameters.implementations-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44}}' + - name: task-name + value: double + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: double-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.double-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.double-driver.outputs.parameters.cached-decision}}' + depends: double-driver.Succeeded + name: double + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - math-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-2 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index 
+ metadata: {} + name: comp-for-loop-2-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-2-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2-for-loop-2-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-0a346d821b1e474f2aea8ab24806f75b1afd4d6f7764be16c00f645cc08d5428}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"nums":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-Output","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"add"}}' + - name: container + value: '{{workflow.parameters.implementations-0a346d821b1e474f2aea8ab24806f75b1afd4d6f7764be16c00f645cc08d5428}}' + - name: task-name + value: add + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: for-loop-2.Succeeded + name: add-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.add-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.add-driver.outputs.parameters.cached-decision}}' + depends: add-driver.Succeeded + name: add + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-a84a92e43cc5061b9721809172326190f017aa27a15f7b894c7be7cb01d2e7e4}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-container"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"nums":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-Output","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"add-container"}}' + - name: container + value: '{{workflow.parameters.implementations-a84a92e43cc5061b9721809172326190f017aa27a15f7b894c7be7cb01d2e7e4}}' + - name: task-name + value: add-container + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: for-loop-2.Succeeded + name: add-container-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.add-container-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.add-container-driver.outputs.parameters.cached-decision}}' + depends: add-container-driver.Succeeded + name: add-container + template: system-container-executor + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-2 + template: comp-for-loop-2-for-loop-2-iterator + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + 
outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_as_exit_task.yaml b/test_data/compiled-workflows/pipeline_as_exit_task.yaml new file mode 100644 index 00000000000..4db3c60cfc9 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_as_exit_task.yaml @@ -0,0 +1,585 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-task-final-status-conditional- +spec: + arguments: + parameters: + - name: components-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d + value: '{"executorLabel":"exec-print-op-2","inputDefinitions":{"parameters":{"message":{"parameterType":"STRING"}}}}' + - name: implementations-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_op"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n print(message)\n\n"],"image":"python:3.9"}' + - name: components-comp-condition-1 + value: '{"dag":{"tasks":{"print-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"notify + task failure."}}}},"taskInfo":{"name":"print-op"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--get-run-state-Output":{"parameterType":"STRING"}}}}' + - name: components-2784bcbcbd64b218e0f38de5fe3ce3aecd9c19875e77fb40612978303a5983b8 + value: '{"executorLabel":"exec-get-run-state","inputDefinitions":{"parameters":{"status":{"parameterType":"STRUCT"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-2784bcbcbd64b218e0f38de5fe3ce3aecd9c19875e77fb40612978303a5983b8 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","get_run_state"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + get_run_state(status: dict) -\u003e str:\n print(''Pipeline status: '', + status)\n return status[''state'']\n\n"],"image":"python:3.9"}' + - name: components-comp-conditional-notification + value: '{"dag":{"tasks":{"condition-1":{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["get-run-state"],"inputs":{"parameters":{"pipelinechannel--get-run-state-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"get-run-state"}}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--get-run-state-Output''] + == ''FAILED''"}},"get-run-state":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-get-run-state"},"inputs":{"parameters":{"status":{"componentInputParameter":"status"}}},"taskInfo":{"name":"get-run-state"}}}},"inputDefinitions":{"parameters":{"status":{"isOptional":true,"parameterType":"TASK_FINAL_STATUS"}}}}' + - name: components-de343ed68bcc74a4b31b2b5a8230f35dfb47ca8d90eabe51d37dc3849d7acecf + value: '{"executorLabel":"exec-fail-op","inputDefinitions":{"parameters":{"message":{"parameterType":"STRING"}}}}' + - name: implementations-de343ed68bcc74a4b31b2b5a8230f35dfb47ca8d90eabe51d37dc3849d7acecf + value: '{"args":["--executor_input","{{$}}","--function_to_execute","fail_op"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n print(message)\n sys.exit(1)\n\n"],"image":"python:3.9"}' + - name: components-comp-exit-handler-1 + value: '{"dag":{"tasks":{"fail-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-fail-op"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"Task + failed."}}}},"taskInfo":{"name":"fail-op"}},"print-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"inputs":{"parameters":{"message":{"componentInputParameter":"pipelinechannel--message"}}},"taskInfo":{"name":"print-op"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--message":{"parameterType":"STRING"}}}}' + - name: components-root + value: '{"dag":{"tasks":{"conditional-notification":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-conditional-notification"},"dependentTasks":["exit-handler-1"],"inputs":{"parameters":{"status":{"taskFinalStatus":{"producerTask":"exit-handler-1"}}}},"taskInfo":{"name":"conditional-notification"},"triggerPolicy":{"strategy":"ALL_UPSTREAM_TASKS_COMPLETED"}},"exit-handler-1":{"componentRef":{"name":"comp-exit-handler-1"},"inputs":{"parameters":{"pipelinechannel--message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"my-pipeline"}}}},"inputDefinitions":{"parameters":{"message":{"defaultValue":"Hello + World!","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-task-final-status-conditional + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: 
ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"notify + task failure."}}}},"taskInfo":{"name":"print-op"}}' + - name: container + value: '{{workflow.parameters.implementations-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d}}' + - name: task-name + value: print-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: 
'{{tasks.print-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-driver.outputs.parameters.cached-decision}}' + depends: print-op-driver.Succeeded + name: print-op + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-1 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-task-final-status-conditional + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["get-run-state"],"inputs":{"parameters":{"pipelinechannel--get-run-state-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"get-run-state"}}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--get-run-state-Output''] + == ''FAILED''"}}' + - name: task-name + value: condition-1 + depends: get-run-state.Succeeded + name: condition-1-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-1-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-1-driver.outputs.parameters.condition}}' + depends: condition-1-driver.Succeeded + name: condition-1 + template: comp-condition-1 + when: '{{tasks.condition-1-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: 
'{{workflow.parameters.components-2784bcbcbd64b218e0f38de5fe3ce3aecd9c19875e77fb40612978303a5983b8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-get-run-state"},"inputs":{"parameters":{"status":{"componentInputParameter":"status"}}},"taskInfo":{"name":"get-run-state"}}' + - name: container + value: '{{workflow.parameters.implementations-2784bcbcbd64b218e0f38de5fe3ce3aecd9c19875e77fb40612978303a5983b8}}' + - name: task-name + value: get-run-state + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: get-run-state-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.get-run-state-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.get-run-state-driver.outputs.parameters.cached-decision}}' + depends: get-run-state-driver.Succeeded + name: get-run-state + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-conditional-notification + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-de343ed68bcc74a4b31b2b5a8230f35dfb47ca8d90eabe51d37dc3849d7acecf}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-fail-op"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"Task + failed."}}}},"taskInfo":{"name":"fail-op"}}' + - name: container + value: '{{workflow.parameters.implementations-de343ed68bcc74a4b31b2b5a8230f35dfb47ca8d90eabe51d37dc3849d7acecf}}' + - name: task-name + value: fail-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: fail-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.fail-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.fail-op-driver.outputs.parameters.cached-decision}}' + depends: fail-op-driver.Succeeded + name: fail-op + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"inputs":{"parameters":{"message":{"componentInputParameter":"pipelinechannel--message"}}},"taskInfo":{"name":"print-op"}}' + - name: container + value: '{{workflow.parameters.implementations-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d}}' + - name: task-name + value: print-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-driver.outputs.parameters.cached-decision}}' + depends: print-op-driver.Succeeded + name: print-op + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-exit-handler-1 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-conditional-notification}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-conditional-notification"},"dependentTasks":["exit-handler-1"],"inputs":{"parameters":{"status":{"taskFinalStatus":{"producerTask":"exit-handler-1"}}}},"taskInfo":{"name":"conditional-notification"},"triggerPolicy":{"strategy":"ALL_UPSTREAM_TASKS_COMPLETED"}}' + - name: task-name + value: conditional-notification + name: conditional-notification-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.conditional-notification-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.conditional-notification-driver.outputs.parameters.condition}}' + depends: conditional-notification-driver.Succeeded + name: conditional-notification + template: comp-conditional-notification + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: exit-hook-root-conditional-notification + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-exit-handler-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-exit-handler-1"},"inputs":{"parameters":{"pipelinechannel--message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"my-pipeline"}}' + - name: task-name + value: exit-handler-1 + name: exit-handler-1-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.exit-handler-1-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.exit-handler-1-driver.outputs.parameters.condition}}' + depends: exit-handler-1-driver.Succeeded + hooks: + exit: + arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + template: exit-hook-root-conditional-notification + name: exit-handler-1 + template: comp-exit-handler-1 + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"message":"Hello World!"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_in_pipeline.yaml b/test_data/compiled-workflows/pipeline_in_pipeline.yaml new file mode 100644 index 00000000000..917996c1671 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_in_pipeline.yaml @@ -0,0 +1,453 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-in-pipeline- +spec: + arguments: + parameters: + - name: components-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2 + value: '{"executorLabel":"exec-print-op1-2","inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_op1"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_op1(msg: str) -\u003e str:\n print(msg)\n return msg\n\n"],"image":"python:3.9"}' + - name: components-f6ead56828d6931739aa4610b58f9697c110a2d8723f04e95256df73a90b2348 + value: '{"executorLabel":"exec-print-op2","inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"}}}}' + - name: implementations-f6ead56828d6931739aa4610b58f9697c110a2d8723f04e95256df73a90b2348 + value: '{"command":["echo","{{$.inputs.parameters[''msg'']}}"],"image":"alpine"}' + - name: components-comp-inner-pipeline + value: '{"dag":{"tasks":{"print-op1":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op1-2"},"inputs":{"parameters":{"msg":{"componentInputParameter":"msg"}}},"taskInfo":{"name":"print-op1"}},"print-op2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op2"},"dependentTasks":["print-op1"],"inputs":{"parameters":{"msg":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"print-op1"}}}},"taskInfo":{"name":"print-op2"}}}},"inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"}}}}' + - name: components-root + value: '{"dag":{"tasks":{"inner-pipeline":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-inner-pipeline"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"world"}}}},"taskInfo":{"name":"inner-pipeline"}},"print-op1":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op1"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"Hello"}}}},"taskInfo":{"name":"print-op1"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-in-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: 
ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op1-2"},"inputs":{"parameters":{"msg":{"componentInputParameter":"msg"}}},"taskInfo":{"name":"print-op1"}}' + - name: container + value: '{{workflow.parameters.implementations-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2}}' + - name: task-name + value: print-op1 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op1-driver + template: system-container-driver + - arguments: + 
parameters: + - name: pod-spec-patch + value: '{{tasks.print-op1-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op1-driver.outputs.parameters.cached-decision}}' + depends: print-op1-driver.Succeeded + name: print-op1 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-f6ead56828d6931739aa4610b58f9697c110a2d8723f04e95256df73a90b2348}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op2"},"dependentTasks":["print-op1"],"inputs":{"parameters":{"msg":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"print-op1"}}}},"taskInfo":{"name":"print-op2"}}' + - name: container + value: '{{workflow.parameters.implementations-f6ead56828d6931739aa4610b58f9697c110a2d8723f04e95256df73a90b2348}}' + - name: task-name + value: print-op2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: print-op1.Succeeded + name: print-op2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op2-driver.outputs.parameters.cached-decision}}' + depends: print-op2-driver.Succeeded + name: print-op2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-inner-pipeline + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-in-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + 
value: '{{workflow.parameters.components-comp-inner-pipeline}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-inner-pipeline"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"world"}}}},"taskInfo":{"name":"inner-pipeline"}}' + - name: task-name + value: inner-pipeline + name: inner-pipeline-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.inner-pipeline-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.inner-pipeline-driver.outputs.parameters.condition}}' + depends: inner-pipeline-driver.Succeeded + name: inner-pipeline + template: comp-inner-pipeline + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op1"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"Hello"}}}},"taskInfo":{"name":"print-op1"}}' + - name: container + value: '{{workflow.parameters.implementations-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2}}' + - name: task-name + value: print-op1 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op1-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op1-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op1-driver.outputs.parameters.cached-decision}}' + depends: print-op1-driver.Succeeded + name: print-op1 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_in_pipeline_complex.yaml b/test_data/compiled-workflows/pipeline_in_pipeline_complex.yaml new file mode 100644 index 00000000000..fa0bdb8309a --- /dev/null +++ b/test_data/compiled-workflows/pipeline_in_pipeline_complex.yaml @@ -0,0 +1,624 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-in-pipeline-complex- +spec: + arguments: + parameters: + - name: components-f6ead56828d6931739aa4610b58f9697c110a2d8723f04e95256df73a90b2348 + value: '{"executorLabel":"exec-print-op2","inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"}}}}' + - name: implementations-f6ead56828d6931739aa4610b58f9697c110a2d8723f04e95256df73a90b2348 + value: '{"command":["echo","{{$.inputs.parameters[''msg'']}}"],"image":"alpine"}' + - name: components-comp-condition-1 + value: 
'{"dag":{"tasks":{"print-op2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op2"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"world"}}}},"taskInfo":{"name":"print-op2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--print-op1-Output":{"parameterType":"STRING"}}}}' + - name: components-comp-condition-2 + value: '{"dag":{"tasks":{"print-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"Bye!"}}}},"taskInfo":{"name":"print-op"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--print-op1-Output":{"parameterType":"STRING"}}}}' + - name: components-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2 + value: '{"executorLabel":"exec-print-op1-2","inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_op1"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_op1(msg: str) -\u003e str:\n print(msg)\n return msg\n\n"],"image":"python:3.9"}' + - name: components-comp-inner-pipeline + value: '{"dag":{"tasks":{"condition-1":{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["print-op1"],"inputs":{"parameters":{"pipelinechannel--print-op1-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"print-op1"}}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--print-op1-Output''] + == ''Hello''"}},"condition-2":{"componentRef":{"name":"comp-condition-2"},"dependentTasks":["print-op1"],"inputs":{"parameters":{"pipelinechannel--print-op1-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"print-op1"}}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--print-op1-Output''] + != ''Hello''"}},"print-op1":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op1-2"},"inputs":{"parameters":{"msg":{"componentInputParameter":"msg"}}},"taskInfo":{"name":"print-op1"}}}},"inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"}}}}' + - name: components-comp-for-loop-2 + value: '{"dag":{"tasks":{"inner-pipeline":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-inner-pipeline"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"taskInfo":{"name":"inner-pipeline"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-1":{"parameterType":"STRING"}}}}' + - name: components-root + value: 
'{"dag":{"tasks":{"for-loop-2":{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[\"Hello\", + \"world!\"]"}},"taskInfo":{"name":"for-loop-2"}},"print-op1":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op1"},"inputs":{"parameters":{"msg":{"componentInputParameter":"msg"}}},"taskInfo":{"name":"print-op1"}}}},"inputDefinitions":{"parameters":{"msg":{"defaultValue":"Hello","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-in-pipeline-complex + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: 
gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-f6ead56828d6931739aa4610b58f9697c110a2d8723f04e95256df73a90b2348}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op2"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"world"}}}},"taskInfo":{"name":"print-op2"}}' + - name: container + value: '{{workflow.parameters.implementations-f6ead56828d6931739aa4610b58f9697c110a2d8723f04e95256df73a90b2348}}' + - name: task-name + value: print-op2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op2-driver.outputs.parameters.cached-decision}}' + depends: print-op2-driver.Succeeded + name: print-op2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-1 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-f6ead56828d6931739aa4610b58f9697c110a2d8723f04e95256df73a90b2348}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"Bye!"}}}},"taskInfo":{"name":"print-op"}}' + - name: container + value: '{{workflow.parameters.implementations-f6ead56828d6931739aa4610b58f9697c110a2d8723f04e95256df73a90b2348}}' + - name: task-name + value: print-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-driver.outputs.parameters.cached-decision}}' + depends: print-op-driver.Succeeded + name: print-op + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-2 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - 
pipeline-in-pipeline-complex + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["print-op1"],"inputs":{"parameters":{"pipelinechannel--print-op1-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"print-op1"}}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--print-op1-Output''] + == ''Hello''"}}' + - name: task-name + value: condition-1 + depends: print-op1.Succeeded + name: condition-1-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-1-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-1-driver.outputs.parameters.condition}}' + depends: condition-1-driver.Succeeded + name: condition-1 + template: comp-condition-1 + when: '{{tasks.condition-1-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-2"},"dependentTasks":["print-op1"],"inputs":{"parameters":{"pipelinechannel--print-op1-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"print-op1"}}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--print-op1-Output''] + != ''Hello''"}}' + - name: task-name + value: 
condition-2 + depends: print-op1.Succeeded + name: condition-2-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-2-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-2-driver.outputs.parameters.condition}}' + depends: condition-2-driver.Succeeded + name: condition-2 + template: comp-condition-2 + when: '{{tasks.condition-2-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op1-2"},"inputs":{"parameters":{"msg":{"componentInputParameter":"msg"}}},"taskInfo":{"name":"print-op1"}}' + - name: container + value: '{{workflow.parameters.implementations-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2}}' + - name: task-name + value: print-op1 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op1-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op1-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op1-driver.outputs.parameters.cached-decision}}' + depends: print-op1-driver.Succeeded + name: print-op1 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-inner-pipeline + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-inner-pipeline}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-inner-pipeline"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"taskInfo":{"name":"inner-pipeline"}}' + - name: task-name + value: inner-pipeline + name: inner-pipeline-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.inner-pipeline-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.inner-pipeline-driver.outputs.parameters.condition}}' + depends: inner-pipeline-driver.Succeeded + name: inner-pipeline + template: comp-inner-pipeline + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[\"Hello\", + \"world!\"]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-2 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: 
comp-for-loop-2-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[\"Hello\", + \"world!\"]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-2-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2-for-loop-2-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-2 + template: comp-for-loop-2-for-loop-2-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op1"},"inputs":{"parameters":{"msg":{"componentInputParameter":"msg"}}},"taskInfo":{"name":"print-op1"}}' + - name: container + value: '{{workflow.parameters.implementations-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2}}' + - name: task-name + value: print-op1 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op1-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op1-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op1-driver.outputs.parameters.cached-decision}}' + depends: print-op1-driver.Succeeded + name: print-op1 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"msg":"Hello"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_in_pipeline_loaded_from_yaml.yaml b/test_data/compiled-workflows/pipeline_in_pipeline_loaded_from_yaml.yaml new file mode 100644 index 00000000000..e9c5b114684 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_in_pipeline_loaded_from_yaml.yaml @@ -0,0 +1,523 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-in-pipeline- +spec: + arguments: + parameters: + - name: components-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2 + value: 
'{"executorLabel":"exec-print-op1-2","inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_op1"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_op1(msg: str) -\u003e str:\n print(msg)\n return msg\n\n"],"image":"python:3.9"}' + - name: components-3f2ba2471e2638f8b27a01f4563316e6e3e8c02bae402875e95b5b86e8de1ddc + value: '{"executorLabel":"exec-print-op2","inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"data":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: implementations-3f2ba2471e2638f8b27a01f4563316e6e3e8c02bae402875e95b5b86e8de1ddc + value: '{"args":["{{$.inputs.parameters[''msg'']}}","{{$.outputs.artifacts[''data''].path}}"],"command":["sh","-c","mkdir + --parents $(dirname \"$1\") \u0026\u0026 echo \"$0\" \u003e \"$1\""],"image":"alpine"}' + - name: components-comp-inner-pipeline + value: '{"dag":{"outputs":{"artifacts":{"data":{"artifactSelectors":[{"outputArtifactKey":"data","producerSubtask":"print-op2"}]}},"parameters":{"msg":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"print-op1"}}}},"tasks":{"print-op1":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op1-2"},"inputs":{"parameters":{"msg":{"componentInputParameter":"msg"}}},"taskInfo":{"name":"print-op1"}},"print-op2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op2"},"dependentTasks":["print-op1"],"inputs":{"parameters":{"msg":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"print-op1"}}}},"taskInfo":{"name":"print-op2"}}}},"inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"data":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}},"parameters":{"msg":{"parameterType":"STRING"}}}}' + - name: components-comp-pipeline-in-pipeline + value: 
'{"dag":{"outputs":{"artifacts":{"Output":{"artifactSelectors":[{"outputArtifactKey":"data","producerSubtask":"inner-pipeline"}]}}},"tasks":{"inner-pipeline":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-inner-pipeline"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"world"}}}},"taskInfo":{"name":"inner-pipeline"}},"print-op1":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op1"},"inputs":{"parameters":{"msg":{"componentInputParameter":"msg"}}},"taskInfo":{"name":"print-op1"}}}},"inputDefinitions":{"parameters":{"msg":{"defaultValue":"Hello","isOptional":true,"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: components-f310a0bf82b254e5d513a6030f358a14c0199a92aa17fb0534416a5f13d29b31 + value: '{"executorLabel":"exec-print-op1-3","inputDefinitions":{"artifacts":{"data":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: implementations-f310a0bf82b254e5d513a6030f358a14c0199a92aa17fb0534416a5f13d29b31 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_op1"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_op1(data: Input[Artifact]):\n with open(data.path, ''r'') as f:\n print(f.read())\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"pipeline-in-pipeline":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-pipeline-in-pipeline"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"Hello"}}}},"taskInfo":{"name":"pipeline-in-pipeline"}},"print-op1":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op1-3"},"dependentTasks":["pipeline-in-pipeline"],"inputs":{"artifacts":{"data":{"taskOutputArtifact":{"outputArtifactKey":"Output","producerTask":"pipeline-in-pipeline"}}}},"taskInfo":{"name":"print-op1"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-in-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - 
'{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2}}' + - name: task + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op1-2"},"inputs":{"parameters":{"msg":{"componentInputParameter":"msg"}}},"taskInfo":{"name":"print-op1"}}' + - name: container + value: '{{workflow.parameters.implementations-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2}}' + - name: task-name + value: print-op1 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op1-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op1-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op1-driver.outputs.parameters.cached-decision}}' + depends: print-op1-driver.Succeeded + name: print-op1 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-3f2ba2471e2638f8b27a01f4563316e6e3e8c02bae402875e95b5b86e8de1ddc}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op2"},"dependentTasks":["print-op1"],"inputs":{"parameters":{"msg":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"print-op1"}}}},"taskInfo":{"name":"print-op2"}}' + - name: container + value: '{{workflow.parameters.implementations-3f2ba2471e2638f8b27a01f4563316e6e3e8c02bae402875e95b5b86e8de1ddc}}' + - name: task-name + value: print-op2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: print-op1.Succeeded + name: print-op2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op2-driver.outputs.parameters.cached-decision}}' + depends: print-op2-driver.Succeeded + name: print-op2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-inner-pipeline + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-in-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: 
task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-inner-pipeline}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-inner-pipeline"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"world"}}}},"taskInfo":{"name":"inner-pipeline"}}' + - name: task-name + value: inner-pipeline + name: inner-pipeline-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.inner-pipeline-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.inner-pipeline-driver.outputs.parameters.condition}}' + depends: inner-pipeline-driver.Succeeded + name: inner-pipeline + template: comp-inner-pipeline + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op1"},"inputs":{"parameters":{"msg":{"componentInputParameter":"msg"}}},"taskInfo":{"name":"print-op1"}}' + - name: container + value: '{{workflow.parameters.implementations-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2}}' + - name: task-name + value: print-op1 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op1-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op1-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op1-driver.outputs.parameters.cached-decision}}' + depends: print-op1-driver.Succeeded + name: print-op1 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-pipeline-in-pipeline + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-pipeline-in-pipeline}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-pipeline-in-pipeline"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"Hello"}}}},"taskInfo":{"name":"pipeline-in-pipeline"}}' + - name: task-name + value: pipeline-in-pipeline + name: pipeline-in-pipeline-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.pipeline-in-pipeline-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.pipeline-in-pipeline-driver.outputs.parameters.condition}}' + depends: pipeline-in-pipeline-driver.Succeeded + name: pipeline-in-pipeline + template: comp-pipeline-in-pipeline + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-f310a0bf82b254e5d513a6030f358a14c0199a92aa17fb0534416a5f13d29b31}}' + - name: task + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op1-3"},"dependentTasks":["pipeline-in-pipeline"],"inputs":{"artifacts":{"data":{"taskOutputArtifact":{"outputArtifactKey":"Output","producerTask":"pipeline-in-pipeline"}}}},"taskInfo":{"name":"print-op1"}}' + - name: container + value: '{{workflow.parameters.implementations-f310a0bf82b254e5d513a6030f358a14c0199a92aa17fb0534416a5f13d29b31}}' + - name: task-name + value: print-op1 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: pipeline-in-pipeline.Succeeded + name: print-op1-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op1-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op1-driver.outputs.parameters.cached-decision}}' + depends: print-op1-driver.Succeeded + name: print-op1 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_producer_consumer.yaml b/test_data/compiled-workflows/pipeline_producer_consumer.yaml new file mode 100644 index 00000000000..530eceaa796 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_producer_consumer.yaml @@ -0,0 +1,779 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: math-pipeline- +spec: + arguments: + parameters: + - name: components-278f161477820f25fd603ee488b03203504f1efb0101e60e3fae0c7f63cd9a61 + value: '{"executorLabel":"exec-add","inputDefinitions":{"parameters":{"nums":{"parameterType":"LIST"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-278f161477820f25fd603ee488b03203504f1efb0101e60e3fae0c7f63cd9a61 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","add"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + add(nums: List[List[int]]) -\u003e int:\n import itertools\n return + sum(itertools.chain(*nums))\n\n"],"image":"python:3.9"}' + - name: components-820c6b84550cd793dda8df1e7a9d6e31a46ac14583a0ed36ad9bf2844d5f25b4 + value: '{"executorLabel":"exec-echo-and-return","inputDefinitions":{"parameters":{"string":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-820c6b84550cd793dda8df1e7a9d6e31a46ac14583a0ed36ad9bf2844d5f25b4 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","echo_and_return"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + echo_and_return(string: str) -\u003e str:\n print(string)\n return string\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-2-2 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--echo-and-return-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"echo-and-return"}}}},"tasks":{"echo-and-return":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-echo-and-return"},"inputs":{"parameters":{"string":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"taskInfo":{"name":"echo-and-return"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-1":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"pipelinechannel--echo-and-return-Output":{"parameterType":"LIST"}}}}' + - name: components-comp-add-pipeline + value: '{"dag":{"outputs":{"parameters":{"out1":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"add"}},"out2":{"valueFromParameter":{"outputParameterKey":"pipelinechannel--echo-and-return-Output","producerSubtask":"for-loop-2"}}}},"tasks":{"add":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add"},"inputs":{"parameters":{"nums":{"componentInputParameter":"nums"}}},"taskInfo":{"name":"add"}},"for-loop-2":{"componentRef":{"name":"comp-for-loop-2-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[\"m\", + \"a\", \"t\", 
\"h\"]"}},"taskInfo":{"name":"for-loop-2"}}}},"inputDefinitions":{"parameters":{"nums":{"parameterType":"LIST"}}},"outputDefinitions":{"parameters":{"out1":{"parameterType":"NUMBER_INTEGER"},"out2":{"parameterType":"LIST"}}}}' + - name: components-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44 + value: '{"executorLabel":"exec-double","inputDefinitions":{"parameters":{"num":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","double"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + double(num: int) -\u003e int:\n return 2 * num\n\n"],"image":"python:3.9"}' + - name: components-comp-double-pipeline + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"double"}}}},"tasks":{"double":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double"},"inputs":{"parameters":{"num":{"componentInputParameter":"num"}}},"taskInfo":{"name":"double"}}}},"inputDefinitions":{"parameters":{"num":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-comp-for-loop-4 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--double-pipeline-Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"double-pipeline"}}}},"tasks":{"double-pipeline":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double-pipeline"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-3"}}},"taskInfo":{"name":"double-pipeline"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-3":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"pipelinechannel--double-pipeline-Output":{"parameterType":"LIST"}}}}' + - name: components-comp-for-loop-2 + value: '{"dag":{"outputs":{"parameters":{"pipelinechannel--double-pipeline-Output":{"valueFromParameter":{"outputParameterKey":"pipelinechannel--double-pipeline-Output","producerSubtask":"for-loop-4"}}}},"tasks":{"for-loop-4":{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-4"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-1":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"pipelinechannel--double-pipeline-Output":{"parameterType":"LIST"}}}}' + - name: components-4a0d5e75bec2b5be6dfc3d4ef02083b53d09e3e48bbb8b4346d4e919bdc999a2 + value: 
'{"executorLabel":"exec-join-and-print","inputDefinitions":{"parameters":{"strings":{"parameterType":"LIST"}}}}' + - name: implementations-4a0d5e75bec2b5be6dfc3d4ef02083b53d09e3e48bbb8b4346d4e919bdc999a2 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","join_and_print"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + join_and_print(strings: List[str]):\n print(''''.join(strings))\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"out1","producerSubtask":"add-pipeline"}}}},"tasks":{"add-pipeline":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-pipeline"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"nums":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-pipeline-Output","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"add-pipeline"}},"for-loop-2":{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}},"join-and-print":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-join-and-print"},"dependentTasks":["add-pipeline"],"inputs":{"parameters":{"strings":{"taskOutputParameter":{"outputParameterKey":"out2","producerTask":"add-pipeline"}}}},"taskInfo":{"name":"join-and-print"}}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - math-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: 
ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-820c6b84550cd793dda8df1e7a9d6e31a46ac14583a0ed36ad9bf2844d5f25b4}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-echo-and-return"},"inputs":{"parameters":{"string":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"taskInfo":{"name":"echo-and-return"}}' + - name: container + value: '{{workflow.parameters.implementations-820c6b84550cd793dda8df1e7a9d6e31a46ac14583a0ed36ad9bf2844d5f25b4}}' + - name: task-name + value: echo-and-return + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: echo-and-return-driver + template: system-container-driver 
+ - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.echo-and-return-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.echo-and-return-driver.outputs.parameters.cached-decision}}' + depends: echo-and-return-driver.Succeeded + name: echo-and-return + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2-2 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - math-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2-2}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[\"m\", + \"a\", \"t\", \"h\"]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-2-2 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-2-2-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2-2}}' + - name: 
parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[\"m\", + \"a\", \"t\", \"h\"]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-2-2-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2-2-for-loop-2-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-278f161477820f25fd603ee488b03203504f1efb0101e60e3fae0c7f63cd9a61}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add"},"inputs":{"parameters":{"nums":{"componentInputParameter":"nums"}}},"taskInfo":{"name":"add"}}' + - name: container + value: '{{workflow.parameters.implementations-278f161477820f25fd603ee488b03203504f1efb0101e60e3fae0c7f63cd9a61}}' + - name: task-name + value: add + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: add-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.add-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.add-driver.outputs.parameters.cached-decision}}' + depends: add-driver.Succeeded + name: add + template: system-container-executor + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-2 + template: comp-for-loop-2-2-for-loop-2-iterator + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-add-pipeline + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double"},"inputs":{"parameters":{"num":{"componentInputParameter":"num"}}},"taskInfo":{"name":"double"}}' + - name: container + value: '{{workflow.parameters.implementations-ce925a4ef98c92ae630145dae1155afc9d381ebef3b3f22030ab4257c4910f44}}' + - name: task-name + value: double + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: double-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.double-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.double-driver.outputs.parameters.cached-decision}}' + depends: double-driver.Succeeded + name: double + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-double-pipeline + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-double-pipeline}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double-pipeline"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-3"}}},"taskInfo":{"name":"double-pipeline"}}' + - name: task-name + value: double-pipeline + name: double-pipeline-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.double-pipeline-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.double-pipeline-driver.outputs.parameters.condition}}' + depends: double-pipeline-driver.Succeeded + name: double-pipeline + template: comp-double-pipeline + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-4 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-4}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-4"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-4 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-4-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-4}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-4"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-4-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-4-for-loop-4-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-4 + template: comp-for-loop-4-for-loop-4-iterator + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: 
parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-2 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-2-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-2-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2-for-loop-2-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-add-pipeline}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-pipeline"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"nums":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-pipeline-Output","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"add-pipeline"}}' + - name: task-name + value: add-pipeline + depends: for-loop-2.Succeeded + name: add-pipeline-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.add-pipeline-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.add-pipeline-driver.outputs.parameters.condition}}' + depends: add-pipeline-driver.Succeeded + name: add-pipeline + template: comp-add-pipeline + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-2 + template: comp-for-loop-2-for-loop-2-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-4a0d5e75bec2b5be6dfc3d4ef02083b53d09e3e48bbb8b4346d4e919bdc999a2}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-join-and-print"},"dependentTasks":["add-pipeline"],"inputs":{"parameters":{"strings":{"taskOutputParameter":{"outputParameterKey":"out2","producerTask":"add-pipeline"}}}},"taskInfo":{"name":"join-and-print"}}' + - name: container + value: '{{workflow.parameters.implementations-4a0d5e75bec2b5be6dfc3d4ef02083b53d09e3e48bbb8b4346d4e919bdc999a2}}' + - name: task-name + value: join-and-print + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: add-pipeline.Succeeded + name: join-and-print-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.join-and-print-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: 
'{{tasks.join-and-print-driver.outputs.parameters.cached-decision}}' + depends: join-and-print-driver.Succeeded + name: join-and-print + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_after.yaml b/test_data/compiled-workflows/pipeline_with_after.yaml new file mode 100644 index 00000000000..60aaffd772c --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_after.yaml @@ -0,0 +1,416 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-after- +spec: + arguments: + parameters: + - name: components-e9d96c084390adff2ea878debaa8ca2fd68a3c14c3349e300ea0c5deac669806 + value: '{"executorLabel":"exec-print-text","inputDefinitions":{"parameters":{"text":{"parameterType":"STRING"}}}}' + - name: implementations-e9d96c084390adff2ea878debaa8ca2fd68a3c14c3349e300ea0c5deac669806 + value: '{"command":["sh","-c","set -e -x\necho \"$0\"\n","{{$.inputs.parameters[''text'']}}"],"image":"alpine"}' + - name: components-root + value: '{"dag":{"tasks":{"print-text":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"1st + task"}}}},"taskInfo":{"name":"print-text"}},"print-text-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-2"},"dependentTasks":["print-text"],"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"2nd + task"}}}},"taskInfo":{"name":"print-text-2"}},"print-text-3":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-3"},"dependentTasks":["print-text","print-text-2"],"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"3rd + task"}}}},"taskInfo":{"name":"print-text-3"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-after + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - 
--mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-e9d96c084390adff2ea878debaa8ca2fd68a3c14c3349e300ea0c5deac669806}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text"},"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"1st + task"}}}},"taskInfo":{"name":"print-text"}}' + - name: container + value: 
'{{workflow.parameters.implementations-e9d96c084390adff2ea878debaa8ca2fd68a3c14c3349e300ea0c5deac669806}}' + - name: task-name + value: print-text + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-text-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-driver.outputs.parameters.cached-decision}}' + depends: print-text-driver.Succeeded + name: print-text + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-e9d96c084390adff2ea878debaa8ca2fd68a3c14c3349e300ea0c5deac669806}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-2"},"dependentTasks":["print-text"],"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"2nd + task"}}}},"taskInfo":{"name":"print-text-2"}}' + - name: container + value: '{{workflow.parameters.implementations-e9d96c084390adff2ea878debaa8ca2fd68a3c14c3349e300ea0c5deac669806}}' + - name: task-name + value: print-text-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: print-text.Succeeded + name: print-text-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-2-driver.outputs.parameters.cached-decision}}' + depends: print-text-2-driver.Succeeded + name: print-text-2 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-e9d96c084390adff2ea878debaa8ca2fd68a3c14c3349e300ea0c5deac669806}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-3"},"dependentTasks":["print-text","print-text-2"],"inputs":{"parameters":{"text":{"runtimeValue":{"constant":"3rd + task"}}}},"taskInfo":{"name":"print-text-3"}}' + - name: container + value: '{{workflow.parameters.implementations-e9d96c084390adff2ea878debaa8ca2fd68a3c14c3349e300ea0c5deac669806}}' + - name: task-name + value: print-text-3 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: print-text.Succeeded && print-text-2.Succeeded + name: print-text-3-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-3-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-3-driver.outputs.parameters.cached-decision}}' + depends: print-text-3-driver.Succeeded + name: print-text-3 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-after + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - 
--execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_artifact_upload_download.yaml b/test_data/compiled-workflows/pipeline_with_artifact_upload_download.yaml new file mode 100644 index 00000000000..127c6ea1a53 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_artifact_upload_download.yaml @@ -0,0 +1,415 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-datasets- +spec: + arguments: + parameters: + - name: components-05e0c7c24c705bcaac86e732ffd5d85e67d9a184ae4703d789ccfe1715a452f2 + value: '{"executorLabel":"exec-download-dataset-and-upload-as-artifact","inputDefinitions":{"parameters":{"dataset_name":{"parameterType":"STRING"},"dataset_repo":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"output_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-05e0c7c24c705bcaac86e732ffd5d85e67d9a184ae4703d789ccfe1715a452f2 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","download_dataset_and_upload_as_artifact"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''datasets==4.0.0'' \u0026\u0026 python3 + -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + download_dataset_and_upload_as_artifact(dataset_repo: str, dataset_name: str, + output_dataset: Output[Dataset]):\n from datasets import load_dataset\n # + Load data set from hugging face\n ds = load_dataset(dataset_repo, dataset_name)\n print(\"Downloaded + Hugging Face data\")\n print(f\"Now saving to {output_dataset.path}\")\n ds.save_to_disk(output_dataset.path)\n print(f\"Saved + to {output_dataset.path}\")\n\n"],"image":"python:3.9"}' + - name: components-d0f4b4d95b964a5d78a841aa20e3d529d81189e4c90f9b39a31332bb8c8d437c + value: '{"executorLabel":"exec-print-dataset-info","inputDefinitions":{"artifacts":{"dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-d0f4b4d95b964a5d78a841aa20e3d529d81189e4c90f9b39a31332bb8c8d437c + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_dataset_info"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_dataset_info(dataset: Dataset):\n print(''Information about the artifact'')\n print(''Name:'', + dataset.name)\n print(''URI:'', dataset.uri)\n assert \"download-dataset-and-upload-as-artifact\" + in dataset.uri, \"The URI of the downloaded artifact does not match the expected + function''s name that generated it\"\n\n"],"image":"python:3.9"}' + - name: components-root + value: 
'{"dag":{"tasks":{"download-dataset-and-upload-as-artifact":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-download-dataset-and-upload-as-artifact"},"inputs":{"parameters":{"dataset_name":{"componentInputParameter":"dataset_name"},"dataset_repo":{"componentInputParameter":"dataset_repo"}}},"taskInfo":{"name":"download-dataset-and-upload-as-artifact"}},"print-dataset-info":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-dataset-info"},"dependentTasks":["download-dataset-and-upload-as-artifact"],"inputs":{"artifacts":{"dataset":{"taskOutputArtifact":{"outputArtifactKey":"output_dataset","producerTask":"download-dataset-and-upload-as-artifact"}}}},"taskInfo":{"name":"print-dataset-info"}}}},"inputDefinitions":{"parameters":{"dataset_name":{"defaultValue":"","isOptional":true,"parameterType":"STRING"},"dataset_repo":{"defaultValue":"google/frames-benchmark","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-datasets + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - 
should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-05e0c7c24c705bcaac86e732ffd5d85e67d9a184ae4703d789ccfe1715a452f2}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-download-dataset-and-upload-as-artifact"},"inputs":{"parameters":{"dataset_name":{"componentInputParameter":"dataset_name"},"dataset_repo":{"componentInputParameter":"dataset_repo"}}},"taskInfo":{"name":"download-dataset-and-upload-as-artifact"}}' + - name: container + value: '{{workflow.parameters.implementations-05e0c7c24c705bcaac86e732ffd5d85e67d9a184ae4703d789ccfe1715a452f2}}' + - name: task-name + value: download-dataset-and-upload-as-artifact + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: download-dataset-and-upload-as-artifact-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.download-dataset-and-upload-as-artifact-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.download-dataset-and-upload-as-artifact-driver.outputs.parameters.cached-decision}}' + depends: download-dataset-and-upload-as-artifact-driver.Succeeded + name: download-dataset-and-upload-as-artifact + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-d0f4b4d95b964a5d78a841aa20e3d529d81189e4c90f9b39a31332bb8c8d437c}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-dataset-info"},"dependentTasks":["download-dataset-and-upload-as-artifact"],"inputs":{"artifacts":{"dataset":{"taskOutputArtifact":{"outputArtifactKey":"output_dataset","producerTask":"download-dataset-and-upload-as-artifact"}}}},"taskInfo":{"name":"print-dataset-info"}}' + - name: container + value: 
'{{workflow.parameters.implementations-d0f4b4d95b964a5d78a841aa20e3d529d81189e4c90f9b39a31332bb8c8d437c}}' + - name: task-name + value: print-dataset-info + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: download-dataset-and-upload-as-artifact.Succeeded + name: print-dataset-info-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-dataset-info-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-dataset-info-driver.outputs.parameters.cached-decision}}' + depends: print-dataset-info-driver.Succeeded + name: print-dataset-info + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-datasets + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"dataset_name":"","dataset_repo":"google/frames-benchmark"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_concat_placeholder.yaml b/test_data/compiled-workflows/pipeline_with_concat_placeholder.yaml 
new file mode 100644 index 00000000000..76b006ef9bb --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_concat_placeholder.yaml @@ -0,0 +1,362 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: one-step-pipeline-with-concat-placeholder- +spec: + arguments: + parameters: + - name: components-7c45df434efc6e206df6b6c77b2f4f28cdab5c72c61199ad34c83ad5f3ecd460 + value: '{"executorLabel":"exec-component-with-concat-placeholder","inputDefinitions":{"parameters":{"input_one":{"parameterType":"STRING"},"input_two":{"parameterType":"STRING"}}}}' + - name: implementations-7c45df434efc6e206df6b6c77b2f4f28cdab5c72c61199ad34c83ad5f3ecd460 + value: '{"args":["echo \"$0\" \u003e /tmp/test \u0026\u0026 [[ \"$0\" == ''one+two=three'' + ]]","{\"Concat\": [\"{{$.inputs.parameters[''input_one'']}}\", \"+\", \"{{$.inputs.parameters[''input_two'']}}\", + \"=three\"]}"],"command":["sh","-ec"],"image":"ghcr.io/containerd/busybox"}' + - name: components-root + value: '{"dag":{"tasks":{"component-with-concat-placeholder":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-with-concat-placeholder"},"inputs":{"parameters":{"input_one":{"runtimeValue":{"constant":"one"}},"input_two":{"runtimeValue":{"constant":"two"}}}},"taskInfo":{"name":"component-with-concat-placeholder"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - one-step-pipeline-with-concat-placeholder + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition 
+ - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-7c45df434efc6e206df6b6c77b2f4f28cdab5c72c61199ad34c83ad5f3ecd460}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-with-concat-placeholder"},"inputs":{"parameters":{"input_one":{"runtimeValue":{"constant":"one"}},"input_two":{"runtimeValue":{"constant":"two"}}}},"taskInfo":{"name":"component-with-concat-placeholder"}}' + - name: container + value: '{{workflow.parameters.implementations-7c45df434efc6e206df6b6c77b2f4f28cdab5c72c61199ad34c83ad5f3ecd460}}' + - name: task-name + value: component-with-concat-placeholder + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-with-concat-placeholder-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component-with-concat-placeholder-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component-with-concat-placeholder-driver.outputs.parameters.cached-decision}}' + depends: component-with-concat-placeholder-driver.Succeeded + name: component-with-concat-placeholder + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - one-step-pipeline-with-concat-placeholder + - --run_id + - '{{workflow.uid}}' + - 
--run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_condition.yaml b/test_data/compiled-workflows/pipeline_with_condition.yaml new file mode 100644 index 00000000000..24214bbb314 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_condition.yaml @@ -0,0 +1,518 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: single-condition-pipeline- +spec: + arguments: + parameters: + - name: components-74c752241e4386e530a02be0534451962c07a3c96d9c8f0a3b3433cad25b0339 + value: '{"executorLabel":"exec-flip-coin-op-2","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-74c752241e4386e530a02be0534451962c07a3c96d9c8f0a3b3433cad25b0339 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","flip_coin_op"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + flip_coin_op() -\u003e str:\n \"\"\"Flip a coin and output heads or tails + randomly.\"\"\"\n import random\n result = ''heads'' if random.randint(0, + 1) == 0 else ''tails''\n return result\n\n"],"image":"python:3.9"}' + - name: components-ea4c10718a561edf4521c83916c18a873e35e86dce27f6050d1677d0da06b823 + value: '{"executorLabel":"exec-print-op-2","inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"}}}}' + - name: implementations-ea4c10718a561edf4521c83916c18a873e35e86dce27f6050d1677d0da06b823 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_op"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\n"],"image":"python:3.9"}' + - name: components-comp-condition-1 + value: '{"dag":{"tasks":{"flip-coin-op-2":{"cachingOptions":{},"componentRef":{"name":"comp-flip-coin-op-2"},"taskInfo":{"name":"flip-coin-op-2"}},"print-op-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"dependentTasks":["flip-coin-op-2"],"inputs":{"parameters":{"msg":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op-2"}}}},"taskInfo":{"name":"print-op-2"}},"print-op-3":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-3"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--text"}}},"taskInfo":{"name":"print-op-3"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-op-Output":{"parameterType":"STRING"},"pipelinechannel--text":{"parameterType":"STRING"}}}}' + - name: components-root + value: '{"dag":{"tasks":{"condition-1":{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["flip-coin-op"],"inputs":{"parameters":{"pipelinechannel--flip-coin-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op"}},"pipelinechannel--text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-op-Output''] + == 
''heads''"}},"flip-coin-op":{"cachingOptions":{},"componentRef":{"name":"comp-flip-coin-op"},"taskInfo":{"name":"flip-coin-op"}},"print-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"dependentTasks":["flip-coin-op"],"inputs":{"parameters":{"msg":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op"}}}},"taskInfo":{"name":"print-op"}}}},"inputDefinitions":{"parameters":{"text":{"defaultValue":"condition + test","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - single-condition-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: 
gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-74c752241e4386e530a02be0534451962c07a3c96d9c8f0a3b3433cad25b0339}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-flip-coin-op-2"},"taskInfo":{"name":"flip-coin-op-2"}}' + - name: container + value: '{{workflow.parameters.implementations-74c752241e4386e530a02be0534451962c07a3c96d9c8f0a3b3433cad25b0339}}' + - name: task-name + value: flip-coin-op-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: flip-coin-op-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.flip-coin-op-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.flip-coin-op-2-driver.outputs.parameters.cached-decision}}' + depends: flip-coin-op-2-driver.Succeeded + name: flip-coin-op-2 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ea4c10718a561edf4521c83916c18a873e35e86dce27f6050d1677d0da06b823}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"dependentTasks":["flip-coin-op-2"],"inputs":{"parameters":{"msg":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op-2"}}}},"taskInfo":{"name":"print-op-2"}}' + - name: container + value: '{{workflow.parameters.implementations-ea4c10718a561edf4521c83916c18a873e35e86dce27f6050d1677d0da06b823}}' + - name: task-name + value: print-op-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: flip-coin-op-2.Succeeded + name: print-op-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-2-driver.outputs.parameters.cached-decision}}' + depends: print-op-2-driver.Succeeded + name: print-op-2 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ea4c10718a561edf4521c83916c18a873e35e86dce27f6050d1677d0da06b823}}' + - name: task + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-3"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--text"}}},"taskInfo":{"name":"print-op-3"}}' + - name: container + value: '{{workflow.parameters.implementations-ea4c10718a561edf4521c83916c18a873e35e86dce27f6050d1677d0da06b823}}' + - name: task-name + value: print-op-3 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-3-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-3-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-3-driver.outputs.parameters.cached-decision}}' + depends: print-op-3-driver.Succeeded + name: print-op-3 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-1 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - single-condition-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["flip-coin-op"],"inputs":{"parameters":{"pipelinechannel--flip-coin-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op"}},"pipelinechannel--text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-op-Output''] + == 
''heads''"}}' + - name: task-name + value: condition-1 + depends: flip-coin-op.Succeeded + name: condition-1-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-1-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-1-driver.outputs.parameters.condition}}' + depends: condition-1-driver.Succeeded + name: condition-1 + template: comp-condition-1 + when: '{{tasks.condition-1-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-74c752241e4386e530a02be0534451962c07a3c96d9c8f0a3b3433cad25b0339}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-flip-coin-op"},"taskInfo":{"name":"flip-coin-op"}}' + - name: container + value: '{{workflow.parameters.implementations-74c752241e4386e530a02be0534451962c07a3c96d9c8f0a3b3433cad25b0339}}' + - name: task-name + value: flip-coin-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: flip-coin-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.flip-coin-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.flip-coin-op-driver.outputs.parameters.cached-decision}}' + depends: flip-coin-op-driver.Succeeded + name: flip-coin-op + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ea4c10718a561edf4521c83916c18a873e35e86dce27f6050d1677d0da06b823}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"dependentTasks":["flip-coin-op"],"inputs":{"parameters":{"msg":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op"}}}},"taskInfo":{"name":"print-op"}}' + - name: container + value: '{{workflow.parameters.implementations-ea4c10718a561edf4521c83916c18a873e35e86dce27f6050d1677d0da06b823}}' + - name: task-name + value: print-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: flip-coin-op.Succeeded + name: print-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-driver.outputs.parameters.cached-decision}}' + depends: print-op-driver.Succeeded + name: print-op + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"text":"condition test"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_condition_dynamic_task_output_custom_training_job.yaml b/test_data/compiled-workflows/pipeline_with_condition_dynamic_task_output_custom_training_job.yaml new file mode 100644 index 
00000000000..33c7a5d0ad1 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_condition_dynamic_task_output_custom_training_job.yaml @@ -0,0 +1,614 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-dynamic-condition-output- +spec: + arguments: + parameters: + - name: components-4ff7c4319829a8830447ba783584f0f40734ff010482a6b9b234ceda17ba214c + value: '{"executorLabel":"exec-accelerator-count","outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-4ff7c4319829a8830447ba783584f0f40734ff010482a6b9b234ceda17ba214c + value: '{"args":["--executor_input","{{$}}","--function_to_execute","accelerator_count"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + accelerator_count() -\u003e int:\n return 1\n\n"],"image":"python:3.9"}' + - name: components-2212d778fdcf0682a268c7b100e165b811b818a3b68c0bfd48dd5681e999936b + value: '{"executorLabel":"exec-accelerator-type","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-2212d778fdcf0682a268c7b100e165b811b818a3b68c0bfd48dd5681e999936b + value: '{"args":["--executor_input","{{$}}","--function_to_execute","accelerator_type"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + accelerator_type() -\u003e str:\n return ''NVIDIA_TESLA_P4''\n\n"],"image":"python:3.9"}' + - name: components-5c3db471413bc5e6b0f2f0865964176bec80d4d3e1dca8ac3332029d1bd0e525 + value: '{"executorLabel":"exec-custom-training-job","inputDefinitions":{"parameters":{"base_output_directory":{"defaultValue":"","description":"The + Cloud Storage location to store the output of this CustomJob or HyperparameterTuningJob. + See [more information ](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/GcsDestination).","isOptional":true,"parameterType":"STRING"},"display_name":{"description":"The + name of the CustomJob.","parameterType":"STRING"},"enable_web_access":{"defaultValue":false,"description":"Whether + you want Vertex AI to enable [interactive shell access ](https://cloud.google.com/vertex-ai/docs/training/monitor-debug-interactive-shell) + to training containers. 
If `True`, you can access interactive shells at the + URIs given by [CustomJob.web_access_uris][].","isOptional":true,"parameterType":"BOOLEAN"},"encryption_spec_key_name":{"defaultValue":"","description":"Customer-managed + encryption key options for the CustomJob. If this is set, then all resources + created by the CustomJob will be encrypted with the provided encryption key.","isOptional":true,"parameterType":"STRING"},"labels":{"defaultValue":{},"description":"The + labels with user-defined metadata to organize the CustomJob. See [more information](https://goo.gl/xmQnxf).","isOptional":true,"parameterType":"STRUCT"},"location":{"defaultValue":"{{$.pipeline_google_cloud_location}}","description":"Location + for creating the custom training job. If not set, default to the location + where the PipelineJob is run.","isOptional":true,"parameterType":"STRING"},"max_wait_duration":{"defaultValue":"86400s","description":"The + maximum time to wait for the custom training job to be scheduled only if the + scheduling strategy is set to FLEX_START. If set to 0, the job will wait indefinitely. + The default is 24 hours. See [more information](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/CustomJobSpec#Strategy).","isOptional":true,"parameterType":"STRING"},"network":{"defaultValue":"","description":"The + full name of the Compute Engine network to which the job should be peered. + For example, `projects/12345/global/networks/myVPC`. Format is of the form + `projects/{project}/global/networks/{network}`. Where `{project}` is a project + number, as in `12345`, and `{network}` is a network name. Private services + access must already be configured for the network. If left unspecified, the + job is not peered with any network.","isOptional":true,"parameterType":"STRING"},"persistent_resource_id":{"defaultValue":"{{$.pipeline_persistent_resource_id}}","description":"The + ID of the PersistentResource in the same Project and Location which to run. + The default value is a placeholder that will be resolved to the PipelineJob + [RuntimeConfig](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.pipelineJobs#PipelineJob.RuntimeConfig)''s + persistent resource id at runtime. However, if the PipelineJob doesn''t set + Persistent Resource as the job level runtime, the placedholder will be resolved + to an empty string and the custom job will be run on demand. If the value + is set explicitly, the custom job will runs in the specified persistent resource, + in this case, please note the network and CMEK configs on the job should be + consistent with those on the PersistentResource, otherwise, the job will be + rejected.","isOptional":true,"parameterType":"STRING"},"project":{"defaultValue":"{{$.pipeline_google_cloud_project_id}}","description":"Project + to create the custom training job in. Defaults to the project in which the + PipelineJob is run.","isOptional":true,"parameterType":"STRING"},"psc_interface_config":{"defaultValue":{},"description":"Configuration + CustomJob with PSC-I. See [more information](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/CustomJobSpec#PscInterfaceConfig).","isOptional":true,"parameterType":"STRUCT"},"reserved_ip_ranges":{"defaultValue":[],"description":"A + list of names for the reserved IP ranges under the VPC network that can be + used for this job. If set, we will deploy the job within the provided IP ranges. 
+ Otherwise, the job will be deployed to any IP ranges under the provided VPC + network.","isOptional":true,"parameterType":"LIST"},"restart_job_on_worker_restart":{"defaultValue":false,"description":"Restarts + the entire CustomJob if a worker gets restarted. This feature can be used + by distributed training jobs that are not resilient to workers leaving and + joining a job.","isOptional":true,"parameterType":"BOOLEAN"},"service_account":{"defaultValue":"","description":"Sets + the default service account for workload run-as account. The [service account + ](https://cloud.google.com/vertex-ai/docs/pipelines/configure-project#service-account) + running the pipeline submitting jobs must have act-as permission on this run-as + account. If unspecified, the Vertex AI Custom Code [Service Agent ](https://cloud.google.com/vertex-ai/docs/general/access-control#service-agents) + for the CustomJob''s project.","isOptional":true,"parameterType":"STRING"},"strategy":{"defaultValue":"STANDARD","description":"The + strategy to use for the custom training job. The default is ''STANDARD''. + See [more information](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/CustomJobSpec#Strategy).","isOptional":true,"parameterType":"STRING"},"tensorboard":{"defaultValue":"","description":"The + name of a Vertex AI TensorBoard resource to which this CustomJob will upload + TensorBoard logs.","isOptional":true,"parameterType":"STRING"},"timeout":{"defaultValue":"604800s","description":"The + maximum job running time. The default is 7 days. A duration in seconds with + up to nine fractional digits, terminated by ''s'', for example: \"3.5s\".","isOptional":true,"parameterType":"STRING"},"worker_pool_specs":{"defaultValue":[],"description":"Serialized + json spec of the worker pools including machine type and Docker image. All + worker pools except the first one are optional and can be skipped by providing + an empty value. 
See [more information](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/CustomJobSpec#WorkerPoolSpec).","isOptional":true,"parameterType":"LIST"}}},"outputDefinitions":{"parameters":{"gcp_resources":{"description":"Serialized + JSON of `gcp_resources` [proto](https://github.com/kubeflow/pipelines/tree/master/components/google-cloud/google_cloud_pipeline_components/proto) + which tracks the CustomJob.","parameterType":"STRING"}}}}' + - name: implementations-5c3db471413bc5e6b0f2f0865964176bec80d4d3e1dca8ac3332029d1bd0e525 + value: '{"args":["--type","CustomJob","--payload","{\"display_name\": \"{{$.inputs.parameters[''display_name'']}}\", + \"job_spec\": {\"worker_pool_specs\": {{$.inputs.parameters[''worker_pool_specs'']}}, + \"scheduling\": {\"timeout\": \"{{$.inputs.parameters[''timeout'']}}\", \"restart_job_on_worker_restart\": + {{$.inputs.parameters[''restart_job_on_worker_restart'']}}, \"strategy\": + \"{{$.inputs.parameters[''strategy'']}}\", \"max_wait_duration\": \"{{$.inputs.parameters[''max_wait_duration'']}}\"}, + \"service_account\": \"{{$.inputs.parameters[''service_account'']}}\", \"tensorboard\": + \"{{$.inputs.parameters[''tensorboard'']}}\", \"enable_web_access\": {{$.inputs.parameters[''enable_web_access'']}}, + \"network\": \"{{$.inputs.parameters[''network'']}}\", \"reserved_ip_ranges\": + {{$.inputs.parameters[''reserved_ip_ranges'']}}, \"base_output_directory\": + {\"output_uri_prefix\": \"{{$.inputs.parameters[''base_output_directory'']}}\"}, + \"persistent_resource_id\": \"{{$.inputs.parameters[''persistent_resource_id'']}}\", + \"psc_interface_config\": {{$.inputs.parameters[''psc_interface_config'']}}}, + \"labels\": {{$.inputs.parameters[''labels'']}}, \"encryption_spec\": {\"kms_key_name\": + \"{{$.inputs.parameters[''encryption_spec_key_name'']}}\"}}","--project","{{$.inputs.parameters[''project'']}}","--location","{{$.inputs.parameters[''location'']}}","--gcp_resources","{{$.outputs.parameters[''gcp_resources''].output_file}}"],"command":["python3","-u","-m","google_cloud_pipeline_components.container.v1.custom_job.launcher"],"image":"gcr.io/ml-pipeline/google-cloud-pipeline-components:2.21.0"}' + - name: components-comp-condition-1 + value: 
'{"dag":{"tasks":{"custom-training-job":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-custom-training-job"},"inputs":{"parameters":{"display_name":{"runtimeValue":{"constant":"add-numbers"}},"encryption_spec_key_name":{"componentInputParameter":"pipelinechannel--encryption_spec_key_name"},"location":{"componentInputParameter":"pipelinechannel--location"},"pipelinechannel--accelerator-count-Output":{"componentInputParameter":"pipelinechannel--accelerator-count-Output"},"pipelinechannel--accelerator-type-Output":{"componentInputParameter":"pipelinechannel--accelerator-type-Output"},"pipelinechannel--machine-type-Output":{"componentInputParameter":"pipelinechannel--machine-type-Output"},"project":{"componentInputParameter":"pipelinechannel--project"},"worker_pool_specs":{"runtimeValue":{"constant":[{"container_spec":{"args":["foo"],"command":["echo"],"image_uri":"gcr.io/ml-pipeline/google-cloud-pipeline-components:2.5.0"},"machine_spec":{"accelerator_count":"{{$.inputs.parameters[''pipelinechannel--accelerator-count-Output'']}}","accelerator_type":"{{$.inputs.parameters[''pipelinechannel--accelerator-type-Output'']}}","machine_type":"{{$.inputs.parameters[''pipelinechannel--machine-type-Output'']}}"},"replica_count":1}]}}}},"taskInfo":{"name":"custom-training-job"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--accelerator-count-Output":{"parameterType":"NUMBER_INTEGER"},"pipelinechannel--accelerator-type-Output":{"parameterType":"STRING"},"pipelinechannel--encryption_spec_key_name":{"parameterType":"STRING"},"pipelinechannel--flip-biased-coin-op-Output":{"parameterType":"STRING"},"pipelinechannel--location":{"parameterType":"STRING"},"pipelinechannel--machine-type-Output":{"parameterType":"STRING"},"pipelinechannel--project":{"parameterType":"STRING"}}}}' + - name: components-51b3f8664f2ff23be29076a65342b8eb091ec17bd73e4295c973baea3dd5f073 + value: '{"executorLabel":"exec-flip-biased-coin-op","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-51b3f8664f2ff23be29076a65342b8eb091ec17bd73e4295c973baea3dd5f073 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","flip_biased_coin_op"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + flip_biased_coin_op() -\u003e str:\n \"\"\"Flip a coin and output heads.\"\"\"\n return + ''heads''\n\n"],"image":"python:3.9"}' + - name: components-cc24d59ed416d6169227e7ef954078b09337657b6eb903c1ee2afe6e0d9a5972 + value: '{"executorLabel":"exec-machine-type","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-cc24d59ed416d6169227e7ef954078b09337657b6eb903c1ee2afe6e0d9a5972 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","machine_type"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + machine_type() -\u003e str:\n return ''n1-standard-4''\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"accelerator-count":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-accelerator-count"},"taskInfo":{"name":"accelerator-count"}},"accelerator-type":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-accelerator-type"},"taskInfo":{"name":"accelerator-type"}},"condition-1":{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["accelerator-count","accelerator-type","flip-biased-coin-op","machine-type"],"inputs":{"parameters":{"pipelinechannel--accelerator-count-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-count"}},"pipelinechannel--accelerator-type-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-type"}},"pipelinechannel--encryption_spec_key_name":{"componentInputParameter":"encryption_spec_key_name"},"pipelinechannel--flip-biased-coin-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-biased-coin-op"}},"pipelinechannel--location":{"componentInputParameter":"location"},"pipelinechannel--machine-type-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"machine-type"}},"pipelinechannel--project":{"componentInputParameter":"project"}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-biased-coin-op-Output''] + == ''heads''"}},"flip-biased-coin-op":{"cachingOptions":{},"componentRef":{"name":"comp-flip-biased-coin-op"},"taskInfo":{"name":"flip-biased-coin-op"}},"machine-type":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-machine-type"},"taskInfo":{"name":"machine-type"}}}},"inputDefinitions":{"parameters":{"encryption_spec_key_name":{"defaultValue":"","isOptional":true,"parameterType":"STRING"},"location":{"parameterType":"STRING"},"project":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-dynamic-condition-output + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - 
--pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-5c3db471413bc5e6b0f2f0865964176bec80d4d3e1dca8ac3332029d1bd0e525}}' + - name: task + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-custom-training-job"},"inputs":{"parameters":{"display_name":{"runtimeValue":{"constant":"add-numbers"}},"encryption_spec_key_name":{"componentInputParameter":"pipelinechannel--encryption_spec_key_name"},"location":{"componentInputParameter":"pipelinechannel--location"},"pipelinechannel--accelerator-count-Output":{"componentInputParameter":"pipelinechannel--accelerator-count-Output"},"pipelinechannel--accelerator-type-Output":{"componentInputParameter":"pipelinechannel--accelerator-type-Output"},"pipelinechannel--machine-type-Output":{"componentInputParameter":"pipelinechannel--machine-type-Output"},"project":{"componentInputParameter":"pipelinechannel--project"},"worker_pool_specs":{"runtimeValue":{"constant":[{"container_spec":{"args":["foo"],"command":["echo"],"image_uri":"gcr.io/ml-pipeline/google-cloud-pipeline-components:2.5.0"},"machine_spec":{"accelerator_count":"{{$.inputs.parameters[''pipelinechannel--accelerator-count-Output'']}}","accelerator_type":"{{$.inputs.parameters[''pipelinechannel--accelerator-type-Output'']}}","machine_type":"{{$.inputs.parameters[''pipelinechannel--machine-type-Output'']}}"},"replica_count":1}]}}}},"taskInfo":{"name":"custom-training-job"}}' + - name: container + value: '{{workflow.parameters.implementations-5c3db471413bc5e6b0f2f0865964176bec80d4d3e1dca8ac3332029d1bd0e525}}' + - name: task-name + value: custom-training-job + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: custom-training-job-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.custom-training-job-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.custom-training-job-driver.outputs.parameters.cached-decision}}' + depends: custom-training-job-driver.Succeeded + name: custom-training-job + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-1 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-dynamic-condition-output + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" 
+ name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-4ff7c4319829a8830447ba783584f0f40734ff010482a6b9b234ceda17ba214c}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-accelerator-count"},"taskInfo":{"name":"accelerator-count"}}' + - name: container + value: '{{workflow.parameters.implementations-4ff7c4319829a8830447ba783584f0f40734ff010482a6b9b234ceda17ba214c}}' + - name: task-name + value: accelerator-count + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: accelerator-count-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.accelerator-count-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.accelerator-count-driver.outputs.parameters.cached-decision}}' + depends: accelerator-count-driver.Succeeded + name: accelerator-count + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-2212d778fdcf0682a268c7b100e165b811b818a3b68c0bfd48dd5681e999936b}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-accelerator-type"},"taskInfo":{"name":"accelerator-type"}}' + - name: container + value: '{{workflow.parameters.implementations-2212d778fdcf0682a268c7b100e165b811b818a3b68c0bfd48dd5681e999936b}}' + - name: task-name + value: accelerator-type + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: accelerator-type-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.accelerator-type-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.accelerator-type-driver.outputs.parameters.cached-decision}}' + depends: accelerator-type-driver.Succeeded + name: accelerator-type + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: 
'{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["accelerator-count","accelerator-type","flip-biased-coin-op","machine-type"],"inputs":{"parameters":{"pipelinechannel--accelerator-count-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-count"}},"pipelinechannel--accelerator-type-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-type"}},"pipelinechannel--encryption_spec_key_name":{"componentInputParameter":"encryption_spec_key_name"},"pipelinechannel--flip-biased-coin-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-biased-coin-op"}},"pipelinechannel--location":{"componentInputParameter":"location"},"pipelinechannel--machine-type-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"machine-type"}},"pipelinechannel--project":{"componentInputParameter":"project"}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-biased-coin-op-Output''] + == ''heads''"}}' + - name: task-name + value: condition-1 + depends: accelerator-count.Succeeded && accelerator-type.Succeeded && flip-biased-coin-op.Succeeded + && machine-type.Succeeded + name: condition-1-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-1-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-1-driver.outputs.parameters.condition}}' + depends: condition-1-driver.Succeeded + name: condition-1 + template: comp-condition-1 + when: '{{tasks.condition-1-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-51b3f8664f2ff23be29076a65342b8eb091ec17bd73e4295c973baea3dd5f073}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-flip-biased-coin-op"},"taskInfo":{"name":"flip-biased-coin-op"}}' + - name: container + value: '{{workflow.parameters.implementations-51b3f8664f2ff23be29076a65342b8eb091ec17bd73e4295c973baea3dd5f073}}' + - name: task-name + value: flip-biased-coin-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: flip-biased-coin-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.flip-biased-coin-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.flip-biased-coin-op-driver.outputs.parameters.cached-decision}}' + depends: flip-biased-coin-op-driver.Succeeded + name: flip-biased-coin-op + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-cc24d59ed416d6169227e7ef954078b09337657b6eb903c1ee2afe6e0d9a5972}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-machine-type"},"taskInfo":{"name":"machine-type"}}' + - name: container + value: '{{workflow.parameters.implementations-cc24d59ed416d6169227e7ef954078b09337657b6eb903c1ee2afe6e0d9a5972}}' + - name: task-name + value: machine-type + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: machine-type-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.machine-type-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: 
'{{tasks.machine-type-driver.outputs.parameters.cached-decision}}' + depends: machine-type-driver.Succeeded + name: machine-type + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"encryption_spec_key_name":""}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_dynamic_importer_metadata.yaml b/test_data/compiled-workflows/pipeline_with_dynamic_importer_metadata.yaml new file mode 100644 index 00000000000..d6b87c7d4a4 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_dynamic_importer_metadata.yaml @@ -0,0 +1,462 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-importer- +spec: + arguments: + parameters: + - name: components-comp-importer + value: '{"executorLabel":"exec-importer","inputDefinitions":{"parameters":{"metadata":{"parameterType":"STRING"},"metadata-2":{"parameterType":"STRING"},"uri":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"artifact":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-comp-importer + value: '{"artifactUri":{"runtimeParameter":"uri"},"metadata":{"containerSpec":{"imageUri":"{{$.inputs.parameters[''metadata-2'']}}"},"name":["{{$.inputs.parameters[''metadata'']}}","alias-name"]},"typeSchema":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}' + - name: components-comp-importer-2 + value: '{"executorLabel":"exec-importer-2","inputDefinitions":{"parameters":{"metadata":{"parameterType":"STRING"},"metadata-2":{"parameterType":"STRING"},"metadata-3":{"parameterType":"NUMBER_INTEGER"},"uri":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"artifact":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-comp-importer-2 + value: '{"artifactUri":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"},"metadata":{"containerSpec":{"imageUri":"us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-5:latest"},"list-of-data":["{{$.inputs.parameters[''metadata'']}}","{{$.inputs.parameters[''metadata-2'']}}","{{$.inputs.parameters[''metadata-3'']}}"],"name":"prefix-{{$.inputs.parameters[''metadata'']}}","{{$.inputs.parameters[''metadata'']}}":"{{$.inputs.parameters[''metadata'']}}","{{$.inputs.parameters[''metadata-2'']}}":"us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-5:latest"},"typeSchema":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}' + - name: components-958372f93b012956585660b34fdee7e4ee56bc8305edc68a225e8d9fd0e274f2 + value: '{"executorLabel":"exec-make-name","inputDefinitions":{"parameters":{"name":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-958372f93b012956585660b34fdee7e4ee56bc8305edc68a225e8d9fd0e274f2 + value: 
'{"args":["--executor_input","{{$}}","--function_to_execute","make_name"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + make_name(name: str) -\u003e str:\n return name\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"importer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"metadata":{"componentInputParameter":"name"},"metadata-2":{"componentInputParameter":"pipeline_input_image_uri"},"uri":{"componentInputParameter":"pipeline_input_artifact_uri"}}},"taskInfo":{"name":"importer"}},"importer-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer-2"},"dependentTasks":["make-name"],"inputs":{"parameters":{"metadata":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"make-name"}},"metadata-2":{"componentInputParameter":"name"},"metadata-3":{"componentInputParameter":"int_input"},"uri":{"runtimeValue":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"}}}},"taskInfo":{"name":"importer-2"}},"make-name":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-make-name"},"inputs":{"parameters":{"name":{"runtimeValue":{"constant":"a-different-name"}}}},"taskInfo":{"name":"make-name"}}}},"inputDefinitions":{"parameters":{"int_input":{"defaultValue":1,"isOptional":true,"parameterType":"NUMBER_INTEGER"},"name":{"defaultValue":"default-name","isOptional":true,"parameterType":"STRING"},"pipeline_input_artifact_uri":{"defaultValue":"gs://ml-pipeline-playground/shakespeare1.txt","isOptional":true,"parameterType":"STRING"},"pipeline_input_image_uri":{"defaultValue":"us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-5:latest","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --executor_type + - importer + - --task_spec + - '{{inputs.parameters.task}}' + - --component_spec + - '{{inputs.parameters.component}}' + - --importer_spec + - '{{inputs.parameters.importer}}' + - --pipeline_name + - pipeline-with-importer + - --run_id + - '{{workflow.uid}}' + - --parent_dag_id + - '{{inputs.parameters.parent-dag-id}}' + - --pod_name + - $(KFP_POD_NAME) + - --pod_uid + - $(KFP_POD_UID) + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - launcher-v2 + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: ghcr.io/kubeflow/kfp-launcher:latest + name: "" + resources: + limits: + cpu: 500m + 
memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: task + - name: component + - name: importer + - name: parent-dag-id + metadata: {} + name: system-importer + outputs: {} + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-importer + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: 
ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"metadata":{"componentInputParameter":"name"},"metadata-2":{"componentInputParameter":"pipeline_input_image_uri"},"uri":{"componentInputParameter":"pipeline_input_artifact_uri"}}},"taskInfo":{"name":"importer"}}' + - name: component + value: '{{workflow.parameters.components-comp-importer}}' + - name: importer + value: '{{workflow.parameters.implementations-comp-importer}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: importer + template: system-importer + - arguments: + parameters: + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer-2"},"dependentTasks":["make-name"],"inputs":{"parameters":{"metadata":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"make-name"}},"metadata-2":{"componentInputParameter":"name"},"metadata-3":{"componentInputParameter":"int_input"},"uri":{"runtimeValue":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"}}}},"taskInfo":{"name":"importer-2"}}' + - name: component + value: '{{workflow.parameters.components-comp-importer-2}}' + - name: importer + value: '{{workflow.parameters.implementations-comp-importer-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: importer-2 + template: system-importer + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-958372f93b012956585660b34fdee7e4ee56bc8305edc68a225e8d9fd0e274f2}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-make-name"},"inputs":{"parameters":{"name":{"runtimeValue":{"constant":"a-different-name"}}}},"taskInfo":{"name":"make-name"}}' + - name: container + value: '{{workflow.parameters.implementations-958372f93b012956585660b34fdee7e4ee56bc8305edc68a225e8d9fd0e274f2}}' + - name: task-name + value: make-name + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: make-name-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.make-name-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.make-name-driver.outputs.parameters.cached-decision}}' + depends: make-name-driver.Succeeded + name: make-name + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-importer + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - 
'{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"int_input":1,"name":"default-name","pipeline_input_artifact_uri":"gs://ml-pipeline-playground/shakespeare1.txt","pipeline_input_image_uri":"us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-5:latest"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_dynamic_task_output_custom_training_job.yaml b/test_data/compiled-workflows/pipeline_with_dynamic_task_output_custom_training_job.yaml new file mode 100644 index 00000000000..990d0d11544 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_dynamic_task_output_custom_training_job.yaml @@ -0,0 +1,520 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline- +spec: + arguments: + parameters: + - name: components-4ff7c4319829a8830447ba783584f0f40734ff010482a6b9b234ceda17ba214c + value: '{"executorLabel":"exec-accelerator-count","outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-4ff7c4319829a8830447ba783584f0f40734ff010482a6b9b234ceda17ba214c + value: '{"args":["--executor_input","{{$}}","--function_to_execute","accelerator_count"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + accelerator_count() -\u003e int:\n return 1\n\n"],"image":"python:3.9"}' + - name: components-2212d778fdcf0682a268c7b100e165b811b818a3b68c0bfd48dd5681e999936b + value: '{"executorLabel":"exec-accelerator-type","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-2212d778fdcf0682a268c7b100e165b811b818a3b68c0bfd48dd5681e999936b + value: '{"args":["--executor_input","{{$}}","--function_to_execute","accelerator_type"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + accelerator_type() -\u003e str:\n return ''NVIDIA_TESLA_P4''\n\n"],"image":"python:3.9"}' + - name: components-52a7e68811aea0ab261ad8816e36b43d20ed63e304bf491e77d3a1ac7b6e5871 + value: '{"executorLabel":"exec-custom-training-job","inputDefinitions":{"parameters":{"base_output_directory":{"defaultValue":"","description":"The + Cloud Storage location to store the output of this CustomJob or HyperparameterTuningJob. + See [more information ](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/GcsDestination).","isOptional":true,"parameterType":"STRING"},"display_name":{"description":"The + name of the CustomJob.","parameterType":"STRING"},"enable_web_access":{"defaultValue":false,"description":"Whether + you want Vertex AI to enable [interactive shell access ](https://cloud.google.com/vertex-ai/docs/training/monitor-debug-interactive-shell) + to training containers. If `True`, you can access interactive shells at the + URIs given by [CustomJob.web_access_uris][].","isOptional":true,"parameterType":"BOOLEAN"},"encryption_spec_key_name":{"defaultValue":"","description":"Customer-managed + encryption key options for the CustomJob. If this is set, then all resources + created by the CustomJob will be encrypted with the provided encryption key.","isOptional":true,"parameterType":"STRING"},"labels":{"defaultValue":{},"description":"The + labels with user-defined metadata to organize the CustomJob. See [more information](https://goo.gl/xmQnxf).","isOptional":true,"parameterType":"STRUCT"},"location":{"defaultValue":"us-central1","description":"Location + for creating the custom training job. 
If not set, default to us-central1.","isOptional":true,"parameterType":"STRING"},"network":{"defaultValue":"","description":"The + full name of the Compute Engine network to which the job should be peered. + For example, `projects/12345/global/networks/myVPC`. Format is of the form + `projects/{project}/global/networks/{network}`. Where `{project}` is a project + number, as in `12345`, and `{network}` is a network name. Private services + access must already be configured for the network. If left unspecified, the + job is not peered with any network.","isOptional":true,"parameterType":"STRING"},"project":{"defaultValue":"{{$.pipeline_google_cloud_project_id}}","description":"Project + to create the custom training job in. Defaults to the project in which the + PipelineJob is run.","isOptional":true,"parameterType":"STRING"},"reserved_ip_ranges":{"defaultValue":[],"description":"A + list of names for the reserved IP ranges under the VPC network that can be + used for this job. If set, we will deploy the job within the provided IP ranges. + Otherwise, the job will be deployed to any IP ranges under the provided VPC + network.","isOptional":true,"parameterType":"LIST"},"restart_job_on_worker_restart":{"defaultValue":false,"description":"Restarts + the entire CustomJob if a worker gets restarted. This feature can be used + by distributed training jobs that are not resilient to workers leaving and + joining a job.","isOptional":true,"parameterType":"BOOLEAN"},"service_account":{"defaultValue":"","description":"Sets + the default service account for workload run-as account. The [service account + ](https://cloud.google.com/vertex-ai/docs/pipelines/configure-project#service-account) + running the pipeline submitting jobs must have act-as permission on this run-as + account. If unspecified, the Vertex AI Custom Code [Service Agent ](https://cloud.google.com/vertex-ai/docs/general/access-control#service-agents) + for the CustomJob''s project.","isOptional":true,"parameterType":"STRING"},"tensorboard":{"defaultValue":"","description":"The + name of a Vertex AI TensorBoard resource to which this CustomJob will upload + TensorBoard logs.","isOptional":true,"parameterType":"STRING"},"timeout":{"defaultValue":"604800s","description":"The + maximum job running time. The default is 7 days. A duration in seconds with + up to nine fractional digits, terminated by ''s'', for example: \"3.5s\".","isOptional":true,"parameterType":"STRING"},"worker_pool_specs":{"defaultValue":[],"description":"Serialized + json spec of the worker pools including machine type and Docker image. All + worker pools except the first one are optional and can be skipped by providing + an empty value. 
See [more information](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/CustomJobSpec#WorkerPoolSpec).","isOptional":true,"parameterType":"LIST"}}},"outputDefinitions":{"parameters":{"gcp_resources":{"description":"Serialized + JSON of `gcp_resources` [proto](https://github.com/kubeflow/pipelines/tree/master/components/google-cloud/google_cloud_pipeline_components/proto) + which tracks the CustomJob.","parameterType":"STRING"}}}}' + - name: implementations-52a7e68811aea0ab261ad8816e36b43d20ed63e304bf491e77d3a1ac7b6e5871 + value: '{"args":["--type","CustomJob","--payload","{\"display_name\": \"{{$.inputs.parameters[''display_name'']}}\", + \"job_spec\": {\"worker_pool_specs\": {{$.inputs.parameters[''worker_pool_specs'']}}, + \"scheduling\": {\"timeout\": \"{{$.inputs.parameters[''timeout'']}}\", \"restart_job_on_worker_restart\": + {{$.inputs.parameters[''restart_job_on_worker_restart'']}}}, \"service_account\": + \"{{$.inputs.parameters[''service_account'']}}\", \"tensorboard\": \"{{$.inputs.parameters[''tensorboard'']}}\", + \"enable_web_access\": {{$.inputs.parameters[''enable_web_access'']}}, \"network\": + \"{{$.inputs.parameters[''network'']}}\", \"reserved_ip_ranges\": {{$.inputs.parameters[''reserved_ip_ranges'']}}, + \"base_output_directory\": {\"output_uri_prefix\": \"{{$.inputs.parameters[''base_output_directory'']}}\"}}, + \"labels\": {{$.inputs.parameters[''labels'']}}, \"encryption_spec\": {\"kms_key_name\": + \"{{$.inputs.parameters[''encryption_spec_key_name'']}}\"}}","--project","{{$.inputs.parameters[''project'']}}","--location","{{$.inputs.parameters[''location'']}}","--gcp_resources","{{$.outputs.parameters[''gcp_resources''].output_file}}"],"command":["python3","-u","-m","google_cloud_pipeline_components.container.v1.custom_job.launcher"],"image":"gcr.io/ml-pipeline/google-cloud-pipeline-components:2.14.1"}' + - name: components-cc24d59ed416d6169227e7ef954078b09337657b6eb903c1ee2afe6e0d9a5972 + value: '{"executorLabel":"exec-machine-type","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-cc24d59ed416d6169227e7ef954078b09337657b6eb903c1ee2afe6e0d9a5972 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","machine_type"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + machine_type() -\u003e str:\n return ''n1-standard-4''\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"accelerator-count":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-accelerator-count"},"taskInfo":{"name":"accelerator-count"}},"accelerator-type":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-accelerator-type"},"taskInfo":{"name":"accelerator-type"}},"custom-training-job":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-custom-training-job"},"dependentTasks":["accelerator-count","accelerator-type","machine-type"],"inputs":{"parameters":{"display_name":{"runtimeValue":{"constant":"add-numbers"}},"encryption_spec_key_name":{"componentInputParameter":"encryption_spec_key_name"},"location":{"componentInputParameter":"location"},"pipelinechannel--accelerator-count-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-count"}},"pipelinechannel--accelerator-type-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-type"}},"pipelinechannel--machine-type-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"machine-type"}},"project":{"componentInputParameter":"project"},"worker_pool_specs":{"runtimeValue":{"constant":[{"container_spec":{"args":["foo"],"command":["echo"],"image_uri":"gcr.io/ml-pipeline/google-cloud-pipeline-components:2.5.0"},"machine_spec":{"accelerator_count":"{{$.inputs.parameters[''pipelinechannel--accelerator-count-Output'']}}","accelerator_type":"{{$.inputs.parameters[''pipelinechannel--accelerator-type-Output'']}}","machine_type":"{{$.inputs.parameters[''pipelinechannel--machine-type-Output'']}}"},"replica_count":1}]}}}},"taskInfo":{"name":"custom-training-job"}},"machine-type":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-machine-type"},"taskInfo":{"name":"machine-type"}}}},"inputDefinitions":{"parameters":{"encryption_spec_key_name":{"defaultValue":"","isOptional":true,"parameterType":"STRING"},"location":{"parameterType":"STRING"},"project":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - 
'{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: 
'{{workflow.parameters.components-4ff7c4319829a8830447ba783584f0f40734ff010482a6b9b234ceda17ba214c}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-accelerator-count"},"taskInfo":{"name":"accelerator-count"}}' + - name: container + value: '{{workflow.parameters.implementations-4ff7c4319829a8830447ba783584f0f40734ff010482a6b9b234ceda17ba214c}}' + - name: task-name + value: accelerator-count + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: accelerator-count-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.accelerator-count-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.accelerator-count-driver.outputs.parameters.cached-decision}}' + depends: accelerator-count-driver.Succeeded + name: accelerator-count + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-2212d778fdcf0682a268c7b100e165b811b818a3b68c0bfd48dd5681e999936b}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-accelerator-type"},"taskInfo":{"name":"accelerator-type"}}' + - name: container + value: '{{workflow.parameters.implementations-2212d778fdcf0682a268c7b100e165b811b818a3b68c0bfd48dd5681e999936b}}' + - name: task-name + value: accelerator-type + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: accelerator-type-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.accelerator-type-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.accelerator-type-driver.outputs.parameters.cached-decision}}' + depends: accelerator-type-driver.Succeeded + name: accelerator-type + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-52a7e68811aea0ab261ad8816e36b43d20ed63e304bf491e77d3a1ac7b6e5871}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-custom-training-job"},"dependentTasks":["accelerator-count","accelerator-type","machine-type"],"inputs":{"parameters":{"display_name":{"runtimeValue":{"constant":"add-numbers"}},"encryption_spec_key_name":{"componentInputParameter":"encryption_spec_key_name"},"location":{"componentInputParameter":"location"},"pipelinechannel--accelerator-count-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-count"}},"pipelinechannel--accelerator-type-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-type"}},"pipelinechannel--machine-type-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"machine-type"}},"project":{"componentInputParameter":"project"},"worker_pool_specs":{"runtimeValue":{"constant":[{"container_spec":{"args":["foo"],"command":["echo"],"image_uri":"gcr.io/ml-pipeline/google-cloud-pipeline-components:2.5.0"},"machine_spec":{"accelerator_count":"{{$.inputs.parameters[''pipelinechannel--accelerator-count-Output'']}}","accelerator_type":"{{$.inputs.parameters[''pipelinechannel--accelerator-type-Output'']}}","machine_type":"{{$.inputs.parameters[''pipelinechannel--machine-type-Output'']}}"},"replica_count":1}]}}}},"taskInfo":{"name":"custom-training-job"}}' + - name: container + value: 
'{{workflow.parameters.implementations-52a7e68811aea0ab261ad8816e36b43d20ed63e304bf491e77d3a1ac7b6e5871}}' + - name: task-name + value: custom-training-job + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: accelerator-count.Succeeded && accelerator-type.Succeeded && machine-type.Succeeded + name: custom-training-job-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.custom-training-job-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.custom-training-job-driver.outputs.parameters.cached-decision}}' + depends: custom-training-job-driver.Succeeded + name: custom-training-job + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-cc24d59ed416d6169227e7ef954078b09337657b6eb903c1ee2afe6e0d9a5972}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-machine-type"},"taskInfo":{"name":"machine-type"}}' + - name: container + value: '{{workflow.parameters.implementations-cc24d59ed416d6169227e7ef954078b09337657b6eb903c1ee2afe6e0d9a5972}}' + - name: task-name + value: machine-type + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: machine-type-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.machine-type-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.machine-type-driver.outputs.parameters.cached-decision}}' + depends: machine-type-driver.Succeeded + name: machine-type + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: 
/tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"encryption_spec_key_name":""}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_env.yaml b/test_data/compiled-workflows/pipeline_with_env.yaml new file mode 100644 index 00000000000..993c32f31b3 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_env.yaml @@ -0,0 +1,399 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-env- +spec: + arguments: + parameters: + - name: components-507ec52d0dba96e88845f3ca614b4262d4b5b44fa204a91cd13cb60c8a78d4fe + value: '{"executorLabel":"exec-print-env"}' + - name: implementations-507ec52d0dba96e88845f3ca614b4262d4b5b44fa204a91cd13cb60c8a78d4fe + value: '{"command":["sh","-c","set -e -x\necho \"$ENV1\"\necho \"$ENV2\"\necho + \"$ENV3\"\n"],"env":[{"name":"ENV1","value":"val0"},{"name":"ENV2","value":"val2"},{"name":"ENV3","value":"val3"}],"image":"alpine"}' + - name: components-62080971c5693dd5ee364f93012c4d84ed64ae7e434d0b5b1e61b3ab408ccc53 + value: '{"executorLabel":"exec-print-env-op"}' + - name: implementations-62080971c5693dd5ee364f93012c4d84ed64ae7e434d0b5b1e61b3ab408ccc53 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_env_op"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_env_op():\n import os\n print(''ENV1'', os.environ.get(''ENV1''))\n print(''ENV2'', + os.environ.get(''ENV2''))\n\n"],"env":[{"name":"ENV1","value":"val1"}],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"print-env":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-env"},"taskInfo":{"name":"print-env"}},"print-env-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-env-op"},"taskInfo":{"name":"print-env-op"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-env + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: 
'{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-507ec52d0dba96e88845f3ca614b4262d4b5b44fa204a91cd13cb60c8a78d4fe}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-env"},"taskInfo":{"name":"print-env"}}' + - name: container + value: '{{workflow.parameters.implementations-507ec52d0dba96e88845f3ca614b4262d4b5b44fa204a91cd13cb60c8a78d4fe}}' + - name: task-name + value: print-env + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-env-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-env-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-env-driver.outputs.parameters.cached-decision}}' + depends: print-env-driver.Succeeded + name: print-env + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-62080971c5693dd5ee364f93012c4d84ed64ae7e434d0b5b1e61b3ab408ccc53}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-env-op"},"taskInfo":{"name":"print-env-op"}}' + - name: container + value: '{{workflow.parameters.implementations-62080971c5693dd5ee364f93012c4d84ed64ae7e434d0b5b1e61b3ab408ccc53}}' + - name: task-name + value: print-env-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-env-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-env-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" 
+ name: cached-decision + value: '{{tasks.print-env-op-driver.outputs.parameters.cached-decision}}' + depends: print-env-op-driver.Succeeded + name: print-env-op + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-env + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_exit_handler.yaml b/test_data/compiled-workflows/pipeline_with_exit_handler.yaml new file mode 100644 index 00000000000..60baae91e6f --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_exit_handler.yaml @@ -0,0 +1,481 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-exit-handler- +spec: + arguments: + parameters: + - name: components-de343ed68bcc74a4b31b2b5a8230f35dfb47ca8d90eabe51d37dc3849d7acecf + value: '{"executorLabel":"exec-fail-op","inputDefinitions":{"parameters":{"message":{"parameterType":"STRING"}}}}' + - name: implementations-de343ed68bcc74a4b31b2b5a8230f35dfb47ca8d90eabe51d37dc3849d7acecf + value: 
'{"args":["--executor_input","{{$}}","--function_to_execute","fail_op"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n print(message)\n sys.exit(1)\n\n"],"image":"python:3.9"}' + - name: components-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d + value: '{"executorLabel":"exec-print-op-2","inputDefinitions":{"parameters":{"message":{"parameterType":"STRING"}}}}' + - name: implementations-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_op"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n print(message)\n\n"],"image":"python:3.9"}' + - name: components-comp-exit-handler-1 + value: '{"dag":{"tasks":{"fail-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-fail-op"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"Task + failed."}}}},"taskInfo":{"name":"fail-op"}},"print-op-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"inputs":{"parameters":{"message":{"componentInputParameter":"pipelinechannel--message"}}},"taskInfo":{"name":"print-op-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--message":{"parameterType":"STRING"}}}}' + - name: components-root + value: '{"dag":{"tasks":{"exit-handler-1":{"componentRef":{"name":"comp-exit-handler-1"},"inputs":{"parameters":{"pipelinechannel--message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"exit-handler-1"}},"print-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"dependentTasks":["exit-handler-1"],"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"Exit + handler has worked!"}}}},"taskInfo":{"name":"print-op"},"triggerPolicy":{"strategy":"ALL_UPSTREAM_TASKS_COMPLETED"}}}},"inputDefinitions":{"parameters":{"message":{"defaultValue":"Hello + World!","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: 
pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-exit-handler + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + 
inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-de343ed68bcc74a4b31b2b5a8230f35dfb47ca8d90eabe51d37dc3849d7acecf}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-fail-op"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"Task + failed."}}}},"taskInfo":{"name":"fail-op"}}' + - name: container + value: '{{workflow.parameters.implementations-de343ed68bcc74a4b31b2b5a8230f35dfb47ca8d90eabe51d37dc3849d7acecf}}' + - name: task-name + value: fail-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: fail-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.fail-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.fail-op-driver.outputs.parameters.cached-decision}}' + depends: fail-op-driver.Succeeded + name: fail-op + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"inputs":{"parameters":{"message":{"componentInputParameter":"pipelinechannel--message"}}},"taskInfo":{"name":"print-op-2"}}' + - name: container + value: '{{workflow.parameters.implementations-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d}}' + - name: task-name + value: print-op-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-2-driver.outputs.parameters.cached-decision}}' + depends: print-op-2-driver.Succeeded + name: print-op-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-exit-handler-1 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"dependentTasks":["exit-handler-1"],"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"Exit + handler has worked!"}}}},"taskInfo":{"name":"print-op"},"triggerPolicy":{"strategy":"ALL_UPSTREAM_TASKS_COMPLETED"}}' + - name: container + value: '{{workflow.parameters.implementations-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d}}' + - name: task-name + value: print-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: 
'{{tasks.print-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-driver.outputs.parameters.cached-decision}}' + depends: print-op-driver.Succeeded + name: print-op + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: exit-hook-root-print-op + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-exit-handler + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-exit-handler-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-exit-handler-1"},"inputs":{"parameters":{"pipelinechannel--message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"exit-handler-1"}}' + - name: task-name + value: exit-handler-1 + name: exit-handler-1-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.exit-handler-1-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.exit-handler-1-driver.outputs.parameters.condition}}' + depends: exit-handler-1-driver.Succeeded + hooks: + exit: + arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + template: exit-hook-root-print-op + name: exit-handler-1 + template: comp-exit-handler-1 + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: 
'{"parameterValues":{"message":"Hello World!"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_google_artifact_type.yaml b/test_data/compiled-workflows/pipeline_with_google_artifact_type.yaml new file mode 100644 index 00000000000..2d9cc84e512 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_google_artifact_type.yaml @@ -0,0 +1,493 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-google-types- +spec: + arguments: + parameters: + - name: components-comp-importer + value: '{"executorLabel":"exec-importer","inputDefinitions":{"parameters":{"uri":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"artifact":{"artifactType":{"schemaTitle":"google.VertexDataset","schemaVersion":"0.0.0"}}}}}' + - name: implementations-comp-importer + value: '{"artifactUri":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"},"metadata":{"key":"value"},"typeSchema":{"schemaTitle":"google.VertexDataset","schemaVersion":"0.0.0"}}' + - name: components-294c7093bd6007beda02527c7a120966fc8e353153d9512485700a047f484f0b + value: '{"executorLabel":"exec-model-consumer","inputDefinitions":{"artifacts":{"dataset":{"artifactType":{"schemaTitle":"google.VertexDataset","schemaVersion":"0.0.0"}},"model":{"artifactType":{"schemaTitle":"google.VertexModel","schemaVersion":"0.0.0"}}}}}' + - name: implementations-294c7093bd6007beda02527c7a120966fc8e353153d9512485700a047f484f0b + value: '{"args":["--executor_input","{{$}}","--function_to_execute","model_consumer"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''aiplatform'' \u0026\u0026 python3 + -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\nfrom + aiplatform import VertexModel\nfrom aiplatform import VertexDataset\n\ndef + model_consumer(model: Input[VertexModel],\n dataset: Input[VertexDataset]):\n print(''Model'')\n print(''artifact.type: + '', type(model))\n print(''artifact.name: '', model.name)\n print(''artifact.uri: + '', model.uri)\n print(''artifact.metadata: '', model.metadata)\n\n print(''Dataset'')\n print(''artifact.type: + '', type(dataset))\n print(''artifact.name: '', dataset.name)\n print(''artifact.uri: + '', dataset.uri)\n print(''artifact.metadata: '', dataset.metadata)\n\n"],"image":"python:3.9"}' + - name: components-3d4953dfcdcfe41d72b761df76bfc76edf185ecf8415d539fd17c4c6f8f29200 + value: '{"executorLabel":"exec-model-producer","outputDefinitions":{"artifacts":{"model":{"artifactType":{"schemaTitle":"google.VertexModel","schemaVersion":"0.0.0"}}}}}' + - name: implementations-3d4953dfcdcfe41d72b761df76bfc76edf185ecf8415d539fd17c4c6f8f29200 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","model_producer"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''aiplatform'' \u0026\u0026 python3 + -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\nimport + aiplatform\n\ndef model_producer(model: Output[aiplatform.VertexModel]):\n\n assert + isinstance(model, aiplatform.VertexModel), type(model)\n with open(model.path, + ''w'') as f:\n f.write(''my model'')\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"importer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"uri":{"runtimeValue":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"}}}},"taskInfo":{"name":"importer"}},"model-consumer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-model-consumer"},"dependentTasks":["importer","model-producer"],"inputs":{"artifacts":{"dataset":{"taskOutputArtifact":{"outputArtifactKey":"artifact","producerTask":"importer"}},"model":{"taskOutputArtifact":{"outputArtifactKey":"model","producerTask":"model-producer"}}}},"taskInfo":{"name":"model-consumer"}},"model-producer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-model-producer"},"taskInfo":{"name":"model-producer"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --executor_type + - importer + - --task_spec + - '{{inputs.parameters.task}}' + - --component_spec + - '{{inputs.parameters.component}}' + - --importer_spec + - '{{inputs.parameters.importer}}' + - --pipeline_name + - pipeline-with-google-types + - --run_id + - '{{workflow.uid}}' + - --parent_dag_id + - '{{inputs.parameters.parent-dag-id}}' + - --pod_name + - $(KFP_POD_NAME) + - --pod_uid + - $(KFP_POD_UID) + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - launcher-v2 + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: ghcr.io/kubeflow/kfp-launcher:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: task + - name: component + - name: importer + - name: parent-dag-id + metadata: {} + name: system-importer + outputs: {} + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-google-types + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - 
'{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - 
emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"uri":{"runtimeValue":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"}}}},"taskInfo":{"name":"importer"}}' + - name: component + value: '{{workflow.parameters.components-comp-importer}}' + - name: importer + value: '{{workflow.parameters.implementations-comp-importer}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: importer + template: system-importer + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-294c7093bd6007beda02527c7a120966fc8e353153d9512485700a047f484f0b}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-model-consumer"},"dependentTasks":["importer","model-producer"],"inputs":{"artifacts":{"dataset":{"taskOutputArtifact":{"outputArtifactKey":"artifact","producerTask":"importer"}},"model":{"taskOutputArtifact":{"outputArtifactKey":"model","producerTask":"model-producer"}}}},"taskInfo":{"name":"model-consumer"}}' + - name: container + value: '{{workflow.parameters.implementations-294c7093bd6007beda02527c7a120966fc8e353153d9512485700a047f484f0b}}' + - name: task-name + value: model-consumer + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: importer.Succeeded && model-producer.Succeeded + name: model-consumer-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.model-consumer-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.model-consumer-driver.outputs.parameters.cached-decision}}' + depends: model-consumer-driver.Succeeded + name: model-consumer + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-3d4953dfcdcfe41d72b761df76bfc76edf185ecf8415d539fd17c4c6f8f29200}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-model-producer"},"taskInfo":{"name":"model-producer"}}' + - name: container + value: '{{workflow.parameters.implementations-3d4953dfcdcfe41d72b761df76bfc76edf185ecf8415d539fd17c4c6f8f29200}}' + - name: task-name + value: model-producer + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: model-producer-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.model-producer-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.model-producer-driver.outputs.parameters.cached-decision}}' + depends: model-producer-driver.Succeeded + name: model-producer + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-google-types + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - 
--iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_importer.yaml b/test_data/compiled-workflows/pipeline_with_importer.yaml new file mode 100644 index 00000000000..97a280496cb --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_importer.yaml @@ -0,0 +1,528 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-importer- +spec: + arguments: + parameters: + - name: components-comp-importer-2 + value: '{"executorLabel":"exec-importer-2","inputDefinitions":{"parameters":{"uri":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"artifact":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-comp-importer-2 + value: '{"artifactUri":{"runtimeParameter":"uri"},"reimport":true,"typeSchema":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}' + - name: components-af321da91af730e40295efe7bfd4833eb55c7f74f04f43f373927c69fb74e9ad + value: '{"executorLabel":"exec-train-2","inputDefinitions":{"artifacts":{"dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}},"outputDefinitions":{"artifacts":{"model":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"}}},"parameters":{"scalar":{"parameterType":"STRING"}}}}' + - name: implementations-af321da91af730e40295efe7bfd4833eb55c7f74f04f43f373927c69fb74e9ad + value: '{"args":["--executor_input","{{$}}","--function_to_execute","train"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + train(\n dataset: Input[Dataset]\n) -\u003e NamedTuple(''Outputs'', [\n (''scalar'', + str),\n (''model'', Model),\n]):\n \"\"\"Dummy Training step.\"\"\"\n with + open(dataset.path) as f:\n data = f.read()\n print(''Dataset:'', + data)\n\n scalar = ''123''\n model = f''My model trained using data: + {data}''\n\n from collections import namedtuple\n output = namedtuple(''Outputs'', + [''scalar'', ''model''])\n return output(scalar, model)\n\n"],"image":"python:3.9"}' + - name: components-comp-condition-1 + value: '{"dag":{"tasks":{"importer-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer-2"},"inputs":{"parameters":{"uri":{"componentInputParameter":"pipelinechannel--dataset2"}}},"taskInfo":{"name":"importer-2"}},"train-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-train-2"},"dependentTasks":["importer-2"],"inputs":{"artifacts":{"dataset":{"taskOutputArtifact":{"outputArtifactKey":"artifact","producerTask":"importer-2"}}}},"taskInfo":{"name":"train-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--dataset2":{"parameterType":"STRING"},"pipelinechannel--train-scalar":{"parameterType":"STRING"}}}}' + - name: components-comp-importer + value: '{"executorLabel":"exec-importer","inputDefinitions":{"parameters":{"uri":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"artifact":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-comp-importer + value: '{"artifactUri":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"},"metadata":{"key":"value"},"typeSchema":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}' + - name: components-root + value: '{"dag":{"tasks":{"condition-1":{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["train"],"inputs":{"parameters":{"pipelinechannel--dataset2":{"componentInputParameter":"dataset2"},"pipelinechannel--train-scalar":{"taskOutputParameter":{"outputParameterKey":"scalar","producerTask":"train"}}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--train-scalar''] + == ''123''"}},"importer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"uri":{"runtimeValue":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"}}}},"taskInfo":{"name":"importer"}},"train":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-train"},"dependentTasks":["importer"],"inputs":{"artifacts":{"dataset":{"taskOutputArtifact":{"outputArtifactKey":"artifact","producerTask":"importer"}}}},"taskInfo":{"name":"train"}}}},"inputDefinitions":{"parameters":{"dataset2":{"defaultValue":"gs://ml-pipeline-playground/shakespeare2.txt","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + 
pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --executor_type + - importer + - --task_spec + - '{{inputs.parameters.task}}' + - --component_spec + - '{{inputs.parameters.component}}' + - --importer_spec + - '{{inputs.parameters.importer}}' + - --pipeline_name + - pipeline-with-importer + - --run_id + - '{{workflow.uid}}' + - --parent_dag_id + - '{{inputs.parameters.parent-dag-id}}' + - --pod_name + - $(KFP_POD_NAME) + - --pod_uid + - $(KFP_POD_UID) + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - launcher-v2 + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: ghcr.io/kubeflow/kfp-launcher:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: task + - name: component + - name: importer + - name: parent-dag-id + metadata: {} + name: system-importer + outputs: {} + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-importer + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: 
"false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer-2"},"inputs":{"parameters":{"uri":{"componentInputParameter":"pipelinechannel--dataset2"}}},"taskInfo":{"name":"importer-2"}}' + - name: component + value: '{{workflow.parameters.components-comp-importer-2}}' + - name: importer + value: '{{workflow.parameters.implementations-comp-importer-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: importer-2 + template: system-importer + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-af321da91af730e40295efe7bfd4833eb55c7f74f04f43f373927c69fb74e9ad}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-train-2"},"dependentTasks":["importer-2"],"inputs":{"artifacts":{"dataset":{"taskOutputArtifact":{"outputArtifactKey":"artifact","producerTask":"importer-2"}}}},"taskInfo":{"name":"train-2"}}' + - name: container + value: '{{workflow.parameters.implementations-af321da91af730e40295efe7bfd4833eb55c7f74f04f43f373927c69fb74e9ad}}' + - name: task-name + value: train-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: importer-2.Succeeded + name: train-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.train-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.train-2-driver.outputs.parameters.cached-decision}}' + depends: train-2-driver.Succeeded + name: train-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-1 + outputs: {} + - container: + args: + - --type + - 
'{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-importer + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["train"],"inputs":{"parameters":{"pipelinechannel--dataset2":{"componentInputParameter":"dataset2"},"pipelinechannel--train-scalar":{"taskOutputParameter":{"outputParameterKey":"scalar","producerTask":"train"}}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--train-scalar''] + == ''123''"}}' + - name: task-name + value: condition-1 + depends: train.Succeeded + name: condition-1-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-1-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-1-driver.outputs.parameters.condition}}' + depends: condition-1-driver.Succeeded + name: condition-1 + template: comp-condition-1 + when: '{{tasks.condition-1-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"uri":{"runtimeValue":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"}}}},"taskInfo":{"name":"importer"}}' + - name: component + value: '{{workflow.parameters.components-comp-importer}}' + - name: importer + value: '{{workflow.parameters.implementations-comp-importer}}' + - name: parent-dag-id + value: 
'{{inputs.parameters.parent-dag-id}}' + name: importer + template: system-importer + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-af321da91af730e40295efe7bfd4833eb55c7f74f04f43f373927c69fb74e9ad}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-train"},"dependentTasks":["importer"],"inputs":{"artifacts":{"dataset":{"taskOutputArtifact":{"outputArtifactKey":"artifact","producerTask":"importer"}}}},"taskInfo":{"name":"train"}}' + - name: container + value: '{{workflow.parameters.implementations-af321da91af730e40295efe7bfd4833eb55c7f74f04f43f373927c69fb74e9ad}}' + - name: task-name + value: train + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: importer.Succeeded + name: train-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.train-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.train-driver.outputs.parameters.cached-decision}}' + depends: train-driver.Succeeded + name: train + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"dataset2":"gs://ml-pipeline-playground/shakespeare2.txt"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_importer_and_gcpc_types.yaml b/test_data/compiled-workflows/pipeline_with_importer_and_gcpc_types.yaml new file mode 100644 index 00000000000..52ffb58ce52 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_importer_and_gcpc_types.yaml @@ -0,0 +1,438 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-importer-and-gcpc-type- +spec: + arguments: + parameters: + - name: components-9422f7f39ffcd22b988c9b7896b4fe8a0944c019f8cfb5ddd8f1b1ef8f5dbe18 + value: '{"executorLabel":"exec-consumer-op","inputDefinitions":{"artifacts":{"dataset":{"artifactType":{"schemaTitle":"google.VertexDataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-9422f7f39ffcd22b988c9b7896b4fe8a0944c019f8cfb5ddd8f1b1ef8f5dbe18 + value: '{"args":["{{$.inputs.artifacts[''dataset''].path}}"],"command":["cmd"],"image":"dummy"}' + - name: components-comp-importer + value: '{"executorLabel":"exec-importer","inputDefinitions":{"parameters":{"uri":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"artifact":{"artifactType":{"schemaTitle":"google.VertexDataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-comp-importer + value: '{"artifactUri":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"},"metadata":{"key":"value"},"typeSchema":{"schemaTitle":"google.VertexDataset","schemaVersion":"0.0.1"}}' + - name: components-root + value: 
'{"dag":{"tasks":{"consumer-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-consumer-op"},"dependentTasks":["importer"],"inputs":{"artifacts":{"dataset":{"taskOutputArtifact":{"outputArtifactKey":"artifact","producerTask":"importer"}}}},"taskInfo":{"name":"consumer-op"}},"importer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"uri":{"runtimeValue":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"}}}},"taskInfo":{"name":"importer"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-importer-and-gcpc-type + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: 
gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - container: + args: + - --executor_type + - importer + - --task_spec + - '{{inputs.parameters.task}}' + - --component_spec + - '{{inputs.parameters.component}}' + - --importer_spec + - '{{inputs.parameters.importer}}' + - --pipeline_name + - pipeline-with-importer-and-gcpc-type + - --run_id + - '{{workflow.uid}}' + - --parent_dag_id + - '{{inputs.parameters.parent-dag-id}}' + - --pod_name + - $(KFP_POD_NAME) + - --pod_uid + - $(KFP_POD_UID) + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - launcher-v2 + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: ghcr.io/kubeflow/kfp-launcher:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: task + - name: component + - name: importer + - name: parent-dag-id + metadata: {} + name: system-importer + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-9422f7f39ffcd22b988c9b7896b4fe8a0944c019f8cfb5ddd8f1b1ef8f5dbe18}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-consumer-op"},"dependentTasks":["importer"],"inputs":{"artifacts":{"dataset":{"taskOutputArtifact":{"outputArtifactKey":"artifact","producerTask":"importer"}}}},"taskInfo":{"name":"consumer-op"}}' + - name: container + value: '{{workflow.parameters.implementations-9422f7f39ffcd22b988c9b7896b4fe8a0944c019f8cfb5ddd8f1b1ef8f5dbe18}}' + - name: task-name + value: consumer-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: importer.Succeeded + name: consumer-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.consumer-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.consumer-op-driver.outputs.parameters.cached-decision}}' + depends: consumer-op-driver.Succeeded + name: consumer-op + template: system-container-executor + - arguments: + parameters: + - name: task + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"uri":{"runtimeValue":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"}}}},"taskInfo":{"name":"importer"}}' + - name: component + value: '{{workflow.parameters.components-comp-importer}}' + - name: importer + value: '{{workflow.parameters.implementations-comp-importer}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: importer + template: system-importer + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-importer-and-gcpc-type + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_input_status_state.yaml b/test_data/compiled-workflows/pipeline_with_input_status_state.yaml new file mode 100644 index 00000000000..5e2ae202d7e --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_input_status_state.yaml @@ -0,0 +1,455 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: status-state-pipeline- +spec: + 
arguments: + parameters: + - name: components-ad5c8365040a29a8273ef9e77f6fa5ace6c5a32f68feaac6a452c8a7c214c21f + value: '{"executorLabel":"exec-echo-state","inputDefinitions":{"parameters":{"status":{"isOptional":true,"parameterType":"TASK_FINAL_STATUS"}}}}' + - name: implementations-ad5c8365040a29a8273ef9e77f6fa5ace6c5a32f68feaac6a452c8a7c214c21f + value: '{"args":["--executor_input","{{$}}","--function_to_execute","echo_state"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + echo_state(status: dsl.PipelineTaskFinalStatus):\n assert(status.state + == ''COMPLETE'')\n assert(''status-state-pipeline'' in status.pipeline_job_resource_name)\n assert(status.pipeline_task_name + == ''exit-handler-1'')\n #TODO: Add assert statements to validate status.error_code + and status.error_message values once those fields have been implemented.\n\n"],"image":"python:3.9"}' + - name: components-72cacb6e63f200dc565307f11e5792a0bcc013519ea2a8d43b76245d33942566 + value: '{"executorLabel":"exec-some-task"}' + - name: implementations-72cacb6e63f200dc565307f11e5792a0bcc013519ea2a8d43b76245d33942566 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","some_task"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + some_task():\n print(''Executing some_task()...'')\n\n"],"image":"python:3.9"}' + - name: components-comp-exit-handler-1 + value: '{"dag":{"tasks":{"some-task":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-some-task"},"taskInfo":{"name":"some-task"}}}}}' + - name: components-root + value: '{"dag":{"tasks":{"echo-state":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-echo-state"},"dependentTasks":["exit-handler-1"],"inputs":{"parameters":{"status":{"taskFinalStatus":{"producerTask":"exit-handler-1"}}}},"taskInfo":{"name":"echo-state"},"triggerPolicy":{"strategy":"ALL_UPSTREAM_TASKS_COMPLETED"}},"exit-handler-1":{"componentRef":{"name":"comp-exit-handler-1"},"taskInfo":{"name":"exit-handler-1"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - status-state-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + 
valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-72cacb6e63f200dc565307f11e5792a0bcc013519ea2a8d43b76245d33942566}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-some-task"},"taskInfo":{"name":"some-task"}}' + - name: container + value: '{{workflow.parameters.implementations-72cacb6e63f200dc565307f11e5792a0bcc013519ea2a8d43b76245d33942566}}' + - name: task-name + value: some-task + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: some-task-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.some-task-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.some-task-driver.outputs.parameters.cached-decision}}' + depends: some-task-driver.Succeeded + name: some-task + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-exit-handler-1 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ad5c8365040a29a8273ef9e77f6fa5ace6c5a32f68feaac6a452c8a7c214c21f}}' + - name: task + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-echo-state"},"dependentTasks":["exit-handler-1"],"inputs":{"parameters":{"status":{"taskFinalStatus":{"producerTask":"exit-handler-1"}}}},"taskInfo":{"name":"echo-state"},"triggerPolicy":{"strategy":"ALL_UPSTREAM_TASKS_COMPLETED"}}' + - name: container + value: '{{workflow.parameters.implementations-ad5c8365040a29a8273ef9e77f6fa5ace6c5a32f68feaac6a452c8a7c214c21f}}' + - name: task-name + value: echo-state + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: echo-state-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.echo-state-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.echo-state-driver.outputs.parameters.cached-decision}}' + depends: echo-state-driver.Succeeded + name: echo-state + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: exit-hook-root-echo-state + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - status-state-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-exit-handler-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-exit-handler-1"},"taskInfo":{"name":"exit-handler-1"}}' + - name: task-name + value: exit-handler-1 + name: exit-handler-1-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.exit-handler-1-driver.outputs.parameters.execution-id}}' + - 
name: condition + value: '{{tasks.exit-handler-1-driver.outputs.parameters.condition}}' + depends: exit-handler-1-driver.Succeeded + hooks: + exit: + arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + template: exit-hook-root-echo-state + name: exit-handler-1 + template: comp-exit-handler-1 + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_loops.yaml b/test_data/compiled-workflows/pipeline_with_loops.yaml new file mode 100644 index 00000000000..f38375437cf --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_loops.yaml @@ -0,0 +1,793 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-loops- +spec: + arguments: + parameters: + - name: components-16999c93ab894bbf74840a65c88ede65c740b966f18912c02bc4c89745879c66 + value: '{"executorLabel":"exec-args-generator-op","outputDefinitions":{"parameters":{"Output":{"parameterType":"LIST"}}}}' + - name: implementations-16999c93ab894bbf74840a65c88ede65c740b966f18912c02bc4c89745879c66 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","args_generator_op"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + args_generator_op() -\u003e List[Dict[str, str]]:\n return [{''A_a'': ''1'', + ''B_b'': ''2''}, {''A_a'': ''10'', ''B_b'': ''20''}]\n\n"],"image":"python:3.9"}' + - name: components-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8 + value: '{"executorLabel":"exec-print-text","inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"}}}}' + - name: implementations-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_text"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_text(msg: str):\n print(msg)\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-1 + value: '{"dag":{"tasks":{"print-text":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"}}},"taskInfo":{"name":"print-text"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop_parameter":{"parameterType":"LIST"},"pipelinechannel--loop_parameter-loop-item":{"parameterType":"STRING"}}}}' + - name: components-7872878deb112caffddcb40890e034f7c4e309067293f861e38dce761812e890 + value: '{"executorLabel":"exec-print-struct","inputDefinitions":{"parameters":{"struct":{"parameterType":"STRUCT"}}}}' + - name: implementations-7872878deb112caffddcb40890e034f7c4e309067293f861e38dce761812e890 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_struct"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_struct(struct: Dict):\n print(struct)\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-2 + value: '{"dag":{"tasks":{"print-struct":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-struct"},"inputs":{"parameters":{"struct":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"}}},"taskInfo":{"name":"print-struct"}},"print-text-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-2"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"}}},"taskInfo":{"name":"print-text-2"}},"print-text-3":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-3"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"taskInfo":{"name":"print-text-3"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--args-generator-op-Output":{"parameterType":"LIST"},"pipelinechannel--args-generator-op-Output-loop-item":{"parameterType":"STRUCT"}}}}' + - name: 
components-comp-for-loop-4 + value: '{"dag":{"tasks":{"print-struct-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-struct-2"},"inputs":{"parameters":{"struct":{"componentInputParameter":"pipelinechannel--loop-item-param-3"}}},"taskInfo":{"name":"print-struct-2"}},"print-text-4":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-4"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-3","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"}}},"taskInfo":{"name":"print-text-4"}},"print-text-5":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-5"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-3","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"taskInfo":{"name":"print-text-5"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-3":{"parameterType":"STRUCT"}}}}' + - name: components-root + value: '{"dag":{"tasks":{"args-generator-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-args-generator-op"},"taskInfo":{"name":"args-generator-op"}},"for-loop-1":{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-1"}},"for-loop-2":{"componentRef":{"name":"comp-for-loop-2"},"dependentTasks":["args-generator-op"],"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"args-generator-op"}}}},"parameterIterator":{"itemInput":"pipelinechannel--args-generator-op-Output-loop-item","items":{"inputParameter":"pipelinechannel--args-generator-op-Output"}},"taskInfo":{"name":"for-loop-2"}},"for-loop-4":{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[{\"A_a\": + \"1\", \"B_b\": \"2\"}, {\"A_a\": \"10\", \"B_b\": \"20\"}]"}},"taskInfo":{"name":"for-loop-4"}}}},"inputDefinitions":{"parameters":{"loop_parameter":{"parameterType":"LIST"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-loops + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - 
--metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"}}},"taskInfo":{"name":"print-text"}}' + - name: container + value: '{{workflow.parameters.implementations-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task-name + value: print-text + - name: 
parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-text-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-driver.outputs.parameters.cached-decision}}' + depends: print-text-driver.Succeeded + name: print-text + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-1 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-7872878deb112caffddcb40890e034f7c4e309067293f861e38dce761812e890}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-struct"},"inputs":{"parameters":{"struct":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"}}},"taskInfo":{"name":"print-struct"}}' + - name: container + value: '{{workflow.parameters.implementations-7872878deb112caffddcb40890e034f7c4e309067293f861e38dce761812e890}}' + - name: task-name + value: print-struct + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-struct-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-struct-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-struct-driver.outputs.parameters.cached-decision}}' + depends: print-struct-driver.Succeeded + name: print-struct + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-2"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"}}},"taskInfo":{"name":"print-text-2"}}' + - name: container + value: '{{workflow.parameters.implementations-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task-name + value: print-text-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-text-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-2-driver.outputs.parameters.cached-decision}}' + depends: print-text-2-driver.Succeeded + name: print-text-2 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-3"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"taskInfo":{"name":"print-text-3"}}' + - name: container + value: '{{workflow.parameters.implementations-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task-name + value: print-text-3 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + 
name: print-text-3-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-3-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-3-driver.outputs.parameters.cached-decision}}' + depends: print-text-3-driver.Succeeded + name: print-text-3 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-7872878deb112caffddcb40890e034f7c4e309067293f861e38dce761812e890}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-struct-2"},"inputs":{"parameters":{"struct":{"componentInputParameter":"pipelinechannel--loop-item-param-3"}}},"taskInfo":{"name":"print-struct-2"}}' + - name: container + value: '{{workflow.parameters.implementations-7872878deb112caffddcb40890e034f7c4e309067293f861e38dce761812e890}}' + - name: task-name + value: print-struct-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-struct-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-struct-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-struct-2-driver.outputs.parameters.cached-decision}}' + depends: print-struct-2-driver.Succeeded + name: print-struct-2 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-4"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-3","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"}}},"taskInfo":{"name":"print-text-4"}}' + - name: container + value: '{{workflow.parameters.implementations-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task-name + value: print-text-4 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-text-4-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-4-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-4-driver.outputs.parameters.cached-decision}}' + depends: print-text-4-driver.Succeeded + name: print-text-4 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-5"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-3","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"taskInfo":{"name":"print-text-5"}}' + - name: container + value: '{{workflow.parameters.implementations-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task-name + value: print-text-5 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-text-5-driver + template: system-container-driver + - arguments: + parameters: 
+ - name: pod-spec-patch + value: '{{tasks.print-text-5-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-5-driver.outputs.parameters.cached-decision}}' + depends: print-text-5-driver.Succeeded + name: print-text-5 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-4 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-loops + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-1}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-1"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-1 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-1-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: 
component + value: '{{workflow.parameters.components-comp-for-loop-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-1"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-1-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-1-for-loop-1-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"dependentTasks":["args-generator-op"],"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"args-generator-op"}}}},"parameterIterator":{"itemInput":"pipelinechannel--args-generator-op-Output-loop-item","items":{"inputParameter":"pipelinechannel--args-generator-op-Output"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-2 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-2-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"dependentTasks":["args-generator-op"],"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"args-generator-op"}}}},"parameterIterator":{"itemInput":"pipelinechannel--args-generator-op-Output-loop-item","items":{"inputParameter":"pipelinechannel--args-generator-op-Output"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-2-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2-for-loop-2-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: 
'{{workflow.parameters.components-comp-for-loop-4}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[{\"A_a\": + \"1\", \"B_b\": \"2\"}, {\"A_a\": \"10\", \"B_b\": \"20\"}]"}},"taskInfo":{"name":"for-loop-4"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-4 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-4-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-4}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[{\"A_a\": + \"1\", \"B_b\": \"2\"}, {\"A_a\": \"10\", \"B_b\": \"20\"}]"}},"taskInfo":{"name":"for-loop-4"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-4-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-4-for-loop-4-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-16999c93ab894bbf74840a65c88ede65c740b966f18912c02bc4c89745879c66}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-args-generator-op"},"taskInfo":{"name":"args-generator-op"}}' + - name: container + value: '{{workflow.parameters.implementations-16999c93ab894bbf74840a65c88ede65c740b966f18912c02bc4c89745879c66}}' + - name: task-name + value: args-generator-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: args-generator-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.args-generator-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.args-generator-op-driver.outputs.parameters.cached-decision}}' + depends: args-generator-op-driver.Succeeded + name: args-generator-op + template: system-container-executor + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-1 + template: comp-for-loop-1-for-loop-1-iterator + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: args-generator-op.Succeeded + name: for-loop-2 + template: comp-for-loop-2-for-loop-2-iterator + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-4 + template: comp-for-loop-4-for-loop-4-iterator + inputs: 
+ parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_loops_and_conditions.yaml b/test_data/compiled-workflows/pipeline_with_loops_and_conditions.yaml new file mode 100644 index 00000000000..febd438222a --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_loops_and_conditions.yaml @@ -0,0 +1,1513 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-loops-and-conditions-multi-layers- +spec: + arguments: + parameters: + - name: components-efceadb4fda65c2cf2180d4b55f8552bb47943172d073cf46fae6237e4d5c873 + value: '{"executorLabel":"exec-args-generator-op","outputDefinitions":{"parameters":{"Output":{"parameterType":"LIST"}}}}' + - name: implementations-efceadb4fda65c2cf2180d4b55f8552bb47943172d073cf46fae6237e4d5c873 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","args_generator_op"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + args_generator_op() -\u003e list:\n return [\n {\n ''A_a'': + ''1'',\n ''B_b'': [''2'', ''20''],\n },\n {\n ''A_a'': + ''10'',\n ''B_b'': [''22'', ''222''],\n },\n ]\n\n"],"image":"python:3.9"}' + - name: components-5cc139171a458f30e91631e3d693f08f057723d3c0c51040446f26fb0b5fd6c8 + value: '{"executorLabel":"exec-print-text-2","inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"},"msg2":{"isOptional":true,"parameterType":"STRING"}}}}' + - name: implementations-5cc139171a458f30e91631e3d693f08f057723d3c0c51040446f26fb0b5fd6c8 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_text"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_text(msg: str, msg2: Optional[str] = None):\n print(f''msg: {msg}, + msg2: {msg2}'')\n\n"],"image":"python:3.9"}' + - name: components-comp-condition-3 + value: '{"dag":{"tasks":{"print-text-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-2"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"taskInfo":{"name":"print-text-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"parameterType":"STRING"},"pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a":{"parameterType":"STRING"},"pipelinechannel--flip-coin-op-Output":{"parameterType":"STRING"}}}}' + - name: components-comp-condition-4 + value: '{"dag":{"tasks":{"print-text-3":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-3"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"taskInfo":{"name":"print-text-3"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"parameterType":"STRING"},"pipelinechannel--flip-coin-op-Output":{"parameterType":"STRING"}}}}' + - name: components-ce00364eb1c3a39a23f7aede7572897a04347337501043c04e984fc1c1768cce + value: '{"executorLabel":"exec-print-struct","inputDefinitions":{"parameters":{"struct":{"parameterType":"STRUCT"}}}}' + - name: implementations-ce00364eb1c3a39a23f7aede7572897a04347337501043c04e984fc1c1768cce + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_struct"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_struct(struct: dict):\n print(struct)\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-7 + value: '{"dag":{"tasks":{"print-struct":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-struct"},"inputs":{"parameters":{"struct":{"componentInputParameter":"pipelinechannel--loop-item-param-6"}}},"taskInfo":{"name":"print-struct"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"parameterType":"STRING"},"pipelinechannel--flip-coin-op-Output":{"parameterType":"STRING"},"pipelinechannel--loop-item-param-6":{"parameterType":"STRUCT"}}}}' + - name: components-comp-condition-5 + value: '{"dag":{"tasks":{"for-loop-7":{"componentRef":{"name":"comp-for-loop-7"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-6","items":{"raw":"[{\"a\": + \"-1\"}, {\"a\": \"-2\"}]"}},"taskInfo":{"name":"for-loop-7"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"parameterType":"STRING"},"pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a":{"parameterType":"STRING"},"pipelinechannel--flip-coin-op-Output":{"parameterType":"STRING"}}}}' + - name: components-comp-condition-13 + value: '{"dag":{"tasks":{"print-text-8":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-8"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"1"}}}},"taskInfo":{"name":"print-text-8"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-op-Output":{"parameterType":"STRING"},"pipelinechannel--loop-item-param-11":{"parameterType":"STRING"}}}}' + - name: components-comp-for-loop-12 + value: '{"dag":{"tasks":{"condition-13":{"componentRef":{"name":"comp-condition-13"},"inputs":{"parameters":{"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"},"pipelinechannel--loop-item-param-11":{"componentInputParameter":"pipelinechannel--loop-item-param-11"}}},"taskInfo":{"name":"condition-13"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--loop-item-param-11''] + == ''1''"}},"print-text-7":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-7"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-11"}}},"taskInfo":{"name":"print-text-7"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-op-Output":{"parameterType":"STRING"},"pipelinechannel--loop-item-param-11":{"parameterType":"STRING"}}}}' + - name: components-comp-for-loop-8 + value: 
'{"dag":{"tasks":{"print-text-4":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-4"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b-loop-item"}}},"taskInfo":{"name":"print-text-4"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"parameterType":"STRING"},"pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b":{"parameterType":"STRING"},"pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b-loop-item":{"parameterType":"STRING"},"pipelinechannel--flip-coin-op-Output":{"parameterType":"STRING"}}}}' + - name: components-comp-for-loop-10 + value: '{"dag":{"tasks":{"print-text-6":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-6"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"},"msg2":{"componentInputParameter":"pipelinechannel--args-generator-op-2-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"}}},"taskInfo":{"name":"print-text-6"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--args-generator-op-2-Output":{"parameterType":"LIST"},"pipelinechannel--args-generator-op-2-Output-loop-item":{"parameterType":"STRING"},"pipelinechannel--flip-coin-op-Output":{"parameterType":"STRING"},"pipelinechannel--loop_parameter-loop-item":{"parameterType":"STRING"}}}}' + - name: components-comp-for-loop-9 + value: '{"dag":{"tasks":{"for-loop-10":{"componentRef":{"name":"comp-for-loop-10"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-2-Output":{"componentInputParameter":"pipelinechannel--args-generator-op-2-Output"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"},"pipelinechannel--loop_parameter-loop-item":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"}}},"parameterIterator":{"itemInput":"pipelinechannel--args-generator-op-2-Output-loop-item","items":{"inputParameter":"pipelinechannel--args-generator-op-2-Output"}},"taskInfo":{"name":"for-loop-10"}},"print-text-5":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-5"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"}}},"taskInfo":{"name":"print-text-5"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--args-generator-op-2-Output":{"parameterType":"LIST"},"pipelinechannel--flip-coin-op-Output":{"parameterType":"STRING"},"pipelinechannel--loop_parameter":{"parameterType":"LIST"},"pipelinechannel--loop_parameter-loop-item":{"parameterType":"STRING"}}}}' + - name: components-comp-for-loop-2 + value: '{"dag":{"tasks":{"condition-3":{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"},"pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a''] + == 
''heads''"}},"condition-4":{"componentRef":{"name":"comp-condition-4"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"taskInfo":{"name":"condition-4"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-op-Output''] + == ''heads''"}},"condition-5":{"componentRef":{"name":"comp-condition-5"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"},"pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"taskInfo":{"name":"condition-5"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a''] + == ''tails''"}},"for-loop-12":{"componentRef":{"name":"comp-for-loop-12"},"inputs":{"parameters":{"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-11","items":{"raw":"[\"1\", + \"2\"]"}},"taskInfo":{"name":"for-loop-12"}},"for-loop-8":{"componentRef":{"name":"comp-for-loop-8"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"},"pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"parameterIterator":{"itemInput":"pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b-loop-item","items":{"inputParameter":"pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b"}},"taskInfo":{"name":"for-loop-8"}},"for-loop-9":{"componentRef":{"name":"comp-for-loop-9"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-2-Output":{"componentInputParameter":"pipelinechannel--args-generator-op-2-Output"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"},"pipelinechannel--loop_parameter":{"componentInputParameter":"pipelinechannel--loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-9"}},"print-text":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--msg"}}},"taskInfo":{"name":"print-text"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--args-generator-op-2-Output":{"parameterType":"LIST"},"pipelinechannel--args-generator-op-Output":{"parameterType":"LIST"},"pipelinechannel--args-generator-op-Output-loop-item":{"parameterType":"STRING"},"pipelinechannel--flip-coin-op-Output":{"parameterType":"STRING"},"pipelinechannel--loop_parameter":{"parameterType":"LIST"},"pipelinechannel--msg":{"parameterType":"STRING"}}}}' + - name: 
components-comp-condition-1 + value: '{"dag":{"tasks":{"args-generator-op-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-args-generator-op-2"},"taskInfo":{"name":"args-generator-op-2"}},"for-loop-2":{"componentRef":{"name":"comp-for-loop-2"},"dependentTasks":["args-generator-op-2"],"inputs":{"parameters":{"pipelinechannel--args-generator-op-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"args-generator-op-2"}},"pipelinechannel--args-generator-op-Output":{"componentInputParameter":"pipelinechannel--args-generator-op-Output"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"},"pipelinechannel--loop_parameter":{"componentInputParameter":"pipelinechannel--loop_parameter"},"pipelinechannel--msg":{"componentInputParameter":"pipelinechannel--msg"}}},"parameterIterator":{"itemInput":"pipelinechannel--args-generator-op-Output-loop-item","items":{"inputParameter":"pipelinechannel--args-generator-op-Output"}},"taskInfo":{"name":"for-loop-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--args-generator-op-Output":{"parameterType":"LIST"},"pipelinechannel--flip-coin-op-Output":{"parameterType":"STRING"},"pipelinechannel--loop_parameter":{"parameterType":"LIST"},"pipelinechannel--msg":{"parameterType":"STRING"}}}}' + - name: components-74c752241e4386e530a02be0534451962c07a3c96d9c8f0a3b3433cad25b0339 + value: '{"executorLabel":"exec-flip-coin-op","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-74c752241e4386e530a02be0534451962c07a3c96d9c8f0a3b3433cad25b0339 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","flip_coin_op"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + flip_coin_op() -\u003e str:\n \"\"\"Flip a coin and output heads or tails + randomly.\"\"\"\n import random\n result = ''heads'' if random.randint(0, + 1) == 0 else ''tails''\n return result\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-16 + value: '{"dag":{"tasks":{"print-text-9":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-9"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item-subvar-B_b-loop-item"}}},"taskInfo":{"name":"print-text-9"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop_parameter-loop-item":{"parameterType":"STRING"},"pipelinechannel--loop_parameter-loop-item-subvar-B_b":{"parameterType":"STRING"},"pipelinechannel--loop_parameter-loop-item-subvar-B_b-loop-item":{"parameterType":"STRING"}}}}' + - name: components-comp-condition-15 + value: 
'{"dag":{"tasks":{"for-loop-16":{"componentRef":{"name":"comp-for-loop-16"},"inputs":{"parameters":{"pipelinechannel--loop_parameter-loop-item":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"},"pipelinechannel--loop_parameter-loop-item-subvar-B_b":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item-subvar-B_b-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter-loop-item-subvar-B_b"}},"taskInfo":{"name":"for-loop-16"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop_parameter-loop-item":{"parameterType":"STRING"},"pipelinechannel--loop_parameter-loop-item-subvar-A_a":{"parameterType":"STRING"}}}}' + - name: components-comp-for-loop-14 + value: '{"dag":{"tasks":{"condition-15":{"componentRef":{"name":"comp-condition-15"},"inputs":{"parameters":{"pipelinechannel--loop_parameter-loop-item":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"},"pipelinechannel--loop_parameter-loop-item-subvar-A_a":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"}}},"taskInfo":{"name":"condition-15"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--loop_parameter-loop-item-subvar-A_a''] + == ''heads''"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop_parameter":{"parameterType":"LIST"},"pipelinechannel--loop_parameter-loop-item":{"parameterType":"STRING"}}}}' + - name: components-root + value: '{"dag":{"tasks":{"args-generator-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-args-generator-op"},"taskInfo":{"name":"args-generator-op"}},"condition-1":{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["args-generator-op","flip-coin-op"],"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"args-generator-op"}},"pipelinechannel--flip-coin-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op"}},"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"},"pipelinechannel--msg":{"componentInputParameter":"msg"}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-op-Output''] + != ''no-such-result''"}},"flip-coin-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-coin-op"},"taskInfo":{"name":"flip-coin-op"}},"for-loop-14":{"componentRef":{"name":"comp-for-loop-14"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-14"}}}},"inputDefinitions":{"parameters":{"loop_parameter":{"defaultValue":[{"A_a":"heads","B_b":["A","B"]},{"A_a":"tails","B_b":["X","Y","Z"]}],"isOptional":true,"parameterType":"LIST"},"msg":{"defaultValue":"hello","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - 
pipeline-with-loops-and-conditions-multi-layers + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: 
system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-5cc139171a458f30e91631e3d693f08f057723d3c0c51040446f26fb0b5fd6c8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-2"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"taskInfo":{"name":"print-text-2"}}' + - name: container + value: '{{workflow.parameters.implementations-5cc139171a458f30e91631e3d693f08f057723d3c0c51040446f26fb0b5fd6c8}}' + - name: task-name + value: print-text-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-text-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-2-driver.outputs.parameters.cached-decision}}' + depends: print-text-2-driver.Succeeded + name: print-text-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-3 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-5cc139171a458f30e91631e3d693f08f057723d3c0c51040446f26fb0b5fd6c8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-3"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"taskInfo":{"name":"print-text-3"}}' + - name: container + value: '{{workflow.parameters.implementations-5cc139171a458f30e91631e3d693f08f057723d3c0c51040446f26fb0b5fd6c8}}' + - name: task-name + value: print-text-3 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-text-3-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-3-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-3-driver.outputs.parameters.cached-decision}}' + depends: print-text-3-driver.Succeeded + name: print-text-3 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-4 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ce00364eb1c3a39a23f7aede7572897a04347337501043c04e984fc1c1768cce}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-struct"},"inputs":{"parameters":{"struct":{"componentInputParameter":"pipelinechannel--loop-item-param-6"}}},"taskInfo":{"name":"print-struct"}}' + - name: container + value: '{{workflow.parameters.implementations-ce00364eb1c3a39a23f7aede7572897a04347337501043c04e984fc1c1768cce}}' + - name: task-name + value: print-struct + - name: parent-dag-id + value: 
'{{inputs.parameters.parent-dag-id}}' + name: print-struct-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-struct-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-struct-driver.outputs.parameters.cached-decision}}' + depends: print-struct-driver.Succeeded + name: print-struct + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-7 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-loops-and-conditions-multi-layers + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-7}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-7"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-6","items":{"raw":"[{\"a\": + \"-1\"}, {\"a\": \"-2\"}]"}},"taskInfo":{"name":"for-loop-7"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: 
'{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-7 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-7-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-7}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-7"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-6","items":{"raw":"[{\"a\": + \"-1\"}, {\"a\": \"-2\"}]"}},"taskInfo":{"name":"for-loop-7"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-7-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-7-for-loop-7-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-7 + template: comp-for-loop-7-for-loop-7-iterator + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-5 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-5cc139171a458f30e91631e3d693f08f057723d3c0c51040446f26fb0b5fd6c8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-8"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"1"}}}},"taskInfo":{"name":"print-text-8"}}' + - name: container + value: '{{workflow.parameters.implementations-5cc139171a458f30e91631e3d693f08f057723d3c0c51040446f26fb0b5fd6c8}}' + - name: task-name + value: print-text-8 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-text-8-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-8-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-8-driver.outputs.parameters.cached-decision}}' + depends: print-text-8-driver.Succeeded + name: print-text-8 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-13 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-13}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: 
'{"componentRef":{"name":"comp-condition-13"},"inputs":{"parameters":{"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"},"pipelinechannel--loop-item-param-11":{"componentInputParameter":"pipelinechannel--loop-item-param-11"}}},"taskInfo":{"name":"condition-13"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--loop-item-param-11''] + == ''1''"}}' + - name: task-name + value: condition-13 + name: condition-13-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-13-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-13-driver.outputs.parameters.condition}}' + depends: condition-13-driver.Succeeded + name: condition-13 + template: comp-condition-13 + when: '{{tasks.condition-13-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-5cc139171a458f30e91631e3d693f08f057723d3c0c51040446f26fb0b5fd6c8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-7"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-11"}}},"taskInfo":{"name":"print-text-7"}}' + - name: container + value: '{{workflow.parameters.implementations-5cc139171a458f30e91631e3d693f08f057723d3c0c51040446f26fb0b5fd6c8}}' + - name: task-name + value: print-text-7 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-text-7-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-7-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-7-driver.outputs.parameters.cached-decision}}' + depends: print-text-7-driver.Succeeded + name: print-text-7 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-12 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-5cc139171a458f30e91631e3d693f08f057723d3c0c51040446f26fb0b5fd6c8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-4"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b-loop-item"}}},"taskInfo":{"name":"print-text-4"}}' + - name: container + value: '{{workflow.parameters.implementations-5cc139171a458f30e91631e3d693f08f057723d3c0c51040446f26fb0b5fd6c8}}' + - name: task-name + value: print-text-4 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-text-4-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-4-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-4-driver.outputs.parameters.cached-decision}}' + depends: print-text-4-driver.Succeeded + name: print-text-4 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-8 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-5cc139171a458f30e91631e3d693f08f057723d3c0c51040446f26fb0b5fd6c8}}' + - name: task + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-6"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"},"msg2":{"componentInputParameter":"pipelinechannel--args-generator-op-2-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"}}},"taskInfo":{"name":"print-text-6"}}' + - name: container + value: '{{workflow.parameters.implementations-5cc139171a458f30e91631e3d693f08f057723d3c0c51040446f26fb0b5fd6c8}}' + - name: task-name + value: print-text-6 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-text-6-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-6-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-6-driver.outputs.parameters.cached-decision}}' + depends: print-text-6-driver.Succeeded + name: print-text-6 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-10 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-10}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-10"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-2-Output":{"componentInputParameter":"pipelinechannel--args-generator-op-2-Output"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"},"pipelinechannel--loop_parameter-loop-item":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"}}},"parameterIterator":{"itemInput":"pipelinechannel--args-generator-op-2-Output-loop-item","items":{"inputParameter":"pipelinechannel--args-generator-op-2-Output"}},"taskInfo":{"name":"for-loop-10"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-10 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-10-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-10}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-10"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-2-Output":{"componentInputParameter":"pipelinechannel--args-generator-op-2-Output"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"},"pipelinechannel--loop_parameter-loop-item":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"}}},"parameterIterator":{"itemInput":"pipelinechannel--args-generator-op-2-Output-loop-item","items":{"inputParameter":"pipelinechannel--args-generator-op-2-Output"}},"taskInfo":{"name":"for-loop-10"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: 
'{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-10-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-10-for-loop-10-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-10 + template: comp-for-loop-10-for-loop-10-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-5cc139171a458f30e91631e3d693f08f057723d3c0c51040446f26fb0b5fd6c8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-5"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"}}},"taskInfo":{"name":"print-text-5"}}' + - name: container + value: '{{workflow.parameters.implementations-5cc139171a458f30e91631e3d693f08f057723d3c0c51040446f26fb0b5fd6c8}}' + - name: task-name + value: print-text-5 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-text-5-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-5-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-5-driver.outputs.parameters.cached-decision}}' + depends: print-text-5-driver.Succeeded + name: print-text-5 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-9 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-12}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-12"},"inputs":{"parameters":{"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-11","items":{"raw":"[\"1\", + \"2\"]"}},"taskInfo":{"name":"for-loop-12"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-12 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-12-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-12}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-12"},"inputs":{"parameters":{"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-11","items":{"raw":"[\"1\", + \"2\"]"}},"taskInfo":{"name":"for-loop-12"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - 
name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-12-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-12-for-loop-12-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-8}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-8"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"},"pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"parameterIterator":{"itemInput":"pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b-loop-item","items":{"inputParameter":"pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b"}},"taskInfo":{"name":"for-loop-8"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-8 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-8-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-8}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-8"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"},"pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"parameterIterator":{"itemInput":"pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b-loop-item","items":{"inputParameter":"pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b"}},"taskInfo":{"name":"for-loop-8"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-8-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-8-for-loop-8-iterator + outputs: {} + - 
dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-9}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-9"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-2-Output":{"componentInputParameter":"pipelinechannel--args-generator-op-2-Output"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"},"pipelinechannel--loop_parameter":{"componentInputParameter":"pipelinechannel--loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-9"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-9 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-9-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-9}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-9"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-2-Output":{"componentInputParameter":"pipelinechannel--args-generator-op-2-Output"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"},"pipelinechannel--loop_parameter":{"componentInputParameter":"pipelinechannel--loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-9"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-9-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-9-for-loop-9-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-3}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: 
'{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"},"pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a''] + == ''heads''"}}' + - name: task-name + value: condition-3 + name: condition-3-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-3-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-3-driver.outputs.parameters.condition}}' + depends: condition-3-driver.Succeeded + name: condition-3 + template: comp-condition-3 + when: '{{tasks.condition-3-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-4}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-4"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"taskInfo":{"name":"condition-4"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-op-Output''] + == ''heads''"}}' + - name: task-name + value: condition-4 + name: condition-4-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-4-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-4-driver.outputs.parameters.condition}}' + depends: condition-4-driver.Succeeded + name: condition-4 + template: comp-condition-4 + when: '{{tasks.condition-4-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-5}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-5"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"},"pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"taskInfo":{"name":"condition-5"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a''] + == ''tails''"}}' + - name: task-name + value: condition-5 + name: condition-5-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-5-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-5-driver.outputs.parameters.condition}}' + depends: 
condition-5-driver.Succeeded + name: condition-5 + template: comp-condition-5 + when: '{{tasks.condition-5-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-12 + template: comp-for-loop-12-for-loop-12-iterator + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-8 + template: comp-for-loop-8-for-loop-8-iterator + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-9 + template: comp-for-loop-9-for-loop-9-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-5cc139171a458f30e91631e3d693f08f057723d3c0c51040446f26fb0b5fd6c8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--msg"}}},"taskInfo":{"name":"print-text"}}' + - name: container + value: '{{workflow.parameters.implementations-5cc139171a458f30e91631e3d693f08f057723d3c0c51040446f26fb0b5fd6c8}}' + - name: task-name + value: print-text + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-text-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-driver.outputs.parameters.cached-decision}}' + depends: print-text-driver.Succeeded + name: print-text + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"dependentTasks":["args-generator-op-2"],"inputs":{"parameters":{"pipelinechannel--args-generator-op-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"args-generator-op-2"}},"pipelinechannel--args-generator-op-Output":{"componentInputParameter":"pipelinechannel--args-generator-op-Output"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"},"pipelinechannel--loop_parameter":{"componentInputParameter":"pipelinechannel--loop_parameter"},"pipelinechannel--msg":{"componentInputParameter":"pipelinechannel--msg"}}},"parameterIterator":{"itemInput":"pipelinechannel--args-generator-op-Output-loop-item","items":{"inputParameter":"pipelinechannel--args-generator-op-Output"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-2 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-2-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: 
'{{workflow.parameters.components-comp-for-loop-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"dependentTasks":["args-generator-op-2"],"inputs":{"parameters":{"pipelinechannel--args-generator-op-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"args-generator-op-2"}},"pipelinechannel--args-generator-op-Output":{"componentInputParameter":"pipelinechannel--args-generator-op-Output"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"},"pipelinechannel--loop_parameter":{"componentInputParameter":"pipelinechannel--loop_parameter"},"pipelinechannel--msg":{"componentInputParameter":"pipelinechannel--msg"}}},"parameterIterator":{"itemInput":"pipelinechannel--args-generator-op-Output-loop-item","items":{"inputParameter":"pipelinechannel--args-generator-op-Output"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-2-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2-for-loop-2-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-efceadb4fda65c2cf2180d4b55f8552bb47943172d073cf46fae6237e4d5c873}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-args-generator-op-2"},"taskInfo":{"name":"args-generator-op-2"}}' + - name: container + value: '{{workflow.parameters.implementations-efceadb4fda65c2cf2180d4b55f8552bb47943172d073cf46fae6237e4d5c873}}' + - name: task-name + value: args-generator-op-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: args-generator-op-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.args-generator-op-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.args-generator-op-2-driver.outputs.parameters.cached-decision}}' + depends: args-generator-op-2-driver.Succeeded + name: args-generator-op-2 + template: system-container-executor + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: args-generator-op-2.Succeeded + name: for-loop-2 + template: comp-for-loop-2-for-loop-2-iterator + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-1 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-5cc139171a458f30e91631e3d693f08f057723d3c0c51040446f26fb0b5fd6c8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-9"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item-subvar-B_b-loop-item"}}},"taskInfo":{"name":"print-text-9"}}' + - name: container + value: '{{workflow.parameters.implementations-5cc139171a458f30e91631e3d693f08f057723d3c0c51040446f26fb0b5fd6c8}}' + - name: task-name + value: print-text-9 + - name: parent-dag-id + value: 
'{{inputs.parameters.parent-dag-id}}' + name: print-text-9-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-9-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-9-driver.outputs.parameters.cached-decision}}' + depends: print-text-9-driver.Succeeded + name: print-text-9 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-16 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-16}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-16"},"inputs":{"parameters":{"pipelinechannel--loop_parameter-loop-item":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"},"pipelinechannel--loop_parameter-loop-item-subvar-B_b":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item-subvar-B_b-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter-loop-item-subvar-B_b"}},"taskInfo":{"name":"for-loop-16"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-16 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-16-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-16}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-16"},"inputs":{"parameters":{"pipelinechannel--loop_parameter-loop-item":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"},"pipelinechannel--loop_parameter-loop-item-subvar-B_b":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item-subvar-B_b-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter-loop-item-subvar-B_b"}},"taskInfo":{"name":"for-loop-16"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-16-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-16-for-loop-16-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-16 + template: comp-for-loop-16-for-loop-16-iterator + inputs: + parameters: + - name: 
parent-dag-id + metadata: {} + name: comp-condition-15 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-15}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-15"},"inputs":{"parameters":{"pipelinechannel--loop_parameter-loop-item":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"},"pipelinechannel--loop_parameter-loop-item-subvar-A_a":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"}}},"taskInfo":{"name":"condition-15"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--loop_parameter-loop-item-subvar-A_a''] + == ''heads''"}}' + - name: task-name + value: condition-15 + name: condition-15-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-15-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-15-driver.outputs.parameters.condition}}' + depends: condition-15-driver.Succeeded + name: condition-15 + template: comp-condition-15 + when: '{{tasks.condition-15-driver.outputs.parameters.condition}} != false' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-14 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-14}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-14"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-14"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-14 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-14-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-14}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-14"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-14"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-14-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + 
parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-14-for-loop-14-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-efceadb4fda65c2cf2180d4b55f8552bb47943172d073cf46fae6237e4d5c873}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-args-generator-op"},"taskInfo":{"name":"args-generator-op"}}' + - name: container + value: '{{workflow.parameters.implementations-efceadb4fda65c2cf2180d4b55f8552bb47943172d073cf46fae6237e4d5c873}}' + - name: task-name + value: args-generator-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: args-generator-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.args-generator-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.args-generator-op-driver.outputs.parameters.cached-decision}}' + depends: args-generator-op-driver.Succeeded + name: args-generator-op + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["args-generator-op","flip-coin-op"],"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"args-generator-op"}},"pipelinechannel--flip-coin-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op"}},"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"},"pipelinechannel--msg":{"componentInputParameter":"msg"}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-op-Output''] + != ''no-such-result''"}}' + - name: task-name + value: condition-1 + depends: args-generator-op.Succeeded && flip-coin-op.Succeeded + name: condition-1-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-1-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-1-driver.outputs.parameters.condition}}' + depends: condition-1-driver.Succeeded + name: condition-1 + template: comp-condition-1 + when: '{{tasks.condition-1-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-74c752241e4386e530a02be0534451962c07a3c96d9c8f0a3b3433cad25b0339}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-coin-op"},"taskInfo":{"name":"flip-coin-op"}}' + - name: container + value: '{{workflow.parameters.implementations-74c752241e4386e530a02be0534451962c07a3c96d9c8f0a3b3433cad25b0339}}' + - name: task-name + value: flip-coin-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: flip-coin-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.flip-coin-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.flip-coin-op-driver.outputs.parameters.cached-decision}}' + depends: flip-coin-op-driver.Succeeded + name: flip-coin-op + template: system-container-executor + - arguments: + 
parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-14 + template: comp-for-loop-14-for-loop-14-iterator + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"loop_parameter":[{"A_a":"heads","B_b":["A","B"]},{"A_a":"tails","B_b":["X","Y","Z"]}],"msg":"hello"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_metadata_fields.yaml b/test_data/compiled-workflows/pipeline_with_metadata_fields.yaml new file mode 100644 index 00000000000..50872327460 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_metadata_fields.yaml @@ -0,0 +1,425 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: dataset-concatenator- +spec: + arguments: + parameters: + - name: components-3e4e63ec82d130c2d86a0de3eb781d6a47ff58129bf5f5aa0aa8891ef448dc7f + value: '{"executorLabel":"exec-dataset-joiner","inputDefinitions":{"artifacts":{"dataset_a":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"description":"First + dataset."},"dataset_b":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"description":"Second + dataset."}}},"outputDefinitions":{"artifacts":{"out_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"description":"The + concatenated dataset."}},"parameters":{"Output":{"description":"The concatenated + string.","parameterType":"STRING"}}}}' + - name: implementations-3e4e63ec82d130c2d86a0de3eb781d6a47ff58129bf5f5aa0aa8891ef448dc7f + value: '{"args":["--executor_input","{{$}}","--function_to_execute","dataset_joiner"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + dataset_joiner(\n dataset_a: Input[Dataset],\n dataset_b: Input[Dataset],\n out_dataset: + Output[Dataset],\n) -\u003e str:\n \"\"\"Concatenate dataset_a and dataset_b.\n\n Also + returns the concatenated string.\n\n Args:\n dataset_a: First dataset.\n dataset_b: + Second dataset.\n\n Returns:\n out_dataset: The concatenated dataset.\n Output: + The concatenated string.\n \"\"\"\n with open(dataset_a.path) as f:\n content_a + = f.read()\n\n with open(dataset_b.path) as f:\n content_b = f.read()\n\n concatenated_string + = content_a + content_b\n with open(out_dataset.path, ''w'') as f:\n f.write(concatenated_string)\n\n return + concatenated_string\n\n"],"image":"python:3.9"}' + - name: components-cf015d965d4dbce23b2fa6520b6fd9409bc8c0e0de1513abaf9d5e1c03d3835d + value: '{"executorLabel":"exec-str-to-dataset","inputDefinitions":{"parameters":{"string":{"description":"The + string.","parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"description":"The + dataset."}}}}' + - name: implementations-cf015d965d4dbce23b2fa6520b6fd9409bc8c0e0de1513abaf9d5e1c03d3835d + value: '{"args":["--executor_input","{{$}}","--function_to_execute","str_to_dataset"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + str_to_dataset(string: str, dataset: Output[Dataset]):\n \"\"\"Convert + string to dataset.\n\n Args:\n string: The string.\n\n Returns:\n dataset: + The dataset.\n \"\"\"\n with open(dataset.path, ''w'') as f:\n f.write(string)\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"outputs":{"artifacts":{"Output":{"artifactSelectors":[{"outputArtifactKey":"out_dataset","producerSubtask":"dataset-joiner"}]}}},"tasks":{"dataset-joiner":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-dataset-joiner"},"dependentTasks":["str-to-dataset"],"inputs":{"artifacts":{"dataset_a":{"taskOutputArtifact":{"outputArtifactKey":"dataset","producerTask":"str-to-dataset"}},"dataset_b":{"componentInputArtifact":"in_dataset"}}},"taskInfo":{"name":"dataset-joiner"}},"str-to-dataset":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-str-to-dataset"},"inputs":{"parameters":{"string":{"componentInputParameter":"string"}}},"taskInfo":{"name":"str-to-dataset"}}}},"inputDefinitions":{"artifacts":{"in_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"description":"Dataset + to which to concatenate string."}},"parameters":{"string":{"description":"String + to concatenate to in_artifact.","parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"description":"The + final concatenated dataset."}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - dataset-concatenator + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: 
ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-3e4e63ec82d130c2d86a0de3eb781d6a47ff58129bf5f5aa0aa8891ef448dc7f}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-dataset-joiner"},"dependentTasks":["str-to-dataset"],"inputs":{"artifacts":{"dataset_a":{"taskOutputArtifact":{"outputArtifactKey":"dataset","producerTask":"str-to-dataset"}},"dataset_b":{"componentInputArtifact":"in_dataset"}}},"taskInfo":{"name":"dataset-joiner"}}' + - name: container + value: '{{workflow.parameters.implementations-3e4e63ec82d130c2d86a0de3eb781d6a47ff58129bf5f5aa0aa8891ef448dc7f}}' + - name: task-name + value: dataset-joiner + - name: parent-dag-id + 
value: '{{inputs.parameters.parent-dag-id}}' + depends: str-to-dataset.Succeeded + name: dataset-joiner-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.dataset-joiner-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.dataset-joiner-driver.outputs.parameters.cached-decision}}' + depends: dataset-joiner-driver.Succeeded + name: dataset-joiner + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-cf015d965d4dbce23b2fa6520b6fd9409bc8c0e0de1513abaf9d5e1c03d3835d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-str-to-dataset"},"inputs":{"parameters":{"string":{"componentInputParameter":"string"}}},"taskInfo":{"name":"str-to-dataset"}}' + - name: container + value: '{{workflow.parameters.implementations-cf015d965d4dbce23b2fa6520b6fd9409bc8c0e0de1513abaf9d5e1c03d3835d}}' + - name: task-name + value: str-to-dataset + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: str-to-dataset-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.str-to-dataset-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.str-to-dataset-driver.outputs.parameters.cached-decision}}' + depends: str-to-dataset-driver.Succeeded + name: str-to-dataset + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - dataset-concatenator + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: 
"true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_metrics_outputs.yaml b/test_data/compiled-workflows/pipeline_with_metrics_outputs.yaml new file mode 100644 index 00000000000..376d69f07d3 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_metrics_outputs.yaml @@ -0,0 +1,473 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-metrics-outputs- +spec: + arguments: + parameters: + - name: components-33c6ad9ab035611571f521208ada9a5231b3729ee087168ce0eeb8765d17fcc4 + value: '{"executorLabel":"exec-output-metrics-2","outputDefinitions":{"artifacts":{"metrics":{"artifactType":{"schemaTitle":"system.Metrics","schemaVersion":"0.0.1"}}}}}' + - name: implementations-33c6ad9ab035611571f521208ada9a5231b3729ee087168ce0eeb8765d17fcc4 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","output_metrics"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.9.0'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + output_metrics(metrics: Output[Metrics]):\n \"\"\"Dummy component that + outputs metrics with a random accuracy.\"\"\"\n import random\n result + = random.randint(0, 100)\n metrics.log_metric(''accuracy'', result)\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-2 + value: '{"dag":{"tasks":{"output-metrics-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-output-metrics-2"},"taskInfo":{"name":"output-metrics-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-1":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-root + value: '{"dag":{"tasks":{"for-loop-2":{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-2"}},"output-metrics":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-output-metrics"},"taskInfo":{"name":"output-metrics"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-metrics-outputs + - --run_id + - '{{workflow.uid}}' + - --run_name + - 
'{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: 
{} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-33c6ad9ab035611571f521208ada9a5231b3729ee087168ce0eeb8765d17fcc4}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-output-metrics-2"},"taskInfo":{"name":"output-metrics-2"}}' + - name: container + value: '{{workflow.parameters.implementations-33c6ad9ab035611571f521208ada9a5231b3729ee087168ce0eeb8765d17fcc4}}' + - name: task-name + value: output-metrics-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: output-metrics-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.output-metrics-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.output-metrics-2-driver.outputs.parameters.cached-decision}}' + depends: output-metrics-2-driver.Succeeded + name: output-metrics-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-metrics-outputs + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: 
parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-2 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-2-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, + 2]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-2-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2-for-loop-2-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-2 + template: comp-for-loop-2-for-loop-2-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-33c6ad9ab035611571f521208ada9a5231b3729ee087168ce0eeb8765d17fcc4}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-output-metrics"},"taskInfo":{"name":"output-metrics"}}' + - name: container + value: '{{workflow.parameters.implementations-33c6ad9ab035611571f521208ada9a5231b3729ee087168ce0eeb8765d17fcc4}}' + - name: task-name + value: output-metrics + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: output-metrics-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.output-metrics-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.output-metrics-driver.outputs.parameters.cached-decision}}' + depends: output-metrics-driver.Succeeded + name: output-metrics + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git 
a/test_data/compiled-workflows/pipeline_with_multiple_exit_handlers.yaml b/test_data/compiled-workflows/pipeline_with_multiple_exit_handlers.yaml new file mode 100644 index 00000000000..bc2ffb01843 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_multiple_exit_handlers.yaml @@ -0,0 +1,673 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-multiple-exit-handlers- +spec: + arguments: + parameters: + - name: components-de343ed68bcc74a4b31b2b5a8230f35dfb47ca8d90eabe51d37dc3849d7acecf + value: '{"executorLabel":"exec-fail-op","inputDefinitions":{"parameters":{"message":{"parameterType":"STRING"}}}}' + - name: implementations-de343ed68bcc74a4b31b2b5a8230f35dfb47ca8d90eabe51d37dc3849d7acecf + value: '{"args":["--executor_input","{{$}}","--function_to_execute","fail_op"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n print(message)\n sys.exit(1)\n\n"],"image":"python:3.9"}' + - name: components-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d + value: '{"executorLabel":"exec-print-op-2","inputDefinitions":{"parameters":{"message":{"parameterType":"STRING"}}}}' + - name: implementations-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_op"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n print(message)\n\n"],"image":"python:3.9"}' + - name: components-comp-exit-handler-1 + value: '{"dag":{"tasks":{"fail-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-fail-op"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"Task + failed."}}}},"taskInfo":{"name":"fail-op"}},"print-op-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"inputs":{"parameters":{"message":{"componentInputParameter":"pipelinechannel--message"}}},"taskInfo":{"name":"print-op-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--message":{"parameterType":"STRING"}}}}' + - name: components-comp-exit-handler-2 + value: '{"dag":{"tasks":{"print-op-4":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-4"},"inputs":{"parameters":{"message":{"componentInputParameter":"pipelinechannel--message"}}},"taskInfo":{"name":"print-op-4"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--message":{"parameterType":"STRING"}}}}' + - name: components-comp-exit-handler-3 + value: '{"dag":{"tasks":{"print-op-6":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-6"},"inputs":{"parameters":{"message":{"componentInputParameter":"pipelinechannel--message"}}},"taskInfo":{"name":"print-op-6"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--message":{"parameterType":"STRING"}}}}' + - name: components-root + value: '{"dag":{"tasks":{"exit-handler-1":{"componentRef":{"name":"comp-exit-handler-1"},"inputs":{"parameters":{"pipelinechannel--message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"exit-handler-1"}},"exit-handler-2":{"componentRef":{"name":"comp-exit-handler-2"},"inputs":{"parameters":{"pipelinechannel--message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"exit-handler-2"}},"exit-handler-3":{"componentRef":{"name":"comp-exit-handler-3"},"inputs":{"parameters":{"pipelinechannel--message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"exit-handler-3"}},"print-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"dependentTasks":["exit-handler-1"],"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"First + exit handler has worked!"}}}},"taskInfo":{"name":"print-op"},"triggerPolicy":{"strategy":"ALL_UPSTREAM_TASKS_COMPLETED"}},"print-op-3":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-3"},"dependentTasks":["exit-handler-2"],"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"Second + exit handler has 
worked!"}}}},"taskInfo":{"name":"print-op-3"},"triggerPolicy":{"strategy":"ALL_UPSTREAM_TASKS_COMPLETED"}},"print-op-5":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-5"},"dependentTasks":["exit-handler-3"],"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"Third + exit handler has worked!"}}}},"taskInfo":{"name":"print-op-5"},"triggerPolicy":{"strategy":"ALL_UPSTREAM_TASKS_COMPLETED"}}}},"inputDefinitions":{"parameters":{"message":{"defaultValue":"Hello + World!","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-multiple-exit-handlers + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + 
optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-de343ed68bcc74a4b31b2b5a8230f35dfb47ca8d90eabe51d37dc3849d7acecf}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-fail-op"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"Task + failed."}}}},"taskInfo":{"name":"fail-op"}}' + - name: container + value: '{{workflow.parameters.implementations-de343ed68bcc74a4b31b2b5a8230f35dfb47ca8d90eabe51d37dc3849d7acecf}}' + - name: task-name + value: fail-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: fail-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.fail-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.fail-op-driver.outputs.parameters.cached-decision}}' + depends: fail-op-driver.Succeeded + name: fail-op + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"inputs":{"parameters":{"message":{"componentInputParameter":"pipelinechannel--message"}}},"taskInfo":{"name":"print-op-2"}}' + - name: container + value: '{{workflow.parameters.implementations-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d}}' + - name: task-name + value: print-op-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-2-driver.outputs.parameters.cached-decision}}' + depends: print-op-2-driver.Succeeded + name: print-op-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-exit-handler-1 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d}}' + - 
name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-4"},"inputs":{"parameters":{"message":{"componentInputParameter":"pipelinechannel--message"}}},"taskInfo":{"name":"print-op-4"}}' + - name: container + value: '{{workflow.parameters.implementations-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d}}' + - name: task-name + value: print-op-4 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-4-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-4-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-4-driver.outputs.parameters.cached-decision}}' + depends: print-op-4-driver.Succeeded + name: print-op-4 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-exit-handler-2 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-6"},"inputs":{"parameters":{"message":{"componentInputParameter":"pipelinechannel--message"}}},"taskInfo":{"name":"print-op-6"}}' + - name: container + value: '{{workflow.parameters.implementations-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d}}' + - name: task-name + value: print-op-6 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-6-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-6-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-6-driver.outputs.parameters.cached-decision}}' + depends: print-op-6-driver.Succeeded + name: print-op-6 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-exit-handler-3 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"dependentTasks":["exit-handler-1"],"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"First + exit handler has worked!"}}}},"taskInfo":{"name":"print-op"},"triggerPolicy":{"strategy":"ALL_UPSTREAM_TASKS_COMPLETED"}}' + - name: container + value: '{{workflow.parameters.implementations-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d}}' + - name: task-name + value: print-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-driver.outputs.parameters.cached-decision}}' + depends: print-op-driver.Succeeded + name: print-op + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: exit-hook-root-print-op + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: 
'{{workflow.parameters.components-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-3"},"dependentTasks":["exit-handler-2"],"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"Second + exit handler has worked!"}}}},"taskInfo":{"name":"print-op-3"},"triggerPolicy":{"strategy":"ALL_UPSTREAM_TASKS_COMPLETED"}}' + - name: container + value: '{{workflow.parameters.implementations-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d}}' + - name: task-name + value: print-op-3 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-3-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-3-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-3-driver.outputs.parameters.cached-decision}}' + depends: print-op-3-driver.Succeeded + name: print-op-3 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: exit-hook-root-print-op-3 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-5"},"dependentTasks":["exit-handler-3"],"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"Third + exit handler has worked!"}}}},"taskInfo":{"name":"print-op-5"},"triggerPolicy":{"strategy":"ALL_UPSTREAM_TASKS_COMPLETED"}}' + - name: container + value: '{{workflow.parameters.implementations-49c335a1a327c8628eed2e87d0e06fd7a9721bf1e50c93c2a624bd17d2277d8d}}' + - name: task-name + value: print-op-5 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-5-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-5-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-5-driver.outputs.parameters.cached-decision}}' + depends: print-op-5-driver.Succeeded + name: print-op-5 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: exit-hook-root-print-op-5 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-multiple-exit-handlers + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + 
command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-exit-handler-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-exit-handler-1"},"inputs":{"parameters":{"pipelinechannel--message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"exit-handler-1"}}' + - name: task-name + value: exit-handler-1 + name: exit-handler-1-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.exit-handler-1-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.exit-handler-1-driver.outputs.parameters.condition}}' + depends: exit-handler-1-driver.Succeeded + hooks: + exit: + arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + template: exit-hook-root-print-op + name: exit-handler-1 + template: comp-exit-handler-1 + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-exit-handler-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-exit-handler-2"},"inputs":{"parameters":{"pipelinechannel--message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"exit-handler-2"}}' + - name: task-name + value: exit-handler-2 + name: exit-handler-2-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.exit-handler-2-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.exit-handler-2-driver.outputs.parameters.condition}}' + depends: exit-handler-2-driver.Succeeded + hooks: + exit: + arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + template: exit-hook-root-print-op-3 + name: exit-handler-2 + template: comp-exit-handler-2 + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-exit-handler-3}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-exit-handler-3"},"inputs":{"parameters":{"pipelinechannel--message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"exit-handler-3"}}' + - name: task-name + value: exit-handler-3 + name: exit-handler-3-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.exit-handler-3-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.exit-handler-3-driver.outputs.parameters.condition}}' + depends: 
exit-handler-3-driver.Succeeded + hooks: + exit: + arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + template: exit-hook-root-print-op-5 + name: exit-handler-3 + template: comp-exit-handler-3 + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"message":"Hello World!"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_nested_conditions.yaml b/test_data/compiled-workflows/pipeline_with_nested_conditions.yaml new file mode 100644 index 00000000000..edf5cecaa1f --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_nested_conditions.yaml @@ -0,0 +1,626 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: nested-conditions-pipeline- +spec: + arguments: + parameters: + - name: components-74c752241e4386e530a02be0534451962c07a3c96d9c8f0a3b3433cad25b0339 + value: '{"executorLabel":"exec-flip-coin-op-4","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-74c752241e4386e530a02be0534451962c07a3c96d9c8f0a3b3433cad25b0339 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","flip_coin_op"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + flip_coin_op() -\u003e str:\n \"\"\"Flip a coin and output heads or tails + randomly.\"\"\"\n import random\n result = ''heads'' if random.randint(0, + 1) == 0 else ''tails''\n return result\n\n"],"image":"python:3.9"}' + - name: components-ea4c10718a561edf4521c83916c18a873e35e86dce27f6050d1677d0da06b823 + value: '{"executorLabel":"exec-print-op-4","inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"}}}}' + - name: implementations-ea4c10718a561edf4521c83916c18a873e35e86dce27f6050d1677d0da06b823 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_op"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\n"],"image":"python:3.9"}' + - name: components-comp-condition-2 + value: '{"dag":{"tasks":{"flip-coin-op-4":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-coin-op-4"},"taskInfo":{"name":"flip-coin-op-4"}},"print-op-4":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-4"},"dependentTasks":["flip-coin-op-4"],"inputs":{"parameters":{"msg":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op-4"}}}},"taskInfo":{"name":"print-op-4"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-op-2-Output":{"parameterType":"STRING"},"pipelinechannel--flip-coin-op-3-Output":{"parameterType":"STRING"},"pipelinechannel--flip-coin-op-Output":{"parameterType":"STRING"}}}}' + - name: components-comp-condition-1 + value: '{"dag":{"tasks":{"condition-2":{"componentRef":{"name":"comp-condition-2"},"dependentTasks":["flip-coin-op-3"],"inputs":{"parameters":{"pipelinechannel--flip-coin-op-2-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-2-Output"},"pipelinechannel--flip-coin-op-3-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op-3"}},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-op-2-Output''] + == inputs.parameter_values[''pipelinechannel--flip-coin-op-3-Output'']"}},"flip-coin-op-3":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-coin-op-3"},"taskInfo":{"name":"flip-coin-op-3"}},"print-op-3":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-3"},"dependentTasks":["flip-coin-op-3"],"inputs":{"parameters":{"msg":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op-3"}}}},"taskInfo":{"name":"print-op-3"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-op-2-Output":{"parameterType":"STRING"},"pipelinechannel--flip-coin-op-Output":{"parameterType":"STRING"}}}}' + - name: components-root + value: '{"dag":{"tasks":{"condition-1":{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["flip-coin-op","flip-coin-op-2"],"inputs":{"parameters":{"pipelinechannel--flip-coin-op-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op-2"}},"pipelinechannel--flip-coin-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op"}}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-op-Output''] + != 
''no-such-result''"}},"flip-coin-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-coin-op"},"taskInfo":{"name":"flip-coin-op"}},"flip-coin-op-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-coin-op-2"},"taskInfo":{"name":"flip-coin-op-2"}},"print-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"dependentTasks":["flip-coin-op"],"inputs":{"parameters":{"msg":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op"}}}},"taskInfo":{"name":"print-op"}},"print-op-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"dependentTasks":["flip-coin-op-2"],"inputs":{"parameters":{"msg":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op-2"}}}},"taskInfo":{"name":"print-op-2"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - nested-conditions-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: 
metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-74c752241e4386e530a02be0534451962c07a3c96d9c8f0a3b3433cad25b0339}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-coin-op-4"},"taskInfo":{"name":"flip-coin-op-4"}}' + - name: container + value: '{{workflow.parameters.implementations-74c752241e4386e530a02be0534451962c07a3c96d9c8f0a3b3433cad25b0339}}' + - name: task-name + value: flip-coin-op-4 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: flip-coin-op-4-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.flip-coin-op-4-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.flip-coin-op-4-driver.outputs.parameters.cached-decision}}' + depends: flip-coin-op-4-driver.Succeeded + name: flip-coin-op-4 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ea4c10718a561edf4521c83916c18a873e35e86dce27f6050d1677d0da06b823}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-4"},"dependentTasks":["flip-coin-op-4"],"inputs":{"parameters":{"msg":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op-4"}}}},"taskInfo":{"name":"print-op-4"}}' + - name: container + value: '{{workflow.parameters.implementations-ea4c10718a561edf4521c83916c18a873e35e86dce27f6050d1677d0da06b823}}' + - name: task-name + value: print-op-4 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: flip-coin-op-4.Succeeded + name: print-op-4-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-4-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-4-driver.outputs.parameters.cached-decision}}' + depends: 
print-op-4-driver.Succeeded + name: print-op-4 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-2 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - nested-conditions-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-2"},"dependentTasks":["flip-coin-op-3"],"inputs":{"parameters":{"pipelinechannel--flip-coin-op-2-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-2-Output"},"pipelinechannel--flip-coin-op-3-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op-3"}},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-op-2-Output''] + == inputs.parameter_values[''pipelinechannel--flip-coin-op-3-Output'']"}}' + - name: task-name + value: condition-2 + depends: flip-coin-op-3.Succeeded + name: condition-2-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-2-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-2-driver.outputs.parameters.condition}}' + depends: condition-2-driver.Succeeded + name: condition-2 + template: comp-condition-2 + when: '{{tasks.condition-2-driver.outputs.parameters.condition}} != false' + - arguments: + 
parameters: + - name: component + value: '{{workflow.parameters.components-74c752241e4386e530a02be0534451962c07a3c96d9c8f0a3b3433cad25b0339}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-coin-op-3"},"taskInfo":{"name":"flip-coin-op-3"}}' + - name: container + value: '{{workflow.parameters.implementations-74c752241e4386e530a02be0534451962c07a3c96d9c8f0a3b3433cad25b0339}}' + - name: task-name + value: flip-coin-op-3 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: flip-coin-op-3-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.flip-coin-op-3-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.flip-coin-op-3-driver.outputs.parameters.cached-decision}}' + depends: flip-coin-op-3-driver.Succeeded + name: flip-coin-op-3 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ea4c10718a561edf4521c83916c18a873e35e86dce27f6050d1677d0da06b823}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-3"},"dependentTasks":["flip-coin-op-3"],"inputs":{"parameters":{"msg":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op-3"}}}},"taskInfo":{"name":"print-op-3"}}' + - name: container + value: '{{workflow.parameters.implementations-ea4c10718a561edf4521c83916c18a873e35e86dce27f6050d1677d0da06b823}}' + - name: task-name + value: print-op-3 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: flip-coin-op-3.Succeeded + name: print-op-3-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-3-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-3-driver.outputs.parameters.cached-decision}}' + depends: print-op-3-driver.Succeeded + name: print-op-3 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-1 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["flip-coin-op","flip-coin-op-2"],"inputs":{"parameters":{"pipelinechannel--flip-coin-op-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op-2"}},"pipelinechannel--flip-coin-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op"}}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-op-Output''] + != ''no-such-result''"}}' + - name: task-name + value: condition-1 + depends: flip-coin-op.Succeeded && flip-coin-op-2.Succeeded + name: condition-1-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-1-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-1-driver.outputs.parameters.condition}}' + depends: condition-1-driver.Succeeded + name: condition-1 + template: comp-condition-1 + when: '{{tasks.condition-1-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: 
component + value: '{{workflow.parameters.components-74c752241e4386e530a02be0534451962c07a3c96d9c8f0a3b3433cad25b0339}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-coin-op"},"taskInfo":{"name":"flip-coin-op"}}' + - name: container + value: '{{workflow.parameters.implementations-74c752241e4386e530a02be0534451962c07a3c96d9c8f0a3b3433cad25b0339}}' + - name: task-name + value: flip-coin-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: flip-coin-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.flip-coin-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.flip-coin-op-driver.outputs.parameters.cached-decision}}' + depends: flip-coin-op-driver.Succeeded + name: flip-coin-op + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-74c752241e4386e530a02be0534451962c07a3c96d9c8f0a3b3433cad25b0339}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-coin-op-2"},"taskInfo":{"name":"flip-coin-op-2"}}' + - name: container + value: '{{workflow.parameters.implementations-74c752241e4386e530a02be0534451962c07a3c96d9c8f0a3b3433cad25b0339}}' + - name: task-name + value: flip-coin-op-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: flip-coin-op-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.flip-coin-op-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.flip-coin-op-2-driver.outputs.parameters.cached-decision}}' + depends: flip-coin-op-2-driver.Succeeded + name: flip-coin-op-2 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ea4c10718a561edf4521c83916c18a873e35e86dce27f6050d1677d0da06b823}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"dependentTasks":["flip-coin-op"],"inputs":{"parameters":{"msg":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op"}}}},"taskInfo":{"name":"print-op"}}' + - name: container + value: '{{workflow.parameters.implementations-ea4c10718a561edf4521c83916c18a873e35e86dce27f6050d1677d0da06b823}}' + - name: task-name + value: print-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: flip-coin-op.Succeeded + name: print-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-driver.outputs.parameters.cached-decision}}' + depends: print-op-driver.Succeeded + name: print-op + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ea4c10718a561edf4521c83916c18a873e35e86dce27f6050d1677d0da06b823}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"dependentTasks":["flip-coin-op-2"],"inputs":{"parameters":{"msg":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op-2"}}}},"taskInfo":{"name":"print-op-2"}}' + - name: container + value: 
'{{workflow.parameters.implementations-ea4c10718a561edf4521c83916c18a873e35e86dce27f6050d1677d0da06b823}}' + - name: task-name + value: print-op-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: flip-coin-op-2.Succeeded + name: print-op-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-2-driver.outputs.parameters.cached-decision}}' + depends: print-op-2-driver.Succeeded + name: print-op-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_nested_conditions_yaml.yaml b/test_data/compiled-workflows/pipeline_with_nested_conditions_yaml.yaml new file mode 100644 index 00000000000..870705b021d --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_nested_conditions_yaml.yaml @@ -0,0 +1,746 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: conditional-execution-pipeline- +spec: + arguments: + parameters: + - name: components-2c7a24d85a90f8909bcf825238cdd7bc3d0cc7d64cb2978ecd973730a39452a8 + value: '{"executorLabel":"exec-print","inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"}}}}' + - name: implementations-2c7a24d85a90f8909bcf825238cdd7bc3d0cc7d64cb2978ecd973730a39452a8 + value: '{"command":["echo","{{$.inputs.parameters[''msg'']}}"],"image":"python:alpine3.9"}' + - name: components-comp-condition-2 + value: '{"dag":{"tasks":{"print":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"heads + and {{$.inputs.parameters[''pipelinechannel--generate-random-number-output'']}} + \u003e 5!"}},"pipelinechannel--generate-random-number-output":{"componentInputParameter":"pipelinechannel--generate-random-number-output"}}},"taskInfo":{"name":"print"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-output":{"parameterType":"STRING"},"pipelinechannel--generate-random-number-output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-comp-condition-3 + value: '{"dag":{"tasks":{"print-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-2"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"heads + and {{$.inputs.parameters[''pipelinechannel--generate-random-number-output'']}} + \u003c= 5!"}},"pipelinechannel--generate-random-number-output":{"componentInputParameter":"pipelinechannel--generate-random-number-output"}}},"taskInfo":{"name":"print-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-output":{"parameterType":"STRING"},"pipelinechannel--generate-random-number-output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: 
components-789e9158be0ce0b20b6e4fa73b0cecf695bedfb779d240bbf5774c468c94d421 + value: '{"executorLabel":"exec-generate-random-number","outputDefinitions":{"parameters":{"output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-789e9158be0ce0b20b6e4fa73b0cecf695bedfb779d240bbf5774c468c94d421 + value: '{"args":["mkdir -p \"$(dirname $2)\" \u0026\u0026 python -c \"import + random; print(random.randint($0, $1), end='''')\" | tee $2","0","9","{{$.outputs.parameters[''output''].output_file}}"],"command":["sh","-c"],"image":"python:alpine3.9"}' + - name: components-comp-condition-1 + value: '{"dag":{"tasks":{"condition-2":{"componentRef":{"name":"comp-condition-2"},"dependentTasks":["generate-random-number"],"inputs":{"parameters":{"pipelinechannel--flip-coin-output":{"componentInputParameter":"pipelinechannel--flip-coin-output"},"pipelinechannel--generate-random-number-output":{"taskOutputParameter":{"outputParameterKey":"output","producerTask":"generate-random-number"}}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--generate-random-number-output'']) + \u003e 5"}},"condition-3":{"componentRef":{"name":"comp-condition-3"},"dependentTasks":["generate-random-number"],"inputs":{"parameters":{"pipelinechannel--flip-coin-output":{"componentInputParameter":"pipelinechannel--flip-coin-output"},"pipelinechannel--generate-random-number-output":{"taskOutputParameter":{"outputParameterKey":"output","producerTask":"generate-random-number"}}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--generate-random-number-output'']) + \u003c= 5"}},"generate-random-number":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-generate-random-number"},"taskInfo":{"name":"generate-random-number"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-output":{"parameterType":"STRING"}}}}' + - name: components-comp-condition-5 + value: '{"dag":{"tasks":{"print-3":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-3"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"tails + and {{$.inputs.parameters[''pipelinechannel--generate-random-number-2-output'']}} + \u003e 15!"}},"pipelinechannel--generate-random-number-2-output":{"componentInputParameter":"pipelinechannel--generate-random-number-2-output"}}},"taskInfo":{"name":"print-3"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-output":{"parameterType":"STRING"},"pipelinechannel--generate-random-number-2-output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-comp-condition-6 + value: '{"dag":{"tasks":{"print-4":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-4"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"tails + and {{$.inputs.parameters[''pipelinechannel--generate-random-number-2-output'']}} + \u003c= 15!"}},"pipelinechannel--generate-random-number-2-output":{"componentInputParameter":"pipelinechannel--generate-random-number-2-output"}}},"taskInfo":{"name":"print-4"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-output":{"parameterType":"STRING"},"pipelinechannel--generate-random-number-2-output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: components-fa0ad72f3897e2b1e2e1ce0644d93bd42b4bb665a29b5acdd23005c215f2c8fc + value: '{"executorLabel":"exec-generate-random-number-2","outputDefinitions":{"parameters":{"output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: 
implementations-fa0ad72f3897e2b1e2e1ce0644d93bd42b4bb665a29b5acdd23005c215f2c8fc + value: '{"args":["mkdir -p \"$(dirname $2)\" \u0026\u0026 python -c \"import + random; print(random.randint($0, $1), end='''')\" | tee $2","10","19","{{$.outputs.parameters[''output''].output_file}}"],"command":["sh","-c"],"image":"python:alpine3.9"}' + - name: components-comp-condition-4 + value: '{"dag":{"tasks":{"condition-5":{"componentRef":{"name":"comp-condition-5"},"dependentTasks":["generate-random-number-2"],"inputs":{"parameters":{"pipelinechannel--flip-coin-output":{"componentInputParameter":"pipelinechannel--flip-coin-output"},"pipelinechannel--generate-random-number-2-output":{"taskOutputParameter":{"outputParameterKey":"output","producerTask":"generate-random-number-2"}}}},"taskInfo":{"name":"condition-5"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--generate-random-number-2-output'']) + \u003e 15"}},"condition-6":{"componentRef":{"name":"comp-condition-6"},"dependentTasks":["generate-random-number-2"],"inputs":{"parameters":{"pipelinechannel--flip-coin-output":{"componentInputParameter":"pipelinechannel--flip-coin-output"},"pipelinechannel--generate-random-number-2-output":{"taskOutputParameter":{"outputParameterKey":"output","producerTask":"generate-random-number-2"}}}},"taskInfo":{"name":"condition-6"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--generate-random-number-2-output'']) + \u003c= 15"}},"generate-random-number-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-generate-random-number-2"},"taskInfo":{"name":"generate-random-number-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--flip-coin-output":{"parameterType":"STRING"}}}}' + - name: components-4a03f280b1b5aa6f490a19d8d2607c2a76f999db6f303a0716d80a3e8abd97d6 + value: '{"executorLabel":"exec-flip-coin","outputDefinitions":{"parameters":{"output":{"parameterType":"STRING"}}}}' + - name: implementations-4a03f280b1b5aa6f490a19d8d2607c2a76f999db6f303a0716d80a3e8abd97d6 + value: '{"args":["mkdir -p \"$(dirname $0)\" \u0026\u0026 python -c \"import + random; result = ''heads'' if random.randint(0,1) == 0 else ''tails''; print(result, + end='''')\" | tee $0","{{$.outputs.parameters[''output''].output_file}}"],"command":["sh","-c"],"image":"python:alpine3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"condition-1":{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-output":{"taskOutputParameter":{"outputParameterKey":"output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-output''] + == ''heads''"}},"condition-4":{"componentRef":{"name":"comp-condition-4"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-output":{"taskOutputParameter":{"outputParameterKey":"output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-4"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-output''] + == ''tails''"}},"flip-coin":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-coin"},"taskInfo":{"name":"flip-coin"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - 
--pipeline_name + - conditional-execution-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: 
system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-2c7a24d85a90f8909bcf825238cdd7bc3d0cc7d64cb2978ecd973730a39452a8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"heads + and {{$.inputs.parameters[''pipelinechannel--generate-random-number-output'']}} + \u003e 5!"}},"pipelinechannel--generate-random-number-output":{"componentInputParameter":"pipelinechannel--generate-random-number-output"}}},"taskInfo":{"name":"print"}}' + - name: container + value: '{{workflow.parameters.implementations-2c7a24d85a90f8909bcf825238cdd7bc3d0cc7d64cb2978ecd973730a39452a8}}' + - name: task-name + value: print + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-driver.outputs.parameters.cached-decision}}' + depends: print-driver.Succeeded + name: print + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-2 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-2c7a24d85a90f8909bcf825238cdd7bc3d0cc7d64cb2978ecd973730a39452a8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-2"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"heads + and {{$.inputs.parameters[''pipelinechannel--generate-random-number-output'']}} + \u003c= 5!"}},"pipelinechannel--generate-random-number-output":{"componentInputParameter":"pipelinechannel--generate-random-number-output"}}},"taskInfo":{"name":"print-2"}}' + - name: container + value: '{{workflow.parameters.implementations-2c7a24d85a90f8909bcf825238cdd7bc3d0cc7d64cb2978ecd973730a39452a8}}' + - name: task-name + value: print-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-2-driver.outputs.parameters.cached-decision}}' + depends: print-2-driver.Succeeded + name: print-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-3 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - conditional-execution-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - 
'{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-2"},"dependentTasks":["generate-random-number"],"inputs":{"parameters":{"pipelinechannel--flip-coin-output":{"componentInputParameter":"pipelinechannel--flip-coin-output"},"pipelinechannel--generate-random-number-output":{"taskOutputParameter":{"outputParameterKey":"output","producerTask":"generate-random-number"}}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--generate-random-number-output'']) + \u003e 5"}}' + - name: task-name + value: condition-2 + depends: generate-random-number.Succeeded + name: condition-2-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-2-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-2-driver.outputs.parameters.condition}}' + depends: condition-2-driver.Succeeded + name: condition-2 + template: comp-condition-2 + when: '{{tasks.condition-2-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-3}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-3"},"dependentTasks":["generate-random-number"],"inputs":{"parameters":{"pipelinechannel--flip-coin-output":{"componentInputParameter":"pipelinechannel--flip-coin-output"},"pipelinechannel--generate-random-number-output":{"taskOutputParameter":{"outputParameterKey":"output","producerTask":"generate-random-number"}}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--generate-random-number-output'']) + \u003c= 5"}}' + - name: task-name + value: condition-3 + depends: generate-random-number.Succeeded + 
name: condition-3-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-3-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-3-driver.outputs.parameters.condition}}' + depends: condition-3-driver.Succeeded + name: condition-3 + template: comp-condition-3 + when: '{{tasks.condition-3-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-789e9158be0ce0b20b6e4fa73b0cecf695bedfb779d240bbf5774c468c94d421}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-generate-random-number"},"taskInfo":{"name":"generate-random-number"}}' + - name: container + value: '{{workflow.parameters.implementations-789e9158be0ce0b20b6e4fa73b0cecf695bedfb779d240bbf5774c468c94d421}}' + - name: task-name + value: generate-random-number + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: generate-random-number-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.generate-random-number-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.generate-random-number-driver.outputs.parameters.cached-decision}}' + depends: generate-random-number-driver.Succeeded + name: generate-random-number + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-1 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-2c7a24d85a90f8909bcf825238cdd7bc3d0cc7d64cb2978ecd973730a39452a8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-3"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"tails + and {{$.inputs.parameters[''pipelinechannel--generate-random-number-2-output'']}} + \u003e 15!"}},"pipelinechannel--generate-random-number-2-output":{"componentInputParameter":"pipelinechannel--generate-random-number-2-output"}}},"taskInfo":{"name":"print-3"}}' + - name: container + value: '{{workflow.parameters.implementations-2c7a24d85a90f8909bcf825238cdd7bc3d0cc7d64cb2978ecd973730a39452a8}}' + - name: task-name + value: print-3 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-3-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-3-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-3-driver.outputs.parameters.cached-decision}}' + depends: print-3-driver.Succeeded + name: print-3 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-5 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-2c7a24d85a90f8909bcf825238cdd7bc3d0cc7d64cb2978ecd973730a39452a8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-4"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"tails + and {{$.inputs.parameters[''pipelinechannel--generate-random-number-2-output'']}} + \u003c= 15!"}},"pipelinechannel--generate-random-number-2-output":{"componentInputParameter":"pipelinechannel--generate-random-number-2-output"}}},"taskInfo":{"name":"print-4"}}' + - name: 
container + value: '{{workflow.parameters.implementations-2c7a24d85a90f8909bcf825238cdd7bc3d0cc7d64cb2978ecd973730a39452a8}}' + - name: task-name + value: print-4 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-4-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-4-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-4-driver.outputs.parameters.cached-decision}}' + depends: print-4-driver.Succeeded + name: print-4 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-6 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-5}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-5"},"dependentTasks":["generate-random-number-2"],"inputs":{"parameters":{"pipelinechannel--flip-coin-output":{"componentInputParameter":"pipelinechannel--flip-coin-output"},"pipelinechannel--generate-random-number-2-output":{"taskOutputParameter":{"outputParameterKey":"output","producerTask":"generate-random-number-2"}}}},"taskInfo":{"name":"condition-5"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--generate-random-number-2-output'']) + \u003e 15"}}' + - name: task-name + value: condition-5 + depends: generate-random-number-2.Succeeded + name: condition-5-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-5-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-5-driver.outputs.parameters.condition}}' + depends: condition-5-driver.Succeeded + name: condition-5 + template: comp-condition-5 + when: '{{tasks.condition-5-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-6}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-6"},"dependentTasks":["generate-random-number-2"],"inputs":{"parameters":{"pipelinechannel--flip-coin-output":{"componentInputParameter":"pipelinechannel--flip-coin-output"},"pipelinechannel--generate-random-number-2-output":{"taskOutputParameter":{"outputParameterKey":"output","producerTask":"generate-random-number-2"}}}},"taskInfo":{"name":"condition-6"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--generate-random-number-2-output'']) + \u003c= 15"}}' + - name: task-name + value: condition-6 + depends: generate-random-number-2.Succeeded + name: condition-6-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-6-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-6-driver.outputs.parameters.condition}}' + depends: condition-6-driver.Succeeded + name: condition-6 + template: comp-condition-6 + when: '{{tasks.condition-6-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-fa0ad72f3897e2b1e2e1ce0644d93bd42b4bb665a29b5acdd23005c215f2c8fc}}' + - name: task + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-generate-random-number-2"},"taskInfo":{"name":"generate-random-number-2"}}' + - name: container + value: '{{workflow.parameters.implementations-fa0ad72f3897e2b1e2e1ce0644d93bd42b4bb665a29b5acdd23005c215f2c8fc}}' + - name: task-name + value: generate-random-number-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: generate-random-number-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.generate-random-number-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.generate-random-number-2-driver.outputs.parameters.cached-decision}}' + depends: generate-random-number-2-driver.Succeeded + name: generate-random-number-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-condition-4 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-output":{"taskOutputParameter":{"outputParameterKey":"output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-output''] + == ''heads''"}}' + - name: task-name + value: condition-1 + depends: flip-coin.Succeeded + name: condition-1-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-1-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-1-driver.outputs.parameters.condition}}' + depends: condition-1-driver.Succeeded + name: condition-1 + template: comp-condition-1 + when: '{{tasks.condition-1-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-condition-4}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-condition-4"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-output":{"taskOutputParameter":{"outputParameterKey":"output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-4"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-output''] + == ''tails''"}}' + - name: task-name + value: condition-4 + depends: flip-coin.Succeeded + name: condition-4-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.condition-4-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.condition-4-driver.outputs.parameters.condition}}' + depends: condition-4-driver.Succeeded + name: condition-4 + template: comp-condition-4 + when: '{{tasks.condition-4-driver.outputs.parameters.condition}} != false' + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-4a03f280b1b5aa6f490a19d8d2607c2a76f999db6f303a0716d80a3e8abd97d6}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-coin"},"taskInfo":{"name":"flip-coin"}}' + - name: container + value: 
'{{workflow.parameters.implementations-4a03f280b1b5aa6f490a19d8d2607c2a76f999db6f303a0716d80a3e8abd97d6}}' + - name: task-name + value: flip-coin + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: flip-coin-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.flip-coin-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.flip-coin-driver.outputs.parameters.cached-decision}}' + depends: flip-coin-driver.Succeeded + name: flip-coin + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_nested_loops.yaml b/test_data/compiled-workflows/pipeline_with_nested_loops.yaml new file mode 100644 index 00000000000..4d5504e334e --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_nested_loops.yaml @@ -0,0 +1,724 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-nested-loops- +spec: + arguments: + parameters: + - name: components-7205c5edce36aade15ea9a24eba3c9a21dcee0e50400f096ce31ecb24d439c9f + value: '{"executorLabel":"exec-print-op","inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"},"msg2":{"isOptional":true,"parameterType":"STRING"}}}}' + - name: implementations-7205c5edce36aade15ea9a24eba3c9a21dcee0e50400f096ce31ecb24d439c9f + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_op"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_op(msg: str, msg2: Optional[str] = None):\n print(f''msg: {msg}, + msg2: {msg2}'')\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-2 + value: '{"dag":{"tasks":{"print-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item-subvar-p_a-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"q_a\"]"}}},"taskInfo":{"name":"print-op"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop_parameter-loop-item":{"parameterType":"STRING"},"pipelinechannel--loop_parameter-loop-item-subvar-p_a":{"parameterType":"STRING"},"pipelinechannel--loop_parameter-loop-item-subvar-p_a-loop-item":{"parameterType":"STRING"}}}}' + - name: components-comp-for-loop-1 + value: '{"dag":{"tasks":{"for-loop-2":{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--loop_parameter-loop-item":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"},"pipelinechannel--loop_parameter-loop-item-subvar-p_a":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"p_a\"]"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item-subvar-p_a-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter-loop-item-subvar-p_a"}},"taskInfo":{"name":"for-loop-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop_parameter":{"parameterType":"LIST"},"pipelinechannel--loop_parameter-loop-item":{"parameterType":"STRING"}}}}' + - name: components-comp-for-loop-6 + value: '{"dag":{"tasks":{"print-op-3":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-3"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-3"},"msg2":{"componentInputParameter":"pipelinechannel--loop-item-param-5"}}},"taskInfo":{"name":"print-op-3"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-3":{"parameterType":"STRING"},"pipelinechannel--loop-item-param-5":{"parameterType":"STRING"}}}}' + - name: components-comp-for-loop-4 + value: '{"dag":{"tasks":{"for-loop-6":{"componentRef":{"name":"comp-for-loop-6"},"inputs":{"parameters":{"pipelinechannel--loop-item-param-3":{"componentInputParameter":"pipelinechannel--loop-item-param-3"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-5","items":{"raw":"[\"100\", + \"200\", 
\"300\"]"}},"taskInfo":{"name":"for-loop-6"}},"print-op-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-3"}}},"taskInfo":{"name":"print-op-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-3":{"parameterType":"STRING"}}}}' + - name: components-root + value: '{"dag":{"tasks":{"for-loop-1":{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-1"}},"for-loop-4":{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[\"1\", + \"2\"]"}},"taskInfo":{"name":"for-loop-4"}}}},"inputDefinitions":{"parameters":{"loop_parameter":{"defaultValue":[{"p_a":[{"q_a":"1"},{"q_a":"2"}],"p_b":"hello"},{"p_a":[{"q_a":"11"},{"q_a":"22"}],"p_b":"halo"}],"isOptional":true,"parameterType":"LIST"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-nested-loops + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: 
'{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-7205c5edce36aade15ea9a24eba3c9a21dcee0e50400f096ce31ecb24d439c9f}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item-subvar-p_a-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"q_a\"]"}}},"taskInfo":{"name":"print-op"}}' + - name: container + value: '{{workflow.parameters.implementations-7205c5edce36aade15ea9a24eba3c9a21dcee0e50400f096ce31ecb24d439c9f}}' + - name: task-name + value: print-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-driver.outputs.parameters.cached-decision}}' + depends: print-op-driver.Succeeded + name: print-op + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-nested-loops + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - 
'{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--loop_parameter-loop-item":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"},"pipelinechannel--loop_parameter-loop-item-subvar-p_a":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"p_a\"]"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item-subvar-p_a-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter-loop-item-subvar-p_a"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-2 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-2-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: 
'{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--loop_parameter-loop-item":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"},"pipelinechannel--loop_parameter-loop-item-subvar-p_a":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"p_a\"]"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item-subvar-p_a-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter-loop-item-subvar-p_a"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-2-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2-for-loop-2-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-2 + template: comp-for-loop-2-for-loop-2-iterator + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-1 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-7205c5edce36aade15ea9a24eba3c9a21dcee0e50400f096ce31ecb24d439c9f}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-3"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-3"},"msg2":{"componentInputParameter":"pipelinechannel--loop-item-param-5"}}},"taskInfo":{"name":"print-op-3"}}' + - name: container + value: '{{workflow.parameters.implementations-7205c5edce36aade15ea9a24eba3c9a21dcee0e50400f096ce31ecb24d439c9f}}' + - name: task-name + value: print-op-3 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-3-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-3-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-3-driver.outputs.parameters.cached-decision}}' + depends: print-op-3-driver.Succeeded + name: print-op-3 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-6 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-6}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-6"},"inputs":{"parameters":{"pipelinechannel--loop-item-param-3":{"componentInputParameter":"pipelinechannel--loop-item-param-3"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-5","items":{"raw":"[\"100\", + \"200\", \"300\"]"}},"taskInfo":{"name":"for-loop-6"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: 
'{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-6 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-6-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-6}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-6"},"inputs":{"parameters":{"pipelinechannel--loop-item-param-3":{"componentInputParameter":"pipelinechannel--loop-item-param-3"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-5","items":{"raw":"[\"100\", + \"200\", \"300\"]"}},"taskInfo":{"name":"for-loop-6"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-6-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-6-for-loop-6-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-6 + template: comp-for-loop-6-for-loop-6-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-7205c5edce36aade15ea9a24eba3c9a21dcee0e50400f096ce31ecb24d439c9f}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-3"}}},"taskInfo":{"name":"print-op-2"}}' + - name: container + value: '{{workflow.parameters.implementations-7205c5edce36aade15ea9a24eba3c9a21dcee0e50400f096ce31ecb24d439c9f}}' + - name: task-name + value: print-op-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-2-driver.outputs.parameters.cached-decision}}' + depends: print-op-2-driver.Succeeded + name: print-op-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-4 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-1}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-1"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + 
value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-1 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-1-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-1"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-1-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-1-for-loop-1-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-4}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[\"1\", + \"2\"]"}},"taskInfo":{"name":"for-loop-4"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-4 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-4-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-4}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[\"1\", + \"2\"]"}},"taskInfo":{"name":"for-loop-4"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-4-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-4-for-loop-4-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-1 + template: comp-for-loop-1-for-loop-1-iterator + - arguments: + parameters: + - name: 
parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-4 + template: comp-for-loop-4-for-loop-4-iterator + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"loop_parameter":[{"p_a":[{"q_a":"1"},{"q_a":"2"}],"p_b":"hello"},{"p_a":[{"q_a":"11"},{"q_a":"22"}],"p_b":"halo"}]}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_only_display_name.yaml b/test_data/compiled-workflows/pipeline_with_only_display_name.yaml new file mode 100644 index 00000000000..bb5d71202b0 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_only_display_name.yaml @@ -0,0 +1,360 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: echo-name- +spec: + arguments: + parameters: + - name: components-cf9c81ac9e6ab0dcdd92cb89ed717317e681cb0645cb5ddfc4824b1de14346b3 + value: '{"executorLabel":"exec-echo"}' + - name: implementations-cf9c81ac9e6ab0dcdd92cb89ed717317e681cb0645cb5ddfc4824b1de14346b3 + value: '{"args":["hello world"],"command":["echo"],"image":"public.ecr.aws/docker/library/python:3.12"}' + - name: components-root + value: '{"dag":{"tasks":{"echo":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-echo"},"taskInfo":{"name":"echo"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - echo-name + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: 
task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-cf9c81ac9e6ab0dcdd92cb89ed717317e681cb0645cb5ddfc4824b1de14346b3}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-echo"},"taskInfo":{"name":"echo"}}' + - name: container + value: '{{workflow.parameters.implementations-cf9c81ac9e6ab0dcdd92cb89ed717317e681cb0645cb5ddfc4824b1de14346b3}}' + - name: task-name + value: echo + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: echo-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.echo-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.echo-driver.outputs.parameters.cached-decision}}' + depends: echo-driver.Succeeded + name: echo + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - 
'{{inputs.parameters.driver-type}}' + - --pipeline_name + - echo-name + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_outputs.yaml b/test_data/compiled-workflows/pipeline_with_outputs.yaml new file mode 100644 index 00000000000..838f388df51 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_outputs.yaml @@ -0,0 +1,454 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-in-pipeline- +spec: + arguments: + parameters: + - name: components-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2 + value: '{"executorLabel":"exec-print-op1-2","inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_op1"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_op1(msg: str) -\u003e str:\n print(msg)\n return msg\n\n"],"image":"python:3.9"}' + - name: components-3f2ba2471e2638f8b27a01f4563316e6e3e8c02bae402875e95b5b86e8de1ddc + value: '{"executorLabel":"exec-print-op2","inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"data":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: implementations-3f2ba2471e2638f8b27a01f4563316e6e3e8c02bae402875e95b5b86e8de1ddc + value: '{"args":["{{$.inputs.parameters[''msg'']}}","{{$.outputs.artifacts[''data''].path}}"],"command":["sh","-c","mkdir + --parents $(dirname \"$1\") \u0026\u0026 echo \"$0\" \u003e \"$1\""],"image":"alpine"}' + - name: components-comp-inner-pipeline + value: '{"dag":{"outputs":{"artifacts":{"data":{"artifactSelectors":[{"outputArtifactKey":"data","producerSubtask":"print-op2"}]}},"parameters":{"msg":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"print-op1"}}}},"tasks":{"print-op1":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op1-2"},"inputs":{"parameters":{"msg":{"componentInputParameter":"msg"}}},"taskInfo":{"name":"print-op1"}},"print-op2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op2"},"dependentTasks":["print-op1"],"inputs":{"parameters":{"msg":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"print-op1"}}}},"taskInfo":{"name":"print-op2"}}}},"inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"data":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}},"parameters":{"msg":{"parameterType":"STRING"}}}}' + - name: components-root + value: '{"dag":{"outputs":{"artifacts":{"Output":{"artifactSelectors":[{"outputArtifactKey":"data","producerSubtask":"inner-pipeline"}]}}},"tasks":{"inner-pipeline":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-inner-pipeline"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"world"}}}},"taskInfo":{"name":"inner-pipeline"}},"print-op1":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op1"},"inputs":{"parameters":{"msg":{"componentInputParameter":"msg"}}},"taskInfo":{"name":"print-op1"}}}},"inputDefinitions":{"parameters":{"msg":{"defaultValue":"Hello","isOptional":true,"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-in-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name 
+ - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - 
emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op1-2"},"inputs":{"parameters":{"msg":{"componentInputParameter":"msg"}}},"taskInfo":{"name":"print-op1"}}' + - name: container + value: '{{workflow.parameters.implementations-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2}}' + - name: task-name + value: print-op1 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op1-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op1-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op1-driver.outputs.parameters.cached-decision}}' + depends: print-op1-driver.Succeeded + name: print-op1 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-3f2ba2471e2638f8b27a01f4563316e6e3e8c02bae402875e95b5b86e8de1ddc}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op2"},"dependentTasks":["print-op1"],"inputs":{"parameters":{"msg":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"print-op1"}}}},"taskInfo":{"name":"print-op2"}}' + - name: container + value: '{{workflow.parameters.implementations-3f2ba2471e2638f8b27a01f4563316e6e3e8c02bae402875e95b5b86e8de1ddc}}' + - name: task-name + value: print-op2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: print-op1.Succeeded + name: print-op2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op2-driver.outputs.parameters.cached-decision}}' + depends: print-op2-driver.Succeeded + name: print-op2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-inner-pipeline + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-in-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - 
"false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-inner-pipeline}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-inner-pipeline"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"world"}}}},"taskInfo":{"name":"inner-pipeline"}}' + - name: task-name + value: inner-pipeline + name: inner-pipeline-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.inner-pipeline-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.inner-pipeline-driver.outputs.parameters.condition}}' + depends: inner-pipeline-driver.Succeeded + name: inner-pipeline + template: comp-inner-pipeline + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op1"},"inputs":{"parameters":{"msg":{"componentInputParameter":"msg"}}},"taskInfo":{"name":"print-op1"}}' + - name: container + value: '{{workflow.parameters.implementations-0136b0f27f214b43cb8217d67d795da9568e61f9dbcd5fbda26cb0f0254cd0d2}}' + - name: task-name + value: print-op1 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op1-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op1-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op1-driver.outputs.parameters.cached-decision}}' + depends: print-op1-driver.Succeeded + name: print-op1 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"msg":"Hello"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_parallelfor_parallelism.yaml 
b/test_data/compiled-workflows/pipeline_with_parallelfor_parallelism.yaml new file mode 100644 index 00000000000..50e286a265a --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_parallelfor_parallelism.yaml @@ -0,0 +1,1595 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-loops- +spec: + arguments: + parameters: + - name: components-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8 + value: '{"executorLabel":"exec-print-text-2","inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"}}}}' + - name: implementations-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_text"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_text(msg: str):\n print(msg)\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-2 + value: '{"dag":{"tasks":{"print-text-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-2"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"}}},"taskInfo":{"name":"print-text-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop_parameter":{"parameterType":"LIST"},"pipelinechannel--loop_parameter-loop-item":{"parameterType":"STRING"}}}}' + - name: components-comp-for-loop-1 + value: '{"dag":{"tasks":{"for-loop-2":{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"pipelinechannel--loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-2"}},"print-text":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"}}},"taskInfo":{"name":"print-text"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop_parameter":{"parameterType":"LIST"},"pipelinechannel--loop_parameter-loop-item":{"parameterType":"STRING"}}}}' + - name: components-7c6688357d7777b7dadbdb7acda6f1774e7c9fc71f6d002466243931bf300c2a + value: '{"executorLabel":"exec-print-int-3","inputDefinitions":{"parameters":{"x":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-7c6688357d7777b7dadbdb7acda6f1774e7c9fc71f6d002466243931bf300c2a + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_int"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_int(x: int):\n print(x)\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-10 + value: '{"dag":{"tasks":{"print-int-3":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-int-3"},"inputs":{"parameters":{"x":{"componentInputParameter":"pipelinechannel--list-dict-maker-1-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"a\"]"}}},"taskInfo":{"name":"print-int-3"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--list-dict-maker-1-Output":{"parameterType":"LIST"},"pipelinechannel--list-dict-maker-1-Output-loop-item":{"parameterType":"STRUCT"}}}}' + - name: components-comp-for-loop-11 + value: '{"dag":{"tasks":{"print-int-4":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-int-4"},"inputs":{"parameters":{"x":{"componentInputParameter":"pipelinechannel--list-dict-maker-2-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"a\"]"}}},"taskInfo":{"name":"print-int-4"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--list-dict-maker-2-Output":{"parameterType":"LIST"},"pipelinechannel--list-dict-maker-2-Output-loop-item":{"parameterType":"STRUCT"}}}}' + - name: components-comp-for-loop-12 + value: '{"dag":{"tasks":{"print-int-5":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-int-5"},"inputs":{"parameters":{"x":{"componentInputParameter":"pipelinechannel--list-dict-maker-3-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"a\"]"}}},"taskInfo":{"name":"print-int-5"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--list-dict-maker-3-Output":{"parameterType":"LIST"},"pipelinechannel--list-dict-maker-3-Output-loop-item":{"parameterType":"STRING"}}}}' + - name: components-comp-for-loop-13 + value: '{"dag":{"tasks":{"print-int-6":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-int-6"},"inputs":{"parameters":{"x":{"componentInputParameter":"pipelinechannel--list-dict-maker-1-2-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"a\"]"}}},"taskInfo":{"name":"print-int-6"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--list-dict-maker-1-2-Output":{"parameterType":"LIST"},"pipelinechannel--list-dict-maker-1-2-Output-loop-item":{"parameterType":"STRING"}}}}' + - name: components-comp-for-loop-6 + value: 
'{"dag":{"tasks":{"print-text-5":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-5"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-5","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"}}},"taskInfo":{"name":"print-text-5"}},"print-text-6":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-6"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-5","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"taskInfo":{"name":"print-text-6"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-5":{"parameterType":"STRUCT"}}}}' + - name: components-comp-for-loop-4 + value: '{"dag":{"tasks":{"for-loop-6":{"componentRef":{"name":"comp-for-loop-6"},"iteratorPolicy":{"parallelismLimit":1},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-5","items":{"raw":"[{\"A_a\": + \"10\", \"B_b\": \"20\"}, {\"A_a\": \"100\", \"B_b\": \"200\"}]"}},"taskInfo":{"name":"for-loop-6"}},"print-text-3":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-3"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-3","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"}}},"taskInfo":{"name":"print-text-3"}},"print-text-4":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-4"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-3","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"taskInfo":{"name":"print-text-4"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-3":{"parameterType":"STRUCT"}}}}' + - name: components-comp-for-loop-8 + value: '{"dag":{"tasks":{"print-int":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-int"},"inputs":{"parameters":{"x":{"componentInputParameter":"pipelinechannel--loop-item-param-7","parameterExpressionSelector":"parseJson(string_value)[\"a\"]"}}},"taskInfo":{"name":"print-int"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-7":{"parameterType":"STRUCT"}}}}' + - name: components-comp-for-loop-9 + value: '{"dag":{"tasks":{"print-int-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-int-2"},"inputs":{"parameters":{"x":{"componentInputParameter":"pipelinechannel--list-dict-maker-0-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"a\"]"}}},"taskInfo":{"name":"print-int-2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--list-dict-maker-0-Output":{"parameterType":"LIST"},"pipelinechannel--list-dict-maker-0-Output-loop-item":{"parameterType":"STRUCT"}}}}' + - name: components-836a4c37a90c1e5ed46b6d5da20665c829d3deeca0e86399d01477c361d28d9b + value: '{"executorLabel":"exec-list-dict-maker-0","outputDefinitions":{"parameters":{"Output":{"parameterType":"LIST"}}}}' + - name: implementations-836a4c37a90c1e5ed46b6d5da20665c829d3deeca0e86399d01477c361d28d9b + value: '{"args":["--executor_input","{{$}}","--function_to_execute","list_dict_maker_0"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + list_dict_maker_0() -\u003e List[Dict[str, int]]:\n \"\"\"Enforces strict + type checking - returns a list of dictionaries \n where keys are strings + and values are integers. For testing type \n handling during compilation.\"\"\"\n return + [{''a'': 1, ''b'': 2}, {''a'': 2, ''b'': 3}, {''a'': 3, ''b'': 4}]\n\n"],"image":"python:3.9"}' + - name: components-1ae8be565131b8c7ec5ffc8e03ac7af15afb6dcd95ef8d33087aca373a4b70df + value: '{"executorLabel":"exec-list-dict-maker-1","outputDefinitions":{"parameters":{"Output":{"parameterType":"LIST"}}}}' + - name: implementations-1ae8be565131b8c7ec5ffc8e03ac7af15afb6dcd95ef8d33087aca373a4b70df + value: '{"args":["--executor_input","{{$}}","--function_to_execute","list_dict_maker_1"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + list_dict_maker_1() -\u003e List[Dict]:\n \"\"\"Utilizes generic dictionary + typing (no enforcement of specific key or\n value types).\n\n Tests + flexibility in type handling.\n \"\"\"\n return [{''a'': 1, ''b'': 2}, + {''a'': 2, ''b'': 3}, {''a'': 3, ''b'': 4}]\n\n"],"image":"python:3.9"}' + - name: components-8d525622f8b91622073787310058be85ff563a019f2e268d47bffa49d10b0342 + value: '{"executorLabel":"exec-list-dict-maker-2","outputDefinitions":{"parameters":{"Output":{"parameterType":"LIST"}}}}' + - name: implementations-8d525622f8b91622073787310058be85ff563a019f2e268d47bffa49d10b0342 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","list_dict_maker_2"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + list_dict_maker_2() -\u003e List[dict]:\n \"\"\"Returns a list of dictionaries + without type enforcement.\n\n Tests flexibility in type handling.\n \"\"\"\n return + [{''a'': 1, ''b'': 2}, {''a'': 2, ''b'': 3}, {''a'': 3, ''b'': 4}]\n\n"],"image":"python:3.9"}' + - name: components-c48bb447bbb57c3886a493ebda03842c1ef51f6d4dd651635e917f508d1175dd + value: '{"executorLabel":"exec-list-dict-maker-3","outputDefinitions":{"parameters":{"Output":{"parameterType":"LIST"}}}}' + - name: implementations-c48bb447bbb57c3886a493ebda03842c1ef51f6d4dd651635e917f508d1175dd + value: '{"args":["--executor_input","{{$}}","--function_to_execute","list_dict_maker_3"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + list_dict_maker_3() -\u003e List:\n \"\"\"Returns a basic list (no typing + or structure guarantees).\n\n Tests the limits of compiler type handling.\n \"\"\"\n return + [{''a'': 1, ''b'': 2}, {''a'': 2, ''b'': 3}, {''a'': 3, ''b'': 4}]\n\n"],"image":"python:3.9"}' + - name: components-root + value: 
'{"dag":{"tasks":{"for-loop-1":{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"}}},"iteratorPolicy":{"parallelismLimit":2},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-1"}},"for-loop-10":{"componentRef":{"name":"comp-for-loop-10"},"dependentTasks":["list-dict-maker-1"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-1-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-1"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-1-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-1-Output"}},"taskInfo":{"name":"for-loop-10"}},"for-loop-11":{"componentRef":{"name":"comp-for-loop-11"},"dependentTasks":["list-dict-maker-2"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-2"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-2-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-2-Output"}},"taskInfo":{"name":"for-loop-11"}},"for-loop-12":{"componentRef":{"name":"comp-for-loop-12"},"dependentTasks":["list-dict-maker-3"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-3-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-3"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-3-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-3-Output"}},"taskInfo":{"name":"for-loop-12"}},"for-loop-13":{"componentRef":{"name":"comp-for-loop-13"},"dependentTasks":["list-dict-maker-1-2"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-1-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-1-2"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-1-2-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-1-2-Output"}},"taskInfo":{"name":"for-loop-13"}},"for-loop-4":{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[{\"A_a\": + \"1\", \"B_b\": \"2\"}, {\"A_a\": \"10\", \"B_b\": \"20\"}]"}},"taskInfo":{"name":"for-loop-4"}},"for-loop-8":{"componentRef":{"name":"comp-for-loop-8"},"iteratorPolicy":{"parallelismLimit":1},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-7","items":{"raw":"[{\"a\": + 1, \"b\": 2}, {\"a\": 2, \"b\": 3}, {\"a\": 3, \"b\": 
4}]"}},"taskInfo":{"name":"for-loop-8"}},"for-loop-9":{"componentRef":{"name":"comp-for-loop-9"},"dependentTasks":["list-dict-maker-0"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-0-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-0"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-0-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-0-Output"}},"taskInfo":{"name":"for-loop-9"}},"list-dict-maker-0":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-list-dict-maker-0"},"taskInfo":{"name":"list-dict-maker-0"}},"list-dict-maker-1":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-list-dict-maker-1"},"taskInfo":{"name":"list-dict-maker-1"}},"list-dict-maker-1-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-list-dict-maker-1-2"},"taskInfo":{"name":"list-dict-maker-1-2"}},"list-dict-maker-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-list-dict-maker-2"},"taskInfo":{"name":"list-dict-maker-2"}},"list-dict-maker-3":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-list-dict-maker-3"},"taskInfo":{"name":"list-dict-maker-3"}}}},"inputDefinitions":{"parameters":{"loop_parameter":{"parameterType":"LIST"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-loops + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: 
pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-2"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"}}},"taskInfo":{"name":"print-text-2"}}' + - name: container + value: '{{workflow.parameters.implementations-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task-name + value: print-text-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-text-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-2-driver.outputs.parameters.cached-decision}}' + depends: print-text-2-driver.Succeeded + name: print-text-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-loops + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - 
'{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"pipelinechannel--loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-2 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-2-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"pipelinechannel--loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: 
comp-for-loop-2-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2-for-loop-2-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-2 + template: comp-for-loop-2-for-loop-2-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"}}},"taskInfo":{"name":"print-text"}}' + - name: container + value: '{{workflow.parameters.implementations-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task-name + value: print-text + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-text-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-driver.outputs.parameters.cached-decision}}' + depends: print-text-driver.Succeeded + name: print-text + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-1 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-7c6688357d7777b7dadbdb7acda6f1774e7c9fc71f6d002466243931bf300c2a}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-int-3"},"inputs":{"parameters":{"x":{"componentInputParameter":"pipelinechannel--list-dict-maker-1-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"a\"]"}}},"taskInfo":{"name":"print-int-3"}}' + - name: container + value: '{{workflow.parameters.implementations-7c6688357d7777b7dadbdb7acda6f1774e7c9fc71f6d002466243931bf300c2a}}' + - name: task-name + value: print-int-3 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-int-3-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-int-3-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-int-3-driver.outputs.parameters.cached-decision}}' + depends: print-int-3-driver.Succeeded + name: print-int-3 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-10 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-7c6688357d7777b7dadbdb7acda6f1774e7c9fc71f6d002466243931bf300c2a}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-int-4"},"inputs":{"parameters":{"x":{"componentInputParameter":"pipelinechannel--list-dict-maker-2-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"a\"]"}}},"taskInfo":{"name":"print-int-4"}}' + - name: container + value: '{{workflow.parameters.implementations-7c6688357d7777b7dadbdb7acda6f1774e7c9fc71f6d002466243931bf300c2a}}' + - name: task-name + value: print-int-4 + - name: parent-dag-id + value: 
'{{inputs.parameters.parent-dag-id}}' + name: print-int-4-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-int-4-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-int-4-driver.outputs.parameters.cached-decision}}' + depends: print-int-4-driver.Succeeded + name: print-int-4 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-11 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-7c6688357d7777b7dadbdb7acda6f1774e7c9fc71f6d002466243931bf300c2a}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-int-5"},"inputs":{"parameters":{"x":{"componentInputParameter":"pipelinechannel--list-dict-maker-3-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"a\"]"}}},"taskInfo":{"name":"print-int-5"}}' + - name: container + value: '{{workflow.parameters.implementations-7c6688357d7777b7dadbdb7acda6f1774e7c9fc71f6d002466243931bf300c2a}}' + - name: task-name + value: print-int-5 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-int-5-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-int-5-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-int-5-driver.outputs.parameters.cached-decision}}' + depends: print-int-5-driver.Succeeded + name: print-int-5 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-12 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-7c6688357d7777b7dadbdb7acda6f1774e7c9fc71f6d002466243931bf300c2a}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-int-6"},"inputs":{"parameters":{"x":{"componentInputParameter":"pipelinechannel--list-dict-maker-1-2-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"a\"]"}}},"taskInfo":{"name":"print-int-6"}}' + - name: container + value: '{{workflow.parameters.implementations-7c6688357d7777b7dadbdb7acda6f1774e7c9fc71f6d002466243931bf300c2a}}' + - name: task-name + value: print-int-6 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-int-6-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-int-6-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-int-6-driver.outputs.parameters.cached-decision}}' + depends: print-int-6-driver.Succeeded + name: print-int-6 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-13 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-5"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-5","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"}}},"taskInfo":{"name":"print-text-5"}}' + - 
name: container + value: '{{workflow.parameters.implementations-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task-name + value: print-text-5 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-text-5-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-5-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-5-driver.outputs.parameters.cached-decision}}' + depends: print-text-5-driver.Succeeded + name: print-text-5 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-6"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-5","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"taskInfo":{"name":"print-text-6"}}' + - name: container + value: '{{workflow.parameters.implementations-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task-name + value: print-text-6 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-text-6-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-6-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-6-driver.outputs.parameters.cached-decision}}' + depends: print-text-6-driver.Succeeded + name: print-text-6 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-6 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-6}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-6"},"iteratorPolicy":{"parallelismLimit":1},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-5","items":{"raw":"[{\"A_a\": + \"10\", \"B_b\": \"20\"}, {\"A_a\": \"100\", \"B_b\": \"200\"}]"}},"taskInfo":{"name":"for-loop-6"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-6 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-6-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-6}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-6"},"iteratorPolicy":{"parallelismLimit":1},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-5","items":{"raw":"[{\"A_a\": + \"10\", \"B_b\": \"20\"}, {\"A_a\": \"100\", \"B_b\": \"200\"}]"}},"taskInfo":{"name":"for-loop-6"}}' + name: iteration-driver + template: system-dag-driver + - 
arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-6-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-6-for-loop-6-iterator + outputs: {} + parallelism: 1 + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-6 + template: comp-for-loop-6-for-loop-6-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-3"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-3","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"}}},"taskInfo":{"name":"print-text-3"}}' + - name: container + value: '{{workflow.parameters.implementations-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task-name + value: print-text-3 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-text-3-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-3-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-3-driver.outputs.parameters.cached-decision}}' + depends: print-text-3-driver.Succeeded + name: print-text-3 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-text-4"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-3","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"taskInfo":{"name":"print-text-4"}}' + - name: container + value: '{{workflow.parameters.implementations-c70825d1fe54b54a70f9bc27f8c81631450492bb07eeab3259d332992c5e48d8}}' + - name: task-name + value: print-text-4 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-text-4-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-text-4-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-text-4-driver.outputs.parameters.cached-decision}}' + depends: print-text-4-driver.Succeeded + name: print-text-4 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-4 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-7c6688357d7777b7dadbdb7acda6f1774e7c9fc71f6d002466243931bf300c2a}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-int"},"inputs":{"parameters":{"x":{"componentInputParameter":"pipelinechannel--loop-item-param-7","parameterExpressionSelector":"parseJson(string_value)[\"a\"]"}}},"taskInfo":{"name":"print-int"}}' + - name: container + value: 
'{{workflow.parameters.implementations-7c6688357d7777b7dadbdb7acda6f1774e7c9fc71f6d002466243931bf300c2a}}' + - name: task-name + value: print-int + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-int-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-int-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-int-driver.outputs.parameters.cached-decision}}' + depends: print-int-driver.Succeeded + name: print-int + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-8 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-7c6688357d7777b7dadbdb7acda6f1774e7c9fc71f6d002466243931bf300c2a}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-int-2"},"inputs":{"parameters":{"x":{"componentInputParameter":"pipelinechannel--list-dict-maker-0-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"a\"]"}}},"taskInfo":{"name":"print-int-2"}}' + - name: container + value: '{{workflow.parameters.implementations-7c6688357d7777b7dadbdb7acda6f1774e7c9fc71f6d002466243931bf300c2a}}' + - name: task-name + value: print-int-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-int-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-int-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-int-2-driver.outputs.parameters.cached-decision}}' + depends: print-int-2-driver.Succeeded + name: print-int-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-9 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-1}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"}}},"iteratorPolicy":{"parallelismLimit":2},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-1"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-1 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-1-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: 
'{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"}}},"iteratorPolicy":{"parallelismLimit":2},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-1"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-1-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-1-for-loop-1-iterator + outputs: {} + parallelism: 2 + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-10}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-10"},"dependentTasks":["list-dict-maker-1"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-1-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-1"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-1-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-1-Output"}},"taskInfo":{"name":"for-loop-10"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-10 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-10-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-10}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-10"},"dependentTasks":["list-dict-maker-1"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-1-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-1"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-1-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-1-Output"}},"taskInfo":{"name":"for-loop-10"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-10-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-10-for-loop-10-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-11}}' + - name: iteration-index + value: 
'{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-11"},"dependentTasks":["list-dict-maker-2"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-2"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-2-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-2-Output"}},"taskInfo":{"name":"for-loop-11"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-11 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-11-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-11}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-11"},"dependentTasks":["list-dict-maker-2"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-2"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-2-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-2-Output"}},"taskInfo":{"name":"for-loop-11"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-11-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-11-for-loop-11-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-12}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-12"},"dependentTasks":["list-dict-maker-3"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-3-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-3"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-3-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-3-Output"}},"taskInfo":{"name":"for-loop-12"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-12 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-12-iteration + outputs: {} + - 
dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-12}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-12"},"dependentTasks":["list-dict-maker-3"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-3-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-3"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-3-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-3-Output"}},"taskInfo":{"name":"for-loop-12"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-12-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-12-for-loop-12-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-13}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-13"},"dependentTasks":["list-dict-maker-1-2"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-1-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-1-2"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-1-2-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-1-2-Output"}},"taskInfo":{"name":"for-loop-13"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-13 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-13-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-13}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-13"},"dependentTasks":["list-dict-maker-1-2"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-1-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-1-2"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-1-2-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-1-2-Output"}},"taskInfo":{"name":"for-loop-13"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-13-iteration + withSequence: + count: 
'{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-13-for-loop-13-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-4}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[{\"A_a\": + \"1\", \"B_b\": \"2\"}, {\"A_a\": \"10\", \"B_b\": \"20\"}]"}},"taskInfo":{"name":"for-loop-4"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-4 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-4-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-4}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[{\"A_a\": + \"1\", \"B_b\": \"2\"}, {\"A_a\": \"10\", \"B_b\": \"20\"}]"}},"taskInfo":{"name":"for-loop-4"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-4-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-4-for-loop-4-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-8}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-8"},"iteratorPolicy":{"parallelismLimit":1},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-7","items":{"raw":"[{\"a\": + 1, \"b\": 2}, {\"a\": 2, \"b\": 3}, {\"a\": 3, \"b\": 4}]"}},"taskInfo":{"name":"for-loop-8"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-8 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-8-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-8}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: 
'{"componentRef":{"name":"comp-for-loop-8"},"iteratorPolicy":{"parallelismLimit":1},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-7","items":{"raw":"[{\"a\": + 1, \"b\": 2}, {\"a\": 2, \"b\": 3}, {\"a\": 3, \"b\": 4}]"}},"taskInfo":{"name":"for-loop-8"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-8-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-8-for-loop-8-iterator + outputs: {} + parallelism: 1 + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-9}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-9"},"dependentTasks":["list-dict-maker-0"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-0-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-0"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-0-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-0-Output"}},"taskInfo":{"name":"for-loop-9"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-9 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-9-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-9}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-9"},"dependentTasks":["list-dict-maker-0"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-0-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-0"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-0-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-0-Output"}},"taskInfo":{"name":"for-loop-9"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-9-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-9-for-loop-9-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-1 + template: comp-for-loop-1-for-loop-1-iterator + - arguments: + parameters: + - name: parent-dag-id + value: 
'{{inputs.parameters.parent-dag-id}}' + depends: list-dict-maker-1.Succeeded + name: for-loop-10 + template: comp-for-loop-10-for-loop-10-iterator + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: list-dict-maker-2.Succeeded + name: for-loop-11 + template: comp-for-loop-11-for-loop-11-iterator + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: list-dict-maker-3.Succeeded + name: for-loop-12 + template: comp-for-loop-12-for-loop-12-iterator + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: list-dict-maker-1-2.Succeeded + name: for-loop-13 + template: comp-for-loop-13-for-loop-13-iterator + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-4 + template: comp-for-loop-4-for-loop-4-iterator + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-8 + template: comp-for-loop-8-for-loop-8-iterator + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: list-dict-maker-0.Succeeded + name: for-loop-9 + template: comp-for-loop-9-for-loop-9-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-836a4c37a90c1e5ed46b6d5da20665c829d3deeca0e86399d01477c361d28d9b}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-list-dict-maker-0"},"taskInfo":{"name":"list-dict-maker-0"}}' + - name: container + value: '{{workflow.parameters.implementations-836a4c37a90c1e5ed46b6d5da20665c829d3deeca0e86399d01477c361d28d9b}}' + - name: task-name + value: list-dict-maker-0 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: list-dict-maker-0-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.list-dict-maker-0-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.list-dict-maker-0-driver.outputs.parameters.cached-decision}}' + depends: list-dict-maker-0-driver.Succeeded + name: list-dict-maker-0 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-1ae8be565131b8c7ec5ffc8e03ac7af15afb6dcd95ef8d33087aca373a4b70df}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-list-dict-maker-1"},"taskInfo":{"name":"list-dict-maker-1"}}' + - name: container + value: '{{workflow.parameters.implementations-1ae8be565131b8c7ec5ffc8e03ac7af15afb6dcd95ef8d33087aca373a4b70df}}' + - name: task-name + value: list-dict-maker-1 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: list-dict-maker-1-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.list-dict-maker-1-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.list-dict-maker-1-driver.outputs.parameters.cached-decision}}' + depends: list-dict-maker-1-driver.Succeeded + name: list-dict-maker-1 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-1ae8be565131b8c7ec5ffc8e03ac7af15afb6dcd95ef8d33087aca373a4b70df}}' + - name: task + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-list-dict-maker-1-2"},"taskInfo":{"name":"list-dict-maker-1-2"}}' + - name: container + value: '{{workflow.parameters.implementations-1ae8be565131b8c7ec5ffc8e03ac7af15afb6dcd95ef8d33087aca373a4b70df}}' + - name: task-name + value: list-dict-maker-1-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: list-dict-maker-1-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.list-dict-maker-1-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.list-dict-maker-1-2-driver.outputs.parameters.cached-decision}}' + depends: list-dict-maker-1-2-driver.Succeeded + name: list-dict-maker-1-2 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-8d525622f8b91622073787310058be85ff563a019f2e268d47bffa49d10b0342}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-list-dict-maker-2"},"taskInfo":{"name":"list-dict-maker-2"}}' + - name: container + value: '{{workflow.parameters.implementations-8d525622f8b91622073787310058be85ff563a019f2e268d47bffa49d10b0342}}' + - name: task-name + value: list-dict-maker-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: list-dict-maker-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.list-dict-maker-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.list-dict-maker-2-driver.outputs.parameters.cached-decision}}' + depends: list-dict-maker-2-driver.Succeeded + name: list-dict-maker-2 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c48bb447bbb57c3886a493ebda03842c1ef51f6d4dd651635e917f508d1175dd}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-list-dict-maker-3"},"taskInfo":{"name":"list-dict-maker-3"}}' + - name: container + value: '{{workflow.parameters.implementations-c48bb447bbb57c3886a493ebda03842c1ef51f6d4dd651635e917f508d1175dd}}' + - name: task-name + value: list-dict-maker-3 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: list-dict-maker-3-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.list-dict-maker-3-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.list-dict-maker-3-driver.outputs.parameters.cached-decision}}' + depends: list-dict-maker-3-driver.Succeeded + name: list-dict-maker-3 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git 
a/test_data/compiled-workflows/pipeline_with_params_containing_format.yaml b/test_data/compiled-workflows/pipeline_with_params_containing_format.yaml new file mode 100644 index 00000000000..be0564487bb --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_params_containing_format.yaml @@ -0,0 +1,516 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-pipelineparam-containing-format- +spec: + arguments: + parameters: + - name: components-bdc598acad507af0648cefd0773f42b1236c6c71afe115dd49ee06605c3e7a6e + value: '{"executorLabel":"exec-print-op2","inputDefinitions":{"parameters":{"text1":{"parameterType":"STRING"},"text2":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-bdc598acad507af0648cefd0773f42b1236c6c71afe115dd49ee06605c3e7a6e + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_op2"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_op2(text1: str, text2: str) -\u003e str:\n print(text1 + text2)\n return + text1 + text2\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-2 + value: '{"dag":{"tasks":{"print-op2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op2"},"inputs":{"parameters":{"pipelinechannel--name":{"componentInputParameter":"pipelinechannel--name"},"text1":{"componentInputParameter":"pipelinechannel--loop-item-param-1"},"text2":{"runtimeValue":{"constant":" + and {{$.inputs.parameters[''pipelinechannel--name'']}}."}}}},"taskInfo":{"name":"print-op2"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--loop-item-param-1":{"parameterType":"STRING"},"pipelinechannel--name":{"parameterType":"STRING"}}}}' + - name: components-984f43d1ff549827aa35bc616c4b5b6479714c835e683349f065c2da19055b4f + value: '{"executorLabel":"exec-print-op","inputDefinitions":{"parameters":{"text":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-984f43d1ff549827aa35bc616c4b5b6479714c835e683349f065c2da19055b4f + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_op"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_op(text: str) -\u003e str:\n print(text)\n return text\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"for-loop-2":{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--name":{"componentInputParameter":"name"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[\"1\", + \"2\"]"}},"taskInfo":{"name":"for-loop-2"}},"print-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"inputs":{"parameters":{"pipelinechannel--name":{"componentInputParameter":"name"},"text":{"runtimeValue":{"constant":"Hello + {{$.inputs.parameters[''pipelinechannel--name'']}}"}}}},"taskInfo":{"name":"print-op"}},"print-op-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"dependentTasks":["print-op"],"inputs":{"parameters":{"pipelinechannel--print-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"print-op"}},"text":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--print-op-Output'']}}, + again."}}}},"taskInfo":{"name":"print-op-2"}}}},"inputDefinitions":{"parameters":{"name":{"defaultValue":"KFP","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-pipelineparam-containing-format + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 
100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-bdc598acad507af0648cefd0773f42b1236c6c71afe115dd49ee06605c3e7a6e}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op2"},"inputs":{"parameters":{"pipelinechannel--name":{"componentInputParameter":"pipelinechannel--name"},"text1":{"componentInputParameter":"pipelinechannel--loop-item-param-1"},"text2":{"runtimeValue":{"constant":" + and {{$.inputs.parameters[''pipelinechannel--name'']}}."}}}},"taskInfo":{"name":"print-op2"}}' + - name: container + value: '{{workflow.parameters.implementations-bdc598acad507af0648cefd0773f42b1236c6c71afe115dd49ee06605c3e7a6e}}' + - name: task-name + value: print-op2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: 
'{{tasks.print-op2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op2-driver.outputs.parameters.cached-decision}}' + depends: print-op2-driver.Succeeded + name: print-op2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-pipelineparam-containing-format + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--name":{"componentInputParameter":"name"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[\"1\", + \"2\"]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-2 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-2-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-2}}' + - 
name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--name":{"componentInputParameter":"name"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[\"1\", + \"2\"]"}},"taskInfo":{"name":"for-loop-2"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-2-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-2-for-loop-2-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-2 + template: comp-for-loop-2-for-loop-2-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-984f43d1ff549827aa35bc616c4b5b6479714c835e683349f065c2da19055b4f}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"inputs":{"parameters":{"pipelinechannel--name":{"componentInputParameter":"name"},"text":{"runtimeValue":{"constant":"Hello + {{$.inputs.parameters[''pipelinechannel--name'']}}"}}}},"taskInfo":{"name":"print-op"}}' + - name: container + value: '{{workflow.parameters.implementations-984f43d1ff549827aa35bc616c4b5b6479714c835e683349f065c2da19055b4f}}' + - name: task-name + value: print-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-driver.outputs.parameters.cached-decision}}' + depends: print-op-driver.Succeeded + name: print-op + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-984f43d1ff549827aa35bc616c4b5b6479714c835e683349f065c2da19055b4f}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op-2"},"dependentTasks":["print-op"],"inputs":{"parameters":{"pipelinechannel--print-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"print-op"}},"text":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--print-op-Output'']}}, + again."}}}},"taskInfo":{"name":"print-op-2"}}' + - name: container + value: '{{workflow.parameters.implementations-984f43d1ff549827aa35bc616c4b5b6479714c835e683349f065c2da19055b4f}}' + - name: task-name + value: print-op-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: print-op.Succeeded + name: print-op-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-2-driver.outputs.parameters.cached-decision}}' + depends: print-op-2-driver.Succeeded + name: print-op-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + 
outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"name":"KFP"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_placeholders.yaml b/test_data/compiled-workflows/pipeline_with_placeholders.yaml new file mode 100644 index 00000000000..8c489fef542 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_placeholders.yaml @@ -0,0 +1,376 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-placeholders- +spec: + arguments: + parameters: + - name: components-1c2d9ee5193f6bab323886b46d1c14a22e2f07a25c79fd4e264b5f797275a7cb + value: '{"executorLabel":"exec-print-all-placeholders","inputDefinitions":{"parameters":{"job_id":{"parameterType":"STRING"},"job_name":{"parameterType":"STRING"},"job_resource_name":{"parameterType":"STRING"},"task_id":{"parameterType":"STRING"},"task_name":{"parameterType":"STRING"}}}}' + - name: implementations-1c2d9ee5193f6bab323886b46d1c14a22e2f07a25c79fd4e264b5f797275a7cb + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_all_placeholders"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_all_placeholders(\n job_name: str,\n job_resource_name: + str,\n job_id: str,\n task_name: str,\n task_id: str,\n):\n allPlaceholders + = [job_name, job_resource_name, job_id, task_name, task_id]\n\n for placeholder + in allPlaceholders:\n if \"\\{\\{\" in placeholder or placeholder == + \"\":\n raise RuntimeError(\n \"Expected the placeholder + to be replaced with a value: \" + placeholder\n )\n\n assert + task_name == \"print-all-placeholders\"\n assert job_resource_name.startswith(\"pipeline-with-placeholders-\")\n\n output + = \", \".join(allPlaceholders)\n print(output)\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"print-all-placeholders":{"cachingOptions":{},"componentRef":{"name":"comp-print-all-placeholders"},"inputs":{"parameters":{"job_id":{"runtimeValue":{"constant":"{{$.pipeline_job_uuid}}"}},"job_name":{"runtimeValue":{"constant":"{{$.pipeline_job_name}}"}},"job_resource_name":{"runtimeValue":{"constant":"{{$.pipeline_job_resource_name}}"}},"task_id":{"runtimeValue":{"constant":"{{$.pipeline_task_uuid}}"}},"task_name":{"runtimeValue":{"constant":"{{$.pipeline_task_name}}"}}}},"taskInfo":{"name":"print-all-placeholders"}}}}}' + 
entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-placeholders + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: 
ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-1c2d9ee5193f6bab323886b46d1c14a22e2f07a25c79fd4e264b5f797275a7cb}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-print-all-placeholders"},"inputs":{"parameters":{"job_id":{"runtimeValue":{"constant":"{{$.pipeline_job_uuid}}"}},"job_name":{"runtimeValue":{"constant":"{{$.pipeline_job_name}}"}},"job_resource_name":{"runtimeValue":{"constant":"{{$.pipeline_job_resource_name}}"}},"task_id":{"runtimeValue":{"constant":"{{$.pipeline_task_uuid}}"}},"task_name":{"runtimeValue":{"constant":"{{$.pipeline_task_name}}"}}}},"taskInfo":{"name":"print-all-placeholders"}}' + - name: container + value: '{{workflow.parameters.implementations-1c2d9ee5193f6bab323886b46d1c14a22e2f07a25c79fd4e264b5f797275a7cb}}' + - name: task-name + value: print-all-placeholders + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-all-placeholders-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-all-placeholders-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-all-placeholders-driver.outputs.parameters.cached-decision}}' + depends: print-all-placeholders-driver.Succeeded + name: print-all-placeholders + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-placeholders + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + 
parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_pod_metadata.yaml b/test_data/compiled-workflows/pipeline_with_pod_metadata.yaml new file mode 100644 index 00000000000..21ca38d57fa --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_pod_metadata.yaml @@ -0,0 +1,1174 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-pod-metadata- +spec: + arguments: + parameters: + - name: kubernetes-comp-validate-no-pod-metadata + value: '{"fieldPathAsEnv":[{"fieldPath":"metadata.annotations[''task-annotation'']","name":"POD_TASK_ANNOTATION"},{"fieldPath":"metadata.annotations[''task-annotation-1'']","name":"POD_TASK_ANNOTATION_1"},{"fieldPath":"metadata.labels[''task-label'']","name":"POD_TASK_LABEL"},{"fieldPath":"metadata.labels[''task-label-1'']","name":"POD_TASK_LABEL_1"}]}' + - name: components-3d278faa61c986d801a417f587e50005053b72d5108dc720e6ce32593c050c24 + value: '{"executorLabel":"exec-validate-no-pod-metadata","outputDefinitions":{"parameters":{"Output":{"parameterType":"BOOLEAN"}}}}' + - name: implementations-3d278faa61c986d801a417f587e50005053b72d5108dc720e6ce32593c050c24 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","validate_no_pod_metadata"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + validate_no_pod_metadata() -\u003e bool:\n import os\n annotation = + os.getenv(''POD_TASK_ANNOTATION'')\n annotation_2 = os.getenv(''POD_TASK_ANNOTATION_1'')\n if + annotation != '''' or annotation_2 != '''':\n raise ValueError(\n f''Pod + annotation is {annotation} but is supposed to be None.''\n )\n label + = os.getenv(''POD_TASK_LABEL'')\n label_2 = os.getenv(''POD_TASK_LABEL_1'')\n if + label != '''' or label_2 != '''':\n raise ValueError(\n f''Pod + label is {label} but is supposed to be None.''\n )\n return True\n\n"],"image":"python:3.9"}' + - name: kubernetes-comp-validate-pod-metadata-task-a + value: '{"fieldPathAsEnv":[{"fieldPath":"metadata.annotations[''task-annotation'']","name":"POD_TASK_ANNOTATION"},{"fieldPath":"metadata.labels[''task-label-1'']","name":"POD_TASK_LABEL_1"},{"fieldPath":"metadata.labels[''task-label-2'']","name":"POD_TASK_LABEL_2"}],"podMetadata":{"annotations":{"task-annotation":"annotation"},"labels":{"task-label-1":"label-1","task-label-2":"label-2"}}}' + - name: components-a58a8ba91674b53e9cfd2a56b122fa32c32733648c4337cfd4d489147baa0351 + value: '{"executorLabel":"exec-validate-pod-metadata-task-a","inputDefinitions":{"parameters":{"annotation_exp_val":{"parameterType":"STRING"},"annotation_path":{"parameterType":"STRING"},"label_exp_val_1":{"parameterType":"STRING"},"label_exp_val_2":{"parameterType":"STRING"},"label_path_1":{"parameterType":"STRING"},"label_path_2":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"BOOLEAN"}}}}' + - name: implementations-a58a8ba91674b53e9cfd2a56b122fa32c32733648c4337cfd4d489147baa0351 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","validate_pod_metadata_task_a"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + validate_pod_metadata_task_a(annotation_path: str,\n annotation_exp_val: + str,\n label_path_1: str,\n label_exp_val_1: + str,\n label_path_2: str,\n label_exp_val_2: + str) -\u003e bool:\n import os\n annotation_val = os.getenv(annotation_path)\n if + annotation_val != annotation_exp_val:\n raise ValueError(\n f''Pod + annotation is {annotation_val} but is supposed to be {annotation_exp_val}.''\n )\n\n label_val_1 + = os.getenv(label_path_1)\n if label_val_1 != label_exp_val_1:\n raise + ValueError(\n f''Pod label is {label_val_1} but is supposed to + be {label_exp_val_1}.''\n )\n\n label_val_2 = os.getenv(label_path_2)\n if + label_val_2 != label_exp_val_2:\n raise ValueError(\n f''Pod + label is {label_val_2} but is supposed to be {label_exp_val_2}.''\n )\n return + True\n\n"],"image":"python:3.9"}' + - name: kubernetes-comp-validate-pod-metadata-task-b + value: '{"fieldPathAsEnv":[{"fieldPath":"metadata.annotations[''task-annotation-1'']","name":"POD_TASK_ANNOTATION_1"},{"fieldPath":"metadata.annotations[''task-annotation-2'']","name":"POD_TASK_ANNOTATION_2"},{"fieldPath":"metadata.annotations[''task-annotation-3'']","name":"POD_TASK_ANNOTATION_3"},{"fieldPath":"metadata.annotations[''task-annotation-4'']","name":"POD_TASK_ANNOTATION_4"},{"fieldPath":"metadata.labels[''task-label-1'']","name":"POD_TASK_LABEL_1"},{"fieldPath":"metadata.labels[''task-label-2'']","name":"POD_TASK_LABEL_2"},{"fieldPath":"metadata.labels[''task-label-3'']","name":"POD_TASK_LABEL_3"}],"podMetadata":{"annotations":{"task-annotation-1":"annotation-1","task-annotation-2":"annotation-2","task-annotation-3":"annotation-3","task-annotation-4":"annotation-4"},"labels":{"task-label-1":"label-1","task-label-2":"label-2","task-label-3":"label-3"}}}' + - name: components-ff4adfd20fbb1ccd7f6e9330882576b7e34ffba16bc568acc10956b8652c28d9 + value: '{"executorLabel":"exec-validate-pod-metadata-task-b","inputDefinitions":{"parameters":{"annotation_exp_val_1":{"parameterType":"STRING"},"annotation_exp_val_2":{"parameterType":"STRING"},"annotation_exp_val_3":{"parameterType":"STRING"},"annotation_exp_val_4":{"parameterType":"STRING"},"annotation_path_1":{"parameterType":"STRING"},"annotation_path_2":{"parameterType":"STRING"},"annotation_path_3":{"parameterType":"STRING"},"annotation_path_4":{"parameterType":"STRING"},"label_exp_val_1":{"parameterType":"STRING"},"label_exp_val_2":{"parameterType":"STRING"},"label_exp_val_3":{"parameterType":"STRING"},"label_path_1":{"parameterType":"STRING"},"label_path_2":{"parameterType":"STRING"},"label_path_3":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"BOOLEAN"}}}}' + - name: implementations-ff4adfd20fbb1ccd7f6e9330882576b7e34ffba16bc568acc10956b8652c28d9 + value: 
'{"args":["--executor_input","{{$}}","--function_to_execute","validate_pod_metadata_task_b"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + validate_pod_metadata_task_b(annotation_path_1: str,\n annotation_exp_val_1: + str,\n annotation_path_2: str,\n annotation_exp_val_2: + str,\n annotation_path_3: str,\n annotation_exp_val_3: + str,\n annotation_path_4: str,\n annotation_exp_val_4: + str,\n label_path_1: str,\n label_exp_val_1: + str,\n label_path_2: str,\n label_exp_val_2: + str,\n label_path_3: str,\n label_exp_val_3: + str) -\u003e bool:\n import os\n annotation_val_1 = os.getenv(annotation_path_1)\n if + annotation_val_1 != annotation_exp_val_1:\n raise ValueError(\n f''Pod + annotation is {annotation_val_1} but is supposed to be {annotation_exp_val_1}.''\n )\n annotation_val_2 + = os.getenv(annotation_path_2)\n if annotation_val_2 != annotation_exp_val_2:\n raise + ValueError(\n f''Pod annotation is {annotation_val_2} but is supposed + to be {annotation_exp_val_2}.''\n )\n annotation_val_3 = os.getenv(annotation_path_3)\n if + annotation_val_3 != annotation_exp_val_3:\n raise ValueError(\n f''Pod + annotation is {annotation_val_3} but is supposed to be {annotation_exp_val_3}.''\n )\n annotation_val_4 + = os.getenv(annotation_path_4)\n if annotation_val_4 != annotation_exp_val_4:\n raise + ValueError(\n f''Pod annotation is {annotation_val_4} but is supposed + to be {annotation_exp_val_4}.''\n )\n label_val_1 = os.getenv(label_path_1)\n if + label_val_1 != label_exp_val_1:\n raise ValueError(\n f''Pod + label is {label_val_1} but is supposed to be {label_exp_val_1}.''\n )\n label_val_2 + = os.getenv(label_path_2)\n if label_val_2 != label_exp_val_2:\n raise + ValueError(\n f''Pod label is {label_val_2} but is supposed to + be {label_exp_val_2}.''\n )\n label_val_3 = os.getenv(label_path_3)\n if + label_val_3 != label_exp_val_3:\n raise ValueError(\n f''Pod + label is {label_val_3} but is supposed to be {label_exp_val_3}.''\n )\n return + True\n\n"],"image":"python:3.9"}' + - name: kubernetes-comp-validate-pod-metadata-task-c + value: '{"fieldPathAsEnv":[{"fieldPath":"metadata.annotations[''task-annotation-1'']","name":"POD_TASK_ANNOTATION_1"},{"fieldPath":"metadata.annotations[''task-annotation-2'']","name":"POD_TASK_ANNOTATION_2"}],"podMetadata":{"annotations":{"task-annotation-1":"annotation-1","task-annotation-2":"annotation-2"}}}' + - name: components-0fe8b44e6cf954e8e447976b0b620a7737c5a0371bfd14ca4329c665328fa1d6 + value: '{"executorLabel":"exec-validate-pod-metadata-task-c","inputDefinitions":{"parameters":{"annotation_exp_val_1":{"parameterType":"STRING"},"annotation_exp_val_2":{"parameterType":"STRING"},"annotation_path_1":{"parameterType":"STRING"},"annotation_path_2":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"BOOLEAN"}}}}' + - name: 
implementations-0fe8b44e6cf954e8e447976b0b620a7737c5a0371bfd14ca4329c665328fa1d6 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","validate_pod_metadata_task_c"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + validate_pod_metadata_task_c(annotation_path_1: str,\n annotation_exp_val_1: + str,\n annotation_path_2: str,\n annotation_exp_val_2: + str) -\u003e bool:\n import os\n annotation_val_1 = os.getenv(annotation_path_1)\n if + annotation_val_1 != annotation_exp_val_1:\n raise ValueError(\n f''Pod + annotation is {annotation_val_1} but is supposed to be {annotation_exp_val_1}.''\n )\n\n annotation_val_2 + = os.getenv(annotation_path_2)\n if annotation_val_2 != annotation_exp_val_2:\n raise + ValueError(\n f''Pod annotation is {annotation_val_2} but is supposed + to be {annotation_exp_val_2}.''\n )\n return True\n\n"],"image":"python:3.9"}' + - name: kubernetes-comp-validate-pod-metadata-task-d + value: '{"fieldPathAsEnv":[{"fieldPath":"metadata.labels[''task-label-1'']","name":"POD_TASK_LABEL_1"},{"fieldPath":"metadata.labels[''task-label-2'']","name":"POD_TASK_LABEL_2"},{"fieldPath":"metadata.labels[''task-label-3'']","name":"POD_TASK_LABEL_3"}],"podMetadata":{"labels":{"task-label-1":"label-1","task-label-2":"label-2","task-label-3":"label-3"}}}' + - name: components-9ed49be6257bd027ea2d0882c606eddc7d5d2e83c82c63774ddd209b876bc316 + value: '{"executorLabel":"exec-validate-pod-metadata-task-d","inputDefinitions":{"parameters":{"label_exp_val_1":{"parameterType":"STRING"},"label_exp_val_2":{"parameterType":"STRING"},"label_exp_val_3":{"parameterType":"STRING"},"label_path_1":{"parameterType":"STRING"},"label_path_2":{"parameterType":"STRING"},"label_path_3":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"BOOLEAN"}}}}' + - name: implementations-9ed49be6257bd027ea2d0882c606eddc7d5d2e83c82c63774ddd209b876bc316 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","validate_pod_metadata_task_d"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + validate_pod_metadata_task_d(label_path_1: str,\n label_exp_val_1: + str,\n label_path_2: str,\n label_exp_val_2: + str,\n label_path_3: str,\n label_exp_val_3: + str) -\u003e bool:\n import os\n label_val_1 = os.getenv(label_path_1)\n if + label_val_1 != label_exp_val_1:\n raise ValueError(\n f''Pod + label is {label_val_1} but is supposed to be {label_exp_val_1}.''\n )\n label_val_2 + = os.getenv(label_path_2)\n if label_val_2 != label_exp_val_2:\n raise + ValueError(\n f''Pod label is {label_val_2} but is supposed to + be {label_exp_val_2}.''\n )\n label_val_3 = os.getenv(label_path_3)\n if + label_val_3 != label_exp_val_3:\n raise ValueError(\n f''Pod + label is {label_val_3} but is supposed to be {label_exp_val_3}.''\n )\n return + True\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"validate-no-pod-metadata":{"cachingOptions":{},"componentRef":{"name":"comp-validate-no-pod-metadata"},"taskInfo":{"name":"validate-no-pod-metadata"}},"validate-pod-metadata-task-a":{"cachingOptions":{},"componentRef":{"name":"comp-validate-pod-metadata-task-a"},"inputs":{"parameters":{"annotation_exp_val":{"runtimeValue":{"constant":"annotation"}},"annotation_path":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION"}},"label_exp_val_1":{"runtimeValue":{"constant":"label-1"}},"label_exp_val_2":{"runtimeValue":{"constant":"label-2"}},"label_path_1":{"runtimeValue":{"constant":"POD_TASK_LABEL_1"}},"label_path_2":{"runtimeValue":{"constant":"POD_TASK_LABEL_2"}}}},"taskInfo":{"name":"validate-pod-metadata-task-a"}},"validate-pod-metadata-task-b":{"cachingOptions":{},"componentRef":{"name":"comp-validate-pod-metadata-task-b"},"inputs":{"parameters":{"annotation_exp_val_1":{"runtimeValue":{"constant":"annotation-1"}},"annotation_exp_val_2":{"runtimeValue":{"constant":"annotation-2"}},"annotation_exp_val_3":{"runtimeValue":{"constant":"annotation-3"}},"annotation_exp_val_4":{"runtimeValue":{"constant":"annotation-4"}},"annotation_path_1":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION_1"}},"annotation_path_2":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION_2"}},"annotation_path_3":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION_3"}},"annotation_path_4":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION_4"}},"label_exp_val_1":{"runtimeValue":{"constant":"label-1"}},"label_exp_val_2":{"runtimeValue":{"constant":"label-2"}},"label_exp_val_3":{"runtimeValue":{"constant":"label-3"}},"label_path_1":{"runtimeValue":{"constant":"POD_TASK_LABEL_1"}},"label_path_2":{"runtimeValue":{"constant":"POD_TASK_LABEL_2"}},"label_path_3":{"runtimeValue":{"constant":"POD_TASK_LABEL_3"}}}},"taskInfo":{"name":"validate-pod-metadata-task-b"}},"validate-pod-metadata-task-c":{"cachingOptions":{},"componentRef":{"name":"comp-validate-pod-metadata-task-c"},"inputs":{"parameters":{"annotation_exp_val_1":{"runtimeValue":{"constant":"an
notation-1"}},"annotation_exp_val_2":{"runtimeValue":{"constant":"annotation-2"}},"annotation_path_1":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION_1"}},"annotation_path_2":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION_2"}}}},"taskInfo":{"name":"validate-pod-metadata-task-c"}},"validate-pod-metadata-task-d":{"cachingOptions":{},"componentRef":{"name":"comp-validate-pod-metadata-task-d"},"inputs":{"parameters":{"label_exp_val_1":{"runtimeValue":{"constant":"label-1"}},"label_exp_val_2":{"runtimeValue":{"constant":"label-2"}},"label_exp_val_3":{"runtimeValue":{"constant":"label-3"}},"label_path_1":{"runtimeValue":{"constant":"POD_TASK_LABEL_1"}},"label_path_2":{"runtimeValue":{"constant":"POD_TASK_LABEL_2"}},"label_path_3":{"runtimeValue":{"constant":"POD_TASK_LABEL_3"}}}},"taskInfo":{"name":"validate-pod-metadata-task-d"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-pod-metadata + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name 
+ - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + - name: pod-metadata-annotation-key + value: '{{inputs.parameters.pod-metadata-annotation-key}}' + - name: pod-metadata-annotation-val + value: '{{inputs.parameters.pod-metadata-annotation-val}}' + - name: pod-metadata-label-key-1 + value: '{{inputs.parameters.pod-metadata-label-key-1}}' + - name: pod-metadata-label-val-1 + value: '{{inputs.parameters.pod-metadata-label-val-1}}' + - name: pod-metadata-label-key-2 + value: '{{inputs.parameters.pod-metadata-label-key-2}}' + - name: pod-metadata-label-val-2 + value: '{{inputs.parameters.pod-metadata-label-val-2}}' + name: executor + template: metadata-1-2-system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + - name: pod-metadata-annotation-key + - name: pod-metadata-annotation-val + - name: pod-metadata-label-key-1 + - name: pod-metadata-label-val-1 + - name: pod-metadata-label-key-2 + - name: pod-metadata-label-val-2 + metadata: {} + name: metadata-1-2-system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - 
args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + - name: pod-metadata-annotation-key + - name: pod-metadata-annotation-val + - name: pod-metadata-label-key-1 + - name: pod-metadata-label-val-1 + - name: pod-metadata-label-key-2 + - name: pod-metadata-label-val-2 + metadata: + annotations: + '{{inputs.parameters.pod-metadata-annotation-key}}': '{{inputs.parameters.pod-metadata-annotation-val}}' + labels: + '{{inputs.parameters.pod-metadata-label-key-1}}': '{{inputs.parameters.pod-metadata-label-val-1}}' + '{{inputs.parameters.pod-metadata-label-key-2}}': '{{inputs.parameters.pod-metadata-label-val-2}}' + name: metadata-1-2-system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + - name: pod-metadata-annotation-key-1 + value: '{{inputs.parameters.pod-metadata-annotation-key-1}}' + - name: pod-metadata-annotation-val-1 + value: '{{inputs.parameters.pod-metadata-annotation-val-1}}' + - name: pod-metadata-annotation-key-2 + value: '{{inputs.parameters.pod-metadata-annotation-key-2}}' + - name: pod-metadata-annotation-val-2 + value: '{{inputs.parameters.pod-metadata-annotation-val-2}}' + - name: pod-metadata-annotation-key-3 + value: '{{inputs.parameters.pod-metadata-annotation-key-3}}' + - name: pod-metadata-annotation-val-3 + value: '{{inputs.parameters.pod-metadata-annotation-val-3}}' + - name: pod-metadata-annotation-key-4 + value: '{{inputs.parameters.pod-metadata-annotation-key-4}}' + - name: pod-metadata-annotation-val-4 + value: '{{inputs.parameters.pod-metadata-annotation-val-4}}' + - name: pod-metadata-label-key-1 + value: '{{inputs.parameters.pod-metadata-label-key-1}}' + - name: pod-metadata-label-val-1 + value: '{{inputs.parameters.pod-metadata-label-val-1}}' + - name: pod-metadata-label-key-2 + value: '{{inputs.parameters.pod-metadata-label-key-2}}' + - name: pod-metadata-label-val-2 + value: '{{inputs.parameters.pod-metadata-label-val-2}}' + - name: pod-metadata-label-key-3 + value: '{{inputs.parameters.pod-metadata-label-key-3}}' + - name: pod-metadata-label-val-3 + value: '{{inputs.parameters.pod-metadata-label-val-3}}' + name: executor + template: metadata-4-3-system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + - name: pod-metadata-annotation-key-1 + - name: pod-metadata-annotation-val-1 + - name: pod-metadata-annotation-key-2 + - name: pod-metadata-annotation-val-2 + - name: pod-metadata-annotation-key-3 + - name: pod-metadata-annotation-val-3 + - name: pod-metadata-annotation-key-4 + - name: pod-metadata-annotation-val-4 + - name: pod-metadata-label-key-1 + - name: pod-metadata-label-val-1 + - name: pod-metadata-label-key-2 + - name: pod-metadata-label-val-2 + - name: pod-metadata-label-key-3 + - name: pod-metadata-label-val-3 + metadata: {} + 
name: metadata-4-3-system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + - name: pod-metadata-annotation-key-1 + - name: pod-metadata-annotation-val-1 + - name: pod-metadata-annotation-key-2 + - name: pod-metadata-annotation-val-2 + - name: pod-metadata-annotation-key-3 + - name: pod-metadata-annotation-val-3 + - name: pod-metadata-annotation-key-4 + - name: pod-metadata-annotation-val-4 + - name: pod-metadata-label-key-1 + - name: pod-metadata-label-val-1 + - name: pod-metadata-label-key-2 + - name: pod-metadata-label-val-2 + - name: pod-metadata-label-key-3 + - name: pod-metadata-label-val-3 + metadata: + annotations: + '{{inputs.parameters.pod-metadata-annotation-key-1}}': '{{inputs.parameters.pod-metadata-annotation-val-1}}' + '{{inputs.parameters.pod-metadata-annotation-key-2}}': '{{inputs.parameters.pod-metadata-annotation-val-2}}' + '{{inputs.parameters.pod-metadata-annotation-key-3}}': '{{inputs.parameters.pod-metadata-annotation-val-3}}' + '{{inputs.parameters.pod-metadata-annotation-key-4}}': '{{inputs.parameters.pod-metadata-annotation-val-4}}' + labels: + '{{inputs.parameters.pod-metadata-label-key-1}}': '{{inputs.parameters.pod-metadata-label-val-1}}' + '{{inputs.parameters.pod-metadata-label-key-2}}': '{{inputs.parameters.pod-metadata-label-val-2}}' + '{{inputs.parameters.pod-metadata-label-key-3}}': '{{inputs.parameters.pod-metadata-label-val-3}}' + name: metadata-4-3-system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + - name: pod-metadata-annotation-key-1 + value: '{{inputs.parameters.pod-metadata-annotation-key-1}}' + - name: pod-metadata-annotation-val-1 + value: '{{inputs.parameters.pod-metadata-annotation-val-1}}' + - name: pod-metadata-annotation-key-2 + value: '{{inputs.parameters.pod-metadata-annotation-key-2}}' + - name: pod-metadata-annotation-val-2 + value: '{{inputs.parameters.pod-metadata-annotation-val-2}}' + name: executor + template: 
metadata-2-0-system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + - name: pod-metadata-annotation-key-1 + - name: pod-metadata-annotation-val-1 + - name: pod-metadata-annotation-key-2 + - name: pod-metadata-annotation-val-2 + metadata: {} + name: metadata-2-0-system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + - name: pod-metadata-annotation-key-1 + - name: pod-metadata-annotation-val-1 + - name: pod-metadata-annotation-key-2 + - name: pod-metadata-annotation-val-2 + metadata: + annotations: + '{{inputs.parameters.pod-metadata-annotation-key-1}}': '{{inputs.parameters.pod-metadata-annotation-val-1}}' + '{{inputs.parameters.pod-metadata-annotation-key-2}}': '{{inputs.parameters.pod-metadata-annotation-val-2}}' + name: metadata-2-0-system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + - name: pod-metadata-label-key-1 + value: '{{inputs.parameters.pod-metadata-label-key-1}}' + - name: pod-metadata-label-val-1 + value: '{{inputs.parameters.pod-metadata-label-val-1}}' + - name: pod-metadata-label-key-2 + value: '{{inputs.parameters.pod-metadata-label-key-2}}' + - name: pod-metadata-label-val-2 + value: '{{inputs.parameters.pod-metadata-label-val-2}}' + - name: pod-metadata-label-key-3 + value: '{{inputs.parameters.pod-metadata-label-key-3}}' + - name: pod-metadata-label-val-3 + value: '{{inputs.parameters.pod-metadata-label-val-3}}' + name: executor + template: metadata-0-3-system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + - name: pod-metadata-label-key-1 + - name: pod-metadata-label-val-1 + - name: pod-metadata-label-key-2 + - name: pod-metadata-label-val-2 + - name: pod-metadata-label-key-3 + - name: pod-metadata-label-val-3 + 
metadata: {} + name: metadata-0-3-system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + - name: pod-metadata-label-key-1 + - name: pod-metadata-label-val-1 + - name: pod-metadata-label-key-2 + - name: pod-metadata-label-val-2 + - name: pod-metadata-label-key-3 + - name: pod-metadata-label-val-3 + metadata: + labels: + '{{inputs.parameters.pod-metadata-label-key-1}}': '{{inputs.parameters.pod-metadata-label-val-1}}' + '{{inputs.parameters.pod-metadata-label-key-2}}': '{{inputs.parameters.pod-metadata-label-val-2}}' + '{{inputs.parameters.pod-metadata-label-key-3}}': '{{inputs.parameters.pod-metadata-label-val-3}}' + name: metadata-0-3-system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-3d278faa61c986d801a417f587e50005053b72d5108dc720e6ce32593c050c24}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-validate-no-pod-metadata"},"taskInfo":{"name":"validate-no-pod-metadata"}}' + - name: container + value: '{{workflow.parameters.implementations-3d278faa61c986d801a417f587e50005053b72d5108dc720e6ce32593c050c24}}' + - name: task-name + value: validate-no-pod-metadata + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: kubernetes-config + value: '{{workflow.parameters.kubernetes-comp-validate-no-pod-metadata}}' + name: validate-no-pod-metadata-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.validate-no-pod-metadata-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.validate-no-pod-metadata-driver.outputs.parameters.cached-decision}}' + depends: validate-no-pod-metadata-driver.Succeeded + name: validate-no-pod-metadata + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-a58a8ba91674b53e9cfd2a56b122fa32c32733648c4337cfd4d489147baa0351}}' 
+ - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-validate-pod-metadata-task-a"},"inputs":{"parameters":{"annotation_exp_val":{"runtimeValue":{"constant":"annotation"}},"annotation_path":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION"}},"label_exp_val_1":{"runtimeValue":{"constant":"label-1"}},"label_exp_val_2":{"runtimeValue":{"constant":"label-2"}},"label_path_1":{"runtimeValue":{"constant":"POD_TASK_LABEL_1"}},"label_path_2":{"runtimeValue":{"constant":"POD_TASK_LABEL_2"}}}},"taskInfo":{"name":"validate-pod-metadata-task-a"}}' + - name: container + value: '{{workflow.parameters.implementations-a58a8ba91674b53e9cfd2a56b122fa32c32733648c4337cfd4d489147baa0351}}' + - name: task-name + value: validate-pod-metadata-task-a + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: kubernetes-config + value: '{{workflow.parameters.kubernetes-comp-validate-pod-metadata-task-a}}' + name: validate-pod-metadata-task-a-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.validate-pod-metadata-task-a-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.validate-pod-metadata-task-a-driver.outputs.parameters.cached-decision}}' + - name: pod-metadata-annotation-key + value: task-annotation + - name: pod-metadata-annotation-val + value: annotation + - name: pod-metadata-label-key-1 + value: task-label-1 + - name: pod-metadata-label-val-1 + value: label-1 + - name: pod-metadata-label-key-2 + value: task-label-2 + - name: pod-metadata-label-val-2 + value: label-2 + depends: validate-pod-metadata-task-a-driver.Succeeded + name: validate-pod-metadata-task-a + template: metadata-1-2-system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ff4adfd20fbb1ccd7f6e9330882576b7e34ffba16bc568acc10956b8652c28d9}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-validate-pod-metadata-task-b"},"inputs":{"parameters":{"annotation_exp_val_1":{"runtimeValue":{"constant":"annotation-1"}},"annotation_exp_val_2":{"runtimeValue":{"constant":"annotation-2"}},"annotation_exp_val_3":{"runtimeValue":{"constant":"annotation-3"}},"annotation_exp_val_4":{"runtimeValue":{"constant":"annotation-4"}},"annotation_path_1":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION_1"}},"annotation_path_2":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION_2"}},"annotation_path_3":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION_3"}},"annotation_path_4":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION_4"}},"label_exp_val_1":{"runtimeValue":{"constant":"label-1"}},"label_exp_val_2":{"runtimeValue":{"constant":"label-2"}},"label_exp_val_3":{"runtimeValue":{"constant":"label-3"}},"label_path_1":{"runtimeValue":{"constant":"POD_TASK_LABEL_1"}},"label_path_2":{"runtimeValue":{"constant":"POD_TASK_LABEL_2"}},"label_path_3":{"runtimeValue":{"constant":"POD_TASK_LABEL_3"}}}},"taskInfo":{"name":"validate-pod-metadata-task-b"}}' + - name: container + value: '{{workflow.parameters.implementations-ff4adfd20fbb1ccd7f6e9330882576b7e34ffba16bc568acc10956b8652c28d9}}' + - name: task-name + value: validate-pod-metadata-task-b + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: kubernetes-config + value: '{{workflow.parameters.kubernetes-comp-validate-pod-metadata-task-b}}' + name: validate-pod-metadata-task-b-driver + template: system-container-driver + - arguments: + parameters: + - name: 
pod-spec-patch + value: '{{tasks.validate-pod-metadata-task-b-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.validate-pod-metadata-task-b-driver.outputs.parameters.cached-decision}}' + - name: pod-metadata-annotation-key-1 + value: task-annotation-1 + - name: pod-metadata-annotation-val-1 + value: annotation-1 + - name: pod-metadata-annotation-key-2 + value: task-annotation-2 + - name: pod-metadata-annotation-val-2 + value: annotation-2 + - name: pod-metadata-annotation-key-3 + value: task-annotation-3 + - name: pod-metadata-annotation-val-3 + value: annotation-3 + - name: pod-metadata-annotation-key-4 + value: task-annotation-4 + - name: pod-metadata-annotation-val-4 + value: annotation-4 + - name: pod-metadata-label-key-1 + value: task-label-1 + - name: pod-metadata-label-val-1 + value: label-1 + - name: pod-metadata-label-key-2 + value: task-label-2 + - name: pod-metadata-label-val-2 + value: label-2 + - name: pod-metadata-label-key-3 + value: task-label-3 + - name: pod-metadata-label-val-3 + value: label-3 + depends: validate-pod-metadata-task-b-driver.Succeeded + name: validate-pod-metadata-task-b + template: metadata-4-3-system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-0fe8b44e6cf954e8e447976b0b620a7737c5a0371bfd14ca4329c665328fa1d6}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-validate-pod-metadata-task-c"},"inputs":{"parameters":{"annotation_exp_val_1":{"runtimeValue":{"constant":"annotation-1"}},"annotation_exp_val_2":{"runtimeValue":{"constant":"annotation-2"}},"annotation_path_1":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION_1"}},"annotation_path_2":{"runtimeValue":{"constant":"POD_TASK_ANNOTATION_2"}}}},"taskInfo":{"name":"validate-pod-metadata-task-c"}}' + - name: container + value: '{{workflow.parameters.implementations-0fe8b44e6cf954e8e447976b0b620a7737c5a0371bfd14ca4329c665328fa1d6}}' + - name: task-name + value: validate-pod-metadata-task-c + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: kubernetes-config + value: '{{workflow.parameters.kubernetes-comp-validate-pod-metadata-task-c}}' + name: validate-pod-metadata-task-c-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.validate-pod-metadata-task-c-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.validate-pod-metadata-task-c-driver.outputs.parameters.cached-decision}}' + - name: pod-metadata-annotation-key-1 + value: task-annotation-1 + - name: pod-metadata-annotation-val-1 + value: annotation-1 + - name: pod-metadata-annotation-key-2 + value: task-annotation-2 + - name: pod-metadata-annotation-val-2 + value: annotation-2 + depends: validate-pod-metadata-task-c-driver.Succeeded + name: validate-pod-metadata-task-c + template: metadata-2-0-system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-9ed49be6257bd027ea2d0882c606eddc7d5d2e83c82c63774ddd209b876bc316}}' + - name: task + value: 
'{"cachingOptions":{},"componentRef":{"name":"comp-validate-pod-metadata-task-d"},"inputs":{"parameters":{"label_exp_val_1":{"runtimeValue":{"constant":"label-1"}},"label_exp_val_2":{"runtimeValue":{"constant":"label-2"}},"label_exp_val_3":{"runtimeValue":{"constant":"label-3"}},"label_path_1":{"runtimeValue":{"constant":"POD_TASK_LABEL_1"}},"label_path_2":{"runtimeValue":{"constant":"POD_TASK_LABEL_2"}},"label_path_3":{"runtimeValue":{"constant":"POD_TASK_LABEL_3"}}}},"taskInfo":{"name":"validate-pod-metadata-task-d"}}' + - name: container + value: '{{workflow.parameters.implementations-9ed49be6257bd027ea2d0882c606eddc7d5d2e83c82c63774ddd209b876bc316}}' + - name: task-name + value: validate-pod-metadata-task-d + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: kubernetes-config + value: '{{workflow.parameters.kubernetes-comp-validate-pod-metadata-task-d}}' + name: validate-pod-metadata-task-d-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.validate-pod-metadata-task-d-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.validate-pod-metadata-task-d-driver.outputs.parameters.cached-decision}}' + - name: pod-metadata-label-key-1 + value: task-label-1 + - name: pod-metadata-label-val-1 + value: label-1 + - name: pod-metadata-label-key-2 + value: task-label-2 + - name: pod-metadata-label-val-2 + value: label-2 + - name: pod-metadata-label-key-3 + value: task-label-3 + - name: pod-metadata-label-val-3 + value: label-3 + depends: validate-pod-metadata-task-d-driver.Succeeded + name: validate-pod-metadata-task-d + template: metadata-0-3-system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-pod-metadata + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + 
valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_retry.yaml b/test_data/compiled-workflows/pipeline_with_retry.yaml new file mode 100644 index 00000000000..12d218c27ea --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_retry.yaml @@ -0,0 +1,403 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: test-pipeline- +spec: + arguments: + parameters: + - name: components-134a4a180bb4bfeacb7a318916bac3b02f8ba93e7aca4257373412cf9e37ca3b + value: '{"executorLabel":"exec-add","inputDefinitions":{"parameters":{"a":{"parameterType":"NUMBER_DOUBLE"},"b":{"parameterType":"NUMBER_DOUBLE"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_DOUBLE"}}}}' + - name: implementations-134a4a180bb4bfeacb7a318916bac3b02f8ba93e7aca4257373412cf9e37ca3b + value: '{"args":["--executor_input","{{$}}","--function_to_execute","add"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + add(a: float, b: float) -\u003e float:\n return a + b\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"add":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add"},"inputs":{"parameters":{"a":{"componentInputParameter":"a"},"b":{"componentInputParameter":"b"}}},"retryPolicy":{"backoffDuration":"0s","backoffFactor":2,"backoffMaxDuration":"3600s","maxRetryCount":3},"taskInfo":{"name":"add"}}}},"inputDefinitions":{"parameters":{"a":{"defaultValue":1,"isOptional":true,"parameterType":"NUMBER_DOUBLE"},"b":{"defaultValue":7,"isOptional":true,"parameterType":"NUMBER_DOUBLE"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - test-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - 
'{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + - name: retry-max-count + value: '{{inputs.parameters.retry-max-count}}' + - name: retry-backoff-duration + value: '{{inputs.parameters.retry-backoff-duration}}' + - name: retry-backoff-factor + value: '{{inputs.parameters.retry-backoff-factor}}' + - name: retry-backoff-max-duration + value: '{{inputs.parameters.retry-backoff-max-duration}}' + name: executor + template: retry-system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + - default: "0" + name: retry-max-count + - default: "0" + name: retry-backoff-duration + - default: "0" + name: retry-backoff-factor + - default: "0" + name: retry-backoff-max-duration + metadata: {} + name: retry-system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - 
launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + - name: retry-max-count + - name: retry-backoff-duration + - name: retry-backoff-factor + - name: retry-backoff-max-duration + metadata: {} + name: retry-system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + retryStrategy: + backoff: + duration: '{{inputs.parameters.retry-backoff-duration}}' + factor: '{{inputs.parameters.retry-backoff-factor}}' + maxDuration: '{{inputs.parameters.retry-backoff-max-duration}}' + limit: '{{inputs.parameters.retry-max-count}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-134a4a180bb4bfeacb7a318916bac3b02f8ba93e7aca4257373412cf9e37ca3b}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add"},"inputs":{"parameters":{"a":{"componentInputParameter":"a"},"b":{"componentInputParameter":"b"}}},"retryPolicy":{"backoffDuration":"0s","backoffFactor":2,"backoffMaxDuration":"3600s","maxRetryCount":3},"taskInfo":{"name":"add"}}' + - name: container + value: '{{workflow.parameters.implementations-134a4a180bb4bfeacb7a318916bac3b02f8ba93e7aca4257373412cf9e37ca3b}}' + - name: task-name + value: add + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: add-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.add-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.add-driver.outputs.parameters.cached-decision}}' + - name: retry-max-count + value: "3" + - name: retry-backoff-duration + value: "0" + - name: retry-backoff-factor + value: "2" + - name: retry-backoff-max-duration + value: "3600" + depends: add-driver.Succeeded + name: add + template: retry-system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - test-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: 
ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"a":1,"b":7}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_reused_component.yaml b/test_data/compiled-workflows/pipeline_with_reused_component.yaml new file mode 100644 index 00000000000..9a25575be01 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_reused_component.yaml @@ -0,0 +1,419 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: add-pipeline- +spec: + arguments: + parameters: + - name: components-23c2b9f7ae92700a0e5c955580d24cd301258f0f455d131467d52707529b4c8a + value: '{"executorLabel":"exec-add-numbers","inputDefinitions":{"parameters":{"a":{"parameterType":"NUMBER_INTEGER"},"b":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-23c2b9f7ae92700a0e5c955580d24cd301258f0f455d131467d52707529b4c8a + value: '{"args":["--executor_input","{{$}}","--function_to_execute","add_numbers"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + add_numbers(a: int, b: int) -\u003e int:\n return a + b\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"add-numbers":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-numbers"},"inputs":{"parameters":{"a":{"componentInputParameter":"a"},"b":{"runtimeValue":{"constant":3}}}},"taskInfo":{"name":"add-numbers"}},"add-numbers-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-numbers-2"},"dependentTasks":["add-numbers"],"inputs":{"parameters":{"a":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"add-numbers"}},"b":{"componentInputParameter":"b"}}},"taskInfo":{"name":"add-numbers-2"}},"add-numbers-3":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-numbers-3"},"dependentTasks":["add-numbers-2"],"inputs":{"parameters":{"a":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"add-numbers-2"}},"b":{"runtimeValue":{"constant":7}}}},"taskInfo":{"name":"add-numbers-3"}}}},"inputDefinitions":{"parameters":{"a":{"defaultValue":2,"isOptional":true,"parameterType":"NUMBER_INTEGER"},"b":{"defaultValue":5,"isOptional":true,"parameterType":"NUMBER_INTEGER"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - add-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - 
name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-23c2b9f7ae92700a0e5c955580d24cd301258f0f455d131467d52707529b4c8a}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-numbers"},"inputs":{"parameters":{"a":{"componentInputParameter":"a"},"b":{"runtimeValue":{"constant":3}}}},"taskInfo":{"name":"add-numbers"}}' + - name: container + value: '{{workflow.parameters.implementations-23c2b9f7ae92700a0e5c955580d24cd301258f0f455d131467d52707529b4c8a}}' + - name: task-name + value: add-numbers + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: add-numbers-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.add-numbers-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.add-numbers-driver.outputs.parameters.cached-decision}}' + depends: add-numbers-driver.Succeeded + name: add-numbers + template: 
system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-23c2b9f7ae92700a0e5c955580d24cd301258f0f455d131467d52707529b4c8a}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-numbers-2"},"dependentTasks":["add-numbers"],"inputs":{"parameters":{"a":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"add-numbers"}},"b":{"componentInputParameter":"b"}}},"taskInfo":{"name":"add-numbers-2"}}' + - name: container + value: '{{workflow.parameters.implementations-23c2b9f7ae92700a0e5c955580d24cd301258f0f455d131467d52707529b4c8a}}' + - name: task-name + value: add-numbers-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: add-numbers.Succeeded + name: add-numbers-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.add-numbers-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.add-numbers-2-driver.outputs.parameters.cached-decision}}' + depends: add-numbers-2-driver.Succeeded + name: add-numbers-2 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-23c2b9f7ae92700a0e5c955580d24cd301258f0f455d131467d52707529b4c8a}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-numbers-3"},"dependentTasks":["add-numbers-2"],"inputs":{"parameters":{"a":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"add-numbers-2"}},"b":{"runtimeValue":{"constant":7}}}},"taskInfo":{"name":"add-numbers-3"}}' + - name: container + value: '{{workflow.parameters.implementations-23c2b9f7ae92700a0e5c955580d24cd301258f0f455d131467d52707529b4c8a}}' + - name: task-name + value: add-numbers-3 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: add-numbers-2.Succeeded + name: add-numbers-3-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.add-numbers-3-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.add-numbers-3-driver.outputs.parameters.cached-decision}}' + depends: add-numbers-3-driver.Succeeded + name: add-numbers-3 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - add-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + 
command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"a":2,"b":5}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_secret_as_env.yaml b/test_data/compiled-workflows/pipeline_with_secret_as_env.yaml new file mode 100644 index 00000000000..fbd8d28cd90 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_secret_as_env.yaml @@ -0,0 +1,415 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-secret-env- +spec: + arguments: + parameters: + - name: kubernetes-comp-comp + value: '{"secretAsEnv":[{"keyToEnv":[{"envVar":"USER_NAME","secretKey":"username"}],"optional":false,"secretNameParameter":{"componentInputParameter":"secret_parm"}},{"keyToEnv":[{"envVar":"PASSWORD_VAR1","secretKey":"password"}],"optional":false,"secretName":"test-secret-2","secretNameParameter":{"runtimeValue":{"constant":"test-secret-2"}}},{"keyToEnv":[{"envVar":"PASSWORD_VAR2","secretKey":"password"}],"optional":false,"secretNameParameter":{"taskOutputParameter":{"outputParameterKey":"some_output","producerTask":"generate-secret-name"}}}]}' + - name: components-3d5f1ec2aa4c8fdc12bc567d3a3c13b3a5b868b3420fa4689975e7ba43e403ab + value: '{"executorLabel":"exec-comp"}' + - name: implementations-3d5f1ec2aa4c8fdc12bc567d3a3c13b3a5b868b3420fa4689975e7ba43e403ab + value: '{"args":["--executor_input","{{$}}","--function_to_execute","comp"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + comp():\n import os\n username = os.getenv(\"USER_NAME\", \"\")\n psw1 + = os.getenv(\"PASSWORD_VAR1\", \"\")\n psw2 = os.getenv(\"PASSWORD_VAR2\", + \"\")\n assert username == \"user1\"\n assert psw1 == \"psw1\"\n assert + psw2 == \"psw2\"\n\n"],"image":"python:3.9"}' + - name: components-124566c8aba26be4fd91eef9431ac261402d69907711380549b6e49e0eef904c + value: '{"executorLabel":"exec-generate-secret-name","outputDefinitions":{"parameters":{"some_output":{"parameterType":"STRING"}}}}' + - name: implementations-124566c8aba26be4fd91eef9431ac261402d69907711380549b6e49e0eef904c + value: '{"args":["--executor_input","{{$}}","--function_to_execute","generate_secret_name"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + generate_secret_name(some_output: OutputPath(str)):\n secret_name = \"test-secret-3\"\n with + open(some_output, ''w'') as f:\n f.write(secret_name)\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"comp":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-comp"},"dependentTasks":["generate-secret-name"],"taskInfo":{"name":"comp"}},"generate-secret-name":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-generate-secret-name"},"taskInfo":{"name":"generate-secret-name"}}}},"inputDefinitions":{"parameters":{"secret_parm":{"defaultValue":"test-secret-1","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-secret-env + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - 
'{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: 
'{{workflow.parameters.components-3d5f1ec2aa4c8fdc12bc567d3a3c13b3a5b868b3420fa4689975e7ba43e403ab}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-comp"},"dependentTasks":["generate-secret-name"],"taskInfo":{"name":"comp"}}' + - name: container + value: '{{workflow.parameters.implementations-3d5f1ec2aa4c8fdc12bc567d3a3c13b3a5b868b3420fa4689975e7ba43e403ab}}' + - name: task-name + value: comp + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: kubernetes-config + value: '{{workflow.parameters.kubernetes-comp-comp}}' + depends: generate-secret-name.Succeeded + name: comp-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.comp-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.comp-driver.outputs.parameters.cached-decision}}' + depends: comp-driver.Succeeded + name: comp + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-124566c8aba26be4fd91eef9431ac261402d69907711380549b6e49e0eef904c}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-generate-secret-name"},"taskInfo":{"name":"generate-secret-name"}}' + - name: container + value: '{{workflow.parameters.implementations-124566c8aba26be4fd91eef9431ac261402d69907711380549b6e49e0eef904c}}' + - name: task-name + value: generate-secret-name + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: generate-secret-name-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.generate-secret-name-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.generate-secret-name-driver.outputs.parameters.cached-decision}}' + depends: generate-secret-name-driver.Succeeded + name: generate-secret-name + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-secret-env + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: 
"" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"secret_parm":"test-secret-1"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_secret_as_volume.yaml b/test_data/compiled-workflows/pipeline_with_secret_as_volume.yaml new file mode 100644 index 00000000000..d5fbc1bbd78 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_secret_as_volume.yaml @@ -0,0 +1,378 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-secret-volume- +spec: + arguments: + parameters: + - name: kubernetes-comp-comp + value: '{"secretAsVolume":[{"mountPath":"/mnt/my_vol","optional":false,"secretNameParameter":{"componentInputParameter":"secret_param"}}]}' + - name: components-cc440df32ad452fe9f1028b109b0e39809977643b2f7a2e744913ac9b1714074 + value: '{"executorLabel":"exec-comp"}' + - name: implementations-cc440df32ad452fe9f1028b109b0e39809977643b2f7a2e744913ac9b1714074 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","comp"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + comp():\n import os\n import sys\n username_path = os.path.join(''/mnt/my_vol'', + \"username\")\n\n # Check if the secret exists\n if not os.path.exists(username_path):\n raise + Exception(''Secret not found'')\n\n # Open the secret\n with open(username_path, + ''rb'') as secret_file:\n username = secret_file.read()\n\n # Decode + the secret\n username = username.decode(''utf-8'')\n\n # Print the secret\n print(f\"username: + {username}\")\n assert username == \"user1\"\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"comp":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-comp"},"taskInfo":{"name":"comp"}}}},"inputDefinitions":{"parameters":{"secret_param":{"defaultValue":"test-secret-1","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-secret-volume + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: 
/tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-cc440df32ad452fe9f1028b109b0e39809977643b2f7a2e744913ac9b1714074}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-comp"},"taskInfo":{"name":"comp"}}' + - name: container + value: '{{workflow.parameters.implementations-cc440df32ad452fe9f1028b109b0e39809977643b2f7a2e744913ac9b1714074}}' + - name: task-name + value: comp + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: kubernetes-config + value: '{{workflow.parameters.kubernetes-comp-comp}}' + name: comp-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.comp-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.comp-driver.outputs.parameters.cached-decision}}' + depends: comp-driver.Succeeded + name: comp + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-secret-volume + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' 
+ - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"secret_param":"test-secret-1"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_semaphore.yaml b/test_data/compiled-workflows/pipeline_with_semaphore.yaml new file mode 100644 index 00000000000..d80d33e6cff --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_semaphore.yaml @@ -0,0 +1,369 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-semaphore- +spec: + arguments: + parameters: + - name: components-539df5f72ca5a96ea4048c58d4da2d2582e1354734df9db466b2772e959f4b59 + value: '{"executorLabel":"exec-comp"}' + - name: implementations-539df5f72ca5a96ea4048c58d4da2d2582e1354734df9db466b2772e959f4b59 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","comp"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + comp():\n pass\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"comp":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-comp"},"taskInfo":{"name":"comp"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-semaphore + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + 
valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-539df5f72ca5a96ea4048c58d4da2d2582e1354734df9db466b2772e959f4b59}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-comp"},"taskInfo":{"name":"comp"}}' + - name: container + value: '{{workflow.parameters.implementations-539df5f72ca5a96ea4048c58d4da2d2582e1354734df9db466b2772e959f4b59}}' + - name: task-name + value: comp + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: comp-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.comp-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.comp-driver.outputs.parameters.cached-decision}}' + depends: comp-driver.Succeeded + name: comp + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-semaphore + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - 
"false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_string_machine_fields_pipeline_input.yaml b/test_data/compiled-workflows/pipeline_with_string_machine_fields_pipeline_input.yaml new file mode 100644 index 00000000000..ff88e81d49d --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_string_machine_fields_pipeline_input.yaml @@ -0,0 +1,369 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline- +spec: + arguments: + parameters: + - name: components-5c76ebcd4e091481fc4c07f0360039dad12c412bc1b099337a4f415a0888be3d + value: '{"executorLabel":"exec-sum-numbers","inputDefinitions":{"parameters":{"a":{"parameterType":"NUMBER_INTEGER"},"b":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-5c76ebcd4e091481fc4c07f0360039dad12c412bc1b099337a4f415a0888be3d + value: '{"args":["--executor_input","{{$}}","--function_to_execute","sum_numbers"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + sum_numbers(a: int, b: int) -\u003e int:\n return a + b\n\n"],"image":"python:3.9","resources":{"accelerator":{"resourceCount":"{{$.inputs.parameters[''pipelinechannel--accelerator_limit'']}}","resourceType":"{{$.inputs.parameters[''pipelinechannel--accelerator_type'']}}"},"resourceCpuLimit":"{{$.inputs.parameters[''pipelinechannel--cpu_limit'']}}","resourceMemoryLimit":"{{$.inputs.parameters[''pipelinechannel--memory_limit'']}}"}}' + - name: components-root + value: '{"dag":{"tasks":{"sum-numbers":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-sum-numbers"},"inputs":{"parameters":{"a":{"runtimeValue":{"constant":1}},"accelerator_count":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--accelerator_limit'']}}"}},"accelerator_type":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--accelerator_type'']}}"}},"b":{"runtimeValue":{"constant":2}},"cpu_limit":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--cpu_limit'']}}"}},"memory_limit":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--memory_limit'']}}"}},"pipelinechannel--accelerator_limit":{"componentInputParameter":"accelerator_limit"},"pipelinechannel--accelerator_type":{"componentInputParameter":"accelerator_type"},"pipelinechannel--cpu_limit":{"componentInputParameter":"cpu_limit"},"pipelinechannel--memory_limit":{"componentInputParameter":"memory_limit"}}},"taskInfo":{"name":"sum-numbers"}}}},"inputDefinitions":{"parameters":{"accelerator_limit":{"defaultValue":"1","isOptional":true,"parameterType":"STRING"},"accelerator_type":{"defaultValue":"NVIDIA_TESLA_P4","isOptional":true,"parameterType":"STRING"},"cpu_limit":{"defaultValue":"4000m","isOptional":true,"parameterType":"STRING"},"memory_limit":{"defaultValue":"15G","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' 
+ - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-5c76ebcd4e091481fc4c07f0360039dad12c412bc1b099337a4f415a0888be3d}}' + - name: task + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-sum-numbers"},"inputs":{"parameters":{"a":{"runtimeValue":{"constant":1}},"accelerator_count":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--accelerator_limit'']}}"}},"accelerator_type":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--accelerator_type'']}}"}},"b":{"runtimeValue":{"constant":2}},"cpu_limit":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--cpu_limit'']}}"}},"memory_limit":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--memory_limit'']}}"}},"pipelinechannel--accelerator_limit":{"componentInputParameter":"accelerator_limit"},"pipelinechannel--accelerator_type":{"componentInputParameter":"accelerator_type"},"pipelinechannel--cpu_limit":{"componentInputParameter":"cpu_limit"},"pipelinechannel--memory_limit":{"componentInputParameter":"memory_limit"}}},"taskInfo":{"name":"sum-numbers"}}' + - name: container + value: '{{workflow.parameters.implementations-5c76ebcd4e091481fc4c07f0360039dad12c412bc1b099337a4f415a0888be3d}}' + - name: task-name + value: sum-numbers + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: sum-numbers-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.sum-numbers-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.sum-numbers-driver.outputs.parameters.cached-decision}}' + depends: sum-numbers-driver.Succeeded + name: sum-numbers + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count 
+ - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"accelerator_limit":"1","accelerator_type":"NVIDIA_TESLA_P4","cpu_limit":"4000m","memory_limit":"15G"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_string_machine_fields_task_output.yaml b/test_data/compiled-workflows/pipeline_with_string_machine_fields_task_output.yaml new file mode 100644 index 00000000000..692c32fb9fc --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_string_machine_fields_task_output.yaml @@ -0,0 +1,519 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline- +spec: + arguments: + parameters: + - name: components-5ce77d07cd95a5f71e354d597878e3e0bc89527d3d81450cd270ed6183594740 + value: '{"executorLabel":"exec-accelerator-limit","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-5ce77d07cd95a5f71e354d597878e3e0bc89527d3d81450cd270ed6183594740 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","accelerator_limit"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + accelerator_limit() -\u003e str:\n return ''1''\n\n"],"image":"python:3.9"}' + - name: components-2212d778fdcf0682a268c7b100e165b811b818a3b68c0bfd48dd5681e999936b + value: '{"executorLabel":"exec-accelerator-type","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-2212d778fdcf0682a268c7b100e165b811b818a3b68c0bfd48dd5681e999936b + value: '{"args":["--executor_input","{{$}}","--function_to_execute","accelerator_type"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + accelerator_type() -\u003e str:\n return ''NVIDIA_TESLA_P4''\n\n"],"image":"python:3.9"}' + - name: components-a78005c8cf24945a5b595201f36dc4905feff000fc7a59dc0aabaa7c0dc1413b + value: '{"executorLabel":"exec-cpu-limit","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-a78005c8cf24945a5b595201f36dc4905feff000fc7a59dc0aabaa7c0dc1413b + value: '{"args":["--executor_input","{{$}}","--function_to_execute","cpu_limit"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + cpu_limit() -\u003e str:\n return ''4000m''\n\n"],"image":"python:3.9"}' + - name: components-a3d64b3ea9d79d1b1dc471faada5215bf21142914244bbfd26fa4aec93a232d7 + value: '{"executorLabel":"exec-memory-limit","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-a3d64b3ea9d79d1b1dc471faada5215bf21142914244bbfd26fa4aec93a232d7 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","memory_limit"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + memory_limit() -\u003e str:\n return ''15G''\n\n"],"image":"python:3.9"}' + - name: components-0886dd8babaca613b2fca24c656568012544ec9eccff86938a7b3859deae5309 + value: '{"executorLabel":"exec-sum-numbers","inputDefinitions":{"parameters":{"a":{"parameterType":"NUMBER_INTEGER"},"b":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-0886dd8babaca613b2fca24c656568012544ec9eccff86938a7b3859deae5309 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","sum_numbers"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + sum_numbers(a: int, b: int) -\u003e int:\n return a + b\n\n"],"image":"python:3.9","resources":{"accelerator":{"resourceCount":"{{$.inputs.parameters[''pipelinechannel--accelerator-limit-Output'']}}","resourceType":"{{$.inputs.parameters[''pipelinechannel--accelerator-type-Output'']}}"},"resourceCpuLimit":"{{$.inputs.parameters[''pipelinechannel--cpu-limit-Output'']}}","resourceMemoryLimit":"{{$.inputs.parameters[''pipelinechannel--memory-limit-Output'']}}"}}' + - name: components-root + value: 
'{"dag":{"tasks":{"accelerator-limit":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-accelerator-limit"},"taskInfo":{"name":"accelerator-limit"}},"accelerator-type":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-accelerator-type"},"taskInfo":{"name":"accelerator-type"}},"cpu-limit":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-cpu-limit"},"taskInfo":{"name":"cpu-limit"}},"memory-limit":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-memory-limit"},"taskInfo":{"name":"memory-limit"}},"sum-numbers":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-sum-numbers"},"dependentTasks":["accelerator-limit","accelerator-type","cpu-limit","memory-limit"],"inputs":{"parameters":{"a":{"runtimeValue":{"constant":1}},"accelerator_count":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--accelerator-limit-Output'']}}"}},"accelerator_type":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--accelerator-type-Output'']}}"}},"b":{"runtimeValue":{"constant":2}},"cpu_limit":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--cpu-limit-Output'']}}"}},"memory_limit":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--memory-limit-Output'']}}"}},"pipelinechannel--accelerator-limit-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-limit"}},"pipelinechannel--accelerator-type-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-type"}},"pipelinechannel--cpu-limit-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"cpu-limit"}},"pipelinechannel--memory-limit-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"memory-limit"}}}},"taskInfo":{"name":"sum-numbers"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: 
task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-5ce77d07cd95a5f71e354d597878e3e0bc89527d3d81450cd270ed6183594740}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-accelerator-limit"},"taskInfo":{"name":"accelerator-limit"}}' + - name: container + value: '{{workflow.parameters.implementations-5ce77d07cd95a5f71e354d597878e3e0bc89527d3d81450cd270ed6183594740}}' + - name: task-name + value: accelerator-limit + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: accelerator-limit-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.accelerator-limit-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.accelerator-limit-driver.outputs.parameters.cached-decision}}' + depends: accelerator-limit-driver.Succeeded + name: accelerator-limit + template: system-container-executor + - arguments: + parameters: + - name: component + 
value: '{{workflow.parameters.components-2212d778fdcf0682a268c7b100e165b811b818a3b68c0bfd48dd5681e999936b}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-accelerator-type"},"taskInfo":{"name":"accelerator-type"}}' + - name: container + value: '{{workflow.parameters.implementations-2212d778fdcf0682a268c7b100e165b811b818a3b68c0bfd48dd5681e999936b}}' + - name: task-name + value: accelerator-type + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: accelerator-type-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.accelerator-type-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.accelerator-type-driver.outputs.parameters.cached-decision}}' + depends: accelerator-type-driver.Succeeded + name: accelerator-type + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-a78005c8cf24945a5b595201f36dc4905feff000fc7a59dc0aabaa7c0dc1413b}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-cpu-limit"},"taskInfo":{"name":"cpu-limit"}}' + - name: container + value: '{{workflow.parameters.implementations-a78005c8cf24945a5b595201f36dc4905feff000fc7a59dc0aabaa7c0dc1413b}}' + - name: task-name + value: cpu-limit + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: cpu-limit-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.cpu-limit-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.cpu-limit-driver.outputs.parameters.cached-decision}}' + depends: cpu-limit-driver.Succeeded + name: cpu-limit + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-a3d64b3ea9d79d1b1dc471faada5215bf21142914244bbfd26fa4aec93a232d7}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-memory-limit"},"taskInfo":{"name":"memory-limit"}}' + - name: container + value: '{{workflow.parameters.implementations-a3d64b3ea9d79d1b1dc471faada5215bf21142914244bbfd26fa4aec93a232d7}}' + - name: task-name + value: memory-limit + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: memory-limit-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.memory-limit-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.memory-limit-driver.outputs.parameters.cached-decision}}' + depends: memory-limit-driver.Succeeded + name: memory-limit + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-0886dd8babaca613b2fca24c656568012544ec9eccff86938a7b3859deae5309}}' + - name: task + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-sum-numbers"},"dependentTasks":["accelerator-limit","accelerator-type","cpu-limit","memory-limit"],"inputs":{"parameters":{"a":{"runtimeValue":{"constant":1}},"accelerator_count":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--accelerator-limit-Output'']}}"}},"accelerator_type":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--accelerator-type-Output'']}}"}},"b":{"runtimeValue":{"constant":2}},"cpu_limit":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--cpu-limit-Output'']}}"}},"memory_limit":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--memory-limit-Output'']}}"}},"pipelinechannel--accelerator-limit-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-limit"}},"pipelinechannel--accelerator-type-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-type"}},"pipelinechannel--cpu-limit-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"cpu-limit"}},"pipelinechannel--memory-limit-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"memory-limit"}}}},"taskInfo":{"name":"sum-numbers"}}' + - name: container + value: '{{workflow.parameters.implementations-0886dd8babaca613b2fca24c656568012544ec9eccff86938a7b3859deae5309}}' + - name: task-name + value: sum-numbers + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: accelerator-limit.Succeeded && accelerator-type.Succeeded && cpu-limit.Succeeded + && memory-limit.Succeeded + name: sum-numbers-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.sum-numbers-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.sum-numbers-driver.outputs.parameters.cached-decision}}' + depends: sum-numbers-driver.Succeeded + name: sum-numbers + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: 
"" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_task_final_status.yaml b/test_data/compiled-workflows/pipeline_with_task_final_status.yaml new file mode 100644 index 00000000000..6a64d620627 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_task_final_status.yaml @@ -0,0 +1,496 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-task-final-status- +spec: + arguments: + parameters: + - name: components-ee01610b5099d6bfa9c7d87fa1c59efff070025700504b54cf163c39ef70648b + value: '{"executorLabel":"exec-fail-op","inputDefinitions":{"parameters":{"message":{"parameterType":"STRING"}}}}' + - name: implementations-ee01610b5099d6bfa9c7d87fa1c59efff070025700504b54cf163c39ef70648b + value: '{"args":["--executor_input","{{$}}","--function_to_execute","fail_op"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.1.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n print(message)\n sys.exit(1)\n\n"],"image":"python:3.9"}' + - name: components-e867d2e58d704ce4a73fa5b50e0068c7b54a23f058c93b8dc7d73a854b7786a2 + value: '{"executorLabel":"exec-print-op","inputDefinitions":{"parameters":{"message":{"parameterType":"STRING"}}}}' + - name: implementations-e867d2e58d704ce4a73fa5b50e0068c7b54a23f058c93b8dc7d73a854b7786a2 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_op"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.1.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n print(message)\n\n"],"image":"python:3.9"}' + - name: components-comp-exit-handler-1 + value: '{"dag":{"tasks":{"fail-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-fail-op"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"Task + failed."}}}},"taskInfo":{"name":"fail-op"}},"print-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"inputs":{"parameters":{"message":{"componentInputParameter":"pipelinechannel--message"}}},"taskInfo":{"name":"print-op"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--message":{"parameterType":"STRING"}}}}' + - name: components-0920327c9e00f705cfd5d7191535f63964abc5d4ca6fb04e69052c264d8ed9a4 + value: '{"executorLabel":"exec-exit-op","inputDefinitions":{"parameters":{"status":{"isOptional":true,"parameterType":"TASK_FINAL_STATUS"},"user_input":{"parameterType":"STRING"}}}}' + - name: implementations-0920327c9e00f705cfd5d7191535f63964abc5d4ca6fb04e69052c264d8ed9a4 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","exit_op"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.1.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + exit_op(user_input: str, status: PipelineTaskFinalStatus):\n \"\"\"Checks + pipeline run status.\"\"\"\n print(''Pipeline status: '', status.state)\n print(''Job + resource name: '', status.pipeline_job_resource_name)\n print(''Pipeline + task name: '', status.pipeline_task_name)\n print(''Error code: '', status.error_code)\n print(''Error + message: '', status.error_message)\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"exit-handler-1":{"componentRef":{"name":"comp-exit-handler-1"},"inputs":{"parameters":{"pipelinechannel--message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"my-pipeline"}},"exit-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-exit-op"},"dependentTasks":["exit-handler-1"],"inputs":{"parameters":{"status":{"taskFinalStatus":{"producerTask":"exit-handler-1"}},"user_input":{"componentInputParameter":"message"}}},"taskInfo":{"name":"exit-op"},"triggerPolicy":{"strategy":"ALL_UPSTREAM_TASKS_COMPLETED"}}}},"inputDefinitions":{"parameters":{"message":{"defaultValue":"Hello + World!","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-task-final-status + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: 
parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ee01610b5099d6bfa9c7d87fa1c59efff070025700504b54cf163c39ef70648b}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-fail-op"},"inputs":{"parameters":{"message":{"runtimeValue":{"constant":"Task + failed."}}}},"taskInfo":{"name":"fail-op"}}' + - name: container + value: '{{workflow.parameters.implementations-ee01610b5099d6bfa9c7d87fa1c59efff070025700504b54cf163c39ef70648b}}' + - name: task-name + value: fail-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: fail-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.fail-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.fail-op-driver.outputs.parameters.cached-decision}}' + depends: fail-op-driver.Succeeded + name: fail-op + template: system-container-executor + - arguments: + parameters: + - name: component + value: 
'{{workflow.parameters.components-e867d2e58d704ce4a73fa5b50e0068c7b54a23f058c93b8dc7d73a854b7786a2}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"inputs":{"parameters":{"message":{"componentInputParameter":"pipelinechannel--message"}}},"taskInfo":{"name":"print-op"}}' + - name: container + value: '{{workflow.parameters.implementations-e867d2e58d704ce4a73fa5b50e0068c7b54a23f058c93b8dc7d73a854b7786a2}}' + - name: task-name + value: print-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-driver.outputs.parameters.cached-decision}}' + depends: print-op-driver.Succeeded + name: print-op + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-exit-handler-1 + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-0920327c9e00f705cfd5d7191535f63964abc5d4ca6fb04e69052c264d8ed9a4}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-exit-op"},"dependentTasks":["exit-handler-1"],"inputs":{"parameters":{"status":{"taskFinalStatus":{"producerTask":"exit-handler-1"}},"user_input":{"componentInputParameter":"message"}}},"taskInfo":{"name":"exit-op"},"triggerPolicy":{"strategy":"ALL_UPSTREAM_TASKS_COMPLETED"}}' + - name: container + value: '{{workflow.parameters.implementations-0920327c9e00f705cfd5d7191535f63964abc5d4ca6fb04e69052c264d8ed9a4}}' + - name: task-name + value: exit-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: exit-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.exit-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.exit-op-driver.outputs.parameters.cached-decision}}' + depends: exit-op-driver.Succeeded + name: exit-op + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: exit-hook-root-exit-op + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-task-final-status + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: 
ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-exit-handler-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-exit-handler-1"},"inputs":{"parameters":{"pipelinechannel--message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"my-pipeline"}}' + - name: task-name + value: exit-handler-1 + name: exit-handler-1-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.exit-handler-1-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.exit-handler-1-driver.outputs.parameters.condition}}' + depends: exit-handler-1-driver.Succeeded + hooks: + exit: + arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + template: exit-hook-root-exit-op + name: exit-handler-1 + template: comp-exit-handler-1 + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"message":"Hello World!"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_task_using_ignore_upstream_failure.yaml b/test_data/compiled-workflows/pipeline_with_task_using_ignore_upstream_failure.yaml new file mode 100644 index 00000000000..61d589fc7b3 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_task_using_ignore_upstream_failure.yaml @@ -0,0 +1,422 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: my-pipeline- +spec: + arguments: + parameters: + - name: components-59806fd7437e0169671c5386035d1c9e16abc59cf47bf73a4a88c9c72f9b89c5 + value: '{"executorLabel":"exec-fail-op","inputDefinitions":{"parameters":{"message":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-59806fd7437e0169671c5386035d1c9e16abc59cf47bf73a4a88c9c72f9b89c5 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","fail_op"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + fail_op(message: str) -\u003e str:\n \"\"\"Fails.\"\"\"\n import sys\n print(message)\n sys.exit(1)\n return + message\n\n"],"image":"python:3.9"}' + - name: components-b206be17c679a17aaa7a27bfbdc4c9282eb4a6dae575e3758a3af57a18424138 + value: '{"executorLabel":"exec-print-op","inputDefinitions":{"parameters":{"message":{"defaultValue":"default","isOptional":true,"parameterType":"STRING"}}}}' + - name: implementations-b206be17c679a17aaa7a27bfbdc4c9282eb4a6dae575e3758a3af57a18424138 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","print_op"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + print_op(message: str = ''default''):\n \"\"\"Prints a message.\"\"\"\n print(message)\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"fail-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-fail-op"},"inputs":{"parameters":{"message":{"componentInputParameter":"sample_input"}}},"taskInfo":{"name":"fail-op"}},"print-op":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"dependentTasks":["fail-op"],"inputs":{"parameters":{"message":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"fail-op"}}}},"taskInfo":{"name":"print-op"},"triggerPolicy":{"strategy":"ALL_UPSTREAM_TASKS_COMPLETED"}}}},"inputDefinitions":{"parameters":{"sample_input":{"defaultValue":"message","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - my-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path 
+ - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: 
'{{workflow.parameters.components-b206be17c679a17aaa7a27bfbdc4c9282eb4a6dae575e3758a3af57a18424138}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-print-op"},"dependentTasks":["fail-op"],"inputs":{"parameters":{"message":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"fail-op"}}}},"taskInfo":{"name":"print-op"},"triggerPolicy":{"strategy":"ALL_UPSTREAM_TASKS_COMPLETED"}}' + - name: container + value: '{{workflow.parameters.implementations-b206be17c679a17aaa7a27bfbdc4c9282eb4a6dae575e3758a3af57a18424138}}' + - name: task-name + value: print-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: print-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.print-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.print-op-driver.outputs.parameters.cached-decision}}' + depends: print-op-driver.Succeeded + name: print-op + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: exit-hook-root-print-op + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-59806fd7437e0169671c5386035d1c9e16abc59cf47bf73a4a88c9c72f9b89c5}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-fail-op"},"inputs":{"parameters":{"message":{"componentInputParameter":"sample_input"}}},"taskInfo":{"name":"fail-op"}}' + - name: container + value: '{{workflow.parameters.implementations-59806fd7437e0169671c5386035d1c9e16abc59cf47bf73a4a88c9c72f9b89c5}}' + - name: task-name + value: fail-op + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: fail-op-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.fail-op-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.fail-op-driver.outputs.parameters.cached-decision}}' + depends: fail-op-driver.Succeeded + hooks: + exit: + arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + template: exit-hook-root-print-op + name: fail-op + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - my-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: 
ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"sample_input":"message"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_utils.yaml b/test_data/compiled-workflows/pipeline_with_utils.yaml new file mode 100644 index 00000000000..d87780c76ac --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_utils.yaml @@ -0,0 +1,373 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-utils- +spec: + arguments: + parameters: + - name: components-257c3d2b996441cda9547359972f80c521a24efee58aff87d48282b55f7782bd + value: '{"executorLabel":"exec-echo","inputDefinitions":{"parameters":{"msg":{"parameterType":"STRING"}}}}' + - name: implementations-257c3d2b996441cda9547359972f80c521a24efee58aff87d48282b55f7782bd + value: '{"args":["--executor_input","{{$}}","--function_to_execute","echo"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + util_func(msg: str) -\u003e str:\n return f\"{msg} from util_func\"\n\n\ndef + util_func2(msg: str) -\u003e str:\n return f\"{msg} from util_func2\"\n\n\ndef + echo(msg: str):\n assert util_func(msg) == f\"{msg} from util_func\"\n assert + util_func2(msg) == f\"{msg} from util_func2\"\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"echo":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-echo"},"inputs":{"parameters":{"msg":{"componentInputParameter":"msg"}}},"taskInfo":{"name":"echo"}}}},"inputDefinitions":{"parameters":{"msg":{"defaultValue":"Hello, + World!","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-utils + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - 
arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-257c3d2b996441cda9547359972f80c521a24efee58aff87d48282b55f7782bd}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-echo"},"inputs":{"parameters":{"msg":{"componentInputParameter":"msg"}}},"taskInfo":{"name":"echo"}}' + - name: container + value: '{{workflow.parameters.implementations-257c3d2b996441cda9547359972f80c521a24efee58aff87d48282b55f7782bd}}' + - name: task-name + value: echo + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: echo-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.echo-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.echo-driver.outputs.parameters.cached-decision}}' + depends: echo-driver.Succeeded + name: echo + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-utils + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - 
'{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"msg":"Hello, World!"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_various_io_types.yaml b/test_data/compiled-workflows/pipeline_with_various_io_types.yaml new file mode 100644 index 00000000000..b8430845b74 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_various_io_types.yaml @@ -0,0 +1,389 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-various-types- +spec: + arguments: + parameters: + - name: components-47f8556d9d5209135f0475cec07f23896de64558ee7eb4299cc3ebffcad12306 + value: '{"executorLabel":"exec-downstream","inputDefinitions":{"artifacts":{"input_b":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"}},"input_c":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}},"input_d":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"}},"input_e":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}},"input_f":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}},"input_g":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}},"input_h":{"artifactType":{"schemaTitle":"system.HTML","schemaVersion":"0.0.1"}},"input_i":{"artifactType":{"schemaTitle":"google.BQMLModel","schemaVersion":"0.0.1"}}},"parameters":{"input_a":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-47f8556d9d5209135f0475cec07f23896de64558ee7eb4299cc3ebffcad12306 + value: 
'{"args":["{{$.inputs.parameters[''input_a'']}}","{{$.inputs.artifacts[''input_b''].uri}}","{{$.inputs.artifacts[''input_c''].path}}","{{$.inputs.artifacts[''input_d''].uri}}","{{$.inputs.artifacts[''input_e''].uri}}","{{$.inputs.artifacts[''input_f''].path}}","{{$.inputs.artifacts[''input_g''].path}}","{{$.inputs.artifacts[''input_h''].path}}"],"image":"gcr.io/image"}' + - name: components-3dd78b16c97f19cec3a05c91c163d2ef6f3cdee55ffd014e9fc805751d9f2ace + value: '{"executorLabel":"exec-upstream","inputDefinitions":{"artifacts":{"input_3":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}},"parameters":{"input_1":{"parameterType":"STRING"},"input_2":{"parameterType":"NUMBER_DOUBLE"},"input_4":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"output_2":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"}},"output_3":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}},"output_4":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"}},"output_5":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}},"output_6":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}},"output_7":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}},"output_8":{"artifactType":{"schemaTitle":"system.HTML","schemaVersion":"0.0.1"}},"output_9":{"artifactType":{"schemaTitle":"google.BQMLModel","schemaVersion":"0.0.1"}}},"parameters":{"output_1":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-3dd78b16c97f19cec3a05c91c163d2ef6f3cdee55ffd014e9fc805751d9f2ace + value: '{"args":["{{$.inputs.parameters[''input_1'']}}","{{$.inputs.parameters[''input_2'']}}","{{$.inputs.artifacts[''input_3''].path}}","{{$.inputs.parameters[''input_4'']}}","{{$.outputs.parameters[''output_1''].output_file}}","{{$.outputs.artifacts[''output_2''].uri}}","{{$.outputs.artifacts[''output_3''].path}}","{{$.outputs.artifacts[''output_4''].uri}}","{{$.outputs.artifacts[''output_5''].uri}}","{{$.outputs.artifacts[''output_6''].path}}","{{$.outputs.artifacts[''output_7''].path}}","{{$.outputs.artifacts[''output_8''].path}}"],"image":"gcr.io/image"}' + - name: components-root + value: 
'{"dag":{"tasks":{"downstream":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-downstream"},"dependentTasks":["upstream"],"inputs":{"artifacts":{"input_b":{"taskOutputArtifact":{"outputArtifactKey":"output_2","producerTask":"upstream"}},"input_c":{"taskOutputArtifact":{"outputArtifactKey":"output_3","producerTask":"upstream"}},"input_d":{"taskOutputArtifact":{"outputArtifactKey":"output_4","producerTask":"upstream"}},"input_e":{"taskOutputArtifact":{"outputArtifactKey":"output_5","producerTask":"upstream"}},"input_f":{"taskOutputArtifact":{"outputArtifactKey":"output_6","producerTask":"upstream"}},"input_g":{"taskOutputArtifact":{"outputArtifactKey":"output_7","producerTask":"upstream"}},"input_h":{"taskOutputArtifact":{"outputArtifactKey":"output_8","producerTask":"upstream"}},"input_i":{"taskOutputArtifact":{"outputArtifactKey":"output_9","producerTask":"upstream"}}},"parameters":{"input_a":{"taskOutputParameter":{"outputParameterKey":"output_1","producerTask":"upstream"}}}},"taskInfo":{"name":"downstream"}},"upstream":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-upstream"},"inputs":{"artifacts":{"input_3":{"componentInputArtifact":"input3"}},"parameters":{"input_1":{"componentInputParameter":"input1"},"input_2":{"runtimeValue":{"constant":3.1415926}},"input_4":{"componentInputParameter":"input4"}}},"taskInfo":{"name":"upstream"}}}},"inputDefinitions":{"artifacts":{"input3":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}},"parameters":{"input1":{"parameterType":"STRING"},"input4":{"defaultValue":"","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-various-types + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: 
pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-47f8556d9d5209135f0475cec07f23896de64558ee7eb4299cc3ebffcad12306}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-downstream"},"dependentTasks":["upstream"],"inputs":{"artifacts":{"input_b":{"taskOutputArtifact":{"outputArtifactKey":"output_2","producerTask":"upstream"}},"input_c":{"taskOutputArtifact":{"outputArtifactKey":"output_3","producerTask":"upstream"}},"input_d":{"taskOutputArtifact":{"outputArtifactKey":"output_4","producerTask":"upstream"}},"input_e":{"taskOutputArtifact":{"outputArtifactKey":"output_5","producerTask":"upstream"}},"input_f":{"taskOutputArtifact":{"outputArtifactKey":"output_6","producerTask":"upstream"}},"input_g":{"taskOutputArtifact":{"outputArtifactKey":"output_7","producerTask":"upstream"}},"input_h":{"taskOutputArtifact":{"outputArtifactKey":"output_8","producerTask":"upstream"}},"input_i":{"taskOutputArtifact":{"outputArtifactKey":"output_9","producerTask":"upstream"}}},"parameters":{"input_a":{"taskOutputParameter":{"outputParameterKey":"output_1","producerTask":"upstream"}}}},"taskInfo":{"name":"downstream"}}' + - name: container + value: 
'{{workflow.parameters.implementations-47f8556d9d5209135f0475cec07f23896de64558ee7eb4299cc3ebffcad12306}}' + - name: task-name + value: downstream + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: upstream.Succeeded + name: downstream-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.downstream-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.downstream-driver.outputs.parameters.cached-decision}}' + depends: downstream-driver.Succeeded + name: downstream + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-3dd78b16c97f19cec3a05c91c163d2ef6f3cdee55ffd014e9fc805751d9f2ace}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-upstream"},"inputs":{"artifacts":{"input_3":{"componentInputArtifact":"input3"}},"parameters":{"input_1":{"componentInputParameter":"input1"},"input_2":{"runtimeValue":{"constant":3.1415926}},"input_4":{"componentInputParameter":"input4"}}},"taskInfo":{"name":"upstream"}}' + - name: container + value: '{{workflow.parameters.implementations-3dd78b16c97f19cec3a05c91c163d2ef6f3cdee55ffd014e9fc805751d9f2ace}}' + - name: task-name + value: upstream + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: upstream-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.upstream-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.upstream-driver.outputs.parameters.cached-decision}}' + depends: upstream-driver.Succeeded + name: upstream + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-various-types + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: 
{} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"input4":""}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_volume.yaml b/test_data/compiled-workflows/pipeline_with_volume.yaml new file mode 100644 index 00000000000..e725b6dedf0 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_volume.yaml @@ -0,0 +1,476 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-volume- +spec: + arguments: + parameters: + - name: kubernetes-comp-consumer + value: '{"pvcMount":[{"mountPath":"/data","pvcNameParameter":{"taskOutputParameter":{"outputParameterKey":"name","producerTask":"createpvc"}},"taskOutputParameter":{"outputParameterKey":"name","producerTask":"createpvc"}}]}' + - name: components-522286ff8480712f50be32c25dedca8517bb73693bea29170288afdd11255e1d + value: '{"executorLabel":"exec-consumer","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-522286ff8480712f50be32c25dedca8517bb73693bea29170288afdd11255e1d + value: '{"args":["--executor_input","{{$}}","--function_to_execute","consumer"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + consumer() -\u003e str:\n with open(''/data/file.txt'', ''r'') as file:\n content + = file.read()\n print(content)\n return content\n\n"],"image":"python:3.9"}' + - name: components-98f254581598234b59377784d6cbf209de79e0bcda8013fe4c4397b5d3a26767 + value: '{"executorLabel":"exec-createpvc","inputDefinitions":{"parameters":{"access_modes":{"description":"AccessModes + to request for the provisioned PVC. May\nbe one or more of ``''ReadWriteOnce''``, + ``''ReadOnlyMany''``, ``''ReadWriteMany''``, or\n``''ReadWriteOncePod''``. + Corresponds to `PersistentVolumeClaim.spec.accessModes \u003chttps://kubernetes.io/docs/concepts/storage/persistent-volumes/#access-modes\u003e`_.","parameterType":"LIST"},"annotations":{"description":"Annotations + for the PVC''s metadata. 
Corresponds to `PersistentVolumeClaim.metadata.annotations + \u003chttps://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaim\u003e`_.","isOptional":true,"parameterType":"STRUCT"},"pvc_name":{"description":"Name + of the PVC. Corresponds to `PersistentVolumeClaim.metadata.name \u003chttps://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaim\u003e`_. + Only one of ``pvc_name`` and ``pvc_name_suffix`` can\nbe provided.","isOptional":true,"parameterType":"STRING"},"pvc_name_suffix":{"description":"Prefix + to use for a dynamically generated name, which\nwill take the form ``\u003cargo-workflow-name\u003e-\u003cpvc_name_suffix\u003e``. + Only one\nof ``pvc_name`` and ``pvc_name_suffix`` can be provided.","isOptional":true,"parameterType":"STRING"},"size":{"description":"The + size of storage requested by the PVC that will be provisioned. For example, + ``''5Gi''``. Corresponds to `PersistentVolumeClaim.spec.resources.requests.storage + \u003chttps://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaimSpec\u003e`_.","parameterType":"STRING"},"storage_class_name":{"defaultValue":"","description":"Name + of StorageClass from which to provision the PV\nto back the PVC. ``None`` + indicates to use the cluster''s default\nstorage_class_name. Set to ``''''`` + for a statically specified PVC.","isOptional":true,"parameterType":"STRING"},"volume_name":{"description":"Pre-existing + PersistentVolume that should back the\nprovisioned PersistentVolumeClaim. + Used for statically\nspecified PV only. Corresponds to `PersistentVolumeClaim.spec.volumeName + \u003chttps://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaimSpec\u003e`_.","isOptional":true,"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"name":{"parameterType":"STRING"}}}}' + - name: implementations-98f254581598234b59377784d6cbf209de79e0bcda8013fe4c4397b5d3a26767 + value: '{"image":"argostub/createpvc"}' + - name: components-ecfc655dce17b0d317707d37fc226fb7de858cc93d45916945122484a13ef725 + value: '{"executorLabel":"exec-deletepvc","inputDefinitions":{"parameters":{"pvc_name":{"description":"Name + of the PVC to delete. Supports passing a runtime-generated name, such as a + name provided by ``kubernetes.CreatePvcOp().outputs[''name'']``.","parameterType":"STRING"}}}}' + - name: implementations-ecfc655dce17b0d317707d37fc226fb7de858cc93d45916945122484a13ef725 + value: '{"image":"argostub/deletepvc"}' + - name: kubernetes-comp-producer + value: '{"pvcMount":[{"mountPath":"/data","pvcNameParameter":{"taskOutputParameter":{"outputParameterKey":"name","producerTask":"createpvc"}},"taskOutputParameter":{"outputParameterKey":"name","producerTask":"createpvc"}}]}' + - name: components-c83a4f299993efdf1e9a883b7d05cb035219013b529f03866c0be2efd4da651d + value: '{"executorLabel":"exec-producer","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-c83a4f299993efdf1e9a883b7d05cb035219013b529f03866c0be2efd4da651d + value: '{"args":["--executor_input","{{$}}","--function_to_execute","producer"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + producer() -\u003e str:\n with open(''/data/file.txt'', ''w'') as file:\n file.write(''Hello + world'')\n with open(''/data/file.txt'', ''r'') as file:\n content + = file.read()\n print(content)\n return content\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"consumer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-consumer"},"dependentTasks":["createpvc","producer"],"taskInfo":{"name":"consumer"}},"createpvc":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-createpvc"},"inputs":{"parameters":{"access_modes":{"runtimeValue":{"constant":["ReadWriteOnce"]}},"pvc_name_suffix":{"runtimeValue":{"constant":"-my-pvc"}},"size":{"runtimeValue":{"constant":"5Mi"}},"storage_class_name":{"runtimeValue":{"constant":"standard"}}}},"taskInfo":{"name":"createpvc"}},"deletepvc":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-deletepvc"},"dependentTasks":["consumer","createpvc"],"inputs":{"parameters":{"pvc_name":{"taskOutputParameter":{"outputParameterKey":"name","producerTask":"createpvc"}}}},"taskInfo":{"name":"deletepvc"}},"producer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-producer"},"dependentTasks":["createpvc"],"taskInfo":{"name":"producer"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-volume + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 
100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-522286ff8480712f50be32c25dedca8517bb73693bea29170288afdd11255e1d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-consumer"},"dependentTasks":["createpvc","producer"],"taskInfo":{"name":"consumer"}}' + - name: container + value: '{{workflow.parameters.implementations-522286ff8480712f50be32c25dedca8517bb73693bea29170288afdd11255e1d}}' + - name: task-name + value: consumer + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: kubernetes-config + value: '{{workflow.parameters.kubernetes-comp-consumer}}' + depends: createpvc.Succeeded && producer.Succeeded + name: consumer-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.consumer-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: 
cached-decision + value: '{{tasks.consumer-driver.outputs.parameters.cached-decision}}' + depends: consumer-driver.Succeeded + name: consumer + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-98f254581598234b59377784d6cbf209de79e0bcda8013fe4c4397b5d3a26767}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-createpvc"},"inputs":{"parameters":{"access_modes":{"runtimeValue":{"constant":["ReadWriteOnce"]}},"pvc_name_suffix":{"runtimeValue":{"constant":"-my-pvc"}},"size":{"runtimeValue":{"constant":"5Mi"}},"storage_class_name":{"runtimeValue":{"constant":"standard"}}}},"taskInfo":{"name":"createpvc"}}' + - name: container + value: '{{workflow.parameters.implementations-98f254581598234b59377784d6cbf209de79e0bcda8013fe4c4397b5d3a26767}}' + - name: task-name + value: createpvc + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: createpvc + template: system-container-driver + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ecfc655dce17b0d317707d37fc226fb7de858cc93d45916945122484a13ef725}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-deletepvc"},"dependentTasks":["consumer","createpvc"],"inputs":{"parameters":{"pvc_name":{"taskOutputParameter":{"outputParameterKey":"name","producerTask":"createpvc"}}}},"taskInfo":{"name":"deletepvc"}}' + - name: container + value: '{{workflow.parameters.implementations-ecfc655dce17b0d317707d37fc226fb7de858cc93d45916945122484a13ef725}}' + - name: task-name + value: deletepvc + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: consumer.Succeeded && createpvc.Succeeded + name: deletepvc + template: system-container-driver + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c83a4f299993efdf1e9a883b7d05cb035219013b529f03866c0be2efd4da651d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-producer"},"dependentTasks":["createpvc"],"taskInfo":{"name":"producer"}}' + - name: container + value: '{{workflow.parameters.implementations-c83a4f299993efdf1e9a883b7d05cb035219013b529f03866c0be2efd4da651d}}' + - name: task-name + value: producer + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: kubernetes-config + value: '{{workflow.parameters.kubernetes-comp-producer}}' + depends: createpvc.Succeeded + name: producer-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.producer-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.producer-driver.outputs.parameters.cached-decision}}' + depends: producer-driver.Succeeded + name: producer + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-volume + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - 
'{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_volume_no_cache.yaml b/test_data/compiled-workflows/pipeline_with_volume_no_cache.yaml new file mode 100644 index 00000000000..8fdb303a838 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_volume_no_cache.yaml @@ -0,0 +1,476 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-volume-no-cache- +spec: + arguments: + parameters: + - name: kubernetes-comp-consumer + value: '{"pvcMount":[{"mountPath":"/data","pvcNameParameter":{"taskOutputParameter":{"outputParameterKey":"name","producerTask":"createpvc"}},"taskOutputParameter":{"outputParameterKey":"name","producerTask":"createpvc"}}]}' + - name: components-522286ff8480712f50be32c25dedca8517bb73693bea29170288afdd11255e1d + value: '{"executorLabel":"exec-consumer","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-522286ff8480712f50be32c25dedca8517bb73693bea29170288afdd11255e1d + value: '{"args":["--executor_input","{{$}}","--function_to_execute","consumer"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + consumer() -\u003e str:\n with open(''/data/file.txt'', ''r'') as file:\n content + = file.read()\n print(content)\n return content\n\n"],"image":"python:3.9"}' + - name: components-98f254581598234b59377784d6cbf209de79e0bcda8013fe4c4397b5d3a26767 + value: '{"executorLabel":"exec-createpvc","inputDefinitions":{"parameters":{"access_modes":{"description":"AccessModes + to request for the provisioned PVC. May\nbe one or more of ``''ReadWriteOnce''``, + ``''ReadOnlyMany''``, ``''ReadWriteMany''``, or\n``''ReadWriteOncePod''``. + Corresponds to `PersistentVolumeClaim.spec.accessModes \u003chttps://kubernetes.io/docs/concepts/storage/persistent-volumes/#access-modes\u003e`_.","parameterType":"LIST"},"annotations":{"description":"Annotations + for the PVC''s metadata. Corresponds to `PersistentVolumeClaim.metadata.annotations + \u003chttps://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaim\u003e`_.","isOptional":true,"parameterType":"STRUCT"},"pvc_name":{"description":"Name + of the PVC. Corresponds to `PersistentVolumeClaim.metadata.name \u003chttps://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaim\u003e`_. + Only one of ``pvc_name`` and ``pvc_name_suffix`` can\nbe provided.","isOptional":true,"parameterType":"STRING"},"pvc_name_suffix":{"description":"Prefix + to use for a dynamically generated name, which\nwill take the form ``\u003cargo-workflow-name\u003e-\u003cpvc_name_suffix\u003e``. + Only one\nof ``pvc_name`` and ``pvc_name_suffix`` can be provided.","isOptional":true,"parameterType":"STRING"},"size":{"description":"The + size of storage requested by the PVC that will be provisioned. For example, + ``''5Gi''``. Corresponds to `PersistentVolumeClaim.spec.resources.requests.storage + \u003chttps://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaimSpec\u003e`_.","parameterType":"STRING"},"storage_class_name":{"defaultValue":"","description":"Name + of StorageClass from which to provision the PV\nto back the PVC. ``None`` + indicates to use the cluster''s default\nstorage_class_name. Set to ``''''`` + for a statically specified PVC.","isOptional":true,"parameterType":"STRING"},"volume_name":{"description":"Pre-existing + PersistentVolume that should back the\nprovisioned PersistentVolumeClaim. + Used for statically\nspecified PV only. 
Corresponds to `PersistentVolumeClaim.spec.volumeName + \u003chttps://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaimSpec\u003e`_.","isOptional":true,"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"name":{"parameterType":"STRING"}}}}' + - name: implementations-98f254581598234b59377784d6cbf209de79e0bcda8013fe4c4397b5d3a26767 + value: '{"image":"argostub/createpvc"}' + - name: components-ecfc655dce17b0d317707d37fc226fb7de858cc93d45916945122484a13ef725 + value: '{"executorLabel":"exec-deletepvc","inputDefinitions":{"parameters":{"pvc_name":{"description":"Name + of the PVC to delete. Supports passing a runtime-generated name, such as a + name provided by ``kubernetes.CreatePvcOp().outputs[''name'']``.","parameterType":"STRING"}}}}' + - name: implementations-ecfc655dce17b0d317707d37fc226fb7de858cc93d45916945122484a13ef725 + value: '{"image":"argostub/deletepvc"}' + - name: kubernetes-comp-producer + value: '{"pvcMount":[{"mountPath":"/data","pvcNameParameter":{"taskOutputParameter":{"outputParameterKey":"name","producerTask":"createpvc"}},"taskOutputParameter":{"outputParameterKey":"name","producerTask":"createpvc"}}]}' + - name: components-c83a4f299993efdf1e9a883b7d05cb035219013b529f03866c0be2efd4da651d + value: '{"executorLabel":"exec-producer","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-c83a4f299993efdf1e9a883b7d05cb035219013b529f03866c0be2efd4da651d + value: '{"args":["--executor_input","{{$}}","--function_to_execute","producer"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + producer() -\u003e str:\n with open(''/data/file.txt'', ''w'') as file:\n file.write(''Hello + world'')\n with open(''/data/file.txt'', ''r'') as file:\n content + = file.read()\n print(content)\n return content\n\n"],"image":"python:3.9"}' + - name: components-root + value: 
'{"dag":{"tasks":{"consumer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-consumer"},"dependentTasks":["createpvc","producer"],"taskInfo":{"name":"consumer"}},"createpvc":{"cachingOptions":{},"componentRef":{"name":"comp-createpvc"},"inputs":{"parameters":{"access_modes":{"runtimeValue":{"constant":["ReadWriteOnce"]}},"pvc_name_suffix":{"runtimeValue":{"constant":"-my-pvc"}},"size":{"runtimeValue":{"constant":"5Mi"}},"storage_class_name":{"runtimeValue":{"constant":"standard"}}}},"taskInfo":{"name":"createpvc"}},"deletepvc":{"cachingOptions":{},"componentRef":{"name":"comp-deletepvc"},"dependentTasks":["consumer","createpvc"],"inputs":{"parameters":{"pvc_name":{"taskOutputParameter":{"outputParameterKey":"name","producerTask":"createpvc"}}}},"taskInfo":{"name":"deletepvc"}},"producer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-producer"},"dependentTasks":["createpvc"],"taskInfo":{"name":"producer"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-volume-no-cache + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + 
command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-522286ff8480712f50be32c25dedca8517bb73693bea29170288afdd11255e1d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-consumer"},"dependentTasks":["createpvc","producer"],"taskInfo":{"name":"consumer"}}' + - name: container + value: '{{workflow.parameters.implementations-522286ff8480712f50be32c25dedca8517bb73693bea29170288afdd11255e1d}}' + - name: task-name + value: consumer + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: kubernetes-config + value: '{{workflow.parameters.kubernetes-comp-consumer}}' + depends: createpvc.Succeeded && producer.Succeeded + name: consumer-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.consumer-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.consumer-driver.outputs.parameters.cached-decision}}' + depends: consumer-driver.Succeeded + name: consumer + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-98f254581598234b59377784d6cbf209de79e0bcda8013fe4c4397b5d3a26767}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-createpvc"},"inputs":{"parameters":{"access_modes":{"runtimeValue":{"constant":["ReadWriteOnce"]}},"pvc_name_suffix":{"runtimeValue":{"constant":"-my-pvc"}},"size":{"runtimeValue":{"constant":"5Mi"}},"storage_class_name":{"runtimeValue":{"constant":"standard"}}}},"taskInfo":{"name":"createpvc"}}' + - name: container + value: '{{workflow.parameters.implementations-98f254581598234b59377784d6cbf209de79e0bcda8013fe4c4397b5d3a26767}}' + - name: task-name + value: createpvc + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: createpvc + template: 
system-container-driver + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ecfc655dce17b0d317707d37fc226fb7de858cc93d45916945122484a13ef725}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-deletepvc"},"dependentTasks":["consumer","createpvc"],"inputs":{"parameters":{"pvc_name":{"taskOutputParameter":{"outputParameterKey":"name","producerTask":"createpvc"}}}},"taskInfo":{"name":"deletepvc"}}' + - name: container + value: '{{workflow.parameters.implementations-ecfc655dce17b0d317707d37fc226fb7de858cc93d45916945122484a13ef725}}' + - name: task-name + value: deletepvc + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: consumer.Succeeded && createpvc.Succeeded + name: deletepvc + template: system-container-driver + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c83a4f299993efdf1e9a883b7d05cb035219013b529f03866c0be2efd4da651d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-producer"},"dependentTasks":["createpvc"],"taskInfo":{"name":"producer"}}' + - name: container + value: '{{workflow.parameters.implementations-c83a4f299993efdf1e9a883b7d05cb035219013b529f03866c0be2efd4da651d}}' + - name: task-name + value: producer + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: kubernetes-config + value: '{{workflow.parameters.kubernetes-comp-producer}}' + depends: createpvc.Succeeded + name: producer-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.producer-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.producer-driver.outputs.parameters.cached-decision}}' + depends: producer-driver.Succeeded + name: producer + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-volume-no-cache + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - 
default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pipeline_with_workspace.yaml b/test_data/compiled-workflows/pipeline_with_workspace.yaml new file mode 100644 index 00000000000..79d717e9601 --- /dev/null +++ b/test_data/compiled-workflows/pipeline_with_workspace.yaml @@ -0,0 +1,428 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pipeline-with-workspace- +spec: + arguments: + parameters: + - name: components-72f73bfd1c06ca202f77c1bc36f3e0e5fba88ef0bca17fb76a71700051b8ac63 + value: '{"executorLabel":"exec-read-from-workspace","inputDefinitions":{"parameters":{"file_path":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-72f73bfd1c06ca202f77c1bc36f3e0e5fba88ef0bca17fb76a71700051b8ac63 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","read_from_workspace"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + read_from_workspace(file_path: str) -\u003e str:\n \"\"\"Read a file from + the workspace using the provided file path.\"\"\" \n import os\n\n if + os.path.exists(file_path):\n with open(file_path, \"r\") as f:\n content + = f.read()\n print(f\"Read content from: {file_path}\")\n print(f\"Content: + {content}\")\n assert content == \"Hello from workspace!\"\n return + content\n else:\n print(f\"File not found at: {file_path}\")\n return + \"File not found\"\n\n"],"image":"python:3.9"}' + - name: components-64aebdd242571626248d5281bdedd9ce0fa8953ccf40f22c7d37ba51574f7e23 + value: '{"executorLabel":"exec-write-to-workspace","inputDefinitions":{"parameters":{"workspace_path":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-64aebdd242571626248d5281bdedd9ce0fa8953ccf40f22c7d37ba51574f7e23 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","write_to_workspace"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + write_to_workspace(workspace_path: str) -\u003e str:\n \"\"\"Write a file + to the workspace.\"\"\" \n import os\n\n # Create a file in the workspace\n file_path + = os.path.join(workspace_path, \"data\", \"test_file.txt\")\n os.makedirs(os.path.dirname(file_path), + exist_ok=True)\n\n with open(file_path, \"w\") as f:\n f.write(\"Hello + from workspace!\")\n\n print(f\"Wrote file to: {file_path}\")\n return + file_path\n\n"],"image":"python:3.9"}' + - name: components-root + value: 
'{"dag":{"outputs":{"parameters":{"Output":{"valueFromParameter":{"outputParameterKey":"Output","producerSubtask":"read-from-workspace"}}}},"tasks":{"read-from-workspace":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-read-from-workspace"},"dependentTasks":["write-to-workspace"],"inputs":{"parameters":{"file_path":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"write-to-workspace"}}}},"taskInfo":{"name":"read-from-workspace"}},"write-to-workspace":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-write-to-workspace"},"inputs":{"parameters":{"workspace_path":{"runtimeValue":{"constant":"{{$.workspace_path}}"}}}},"taskInfo":{"name":"write-to-workspace"}}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pipeline-with-workspace + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + 
fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-72f73bfd1c06ca202f77c1bc36f3e0e5fba88ef0bca17fb76a71700051b8ac63}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-read-from-workspace"},"dependentTasks":["write-to-workspace"],"inputs":{"parameters":{"file_path":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"write-to-workspace"}}}},"taskInfo":{"name":"read-from-workspace"}}' + - name: container + value: '{{workflow.parameters.implementations-72f73bfd1c06ca202f77c1bc36f3e0e5fba88ef0bca17fb76a71700051b8ac63}}' + - name: task-name + value: read-from-workspace + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: write-to-workspace.Succeeded + name: read-from-workspace-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.read-from-workspace-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.read-from-workspace-driver.outputs.parameters.cached-decision}}' + depends: read-from-workspace-driver.Succeeded + name: read-from-workspace + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-64aebdd242571626248d5281bdedd9ce0fa8953ccf40f22c7d37ba51574f7e23}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-write-to-workspace"},"inputs":{"parameters":{"workspace_path":{"runtimeValue":{"constant":"{{$.workspace_path}}"}}}},"taskInfo":{"name":"write-to-workspace"}}' + - name: container + value: '{{workflow.parameters.implementations-64aebdd242571626248d5281bdedd9ce0fa8953ccf40f22c7d37ba51574f7e23}}' + - name: task-name + value: write-to-workspace + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: write-to-workspace-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.write-to-workspace-driver.outputs.parameters.pod-spec-patch}}' + - 
default: "false" + name: cached-decision + value: '{{tasks.write-to-workspace-driver.outputs.parameters.cached-decision}}' + depends: write-to-workspace-driver.Succeeded + name: write-to-workspace + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pipeline-with-workspace + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} + volumeClaimTemplates: + - metadata: + creationTimestamp: null + name: kfp-workspace + spec: + resources: + requests: + storage: 1Gi + storageClassName: standard + status: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/placeholder_with_if_placeholder_none_input_value.yaml b/test_data/compiled-workflows/placeholder_with_if_placeholder_none_input_value.yaml new file mode 100644 index 00000000000..5fdb5a3be1b --- /dev/null +++ b/test_data/compiled-workflows/placeholder_with_if_placeholder_none_input_value.yaml @@ -0,0 +1,363 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: one-step-pipeline-with-if-placeholder-supply-none- +spec: + arguments: + parameters: + - 
name: components-685cc8721e4d47ea4a7b1a8012621f5d72de4fa023d17bce3261c2c91f95ee2d + value: '{"executorLabel":"exec-component-with-optional-inputs","inputDefinitions":{"parameters":{"optional_input_1":{"isOptional":true,"parameterType":"STRING"},"optional_input_2":{"isOptional":true,"parameterType":"STRING"},"required_input":{"parameterType":"STRING"}}}}' + - name: implementations-685cc8721e4d47ea4a7b1a8012621f5d72de4fa023d17bce3261c2c91f95ee2d + value: '{"args":["--arg0","{{$.inputs.parameters[''required_input'']}}","{\"IfPresent\": + {\"InputName\": \"optional_input_1\", \"Then\": [\"--arg1\", \"{{$.inputs.parameters[''optional_input_1'']}}\"]}}","{\"IfPresent\": + {\"InputName\": \"optional_input_2\", \"Then\": [\"--arg2\", \"{{$.inputs.parameters[''optional_input_2'']}}\"], + \"Else\": [\"--arg2\", \"default value\"]}}"],"command":["echo"],"image":"ghcr.io/containerd/busybox"}' + - name: components-root + value: '{"dag":{"tasks":{"component-with-optional-inputs":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-with-optional-inputs"},"inputs":{"parameters":{"required_input":{"componentInputParameter":"input0"}}},"taskInfo":{"name":"component-with-optional-inputs"}}}},"inputDefinitions":{"parameters":{"input0":{"defaultValue":"input0","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - one-step-pipeline-with-if-placeholder-supply-none + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + 
- arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-685cc8721e4d47ea4a7b1a8012621f5d72de4fa023d17bce3261c2c91f95ee2d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component-with-optional-inputs"},"inputs":{"parameters":{"required_input":{"componentInputParameter":"input0"}}},"taskInfo":{"name":"component-with-optional-inputs"}}' + - name: container + value: '{{workflow.parameters.implementations-685cc8721e4d47ea4a7b1a8012621f5d72de4fa023d17bce3261c2c91f95ee2d}}' + - name: task-name + value: component-with-optional-inputs + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component-with-optional-inputs-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component-with-optional-inputs-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component-with-optional-inputs-driver.outputs.parameters.cached-decision}}' + depends: component-with-optional-inputs-driver.Succeeded + name: component-with-optional-inputs + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - one-step-pipeline-with-if-placeholder-supply-none + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - 
--dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"input0":"input0"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/preprocess.yaml b/test_data/compiled-workflows/preprocess.yaml new file mode 100644 index 00000000000..6222b7679ea --- /dev/null +++ b/test_data/compiled-workflows/preprocess.yaml @@ -0,0 +1,391 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: preprocess- +spec: + arguments: + parameters: + - name: components-58eb44fee654093cb441c465d8ce6209048c161178ed5ecd93354b97de8df48f + value: '{"executorLabel":"exec-preprocess","inputDefinitions":{"parameters":{"input_dict_parameter":{"parameterType":"STRUCT"},"input_list_parameter":{"parameterType":"LIST"},"message":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"output_dataset_one":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}},"output_dataset_two_path":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}},"parameters":{"output_bool_parameter_path":{"parameterType":"BOOLEAN"},"output_dict_parameter_path":{"parameterType":"STRUCT"},"output_list_parameter_path":{"parameterType":"LIST"},"output_parameter_path":{"parameterType":"STRING"}}}}' + - name: 
implementations-58eb44fee654093cb441c465d8ce6209048c161178ed5ecd93354b97de8df48f + value: '{"args":["--executor_input","{{$}}","--function_to_execute","preprocess"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + preprocess(\n # An input parameter of type string.\n message: str,\n # + An input parameter of type dict.\n input_dict_parameter: Dict[str, int],\n # + An input parameter of type list.\n input_list_parameter: List[str],\n # + Use Output[T] to get a metadata-rich handle to the output artifact\n # + of type `Dataset`.\n output_dataset_one: Output[Dataset],\n # A locally + accessible filepath for another output artifact of type\n # `Dataset`.\n output_dataset_two_path: + OutputPath(''Dataset''),\n # A locally accessible filepath for an output + parameter of type string.\n output_parameter_path: OutputPath(str),\n # + A locally accessible filepath for an output parameter of type bool.\n output_bool_parameter_path: + OutputPath(bool),\n # A locally accessible filepath for an output parameter + of type dict.\n output_dict_parameter_path: OutputPath(Dict[str, int]),\n # + A locally accessible filepath for an output parameter of type list.\n output_list_parameter_path: + OutputPath(List[str]),\n):\n \"\"\"Dummy preprocessing step.\"\"\"\n\n # + Use Dataset.path to access a local file path for writing.\n # One can also + use Dataset.uri to access the actual URI file path.\n with open(output_dataset_one.path, + ''w'') as f:\n f.write(message)\n\n # OutputPath is used to just + pass the local file path of the output artifact\n # to the function.\n with + open(output_dataset_two_path, ''w'') as f:\n f.write(message)\n\n with + open(output_parameter_path, ''w'') as f:\n f.write(message)\n\n with + open(output_bool_parameter_path, ''w'') as f:\n f.write(\n str(True)) # + use either `str()` or `json.dumps()` for bool values.\n\n import json\n with + open(output_dict_parameter_path, ''w'') as f:\n f.write(json.dumps(input_dict_parameter))\n\n with + open(output_list_parameter_path, ''w'') as f:\n f.write(json.dumps(input_list_parameter))\n\n"],"image":"python:3.9"}' + - name: components-root + value: 
'{"dag":{"outputs":{"artifacts":{"output_dataset_one":{"artifactSelectors":[{"outputArtifactKey":"output_dataset_one","producerSubtask":"preprocess"}]},"output_dataset_two_path":{"artifactSelectors":[{"outputArtifactKey":"output_dataset_two_path","producerSubtask":"preprocess"}]}},"parameters":{"output_bool_parameter_path":{"valueFromParameter":{"outputParameterKey":"output_bool_parameter_path","producerSubtask":"preprocess"}},"output_dict_parameter_path":{"valueFromParameter":{"outputParameterKey":"output_dict_parameter_path","producerSubtask":"preprocess"}},"output_list_parameter_path":{"valueFromParameter":{"outputParameterKey":"output_list_parameter_path","producerSubtask":"preprocess"}},"output_parameter_path":{"valueFromParameter":{"outputParameterKey":"output_parameter_path","producerSubtask":"preprocess"}}}},"tasks":{"preprocess":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-preprocess"},"inputs":{"parameters":{"input_dict_parameter":{"componentInputParameter":"input_dict_parameter"},"input_list_parameter":{"componentInputParameter":"input_list_parameter"},"message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"preprocess"}}}},"inputDefinitions":{"parameters":{"input_dict_parameter":{"parameterType":"STRUCT"},"input_list_parameter":{"parameterType":"LIST"},"message":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"output_dataset_one":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}},"output_dataset_two_path":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}},"parameters":{"output_bool_parameter_path":{"parameterType":"BOOLEAN"},"output_dict_parameter_path":{"parameterType":"STRUCT"},"output_list_parameter_path":{"parameterType":"LIST"},"output_parameter_path":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - preprocess + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - 
default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-58eb44fee654093cb441c465d8ce6209048c161178ed5ecd93354b97de8df48f}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-preprocess"},"inputs":{"parameters":{"input_dict_parameter":{"componentInputParameter":"input_dict_parameter"},"input_list_parameter":{"componentInputParameter":"input_list_parameter"},"message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"preprocess"}}' + - name: container + value: '{{workflow.parameters.implementations-58eb44fee654093cb441c465d8ce6209048c161178ed5ecd93354b97de8df48f}}' + - name: task-name + value: preprocess + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: preprocess-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.preprocess-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.preprocess-driver.outputs.parameters.cached-decision}}' + depends: 
preprocess-driver.Succeeded + name: preprocess + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - preprocess + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/producer_consumer_param_pipeline.yaml b/test_data/compiled-workflows/producer_consumer_param_pipeline.yaml new file mode 100644 index 00000000000..799b89a8caa --- /dev/null +++ b/test_data/compiled-workflows/producer_consumer_param_pipeline.yaml @@ -0,0 +1,392 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: producer-consumer-param-pipeline- +spec: + arguments: + parameters: + - name: components-2de1cef765955c7d3216c1e98967e2fec098b8f68d6ee6836a324944ade35ee2 + value: '{"executorLabel":"exec-consumer","inputDefinitions":{"parameters":{"input_value":{"parameterType":"STRING"}}}}' + - name: implementations-2de1cef765955c7d3216c1e98967e2fec098b8f68d6ee6836a324944ade35ee2 + value: '{"command":["sh","-c","set -e -x\necho \"Read from an input parameter: + \" \u0026\u0026 echo 
\"$0\"\n","{{$.inputs.parameters[''input_value'']}}"],"image":"google/cloud-sdk:latest"}' + - name: components-3111b7872fff0cb1b5ac026c55d212223c3295efb99dee1efb0ab793c8883ca6 + value: '{"executorLabel":"exec-producer","inputDefinitions":{"parameters":{"input_text":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"output_value":{"parameterType":"STRING"}}}}' + - name: implementations-3111b7872fff0cb1b5ac026c55d212223c3295efb99dee1efb0ab793c8883ca6 + value: '{"command":["sh","-c","set -e -x\necho \"$0, this is an output parameter\" + | gsutil cp - \"$1\"\n","{{$.inputs.parameters[''input_text'']}}","{{$.outputs.parameters[''output_value''].output_file}}"],"image":"google/cloud-sdk:latest"}' + - name: components-root + value: '{"dag":{"tasks":{"consumer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-consumer"},"dependentTasks":["producer"],"inputs":{"parameters":{"input_value":{"taskOutputParameter":{"outputParameterKey":"output_value","producerTask":"producer"}}}},"taskInfo":{"name":"consumer"}},"producer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-producer"},"inputs":{"parameters":{"input_text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"producer"}}}},"inputDefinitions":{"parameters":{"text":{"defaultValue":"Hello + world","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - producer-consumer-param-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - 
dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-2de1cef765955c7d3216c1e98967e2fec098b8f68d6ee6836a324944ade35ee2}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-consumer"},"dependentTasks":["producer"],"inputs":{"parameters":{"input_value":{"taskOutputParameter":{"outputParameterKey":"output_value","producerTask":"producer"}}}},"taskInfo":{"name":"consumer"}}' + - name: container + value: '{{workflow.parameters.implementations-2de1cef765955c7d3216c1e98967e2fec098b8f68d6ee6836a324944ade35ee2}}' + - name: task-name + value: consumer + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: producer.Succeeded + name: consumer-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.consumer-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.consumer-driver.outputs.parameters.cached-decision}}' + depends: consumer-driver.Succeeded + name: consumer + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-3111b7872fff0cb1b5ac026c55d212223c3295efb99dee1efb0ab793c8883ca6}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-producer"},"inputs":{"parameters":{"input_text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"producer"}}' + - name: container + value: 
'{{workflow.parameters.implementations-3111b7872fff0cb1b5ac026c55d212223c3295efb99dee1efb0ab793c8883ca6}}' + - name: task-name + value: producer + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: producer-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.producer-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.producer-driver.outputs.parameters.cached-decision}}' + depends: producer-driver.Succeeded + name: producer + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - producer-consumer-param-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"text":"Hello world"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pvc_mount.yaml b/test_data/compiled-workflows/pvc_mount.yaml new file mode 100644 index 00000000000..1ac16e1c60e --- /dev/null +++ b/test_data/compiled-workflows/pvc_mount.yaml @@ -0,0 +1,416 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + 
creationTimestamp: null + generateName: pvc-mount-pipeline- +spec: + arguments: + parameters: + - name: kubernetes-comp-consumer + value: '{"pvcMount":[{"componentInputParameter":"pvc_name","mountPath":"/data","pvcNameParameter":{"componentInputParameter":"pvc_name"}}]}' + - name: components-dc9efe4dbc12a5d21a9be59b86f7103d95eaa794480a8f8d10d5199a0badc9d4 + value: '{"executorLabel":"exec-consumer"}' + - name: implementations-dc9efe4dbc12a5d21a9be59b86f7103d95eaa794480a8f8d10d5199a0badc9d4 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","consumer"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + consumer() -\u003e None:\n with open(''/data/file.txt'', ''r'') as f:\n print(f.read())\n\n"],"image":"python:3.9"}' + - name: kubernetes-comp-producer + value: '{"pvcMount":[{"componentInputParameter":"pvc_name","mountPath":"/data","pvcNameParameter":{"componentInputParameter":"pvc_name"}}]}' + - name: components-ad6bd24309f0be281c426daa392d4491a6d223e3a5a56f10dfb7ae7c162f9f26 + value: '{"executorLabel":"exec-producer","outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-ad6bd24309f0be281c426daa392d4491a6d223e3a5a56f10dfb7ae7c162f9f26 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","producer"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.2'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + producer() -\u003e str:\n with open(''/data/file.txt'', ''w'') as f:\n f.write(''hello'')\n with + open(''/data/file.txt'', ''r'') as f:\n return f.read()\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"consumer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-consumer"},"dependentTasks":["producer"],"taskInfo":{"name":"consumer"}},"producer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-producer"},"taskInfo":{"name":"producer"}}}},"inputDefinitions":{"parameters":{"pvc_name":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pvc-mount-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: 
'{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-dc9efe4dbc12a5d21a9be59b86f7103d95eaa794480a8f8d10d5199a0badc9d4}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-consumer"},"dependentTasks":["producer"],"taskInfo":{"name":"consumer"}}' + - name: container + value: '{{workflow.parameters.implementations-dc9efe4dbc12a5d21a9be59b86f7103d95eaa794480a8f8d10d5199a0badc9d4}}' + - name: task-name + value: consumer + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: kubernetes-config + value: '{{workflow.parameters.kubernetes-comp-consumer}}' + depends: producer.Succeeded + name: consumer-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.consumer-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.consumer-driver.outputs.parameters.cached-decision}}' + depends: consumer-driver.Succeeded + name: consumer + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-ad6bd24309f0be281c426daa392d4491a6d223e3a5a56f10dfb7ae7c162f9f26}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-producer"},"taskInfo":{"name":"producer"}}' + - name: container + value: '{{workflow.parameters.implementations-ad6bd24309f0be281c426daa392d4491a6d223e3a5a56f10dfb7ae7c162f9f26}}' + - name: task-name + value: producer + - name: parent-dag-id + value: 
'{{inputs.parameters.parent-dag-id}}' + - name: kubernetes-config + value: '{{workflow.parameters.kubernetes-comp-producer}}' + name: producer-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.producer-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.producer-driver.outputs.parameters.cached-decision}}' + depends: producer-driver.Succeeded + name: producer + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pvc-mount-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pythonic_artifact_with_single_return.yaml b/test_data/compiled-workflows/pythonic_artifact_with_single_return.yaml new file mode 100644 index 00000000000..0e79131239c --- /dev/null +++ b/test_data/compiled-workflows/pythonic_artifact_with_single_return.yaml @@ -0,0 +1,454 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: 
make-language-model-pipeline- +spec: + arguments: + parameters: + - name: components-comp-importer + value: '{"executorLabel":"exec-importer","inputDefinitions":{"parameters":{"uri":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"artifact":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-comp-importer + value: '{"artifactUri":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"},"metadata":{"key":"value"},"typeSchema":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}' + - name: components-c8e04e29003554b439081c46cb32438ef1f27bc6030910c9c95c7df619a4084b + value: '{"executorLabel":"exec-make-language-model","inputDefinitions":{"artifacts":{"text_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}},"outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"}}}}}' + - name: implementations-c8e04e29003554b439081c46cb32438ef1f27bc6030910c9c95c7df619a4084b + value: '{"args":["--executor_input","{{$}}","--function_to_execute","make_language_model"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''dill==0.3.7'' \u0026\u0026 python3 + -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + make_language_model(text_dataset: Dataset) -\u003e Model:\n # dill allows + pickling objects belonging to a function''s local namespace\n import dill\n\n with + open(text_dataset.path) as f:\n text = f.read()\n\n # insert train + on text here #\n\n def dummy_model(x: str) -\u003e str:\n return + x\n\n model = Model(\n uri=dsl.get_uri(suffix=''model''),\n metadata={''data'': + text_dataset.name},\n )\n\n with open(model.path, ''wb'') as f:\n dill.dump(dummy_model, + f)\n\n return model\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"outputs":{"artifacts":{"Output":{"artifactSelectors":[{"outputArtifactKey":"Output","producerSubtask":"make-language-model"}]}}},"tasks":{"importer":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"uri":{"runtimeValue":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"}}}},"taskInfo":{"name":"importer"}},"make-language-model":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-make-language-model"},"dependentTasks":["importer"],"inputs":{"artifacts":{"text_dataset":{"taskOutputArtifact":{"outputArtifactKey":"artifact","producerTask":"importer"}}}},"taskInfo":{"name":"make-language-model"}}}},"outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --executor_type + - 
importer + - --task_spec + - '{{inputs.parameters.task}}' + - --component_spec + - '{{inputs.parameters.component}}' + - --importer_spec + - '{{inputs.parameters.importer}}' + - --pipeline_name + - make-language-model-pipeline + - --run_id + - '{{workflow.uid}}' + - --parent_dag_id + - '{{inputs.parameters.parent-dag-id}}' + - --pod_name + - $(KFP_POD_NAME) + - --pod_uid + - $(KFP_POD_UID) + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - launcher-v2 + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: ghcr.io/kubeflow/kfp-launcher:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: task + - name: component + - name: importer + - name: parent-dag-id + metadata: {} + name: system-importer + outputs: {} + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - make-language-model-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: 
KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"uri":{"runtimeValue":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"}}}},"taskInfo":{"name":"importer"}}' + - name: component + value: '{{workflow.parameters.components-comp-importer}}' + - name: importer + value: '{{workflow.parameters.implementations-comp-importer}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: importer + template: system-importer + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c8e04e29003554b439081c46cb32438ef1f27bc6030910c9c95c7df619a4084b}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-make-language-model"},"dependentTasks":["importer"],"inputs":{"artifacts":{"text_dataset":{"taskOutputArtifact":{"outputArtifactKey":"artifact","producerTask":"importer"}}}},"taskInfo":{"name":"make-language-model"}}' + - name: container + value: '{{workflow.parameters.implementations-c8e04e29003554b439081c46cb32438ef1f27bc6030910c9c95c7df619a4084b}}' + - name: task-name + value: make-language-model + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: importer.Succeeded + name: make-language-model-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.make-language-model-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.make-language-model-driver.outputs.parameters.cached-decision}}' + depends: make-language-model-driver.Succeeded + name: make-language-model + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - make-language-model-pipeline + - --run_id + - 
'{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pythonic_artifacts_test_pipeline.yaml b/test_data/compiled-workflows/pythonic_artifacts_test_pipeline.yaml new file mode 100644 index 00000000000..e8d0a887850 --- /dev/null +++ b/test_data/compiled-workflows/pythonic_artifacts_test_pipeline.yaml @@ -0,0 +1,413 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: pythonic-artifacts-test- +spec: + arguments: + parameters: + - name: components-f03144e0835f90a75b071337bbe5e305fd22a078d3fe93a595dab07e9c256d93 + value: '{"executorLabel":"exec-gen-data","outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-f03144e0835f90a75b071337bbe5e305fd22a078d3fe93a595dab07e9c256d93 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","gen_data"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + gen_data() -\u003e Dataset:\n dataset = Dataset(uri=dsl.get_uri())\n with + open(dataset.path, \"w\") as f:\n f.write(\"some data\")\n\n dataset.metadata[\"length\"] + = len(\"some data\")\n return dataset\n\n"],"image":"python:3.9"}' + - name: components-4271229df0e3741657d9f339d415cee57460bed8e7b47f53f0b87b0c71b90ea8 + value: '{"executorLabel":"exec-train-model","inputDefinitions":{"artifacts":{"dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}},"outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Model","schemaVersion":"0.0.1"}}}}}' + - name: implementations-4271229df0e3741657d9f339d415cee57460bed8e7b47f53f0b87b0c71b90ea8 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","train_model"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + train_model(dataset: Dataset) -\u003e Model:\n with open(dataset.path) + as f:\n lines = f.read()\n\n assert lines == \"some data\"\n assert + dataset.metadata[\"length\"] == len(\"some data\")\n\n model_artifact = + Model(uri=dsl.get_uri(\"model\"))\n with open(model_artifact.path, \"w\") + as f:\n f.write(\"model trained\")\n\n return model_artifact\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"gen-data":{"cachingOptions":{},"componentRef":{"name":"comp-gen-data"},"taskInfo":{"name":"gen-data"}},"train-model":{"cachingOptions":{},"componentRef":{"name":"comp-train-model"},"dependentTasks":["gen-data"],"inputs":{"artifacts":{"dataset":{"taskOutputArtifact":{"outputArtifactKey":"Output","producerTask":"gen-data"}}}},"taskInfo":{"name":"train-model"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - pythonic-artifacts-test + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - 
'{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - 
emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-f03144e0835f90a75b071337bbe5e305fd22a078d3fe93a595dab07e9c256d93}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-gen-data"},"taskInfo":{"name":"gen-data"}}' + - name: container + value: '{{workflow.parameters.implementations-f03144e0835f90a75b071337bbe5e305fd22a078d3fe93a595dab07e9c256d93}}' + - name: task-name + value: gen-data + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: gen-data-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.gen-data-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.gen-data-driver.outputs.parameters.cached-decision}}' + depends: gen-data-driver.Succeeded + name: gen-data + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-4271229df0e3741657d9f339d415cee57460bed8e7b47f53f0b87b0c71b90ea8}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-train-model"},"dependentTasks":["gen-data"],"inputs":{"artifacts":{"dataset":{"taskOutputArtifact":{"outputArtifactKey":"Output","producerTask":"gen-data"}}}},"taskInfo":{"name":"train-model"}}' + - name: container + value: '{{workflow.parameters.implementations-4271229df0e3741657d9f339d415cee57460bed8e7b47f53f0b87b0c71b90ea8}}' + - name: task-name + value: train-model + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: gen-data.Succeeded + name: train-model-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.train-model-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.train-model-driver.outputs.parameters.cached-decision}}' + depends: train-model-driver.Succeeded + name: train-model + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - pythonic-artifacts-test + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + 
requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pythonic_artifacts_with_list_of_artifacts.yaml b/test_data/compiled-workflows/pythonic_artifacts_with_list_of_artifacts.yaml new file mode 100644 index 00000000000..e886a835ebe --- /dev/null +++ b/test_data/compiled-workflows/pythonic_artifacts_with_list_of_artifacts.yaml @@ -0,0 +1,487 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: make-and-join-datasets- +spec: + arguments: + parameters: + - name: components-78078b3cb8d4867a5748fe50ca82fd5a6078e1d7d26a34603c77a1d8bc21c3cb + value: '{"executorLabel":"exec-make-dataset","inputDefinitions":{"parameters":{"text":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-78078b3cb8d4867a5748fe50ca82fd5a6078e1d7d26a34603c77a1d8bc21c3cb + value: '{"args":["--executor_input","{{$}}","--function_to_execute","make_dataset"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + make_dataset(text: str) -\u003e Dataset:\n dataset = Dataset(uri=dsl.get_uri(), + metadata={''length'': len(text)})\n with open(dataset.path, ''w'') as f:\n f.write(text)\n return + dataset\n\n"],"image":"python:3.9"}' + - name: components-comp-for-loop-1 + value: '{"dag":{"outputs":{"artifacts":{"pipelinechannel--make-dataset-Output":{"artifactSelectors":[{"outputArtifactKey":"Output","producerSubtask":"make-dataset"}]}}},"tasks":{"make-dataset":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-make-dataset"},"inputs":{"parameters":{"text":{"componentInputParameter":"pipelinechannel--texts-loop-item"}}},"taskInfo":{"name":"make-dataset"}}}},"inputDefinitions":{"parameters":{"pipelinechannel--texts":{"parameterType":"LIST"},"pipelinechannel--texts-loop-item":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"pipelinechannel--make-dataset-Output":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"isArtifactList":true}}}}' + - name: components-326818c936509d860f5df651a290f7146d2c51d6c6ecf522e82815b71bc56419 + value: '{"executorLabel":"exec-join-datasets","inputDefinitions":{"artifacts":{"datasets":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"},"isArtifactList":true}}},"outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-326818c936509d860f5df651a290f7146d2c51d6c6ecf522e82815b71bc56419 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","join_datasets"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + join_datasets(datasets: List[Dataset]) -\u003e Dataset:\n texts = []\n for + dataset in datasets:\n with open(dataset.path, ''r'') as f:\n texts.append(f.read())\n\n return + ''''.join(texts)\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"outputs":{"artifacts":{"Output":{"artifactSelectors":[{"outputArtifactKey":"Output","producerSubtask":"join-datasets"}]}}},"tasks":{"for-loop-1":{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--texts":{"componentInputParameter":"texts"}}},"parameterIterator":{"itemInput":"pipelinechannel--texts-loop-item","items":{"inputParameter":"pipelinechannel--texts"}},"taskInfo":{"name":"for-loop-1"}},"join-datasets":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-join-datasets"},"dependentTasks":["for-loop-1"],"inputs":{"artifacts":{"datasets":{"taskOutputArtifact":{"outputArtifactKey":"pipelinechannel--make-dataset-Output","producerTask":"for-loop-1"}}}},"taskInfo":{"name":"join-datasets"}}}},"inputDefinitions":{"parameters":{"texts":{"defaultValue":["Hello",","," + ","world!"],"isOptional":true,"parameterType":"LIST"}}},"outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - make-and-join-datasets + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + 
memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-78078b3cb8d4867a5748fe50ca82fd5a6078e1d7d26a34603c77a1d8bc21c3cb}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-make-dataset"},"inputs":{"parameters":{"text":{"componentInputParameter":"pipelinechannel--texts-loop-item"}}},"taskInfo":{"name":"make-dataset"}}' + - name: container + value: '{{workflow.parameters.implementations-78078b3cb8d4867a5748fe50ca82fd5a6078e1d7d26a34603c77a1d8bc21c3cb}}' + - name: task-name + value: make-dataset + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: make-dataset-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.make-dataset-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: 
'{{tasks.make-dataset-driver.outputs.parameters.cached-decision}}' + depends: make-dataset-driver.Succeeded + name: make-dataset + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-1 + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - make-and-join-datasets + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-1}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--texts":{"componentInputParameter":"texts"}}},"parameterIterator":{"itemInput":"pipelinechannel--texts-loop-item","items":{"inputParameter":"pipelinechannel--texts"}},"taskInfo":{"name":"for-loop-1"}}' + name: iteration-item-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' + depends: iteration-item-driver.Succeeded + name: iteration-item + template: comp-for-loop-1 + inputs: + parameters: + - name: parent-dag-id + - name: iteration-index + metadata: {} + name: comp-for-loop-1-iteration + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-for-loop-1}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: 
'{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--texts":{"componentInputParameter":"texts"}}},"parameterIterator":{"itemInput":"pipelinechannel--texts-loop-item","items":{"inputParameter":"pipelinechannel--texts"}},"taskInfo":{"name":"for-loop-1"}}' + name: iteration-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: iteration-index + value: '{{item}}' + depends: iteration-driver.Succeeded + name: iteration-iterations + template: comp-for-loop-1-iteration + withSequence: + count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-for-loop-1-for-loop-1-iterator + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: for-loop-1 + template: comp-for-loop-1-for-loop-1-iterator + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-326818c936509d860f5df651a290f7146d2c51d6c6ecf522e82815b71bc56419}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-join-datasets"},"dependentTasks":["for-loop-1"],"inputs":{"artifacts":{"datasets":{"taskOutputArtifact":{"outputArtifactKey":"pipelinechannel--make-dataset-Output","producerTask":"for-loop-1"}}}},"taskInfo":{"name":"join-datasets"}}' + - name: container + value: '{{workflow.parameters.implementations-326818c936509d860f5df651a290f7146d2c51d6c6ecf522e82815b71bc56419}}' + - name: task-name + value: join-datasets + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: for-loop-1.Succeeded + name: join-datasets-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.join-datasets-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.join-datasets-driver.outputs.parameters.cached-decision}}' + depends: join-datasets-driver.Succeeded + name: join-datasets + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"texts":["Hello",","," ","world!"]}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/pythonic_artifacts_with_multiple_returns.yaml b/test_data/compiled-workflows/pythonic_artifacts_with_multiple_returns.yaml new file mode 100644 index 00000000000..6443e68afe1 --- /dev/null +++ b/test_data/compiled-workflows/pythonic_artifacts_with_multiple_returns.yaml @@ -0,0 +1,448 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: split-datasets-and-return-first- +spec: + arguments: + parameters: + - name: components-86b07190b395429f683afe3cd56a6974f473d3ea998451027e7bfa62aa1be472 + value: 
'{"executorLabel":"exec-make-dataset","outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: implementations-86b07190b395429f683afe3cd56a6974f473d3ea998451027e7bfa62aa1be472 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","make_dataset"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + make_dataset() -\u003e Artifact:\n artifact = Artifact(uri=dsl.get_uri(''dataset''))\n with + open(artifact.path, ''w'') as f:\n f.write(''Hello, world'')\n return + artifact\n\n"],"image":"python:3.9"}' + - name: components-efab22a295fc4d308178d42f46828ee08d7a03921b278b3061e33b418da4a0d6 + value: '{"executorLabel":"exec-dataset-splitter","inputDefinitions":{"artifacts":{"in_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}},"outputDefinitions":{"artifacts":{"dataset1":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}},"dataset2":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-efab22a295fc4d308178d42f46828ee08d7a03921b278b3061e33b418da4a0d6 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","dataset_splitter"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + dataset_splitter(\n in_dataset: Dataset\n) -\u003e NamedTuple(\n ''outputs'',\n dataset1=Dataset,\n dataset2=Dataset,\n):\n\n with + open(in_dataset.path) as f:\n in_data = f.read()\n\n out_data1, + out_data2 = in_data[:len(in_data) // 2], in_data[len(in_data) //\n 2:]\n\n dataset1 + = Dataset(\n uri=dsl.get_uri(suffix=''dataset1''),\n metadata={''original_data'': + in_dataset.name},\n )\n with open(dataset1.path, ''w'') as f:\n f.write(out_data1)\n\n dataset2 + = Dataset(\n uri=dsl.get_uri(suffix=''dataset2''),\n metadata={''original_data'': + in_dataset.name},\n )\n with open(dataset2.path, ''w'') as f:\n f.write(out_data2)\n\n outputs + = NamedTuple(\n ''outputs'',\n dataset1=Dataset,\n dataset2=Dataset,\n )\n return + outputs(dataset1=dataset1, dataset2=dataset2)\n\n"],"image":"python:3.9"}' + - name: components-comp-splitter-pipeline + value: '{"dag":{"outputs":{"artifacts":{"dataset1":{"artifactSelectors":[{"outputArtifactKey":"dataset1","producerSubtask":"dataset-splitter"}]},"dataset2":{"artifactSelectors":[{"outputArtifactKey":"dataset1","producerSubtask":"dataset-splitter"}]}}},"tasks":{"dataset-splitter":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-dataset-splitter"},"inputs":{"artifacts":{"in_dataset":{"componentInputArtifact":"in_dataset"}}},"taskInfo":{"name":"dataset-splitter"}}}},"inputDefinitions":{"artifacts":{"in_dataset":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}},"outputDefinitions":{"artifacts":{"dataset1":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}},"dataset2":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: components-root + value: '{"dag":{"outputs":{"artifacts":{"Output":{"artifactSelectors":[{"outputArtifactKey":"dataset1","producerSubtask":"splitter-pipeline"}]}}},"tasks":{"make-dataset":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-make-dataset"},"taskInfo":{"name":"make-dataset"}},"splitter-pipeline":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-splitter-pipeline"},"dependentTasks":["make-dataset"],"inputs":{"artifacts":{"in_dataset":{"taskOutputArtifact":{"outputArtifactKey":"Output","producerTask":"make-dataset"}}}},"taskInfo":{"name":"splitter-pipeline"}}}},"outputDefinitions":{"artifacts":{"Output":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - split-datasets-and-return-first + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - 
'{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} 
+ name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-efab22a295fc4d308178d42f46828ee08d7a03921b278b3061e33b418da4a0d6}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-dataset-splitter"},"inputs":{"artifacts":{"in_dataset":{"componentInputArtifact":"in_dataset"}}},"taskInfo":{"name":"dataset-splitter"}}' + - name: container + value: '{{workflow.parameters.implementations-efab22a295fc4d308178d42f46828ee08d7a03921b278b3061e33b418da4a0d6}}' + - name: task-name + value: dataset-splitter + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: dataset-splitter-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.dataset-splitter-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.dataset-splitter-driver.outputs.parameters.cached-decision}}' + depends: dataset-splitter-driver.Succeeded + name: dataset-splitter + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: comp-splitter-pipeline + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - split-datasets-and-return-first + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-86b07190b395429f683afe3cd56a6974f473d3ea998451027e7bfa62aa1be472}}' + - name: task + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-make-dataset"},"taskInfo":{"name":"make-dataset"}}' + - name: container + value: '{{workflow.parameters.implementations-86b07190b395429f683afe3cd56a6974f473d3ea998451027e7bfa62aa1be472}}' + - name: task-name + value: make-dataset + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: make-dataset-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.make-dataset-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.make-dataset-driver.outputs.parameters.cached-decision}}' + depends: make-dataset-driver.Succeeded + name: make-dataset + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-comp-splitter-pipeline}}' + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-splitter-pipeline"},"dependentTasks":["make-dataset"],"inputs":{"artifacts":{"in_dataset":{"taskOutputArtifact":{"outputArtifactKey":"Output","producerTask":"make-dataset"}}}},"taskInfo":{"name":"splitter-pipeline"}}' + - name: task-name + value: splitter-pipeline + depends: make-dataset.Succeeded + name: splitter-pipeline-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.splitter-pipeline-driver.outputs.parameters.execution-id}}' + - name: condition + value: '{{tasks.splitter-pipeline-driver.outputs.parameters.condition}}' + depends: splitter-pipeline-driver.Succeeded + name: splitter-pipeline + template: comp-splitter-pipeline + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/ray_integration_compiled.yaml b/test_data/compiled-workflows/ray_integration_compiled.yaml new file mode 100644 index 00000000000..2eb4a2144b8 --- /dev/null +++ b/test_data/compiled-workflows/ray_integration_compiled.yaml @@ -0,0 +1,383 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: ray-integration-test- +spec: + arguments: + parameters: + - name: components-e324569752b9b88e93853985e0f3840219baf7ccad191a166bd7e23ebcd3b5e4 + value: '{"executorLabel":"exec-ray-fn","outputDefinitions":{"parameters":{"Output":{"parameterType":"NUMBER_INTEGER"}}}}' + - name: implementations-e324569752b9b88e93853985e0f3840219baf7ccad191a166bd7e23ebcd3b5e4 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","ray_fn"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 python3 + -m pip install --quiet --no-warn-script-location ''codeflare-sdk==v0.28.1'' + \u0026\u0026 \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf + \"%s\" \"$0\" \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true + python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + ray_fn() -\u003e int:\n import ray # noqa: PLC0415\n from codeflare_sdk + import generate_cert # noqa: PLC0415\n from codeflare_sdk.ray.cluster + import Cluster, ClusterConfiguration # noqa: PLC0415\n\n cluster = Cluster(\n ClusterConfiguration(\n name=\"raytest\",\n num_workers=1,\n head_cpu_requests=1,\n head_cpu_limits=1,\n head_memory_requests=4,\n head_memory_limits=4,\n worker_cpu_requests=1,\n worker_cpu_limits=1,\n worker_memory_requests=1,\n worker_memory_limits=2,\n image=\"quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707\",\n verify_tls=False\n )\n )\n\n # + always clean the resources\n cluster.down()\n print(cluster.status())\n cluster.up()\n cluster.wait_ready()\n print(cluster.status())\n print(cluster.details())\n\n ray_dashboard_uri + = cluster.cluster_dashboard_uri()\n ray_cluster_uri = cluster.cluster_uri()\n print(ray_dashboard_uri)\n print(ray_cluster_uri)\n\n # + before proceeding make sure the cluster exists and the uri is not empty\n assert + ray_cluster_uri, \"Ray cluster needs to be started and set before proceeding\"\n\n # + reset the ray context in case there''s already one.\n ray.shutdown()\n # + establish connection to ray cluster\n generate_cert.generate_tls_cert(cluster.config.name, + cluster.config.namespace)\n generate_cert.export_env(cluster.config.name, + cluster.config.namespace)\n ray.init(address=cluster.cluster_uri(), logging_level=\"DEBUG\")\n print(\"Ray + cluster is up and running: \", ray.is_initialized())\n\n @ray.remote\n def + train_fn():\n return 100\n\n result = ray.get(train_fn.remote())\n assert + 100 == result\n ray.shutdown()\n cluster.down()\n return result\n\n"],"image":"registry.redhat.io/ubi9/python-311@sha256:82a16d7c4da926081c0a4cc72a84d5ce37859b50a371d2f9364313f66b89adf7"}' + - name: components-root + value: '{"dag":{"tasks":{"ray-fn":{"cachingOptions":{},"componentRef":{"name":"comp-ray-fn"},"taskInfo":{"name":"ray-fn"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - ray-integration-test + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - 
'{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: 
'{{workflow.parameters.components-e324569752b9b88e93853985e0f3840219baf7ccad191a166bd7e23ebcd3b5e4}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-ray-fn"},"taskInfo":{"name":"ray-fn"}}' + - name: container + value: '{{workflow.parameters.implementations-e324569752b9b88e93853985e0f3840219baf7ccad191a166bd7e23ebcd3b5e4}}' + - name: task-name + value: ray-fn + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: ray-fn-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.ray-fn-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.ray-fn-driver.outputs.parameters.cached-decision}}' + depends: ray-fn-driver.Succeeded + name: ray-fn + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - ray-integration-test + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/ray_job_integration_compiled.yaml 
b/test_data/compiled-workflows/ray_job_integration_compiled.yaml new file mode 100644 index 00000000000..888b5a6f7e3 --- /dev/null +++ b/test_data/compiled-workflows/ray_job_integration_compiled.yaml @@ -0,0 +1,496 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: ray-integration-test- +spec: + arguments: + parameters: + - name: components-95a95a4327246937049bccce9f1ac13270fef5003171f5e78e231b5aab3aee6d + value: '{"executorLabel":"exec-ray-fn","inputDefinitions":{"parameters":{"AWS_ACCESS_KEY_ID":{"parameterType":"STRING"},"AWS_DEFAULT_ENDPOINT":{"parameterType":"STRING"},"AWS_SECRET_ACCESS_KEY":{"parameterType":"STRING"},"AWS_STORAGE_BUCKET":{"parameterType":"STRING"},"AWS_STORAGE_BUCKET_MNIST_DIR":{"parameterType":"STRING"}}}}' + - name: implementations-95a95a4327246937049bccce9f1ac13270fef5003171f5e78e231b5aab3aee6d + value: '{"args":["--executor_input","{{$}}","--function_to_execute","ray_fn"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 python3 + -m pip install --quiet --no-warn-script-location ''codeflare-sdk==v0.28.1'' + \u0026\u0026 \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf + \"%s\" \"$0\" \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true + python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + ray_fn(\n AWS_DEFAULT_ENDPOINT: str,\n AWS_STORAGE_BUCKET: str,\n AWS_ACCESS_KEY_ID: + str,\n AWS_SECRET_ACCESS_KEY: str,\n AWS_STORAGE_BUCKET_MNIST_DIR: str\n) + -\u003e None:\n import openshift\n import subprocess\n import ray # + noqa: PLC0415\n import tempfile\n from codeflare_sdk import generate_cert # + noqa: PLC0415\n from codeflare_sdk.ray.cluster import Cluster, ClusterConfiguration # + noqa: PLC0415\n from codeflare_sdk.ray.client import RayJobClient\n from + time import sleep\n\n training_script = \"\"\"\nimport os\n\nimport torch\nimport + requests\nfrom pytorch_lightning import LightningModule, Trainer\nfrom pytorch_lightning.callbacks.progress + import TQDMProgressBar\nfrom torch import nn\nfrom torch.nn import functional + as F\nfrom torch.utils.data import DataLoader, random_split, RandomSampler\nfrom + torchmetrics import Accuracy\nfrom torchvision import transforms\nfrom torchvision.datasets + import MNIST\nimport gzip\nimport shutil\nfrom minio import Minio\n\nPATH_DATASETS + = os.environ.get(\"PATH_DATASETS\", \".\")\nBATCH_SIZE = 256 if torch.cuda.is_available() + else 64\n\nlocal_mnist_path = os.path.dirname(os.path.abspath(__file__))\n\nprint(\"prior + to running the trainer\")\nprint(\"MASTER_ADDR: is \", os.getenv(\"MASTER_ADDR\"))\nprint(\"MASTER_PORT: + is \", os.getenv(\"MASTER_PORT\"))\n\nSTORAGE_BUCKET_EXISTS = \"AWS_DEFAULT_ENDPOINT\" + in os.environ\nprint(\"STORAGE_BUCKET_EXISTS: \", STORAGE_BUCKET_EXISTS)\n\nprint(f''Storage_Bucket_Default_Endpoint + : is {os.environ.get(\"AWS_DEFAULT_ENDPOINT\")}'' if \"AWS_DEFAULT_ENDPOINT\" + in os.environ else \"\")\nprint(f''Storage_Bucket_Name : is {os.environ.get(\"AWS_STORAGE_BUCKET\")}'' + if \"AWS_STORAGE_BUCKET\" in os.environ else 
\"\")\nprint(f''Storage_Bucket_Mnist_Directory + : is {os.environ.get(\"AWS_STORAGE_BUCKET_MNIST_DIR\")}'' if \"AWS_STORAGE_BUCKET_MNIST_DIR\" + in os.environ else \"\")\n\n\nclass LitMNIST(LightningModule):\n def __init__(self, + data_dir=PATH_DATASETS, hidden_size=64, learning_rate=2e-4):\n super().__init__()\n\n # + Set our init args as class attributes\n self.data_dir = data_dir\n self.hidden_size + = hidden_size\n self.learning_rate = learning_rate\n\n # Hardcode + some dataset specific attributes\n self.num_classes = 10\n self.dims + = (1, 28, 28)\n channels, width, height = self.dims\n self.transform + = transforms.Compose(\n [\n transforms.ToTensor(),\n transforms.Normalize((0.1307,), + (0.3081,)),\n ]\n )\n\n # Define PyTorch model\n self.model + = nn.Sequential(\n nn.Flatten(),\n nn.Linear(channels + * width * height, hidden_size),\n nn.ReLU(),\n nn.Dropout(0.1),\n nn.Linear(hidden_size, + hidden_size),\n nn.ReLU(),\n nn.Dropout(0.1),\n nn.Linear(hidden_size, + self.num_classes),\n )\n\n self.val_accuracy = Accuracy(task=\"multiclass\", + num_classes=10)\n self.test_accuracy = Accuracy(task=\"multiclass\", + num_classes=10)\n\n def forward(self, x):\n x = self.model(x)\n return + F.log_softmax(x, dim=1)\n\n def training_step(self, batch, batch_idx):\n x, + y = batch\n logits = self(x)\n loss = F.nll_loss(logits, y)\n return + loss\n\n def validation_step(self, batch, batch_idx):\n x, y = batch\n logits + = self(x)\n loss = F.nll_loss(logits, y)\n preds = torch.argmax(logits, + dim=1)\n self.val_accuracy.update(preds, y)\n\n # Calling self.log + will surface up scalars for you in TensorBoard\n self.log(\"val_loss\", + loss, prog_bar=True)\n self.log(\"val_acc\", self.val_accuracy, prog_bar=True)\n\n def + test_step(self, batch, batch_idx):\n x, y = batch\n logits = + self(x)\n loss = F.nll_loss(logits, y)\n preds = torch.argmax(logits, + dim=1)\n self.test_accuracy.update(preds, y)\n\n # Calling self.log + will surface up scalars for you in TensorBoard\n self.log(\"test_loss\", + loss, prog_bar=True)\n self.log(\"test_acc\", self.test_accuracy, prog_bar=True)\n\n def + configure_optimizers(self):\n optimizer = torch.optim.Adam(self.parameters(), + lr=self.learning_rate)\n return optimizer\n\n ####################\n # + DATA RELATED HOOKS\n ####################\n\n def prepare_data(self):\n # + download\n print(\"Downloading MNIST dataset...\")\n\n if (\n STORAGE_BUCKET_EXISTS\n and + os.environ.get(\"AWS_DEFAULT_ENDPOINT\") != \"\"\n and os.environ.get(\"AWS_DEFAULT_ENDPOINT\") + != None\n ):\n print(\"Using storage bucket to download + datasets...\")\n\n dataset_dir = os.path.join(self.data_dir, \"MNIST/raw\")\n endpoint + = os.environ.get(\"AWS_DEFAULT_ENDPOINT\")\n access_key = os.environ.get(\"AWS_ACCESS_KEY_ID\")\n secret_key + = os.environ.get(\"AWS_SECRET_ACCESS_KEY\")\n bucket_name = os.environ.get(\"AWS_STORAGE_BUCKET\")\n\n # + remove prefix if specified in storage bucket endpoint url\n secure + = True\n if endpoint.startswith(\"https://\"):\n endpoint + = endpoint[len(\"https://\") :]\n elif endpoint.startswith(\"http://\"):\n endpoint + = endpoint[len(\"http://\") :]\n secure = False\n\n client + = Minio(\n endpoint,\n access_key=access_key,\n secret_key=secret_key,\n cert_check=False,\n secure=secure,\n )\n\n if + not os.path.exists(dataset_dir):\n os.makedirs(dataset_dir)\n else:\n print(f\"Directory + ''{dataset_dir}'' already exists\")\n\n # To download datasets + from storage bucket''s specific directory, use prefix to provide directory + name\n prefix = 
os.environ.get(\"AWS_STORAGE_BUCKET_MNIST_DIR\")\n # + download all files from prefix folder of storage bucket recursively\n for + item in client.list_objects(bucket_name, prefix=prefix, recursive=True):\n file_name + = item.object_name[len(prefix) + 1 :]\n dataset_file_path = + os.path.join(dataset_dir, file_name)\n if not os.path.exists(dataset_file_path):\n client.fget_object(bucket_name, + item.object_name, dataset_file_path)\n else:\n print(f\"File-path + ''{dataset_file_path}'' already exists\")\n # Unzip files\n with + gzip.open(dataset_file_path, \"rb\") as f_in:\n with open(dataset_file_path.split(\".\")[:-1][0], + \"wb\") as f_out:\n shutil.copyfileobj(f_in, f_out)\n # + delete zip file\n os.remove(dataset_file_path)\n unzipped_filepath + = dataset_file_path.split(\".\")[0]\n if os.path.exists(unzipped_filepath):\n print(\n f\"Unzipped + and saved dataset file to path - {unzipped_filepath}\"\n )\n download_datasets + = False\n\n else:\n print(\"Using default MNIST mirror reference + to download datasets...\")\n download_datasets = True\n\n MNIST(self.data_dir, + train=True, download=download_datasets)\n MNIST(self.data_dir, train=False, + download=download_datasets)\n\n def setup(self, stage=None):\n # + Assign train/val datasets for use in dataloaders\n if stage == \"fit\" + or stage is None:\n mnist_full = MNIST(\n self.data_dir, + train=True, transform=self.transform, download=False\n )\n self.mnist_train, + self.mnist_val = random_split(mnist_full, [55000, 5000])\n\n # Assign + test dataset for use in dataloader(s)\n if stage == \"test\" or stage + is None:\n self.mnist_test = MNIST(\n self.data_dir, + train=False, transform=self.transform, download=False\n )\n\n def + train_dataloader(self):\n return DataLoader(\n self.mnist_train,\n batch_size=BATCH_SIZE,\n sampler=RandomSampler(self.mnist_train, + num_samples=1000),\n )\n\n def val_dataloader(self):\n return + DataLoader(self.mnist_val, batch_size=BATCH_SIZE)\n\n def test_dataloader(self):\n return + DataLoader(self.mnist_test, batch_size=BATCH_SIZE)\n\n# Init DataLoader from + MNIST Dataset\n\nmodel = LitMNIST(data_dir=local_mnist_path)\n\nprint(\"GROUP: + \", int(os.environ.get(\"GROUP_WORLD_SIZE\", 1)))\nprint(\"LOCAL: \", int(os.environ.get(\"LOCAL_WORLD_SIZE\", + 1)))\n\n# Initialize a trainer\ntrainer = Trainer(\n # devices=1 if torch.cuda.is_available() + else None, # limiting got iPython runs\n max_epochs=3,\n callbacks=[TQDMProgressBar(refresh_rate=20)],\n num_nodes=int(os.environ.get(\"GROUP_WORLD_SIZE\", + 1)),\n devices=int(os.environ.get(\"LOCAL_WORLD_SIZE\", 1)),\n strategy=\"ddp\",\n)\n\n# + Train the model\ntrainer.fit(model)\n\"\"\"\n\n pip_requirements = \"\"\"\npytorch_lightning==2.4.0\ntorchmetrics==1.6.0\ntorchvision==0.20.1\nminio\n\"\"\"\n\n def + assert_job_completion(status):\n if status == \"SUCCEEDED\":\n print(f\"Job + has completed: ''{status}''\")\n assert True\n else:\n print(f\"Job + has completed: ''{status}''\")\n assert False\n\n def assert_jobsubmit_withlogin(cluster, + mnist_directory):\n with open(\"/run/secrets/kubernetes.io/serviceaccount/token\") + as token_file:\n auth_token = token_file.read()\n print(\"Auth + token: \" + auth_token)\n ray_dashboard = cluster.cluster_dashboard_uri()\n header + = {\"Authorization\": f\"Bearer {auth_token}\"}\n client = RayJobClient(address=ray_dashboard, + headers=header, verify=False)\n\n submission_id = client.submit_job(\n entrypoint=\"python + mnist.py\",\n runtime_env={\n \"working_dir\": mnist_directory,\n \"pip\": + mnist_directory + 
\"/mnist_pip_requirements.txt\",\n \"env_vars\": + {\n \"AWS_DEFAULT_ENDPOINT\": AWS_DEFAULT_ENDPOINT,\n \"AWS_STORAGE_BUCKET\": + AWS_STORAGE_BUCKET,\n \"AWS_ACCESS_KEY_ID\": AWS_ACCESS_KEY_ID,\n \"AWS_SECRET_ACCESS_KEY\": + AWS_SECRET_ACCESS_KEY,\n \"AWS_STORAGE_BUCKET_MNIST_DIR\": + AWS_STORAGE_BUCKET_MNIST_DIR\n },\n },\n entrypoint_num_cpus=1,\n )\n print(f\"Submitted + job with ID: {submission_id}\")\n done = False\n time = 0\n timeout + = 900\n while not done:\n status = client.get_job_status(submission_id)\n if + status.is_terminal():\n break\n if not done:\n print(status)\n if + timeout and time \u003e= timeout:\n raise TimeoutError(f\"job + has timed out after waiting {timeout}s\")\n sleep(5)\n time + += 5\n\n logs = client.get_job_logs(submission_id)\n print(logs)\n\n assert_job_completion(status)\n\n client.delete_job(submission_id)\n\n cluster.down()\n\n cluster + = Cluster(\n ClusterConfiguration(\n name=\"raytest\",\n num_workers=1,\n head_cpu_requests=1,\n head_cpu_limits=1,\n head_memory_requests=4,\n head_memory_limits=4,\n worker_cpu_requests=1,\n worker_cpu_limits=1,\n worker_memory_requests=1,\n worker_memory_limits=2,\n image=\"quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707\",\n verify_tls=False\n )\n )\n\n # + always clean the resources\n cluster.down()\n print(cluster.status())\n cluster.up()\n cluster.wait_ready()\n print(cluster.status())\n print(cluster.details())\n\n ray_dashboard_uri + = cluster.cluster_dashboard_uri()\n ray_cluster_uri = cluster.cluster_uri()\n print(ray_dashboard_uri)\n print(ray_cluster_uri)\n\n # + before proceeding make sure the cluster exists and the uri is not empty\n assert + ray_cluster_uri, \"Ray cluster needs to be started and set before proceeding\"\n assert + ray_dashboard_uri, \"Ray dashboard needs to be started and set before proceeding\"\n\n mnist_directory + = tempfile.mkdtemp(prefix=\"mnist-dir\")\n with open(mnist_directory + + \"/mnist.py\", \"w\") as mnist_file:\n mnist_file.write(training_script)\n with + open(mnist_directory + \"/mnist_pip_requirements.txt\", \"w\") as pip_requirements_file:\n pip_requirements_file.write(pip_requirements)\n\n assert_jobsubmit_withlogin(cluster, + mnist_directory)\n\n cluster.down()\n\n"],"image":"registry.redhat.io/ubi9/python-311@sha256:82a16d7c4da926081c0a4cc72a84d5ce37859b50a371d2f9364313f66b89adf7"}' + - name: components-root + value: '{"dag":{"tasks":{"ray-fn":{"cachingOptions":{},"componentRef":{"name":"comp-ray-fn"},"inputs":{"parameters":{"AWS_ACCESS_KEY_ID":{"componentInputParameter":"AWS_ACCESS_KEY_ID"},"AWS_DEFAULT_ENDPOINT":{"componentInputParameter":"AWS_DEFAULT_ENDPOINT"},"AWS_SECRET_ACCESS_KEY":{"componentInputParameter":"AWS_SECRET_ACCESS_KEY"},"AWS_STORAGE_BUCKET":{"componentInputParameter":"AWS_STORAGE_BUCKET"},"AWS_STORAGE_BUCKET_MNIST_DIR":{"componentInputParameter":"AWS_STORAGE_BUCKET_MNIST_DIR"}}},"taskInfo":{"name":"ray-fn"}}}},"inputDefinitions":{"parameters":{"AWS_ACCESS_KEY_ID":{"parameterType":"STRING"},"AWS_DEFAULT_ENDPOINT":{"parameterType":"STRING"},"AWS_SECRET_ACCESS_KEY":{"parameterType":"STRING"},"AWS_STORAGE_BUCKET":{"parameterType":"STRING"},"AWS_STORAGE_BUCKET_MNIST_DIR":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - ray-integration-test + - --run_id + 
- '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: 
'{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-95a95a4327246937049bccce9f1ac13270fef5003171f5e78e231b5aab3aee6d}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-ray-fn"},"inputs":{"parameters":{"AWS_ACCESS_KEY_ID":{"componentInputParameter":"AWS_ACCESS_KEY_ID"},"AWS_DEFAULT_ENDPOINT":{"componentInputParameter":"AWS_DEFAULT_ENDPOINT"},"AWS_SECRET_ACCESS_KEY":{"componentInputParameter":"AWS_SECRET_ACCESS_KEY"},"AWS_STORAGE_BUCKET":{"componentInputParameter":"AWS_STORAGE_BUCKET"},"AWS_STORAGE_BUCKET_MNIST_DIR":{"componentInputParameter":"AWS_STORAGE_BUCKET_MNIST_DIR"}}},"taskInfo":{"name":"ray-fn"}}' + - name: container + value: '{{workflow.parameters.implementations-95a95a4327246937049bccce9f1ac13270fef5003171f5e78e231b5aab3aee6d}}' + - name: task-name + value: ray-fn + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: ray-fn-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.ray-fn-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.ray-fn-driver.outputs.parameters.cached-decision}}' + depends: ray-fn-driver.Succeeded + name: ray-fn + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - ray-integration-test + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + 
default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/sequential_v1.yaml b/test_data/compiled-workflows/sequential_v1.yaml new file mode 100644 index 00000000000..2dfe270f4fa --- /dev/null +++ b/test_data/compiled-workflows/sequential_v1.yaml @@ -0,0 +1,384 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: sequential- +spec: + arguments: + parameters: + - name: components-eaac86761cafbf4600ca777dc7022d3e2c469dd34f22ba18fcd4b75a384b615d + value: '{"executorLabel":"exec-echo","inputDefinitions":{"parameters":{"message":{"parameterType":"STRING"}}}}' + - name: implementations-eaac86761cafbf4600ca777dc7022d3e2c469dd34f22ba18fcd4b75a384b615d + value: '{"args":["echo {{$.inputs.parameters[''message'']}}"],"command":["sh","-c"],"image":"library/bash"}' + - name: components-root + value: '{"dag":{"tasks":{"echo":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-echo"},"inputs":{"parameters":{"message":{"componentInputParameter":"param1"}}},"taskInfo":{"name":"echo"}},"echo-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-echo-2"},"inputs":{"parameters":{"message":{"componentInputParameter":"param2"}}},"taskInfo":{"name":"echo-2"}}}},"inputDefinitions":{"parameters":{"param1":{"parameterType":"STRING"},"param2":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - sequential + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + 
resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-eaac86761cafbf4600ca777dc7022d3e2c469dd34f22ba18fcd4b75a384b615d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-echo"},"inputs":{"parameters":{"message":{"componentInputParameter":"param1"}}},"taskInfo":{"name":"echo"}}' + - name: container + value: '{{workflow.parameters.implementations-eaac86761cafbf4600ca777dc7022d3e2c469dd34f22ba18fcd4b75a384b615d}}' + - name: task-name + value: echo + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: echo-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.echo-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: 
'{{tasks.echo-driver.outputs.parameters.cached-decision}}' + depends: echo-driver.Succeeded + name: echo + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-eaac86761cafbf4600ca777dc7022d3e2c469dd34f22ba18fcd4b75a384b615d}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-echo-2"},"inputs":{"parameters":{"message":{"componentInputParameter":"param2"}}},"taskInfo":{"name":"echo-2"}}' + - name: container + value: '{{workflow.parameters.implementations-eaac86761cafbf4600ca777dc7022d3e2c469dd34f22ba18fcd4b75a384b615d}}' + - name: task-name + value: echo-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: echo-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.echo-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.echo-2-driver.outputs.parameters.cached-decision}}' + depends: echo-2-driver.Succeeded + name: echo-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - sequential + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + 
depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/sequential_v2.yaml b/test_data/compiled-workflows/sequential_v2.yaml new file mode 100644 index 00000000000..a4cd42db0f5 --- /dev/null +++ b/test_data/compiled-workflows/sequential_v2.yaml @@ -0,0 +1,389 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: sequential- +spec: + arguments: + parameters: + - name: components-b5daddb26b6451a9de844c9aa33f01e8c04af37d7ac3dcd260612d1979b6a08b + value: '{"executorLabel":"exec-download","inputDefinitions":{"parameters":{"url":{"parameterType":"STRING"}}},"outputDefinitions":{"parameters":{"downloaded":{"parameterType":"STRING"}}}}' + - name: implementations-b5daddb26b6451a9de844c9aa33f01e8c04af37d7ac3dcd260612d1979b6a08b + value: '{"args":["gsutil cp {{$.inputs.parameters[''url'']}} {{$.outputs.parameters[''downloaded''].output_file}}"],"command":["sh","-c"],"image":"google/cloud-sdk"}' + - name: components-db4626120aacf0a2094a96782ad9a93dcdc01de0abd3898f2ec96001765c7d47 + value: '{"executorLabel":"exec-echo","inputDefinitions":{"parameters":{"downloaded":{"parameterType":"STRING"}}}}' + - name: implementations-db4626120aacf0a2094a96782ad9a93dcdc01de0abd3898f2ec96001765c7d47 + value: '{"args":["echo {{$.inputs.parameters[''downloaded'']}}"],"command":["sh","-c"],"image":"library/bash"}' + - name: components-root + value: '{"dag":{"tasks":{"download":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-download"},"inputs":{"parameters":{"url":{"componentInputParameter":"url"}}},"taskInfo":{"name":"download"}},"echo":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-echo"},"dependentTasks":["download"],"inputs":{"parameters":{"downloaded":{"taskOutputParameter":{"outputParameterKey":"downloaded","producerTask":"download"}}}},"taskInfo":{"name":"echo"}}}},"inputDefinitions":{"parameters":{"url":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - sequential + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: 
ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-b5daddb26b6451a9de844c9aa33f01e8c04af37d7ac3dcd260612d1979b6a08b}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-download"},"inputs":{"parameters":{"url":{"componentInputParameter":"url"}}},"taskInfo":{"name":"download"}}' + - name: container + value: '{{workflow.parameters.implementations-b5daddb26b6451a9de844c9aa33f01e8c04af37d7ac3dcd260612d1979b6a08b}}' + - name: task-name + value: download + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: download-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.download-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: 
cached-decision + value: '{{tasks.download-driver.outputs.parameters.cached-decision}}' + depends: download-driver.Succeeded + name: download + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-db4626120aacf0a2094a96782ad9a93dcdc01de0abd3898f2ec96001765c7d47}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-echo"},"dependentTasks":["download"],"inputs":{"parameters":{"downloaded":{"taskOutputParameter":{"outputParameterKey":"downloaded","producerTask":"download"}}}},"taskInfo":{"name":"echo"}}' + - name: container + value: '{{workflow.parameters.implementations-db4626120aacf0a2094a96782ad9a93dcdc01de0abd3898f2ec96001765c7d47}}' + - name: task-name + value: echo + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: download.Succeeded + name: echo-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.echo-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.echo-driver.outputs.parameters.cached-decision}}' + depends: echo-driver.Succeeded + name: echo + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - sequential + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + 
parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/take_nap_compiled.yaml b/test_data/compiled-workflows/take_nap_compiled.yaml new file mode 100644 index 00000000000..a9a6d2c3096 --- /dev/null +++ b/test_data/compiled-workflows/take_nap_compiled.yaml @@ -0,0 +1,410 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: take-nap-pipeline- +spec: + arguments: + parameters: + - name: components-8cf797f23380487bd1f1fb96644cbde025b64d18dae83a7b814083646a23230b + value: '{"executorLabel":"exec-take-nap","inputDefinitions":{"parameters":{"naptime_secs":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-8cf797f23380487bd1f1fb96644cbde025b64d18dae83a7b814083646a23230b + value: '{"args":["--executor_input","{{$}}","--function_to_execute","take_nap"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + take_nap(naptime_secs: int) -\u003e str:\n \"\"\"Sleeps for secs\"\"\"\n from + time import sleep # noqa: PLC0415\n\n print(f\"Sleeping for {naptime_secs} + seconds: Zzzzzz ...\")\n sleep(naptime_secs)\n return \"I''m awake now. + Did I snore?\"\n\n"],"image":"python:3.9"}' + - name: components-5d8466fde111ff505f28c4c8a093c971b044637d9e786d6aa26ff7e24346b224 + value: '{"executorLabel":"exec-wake-up","inputDefinitions":{"parameters":{"message":{"parameterType":"STRING"}}}}' + - name: implementations-5d8466fde111ff505f28c4c8a093c971b044637d9e786d6aa26ff7e24346b224 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","wake_up"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + wake_up(message: str):\n \"\"\"Wakes up from nap printing a message\"\"\"\n print(message)\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"take-nap":{"cachingOptions":{},"componentRef":{"name":"comp-take-nap"},"inputs":{"parameters":{"naptime_secs":{"componentInputParameter":"naptime_secs"}}},"taskInfo":{"name":"take-nap"}},"wake-up":{"cachingOptions":{},"componentRef":{"name":"comp-wake-up"},"dependentTasks":["take-nap"],"inputs":{"parameters":{"message":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"take-nap"}}}},"taskInfo":{"name":"wake-up"}}}},"inputDefinitions":{"parameters":{"naptime_secs":{"defaultValue":900,"isOptional":true,"parameterType":"NUMBER_INTEGER"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - take-nap-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: 
"true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-8cf797f23380487bd1f1fb96644cbde025b64d18dae83a7b814083646a23230b}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-take-nap"},"inputs":{"parameters":{"naptime_secs":{"componentInputParameter":"naptime_secs"}}},"taskInfo":{"name":"take-nap"}}' + - name: container + value: '{{workflow.parameters.implementations-8cf797f23380487bd1f1fb96644cbde025b64d18dae83a7b814083646a23230b}}' + - name: task-name + value: take-nap + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: take-nap-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.take-nap-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.take-nap-driver.outputs.parameters.cached-decision}}' + depends: take-nap-driver.Succeeded + name: take-nap + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-5d8466fde111ff505f28c4c8a093c971b044637d9e786d6aa26ff7e24346b224}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-wake-up"},"dependentTasks":["take-nap"],"inputs":{"parameters":{"message":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"take-nap"}}}},"taskInfo":{"name":"wake-up"}}' + - name: container + value: 
'{{workflow.parameters.implementations-5d8466fde111ff505f28c4c8a093c971b044637d9e786d6aa26ff7e24346b224}}' + - name: task-name + value: wake-up + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: take-nap.Succeeded + name: wake-up-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.wake-up-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.wake-up-driver.outputs.parameters.cached-decision}}' + depends: wake-up-driver.Succeeded + name: wake-up + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - take-nap-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"naptime_secs":900}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/take_nap_pipeline_root_compiled.yaml b/test_data/compiled-workflows/take_nap_pipeline_root_compiled.yaml new file mode 100644 index 00000000000..a9a6d2c3096 --- /dev/null +++ b/test_data/compiled-workflows/take_nap_pipeline_root_compiled.yaml @@ -0,0 
+1,410 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: take-nap-pipeline- +spec: + arguments: + parameters: + - name: components-8cf797f23380487bd1f1fb96644cbde025b64d18dae83a7b814083646a23230b + value: '{"executorLabel":"exec-take-nap","inputDefinitions":{"parameters":{"naptime_secs":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"parameters":{"Output":{"parameterType":"STRING"}}}}' + - name: implementations-8cf797f23380487bd1f1fb96644cbde025b64d18dae83a7b814083646a23230b + value: '{"args":["--executor_input","{{$}}","--function_to_execute","take_nap"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + take_nap(naptime_secs: int) -\u003e str:\n \"\"\"Sleeps for secs\"\"\"\n from + time import sleep # noqa: PLC0415\n\n print(f\"Sleeping for {naptime_secs} + seconds: Zzzzzz ...\")\n sleep(naptime_secs)\n return \"I''m awake now. + Did I snore?\"\n\n"],"image":"python:3.9"}' + - name: components-5d8466fde111ff505f28c4c8a093c971b044637d9e786d6aa26ff7e24346b224 + value: '{"executorLabel":"exec-wake-up","inputDefinitions":{"parameters":{"message":{"parameterType":"STRING"}}}}' + - name: implementations-5d8466fde111ff505f28c4c8a093c971b044637d9e786d6aa26ff7e24346b224 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","wake_up"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + wake_up(message: str):\n \"\"\"Wakes up from nap printing a message\"\"\"\n print(message)\n\n"],"image":"python:3.9"}' + - name: components-root + value: '{"dag":{"tasks":{"take-nap":{"cachingOptions":{},"componentRef":{"name":"comp-take-nap"},"inputs":{"parameters":{"naptime_secs":{"componentInputParameter":"naptime_secs"}}},"taskInfo":{"name":"take-nap"}},"wake-up":{"cachingOptions":{},"componentRef":{"name":"comp-wake-up"},"dependentTasks":["take-nap"],"inputs":{"parameters":{"message":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"take-nap"}}}},"taskInfo":{"name":"wake-up"}}}},"inputDefinitions":{"parameters":{"naptime_secs":{"defaultValue":900,"isOptional":true,"parameterType":"NUMBER_INTEGER"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - take-nap-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: 
"true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-8cf797f23380487bd1f1fb96644cbde025b64d18dae83a7b814083646a23230b}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-take-nap"},"inputs":{"parameters":{"naptime_secs":{"componentInputParameter":"naptime_secs"}}},"taskInfo":{"name":"take-nap"}}' + - name: container + value: '{{workflow.parameters.implementations-8cf797f23380487bd1f1fb96644cbde025b64d18dae83a7b814083646a23230b}}' + - name: task-name + value: take-nap + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: take-nap-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.take-nap-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.take-nap-driver.outputs.parameters.cached-decision}}' + depends: take-nap-driver.Succeeded + name: take-nap + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-5d8466fde111ff505f28c4c8a093c971b044637d9e786d6aa26ff7e24346b224}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-wake-up"},"dependentTasks":["take-nap"],"inputs":{"parameters":{"message":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"take-nap"}}}},"taskInfo":{"name":"wake-up"}}' + - name: container + value: 
'{{workflow.parameters.implementations-5d8466fde111ff505f28c4c8a093c971b044637d9e786d6aa26ff7e24346b224}}' + - name: task-name + value: wake-up + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: take-nap.Succeeded + name: wake-up-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.wake-up-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.wake-up-driver.outputs.parameters.cached-decision}}' + depends: wake-up-driver.Succeeded + name: wake-up + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - take-nap-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"naptime_secs":900}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/two_step_pipeline.yaml b/test_data/compiled-workflows/two_step_pipeline.yaml new file mode 100644 index 00000000000..3cfb1820124 --- /dev/null +++ b/test_data/compiled-workflows/two_step_pipeline.yaml @@ -0,0 +1,390 @@ +apiVersion: 
argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: simple-two-step-pipeline- +spec: + arguments: + parameters: + - name: components-27a1ed0385660429e07f843557e4216aa3a45bcf5cf5dc468af6aab6e63d1c5e + value: '{"executorLabel":"exec-read-from-gcs","inputDefinitions":{"artifacts":{"input_gcs_path":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: implementations-27a1ed0385660429e07f843557e4216aa3a45bcf5cf5dc468af6aab6e63d1c5e + value: '{"command":["sh","-c","set -e -x\ngsutil cat \"$0\"\n","{{$.inputs.artifacts[''input_gcs_path''].uri}}"],"image":"google/cloud-sdk:slim"}' + - name: components-b290238f9f93331c1d7d9f765948cdc6af4420f14f5ebd4ee26539c3055a1493 + value: '{"executorLabel":"exec-write-to-gcs","inputDefinitions":{"parameters":{"text":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"output_gcs_path":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: implementations-b290238f9f93331c1d7d9f765948cdc6af4420f14f5ebd4ee26539c3055a1493 + value: '{"command":["sh","-c","set -e -x\necho \"$0\" | gsutil cp - \"$1\"\n","{{$.inputs.parameters[''text'']}}","{{$.outputs.artifacts[''output_gcs_path''].uri}}"],"image":"google/cloud-sdk:slim"}' + - name: components-root + value: '{"dag":{"tasks":{"read-from-gcs":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-read-from-gcs"},"dependentTasks":["write-to-gcs"],"inputs":{"artifacts":{"input_gcs_path":{"taskOutputArtifact":{"outputArtifactKey":"output_gcs_path","producerTask":"write-to-gcs"}}}},"taskInfo":{"name":"Consumer"}},"write-to-gcs":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-write-to-gcs"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"Producer"}}}},"inputDefinitions":{"parameters":{"text":{"defaultValue":"Hello + KFP!","isOptional":true,"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - simple-two-step-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + 
memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-27a1ed0385660429e07f843557e4216aa3a45bcf5cf5dc468af6aab6e63d1c5e}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-read-from-gcs"},"dependentTasks":["write-to-gcs"],"inputs":{"artifacts":{"input_gcs_path":{"taskOutputArtifact":{"outputArtifactKey":"output_gcs_path","producerTask":"write-to-gcs"}}}},"taskInfo":{"name":"Consumer"}}' + - name: container + value: '{{workflow.parameters.implementations-27a1ed0385660429e07f843557e4216aa3a45bcf5cf5dc468af6aab6e63d1c5e}}' + - name: task-name + value: read-from-gcs + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: write-to-gcs.Succeeded + name: read-from-gcs-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.read-from-gcs-driver.outputs.parameters.pod-spec-patch}}' + - 
default: "false" + name: cached-decision + value: '{{tasks.read-from-gcs-driver.outputs.parameters.cached-decision}}' + depends: read-from-gcs-driver.Succeeded + name: read-from-gcs + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-b290238f9f93331c1d7d9f765948cdc6af4420f14f5ebd4ee26539c3055a1493}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-write-to-gcs"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"Producer"}}' + - name: container + value: '{{workflow.parameters.implementations-b290238f9f93331c1d7d9f765948cdc6af4420f14f5ebd4ee26539c3055a1493}}' + - name: task-name + value: write-to-gcs + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: write-to-gcs-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.write-to-gcs-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.write-to-gcs-driver.outputs.parameters.cached-decision}}' + depends: write-to-gcs-driver.Succeeded + name: write-to-gcs + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - simple-two-step-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"text":"Hello KFP!"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: 
system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/two_step_pipeline_containerized.yaml b/test_data/compiled-workflows/two_step_pipeline_containerized.yaml new file mode 100644 index 00000000000..d8418a6fe79 --- /dev/null +++ b/test_data/compiled-workflows/two_step_pipeline_containerized.yaml @@ -0,0 +1,391 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: containerized-two-step-pipeline- +spec: + arguments: + parameters: + - name: components-475bac6023d1f786e45ff58ef0f9941e93c80c64a148e28d3506f4a41ec6e102 + value: '{"executorLabel":"exec-component1","inputDefinitions":{"parameters":{"text":{"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"output_gcs":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-475bac6023d1f786e45ff58ef0f9941e93c80c64a148e28d3506f4a41ec6e102 + value: '{"args":["{{$.inputs.parameters[''text'']}}","{{$.outputs.artifacts[''output_gcs''].path}}"],"command":["sh","-c","mkdir + --parents $(dirname \"$1\") \u0026\u0026 echo \"$0\" \u003e \"$1\""],"image":"alpine"}' + - name: components-a251b98b01e63019fbe86cccecda6133320b517f80c2594d7119857a86df7195 + value: '{"executorLabel":"exec-component2","inputDefinitions":{"artifacts":{"input_gcs":{"artifactType":{"schemaTitle":"system.Dataset","schemaVersion":"0.0.1"}}}}}' + - name: implementations-a251b98b01e63019fbe86cccecda6133320b517f80c2594d7119857a86df7195 + value: '{"args":["{{$.inputs.artifacts[''input_gcs''].path}}"],"command":["cat"],"image":"alpine"}' + - name: components-root + value: '{"dag":{"tasks":{"component1":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component1"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"component1"}},"component2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component2"},"dependentTasks":["component1"],"inputs":{"artifacts":{"input_gcs":{"taskOutputArtifact":{"outputArtifactKey":"output_gcs","producerTask":"component1"}}}},"taskInfo":{"name":"component2"}}}},"inputDefinitions":{"parameters":{"text":{"defaultValue":"Hello + KFP Containerized!","parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - containerized-two-step-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - 
'{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-475bac6023d1f786e45ff58ef0f9941e93c80c64a148e28d3506f4a41ec6e102}}' + - name: task + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component1"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"component1"}}' + - name: container + value: '{{workflow.parameters.implementations-475bac6023d1f786e45ff58ef0f9941e93c80c64a148e28d3506f4a41ec6e102}}' + - name: task-name + value: component1 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: component1-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component1-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component1-driver.outputs.parameters.cached-decision}}' + depends: component1-driver.Succeeded + name: component1 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-a251b98b01e63019fbe86cccecda6133320b517f80c2594d7119857a86df7195}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-component2"},"dependentTasks":["component1"],"inputs":{"artifacts":{"input_gcs":{"taskOutputArtifact":{"outputArtifactKey":"output_gcs","producerTask":"component1"}}}},"taskInfo":{"name":"component2"}}' + - name: container + value: '{{workflow.parameters.implementations-a251b98b01e63019fbe86cccecda6133320b517f80c2594d7119857a86df7195}}' + - name: task-name + value: component2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: component1.Succeeded + name: component2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.component2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.component2-driver.outputs.parameters.cached-decision}}' + depends: component2-driver.Succeeded + name: component2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - containerized-two-step-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - 
default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{"parameterValues":{"text":"Hello KFP Containerized!"}}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/upload_download_compiled.yaml b/test_data/compiled-workflows/upload_download_compiled.yaml new file mode 100644 index 00000000000..9944e08c39c --- /dev/null +++ b/test_data/compiled-workflows/upload_download_compiled.yaml @@ -0,0 +1,474 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: test-data-passing-pipeline-1- +spec: + arguments: + parameters: + - name: components-e5d34ee36841e876565d23eaf4d49fb55df421558117e42fb4f0f56dfb8eb7cb + value: '{"executorLabel":"exec-receive-file","inputDefinitions":{"artifacts":{"incomingfile":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}},"outputDefinitions":{"artifacts":{"saveartifact":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: implementations-e5d34ee36841e876565d23eaf4d49fb55df421558117e42fb4f0f56dfb8eb7cb + value: '{"args":["--executor_input","{{$}}","--function_to_execute","receive_file"],"command":["sh","-c","\nif + ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + receive_file(\n incomingfile: dsl.InputPath(),\n saveartifact: dsl.OutputPath(),\n):\n import + os # noqa: PLC0415\n import shutil # noqa: PLC0415\n\n print(\"reading + %s, size is %s\" % (incomingfile, os.path.getsize(incomingfile)))\n\n with + open(incomingfile, \"rb\") as f:\n b = f.read(1)\n print(\"read + byte: %s\" % b)\n f.close()\n\n print(\"copying in %s to out %s\" + % (incomingfile, saveartifact))\n shutil.copyfile(incomingfile, saveartifact)\n\n"],"image":"python:3.9"}' + - name: components-2baa105dc921c5395c01625982411e9ae576e4eeaed37130715ad12ac007288f + value: '{"executorLabel":"exec-send-file","inputDefinitions":{"parameters":{"file_size_bytes":{"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"artifacts":{"outgoingfile":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: implementations-2baa105dc921c5395c01625982411e9ae576e4eeaed37130715ad12ac007288f + value: '{"args":["--executor_input","{{$}}","--function_to_execute","send_file"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 + \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" + \u003e \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 + -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + send_file(\n file_size_bytes: int,\n outgoingfile: dsl.OutputPath(),\n):\n import + os # noqa: PLC0415\n import zipfile # noqa: PLC0415\n\n def create_large_file(file_path, + size_in_bytes):\n with open(file_path, \"wb\") as f:\n f.write(os.urandom(size_in_bytes))\n\n def + zip_file(input_file_path, output_zip_path):\n with zipfile.ZipFile(output_zip_path, + \"w\", compression=zipfile.ZIP_DEFLATED) as zipf:\n zipf.write(input_file_path, + os.path.basename(input_file_path))\n\n print(\"starting creating the file...\")\n file_path + = \"/tmp/large_file.txt\"\n create_large_file(file_path, file_size_bytes)\n zip_file(file_path, + outgoingfile)\n print(f\"saved: {outgoingfile}\")\n\n"],"image":"python:3.9"}' + - name: components-55196ea4ba5ecbd374c5ad86dbe244930e8558effdeb5a02e2a7f98d71dbbdc6 + value: 
'{"executorLabel":"exec-test-uploaded-artifact","inputDefinitions":{"artifacts":{"previous_step":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}},"parameters":{"bucket_name":{"parameterType":"STRING"},"file_size_bytes":{"parameterType":"NUMBER_INTEGER"},"mlpipeline_minio_artifact_secret":{"parameterType":"STRING"}}}}' + - name: implementations-55196ea4ba5ecbd374c5ad86dbe244930e8558effdeb5a02e2a7f98d71dbbdc6 + value: '{"args":["--executor_input","{{$}}","--function_to_execute","test_uploaded_artifact"],"command":["sh","-c","\nif + ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 + -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''kfp==2.14.3'' ''--no-deps'' + ''typing-extensions\u003e=3.7.4,\u003c5; python_version\u003c\"3.9\"'' \u0026\u0026 python3 + -m pip install --quiet --no-warn-script-location ''minio'' \u0026\u0026 \"$0\" + \"$@\"\n","sh","-ec","program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" \u003e + \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport + kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef + test_uploaded_artifact(\n previous_step: dsl.InputPath(),\n file_size_bytes: + int,\n mlpipeline_minio_artifact_secret: str,\n bucket_name: str,\n):\n import + base64 # noqa: PLC0415\n import json # noqa: PLC0415\n\n from minio + import Minio # noqa: PLC0415\n\n def inner_decode(my_str):\n return + base64.b64decode(my_str).decode(\"utf-8\")\n\n mlpipeline_minio_artifact_secret + = json.loads(mlpipeline_minio_artifact_secret.replace(\"''\", ''\"''))\n host + = inner_decode(mlpipeline_minio_artifact_secret[\"host\"])\n port = inner_decode(mlpipeline_minio_artifact_secret[\"port\"])\n access_key + = inner_decode(mlpipeline_minio_artifact_secret[\"accesskey\"])\n secret_key + = inner_decode(mlpipeline_minio_artifact_secret[\"secretkey\"])\n secure + = inner_decode(mlpipeline_minio_artifact_secret[\"secure\"])\n secure = + secure.lower() == \"true\"\n client = Minio(f\"{host}:{port}\", access_key=access_key, + secret_key=secret_key, secure=secure)\n\n store_object = previous_step.replace(f\"/s3/{bucket_name}/\", + \"\")\n print(f\"parsing {previous_step} to {store_object} \")\n data + = client.get_object(bucket_name, store_object)\n\n with open(\"my-testfile\", + \"wb\") as file_data:\n for d in data.stream(32 * 1024):\n file_data.write(d)\n bytes_written + = file_data.tell()\n\n print(file_size_bytes, bytes_written)\n diff + = round((bytes_written / file_size_bytes) - 1, 3)\n print(diff)\n # + if not matching, the test will fail\n assert diff == 0\n\n"],"image":"python:3.9"}' + - name: components-root + value: 
'{"dag":{"tasks":{"receive-file":{"cachingOptions":{},"componentRef":{"name":"comp-receive-file"},"dependentTasks":["send-file"],"inputs":{"artifacts":{"incomingfile":{"taskOutputArtifact":{"outputArtifactKey":"outgoingfile","producerTask":"send-file"}}}},"taskInfo":{"name":"receive-file"}},"send-file":{"cachingOptions":{},"componentRef":{"name":"comp-send-file"},"inputs":{"parameters":{"file_size_bytes":{"runtimeValue":{"constant":20971520}}}},"taskInfo":{"name":"send-file"}},"test-uploaded-artifact":{"cachingOptions":{},"componentRef":{"name":"comp-test-uploaded-artifact"},"dependentTasks":["receive-file"],"inputs":{"artifacts":{"previous_step":{"taskOutputArtifact":{"outputArtifactKey":"saveartifact","producerTask":"receive-file"}}},"parameters":{"bucket_name":{"componentInputParameter":"bucket_name"},"file_size_bytes":{"runtimeValue":{"constant":20971520}},"mlpipeline_minio_artifact_secret":{"componentInputParameter":"mlpipeline_minio_artifact_secret"}}},"taskInfo":{"name":"test-uploaded-artifact"}}}},"inputDefinitions":{"parameters":{"bucket_name":{"parameterType":"STRING"},"mlpipeline_minio_artifact_secret":{"parameterType":"STRING"}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - test-data-passing-pipeline-1 + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: 
'{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-e5d34ee36841e876565d23eaf4d49fb55df421558117e42fb4f0f56dfb8eb7cb}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-receive-file"},"dependentTasks":["send-file"],"inputs":{"artifacts":{"incomingfile":{"taskOutputArtifact":{"outputArtifactKey":"outgoingfile","producerTask":"send-file"}}}},"taskInfo":{"name":"receive-file"}}' + - name: container + value: '{{workflow.parameters.implementations-e5d34ee36841e876565d23eaf4d49fb55df421558117e42fb4f0f56dfb8eb7cb}}' + - name: task-name + value: receive-file + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: send-file.Succeeded + name: receive-file-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.receive-file-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.receive-file-driver.outputs.parameters.cached-decision}}' + depends: receive-file-driver.Succeeded + name: receive-file + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-2baa105dc921c5395c01625982411e9ae576e4eeaed37130715ad12ac007288f}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-send-file"},"inputs":{"parameters":{"file_size_bytes":{"runtimeValue":{"constant":20971520}}}},"taskInfo":{"name":"send-file"}}' + - name: container + value: '{{workflow.parameters.implementations-2baa105dc921c5395c01625982411e9ae576e4eeaed37130715ad12ac007288f}}' + - name: task-name + value: send-file + - name: parent-dag-id + value: 
'{{inputs.parameters.parent-dag-id}}' + name: send-file-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.send-file-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.send-file-driver.outputs.parameters.cached-decision}}' + depends: send-file-driver.Succeeded + name: send-file + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-55196ea4ba5ecbd374c5ad86dbe244930e8558effdeb5a02e2a7f98d71dbbdc6}}' + - name: task + value: '{"cachingOptions":{},"componentRef":{"name":"comp-test-uploaded-artifact"},"dependentTasks":["receive-file"],"inputs":{"artifacts":{"previous_step":{"taskOutputArtifact":{"outputArtifactKey":"saveartifact","producerTask":"receive-file"}}},"parameters":{"bucket_name":{"componentInputParameter":"bucket_name"},"file_size_bytes":{"runtimeValue":{"constant":20971520}},"mlpipeline_minio_artifact_secret":{"componentInputParameter":"mlpipeline_minio_artifact_secret"}}},"taskInfo":{"name":"test-uploaded-artifact"}}' + - name: container + value: '{{workflow.parameters.implementations-55196ea4ba5ecbd374c5ad86dbe244930e8558effdeb5a02e2a7f98d71dbbdc6}}' + - name: task-name + value: test-uploaded-artifact + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: receive-file.Succeeded + name: test-uploaded-artifact-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.test-uploaded-artifact-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.test-uploaded-artifact-driver.outputs.parameters.cached-decision}}' + depends: test-uploaded-artifact-driver.Succeeded + name: test-uploaded-artifact + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - test-data-passing-pipeline-1 + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: 
"-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + - name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/compiled-workflows/xgboost_sample_pipeline.yaml b/test_data/compiled-workflows/xgboost_sample_pipeline.yaml new file mode 100644 index 00000000000..c45391f47ae --- /dev/null +++ b/test_data/compiled-workflows/xgboost_sample_pipeline.yaml @@ -0,0 +1,804 @@ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + creationTimestamp: null + generateName: xgboost-sample-pipeline- +spec: + arguments: + parameters: + - name: components-7ad329a2ac89be6e1debabcd3d4912ec1f9f56f8d4b9fe479da5dd1ed4fc45e2 + value: '{"executorLabel":"exec-chicago-taxi-trips-dataset","inputDefinitions":{"parameters":{"format":{"defaultValue":"csv","isOptional":true,"parameterType":"STRING"},"limit":{"defaultValue":1000,"isOptional":true,"parameterType":"NUMBER_INTEGER"},"select":{"defaultValue":"trip_id,taxi_id,trip_start_timestamp,trip_end_timestamp,trip_seconds,trip_miles,pickup_census_tract,dropoff_census_tract,pickup_community_area,dropoff_community_area,fare,tips,tolls,extras,trip_total,payment_type,company,pickup_centroid_latitude,pickup_centroid_longitude,pickup_centroid_location,dropoff_centroid_latitude,dropoff_centroid_longitude,dropoff_centroid_location","isOptional":true,"parameterType":"STRING"},"where":{"defaultValue":"trip_start_timestamp\u003e=\"1900-01-01\" + AND trip_start_timestamp\u003c\"2100-01-01\"","isOptional":true,"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"table":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: implementations-7ad329a2ac89be6e1debabcd3d4912ec1f9f56f8d4b9fe479da5dd1ed4fc45e2 + value: '{"command":["sh","-c","set -e -x -o pipefail\noutput_path=\"$0\"\nselect=\"$1\"\nwhere=\"$2\"\nlimit=\"$3\"\nformat=\"$4\"\nmkdir + -p \"$(dirname \"$output_path\")\"\ncurl --get ''https://data.cityofchicago.org/resource/wrvz-psew.''\"${format}\" + \\\n --data-urlencode ''$limit=''\"${limit}\" \\\n --data-urlencode + ''$where=''\"${where}\" \\\n --data-urlencode ''$select=''\"${select}\" + \\\n | tr -d ''\"'' \u003e \"$output_path\" # Removing unneeded quotes + around all numbers\n","{{$.outputs.artifacts[''table''].path}}","{{$.inputs.parameters[''select'']}}","{{$.inputs.parameters[''where'']}}","{{$.inputs.parameters[''limit'']}}","{{$.inputs.parameters[''format'']}}"],"image":"byrnedo/alpine-curl@sha256:548379d0a4a0c08b9e55d9d87a592b7d35d9ab3037f4936f5ccd09d0b625a342"}' + - name: components-4c133da6a0e29aa7910d0718119ec596bd6681678360ab60e38dae9339b3efb9 + value: 
'{"executorLabel":"exec-convert-csv-to-apache-parquet","inputDefinitions":{"artifacts":{"data":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}},"outputDefinitions":{"artifacts":{"output_data":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: implementations-4c133da6a0e29aa7910d0718119ec596bd6681678360ab60e38dae9339b3efb9 + value: '{"args":["--data","{{$.inputs.artifacts[''data''].path}}","--output-data","{{$.outputs.artifacts[''output_data''].path}}"],"command":["sh","-c","(PIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''pyarrow==0.17.1'' + || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location + ''pyarrow==0.17.1'' --user) \u0026\u0026 \"$0\" \"$@\"","python3","-u","-c","def + _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), + exist_ok=True)\n return file_path\n\ndef convert_csv_to_apache_parquet(\n data_path,\n output_data_path,\n):\n ''''''Converts + CSV table to Apache Parquet.\n\n [Apache Parquet](https://parquet.apache.org/)\n\n Annotations:\n author: + Alexey Volkov \u003calexey.volkov@ark-kun.com\u003e\n ''''''\n from + pyarrow import csv, parquet\n\n table = csv.read_csv(data_path)\n parquet.write_table(table, + output_data_path)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Convert + csv to apache parquet'', description=''Converts CSV table to Apache Parquet.\\n\\n [Apache + Parquet](https://parquet.apache.org/)\\n\\n Annotations:\\n author: + Alexey Volkov \u003calexey.volkov@ark-kun.com\u003e'')\n_parser.add_argument(\"--data\", + dest=\"data_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--output-data\", + dest=\"output_data_path\", type=_make_parent_dirs_and_return_path, required=True, + default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n_output_files + = _parsed_args.pop(\"_output_paths\", [])\n\n_outputs = convert_csv_to_apache_parquet(**_parsed_args)\n\n_output_serializers + = [\n\n]\n\nimport os\nfor idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except + OSError:\n pass\n with open(output_file, ''w'') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n"],"image":"python:3.7"}' + - name: components-d1f109ba389501999e4d468354015c7d3a7947affcf7e4697c201c9c1bd6f88b + value: '{"executorLabel":"exec-xgboost-predict","inputDefinitions":{"artifacts":{"data":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}},"model":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}},"parameters":{"label_column":{"isOptional":true,"parameterType":"NUMBER_INTEGER"}}},"outputDefinitions":{"artifacts":{"predictions":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: implementations-d1f109ba389501999e4d468354015c7d3a7947affcf7e4697c201c9c1bd6f88b + value: '{"args":["--data","{{$.inputs.artifacts[''data''].path}}","--model","{{$.inputs.artifacts[''model''].path}}","{\"IfPresent\": + {\"InputName\": \"label_column\", \"Then\": [\"--label-column\", \"{{$.inputs.parameters[''label_column'']}}\"]}}","--predictions","{{$.outputs.artifacts[''predictions''].path}}"],"command":["sh","-c","(PIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''xgboost==1.1.1'' + ''pandas==1.0.5'' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install + --quiet 
--no-warn-script-location ''xgboost==1.1.1'' ''pandas==1.0.5'' --user) + \u0026\u0026 \"$0\" \"$@\"","python3","-u","-c","def _make_parent_dirs_and_return_path(file_path: + str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return + file_path\n\ndef xgboost_predict(\n data_path, # Also supports LibSVM\n model_path,\n predictions_path,\n label_column + = None,\n):\n ''''''Make predictions using a trained XGBoost model.\n\n Args:\n data_path: + Path for the feature data in CSV format.\n model_path: Path for the + trained model in binary XGBoost format.\n predictions_path: Output + path for the predictions.\n label_column: Column containing the label + data.\n\n Annotations:\n author: Alexey Volkov \u003calexey.volkov@ark-kun.com\u003e\n ''''''\n from + pathlib import Path\n\n import numpy\n import pandas\n import xgboost\n\n df + = pandas.read_csv(\n data_path,\n )\n\n if label_column is not + None:\n df = df.drop(columns=[df.columns[label_column]])\n\n testing_data + = xgboost.DMatrix(\n data=df,\n )\n\n model = xgboost.Booster(model_file=model_path)\n\n predictions + = model.predict(testing_data)\n\n Path(predictions_path).parent.mkdir(parents=True, + exist_ok=True)\n numpy.savetxt(predictions_path, predictions)\n\nimport + argparse\n_parser = argparse.ArgumentParser(prog=''Xgboost predict'', description=''Make + predictions using a trained XGBoost model.\\n\\n Args:\\n data_path: + Path for the feature data in CSV format.\\n model_path: Path for the + trained model in binary XGBoost format.\\n predictions_path: Output + path for the predictions.\\n label_column: Column containing the label + data.\\n\\n Annotations:\\n author: Alexey Volkov \u003calexey.volkov@ark-kun.com\u003e'')\n_parser.add_argument(\"--data\", + dest=\"data_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\", + dest=\"model_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--label-column\", + dest=\"label_column\", type=int, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--predictions\", + dest=\"predictions_path\", type=_make_parent_dirs_and_return_path, required=True, + default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs + = xgboost_predict(**_parsed_args)\n"],"image":"python:3.7"}' + - name: components-c62f35abca448022418477855ae1839ecc171d48bb1830ab69e1f75c0649519a + value: '{"executorLabel":"exec-xgboost-predict-2","inputDefinitions":{"artifacts":{"data":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}},"model":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}},"parameters":{"label_column_name":{"isOptional":true,"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"predictions":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: implementations-c62f35abca448022418477855ae1839ecc171d48bb1830ab69e1f75c0649519a + value: '{"args":["--data","{{$.inputs.artifacts[''data''].path}}","--model","{{$.inputs.artifacts[''model''].path}}","{\"IfPresent\": + {\"InputName\": \"label_column_name\", \"Then\": [\"--label-column-name\", + \"{{$.inputs.parameters[''label_column_name'']}}\"]}}","--predictions","{{$.outputs.artifacts[''predictions''].path}}"],"command":["sh","-c","(PIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''xgboost==1.1.1'' + ''pandas==1.0.5'' ''pyarrow==0.17.1'' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 + -m pip install 
--quiet --no-warn-script-location ''xgboost==1.1.1'' ''pandas==1.0.5'' + ''pyarrow==0.17.1'' --user) \u0026\u0026 \"$0\" \"$@\"","python3","-u","-c","def + _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), + exist_ok=True)\n return file_path\n\ndef xgboost_predict(\n data_path,\n model_path,\n predictions_path,\n label_column_name + = None,\n):\n ''''''Make predictions using a trained XGBoost model.\n\n Args:\n data_path: + Path for the feature data in Apache Parquet format.\n model_path: Path + for the trained model in binary XGBoost format.\n predictions_path: + Output path for the predictions.\n label_column_name: Optional. Name + of the column containing the label data that is excluded during the prediction.\n\n Annotations:\n author: + Alexey Volkov \u003calexey.volkov@ark-kun.com\u003e\n ''''''\n from + pathlib import Path\n\n import numpy\n import pandas\n import xgboost\n\n # + Loading data\n df = pandas.read_parquet(data_path)\n if label_column_name:\n df + = df.drop(columns=[label_column_name])\n\n evaluation_data = xgboost.DMatrix(\n data=df,\n )\n\n # + Training\n model = xgboost.Booster(model_file=model_path)\n\n predictions + = model.predict(evaluation_data)\n\n Path(predictions_path).parent.mkdir(parents=True, + exist_ok=True)\n numpy.savetxt(predictions_path, predictions)\n\nimport + argparse\n_parser = argparse.ArgumentParser(prog=''Xgboost predict'', description=''Make + predictions using a trained XGBoost model.\\n\\n Args:\\n data_path: + Path for the feature data in Apache Parquet format.\\n model_path: + Path for the trained model in binary XGBoost format.\\n predictions_path: + Output path for the predictions.\\n label_column_name: Optional. Name + of the column containing the label data that is excluded during the prediction.\\n\\n Annotations:\\n author: + Alexey Volkov \u003calexey.volkov@ark-kun.com\u003e'')\n_parser.add_argument(\"--data\", + dest=\"data_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\", + dest=\"model_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--label-column-name\", + dest=\"label_column_name\", type=str, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--predictions\", + dest=\"predictions_path\", type=_make_parent_dirs_and_return_path, required=True, + default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs + = xgboost_predict(**_parsed_args)\n"],"image":"python:3.7"}' + - name: components-0543e77f07b487833b7b7adaedea608d72a11fd2a76cfe13a80ea1431ce92901 + value: 
'{"executorLabel":"exec-xgboost-train","inputDefinitions":{"artifacts":{"starting_model":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"},"isOptional":true},"training_data":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}},"parameters":{"booster":{"defaultValue":"gbtree","isOptional":true,"parameterType":"STRING"},"booster_params":{"isOptional":true,"parameterType":"STRUCT"},"label_column":{"defaultValue":0,"isOptional":true,"parameterType":"NUMBER_INTEGER"},"learning_rate":{"defaultValue":0.3,"isOptional":true,"parameterType":"NUMBER_DOUBLE"},"max_depth":{"defaultValue":6,"isOptional":true,"parameterType":"NUMBER_INTEGER"},"min_split_loss":{"defaultValue":0,"isOptional":true,"parameterType":"NUMBER_DOUBLE"},"num_iterations":{"defaultValue":10,"isOptional":true,"parameterType":"NUMBER_INTEGER"},"objective":{"defaultValue":"reg:squarederror","isOptional":true,"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"model":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}},"model_config":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: implementations-0543e77f07b487833b7b7adaedea608d72a11fd2a76cfe13a80ea1431ce92901 + value: '{"args":["--training-data","{{$.inputs.artifacts[''training_data''].path}}","{\"IfPresent\": + {\"InputName\": \"starting_model\", \"Then\": [\"--starting-model\", \"{{$.inputs.artifacts[''starting_model''].path}}\"]}}","{\"IfPresent\": + {\"InputName\": \"label_column\", \"Then\": [\"--label-column\", \"{{$.inputs.parameters[''label_column'']}}\"]}}","{\"IfPresent\": + {\"InputName\": \"num_iterations\", \"Then\": [\"--num-iterations\", \"{{$.inputs.parameters[''num_iterations'']}}\"]}}","{\"IfPresent\": + {\"InputName\": \"booster_params\", \"Then\": [\"--booster-params\", \"{{$.inputs.parameters[''booster_params'']}}\"]}}","{\"IfPresent\": + {\"InputName\": \"objective\", \"Then\": [\"--objective\", \"{{$.inputs.parameters[''objective'']}}\"]}}","{\"IfPresent\": + {\"InputName\": \"booster\", \"Then\": [\"--booster\", \"{{$.inputs.parameters[''booster'']}}\"]}}","{\"IfPresent\": + {\"InputName\": \"learning_rate\", \"Then\": [\"--learning-rate\", \"{{$.inputs.parameters[''learning_rate'']}}\"]}}","{\"IfPresent\": + {\"InputName\": \"min_split_loss\", \"Then\": [\"--min-split-loss\", \"{{$.inputs.parameters[''min_split_loss'']}}\"]}}","{\"IfPresent\": + {\"InputName\": \"max_depth\", \"Then\": [\"--max-depth\", \"{{$.inputs.parameters[''max_depth'']}}\"]}}","--model","{{$.outputs.artifacts[''model''].path}}","--model-config","{{$.outputs.artifacts[''model_config''].path}}"],"command":["sh","-c","(PIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''xgboost==1.1.1'' + ''pandas==1.0.5'' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install + --quiet --no-warn-script-location ''xgboost==1.1.1'' ''pandas==1.0.5'' --user) + \u0026\u0026 \"$0\" \"$@\"","python3","-u","-c","def _make_parent_dirs_and_return_path(file_path: + str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return + file_path\n\ndef xgboost_train(\n training_data_path, # Also supports + LibSVM\n model_path,\n model_config_path,\n starting_model_path = + None,\n\n label_column = 0,\n num_iterations = 10,\n booster_params + = None,\n\n # Booster parameters\n objective = ''reg:squarederror'',\n booster + = ''gbtree'',\n learning_rate = 0.3,\n min_split_loss = 0,\n max_depth + = 6,\n):\n ''''''Train an XGBoost model.\n\n 
Args:\n training_data_path: + Path for the training data in CSV format.\n model_path: Output path + for the trained model in binary XGBoost format.\n model_config_path: + Output path for the internal parameter configuration of Booster as a JSON + string.\n starting_model_path: Path for the existing trained model + to start from.\n label_column: Column containing the label data.\n num_boost_rounds: + Number of boosting iterations.\n booster_params: Parameters for the + booster. See https://xgboost.readthedocs.io/en/latest/parameter.html\n objective: + The learning task and the corresponding learning objective.\n See + https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters\n The + most common values are:\n \"reg:squarederror\" - Regression with + squared loss (default).\n \"reg:logistic\" - Logistic regression.\n \"binary:logistic\" + - Logistic regression for binary classification, output probability.\n \"binary:logitraw\" + - Logistic regression for binary classification, output score before logistic + transformation\n \"rank:pairwise\" - Use LambdaMART to perform + pairwise ranking where the pairwise loss is minimized\n \"rank:ndcg\" + - Use LambdaMART to perform list-wise ranking where Normalized Discounted + Cumulative Gain (NDCG) is maximized\n\n Annotations:\n author: Alexey + Volkov \u003calexey.volkov@ark-kun.com\u003e\n ''''''\n import pandas\n import + xgboost\n\n df = pandas.read_csv(\n training_data_path,\n )\n\n training_data + = xgboost.DMatrix(\n data=df.drop(columns=[df.columns[label_column]]),\n label=df[df.columns[label_column]],\n )\n\n booster_params + = booster_params or {}\n booster_params.setdefault(''objective'', objective)\n booster_params.setdefault(''booster'', + booster)\n booster_params.setdefault(''learning_rate'', learning_rate)\n booster_params.setdefault(''min_split_loss'', + min_split_loss)\n booster_params.setdefault(''max_depth'', max_depth)\n\n starting_model + = None\n if starting_model_path:\n starting_model = xgboost.Booster(model_file=starting_model_path)\n\n model + = xgboost.train(\n params=booster_params,\n dtrain=training_data,\n num_boost_round=num_iterations,\n xgb_model=starting_model\n )\n\n # + Saving the model in binary format\n model.save_model(model_path)\n\n model_config_str + = model.save_config()\n with open(model_config_path, ''w'') as model_config_file:\n model_config_file.write(model_config_str)\n\nimport + json\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Xgboost train'', + description=''Train an XGBoost model.\\n\\n Args:\\n training_data_path: + Path for the training data in CSV format.\\n model_path: Output path + for the trained model in binary XGBoost format.\\n model_config_path: + Output path for the internal parameter configuration of Booster as a JSON + string.\\n starting_model_path: Path for the existing trained model + to start from.\\n label_column: Column containing the label data.\\n num_boost_rounds: + Number of boosting iterations.\\n booster_params: Parameters for the + booster. 
See https://xgboost.readthedocs.io/en/latest/parameter.html\\n objective: + The learning task and the corresponding learning objective.\\n See + https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters\\n The + most common values are:\\n \"reg:squarederror\" - Regression with + squared loss (default).\\n \"reg:logistic\" - Logistic regression.\\n \"binary:logistic\" + - Logistic regression for binary classification, output probability.\\n \"binary:logitraw\" + - Logistic regression for binary classification, output score before logistic + transformation\\n \"rank:pairwise\" - Use LambdaMART to perform + pairwise ranking where the pairwise loss is minimized\\n \"rank:ndcg\" + - Use LambdaMART to perform list-wise ranking where Normalized Discounted + Cumulative Gain (NDCG) is maximized\\n\\n Annotations:\\n author: + Alexey Volkov \u003calexey.volkov@ark-kun.com\u003e'')\n_parser.add_argument(\"--training-data\", + dest=\"training_data_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--starting-model\", + dest=\"starting_model_path\", type=str, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--label-column\", + dest=\"label_column\", type=int, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--num-iterations\", + dest=\"num_iterations\", type=int, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--booster-params\", + dest=\"booster_params\", type=json.loads, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--objective\", + dest=\"objective\", type=str, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--booster\", + dest=\"booster\", type=str, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--learning-rate\", + dest=\"learning_rate\", type=float, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--min-split-loss\", + dest=\"min_split_loss\", type=float, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--max-depth\", + dest=\"max_depth\", type=int, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\", + dest=\"model_path\", type=_make_parent_dirs_and_return_path, required=True, + default=argparse.SUPPRESS)\n_parser.add_argument(\"--model-config\", dest=\"model_config_path\", + type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parsed_args + = vars(_parser.parse_args())\n\n_outputs = xgboost_train(**_parsed_args)\n"],"image":"python:3.7"}' + - name: components-0c2f0a07f4057d4f8388562ccd9bbf6ea50c2603b7efb45cadb1ddc872a26cb3 + value: 
'{"executorLabel":"exec-xgboost-train-2","inputDefinitions":{"artifacts":{"starting_model":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"},"isOptional":true},"training_data":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}},"parameters":{"booster":{"defaultValue":"gbtree","isOptional":true,"parameterType":"STRING"},"booster_params":{"isOptional":true,"parameterType":"STRUCT"},"label_column_name":{"parameterType":"STRING"},"learning_rate":{"defaultValue":0.3,"isOptional":true,"parameterType":"NUMBER_DOUBLE"},"max_depth":{"defaultValue":6,"isOptional":true,"parameterType":"NUMBER_INTEGER"},"min_split_loss":{"defaultValue":0,"isOptional":true,"parameterType":"NUMBER_DOUBLE"},"num_iterations":{"defaultValue":10,"isOptional":true,"parameterType":"NUMBER_INTEGER"},"objective":{"defaultValue":"reg:squarederror","isOptional":true,"parameterType":"STRING"}}},"outputDefinitions":{"artifacts":{"model":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}},"model_config":{"artifactType":{"schemaTitle":"system.Artifact","schemaVersion":"0.0.1"}}}}}' + - name: implementations-0c2f0a07f4057d4f8388562ccd9bbf6ea50c2603b7efb45cadb1ddc872a26cb3 + value: '{"args":["--training-data","{{$.inputs.artifacts[''training_data''].path}}","--label-column-name","{{$.inputs.parameters[''label_column_name'']}}","{\"IfPresent\": + {\"InputName\": \"starting_model\", \"Then\": [\"--starting-model\", \"{{$.inputs.artifacts[''starting_model''].path}}\"]}}","{\"IfPresent\": + {\"InputName\": \"num_iterations\", \"Then\": [\"--num-iterations\", \"{{$.inputs.parameters[''num_iterations'']}}\"]}}","{\"IfPresent\": + {\"InputName\": \"booster_params\", \"Then\": [\"--booster-params\", \"{{$.inputs.parameters[''booster_params'']}}\"]}}","{\"IfPresent\": + {\"InputName\": \"objective\", \"Then\": [\"--objective\", \"{{$.inputs.parameters[''objective'']}}\"]}}","{\"IfPresent\": + {\"InputName\": \"booster\", \"Then\": [\"--booster\", \"{{$.inputs.parameters[''booster'']}}\"]}}","{\"IfPresent\": + {\"InputName\": \"learning_rate\", \"Then\": [\"--learning-rate\", \"{{$.inputs.parameters[''learning_rate'']}}\"]}}","{\"IfPresent\": + {\"InputName\": \"min_split_loss\", \"Then\": [\"--min-split-loss\", \"{{$.inputs.parameters[''min_split_loss'']}}\"]}}","{\"IfPresent\": + {\"InputName\": \"max_depth\", \"Then\": [\"--max-depth\", \"{{$.inputs.parameters[''max_depth'']}}\"]}}","--model","{{$.outputs.artifacts[''model''].path}}","--model-config","{{$.outputs.artifacts[''model_config''].path}}"],"command":["sh","-c","(PIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location ''xgboost==1.1.1'' + ''pandas==1.0.5'' ''pyarrow==0.17.1'' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 + -m pip install --quiet --no-warn-script-location ''xgboost==1.1.1'' ''pandas==1.0.5'' + ''pyarrow==0.17.1'' --user) \u0026\u0026 \"$0\" \"$@\"","python3","-u","-c","def + _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), + exist_ok=True)\n return file_path\n\ndef xgboost_train(\n training_data_path,\n model_path,\n model_config_path,\n label_column_name,\n\n starting_model_path + = None,\n\n num_iterations = 10,\n booster_params = None,\n\n # Booster + parameters\n objective = ''reg:squarederror'',\n booster = ''gbtree'',\n learning_rate + = 0.3,\n min_split_loss = 0,\n max_depth = 6,\n):\n ''''''Train an + XGBoost model.\n\n Args:\n training_data_path: Path for the training + data in Apache Parquet format.\n 
model_path: Output path for the trained + model in binary XGBoost format.\n model_config_path: Output path for + the internal parameter configuration of Booster as a JSON string.\n starting_model_path: + Path for the existing trained model to start from.\n label_column_name: + Name of the column containing the label data.\n num_boost_rounds: Number + of boosting iterations.\n booster_params: Parameters for the booster. + See https://xgboost.readthedocs.io/en/latest/parameter.html\n objective: + The learning task and the corresponding learning objective.\n See + https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters\n The + most common values are:\n \"reg:squarederror\" - Regression with + squared loss (default).\n \"reg:logistic\" - Logistic regression.\n \"binary:logistic\" + - Logistic regression for binary classification, output probability.\n \"binary:logitraw\" + - Logistic regression for binary classification, output score before logistic + transformation\n \"rank:pairwise\" - Use LambdaMART to perform + pairwise ranking where the pairwise loss is minimized\n \"rank:ndcg\" + - Use LambdaMART to perform list-wise ranking where Normalized Discounted + Cumulative Gain (NDCG) is maximized\n\n Annotations:\n author: Alexey + Volkov \u003calexey.volkov@ark-kun.com\u003e\n ''''''\n import pandas\n import + xgboost\n\n # Loading data\n df = pandas.read_parquet(training_data_path)\n training_data + = xgboost.DMatrix(\n data=df.drop(columns=[label_column_name]),\n label=df[[label_column_name]],\n )\n # + Training\n booster_params = booster_params or {}\n booster_params.setdefault(''objective'', + objective)\n booster_params.setdefault(''booster'', booster)\n booster_params.setdefault(''learning_rate'', + learning_rate)\n booster_params.setdefault(''min_split_loss'', min_split_loss)\n booster_params.setdefault(''max_depth'', + max_depth)\n\n starting_model = None\n if starting_model_path:\n starting_model + = xgboost.Booster(model_file=starting_model_path)\n\n model = xgboost.train(\n params=booster_params,\n dtrain=training_data,\n num_boost_round=num_iterations,\n xgb_model=starting_model\n )\n\n # + Saving the model in binary format\n model.save_model(model_path)\n\n model_config_str + = model.save_config()\n with open(model_config_path, ''w'') as model_config_file:\n model_config_file.write(model_config_str)\n\nimport + json\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Xgboost train'', + description=''Train an XGBoost model.\\n\\n Args:\\n training_data_path: + Path for the training data in Apache Parquet format.\\n model_path: + Output path for the trained model in binary XGBoost format.\\n model_config_path: + Output path for the internal parameter configuration of Booster as a JSON + string.\\n starting_model_path: Path for the existing trained model + to start from.\\n label_column_name: Name of the column containing + the label data.\\n num_boost_rounds: Number of boosting iterations.\\n booster_params: + Parameters for the booster. 
See https://xgboost.readthedocs.io/en/latest/parameter.html\\n objective: + The learning task and the corresponding learning objective.\\n See + https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters\\n The + most common values are:\\n \"reg:squarederror\" - Regression with + squared loss (default).\\n \"reg:logistic\" - Logistic regression.\\n \"binary:logistic\" + - Logistic regression for binary classification, output probability.\\n \"binary:logitraw\" + - Logistic regression for binary classification, output score before logistic + transformation\\n \"rank:pairwise\" - Use LambdaMART to perform + pairwise ranking where the pairwise loss is minimized\\n \"rank:ndcg\" + - Use LambdaMART to perform list-wise ranking where Normalized Discounted + Cumulative Gain (NDCG) is maximized\\n\\n Annotations:\\n author: + Alexey Volkov \u003calexey.volkov@ark-kun.com\u003e'')\n_parser.add_argument(\"--training-data\", + dest=\"training_data_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--label-column-name\", + dest=\"label_column_name\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--starting-model\", + dest=\"starting_model_path\", type=str, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--num-iterations\", + dest=\"num_iterations\", type=int, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--booster-params\", + dest=\"booster_params\", type=json.loads, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--objective\", + dest=\"objective\", type=str, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--booster\", + dest=\"booster\", type=str, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--learning-rate\", + dest=\"learning_rate\", type=float, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--min-split-loss\", + dest=\"min_split_loss\", type=float, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--max-depth\", + dest=\"max_depth\", type=int, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\", + dest=\"model_path\", type=_make_parent_dirs_and_return_path, required=True, + default=argparse.SUPPRESS)\n_parser.add_argument(\"--model-config\", dest=\"model_config_path\", + type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parsed_args + = vars(_parser.parse_args())\n\n_outputs = xgboost_train(**_parsed_args)\n"],"image":"python:3.7"}' + - name: components-root + value: '{"dag":{"tasks":{"chicago-taxi-trips-dataset":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-chicago-taxi-trips-dataset"},"inputs":{"parameters":{"limit":{"runtimeValue":{"constant":10000}},"select":{"runtimeValue":{"constant":"tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total"}},"where":{"runtimeValue":{"constant":"trip_start_timestamp + \u003e= \"2019-01-01\" AND trip_start_timestamp \u003c 
\"2019-02-01\""}}}},"taskInfo":{"name":"chicago-taxi-trips-dataset"}},"convert-csv-to-apache-parquet":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-convert-csv-to-apache-parquet"},"dependentTasks":["chicago-taxi-trips-dataset"],"inputs":{"artifacts":{"data":{"taskOutputArtifact":{"outputArtifactKey":"table","producerTask":"chicago-taxi-trips-dataset"}}}},"taskInfo":{"name":"convert-csv-to-apache-parquet"}},"xgboost-predict":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-xgboost-predict"},"dependentTasks":["chicago-taxi-trips-dataset","xgboost-train"],"inputs":{"artifacts":{"data":{"taskOutputArtifact":{"outputArtifactKey":"table","producerTask":"chicago-taxi-trips-dataset"}},"model":{"taskOutputArtifact":{"outputArtifactKey":"model","producerTask":"xgboost-train"}}},"parameters":{"label_column":{"runtimeValue":{"constant":0}}}},"taskInfo":{"name":"xgboost-predict"}},"xgboost-predict-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-xgboost-predict-2"},"dependentTasks":["convert-csv-to-apache-parquet","xgboost-train-2"],"inputs":{"artifacts":{"data":{"taskOutputArtifact":{"outputArtifactKey":"output_data","producerTask":"convert-csv-to-apache-parquet"}},"model":{"taskOutputArtifact":{"outputArtifactKey":"model","producerTask":"xgboost-train-2"}}},"parameters":{"label_column_name":{"runtimeValue":{"constant":"tips"}}}},"taskInfo":{"name":"xgboost-predict-2"}},"xgboost-predict-3":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-xgboost-predict-3"},"dependentTasks":["convert-csv-to-apache-parquet","xgboost-train"],"inputs":{"artifacts":{"data":{"taskOutputArtifact":{"outputArtifactKey":"output_data","producerTask":"convert-csv-to-apache-parquet"}},"model":{"taskOutputArtifact":{"outputArtifactKey":"model","producerTask":"xgboost-train"}}},"parameters":{"label_column_name":{"runtimeValue":{"constant":"tips"}}}},"taskInfo":{"name":"xgboost-predict-3"}},"xgboost-predict-4":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-xgboost-predict-4"},"dependentTasks":["chicago-taxi-trips-dataset","xgboost-train-2"],"inputs":{"artifacts":{"data":{"taskOutputArtifact":{"outputArtifactKey":"table","producerTask":"chicago-taxi-trips-dataset"}},"model":{"taskOutputArtifact":{"outputArtifactKey":"model","producerTask":"xgboost-train-2"}}},"parameters":{"label_column":{"runtimeValue":{"constant":0}}}},"taskInfo":{"name":"xgboost-predict-4"}},"xgboost-train":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-xgboost-train"},"dependentTasks":["chicago-taxi-trips-dataset"],"inputs":{"artifacts":{"training_data":{"taskOutputArtifact":{"outputArtifactKey":"table","producerTask":"chicago-taxi-trips-dataset"}}},"parameters":{"label_column":{"runtimeValue":{"constant":0}},"num_iterations":{"runtimeValue":{"constant":200}},"objective":{"runtimeValue":{"constant":"reg:squarederror"}}}},"taskInfo":{"name":"xgboost-train"}},"xgboost-train-2":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-xgboost-train-2"},"dependentTasks":["convert-csv-to-apache-parquet"],"inputs":{"artifacts":{"training_data":{"taskOutputArtifact":{"outputArtifactKey":"output_data","producerTask":"convert-csv-to-apache-parquet"}}},"parameters":{"label_column_name":{"runtimeValue":{"constant":"tips"}},"num_iterations":{"runtimeValue":{"constant":200}},"objective":{"runtimeValue":{"constant":"reg:squarederror"}}}},"taskInfo":{"name":"xgboost-train-2"}}}}}' + entrypoint: entrypoint + podMetadata: + annotations: + 
pipelines.kubeflow.org/v2_component: "true" + labels: + pipelines.kubeflow.org/v2_component: "true" + serviceAccountName: pipeline-runner + templates: + - container: + args: + - --type + - CONTAINER + - --pipeline_name + - xgboost-sample-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --container + - '{{inputs.parameters.container}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --cached_decision_path + - '{{outputs.parameters.cached-decision.path}}' + - --pod_spec_patch_path + - '{{outputs.parameters.pod-spec-patch.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --kubernetes_config + - '{{inputs.parameters.kubernetes-config}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - name: task + - name: container + - name: task-name + - name: parent-dag-id + - default: "-1" + name: iteration-index + - default: "" + name: kubernetes-config + metadata: {} + name: system-container-driver + outputs: + parameters: + - name: pod-spec-patch + valueFrom: + default: "" + path: /tmp/outputs/pod-spec-patch + - default: "false" + name: cached-decision + valueFrom: + default: "false" + path: /tmp/outputs/cached-decision + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: pod-spec-patch + value: '{{inputs.parameters.pod-spec-patch}}' + name: executor + template: system-container-impl + when: '{{inputs.parameters.cached-decision}} != true' + inputs: + parameters: + - name: pod-spec-patch + - default: "false" + name: cached-decision + metadata: {} + name: system-container-executor + outputs: {} + - container: + command: + - should-be-overridden-during-runtime + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + envFrom: + - configMapRef: + name: metadata-grpc-configmap + optional: true + image: gcr.io/ml-pipeline/should-be-overridden-during-runtime + name: "" + resources: {} + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + - mountPath: /gcs + name: gcs-scratch + - mountPath: /s3 + name: s3-scratch + - mountPath: /minio + name: minio-scratch + - mountPath: /.local + name: dot-local-scratch + - mountPath: /.cache + name: dot-cache-scratch + - mountPath: /.config + name: dot-config-scratch + initContainers: + - args: + - --copy + - /kfp-launcher/launch + command: + - launcher-v2 + image: ghcr.io/kubeflow/kfp-launcher:latest + name: kfp-launcher + resources: + limits: + 
cpu: 500m + memory: 128Mi + requests: + cpu: 100m + volumeMounts: + - mountPath: /kfp-launcher + name: kfp-launcher + inputs: + parameters: + - name: pod-spec-patch + metadata: {} + name: system-container-impl + outputs: {} + podSpecPatch: '{{inputs.parameters.pod-spec-patch}}' + volumes: + - emptyDir: {} + name: kfp-launcher + - emptyDir: {} + name: gcs-scratch + - emptyDir: {} + name: s3-scratch + - emptyDir: {} + name: minio-scratch + - emptyDir: {} + name: dot-local-scratch + - emptyDir: {} + name: dot-cache-scratch + - emptyDir: {} + name: dot-config-scratch + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-7ad329a2ac89be6e1debabcd3d4912ec1f9f56f8d4b9fe479da5dd1ed4fc45e2}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-chicago-taxi-trips-dataset"},"inputs":{"parameters":{"limit":{"runtimeValue":{"constant":10000}},"select":{"runtimeValue":{"constant":"tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total"}},"where":{"runtimeValue":{"constant":"trip_start_timestamp + \u003e= \"2019-01-01\" AND trip_start_timestamp \u003c \"2019-02-01\""}}}},"taskInfo":{"name":"chicago-taxi-trips-dataset"}}' + - name: container + value: '{{workflow.parameters.implementations-7ad329a2ac89be6e1debabcd3d4912ec1f9f56f8d4b9fe479da5dd1ed4fc45e2}}' + - name: task-name + value: chicago-taxi-trips-dataset + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + name: chicago-taxi-trips-dataset-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.chicago-taxi-trips-dataset-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.chicago-taxi-trips-dataset-driver.outputs.parameters.cached-decision}}' + depends: chicago-taxi-trips-dataset-driver.Succeeded + name: chicago-taxi-trips-dataset + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-4c133da6a0e29aa7910d0718119ec596bd6681678360ab60e38dae9339b3efb9}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-convert-csv-to-apache-parquet"},"dependentTasks":["chicago-taxi-trips-dataset"],"inputs":{"artifacts":{"data":{"taskOutputArtifact":{"outputArtifactKey":"table","producerTask":"chicago-taxi-trips-dataset"}}}},"taskInfo":{"name":"convert-csv-to-apache-parquet"}}' + - name: container + value: '{{workflow.parameters.implementations-4c133da6a0e29aa7910d0718119ec596bd6681678360ab60e38dae9339b3efb9}}' + - name: task-name + value: convert-csv-to-apache-parquet + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: chicago-taxi-trips-dataset.Succeeded + name: convert-csv-to-apache-parquet-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.convert-csv-to-apache-parquet-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.convert-csv-to-apache-parquet-driver.outputs.parameters.cached-decision}}' + depends: convert-csv-to-apache-parquet-driver.Succeeded + name: convert-csv-to-apache-parquet + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-d1f109ba389501999e4d468354015c7d3a7947affcf7e4697c201c9c1bd6f88b}}' + - name: task + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-xgboost-predict"},"dependentTasks":["chicago-taxi-trips-dataset","xgboost-train"],"inputs":{"artifacts":{"data":{"taskOutputArtifact":{"outputArtifactKey":"table","producerTask":"chicago-taxi-trips-dataset"}},"model":{"taskOutputArtifact":{"outputArtifactKey":"model","producerTask":"xgboost-train"}}},"parameters":{"label_column":{"runtimeValue":{"constant":0}}}},"taskInfo":{"name":"xgboost-predict"}}' + - name: container + value: '{{workflow.parameters.implementations-d1f109ba389501999e4d468354015c7d3a7947affcf7e4697c201c9c1bd6f88b}}' + - name: task-name + value: xgboost-predict + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: chicago-taxi-trips-dataset.Succeeded && xgboost-train.Succeeded + name: xgboost-predict-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.xgboost-predict-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.xgboost-predict-driver.outputs.parameters.cached-decision}}' + depends: xgboost-predict-driver.Succeeded + name: xgboost-predict + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c62f35abca448022418477855ae1839ecc171d48bb1830ab69e1f75c0649519a}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-xgboost-predict-2"},"dependentTasks":["convert-csv-to-apache-parquet","xgboost-train-2"],"inputs":{"artifacts":{"data":{"taskOutputArtifact":{"outputArtifactKey":"output_data","producerTask":"convert-csv-to-apache-parquet"}},"model":{"taskOutputArtifact":{"outputArtifactKey":"model","producerTask":"xgboost-train-2"}}},"parameters":{"label_column_name":{"runtimeValue":{"constant":"tips"}}}},"taskInfo":{"name":"xgboost-predict-2"}}' + - name: container + value: '{{workflow.parameters.implementations-c62f35abca448022418477855ae1839ecc171d48bb1830ab69e1f75c0649519a}}' + - name: task-name + value: xgboost-predict-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: convert-csv-to-apache-parquet.Succeeded && xgboost-train-2.Succeeded + name: xgboost-predict-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.xgboost-predict-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.xgboost-predict-2-driver.outputs.parameters.cached-decision}}' + depends: xgboost-predict-2-driver.Succeeded + name: xgboost-predict-2 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-c62f35abca448022418477855ae1839ecc171d48bb1830ab69e1f75c0649519a}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-xgboost-predict-3"},"dependentTasks":["convert-csv-to-apache-parquet","xgboost-train"],"inputs":{"artifacts":{"data":{"taskOutputArtifact":{"outputArtifactKey":"output_data","producerTask":"convert-csv-to-apache-parquet"}},"model":{"taskOutputArtifact":{"outputArtifactKey":"model","producerTask":"xgboost-train"}}},"parameters":{"label_column_name":{"runtimeValue":{"constant":"tips"}}}},"taskInfo":{"name":"xgboost-predict-3"}}' + - name: container + value: '{{workflow.parameters.implementations-c62f35abca448022418477855ae1839ecc171d48bb1830ab69e1f75c0649519a}}' + - name: task-name + value: 
xgboost-predict-3 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: convert-csv-to-apache-parquet.Succeeded && xgboost-train.Succeeded + name: xgboost-predict-3-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.xgboost-predict-3-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.xgboost-predict-3-driver.outputs.parameters.cached-decision}}' + depends: xgboost-predict-3-driver.Succeeded + name: xgboost-predict-3 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-d1f109ba389501999e4d468354015c7d3a7947affcf7e4697c201c9c1bd6f88b}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-xgboost-predict-4"},"dependentTasks":["chicago-taxi-trips-dataset","xgboost-train-2"],"inputs":{"artifacts":{"data":{"taskOutputArtifact":{"outputArtifactKey":"table","producerTask":"chicago-taxi-trips-dataset"}},"model":{"taskOutputArtifact":{"outputArtifactKey":"model","producerTask":"xgboost-train-2"}}},"parameters":{"label_column":{"runtimeValue":{"constant":0}}}},"taskInfo":{"name":"xgboost-predict-4"}}' + - name: container + value: '{{workflow.parameters.implementations-d1f109ba389501999e4d468354015c7d3a7947affcf7e4697c201c9c1bd6f88b}}' + - name: task-name + value: xgboost-predict-4 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: chicago-taxi-trips-dataset.Succeeded && xgboost-train-2.Succeeded + name: xgboost-predict-4-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.xgboost-predict-4-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.xgboost-predict-4-driver.outputs.parameters.cached-decision}}' + depends: xgboost-predict-4-driver.Succeeded + name: xgboost-predict-4 + template: system-container-executor + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-0543e77f07b487833b7b7adaedea608d72a11fd2a76cfe13a80ea1431ce92901}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-xgboost-train"},"dependentTasks":["chicago-taxi-trips-dataset"],"inputs":{"artifacts":{"training_data":{"taskOutputArtifact":{"outputArtifactKey":"table","producerTask":"chicago-taxi-trips-dataset"}}},"parameters":{"label_column":{"runtimeValue":{"constant":0}},"num_iterations":{"runtimeValue":{"constant":200}},"objective":{"runtimeValue":{"constant":"reg:squarederror"}}}},"taskInfo":{"name":"xgboost-train"}}' + - name: container + value: '{{workflow.parameters.implementations-0543e77f07b487833b7b7adaedea608d72a11fd2a76cfe13a80ea1431ce92901}}' + - name: task-name + value: xgboost-train + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: chicago-taxi-trips-dataset.Succeeded + name: xgboost-train-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.xgboost-train-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.xgboost-train-driver.outputs.parameters.cached-decision}}' + depends: xgboost-train-driver.Succeeded + name: xgboost-train + template: system-container-executor + - arguments: + parameters: + - name: component + value: 
'{{workflow.parameters.components-0c2f0a07f4057d4f8388562ccd9bbf6ea50c2603b7efb45cadb1ddc872a26cb3}}' + - name: task + value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-xgboost-train-2"},"dependentTasks":["convert-csv-to-apache-parquet"],"inputs":{"artifacts":{"training_data":{"taskOutputArtifact":{"outputArtifactKey":"output_data","producerTask":"convert-csv-to-apache-parquet"}}},"parameters":{"label_column_name":{"runtimeValue":{"constant":"tips"}},"num_iterations":{"runtimeValue":{"constant":200}},"objective":{"runtimeValue":{"constant":"reg:squarederror"}}}},"taskInfo":{"name":"xgboost-train-2"}}' + - name: container + value: '{{workflow.parameters.implementations-0c2f0a07f4057d4f8388562ccd9bbf6ea50c2603b7efb45cadb1ddc872a26cb3}}' + - name: task-name + value: xgboost-train-2 + - name: parent-dag-id + value: '{{inputs.parameters.parent-dag-id}}' + depends: convert-csv-to-apache-parquet.Succeeded + name: xgboost-train-2-driver + template: system-container-driver + - arguments: + parameters: + - name: pod-spec-patch + value: '{{tasks.xgboost-train-2-driver.outputs.parameters.pod-spec-patch}}' + - default: "false" + name: cached-decision + value: '{{tasks.xgboost-train-2-driver.outputs.parameters.cached-decision}}' + depends: xgboost-train-2-driver.Succeeded + name: xgboost-train-2 + template: system-container-executor + inputs: + parameters: + - name: parent-dag-id + metadata: {} + name: root + outputs: {} + - container: + args: + - --type + - '{{inputs.parameters.driver-type}}' + - --pipeline_name + - xgboost-sample-pipeline + - --run_id + - '{{workflow.uid}}' + - --run_name + - '{{workflow.name}}' + - --run_display_name + - "" + - --dag_execution_id + - '{{inputs.parameters.parent-dag-id}}' + - --component + - '{{inputs.parameters.component}}' + - --task + - '{{inputs.parameters.task}}' + - --task_name + - '{{inputs.parameters.task-name}}' + - --runtime_config + - '{{inputs.parameters.runtime-config}}' + - --iteration_index + - '{{inputs.parameters.iteration-index}}' + - --execution_id_path + - '{{outputs.parameters.execution-id.path}}' + - --iteration_count_path + - '{{outputs.parameters.iteration-count.path}}' + - --condition_path + - '{{outputs.parameters.condition.path}}' + - --http_proxy + - "" + - --https_proxy + - "" + - --no_proxy + - "" + - --mlPipelineServiceTLSEnabled + - "false" + - --mlmd_server_address + - metadata-grpc-service + - --mlmd_server_port + - "8080" + - --metadataTLSEnabled + - "false" + - --ca_cert_path + - "" + command: + - driver + env: + - name: ML_PIPELINE_SERVICE_HOST + value: ml-pipeline.kubeflow.svc.cluster.local + - name: ML_PIPELINE_SERVICE_PORT_GRPC + value: "8887" + image: ghcr.io/kubeflow/kfp-driver:latest + name: "" + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 64Mi + inputs: + parameters: + - name: component + - default: "" + name: runtime-config + - default: "" + name: task + - default: "" + name: task-name + - default: "0" + name: parent-dag-id + - default: "-1" + name: iteration-index + - default: DAG + name: driver-type + metadata: {} + name: system-dag-driver + outputs: + parameters: + - name: execution-id + valueFrom: + path: /tmp/outputs/execution-id + - name: iteration-count + valueFrom: + default: "0" + path: /tmp/outputs/iteration-count + - name: condition + valueFrom: + default: "true" + path: /tmp/outputs/condition + - dag: + tasks: + - arguments: + parameters: + - name: component + value: '{{workflow.parameters.components-root}}' + - name: runtime-config + value: '{}' + 
- name: driver-type + value: ROOT_DAG + name: root-driver + template: system-dag-driver + - arguments: + parameters: + - name: parent-dag-id + value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: condition + value: "" + depends: root-driver.Succeeded + name: root + template: root + inputs: {} + metadata: {} + name: entrypoint + outputs: {} +status: + finishedAt: null + startedAt: null diff --git a/test_data/kubernetes/seaweedfs/allow-user-namespace-access.yaml b/test_data/kubernetes/seaweedfs/allow-user-namespace-access.yaml new file mode 100644 index 00000000000..3b99091c81d --- /dev/null +++ b/test_data/kubernetes/seaweedfs/allow-user-namespace-access.yaml @@ -0,0 +1,18 @@ +apiVersion: networking.k8s.io/v1 +kind: NetworkPolicy +metadata: + name: allow-user-namespace-access + namespace: kubeflow +spec: + podSelector: {} + policyTypes: + - Ingress + ingress: + - from: + - namespaceSelector: + matchExpressions: + - key: kubernetes.io/metadata.name + operator: In + values: ["kubeflow-user-example-com"] + - from: + - podSelector: {} diff --git a/test_data/kubernetes/seaweedfs/kubeflow-edit-clusterrole.yaml b/test_data/kubernetes/seaweedfs/kubeflow-edit-clusterrole.yaml new file mode 100644 index 00000000000..bdf84eb8391 --- /dev/null +++ b/test_data/kubernetes/seaweedfs/kubeflow-edit-clusterrole.yaml @@ -0,0 +1,46 @@ +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRole +metadata: + name: kubeflow-edit +aggregationRule: + clusterRoleSelectors: + - matchLabels: + rbac.authorization.kubeflow.org/aggregate-to-kubeflow-edit: "true" +rules: [] +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRole +metadata: + name: kubeflow-edit-basic-permissions + labels: + rbac.authorization.kubeflow.org/aggregate-to-kubeflow-edit: "true" +rules: +- apiGroups: + - "" + resources: + - configmaps + - pods + - pods/log + - secrets + - events + - persistentvolumeclaims + - services + verbs: + - get + - list + - create + - update + - patch + - delete +- apiGroups: + - "apps" + resources: + - deployments + - replicasets + verbs: + - get + - list + - create + - update + - patch + - delete diff --git a/test_data/kubernetes/seaweedfs/test-profiles.yaml b/test_data/kubernetes/seaweedfs/test-profiles.yaml new file mode 100644 index 00000000000..2408e02e0a2 --- /dev/null +++ b/test_data/kubernetes/seaweedfs/test-profiles.yaml @@ -0,0 +1,8 @@ +apiVersion: kubeflow.org/v1beta1 +kind: Profile +metadata: + name: kubeflow-user-example-com +spec: + owner: + kind: User + name: user@example.com diff --git a/samples/v2/pre-requisites/test-secrets.yaml b/test_data/kubernetes/secrets/test-secrets.yaml similarity index 100% rename from samples/v2/pre-requisites/test-secrets.yaml rename to test_data/kubernetes/secrets/test-secrets.yaml diff --git a/test_data/sdk_compiled_pipelines/__init__.py b/test_data/sdk_compiled_pipelines/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/test_data/sdk_compiled_pipelines/invalid/empty_file.yaml b/test_data/sdk_compiled_pipelines/invalid/empty_file.yaml new file mode 100644 index 00000000000..4314f06ce4d --- /dev/null +++ b/test_data/sdk_compiled_pipelines/invalid/empty_file.yaml @@ -0,0 +1,2 @@ +# PIPELINE DEFINITION +# Name: echo diff --git a/test_data/sdk_compiled_pipelines/invalid/empty_zip.zip b/test_data/sdk_compiled_pipelines/invalid/empty_zip.zip new file mode 100644 index 00000000000..d9fcd7153e4 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/invalid/empty_zip.zip @@ -0,0 +1,2 @@ +# PIPELINE DEFINITION +# Name: echo \ No 
newline at end of file diff --git a/test_data/sdk_compiled_pipelines/invalid/invalid-zip.zip b/test_data/sdk_compiled_pipelines/invalid/invalid-zip.zip new file mode 100644 index 00000000000..1e4515fb0fa Binary files /dev/null and b/test_data/sdk_compiled_pipelines/invalid/invalid-zip.zip differ diff --git a/test_data/sdk_compiled_pipelines/invalid/invalid_yaml.yaml b/test_data/sdk_compiled_pipelines/invalid/invalid_yaml.yaml new file mode 100644 index 00000000000..f8f6096efd1 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/invalid/invalid_yaml.yaml @@ -0,0 +1,28 @@ +# PIPELINE DEFINITION +# Name: echo +components_1: + comp-echo: + executorLabel: exec-echo +deploymentSpec: + executors: + exec-echo: + container: + args: + - hello world + command: + - echo + image: public.ecr.aws/docker/library/python:3.12 +pipelineInfo: + name: echo +root: + dag: + tasks: + echo: + cachingOptions: + enableCache: true + componentRef: + name: comp-echo + taskInfo: + name: echo +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/invalid/no_name.yaml b/test_data/sdk_compiled_pipelines/invalid/no_name.yaml new file mode 100644 index 00000000000..9754513cb4b --- /dev/null +++ b/test_data/sdk_compiled_pipelines/invalid/no_name.yaml @@ -0,0 +1,28 @@ +# PIPELINE DEFINITION +# Name: echo +components: + comp-echo: + executorLabel: exec-echo +deploymentSpec: + executors: + exec-echo: + container: + args: + - hello world + command: + - echo + image: public.ecr.aws/docker/library/python:3.12 +pipelineInfo: + name: +root: + dag: + tasks: + echo: + cachingOptions: + enableCache: true + componentRef: + name: comp-echo + taskInfo: + name: echo +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/invalid/wrong_format.png b/test_data/sdk_compiled_pipelines/invalid/wrong_format.png new file mode 100644 index 00000000000..d9fcd7153e4 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/invalid/wrong_format.png @@ -0,0 +1,2 @@ +# PIPELINE DEFINITION +# Name: echo \ No newline at end of file diff --git a/test_data/sdk_compiled_pipelines/valid/__init__.py b/test_data/sdk_compiled_pipelines/valid/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/test_data/sdk_compiled_pipelines/valid/arguments-parameters.yaml b/test_data/sdk_compiled_pipelines/valid/arguments-parameters.yaml new file mode 100644 index 00000000000..8e99b9e155e --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/arguments-parameters.yaml @@ -0,0 +1,50 @@ +# PIPELINE DEFINITION +# Name: echo +# Inputs: +# param1: str [Default: 'hello'] +# param2: str +components: + comp-echo: + executorLabel: exec-echo + inputDefinitions: + parameters: + param1: + parameterType: STRING + param2: + parameterType: STRING +deploymentSpec: + executors: + exec-echo: + container: + args: + - '{{$.inputs.parameters[''param1'']}}-{{$.inputs.parameters[''param2'']}}' + command: + - echo + image: public.ecr.aws/docker/library/python:3.12 +pipelineInfo: + name: echo +root: + dag: + tasks: + echo: + cachingOptions: + enableCache: true + componentRef: + name: comp-echo + inputs: + parameters: + param1: + componentInputParameter: param1 + param2: + componentInputParameter: param2 + taskInfo: + name: echo + inputDefinitions: + parameters: + param1: + defaultValue: hello + parameterType: STRING + param2: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/arguments.pipeline.zip 
b/test_data/sdk_compiled_pipelines/valid/arguments.pipeline.zip new file mode 100644 index 00000000000..593e9cc54df Binary files /dev/null and b/test_data/sdk_compiled_pipelines/valid/arguments.pipeline.zip differ diff --git a/backend/test/v2/resources/arguments-parameters.py b/test_data/sdk_compiled_pipelines/valid/arguments_parameters.py similarity index 100% rename from backend/test/v2/resources/arguments-parameters.py rename to test_data/sdk_compiled_pipelines/valid/arguments_parameters.py diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_complex.py b/test_data/sdk_compiled_pipelines/valid/artifacts_complex.py similarity index 100% rename from sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_complex.py rename to test_data/sdk_compiled_pipelines/valid/artifacts_complex.py diff --git a/test_data/sdk_compiled_pipelines/valid/artifacts_complex.yaml b/test_data/sdk_compiled_pipelines/valid/artifacts_complex.yaml new file mode 100644 index 00000000000..cd17aefa59c --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/artifacts_complex.yaml @@ -0,0 +1,497 @@ +# PIPELINE DEFINITION +# Name: math-pipeline +# Inputs: +# threshold: int [Default: 2.0] +# Outputs: +# datasets: system.Dataset +# sum: system.Dataset +components: + comp-add: + executorLabel: exec-add + inputDefinitions: + artifacts: + in_datasets: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isArtifactList: true + outputDefinitions: + artifacts: + out_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-add-2: + executorLabel: exec-add-2 + inputDefinitions: + artifacts: + in_datasets: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isArtifactList: true + outputDefinitions: + artifacts: + out_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-add-two-ints: + executorLabel: exec-add-two-ints + inputDefinitions: + artifacts: + in_dataset1: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + in_dataset2: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + out_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-add-two-lists-of-datasets: + dag: + outputs: + artifacts: + Output: + artifactSelectors: + - outputArtifactKey: out_dataset + producerSubtask: add-two-ints + tasks: + add: + cachingOptions: + enableCache: true + componentRef: + name: comp-add + inputs: + artifacts: + in_datasets: + componentInputArtifact: in_datasets1 + taskInfo: + name: add + add-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-add-2 + inputs: + artifacts: + in_datasets: + componentInputArtifact: in_datasets2 + taskInfo: + name: add-2 + add-two-ints: + cachingOptions: + enableCache: true + componentRef: + name: comp-add-two-ints + dependentTasks: + - add + - add-2 + inputs: + artifacts: + in_dataset1: + taskOutputArtifact: + outputArtifactKey: out_dataset + producerTask: add + in_dataset2: + taskOutputArtifact: + outputArtifactKey: out_dataset + producerTask: add-2 + taskInfo: + name: add-two-ints + inputDefinitions: + artifacts: + in_datasets1: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isArtifactList: true + in_datasets2: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isArtifactList: true + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-condition-5: + dag: + outputs: 
+ artifacts: + pipelinechannel--double-2-out_dataset: + artifactSelectors: + - outputArtifactKey: out_dataset + producerSubtask: double-2 + tasks: + double-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-double-2 + inputs: + parameters: + num: + componentInputParameter: pipelinechannel--loop-item-param-3 + taskInfo: + name: double-2 + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-3: + parameterType: NUMBER_INTEGER + pipelinechannel--threshold: + parameterType: NUMBER_INTEGER + outputDefinitions: + artifacts: + pipelinechannel--double-2-out_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isArtifactList: true + comp-double: + executorLabel: exec-double + inputDefinitions: + parameters: + num: + parameterType: NUMBER_INTEGER + outputDefinitions: + artifacts: + out_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-double-2: + executorLabel: exec-double-2 + inputDefinitions: + parameters: + num: + parameterType: NUMBER_INTEGER + outputDefinitions: + artifacts: + out_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-for-loop-2: + dag: + outputs: + artifacts: + pipelinechannel--double-2-out_dataset: + artifactSelectors: + - outputArtifactKey: pipelinechannel--double-2-out_dataset + producerSubtask: for-loop-4 + pipelinechannel--double-out_dataset: + artifactSelectors: + - outputArtifactKey: out_dataset + producerSubtask: double + tasks: + double: + cachingOptions: + enableCache: true + componentRef: + name: comp-double + inputs: + parameters: + num: + componentInputParameter: pipelinechannel--loop-item-param-1 + taskInfo: + name: double + for-loop-4: + componentRef: + name: comp-for-loop-4 + inputs: + parameters: + pipelinechannel--threshold: + componentInputParameter: pipelinechannel--threshold + parameterIterator: + itemInput: pipelinechannel--loop-item-param-3 + items: + raw: '[1, 2, 3]' + taskInfo: + name: for-loop-4 + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-1: + parameterType: NUMBER_INTEGER + pipelinechannel--threshold: + parameterType: NUMBER_INTEGER + outputDefinitions: + artifacts: + pipelinechannel--double-2-out_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isArtifactList: true + pipelinechannel--double-out_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isArtifactList: true + comp-for-loop-4: + dag: + outputs: + artifacts: + pipelinechannel--double-2-out_dataset: + artifactSelectors: + - outputArtifactKey: pipelinechannel--double-2-out_dataset + producerSubtask: condition-5 + tasks: + condition-5: + componentRef: + name: comp-condition-5 + inputs: + parameters: + pipelinechannel--loop-item-param-3: + componentInputParameter: pipelinechannel--loop-item-param-3 + pipelinechannel--threshold: + componentInputParameter: pipelinechannel--threshold + taskInfo: + name: condition-5 + triggerPolicy: + condition: int(inputs.parameter_values['pipelinechannel--loop-item-param-3']) + >= int(inputs.parameter_values['pipelinechannel--threshold']) + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-3: + parameterType: NUMBER_INTEGER + pipelinechannel--threshold: + parameterType: NUMBER_INTEGER + outputDefinitions: + artifacts: + pipelinechannel--double-2-out_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isArtifactList: true +deploymentSpec: + executors: + exec-add: + container: + args: + - --executor_input + - '{{$}}' 
+ - --function_to_execute + - add + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef add(\n in_datasets: Input[List[Dataset]],\n out_dataset:\ + \ Output[Dataset],\n):\n nums = []\n for dataset in in_datasets:\n\ + \ with open(dataset.path) as f:\n nums.append(int(f.read()))\n\ + \ with open(out_dataset.path, 'w') as f:\n f.write(str(sum(nums)))\n\ + \n" + image: python:3.9 + exec-add-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - add + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef add(\n in_datasets: Input[List[Dataset]],\n out_dataset:\ + \ Output[Dataset],\n):\n nums = []\n for dataset in in_datasets:\n\ + \ with open(dataset.path) as f:\n nums.append(int(f.read()))\n\ + \ with open(out_dataset.path, 'w') as f:\n f.write(str(sum(nums)))\n\ + \n" + image: python:3.9 + exec-add-two-ints: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - add_two_ints + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef add_two_ints(\n in_dataset1: Input[Dataset],\n in_dataset2:\ + \ Input[Dataset],\n out_dataset: Output[Dataset],\n):\n with open(in_dataset1.path)\ + \ as f:\n in_dataset1 = int(f.read())\n\n with open(in_dataset2.path)\ + \ as f:\n in_dataset2 = int(f.read())\n\n with open(out_dataset.path,\ + \ 'w') as f:\n f.write(str(in_dataset1 + in_dataset2))\n\n" + image: python:3.9 + exec-double: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - double + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef double(\n num: int,\n out_dataset: Output[Dataset],\n):\n\ + \ with open(out_dataset.path, 'w') as f:\n f.write(str(2 * num))\n\ + \n" + image: python:3.9 + exec-double-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - double + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef double(\n num: int,\n out_dataset: Output[Dataset],\n):\n\ + \ with open(out_dataset.path, 'w') as f:\n f.write(str(2 * num))\n\ + \n" + image: python:3.9 +pipelineInfo: + name: math-pipeline +root: + dag: + outputs: + artifacts: + datasets: + artifactSelectors: + - outputArtifactKey: pipelinechannel--double-out_dataset + producerSubtask: for-loop-2 + sum: + artifactSelectors: + - outputArtifactKey: Output + producerSubtask: add-two-lists-of-datasets + tasks: + add-two-lists-of-datasets: + cachingOptions: + enableCache: true + componentRef: + name: comp-add-two-lists-of-datasets + dependentTasks: + - for-loop-2 + inputs: + artifacts: + in_datasets1: + taskOutputArtifact: + outputArtifactKey: pipelinechannel--double-out_dataset + producerTask: for-loop-2 + in_datasets2: + taskOutputArtifact: + outputArtifactKey: pipelinechannel--double-2-out_dataset + producerTask: for-loop-2 + taskInfo: + name: add-two-lists-of-datasets + for-loop-2: + componentRef: + name: comp-for-loop-2 + inputs: + parameters: + pipelinechannel--threshold: + componentInputParameter: threshold + parameterIterator: + itemInput: pipelinechannel--loop-item-param-1 + items: + raw: '[1, 2, 3]' + taskInfo: + name: for-loop-2 + inputDefinitions: + parameters: + threshold: + defaultValue: 2.0 + isOptional: true + parameterType: NUMBER_INTEGER + outputDefinitions: + artifacts: + datasets: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isArtifactList: true + sum: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_simple.py b/test_data/sdk_compiled_pipelines/valid/artifacts_simple.py similarity index 100% rename from sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_simple.py rename to test_data/sdk_compiled_pipelines/valid/artifacts_simple.py diff --git 
a/test_data/sdk_compiled_pipelines/valid/artifacts_simple.yaml b/test_data/sdk_compiled_pipelines/valid/artifacts_simple.yaml new file mode 100644 index 00000000000..b4ac1e08207 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/artifacts_simple.yaml @@ -0,0 +1,216 @@ +# PIPELINE DEFINITION +# Name: math-pipeline +# Outputs: +# Output: system.Dataset +components: + comp-add: + executorLabel: exec-add + inputDefinitions: + artifacts: + in_datasets: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isArtifactList: true + outputDefinitions: + artifacts: + out_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-add-container: + executorLabel: exec-add-container + inputDefinitions: + artifacts: + in_datasets: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isArtifactList: true + outputDefinitions: + artifacts: + out_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-double: + executorLabel: exec-double + inputDefinitions: + parameters: + num: + parameterType: NUMBER_INTEGER + outputDefinitions: + artifacts: + out_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-for-loop-2: + dag: + outputs: + artifacts: + pipelinechannel--double-out_dataset: + artifactSelectors: + - outputArtifactKey: out_dataset + producerSubtask: double + tasks: + double: + cachingOptions: + enableCache: true + componentRef: + name: comp-double + inputs: + parameters: + num: + componentInputParameter: pipelinechannel--loop-item-param-1 + taskInfo: + name: double + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-1: + parameterType: NUMBER_INTEGER + outputDefinitions: + artifacts: + pipelinechannel--double-out_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isArtifactList: true +deploymentSpec: + executors: + exec-add: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - add + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef add(in_datasets: Input[List[Dataset]], out_dataset: Output[Dataset]):\n\ + \ nums = []\n for dataset in in_datasets:\n with open(dataset.path)\ + \ as f:\n nums.append(int(f.read()))\n with open(out_dataset.path,\ + \ 'w') as f:\n f.write(str(sum(nums)))\n\n" + image: python:3.9 + exec-add-container: + container: + args: + - "\nimport argparse\nimport json\nimport os\n\ndef main(in_datasets, out_dataset_uri):\n\ + \ in_dicts = json.loads(in_datasets)\n uris = [d['uri'] for d in in_dicts]\n\ + \ total = 0\n for uri in uris:\n with open(uri.replace('gs://',\ + \ '/gcs/')) as f:\n total += int(f.read())\n\n outpath = out_dataset_uri.replace('gs://',\ + \ '/gcs/')\n os.makedirs(os.path.dirname(outpath), exist_ok=True)\n \ + \ with open(outpath, 'w') as f:\n f.write(str(total))\n\nparser\ + \ = argparse.ArgumentParser()\nparser.add_argument('in_datasets')\nparser.add_argument('out_dataset_uri')\n\ + args = parser.parse_args()\n\nmain(args.in_datasets, args.out_dataset_uri)\n" + - '{{$.inputs.artifacts[''in_datasets'']}}' + - '{{$.outputs.artifacts[''out_dataset''].uri}}' + command: + - python + - -c + image: python:3.9 + exec-double: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - double + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef double(num: int, out_dataset: Output[Dataset]):\n with open(out_dataset.path,\ + \ 'w') as f:\n f.write(str(2 * num))\n\n" + image: python:3.9 +pipelineInfo: + name: math-pipeline +root: + dag: + outputs: + artifacts: + Output: + artifactSelectors: + - outputArtifactKey: pipelinechannel--double-out_dataset + producerSubtask: for-loop-2 + tasks: + add: + cachingOptions: + enableCache: true + componentRef: + name: comp-add + dependentTasks: + - for-loop-2 + inputs: + artifacts: + in_datasets: + taskOutputArtifact: + outputArtifactKey: pipelinechannel--double-out_dataset + producerTask: for-loop-2 + taskInfo: + name: add + add-container: + cachingOptions: + enableCache: true + componentRef: + name: comp-add-container + dependentTasks: + - for-loop-2 + inputs: + artifacts: + in_datasets: + taskOutputArtifact: + outputArtifactKey: pipelinechannel--double-out_dataset + producerTask: for-loop-2 + taskInfo: + name: add-container + for-loop-2: + componentRef: + name: comp-for-loop-2 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-1 + items: + raw: '[1, 2, 3]' + taskInfo: + name: for-loop-2 + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isArtifactList: true +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/samples/v2/collected_artifacts.py b/test_data/sdk_compiled_pipelines/valid/collected_artifacts.py similarity index 100% rename from samples/v2/collected_artifacts.py rename to test_data/sdk_compiled_pipelines/valid/collected_artifacts.py diff --git a/test_data/sdk_compiled_pipelines/valid/collected_artifacts.yaml b/test_data/sdk_compiled_pipelines/valid/collected_artifacts.yaml new file mode 100644 index 00000000000..c8293e73bc0 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/collected_artifacts.yaml @@ -0,0 +1,676 @@ +# PIPELINE DEFINITION +# Name: collected-artifact-pipeline +components: + comp-collecting-artifacts: + dag: + outputs: + artifacts: + Output: + artifactSelectors: + - outputArtifactKey: pipelinechannel--read-single-dataset-generate-model-results + producerSubtask: for-loop-1 + tasks: + for-loop-1: + componentRef: + name: comp-for-loop-1 + dependentTasks: + - split-chars + - split-ids + inputs: + parameters: + pipelinechannel--split-chars-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: split-chars + pipelinechannel--split-ids-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: split-ids + parameterIterator: + itemInput: pipelinechannel--split-ids-Output-loop-item + items: + inputParameter: pipelinechannel--split-ids-Output + taskInfo: + name: for-loop-1 + split-chars: + cachingOptions: {} + componentRef: + name: comp-split-chars + inputs: + parameters: + model_ids: + componentInputParameter: model_chars + taskInfo: + name: split-chars + split-ids: + cachingOptions: {} + 
componentRef: + name: comp-split-ids + inputs: + parameters: + model_ids: + componentInputParameter: model_ids + taskInfo: + name: split-ids + inputDefinitions: + parameters: + model_chars: + defaultValue: '' + isOptional: true + parameterType: STRING + model_ids: + defaultValue: '' + isOptional: true + parameterType: STRING + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 + isArtifactList: true + comp-create-dataset: + executorLabel: exec-create-dataset + inputDefinitions: + parameters: + content: + parameterType: STRING + outputDefinitions: + artifacts: + data: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-create-file: + executorLabel: exec-create-file + inputDefinitions: + parameters: + content: + parameterType: STRING + outputDefinitions: + artifacts: + file: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-for-loop-1: + dag: + outputs: + artifacts: + pipelinechannel--read-single-dataset-generate-model-results: + artifactSelectors: + - outputArtifactKey: pipelinechannel--read-single-dataset-generate-model-results + producerSubtask: for-loop-2 + tasks: + create-file: + cachingOptions: {} + componentRef: + name: comp-create-file + inputs: + parameters: + content: + componentInputParameter: pipelinechannel--split-ids-Output-loop-item + taskInfo: + name: create-file + for-loop-2: + componentRef: + name: comp-for-loop-2 + inputs: + parameters: + pipelinechannel--split-chars-Output: + componentInputParameter: pipelinechannel--split-chars-Output + pipelinechannel--split-ids-Output-loop-item: + componentInputParameter: pipelinechannel--split-ids-Output-loop-item + parameterIterator: + itemInput: pipelinechannel--split-chars-Output-loop-item + items: + inputParameter: pipelinechannel--split-chars-Output + taskInfo: + name: for-loop-2 + read-datasets: + cachingOptions: {} + componentRef: + name: comp-read-datasets + dependentTasks: + - for-loop-2 + inputs: + artifacts: + data: + taskOutputArtifact: + outputArtifactKey: pipelinechannel--single-node-dag-Output + producerTask: for-loop-2 + taskInfo: + name: read-datasets + read-models: + cachingOptions: {} + componentRef: + name: comp-read-models + dependentTasks: + - for-loop-2 + inputs: + artifacts: + models: + taskOutputArtifact: + outputArtifactKey: pipelinechannel--read-single-dataset-generate-model-results + producerTask: for-loop-2 + taskInfo: + name: read-models + read-single-file: + cachingOptions: {} + componentRef: + name: comp-read-single-file + dependentTasks: + - create-file + inputs: + artifacts: + file: + taskOutputArtifact: + outputArtifactKey: file + producerTask: create-file + taskInfo: + name: read-single-file + inputDefinitions: + parameters: + pipelinechannel--split-chars-Output: + parameterType: LIST + pipelinechannel--split-ids-Output: + parameterType: LIST + pipelinechannel--split-ids-Output-loop-item: + parameterType: STRING + outputDefinitions: + artifacts: + pipelinechannel--read-single-dataset-generate-model-results: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 + isArtifactList: true + comp-for-loop-2: + dag: + outputs: + artifacts: + pipelinechannel--read-single-dataset-generate-model-results: + artifactSelectors: + - outputArtifactKey: results + producerSubtask: read-single-dataset-generate-model + pipelinechannel--single-node-dag-Output: + artifactSelectors: + - outputArtifactKey: Output + producerSubtask: single-node-dag + tasks: + read-single-dataset-generate-model: + 
cachingOptions: {} + componentRef: + name: comp-read-single-dataset-generate-model + dependentTasks: + - single-node-dag + inputs: + artifacts: + data: + taskOutputArtifact: + outputArtifactKey: Output + producerTask: single-node-dag + parameters: + id: + componentInputParameter: pipelinechannel--split-ids-Output-loop-item + taskInfo: + name: read-single-dataset-generate-model + single-node-dag: + cachingOptions: {} + componentRef: + name: comp-single-node-dag + inputs: + parameters: + char: + componentInputParameter: pipelinechannel--split-chars-Output-loop-item + taskInfo: + name: single-node-dag + inputDefinitions: + parameters: + pipelinechannel--split-chars-Output: + parameterType: LIST + pipelinechannel--split-chars-Output-loop-item: + parameterType: STRING + pipelinechannel--split-ids-Output-loop-item: + parameterType: STRING + outputDefinitions: + artifacts: + pipelinechannel--read-single-dataset-generate-model-results: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 + isArtifactList: true + pipelinechannel--single-node-dag-Output: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isArtifactList: true + comp-read-datasets: + executorLabel: exec-read-datasets + inputDefinitions: + artifacts: + data: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isArtifactList: true + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-read-models: + executorLabel: exec-read-models + inputDefinitions: + artifacts: + models: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 + isArtifactList: true + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-read-models-2: + executorLabel: exec-read-models-2 + inputDefinitions: + artifacts: + models: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 + isArtifactList: true + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-read-single-dataset-generate-model: + executorLabel: exec-read-single-dataset-generate-model + inputDefinitions: + artifacts: + data: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + parameters: + id: + parameterType: STRING + outputDefinitions: + artifacts: + results: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 + comp-read-single-file: + executorLabel: exec-read-single-file + inputDefinitions: + artifacts: + file: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-single-node-dag: + dag: + outputs: + artifacts: + Output: + artifactSelectors: + - outputArtifactKey: data + producerSubtask: create-dataset + tasks: + create-dataset: + cachingOptions: {} + componentRef: + name: comp-create-dataset + inputs: + parameters: + content: + componentInputParameter: char + taskInfo: + name: create-dataset + inputDefinitions: + parameters: + char: + parameterType: STRING + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-split-chars: + executorLabel: exec-split-chars + inputDefinitions: + parameters: + model_ids: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: LIST + comp-split-ids: + executorLabel: exec-split-ids + inputDefinitions: + parameters: + model_ids: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: LIST +deploymentSpec: + executors: + exec-create-dataset: + container: + args: + - --executor_input + - 
'{{$}}' + - --function_to_execute + - create_dataset + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef create_dataset(data: Output[Dataset], content: str):\n print(f'Creating\ + \ file with content: {content}')\n with open(data.path, 'w') as f:\n\ + \ f.write(content)\n\n" + image: python:3.9 + exec-create-file: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - create_file + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef create_file(file: Output[Artifact], content: str):\n print(f'Creating\ + \ file with content: {content}')\n with open(file.path, 'w') as f:\n\ + \ f.write(content)\n\n" + image: python:3.9 + exec-read-datasets: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - read_datasets + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef read_datasets(data: List[Dataset]) -> str:\n for d in data:\n\ + \ print(f'Reading dataset {d.name} file: {d.path}')\n with\ + \ open(d.path, 'r') as f:\n print(f.read())\n\n return 'files\ + \ read'\n\n" + image: python:3.9 + exec-read-models: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - read_models + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef read_models(models: List[Model],) -> str:\n for m in models:\n\ + \ print(f'Reading model {m.name} file: {m.path}')\n with open(m.path,\ + \ 'r') as f:\n info = f.read()\n print(f\"Model raw\ + \ data: {info}\")\n print(f\"Model metadata: {m.metadata}\")\n\ + \ return 'models read'\n\n" + image: python:3.9 + exec-read-models-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - read_models + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef read_models(models: List[Model],) -> str:\n for m in models:\n\ + \ print(f'Reading model {m.name} file: {m.path}')\n with open(m.path,\ + \ 'r') as f:\n info = f.read()\n print(f\"Model raw\ + \ data: {info}\")\n print(f\"Model metadata: {m.metadata}\")\n\ + \ return 'models read'\n\n" + image: python:3.9 + exec-read-single-dataset-generate-model: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - read_single_dataset_generate_model + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef read_single_dataset_generate_model(data: Dataset, id: str, results:Output[Model]):\n\ + \ print(f'Reading file: {data.path}')\n with open(data.path, 'r')\ + \ as f:\n info = f.read()\n with open(results.path, 'w') as\ + \ f2:\n f2.write(f\"{info}-{id}\")\n results.metadata['model']\ + \ = info\n results.metadata['model_name'] = f\"model-artifact-inner-iteration-{info}-{id}\"\ + \n\n" + image: python:3.9 + exec-read-single-file: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - read_single_file + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef read_single_file(file: Artifact) -> str:\n print(f'Reading\ + \ file: {file.path}')\n with open(file.path, 'r') as f:\n print(f.read())\n\ + \n return file.uri\n\n" + image: python:3.9 + exec-split-chars: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - split_chars + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef split_chars(model_ids: str) -> list:\n return model_ids.split(',')\n\ + \n" + image: python:3.9 + exec-split-ids: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - split_ids + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef split_ids(model_ids: str) -> list:\n return model_ids.split(',')\n\ + \n" + image: python:3.9 +pipelineInfo: + name: collected-artifact-pipeline +root: + dag: + tasks: + collecting-artifacts: + cachingOptions: {} + componentRef: + name: comp-collecting-artifacts + inputs: + parameters: + model_chars: + runtimeValue: + constant: x,y,z + model_ids: + runtimeValue: + constant: s1,s2,s3 + taskInfo: + name: collecting-artifacts + read-models: + cachingOptions: {} + componentRef: + name: comp-read-models-2 + dependentTasks: + - collecting-artifacts + inputs: + artifacts: + models: + taskOutputArtifact: + outputArtifactKey: Output + producerTask: collecting-artifacts + taskInfo: + name: read-models +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/components/component_with_metadata_fields.py b/test_data/sdk_compiled_pipelines/valid/component_with_metadata_fields.py similarity index 100% rename from sdk/python/test_data/components/component_with_metadata_fields.py rename to test_data/sdk_compiled_pipelines/valid/component_with_metadata_fields.py diff --git a/test_data/sdk_compiled_pipelines/valid/component_with_metadata_fields.yaml b/test_data/sdk_compiled_pipelines/valid/component_with_metadata_fields.yaml new file mode 100644 index 00000000000..6fc7319f7bc --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/component_with_metadata_fields.yaml @@ -0,0 +1,129 @@ +# PIPELINE DEFINITION +# Name: dataset-joiner +# Description: Concatenate dataset_a and dataset_b. +# Also returns the concatenated string. +# Inputs: +# dataset_a: system.Dataset +# dataset_b: system.Dataset +# Outputs: +# Output: str +# out_dataset: system.Dataset +components: + comp-dataset-joiner: + executorLabel: exec-dataset-joiner + inputDefinitions: + artifacts: + dataset_a: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + description: First dataset. + dataset_b: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + description: Second dataset. + outputDefinitions: + artifacts: + out_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + description: The concatenated dataset. + parameters: + Output: + description: The concatenated string. + parameterType: STRING +deploymentSpec: + executors: + exec-dataset-joiner: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - dataset_joiner + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef dataset_joiner(\n dataset_a: Input[Dataset],\n dataset_b:\ + \ Input[Dataset],\n out_dataset: Output[Dataset],\n) -> str:\n \"\"\ + \"Concatenate dataset_a and dataset_b.\n\n Also returns the concatenated\ + \ string.\n\n Args:\n dataset_a: First dataset.\n dataset_b:\ + \ Second dataset.\n\n Returns:\n out_dataset: The concatenated\ + \ dataset.\n Output: The concatenated string.\n \"\"\"\n with\ + \ open(dataset_a.path) as f:\n content_a = f.read()\n\n with open(dataset_b.path)\ + \ as f:\n content_b = f.read()\n\n concatenated_string = content_a\ + \ + content_b\n with open(out_dataset.path, 'w') as f:\n f.write(concatenated_string)\n\ + \n return concatenated_string\n\n" + image: python:3.9 +pipelineInfo: + name: dataset-joiner +root: + dag: + outputs: + artifacts: + out_dataset: + artifactSelectors: + - outputArtifactKey: out_dataset + producerSubtask: dataset-joiner + parameters: + Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: dataset-joiner + tasks: + dataset-joiner: + cachingOptions: + enableCache: true + componentRef: + name: comp-dataset-joiner + inputs: + artifacts: + dataset_a: + componentInputArtifact: dataset_a + dataset_b: + componentInputArtifact: dataset_b + taskInfo: + name: dataset-joiner + inputDefinitions: + artifacts: + dataset_a: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + description: First dataset. + dataset_b: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + description: Second dataset. + outputDefinitions: + artifacts: + out_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + description: The concatenated dataset. + parameters: + Output: + description: The concatenated string. 
+ parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/components/component_with_task_final_status.py b/test_data/sdk_compiled_pipelines/valid/component_with_task_final_status.py similarity index 100% rename from sdk/python/test_data/components/component_with_task_final_status.py rename to test_data/sdk_compiled_pipelines/valid/component_with_task_final_status.py diff --git a/test_data/sdk_compiled_pipelines/valid/component_with_task_final_status_GH-12033.yaml b/test_data/sdk_compiled_pipelines/valid/component_with_task_final_status_GH-12033.yaml new file mode 100644 index 00000000000..b0e830bea66 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/component_with_task_final_status_GH-12033.yaml @@ -0,0 +1,66 @@ +# PIPELINE DEFINITION +# Name: exit-comp +# Inputs: +# status: PipelineTaskFinalStatus +components: + comp-exit-comp: + executorLabel: exec-exit-comp + inputDefinitions: + parameters: + status: + isOptional: true + parameterType: TASK_FINAL_STATUS +deploymentSpec: + executors: + exec-exit-comp: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - exit_comp + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef exit_comp(status: dsl.PipelineTaskFinalStatus):\n print(status)\n\ + \n" + image: python:3.9 +pipelineInfo: + name: exit-comp +root: + dag: + tasks: + exit-comp: + cachingOptions: + enableCache: true + componentRef: + name: comp-exit-comp + inputs: + parameters: + status: + componentInputParameter: status + taskInfo: + name: exit-comp + inputDefinitions: + parameters: + status: + isOptional: true + parameterType: TASK_FINAL_STATUS +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/components_with_optional_artifacts.py b/test_data/sdk_compiled_pipelines/valid/components_with_optional_artifacts.py similarity index 100% rename from sdk/python/test_data/pipelines/components_with_optional_artifacts.py rename to test_data/sdk_compiled_pipelines/valid/components_with_optional_artifacts.py diff --git a/test_data/sdk_compiled_pipelines/valid/components_with_optional_artifacts.yaml b/test_data/sdk_compiled_pipelines/valid/components_with_optional_artifacts.yaml new file mode 100644 index 00000000000..23213d69662 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/components_with_optional_artifacts.yaml @@ -0,0 +1,244 @@ +# PIPELINE DEFINITION +# Name: optional-artifact-pipeline +# Inputs: +# dataset1: system.Dataset +components: + comp-custom-artifact-printer: + executorLabel: exec-custom-artifact-printer + inputDefinitions: + artifacts: + artifact: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + isOptional: true + comp-custom-artifact-printer-2: + executorLabel: exec-custom-artifact-printer-2 + inputDefinitions: + artifacts: + artifact: + artifactType: + schemaTitle: system.Artifact + 
schemaVersion: 0.0.1 + isOptional: true + comp-importer: + executorLabel: exec-importer + inputDefinitions: + parameters: + uri: + parameterType: STRING + outputDefinitions: + artifacts: + artifact: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-inner-pipeline: + dag: + tasks: + python-artifact-printer: + cachingOptions: + enableCache: true + componentRef: + name: comp-python-artifact-printer + inputs: + artifacts: + artifact: + componentInputArtifact: dataset + taskInfo: + name: python-artifact-printer + inputDefinitions: + artifacts: + dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isOptional: true + comp-inner-pipeline-2: + dag: + tasks: + python-artifact-printer: + cachingOptions: + enableCache: true + componentRef: + name: comp-python-artifact-printer-2 + inputs: + artifacts: + artifact: + componentInputArtifact: dataset + taskInfo: + name: python-artifact-printer + inputDefinitions: + artifacts: + dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isOptional: true + comp-python-artifact-printer: + executorLabel: exec-python-artifact-printer + inputDefinitions: + artifacts: + artifact: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + isOptional: true + comp-python-artifact-printer-2: + executorLabel: exec-python-artifact-printer-2 + inputDefinitions: + artifacts: + artifact: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + isOptional: true +deploymentSpec: + executors: + exec-custom-artifact-printer: + container: + command: + - '{"IfPresent": {"InputName": "artifact", "Then": ["echo", "{{$.inputs.artifacts[''artifact''].uri}}"], + "Else": ["echo", "No artifact provided!"]}}' + image: alpine + exec-custom-artifact-printer-2: + container: + command: + - '{"IfPresent": {"InputName": "artifact", "Then": ["echo", "{{$.inputs.artifacts[''artifact''].uri}}"], + "Else": ["echo", "No artifact provided!"]}}' + image: alpine + exec-importer: + importer: + artifactUri: + constant: gs://ml-pipeline-playground/shakespeare1.txt + typeSchema: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + exec-python-artifact-printer: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - python_artifact_printer + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef python_artifact_printer(artifact: Optional[Input[Artifact]] =\ + \ None):\n if artifact is not None:\n print(artifact.name)\n \ + \ print(artifact.uri)\n print(artifact.metadata)\n else:\n\ + \ print('No artifact provided!')\n\n" + image: python:3.9 + exec-python-artifact-printer-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - python_artifact_printer + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef python_artifact_printer(artifact: Optional[Input[Artifact]] =\ + \ None):\n if artifact is not None:\n print(artifact.name)\n \ + \ print(artifact.uri)\n print(artifact.metadata)\n else:\n\ + \ print('No artifact provided!')\n\n" + image: python:3.9 +pipelineInfo: + name: optional-artifact-pipeline +root: + dag: + tasks: + custom-artifact-printer: + cachingOptions: + enableCache: true + componentRef: + name: comp-custom-artifact-printer + inputs: + artifacts: + artifact: + componentInputArtifact: dataset1 + taskInfo: + name: custom-artifact-printer + custom-artifact-printer-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-custom-artifact-printer-2 + taskInfo: + name: custom-artifact-printer-2 + importer: + cachingOptions: + enableCache: true + componentRef: + name: comp-importer + inputs: + parameters: + uri: + runtimeValue: + constant: gs://ml-pipeline-playground/shakespeare1.txt + taskInfo: + name: importer + inner-pipeline: + cachingOptions: + enableCache: true + componentRef: + name: comp-inner-pipeline + dependentTasks: + - importer + inputs: + artifacts: + dataset: + taskOutputArtifact: + outputArtifactKey: artifact + producerTask: importer + taskInfo: + name: inner-pipeline + inner-pipeline-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-inner-pipeline-2 + taskInfo: + name: inner-pipeline-2 + inputDefinitions: + artifacts: + dataset1: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isOptional: true +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/conditional_producer_and_consumers.py b/test_data/sdk_compiled_pipelines/valid/conditional_producer_and_consumers.py similarity index 100% rename from sdk/python/test_data/pipelines/parallelfor_fan_in/conditional_producer_and_consumers.py rename to test_data/sdk_compiled_pipelines/valid/conditional_producer_and_consumers.py diff --git a/test_data/sdk_compiled_pipelines/valid/conditional_producer_and_consumers.yaml b/test_data/sdk_compiled_pipelines/valid/conditional_producer_and_consumers.yaml new file mode 100644 index 00000000000..693f163dd5b --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/conditional_producer_and_consumers.yaml @@ -0,0 +1,232 @@ +# PIPELINE DEFINITION +# Name: math-pipeline +# Inputs: +# threshold: int [Default: 2.0] +# Outputs: +# Output: list +components: + comp-add: + executorLabel: exec-add + inputDefinitions: + parameters: + nums: + parameterType: LIST + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-condition-3: + dag: + outputs: + parameters: + pipelinechannel--double-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: double + tasks: + double: + cachingOptions: + enableCache: true + componentRef: + name: comp-double + inputs: + parameters: + num: + 
componentInputParameter: pipelinechannel--loop-item-param-1 + taskInfo: + name: double + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-1: + parameterType: NUMBER_INTEGER + pipelinechannel--threshold: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + pipelinechannel--double-Output: + parameterType: LIST + comp-condition-4: + dag: + outputs: + parameters: + pipelinechannel--add-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: add + tasks: + add: + cachingOptions: + enableCache: true + componentRef: + name: comp-add + inputs: + parameters: + nums: + componentInputParameter: pipelinechannel--for-loop-2-pipelinechannel--double-Output + taskInfo: + name: add + inputDefinitions: + parameters: + pipelinechannel--for-loop-2-pipelinechannel--double-Output: + parameterType: LIST + pipelinechannel--threshold: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + pipelinechannel--add-Output: + parameterType: LIST + comp-double: + executorLabel: exec-double + inputDefinitions: + parameters: + num: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-for-loop-2: + dag: + outputs: + parameters: + pipelinechannel--double-Output: + valueFromParameter: + outputParameterKey: pipelinechannel--double-Output + producerSubtask: condition-3 + tasks: + condition-3: + componentRef: + name: comp-condition-3 + inputs: + parameters: + pipelinechannel--loop-item-param-1: + componentInputParameter: pipelinechannel--loop-item-param-1 + pipelinechannel--threshold: + componentInputParameter: pipelinechannel--threshold + taskInfo: + name: condition-3 + triggerPolicy: + condition: int(inputs.parameter_values['pipelinechannel--loop-item-param-1']) + >= int(inputs.parameter_values['pipelinechannel--threshold']) + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-1: + parameterType: NUMBER_INTEGER + pipelinechannel--threshold: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + pipelinechannel--double-Output: + parameterType: LIST +deploymentSpec: + executors: + exec-add: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - add + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef add(nums: List[int]) -> int:\n return sum(nums)\n\n" + image: python:3.9 + exec-double: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - double + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" + image: python:3.9 +pipelineInfo: + name: math-pipeline +root: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: pipelinechannel--add-Output + producerSubtask: condition-4 + tasks: + condition-4: + componentRef: + name: comp-condition-4 + dependentTasks: + - for-loop-2 + inputs: + parameters: + pipelinechannel--for-loop-2-pipelinechannel--double-Output: + taskOutputParameter: + outputParameterKey: pipelinechannel--double-Output + producerTask: for-loop-2 + pipelinechannel--threshold: + componentInputParameter: threshold + taskInfo: + name: condition-4 + triggerPolicy: + condition: int(inputs.parameter_values['pipelinechannel--threshold']) == + 2 + for-loop-2: + componentRef: + name: comp-for-loop-2 + inputs: + parameters: + pipelinechannel--threshold: + componentInputParameter: threshold + parameterIterator: + itemInput: pipelinechannel--loop-item-param-1 + items: + raw: '[1, 2, 3]' + taskInfo: + name: for-loop-2 + inputDefinitions: + parameters: + threshold: + defaultValue: 2.0 + isOptional: true + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: LIST +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/components/container_io.py b/test_data/sdk_compiled_pipelines/valid/container_io.py similarity index 100% rename from sdk/python/test_data/components/container_io.py rename to test_data/sdk_compiled_pipelines/valid/container_io.py diff --git a/test_data/sdk_compiled_pipelines/valid/container_io.yaml b/test_data/sdk_compiled_pipelines/valid/container_io.yaml new file mode 100644 index 00000000000..2f0e6a2cd6c --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/container_io.yaml @@ -0,0 +1,60 @@ +# PIPELINE DEFINITION +# Name: container-io +# Inputs: +# text: str +# Outputs: +# output_path: str +components: + comp-container-io: + executorLabel: exec-container-io + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + parameters: + output_path: + parameterType: STRING +deploymentSpec: + executors: + exec-container-io: + container: + args: + - --output_path + - '{{$.outputs.parameters[''output_path''].output_file}}' + command: + - my_program + - '{{$.inputs.parameters[''text'']}}' + image: python:3.9 +pipelineInfo: + name: container-io +root: + dag: + outputs: + parameters: + output_path: + valueFromParameter: + outputParameterKey: output_path + producerSubtask: container-io + tasks: + container-io: + cachingOptions: + enableCache: true + componentRef: + name: comp-container-io + inputs: + parameters: + text: + componentInputParameter: text + taskInfo: + name: container-io + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + parameters: + output_path: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: 
kfp-2.13.0 diff --git a/sdk/python/test_data/components/container_with_artifact_output.py b/test_data/sdk_compiled_pipelines/valid/container_with_artifact_output.py similarity index 100% rename from sdk/python/test_data/components/container_with_artifact_output.py rename to test_data/sdk_compiled_pipelines/valid/container_with_artifact_output.py diff --git a/test_data/sdk_compiled_pipelines/valid/container_with_artifact_output.yaml b/test_data/sdk_compiled_pipelines/valid/container_with_artifact_output.yaml new file mode 100644 index 00000000000..3b34674a878 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/container_with_artifact_output.yaml @@ -0,0 +1,82 @@ +# PIPELINE DEFINITION +# Name: container-with-artifact-output +# Inputs: +# num_epochs: int +# Outputs: +# model: system.Model +# model_config_path: str +components: + comp-container-with-artifact-output: + executorLabel: exec-container-with-artifact-output + inputDefinitions: + parameters: + num_epochs: + parameterType: NUMBER_INTEGER + outputDefinitions: + artifacts: + model: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 + parameters: + model_config_path: + parameterType: STRING +deploymentSpec: + executors: + exec-container-with-artifact-output: + container: + args: + - --epochs + - '{{$.inputs.parameters[''num_epochs'']}}' + - --model_path + - '{{$.outputs.artifacts[''model''].uri}}' + - --model_metadata + - '{{$.outputs.artifacts[''model''].metadata}}' + - --model_config_path + - '{{$.outputs.parameters[''model_config_path''].output_file}}' + command: + - sh + - run.sh + image: gcr.io/my-image +pipelineInfo: + name: container-with-artifact-output +root: + dag: + outputs: + artifacts: + model: + artifactSelectors: + - outputArtifactKey: model + producerSubtask: container-with-artifact-output + parameters: + model_config_path: + valueFromParameter: + outputParameterKey: model_config_path + producerSubtask: container-with-artifact-output + tasks: + container-with-artifact-output: + cachingOptions: + enableCache: true + componentRef: + name: comp-container-with-artifact-output + inputs: + parameters: + num_epochs: + componentInputParameter: num_epochs + taskInfo: + name: container-with-artifact-output + inputDefinitions: + parameters: + num_epochs: + parameterType: NUMBER_INTEGER + outputDefinitions: + artifacts: + model: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 + parameters: + model_config_path: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/components/container_with_concat_placeholder.py b/test_data/sdk_compiled_pipelines/valid/container_with_concat_placeholder.py similarity index 100% rename from sdk/python/test_data/components/container_with_concat_placeholder.py rename to test_data/sdk_compiled_pipelines/valid/container_with_concat_placeholder.py diff --git a/test_data/sdk_compiled_pipelines/valid/container_with_concat_placeholder.yaml b/test_data/sdk_compiled_pipelines/valid/container_with_concat_placeholder.yaml new file mode 100644 index 00000000000..3e3258479de --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/container_with_concat_placeholder.yaml @@ -0,0 +1,76 @@ +# PIPELINE DEFINITION +# Name: container-with-concat-placeholder +# Inputs: +# text1: str +# Outputs: +# output_path: str +# text2: system.Dataset +components: + comp-container-with-concat-placeholder: + executorLabel: exec-container-with-concat-placeholder + inputDefinitions: + parameters: + text1: + parameterType: STRING + 
outputDefinitions: + artifacts: + text2: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + parameters: + output_path: + parameterType: STRING +deploymentSpec: + executors: + exec-container-with-concat-placeholder: + container: + args: + - --output_path + - '{{$.outputs.parameters[''output_path''].output_file}}' + command: + - my_program + - '{"Concat": ["prefix-", "{{$.inputs.parameters[''text1'']}}", "{{$.outputs.artifacts[''text2''].uri}}"]}' + image: python:3.9 +pipelineInfo: + name: container-with-concat-placeholder +root: + dag: + outputs: + artifacts: + text2: + artifactSelectors: + - outputArtifactKey: text2 + producerSubtask: container-with-concat-placeholder + parameters: + output_path: + valueFromParameter: + outputParameterKey: output_path + producerSubtask: container-with-concat-placeholder + tasks: + container-with-concat-placeholder: + cachingOptions: + enableCache: true + componentRef: + name: comp-container-with-concat-placeholder + inputs: + parameters: + text1: + componentInputParameter: text1 + taskInfo: + name: container-with-concat-placeholder + inputDefinitions: + parameters: + text1: + parameterType: STRING + outputDefinitions: + artifacts: + text2: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + parameters: + output_path: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/components/container_with_if_placeholder.py b/test_data/sdk_compiled_pipelines/valid/container_with_if_placeholder.py similarity index 100% rename from sdk/python/test_data/components/container_with_if_placeholder.py rename to test_data/sdk_compiled_pipelines/valid/container_with_if_placeholder.py diff --git a/test_data/sdk_compiled_pipelines/valid/container_with_if_placeholder.yaml b/test_data/sdk_compiled_pipelines/valid/container_with_if_placeholder.yaml new file mode 100644 index 00000000000..5523d0ff473 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/container_with_if_placeholder.yaml @@ -0,0 +1,84 @@ +# PIPELINE DEFINITION +# Name: container-with-if-placeholder +# Inputs: +# optional_input: str [Default: 'default'] +# Outputs: +# dataset: system.Dataset +# output_path: str +components: + comp-container-with-if-placeholder: + executorLabel: exec-container-with-if-placeholder + inputDefinitions: + parameters: + optional_input: + defaultValue: default + isOptional: true + parameterType: STRING + outputDefinitions: + artifacts: + dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + parameters: + output_path: + parameterType: STRING +deploymentSpec: + executors: + exec-container-with-if-placeholder: + container: + args: + - --output_path + - '{{$.outputs.parameters[''output_path''].output_file}}' + command: + - my_program + - '{"IfPresent": {"InputName": "optional_input", "Then": ["{{$.inputs.parameters[''optional_input'']}}"], + "Else": ["bye"]}}' + - --dataset + - '{"IfPresent": {"InputName": "optional_input", "Then": ["{{$.outputs.artifacts[''dataset''].uri}}"], + "Else": ["bye"]}}' + image: python:3.9 +pipelineInfo: + name: container-with-if-placeholder +root: + dag: + outputs: + artifacts: + dataset: + artifactSelectors: + - outputArtifactKey: dataset + producerSubtask: container-with-if-placeholder + parameters: + output_path: + valueFromParameter: + outputParameterKey: output_path + producerSubtask: container-with-if-placeholder + tasks: + container-with-if-placeholder: + cachingOptions: + enableCache: true + componentRef: + name: 
comp-container-with-if-placeholder + inputs: + parameters: + optional_input: + componentInputParameter: optional_input + taskInfo: + name: container-with-if-placeholder + inputDefinitions: + parameters: + optional_input: + defaultValue: default + isOptional: true + parameterType: STRING + outputDefinitions: + artifacts: + dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + parameters: + output_path: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/sdk/python/test_data/components/container_with_placeholder_in_fstring.py b/test_data/sdk_compiled_pipelines/valid/container_with_placeholder_in_fstring.py similarity index 100% rename from sdk/python/test_data/components/container_with_placeholder_in_fstring.py rename to test_data/sdk_compiled_pipelines/valid/container_with_placeholder_in_fstring.py diff --git a/test_data/sdk_compiled_pipelines/valid/container_with_placeholder_in_fstring.yaml b/test_data/sdk_compiled_pipelines/valid/container_with_placeholder_in_fstring.yaml new file mode 100644 index 00000000000..ac17854c778 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/container_with_placeholder_in_fstring.yaml @@ -0,0 +1,66 @@ +# PIPELINE DEFINITION +# Name: container-with-placeholder-in-fstring +# Inputs: +# text1: str [Default: 'text!'] +# Outputs: +# output_artifact: system.Artifact +components: + comp-container-with-placeholder-in-fstring: + executorLabel: exec-container-with-placeholder-in-fstring + inputDefinitions: + parameters: + text1: + defaultValue: text! + isOptional: true + parameterType: STRING + outputDefinitions: + artifacts: + output_artifact: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-container-with-placeholder-in-fstring: + container: + command: + - my_program + - prefix-{{$.inputs.parameters['text1']}} + - '{{$.outputs.artifacts[''output_artifact''].uri}}/0' + image: python:3.9 +pipelineInfo: + name: container-with-placeholder-in-fstring +root: + dag: + outputs: + artifacts: + output_artifact: + artifactSelectors: + - outputArtifactKey: output_artifact + producerSubtask: container-with-placeholder-in-fstring + tasks: + container-with-placeholder-in-fstring: + cachingOptions: + enableCache: true + componentRef: + name: comp-container-with-placeholder-in-fstring + inputs: + parameters: + text1: + componentInputParameter: text1 + taskInfo: + name: container-with-placeholder-in-fstring + inputDefinitions: + parameters: + text1: + defaultValue: text! 
+ isOptional: true + parameterType: STRING + outputDefinitions: + artifacts: + output_artifact: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/components/containerized_python_component.py b/test_data/sdk_compiled_pipelines/valid/containerized_python_component.py similarity index 100% rename from sdk/python/test_data/components/containerized_python_component.py rename to test_data/sdk_compiled_pipelines/valid/containerized_python_component.py diff --git a/test_data/sdk_compiled_pipelines/valid/containerized_python_component.yaml b/test_data/sdk_compiled_pipelines/valid/containerized_python_component.yaml new file mode 100644 index 00000000000..959336b74e8 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/containerized_python_component.yaml @@ -0,0 +1,70 @@ +# PIPELINE DEFINITION +# Name: concat-message +# Inputs: +# message1: str +# message2: str +# Outputs: +# Output: str +components: + comp-concat-message: + executorLabel: exec-concat-message + inputDefinitions: + parameters: + message1: + parameterType: STRING + message2: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING +deploymentSpec: + executors: + exec-concat-message: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - concat_message + command: + - python3 + - -m + - kfp.dsl.executor_main + image: kfp-image +pipelineInfo: + name: concat-message +root: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: concat-message + tasks: + concat-message: + cachingOptions: + enableCache: true + componentRef: + name: comp-concat-message + inputs: + parameters: + message1: + componentInputParameter: message1 + message2: + componentInputParameter: message2 + taskInfo: + name: concat-message + inputDefinitions: + parameters: + message1: + parameterType: STRING + message2: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/create_pod_metadata_complex.py b/test_data/sdk_compiled_pipelines/valid/create_pod_metadata_complex.py new file mode 100644 index 00000000000..509be6a5ff3 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/create_pod_metadata_complex.py @@ -0,0 +1,105 @@ +from kfp import dsl, compiler +from kfp.kubernetes import add_pod_annotation +from kfp.kubernetes import add_pod_label +from kfp.kubernetes import use_field_path_as_env + + +@dsl.component +def validate_pod_metadata(annotation_path_1: str = None, + annotation_exp_val_1: str = None, + annotation_path_2: str = None, + annotation_exp_val_2: str = None, + label_path_1: str = None, + label_exp_val_1: str = None, + label_path_2: str = None, + label_exp_val_2: str = None) -> bool: + import os + + validated_metadata_count = 0 + if annotation_path_1 is not None: + annotation_val_1 = os.getenv(annotation_path_1) + if annotation_val_1 is not None and annotation_val_1 != annotation_exp_val_1: + raise ValueError( + f"Pod annotation is {annotation_val_1} but is supposed to be {annotation_exp_val_1}." + ) + validated_metadata_count+=1 + if annotation_path_2 is not None: + annotation_val_2 = os.getenv(annotation_path_2) + if annotation_val_2 is not None and annotation_val_2 != annotation_exp_val_2: + raise ValueError( + f"Pod annotation is {annotation_val_2} but is supposed to be {annotation_exp_val_2}." 
+ ) + validated_metadata_count+=1 + if label_path_1 is not None: + label_val_1 = os.getenv(label_path_1) + if label_val_1 is not None and label_val_1 != label_exp_val_1: + raise ValueError( + f"Pod label is {label_val_1} but is supposed to be {label_exp_val_1}." + ) + validated_metadata_count+=1 + if label_path_2 is not None: + label_val_2 = os.getenv(label_path_2) + if label_val_2 is not None and label_val_2 != label_exp_val_2: + raise ValueError( + f"Pod label is {label_val_2} but is supposed to be {label_exp_val_2}." + ) + validated_metadata_count+=1 + if validated_metadata_count <1: + raise RuntimeError(f"No pod metadata found to validate.") + return True + +@dsl.component +def validate_no_pod_metadata(annotation_path: str, label_path: str) -> bool: + import os + annotation = os.getenv(annotation_path) + if annotation != "": + raise ValueError( + f"Pod annotation is {annotation} but is supposed to be None." + ) + label = os.getenv(label_path) + if label != "": + raise ValueError( + f"Pod label is {label} but is supposed to be None." + ) + return True + + +@dsl.pipeline +def pipeline_with_pod_metadata(): + # task_a and task_b are set with different metadata and the separate pods representing each component should have + # separate, different metadata. + task_a = validate_pod_metadata( + annotation_path_1="POD_TASK_ANNOTATION", + annotation_exp_val_1='annotation', + label_path_1="POD_TASK_LABEL_1", + label_exp_val_1='label-1', + label_path_2="POD_TASK_LABEL_2", + label_exp_val_2="label-2").set_caching_options(False) + add_pod_label(task_a, 'task-label-1', 'label-1') + add_pod_label(task_a, 'task-label-2', 'label-2') + add_pod_annotation(task_a, 'task-annotation', 'annotation') + # expose pod metadata annotation and label in container + use_field_path_as_env(task_a, 'POD_TASK_ANNOTATION', + "metadata.annotations['task-annotation']") + use_field_path_as_env(task_a, 'POD_TASK_LABEL_1', "metadata.labels['task-label-1']") + use_field_path_as_env(task_a, "POD_TASK_LABEL_2", "metadata.labels['task-label-2']") + + task_b = validate_pod_metadata( + annotation_path_1="POD_TASK_ANNOTATION_1", + annotation_exp_val_1='annotation-1', + annotation_path_2="POD_TASK_ANNOTATION_2", + annotation_exp_val_2="annotation-2").set_caching_options(False) + add_pod_annotation(task_b, 'task-annotation-1', 'annotation-1') + add_pod_annotation(task_b, 'task-annotation-2', 'annotation-2') + + use_field_path_as_env(task_b, 'POD_TASK_ANNOTATION_1', "metadata.annotations['task-annotation-1']") + use_field_path_as_env(task_b, "POD_TASK_ANNOTATION_2", "metadata.annotations['task-annotation-2']") + + # task c is set with no metadata + task_c = validate_no_pod_metadata(annotation_path="POD_TASK_ANNOTATION", label_path="POD_TASK_LABEL").set_caching_options(False) + +if __name__ == '__main__': + compiler.Compiler().compile( + pipeline_func=pipeline_with_pod_metadata, + package_path=__file__.replace('.py', '.yaml')) + diff --git a/test_data/sdk_compiled_pipelines/valid/create_pod_metadata_complex.yaml b/test_data/sdk_compiled_pipelines/valid/create_pod_metadata_complex.yaml new file mode 100644 index 00000000000..ca4729b41de --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/create_pod_metadata_complex.yaml @@ -0,0 +1,319 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-pod-metadata +components: + comp-validate-no-pod-metadata: + executorLabel: exec-validate-no-pod-metadata + inputDefinitions: + parameters: + annotation_path: + parameterType: STRING + label_path: + parameterType: STRING + outputDefinitions: + 
parameters: + Output: + parameterType: BOOLEAN + comp-validate-pod-metadata: + executorLabel: exec-validate-pod-metadata + inputDefinitions: + parameters: + annotation_exp_val_1: + isOptional: true + parameterType: STRING + annotation_exp_val_2: + isOptional: true + parameterType: STRING + annotation_path_1: + isOptional: true + parameterType: STRING + annotation_path_2: + isOptional: true + parameterType: STRING + label_exp_val_1: + isOptional: true + parameterType: STRING + label_exp_val_2: + isOptional: true + parameterType: STRING + label_path_1: + isOptional: true + parameterType: STRING + label_path_2: + isOptional: true + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: BOOLEAN + comp-validate-pod-metadata-2: + executorLabel: exec-validate-pod-metadata-2 + inputDefinitions: + parameters: + annotation_exp_val_1: + isOptional: true + parameterType: STRING + annotation_exp_val_2: + isOptional: true + parameterType: STRING + annotation_path_1: + isOptional: true + parameterType: STRING + annotation_path_2: + isOptional: true + parameterType: STRING + label_exp_val_1: + isOptional: true + parameterType: STRING + label_exp_val_2: + isOptional: true + parameterType: STRING + label_path_1: + isOptional: true + parameterType: STRING + label_path_2: + isOptional: true + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: BOOLEAN +deploymentSpec: + executors: + exec-validate-no-pod-metadata: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - validate_no_pod_metadata + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef validate_no_pod_metadata(annotation_path: str, label_path: str)\ + \ -> bool:\n import os\n annotation = os.getenv(annotation_path)\n\ + \ if annotation != \"\":\n raise ValueError(\n f\"\ + Pod annotation is {annotation} but is supposed to be None.\"\n )\n\ + \ label = os.getenv(label_path)\n if label != \"\":\n raise\ + \ ValueError(\n f\"Pod label is {label} but is supposed to be\ + \ None.\"\n )\n return True\n\n" + image: python:3.9 + exec-validate-pod-metadata: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - validate_pod_metadata + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef validate_pod_metadata(annotation_path_1: str = None,\n \ + \ annotation_exp_val_1: str = None,\n \ + \ annotation_path_2: str = None,\n annotation_exp_val_2:\ + \ str = None,\n label_path_1: str = None,\n \ + \ label_exp_val_1: str = None,\n \ + \ label_path_2: str = None,\n label_exp_val_2:\ + \ str = None) -> bool:\n import os\n\n validated_metadata_count =\ + \ 0\n if annotation_path_1 is not None:\n annotation_val_1 = os.getenv(annotation_path_1)\n\ + \ if annotation_val_1 is not None and annotation_val_1 != annotation_exp_val_1:\n\ + \ raise ValueError(\n f\"Pod annotation is {annotation_val_1}\ + \ but is supposed to be {annotation_exp_val_1}.\"\n )\n \ + \ validated_metadata_count+=1\n if annotation_path_2 is not None:\n\ + \ annotation_val_2 = os.getenv(annotation_path_2)\n if annotation_val_2\ + \ is not None and annotation_val_2 != annotation_exp_val_2:\n \ + \ raise ValueError(\n f\"Pod annotation is {annotation_val_2}\ + \ but is supposed to be {annotation_exp_val_2}.\"\n )\n \ + \ validated_metadata_count+=1\n if label_path_1 is not None:\n \ + \ label_val_1 = os.getenv(label_path_1)\n if label_val_1 is not\ + \ None and label_val_1 != label_exp_val_1:\n raise ValueError(\n\ + \ f\"Pod label is {label_val_1} but is supposed to be {label_exp_val_1}.\"\ + \n )\n validated_metadata_count+=1\n if label_path_2\ + \ is not None:\n label_val_2 = os.getenv(label_path_2)\n if\ + \ label_val_2 is not None and label_val_2 != label_exp_val_2:\n \ + \ raise ValueError(\n f\"Pod label is {label_val_2} but\ + \ is supposed to be {label_exp_val_2}.\"\n )\n validated_metadata_count+=1\n\ + \ if validated_metadata_count <1:\n raise RuntimeError(f\"No pod\ + \ metadata found to validate.\")\n return True\n\n" + image: python:3.9 + exec-validate-pod-metadata-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - validate_pod_metadata + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef validate_pod_metadata(annotation_path_1: str = None,\n \ + \ annotation_exp_val_1: str = None,\n \ + \ annotation_path_2: str = None,\n annotation_exp_val_2:\ + \ str = None,\n label_path_1: str = None,\n \ + \ label_exp_val_1: str = None,\n \ + \ label_path_2: str = None,\n label_exp_val_2:\ + \ str = None) -> bool:\n import os\n\n validated_metadata_count =\ + \ 0\n if annotation_path_1 is not None:\n annotation_val_1 = os.getenv(annotation_path_1)\n\ + \ if annotation_val_1 is not None and annotation_val_1 != annotation_exp_val_1:\n\ + \ raise ValueError(\n f\"Pod annotation is {annotation_val_1}\ + \ but is supposed to be {annotation_exp_val_1}.\"\n )\n \ + \ validated_metadata_count+=1\n if annotation_path_2 is not None:\n\ + \ annotation_val_2 = os.getenv(annotation_path_2)\n if annotation_val_2\ + \ is not None and annotation_val_2 != annotation_exp_val_2:\n \ + \ raise ValueError(\n f\"Pod annotation is {annotation_val_2}\ + \ but is supposed to be {annotation_exp_val_2}.\"\n )\n \ + \ validated_metadata_count+=1\n if label_path_1 is not None:\n \ + \ label_val_1 = os.getenv(label_path_1)\n if label_val_1 is not\ + \ None and label_val_1 != label_exp_val_1:\n raise ValueError(\n\ + \ f\"Pod label is {label_val_1} but is supposed to be {label_exp_val_1}.\"\ + \n )\n validated_metadata_count+=1\n if label_path_2\ + \ is not None:\n label_val_2 = os.getenv(label_path_2)\n if\ + \ label_val_2 is not None and label_val_2 != label_exp_val_2:\n \ + \ raise ValueError(\n f\"Pod label is {label_val_2} but\ + \ is supposed to be {label_exp_val_2}.\"\n )\n validated_metadata_count+=1\n\ + \ if validated_metadata_count <1:\n raise RuntimeError(f\"No pod\ + \ metadata found to validate.\")\n return True\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-with-pod-metadata +root: + dag: + tasks: + validate-no-pod-metadata: + cachingOptions: {} + componentRef: + name: comp-validate-no-pod-metadata + inputs: + parameters: + annotation_path: + runtimeValue: + constant: POD_TASK_ANNOTATION + label_path: + runtimeValue: + constant: POD_TASK_LABEL + taskInfo: + name: validate-no-pod-metadata + validate-pod-metadata: + cachingOptions: {} + componentRef: + name: comp-validate-pod-metadata + inputs: + parameters: + annotation_exp_val_1: + runtimeValue: + constant: annotation + annotation_path_1: + runtimeValue: + constant: POD_TASK_ANNOTATION + label_exp_val_1: + runtimeValue: + constant: label-1 + label_exp_val_2: + runtimeValue: + constant: label-2 + label_path_1: + runtimeValue: + constant: POD_TASK_LABEL_1 + label_path_2: + runtimeValue: + constant: POD_TASK_LABEL_2 + taskInfo: + name: validate-pod-metadata + validate-pod-metadata-2: + cachingOptions: {} + componentRef: + name: comp-validate-pod-metadata-2 + inputs: + parameters: + annotation_exp_val_1: + runtimeValue: + constant: annotation-1 + annotation_exp_val_2: + runtimeValue: + 
constant: annotation-2 + annotation_path_1: + runtimeValue: + constant: POD_TASK_ANNOTATION_1 + annotation_path_2: + runtimeValue: + constant: POD_TASK_ANNOTATION_2 + taskInfo: + name: validate-pod-metadata-2 +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 +--- +platforms: + kubernetes: + deploymentSpec: + executors: + exec-validate-pod-metadata: + fieldPathAsEnv: + - fieldPath: metadata.annotations['task-annotation'] + name: POD_TASK_ANNOTATION + - fieldPath: metadata.labels['task-label-1'] + name: POD_TASK_LABEL_1 + - fieldPath: metadata.labels['task-label-2'] + name: POD_TASK_LABEL_2 + podMetadata: + annotations: + task-annotation: annotation + labels: + task-label-1: label-1 + task-label-2: label-2 + exec-validate-pod-metadata-2: + fieldPathAsEnv: + - fieldPath: metadata.annotations['task-annotation-1'] + name: POD_TASK_ANNOTATION_1 + - fieldPath: metadata.annotations['task-annotation-2'] + name: POD_TASK_ANNOTATION_2 + podMetadata: + annotations: + task-annotation-1: annotation-1 + task-annotation-2: annotation-2 diff --git a/test_data/sdk_compiled_pipelines/valid/critical/__init__.py b/test_data/sdk_compiled_pipelines/valid/critical/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/test_data/components/add_numbers.py b/test_data/sdk_compiled_pipelines/valid/critical/add_numbers.py similarity index 100% rename from sdk/python/test_data/components/add_numbers.py rename to test_data/sdk_compiled_pipelines/valid/critical/add_numbers.py diff --git a/test_data/sdk_compiled_pipelines/valid/critical/add_numbers.yaml b/test_data/sdk_compiled_pipelines/valid/critical/add_numbers.yaml new file mode 100644 index 00000000000..c180c2d02a6 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/add_numbers.yaml @@ -0,0 +1,86 @@ +# PIPELINE DEFINITION +# Name: add-numbers +# Inputs: +# a: int +# b: int +# Outputs: +# Output: int +components: + comp-add-numbers: + executorLabel: exec-add-numbers + inputDefinitions: + parameters: + a: + parameterType: NUMBER_INTEGER + b: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER +deploymentSpec: + executors: + exec-add-numbers: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - add_numbers + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef add_numbers(a: int, b: int) -> int:\n return a + b\n\n" + image: python:3.9 +pipelineInfo: + name: add-numbers +root: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: add-numbers + tasks: + add-numbers: + cachingOptions: + enableCache: true + componentRef: + name: comp-add-numbers + inputs: + parameters: + a: + componentInputParameter: a + b: + componentInputParameter: b + taskInfo: + name: add-numbers + inputDefinitions: + parameters: + a: + parameterType: NUMBER_INTEGER + b: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/samples/v2/subdagio/artifact_cache.py b/test_data/sdk_compiled_pipelines/valid/critical/artifact_cache.py similarity index 100% rename from samples/v2/subdagio/artifact_cache.py rename to test_data/sdk_compiled_pipelines/valid/critical/artifact_cache.py diff --git a/test_data/sdk_compiled_pipelines/valid/critical/artifact_cache.yaml b/test_data/sdk_compiled_pipelines/valid/critical/artifact_cache.yaml new file mode 100644 index 00000000000..455ee6ae3ec --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/artifact_cache.yaml @@ -0,0 +1,152 @@ +# PIPELINE DEFINITION +# Name: artifact-cache-pipeline +components: + comp-core: + dag: + outputs: + artifacts: + Output: + artifactSelectors: + - outputArtifactKey: dataset + producerSubtask: core-comp + tasks: + core-comp: + cachingOptions: + enableCache: true + componentRef: + name: comp-core-comp + taskInfo: + name: core-comp + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-core-comp: + executorLabel: exec-core-comp + outputDefinitions: + artifacts: + dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-crust-comp: + executorLabel: exec-crust-comp + inputDefinitions: + artifacts: + input: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-mantle: + dag: + outputs: + artifacts: + Output: + artifactSelectors: + - outputArtifactKey: Output + producerSubtask: core + tasks: + core: + cachingOptions: + enableCache: true + componentRef: + name: comp-core + taskInfo: + name: core + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-core-comp: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - core_comp + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef core_comp(dataset: dsl.Output[dsl.Dataset]):\n with open(dataset.path,\ + \ 'w') as f:\n f.write('foo')\n\n" + image: python:3.9 + exec-crust-comp: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - crust_comp + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef crust_comp(input: dsl.Dataset):\n with open(input.path, 'r')\ + \ as f:\n print('input: ', f.read())\n\n" + image: python:3.9 +pipelineInfo: + name: artifact-cache-pipeline +root: + dag: + tasks: + crust-comp: + cachingOptions: + enableCache: true + componentRef: + name: comp-crust-comp + dependentTasks: + - mantle + inputs: + artifacts: + input: + taskOutputArtifact: + outputArtifactKey: Output + producerTask: mantle + taskInfo: + name: crust-comp + mantle: + cachingOptions: + enableCache: true + componentRef: + name: comp-mantle + taskInfo: + name: mantle +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/critical/artifact_crust.py b/test_data/sdk_compiled_pipelines/valid/critical/artifact_crust.py new file mode 100644 index 00000000000..3c06ffcd127 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/artifact_crust.py @@ -0,0 +1,48 @@ +import os + +from kfp import Client +from kfp import dsl +from kfp.compiler import Compiler + + +@dsl.component +def core_comp(dataset: dsl.Output[dsl.Dataset]): + with open(dataset.path, 'w') as f: + f.write('foo') + + +@dsl.component +def crust_comp(input: dsl.Dataset): + with open(input.path, 'r') as f: + print('input: ', f.read()) + + +@dsl.pipeline +def core() -> dsl.Dataset: + task = core_comp() + task.set_caching_options(False) + + return task.output + + +@dsl.pipeline +def mantle() -> dsl.Dataset: + dag_task = core() + dag_task.set_caching_options(False) + + return dag_task.output + + +@dsl.pipeline(name=os.path.basename(__file__).removesuffix('.py') + '-pipeline') +def crust(): + dag_task = mantle() + dag_task.set_caching_options(False) + + task = crust_comp(input=dag_task.output) + task.set_caching_options(False) + + +if __name__ == '__main__': + Compiler().compile(pipeline_func=crust, package_path=f"{__file__.removesuffix('.py')}.yaml") + # client = Client() + # 
client.create_run_from_pipeline_func(crust) diff --git a/test_data/sdk_compiled_pipelines/valid/critical/artifact_crust.yaml b/test_data/sdk_compiled_pipelines/valid/critical/artifact_crust.yaml new file mode 100644 index 00000000000..8bfb71fe1e9 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/artifact_crust.yaml @@ -0,0 +1,148 @@ +# PIPELINE DEFINITION +# Name: artifact-pipeline +components: + comp-core: + dag: + outputs: + artifacts: + Output: + artifactSelectors: + - outputArtifactKey: dataset + producerSubtask: core-comp + tasks: + core-comp: + cachingOptions: {} + componentRef: + name: comp-core-comp + taskInfo: + name: core-comp + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-core-comp: + executorLabel: exec-core-comp + outputDefinitions: + artifacts: + dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-crust-comp: + executorLabel: exec-crust-comp + inputDefinitions: + artifacts: + input: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-mantle: + dag: + outputs: + artifacts: + Output: + artifactSelectors: + - outputArtifactKey: Output + producerSubtask: core + tasks: + core: + cachingOptions: {} + componentRef: + name: comp-core + taskInfo: + name: core + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-core-comp: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - core_comp + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef core_comp(dataset: dsl.Output[dsl.Dataset]):\n with open(dataset.path,\ + \ 'w') as f:\n f.write('foo')\n\n" + image: python:3.9 + exec-crust-comp: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - crust_comp + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef crust_comp(input: dsl.Dataset):\n with open(input.path, 'r')\ + \ as f:\n print('input: ', f.read())\n\n" + image: python:3.9 +pipelineInfo: + name: artifact-pipeline +root: + dag: + tasks: + crust-comp: + cachingOptions: {} + componentRef: + name: comp-crust-comp + dependentTasks: + - mantle + inputs: + artifacts: + input: + taskOutputArtifact: + outputArtifactKey: Output + producerTask: mantle + taskInfo: + name: crust-comp + mantle: + cachingOptions: {} + componentRef: + name: comp-mantle + taskInfo: + name: mantle +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/samples/v2/collected_parameters.py b/test_data/sdk_compiled_pipelines/valid/critical/collected_parameters.py similarity index 100% rename from samples/v2/collected_parameters.py rename to test_data/sdk_compiled_pipelines/valid/critical/collected_parameters.py diff --git a/test_data/sdk_compiled_pipelines/valid/critical/collected_parameters.yaml b/test_data/sdk_compiled_pipelines/valid/critical/collected_parameters.yaml new file mode 100644 index 00000000000..3a772330981 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/collected_parameters.yaml @@ -0,0 +1,335 @@ +# PIPELINE DEFINITION +# Name: collected-param-pipeline +components: + comp-collecting-parameters: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: pipelinechannel--prepend-id-Output + producerSubtask: for-loop-1 + tasks: + consume-ids: + cachingOptions: {} + componentRef: + name: comp-consume-ids + dependentTasks: + - for-loop-1 + inputs: + parameters: + ids: + taskOutputParameter: + outputParameterKey: pipelinechannel--prepend-id-Output + producerTask: for-loop-1 + taskInfo: + name: consume-ids + for-loop-1: + componentRef: + name: comp-for-loop-1 + dependentTasks: + - split-ids + inputs: + parameters: + pipelinechannel--split-ids-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: split-ids + parameterIterator: + itemInput: pipelinechannel--split-ids-Output-loop-item + items: + inputParameter: pipelinechannel--split-ids-Output + taskInfo: + name: for-loop-1 + split-ids: + cachingOptions: + enableCache: true + componentRef: + name: comp-split-ids + inputs: + parameters: + ids: + componentInputParameter: model_ids + taskInfo: + name: split-ids + inputDefinitions: + parameters: + model_ids: + defaultValue: '' + isOptional: true + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: LIST + comp-consume-ids: + executorLabel: exec-consume-ids + inputDefinitions: + parameters: + ids: + parameterType: LIST + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-consume-ids-2: + executorLabel: exec-consume-ids-2 + inputDefinitions: + parameters: + ids: + parameterType: LIST + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-consume-single-id: + 
executorLabel: exec-consume-single-id + inputDefinitions: + parameters: + id: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-for-loop-1: + dag: + outputs: + parameters: + pipelinechannel--prepend-id-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: prepend-id + tasks: + consume-single-id: + cachingOptions: {} + componentRef: + name: comp-consume-single-id + dependentTasks: + - prepend-id + inputs: + parameters: + id: + taskOutputParameter: + outputParameterKey: Output + producerTask: prepend-id + taskInfo: + name: consume-single-id + prepend-id: + cachingOptions: {} + componentRef: + name: comp-prepend-id + inputs: + parameters: + content: + componentInputParameter: pipelinechannel--split-ids-Output-loop-item + taskInfo: + name: prepend-id + inputDefinitions: + parameters: + pipelinechannel--split-ids-Output: + parameterType: LIST + pipelinechannel--split-ids-Output-loop-item: + parameterType: STRING + outputDefinitions: + parameters: + pipelinechannel--prepend-id-Output: + parameterType: LIST + comp-prepend-id: + executorLabel: exec-prepend-id + inputDefinitions: + parameters: + content: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-split-ids: + executorLabel: exec-split-ids + inputDefinitions: + parameters: + ids: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: LIST +deploymentSpec: + executors: + exec-consume-ids: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - consume_ids + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef consume_ids(ids: List[str]) -> str:\n for id in ids:\n \ + \ print(f'Consuming: {id}')\n return 'completed'\n\n" + image: python:3.9 + exec-consume-ids-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - consume_ids + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef consume_ids(ids: List[str]) -> str:\n for id in ids:\n \ + \ print(f'Consuming: {id}')\n return 'completed'\n\n" + image: python:3.9 + exec-consume-single-id: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - consume_single_id + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef consume_single_id(id: str) -> str:\n print(f'Consuming single:\ + \ {id}')\n return 'completed'\n\n" + image: python:3.9 + exec-prepend-id: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - prepend_id + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef prepend_id(content: str) -> str:\n print(f\"prepending: {content}\ + \ with 'model_id'\")\n return f'model_id_{content}'\n\n" + image: python:3.9 + exec-split-ids: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - split_ids + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef split_ids(ids: str) -> list:\n return ids.split(',')\n\n" + image: python:3.9 +pipelineInfo: + name: collected-param-pipeline +root: + dag: + tasks: + collecting-parameters: + cachingOptions: {} + componentRef: + name: comp-collecting-parameters + inputs: + parameters: + model_ids: + runtimeValue: + constant: s1,s2,s3 + taskInfo: + name: collecting-parameters + consume-ids: + cachingOptions: {} + componentRef: + name: comp-consume-ids-2 + dependentTasks: + - collecting-parameters + inputs: + parameters: + ids: + taskOutputParameter: + outputParameterKey: Output + producerTask: collecting-parameters + taskInfo: + name: consume-ids +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.2 diff --git a/sdk/python/test_data/pipelines/component_with_optional_inputs.py b/test_data/sdk_compiled_pipelines/valid/critical/component_with_optional_inputs.py similarity index 100% rename from sdk/python/test_data/pipelines/component_with_optional_inputs.py rename to test_data/sdk_compiled_pipelines/valid/critical/component_with_optional_inputs.py diff --git a/test_data/sdk_compiled_pipelines/valid/critical/component_with_optional_inputs.yaml b/test_data/sdk_compiled_pipelines/valid/critical/component_with_optional_inputs.yaml new file mode 100644 index 00000000000..716b616d28b --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/component_with_optional_inputs.yaml @@ -0,0 +1,73 @@ +# PIPELINE DEFINITION +# Name: v2-component-optional-input +components: + comp-component-op: + executorLabel: exec-component-op + inputDefinitions: + parameters: + input1: + defaultValue: default value + isOptional: true + parameterType: STRING + input2: + isOptional: true + parameterType: STRING + input3: + isOptional: true + parameterType: STRING +deploymentSpec: + executors: + exec-component-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_op + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_op(\n input1: str = 'default value',\n input2:\ + \ Optional[str] = None,\n input3: Optional[str] = None,\n):\n print(f'input1:\ + \ {input1}, type: {type(input1)}')\n print(f'input2: {input2}, type:\ + \ {type(input2)}')\n print(f'input3: {input3}, type: {type(input3)}')\n\ + \n" + image: python:3.9 +pipelineInfo: + name: v2-component-optional-input +root: + dag: + tasks: + component-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-component-op + inputs: + parameters: + input1: + runtimeValue: + constant: Hello + input2: + runtimeValue: + constant: World + taskInfo: + name: component-op +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/sdk/python/test_data/pipelines/container_component_with_no_inputs.py b/test_data/sdk_compiled_pipelines/valid/critical/container_component_with_no_inputs.py similarity index 100% rename from sdk/python/test_data/pipelines/container_component_with_no_inputs.py rename to test_data/sdk_compiled_pipelines/valid/critical/container_component_with_no_inputs.py diff --git a/test_data/sdk_compiled_pipelines/valid/critical/container_component_with_no_inputs.yaml b/test_data/sdk_compiled_pipelines/valid/critical/container_component_with_no_inputs.yaml new file mode 100644 index 00000000000..7c2f0f3aed4 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/container_component_with_no_inputs.yaml @@ -0,0 +1,27 @@ +# PIPELINE DEFINITION +# Name: v2-container-component-no-input +components: + comp-hello-world-container: + executorLabel: exec-hello-world-container +deploymentSpec: + executors: + exec-hello-world-container: + container: + command: + - echo + - hello world + image: python:3.9 +pipelineInfo: + name: v2-container-component-no-input +root: + dag: + tasks: + hello-world-container: + cachingOptions: + enableCache: true + componentRef: + name: comp-hello-world-container + taskInfo: + name: hello-world-container +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/critical/flip_coin.py b/test_data/sdk_compiled_pipelines/valid/critical/flip_coin.py new file mode 100644 index 00000000000..264b1de5a78 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/flip_coin.py @@ -0,0 +1,65 @@ +# Copyright 2022 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from kfp import compiler, dsl + + +@dsl.component() +def random_num(low: int, high: int) -> int: + """Generate a random number between low and high.""" + import random # noqa: PLC0415 + + result = random.randint(low, high) + print(result) + return result + + +@dsl.component() +def flip_coin() -> str: + """Flip a coin and output heads or tails randomly.""" + import random # noqa: PLC0415 + + result = "heads" if random.randint(0, 1) == 0 else "tails" + print(result) + return result + + +@dsl.component() +def print_msg(msg: str): + """Print a message.""" + print(msg) + + +@dsl.pipeline( + name="conditional-execution-pipeline", + description="Shows how to use dsl.If().", +) +def flipcoin_pipeline(): + flip = flip_coin().set_caching_options(False) + with dsl.If(flip.output == "heads"): + random_num_head = random_num(low=0, high=9).set_caching_options(False) + with dsl.If(random_num_head.output > 5): + print_msg(msg="heads and %s > 5!" % random_num_head.output).set_caching_options(False) + with dsl.If(random_num_head.output <= 5): + print_msg(msg="heads and %s <= 5!" % random_num_head.output).set_caching_options(False) + + with dsl.If(flip.output == "tails"): + random_num_tail = random_num(low=10, high=19).set_caching_options(False) + with dsl.If(random_num_tail.output > 15): + print_msg(msg="tails and %s > 15!" % random_num_tail.output).set_caching_options(False) + with dsl.If(random_num_tail.output <= 15): + print_msg(msg="tails and %s <= 15!" % random_num_tail.output).set_caching_options(False) + + +if __name__ == "__main__": + compiler.Compiler().compile(flipcoin_pipeline, package_path=__file__.replace(".py", ".yaml")) diff --git a/test_data/sdk_compiled_pipelines/valid/critical/flip_coin.yaml b/test_data/sdk_compiled_pipelines/valid/critical/flip_coin.yaml new file mode 100644 index 00000000000..5dd95963f8d --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/flip_coin.yaml @@ -0,0 +1,521 @@ +# PIPELINE DEFINITION +# Name: conditional-execution-pipeline +# Description: Shows how to use dsl.If(). 
+components: + comp-condition-1: + dag: + tasks: + condition-2: + componentRef: + name: comp-condition-2 + dependentTasks: + - random-num + inputs: + parameters: + pipelinechannel--flip-coin-Output: + componentInputParameter: pipelinechannel--flip-coin-Output + pipelinechannel--random-num-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: random-num + taskInfo: + name: condition-2 + triggerPolicy: + condition: int(inputs.parameter_values['pipelinechannel--random-num-Output']) + > 5 + condition-3: + componentRef: + name: comp-condition-3 + dependentTasks: + - random-num + inputs: + parameters: + pipelinechannel--flip-coin-Output: + componentInputParameter: pipelinechannel--flip-coin-Output + pipelinechannel--random-num-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: random-num + taskInfo: + name: condition-3 + triggerPolicy: + condition: int(inputs.parameter_values['pipelinechannel--random-num-Output']) + <= 5 + random-num: + cachingOptions: {} + componentRef: + name: comp-random-num + inputs: + parameters: + high: + runtimeValue: + constant: 9.0 + low: + runtimeValue: + constant: 0.0 + taskInfo: + name: random-num + inputDefinitions: + parameters: + pipelinechannel--flip-coin-Output: + parameterType: STRING + comp-condition-2: + dag: + tasks: + print-msg: + cachingOptions: {} + componentRef: + name: comp-print-msg + inputs: + parameters: + msg: + runtimeValue: + constant: heads and {{$.inputs.parameters['pipelinechannel--random-num-Output']}} + > 5! + pipelinechannel--random-num-Output: + componentInputParameter: pipelinechannel--random-num-Output + taskInfo: + name: print-msg + inputDefinitions: + parameters: + pipelinechannel--flip-coin-Output: + parameterType: STRING + pipelinechannel--random-num-Output: + parameterType: NUMBER_INTEGER + comp-condition-3: + dag: + tasks: + print-msg-2: + cachingOptions: {} + componentRef: + name: comp-print-msg-2 + inputs: + parameters: + msg: + runtimeValue: + constant: heads and {{$.inputs.parameters['pipelinechannel--random-num-Output']}} + <= 5! 
+ pipelinechannel--random-num-Output: + componentInputParameter: pipelinechannel--random-num-Output + taskInfo: + name: print-msg-2 + inputDefinitions: + parameters: + pipelinechannel--flip-coin-Output: + parameterType: STRING + pipelinechannel--random-num-Output: + parameterType: NUMBER_INTEGER + comp-condition-4: + dag: + tasks: + condition-5: + componentRef: + name: comp-condition-5 + dependentTasks: + - random-num-2 + inputs: + parameters: + pipelinechannel--flip-coin-Output: + componentInputParameter: pipelinechannel--flip-coin-Output + pipelinechannel--random-num-2-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: random-num-2 + taskInfo: + name: condition-5 + triggerPolicy: + condition: int(inputs.parameter_values['pipelinechannel--random-num-2-Output']) + > 15 + condition-6: + componentRef: + name: comp-condition-6 + dependentTasks: + - random-num-2 + inputs: + parameters: + pipelinechannel--flip-coin-Output: + componentInputParameter: pipelinechannel--flip-coin-Output + pipelinechannel--random-num-2-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: random-num-2 + taskInfo: + name: condition-6 + triggerPolicy: + condition: int(inputs.parameter_values['pipelinechannel--random-num-2-Output']) + <= 15 + random-num-2: + cachingOptions: {} + componentRef: + name: comp-random-num-2 + inputs: + parameters: + high: + runtimeValue: + constant: 19.0 + low: + runtimeValue: + constant: 10.0 + taskInfo: + name: random-num-2 + inputDefinitions: + parameters: + pipelinechannel--flip-coin-Output: + parameterType: STRING + comp-condition-5: + dag: + tasks: + print-msg-3: + cachingOptions: {} + componentRef: + name: comp-print-msg-3 + inputs: + parameters: + msg: + runtimeValue: + constant: tails and {{$.inputs.parameters['pipelinechannel--random-num-2-Output']}} + > 15! + pipelinechannel--random-num-2-Output: + componentInputParameter: pipelinechannel--random-num-2-Output + taskInfo: + name: print-msg-3 + inputDefinitions: + parameters: + pipelinechannel--flip-coin-Output: + parameterType: STRING + pipelinechannel--random-num-2-Output: + parameterType: NUMBER_INTEGER + comp-condition-6: + dag: + tasks: + print-msg-4: + cachingOptions: {} + componentRef: + name: comp-print-msg-4 + inputs: + parameters: + msg: + runtimeValue: + constant: tails and {{$.inputs.parameters['pipelinechannel--random-num-2-Output']}} + <= 15! 
+ pipelinechannel--random-num-2-Output: + componentInputParameter: pipelinechannel--random-num-2-Output + taskInfo: + name: print-msg-4 + inputDefinitions: + parameters: + pipelinechannel--flip-coin-Output: + parameterType: STRING + pipelinechannel--random-num-2-Output: + parameterType: NUMBER_INTEGER + comp-flip-coin: + executorLabel: exec-flip-coin + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-msg: + executorLabel: exec-print-msg + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-msg-2: + executorLabel: exec-print-msg-2 + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-msg-3: + executorLabel: exec-print-msg-3 + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-msg-4: + executorLabel: exec-print-msg-4 + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-random-num: + executorLabel: exec-random-num + inputDefinitions: + parameters: + high: + parameterType: NUMBER_INTEGER + low: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-random-num-2: + executorLabel: exec-random-num-2 + inputDefinitions: + parameters: + high: + parameterType: NUMBER_INTEGER + low: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER +deploymentSpec: + executors: + exec-flip-coin: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - flip_coin + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef flip_coin() -> str:\n \"\"\"Flip a coin and output heads or\ + \ tails randomly.\"\"\"\n import random # noqa: PLC0415\n\n result\ + \ = \"heads\" if random.randint(0, 1) == 0 else \"tails\"\n print(result)\n\ + \ return result\n\n" + image: python:3.9 + exec-print-msg: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_msg + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_msg(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ + \n" + image: python:3.9 + exec-print-msg-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_msg + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_msg(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ + \n" + image: python:3.9 + exec-print-msg-3: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_msg + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_msg(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ + \n" + image: python:3.9 + exec-print-msg-4: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_msg + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_msg(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ + \n" + image: python:3.9 + exec-random-num: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - random_num + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef random_num(low: int, high: int) -> int:\n \"\"\"Generate a\ + \ random number between low and high.\"\"\"\n import random # noqa:\ + \ PLC0415\n\n result = random.randint(low, high)\n print(result)\n\ + \ return result\n\n" + image: python:3.9 + exec-random-num-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - random_num + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef random_num(low: int, high: int) -> int:\n \"\"\"Generate a\ + \ random number between low and high.\"\"\"\n import random # noqa:\ + \ PLC0415\n\n result = random.randint(low, high)\n print(result)\n\ + \ return result\n\n" + image: python:3.9 +pipelineInfo: + description: Shows how to use dsl.If(). + name: conditional-execution-pipeline +root: + dag: + tasks: + condition-1: + componentRef: + name: comp-condition-1 + dependentTasks: + - flip-coin + inputs: + parameters: + pipelinechannel--flip-coin-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: flip-coin + taskInfo: + name: condition-1 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--flip-coin-Output'] + == 'heads' + condition-4: + componentRef: + name: comp-condition-4 + dependentTasks: + - flip-coin + inputs: + parameters: + pipelinechannel--flip-coin-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: flip-coin + taskInfo: + name: condition-4 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--flip-coin-Output'] + == 'tails' + flip-coin: + cachingOptions: {} + componentRef: + name: comp-flip-coin + taskInfo: + name: flip-coin +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/test_data/sdk_compiled_pipelines/valid/critical/loop_consume_upstream.py b/test_data/sdk_compiled_pipelines/valid/critical/loop_consume_upstream.py new file mode 100644 index 00000000000..8b955d1b4b0 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/loop_consume_upstream.py @@ -0,0 +1,58 @@ +# This pipeline tests the ability to consume outputs from upstream components in +# a loop context as well as having the inputs resolve when set_display_name is +# used within the pipeline. 
+from kfp import Client, compiler +from kfp import dsl +from kfp.dsl import Artifact +from kfp.dsl import Input +from kfp.dsl import Output + + +@dsl.component +def split_input(input: str) -> list: + return input.split(',') + + +@dsl.component +def create_file(file: Output[Artifact], content: str): + with open(file.path, 'w') as f: + f.write(content) + + +@dsl.component +def read_file(file: Input[Artifact]) -> str: + with open(file.path, 'r') as f: + print(f.read()) + return file.path + + +@dsl.component +def print_input(input: list): + for item in input: + print(f'Input item: {item}') + + +@dsl.pipeline(display_name="Loop Consume Upstream") +def loop_consume_upstream(): + model_ids_split_op = split_input(input='component1,component2,component3') + model_ids_split_op.set_caching_options(False) + model_ids_split_op.set_display_name('same display name') + with dsl.ParallelFor(model_ids_split_op.output) as model_id: + create_file_op = create_file(content=model_id) + create_file_op.set_caching_options(False) + create_file_op.set_display_name('same display name') + # Consume the output from a op in the loop iteration DAG context + read_file_op = read_file(file=create_file_op.outputs['file']) + read_file_op.set_caching_options(False) + read_file_op.set_display_name('same display name') + + print_input_op = print_input(input=model_ids_split_op.output) + print_input_op.set_caching_options(False) + print_input_op.set_display_name('same display name') + + +if __name__ == '__main__': + compiler.Compiler().compile( + pipeline_func=loop_consume_upstream, package_path=__file__.replace('.py', '.yaml')) + # client = Client() + # run = client.create_run_from_pipeline_func(loop_consume_upstream) diff --git a/test_data/sdk_compiled_pipelines/valid/critical/loop_consume_upstream.yaml b/test_data/sdk_compiled_pipelines/valid/critical/loop_consume_upstream.yaml new file mode 100644 index 00000000000..988b0482a4f --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/loop_consume_upstream.yaml @@ -0,0 +1,244 @@ +# PIPELINE DEFINITION +# Name: loop-consume-upstream +components: + comp-create-file: + executorLabel: exec-create-file + inputDefinitions: + parameters: + content: + parameterType: STRING + outputDefinitions: + artifacts: + file: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-for-loop-1: + dag: + tasks: + create-file: + cachingOptions: {} + componentRef: + name: comp-create-file + inputs: + parameters: + content: + componentInputParameter: pipelinechannel--split-input-Output-loop-item + taskInfo: + name: same display name + read-file: + cachingOptions: {} + componentRef: + name: comp-read-file + dependentTasks: + - create-file + inputs: + artifacts: + file: + taskOutputArtifact: + outputArtifactKey: file + producerTask: create-file + taskInfo: + name: same display name + inputDefinitions: + parameters: + pipelinechannel--split-input-Output: + parameterType: LIST + pipelinechannel--split-input-Output-loop-item: + parameterType: STRING + comp-print-input: + executorLabel: exec-print-input + inputDefinitions: + parameters: + input: + parameterType: LIST + comp-read-file: + executorLabel: exec-read-file + inputDefinitions: + artifacts: + file: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-split-input: + executorLabel: exec-split-input + inputDefinitions: + parameters: + input: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: LIST 
+deploymentSpec: + executors: + exec-create-file: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - create_file + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef create_file(file: Output[Artifact], content: str):\n with\ + \ open(file.path, 'w') as f:\n f.write(content)\n\n" + image: python:3.9 + exec-print-input: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_input + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_input(input: list):\n for item in input:\n print(f'Input\ + \ item: {item}')\n\n" + image: python:3.9 + exec-read-file: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - read_file + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef read_file(file: Input[Artifact]) -> str:\n with open(file.path,\ + \ 'r') as f:\n print(f.read())\n return file.path\n\n" + image: python:3.9 + exec-split-input: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - split_input + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef split_input(input: str) -> list:\n return input.split(',')\n\ + \n" + image: python:3.9 +pipelineInfo: + displayName: Loop Consume Upstream + name: loop-consume-upstream +root: + dag: + tasks: + for-loop-1: + componentRef: + name: comp-for-loop-1 + dependentTasks: + - split-input + inputs: + parameters: + pipelinechannel--split-input-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: split-input + parameterIterator: + itemInput: pipelinechannel--split-input-Output-loop-item + items: + inputParameter: pipelinechannel--split-input-Output + taskInfo: + name: for-loop-1 + print-input: + cachingOptions: {} + componentRef: + name: comp-print-input + dependentTasks: + - split-input + inputs: + parameters: + input: + taskOutputParameter: + outputParameterKey: Output + producerTask: split-input + taskInfo: + name: same display name + split-input: + cachingOptions: {} + componentRef: + name: comp-split-input + inputs: + parameters: + input: + runtimeValue: + constant: component1,component2,component3 + taskInfo: + name: same display name +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/samples/v2/subdagio/mixed_parameters.py b/test_data/sdk_compiled_pipelines/valid/critical/mixed_parameters.py similarity index 91% rename from samples/v2/subdagio/mixed_parameters.py rename to test_data/sdk_compiled_pipelines/valid/critical/mixed_parameters.py index 0a660d335d9..903b036b81c 100644 --- a/samples/v2/subdagio/mixed_parameters.py +++ b/test_data/sdk_compiled_pipelines/valid/critical/mixed_parameters.py @@ -44,5 +44,5 @@ def crust(): Compiler().compile( pipeline_func=crust, package_path=f"{__file__.removesuffix('.py')}.yaml") - client = Client() - client.create_run_from_pipeline_func(crust) + # client = Client() + # client.create_run_from_pipeline_func(crust) diff --git a/test_data/sdk_compiled_pipelines/valid/critical/mixed_parameters.yaml b/test_data/sdk_compiled_pipelines/valid/critical/mixed_parameters.yaml new file mode 100644 index 00000000000..ea516dfa225 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/mixed_parameters.yaml @@ -0,0 +1,143 @@ +# PIPELINE DEFINITION +# Name: mixed-parameters-pipeline +components: + comp-core: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: core-comp + tasks: + core-comp: + cachingOptions: {} + componentRef: + name: comp-core-comp + taskInfo: + name: core-comp + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-core-comp: + executorLabel: exec-core-comp + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-crust-comp: + executorLabel: exec-crust-comp + inputDefinitions: + parameters: + x: + parameterType: NUMBER_INTEGER + y: + parameterType: NUMBER_INTEGER + comp-mantle: + dag: + outputs: + parameters: + Output: + valueFromParameter: 
+ outputParameterKey: Output + producerSubtask: core + tasks: + core: + cachingOptions: {} + componentRef: + name: comp-core + taskInfo: + name: core + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER +deploymentSpec: + executors: + exec-core-comp: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - core_comp + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef core_comp() -> int:\n return 1\n\n" + image: python:3.9 + exec-crust-comp: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - crust_comp + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef crust_comp(x: int, y: int):\n print('sum :', x + y)\n\n" + image: python:3.9 +pipelineInfo: + name: mixed-parameters-pipeline +root: + dag: + tasks: + crust-comp: + cachingOptions: {} + componentRef: + name: comp-crust-comp + dependentTasks: + - mantle + inputs: + parameters: + x: + runtimeValue: + constant: 2.0 + y: + taskOutputParameter: + outputParameterKey: Output + producerTask: mantle + taskInfo: + name: crust-comp + mantle: + cachingOptions: {} + componentRef: + name: comp-mantle + taskInfo: + name: mantle +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/test_data/sdk_compiled_pipelines/valid/critical/modelcar.yaml b/test_data/sdk_compiled_pipelines/valid/critical/modelcar.yaml new file mode 100644 index 00000000000..d56fc38ad65 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/modelcar.yaml @@ -0,0 +1,160 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-modelcar-model +# Inputs: +# model_uri: str [Default: 'oci://registry.domain.local/modelcar:test'] +components: + comp-build-model-car: + executorLabel: exec-build-model-car + outputDefinitions: + artifacts: + model: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 + comp-get-model-files-list: + executorLabel: exec-get-model-files-list + inputDefinitions: + artifacts: + input_model: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-importer: + executorLabel: exec-importer + inputDefinitions: + parameters: + uri: + parameterType: STRING + outputDefinitions: + artifacts: + artifact: 
+ artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-build-model-car: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - build_model_car + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef build_model_car(model: dsl.Output[dsl.Model]):\n # Simulate\ + \ pushing the Modelcar to an OCI registry\n model.uri = \"oci://registry.domain.local/org/repo:v1.0\"\ + \n\n" + image: python:3.9 + exec-get-model-files-list: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - get_model_files_list + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef get_model_files_list(input_model: dsl.Input[dsl.Model]) -> str:\n\ + \ import os\n import os.path\n\n if not os.path.exists(input_model.path):\n\ + \ raise RuntimeError(f\"The model does not exist at: {input_model.path}\"\ + )\n\n expected_files = {\n \"added_tokens.json\",\n \"\ + config.json\",\n \"generation_config.json\",\n \"merges.txt\"\ + ,\n \"model.safetensors\",\n \"normalizer.json\",\n \ + \ \"preprocessor_config.json\",\n \"special_tokens_map.json\",\n\ + \ \"tokenizer.json\",\n \"tokenizer_config.json\",\n \ + \ \"vocab.json\",\n }\n\n filesInPath = set(os.listdir(input_model.path))\n\ + \n if not filesInPath.issuperset(expected_files):\n raise RuntimeError(\n\ + \ \"The model does not have expected files: \"\n +\ + \ \", \".join(sorted(expected_files.difference(filesInPath)))\n )\n\ + \n return \", \".join(sorted(filesInPath))\n\n" + image: python:3.9 + exec-importer: + importer: + artifactUri: + runtimeParameter: uri + typeSchema: + schemaTitle: system.Model + schemaVersion: 0.0.1 +pipelineInfo: + name: pipeline-with-modelcar-model +root: + dag: + tasks: + build-model-car: + cachingOptions: {} + componentRef: + name: comp-build-model-car + taskInfo: + name: build-model-car + get-model-files-list: + cachingOptions: {} + componentRef: + name: comp-get-model-files-list + dependentTasks: + - importer + inputs: + artifacts: + input_model: + taskOutputArtifact: + outputArtifactKey: artifact + producerTask: importer + taskInfo: + name: get-model-files-list + importer: + cachingOptions: + enableCache: true + componentRef: + name: comp-importer + inputs: + 
parameters: + uri: + componentInputParameter: model_uri + taskInfo: + name: importer + inputDefinitions: + parameters: + model_uri: + defaultValue: oci://registry.domain.local/modelcar:test + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/samples/v2/modelcar/Dockerfile b/test_data/sdk_compiled_pipelines/valid/critical/modelcar/Dockerfile similarity index 100% rename from samples/v2/modelcar/Dockerfile rename to test_data/sdk_compiled_pipelines/valid/critical/modelcar/Dockerfile diff --git a/test_data/sdk_compiled_pipelines/valid/critical/modelcar/__init__.py b/test_data/sdk_compiled_pipelines/valid/critical/modelcar/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/samples/v2/modelcar/modelcar.py b/test_data/sdk_compiled_pipelines/valid/critical/modelcar/modelcar.py similarity index 88% rename from samples/v2/modelcar/modelcar.py rename to test_data/sdk_compiled_pipelines/valid/critical/modelcar/modelcar.py index 4db6658049e..908b169bb18 100755 --- a/samples/v2/modelcar/modelcar.py +++ b/test_data/sdk_compiled_pipelines/valid/critical/modelcar/modelcar.py @@ -20,16 +20,12 @@ from kfp import dsl from kfp.dsl import component -# In tests, we install a KFP package from the PR under test. Users should not -# normally need to specify `kfp_package_path` in their component definitions. -_KFP_PACKAGE_PATH = os.getenv("KFP_PACKAGE_PATH") - -@dsl.component(kfp_package_path=_KFP_PACKAGE_PATH) +@dsl.component() def build_model_car(model: dsl.Output[dsl.Model]): # Simulate pushing the Modelcar to an OCI registry model.uri = "oci://registry.domain.local/org/repo:v1.0" -@dsl.component(kfp_package_path=_KFP_PACKAGE_PATH) +@dsl.component() def get_model_files_list(input_model: dsl.Input[dsl.Model]) -> str: import os import os.path diff --git a/samples/v2/subdagio/multiple_artifacts_namedtuple.py b/test_data/sdk_compiled_pipelines/valid/critical/multiple_artifacts_namedtuple.py similarity index 100% rename from samples/v2/subdagio/multiple_artifacts_namedtuple.py rename to test_data/sdk_compiled_pipelines/valid/critical/multiple_artifacts_namedtuple.py diff --git a/test_data/sdk_compiled_pipelines/valid/critical/multiple_artifacts_namedtuple.yaml b/test_data/sdk_compiled_pipelines/valid/critical/multiple_artifacts_namedtuple.yaml new file mode 100644 index 00000000000..3d9ec439ed1 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/multiple_artifacts_namedtuple.yaml @@ -0,0 +1,178 @@ +# PIPELINE DEFINITION +# Name: multiple-artifacts-namedtuple-pipeline +components: + comp-core: + dag: + outputs: + artifacts: + ds1: + artifactSelectors: + - outputArtifactKey: ds1 + producerSubtask: core-comp + ds2: + artifactSelectors: + - outputArtifactKey: ds2 + producerSubtask: core-comp + tasks: + core-comp: + cachingOptions: {} + componentRef: + name: comp-core-comp + taskInfo: + name: core-comp + outputDefinitions: + artifacts: + ds1: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + ds2: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-core-comp: + executorLabel: exec-core-comp + outputDefinitions: + artifacts: + ds1: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + ds2: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-crust-comp: + executorLabel: exec-crust-comp + inputDefinitions: + artifacts: + ds1: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + ds2: + artifactType: + schemaTitle: system.Dataset + 
schemaVersion: 0.0.1 + comp-mantle: + dag: + outputs: + artifacts: + ds1: + artifactSelectors: + - outputArtifactKey: ds1 + producerSubtask: core + ds2: + artifactSelectors: + - outputArtifactKey: ds2 + producerSubtask: core + tasks: + core: + cachingOptions: {} + componentRef: + name: comp-core + taskInfo: + name: core + outputDefinitions: + artifacts: + ds1: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + ds2: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-core-comp: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - core_comp + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef core_comp(ds1: dsl.Output[dsl.Dataset], ds2: dsl.Output[dsl.Dataset]):\n\ + \ with open(ds1.path, 'w') as f:\n f.write('foo')\n with open(ds2.path,\ + \ 'w') as f:\n f.write('bar')\n\n" + image: python:3.9 + exec-crust-comp: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - crust_comp + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef crust_comp(\n ds1: dsl.Dataset,\n ds2: dsl.Dataset,\n):\n\ + \ with open(ds1.path, 'r') as f:\n print('ds1: ', f.read())\n\ + \ with open(ds2.path, 'r') as f:\n print('ds2: ', f.read())\n\n" + image: python:3.9 +pipelineInfo: + name: multiple-artifacts-namedtuple-pipeline +root: + dag: + tasks: + crust-comp: + cachingOptions: {} + componentRef: + name: comp-crust-comp + dependentTasks: + - mantle + inputs: + artifacts: + ds1: + taskOutputArtifact: + outputArtifactKey: ds1 + producerTask: mantle + ds2: + taskOutputArtifact: + outputArtifactKey: ds2 + producerTask: mantle + taskInfo: + name: crust-comp + mantle: + cachingOptions: {} + componentRef: + name: comp-mantle + taskInfo: + name: mantle +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/samples/v2/subdagio/multiple_parameters_namedtuple.py b/test_data/sdk_compiled_pipelines/valid/critical/multiple_parameters_namedtuple.py similarity index 100% rename from samples/v2/subdagio/multiple_parameters_namedtuple.py rename to test_data/sdk_compiled_pipelines/valid/critical/multiple_parameters_namedtuple.py diff --git 
a/test_data/sdk_compiled_pipelines/valid/critical/multiple_parameters_namedtuple.yaml b/test_data/sdk_compiled_pipelines/valid/critical/multiple_parameters_namedtuple.yaml new file mode 100644 index 00000000000..123b9e2282b --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/multiple_parameters_namedtuple.yaml @@ -0,0 +1,161 @@ +# PIPELINE DEFINITION +# Name: multiple-parameters-namedtuple-pipeline +components: + comp-core: + dag: + outputs: + parameters: + val1: + valueFromParameter: + outputParameterKey: val1 + producerSubtask: core-comp + val2: + valueFromParameter: + outputParameterKey: val2 + producerSubtask: core-comp + tasks: + core-comp: + cachingOptions: {} + componentRef: + name: comp-core-comp + taskInfo: + name: core-comp + outputDefinitions: + parameters: + val1: + parameterType: STRING + val2: + parameterType: STRING + comp-core-comp: + executorLabel: exec-core-comp + outputDefinitions: + parameters: + val1: + parameterType: STRING + val2: + parameterType: STRING + comp-crust-comp: + executorLabel: exec-crust-comp + inputDefinitions: + parameters: + val1: + parameterType: STRING + val2: + parameterType: STRING + comp-mantle: + dag: + outputs: + parameters: + val1: + valueFromParameter: + outputParameterKey: val1 + producerSubtask: core + val2: + valueFromParameter: + outputParameterKey: val2 + producerSubtask: core + tasks: + core: + cachingOptions: {} + componentRef: + name: comp-core + taskInfo: + name: core + outputDefinitions: + parameters: + val1: + parameterType: STRING + val2: + parameterType: STRING +deploymentSpec: + executors: + exec-core-comp: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - core_comp + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef core_comp() -> NamedTuple('outputs', val1=str, val2=str): #\ + \ type: ignore\n outputs = NamedTuple('outputs', val1=str, val2=str)\n\ + \ return outputs('foo', 'bar')\n\n" + image: python:3.9 + exec-crust-comp: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - crust_comp + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef crust_comp(val1: str, val2: str):\n print('val1: ', val1)\n\ + \ print('val2: ', val2)\n\n" + image: python:3.9 +pipelineInfo: + name: multiple-parameters-namedtuple-pipeline +root: + dag: + tasks: + crust-comp: + cachingOptions: {} + componentRef: + name: comp-crust-comp + dependentTasks: + - mantle + inputs: + parameters: + val1: + taskOutputParameter: + outputParameterKey: val1 + producerTask: mantle + val2: + taskOutputParameter: + outputParameterKey: val2 + producerTask: mantle + taskInfo: + name: crust-comp + mantle: + cachingOptions: {} + componentRef: + name: comp-mantle + taskInfo: + name: mantle +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/critical/nested_pipeline_opt_input_child_level.py b/test_data/sdk_compiled_pipelines/valid/critical/nested_pipeline_opt_input_child_level.py new file mode 100644 index 00000000000..8e2f2bd410f --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/nested_pipeline_opt_input_child_level.py @@ -0,0 +1,60 @@ + +from kfp import compiler +from kfp import dsl + + +@dsl.component() +def component_a_str(componentInputStr: str = None): + if componentInputStr != 'Input - pipeline': + raise ValueError(f"componentInputStr should be 'Input - pipeline' but is {componentInputStr}") + + +@dsl.component() +def component_b_str(componentInputStr: str = None): + if componentInputStr != 'Input 2 - nested pipeline': + raise ValueError(f"componentInputStr should be 'Input 2 - nested pipeline' but is {componentInputStr}") + +@dsl.component() +def component_a_int(componentInputInt: int = None): + if componentInputInt != 1: + raise ValueError(f"componentInputInt should be 1 but is {componentInputInt}") + +@dsl.component() +def component_b_int(componentInputInt: int = None): + if componentInputInt != 0: + raise ValueError(f"componentInputInt should be 0 but is {componentInputInt}") + +@dsl.component() +def component_a_bool(componentInputBool: bool = None): + if componentInputBool != True: + raise ValueError(f"componentInputBool should be True but is {componentInputBool}") + +@dsl.component() +def component_b_bool(componentInputBool: bool = None): + if componentInputBool != False: + raise ValueError(f"componentInputBool should be False but is {componentInputBool}") + +@dsl.pipeline() +def nested_pipeline(nestedInputStr1: str = 'Input 1 - nested pipeline', nestedInputStr2: str = 'Input 2 - nested pipeline', + nestedInputInt1: int = 0, nestedInputInt2: int = 0, + nestedInputBool1: bool = False, nestedInputBool2: bool = False): + component_a_str(componentInputStr=nestedInputStr1).set_caching_options(False) + component_b_str(componentInputStr=nestedInputStr2).set_caching_options(False) + + component_a_int(componentInputInt=nestedInputInt1).set_caching_options(False) + component_b_int(componentInputInt=nestedInputInt2).set_caching_options(False) + + 
component_a_bool(componentInputBool=nestedInputBool1).set_caching_options(False) + component_b_bool(componentInputBool=nestedInputBool2).set_caching_options(False) + + +@dsl.pipeline() +def nested_pipeline_opt_input_child_level(): + # validate that input value overrides default value, and that when input is not provided, default is used. + nested_pipeline(nestedInputStr1='Input - pipeline', nestedInputInt1=1, nestedInputBool1=True).set_caching_options(False) + + + +if __name__ == '__main__': + compiler.Compiler().compile(pipeline_func=nested_pipeline_opt_input_child_level, package_path=__file__.replace('.py', '_compiled.yaml')) + diff --git a/test_data/sdk_compiled_pipelines/valid/critical/nested_pipeline_opt_input_child_level_compiled.yaml b/test_data/sdk_compiled_pipelines/valid/critical/nested_pipeline_opt_input_child_level_compiled.yaml new file mode 100644 index 00000000000..b2cbc610ebb --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/nested_pipeline_opt_input_child_level_compiled.yaml @@ -0,0 +1,340 @@ +# PIPELINE DEFINITION +# Name: nested-pipeline-opt-input-child-level +components: + comp-component-a-bool: + executorLabel: exec-component-a-bool + inputDefinitions: + parameters: + componentInputBool: + isOptional: true + parameterType: BOOLEAN + comp-component-a-int: + executorLabel: exec-component-a-int + inputDefinitions: + parameters: + componentInputInt: + isOptional: true + parameterType: NUMBER_INTEGER + comp-component-a-str: + executorLabel: exec-component-a-str + inputDefinitions: + parameters: + componentInputStr: + isOptional: true + parameterType: STRING + comp-component-b-bool: + executorLabel: exec-component-b-bool + inputDefinitions: + parameters: + componentInputBool: + isOptional: true + parameterType: BOOLEAN + comp-component-b-int: + executorLabel: exec-component-b-int + inputDefinitions: + parameters: + componentInputInt: + isOptional: true + parameterType: NUMBER_INTEGER + comp-component-b-str: + executorLabel: exec-component-b-str + inputDefinitions: + parameters: + componentInputStr: + isOptional: true + parameterType: STRING + comp-nested-pipeline: + dag: + tasks: + component-a-bool: + cachingOptions: {} + componentRef: + name: comp-component-a-bool + inputs: + parameters: + componentInputBool: + componentInputParameter: nestedInputBool1 + taskInfo: + name: component-a-bool + component-a-int: + cachingOptions: {} + componentRef: + name: comp-component-a-int + inputs: + parameters: + componentInputInt: + componentInputParameter: nestedInputInt1 + taskInfo: + name: component-a-int + component-a-str: + cachingOptions: {} + componentRef: + name: comp-component-a-str + inputs: + parameters: + componentInputStr: + componentInputParameter: nestedInputStr1 + taskInfo: + name: component-a-str + component-b-bool: + cachingOptions: {} + componentRef: + name: comp-component-b-bool + inputs: + parameters: + componentInputBool: + componentInputParameter: nestedInputBool2 + taskInfo: + name: component-b-bool + component-b-int: + cachingOptions: {} + componentRef: + name: comp-component-b-int + inputs: + parameters: + componentInputInt: + componentInputParameter: nestedInputInt2 + taskInfo: + name: component-b-int + component-b-str: + cachingOptions: {} + componentRef: + name: comp-component-b-str + inputs: + parameters: + componentInputStr: + componentInputParameter: nestedInputStr2 + taskInfo: + name: component-b-str + inputDefinitions: + parameters: + nestedInputBool1: + defaultValue: false + isOptional: true + parameterType: BOOLEAN + nestedInputBool2: 
+ defaultValue: false + isOptional: true + parameterType: BOOLEAN + nestedInputInt1: + defaultValue: 0.0 + isOptional: true + parameterType: NUMBER_INTEGER + nestedInputInt2: + defaultValue: 0.0 + isOptional: true + parameterType: NUMBER_INTEGER + nestedInputStr1: + defaultValue: Input 1 - nested pipeline + isOptional: true + parameterType: STRING + nestedInputStr2: + defaultValue: Input 2 - nested pipeline + isOptional: true + parameterType: STRING +deploymentSpec: + executors: + exec-component-a-bool: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_a_bool + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_a_bool(componentInputBool: bool = None):\n if componentInputBool\ + \ != True:\n raise ValueError(f\"componentInputBool should be True\ + \ but is {componentInputBool}\")\n\n" + image: python:3.9 + exec-component-a-int: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_a_int + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_a_int(componentInputInt: int = None):\n if componentInputInt\ + \ != 1:\n raise ValueError(f\"componentInputInt should be 1 but is\ + \ {componentInputInt}\")\n\n" + image: python:3.9 + exec-component-a-str: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_a_str + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_a_str(componentInputStr: str = None):\n if componentInputStr\ + \ != 'Input - pipeline':\n raise ValueError(f\"componentInputStr\ + \ should be 'Input - pipeline' but is {componentInputStr}\")\n\n" + image: python:3.9 + exec-component-b-bool: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_b_bool + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_b_bool(componentInputBool: bool = None):\n if componentInputBool\ + \ != False:\n raise ValueError(f\"componentInputBool should be False\ + \ but is {componentInputBool}\")\n\n" + image: python:3.9 + exec-component-b-int: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_b_int + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_b_int(componentInputInt: int = None):\n if componentInputInt\ + \ != 0:\n raise ValueError(f\"componentInputInt should be 0 but is\ + \ {componentInputInt}\")\n\n" + image: python:3.9 + exec-component-b-str: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_b_str + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_b_str(componentInputStr: str = None):\n if componentInputStr\ + \ != 'Input 2 - nested pipeline':\n raise ValueError(f\"componentInputStr\ + \ should be 'Input 2 - nested pipeline' but is {componentInputStr}\")\n\n" + image: python:3.9 +pipelineInfo: + name: nested-pipeline-opt-input-child-level +root: + dag: + tasks: + nested-pipeline: + cachingOptions: {} + componentRef: + name: comp-nested-pipeline + inputs: + parameters: + nestedInputBool1: + runtimeValue: + constant: true + nestedInputInt1: + runtimeValue: + constant: 1.0 + nestedInputStr1: + runtimeValue: + constant: Input - pipeline + taskInfo: + name: nested-pipeline +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.2 diff --git a/test_data/sdk_compiled_pipelines/valid/critical/nested_pipeline_opt_inputs_nil.py b/test_data/sdk_compiled_pipelines/valid/critical/nested_pipeline_opt_inputs_nil.py new file mode 100644 index 00000000000..9c10add2c8b --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/nested_pipeline_opt_inputs_nil.py @@ -0,0 +1,35 @@ + +from kfp import compiler +from kfp import dsl + + +@dsl.component() +def component_str(componentInput: str = None): + if componentInput is not None: + raise ValueError(f"componentInput should be None but is {componentInput}") + +@dsl.component() +def component_int(componentInput: int = None): + if componentInput is not None: + raise ValueError(f"componentInput should be None but is {componentInput}") + +@dsl.component() +def component_bool(componentInput: bool = None): + if componentInput is not None: + raise ValueError(f"componentInput should be None but is {componentInput}") + +@dsl.pipeline() +def nested_pipeline(nestedInputStr: str = None, nestedInputInt: int = None, nestedInputBool: bool = None): + component_str(componentInput=nestedInputStr).set_caching_options(False) + component_int(componentInput=nestedInputInt).set_caching_options(False) + component_bool(componentInput=nestedInputBool).set_caching_options(False) + + +@dsl.pipeline() +def nested_pipeline_opt_inputs_nil(): + nested_pipeline().set_caching_options(False) + + +if __name__ == '__main__': + compiler.Compiler().compile(pipeline_func=nested_pipeline_opt_inputs_nil, package_path=__file__.replace('.py', '_compiled.yaml')) + diff --git a/test_data/sdk_compiled_pipelines/valid/critical/nested_pipeline_opt_inputs_nil_compiled.yaml b/test_data/sdk_compiled_pipelines/valid/critical/nested_pipeline_opt_inputs_nil_compiled.yaml new file mode 100644 index 00000000000..993a54c95da --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/nested_pipeline_opt_inputs_nil_compiled.yaml @@ -0,0 +1,173 @@ +# PIPELINE DEFINITION +# Name: nested-pipeline-opt-inputs-nil +components: + comp-component-bool: + executorLabel: exec-component-bool + inputDefinitions: + parameters: + componentInput: + isOptional: true + parameterType: BOOLEAN + 
comp-component-int: + executorLabel: exec-component-int + inputDefinitions: + parameters: + componentInput: + isOptional: true + parameterType: NUMBER_INTEGER + comp-component-str: + executorLabel: exec-component-str + inputDefinitions: + parameters: + componentInput: + isOptional: true + parameterType: STRING + comp-nested-pipeline: + dag: + tasks: + component-bool: + cachingOptions: {} + componentRef: + name: comp-component-bool + inputs: + parameters: + componentInput: + componentInputParameter: nestedInputBool + taskInfo: + name: component-bool + component-int: + cachingOptions: {} + componentRef: + name: comp-component-int + inputs: + parameters: + componentInput: + componentInputParameter: nestedInputInt + taskInfo: + name: component-int + component-str: + cachingOptions: {} + componentRef: + name: comp-component-str + inputs: + parameters: + componentInput: + componentInputParameter: nestedInputStr + taskInfo: + name: component-str + inputDefinitions: + parameters: + nestedInputBool: + isOptional: true + parameterType: BOOLEAN + nestedInputInt: + isOptional: true + parameterType: NUMBER_INTEGER + nestedInputStr: + isOptional: true + parameterType: STRING +deploymentSpec: + executors: + exec-component-bool: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_bool + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_bool(componentInput: bool = None):\n if componentInput\ + \ is not None:\n raise ValueError(f\"componentInput should be None\ + \ but is {componentInput}\")\n\n" + image: python:3.9 + exec-component-int: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_int + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_int(componentInput: int = None):\n if componentInput\ + \ is not None:\n raise ValueError(f\"componentInput should be None\ + \ but is {componentInput}\")\n\n" + image: python:3.9 + exec-component-str: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_str + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_str(componentInput: str = None):\n if componentInput\ + \ is not None:\n raise ValueError(f\"componentInput should be None\ + \ but is {componentInput}\")\n\n" + image: python:3.9 +pipelineInfo: + name: nested-pipeline-opt-inputs-nil +root: + dag: + tasks: + nested-pipeline: + cachingOptions: {} + componentRef: + name: comp-nested-pipeline + taskInfo: + name: nested-pipeline +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.2 diff --git a/test_data/sdk_compiled_pipelines/valid/critical/nested_pipeline_opt_inputs_parent_level.py b/test_data/sdk_compiled_pipelines/valid/critical/nested_pipeline_opt_inputs_parent_level.py new file mode 100644 index 00000000000..b0cbeff602c --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/nested_pipeline_opt_inputs_parent_level.py @@ -0,0 +1,58 @@ +from typing import Optional + +from kfp import compiler, dsl + + +@dsl.component() +def component_nil_str_default(componentInput: str = None): + if componentInput != 'Input - parent pipeline': + raise ValueError(f"componentInput should be 'Input - parent pipeline' but is {componentInput}") + +@dsl.component() +def component_str_default(componentInput: str = 'Input - component'): + if componentInput != 'Input - parent pipeline': + raise ValueError(f"componentInput should be 'Input - parent pipeline' but is {componentInput}") + +@dsl.component() +def component_nil_int_default(componentInput: int = None): + if componentInput != 1: + raise ValueError(f"componentInput should be 1 but is {componentInput}") + +@dsl.component() +def component_int_default(componentInput: int = 0): + if componentInput != 1: + raise ValueError(f"componentInput should be 1 but is {componentInput}") + +@dsl.component() +def component_nil_bool_default(componentInput: bool = None): + if componentInput != True: + raise ValueError(f"componentInput should be True but is {componentInput}") + +@dsl.component() +def component_bool_default(componentInput: bool = False): + if componentInput != True: + raise ValueError(f"componentInput should be True but is {componentInput}") + +@dsl.pipeline() +def nested_pipeline_non_nil_defaults(nestedInputStr: str = 'Input - nested pipeline', nestedInputInt: int = 0, nestedInputBool: bool = False): + component_str_default(componentInput=nestedInputStr).set_caching_options(False) + component_int_default(componentInput=nestedInputInt).set_caching_options(False) + component_bool_default(componentInput=nestedInputBool).set_caching_options(False) + +@dsl.pipeline() +def nested_pipeline_nil_defaults(nestedInputStr: str = 'Input - nested pipeline', nestedInputInt: int = None, nestedInputBool: bool = None): + component_nil_str_default(componentInput=nestedInputStr).set_caching_options(False) + component_nil_int_default(componentInput=nestedInputInt).set_caching_options(False) + 
component_nil_bool_default(componentInput=nestedInputBool).set_caching_options(False) + + +@dsl.pipeline() +def nested_pipeline_opt_inputs_parent_level(inputStr: str = 'Input - parent pipeline', inputInt: int = 1, inputBool: bool = True): + # verifies that the parent pipeline input overrides both nested pipeline-level and component-level default values. + nested_pipeline_non_nil_defaults(nestedInputStr=inputStr, nestedInputInt=inputInt, nestedInputBool=inputBool).set_caching_options(False) + # verifies that the parent pipeline input overrides both nested pipeline-level & component-level nil default input values. + nested_pipeline_nil_defaults(nestedInputStr=inputStr, nestedInputInt=inputInt, nestedInputBool=inputBool).set_caching_options(False) + +if __name__ == "__main__": + compiler.Compiler().compile(pipeline_func=nested_pipeline_opt_inputs_parent_level, package_path=__file__.replace(".py", "_compiled.yaml")) + diff --git a/test_data/sdk_compiled_pipelines/valid/critical/nested_pipeline_opt_inputs_parent_level_compiled.yaml b/test_data/sdk_compiled_pipelines/valid/critical/nested_pipeline_opt_inputs_parent_level_compiled.yaml new file mode 100644 index 00000000000..e8eeeb92fd3 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/nested_pipeline_opt_inputs_parent_level_compiled.yaml @@ -0,0 +1,377 @@ +# PIPELINE DEFINITION +# Name: nested-pipeline-opt-inputs-parent-level +# Inputs: +# inputBool: bool [Default: True] +# inputInt: int [Default: 1.0] +# inputStr: str [Default: 'Input - parent pipeline'] +components: + comp-component-bool-default: + executorLabel: exec-component-bool-default + inputDefinitions: + parameters: + componentInput: + defaultValue: false + isOptional: true + parameterType: BOOLEAN + comp-component-int-default: + executorLabel: exec-component-int-default + inputDefinitions: + parameters: + componentInput: + defaultValue: 0.0 + isOptional: true + parameterType: NUMBER_INTEGER + comp-component-nil-bool-default: + executorLabel: exec-component-nil-bool-default + inputDefinitions: + parameters: + componentInput: + isOptional: true + parameterType: BOOLEAN + comp-component-nil-int-default: + executorLabel: exec-component-nil-int-default + inputDefinitions: + parameters: + componentInput: + isOptional: true + parameterType: NUMBER_INTEGER + comp-component-nil-str-default: + executorLabel: exec-component-nil-str-default + inputDefinitions: + parameters: + componentInput: + isOptional: true + parameterType: STRING + comp-component-str-default: + executorLabel: exec-component-str-default + inputDefinitions: + parameters: + componentInput: + defaultValue: Input - component + isOptional: true + parameterType: STRING + comp-nested-pipeline-nil-defaults: + dag: + tasks: + component-nil-bool-default: + cachingOptions: {} + componentRef: + name: comp-component-nil-bool-default + inputs: + parameters: + componentInput: + componentInputParameter: nestedInputBool + taskInfo: + name: component-nil-bool-default + component-nil-int-default: + cachingOptions: {} + componentRef: + name: comp-component-nil-int-default + inputs: + parameters: + componentInput: + componentInputParameter: nestedInputInt + taskInfo: + name: component-nil-int-default + component-nil-str-default: + cachingOptions: {} + componentRef: + name: comp-component-nil-str-default + inputs: + parameters: + componentInput: + componentInputParameter: nestedInputStr + taskInfo: + name: component-nil-str-default + inputDefinitions: + parameters: + nestedInputBool: + isOptional: true + parameterType: BOOLEAN + 
nestedInputInt: + isOptional: true + parameterType: NUMBER_INTEGER + nestedInputStr: + defaultValue: Input - nested pipeline + isOptional: true + parameterType: STRING + comp-nested-pipeline-non-nil-defaults: + dag: + tasks: + component-bool-default: + cachingOptions: {} + componentRef: + name: comp-component-bool-default + inputs: + parameters: + componentInput: + componentInputParameter: nestedInputBool + taskInfo: + name: component-bool-default + component-int-default: + cachingOptions: {} + componentRef: + name: comp-component-int-default + inputs: + parameters: + componentInput: + componentInputParameter: nestedInputInt + taskInfo: + name: component-int-default + component-str-default: + cachingOptions: {} + componentRef: + name: comp-component-str-default + inputs: + parameters: + componentInput: + componentInputParameter: nestedInputStr + taskInfo: + name: component-str-default + inputDefinitions: + parameters: + nestedInputBool: + defaultValue: false + isOptional: true + parameterType: BOOLEAN + nestedInputInt: + defaultValue: 0.0 + isOptional: true + parameterType: NUMBER_INTEGER + nestedInputStr: + defaultValue: Input - nested pipeline + isOptional: true + parameterType: STRING +deploymentSpec: + executors: + exec-component-bool-default: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_bool_default + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_bool_default(componentInput: bool = False):\n if\ + \ componentInput != True:\n raise ValueError(f\"componentInput should\ + \ be True but is {componentInput}\")\n\n" + image: python:3.9 + exec-component-int-default: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_int_default + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_int_default(componentInput: int = 0):\n if componentInput\ + \ != 1:\n raise ValueError(f\"componentInput should be 1 but is {componentInput}\"\ + )\n\n" + image: python:3.9 + exec-component-nil-bool-default: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_nil_bool_default + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_nil_bool_default(componentInput: bool = None):\n \ + \ if componentInput != True:\n raise ValueError(f\"componentInput\ + \ should be True but is {componentInput}\")\n\n" + image: python:3.9 + exec-component-nil-int-default: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_nil_int_default + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_nil_int_default(componentInput: int = None):\n if\ + \ componentInput != 1:\n raise ValueError(f\"componentInput should\ + \ be 1 but is {componentInput}\")\n\n" + image: python:3.9 + exec-component-nil-str-default: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_nil_str_default + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_nil_str_default(componentInput: str = None):\n if\ + \ componentInput != 'Input - parent pipeline':\n raise ValueError(f\"\ + componentInput should be 'Input - parent pipeline' but is {componentInput}\"\ + )\n\n" + image: python:3.9 + exec-component-str-default: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_str_default + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_str_default(componentInput: str = 'Input - component'):\n\ + \ if componentInput != 'Input - parent pipeline':\n raise ValueError(f\"\ + componentInput should be 'Input - parent pipeline' but is {componentInput}\"\ + )\n\n" + image: python:3.9 +pipelineInfo: + name: nested-pipeline-opt-inputs-parent-level +root: + dag: + tasks: + nested-pipeline-nil-defaults: + cachingOptions: {} + componentRef: + name: comp-nested-pipeline-nil-defaults + inputs: + parameters: + nestedInputBool: + componentInputParameter: inputBool + nestedInputInt: + componentInputParameter: inputInt + nestedInputStr: + componentInputParameter: inputStr + taskInfo: + name: nested-pipeline-nil-defaults + nested-pipeline-non-nil-defaults: + cachingOptions: {} + componentRef: + name: comp-nested-pipeline-non-nil-defaults + inputs: + parameters: + nestedInputBool: + componentInputParameter: inputBool + nestedInputInt: + componentInputParameter: inputInt + nestedInputStr: + componentInputParameter: inputStr + taskInfo: + name: nested-pipeline-non-nil-defaults + inputDefinitions: + parameters: + inputBool: + defaultValue: true + isOptional: true + parameterType: BOOLEAN + inputInt: + defaultValue: 1.0 + isOptional: true + parameterType: NUMBER_INTEGER + inputStr: + defaultValue: Input - parent pipeline + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.2 diff --git a/test_data/sdk_compiled_pipelines/valid/critical/notebook_component_mixed.py b/test_data/sdk_compiled_pipelines/valid/critical/notebook_component_mixed.py new file mode 100644 index 00000000000..02b5bc43d99 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/notebook_component_mixed.py @@ -0,0 +1,66 @@ +import json +import os + +from kfp import dsl, compiler + + +NB_DIR = os.path.join(os.path.dirname(__file__), "notebooks") + + +@dsl.component +def preprocess(text: str, dataset: dsl.Output[dsl.Dataset]): + import re + + cleaned_text = re.sub(r"\s+", " ", text).strip() + with open(dataset.path, "w", encoding="utf-8") as f: + f.write(cleaned_text) + + +@dsl.notebook_component( + notebook_path=os.path.join(NB_DIR, "nb_train_with_params.ipynb") +) +def train_model(cleaned_text: dsl.Input[dsl.Dataset], model: dsl.Output[dsl.Model]): + import shutil + + with open(cleaned_text.path, "r", encoding="utf-8") as f: + cleaned_text = f.read() + + dsl.run_notebook(cleaned_text=cleaned_text) + + # Notebook writes its model into /tmp/kfp_nb_outputs/model.txt + shutil.copy("/tmp/kfp_nb_outputs/model.txt", model.path) + + with open(model.path, "r", encoding="utf-8") as f: + model_text = f.read() + + assert model_text == cleaned_text.upper() + + +@dsl.notebook_component(notebook_path=os.path.join(NB_DIR, "nb_eval_metrics.ipynb")) +def evaluate_model(model_text: dsl.Input[dsl.Model], metrics: dsl.Output[dsl.Metrics]): + import json + + with open(model_text.path, "r", 
encoding="utf-8") as f: + model_text = f.read() + + dsl.run_notebook(model_text=model_text) + with open("/tmp/kfp_nb_outputs/metrics.json", "r", encoding="utf-8") as f: + metrics_dict = json.load(f) + + assert metrics_dict == {"score": float(len(model_text))} + + for metric_name, metric_value in metrics_dict.items(): + metrics.log_metric(metric_name, metric_value) + + +@dsl.pipeline(name="nb-mixed") +def pipeline(text: str = "Hello world"): + p = preprocess(text=text).set_caching_options(False) + t = train_model(cleaned_text=p.output).set_caching_options(False) + evaluate_model(model_text=t.output).set_caching_options(False) + + +if __name__ == "__main__": + compiler.Compiler().compile( + pipeline_func=pipeline, package_path=__file__.replace(".py", ".yaml") + ) diff --git a/test_data/sdk_compiled_pipelines/valid/critical/notebook_component_simple.py b/test_data/sdk_compiled_pipelines/valid/critical/notebook_component_simple.py new file mode 100644 index 00000000000..c33a590bed7 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/notebook_component_simple.py @@ -0,0 +1,27 @@ +from kfp import dsl, compiler +import os + + +NB_DIR = os.path.join(os.path.dirname(__file__), "notebooks") + + +@dsl.notebook_component(notebook_path=os.path.join(NB_DIR, "nb_train_simple.ipynb")) +def run_train_notebook(text: str): + # text is not defined in the notebook but text2 is defined + dsl.run_notebook(text=text) + + with open("/tmp/kfp_nb_outputs/log.txt", "r", encoding="utf-8") as f: + log = f.read() + + assert log == text + " " + "default2" + + +@dsl.pipeline(name="nb-simple") +def pipeline(text: str = "hello"): + run_train_notebook(text=text).set_caching_options(False) + + +if __name__ == "__main__": + compiler.Compiler().compile( + pipeline_func=pipeline, package_path=__file__.replace(".py", ".yaml") + ) diff --git a/test_data/sdk_compiled_pipelines/valid/critical/notebook_component_simple.yaml b/test_data/sdk_compiled_pipelines/valid/critical/notebook_component_simple.yaml new file mode 100644 index 00000000000..1e226889802 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/notebook_component_simple.yaml @@ -0,0 +1,181 @@ +# PIPELINE DEFINITION +# Name: nb-simple +# Inputs: +# text: str [Default: 'hello'] +components: + comp-run-train-notebook: + executorLabel: exec-run-train-notebook + inputDefinitions: + parameters: + text: + parameterType: STRING +deploymentSpec: + executors: + exec-run-train-notebook: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - run_train_notebook + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'nbclient>=0.10,<1'\ + \ 'ipykernel>=6,<7' 'jupyter_client>=7,<9' && python3 -m pip install --quiet\ + \ --no-warn-script-location 'kfp==2.14.2' '--no-deps' 'typing-extensions>=3.7.4,<5;\ + \ python_version<\"3.9\"' && \"$0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\n__KFP_EMBEDDED_ARCHIVE_B64 = 'H4sIACap1mgC/+2VTW+jMBCGOedXWFxI1S7fJLRSpD3ucQ8r7WFTIQdMYgVsZMYKUdT/voZSKqpWe2pXu51HyIZ3PB/yaITrud7X77T7xmjBlPUu+I+8tft+FD+/93rgh0Fokc76AHQLVJn01uckTEkNvGabYJ2k6W3qJ4kb366DIEoWFvLfI3YZKMpF1vK6qZjLm7PYvcP8r2Iz45EfJUHyvD8SRKFvBYm/SpIwWvlrM//xah1ZxP/I+a8bRQ/V2+f+ZP9HuSwIsXNWVa19R36ZD0IuwzrKGZwbZkx2Lgtm3zyZWMdyDVyKLJdagDkgdFVN5poBLShQo18eJlVqaDQMie4nsZVa5WxKPmjAOgjJhjgFK6muIHTs0Xg/7GPIv1+omRmpgMh2K6aUvX/r1vTICq7apeNB3XjHssnMqI2BnRvCOt5CJo+bH0qzq7l7o7iA5db+qThwsScgyStBvEruXehga79wP3E4ENkw8WruJzdTg3PqCxHmwkyWjaOh/JI6V4S2pLybh+yX0j2Zetiybw65Jlu7f67J0Ku+gnmHzDpc3eyGB5NdUbHXdM8yLko5ycYgaD10sDnDQY4B+0BDY2yxK6Wqad/CeCZkNRdSGTlcPOA/C0EQBEEQBEEQBEEQBEEQBEEQBEEQBPmU/AbrYhlkACgAAA=='\n\ + __KFP_NOTEBOOK_REL_PATH = 'nb_train_simple.ipynb'\n\nimport base64 as __kfp_b64\n\ + import gzip as __kfp_gzip\nimport io as __kfp_io\nimport os as __kfp_os\n\ + import sys as __kfp_sys\nimport tarfile as __kfp_tarfile\nimport tempfile\ + \ as __kfp_tempfile\nfrom nbclient import NotebookClient\n\n# Extract embedded\ + \ archive at import time to ensure sys.path and globals are set\nprint('[KFP]\ + \ Extracting embedded notebook archive...', flush=True)\n__kfp_tmpdir =\ + \ __kfp_tempfile.TemporaryDirectory()\n__KFP_EMBEDDED_ASSET_DIR = __kfp_tmpdir.name\n\ + try:\n __kfp_bytes = __kfp_b64.b64decode(__KFP_EMBEDDED_ARCHIVE_B64.encode('ascii'))\n\ + \ with __kfp_tarfile.open(fileobj=__kfp_io.BytesIO(__kfp_bytes), mode='r:gz')\ + \ as __kfp_tar:\n __kfp_tar.extractall(path=__KFP_EMBEDDED_ASSET_DIR)\n\ + \ print(f'[KFP] Notebook archive extracted to: {__KFP_EMBEDDED_ASSET_DIR}',\ + \ flush=True)\nexcept Exception as __kfp_e:\n raise RuntimeError(f'Failed\ + \ to extract embedded notebook archive: {__kfp_e}')\n\n# Always prepend\ + \ the extracted directory to sys.path for import resolution\nif __KFP_EMBEDDED_ASSET_DIR\ + \ not in __kfp_sys.path:\n __kfp_sys.path.insert(0, __KFP_EMBEDDED_ASSET_DIR)\n\ + \ print(f'[KFP] Added notebook archive directory to Python path', flush=True)\n\ + \n# Optional convenience for generic embedded file variable name\n__KFP_EMBEDDED_ASSET_FILE\ + \ = __kfp_os.path.join(__KFP_EMBEDDED_ASSET_DIR, __KFP_NOTEBOOK_REL_PATH)\n\ + \n\nclass KFPStreamingNotebookClient(NotebookClient):\n # Streams outputs\ + \ in real-time by emitting outputs during message processing.\n def process_message(self,\ + \ msg, cell, cell_index):\n # Call the parent implementation to handle\ + \ the message normally\n output = super().process_message(msg, cell,\ + \ cell_index)\n\n # If an output was created, stream it immediately\n\ + \ if output is not None:\n _kfp_stream_single_output(output,\ + \ cell_index)\n\n return output\n\ndef __kfp_write_parameters_cell(nb,\ + \ params):\n \"\"\"Inject parameters following 
Papermill semantics.\n\ + \n - If a cell tagged with 'parameters' exists, insert an overriding\n\ + \ 'injected-parameters' cell immediately after it.\n - Otherwise,\ + \ insert the 'injected-parameters' cell at the top.\n \"\"\"\n import\ + \ json\n\n import nbformat\n\n if not params:\n return\n\n\ + \ # Build the injected parameters cell\n assignments = []\n for\ + \ key, value in params.items():\n serialized = json.dumps(value)\n\ + \ assignments.append(key + ' = json.loads(' + repr(serialized) +\ + \ ')')\n source = 'import json\\n' + '\\n'.join(assignments) + '\\n'\n\ + \ cell = nbformat.v4.new_code_cell(source=source)\n cell.metadata.setdefault('tags',\ + \ [])\n if 'injected-parameters' not in cell.metadata['tags']:\n \ + \ cell.metadata['tags'].append('injected-parameters')\n\n # Locate\ + \ the first 'parameters' tagged cell\n insert_idx = 0\n for idx, existing\ + \ in enumerate(nb.get('cells', [])):\n if existing.get('cell_type')\ + \ != 'code':\n continue\n tags = existing.get('metadata',\ + \ {}).get('tags', []) or []\n if 'parameters' in tags:\n \ + \ insert_idx = idx + 1\n break\n\n nb.cells.insert(insert_idx,\ + \ cell)\n\ndef _kfp_stream_single_output(output, cell_idx):\n \"\"\"\ + Stream a single notebook output immediately during execution.\n\n Prints\ + \ stdout/stderr and text/plain display outputs to the console so users\n\ + \ see cell output as it happens (no need to wait until the notebook finishes).\n\ + \ \"\"\"\n import sys\n output_type = output.get('output_type')\n\ + \n if output_type == 'stream':\n text = output.get('text', '')\n\ + \ if text:\n try:\n print(f'[nb cell {cell_idx}\ + \ stream] ', end='', flush=False)\n except Exception:\n \ + \ pass\n print(text, end='' if text.endswith('\\n')\ + \ else '\\n', flush=True)\n elif output_type == 'error':\n for\ + \ line in output.get('traceback', []):\n print(line, file=sys.stderr,\ + \ flush=True)\n else:\n # Handle display_data and execute_result\n\ + \ data = output.get('data', {})\n if 'text/plain' in data:\n\ + \ print(data['text/plain'], flush=True)\n elif 'application/json'\ + \ in data:\n try:\n import json as __kfp_json\n\ + \ parsed = data['application/json']\n # Some\ + \ kernels send JSON as string; try to parse if needed\n if\ + \ isinstance(parsed, str):\n try:\n \ + \ parsed = __kfp_json.loads(parsed)\n except Exception:\n\ + \ pass\n print(__kfp_json.dumps(parsed,\ + \ indent=2, ensure_ascii=False), flush=True)\n except Exception:\n\ + \ # Fallback to raw\n print(str(data.get('application/json')),\ + \ flush=True)\n elif 'text/markdown' in data:\n # Print\ + \ markdown as-is; frontends may render, logs will show raw markdown\n \ + \ print(data['text/markdown'], flush=True)\n\ndef kfp_run_notebook(**kwargs):\n\ + \ \"\"\"Execute the embedded notebook with injected parameters.\n\n \ + \ Parameters provided via kwargs are injected into the notebook following\n\ + \ Papermill semantics (after a parameters cell if present, otherwise\ + \ at top).\n Execution uses a Python kernel; nbclient and ipykernel must\ + \ be available at\n runtime (installed via packages_to_install for notebook\ + \ components).\n \"\"\"\n import os\n import subprocess\n import\ + \ sys\n\n from nbclient import NotebookClient\n import nbformat\n\n\ + \ # Ensure a usable 'python3' kernel is present; install kernelspec if\ + \ missing\n print('[KFP Notebook] Checking for Python kernel...', flush=True)\n\ + \ try:\n from jupyter_client.kernelspec import KernelSpecManager\ + \ # type: ignore\n ksm = KernelSpecManager()\n have_py3 
=\ + \ 'python3' in ksm.find_kernel_specs()\n if not have_py3:\n \ + \ print(\n '[KFP Notebook] Python3 kernel not found,\ + \ installing...',\n flush=True)\n try:\n \ + \ subprocess.run([\n sys.executable, '-m',\ + \ 'ipykernel', 'install', '--user',\n '--name', 'python3',\ + \ '--display-name', 'Python 3'\n ],\n \ + \ check=True,\n stdout=subprocess.DEVNULL,\n\ + \ stderr=subprocess.DEVNULL)\n \ + \ print(\n '[KFP Notebook] Python3 kernel installed\ + \ successfully',\n flush=True)\n except subprocess.CalledProcessError\ + \ as e:\n raise RuntimeError(\n \"Failed\ + \ to install 'python3' kernelspec for ipykernel. \"\n \ + \ \"Ensure ipykernel is available in the environment or include it via\ + \ packages_to_install. \"\n f\"Error: {e}\") from e\n\ + \ else:\n print('[KFP Notebook] Python3 kernel found',\ + \ flush=True)\n except ImportError as e:\n raise RuntimeError(\n\ + \ \"jupyter_client is not available. Ensure it's installed in\ + \ the environment or include it via packages_to_install. \"\n \ + \ f\"Error: {e}\") from e\n\n nb_path = os.path.join(__KFP_EMBEDDED_ASSET_DIR,\ + \ __KFP_NOTEBOOK_REL_PATH)\n\n try:\n nb = nbformat.read(nb_path,\ + \ as_version=4)\n except Exception as e:\n raise RuntimeError(\n\ + \ f'Failed to read notebook {nb_path}. Ensure it is a valid Jupyter\ + \ notebook. Error: {e}'\n ) from e\n\n try:\n __kfp_write_parameters_cell(nb,\ + \ kwargs)\n print(\n f'[KFP Notebook] Executing notebook\ + \ with {len(nb.get(\"cells\", []))} cells',\n flush=True)\n\n\ + \ # Use our custom streaming client for real-time output (defined\ + \ in the\n # generated ephemeral source)\n client = KFPStreamingNotebookClient(\n\ + \ nb,\n timeout=None,\n allow_errors=False,\n\ + \ store_widget_state=False,\n kernel_name='python3')\n\ + \ client.execute(cwd=__KFP_EMBEDDED_ASSET_DIR)\n\n print('[KFP\ + \ Notebook] Execution complete', flush=True)\n\n except Exception as\ + \ e:\n raise RuntimeError(f'Notebook execution failed. 
Error: {e}')\ + \ from e\n\n\n# Bind helper into dsl namespace so user code can call dsl.run_notebook(...)\n\ + dsl.run_notebook = kfp_run_notebook\n\n\ndef run_train_notebook(text: str):\n\ + \ # text is not defined in the notebook but text2 is defined\n dsl.run_notebook(text=text)\n\ + \n with open(\"/tmp/kfp_nb_outputs/log.txt\", \"r\", encoding=\"utf-8\"\ + ) as f:\n log = f.read()\n\n assert log == text + \" \" + \"default2\"\ + \n\n" + image: python:3.9 +pipelineInfo: + name: nb-simple +root: + dag: + tasks: + run-train-notebook: + cachingOptions: {} + componentRef: + name: comp-run-train-notebook + inputs: + parameters: + text: + componentInputParameter: text + taskInfo: + name: run-train-notebook + inputDefinitions: + parameters: + text: + defaultValue: hello + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.2 diff --git a/test_data/sdk_compiled_pipelines/valid/critical/notebooks/README.md b/test_data/sdk_compiled_pipelines/valid/critical/notebooks/README.md new file mode 100644 index 00000000000..bdd29e60135 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/notebooks/README.md @@ -0,0 +1,6 @@ +# Sample Notebooks + +These are simple notebooks leveraged by the sample pipelines to illustrate native Jupyter Notebook integration: + +- [notebook_component_mixed.py](../notebook_component_mixed.py) +- [notebook_component_simple.py](../notebook_component_simple.py) diff --git a/test_data/sdk_compiled_pipelines/valid/critical/notebooks/nb_eval_metrics.ipynb b/test_data/sdk_compiled_pipelines/valid/critical/notebooks/nb_eval_metrics.ipynb new file mode 100644 index 00000000000..f65ad25f4f8 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/notebooks/nb_eval_metrics.ipynb @@ -0,0 +1,38 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [ + "parameters" + ] + }, + "outputs": [], + "source": [ + "model_text = ''\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os, json\n", + "os.makedirs('/tmp/kfp_nb_outputs', exist_ok=True)\n", + "metrics = {'score': float(len(model_text))}\n", + "print(\"Got score \", metrics[\"score\"])\n", + "with open('/tmp/kfp_nb_outputs/metrics.json', 'w', encoding='utf-8') as f:\n", + " json.dump(metrics, f)\n" + ] + } + ], + "metadata": { + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/test_data/sdk_compiled_pipelines/valid/critical/notebooks/nb_train_simple.ipynb b/test_data/sdk_compiled_pipelines/valid/critical/notebooks/nb_train_simple.ipynb new file mode 100644 index 00000000000..40b2c131e27 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/notebooks/nb_train_simple.ipynb @@ -0,0 +1,33 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "text2 = 'default2'" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "os.makedirs('/tmp/kfp_nb_outputs', exist_ok=True)\n", + "print(\"Writing to /tmp/kfp_nb_outputs/log.txt\")\n", + "with open('/tmp/kfp_nb_outputs/log.txt', 'w', encoding='utf-8') as f:\n", + " f.write(text + \" \" + text2)\n" + ] + } + ], + "metadata": { + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/test_data/sdk_compiled_pipelines/valid/critical/notebooks/nb_train_with_params.ipynb 
b/test_data/sdk_compiled_pipelines/valid/critical/notebooks/nb_train_with_params.ipynb new file mode 100644 index 00000000000..044e1a2d900 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/notebooks/nb_train_with_params.ipynb @@ -0,0 +1,48 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [ + "parameters" + ] + }, + "outputs": [], + "source": [ + "cleaned_text = 'my cleaned text'\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "os.makedirs('/tmp/kfp_nb_outputs', exist_ok=True)\n", + "print(\"Writing to /tmp/kfp_nb_outputs/model.txt\")\n", + "with open('/tmp/kfp_nb_outputs/model.txt', 'w', encoding='utf-8') as f:\n", + " f.write(cleaned_text.upper())\n" + ] + }, + { + "cell_type": "raw", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "\n" + ] + } + ], + "metadata": { + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/samples/v2/parallel_after_dependency.py b/test_data/sdk_compiled_pipelines/valid/critical/parallel_for_after_dependency.py similarity index 100% rename from samples/v2/parallel_after_dependency.py rename to test_data/sdk_compiled_pipelines/valid/critical/parallel_for_after_dependency.py diff --git a/test_data/sdk_compiled_pipelines/valid/critical/parallel_for_after_dependency.yaml b/test_data/sdk_compiled_pipelines/valid/critical/parallel_for_after_dependency.yaml new file mode 100644 index 00000000000..761f788b207 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/parallel_for_after_dependency.yaml @@ -0,0 +1,185 @@ +# PIPELINE DEFINITION +# Name: loop-with-after-dependency-set +components: + comp-for-loop-2: + dag: + tasks: + print-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op + inputs: + parameters: + message: + runtimeValue: + constant: foo + taskInfo: + name: print-op + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-1: + parameterType: NUMBER_INTEGER + comp-print-op: + executorLabel: exec-print-op + inputDefinitions: + parameters: + message: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-op-2: + executorLabel: exec-print-op-2 + inputDefinitions: + parameters: + message: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-op-3: + executorLabel: exec-print-op-3 + inputDefinitions: + parameters: + message: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING +deploymentSpec: + executors: + exec-print-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str) -> str:\n print(message)\n return\ + \ message\n\n" + image: python:3.9 + exec-print-op-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str) -> str:\n print(message)\n return\ + \ message\n\n" + image: python:3.9 + exec-print-op-3: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str) -> str:\n print(message)\n return\ + \ message\n\n" + image: python:3.9 +pipelineInfo: + name: loop-with-after-dependency-set +root: + dag: + tasks: + for-loop-2: + componentRef: + name: comp-for-loop-2 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-1 + items: + raw: '[1, 2, 3]' + taskInfo: + name: for-loop-2 + print-op-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-2 + dependentTasks: + - for-loop-2 + inputs: + parameters: + message: + runtimeValue: + constant: bar + taskInfo: + name: print-op-2 + print-op-3: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-3 + dependentTasks: + - for-loop-2 + inputs: + parameters: + message: + runtimeValue: + constant: baz + taskInfo: + name: print-op-3 +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/samples/v2/subdagio/parameter_cache.py b/test_data/sdk_compiled_pipelines/valid/critical/parameter_cache.py similarity index 100% rename from samples/v2/subdagio/parameter_cache.py rename to test_data/sdk_compiled_pipelines/valid/critical/parameter_cache.py diff --git a/test_data/sdk_compiled_pipelines/valid/critical/parameter_cache.yaml b/test_data/sdk_compiled_pipelines/valid/critical/parameter_cache.yaml new file mode 100644 index 00000000000..30032a66ea4 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/parameter_cache.yaml @@ -0,0 +1,142 @@ +# PIPELINE DEFINITION +# Name: parameter-cache-pipeline +components: + comp-core: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: core-comp + tasks: + core-comp: + cachingOptions: + enableCache: true + componentRef: + name: comp-core-comp + taskInfo: + name: core-comp + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-core-comp: + executorLabel: exec-core-comp + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-crust-comp: + executorLabel: exec-crust-comp + inputDefinitions: + parameters: + input: + parameterType: STRING + comp-mantle: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: core + tasks: + core: + cachingOptions: + enableCache: true + componentRef: + name: comp-core + taskInfo: + name: core + outputDefinitions: + parameters: + Output: + parameterType: STRING +deploymentSpec: + executors: + exec-core-comp: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - core_comp + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef core_comp() -> str:\n return 'foo'\n\n" + image: python:3.9 + exec-crust-comp: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - crust_comp + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef crust_comp(input: str):\n print('input :', input)\n\n" + image: python:3.9 +pipelineInfo: + name: parameter-cache-pipeline +root: + dag: + tasks: + crust-comp: + cachingOptions: + enableCache: true + componentRef: + name: comp-crust-comp + dependentTasks: + - mantle + inputs: + parameters: + input: + taskOutputParameter: + outputParameterKey: Output + producerTask: mantle + taskInfo: + name: crust-comp + mantle: + cachingOptions: + enableCache: true + componentRef: + name: comp-mantle + taskInfo: + name: mantle +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/samples/v2/subdagio/parameter_oneof.py b/test_data/sdk_compiled_pipelines/valid/critical/parameter_oneof.py similarity index 100% rename from samples/v2/subdagio/parameter_oneof.py rename to test_data/sdk_compiled_pipelines/valid/critical/parameter_oneof.py diff --git a/test_data/sdk_compiled_pipelines/valid/critical/parameter_oneof.yaml b/test_data/sdk_compiled_pipelines/valid/critical/parameter_oneof.yaml new file mode 100644 index 00000000000..bb508e500b0 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/parameter_oneof.yaml @@ -0,0 +1,331 @@ +# PIPELINE DEFINITION +# Name: parameter-oneof-pipeline +components: + comp-condition-2: + dag: + outputs: + parameters: + pipelinechannel--core-comp-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: core-comp + tasks: + core-comp: + cachingOptions: {} + componentRef: + name: comp-core-comp + inputs: + parameters: + input: + runtimeValue: + constant: Got heads! 
+ taskInfo: + name: core-comp + inputDefinitions: + parameters: + pipelinechannel--flip-coin-Output: + parameterType: STRING + outputDefinitions: + parameters: + pipelinechannel--core-comp-Output: + parameterType: STRING + comp-condition-3: + dag: + outputs: + parameters: + pipelinechannel--core-output-comp-output_key: + valueFromParameter: + outputParameterKey: output_key + producerSubtask: core-output-comp + tasks: + core-output-comp: + cachingOptions: {} + componentRef: + name: comp-core-output-comp + inputs: + parameters: + input: + runtimeValue: + constant: Got tails! + taskInfo: + name: core-output-comp + inputDefinitions: + parameters: + pipelinechannel--flip-coin-Output: + parameterType: STRING + outputDefinitions: + parameters: + pipelinechannel--core-output-comp-output_key: + parameterType: STRING + comp-condition-branches-1: + dag: + outputs: + parameters: + pipelinechannel--condition-branches-1-oneof-1: + valueFromOneof: + parameterSelectors: + - outputParameterKey: pipelinechannel--core-comp-Output + producerSubtask: condition-2 + - outputParameterKey: pipelinechannel--core-output-comp-output_key + producerSubtask: condition-3 + tasks: + condition-2: + componentRef: + name: comp-condition-2 + inputs: + parameters: + pipelinechannel--flip-coin-Output: + componentInputParameter: pipelinechannel--flip-coin-Output + taskInfo: + name: condition-2 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--flip-coin-Output'] + == 'heads' + condition-3: + componentRef: + name: comp-condition-3 + inputs: + parameters: + pipelinechannel--flip-coin-Output: + componentInputParameter: pipelinechannel--flip-coin-Output + taskInfo: + name: condition-3 + triggerPolicy: + condition: '!(inputs.parameter_values[''pipelinechannel--flip-coin-Output''] + == ''heads'')' + inputDefinitions: + parameters: + pipelinechannel--flip-coin-Output: + parameterType: STRING + outputDefinitions: + parameters: + pipelinechannel--condition-branches-1-oneof-1: + parameterType: STRING + comp-core: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: pipelinechannel--condition-branches-1-oneof-1 + producerSubtask: condition-branches-1 + tasks: + condition-branches-1: + componentRef: + name: comp-condition-branches-1 + dependentTasks: + - flip-coin + inputs: + parameters: + pipelinechannel--flip-coin-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: flip-coin + taskInfo: + name: condition-branches-1 + flip-coin: + cachingOptions: {} + componentRef: + name: comp-flip-coin + taskInfo: + name: flip-coin + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-core-comp: + executorLabel: exec-core-comp + inputDefinitions: + parameters: + input: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-core-output-comp: + executorLabel: exec-core-output-comp + inputDefinitions: + parameters: + input: + parameterType: STRING + outputDefinitions: + parameters: + output_key: + parameterType: STRING + comp-crust-comp: + executorLabel: exec-crust-comp + inputDefinitions: + parameters: + input: + parameterType: STRING + comp-flip-coin: + executorLabel: exec-flip-coin + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-mantle: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: core + tasks: + core: + cachingOptions: {} + componentRef: + name: comp-core + taskInfo: + name: core + outputDefinitions: + parameters: + 
Output: + parameterType: STRING +deploymentSpec: + executors: + exec-core-comp: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - core_comp + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef core_comp(input: str) -> str:\n print('input :', input)\n\ + \ return input\n\n" + image: python:3.9 + exec-core-output-comp: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - core_output_comp + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef core_output_comp(input: str, output_key: dsl.OutputPath(str)):\n\ + \ print('input :', input)\n with open(output_key, 'w') as f:\n \ + \ f.write(input)\n\n" + image: python:3.9 + exec-crust-comp: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - crust_comp + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef crust_comp(input: str):\n print('input :', input)\n\n" + image: python:3.9 + exec-flip-coin: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - flip_coin + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef flip_coin() -> str:\n import random\n return 'heads' if\ + \ random.randint(0, 1) == 0 else 'tails'\n\n" + image: python:3.9 +pipelineInfo: + name: parameter-oneof-pipeline +root: + dag: + tasks: + crust-comp: + cachingOptions: {} + componentRef: + name: comp-crust-comp + dependentTasks: + - mantle + inputs: + parameters: + input: + taskOutputParameter: + outputParameterKey: Output + producerTask: mantle + taskInfo: + name: crust-comp + mantle: + cachingOptions: {} + componentRef: + name: comp-mantle + taskInfo: + name: mantle +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/parameters_simple.py b/test_data/sdk_compiled_pipelines/valid/critical/parameters_simple.py similarity index 100% rename from sdk/python/test_data/pipelines/parallelfor_fan_in/parameters_simple.py rename to test_data/sdk_compiled_pipelines/valid/critical/parameters_simple.py diff --git a/test_data/sdk_compiled_pipelines/valid/critical/parameters_simple.yaml b/test_data/sdk_compiled_pipelines/valid/critical/parameters_simple.yaml new file mode 100644 index 00000000000..962bc68ed06 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/parameters_simple.yaml @@ -0,0 +1,187 @@ +# PIPELINE DEFINITION +# Name: math-pipeline +# Outputs: +# Output: list +components: + comp-add: + executorLabel: exec-add + inputDefinitions: + parameters: + nums: + parameterType: LIST + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-add-container: + executorLabel: exec-add-container + inputDefinitions: + parameters: + nums: + parameterType: LIST + outputDefinitions: + parameters: + sum: + parameterType: NUMBER_INTEGER + comp-double: + executorLabel: exec-double + inputDefinitions: + parameters: + num: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-for-loop-2: + dag: + outputs: + parameters: + pipelinechannel--double-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: double + tasks: + double: + cachingOptions: + enableCache: true + componentRef: + name: comp-double + inputs: + parameters: + num: + componentInputParameter: pipelinechannel--loop-item-param-1 + taskInfo: + name: double + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-1: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + pipelinechannel--double-Output: + parameterType: LIST +deploymentSpec: + executors: + exec-add: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - add + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef add(nums: List[int]) -> int:\n return sum(nums)\n\n" + image: python:3.9 + exec-add-container: + container: + args: + - "\n set -ex\n mkdir -p $(dirname {{$.outputs.parameters['sum'].output_file}})\n\ + \ echo {{$.inputs.parameters['nums']}} | jq 'add' > {{$.outputs.parameters['sum'].output_file}}\n\ + \ " + command: + - sh + - -c + image: stedolan/jq + exec-double: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - double + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" + image: python:3.9 +pipelineInfo: + name: math-pipeline +root: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: pipelinechannel--double-Output + producerSubtask: for-loop-2 + tasks: + add: + cachingOptions: + enableCache: true + componentRef: + name: comp-add + dependentTasks: + - for-loop-2 + inputs: + parameters: + nums: + taskOutputParameter: + outputParameterKey: pipelinechannel--double-Output + producerTask: for-loop-2 + taskInfo: + name: add + add-container: + cachingOptions: + enableCache: true + componentRef: + name: comp-add-container + dependentTasks: + - for-loop-2 + inputs: + parameters: + nums: + taskOutputParameter: + outputParameterKey: pipelinechannel--double-Output + producerTask: for-loop-2 + taskInfo: + name: add-container + for-loop-2: + componentRef: + name: comp-for-loop-2 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-1 + items: + raw: '[1, 2, 3]' + taskInfo: + name: for-loop-2 + outputDefinitions: + parameters: + Output: + parameterType: LIST +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_artifact_upload_download.py b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_artifact_upload_download.py new file mode 100644 index 00000000000..c2c525deb32 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_artifact_upload_download.py @@ -0,0 +1,48 @@ +# Copyright 2022 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Pipeline that downloads a Hugging Face dataset, uploads it as an artifact, and prints its metadata."""
+
+from kfp import compiler
+from kfp import dsl
+from kfp.dsl import Dataset, Output
+
+@dsl.component(packages_to_install=["datasets==4.0.0"])
+def download_dataset_and_upload_as_artifact(dataset_repo: str, dataset_name: str, output_dataset: Output[Dataset]):
+    from datasets import load_dataset
+    # Load data set from hugging face
+    ds = load_dataset(dataset_repo, dataset_name)
+    print("Downloaded Hugging Face data")
+    print(f"Now saving to {output_dataset.path}")
+    ds.save_to_disk(output_dataset.path)
+    print(f"Saved to {output_dataset.path}")
+
+
+@dsl.component
+def print_dataset_info(dataset: Dataset):
+    print('Information about the artifact')
+    print('Name:', dataset.name)
+    print('URI:', dataset.uri)
+    assert "download-dataset-and-upload-as-artifact" in dataset.uri, "The URI of the downloaded artifact does not match the expected function's name that generated it"
+
+
+
+@dsl.pipeline(name='pipeline-with-datasets', description="Download Hugging Face Data Set, upload it as an artifact and print its metadata")
+def my_pipeline(dataset_repo: str = "google/frames-benchmark", dataset_name: str = ""):
+    downloaded_dataset = download_dataset_and_upload_as_artifact(dataset_repo=dataset_repo, dataset_name=dataset_name)
+    print_dataset_info(dataset=downloaded_dataset.output)
+
+if __name__ == '__main__':
+    compiler.Compiler().compile(
+        pipeline_func=my_pipeline,
+        package_path=__file__.replace('.py', '.yaml'))
diff --git a/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_artifact_upload_download.yaml b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_artifact_upload_download.yaml
new file mode 100644
index 00000000000..baaa51fbc4a
--- /dev/null
+++ b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_artifact_upload_download.yaml
@@ -0,0 +1,144 @@
+# PIPELINE DEFINITION
+# Name: pipeline-with-datasets
+# Description: Download Hugging Face Data Set, upload it as an artifact and print its metadata
+# Inputs:
+#    dataset_name: str [Default: '']
+#    dataset_repo: str [Default: 'google/frames-benchmark']
+components:
+  comp-download-dataset-and-upload-as-artifact:
+    executorLabel: exec-download-dataset-and-upload-as-artifact
+    inputDefinitions:
+      parameters:
+        dataset_name:
+          parameterType: STRING
+        dataset_repo:
+          parameterType: STRING
+    outputDefinitions:
+      artifacts:
+        output_dataset:
+          artifactType:
+            schemaTitle: system.Dataset
+            schemaVersion: 0.0.1
+  comp-print-dataset-info:
+    executorLabel: exec-print-dataset-info
+    inputDefinitions:
+      artifacts:
+        dataset:
+          artifactType:
+            schemaTitle: system.Dataset
+            schemaVersion: 0.0.1
+deploymentSpec:
+  executors:
+    exec-download-dataset-and-upload-as-artifact:
+      container:
+        args:
+        - --executor_input
+        - '{{$}}'
+        - --function_to_execute
+        - download_dataset_and_upload_as_artifact
+        command:
+        - sh
+        - -c
+        - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'datasets==4.0.0'\ + \ && python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef download_dataset_and_upload_as_artifact(dataset_repo: str, dataset_name:\ + \ str, output_dataset: Output[Dataset]):\n from datasets import load_dataset\n\ + \ # Load data set from hugging face\n ds = load_dataset(dataset_repo,\ + \ dataset_name)\n print(\"Downloaded Hugging Face data\")\n print(f\"\ + Now saving to {output_dataset.path}\")\n ds.save_to_disk(output_dataset.path)\n\ + \ print(f\"Saved to {output_dataset.path}\")\n\n" + image: python:3.9 + exec-print-dataset-info: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_dataset_info + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_dataset_info(dataset: Dataset):\n print('Information\ + \ about the artifact')\n print('Name:', dataset.name)\n print('URI:',\ + \ dataset.uri)\n assert \"download-dataset-and-upload-as-artifact\" in\ + \ dataset.uri, \"The URI of the downloaded artifact does not match the expected\ + \ function's name that generated it\"\n\n" + image: python:3.9 +pipelineInfo: + description: Download Hugging Face Data Set, upload it as an artifact and print + its metadata + name: pipeline-with-datasets +root: + dag: + tasks: + download-dataset-and-upload-as-artifact: + cachingOptions: + enableCache: true + componentRef: + name: comp-download-dataset-and-upload-as-artifact + inputs: + parameters: + dataset_name: + componentInputParameter: dataset_name + dataset_repo: + componentInputParameter: dataset_repo + taskInfo: + name: download-dataset-and-upload-as-artifact + print-dataset-info: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-dataset-info + dependentTasks: + - download-dataset-and-upload-as-artifact + inputs: + artifacts: + dataset: + taskOutputArtifact: + outputArtifactKey: output_dataset + producerTask: download-dataset-and-upload-as-artifact + taskInfo: + name: print-dataset-info + inputDefinitions: + parameters: + dataset_name: + defaultValue: '' + isOptional: true + parameterType: STRING + dataset_repo: + defaultValue: google/frames-benchmark + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git 
a/sdk/python/test_data/pipelines/pipeline_with_env.py b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_env.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_env.py rename to test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_env.py diff --git a/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_env.yaml b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_env.yaml new file mode 100644 index 00000000000..c0fb8a775ed --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_env.yaml @@ -0,0 +1,84 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-env +components: + comp-print-env: + executorLabel: exec-print-env + comp-print-env-op: + executorLabel: exec-print-env-op +deploymentSpec: + executors: + exec-print-env: + container: + command: + - sh + - -c + - 'set -e -x + + echo "$ENV1" + + echo "$ENV2" + + echo "$ENV3" + + ' + env: + - name: ENV1 + value: val0 + - name: ENV2 + value: val2 + - name: ENV3 + value: val3 + image: alpine + exec-print-env-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_env_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_env_op():\n import os\n print('ENV1', os.environ.get('ENV1'))\n\ + \ print('ENV2', os.environ.get('ENV2'))\n\n" + env: + - name: ENV1 + value: val1 + image: python:3.9 +pipelineInfo: + name: pipeline-with-env +root: + dag: + tasks: + print-env: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-env + taskInfo: + name: print-env + print-env-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-env-op + taskInfo: + name: print-env-op +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_input_status_state.py b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_input_status_state.py new file mode 100644 index 00000000000..9bc407d2d2e --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_input_status_state.py @@ -0,0 +1,20 @@ + +from kfp import dsl + + +@dsl.component +def echo_state(status: dsl.PipelineTaskFinalStatus): + assert(status.state == 'COMPLETE') + assert('status-state-pipeline' in status.pipeline_job_resource_name) + assert(status.pipeline_task_name == 'exit-handler-1') + #TODO: Add assert statements to validate status.error_code and status.error_message values once those fields have been implemented. 
+ +@dsl.component +def some_task(): + print('Executing some_task()...') + +@dsl.pipeline +def status_state_pipeline(): + echo_state_task = echo_state() + with dsl.ExitHandler(exit_task=echo_state_task): + some_task() diff --git a/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_input_status_state.yaml b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_input_status_state.yaml new file mode 100644 index 00000000000..4dd27499f0d --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_input_status_state.yaml @@ -0,0 +1,112 @@ +# PIPELINE DEFINITION +# Name: status-state-pipeline +components: + comp-echo-state: + executorLabel: exec-echo-state + inputDefinitions: + parameters: + status: + isOptional: true + parameterType: TASK_FINAL_STATUS + comp-exit-handler-1: + dag: + tasks: + some-task: + cachingOptions: + enableCache: true + componentRef: + name: comp-some-task + taskInfo: + name: some-task + comp-some-task: + executorLabel: exec-some-task +deploymentSpec: + executors: + exec-echo-state: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - echo_state + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef echo_state(status: dsl.PipelineTaskFinalStatus):\n assert(status.state\ + \ == 'COMPLETE')\n assert('status-state-pipeline' in status.pipeline_job_resource_name)\n\ + \ assert(status.pipeline_task_name == 'exit-handler-1')\n #TODO: Add\ + \ assert statements to validate status.error_code and status.error_message\ + \ values once those fields have been implemented.\n\n" + image: python:3.9 + exec-some-task: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - some_task + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef some_task():\n print('Executing some_task()...')\n\n" + image: python:3.9 +pipelineInfo: + name: status-state-pipeline +root: + dag: + tasks: + echo-state: + cachingOptions: + enableCache: true + componentRef: + name: comp-echo-state + dependentTasks: + - exit-handler-1 + inputs: + parameters: + status: + taskFinalStatus: + producerTask: exit-handler-1 + taskInfo: + name: echo-state + triggerPolicy: + strategy: ALL_UPSTREAM_TASKS_COMPLETED + exit-handler-1: + componentRef: + name: comp-exit-handler-1 + taskInfo: + name: exit-handler-1 +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_placeholders.py b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_placeholders.py new file mode 100644 index 00000000000..b0e7f1afce0 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_placeholders.py @@ -0,0 +1,58 @@ +# Copyright 2025 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from kfp import compiler +from kfp import dsl +from kfp.dsl import component + + +@component +def print_all_placeholders( + job_name: str, + job_resource_name: str, + job_id: str, + task_name: str, + task_id: str, +): + allPlaceholders = [job_name, job_resource_name, job_id, task_name, task_id] + + for placeholder in allPlaceholders: + if "\{\{" in placeholder or placeholder == "": + raise RuntimeError( + "Expected the placeholder to be replaced with a value: " + placeholder + ) + + assert task_name == "print-all-placeholders" + assert job_resource_name.startswith("pipeline-with-placeholders-") + + output = ", ".join(allPlaceholders) + print(output) + + +@dsl.pipeline(name="pipeline-with-placeholders") +def pipeline_with_placeholders(): + print_all_placeholders( + job_name=dsl.PIPELINE_JOB_NAME_PLACEHOLDER, + job_resource_name=dsl.PIPELINE_JOB_RESOURCE_NAME_PLACEHOLDER, + job_id=dsl.PIPELINE_JOB_ID_PLACEHOLDER, + task_name=dsl.PIPELINE_TASK_NAME_PLACEHOLDER, + task_id=dsl.PIPELINE_TASK_ID_PLACEHOLDER, + ).set_caching_options(False) + + +if __name__ == "__main__": + compiler.Compiler().compile( + pipeline_func=pipeline_with_placeholders, + package_path=__file__.replace(".py", ".yaml"), + ) \ No newline at end of file diff --git a/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_placeholders.yaml b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_placeholders.yaml new file mode 100644 index 00000000000..dc2ba8e4851 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_placeholders.yaml @@ -0,0 +1,86 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-placeholders +components: + comp-print-all-placeholders: + executorLabel: exec-print-all-placeholders + inputDefinitions: + parameters: + job_id: + parameterType: STRING + job_name: + parameterType: STRING + job_resource_name: + parameterType: STRING + task_id: + parameterType: STRING + task_name: + parameterType: STRING +deploymentSpec: + executors: + exec-print-all-placeholders: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_all_placeholders + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_all_placeholders(\n job_name: str,\n job_resource_name:\ + \ str,\n job_id: str,\n task_name: str,\n task_id:\ + \ str,\n):\n allPlaceholders = [job_name, job_resource_name, job_id,\ + \ task_name, task_id]\n\n for placeholder in allPlaceholders:\n \ + \ if \"\\{\\{\" in placeholder or placeholder == \"\":\n raise\ + \ RuntimeError(\n \"Expected the placeholder to be replaced\ + \ with a value: \" + placeholder\n )\n\n assert task_name\ + \ == \"print-all-placeholders\"\n assert job_resource_name.startswith(\"\ + pipeline-with-placeholders-\")\n\n output = \", \".join(allPlaceholders)\n\ + \ print(output)\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-with-placeholders +root: + dag: + tasks: + print-all-placeholders: + cachingOptions: {} + componentRef: + name: comp-print-all-placeholders + inputs: + parameters: + job_id: + runtimeValue: + constant: '{{$.pipeline_job_uuid}}' + job_name: + runtimeValue: + constant: '{{$.pipeline_job_name}}' + job_resource_name: + runtimeValue: + constant: '{{$.pipeline_job_resource_name}}' + task_id: + runtimeValue: + constant: '{{$.pipeline_task_uuid}}' + task_name: + runtimeValue: + constant: '{{$.pipeline_task_name}}' + taskInfo: + name: print-all-placeholders +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_pod_metadata.py b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_pod_metadata.py new file mode 100644 index 00000000000..e7a47dfb22a --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_pod_metadata.py @@ -0,0 +1,251 @@ +from kfp import dsl +from kfp.compiler import compiler +from kfp.kubernetes import add_pod_annotation +from kfp.kubernetes import add_pod_label +from kfp.kubernetes import use_field_path_as_env + + +# Component is specific to task_a() and validates that task_a() contains one annotation and two labels. +@dsl.component +def validate_pod_metadata_task_a(annotation_path: str, + annotation_exp_val: str, + label_path_1: str, + label_exp_val_1: str, + label_path_2: str, + label_exp_val_2: str) -> bool: + import os + annotation_val = os.getenv(annotation_path) + if annotation_val != annotation_exp_val: + raise ValueError( + f'Pod annotation is {annotation_val} but is supposed to be {annotation_exp_val}.' + ) + + label_val_1 = os.getenv(label_path_1) + if label_val_1 != label_exp_val_1: + raise ValueError( + f'Pod label is {label_val_1} but is supposed to be {label_exp_val_1}.' + ) + + label_val_2 = os.getenv(label_path_2) + if label_val_2 != label_exp_val_2: + raise ValueError( + f'Pod label is {label_val_2} but is supposed to be {label_exp_val_2}.' + ) + return True + +# Component is specific to task_b() and validates that task_b() contains four annotations and three labels. 
+@dsl.component
+def validate_pod_metadata_task_b(annotation_path_1: str,
+                                 annotation_exp_val_1: str,
+                                 annotation_path_2: str,
+                                 annotation_exp_val_2: str,
+                                 annotation_path_3: str,
+                                 annotation_exp_val_3: str,
+                                 annotation_path_4: str,
+                                 annotation_exp_val_4: str,
+                                 label_path_1: str,
+                                 label_exp_val_1: str,
+                                 label_path_2: str,
+                                 label_exp_val_2: str,
+                                 label_path_3: str,
+                                 label_exp_val_3: str) -> bool:
+    import os
+    annotation_val_1 = os.getenv(annotation_path_1)
+    if annotation_val_1 != annotation_exp_val_1:
+        raise ValueError(
+            f'Pod annotation is {annotation_val_1} but is supposed to be {annotation_exp_val_1}.'
+        )
+    annotation_val_2 = os.getenv(annotation_path_2)
+    if annotation_val_2 != annotation_exp_val_2:
+        raise ValueError(
+            f'Pod annotation is {annotation_val_2} but is supposed to be {annotation_exp_val_2}.'
+        )
+    annotation_val_3 = os.getenv(annotation_path_3)
+    if annotation_val_3 != annotation_exp_val_3:
+        raise ValueError(
+            f'Pod annotation is {annotation_val_3} but is supposed to be {annotation_exp_val_3}.'
+        )
+    annotation_val_4 = os.getenv(annotation_path_4)
+    if annotation_val_4 != annotation_exp_val_4:
+        raise ValueError(
+            f'Pod annotation is {annotation_val_4} but is supposed to be {annotation_exp_val_4}.'
+        )
+    label_val_1 = os.getenv(label_path_1)
+    if label_val_1 != label_exp_val_1:
+        raise ValueError(
+            f'Pod label is {label_val_1} but is supposed to be {label_exp_val_1}.'
+        )
+    label_val_2 = os.getenv(label_path_2)
+    if label_val_2 != label_exp_val_2:
+        raise ValueError(
+            f'Pod label is {label_val_2} but is supposed to be {label_exp_val_2}.'
+        )
+    label_val_3 = os.getenv(label_path_3)
+    if label_val_3 != label_exp_val_3:
+        raise ValueError(
+            f'Pod label is {label_val_3} but is supposed to be {label_exp_val_3}.'
+        )
+    return True
+
+# Component is specific to task_c() and validates that task_c() contains two annotations and zero labels.
+@dsl.component
+def validate_pod_metadata_task_c(annotation_path_1: str,
+                                 annotation_exp_val_1: str,
+                                 annotation_path_2: str,
+                                 annotation_exp_val_2: str) -> bool:
+    import os
+    annotation_val_1 = os.getenv(annotation_path_1)
+    if annotation_val_1 != annotation_exp_val_1:
+        raise ValueError(
+            f'Pod annotation is {annotation_val_1} but is supposed to be {annotation_exp_val_1}.'
+        )
+
+    annotation_val_2 = os.getenv(annotation_path_2)
+    if annotation_val_2 != annotation_exp_val_2:
+        raise ValueError(
+            f'Pod annotation is {annotation_val_2} but is supposed to be {annotation_exp_val_2}.'
+        )
+    return True
+
+
+# Component is specific to task_d() and validates that task_d() contains zero annotations and three labels.
+@dsl.component
+def validate_pod_metadata_task_d(label_path_1: str,
+                                 label_exp_val_1: str,
+                                 label_path_2: str,
+                                 label_exp_val_2: str,
+                                 label_path_3: str,
+                                 label_exp_val_3: str) -> bool:
+    import os
+    label_val_1 = os.getenv(label_path_1)
+    if label_val_1 != label_exp_val_1:
+        raise ValueError(
+            f'Pod label is {label_val_1} but is supposed to be {label_exp_val_1}.'
+        )
+    label_val_2 = os.getenv(label_path_2)
+    if label_val_2 != label_exp_val_2:
+        raise ValueError(
+            f'Pod label is {label_val_2} but is supposed to be {label_exp_val_2}.'
+        )
+    label_val_3 = os.getenv(label_path_3)
+    if label_val_3 != label_exp_val_3:
+        raise ValueError(
+            f'Pod label is {label_val_3} but is supposed to be {label_exp_val_3}.'
+        )
+    return True
+
+# Validates that the task contains zero annotations or labels.
+@dsl.component +def validate_no_pod_metadata() -> bool: + import os + annotation = os.getenv('POD_TASK_ANNOTATION') + annotation_2 = os.getenv('POD_TASK_ANNOTATION_1') + if annotation != '' or annotation_2 != '': + raise ValueError( + f'Pod annotation is {annotation} but is supposed to be None.' + ) + label = os.getenv('POD_TASK_LABEL') + label_2 = os.getenv('POD_TASK_LABEL_1') + if label != '' or label_2 != '': + raise ValueError( + f'Pod label is {label} but is supposed to be None.' + ) + return True + + +@dsl.pipeline +def pipeline_with_pod_metadata(): + # tasks a-d are set with different metadata and the separate pods representing each component should have + # separate, different metadata. + + # task_a is set with one annotation and two labels. + task_a = validate_pod_metadata_task_a( + annotation_path = 'POD_TASK_ANNOTATION', + annotation_exp_val = 'annotation', + label_path_1 = 'POD_TASK_LABEL_1', + label_exp_val_1 = 'label-1', + label_path_2 = 'POD_TASK_LABEL_2', + label_exp_val_2 = 'label-2').set_caching_options(False) + add_pod_label(task_a, 'task-label-1', 'label-1') + add_pod_label(task_a, 'task-label-2', 'label-2') + add_pod_annotation(task_a, 'task-annotation', 'annotation') + # expose pod metadata annotation and label in container + use_field_path_as_env(task_a, 'POD_TASK_ANNOTATION', + "metadata.annotations['task-annotation']") + use_field_path_as_env(task_a, 'POD_TASK_LABEL_1', "metadata.labels['task-label-1']") + use_field_path_as_env(task_a, 'POD_TASK_LABEL_2', "metadata.labels['task-label-2']") + + # task_b is set with four annotations and three labels. + task_b = validate_pod_metadata_task_b( + annotation_path_1 = 'POD_TASK_ANNOTATION_1', + annotation_exp_val_1 = 'annotation-1', + annotation_path_2 = 'POD_TASK_ANNOTATION_2', + annotation_exp_val_2 = 'annotation-2', + annotation_path_3 = 'POD_TASK_ANNOTATION_3', + annotation_exp_val_3 = 'annotation-3', + annotation_path_4 = 'POD_TASK_ANNOTATION_4', + annotation_exp_val_4 = 'annotation-4', + label_path_1 = 'POD_TASK_LABEL_1', + label_exp_val_1 = 'label-1', + label_path_2 = 'POD_TASK_LABEL_2', + label_exp_val_2 = 'label-2', + label_path_3 = 'POD_TASK_LABEL_3', + label_exp_val_3 = 'label-3' + ).set_caching_options(False) + add_pod_annotation(task_b, 'task-annotation-1', 'annotation-1') + add_pod_annotation(task_b, 'task-annotation-2', 'annotation-2') + add_pod_annotation(task_b, 'task-annotation-3', 'annotation-3') + add_pod_annotation(task_b, 'task-annotation-4', 'annotation-4') + add_pod_label(task_b, 'task-label-1', 'label-1') + add_pod_label(task_b, 'task-label-2', 'label-2') + add_pod_label(task_b, 'task-label-3', 'label-3') + use_field_path_as_env(task_b, 'POD_TASK_ANNOTATION_1', + "metadata.annotations['task-annotation-1']") + use_field_path_as_env(task_b, 'POD_TASK_ANNOTATION_2', + "metadata.annotations['task-annotation-2']") + use_field_path_as_env(task_b, 'POD_TASK_ANNOTATION_3', + "metadata.annotations['task-annotation-3']") + use_field_path_as_env(task_b, 'POD_TASK_ANNOTATION_4', + "metadata.annotations['task-annotation-4']") + use_field_path_as_env(task_b, 'POD_TASK_LABEL_1', "metadata.labels['task-label-1']") + use_field_path_as_env(task_b, 'POD_TASK_LABEL_2', "metadata.labels['task-label-2']") + use_field_path_as_env(task_b, 'POD_TASK_LABEL_3', "metadata.labels['task-label-3']") + + # task_c is set with two annotations and zero labels. 
+ task_c = validate_pod_metadata_task_c( + annotation_path_1 = 'POD_TASK_ANNOTATION_1', + annotation_exp_val_1 = 'annotation-1', + annotation_path_2 = 'POD_TASK_ANNOTATION_2', + annotation_exp_val_2 = 'annotation-2').set_caching_options(False) + add_pod_annotation(task_c, 'task-annotation-1', 'annotation-1') + add_pod_annotation(task_c, 'task-annotation-2', 'annotation-2') + use_field_path_as_env(task_c, 'POD_TASK_ANNOTATION_1', "metadata.annotations['task-annotation-1']") + use_field_path_as_env(task_c, 'POD_TASK_ANNOTATION_2', "metadata.annotations['task-annotation-2']") + + # task_d is set with zero annotations and three labels. + task_d = validate_pod_metadata_task_d( + label_path_1 = 'POD_TASK_LABEL_1', + label_exp_val_1 = 'label-1', + label_path_2 = 'POD_TASK_LABEL_2', + label_exp_val_2 = 'label-2', + label_path_3 = 'POD_TASK_LABEL_3', + label_exp_val_3 = 'label-3', + ).set_caching_options(False) + add_pod_label(task_d, 'task-label-1', 'label-1') + add_pod_label(task_d, 'task-label-2', 'label-2') + add_pod_label(task_d, 'task-label-3', 'label-3') + use_field_path_as_env(task_d, 'POD_TASK_LABEL_1', "metadata.labels['task-label-1']") + use_field_path_as_env(task_d, 'POD_TASK_LABEL_2', "metadata.labels['task-label-2']") + use_field_path_as_env(task_d, 'POD_TASK_LABEL_3', "metadata.labels['task-label-3']") + +# task e is set with no metadata. + task_e = validate_no_pod_metadata().set_caching_options(False) + use_field_path_as_env(task_e, 'POD_TASK_ANNOTATION', "metadata.annotations['task-annotation']") + use_field_path_as_env(task_e, 'POD_TASK_ANNOTATION_1', "metadata.annotations['task-annotation-1']") + use_field_path_as_env(task_e, 'POD_TASK_LABEL', "metadata.labels['task-label']") + use_field_path_as_env(task_e, 'POD_TASK_LABEL_1', "metadata.labels['task-label-1']") + +if __name__ == '__main__': + compiler.Compiler().compile( + pipeline_func=pipeline_with_pod_metadata, + package_path=__file__.replace('.py', '.yaml')) diff --git a/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_pod_metadata.yaml b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_pod_metadata.yaml new file mode 100644 index 00000000000..574ad15fd98 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_pod_metadata.yaml @@ -0,0 +1,530 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-pod-metadata +components: + comp-validate-no-pod-metadata: + executorLabel: exec-validate-no-pod-metadata + outputDefinitions: + parameters: + Output: + parameterType: BOOLEAN + comp-validate-pod-metadata-task-a: + executorLabel: exec-validate-pod-metadata-task-a + inputDefinitions: + parameters: + annotation_exp_val: + parameterType: STRING + annotation_path: + parameterType: STRING + label_exp_val_1: + parameterType: STRING + label_exp_val_2: + parameterType: STRING + label_path_1: + parameterType: STRING + label_path_2: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: BOOLEAN + comp-validate-pod-metadata-task-b: + executorLabel: exec-validate-pod-metadata-task-b + inputDefinitions: + parameters: + annotation_exp_val_1: + parameterType: STRING + annotation_exp_val_2: + parameterType: STRING + annotation_exp_val_3: + parameterType: STRING + annotation_exp_val_4: + parameterType: STRING + annotation_path_1: + parameterType: STRING + annotation_path_2: + parameterType: STRING + annotation_path_3: + parameterType: STRING + annotation_path_4: + parameterType: STRING + label_exp_val_1: + parameterType: STRING + label_exp_val_2: + parameterType: STRING + 
label_exp_val_3: + parameterType: STRING + label_path_1: + parameterType: STRING + label_path_2: + parameterType: STRING + label_path_3: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: BOOLEAN + comp-validate-pod-metadata-task-c: + executorLabel: exec-validate-pod-metadata-task-c + inputDefinitions: + parameters: + annotation_exp_val_1: + parameterType: STRING + annotation_exp_val_2: + parameterType: STRING + annotation_path_1: + parameterType: STRING + annotation_path_2: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: BOOLEAN + comp-validate-pod-metadata-task-d: + executorLabel: exec-validate-pod-metadata-task-d + inputDefinitions: + parameters: + label_exp_val_1: + parameterType: STRING + label_exp_val_2: + parameterType: STRING + label_exp_val_3: + parameterType: STRING + label_path_1: + parameterType: STRING + label_path_2: + parameterType: STRING + label_path_3: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: BOOLEAN +deploymentSpec: + executors: + exec-validate-no-pod-metadata: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - validate_no_pod_metadata + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef validate_no_pod_metadata() -> bool:\n import os\n annotation\ + \ = os.getenv('POD_TASK_ANNOTATION')\n annotation_2 = os.getenv('POD_TASK_ANNOTATION_1')\n\ + \ if annotation != '' or annotation_2 != '':\n raise ValueError(\n\ + \ f'Pod annotation is {annotation} but is supposed to be None.'\n\ + \ )\n label = os.getenv('POD_TASK_LABEL')\n label_2 = os.getenv('POD_TASK_LABEL_1')\n\ + \ if label != '' or label_2 != '':\n raise ValueError(\n \ + \ f'Pod label is {label} but is supposed to be None.'\n )\n\ + \ return True\n\n" + image: python:3.9 + exec-validate-pod-metadata-task-a: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - validate_pod_metadata_task_a + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef validate_pod_metadata_task_a(annotation_path: str,\n \ + \ annotation_exp_val: str,\n \ + \ label_path_1: str,\n label_exp_val_1:\ + \ str,\n label_path_2: str,\n \ + \ label_exp_val_2: str) -> bool:\n import os\n\ + \ annotation_val = os.getenv(annotation_path)\n if annotation_val\ + \ != annotation_exp_val:\n raise ValueError(\n f'Pod annotation\ + \ is {annotation_val} but is supposed to be {annotation_exp_val}.'\n \ + \ )\n\n label_val_1 = os.getenv(label_path_1)\n if label_val_1\ + \ != label_exp_val_1:\n raise ValueError(\n f'Pod label\ + \ is {label_val_1} but is supposed to be {label_exp_val_1}.'\n )\n\ + \n label_val_2 = os.getenv(label_path_2)\n if label_val_2 != label_exp_val_2:\n\ + \ raise ValueError(\n f'Pod label is {label_val_2} but\ + \ is supposed to be {label_exp_val_2}.'\n )\n return True\n\n" + image: python:3.9 + exec-validate-pod-metadata-task-b: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - validate_pod_metadata_task_b + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef validate_pod_metadata_task_b(annotation_path_1: str,\n \ + \ annotation_exp_val_1: str,\n \ + \ annotation_path_2: str,\n \ + \ annotation_exp_val_2: str,\n annotation_path_3:\ + \ str,\n annotation_exp_val_3: str,\n \ + \ annotation_path_4: str,\n \ + \ annotation_exp_val_4: str,\n \ + \ label_path_1: str,\n label_exp_val_1:\ + \ str,\n label_path_2: str,\n \ + \ label_exp_val_2: str,\n \ + \ label_path_3: str,\n label_exp_val_3:\ + \ str) -> bool:\n import os\n annotation_val_1 = os.getenv(annotation_path_1)\n\ + \ if annotation_val_1 != annotation_exp_val_1:\n raise ValueError(\n\ + \ f'Pod annotation is {annotation_val_1} but is supposed to be\ + \ {annotation_exp_val_1}.'\n )\n annotation_val_2 = os.getenv(annotation_path_2)\n\ + \ if annotation_val_2 != annotation_exp_val_2:\n raise ValueError(\n\ + \ f'Pod annotation is {annotation_val_2} but is supposed to be\ + \ {annotation_exp_val_2}.'\n )\n annotation_val_3 = os.getenv(annotation_path_3)\n\ + \ if annotation_val_3 != annotation_exp_val_3:\n raise ValueError(\n\ + \ f'Pod annotation is {annotation_val_3} but is supposed to be\ + \ {annotation_exp_val_3}.'\n )\n annotation_val_4 = 
os.getenv(annotation_path_4)\n\ + \ if annotation_val_4 != annotation_exp_val_4:\n raise ValueError(\n\ + \ f'Pod annotation is {annotation_val_4} but is supposed to be\ + \ {annotation_exp_val_4}.'\n )\n label_val_1 = os.getenv(label_path_1)\n\ + \ if label_val_1 != label_exp_val_1:\n raise ValueError(\n \ + \ f'Pod label is {label_val_1} but is supposed to be {label_exp_val_1}.'\n\ + \ )\n label_val_2 = os.getenv(label_path_2)\n if label_val_2\ + \ != label_exp_val_2:\n raise ValueError(\n f'Pod label\ + \ is {label_val_2} but is supposed to be {label_exp_val_2}.'\n )\n\ + \ label_val_3 = os.getenv(label_path_3)\n if label_val_3 != label_exp_val_3:\n\ + \ raise ValueError(\n f'Pod label is {label_val_3} but\ + \ is supposed to be {label_exp_val_3}.'\n )\n return True\n\n" + image: python:3.9 + exec-validate-pod-metadata-task-c: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - validate_pod_metadata_task_c + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef validate_pod_metadata_task_c(annotation_path_1: str,\n \ + \ annotation_exp_val_1: str,\n \ + \ annotation_path_2: str,\n \ + \ annotation_exp_val_2: str) -> bool:\n import os\n annotation_val_1\ + \ = os.getenv(annotation_path_1)\n if annotation_val_1 != annotation_exp_val_1:\n\ + \ raise ValueError(\n f'Pod annotation is {annotation_val_1}\ + \ but is supposed to be {annotation_exp_val_1}.'\n )\n\n annotation_val_2\ + \ = os.getenv(annotation_path_2)\n if annotation_val_2 != annotation_exp_val_2:\n\ + \ raise ValueError(\n f'Pod annotation is {annotation_val_2}\ + \ but is supposed to be {annotation_exp_val_2}.'\n )\n return\ + \ True\n\n" + image: python:3.9 + exec-validate-pod-metadata-task-d: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - validate_pod_metadata_task_d + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef validate_pod_metadata_task_d(label_path_1: str,\n \ + \ label_exp_val_1: str,\n \ + \ label_path_2: str,\n label_exp_val_2:\ + \ str,\n label_path_3: str,\n \ + \ label_exp_val_3: str) -> bool:\n import os\n\ + \ label_val_1 = os.getenv(label_path_1)\n if label_val_1 != label_exp_val_1:\n\ + \ raise ValueError(\n f'Pod label is {label_val_1} but\ + \ is supposed to be {label_exp_val_1}.'\n )\n label_val_2 = os.getenv(label_path_2)\n\ + \ if label_val_2 != label_exp_val_2:\n raise ValueError(\n \ + \ f'Pod label is {label_val_2} but is supposed to be {label_exp_val_2}.'\n\ + \ )\n label_val_3 = os.getenv(label_path_3)\n if label_val_3\ + \ != label_exp_val_3:\n raise ValueError(\n f'Pod label\ + \ is {label_val_3} but is supposed to be {label_exp_val_3}.'\n )\n\ + \ return True\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-with-pod-metadata +root: + dag: + tasks: + validate-no-pod-metadata: + cachingOptions: {} + componentRef: + name: comp-validate-no-pod-metadata + taskInfo: + name: validate-no-pod-metadata + validate-pod-metadata-task-a: + cachingOptions: {} + componentRef: + name: comp-validate-pod-metadata-task-a + inputs: + parameters: + annotation_exp_val: + runtimeValue: + constant: annotation + annotation_path: + runtimeValue: + constant: POD_TASK_ANNOTATION + label_exp_val_1: + runtimeValue: + constant: label-1 + label_exp_val_2: + runtimeValue: + constant: label-2 + label_path_1: + runtimeValue: + constant: POD_TASK_LABEL_1 + label_path_2: + runtimeValue: + constant: POD_TASK_LABEL_2 + taskInfo: + name: validate-pod-metadata-task-a + validate-pod-metadata-task-b: + cachingOptions: {} + componentRef: + name: comp-validate-pod-metadata-task-b + inputs: + parameters: + annotation_exp_val_1: + runtimeValue: + constant: annotation-1 + annotation_exp_val_2: + runtimeValue: + constant: annotation-2 + annotation_exp_val_3: + runtimeValue: + constant: annotation-3 + annotation_exp_val_4: + runtimeValue: + constant: annotation-4 + annotation_path_1: + runtimeValue: + constant: POD_TASK_ANNOTATION_1 + annotation_path_2: + runtimeValue: + constant: POD_TASK_ANNOTATION_2 + annotation_path_3: + runtimeValue: + constant: POD_TASK_ANNOTATION_3 + annotation_path_4: + runtimeValue: + constant: POD_TASK_ANNOTATION_4 + label_exp_val_1: + runtimeValue: + constant: label-1 + label_exp_val_2: + runtimeValue: + constant: label-2 + label_exp_val_3: + runtimeValue: + constant: label-3 + label_path_1: + runtimeValue: + constant: POD_TASK_LABEL_1 + label_path_2: + runtimeValue: + constant: POD_TASK_LABEL_2 + label_path_3: + runtimeValue: + constant: POD_TASK_LABEL_3 + taskInfo: + name: validate-pod-metadata-task-b + validate-pod-metadata-task-c: + cachingOptions: {} + componentRef: + name: comp-validate-pod-metadata-task-c + inputs: + parameters: + annotation_exp_val_1: + runtimeValue: + constant: annotation-1 + annotation_exp_val_2: + 
runtimeValue: + constant: annotation-2 + annotation_path_1: + runtimeValue: + constant: POD_TASK_ANNOTATION_1 + annotation_path_2: + runtimeValue: + constant: POD_TASK_ANNOTATION_2 + taskInfo: + name: validate-pod-metadata-task-c + validate-pod-metadata-task-d: + cachingOptions: {} + componentRef: + name: comp-validate-pod-metadata-task-d + inputs: + parameters: + label_exp_val_1: + runtimeValue: + constant: label-1 + label_exp_val_2: + runtimeValue: + constant: label-2 + label_exp_val_3: + runtimeValue: + constant: label-3 + label_path_1: + runtimeValue: + constant: POD_TASK_LABEL_1 + label_path_2: + runtimeValue: + constant: POD_TASK_LABEL_2 + label_path_3: + runtimeValue: + constant: POD_TASK_LABEL_3 + taskInfo: + name: validate-pod-metadata-task-d +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 +--- +platforms: + kubernetes: + deploymentSpec: + executors: + exec-validate-no-pod-metadata: + fieldPathAsEnv: + - fieldPath: metadata.annotations['task-annotation'] + name: POD_TASK_ANNOTATION + - fieldPath: metadata.annotations['task-annotation-1'] + name: POD_TASK_ANNOTATION_1 + - fieldPath: metadata.labels['task-label'] + name: POD_TASK_LABEL + - fieldPath: metadata.labels['task-label-1'] + name: POD_TASK_LABEL_1 + exec-validate-pod-metadata-task-a: + fieldPathAsEnv: + - fieldPath: metadata.annotations['task-annotation'] + name: POD_TASK_ANNOTATION + - fieldPath: metadata.labels['task-label-1'] + name: POD_TASK_LABEL_1 + - fieldPath: metadata.labels['task-label-2'] + name: POD_TASK_LABEL_2 + podMetadata: + annotations: + task-annotation: annotation + labels: + task-label-1: label-1 + task-label-2: label-2 + exec-validate-pod-metadata-task-b: + fieldPathAsEnv: + - fieldPath: metadata.annotations['task-annotation-1'] + name: POD_TASK_ANNOTATION_1 + - fieldPath: metadata.annotations['task-annotation-2'] + name: POD_TASK_ANNOTATION_2 + - fieldPath: metadata.annotations['task-annotation-3'] + name: POD_TASK_ANNOTATION_3 + - fieldPath: metadata.annotations['task-annotation-4'] + name: POD_TASK_ANNOTATION_4 + - fieldPath: metadata.labels['task-label-1'] + name: POD_TASK_LABEL_1 + - fieldPath: metadata.labels['task-label-2'] + name: POD_TASK_LABEL_2 + - fieldPath: metadata.labels['task-label-3'] + name: POD_TASK_LABEL_3 + podMetadata: + annotations: + task-annotation-1: annotation-1 + task-annotation-2: annotation-2 + task-annotation-3: annotation-3 + task-annotation-4: annotation-4 + labels: + task-label-1: label-1 + task-label-2: label-2 + task-label-3: label-3 + exec-validate-pod-metadata-task-c: + fieldPathAsEnv: + - fieldPath: metadata.annotations['task-annotation-1'] + name: POD_TASK_ANNOTATION_1 + - fieldPath: metadata.annotations['task-annotation-2'] + name: POD_TASK_ANNOTATION_2 + podMetadata: + annotations: + task-annotation-1: annotation-1 + task-annotation-2: annotation-2 + exec-validate-pod-metadata-task-d: + fieldPathAsEnv: + - fieldPath: metadata.labels['task-label-1'] + name: POD_TASK_LABEL_1 + - fieldPath: metadata.labels['task-label-2'] + name: POD_TASK_LABEL_2 + - fieldPath: metadata.labels['task-label-3'] + name: POD_TASK_LABEL_3 + podMetadata: + labels: + task-label-1: label-1 + task-label-2: label-2 + task-label-3: label-3 diff --git a/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_retry.yaml b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_retry.yaml new file mode 100644 index 00000000000..012ccc619cc --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_retry.yaml @@ -0,0 +1,83 @@ +# PIPELINE DEFINITION +# Name: 
test-pipeline +# Inputs: +# a: float [Default: 1.0] +# b: float [Default: 7.0] +components: + comp-add: + executorLabel: exec-add + inputDefinitions: + parameters: + a: + parameterType: NUMBER_DOUBLE + b: + parameterType: NUMBER_DOUBLE + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_DOUBLE +deploymentSpec: + executors: + exec-add: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - add + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef add(a: float, b: float) -> float:\n return a + b\n\n" + image: python:3.9 +pipelineInfo: + name: test-pipeline +root: + dag: + tasks: + add: + cachingOptions: + enableCache: true + componentRef: + name: comp-add + inputs: + parameters: + a: + componentInputParameter: a + b: + componentInputParameter: b + retryPolicy: + backoffDuration: 0s + backoffFactor: 2.0 + backoffMaxDuration: 3600s + maxRetryCount: 3 + taskInfo: + name: add + inputDefinitions: + parameters: + a: + defaultValue: 1.0 + isOptional: true + parameterType: NUMBER_DOUBLE + b: + defaultValue: 7.0 + isOptional: true + parameterType: NUMBER_DOUBLE +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/samples/v2/pipeline_with_secret_as_env.py b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_secret_as_env.py similarity index 100% rename from samples/v2/pipeline_with_secret_as_env.py rename to test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_secret_as_env.py diff --git a/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_secret_as_env.yaml b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_secret_as_env.yaml new file mode 100644 index 00000000000..5d4e3a16879 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_secret_as_env.yaml @@ -0,0 +1,134 @@ +# PIPELINE DEFINITION +# Name: pipeline-secret-env +# Inputs: +# secret_parm: str [Default: 'test-secret-1'] +components: + comp-comp: + executorLabel: exec-comp + comp-generate-secret-name: + executorLabel: exec-generate-secret-name + outputDefinitions: + parameters: + some_output: + parameterType: STRING +deploymentSpec: + executors: + exec-comp: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - comp + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef comp():\n import os\n username = os.getenv(\"USER_NAME\"\ + , \"\")\n psw1 = os.getenv(\"PASSWORD_VAR1\", \"\")\n psw2 = os.getenv(\"\ + PASSWORD_VAR2\", \"\")\n assert username == \"user1\"\n assert psw1\ + \ == \"psw1\"\n assert psw2 == \"psw2\"\n\n" + image: python:3.9 + exec-generate-secret-name: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - generate_secret_name + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef generate_secret_name(some_output: OutputPath(str)):\n secret_name\ + \ = \"test-secret-3\"\n with open(some_output, 'w') as f:\n f.write(secret_name)\n\ + \n" + image: python:3.9 +pipelineInfo: + name: pipeline-secret-env +root: + dag: + tasks: + comp: + cachingOptions: + enableCache: true + componentRef: + name: comp-comp + dependentTasks: + - generate-secret-name + taskInfo: + name: comp + generate-secret-name: + cachingOptions: + enableCache: true + componentRef: + name: comp-generate-secret-name + taskInfo: + name: generate-secret-name + inputDefinitions: + parameters: + secret_parm: + defaultValue: test-secret-1 + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 +--- +platforms: + kubernetes: + deploymentSpec: + executors: + exec-comp: + secretAsEnv: + - keyToEnv: + - envVar: USER_NAME + secretKey: username + optional: false + secretNameParameter: + componentInputParameter: secret_parm + - keyToEnv: + - envVar: PASSWORD_VAR1 + secretKey: password + optional: false + secretName: test-secret-2 + secretNameParameter: + runtimeValue: + constant: test-secret-2 + - keyToEnv: + - envVar: PASSWORD_VAR2 + secretKey: password + optional: false + secretNameParameter: + taskOutputParameter: + outputParameterKey: some_output + producerTask: generate-secret-name \ No newline at end of file diff --git a/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_workspace.py b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_workspace.py new file mode 100644 index 00000000000..6738ac4a2e8 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_workspace.py @@ -0,0 +1,83 @@ +# Copyright 2025 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""A pipeline using workspace functionality.""" +from kfp import dsl, compiler + + +@dsl.component +def write_to_workspace(workspace_path: str) -> str: + """Write a file to the workspace.""" + import os + + # Create a file in the workspace + file_path = os.path.join(workspace_path, "data", "test_file.txt") + os.makedirs(os.path.dirname(file_path), exist_ok=True) + + with open(file_path, "w") as f: + f.write("Hello from workspace!") + + print(f"Wrote file to: {file_path}") + return file_path + + +@dsl.component +def read_from_workspace(file_path: str) -> str: + """Read a file from the workspace using the provided file path.""" + import os + + if os.path.exists(file_path): + with open(file_path, "r") as f: + content = f.read() + print(f"Read content from: {file_path}") + print(f"Content: {content}") + assert content == "Hello from workspace!" + return content + else: + print(f"File not found at: {file_path}") + return "File not found" + + +@dsl.pipeline( + name="pipeline-with-workspace", + description="A pipeline that demonstrates workspace functionality", + pipeline_config=dsl.PipelineConfig( + workspace=dsl.WorkspaceConfig( + size='1Gi', + kubernetes=dsl.KubernetesWorkspaceConfig( + pvcSpecPatch={'storageClassName': 'standard'} + ) + ), + ), +) +def pipeline_with_workspace() -> str: + """A pipeline using workspace functionality with write and read components.""" + + # Write to workspace + write_task = write_to_workspace( + workspace_path=dsl.WORKSPACE_PATH_PLACEHOLDER + ) + + # Read from workspace + read_task = read_from_workspace( + file_path=write_task.output + ) + + return read_task.output + + +if __name__ == '__main__': + compiler.Compiler().compile( + pipeline_func=pipeline_with_workspace, + package_path=__file__.replace('.py', '.yaml')) \ No newline at end of file diff --git a/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_workspace.yaml b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_workspace.yaml new file mode 100644 index 00000000000..db933f09cac --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/pipeline_with_workspace.yaml @@ -0,0 +1,150 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-workspace +# Description: A pipeline that demonstrates workspace functionality +# Outputs: +# Output: str +components: + comp-read-from-workspace: + executorLabel: exec-read-from-workspace + inputDefinitions: + parameters: + file_path: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-write-to-workspace: + executorLabel: exec-write-to-workspace + inputDefinitions: + parameters: + workspace_path: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING +deploymentSpec: + executors: + exec-read-from-workspace: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - read_from_workspace + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef read_from_workspace(file_path: str) -> str:\n \"\"\"Read a\ + \ file from the workspace using the provided file path.\"\"\" \n import\ + \ os\n\n if os.path.exists(file_path):\n with open(file_path,\ + \ \"r\") as f:\n content = f.read()\n print(f\"Read content\ + \ from: {file_path}\")\n print(f\"Content: {content}\")\n \ + \ assert content == \"Hello from workspace!\"\n return content\n\ + \ else:\n print(f\"File not found at: {file_path}\")\n \ + \ return \"File not found\"\n\n" + image: python:3.9 + exec-write-to-workspace: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - write_to_workspace + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef write_to_workspace(workspace_path: str) -> str:\n \"\"\"Write\ + \ a file to the workspace.\"\"\" \n import os\n\n # Create a file\ + \ in the workspace\n file_path = os.path.join(workspace_path, \"data\"\ + , \"test_file.txt\")\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n\ + \n with open(file_path, \"w\") as f:\n f.write(\"Hello from workspace!\"\ + )\n\n print(f\"Wrote file to: {file_path}\")\n return file_path\n\n" + image: python:3.9 +pipelineInfo: + description: A pipeline that demonstrates workspace functionality + name: pipeline-with-workspace +root: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: read-from-workspace + tasks: + read-from-workspace: + cachingOptions: + enableCache: true + componentRef: + name: comp-read-from-workspace + dependentTasks: + - write-to-workspace + inputs: + parameters: + file_path: + taskOutputParameter: + outputParameterKey: Output + producerTask: write-to-workspace + taskInfo: + name: read-from-workspace + write-to-workspace: + cachingOptions: + enableCache: true + componentRef: + name: comp-write-to-workspace + inputs: + parameters: + workspace_path: + runtimeValue: + constant: '{{$.workspace_path}}' + taskInfo: + name: write-to-workspace + outputDefinitions: + parameters: + Output: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 +--- +platforms: + kubernetes: + pipelineConfig: + workspace: + kubernetes: + pvcSpecPatch: + storageClassName: standard + size: 1Gi diff --git 
a/samples/v2/producer_consumer_param.py b/test_data/sdk_compiled_pipelines/valid/critical/producer_consumer_param.py similarity index 100% rename from samples/v2/producer_consumer_param.py rename to test_data/sdk_compiled_pipelines/valid/critical/producer_consumer_param.py diff --git a/test_data/sdk_compiled_pipelines/valid/critical/producer_consumer_param_pipeline.yaml b/test_data/sdk_compiled_pipelines/valid/critical/producer_consumer_param_pipeline.yaml new file mode 100644 index 00000000000..9600b68d411 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/producer_consumer_param_pipeline.yaml @@ -0,0 +1,87 @@ +# PIPELINE DEFINITION +# Name: producer-consumer-param-pipeline +# Inputs: +# text: str [Default: 'Hello world'] +components: + comp-consumer: + executorLabel: exec-consumer + inputDefinitions: + parameters: + input_value: + parameterType: STRING + comp-producer: + executorLabel: exec-producer + inputDefinitions: + parameters: + input_text: + parameterType: STRING + outputDefinitions: + parameters: + output_value: + parameterType: STRING +deploymentSpec: + executors: + exec-consumer: + container: + command: + - sh + - -c + - 'set -e -x + + echo "Read from an input parameter: " && echo "$0" + + ' + - '{{$.inputs.parameters[''input_value'']}}' + image: google/cloud-sdk:latest + exec-producer: + container: + command: + - sh + - -c + - 'set -e -x + + echo "$0, this is an output parameter" | gsutil cp - "$1" + + ' + - '{{$.inputs.parameters[''input_text'']}}' + - '{{$.outputs.parameters[''output_value''].output_file}}' + image: google/cloud-sdk:latest +pipelineInfo: + name: producer-consumer-param-pipeline +root: + dag: + tasks: + consumer: + cachingOptions: + enableCache: true + componentRef: + name: comp-consumer + dependentTasks: + - producer + inputs: + parameters: + input_value: + taskOutputParameter: + outputParameterKey: output_value + producerTask: producer + taskInfo: + name: consumer + producer: + cachingOptions: + enableCache: true + componentRef: + name: comp-producer + inputs: + parameters: + input_text: + componentInputParameter: text + taskInfo: + name: producer + inputDefinitions: + parameters: + text: + defaultValue: Hello world + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.2 diff --git a/test_data/sdk_compiled_pipelines/valid/critical/pythonic_artifacts_test_pipeline.py b/test_data/sdk_compiled_pipelines/valid/critical/pythonic_artifacts_test_pipeline.py new file mode 100644 index 00000000000..fd81a0265e3 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/pythonic_artifacts_test_pipeline.py @@ -0,0 +1,41 @@ +from kfp import dsl +from kfp import compiler +from kfp.dsl import Dataset, Model + + +@dsl.component +def gen_data() -> Dataset: + dataset = Dataset(uri=dsl.get_uri()) + with open(dataset.path, "w") as f: + f.write("some data") + + dataset.metadata["length"] = len("some data") + return dataset + + +@dsl.component +def train_model(dataset: Dataset) -> Model: + with open(dataset.path) as f: + lines = f.read() + + assert lines == "some data" + assert dataset.metadata["length"] == len("some data") + + model_artifact = Model(uri=dsl.get_uri("model")) + with open(model_artifact.path, "w") as f: + f.write("model trained") + + return model_artifact + + +@dsl.pipeline(name="pythonic-artifacts-test") +def pythonic_artifacts_test_pipeline(): + t1 = gen_data().set_caching_options(False) + train_model(dataset=t1.output).set_caching_options(False) + + +if __name__ == "__main__": + 
compiler.Compiler().compile( + pipeline_func=pythonic_artifacts_test_pipeline, + package_path=__file__.replace(".py", ".yaml"), + ) \ No newline at end of file diff --git a/test_data/sdk_compiled_pipelines/valid/critical/pythonic_artifacts_test_pipeline.yaml b/test_data/sdk_compiled_pipelines/valid/critical/pythonic_artifacts_test_pipeline.yaml new file mode 100644 index 00000000000..2e12ac6ea74 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/pythonic_artifacts_test_pipeline.yaml @@ -0,0 +1,118 @@ +# PIPELINE DEFINITION +# Name: pythonic-artifacts-test +components: + comp-gen-data: + executorLabel: exec-gen-data + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-train-model: + executorLabel: exec-train-model + inputDefinitions: + artifacts: + dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-gen-data: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - gen_data + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef gen_data() -> Dataset:\n dataset = Dataset(uri=dsl.get_uri())\n\ + \ with open(dataset.path, \"w\") as f:\n f.write(\"some data\"\ + )\n\n dataset.metadata[\"length\"] = len(\"some data\")\n return dataset\n\ + \n" + image: python:3.9 + exec-train-model: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - train_model + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef train_model(dataset: Dataset) -> Model:\n with open(dataset.path)\ + \ as f:\n lines = f.read()\n\n assert lines == \"some data\"\n\ + \ assert dataset.metadata[\"length\"] == len(\"some data\")\n\n model_artifact\ + \ = Model(uri=dsl.get_uri(\"model\"))\n with open(model_artifact.path,\ + \ \"w\") as f:\n f.write(\"model trained\")\n\n return model_artifact\n\ + \n" + image: python:3.9 +pipelineInfo: + name: pythonic-artifacts-test +root: + dag: + tasks: + gen-data: + cachingOptions: {} + componentRef: + name: comp-gen-data + taskInfo: + name: gen-data + train-model: + cachingOptions: {} + componentRef: + name: comp-train-model + dependentTasks: + - gen-data + inputs: + artifacts: + dataset: + taskOutputArtifact: + outputArtifactKey: Output + producerTask: gen-data + taskInfo: + name: train-model +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/test_data/sdk_compiled_pipelines/valid/critical/two_step_pipeline_containerized.py b/test_data/sdk_compiled_pipelines/valid/critical/two_step_pipeline_containerized.py new file mode 100644 index 00000000000..36ff28f000a --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/two_step_pipeline_containerized.py @@ -0,0 +1,47 @@ +# Copyright 2022 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from kfp import compiler +from kfp import dsl + + +@dsl.container_component +def component1(text: str, output_gcs: dsl.Output[dsl.Dataset]): + return dsl.ContainerSpec( + image='alpine', + command=[ + 'sh', + '-c', + 'mkdir --parents $(dirname "$1") && echo "$0" > "$1"', + ], + args=[text, output_gcs.path]) + + +@dsl.container_component +def component2(input_gcs: dsl.Input[dsl.Dataset]): + return dsl.ContainerSpec( + image='alpine', command=['cat'], args=[input_gcs.path]) + + +@dsl.pipeline(name='containerized-two-step-pipeline') +def my_pipeline(text: str): + component_1 = component1(text=text) + component_2 = component2(input_gcs=component_1.outputs['output_gcs']) + + +if __name__ == '__main__': + compiler.Compiler().compile( + pipeline_func=my_pipeline, + pipeline_parameters={'text': 'Hello KFP Containerized!'}, + package_path=__file__.replace('.py', '.yaml')) diff --git a/test_data/sdk_compiled_pipelines/valid/critical/two_step_pipeline_containerized.yaml b/test_data/sdk_compiled_pipelines/valid/critical/two_step_pipeline_containerized.yaml new file mode 100644 index 00000000000..3c877d5ca97 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/critical/two_step_pipeline_containerized.yaml @@ -0,0 +1,82 @@ +# PIPELINE DEFINITION +# Name: containerized-two-step-pipeline +# Inputs: +# text: str [Default: 'Hello KFP Containerized!'] +components: + comp-component1: + executorLabel: exec-component1 + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + artifacts: + output_gcs: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-component2: + executorLabel: exec-component2 + inputDefinitions: + artifacts: + input_gcs: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-component1: + container: + args: + - '{{$.inputs.parameters[''text'']}}' + - '{{$.outputs.artifacts[''output_gcs''].path}}' + command: + - sh + - -c + - mkdir --parents $(dirname "$1") && echo "$0" > "$1" + image: alpine + exec-component2: + container: + args: + - '{{$.inputs.artifacts[''input_gcs''].path}}' + command: + - cat + image: alpine +pipelineInfo: + name: containerized-two-step-pipeline +root: + dag: + tasks: + component1: + cachingOptions: + enableCache: true + componentRef: + name: comp-component1 + inputs: + parameters: + text: + componentInputParameter: text + taskInfo: + name: component1 + component2: + cachingOptions: + enableCache: true + componentRef: + name: comp-component2 + dependentTasks: + - component1 + inputs: + artifacts: + input_gcs: + taskOutputArtifact: + outputArtifactKey: output_gcs + producerTask: component1 + taskInfo: + name: component2 + inputDefinitions: + parameters: + text: + defaultValue: Hello KFP Containerized! 
+ parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/sdk/python/test_data/pipelines/cross_loop_after_topology.py b/test_data/sdk_compiled_pipelines/valid/cross_loop_after_topology.py similarity index 100% rename from sdk/python/test_data/pipelines/cross_loop_after_topology.py rename to test_data/sdk_compiled_pipelines/valid/cross_loop_after_topology.py diff --git a/test_data/sdk_compiled_pipelines/valid/cross_loop_after_topology.yaml b/test_data/sdk_compiled_pipelines/valid/cross_loop_after_topology.yaml new file mode 100644 index 00000000000..445c720a2ce --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/cross_loop_after_topology.yaml @@ -0,0 +1,500 @@ +# PIPELINE DEFINITION +# Name: my-pipeline +components: + comp-for-loop-10: + dag: + tasks: + print-op-5: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-5 + inputs: + parameters: + message: + runtimeValue: + constant: five + taskInfo: + name: print-op-5 + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-9: + parameterType: NUMBER_INTEGER + comp-for-loop-12: + dag: + tasks: + for-loop-14: + componentRef: + name: comp-for-loop-14 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-13 + items: + raw: '[1, 2]' + taskInfo: + name: for-loop-14 + print-op-8: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-8 + dependentTasks: + - for-loop-14 + inputs: + parameters: + message: + runtimeValue: + constant: eight + taskInfo: + name: print-op-8 + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-11: + parameterType: NUMBER_INTEGER + comp-for-loop-14: + dag: + tasks: + print-op-7: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-7 + inputs: + parameters: + message: + runtimeValue: + constant: seven + taskInfo: + name: print-op-7 + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-13: + parameterType: NUMBER_INTEGER + comp-for-loop-2: + dag: + tasks: + print-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op + inputs: + parameters: + message: + runtimeValue: + constant: one + taskInfo: + name: print-op + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-1: + parameterType: NUMBER_INTEGER + comp-for-loop-4: + dag: + tasks: + print-op-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-2 + inputs: + parameters: + message: + runtimeValue: + constant: two + taskInfo: + name: print-op-2 + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-3: + parameterType: NUMBER_INTEGER + comp-for-loop-6: + dag: + tasks: + for-loop-8: + componentRef: + name: comp-for-loop-8 + dependentTasks: + - print-op-3 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-7 + items: + raw: '[1, 2]' + taskInfo: + name: for-loop-8 + print-op-3: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-3 + inputs: + parameters: + message: + runtimeValue: + constant: three + taskInfo: + name: print-op-3 + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-5: + parameterType: NUMBER_INTEGER + comp-for-loop-8: + dag: + tasks: + print-op-4: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-4 + inputs: + parameters: + message: + runtimeValue: + constant: four + taskInfo: + name: print-op-4 + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-7: + parameterType: NUMBER_INTEGER + comp-print-op: + executorLabel: exec-print-op + 
inputDefinitions: + parameters: + message: + parameterType: STRING + comp-print-op-2: + executorLabel: exec-print-op-2 + inputDefinitions: + parameters: + message: + parameterType: STRING + comp-print-op-3: + executorLabel: exec-print-op-3 + inputDefinitions: + parameters: + message: + parameterType: STRING + comp-print-op-4: + executorLabel: exec-print-op-4 + inputDefinitions: + parameters: + message: + parameterType: STRING + comp-print-op-5: + executorLabel: exec-print-op-5 + inputDefinitions: + parameters: + message: + parameterType: STRING + comp-print-op-6: + executorLabel: exec-print-op-6 + inputDefinitions: + parameters: + message: + parameterType: STRING + comp-print-op-7: + executorLabel: exec-print-op-7 + inputDefinitions: + parameters: + message: + parameterType: STRING + comp-print-op-8: + executorLabel: exec-print-op-8 + inputDefinitions: + parameters: + message: + parameterType: STRING +deploymentSpec: + executors: + exec-print-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str):\n print(message)\n\n" + image: python:3.9 + exec-print-op-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str):\n print(message)\n\n" + image: python:3.9 + exec-print-op-3: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str):\n print(message)\n\n" + image: python:3.9 + exec-print-op-4: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str):\n print(message)\n\n" + image: python:3.9 + exec-print-op-5: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str):\n print(message)\n\n" + image: python:3.9 + exec-print-op-6: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str):\n print(message)\n\n" + image: python:3.9 + exec-print-op-7: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str):\n print(message)\n\n" + image: python:3.9 + exec-print-op-8: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str):\n print(message)\n\n" + image: python:3.9 +pipelineInfo: + name: my-pipeline +root: + dag: + tasks: + for-loop-10: + componentRef: + name: comp-for-loop-10 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-9 + items: + raw: '[1, 2]' + taskInfo: + name: for-loop-10 + for-loop-12: + componentRef: + name: comp-for-loop-12 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-11 + items: + raw: '[1, 2]' + taskInfo: + name: for-loop-12 + for-loop-2: + componentRef: + name: comp-for-loop-2 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-1 + items: + raw: '[1, 2]' + taskInfo: + name: for-loop-2 + for-loop-4: + componentRef: + name: comp-for-loop-4 + dependentTasks: + - for-loop-2 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-3 + items: + raw: '[1, 2]' + taskInfo: + name: for-loop-4 + for-loop-6: + componentRef: + name: comp-for-loop-6 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-5 + items: + raw: '[1, 2]' + taskInfo: + name: for-loop-6 + print-op-6: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-6 + dependentTasks: + - for-loop-10 + inputs: + parameters: + message: + runtimeValue: + constant: six + taskInfo: + name: print-op-6 +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/components/dict_input.py b/test_data/sdk_compiled_pipelines/valid/dict_input.py similarity index 100% rename from sdk/python/test_data/components/dict_input.py rename to test_data/sdk_compiled_pipelines/valid/dict_input.py diff --git a/test_data/sdk_compiled_pipelines/valid/dict_input.yaml b/test_data/sdk_compiled_pipelines/valid/dict_input.yaml new file mode 100644 index 00000000000..308db5f70b4 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/dict_input.yaml @@ -0,0 +1,63 @@ +# PIPELINE DEFINITION +# Name: dict-input +# Inputs: +# struct: dict +components: + comp-dict-input: + executorLabel: exec-dict-input + 
inputDefinitions: + parameters: + struct: + parameterType: STRUCT +deploymentSpec: + executors: + exec-dict-input: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - dict_input + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef dict_input(struct: Dict):\n print(struct)\n\n" + image: python:3.9 +pipelineInfo: + name: dict-input +root: + dag: + tasks: + dict-input: + cachingOptions: + enableCache: true + componentRef: + name: comp-dict-input + inputs: + parameters: + struct: + componentInputParameter: struct + taskInfo: + name: dict-input + inputDefinitions: + parameters: + struct: + parameterType: STRUCT +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/test_data/sdk_compiled_pipelines/valid/env-var.yaml b/test_data/sdk_compiled_pipelines/valid/env-var.yaml new file mode 100644 index 00000000000..a9456af593b --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/env-var.yaml @@ -0,0 +1,80 @@ +# PIPELINE DEFINITION +# Name: test-env-exists +# Inputs: +# env_var: str +# Outputs: +# Output: str +components: + comp-comp: + executorLabel: exec-comp + inputDefinitions: + parameters: + env_var: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING +deploymentSpec: + executors: + exec-comp: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - comp + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.12.1'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef comp(env_var: str) -> str:\n import os\n\n value = os.getenv(env_var,\ + \ \"\")\n\n if value == \"\":\n raise Exception(\"Env var is not\ + \ set\")\n\n return value\n\n" + image: public.ecr.aws/docker/library/python:3.12 +pipelineInfo: + name: test-env-exists +root: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: comp + tasks: + comp: + cachingOptions: {} + componentRef: + name: comp-comp + inputs: + parameters: + env_var: + componentInputParameter: env_var + taskInfo: + name: comp + inputDefinitions: + parameters: + env_var: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.12.1 diff --git a/backend/test/v2/resources/env-var.py b/test_data/sdk_compiled_pipelines/valid/env_var.py similarity index 100% rename from backend/test/v2/resources/env-var.py rename to test_data/sdk_compiled_pipelines/valid/env_var.py diff --git a/test_data/sdk_compiled_pipelines/valid/essential/__init__.py b/test_data/sdk_compiled_pipelines/valid/essential/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/test_data/pipelines/component_with_pip_index_urls.py b/test_data/sdk_compiled_pipelines/valid/essential/component_with_pip_index_urls.py similarity index 78% rename from sdk/python/test_data/pipelines/component_with_pip_index_urls.py rename to test_data/sdk_compiled_pipelines/valid/essential/component_with_pip_index_urls.py index 47a85129a05..e1d21e24610 100644 --- a/sdk/python/test_data/pipelines/component_with_pip_index_urls.py +++ b/test_data/sdk_compiled_pipelines/valid/essential/component_with_pip_index_urls.py @@ -11,14 +11,20 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import os from kfp import compiler from kfp import dsl from kfp.dsl import component +PACKAGES_TO_INSTALL = ['yapf'] +if 'KFP_PIPELINE_SPEC_PACKAGE_PATH' in os.environ: + PACKAGES_TO_INSTALL.append(os.environ['KFP_PIPELINE_SPEC_PACKAGE_PATH']) + @component( - pip_index_urls=['https://pypi.org/simple'], packages_to_install=['yapf']) + pip_index_urls=['https://pypi.org/simple'], + packages_to_install=PACKAGES_TO_INSTALL) def component_op(): import yapf print(dir(yapf)) diff --git a/test_data/sdk_compiled_pipelines/valid/essential/component_with_pip_index_urls.yaml b/test_data/sdk_compiled_pipelines/valid/essential/component_with_pip_index_urls.yaml new file mode 100644 index 00000000000..7bbbcd9ed7a --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/component_with_pip_index_urls.yaml @@ -0,0 +1,51 @@ +# PIPELINE DEFINITION +# Name: v2-component-pip-index-urls +components: + comp-component-op: + executorLabel: exec-component-op +deploymentSpec: + executors: + exec-component-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location --index-url https://pypi.org/simple\ + \ --trusted-host https://pypi.org/simple 'yapf' && python3 -m pip install\ + \ --quiet --no-warn-script-location --index-url https://pypi.org/simple\ + \ --trusted-host https://pypi.org/simple 'kfp==2.14.3' '--no-deps' 'typing-extensions>=3.7.4,<5;\ + \ python_version<\"3.9\"' && \"$0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_op():\n import yapf\n print(dir(yapf))\n\n" + image: python:3.9 +pipelineInfo: + name: v2-component-pip-index-urls +root: + dag: + tasks: + component-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-component-op + taskInfo: + name: component-op +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/sdk/python/test_data/components/component_with_pip_install.py b/test_data/sdk_compiled_pipelines/valid/essential/component_with_pip_install.py similarity index 77% rename from sdk/python/test_data/components/component_with_pip_install.py rename to test_data/sdk_compiled_pipelines/valid/essential/component_with_pip_install.py index 35af32d5fb3..6ed8c770750 100644 --- a/sdk/python/test_data/components/component_with_pip_install.py +++ b/test_data/sdk_compiled_pipelines/valid/essential/component_with_pip_install.py @@ -11,11 +11,18 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import os + from kfp.dsl import component +PACKAGES_TO_INSTALL = ['yapf'] +if 'KFP_PIPELINE_SPEC_PACKAGE_PATH' in os.environ: + PACKAGES_TO_INSTALL.append(os.environ['KFP_PIPELINE_SPEC_PACKAGE_PATH']) + @component( - pip_index_urls=['https://pypi.org/simple'], packages_to_install=['yapf']) + pip_index_urls=['https://pypi.org/simple'], + packages_to_install=PACKAGES_TO_INSTALL) def component_with_pip_install(): import yapf print(dir(yapf)) diff --git a/test_data/sdk_compiled_pipelines/valid/essential/component_with_pip_install.yaml b/test_data/sdk_compiled_pipelines/valid/essential/component_with_pip_install.yaml new file mode 100644 index 00000000000..4db48882c2b --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/component_with_pip_install.yaml @@ -0,0 +1,52 @@ +# PIPELINE DEFINITION +# Name: component-with-pip-install +components: + comp-component-with-pip-install: + executorLabel: exec-component-with-pip-install +deploymentSpec: + executors: + exec-component-with-pip-install: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_with_pip_install + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location --index-url https://pypi.org/simple\ + \ --trusted-host https://pypi.org/simple 'yapf' && python3 -m pip install\ + \ --quiet --no-warn-script-location --index-url https://pypi.org/simple\ + \ --trusted-host https://pypi.org/simple 'kfp==2.14.3' '--no-deps' 'typing-extensions>=3.7.4,<5;\ + \ python_version<\"3.9\"' && \"$0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_with_pip_install():\n import yapf\n print(dir(yapf))\n\ + \n" + image: python:3.9 +pipelineInfo: + name: component-with-pip-install +root: + dag: + tasks: + component-with-pip-install: + cachingOptions: + enableCache: true + componentRef: + name: comp-component-with-pip-install + taskInfo: + name: component-with-pip-install +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/test_data/sdk_compiled_pipelines/valid/essential/component_with_pip_install_in_venv.py b/test_data/sdk_compiled_pipelines/valid/essential/component_with_pip_install_in_venv.py new file mode 100644 index 00000000000..960a4d57214 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/component_with_pip_install_in_venv.py @@ -0,0 +1,40 @@ +# Copyright 2024 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import os + +from kfp.dsl import component + +PACKAGES_TO_INSTALL = ['yapf'] +if 'KFP_PIPELINE_SPEC_PACKAGE_PATH' in os.environ: + PACKAGES_TO_INSTALL.append(os.environ['KFP_PIPELINE_SPEC_PACKAGE_PATH']) + + +@component( + pip_index_urls=['https://pypi.org/simple'], + packages_to_install=['yapf'], + use_venv=True, +) +def component_with_pip_install(): + import yapf + + print(dir(yapf)) + + +if __name__ == '__main__': + from kfp import compiler + + compiler.Compiler().compile( + pipeline_func=component_with_pip_install, + package_path=__file__.replace('.py', '.yaml'), + ) diff --git a/test_data/sdk_compiled_pipelines/valid/essential/component_with_pip_install_in_venv.yaml b/test_data/sdk_compiled_pipelines/valid/essential/component_with_pip_install_in_venv.yaml new file mode 100644 index 00000000000..d80a97723de --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/component_with_pip_install_in_venv.yaml @@ -0,0 +1,54 @@ +# PIPELINE DEFINITION +# Name: component-with-pip-install +components: + comp-component-with-pip-install: + executorLabel: exec-component-with-pip-install +deploymentSpec: + executors: + exec-component-with-pip-install: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_with_pip_install + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location --index-url https://pypi.org/simple\ + \ --trusted-host https://pypi.org/simple 'yapf' && \nexport PIP_DISABLE_PIP_VERSION_CHECK=1\n\ + tmp=$(mktemp -d)\npython3 -m venv \"$tmp/venv\" --system-site-packages\n\ + . \"$tmp/venv/bin/activate\"\n python3 -m pip install --quiet --no-warn-script-location\ + \ --index-url https://pypi.org/simple --trusted-host https://pypi.org/simple\ + \ 'kfp==2.14.3' '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"\ + 3.9\"' && \"$0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_with_pip_install():\n import yapf\n\n print(dir(yapf))\n\ + \n" + image: python:3.9 +pipelineInfo: + name: component-with-pip-install +root: + dag: + tasks: + component-with-pip-install: + cachingOptions: + enableCache: true + componentRef: + name: comp-component-with-pip-install + taskInfo: + name: component-with-pip-install +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/sdk/python/test_data/components/concat_message.py b/test_data/sdk_compiled_pipelines/valid/essential/concat_message.py similarity index 100% rename from sdk/python/test_data/components/concat_message.py rename to test_data/sdk_compiled_pipelines/valid/essential/concat_message.py diff --git a/test_data/sdk_compiled_pipelines/valid/essential/concat_message.yaml b/test_data/sdk_compiled_pipelines/valid/essential/concat_message.yaml new file mode 100644 index 00000000000..6c28c9a9c34 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/concat_message.yaml @@ -0,0 +1,87 @@ +# PIPELINE DEFINITION +# Name: concat-message +# Inputs: +# message1: str +# message2: str +# Outputs: +# Output: str +components: + comp-concat-message: + executorLabel: exec-concat-message + 
inputDefinitions: + parameters: + message1: + parameterType: STRING + message2: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING +deploymentSpec: + executors: + exec-concat-message: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - concat_message + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef concat_message(message1: str, message2: str) -> str:\n return\ + \ message1 + message2\n\n" + image: python:3.9 +pipelineInfo: + name: concat-message +root: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: concat-message + tasks: + concat-message: + cachingOptions: + enableCache: true + componentRef: + name: comp-concat-message + inputs: + parameters: + message1: + componentInputParameter: message1 + message2: + componentInputParameter: message2 + taskInfo: + name: concat-message + inputDefinitions: + parameters: + message1: + parameterType: STRING + message2: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/components/container_no_input.py b/test_data/sdk_compiled_pipelines/valid/essential/container_no_input.py similarity index 100% rename from sdk/python/test_data/components/container_no_input.py rename to test_data/sdk_compiled_pipelines/valid/essential/container_no_input.py diff --git a/test_data/sdk_compiled_pipelines/valid/essential/container_no_input.yaml b/test_data/sdk_compiled_pipelines/valid/essential/container_no_input.yaml new file mode 100644 index 00000000000..44d21e33edc --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/container_no_input.yaml @@ -0,0 +1,27 @@ +# PIPELINE DEFINITION +# Name: container-no-input +components: + comp-container-no-input: + executorLabel: exec-container-no-input +deploymentSpec: + executors: + exec-container-no-input: + container: + command: + - echo + - hello world + image: python:3.9 +pipelineInfo: + name: container-no-input +root: + dag: + tasks: + container-no-input: + cachingOptions: + enableCache: true + componentRef: + name: comp-container-no-input + taskInfo: + name: container-no-input +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/essential/dict_input.yaml b/test_data/sdk_compiled_pipelines/valid/essential/dict_input.yaml new file mode 100644 index 00000000000..a8194c148e1 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/dict_input.yaml @@ -0,0 +1,63 @@ +# PIPELINE DEFINITION +# Name: dict-input +# Inputs: +# struct: dict +components: + comp-dict-input: + executorLabel: exec-dict-input + inputDefinitions: + parameters: + struct: + parameterType: STRUCT +deploymentSpec: + executors: + exec-dict-input: + container: + args: + - --executor_input + - '{{$}}' 
+ - --function_to_execute + - dict_input + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef dict_input(struct: Dict):\n print(struct)\n\n" + image: python:3.9 +pipelineInfo: + name: dict-input +root: + dag: + tasks: + dict-input: + cachingOptions: + enableCache: true + componentRef: + name: comp-dict-input + inputs: + parameters: + struct: + componentInputParameter: struct + taskInfo: + name: dict-input + inputDefinitions: + parameters: + struct: + parameterType: STRUCT +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/essential/embedded_artifact.py b/test_data/sdk_compiled_pipelines/valid/essential/embedded_artifact.py new file mode 100644 index 00000000000..21acf82c0da --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/embedded_artifact.py @@ -0,0 +1,36 @@ +from kfp import dsl, compiler +import os +import tempfile + +with tempfile.TemporaryDirectory() as tmpdir: + tmpdir_path = os.path.join(tmpdir, "artifact") + os.makedirs(tmpdir_path, exist_ok=True) + with open(os.path.join(tmpdir_path, "log.txt"), "w", encoding="utf-8") as f: + f.write("Hello, world!") + + @dsl.component(embedded_artifact_path=tmpdir_path) + def read_embedded_artifact_dir(artifact: dsl.EmbeddedInput[dsl.Dataset]): + import os + + with open(os.path.join(artifact.path, "log.txt"), "r", encoding="utf-8") as f: + log = f.read() + + assert log == "Hello, world!" + + + @dsl.component(embedded_artifact_path=os.path.join(tmpdir_path, "log.txt")) + def read_embedded_artifact_file(artifact: dsl.EmbeddedInput[dsl.Dataset]): + with open(artifact.path, "r", encoding="utf-8") as f: + log = f.read() + + assert log == "Hello, world!" + + @dsl.pipeline(name="nb-simple") + def pipeline(): + read_embedded_artifact_dir().set_caching_options(False) + read_embedded_artifact_file().set_caching_options(False) + + if __name__ == "__main__": + compiler.Compiler().compile( + pipeline_func=pipeline, package_path=__file__.replace(".py", ".yaml") + ) diff --git a/test_data/sdk_compiled_pipelines/valid/essential/embedded_artifact.yaml b/test_data/sdk_compiled_pipelines/valid/essential/embedded_artifact.yaml new file mode 100644 index 00000000000..2e4994533eb --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/embedded_artifact.yaml @@ -0,0 +1,114 @@ +# PIPELINE DEFINITION +# Name: nb-simple +components: + comp-read-embedded-artifact-dir: + executorLabel: exec-read-embedded-artifact-dir + comp-read-embedded-artifact-file: + executorLabel: exec-read-embedded-artifact-file +deploymentSpec: + executors: + exec-read-embedded-artifact-dir: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - read_embedded_artifact_dir + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\n__KFP_EMBEDDED_ARCHIVE_B64 = 'H4sIAD/g42gC/+3UzYrCMBQF4Kx9irgf0ntjbq4uBJcufYVCSylEC7ViH38yuphBKMMsKuicb5NfyCKc4wpX7A7luK/Lqu7NLOhuaiRS/z3/2mfyHIwdzRNczkPZ5+fN/8RiL221DZ43ca2LvGx+LP3aHof2WG9ZZaMxSFg5Yi8UdWHgDbhi/jduEReZzn+es5CSxnxRc/45N4Cx8sz8n87tqUnl5L3fzl/2/9H/6P+/9v9KPfr/TfKfusYN4zBz/8cQpvPP8tD/PngyltD/s9vXKXUf9tr1qVoiDgAAAAAAAAAAAAAAAAAv7ROs57gWACgAAA=='\n\ + \nimport base64 as __kfp_b64\nimport io as __kfp_io\nimport os as __kfp_os\n\ + import sys as __kfp_sys\nimport tarfile as __kfp_tarfile\nimport tempfile\ + \ as __kfp_tempfile\n\n# Extract embedded archive at import time to ensure\ + \ sys.path and globals are set\n__kfp_tmpdir = __kfp_tempfile.TemporaryDirectory()\n\ + __KFP_EMBEDDED_ASSET_DIR = __kfp_tmpdir.name\ntry:\n __kfp_bytes = __kfp_b64.b64decode(__KFP_EMBEDDED_ARCHIVE_B64.encode('ascii'))\n\ + \ with __kfp_tarfile.open(fileobj=__kfp_io.BytesIO(__kfp_bytes), mode='r:gz')\ + \ as __kfp_tar:\n __kfp_tar.extractall(path=__KFP_EMBEDDED_ASSET_DIR)\n\ + except Exception as __kfp_e:\n raise RuntimeError(f'Failed to extract\ + \ embedded archive: {__kfp_e}')\n\n# Always prepend the extracted directory\ + \ to sys.path for import resolution\nif __KFP_EMBEDDED_ASSET_DIR not in\ + \ __kfp_sys.path:\n __kfp_sys.path.insert(0, __KFP_EMBEDDED_ASSET_DIR)\n\ + \n\n\n\ndef read_embedded_artifact_dir(artifact: dsl.EmbeddedInput[dsl.Dataset]):\n\ + \ import os\n\n with open(os.path.join(artifact.path, \"log.txt\"\ + ), \"r\", encoding=\"utf-8\") as f:\n log = f.read()\n\n assert\ + \ log == \"Hello, world!\"\n\n" + image: python:3.9 + exec-read-embedded-artifact-file: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - read_embedded_artifact_file + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\n__KFP_EMBEDDED_ARCHIVE_B64 = 'H4sIAD/g42gC/+3UvQrCMBSG4cxeRdylJml+dCg4OnoLhZZSiBZqxV6+KQ66FCcF9X2W5CSBs5wv2Tpb7w7luK/Lqu7FW6i7uVWpYB776Vwro62Qo/iAy3ko+9Re/Cft5KWtCmv01m/CIpXNU2k28ji0x7rQwW2Dt87mmdLG5cEsBH5A7JpsGIe39phC7a2dz792aQ5VUMGnvyCk/BvjcyHVJ/N/OrenJpaz717df6l9HWO3kteuj9WSOAAAAAAAAAAAAAAAAHydG2KO630AKAAA'\n\ + \nimport base64 as __kfp_b64\nimport io as __kfp_io\nimport os as __kfp_os\n\ + import sys as __kfp_sys\nimport tarfile as __kfp_tarfile\nimport tempfile\ + \ as __kfp_tempfile\n\n# Extract embedded archive at import time to ensure\ + \ sys.path and globals are set\n__kfp_tmpdir = __kfp_tempfile.TemporaryDirectory()\n\ + __KFP_EMBEDDED_ASSET_DIR = __kfp_tmpdir.name\ntry:\n __kfp_bytes = __kfp_b64.b64decode(__KFP_EMBEDDED_ARCHIVE_B64.encode('ascii'))\n\ + \ with __kfp_tarfile.open(fileobj=__kfp_io.BytesIO(__kfp_bytes), mode='r:gz')\ + \ as __kfp_tar:\n __kfp_tar.extractall(path=__KFP_EMBEDDED_ASSET_DIR)\n\ + except Exception as __kfp_e:\n raise RuntimeError(f'Failed to extract\ + \ embedded archive: {__kfp_e}')\n\n# Always prepend the extracted directory\ + \ to sys.path for import resolution\nif __KFP_EMBEDDED_ASSET_DIR not in\ + \ __kfp_sys.path:\n __kfp_sys.path.insert(0, __KFP_EMBEDDED_ASSET_DIR)\n\ + \n__KFP_EMBEDDED_ASSET_FILE = __kfp_os.path.join(__KFP_EMBEDDED_ASSET_DIR,\ + \ 'log.txt')\n\n\n\n\ndef read_embedded_artifact_file(artifact: dsl.EmbeddedInput[dsl.Dataset]):\n\ + \ with open(artifact.path, \"r\", encoding=\"utf-8\") as f:\n \ + \ log = f.read()\n\n assert log == \"Hello, world!\"\n\n" + image: python:3.9 +pipelineInfo: + name: nb-simple +root: + dag: + tasks: + read-embedded-artifact-dir: + cachingOptions: {} + componentRef: + name: comp-read-embedded-artifact-dir + taskInfo: + name: read-embedded-artifact-dir + read-embedded-artifact-file: + cachingOptions: {} + componentRef: + name: comp-read-embedded-artifact-file + taskInfo: + name: read-embedded-artifact-file +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/test_data/sdk_compiled_pipelines/valid/essential/iris_pipeline.py b/test_data/sdk_compiled_pipelines/valid/essential/iris_pipeline.py new file mode 100644 index 00000000000..d4912fc490b --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/iris_pipeline.py @@ -0,0 +1,159 @@ +from kfp import compiler, dsl +from kfp.dsl import ClassificationMetrics, Dataset, Input, Model, Output + +common_base_image = ( + "registry.redhat.io/ubi8/python-39@sha256:3523b184212e1f2243e76d8094ab52b01ea3015471471290d011625e1763af61" +) +# common_base_image = "quay.io/opendatahub/ds-pipelines-sample-base:v1.0" + + +@dsl.component(base_image=common_base_image, packages_to_install=["pandas==2.2.0"]) +def create_dataset(iris_dataset: Output[Dataset]): + from io import StringIO # noqa: PLC0415 + + import pandas as pd # noqa: PLC0415 + + data = """ + 5.1,3.5,1.4,0.2,Iris-setosa + 
4.9,3.0,1.4,0.2,Iris-setosa + 4.7,3.2,1.3,0.2,Iris-setosa + 4.6,3.1,1.5,0.2,Iris-setosa + 5.0,3.6,1.4,0.2,Iris-setosa + 5.7,3.8,1.7,0.3,Iris-setosa + 5.1,3.8,1.5,0.3,Iris-setosa + 5.4,3.4,1.7,0.2,Iris-setosa + 5.1,3.7,1.5,0.4,Iris-setosa + 5.1,3.4,1.5,0.2,Iris-setosa + 5.0,3.5,1.3,0.3,Iris-setosa + 4.5,2.3,1.3,0.3,Iris-setosa + 4.4,3.2,1.3,0.2,Iris-setosa + 5.0,3.5,1.6,0.6,Iris-setosa + 5.1,3.8,1.9,0.4,Iris-setosa + 4.8,3.0,1.4,0.3,Iris-setosa + 5.1,3.8,1.6,0.2,Iris-setosa + 4.6,3.2,1.4,0.2,Iris-setosa + 5.3,3.7,1.5,0.2,Iris-setosa + 5.0,3.3,1.4,0.2,Iris-setosa + 7.0,3.2,4.7,1.4,Iris-versicolor + 6.4,3.2,4.5,1.5,Iris-versicolor + 6.9,3.1,4.9,1.5,Iris-versicolor + 5.5,2.3,4.0,1.3,Iris-versicolor + 6.5,2.8,4.6,1.5,Iris-versicolor + 6.2,2.2,4.5,1.5,Iris-versicolor + 5.6,2.5,3.9,1.1,Iris-versicolor + 5.9,3.2,4.8,1.8,Iris-versicolor + 6.1,2.8,4.0,1.3,Iris-versicolor + 6.3,2.5,4.9,1.5,Iris-versicolor + 6.1,2.8,4.7,1.2,Iris-versicolor + 6.4,2.9,4.3,1.3,Iris-versicolor + 6.6,3.0,4.4,1.4,Iris-versicolor + 5.6,2.7,4.2,1.3,Iris-versicolor + 5.7,3.0,4.2,1.2,Iris-versicolor + 5.7,2.9,4.2,1.3,Iris-versicolor + 6.2,2.9,4.3,1.3,Iris-versicolor + 5.1,2.5,3.0,1.1,Iris-versicolor + 5.7,2.8,4.1,1.3,Iris-versicolor + 6.3,3.3,6.0,2.5,Iris-virginica + 5.8,2.7,5.1,1.9,Iris-virginica + 7.1,3.0,5.9,2.1,Iris-virginica + 6.3,2.9,5.6,1.8,Iris-virginica + 6.5,3.0,5.8,2.2,Iris-virginica + 6.9,3.1,5.1,2.3,Iris-virginica + 5.8,2.7,5.1,1.9,Iris-virginica + 6.8,3.2,5.9,2.3,Iris-virginica + 6.7,3.3,5.7,2.5,Iris-virginica + 6.7,3.0,5.2,2.3,Iris-virginica + 6.3,2.5,5.0,1.9,Iris-virginica + 6.5,3.0,5.2,2.0,Iris-virginica + 6.2,3.4,5.4,2.3,Iris-virginica + 5.9,3.0,5.1,1.8,Iris-virginica + """ + col_names = ["Sepal_Length", "Sepal_Width", "Petal_Length", "Petal_Width", "Labels"] + df = pd.read_csv(StringIO(data), names=col_names) + + with open(iris_dataset.path, "w") as f: + df.to_csv(f) + + +@dsl.component( + base_image=common_base_image, + packages_to_install=["pandas==2.2.0", "scikit-learn==1.4.0"], +) +def normalize_dataset( + input_iris_dataset: Input[Dataset], + normalized_iris_dataset: Output[Dataset], + standard_scaler: bool, +): + import pandas as pd # noqa: PLC0415 + from sklearn.preprocessing import MinMaxScaler, StandardScaler # noqa: PLC0415 + + with open(input_iris_dataset.path) as f: + df = pd.read_csv(f) + labels = df.pop("Labels") + + scaler = StandardScaler() if standard_scaler else MinMaxScaler() + + df = pd.DataFrame(scaler.fit_transform(df)) + df["Labels"] = labels + normalized_iris_dataset.metadata["state"] = "Normalized" + with open(normalized_iris_dataset.path, "w") as f: + df.to_csv(f) + + +@dsl.component( + base_image=common_base_image, + packages_to_install=["pandas==2.2.0", "scikit-learn==1.4.0"], +) +def train_model( + normalized_iris_dataset: Input[Dataset], + model: Output[Model], + metrics: Output[ClassificationMetrics], + n_neighbors: int, +): + import pickle # noqa: PLC0415 + + import pandas as pd # noqa: PLC0415 + from sklearn.metrics import confusion_matrix # noqa: PLC0415 + from sklearn.model_selection import cross_val_predict, train_test_split # noqa: PLC0415 + from sklearn.neighbors import KNeighborsClassifier # noqa: PLC0415 + + with open(normalized_iris_dataset.path) as f: + df = pd.read_csv(f) + + y = df.pop("Labels") + X = df + + X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0) # noqa: F841 + + clf = KNeighborsClassifier(n_neighbors=n_neighbors) + clf.fit(X_train, y_train) + + predictions = cross_val_predict(clf, X_train, y_train, cv=3) + 
metrics.log_confusion_matrix( + ["Iris-Setosa", "Iris-Versicolour", "Iris-Virginica"], + confusion_matrix(y_train, predictions).tolist(), # .tolist() to convert np array to list. + ) + + model.metadata["framework"] = "scikit-learn" + with open(model.path, "wb") as f: + pickle.dump(clf, f) + + +@dsl.pipeline(name="iris-training-pipeline") +def my_pipeline( + standard_scaler: bool = True, + neighbors: int = 3, +): + create_dataset_task = create_dataset().set_caching_options(False) + + normalize_dataset_task = normalize_dataset( + input_iris_dataset=create_dataset_task.outputs["iris_dataset"], standard_scaler=standard_scaler + ).set_caching_options(False) + + train_model( + normalized_iris_dataset=normalize_dataset_task.outputs["normalized_iris_dataset"], n_neighbors=neighbors + ).set_caching_options(False) + + +if __name__ == "__main__": + compiler.Compiler().compile(my_pipeline, package_path=__file__.replace(".py", "_compiled.yaml")) diff --git a/test_data/sdk_compiled_pipelines/valid/essential/iris_pipeline_compiled.yaml b/test_data/sdk_compiled_pipelines/valid/essential/iris_pipeline_compiled.yaml new file mode 100644 index 00000000000..cfccf10b81d --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/iris_pipeline_compiled.yaml @@ -0,0 +1,250 @@ +# PIPELINE DEFINITION +# Name: iris-training-pipeline +# Inputs: +# neighbors: int [Default: 3.0] +# standard_scaler: bool [Default: True] +components: + comp-create-dataset: + executorLabel: exec-create-dataset + outputDefinitions: + artifacts: + iris_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-normalize-dataset: + executorLabel: exec-normalize-dataset + inputDefinitions: + artifacts: + input_iris_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + parameters: + standard_scaler: + parameterType: BOOLEAN + outputDefinitions: + artifacts: + normalized_iris_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-train-model: + executorLabel: exec-train-model + inputDefinitions: + artifacts: + normalized_iris_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + parameters: + n_neighbors: + parameterType: NUMBER_INTEGER + outputDefinitions: + artifacts: + metrics: + artifactType: + schemaTitle: system.ClassificationMetrics + schemaVersion: 0.0.1 + model: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-create-dataset: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - create_dataset + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' &&\ + \ python3 -m pip install --quiet --no-warn-script-location 'pandas==2.2.0'\ + \ && \"$0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef create_dataset(iris_dataset: Output[Dataset]):\n from io import\ + \ StringIO # noqa: PLC0415\n\n import pandas as pd # noqa: PLC0415\n\ + \n data = \"\"\"\n 5.1,3.5,1.4,0.2,Iris-setosa\n 4.9,3.0,1.4,0.2,Iris-setosa\n\ + \ 4.7,3.2,1.3,0.2,Iris-setosa\n 4.6,3.1,1.5,0.2,Iris-setosa\n 5.0,3.6,1.4,0.2,Iris-setosa\n\ + \ 5.7,3.8,1.7,0.3,Iris-setosa\n 5.1,3.8,1.5,0.3,Iris-setosa\n 5.4,3.4,1.7,0.2,Iris-setosa\n\ + \ 5.1,3.7,1.5,0.4,Iris-setosa\n 5.1,3.4,1.5,0.2,Iris-setosa\n 5.0,3.5,1.3,0.3,Iris-setosa\n\ + \ 4.5,2.3,1.3,0.3,Iris-setosa\n 4.4,3.2,1.3,0.2,Iris-setosa\n 5.0,3.5,1.6,0.6,Iris-setosa\n\ + \ 5.1,3.8,1.9,0.4,Iris-setosa\n 4.8,3.0,1.4,0.3,Iris-setosa\n 5.1,3.8,1.6,0.2,Iris-setosa\n\ + \ 4.6,3.2,1.4,0.2,Iris-setosa\n 5.3,3.7,1.5,0.2,Iris-setosa\n 5.0,3.3,1.4,0.2,Iris-setosa\n\ + \ 7.0,3.2,4.7,1.4,Iris-versicolor\n 6.4,3.2,4.5,1.5,Iris-versicolor\n\ + \ 6.9,3.1,4.9,1.5,Iris-versicolor\n 5.5,2.3,4.0,1.3,Iris-versicolor\n\ + \ 6.5,2.8,4.6,1.5,Iris-versicolor\n 6.2,2.2,4.5,1.5,Iris-versicolor\n\ + \ 5.6,2.5,3.9,1.1,Iris-versicolor\n 5.9,3.2,4.8,1.8,Iris-versicolor\n\ + \ 6.1,2.8,4.0,1.3,Iris-versicolor\n 6.3,2.5,4.9,1.5,Iris-versicolor\n\ + \ 6.1,2.8,4.7,1.2,Iris-versicolor\n 6.4,2.9,4.3,1.3,Iris-versicolor\n\ + \ 6.6,3.0,4.4,1.4,Iris-versicolor\n 5.6,2.7,4.2,1.3,Iris-versicolor\n\ + \ 5.7,3.0,4.2,1.2,Iris-versicolor\n 5.7,2.9,4.2,1.3,Iris-versicolor\n\ + \ 6.2,2.9,4.3,1.3,Iris-versicolor\n 5.1,2.5,3.0,1.1,Iris-versicolor\n\ + \ 5.7,2.8,4.1,1.3,Iris-versicolor\n 6.3,3.3,6.0,2.5,Iris-virginica\n\ + \ 5.8,2.7,5.1,1.9,Iris-virginica\n 7.1,3.0,5.9,2.1,Iris-virginica\n\ + \ 6.3,2.9,5.6,1.8,Iris-virginica\n 6.5,3.0,5.8,2.2,Iris-virginica\n\ + \ 6.9,3.1,5.1,2.3,Iris-virginica\n 5.8,2.7,5.1,1.9,Iris-virginica\n\ + \ 6.8,3.2,5.9,2.3,Iris-virginica\n 6.7,3.3,5.7,2.5,Iris-virginica\n\ + \ 6.7,3.0,5.2,2.3,Iris-virginica\n 6.3,2.5,5.0,1.9,Iris-virginica\n\ + \ 6.5,3.0,5.2,2.0,Iris-virginica\n 6.2,3.4,5.4,2.3,Iris-virginica\n\ + \ 5.9,3.0,5.1,1.8,Iris-virginica\n \"\"\"\n col_names = [\"Sepal_Length\"\ + , \"Sepal_Width\", \"Petal_Length\", \"Petal_Width\", \"Labels\"]\n df\ + \ = pd.read_csv(StringIO(data), names=col_names)\n\n with open(iris_dataset.path,\ + \ \"w\") as f:\n df.to_csv(f)\n\n" + image: python:3.9 + exec-normalize-dataset: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - normalize_dataset + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' &&\ + \ python3 -m pip install --quiet --no-warn-script-location 'pandas==2.2.0'\ + \ 'scikit-learn==1.4.0' && \"$0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef normalize_dataset(\n input_iris_dataset: Input[Dataset],\n\ + \ normalized_iris_dataset: Output[Dataset],\n standard_scaler: bool,\n\ + ):\n import pandas as pd # noqa: PLC0415\n from sklearn.preprocessing\ + \ import MinMaxScaler, StandardScaler # noqa: PLC0415\n\n with open(input_iris_dataset.path)\ + \ as f:\n df = pd.read_csv(f)\n labels = df.pop(\"Labels\")\n\n\ + \ scaler = StandardScaler() if standard_scaler else MinMaxScaler()\n\n\ + \ df = pd.DataFrame(scaler.fit_transform(df))\n df[\"Labels\"] = labels\n\ + \ normalized_iris_dataset.metadata[\"state\"] = \"Normalized\"\n with\ + \ open(normalized_iris_dataset.path, \"w\") as f:\n df.to_csv(f)\n\ + \n" + image: python:3.9 + exec-train-model: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - train_model + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' &&\ + \ python3 -m pip install --quiet --no-warn-script-location 'pandas==2.2.0'\ + \ 'scikit-learn==1.4.0' && \"$0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef train_model(\n normalized_iris_dataset: Input[Dataset],\n\ + \ model: Output[Model],\n metrics: Output[ClassificationMetrics],\n\ + \ n_neighbors: int,\n):\n import pickle # noqa: PLC0415\n\n import\ + \ pandas as pd # noqa: PLC0415\n from sklearn.metrics import confusion_matrix\ + \ # noqa: PLC0415\n from sklearn.model_selection import cross_val_predict,\ + \ train_test_split # noqa: PLC0415\n from sklearn.neighbors import KNeighborsClassifier\ + \ # noqa: PLC0415\n\n with open(normalized_iris_dataset.path) as f:\n\ + \ df = pd.read_csv(f)\n\n y = df.pop(\"Labels\")\n X = df\n\ + \n X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)\ + \ # noqa: F841\n\n clf = KNeighborsClassifier(n_neighbors=n_neighbors)\n\ + \ clf.fit(X_train, y_train)\n\n predictions = cross_val_predict(clf,\ + \ X_train, y_train, cv=3)\n metrics.log_confusion_matrix(\n [\"\ + Iris-Setosa\", \"Iris-Versicolour\", \"Iris-Virginica\"],\n confusion_matrix(y_train,\ + \ predictions).tolist(), # .tolist() to convert np array to list.\n \ + \ )\n\n model.metadata[\"framework\"] = \"scikit-learn\"\n with open(model.path,\ + \ \"wb\") as f:\n pickle.dump(clf, 
f)\n\n" + image: python:3.9 +pipelineInfo: + name: iris-training-pipeline +root: + dag: + tasks: + create-dataset: + cachingOptions: {} + componentRef: + name: comp-create-dataset + taskInfo: + name: create-dataset + normalize-dataset: + cachingOptions: {} + componentRef: + name: comp-normalize-dataset + dependentTasks: + - create-dataset + inputs: + artifacts: + input_iris_dataset: + taskOutputArtifact: + outputArtifactKey: iris_dataset + producerTask: create-dataset + parameters: + standard_scaler: + componentInputParameter: standard_scaler + taskInfo: + name: normalize-dataset + train-model: + cachingOptions: {} + componentRef: + name: comp-train-model + dependentTasks: + - normalize-dataset + inputs: + artifacts: + normalized_iris_dataset: + taskOutputArtifact: + outputArtifactKey: normalized_iris_dataset + producerTask: normalize-dataset + parameters: + n_neighbors: + componentInputParameter: neighbors + taskInfo: + name: train-model + inputDefinitions: + parameters: + neighbors: + defaultValue: 3.0 + isOptional: true + parameterType: NUMBER_INTEGER + standard_scaler: + defaultValue: true + isOptional: true + parameterType: BOOLEAN +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/lightweight_python_functions_pipeline.py b/test_data/sdk_compiled_pipelines/valid/essential/lightweight_python_functions_pipeline.py similarity index 100% rename from sdk/python/test_data/pipelines/lightweight_python_functions_pipeline.py rename to test_data/sdk_compiled_pipelines/valid/essential/lightweight_python_functions_pipeline.py diff --git a/test_data/sdk_compiled_pipelines/valid/essential/lightweight_python_functions_pipeline.yaml b/test_data/sdk_compiled_pipelines/valid/essential/lightweight_python_functions_pipeline.yaml new file mode 100644 index 00000000000..f9087b137ee --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/lightweight_python_functions_pipeline.yaml @@ -0,0 +1,245 @@ +# PIPELINE DEFINITION +# Name: my-test-pipeline-beta +# Inputs: +# input_dict: dict [Default: {'A': 1.0, 'B': 2.0}] +# message: str +components: + comp-preprocess: + executorLabel: exec-preprocess + inputDefinitions: + parameters: + input_dict_parameter: + parameterType: STRUCT + input_list_parameter: + parameterType: LIST + message: + parameterType: STRING + outputDefinitions: + artifacts: + output_dataset_one: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + output_dataset_two_path: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + parameters: + output_bool_parameter_path: + parameterType: BOOLEAN + output_dict_parameter_path: + parameterType: STRUCT + output_list_parameter_path: + parameterType: LIST + output_parameter_path: + parameterType: STRING + comp-train: + executorLabel: exec-train + inputDefinitions: + artifacts: + dataset_one_path: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + dataset_two: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + parameters: + input_bool: + parameterType: BOOLEAN + input_dict: + parameterType: STRUCT + input_list: + parameterType: LIST + message: + parameterType: STRING + num_steps: + defaultValue: 100.0 + isOptional: true + parameterType: NUMBER_INTEGER + outputDefinitions: + artifacts: + model: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-preprocess: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - preprocess + command: + - sh + - -c + - 
"\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef preprocess(\n # An input parameter of type string.\n message:\ + \ str,\n # An input parameter of type dict.\n input_dict_parameter:\ + \ Dict[str, int],\n # An input parameter of type list.\n input_list_parameter:\ + \ List[str],\n # Use Output[T] to get a metadata-rich handle to the output\ + \ artifact\n # of type `Dataset`.\n output_dataset_one: Output[Dataset],\n\ + \ # A locally accessible filepath for another output artifact of type\n\ + \ # `Dataset`.\n output_dataset_two_path: OutputPath('Dataset'),\n\ + \ # A locally accessible filepath for an output parameter of type string.\n\ + \ output_parameter_path: OutputPath(str),\n # A locally accessible\ + \ filepath for an output parameter of type bool.\n output_bool_parameter_path:\ + \ OutputPath(bool),\n # A locally accessible filepath for an output parameter\ + \ of type dict.\n output_dict_parameter_path: OutputPath(Dict[str, int]),\n\ + \ # A locally accessible filepath for an output parameter of type list.\n\ + \ output_list_parameter_path: OutputPath(List[str]),\n):\n \"\"\"\ + Dummy preprocessing step.\"\"\"\n\n # Use Dataset.path to access a local\ + \ file path for writing.\n # One can also use Dataset.uri to access the\ + \ actual URI file path.\n with open(output_dataset_one.path, 'w') as\ + \ f:\n f.write(message)\n\n # OutputPath is used to just pass\ + \ the local file path of the output artifact\n # to the function.\n \ + \ with open(output_dataset_two_path, 'w') as f:\n f.write(message)\n\ + \n with open(output_parameter_path, 'w') as f:\n f.write(message)\n\ + \n with open(output_bool_parameter_path, 'w') as f:\n f.write(\n\ + \ str(True)) # use either `str()` or `json.dumps()` for bool\ + \ values.\n\n import json\n with open(output_dict_parameter_path,\ + \ 'w') as f:\n f.write(json.dumps(input_dict_parameter))\n\n with\ + \ open(output_list_parameter_path, 'w') as f:\n f.write(json.dumps(input_list_parameter))\n\ + \n" + image: python:3.9 + exec-train: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - train + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef train(\n # Use InputPath to get a locally accessible path\ + \ for the input artifact\n # of type `Dataset`.\n dataset_one_path:\ + \ InputPath('Dataset'),\n # Use Input[T] to get a metadata-rich handle\ + \ to the input artifact\n # of type `Dataset`.\n dataset_two: Input[Dataset],\n\ + \ # An input parameter of type string.\n message: str,\n # Use\ + \ Output[T] to get a metadata-rich handle to the output artifact\n #\ + \ of type `Dataset`.\n model: Output[Model],\n # An input parameter\ + \ of type bool.\n input_bool: bool,\n # An input parameter of type\ + \ dict.\n input_dict: Dict[str, int],\n # An input parameter of type\ + \ List[str].\n input_list: List[str],\n # An input parameter of type\ + \ int with a default value.\n num_steps: int = 100,\n):\n \"\"\"Dummy\ + \ Training step.\"\"\"\n with open(dataset_one_path) as input_file:\n\ + \ dataset_one_contents = input_file.read()\n\n with open(dataset_two.path)\ + \ as input_file:\n dataset_two_contents = input_file.read()\n\n \ + \ line = (f'dataset_one_contents: {dataset_one_contents} || '\n \ + \ f'dataset_two_contents: {dataset_two_contents} || '\n \ + \ f'message: {message} || '\n f'input_bool: {input_bool}, type\ + \ {type(input_bool)} || '\n f'input_dict: {input_dict}, type\ + \ {type(input_dict)} || '\n f'input_list: {input_list}, type\ + \ {type(input_list)} \\n')\n\n with open(model.path, 'w') as output_file:\n\ + \ for i in range(num_steps):\n output_file.write(f'Step\ + \ {i}\\n{line}\\n=====\\n')\n\n # model is an instance of Model artifact,\ + \ which has a .metadata dictionary\n # to store arbitrary metadata for\ + \ the output artifact.\n model.metadata['accuracy'] = 0.9\n\n" + image: python:3.9 +pipelineInfo: + name: my-test-pipeline-beta +root: + dag: + tasks: + preprocess: + cachingOptions: + enableCache: true + componentRef: + name: comp-preprocess + inputs: + parameters: + input_dict_parameter: + componentInputParameter: input_dict + input_list_parameter: + runtimeValue: + constant: + - a + - b + - c + message: + componentInputParameter: message + taskInfo: + name: preprocess + train: + cachingOptions: + enableCache: true + componentRef: + name: comp-train + dependentTasks: + - preprocess + inputs: + artifacts: + dataset_one_path: + taskOutputArtifact: + outputArtifactKey: output_dataset_one + producerTask: preprocess + dataset_two: + taskOutputArtifact: + outputArtifactKey: output_dataset_two_path + producerTask: preprocess + parameters: + input_bool: + taskOutputParameter: + outputParameterKey: output_bool_parameter_path + producerTask: preprocess + input_dict: + taskOutputParameter: + outputParameterKey: output_dict_parameter_path + producerTask: preprocess + input_list: + taskOutputParameter: + outputParameterKey: output_list_parameter_path + producerTask: preprocess + message: + taskOutputParameter: + outputParameterKey: output_parameter_path + 
producerTask: preprocess + taskInfo: + name: train + inputDefinitions: + parameters: + input_dict: + defaultValue: + A: 1.0 + B: 2.0 + isOptional: true + parameterType: STRUCT + message: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/lightweight_python_functions_with_outputs.py b/test_data/sdk_compiled_pipelines/valid/essential/lightweight_python_functions_with_outputs.py similarity index 100% rename from sdk/python/test_data/pipelines/lightweight_python_functions_with_outputs.py rename to test_data/sdk_compiled_pipelines/valid/essential/lightweight_python_functions_with_outputs.py diff --git a/sdk/python/test_data/pipelines/lightweight_python_functions_with_outputs.yaml b/test_data/sdk_compiled_pipelines/valid/essential/lightweight_python_functions_with_outputs.yaml similarity index 100% rename from sdk/python/test_data/pipelines/lightweight_python_functions_with_outputs.yaml rename to test_data/sdk_compiled_pipelines/valid/essential/lightweight_python_functions_with_outputs.yaml diff --git a/sdk/python/test_data/pipelines/pipeline_in_pipeline.py b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_in_pipeline.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_in_pipeline.py rename to test_data/sdk_compiled_pipelines/valid/essential/pipeline_in_pipeline.py diff --git a/test_data/sdk_compiled_pipelines/valid/essential/pipeline_in_pipeline.yaml b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_in_pipeline.yaml new file mode 100644 index 00000000000..56e8d2c2c0f --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_in_pipeline.yaml @@ -0,0 +1,159 @@ +# PIPELINE DEFINITION +# Name: pipeline-in-pipeline +components: + comp-inner-pipeline: + dag: + tasks: + print-op1: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op1-2 + inputs: + parameters: + msg: + componentInputParameter: msg + taskInfo: + name: print-op1 + print-op2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op2 + dependentTasks: + - print-op1 + inputs: + parameters: + msg: + taskOutputParameter: + outputParameterKey: Output + producerTask: print-op1 + taskInfo: + name: print-op2 + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-op1: + executorLabel: exec-print-op1 + inputDefinitions: + parameters: + msg: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-op1-2: + executorLabel: exec-print-op1-2 + inputDefinitions: + parameters: + msg: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-op2: + executorLabel: exec-print-op2 + inputDefinitions: + parameters: + msg: + parameterType: STRING +deploymentSpec: + executors: + exec-print-op1: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op1 + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ + \n" + image: python:3.9 + exec-print-op1-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op1 + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ + \n" + image: python:3.9 + exec-print-op2: + container: + command: + - echo + - '{{$.inputs.parameters[''msg'']}}' + image: alpine +pipelineInfo: + name: pipeline-in-pipeline +root: + dag: + tasks: + inner-pipeline: + cachingOptions: + enableCache: true + componentRef: + name: comp-inner-pipeline + inputs: + parameters: + msg: + runtimeValue: + constant: world + taskInfo: + name: inner-pipeline + print-op1: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op1 + inputs: + parameters: + msg: + runtimeValue: + constant: Hello + taskInfo: + name: print-op1 +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/pipeline_in_pipeline_complex.py b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_in_pipeline_complex.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_in_pipeline_complex.py rename to test_data/sdk_compiled_pipelines/valid/essential/pipeline_in_pipeline_complex.py diff --git a/test_data/sdk_compiled_pipelines/valid/essential/pipeline_in_pipeline_complex.yaml b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_in_pipeline_complex.yaml new file mode 100644 index 00000000000..64e1d9346a9 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_in_pipeline_complex.yaml @@ -0,0 +1,248 @@ +# PIPELINE DEFINITION +# Name: pipeline-in-pipeline-complex +# Inputs: +# msg: str [Default: 'Hello'] +components: + comp-condition-1: + dag: + tasks: + print-op2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op2 + inputs: + parameters: + msg: + runtimeValue: + constant: world + taskInfo: + name: print-op2 + inputDefinitions: + parameters: + pipelinechannel--print-op1-Output: + parameterType: STRING + comp-condition-2: + dag: + tasks: + print-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op + inputs: + parameters: + msg: + 
runtimeValue: + constant: Bye! + taskInfo: + name: print-op + inputDefinitions: + parameters: + pipelinechannel--print-op1-Output: + parameterType: STRING + comp-for-loop-2: + dag: + tasks: + inner-pipeline: + cachingOptions: + enableCache: true + componentRef: + name: comp-inner-pipeline + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--loop-item-param-1 + taskInfo: + name: inner-pipeline + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-1: + parameterType: STRING + comp-inner-pipeline: + dag: + tasks: + condition-1: + componentRef: + name: comp-condition-1 + dependentTasks: + - print-op1 + inputs: + parameters: + pipelinechannel--print-op1-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: print-op1 + taskInfo: + name: condition-1 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--print-op1-Output'] + == 'Hello' + condition-2: + componentRef: + name: comp-condition-2 + dependentTasks: + - print-op1 + inputs: + parameters: + pipelinechannel--print-op1-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: print-op1 + taskInfo: + name: condition-2 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--print-op1-Output'] + != 'Hello' + print-op1: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op1-2 + inputs: + parameters: + msg: + componentInputParameter: msg + taskInfo: + name: print-op1 + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-op: + executorLabel: exec-print-op + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-op1: + executorLabel: exec-print-op1 + inputDefinitions: + parameters: + msg: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-op1-2: + executorLabel: exec-print-op1-2 + inputDefinitions: + parameters: + msg: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-op2: + executorLabel: exec-print-op2 + inputDefinitions: + parameters: + msg: + parameterType: STRING +deploymentSpec: + executors: + exec-print-op: + container: + command: + - echo + - '{{$.inputs.parameters[''msg'']}}' + image: alpine + exec-print-op1: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op1 + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ + \n" + image: python:3.9 + exec-print-op1-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op1 + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ + \n" + image: python:3.9 + exec-print-op2: + container: + command: + - echo + - '{{$.inputs.parameters[''msg'']}}' + image: alpine +pipelineInfo: + name: pipeline-in-pipeline-complex +root: + dag: + tasks: + for-loop-2: + componentRef: + name: comp-for-loop-2 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-1 + items: + raw: '["Hello", "world!"]' + taskInfo: + name: for-loop-2 + print-op1: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op1 + inputs: + parameters: + msg: + componentInputParameter: msg + taskInfo: + name: print-op1 + inputDefinitions: + parameters: + msg: + defaultValue: Hello + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/pipeline_in_pipeline_loaded_from_yaml.py b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_in_pipeline_loaded_from_yaml.py similarity index 82% rename from sdk/python/test_data/pipelines/pipeline_in_pipeline_loaded_from_yaml.py rename to test_data/sdk_compiled_pipelines/valid/essential/pipeline_in_pipeline_loaded_from_yaml.py index 491375a946b..65bade78899 100644 --- a/sdk/python/test_data/pipelines/pipeline_in_pipeline_loaded_from_yaml.py +++ b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_in_pipeline_loaded_from_yaml.py @@ -19,6 +19,7 @@ from kfp import dsl from kfp.dsl import Artifact from kfp.dsl import Input +from sdk.python.test.test_utils.file_utils import FileUtils @dsl.component @@ -26,9 +27,8 @@ def print_op1(data: Input[Artifact]): with open(data.path, 'r') as f: print(f.read()) - -reuse_yaml_pipeline = components.load_component_from_file( - pathlib.Path(__file__).parent / 'pipeline_with_outputs.yaml') +PIPELINE_FILES_DIR = FileUtils.VALID_PIPELINE_FILES +reuse_yaml_pipeline = components.load_component_from_file(file_path=f'{PIPELINE_FILES_DIR}/essential/pipeline_with_outputs.yaml') @dsl.pipeline(name='pipeline-in-pipeline') diff --git a/test_data/sdk_compiled_pipelines/valid/essential/pipeline_in_pipeline_loaded_from_yaml.yaml b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_in_pipeline_loaded_from_yaml.yaml new file mode 100644 index 00000000000..5a752ff0390 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_in_pipeline_loaded_from_yaml.yaml @@ -0,0 +1,273 @@ +# PIPELINE DEFINITION +# Name: pipeline-in-pipeline +components: + comp-inner-pipeline: + dag: + outputs: + artifacts: + data: + artifactSelectors: + - outputArtifactKey: data + producerSubtask: print-op2 + parameters: + msg: + valueFromParameter: + outputParameterKey: Output + producerSubtask: print-op1 + tasks: + print-op1: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op1-2 + inputs: + parameters: + msg: + componentInputParameter: msg + taskInfo: + name: 
print-op1 + print-op2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op2 + dependentTasks: + - print-op1 + inputs: + parameters: + msg: + taskOutputParameter: + outputParameterKey: Output + producerTask: print-op1 + taskInfo: + name: print-op2 + inputDefinitions: + parameters: + msg: + parameterType: STRING + outputDefinitions: + artifacts: + data: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + parameters: + msg: + parameterType: STRING + comp-pipeline-in-pipeline: + dag: + outputs: + artifacts: + Output: + artifactSelectors: + - outputArtifactKey: data + producerSubtask: inner-pipeline + tasks: + inner-pipeline: + cachingOptions: + enableCache: true + componentRef: + name: comp-inner-pipeline + inputs: + parameters: + msg: + runtimeValue: + constant: world + taskInfo: + name: inner-pipeline + print-op1: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op1 + inputs: + parameters: + msg: + componentInputParameter: msg + taskInfo: + name: print-op1 + inputDefinitions: + parameters: + msg: + defaultValue: Hello + isOptional: true + parameterType: STRING + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-print-op1: + executorLabel: exec-print-op1 + inputDefinitions: + parameters: + msg: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-op1-2: + executorLabel: exec-print-op1-2 + inputDefinitions: + parameters: + msg: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-op1-3: + executorLabel: exec-print-op1-3 + inputDefinitions: + artifacts: + data: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-print-op2: + executorLabel: exec-print-op2 + inputDefinitions: + parameters: + msg: + parameterType: STRING + outputDefinitions: + artifacts: + data: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-print-op1: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op1 + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ + \n" + image: python:3.9 + exec-print-op1-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op1 + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ + \n" + image: python:3.9 + exec-print-op1-3: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op1 + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op1(data: Input[Artifact]):\n with open(data.path, 'r')\ + \ as f:\n print(f.read())\n\n" + image: python:3.9 + exec-print-op2: + container: + args: + - '{{$.inputs.parameters[''msg'']}}' + - '{{$.outputs.artifacts[''data''].path}}' + command: + - sh + - -c + - mkdir --parents $(dirname "$1") && echo "$0" > "$1" + image: alpine +pipelineInfo: + name: pipeline-in-pipeline +root: + dag: + tasks: + pipeline-in-pipeline: + cachingOptions: + enableCache: true + componentRef: + name: comp-pipeline-in-pipeline + inputs: + parameters: + msg: + runtimeValue: + constant: Hello + taskInfo: + name: pipeline-in-pipeline + print-op1: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op1-3 + dependentTasks: + - pipeline-in-pipeline + inputs: + artifacts: + data: + taskOutputArtifact: + outputArtifactKey: Output + producerTask: pipeline-in-pipeline + taskInfo: + name: print-op1 +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/sdk/python/test_data/pipelines/pipeline_with_after.py b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_after.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_after.py rename to test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_after.py diff --git a/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_after.yaml b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_after.yaml new file mode 100644 index 00000000000..8de3af88e35 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_after.yaml @@ -0,0 +1,107 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-after +components: + comp-print-text: + executorLabel: exec-print-text + inputDefinitions: + parameters: + text: + parameterType: STRING + comp-print-text-2: + executorLabel: exec-print-text-2 + inputDefinitions: + parameters: + text: + parameterType: STRING + comp-print-text-3: + executorLabel: exec-print-text-3 + inputDefinitions: + 
parameters: + text: + parameterType: STRING +deploymentSpec: + executors: + exec-print-text: + container: + command: + - sh + - -c + - 'set -e -x + + echo "$0" + + ' + - '{{$.inputs.parameters[''text'']}}' + image: alpine + exec-print-text-2: + container: + command: + - sh + - -c + - 'set -e -x + + echo "$0" + + ' + - '{{$.inputs.parameters[''text'']}}' + image: alpine + exec-print-text-3: + container: + command: + - sh + - -c + - 'set -e -x + + echo "$0" + + ' + - '{{$.inputs.parameters[''text'']}}' + image: alpine +pipelineInfo: + name: pipeline-with-after +root: + dag: + tasks: + print-text: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text + inputs: + parameters: + text: + runtimeValue: + constant: 1st task + taskInfo: + name: print-text + print-text-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text-2 + dependentTasks: + - print-text + inputs: + parameters: + text: + runtimeValue: + constant: 2nd task + taskInfo: + name: print-text-2 + print-text-3: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text-3 + dependentTasks: + - print-text + - print-text-2 + inputs: + parameters: + text: + runtimeValue: + constant: 3rd task + taskInfo: + name: print-text-3 +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_condition.py b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_condition.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_condition.py rename to test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_condition.py diff --git a/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_condition.yaml b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_condition.yaml new file mode 100644 index 00000000000..e7e1e4709ba --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_condition.yaml @@ -0,0 +1,277 @@ +# PIPELINE DEFINITION +# Name: single-condition-pipeline +# Inputs: +# text: str [Default: 'condition test'] +components: + comp-condition-1: + dag: + tasks: + flip-coin-op-2: + cachingOptions: {} + componentRef: + name: comp-flip-coin-op-2 + taskInfo: + name: flip-coin-op-2 + print-op-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-2 + dependentTasks: + - flip-coin-op-2 + inputs: + parameters: + msg: + taskOutputParameter: + outputParameterKey: Output + producerTask: flip-coin-op-2 + taskInfo: + name: print-op-2 + print-op-3: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-3 + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--text + taskInfo: + name: print-op-3 + inputDefinitions: + parameters: + pipelinechannel--flip-coin-op-Output: + parameterType: STRING + pipelinechannel--text: + parameterType: STRING + comp-flip-coin-op: + executorLabel: exec-flip-coin-op + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-flip-coin-op-2: + executorLabel: exec-flip-coin-op-2 + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-op: + executorLabel: exec-print-op + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-op-2: + executorLabel: exec-print-op-2 + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-op-3: + executorLabel: exec-print-op-3 + inputDefinitions: + parameters: + msg: + parameterType: STRING +deploymentSpec: + executors: + exec-flip-coin-op: + container: + args: + - 
--executor_input + - '{{$}}' + - --function_to_execute + - flip_coin_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ + \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ + \ 1) == 0 else 'tails'\n return result\n\n" + image: python:3.9 + exec-flip-coin-op-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - flip_coin_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ + \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ + \ 1) == 0 else 'tails'\n return result\n\n" + image: python:3.9 + exec-print-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ + \n" + image: python:3.9 + exec-print-op-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ + \n" + image: python:3.9 + exec-print-op-3: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ + \n" + image: python:3.9 +pipelineInfo: + name: single-condition-pipeline +root: + dag: + tasks: + condition-1: + componentRef: + name: comp-condition-1 + dependentTasks: + - flip-coin-op + inputs: + parameters: + pipelinechannel--flip-coin-op-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: flip-coin-op + pipelinechannel--text: + componentInputParameter: text + taskInfo: + name: condition-1 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--flip-coin-op-Output'] + == 'heads' + flip-coin-op: + cachingOptions: {} + componentRef: + name: comp-flip-coin-op + taskInfo: + name: flip-coin-op + print-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op + dependentTasks: + - flip-coin-op + inputs: + parameters: + msg: + taskOutputParameter: + outputParameterKey: Output + producerTask: flip-coin-op + taskInfo: + name: print-op + inputDefinitions: + parameters: + text: + defaultValue: condition test + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_if_placeholder.py b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_if_placeholder.py new file mode 100644 index 00000000000..e6db9ad3286 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_if_placeholder.py @@ -0,0 +1,71 @@ +# Copyright 2020,2021 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from kfp import dsl, components
+
+component_op = components.load_component_from_text('''
+name: Component with optional inputs
+inputs:
+- {name: required_input, type: String, optional: false}
+- {name: optional_input_1, type: String, optional: true}
+- {name: optional_input_2, type: String, optional: true}
+implementation:
+  container:
+    image: ghcr.io/containerd/busybox
+    command:
+    - echo
+    args:
+    - --arg0
+    - {inputValue: required_input}
+    - if:
+        cond:
+          isPresent: optional_input_1
+        then:
+        - --arg1
+        - {inputValue: optional_input_1}
+    - if:
+        cond:
+          isPresent: optional_input_2
+        then:
+        - --arg2
+        - {inputValue: optional_input_2}
+        else:
+        - --arg2
+        - 'default value'
+''')
+
+
+@dsl.pipeline(name='one-step-pipeline-with-if-placeholder-supply-both')
+def pipeline_both(input0: str = 'input0',
+                  input1: str = 'input1',
+                  input2: str = 'input2'):
+    # supply both optional_input_1 and optional_input_2
+    component = component_op(
+        required_input=input0, optional_input_1=input1, optional_input_2=input2)
+
+
+@dsl.pipeline(name='one-step-pipeline-with-if-placeholder-supply-none')
+def pipeline_none(input0: str = 'input0'):
+    # supply neither optional_input_1 nor optional_input_2
+    # Note: KFP only supports compile-time optional arguments; for example,
+    # it's not supported to write a pipeline that supplies both inputs and
+    # passes None at runtime -- in that case, the input arguments will be
+    # interpreted as the raw text "None". 
+ component = component_op(required_input=input0) + + +if __name__ == '__main__': + from kfp import compiler + compiler.Compiler().compile( + pipeline_func=pipeline_none, package_path=__file__.replace('.py', '.yaml')) \ No newline at end of file diff --git a/sdk/python/test_data/pipelines/pipeline_with_loops.py b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_loops.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_loops.py rename to test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_loops.py diff --git a/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_loops.yaml b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_loops.yaml new file mode 100644 index 00000000000..c4e1e7af9b0 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_loops.yaml @@ -0,0 +1,443 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-loops +# Inputs: +# loop_parameter: list +components: + comp-args-generator-op: + executorLabel: exec-args-generator-op + outputDefinitions: + parameters: + Output: + parameterType: LIST + comp-for-loop-1: + dag: + tasks: + print-text: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--loop_parameter-loop-item + taskInfo: + name: print-text + inputDefinitions: + parameters: + pipelinechannel--loop_parameter: + parameterType: LIST + pipelinechannel--loop_parameter-loop-item: + parameterType: STRING + comp-for-loop-2: + dag: + tasks: + print-struct: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-struct + inputs: + parameters: + struct: + componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item + taskInfo: + name: print-struct + print-text-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text-2 + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item + parameterExpressionSelector: parseJson(string_value)["A_a"] + taskInfo: + name: print-text-2 + print-text-3: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text-3 + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item + parameterExpressionSelector: parseJson(string_value)["B_b"] + taskInfo: + name: print-text-3 + inputDefinitions: + parameters: + pipelinechannel--args-generator-op-Output: + parameterType: LIST + pipelinechannel--args-generator-op-Output-loop-item: + parameterType: STRUCT + comp-for-loop-4: + dag: + tasks: + print-struct-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-struct-2 + inputs: + parameters: + struct: + componentInputParameter: pipelinechannel--loop-item-param-3 + taskInfo: + name: print-struct-2 + print-text-4: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text-4 + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--loop-item-param-3 + parameterExpressionSelector: parseJson(string_value)["A_a"] + taskInfo: + name: print-text-4 + print-text-5: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text-5 + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--loop-item-param-3 + parameterExpressionSelector: parseJson(string_value)["B_b"] + taskInfo: + name: print-text-5 + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-3: + parameterType: STRUCT + comp-print-struct: + 
executorLabel: exec-print-struct + inputDefinitions: + parameters: + struct: + parameterType: STRUCT + comp-print-struct-2: + executorLabel: exec-print-struct-2 + inputDefinitions: + parameters: + struct: + parameterType: STRUCT + comp-print-text: + executorLabel: exec-print-text + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-text-2: + executorLabel: exec-print-text-2 + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-text-3: + executorLabel: exec-print-text-3 + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-text-4: + executorLabel: exec-print-text-4 + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-text-5: + executorLabel: exec-print-text-5 + inputDefinitions: + parameters: + msg: + parameterType: STRING +deploymentSpec: + executors: + exec-args-generator-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - args_generator_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef args_generator_op() -> List[Dict[str, str]]:\n return [{'A_a':\ + \ '1', 'B_b': '2'}, {'A_a': '10', 'B_b': '20'}]\n\n" + image: python:3.9 + exec-print-struct: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_struct + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_struct(struct: Dict):\n print(struct)\n\n" + image: python:3.9 + exec-print-struct-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_struct + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_struct(struct: Dict):\n print(struct)\n\n" + image: python:3.9 + exec-print-text: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_text + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" + image: python:3.9 + exec-print-text-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_text + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" + image: python:3.9 + exec-print-text-3: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_text + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" + image: python:3.9 + exec-print-text-4: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_text + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" + image: python:3.9 + exec-print-text-5: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_text + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-with-loops +root: + dag: + tasks: + args-generator-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-args-generator-op + taskInfo: + name: args-generator-op + for-loop-1: + componentRef: + name: comp-for-loop-1 + inputs: + parameters: + pipelinechannel--loop_parameter: + componentInputParameter: loop_parameter + parameterIterator: + itemInput: pipelinechannel--loop_parameter-loop-item + items: + inputParameter: pipelinechannel--loop_parameter + taskInfo: + name: for-loop-1 + for-loop-2: + componentRef: + name: comp-for-loop-2 + dependentTasks: + - args-generator-op + inputs: + parameters: + pipelinechannel--args-generator-op-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: args-generator-op + parameterIterator: + itemInput: pipelinechannel--args-generator-op-Output-loop-item + items: + inputParameter: pipelinechannel--args-generator-op-Output + taskInfo: + name: for-loop-2 + for-loop-4: + componentRef: + name: comp-for-loop-4 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-3 + items: + raw: '[{"A_a": "1", "B_b": "2"}, {"A_a": "10", "B_b": "20"}]' + taskInfo: + name: for-loop-4 + inputDefinitions: + parameters: + loop_parameter: + parameterType: LIST +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_metrics_outputs.py b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_metrics_outputs.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_metrics_outputs.py rename to test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_metrics_outputs.py diff --git a/sdk/python/test_data/pipelines/pipeline_with_metrics_outputs.yaml b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_metrics_outputs.yaml similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_metrics_outputs.yaml rename to 
test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_metrics_outputs.yaml diff --git a/sdk/python/test_data/pipelines/pipeline_with_nested_conditions.py b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_nested_conditions.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_nested_conditions.py rename to test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_nested_conditions.py diff --git a/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_nested_conditions.yaml b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_nested_conditions.yaml new file mode 100644 index 00000000000..86333309ea3 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_nested_conditions.yaml @@ -0,0 +1,445 @@ +# PIPELINE DEFINITION +# Name: nested-conditions-pipeline +components: + comp-condition-1: + dag: + tasks: + condition-2: + componentRef: + name: comp-condition-2 + dependentTasks: + - flip-coin-op-3 + inputs: + parameters: + pipelinechannel--flip-coin-op-2-Output: + componentInputParameter: pipelinechannel--flip-coin-op-2-Output + pipelinechannel--flip-coin-op-3-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: flip-coin-op-3 + pipelinechannel--flip-coin-op-Output: + componentInputParameter: pipelinechannel--flip-coin-op-Output + taskInfo: + name: condition-2 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--flip-coin-op-2-Output'] + == inputs.parameter_values['pipelinechannel--flip-coin-op-3-Output'] + flip-coin-op-3: + cachingOptions: + enableCache: true + componentRef: + name: comp-flip-coin-op-3 + taskInfo: + name: flip-coin-op-3 + print-op-3: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-3 + dependentTasks: + - flip-coin-op-3 + inputs: + parameters: + msg: + taskOutputParameter: + outputParameterKey: Output + producerTask: flip-coin-op-3 + taskInfo: + name: print-op-3 + inputDefinitions: + parameters: + pipelinechannel--flip-coin-op-2-Output: + parameterType: STRING + pipelinechannel--flip-coin-op-Output: + parameterType: STRING + comp-condition-2: + dag: + tasks: + flip-coin-op-4: + cachingOptions: + enableCache: true + componentRef: + name: comp-flip-coin-op-4 + taskInfo: + name: flip-coin-op-4 + print-op-4: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-4 + dependentTasks: + - flip-coin-op-4 + inputs: + parameters: + msg: + taskOutputParameter: + outputParameterKey: Output + producerTask: flip-coin-op-4 + taskInfo: + name: print-op-4 + inputDefinitions: + parameters: + pipelinechannel--flip-coin-op-2-Output: + parameterType: STRING + pipelinechannel--flip-coin-op-3-Output: + parameterType: STRING + pipelinechannel--flip-coin-op-Output: + parameterType: STRING + comp-flip-coin-op: + executorLabel: exec-flip-coin-op + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-flip-coin-op-2: + executorLabel: exec-flip-coin-op-2 + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-flip-coin-op-3: + executorLabel: exec-flip-coin-op-3 + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-flip-coin-op-4: + executorLabel: exec-flip-coin-op-4 + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-op: + executorLabel: exec-print-op + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-op-2: + executorLabel: exec-print-op-2 + inputDefinitions: + parameters: + msg: + parameterType: 
STRING + comp-print-op-3: + executorLabel: exec-print-op-3 + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-op-4: + executorLabel: exec-print-op-4 + inputDefinitions: + parameters: + msg: + parameterType: STRING +deploymentSpec: + executors: + exec-flip-coin-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - flip_coin_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ + \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ + \ 1) == 0 else 'tails'\n return result\n\n" + image: python:3.9 + exec-flip-coin-op-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - flip_coin_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ + \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ + \ 1) == 0 else 'tails'\n return result\n\n" + image: python:3.9 + exec-flip-coin-op-3: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - flip_coin_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ + \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ + \ 1) == 0 else 'tails'\n return result\n\n" + image: python:3.9 + exec-flip-coin-op-4: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - flip_coin_op + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ + \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ + \ 1) == 0 else 'tails'\n return result\n\n" + image: python:3.9 + exec-print-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ + \n" + image: python:3.9 + exec-print-op-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ + \n" + image: python:3.9 + exec-print-op-3: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ + \n" + image: python:3.9 + exec-print-op-4: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ + \n" + image: python:3.9 +pipelineInfo: + name: nested-conditions-pipeline +root: + dag: + tasks: + condition-1: + componentRef: + name: comp-condition-1 + dependentTasks: + - flip-coin-op + - flip-coin-op-2 + inputs: + parameters: + pipelinechannel--flip-coin-op-2-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: flip-coin-op-2 + pipelinechannel--flip-coin-op-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: flip-coin-op + taskInfo: + name: condition-1 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--flip-coin-op-Output'] + != 'no-such-result' + flip-coin-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-flip-coin-op + taskInfo: + name: flip-coin-op + flip-coin-op-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-flip-coin-op-2 + taskInfo: + name: flip-coin-op-2 + print-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op + dependentTasks: + - flip-coin-op + inputs: + parameters: + msg: + taskOutputParameter: + outputParameterKey: Output + producerTask: flip-coin-op + taskInfo: + name: print-op + print-op-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-2 + dependentTasks: + - flip-coin-op-2 + inputs: + parameters: + msg: + taskOutputParameter: + outputParameterKey: Output + producerTask: flip-coin-op-2 + taskInfo: + name: print-op-2 +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_nested_conditions_yaml.py b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_nested_conditions_yaml.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_nested_conditions_yaml.py rename to test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_nested_conditions_yaml.py diff --git 
a/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_nested_conditions_yaml.yaml b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_nested_conditions_yaml.yaml new file mode 100644 index 00000000000..c9ce564d15d --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_nested_conditions_yaml.yaml @@ -0,0 +1,350 @@ +# PIPELINE DEFINITION +# Name: conditional-execution-pipeline +# Description: Shows how to use dsl.Condition(). +components: + comp-condition-1: + dag: + tasks: + condition-2: + componentRef: + name: comp-condition-2 + dependentTasks: + - generate-random-number + inputs: + parameters: + pipelinechannel--flip-coin-output: + componentInputParameter: pipelinechannel--flip-coin-output + pipelinechannel--generate-random-number-output: + taskOutputParameter: + outputParameterKey: output + producerTask: generate-random-number + taskInfo: + name: condition-2 + triggerPolicy: + condition: int(inputs.parameter_values['pipelinechannel--generate-random-number-output']) + > 5 + condition-3: + componentRef: + name: comp-condition-3 + dependentTasks: + - generate-random-number + inputs: + parameters: + pipelinechannel--flip-coin-output: + componentInputParameter: pipelinechannel--flip-coin-output + pipelinechannel--generate-random-number-output: + taskOutputParameter: + outputParameterKey: output + producerTask: generate-random-number + taskInfo: + name: condition-3 + triggerPolicy: + condition: int(inputs.parameter_values['pipelinechannel--generate-random-number-output']) + <= 5 + generate-random-number: + cachingOptions: + enableCache: true + componentRef: + name: comp-generate-random-number + taskInfo: + name: generate-random-number + inputDefinitions: + parameters: + pipelinechannel--flip-coin-output: + parameterType: STRING + comp-condition-2: + dag: + tasks: + print: + cachingOptions: + enableCache: true + componentRef: + name: comp-print + inputs: + parameters: + msg: + runtimeValue: + constant: heads and {{$.inputs.parameters['pipelinechannel--generate-random-number-output']}} + > 5! + pipelinechannel--generate-random-number-output: + componentInputParameter: pipelinechannel--generate-random-number-output + taskInfo: + name: print + inputDefinitions: + parameters: + pipelinechannel--flip-coin-output: + parameterType: STRING + pipelinechannel--generate-random-number-output: + parameterType: NUMBER_INTEGER + comp-condition-3: + dag: + tasks: + print-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-2 + inputs: + parameters: + msg: + runtimeValue: + constant: heads and {{$.inputs.parameters['pipelinechannel--generate-random-number-output']}} + <= 5! 
+ pipelinechannel--generate-random-number-output: + componentInputParameter: pipelinechannel--generate-random-number-output + taskInfo: + name: print-2 + inputDefinitions: + parameters: + pipelinechannel--flip-coin-output: + parameterType: STRING + pipelinechannel--generate-random-number-output: + parameterType: NUMBER_INTEGER + comp-condition-4: + dag: + tasks: + condition-5: + componentRef: + name: comp-condition-5 + dependentTasks: + - generate-random-number-2 + inputs: + parameters: + pipelinechannel--flip-coin-output: + componentInputParameter: pipelinechannel--flip-coin-output + pipelinechannel--generate-random-number-2-output: + taskOutputParameter: + outputParameterKey: output + producerTask: generate-random-number-2 + taskInfo: + name: condition-5 + triggerPolicy: + condition: int(inputs.parameter_values['pipelinechannel--generate-random-number-2-output']) + > 15 + condition-6: + componentRef: + name: comp-condition-6 + dependentTasks: + - generate-random-number-2 + inputs: + parameters: + pipelinechannel--flip-coin-output: + componentInputParameter: pipelinechannel--flip-coin-output + pipelinechannel--generate-random-number-2-output: + taskOutputParameter: + outputParameterKey: output + producerTask: generate-random-number-2 + taskInfo: + name: condition-6 + triggerPolicy: + condition: int(inputs.parameter_values['pipelinechannel--generate-random-number-2-output']) + <= 15 + generate-random-number-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-generate-random-number-2 + taskInfo: + name: generate-random-number-2 + inputDefinitions: + parameters: + pipelinechannel--flip-coin-output: + parameterType: STRING + comp-condition-5: + dag: + tasks: + print-3: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-3 + inputs: + parameters: + msg: + runtimeValue: + constant: tails and {{$.inputs.parameters['pipelinechannel--generate-random-number-2-output']}} + > 15! + pipelinechannel--generate-random-number-2-output: + componentInputParameter: pipelinechannel--generate-random-number-2-output + taskInfo: + name: print-3 + inputDefinitions: + parameters: + pipelinechannel--flip-coin-output: + parameterType: STRING + pipelinechannel--generate-random-number-2-output: + parameterType: NUMBER_INTEGER + comp-condition-6: + dag: + tasks: + print-4: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-4 + inputs: + parameters: + msg: + runtimeValue: + constant: tails and {{$.inputs.parameters['pipelinechannel--generate-random-number-2-output']}} + <= 15! 
+ pipelinechannel--generate-random-number-2-output: + componentInputParameter: pipelinechannel--generate-random-number-2-output + taskInfo: + name: print-4 + inputDefinitions: + parameters: + pipelinechannel--flip-coin-output: + parameterType: STRING + pipelinechannel--generate-random-number-2-output: + parameterType: NUMBER_INTEGER + comp-flip-coin: + executorLabel: exec-flip-coin + outputDefinitions: + parameters: + output: + parameterType: STRING + comp-generate-random-number: + executorLabel: exec-generate-random-number + outputDefinitions: + parameters: + output: + parameterType: NUMBER_INTEGER + comp-generate-random-number-2: + executorLabel: exec-generate-random-number-2 + outputDefinitions: + parameters: + output: + parameterType: NUMBER_INTEGER + comp-print: + executorLabel: exec-print + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-2: + executorLabel: exec-print-2 + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-3: + executorLabel: exec-print-3 + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-4: + executorLabel: exec-print-4 + inputDefinitions: + parameters: + msg: + parameterType: STRING +deploymentSpec: + executors: + exec-flip-coin: + container: + args: + - mkdir -p "$(dirname $0)" && python -c "import random; result = 'heads' if + random.randint(0,1) == 0 else 'tails'; print(result, end='')" | tee $0 + - '{{$.outputs.parameters[''output''].output_file}}' + command: + - sh + - -c + image: python:alpine3.9 + exec-generate-random-number: + container: + args: + - mkdir -p "$(dirname $2)" && python -c "import random; print(random.randint($0, + $1), end='')" | tee $2 + - '0' + - '9' + - '{{$.outputs.parameters[''output''].output_file}}' + command: + - sh + - -c + image: python:alpine3.9 + exec-generate-random-number-2: + container: + args: + - mkdir -p "$(dirname $2)" && python -c "import random; print(random.randint($0, + $1), end='')" | tee $2 + - '10' + - '19' + - '{{$.outputs.parameters[''output''].output_file}}' + command: + - sh + - -c + image: python:alpine3.9 + exec-print: + container: + command: + - echo + - '{{$.inputs.parameters[''msg'']}}' + image: python:alpine3.9 + exec-print-2: + container: + command: + - echo + - '{{$.inputs.parameters[''msg'']}}' + image: python:alpine3.9 + exec-print-3: + container: + command: + - echo + - '{{$.inputs.parameters[''msg'']}}' + image: python:alpine3.9 + exec-print-4: + container: + command: + - echo + - '{{$.inputs.parameters[''msg'']}}' + image: python:alpine3.9 +pipelineInfo: + description: Shows how to use dsl.Condition(). + displayName: Conditional execution pipeline. 
+ name: conditional-execution-pipeline +root: + dag: + tasks: + condition-1: + componentRef: + name: comp-condition-1 + dependentTasks: + - flip-coin + inputs: + parameters: + pipelinechannel--flip-coin-output: + taskOutputParameter: + outputParameterKey: output + producerTask: flip-coin + taskInfo: + name: condition-1 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--flip-coin-output'] + == 'heads' + condition-4: + componentRef: + name: comp-condition-4 + dependentTasks: + - flip-coin + inputs: + parameters: + pipelinechannel--flip-coin-output: + taskOutputParameter: + outputParameterKey: output + producerTask: flip-coin + taskInfo: + name: condition-4 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--flip-coin-output'] + == 'tails' + flip-coin: + cachingOptions: + enableCache: true + componentRef: + name: comp-flip-coin + taskInfo: + name: flip-coin +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_outputs.py b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_outputs.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_outputs.py rename to test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_outputs.py diff --git a/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_outputs.yaml b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_outputs.yaml new file mode 100644 index 00000000000..63bc0169161 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_outputs.yaml @@ -0,0 +1,210 @@ +# PIPELINE DEFINITION +# Name: pipeline-in-pipeline +# Inputs: +# msg: str [Default: 'Hello'] +# Outputs: +# Output: system.Artifact +components: + comp-inner-pipeline: + dag: + outputs: + artifacts: + data: + artifactSelectors: + - outputArtifactKey: data + producerSubtask: print-op2 + parameters: + msg: + valueFromParameter: + outputParameterKey: Output + producerSubtask: print-op1 + tasks: + print-op1: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op1-2 + inputs: + parameters: + msg: + componentInputParameter: msg + taskInfo: + name: print-op1 + print-op2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op2 + dependentTasks: + - print-op1 + inputs: + parameters: + msg: + taskOutputParameter: + outputParameterKey: Output + producerTask: print-op1 + taskInfo: + name: print-op2 + inputDefinitions: + parameters: + msg: + parameterType: STRING + outputDefinitions: + artifacts: + data: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + parameters: + msg: + parameterType: STRING + comp-print-op1: + executorLabel: exec-print-op1 + inputDefinitions: + parameters: + msg: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-op1-2: + executorLabel: exec-print-op1-2 + inputDefinitions: + parameters: + msg: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-op2: + executorLabel: exec-print-op2 + inputDefinitions: + parameters: + msg: + parameterType: STRING + outputDefinitions: + artifacts: + data: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-print-op1: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op1 + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ + \n" + image: python:3.9 + exec-print-op1-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op1 + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ + \n" + image: python:3.9 + exec-print-op2: + container: + args: + - '{{$.inputs.parameters[''msg'']}}' + - '{{$.outputs.artifacts[''data''].path}}' + command: + - sh + - -c + - mkdir --parents $(dirname "$1") && echo "$0" > "$1" + image: alpine +pipelineInfo: + name: pipeline-in-pipeline +root: + dag: + outputs: + artifacts: + Output: + artifactSelectors: + - outputArtifactKey: data + producerSubtask: inner-pipeline + tasks: + inner-pipeline: + cachingOptions: + enableCache: true + componentRef: + name: comp-inner-pipeline + inputs: + parameters: + msg: + runtimeValue: + constant: world + taskInfo: + name: inner-pipeline + print-op1: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op1 + inputs: + parameters: + msg: + componentInputParameter: msg + taskInfo: + name: print-op1 + inputDefinitions: + parameters: + msg: + defaultValue: Hello + isOptional: true + parameterType: STRING + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_params_containing_format.py b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_params_containing_format.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_params_containing_format.py rename to test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_params_containing_format.py diff --git a/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_params_containing_format.yaml b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_params_containing_format.yaml new file mode 100644 index 00000000000..58e08a6fb3f --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_params_containing_format.yaml @@ -0,0 +1,210 @@ +# PIPELINE DEFINITION +# Name: 
pipeline-with-pipelineparam-containing-format +# Inputs: +# name: str [Default: 'KFP'] +components: + comp-for-loop-2: + dag: + tasks: + print-op2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op2 + inputs: + parameters: + pipelinechannel--name: + componentInputParameter: pipelinechannel--name + text1: + componentInputParameter: pipelinechannel--loop-item-param-1 + text2: + runtimeValue: + constant: ' and {{$.inputs.parameters[''pipelinechannel--name'']}}.' + taskInfo: + name: print-op2 + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-1: + parameterType: STRING + pipelinechannel--name: + parameterType: STRING + comp-print-op: + executorLabel: exec-print-op + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-op-2: + executorLabel: exec-print-op-2 + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-op2: + executorLabel: exec-print-op2 + inputDefinitions: + parameters: + text1: + parameterType: STRING + text2: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING +deploymentSpec: + executors: + exec-print-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(text: str) -> str:\n print(text)\n return text\n\ + \n" + image: python:3.9 + exec-print-op-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(text: str) -> str:\n print(text)\n return text\n\ + \n" + image: python:3.9 + exec-print-op2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op2 + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op2(text1: str, text2: str) -> str:\n print(text1 +\ + \ text2)\n return text1 + text2\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-with-pipelineparam-containing-format +root: + dag: + tasks: + for-loop-2: + componentRef: + name: comp-for-loop-2 + inputs: + parameters: + pipelinechannel--name: + componentInputParameter: name + parameterIterator: + itemInput: pipelinechannel--loop-item-param-1 + items: + raw: '["1", "2"]' + taskInfo: + name: for-loop-2 + print-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op + inputs: + parameters: + pipelinechannel--name: + componentInputParameter: name + text: + runtimeValue: + constant: Hello {{$.inputs.parameters['pipelinechannel--name']}} + taskInfo: + name: print-op + print-op-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-2 + dependentTasks: + - print-op + inputs: + parameters: + pipelinechannel--print-op-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: print-op + text: + runtimeValue: + constant: '{{$.inputs.parameters[''pipelinechannel--print-op-Output'']}}, + again.' + taskInfo: + name: print-op-2 + inputDefinitions: + parameters: + name: + defaultValue: KFP + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_reused_component.py b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_reused_component.py new file mode 100644 index 00000000000..7d8cb9f020e --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_reused_component.py @@ -0,0 +1,40 @@ +# Copyright 2020 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import os +import pathlib + +from kfp import compiler +from kfp import components +from kfp import dsl +from sdk.python.test.test_utils.file_utils import FileUtils + +test_data_dir = FileUtils.TEST_DATA +add_op = components.load_component_from_file( + str(os.path.join(test_data_dir, "sdk_compiled_pipelines", "valid", "critical",'add_numbers.yaml'))) + + +@dsl.pipeline(name='add-pipeline') +def my_pipeline( + a: int = 2, + b: int = 5, +): + first_add_task = add_op(a=a, b=3) + second_add_task = add_op(a=first_add_task.outputs['Output'], b=b) + third_add_task = add_op(a=second_add_task.outputs['Output'], b=7) + + +if __name__ == '__main__': + compiler.Compiler().compile( + pipeline_func=my_pipeline, + package_path=__file__.replace('.py', '.yaml')) diff --git a/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_reused_component.yaml b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_reused_component.yaml new file mode 100644 index 00000000000..4bfa5bd50f2 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/pipeline_with_reused_component.yaml @@ -0,0 +1,194 @@ +# PIPELINE DEFINITION +# Name: add-pipeline +# Inputs: +# a: int [Default: 2.0] +# b: int [Default: 5.0] +components: + comp-add-numbers: + executorLabel: exec-add-numbers + inputDefinitions: + parameters: + a: + parameterType: NUMBER_INTEGER + b: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-add-numbers-2: + executorLabel: exec-add-numbers-2 + inputDefinitions: + parameters: + a: + parameterType: NUMBER_INTEGER + b: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-add-numbers-3: + executorLabel: exec-add-numbers-3 + inputDefinitions: + parameters: + a: + parameterType: NUMBER_INTEGER + b: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER +deploymentSpec: + executors: + exec-add-numbers: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - add_numbers + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef add_numbers(a: int, b: int) -> int:\n return a + b\n\n" + image: python:3.9 + exec-add-numbers-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - add_numbers + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef add_numbers(a: int, b: int) -> int:\n return a + b\n\n" + image: python:3.9 + exec-add-numbers-3: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - add_numbers + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef add_numbers(a: int, b: int) -> int:\n return a + b\n\n" + image: python:3.9 +pipelineInfo: + name: add-pipeline +root: + dag: + tasks: + add-numbers: + cachingOptions: + enableCache: true + componentRef: + name: comp-add-numbers + inputs: + parameters: + a: + componentInputParameter: a + b: + runtimeValue: + constant: 3.0 + taskInfo: + name: add-numbers + add-numbers-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-add-numbers-2 + dependentTasks: + - add-numbers + inputs: + parameters: + a: + taskOutputParameter: + outputParameterKey: Output + producerTask: add-numbers + b: + componentInputParameter: b + taskInfo: + name: add-numbers-2 + add-numbers-3: + cachingOptions: + enableCache: true + componentRef: + name: comp-add-numbers-3 + dependentTasks: + - add-numbers-2 + inputs: + parameters: + a: + taskOutputParameter: + outputParameterKey: Output + producerTask: add-numbers-2 + b: + runtimeValue: + constant: 7.0 + taskInfo: + name: add-numbers-3 + inputDefinitions: + parameters: + a: + defaultValue: 2.0 + isOptional: true + parameterType: NUMBER_INTEGER + b: + defaultValue: 5.0 + isOptional: true + parameterType: NUMBER_INTEGER +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/test_data/sdk_compiled_pipelines/valid/essential/placeholder_with_if_placeholder_none_input_value.yaml b/test_data/sdk_compiled_pipelines/valid/essential/placeholder_with_if_placeholder_none_input_value.yaml new file mode 100644 index 00000000000..3d1743eb049 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/essential/placeholder_with_if_placeholder_none_input_value.yaml @@ -0,0 +1,54 @@ +# PIPELINE DEFINITION +# Name: one-step-pipeline-with-if-placeholder-supply-none +# Inputs: +# input0: str [Default: 'input0'] +components: + comp-component-with-optional-inputs: + executorLabel: exec-component-with-optional-inputs + inputDefinitions: + parameters: + optional_input_1: + isOptional: true + parameterType: STRING + optional_input_2: + 
isOptional: true + parameterType: STRING + required_input: + parameterType: STRING +deploymentSpec: + executors: + exec-component-with-optional-inputs: + container: + args: + - --arg0 + - '{{$.inputs.parameters[''required_input'']}}' + - '{"IfPresent": {"InputName": "optional_input_1", "Then": ["--arg1", "{{$.inputs.parameters[''optional_input_1'']}}"]}}' + - '{"IfPresent": {"InputName": "optional_input_2", "Then": ["--arg2", "{{$.inputs.parameters[''optional_input_2'']}}"], + "Else": ["--arg2", "default value"]}}' + command: + - echo + image: ghcr.io/containerd/busybox +pipelineInfo: + name: one-step-pipeline-with-if-placeholder-supply-none +root: + dag: + tasks: + component-with-optional-inputs: + cachingOptions: + enableCache: true + componentRef: + name: comp-component-with-optional-inputs + inputs: + parameters: + required_input: + componentInputParameter: input0 + taskInfo: + name: component-with-optional-inputs + inputDefinitions: + parameters: + input0: + defaultValue: input0 + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 \ No newline at end of file diff --git a/test_data/sdk_compiled_pipelines/valid/failing/__init__.py b/test_data/sdk_compiled_pipelines/valid/failing/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/samples/test/fail_v2.py b/test_data/sdk_compiled_pipelines/valid/failing/fail_v2.py similarity index 81% rename from samples/test/fail_v2.py rename to test_data/sdk_compiled_pipelines/valid/failing/fail_v2.py index 94a6b1efcce..9f4b5a67895 100644 --- a/samples/test/fail_v2.py +++ b/test_data/sdk_compiled_pipelines/valid/failing/fail_v2.py @@ -26,3 +26,8 @@ def fail(): @dsl.pipeline(name='fail-pipeline') def fail_pipeline(): fail_task = fail() + +if __name__ == '__main__': + from kfp import compiler + compiler.Compiler().compile( + pipeline_func=fail_pipeline, package_path=__file__.replace('.py', '.yaml')) diff --git a/test_data/sdk_compiled_pipelines/valid/failing/fail_v2.yaml b/test_data/sdk_compiled_pipelines/valid/failing/fail_v2.yaml new file mode 100644 index 00000000000..f0fff944e55 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/failing/fail_v2.yaml @@ -0,0 +1,49 @@ +# PIPELINE DEFINITION +# Name: fail-pipeline +components: + comp-fail: + executorLabel: exec-fail +deploymentSpec: + executors: + exec-fail: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - fail + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef fail():\n '''Fails'''\n import sys\n sys.exit(1)\n\n" + image: python:3.9 +pipelineInfo: + name: fail-pipeline +root: + dag: + tasks: + fail: + cachingOptions: + enableCache: true + componentRef: + name: comp-fail + taskInfo: + name: fail +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/sdk/python/test_data/pipelines/pipeline_with_exit_handler.py b/test_data/sdk_compiled_pipelines/valid/failing/pipeline_with_exit_handler.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_exit_handler.py rename to test_data/sdk_compiled_pipelines/valid/failing/pipeline_with_exit_handler.py diff --git a/test_data/sdk_compiled_pipelines/valid/failing/pipeline_with_exit_handler.yaml b/test_data/sdk_compiled_pipelines/valid/failing/pipeline_with_exit_handler.yaml new file mode 100644 index 00000000000..708129f8939 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/failing/pipeline_with_exit_handler.yaml @@ -0,0 +1,180 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-exit-handler +# Inputs: +# message: str [Default: 'Hello World!'] +components: + comp-exit-handler-1: + dag: + tasks: + fail-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-fail-op + inputs: + parameters: + message: + runtimeValue: + constant: Task failed. + taskInfo: + name: fail-op + print-op-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-2 + inputs: + parameters: + message: + componentInputParameter: pipelinechannel--message + taskInfo: + name: print-op-2 + inputDefinitions: + parameters: + pipelinechannel--message: + parameterType: STRING + comp-fail-op: + executorLabel: exec-fail-op + inputDefinitions: + parameters: + message: + parameterType: STRING + comp-print-op: + executorLabel: exec-print-op + inputDefinitions: + parameters: + message: + parameterType: STRING + comp-print-op-2: + executorLabel: exec-print-op-2 + inputDefinitions: + parameters: + message: + parameterType: STRING +deploymentSpec: + executors: + exec-fail-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - fail_op + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n\ + \ print(message)\n sys.exit(1)\n\n" + image: python:3.9 + exec-print-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ + \ print(message)\n\n" + image: python:3.9 + exec-print-op-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ + \ print(message)\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-with-exit-handler +root: + dag: + tasks: + exit-handler-1: + componentRef: + name: comp-exit-handler-1 + inputs: + parameters: + pipelinechannel--message: + componentInputParameter: message + taskInfo: + name: exit-handler-1 + print-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op + dependentTasks: + - exit-handler-1 + inputs: + parameters: + message: + runtimeValue: + constant: Exit handler has worked! + taskInfo: + name: print-op + triggerPolicy: + strategy: ALL_UPSTREAM_TASKS_COMPLETED + inputDefinitions: + parameters: + message: + defaultValue: Hello World! 
+ isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_multiple_exit_handlers.py b/test_data/sdk_compiled_pipelines/valid/failing/pipeline_with_multiple_exit_handlers.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_multiple_exit_handlers.py rename to test_data/sdk_compiled_pipelines/valid/failing/pipeline_with_multiple_exit_handlers.py diff --git a/test_data/sdk_compiled_pipelines/valid/failing/pipeline_with_multiple_exit_handlers.yaml b/test_data/sdk_compiled_pipelines/valid/failing/pipeline_with_multiple_exit_handlers.yaml new file mode 100644 index 00000000000..5c7eb1f60ea --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/failing/pipeline_with_multiple_exit_handlers.yaml @@ -0,0 +1,406 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-multiple-exit-handlers +# Inputs: +# message: str [Default: 'Hello World!'] +components: + comp-exit-handler-1: + dag: + tasks: + fail-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-fail-op + inputs: + parameters: + message: + runtimeValue: + constant: Task failed. + taskInfo: + name: fail-op + print-op-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-2 + inputs: + parameters: + message: + componentInputParameter: pipelinechannel--message + taskInfo: + name: print-op-2 + inputDefinitions: + parameters: + pipelinechannel--message: + parameterType: STRING + comp-exit-handler-2: + dag: + tasks: + print-op-4: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-4 + inputs: + parameters: + message: + componentInputParameter: pipelinechannel--message + taskInfo: + name: print-op-4 + inputDefinitions: + parameters: + pipelinechannel--message: + parameterType: STRING + comp-exit-handler-3: + dag: + tasks: + print-op-6: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-6 + inputs: + parameters: + message: + componentInputParameter: pipelinechannel--message + taskInfo: + name: print-op-6 + inputDefinitions: + parameters: + pipelinechannel--message: + parameterType: STRING + comp-fail-op: + executorLabel: exec-fail-op + inputDefinitions: + parameters: + message: + parameterType: STRING + comp-print-op: + executorLabel: exec-print-op + inputDefinitions: + parameters: + message: + parameterType: STRING + comp-print-op-2: + executorLabel: exec-print-op-2 + inputDefinitions: + parameters: + message: + parameterType: STRING + comp-print-op-3: + executorLabel: exec-print-op-3 + inputDefinitions: + parameters: + message: + parameterType: STRING + comp-print-op-4: + executorLabel: exec-print-op-4 + inputDefinitions: + parameters: + message: + parameterType: STRING + comp-print-op-5: + executorLabel: exec-print-op-5 + inputDefinitions: + parameters: + message: + parameterType: STRING + comp-print-op-6: + executorLabel: exec-print-op-6 + inputDefinitions: + parameters: + message: + parameterType: STRING +deploymentSpec: + executors: + exec-fail-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - fail_op + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n\ + \ print(message)\n sys.exit(1)\n\n" + image: python:3.9 + exec-print-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ + \ print(message)\n\n" + image: python:3.9 + exec-print-op-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ + \ print(message)\n\n" + image: python:3.9 + exec-print-op-3: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ + \ print(message)\n\n" + image: python:3.9 + exec-print-op-4: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ + \ print(message)\n\n" + image: python:3.9 + exec-print-op-5: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ + \ print(message)\n\n" + image: python:3.9 + exec-print-op-6: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ + \ print(message)\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-with-multiple-exit-handlers +root: + dag: + tasks: + exit-handler-1: + componentRef: + name: comp-exit-handler-1 + inputs: + parameters: + pipelinechannel--message: + componentInputParameter: message + taskInfo: + name: exit-handler-1 + exit-handler-2: + componentRef: + name: comp-exit-handler-2 + inputs: + parameters: + pipelinechannel--message: + componentInputParameter: message + taskInfo: + name: exit-handler-2 + exit-handler-3: + componentRef: + name: comp-exit-handler-3 + inputs: + parameters: + pipelinechannel--message: + componentInputParameter: message + taskInfo: + name: exit-handler-3 + print-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op + dependentTasks: + - exit-handler-1 + inputs: + parameters: + message: + runtimeValue: + constant: First exit handler has worked! + taskInfo: + name: print-op + triggerPolicy: + strategy: ALL_UPSTREAM_TASKS_COMPLETED + print-op-3: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-3 + dependentTasks: + - exit-handler-2 + inputs: + parameters: + message: + runtimeValue: + constant: Second exit handler has worked! + taskInfo: + name: print-op-3 + triggerPolicy: + strategy: ALL_UPSTREAM_TASKS_COMPLETED + print-op-5: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-5 + dependentTasks: + - exit-handler-3 + inputs: + parameters: + message: + runtimeValue: + constant: Third exit handler has worked! + taskInfo: + name: print-op-5 + triggerPolicy: + strategy: ALL_UPSTREAM_TASKS_COMPLETED + inputDefinitions: + parameters: + message: + defaultValue: Hello World! 
+ isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/hello-world.yaml b/test_data/sdk_compiled_pipelines/valid/hello-world.yaml new file mode 100644 index 00000000000..0ffe8110209 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/hello-world.yaml @@ -0,0 +1,28 @@ +# PIPELINE DEFINITION +# Name: echo +components: + comp-echo: + executorLabel: exec-echo +deploymentSpec: + executors: + exec-echo: + container: + args: + - hello world + command: + - echo + image: public.ecr.aws/docker/library/python:3.12 +pipelineInfo: + name: echo +root: + dag: + tasks: + echo: + cachingOptions: + enableCache: true + componentRef: + name: comp-echo + taskInfo: + name: echo +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/backend/test/v2/resources/hello-world.py b/test_data/sdk_compiled_pipelines/valid/hello_world.py similarity index 100% rename from backend/test/v2/resources/hello-world.py rename to test_data/sdk_compiled_pipelines/valid/hello_world.py diff --git a/sdk/python/test_data/components/identity.py b/test_data/sdk_compiled_pipelines/valid/identity.py similarity index 100% rename from sdk/python/test_data/components/identity.py rename to test_data/sdk_compiled_pipelines/valid/identity.py diff --git a/test_data/sdk_compiled_pipelines/valid/identity.yaml b/test_data/sdk_compiled_pipelines/valid/identity.yaml new file mode 100644 index 00000000000..43bb467fba0 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/identity.yaml @@ -0,0 +1,79 @@ +# PIPELINE DEFINITION +# Name: identity +# Inputs: +# value: str +# Outputs: +# Output: str +components: + comp-identity: + executorLabel: exec-identity + inputDefinitions: + parameters: + value: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING +deploymentSpec: + executors: + exec-identity: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - identity + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef identity(value: str) -> str:\n return value\n\n" + image: python:3.9 +pipelineInfo: + name: identity +root: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: identity + tasks: + identity: + cachingOptions: + enableCache: true + componentRef: + name: comp-identity + inputs: + parameters: + value: + componentInputParameter: value + taskInfo: + name: identity + inputDefinitions: + parameters: + value: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/if_elif_else_complex.py b/test_data/sdk_compiled_pipelines/valid/if_elif_else_complex.py similarity index 100% rename from sdk/python/test_data/pipelines/if_elif_else_complex.py rename to test_data/sdk_compiled_pipelines/valid/if_elif_else_complex.py diff --git a/test_data/sdk_compiled_pipelines/valid/if_elif_else_complex.yaml b/test_data/sdk_compiled_pipelines/valid/if_elif_else_complex.yaml new file mode 100644 index 00000000000..8cbfc427e7f --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/if_elif_else_complex.yaml @@ -0,0 +1,1118 @@ +# PIPELINE DEFINITION +# Name: lucky-number-pipeline +# Inputs: +# add_drumroll: bool [Default: True] +# repeat_if_lucky_number: bool [Default: True] +# trials: list [Default: [1.0, 2.0, 3.0]] +components: + comp-condition-11: + dag: + outputs: + parameters: + pipelinechannel--print-and-return-5-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: print-and-return-5 + tasks: + print-and-return-5: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-and-return-5 + inputs: + parameters: + text: + runtimeValue: + constant: Got a high even number! + taskInfo: + name: print-and-return-5 + inputDefinitions: + parameters: + pipelinechannel--int-0-to-9999-Output: + parameterType: NUMBER_INTEGER + pipelinechannel--is-even-or-odd-2-Output: + parameterType: STRING + outputDefinitions: + parameters: + pipelinechannel--print-and-return-5-Output: + parameterType: STRING + comp-condition-12: + dag: + outputs: + parameters: + pipelinechannel--print-and-return-6-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: print-and-return-6 + tasks: + print-and-return-6: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-and-return-6 + inputs: + parameters: + text: + runtimeValue: + constant: Got a high odd number! 
+ taskInfo: + name: print-and-return-6 + inputDefinitions: + parameters: + pipelinechannel--int-0-to-9999-Output: + parameterType: NUMBER_INTEGER + pipelinechannel--is-even-or-odd-2-Output: + parameterType: STRING + outputDefinitions: + parameters: + pipelinechannel--print-and-return-6-Output: + parameterType: STRING + comp-condition-13: + dag: + tasks: + condition-14: + componentRef: + name: comp-condition-14 + inputs: + parameters: + pipelinechannel--int-0-to-9999-Output: + componentInputParameter: pipelinechannel--int-0-to-9999-Output + pipelinechannel--repeat_if_lucky_number: + componentInputParameter: pipelinechannel--repeat_if_lucky_number + taskInfo: + name: condition-14 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--repeat_if_lucky_number'] + == true + print-and-return-8: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-and-return-8 + inputs: + parameters: + text: + runtimeValue: + constant: 'Announcing: Got the lucky number 5000! A one in 10,000 + chance.' + taskInfo: + name: print-and-return-8 + inputDefinitions: + parameters: + pipelinechannel--int-0-to-9999-Output: + parameterType: NUMBER_INTEGER + pipelinechannel--repeat_if_lucky_number: + parameterType: BOOLEAN + comp-condition-14: + dag: + tasks: + for-loop-16: + componentRef: + name: comp-for-loop-16 + inputs: + parameters: + pipelinechannel--int-0-to-9999-Output: + componentInputParameter: pipelinechannel--int-0-to-9999-Output + pipelinechannel--repeat_if_lucky_number: + componentInputParameter: pipelinechannel--repeat_if_lucky_number + parameterIterator: + itemInput: pipelinechannel--loop-item-param-15 + items: + raw: '[1, 2]' + taskInfo: + name: for-loop-16 + inputDefinitions: + parameters: + pipelinechannel--int-0-to-9999-Output: + parameterType: NUMBER_INTEGER + pipelinechannel--repeat_if_lucky_number: + parameterType: BOOLEAN + comp-condition-2: + dag: + tasks: + condition-3: + componentRef: + name: comp-condition-3 + inputs: + parameters: + pipelinechannel--add_drumroll: + componentInputParameter: pipelinechannel--add_drumroll + pipelinechannel--trials-loop-item: + componentInputParameter: pipelinechannel--trials-loop-item + taskInfo: + name: condition-3 + triggerPolicy: + condition: int(inputs.parameter_values['pipelinechannel--trials-loop-item']) + == 3 + inputDefinitions: + parameters: + pipelinechannel--add_drumroll: + parameterType: BOOLEAN + pipelinechannel--trials-loop-item: + parameterType: NUMBER_INTEGER + comp-condition-3: + dag: + tasks: + print-and-return: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-and-return + inputs: + parameters: + text: + runtimeValue: + constant: Adding drumroll on last trial! + taskInfo: + name: print-and-return + inputDefinitions: + parameters: + pipelinechannel--add_drumroll: + parameterType: BOOLEAN + pipelinechannel--trials-loop-item: + parameterType: NUMBER_INTEGER + comp-condition-6: + dag: + outputs: + parameters: + pipelinechannel--print-and-return-2-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: print-and-return-2 + tasks: + print-and-return-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-and-return-2 + inputs: + parameters: + text: + runtimeValue: + constant: Got a low even number! 
+ taskInfo: + name: print-and-return-2 + inputDefinitions: + parameters: + pipelinechannel--int-0-to-9999-Output: + parameterType: NUMBER_INTEGER + pipelinechannel--is-even-or-odd-Output: + parameterType: STRING + outputDefinitions: + parameters: + pipelinechannel--print-and-return-2-Output: + parameterType: STRING + comp-condition-7: + dag: + outputs: + parameters: + pipelinechannel--print-and-return-3-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: print-and-return-3 + tasks: + print-and-return-3: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-and-return-3 + inputs: + parameters: + text: + runtimeValue: + constant: Got a low odd number! + taskInfo: + name: print-and-return-3 + inputDefinitions: + parameters: + pipelinechannel--int-0-to-9999-Output: + parameterType: NUMBER_INTEGER + pipelinechannel--is-even-or-odd-Output: + parameterType: STRING + outputDefinitions: + parameters: + pipelinechannel--print-and-return-3-Output: + parameterType: STRING + comp-condition-8: + dag: + tasks: + condition-branches-5: + componentRef: + name: comp-condition-branches-5 + dependentTasks: + - is-even-or-odd + inputs: + parameters: + pipelinechannel--int-0-to-9999-Output: + componentInputParameter: pipelinechannel--int-0-to-9999-Output + pipelinechannel--is-even-or-odd-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: is-even-or-odd + taskInfo: + name: condition-branches-5 + is-even-or-odd: + cachingOptions: + enableCache: true + componentRef: + name: comp-is-even-or-odd + inputs: + parameters: + num: + componentInputParameter: pipelinechannel--int-0-to-9999-Output + taskInfo: + name: is-even-or-odd + print-and-return-4: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-and-return-4 + dependentTasks: + - condition-branches-5 + inputs: + parameters: + text: + taskOutputParameter: + outputParameterKey: pipelinechannel--condition-branches-5-oneof-1 + producerTask: condition-branches-5 + taskInfo: + name: print-and-return-4 + inputDefinitions: + parameters: + pipelinechannel--int-0-to-9999-Output: + parameterType: NUMBER_INTEGER + comp-condition-9: + dag: + tasks: + condition-branches-10: + componentRef: + name: comp-condition-branches-10 + dependentTasks: + - is-even-or-odd-2 + inputs: + parameters: + pipelinechannel--int-0-to-9999-Output: + componentInputParameter: pipelinechannel--int-0-to-9999-Output + pipelinechannel--is-even-or-odd-2-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: is-even-or-odd-2 + taskInfo: + name: condition-branches-10 + is-even-or-odd-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-is-even-or-odd-2 + inputs: + parameters: + num: + componentInputParameter: pipelinechannel--int-0-to-9999-Output + taskInfo: + name: is-even-or-odd-2 + print-and-return-7: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-and-return-7 + dependentTasks: + - condition-branches-10 + inputs: + parameters: + text: + taskOutputParameter: + outputParameterKey: pipelinechannel--condition-branches-10-oneof-1 + producerTask: condition-branches-10 + taskInfo: + name: print-and-return-7 + inputDefinitions: + parameters: + pipelinechannel--int-0-to-9999-Output: + parameterType: NUMBER_INTEGER + comp-condition-branches-10: + dag: + outputs: + parameters: + pipelinechannel--condition-branches-10-oneof-1: + valueFromOneof: + parameterSelectors: + - outputParameterKey: pipelinechannel--print-and-return-5-Output + producerSubtask: condition-11 + - 
outputParameterKey: pipelinechannel--print-and-return-6-Output + producerSubtask: condition-12 + tasks: + condition-11: + componentRef: + name: comp-condition-11 + inputs: + parameters: + pipelinechannel--int-0-to-9999-Output: + componentInputParameter: pipelinechannel--int-0-to-9999-Output + pipelinechannel--is-even-or-odd-2-Output: + componentInputParameter: pipelinechannel--is-even-or-odd-2-Output + taskInfo: + name: condition-11 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--is-even-or-odd-2-Output'] + == 'even' + condition-12: + componentRef: + name: comp-condition-12 + inputs: + parameters: + pipelinechannel--int-0-to-9999-Output: + componentInputParameter: pipelinechannel--int-0-to-9999-Output + pipelinechannel--is-even-or-odd-2-Output: + componentInputParameter: pipelinechannel--is-even-or-odd-2-Output + taskInfo: + name: condition-12 + triggerPolicy: + condition: '!(inputs.parameter_values[''pipelinechannel--is-even-or-odd-2-Output''] + == ''even'')' + inputDefinitions: + parameters: + pipelinechannel--int-0-to-9999-Output: + parameterType: NUMBER_INTEGER + pipelinechannel--is-even-or-odd-2-Output: + parameterType: STRING + outputDefinitions: + parameters: + pipelinechannel--condition-branches-10-oneof-1: + parameterType: STRING + comp-condition-branches-4: + dag: + tasks: + condition-13: + componentRef: + name: comp-condition-13 + inputs: + parameters: + pipelinechannel--int-0-to-9999-Output: + componentInputParameter: pipelinechannel--int-0-to-9999-Output + pipelinechannel--repeat_if_lucky_number: + componentInputParameter: pipelinechannel--repeat_if_lucky_number + taskInfo: + name: condition-13 + triggerPolicy: + condition: '!(int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) + < 5000) && !(int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) + > 5000)' + condition-8: + componentRef: + name: comp-condition-8 + inputs: + parameters: + pipelinechannel--int-0-to-9999-Output: + componentInputParameter: pipelinechannel--int-0-to-9999-Output + taskInfo: + name: condition-8 + triggerPolicy: + condition: int(inputs.parameter_values['pipelinechannel--int-0-to-9999-Output']) + < 5000 + condition-9: + componentRef: + name: comp-condition-9 + inputs: + parameters: + pipelinechannel--int-0-to-9999-Output: + componentInputParameter: pipelinechannel--int-0-to-9999-Output + taskInfo: + name: condition-9 + triggerPolicy: + condition: '!(int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) + < 5000) && int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) + > 5000' + inputDefinitions: + parameters: + pipelinechannel--int-0-to-9999-Output: + parameterType: NUMBER_INTEGER + pipelinechannel--repeat_if_lucky_number: + parameterType: BOOLEAN + comp-condition-branches-5: + dag: + outputs: + parameters: + pipelinechannel--condition-branches-5-oneof-1: + valueFromOneof: + parameterSelectors: + - outputParameterKey: pipelinechannel--print-and-return-2-Output + producerSubtask: condition-6 + - outputParameterKey: pipelinechannel--print-and-return-3-Output + producerSubtask: condition-7 + tasks: + condition-6: + componentRef: + name: comp-condition-6 + inputs: + parameters: + pipelinechannel--int-0-to-9999-Output: + componentInputParameter: pipelinechannel--int-0-to-9999-Output + pipelinechannel--is-even-or-odd-Output: + componentInputParameter: pipelinechannel--is-even-or-odd-Output + taskInfo: + name: condition-6 + triggerPolicy: + condition: 
inputs.parameter_values['pipelinechannel--is-even-or-odd-Output'] + == 'even' + condition-7: + componentRef: + name: comp-condition-7 + inputs: + parameters: + pipelinechannel--int-0-to-9999-Output: + componentInputParameter: pipelinechannel--int-0-to-9999-Output + pipelinechannel--is-even-or-odd-Output: + componentInputParameter: pipelinechannel--is-even-or-odd-Output + taskInfo: + name: condition-7 + triggerPolicy: + condition: '!(inputs.parameter_values[''pipelinechannel--is-even-or-odd-Output''] + == ''even'')' + inputDefinitions: + parameters: + pipelinechannel--int-0-to-9999-Output: + parameterType: NUMBER_INTEGER + pipelinechannel--is-even-or-odd-Output: + parameterType: STRING + outputDefinitions: + parameters: + pipelinechannel--condition-branches-5-oneof-1: + parameterType: STRING + comp-for-loop-1: + dag: + outputs: + parameters: + pipelinechannel--int-0-to-9999-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: int-0-to-9999 + tasks: + condition-2: + componentRef: + name: comp-condition-2 + inputs: + parameters: + pipelinechannel--add_drumroll: + componentInputParameter: pipelinechannel--add_drumroll + pipelinechannel--trials-loop-item: + componentInputParameter: pipelinechannel--trials-loop-item + taskInfo: + name: condition-2 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--add_drumroll'] == + true + condition-branches-4: + componentRef: + name: comp-condition-branches-4 + dependentTasks: + - int-0-to-9999 + inputs: + parameters: + pipelinechannel--int-0-to-9999-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: int-0-to-9999 + pipelinechannel--repeat_if_lucky_number: + componentInputParameter: pipelinechannel--repeat_if_lucky_number + taskInfo: + name: condition-branches-4 + int-0-to-9999: + cachingOptions: {} + componentRef: + name: comp-int-0-to-9999 + taskInfo: + name: int-0-to-9999 + inputDefinitions: + parameters: + pipelinechannel--add_drumroll: + parameterType: BOOLEAN + pipelinechannel--repeat_if_lucky_number: + parameterType: BOOLEAN + pipelinechannel--trials: + parameterType: LIST + pipelinechannel--trials-loop-item: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + pipelinechannel--int-0-to-9999-Output: + parameterType: LIST + comp-for-loop-16: + dag: + tasks: + print-and-return-9: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-and-return-9 + inputs: + parameters: + text: + runtimeValue: + constant: 'Announcing again: Got the lucky number 5000! A one in + 10,000 chance.' 
+ taskInfo: + name: print-and-return-9 + inputDefinitions: + parameters: + pipelinechannel--int-0-to-9999-Output: + parameterType: NUMBER_INTEGER + pipelinechannel--loop-item-param-15: + parameterType: NUMBER_INTEGER + pipelinechannel--repeat_if_lucky_number: + parameterType: BOOLEAN + comp-int-0-to-9999: + executorLabel: exec-int-0-to-9999 + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-is-even-or-odd: + executorLabel: exec-is-even-or-odd + inputDefinitions: + parameters: + num: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-is-even-or-odd-2: + executorLabel: exec-is-even-or-odd-2 + inputDefinitions: + parameters: + num: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-and-return: + executorLabel: exec-print-and-return + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-and-return-2: + executorLabel: exec-print-and-return-2 + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-and-return-3: + executorLabel: exec-print-and-return-3 + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-and-return-4: + executorLabel: exec-print-and-return-4 + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-and-return-5: + executorLabel: exec-print-and-return-5 + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-and-return-6: + executorLabel: exec-print-and-return-6 + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-and-return-7: + executorLabel: exec-print-and-return-7 + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-and-return-8: + executorLabel: exec-print-and-return-8 + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-and-return-9: + executorLabel: exec-print-and-return-9 + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-ints: + executorLabel: exec-print-ints + inputDefinitions: + parameters: + ints: + parameterType: LIST +deploymentSpec: + executors: + exec-int-0-to-9999: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - int_0_to_9999 + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef int_0_to_9999() -> int:\n import random\n return random.randint(0,\ + \ 9999)\n\n" + image: python:3.9 + exec-is-even-or-odd: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - is_even_or_odd + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef is_even_or_odd(num: int) -> str:\n return 'odd' if num % 2\ + \ else 'even'\n\n" + image: python:3.9 + exec-is-even-or-odd-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - is_even_or_odd + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef is_even_or_odd(num: int) -> str:\n return 'odd' if num % 2\ + \ else 'even'\n\n" + image: python:3.9 + exec-print-and-return: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_and_return + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ + \ text\n\n" + image: python:3.9 + exec-print-and-return-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_and_return + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ + \ text\n\n" + image: python:3.9 + exec-print-and-return-3: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_and_return + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ + \ text\n\n" + image: python:3.9 + exec-print-and-return-4: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_and_return + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ + \ text\n\n" + image: python:3.9 + exec-print-and-return-5: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_and_return + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ + \ text\n\n" + image: python:3.9 + exec-print-and-return-6: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_and_return + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ + \ text\n\n" + image: python:3.9 + exec-print-and-return-7: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_and_return + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ + \ text\n\n" + image: python:3.9 + exec-print-and-return-8: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_and_return + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ + \ text\n\n" + image: python:3.9 + exec-print-and-return-9: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_and_return + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ + \ text\n\n" + image: python:3.9 + exec-print-ints: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_ints + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_ints(ints: List[int]):\n print(ints)\n\n" + image: python:3.9 +pipelineInfo: + name: lucky-number-pipeline +root: + dag: + tasks: + for-loop-1: + componentRef: + name: comp-for-loop-1 + inputs: + parameters: + pipelinechannel--add_drumroll: + componentInputParameter: add_drumroll + pipelinechannel--repeat_if_lucky_number: + componentInputParameter: repeat_if_lucky_number + pipelinechannel--trials: + componentInputParameter: trials + parameterIterator: + itemInput: pipelinechannel--trials-loop-item + items: + inputParameter: pipelinechannel--trials + taskInfo: + name: for-loop-1 + print-ints: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-ints + dependentTasks: + - for-loop-1 + inputs: + parameters: + ints: + taskOutputParameter: + outputParameterKey: pipelinechannel--int-0-to-9999-Output + producerTask: for-loop-1 + taskInfo: + name: print-ints + inputDefinitions: + parameters: + add_drumroll: + defaultValue: true + isOptional: true + parameterType: BOOLEAN + repeat_if_lucky_number: + defaultValue: true + isOptional: true + parameterType: BOOLEAN + trials: + defaultValue: + - 1.0 + - 2.0 + - 3.0 + isOptional: true + parameterType: LIST +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/if_elif_else_with_oneof_parameters.py b/test_data/sdk_compiled_pipelines/valid/if_elif_else_with_oneof_parameters.py similarity index 100% rename from sdk/python/test_data/pipelines/if_elif_else_with_oneof_parameters.py rename to test_data/sdk_compiled_pipelines/valid/if_elif_else_with_oneof_parameters.py diff --git a/test_data/sdk_compiled_pipelines/valid/if_elif_else_with_oneof_parameters.yaml b/test_data/sdk_compiled_pipelines/valid/if_elif_else_with_oneof_parameters.yaml new file mode 100644 index 00000000000..31e5c613d3d --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/if_elif_else_with_oneof_parameters.yaml @@ -0,0 +1,420 @@ +# PIPELINE DEFINITION +# Name: outer-pipeline +# Outputs: +# Output: str +components: + comp-condition-2: + dag: + outputs: + parameters: + pipelinechannel--print-and-return-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: print-and-return + tasks: + print-and-return: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-and-return + inputs: + parameters: + text: + runtimeValue: + constant: Got heads! 
+ taskInfo: + name: print-and-return + inputDefinitions: + parameters: + pipelinechannel--flip-three-sided-die-Output: + parameterType: STRING + outputDefinitions: + parameters: + pipelinechannel--print-and-return-Output: + parameterType: STRING + comp-condition-3: + dag: + outputs: + parameters: + pipelinechannel--print-and-return-2-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: print-and-return-2 + tasks: + print-and-return-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-and-return-2 + inputs: + parameters: + text: + runtimeValue: + constant: Got tails! + taskInfo: + name: print-and-return-2 + inputDefinitions: + parameters: + pipelinechannel--flip-three-sided-die-Output: + parameterType: STRING + outputDefinitions: + parameters: + pipelinechannel--print-and-return-2-Output: + parameterType: STRING + comp-condition-4: + dag: + outputs: + parameters: + pipelinechannel--special-print-and-return-output_key: + valueFromParameter: + outputParameterKey: output_key + producerSubtask: special-print-and-return + tasks: + special-print-and-return: + cachingOptions: + enableCache: true + componentRef: + name: comp-special-print-and-return + inputs: + parameters: + text: + runtimeValue: + constant: Draw! + taskInfo: + name: special-print-and-return + inputDefinitions: + parameters: + pipelinechannel--flip-three-sided-die-Output: + parameterType: STRING + outputDefinitions: + parameters: + pipelinechannel--special-print-and-return-output_key: + parameterType: STRING + comp-condition-branches-1: + dag: + outputs: + parameters: + pipelinechannel--condition-branches-1-oneof-1: + valueFromOneof: + parameterSelectors: + - outputParameterKey: pipelinechannel--print-and-return-Output + producerSubtask: condition-2 + - outputParameterKey: pipelinechannel--print-and-return-2-Output + producerSubtask: condition-3 + - outputParameterKey: pipelinechannel--special-print-and-return-output_key + producerSubtask: condition-4 + tasks: + condition-2: + componentRef: + name: comp-condition-2 + inputs: + parameters: + pipelinechannel--flip-three-sided-die-Output: + componentInputParameter: pipelinechannel--flip-three-sided-die-Output + taskInfo: + name: condition-2 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--flip-three-sided-die-Output'] + == 'heads' + condition-3: + componentRef: + name: comp-condition-3 + inputs: + parameters: + pipelinechannel--flip-three-sided-die-Output: + componentInputParameter: pipelinechannel--flip-three-sided-die-Output + taskInfo: + name: condition-3 + triggerPolicy: + condition: '!(inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] + == ''heads'') && inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] + == ''tails''' + condition-4: + componentRef: + name: comp-condition-4 + inputs: + parameters: + pipelinechannel--flip-three-sided-die-Output: + componentInputParameter: pipelinechannel--flip-three-sided-die-Output + taskInfo: + name: condition-4 + triggerPolicy: + condition: '!(inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] + == ''heads'') && !(inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] + == ''tails'')' + inputDefinitions: + parameters: + pipelinechannel--flip-three-sided-die-Output: + parameterType: STRING + outputDefinitions: + parameters: + pipelinechannel--condition-branches-1-oneof-1: + parameterType: STRING + comp-flip-three-sided-die: + executorLabel: exec-flip-three-sided-die + outputDefinitions: + 
parameters: + Output: + parameterType: STRING + comp-print-and-return: + executorLabel: exec-print-and-return + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-and-return-2: + executorLabel: exec-print-and-return-2 + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-and-return-3: + executorLabel: exec-print-and-return-3 + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-roll-die-pipeline: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: pipelinechannel--condition-branches-1-oneof-1 + producerSubtask: condition-branches-1 + tasks: + condition-branches-1: + componentRef: + name: comp-condition-branches-1 + dependentTasks: + - flip-three-sided-die + inputs: + parameters: + pipelinechannel--flip-three-sided-die-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: flip-three-sided-die + taskInfo: + name: condition-branches-1 + flip-three-sided-die: + cachingOptions: + enableCache: true + componentRef: + name: comp-flip-three-sided-die + taskInfo: + name: flip-three-sided-die + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-special-print-and-return: + executorLabel: exec-special-print-and-return + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + parameters: + output_key: + parameterType: STRING +deploymentSpec: + executors: + exec-flip-three-sided-die: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - flip_three_sided_die + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef flip_three_sided_die() -> str:\n import random\n val =\ + \ random.randint(0, 2)\n\n if val == 0:\n return 'heads'\n \ + \ elif val == 1:\n return 'tails'\n else:\n return 'draw'\n\ + \n" + image: python:3.9 + exec-print-and-return: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_and_return + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ + \ text\n\n" + image: python:3.9 + exec-print-and-return-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_and_return + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ + \ text\n\n" + image: python:3.9 + exec-print-and-return-3: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_and_return + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ + \ text\n\n" + image: python:3.9 + exec-special-print-and-return: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - special_print_and_return + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef special_print_and_return(text: str, output_key: dsl.OutputPath(str)):\n\ + \ print('Got the special state:', text)\n with open(output_key, 'w')\ + \ as f:\n f.write(text)\n\n" + image: python:3.9 +pipelineInfo: + name: outer-pipeline +root: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: print-and-return + tasks: + print-and-return: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-and-return-3 + dependentTasks: + - roll-die-pipeline + inputs: + parameters: + text: + taskOutputParameter: + outputParameterKey: Output + producerTask: roll-die-pipeline + taskInfo: + name: print-and-return + roll-die-pipeline: + cachingOptions: + enableCache: true + componentRef: + name: comp-roll-die-pipeline + taskInfo: + name: roll-die-pipeline + outputDefinitions: + parameters: + Output: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/if_else_with_oneof_artifacts.py b/test_data/sdk_compiled_pipelines/valid/if_else_with_oneof_artifacts.py similarity index 100% rename from sdk/python/test_data/pipelines/if_else_with_oneof_artifacts.py rename to test_data/sdk_compiled_pipelines/valid/if_else_with_oneof_artifacts.py diff --git a/test_data/sdk_compiled_pipelines/valid/if_else_with_oneof_artifacts.yaml b/test_data/sdk_compiled_pipelines/valid/if_else_with_oneof_artifacts.yaml new file mode 100644 index 00000000000..65e8f0fb448 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/if_else_with_oneof_artifacts.yaml @@ -0,0 +1,380 @@ +# PIPELINE DEFINITION +# Name: outer-pipeline +components: + comp-condition-2: + dag: + outputs: + artifacts: + pipelinechannel--param-to-artifact-a: + artifactSelectors: + - outputArtifactKey: a + producerSubtask: param-to-artifact + tasks: + param-to-artifact: + cachingOptions: + enableCache: true + componentRef: + name: comp-param-to-artifact + inputs: + parameters: + val: + componentInputParameter: pipelinechannel--flip-coin-Output + taskInfo: + name: param-to-artifact + inputDefinitions: + parameters: + pipelinechannel--flip-coin-Output: + parameterType: STRING + outputDefinitions: + artifacts: + pipelinechannel--param-to-artifact-a: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-condition-3: + dag: + outputs: + artifacts: + pipelinechannel--param-to-artifact-2-a: + artifactSelectors: + - outputArtifactKey: a + producerSubtask: param-to-artifact-2 + tasks: + param-to-artifact-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-param-to-artifact-2 + inputs: + parameters: + val: + componentInputParameter: pipelinechannel--flip-coin-Output + taskInfo: + name: param-to-artifact-2 + inputDefinitions: + parameters: + pipelinechannel--flip-coin-Output: + parameterType: STRING + outputDefinitions: + artifacts: + 
pipelinechannel--param-to-artifact-2-a: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-condition-branches-1: + dag: + outputs: + artifacts: + pipelinechannel--condition-branches-1-oneof-1: + artifactSelectors: + - outputArtifactKey: pipelinechannel--param-to-artifact-a + producerSubtask: condition-2 + - outputArtifactKey: pipelinechannel--param-to-artifact-2-a + producerSubtask: condition-3 + tasks: + condition-2: + componentRef: + name: comp-condition-2 + inputs: + parameters: + pipelinechannel--flip-coin-Output: + componentInputParameter: pipelinechannel--flip-coin-Output + taskInfo: + name: condition-2 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--flip-coin-Output'] + == 'heads' + condition-3: + componentRef: + name: comp-condition-3 + inputs: + parameters: + pipelinechannel--flip-coin-Output: + componentInputParameter: pipelinechannel--flip-coin-Output + taskInfo: + name: condition-3 + triggerPolicy: + condition: '!(inputs.parameter_values[''pipelinechannel--flip-coin-Output''] + == ''heads'')' + inputDefinitions: + parameters: + pipelinechannel--flip-coin-Output: + parameterType: STRING + outputDefinitions: + artifacts: + pipelinechannel--condition-branches-1-oneof-1: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-flip-coin: + executorLabel: exec-flip-coin + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-flip-coin-pipeline: + dag: + outputs: + artifacts: + Output: + artifactSelectors: + - outputArtifactKey: pipelinechannel--condition-branches-1-oneof-1 + producerSubtask: condition-branches-1 + tasks: + condition-branches-1: + componentRef: + name: comp-condition-branches-1 + dependentTasks: + - flip-coin + inputs: + parameters: + pipelinechannel--flip-coin-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: flip-coin + taskInfo: + name: condition-branches-1 + flip-coin: + cachingOptions: + enableCache: true + componentRef: + name: comp-flip-coin + taskInfo: + name: flip-coin + print-artifact: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-artifact + dependentTasks: + - condition-branches-1 + inputs: + artifacts: + a: + taskOutputArtifact: + outputArtifactKey: pipelinechannel--condition-branches-1-oneof-1 + producerTask: condition-branches-1 + taskInfo: + name: print-artifact + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-param-to-artifact: + executorLabel: exec-param-to-artifact + inputDefinitions: + parameters: + val: + parameterType: STRING + outputDefinitions: + artifacts: + a: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-param-to-artifact-2: + executorLabel: exec-param-to-artifact-2 + inputDefinitions: + parameters: + val: + parameterType: STRING + outputDefinitions: + artifacts: + a: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-print-artifact: + executorLabel: exec-print-artifact + inputDefinitions: + artifacts: + a: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-print-artifact-2: + executorLabel: exec-print-artifact-2 + inputDefinitions: + artifacts: + a: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-flip-coin: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - flip_coin + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef flip_coin() -> str:\n import random\n return 'heads' if\ + \ random.randint(0, 1) == 0 else 'tails'\n\n" + image: python:3.9 + exec-param-to-artifact: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - param_to_artifact + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef param_to_artifact(val: str, a: Output[Artifact]):\n with open(a.path,\ + \ 'w') as f:\n f.write(val)\n\n" + image: python:3.9 + exec-param-to-artifact-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - param_to_artifact + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef param_to_artifact(val: str, a: Output[Artifact]):\n with open(a.path,\ + \ 'w') as f:\n f.write(val)\n\n" + image: python:3.9 + exec-print-artifact: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_artifact + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_artifact(a: Input[Artifact]):\n with open(a.path) as\ + \ f:\n print(f.read())\n\n" + image: python:3.9 + exec-print-artifact-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_artifact + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_artifact(a: Input[Artifact]):\n with open(a.path) as\ + \ f:\n print(f.read())\n\n" + image: python:3.9 +pipelineInfo: + name: outer-pipeline +root: + dag: + tasks: + flip-coin-pipeline: + cachingOptions: + enableCache: true + componentRef: + name: comp-flip-coin-pipeline + taskInfo: + name: flip-coin-pipeline + print-artifact: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-artifact-2 + dependentTasks: + - flip-coin-pipeline + inputs: + artifacts: + a: + taskOutputArtifact: + outputArtifactKey: Output + producerTask: flip-coin-pipeline + taskInfo: + name: print-artifact +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/if_else_with_oneof_parameters.py b/test_data/sdk_compiled_pipelines/valid/if_else_with_oneof_parameters.py similarity index 100% rename from sdk/python/test_data/pipelines/if_else_with_oneof_parameters.py rename to test_data/sdk_compiled_pipelines/valid/if_else_with_oneof_parameters.py diff --git a/test_data/sdk_compiled_pipelines/valid/if_else_with_oneof_parameters.yaml b/test_data/sdk_compiled_pipelines/valid/if_else_with_oneof_parameters.yaml new file mode 100644 index 00000000000..04ff338058d --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/if_else_with_oneof_parameters.yaml @@ -0,0 +1,313 @@ +# PIPELINE DEFINITION +# Name: flip-coin-pipeline +# Outputs: +# Output: str +components: + comp-condition-2: + dag: + outputs: + parameters: + pipelinechannel--print-and-return-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: print-and-return + tasks: + print-and-return: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-and-return + inputs: + parameters: + text: + runtimeValue: + constant: Got heads! 
+ taskInfo: + name: print-and-return + inputDefinitions: + parameters: + pipelinechannel--flip-coin-Output: + parameterType: STRING + outputDefinitions: + parameters: + pipelinechannel--print-and-return-Output: + parameterType: STRING + comp-condition-3: + dag: + outputs: + parameters: + pipelinechannel--print-and-return-2-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: print-and-return-2 + tasks: + print-and-return-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-and-return-2 + inputs: + parameters: + text: + runtimeValue: + constant: Got tails! + taskInfo: + name: print-and-return-2 + inputDefinitions: + parameters: + pipelinechannel--flip-coin-Output: + parameterType: STRING + outputDefinitions: + parameters: + pipelinechannel--print-and-return-2-Output: + parameterType: STRING + comp-condition-branches-1: + dag: + outputs: + parameters: + pipelinechannel--condition-branches-1-oneof-1: + valueFromOneof: + parameterSelectors: + - outputParameterKey: pipelinechannel--print-and-return-Output + producerSubtask: condition-2 + - outputParameterKey: pipelinechannel--print-and-return-2-Output + producerSubtask: condition-3 + tasks: + condition-2: + componentRef: + name: comp-condition-2 + inputs: + parameters: + pipelinechannel--flip-coin-Output: + componentInputParameter: pipelinechannel--flip-coin-Output + taskInfo: + name: condition-2 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--flip-coin-Output'] + == 'heads' + condition-3: + componentRef: + name: comp-condition-3 + inputs: + parameters: + pipelinechannel--flip-coin-Output: + componentInputParameter: pipelinechannel--flip-coin-Output + taskInfo: + name: condition-3 + triggerPolicy: + condition: '!(inputs.parameter_values[''pipelinechannel--flip-coin-Output''] + == ''heads'')' + inputDefinitions: + parameters: + pipelinechannel--flip-coin-Output: + parameterType: STRING + outputDefinitions: + parameters: + pipelinechannel--condition-branches-1-oneof-1: + parameterType: STRING + comp-flip-coin: + executorLabel: exec-flip-coin + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-and-return: + executorLabel: exec-print-and-return + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-and-return-2: + executorLabel: exec-print-and-return-2 + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-and-return-3: + executorLabel: exec-print-and-return-3 + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING +deploymentSpec: + executors: + exec-flip-coin: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - flip_coin + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef flip_coin() -> str:\n import random\n return 'heads' if\ + \ random.randint(0, 1) == 0 else 'tails'\n\n" + image: python:3.9 + exec-print-and-return: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_and_return + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ + \ text\n\n" + image: python:3.9 + exec-print-and-return-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_and_return + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ + \ text\n\n" + image: python:3.9 + exec-print-and-return-3: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_and_return + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ + \ text\n\n" + image: python:3.9 +pipelineInfo: + name: flip-coin-pipeline +root: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: pipelinechannel--condition-branches-1-oneof-1 + producerSubtask: condition-branches-1 + tasks: + condition-branches-1: + componentRef: + name: comp-condition-branches-1 + dependentTasks: + - flip-coin + inputs: + parameters: + pipelinechannel--flip-coin-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: flip-coin + taskInfo: + name: condition-branches-1 + flip-coin: + cachingOptions: + enableCache: true + componentRef: + name: comp-flip-coin + taskInfo: + name: flip-coin + print-and-return-3: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-and-return-3 + dependentTasks: + - condition-branches-1 + inputs: + parameters: + text: + taskOutputParameter: + outputParameterKey: pipelinechannel--condition-branches-1-oneof-1 + producerTask: condition-branches-1 + taskInfo: + name: print-and-return-3 + outputDefinitions: + parameters: + Output: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/components/input_artifact.py b/test_data/sdk_compiled_pipelines/valid/input_artifact.py similarity index 100% rename from sdk/python/test_data/components/input_artifact.py rename to test_data/sdk_compiled_pipelines/valid/input_artifact.py diff --git a/test_data/sdk_compiled_pipelines/valid/input_artifact.yaml b/test_data/sdk_compiled_pipelines/valid/input_artifact.yaml new file mode 100644 index 00000000000..5479462c5b5 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/input_artifact.yaml @@ -0,0 +1,68 @@ +# PIPELINE DEFINITION +# Name: input-artifact +# Inputs: +# data: system.Dataset +components: + comp-input-artifact: + executorLabel: exec-input-artifact + inputDefinitions: + artifacts: + data: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-input-artifact: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - input_artifact + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef input_artifact(data: Input[Dataset]):\n print(data.name)\n\ + \ print(data.uri)\n print(data.metadata)\n\n" + image: python:3.9 +pipelineInfo: + name: input-artifact +root: + dag: + tasks: + input-artifact: + cachingOptions: + enableCache: true + componentRef: + name: comp-input-artifact + inputs: + artifacts: + data: + componentInputArtifact: data + taskInfo: + name: input-artifact + inputDefinitions: + artifacts: + data: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/log_streaming.py b/test_data/sdk_compiled_pipelines/valid/log_streaming.py new file mode 100644 index 00000000000..f5391aa351c --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/log_streaming.py @@ -0,0 +1,27 @@ +from kfp import compiler, dsl + +common_base_image = ( + "registry.redhat.io/ubi8/python-39@sha256:3523b184212e1f2243e76d8094ab52b01ea3015471471290d011625e1763af61" +) + + +@dsl.component(base_image=common_base_image) +def print_message(message: str): + import datetime # noqa: PLC0415 + import time # noqa: PLC0415 + + t_end = time.time() + 60 + while time.time() < t_end: + print(message + " (" + str(datetime.datetime.now()) + ")") + + +@dsl.pipeline( + name="log-streaming-pipeline", + description="Pipeline that prints a hello message in a loop to test log streaming in Dashboard", +) +def log_streaming_pipeline(message: str = "Hello world"): + print_message(message=message).set_caching_options(False) + + +if __name__ == "__main__": + compiler.Compiler().compile(log_streaming_pipeline, package_path=__file__.replace(".py", "_compiled.yaml")) diff --git a/test_data/sdk_compiled_pipelines/valid/log_streaming_compiled.yaml b/test_data/sdk_compiled_pipelines/valid/log_streaming_compiled.yaml new file mode 100644 index 00000000000..efb3cd6615b --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/log_streaming_compiled.yaml @@ -0,0 +1,70 @@ +# PIPELINE DEFINITION +# Name: log-streaming-pipeline +# Description: Pipeline that prints a hello message in a loop to test log streaming in Dashboard +# Inputs: +# message: str [Default: 'Hello world'] +components: + comp-print-message: + executorLabel: exec-print-message + inputDefinitions: + parameters: + message: + parameterType: STRING +deploymentSpec: + executors: + exec-print-message: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_message + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_message(message: str):\n import datetime # noqa: PLC0415\n\ + \ import time # noqa: PLC0415\n\n t_end = time.time() + 60\n while\ + \ time.time() < t_end:\n print(message + \" (\" + str(datetime.datetime.now())\ + \ + \")\")\n\n" + image: python:3.9 +pipelineInfo: + description: Pipeline that prints a hello message in a loop to test log streaming + in Dashboard + name: log-streaming-pipeline +root: + dag: + tasks: + print-message: + cachingOptions: {} + componentRef: + name: comp-print-message + inputs: + parameters: + message: + componentInputParameter: message + taskInfo: + name: print-message + inputDefinitions: + parameters: + message: + defaultValue: Hello world + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/long-running.yaml b/test_data/sdk_compiled_pipelines/valid/long-running.yaml new file mode 100644 index 00000000000..48fa1233a4f --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/long-running.yaml @@ -0,0 +1,48 @@ +# PIPELINE DEFINITION +# Name: wait-awhile +components: + comp-wait-op: + executorLabel: exec-wait-op + comp-wait-op-2: + executorLabel: exec-wait-op-2 +deploymentSpec: + executors: + exec-wait-op: + container: + args: + - echo step-1 sleeping for 5m; sleep 300; echo done1 + command: + - sh + - -c + image: alpine:latest + exec-wait-op-2: + container: + args: + - echo step-1 sleeping for 5m; sleep 300; echo done1 + command: + - sh + - -c + image: alpine:latest +pipelineInfo: + name: wait-awhile +root: + dag: + tasks: + wait-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-wait-op + taskInfo: + name: wait-op + wait-op-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-wait-op-2 + dependentTasks: + - wait-op + taskInfo: + name: wait-op-2 +schemaVersion: 2.1.0 +sdkVersion: kfp-2.0.0-beta.13 diff --git a/backend/test/v2/resources/long-running.py b/test_data/sdk_compiled_pipelines/valid/long_running.py similarity index 100% rename from backend/test/v2/resources/long-running.py rename to test_data/sdk_compiled_pipelines/valid/long_running.py diff --git a/samples/test/metrics_visualization_v2.py b/test_data/sdk_compiled_pipelines/valid/metrics_visualization_v2.py similarity index 96% rename from samples/test/metrics_visualization_v2.py rename to test_data/sdk_compiled_pipelines/valid/metrics_visualization_v2.py index f192b667292..8cc8f68d533 100644 --- a/samples/test/metrics_visualization_v2.py +++ b/test_data/sdk_compiled_pipelines/valid/metrics_visualization_v2.py @@ -13,7 +13,7 @@ # limitations under the License. 
import os -from kfp import dsl +from kfp import dsl, compiler from kfp.dsl import (component, Output, ClassificationMetrics, Metrics, HTML, Markdown) @@ -149,3 +149,9 @@ def metrics_visualization_pipeline(): digit_classification_op = digit_classification() html_visualization_op = html_visualization() markdown_visualization_op = markdown_visualization() + + +if __name__ == '__main__': + compiler.Compiler().compile( + pipeline_func=metrics_visualization_pipeline, + package_path=__file__.replace('.py', '.yaml')) diff --git a/test_data/sdk_compiled_pipelines/valid/metrics_visualization_v2.yaml b/test_data/sdk_compiled_pipelines/valid/metrics_visualization_v2.yaml new file mode 100644 index 00000000000..58d5114e2a5 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/metrics_visualization_v2.yaml @@ -0,0 +1,284 @@ +# PIPELINE DEFINITION +# Name: metrics-visualization-pipeline +components: + comp-digit-classification: + executorLabel: exec-digit-classification + outputDefinitions: + artifacts: + metrics: + artifactType: + schemaTitle: system.Metrics + schemaVersion: 0.0.1 + comp-html-visualization: + executorLabel: exec-html-visualization + outputDefinitions: + artifacts: + html_artifact: + artifactType: + schemaTitle: system.HTML + schemaVersion: 0.0.1 + comp-iris-sgdclassifier: + executorLabel: exec-iris-sgdclassifier + inputDefinitions: + parameters: + test_samples_fraction: + parameterType: NUMBER_DOUBLE + outputDefinitions: + artifacts: + metrics: + artifactType: + schemaTitle: system.ClassificationMetrics + schemaVersion: 0.0.1 + comp-markdown-visualization: + executorLabel: exec-markdown-visualization + outputDefinitions: + artifacts: + markdown_artifact: + artifactType: + schemaTitle: system.Markdown + schemaVersion: 0.0.1 + comp-wine-classification: + executorLabel: exec-wine-classification + outputDefinitions: + artifacts: + metrics: + artifactType: + schemaTitle: system.ClassificationMetrics + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-digit-classification: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - digit_classification + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'scikit-learn'\ + \ && python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef digit_classification(metrics: Output[Metrics]):\n from sklearn\ + \ import model_selection\n from sklearn.linear_model import LogisticRegression\n\ + \ from sklearn import datasets\n from sklearn.metrics import accuracy_score\n\ + \n # Load digits dataset\n iris = datasets.load_iris()\n\n # #\ + \ Create feature matrix\n X = iris.data\n\n # Create target vector\n\ + \ y = iris.target\n\n #test size\n test_size = 0.33\n\n seed\ + \ = 7\n #cross-validation settings\n kfold = model_selection.KFold(n_splits=10,\ + \ random_state=seed, shuffle=True)\n\n #Model instance\n model = LogisticRegression()\n\ + \ scoring = 'accuracy'\n results = model_selection.cross_val_score(\n\ + \ model, X, y, cv=kfold, scoring=scoring)\n\n #split data\n \ + \ X_train, X_test, y_train, y_test = model_selection.train_test_split(\n\ + \ X, y, test_size=test_size, random_state=seed)\n #fit model\n\ + \ model.fit(X_train, y_train)\n\n #accuracy on test set\n result\ + \ = model.score(X_test, y_test)\n metrics.log_metric('accuracy', (result\ + \ * 100.0))\n\n" + image: python:3.9 + exec-html-visualization: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - html_visualization + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef html_visualization(html_artifact: Output[HTML]):\n html_content\ + \ = '
            <h1>Hello world</h1>
            '\n \ + \ with open(html_artifact.path, 'w') as f:\n f.write(html_content)\n\ + \n" + image: python:3.9 + exec-iris-sgdclassifier: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - iris_sgdclassifier + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'scikit-learn'\ + \ && python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef iris_sgdclassifier(test_samples_fraction: float,\n \ + \ metrics: Output[ClassificationMetrics]):\n from sklearn\ + \ import datasets, model_selection\n from sklearn.linear_model import\ + \ SGDClassifier\n from sklearn.metrics import confusion_matrix\n\n \ + \ iris_dataset = datasets.load_iris()\n train_x, test_x, train_y, test_y\ + \ = model_selection.train_test_split(\n iris_dataset['data'],\n \ + \ iris_dataset['target'],\n test_size=test_samples_fraction)\n\ + \n classifier = SGDClassifier()\n classifier.fit(train_x, train_y)\n\ + \ predictions = model_selection.cross_val_predict(\n classifier,\ + \ train_x, train_y, cv=3)\n metrics.log_confusion_matrix(\n ['Setosa',\ + \ 'Versicolour', 'Virginica'],\n confusion_matrix(\n train_y,\n\ + \ predictions).tolist() # .tolist() to convert np array to list.\n\ + \ )\n\n" + image: python:3.9 + exec-markdown-visualization: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - markdown_visualization + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef markdown_visualization(markdown_artifact: Output[Markdown]):\n\ + \ markdown_content = '## Hello world \\n\\n Markdown content'\n with\ + \ open(markdown_artifact.path, 'w') as f:\n f.write(markdown_content)\n\ + \n" + image: python:3.9 + exec-wine-classification: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - wine_classification + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'scikit-learn'\ + \ && python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef wine_classification(metrics: Output[ClassificationMetrics]):\n\ + \ from sklearn.ensemble import RandomForestClassifier\n from sklearn.metrics\ + \ import roc_curve\n from sklearn.datasets import load_wine\n from\ + \ sklearn.model_selection import train_test_split, cross_val_predict\n\n\ + \ X, y = load_wine(return_X_y=True)\n # Binary classification problem\ + \ for label 1.\n y = y == 1\n\n X_train, X_test, y_train, y_test =\ + \ train_test_split(X, y, random_state=42)\n rfc = RandomForestClassifier(n_estimators=10,\ + \ random_state=42)\n rfc.fit(X_train, y_train)\n y_scores = cross_val_predict(\n\ + \ rfc, X_train, y_train, cv=3, method='predict_proba')\n y_predict\ + \ = cross_val_predict(rfc, X_train, y_train, cv=3, method='predict')\n \ + \ fpr, tpr, thresholds = roc_curve(\n y_true=y_train, y_score=y_scores[:,\ + \ 1], pos_label=True)\n\n # avoid inf thresholds\n epsilon = 1e-6\n\ + \ thresholds = [1 - epsilon if t == float('inf') else t for t in thresholds]\n\ + \n metrics.log_roc_curve(fpr, tpr, thresholds)\n\n" + image: python:3.9 +pipelineInfo: + name: metrics-visualization-pipeline +root: + dag: + tasks: + digit-classification: + cachingOptions: + enableCache: true + componentRef: + name: comp-digit-classification + taskInfo: + name: digit-classification + html-visualization: + cachingOptions: + enableCache: true + componentRef: + name: comp-html-visualization + taskInfo: + name: html-visualization + iris-sgdclassifier: + cachingOptions: + enableCache: true + componentRef: + name: comp-iris-sgdclassifier + inputs: + parameters: + test_samples_fraction: + runtimeValue: + constant: 0.3 + taskInfo: + name: iris-sgdclassifier + markdown-visualization: + cachingOptions: + enableCache: true + componentRef: + name: comp-markdown-visualization + taskInfo: + name: markdown-visualization + wine-classification: + cachingOptions: + enableCache: true + componentRef: + name: comp-wine-classification + taskInfo: + name: wine-classification +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/sdk/python/test_data/components/nested_return.py b/test_data/sdk_compiled_pipelines/valid/nested_return.py similarity index 100% rename from sdk/python/test_data/components/nested_return.py rename to test_data/sdk_compiled_pipelines/valid/nested_return.py diff --git a/test_data/sdk_compiled_pipelines/valid/nested_return.yaml b/test_data/sdk_compiled_pipelines/valid/nested_return.yaml new file mode 100644 index 00000000000..4036968620b --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/nested_return.yaml @@ -0,0 +1,66 @@ +# PIPELINE DEFINITION +# Name: nested-return +# Outputs: +# Output: list +components: + comp-nested-return: + executorLabel: exec-nested-return + outputDefinitions: + parameters: + Output: + parameterType: LIST +deploymentSpec: + 
executors: + exec-nested-return: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - nested_return + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef nested_return() -> List[Dict[str, str]]:\n return [{'A_a':\ + \ '1', 'B_b': '2'}, {'A_a': '10', 'B_b': '20'}]\n\n" + image: python:3.9 +pipelineInfo: + name: nested-return +root: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: nested-return + tasks: + nested-return: + cachingOptions: + enableCache: true + componentRef: + name: comp-nested-return + taskInfo: + name: nested-return + outputDefinitions: + parameters: + Output: + parameterType: LIST +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/nested_with_parameters.py b/test_data/sdk_compiled_pipelines/valid/nested_with_parameters.py similarity index 100% rename from sdk/python/test_data/pipelines/parallelfor_fan_in/nested_with_parameters.py rename to test_data/sdk_compiled_pipelines/valid/nested_with_parameters.py diff --git a/test_data/sdk_compiled_pipelines/valid/nested_with_parameters.yaml b/test_data/sdk_compiled_pipelines/valid/nested_with_parameters.yaml new file mode 100644 index 00000000000..6d6f5ebce4d --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/nested_with_parameters.yaml @@ -0,0 +1,294 @@ +# PIPELINE DEFINITION +# Name: math-pipeline +# Outputs: +# Output: list +components: + comp-add: + executorLabel: exec-add + inputDefinitions: + parameters: + nums: + parameterType: LIST + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-add-two-nums: + executorLabel: exec-add-two-nums + inputDefinitions: + parameters: + x: + parameterType: NUMBER_INTEGER + y: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-double: + executorLabel: exec-double + inputDefinitions: + parameters: + num: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-double-2: + executorLabel: exec-double-2 + inputDefinitions: + parameters: + num: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-for-loop-2: + dag: + outputs: + parameters: + pipelinechannel--add-two-nums-Output: + valueFromParameter: + outputParameterKey: pipelinechannel--add-two-nums-Output + producerSubtask: for-loop-4 + tasks: + for-loop-4: + componentRef: + name: comp-for-loop-4 + inputs: + parameters: + pipelinechannel--loop-item-param-1: + componentInputParameter: pipelinechannel--loop-item-param-1 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-3 + items: + raw: '[1, 2, 3]' + taskInfo: + name: for-loop-4 + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-1: + parameterType: 
NUMBER_INTEGER + outputDefinitions: + parameters: + pipelinechannel--add-two-nums-Output: + parameterType: LIST + comp-for-loop-4: + dag: + outputs: + parameters: + pipelinechannel--add-two-nums-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: add-two-nums + tasks: + add-two-nums: + cachingOptions: + enableCache: true + componentRef: + name: comp-add-two-nums + dependentTasks: + - double + - double-2 + inputs: + parameters: + x: + taskOutputParameter: + outputParameterKey: Output + producerTask: double + y: + taskOutputParameter: + outputParameterKey: Output + producerTask: double-2 + taskInfo: + name: add-two-nums + double: + cachingOptions: + enableCache: true + componentRef: + name: comp-double + inputs: + parameters: + num: + componentInputParameter: pipelinechannel--loop-item-param-1 + taskInfo: + name: double + double-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-double-2 + inputs: + parameters: + num: + componentInputParameter: pipelinechannel--loop-item-param-3 + taskInfo: + name: double-2 + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-1: + parameterType: NUMBER_INTEGER + pipelinechannel--loop-item-param-3: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + pipelinechannel--add-two-nums-Output: + parameterType: LIST +deploymentSpec: + executors: + exec-add: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - add + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef add(nums: List[List[int]]) -> int:\n import itertools\n \ + \ return sum(itertools.chain(*nums))\n\n" + image: python:3.9 + exec-add-two-nums: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - add_two_nums + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef add_two_nums(x: int, y: int) -> int:\n return x + y\n\n" + image: python:3.9 + exec-double: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - double + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" + image: python:3.9 + exec-double-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - double + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" + image: python:3.9 +pipelineInfo: + name: math-pipeline +root: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: pipelinechannel--add-two-nums-Output + producerSubtask: for-loop-2 + tasks: + add: + cachingOptions: + enableCache: true + componentRef: + name: comp-add + dependentTasks: + - for-loop-2 + inputs: + parameters: + nums: + taskOutputParameter: + outputParameterKey: pipelinechannel--add-two-nums-Output + producerTask: for-loop-2 + taskInfo: + name: add + for-loop-2: + componentRef: + name: comp-for-loop-2 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-1 + items: + raw: '[1, 2, 3]' + taskInfo: + name: for-loop-2 + outputDefinitions: + parameters: + Output: + parameterType: LIST +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/notebook_component_mixed.yaml b/test_data/sdk_compiled_pipelines/valid/notebook_component_mixed.yaml new file mode 100644 index 00000000000..ea0b4ce623c --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/notebook_component_mixed.yaml @@ -0,0 +1,426 @@ +# PIPELINE DEFINITION +# Name: nb-mixed +# Inputs: +# text: str [Default: 'Hello world'] +components: + comp-evaluate-model: + executorLabel: exec-evaluate-model + inputDefinitions: + artifacts: + model_text: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + metrics: + artifactType: + schemaTitle: system.Metrics + schemaVersion: 0.0.1 + comp-preprocess: + executorLabel: exec-preprocess + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + artifacts: + dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-train-model: + executorLabel: exec-train-model + inputDefinitions: + artifacts: + cleaned_text: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + model: + 
artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-evaluate-model: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - evaluate_model + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'nbclient>=0.10,<1'\ + \ 'ipykernel>=6,<7' 'jupyter_client>=7,<9' && python3 -m pip install --quiet\ + \ --no-warn-script-location 'kfp==2.14.2' '--no-deps' 'typing-extensions>=3.7.4,<5;\ + \ python_version<\"3.9\"' && \"$0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\n__KFP_EMBEDDED_ARCHIVE_B64 = 'H4sIACSp1mgC/+2Vy46bMBSGWecpLG9IJEogQGAiRequXXbR3WSEHGISN2BbtukkivLuNYQkk05HlarOqJfzLQz8x+f3wRfwx/74/Sey+0jJiirnVQhOvHQNgii63rd6GEzC0EE75w1otCHKDu/8n0xSVBtW03mYJll2lwVZ6meTOEzvBg7w78OXOf1KqrymRrFC+0zu+fIVzv80jp0gCqIkTK7XE2GcJk6YBNMkmUTTpD3/8TSNHBS85fmvpSKb6uV+P4v/pRwGCOGCVpXGM3RvHxA6dG0v52YvqQ3hQqwo9s4huqNFY5jgeSEabmwH3lTVJWx3E1kRQ6x+drOqIevrKL0miSK2N1UaX+SH/u548RONkY3pkh8uohaNKuiNIa5tlbZmujNojlx3wc+uJ8/e8Te/4S/UyWoplEFCe+iLFtzW6V2DQvs12dIVU3rojk0tx9tS5vak9vauh+iOaZOL7fyzaujoNr0/ynYCDq4uhKLuDJWVIGZYUT68ztBodLxNlIpxM1zgD8KgLhEtsId6u/sF7rQFfvhuvEdmNkhI6/2jYsfnL0v7nrZy97Etn9vJZnw9dxtTvsvcESIalbNb37Zpk/xVU8thb+OhcvRsVW3bTffzfYcrwtcNWdOc8VI82Y6Y223Xrrrcm43oDVujbjExX5ZC1aRd9vhGyGvGhbLyZHCEPyQAAAAAAAAAAAAAAAAAAAAAAAAAAADwp/ANkcfbmgAoAAA='\n\ + __KFP_NOTEBOOK_REL_PATH = 'nb_eval_metrics.ipynb'\n\nimport base64 as __kfp_b64\n\ + import gzip as __kfp_gzip\nimport io as __kfp_io\nimport os as __kfp_os\n\ + import sys as __kfp_sys\nimport tarfile as __kfp_tarfile\nimport tempfile\ + \ as __kfp_tempfile\nfrom nbclient import NotebookClient\n\n# Extract embedded\ + \ archive at import time to ensure sys.path and globals are set\nprint('[KFP]\ + \ Extracting embedded notebook archive...', flush=True)\n__kfp_tmpdir =\ + \ __kfp_tempfile.TemporaryDirectory()\n__KFP_EMBEDDED_ASSET_DIR = __kfp_tmpdir.name\n\ + try:\n __kfp_bytes = __kfp_b64.b64decode(__KFP_EMBEDDED_ARCHIVE_B64.encode('ascii'))\n\ + \ with __kfp_tarfile.open(fileobj=__kfp_io.BytesIO(__kfp_bytes), mode='r:gz')\ + \ as __kfp_tar:\n __kfp_tar.extractall(path=__KFP_EMBEDDED_ASSET_DIR)\n\ + \ print(f'[KFP] Notebook archive extracted to: {__KFP_EMBEDDED_ASSET_DIR}',\ + \ flush=True)\nexcept Exception as __kfp_e:\n raise RuntimeError(f'Failed\ + \ to extract embedded notebook archive: {__kfp_e}')\n\n# Always prepend\ + \ the extracted directory to sys.path for import resolution\nif __KFP_EMBEDDED_ASSET_DIR\ + \ not in __kfp_sys.path:\n __kfp_sys.path.insert(0, __KFP_EMBEDDED_ASSET_DIR)\n\ + \ print(f'[KFP] Added notebook archive directory to Python path', flush=True)\n\ + \n# Optional convenience for generic embedded file variable name\n__KFP_EMBEDDED_ASSET_FILE\ + \ = __kfp_os.path.join(__KFP_EMBEDDED_ASSET_DIR, __KFP_NOTEBOOK_REL_PATH)\n\ + \n\nclass KFPStreamingNotebookClient(NotebookClient):\n # Streams outputs\ + \ in real-time by emitting outputs during message processing.\n def process_message(self,\ + \ msg, cell, cell_index):\n # Call the parent implementation to handle\ + \ the 
message normally\n output = super().process_message(msg, cell,\ + \ cell_index)\n\n # If an output was created, stream it immediately\n\ + \ if output is not None:\n _kfp_stream_single_output(output,\ + \ cell_index)\n\n return output\n\ndef __kfp_write_parameters_cell(nb,\ + \ params):\n \"\"\"Inject parameters following Papermill semantics.\n\ + \n - If a cell tagged with 'parameters' exists, insert an overriding\n\ + \ 'injected-parameters' cell immediately after it.\n - Otherwise,\ + \ insert the 'injected-parameters' cell at the top.\n \"\"\"\n import\ + \ json\n\n import nbformat\n\n if not params:\n return\n\n\ + \ # Build the injected parameters cell\n assignments = []\n for\ + \ key, value in params.items():\n serialized = json.dumps(value)\n\ + \ assignments.append(key + ' = json.loads(' + repr(serialized) +\ + \ ')')\n source = 'import json\\n' + '\\n'.join(assignments) + '\\n'\n\ + \ cell = nbformat.v4.new_code_cell(source=source)\n cell.metadata.setdefault('tags',\ + \ [])\n if 'injected-parameters' not in cell.metadata['tags']:\n \ + \ cell.metadata['tags'].append('injected-parameters')\n\n # Locate\ + \ the first 'parameters' tagged cell\n insert_idx = 0\n for idx, existing\ + \ in enumerate(nb.get('cells', [])):\n if existing.get('cell_type')\ + \ != 'code':\n continue\n tags = existing.get('metadata',\ + \ {}).get('tags', []) or []\n if 'parameters' in tags:\n \ + \ insert_idx = idx + 1\n break\n\n nb.cells.insert(insert_idx,\ + \ cell)\n\ndef _kfp_stream_single_output(output, cell_idx):\n \"\"\"\ + Stream a single notebook output immediately during execution.\n\n Prints\ + \ stdout/stderr and text/plain display outputs to the console so users\n\ + \ see cell output as it happens (no need to wait until the notebook finishes).\n\ + \ \"\"\"\n import sys\n output_type = output.get('output_type')\n\ + \n if output_type == 'stream':\n text = output.get('text', '')\n\ + \ if text:\n try:\n print(f'[nb cell {cell_idx}\ + \ stream] ', end='', flush=False)\n except Exception:\n \ + \ pass\n print(text, end='' if text.endswith('\\n')\ + \ else '\\n', flush=True)\n elif output_type == 'error':\n for\ + \ line in output.get('traceback', []):\n print(line, file=sys.stderr,\ + \ flush=True)\n else:\n # Handle display_data and execute_result\n\ + \ data = output.get('data', {})\n if 'text/plain' in data:\n\ + \ print(data['text/plain'], flush=True)\n elif 'application/json'\ + \ in data:\n try:\n import json as __kfp_json\n\ + \ parsed = data['application/json']\n # Some\ + \ kernels send JSON as string; try to parse if needed\n if\ + \ isinstance(parsed, str):\n try:\n \ + \ parsed = __kfp_json.loads(parsed)\n except Exception:\n\ + \ pass\n print(__kfp_json.dumps(parsed,\ + \ indent=2, ensure_ascii=False), flush=True)\n except Exception:\n\ + \ # Fallback to raw\n print(str(data.get('application/json')),\ + \ flush=True)\n elif 'text/markdown' in data:\n # Print\ + \ markdown as-is; frontends may render, logs will show raw markdown\n \ + \ print(data['text/markdown'], flush=True)\n\ndef kfp_run_notebook(**kwargs):\n\ + \ \"\"\"Execute the embedded notebook with injected parameters.\n\n \ + \ Parameters provided via kwargs are injected into the notebook following\n\ + \ Papermill semantics (after a parameters cell if present, otherwise\ + \ at top).\n Execution uses a Python kernel; nbclient and ipykernel must\ + \ be available at\n runtime (installed via packages_to_install for notebook\ + \ components).\n \"\"\"\n import os\n import subprocess\n import\ + \ sys\n\n from nbclient 
import NotebookClient\n import nbformat\n\n\ + \ # Ensure a usable 'python3' kernel is present; install kernelspec if\ + \ missing\n print('[KFP Notebook] Checking for Python kernel...', flush=True)\n\ + \ try:\n from jupyter_client.kernelspec import KernelSpecManager\ + \ # type: ignore\n ksm = KernelSpecManager()\n have_py3 =\ + \ 'python3' in ksm.find_kernel_specs()\n if not have_py3:\n \ + \ print(\n '[KFP Notebook] Python3 kernel not found,\ + \ installing...',\n flush=True)\n try:\n \ + \ subprocess.run([\n sys.executable, '-m',\ + \ 'ipykernel', 'install', '--user',\n '--name', 'python3',\ + \ '--display-name', 'Python 3'\n ],\n \ + \ check=True,\n stdout=subprocess.DEVNULL,\n\ + \ stderr=subprocess.DEVNULL)\n \ + \ print(\n '[KFP Notebook] Python3 kernel installed\ + \ successfully',\n flush=True)\n except subprocess.CalledProcessError\ + \ as e:\n raise RuntimeError(\n \"Failed\ + \ to install 'python3' kernelspec for ipykernel. \"\n \ + \ \"Ensure ipykernel is available in the environment or include it via\ + \ packages_to_install. \"\n f\"Error: {e}\") from e\n\ + \ else:\n print('[KFP Notebook] Python3 kernel found',\ + \ flush=True)\n except ImportError as e:\n raise RuntimeError(\n\ + \ \"jupyter_client is not available. Ensure it's installed in\ + \ the environment or include it via packages_to_install. \"\n \ + \ f\"Error: {e}\") from e\n\n nb_path = os.path.join(__KFP_EMBEDDED_ASSET_DIR,\ + \ __KFP_NOTEBOOK_REL_PATH)\n\n try:\n nb = nbformat.read(nb_path,\ + \ as_version=4)\n except Exception as e:\n raise RuntimeError(\n\ + \ f'Failed to read notebook {nb_path}. Ensure it is a valid Jupyter\ + \ notebook. Error: {e}'\n ) from e\n\n try:\n __kfp_write_parameters_cell(nb,\ + \ kwargs)\n print(\n f'[KFP Notebook] Executing notebook\ + \ with {len(nb.get(\"cells\", []))} cells',\n flush=True)\n\n\ + \ # Use our custom streaming client for real-time output (defined\ + \ in the\n # generated ephemeral source)\n client = KFPStreamingNotebookClient(\n\ + \ nb,\n timeout=None,\n allow_errors=False,\n\ + \ store_widget_state=False,\n kernel_name='python3')\n\ + \ client.execute(cwd=__KFP_EMBEDDED_ASSET_DIR)\n\n print('[KFP\ + \ Notebook] Execution complete', flush=True)\n\n except Exception as\ + \ e:\n raise RuntimeError(f'Notebook execution failed. Error: {e}')\ + \ from e\n\n\n# Bind helper into dsl namespace so user code can call dsl.run_notebook(...)\n\ + dsl.run_notebook = kfp_run_notebook\n\n\ndef evaluate_model(model_text:\ + \ dsl.Input[dsl.Model], metrics: dsl.Output[dsl.Metrics]):\n import json\n\ + \n with open(model_text.path, \"r\", encoding=\"utf-8\") as f:\n \ + \ model_text = f.read()\n\n dsl.run_notebook(model_text=model_text)\n\ + \ with open(\"/tmp/kfp_nb_outputs/metrics.json\", \"r\", encoding=\"\ + utf-8\") as f:\n metrics_dict = json.load(f)\n\n assert metrics_dict\ + \ == {\"score\": float(len(model_text))}\n\n for metric_name, metric_value\ + \ in metrics_dict.items():\n metrics.log_metric(metric_name, metric_value)\n\ + \n" + image: python:3.9 + exec-preprocess: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - preprocess + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\n\n\ndef preprocess(text: str, dataset: dsl.Output[dsl.Dataset]):\n\ + \ import re\n\n cleaned_text = re.sub(r\"\\s+\", \" \", text).strip()\n\ + \ with open(dataset.path, \"w\", encoding=\"utf-8\") as f:\n f.write(cleaned_text)\n\ + \n" + image: python:3.9 + exec-train-model: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - train_model + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'nbclient>=0.10,<1'\ + \ 'ipykernel>=6,<7' 'jupyter_client>=7,<9' && python3 -m pip install --quiet\ + \ --no-warn-script-location 'kfp==2.14.2' '--no-deps' 'typing-extensions>=3.7.4,<5;\ + \ python_version<\"3.9\"' && \"$0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\n__KFP_EMBEDDED_ARCHIVE_B64 = 'H4sIACSp1mgC/+2VTY/aMBCGc+ZXWLkEJBqSQAJdCanH9tZDpR7KKjKJAxaJbTmTErTiv9cJ4auwWrXartR2noMT3onnneAZcEfu6MNnWn9kNGXa+iN4B567et54cr5vdN8L/MAitfUGVCVQbeyt/5NgRgrgBZv703A2ez/zosgNwsgfh0HPQv55xDIGTbmItxzWsaKaFqXL1U4sX3f+o4mZ8bE3Dv3wfD3gT73Q8kMvCsNgHAWBmf9wGkws4r3l/BdK03X+/HMvxf9SnnqE2AnL89J+IN/MB0Ke2rWTY9gpZkJ2IlNmD48hVrOkAi5FnMhKgHlAVHl+ChcMaEqBGv2YzahAV2eXTms7jgHTpX2SH7u7/SmfrEBV0G5+PImlrHTCrhLaSc6oYGkMrAYyJ06xI51EGslZiKPNwaSzeOVX/o3CeaGkBiJLU+HwLMvSLeiGpVyXfWcEhRptMhWboe0SO0PCal5CLDfzL7pig+vtSnMB/YX9VXPgYkVAkjtJRoV50dyFGhb2TwmaXwUiFRN33c8bTR3OtilGmC/NOM2dCrJ3M2dAaEmyh+ukzZK5W1MT618emFspxXR/MPilU9J0a7/QeN/L9igvNaPmVKwqumKf0mOaU3R/04H3zuymTLO2p3xbxsks5iKTF5XYwrR/4692sJZdwiZRa22LZSZ1QZtum1wJccGF1EYOenv8o0YQBEEQBEEQBEEQBEEQBEEQBEEQBEF+APurSW0AKAAA'\n\ + __KFP_NOTEBOOK_REL_PATH = 'nb_train_with_params.ipynb'\n\nimport base64\ + \ as __kfp_b64\nimport gzip as __kfp_gzip\nimport io as __kfp_io\nimport\ + \ os as __kfp_os\nimport sys as __kfp_sys\nimport tarfile as __kfp_tarfile\n\ + import tempfile as __kfp_tempfile\nfrom nbclient import NotebookClient\n\ + \n# Extract embedded archive at import time to ensure sys.path and globals\ + \ are set\nprint('[KFP] Extracting embedded notebook archive...', flush=True)\n\ + __kfp_tmpdir = __kfp_tempfile.TemporaryDirectory()\n__KFP_EMBEDDED_ASSET_DIR\ + \ = __kfp_tmpdir.name\ntry:\n __kfp_bytes = __kfp_b64.b64decode(__KFP_EMBEDDED_ARCHIVE_B64.encode('ascii'))\n\ + \ with __kfp_tarfile.open(fileobj=__kfp_io.BytesIO(__kfp_bytes), mode='r:gz')\ + \ as __kfp_tar:\n __kfp_tar.extractall(path=__KFP_EMBEDDED_ASSET_DIR)\n\ + \ print(f'[KFP] Notebook archive extracted to: {__KFP_EMBEDDED_ASSET_DIR}',\ + \ 
flush=True)\nexcept Exception as __kfp_e:\n raise RuntimeError(f'Failed\ + \ to extract embedded notebook archive: {__kfp_e}')\n\n# Always prepend\ + \ the extracted directory to sys.path for import resolution\nif __KFP_EMBEDDED_ASSET_DIR\ + \ not in __kfp_sys.path:\n __kfp_sys.path.insert(0, __KFP_EMBEDDED_ASSET_DIR)\n\ + \ print(f'[KFP] Added notebook archive directory to Python path', flush=True)\n\ + \n# Optional convenience for generic embedded file variable name\n__KFP_EMBEDDED_ASSET_FILE\ + \ = __kfp_os.path.join(__KFP_EMBEDDED_ASSET_DIR, __KFP_NOTEBOOK_REL_PATH)\n\ + \n\nclass KFPStreamingNotebookClient(NotebookClient):\n # Streams outputs\ + \ in real-time by emitting outputs during message processing.\n def process_message(self,\ + \ msg, cell, cell_index):\n # Call the parent implementation to handle\ + \ the message normally\n output = super().process_message(msg, cell,\ + \ cell_index)\n\n # If an output was created, stream it immediately\n\ + \ if output is not None:\n _kfp_stream_single_output(output,\ + \ cell_index)\n\n return output\n\ndef __kfp_write_parameters_cell(nb,\ + \ params):\n \"\"\"Inject parameters following Papermill semantics.\n\ + \n - If a cell tagged with 'parameters' exists, insert an overriding\n\ + \ 'injected-parameters' cell immediately after it.\n - Otherwise,\ + \ insert the 'injected-parameters' cell at the top.\n \"\"\"\n import\ + \ json\n\n import nbformat\n\n if not params:\n return\n\n\ + \ # Build the injected parameters cell\n assignments = []\n for\ + \ key, value in params.items():\n serialized = json.dumps(value)\n\ + \ assignments.append(key + ' = json.loads(' + repr(serialized) +\ + \ ')')\n source = 'import json\\n' + '\\n'.join(assignments) + '\\n'\n\ + \ cell = nbformat.v4.new_code_cell(source=source)\n cell.metadata.setdefault('tags',\ + \ [])\n if 'injected-parameters' not in cell.metadata['tags']:\n \ + \ cell.metadata['tags'].append('injected-parameters')\n\n # Locate\ + \ the first 'parameters' tagged cell\n insert_idx = 0\n for idx, existing\ + \ in enumerate(nb.get('cells', [])):\n if existing.get('cell_type')\ + \ != 'code':\n continue\n tags = existing.get('metadata',\ + \ {}).get('tags', []) or []\n if 'parameters' in tags:\n \ + \ insert_idx = idx + 1\n break\n\n nb.cells.insert(insert_idx,\ + \ cell)\n\ndef _kfp_stream_single_output(output, cell_idx):\n \"\"\"\ + Stream a single notebook output immediately during execution.\n\n Prints\ + \ stdout/stderr and text/plain display outputs to the console so users\n\ + \ see cell output as it happens (no need to wait until the notebook finishes).\n\ + \ \"\"\"\n import sys\n output_type = output.get('output_type')\n\ + \n if output_type == 'stream':\n text = output.get('text', '')\n\ + \ if text:\n try:\n print(f'[nb cell {cell_idx}\ + \ stream] ', end='', flush=False)\n except Exception:\n \ + \ pass\n print(text, end='' if text.endswith('\\n')\ + \ else '\\n', flush=True)\n elif output_type == 'error':\n for\ + \ line in output.get('traceback', []):\n print(line, file=sys.stderr,\ + \ flush=True)\n else:\n # Handle display_data and execute_result\n\ + \ data = output.get('data', {})\n if 'text/plain' in data:\n\ + \ print(data['text/plain'], flush=True)\n elif 'application/json'\ + \ in data:\n try:\n import json as __kfp_json\n\ + \ parsed = data['application/json']\n # Some\ + \ kernels send JSON as string; try to parse if needed\n if\ + \ isinstance(parsed, str):\n try:\n \ + \ parsed = __kfp_json.loads(parsed)\n except Exception:\n\ + \ pass\n 
print(__kfp_json.dumps(parsed,\ + \ indent=2, ensure_ascii=False), flush=True)\n except Exception:\n\ + \ # Fallback to raw\n print(str(data.get('application/json')),\ + \ flush=True)\n elif 'text/markdown' in data:\n # Print\ + \ markdown as-is; frontends may render, logs will show raw markdown\n \ + \ print(data['text/markdown'], flush=True)\n\ndef kfp_run_notebook(**kwargs):\n\ + \ \"\"\"Execute the embedded notebook with injected parameters.\n\n \ + \ Parameters provided via kwargs are injected into the notebook following\n\ + \ Papermill semantics (after a parameters cell if present, otherwise\ + \ at top).\n Execution uses a Python kernel; nbclient and ipykernel must\ + \ be available at\n runtime (installed via packages_to_install for notebook\ + \ components).\n \"\"\"\n import os\n import subprocess\n import\ + \ sys\n\n from nbclient import NotebookClient\n import nbformat\n\n\ + \ # Ensure a usable 'python3' kernel is present; install kernelspec if\ + \ missing\n print('[KFP Notebook] Checking for Python kernel...', flush=True)\n\ + \ try:\n from jupyter_client.kernelspec import KernelSpecManager\ + \ # type: ignore\n ksm = KernelSpecManager()\n have_py3 =\ + \ 'python3' in ksm.find_kernel_specs()\n if not have_py3:\n \ + \ print(\n '[KFP Notebook] Python3 kernel not found,\ + \ installing...',\n flush=True)\n try:\n \ + \ subprocess.run([\n sys.executable, '-m',\ + \ 'ipykernel', 'install', '--user',\n '--name', 'python3',\ + \ '--display-name', 'Python 3'\n ],\n \ + \ check=True,\n stdout=subprocess.DEVNULL,\n\ + \ stderr=subprocess.DEVNULL)\n \ + \ print(\n '[KFP Notebook] Python3 kernel installed\ + \ successfully',\n flush=True)\n except subprocess.CalledProcessError\ + \ as e:\n raise RuntimeError(\n \"Failed\ + \ to install 'python3' kernelspec for ipykernel. \"\n \ + \ \"Ensure ipykernel is available in the environment or include it via\ + \ packages_to_install. \"\n f\"Error: {e}\") from e\n\ + \ else:\n print('[KFP Notebook] Python3 kernel found',\ + \ flush=True)\n except ImportError as e:\n raise RuntimeError(\n\ + \ \"jupyter_client is not available. Ensure it's installed in\ + \ the environment or include it via packages_to_install. \"\n \ + \ f\"Error: {e}\") from e\n\n nb_path = os.path.join(__KFP_EMBEDDED_ASSET_DIR,\ + \ __KFP_NOTEBOOK_REL_PATH)\n\n try:\n nb = nbformat.read(nb_path,\ + \ as_version=4)\n except Exception as e:\n raise RuntimeError(\n\ + \ f'Failed to read notebook {nb_path}. Ensure it is a valid Jupyter\ + \ notebook. Error: {e}'\n ) from e\n\n try:\n __kfp_write_parameters_cell(nb,\ + \ kwargs)\n print(\n f'[KFP Notebook] Executing notebook\ + \ with {len(nb.get(\"cells\", []))} cells',\n flush=True)\n\n\ + \ # Use our custom streaming client for real-time output (defined\ + \ in the\n # generated ephemeral source)\n client = KFPStreamingNotebookClient(\n\ + \ nb,\n timeout=None,\n allow_errors=False,\n\ + \ store_widget_state=False,\n kernel_name='python3')\n\ + \ client.execute(cwd=__KFP_EMBEDDED_ASSET_DIR)\n\n print('[KFP\ + \ Notebook] Execution complete', flush=True)\n\n except Exception as\ + \ e:\n raise RuntimeError(f'Notebook execution failed. 
Error: {e}')\ + \ from e\n\n\n# Bind helper into dsl namespace so user code can call dsl.run_notebook(...)\n\ + dsl.run_notebook = kfp_run_notebook\n\n\ndef train_model(cleaned_text: dsl.Input[dsl.Dataset],\ + \ model: dsl.Output[dsl.Model]):\n import shutil\n\n with open(cleaned_text.path,\ + \ \"r\", encoding=\"utf-8\") as f:\n cleaned_text = f.read()\n\n\ + \ dsl.run_notebook(cleaned_text=cleaned_text)\n\n # Notebook writes\ + \ its model into /tmp/kfp_nb_outputs/model.txt\n shutil.copy(\"/tmp/kfp_nb_outputs/model.txt\"\ + , model.path)\n\n with open(model.path, \"r\", encoding=\"utf-8\") as\ + \ f:\n model_text = f.read()\n\n assert model_text == cleaned_text.upper()\n\ + \n" + image: python:3.9 +pipelineInfo: + name: nb-mixed +root: + dag: + tasks: + evaluate-model: + cachingOptions: {} + componentRef: + name: comp-evaluate-model + dependentTasks: + - train-model + inputs: + artifacts: + model_text: + taskOutputArtifact: + outputArtifactKey: model + producerTask: train-model + taskInfo: + name: evaluate-model + preprocess: + cachingOptions: {} + componentRef: + name: comp-preprocess + inputs: + parameters: + text: + componentInputParameter: text + taskInfo: + name: preprocess + train-model: + cachingOptions: {} + componentRef: + name: comp-train-model + dependentTasks: + - preprocess + inputs: + artifacts: + cleaned_text: + taskOutputArtifact: + outputArtifactKey: dataset + producerTask: preprocess + taskInfo: + name: train-model + inputDefinitions: + parameters: + text: + defaultValue: Hello world + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.2 diff --git a/sdk/python/test_data/components/output_metrics.py b/test_data/sdk_compiled_pipelines/valid/output_metrics.py similarity index 100% rename from sdk/python/test_data/components/output_metrics.py rename to test_data/sdk_compiled_pipelines/valid/output_metrics.py diff --git a/sdk/python/test_data/components/output_metrics.yaml b/test_data/sdk_compiled_pipelines/valid/output_metrics.yaml similarity index 100% rename from sdk/python/test_data/components/output_metrics.yaml rename to test_data/sdk_compiled_pipelines/valid/output_metrics.yaml diff --git a/samples/v2/subdagio/parameter.py b/test_data/sdk_compiled_pipelines/valid/parameter.py similarity index 100% rename from samples/v2/subdagio/parameter.py rename to test_data/sdk_compiled_pipelines/valid/parameter.py diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/parameters_complex.py b/test_data/sdk_compiled_pipelines/valid/parameters_complex.py similarity index 100% rename from sdk/python/test_data/pipelines/parallelfor_fan_in/parameters_complex.py rename to test_data/sdk_compiled_pipelines/valid/parameters_complex.py diff --git a/test_data/sdk_compiled_pipelines/valid/parameters_complex.yaml b/test_data/sdk_compiled_pipelines/valid/parameters_complex.yaml new file mode 100644 index 00000000000..a8d9b48f618 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/parameters_complex.yaml @@ -0,0 +1,494 @@ +# PIPELINE DEFINITION +# Name: math-pipeline +# Outputs: +# Output: int +components: + comp-add-two-numbers: + executorLabel: exec-add-two-numbers + inputDefinitions: + parameters: + x: + parameterType: LIST + y: + parameterType: LIST + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-double: + executorLabel: exec-double + inputDefinitions: + parameters: + num: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-double-2: + 
executorLabel: exec-double-2 + inputDefinitions: + parameters: + num: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-for-loop-2: + dag: + outputs: + parameters: + pipelinechannel--double-2-Output: + valueFromParameter: + outputParameterKey: pipelinechannel--double-2-Output + producerSubtask: for-loop-4 + pipelinechannel--double-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: double + tasks: + double: + cachingOptions: + enableCache: true + componentRef: + name: comp-double + inputs: + parameters: + num: + componentInputParameter: pipelinechannel--loop-item-param-1 + taskInfo: + name: double + for-loop-4: + componentRef: + name: comp-for-loop-4 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-3 + items: + raw: '[4, 5, 6]' + taskInfo: + name: for-loop-4 + simple-add: + cachingOptions: + enableCache: true + componentRef: + name: comp-simple-add + dependentTasks: + - for-loop-4 + inputs: + parameters: + nums: + taskOutputParameter: + outputParameterKey: pipelinechannel--double-2-Output + producerTask: for-loop-4 + taskInfo: + name: simple-add + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-1: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + pipelinechannel--double-2-Output: + parameterType: LIST + pipelinechannel--double-Output: + parameterType: LIST + comp-for-loop-4: + dag: + outputs: + parameters: + pipelinechannel--double-2-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: double-2 + tasks: + double-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-double-2 + inputs: + parameters: + num: + componentInputParameter: pipelinechannel--loop-item-param-3 + taskInfo: + name: double-2 + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-3: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + pipelinechannel--double-2-Output: + parameterType: LIST + comp-for-loop-6: + dag: + outputs: + parameters: + pipelinechannel--nested-add-2-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: nested-add-2 + pipelinechannel--simple-add-2-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: simple-add-2 + tasks: + nested-add-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-nested-add-2 + inputs: + parameters: + nums: + componentInputParameter: pipelinechannel--for-loop-2-pipelinechannel--double-2-Output + taskInfo: + name: nested-add-2 + simple-add-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-simple-add-2 + inputs: + parameters: + nums: + componentInputParameter: pipelinechannel--for-loop-2-pipelinechannel--double-Output + taskInfo: + name: simple-add-2 + inputDefinitions: + parameters: + pipelinechannel--for-loop-2-pipelinechannel--double-2-Output: + parameterType: LIST + pipelinechannel--for-loop-2-pipelinechannel--double-Output: + parameterType: LIST + pipelinechannel--loop-item-param-5: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + pipelinechannel--nested-add-2-Output: + parameterType: LIST + pipelinechannel--simple-add-2-Output: + parameterType: LIST + comp-nested-add: + executorLabel: exec-nested-add + inputDefinitions: + parameters: + nums: + parameterType: LIST + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-nested-add-2: + executorLabel: exec-nested-add-2 + inputDefinitions: + parameters: + nums: + parameterType: LIST 
+ outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-simple-add: + executorLabel: exec-simple-add + inputDefinitions: + parameters: + nums: + parameterType: LIST + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-simple-add-2: + executorLabel: exec-simple-add-2 + inputDefinitions: + parameters: + nums: + parameterType: LIST + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER +deploymentSpec: + executors: + exec-add-two-numbers: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - add_two_numbers + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef add_two_numbers(x: List[int], y: List[int]) -> int:\n return\ + \ sum(x) + sum(y)\n\n" + image: python:3.9 + exec-double: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - double + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" + image: python:3.9 + exec-double-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - double + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" + image: python:3.9 + exec-nested-add: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - nested_add + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef nested_add(nums: List[List[int]]) -> int:\n import itertools\n\ + \ return sum(itertools.chain(*nums))\n\n" + image: python:3.9 + exec-nested-add-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - nested_add + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef nested_add(nums: List[List[int]]) -> int:\n import itertools\n\ + \ return sum(itertools.chain(*nums))\n\n" + image: python:3.9 + exec-simple-add: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - simple_add + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef simple_add(nums: List[int]) -> int:\n return sum(nums)\n\n" + image: python:3.9 + exec-simple-add-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - simple_add + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef simple_add(nums: List[int]) -> int:\n return sum(nums)\n\n" + image: python:3.9 +pipelineInfo: + name: math-pipeline +root: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: add-two-numbers + tasks: + add-two-numbers: + cachingOptions: + enableCache: true + componentRef: + name: comp-add-two-numbers + dependentTasks: + - for-loop-6 + inputs: + parameters: + x: + taskOutputParameter: + outputParameterKey: pipelinechannel--simple-add-2-Output + producerTask: for-loop-6 + y: + taskOutputParameter: + outputParameterKey: pipelinechannel--nested-add-2-Output + producerTask: for-loop-6 + taskInfo: + name: add-two-numbers + for-loop-2: + componentRef: + name: comp-for-loop-2 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-1 + items: + raw: '[1, 2, 3]' + taskInfo: + name: for-loop-2 + for-loop-6: + componentRef: + name: comp-for-loop-6 + dependentTasks: + - for-loop-2 + inputs: + parameters: + pipelinechannel--for-loop-2-pipelinechannel--double-2-Output: + taskOutputParameter: + outputParameterKey: pipelinechannel--double-2-Output + producerTask: for-loop-2 + pipelinechannel--for-loop-2-pipelinechannel--double-Output: + taskOutputParameter: + outputParameterKey: pipelinechannel--double-Output + producerTask: for-loop-2 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-5 + items: + raw: '[0, 0, 0]' + taskInfo: + name: for-loop-6 + nested-add: + cachingOptions: + enableCache: true + componentRef: + name: comp-nested-add + dependentTasks: + - for-loop-2 + inputs: + parameters: + nums: + taskOutputParameter: + outputParameterKey: pipelinechannel--double-2-Output + producerTask: for-loop-2 + taskInfo: + name: nested-add + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/pipeline_as_exit_task.py b/test_data/sdk_compiled_pipelines/valid/pipeline_as_exit_task.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_as_exit_task.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_as_exit_task.py diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_as_exit_task.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_as_exit_task.yaml new file mode 100644 index 00000000000..57619355145 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_as_exit_task.yaml @@ -0,0 +1,273 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-task-final-status-conditional +# Inputs: +# message: str [Default: 'Hello World!'] +components: + comp-condition-1: + dag: + tasks: + print-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-2 + inputs: + parameters: + message: + runtimeValue: + constant: notify task failure. 
+ taskInfo: + name: print-op + inputDefinitions: + parameters: + pipelinechannel--get-run-state-Output: + parameterType: STRING + comp-conditional-notification: + dag: + tasks: + condition-1: + componentRef: + name: comp-condition-1 + dependentTasks: + - get-run-state + inputs: + parameters: + pipelinechannel--get-run-state-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: get-run-state + taskInfo: + name: condition-1 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--get-run-state-Output'] + == 'FAILED' + get-run-state: + cachingOptions: + enableCache: true + componentRef: + name: comp-get-run-state + inputs: + parameters: + status: + componentInputParameter: status + taskInfo: + name: get-run-state + inputDefinitions: + parameters: + status: + isOptional: true + parameterType: TASK_FINAL_STATUS + comp-exit-handler-1: + dag: + tasks: + fail-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-fail-op + inputs: + parameters: + message: + runtimeValue: + constant: Task failed. + taskInfo: + name: fail-op + print-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op + inputs: + parameters: + message: + componentInputParameter: pipelinechannel--message + taskInfo: + name: print-op + inputDefinitions: + parameters: + pipelinechannel--message: + parameterType: STRING + comp-fail-op: + executorLabel: exec-fail-op + inputDefinitions: + parameters: + message: + parameterType: STRING + comp-get-run-state: + executorLabel: exec-get-run-state + inputDefinitions: + parameters: + status: + parameterType: STRUCT + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-op: + executorLabel: exec-print-op + inputDefinitions: + parameters: + message: + parameterType: STRING + comp-print-op-2: + executorLabel: exec-print-op-2 + inputDefinitions: + parameters: + message: + parameterType: STRING +deploymentSpec: + executors: + exec-fail-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - fail_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n\ + \ print(message)\n sys.exit(1)\n\n" + image: python:3.9 + exec-get-run-state: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - get_run_state + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef get_run_state(status: dict) -> str:\n print('Pipeline status:\ + \ ', status)\n return status['state']\n\n" + image: python:3.9 + exec-print-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ + \ print(message)\n\n" + image: python:3.9 + exec-print-op-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ + \ print(message)\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-with-task-final-status-conditional +root: + dag: + tasks: + conditional-notification: + cachingOptions: + enableCache: true + componentRef: + name: comp-conditional-notification + dependentTasks: + - exit-handler-1 + inputs: + parameters: + status: + taskFinalStatus: + producerTask: exit-handler-1 + taskInfo: + name: conditional-notification + triggerPolicy: + strategy: ALL_UPSTREAM_TASKS_COMPLETED + exit-handler-1: + componentRef: + name: comp-exit-handler-1 + inputs: + parameters: + pipelinechannel--message: + componentInputParameter: message + taskInfo: + name: my-pipeline + inputDefinitions: + parameters: + message: + defaultValue: Hello World! 
+ isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/samples/v2/pipeline_container_no_input.py b/test_data/sdk_compiled_pipelines/valid/pipeline_container_no_input.py similarity index 100% rename from samples/v2/pipeline_container_no_input.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_container_no_input.py diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/pipeline_producer_consumer.py b/test_data/sdk_compiled_pipelines/valid/pipeline_producer_consumer.py similarity index 100% rename from sdk/python/test_data/pipelines/parallelfor_fan_in/pipeline_producer_consumer.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_producer_consumer.py diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_producer_consumer.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_producer_consumer.yaml new file mode 100644 index 00000000000..068d30f88e7 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_producer_consumer.yaml @@ -0,0 +1,367 @@ +# PIPELINE DEFINITION +# Name: math-pipeline +# Outputs: +# Output: int +components: + comp-add: + executorLabel: exec-add + inputDefinitions: + parameters: + nums: + parameterType: LIST + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-add-pipeline: + dag: + outputs: + parameters: + out1: + valueFromParameter: + outputParameterKey: Output + producerSubtask: add + out2: + valueFromParameter: + outputParameterKey: pipelinechannel--echo-and-return-Output + producerSubtask: for-loop-2 + tasks: + add: + cachingOptions: + enableCache: true + componentRef: + name: comp-add + inputs: + parameters: + nums: + componentInputParameter: nums + taskInfo: + name: add + for-loop-2: + componentRef: + name: comp-for-loop-2-2 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-1 + items: + raw: '["m", "a", "t", "h"]' + taskInfo: + name: for-loop-2 + inputDefinitions: + parameters: + nums: + parameterType: LIST + outputDefinitions: + parameters: + out1: + parameterType: NUMBER_INTEGER + out2: + parameterType: LIST + comp-double: + executorLabel: exec-double + inputDefinitions: + parameters: + num: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-double-pipeline: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: double + tasks: + double: + cachingOptions: + enableCache: true + componentRef: + name: comp-double + inputs: + parameters: + num: + componentInputParameter: num + taskInfo: + name: double + inputDefinitions: + parameters: + num: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-echo-and-return: + executorLabel: exec-echo-and-return + inputDefinitions: + parameters: + string: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-for-loop-2: + dag: + outputs: + parameters: + pipelinechannel--double-pipeline-Output: + valueFromParameter: + outputParameterKey: pipelinechannel--double-pipeline-Output + producerSubtask: for-loop-4 + tasks: + for-loop-4: + componentRef: + name: comp-for-loop-4 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-3 + items: + raw: '[1, 2, 3]' + taskInfo: + name: for-loop-4 + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-1: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + 
pipelinechannel--double-pipeline-Output: + parameterType: LIST + comp-for-loop-2-2: + dag: + outputs: + parameters: + pipelinechannel--echo-and-return-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: echo-and-return + tasks: + echo-and-return: + cachingOptions: + enableCache: true + componentRef: + name: comp-echo-and-return + inputs: + parameters: + string: + componentInputParameter: pipelinechannel--loop-item-param-1 + taskInfo: + name: echo-and-return + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-1: + parameterType: STRING + outputDefinitions: + parameters: + pipelinechannel--echo-and-return-Output: + parameterType: LIST + comp-for-loop-4: + dag: + outputs: + parameters: + pipelinechannel--double-pipeline-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: double-pipeline + tasks: + double-pipeline: + cachingOptions: + enableCache: true + componentRef: + name: comp-double-pipeline + inputs: + parameters: + num: + componentInputParameter: pipelinechannel--loop-item-param-3 + taskInfo: + name: double-pipeline + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-3: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + pipelinechannel--double-pipeline-Output: + parameterType: LIST + comp-join-and-print: + executorLabel: exec-join-and-print + inputDefinitions: + parameters: + strings: + parameterType: LIST +deploymentSpec: + executors: + exec-add: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - add + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef add(nums: List[List[int]]) -> int:\n import itertools\n \ + \ return sum(itertools.chain(*nums))\n\n" + image: python:3.9 + exec-double: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - double + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" + image: python:3.9 + exec-echo-and-return: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - echo_and_return + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef echo_and_return(string: str) -> str:\n print(string)\n \ + \ return string\n\n" + image: python:3.9 + exec-join-and-print: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - join_and_print + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef join_and_print(strings: List[str]):\n print(''.join(strings))\n\ + \n" + image: python:3.9 +pipelineInfo: + name: math-pipeline +root: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: out1 + producerSubtask: add-pipeline + tasks: + add-pipeline: + cachingOptions: + enableCache: true + componentRef: + name: comp-add-pipeline + dependentTasks: + - for-loop-2 + inputs: + parameters: + nums: + taskOutputParameter: + outputParameterKey: pipelinechannel--double-pipeline-Output + producerTask: for-loop-2 + taskInfo: + name: add-pipeline + for-loop-2: + componentRef: + name: comp-for-loop-2 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-1 + items: + raw: '[1, 2, 3]' + taskInfo: + name: for-loop-2 + join-and-print: + cachingOptions: + enableCache: true + componentRef: + name: comp-join-and-print + dependentTasks: + - add-pipeline + inputs: + parameters: + strings: + taskOutputParameter: + outputParameterKey: out2 + producerTask: add-pipeline + taskInfo: + name: join-and-print + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/samples/v2/pipeline_with_env.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_component_from_text.py similarity index 100% rename from samples/v2/pipeline_with_env.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_component_from_text.py diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_concat_placeholder.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_concat_placeholder.py new file mode 100644 index 00000000000..17654bef4b7 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_concat_placeholder.py @@ -0,0 +1,44 @@ +# Copyright 2020 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from kfp import components +from kfp import dsl +import kfp.compiler as compiler + +component_op = components.load_component_from_text(""" +name: Component with concat placeholder +inputs: +- {name: input_one, type: String} +- {name: input_two, type: String} +implementation: + container: + image: ghcr.io/containerd/busybox + command: + - sh + - -ec + args: + - echo "$0" > /tmp/test && [[ "$0" == 'one+two=three' ]] + - concat: [{inputValue: input_one}, '+', {inputValue: input_two}, '=three'] +""") + + +@dsl.pipeline(name='one-step-pipeline-with-concat-placeholder') +def pipeline_with_concat_placeholder(): + component = component_op(input_one='one', input_two='two') + + +if __name__ == '__main__': + compiler.Compiler().compile( + pipeline_func=pipeline_with_concat_placeholder, + package_path='pipeline_with_concat_placeholder.yaml') \ No newline at end of file diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_concat_placeholder.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_concat_placeholder.yaml new file mode 100644 index 00000000000..78a41485539 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_concat_placeholder.yaml @@ -0,0 +1,45 @@ +# PIPELINE DEFINITION +# Name: one-step-pipeline-with-concat-placeholder +components: + comp-component-with-concat-placeholder: + executorLabel: exec-component-with-concat-placeholder + inputDefinitions: + parameters: + input_one: + parameterType: STRING + input_two: + parameterType: STRING +deploymentSpec: + executors: + exec-component-with-concat-placeholder: + container: + args: + - echo "$0" > /tmp/test && [[ "$0" == 'one+two=three' ]] + - '{"Concat": ["{{$.inputs.parameters[''input_one'']}}", "+", "{{$.inputs.parameters[''input_two'']}}", + "=three"]}' + command: + - sh + - -ec + image: ghcr.io/containerd/busybox +pipelineInfo: + name: one-step-pipeline-with-concat-placeholder +root: + dag: + tasks: + component-with-concat-placeholder: + cachingOptions: + enableCache: true + componentRef: + name: comp-component-with-concat-placeholder + inputs: + parameters: + input_one: + runtimeValue: + constant: one + input_two: + runtimeValue: + constant: two + taskInfo: + name: component-with-concat-placeholder +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/sdk/python/test_data/pipelines/pipeline_with_condition_dynamic_task_output_custom_training_job.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_condition_dynamic_task_output_custom_training_job.py similarity index 91% rename from sdk/python/test_data/pipelines/pipeline_with_condition_dynamic_task_output_custom_training_job.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_condition_dynamic_task_output_custom_training_job.py index d112232478f..b62ba52a291 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_condition_dynamic_task_output_custom_training_job.py +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_condition_dynamic_task_output_custom_training_job.py @@ -24,7 +24,7 @@ def accelerator_count() -> int: @dsl.pipeline -def pipeline( +def pipeline_with_dynamic_condition_output( project: str, 
location: str, encryption_spec_key_name: str = '', @@ -61,4 +61,4 @@ def pipeline( if __name__ == '__main__': from kfp import compiler compiler.Compiler().compile( - pipeline_func=pipeline, package_path=__file__.replace('.py', '.yaml')) + pipeline_func=pipeline_with_dynamic_condition_output, package_path=__file__.replace('.py', '.yaml')) diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_condition_dynamic_task_output_custom_training_job.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_condition_dynamic_task_output_custom_training_job.yaml new file mode 100644 index 00000000000..36c6a36ae81 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_condition_dynamic_task_output_custom_training_job.yaml @@ -0,0 +1,456 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-dynamic-condition-output +# Inputs: +# encryption_spec_key_name: str [Default: ''] +# location: str +# project: str +components: + comp-accelerator-count: + executorLabel: exec-accelerator-count + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-accelerator-type: + executorLabel: exec-accelerator-type + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-condition-1: + dag: + tasks: + custom-training-job: + cachingOptions: + enableCache: true + componentRef: + name: comp-custom-training-job + inputs: + parameters: + display_name: + runtimeValue: + constant: add-numbers + encryption_spec_key_name: + componentInputParameter: pipelinechannel--encryption_spec_key_name + location: + componentInputParameter: pipelinechannel--location + pipelinechannel--accelerator-count-Output: + componentInputParameter: pipelinechannel--accelerator-count-Output + pipelinechannel--accelerator-type-Output: + componentInputParameter: pipelinechannel--accelerator-type-Output + pipelinechannel--machine-type-Output: + componentInputParameter: pipelinechannel--machine-type-Output + project: + componentInputParameter: pipelinechannel--project + worker_pool_specs: + runtimeValue: + constant: + - container_spec: + args: + - foo + command: + - echo + image_uri: gcr.io/ml-pipeline/google-cloud-pipeline-components:2.5.0 + machine_spec: + accelerator_count: '{{$.inputs.parameters[''pipelinechannel--accelerator-count-Output'']}}' + accelerator_type: '{{$.inputs.parameters[''pipelinechannel--accelerator-type-Output'']}}' + machine_type: '{{$.inputs.parameters[''pipelinechannel--machine-type-Output'']}}' + replica_count: 1.0 + taskInfo: + name: custom-training-job + inputDefinitions: + parameters: + pipelinechannel--accelerator-count-Output: + parameterType: NUMBER_INTEGER + pipelinechannel--accelerator-type-Output: + parameterType: STRING + pipelinechannel--encryption_spec_key_name: + parameterType: STRING + pipelinechannel--flip-biased-coin-op-Output: + parameterType: STRING + pipelinechannel--location: + parameterType: STRING + pipelinechannel--machine-type-Output: + parameterType: STRING + pipelinechannel--project: + parameterType: STRING + comp-custom-training-job: + executorLabel: exec-custom-training-job + inputDefinitions: + parameters: + base_output_directory: + defaultValue: '' + description: The Cloud Storage location to store the output of this CustomJob + or HyperparameterTuningJob. See [more information ](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/GcsDestination). + isOptional: true + parameterType: STRING + display_name: + description: The name of the CustomJob. 
+ parameterType: STRING + enable_web_access: + defaultValue: false + description: Whether you want Vertex AI to enable [interactive shell access + ](https://cloud.google.com/vertex-ai/docs/training/monitor-debug-interactive-shell) + to training containers. If `True`, you can access interactive shells at + the URIs given by [CustomJob.web_access_uris][]. + isOptional: true + parameterType: BOOLEAN + encryption_spec_key_name: + defaultValue: '' + description: Customer-managed encryption key options for the CustomJob. + If this is set, then all resources created by the CustomJob will be encrypted + with the provided encryption key. + isOptional: true + parameterType: STRING + labels: + defaultValue: {} + description: The labels with user-defined metadata to organize the CustomJob. + See [more information](https://goo.gl/xmQnxf). + isOptional: true + parameterType: STRUCT + location: + defaultValue: '{{$.pipeline_google_cloud_location}}' + description: Location for creating the custom training job. If not set, + default to the location where the PipelineJob is run. + isOptional: true + parameterType: STRING + max_wait_duration: + defaultValue: 86400s + description: The maximum time to wait for the custom training job to be + scheduled only if the scheduling strategy is set to FLEX_START. If set + to 0, the job will wait indefinitely. The default is 24 hours. See [more + information](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/CustomJobSpec#Strategy). + isOptional: true + parameterType: STRING + network: + defaultValue: '' + description: The full name of the Compute Engine network to which the job + should be peered. For example, `projects/12345/global/networks/myVPC`. + Format is of the form `projects/{project}/global/networks/{network}`. + Where `{project}` is a project number, as in `12345`, and `{network}` + is a network name. Private services access must already be configured + for the network. If left unspecified, the job is not peered with any network. + isOptional: true + parameterType: STRING + persistent_resource_id: + defaultValue: '{{$.pipeline_persistent_resource_id}}' + description: The ID of the PersistentResource in the same Project and Location + which to run. The default value is a placeholder that will be resolved + to the PipelineJob [RuntimeConfig](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.pipelineJobs#PipelineJob.RuntimeConfig)'s + persistent resource id at runtime. However, if the PipelineJob doesn't + set Persistent Resource as the job level runtime, the placedholder will + be resolved to an empty string and the custom job will be run on demand. + If the value is set explicitly, the custom job will runs in the specified + persistent resource, in this case, please note the network and CMEK configs + on the job should be consistent with those on the PersistentResource, + otherwise, the job will be rejected. + isOptional: true + parameterType: STRING + project: + defaultValue: '{{$.pipeline_google_cloud_project_id}}' + description: Project to create the custom training job in. Defaults to the + project in which the PipelineJob is run. + isOptional: true + parameterType: STRING + psc_interface_config: + defaultValue: {} + description: Configuration CustomJob with PSC-I. See [more information](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/CustomJobSpec#PscInterfaceConfig). 
+ isOptional: true + parameterType: STRUCT + reserved_ip_ranges: + defaultValue: [] + description: A list of names for the reserved IP ranges under the VPC network + that can be used for this job. If set, we will deploy the job within the + provided IP ranges. Otherwise, the job will be deployed to any IP ranges + under the provided VPC network. + isOptional: true + parameterType: LIST + restart_job_on_worker_restart: + defaultValue: false + description: Restarts the entire CustomJob if a worker gets restarted. This + feature can be used by distributed training jobs that are not resilient + to workers leaving and joining a job. + isOptional: true + parameterType: BOOLEAN + service_account: + defaultValue: '' + description: Sets the default service account for workload run-as account. + The [service account ](https://cloud.google.com/vertex-ai/docs/pipelines/configure-project#service-account) + running the pipeline submitting jobs must have act-as permission on this + run-as account. If unspecified, the Vertex AI Custom Code [Service Agent + ](https://cloud.google.com/vertex-ai/docs/general/access-control#service-agents) + for the CustomJob's project. + isOptional: true + parameterType: STRING + strategy: + defaultValue: STANDARD + description: The strategy to use for the custom training job. The default + is 'STANDARD'. See [more information](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/CustomJobSpec#Strategy). + isOptional: true + parameterType: STRING + tensorboard: + defaultValue: '' + description: The name of a Vertex AI TensorBoard resource to which this + CustomJob will upload TensorBoard logs. + isOptional: true + parameterType: STRING + timeout: + defaultValue: 604800s + description: 'The maximum job running time. The default is 7 days. A duration + in seconds with up to nine fractional digits, terminated by ''s'', for + example: "3.5s".' + isOptional: true + parameterType: STRING + worker_pool_specs: + defaultValue: [] + description: Serialized json spec of the worker pools including machine + type and Docker image. All worker pools except the first one are optional + and can be skipped by providing an empty value. See [more information](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/CustomJobSpec#WorkerPoolSpec). + isOptional: true + parameterType: LIST + outputDefinitions: + parameters: + gcp_resources: + description: Serialized JSON of `gcp_resources` [proto](https://github.com/kubeflow/pipelines/tree/master/components/google-cloud/google_cloud_pipeline_components/proto) + which tracks the CustomJob. + parameterType: STRING + comp-flip-biased-coin-op: + executorLabel: exec-flip-biased-coin-op + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-machine-type: + executorLabel: exec-machine-type + outputDefinitions: + parameters: + Output: + parameterType: STRING +deploymentSpec: + executors: + exec-accelerator-count: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - accelerator_count + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef accelerator_count() -> int:\n return 1\n\n" + image: python:3.9 + exec-accelerator-type: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - accelerator_type + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef accelerator_type() -> str:\n return 'NVIDIA_TESLA_P4'\n\n" + image: python:3.9 + exec-custom-training-job: + container: + args: + - --type + - CustomJob + - --payload + - '{"display_name": "{{$.inputs.parameters[''display_name'']}}", "job_spec": + {"worker_pool_specs": {{$.inputs.parameters[''worker_pool_specs'']}}, "scheduling": + {"timeout": "{{$.inputs.parameters[''timeout'']}}", "restart_job_on_worker_restart": + {{$.inputs.parameters[''restart_job_on_worker_restart'']}}, "strategy": + "{{$.inputs.parameters[''strategy'']}}", "max_wait_duration": "{{$.inputs.parameters[''max_wait_duration'']}}"}, + "service_account": "{{$.inputs.parameters[''service_account'']}}", "tensorboard": + "{{$.inputs.parameters[''tensorboard'']}}", "enable_web_access": {{$.inputs.parameters[''enable_web_access'']}}, + "network": "{{$.inputs.parameters[''network'']}}", "reserved_ip_ranges": + {{$.inputs.parameters[''reserved_ip_ranges'']}}, "base_output_directory": + {"output_uri_prefix": "{{$.inputs.parameters[''base_output_directory'']}}"}, + "persistent_resource_id": "{{$.inputs.parameters[''persistent_resource_id'']}}", + "psc_interface_config": {{$.inputs.parameters[''psc_interface_config'']}}}, + "labels": {{$.inputs.parameters[''labels'']}}, "encryption_spec": {"kms_key_name": + "{{$.inputs.parameters[''encryption_spec_key_name'']}}"}}' + - --project + - '{{$.inputs.parameters[''project'']}}' + - --location + - '{{$.inputs.parameters[''location'']}}' + - --gcp_resources + - '{{$.outputs.parameters[''gcp_resources''].output_file}}' + command: + - python3 + - -u + - -m + - google_cloud_pipeline_components.container.v1.custom_job.launcher + image: gcr.io/ml-pipeline/google-cloud-pipeline-components:2.21.0 + exec-flip-biased-coin-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - flip_biased_coin_op + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef flip_biased_coin_op() -> str:\n \"\"\"Flip a coin and output\ + \ heads.\"\"\"\n return 'heads'\n\n" + image: python:3.9 + exec-machine-type: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - machine_type + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef machine_type() -> str:\n return 'n1-standard-4'\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-with-dynamic-condition-output +root: + dag: + tasks: + accelerator-count: + cachingOptions: + enableCache: true + componentRef: + name: comp-accelerator-count + taskInfo: + name: accelerator-count + accelerator-type: + cachingOptions: + enableCache: true + componentRef: + name: comp-accelerator-type + taskInfo: + name: accelerator-type + condition-1: + componentRef: + name: comp-condition-1 + dependentTasks: + - accelerator-count + - accelerator-type + - flip-biased-coin-op + - machine-type + inputs: + parameters: + pipelinechannel--accelerator-count-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: accelerator-count + pipelinechannel--accelerator-type-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: accelerator-type + pipelinechannel--encryption_spec_key_name: + componentInputParameter: encryption_spec_key_name + pipelinechannel--flip-biased-coin-op-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: flip-biased-coin-op + pipelinechannel--location: + componentInputParameter: location + pipelinechannel--machine-type-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: machine-type + pipelinechannel--project: + componentInputParameter: project + taskInfo: + name: condition-1 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--flip-biased-coin-op-Output'] + == 'heads' + flip-biased-coin-op: + cachingOptions: {} + componentRef: + name: comp-flip-biased-coin-op + taskInfo: + name: flip-biased-coin-op + machine-type: + cachingOptions: + enableCache: true + componentRef: + name: comp-machine-type + taskInfo: + name: machine-type + inputDefinitions: + parameters: + encryption_spec_key_name: + defaultValue: '' + isOptional: true + parameterType: STRING + location: + parameterType: STRING + 
project: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/sdk/python/test_data/pipelines/pipeline_with_dynamic_importer_metadata.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_dynamic_importer_metadata.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_dynamic_importer_metadata.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_dynamic_importer_metadata.py diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_dynamic_importer_metadata.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_dynamic_importer_metadata.yaml new file mode 100644 index 00000000000..6966ea280ba --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_dynamic_importer_metadata.yaml @@ -0,0 +1,186 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-importer +# Inputs: +# int_input: int [Default: 1.0] +# name: str [Default: 'default-name'] +# pipeline_input_artifact_uri: str [Default: 'gs://ml-pipeline-playground/shakespeare1.txt'] +# pipeline_input_image_uri: str [Default: 'us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-5:latest'] +components: + comp-importer: + executorLabel: exec-importer + inputDefinitions: + parameters: + metadata: + parameterType: STRING + metadata-2: + parameterType: STRING + uri: + parameterType: STRING + outputDefinitions: + artifacts: + artifact: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-importer-2: + executorLabel: exec-importer-2 + inputDefinitions: + parameters: + metadata: + parameterType: STRING + metadata-2: + parameterType: STRING + metadata-3: + parameterType: NUMBER_INTEGER + uri: + parameterType: STRING + outputDefinitions: + artifacts: + artifact: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-make-name: + executorLabel: exec-make-name + inputDefinitions: + parameters: + name: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING +deploymentSpec: + executors: + exec-importer: + importer: + artifactUri: + runtimeParameter: uri + metadata: + containerSpec: + imageUri: '{{$.inputs.parameters[''metadata-2'']}}' + name: + - '{{$.inputs.parameters[''metadata'']}}' + - alias-name + typeSchema: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + exec-importer-2: + importer: + artifactUri: + constant: gs://ml-pipeline-playground/shakespeare1.txt + metadata: + containerSpec: + imageUri: us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-5:latest + list-of-data: + - '{{$.inputs.parameters[''metadata'']}}' + - '{{$.inputs.parameters[''metadata-2'']}}' + - '{{$.inputs.parameters[''metadata-3'']}}' + name: prefix-{{$.inputs.parameters['metadata']}} + '{{$.inputs.parameters[''metadata'']}}': '{{$.inputs.parameters[''metadata'']}}' + '{{$.inputs.parameters[''metadata-2'']}}': us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-5:latest + typeSchema: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + exec-make-name: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - make_name + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef make_name(name: str) -> str:\n return name\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-with-importer +root: + dag: + tasks: + importer: + cachingOptions: + enableCache: true + componentRef: + name: comp-importer + inputs: + parameters: + metadata: + componentInputParameter: name + metadata-2: + componentInputParameter: pipeline_input_image_uri + uri: + componentInputParameter: pipeline_input_artifact_uri + taskInfo: + name: importer + importer-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-importer-2 + dependentTasks: + - make-name + inputs: + parameters: + metadata: + taskOutputParameter: + outputParameterKey: Output + producerTask: make-name + metadata-2: + componentInputParameter: name + metadata-3: + componentInputParameter: int_input + uri: + runtimeValue: + constant: gs://ml-pipeline-playground/shakespeare1.txt + taskInfo: + name: importer-2 + make-name: + cachingOptions: + enableCache: true + componentRef: + name: comp-make-name + inputs: + parameters: + name: + runtimeValue: + constant: a-different-name + taskInfo: + name: make-name + inputDefinitions: + parameters: + int_input: + defaultValue: 1.0 + isOptional: true + parameterType: NUMBER_INTEGER + name: + defaultValue: default-name + isOptional: true + parameterType: STRING + pipeline_input_artifact_uri: + defaultValue: gs://ml-pipeline-playground/shakespeare1.txt + isOptional: true + parameterType: STRING + pipeline_input_image_uri: + defaultValue: us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-5:latest + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_dynamic_task_output_custom_training_job.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_dynamic_task_output_custom_training_job.yaml new file mode 100644 index 00000000000..d10d4d9f55c --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_dynamic_task_output_custom_training_job.yaml @@ -0,0 +1,332 @@ +# PIPELINE DEFINITION +# Name: pipeline +# Inputs: +# encryption_spec_key_name: str [Default: ''] +# location: str +# project: str +components: + comp-accelerator-count: + executorLabel: exec-accelerator-count + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER + comp-accelerator-type: + executorLabel: exec-accelerator-type + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-custom-training-job: + executorLabel: exec-custom-training-job + inputDefinitions: + parameters: + base_output_directory: + defaultValue: '' + description: The Cloud Storage location to store the output of this CustomJob + or HyperparameterTuningJob. See [more information ](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/GcsDestination). 
+ isOptional: true + parameterType: STRING + display_name: + description: The name of the CustomJob. + parameterType: STRING + enable_web_access: + defaultValue: false + description: Whether you want Vertex AI to enable [interactive shell access + ](https://cloud.google.com/vertex-ai/docs/training/monitor-debug-interactive-shell) + to training containers. If `True`, you can access interactive shells at + the URIs given by [CustomJob.web_access_uris][]. + isOptional: true + parameterType: BOOLEAN + encryption_spec_key_name: + defaultValue: '' + description: Customer-managed encryption key options for the CustomJob. + If this is set, then all resources created by the CustomJob will be encrypted + with the provided encryption key. + isOptional: true + parameterType: STRING + labels: + defaultValue: {} + description: The labels with user-defined metadata to organize the CustomJob. + See [more information](https://goo.gl/xmQnxf). + isOptional: true + parameterType: STRUCT + location: + defaultValue: us-central1 + description: Location for creating the custom training job. If not set, + default to us-central1. + isOptional: true + parameterType: STRING + network: + defaultValue: '' + description: The full name of the Compute Engine network to which the job + should be peered. For example, `projects/12345/global/networks/myVPC`. + Format is of the form `projects/{project}/global/networks/{network}`. + Where `{project}` is a project number, as in `12345`, and `{network}` + is a network name. Private services access must already be configured + for the network. If left unspecified, the job is not peered with any network. + isOptional: true + parameterType: STRING + project: + defaultValue: '{{$.pipeline_google_cloud_project_id}}' + description: Project to create the custom training job in. Defaults to the + project in which the PipelineJob is run. + isOptional: true + parameterType: STRING + reserved_ip_ranges: + defaultValue: [] + description: A list of names for the reserved IP ranges under the VPC network + that can be used for this job. If set, we will deploy the job within the + provided IP ranges. Otherwise, the job will be deployed to any IP ranges + under the provided VPC network. + isOptional: true + parameterType: LIST + restart_job_on_worker_restart: + defaultValue: false + description: Restarts the entire CustomJob if a worker gets restarted. This + feature can be used by distributed training jobs that are not resilient + to workers leaving and joining a job. + isOptional: true + parameterType: BOOLEAN + service_account: + defaultValue: '' + description: Sets the default service account for workload run-as account. + The [service account ](https://cloud.google.com/vertex-ai/docs/pipelines/configure-project#service-account) + running the pipeline submitting jobs must have act-as permission on this + run-as account. If unspecified, the Vertex AI Custom Code [Service Agent + ](https://cloud.google.com/vertex-ai/docs/general/access-control#service-agents) + for the CustomJob's project. + isOptional: true + parameterType: STRING + tensorboard: + defaultValue: '' + description: The name of a Vertex AI TensorBoard resource to which this + CustomJob will upload TensorBoard logs. + isOptional: true + parameterType: STRING + timeout: + defaultValue: 604800s + description: 'The maximum job running time. The default is 7 days. A duration + in seconds with up to nine fractional digits, terminated by ''s'', for + example: "3.5s".' 
+ isOptional: true + parameterType: STRING + worker_pool_specs: + defaultValue: [] + description: Serialized json spec of the worker pools including machine + type and Docker image. All worker pools except the first one are optional + and can be skipped by providing an empty value. See [more information](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/CustomJobSpec#WorkerPoolSpec). + isOptional: true + parameterType: LIST + outputDefinitions: + parameters: + gcp_resources: + description: Serialized JSON of `gcp_resources` [proto](https://github.com/kubeflow/pipelines/tree/master/components/google-cloud/google_cloud_pipeline_components/proto) + which tracks the CustomJob. + parameterType: STRING + comp-machine-type: + executorLabel: exec-machine-type + outputDefinitions: + parameters: + Output: + parameterType: STRING +deploymentSpec: + executors: + exec-accelerator-count: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - accelerator_count + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef accelerator_count() -> int:\n return 1\n\n" + image: python:3.9 + exec-accelerator-type: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - accelerator_type + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef accelerator_type() -> str:\n return 'NVIDIA_TESLA_P4'\n\n" + image: python:3.9 + exec-custom-training-job: + container: + args: + - --type + - CustomJob + - --payload + - '{"display_name": "{{$.inputs.parameters[''display_name'']}}", "job_spec": + {"worker_pool_specs": {{$.inputs.parameters[''worker_pool_specs'']}}, "scheduling": + {"timeout": "{{$.inputs.parameters[''timeout'']}}", "restart_job_on_worker_restart": + {{$.inputs.parameters[''restart_job_on_worker_restart'']}}}, "service_account": + "{{$.inputs.parameters[''service_account'']}}", "tensorboard": "{{$.inputs.parameters[''tensorboard'']}}", + "enable_web_access": {{$.inputs.parameters[''enable_web_access'']}}, "network": + "{{$.inputs.parameters[''network'']}}", "reserved_ip_ranges": {{$.inputs.parameters[''reserved_ip_ranges'']}}, + "base_output_directory": {"output_uri_prefix": "{{$.inputs.parameters[''base_output_directory'']}}"}}, + "labels": {{$.inputs.parameters[''labels'']}}, "encryption_spec": {"kms_key_name": + "{{$.inputs.parameters[''encryption_spec_key_name'']}}"}}' + - --project + - '{{$.inputs.parameters[''project'']}}' + - --location + - '{{$.inputs.parameters[''location'']}}' + - --gcp_resources + - '{{$.outputs.parameters[''gcp_resources''].output_file}}' + command: + - python3 + - -u + - -m + - google_cloud_pipeline_components.container.v1.custom_job.launcher + image: gcr.io/ml-pipeline/google-cloud-pipeline-components:2.14.1 + exec-machine-type: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - machine_type + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef machine_type() -> str:\n return 'n1-standard-4'\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline +root: + dag: + tasks: + accelerator-count: + cachingOptions: + enableCache: true + componentRef: + name: comp-accelerator-count + taskInfo: + name: accelerator-count + accelerator-type: + cachingOptions: + enableCache: true + componentRef: + name: comp-accelerator-type + taskInfo: + name: accelerator-type + custom-training-job: + cachingOptions: + enableCache: true + componentRef: + name: comp-custom-training-job + dependentTasks: + - accelerator-count + - accelerator-type + - machine-type + inputs: + parameters: + display_name: + runtimeValue: + constant: add-numbers + encryption_spec_key_name: + componentInputParameter: encryption_spec_key_name + location: + componentInputParameter: location + pipelinechannel--accelerator-count-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: accelerator-count + pipelinechannel--accelerator-type-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: accelerator-type + pipelinechannel--machine-type-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: machine-type + project: + componentInputParameter: project + worker_pool_specs: + runtimeValue: + constant: + - container_spec: + args: + - foo + command: + - echo + image_uri: gcr.io/ml-pipeline/google-cloud-pipeline-components:2.5.0 + machine_spec: + accelerator_count: '{{$.inputs.parameters[''pipelinechannel--accelerator-count-Output'']}}' + accelerator_type: '{{$.inputs.parameters[''pipelinechannel--accelerator-type-Output'']}}' + machine_type: '{{$.inputs.parameters[''pipelinechannel--machine-type-Output'']}}' + replica_count: 1.0 + taskInfo: + name: custom-training-job + machine-type: + cachingOptions: + enableCache: true + componentRef: + name: comp-machine-type + taskInfo: + name: machine-type + inputDefinitions: + parameters: + encryption_spec_key_name: + defaultValue: '' + isOptional: true + parameterType: STRING + location: + parameterType: STRING + project: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_google_artifact_type.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_google_artifact_type.py similarity index 96% rename from sdk/python/test_data/pipelines/pipeline_with_google_artifact_type.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_google_artifact_type.py index 91ce6659741..d75c81ce4d2 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_google_artifact_type.py +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_google_artifact_type.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import os import sys import tempfile @@ -76,6 +77,8 @@ def path(self) -> str: from kfp.dsl import Output PACKAGES_TO_INSTALL = ['aiplatform'] + if 'KFP_PIPELINE_SPEC_PACKAGE_PATH' in os.environ: + PACKAGES_TO_INSTALL.append(os.environ['KFP_PIPELINE_SPEC_PACKAGE_PATH']) @dsl.component(packages_to_install=PACKAGES_TO_INSTALL) def model_producer(model: Output[aiplatform.VertexModel]): diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_google_artifact_type.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_google_artifact_type.yaml new file mode 100644 index 00000000000..d09851bdef2 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_google_artifact_type.yaml @@ -0,0 +1,159 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-google-types +components: + comp-importer: + executorLabel: exec-importer + inputDefinitions: + parameters: + uri: + parameterType: STRING + outputDefinitions: + artifacts: + artifact: + artifactType: + schemaTitle: google.VertexDataset + schemaVersion: 0.0.0 + comp-model-consumer: + executorLabel: exec-model-consumer + inputDefinitions: + artifacts: + dataset: + artifactType: + schemaTitle: google.VertexDataset + schemaVersion: 0.0.0 + model: + artifactType: + schemaTitle: google.VertexModel + schemaVersion: 0.0.0 + comp-model-producer: + executorLabel: exec-model-producer + outputDefinitions: + artifacts: + model: + artifactType: + schemaTitle: google.VertexModel + schemaVersion: 0.0.0 +deploymentSpec: + executors: + exec-importer: + importer: + artifactUri: + constant: gs://ml-pipeline-playground/shakespeare1.txt + metadata: + key: value + typeSchema: + schemaTitle: google.VertexDataset + schemaVersion: 0.0.0 + exec-model-consumer: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - model_consumer + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'aiplatform'\ + \ && python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\nfrom aiplatform import VertexModel\nfrom aiplatform import VertexDataset\n\ + \ndef model_consumer(model: Input[VertexModel],\n dataset:\ + \ Input[VertexDataset]):\n print('Model')\n print('artifact.type:\ + \ ', type(model))\n print('artifact.name: ', model.name)\n print('artifact.uri:\ + \ ', model.uri)\n print('artifact.metadata: ', model.metadata)\n\n \ + \ print('Dataset')\n print('artifact.type: ', type(dataset))\n print('artifact.name:\ + \ ', dataset.name)\n print('artifact.uri: ', dataset.uri)\n print('artifact.metadata:\ + \ ', dataset.metadata)\n\n" + image: python:3.9 + exec-model-producer: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - model_producer + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'aiplatform'\ + \ && python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\nimport aiplatform\n\ndef model_producer(model: Output[aiplatform.VertexModel]):\n\ + \n assert isinstance(model, aiplatform.VertexModel), type(model)\n \ + \ with open(model.path, 'w') as f:\n f.write('my model')\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-with-google-types +root: + dag: + tasks: + importer: + cachingOptions: + enableCache: true + componentRef: + name: comp-importer + inputs: + parameters: + uri: + runtimeValue: + constant: gs://ml-pipeline-playground/shakespeare1.txt + taskInfo: + name: importer + model-consumer: + cachingOptions: + enableCache: true + componentRef: + name: comp-model-consumer + dependentTasks: + - importer + - model-producer + inputs: + artifacts: + dataset: + taskOutputArtifact: + outputArtifactKey: artifact + producerTask: importer + model: + taskOutputArtifact: + outputArtifactKey: model + producerTask: model-producer + taskInfo: + name: model-consumer + model-producer: + cachingOptions: + enableCache: true + componentRef: + name: comp-model-producer + taskInfo: + name: model-producer +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_importer.json b/test_data/sdk_compiled_pipelines/valid/pipeline_with_importer.json new file mode 100644 index 00000000000..88a13886300 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_importer.json @@ -0,0 +1,319 @@ +{ + "components": { + "comp-condition-1": { + "dag": { + "tasks": { + "importer-2": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-importer-2" + }, + "inputs": { + "parameters": { + "uri": { + "componentInputParameter": "pipelinechannel--dataset2" + } + } + }, + "taskInfo": { + "name": "importer-2" + } + }, + "train-2": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-train-2" + }, + "dependentTasks": [ + "importer-2" + ], + "inputs": { + "artifacts": { + "dataset": { + "taskOutputArtifact": { + "outputArtifactKey": "artifact", + "producerTask": "importer-2" + } + } + } + }, + "taskInfo": { + "name": "train-2" + } + } + } + }, + "inputDefinitions": { + "parameters": { + "pipelinechannel--dataset2": { + "parameterType": "STRING" + }, + "pipelinechannel--train-scalar": { + "parameterType": "STRING" + } + } + } + }, + "comp-importer": { + "executorLabel": "exec-importer", + "inputDefinitions": { + "parameters": { + "uri": { + "parameterType": "STRING" + } + } + }, + "outputDefinitions": { + "artifacts": { + "artifact": { + "artifactType": { + "schemaTitle": "system.Dataset", + "schemaVersion": "0.0.1" + } + } + } + } + }, + "comp-importer-2": { + "executorLabel": "exec-importer-2", + "inputDefinitions": { + "parameters": { + "uri": { + "parameterType": "STRING" + } + } + }, + "outputDefinitions": { + 
"artifacts": { + "artifact": { + "artifactType": { + "schemaTitle": "system.Dataset", + "schemaVersion": "0.0.1" + } + } + } + } + }, + "comp-train": { + "executorLabel": "exec-train", + "inputDefinitions": { + "artifacts": { + "dataset": { + "artifactType": { + "schemaTitle": "system.Dataset", + "schemaVersion": "0.0.1" + } + } + } + }, + "outputDefinitions": { + "artifacts": { + "model": { + "artifactType": { + "schemaTitle": "system.Model", + "schemaVersion": "0.0.1" + } + } + }, + "parameters": { + "scalar": { + "parameterType": "STRING" + } + } + } + }, + "comp-train-2": { + "executorLabel": "exec-train-2", + "inputDefinitions": { + "artifacts": { + "dataset": { + "artifactType": { + "schemaTitle": "system.Dataset", + "schemaVersion": "0.0.1" + } + } + } + }, + "outputDefinitions": { + "artifacts": { + "model": { + "artifactType": { + "schemaTitle": "system.Model", + "schemaVersion": "0.0.1" + } + } + }, + "parameters": { + "scalar": { + "parameterType": "STRING" + } + } + } + } + }, + "deploymentSpec": { + "executors": { + "exec-importer": { + "importer": { + "artifactUri": { + "constant": "gs://ml-pipeline-playground/shakespeare1.txt" + }, + "metadata": { + "key": "value" + }, + "typeSchema": { + "schemaTitle": "system.Dataset", + "schemaVersion": "0.0.1" + } + } + }, + "exec-importer-2": { + "importer": { + "artifactUri": { + "runtimeParameter": "uri" + }, + "reimport": true, + "typeSchema": { + "schemaTitle": "system.Dataset", + "schemaVersion": "0.0.1" + } + } + }, + "exec-train": { + "container": { + "args": [ + "--executor_input", + "{{$}}", + "--function_to_execute", + "train" + ], + "command": [ + "sh", + "-c", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3' '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"$0\" \"$@\"\n", + "sh", + "-ec", + "program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", + "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef train(\n dataset: Input[Dataset]\n) -> NamedTuple('Outputs', [\n ('scalar', str),\n ('model', Model),\n]):\n \"\"\"Dummy Training step.\"\"\"\n with open(dataset.path) as f:\n data = f.read()\n print('Dataset:', data)\n\n scalar = '123'\n model = f'My model trained using data: {data}'\n\n from collections import namedtuple\n output = namedtuple('Outputs', ['scalar', 'model'])\n return output(scalar, model)\n\n" + ], + "image": "python:3.9" + } + }, + "exec-train-2": { + "container": { + "args": [ + "--executor_input", + "{{$}}", + "--function_to_execute", + "train" + ], + "command": [ + "sh", + "-c", + "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3' '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"$0\" \"$@\"\n", + "sh", + "-ec", + "program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", + "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef train(\n dataset: Input[Dataset]\n) -> NamedTuple('Outputs', [\n ('scalar', str),\n ('model', Model),\n]):\n \"\"\"Dummy Training step.\"\"\"\n with open(dataset.path) as f:\n data = f.read()\n print('Dataset:', data)\n\n scalar = '123'\n model = f'My model trained using data: {data}'\n\n from collections import namedtuple\n output = namedtuple('Outputs', ['scalar', 'model'])\n return output(scalar, model)\n\n" + ], + "image": "python:3.9" + } + } + } + }, + "pipelineInfo": { + "name": "pipeline-with-importer" + }, + "root": { + "dag": { + "tasks": { + "condition-1": { + "componentRef": { + "name": "comp-condition-1" + }, + "dependentTasks": [ + "train" + ], + "inputs": { + "parameters": { + "pipelinechannel--dataset2": { + "componentInputParameter": "dataset2" + }, + "pipelinechannel--train-scalar": { + "taskOutputParameter": { + "outputParameterKey": "scalar", + "producerTask": "train" + } + } + } + }, + "taskInfo": { + "name": "condition-1" + }, + "triggerPolicy": { + "condition": "inputs.parameter_values['pipelinechannel--train-scalar'] == '123'" + } + }, + "importer": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-importer" + }, + "inputs": { + "parameters": { + "uri": { + "runtimeValue": { + "constant": "gs://ml-pipeline-playground/shakespeare1.txt" + } + } + } + }, + "taskInfo": { + "name": "importer" + } + }, + "train": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-train" + }, + "dependentTasks": [ + "importer" + ], + "inputs": { + "artifacts": { + "dataset": { + "taskOutputArtifact": { + "outputArtifactKey": "artifact", + "producerTask": "importer" + } + } + } + }, + "taskInfo": { + "name": "train" + } + } + } + }, + "inputDefinitions": { + "parameters": { + "dataset2": { + "defaultValue": "gs://ml-pipeline-playground/shakespeare2.txt", + "isOptional": true, + "parameterType": "STRING" + } + } + } + }, + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-2.14.3" +} \ No newline at end of file diff --git a/sdk/python/test_data/pipelines/pipeline_with_importer.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_importer.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_importer.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_importer.py diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_importer.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_importer.yaml new file mode 100644 index 00000000000..e71d6193dc0 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_importer.yaml @@ -0,0 +1,242 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-importer +# Inputs: +# dataset2: str [Default: 'gs://ml-pipeline-playground/shakespeare2.txt'] +components: + comp-condition-1: + dag: + tasks: + importer-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-importer-2 + 
inputs: + parameters: + uri: + componentInputParameter: pipelinechannel--dataset2 + taskInfo: + name: importer-2 + train-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-train-2 + dependentTasks: + - importer-2 + inputs: + artifacts: + dataset: + taskOutputArtifact: + outputArtifactKey: artifact + producerTask: importer-2 + taskInfo: + name: train-2 + inputDefinitions: + parameters: + pipelinechannel--dataset2: + parameterType: STRING + pipelinechannel--train-scalar: + parameterType: STRING + comp-importer: + executorLabel: exec-importer + inputDefinitions: + parameters: + uri: + parameterType: STRING + outputDefinitions: + artifacts: + artifact: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-importer-2: + executorLabel: exec-importer-2 + inputDefinitions: + parameters: + uri: + parameterType: STRING + outputDefinitions: + artifacts: + artifact: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-train: + executorLabel: exec-train + inputDefinitions: + artifacts: + dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + model: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 + parameters: + scalar: + parameterType: STRING + comp-train-2: + executorLabel: exec-train-2 + inputDefinitions: + artifacts: + dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + model: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 + parameters: + scalar: + parameterType: STRING +deploymentSpec: + executors: + exec-importer: + importer: + artifactUri: + constant: gs://ml-pipeline-playground/shakespeare1.txt + metadata: + key: value + typeSchema: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + exec-importer-2: + importer: + artifactUri: + runtimeParameter: uri + reimport: true + typeSchema: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + exec-train: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - train + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef train(\n dataset: Input[Dataset]\n) -> NamedTuple('Outputs',\ + \ [\n ('scalar', str),\n ('model', Model),\n]):\n \"\"\"Dummy Training\ + \ step.\"\"\"\n with open(dataset.path) as f:\n data = f.read()\n\ + \ print('Dataset:', data)\n\n scalar = '123'\n model = f'My model\ + \ trained using data: {data}'\n\n from collections import namedtuple\n\ + \ output = namedtuple('Outputs', ['scalar', 'model'])\n return output(scalar,\ + \ model)\n\n" + image: python:3.9 + exec-train-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - train + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef train(\n dataset: Input[Dataset]\n) -> NamedTuple('Outputs',\ + \ [\n ('scalar', str),\n ('model', Model),\n]):\n \"\"\"Dummy Training\ + \ step.\"\"\"\n with open(dataset.path) as f:\n data = f.read()\n\ + \ print('Dataset:', data)\n\n scalar = '123'\n model = f'My model\ + \ trained using data: {data}'\n\n from collections import namedtuple\n\ + \ output = namedtuple('Outputs', ['scalar', 'model'])\n return output(scalar,\ + \ model)\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-with-importer +root: + dag: + tasks: + condition-1: + componentRef: + name: comp-condition-1 + dependentTasks: + - train + inputs: + parameters: + pipelinechannel--dataset2: + componentInputParameter: dataset2 + pipelinechannel--train-scalar: + taskOutputParameter: + outputParameterKey: scalar + producerTask: train + taskInfo: + name: condition-1 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--train-scalar'] == '123' + importer: + cachingOptions: + enableCache: true + componentRef: + name: comp-importer + inputs: + parameters: + uri: + runtimeValue: + constant: gs://ml-pipeline-playground/shakespeare1.txt + taskInfo: + name: importer + train: + cachingOptions: + enableCache: true + componentRef: + name: comp-train + dependentTasks: + - importer + inputs: + artifacts: + dataset: + taskOutputArtifact: + outputArtifactKey: artifact + producerTask: importer + taskInfo: + name: train + inputDefinitions: + parameters: + dataset2: + defaultValue: gs://ml-pipeline-playground/shakespeare2.txt + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/sdk/python/test_data/pipelines/pipeline_with_importer_and_gcpc_types.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_importer_and_gcpc_types.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_importer_and_gcpc_types.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_importer_and_gcpc_types.py diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_importer_and_gcpc_types.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_importer_and_gcpc_types.yaml new file mode 100644 index 00000000000..cfefcd7c0d6 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_importer_and_gcpc_types.yaml @@ -0,0 +1,75 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-importer-and-gcpc-type +components: + comp-consumer-op: + executorLabel: exec-consumer-op + inputDefinitions: + artifacts: + dataset: + artifactType: + schemaTitle: google.VertexDataset + schemaVersion: 0.0.1 + comp-importer: + executorLabel: exec-importer + inputDefinitions: + parameters: + uri: + parameterType: STRING + outputDefinitions: + artifacts: + artifact: + artifactType: + schemaTitle: google.VertexDataset + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-consumer-op: + container: + args: + 
- '{{$.inputs.artifacts[''dataset''].path}}' + command: + - cmd + image: dummy + exec-importer: + importer: + artifactUri: + constant: gs://ml-pipeline-playground/shakespeare1.txt + metadata: + key: value + typeSchema: + schemaTitle: google.VertexDataset + schemaVersion: 0.0.1 +pipelineInfo: + name: pipeline-with-importer-and-gcpc-type +root: + dag: + tasks: + consumer-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-consumer-op + dependentTasks: + - importer + inputs: + artifacts: + dataset: + taskOutputArtifact: + outputArtifactKey: artifact + producerTask: importer + taskInfo: + name: consumer-op + importer: + cachingOptions: + enableCache: true + componentRef: + name: comp-importer + inputs: + parameters: + uri: + runtimeValue: + constant: gs://ml-pipeline-playground/shakespeare1.txt + taskInfo: + name: importer +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_loops_and_conditions.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_loops_and_conditions.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_loops_and_conditions.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_loops_and_conditions.py diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_loops_and_conditions.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_loops_and_conditions.yaml new file mode 100644 index 00000000000..7368df7932c --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_loops_and_conditions.yaml @@ -0,0 +1,1051 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-loops-and-conditions-multi-layers +# Inputs: +# loop_parameter: list [Default: [{'B_b': ['A', 'B'], 'A_a': 'heads'}, {'B_b': ['X', 'Y', 'Z'], 'A_a': 'tails'}]] +# msg: str [Default: 'hello'] +components: + comp-args-generator-op: + executorLabel: exec-args-generator-op + outputDefinitions: + parameters: + Output: + parameterType: LIST + comp-args-generator-op-2: + executorLabel: exec-args-generator-op-2 + outputDefinitions: + parameters: + Output: + parameterType: LIST + comp-condition-1: + dag: + tasks: + args-generator-op-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-args-generator-op-2 + taskInfo: + name: args-generator-op-2 + for-loop-2: + componentRef: + name: comp-for-loop-2 + dependentTasks: + - args-generator-op-2 + inputs: + parameters: + pipelinechannel--args-generator-op-2-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: args-generator-op-2 + pipelinechannel--args-generator-op-Output: + componentInputParameter: pipelinechannel--args-generator-op-Output + pipelinechannel--flip-coin-op-Output: + componentInputParameter: pipelinechannel--flip-coin-op-Output + pipelinechannel--loop_parameter: + componentInputParameter: pipelinechannel--loop_parameter + pipelinechannel--msg: + componentInputParameter: pipelinechannel--msg + parameterIterator: + itemInput: pipelinechannel--args-generator-op-Output-loop-item + items: + inputParameter: pipelinechannel--args-generator-op-Output + taskInfo: + name: for-loop-2 + inputDefinitions: + parameters: + pipelinechannel--args-generator-op-Output: + parameterType: LIST + pipelinechannel--flip-coin-op-Output: + parameterType: STRING + pipelinechannel--loop_parameter: + parameterType: LIST + pipelinechannel--msg: + parameterType: STRING + comp-condition-13: + dag: + tasks: + print-text-8: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text-8 + inputs: + parameters: + msg: 
+ runtimeValue: + constant: '1' + taskInfo: + name: print-text-8 + inputDefinitions: + parameters: + pipelinechannel--flip-coin-op-Output: + parameterType: STRING + pipelinechannel--loop-item-param-11: + parameterType: STRING + comp-condition-15: + dag: + tasks: + for-loop-16: + componentRef: + name: comp-for-loop-16 + inputs: + parameters: + pipelinechannel--loop_parameter-loop-item: + componentInputParameter: pipelinechannel--loop_parameter-loop-item + pipelinechannel--loop_parameter-loop-item-subvar-B_b: + componentInputParameter: pipelinechannel--loop_parameter-loop-item + parameterExpressionSelector: parseJson(string_value)["B_b"] + parameterIterator: + itemInput: pipelinechannel--loop_parameter-loop-item-subvar-B_b-loop-item + items: + inputParameter: pipelinechannel--loop_parameter-loop-item-subvar-B_b + taskInfo: + name: for-loop-16 + inputDefinitions: + parameters: + pipelinechannel--loop_parameter-loop-item: + parameterType: STRING + pipelinechannel--loop_parameter-loop-item-subvar-A_a: + parameterType: STRING + comp-condition-3: + dag: + tasks: + print-text-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text-2 + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item + parameterExpressionSelector: parseJson(string_value)["B_b"] + taskInfo: + name: print-text-2 + inputDefinitions: + parameters: + pipelinechannel--args-generator-op-Output-loop-item: + parameterType: STRING + pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a: + parameterType: STRING + pipelinechannel--flip-coin-op-Output: + parameterType: STRING + comp-condition-4: + dag: + tasks: + print-text-3: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text-3 + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item + parameterExpressionSelector: parseJson(string_value)["B_b"] + taskInfo: + name: print-text-3 + inputDefinitions: + parameters: + pipelinechannel--args-generator-op-Output-loop-item: + parameterType: STRING + pipelinechannel--flip-coin-op-Output: + parameterType: STRING + comp-condition-5: + dag: + tasks: + for-loop-7: + componentRef: + name: comp-for-loop-7 + inputs: + parameters: + pipelinechannel--args-generator-op-Output-loop-item: + componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item + pipelinechannel--flip-coin-op-Output: + componentInputParameter: pipelinechannel--flip-coin-op-Output + parameterIterator: + itemInput: pipelinechannel--loop-item-param-6 + items: + raw: '[{"a": "-1"}, {"a": "-2"}]' + taskInfo: + name: for-loop-7 + inputDefinitions: + parameters: + pipelinechannel--args-generator-op-Output-loop-item: + parameterType: STRING + pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a: + parameterType: STRING + pipelinechannel--flip-coin-op-Output: + parameterType: STRING + comp-flip-coin-op: + executorLabel: exec-flip-coin-op + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-for-loop-10: + dag: + tasks: + print-text-6: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text-6 + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--loop_parameter-loop-item + msg2: + componentInputParameter: pipelinechannel--args-generator-op-2-Output-loop-item + parameterExpressionSelector: parseJson(string_value)["A_a"] + taskInfo: + name: print-text-6 + inputDefinitions: + parameters: + pipelinechannel--args-generator-op-2-Output: + 
parameterType: LIST + pipelinechannel--args-generator-op-2-Output-loop-item: + parameterType: STRING + pipelinechannel--flip-coin-op-Output: + parameterType: STRING + pipelinechannel--loop_parameter-loop-item: + parameterType: STRING + comp-for-loop-12: + dag: + tasks: + condition-13: + componentRef: + name: comp-condition-13 + inputs: + parameters: + pipelinechannel--flip-coin-op-Output: + componentInputParameter: pipelinechannel--flip-coin-op-Output + pipelinechannel--loop-item-param-11: + componentInputParameter: pipelinechannel--loop-item-param-11 + taskInfo: + name: condition-13 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--loop-item-param-11'] + == '1' + print-text-7: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text-7 + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--loop-item-param-11 + taskInfo: + name: print-text-7 + inputDefinitions: + parameters: + pipelinechannel--flip-coin-op-Output: + parameterType: STRING + pipelinechannel--loop-item-param-11: + parameterType: STRING + comp-for-loop-14: + dag: + tasks: + condition-15: + componentRef: + name: comp-condition-15 + inputs: + parameters: + pipelinechannel--loop_parameter-loop-item: + componentInputParameter: pipelinechannel--loop_parameter-loop-item + pipelinechannel--loop_parameter-loop-item-subvar-A_a: + componentInputParameter: pipelinechannel--loop_parameter-loop-item + parameterExpressionSelector: parseJson(string_value)["A_a"] + taskInfo: + name: condition-15 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--loop_parameter-loop-item-subvar-A_a'] + == 'heads' + inputDefinitions: + parameters: + pipelinechannel--loop_parameter: + parameterType: LIST + pipelinechannel--loop_parameter-loop-item: + parameterType: STRING + comp-for-loop-16: + dag: + tasks: + print-text-9: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text-9 + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--loop_parameter-loop-item-subvar-B_b-loop-item + taskInfo: + name: print-text-9 + inputDefinitions: + parameters: + pipelinechannel--loop_parameter-loop-item: + parameterType: STRING + pipelinechannel--loop_parameter-loop-item-subvar-B_b: + parameterType: STRING + pipelinechannel--loop_parameter-loop-item-subvar-B_b-loop-item: + parameterType: STRING + comp-for-loop-2: + dag: + tasks: + condition-3: + componentRef: + name: comp-condition-3 + inputs: + parameters: + pipelinechannel--args-generator-op-Output-loop-item: + componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item + pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a: + componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item + parameterExpressionSelector: parseJson(string_value)["A_a"] + pipelinechannel--flip-coin-op-Output: + componentInputParameter: pipelinechannel--flip-coin-op-Output + taskInfo: + name: condition-3 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a'] + == 'heads' + condition-4: + componentRef: + name: comp-condition-4 + inputs: + parameters: + pipelinechannel--args-generator-op-Output-loop-item: + componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item + pipelinechannel--flip-coin-op-Output: + componentInputParameter: pipelinechannel--flip-coin-op-Output + taskInfo: + name: condition-4 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--flip-coin-op-Output'] + == 'heads' + 
condition-5: + componentRef: + name: comp-condition-5 + inputs: + parameters: + pipelinechannel--args-generator-op-Output-loop-item: + componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item + pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a: + componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item + parameterExpressionSelector: parseJson(string_value)["A_a"] + pipelinechannel--flip-coin-op-Output: + componentInputParameter: pipelinechannel--flip-coin-op-Output + taskInfo: + name: condition-5 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a'] + == 'tails' + for-loop-12: + componentRef: + name: comp-for-loop-12 + inputs: + parameters: + pipelinechannel--flip-coin-op-Output: + componentInputParameter: pipelinechannel--flip-coin-op-Output + parameterIterator: + itemInput: pipelinechannel--loop-item-param-11 + items: + raw: '["1", "2"]' + taskInfo: + name: for-loop-12 + for-loop-8: + componentRef: + name: comp-for-loop-8 + inputs: + parameters: + pipelinechannel--args-generator-op-Output-loop-item: + componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item + pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b: + componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item + parameterExpressionSelector: parseJson(string_value)["B_b"] + pipelinechannel--flip-coin-op-Output: + componentInputParameter: pipelinechannel--flip-coin-op-Output + parameterIterator: + itemInput: pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b-loop-item + items: + inputParameter: pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b + taskInfo: + name: for-loop-8 + for-loop-9: + componentRef: + name: comp-for-loop-9 + inputs: + parameters: + pipelinechannel--args-generator-op-2-Output: + componentInputParameter: pipelinechannel--args-generator-op-2-Output + pipelinechannel--flip-coin-op-Output: + componentInputParameter: pipelinechannel--flip-coin-op-Output + pipelinechannel--loop_parameter: + componentInputParameter: pipelinechannel--loop_parameter + parameterIterator: + itemInput: pipelinechannel--loop_parameter-loop-item + items: + inputParameter: pipelinechannel--loop_parameter + taskInfo: + name: for-loop-9 + print-text: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--msg + taskInfo: + name: print-text + inputDefinitions: + parameters: + pipelinechannel--args-generator-op-2-Output: + parameterType: LIST + pipelinechannel--args-generator-op-Output: + parameterType: LIST + pipelinechannel--args-generator-op-Output-loop-item: + parameterType: STRING + pipelinechannel--flip-coin-op-Output: + parameterType: STRING + pipelinechannel--loop_parameter: + parameterType: LIST + pipelinechannel--msg: + parameterType: STRING + comp-for-loop-7: + dag: + tasks: + print-struct: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-struct + inputs: + parameters: + struct: + componentInputParameter: pipelinechannel--loop-item-param-6 + taskInfo: + name: print-struct + inputDefinitions: + parameters: + pipelinechannel--args-generator-op-Output-loop-item: + parameterType: STRING + pipelinechannel--flip-coin-op-Output: + parameterType: STRING + pipelinechannel--loop-item-param-6: + parameterType: STRUCT + comp-for-loop-8: + dag: + tasks: + print-text-4: + cachingOptions: + enableCache: true + componentRef: + name: 
comp-print-text-4 + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b-loop-item + taskInfo: + name: print-text-4 + inputDefinitions: + parameters: + pipelinechannel--args-generator-op-Output-loop-item: + parameterType: STRING + pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b: + parameterType: STRING + pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b-loop-item: + parameterType: STRING + pipelinechannel--flip-coin-op-Output: + parameterType: STRING + comp-for-loop-9: + dag: + tasks: + for-loop-10: + componentRef: + name: comp-for-loop-10 + inputs: + parameters: + pipelinechannel--args-generator-op-2-Output: + componentInputParameter: pipelinechannel--args-generator-op-2-Output + pipelinechannel--flip-coin-op-Output: + componentInputParameter: pipelinechannel--flip-coin-op-Output + pipelinechannel--loop_parameter-loop-item: + componentInputParameter: pipelinechannel--loop_parameter-loop-item + parameterIterator: + itemInput: pipelinechannel--args-generator-op-2-Output-loop-item + items: + inputParameter: pipelinechannel--args-generator-op-2-Output + taskInfo: + name: for-loop-10 + print-text-5: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text-5 + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--loop_parameter-loop-item + taskInfo: + name: print-text-5 + inputDefinitions: + parameters: + pipelinechannel--args-generator-op-2-Output: + parameterType: LIST + pipelinechannel--flip-coin-op-Output: + parameterType: STRING + pipelinechannel--loop_parameter: + parameterType: LIST + pipelinechannel--loop_parameter-loop-item: + parameterType: STRING + comp-print-struct: + executorLabel: exec-print-struct + inputDefinitions: + parameters: + struct: + parameterType: STRUCT + comp-print-text: + executorLabel: exec-print-text + inputDefinitions: + parameters: + msg: + parameterType: STRING + msg2: + isOptional: true + parameterType: STRING + comp-print-text-2: + executorLabel: exec-print-text-2 + inputDefinitions: + parameters: + msg: + parameterType: STRING + msg2: + isOptional: true + parameterType: STRING + comp-print-text-3: + executorLabel: exec-print-text-3 + inputDefinitions: + parameters: + msg: + parameterType: STRING + msg2: + isOptional: true + parameterType: STRING + comp-print-text-4: + executorLabel: exec-print-text-4 + inputDefinitions: + parameters: + msg: + parameterType: STRING + msg2: + isOptional: true + parameterType: STRING + comp-print-text-5: + executorLabel: exec-print-text-5 + inputDefinitions: + parameters: + msg: + parameterType: STRING + msg2: + isOptional: true + parameterType: STRING + comp-print-text-6: + executorLabel: exec-print-text-6 + inputDefinitions: + parameters: + msg: + parameterType: STRING + msg2: + isOptional: true + parameterType: STRING + comp-print-text-7: + executorLabel: exec-print-text-7 + inputDefinitions: + parameters: + msg: + parameterType: STRING + msg2: + isOptional: true + parameterType: STRING + comp-print-text-8: + executorLabel: exec-print-text-8 + inputDefinitions: + parameters: + msg: + parameterType: STRING + msg2: + isOptional: true + parameterType: STRING + comp-print-text-9: + executorLabel: exec-print-text-9 + inputDefinitions: + parameters: + msg: + parameterType: STRING + msg2: + isOptional: true + parameterType: STRING +deploymentSpec: + executors: + exec-args-generator-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - args_generator_op + command: + - 
sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef args_generator_op() -> list:\n return [\n {\n \ + \ 'A_a': '1',\n 'B_b': ['2', '20'],\n },\n \ + \ {\n 'A_a': '10',\n 'B_b': ['22', '222'],\n \ + \ },\n ]\n\n" + image: python:3.9 + exec-args-generator-op-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - args_generator_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef args_generator_op() -> list:\n return [\n {\n \ + \ 'A_a': '1',\n 'B_b': ['2', '20'],\n },\n \ + \ {\n 'A_a': '10',\n 'B_b': ['22', '222'],\n \ + \ },\n ]\n\n" + image: python:3.9 + exec-flip-coin-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - flip_coin_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ + \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ + \ 1) == 0 else 'tails'\n return result\n\n" + image: python:3.9 + exec-print-struct: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_struct + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_struct(struct: dict):\n print(struct)\n\n" + image: python:3.9 + exec-print-text: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_text + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ + \ {msg}, msg2: {msg2}')\n\n" + image: python:3.9 + exec-print-text-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_text + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ + \ {msg}, msg2: {msg2}')\n\n" + image: python:3.9 + exec-print-text-3: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_text + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ + \ {msg}, msg2: {msg2}')\n\n" + image: python:3.9 + exec-print-text-4: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_text + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ + \ {msg}, msg2: {msg2}')\n\n" + image: python:3.9 + exec-print-text-5: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_text + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ + \ {msg}, msg2: {msg2}')\n\n" + image: python:3.9 + exec-print-text-6: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_text + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ + \ {msg}, msg2: {msg2}')\n\n" + image: python:3.9 + exec-print-text-7: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_text + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ + \ {msg}, msg2: {msg2}')\n\n" + image: python:3.9 + exec-print-text-8: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_text + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ + \ {msg}, msg2: {msg2}')\n\n" + image: python:3.9 + exec-print-text-9: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_text + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ + \ {msg}, msg2: {msg2}')\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-with-loops-and-conditions-multi-layers +root: + dag: + tasks: + args-generator-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-args-generator-op + taskInfo: + name: args-generator-op + condition-1: + componentRef: + name: comp-condition-1 + dependentTasks: + - args-generator-op + - flip-coin-op + inputs: + parameters: + pipelinechannel--args-generator-op-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: args-generator-op + pipelinechannel--flip-coin-op-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: flip-coin-op + pipelinechannel--loop_parameter: + componentInputParameter: loop_parameter + pipelinechannel--msg: + componentInputParameter: msg + taskInfo: + name: condition-1 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--flip-coin-op-Output'] + != 'no-such-result' + flip-coin-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-flip-coin-op + taskInfo: + name: flip-coin-op + for-loop-14: + componentRef: + name: comp-for-loop-14 + inputs: + parameters: + pipelinechannel--loop_parameter: + componentInputParameter: loop_parameter + parameterIterator: + itemInput: pipelinechannel--loop_parameter-loop-item + items: + inputParameter: pipelinechannel--loop_parameter + taskInfo: + name: for-loop-14 + inputDefinitions: + parameters: + loop_parameter: + defaultValue: + - A_a: heads + B_b: + - A + - B + - A_a: tails + B_b: + - X + - Y + - Z + isOptional: true + parameterType: LIST + msg: + defaultValue: hello + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_metadata_fields.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_metadata_fields.py similarity index 98% rename from sdk/python/test_data/pipelines/pipeline_with_metadata_fields.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_metadata_fields.py index a5c465e7fce..3f0412f1b4e 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_metadata_fields.py +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_metadata_fields.py @@ -65,7 +65,6 @@ def dataset_joiner( @dsl.pipeline( - display_name='Concatenation pipeline', description='A pipeline that joins string to in_dataset.', ) def dataset_concatenator( diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_metadata_fields.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_metadata_fields.yaml new file mode 100644 index 00000000000..750f5043b04 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_metadata_fields.yaml @@ -0,0 +1,178 @@ +# PIPELINE DEFINITION +# Name: dataset-concatenator +# Description: A pipeline 
that joins string to in_dataset. +# Inputs: +# in_dataset: system.Dataset +# string: str +# Outputs: +# Output: system.Dataset +components: + comp-dataset-joiner: + executorLabel: exec-dataset-joiner + inputDefinitions: + artifacts: + dataset_a: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + description: First dataset. + dataset_b: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + description: Second dataset. + outputDefinitions: + artifacts: + out_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + description: The concatenated dataset. + parameters: + Output: + description: The concatenated string. + parameterType: STRING + comp-str-to-dataset: + executorLabel: exec-str-to-dataset + inputDefinitions: + parameters: + string: + description: The string. + parameterType: STRING + outputDefinitions: + artifacts: + dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + description: The dataset. +deploymentSpec: + executors: + exec-dataset-joiner: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - dataset_joiner + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef dataset_joiner(\n dataset_a: Input[Dataset],\n dataset_b:\ + \ Input[Dataset],\n out_dataset: Output[Dataset],\n) -> str:\n \"\"\ + \"Concatenate dataset_a and dataset_b.\n\n Also returns the concatenated\ + \ string.\n\n Args:\n dataset_a: First dataset.\n dataset_b:\ + \ Second dataset.\n\n Returns:\n out_dataset: The concatenated\ + \ dataset.\n Output: The concatenated string.\n \"\"\"\n with\ + \ open(dataset_a.path) as f:\n content_a = f.read()\n\n with open(dataset_b.path)\ + \ as f:\n content_b = f.read()\n\n concatenated_string = content_a\ + \ + content_b\n with open(out_dataset.path, 'w') as f:\n f.write(concatenated_string)\n\ + \n return concatenated_string\n\n" + image: python:3.9 + exec-str-to-dataset: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - str_to_dataset + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef str_to_dataset(string: str, dataset: Output[Dataset]):\n \"\ + \"\"Convert string to dataset.\n\n Args:\n string: The string.\n\ + \n Returns:\n dataset: The dataset.\n \"\"\"\n with open(dataset.path,\ + \ 'w') as f:\n f.write(string)\n\n" + image: python:3.9 +pipelineInfo: + description: A pipeline that joins string to in_dataset. + name: dataset-concatenator +root: + dag: + outputs: + artifacts: + Output: + artifactSelectors: + - outputArtifactKey: out_dataset + producerSubtask: dataset-joiner + tasks: + dataset-joiner: + cachingOptions: + enableCache: true + componentRef: + name: comp-dataset-joiner + dependentTasks: + - str-to-dataset + inputs: + artifacts: + dataset_a: + taskOutputArtifact: + outputArtifactKey: dataset + producerTask: str-to-dataset + dataset_b: + componentInputArtifact: in_dataset + taskInfo: + name: dataset-joiner + str-to-dataset: + cachingOptions: + enableCache: true + componentRef: + name: comp-str-to-dataset + inputs: + parameters: + string: + componentInputParameter: string + taskInfo: + name: str-to-dataset + inputDefinitions: + artifacts: + in_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + description: Dataset to which to concatenate string. + parameters: + string: + description: String to concatenate to in_artifact. + parameterType: STRING + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + description: The final concatenated dataset. 
+schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/sdk/python/test_data/pipelines/pipeline_with_nested_loops.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_nested_loops.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_nested_loops.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_nested_loops.py diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_nested_loops.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_nested_loops.yaml new file mode 100644 index 00000000000..16015edebdc --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_nested_loops.yaml @@ -0,0 +1,265 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-nested-loops +# Inputs: +# loop_parameter: list [Default: [{'p_b': 'hello', 'p_a': [{'q_a': '1'}, {'q_a': '2'}]}, {'p_b': 'halo', 'p_a': [{'q_a': '11'}, {'q_a': '22'}]}]] +components: + comp-for-loop-1: + dag: + tasks: + for-loop-2: + componentRef: + name: comp-for-loop-2 + inputs: + parameters: + pipelinechannel--loop_parameter-loop-item: + componentInputParameter: pipelinechannel--loop_parameter-loop-item + pipelinechannel--loop_parameter-loop-item-subvar-p_a: + componentInputParameter: pipelinechannel--loop_parameter-loop-item + parameterExpressionSelector: parseJson(string_value)["p_a"] + parameterIterator: + itemInput: pipelinechannel--loop_parameter-loop-item-subvar-p_a-loop-item + items: + inputParameter: pipelinechannel--loop_parameter-loop-item-subvar-p_a + taskInfo: + name: for-loop-2 + inputDefinitions: + parameters: + pipelinechannel--loop_parameter: + parameterType: LIST + pipelinechannel--loop_parameter-loop-item: + parameterType: STRING + comp-for-loop-2: + dag: + tasks: + print-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--loop_parameter-loop-item-subvar-p_a-loop-item + parameterExpressionSelector: parseJson(string_value)["q_a"] + taskInfo: + name: print-op + inputDefinitions: + parameters: + pipelinechannel--loop_parameter-loop-item: + parameterType: STRING + pipelinechannel--loop_parameter-loop-item-subvar-p_a: + parameterType: STRING + pipelinechannel--loop_parameter-loop-item-subvar-p_a-loop-item: + parameterType: STRING + comp-for-loop-4: + dag: + tasks: + for-loop-6: + componentRef: + name: comp-for-loop-6 + inputs: + parameters: + pipelinechannel--loop-item-param-3: + componentInputParameter: pipelinechannel--loop-item-param-3 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-5 + items: + raw: '["100", "200", "300"]' + taskInfo: + name: for-loop-6 + print-op-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-2 + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--loop-item-param-3 + taskInfo: + name: print-op-2 + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-3: + parameterType: STRING + comp-for-loop-6: + dag: + tasks: + print-op-3: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op-3 + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--loop-item-param-3 + msg2: + componentInputParameter: pipelinechannel--loop-item-param-5 + taskInfo: + name: print-op-3 + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-3: + parameterType: STRING + pipelinechannel--loop-item-param-5: + parameterType: STRING + comp-print-op: + executorLabel: exec-print-op + inputDefinitions: + parameters: + msg: + parameterType: 
STRING + msg2: + isOptional: true + parameterType: STRING + comp-print-op-2: + executorLabel: exec-print-op-2 + inputDefinitions: + parameters: + msg: + parameterType: STRING + msg2: + isOptional: true + parameterType: STRING + comp-print-op-3: + executorLabel: exec-print-op-3 + inputDefinitions: + parameters: + msg: + parameterType: STRING + msg2: + isOptional: true + parameterType: STRING +deploymentSpec: + executors: + exec-print-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ + \ {msg}, msg2: {msg2}')\n\n" + image: python:3.9 + exec-print-op-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ + \ {msg}, msg2: {msg2}')\n\n" + image: python:3.9 + exec-print-op-3: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ + \ {msg}, msg2: {msg2}')\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-with-nested-loops +root: + dag: + tasks: + for-loop-1: + componentRef: + name: comp-for-loop-1 + inputs: + parameters: + pipelinechannel--loop_parameter: + componentInputParameter: loop_parameter + parameterIterator: + itemInput: pipelinechannel--loop_parameter-loop-item + items: + inputParameter: pipelinechannel--loop_parameter + taskInfo: + name: for-loop-1 + for-loop-4: + componentRef: + name: comp-for-loop-4 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-3 + items: + raw: '["1", "2"]' + taskInfo: + name: for-loop-4 + inputDefinitions: + parameters: + loop_parameter: + defaultValue: + - p_a: + - q_a: '1' + - q_a: '2' + p_b: hello + - p_a: + - q_a: '11' + - q_a: '22' + p_b: halo + isOptional: true + parameterType: LIST +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_only_display_name.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_only_display_name.yaml new file mode 100644 index 00000000000..37664b381be --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_only_display_name.yaml @@ -0,0 +1,29 @@ +# PIPELINE DEFINITION +# Name: echo +components: + comp-echo: + executorLabel: exec-echo +deploymentSpec: + executors: + exec-echo: + container: + args: + - hello world + command: + - echo + image: public.ecr.aws/docker/library/python:3.12 +pipelineInfo: + displayName: echo-displayname + name: echo-name +root: + dag: + tasks: + echo: + cachingOptions: + enableCache: true + componentRef: + name: comp-echo + taskInfo: + name: echo +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_parallelfor_list_artifacts.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_parallelfor_list_artifacts.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_parallelfor_list_artifacts.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_parallelfor_list_artifacts.py diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_parallelfor_list_artifacts_GH-12033.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_parallelfor_list_artifacts_GH-12033.yaml new file mode 100644 index 00000000000..50abb4878d3 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_parallelfor_list_artifacts_GH-12033.yaml @@ -0,0 +1,420 @@ +# PIPELINE DEFINITION +# Name: pipeline-parallelfor-artifacts +components: + comp-for-loop-1: + dag: + tasks: + print-artifact-name: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-artifact-name + inputs: + artifacts: + artifact: + componentInputArtifact: pipelinechannel--make-artifacts-Output-loop-item + taskInfo: + name: 
print-artifact-name + inputDefinitions: + artifacts: + pipelinechannel--make-artifacts-Output: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + isArtifactList: true + pipelinechannel--make-artifacts-Output-loop-item: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-for-loop-1-2: + dag: + outputs: + artifacts: + pipelinechannel--make-artifact-Output: + artifactSelectors: + - outputArtifactKey: Output + producerSubtask: make-artifact + tasks: + make-artifact: + cachingOptions: + enableCache: true + componentRef: + name: comp-make-artifact + inputs: + parameters: + data: + componentInputParameter: pipelinechannel--texts-loop-item + taskInfo: + name: make-artifact + inputDefinitions: + parameters: + pipelinechannel--texts: + parameterType: LIST + pipelinechannel--texts-loop-item: + parameterType: STRING + outputDefinitions: + artifacts: + pipelinechannel--make-artifact-Output: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + isArtifactList: true + comp-for-loop-1-3: + dag: + outputs: + artifacts: + pipelinechannel--make-dataset-Output: + artifactSelectors: + - outputArtifactKey: Output + producerSubtask: make-dataset + tasks: + make-dataset: + cachingOptions: + enableCache: true + componentRef: + name: comp-make-dataset + inputs: + parameters: + data: + componentInputParameter: pipelinechannel--texts-loop-item + taskInfo: + name: make-dataset + inputDefinitions: + parameters: + pipelinechannel--texts: + parameterType: LIST + pipelinechannel--texts-loop-item: + parameterType: STRING + outputDefinitions: + artifacts: + pipelinechannel--make-dataset-Output: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isArtifactList: true + comp-for-loop-2: + dag: + tasks: + print-artifact-name-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-artifact-name-2 + inputs: + artifacts: + artifact: + componentInputArtifact: pipelinechannel--make-datasets-Output-loop-item + taskInfo: + name: print-artifact-name-2 + inputDefinitions: + artifacts: + pipelinechannel--make-datasets-Output: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isArtifactList: true + pipelinechannel--make-datasets-Output-loop-item: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-make-artifact: + executorLabel: exec-make-artifact + inputDefinitions: + parameters: + data: + parameterType: STRING + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-make-artifacts: + dag: + outputs: + artifacts: + Output: + artifactSelectors: + - outputArtifactKey: pipelinechannel--make-artifact-Output + producerSubtask: for-loop-1 + tasks: + for-loop-1: + componentRef: + name: comp-for-loop-1-2 + inputs: + parameters: + pipelinechannel--texts: + componentInputParameter: texts + parameterIterator: + itemInput: pipelinechannel--texts-loop-item + items: + inputParameter: pipelinechannel--texts + taskInfo: + name: for-loop-1 + inputDefinitions: + parameters: + texts: + defaultValue: + - Hello + - ',' + - ' ' + - world! 
+ isOptional: true + parameterType: LIST + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + isArtifactList: true + comp-make-dataset: + executorLabel: exec-make-dataset + inputDefinitions: + parameters: + data: + parameterType: STRING + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-make-datasets: + dag: + outputs: + artifacts: + Output: + artifactSelectors: + - outputArtifactKey: pipelinechannel--make-dataset-Output + producerSubtask: for-loop-1 + tasks: + for-loop-1: + componentRef: + name: comp-for-loop-1-3 + inputs: + parameters: + pipelinechannel--texts: + componentInputParameter: texts + parameterIterator: + itemInput: pipelinechannel--texts-loop-item + items: + inputParameter: pipelinechannel--texts + taskInfo: + name: for-loop-1 + inputDefinitions: + parameters: + texts: + defaultValue: + - Hello + - ',' + - ' ' + - world! + isOptional: true + parameterType: LIST + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isArtifactList: true + comp-print-artifact-name: + executorLabel: exec-print-artifact-name + inputDefinitions: + artifacts: + artifact: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-artifact-name-2: + executorLabel: exec-print-artifact-name-2 + inputDefinitions: + artifacts: + artifact: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + outputDefinitions: + parameters: + Output: + parameterType: STRING +deploymentSpec: + executors: + exec-make-artifact: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - make_artifact + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef make_artifact(data: str) -> Artifact:\n artifact = Artifact(uri=dsl.get_uri(),\ + \ metadata={'length': len(data)})\n with open(artifact.path, 'w') as\ + \ f:\n f.write(data)\n return artifact\n\n" + image: python:3.9 + exec-make-dataset: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - make_dataset + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef make_dataset(data: str) -> Dataset:\n dataset = Dataset(uri=dsl.get_uri(),\ + \ metadata={'length': len(data)})\n with open(dataset.path, 'w') as f:\n\ + \ f.write(data)\n return dataset\n\n" + image: python:3.9 + exec-print-artifact-name: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_artifact_name + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_artifact_name(artifact: Artifact) -> str:\n print(artifact.name)\n\ + \ return artifact.name\n\n" + image: python:3.9 + exec-print-artifact-name-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_artifact_name + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_artifact_name(artifact: Artifact) -> str:\n print(artifact.name)\n\ + \ return artifact.name\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-parallelfor-artifacts +root: + dag: + tasks: + for-loop-1: + artifactIterator: + itemInput: pipelinechannel--make-artifacts-Output-loop-item + items: + inputArtifact: pipelinechannel--make-artifacts-Output + componentRef: + name: comp-for-loop-1 + dependentTasks: + - make-artifacts + inputs: + artifacts: + pipelinechannel--make-artifacts-Output: + taskOutputArtifact: + outputArtifactKey: Output + producerTask: make-artifacts + taskInfo: + name: for-loop-1 + for-loop-2: + artifactIterator: + itemInput: pipelinechannel--make-datasets-Output-loop-item + items: + inputArtifact: pipelinechannel--make-datasets-Output + componentRef: + name: comp-for-loop-2 + dependentTasks: + - make-datasets + inputs: + artifacts: + pipelinechannel--make-datasets-Output: + taskOutputArtifact: + outputArtifactKey: Output + producerTask: make-datasets + taskInfo: + name: for-loop-2 + make-artifacts: + cachingOptions: + enableCache: true + componentRef: + name: comp-make-artifacts + taskInfo: + name: make-artifacts + make-datasets: + cachingOptions: + enableCache: true + componentRef: + name: comp-make-datasets + taskInfo: + name: make-datasets +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_parallelfor_parallelism.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_parallelfor_parallelism.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_parallelfor_parallelism.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_parallelfor_parallelism.py diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_parallelfor_parallelism.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_parallelfor_parallelism.yaml new file mode 100644 index 00000000000..7c8542ea783 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_parallelfor_parallelism.yaml @@ -0,0 +1,1017 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-loops +# Inputs: +# loop_parameter: list +components: + comp-for-loop-1: + dag: + tasks: + for-loop-2: + componentRef: + name: comp-for-loop-2 + inputs: + parameters: + pipelinechannel--loop_parameter: + componentInputParameter: pipelinechannel--loop_parameter + parameterIterator: + itemInput: pipelinechannel--loop_parameter-loop-item + items: + inputParameter: pipelinechannel--loop_parameter + taskInfo: + name: for-loop-2 + print-text: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--loop_parameter-loop-item + taskInfo: + name: print-text + inputDefinitions: + parameters: + pipelinechannel--loop_parameter: + parameterType: LIST + 
pipelinechannel--loop_parameter-loop-item: + parameterType: STRING + comp-for-loop-10: + dag: + tasks: + print-int-3: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-int-3 + inputs: + parameters: + x: + componentInputParameter: pipelinechannel--list-dict-maker-1-Output-loop-item + parameterExpressionSelector: parseJson(string_value)["a"] + taskInfo: + name: print-int-3 + inputDefinitions: + parameters: + pipelinechannel--list-dict-maker-1-Output: + parameterType: LIST + pipelinechannel--list-dict-maker-1-Output-loop-item: + parameterType: STRUCT + comp-for-loop-11: + dag: + tasks: + print-int-4: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-int-4 + inputs: + parameters: + x: + componentInputParameter: pipelinechannel--list-dict-maker-2-Output-loop-item + parameterExpressionSelector: parseJson(string_value)["a"] + taskInfo: + name: print-int-4 + inputDefinitions: + parameters: + pipelinechannel--list-dict-maker-2-Output: + parameterType: LIST + pipelinechannel--list-dict-maker-2-Output-loop-item: + parameterType: STRUCT + comp-for-loop-12: + dag: + tasks: + print-int-5: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-int-5 + inputs: + parameters: + x: + componentInputParameter: pipelinechannel--list-dict-maker-3-Output-loop-item + parameterExpressionSelector: parseJson(string_value)["a"] + taskInfo: + name: print-int-5 + inputDefinitions: + parameters: + pipelinechannel--list-dict-maker-3-Output: + parameterType: LIST + pipelinechannel--list-dict-maker-3-Output-loop-item: + parameterType: STRING + comp-for-loop-13: + dag: + tasks: + print-int-6: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-int-6 + inputs: + parameters: + x: + componentInputParameter: pipelinechannel--list-dict-maker-1-2-Output-loop-item + parameterExpressionSelector: parseJson(string_value)["a"] + taskInfo: + name: print-int-6 + inputDefinitions: + parameters: + pipelinechannel--list-dict-maker-1-2-Output: + parameterType: LIST + pipelinechannel--list-dict-maker-1-2-Output-loop-item: + parameterType: STRING + comp-for-loop-2: + dag: + tasks: + print-text-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text-2 + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--loop_parameter-loop-item + taskInfo: + name: print-text-2 + inputDefinitions: + parameters: + pipelinechannel--loop_parameter: + parameterType: LIST + pipelinechannel--loop_parameter-loop-item: + parameterType: STRING + comp-for-loop-4: + dag: + tasks: + for-loop-6: + componentRef: + name: comp-for-loop-6 + iteratorPolicy: + parallelismLimit: 1 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-5 + items: + raw: '[{"A_a": "10", "B_b": "20"}, {"A_a": "100", "B_b": "200"}]' + taskInfo: + name: for-loop-6 + print-text-3: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text-3 + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--loop-item-param-3 + parameterExpressionSelector: parseJson(string_value)["A_a"] + taskInfo: + name: print-text-3 + print-text-4: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text-4 + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--loop-item-param-3 + parameterExpressionSelector: parseJson(string_value)["B_b"] + taskInfo: + name: print-text-4 + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-3: + parameterType: STRUCT + comp-for-loop-6: + dag: + tasks: + 
print-text-5: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text-5 + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--loop-item-param-5 + parameterExpressionSelector: parseJson(string_value)["A_a"] + taskInfo: + name: print-text-5 + print-text-6: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-text-6 + inputs: + parameters: + msg: + componentInputParameter: pipelinechannel--loop-item-param-5 + parameterExpressionSelector: parseJson(string_value)["B_b"] + taskInfo: + name: print-text-6 + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-5: + parameterType: STRUCT + comp-for-loop-8: + dag: + tasks: + print-int: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-int + inputs: + parameters: + x: + componentInputParameter: pipelinechannel--loop-item-param-7 + parameterExpressionSelector: parseJson(string_value)["a"] + taskInfo: + name: print-int + inputDefinitions: + parameters: + pipelinechannel--loop-item-param-7: + parameterType: STRUCT + comp-for-loop-9: + dag: + tasks: + print-int-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-int-2 + inputs: + parameters: + x: + componentInputParameter: pipelinechannel--list-dict-maker-0-Output-loop-item + parameterExpressionSelector: parseJson(string_value)["a"] + taskInfo: + name: print-int-2 + inputDefinitions: + parameters: + pipelinechannel--list-dict-maker-0-Output: + parameterType: LIST + pipelinechannel--list-dict-maker-0-Output-loop-item: + parameterType: STRUCT + comp-list-dict-maker-0: + executorLabel: exec-list-dict-maker-0 + outputDefinitions: + parameters: + Output: + parameterType: LIST + comp-list-dict-maker-1: + executorLabel: exec-list-dict-maker-1 + outputDefinitions: + parameters: + Output: + parameterType: LIST + comp-list-dict-maker-1-2: + executorLabel: exec-list-dict-maker-1-2 + outputDefinitions: + parameters: + Output: + parameterType: LIST + comp-list-dict-maker-2: + executorLabel: exec-list-dict-maker-2 + outputDefinitions: + parameters: + Output: + parameterType: LIST + comp-list-dict-maker-3: + executorLabel: exec-list-dict-maker-3 + outputDefinitions: + parameters: + Output: + parameterType: LIST + comp-print-int: + executorLabel: exec-print-int + inputDefinitions: + parameters: + x: + parameterType: NUMBER_INTEGER + comp-print-int-2: + executorLabel: exec-print-int-2 + inputDefinitions: + parameters: + x: + parameterType: NUMBER_INTEGER + comp-print-int-3: + executorLabel: exec-print-int-3 + inputDefinitions: + parameters: + x: + parameterType: NUMBER_INTEGER + comp-print-int-4: + executorLabel: exec-print-int-4 + inputDefinitions: + parameters: + x: + parameterType: NUMBER_INTEGER + comp-print-int-5: + executorLabel: exec-print-int-5 + inputDefinitions: + parameters: + x: + parameterType: NUMBER_INTEGER + comp-print-int-6: + executorLabel: exec-print-int-6 + inputDefinitions: + parameters: + x: + parameterType: NUMBER_INTEGER + comp-print-text: + executorLabel: exec-print-text + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-text-2: + executorLabel: exec-print-text-2 + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-text-3: + executorLabel: exec-print-text-3 + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-text-4: + executorLabel: exec-print-text-4 + inputDefinitions: + parameters: + msg: + parameterType: STRING + comp-print-text-5: + executorLabel: exec-print-text-5 + inputDefinitions: + 
parameters: + msg: + parameterType: STRING + comp-print-text-6: + executorLabel: exec-print-text-6 + inputDefinitions: + parameters: + msg: + parameterType: STRING +deploymentSpec: + executors: + exec-list-dict-maker-0: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - list_dict_maker_0 + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef list_dict_maker_0() -> List[Dict[str, int]]:\n \"\"\"Enforces\ + \ strict type checking - returns a list of dictionaries \n where keys\ + \ are strings and values are integers. For testing type \n handling during\ + \ compilation.\"\"\"\n return [{'a': 1, 'b': 2}, {'a': 2, 'b': 3}, {'a':\ + \ 3, 'b': 4}]\n\n" + image: python:3.9 + exec-list-dict-maker-1: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - list_dict_maker_1 + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef list_dict_maker_1() -> List[Dict]:\n \"\"\"Utilizes generic\ + \ dictionary typing (no enforcement of specific key or\n value types).\n\ + \n Tests flexibility in type handling.\n \"\"\"\n return [{'a':\ + \ 1, 'b': 2}, {'a': 2, 'b': 3}, {'a': 3, 'b': 4}]\n\n" + image: python:3.9 + exec-list-dict-maker-1-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - list_dict_maker_1 + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef list_dict_maker_1() -> List[Dict]:\n \"\"\"Utilizes generic\ + \ dictionary typing (no enforcement of specific key or\n value types).\n\ + \n Tests flexibility in type handling.\n \"\"\"\n return [{'a':\ + \ 1, 'b': 2}, {'a': 2, 'b': 3}, {'a': 3, 'b': 4}]\n\n" + image: python:3.9 + exec-list-dict-maker-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - list_dict_maker_2 + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef list_dict_maker_2() -> List[dict]:\n \"\"\"Returns a list\ + \ of dictionaries without type enforcement.\n\n Tests flexibility in\ + \ type handling.\n \"\"\"\n return [{'a': 1, 'b': 2}, {'a': 2, 'b':\ + \ 3}, {'a': 3, 'b': 4}]\n\n" + image: python:3.9 + exec-list-dict-maker-3: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - list_dict_maker_3 + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef list_dict_maker_3() -> List:\n \"\"\"Returns a basic list\ + \ (no typing or structure guarantees).\n\n Tests the limits of compiler\ + \ type handling.\n \"\"\"\n return [{'a': 1, 'b': 2}, {'a': 2, 'b':\ + \ 3}, {'a': 3, 'b': 4}]\n\n" + image: python:3.9 + exec-print-int: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_int + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_int(x: int):\n print(x)\n\n" + image: python:3.9 + exec-print-int-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_int + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_int(x: int):\n print(x)\n\n" + image: python:3.9 + exec-print-int-3: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_int + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_int(x: int):\n print(x)\n\n" + image: python:3.9 + exec-print-int-4: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_int + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_int(x: int):\n print(x)\n\n" + image: python:3.9 + exec-print-int-5: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_int + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_int(x: int):\n print(x)\n\n" + image: python:3.9 + exec-print-int-6: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_int + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_int(x: int):\n print(x)\n\n" + image: python:3.9 + exec-print-text: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_text + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" + image: python:3.9 + exec-print-text-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_text + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" + image: python:3.9 + exec-print-text-3: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_text + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" + image: python:3.9 + exec-print-text-4: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_text + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" + image: python:3.9 + exec-print-text-5: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_text + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" + image: python:3.9 + exec-print-text-6: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_text + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-with-loops +root: + dag: + tasks: + for-loop-1: + componentRef: + name: comp-for-loop-1 + inputs: + parameters: + pipelinechannel--loop_parameter: + componentInputParameter: loop_parameter + iteratorPolicy: + parallelismLimit: 2 + parameterIterator: + itemInput: pipelinechannel--loop_parameter-loop-item + items: + inputParameter: pipelinechannel--loop_parameter + taskInfo: + name: for-loop-1 + for-loop-10: + componentRef: + name: comp-for-loop-10 + dependentTasks: + - list-dict-maker-1 + inputs: + parameters: + pipelinechannel--list-dict-maker-1-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: list-dict-maker-1 + parameterIterator: + itemInput: pipelinechannel--list-dict-maker-1-Output-loop-item + items: + inputParameter: pipelinechannel--list-dict-maker-1-Output + taskInfo: + name: for-loop-10 + for-loop-11: + componentRef: + name: comp-for-loop-11 + dependentTasks: + - list-dict-maker-2 + inputs: + parameters: + pipelinechannel--list-dict-maker-2-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: list-dict-maker-2 + parameterIterator: + itemInput: pipelinechannel--list-dict-maker-2-Output-loop-item + items: + inputParameter: pipelinechannel--list-dict-maker-2-Output + taskInfo: + name: for-loop-11 + for-loop-12: + componentRef: + name: comp-for-loop-12 + dependentTasks: + - list-dict-maker-3 + inputs: + parameters: + pipelinechannel--list-dict-maker-3-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: list-dict-maker-3 + parameterIterator: + itemInput: pipelinechannel--list-dict-maker-3-Output-loop-item + items: + inputParameter: pipelinechannel--list-dict-maker-3-Output + taskInfo: + name: for-loop-12 + for-loop-13: + componentRef: + name: comp-for-loop-13 + dependentTasks: + - list-dict-maker-1-2 + inputs: + parameters: + pipelinechannel--list-dict-maker-1-2-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: list-dict-maker-1-2 + parameterIterator: + itemInput: pipelinechannel--list-dict-maker-1-2-Output-loop-item + items: + inputParameter: pipelinechannel--list-dict-maker-1-2-Output + taskInfo: + name: for-loop-13 + for-loop-4: + componentRef: + name: comp-for-loop-4 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-3 + items: + raw: '[{"A_a": "1", "B_b": "2"}, {"A_a": "10", "B_b": "20"}]' + taskInfo: + name: for-loop-4 + for-loop-8: + componentRef: + name: comp-for-loop-8 + iteratorPolicy: + parallelismLimit: 1 + parameterIterator: + itemInput: pipelinechannel--loop-item-param-7 + items: + raw: '[{"a": 1, "b": 2}, {"a": 2, "b": 3}, {"a": 3, "b": 4}]' + taskInfo: + name: for-loop-8 + for-loop-9: + componentRef: + name: comp-for-loop-9 + dependentTasks: + - list-dict-maker-0 + inputs: + parameters: + 
pipelinechannel--list-dict-maker-0-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: list-dict-maker-0 + parameterIterator: + itemInput: pipelinechannel--list-dict-maker-0-Output-loop-item + items: + inputParameter: pipelinechannel--list-dict-maker-0-Output + taskInfo: + name: for-loop-9 + list-dict-maker-0: + cachingOptions: + enableCache: true + componentRef: + name: comp-list-dict-maker-0 + taskInfo: + name: list-dict-maker-0 + list-dict-maker-1: + cachingOptions: + enableCache: true + componentRef: + name: comp-list-dict-maker-1 + taskInfo: + name: list-dict-maker-1 + list-dict-maker-1-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-list-dict-maker-1-2 + taskInfo: + name: list-dict-maker-1-2 + list-dict-maker-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-list-dict-maker-2 + taskInfo: + name: list-dict-maker-2 + list-dict-maker-3: + cachingOptions: + enableCache: true + componentRef: + name: comp-list-dict-maker-3 + taskInfo: + name: list-dict-maker-3 + inputDefinitions: + parameters: + loop_parameter: + parameterType: LIST +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/pipeline_with_retry.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_retry.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_retry.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_retry.py diff --git a/samples/v2/pipeline_with_secret_as_volume.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_secret_as_volume.py similarity index 100% rename from samples/v2/pipeline_with_secret_as_volume.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_secret_as_volume.py diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_secret_as_volume.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_secret_as_volume.yaml new file mode 100644 index 00000000000..bbb28c08124 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_secret_as_volume.yaml @@ -0,0 +1,74 @@ +# PIPELINE DEFINITION +# Name: pipeline-secret-volume +# Inputs: +# secret_param: str [Default: 'test-secret-1'] +components: + comp-comp: + executorLabel: exec-comp +deploymentSpec: + executors: + exec-comp: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - comp + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef comp():\n import os\n import sys\n username_path = os.path.join('/mnt/my_vol',\ + \ \"username\")\n\n # Check if the secret exists\n if not os.path.exists(username_path):\n\ + \ raise Exception('Secret not found')\n\n # Open the secret\n\ + \ with open(username_path, 'rb') as secret_file:\n username =\ + \ secret_file.read()\n\n # Decode the secret\n username = username.decode('utf-8')\n\ + \n # Print the secret\n print(f\"username: {username}\")\n assert\ + \ username == \"user1\"\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-secret-volume +root: + dag: + tasks: + comp: + cachingOptions: + enableCache: true + componentRef: + name: comp-comp + taskInfo: + name: comp + inputDefinitions: + parameters: + secret_param: + defaultValue: test-secret-1 + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 +--- +platforms: + kubernetes: + deploymentSpec: + executors: + exec-comp: + secretAsVolume: + - mountPath: /mnt/my_vol + optional: false + secretNameParameter: + componentInputParameter: secret_param \ No newline at end of file diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_semaphore.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_semaphore.yaml new file mode 100644 index 00000000000..298029d7b43 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_semaphore.yaml @@ -0,0 +1,54 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-semaphore +components: + comp-comp: + executorLabel: exec-comp +deploymentSpec: + executors: + exec-comp: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - comp + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef comp():\n pass\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-with-semaphore +root: + dag: + tasks: + comp: + cachingOptions: + enableCache: true + componentRef: + name: comp-comp + taskInfo: + name: comp +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 +--- +platforms: + kubernetes: + pipelineConfig: + semaphoreKey: semaphore diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_semphore.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_semphore.py new file mode 100644 index 00000000000..dac4cba751c --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_semphore.py @@ -0,0 +1,19 @@ +from kfp import dsl, compiler +from kfp.dsl import PipelineConfig + +config = PipelineConfig() +config.semaphore_key = 'semaphore' + +@dsl.component +def comp(): + pass + +@dsl.pipeline(pipeline_config=config) +def pipeline_with_semaphore(): + task = comp() + + +if __name__ == '__main__': + compiler.Compiler().compile( + pipeline_func=pipeline_with_semaphore, + package_path=__file__.replace('.py', '.yaml')) \ No newline at end of file diff --git a/sdk/python/test_data/pipelines/pipeline_with_string_machine_fields_pipeline_input.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_string_machine_fields_pipeline_input.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_string_machine_fields_pipeline_input.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_string_machine_fields_pipeline_input.py diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_string_machine_fields_pipeline_input.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_string_machine_fields_pipeline_input.yaml new file mode 100644 index 00000000000..2e01f7bea38 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_string_machine_fields_pipeline_input.yaml @@ -0,0 +1,116 @@ +# PIPELINE DEFINITION +# Name: pipeline +# Inputs: +# accelerator_limit: str [Default: '1'] +# accelerator_type: str [Default: 'NVIDIA_TESLA_P4'] +# cpu_limit: str [Default: '4000m'] +# memory_limit: str [Default: '15G'] +components: + comp-sum-numbers: + executorLabel: exec-sum-numbers + inputDefinitions: + parameters: + a: + parameterType: NUMBER_INTEGER + b: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER +deploymentSpec: + executors: + exec-sum-numbers: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - sum_numbers + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef sum_numbers(a: int, b: int) -> int:\n return a + b\n\n" + image: python:3.9 + resources: + accelerator: + resourceCount: '{{$.inputs.parameters[''pipelinechannel--accelerator_limit'']}}' + resourceType: '{{$.inputs.parameters[''pipelinechannel--accelerator_type'']}}' + resourceCpuLimit: '{{$.inputs.parameters[''pipelinechannel--cpu_limit'']}}' + resourceMemoryLimit: '{{$.inputs.parameters[''pipelinechannel--memory_limit'']}}' +pipelineInfo: + name: pipeline +root: + dag: + tasks: + sum-numbers: + cachingOptions: + enableCache: true + componentRef: + name: comp-sum-numbers + inputs: + parameters: + a: + runtimeValue: + constant: 1.0 + accelerator_count: + runtimeValue: + constant: '{{$.inputs.parameters[''pipelinechannel--accelerator_limit'']}}' + accelerator_type: + runtimeValue: + constant: '{{$.inputs.parameters[''pipelinechannel--accelerator_type'']}}' + b: + runtimeValue: + constant: 2.0 + cpu_limit: + runtimeValue: + constant: '{{$.inputs.parameters[''pipelinechannel--cpu_limit'']}}' + memory_limit: + runtimeValue: + constant: '{{$.inputs.parameters[''pipelinechannel--memory_limit'']}}' + pipelinechannel--accelerator_limit: + componentInputParameter: accelerator_limit + pipelinechannel--accelerator_type: + componentInputParameter: accelerator_type + pipelinechannel--cpu_limit: + componentInputParameter: cpu_limit + pipelinechannel--memory_limit: + componentInputParameter: memory_limit + taskInfo: + name: sum-numbers + inputDefinitions: + parameters: + accelerator_limit: + defaultValue: '1' + isOptional: true + parameterType: STRING + accelerator_type: + defaultValue: NVIDIA_TESLA_P4 + isOptional: true + parameterType: STRING + cpu_limit: + defaultValue: 4000m + isOptional: true + parameterType: STRING + memory_limit: + defaultValue: 15G + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/sdk/python/test_data/pipelines/pipeline_with_string_machine_fields_task_output.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_string_machine_fields_task_output.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_string_machine_fields_task_output.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_string_machine_fields_task_output.py diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_string_machine_fields_task_output.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_string_machine_fields_task_output.yaml new file mode 100644 index 00000000000..eb062b2ed76 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_string_machine_fields_task_output.yaml @@ -0,0 +1,265 @@ +# PIPELINE DEFINITION +# Name: pipeline +components: + comp-accelerator-limit: + executorLabel: exec-accelerator-limit + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-accelerator-type: + executorLabel: 
exec-accelerator-type + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-cpu-limit: + executorLabel: exec-cpu-limit + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-memory-limit: + executorLabel: exec-memory-limit + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-sum-numbers: + executorLabel: exec-sum-numbers + inputDefinitions: + parameters: + a: + parameterType: NUMBER_INTEGER + b: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER +deploymentSpec: + executors: + exec-accelerator-limit: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - accelerator_limit + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef accelerator_limit() -> str:\n return '1'\n\n" + image: python:3.9 + exec-accelerator-type: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - accelerator_type + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef accelerator_type() -> str:\n return 'NVIDIA_TESLA_P4'\n\n" + image: python:3.9 + exec-cpu-limit: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - cpu_limit + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef cpu_limit() -> str:\n return '4000m'\n\n" + image: python:3.9 + exec-memory-limit: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - memory_limit + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef memory_limit() -> str:\n return '15G'\n\n" + image: python:3.9 + exec-sum-numbers: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - sum_numbers + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef sum_numbers(a: int, b: int) -> int:\n return a + b\n\n" + image: python:3.9 + resources: + accelerator: + resourceCount: '{{$.inputs.parameters[''pipelinechannel--accelerator-limit-Output'']}}' + resourceType: '{{$.inputs.parameters[''pipelinechannel--accelerator-type-Output'']}}' + resourceCpuLimit: '{{$.inputs.parameters[''pipelinechannel--cpu-limit-Output'']}}' + resourceMemoryLimit: '{{$.inputs.parameters[''pipelinechannel--memory-limit-Output'']}}' +pipelineInfo: + name: pipeline +root: + dag: + tasks: + accelerator-limit: + cachingOptions: + enableCache: true + componentRef: + name: comp-accelerator-limit + taskInfo: + name: accelerator-limit + accelerator-type: + cachingOptions: + enableCache: true + componentRef: + name: comp-accelerator-type + taskInfo: + name: accelerator-type + cpu-limit: + cachingOptions: + enableCache: true + componentRef: + name: comp-cpu-limit + taskInfo: + name: cpu-limit + memory-limit: + cachingOptions: + enableCache: true + componentRef: + name: comp-memory-limit + taskInfo: + name: memory-limit + sum-numbers: + cachingOptions: + enableCache: true + componentRef: + name: comp-sum-numbers + inputs: + parameters: + a: + runtimeValue: + constant: 1.0 + accelerator_count: + runtimeValue: + constant: '{{$.inputs.parameters[''pipelinechannel--accelerator-limit-Output'']}}' + accelerator_type: + runtimeValue: + constant: '{{$.inputs.parameters[''pipelinechannel--accelerator-type-Output'']}}' + b: + runtimeValue: + constant: 2.0 + cpu_limit: + runtimeValue: + constant: '{{$.inputs.parameters[''pipelinechannel--cpu-limit-Output'']}}' + memory_limit: + runtimeValue: + constant: '{{$.inputs.parameters[''pipelinechannel--memory-limit-Output'']}}' + pipelinechannel--accelerator-limit-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: accelerator-limit + pipelinechannel--accelerator-type-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: accelerator-type + pipelinechannel--cpu-limit-Output: + 
taskOutputParameter: + outputParameterKey: Output + producerTask: cpu-limit + pipelinechannel--memory-limit-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: memory-limit + taskInfo: + name: sum-numbers +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/sdk/python/test_data/pipelines/pipeline_with_task_final_status.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_task_final_status.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_task_final_status.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_task_final_status.py diff --git a/sdk/runtime_tests/test_data/pipeline_with_task_final_status.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_task_final_status.yaml similarity index 100% rename from sdk/runtime_tests/test_data/pipeline_with_task_final_status.yaml rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_task_final_status.yaml diff --git a/sdk/python/test_data/pipelines/pipeline_with_task_final_status_yaml.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_task_final_status_yaml.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_task_final_status_yaml.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_task_final_status_yaml.py diff --git a/sdk/python/test_data/pipelines/pipeline_with_task_using_ignore_upstream_failure.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_task_using_ignore_upstream_failure.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_task_using_ignore_upstream_failure.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_task_using_ignore_upstream_failure.py diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_task_using_ignore_upstream_failure.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_task_using_ignore_upstream_failure.yaml new file mode 100644 index 00000000000..3f8294172b3 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_task_using_ignore_upstream_failure.yaml @@ -0,0 +1,124 @@ +# PIPELINE DEFINITION +# Name: my-pipeline +# Inputs: +# sample_input: str [Default: 'message'] +components: + comp-fail-op: + executorLabel: exec-fail-op + inputDefinitions: + parameters: + message: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-print-op: + executorLabel: exec-print-op + inputDefinitions: + parameters: + message: + defaultValue: default + isOptional: true + parameterType: STRING +deploymentSpec: + executors: + exec-fail-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - fail_op + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef fail_op(message: str) -> str:\n \"\"\"Fails.\"\"\"\n import\ + \ sys\n print(message)\n sys.exit(1)\n return message\n\n" + image: python:3.9 + exec-print-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - print_op + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef print_op(message: str = 'default'):\n \"\"\"Prints a message.\"\ + \"\"\n print(message)\n\n" + image: python:3.9 +pipelineInfo: + name: my-pipeline +root: + dag: + tasks: + fail-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-fail-op + inputs: + parameters: + message: + componentInputParameter: sample_input + taskInfo: + name: fail-op + print-op: + cachingOptions: + enableCache: true + componentRef: + name: comp-print-op + dependentTasks: + - fail-op + inputs: + parameters: + message: + taskOutputParameter: + outputParameterKey: Output + producerTask: fail-op + taskInfo: + name: print-op + triggerPolicy: + strategy: ALL_UPSTREAM_TASKS_COMPLETED + inputDefinitions: + parameters: + sample_input: + defaultValue: message + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_utils.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_utils.py new file mode 100644 index 00000000000..211e9bf8356 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_utils.py @@ -0,0 +1,42 @@ +# Copyright 2025 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from kfp import compiler, dsl + + +def util_func(msg: str) -> str: + return f"{msg} from util_func" + + +def util_func2(msg: str) -> str: + return f"{msg} from util_func2" + + +@dsl.component( + additional_funcs=[util_func, util_func2]) +def echo(msg: str): + assert util_func(msg) == f"{msg} from util_func" + assert util_func2(msg) == f"{msg} from util_func2" + + +@dsl.pipeline( + name="pipeline-with-utils", description="A simple hello world pipeline") +def pipeline_with_utils(msg: str = "Hello, World!"): + echo(msg=msg) + + +if __name__ == "__main__": + compiler.Compiler().compile( + pipeline_func=pipeline_with_utils, + package_path=__file__.replace(".py", ".yaml"), + ) diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_utils.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_utils.yaml new file mode 100644 index 00000000000..2b56e016542 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_utils.yaml @@ -0,0 +1,70 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-utils +# Description: A simple hello world pipeline +# Inputs: +# msg: str [Default: 'Hello, World!'] +components: + comp-echo: + executorLabel: exec-echo + inputDefinitions: + parameters: + msg: + parameterType: STRING +deploymentSpec: + executors: + exec-echo: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - echo + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef util_func(msg: str) -> str:\n return f\"{msg} from util_func\"\ + \n\n\ndef util_func2(msg: str) -> str:\n return f\"{msg} from util_func2\"\ + \n\n\ndef echo(msg: str):\n assert util_func(msg) == f\"{msg} from util_func\"\ + \n assert util_func2(msg) == f\"{msg} from util_func2\"\n\n" + image: python:3.9 +pipelineInfo: + description: A simple hello world pipeline + name: pipeline-with-utils +root: + dag: + tasks: + echo: + cachingOptions: + enableCache: true + componentRef: + name: comp-echo + inputs: + parameters: + msg: + componentInputParameter: msg + taskInfo: + name: echo + inputDefinitions: + parameters: + msg: + defaultValue: Hello, World! 
+ isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/sdk/python/test_data/pipelines/pipeline_with_various_io_types.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_various_io_types.py similarity index 100% rename from sdk/python/test_data/pipelines/pipeline_with_various_io_types.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_various_io_types.py diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_various_io_types.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_various_io_types.yaml new file mode 100644 index 00000000000..728a44e43f2 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_various_io_types.yaml @@ -0,0 +1,215 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-various-types +# Inputs: +# input1: str +# input3: system.Artifact +# input4: str [Default: ''] +components: + comp-downstream: + executorLabel: exec-downstream + inputDefinitions: + artifacts: + input_b: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 + input_c: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + input_d: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 + input_e: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + input_f: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + input_g: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + input_h: + artifactType: + schemaTitle: system.HTML + schemaVersion: 0.0.1 + input_i: + artifactType: + schemaTitle: google.BQMLModel + schemaVersion: 0.0.1 + parameters: + input_a: + parameterType: NUMBER_INTEGER + comp-upstream: + executorLabel: exec-upstream + inputDefinitions: + artifacts: + input_3: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + parameters: + input_1: + parameterType: STRING + input_2: + parameterType: NUMBER_DOUBLE + input_4: + parameterType: STRING + outputDefinitions: + artifacts: + output_2: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 + output_3: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + output_4: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 + output_5: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + output_6: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + output_7: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + output_8: + artifactType: + schemaTitle: system.HTML + schemaVersion: 0.0.1 + output_9: + artifactType: + schemaTitle: google.BQMLModel + schemaVersion: 0.0.1 + parameters: + output_1: + parameterType: NUMBER_INTEGER +deploymentSpec: + executors: + exec-downstream: + container: + args: + - '{{$.inputs.parameters[''input_a'']}}' + - '{{$.inputs.artifacts[''input_b''].uri}}' + - '{{$.inputs.artifacts[''input_c''].path}}' + - '{{$.inputs.artifacts[''input_d''].uri}}' + - '{{$.inputs.artifacts[''input_e''].uri}}' + - '{{$.inputs.artifacts[''input_f''].path}}' + - '{{$.inputs.artifacts[''input_g''].path}}' + - '{{$.inputs.artifacts[''input_h''].path}}' + image: gcr.io/image + exec-upstream: + container: + args: + - '{{$.inputs.parameters[''input_1'']}}' + - '{{$.inputs.parameters[''input_2'']}}' + - '{{$.inputs.artifacts[''input_3''].path}}' + - '{{$.inputs.parameters[''input_4'']}}' + - '{{$.outputs.parameters[''output_1''].output_file}}' + - '{{$.outputs.artifacts[''output_2''].uri}}' + - '{{$.outputs.artifacts[''output_3''].path}}' + - 
'{{$.outputs.artifacts[''output_4''].uri}}' + - '{{$.outputs.artifacts[''output_5''].uri}}' + - '{{$.outputs.artifacts[''output_6''].path}}' + - '{{$.outputs.artifacts[''output_7''].path}}' + - '{{$.outputs.artifacts[''output_8''].path}}' + image: gcr.io/image +pipelineInfo: + name: pipeline-with-various-types +root: + dag: + tasks: + downstream: + cachingOptions: + enableCache: true + componentRef: + name: comp-downstream + dependentTasks: + - upstream + inputs: + artifacts: + input_b: + taskOutputArtifact: + outputArtifactKey: output_2 + producerTask: upstream + input_c: + taskOutputArtifact: + outputArtifactKey: output_3 + producerTask: upstream + input_d: + taskOutputArtifact: + outputArtifactKey: output_4 + producerTask: upstream + input_e: + taskOutputArtifact: + outputArtifactKey: output_5 + producerTask: upstream + input_f: + taskOutputArtifact: + outputArtifactKey: output_6 + producerTask: upstream + input_g: + taskOutputArtifact: + outputArtifactKey: output_7 + producerTask: upstream + input_h: + taskOutputArtifact: + outputArtifactKey: output_8 + producerTask: upstream + input_i: + taskOutputArtifact: + outputArtifactKey: output_9 + producerTask: upstream + parameters: + input_a: + taskOutputParameter: + outputParameterKey: output_1 + producerTask: upstream + taskInfo: + name: downstream + upstream: + cachingOptions: + enableCache: true + componentRef: + name: comp-upstream + inputs: + artifacts: + input_3: + componentInputArtifact: input3 + parameters: + input_1: + componentInputParameter: input1 + input_2: + runtimeValue: + constant: 3.1415926 + input_4: + componentInputParameter: input4 + taskInfo: + name: upstream + inputDefinitions: + artifacts: + input3: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + parameters: + input1: + parameterType: STRING + input4: + defaultValue: '' + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/samples/v2/pipeline_with_volume.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_volume.py similarity index 100% rename from samples/v2/pipeline_with_volume.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_volume.py diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_volume.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_volume.yaml new file mode 100644 index 00000000000..d7b63f2f86b --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_volume.yaml @@ -0,0 +1,242 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-volume +components: + comp-consumer: + executorLabel: exec-consumer + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-createpvc: + executorLabel: exec-createpvc + inputDefinitions: + parameters: + access_modes: + description: 'AccessModes to request for the provisioned PVC. May + + be one or more of ``''ReadWriteOnce''``, ``''ReadOnlyMany''``, ``''ReadWriteMany''``, + or + + ``''ReadWriteOncePod''``. Corresponds to `PersistentVolumeClaim.spec.accessModes + `_.' + parameterType: LIST + annotations: + description: Annotations for the PVC's metadata. Corresponds to `PersistentVolumeClaim.metadata.annotations + `_. + isOptional: true + parameterType: STRUCT + pvc_name: + description: 'Name of the PVC. Corresponds to `PersistentVolumeClaim.metadata.name + `_. + Only one of ``pvc_name`` and ``pvc_name_suffix`` can + + be provided.' 
+ isOptional: true + parameterType: STRING + pvc_name_suffix: + description: 'Prefix to use for a dynamically generated name, which + + will take the form ``-``. Only one + + of ``pvc_name`` and ``pvc_name_suffix`` can be provided.' + isOptional: true + parameterType: STRING + size: + description: The size of storage requested by the PVC that will be provisioned. + For example, ``'5Gi'``. Corresponds to `PersistentVolumeClaim.spec.resources.requests.storage + `_. + parameterType: STRING + storage_class_name: + defaultValue: '' + description: 'Name of StorageClass from which to provision the PV + + to back the PVC. ``None`` indicates to use the cluster''s default + + storage_class_name. Set to ``''''`` for a statically specified PVC.' + isOptional: true + parameterType: STRING + volume_name: + description: 'Pre-existing PersistentVolume that should back the + + provisioned PersistentVolumeClaim. Used for statically + + specified PV only. Corresponds to `PersistentVolumeClaim.spec.volumeName + `_.' + isOptional: true + parameterType: STRING + outputDefinitions: + parameters: + name: + parameterType: STRING + comp-deletepvc: + executorLabel: exec-deletepvc + inputDefinitions: + parameters: + pvc_name: + description: Name of the PVC to delete. Supports passing a runtime-generated + name, such as a name provided by ``kubernetes.CreatePvcOp().outputs['name']``. + parameterType: STRING + comp-producer: + executorLabel: exec-producer + outputDefinitions: + parameters: + Output: + parameterType: STRING +deploymentSpec: + executors: + exec-consumer: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - consumer + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef consumer() -> str:\n with open('/data/file.txt', 'r') as file:\n\ + \ content = file.read()\n print(content)\n return content\n\ + \n" + image: python:3.9 + exec-createpvc: + container: + image: argostub/createpvc + exec-deletepvc: + container: + image: argostub/deletepvc + exec-producer: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - producer + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef producer() -> str:\n with open('/data/file.txt', 'w') as file:\n\ + \ file.write('Hello world')\n with open('/data/file.txt', 'r')\ + \ as file:\n content = file.read()\n print(content)\n return\ + \ content\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-with-volume +root: + dag: + tasks: + consumer: + cachingOptions: + enableCache: true + componentRef: + name: comp-consumer + dependentTasks: + - createpvc + - producer + taskInfo: + name: consumer + createpvc: + cachingOptions: + enableCache: true + componentRef: + name: comp-createpvc + inputs: + parameters: + access_modes: + runtimeValue: + constant: + - ReadWriteOnce + pvc_name_suffix: + runtimeValue: + constant: -my-pvc + size: + runtimeValue: + constant: 5Mi + storage_class_name: + runtimeValue: + constant: standard + taskInfo: + name: createpvc + deletepvc: + cachingOptions: + enableCache: true + componentRef: + name: comp-deletepvc + dependentTasks: + - consumer + - createpvc + inputs: + parameters: + pvc_name: + taskOutputParameter: + outputParameterKey: name + producerTask: createpvc + taskInfo: + name: deletepvc + producer: + cachingOptions: + enableCache: true + componentRef: + name: comp-producer + dependentTasks: + - createpvc + taskInfo: + name: producer +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 +--- +platforms: + kubernetes: + deploymentSpec: + executors: + exec-consumer: + pvcMount: + - mountPath: /data + pvcNameParameter: + taskOutputParameter: + outputParameterKey: name + producerTask: createpvc + taskOutputParameter: + outputParameterKey: name + producerTask: createpvc + exec-producer: + pvcMount: + - mountPath: /data + pvcNameParameter: + taskOutputParameter: + outputParameterKey: name + producerTask: createpvc + taskOutputParameter: + outputParameterKey: name + producerTask: createpvc diff --git a/samples/v2/pipeline_with_volume_no_cache.py b/test_data/sdk_compiled_pipelines/valid/pipeline_with_volume_no_cache.py similarity index 100% rename from samples/v2/pipeline_with_volume_no_cache.py rename to test_data/sdk_compiled_pipelines/valid/pipeline_with_volume_no_cache.py diff --git a/test_data/sdk_compiled_pipelines/valid/pipeline_with_volume_no_cache.yaml b/test_data/sdk_compiled_pipelines/valid/pipeline_with_volume_no_cache.yaml new file mode 100644 index 00000000000..3496c4ca9a4 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pipeline_with_volume_no_cache.yaml @@ -0,0 +1,240 @@ +# PIPELINE DEFINITION +# Name: pipeline-with-volume-no-cache +components: + comp-consumer: + executorLabel: exec-consumer + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-createpvc: + executorLabel: exec-createpvc + inputDefinitions: + parameters: + access_modes: + description: 'AccessModes to request for the provisioned PVC. 
May + + be one or more of ``''ReadWriteOnce''``, ``''ReadOnlyMany''``, ``''ReadWriteMany''``, + or + + ``''ReadWriteOncePod''``. Corresponds to `PersistentVolumeClaim.spec.accessModes + `_.' + parameterType: LIST + annotations: + description: Annotations for the PVC's metadata. Corresponds to `PersistentVolumeClaim.metadata.annotations + `_. + isOptional: true + parameterType: STRUCT + pvc_name: + description: 'Name of the PVC. Corresponds to `PersistentVolumeClaim.metadata.name + `_. + Only one of ``pvc_name`` and ``pvc_name_suffix`` can + + be provided.' + isOptional: true + parameterType: STRING + pvc_name_suffix: + description: 'Prefix to use for a dynamically generated name, which + + will take the form ``-``. Only one + + of ``pvc_name`` and ``pvc_name_suffix`` can be provided.' + isOptional: true + parameterType: STRING + size: + description: The size of storage requested by the PVC that will be provisioned. + For example, ``'5Gi'``. Corresponds to `PersistentVolumeClaim.spec.resources.requests.storage + `_. + parameterType: STRING + storage_class_name: + defaultValue: '' + description: 'Name of StorageClass from which to provision the PV + + to back the PVC. ``None`` indicates to use the cluster''s default + + storage_class_name. Set to ``''''`` for a statically specified PVC.' + isOptional: true + parameterType: STRING + volume_name: + description: 'Pre-existing PersistentVolume that should back the + + provisioned PersistentVolumeClaim. Used for statically + + specified PV only. Corresponds to `PersistentVolumeClaim.spec.volumeName + `_.' + isOptional: true + parameterType: STRING + outputDefinitions: + parameters: + name: + parameterType: STRING + comp-deletepvc: + executorLabel: exec-deletepvc + inputDefinitions: + parameters: + pvc_name: + description: Name of the PVC to delete. Supports passing a runtime-generated + name, such as a name provided by ``kubernetes.CreatePvcOp().outputs['name']``. + parameterType: STRING + comp-producer: + executorLabel: exec-producer + outputDefinitions: + parameters: + Output: + parameterType: STRING +deploymentSpec: + executors: + exec-consumer: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - consumer + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef consumer() -> str:\n with open('/data/file.txt', 'r') as file:\n\ + \ content = file.read()\n print(content)\n return content\n\ + \n" + image: python:3.9 + exec-createpvc: + container: + image: argostub/createpvc + exec-deletepvc: + container: + image: argostub/deletepvc + exec-producer: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - producer + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef producer() -> str:\n with open('/data/file.txt', 'w') as file:\n\ + \ file.write('Hello world')\n with open('/data/file.txt', 'r')\ + \ as file:\n content = file.read()\n print(content)\n return\ + \ content\n\n" + image: python:3.9 +pipelineInfo: + name: pipeline-with-volume-no-cache +root: + dag: + tasks: + consumer: + cachingOptions: + enableCache: true + componentRef: + name: comp-consumer + dependentTasks: + - createpvc + - producer + taskInfo: + name: consumer + createpvc: + cachingOptions: {} + componentRef: + name: comp-createpvc + inputs: + parameters: + access_modes: + runtimeValue: + constant: + - ReadWriteOnce + pvc_name_suffix: + runtimeValue: + constant: -my-pvc + size: + runtimeValue: + constant: 5Mi + storage_class_name: + runtimeValue: + constant: standard + taskInfo: + name: createpvc + deletepvc: + cachingOptions: {} + componentRef: + name: comp-deletepvc + dependentTasks: + - consumer + - createpvc + inputs: + parameters: + pvc_name: + taskOutputParameter: + outputParameterKey: name + producerTask: createpvc + taskInfo: + name: deletepvc + producer: + cachingOptions: + enableCache: true + componentRef: + name: comp-producer + dependentTasks: + - createpvc + taskInfo: + name: producer +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 +--- +platforms: + kubernetes: + deploymentSpec: + executors: + exec-consumer: + pvcMount: + - mountPath: /data + pvcNameParameter: + taskOutputParameter: + outputParameterKey: name + producerTask: createpvc + taskOutputParameter: + outputParameterKey: name + producerTask: createpvc + exec-producer: + pvcMount: + - mountPath: /data + pvcNameParameter: + taskOutputParameter: + outputParameterKey: name + producerTask: createpvc + taskOutputParameter: + outputParameterKey: name + producerTask: createpvc diff --git a/sdk/python/test_data/components/preprocess.py b/test_data/sdk_compiled_pipelines/valid/preprocess.py similarity index 100% rename from sdk/python/test_data/components/preprocess.py rename to test_data/sdk_compiled_pipelines/valid/preprocess.py diff --git a/test_data/sdk_compiled_pipelines/valid/preprocess.yaml b/test_data/sdk_compiled_pipelines/valid/preprocess.yaml new file mode 100644 index 00000000000..f0e808fc3b5 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/preprocess.yaml @@ -0,0 +1,176 @@ +# PIPELINE DEFINITION +# Name: preprocess +# Description: Dummy preprocessing step. 
+# Inputs: +# input_dict_parameter: dict +# input_list_parameter: list +# message: str +# Outputs: +# output_bool_parameter_path: bool +# output_dataset_one: system.Dataset +# output_dataset_two_path: system.Dataset +# output_dict_parameter_path: dict +# output_list_parameter_path: list +# output_parameter_path: str +components: + comp-preprocess: + executorLabel: exec-preprocess + inputDefinitions: + parameters: + input_dict_parameter: + parameterType: STRUCT + input_list_parameter: + parameterType: LIST + message: + parameterType: STRING + outputDefinitions: + artifacts: + output_dataset_one: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + output_dataset_two_path: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + parameters: + output_bool_parameter_path: + parameterType: BOOLEAN + output_dict_parameter_path: + parameterType: STRUCT + output_list_parameter_path: + parameterType: LIST + output_parameter_path: + parameterType: STRING +deploymentSpec: + executors: + exec-preprocess: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - preprocess + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef preprocess(\n # An input parameter of type string.\n message:\ + \ str,\n # An input parameter of type dict.\n input_dict_parameter:\ + \ Dict[str, int],\n # An input parameter of type list.\n input_list_parameter:\ + \ List[str],\n # Use Output[T] to get a metadata-rich handle to the output\ + \ artifact\n # of type `Dataset`.\n output_dataset_one: Output[Dataset],\n\ + \ # A locally accessible filepath for another output artifact of type\n\ + \ # `Dataset`.\n output_dataset_two_path: OutputPath('Dataset'),\n\ + \ # A locally accessible filepath for an output parameter of type string.\n\ + \ output_parameter_path: OutputPath(str),\n # A locally accessible\ + \ filepath for an output parameter of type bool.\n output_bool_parameter_path:\ + \ OutputPath(bool),\n # A locally accessible filepath for an output parameter\ + \ of type dict.\n output_dict_parameter_path: OutputPath(Dict[str, int]),\n\ + \ # A locally accessible filepath for an output parameter of type list.\n\ + \ output_list_parameter_path: OutputPath(List[str]),\n):\n \"\"\"\ + Dummy preprocessing step.\"\"\"\n\n # Use Dataset.path to access a local\ + \ file path for writing.\n # One can also use Dataset.uri to access the\ + \ actual URI file path.\n with open(output_dataset_one.path, 'w') as\ + \ f:\n f.write(message)\n\n # OutputPath is used to just pass\ + \ the local file path of the output artifact\n # to the function.\n \ + \ with open(output_dataset_two_path, 'w') as f:\n f.write(message)\n\ + \n with open(output_parameter_path, 'w') as f:\n f.write(message)\n\ + \n with open(output_bool_parameter_path, 'w') as f:\n f.write(\n\ + \ str(True)) # use either `str()` or `json.dumps()` for bool\ + \ values.\n\n import 
json\n with open(output_dict_parameter_path,\ + \ 'w') as f:\n f.write(json.dumps(input_dict_parameter))\n\n with\ + \ open(output_list_parameter_path, 'w') as f:\n f.write(json.dumps(input_list_parameter))\n\ + \n" + image: python:3.9 +pipelineInfo: + name: preprocess +root: + dag: + outputs: + artifacts: + output_dataset_one: + artifactSelectors: + - outputArtifactKey: output_dataset_one + producerSubtask: preprocess + output_dataset_two_path: + artifactSelectors: + - outputArtifactKey: output_dataset_two_path + producerSubtask: preprocess + parameters: + output_bool_parameter_path: + valueFromParameter: + outputParameterKey: output_bool_parameter_path + producerSubtask: preprocess + output_dict_parameter_path: + valueFromParameter: + outputParameterKey: output_dict_parameter_path + producerSubtask: preprocess + output_list_parameter_path: + valueFromParameter: + outputParameterKey: output_list_parameter_path + producerSubtask: preprocess + output_parameter_path: + valueFromParameter: + outputParameterKey: output_parameter_path + producerSubtask: preprocess + tasks: + preprocess: + cachingOptions: + enableCache: true + componentRef: + name: comp-preprocess + inputs: + parameters: + input_dict_parameter: + componentInputParameter: input_dict_parameter + input_list_parameter: + componentInputParameter: input_list_parameter + message: + componentInputParameter: message + taskInfo: + name: preprocess + inputDefinitions: + parameters: + input_dict_parameter: + parameterType: STRUCT + input_list_parameter: + parameterType: LIST + message: + parameterType: STRING + outputDefinitions: + artifacts: + output_dataset_one: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + output_dataset_two_path: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + parameters: + output_bool_parameter_path: + parameterType: BOOLEAN + output_dict_parameter_path: + parameterType: STRUCT + output_list_parameter_path: + parameterType: LIST + output_parameter_path: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/pvc_mount.py b/test_data/sdk_compiled_pipelines/valid/pvc_mount.py new file mode 100644 index 00000000000..6fab8db398a --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pvc_mount.py @@ -0,0 +1,52 @@ +# Copyright 2025 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Small pipeline that mounts an existing PVC (by name) and exercises caching.""" + +from kfp import dsl +from kfp import kubernetes + + +@dsl.component +def producer() -> str: + with open('/data/file.txt', 'w') as f: + f.write('hello') + with open('/data/file.txt', 'r') as f: + return f.read() + + +@dsl.component +def consumer() -> None: + with open('/data/file.txt', 'r') as f: + print(f.read()) + + +@dsl.pipeline(name='pvc-mount-pipeline') +def pvc_mount_pipeline(pvc_name: str): + p = producer() + c = consumer().after(p) + + # Mount the provided PVC name to both tasks at /data + kubernetes.mount_pvc(p, pvc_name=pvc_name, mount_path='/data') + kubernetes.mount_pvc(c, pvc_name=pvc_name, mount_path='/data') + + +if __name__ == '__main__': + from kfp import compiler + + compiler.Compiler().compile( + pipeline_func=pvc_mount_pipeline, + package_path=__file__.replace('.py', '.yaml'), + ) + + diff --git a/test_data/sdk_compiled_pipelines/valid/pvc_mount.yaml b/test_data/sdk_compiled_pipelines/valid/pvc_mount.yaml new file mode 100644 index 00000000000..73137fc1a57 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pvc_mount.yaml @@ -0,0 +1,118 @@ +# PIPELINE DEFINITION +# Name: pvc-mount-pipeline +# Inputs: +# pvc_name: str +components: + comp-consumer: + executorLabel: exec-consumer + comp-producer: + executorLabel: exec-producer + outputDefinitions: + parameters: + Output: + parameterType: STRING +deploymentSpec: + executors: + exec-consumer: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - consumer + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef consumer() -> None:\n with open('/data/file.txt', 'r') as\ + \ f:\n print(f.read())\n\n" + image: python:3.9 + exec-producer: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - producer + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.2'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef producer() -> str:\n with open('/data/file.txt', 'w') as f:\n\ + \ f.write('hello')\n with open('/data/file.txt', 'r') as f:\n\ + \ return f.read()\n\n" + image: python:3.9 +pipelineInfo: + name: pvc-mount-pipeline +root: + dag: + tasks: + consumer: + cachingOptions: + enableCache: true + componentRef: + name: comp-consumer + dependentTasks: + - producer + taskInfo: + name: consumer + producer: + cachingOptions: + enableCache: true + componentRef: + name: comp-producer + taskInfo: + name: producer + inputDefinitions: + parameters: + pvc_name: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.2 +--- +platforms: + kubernetes: + deploymentSpec: + executors: + exec-consumer: + pvcMount: + - componentInputParameter: pvc_name + mountPath: /data + pvcNameParameter: + componentInputParameter: pvc_name + exec-producer: + pvcMount: + - componentInputParameter: pvc_name + mountPath: /data + pvcNameParameter: + componentInputParameter: pvc_name diff --git a/sdk/python/test_data/pipelines/pythonic_artifact_with_single_return.py b/test_data/sdk_compiled_pipelines/valid/pythonic_artifact_with_single_return.py similarity index 87% rename from sdk/python/test_data/pipelines/pythonic_artifact_with_single_return.py rename to test_data/sdk_compiled_pipelines/valid/pythonic_artifact_with_single_return.py index 31353e852b7..c88ce2b86e4 100644 --- a/sdk/python/test_data/pipelines/pythonic_artifact_with_single_return.py +++ b/test_data/sdk_compiled_pipelines/valid/pythonic_artifact_with_single_return.py @@ -11,13 +11,18 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import os from kfp import dsl from kfp.dsl import Dataset from kfp.dsl import Model +PACKAGES_TO_INSTALL = ['dill==0.3.7'] +if 'KFP_PIPELINE_SPEC_PACKAGE_PATH' in os.environ: + PACKAGES_TO_INSTALL.append(os.environ['KFP_PIPELINE_SPEC_PACKAGE_PATH']) -@dsl.component(packages_to_install=['dill==0.3.7']) + +@dsl.component(packages_to_install=PACKAGES_TO_INSTALL) def make_language_model(text_dataset: Dataset) -> Model: # dill allows pickling objects belonging to a function's local namespace import dill diff --git a/test_data/sdk_compiled_pipelines/valid/pythonic_artifact_with_single_return.yaml b/test_data/sdk_compiled_pipelines/valid/pythonic_artifact_with_single_return.yaml new file mode 100644 index 00000000000..d8d354265c4 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pythonic_artifact_with_single_return.yaml @@ -0,0 +1,123 @@ +# PIPELINE DEFINITION +# Name: make-language-model-pipeline +# Outputs: +# Output: system.Model +components: + comp-importer: + executorLabel: exec-importer + inputDefinitions: + parameters: + uri: + parameterType: STRING + outputDefinitions: + artifacts: + artifact: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-make-language-model: + executorLabel: exec-make-language-model + inputDefinitions: + artifacts: + text_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-importer: + importer: + artifactUri: + constant: gs://ml-pipeline-playground/shakespeare1.txt + metadata: + key: value + typeSchema: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + exec-make-language-model: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - make_language_model + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'dill==0.3.7'\ + \ && python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef make_language_model(text_dataset: Dataset) -> Model:\n # dill\ + \ allows pickling objects belonging to a function's local namespace\n \ + \ import dill\n\n with open(text_dataset.path) as f:\n text =\ + \ f.read()\n\n # insert train on text here #\n\n def dummy_model(x:\ + \ str) -> str:\n return x\n\n model = Model(\n uri=dsl.get_uri(suffix='model'),\n\ + \ metadata={'data': text_dataset.name},\n )\n\n with open(model.path,\ + \ 'wb') as f:\n dill.dump(dummy_model, f)\n\n return model\n\n" + image: python:3.9 +pipelineInfo: + name: make-language-model-pipeline +root: + dag: + outputs: + artifacts: + Output: + artifactSelectors: + - outputArtifactKey: Output + producerSubtask: make-language-model + tasks: + importer: + cachingOptions: + enableCache: true + componentRef: + name: comp-importer + inputs: + parameters: + uri: + runtimeValue: + constant: gs://ml-pipeline-playground/shakespeare1.txt + taskInfo: + name: importer + make-language-model: + cachingOptions: + enableCache: true + componentRef: + name: comp-make-language-model + dependentTasks: + - importer + inputs: + artifacts: + text_dataset: + taskOutputArtifact: + outputArtifactKey: artifact + producerTask: importer + taskInfo: + name: make-language-model + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Model + schemaVersion: 0.0.1 +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/sdk/python/test_data/pipelines/pythonic_artifacts_with_list_of_artifacts.py b/test_data/sdk_compiled_pipelines/valid/pythonic_artifacts_with_list_of_artifacts.py similarity index 100% rename from sdk/python/test_data/pipelines/pythonic_artifacts_with_list_of_artifacts.py rename to test_data/sdk_compiled_pipelines/valid/pythonic_artifacts_with_list_of_artifacts.py diff --git a/test_data/sdk_compiled_pipelines/valid/pythonic_artifacts_with_list_of_artifacts.yaml b/test_data/sdk_compiled_pipelines/valid/pythonic_artifacts_with_list_of_artifacts.yaml new file mode 100644 index 00000000000..966c6432bb4 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pythonic_artifacts_with_list_of_artifacts.yaml @@ -0,0 +1,187 @@ +# PIPELINE DEFINITION +# Name: make-and-join-datasets +# Inputs: +# texts: list [Default: ['Hello', ',', ' ', 'world!']] +# Outputs: +# Output: system.Dataset +components: + comp-for-loop-1: + dag: + outputs: + artifacts: + pipelinechannel--make-dataset-Output: + artifactSelectors: + - outputArtifactKey: Output + producerSubtask: make-dataset + tasks: + make-dataset: + cachingOptions: + enableCache: true + componentRef: + name: comp-make-dataset + inputs: + parameters: + text: + componentInputParameter: pipelinechannel--texts-loop-item + taskInfo: + name: make-dataset + inputDefinitions: + parameters: + pipelinechannel--texts: + parameterType: LIST + 
pipelinechannel--texts-loop-item: + parameterType: STRING + outputDefinitions: + artifacts: + pipelinechannel--make-dataset-Output: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isArtifactList: true + comp-join-datasets: + executorLabel: exec-join-datasets + inputDefinitions: + artifacts: + datasets: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + isArtifactList: true + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-make-dataset: + executorLabel: exec-make-dataset + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-join-datasets: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - join_datasets + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef join_datasets(datasets: List[Dataset]) -> Dataset:\n texts\ + \ = []\n for dataset in datasets:\n with open(dataset.path, 'r')\ + \ as f:\n texts.append(f.read())\n\n return ''.join(texts)\n\ + \n" + image: python:3.9 + exec-make-dataset: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - make_dataset + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef make_dataset(text: str) -> Dataset:\n dataset = Dataset(uri=dsl.get_uri(),\ + \ metadata={'length': len(text)})\n with open(dataset.path, 'w') as f:\n\ + \ f.write(text)\n return dataset\n\n" + image: python:3.9 +pipelineInfo: + name: make-and-join-datasets +root: + dag: + outputs: + artifacts: + Output: + artifactSelectors: + - outputArtifactKey: Output + producerSubtask: join-datasets + tasks: + for-loop-1: + componentRef: + name: comp-for-loop-1 + inputs: + parameters: + pipelinechannel--texts: + componentInputParameter: texts + parameterIterator: + itemInput: pipelinechannel--texts-loop-item + items: + inputParameter: pipelinechannel--texts + taskInfo: + name: for-loop-1 + join-datasets: + cachingOptions: + enableCache: true + componentRef: + name: comp-join-datasets + dependentTasks: + - for-loop-1 + inputs: + artifacts: + datasets: + taskOutputArtifact: + outputArtifactKey: pipelinechannel--make-dataset-Output + producerTask: for-loop-1 + taskInfo: + name: join-datasets + inputDefinitions: + parameters: + texts: + defaultValue: + - Hello + - ',' + - ' ' + - world! + isOptional: true + parameterType: LIST + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/pythonic_artifacts_with_multiple_returns.py b/test_data/sdk_compiled_pipelines/valid/pythonic_artifacts_with_multiple_returns.py similarity index 100% rename from sdk/python/test_data/pipelines/pythonic_artifacts_with_multiple_returns.py rename to test_data/sdk_compiled_pipelines/valid/pythonic_artifacts_with_multiple_returns.py diff --git a/test_data/sdk_compiled_pipelines/valid/pythonic_artifacts_with_multiple_returns.yaml b/test_data/sdk_compiled_pipelines/valid/pythonic_artifacts_with_multiple_returns.yaml new file mode 100644 index 00000000000..723cd964087 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/pythonic_artifacts_with_multiple_returns.yaml @@ -0,0 +1,184 @@ +# PIPELINE DEFINITION +# Name: split-datasets-and-return-first +# Outputs: +# Output: system.Dataset +components: + comp-dataset-splitter: + executorLabel: exec-dataset-splitter + inputDefinitions: + artifacts: + in_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + dataset1: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + dataset2: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-make-dataset: + executorLabel: exec-make-dataset + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-splitter-pipeline: + dag: + outputs: + artifacts: + dataset1: + artifactSelectors: + - outputArtifactKey: dataset1 + producerSubtask: dataset-splitter + dataset2: + artifactSelectors: + - 
outputArtifactKey: dataset1 + producerSubtask: dataset-splitter + tasks: + dataset-splitter: + cachingOptions: + enableCache: true + componentRef: + name: comp-dataset-splitter + inputs: + artifacts: + in_dataset: + componentInputArtifact: in_dataset + taskInfo: + name: dataset-splitter + inputDefinitions: + artifacts: + in_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + dataset1: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + dataset2: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-dataset-splitter: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - dataset_splitter + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef dataset_splitter(\n in_dataset: Dataset\n) -> NamedTuple(\n\ + \ 'outputs',\n dataset1=Dataset,\n dataset2=Dataset,\n\ + ):\n\n with open(in_dataset.path) as f:\n in_data = f.read()\n\ + \n out_data1, out_data2 = in_data[:len(in_data) // 2], in_data[len(in_data)\ + \ //\n 2:]\n\ + \n dataset1 = Dataset(\n uri=dsl.get_uri(suffix='dataset1'),\n\ + \ metadata={'original_data': in_dataset.name},\n )\n with open(dataset1.path,\ + \ 'w') as f:\n f.write(out_data1)\n\n dataset2 = Dataset(\n \ + \ uri=dsl.get_uri(suffix='dataset2'),\n metadata={'original_data':\ + \ in_dataset.name},\n )\n with open(dataset2.path, 'w') as f:\n \ + \ f.write(out_data2)\n\n outputs = NamedTuple(\n 'outputs',\n\ + \ dataset1=Dataset,\n dataset2=Dataset,\n )\n return\ + \ outputs(dataset1=dataset1, dataset2=dataset2)\n\n" + image: python:3.9 + exec-make-dataset: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - make_dataset + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef make_dataset() -> Artifact:\n artifact = Artifact(uri=dsl.get_uri('dataset'))\n\ + \ with open(artifact.path, 'w') as f:\n f.write('Hello, world')\n\ + \ return artifact\n\n" + image: python:3.9 +pipelineInfo: + name: split-datasets-and-return-first +root: + dag: + outputs: + artifacts: + Output: + artifactSelectors: + - outputArtifactKey: dataset1 + producerSubtask: splitter-pipeline + tasks: + make-dataset: + cachingOptions: + enableCache: true + componentRef: + name: comp-make-dataset + taskInfo: + name: make-dataset + splitter-pipeline: + cachingOptions: + enableCache: true + componentRef: + name: comp-splitter-pipeline + dependentTasks: + - make-dataset + inputs: + artifacts: + in_dataset: + taskOutputArtifact: + outputArtifactKey: Output + producerTask: make-dataset + taskInfo: + name: splitter-pipeline + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/ray_integration.py b/test_data/sdk_compiled_pipelines/valid/ray_integration.py new file mode 100644 index 00000000000..d59b619a902 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/ray_integration.py @@ -0,0 +1,76 @@ +from kfp import compiler, dsl + +common_base_image = ( + "registry.redhat.io/ubi8/python-39@sha256:3523b184212e1f2243e76d8094ab52b01ea3015471471290d011625e1763af61" +) + + +# image and the sdk has a fixed value because the version matters +@dsl.component(packages_to_install=["codeflare-sdk==0.21.1"], base_image=common_base_image) +def ray_fn() -> int: + import ray # noqa: PLC0415 + from codeflare_sdk import generate_cert # noqa: PLC0415 + from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration # noqa: PLC0415 + + cluster = Cluster( + ClusterConfiguration( + name="raytest", + num_workers=1, + head_cpu_requests=1, + head_cpu_limits=1, + head_memory_requests=4, + head_memory_limits=4, + worker_cpu_requests=1, + worker_cpu_limits=1, + worker_memory_requests=1, + worker_memory_limits=2, + image="quay.io/modh/ray:2.35.0-py39-cu121", + verify_tls=False + ) + ) + + # always clean the resources + cluster.down() + print(cluster.status()) + cluster.up() + cluster.wait_ready() + print(cluster.status()) + print(cluster.details()) + + ray_dashboard_uri = cluster.cluster_dashboard_uri() + ray_cluster_uri = cluster.cluster_uri() + print(ray_dashboard_uri) + print(ray_cluster_uri) + + # before proceeding make sure the cluster exists and the uri is not empty + assert ray_cluster_uri, "Ray cluster needs to be started and set before proceeding" + + # reset the ray context in case there's already one. 
+ ray.shutdown() + # establish connection to ray cluster + generate_cert.generate_tls_cert(cluster.config.name, cluster.config.namespace) + generate_cert.export_env(cluster.config.name, cluster.config.namespace) + ray.init(address=cluster.cluster_uri(), logging_level="DEBUG") + print("Ray cluster is up and running: ", ray.is_initialized()) + + @ray.remote + def train_fn(): + return 100 + + result = ray.get(train_fn.remote()) + assert 100 == result + ray.shutdown() + cluster.down() + return result + + +@dsl.pipeline( + name="Ray Integration Test", + description="Ray Integration Test", +) +def ray_integration(): + ray_fn().set_caching_options(False) + + +if __name__ == "__main__": + compiler.Compiler().compile(ray_integration, package_path=__file__.replace(".py", "_compiled.yaml")) diff --git a/test_data/sdk_compiled_pipelines/valid/ray_integration_compiled.yaml b/test_data/sdk_compiled_pipelines/valid/ray_integration_compiled.yaml new file mode 100644 index 00000000000..b867051764b --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/ray_integration_compiled.yaml @@ -0,0 +1,79 @@ +# PIPELINE DEFINITION +# Name: ray-integration-test +# Description: Ray Integration Test +components: + comp-ray-fn: + executorLabel: exec-ray-fn + outputDefinitions: + parameters: + Output: + parameterType: NUMBER_INTEGER +deploymentSpec: + executors: + exec-ray-fn: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - ray_fn + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' &&\ + \ python3 -m pip install --quiet --no-warn-script-location 'codeflare-sdk==v0.28.1'\ + \ && \"$0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef ray_fn() -> int:\n import ray # noqa: PLC0415\n from codeflare_sdk\ + \ import generate_cert # noqa: PLC0415\n from codeflare_sdk.ray.cluster\ + \ import Cluster, ClusterConfiguration # noqa: PLC0415\n\n cluster =\ + \ Cluster(\n ClusterConfiguration(\n name=\"raytest\"\ + ,\n num_workers=1,\n head_cpu_requests=1,\n \ + \ head_cpu_limits=1,\n head_memory_requests=4,\n \ + \ head_memory_limits=4,\n worker_cpu_requests=1,\n \ + \ worker_cpu_limits=1,\n worker_memory_requests=1,\n \ + \ worker_memory_limits=2,\n image=\"quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707\"\ + ,\n verify_tls=False\n )\n )\n\n # always clean\ + \ the resources\n cluster.down()\n print(cluster.status())\n cluster.up()\n\ + \ cluster.wait_ready()\n print(cluster.status())\n print(cluster.details())\n\ + \n ray_dashboard_uri = cluster.cluster_dashboard_uri()\n ray_cluster_uri\ + \ = cluster.cluster_uri()\n print(ray_dashboard_uri)\n print(ray_cluster_uri)\n\ + \n # before proceeding make sure the cluster exists and the uri is not\ + \ empty\n assert ray_cluster_uri, \"Ray cluster needs to be started and\ + \ set before proceeding\"\n\n # reset the ray context in case there's\ + \ already one.\n ray.shutdown()\n # establish connection to ray cluster\n\ + \ 
generate_cert.generate_tls_cert(cluster.config.name, cluster.config.namespace)\n\ + \ generate_cert.export_env(cluster.config.name, cluster.config.namespace)\n\ + \ ray.init(address=cluster.cluster_uri(), logging_level=\"DEBUG\")\n\ + \ print(\"Ray cluster is up and running: \", ray.is_initialized())\n\n\ + \ @ray.remote\n def train_fn():\n return 100\n\n result\ + \ = ray.get(train_fn.remote())\n assert 100 == result\n ray.shutdown()\n\ + \ cluster.down()\n return result\n\n" + image: registry.redhat.io/ubi9/python-311@sha256:82a16d7c4da926081c0a4cc72a84d5ce37859b50a371d2f9364313f66b89adf7 +pipelineInfo: + description: Ray Integration Test + name: ray-integration-test +root: + dag: + tasks: + ray-fn: + cachingOptions: {} + componentRef: + name: comp-ray-fn + taskInfo: + name: ray-fn +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/ray_job_integration_compiled.yaml b/test_data/sdk_compiled_pipelines/valid/ray_job_integration_compiled.yaml new file mode 100644 index 00000000000..96b69042a41 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/ray_job_integration_compiled.yaml @@ -0,0 +1,262 @@ +# PIPELINE DEFINITION +# Name: ray-integration-test +# Description: Ray Integration Test +# Inputs: +# AWS_ACCESS_KEY_ID: str +# AWS_DEFAULT_ENDPOINT: str +# AWS_SECRET_ACCESS_KEY: str +# AWS_STORAGE_BUCKET: str +# AWS_STORAGE_BUCKET_MNIST_DIR: str +components: + comp-ray-fn: + executorLabel: exec-ray-fn + inputDefinitions: + parameters: + AWS_ACCESS_KEY_ID: + parameterType: STRING + AWS_DEFAULT_ENDPOINT: + parameterType: STRING + AWS_SECRET_ACCESS_KEY: + parameterType: STRING + AWS_STORAGE_BUCKET: + parameterType: STRING + AWS_STORAGE_BUCKET_MNIST_DIR: + parameterType: STRING +deploymentSpec: + executors: + exec-ray-fn: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - ray_fn + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' &&\ + \ python3 -m pip install --quiet --no-warn-script-location 'codeflare-sdk==v0.28.1'\ + \ && \"$0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef ray_fn(\n AWS_DEFAULT_ENDPOINT: str,\n AWS_STORAGE_BUCKET:\ + \ str,\n AWS_ACCESS_KEY_ID: str,\n AWS_SECRET_ACCESS_KEY: str,\n \ + \ AWS_STORAGE_BUCKET_MNIST_DIR: str\n) -> None:\n import openshift\n\ + \ import subprocess\n import ray # noqa: PLC0415\n import tempfile\n\ + \ from codeflare_sdk import generate_cert # noqa: PLC0415\n from\ + \ codeflare_sdk.ray.cluster import Cluster, ClusterConfiguration # noqa:\ + \ PLC0415\n from codeflare_sdk.ray.client import RayJobClient\n from\ + \ time import sleep\n\n training_script = \"\"\"\nimport os\n\nimport\ + \ torch\nimport requests\nfrom pytorch_lightning import LightningModule,\ + \ Trainer\nfrom pytorch_lightning.callbacks.progress import TQDMProgressBar\n\ + from torch import nn\nfrom torch.nn import functional as F\nfrom torch.utils.data\ + \ import DataLoader, random_split, RandomSampler\nfrom torchmetrics import\ + \ Accuracy\nfrom torchvision import transforms\nfrom torchvision.datasets\ + \ import MNIST\nimport gzip\nimport shutil\nfrom minio import Minio\n\n\ + PATH_DATASETS = os.environ.get(\"PATH_DATASETS\", \".\")\nBATCH_SIZE = 256\ + \ if torch.cuda.is_available() else 64\n\nlocal_mnist_path = os.path.dirname(os.path.abspath(__file__))\n\ + \nprint(\"prior to running the trainer\")\nprint(\"MASTER_ADDR: is \", os.getenv(\"\ + MASTER_ADDR\"))\nprint(\"MASTER_PORT: is \", os.getenv(\"MASTER_PORT\"))\n\ + \nSTORAGE_BUCKET_EXISTS = \"AWS_DEFAULT_ENDPOINT\" in os.environ\nprint(\"\ + STORAGE_BUCKET_EXISTS: \", STORAGE_BUCKET_EXISTS)\n\nprint(f'Storage_Bucket_Default_Endpoint\ + \ : is {os.environ.get(\"AWS_DEFAULT_ENDPOINT\")}' if \"AWS_DEFAULT_ENDPOINT\"\ + \ in os.environ else \"\")\nprint(f'Storage_Bucket_Name : is {os.environ.get(\"\ + AWS_STORAGE_BUCKET\")}' if \"AWS_STORAGE_BUCKET\" in os.environ else \"\"\ + )\nprint(f'Storage_Bucket_Mnist_Directory : is {os.environ.get(\"AWS_STORAGE_BUCKET_MNIST_DIR\"\ + )}' if \"AWS_STORAGE_BUCKET_MNIST_DIR\" in os.environ else \"\")\n\n\nclass\ + \ LitMNIST(LightningModule):\n def __init__(self, data_dir=PATH_DATASETS,\ + \ hidden_size=64, learning_rate=2e-4):\n super().__init__()\n\n \ + \ # Set our init args as class attributes\n self.data_dir =\ + \ data_dir\n self.hidden_size = hidden_size\n self.learning_rate\ + \ = learning_rate\n\n # Hardcode some dataset specific attributes\n\ + \ self.num_classes = 10\n self.dims = (1, 28, 28)\n \ + \ channels, width, height = self.dims\n self.transform = transforms.Compose(\n\ + \ [\n transforms.ToTensor(),\n \ + \ transforms.Normalize((0.1307,), (0.3081,)),\n ]\n )\n\ + \n # Define PyTorch model\n self.model = nn.Sequential(\n\ + \ nn.Flatten(),\n nn.Linear(channels * width * height,\ + \ hidden_size),\n nn.ReLU(),\n nn.Dropout(0.1),\n\ + \ nn.Linear(hidden_size, hidden_size),\n nn.ReLU(),\n\ + \ 
nn.Dropout(0.1),\n nn.Linear(hidden_size, self.num_classes),\n\ + \ )\n\n self.val_accuracy = Accuracy(task=\"multiclass\",\ + \ num_classes=10)\n self.test_accuracy = Accuracy(task=\"multiclass\"\ + , num_classes=10)\n\n def forward(self, x):\n x = self.model(x)\n\ + \ return F.log_softmax(x, dim=1)\n\n def training_step(self, batch,\ + \ batch_idx):\n x, y = batch\n logits = self(x)\n loss\ + \ = F.nll_loss(logits, y)\n return loss\n\n def validation_step(self,\ + \ batch, batch_idx):\n x, y = batch\n logits = self(x)\n \ + \ loss = F.nll_loss(logits, y)\n preds = torch.argmax(logits,\ + \ dim=1)\n self.val_accuracy.update(preds, y)\n\n # Calling\ + \ self.log will surface up scalars for you in TensorBoard\n self.log(\"\ + val_loss\", loss, prog_bar=True)\n self.log(\"val_acc\", self.val_accuracy,\ + \ prog_bar=True)\n\n def test_step(self, batch, batch_idx):\n \ + \ x, y = batch\n logits = self(x)\n loss = F.nll_loss(logits,\ + \ y)\n preds = torch.argmax(logits, dim=1)\n self.test_accuracy.update(preds,\ + \ y)\n\n # Calling self.log will surface up scalars for you in TensorBoard\n\ + \ self.log(\"test_loss\", loss, prog_bar=True)\n self.log(\"\ + test_acc\", self.test_accuracy, prog_bar=True)\n\n def configure_optimizers(self):\n\ + \ optimizer = torch.optim.Adam(self.parameters(), lr=self.learning_rate)\n\ + \ return optimizer\n\n ####################\n # DATA RELATED\ + \ HOOKS\n ####################\n\n def prepare_data(self):\n \ + \ # download\n print(\"Downloading MNIST dataset...\")\n\n \ + \ if (\n STORAGE_BUCKET_EXISTS\n and os.environ.get(\"\ + AWS_DEFAULT_ENDPOINT\") != \"\"\n and os.environ.get(\"AWS_DEFAULT_ENDPOINT\"\ + ) != None\n ):\n print(\"Using storage bucket to download\ + \ datasets...\")\n\n dataset_dir = os.path.join(self.data_dir,\ + \ \"MNIST/raw\")\n endpoint = os.environ.get(\"AWS_DEFAULT_ENDPOINT\"\ + )\n access_key = os.environ.get(\"AWS_ACCESS_KEY_ID\")\n \ + \ secret_key = os.environ.get(\"AWS_SECRET_ACCESS_KEY\")\n \ + \ bucket_name = os.environ.get(\"AWS_STORAGE_BUCKET\")\n\n \ + \ # remove prefix if specified in storage bucket endpoint url\n \ + \ secure = True\n if endpoint.startswith(\"https://\"):\n\ + \ endpoint = endpoint[len(\"https://\") :]\n elif\ + \ endpoint.startswith(\"http://\"):\n endpoint = endpoint[len(\"\ + http://\") :]\n secure = False\n\n client = Minio(\n\ + \ endpoint,\n access_key=access_key,\n \ + \ secret_key=secret_key,\n cert_check=False,\n\ + \ secure=secure,\n )\n\n if not os.path.exists(dataset_dir):\n\ + \ os.makedirs(dataset_dir)\n else:\n \ + \ print(f\"Directory '{dataset_dir}' already exists\")\n\n \ + \ # To download datasets from storage bucket's specific directory, use\ + \ prefix to provide directory name\n prefix = os.environ.get(\"\ + AWS_STORAGE_BUCKET_MNIST_DIR\")\n # download all files from prefix\ + \ folder of storage bucket recursively\n for item in client.list_objects(bucket_name,\ + \ prefix=prefix, recursive=True):\n file_name = item.object_name[len(prefix)\ + \ + 1 :]\n dataset_file_path = os.path.join(dataset_dir,\ + \ file_name)\n if not os.path.exists(dataset_file_path):\n\ + \ client.fget_object(bucket_name, item.object_name, dataset_file_path)\n\ + \ else:\n print(f\"File-path '{dataset_file_path}'\ + \ already exists\")\n # Unzip files\n with\ + \ gzip.open(dataset_file_path, \"rb\") as f_in:\n with\ + \ open(dataset_file_path.split(\".\")[:-1][0], \"wb\") as f_out:\n \ + \ shutil.copyfileobj(f_in, f_out)\n # delete\ + \ zip file\n os.remove(dataset_file_path)\n \ + \ unzipped_filepath = 
dataset_file_path.split(\".\")[0]\n \ + \ if os.path.exists(unzipped_filepath):\n print(\n\ + \ f\"Unzipped and saved dataset file to path - {unzipped_filepath}\"\ + \n )\n download_datasets = False\n\n \ + \ else:\n print(\"Using default MNIST mirror reference to download\ + \ datasets...\")\n download_datasets = True\n\n MNIST(self.data_dir,\ + \ train=True, download=download_datasets)\n MNIST(self.data_dir,\ + \ train=False, download=download_datasets)\n\n def setup(self, stage=None):\n\ + \ # Assign train/val datasets for use in dataloaders\n if\ + \ stage == \"fit\" or stage is None:\n mnist_full = MNIST(\n\ + \ self.data_dir, train=True, transform=self.transform, download=False\n\ + \ )\n self.mnist_train, self.mnist_val = random_split(mnist_full,\ + \ [55000, 5000])\n\n # Assign test dataset for use in dataloader(s)\n\ + \ if stage == \"test\" or stage is None:\n self.mnist_test\ + \ = MNIST(\n self.data_dir, train=False, transform=self.transform,\ + \ download=False\n )\n\n def train_dataloader(self):\n \ + \ return DataLoader(\n self.mnist_train,\n batch_size=BATCH_SIZE,\n\ + \ sampler=RandomSampler(self.mnist_train, num_samples=1000),\n\ + \ )\n\n def val_dataloader(self):\n return DataLoader(self.mnist_val,\ + \ batch_size=BATCH_SIZE)\n\n def test_dataloader(self):\n return\ + \ DataLoader(self.mnist_test, batch_size=BATCH_SIZE)\n\n# Init DataLoader\ + \ from MNIST Dataset\n\nmodel = LitMNIST(data_dir=local_mnist_path)\n\n\ + print(\"GROUP: \", int(os.environ.get(\"GROUP_WORLD_SIZE\", 1)))\nprint(\"\ + LOCAL: \", int(os.environ.get(\"LOCAL_WORLD_SIZE\", 1)))\n\n# Initialize\ + \ a trainer\ntrainer = Trainer(\n # devices=1 if torch.cuda.is_available()\ + \ else None, # limiting got iPython runs\n max_epochs=3,\n callbacks=[TQDMProgressBar(refresh_rate=20)],\n\ + \ num_nodes=int(os.environ.get(\"GROUP_WORLD_SIZE\", 1)),\n devices=int(os.environ.get(\"\ + LOCAL_WORLD_SIZE\", 1)),\n strategy=\"ddp\",\n)\n\n# Train the model\n\ + trainer.fit(model)\n\"\"\"\n\n pip_requirements = \"\"\"\npytorch_lightning==2.4.0\n\ + torchmetrics==1.6.0\ntorchvision==0.20.1\nminio\n\"\"\"\n\n def assert_job_completion(status):\n\ + \ if status == \"SUCCEEDED\":\n print(f\"Job has completed:\ + \ '{status}'\")\n assert True\n else:\n print(f\"\ + Job has completed: '{status}'\")\n assert False\n\n def assert_jobsubmit_withlogin(cluster,\ + \ mnist_directory):\n with open(\"/run/secrets/kubernetes.io/serviceaccount/token\"\ + ) as token_file:\n auth_token = token_file.read()\n print(\"\ + Auth token: \" + auth_token)\n ray_dashboard = cluster.cluster_dashboard_uri()\n\ + \ header = {\"Authorization\": f\"Bearer {auth_token}\"}\n \ + \ client = RayJobClient(address=ray_dashboard, headers=header, verify=False)\n\ + \n submission_id = client.submit_job(\n entrypoint=\"\ + python mnist.py\",\n runtime_env={\n \"working_dir\"\ + : mnist_directory,\n \"pip\": mnist_directory + \"/mnist_pip_requirements.txt\"\ + ,\n \"env_vars\": {\n \"AWS_DEFAULT_ENDPOINT\"\ + : AWS_DEFAULT_ENDPOINT,\n \"AWS_STORAGE_BUCKET\": AWS_STORAGE_BUCKET,\n\ + \ \"AWS_ACCESS_KEY_ID\": AWS_ACCESS_KEY_ID,\n \ + \ \"AWS_SECRET_ACCESS_KEY\": AWS_SECRET_ACCESS_KEY,\n \ + \ \"AWS_STORAGE_BUCKET_MNIST_DIR\": AWS_STORAGE_BUCKET_MNIST_DIR\n\ + \ },\n },\n entrypoint_num_cpus=1,\n\ + \ )\n print(f\"Submitted job with ID: {submission_id}\")\n\ + \ done = False\n time = 0\n timeout = 900\n \ + \ while not done:\n status = client.get_job_status(submission_id)\n\ + \ if status.is_terminal():\n break\n \ + \ if not done:\n print(status)\n if timeout\ 
+ \ and time >= timeout:\n raise TimeoutError(f\"job has\ + \ timed out after waiting {timeout}s\")\n sleep(5)\n \ + \ time += 5\n\n logs = client.get_job_logs(submission_id)\n\ + \ print(logs)\n\n assert_job_completion(status)\n\n \ + \ client.delete_job(submission_id)\n\n cluster.down()\n\n cluster\ + \ = Cluster(\n ClusterConfiguration(\n name=\"raytest\"\ + ,\n num_workers=1,\n head_cpu_requests=1,\n \ + \ head_cpu_limits=1,\n head_memory_requests=4,\n \ + \ head_memory_limits=4,\n worker_cpu_requests=1,\n \ + \ worker_cpu_limits=1,\n worker_memory_requests=1,\n \ + \ worker_memory_limits=2,\n image=\"quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707\"\ + ,\n verify_tls=False\n )\n )\n\n # always clean\ + \ the resources\n cluster.down()\n print(cluster.status())\n cluster.up()\n\ + \ cluster.wait_ready()\n print(cluster.status())\n print(cluster.details())\n\ + \n ray_dashboard_uri = cluster.cluster_dashboard_uri()\n ray_cluster_uri\ + \ = cluster.cluster_uri()\n print(ray_dashboard_uri)\n print(ray_cluster_uri)\n\ + \n # before proceeding make sure the cluster exists and the uri is not\ + \ empty\n assert ray_cluster_uri, \"Ray cluster needs to be started and\ + \ set before proceeding\"\n assert ray_dashboard_uri, \"Ray dashboard\ + \ needs to be started and set before proceeding\"\n\n mnist_directory\ + \ = tempfile.mkdtemp(prefix=\"mnist-dir\")\n with open(mnist_directory\ + \ + \"/mnist.py\", \"w\") as mnist_file:\n mnist_file.write(training_script)\n\ + \ with open(mnist_directory + \"/mnist_pip_requirements.txt\", \"w\"\ + ) as pip_requirements_file:\n pip_requirements_file.write(pip_requirements)\n\ + \n assert_jobsubmit_withlogin(cluster, mnist_directory)\n\n cluster.down()\n\ + \n" + image: registry.redhat.io/ubi9/python-311@sha256:82a16d7c4da926081c0a4cc72a84d5ce37859b50a371d2f9364313f66b89adf7 +pipelineInfo: + description: Ray Integration Test + name: ray-integration-test +root: + dag: + tasks: + ray-fn: + cachingOptions: {} + componentRef: + name: comp-ray-fn + inputs: + parameters: + AWS_ACCESS_KEY_ID: + componentInputParameter: AWS_ACCESS_KEY_ID + AWS_DEFAULT_ENDPOINT: + componentInputParameter: AWS_DEFAULT_ENDPOINT + AWS_SECRET_ACCESS_KEY: + componentInputParameter: AWS_SECRET_ACCESS_KEY + AWS_STORAGE_BUCKET: + componentInputParameter: AWS_STORAGE_BUCKET + AWS_STORAGE_BUCKET_MNIST_DIR: + componentInputParameter: AWS_STORAGE_BUCKET_MNIST_DIR + taskInfo: + name: ray-fn + inputDefinitions: + parameters: + AWS_ACCESS_KEY_ID: + parameterType: STRING + AWS_DEFAULT_ENDPOINT: + parameterType: STRING + AWS_SECRET_ACCESS_KEY: + parameterType: STRING + AWS_STORAGE_BUCKET: + parameterType: STRING + AWS_STORAGE_BUCKET_MNIST_DIR: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/backend/test/v2/resources/sequential.py b/test_data/sdk_compiled_pipelines/valid/sequential_v1.py similarity index 100% rename from backend/test/v2/resources/sequential.py rename to test_data/sdk_compiled_pipelines/valid/sequential_v1.py diff --git a/test_data/sdk_compiled_pipelines/valid/sequential_v1.yaml b/test_data/sdk_compiled_pipelines/valid/sequential_v1.yaml new file mode 100644 index 00000000000..406d1f7fcb7 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/sequential_v1.yaml @@ -0,0 +1,71 @@ +# PIPELINE DEFINITION +# Name: sequential +# Inputs: +# param1: str +# param2: str +components: + comp-echo: + executorLabel: exec-echo + inputDefinitions: + parameters: + message: + parameterType: STRING + comp-echo-2: + 
executorLabel: exec-echo-2 + inputDefinitions: + parameters: + message: + parameterType: STRING +deploymentSpec: + executors: + exec-echo: + container: + args: + - echo {{$.inputs.parameters['message']}} + command: + - sh + - -c + image: library/bash + exec-echo-2: + container: + args: + - echo {{$.inputs.parameters['message']}} + command: + - sh + - -c + image: library/bash +pipelineInfo: + name: sequential +root: + dag: + tasks: + echo: + cachingOptions: + enableCache: true + componentRef: + name: comp-echo + inputs: + parameters: + message: + componentInputParameter: param1 + taskInfo: + name: echo + echo-2: + cachingOptions: + enableCache: true + componentRef: + name: comp-echo-2 + inputs: + parameters: + message: + componentInputParameter: param2 + taskInfo: + name: echo-2 + inputDefinitions: + parameters: + param1: + parameterType: STRING + param2: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/backend/test/v2/resources/sequential-v2.py b/test_data/sdk_compiled_pipelines/valid/sequential_v2.py similarity index 100% rename from backend/test/v2/resources/sequential-v2.py rename to test_data/sdk_compiled_pipelines/valid/sequential_v2.py diff --git a/test_data/sdk_compiled_pipelines/valid/sequential_v2.yaml b/test_data/sdk_compiled_pipelines/valid/sequential_v2.yaml new file mode 100644 index 00000000000..7cbf111ea8d --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/sequential_v2.yaml @@ -0,0 +1,76 @@ +# PIPELINE DEFINITION +# Name: sequential +# Inputs: +# url: str +components: + comp-download: + executorLabel: exec-download + inputDefinitions: + parameters: + url: + parameterType: STRING + outputDefinitions: + parameters: + downloaded: + parameterType: STRING + comp-echo: + executorLabel: exec-echo + inputDefinitions: + parameters: + downloaded: + parameterType: STRING +deploymentSpec: + executors: + exec-download: + container: + args: + - gsutil cp {{$.inputs.parameters['url']}} {{$.outputs.parameters['downloaded'].output_file}} + command: + - sh + - -c + image: google/cloud-sdk + exec-echo: + container: + args: + - echo {{$.inputs.parameters['downloaded']}} + command: + - sh + - -c + image: library/bash +pipelineInfo: + name: sequential +root: + dag: + tasks: + download: + cachingOptions: + enableCache: true + componentRef: + name: comp-download + inputs: + parameters: + url: + componentInputParameter: url + taskInfo: + name: download + echo: + cachingOptions: + enableCache: true + componentRef: + name: comp-echo + dependentTasks: + - download + inputs: + parameters: + downloaded: + taskOutputParameter: + outputParameterKey: downloaded + producerTask: download + taskInfo: + name: echo + inputDefinitions: + parameters: + url: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/take_nap.py b/test_data/sdk_compiled_pipelines/valid/take_nap.py new file mode 100644 index 00000000000..0659ada07d6 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/take_nap.py @@ -0,0 +1,31 @@ +from kfp import compiler, dsl + +common_base_image = ( + "registry.redhat.io/ubi8/python-39@sha256:3523b184212e1f2243e76d8094ab52b01ea3015471471290d011625e1763af61" +) + + +@dsl.component(base_image=common_base_image) +def take_nap(naptime_secs: int) -> str: + """Sleeps for secs""" + from time import sleep # noqa: PLC0415 + + print(f"Sleeping for {naptime_secs} seconds: Zzzzzz ...") + sleep(naptime_secs) + return "I'm awake now. Did I snore?" 
+ + +@dsl.component(base_image=common_base_image) +def wake_up(message: str): + """Wakes up from nap printing a message""" + print(message) + + +@dsl.pipeline(name="take-nap-pipeline", description="Pipeline that sleeps for 15 mins (900 secs)") +def take_nap_pipeline(naptime_secs: int = 900): + take_nap_task = take_nap(naptime_secs=naptime_secs).set_caching_options(False) + wake_up(message=take_nap_task.output).set_caching_options(False) + + +if __name__ == "__main__": + compiler.Compiler().compile(take_nap_pipeline, package_path=__file__.replace(".py", "_compiled.yaml")) diff --git a/test_data/sdk_compiled_pipelines/valid/take_nap_compiled.yaml b/test_data/sdk_compiled_pipelines/valid/take_nap_compiled.yaml new file mode 100644 index 00000000000..ff6ff70e1a2 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/take_nap_compiled.yaml @@ -0,0 +1,122 @@ +# PIPELINE DEFINITION +# Name: take-nap-pipeline +# Description: Pipeline that sleeps for 15 mins (900 secs) +# Inputs: +# naptime_secs: int [Default: 900.0] +components: + comp-take-nap: + executorLabel: exec-take-nap + inputDefinitions: + parameters: + naptime_secs: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-wake-up: + executorLabel: exec-wake-up + inputDefinitions: + parameters: + message: + parameterType: STRING +deploymentSpec: + executors: + exec-take-nap: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - take_nap + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef take_nap(naptime_secs: int) -> str:\n \"\"\"Sleeps for secs\"\ + \"\"\n from time import sleep # noqa: PLC0415\n\n print(f\"Sleeping\ + \ for {naptime_secs} seconds: Zzzzzz ...\")\n sleep(naptime_secs)\n \ + \ return \"I'm awake now. Did I snore?\"\n\n" + image: python:3.9 + exec-wake-up: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - wake_up + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef wake_up(message: str):\n \"\"\"Wakes up from nap printing\ + \ a message\"\"\"\n print(message)\n\n" + image: python:3.9 +pipelineInfo: + description: Pipeline that sleeps for 15 mins (900 secs) + name: take-nap-pipeline +root: + dag: + tasks: + take-nap: + cachingOptions: {} + componentRef: + name: comp-take-nap + inputs: + parameters: + naptime_secs: + componentInputParameter: naptime_secs + taskInfo: + name: take-nap + wake-up: + cachingOptions: {} + componentRef: + name: comp-wake-up + dependentTasks: + - take-nap + inputs: + parameters: + message: + taskOutputParameter: + outputParameterKey: Output + producerTask: take-nap + taskInfo: + name: wake-up + inputDefinitions: + parameters: + naptime_secs: + defaultValue: 900.0 + isOptional: true + parameterType: NUMBER_INTEGER +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/test_data/sdk_compiled_pipelines/valid/take_nap_pipeline_root.py b/test_data/sdk_compiled_pipelines/valid/take_nap_pipeline_root.py new file mode 100644 index 00000000000..54949638ffa --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/take_nap_pipeline_root.py @@ -0,0 +1,33 @@ +from kfp import compiler, dsl + +common_base_image = ( + "registry.redhat.io/ubi8/python-39@sha256:3523b184212e1f2243e76d8094ab52b01ea3015471471290d011625e1763af61" +) + + +@dsl.component(base_image=common_base_image) +def take_nap(naptime_secs: int) -> str: + """Sleeps for secs""" + from time import sleep # noqa: PLC0415 + + print(f"Sleeping for {naptime_secs} seconds: Zzzzzz ...") + sleep(naptime_secs) + return "I'm awake now. Did I snore?" 
+ + +@dsl.component(base_image=common_base_image) +def wake_up(message: str): + """Wakes up from nap printing a message""" + print(message) + + +@dsl.pipeline( + name="take-nap-pipeline", description="Pipeline that sleeps for 15 mins (900 secs)", pipeline_root="s3://change/me" +) +def take_nap_pipeline(naptime_secs: int = 900): + take_nap_task = take_nap(naptime_secs=naptime_secs).set_caching_options(False) + wake_up(message=take_nap_task.output).set_caching_options(False) + + +if __name__ == "__main__": + compiler.Compiler().compile(take_nap_pipeline, package_path=__file__.replace(".py", "_compiled.yaml")) diff --git a/test_data/sdk_compiled_pipelines/valid/take_nap_pipeline_root_compiled.yaml b/test_data/sdk_compiled_pipelines/valid/take_nap_pipeline_root_compiled.yaml new file mode 100644 index 00000000000..ee4dbbf9a9b --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/take_nap_pipeline_root_compiled.yaml @@ -0,0 +1,123 @@ +# PIPELINE DEFINITION +# Name: take-nap-pipeline +# Description: Pipeline that sleeps for 15 mins (900 secs) +# Inputs: +# naptime_secs: int [Default: 900.0] +components: + comp-take-nap: + executorLabel: exec-take-nap + inputDefinitions: + parameters: + naptime_secs: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-wake-up: + executorLabel: exec-wake-up + inputDefinitions: + parameters: + message: + parameterType: STRING +defaultPipelineRoot: s3://change/me +deploymentSpec: + executors: + exec-take-nap: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - take_nap + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef take_nap(naptime_secs: int) -> str:\n \"\"\"Sleeps for secs\"\ + \"\"\n from time import sleep # noqa: PLC0415\n\n print(f\"Sleeping\ + \ for {naptime_secs} seconds: Zzzzzz ...\")\n sleep(naptime_secs)\n \ + \ return \"I'm awake now. Did I snore?\"\n\n" + image: python:3.9 + exec-wake-up: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - wake_up + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef wake_up(message: str):\n \"\"\"Wakes up from nap printing\ + \ a message\"\"\"\n print(message)\n\n" + image: python:3.9 +pipelineInfo: + description: Pipeline that sleeps for 15 mins (900 secs) + name: take-nap-pipeline +root: + dag: + tasks: + take-nap: + cachingOptions: {} + componentRef: + name: comp-take-nap + inputs: + parameters: + naptime_secs: + componentInputParameter: naptime_secs + taskInfo: + name: take-nap + wake-up: + cachingOptions: {} + componentRef: + name: comp-wake-up + dependentTasks: + - take-nap + inputs: + parameters: + message: + taskOutputParameter: + outputParameterKey: Output + producerTask: take-nap + taskInfo: + name: wake-up + inputDefinitions: + parameters: + naptime_secs: + defaultValue: 900.0 + isOptional: true + parameterType: NUMBER_INTEGER +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/two_step_pipeline.py b/test_data/sdk_compiled_pipelines/valid/two_step_pipeline.py similarity index 100% rename from sdk/python/test_data/pipelines/two_step_pipeline.py rename to test_data/sdk_compiled_pipelines/valid/two_step_pipeline.py diff --git a/test_data/sdk_compiled_pipelines/valid/two_step_pipeline.yaml b/test_data/sdk_compiled_pipelines/valid/two_step_pipeline.yaml new file mode 100644 index 00000000000..002e2368a49 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/two_step_pipeline.yaml @@ -0,0 +1,91 @@ +# PIPELINE DEFINITION +# Name: simple-two-step-pipeline +# Inputs: +# text: str [Default: 'Hello KFP!'] +components: + comp-read-from-gcs: + executorLabel: exec-read-from-gcs + inputDefinitions: + artifacts: + input_gcs_path: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-write-to-gcs: + executorLabel: exec-write-to-gcs + inputDefinitions: + parameters: + text: + parameterType: STRING + outputDefinitions: + artifacts: + output_gcs_path: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-read-from-gcs: + container: + command: + - sh + - -c + - 'set -e -x + + gsutil cat "$0" + + ' + - '{{$.inputs.artifacts[''input_gcs_path''].uri}}' + image: google/cloud-sdk:slim + exec-write-to-gcs: + container: + command: + - sh + - -c + - 'set -e -x + + echo "$0" | gsutil cp - "$1" + + ' + - '{{$.inputs.parameters[''text'']}}' + - '{{$.outputs.artifacts[''output_gcs_path''].uri}}' + image: google/cloud-sdk:slim +pipelineInfo: + name: simple-two-step-pipeline +root: + dag: + tasks: + read-from-gcs: + cachingOptions: + enableCache: true + componentRef: + name: comp-read-from-gcs + dependentTasks: + - write-to-gcs + inputs: + artifacts: + input_gcs_path: + taskOutputArtifact: + outputArtifactKey: output_gcs_path + producerTask: write-to-gcs + taskInfo: + name: Consumer + write-to-gcs: + cachingOptions: + enableCache: true + componentRef: + name: comp-write-to-gcs + inputs: + parameters: 
+ text: + componentInputParameter: text + taskInfo: + name: Producer + inputDefinitions: + parameters: + text: + defaultValue: Hello KFP! + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.3 diff --git a/samples/test/two_step_with_uri_placeholder.py b/test_data/sdk_compiled_pipelines/valid/two_step_with_uri_placeholder.py similarity index 100% rename from samples/test/two_step_with_uri_placeholder.py rename to test_data/sdk_compiled_pipelines/valid/two_step_with_uri_placeholder.py diff --git a/test_data/sdk_compiled_pipelines/valid/upload_download_compiled.yaml b/test_data/sdk_compiled_pipelines/valid/upload_download_compiled.yaml new file mode 100644 index 00000000000..88c33338118 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/upload_download_compiled.yaml @@ -0,0 +1,229 @@ +# PIPELINE DEFINITION +# Name: test-data-passing-pipeline-1 +# Inputs: +# bucket_name: str +# mlpipeline_minio_artifact_secret: str +components: + comp-receive-file: + executorLabel: exec-receive-file + inputDefinitions: + artifacts: + incomingfile: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + saveartifact: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-send-file: + executorLabel: exec-send-file + inputDefinitions: + parameters: + file_size_bytes: + parameterType: NUMBER_INTEGER + outputDefinitions: + artifacts: + outgoingfile: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-test-uploaded-artifact: + executorLabel: exec-test-uploaded-artifact + inputDefinitions: + artifacts: + previous_step: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + parameters: + bucket_name: + parameterType: STRING + file_size_bytes: + parameterType: NUMBER_INTEGER + mlpipeline_minio_artifact_secret: + parameterType: STRING +deploymentSpec: + executors: + exec-receive-file: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - receive_file + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef receive_file(\n incomingfile: dsl.InputPath(),\n saveartifact:\ + \ dsl.OutputPath(),\n):\n import os # noqa: PLC0415\n import shutil\ + \ # noqa: PLC0415\n\n print(\"reading %s, size is %s\" % (incomingfile,\ + \ os.path.getsize(incomingfile)))\n\n with open(incomingfile, \"rb\"\ + ) as f:\n b = f.read(1)\n print(\"read byte: %s\" % b)\n \ + \ f.close()\n\n print(\"copying in %s to out %s\" % (incomingfile,\ + \ saveartifact))\n shutil.copyfile(incomingfile, saveartifact)\n\n" + image: python:3.9 + exec-send-file: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - send_file + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef send_file(\n file_size_bytes: int,\n outgoingfile: dsl.OutputPath(),\n\ + ):\n import os # noqa: PLC0415\n import zipfile # noqa: PLC0415\n\ + \n def create_large_file(file_path, size_in_bytes):\n with open(file_path,\ + \ \"wb\") as f:\n f.write(os.urandom(size_in_bytes))\n\n def\ + \ zip_file(input_file_path, output_zip_path):\n with zipfile.ZipFile(output_zip_path,\ + \ \"w\", compression=zipfile.ZIP_DEFLATED) as zipf:\n zipf.write(input_file_path,\ + \ os.path.basename(input_file_path))\n\n print(\"starting creating the\ + \ file...\")\n file_path = \"/tmp/large_file.txt\"\n create_large_file(file_path,\ + \ file_size_bytes)\n zip_file(file_path, outgoingfile)\n print(f\"\ + saved: {outgoingfile}\")\n\n" + image: python:3.9 + exec-test-uploaded-artifact: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - test_uploaded_artifact + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.3'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' &&\ + \ python3 -m pip install --quiet --no-warn-script-location 'minio' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef test_uploaded_artifact(\n previous_step: dsl.InputPath(),\n\ + \ file_size_bytes: int,\n mlpipeline_minio_artifact_secret: str,\n\ + \ bucket_name: str,\n):\n import base64 # noqa: PLC0415\n import\ + \ json # noqa: PLC0415\n\n from minio import Minio # noqa: PLC0415\n\ + \n def inner_decode(my_str):\n return base64.b64decode(my_str).decode(\"\ + utf-8\")\n\n mlpipeline_minio_artifact_secret = json.loads(mlpipeline_minio_artifact_secret.replace(\"\ + '\", '\"'))\n host = inner_decode(mlpipeline_minio_artifact_secret[\"\ + host\"])\n port = inner_decode(mlpipeline_minio_artifact_secret[\"port\"\ + ])\n access_key = inner_decode(mlpipeline_minio_artifact_secret[\"accesskey\"\ + ])\n secret_key = inner_decode(mlpipeline_minio_artifact_secret[\"secretkey\"\ + ])\n secure = inner_decode(mlpipeline_minio_artifact_secret[\"secure\"\ + ])\n secure = secure.lower() == \"true\"\n client = Minio(f\"{host}:{port}\"\ + , access_key=access_key, secret_key=secret_key, secure=secure)\n\n store_object\ + \ = previous_step.replace(f\"/s3/{bucket_name}/\", \"\")\n print(f\"\ + parsing {previous_step} to {store_object} \")\n data = client.get_object(bucket_name,\ + \ store_object)\n\n with open(\"my-testfile\", \"wb\") as 
file_data:\n\ + \ for d in data.stream(32 * 1024):\n file_data.write(d)\n\ + \ bytes_written = file_data.tell()\n\n print(file_size_bytes,\ + \ bytes_written)\n diff = round((bytes_written / file_size_bytes) - 1,\ + \ 3)\n print(diff)\n # if not matching, the test will fail\n assert\ + \ diff == 0\n\n" + image: python:3.9 +pipelineInfo: + name: test-data-passing-pipeline-1 +root: + dag: + tasks: + receive-file: + cachingOptions: {} + componentRef: + name: comp-receive-file + dependentTasks: + - send-file + inputs: + artifacts: + incomingfile: + taskOutputArtifact: + outputArtifactKey: outgoingfile + producerTask: send-file + taskInfo: + name: receive-file + send-file: + cachingOptions: {} + componentRef: + name: comp-send-file + inputs: + parameters: + file_size_bytes: + runtimeValue: + constant: 20971520.0 + taskInfo: + name: send-file + test-uploaded-artifact: + cachingOptions: {} + componentRef: + name: comp-test-uploaded-artifact + dependentTasks: + - receive-file + inputs: + artifacts: + previous_step: + taskOutputArtifact: + outputArtifactKey: saveartifact + producerTask: receive-file + parameters: + bucket_name: + componentInputParameter: bucket_name + file_size_bytes: + runtimeValue: + constant: 20971520.0 + mlpipeline_minio_artifact_secret: + componentInputParameter: mlpipeline_minio_artifact_secret + taskInfo: + name: test-uploaded-artifact + inputDefinitions: + parameters: + bucket_name: + parameterType: STRING + mlpipeline_minio_artifact_secret: + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.13.0 diff --git a/sdk/python/test_data/pipelines/xgboost_sample_pipeline.py b/test_data/sdk_compiled_pipelines/valid/xgboost_sample_pipeline.py similarity index 100% rename from sdk/python/test_data/pipelines/xgboost_sample_pipeline.py rename to test_data/sdk_compiled_pipelines/valid/xgboost_sample_pipeline.py diff --git a/test_data/sdk_compiled_pipelines/valid/xgboost_sample_pipeline.yaml b/test_data/sdk_compiled_pipelines/valid/xgboost_sample_pipeline.yaml new file mode 100644 index 00000000000..ae1fd78a740 --- /dev/null +++ b/test_data/sdk_compiled_pipelines/valid/xgboost_sample_pipeline.yaml @@ -0,0 +1,926 @@ +# PIPELINE DEFINITION +# Name: xgboost-sample-pipeline +components: + comp-chicago-taxi-trips-dataset: + executorLabel: exec-chicago-taxi-trips-dataset + inputDefinitions: + parameters: + format: + defaultValue: csv + isOptional: true + parameterType: STRING + limit: + defaultValue: 1000.0 + isOptional: true + parameterType: NUMBER_INTEGER + select: + defaultValue: trip_id,taxi_id,trip_start_timestamp,trip_end_timestamp,trip_seconds,trip_miles,pickup_census_tract,dropoff_census_tract,pickup_community_area,dropoff_community_area,fare,tips,tolls,extras,trip_total,payment_type,company,pickup_centroid_latitude,pickup_centroid_longitude,pickup_centroid_location,dropoff_centroid_latitude,dropoff_centroid_longitude,dropoff_centroid_location + isOptional: true + parameterType: STRING + where: + defaultValue: trip_start_timestamp>="1900-01-01" AND trip_start_timestamp<"2100-01-01" + isOptional: true + parameterType: STRING + outputDefinitions: + artifacts: + table: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-convert-csv-to-apache-parquet: + executorLabel: exec-convert-csv-to-apache-parquet + inputDefinitions: + artifacts: + data: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + output_data: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-xgboost-predict: 
+ executorLabel: exec-xgboost-predict + inputDefinitions: + artifacts: + data: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + model: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + parameters: + label_column: + isOptional: true + parameterType: NUMBER_INTEGER + outputDefinitions: + artifacts: + predictions: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-xgboost-predict-2: + executorLabel: exec-xgboost-predict-2 + inputDefinitions: + artifacts: + data: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + model: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + parameters: + label_column_name: + isOptional: true + parameterType: STRING + outputDefinitions: + artifacts: + predictions: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-xgboost-predict-3: + executorLabel: exec-xgboost-predict-3 + inputDefinitions: + artifacts: + data: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + model: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + parameters: + label_column_name: + isOptional: true + parameterType: STRING + outputDefinitions: + artifacts: + predictions: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-xgboost-predict-4: + executorLabel: exec-xgboost-predict-4 + inputDefinitions: + artifacts: + data: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + model: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + parameters: + label_column: + isOptional: true + parameterType: NUMBER_INTEGER + outputDefinitions: + artifacts: + predictions: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-xgboost-train: + executorLabel: exec-xgboost-train + inputDefinitions: + artifacts: + starting_model: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + isOptional: true + training_data: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + parameters: + booster: + defaultValue: gbtree + isOptional: true + parameterType: STRING + booster_params: + isOptional: true + parameterType: STRUCT + label_column: + defaultValue: 0.0 + isOptional: true + parameterType: NUMBER_INTEGER + learning_rate: + defaultValue: 0.3 + isOptional: true + parameterType: NUMBER_DOUBLE + max_depth: + defaultValue: 6.0 + isOptional: true + parameterType: NUMBER_INTEGER + min_split_loss: + defaultValue: 0.0 + isOptional: true + parameterType: NUMBER_DOUBLE + num_iterations: + defaultValue: 10.0 + isOptional: true + parameterType: NUMBER_INTEGER + objective: + defaultValue: reg:squarederror + isOptional: true + parameterType: STRING + outputDefinitions: + artifacts: + model: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + model_config: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-xgboost-train-2: + executorLabel: exec-xgboost-train-2 + inputDefinitions: + artifacts: + starting_model: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + isOptional: true + training_data: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + parameters: + booster: + defaultValue: gbtree + isOptional: true + parameterType: STRING + booster_params: + isOptional: true + parameterType: STRUCT + label_column_name: + parameterType: STRING + learning_rate: + defaultValue: 0.3 + isOptional: true + parameterType: NUMBER_DOUBLE + max_depth: + defaultValue: 
6.0 + isOptional: true + parameterType: NUMBER_INTEGER + min_split_loss: + defaultValue: 0.0 + isOptional: true + parameterType: NUMBER_DOUBLE + num_iterations: + defaultValue: 10.0 + isOptional: true + parameterType: NUMBER_INTEGER + objective: + defaultValue: reg:squarederror + isOptional: true + parameterType: STRING + outputDefinitions: + artifacts: + model: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + model_config: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-chicago-taxi-trips-dataset: + container: + command: + - sh + - -c + - "set -e -x -o pipefail\noutput_path=\"$0\"\nselect=\"$1\"\nwhere=\"$2\"\n\ + limit=\"$3\"\nformat=\"$4\"\nmkdir -p \"$(dirname \"$output_path\")\"\n\ + curl --get 'https://data.cityofchicago.org/resource/wrvz-psew.'\"${format}\"\ + \ \\\n --data-urlencode '$limit='\"${limit}\" \\\n --data-urlencode\ + \ '$where='\"${where}\" \\\n --data-urlencode '$select='\"${select}\"\ + \ \\\n | tr -d '\"' > \"$output_path\" # Removing unneeded quotes around\ + \ all numbers\n" + - '{{$.outputs.artifacts[''table''].path}}' + - '{{$.inputs.parameters[''select'']}}' + - '{{$.inputs.parameters[''where'']}}' + - '{{$.inputs.parameters[''limit'']}}' + - '{{$.inputs.parameters[''format'']}}' + image: byrnedo/alpine-curl@sha256:548379d0a4a0c08b9e55d9d87a592b7d35d9ab3037f4936f5ccd09d0b625a342 + exec-convert-csv-to-apache-parquet: + container: + args: + - --data + - '{{$.inputs.artifacts[''data''].path}}' + - --output-data + - '{{$.outputs.artifacts[''output_data''].path}}' + command: + - sh + - -c + - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location + 'pyarrow==0.17.1' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install + --quiet --no-warn-script-location 'pyarrow==0.17.1' --user) && "$0" "$@" + - python3 + - -u + - -c + - "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n\ + \ os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return\ + \ file_path\n\ndef convert_csv_to_apache_parquet(\n data_path,\n output_data_path,\n\ + ):\n '''Converts CSV table to Apache Parquet.\n\n [Apache Parquet](https://parquet.apache.org/)\n\ + \n Annotations:\n author: Alexey Volkov \n\ + \ '''\n from pyarrow import csv, parquet\n\n table = csv.read_csv(data_path)\n\ + \ parquet.write_table(table, output_data_path)\n\nimport argparse\n_parser\ + \ = argparse.ArgumentParser(prog='Convert csv to apache parquet', description='Converts\ + \ CSV table to Apache Parquet.\\n\\n [Apache Parquet](https://parquet.apache.org/)\\\ + n\\n Annotations:\\n author: Alexey Volkov ')\n\ + _parser.add_argument(\"--data\", dest=\"data_path\", type=str, required=True,\ + \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--output-data\", dest=\"\ + output_data_path\", type=_make_parent_dirs_and_return_path, required=True,\ + \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ + _output_files = _parsed_args.pop(\"_output_paths\", [])\n\n_outputs = convert_csv_to_apache_parquet(**_parsed_args)\n\ + \n_output_serializers = [\n\n]\n\nimport os\nfor idx, output_file in enumerate(_output_files):\n\ + \ try:\n os.makedirs(os.path.dirname(output_file))\n except\ + \ OSError:\n pass\n with open(output_file, 'w') as f:\n \ + \ f.write(_output_serializers[idx](_outputs[idx]))\n" + image: python:3.7 + exec-xgboost-predict: + container: + args: + - --data + - '{{$.inputs.artifacts[''data''].path}}' + - --model + - '{{$.inputs.artifacts[''model''].path}}' 
+ - '{"IfPresent": {"InputName": "label_column", "Then": ["--label-column", + "{{$.inputs.parameters[''label_column'']}}"]}}' + - --predictions + - '{{$.outputs.artifacts[''predictions''].path}}' + command: + - sh + - -c + - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location + 'xgboost==1.1.1' 'pandas==1.0.5' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 + -m pip install --quiet --no-warn-script-location 'xgboost==1.1.1' 'pandas==1.0.5' + --user) && "$0" "$@" + - python3 + - -u + - -c + - "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n\ + \ os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return\ + \ file_path\n\ndef xgboost_predict(\n data_path, # Also supports LibSVM\n\ + \ model_path,\n predictions_path,\n label_column = None,\n):\n\ + \ '''Make predictions using a trained XGBoost model.\n\n Args:\n \ + \ data_path: Path for the feature data in CSV format.\n model_path:\ + \ Path for the trained model in binary XGBoost format.\n predictions_path:\ + \ Output path for the predictions.\n label_column: Column containing\ + \ the label data.\n\n Annotations:\n author: Alexey Volkov \n\ + \ '''\n from pathlib import Path\n\n import numpy\n import pandas\n\ + \ import xgboost\n\n df = pandas.read_csv(\n data_path,\n \ + \ )\n\n if label_column is not None:\n df = df.drop(columns=[df.columns[label_column]])\n\ + \n testing_data = xgboost.DMatrix(\n data=df,\n )\n\n model\ + \ = xgboost.Booster(model_file=model_path)\n\n predictions = model.predict(testing_data)\n\ + \n Path(predictions_path).parent.mkdir(parents=True, exist_ok=True)\n\ + \ numpy.savetxt(predictions_path, predictions)\n\nimport argparse\n_parser\ + \ = argparse.ArgumentParser(prog='Xgboost predict', description='Make predictions\ + \ using a trained XGBoost model.\\n\\n Args:\\n data_path: Path\ + \ for the feature data in CSV format.\\n model_path: Path for the\ + \ trained model in binary XGBoost format.\\n predictions_path: Output\ + \ path for the predictions.\\n label_column: Column containing the\ + \ label data.\\n\\n Annotations:\\n author: Alexey Volkov ')\n\ + _parser.add_argument(\"--data\", dest=\"data_path\", type=str, required=True,\ + \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\", dest=\"\ + model_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"\ + --label-column\", dest=\"label_column\", type=int, required=False, default=argparse.SUPPRESS)\n\ + _parser.add_argument(\"--predictions\", dest=\"predictions_path\", type=_make_parent_dirs_and_return_path,\ + \ required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ + \n_outputs = xgboost_predict(**_parsed_args)\n" + image: python:3.7 + exec-xgboost-predict-2: + container: + args: + - --data + - '{{$.inputs.artifacts[''data''].path}}' + - --model + - '{{$.inputs.artifacts[''model''].path}}' + - '{"IfPresent": {"InputName": "label_column_name", "Then": ["--label-column-name", + "{{$.inputs.parameters[''label_column_name'']}}"]}}' + - --predictions + - '{{$.outputs.artifacts[''predictions''].path}}' + command: + - sh + - -c + - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location + 'xgboost==1.1.1' 'pandas==1.0.5' 'pyarrow==0.17.1' || PIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location 'xgboost==1.1.1' + 'pandas==1.0.5' 'pyarrow==0.17.1' --user) && "$0" "$@" + - python3 + - -u + - -c + - "def _make_parent_dirs_and_return_path(file_path: str):\n 
import os\n\ + \ os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return\ + \ file_path\n\ndef xgboost_predict(\n data_path,\n model_path,\n \ + \ predictions_path,\n label_column_name = None,\n):\n '''Make predictions\ + \ using a trained XGBoost model.\n\n Args:\n data_path: Path for\ + \ the feature data in Apache Parquet format.\n model_path: Path for\ + \ the trained model in binary XGBoost format.\n predictions_path:\ + \ Output path for the predictions.\n label_column_name: Optional.\ + \ Name of the column containing the label data that is excluded during the\ + \ prediction.\n\n Annotations:\n author: Alexey Volkov \n\ + \ '''\n from pathlib import Path\n\n import numpy\n import pandas\n\ + \ import xgboost\n\n # Loading data\n df = pandas.read_parquet(data_path)\n\ + \ if label_column_name:\n df = df.drop(columns=[label_column_name])\n\ + \n evaluation_data = xgboost.DMatrix(\n data=df,\n )\n\n \ + \ # Training\n model = xgboost.Booster(model_file=model_path)\n\n \ + \ predictions = model.predict(evaluation_data)\n\n Path(predictions_path).parent.mkdir(parents=True,\ + \ exist_ok=True)\n numpy.savetxt(predictions_path, predictions)\n\nimport\ + \ argparse\n_parser = argparse.ArgumentParser(prog='Xgboost predict', description='Make\ + \ predictions using a trained XGBoost model.\\n\\n Args:\\n data_path:\ + \ Path for the feature data in Apache Parquet format.\\n model_path:\ + \ Path for the trained model in binary XGBoost format.\\n predictions_path:\ + \ Output path for the predictions.\\n label_column_name: Optional.\ + \ Name of the column containing the label data that is excluded during the\ + \ prediction.\\n\\n Annotations:\\n author: Alexey Volkov ')\n\ + _parser.add_argument(\"--data\", dest=\"data_path\", type=str, required=True,\ + \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\", dest=\"\ + model_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"\ + --label-column-name\", dest=\"label_column_name\", type=str, required=False,\ + \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--predictions\", dest=\"\ + predictions_path\", type=_make_parent_dirs_and_return_path, required=True,\ + \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ + \n_outputs = xgboost_predict(**_parsed_args)\n" + image: python:3.7 + exec-xgboost-predict-3: + container: + args: + - --data + - '{{$.inputs.artifacts[''data''].path}}' + - --model + - '{{$.inputs.artifacts[''model''].path}}' + - '{"IfPresent": {"InputName": "label_column_name", "Then": ["--label-column-name", + "{{$.inputs.parameters[''label_column_name'']}}"]}}' + - --predictions + - '{{$.outputs.artifacts[''predictions''].path}}' + command: + - sh + - -c + - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location + 'xgboost==1.1.1' 'pandas==1.0.5' 'pyarrow==0.17.1' || PIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location 'xgboost==1.1.1' + 'pandas==1.0.5' 'pyarrow==0.17.1' --user) && "$0" "$@" + - python3 + - -u + - -c + - "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n\ + \ os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return\ + \ file_path\n\ndef xgboost_predict(\n data_path,\n model_path,\n \ + \ predictions_path,\n label_column_name = None,\n):\n '''Make predictions\ + \ using a trained XGBoost model.\n\n Args:\n data_path: Path for\ + \ the feature data in Apache Parquet format.\n model_path: Path for\ + \ the trained model in binary XGBoost 
format.\n predictions_path:\ + \ Output path for the predictions.\n label_column_name: Optional.\ + \ Name of the column containing the label data that is excluded during the\ + \ prediction.\n\n Annotations:\n author: Alexey Volkov \n\ + \ '''\n from pathlib import Path\n\n import numpy\n import pandas\n\ + \ import xgboost\n\n # Loading data\n df = pandas.read_parquet(data_path)\n\ + \ if label_column_name:\n df = df.drop(columns=[label_column_name])\n\ + \n evaluation_data = xgboost.DMatrix(\n data=df,\n )\n\n \ + \ # Training\n model = xgboost.Booster(model_file=model_path)\n\n \ + \ predictions = model.predict(evaluation_data)\n\n Path(predictions_path).parent.mkdir(parents=True,\ + \ exist_ok=True)\n numpy.savetxt(predictions_path, predictions)\n\nimport\ + \ argparse\n_parser = argparse.ArgumentParser(prog='Xgboost predict', description='Make\ + \ predictions using a trained XGBoost model.\\n\\n Args:\\n data_path:\ + \ Path for the feature data in Apache Parquet format.\\n model_path:\ + \ Path for the trained model in binary XGBoost format.\\n predictions_path:\ + \ Output path for the predictions.\\n label_column_name: Optional.\ + \ Name of the column containing the label data that is excluded during the\ + \ prediction.\\n\\n Annotations:\\n author: Alexey Volkov ')\n\ + _parser.add_argument(\"--data\", dest=\"data_path\", type=str, required=True,\ + \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\", dest=\"\ + model_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"\ + --label-column-name\", dest=\"label_column_name\", type=str, required=False,\ + \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--predictions\", dest=\"\ + predictions_path\", type=_make_parent_dirs_and_return_path, required=True,\ + \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ + \n_outputs = xgboost_predict(**_parsed_args)\n" + image: python:3.7 + exec-xgboost-predict-4: + container: + args: + - --data + - '{{$.inputs.artifacts[''data''].path}}' + - --model + - '{{$.inputs.artifacts[''model''].path}}' + - '{"IfPresent": {"InputName": "label_column", "Then": ["--label-column", + "{{$.inputs.parameters[''label_column'']}}"]}}' + - --predictions + - '{{$.outputs.artifacts[''predictions''].path}}' + command: + - sh + - -c + - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location + 'xgboost==1.1.1' 'pandas==1.0.5' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 + -m pip install --quiet --no-warn-script-location 'xgboost==1.1.1' 'pandas==1.0.5' + --user) && "$0" "$@" + - python3 + - -u + - -c + - "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n\ + \ os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return\ + \ file_path\n\ndef xgboost_predict(\n data_path, # Also supports LibSVM\n\ + \ model_path,\n predictions_path,\n label_column = None,\n):\n\ + \ '''Make predictions using a trained XGBoost model.\n\n Args:\n \ + \ data_path: Path for the feature data in CSV format.\n model_path:\ + \ Path for the trained model in binary XGBoost format.\n predictions_path:\ + \ Output path for the predictions.\n label_column: Column containing\ + \ the label data.\n\n Annotations:\n author: Alexey Volkov \n\ + \ '''\n from pathlib import Path\n\n import numpy\n import pandas\n\ + \ import xgboost\n\n df = pandas.read_csv(\n data_path,\n \ + \ )\n\n if label_column is not None:\n df = df.drop(columns=[df.columns[label_column]])\n\ + \n testing_data = xgboost.DMatrix(\n data=df,\n )\n\n model\ 
+ \ = xgboost.Booster(model_file=model_path)\n\n predictions = model.predict(testing_data)\n\ + \n Path(predictions_path).parent.mkdir(parents=True, exist_ok=True)\n\ + \ numpy.savetxt(predictions_path, predictions)\n\nimport argparse\n_parser\ + \ = argparse.ArgumentParser(prog='Xgboost predict', description='Make predictions\ + \ using a trained XGBoost model.\\n\\n Args:\\n data_path: Path\ + \ for the feature data in CSV format.\\n model_path: Path for the\ + \ trained model in binary XGBoost format.\\n predictions_path: Output\ + \ path for the predictions.\\n label_column: Column containing the\ + \ label data.\\n\\n Annotations:\\n author: Alexey Volkov ')\n\ + _parser.add_argument(\"--data\", dest=\"data_path\", type=str, required=True,\ + \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\", dest=\"\ + model_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"\ + --label-column\", dest=\"label_column\", type=int, required=False, default=argparse.SUPPRESS)\n\ + _parser.add_argument(\"--predictions\", dest=\"predictions_path\", type=_make_parent_dirs_and_return_path,\ + \ required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ + \n_outputs = xgboost_predict(**_parsed_args)\n" + image: python:3.7 + exec-xgboost-train: + container: + args: + - --training-data + - '{{$.inputs.artifacts[''training_data''].path}}' + - '{"IfPresent": {"InputName": "starting_model", "Then": ["--starting-model", + "{{$.inputs.artifacts[''starting_model''].path}}"]}}' + - '{"IfPresent": {"InputName": "label_column", "Then": ["--label-column", + "{{$.inputs.parameters[''label_column'']}}"]}}' + - '{"IfPresent": {"InputName": "num_iterations", "Then": ["--num-iterations", + "{{$.inputs.parameters[''num_iterations'']}}"]}}' + - '{"IfPresent": {"InputName": "booster_params", "Then": ["--booster-params", + "{{$.inputs.parameters[''booster_params'']}}"]}}' + - '{"IfPresent": {"InputName": "objective", "Then": ["--objective", "{{$.inputs.parameters[''objective'']}}"]}}' + - '{"IfPresent": {"InputName": "booster", "Then": ["--booster", "{{$.inputs.parameters[''booster'']}}"]}}' + - '{"IfPresent": {"InputName": "learning_rate", "Then": ["--learning-rate", + "{{$.inputs.parameters[''learning_rate'']}}"]}}' + - '{"IfPresent": {"InputName": "min_split_loss", "Then": ["--min-split-loss", + "{{$.inputs.parameters[''min_split_loss'']}}"]}}' + - '{"IfPresent": {"InputName": "max_depth", "Then": ["--max-depth", "{{$.inputs.parameters[''max_depth'']}}"]}}' + - --model + - '{{$.outputs.artifacts[''model''].path}}' + - --model-config + - '{{$.outputs.artifacts[''model_config''].path}}' + command: + - sh + - -c + - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location + 'xgboost==1.1.1' 'pandas==1.0.5' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 + -m pip install --quiet --no-warn-script-location 'xgboost==1.1.1' 'pandas==1.0.5' + --user) && "$0" "$@" + - python3 + - -u + - -c + - "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n\ + \ os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return\ + \ file_path\n\ndef xgboost_train(\n training_data_path, # Also supports\ + \ LibSVM\n model_path,\n model_config_path,\n starting_model_path\ + \ = None,\n\n label_column = 0,\n num_iterations = 10,\n booster_params\ + \ = None,\n\n # Booster parameters\n objective = 'reg:squarederror',\n\ + \ booster = 'gbtree',\n learning_rate = 0.3,\n min_split_loss =\ + \ 0,\n max_depth = 6,\n):\n '''Train an XGBoost 
model.\n\n Args:\n\ + \ training_data_path: Path for the training data in CSV format.\n\ + \ model_path: Output path for the trained model in binary XGBoost\ + \ format.\n model_config_path: Output path for the internal parameter\ + \ configuration of Booster as a JSON string.\n starting_model_path:\ + \ Path for the existing trained model to start from.\n label_column:\ + \ Column containing the label data.\n num_boost_rounds: Number of\ + \ boosting iterations.\n booster_params: Parameters for the booster.\ + \ See https://xgboost.readthedocs.io/en/latest/parameter.html\n objective:\ + \ The learning task and the corresponding learning objective.\n \ + \ See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters\n\ + \ The most common values are:\n \"reg:squarederror\"\ + \ - Regression with squared loss (default).\n \"reg:logistic\"\ + \ - Logistic regression.\n \"binary:logistic\" - Logistic regression\ + \ for binary classification, output probability.\n \"binary:logitraw\"\ + \ - Logistic regression for binary classification, output score before logistic\ + \ transformation\n \"rank:pairwise\" - Use LambdaMART to perform\ + \ pairwise ranking where the pairwise loss is minimized\n \"\ + rank:ndcg\" - Use LambdaMART to perform list-wise ranking where Normalized\ + \ Discounted Cumulative Gain (NDCG) is maximized\n\n Annotations:\n \ + \ author: Alexey Volkov \n '''\n \ + \ import pandas\n import xgboost\n\n df = pandas.read_csv(\n \ + \ training_data_path,\n )\n\n training_data = xgboost.DMatrix(\n\ + \ data=df.drop(columns=[df.columns[label_column]]),\n label=df[df.columns[label_column]],\n\ + \ )\n\n booster_params = booster_params or {}\n booster_params.setdefault('objective',\ + \ objective)\n booster_params.setdefault('booster', booster)\n booster_params.setdefault('learning_rate',\ + \ learning_rate)\n booster_params.setdefault('min_split_loss', min_split_loss)\n\ + \ booster_params.setdefault('max_depth', max_depth)\n\n starting_model\ + \ = None\n if starting_model_path:\n starting_model = xgboost.Booster(model_file=starting_model_path)\n\ + \n model = xgboost.train(\n params=booster_params,\n dtrain=training_data,\n\ + \ num_boost_round=num_iterations,\n xgb_model=starting_model\n\ + \ )\n\n # Saving the model in binary format\n model.save_model(model_path)\n\ + \n model_config_str = model.save_config()\n with open(model_config_path,\ + \ 'w') as model_config_file:\n model_config_file.write(model_config_str)\n\ + \nimport json\nimport argparse\n_parser = argparse.ArgumentParser(prog='Xgboost\ + \ train', description='Train an XGBoost model.\\n\\n Args:\\n \ + \ training_data_path: Path for the training data in CSV format.\\n \ + \ model_path: Output path for the trained model in binary XGBoost format.\\\ + n model_config_path: Output path for the internal parameter configuration\ + \ of Booster as a JSON string.\\n starting_model_path: Path for the\ + \ existing trained model to start from.\\n label_column: Column containing\ + \ the label data.\\n num_boost_rounds: Number of boosting iterations.\\\ + n booster_params: Parameters for the booster. 
See https://xgboost.readthedocs.io/en/latest/parameter.html\\\ + n objective: The learning task and the corresponding learning objective.\\\ + n See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters\\\ + n The most common values are:\\n \"reg:squarederror\"\ + \ - Regression with squared loss (default).\\n \"reg:logistic\"\ + \ - Logistic regression.\\n \"binary:logistic\" - Logistic regression\ + \ for binary classification, output probability.\\n \"binary:logitraw\"\ + \ - Logistic regression for binary classification, output score before logistic\ + \ transformation\\n \"rank:pairwise\" - Use LambdaMART to perform\ + \ pairwise ranking where the pairwise loss is minimized\\n \"\ + rank:ndcg\" - Use LambdaMART to perform list-wise ranking where Normalized\ + \ Discounted Cumulative Gain (NDCG) is maximized\\n\\n Annotations:\\\ + n author: Alexey Volkov ')\n_parser.add_argument(\"\ + --training-data\", dest=\"training_data_path\", type=str, required=True,\ + \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--starting-model\"\ + , dest=\"starting_model_path\", type=str, required=False, default=argparse.SUPPRESS)\n\ + _parser.add_argument(\"--label-column\", dest=\"label_column\", type=int,\ + \ required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--num-iterations\"\ + , dest=\"num_iterations\", type=int, required=False, default=argparse.SUPPRESS)\n\ + _parser.add_argument(\"--booster-params\", dest=\"booster_params\", type=json.loads,\ + \ required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--objective\"\ + , dest=\"objective\", type=str, required=False, default=argparse.SUPPRESS)\n\ + _parser.add_argument(\"--booster\", dest=\"booster\", type=str, required=False,\ + \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--learning-rate\",\ + \ dest=\"learning_rate\", type=float, required=False, default=argparse.SUPPRESS)\n\ + _parser.add_argument(\"--min-split-loss\", dest=\"min_split_loss\", type=float,\ + \ required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--max-depth\"\ + , dest=\"max_depth\", type=int, required=False, default=argparse.SUPPRESS)\n\ + _parser.add_argument(\"--model\", dest=\"model_path\", type=_make_parent_dirs_and_return_path,\ + \ required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model-config\"\ + , dest=\"model_config_path\", type=_make_parent_dirs_and_return_path, required=True,\ + \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ + \n_outputs = xgboost_train(**_parsed_args)\n" + image: python:3.7 + exec-xgboost-train-2: + container: + args: + - --training-data + - '{{$.inputs.artifacts[''training_data''].path}}' + - --label-column-name + - '{{$.inputs.parameters[''label_column_name'']}}' + - '{"IfPresent": {"InputName": "starting_model", "Then": ["--starting-model", + "{{$.inputs.artifacts[''starting_model''].path}}"]}}' + - '{"IfPresent": {"InputName": "num_iterations", "Then": ["--num-iterations", + "{{$.inputs.parameters[''num_iterations'']}}"]}}' + - '{"IfPresent": {"InputName": "booster_params", "Then": ["--booster-params", + "{{$.inputs.parameters[''booster_params'']}}"]}}' + - '{"IfPresent": {"InputName": "objective", "Then": ["--objective", "{{$.inputs.parameters[''objective'']}}"]}}' + - '{"IfPresent": {"InputName": "booster", "Then": ["--booster", "{{$.inputs.parameters[''booster'']}}"]}}' + - '{"IfPresent": {"InputName": "learning_rate", "Then": ["--learning-rate", + "{{$.inputs.parameters[''learning_rate'']}}"]}}' + - '{"IfPresent": 
{"InputName": "min_split_loss", "Then": ["--min-split-loss", + "{{$.inputs.parameters[''min_split_loss'']}}"]}}' + - '{"IfPresent": {"InputName": "max_depth", "Then": ["--max-depth", "{{$.inputs.parameters[''max_depth'']}}"]}}' + - --model + - '{{$.outputs.artifacts[''model''].path}}' + - --model-config + - '{{$.outputs.artifacts[''model_config''].path}}' + command: + - sh + - -c + - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location + 'xgboost==1.1.1' 'pandas==1.0.5' 'pyarrow==0.17.1' || PIP_DISABLE_PIP_VERSION_CHECK=1 + python3 -m pip install --quiet --no-warn-script-location 'xgboost==1.1.1' + 'pandas==1.0.5' 'pyarrow==0.17.1' --user) && "$0" "$@" + - python3 + - -u + - -c + - "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n\ + \ os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return\ + \ file_path\n\ndef xgboost_train(\n training_data_path,\n model_path,\n\ + \ model_config_path,\n label_column_name,\n\n starting_model_path\ + \ = None,\n\n num_iterations = 10,\n booster_params = None,\n\n \ + \ # Booster parameters\n objective = 'reg:squarederror',\n booster\ + \ = 'gbtree',\n learning_rate = 0.3,\n min_split_loss = 0,\n max_depth\ + \ = 6,\n):\n '''Train an XGBoost model.\n\n Args:\n training_data_path:\ + \ Path for the training data in Apache Parquet format.\n model_path:\ + \ Output path for the trained model in binary XGBoost format.\n model_config_path:\ + \ Output path for the internal parameter configuration of Booster as a JSON\ + \ string.\n starting_model_path: Path for the existing trained model\ + \ to start from.\n label_column_name: Name of the column containing\ + \ the label data.\n num_boost_rounds: Number of boosting iterations.\n\ + \ booster_params: Parameters for the booster. 
See https://xgboost.readthedocs.io/en/latest/parameter.html\n\ + \ objective: The learning task and the corresponding learning objective.\n\ + \ See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters\n\ + \ The most common values are:\n \"reg:squarederror\"\ + \ - Regression with squared loss (default).\n \"reg:logistic\"\ + \ - Logistic regression.\n \"binary:logistic\" - Logistic regression\ + \ for binary classification, output probability.\n \"binary:logitraw\"\ + \ - Logistic regression for binary classification, output score before logistic\ + \ transformation\n \"rank:pairwise\" - Use LambdaMART to perform\ + \ pairwise ranking where the pairwise loss is minimized\n \"\ + rank:ndcg\" - Use LambdaMART to perform list-wise ranking where Normalized\ + \ Discounted Cumulative Gain (NDCG) is maximized\n\n Annotations:\n \ + \ author: Alexey Volkov \n '''\n \ + \ import pandas\n import xgboost\n\n # Loading data\n df = pandas.read_parquet(training_data_path)\n\ + \ training_data = xgboost.DMatrix(\n data=df.drop(columns=[label_column_name]),\n\ + \ label=df[[label_column_name]],\n )\n # Training\n booster_params\ + \ = booster_params or {}\n booster_params.setdefault('objective', objective)\n\ + \ booster_params.setdefault('booster', booster)\n booster_params.setdefault('learning_rate',\ + \ learning_rate)\n booster_params.setdefault('min_split_loss', min_split_loss)\n\ + \ booster_params.setdefault('max_depth', max_depth)\n\n starting_model\ + \ = None\n if starting_model_path:\n starting_model = xgboost.Booster(model_file=starting_model_path)\n\ + \n model = xgboost.train(\n params=booster_params,\n dtrain=training_data,\n\ + \ num_boost_round=num_iterations,\n xgb_model=starting_model\n\ + \ )\n\n # Saving the model in binary format\n model.save_model(model_path)\n\ + \n model_config_str = model.save_config()\n with open(model_config_path,\ + \ 'w') as model_config_file:\n model_config_file.write(model_config_str)\n\ + \nimport json\nimport argparse\n_parser = argparse.ArgumentParser(prog='Xgboost\ + \ train', description='Train an XGBoost model.\\n\\n Args:\\n \ + \ training_data_path: Path for the training data in Apache Parquet format.\\\ + n model_path: Output path for the trained model in binary XGBoost\ + \ format.\\n model_config_path: Output path for the internal parameter\ + \ configuration of Booster as a JSON string.\\n starting_model_path:\ + \ Path for the existing trained model to start from.\\n label_column_name:\ + \ Name of the column containing the label data.\\n num_boost_rounds:\ + \ Number of boosting iterations.\\n booster_params: Parameters for\ + \ the booster. 
See https://xgboost.readthedocs.io/en/latest/parameter.html\\\ + n objective: The learning task and the corresponding learning objective.\\\ + n See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters\\\ + n The most common values are:\\n \"reg:squarederror\"\ + \ - Regression with squared loss (default).\\n \"reg:logistic\"\ + \ - Logistic regression.\\n \"binary:logistic\" - Logistic regression\ + \ for binary classification, output probability.\\n \"binary:logitraw\"\ + \ - Logistic regression for binary classification, output score before logistic\ + \ transformation\\n \"rank:pairwise\" - Use LambdaMART to perform\ + \ pairwise ranking where the pairwise loss is minimized\\n \"\ + rank:ndcg\" - Use LambdaMART to perform list-wise ranking where Normalized\ + \ Discounted Cumulative Gain (NDCG) is maximized\\n\\n Annotations:\\\ + n author: Alexey Volkov ')\n_parser.add_argument(\"\ + --training-data\", dest=\"training_data_path\", type=str, required=True,\ + \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--label-column-name\"\ + , dest=\"label_column_name\", type=str, required=True, default=argparse.SUPPRESS)\n\ + _parser.add_argument(\"--starting-model\", dest=\"starting_model_path\"\ + , type=str, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"\ + --num-iterations\", dest=\"num_iterations\", type=int, required=False, default=argparse.SUPPRESS)\n\ + _parser.add_argument(\"--booster-params\", dest=\"booster_params\", type=json.loads,\ + \ required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--objective\"\ + , dest=\"objective\", type=str, required=False, default=argparse.SUPPRESS)\n\ + _parser.add_argument(\"--booster\", dest=\"booster\", type=str, required=False,\ + \ default=argparse.SUPPRESS)\n_parser.add_argument(\"--learning-rate\",\ + \ dest=\"learning_rate\", type=float, required=False, default=argparse.SUPPRESS)\n\ + _parser.add_argument(\"--min-split-loss\", dest=\"min_split_loss\", type=float,\ + \ required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--max-depth\"\ + , dest=\"max_depth\", type=int, required=False, default=argparse.SUPPRESS)\n\ + _parser.add_argument(\"--model\", dest=\"model_path\", type=_make_parent_dirs_and_return_path,\ + \ required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model-config\"\ + , dest=\"model_config_path\", type=_make_parent_dirs_and_return_path, required=True,\ + \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ + \n_outputs = xgboost_train(**_parsed_args)\n" + image: python:3.7 +pipelineInfo: + name: xgboost-sample-pipeline +root: + dag: + tasks: + chicago-taxi-trips-dataset: + cachingOptions: + enableCache: true + componentRef: + name: comp-chicago-taxi-trips-dataset + inputs: + parameters: + limit: + runtimeValue: + constant: 10000.0 + select: + runtimeValue: + constant: tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total + where: + runtimeValue: + constant: trip_start_timestamp >= "2019-01-01" AND trip_start_timestamp + < "2019-02-01" + taskInfo: + name: chicago-taxi-trips-dataset + convert-csv-to-apache-parquet: + cachingOptions: + enableCache: true + componentRef: + name: comp-convert-csv-to-apache-parquet + dependentTasks: + - chicago-taxi-trips-dataset + inputs: + artifacts: + data: + taskOutputArtifact: + outputArtifactKey: table + producerTask: chicago-taxi-trips-dataset + taskInfo: + name: convert-csv-to-apache-parquet + xgboost-predict: + cachingOptions: + 
enableCache: true
+        componentRef:
+          name: comp-xgboost-predict
+        dependentTasks:
+        - chicago-taxi-trips-dataset
+        - xgboost-train
+        inputs:
+          artifacts:
+            data:
+              taskOutputArtifact:
+                outputArtifactKey: table
+                producerTask: chicago-taxi-trips-dataset
+            model:
+              taskOutputArtifact:
+                outputArtifactKey: model
+                producerTask: xgboost-train
+          parameters:
+            label_column:
+              runtimeValue:
+                constant: 0.0
+        taskInfo:
+          name: xgboost-predict
+      xgboost-predict-2:
+        cachingOptions:
+          enableCache: true
+        componentRef:
+          name: comp-xgboost-predict-2
+        dependentTasks:
+        - convert-csv-to-apache-parquet
+        - xgboost-train-2
+        inputs:
+          artifacts:
+            data:
+              taskOutputArtifact:
+                outputArtifactKey: output_data
+                producerTask: convert-csv-to-apache-parquet
+            model:
+              taskOutputArtifact:
+                outputArtifactKey: model
+                producerTask: xgboost-train-2
+          parameters:
+            label_column_name:
+              runtimeValue:
+                constant: tips
+        taskInfo:
+          name: xgboost-predict-2
+      xgboost-predict-3:
+        cachingOptions:
+          enableCache: true
+        componentRef:
+          name: comp-xgboost-predict-3
+        dependentTasks:
+        - convert-csv-to-apache-parquet
+        - xgboost-train
+        inputs:
+          artifacts:
+            data:
+              taskOutputArtifact:
+                outputArtifactKey: output_data
+                producerTask: convert-csv-to-apache-parquet
+            model:
+              taskOutputArtifact:
+                outputArtifactKey: model
+                producerTask: xgboost-train
+          parameters:
+            label_column_name:
+              runtimeValue:
+                constant: tips
+        taskInfo:
+          name: xgboost-predict-3
+      xgboost-predict-4:
+        cachingOptions:
+          enableCache: true
+        componentRef:
+          name: comp-xgboost-predict-4
+        dependentTasks:
+        - chicago-taxi-trips-dataset
+        - xgboost-train-2
+        inputs:
+          artifacts:
+            data:
+              taskOutputArtifact:
+                outputArtifactKey: table
+                producerTask: chicago-taxi-trips-dataset
+            model:
+              taskOutputArtifact:
+                outputArtifactKey: model
+                producerTask: xgboost-train-2
+          parameters:
+            label_column:
+              runtimeValue:
+                constant: 0.0
+        taskInfo:
+          name: xgboost-predict-4
+      xgboost-train:
+        cachingOptions:
+          enableCache: true
+        componentRef:
+          name: comp-xgboost-train
+        dependentTasks:
+        - chicago-taxi-trips-dataset
+        inputs:
+          artifacts:
+            training_data:
+              taskOutputArtifact:
+                outputArtifactKey: table
+                producerTask: chicago-taxi-trips-dataset
+          parameters:
+            label_column:
+              runtimeValue:
+                constant: 0.0
+            num_iterations:
+              runtimeValue:
+                constant: 200.0
+            objective:
+              runtimeValue:
+                constant: reg:squarederror
+        taskInfo:
+          name: xgboost-train
+      xgboost-train-2:
+        cachingOptions:
+          enableCache: true
+        componentRef:
+          name: comp-xgboost-train-2
+        dependentTasks:
+        - convert-csv-to-apache-parquet
+        inputs:
+          artifacts:
+            training_data:
+              taskOutputArtifact:
+                outputArtifactKey: output_data
+                producerTask: convert-csv-to-apache-parquet
+          parameters:
+            label_column_name:
+              runtimeValue:
+                constant: tips
+            num_iterations:
+              runtimeValue:
+                constant: 200.0
+            objective:
+              runtimeValue:
+                constant: reg:squarederror
+        taskInfo:
+          name: xgboost-train-2
+schemaVersion: 2.1.0
+sdkVersion: kfp-2.13.0
diff --git a/test_data/sdk_uncompiled_pipelines/README.md b/test_data/sdk_uncompiled_pipelines/README.md
new file mode 100644
index 00000000000..e630fa72e92
--- /dev/null
+++ b/test_data/sdk_uncompiled_pipelines/README.md
@@ -0,0 +1,5 @@
+## sdk_uncompiled_pipelines
+
+This package contains:
+1. V2 Python pipeline functions that are not currently used in any of the tests - these need to be checked for duplication and either moved to `sdk_compiled_pipelines` or deleted from here
+2. 
V1 components and Python pipeline functions - not used anywhere and will be dropped in the future
\ No newline at end of file
diff --git a/test_data/sdk_uncompiled_pipelines/hello-world-with-returning-component.py b/test_data/sdk_uncompiled_pipelines/hello-world-with-returning-component.py
new file mode 100644
index 00000000000..0d383a544af
--- /dev/null
+++ b/test_data/sdk_uncompiled_pipelines/hello-world-with-returning-component.py
@@ -0,0 +1,12 @@
+from kfp import dsl
+
+
+@dsl.component(base_image="public.ecr.aws/docker/library/python:3.12")
+def comp(message: str) -> str:
+    print(message)
+    return message
+
+
+@dsl.pipeline
+def my_pipeline(message: str) -> str:
+    return comp(message=message).output
diff --git a/samples/test/lightweight_python_functions_v2_pipeline.py b/test_data/sdk_uncompiled_pipelines/lightweight_python_functions_v2_pipeline.py
similarity index 100%
rename from samples/test/lightweight_python_functions_v2_pipeline.py
rename to test_data/sdk_uncompiled_pipelines/lightweight_python_functions_v2_pipeline.py
diff --git a/samples/test/lightweight_python_functions_v2_with_outputs.py b/test_data/sdk_uncompiled_pipelines/lightweight_python_functions_v2_with_outputs.py
similarity index 100%
rename from samples/test/lightweight_python_functions_v2_with_outputs.py
rename to test_data/sdk_uncompiled_pipelines/lightweight_python_functions_v2_with_outputs.py
diff --git a/backend/src/v2/compiler/testdata/nested_pipeline_all_level_retry.py b/test_data/sdk_uncompiled_pipelines/nested_pipeline_all_level_retry.py
similarity index 100%
rename from backend/src/v2/compiler/testdata/nested_pipeline_all_level_retry.py
rename to test_data/sdk_uncompiled_pipelines/nested_pipeline_all_level_retry.py
diff --git a/backend/src/v2/compiler/testdata/nested_pipeline_pipeline_retry.py b/test_data/sdk_uncompiled_pipelines/nested_pipeline_pipeline_retry.py
similarity index 100%
rename from backend/src/v2/compiler/testdata/nested_pipeline_pipeline_retry.py
rename to test_data/sdk_uncompiled_pipelines/nested_pipeline_pipeline_retry.py
diff --git a/backend/src/v2/compiler/testdata/nested_pipeline_sub_component_retry.py b/test_data/sdk_uncompiled_pipelines/nested_pipeline_sub_component_retry.py
similarity index 100%
rename from backend/src/v2/compiler/testdata/nested_pipeline_sub_component_retry.py
rename to test_data/sdk_uncompiled_pipelines/nested_pipeline_sub_component_retry.py
diff --git a/test_data/sdk_uncompiled_pipelines/task_config.py b/test_data/sdk_uncompiled_pipelines/task_config.py
new file mode 100644
index 00000000000..1ee845b7a06
--- /dev/null
+++ b/test_data/sdk_uncompiled_pipelines/task_config.py
@@ -0,0 +1,175 @@
+# Copyright 2025 The Kubeflow Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from kfp import dsl +from kfp import compiler +from kfp import kubernetes + + +@dsl.component( + task_config_passthroughs=[ + dsl.TaskConfigPassthrough(field=dsl.TaskConfigField.ENV, apply_to_task=True), + dsl.TaskConfigField.RESOURCES, + dsl.TaskConfigField.KUBERNETES_VOLUMES, + dsl.TaskConfigField.KUBERNETES_NODE_SELECTOR, + dsl.TaskConfigField.KUBERNETES_TOLERATIONS, + dsl.TaskConfigField.KUBERNETES_AFFINITY, + ], +) +def echo_task_config(workspace_path: str, task_config: dsl.TaskConfig): + import dataclasses + import os + import pprint + + assert task_config is not None + + actual = dataclasses.asdict(task_config) + pprint.pprint(actual) + + workspace_pvc_name = None + for volume in actual['volumes']: + if volume['name'] == 'kfp-workspace': + workspace_pvc_name = volume['persistentVolumeClaim']['claimName'] + break + assert workspace_pvc_name is not None + + expected = { + 'affinity': { + 'nodeAffinity': { + 'requiredDuringSchedulingIgnoredDuringExecution': { + 'nodeSelectorTerms': [{ + 'matchExpressions': [{ + 'key': 'disktype', + 'operator': 'In', + 'values': ['ssd'] + }] + }] + } + } + }, + 'env': [{ + 'name': 'ENV1', + 'value': 'val1' + }, { + 'name': 'ENV2', + 'value': 'val2' + }], + 'node_selector': { + 'disktype': 'ssd' + }, + 'resources': { + 'limits': { + 'cpu': '100m', + 'memory': '100Mi', + 'nvidia.com/gpu': '1' + }, + 'requests': { + 'cpu': '100m', + 'memory': '100Mi' + } + }, + 'tolerations': [{ + 'effect': 'NoExecute', + 'key': 'example-key', + 'operator': 'Exists', + 'tolerationSeconds': 3600 + }], + 'volume_mounts': [{ + 'mountPath': '/kfp-workspace', + 'name': 'kfp-workspace' + }, { + 'mountPath': '/data', + 'name': 'kubernetes-task-config-pvc' + }], + 'volumes': [{ + 'name': 'kfp-workspace', + 'persistentVolumeClaim': { + 'claimName': workspace_pvc_name + } + }, { + 'name': 'kubernetes-task-config-pvc', + 'persistentVolumeClaim': { + 'claimName': 'kubernetes-task-config-pvc' + } + }] + } + + assert actual == expected + + assert os.getenv('ENV1') == 'val1' + assert os.getenv('ENV2') == 'val2' + + +@dsl.pipeline( + name='task-config', + description='A simple intro pipeline', + pipeline_config=dsl.PipelineConfig( + workspace=dsl.WorkspaceConfig( + size='5Mi', + kubernetes=dsl.KubernetesWorkspaceConfig( + pvcSpecPatch={"storageClassName": "standard"})))) +def pipeline_task_config(): + """Pipeline that leverages dsl.TaskConfig.""" + pvc1 = kubernetes.CreatePVC( + pvc_name='kubernetes-task-config-pvc', + access_modes=['ReadWriteOnce'], + size='5Mi', + storage_class_name='standard', + ).set_caching_options(False) + echo_task_config_task = echo_task_config( + workspace_path=dsl.WORKSPACE_PATH_PLACEHOLDER).set_caching_options( + False).set_cpu_request('100m').set_memory_request( + '100Mi').set_cpu_limit('100m').set_memory_limit( + '100Mi').set_accelerator_type( + 'nvidia.com/gpu').set_accelerator_limit(1) + + kubernetes.mount_pvc( + echo_task_config_task, + pvc_name=pvc1.outputs['name'], + mount_path='/data', + ) + + kubernetes.add_node_selector( + echo_task_config_task, + label_key='disktype', + label_value='ssd', + ) + + kubernetes.add_toleration( + echo_task_config_task, + key='example-key', + operator='Exists', + effect='NoExecute', + toleration_seconds=3600, + ) + + kubernetes.add_node_affinity( + echo_task_config_task, + match_expressions=[{ + 'key': 'disktype', + 'operator': 'In', + 'values': ['ssd'] + }]) + + echo_task_config_task.set_env_variable(name='ENV1', value='val1') + echo_task_config_task.set_env_variable(name='ENV2', value='val2') + + delete_pvc1 = 
kubernetes.DeletePVC(pvc_name=pvc1.outputs['name']).after( + echo_task_config_task).set_caching_options(False) + + +if __name__ == "__main__": + # execute only if run as a script + compiler.Compiler().compile( + pipeline_func=pipeline_task_config, + package_path=__file__.replace('.py', '.yaml')) diff --git a/sdk/python/test_data/v1_component_yaml/add_component.yaml b/test_data/sdk_uncompiled_pipelines/v1/add_component.yaml similarity index 100% rename from sdk/python/test_data/v1_component_yaml/add_component.yaml rename to test_data/sdk_uncompiled_pipelines/v1/add_component.yaml diff --git a/sdk/python/test_data/v1_component_yaml/concat_placeholder_component.yaml b/test_data/sdk_uncompiled_pipelines/v1/concat_placeholder_component.yaml similarity index 100% rename from sdk/python/test_data/v1_component_yaml/concat_placeholder_component.yaml rename to test_data/sdk_uncompiled_pipelines/v1/concat_placeholder_component.yaml diff --git a/sdk/python/test_data/v1_component_yaml/fancy_trainer_component.yaml b/test_data/sdk_uncompiled_pipelines/v1/fancy_trainer_component.yaml similarity index 100% rename from sdk/python/test_data/v1_component_yaml/fancy_trainer_component.yaml rename to test_data/sdk_uncompiled_pipelines/v1/fancy_trainer_component.yaml diff --git a/sdk/python/test_data/v1_component_yaml/if_placeholder_component.yaml b/test_data/sdk_uncompiled_pipelines/v1/if_placeholder_component.yaml similarity index 100% rename from sdk/python/test_data/v1_component_yaml/if_placeholder_component.yaml rename to test_data/sdk_uncompiled_pipelines/v1/if_placeholder_component.yaml diff --git a/sdk/python/test_data/v1_component_yaml/ingestion_component.yaml b/test_data/sdk_uncompiled_pipelines/v1/ingestion_component.yaml similarity index 100% rename from sdk/python/test_data/v1_component_yaml/ingestion_component.yaml rename to test_data/sdk_uncompiled_pipelines/v1/ingestion_component.yaml diff --git a/sdk/python/test_data/pipelines/pipeline_with_ontology.py b/test_data/sdk_uncompiled_pipelines/v1/pipeline_with_ontology.py similarity index 81% rename from sdk/python/test_data/pipelines/pipeline_with_ontology.py rename to test_data/sdk_uncompiled_pipelines/v1/pipeline_with_ontology.py index 63dc89f2148..af85ad810ea 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_ontology.py +++ b/test_data/sdk_uncompiled_pipelines/v1/pipeline_with_ontology.py @@ -18,13 +18,9 @@ from kfp import components from kfp import dsl -test_data_dir = pathlib.Path(__file__).parent.parent / 'v1_component_yaml' +ingestion_op = components.load_component_from_file('ingestion_component.yaml') -ingestion_op = components.load_component_from_file( - str(test_data_dir / 'ingestion_component.yaml')) - -training_op = components.load_component_from_file( - str(test_data_dir / 'fancy_trainer_component.yaml')) +training_op = components.load_component_from_file('fancy_trainer_component.yaml') @dsl.pipeline( diff --git a/sdk/python/test_data/pipelines/pipeline_with_resource_spec.py b/test_data/sdk_uncompiled_pipelines/v1/pipeline_with_resource_spec.py similarity index 83% rename from sdk/python/test_data/pipelines/pipeline_with_resource_spec.py rename to test_data/sdk_uncompiled_pipelines/v1/pipeline_with_resource_spec.py index 29e1cc72ec4..848d3222c9d 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_resource_spec.py +++ b/test_data/sdk_uncompiled_pipelines/v1/pipeline_with_resource_spec.py @@ -18,13 +18,9 @@ from kfp import components from kfp import dsl -test_data_dir = pathlib.Path(__file__).parent.parent / 
'v1_component_yaml' +ingestion_op = components.load_component_from_file('ingestion_component.yaml') -ingestion_op = components.load_component_from_file( - str(test_data_dir / 'ingestion_component.yaml')) - -training_op = components.load_component_from_file( - str(test_data_dir / 'fancy_trainer_component.yaml')) +training_op = components.load_component_from_file('fancy_trainer_component.yaml') @dsl.pipeline( diff --git a/sdk/python/test_data/v1_component_yaml/serving_component.yaml b/test_data/sdk_uncompiled_pipelines/v1/serving_component.yaml similarity index 100% rename from sdk/python/test_data/v1_component_yaml/serving_component.yaml rename to test_data/sdk_uncompiled_pipelines/v1/serving_component.yaml diff --git a/sdk/python/test_data/v1_component_yaml/trainer_component.yaml b/test_data/sdk_uncompiled_pipelines/v1/trainer_component.yaml similarity index 100% rename from sdk/python/test_data/v1_component_yaml/trainer_component.yaml rename to test_data/sdk_uncompiled_pipelines/v1/trainer_component.yaml diff --git a/third_party/argo/README.md b/third_party/argo/README.md index daadb72ed40..f25d10fc353 100644 --- a/third_party/argo/README.md +++ b/third_party/argo/README.md @@ -13,7 +13,7 @@ Instructions: 1. Set version of argo you want to upgrade to, for example: ```bash - ARGO_TAG=v3.5.14 + ARGO_TAG=v3.6.7 ``` 1. ```bash @@ -23,6 +23,10 @@ Instructions: NOTE: At this time, release.sh is a no-op included only for maintaining consistency with other third-party dependencies +1. Update the versions listed in the compatibility matrix in [README.md](../../README.md). + +1. Consider bumping the minimum Argo version used in the GitHub workflows to match the prior version. + 1. Update [manifests](../../manifests) and other places in the code base that still uses the old argo image tag. * Upgrade [Argo upstream manifests](https://github.com/kubeflow/pipelines/blob/master/manifests/kustomize/third-party/argo/README.md#upgrade-argo). * Search for the old argo versions in the repo and update them to new versions based on the reference. diff --git a/third_party/argo/VERSION b/third_party/argo/VERSION index 9b7b834fddb..08d6564b75e 100644 --- a/third_party/argo/VERSION +++ b/third_party/argo/VERSION @@ -1 +1 @@ -v3.5.14 +v3.6.7 diff --git a/tools/k8s-native/migration.py b/tools/k8s-native/migration.py index 5b692a9fe8c..56212b44e2f 100644 --- a/tools/k8s-native/migration.py +++ b/tools/k8s-native/migration.py @@ -133,9 +133,9 @@ def convert_to_k8s_format(pipeline, pipeline_versions, add_prefix, namespace): "annotations": { "pipelines.kubeflow.org/original-id": original_id, }, - "spec": { - "displayName": display_name - } + }, + "spec": { + "displayName": display_name } } k8s_objects.append(pipeline_obj)